text
stringlengths
1
1.05M
#!/bin/sh

# Deploy the Hugo site build to GitHub Pages.
# Abort immediately if any command fails.
set -e

printf "\033[0;32mDeploying updates to GitHub...\033[0m\n"

# Build the project.
hugo # if using a theme, replace with `hugo -t <YOURTHEME>`

# Enter the generated output folder.
cd public

# Stage every change in the build output.
git add .

# Commit with the CLI arguments as the message when given,
# otherwise fall back to a timestamped default.
msg="rebuilding site $(date)"
if [ -n "$*" ]; then
	msg="$*"
fi
git commit -m "$msg"

# Publish the build. NOTE: the force-push overwrites remote history on
# the deploy branch; this is intentional for a generated-output repo.
git push origin master -f
<filename>scripts/log_table_generation/inv_log_generate.c /* * Copyright (C) 2018-2020, Advanced Micro Devices, Inc. All rights reserved. * * Redistribution and use in source and binary forms, with or without modification, * are permitted provided that the following conditions are met: * 1. Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * 3. Neither the name of the copyright holder nor the names of its contributors * may be used to endorse or promote products derived from this software without * specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. * IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, * INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, * OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. 
* */ /* Program for generating F_inv table for log function The values of the table are generated as : Let table_size be the number of values required in the table Let step_size = 2 * table_size Then, each value of the table = 2 * [ step_size / ( i + step_size) ], where i = 0,2,4,....,step_size libquadmath is used for getting the required precision */ #include<stdio.h> #include<quadmath.h> #include<stdint.h> typedef union { double doublex; unsigned long long int hexval; } doubleword; void main() { __float128 step=2048.0, i=0.0; __float128 val; doubleword vald; while (i<= step) { val = step / (i + step); val = 2.0 * val ; vald.doublex = val; printf(" .quad 0x%llx # %1.23f\n", vald.hexval, vald.doublex); i+=2.0q; } }
// import the necessary packages
const express = require('express');
const bodyParser = require('body-parser');

// create a new express app
const app = express();

// add body-parser middleware so req.body contains the parsed JSON payload
app.use(bodyParser.json());

// in-memory list of user records (no persistence — resets on restart)
let users = [
  { id: 1, name: "John Smith", age: 25 },
  { id: 2, name: "Jane Doe", age: 30 }
];

// GET /users — list every user
app.get('/users', (req, res) => {
  res.status(200).json(users);
});

// GET /users/:id — fetch one user by numeric id
app.get('/users/:id', (req, res) => {
  // route params are strings; convert explicitly instead of relying on `==`
  const user = users.find(u => u.id === Number(req.params.id));
  if (!user) {
    // Fix: a missing resource is 404 Not Found, not 400 Bad Request.
    res.status(404).json({error: 'User does not exist.'});
  } else {
    res.status(200).json(user);
  }
});

// POST /users — create a user from the request body
app.post('/users', (req, res) => {
  const user = {
    // NOTE(review): random ids can collide; an incrementing counter or
    // UUID would be safer, but the random scheme is kept for compatibility.
    id: Math.floor(Math.random() * Math.floor(9999)),
    ...req.body
  };
  users.push(user);
  res.status(201).json(user);
});

// PUT /users/:id — replace the name and age of an existing user
app.put('/users/:id', (req, res) => {
  const user = users.find(u => u.id === Number(req.params.id));
  if (!user) {
    // Fix: 404 for a missing resource (was 400).
    res.status(404).json({error: 'User does not exist.'});
  } else {
    user.name = req.body.name;
    user.age = req.body.age;
    res.status(200).json(user);
  }
});

// DELETE /users/:id — remove a user
app.delete('/users/:id', (req, res) => {
  const userIndex = users.findIndex(u => u.id === Number(req.params.id));
  if (userIndex === -1) {
    // Fix: 404 for a missing resource (was 400).
    res.status(404).json({error: 'User does not exist.'});
  } else {
    users.splice(userIndex, 1);
    res.status(200).json({success: true});
  }
});

// start the API server
const port = process.env.PORT || 3000;
app.listen(port, () => {
  console.log(`Listening on port ${port}...`);
});
package org.junithelper.core.meta;

import static org.junit.Assert.*;

import org.junit.Test;

/** Smoke tests for {@link ExceptionMeta}: the class loads and can be constructed. */
public class ExceptionMetaTest {

    /** The class reference itself must be resolvable. */
    @Test
    public void type() throws Exception {
        assertNotNull(ExceptionMeta.class);
    }

    /** A default-constructed instance must be obtainable. */
    @Test
    public void instantiation() throws Exception {
        ExceptionMeta meta = new ExceptionMeta();
        assertNotNull(meta);
    }
}
#!/bin/bash
# Run the test suite with the pytest installed in the project's local virtualenv.
./.venv/bin/pytest
<gh_stars>0 import { DataTableComponent } from './data-table/data-table.component'; import { PageSizeChooserComponent } from './page-size-chooser/page-size-chooser.component'; import { PaginatorComponent } from './paginator/paginator.component'; import { SearchableDropdownComponent } from './searchable-dropdown/searchable-dropdown.component'; export const components = [ DataTableComponent, PaginatorComponent, PageSizeChooserComponent, SearchableDropdownComponent ];
/*
 * Copyright (c) 2018 Tsinghua University, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef OPENFLOW_ONF_TT_EXT_H
#define OPENFLOW_ONF_TT_EXT_H 1

#include <openflow/openflow.h>
#include <openvswitch/types.h>

/* The following experimental extensions, proposed by Tsinghua University,
 * are not yet standardized, so they are not included in openflow.h.
 * These extensions are based on the ONF experimenter mechanism. */

/* Tsinghua experimental Time-Triggered (TT) mechanism extension.
 *
 * Wire layout of an experimenter message:
 *
 *   +---------+---------------+--------+
 *   | version | ONF_VENDOR_ID | length |
 *   +---------+---------------+--------+
 *   |      struct onf_exp_header       |
 *   +----------------------------------+
 */

/* Experimenter extension message header.
 * All multi-byte fields are big-endian on the wire (ovs_be* types). */
struct onf_exp_header {
    struct ofp_header header;
    ovs_be32 vendor;            /* ONF_VENDOR_ID */
    ovs_be32 subtype;           /* See the TXT numbers in ofp-msgs.h
                                 * (original comment said "ofp-mags.h" —
                                 * presumed typo). */
};
OFP_ASSERT(sizeof(struct onf_exp_header) == 16);

/* TT flow control commands. */
enum onf_tt_flow_ctrl_command {
    ONF_TFCC_ADD   = 0,         /* New flow. */
    ONF_TFCC_CLEAR = 1,         /* Delete old flow table. */
    ONF_TFCC_QUERY = 2,         /* Get whole tt flow table. */
};

/* TT flow control message type: request/reply pairs for table
 * add / delete / query operations. */
enum onf_tt_flow_ctrl_type {
    ONF_TFCT_ADD_TABLE_REQUEST    = 0,
    ONF_TFCT_ADD_TABLE_REPLY      = 1,
    ONF_TFCT_DELETE_TABLE_REQUEST = 2,
    ONF_TFCT_DELETE_TABLE_REPLY   = 3,
    ONF_TFCT_QUERY_TABLE_REQUEST  = 4,
    ONF_TFCT_QUERY_TABLE_REPLY    = 5,
};

/* Message structure for ONF_ET_TT_FLOW_CONTROL. */
struct onf_tt_flow_ctrl {
    ovs_be16 table_id;
    ovs_be16 type;              /* ONF_TFCT_*. */
};
OFP_ASSERT(sizeof(struct onf_tt_flow_ctrl) == 4);

/* Message structure for ONF_ET_TT_FLOW_MOD (original comment said
 * "FLOW_MDOD" — presumed typo).
 * The OFP_ASSERT below pins the struct to exactly 56 bytes; field order
 * and the explicit pad[6] are part of the wire format — do not reorder. */
struct onf_tt_flow_mod {
    /* Entry control */
    ovs_be16 table_id;
    uint8_t  pad[6];            /* Explicit padding: aligns metadata and
                                 * keeps the wire layout fixed. */
    ovs_be32 metadata;

    /* Entry field */
    ovs_be32 port;              /* The entry related port. */
    ovs_be32 etype;             /* Send entry or receive entry. */
    ovs_be32 flow_id;           /* The identity of a flow. */
    ovs_be64 base_offset;       /* The scheduled time that the flow packet
                                 * is received or sent. */
    ovs_be64 period;            /* The scheduling period. */
    ovs_be32 buffer_id;         /* Buffered packet to apply to. */
    ovs_be32 packet_size;       /* The flow packet size. */
    ovs_be64 execute_time;      /* The time this entry takes effect. */
};
OFP_ASSERT(sizeof(struct onf_tt_flow_mod) == 56);

#endif /* openflow/onf-tt-ext.h */
const find = require('./utils/find')

/**
 * Find all entities matching a query.
 *
 * @example
 * findAll('errorSet', 'My error set')
 *   .then(errorSets => console.log('Found errorSets ', errorSets))
 *   .catch(console.error)
 *
 * @param {string} entity - name of entity
 * @param {string} query
 * @param {Object} [options]
 * @param {number} [options.limit=9999] - maximum number of results
 *   (JSDoc previously said boolean; the default 9999 shows it is numeric)
 * @param {*} [options.fields] - fields to include — presumably passed
 *   straight through to ./utils/find; verify its expected shape there
 * @returns {Promise<Array<Object>, Error>} found entities
 */
function findAll (entity, query, { limit = 9999, fields } = {}) {
  // Fix: the `= {}` default makes the options argument truly optional, as
  // the JSDoc advertises. Previously `findAll(entity, query)` threw a
  // TypeError while destructuring `undefined`.
  return find(entity, query, { limit, fields })
}

module.exports = findAll
#!/usr/bin/env bash
# udroid / hippo manager: install, launch and upgrade an Ubuntu proot
# environment inside Termux via proot-distro.

version=2

# Allow overriding the manager's git branch through the environment.
if [ -n "$HIPPO_BRANCH" ]; then
    BRANCH="$HIPPO_BRANCH"
fi

CACHE_ROOT="${HOME}/.uoa-cache-root"
TPREFIX="/data/data/com.termux/files"
SCRIPT_DIR="${TPREFIX}/usr/etc/proot-distro"
INSTALL_FOLDER="${TPREFIX}/usr/var/lib/proot-distro/installed-rootfs"
# NOTE(review): this uses ${PREFIX} (Termux's env var, which already ends
# in /usr) rather than ${TPREFIX} defined above — the resulting path
# contains "/usr/usr/"; looks like a bug, TODO confirm intended path.
DLCACHE="${PREFIX}/usr/var/lib/proot-distro/dlcache"
HIPPO_DIR="${INSTALL_FOLDER}/udroid"
HIPPO_SCRIPT_FILE="${SCRIPT_DIR}/udroid.sh"
# SOCIAL_PLATFORM="\e[34mhttps://discord.gg/TAqaG5sEfW"
# HIPPO_DIR = "${INSTALL_FOLDER}/${HIPPO_DEFAULT}"
# HIPPO_SCRIPT_FILE="${SCRIPT_DIR}/udroid.sh"

# * Useful functions
# die()    exit with code 1 after printing the given string
# warn()   like die() without the exit (used when exiting is not necessary)
# shout()  print a message prominently
# lshout() print a message in a standard way
# msg()    plain echo to stderr
# (RED/RST/DS/DC color variables are expected from the environment; they
# expand to nothing when unset, which is harmless.)
die    () { echo -e "${RED}!! ${*}${RST}";exit 1 ;:;}
warn   () { echo -e "${RED}?? ${*}${RST}";:;}
shout  () { echo -e "${DS}=> ${*}${RST}";:; }
lshout () { echo -e "${DC}-> ${*}${RST}";:; }
msg    () { echo -e "\e[38;5;228m ${*} \e[0m" >&2 ;:; }

# True when both the rootfs directory and the plugin script exist.
function __check_for_hippo() {
    if [ -d ${HIPPO_DIR} ] && [ -f ${HIPPO_SCRIPT_FILE} ]; then
        return 0;
    else
        return 1;
    fi
}

# True when the proot-distro plugin script is installed.
function __check_for_plugin() {
    if [ -f ${HIPPO_SCRIPT_FILE} ]; then
        return 0
    else
        return 1
    fi
}

# True when the rootfs looks extracted (its /bin exists).
function __check_for_filesystem() {
    if [ -d ${HIPPO_DIR}/bin ]; then
        return 0
    else
        return 1
    fi
}

# Sanity check that we are running under Termux's bash.
function __verify_bin_path() {
    BINPATH="${SHELL}"
    if [ -n "$BINPATH" ]; then
        if [ "$BINPATH" != "/data/data/com.termux/files/usr/bin/bash" ]; then
            msg "This has to be done inside termux environment"
            die "\$SHELL != $BINPATH"
            exit 1
        fi
    else
        warn "SHELL value is empty.."
    fi
}

# Check the remote version file and run the upgrade script when a newer
# manager version is published.
function __upgrade() {
    # setup downloader
    if ! command -v axel >> /dev/null; then
        apt install axel -y
    fi
    mkdir -p "${CACHE_ROOT}"
    axel -o "${CACHE_ROOT}"/version https://raw.githubusercontent.com/RandomCoderOrg/fs-manager-udroid/main/version >> /dev/null || {
        die "error"
    }
    origin_version=$(cat "${CACHE_ROOT}"/version)
    rm -rf "${CACHE_ROOT}"
    if [ "$origin_version" -gt "$version" ]; then
        lshout "upgrdae avalibe to \e[1;32mV${origin_version}\e[0m"
    elif [ "$origin_version" -eq "$version" ]; then
        lshout "You are on latest version \e[1;32mV${origin_version}\e[0m"
        exit 0
    else
        die "Upgrader hit unexpected condition..."
        exit 1
    fi
    if start_upgrade; then
        # NOTE(review): start_upgrade downloads "upgrade.sh" but this runs
        # "${CACHE_ROOT}/upgrade" (no .sh) — the names don't match; verify.
        bash -x "${CACHE_ROOT}"/upgrade --summary
        rm -rf "${CACHE_ROOT}"
    else
        die "Error"
    fi
}

# Download the upgrade patch script and execute it.
function start_upgrade() {
    mkdir -p "${CACHE_ROOT}"
    axel -o "${CACHE_ROOT}"/upgrade.sh https://raw.githubusercontent.com/RandomCoderOrg/fs-manager-udroid/main/etc/scripts/upgrade_patch/upgrade.sh >> /dev/null || {
        die "Error"; exit 1
    }
    # NOTE(review): runs upgrade.sh relative to the *current* directory,
    # but it was just saved into ${CACHE_ROOT} — likely should be
    # "${CACHE_ROOT}/upgrade.sh"; verify.
    bash -x upgrade.sh || {
        return 1
    }
    return 0
}

# Re-clone the manager repository and run its installer (full reinstall).
function __force_uprade_hippo() {
    if [ ! -d "$CACHE_ROOT" ]; then
        mkdir "$CACHE_ROOT"
    else
        rm -rf "${CACHE_ROOT}/fs-manager-udroid"
    fi
    FSM_URL="https://github.com/RandomCoderOrg/fs-manager-udroid"
    if [ -z "${BRANCH}" ]; then
        git clone ${FSM_URL} "${CACHE_ROOT}/fs-manager-udroid" || die "failed to clone repo"
    else
        git clone -b "${BRANCH}" "${CACHE_ROOT}/fs-manager-udroid" || die "failed to clone repo"
    fi
    if [ -f "${CACHE_ROOT}"/fs-manager-udroid/install.sh ]; then
        cd "${CACHE_ROOT}"/fs-manager-udroid || die "failed to cd ..."
        bash install.sh || die "failed to install manager..."
    fi
}

# Filter wget's \r-based progress output down to a single updating bar:
# suppress everything until two consecutive CR/NL characters are seen.
progressfilt () {
    local flag=false c count cr=$'\r' nl=$'\n'
    while IFS='' read -d '' -rn 1 c
    do
        if $flag
        then
            printf '%s' "$c"
        else
            if [[ $c != $cr && $c != $nl ]]
            then
                count=0
            else
                ((count++))
                if ((count > 1))
                then
                    flag=true
                fi
            fi
        fi
    done
}

# Download one URL into the current directory with a progress bar.
_download () {
    link=$1
    wget --progress=bar:force $link || die "download failed"
}

# Print usage information.
function __help() {
    msg "udroid - termux Version ${version} by saicharankandukuri"
    msg
    msg "A bash script to make basic action(login, vncserver) easier for ubuntu-on-android project"
    msg
    msg "Usage ${0} [options]"
    msg
    msg "Options:"
    msg "--install To try installing udroid"
    msg "--help To display this message"
    msg "--enable-dbus To start terminal session with dbus enabled"
    msg "--force-upgrade To reinstall this script of origin"
    msg "startvnc To start udroid vncserver"
    msg "stopvnc To stop udroid vncserver"
    msg "--enable-dbus-startvnc To start vnc with dbus"
    msg "------------------" # links go here
    msg "for additional documentation see: \e[1;34mhttps://github.com/RandomCoderOrg/ubuntu-on-android#basic-usage"
    msg "report issues and feature requests at: \e[1;34mhttps://github.com/RandomCoderOrg/ubuntu-on-android/issues"
    # msg "Join the community at DISCORD -> $SOCIAL_PLATFORM"
    msg "------------------"
}

# Handle a rootfs that is shipped as several tarball parts: source the
# plugin to get PARTS/FINAL_NAME/SPLIT_TARBALL_FS, download every part,
# concatenate them into the proot-distro download cache, then install.
function __split_tarball_handler() {
    target_plugin=$1
    if [ -n "$target_plugin" ] && [ -f "$target_plugin" ]; then
        source $target_plugin
    else
        die "Could not find script in tmp directory: This attribute is not for manuall entry"
    fi
    # Single-tarball case: just install the plugin and continue.
    if ! $SPLIT_TARBALL_FS; then
        cp "$target_plugin" "$SCRIPT_DIR/udroid.sh"
        shift; _lauch_or_install "$@"
    fi
    shout "starting download.. this may take some time"
    if [ ! -d ${CACHE_ROOT} ]; then
        mkdir -v ${CACHE_ROOT}
    fi
    mkdir -p "${CACHE_ROOT}/fs-cache"
    # count no. of parts
    x=0
    for part in $PARTS; do
        ((x=x+1))
    done
    cd ${CACHE_ROOT}/fs-cache || die "failed.. cd"
    # start download
    y=0
    for links in $PARTS; do
        ((y=y+1))
        shout "downloading [$(basename $links)] part($y/$x).. "
        _download $links
    done
    cd $HOME || die "failed.. cd"
    shout "combining parts to one.. ( ̄︶ ̄)↗"
    # NOTE(review): the glob is inside double quotes, so `*` is NOT
    # expanded — cat will look for a file literally named `*`. Probably
    # should be: cat "${CACHE_ROOT}/fs-cache/"* — verify.
    cat "${CACHE_ROOT}/fs-cache/*" > "${DLCACHE}/${FINAL_NAME}"
    shout "triggering installation.."
    shift ; _lauch_or_install "$@"
}

# Install udroid when it is missing, otherwise log in to it, forwarding
# any extra arguments to `proot-distro login`. Also starts pulseaudio and
# copies the startvnc/stopvnc helpers into the rootfs on first use.
function _lauch_or_install() {
    if ! __check_for_plugin; then
        echo -e "Plugin at ${HIPPO_SCRIPT_FILE} is missing ......"
        echo -e "May be this not a correct installation...."
        echo -e "Try to notice us at \e[34m${SOCIAL_PLATFORM}\e[0m"
        exit 1
    fi
    if ! __check_for_filesystem; then
        echo -e "Installing udroid..........."
        if proot-distro install udroid; then
            echo -e "Installation Done......\a\a" # \a triggers vibration in termux
            echo "Waiting..."
            sleep 4
            clear
            echo -e "Now You can launch your ubuntu 21.04 with command \e[1;32mudroid\e[0m"
            echo -e "use udroid --help for more option and comming up features"
        fi
    else
        #######################################################################################################
        # Thanks to @GxmerSam Sam Alarie, @mizzunet, @Andre-cmd-rgb for the issues randome ideas and suggestion
        pulseaudio --start --load="module-native-protocol-tcp auth-ip-acl=127.0.0.1 auth-anonymous=1" --exit-idle-time=-1 >> /dev/null
        # Install the vnc helper scripts into the rootfs once.
        if [[ -f "${CACHE_ROOT}"/fs-manager-udroid/etc/scripts/vncserver/startvnc.sh ]] && [[ ! -f ${HIPPO_DIR}/bin/startvnc ]]; then
            DIR="${CACHE_ROOT}/fs-manager-udroid/etc/scripts/vncserver/startvnc.sh"
            cp "${DIR}" ${HIPPO_DIR}/bin/startvnc
            proot-distro login udroid -- chmod 775 /bin/startvnc
        fi
        if [ -f "${CACHE_ROOT}"/fs-manager-udroid/etc/scripts/vncserver/stopvnc.sh ] && [ ! -f ${HIPPO_DIR}/bin/stopvnc ]; then
            DIR="${CACHE_ROOT}/fs-manager-udroid/etc/scripts/vncserver/stopvnc.sh"
            cp "${DIR}" ${HIPPO_DIR}/bin/stopvnc
            proot-distro login udroid -- chmod 775 /bin/stopvnc
        fi
        proot-distro login udroid "$@" || warn "program exited unexpectedly..."
    fi
}

# __verify_bin_path

# Command dispatcher: with no arguments, install-or-login.
if [ $# -ge 1 ]; then
    case "$1" in
        upgrade) __upgrade;;
        --init-setup-tarball) shift 1; __split_tarball_handler "$@";;
        --force-upgrade) __force_uprade_hippo;;
        --enable-dbus) shift 1; _lauch_or_install --bind /dev/null:/proc/sys/kernel/cap_last_cap "$@" ;;
        "--enable-dbus-startvnc") shift 1; _lauch_or_install --bind /dev/null:/proc/sys/kernel/cap_last_cap -- startvnc "$@" ;;
        "--enable-dbus-stopvnc") shift 1; _lauch_or_install --bind /dev/null:/proc/sys/kernel/cap_last_cap -- stopvnc "$@" ;; # no use
        --install) _lauch_or_install;;
        --help) __help;;
        startvnc)
            if __check_for_hippo; then
                proot-distro login udroid --no-kill-on-exit -- startvnc
            else
                echo -e "This command is supposed to run after installing udroid"
                echo -e "Use \e[1;32mhippo --install\e[0m install"
                echo -e "\e[32mError:\e[0m udroid not found"
            fi
            ;;
        # NOTE(review): label is "stoptvnc" but the help text advertises
        # "stopvnc" — typing the documented command falls through to the
        # default branch. Presumed typo; verify before renaming (the inner
        # "stoptvnc" command name would need the same check inside the rootfs).
        stoptvnc)
            if __check_for_hippo; then
                proot-distro login udroid --no-kill-on-exit -- stoptvnc
            else
                echo -e "This command is supposed to run after installing udroid"
                echo -e "Use \e[1;32mhippo --install\e[0m install"
                echo -e "\e[32mError:\e[0m udroid not found"
            fi
            ;;
        *) _lauch_or_install "$@";;
    esac
else
    _lauch_or_install "$@"
fi
/**
 *
 */
package indiv.rakesh.microservices;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.bind.annotation.RestController;

/**
 * REST controller that serves canned product review comments, keyed by
 * product SKU id. The data is hard-coded demo content loaded once in a
 * static initializer; there is no persistence layer.
 *
 * @author rakesh.malireddy
 */
@RestController
public class CommentsController {

    // In-memory store: SKU id -> list of review comments.
    // NOTE(review): static mutable map shared by all requests; fine for
    // read-only demo data, but not safe if writes were ever added.
    static Map<String,List<String>> comments = null;

    static {
        comments = new HashMap<>();

        // Reviews for SKU "learn001".
        List<String> learn001Comments = new ArrayList<>();
        learn001Comments.add("I am very much a machine learning novice although with a technical background much of the math with its focus on linear algebra and probability and statistics is familiar. I have purchased several other ML books in the past year and have found some better than others. This new book I really like. I downloaded the sample first chapter which is length and a great read and I knew the authors writing style and combination of theory (without a lot of equations) and practice (the hands-on) via Python was just right to encourage me to purchase and read the rest of this book.");
        learn001Comments.add("This book has the quality and utility I expect from O'Reilly titles. There is a mix of theory and example I prefer and when learning new topics, and I have always found these titles provide a quick entry into a new topic. The author is very knowledgeable and covers a lot of material. This book has a better combination of breadth and depth than most O'Reilly titles. Different methods and approaches are described in greater detail than usually found in these books.");
        // Hands-On Machine Learning with Scikit-Learn and TensorFlow: Concepts, Tools, and Techniques to Build Intelligent Systems

        // Reviews for SKU "learn002".
        List<String> learn002Comments = new ArrayList<>();
        learn002Comments.add("Great starter book on the concept. High level selection of topics, conversational presentation, and most importantly a fast read. This is an excellent strategy because it covers all the essentials, while still leaving you enough time to dig into some application or play with a build as you go along (which is ultimately the point). Leaves you free time to explore the topic and truly digest it, without assuming prior experience. Well done!");
        // Machine Learning With Random Forests And Decision Trees: A Visual Guide For Beginners

        // Reviews for SKU "con001".
        List<String> con001Comments = new ArrayList<>();
        con001Comments.add("Better documentation then the kubernetes.io one. I bought this book with the hope that Getting started means instructions and help to install Kubernetes. No, the author uses ready made cloud providers, where Kubernetes is already installed. Too bad that I didn't see this in the preview of this book. Hope you know it through this comment.");
        con001Comments.add("It is fine, given the limited alternatives out there, but it reads as if the author gathered together a lot of material and examples and put them into the book without enough attention to providing conceptual background. To a large extent, the approach reads like: type these commands and you'll get it. The book is slightly dated now, since the software is changing. This makes the demo-first method more of an issue than it otherwise would be.");
        // Getting Started with Kubernetes

        comments.put("learn001", learn001Comments);
        comments.put("learn002",learn002Comments);
        comments.put("con001", con001Comments);
    }

    /**
     * Returns the review comments for the given SKU id, or an empty list
     * when the SKU is unknown (never null, never a 404).
     */
    @RequestMapping("/comments/{skuId}")
    public @ResponseBody List<String> getCommentsOnProduct(@PathVariable String skuId){
        return comments.getOrDefault(skuId, new ArrayList<>());
    }
}
def is_anagram(str1, str2):
    """Return True if str1 and str2 are anagrams of each other.

    Two strings are anagrams when they contain exactly the same characters
    with the same multiplicities. The comparison is case-sensitive and
    counts whitespace, matching the original implementation.

    Args:
        str1: First string.
        str2: Second string.

    Returns:
        bool: True if the strings are anagrams (two empty strings are).
    """
    # sorted() on a string yields a sorted list of its characters, so this
    # is equivalent to the original list()/.sort()/compare sequence but
    # idiomatic and side-effect free.
    return sorted(str1) == sorted(str2)
from functools import reduce
from faker import Faker
from faker.providers import address
from sys import argv
from getopt import getopt
from pandas import DataFrame


def generate_row(carry, faker):
    """Append one fake building record to the column-oriented accumulator.

    Args:
        carry: dict of column-name -> list, mutated in place.
        faker: a Faker instance providing the random values.

    Returns:
        The same `carry` dict (so it can be threaded through reduce()).
    """
    lat, long, _, country_code, city = faker.location_on_land()
    carry['housing_type'].append(
        faker.random_element(['Housing', 'Hospital', 'Train station', 'Business',
                              'Police Station', 'Shop']))
    carry['latitude'].append(lat)
    carry['longitude'].append(long)
    carry['country_code'].append(country_code)
    carry['surface (m2)'].append(faker.random_int(min=20, max=300))
    carry['height (m)'].append(faker.random_int(min=2, max=20))
    carry['city'].append(city)
    return carry


def generate_random_dataset(row_count):
    """Build a DataFrame of `row_count` fake building rows."""
    faker = Faker()
    faker.add_provider(address)
    return DataFrame(
        data=reduce(
            lambda carry, index: generate_row(carry, faker),
            range(0, row_count),
            {'housing_type': [], 'latitude': [], 'longitude': [], 'country_code': [],
             'surface (m2)': [], 'height (m)': [], 'city': []}
        ),
        columns=['housing_type', 'latitude', 'longitude', 'country_code',
                 'surface (m2)', 'height (m)', 'city']
    )


def main():
    """CLI entry point.

    Options:
        -o / --output-file PATH  destination CSV (default: output/data.csv)
        -r / --rows N            number of rows to generate (default: 1000)
    """
    # BUG FIX: the short-option string was 'or', which declares -o and -r
    # as *flag* options with no argument, so `-o file` / `-r 100` never
    # received their values. 'o:r:' tells getopt each takes an argument.
    options, *_ = getopt(argv[1:], 'o:r:', ['output-file=', 'rows='])
    output_file_src = 'output/data.csv'
    row_count = 1000
    for opt, arg in options:
        if opt in ('-o', '--output-file'):
            output_file_src = arg
        elif opt in ('-r', '--rows'):
            row_count = int(arg)
    generate_random_dataset(row_count).to_csv(output_file_src)


if __name__ == "__main__":
    main()
package com.uber.myapplication;

import android.app.Activity;
import android.os.Bundle;
import android.os.PersistableBundle;

/**
 * Activity demonstrating field initialization across the two onCreate
 * lifecycle overloads.
 */
public class CoreActivity extends Activity {

    // Initialised in the one-argument onCreate.
    private Object mOnCreateInitialisedField1;

    // Initialised in the two-argument (persistable-state) onCreate.
    private Object mOnCreateInitialisedField2;

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        mOnCreateInitialisedField1 = new Object();
    }

    @Override
    public void onCreate(Bundle savedInstanceState, PersistableBundle persistentState) {
        // Fix: delegate to the matching two-argument super overload; the
        // original called super.onCreate(savedInstanceState), silently
        // dropping persistentState from the framework's lifecycle handling.
        super.onCreate(savedInstanceState, persistentState);
        mOnCreateInitialisedField2 = new Object();
    }
}
import React from "react"; import "./LanguageSwitcherDropdown.scss"; import { languages } from "../../../i18n"; import useStoreSettingsSelector from "../../../hooks/useStoreSettingsSelector"; import { useDispatch } from "react-redux"; import { changeLanguage } from "../../../store/actions/settings"; import { Select, MenuItem } from "@material-ui/core"; // import FlagIcon from "components/FlagIcon"; const LanguageSwitcher = () => { const storeSettings = useStoreSettingsSelector(); const dispatch = useDispatch(); /** * Dispatch language selection persistance in the store. * * @param {React.ChangeEvent<*>} e Language selection click. * @returns {Void} None. */ const handleLanguageSelection = (e) => { let targetValue = e.target.value; dispatch(changeLanguage(targetValue)); }; return ( <div className="languageSwitcherDropdown"> <Select className="languageSelect" classes={{ selectMenu: "menu", }} onChange={handleLanguageSelection} value={storeSettings.locale} > {languages.map((lang) => ( <MenuItem key={lang.locale} value={lang.locale}> {/* <FlagIcon className="flag" code={lang.countryCode} titleName={lang.label} /> */} {lang.label} </MenuItem> ))} </Select> </div> ); }; export default LanguageSwitcher;
The algorithm should traverse the string with two indices in a single pass — one starting at the first character and one at the last — comparing the pair of characters they point to and then moving the indices toward each other. If every pair matches by the time the indices meet, the string is a palindrome. If, at any point, a pair of characters does not match, the algorithm should return false and terminate immediately.
# Generate API documentation for ../bin with jsdoc, using the local
# config, theme, README and package.json, into ./html.

current_dir=$BASH_SOURCE
# Fix: quote every expansion so paths containing spaces do not word-split.
script_dir=$(dirname "$0")
source_dir="$script_dir/../bin"
config_file="${script_dir}/conf.json"
theme_dir="${script_dir}/theme"
readme_file="${script_dir}/../README.md"
package_file="${script_dir}/../package.json"
output_dir="${script_dir}/html"

# Bail out early with a clear message when jsdoc is not installed.
command -v jsdoc >/dev/null 2>&1 || { echo >&2 "I require the command jsdoc but it does not seem to be around??"; exit 1; }

jsdoc "$source_dir" -c "$config_file" -t "$theme_dir" -R "$readme_file" -P "$package_file" -d "$output_dir"
from datetime import datetime


class WebRequestManager:
    """In-memory registry of web requests with simple filtering helpers."""

    # Formats accepted by the public API (documented, not configurable).
    _TIMESTAMP_FMT = '%Y-%m-%d %H:%M:%S'
    _DATE_FMT = '%Y-%m-%d'

    def __init__(self):
        # Each entry is a dict with keys: request_type, status,
        # reception_datetime, commit_datetime, request_body.
        self.requests = []

    def add_request(self, request_type, status, reception_datetime,
                    commit_datetime, request_body):
        """Record a request; datetime strings must be 'YYYY-MM-DD HH:MM:SS'."""
        entry = {
            'request_type': request_type,
            'status': status,
            'reception_datetime': datetime.strptime(reception_datetime, self._TIMESTAMP_FMT),
            'commit_datetime': datetime.strptime(commit_datetime, self._TIMESTAMP_FMT),
            'request_body': request_body,
        }
        self.requests.append(entry)

    def update_status(self, request_index, new_status):
        """Set a new status for the request at request_index (0-based).

        Out-of-range indexes are silently ignored, matching the original
        contract.
        """
        if 0 <= request_index < len(self.requests):
            self.requests[request_index]['status'] = new_status

    def get_requests_by_status(self, status):
        """Return all requests whose current status equals `status`."""
        return [entry for entry in self.requests if entry['status'] == status]

    def get_requests_by_type(self, request_type):
        """Return all requests of the given request type."""
        return [entry for entry in self.requests
                if entry['request_type'] == request_type]

    def get_requests_by_date_range(self, start_date, end_date):
        """Return requests whose reception time lies in [start, end].

        Both bounds are 'YYYY-MM-DD' strings and are interpreted at
        midnight, so the end day itself is only included at 00:00:00
        exactly (same behavior as the original).
        """
        window_start = datetime.strptime(start_date, self._DATE_FMT)
        window_end = datetime.strptime(end_date, self._DATE_FMT)
        return [
            entry for entry in self.requests
            if window_start <= entry['reception_datetime'] <= window_end
        ]

    def __str__(self):
        # One numbered line per request: "N. type - status - reception".
        lines = []
        for position, entry in enumerate(self.requests, start=1):
            lines.append(
                f"{position}. {entry['request_type']} - {entry['status']} - {entry['reception_datetime']}"
            )
        return '\n'.join(lines)
let ServerItem = require('../Utility/ServerItem') let Vector2 = require('../Vector2') module.exports = class AIBase extends ServerItem { constructor() { super(); this.username = "AI_Base"; this.health = new Number(100); this.isDead = false; this.respawnTicker = new Number(0); this.respawnTime = new Number(0); } onUpdate(onUpdatePosition, onUpdateRotation) { //Calculate Statemachine } onObtainTarget(connections) { } respawnCounter() { this.respawnTicker = this.respawnTicker + 1; if(this.respawnTicker >= 10) { this.respawnTicker = new Number(0); this.respawnTime = this.respawnTime + 1; //Three second respond time if(this.respawnTime >= 3) { console.log('Respawning AI: ' + this.id); this.isDead = false; this.respawnTicker = new Number(0); this.respawnTime = new Number(0); this.health = new Number(100); this.position = new Vector2(-7, 3); return true; } } return false; } dealDamage(amount = Number) { //Adjust Health on getting hit this.health = this.health - amount; //Check if we are dead if(this.health <= 0 ) { this.isDead = true; this.respawnTicker = new Number(0); this.respawnTime = new Number(0); } return this.isDead; } radiansToDegrees() { return new Number(57.29578); } }
#ifndef _EFFEL_ATA
#define _EFFEL_ATA 1

/* Fix: size_t is used below but was never declared; <stddef.h> provides
 * it and is available even in freestanding environments. */
#include <stddef.h>
#include <stdint.h>

/* Read `size` bytes from the ATA device, starting at logical block
 * address `lba`, into the buffer `dst`.
 * `dpte` is an opaque pointer — presumably a Drive Parameter Table Entry
 * identifying the target drive; TODO confirm against the implementation. */
void ata_read(void* dst, uint64_t lba, size_t size, void* dpte);

#endif
public class Fibonacci {

    /**
     * Builds the first {@code n} Fibonacci numbers (starting 0, 1) joined by
     * {@code " + "}. Returns an empty string when {@code n <= 0}.
     *
     * Extracted so the series logic is testable; also fixes the original
     * output, which printed a dangling " + " after the final term.
     */
    public static String fibonacciSeries(int n) {
        StringBuilder series = new StringBuilder();
        int first = 0, second = 1;
        for (int i = 1; i <= n; ++i) {
            if (series.length() > 0) {
                series.append(" + "); // separator only between terms, never trailing
            }
            series.append(first);
            int sum = first + second;
            first = second;
            second = sum;
        }
        return series.toString();
    }

    public static void main(String[] args) {
        int n = 10;
        System.out.print("Fibonacci Series of " + n + " numbers:");
        System.out.print(fibonacciSeries(n));
    }
}
<gh_stars>0
#!/usr/bin/env python2.7
# TODO: Error handling for upload.

# retro-drop: back up RetroPie save files to Dropbox, then prune old copies.
# Python 2.7 script (ConfigParser/urllib2 are the Python-2 stdlib modules).

import ConfigParser, json, subprocess, time, urllib, urllib2

# Defaults apply when keys are missing from the config files read below.
config = ConfigParser.ConfigParser({
  'rom_directory': '/home/pi/RetroPie/roms/',
  'copies_to_retain': '30'
})
config.read(['./config.ini', '/opt/retropie/configs/retro-drop/config.ini'])


def upload():
    """Tar+gzip every save file under the ROM directory and stream the
    archive straight to the Dropbox /files/upload endpoint via curl."""
    today = time.strftime("%Y%m%d-%H%M")

    # We do the zip file building in bash so that we can easily stream it straight to the server.
    # File patterns copied from https://retropie.org.uk/forum/topic/13108/script-to-backup-save-states-and-sram
    # NOTE(review): the last --header ends with an escaped quote
    # (application/octet-stream\") rather than a plain closing quote —
    # confirm the generated shell command is well-formed.
    upload_command_template = """\
tar --ignore-failed-read -cvzf - */*.srm* */*.bsv* */*.sav* */*.sta* */*.fs* */*.nv* */*.rtc* | \
curl -X POST https://content.dropboxapi.com/2/files/upload \
--header "Authorization: Bearer {dropbox_access_token}" \
--header "Dropbox-API-Arg: {{\\"path\\": \\"/{retropi_name}/saved-games-{today}.tar.gz\\",\\"mode\\": \\"add\\",\\"autorename\\": true,\\"mute\\": false,\\"strict_conflict\\": false}}" \
--header "Content-Type: application/octet-stream\" \
--data-binary @-
"""
    upload_command = upload_command_template.format(
        dropbox_access_token=config.get('DEFAULT', 'dropbox_access_token'),
        retropi_name=config.get('DEFAULT', 'retropi_name'),
        today=today
    )
    process = subprocess.Popen(
        upload_command,
        shell=True,
        stdout=subprocess.PIPE,
        cwd=config.get('DEFAULT', 'rom_directory')
    )
    output, error = process.communicate()


def dropbox_post(url, data):
    """POST `data` as JSON to a Dropbox API `url`.

    Returns the decoded JSON response, or {} on HTTP error (the error body
    is printed to stdout).
    """
    request = urllib2.Request(
        url,
        json.dumps(data),
        {
            'Authorization': 'Bearer {dropbox_access_token}'.format(
                dropbox_access_token=config.get('DEFAULT', 'dropbox_access_token')
            ),
            'Content-Type': 'application/json'
        }
    )
    try:
        connection = urllib2.urlopen(request)
        return json.loads(connection.read())
    except urllib2.HTTPError,e:
        print e.read()
        return {}


def get_existing_files():
    # List this RetroPie's backup folder and return just the filenames.
    response = dropbox_post(
        "https://api.dropboxapi.com/2/files/list_folder",
        { 'path': '/' + config.get('DEFAULT', 'retropi_name') }
    )
    return [e['name'] for e in response['entries']]


def delete_file(filename):
    # Remove a single old backup from Dropbox.
    print 'Deleting ' + filename
    dropbox_post(
        "https://api.dropboxapi.com/2/files/delete_v2",
        { 'path': '/' + config.get('DEFAULT', 'retropi_name') + '/' + filename }
    )


# Main flow: upload today's archive, then delete everything except the newest
# `copies_to_retain` entries.
# NOTE(review): the slice assumes list_folder returns entries oldest-first —
# confirm; Dropbox does not guarantee ordering.
upload()
existing_files = get_existing_files()
to_delete = existing_files[:-config.getint('DEFAULT', 'copies_to_retain')]
for filename in to_delete:
    delete_file(filename)
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/
'use strict';

import Thenable = monaco.Thenable;
import IWorkerContext = monaco.worker.IWorkerContext;

import * as jsonService from 'vscode-json-languageservice';

// Default schema loader: defined only where a global `fetch` exists
// (browser/worker context); otherwise stays undefined and schema requests
// are effectively disabled.
let defaultSchemaRequestService;
if (typeof fetch !== 'undefined') {
	defaultSchemaRequestService = function (url) { return fetch(url).then(response => response.text()) };
}

/**
 * Adapts a native Promise to the `jsonService.Thenable` interface so it can
 * be handed to the JSON language service as its promise constructor.
 */
class PromiseAdapter<T> implements jsonService.Thenable<T> {
	private wrapped: Promise<T>;

	constructor(executor: (resolve: (value?: T | jsonService.Thenable<T>) => void, reject: (reason?: any) => void) => void) {
		this.wrapped = new Promise<T>(executor);
	}
	public then<TResult>(onfulfilled?: (value: T) => TResult | jsonService.Thenable<TResult>, onrejected?: (reason: any) => void): jsonService.Thenable<TResult> {
		let thenable: jsonService.Thenable<T> = this.wrapped;
		return thenable.then(onfulfilled, onrejected);
	}
	// Access to the underlying native Promise.
	public getWrapped(): monaco.Thenable<T> {
		return this.wrapped;
	}
	public static resolve<T>(v: T | Thenable<T>): jsonService.Thenable<T> {
		return <monaco.Thenable<T>>Promise.resolve(v);
	}
	public static reject<T>(v: T): jsonService.Thenable<T> {
		return Promise.reject(<any>v);
	}
	public static all<T>(values: jsonService.Thenable<T>[]): jsonService.Thenable<T[]> {
		return Promise.all(values);
	}
}

/**
 * Web-worker host for the JSON language service. Each public method looks up
 * the mirrored text model for `uri`, parses it and delegates to the service.
 */
export class JSONWorker {

	private _ctx: IWorkerContext;
	private _languageService: jsonService.LanguageService;
	private _languageSettings: jsonService.LanguageSettings;
	private _languageId: string;

	constructor(ctx: IWorkerContext, createData: ICreateData) {
		this._ctx = ctx;
		this._languageSettings = createData.languageSettings;
		this._languageId = createData.languageId;
		this._languageService = jsonService.getLanguageService({
			// Schema fetching is opt-in AND requires a working `fetch`.
			schemaRequestService: createData.enableSchemaRequest && defaultSchemaRequestService,
			promiseConstructor: PromiseAdapter
		});
		this._languageService.configure(this._languageSettings);
	}

	// Validate the document; resolves to [] when the model is unknown.
	doValidation(uri: string): Thenable<jsonService.Diagnostic[]> {
		let document = this._getTextDocument(uri);
		if (document) {
			let jsonDocument = this._languageService.parseJSONDocument(document);
			return this._languageService.doValidation(document, jsonDocument);
		}
		return Promise.resolve([]);
	}
	// Completion proposals at `position`.
	doComplete(uri: string, position: jsonService.Position): Thenable<jsonService.CompletionList> {
		let document = this._getTextDocument(uri);
		let jsonDocument = this._languageService.parseJSONDocument(document);
		return this._languageService.doComplete(document, position, jsonDocument);
	}
	// Resolve additional details for a completion item.
	doResolve(item: jsonService.CompletionItem): Thenable<jsonService.CompletionItem> {
		return this._languageService.doResolve(item);
	}
	doHover(uri: string, position: jsonService.Position): Thenable<jsonService.Hover> {
		let document = this._getTextDocument(uri);
		let jsonDocument = this._languageService.parseJSONDocument(document);
		return this._languageService.doHover(document, position, jsonDocument);
	}
	format(uri: string, range: jsonService.Range, options: jsonService.FormattingOptions): Thenable<jsonService.TextEdit[]> {
		let document = this._getTextDocument(uri);
		let textEdits = this._languageService.format(document, range, options);
		return Promise.resolve(textEdits);
	}
	// Drop a cached schema so it is re-fetched on next use.
	resetSchema(uri: string): Thenable<boolean> {
		return Promise.resolve(this._languageService.resetSchema(uri));
	}
	findDocumentSymbols(uri: string): Thenable<jsonService.SymbolInformation[]> {
		let document = this._getTextDocument(uri);
		let jsonDocument = this._languageService.parseJSONDocument(document);
		let symbols = this._languageService.findDocumentSymbols(document, jsonDocument);
		return Promise.resolve(symbols);
	}
	findDocumentColors(uri: string): Thenable<jsonService.ColorInformation[]> {
		let document = this._getTextDocument(uri);
		let jsonDocument = this._languageService.parseJSONDocument(document);
		let colorSymbols = this._languageService.findDocumentColors(document, jsonDocument);
		return Promise.resolve(colorSymbols);
	}
	getColorPresentations(uri: string, color: jsonService.Color, range: jsonService.Range): Thenable<jsonService.ColorPresentation[]> {
		let document = this._getTextDocument(uri);
		let jsonDocument = this._languageService.parseJSONDocument(document);
		let colorPresentations = this._languageService.getColorPresentations(document, jsonDocument, color, range);
		return Promise.resolve(colorPresentations);
	}
	getFoldingRanges(uri: string, context?: { rangeLimit?: number; }): Thenable<jsonService.FoldingRange[]> {
		let document = this._getTextDocument(uri);
		let ranges = this._languageService.getFoldingRanges(document, context);
		return Promise.resolve(ranges);
	}
	getSelectionRanges(uri: string, positions: jsonService.Position[]): Thenable<jsonService.SelectionRange[]> {
		let document = this._getTextDocument(uri);
		let jsonDocument = this._languageService.parseJSONDocument(document);
		let ranges = this._languageService.getSelectionRanges(document, positions, jsonDocument);
		return Promise.resolve(ranges);
	}
	// Find the mirrored model matching `uri`; returns null when unknown.
	private _getTextDocument(uri: string): jsonService.TextDocument {
		let models = this._ctx.getMirrorModels();
		for (let model of models) {
			if (model.uri.toString() === uri) {
				return jsonService.TextDocument.create(uri, this._languageId, model.version, model.getValue());
			}
		}
		return null;
	}
}

// Data passed from the main thread when the worker is created.
export interface ICreateData {
	languageId: string;
	languageSettings: jsonService.LanguageSettings;
	enableSchemaRequest: boolean;
}

// Factory used by the worker bootstrap.
export function create(ctx: IWorkerContext, createData: ICreateData): JSONWorker {
	return new JSONWorker(ctx, createData);
}
<filename>hashstore/bakery/lite/tests/backend_tests.py<gh_stars>1-10
import os
from hashkernel.bakery import NotFoundError
from hashstore.bakery.lite.node import ContentAddress
from hashstore.tests import TestSetup, seed, random_bytes
from ..node.blobs import BlobStore
from hashkernel.bakery import Cake
from hs_build_tools.nose import eq_,ok_

# to test if sniffer is not hanging uncomment next line & save
# raise Exception()

test = TestSetup(__name__,ensure_empty=True)
log = test.log

# Expected content addresses for the three stored payloads below; the
# random stream is seeded, so these are deterministic.
inline_udk = '5vdffgpxyadwoi3y91bhli3azarn3xn1jnm5i1bct2ktl547mi'
db_udk = '3vww43t0xcq6tq586pdtaapa8ubae45ith1zwyr4jd5oigcflp'
file_udk = '40b01hzgoes1zkf7p0v5bion6zxtxltu9t39zufamdk5i2ax54'


def test_LiteBackend():
    """Round-trips three payloads through BlobStore: store, recall, re-store
    (idempotence), and a not-found lookup."""
    hs = BlobStore(os.path.join(test.dir, 'test_HashStore'))
    not_existent = '4no3jb46qaff0a0pwg24lu0y8eq5ldmdich3su14mkcr76m8wr'

    def store():
        # Reseed so random_bytes reproduces the same payloads every call.
        seed(0)
        s = random_bytes(40)
        eq_(len(s), 40)
        w0 = hs.writer()
        r0 = w0.write(s, done=True)
        eq_(inline_udk, str(r0))
        # Address computed directly from content must equal the writer's.
        r0a = ContentAddress(Cake.from_bytes(s))
        eq_(r0, r0a)
        eq_(False, r0 == 0 )
        eq_(False, r0a == 0 )
        eq_(hash(r0), hash(r0a))
        ok_(hs.lookup(Cake.from_bytes(s)).found())
        w1 = hs.writer()
        for _ in range(3):
            w1.write(random_bytes(100))
        r1 = w1.done()
        s1 = str(r1)
        eq_(db_udk, s1)
        w2 = hs.writer()
        for _ in range(100): # 100Mb
            w2.write(random_bytes(1000))
        w2.done()
        r2 = w2.done() # call done twice
        eq_(file_udk, str(r2))
        return r0, r1, r2

    r0, r1, r2 = store()

    #test recall
    # Reseed again so the expected bytes match what was stored above.
    seed(0)
    o0 = hs.get_content(r0).stream()
    eq_(o0.read(40), random_bytes(40))
    eq_(0, len(o0.read()))
    o1=hs.get_content(r1).stream()
    for _ in range(3):
        eq_(o1.read(100), random_bytes(100))
    eq_(0, len(o1.read()))
    o2 = hs.get_content(r2).stream()
    for _ in range(100):
        eq_(o2.read(1000), random_bytes(1000))
    eq_(0, len(o2.read()))

    #store again
    store()

    #retrieve non existent
    try:
        hs.get_content(not_existent)
        ok_(False)
    except NotFoundError:
        pass

    # Storing twice must not create duplicates: still exactly 3 blobs.
    all = list(hs)
    eq_(3,len(all))
<filename>vodmodule/doc.go<gh_stars>10-100
// Package vodmodule defines types and a Mapper type that provides the
// ability to map media content by a common prefix.
package vodmodule
-- Return the n-th highest distinct salary (n is a 1-indexed placeholder).
--
-- Fix: the original wrapped an ORDER BY inside a derived table and applied
-- LIMIT/OFFSET to the outer SELECT. SQL does not guarantee that a derived
-- table's ordering carries over to the outer query (MySQL may ignore a
-- subquery ORDER BY entirely), so OFFSET could skip arbitrary rows.
-- Ordering and paging in the same (single) query level is well-defined.
SELECT DISTINCT Salary
FROM Employees
ORDER BY Salary DESC
LIMIT 1 OFFSET n-1;
<reponame>ContentPI/ui-k<gh_stars>1-10 import React from 'react' import Text from './index' const stories = { component: 'Text', props: [ { name: 'align', type: 'TextAlign', default: 'left', description: 'The alignament of the text', }, { name: 'className', type: 'string', default: 'primary', description: 'The className of the text', }, { name: 'color', type: 'TextColor', default: 'textPrimary', description: 'The color of the text', }, { name: 'component', type: ' keyof JSX.IntrinsicElements', default: 'undefined', description: 'The component where the text will be displayed', }, { name: 'status', type: 'StatusColor', default: ' ', description: 'The status color of the text', }, { name: 'variant', type: 'Typography', default: 'paragraph1', description: 'The typography of the text', }, ], stories: [ { name: 'Text', description: 'left, center or right', render: <Text align="left">Left text</Text>, prop: 'align', code: ` <Text align="left">My text</Text> `, }, { name: 'Text', description: 'left, center or right', render: <Text align="center">Centered text</Text>, prop: 'align', code: ` <Text align="center">Centered text</Text> `, }, { name: 'Text', description: 'left, center or right', render: <Text align="right">Right text</Text>, prop: 'align', code: ` <Text align="right">Centered text</Text> `, }, ], } export default stories
#!/bin/sh # shellcheck disable=SC2039,SC2155 MIRROR_URI="http://dl-cdn.alpinelinux.org/alpine/$RELEASE" APORTS_DIR="${APORTS_DIR:-/home/build}" die() { echo "$@" 1>&2 echo 1>&2 exit 1 } # Prints names of repo's subdirs (i.e. abuilds) that contains APKBUILDs which # has been changed/created in the specified revisions. The abuild names are # printed in a build order. changed_abuilds() { local repo="$1" local commit_ish="$2" # Get names of repo's subdirectories with modified APKBUILD, # but ignore deleted ones. local committed_aports="$(git diff-tree -r --relative="$repo" --name-only --diff-filter=ACMR \ "$commit_ish" -- '*APKBUILD' | xargs -I% dirname % | xargs)" local uncommitted_aports="$(git status -s -u -- "${repo}"/*APKBUILD | xargs -I% dirname % | cut -f2 -d/)" # remove duplicates from the lists local aports="$(echo -e "$committed_aports"\\n"$uncommitted_aports" | uniq)" # Sort abuilds by build order. # ap builddirs -d "$(pwd)/$repo" $aports 2>/dev/null | xargs -I% basename % echo "$aports" } # Replaces /etc/apk/repositories with repositories at $MIRROR_URI that are on # the same or higher level than the given repo (main > community > testing) # and after that runs `apk update`. 
# # $1: the target repository; main, community, or testing set_repositories_for() { local target_repo="$1" local repo; for repo in main community testing; do echo "Adding $MIRROR_URI/$repo to /etc/apk/repositories" echo "$MIRROR_URI/$repo" | sudo tee -a /etc/apk/repositories [ "$repo" = "$target_repo" ] && break done sudo apk update } cd "$APORTS_DIR" || exit mkdir -p "$HOME"/packages/"$RELEASE"/main/x86_64 # lay down private key file echo -en "${SIGNING_KEY}" > ~/.abuild/packages@kws1.com-5f35c485.rsa sudo cp ~/.abuild/packages@kws1.com-5f35c485.rsa.pub /etc/apk/keys/ commit_range="$(git rev-parse 'HEAD^1')..HEAD" echo 'Diffstat:' git --no-pager diff --color --stat "$commit_range" # shellcheck disable=SC2043 for repo in main; do set_repositories_for "$repo" oIFS=$IFS IFS=" " for pkgname in $(changed_abuilds "$repo" "$commit_range"); do qname="$repo/$pkgname" echo "$pkgname" "Building package $qname" cd "$qname" || continue if abuild -r; then checkapk || : successful_pkgs="$successful_pkgs $qname" else failed_pkgs="$failed_pkgs $qname" fi cd "$APORTS_DIR" || exit echo "$pkgname" done IFS=$oIFS done # find ~ # env printf '\n----\n' if [ -n "$successful_pkgs" ]; then echo "Successfully built packages:$successful_pkgs" fi if [ -n "$failed_pkgs" ]; then die "Failed to build packages:$failed_pkgs" elif [ -z "$successful_pkgs" ]; then echo 'No packages found to be built.' fi
<filename>src/store/modules/text.js
// Vuex module holding UI text state (command title + visible locations)
// with bilingual (zh-CN / en) label resolution.
const state = {
  commandTitle: "",
  locationArrShow: [],
}

const getters = {
  // Resolve the persisted command title from localStorage, localized to the
  // current language; falls back to the placeholder when nothing is stored.
  // NOTE(review): this is a namespaced module, so `state.app` below refers to
  // this module's own state (which defines no `app` key). The language flag
  // presumably lives on the root store and should come from the getter's
  // `rootState` argument — confirm against how the store is registered.
  // NOTE(review): getters here assign into `state`, i.e. mutate outside a
  // mutation; this breaks Vuex strict mode — confirm it is intentional.
  commandTitle(state) {
    let info = JSON.parse(localStorage.getItem("commandTitle"))
    if (!info) {
      state.commandTitle = "请选择"
    } else {
      info.label = state.app.language === "zh-CN" ? info.cn : info.en
      state.commandTitle = info.label
    }
    return state.commandTitle
  },
  // Keep only entries flagged show === 1 and attach a localized label.
  // NOTE(review): returns undefined when the list is empty — confirm callers
  // tolerate that (an `[]` fallback may be expected).
  locationArrShow(state) {
    if (state.locationArrShow.length !== 0) {
      let rece = []
      state.locationArrShow.forEach((item) => {
        if (item.show && item.show === 1) {
          item['label'] = state.app.language === "zh-CN" ? item['cn'] : item['en']
          rece.push(item)
        }
      })
      state.locationArrShow = rece
      return state.locationArrShow
    }
  }
}

const mutations = {
  // Store the chosen command entry in state and persist it to localStorage.
  // NOTE(review): same `state.app` concern as in the getters above.
  SET_TITLE: (state, item) => {
    item.label = state.app.language === "zh-CN" ? item.cn : item.en;
    state.commandTitle = item.label;
    let obj = {
      "code": item.code,
      "label": item.label,
      "cn": item.cn,
      "en": item.en
    }
    localStorage.setItem("commandTitle", JSON.stringify(obj))
  }
}

const actions = {
  // Thin async wrapper around SET_TITLE.
  setTitle({ commit }, item) {
    commit('SET_TITLE', item)
  }
}

export default {
  namespaced: true,
  state,
  getters,
  mutations,
  actions
}
#!/bin/sh
# Fetch the latest jogan.py (overwriting any previous copy) and run it.
#
# Fix: the original did `wget URL; rm jogan.py; mv jogan.py.1 jogan.py`.
# wget only appends ".1" when jogan.py already exists, so on a clean
# directory the rm deleted the fresh download and the mv then failed.
# `wget -O` always writes to the given name, making the rm/mv dance
# unnecessary in every case.
wget -O jogan.py https://raw.githubusercontent.com/m4rktn/jogan/master/jogan.py
python2 jogan.py
#ifndef _EEPROM_H
#define _EEPROM_H

/*
 * I2C EEPROM access helpers.
 *
 * Fix: removed the empty `#ifndef _EEPROM_C ... #endif` conditional that
 * contained nothing and had no effect.
 *
 * NOTE(review): identifiers starting with an underscore followed by an
 * uppercase letter (_EEPROM_H) are reserved in C; the guard name is kept
 * unchanged for compatibility with the existing eeprom.c.
 */

/* Read one byte from EEPROM address `addr`. */
unsigned char eeprom_read_byte(unsigned char addr);

/* Write byte `dat` to EEPROM address `addr`. */
void eeprom_write_byte(unsigned char addr, unsigned char dat);

/* Read `len` consecutive bytes starting at `addr` into `buffer`. */
void eeprom_read_multi(unsigned char *buffer, unsigned char addr, unsigned char len);

/* Write `len` consecutive bytes from `buffer` starting at `addr`. */
void eeprom_write_multi(unsigned char *buffer, unsigned char addr, unsigned char len);

#endif
<reponame>vany152/FilesHash
//-----------------------------------------------------------------------------
// boost-libs variant/test/variant_plymorphic_get_test.cpp source file
// See http://www.boost.org for updates, documentation, and revision history.
//-----------------------------------------------------------------------------
//
// Copyright (c) 2003 <NAME>
// Copyright (c) 2013-2021 <NAME>
//
// Distributed under the Boost Software License, Version 1.0. (See
// accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)

#include "boost/variant/variant.hpp"
#include "boost/variant/apply_visitor.hpp"
#include "boost/variant/static_visitor.hpp"
#include "boost/variant/polymorphic_get.hpp"

#include "boost/core/lightweight_test.hpp"

// Plain (non-virtual) hierarchy exercised in the first half of main().
struct base {int trash;};
struct derived1 : base{};
struct derived2 : base{};

// Virtual-diamond hierarchy exercised in the second half of main().
struct vbase { short trash; virtual ~vbase(){} virtual int foo() const { return 0; }  };
struct vderived1 : virtual vbase{ virtual int foo() const { return 1; }  };
struct vderived2 : virtual vbase{ virtual int foo() const { return 3; }  };
struct vderived3 : vderived1, vderived2 { virtual int foo() const { return 3; }  };

// Asserts that polymorphic_get<T>(v) throws bad_polymorphic_get whose
// message is present and distinct from plain bad_get's message.
template <class T, class Variant>
inline void check_throws(Variant& v) {
    try {
        boost::polymorphic_get<T>(v);
        BOOST_TEST(false);
    } catch (const boost::bad_polymorphic_get& e) {
        BOOST_TEST(!!e.what());
        BOOST_TEST(std::string(e.what()) != boost::bad_get().what());
    }
}

int main()
{
    typedef boost::variant<int, base, derived1, derived2> var_t;

    // Default-constructed variant holds int: base must not be reachable.
    var_t var1;
    BOOST_TEST(!boost::polymorphic_get<base>(&var1));
    check_throws<base>(var1);
    BOOST_TEST(!boost::polymorphic_get<const base>(&var1));
    check_throws<base, const var_t>(var1);

    // Holding a derived type makes the base subobject reachable.
    var1 = derived1();
    BOOST_TEST(boost::polymorphic_get<base>(&var1));
    BOOST_TEST(boost::polymorphic_get<const base>(&var1));

    // Base access must see the derived object's data members.
    derived2 d;
    d.trash = 777;
    var_t var2 = d;
    BOOST_TEST(boost::polymorphic_get<base>(var2).trash == 777);
    BOOST_TEST(boost::polymorphic_get<const base>(var2).trash == 777);

    // Reassigning to int makes base unreachable again, but int reachable.
    var2 = 777;
    BOOST_TEST(!boost::polymorphic_get<base>(&var2));
    check_throws<base>(var2);
    BOOST_TEST(!boost::polymorphic_get<const base>(&var2));
    check_throws<base, const var_t>(var2);
    BOOST_TEST(boost::polymorphic_get<int>(var2) == 777);
    BOOST_TEST(boost::polymorphic_get<const int>(var2) == 777);

    // Virtual inheritance: data and virtual dispatch through the vbase.
    typedef boost::variant<int, vbase, vderived1, vderived2, vderived3> vvar_t;
    vvar_t v = vderived3();
    boost::polymorphic_get<vderived3>(v).trash = 777;
    const vvar_t& cv = v;
    BOOST_TEST(boost::polymorphic_get<vbase>(cv).trash == 777);
    BOOST_TEST(boost::polymorphic_get<const vbase>(cv).trash == 777);
    BOOST_TEST(boost::polymorphic_get<vbase>(cv).foo() == 3);
    BOOST_TEST(boost::polymorphic_get<vbase>(v).foo() == 3);
    BOOST_TEST(boost::polymorphic_get<const vbase>(cv).foo() == 3);
    BOOST_TEST(boost::polymorphic_get<const vbase>(v).foo() == 3);

    return boost::report_errors();
}
#!/bin/bash # Copyright (c) 2021, Oracle and/or its affiliates. # Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl. # # Initialize script="${BASH_SOURCE[0]}" scriptDir="$( cd "$( dirname "${script}" )" && pwd )" warDir=$PWD source ${scriptDir}/utils.sh # Setting default values initialize # Function to lowercase a value and make it a legal DNS1123 name # $1 - value to convert to lowercase function toDNS1123Legal { local val=`echo $1 | tr "[:upper:]" "[:lower:]"` val=${val//"_"/"-"} echo "$val" } # username and password from Kubernetes secret username=`kubectl get secrets ${weblogicCredentialsSecretName} -n ${domainNamespace} -o=jsonpath='{.data.username}'|base64 --decode` password=`kubectl get secrets ${weblogicCredentialsSecretName} -n ${domainNamespace} -o=jsonpath='{.data.password}'|base64 --decode` adminServerPodName="${domainUID}-$(toDNS1123Legal ${adminServerName})" InputParameterList=" -domainName ${domainUID} -adminServerName ${adminServerName} -adminURL ${adminServerPodName}:${adminServerPort} -username ${username} -password ${password}" InputParameterList="${InputParameterList} -oamClusterName ${oamClusterName} -wlsMonitoringExporterTooamCluster ${wlsMonitoringExporterTooamCluster}" InputParameterList="${InputParameterList} -policyClusterName ${policyClusterName} -wlsMonitoringExporterTopolicyCluster ${wlsMonitoringExporterTopolicyCluster}" echo "Deploying WebLogic Monitoring Exporter with domainNamespace[$domainNamespace], domainUID[$domainUID], adminServerPodName[$adminServerPodName]" . 
$scriptDir/get-wls-exporter.sh kubectl cp $scriptDir/wls-exporter-deploy ${domainNamespace}/${adminServerPodName}:/u01/oracle kubectl cp $scriptDir/deploy-weblogic-monitoring-exporter.py ${domainNamespace}/${adminServerPodName}:/u01/oracle/wls-exporter-deploy EXEC_DEPLOY="kubectl exec -it -n ${domainNamespace} ${adminServerPodName} -- /u01/oracle/oracle_common/common/bin/wlst.sh /u01/oracle/wls-exporter-deploy/deploy-weblogic-monitoring-exporter.py ${InputParameterList}" eval ${EXEC_DEPLOY}
<reponame>luissaiz/apicheck<filename>refactor/old/apitest/actions/sendto/cli.py
# Copyright 2017 BBVA
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import click
import logging

from apitest import global_options
from .proxy.console import *

log = logging.getLogger('apitest')


# --------------------------------------------------------------------------
# CLI APITest
# --------------------------------------------------------------------------
# NOTE: click derives help text from the `help=` keyword arguments below;
# function docstrings are intentionally omitted so CLI output is unchanged.
@click.group()
@click.pass_context
def sendto(ctx, **kwargs):  # pragma no cover
    # Fold group-level options into the shared click context object.
    ctx.obj.update(kwargs)


# NOTE(review): "thought a proxy" in the help string looks like a typo for
# "through a proxy" — it is user-visible runtime text, so left untouched here.
@sendto.command(help="Send API end-point queries thought a proxy")
@click.pass_context
@click.option('-P', '--proxy', "proxy_url", required=True, default="http://127.0.0.1:8080")
@click.argument('apitest_file', required=True)
def proxy(ctx, **kwargs):
    # Delegates to the console launcher imported (via *) from .proxy.console.
    launch_apitest_sento_proxy_console(ctx.obj, **kwargs)
#!/bin/bash set -e R10K="/opt/puppetlabs/puppet/bin/r10k" #FLAGS="-v debug" FLAGS="" PUPPET_UID=997 # 997=puppet if [ $EUID -ne $PUPPET_UID ]; then (>&2 echo $0 must be run as the puppet user) exit 1 fi # Args are repository, ref, deleted if [ $# -ne 3 ]; then # Missing args, run full deploy ${R10K} deploy environment --puppetfile ${FLAGS} elif [[ "${1}" =~ ^module-(.+)$ ]]; then # Module repo, deploy module module=${BASH_REMATCH[1]} echo "Deploying module ${module}" ${R10K} deploy module ${module} ${FLAGS} else if [[ "${1}" =~ ^(.+)_control$ ]]; then # Control repo, add prefix to environment prefix="${BASH_REMATCH[1]}_" fi if [[ "${2}" =~ ^refs/heads/(.+)$ ]]; then branch=${BASH_REMATCH[1]} fi if [ "${3}" = "true" ]; then echo "Purging environment ${prefix}${branch}" ${R10K} deploy environment ${FLAGS} else echo "Deploying environment ${prefix}${branch}" ${R10K} deploy environment ${prefix}${branch} --puppetfile ${FLAGS} fi fi if [[ -f /etc/puppetlabs/r10k/gitsync/master/r10k.yaml ]]; then if ! diff -q /etc/puppetlabs/r10k/r10k.yaml /etc/puppetlabs/r10k/gitsync/master/r10k.yaml; then cp /etc/puppetlabs/r10k/gitsync/master/r10k.yaml /etc/puppetlabs/r10k/r10k.yaml echo "r10k.yaml change detected. Running full r10k deploy..." ${R10K} deploy environment --puppetfile ${FLAGS} fi fi if [[ -f /etc/puppetlabs/r10k/gitsync/master/envlink.yaml ]]; then cp /etc/puppetlabs/r10k/gitsync/master/envlink.yaml /etc/puppetlabs/envlink/envlink.yaml fi cd ${HOME}/envlink /opt/puppetlabs/puppet/bin/ruby exe/envlink
<filename>src/types.ts
import SingleSpa, { LifeCycles } from 'single-spa'

/** Raw per-app configuration as authored by the user. */
export interface Config {
  port: number
  mountPath: string
  publicPath?: string // default same with mountPath
  output?: string // default "dist"
  default?: boolean // default false
}

/** Config with every optional field resolved, plus the computed entry URL. */
export interface NormalizedConfig extends Readonly<Required<Config>> {
  readonly entry: string // full url to access index.html
}

/** Map of app name -> normalized configuration. */
export type NormalizedConfigs = Record<string, NormalizedConfig>

/** Shape of the shared single-app runtime object. */
export interface SingleApp {
  startApp: (appName: string, lifecycles: LifeCycles) => void,
  singleSpa: typeof SingleSpa,
  singleAppConfig: NormalizedConfigs
}
<gh_stars>0
package org.openmucextensions.app.recorder;

import java.io.IOException;
import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.List;

import org.apache.felix.service.command.Descriptor;
import org.openmuc.framework.data.Record;
import org.openmuc.framework.dataaccess.Channel;
import org.openmuc.framework.dataaccess.DataAccessService;
import org.openmuc.framework.dataaccess.DataLoggerNotAvailableException;
import org.osgi.service.component.ComponentContext;

/**
 * Felix Gogo shell commands for exporting logged OpenMUC channel data
 * to a CSV file.
 */
public class ConsoleCommands {

	// Injected/removed by the OSGi declarative-services runtime (see the
	// set/unset methods below).
	private DataAccessService dataAccessService = null;

	protected void deactivate(ComponentContext context) {
	}

	/**
	 * Shell command: exports logged records of every logging-enabled channel
	 * into a CSV table. Errors are reported on stdout rather than thrown,
	 * since this runs interactively in the console.
	 */
	@Descriptor("exports a data table for the specified time span")
	public void exporttable(
			@Descriptor("path and filename of the export file") String filename,
			@Descriptor("start time with format yyyy-MM-dd HH:mm:ss (use quotes)") String startTimeArg,
			@Descriptor("end time with format yyyy-MM-dd HH:mm:ss (use quotes)") String endTimeArg,
			@Descriptor("interval in minutes") String intervalArg) {

		DateFormat formatter = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");

		try {
			long startTime = formatter.parse(startTimeArg).getTime();
			long endTime = formatter.parse(endTimeArg).getTime();
			long interval = Long.parseLong(intervalArg);

			List<String> channelIds = getLoggedChannels();
			System.out.println("Found " + channelIds.size() + " logged channels...");

			// Interval is entered in minutes; DataTable expects milliseconds.
			DataTable table = new DataTable(startTime, endTime, interval * 60000, channelIds);

			for (String channelId : channelIds) {
				List<Record> records = dataAccessService.getChannel(channelId).getLoggedRecords(startTime, endTime);
				table.insertRecords(channelId, records);
			}

			table.writeCSVFile(filename, ",", "yyyy-MM-dd HH:mm:ss");
			System.out.println("Data exported to file " + filename);

		} catch (ParseException e) {
			System.out.println("Cannot parse begin or end date, correct format is yyyy-MM-dd HH:mm:ss");
		} catch (NumberFormatException e) {
			System.out.println("Cannot parse interval");
		} catch (DataLoggerNotAvailableException e) {
			System.out.println("Cannot export table because data logger is not available");
		} catch (IOException e) {
			System.out.println("IO error: " + e.getMessage());
		}
	}

	protected void setDataAccessService(DataAccessService service) {
		this.dataAccessService = service;
	}

	protected void unsetDataAccessService(DataAccessService service) {
		this.dataAccessService = null;
	}

	/** Returns ids of all channels whose logging interval is positive. */
	private List<String> getLoggedChannels() {
		List<String> allChannelIds = dataAccessService.getAllIds();
		List<String> channelsToLog = new ArrayList<>();

		for (String channelId : allChannelIds) {
			Channel channel = dataAccessService.getChannel(channelId);
			if(channel!=null) {
				if(channel.getLoggingInterval()>0) channelsToLog.add(channelId);
			}
		}
		return channelsToLog;
	}
}
#!/usr/bin/env bash # # Generate all the repository Dockerfiles from templates # set -euo pipefail declare -A modClusterVersions=( ['1.3']='1.3.8.Final' ) declare -A modClusterMd5sums=( ['1.3']='93dc6218d6dd14ae4ce24c5c09f20ab5' ) declare -a supportedTomcats=( 6 7 8 ) cd "$(dirname "$(readlink -f "$BASH_SOURCE")")" versions=( "$@" ) if [ ${#versions[@]} -eq 0 ]; then versions=( */ ) fi versions=( "${versions[@]%/}" ) for version in "${versions[@]}"; do modClusterVersion="${modClusterVersions[$version]}" modClusterMd5sum="${modClusterMd5sums[$version]}" for variant in "$version"/*/; do variant="$(basename "$variant")" # "8" or "8-alpine" tcVariant="${variant%-*}" # "8" or "8.5" tcMajor="${tcVariant%.*}" # "8" shopt -s extglob subVariant="${variant##${tcVariant}?(-)}" # "" or "alpine" shopt -u extglob case "$subVariant" in centos) # no "centos" variant in official tomcat repo baseImage='antoineco\/tomcat' ;; *) baseImage='tomcat' ;; esac case "$variant" in [7-8]*) baseImage+=":${tcVariant}${subVariant:+-$subVariant}" # ":8" or ":8-alpine" ;; *) echo >&2 "not sure what to do with $version/$variant re: baseImage; skipping" continue ;; esac tarExclude=() for tomcat in "${supportedTomcats[@]}"; do if [ "$tomcat" != "$tcMajor" ]; then tarExclude+=( "--exclude=mod_cluster-container-tomcat${tomcat}*.jar" ) fi done cp -v "Dockerfile${subVariant:+-$subVariant}.template" "$version/$variant/Dockerfile" sed -ri -e \ " \ s/__BASEIMAGE__/$baseImage/; \ s/__MODCLUSTERVERSION__/$modClusterVersion/; \ s/__MODCLUSTERMD5SUM__/$modClusterMd5sum/; \ s/__TAREXCLUDE__/${tarExclude[*]}/ \ " \ "$version/$variant/Dockerfile" cat >> "$version/$variant/Dockerfile" <<-'EOD' # verify mod_cluster is working properly RUN set -e \ && clusterLines="$(catalina.sh configtest 2>&1)" \ && clusterLines="$(echo "$clusterLines" | grep -i 'modcluster')" \ && if ! echo "$clusterLines" | grep 'INFO: MODCLUSTER000001: Initializing mod_cluster' >&2; then \ echo >&2 "$clusterLines"; \ exit 1; \ fi EOD done done
# Abstract base layer for building SVG documents as Ruby objects.
module SVGAbstract

  #Base SVG object, all others inherit from this class
  class SVGObject

    def initialize
      @attributes = {}
      @name = 'abstract'
    end

    # Full deep copy via a Marshal round-trip (copies nested structures too).
    def deep_copy
      Marshal.load( Marshal.dump self )
    end

    #Some methods for performing escaping of text, etc
    # When @escape is truthy, text passed through the escape helpers is
    # entity-encoded; otherwise it is emitted verbatim.
    attr_accessor :escape
    alias_method :"escape?", :escape

    def escape_xml(s)
      #Escape the basic entities < > & and "
      #Make sure we escape ampersand first to avoid double escapes
      if (@escape)
        s.gsub('&','&amp;').gsub('<','&lt;').gsub('>','&gt;').gsub('"','&quot;')
      else
        s
      end
    end
    private :escape_xml

    # Escapes only double quotes; used for attribute values.
    def escape_quote(s)
      if (@escape)
        s.gsub('"','&quot;')
      else
        s
      end
    end
    private :escape_quote

    #helper methods which will be used in to_xml
    def attributes_string
      #note that underscores are converted to hyphens. Quotes are entity encoded
      # nil-valued attributes are dropped entirely.
      attrs = @attributes.reject{|k,v| v.nil?}.map do |k,v|
        vv = escape_quote v.to_s
        kk = k.to_s.gsub('_', '-')
        %Q[#{kk}="#{vv}"]
      end
      return attrs.join(' ')
    end
    private :attributes_string

    #This is not the way RVG does attribute setting, but it's nicer. Instead
    #of using self.attr1 = value 1, self.attr2 = value2, we should be able
    #to do self.attributes(:attr1 => value1, :attr2 => value2)
    #This method is also chainable and can yield to a block. It's just
    #all round neater.
    #Note that we CANNOT simply do @attributes.merge!(attrs), we instead call
    #each method. The reason for this is that some of the methods have overridden
    #non-default implementations, in particular when an attr helper is required
    #e.g. clip_path. If we simply do a merge, the IdHelper won't be run
    def attributes(attrs)
      attrs.each do |k,v|
        setter = "#{k}=".to_sym
        self.send(setter, v)
      end
      yield self if block_given?
      return self
    end

    #Refer to method above, this version disables all attribute helper methods.
    #Potentially useful if you don't know whether there exists a helper or not
    #(e.g. when setting ids), and you want to make sure your code will work
    #without mucking about checking the (non-existent) docs
    def attributes_no_helpers(attrs)
      @attributes.merge! attrs
      yield self if block_given?
      return self
    end

    #Override method missing to provide svg attribute setters/getters.
    #This allows user to use non-standard attributes (which will probably
    #be ignored when the rendered svg file is viewed in a standard viewer)
    #Defining them this way also saves us an enormous amount of work
    #e.g we can do things like "svg.version = 1.1" to set the version
    def method_missing(attr, *vals)
      attr_str = attr.to_s
      case attr_str
      when /^[a-zA-Z_]+$/
        #getter
        if @attributes.has_key? attr
          return @attributes[attr]
        else
          super(attr, *vals) #method missing
        end
      when /^[a-zA-Z_]+=$/
        #setter
        if vals.empty?
          raise ArgumentError, "No arguments provided to attribute setter #{attr_str}"
        end
        # strip the trailing "=" to get the attribute key
        @attributes[attr_str.slice(0..-2).to_sym] = vals.first
      else
        super(attr, *vals) #method missing
      end
    end

    #Should usually supply an implementation for this method (override in subclass).
    #Default implementation below is for primitive objects only (objects which do
    #not contain other objects, and have self closing tags.) This is sensible, as
    #container objects will usually inherit from SVGContainer, which overrides
    #this default implementation
    def to_xml
      "<#{@name} #{attributes_string} />"
    end

  end

end
#!/bin/sh
# Launcher for the cyphernode setup CLI: pins component versions and
# machine-local defaults, then delegates to the Node entry point.
#
# NOTE(review): these variables are plain shell variables, not exported,
# so they are not visible in the environment of the node process — confirm
# whether index.js expects them exported.
SETUP_DIR=/Users/jash/src/cyphernode_satoshiportal/dist
DEFAULT_CERT_HOSTNAME=disk0book.local
PROXYCRON_VERSION=v0.2.0-rc.5
PYCOIN_VERSION=v0.2.0-rc.5
SETUP_VERSION=v0.2.0-rc.5
BITCOIN_VERSION=v0.17.1
LIGHTNING_VERSION=v0.7.0
DEFAULT_DATADIR_BASE=/Users/jash
GATEKEEPER_VERSION=v0.2.0-rc.5
PROXY_VERSION=v0.2.0-rc.5
OTSCLIENT_VERSION=v0.2.0-rc.5
DEFAULT_USER=jash
EDITOR=/usr/bin/nano

# Quote "$@" so arguments containing whitespace are forwarded intact;
# unquoted $@ would be re-split by the shell.
node index.js "$@"
#!/usr/bin/env bash
# Bring up a local Vitess dev cluster (zk2 topology, single shard "-") with
# gRPC static auth enabled, then initialize schema and vschema.
set -e

printf "\nStarting Vitess cluster\n"

export VTROOT=/vagrant
export VTDATAROOT=/tmp/vtdata-dev
export MYSQL_FLAVOR=MySQL56

cd "$VITESS_WORKSPACE"/examples/local

export SHARD="-"
export TOPO="zk2"

# Start topology server, control daemon, tablets, and the query gateway.
./zk-up.sh
./vtctld-up.sh --enable-grpc-static-auth
./vttablet-up.sh --enable-grpc-static-auth
./vtgate-up.sh --enable-grpc-static-auth

# Give the tablets a moment to come up before issuing vtctl commands.
sleep 3

./lvtctl.sh InitShardPrimary -force test_keyspace/- test-100
./lvtctl.sh ApplySchema -sql "$(cat create_test_table.sql)" test_keyspace
./lvtctl.sh ApplyVSchema -vschema_file vschema.json test_keyspace
./lvtctl.sh RebuildVSchemaGraph

printf "\nVitess cluster started successfully.\n\n"
package be.kwakeroni.test.util;

import be.kwakeroni.parameters.backend.api.Configuration;
import be.kwakeroni.parameters.backend.api.ConfigurationProvider;

/**
 * {@link ConfigurationProvider} for tests: every instance serves whatever
 * {@link Configuration} was last installed via {@link #setConfiguration(Configuration)}.
 */
public class TestConfigurationProvider implements ConfigurationProvider {

    // Shared across all instances so a test can swap the configuration globally.
    private static Configuration CONFIGURATION;

    /** Installs the configuration returned by every provider instance. */
    public static void setConfiguration(Configuration configuration) {
        CONFIGURATION = configuration;
    }

    /** Resets the shared configuration to {@code null}. */
    public static void clear() {
        CONFIGURATION = null;
    }

    @Override
    public Configuration getConfiguration() {
        return CONFIGURATION;
    }
}
""" Declares a @builtin decorator class for tagging php built-in functions. """ class builtin(object): "Class for tagging built in functions" def __init__(self, func): self.func = func def __call__(self, *args, **kw): return self.func(*args, **kw) def __repr__(self): return "<php-builtin-function %r>"%self.func.__name__
package io.opensphere.mantle.data.util; import java.util.Collection; import java.util.List; import java.util.Set; import gnu.trove.map.hash.TLongObjectHashMap; import io.opensphere.core.model.time.TimeSpan; import io.opensphere.core.model.time.TimeSpanList; import io.opensphere.core.util.rangeset.RangedLongSet; import io.opensphere.mantle.data.DataTypeInfo; import io.opensphere.mantle.data.element.DataElement; import io.opensphere.mantle.data.element.MapDataElement; import io.opensphere.mantle.data.element.MetaDataProvider; import io.opensphere.mantle.data.element.VisualizationState; import io.opensphere.mantle.data.geom.MapGeometrySupport; /** * Assistant utility for finding components for a DataElement within a registry. */ public interface DataElementLookupUtils { /** * Looks up the times ( if provided) for each of the specified data element * ids and filters them for overlap with the time span of interest. * * Note: any timeless data elements will match any timespan, and if * {@link TimeSpan}.Timeless is provided all data elements will match. * * @param tsOfInterest array of {@link TimeSpan}'s of interest to check * @param cacheIds the set of ids to be filtered * @return the filtered set of ids of elements that overlapped the desired * timespan. */ List<Long> filterIdsByTimeOfInterest(TimeSpan[] tsOfInterest, Collection<? extends Long> cacheIds); /** * Looks up the times ( if provided) for each of the specified data element * ids and filters them for overlap with the time span of interest. * * Note: any timeless data elements will match any timespan, and if * {@link TimeSpan}.Timeless is provided all data elements will match. * * @param tsOfInterest {@link TimeSpanList} of {@link TimeSpan}'s of * interest to check * @param cacheIds the set of ids to be filtered * @return the filtered set of ids of elements that overlapped the desired * timespan. */ List<Long> filterIdsByTimeOfInterest(TimeSpanList tsOfInterest, Collection<? 
extends Long> cacheIds); /** * Retrieves all the parts of a DataElement and reconstitutes them into an * actual DataElement. The dtiHint and dataTypeInfoKeyHint can help prevent * multiple queries from running against the data model. They are used in * the dtiHint first, then the dataTypeInfoKeyHint second. If neither hint * is provided then they will be queried first so that the remainder of the * element can be retrieved and reformed. * * @param dataElementId the internal registry id of the element * @param dtiHint the {@link DataTypeInfo} for the point if known ( null if * not known is okay ) * @param dataTypeInfoKeyHint the key for the DataTypeInfo if known ( null * if not known is okay ) * @return the data element */ DataElement getDataElement(long dataElementId, DataTypeInfo dtiHint, String dataTypeInfoKeyHint); /** * Gets the {@link DataElement} or {@link MapDataElement} data registry ids * for the given DataTypeInfo. * * @param dti the DataTypeInfo * @param tsOfInterest the {@link TimeSpan}'s of interest ( or null if no * time filtering is desired ), if provided only those elements * that overlap the span of interest will be returned. Note: that * use of this parameter will slow down the lookup significantly. * @return the data element ids or empty array if none are found. */ List<Long> getDataElementCacheIds(DataTypeInfo dti, TimeSpan... tsOfInterest); /** * Gets the {@link DataElement} or {@link MapDataElement} data registry ids * for the given DataTypeInfo. * * @param dti the DataTypeInfo * @param tsOfInterest the {@link TimeSpanList}'s of interest ( or null if * no time filtering is desired ), if provided only those * elements that overlap the span of interest will be returned. * Note: that use of this parameter will slow down the lookup * significantly. * @return the data element ids or empty array if none are found. 
*/ List<Long> getDataElementCacheIds(DataTypeInfo dti, TimeSpanList tsOfInterest); /** * Gets the {@link DataElement} or {@link MapDataElement} data registry ids * for the given DataTypeInfo key. * * @param dtiKey the DataTypeInfo key * @param tsOfInterest the {@link TimeSpan}'s of interest ( or null if no * time filtering is desired ). Note: that use of this parameter * will slow down the lookup significantly. * @return the data element ids */ List<Long> getDataElementCacheIds(String dtiKey, TimeSpan... tsOfInterest); /** * Gets the {@link DataElement} or {@link MapDataElement} data registry ids * for the given DataTypeInfo key. * * @param dtiKey the DataTypeInfo key * @param tsOfInterest the {@link TimeSpanList}'s of interest ( or null if * no time filtering is desired ). Note: that use of this * parameter will slow down the lookup significantly. * @return the data element ids */ List<Long> getDataElementCacheIds(String dtiKey, TimeSpanList tsOfInterest); /** * Retrieves the DataElements by id and adds them to the provided list. The * dtiHint and dataTypeInfoKeyHint can help prevent multiple queries from * running against the data model. They are used in the dtiHint first, then * the dataTypeInfoKeyHint second. If neither hint is provided then they * will be queried first so that the remainder of the element can be * retrieved and reformed. * * All of the id's requested must be of the same data type or an exception * will be generated. * * @param dataElementIds the data element ids to lookup * @param dtiHint the {@link DataTypeInfo} for the point if known ( null if * not known is okay ) * @param dataTypeInfoKeyHint the key for the DataTypeInfo if known ( null * if not known is okay ) * @param ignoreMapGeometrySupport the ignore map data elements map geometry * support ( don't get the extra MGS parts ) * @return the number of elements added to the list. 
     * @throws DataElementLookupException if the dtiHint or dataTypeInfoKeyHint
     *         are the wrong type for any of the ids provided, or if the
     *         types retrieved are of different data types, or if the data
     *         type cannot be determined, or if all the ids cannot be
     *         retrieved.
     */
    List<DataElement> getDataElements(List<Long> dataElementIds, DataTypeInfo dtiHint, String dataTypeInfoKeyHint,
            boolean ignoreMapGeometrySupport) throws DataElementLookupException;

    /**
     * Gets all data elements for the given data type.
     *
     * @param type the data type
     * @return the data elements
     */
    List<DataElement> getDataElements(DataTypeInfo type);

    /**
     * Gets the DataTypeInfo for a given DataElement.
     *
     * @param dataElementId the internal registry id of the element
     * @return the DataTypeInfo or null if not found or no DataTypeInfo
     *         available.
     */
    DataTypeInfo getDataTypeInfo(long dataElementId);

    /**
     * Gets the DataTypeInfo for a given DataElement.
     *
     * @param dataElementId the internal registry id of the element
     * @param dtiKeyHint the data type info key ( if available, otherwise will
     *            be queried).
     * @return the DataTypeInfo or null if not found or no DataTypeInfo
     *         available.
     */
    DataTypeInfo getDataTypeInfo(long dataElementId, String dtiKeyHint);

    /**
     * Gets the DataTypeInfo key for the specified data element.
     *
     * @param dataElementId the internal registry id of the element
     * @return the data type info key or null if not available or id not found.
     */
    String getDataTypeInfoKey(long dataElementId);

    /**
     * Gets the DataTypeInfo keys for the requested ids.
     *
     * Will return a list with one to one correspondence between the
     * dataElementIds and the type keys, null will be inserted for ids with no
     * associated type.
     *
     * @param dataElementIds the data element ids
     * @return the data type info keys
     */
    List<String> getDataTypeInfoKeys(List<Long> dataElementIds);

    /**
     * Gets the unique set of DataTypeInfo keys for the set of data element ids.
* * @param dataElementIds the data element ids * @return the unique set of DataTypeInfo keys */ Set<String> getDataTypeInfoKeySet(List<Long> dataElementIds); /** * Retrieves all the parts of a MapDataElement and reconstitutes them into * an actual MapDataElement. The dtiHint and dataTypeInfoKeyHint can help * prevent multiple queries from running against the data model. They are * used in the dtiHint first, then the dataTypeInfoKeyHint second. If * neither hint is provided then they will be queried first so that the * remainder of the element can be retrieved and reformed. * * @param dataElementId the internal registry id of the element * @param dtiHint the {@link DataTypeInfo} for the point if known ( null if * not known is okay ) * @param dataTypeInfoKeyHint the key for the DataTypeInfo if known ( null * if not known is okay ) * @param ignoreMapGeometrySupport the ignore map data elements map geometry * support ( don't get the extra MGS parts ) * @return the map data element */ MapDataElement getMapDataElement(long dataElementId, DataTypeInfo dtiHint, String dataTypeInfoKeyHint, boolean ignoreMapGeometrySupport); /** * Retrieves all the parts of a MapDataElement and reconstitutes them into * an actual MapDataElement. The dtiHint and dataTypeInfoKeyHint can help * prevent multiple queries from running against the data model. They are * used in the dtiHint first, then the dataTypeInfoKeyHint second. If * neither hint is provided then they will be queried first so that the * remainder of the element can be retrieved and reformed. * * @param dataElementIds the internal registry ids of the elements to * retrieve. * @param dtiHint the {@link DataTypeInfo} for the point if known ( null if * not known is okay ) * @param dataTypeInfoKeyHint the key for the DataTypeInfo if known ( null * if not known is okay ) * @param ignoreMapGeometrySupport the ignore map data elements map geometry * support ( don't get the extra MGS parts ) * @return the map data elements. 
* @throws DataElementLookupException if the dtiHint or dataTypeInfoKeyHint * are the wrong type for any of the ids provided, or if the * types retrieved are of different data types, or if the data * type cannot be determined, or if all the ids cannot be * retrieved. */ List<MapDataElement> getMapDataElements(List<Long> dataElementIds, DataTypeInfo dtiHint, String dataTypeInfoKeyHint, boolean ignoreMapGeometrySupport) throws DataElementLookupException; /** * Gets the MapGeometrySupport for given list DataElements. * * @param dataElementIds the internal registry ids of the elements * @return the MapGeometrySupport or null if not found. */ List<MapGeometrySupport> getMapGeometrySupport(List<Long> dataElementIds); /** * Gets the MapGeometrySupport for a given DataElement. * * @param dataElementId the internal registry id of the element * @return the MapGeometrySupport or null if not found. */ MapGeometrySupport getMapGeometrySupport(long dataElementId); /** * Gets the MapGeometrySupport for given list DataElements. * * @param dataElementIds the internal registry ids of the elements * @return the MapGeometrySupport or null if not found. */ List<MapGeometrySupport> getMapGeometrySupport(RangedLongSet dataElementIds); /** * Gets the meta data list for a given DataElement. * * @param dataElementId the internal registry id of the element * @return the meta data List or null if id not found. */ List<Object> getMetaData(long dataElementId); /** * Retrieve meta data property values for a key name of a specific type of * data element. * * @param keyName the key name * @param dtiHint the {@link DataTypeInfo} for the point if known ( null if * not known is okay ) * @param dataTypeInfoKeyHint the key for the DataTypeInfo if known ( null * if not known is okay ) * @param maxSamples the max samples desired from the data type. 
* @param maxToQuery the max to query, the maximum number of elements to * check ( -1 if no limit ) * @return the list of property value samples * @throws DataElementLookupException if there is a problem determining the * type or if the keyName is invalid. */ List<Object> getMetaDataPropertySamples(String keyName, DataTypeInfo dtiHint, String dataTypeInfoKeyHint, int maxSamples, int maxToQuery) throws DataElementLookupException; /** * Retrieve meta data property values for a key name of a specific type of * data element. * * @param dataElementIds the data element ids to lookup * @param keyName the key name * @param dtiHint the {@link DataTypeInfo} for the point if known ( null if * not known is okay ) * @param dataTypeInfoKeyHint the key for the DataTypeInfo if known ( null * if not known is okay ) * @return the list of property values * @throws DataElementLookupException if there is a problem determining the * type or if the keyName is invalid. */ List<Object> getMetaDataPropertyValues(List<Long> dataElementIds, String keyName, DataTypeInfo dtiHint, String dataTypeInfoKeyHint) throws DataElementLookupException; /** * Gets a MetaDataProvider for a given DataElement. * * @param dataElementId the internal registry id of the element * @return the MetaDataProvider or null if the id is not in the registry or * the DataTypeInfo cannot be located for the data element. */ MetaDataProvider getMetaDataProvider(long dataElementId); /** * Gets the origin ID for the specified data element. * * @param dataElementId the internal registry id of the element * @return the origin id or -1 if not found. */ Long getOriginId(long dataElementId); /** * Gets a map of cache ids to origin ids for the requested DataElements. The * map will contain null values for all un-found registry ids. * * If the query fails the map will be empty. * * @param dataElementIds the data element ids to use to get the origin ids * @return the Map of cache id to origin ids. 
*/ TLongObjectHashMap<Long> getOriginIds(List<Long> dataElementIds); /** * Gets the {@link TimeSpan} for the specified data element. * * @param dataElementId internal registry id of the element * @return the {@link TimeSpan} or null if not found or it has no timespan. */ TimeSpan getTimespan(long dataElementId); /** * Gets the {@link TimeSpan}s for the specified data elements. * * @param dataElementIds internal registry ids of the elements * @return the list of {@link TimeSpan} elements with no time span will be * null entries in the map. */ List<TimeSpan> getTimespans(Collection<? extends Long> dataElementIds); /** * Gets the {@link VisualizationState} for a given DataElement. * * @param dataElementId the internal registry id of the element * @return the VisualizationState or null if not found. */ VisualizationState getVisualizationState(long dataElementId); /** * Gets the VisualizationState for given list DataElements. * * @param dataElementIds the internal registry ids of the elements * @return the VisualizationState or null if not found. */ List<VisualizationState> getVisualizationStates(List<Long> dataElementIds); /** * Gets the VisualizationState for given list DataElements. * * @param dataElementIds the internal registry ids of the elements * @return the VisualizationState or null if not found. */ List<VisualizationState> getVisualizationStates(long[] dataElementIds); /** * Gets the VisualizationState for given list DataElements. * * @param dataElementIds the internal registry ids of the elements * @return the VisualizationState or null if not found. */ List<VisualizationState> getVisualizationStates(RangedLongSet dataElementIds); }
#! /bin/sh
# Copyright (C) 2013 Free Software Foundation, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

# Verify our probe that checks that "rm -f" doesn't complain if called
# without file operands works as expected.  See automake bug#10828.

. test-init.sh

echo AC_OUTPUT >> configure.ac
: > Makefile.am

$ACLOCAL
$AUTOCONF
$AUTOMAKE

# Install a fake "rm" that errors out when invoked with no file operands,
# simulating the inferior rm implementations the probe must detect.
mkdir bin
cat > bin/rm <<'END'
#!/bin/sh
set -e; set -u; PATH=$original_PATH; export PATH
rm_opts=
while test $# -gt 0; do
  case $1 in
    -*) rm_opts="$rm_opts $1";;
     *) break;;
  esac
  shift
done
if test $# -eq 0; then
  echo "Oops, fake rm called without arguments" >&2
  exit 1
else
  exec rm $rm_opts "$@"
fi
END
chmod a+x bin/rm

# Put the fake rm first on PATH, keeping the real PATH for it to delegate to.
original_PATH=$PATH
PATH=$(pwd)/bin$PATH_SEPARATOR$PATH
export PATH original_PATH

rm -f && exit 99 # Sanity check.

# configure must fail and explain the problem, mentioning the workaround.
./configure 2>stderr && { cat stderr >&2; exit 1; }
cat stderr >&2
grep "'rm' program.* unable to run without file operands" stderr
$FGREP "tell bug-automake@gnu.org about your system" stderr
$FGREP "install GNU coreutils" stderr
$EGREP "(^| |')ACCEPT_INFERIOR_RM_PROGRAM($| |')" stderr

# With the escape hatch set, the build must proceed normally.
ACCEPT_INFERIOR_RM_PROGRAM=yes; export ACCEPT_INFERIOR_RM_PROGRAM
./configure
$MAKE
$MAKE distcheck

# For the sake of our exit trap.
PATH=$original_PATH; export PATH

:
package org.para.testdata; public class StringArrayData { static String[] testStrings = new String[200]; static { for (int i = 0; i < testStrings.length; i++) { testStrings[i] = "" + i; } } }
package com.ervin.litepal.api;

import com.ervin.litepal.model.Contributor;

import java.util.List;

import retrofit2.Call;
import retrofit2.http.GET;
import retrofit2.http.Path;

/**
 * Retrofit client interface for the GitHub REST API.
 *
 * Created by Ervin on 2016/6/3.
 */
public interface GitHubClient {

    /**
     * Lists the contributors of the {@code owner/repo} repository.
     *
     * @param owner repository owner login, substituted into the URL path
     * @param repo repository name, substituted into the URL path
     * @return a deferred {@link Call} yielding the contributor list
     */
    @GET("/repos/{owner}/{repo}/contributors")
    Call<List<Contributor>> contributors(
            @Path("owner") String owner,
            @Path("repo") String repo
    );
}
#!/bin/bash
# Run the mdstudio_atb module with the dev + docker configuration
# environments enabled; -u keeps Python output unbuffered for live logs.
export MD_CONFIG_ENVIRONMENTS=dev,docker
python -u -m mdstudio_atb
import os

import numpy as np
from skimage import color
import matplotlib.pylab as plt


def remove_files(files):
    """
    Remove files from disk

    args: files (str or list) remove all files in 'files'
    """
    if isinstance(files, (list, tuple)):
        for f in files:
            if os.path.isfile(os.path.expanduser(f)):
                os.remove(f)
    elif isinstance(files, str):
        if os.path.isfile(os.path.expanduser(files)):
            os.remove(files)


def create_dir(dirs):
    """
    Create directory

    args: dirs (str or list) create all dirs in 'dirs'
    """
    if isinstance(dirs, (list, tuple)):
        for d in dirs:
            if not os.path.exists(os.path.expanduser(d)):
                os.makedirs(d)
    elif isinstance(dirs, str):
        if not os.path.exists(os.path.expanduser(dirs)):
            os.makedirs(dirs)


def setup_logging(model_name):
    """Create the model and figure output directories for an experiment."""
    model_dir = "../../models"

    # Output path where we store experiment log and weights
    model_dir = os.path.join(model_dir, model_name)

    fig_dir = "../../figures"

    # Create if it does not exist
    create_dir([model_dir, fig_dir])


def plot_batch(color_model, q_ab, X_batch_black, X_batch_color, batch_size, h, w, nb_q, epoch):
    """Colorize a batch (argmax decoding over the ab bins) and save a
    side-by-side comparison figure (truth / grayscale / prediction) to
    ../../figures/fig_epoch<epoch>.png.
    """
    # Format X_colorized: predict ab-bin probabilities, drop the last channel
    X_colorized = color_model.predict(X_batch_black / 100.)[:, :, :, :-1]
    X_colorized = X_colorized.reshape((batch_size * h * w, nb_q))
    # Hard assignment: pick the most probable (a, b) bin per pixel
    X_colorized = q_ab[np.argmax(X_colorized, 1)]
    X_a = X_colorized[:, 0].reshape((batch_size, 1, h, w))
    X_b = X_colorized[:, 1].reshape((batch_size, 1, h, w))
    # Recombine L with predicted a/b and convert Lab -> RGB per image
    X_colorized = np.concatenate((X_batch_black, X_a, X_b), axis=1).transpose(0, 2, 3, 1)
    X_colorized = [np.expand_dims(color.lab2rgb(im), 0) for im in X_colorized]
    X_colorized = np.concatenate(X_colorized, 0).transpose(0, 3, 1, 2)

    X_batch_color = [np.expand_dims(color.lab2rgb(im.transpose(1, 2, 0)), 0) for im in X_batch_color]
    X_batch_color = np.concatenate(X_batch_color, 0).transpose(0, 3, 1, 2)

    list_img = []
    for i, img in enumerate(X_colorized[:min(32, batch_size)]):
        arr = np.concatenate([X_batch_color[i],
                              np.repeat(X_batch_black[i] / 100., 3, axis=0),
                              img], axis=2)
        list_img.append(arr)

    plt.figure(figsize=(20, 20))
    # NOTE: integer division is required here; "/" would pass a float to
    # range() and raise a TypeError on Python 3.
    list_img = [np.concatenate(list_img[4 * i: 4 * (i + 1)], axis=2)
                for i in range(len(list_img) // 4)]
    arr = np.concatenate(list_img, axis=1)
    plt.imshow(arr.transpose(1, 2, 0))
    ax = plt.gca()
    ax.get_xaxis().set_ticks([])
    ax.get_yaxis().set_ticks([])
    plt.tight_layout()
    plt.savefig("../../figures/fig_epoch%s.png" % epoch)
    plt.clf()
    plt.close()


def plot_batch_eval(color_model, q_ab, X_batch_black, X_batch_color, batch_size, h, w, nb_q, T):
    """Colorize a batch with annealed-mean decoding (temperature T) and
    display a side-by-side comparison figure (truth / grayscale / prediction).
    """
    # Format X_colorized
    X_colorized = color_model.predict(X_batch_black / 100.)[:, :, :, :-1]
    X_colorized = X_colorized.reshape((batch_size * h * w, nb_q))

    # Reweight probas: sharpen/soften the distribution with temperature T
    X_colorized = np.exp(np.log(X_colorized) / T)
    X_colorized = X_colorized / np.sum(X_colorized, 1)[:, np.newaxis]

    # Reweighted expectation over the ab bins
    q_a = q_ab[:, 0].reshape((1, 313))
    q_b = q_ab[:, 1].reshape((1, 313))
    X_a = np.sum(X_colorized * q_a, 1).reshape((batch_size, 1, h, w))
    X_b = np.sum(X_colorized * q_b, 1).reshape((batch_size, 1, h, w))

    X_colorized = np.concatenate((X_batch_black, X_a, X_b), axis=1).transpose(0, 2, 3, 1)
    X_colorized = [np.expand_dims(color.lab2rgb(im), 0) for im in X_colorized]
    X_colorized = np.concatenate(X_colorized, 0).transpose(0, 3, 1, 2)

    X_batch_color = [np.expand_dims(color.lab2rgb(im.transpose(1, 2, 0)), 0) for im in X_batch_color]
    X_batch_color = np.concatenate(X_batch_color, 0).transpose(0, 3, 1, 2)

    list_img = []
    for i, img in enumerate(X_colorized[:min(32, batch_size)]):
        arr = np.concatenate([X_batch_color[i],
                              np.repeat(X_batch_black[i] / 100., 3, axis=0),
                              img], axis=2)
        list_img.append(arr)

    plt.figure(figsize=(20, 20))
    # Integer division, as in plot_batch: Python 3 range() rejects floats.
    list_img = [np.concatenate(list_img[4 * i: 4 * (i + 1)], axis=2)
                for i in range(len(list_img) // 4)]
    arr = np.concatenate(list_img, axis=1)
    plt.imshow(arr.transpose(1, 2, 0))
    ax = plt.gca()
    ax.get_xaxis().set_ticks([])
    ax.get_yaxis().set_ticks([])
    plt.tight_layout()
    plt.show()
#!/usr/bin/env bash
# Link two JIRA issues via the jira-cli jar.
# Usage: $0 <source-issue-key> <target-issue-key> <link-type>
source prepare-env.sh
java -jar target/jira-cli-1.0-SNAPSHOT-jar-with-dependencies.jar --action link --source "$1" --target "$2" --link-type "$3"
<filename>sources/include/nx/nx/attributes.hpp
#ifndef __NX_ATTRIBUTES_H__
#define __NX_ATTRIBUTES_H__

#include <ostream>
#include <string>
#include <unordered_map>

#include <nx/config.h>

namespace nx {

/// A single named attribute (name/value string pair) streamable to an ostream.
struct NX_API attribute_base
{
    attribute_base(const std::string& n, const std::string& v);
    attribute_base(std::string&& n, std::string&& v);

    /// Writes the attribute to os and returns the stream.
    std::ostream& operator()(std::ostream& os) const;

    std::string name;
    std::string value;
};

/// Collection of string attributes keyed by name, with a configurable
/// separator used when serializing (default ';'), supporting parsing from
/// a delimited string, merging, and streaming.
class NX_API attribute_map
{
public:
    using map_type = std::unordered_map<std::string, std::string>;
    using iterator = map_type::iterator;
    using const_iterator = map_type::const_iterator;

    attribute_map(char sep = ';');
    /// Parses attributes from a sep-delimited string.
    attribute_map(const std::string& data, char sep = ';');
    attribute_map(const attribute_map& other);
    attribute_map(attribute_map&& other);
    virtual ~attribute_map();

    attribute_map& operator=(const attribute_map& other);
    attribute_map& operator=(attribute_map&& other);

    iterator begin();
    const_iterator begin() const;
    iterator end();
    const_iterator end() const;

    /// True when an attribute with the given name is present.
    bool has(const std::string& name) const;

    std::string& operator[](const std::string& name);
    const std::string& operator[](const std::string& name) const;

    /// Insert a single attribute (copy / move overloads).
    attribute_map& operator<<(const attribute_base& a);
    attribute_map& operator<<(attribute_base&& a);

    /// Merge another map into this one (copy / move overloads).
    attribute_map& operator<<(const attribute_map& other);
    attribute_map& operator<<(attribute_map&& other);

    /// Serializes the map to os using sep_ between entries.
    virtual std::ostream& operator()(std::ostream& os) const;

protected:
    char sep_;
    map_type m_;
    // NOTE(review): presumably a lowercased-key index for case-insensitive
    // lookup ("lcm" = lower-case map) — confirm against the implementation.
    map_type lcm_;
    // Returned by const operator[] for missing keys, by the look of it — verify.
    std::string empty_;
};

inline std::ostream& operator<<(std::ostream& os, const attribute_map& a)
{
    a(os);
    return os;
}

/// Concrete attribute collection type used throughout nx.
class attributes : public attribute_map
{
public:
    using attribute_map::attribute_map;
};

/// Concrete single-attribute type used throughout nx.
struct attribute : public attribute_base
{
    using attribute_base::attribute_base;
};

} // namespace nx

#endif // __NX_ATTRIBUTES_H__
# vector of numbers
input_vector <- c(1, 3, 5, 4, 2)

# calculate median: the middle value of the sorted vector (1 2 3 4 5),
# which is 3 for this odd-length input
median(input_vector)

# output
# [1] 3
var BeachWaterQuality = require('../index');

var bwq = new BeachWaterQuality();

// Look up and print the forecast level for beach code 'BW'.
bwq.getBeachForcastLevel('BW').then(function(forecast) {
    console.log(forecast);
}).catch(function(err) {
    // Without this handler a failed lookup would surface only as an
    // unhandled promise rejection; report it explicitly instead.
    console.error(err);
});
class SyncHandlerManager:
    """Tracks two handler groups (receiver and client-event) and stops
    whichever groups are currently running on :meth:`stop`."""

    def __init__(self, inbox_manager):
        self.inbox_manager = inbox_manager
        self._client_event_runner = None
        self.receiver_handlers_running = False
        self.client_event_handlers_running = False

    def start_receiver_handlers(self):
        # Logic to start receiver handlers
        self.receiver_handlers_running = True

    def stop_receiver_handlers(self):
        # Logic to stop receiver handlers
        self.receiver_handlers_running = False

    def start_client_event_handlers(self):
        # Logic to start client event handlers
        self.client_event_handlers_running = True

    def stop_client_event_handlers(self):
        # Logic to stop client event handlers
        self.client_event_handlers_running = False

    def stop(self):
        """Stop every handler group that is running; no-op for stopped groups."""
        if self.receiver_handlers_running:
            self.stop_receiver_handlers()
        if self.client_event_handlers_running:
            self.stop_client_event_handlers()


# Fix: the tests below referenced an undefined global `inbox_manager`, which
# raised NameError when any of them ran. SyncHandlerManager only stores the
# object, so a plain sentinel instance suffices as the shared fixture.
inbox_manager = object()


# Test cases
def test_stop_no_handlers_running():
    hm = SyncHandlerManager(inbox_manager)
    hm.stop()
    assert not hm.receiver_handlers_running
    assert not hm.client_event_handlers_running


def test_stop_some_receiver_handlers_running():
    hm = SyncHandlerManager(inbox_manager)
    hm.start_receiver_handlers()
    hm.stop()
    assert not hm.receiver_handlers_running
    assert not hm.client_event_handlers_running


def test_stop_some_client_event_handlers_running():
    hm = SyncHandlerManager(inbox_manager)
    hm.start_client_event_handlers()
    hm.stop()
    assert not hm.receiver_handlers_running
    assert not hm.client_event_handlers_running


def test_stop_some_receiver_and_client_event_handlers_running():
    hm = SyncHandlerManager(inbox_manager)
    hm.start_receiver_handlers()
    hm.start_client_event_handlers()
    hm.stop()
    assert not hm.receiver_handlers_running
    assert not hm.client_event_handlers_running


def test_stop_all_handlers_running():
    hm = SyncHandlerManager(inbox_manager)
    hm.start_receiver_handlers()
    hm.start_client_event_handlers()
    hm.stop()
    assert not hm.receiver_handlers_running
    assert not hm.client_event_handlers_running
/**
 * The ACE Editor TemplateProcessor base
 *
 * @module aui-ace-editor
 * @submodule aui-ace-autocomplete-templateprocessor
 */

var Lang = A.Lang,
    AArray = A.Array,
    AObject = A.Object,
    Base = A.AceEditor.AutoCompleteBase,

    // Match types produced by the tokenizer: a template directive vs. a
    // dotted variable path.
    MATCH_DIRECTIVES = 0,
    MATCH_VARIABLES = 1,

    // Token classification codes returned by _getTokenType.
    TOKEN_PUNCTUATOR_DOT = 1,
    TOKEN_UNRECOGNIZED = -1,
    TOKEN_VARIABLE = 0,

    /**
     * A base class for TemplateProcessor.
     *
     * @class A.AceEditor.TemplateProcessor
     * @extends Base
     * @param {Object} config Object literal specifying widget configuration
     *     properties.
     * @constructor
     */
    TemplateProcessor = A.Base.create('aui-ace-autocomplete-templateprocessor', A.Base, [
    ], {

        /**
         * Accepts match and depending on its type processes directives or
         * variables. In case of success, calls the provided success callback,
         * or the error callback otherwise.
         *
         * @method getResults
         * @param {Object} match The provided match. It should contain at least
         *     type and content properties
         * @param {Function} callbackSuccess The function to be called in case
         *     of success
         * @param {Function} callbackError The function to be called in case of
         *     error
         */
        getResults: function(match, callbackSuccess, callbackError) {
            var instance = this,
                content,
                host,
                matchDirectives,
                matches,
                type;

            type = match.type;

            if (type === MATCH_DIRECTIVES) {
                matchDirectives = instance.get('directives');

                // Directive matching is case-insensitive.
                content = match.content.toLowerCase();

                if (content.length) {
                    host = instance.get('host');

                    matchDirectives = host._filterResults(content, matchDirectives);
                }

                callbackSuccess(matchDirectives);
            }
            else if (type === MATCH_VARIABLES) {
                matches = instance._getVariableMatches(match.content);

                callbackSuccess(matches);
            }
            else {
                callbackError();
            }
        },

        /**
         * Formats the selected suggestion depending on the match type and
         * currently selected editor mode. The match type can be one of:
         * MATCH_DIRECTIVES or MATCH_VARIABLES. The selected editor mode can be
         * one of the following: INSERT or OVERWRITE.
         * See {{#crossLink
         * "AceEditor.AutoCompleteBase/fillMode:attribute"}}{{/crossLink}}
         *
         * @method getSuggestion
         * @param {Object} match The provided match. It should contain at least
         *     type and content properties
         * @param {String} selectedSuggestion The selected suggestion from the
         *     list with suggestions
         * @return {String} The final suggestion which should be inserted to the
         *     editor
         */
        getSuggestion: function(match, selectedSuggestion) {
            var instance = this,
                fillMode,
                lastEntry,
                result,
                type,
                variables;

            result = selectedSuggestion || '';

            if (selectedSuggestion) {
                fillMode = instance.get('host').get('fillMode');

                type = match.type;

                if (fillMode === Base.FILL_MODE_INSERT) {
                    // INSERT mode: strip the part the user already typed so
                    // only the remainder of the suggestion is inserted.
                    if (type === MATCH_DIRECTIVES) {
                        if (match.content && selectedSuggestion.indexOf(match.content) === 0) {
                            result = selectedSuggestion.substring(match.content.length);
                        }
                    }
                    else if (type === MATCH_VARIABLES) {
                        variables = match.content.split('.');

                        lastEntry = variables[variables.length - 1];

                        if (lastEntry && selectedSuggestion.indexOf(lastEntry) === 0) {
                            result = selectedSuggestion.substring(lastEntry.length);
                        }
                    }
                }
                else if (type === MATCH_VARIABLES) {
                    // OVERWRITE mode for variables: replace the last path
                    // segment with the chosen suggestion and rejoin the path.
                    variables = match.content.split('.');

                    variables[variables.length - 1] = selectedSuggestion;

                    result = variables.join('.');
                }
            }

            return result;
        },

        /**
         * Checks if the the provided index is the last token in the list of
         * tokens.
         *
         * @method _isLastToken
         * @param {Number} index The index which should be checked
         * @param {Array} tokens The array with tokens
         * @protected
         * @return {Boolean} True if the provided index is the last token in the
         *     list
         */
        _isLastToken: function(index, tokens) {
            return index === tokens.length - 1;
        },

        /**
         * Retrieves the type of a token. It can be one of these:
         * TOKEN_PUNCTUATOR_DOT = 1
         * TOKEN_UNRECOGNIZED = -1
         * TOKEN_VARIABLE = 0
         *
         * @method _getTokenType
         * @param {String} token The token to classify (empty string means the
         *     split consumed a dot punctuator)
         * @protected
         * @return {Number} The token type
         */
        _getTokenType: function(token) {
            var tokenType = TOKEN_UNRECOGNIZED;

            if (Lang.isString(token)) {
                if (token.length) {
                    tokenType = TOKEN_VARIABLE;
                }
                else {
                    // split('.') yields '' for consecutive/leading dots.
                    tokenType = TOKEN_PUNCTUATOR_DOT;
                }
            }

            return tokenType;
        },

        /**
         * Scans the content and extracts variables.
         *
         * Walks the dotted path segment by segment through the `variables`
         * metadata (using each segment's type/returnType to descend into
         * `data.types`), then filters and formats the reachable keys.
         *
         * @method _getVariableMatches
         * @param {String} content The content from which variable matches will
         *     be extracted
         * @protected
         * @return {Array} List with variable matches
         */
        _getVariableMatches: function(content) {
            var instance = this,
                curVariableData,
                data,
                host,
                i,
                isLastToken,
                lastEntry,
                leftPartheseIndex,
                matches,
                results,
                resultsData,
                token,
                tokens,
                tokenType,
                variableData,
                variableType;

            results = [];

            data = instance.get('variables');

            resultsData = {};

            curVariableData = data.variables;

            if (content) {
                tokens = content.split('.');

                // lastEntry stays undefined when content is empty, which
                // skips the _filterResults step below.
                lastEntry = tokens[tokens.length - 1];

                for (i = 0; i < tokens.length; i++) {
                    token = tokens[i];

                    tokenType = instance._getTokenType(token);

                    if (tokenType === TOKEN_PUNCTUATOR_DOT) {
                        if (i === 0) {
                            curVariableData = {};
                        }
                        else {
                            resultsData = curVariableData;
                        }
                    }
                    else if (tokenType === TOKEN_VARIABLE) {
                        isLastToken = instance._isLastToken(i, tokens);

                        if (isLastToken) {
                            resultsData = curVariableData;

                            break;
                        }

                        // Strip a call suffix like "foo(" so the lookup uses
                        // the bare method name.
                        leftPartheseIndex = token.indexOf('(');

                        if (leftPartheseIndex !== -1) {
                            token = token.substring(0, leftPartheseIndex);
                        }

                        variableData = curVariableData[token];

                        if (variableData) {
                            if (i === 0) {
                                variableType = variableData.type;
                            }
                            else {
                                variableType = variableData.returnType;
                            }

                            curVariableData = data.types[variableType] || {};
                        }
                        else if (isLastToken) {
                            // NOTE(review): unreachable — isLastToken already
                            // broke out of the loop above.
                            resultsData = curVariableData;

                            break;
                        }
                        else {
                            // Unknown intermediate segment: no completions.
                            resultsData = {};

                            break;
                        }
                    }
                }
            }
            else {
                resultsData = data.variables;
            }

            results = AObject.keys(resultsData);

            matches = results.sort();

            if (lastEntry) {
                host = instance.get('host');

                matches = host._filterResults(lastEntry, matches);
            }

            if (matches.length) {
                matches = AArray.map(
                    matches,
                    function(item) {
                        var args,
                            data;

                        data = resultsData[item];

                        if (data.type === 'Method') {
                            // Render methods as "name(ArgType1, ArgType2)",
                            // keeping only the last segment of each
                            // fully-qualified argument type.
                            args = AArray.map(
                                data.argumentTypes,
                                function(item) {
                                    var parts = item.split('.');

                                    return parts[parts.length - 1];
                                }
                            );

                            return item + '(' + args.join(', ') + ')';
                        }
                        else {
                            return item;
                        }
                    }
                );
            }

            return matches;
        },

        /**
         * Normalizes a regular expression value. If the value is String, it
         * will be converted to an RegExp.
         *
         * @method _setRegexValue
         * @param {String|RegExp} value The provided regular expression value
         * @protected
         * @return {RegExp} The final instance of RegExp object, or
         *     A.AttributeCore.INVALID_VALUE for anything else
         */
        _setRegexValue: function(value) {
            var result = A.AttributeCore.INVALID_VALUE;

            if (Lang.isString(value)) {
                result = new RegExp(value);
            }
            else if (value instanceof RegExp) {
                result = value;
            }

            return result;
        }
    }, {

        /**
         * Static property which provides a string to identify the class.
         *
         * @property NAME
         * @type String
         * @static
         */
        NAME: 'aui-ace-autocomplete-templateprocessor',

        /**
         * Static property provides a string to identify the namespace.
         *
         * @property NS
         * @type String
         * @static
         */
        NS: 'aui-ace-autocomplete-templateprocessor',

        /**
         * Static property used to define the default attribute
         * configuration for the TemplateProcessor.
         *
         * @property ATTRS
         * @type Object
         * @static
         */
        ATTRS: {

            /**
             * Contains an array of all possible directives for the
             * corresponding language.
             *
             * @attribute directives
             * @type Array
             */
            directives: {
                validator: Lang.isArray
            },

            /**
             * The Editor in which the current instance is plugged.
             *
             * @attribute host
             * @type Object
             */
            host: {
                validator: Lang.isObject
            },

            /**
             * Contains the supported variables for the corresponding language.
             *
             * @attribute variables
             * @type Object
             */
            variables: {
                validator: Lang.isObject
            }
        }
    });

A.AceEditor.TemplateProcessor = TemplateProcessor;
#!/bin/bash
# Build a Docker image from every immediate subdirectory of the current
# directory, tagging each image with the subdirectory's basename.
# -e: stop at the first failed build; -x: echo each command.
set -ex

# Fix: POSIX `find` requires an explicit path operand ("." was missing).
# Note: the $(find ...) expansion still word-splits, so directory names with
# whitespace are not supported by this loop.
for d in $(find . -mindepth 1 -maxdepth 1 -type d)
do
    tag=$(basename "$d")
    sudo docker build -t "$tag" "$d"
done
def slot_filling_algorithm(user_input):
    """Fill name/age/gender slots from free text with a keyword-then-value scan.

    A token equal to "name", "age" or "gender" (case-insensitive) selects the
    current slot; every subsequent non-keyword token overwrites that slot's
    value, so the token nearest after the keyword wins last.

    Fix: tokens appearing before any keyword are now skipped — previously they
    made the function raise NameError because ``current_slot_key`` was
    referenced before assignment.

    :param user_input: raw user utterance to tokenize (via nltk).
    :return: dict with keys 'name', 'age', 'gender'; unfilled slots stay ''.
    """
    tokens = nltk.word_tokenize(user_input)
    slots = {
        'name': '',
        'age': '',
        'gender': ''
    }
    current_slot_key = None
    for token in tokens:
        if token.lower() in ("name", "age", "gender"):
            current_slot_key = token.lower()
        elif current_slot_key is not None:
            slots[current_slot_key] = token
    return slots
import map from 'lodash/map'
import { all, fork } from 'redux-saga/effects'
import Dashboard from './containers/Dashboard/Store/saga'

// Every feature saga that should run for the lifetime of the app.
const sagas = [
  Dashboard,
]

/**
 * Root saga: forks each registered feature saga and keeps them all running
 * in parallel until the middleware is torn down.
 */
export default function* rootSaga() {
  const forked = map(sagas, (saga) => fork(saga))
  yield all(forked)
}
def find_repeated_words(text):
    """Return the words that occur more than once in *text*.

    Words are whitespace-delimited and compared case-sensitively (same as the
    original). Each repeated word appears once in the result, ordered by its
    first occurrence.

    Fix: replaces the previous O(n^2) pairwise scan (nested loops plus a list
    membership test) with a single counting pass — O(n) overall.

    :param text: input string; empty or whitespace-only text yields [].
    :return: list of repeated words in first-occurrence order.
    """
    words = text.split()

    # First pass: count occurrences of every word.
    counts = {}
    for word in words:
        counts[word] = counts.get(word, 0) + 1

    # Second pass: collect duplicates in first-occurrence order, once each.
    repeated_words = []
    added = set()
    for word in words:
        if counts[word] > 1 and word not in added:
            added.add(word)
            repeated_words.append(word)
    return repeated_words
#!/bin/bash
# Replace 127.0.0.1 with this host's eth0 address on the ".novalocal" line of
# /etc/hosts (OpenStack guest naming), keeping a .bkp backup of the original.

# Get local IP — parses net-tools style "inet addr:" output from ifconfig.
LocalIP="$(ifconfig eth0 | grep 'inet addr' | cut -d: -f2 | cut -d ' ' -f1)"

# Fix: abort when the address could not be parsed. Previously an empty
# $LocalIP would silently turn the substitution into s/127.0.0.1//g and
# corrupt the matched /etc/hosts line.
if [ -z "$LocalIP" ]; then
    echo "ERROR: could not determine eth0 IP address" >&2
    exit 1
fi

# Change local IP in hosts; quote the expression so sed receives it as a
# single argument even if it ever contains whitespace.
command="/.novalocal/s/127.0.0.1/$LocalIP/g"
sed -i.bkp -e "$command" /etc/hosts
<filename>src/main/java/br/indie/fiscal4j/danfe/MDFeDanfeReport.java
package br.indie.fiscal4j.danfe;

import br.indie.fiscal4j.mdfe3.classes.nota.MDFProcessado;
import com.google.zxing.BarcodeFormat;
import com.google.zxing.EncodeHintType;
import com.google.zxing.WriterException;
import com.google.zxing.common.BitMatrix;
import com.google.zxing.qrcode.QRCodeWriter;
import com.google.zxing.qrcode.decoder.ErrorCorrectionLevel;
import net.sf.jasperreports.engine.*;
import net.sf.jasperreports.engine.data.JRXmlDataSource;
import org.w3c.dom.Document;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;

import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import java.awt.*;
import java.awt.image.BufferedImage;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.StringReader;
import java.util.EnumMap;
import java.util.HashMap;
import java.util.Map;

/**
 * Renders the DAMDFe (printable representation of a processed MDF-e) as a PDF
 * using a bundled JasperReports template, embedding an optional company logo
 * and the mandatory QR code generated with ZXing.
 */
public class MDFeDanfeReport {

    // Parsed MDF-e document the report is generated from.
    private final MDFProcessado processado;
    // Jasper fill parameters (LOGO, QR_CODE) shared across a single report run.
    private Map<String, Object> parameters;

    /**
     * Builds the report from raw MDF-e XML; delegates parsing to DFParser
     * (same package — not visible in this file).
     */
    public MDFeDanfeReport(String xml) {
        this(new DFParser().mdfProcessadaParaObjeto(xml));
    }

    public MDFeDanfeReport(MDFProcessado processado) {
        this.processado = processado;
        this.parameters = new HashMap<>();
    }

    /**
     * Generates the DAMDFe PDF.
     *
     * @param logoEmpresa optional company logo bytes (may be null)
     * @param rodape footer text. NOTE(review): currently unused — it is passed
     *               through to createJasperPrintMDFe but never placed in the
     *               report parameters; confirm whether the template expects it.
     * @return the filled report exported as PDF bytes
     */
    public byte[] gerarDanfeMDFe(byte[] logoEmpresa, String rodape) throws Exception {
        return toPDF(createJasperPrintMDFe(logoEmpresa, rodape));
    }

    private static byte[] toPDF(JasperPrint print) throws JRException {
        return JasperExportManager.exportReportToPdf(print);
    }

    /**
     * Loads the compiled .jasper template from the classpath and fills it with
     * an XML datasource built from the processed MDF-e ("/mdfeProc" root).
     */
    private JasperPrint createJasperPrintMDFe(byte[] logoEmpresa, String rodape) throws IOException, JRException, ParserConfigurationException, SAXException, WriterException {
        try (InputStream in = MDFeDanfeReport.class.getClassLoader().getResourceAsStream("danfe/mdfe/DAMDFeRetratoA4.jasper")) {
            final JRPropertiesUtil jrPropertiesUtil = JRPropertiesUtil.getInstance(DefaultJasperReportsContext.getInstance());
            // Force the Jaxen XPath executer so XML field expressions resolve
            // consistently regardless of what is on the runtime classpath.
            jrPropertiesUtil.setProperty("net.sf.jasperreports.xpath.executer.factory", "net.sf.jasperreports.engine.util.xml.JaxenXPathExecuterFactory");
            parameters.put("LOGO", (logoEmpresa == null ? null : new ByteArrayInputStream(logoEmpresa)));
            parameters.put("QR_CODE", gerarQRCode());
            return JasperFillManager.fillReport(in, parameters, new JRXmlDataSource(convertStringXMl2DOM(processado.toString()), "/mdfeProc"));
        }
    }

    /**
     * Renders the MDF-e's QR code (from the qrCodMDFe field) into a 250x250
     * black-on-white RGB image, pixel by pixel from the ZXing bit matrix.
     */
    private BufferedImage gerarQRCode() throws WriterException {
        int size = 250;
        Map<EncodeHintType, Object> hintMap = new EnumMap<>(EncodeHintType.class);
        hintMap.put(EncodeHintType.CHARACTER_SET, "UTF-8");
        hintMap.put(EncodeHintType.MARGIN, 1); /* default = 4 */
        hintMap.put(EncodeHintType.ERROR_CORRECTION, ErrorCorrectionLevel.L);

        QRCodeWriter qrCodeWriter = new QRCodeWriter();
        BitMatrix byteMatrix = qrCodeWriter.encode(processado.getMdfe().getMdfInfoSuplementar().getQrCodMDFe(), BarcodeFormat.QR_CODE, size, size, hintMap);
        int byteMatrixWidth = byteMatrix.getWidth();

        BufferedImage image = new BufferedImage(byteMatrixWidth, byteMatrixWidth, BufferedImage.TYPE_INT_RGB);
        image.createGraphics();

        Graphics2D graphics = (Graphics2D) image.getGraphics();
        graphics.setColor(Color.WHITE);
        graphics.fillRect(0, 0, byteMatrixWidth, byteMatrixWidth);
        graphics.setColor(Color.BLACK);

        for (int i = 0; i < byteMatrixWidth; i++) {
            for (int j = 0; j < byteMatrixWidth; j++) {
                if (byteMatrix.get(i, j)) {
                    graphics.fillRect(i, j, 1, 1);
                }
            }
        }
        return image;
    }

    /**
     * Parses an XML string into a DOM Document (namespace handling left at the
     * factory default, i.e. not namespace-aware).
     */
    private Document convertStringXMl2DOM(String xml) throws ParserConfigurationException, IOException, SAXException {
        try (StringReader stringReader = new StringReader(xml)) {
            InputSource inputSource = new InputSource();
            inputSource.setCharacterStream(stringReader);
            return DocumentBuilderFactory.newInstance().newDocumentBuilder().parse(inputSource);
        }
    }
}
<reponame>twitter-zuiwanyuan/finatra<filename>thrift/src/test/scala/com/twitter/finatra/thrift/tests/DoEverythingThriftServerStartupTest.scala
package com.twitter.finatra.thrift.tests

import com.google.inject.Stage
import com.twitter.finatra.thrift.EmbeddedThriftServer
import com.twitter.finatra.thrift.tests.doeverything.DoEverythingThriftServer
import com.twitter.inject.server.FeatureTest

/**
 * Startup smoke test: boots DoEverythingThriftServer in an embedded harness
 * and asserts it reports healthy. Stage.PRODUCTION forces eager singleton
 * creation so wiring errors surface at startup rather than lazily.
 */
class DoEverythingThriftServerStartupTest extends FeatureTest {

  override val server = new EmbeddedThriftServer(
    twitterServer = new DoEverythingThriftServer,
    stage = Stage.PRODUCTION)

  "Server" should {
    "start healthy" in {
      // Polls the embedded server's health endpoint.
      server.assertHealthy()
    }
  }
}
// Accumulates raw bytes into an in-memory buffer via typed Write overloads.
public class ByteWriter
{
    // The accumulated output; callers read this after writing.
    public List<byte> Bytes { get; } = new List<byte>();

    // Appends the int as a SINGLE byte.
    // NOTE(review): Convert.ToByte throws OverflowException for values outside
    // 0..255, and this is asymmetric with Write(long), which emits all 8
    // bytes — confirm callers only ever pass byte-sized values here.
    public void Write(int i)
    {
        Bytes.Add(Convert.ToByte(i));
    }

    // Appends all 8 bytes of the long in the platform's native byte order
    // (BitConverter.IsLittleEndian determines the ordering).
    public void Write(long i)
    {
        byte[] longBytes = BitConverter.GetBytes(i);
        Bytes.AddRange(longBytes);
    }
}
<filename>src/main/java/net/b07z/sepia/websockets/mqtt/SepiaMqttClient.java
package net.b07z.sepia.websockets.mqtt;

import java.util.function.Consumer;

import org.eclipse.paho.client.mqttv3.IMqttClient;
import org.eclipse.paho.client.mqttv3.IMqttMessageListener;
import org.eclipse.paho.client.mqttv3.MqttClient;
import org.eclipse.paho.client.mqttv3.MqttConnectOptions;
import org.eclipse.paho.client.mqttv3.MqttException;
import org.eclipse.paho.client.mqttv3.MqttMessage;
import org.eclipse.paho.client.mqttv3.persist.MemoryPersistence;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;

import net.b07z.sepia.server.core.tools.Is;
import net.b07z.sepia.server.core.tools.JSON;

/**
 * Abstraction layer for MQTT client.
 *
 * Wraps an Eclipse Paho {@link MqttClient} (in-memory persistence) and
 * normalizes incoming message payloads to JSON for SEPIA consumers.
 *
 * @author <NAME>
 *
 */
public class SepiaMqttClient {

    private String brokerAddress;
    private IMqttClient client;
    private SepiaMqttClientOptions clientOptions;

    /**
     * Create MQTT client with default options and in-memory persistence.
     * @param brokerAddress - address of MQTT broker, e.g. tcp://iot.eclipse.org:1883 or ws://broker.hivemq.com:8000
     * @throws Exception
     */
    public SepiaMqttClient(String brokerAddress) throws Exception {
        this.brokerAddress = brokerAddress;
        this.clientOptions = new SepiaMqttClientOptions();
        this.client = new MqttClient(this.brokerAddress, clientOptions.publisherId, new MemoryPersistence());
    }

    /**
     * Create MQTT client with custom options and in-memory persistence.
     * @param brokerAddress - address of MQTT broker, e.g. tcp://iot.eclipse.org:1883 or ws://broker.hivemq.com:8000
     * @param clientOptions - {@link SepiaMqttClientOptions}
     * @throws Exception
     */
    public SepiaMqttClient(String brokerAddress, SepiaMqttClientOptions clientOptions) throws Exception {
        this.brokerAddress = brokerAddress;
        this.clientOptions = clientOptions;
        this.client = new MqttClient(this.brokerAddress, clientOptions.publisherId, new MemoryPersistence());
    }

    /**
     * Retrieve options set or created during construction.
     */
    public SepiaMqttClientOptions getOptions(){
        return this.clientOptions;
    }

    /**
     * Connect to broker with given options. This is a blocking method that returns when connected (or on error).
     * User name and password are only applied when non-empty.
     * @throws Exception
     */
    public void connect() throws Exception {
        MqttConnectOptions options = new MqttConnectOptions();
        options.setAutomaticReconnect(this.clientOptions.automaticReconnect);
        options.setCleanSession(this.clientOptions.cleanSession);
        options.setConnectionTimeout(this.clientOptions.connectionTimeoutSec);
        if (Is.notNullOrEmpty(this.clientOptions.userName)){
            options.setUserName(this.clientOptions.userName);
        }
        if (Is.notNullOrEmpty(this.clientOptions.password)){
            options.setPassword(this.clientOptions.password.toCharArray());
        }
        //some defaults
        options.setMaxInflight(10);
        this.client.connect(options);
    }

    /**
     * Is client connected to server?
     */
    public boolean isConnected(){
        return this.client.isConnected();
    }

    /**
     * Disconnects from the server. An attempt is made to quiesce the client allowing outstanding work to complete before disconnecting.
     * It will wait for a maximum of 30 seconds for work to quiesce before disconnecting.
     * This is a blocking method that returns once disconnect completes.
     * @throws Exception - if a problem is encountered while disconnecting
     */
    public void disconnect() throws Exception {
        this.client.disconnect();
    }

    /**
     * Close the client Releases all resource associated with the client.
     * After the client has been closed it cannot be reused. For instance attempts to connect will fail.
     * @throws Exception - if the client is not disconnected
     */
    public void close() throws Exception {
        this.client.close();
    }

    /**
     * Publish a MQTT message to broker.
     * @param topic
     * @param message - payload/QoS/retained flags are copied onto the Paho message
     * @throws Exception
     */
    public void publish(String topic, SepiaMqttMessage message) throws Exception {
        MqttMessage msg = new MqttMessage();
        msg.setPayload(message.getPayload());
        msg.setQos(message.getQos());
        msg.setRetained(message.getRetained());
        this.client.publish(topic, msg);
    }

    /**
     * Subscribe to a topic and register a message handler.
     * The raw payload is normalized to JSON: a string starting with '{' is
     * parsed as an object, one starting with '[' is wrapped as {"data": [...]},
     * anything else (or a parse failure) becomes {"message": "<raw>"}.
     * @param topic - any MQTT topic (can include wildcards)
     * @param callbackHandler - consumer to handle JSON response (keys: id, topic, payload)
     * @throws Exception
     */
    public void subscribe(String topic, Consumer<JSONObject> callbackHandler) throws Exception {
        this.client.subscribe(topic, new IMqttMessageListener(){
            @Override
            public void messageArrived(String top, MqttMessage message) throws Exception{
                String msg = message.toString();
                JSONObject payload;
                // NOTE(review): msg comes from message.toString(), so the
                // getClass().equals(String.class) check appears to be always
                // true for non-null values — the else branch looks unreachable.
                if (msg != null && msg.getClass().equals(String.class)){
                    try{
                        if (msg.startsWith("{")){
                            //parse to JSONObject
                            payload = JSON.parseStringOrFail(msg);
                        }else if (msg.startsWith("[")){
                            //parse to JSONArray
                            JSONArray data = JSON.parseStringToArrayOrFail(msg);
                            payload = JSON.make("data", data);
                        }else{
                            payload = JSON.make("message", msg);
                        }
                    }catch(Exception e){
                        // fall back to wrapping the raw text on parse errors
                        payload = JSON.make("message", msg);
                    }
                }else{
                    //failed to convert MqttMessage
                    payload = new JSONObject();
                }
                JSONObject jo = JSON.make(
                        "topic", top,
                        "id", message.getId(),
                        "payload", payload
                );
                callbackHandler.accept(jo);
            }
        });
    }

    /**
     * Unsubscribe from topic or fail
     * @param topic - topic to unsubscribe from
     * @throws MqttException
     */
    public void unsubscribe(String topic) throws MqttException {
        this.client.unsubscribe(topic);
    }
}
import sys  # fix: sys.stdout is used below but sys was never imported


class InputError(Exception):
    """Raised for a file whose name/extension is not acceptable.

    Attributes:
        fname: the offending file name.
        fext: list of acceptable extensions; [""] means any extension.
    """

    def __init__(self, fname, fext):
        # Fix: pass a message to the Exception base so str(e), logging and
        # tracebacks are informative (previously the exception carried none).
        super().__init__("Input error for file " + fname)
        self.fname = fname
        self.fext = fext


class FileLoader:
    """Loads a file identified by a name and a list of acceptable extensions."""

    def __init__(self, fname, fext):
        self.fname = fname
        self.fext = fext

    def load(self, fname):
        """Announce and perform the load of *fname*.

        NOTE(review): the try block currently only writes a status line, so
        the InputError handler is unreachable from here; it is kept for the
        project-specific loading logic that presumably raises InputError.
        IOError is deliberately re-raised for the caller to handle.
        """
        try:
            sys.stdout.write("Loading " + fname + " file...")
        except InputError as e:
            print("Input error for file " + e.fname)
            if e.fext != [""]:
                print("  Acceptable file types:")
                for ext in e.fext:
                    print("    " + ext)
        except IOError:
            raise
/**
 * Coordinates the Signets login flow: wraps the credentials and listener in a
 * background request task and forwards the result to the caller.
 */
public class UserAuthenticator {

    private Context context;
    private DBHelper dbHelper;

    public UserAuthenticator(Context context, DBHelper dbHelper) {
        this.context = context;
        this.dbHelper = dbHelper;
    }

    /**
     * Starts an asynchronous login for the given credentials; the outcome is
     * delivered via listener.onResponse on the main thread.
     */
    public void login(UserCredentials userCredentials, RequestListener<Object> listener) {
        int methodCode = 1; // Assuming method code for login is 1
        String[] params = { /* any additional parameters required for login */ };
        getDataFromSignet(methodCode, userCredentials, listener, params);
    }

    // Dispatches a Signets request on a background thread. The int method
    // code is autoboxed and the params array is passed as the second vararg
    // element of AsyncTask.execute(Object...).
    private void getDataFromSignet(int method, final UserCredentials creds, final RequestListener<Object> listener, String... params) {
        new SignetsRequestTask(context, creds, listener, dbHelper).execute(method, params);
    }
}

/**
 * Background task that performs the actual Signets network call and notifies
 * the listener with the result.
 * NOTE(review): android.os.AsyncTask is deprecated since API 30 — consider an
 * executor/coroutine-based replacement when this is next touched.
 */
class SignetsRequestTask extends AsyncTask<Object, Void, Object> {

    private Context context;
    private UserCredentials userCredentials;
    private RequestListener<Object> listener;
    private DBHelper dbHelper;

    public SignetsRequestTask(Context context, UserCredentials userCredentials, RequestListener<Object> listener, DBHelper dbHelper) {
        this.context = context;
        this.userCredentials = userCredentials;
        this.listener = listener;
        this.dbHelper = dbHelper;
    }

    @Override
    protected Object doInBackground(Object... objects) {
        // Implement the login process using the provided method code and parameters
        // Example: Make a network request to authenticate the user
        return null; // Replace with the actual result of the login process
    }

    @Override
    protected void onPostExecute(Object result) {
        // Handle the result of the login process and notify the listener
        // (runs on the UI thread).
        listener.onResponse(result);
    }
}
module.exports = function (data) { this.keys = this.game.input.keyboard.addKeys({ left: Phaser.KeyCode.LEFT, right: Phaser.KeyCode.RIGHT, up: Phaser.KeyCode.UP, down: Phaser.KeyCode.DOWN, action: Phaser.KeyCode.SPACEBAR, wrath: Phaser.KeyCode.W }) this.game.renderer.renderSession.roundPixels = true this.game.physics.arcade.gravity.y = 1000 }
import PropTypes from 'prop-types'; import { mount } from 'enzyme'; import { shallow } from 'enzyme'; import { expect } from 'chai'; import { Select } from '../src'; describe('<Select/>', function () { const options = [ { value: 'one' }, { value: 'two' }, { value: 'three' }, ]; const component = mount(<Select placeholder='placeholder' isSearchable={true} options={options} />); it('has an outermost div with form-group class', function () { expect(component.find('div.form-group')).to.have.length(1); }); it('has a div with btn-group class', function () { expect(component.find('div.btn-group')).to.have.length(1); }); it('contains a button', function () { expect(component.find('button')).to.have.length(1); }); it('contains a span with bs-caret class', function () { expect(component.find('span.bs-caret')).to.have.length(1); }); it('contains a span with caret class', function () { expect(component.find('span.caret')).to.have.length(1); }); it('contains a div with dropdown-menu class', function () { expect(component.find('div.dropdown-menu')).to.have.length(1); }); it('contains a ul with dropdown-menu class', function () { expect(component.find('ul.dropdown-menu')).to.have.length(1); }); it('has a placeholder', function () { expect(component.find('span.filter-option').text()).to.contain('placeholder'); }); it('renders options', function () { expect(component.find('ul.dropdown-menu li')).to.have.length(3); }); it('contains isSearchable', function () { expect(component.find('div.bs-searchbox')).to.have.length(1); }); it('is searchable is false, searchbox is not rendered ', function () { const newComponent = mount(<Select placeholder='placeholder' isSearchable={false} />); expect(newComponent.find('div.bs-searchbox')).to.have.length(0); }); it('has no options', function () { const newComponent = mount(<Select placeholder='placeholder' isSearchable={false} />); expect(newComponent.find('ul-dropdown-menu li')).to.have.length(0); }); it('has a placeholder', function () { const 
newComponent = mount(<Select placeholder='placeholder' isSearchable={false} />); expect(newComponent.find('div.form-group').text()).to.contain('placeholder'); }); it('has a maxheight', function () { const options2 = [ { value: 'one' }, { value: 'two' }, { value: 'three' }, ]; const newComponent = mount(<Select placeholder='placeholder' maxHeight='50px' options={options2} isSearchable={false} />); expect(newComponent.find('ul').prop('style')).property('maxHeight', '50px'); }); it('renders a value', function () { const options3 = [ { value: 'one' }, { value: 'two' }, { value: 'three' }, ]; const newComponent = mount(<Select placeholder='placeholder' options={options3} value='one' isSearchable={false} />); expect(newComponent.find('li').at(0).hasClass('selected')).to.equal(true); }); it('is open', function () { const newComponent = mount(<Select placeholder='placeholder' isOpen={true} isSearchable={false} />); expect(newComponent.find('div.btn-group').hasClass('open')).to.equal(true); }); });
<reponame>developertown/soft_validate
# Throwaway in-memory schema for the soft_validate test suite. Each table
# backs one fixture model; :force => true drops and recreates on every run.
ActiveRecord::Schema.define(:version => 0) do

  # Model with several attributes available for validation.
  create_table :dumb_users, :force => true do |t|
    t.column "email", :string
    t.column "first_name", :string
    t.column "last_name", :string
  end

  # Model exercising the no-validations case.
  create_table :non_validated_users, :force => true do |t|
    t.column "email", :string
  end

  # Model whose validations reference attributes that are not columns here.
  create_table :user_with_non_existent_validated_attrs, :force => true do |t|
    t.column "email", :string
    t.column "first_name", :string
    t.column "last_name", :string
  end
end
/**
 * Parsers for an age-range token of the form "from<min>to<max>"
 * (e.g. "from18to25"). Each parser returns the numeric bound, or null when
 * that bound is absent from the string.
 */
const ageRange = {
  /**
   * @param {{age: string}} record object carrying the raw age string
   * @returns {?number} lower bound, or null when no leading "from<N>"
   */
  from({ age }) {
    const found = /^from(\d+)/.exec(age);
    return found && Number(found[1]);
  },

  /**
   * @param {{age: string}} record object carrying the raw age string
   * @returns {?number} upper bound, or null when no trailing "to<N>"
   */
  to({ age }) {
    const found = /to(\d+)$/.exec(age);
    return found && Number(found[1]);
  },
};

export default ageRange;
import React, { useState } from 'react'; const App = () => { // User input and response const [userInput, setUserInput] = useState(''); const [userResponse, setUserResponse] = useState(''); const handleUserInput = e => { setUserInput(e.target.value); }; const handleUserResponse = responseText => { setUserResponse(responseText); }; const sendMessage = () => { // Logic to respond to user input goes here setUserInput(''); setUserResponse(''); }; return ( <div> <h1>Chatbot</h1> <p>{userResponse}</p> <input type="text" placeholder="Enter your message" onChange={handleUserInput} /> <button onClick={sendMessage}>Send</button> </div> ); }; export default App;
#!/bin/bash
# For every subject, compute the weighted mean eigenvector-centrality Z score
# (ECz) within each ICA component, using the thresholded ICA spatial map as
# the fslmeants weighting mask.

components="1 2 3 4 6 7 8 14 15 19" # insert component numbers of ICA

VOLdir="./HCP_results/fullbrainECM"

# Fix: -p makes re-runs idempotent (plain mkdir errors if the dir exists).
mkdir -p ./HCP_results/FinalResults/meanEC_of_IC_weighted_Zwb

## Calculate weighted average ECz of every component for every subject
## use thresholded ICA spatial map as mask in fslmeants
# Fix: declared as a real bash array (was SUBs="", a plain string indexed
# with [@]); populate like: SUBs=(100307 100408 ...)
SUBs=() # insert subject IDs

for sub in "${SUBs[@]}"
do
    image="${VOLdir}/ECM_Z/ECM_${sub}_Zwb.nii.gz"
    for c in ${components}
    do
        fslmeants -w -i "${image}" \
            -o "./HCP_results/FinalResults/meanEC_of_IC_weighted_Zwb/meanEC_of_IC${c}_${sub}_Z.txt" \
            -m "./HCP_results/FinalResults/groupICA20_100s_REST12/stats/thresh_zstat${c}.nii"
    done
done
#!/usr/bin/env bash
# Launches nginx through the buildpack wrapper, handing it ./bin/wait.sh as
# the app process to supervise. NOTE(review): the meaning of -f here depends
# on the start-nginx wrapper's own option handling (presumably "run in
# foreground") — confirm against ./bin/start-nginx.
./bin/start-nginx -f ./bin/wait.sh
#include "../deps/imgui/imgui.h" #include <jc3/entities/character.h> #include <jc3/entities/vehicle.h> #include <json.hpp> #include <jc3/hashes/vehicles.h> #include <jc3/entities/pfx/land_steering.h> struct JCString { union _Bxty { char _Buf[16]; char *_Ptr; char _Alias[16]; } _Bx; unsigned __int64 _Mysize; unsigned __int64 _Myres; std::allocator<char> _Alval; const char* c_str() { if (_Myres >= 0x10) { return _Bx._Ptr; } return _Bx._Buf; } }; #include <json.hpp> nlohmann::json CarSettingsToJson(boost::shared_ptr<jc3::CVehicle> vehicle) { auto pfxVehicle = vehicle->PfxVehicle; assert(pfxVehicle->GetType() == jc3::PfxType::Car && "This vehicle is not a car"); auto pfxCar = static_cast<jc3::CPfxCar*>(pfxVehicle); namespace json = nlohmann; json::json settings_json; settings_json["topSpeed"] = *(float*)((char*)pfxVehicle + 0x3EC); settings_json["dragCoefficient"] = vehicle->dragCoefficient; settings_json["mass"] = vehicle->mass; settings_json["linearDamping"] = vehicle->linearDamping; settings_json["angularDamping"] = vehicle->angularDamping; settings_json["gravityFactor"] = vehicle->gravityFactor; auto engine = pfxCar->landVehicleEngine; auto engineTransmission = pfxCar->landVehicleTransmission; // TODO(alexander): Find the per vehicle thing for steering auto landSteering = util::hooking::func_call<jc3::SLandSteering*>(0x1434CD7E0, &pfxCar->landSteeringResourceCachePtr); std::vector<float> wheel_torque_ratio; for (int i = 0; i < engineTransmission->transmissionProperties.wheelsTorqueRatio.size; ++i) { wheel_torque_ratio.emplace_back(engineTransmission->transmissionProperties.wheelsTorqueRatio.Data[i]); } std::vector<json::json> wheels; std::vector<json::json> wheel_suspension; for (int i = 0; i < pfxCar->wheelInfo.size; ++i) { auto wheelInfo = &pfxCar->wheelInfo.Data[i]; json::json wheel_info = { { "spinVelocity", wheelInfo->spinVelocity }, { "spinAngle", wheelInfo->spinAngle }, { "sideForce", wheelInfo->sideForce }, { "forwardSlipVelocity", 
wheelInfo->forwardSlipVelocity }, { "sideSlipVelocity", wheelInfo->sideSlipVelocity }, { "torque", wheelInfo->torque }, { "angularVelocity", wheelInfo->angularVelocity }, { "invInertia", wheelInfo->invInertia }, { "slipAngleDeg", wheelInfo->slipAngleDeg }, { "slipRatioSAE", wheelInfo->slipRatioSAE }, { "camberAngleDeg", wheelInfo->camberAngleDeg }, { "lateralTireForceOffset", wheelInfo->lateralTireForceOffset }, { "longitudinalTireForceOffset", wheelInfo->longitudinalTireForceOffset }, { "tireDragForceOffset", wheelInfo->tireDragForceOffset }, { "wheelFrictionMultiplier", wheelInfo->wheelFrictionMultiplier }, { "wheelDragMultiplier", wheelInfo->wheelDragMultiplier }, { "burnoutFrictionMultiplier", wheelInfo->burnoutFrictionMultiplier }, { "groundFrictionTorque", wheelInfo->groundFrictionTorque }, { "unkown1", wheelInfo->unkown1 }, { "isConstrainedToGround", wheelInfo->isConstrainedToGround }, }; wheels.push_back(wheel_info); } for (int i = 0; i < pfxCar->wheelInfo.size; ++i) { auto &suspension = pfxCar->wheelSuspensionConstants[i]; auto properties = suspension.suspensionProperties; json::json meow_suspension = { { "suspensionForceMagnitudeAtRest_N", suspension.suspensionForceMagnitudeAtRest_N }, { "suspensionLengthAtRest_m", suspension.suspensionLengthAtRest_m }, { "properties",{ { "antirollbar_strength", properties->antirollbar_strength }, { "compression", properties->compression }, { "length", properties->length }, { "relaxation", properties->relaxation }, { "strength", properties->strength }, { "lateral_tire_force_offset", properties->lateral_tire_force_offset }, { "longitudinal_tire_force_offset", properties->longitudinal_tire_force_offset }, { "tire_drag_force_offset", properties->tire_drag_force_offset }, { "hardpoint_offset_along_spring", properties->hardpoint_offset_along_spring }, } }, }; wheel_suspension.push_back(meow_suspension); } std::vector<float> gear_ratios; std::vector<float> upshift_rpm; std::vector<float> downshift_rpm; for (auto &gear_ratio : 
pfxCar->transmissionResourceCachePtr.data->gear_ratios) { gear_ratios.push_back(gear_ratio); } for (auto &upshift_rpm_ : pfxCar->transmissionResourceCachePtr.data->upshift_rpm) { upshift_rpm.push_back(upshift_rpm_); } for (auto &downshift_rpm_ : pfxCar->transmissionResourceCachePtr.data->downshift_rpm) { downshift_rpm.push_back(downshift_rpm_); } settings_json["car"] = { { "top_speed_kph", pfxCar->topSpeedKph }, { "top_speed", *(float*)((char*)pfxVehicle + 0x3EC) }, { "drag_coefficient", vehicle->dragCoefficient }, { "mass", vehicle->mass }, { "linearDamping", vehicle->linearDamping }, { "angularDamping", vehicle->angularDamping }, { "gravityFactor", vehicle->gravityFactor }, { "land_global",{ { "linear_damping_x", pfxCar->customLandGloabl->linear_damping[0] }, { "linear_damping_y", pfxCar->customLandGloabl->linear_damping[1] }, { "linear_damping_z", pfxCar->customLandGloabl->linear_damping[2] }, { "gravity_multiplier_grounded", pfxCar->customLandGloabl->gravity_multiplier_grounded }, { "gravity_multiplier_in_air_up", pfxCar->customLandGloabl->gravity_multiplier_in_air_up }, { "gravity_multiplier_in_air_down", pfxCar->customLandGloabl->gravity_multiplier_in_air_down }, { "takeoff_pitch_damping", pfxCar->customLandGloabl->takeoff_pitch_damping }, { "front_wheels_damage",{ { "skew_health", pfxCar->customLandGloabl->front_wheels_damage.skew_health }, { "broken_wheel_friction_fraction", pfxCar->customLandGloabl->front_wheels_damage.broken_wheel_friction_fraction }, { "broken_wheel_radius_fraction", pfxCar->customLandGloabl->front_wheels_damage.broken_wheel_radius_fraction }, } }, { "rear_wheels_damage",{ { "skew_health", pfxCar->customLandGloabl->rear_wheels_damage.skew_health }, { "broken_wheel_friction_fraction", pfxCar->customLandGloabl->rear_wheels_damage.broken_wheel_friction_fraction }, { "broken_wheel_radius_fraction", pfxCar->customLandGloabl->rear_wheels_damage.broken_wheel_radius_fraction }, } }, { "drift",{ { "drift_entry_slip_angle", 
pfxCar->customLandGloabl->drift.drift_entry_slip_angle }, { "drift_exit_slip_angle", pfxCar->customLandGloabl->drift.drift_exit_slip_angle }, { "max_drift_angle_deg", pfxCar->customLandGloabl->drift.max_drift_angle_deg }, { "drift_limit_spread_angle_deg", pfxCar->customLandGloabl->drift.drift_limit_spread_angle_deg }, { "constant_drift_torque", pfxCar->customLandGloabl->drift.constant_drift_torque }, { "max_drift_torque", pfxCar->customLandGloabl->drift.max_drift_torque }, { "counter_steer_torque", pfxCar->customLandGloabl->drift.counter_steer_torque }, { "counter_steer_torque_handbrake", pfxCar->customLandGloabl->drift.counter_steer_torque_handbrake }, { "counter_steer_torque_brake", pfxCar->customLandGloabl->drift.counter_steer_torque_brake }, { "drift_yaw_vel_damp", pfxCar->customLandGloabl->drift.drift_yaw_vel_damp }, { "overdrift_yaw_vel_damp", pfxCar->customLandGloabl->drift.overdrift_yaw_vel_damp }, { "exit_drift_yaw_vel_damp", pfxCar->customLandGloabl->drift.exit_drift_yaw_vel_damp }, { "velocity_rotation_start_angle", pfxCar->customLandGloabl->drift.velocity_rotation_start_angle }, { "velocity_rotation_end_angle", pfxCar->customLandGloabl->drift.velocity_rotation_end_angle }, { "velocity_rotation_amount", pfxCar->customLandGloabl->drift.velocity_rotation_amount }, { "velocity_rotation_angle_exp", pfxCar->customLandGloabl->drift.velocity_rotation_angle_exp }, { "counter_steer_rot_factor", pfxCar->customLandGloabl->drift.counter_steer_rot_factor }, { "steering_sensitivity", pfxCar->customLandGloabl->drift.steering_sensitivity }, { "min_speed_to_drift_kmph", pfxCar->customLandGloabl->drift.min_speed_to_drift_kmph }, { "keep_velocity_strength", pfxCar->customLandGloabl->drift.keep_velocity_strength }, { "max_keep_velocity_acceleration_g", pfxCar->customLandGloabl->drift.max_keep_velocity_acceleration_g }, } }, { "arcade",{ { "heat_boost",{ { "torque_multiplier", pfxCar->customLandGloabl->arcade.heat_boost.torque_multiplier }, { "grip_multiplier", 
pfxCar->customLandGloabl->arcade.heat_boost.grip_multiplier }, { "push_force", pfxCar->customLandGloabl->arcade.heat_boost.push_force }, { "boost_blend_time", pfxCar->customLandGloabl->arcade.heat_boost.boost_blend_time }, { "extra_top_speed", pfxCar->customLandGloabl->arcade.heat_boost.extra_top_speed }, } }, { "nitro_boost",{ { "torque_multiplier", pfxCar->customLandGloabl->arcade.nitro_boost.torque_multiplier }, { "grip_multiplier", pfxCar->customLandGloabl->arcade.nitro_boost.grip_multiplier }, { "push_force", pfxCar->customLandGloabl->arcade.nitro_boost.push_force }, { "boost_blend_time", pfxCar->customLandGloabl->arcade.nitro_boost.boost_blend_time }, { "extra_top_speed", pfxCar->customLandGloabl->arcade.nitro_boost.extra_top_speed }, } }, { "nitro_boost_upgraded",{ { "torque_multiplier", pfxCar->customLandGloabl->arcade.nitro_boost_upgraded.torque_multiplier }, { "grip_multiplier", pfxCar->customLandGloabl->arcade.nitro_boost_upgraded.grip_multiplier }, { "push_force", pfxCar->customLandGloabl->arcade.nitro_boost_upgraded.push_force }, { "boost_blend_time", pfxCar->customLandGloabl->arcade.nitro_boost_upgraded.boost_blend_time }, { "extra_top_speed", pfxCar->customLandGloabl->arcade.nitro_boost_upgraded.extra_top_speed }, } }, { "turbo_jump",{ { "f_multiplier", pfxCar->customLandGloabl->arcade.turbo_jump.f_multiplier }, { "r_multiplier", pfxCar->customLandGloabl->arcade.turbo_jump.r_multiplier }, { "punch_delay_time", pfxCar->customLandGloabl->arcade.turbo_jump.punch_delay_time }, { "punch_speed_kph", pfxCar->customLandGloabl->arcade.turbo_jump.punch_speed_kph }, { "top_speed_kph", pfxCar->customLandGloabl->arcade.turbo_jump.top_speed_kph }, { "top_speed_jump_multiplier", pfxCar->customLandGloabl->arcade.turbo_jump.top_speed_jump_multiplier }, } }, { "turbo_jump_upgraded",{ { "f_multiplier", pfxCar->customLandGloabl->arcade.turbo_jump_upgraded.f_multiplier }, { "r_multiplier", pfxCar->customLandGloabl->arcade.turbo_jump_upgraded.r_multiplier }, { 
"punch_delay_time", pfxCar->customLandGloabl->arcade.turbo_jump_upgraded.punch_delay_time }, { "punch_speed_kph", pfxCar->customLandGloabl->arcade.turbo_jump_upgraded.punch_speed_kph }, { "top_speed_kph", pfxCar->customLandGloabl->arcade.turbo_jump_upgraded.top_speed_kph }, { "top_speed_jump_multiplier", pfxCar->customLandGloabl->arcade.turbo_jump_upgraded.top_speed_jump_multiplier }, } }, } }, } }, { "engine",{ { "isClutching", *(bool*)&engine->isClutching }, { "clutchDelay", engine->clutchDelay }, { "clutchingTime", engine->clutchingTime }, { "clutchAmount", engine->clutchAmount }, { "manualClutchEngageTimer", engine->manualClutchEngageTimer }, { "sourceClutchRpm", engine->sourceClutchRpm }, { "targetClutchRpm", engine->targetClutchRpm }, { "engineRevs", engine->engineRevs }, { "engineDamage", engine->engineDamage }, { "revLimiterMagnitudeRPM", engine->revLimiterMagnitudeRPM }, { "isRevLimiting", *(bool*)&engine->isRevLimiting }, { "fullLoadTorque", engine->fullLoadTorque }, { "lowestMaxTorque", engine->lowestMaxTorque }, { "engineMinNoise", engine->engineMinNoise }, { "engineDamageNoiseScale", engine->engineDamageNoiseScale }, { "engineMaxDamageTorqueFactor", engine->engineMaxDamageTorqueFactor }, { "minRPM", engine->minRPM }, { "optRPM", engine->optRPM }, { "maxTorque", engine->maxTorque }, { "torqueFactorAtMinRPM", engine->torqueFactorAtMinRPM }, { "torqueFactorAtMaxRPM", engine->torqueFactorAtMaxRPM }, { "resistanceFactorAtMinRPM", engine->resistanceFactorAtMinRPM }, { "resistanceFactorAtOptRPM", engine->resistanceFactorAtOptRPM }, { "resistanceFactorAtMaxRPM", engine->resistanceFactorAtMaxRPM }, { "clutchSlipRPM", engine->clutchSlipRPM }, { "maxRPM", engine->maxRPM }, { "overdriveMaxRPM", engine->overdriveMaxRPM }, { "isOverdriveActive", engine->isOverdriveActive }, } }, { "engine_transmission",{ { "gears", pfxCar->transmissionResourceCachePtr.data->gears }, { "gear_ratios", gear_ratios }, { "upshift_rpm", upshift_rpm }, { "downshift_rpm", downshift_rpm }, { 
"nitrous_gears", pfxCar->transmissionResourceCachePtr.data->nitrous_gears }, { "sequential", pfxCar->transmissionResourceCachePtr.data->sequential }, { "manual_clutch", pfxCar->transmissionResourceCachePtr.data->manual_clutch }, { "manual_clutch_blend_rpm", pfxCar->transmissionResourceCachePtr.data->manual_clutch_blend_rpm }, { "manual_clutch_blend_time", pfxCar->transmissionResourceCachePtr.data->manual_clutch_blend_time }, { "forward_ratio_percentage", pfxCar->transmissionResourceCachePtr.data->forward_ratio_percentage }, { "low_gear_forward_ratio_pct", pfxCar->transmissionResourceCachePtr.data->low_gear_forward_ratio_pct }, { "top_speed", pfxCar->transmissionResourceCachePtr.data->top_speed }, { "low_gears_final_drive", pfxCar->transmissionResourceCachePtr.data->low_gears_final_drive }, { "final_drive", pfxCar->transmissionResourceCachePtr.data->final_drive }, { "reverse_uses_forward_gears", pfxCar->transmissionResourceCachePtr.data->reverse_uses_forward_gears }, { "reverse_gear_ratio", pfxCar->transmissionResourceCachePtr.data->reverse_gear_ratio }, { "clutch_delay", pfxCar->transmissionResourceCachePtr.data->clutch_delay }, { "decay_time_to_cruise_rpm", pfxCar->transmissionResourceCachePtr.data->decay_time_to_cruise_rpm }, { "target_cruise_rpm", pfxCar->transmissionResourceCachePtr.data->target_cruise_rpm }, { "wheel_torque_ratio", wheel_torque_ratio } } }, { "suspension", wheel_suspension }, { "brakes",{ { "front",{ { "handbrake", pfxCar->brakesResourceCachePtr.data->front.handbrake }, { "max_brake_torque", pfxCar->brakesResourceCachePtr.data->front.max_brake_torque }, { "min_time_to_block", pfxCar->brakesResourceCachePtr.data->front.min_time_to_block }, } }, { "rear",{ { "handbrake", pfxCar->brakesResourceCachePtr.data->rear.handbrake }, { "max_brake_torque", pfxCar->brakesResourceCachePtr.data->rear.max_brake_torque }, { "min_time_to_block", pfxCar->brakesResourceCachePtr.data->rear.min_time_to_block }, } }, { "handbrake_friction_factor", 
pfxCar->brakesResourceCachePtr.data->handbrake_friction_factor }, } }, { "aerodynamics",{ { "air_density", pfxCar->landAerodynamicsResourceCachePtr.data->air_density }, { "frontal_area", pfxCar->landAerodynamicsResourceCachePtr.data->frontal_area }, { "drag_coefficient", pfxCar->landAerodynamicsResourceCachePtr.data->drag_coefficient }, { "top_speed_drag_coefficient", pfxCar->landAerodynamicsResourceCachePtr.data->top_speed_drag_coefficient }, { "lift_coefficient", pfxCar->landAerodynamicsResourceCachePtr.data->lift_coefficient }, } }, { "wheels", wheels } }; if (landSteering) { settings_json["car"]["steering"] = { { "dead_zone", landSteering->dead_zone }, { "saturation_zone", landSteering->saturation_zone }, { "t_to_full_steer_s", landSteering->t_to_full_steer_s }, { "max_speed_t_to_full_steer_s", landSteering->max_speed_t_to_full_steer_s }, { "min_speed_kmph", landSteering->min_speed_kmph }, { "max_speed_kmph", landSteering->max_speed_kmph }, { "steer_angle_min_speed_deg", landSteering->steer_angle_min_speed_deg }, { "steer_angle_max_speed_deg", landSteering->steer_angle_max_speed_deg }, { "steer_curve_falloff", landSteering->steer_curve_falloff }, { "countersteer_speed_factor", landSteering->countersteer_speed_factor }, { "steer_in_speed_factor", landSteering->steer_in_speed_factor }, { "steer_input_power_pc", landSteering->steer_input_power_pc }, { "steer_input_power_durango", landSteering->steer_input_power_durango }, { "steer_input_power_orbis", landSteering->steer_input_power_orbis }, { "wheel_drift_aligning_strength", landSteering->wheel_drift_aligning_strength }, }; } return settings_json; } void CarSettingsFromJson(boost::shared_ptr<jc3::CVehicle> vehicle, nlohmann::json settings_json) { auto pfxVehicle = vehicle->PfxVehicle; assert(pfxVehicle->GetType() == jc3::PfxType::Car && "This vehicle is not a car"); auto pfxCar = static_cast<jc3::CPfxCar*>(pfxVehicle); *(float*)((char*)pfxVehicle + 0x3EC) = settings_json.value("topSpeed", 
*(float*)((char*)pfxVehicle + 0x3EC)); vehicle->dragCoefficient = settings_json.value("dragCoefficient", vehicle->dragCoefficient); vehicle->mass = settings_json.value("mass", vehicle->mass); vehicle->linearDamping = settings_json.value("linearDamping", vehicle->linearDamping); vehicle->angularDamping = settings_json.value("angularDamping", vehicle->angularDamping); vehicle->gravityFactor = settings_json.value("gravityFactor", vehicle->gravityFactor); //if (settings_json.find("gravityModifiers") != settings_json.end()) { // pfxCar->someGravityModifiers->gravityMultiplierGrounded = settings_json["gravityModifiers"].value("gravityGrounded", pfxCar->someGravityModifiers->gravityMultiplierGrounded); //} if (settings_json.find("car") == settings_json.end()) { return; } auto car_json = settings_json["car"]; pfxCar->topSpeedKph = settings_json["car"]["top_speed_kph"]; *(float*)((char*)pfxVehicle + 0x3EC) = settings_json["car"]["top_speed"]; vehicle->dragCoefficient = car_json["drag_coefficient"]; vehicle->mass = car_json["mass"]; vehicle->linearDamping = car_json["linearDamping"]; vehicle->angularDamping = car_json["angularDamping"]; vehicle->gravityFactor = car_json["gravityFactor"]; if(car_json.find("land_global") != car_json.end()) { auto global_json = car_json["land_global"]; pfxCar->customLandGloabl->linear_damping[0] = global_json.value("linear_damping_x", pfxCar->customLandGloabl->linear_damping[0]); pfxCar->customLandGloabl->linear_damping[1] = global_json.value("linear_damping_y", pfxCar->customLandGloabl->linear_damping[1]); pfxCar->customLandGloabl->linear_damping[2] = global_json.value("linear_damping_z", pfxCar->customLandGloabl->linear_damping[2]); pfxCar->customLandGloabl->linear_damping[2] = global_json.value("linear_damping_z", pfxCar->customLandGloabl->linear_damping[2]); pfxCar->customLandGloabl->gravity_multiplier_grounded = global_json.value("gravity_multiplier_grounded", pfxCar->customLandGloabl->gravity_multiplier_grounded); 
pfxCar->customLandGloabl->gravity_multiplier_in_air_up = global_json.value("gravity_multiplier_in_air_up", pfxCar->customLandGloabl->gravity_multiplier_in_air_up); pfxCar->customLandGloabl->gravity_multiplier_in_air_down = global_json.value("gravity_multiplier_in_air_down", pfxCar->customLandGloabl->gravity_multiplier_in_air_down); pfxCar->customLandGloabl->takeoff_pitch_damping = global_json.value("takeoff_pitch_damping", pfxCar->customLandGloabl->takeoff_pitch_damping); if (global_json.find("front_wheels_damage") != global_json.end()) { pfxCar->customLandGloabl->front_wheels_damage.skew_health = global_json["front_wheels_damage"].value("skew_health", pfxCar->customLandGloabl->front_wheels_damage.skew_health); pfxCar->customLandGloabl->front_wheels_damage.broken_wheel_friction_fraction = global_json["front_wheels_damage"].value("broken_wheel_friction_fraction", pfxCar->customLandGloabl->front_wheels_damage.broken_wheel_friction_fraction); pfxCar->customLandGloabl->front_wheels_damage.broken_wheel_radius_fraction = global_json["front_wheels_damage"].value("broken_wheel_radius_fraction", pfxCar->customLandGloabl->front_wheels_damage.broken_wheel_radius_fraction); } if (global_json.find("rear_wheels_damage") != global_json.end()) { pfxCar->customLandGloabl->rear_wheels_damage.skew_health = global_json["rear_wheels_damage"].value("skew_health", pfxCar->customLandGloabl->front_wheels_damage.skew_health); pfxCar->customLandGloabl->rear_wheels_damage.broken_wheel_friction_fraction = global_json["rear_wheels_damage"].value("broken_wheel_friction_fraction", pfxCar->customLandGloabl->front_wheels_damage.broken_wheel_friction_fraction); pfxCar->customLandGloabl->rear_wheels_damage.broken_wheel_radius_fraction = global_json["rear_wheels_damage"].value("broken_wheel_radius_fraction", pfxCar->customLandGloabl->front_wheels_damage.broken_wheel_radius_fraction); } if (global_json.find("drift") != global_json.end()) { auto drift_json = global_json["drift"]; 
pfxCar->customLandGloabl->drift.drift_entry_slip_angle = drift_json.value("drift_entry_slip_angle", pfxCar->customLandGloabl->drift.drift_entry_slip_angle); pfxCar->customLandGloabl->drift.drift_exit_slip_angle = drift_json.value("drift_exit_slip_angle", pfxCar->customLandGloabl->drift.drift_exit_slip_angle); pfxCar->customLandGloabl->drift.max_drift_angle_deg = drift_json.value("max_drift_angle_deg", pfxCar->customLandGloabl->drift.max_drift_angle_deg); pfxCar->customLandGloabl->drift.drift_limit_spread_angle_deg = drift_json.value("drift_limit_spread_angle_deg", pfxCar->customLandGloabl->drift.drift_limit_spread_angle_deg); pfxCar->customLandGloabl->drift.constant_drift_torque = drift_json.value("constant_drift_torque", pfxCar->customLandGloabl->drift.constant_drift_torque); pfxCar->customLandGloabl->drift.max_drift_torque = drift_json.value("max_drift_torque", pfxCar->customLandGloabl->drift.max_drift_torque); pfxCar->customLandGloabl->drift.counter_steer_torque = drift_json.value("drift_entry_slip_angle", pfxCar->customLandGloabl->drift.counter_steer_torque); pfxCar->customLandGloabl->drift.counter_steer_torque_handbrake = drift_json.value("counter_steer_torque_handbrake", pfxCar->customLandGloabl->drift.counter_steer_torque_handbrake); pfxCar->customLandGloabl->drift.counter_steer_torque_brake = drift_json.value("counter_steer_torque_brake", pfxCar->customLandGloabl->drift.counter_steer_torque_brake); pfxCar->customLandGloabl->drift.drift_yaw_vel_damp = drift_json.value("drift_yaw_vel_damp", pfxCar->customLandGloabl->drift.drift_yaw_vel_damp); pfxCar->customLandGloabl->drift.overdrift_yaw_vel_damp = drift_json.value("overdrift_yaw_vel_damp", pfxCar->customLandGloabl->drift.overdrift_yaw_vel_damp); pfxCar->customLandGloabl->drift.exit_drift_yaw_vel_damp = drift_json.value("exit_drift_yaw_vel_damp", pfxCar->customLandGloabl->drift.exit_drift_yaw_vel_damp); pfxCar->customLandGloabl->drift.velocity_rotation_start_angle = 
drift_json.value("velocity_rotation_start_angle", pfxCar->customLandGloabl->drift.velocity_rotation_start_angle); pfxCar->customLandGloabl->drift.velocity_rotation_end_angle = drift_json.value("velocity_rotation_end_angle", pfxCar->customLandGloabl->drift.velocity_rotation_end_angle); pfxCar->customLandGloabl->drift.velocity_rotation_amount = drift_json.value("velocity_rotation_amount", pfxCar->customLandGloabl->drift.velocity_rotation_amount); pfxCar->customLandGloabl->drift.velocity_rotation_angle_exp = drift_json.value("velocity_rotation_angle_exp", pfxCar->customLandGloabl->drift.velocity_rotation_angle_exp); pfxCar->customLandGloabl->drift.counter_steer_rot_factor = drift_json.value("counter_steer_rot_factor", pfxCar->customLandGloabl->drift.counter_steer_rot_factor); pfxCar->customLandGloabl->drift.steering_sensitivity = drift_json.value("steering_sensitivity", pfxCar->customLandGloabl->drift.steering_sensitivity); pfxCar->customLandGloabl->drift.min_speed_to_drift_kmph = drift_json.value("min_speed_to_drift_kmph", pfxCar->customLandGloabl->drift.min_speed_to_drift_kmph); pfxCar->customLandGloabl->drift.keep_velocity_strength = drift_json.value("keep_velocity_strength", pfxCar->customLandGloabl->drift.keep_velocity_strength); pfxCar->customLandGloabl->drift.max_keep_velocity_acceleration_g = drift_json.value("max_keep_velocity_acceleration_g", pfxCar->customLandGloabl->drift.max_keep_velocity_acceleration_g); } if (global_json.find("arcade") != global_json.end()) { auto arcade_json = global_json["arcade"]; if (arcade_json.find("heat_boost") != arcade_json.end()) { pfxCar->customLandGloabl->arcade.heat_boost.torque_multiplier = arcade_json["heat_boost"].value("torque_multiplier", pfxCar->customLandGloabl->arcade.heat_boost.torque_multiplier); pfxCar->customLandGloabl->arcade.heat_boost.grip_multiplier = arcade_json["heat_boost"].value("grip_multiplier", pfxCar->customLandGloabl->arcade.heat_boost.grip_multiplier); 
pfxCar->customLandGloabl->arcade.heat_boost.push_force = arcade_json["heat_boost"].value("push_force", pfxCar->customLandGloabl->arcade.heat_boost.push_force); pfxCar->customLandGloabl->arcade.heat_boost.boost_blend_time = arcade_json["heat_boost"].value("boost_blend_time", pfxCar->customLandGloabl->arcade.heat_boost.boost_blend_time); pfxCar->customLandGloabl->arcade.heat_boost.extra_top_speed = arcade_json["heat_boost"].value("extra_top_speed", pfxCar->customLandGloabl->arcade.heat_boost.extra_top_speed); } if (arcade_json.find("nitro_boost") != arcade_json.end()) { pfxCar->customLandGloabl->arcade.nitro_boost.torque_multiplier = arcade_json["nitro_boost"].value("torque_multiplier", pfxCar->customLandGloabl->arcade.nitro_boost.torque_multiplier); pfxCar->customLandGloabl->arcade.nitro_boost.grip_multiplier = arcade_json["nitro_boost"].value("grip_multiplier", pfxCar->customLandGloabl->arcade.nitro_boost.grip_multiplier); pfxCar->customLandGloabl->arcade.nitro_boost.push_force = arcade_json["nitro_boost"].value("push_force", pfxCar->customLandGloabl->arcade.nitro_boost.push_force); pfxCar->customLandGloabl->arcade.nitro_boost.boost_blend_time = arcade_json["nitro_boost"].value("boost_blend_time", pfxCar->customLandGloabl->arcade.nitro_boost.boost_blend_time); pfxCar->customLandGloabl->arcade.nitro_boost.extra_top_speed = arcade_json["nitro_boost"].value("extra_top_speed", pfxCar->customLandGloabl->arcade.nitro_boost.extra_top_speed); } if (arcade_json.find("nitro_boost_upgraded") != arcade_json.end()) { pfxCar->customLandGloabl->arcade.nitro_boost_upgraded.torque_multiplier = arcade_json["nitro_boost_upgraded"].value("torque_multiplier", pfxCar->customLandGloabl->arcade.nitro_boost_upgraded.torque_multiplier); pfxCar->customLandGloabl->arcade.nitro_boost_upgraded.grip_multiplier = arcade_json["nitro_boost_upgraded"].value("grip_multiplier", pfxCar->customLandGloabl->arcade.nitro_boost_upgraded.grip_multiplier); 
pfxCar->customLandGloabl->arcade.nitro_boost_upgraded.push_force = arcade_json["nitro_boost_upgraded"].value("push_force", pfxCar->customLandGloabl->arcade.nitro_boost_upgraded.push_force); pfxCar->customLandGloabl->arcade.nitro_boost_upgraded.boost_blend_time = arcade_json["nitro_boost_upgraded"].value("boost_blend_time", pfxCar->customLandGloabl->arcade.nitro_boost_upgraded.boost_blend_time); pfxCar->customLandGloabl->arcade.nitro_boost_upgraded.extra_top_speed = arcade_json["nitro_boost_upgraded"].value("extra_top_speed", pfxCar->customLandGloabl->arcade.nitro_boost_upgraded.extra_top_speed); } if (arcade_json.find("turbo_jump") != arcade_json.end()) { pfxCar->customLandGloabl->arcade.turbo_jump.f_multiplier = arcade_json["turbo_jump"].value("f_multiplier", pfxCar->customLandGloabl->arcade.turbo_jump.f_multiplier); pfxCar->customLandGloabl->arcade.turbo_jump.r_multiplier = arcade_json["turbo_jump"].value("r_multiplier", pfxCar->customLandGloabl->arcade.turbo_jump.r_multiplier); pfxCar->customLandGloabl->arcade.turbo_jump.punch_delay_time = arcade_json["turbo_jump"].value("punch_delay_time", pfxCar->customLandGloabl->arcade.turbo_jump.punch_delay_time); pfxCar->customLandGloabl->arcade.turbo_jump.punch_speed_kph = arcade_json["turbo_jump"].value("punch_speed_kph", pfxCar->customLandGloabl->arcade.turbo_jump.punch_speed_kph); pfxCar->customLandGloabl->arcade.turbo_jump.top_speed_kph = arcade_json["turbo_jump"].value("top_speed_kph", pfxCar->customLandGloabl->arcade.turbo_jump.top_speed_kph); pfxCar->customLandGloabl->arcade.turbo_jump.top_speed_jump_multiplier = arcade_json["turbo_jump"].value("f_multiplier", pfxCar->customLandGloabl->arcade.turbo_jump.top_speed_jump_multiplier); } if (arcade_json.find("turbo_jump_upgraded") != arcade_json.end()) { pfxCar->customLandGloabl->arcade.turbo_jump_upgraded.f_multiplier = arcade_json["turbo_jump_upgraded"].value("f_multiplier", pfxCar->customLandGloabl->arcade.turbo_jump_upgraded.f_multiplier); 
pfxCar->customLandGloabl->arcade.turbo_jump_upgraded.r_multiplier = arcade_json["turbo_jump_upgraded"].value("r_multiplier", pfxCar->customLandGloabl->arcade.turbo_jump_upgraded.r_multiplier); pfxCar->customLandGloabl->arcade.turbo_jump_upgraded.punch_delay_time = arcade_json["turbo_jump_upgraded"].value("punch_delay_time", pfxCar->customLandGloabl->arcade.turbo_jump_upgraded.punch_delay_time); pfxCar->customLandGloabl->arcade.turbo_jump_upgraded.punch_speed_kph = arcade_json["turbo_jump_upgraded"].value("punch_speed_kph", pfxCar->customLandGloabl->arcade.turbo_jump_upgraded.punch_speed_kph); pfxCar->customLandGloabl->arcade.turbo_jump_upgraded.top_speed_kph = arcade_json["turbo_jump_upgraded"].value("top_speed_kph", pfxCar->customLandGloabl->arcade.turbo_jump_upgraded.top_speed_kph); pfxCar->customLandGloabl->arcade.turbo_jump_upgraded.top_speed_jump_multiplier = arcade_json["turbo_jump"].value("f_multiplier", pfxCar->customLandGloabl->arcade.turbo_jump_upgraded.top_speed_jump_multiplier); } } } auto engine = pfxCar->landVehicleEngine; if (settings_json["car"].find("engine") != settings_json.end()) { auto & engine_json = settings_json["car"]["engine"]; engine->isClutching = engine_json.value("isClutching", *(bool*)&engine->isClutching); engine->clutchDelay = engine_json.value("clutchDelay", engine->clutchDelay); engine->clutchingTime = engine_json.value( "clutchingTime", engine->clutchingTime ); engine->clutchAmount = engine_json.value( "clutchAmount", engine->clutchAmount ); engine->manualClutchEngageTimer = engine_json.value( "manualClutchEngageTimer", engine->manualClutchEngageTimer ); engine->sourceClutchRpm = engine_json.value( "sourceClutchRpm", engine->sourceClutchRpm ); engine->targetClutchRpm = engine_json.value( "targetClutchRpm", engine->targetClutchRpm ); engine->engineRevs = engine_json.value( "engineRevs", engine->engineRevs ); engine->engineDamage = engine_json.value( "engineDamage", engine->engineDamage ); engine->revLimiterMagnitudeRPM = 
engine_json.value( "revLimiterMagnitudeRPM", engine->revLimiterMagnitudeRPM ); engine->isRevLimiting = engine_json.value( "isRevLimiting", *(bool*)&engine->isRevLimiting ); engine->fullLoadTorque = engine_json.value( "fullLoadTorque", engine->fullLoadTorque ); engine->lowestMaxTorque = engine_json.value( "lowestMaxTorque", engine->lowestMaxTorque ); engine->engineMinNoise = engine_json.value( "engineMinNoise", engine->engineMinNoise ); engine->engineDamageNoiseScale = engine_json.value( "engineDamageNoiseScale", engine->engineDamageNoiseScale ); engine->engineMaxDamageTorqueFactor = engine_json.value( "engineMaxDamageTorqueFactor", engine->engineMaxDamageTorqueFactor ); engine->minRPM = engine_json.value( "minRPM", engine->minRPM); engine->optRPM = engine_json.value( "optRPM", engine->optRPM); engine->maxTorque = engine_json.value( "maxTorque", engine->maxTorque); engine->torqueFactorAtMinRPM = engine_json.value( "torqueFactorAtMinRPM", engine->torqueFactorAtMinRPM); engine->torqueFactorAtMaxRPM = engine_json.value( "torqueFactorAtMaxRPM", engine->torqueFactorAtMaxRPM); engine->resistanceFactorAtMinRPM = engine_json.value( "resistanceFactorAtMinRPM", engine->resistanceFactorAtMinRPM); engine->resistanceFactorAtOptRPM = engine_json.value( "resistanceFactorAtOptRPM", engine->resistanceFactorAtOptRPM); engine->resistanceFactorAtMaxRPM = engine_json.value( "resistanceFactorAtMaxRPM", engine->resistanceFactorAtMaxRPM); engine->clutchSlipRPM = engine_json.value( "clutchSlipRPM", engine->clutchSlipRPM); engine->maxRPM = engine_json.value( "maxRPM", engine->maxRPM); engine->overdriveMaxRPM = engine_json.value( "overdriveMaxRPM", engine->overdriveMaxRPM); engine->isOverdriveActive = engine_json.value("isOverdriveActive", engine->isOverdriveActive); } std::vector<jc3::WheelInfo> wheel_infos; for (auto & meow : settings_json["car"]["wheels"]) { jc3::WheelInfo wheel_info; wheel_info.spinVelocity = meow["spinVelocity"]; wheel_info.spinAngle = meow["spinAngle"]; 
wheel_info.sideForce = meow["sideForce"]; wheel_info.forwardSlipVelocity = meow["forwardSlipVelocity"]; wheel_info.sideSlipVelocity = meow["sideSlipVelocity"]; wheel_info.torque = meow["torque"]; wheel_info.angularVelocity = meow["angularVelocity"]; wheel_info.invInertia = meow["invInertia"]; wheel_info.slipAngleDeg = meow["slipAngleDeg"]; wheel_info.slipRatioSAE = meow["slipRatioSAE"]; wheel_info.camberAngleDeg = meow["camberAngleDeg"]; wheel_info.lateralTireForceOffset = meow["lateralTireForceOffset"]; wheel_info.longitudinalTireForceOffset = meow["longitudinalTireForceOffset"]; wheel_info.tireDragForceOffset = meow["tireDragForceOffset"]; wheel_info.wheelFrictionMultiplier = meow["wheelFrictionMultiplier"]; wheel_info.wheelDragMultiplier = meow["wheelDragMultiplier"]; wheel_info.burnoutFrictionMultiplier = meow["burnoutFrictionMultiplier"]; wheel_info.groundFrictionTorque = meow["groundFrictionTorque"]; wheel_info.unkown1 = meow["unkown1"]; wheel_info.isConstrainedToGround = meow["isConstrainedToGround"]; wheel_infos.emplace_back(wheel_info); } try { for (int i = 0; i < pfxCar->wheelInfo.size; ++i) { pfxCar->wheelInfo.Data[i] = wheel_infos.at(i); } } catch (...) 
{} std::vector<jc3::SWheelSuspensionConstant> wheel_suspension; if (car_json.find("suspension") != car_json.end()) { int i = 0; for (auto & meow : car_json["suspension"]) { auto &suspension = pfxCar->wheelSuspensionConstants[i]; auto properties = suspension.suspensionProperties; suspension.suspensionForceMagnitudeAtRest_N = meow.value("suspensionForceMagnitudeAtRest_N", suspension.suspensionForceMagnitudeAtRest_N); suspension.suspensionLengthAtRest_m = meow.value("suspensionLengthAtRest_m", suspension.suspensionLengthAtRest_m); properties->antirollbar_strength = meow.value("antirollbar_strength", properties->antirollbar_strength); properties->compression = meow.value("compression", properties->compression); properties->length = meow.value("length", properties->length); properties->relaxation = meow.value("relaxation", properties->relaxation); properties->strength = meow.value("strength", properties->strength); properties->lateral_tire_force_offset = meow.value("lateral_tire_force_offset", properties->lateral_tire_force_offset); properties->longitudinal_tire_force_offset = meow.value("longitudinal_tire_force_offset", properties->longitudinal_tire_force_offset); properties->tire_drag_force_offset = meow.value("tire_drag_force_offset", properties->tire_drag_force_offset); properties->hardpoint_offset_along_spring = meow.value("hardpoint_offset_along_spring", properties->hardpoint_offset_along_spring); ++i; // Let's hope we don't get fucked here... 
        } // end per-wheel suspension loop
    } // end "suspension" key check
} // end of enclosing JSON loader function (opened before this chunk)

// Draws the ImGui debug/tuning window for one car: read-only telemetry plus
// editable engine, transmission, suspension, brake, aerodynamics, wheel and
// steering parameters.  Each edited resource block is pushed back into the
// physics vehicle through the matching Apply* call at the end of its section.
void DoCarHandlingUI(boost::shared_ptr<jc3::CVehicle> real_vehicle, jc3::CPfxVehicle *pfxVehicle)
{
    // All car-specific tunables live on the CPfxCar subclass.
    auto pfxCar = static_cast<jc3::CPfxCar*>(pfxVehicle);
    using json = nlohmann::json;
    // Parsed once on first call and cached for the process lifetime.
    static json vehicle_hashes = json::parse(jc3::vehicle_hashes);
    assert(vehicle_hashes.is_array() && "Vehicle hashes is not an array");

    // Car stuff
    // Resolve a human-readable model name by matching the vehicle's name hash
    // against the embedded hash -> model_name table.
    auto hash = real_vehicle->GetNameHash();
    for (auto &vehicle : vehicle_hashes) {
        if (vehicle["hash"].is_number() && static_cast<uint32_t>(vehicle["hash"]) == hash) {
            std::string t = vehicle["model_name"];
            ImGui::BulletText("Model Name: %s", t.c_str());
        }
    }

    // Read-only live telemetry.
    ImGui::BulletText("Engine Torque %f", pfxCar->engineTorque);
    ImGui::BulletText("Engine RPM %f", pfxCar->engineRPM);
    ImGui::BulletText("Top Speed %f", pfxCar->topSpeedKph);
    ImGui::Separator();

    // NOTE(review): raw byte offset 0x3EC into the vehicle object —
    // presumably the top-speed float; confirm against the reversed layout.
    ImGui::DragFloat("Top Speed", (float*)((char*)pfxVehicle + 0x3EC));
    ImGui::DragFloat("Drag Coefficient", &real_vehicle->dragCoefficient);
    ImGui::DragFloat("Mass", &real_vehicle->mass);
    ImGui::DragFloat("Linear Damping", &real_vehicle->linearDamping);
    ImGui::DragFloat("Angular Damping", &real_vehicle->angularDamping);
    ImGui::SliderFloat("Gravity Factor", &real_vehicle->gravityFactor, -128, 128);
    ImGui::Separator();

    if (ImGui::CollapsingHeader("Gravity Modifiers")) {
        ImGui::TreePush("Gravity Modifiers");
        //ImGui::DragFloat("Gravity Grounded", &pfxCar->someGravityModifiers->gravityMultiplierGrounded);
        ImGui::TreePop();
    }

    if (ImGui::CollapsingHeader("Engine")) {
        ImGui::TreePush("Engine");
        auto engine = pfxCar->landEngineResourceCachePtr.data;
        ImGui::DragFloat("resistance_at_min_rpm", &engine->resistance_at_min_rpm);
        ImGui::DragFloat("resistance_at_max_rpm", &engine->resistance_at_max_rpm);
        ImGui::DragFloat("resistance_at_optimal_rpm", &engine->resistance_at_optimal_rpm);
        ImGui::DragFloat("rev_limiter_rpm_drop", &engine->rev_limiter_rpm_drop);
        ImGui::DragFloat("max_rpm", &engine->max_rpm);
        ImGui::DragFloat("min_rpm", &engine->min_rpm);
        ImGui::DragFloat("optimal_rpm", &engine->optimal_rpm);
        ImGui::DragFloat("torque_factor_at_max_rpm", &engine->torque_factor_at_max_rpm);
        ImGui::DragFloat("torque_factor_at_min_rpm", &engine->torque_factor_at_min_rpm);
        ImGui::DragFloat("torque_factor_at_optimal_rpm", &engine->torque_factor_at_optimal_rpm);
        ImGui::DragFloat("clutch_slip_rpm", &engine->clutch_slip_rpm);
        ImGui::DragFloat("engine_min_noise", &engine->engine_min_noise);
        ImGui::DragFloat("engine_damage_noise_scale", &engine->engine_damage_noise_scale);
        ImGui::DragFloat("engine_max_damage_torque", &engine->engine_max_damage_torque);
        ImGui::TreePop();
        // Push the edited engine resource back into the simulation every frame.
        pfxCar->ApplyLandEngine(*pfxCar->landEngineResourceCachePtr.data);
    }

    if (ImGui::CollapsingHeader("Engine Transmission")) {
        ImGui::TreePush("Engine Transmission");
        ImGui::DragInt("gears", &pfxCar->transmissionResourceCachePtr.data->gears);
        ImGui::DragInt("nitrous_gears", &pfxCar->transmissionResourceCachePtr.data->nitrous_gears);
        ImGui::DragInt("sequential", &pfxCar->transmissionResourceCachePtr.data->sequential);
        ImGui::DragInt("manual_clutch", &pfxCar->transmissionResourceCachePtr.data->manual_clutch);
        ImGui::DragFloat("manual_clutch_blend_rpm", &pfxCar->transmissionResourceCachePtr.data->manual_clutch_blend_rpm);
        ImGui::DragFloat("manual_clutch_blend_time", &pfxCar->transmissionResourceCachePtr.data->manual_clutch_blend_time);
        ImGui::DragFloat("forward_ratio_percentage", &pfxCar->transmissionResourceCachePtr.data->forward_ratio_percentage);
        ImGui::DragFloat("low_gear_forward_ratio_pct", &pfxCar->transmissionResourceCachePtr.data->low_gear_forward_ratio_pct);
        ImGui::DragFloat("top_speed", &pfxCar->transmissionResourceCachePtr.data->top_speed);
        ImGui::DragFloat("low_gears_final_drive", &pfxCar->transmissionResourceCachePtr.data->low_gears_final_drive);
        ImGui::DragFloat("final_drive", &pfxCar->transmissionResourceCachePtr.data->final_drive);
        ImGui::DragFloat("reverse_gear_ratio", &pfxCar->transmissionResourceCachePtr.data->reverse_gear_ratio);
        ImGui::DragFloat("clutch_delay", &pfxCar->transmissionResourceCachePtr.data->clutch_delay);
        ImGui::DragFloat("decay_time_to_cruise_rpm", &pfxCar->transmissionResourceCachePtr.data->decay_time_to_cruise_rpm);
        ImGui::DragFloat("target_cruise_rpm", &pfxCar->transmissionResourceCachePtr.data->target_cruise_rpm);
        // One slider per configured gear ratio.
        for (int i = 0; i < pfxCar->transmissionResourceCachePtr.data->gears; ++i) {
            char wheel_text[100];
            sprintf(wheel_text, "Wheel Torque Ratio %d", i);
            ImGui::TreePush(wheel_text);
            ImGui::SliderFloat(wheel_text, &pfxCar->transmissionResourceCachePtr.data->gear_ratios[i], 0, 128);
            ImGui::TreePop();
        }
        ImGui::TreePop();
        pfxCar->ApplyTransmission(*pfxCar->transmissionResourceCachePtr.data);
    }

    if (ImGui::CollapsingHeader("Suspension")) {
        ImGui::TreePush("Suspension");
        // One collapsible sub-section per wheel.
        for (int i = 0; i < pfxCar->wheelInfo.size; ++i) {
            auto & suspension = pfxCar->wheelSuspensionConstants[i];
            char wheel_text[100];
            sprintf(wheel_text, "Suspension Wheel %d", i);
            if (ImGui::CollapsingHeader(wheel_text)) {
                ImGui::TreePush(wheel_text);
                ImGui::DragFloat("Suspension Force Mag At Rest", &suspension.suspensionForceMagnitudeAtRest_N);
                ImGui::DragFloat("Suspension Length At Rest", &suspension.suspensionLengthAtRest_m);
                if (ImGui::CollapsingHeader("Properties")) {
                    auto properties = suspension.suspensionProperties;
                    ImGui::DragFloat("Antirollbar Strength", &properties->antirollbar_strength);
                    ImGui::DragFloat("Compression", &properties->compression);
                    ImGui::DragFloat("Length", &properties->length);
                    ImGui::DragFloat("Relaxation", &properties->relaxation);
                    ImGui::DragFloat("Strength", &properties->strength);
                    ImGui::DragFloat("Lateral Tire Force Offset", &properties->lateral_tire_force_offset);
                    ImGui::DragFloat("Longtid Tire Force Offset", &properties->longitudinal_tire_force_offset);
                    ImGui::DragFloat("Tire drag Force offset", &properties->tire_drag_force_offset);
                    ImGui::DragFloat("Hardpoint offset along spring", &properties->hardpoint_offset_along_spring);
                }
                ImGui::TreePop();
            }
        }
        ImGui::TreePop();
    }

    if (ImGui::CollapsingHeader("Brakes"))
    {
        ImGui::TreePush("Brakes Front");
        ImGui::Text("Front");
        ImGui::Checkbox("Handbrake", (bool*)&pfxCar->brakesResourceCachePtr.data->front.handbrake);
        ImGui::DragFloat("Max Brake Torque", &pfxCar->brakesResourceCachePtr.data->front.max_brake_torque);
        ImGui::DragFloat("Time To Block", &pfxCar->brakesResourceCachePtr.data->front.min_time_to_block);
        ImGui::TreePop();
        ImGui::Separator();
        ImGui::Text("Rear");
        ImGui::TreePush("Brakes Rear");
        ImGui::Checkbox("Handbrake", (bool*)&pfxCar->brakesResourceCachePtr.data->rear.handbrake);
        ImGui::DragFloat("Max Brake Torque", &pfxCar->brakesResourceCachePtr.data->rear.max_brake_torque);
        ImGui::DragFloat("Time To Block", &pfxCar->brakesResourceCachePtr.data->rear.min_time_to_block);
        ImGui::TreePop();
        pfxCar->ApplyBrakes(*pfxCar->brakesResourceCachePtr.data);
    }

    if (ImGui::CollapsingHeader("Aerodynamics")) {
        ImGui::TreePush("Aerodynamic");
        ImGui::DragFloat("Air Density", &pfxCar->landAerodynamicsResourceCachePtr.data->air_density);
        ImGui::DragFloat("Frontal Area", &pfxCar->landAerodynamicsResourceCachePtr.data->frontal_area);
        ImGui::DragFloat("Drag Coefficient", &pfxCar->landAerodynamicsResourceCachePtr.data->drag_coefficient);
        ImGui::DragFloat("Top Speed Drag Coefficient", &pfxCar->landAerodynamicsResourceCachePtr.data->top_speed_drag_coefficient);
        ImGui::DragFloat("Lift Coefficient", &pfxCar->landAerodynamicsResourceCachePtr.data->lift_coefficient);
        ImGui::TreePop();
        pfxCar->ApplyLandAerodynamics(*pfxCar->landAerodynamicsResourceCachePtr.data);
    }

    if (ImGui::CollapsingHeader("Wheels")) {
        // Shared widget body reused for every wheel below.
        auto DrawWheelInfo = [](const char *id, jc3::WheelInfo * wheelInfo) {
            ImGui::DragFloat("Spin Velocity", &wheelInfo->spinVelocity);
            ImGui::DragFloat("Spin Angle", &wheelInfo->spinAngle);
            ImGui::DragFloat("Side Force", &wheelInfo->sideForce);
            ImGui::DragFloat("Forward Slip Velocity", &wheelInfo->forwardSlipVelocity);
            ImGui::DragFloat("Side Slip Velocity", &wheelInfo->sideSlipVelocity);
            ImGui::DragFloat("Torque", &wheelInfo->torque);
            ImGui::DragFloat("Angular Velocity", &wheelInfo->angularVelocity);
            ImGui::DragFloat("Inv Inertia", &wheelInfo->invInertia);
            ImGui::DragFloat("Slip Angle Deg", &wheelInfo->slipAngleDeg);
            ImGui::DragFloat("Slip Ratio AE", &wheelInfo->slipRatioSAE);
            ImGui::DragFloat("Camber Angle Deg", &wheelInfo->camberAngleDeg);
            ImGui::DragFloat("Lateral Tire Force Offset", &wheelInfo->lateralTireForceOffset);
            ImGui::DragFloat("Longitudinal Tire Force Offset", &wheelInfo->longitudinalTireForceOffset);
            ImGui::DragFloat("Tire Drag Force Offset", &wheelInfo->tireDragForceOffset);
            ImGui::DragFloat("Friction Multiplier", &wheelInfo->wheelFrictionMultiplier);
            ImGui::DragFloat("Drag Multiplier", &wheelInfo->wheelDragMultiplier);
            ImGui::DragFloat("Burnout Friction Multiplier", &wheelInfo->burnoutFrictionMultiplier);
            ImGui::DragFloat("Ground Friction Torque", &wheelInfo->groundFrictionTorque);
            // "unkown1" is an unidentified field; the label reflects the
            // current best guess at its meaning.
            ImGui::DragFloat("Depth of Contact Point Underwater", &wheelInfo->unkown1);
            ImGui::Checkbox("Constrained to Ground", (bool*)&wheelInfo->isConstrainedToGround);
        };
        for (int i = 0; i < pfxCar->wheelInfo.size; ++i) {
            char wheel_text[100];
            sprintf(wheel_text, "Wheel %d", i);
            ImGui::TreePush(wheel_text);
            if (ImGui::CollapsingHeader(wheel_text)) {
                DrawWheelInfo(wheel_text, &pfxCar->wheelInfo.Data[i]);
            }
            ImGui::TreePop();
        }
    }

    // NOTE(review): fetches the steering resource through a raw game-function
    // call at a hard-coded address; may legitimately return null.
    auto landSteering = util::hooking::func_call<jc3::SLandSteering*>(0x1434CD7E0, &pfxCar->landSteeringResourceCachePtr);
    if (landSteering) {
        if (ImGui::CollapsingHeader("Steering")) {
            ImGui::TreePush("Steering");
            ImGui::DragFloat("dead_zone", &landSteering->dead_zone);
            ImGui::DragFloat("saturation_zone", &landSteering->saturation_zone);
            ImGui::DragFloat("t_to_full_steer_s", &landSteering->t_to_full_steer_s);
            ImGui::DragFloat("max_speed_t_to_full_steer_s", &landSteering->max_speed_t_to_full_steer_s);
            ImGui::DragFloat("min_speed_kmph", &landSteering->min_speed_kmph);
            ImGui::DragFloat("max_speed_kmph", &landSteering->max_speed_kmph);
            ImGui::DragFloat("steer_angle_min_speed_deg", &landSteering->steer_angle_min_speed_deg);
            ImGui::DragFloat("steer_angle_max_speed_deg", &landSteering->steer_angle_max_speed_deg);
            ImGui::DragFloat("steer_curve_falloff", &landSteering->steer_curve_falloff);
            ImGui::DragFloat("countersteer_speed_factor", &landSteering->countersteer_speed_factor);
            ImGui::DragFloat("steer_in_speed_factor", &landSteering->steer_in_speed_factor);
            ImGui::DragFloat("steer_input_power_pc", &landSteering->steer_input_power_pc);
            ImGui::DragFloat("steer_input_power_durango", &landSteering->steer_input_power_durango);
            ImGui::DragFloat("steer_input_power_orbis", &landSteering->steer_input_power_orbis);
            ImGui::DragFloat("wheel_drift_aligning_strength", &landSteering->wheel_drift_aligning_strength);
            ImGui::TreePop();
            pfxCar->ApplyLandSteering(*landSteering);
        }
    }

    //util::hooking::func_call<void>(0x143794F60, real_vehicle);
    util::hooking::func_call<void>(0x1434A64B0, pfxCar); // This calculates some speed stuff, don't really know if it is required tbh
}
#!/usr/bin/env bash
#
# Copyright 2013-2018 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

# Export FOO according to the requested architecture: 'intel64' when asked
# for explicitly as the first argument, 'default' otherwise.
case "$1" in
    intel64) export FOO='intel64' ;;
    *)       export FOO='default' ;;
esac
import should from "should";
import e from "./";
import { ENDPOINTS as c } from "./constants";

// Spec for the api/data barrel module: it must expose exactly one request
// function per ENDPOINTS entry, hide internals, and support both Promise and
// error-first-callback calling styles.
describe("api/data/index", () => {
  describe("exports", () => {
    // Fixed typo in the spec description ("functon" -> "function").
    it("should export a function for each ENDPOINT constant", done => {
      should.equal(Object.keys(c).length, Object.keys(e).length);
      done();
    });

    it("should only export functions", done => {
      Object.keys(e)
        .every(o => typeof e[o] === "function")
        .should.be.true();
      done();
    });

    it("should not export endpoint runner", done => {
      // The internal "nba" runner must stay private to the module.
      e.should.not.have.property("nba");
      done();
    });
  });

  describe("requests", () => {
    it("should respond with a Promise", done => {
      const request = e.calendar();
      request.should.be.Promise();
      done();
    });

    it("should respond with error-first callback", done => {
      e.calendar((err, res) => {
        should.exist(res);
        should.not.exist(err);
        done();
      });
    });
  });
});
// file: forge/dist/utils/index.d.ts
// Barrel declaration file re-exporting the public utils surface.
export { ILogger } from './Logger';
export { Validate } from './Validate';
#!/bin/bash
# This script takes in a URL and sends a GET request to it, printing the
# response body to stdout.
#
# Usage: ./script.sh <url>
#
# curl flags: -s silences the progress meter, -L follows redirects.

# Fail early with a usage message instead of letting curl emit a confusing
# "no URL specified" error when the argument is missing.
if [ -z "$1" ]; then
    echo "Usage: $0 <url>" >&2
    exit 1
fi

curl -sL "$1"
import { Component, OnInit, ViewEncapsulation } from '@angular/core';

/**
 * Skeleton Angular component; template and styles live in the sibling
 * .html/.scss files.  Lifecycle hooks are currently empty placeholders.
 */
@Component({
  selector: 'my-custom-component',
  templateUrl: './my-custom-component.component.html',
  styleUrls: ['./my-custom-component.component.scss'],
  // ShadowDom encapsulation: styles are isolated by the browser's native
  // shadow DOM instead of Angular's emulated attribute scoping, so outside
  // page styles will not leak in (and vice versa).
  encapsulation: ViewEncapsulation.ShadowDom
})
export class MyCustomComponentComponent implements OnInit {

  constructor() { }

  ngOnInit(): void {
  }

}
# Build the X11 bindings in a subshell so the cwd is unchanged.
(make -C ../src Xlib.cma GLX.cma)
# Install the OpenGL bindings via opam.
opam install glMLite
# Locate the installed glMLite package directory.
GL_DIR=`ocamlfind query glMLite`
# Run the demo in the OCaml toplevel with both binding sets loaded,
# forwarding any extra script arguments.
ocaml -I ../src Xlib.cma GLX.cma -I $GL_DIR GL.cma glxdemo.ml $*
package com.sidequest.launcher.tools;

import android.app.DownloadManager;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.database.Cursor;
import android.net.Uri;
import android.os.Environment;
import android.util.Log;

import java.io.File;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Semaphore;

import com.sidequest.launcher.gui.UpdaterDialogHandler;

/**
 * Base Updater functionality: checks a remote source for a newer APK,
 * downloads it via Android's DownloadManager and launches the installer.
 * Subclasses supply the app identity and the version/URL discovery logic.
 */
public abstract class Updater
{
    /** Holds the current updater dialog handler */
    public UpdaterDialogHandler DialogHandler = null;

    /** Latest version of AppStarter */
    protected String mLatestVersion = null;

    /** Download URL of the latest APK */
    protected String mApkDownloadUrl = null;

    /** Indicates if process is busy */
    private Boolean mIsBusy = false;

    /** Update dir on external storage */
    private String mDownloadFolder = "AppStarterUpdates";

    /** Indicates if the download was succesful */
    private Boolean mDownloadSuccessful = false;

    /** Error reason */
    private String mDownloadErrorReason = null;

    /** Queue value of running download */
    private Long mQueueValue;

    /** Download manager */
    private DownloadManager mDownloadManager;

    /** Update semaphore */
    private Semaphore mUpdateSemaphore = null;

    /** Check for update listener */
    private OnCheckForUpdateFinishedListener mOnCheckForUpdateFinishedListener;

    /** Update progress listener */
    private OnUpdateProgressListener mOnUpdateProgressListener;

    /** Returns the name of the App */
    public abstract String getAppName();

    /** Returns the name of the App */
    public abstract String getPackageName(Context context);

    /** Returns true when newVersion is strictly newer than oldVersion. */
    public abstract Boolean isVersionNewer(String oldVersion, String newVersion);

    /** Update the values of the latest version and of the APK download URL for the latest version */
    protected abstract void updateLatestVersionAndApkDownloadUrl() throws Exception;

    /** Return the current version */
    public String getCurrentVersion(Context context)
    {
        return Tools.getCurrentAppVersion(context, getPackageName(context));
    }

    /** Return the latest version */
    public String getLatestVersion()
    {
        return mLatestVersion;
    }

    /** Set the check for update listener */
    public void setOnCheckForUpdateFinishedListener(OnCheckForUpdateFinishedListener listener)
    {
        mOnCheckForUpdateFinishedListener = listener;
    }

    /** Set progress update listener */
    public void setOnUpdateProgressListener(OnUpdateProgressListener listener)
    {
        mOnUpdateProgressListener = listener;
    }

    /**
     * Compares standard version strings like "2.1", "v2.3.1.0" or "version 1.3.2"
     * @param oldVersion Old version String
     * @param newVersion New version String
     * @return true if newVersion String is newer than oldVersion String
     */
    public Boolean isVersionNewerStandardCheck(String oldVersion, String newVersion)
    {
        Boolean retVal = false;

        try
        {
            List<Integer> oldVerList = getVersionList(oldVersion);
            List<Integer> newVerList = getVersionList(newVersion);

            if(oldVerList.size() > 0 && newVerList.size() > 0)
            {
                // Compare stage by stage, most significant first.
                for(Integer i = 0; i < newVerList.size(); i++)
                {
                    // If oldversion has no additional step and all
                    // steps before have been equal, newVersion is newer
                    if(i >= oldVerList.size())
                    {
                        retVal = true;
                        break;
                    }

                    // If newVersions current step is higher than oldversions stage,
                    // newVersion is newer
                    if(newVerList.get(i) > oldVerList.get(i))
                    {
                        retVal = true;
                        break;
                    }

                    // If oldVersions current step is higher than newVersions stage,
                    // oldVersion is newer
                    if(oldVerList.get(i) > newVerList.get(i))
                    {
                        break;
                    }

                    // Else versions have been equal --> no newer
                    // --> check next stage or finish
                }
            }
            else if(oldVerList.size() == 0 && newVerList.size() > 0)
            {
                // This happens if old version is not installed / not found
                // which should mean that the latest version is anyway newer
                retVal = true;
            }
        }
        catch(Exception ignore){} // Parse failures are treated as "not newer".

        return retVal;
    }

    /**
     * Separate version string in major, minor, ..
     * Most significant value first
     * @param versionString Version string to be parsed
     * @return List of Integers
     */
    private List<Integer> getVersionList(String versionString)
    {
        List<Integer> retVal = new ArrayList<Integer>();

        try
        {
            if(versionString != null && !versionString.equals(""))
            {
                // Delete everything that is no digit and no dot (like e.g. "v" or "version")
                versionString = versionString.replaceAll("[^\\d.]", "");

                // Split the remaining part by the dots
                String[] parts = versionString.split("\\.");

                // Now create the version list
                if(parts != null && parts.length > 0)
                {
                    for(String part : parts)
                    {
                        retVal.add(Integer.valueOf(part));
                    }
                }
            }
        }
        catch(Exception ignore) { } // Malformed segments yield a partial (or empty) list.

        return retVal;
    }

    /**
     * Check github for update.
     * Runs on a background thread; when synchron is true the calling thread
     * blocks until the check has finished.  Results are published through
     * mLatestVersion / mApkDownloadUrl and the check-finished listener.
     */
    public void checkForUpdate(Boolean synchron)
    {
        Thread checkForUpdateThread = new Thread(new Runnable()
        {
            @Override
            public void run()
            {
                try
                {
                    if (mIsBusy)
                    {
                        throw new Exception("Updater is already working..");
                    }
                    mIsBusy = true;

                    // Reset variables
                    mApkDownloadUrl = null;
                    mLatestVersion = null;

                    // Call the update mechanism of the actual updater
                    updateLatestVersionAndApkDownloadUrl();

                    // Check if latest version is not null
                    if(mLatestVersion == null)
                    {
                        throw new Exception("Latest version not found.");
                    }

                    // Check if download url is not null
                    if(mApkDownloadUrl == null)
                    {
                        throw new Exception("No .apk download URL found.");
                    }

                    // If everything was fine show success-message:
                    fireOnCheckForUpdateFinished("Check for update finished successful, found version: " + getLatestVersion());
                    Log.d(AppStarterUpdater.class.getName(), "Check for update finished successful, found version: " + getLatestVersion());
                }
                catch (Exception e)
                {
                    Log.d(AppStarterUpdater.class.getName(), "Update-Check-Error: " + e.getMessage());
                    fireOnCheckForUpdateFinished("Update-Check-Error: " + e.getMessage());
                }
                finally
                {
                    mIsBusy = false;
                }
            }
        });
        checkForUpdateThread.start();

        if(synchron)
        {
            try
            {
                checkForUpdateThread.join();
            }
            catch (InterruptedException ignore) {}
        }
    }

    /**
     * Performs the full update on a background thread: synchronous version
     * check, APK download through DownloadManager (progress polled while a
     * broadcast receiver waits for completion), then hands the downloaded APK
     * to the system installer.  Progress and errors are reported through the
     * OnUpdateProgressListener.
     */
    public void update(final Context context)
    {
        Thread updateThread = new Thread(new Runnable()
        {
            @Override
            public void run()
            {
                try
                {
                    if (mIsBusy)
                    {
                        throw new Exception("AppStarterUpdater is already working..");
                    }

                    // Check for update synchron
                    checkForUpdate(true);

                    mIsBusy = true;

                    // Check if update-check was successful and version is newer
                    String oldVersion = getCurrentVersion(context);
                    String latestVersion = getLatestVersion();
                    if (latestVersion == null || !isVersionNewer(oldVersion, latestVersion))
                    {
                        throw new Exception("No newer version found..");
                    }

                    String apkUrl = mApkDownloadUrl;
                    if(apkUrl == null)
                    {
                        throw new Exception("Download URL of new version not found..");
                    }
                    Log.d(AppStarterUpdater.class.getName(), "Download from URL: " + apkUrl);
                    fireOnUpdateProgressListener(false, 10, "Newer version found, start download..");

                    // Create download-dir and start download
                    File downloadDir = new File(Environment.getExternalStorageDirectory(), mDownloadFolder);
                    // Media-scanner broadcasts keep the folder visible over MTP/gallery.
                    context.sendBroadcast(new Intent(Intent.ACTION_MEDIA_SCANNER_SCAN_FILE, Uri.parse("file://" + downloadDir.getAbsolutePath())));
                    if(downloadDir.exists() && !downloadDir.isDirectory())
                    {
                        if(!downloadDir.delete())
                        {
                            throw new Exception("Can not delete file: " + downloadDir.getAbsolutePath());
                        }
                    }
                    if(!downloadDir.exists() && !downloadDir.mkdir())
                    {
                        throw new Exception("Can not create download folder: " + downloadDir.getAbsolutePath());
                    }
                    else
                    {
                        // Clear leftovers from previous update attempts.
                        Tools.deleteDirectoryRecursively(context, downloadDir, true);
                    }
                    context.sendBroadcast(new Intent(Intent.ACTION_MEDIA_SCANNER_SCAN_FILE, Uri.parse("file://" + downloadDir.getAbsolutePath())));

                    File downloadFile = new File(downloadDir, getAppName() + "-" + latestVersion + ".apk");

                    mDownloadSuccessful = false;
                    mDownloadErrorReason = null;
                    DownloadManager.Request localRequest = new DownloadManager.Request(Uri.parse(apkUrl));
                    localRequest.setDescription("Downloading " + getAppName() + " " + latestVersion);
                    localRequest.setTitle(getAppName() + " Update");
                    localRequest.allowScanningByMediaScanner();
                    localRequest.setNotificationVisibility(1);
                    Log.d(AppStarterUpdater.class.getName(), "Download to file://" + downloadFile.getAbsolutePath());
                    localRequest.setDestinationUri(Uri.parse("file://" + downloadFile.getAbsolutePath()));

                    // Receiver resolves the final download status and releases
                    // the semaphore the polling loop below is waiting on.
                    context.registerReceiver(new BroadcastReceiver()
                    {
                        public void onReceive(Context context, Intent intent)
                        {
                            String action = intent.getAction();
                            Log.d(AppStarterUpdater.class.getName(), "Received intent: " + action);
                            if (DownloadManager.ACTION_DOWNLOAD_COMPLETE.equals(action))
                            {
                                DownloadManager.Query query = new DownloadManager.Query();
                                query.setFilterById(mQueueValue);
                                Cursor c = mDownloadManager.query(query);
                                if (c.moveToFirst())
                                {
                                    int columnIndex = c.getColumnIndex(DownloadManager.COLUMN_STATUS);
                                    if (DownloadManager.STATUS_SUCCESSFUL == c.getInt(columnIndex))
                                    {
                                        mDownloadSuccessful = true;
                                    }
                                    else
                                    {
                                        // Try to get error reason
                                        switch(c.getInt(c.getColumnIndex(DownloadManager.COLUMN_REASON)))
                                        {
                                            case DownloadManager.ERROR_CANNOT_RESUME:
                                                mDownloadErrorReason = "ERROR_CANNOT_RESUME";
                                                break;
                                            case DownloadManager.ERROR_DEVICE_NOT_FOUND:
                                                mDownloadErrorReason = "ERROR_DEVICE_NOT_FOUND";
                                                break;
                                            case DownloadManager.ERROR_FILE_ALREADY_EXISTS:
                                                mDownloadErrorReason = "ERROR_FILE_ALREADY_EXISTS";
                                                break;
                                            case DownloadManager.ERROR_FILE_ERROR:
                                                mDownloadErrorReason = "ERROR_FILE_ERROR";
                                                break;
                                            case DownloadManager.ERROR_HTTP_DATA_ERROR:
                                                mDownloadErrorReason = "ERROR_HTTP_DATA_ERROR";
                                                break;
                                            case DownloadManager.ERROR_INSUFFICIENT_SPACE:
                                                mDownloadErrorReason = "ERROR_INSUFFICIENT_SPACE";
                                                break;
                                            case DownloadManager.ERROR_TOO_MANY_REDIRECTS:
                                                mDownloadErrorReason = "ERROR_TOO_MANY_REDIRECTS";
                                                break;
                                            case DownloadManager.ERROR_UNHANDLED_HTTP_CODE:
                                                mDownloadErrorReason = "ERROR_UNHANDLED_HTTP_CODE";
                                                break;
                                            default:
                                                mDownloadErrorReason = "ERROR_UNKNOWN";
                                                break;
                                        }
                                    }
                                }
                                c.close();

                                // Unregister receiver
                                context.unregisterReceiver(this);

                                // Release semaphore in any case..
                                Log.d(AppStarterUpdater.class.getName(), "Release semaphore..");
                                mUpdateSemaphore.release();
                            }
                        }
                    }, new IntentFilter(DownloadManager.ACTION_DOWNLOAD_COMPLETE));

                    Log.d(AppStarterUpdater.class.getName(), "Aquire semaphore");
                    mUpdateSemaphore = new Semaphore(1);
                    mUpdateSemaphore.acquire();

                    // Here the download is performed
                    Log.d(AppStarterUpdater.class.getName(), "Start download");
                    mDownloadManager = (DownloadManager)context.getSystemService(context.DOWNLOAD_SERVICE);
                    mQueueValue = mDownloadManager.enqueue(localRequest);

                    Log.d(AppStarterUpdater.class.getName(), "Aquire semaphore again");
                    // Poll progress (mapped to 10..90%) until the receiver
                    // releases the semaphore on completion.
                    int lastPercentage = 0;
                    while(!mUpdateSemaphore.tryAcquire())
                    {
                        DownloadManager.Query q = new DownloadManager.Query();
                        q.setFilterById(mQueueValue);
                        Cursor cursor = mDownloadManager.query(q);
                        int percentage = 0;
                        if(cursor.moveToFirst())
                        {
                            int bytes_downloaded = cursor.getInt(cursor.getColumnIndex(DownloadManager.COLUMN_BYTES_DOWNLOADED_SO_FAR));
                            int bytes_total = cursor.getInt(cursor.getColumnIndex(DownloadManager.COLUMN_TOTAL_SIZE_BYTES));
                            percentage = (int)Math.round((((double)bytes_downloaded / (double)bytes_total) * 100.0) * 8.0/10.0);
                            if(percentage < 0) percentage = 0;
                            if(percentage > 100) percentage = 100;
                        }
                        cursor.close();
                        if(percentage > lastPercentage)
                        {
                            lastPercentage = percentage;
                            fireOnUpdateProgressListener(false, 10 + percentage, "Download in progress..");
                        }
                        Thread.sleep(500);
                    }
                    mUpdateSemaphore.release();
                    mUpdateSemaphore = null;
                    Log.d(AppStarterUpdater.class.getName(), "Download finished");

                    if(!mDownloadSuccessful)
                    {
                        String reason = "";
                        if(mDownloadErrorReason != null)
                        {
                            reason = " Reason: " + mDownloadErrorReason;
                        }
                        throw new Exception("Download failed.." + reason);
                    }

                    context.sendBroadcast(new Intent(Intent.ACTION_MEDIA_SCANNER_SCAN_FILE, Uri.parse("file://" + downloadFile.getAbsolutePath())));
                    fireOnUpdateProgressListener(false, 80, "Download finished, start installation..");

                    // Hand the APK to the system package installer.
                    Intent installIntent = new Intent(Intent.ACTION_VIEW);
                    installIntent.setDataAndType(Uri.parse("file://" + downloadFile.getAbsolutePath()), "application/vnd.android.package-archive");
                    installIntent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
                    context.startActivity(installIntent);

                    fireOnUpdateProgressListener(false, 100, "Successfully initiated update..");
                }
                catch(Exception e)
                {
                    Log.d(AppStarterUpdater.class.getName(), "UpdateError: " + e.getMessage());
                    fireOnUpdateProgressListener(true, 100, e.getMessage());
                }
                finally
                {
                    mIsBusy = false;
                }
            }
        });
        updateThread.start();
    }

    /** Check for update */
    public void checkForUpdate()
    {
        checkForUpdate(false);
    }

    /**
     * Fire update progress
     * @param percent Percentage
     * @param message Message
     */
    protected void fireOnUpdateProgressListener(final Boolean isError, final Integer percent, final String message)
    {
        // Dispatched on a fresh thread so listeners cannot block the updater.
        Thread fireThread = new Thread(new Runnable()
        {
            @Override
            public void run()
            {
                if(mOnUpdateProgressListener != null)
                {
                    mOnUpdateProgressListener.onUpdateProgress(isError, percent, message);
                }
            }
        });
        fireThread.start();
    }

    /**
     * Fire check for update finished message
     * @param message Message to fire
     */
    protected void fireOnCheckForUpdateFinished(final String message)
    {
        Thread fireThread = new Thread(new Runnable()
        {
            @Override
            public void run()
            {
                if(mOnCheckForUpdateFinishedListener != null)
                {
                    mOnCheckForUpdateFinishedListener.onCheckForUpdateFinished(message);
                }
            }
        });
        fireThread.start();
    }

    /**
     * Interface for progress messages of update check
     */
    public interface OnCheckForUpdateFinishedListener
    {
        public void onCheckForUpdateFinished(String message);
    }

    /**
     * Interface for progress messages of performing an update
     */
    public interface OnUpdateProgressListener
    {
        public void onUpdateProgress(Boolean isError, Integer percent, String message);
    }
}
<reponame>drzamich/warsawjs-workshop-35-legacy-code module.exports = class CounterBar { constructor(streak) { this.streak = streak; } getBar() { let bar = '\x1B[42m'; // green color for (let i = 0; i <= this.streak.noDays(); i += 1) { bar += ' '; // add spaces } bar += '\x1B[0m '; // reset return bar; } };
# import necessary libraries
import pandas as pd
import numpy as np
from nltk.corpus import stopwords
from sklearn.feature_extraction.text import CountVectorizer
from sklearn.feature_extraction.text import TfidfTransformer
from sklearn.naive_bayes import MultinomialNB

# Load the data set.  Latin-1 decoding tolerates non-UTF-8 bytes that are
# common in scraped text corpora.
data = pd.read_csv('dataset.csv', encoding="ISO-8859-1")

# Split features and labels; 'class' is the target column.
X = data.drop(columns=['class'])
y = data['class']

# Vectorize the text into term/token counts.
# NOTE(review): the original built an English stop-word set here but never
# passed it to CountVectorizer (the unused local has been removed); pass
# stop_words='english' below if stop-word filtering was actually intended.
vect = CountVectorizer().fit(X['text'])

# Transform the text into a vector of term/token counts.
X_vect = vect.transform(X['text']).toarray()

# Use a TF-IDF transformer to reduce the influence of more common words.
tfidf_transformer = TfidfTransformer().fit(X_vect)
X_tfidf = tfidf_transformer.transform(X_vect).toarray()

# Fit the Multinomial Naive Bayes classifier on the transformed text.
classifier = MultinomialNB().fit(X_tfidf, y)


def prediction(text):
    """Predict the class of a single raw text string.

    Applies the same count-vectorize -> TF-IDF pipeline used at training
    time, prints the predicted label array (preserving the original
    behavior), and now also returns it so callers can use the result.
    """
    # Vectorize the input with the fitted vocabulary.
    input_data = vect.transform([text]).toarray()
    # Apply the fitted TF-IDF weighting.
    input_tfidf = tfidf_transformer.transform(input_data).toarray()
    # Predict and report.
    pred = classifier.predict(input_tfidf)
    print(pred)
    return pred
using System;

/// <summary>Helpers for locating per-object metadata files.</summary>
public class MetadataManager
{
    /// <summary>
    /// Builds the metadata filename for the given GUID, e.g.
    /// "metadata_00000000-0000-0000-0000-000000000000.txt".
    /// </summary>
    /// <param name="guid">Identifier embedded in the filename.</param>
    /// <returns>The lower-case, hyphenated metadata filename.</returns>
    public string GetMetadataFilename(Guid guid)
    {
        // Guid's "D" format already yields lower-case hyphenated hex, so the
        // original culture-sensitive ToLower() call was redundant (and the
        // invariant format avoids any culture-dependent casing surprises).
        string filename = $"metadata_{guid:D}.txt";
        return filename;
    }
}
// file: node_modules/@angular-eslint/eslint-plugin-template/dist/rules/no-positive-tabindex.d.ts
// Generated declaration file for the "no-positive-tabindex" template lint
// rule; vendored under node_modules — do not edit by hand.
export declare type MessageIds = 'noPositiveTabindex' | 'suggestNonNegativeTabindex';
export declare const RULE_NAME = "no-positive-tabindex";
declare const _default: import("@typescript-eslint/experimental-utils/dist/ts-eslint/Rule").RuleModule<MessageIds, [], import("@typescript-eslint/experimental-utils/dist/ts-eslint/Rule").RuleListener>;
export default _default;
#!/bin/bash -ex
# Pre-install script for an OpenStack (Liberty-era) controller: installs the
# python client, NTP, RabbitMQ and MariaDB, and applies base configuration.
# NOTE(review): config.cfg sourcing is commented out, so $RABBIT_PASS and
# $MYSQL_PASS below are presumably expected from the caller's environment —
# confirm before running (with -u unset vars would otherwise expand empty).
# source config.cfg

echo "Install python client"
apt-get -y install python-openstackclient
sleep 5

echo "Install and config NTP"
sleep 3
apt-get install ntp -y
# Back up the stock config, then rebuild it without comments/blank lines.
cp /etc/ntp.conf /etc/ntp.conf.bka
rm /etc/ntp.conf
cat /etc/ntp.conf.bka | grep -v ^# | grep -v ^$ >> /etc/ntp.conf

## Config NTP in LIBERTY
# Swap the default Ubuntu pool for regional pool servers.
sed -i 's/server ntp.ubuntu.com/ \
server 0.vn.pool.ntp.org iburst \
server 1.asia.pool.ntp.org iburst \
server 2.asia.pool.ntp.org iburst/g' /etc/ntp.conf

# Relax the default restrict rules so peers may sync from this host.
sed -i 's/restrict -4 default kod notrap nomodify nopeer noquery/ \
#restrict -4 default kod notrap nomodify nopeer noquery/g' /etc/ntp.conf

sed -i 's/restrict -6 default kod notrap nomodify nopeer noquery/ \
restrict -4 default kod notrap nomodify \
restrict -6 default kod notrap nomodify/g' /etc/ntp.conf

# sed -i 's/server/#server/' /etc/ntp.conf
# echo "server $LOCAL_IP" >> /etc/ntp.conf

##############################################

echo "Install and Config RabbitMQ"
sleep 3
apt-get install rabbitmq-server -y
# Create the "openstack" messaging user with full permissions.
rabbitmqctl add_user openstack $RABBIT_PASS
rabbitmqctl set_permissions openstack ".*" ".*" ".*"
# rabbitmqctl change_password guest $RABBIT_PASS
sleep 3
service rabbitmq-server restart

echo "Finish setup pre-install package !!!"

echo "##### Install MYSQL #####"
sleep 3
# Pre-seed the root password so the install runs non-interactively.
echo mysql-server mysql-server/root_password password $MYSQL_PASS \
| debconf-set-selections
echo mysql-server mysql-server/root_password_again password $MYSQL_PASS \
| debconf-set-selections
apt-get -y install mariadb-server python-mysqldb curl

echo "##### Configuring MYSQL #####"
sleep 3

echo "########## CONFIGURING FOR MYSQL ##########"
sleep 5
# Drop in an OpenStack-friendly MySQL config (UTF-8, InnoDB, listen on all
# interfaces).
touch /etc/mysql/conf.d/mysqld_openstack.cnf
cat << EOF > /etc/mysql/conf.d/mysqld_openstack.cnf
[mysqld]
bind-address = 0.0.0.0
[mysqld]
default-storage-engine = innodb
innodb_file_per_table
collation-server = utf8_general_ci
init-connect = 'SET NAMES utf8'
character-set-server = utf8
EOF

sleep 5
echo "Restart MYSQL"
service mysql restart
from typing import Dict, List, Type


def map_urls_to_views(
    urlpatterns: List[str],
    view_classes: "List[Type[View]]",
) -> "Dict[str, Type[View]]":
    """Pair each URL pattern with the view class at the same position.

    Args:
        urlpatterns: URL pattern strings, in registration order.
        view_classes: View classes, parallel to ``urlpatterns``.

    Returns:
        Mapping from each URL pattern to its view class.

    Raises:
        ValueError: If the two lists differ in length.  (The original code
            silently dropped extra views, or raised IndexError when
            ``view_classes`` was the shorter list.)
    """
    # NOTE(review): ``View`` is not imported anywhere in this file; the
    # annotations referencing it are quoted so they are not evaluated at
    # definition time (unquoted, the module raised NameError on import).
    # Confirm where ``View`` should come from (e.g. django.views.View).
    if len(urlpatterns) != len(view_classes):
        raise ValueError(
            "urlpatterns and view_classes must have the same length: "
            f"{len(urlpatterns)} != {len(view_classes)}"
        )
    return dict(zip(urlpatterns, view_classes))
#!/bin/bash
# Installs Oracle Java 7 unless an existing installation is already present.
# This script depends on the ./java_oracle_license.sh for installation of
# Oracle java dependencies (license pre-acceptance).

JAVA7_DIR=/usr/lib/jvm/java-7-oracle

if [ ! -d "$JAVA7_DIR" ]; then
	echo "java 7 installation"
	apt-get install -y -q oracle-java7-installer
	# Feed empty answers to any remaining prompts while fixing broken deps.
	yes "" | apt-get -f install
else
	echo "Found java-7-oracle installation"
fi
import string

# Common English stop words excluded from the count.  Built once at module
# level (frozenset) instead of being reconstructed on every call.
_STOP_WORDS = frozenset([
    "i", "me", "my", "myself", "we", "our", "ours", "ourselves", "you",
    "your", "yours", "yourself", "yourselves", "he", "him", "his",
    "himself", "she", "her", "hers", "herself", "it", "its", "itself",
    "they", "them", "their", "theirs", "themselves", "what", "which",
    "who", "whom", "this", "that", "these", "those", "am", "is", "are",
    "was", "were", "be", "been", "being", "have", "has", "had", "having",
    "do", "does", "did", "doing", "a", "an", "the", "and", "but", "if",
    "or", "because", "as", "until", "while", "of", "at", "by", "for",
    "with", "about", "against", "between", "into", "through", "during",
    "before", "after", "above", "below", "to", "from", "up", "down", "in",
    "out", "on", "off", "over", "under", "again", "further", "then",
    "once", "here", "there", "when", "where", "why", "how", "all", "any",
    "both", "each", "few", "more", "most", "other", "some", "such", "no",
    "nor", "not", "only", "own", "same", "so", "than", "too", "very", "s",
    "t", "can", "will", "just", "don", "should", "now",
])


def count_unique_words(file_path: str) -> int:
    """Count distinct non-stop-words in a text file.

    The file is lowercased, punctuation is stripped, the text is split on
    whitespace, common English stop words are removed, and the number of
    distinct remaining words is returned.

    Args:
        file_path: Path of the text file to analyse.

    Returns:
        Number of unique non-stop-words in the file.

    Raises:
        OSError: If the file cannot be opened.
    """
    with open(file_path, 'r') as file:
        content = file.read().lower()

    # Remove punctuation before splitting into words.
    content = content.translate(str.maketrans('', '', string.punctuation))
    words = content.split()

    return len({word for word in words if word not in _STOP_WORDS})


if __name__ == "__main__":
    # Example usage -- guarded so importing this module no longer performs
    # file I/O (the original ran unconditionally and crashed on import when
    # sample.txt was missing).
    file_path = "sample.txt"
    print(count_unique_words(file_path))  # Output: 7
(function(angular) {
  'use strict';

  // OMDb movie search module: a controller that queries the OMDb search API
  // for the typed term, then fetches full details for each hit, plus a
  // directive that renders the result template.
  angular.module('OMDbAPISearch', [])
    .controller('searchMovies', ['$scope', '$http', function($scope, $http) {
      $scope.method = 'GET';

      // Runs the search for $scope.searchparam and populates
      // $scope.movieResults; toggles the jQuery-managed result panels.
      $scope.fetch = function() {
        if ($scope.searchparam) {
          $scope.url = 'https://www.omdbapi.com/?apikey=b801da26&s=' + $scope.searchparam + '&type=movie&r=json';
          $http({
            method: $scope.method,
            url: $scope.url
          }).
          then(function(response) {
            // NOTE(review): OMDb returns Response as the *string* "True" or
            // "False"; both are truthy, so this else branch appears
            // unreachable -- confirm and compare against "True" explicitly
            // if the "no results" UI never shows.
            if (response.data.Response) {
              $('.results').css('display', 'block');
              $('.noResults').css('display', 'none');
              var theSrchResults = response.data["Search"];
              angular.forEach(theSrchResults, function(obj) {
                // Loop over each found movie and fetch its full details,
                // attached to the result object as `details`.
                $http({
                  method: $scope.method,
                  url: 'https://www.omdbapi.com/?apikey=b801da26&i=' + obj.imdbID + '&plot=full&r=json&tomatoes=true'
                }).
                then(function(response) {
                  obj.details = response.data;
                });
              });
              $scope.movieResults = theSrchResults;
            } else {
              // Error: movie not found.
              console.log("non trouvé");
              $('.results').css('display', 'none');
              $('.noResults').css('display', 'block');
              $('.noResults').html("<strong>Aucun résultat trouvé :(.</strong>");
            }
          }, function(response) {
            // HTTP-level failure (network / server error).
            console.log("failure");
            $('.results').css('display', 'none');
            $('.noResults').css('display', 'block');
            $('.noResults').html("<strong>Erreur.</strong>");
          });
        } else {
          // No input: hide both panels and flash the search box.
          $('.results').css('display', 'none');
          $('.noResults').css('display', 'none');
          $('#theSearch').fadeIn(100).fadeOut(100).fadeIn(100).fadeOut(100).fadeIn(100);
        }
      };
    }
    ])
    .directive('movieSrchResults', function() {
      // Renders the search-result markup for the controller's scope.
      return {
        templateUrl: '../movieResults.html'
      };
    });
})(window.angular);
import { EditorState, basicSetup } from "@codemirror/basic-setup";
import { EditorView, ViewUpdate } from "@codemirror/view";
import { highlightSpecialChars } from "@codemirror/highlight";

// Builds a CodeMirror 6 editor seeded with `initialText`, mounted on
// document.body, and returns the view plus a helper intended to highlight
// every occurrence of `specificWord`.
function setupCustomEditor(initialText, specificWord) {
  const state = EditorState.create({
    doc: initialText,
    extensions: [basicSetup, highlightSpecialChars()],
  });

  const editorView = new EditorView({
    state,
    parent: document.body,
  });

  // NOTE(review): several calls below do not match the documented
  // CodeMirror 6 API and look non-functional as written -- verify against
  // @codemirror/state and @codemirror/view docs:
  //  - `doc.iterate({from, to, change})`: Text has iterators, but none that
  //    take this options object with a `change` callback;
  //  - `EditorView.decorations.update.of(...)`: decorations are normally
  //    provided via a StateField/ViewPlugin using Decoration.set, not
  //    dispatched as effects this way.
  function highlightWord() {
    // NOTE(review): `specificWord` is interpolated into the RegExp
    // unescaped; words containing regex metacharacters would misbehave.
    const regex = new RegExp(specificWord, "g");
    const decorations = [];

    editorView.state.doc.iterate({
      from: 0,
      to: editorView.state.doc.length,
      change: (from, to, text) => {
        const matches = text.match(regex);
        if (matches) {
          for (const match of matches) {
            // NOTE(review): indexOf finds only the *first* occurrence in
            // this chunk, so repeated matches all map to the same range.
            decorations.push({
              from: from + text.indexOf(match),
              to: from + text.indexOf(match) + match.length,
              class: "highlighted-word",
            });
          }
        }
      },
    });

    editorView.dispatch({
      effects: EditorView.decorations.update.of(decorations),
    });
  }

  return { editorView, highlightWord };
}
import chalk from 'chalk';
import fs from 'fs';
import JSZip from 'jszip';
import path from 'path';
import request from 'request';
import tmp from 'tmp';

/**
 * Fetches a JSS sample template from a GitHub repository: enumerates the
 * repo's `samples/` folder via the GitHub contents API to validate the
 * requested template name, downloads the branch as a zip archive, and
 * extracts just the chosen template into `destinationPath`.
 */
export class GitHubSource {
  branch: string;
  githubListApi: string;      // contents-API URL used to enumerate templates
  githubDownloadUrl: string;  // zip archive URL for the whole branch
  argv: any;                  // parsed CLI args (template, proxy, repository, branch)
  destinationPath: string;    // directory the template is extracted into

  constructor(argv: any, destinationPath: string) {
    // Defaults target the official JSS samples repository on master.
    const repository = argv.repository || 'Sitecore/jss';
    const branch = argv.branch || 'master';

    this.branch = branch;
    this.githubListApi = `https://api.github.com/repos/${repository}/contents/samples?ref=${branch}`;
    this.githubDownloadUrl = `https://github.com/${repository}/archive/${branch}.zip`;
    this.argv = argv;
    this.destinationPath = destinationPath;
  }

  // Validates the requested template, then downloads and extracts it via a
  // self-cleaning temporary directory.
  async getFromSource() {
    await this.verifyTemplate();

    return new Promise((resolve, reject) => {
      tmp.setGracefulCleanup();

      // unsafeCleanup just means it'll kill any files left in the temp folder
      // that we've created.
      tmp.dir({ unsafeCleanup: true }, async (err, tempDir, cleanupTempDir) => {
        if (err) {
          reject(err);
        }
        // NOTE(review): execution continues after reject(err) above; a
        // `return` after reject would avoid running with an invalid tempDir.
        const zipFileName = path.join(tempDir, 'jss.zip');

        await this.downloadRepo(zipFileName);
        await this.extractTemplateFiles(zipFileName);

        cleanupTempDir();
        resolve();
      });
    });
  }

  // Lists samples/ in the source repository and exits the process (code 1)
  // if the requested template name is not among them.
  async verifyTemplate() {
    return new Promise((resolve) => {
      // tslint:disable-next-line:max-line-length
      request.get(this.githubListApi, { proxy: this.argv.proxy, json: true, headers: { 'User-Agent': 'SitecoreJSSCLI' } }, (error, response, body) => {
        if (error) {
          console.error(chalk.red(error));
          process.exit(1);
        }

        if (response.statusCode !== 200) {
          console.error(chalk.red(`Server sent ${response.statusCode} ${response.statusMessage} while enumerating templates.`));
          process.exit(1);
        }

        if (!body || !Array.isArray(body)) {
          console.log(body);
          console.error(chalk.red('Received unexpected response from server while trying to enumerate templates.'));
          process.exit(1);
        }

        const apiResult: Array<{ name: string }> = body;

        if (!apiResult.some((result) => result.name === this.argv.template)) {
          console.error(chalk.red(`Template ${this.argv.template} did not exist.`));
          console.error(chalk.red('Valid templates are: '));
          apiResult.forEach((result) => {
            // node-headless-ssr-proxy is excluded from the listed options.
            if (result.name === 'node-headless-ssr-proxy') {
              return;
            }
            console.error(chalk.yellow(result.name));
          });
          process.exit(1);
        }

        resolve(apiResult);
      });
    });
  }

  // Streams the branch zip archive into `fileName`; resolves when the write
  // stream finishes, rejects on a request error.
  async downloadRepo(fileName: string) {
    console.log(chalk.cyan(`Acquiring templates from ${this.githubDownloadUrl}...`));

    await new Promise((resolve, reject) => {
      const res = request.get(this.githubDownloadUrl, { proxy: this.argv.proxy, headers: { 'User-Agent': 'SitecoreJSSCLI' } });
      const fileStream = fs.createWriteStream(fileName, { autoClose: true });

      res.pipe(fileStream);
      res.on('error', reject);
      fileStream.on('finish', resolve);
    });
  }

  // Extracts only the entries under <archive-root>/samples/<template>/ from
  // the downloaded zip into destinationPath, preserving relative paths.
  async extractTemplateFiles(zipFile: string) {
    console.log(chalk.cyan(`Extracting template ${this.argv.template}...`));

    // Capture group 1 is the path relative to the template folder; the
    // leading [^/]+ skips GitHub's <repo>-<branch>/ archive root.
    const filter = new RegExp(`^[^/]+/samples/${this.argv.template}/(.+)`);

    // NOTE(review): non-recursive mkdirSync throws if destinationPath already
    // exists or its parent is missing -- confirm callers guarantee both.
    fs.mkdirSync(this.destinationPath);

    return new Promise((resolve, reject) => {
      fs.readFile(zipFile, (err, data) => {
        if (err) {
          reject(err);
        }
        // NOTE(review): execution continues after reject(err) above; `data`
        // would be undefined in that case.

        const jszip = new JSZip();
        const writePromises: Array<Promise<any>> = [];

        jszip.loadAsync(data).then(async (zip) => {
          // NOTE(review): directory creation relies on directory entries
          // appearing before the files they contain in the zip listing.
          zip.filter((innerPath) => filter.test(innerPath)).forEach((file) => {
            const relativePath = (filter.exec(file.name) as any)[1];
            const outputPath = path.join(this.destinationPath, relativePath);

            if (file.dir) {
              // create directory
              if (!fs.existsSync(outputPath)) {
                console.log(chalk.gray(`mkdir ${outputPath}`));
                fs.mkdirSync(outputPath);
              }
            } else {
              // read file from zip, write to file
              writePromises.push(
                file.async('nodebuffer').then((content) => {
                  console.log(chalk.gray(`write ${outputPath}`));
                  fs.writeFileSync(outputPath, content);
                })
              );
            }
          });

          await Promise.all(writePromises);

          resolve();
        });
      });
    });
  }
}
<gh_stars>0 import React, { Component, } from 'react'; import { BrowserRouter, Route, } from 'react-router-dom'; import Home from './pages/home/index'; import Right from './common/right/index'; import Write from './pages/write/index'; import List from './pages/list/index'; import Detail from './pages/detail/index'; import Category from './pages/category/index'; import Axios from './axios'; import { Left, Main, } from './style'; Component.prototype.$axios = Axios; // 全局使用 function App() { return ( <BrowserRouter> <Main> <Left> <Route path="/" exact component={Home} /> <Route path="/write" exact component={Write} /> <Route path="/list" component={List} exac /> <Route path="/detail/:id" component={Detail} exact /> <Route path="/category" component={Category} exact /> </Left> <Right /> </Main> </BrowserRouter> ); } export default App;
#!/usr/bin/env sh

################################################################################
# RUN EACH COMMAND ON SEPARATE TERMINALS [before Docker implementation]
################################################################################

# Local Kafka installation root.
# FIX: this variable was defined but never used -- every command below
# hard-coded the same absolute /Users/screative/... path instead.
local_kafka=$HOME/devbox/engineering/kafka/kafka3

# Start ZooKeeper
sh "$local_kafka/bin/zookeeper-server-start.sh" "$local_kafka/config/zookeeper.properties"

# Start Kafka Server
sh "$local_kafka/bin/kafka-server-start.sh" "$local_kafka/config/server.properties"

# Create topic
sh "$local_kafka/bin/kafka-topics.sh" --create --zookeeper localhost:2181 --replication-factor 1 --partitions 1 --topic invoices

# Start Producer
sh "$local_kafka/bin/kafka-console-producer.sh" --broker-list localhost:9092 --topic invoices
<reponame>DigitalGenius/react-chat-window import React from 'react'; import PulseLoader from 'react-spinners/PulseLoader'; const AgentTypingMessage = () => { return ( <div className="sc-message--agent-typing"> <PulseLoader size={6} margin={1} color={'#cccccc'} loading={true} /> <span>Agent is typing</span> </div> ); }; export default AgentTypingMessage;
#!/bin/bash
# Regenerates Thrift bindings for the language generator named in $1
# (e.g. "py", "go") from src/spec.thrift into src/$1/gen_inbox.
set -e

# Refuse to run without a target language: $1 is spliced into rm -rf below.
if [ -z "$1" ]; then
	echo "usage: $0 <thrift-generator>" >&2
	exit 1
fi

# Change to the parent directory.
cd "$(dirname "$(dirname "$(readlink -fm "$0")")")"

# Generate Thrift code.
cd src
rm -rf "$1/gen_inbox"
mkdir "$1/gen_inbox"
thrift -r --gen "$1" -out "$1/gen_inbox" spec.thrift
<filename>spec/unit/puppet/type/consul_token_spec.rb require 'spec_helper' describe Puppet::Type.type(:consul_token) do it 'fails if no name is provided' do expect do Puppet::Type.type(:consul_token).new(type: 'client') end.to raise_error(Puppet::Error, %r{Title or name must be provided}) end it 'fails if accessor_id ist not a string' do expect do Puppet::Type.type(:consul_token).new(name: 'foo', accessor_id: {}) end.to raise_error(Puppet::Error, %r{ID must be a string}) end it 'fails if secret_id ist not a string' do expect do Puppet::Type.type(:consul_token).new(name: 'foo', secret_id: {}) end.to raise_error(Puppet::Error, %r{ID must be a string}) end it 'fails if policy name list is not an array' do expect do Puppet::Type.type(:consul_token).new(name: 'foo', policies_by_name: [[]]) end.to raise_error(Puppet::Error, %r{Policy name list must be an array of strings}) end it 'fails if policy ID list is not an array' do expect do Puppet::Type.type(:consul_token).new(name: 'foo', policies_by_id: [[]]) end.to raise_error(Puppet::Error, %r{Policy ID list must be an array of strings}) end context 'with name defined' do policies_by_name = ['test_1' 'test_2'] policies_by_id = ['abc-123' 'xyz-456'] before do @token = Puppet::Type.type(:consul_token).new( name: 'testing', accessor_id: '39c75e12-7f43-0a40-dfba-9aa3fcda08d4', policies_by_name: policies_by_name, policies_by_id: policies_by_id ) end it 'accepts a accessor id' do expect(@token[:accessor_id]).to eq('39c75e12-7f43-0a40-dfba-9aa3fcda08d4') end it 'accepts policy names' do expect(@token[:policies_by_name]).to eq(policies_by_name) end it 'accepts policy IDs' do expect(@token[:policies_by_id]).to eq(policies_by_id) end it 'defaults to localhost' do expect(@token[:hostname]).to eq('localhost') end it 'defaults to http' do expect(@token[:protocol]).to eq(:http) end it 'defaults to port 8500' do expect(@token[:port]).to eq(8500) end end end
# Launch NPM linter

# ANSI color codes.  printf '%b' is used below because a plain `echo` does
# not interpret \e escapes portably under sh/dash (they print literally).
COLOR_NAME='\e[33m'
COLOR_ARROW='\e[90m'
COLOR_FILES='\e[96m'
COLOR_DEFAULT='\e[39m'

PACKAGE_FILES='. ./packages'
LOCK_FILE='package-lock.json'

printf '%b\n' "${COLOR_NAME}npmlint ${COLOR_ARROW}-> ${COLOR_FILES}${PACKAGE_FILES}${COLOR_DEFAULT}"
# ${PACKAGE_FILES} is intentionally unquoted: it must word-split into two
# separate path arguments.
npmPkgJsonLint ${PACKAGE_FILES}

printf '%b\n' "${COLOR_NAME}lockfilelint ${COLOR_ARROW}-> ${COLOR_FILES}${LOCK_FILE}${COLOR_DEFAULT}"
lockfile-lint --type npm --path ${LOCK_FILE}
import { createElement } from 'react' import { componentIndex } from 'react-dnd-documentation-examples' import processImages from './processImagesInMarkdownAst' const log = require('debug')('site:renderHtmlAst') const rehypeReact = require('rehype-react') // Registers the examples as custom components const renderAst = new rehypeReact({ createElement, components: componentIndex, }).Compiler export default function renderHtmlAst(node: any) { try { processImages(node) const result = renderAst(node) return result } catch (err) { log('error rendering doc page', err) } }
#ifdef ENABLE_CUDA

#include "DEM2DForceComputeGPU.h"

#include "cuda_runtime.h"
#include "cuda.h"

#include <stdexcept>
#include <iostream>

#include <hoomd/extern/pybind/include/pybind11/pybind11.h>

using namespace std;

// NOTE(review): this translation unit *defines* a class named
// DEM2DForceComputeGPU while also including "DEM2DForceComputeGPU.h".
// If that header declares the same class, this is a redefinition and will
// not compile -- confirm whether this file was meant to implement the
// header's methods instead of re-declaring the class.
class DEM2DForceComputeGPU
{
public:
    // Binds the compute to a system definition and selects CUDA device 0.
    // Throws std::runtime_error if the device cannot be selected.
    // NOTE(review): hard-coding device 0 ignores multi-GPU configurations
    // and HOOMD's usual ExecutionConfiguration device selection -- confirm
    // this is intentional.
    DEM2DForceComputeGPU(std::shared_ptr<SystemDefinition> sysdef)
        : m_sysdef(sysdef)
    {
        // Initialize GPU resources and data structures
        cudaError_t cuda_status = cudaSetDevice(0);
        if (cuda_status != cudaSuccess)
        {
            throw runtime_error("Error setting CUDA device");
        }
    }

    // Placeholder for the GPU force computation; the body is unimplemented
    // (commented-out example only) as of this revision.
    void computeForcesGPU()
    {
        // Offload force computation to the GPU using CUDA
        // Implement the DEM force computation using CUDA kernels
        // Ensure proper memory management and error handling for CUDA operations
        // Example:
        // cudaError_t cuda_status = cudaMalloc((void**)&d_force, sizeof(float) * num_particles);
        // if (cuda_status != cudaSuccess)
        // {
        //     throw runtime_error("Error allocating device memory for forces");
        // }
        // ...
    }

private:
    // System definition this compute operates on (shared with the caller).
    std::shared_ptr<SystemDefinition> m_sysdef;

    // Define any necessary data members for GPU computation
    // Example:
    // float* d_force;  // Device array for storing forces
};

#endif
<gh_stars>1-10 const uuid = require('uuid'); const moment = require('moment'); const {update} = require('./../../util/dynamo/operations'); const {BOOK_TABLE_NAME} = process.env; exports.handler = async (event) => { console.log('--------------------'); console.log('---- updateBook/index.js'); console.log('--------------------'); console.log({event}); const dateNowStr = moment().format(); const updateBookInput = event.arguments.input; const bookId = updateBookInput.id; const updatedBook = { ...updateBookInput, updatedAt: dateNowStr }; // We remove ID from fields to update delete updatedBook.id; await update({ id: bookId, data: updatedBook, tableName: BOOK_TABLE_NAME }); // We add ID to return updated object. updatedBook.id = bookId; return updatedBook; };