text
stringlengths
1
1.05M
<filename>APM-Start/src/app/products/product-detail.component.ts import { Component, OnInit } from '@angular/core'; import { ActivatedRoute } from '@angular/router'; import { Router } from '@angular/router'; import { IProduct } from './product'; @Component({ selector: 'pm-product-detail', templateUrl: './product-detail.component.html', styleUrls: ['./product-detail.component.css'] }) export class ProductDetailComponent implements OnInit { pageTitle: string = 'Product Detail'; product: IProduct; constructor(private _activatedRoute: ActivatedRoute, private _router: Router) { } ngOnInit() { let productId = +this._activatedRoute.snapshot.paramMap.get('id'); this.product = { 'productId': productId, 'productName': 'A great product', 'productCode': 'string', 'releaseDate': 'string', 'price': 1, 'description': 'string', 'starRating': 4, 'imageUrl': 'string', } } onBackButtonClicked(): void { this._router.navigate(['/product']); } }
# Program to find the largest number of its pair in an array
def largest_pair(arr):
    """Return the adjacent pair of ``arr`` whose larger element is greatest.

    Scans consecutive pairs ``(arr[i], arr[i + 1])`` and keeps the first
    pair containing the overall maximum element.

    Bug fixed: the original compared against ``max([])`` on the first
    iteration, which raises ValueError (only IndexError was caught), so
    the function could never run to completion.

    :param arr: sequence of comparable values
    :return: best pair as a tuple, or ``[]`` when ``arr`` has fewer than
             two elements (no pair exists — mirrors the original's
             empty-list initial value)
    """
    best = []
    # zip stops one short of the end, so no IndexError handling is needed.
    for left, right in zip(arr, arr[1:]):
        pair = (left, right)
        if not best or max(pair) > max(best):
            best = pair
    return best


# Driver code
array = [2, 7, 3, 4, 10, 8]
print(largest_pair(array))
<filename>components/Survey/Results/index.js import React from 'react' import Coin from '../Coin' import styles from './Results.module.css' const Results = ({ result, isActive }) => ( <div className={styles.content}> <h1 className={styles.title}> {isActive ? 'Thanks for your input' : 'Survey is closed'} </h1> <h2 className={styles.subtitle}>Here are the results {isActive ? 'so far' : ''}:</h2> {Object.values(result) .sort((a, b) => b.rate - a.rate) .map((t, i) => ( <Result {...t} index={i} key={t.topicId} /> ))} </div> ) const Result = ({ name, rate, index }) => ( <div className={styles.result}> <div className={styles.topicName}> {index + 1}. {name} </div> <div className={styles.topicRate}> {rate} <Coin /> </div> </div> ) export default Results
from ._box import *
<gh_stars>10-100 #ifndef NEO_PLAYER_SHARED_H #define NEO_PLAYER_SHARED_H #ifdef _WIN32 #pragma once #endif #include "neo_predicted_viewmodel.h" #ifdef INCLUDE_WEP_PBK // Type to use if we need to ensure more than 32 bits in the mask. #define NEO_WEP_BITS_UNDERLYING_TYPE long long int #else // Using plain int if we don't need to ensure >32 bits in the mask. #define NEO_WEP_BITS_UNDERLYING_TYPE int #endif // All of these should be able to stack create even slower speeds (at least in original NT) #define NEO_SPRINT_MODIFIER 1.6 #define NEO_SLOW_MODIFIER 0.75 #define NEO_BASE_NORM_SPEED 136 #define NEO_BASE_SPRINT_SPEED (NEO_BASE_NORM_SPEED * NEO_SPRINT_MODIFIER) #define NEO_BASE_WALK_SPEED (NEO_BASE_NORM_SPEED * NEO_SLOW_MODIFIER) #define NEO_BASE_CROUCH_SPEED (NEO_BASE_NORM_SPEED * NEO_SLOW_MODIFIER) #define NEO_RECON_SPEED_MODIFIER 1.25 #define NEO_ASSAULT_SPEED_MODIFIER 1.0 #define NEO_SUPPORT_SPEED_MODIFIER 0.75 #define NEO_RECON_NORM_SPEED (NEO_BASE_NORM_SPEED * NEO_RECON_SPEED_MODIFIER) #define NEO_RECON_SPRINT_SPEED (NEO_BASE_SPRINT_SPEED * NEO_RECON_SPEED_MODIFIER) #define NEO_RECON_WALK_SPEED (NEO_BASE_WALK_SPEED * NEO_RECON_SPEED_MODIFIER) #define NEO_RECON_CROUCH_SPEED (NEO_BASE_CROUCH_SPEED * NEO_RECON_SPEED_MODIFIER) #define NEO_ASSAULT_NORM_SPEED (NEO_BASE_NORM_SPEED * NEO_ASSAULT_SPEED_MODIFIER) #define NEO_ASSAULT_SPRINT_SPEED (NEO_BASE_SPRINT_SPEED * NEO_ASSAULT_SPEED_MODIFIER) #define NEO_ASSAULT_WALK_SPEED (NEO_BASE_WALK_SPEED * NEO_ASSAULT_SPEED_MODIFIER) #define NEO_ASSAULT_CROUCH_SPEED (NEO_BASE_CROUCH_SPEED * NEO_ASSAULT_SPEED_MODIFIER) #define NEO_SUPPORT_NORM_SPEED (NEO_BASE_NORM_SPEED * NEO_SUPPORT_SPEED_MODIFIER) #define NEO_SUPPORT_SPRINT_SPEED (NEO_BASE_SPRINT_SPEED * NEO_SUPPORT_SPEED_MODIFIER) #define NEO_SUPPORT_WALK_SPEED (NEO_BASE_WALK_SPEED * NEO_SUPPORT_SPEED_MODIFIER) #define NEO_SUPPORT_CROUCH_SPEED (NEO_BASE_CROUCH_SPEED * NEO_SUPPORT_SPEED_MODIFIER) // Sanity checks for class speeds. 
// These values are divided with in some contexts, so should never equal zero. COMPILE_TIME_ASSERT(NEO_RECON_NORM_SPEED > 0); COMPILE_TIME_ASSERT(NEO_RECON_SPRINT_SPEED > 0); COMPILE_TIME_ASSERT(NEO_RECON_WALK_SPEED > 0); COMPILE_TIME_ASSERT(NEO_RECON_CROUCH_SPEED > 0); COMPILE_TIME_ASSERT(NEO_ASSAULT_NORM_SPEED > 0); COMPILE_TIME_ASSERT(NEO_ASSAULT_SPRINT_SPEED > 0); COMPILE_TIME_ASSERT(NEO_ASSAULT_WALK_SPEED > 0); COMPILE_TIME_ASSERT(NEO_ASSAULT_CROUCH_SPEED > 0); COMPILE_TIME_ASSERT(NEO_SUPPORT_NORM_SPEED > 0); COMPILE_TIME_ASSERT(NEO_SUPPORT_SPRINT_SPEED > 0); COMPILE_TIME_ASSERT(NEO_SUPPORT_WALK_SPEED > 0); COMPILE_TIME_ASSERT(NEO_SUPPORT_CROUCH_SPEED > 0); // Class speeds hierarchy should be: recon > assault > support. COMPILE_TIME_ASSERT(NEO_RECON_NORM_SPEED > NEO_ASSAULT_NORM_SPEED); COMPILE_TIME_ASSERT(NEO_ASSAULT_NORM_SPEED > NEO_SUPPORT_NORM_SPEED); COMPILE_TIME_ASSERT(NEO_RECON_SPRINT_SPEED > NEO_ASSAULT_SPRINT_SPEED); COMPILE_TIME_ASSERT(NEO_ASSAULT_SPRINT_SPEED > NEO_SUPPORT_SPRINT_SPEED); COMPILE_TIME_ASSERT(NEO_RECON_WALK_SPEED > NEO_ASSAULT_WALK_SPEED); COMPILE_TIME_ASSERT(NEO_ASSAULT_WALK_SPEED > NEO_SUPPORT_WALK_SPEED); COMPILE_TIME_ASSERT(NEO_RECON_CROUCH_SPEED > NEO_ASSAULT_CROUCH_SPEED); COMPILE_TIME_ASSERT(NEO_ASSAULT_CROUCH_SPEED > NEO_SUPPORT_CROUCH_SPEED); #define SUPER_JMP_COST 45.0f #define CLOAK_AUX_COST ((GetClass() == NEO_CLASS_RECON) ? 17.5f : 19.0f) // Original NT allows chaining superjumps up ramps, // so leaving this zeroed for enabling movement tricks. 
#define SUPER_JMP_DELAY_BETWEEN_JUMPS 0 // NEO Activities #define ACT_NEO_ATTACK ACT_RANGE_ATTACK1 #define ACT_NEO_RELOAD ACT_RELOAD #define ACT_NEO_IDLE_STAND ACT_IDLE #define ACT_NEO_IDLE_CROUCH ACT_CROUCHIDLE #define ACT_NEO_MOVE_RUN ACT_RUN #define ACT_NEO_MOVE_WALK ACT_WALK #define ACT_NEO_MOVE_CROUCH ACT_RUN_CROUCH #define ACT_NEO_DIE ACT_DIESIMPLE #define ACT_NEO_HOVER ACT_HOVER #define ACT_NEO_JUMP ACT_HOP #define ACT_NEO_SWIM ACT_SWIM #ifdef GAME_DLL #define NEO_ACT_TABLE_ENTRY_REQUIRED false #define NEO_IMPLEMENT_ACTTABLE(CNEOWepClass) acttable_t CNEOWepClass::m_acttable[] = {\ { ACT_NEO_ATTACK, ACT_NEO_ATTACK, NEO_ACT_TABLE_ENTRY_REQUIRED },\ { ACT_NEO_RELOAD, ACT_NEO_RELOAD, NEO_ACT_TABLE_ENTRY_REQUIRED },\ { ACT_NEO_IDLE_STAND, ACT_NEO_IDLE_STAND, NEO_ACT_TABLE_ENTRY_REQUIRED },\ { ACT_NEO_IDLE_STAND, ACT_NEO_IDLE_STAND, NEO_ACT_TABLE_ENTRY_REQUIRED },\ { ACT_NEO_IDLE_CROUCH, ACT_NEO_IDLE_CROUCH, NEO_ACT_TABLE_ENTRY_REQUIRED },\ { ACT_NEO_MOVE_RUN, ACT_NEO_MOVE_RUN, NEO_ACT_TABLE_ENTRY_REQUIRED },\ { ACT_NEO_MOVE_WALK, ACT_NEO_MOVE_WALK, NEO_ACT_TABLE_ENTRY_REQUIRED },\ { ACT_NEO_MOVE_CROUCH, ACT_NEO_MOVE_CROUCH, NEO_ACT_TABLE_ENTRY_REQUIRED },\ { ACT_NEO_DIE, ACT_NEO_DIE, NEO_ACT_TABLE_ENTRY_REQUIRED },\ { ACT_NEO_HOVER, ACT_NEO_HOVER, NEO_ACT_TABLE_ENTRY_REQUIRED },\ { ACT_NEO_JUMP, ACT_NEO_JUMP, NEO_ACT_TABLE_ENTRY_REQUIRED },\ { ACT_NEO_SWIM, ACT_NEO_SWIM, NEO_ACT_TABLE_ENTRY_REQUIRED },\ };IMPLEMENT_ACTTABLE(CNEOWepClass); #else #define NEO_IMPLEMENT_ACTTABLE(CNEOWepClass) #endif #define NEO_RECON_EYE_HEIGHT_STANDING 57.0 #define NEO_RECON_EYE_HEIGHT_DUCKING 43.0 #define NEO_ASSAULT_EYE_HEIGHT_STANDING 58.0 #define NEO_ASSAULT_EYE_HEIGHT_DUCKING 44.0 #define NEO_SUPPORT_EYE_HEIGHT_STANDING 60.0 #define NEO_SUPPORT_EYE_HEIGHT_DUCKING 47.0 #define HL2DM_DEFAULT_PLAYERMODEL_HEIGHT 74.0 #define NEO_RECON_PLAYERMODEL_HEIGHT 66.0 #define NEO_ASSAULT_PLAYERMODEL_HEIGHT 67.0 #define NEO_SUPPORT_PLAYERMODEL_HEIGHT 72.0 #define 
HL2DM_DEFAULT_PLAYERMODEL_DUCK_HEIGHT 36.0 #define NEO_RECON_PLAYERMODEL_DUCK_HEIGHT 48.0 #define NEO_ASSAULT_PLAYERMODEL_DUCK_HEIGHT 50.0 #define NEO_SUPPORT_PLAYERMODEL_DUCK_HEIGHT 61.0 // "Magic number" for scaling the hull ratios below to make them match original NT. // It makes the hull clearance slightly larger than the actual models (as is the // case with the original), but ensures that reachability of various places // inside NT levels remains the same. This specific value was found by binary // searching until the hulls matched within 1 unit in-game. #define NEO_HULL_TOLERANCE_SCALE 1.078125 #define NEO_RECON_MODEL_SCALE ((NEO_RECON_PLAYERMODEL_HEIGHT / HL2DM_DEFAULT_PLAYERMODEL_HEIGHT) * NEO_HULL_TOLERANCE_SCALE) #define NEO_ASSAULT_MODEL_SCALE ((NEO_ASSAULT_PLAYERMODEL_HEIGHT / HL2DM_DEFAULT_PLAYERMODEL_HEIGHT) * NEO_HULL_TOLERANCE_SCALE) #define NEO_SUPPORT_MODEL_SCALE ((NEO_SUPPORT_PLAYERMODEL_HEIGHT / HL2DM_DEFAULT_PLAYERMODEL_HEIGHT) * NEO_HULL_TOLERANCE_SCALE) #define NEO_RECON_DAMAGE_MODIFIER 1.2f #define NEO_ASSAULT_DAMAGE_MODIFIER 1.0f #define NEO_SUPPORT_DAMAGE_MODIFIER 0.56f #define NEO_ANIMSTATE_LEGANIM_TYPE LegAnimType_t::LEGANIM_9WAY #define NEO_ANIMSTATE_USES_AIMSEQUENCES true #define NEO_ANIMSTATE_MAX_BODY_YAW_DEGREES 90.0f enum NeoSkin { NEO_SKIN_FIRST = 0, NEO_SKIN_SECOND, NEO_SKIN_THIRD, NEO_SKIN_ENUM_COUNT }; enum NeoClass { NEO_CLASS_RECON = 0, NEO_CLASS_ASSAULT, NEO_CLASS_SUPPORT, // NOTENOTE: VIP *must* be last, because we are // using array offsets for recon/assault/support NEO_CLASS_VIP, NEO_CLASS_ENUM_COUNT }; enum NeoStar { STAR_NONE = 0, STAR_ALPHA, STAR_BRAVO, STAR_CHARLIE, STAR_DELTA, STAR_ECHO, STAR_FOXTROT }; #define NEO_DEFAULT_STAR STAR_ALPHA // Implemented by CNEOPlayer::m_fNeoFlags. // Rolling our own because Source FL_ flags already reserve all 32 bits, // and extending the type would require a larger refactor. #define NEO_FL_FREEZETIME (1 << 1) // Freeze player movement, but allow looking around. 
#if defined(CLIENT_DLL) && !defined(CNEOBaseCombatWeapon) #define CNEOBaseCombatWeapon C_NEOBaseCombatWeapon #endif #define COLOR_JINRAI COLOR_NEO_GREEN #define COLOR_NSF COLOR_NEO_BLUE #define COLOR_SPEC COLOR_NEO_ORANGE #define COLOR_NEO_BLUE Color(181, 216, 248, 255) #define COLOR_NEO_GREEN Color(192, 244, 196, 255) #define COLOR_NEO_ORANGE Color(243, 190, 52, 255) #define COLOR_NEO_WHITE Color(218, 217, 213, 255) class CNEO_Player; class CNEOBaseCombatWeapon; enum PlayerAnimEvent_t : uint; extern bool IsThereRoomForLeanSlide(CNEO_Player *player, const Vector &targetViewOffset, bool &outStartInSolid); // Is the player allowed to aim zoom with a weapon of this type? bool IsAllowedToZoom(CNEOBaseCombatWeapon *pWep); extern ConVar neo_recon_superjump_intensity; //ConVar sv_neo_resupply_anywhere("sv_neo_resupply_anywhere", "0", FCVAR_CHEAT | FCVAR_REPLICATED); inline const char *GetRankName(int xp) { if (xp < 0) { return "Rankless Dog"; } else if (xp < 4) { return "Private"; } else if (xp < 10) { return "Corporal"; } else if (xp < 20) { return "Sergeant"; } else { return "Lieutenant"; } } CBaseCombatWeapon* GetNeoWepWithBits(const CNEO_Player* player, const NEO_WEP_BITS_UNDERLYING_TYPE& neoWepBits); // Temporary helper for converting between these. Should refactor this to use the same structure for both. // Returns true on success. If returns false, the out value will not be set. bool PlayerAnimToPlayerAnimEvent(const PLAYER_ANIM playerAnim, PlayerAnimEvent_t& outAnimEvent); #endif // NEO_PLAYER_SHARED_H
#ifndef TRIUMF_SUPERCONDUCTIVITY_GC_HPP
#define TRIUMF_SUPERCONDUCTIVITY_GC_HPP

#include <cmath>

#include "triumf/superconductivity/phenomenology.hpp"

// TRIUMF: Canada's particle accelerator centre
namespace triumf {
//
namespace superconductivity {

// Gorter-Casimir phenomenological two-fluid model
namespace gc {

// temperature dependence of the (reduced) penetration depth
// Delegates to the generic phenomenological form with the exponent fixed
// at 4 (the Gorter-Casimir T^4 law).
template <typename T = double> T reduced_penetration_depth(T reduced_temperature) {
  return phenomenology::reduced_penetration_depth<T>(reduced_temperature, 4);
}

// temperature dependence of the (reduced) penetration depth
// NOTE(review): the `exponent` parameter is accepted but ignored — 4 is
// hard-coded in the delegated call. Possibly deliberate (GC fixes the
// exponent), but confirm the intent of keeping the parameter.
template <typename T = double>
T reduced_penetration_depth(T temperature, T critical_temperature, T exponent) {
  return phenomenology::reduced_penetration_depth<T>(temperature, critical_temperature, 4);
}

// temperature dependence of the penetration depth
// NOTE(review): `exponent` is likewise ignored here (4 hard-coded) — confirm.
template <typename T = double>
T penetration_depth(T temperature, T critical_temperature, T exponent, T lambda_0) {
  return phenomenology::penetration_depth<T>(temperature, critical_temperature, 4, lambda_0);
}

} // namespace gc
} // namespace superconductivity
} // namespace triumf

#endif // TRIUMF_SUPERCONDUCTIVITY_GC_HPP
// Copyright (c) 2017, <NAME>.
// All rights reserved.
// License: "BSD-3-Clause"

// Names that are allowed to exist on `global`: the built-ins captured
// before the harness loads, plus the names the harness itself installs.
// (The original declared `global_names` twice with `var`; folded into one.)
var global_names = Object.getOwnPropertyNames(global)
    .concat("fs", "VM", "ExternalObject", "test_success", "assert", "assert_equals");

require('./harness')

// Fails if anything other than whitelisted names or "_pershavm"-prefixed
// internals has leaked onto `global`. (Was copy-pasted 4x in the original.)
function assertNoLeakedGlobals() {
    Object.getOwnPropertyNames(global).forEach(function(e) {
        assert(e.indexOf("_pershavm") === 0 || global_names.indexOf(e) >= 0, e)
    })
}

assertNoLeakedGlobals()

// A VM's program state must persist across evaluateProgram calls...
var vm1 = new VM()
vm1.initialize()
var value = vm1.evaluateProgram("test=1")
assert_equals(value, 1)
assertNoLeakedGlobals()

// ...but must NOT be visible from a second, independent VM.
var vm2 = new VM()
vm2.initialize()
try {
    vm2.evaluateProgram("test")
    assert(false, "must throw ReferenceError")
} catch (err) {
    assert(err instanceof ReferenceError)
}
assertNoLeakedGlobals()

// vm1 still sees (and can mutate) its own `test` variable.
value = vm1.evaluateProgram("++test")
assert_equals(value, 2)
assertNoLeakedGlobals()

test_success()
import { types } from "taggr-shared";
import filterImagesWithLocationFactory from "./filter-images-with-location";
import { Type as DatabaseType } from "../../database";
import { Type as imageServiceType } from "../../services/image";

// Fixture: a single image record that carries GPS coordinates.
const IMAGE_WITH_LOCATION: types.ImageWithLocation = {
  hash: "hash-2",
  path: "file:///Users/path/image2.jpeg",
  rawPath: "Users/path/image2.jpeg",
  tags: [],
  location: { latitude: 1, longitude: 2 },
  creationDate: 2,
};

describe("handler - filter images with location", () => {
  // Collaborators injected into the factory; rebuilt per test.
  let db: DatabaseType, imageService: imageServiceType, sendToFrontend: any;

  beforeEach(() => {
    // Minimal stubs: only `db.get` and `filterImagesWithLocation` are exercised.
    db = { get: jest.fn() } as any;
    imageService = ({
      filterImagesWithLocation: jest.fn(() => []),
    } as any) as imageServiceType;
    sendToFrontend = jest.fn();
  });

  it("should send all images to FE, when DB images pass the filter", () => {
    // Override the stub so the filter reports one matching image.
    imageService = ({
      filterImagesWithLocation: () => [IMAGE_WITH_LOCATION],
    } as any) as imageServiceType;

    const filterImages = filterImagesWithLocationFactory({
      db,
      imageService,
      sendToFrontend,
    });

    filterImages({
      fromDate: null,
      toDate: null,
      tags: [],
    });

    // The handler forwards the filtered list to the frontend channel.
    expect(sendToFrontend).toHaveBeenCalledWith({
      payload: [IMAGE_WITH_LOCATION],
      type: "frontend_set-images-with-location",
    });
  });

  it("should not send images to FE, when DB images dont pass filter", () => {
    // Filter reports no matches; the handler still notifies the frontend,
    // just with an empty payload.
    imageService = ({
      filterImagesWithLocation: () => [],
    } as any) as imageServiceType;

    const filterImages = filterImagesWithLocationFactory({
      db,
      imageService,
      sendToFrontend,
    });

    filterImages({
      fromDate: null,
      toDate: null,
      tags: [],
    });

    expect(sendToFrontend).toHaveBeenCalledWith({
      payload: [], // no image passed the filter
      type: "frontend_set-images-with-location",
    });
  });
});
import { Depth } from '../Object/_Internal';
import { ReadonlyPart } from '../Object/Readonly';
import { List } from './List';
import { Cast } from '../Any/Cast';

/**
 * Make `L` readonly (deeply or not)
 * @param L to make readonly
 * @param depth (?=`'flat'`) 'deep' to do it deeply
 * @returns [[List]]
 * @example
 * ```ts
 * // Illustrative (delegates to ReadonlyPart, then re-casts to List):
 * // type Flat = Readonly<[1, [2]]>          // readonly [1, [2]]
 * // type Deep = Readonly<[1, [2]], 'deep'>  // nested tuples readonly too
 * ```
 */
export declare type Readonly<L extends List, depth extends Depth = 'flat'> = Cast<ReadonlyPart<L, depth>, List>;
require("dotenv").config();
import { Router } from "express";
const router = Router();
import moment from "moment-timezone";
import bcrypt from "bcrypt";
import xssFilters from "xss-filters";
import response from "../../assets/response";
import textPack from "../../assets/textPack.json";
import User from "../../assets/models/User";
import Token from "../../assets/token";
import Session from "../../assets/models/Session";
import Performance from "../../assets/tests/performance";

moment().locale("pt-br");
moment().tz("America/Maceio");

// Looks up a user by username, including the (normally hidden) password hash.
async function findUser(username) {
    try {
        return await User.findOne({ username }, "+password");
    } catch (err) {
        console.error(err);
        throw err;
    }
}

// True when the user has two-factor authentication enabled.
async function verifyTfaState(uid) {
    const userData = await User.findOne({ _id: uid });
    return Boolean(userData.state.tfaActivated);
}

// bcrypt comparison of a plaintext candidate against the stored hash.
async function comparePasswords(text, hash) {
    try {
        return await bcrypt.compare(text, hash);
    } catch (err) {
        console.error(err);
        throw err;
    }
}

// Records date/agent/ip/app/token of the latest successful login.
async function updateUserLastLogin({ id, agent, ip, app, token }) {
    try {
        await User.updateOne(
            { _id: id },
            {
                lastLogin: {
                    date: moment().valueOf(),
                    agent,
                    ip,
                    app,
                    token,
                },
            }
        );
    } catch (err) {
        console.error(err);
        throw err;
    }
}

// Appends `app` to the user's known-apps list on first login from it.
async function verifyAndUpdateUserApps(user, app) {
    try {
        if (!user.apps.includes(app)) {
            await User.updateOne(
                { _id: user.id },
                { apps: [...user.apps, app] }
            );
        }
    } catch (err) {
        console.error(err);
        throw err;
    }
}

// POST / — authenticates a user and opens a session.
// Errors are thrown as "<status>:<message>" strings and mapped to the
// HTTP response in the final catch.
router.post("/", async (req, res) => {
    const performanceLog = new Performance(req.baseUrl);
    let { username, password } = req.body;
    const app = req.headers["x-from-app"] || "noapp";
    const agent = req.headers["user-agent"];
    const ip = req.headers["x-ip"];

    if (!username || !password) {
        performanceLog.finish();
        return res
            .status(400)
            .json(response(true, textPack.standards.nullFields));
    }

    if (!textPack.authorize.apps.includes(app)) {
        performanceLog.finish();
        return res
            .status(400)
            .json(response(true, textPack.authorize.invalidApp));
    }

    username = xssFilters.uriQueryInHTMLData(username);

    try {
        // Bug fixed: the original attached .catch AFTER the state checks,
        // so emailNotConfirmed (401) and bannedUser (400) errors were
        // clobbered into a generic "500 unknownUser" response.
        const user = await findUser(username).catch(() => {
            throw new Error(`500:${textPack.users.login.unknownUser}`);
        });
        if (!user) {
            throw new Error(`500:${textPack.users.login.unknownUser}`);
        }
        if (!user.state.emailConfirmed) {
            throw new Error(`401:${textPack.users.login.emailNotConfirmed}`);
        }
        if (user.state.banned) {
            throw new Error(`400:${textPack.users.login.bannedUser}`);
        }

        const matches = await comparePasswords(password, user.password).catch(
            () => {
                throw new Error(`401:${textPack.users.login.wrongPassword}`);
            }
        );
        if (!matches) {
            throw new Error(`401:${textPack.users.login.wrongPassword}`);
        }

        const accessToken = Token().create({ id: user._id, app }, "access");
        const refreshToken = Token().create({ id: user._id, app }, "refresh");
        if (accessToken.error || refreshToken.error) {
            throw new Error(`500:${textPack.standards.responseError}`);
        }

        const sessionModel = Session().model({
            uid: user._id,
            username: user.username,
            agent,
            ip,
            tfaValidated: false,
            app,
            refreshToken: refreshToken.token,
        });
        if (sessionModel.error) {
            throw new Error(`400:${sessionModel.message}`);
        }

        try {
            await Session().create(sessionModel.session);
        } catch (err) {
            console.error(err);
            throw new Error(`500:${textPack.standards.responseError}`);
        }

        try {
            await updateUserLastLogin({
                id: user._id,
                agent,
                ip,
                app,
                token: refreshToken.token,
            });
        } catch (err) {
            throw new Error(`500:${textPack.standards.responseCriticError}`);
        }

        try {
            await verifyAndUpdateUserApps(user, app);
        } catch (err) {
            throw new Error(`500:${textPack.standards.responseCriticError}`);
        }

        req.session.refreshToken = refreshToken.token;
        performanceLog.finish();
        return res.json(
            response(false, textPack.users.login.logged, {
                accessToken: accessToken.token,
            })
        );
    } catch (err) {
        performanceLog.finish();
        // Bug fixed: res.status() was given a string; also split only on
        // the FIRST colon so messages containing ':' are not truncated.
        const sep = err.message.indexOf(":");
        const status = Number(err.message.slice(0, sep)) || 500;
        return res.status(status).json(response(true, err.message.slice(sep + 1)));
    }
});

export default router;
// Builds a sentence from the REVERSED input. NOTE: Array#reverse mutates
// its receiver, so `arr` itself is reversed from here on and the loop
// below walks the reversed order too.
const stringManipulation = (arr) => {
    arr.reverse();

    // "The <reversed words> and the colors are"
    let sentence = 'The ' + arr.join(' ') + ' and the colors are';

    // Append " <word> is" per word, except the final slot gets " are".
    const lastIndex = arr.length - 1;
    for (let i = 0; i <= lastIndex; i++) {
        sentence += i === lastIndex ? ' are' : ' ' + arr[i].toLowerCase() + ' is';
    }

    return sentence;
};

console.log(stringManipulation(['Cat', 'House', 'Green', 'Blue']));
#!/usr/bin/env python
# coding: utf-8

from coadlib.apps import InteractiveLoopApplication

# Interactive console app: keeps prompting for numbers and prints the
# running total when the user submits an empty line.
app = InteractiveLoopApplication(
    name="calcuation program",
    desc="please input number, and return total.",
    version="1.0",
    padding=4,
    margin=3,
    suffix=" > "
)

# Running total accumulated across loop iterations.
app.total = 0


@app.loop
def main():
    """One loop iteration: read input, add it to the total, or exit on
    empty input."""
    response = app.input_console("number", int, validate=False)

    if response == "":
        # Empty input ends the session: show the grand total and exit.
        app.write("total: {:,}".format(app.total))
        return app.STATUS_EXIT

    try:
        app.total += int(response)
        return app.STATUS_CONTINUE
    except (TypeError, ValueError):
        # Bug fixed: the original bare `except:` also swallowed
        # SystemExit/KeyboardInterrupt; only conversion failures are
        # expected here.
        app.write_error("Error: incorrect data.")
        return app.STATUS_CONTINUE


if __name__ == "__main__":
    app.write_usage()
    main()
import numpy as np


class ClassSelector:
    """Randomly samples a subset of class labels.

    With two or fewer labels the full list is returned unchanged;
    otherwise a sample of between 2 and ``len(classes) - 1`` labels is
    drawn without replacement.
    """

    def __init__(self, classes):
        # Pool of candidate labels to sample from.
        self.classes = classes

    def select_classes(self):
        """Return the selected labels (input as-is, or a numpy array)."""
        total = len(self.classes)
        if total <= 2:
            return self.classes
        # NOTE: randint's `high` bound is exclusive, so the full set is
        # never drawn — presumably intentional; confirm if not.
        sample_size = np.random.randint(low=2, high=total)
        return np.random.choice(self.classes, sample_size, replace=False)


# Example usage
classes = ['A', 'B', 'C', 'D', 'E']
selector = ClassSelector(classes)
selected_classes = selector.select_classes()
print(selected_classes)
def replace_word(txt, replace_word, replace_with):
    """Replace every space-delimited token of ``txt`` exactly equal to
    ``replace_word`` with ``replace_with``, rejoining with single spaces.

    Note: matching is exact per token, so punctuation attached to a word
    (e.g. "blue.") must be included in ``replace_word``.
    """
    words = txt.split(" ")
    for i in range(0, len(words)):
        if (words[i] == replace_word):
            words[i] = replace_with
    return " ".join(words)


s = "Roses are red, violets are blue."
# Bug fixed: the original demo passed "blue", which never matches the
# token "blue." (trailing period), so the sentence came back unchanged
# and the claimed output was wrong.
print(replace_word(s, "blue.", "red."))
# Roses are red, violets are red.
import React from "react";
import Layout from "components/Layout";
import Scroller from "components/Scroller";
import SEO from "components/Seo";
import Cursor from "components/Cursor";
import { shuffle } from "../libs/Utility";
import { graphql } from "gatsby";

// Page query: every Prismic "item" with its type, uid and a fluid-ready image.
export const query = graphql`
  {
    allPrismicItem {
      nodes {
        data {
          type
          image {
            alt
            localFile {
              childImageSharp {
                fluid(quality: 100) {
                  ...GatsbyImageSharpFluid
                }
              }
            }
          }
        }
        uid
      }
    }
  }
`;

// Home page: decorative SVG shapes plus a scroller fed with the Prismic
// items in a random order (shuffled on every render).
const IndexPage = ({ data }) => {
  const sortedHomeData = shuffle(data.allPrismicItem.nodes);
  return (
    <Layout pageName="home">
      <SEO title="Home" pageName="home" />
      {/* Decorative background circle */}
      <div className="page-home__circle">
        <svg viewBox="0 0 679 681">
          <path d="M445.77 341.44c0-42-25.61-80.69-64-97.54-38.14-16.73-84.25-8.77-114.66 19.64-30.66 28.65-41.89 73.67-27.91 113.3s51 67.55 92.85 70.67c41.49 3.08 82.43-19.78 101.59-56.71a107.26 107.26 0 0012.13-49.36z" />
          <path d="M658.42 341.5c-.37-40.94-7.67-81.89-23.37-119.83a319 319 0 00-168.7-172.81c-37.8-16.58-78.61-25-119.77-26.26a309.58 309.58 0 00-120 20.56c-38.4 14.89-74.09 36.43-104.2 64.64A319.12 319.12 0 0020.8 326.92a319.06 319.06 0 0080.28 226.53A324.08 324.08 0 00199.15 628c37.14 18.29 77.57 28.73 118.8 31.79a318.94 318.94 0 00227.66-74.85 319.08 319.08 0 00111.55-215c.9-9.47 1.18-18.94 1.26-28.44z" />
        </svg>
      </div>
      {/* Decorative background outline */}
      <div className="page-home__thed">
        <svg viewBox="0 0 1106 1041">
          <path d="M152.24 1019V414c0-220.35 178.63-399 399-399s399 178.63 399 399v609.83M10.5 407.67h1081.45" />
        </svg>
      </div>
      <Scroller prismicdata={sortedHomeData} />
      <Cursor pageName="home" />
    </Layout>
  );
};

export default IndexPage;
#---------------------------------------------------------------
# 02_Aggregate_Data_Table.py
# Joins per-CBG (census block group) tables — LST, canopy, distances,
# NLCD, potential area, climate zones, income/population, geometries —
# into one output table (plus a GEE-friendly variant with geometry).
#---------------------------------------------------------------
import os
import pandas as pd
import pdb

# Root paths
inDataDir = r'/Volumes/Seagate Backup Plus Drive/contour/tnc/urbantree/data/v9_Input_Data'
outTableDir = r'/Volumes/Seagate Backup Plus Drive/contour/tnc/urbantree/data/v9_Output_Tables'

# Output Tables
outTable = os.path.join(outTableDir, 'CBG_Table_v9.csv')
outTable_forGEE = os.path.join(outTableDir, 'CBG_Table_v9_forGEE.csv')

# Data Tables
lstDissTable = os.path.join(inDataDir, 'CalUHI_diss_vf_All.csv') # Rural LSTs
lstCBGTable = os.path.join(inDataDir, 'CalUHI_cb_vf_All.csv') # Urban LSTs
canopyTableDir = os.path.join(inDataDir, 'Canopy_Percent_Tables') # Canopy Percentages
distanceTable = os.path.join(inDataDir, 'Distances_CBG_Table_All.csv')# Distance from Coast and Centroid of City
nlcdTable = os.path.join(inDataDir, 'NLCD_Percent_CBG_Table_All.csv')
incPopTable = os.path.join(inDataDir, 'Income_Population_CBG_Table_All.csv')
potentialAreaTable = os.path.join(inDataDir, 'Potential_Area_CBG_Table_All.csv')
climateZoneTable = os.path.join(inDataDir, 'Climate_Zones_CBG_Table_All.csv')
clippedGeometryDir = os.path.join(inDataDir, 'Clipped_Geometries')

#---------------------------------------------------------------
# Read in Tables
#---------------------------------------------------------------
print('UHI Tables')
lstDiss = pd.read_csv(lstDissTable)
lstCBG = pd.read_csv(lstCBGTable)
print(lstCBG['NAME'].nunique(), ' Cities')

print('Canopy Percent')
# Canopy data is split across one CSV per city; concatenate them all.
canopyPercent = pd.concat([pd.read_csv(os.path.join(canopyTableDir, file)) for file in os.listdir(canopyTableDir)])
print(canopyPercent['NAME'].nunique(), ' Cities')

print('Distances')
distance = pd.read_csv(distanceTable)
print(distance['NAME'].nunique(), ' Cities')

print('NLCD')
nlcd = pd.read_csv(nlcdTable)
print(nlcd ['NAME'].nunique(), ' Cities')

print('Potential Area')
potArea = pd.read_csv(potentialAreaTable)
print(potArea['NAME'].nunique(), ' Cities')

print('Income & Population')
incPop = pd.read_csv(incPopTable)
print('No City Names in Dataset')

print('Climate Zones')
climateZones = pd.read_csv(climateZoneTable)
print(climateZones['NAME'].nunique(), ' Cities')

print('Clipped Geometries')
clippedGeometries = pd.concat([pd.read_csv(os.path.join(clippedGeometryDir, file)) for file in os.listdir(clippedGeometryDir)])
#print(clippedGeometries['NAME'].nunique(), ' Cities')

#---------------------------------------------------------------
# Prep Data and Merge As We Go
#---------------------------------------------------------------
# Prep CBG LST table (drop band columns, export metadata, duplicates)
lstCBG = lstCBG.loc[:,~lstCBG.columns.str.startswith('b')].drop(columns=['system:index','.geo']).drop_duplicates()
lstCBG = lstCBG[lstCBG.Urban_LST.notnull()] # Drop any CBGs with no Urban LST data
lstCBG = lstCBG.fillna(0)

# Prep Rural LST table
lstDiss = lstDiss.loc[:,lstDiss.columns.str.contains('Rural.*|NAME10')].rename(columns={'NAME10':'NAME'})
lstDiss = lstDiss[lstDiss.Rural_LST.notnull()] # Drop any CBGs with no Rural LST data

# Merge Urban and Rural tables to create main data table
data = lstCBG.merge(lstDiss, how='inner', on='NAME', copy=False).drop(columns = ['countyfp','funcstat','geoid_data','intptlat','intptlon','mtfcc','namelsad','statefp','tractce'])
print('Create Table with LST Data: ', len(data))

# Add Canopy Percent to main data table
# NOTE(review): the notnull() mask below is computed on the PRE-drop
# frame and applied to the deduplicated one; pandas aligns by index, but
# verify no misalignment is introduced by drop_duplicates().
canopyPercent = canopyPercent.drop(columns=['system:index']).drop_duplicates()[canopyPercent.Canopy_Percent.notnull()].rename(columns = {'Canopy_Percent': 'canopyPercent'})
data = canopyPercent[['canopyPercent','geoid']].merge(data, how='inner', on='geoid', copy=False)
print('Add Canopy Percent: ', len(data))

# Add Distances to main data table
distance = distance.drop(columns=['system:index']).drop_duplicates()[distance.Dist_coast.notnull()]
data = distance[['Dist_coast','Dist_urbCenter','geoid']].merge(data, how='inner', on='geoid', copy=False)
print('Add Distances: ', len(data))

# Add NLCD
nlcd = nlcd.drop(columns=['system:index']).drop_duplicates()[nlcd.NLCD_Dev_HighIntensity_Perc.notnull()]
data = data.merge(nlcd.loc[:,nlcd.columns.str.contains('geoid|NLCD.*')], how='inner', on='geoid', copy=False, sort=True)
print('Add NLCD: ', len(data))

# Add Potential Area ** and add .geo column here!! **
potArea = potArea.drop(columns=['system:index']).drop_duplicates()[potArea.PotentialArea_Acres.notnull()]\
    .rename(columns={'PotentialArea_Acres':'PotAreaAcr'})
data = data.merge(potArea[['PotAreaAcr', 'geoid']], how='inner', on='geoid', copy=False, sort=True)
print('Add Potential Area: ', len(data))

# Add Climate Zone (zone codes coerced to int; missing zones become 0)
climateZones = climateZones.fillna(0).astype({'Climate_Zone_State': 'int', 'Climate_Zone_DOE': 'int', 'Climate_Zone_Ecoregion': 'int', 'Climate_Zone_McPherson': 'int'})
data = data.merge(climateZones[['Climate_Zone_State', 'Climate_Zone_DOE', 'Climate_Zone_Ecoregion', 'Climate_Zone_McPherson', 'geoid']], how='inner', on='geoid', copy=False)
print('Add Climate Zones: ', len(data))

# Add Income & Population
incPop = incPop[['geoid','Population','Income','Households']].drop_duplicates()
data = data.merge(incPop, how='inner', on='geoid', copy=False, sort=True)
print('Add Income & Population: ', len(data))

data = data[data.Income.notnull()]
print('Drop Blocks with No Income Data: ', len(data))

# Drop Block Groups with Population = 0
data = data[data['Population'] != 0]
print('Drop Blocks with No Population: ', len(data))

# Clipped Geometries
#geos = clippedGeometries.drop(columns =['system:index','NAME'])
data = data.merge(clippedGeometries[['geoid','PolyArea_Acres','.geo']], how='inner', on='geoid', copy=False, sort=True)
print('Add Clipped Geometries and Drop CBGs that Dont Meet Size Requirements: ', len(data))
print('Cities: ', data['NAME'].nunique())
print(data.columns)

#---------------------------------------------------------------
# Save Geometries
# The GEE variant keeps the '.geo' column; the plain table drops it.
#---------------------------------------------------------------
if not os.path.isdir(outTableDir): os.mkdir(outTableDir)
data.fillna(0).to_csv(outTable_forGEE, index = False)
data.drop(columns = ['.geo']).to_csv(outTable, index = False)
<filename>pkg/buses/cache_test.go /* Copyright 2018 The Knative Authors Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package buses_test import ( "testing" channelsv1alpha1 "github.com/knative/eventing/pkg/apis/channels/v1alpha1" "github.com/knative/eventing/pkg/buses" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" ) const ( cacheDefaultNamespace = "default" cacheTestChannel = "test-channel" cacheTestSubscription = "test-subscription" ) func TestCacheErrsForUnknownChannel(t *testing.T) { cache := buses.NewCache() channelRef := buses.NewChannelReferenceFromNames(cacheTestChannel, cacheDefaultNamespace) var expected *channelsv1alpha1.Channel actual, err := cache.Channel(channelRef) if err == nil { t.Errorf("%s expected: %+v got: %+v", "Error", "<error>", err) } if expected != actual { t.Errorf("%s expected: %+v got: %+v", "Unexpected channel", nil, actual) } } func TestCacheRetrievesKnownChannel(t *testing.T) { cache := buses.NewCache() channelRef := buses.NewChannelReferenceFromNames(cacheTestChannel, cacheDefaultNamespace) expected := makeChannel(channelRef) cache.AddChannel(expected) actual, err := cache.Channel(channelRef) if err != nil { t.Errorf("%s expected: %+v got: %+v", "Unexpected error", nil, err) } if expected != actual { t.Errorf("%s expected: %+v got: %+v", "Channel", expected, actual) } } func TestCacheRemovesKnownChannel(t *testing.T) { cache := buses.NewCache() channelRef := buses.NewChannelReferenceFromNames(cacheTestChannel, cacheDefaultNamespace) channel := makeChannel(channelRef) 
cache.AddChannel(channel) cache.RemoveChannel(channel) var expected *channelsv1alpha1.Channel actual, err := cache.Channel(channelRef) if err == nil { t.Errorf("%s expected: %+v got: %+v", "Unexpected error", nil, err) } if expected != actual { t.Errorf("%s expected: %+v got: %+v", "Channel", expected, actual) } } func TestCacheNilChannel(t *testing.T) { cache := buses.NewCache() var channel *channelsv1alpha1.Channel cache.AddChannel(channel) cache.RemoveChannel(channel) } func TestCacheErrsForUnknownSubscription(t *testing.T) { cache := buses.NewCache() subscriptionRef := buses.NewSubscriptionReferenceFromNames(cacheTestSubscription, cacheDefaultNamespace) var expected *channelsv1alpha1.Subscription actual, err := cache.Subscription(subscriptionRef) if err == nil { t.Errorf("%s expected: %+v got: %+v", "Error", "<error>", err) } if expected != actual { t.Errorf("%s expected: %+v got: %+v", "Unexpected subscription", nil, actual) } } func TestCacheRetrievesKnownSubscription(t *testing.T) { cache := buses.NewCache() subscriptionRef := buses.NewSubscriptionReferenceFromNames(cacheTestSubscription, cacheDefaultNamespace) expected := makeSubscription(subscriptionRef) cache.AddSubscription(expected) actual, err := cache.Subscription(subscriptionRef) if err != nil { t.Errorf("%s expected: %+v got: %+v", "Unexpected error", nil, err) } if expected != actual { t.Errorf("%s expected: %+v got: %+v", "Subscription", expected, actual) } } func TestCacheRemovesKnownSubscription(t *testing.T) { cache := buses.NewCache() subscriptionRef := buses.NewSubscriptionReferenceFromNames(cacheTestSubscription, cacheDefaultNamespace) subscription := makeSubscription(subscriptionRef) cache.AddSubscription(subscription) cache.RemoveSubscription(subscription) var expected *channelsv1alpha1.Subscription actual, err := cache.Subscription(subscriptionRef) if err == nil { t.Errorf("%s expected: %+v got: %+v", "Unexpected error", nil, err) } if expected != actual { t.Errorf("%s expected: %+v got: 
%+v", "Subscription", expected, actual) } } func TestCacheNilSubscription(t *testing.T) { cache := buses.NewCache() var subscription *channelsv1alpha1.Subscription cache.AddSubscription(subscription) cache.RemoveSubscription(subscription) } func makeChannel(channelRef buses.ChannelReference) *channelsv1alpha1.Channel { return &channelsv1alpha1.Channel{ ObjectMeta: metav1.ObjectMeta{ Name: channelRef.Name, Namespace: channelRef.Namespace, }, } } func makeSubscription(subscriptionRef buses.SubscriptionReference) *channelsv1alpha1.Subscription { return &channelsv1alpha1.Subscription{ ObjectMeta: metav1.ObjectMeta{ Name: subscriptionRef.Name, Namespace: subscriptionRef.Namespace, }, } }
#!/bin/sh
# base16-shell (https://github.com/chriskempson/base16-shell)
# Base16 Shell template by Chris Kempson (http://chriskempson.com)
# Materia scheme by Defman21
#
# Applies the Materia base16 palette to the running terminal by emitting
# OSC escape sequences. Colors are "RR/GG/BB" hex triplets in the rgb:
# form expected by the OSC 4 sequence.

color00="26/32/38" # Base 00 - Black
color01="EC/5F/67" # Base 08 - Red
color02="8B/D6/49" # Base 0B - Green
color03="FF/CC/00" # Base 0A - Yellow
color04="89/DD/FF" # Base 0D - Blue
color05="82/AA/FF" # Base 0E - Magenta
color06="80/CB/C4" # Base 0C - Cyan
color07="CD/D3/DE" # Base 05 - White
color08="70/78/80" # Base 03 - Bright Black
color09=$color01 # Base 08 - Bright Red
color10=$color02 # Base 0B - Bright Green
color11=$color03 # Base 0A - Bright Yellow
color12=$color04 # Base 0D - Bright Blue
color13=$color05 # Base 0E - Bright Magenta
color14=$color06 # Base 0C - Bright Cyan
color15="FF/FF/FF" # Base 07 - Bright White
color16="EA/95/60" # Base 09
color17="EC/5F/67" # Base 0F
color18="2C/39/3F" # Base 01
color19="37/47/4F" # Base 02
color20="C9/CC/D3" # Base 04
color21="D5/DB/E5" # Base 06
color_foreground="CD/D3/DE" # Base 05
color_background="26/32/38" # Base 00

# Pick the escape-sequence wrappers appropriate for the terminal/multiplexer:
# tmux and GNU screen need the sequences wrapped in a DCS passthrough, the
# Linux console uses its own \e]P palette syntax, everything else gets plain
# OSC sequences.
if [ -n "$TMUX" ]; then
  # Tell tmux to pass the escape sequences through
  # (Source: http://permalink.gmane.org/gmane.comp.terminal-emulators.tmux.user/1324)
  put_template() { printf '\033Ptmux;\033\033]4;%d;rgb:%s\033\033\\\033\\' $@; }
  put_template_var() { printf '\033Ptmux;\033\033]%d;rgb:%s\033\033\\\033\\' $@; }
  put_template_custom() { printf '\033Ptmux;\033\033]%s%s\033\033\\\033\\' $@; }
elif [ "${TERM%%[-.]*}" = "screen" ]; then
  # GNU screen (screen, screen-256color, screen-256color-bce)
  put_template() { printf '\033P\033]4;%d;rgb:%s\007\033\\' $@; }
  put_template_var() { printf '\033P\033]%d;rgb:%s\007\033\\' $@; }
  put_template_custom() { printf '\033P\033]%s%s\007\033\\' $@; }
elif [ "${TERM%%-*}" = "linux" ]; then
  # Linux console: only the first 16 palette slots can be set; strip the
  # slashes from the RR/GG/BB triplet for the \e]P syntax.
  put_template() { [ $1 -lt 16 ] && printf "\e]P%x%s" $1 $(echo $2 | sed 's/\///g'); }
  put_template_var() { true; }
  put_template_custom() { true; }
else
  put_template() { printf '\033]4;%d;rgb:%s\033\\' $@; }
  put_template_var() { printf '\033]%d;rgb:%s\033\\' $@; }
  put_template_custom() { printf '\033]%s%s\033\\' $@; }
fi

# 16 color space
put_template 0  $color00
put_template 1  $color01
put_template 2  $color02
put_template 3  $color03
put_template 4  $color04
put_template 5  $color05
put_template 6  $color06
put_template 7  $color07
put_template 8  $color08
put_template 9  $color09
put_template 10 $color10
put_template 11 $color11
put_template 12 $color12
put_template 13 $color13
put_template 14 $color14
put_template 15 $color15

# 256 color space
put_template 16 $color16
put_template 17 $color17
put_template 18 $color18
put_template 19 $color19
put_template 20 $color20
put_template 21 $color21

# foreground / background / cursor color
if [ -n "$ITERM_SESSION_ID" ]; then
  # iTerm2 proprietary escape codes
  put_template_custom Pg CDD3DE # foreground
  put_template_custom Ph 263238 # background
  put_template_custom Pi FFFFFF # bold color
  put_template_custom Pj 37474F # selection color
  put_template_custom Pk CDD3DE # selected text color
  put_template_custom Pl CDD3DE # cursor
  put_template_custom Pm 263238 # cursor text
else
  put_template_var 10 $color_foreground
  if [ "$BASE16_SHELL_SET_BACKGROUND" != false ]; then
    put_template_var 11 $color_background
    if [ "${TERM%%-*}" = "rxvt" ]; then
      put_template_var 708 $color_background # internal border (rxvt)
    fi
  fi
  put_template_custom 12 ";7" # cursor (reverse video)
  printf "\033]12;#CDD3DE\007" # explicit cursor color (OSC 12)
fi

# clean up: remove the helper functions and palette variables so they do not
# leak into the interactive shell environment
unset -f put_template
unset -f put_template_var
unset -f put_template_custom
unset color00
unset color01
unset color02
unset color03
unset color04
unset color05
unset color06
unset color07
unset color08
unset color09
unset color10
unset color11
unset color12
unset color13
unset color14
unset color15
unset color16
unset color17
unset color18
unset color19
unset color20
unset color21
unset color_foreground
unset color_background
<reponame>tmstew/BIOPHYS535_FinalProject<filename>Final Version of Files For Submission/Final Project/BS/bp_searching_cs.py<gh_stars>0 ## Import Module import matplotlib.pyplot as plt import seaborn as sns; sns.set() import pandas as pd import numpy as np import io from sklearn import datasets, svm, metrics from sklearn.preprocessing import StandardScaler from sklearn.metrics import recall_score, precision_score ################################################################ ## This is used to search the critical chemical shift for ## prediciting base pair. ## neighbors = 1 (best f1-score) : 0.9229 +/- 0.0851 ## LOO model ## sklearn: clf = svm.SVC(gamma='auto') ################################################################ ################################################################ ## Read in and pre-process chemical shift data ################################################################ c=pd.read_csv('final_training.csv', sep=" ") cs_data=c.replace({'stack':1}) # Replace stack with 1 cs_data=cs_data.replace({'non-stack':0}) # Replace non-stack with 0 cs_data=cs_data.drop(columns=['base_pairing', 'orientation', 'sugar_puckering', 'pseudoknot']) if 'Unnamed: 0' in cs_data.columns: cs_data=cs_data.drop(columns=['Unnamed: 0']) cs_data=cs_data.rename(columns={'stacking':'class'}) ## Creat chemical shift list cs_list = list(cs_data.columns) cs_list.remove('id') cs_list.remove('resid') cs_list.remove('resname') cs_list.remove('class') num = len(cs_list) ################################################################ ## Global variables and functions ################################################################ NUMBER_CHEMICAL_SHIFT_TYPE = 18 neighbors = np.loadtxt("neighbors", dtype = int) f1 = np.loadtxt("f1", dtype = float) recall = np.loadtxt("recall", dtype = float) precision = np.loadtxt("precision", dtype = float) def get_cs_all(cs_all, id): ''' This function gets chemical shifts for a particular RNA. 
''' return(cs_all[cs_all.id == id]) def get_cs_residues(cs_i, resid, dummy=0): ''' This function return an array contining the chemical shifts for a particular residues in an RNA. ''' cs_tmp=cs_i[(cs_i.resid == resid)].drop(['id', 'resid', 'resname', 'class'], axis=1) info_tmp=cs_i[(cs_i.resid == resid)] if (cs_tmp.shape[0] != 1): return(dummy*np.ones(shape=(1, NUMBER_CHEMICAL_SHIFT_TYPE))) else: return(cs_tmp.values) def get_resnames(cs_i, resid, dummy="UNK"): ''' This function returns the residue name for specified residue (resid) ''' cs_tmp=cs_i[(cs_i.resid == resid)] if (cs_tmp.shape[0] != 1): return(dummy) else: return(cs_tmp['resname'].values[0]) def get_cs_features(cs_i, resid, neighbors): ''' This function return chemical shifts and resnames for residues (resid) and its neighbors ''' cs=[] resnames=[] for i in range(resid-neighbors, resid+neighbors+1): cs.append(get_cs_residues(cs_i, i)) resnames.append(get_resnames(cs_i, i)) return(resnames, np.array(cs)) def get_columns_name(neighbors=3, chemical_shift_types = NUMBER_CHEMICAL_SHIFT_TYPE): ''' Helper function that writes out the required column names ''' #tmp=2*neighbors+1 #neighbors=1 columns=['id', 'resname', 'resid', 'class'] for i in range(0, neighbors*NUMBER_CHEMICAL_SHIFT_TYPE): columns.append(i) return(columns) def write_out_resname(neighbors=1): ''' Helper function that writes out the column names associated resnames for a given residue and its neighbors ''' colnames = [] for i in range(1-neighbors-1, neighbors+1): if i < 0: colnames.append('R%s'%i) elif i > 0: colnames.append('R+%s'%i) else: colnames.append('R') return(colnames) def get_cs_features_rna(cs, neighbors=1, retain = ['id', 'class', 'resid']): ''' This function generates the complete required data frame an RNA ''' all_features = [] all_resnames = [] for resid in cs['resid'].unique(): resnames, features = get_cs_features(cs, resid, neighbors) all_features.append(features.flatten()) all_resnames.append(resnames) all_resnames = 
pd.DataFrame(all_resnames, dtype='object', columns = write_out_resname(neighbors)) all_features = pd.DataFrame(all_features, dtype='object') info = pd.DataFrame(cs[retain].values, dtype='object', columns = retain) return(pd.concat([info, all_resnames, all_features], axis=1)) def get_cs_features_rna_all(cs, neighbors): ''' This function generate a pandas dataframe containing training data for all RNAs Each row in the data frame should contain the class and chemical shifts for given residue and neighbors in a given RNA. ''' cs_new=pd.DataFrame() for pdbid in cs['id'].unique()[0 :]: tmp=get_cs_features_rna(get_cs_all(cs, id=pdbid), neighbors) cs_new=pd.concat([cs_new, tmp], axis=0) return(cs_new) ################################################################ ## Build model and test ################################################################ drop_names = ['id', 'class', 'resid'] target_name = 'class' col = 2*neighbors + 1 totalscore = [] totalrecall = [] totalprecision = [] for atom in cs_list: print(f"[SET UP DATA]: The chemical shift dropped is {atom}") tmp_c = cs_data.drop(atom, axis=1) cs_all = get_cs_features_rna_all(tmp_c, neighbors=neighbors) score = [] recall = [] precision = [] for pdbid in cs_all['id'].unique()[0 :]: print(f"[INFO]: Now predict RNA --> {pdbid}") ## Prepare test set test = cs_all[cs_all.id == pdbid] tmp = test.drop(drop_names, axis=1) tmp_testX = tmp.iloc[:, col :] tmp_testy = test[target_name] testX = tmp_testX.values testy = tmp_testy.values id = pd.unique(test.id) print(f"[INFO]: Test set --> {id}") ## Prepare training set train = cs_all[cs_all.id != pdbid] tmp = train.drop(drop_names, axis=1) tmp_trainX = tmp.iloc[:, col :] tmp_trainy = train[target_name] trainX = tmp_trainX.values trainy = tmp_trainy.values id = pd.unique(train.id) print(f"[INFO]: Test set --> {id}") ## Normalization of the training set and test set scaler = StandardScaler() scaler.fit(trainX) trainX_scaled = scaler.transform(trainX) testX_scaled = 
scaler.transform(testX) print(f"[INFO]: Scale the features") ## Train model clf = svm.SVC(gamma='auto') clf.fit(trainX_scaled, np.int_(trainy)) ## Test model predicted = clf.predict(testX_scaled) print(f"[INFO]: Running prediction") ## Recall recall.append(recall_score(np.int_(testy), predicted)) ## Precision precision.append(precision_score(np.int_(testy), predicted)) ## f1-score score.append(metrics.f1_score(np.int_(testy),predicted)) print(" ") ## Total f1-score totalscore.append(score) totalrecall.append(recall) totalprecision.append(precision) print(f"[INFO]: Now appending recall to total recall") print(f"[INFO]: Now appending f1-score to total score") print(f"[INFO]: Now appending precision to total precision") print(" ") print(" ") ################################################################ ## Prediction analysis ################################################################ ## Prepare data totalscore = np.asarray(totalscore) totalrecall = np.asarray(totalrecall) totalprecision = np.asarray(totalprecision) totalscore = totalscore.reshape(num,-1) totalrecall = totalrecall.reshape(num,-1) totalprecision = totalprecision.reshape(num,-1) pdbid_list = cs_data['id'].unique()[0 :] average_name = ['Average of f1 score'] std_name = ['Std of f1 score'] ## Analyze prediction result print(f"[ANALYSIS RESULT]: LOO model result") i = 0 average_f1 = [] average_recall = [] average_precision = [] std_f1 = [] std_recall = [] std_precision = [] while i < num : score = totalscore[i,:] recall = totalrecall[i,:] precision = totalprecision[i,:] average_f1.append(np.average(score)) average_recall.append(np.average(recall)) average_precision.append(np.average(precision)) std_f1.append(np.std(score)) std_recall.append(np.std(recall)) std_precision.append(np.std(precision)) print(f"[INFO]: The chemical shift {cs_list[i]} is dropped -->") print(f"[ANALYSIS RESULT]: The average f1-score is: {average_f1[i]} +/- {std_f1[i]}") print(f"[ANALYSIS RESULT]: The average recall is: 
{average_recall[i]} +/- {std_recall[i]}") print(f"[ANALYSIS RESULT]: The average precision is: {average_precision[i]} +/- {std_precision[i]}") print(" ") i += 1 ## Save f1-score data to a csv print(f"[INFO]: Save f1-score data") tmp_score = pd.DataFrame(totalscore, dtype = 'object', columns = pdbid_list, index = cs_list) tmp_average = pd.DataFrame(average_f1, dtype = 'object', columns = ['Average of f1-score'], index = cs_list) tmp_std = pd.DataFrame(std_f1, dtype = 'object', columns = ['Std of f1-score'], index = cs_list) tmp = pd.concat([tmp_score, tmp_average], axis=1) all_score = pd.concat([tmp, tmp_std], axis=1) all_score.to_csv('all_score.csv', sep=' ') ## Save recall data to a csv print(f"[INFO]: Save recall data") tmp_recall = pd.DataFrame(totalrecall, dtype = 'object', columns = pdbid_list, index = cs_list) tmp_average = pd.DataFrame(average_recall, dtype = 'object', columns = ['Average of recall'], index = cs_list) tmp_std = pd.DataFrame(std_recall, dtype = 'object', columns = ['Std of recall'], index = cs_list) tmp = pd.concat([tmp_score, tmp_average], axis=1) all_recall = pd.concat([tmp, tmp_std], axis=1) all_recall.to_csv('all_recall.csv', sep=' ') ## Save precision data to a csv print(f"[INFO]: Save recall data") tmp_recall = pd.DataFrame(totalprecision, dtype = 'object', columns = pdbid_list, index = cs_list) tmp_average = pd.DataFrame(average_precision, dtype = 'object', columns = ['Average of precision'], index = cs_list) tmp_std = pd.DataFrame(std_precision, dtype = 'object', columns = ['Std of precision'], index = cs_list) tmp = pd.concat([tmp_score, tmp_average], axis=1) all_precision = pd.concat([tmp, tmp_std], axis=1) all_precision.to_csv('all_precision.csv', sep=' ') ## Plot heatmap print(f"[INFO]: Plotting heatmap") delta_score = totalscore - f1 delta_score = delta_score * 100 plt.figure(figsize = (30,15)) ax = sns.heatmap(delta_score, center = 0, xticklabels = pdbid_list, yticklabels = cs_list) ax.set_title("f1 heatmap") 
plt.savefig("f1.png") delta_recall = totalrecall - recall delta_recall = delta_recall * 100 plt.figure(figsize = (30,15)) ax = sns.heatmap(delta_recall, center = 0, xticklabels = pdbid_list, yticklabels = cs_list) ax.set_title("recall heatmap") plt.savefig("recall.png") delta_precision = totalprecision - precision delta_precision = delta_precision * 100 plt.figure(figsize = (30,15)) ax = sns.heatmap(delta_precision, center = 0, xticklabels = pdbid_list, yticklabels = cs_list) ax.set_title("precision heatmap") plt.savefig("precision.png")
package mindustry.world.blocks.logic;

import mindustry.world.Block;

/**
 * Base block type for logic-related blocks.
 * <p>
 * Adds no behavior of its own beyond {@link Block}; the constructor simply
 * forwards the block name. Presumably subclasses (or content definitions)
 * configure the actual logic behavior — confirm against callers.
 */
public class LogicBlock extends Block{

    /**
     * @param name the internal content name passed through to {@link Block}
     */
    public LogicBlock(String name){
        super(name);
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.catalog;

import java.net.URISyntaxException;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.management.MXBean;

/**
 * Catalog of components, data formats, models (EIPs), languages, and more from this Apache Camel release.
 */
@MXBean
public interface CamelCatalog {

    /**
     * To plugin a custom {@link RuntimeProvider} that amends the catalog to only include information that is supported on the runtime.
     */
    void setRuntimeProvider(RuntimeProvider provider);

    /**
     * Gets the {@link RuntimeProvider} in use.
     *
     * @return the runtime provider in use
     */
    RuntimeProvider getRuntimeProvider();

    /**
     * Enables caching of the resources which makes the catalog faster, but keeps data in memory during caching.
     * <p/>
     * The catalog does not cache by default.
     */
    void enableCache();

    /**
     * Whether caching has been enabled.
     */
    boolean isCaching();

    /**
     * To plugin a custom {@link SuggestionStrategy} to provide suggestion for unknown options
     */
    void setSuggestionStrategy(SuggestionStrategy suggestionStrategy);

    /**
     * Gets the {@link SuggestionStrategy} in use
     */
    SuggestionStrategy getSuggestionStrategy();

    /**
     * To plugin a custom {@link VersionManager} to load other versions of Camel the catalog should use.
     */
    void setVersionManager(VersionManager versionManager);

    /**
     * Gets the {@link VersionManager} in use
     */
    VersionManager getVersionManager();

    /**
     * Adds a 3rd party component to this catalog.
     *
     * @param name      the component name
     * @param className the fully qualified class name for the component class
     */
    void addComponent(String name, String className);

    /**
     * Adds a 3rd party component to this catalog.
     *
     * @param name       the component name
     * @param className  the fully qualified class name for the component class
     * @param jsonSchema the component JSon schema
     */
    void addComponent(String name, String className, String jsonSchema);

    /**
     * Adds a 3rd party data format to this catalog.
     *
     * @param name      the data format name
     * @param className the fully qualified class name for the data format class
     */
    void addDataFormat(String name, String className);

    /**
     * Adds a 3rd party data format to this catalog.
     *
     * @param name       the data format name
     * @param className  the fully qualified class name for the data format class
     * @param jsonSchema the data format JSon schema
     */
    void addDataFormat(String name, String className, String jsonSchema);

    /**
     * The version of this Camel Catalog
     */
    String getCatalogVersion();

    /**
     * Attempt to load the Camel version to be used by the catalog.
     * <p/>
     * Loading the camel-catalog JAR of the given version of choice may require internet access
     * to download the JAR from Maven central. You can pre download the JAR and install in a local
     * Maven repository to avoid internet access for offline environments.
     * <p/>
     * When loading a new version the cache will be invalidated.
     *
     * @param version the Camel version such as <tt>2.17.1</tt>
     * @return <tt>true</tt> if the version was loaded, <tt>false</tt> if not.
     */
    boolean loadVersion(String version);

    /**
     * Gets the current loaded Camel version used by the catalog.
     */
    String getLoadedVersion();

    /**
     * Find all the component names from the Camel catalog
     */
    List<String> findComponentNames();

    /**
     * Find all the data format names from the Camel catalog
     */
    List<String> findDataFormatNames();

    /**
     * Find all the language names from the Camel catalog
     */
    List<String> findLanguageNames();

    /**
     * Find all the model names from the Camel catalog
     */
    List<String> findModelNames();

    /**
     * Find all the other (miscellaneous) names from the Camel catalog
     */
    List<String> findOtherNames();

    /**
     * Find all the component names from the Camel catalog that matches the label
     */
    List<String> findComponentNames(String filter);

    /**
     * Find all the data format names from the Camel catalog that matches the label
     */
    List<String> findDataFormatNames(String filter);

    /**
     * Find all the language names from the Camel catalog that matches the label
     */
    List<String> findLanguageNames(String filter);

    /**
     * Find all the model names from the Camel catalog that matches the label
     */
    List<String> findModelNames(String filter);

    /**
     * Find all the other (miscellaneous) names from the Camel catalog that matches the label
     */
    List<String> findOtherNames(String filter);

    /**
     * Returns the component information as JSon format.
     *
     * @param name the component name
     * @return component details in JSon
     */
    String componentJSonSchema(String name);

    /**
     * Returns the data format information as JSon format.
     *
     * @param name the data format name
     * @return data format details in JSon
     */
    String dataFormatJSonSchema(String name);

    /**
     * Returns the language information as JSon format.
     *
     * @param name the language name
     * @return language details in JSon
     */
    String languageJSonSchema(String name);

    /**
     * Returns the other (miscellaneous) information as JSon format.
     *
     * @param name the other (miscellaneous) name
     * @return other (miscellaneous) details in JSon
     */
    String otherJSonSchema(String name);

    /**
     * Returns the model information as JSon format.
     *
     * @param name the model name
     * @return model details in JSon
     */
    String modelJSonSchema(String name);

    /**
     * Returns the component documentation as Ascii doc format.
     *
     * @param name the component name
     * @return component documentation in ascii doc format.
     */
    String componentAsciiDoc(String name);

    /**
     * Returns the component documentation as HTML format.
     *
     * @param name the component name
     * @return component documentation in html format.
     */
    String componentHtmlDoc(String name);

    /**
     * Returns the data format documentation as Ascii doc format.
     *
     * @param name the data format name
     * @return data format documentation in ascii doc format.
     */
    String dataFormatAsciiDoc(String name);

    /**
     * Returns the data format documentation as HTML format.
     *
     * @param name the data format name
     * @return data format documentation in HTML format.
     */
    String dataFormatHtmlDoc(String name);

    /**
     * Returns the language documentation as Ascii doc format.
     *
     * @param name the language name
     * @return language documentation in ascii doc format.
     */
    String languageAsciiDoc(String name);

    /**
     * Returns the language documentation as HTML format.
     *
     * @param name the language name
     * @return language documentation in HTML format.
     */
    String languageHtmlDoc(String name);

    /**
     * Returns the other (miscellaneous) documentation as Ascii doc format.
     *
     * @param name the other (miscellaneous) name
     * @return other (miscellaneous) documentation in ascii doc format.
     */
    String otherAsciiDoc(String name);

    /**
     * Returns the other (miscellaneous) documentation as HTML format.
     *
     * @param name the other (miscellaneous) name
     * @return other (miscellaneous) documentation in HTML format.
     */
    String otherHtmlDoc(String name);

    /**
     * Find all the unique label names all the components are using.
     *
     * @return a set of all the labels.
     */
    Set<String> findComponentLabels();

    /**
     * Find all the unique label names all the data formats are using.
     *
     * @return a set of all the labels.
     */
    Set<String> findDataFormatLabels();

    /**
     * Find all the unique label names all the languages are using.
     *
     * @return a set of all the labels.
     */
    Set<String> findLanguageLabels();

    /**
     * Find all the unique label names all the models are using.
     *
     * @return a set of all the labels.
     */
    Set<String> findModelLabels();

    /**
     * Find all the unique label names all the other (miscellaneous) are using.
     *
     * @return a set of all the labels.
     */
    Set<String> findOtherLabels();

    /**
     * Returns the Apache Camel Maven Archetype catalog in XML format.
     *
     * @return the catalog in XML
     */
    String archetypeCatalogAsXml();

    /**
     * Returns the Camel Spring XML schema
     *
     * @return the spring XML schema
     */
    String springSchemaAsXml();

    /**
     * Returns the Camel Blueprint XML schema
     *
     * @return the blueprint XML schema
     */
    String blueprintSchemaAsXml();

    /**
     * Parses the endpoint uri and constructs a key/value properties of each option
     *
     * @param uri the endpoint uri
     * @return properties as key value pairs of each endpoint option
     */
    Map<String, String> endpointProperties(String uri) throws URISyntaxException;

    /**
     * Parses the endpoint uri and constructs a key/value properties of only the lenient properties (eg custom options)
     * <p/>
     * For example using the HTTP components to provide query parameters in the endpoint uri.
     *
     * @param uri the endpoint uri
     * @return properties as key value pairs of each lenient properties
     */
    Map<String, String> endpointLenientProperties(String uri) throws URISyntaxException;

    /**
     * Validates the pattern whether its a valid time pattern.
     *
     * @param pattern the pattern such as 5000, 5s, 5sec, 4min, 4m30s, 1h, etc.
     * @return <tt>true</tt> if valid, <tt>false</tt> if invalid
     */
    boolean validateTimePattern(String pattern);

    /**
     * Parses and validates the endpoint uri and constructs a key/value properties of each option.
     *
     * @param uri the endpoint uri
     * @return validation result
     */
    EndpointValidationResult validateEndpointProperties(String uri);

    /**
     * Parses and validates the endpoint uri and constructs a key/value properties of each option.
     * <p/>
     * The option ignoreLenientProperties can be used to ignore components that uses lenient properties.
     * When this is true, then the uri validation is stricter but would fail on properties that are not part of the component
     * but in the uri because of using lenient properties.
     * For example using the HTTP components to provide query parameters in the endpoint uri.
     *
     * @param uri                     the endpoint uri
     * @param ignoreLenientProperties whether to ignore components that uses lenient properties.
     * @return validation result
     */
    EndpointValidationResult validateEndpointProperties(String uri, boolean ignoreLenientProperties);

    /**
     * Parses and validates the endpoint uri and constructs a key/value properties of each option.
     * <p/>
     * The option ignoreLenientProperties can be used to ignore components that uses lenient properties.
     * When this is true, then the uri validation is stricter but would fail on properties that are not part of the component
     * but in the uri because of using lenient properties.
     * For example using the HTTP components to provide query parameters in the endpoint uri.
     *
     * @param uri                     the endpoint uri
     * @param ignoreLenientProperties whether to ignore components that uses lenient properties.
     * @param consumerOnly            whether the endpoint is only used as a consumer
     * @param producerOnly            whether the endpoint is only used as a producer
     * @return validation result
     */
    EndpointValidationResult validateEndpointProperties(String uri, boolean ignoreLenientProperties, boolean consumerOnly, boolean producerOnly);

    /**
     * Parses and validates the simple expression.
     * <p/>
     * <b>Important:</b> This requires having <tt>camel-core</tt> on the classpath
     *
     * @param simple the simple expression
     * @return validation result
     * @deprecated use {@link #validateSimpleExpression(ClassLoader, String)}
     */
    @Deprecated
    SimpleValidationResult validateSimpleExpression(String simple);

    /**
     * Parses and validates the simple expression.
     * <p/>
     * <b>Important:</b> This requires having <tt>camel-core</tt> on the classpath
     *
     * @param classLoader a custom classloader to use for loading the language from the classpath, or <tt>null</tt> for using default classloader
     * @param simple      the simple expression
     * @return validation result
     */
    SimpleValidationResult validateSimpleExpression(ClassLoader classLoader, String simple);

    /**
     * Parses and validates the simple predicate
     * <p/>
     * <b>Important:</b> This requires having <tt>camel-core</tt> on the classpath
     *
     * @param simple the simple predicate
     * @return validation result
     * @deprecated use {@link #validateSimplePredicate(ClassLoader, String)}
     */
    @Deprecated
    SimpleValidationResult validateSimplePredicate(String simple);

    /**
     * Parses and validates the simple predicate
     * <p/>
     * <b>Important:</b> This requires having <tt>camel-core</tt> on the classpath
     *
     * @param classLoader a custom classloader to use for loading the language from the classpath, or <tt>null</tt> for using default classloader
     * @param simple      the simple predicate
     * @return validation result
     */
    SimpleValidationResult validateSimplePredicate(ClassLoader classLoader, String simple);

    /**
     * Parses and validates the language as a predicate
     * <p/>
     * <b>Important:</b> This requires having <tt>camel-core</tt> and the language dependencies on the classpath
     *
     * @param classLoader a custom classloader to use for loading the language from the classpath, or <tt>null</tt> for using default classloader
     * @param language    the name of the language
     * @param text        the predicate text
     * @return validation result
     */
    LanguageValidationResult validateLanguagePredicate(ClassLoader classLoader, String language, String text);

    /**
     * Parses and validates the language as an expression
     * <p/>
     * <b>Important:</b> This requires having <tt>camel-core</tt> and the language dependencies on the classpath
     *
     * @param classLoader a custom classloader to use for loading the language from the classpath, or <tt>null</tt> for using default classloader
     * @param language    the name of the language
     * @param text        the expression text
     * @return validation result
     */
    LanguageValidationResult validateLanguageExpression(ClassLoader classLoader, String language, String text);

    /**
     * Returns the component name from the given endpoint uri
     *
     * @param uri the endpoint uri
     * @return the component name (aka scheme), or <tt>null</tt> if not possible to determine
     */
    String endpointComponentName(String uri);

    /**
     * Creates an endpoint uri in Java style from the information in the json schema
     *
     * @param scheme the endpoint schema
     * @param json   the json schema with the endpoint properties
     * @param encode whether to URL encode the returned uri or not
     * @return the constructed endpoint uri
     * @throws java.net.URISyntaxException is thrown if there is encoding error
     */
    String asEndpointUri(String scheme, String json, boolean encode) throws URISyntaxException;

    /**
     * Creates an endpoint uri in XML style (eg escape &amp; as &amp;amp;) from the information in the json schema
     *
     * @param scheme the endpoint schema
     * @param json   the json schema with the endpoint properties
     * @param encode whether to URL encode the returned uri or not
     * @return the constructed endpoint uri
     * @throws java.net.URISyntaxException is thrown if there is encoding error
     */
    String asEndpointUriXml(String scheme, String json, boolean encode) throws URISyntaxException;

    /**
     * Creates an endpoint uri in Java style from the information from the properties
     *
     * @param scheme     the endpoint schema
     * @param properties the properties as key value pairs
     * @param encode     whether to URL encode the returned uri or not
     * @return the constructed endpoint uri
     * @throws java.net.URISyntaxException is thrown if there is encoding error
     */
    String asEndpointUri(String scheme, Map<String, String> properties, boolean encode) throws URISyntaxException;

    /**
     * Creates an endpoint uri in XML style (eg escape &amp; as &amp;amp;) from the information from the properties
     *
     * @param scheme     the endpoint schema
     * @param properties the properties as key value pairs
     * @param encode     whether to URL encode the returned uri or not
     * @return the constructed endpoint uri
     * @throws java.net.URISyntaxException is thrown if there is encoding error
     */
    String asEndpointUriXml(String scheme, Map<String, String> properties, boolean encode) throws URISyntaxException;

    /**
     * Lists all the components summary details in JSon
     */
    String listComponentsAsJson();

    /**
     * Lists all the data formats summary details in JSon
     */
    String listDataFormatsAsJson();

    /**
     * Lists all the languages summary details in JSon
     */
    String listLanguagesAsJson();

    /**
     * Lists all the models (EIPs) summary details in JSon
     */
    String listModelsAsJson();

    /**
     * Lists all the others (miscellaneous) summary details in JSon
     */
    String listOthersAsJson();

    /**
     * Reports a summary what the catalog contains in JSon
     */
    String summaryAsJson();
}
<reponame>steinarb/ukelonn
/*
 * Copyright 2018-2021 <NAME>
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and limitations
 * under the License.
 */
package no.priv.bang.ukelonn.api.resources;

import static no.priv.bang.ukelonn.testutils.TestUtils.*;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.*;
import static org.mockito.Mockito.*;

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import javax.sql.DataSource;
import javax.ws.rs.InternalServerErrorException;

import org.junit.jupiter.api.Test;

import no.priv.bang.osgi.service.mocks.logservice.MockLogService;
import no.priv.bang.ukelonn.UkelonnService;
import no.priv.bang.ukelonn.beans.TransactionType;
import no.priv.bang.ukelonn.backend.UkelonnServiceProvider;

/**
 * Unit tests for the {@code AdminPaymenttype} JAX-RS resource.
 *
 * The success-path tests inject a Mockito mock of {@link UkelonnService};
 * the failure-path tests instead inject a real {@link UkelonnServiceProvider}
 * wired to a mock {@link DataSource} whose statements throw
 * {@link SQLException}, to verify that SQL failures surface to REST clients
 * as {@link InternalServerErrorException} (HTTP 500).
 */
class AdminPaymenttypeTest {

    /**
     * Modifying a payment type should return the updated list of payment
     * types, reflecting the changed amount.
     */
    @Test
    void testModifyPaymenttype() {
        // Create the resource that is to be tested
        AdminPaymenttype resource = new AdminPaymenttype();

        // Inject fake OSGi service UkelonnService
        UkelonnService ukelonn = mock(UkelonnService.class);
        resource.ukelonn = ukelonn;

        // Find a payment type to modify
        List<TransactionType> paymenttypes = getPaymenttypes();
        TransactionType paymenttype = paymenttypes.get(1);
        Double originalAmount = paymenttype.getTransactionAmount();

        // Modify the amount of the payment type
        paymenttype = TransactionType.with(paymenttype).transactionAmount(originalAmount + 1).build();
        when(ukelonn.modifyPaymenttype(paymenttype)).thenReturn(Arrays.asList(paymenttype));

        // Run the method that is to be tested
        List<TransactionType> updatedPaymenttypes = resource.modify(paymenttype);

        // Verify that the updated amount is larger than the original amount
        TransactionType updatedPaymenttype = updatedPaymenttypes.get(0);
        assertThat(updatedPaymenttype.getTransactionAmount()).isGreaterThan(originalAmount);
    }

    /**
     * A SQLException from the database during modify should be translated
     * into an InternalServerErrorException (HTTP 500).
     */
    @Test
    void testModifyPaymenttypeFailure() throws Exception {
        // Create the resource that is to be tested
        AdminPaymenttype resource = new AdminPaymenttype();

        // Inject fake OSGi service UkelonnService
        UkelonnServiceProvider ukelonn = new UkelonnServiceProvider();
        resource.ukelonn = ukelonn;

        // Inject a fake OSGi log service
        MockLogService logservice = new MockLogService();
        resource.setLogservice(logservice);
        ukelonn.setLogservice(logservice);

        // Create a mock database that throws exceptions and inject it
        DataSource datasource = mock(DataSource.class);
        Connection connection = mock(Connection.class);
        when(datasource.getConnection()).thenReturn(connection);
        PreparedStatement statement = mock(PreparedStatement.class);
        when(connection.prepareStatement(anyString())).thenReturn(statement);
        when(statement.executeUpdate()).thenThrow(SQLException.class);
        ukelonn.setDataSource(datasource);

        // Create a non-existing payment type
        TransactionType paymenttype = TransactionType.with()
            .id(-2001)
            .transactionTypeName("Bar")
            .transactionAmount(0.0)
            .transactionIsWagePayment(true)
            .build();

        // Try update the payment type in the database, which should cause an
        // "500 Internal Server Error" exception
        assertThrows(InternalServerErrorException.class, () -> resource.modify(paymenttype));
    }

    /**
     * Creating a payment type should return a payment type list that has
     * grown by one entry.
     */
    @Test
    void testCreatePaymenttype() {
        // Create the resource that is to be tested
        AdminPaymenttype resource = new AdminPaymenttype();

        // Inject fake OSGi service UkelonnService
        UkelonnService ukelonn = mock(UkelonnService.class);
        resource.ukelonn = ukelonn;

        // Get the list of payment types before adding a new job type
        List<TransactionType> originalPaymenttypes = getPaymenttypes();
        List<TransactionType> newPaymenttypes = new ArrayList<>(originalPaymenttypes);

        // Create new payment type
        TransactionType paymenttype = TransactionType.with()
            .id(-2001)
            .transactionTypeName("Bar")
            .transactionAmount(0.0)
            .transactionIsWagePayment(true)
            .build();
        newPaymenttypes.add(paymenttype);
        when(ukelonn.createPaymenttype(any())).thenReturn(newPaymenttypes);

        // Add the payment type to the database
        List<TransactionType> updatedPaymenttypes = resource.create(paymenttype);

        // Verify that a new jobtype has been added
        assertThat(updatedPaymenttypes.size()).isGreaterThan(originalPaymenttypes.size());
    }

    /**
     * A SQLException from the database during create should be translated
     * into an InternalServerErrorException (HTTP 500).
     */
    @Test
    void testCreatePaymenttypeFailure() throws Exception {
        // Create the resource that is to be tested
        AdminPaymenttype resource = new AdminPaymenttype();

        // Inject fake OSGi service UkelonnService
        UkelonnServiceProvider ukelonn = new UkelonnServiceProvider();
        resource.ukelonn = ukelonn;

        // Inject a fake OSGi log service
        MockLogService logservice = new MockLogService();
        ukelonn.setLogservice(logservice);
        resource.setLogservice(logservice);

        // Create a mock database that throws exceptions and inject it
        DataSource datasource = mock(DataSource.class);
        Connection connection = mock(Connection.class);
        when(datasource.getConnection()).thenReturn(connection);
        PreparedStatement statement = mock(PreparedStatement.class);
        when(connection.prepareStatement(anyString())).thenReturn(statement);
        when(statement.executeUpdate()).thenThrow(SQLException.class);
        ukelonn.setDataSource(datasource);

        // Create new payment type
        TransactionType paymenttype = TransactionType.with()
            .id(-2001)
            .transactionTypeName("Bar")
            .transactionAmount(0.0)
            .transactionIsWagePayment(true)
            .build();

        // Try update the jobtype in the database, which should cause an
        // "500 Internal Server Error" exception
        assertThrows(InternalServerErrorException.class, () -> resource.create(paymenttype));
    }

}
# The Book of Ruby - http://www.sapphiresteel.com
#
# Demonstrates two threads interleaving output: one prints words while the
# other prints numbers, then both are joined and the elapsed time is shown.

words = ["hello", "world", "goodbye", "mars" ]
numbers = [1,2,3,4,5,6,7,8,9,10]

startTime = Time.new
# BUGFIX: the original formatted the Time object itself ("%10.9f" % startTime),
# which raises TypeError because %f needs a Float-convertible argument.
# Convert with .to_f, matching how endTime is handled below.
puts( "Start: %10.9f" % startTime.to_f )

wordsThread = Thread.new{ words.each{ |word| puts( word ) } }
numbersThread = Thread.new{ numbers.each{ |number| puts( number ) } }

# Wait for both threads to finish before measuring the end time.
[wordsThread, numbersThread].each{ |t| t.join }

endTime = Time.new
puts( "End: %10.9f" % endTime.to_f )

# Time - Time yields the elapsed seconds as a Float-convertible value.
totalTime = endTime-startTime
puts( "Total Time: %10.9f" % totalTime.to_f )
<gh_stars>10-100 import React, { useRef } from 'react' import { arrowButton } from '../constants' import styles from './styled-counter.module.css' function StyledCounter(props) { const { duration, setDuration, min, max } = props // unit of "duration" in minutes const incrTimeoutID = useRef(null) const decrTimeoutID = useRef(null) const getNewRate = (rate) => { switch (rate) { case 1: rate = 2 break case 2: rate = 5 break case 5: rate = 10 break case 10: rate = 15 break default: break } return rate } const incr = (rate, oldDuration) => { let newDuration = oldDuration + rate if (newDuration <= max) { incrTimeoutID.current = setTimeout(() => { setDuration(newDuration) incr(getNewRate(rate), newDuration) }, 450) } else if (oldDuration + 1 <= max) { rate = 1 newDuration = oldDuration + rate incrTimeoutID.current = setTimeout(() => { setDuration(newDuration) incr(getNewRate(rate), newDuration) }, 450) } } const decr = (rate, oldDuration) => { let newDuration = oldDuration - rate if (newDuration >= min) { decrTimeoutID.current = setTimeout(() => { setDuration(newDuration) decr(getNewRate(rate), newDuration) }, 450) } else if (oldDuration - 1 >= min) { rate = 1 newDuration = oldDuration - rate decrTimeoutID.current = setTimeout(() => { setDuration(newDuration) decr(getNewRate(rate), newDuration) }, 450) } } return ( <div className={styles.root} {...props}> <div className={styles.main}> <span className={styles.buttonWrap}> <img className={styles.increase} src={arrowButton} alt="inc" onMouseDown={() => { duration + 1 <= max && setDuration(duration + 1) incr(1, duration) }} onMouseUp={() => clearTimeout(incrTimeoutID.current)} onMouseLeave={() => clearTimeout(incrTimeoutID.current)} /> </span> <input className={styles.display} type="number" value={duration} /> <span className={styles.buttonWrap}> <img className={styles.decrease} src={arrowButton} alt="dec" onMouseDown={() => { duration - 1 >= min && setDuration(duration - 1) decr(1, duration) }} onMouseUp={() => 
clearTimeout(decrTimeoutID.current)} onMouseLeave={() => clearTimeout(decrTimeoutID.current)} /> </span> </div> <p className={styles.minutes}>Minutes</p> </div> ) } export default StyledCounter
<gh_stars>1-10 # frozen_string_literal: true # Copyright 2021 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # Auto-generated by gapic-generator-ruby. DO NOT EDIT! module Google module Devtools module Remoteworkers module V1test2 # Describes a shell-style task to execute, suitable for providing as the Bots # interface's `Lease.payload` field. # @!attribute [rw] inputs # @return [::Google::Devtools::Remoteworkers::V1test2::CommandTask::Inputs] # The inputs to the task. # @!attribute [rw] expected_outputs # @return [::Google::Devtools::Remoteworkers::V1test2::CommandTask::Outputs] # The expected outputs from the task. # @!attribute [rw] timeouts # @return [::Google::Devtools::Remoteworkers::V1test2::CommandTask::Timeouts] # The timeouts of this task. class CommandTask include ::Google::Protobuf::MessageExts extend ::Google::Protobuf::MessageExts::ClassMethods # Describes the inputs to a shell-style task. # @!attribute [rw] arguments # @return [::Array<::String>] # The command itself to run (e.g., argv). # # This field should be passed directly to the underlying operating system, # and so it must be sensible to that operating system. For example, on # Windows, the first argument might be "C:\Windows\System32\ping.exe" - # that is, using drive letters and backslashes. A command for a *nix # system, on the other hand, would use forward slashes. 
# # All other fields in the RWAPI must consistently use forward slashes, # since those fields may be interpretted by both the service and the bot. # @!attribute [rw] files # @return [::Array<::Google::Devtools::Remoteworkers::V1test2::Digest>] # The input filesystem to be set up prior to the task beginning. The # contents should be a repeated set of FileMetadata messages though other # formats are allowed if better for the implementation (eg, a LUCI-style # .isolated file). # # This field is repeated since implementations might want to cache the # metadata, in which case it may be useful to break up portions of the # filesystem that change frequently (eg, specific input files) from those # that don't (eg, standard header files). # @!attribute [rw] inline_blobs # @return [::Array<::Google::Devtools::Remoteworkers::V1test2::Blob>] # Inline contents for blobs expected to be needed by the bot to execute the # task. For example, contents of entries in `files` or blobs that are # indirectly referenced by an entry there. # # The bot should check against this list before downloading required task # inputs to reduce the number of communications between itself and the # remote CAS server. # @!attribute [rw] environment_variables # @return [::Array<::Google::Devtools::Remoteworkers::V1test2::CommandTask::Inputs::EnvironmentVariable>] # All environment variables required by the task. # @!attribute [rw] working_directory # @return [::String] # Directory from which a command is executed. It is a relative directory # with respect to the bot's working directory (i.e., "./"). If it is # non-empty, then it must exist under "./". Otherwise, "./" will be used. class Inputs include ::Google::Protobuf::MessageExts extend ::Google::Protobuf::MessageExts::ClassMethods # An environment variable required by this task. # @!attribute [rw] name # @return [::String] # The envvar name. # @!attribute [rw] value # @return [::String] # The envvar value. 
class EnvironmentVariable include ::Google::Protobuf::MessageExts extend ::Google::Protobuf::MessageExts::ClassMethods end end # Describes the expected outputs of the command. # @!attribute [rw] files # @return [::Array<::String>] # A list of expected files, relative to the execution root. All paths # MUST be delimited by forward slashes. # @!attribute [rw] directories # @return [::Array<::String>] # A list of expected directories, relative to the execution root. All paths # MUST be delimited by forward slashes. # @!attribute [rw] stdout_destination # @return [::String] # The destination to which any stdout should be sent. The method by which # the bot should send the stream contents to that destination is not # defined in this API. As examples, the destination could be a file # referenced in the `files` field in this message, or it could be a URI # that must be written via the ByteStream API. # @!attribute [rw] stderr_destination # @return [::String] # The destination to which any stderr should be sent. The method by which # the bot should send the stream contents to that destination is not # defined in this API. As examples, the destination could be a file # referenced in the `files` field in this message, or it could be a URI # that must be written via the ByteStream API. class Outputs include ::Google::Protobuf::MessageExts extend ::Google::Protobuf::MessageExts::ClassMethods end # Describes the timeouts associated with this task. # @!attribute [rw] execution # @return [::Google::Protobuf::Duration] # This specifies the maximum time that the task can run, excluding the # time required to download inputs or upload outputs. That is, the worker # will terminate the task if it runs longer than this. # @!attribute [rw] idle # @return [::Google::Protobuf::Duration] # This specifies the maximum amount of time the task can be idle - that is, # go without generating some output in either stdout or stderr. 
If the # process is silent for more than the specified time, the worker will # terminate the task. # @!attribute [rw] shutdown # @return [::Google::Protobuf::Duration] # If the execution or IO timeouts are exceeded, the worker will try to # gracefully terminate the task and return any existing logs. However, # tasks may be hard-frozen in which case this process will fail. This # timeout specifies how long to wait for a terminated task to shut down # gracefully (e.g. via SIGTERM) before we bring down the hammer (e.g. # SIGKILL on *nix, CTRL_BREAK_EVENT on Windows). class Timeouts include ::Google::Protobuf::MessageExts extend ::Google::Protobuf::MessageExts::ClassMethods end end # DEPRECATED - use CommandResult instead. # Describes the actual outputs from the task. # @!attribute [rw] exit_code # @return [::Integer] # exit_code is only fully reliable if the status' code is OK. If the task # exceeded its deadline or was cancelled, the process may still produce an # exit code as it is cancelled, and this will be populated, but a successful # (zero) is unlikely to be correct unless the status code is OK. # @!attribute [rw] outputs # @return [::Google::Devtools::Remoteworkers::V1test2::Digest] # The output files. The blob referenced by the digest should contain # one of the following (implementation-dependent): # * A marshalled DirectoryMetadata of the returned filesystem # * A LUCI-style .isolated file class CommandOutputs include ::Google::Protobuf::MessageExts extend ::Google::Protobuf::MessageExts::ClassMethods end # DEPRECATED - use CommandResult instead. # Can be used as part of CompleteRequest.metadata, or are part of a more # sophisticated message. # @!attribute [rw] duration # @return [::Google::Protobuf::Duration] # The elapsed time between calling Accept and Complete. The server will also # have its own idea of what this should be, but this excludes the overhead of # the RPCs and the bot response time. 
# @!attribute [rw] overhead # @return [::Google::Protobuf::Duration] # The amount of time *not* spent executing the command (ie # uploading/downloading files). class CommandOverhead include ::Google::Protobuf::MessageExts extend ::Google::Protobuf::MessageExts::ClassMethods end # All information about the execution of a command, suitable for providing as # the Bots interface's `Lease.result` field. # @!attribute [rw] status # @return [::Google::Rpc::Status] # An overall status for the command. For example, if the command timed out, # this might have a code of DEADLINE_EXCEEDED; if it was killed by the OS for # memory exhaustion, it might have a code of RESOURCE_EXHAUSTED. # @!attribute [rw] exit_code # @return [::Integer] # The exit code of the process. An exit code of "0" should only be trusted if # `status` has a code of OK (otherwise it may simply be unset). # @!attribute [rw] outputs # @return [::Google::Devtools::Remoteworkers::V1test2::Digest] # The output files. The blob referenced by the digest should contain # one of the following (implementation-dependent): # * A marshalled DirectoryMetadata of the returned filesystem # * A LUCI-style .isolated file # @!attribute [rw] duration # @return [::Google::Protobuf::Duration] # The elapsed time between calling Accept and Complete. The server will also # have its own idea of what this should be, but this excludes the overhead of # the RPCs and the bot response time. # @!attribute [rw] overhead # @return [::Google::Protobuf::Duration] # The amount of time *not* spent executing the command (ie # uploading/downloading files). # @!attribute [rw] metadata # @return [::Array<::Google::Protobuf::Any>] # Implementation-dependent metadata about the task. Both servers and bots # may define messages which can be encoded here; bots are free to provide # metadata in multiple formats, and servers are free to choose one or more # of the values to process and ignore others. 
In particular, it is *not* # considered an error for the bot to provide the server with a field that it # doesn't know about. class CommandResult include ::Google::Protobuf::MessageExts extend ::Google::Protobuf::MessageExts::ClassMethods end # The metadata for a file. Similar to the equivalent message in the Remote # Execution API. # @!attribute [rw] path # @return [::String] # The path of this file. If this message is part of the # CommandOutputs.outputs fields, the path is relative to the execution root # and must correspond to an entry in CommandTask.outputs.files. If this # message is part of a Directory message, then the path is relative to the # root of that directory. All paths MUST be delimited by forward slashes. # @!attribute [rw] digest # @return [::Google::Devtools::Remoteworkers::V1test2::Digest] # A pointer to the contents of the file. The method by which a client # retrieves the contents from a CAS system is not defined here. # @!attribute [rw] contents # @return [::String] # If the file is small enough, its contents may also or alternatively be # listed here. # @!attribute [rw] is_executable # @return [::Boolean] # Properties of the file class FileMetadata include ::Google::Protobuf::MessageExts extend ::Google::Protobuf::MessageExts::ClassMethods end # The metadata for a directory. Similar to the equivalent message in the Remote # Execution API. # @!attribute [rw] path # @return [::String] # The path of the directory, as in # {::Google::Devtools::Remoteworkers::V1test2::FileMetadata#path FileMetadata.path}. # @!attribute [rw] digest # @return [::Google::Devtools::Remoteworkers::V1test2::Digest] # A pointer to the contents of the directory, in the form of a marshalled # Directory message. 
class DirectoryMetadata include ::Google::Protobuf::MessageExts extend ::Google::Protobuf::MessageExts::ClassMethods end # The CommandTask and CommandResult messages assume the existence of a service # that can serve blobs of content, identified by a hash and size known as a # "digest." The method by which these blobs may be retrieved is not specified # here, but a model implementation is in the Remote Execution API's # "ContentAddressibleStorage" interface. # # In the context of the RWAPI, a Digest will virtually always refer to the # contents of a file or a directory. The latter is represented by the # byte-encoded Directory message. # @!attribute [rw] hash # @return [::String] # A string-encoded hash (eg "1a2b3c", not the byte array [0x1a, 0x2b, 0x3c]) # using an implementation-defined hash algorithm (eg SHA-256). # @!attribute [rw] size_bytes # @return [::Integer] # The size of the contents. While this is not strictly required as part of an # identifier (after all, any given hash will have exactly one canonical # size), it's useful in almost all cases when one might want to send or # retrieve blobs of content and is included here for this reason. class Digest include ::Google::Protobuf::MessageExts extend ::Google::Protobuf::MessageExts::ClassMethods end # Describes a blob of binary content with its digest. # @!attribute [rw] digest # @return [::Google::Devtools::Remoteworkers::V1test2::Digest] # The digest of the blob. This should be verified by the receiver. # @!attribute [rw] contents # @return [::String] # The contents of the blob. class Blob include ::Google::Protobuf::MessageExts extend ::Google::Protobuf::MessageExts::ClassMethods end # The contents of a directory. Similar to the equivalent message in the Remote # Execution API. 
# @!attribute [rw] files # @return [::Array<::Google::Devtools::Remoteworkers::V1test2::FileMetadata>] # The files in this directory # @!attribute [rw] directories # @return [::Array<::Google::Devtools::Remoteworkers::V1test2::DirectoryMetadata>] # Any subdirectories class Directory include ::Google::Protobuf::MessageExts extend ::Google::Protobuf::MessageExts::ClassMethods end end end end end
<reponame>Miciah/rhc<gh_stars>0
require 'rhc/rest/base'

module RHC
  module Rest
    # REST model for an OpenShift gear group: a set of gears that share the
    # same cartridge configuration within an application.
    class GearGroup < Base
      include Rest

      # Attributes populated from the REST API response:
      #   gears      - the gears belonging to this group
      #   cartridges - the cartridges installed on those gears
      define_attr :gears, :cartridges
    end
  end
end
#!/bin/bash
#=================================================
# Description: DIY script
# License: MIT
# Author: P3TERX
# Blog: https://p3terx.com
#=================================================

# Modify default IP
#sed -i 's/192.168.1.1/192.168.50.5/g' package/base-files/files/bin/config_generate
# BUGFIX: the line below had been mangled by machine translation
# ("SED -i ' S / 192.168.1.1 / 10.10.10.1 /克'包/基层-文件/文件/ bin中/ config_generate"),
# which is not a valid command. Reconstructed from the template above:
# replace the default LAN IP 192.168.1.1 with 10.10.10.1.
sed -i 's/192.168.1.1/10.10.10.1/g' package/base-files/files/bin/config_generate
<gh_stars>10-100
const path = require('path');
const supertest = require('supertest');
const { execSync } = require('child_process');
const { generateExamples, removeExamples } = require('../../utils/scripts/generate-examples');
const { chrome, safari } = require('./ua');

// Integration test for the express server fixture: the fixture app serves a
// webpack bundle whose content depends on the requesting User-Agent.
describe('server', () => {
  describe('express', () => {
    let request;

    beforeAll(() => {
      // Generate the example sources the fixture server serves, then build
      // them synchronously with webpack before the server is loaded.
      generateExamples('../../server/fixtures/express-server');
      const configPath = path.join(__dirname, 'fixtures/express-server', 'webpack.config.js');
      execSync(`webpack --config=${configPath}`);
      const app = require('./fixtures/express-server/server');
      // agent() keeps one supertest client across all requests in this suite.
      request = supertest.agent(app);
    });

    afterAll(() => {
      // Clean up the generated example sources.
      removeExamples('../../server/fixtures/express-server');
    });

    it('express simple', async () => {
      // Request the same bundle with two different User-Agent headers.
      const chromeRes = await request
        .get('/main.js')
        .set('user-agent', chrome)
        .expect(200);
      const safariRes = await request
        .get('/main.js')
        .set('user-agent', safari)
        .expect(200);

      // Both bundles must be non-empty, and the Safari bundle must be larger
      // than the Chrome one — presumably because the server performs
      // differential serving and ships extra transpiled/polyfilled code to
      // the older browser (NOTE(review): confirm against the server fixture).
      expect(chromeRes.text.length).toBeGreaterThan(0);
      expect(safariRes.text.length).toBeGreaterThan(0);
      expect(safariRes.text.length).toBeGreaterThan(chromeRes.text.length);
    });
  });
});
'use strict';

// AngularJS controller providing CRUD operations for "link" resources
// (label + URL pairs) backed by the Links $resource service.
angular.module('mean.links').controller('LinksController', ['$scope', '$stateParams', '$location', 'Global', 'Links',
  function($scope, $stateParams, $location, Global, Links) {
    $scope.global = Global;

    // A user may edit/delete a link if they are an admin or its owner.
    // Returns false for missing links or links without an owner.
    $scope.hasAuthorization = function(link) {
      if (!link || !link.user) return false;
      return $scope.global.isAdmin || link.user._id === $scope.global.user._id;
    };

    // Persist a new link from the form fields, then navigate to its detail
    // page; on validation failure, flag the form as submitted.
    $scope.create = function(isValid) {
      if (isValid) {
        var link = new Links({
          label: this.label,
          link: this.link
        });
        link.$save(function(response) {
          $location.path('links/' + response._id);
        });

        // Reset the form fields.
        this.label = '';
        this.link = '';
      } else {
        $scope.submitted = true;
      }
    };

    // Delete a link. With an argument: remove it from the server and from the
    // local list (list view). Without: remove the currently loaded link and
    // go back to the list (detail view).
    $scope.remove = function(link) {
      if (link) {
        link.$remove();
        // BUGFIX: the original iterated the array with for..in (string keys,
        // plus any enumerable extras Angular attaches) and spliced while
        // iterating. Locate the entry once and splice it directly instead.
        var index = $scope.links.indexOf(link);
        if (index !== -1) {
          $scope.links.splice(index, 1);
        }
      } else {
        $scope.link.$remove(function(response) {
          $location.path('links');
        });
      }
    };

    // Save edits to the current link, appending an update timestamp, then
    // navigate back to its detail page.
    $scope.update = function(isValid) {
      if (isValid) {
        var link = $scope.link;
        if (!link.updated) {
          link.updated = [];
        }
        link.updated.push(new Date().getTime());

        link.$update(function() {
          $location.path('links/' + link._id);
        });
      } else {
        $scope.submitted = true;
      }
    };

    // Load all links into the scope (list view).
    $scope.find = function() {
      Links.query(function(links) {
        $scope.links = links;
      });
    };

    // Load a single link by the :linkId route parameter (detail view).
    $scope.findOne = function() {
      Links.get({
        linkId: $stateParams.linkId
      }, function(link) {
        $scope.link = link;
      });
    };
  }
]);
import { map } from './Map/map.js';
import { mq } from './constants.js';
import LineGraph from './Graph.js';

/**
 * Wires one named region into the shared mapbox map: adds its geojson source
 * and fill/line layers (hidden by default), and a click handler on the
 * region's DOM element that renders the region's stats panel + chart,
 * reveals only this region's layers, and flies the camera to it.
 *
 * NOTE(review): relies on a global jQuery `$` being loaded elsewhere,
 * and on `mq` being a matchMedia query (mq.matches true appears to mean the
 * mobile layout — confirm against constants.js).
 */
export default class MapRegions {
  /**
   * @param regionName  id of both the mapbox source/layer prefix and the DOM
   *                    element that triggers this region
   * @param regionTitle region stats; assumes regionTitle[0] has
   *                    name/active/newlyConfirmed and that the whole array is
   *                    what LineGraph expects — TODO confirm with caller
   * @param coords      longitude to fly to
   * @param coords1     latitude to fly to
   * @param filter      geojson data for the region's source
   * @param symbolID    layer id to insert the new layers beneath
   */
  constructor(regionName, regionTitle, coords, coords1, filter, symbolID) {
    const regionN = document.getElementById(regionName);
    this._regName = regionName;
    this._regN = regionN;
    this._regT = regionTitle;
    this._coord = coords;
    this._coord1 = coords1;
    this._filter = filter;
    this._symbolID = symbolID;
  }

  // Adds the source/layers and installs the click behaviour. Call once after
  // the map has loaded.
  draw() {
    map.addSource(this._regName, {
      type: 'geojson',
      data: this._filter,
      'generateId': true,
      // "promoteId": {"original": "features[0].properties.NAME_1"}
    });

    // Semi-transparent fill with a bright outline; hidden until clicked.
    map.addLayer(
      {
        id: `${this._regName}-fill-layer`,
        type: "fill",
        source: this._regName,
        layout: {
          'visibility': 'none',
        },
        paint: {
          // "fill-color": "rgba(3,169,244,0.0)",
          "fill-color": "rgba(255,255,255,0.1)",
          "fill-outline-color": "rgba(255,255,255,0.9)",
        },
      },
      this._symbolID
    );

    // Border line layer for the same region; also hidden until clicked.
    map.addLayer(
      {
        id: `${this._regName}-layer`,
        type: "line",
        source: this._regName,
        layout: {
          'visibility': 'none',
          "line-join": "round",
        },
        paint: {
          // "line-color": "rgba(3,169,244,0.1)",
          "line-color": "rgba(255,255,244,0.9)",
          "line-width": 2,
          // "fill-color": "rgba(0,0,0,0.7)",
        },
      },
      this._symbolID
    );

    // The "reset map" control; highlighted once any region is selected.
    const resetBorder = document.querySelector('.reset');
    const resetBorder2 = document.querySelector('.reset svg');

    // map.on('zoomend', function() {
    //   resetBorder.classList.add('resetB');
    //   resetBorder2.classList.add('resetBB');
    //   // console.log('A zoomend event occurred.');
    // });
    // let x = 0;

    this._regN.addEventListener('click', () => {
      // Rebuild the stats panel for this region inside #arrow-left.
      document.getElementById("arrow-left").innerHTML = "";
      document.getElementById("arrow-left").innerHTML = `
      <div id="RTitle">${this._regT[0].name}:
      <div id="AActive"> Active: ${this._regT[0].active}</div>
      <div id="NNew"> New: ${this._regT[0].newlyConfirmed}</div>
      </div>
      <div class="statsR stats-dayR graph-wrapR">
      <div class="bar-innerR">
      <canvas width="550" height="250" class="graph" id="graph_active_dailyR" ></canvas>
      <div id="chartjs-tooltipA">
      <table id="num"></table>
      </div>
      </div>
      </div>
      `;

      // Plot the daily-active chart into the canvas created above.
      const lineChartC = new LineGraph(this._regT);
      lineChartC.plot();

      // Staggered fade-in of the panel; timings differ between the two
      // layouts selected by the mq media query.
      if(mq.matches) {
        $( ".regModal" ).fadeIn(1000);
        setTimeout(() => {
          $( "#RTitle" ).css({
            "transition": "all 1s ease",
            "opacity": "1"
          });
        }, 500);
        setTimeout(() => {
          $( "#AActive" ).css({
            "transition": "all 1s ease-in-out",
            "opacity": "1",
            "transform": "translate(50px, 0px)"
          });
          $( "#NNew" ).css({
            "transition": "all 1s ease-in-out",
            "opacity": "1",
            "transform": "translate(85px, 0px)"
          });
        }, 1000);
      } else {
        $( ".regModal" ).fadeIn(1000);
        setTimeout(() => {
          //styles.css 1991
          // $( "#RTitle" ).css("opacity", "1");
          $( "#graph_active_dailyR" ).css("opacity", "1");
        }, 100);
        setTimeout(() => {
          //styles.css 1991
          $( "#RTitle" ).css("opacity", "1");
          // $( "#graph_active_dailyR" ).css("opacity", "1");
        }, 500);
      };

      // const regions = document.querySelector('#regions');
      // regions.style.display = 'none'
      // const regN = document.getElementById(this._regN.id);
      // const regN = document.querySelectorAll('.regN');

      //add color to map reset when regions are clicked
      resetBorder.classList.add('resetB');
      resetBorder2.classList.add('resetBB');

      // regN.style.color = '#FFF';
      // console.log(regN);
      // if(mq.matches) {
      //   regN.forEach((elem, a) => {
      //     // a.style.color = 'rgba(255,255,255,0.6)';
      //     regN[a].addEventListener('touchend', () => {
      //       regN[a].style.color = 'rgb(183, 196, 0)';
      //     });
      //   });
      // }
      // regModal.style.transform = "scale(0.01)";
      // regModal.style.display = "block";
      // if (regModal.style.display === "none") {
      //   regModal.classList.add(".regModal-active");
      //   regModal.style.display = "block";
      //   regModal.style.transform = "scale(1)";
      //   regModal.style.transition = "all 0.5s";
      //   console.log('hello Alec, how are you this afternoon?');
      // } else {
      //   regModal.style.transform = "scale(1)";
      //   regModal.style.transition = "all 0.5s";
      // }
      // regModal.classList.add(".regModal-active");
      // regModal.style.transform = "scale(1)";
      // regModal.style.transition = "all 0.5s";
      // regModal.style.display = "block";
      // const RTsvg = document.getElementById('RTsvg');
      // RTsvg.style.display = "block";
      // RTsvg.style.marginTop = "10px";

      // Hide every region's layers, then show only the clicked region's.
      // NOTE(review): the 8 region layer ids are duplicated here in every
      // instance rather than derived from the instances themselves.
      const regionArray = ['region-layer', 'region2-layer', 'region3-layer', 'region4-layer', 'region5-layer', 'region6-layer', 'region7-layer', 'region8-layer'];
      const regionArrayFill = ['region-fill-layer', 'region2-fill-layer', 'region3-fill-layer', 'region4-fill-layer', 'region5-fill-layer', 'region6-fill-layer', 'region7-fill-layer', 'region8-fill-layer'];
      regionArrayFill.map(i => {
        map.setLayoutProperty(i, 'visibility', 'none');
      });
      regionArray.map(i => {
        map.setLayoutProperty(i, 'visibility', 'none');
      });

      var clickedLayer = `${this._regName}-layer`;
      var clickedLayerFill = `${this._regName}-fill-layer`;
      map.setLayoutProperty(clickedLayer, 'visibility', 'visible');
      map.setLayoutProperty(clickedLayerFill, 'visibility', 'visible');

      // region.style.color = "rgba(255,255,255,0.8)"
      // region.style.color = "rgba(243,43,0,0.8)"
      // NOT working
      // this._regN.style.color = "rgb(255,255,255)"
      // this._regN.style.color = "rgba(44,44,44,0.9)"

      // Fly the camera to the region; the offset leaves room for the stats
      // panel (below the map on the mq layout, beside it otherwise).
      if (mq.matches) {
        map.flyTo({
          center: [this._coord, this._coord1],
          zoom: 6,
          offset: [0, -200],
        });
      } else {
        map.flyTo({
          center: [this._coord, this._coord1],
          zoom: 6,
          offset: [200, 0],
        });
      }
    });
  }
}
<filename>src/app/core/guards/game-enter/game-enter.guard.spec.ts<gh_stars>0
import {TestBed} from '@angular/core/testing';

import {GameEnterGuard} from './game-enter.guard';

// Smoke test for GameEnterGuard: verifies the guard can be constructed via
// Angular's dependency injection with an empty testing module.
describe('GameEnterGuard', () => {
  let guard: GameEnterGuard;

  beforeEach(() => {
    // No providers/imports needed — the guard is expected to be
    // providedIn: 'root' (NOTE(review): confirm in game-enter.guard.ts).
    TestBed.configureTestingModule({});
    guard = TestBed.inject(GameEnterGuard);
  });

  it('should be created', () => {
    expect(guard).toBeTruthy();
  });
});
package cz.net21.ttulka.json.mock.generator;

import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.stream.IntStream;

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;

/**
 * Entry point of the application.
 *
 * @author ttulka
 */
public class App {

    // Default output file name offered by the interactive prompt.
    static final String OUTPUT_DEFAULT = "output.json";

    /**
     * Parses the command line and dispatches:
     * -g/--guess derives a configuration from an existing JSON file,
     * otherwise -f/--files (default: 1) mock JSON files are generated.
     * Any parsing/dispatch error prints usage help. Always exits with status 0
     * on this path (generation/guess failures exit with 1 from their helpers).
     */
    public static void main(String[] args) {
        Options cmdOptions = new Options();
        cmdOptions.addOption("g", "guess", true, "Try to guess a configuration from a JSON file.");
        cmdOptions.addOption("f", "files", true, "Generate multiple files.");
        try {
            CommandLine cmdLine = new DefaultParser().parse(cmdOptions, args);

            if (cmdLine.hasOption("g")) {
                guess(Paths.get(cmdLine.getOptionValue("g")));
            } else {
                generate(getNumberOfFiles(cmdLine));
            }
        } catch (Exception e) {
            new HelpFormatter().printHelp("json-mock-data-generator", cmdOptions);
        }
        System.exit(0);
    }

    /**
     * Reads the -f option as an int; on a malformed value warns on stderr and
     * falls back to generating a single file.
     */
    private static int getNumberOfFiles(CommandLine cmdLine) {
        if (cmdLine.hasOption("f")) {
            try {
                return Integer.parseInt(cmdLine.getOptionValue("f"));
            } catch (Exception e) {
                System.err.println("Option '" + cmdLine.getOptionValue("f") + "' is not a number. Continuing with default one file.");
            }
        }
        return 1;
    }

    /**
     * Derives a configuration from JSON-TO-GUESS-FROM and writes it next to it
     * with a ".guess-conf" suffix. Exits with status 1 on failure.
     */
    private static void guess(Path jsonToGuessFrom) {
        try {
            Path guessedConfig = Paths.get(jsonToGuessFrom.toString() + ".guess-conf");

            new Guesser(jsonToGuessFrom, guessedConfig).run();

            System.out.println("Configuration guess written into " + guessedConfig);

        } catch (Exception e) {
            e.printStackTrace();
            System.exit(1);
        }
    }

    /**
     * Prompts for a configuration and output path, then generates
     * NUMBER-OF-FILES mock files (file i > 0 gets a ".i" suffix).
     * Exits with status 1 on the first generation failure.
     */
    private static void generate(int numberOfFiles) {
        Path confPath = readConfigPath();
        Path outputPath = readOutputPath(OUTPUT_DEFAULT);

        IntStream.range(0, numberOfFiles).forEach(i -> {
            try {
                Path outputFilePath = getOutputPathForNumber(outputPath, i);

                new Generator(confPath, outputFilePath).run();

            } catch (Exception e) {
                e.printStackTrace();
                System.exit(1);
            }
        });
    }

    /**
     * Returns OUTPUT-PATH unchanged for the first file (count 0), otherwise
     * appends "." plus the count so multiple files don't clobber each other.
     */
    private static Path getOutputPathForNumber(Path outputPath, int count) {
        if (count < 1) {
            return outputPath;
        }
        return Paths.get(outputPath + "." + count);
    }

    /**
     * Loops on the interactive console prompt until the user names an
     * existing, readable, regular configuration file.
     * NOTE(review): System.console() is null when stdin is not a terminal
     * (e.g. piped input) — this would NPE there; confirm intended usage.
     */
    private static Path readConfigPath() {
        String conf = null;
        while (conf == null || conf.isEmpty()) {
            conf = System.console().readLine("Configuration file: ");

            Path path = Paths.get(conf);
            if (Files.notExists(path) || !Files.isRegularFile(path) || !Files.isReadable(path)) {
                System.err.println("Configuration file '" + conf + "' is not a readable file.");
                conf = null;
            }
        }
        return Paths.get(conf);
    }

    /**
     * Prompts for the output path (empty input accepts DEFAULT-OUTPUT-PATH) and
     * loops until the target either does not exist yet or is a writable
     * regular file.
     */
    private static Path readOutputPath(String defaultOutputPath) {
        Path path = null;
        while (path == null) {
            String output = System.console().readLine("Output JSON file (%s): ", defaultOutputPath);

            if (output == null || output.isEmpty()) {
                output = defaultOutputPath;
            }

            path = Paths.get(output);
            if (Files.exists(path) && (!Files.isRegularFile(path) || !Files.isWritable(path))) {
                System.err.println("Output file '" + output + "' is not a writable file.");
                path = null;
            }
        }
        return path;
    }
}
<gh_stars>0
package controller.file;

import java.util.HashMap;
import java.util.Map;

import javafx.scene.paint.Color;

/**
 *
 * @author cy122
 *
 * This class is to help the front-end convert between Color and index
 *
 */
@SuppressWarnings("serial")
public class PaletteManager extends HashMap<Integer, Color> {

    public PaletteManager(Map<Integer, Color> palettes) {
        super(palettes);
    }

    /**
     * Reverse lookup: finds the palette index mapped to the given color.
     *
     * @param color the color to search for
     * @return the first index whose color equals COLOR, or -1 if such color
     *         doesn't exist in this PaletteManager
     */
    public int Color2Int(Color color) {
        // Iterate entries directly instead of keySet()+get() — one map walk
        // instead of a lookup per key; same result.
        for (Map.Entry<Integer, Color> entry : this.entrySet()) {
            if (entry.getValue().equals(color)) {
                return entry.getKey();
            }
        }
        return -1;
    }

    /**
     * Forward lookup: returns the color stored at INDEX, or null if absent.
     */
    public Color Int2Color(int index) {
        return this.get(index);
    }

    /**
     * Assigns COLOR to palette slot INDEX, replacing any previous entry.
     */
    public void setPalette(int index, Color color) {
        // Autoboxing replaces the deprecated (and removed in recent JDKs)
        // 'new Integer(index)' constructor; behavior is identical.
        this.put(index, color);
    }
}
#!/bin/bash

# Run every log-parser demo script in turn, printing a banner before each.

printf "========== Testing =========="

cd ../demo

for parser in AEL Drain IPLoM LenMa LFA LKE LogCluster LogMine LogSig SHISO SLCT Spell
do
    printf "\n========== %s ==========\n" "${parser}"
    python "${parser}_demo.py"
done

printf "\n========== Testing done =========\n"
#!/usr/bin/env python3
"""This phase infers arrays when there are large clusters of similar things around"""

import copy
from lxml import etree
import sys
import logging
from logging import debug, info, warning, error, critical
from collections import OrderedDict
from pprint import pprint
import re

# Matches cluster names that are pure decimal indices (array element candidates).
re_index = re.compile(r"^([0-9]+)$")

def eval_int(element):
    """ Given a SVD node, extracts the integer from its text. """
    s = element.text
    if s.startswith("0x"):
        return int(s[len("0x"):], 16)
    else:
        return int(s)

def path_string(node):
    """ Returns the path to the given NODE as a str. """
    if node is None:
        return ""
    else:
        return path_string(node.getparent()) + "/" + (node.find("name").text if node.find("name") is not None else "*")

# Registry of all <register> elements in the document, keyed by name text.
registers = {}

def register_registers(root):
    """ Starting at ROOT, finds all <register>s and registers them in REGISTERS, recursively. """
    if root.tag == "register":
        name_node = root.find("name")
        name = name_node.text if name_node is not None else None
        registers[name] = root
    for child in root:
        register_registers(child)

def normalize(root):
    # Reorders the pseudo-attribute children of every non-<register> element
    # into a canonical order so that serialized elements can be compared
    # byte-for-byte, and drops <name> when a <displayName> is present.
    if root.tag == "register":
        return root
    for tag in reversed(["dim", "dimIncrement", "dimIndex", "name", "displayName", "description", "alternateRegister", "addressOffset", "size"]):
        orig = root.find(tag)
        if orig is not None:
            # Re-inserts the Element at a reordered location. This makes it easier to compare--and also it pacifies CMSIS.
            root.remove(orig)
            root.insert(0, orig)
    if root.find("displayName") is not None:
        # Name would be something like "FOO_link0" which would not match "FOO_link1". Previous phases ensured that there's a displayName="FOO" in that case.
        name_node = root.find("name")
        if name_node is not None:
            root.remove(name_node)
    for child in root:
        normalize(child)
    return root

def extract_array_element_contents(root):
    """ Returns a new XML element with just the actual child elements (excluding pseudo attributes). """
    result = etree.Element(root.tag)
    for child in root:
        if child.tag in ["name", "addressOffset"]:
            # skip array index and addressOffset--both of which are very likely different
            continue
        # This also unlinks the child from the original document!
        result.append(child)
    return result

def xml_elements_eqP(a, b):
    """ Returns whether A and B are equal. """
    return etree.tostring(a, pretty_print=False) == etree.tostring(b, pretty_print=False)

def flatten(root):
    """ If there's a derivedFrom, resolves it. """
    derivedFrom = root.attrib.get("derivedFrom")
    if derivedFrom is not None:
        reference = registers[derivedFrom]
        del root.attrib["derivedFrom"]
        # Add stuff from REFERENCE that we don't already have
        known_attrs = [child.tag for child in root]
        # known_attrs == ['name', 'addressOffset', 'size', 'displayName']
        #print("KNOWN", known_attrs)
        for child in reference:
            if child.tag not in known_attrs:
                root.append(copy.deepcopy(child))
    for child in root:
        flatten(child)
    return root

def create_element_and_text(name, value):
    # Convenience: builds <name>value</name> as a new Element.
    result = etree.Element(name)
    result.text = value
    return result

def calculate_increments(items):
    # Returns the deltas between consecutive ITEMS (first delta — which is
    # always 0 against itself — is dropped).
    reference = None
    dimIncrements = []
    for item in items:
        if reference is None:
            reference = item
        dimIncrement = item - reference
        reference = item
        dimIncrements.append(dimIncrement)
    return dimIncrements[1:]

def add_default_names(root):
    # Where an element has only a <displayName>, promote it to <name>
    # (recursively), since normalize() may have removed <name> earlier.
    displayName_node = root.find("displayName")
    name_node = root.find("name")
    if displayName_node is not None and name_node is None:
        name_node = etree.Element("name")
        name_node.text = displayName_node.text
        root.append(name_node)
        root.remove(displayName_node)
    for child in root:
        add_default_names(child)

def infer_arrays(root):
    # Bottom-up pass: collect child clusters whose names are pure integers;
    # when all such siblings are structurally identical and evenly spaced,
    # collapse them into a single SVD <dim>/<dimIncrement>(/<dimIndex>) array.
    for child in root:
        infer_arrays(child)
    name_node = root.find("name")
    name = name_node.text if name_node is not None else None
    root_name = name
    # Maps addressOffset -> (normalized flattened copy, int index, original child).
    indexed_stuff = {}
    has_indexed_child = False
    has_non_indexed_child = False
    addresses_disjunct = True
    for child in root:
        if child.tag in ["name", "addressOffset", "size", "displayName"]: # logically those are attributes
            continue
        name_node = child.find("name")
        name = name_node.text if name_node is not None else None
        if name is not None and re_index.match(name):
            # FIXME: assert mode is None or mode == "indexing", root_name
            has_indexed_child = True
            assert root.tag == "cluster"
            assert child.tag == "cluster"
            assert root_name is not None and (root_name.startswith("_") or root_name == "DEVINDCFG"), etree.tostring(root, pretty_print=True).decode("utf-8")
            #print(root.find("name").text, name)
            index = int(name)
            child_addressOffset = eval_int(child.find("addressOffset"))
            if child_addressOffset in indexed_stuff:
                logging.warning("Same value for addressOffset ({!r}) is used multiple times, among others at {!r}".format(child.find("addressOffset").text, path_string(child)))
                addresses_disjunct = False
            indexed_stuff[child_addressOffset] = normalize(flatten(copy.deepcopy(child))), index, child
        else:
            #print("NON-INDEXED")
            #print(etree.tostring(child, pretty_print=True))
            has_non_indexed_child = True
            #infer_arrays(child)
    if root_name is not None and (root_name.startswith("_") or root_name == "DEVINDCFG"):
        assert has_indexed_child
    if has_indexed_child or has_non_indexed_child:
        # A cluster must be either entirely an array candidate or not at all.
        assert has_indexed_child ^ has_non_indexed_child, path_string(root)
    if indexed_stuff and addresses_disjunct:
        # Step 1: all indexed children must serialize identically (ignoring
        # name/addressOffset) for an array to be inferable.
        all_similar = True
        reference_element = None
        for child_addressOffset, (flattened_child, index, child) in sorted(indexed_stuff.items()):
            #print(index)
            contents = extract_array_element_contents(flattened_child)
            if reference_element is None:
                reference_element = contents
            if not xml_elements_eqP(reference_element, contents):
                all_similar = False
                #print(etree.tostring(contents, pretty_print=True).decode("utf-8"))
        if all_similar:
            # Step 2: the address offsets must form an arithmetic progression.
            child_addressOffsets = [child_addressOffset for child_addressOffset, _ in sorted(indexed_stuff.items())]
            increments = calculate_increments(child_addressOffsets)
            if len(set(increments)) == 1:
                dimIncrement = increments[0]
                dimIndex = [index for child_addressOffset, (flattened_child, index, child) in sorted(indexed_stuff.items())]
                assert len(dimIndex) == len(set(dimIndex))
                assert child_addressOffsets[0] == 0
                assert root.tag == "cluster" and root.find("dim") is None and root.find("dimIncrement") is None and root.find("dimIndex") is None, path_string(root)
                if dimIndex == [x for x in range(len(dimIndex))]:
                    # If using "[%s]", dimIndex is not allowed--so I guess it needs to be the default [0,1,2,..,N-1]
                    root.find("name").text = "{}[%s]".format(root.find("name").text)
                    logging.info("Inferring array for {!r}.".format(path_string(root)))
                else:
                    root.insert(0, create_element_and_text("dimIndex", ",".join([str(x) for x in dimIndex])))
                    root.find("name").text = "{}_%s".format(root.find("name").text)
                    logging.info("Inferring list for {!r}.".format(path_string(root)))
                root.insert(0, create_element_and_text("dimIncrement", "0x{:x}".format(dimIncrement)))
                root.insert(0, create_element_and_text("dim", str(len(dimIndex))))
                # Graft the (shared) element contents directly onto the array cluster.
                for child in reference_element:
                    assert child.tag != "name"
                    add_default_names(child)
                    root.append(child)
                first = True
                # NOTE(review): only the FIRST original element cluster is
                # removed here — the remaining index clusters appear to stay in
                # the tree; confirm whether a later phase drops them.
                for child_addressOffset, (flattened_child, index, child) in sorted(indexed_stuff.items()):
                    if first:
                        first = False
                        # Insert array_cluster before where the elements were
                        root.remove(child)
            else:
                if len(set(increments)) == 0:
                    # Array with one element? Err... okay then
                    pass
                else:
                    logging.warning("Not inferring array from element {!r} since there are different increments between consecutive addressOffsets of the array elements ({!r}).".format(path_string(root), increments))
        else:
            logging.warning("Not inferring array from element {!r} since there are too many differences between the array elements.".format(path_string(root)))
    for _, (_, _, cluster) in indexed_stuff.items():
        assert cluster.tag == "cluster"

# --- Script entry: read SVD from stdin (or argv[-1]), infer arrays, write to stdout.
logging.basicConfig(level=logging.INFO)
parser = etree.XMLParser(remove_blank_text=True)
#with (sys.stdin if len(sys.argv) == 1 else open(sys.argv[-1])) as f:
#    data = f.read()
#tree = etree.XML(data, parser=parser)
#print(tree)
tree = etree.parse(sys.stdin if len(sys.argv) == 1 else open(sys.argv[-1]), parser=parser)
root = tree.getroot()
register_registers(root)
infer_arrays(root)
tree.write(sys.stdout.buffer, pretty_print=True)
sys.stdout.flush()
import Post from '../../posts/Post';
import User from '../../users/User';
import Comment from '../Comment';
import request from 'supertest';
import mongoose from 'mongoose';
import { mongoURI } from '../../config/db';
import app from '../../app';
import jwt from 'jsonwebtoken';

// NOTE(review): this module-level connection is immediately torn down and
// re-established in beforeAll below — presumably redundant; confirm it can go.
mongoose.connect(mongoURI, { useNewUrlParser: true });

const port = 8080;

// NOTE(review): suite is titled 'rule routes' but every spec below exercises
// the comment routes — looks like a copy-paste of a sibling suite; confirm.
describe('rule routes', (): void => {
  // Fresh DB connection, running server, and empty collections for the suite.
  beforeAll(
    async (): Promise<void> => {
      await mongoose.disconnect();
      await mongoose.connect(mongoURI, { useNewUrlParser: true });
      app.listen(port);
      await User.deleteMany({}).exec();
      await Post.deleteMany({}).exec();
      await Comment.deleteMany({}).exec();
    },
  );
  // Wipe all collections after each spec so specs stay independent.
  afterEach(
    async (): Promise<void> => {
      await User.deleteMany({}).exec();
      await Post.deleteMany({}).exec();
      await Comment.deleteMany({}).exec();
    },
  );
  // Final cleanup, then drop the DB connection.
  afterAll(
    async (): Promise<void> => {
      await User.deleteMany({}).exec();
      await Post.deleteMany({}).exec();
      await Comment.deleteMany({}).exec();
      await mongoose.disconnect();
    },
  );

  // Shared fixtures for the specs below.
  const text = 'testName';
  const userId = mongoose.Types.ObjectId();
  const secret: any = process.env.SECRET;
  const onModel = 'Post';
  const email = '<EMAIL>';
  const username = 'test2UserName';
  const password = '<PASSWORD>';
  // Auth token for the shared (not persisted) userId above.
  const token = jwt.sign(
    {
      email,
      userId: userId,
    },
    secret,
    { expiresIn: '1h' },
  );
  const communityId = mongoose.Types.ObjectId();

  describe('post /communities/:communityId/posts/:postId/comments', (): void => {
    // This inner userId shadows the outer one: these specs need a persisted user.
    let userId: string;
    beforeEach(
      async (): Promise<void> => {
        const user = new User({
          email,
          username,
          password,
        });
        await user.save();
        userId = user._id;
      },
    );
    afterEach(
      async (): Promise<void> => {
        await User.deleteMany({}).exec();
      },
    );
    it('should create a new comment', async (): Promise<void> => {
      // Token must carry the persisted user's id, hence re-signed here.
      const token = jwt.sign(
        {
          email,
          userId,
        },
        secret,
        { expiresIn: '1h' },
      );
      const postId = mongoose.Types.ObjectId();
      const response = await request(app)
        .post(`/communities/${communityId}/posts/${postId}/comments`)
        .set('Authorization', 'Bearer ' + token)
        .send({
          text,
        });
      expect(response.status).toEqual(200);
    });
  });

  describe('post /communities/:communityId/comments/:commentId/comments', (): void => {
    let userId: string;
    beforeEach(
      async (): Promise<void> => {
        const user = new User({
          email,
          username,
          password,
        });
        await user.save();
        userId = user._id;
      },
    );
    it('should create a new comment', async (): Promise<void> => {
      const token = jwt.sign(
        {
          email,
          userId: userId,
        },
        secret,
        { expiresIn: '1h' },
      );
      const commentId = mongoose.Types.ObjectId();
      const response = await request(app)
        .post(`/communities/${communityId}/comments/${commentId}/comments`)
        .set('Authorization', 'Bearer ' + token)
        .send({
          text,
        });
      expect(response.status).toEqual(200);
    });
  });

  describe('patch /comments/:commentId', (): void => {
    const newText = 'newTestText';
    it('should change the text of a comments', async (): Promise<void> => {
      const postId = mongoose.Types.ObjectId();
      const comment = new Comment({
        text,
        user: userId,
        source: postId,
        onModel,
      });
      await comment.save();
      const { _id } = comment;
      const response = await request(app)
        .patch(`/comments/${_id}`)
        .set('Authorization', 'Bearer ' + token)
        .send({
          text: newText,
        });
      expect(response.status).toEqual(200);
    });
    it('should return 404 response ', async (): Promise<void> => {
      // Random id that was never saved — the route should report not-found.
      const commentId = mongoose.Types.ObjectId();
      const response = await request(app)
        .patch(`/comments/${commentId}`)
        .set('Authorization', 'Bearer ' + token)
        .send({
          text: newText,
        });
      expect(response.status).toEqual(404);
    });
  });

  describe('delete/ comments/:commentId', (): void => {
    it('should delete comment', async (): Promise<void> => {
      const postId = mongoose.Types.ObjectId();
      const comment = new Comment({
        text,
        user: userId,
        source: postId,
        onModel,
      });
      await comment.save();
      const { _id } = comment;
      const response = await request(app)
        .delete(`/comments/${_id}`)
        .set('Authorization', 'Bearer ' + token);
      expect(response.status).toEqual(204);
    });
    it('should return 404 response', async (): Promise<void> => {
      const commentId = mongoose.Types.ObjectId();
      const response = await request(app)
        .delete(`/comments/${commentId}`)
        .set('Authorization', 'Bearer ' + token);
      expect(response.status).toEqual(404);
    });
  });

  describe('get posts/:postId?sort=${new,top,comments}&limit=${0-50}&page=${page}', (): void => {
    it('should get a list of comments sorted by postId,sort options,limit and page', async (): Promise<
      void
    > => {
      const postId = mongoose.Types.ObjectId();
      const comment = new Comment({
        text,
        user: userId,
        source: postId,
        onModel,
      });
      await comment.save();
      const response = await request(app).get(
        `/posts/${postId}/comments?sort=new&limit=10&page=1`,
      );
      expect(response.status).toEqual(200);
    });
    it('should return a 404 response', async (): Promise<void> => {
      const postId = mongoose.Types.ObjectId();
      const response = await request(app).get(
        `/posts/${postId}/comments?sort=new&limit=10&page=1`,
      );
      expect(response.status).toEqual(404);
    });
  });

  describe('get comments/:commentId?sort=${new,top,comments}&limit=${0-50}&page=${page}', (): void => {
    it('should get a list of comments sorted by commentId,sort options,limit and page', async (): Promise<
      void
    > => {
      const commentId = mongoose.Types.ObjectId();
      const comment = new Comment({
        text,
        user: userId,
        source: commentId,
        onModel,
      });
      await comment.save();
      const response = await request(app).get(
        `/comments/${commentId}/comments?sort=new&limit=10&page=1`,
      );
      expect(response.status).toEqual(200);
    });
    it('should return a 404 response', async (): Promise<void> => {
      const commentId = mongoose.Types.ObjectId();
      const response = await request(app).get(
        `/comments/${commentId}/comments?sort=new&limit=10&page=1`,
      );
      expect(response.status).toEqual(404);
    });
  });
});
#!/usr/bin/env bash

################################################################################################################################################################

# @project        ESP 8266
# @file           tools/development/start.sh
# @author         Lucas Brémond <lucas.bremond@gmail.com>
# @license        MIT License

################################################################################################################################################################

script_directory="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"

pushd "${script_directory}" > /dev/null

# Load environment

source ".env"

# Start development environment
#
# Fix: the previous version placed --device="/dev/tty.SLAB_USBtoUART:…" AFTER
# the image name (so docker passed it to the container as its command, not as a
# docker option) and ended that line with a trailing backslash, which swallowed
# the following 'popd' line as yet another argument. The stray flag is removed
# and 'popd' now runs as its own command again.

docker run \
--name=${container_name} \
-it \
--privileged \
--rm \
--volume="${project_directory}:/app" \
--device="/dev/ttyUSB0:/dev/ttyUSB0" \
--workdir="/app/tools/development/helpers" \
"${image_name}:${image_version}"

popd > /dev/null

################################################################################################################################################################
package appDb;

import exceptions.AppException;
import exceptions.LoginCredentialException;
import exceptions.UserNotFoundException;
import model.Order;
import model.User;

import java.util.Map;

/**
 * Persistence facade for the application: user and order storage plus
 * access-token handling.
 */
public interface AppDb {

    /** Adds USER to the store and returns it. */
    User addUser(User user) throws AppException;

    /** Removes USER; throws if the user is not present. */
    User removeUser(User user) throws UserNotFoundException;

    /** Returns the in-memory users, keyed — presumably by username; confirm with implementations. */
    Map<String, User> getUsers() throws AppException;

    /** Replaces the in-memory user map wholesale. */
    void setUsers(Map<String, User> users);

    /** Loads users from the backing store at USER-DB-PATH. */
    Map<String, User> getUsersFromDb(String userDbPath) throws AppException;

    /** Loads orders from the backing store at ORDERS-DB-PATH, keyed by order id. */
    Map<Integer, Order> getOrdersFromDb(String ordersDbPath) throws AppException;

    /** Returns the in-memory orders, keyed by order id. */
    Map<Integer, Order> getOrders() throws AppException;

    /** Authenticates USER and issues a new access token; throws on bad credentials. */
    String createAccessToken(User user) throws LoginCredentialException;

    /** Returns whether ACCESS-TOKEN is a currently known token. */
    boolean hasToken(String accessToken) throws AppException;
}
#! /bin/bash

# Stage everything, commit with the message given as $1, and push.

clear

banner='-----------------------------------'
echo "$banner"
echo pushing version "$1" to master branch
echo "$banner"

git add .
git commit -m "$1" -a
git push
/**
 * Test double for the StackManager module: every API is a no-op stub and
 * `length` reports an empty stack. Tests can spy on / override individual
 * methods as needed.
 */
var MockStackManager = {
  init: function sm_init() {
  },

  // Accessors for the current/previous/next app in the stack — all stubs
  // returning undefined here.
  getCurrent: function sm_getCurrent() {
  },
  getPrev: function sm_getPrev() {
  },
  getNext: function sm_getNext() {
  },

  // Navigation within the stack — no-ops in the mock.
  goPrev: function sm_goPrev() {
  },
  goNext: function sm_goNext() {
  },

  // Number of entries in the (mock) stack; always 0 here.
  length: 0
};
""" Verilator Python Wrapper Package """ from typing import List from math import ceil # Maintains persistent background tasks in the form of a list of generators # that get incremented every clock cycle. background = [] def init(testbench, trace: bool = True): global dut dut = testbench dut.init(trace) def prep(port: str, value: List[int]): dut.prep(port, value) def pack(data_width: int, val: int) -> List[int]: if data_width <= 64: return [val] else: start = ceil(data_width / 32) shift = [32*s for s in range(start)] return [((val >> s) & 0xffffffff) for s in shift] def unpack(data_width: int, val: List[int]) -> int: if data_width <= 64: return val[0] else: start = ceil(data_width / 32) shift = [32*s for s in range(start)] number: int = 0 for v, s in zip(val, shift): number = number | (v << s) return number def register(interface): """ When an interface is registered with VPW it's first initiated and then its generator is run in the background """ gen = interface.init(dut) next(gen) background.append(gen) def tick(): """ Advance TB clock """ io = dut.tick() for gen in background: try: gen.send(io) except StopIteration: background.remove(gen) return io def idle(time: int = 1): """ Idle for a number of clock cycles """ for _ in range(time): tick() def finish(): dut.finish()
#!/bin/sh env="MPE" scenario="rel_formation_form_error" num_landmarks=1 num_agents=4 algo="rmappo" exp="08-12-rel-formation-form-nav10-train-mpe" seed_max=1 echo "env is ${env}, scenario is ${scenario}, algo is ${algo}, exp is ${exp}, max seed is ${seed_max}" for seed in `seq ${seed_max}`; do let "seed=$seed+1" echo "seed is ${seed}:" CUDA_VISIBLE_DEVICES=1 python train/train_mpe.py \ --env_name ${env} \ --algorithm_name ${algo} \ --experiment_name ${exp} \ --scenario_name ${scenario} \ --num_agents ${num_agents} \ --num_landmarks ${num_landmarks} \ --seed 50 \ --n_training_threads 4 \ --n_rollout_threads 32 \ --num_mini_batch 1 \ --episode_length 250 \ --num_env_steps 50000000 \ --ppo_epoch 10 \ --gain 0.01 \ --lr 7e-4 \ --critic_lr 7e-4 \ --user_name "mapping" \ --avoid-rew-weight 5 \ --form-rew-weight 0.05 \ --nav-rew-weight 10 \ --num_static_obs 0 done
#!/bin/bash

# Configure git identity and remote for this repository.

# Make sure you have the latest version of the repo
echo
git pull
echo

# Ask the user for login details
#read -p 'Git repository url: ' upstreamVar
#read -p 'Git Username: ' userVar
#read -p 'Git email: ' emailVar
upstreamVar=https://github.com/nyukhalov/CarND-LaneLines-P1.git
userVar=nyukhalov
emailVar=r.nyukhalov@gmail.com

echo
echo Thank you $userVar!, we now have your credentials
echo for upstream $upstreamVar. You must supply your password for each push.
echo

echo setting up git
# Fix: quote the expansions so values containing spaces (e.g. a full name as
# user.name) are passed as a single argument instead of being word-split.
git config --global user.name "$userVar"
git config --global user.email "$emailVar"
git remote set-url origin "$upstreamVar"

echo
echo Please verify remote:
git remote -v
echo
echo Please verify your credentials:
echo username: `git config user.name`
echo email: `git config user.email`
package com.qtimes.utils.android;

/**
 * Author: JackHou
 * Date: 2020/4/20.
 */

import java.lang.reflect.Method;

/**
 * Reflection-based access to the hidden android.os.SystemProperties API.
 * On a non-Android JVM the reflective lookup fails; all failures are caught,
 * logged, and the default value is returned (for get) or the call is a no-op
 * (for set).
 */
public class PropertyUtils {

    // Cached reflective handles, lazily resolved with double-checked locking.
    private static volatile Method set = null;
    private static volatile Method get = null;

    /**
     * Sets the system property PROP to VALUE; silently logs on any failure.
     */
    public static void set(String prop, String value) {
        try {
            if (null == set) {
                synchronized (PropertyUtils.class) {
                    if (null == set) {
                        // Class<?> instead of the raw Class type; varargs
                        // replace the explicit new Class[]{...} arrays.
                        Class<?> cls = Class.forName("android.os.SystemProperties");
                        set = cls.getDeclaredMethod("set", String.class, String.class);
                    }
                }
            }
            set.invoke(null, prop, value);
        } catch (Throwable e) {
            e.printStackTrace();
        }
    }

    /**
     * Returns the system property PROP, or DEFAULTVALUE if the property is
     * unset or the SystemProperties class is unavailable.
     */
    public static String get(String prop, String defaultvalue) {
        String value = defaultvalue;
        try {
            if (null == get) {
                synchronized (PropertyUtils.class) {
                    if (null == get) {
                        Class<?> cls = Class.forName("android.os.SystemProperties");
                        get = cls.getDeclaredMethod("get", String.class, String.class);
                    }
                }
            }
            value = (String) get.invoke(null, prop, defaultvalue);
        } catch (Throwable e) {
            e.printStackTrace();
        }
        return value;
    }

    /**
     * 获取系统版本名称 — returns the ro.product.version property ("" if unset).
     *
     * @return the product version name
     */
    public static String getSystemVersionName() {
        return get("ro.product.version", "");
    }

    /**
     * 获取系统版本名称 — returns the device serial number ("" if unset).
     *
     * @return the serial number
     */
    public static String getSystemSN() {
        return get("ro.serialno", "");
    }

    /**
     * open wireless adb
     */
    public static void openAdb() {
        PropertyUtils.set("persist.internet.adb.enable", "1");
    }

    /**
     * close wireless adb
     */
    public static void closeAdb() {
        PropertyUtils.set("persist.internet.adb.enable", "0");
    }
}
<reponame>freerware/negotiator<filename>internal/header/content_coding_range.go /* Copyright 2020 Freerware * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package header import ( "errors" "fmt" "regexp" "strconv" "strings" "github.com/stretchr/stew/slice" ) var ( contentCodingRangeRegex = regexp.MustCompile(`^([A-Za-z0-9-]+|\*)(;\s?q=(\d(\.\d{1,3})?))?$`) ) var ( gzip = "gzip" xgzip = "x-gzip" deflate = "deflate" compress = "compress" xcompress = "x-compress" identity = "identity" contentCodings = []string{ gzip, xgzip, deflate, compress, xcompress, identity, "*", } // defaultContentCodingRange represents the default content coding range. defaultContentCodingRange = ContentCodingRange{ coding: "*", qValue: QualityValueMaximum, } // ErrEmptyContentCodingRange is an error that indicates that the content // coding range cannot be empty. ErrEmptyContentCodingRange = errors.New("content coding range cannot be empty") // ErrInvalidContentCodingRange is an error that indicates that the content // coding range is invalid. ErrInvalidContentCodingRange = errors.New("content coding range is invalid") ) // ContentCodingRange represents an expression that indicates an encoding // transformation. // // Content coding values indicate an encoding transformation that has // been or can be applied to a representation. 
Content codings are // primarily used to allow a representation to be compressed or // otherwise usefully transformed without losing the identity of its // underlying media type and without loss of information. type ContentCodingRange struct { coding string qValue QualityValue } // NewContentCodingRange constructs a content coding from the textual representation. func NewContentCodingRange(contentCoding string) (ContentCodingRange, error) { if len(contentCoding) == 0 { return ContentCodingRange{}, ErrEmptyContentCodingRange } if ok := contentCodingRangeRegex.MatchString(contentCoding); !ok { return ContentCodingRange{}, ErrInvalidContentCodingRange } groups := contentCodingRangeRegex.FindStringSubmatch(contentCoding) var valid []string valid = append(valid, contentCodings...) if !slice.ContainsString(contentCodings, groups[1]) { return ContentCodingRange{}, ErrInvalidContentCodingRange } cc := ContentCodingRange{coding: groups[1], qValue: QualityValue(1.0)} if len(groups[3]) > 0 { q, _ := strconv.ParseFloat(groups[4], 32) qv, err := NewQualityValue(float32(q)) if err != nil { return ContentCodingRange{}, err } cc.qValue = qv } return cc, nil } // IsWildcard indicates if the specified coding range is '*'. func (cc ContentCodingRange) IsWildcard() bool { return cc.coding == "*" } // IsIdentity indicates if the specified coding range is 'identi'. func (cc ContentCodingRange) IsIdentity() bool { return strings.ToLower(cc.coding) == strings.ToLower(identity) } // IsCoding indicates if the specified coding range is a content coding. func (cc ContentCodingRange) IsCoding() bool { return !cc.IsWildcard() && !cc.IsIdentity() } // Coding retrieves the content coding. func (cc ContentCodingRange) CodingRange() string { return cc.coding } // Compatible determines if the provided content coding is compatible with the // content coding range. 
func (cc ContentCodingRange) Compatible(coding string) bool { if !slice.ContainsString(contentCodings, strings.ToLower(coding)) { return false } if cc.IsWildcard() { return true } return strings.ToLower(cc.CodingRange()) == strings.ToLower(coding) } // QualityValue retrieves the quality value of the content coding. // // Each codings value MAY be given an associated quality value // representing the preference for that encoding, as defined in // Section 5.3.1. func (cc ContentCodingRange) QualityValue() QualityValue { return cc.qValue } // String provides a textual representation of the content coding. func (cc ContentCodingRange) String() string { return fmt.Sprintf("%s;q=%s", cc.CodingRange(), cc.QualityValue().String()) }
<filename>MySNiPs/snapi/snapi/actions/parse.rb
module SNaPi
  module Actions
    # Immutable builder for the API "parse" action; each setter merges a
    # parameter and returns a new action object.
    class Parse < SNaPi::Actions::Get
      # Parse the content of this page.
      # @param value [String]
      # @return [self]
      def page(value)
        merge(page: value.to_s)
      end

      # Parse the content of this page. Overrides page.
      # @param value [Integer]
      # @return [self]
      def pageid(value)
        merge(pageid: value.to_s)
      end

      # Which pieces of information to get:
      # @return [self]
      def prop(*values)
        values.inject(self) {|res, val| res._prop(val) or raise ArgumentError, "Unknown value for prop: #{val}" }
      end

      # Validates a single prop value. Delegates to a superclass _prop first
      # (if one is defined) so subclass/ancestor vocabularies chain together;
      # falls back to this class's accepted values. Returns a merged action,
      # or a falsy value when VALUE is not recognized (prop raises on that).
      def _prop(value)
        defined?(super) && super || %w[revid displaytitle wikitext].include?(value.to_s) && merge(prop: value.to_s, replace: false)
      end
    end
  end
end
// @flow import React, { useCallback } from 'react' import { StyleSheet } from 'react-native' import InputText from '../common/form/InputText' import { Section, Wrapper } from '../common' import TopBar from '../common/view/TopBar' import { BackButton, NextButton, useScreenState } from '../appNavigation/stackNavigation' import { withStyles } from '../../lib/styles' import { getDesignRelativeHeight } from '../../lib/utils/sizes' import { navigationOptions } from './utils/sendReceiveFlow' export type AmountProps = { screenProps: any, navigation: any, styles: any, } const SendReason = (props: AmountProps) => { const { screenProps } = props const { params } = props.navigation.state const [screenState, setScreenState] = useScreenState(screenProps) const { reason, ...restState } = screenState const next = useCallback(() => { const [nextRoute, ...nextRoutes] = screenState.nextRoutes || [] props.screenProps.push(nextRoute, { nextRoutes, ...restState, reason, params, }) }, [restState, reason, screenState.nextRoutes, params]) return ( <Wrapper> <TopBar push={screenProps.push} /> <Section grow> <Section.Stack style={styles.container}> <Section.Title fontWeight="medium">What For?</Section.Title> <InputText maxLength={256} autoFocus style={[props.styles.input, styles.bottomContent, styles.margin]} value={reason} onChangeText={reason => setScreenState({ reason })} placeholder="Add a message" enablesReturnKeyAutomatically onSubmitEditing={next} /> </Section.Stack> <Section.Row style={styles.bottomContent}> <Section.Row grow={1} justifyContent="flex-start"> <BackButton mode="text" screenProps={screenProps}> Cancel </BackButton> </Section.Row> <Section.Stack grow={3}> <NextButton nextRoutes={screenState.nextRoutes} values={{ ...params, ...restState, reason }} {...props} label={reason ? 
'Next' : 'Skip'} /> </Section.Stack> </Section.Row> </Section> </Wrapper> ) } const styles = StyleSheet.create({ container: { minHeight: getDesignRelativeHeight(180), height: getDesignRelativeHeight(180), justifyContent: 'flex-start', }, bottomContent: { marginTop: 'auto', position: 'relative', }, margin: { marginTop: 40, }, }) SendReason.navigationOptions = navigationOptions SendReason.shouldNavigateToComponent = props => { const { screenState } = props.screenProps return screenState.amount >= 0 && screenState.nextRoutes } export default withStyles(({ theme }) => ({ input: { marginTop: theme.sizes.defaultDouble, }, }))(SendReason)
// // StreamSTF.hpp // AxiSEM3D // // Created by <NAME> on 5/14/20. // Copyright © 2020 <NAME>. All rights reserved. // // stream source-time function #ifndef StreamSTF_hpp #define StreamSTF_hpp #include "STF.hpp" #include <vector> class StreamSTF: public STF { public: // constructor StreamSTF(const std::string &fileName, PaddingMode padding, numerical::Real left, numerical::Real right); // get start time double getStartTime() const { return mTimes.front(); } // get value numerical::Real getValue(double time); // verbose std::string verbose() const; private: // file const std::string mFileName; // data std::vector<double> mTimes; std::vector<numerical::Real> mData; // padding const bool mPadding; numerical::Real mLeftPadding = 0.; numerical::Real mRightPadding = 0.; }; #endif /* StreamSTF_hpp */
// <gh_stars>1-10
// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0
import {
  ComponentConnectionMetadata,
  ComponentDependency,
  ComponentType,
  CreateComponentRecipeRequest,
  CreateComponentRecipeResponse
} from '../types/greengrass-v2-handler-types';
import { MachineProtocol } from '../types/solution-common-types';

// Artifact bucket and stream names are injected through the environment.
const { ARTIFACT_BUCKET, KINESIS_STREAM, TIMESTREAM_KINESIS_STREAM } = process.env;

/**
 * Python modules have same versions with /source/machine_connector/m2c2_opcda_connector/requirements.txt
 * and /source/machine_connector/m2c2_publisher/requirements.txt
 */
const PYTHON_MODULE_VERSION = {
  awsiotsdk: '1.7.1',
  backoff: '1.10.0',
  greengrasssdk: '1.6.0',
  'OpenOPC-Python3x': '1.3.1',
  Pyro4: '4.81',
  'python-dateutil': '2.8.1'
};

/**
 * Builds AWS IoT Greengrass v2 component recipes for M2C2 collector and
 * publisher components.
 */
export class GreengrassV2ComponentBuilder {
  /**
   * Creates a Greengrass v2 component recipe based on the parameters.
   * @param params The component recipe build request parameters
   * @returns The component recipe
   */
  public static createRecipe(params: CreateComponentRecipeRequest): CreateComponentRecipeResponse {
    const { area, componentType, componentVersion, connectionName, machineName, process, protocol, siteName } = params;
    const { sendDataToIoTSiteWise, sendDataToIoTTopic, sendDataToKinesisStreams, sendDataToTimestream } = params;

    // By default, all components have the Greengrass Nucleus and stream manager as dependencies.
    const componentDependencies: Record<string, ComponentDependency> = {
      'aws.greengrass.Nucleus': {
        VersionRequirement: '>=2.0.0 <2.6.0',
        DependencyType: 'HARD'
      },
      'aws.greengrass.StreamManager': {
        VersionRequirement: '>=2.0.10 <3.0.0',
        DependencyType: 'HARD'
      }
    };

    // Connection identity exposed to the component as its default configuration.
    const connectionMetadata: ComponentConnectionMetadata = {
      area,
      connectionName,
      machineName,
      process,
      siteName,
      streamName: `m2c2_${connectionName}_stream`
    };

    // Environment variables passed via the manifest's Setenv; the
    // `{configuration:/...}` placeholders are resolved by Greengrass at deployment.
    const componentEnvironmentVariables: Record<string, string> = {
      AREA: '{configuration:/connectionMetadata/area}',
      CONNECTION_GG_STREAM_NAME: '{configuration:/connectionMetadata/streamName}',
      CONNECTION_NAME: '{configuration:/connectionMetadata/connectionName}',
      MACHINE_NAME: '{configuration:/connectionMetadata/machineName}',
      PROCESS: '{configuration:/connectionMetadata/process}',
      SITE_NAME: '{configuration:/connectionMetadata/siteName}'
    };

    // By default, all components install `awsiotsdk`, `backoff`, `greengrasssdk`, and `python-dateutil` before running.
    const pythonPackages = ['awsiotsdk', 'backoff', 'greengrasssdk', 'python-dateutil'];
    let componentName = `m2c2-${connectionName}`;
    let topic = `m2c2/+/${connectionName}`;
    let artifact = 'm2c2_opcda_connector';

    if (componentType === ComponentType.PUBLISHER) {
      /**
       * The requirements of the publisher components are,
       * 1. publisher components need to send data to `m2c2/data/{connectionName}/#`.
       * 2. publisher components have the IoT SiteWise edge publisher as a dependency to send data to IoT SiteWise.
       * 3. publisher components have the IoT SiteWise edge collector OPC UA as a dependency when the machine protocol is OPC UA.
       * 4. publisher components have the data destination environment variables.
       */
      componentName = `${componentName}-publisher`;
      topic = `${topic}/#`;
      artifact = 'm2c2_publisher';
      componentDependencies['aws.iot.SiteWiseEdgePublisher'] = {
        VersionRequirement: '>=2.0.1 <3.0.0',
        DependencyType: 'HARD'
      };

      if (protocol === MachineProtocol.OPCUA) {
        componentDependencies['aws.iot.SiteWiseEdgeCollectorOpcua'] = {
          VersionRequirement: '>=2.0.2 <3.0.0',
          DependencyType: 'HARD'
        };
      }

      // Set the data destination metadata for the publisher component.
      connectionMetadata.sendDataToIoTTopic = sendDataToIoTTopic ? 'Yes' : '';
      connectionMetadata.sendDataToIoTSiteWise = sendDataToIoTSiteWise ? 'Yes' : '';
      connectionMetadata.sendDataToKinesisStreams = sendDataToKinesisStreams ? 'Yes' : '';
      connectionMetadata.sendDataToTimestream = sendDataToTimestream ? 'Yes' : '';

      // Set the environment variables for the publisher component.
      componentEnvironmentVariables.KINESIS_STREAM_NAME = KINESIS_STREAM;
      componentEnvironmentVariables.PROTOCOL = protocol;
      componentEnvironmentVariables.SEND_TO_IOT_TOPIC = '{configuration:/connectionMetadata/sendDataToIoTTopic}';
      componentEnvironmentVariables.SEND_TO_SITEWISE = '{configuration:/connectionMetadata/sendDataToIoTSiteWise}';
      componentEnvironmentVariables.SEND_TO_KINESIS_STREAM = '{configuration:/connectionMetadata/sendDataToKinesisStreams}';
      componentEnvironmentVariables.SEND_TO_TIMESTREAM = '{configuration:/connectionMetadata/sendDataToTimestream}';
      componentEnvironmentVariables.TIMESTREAM_KINESIS_STREAM = TIMESTREAM_KINESIS_STREAM;
    } else {
      /**
       * Currently, only the OPC DA collector component is supported.
       * The OPC DA collector needs to install OpenOPC and Pyro4 before running.
       */
      pythonPackages.push(...['OpenOPC-Python3x', 'Pyro4']);
    }

    // `pip3 install` argument list pinning each package to the versions above.
    const pythonPackagesInstall = pythonPackages
      .map(pythonPackage => `${pythonPackage}==${PYTHON_MODULE_VERSION[pythonPackage]}`)
      .join(' ');

    return {
      RecipeFormatVersion: '2020-01-25',
      ComponentName: componentName,
      ComponentVersion: componentVersion,
      ComponentType: 'aws.greengrass.generic',
      ComponentDescription: `M2C2 ${componentName} component`,
      ComponentConfiguration: {
        DefaultConfiguration: {
          accessControl: {
            'aws.greengrass.ipc.mqttproxy': {
              [`${componentName}:mqttproxy:1`]: {
                policyDescription: `Allows access to subscribe/publish a topic for ${componentName}.`,
                operations: ['aws.greengrass#PublishToIoTCore', 'aws.greengrass#SubscribeToIoTCore'],
                resources: [topic]
              }
            }
          },
          connectionMetadata
        }
      },
      ComponentDependencies: componentDependencies,
      Manifests: [
        {
          Platform: { os: 'linux' },
          Name: 'Linux',
          Lifecycle: {
            Setenv: componentEnvironmentVariables,
            Install: `pip3 install -I ${pythonPackagesInstall}`,
            Run: `python3 {artifacts:decompressedPath}/${artifact}/${artifact}.py`
          },
          Artifacts: [
            {
              Uri: `s3://${ARTIFACT_BUCKET}/${artifact}.zip`,
              Algorithm: 'SHA-256',
              Unarchive: 'ZIP'
            }
          ]
        }
      ],
      Lifecycle: {}
    };
  }
}
"use strict"; Object.defineProperty(exports, "__esModule", { value: true }); exports.ic_panorama_twotone = void 0; var ic_panorama_twotone = { "viewBox": "0 0 24 24", "children": [{ "name": "path", "attribs": { "d": "M0 0h24v24H0V0z", "fill": "none" }, "children": [] }, { "name": "path", "attribs": { "d": "M3 18h18V6H3v12zm5.5-5.5l2.5 3.01L14.5 11l4.5 6H5l3.5-4.5z", "opacity": ".3" }, "children": [] }, { "name": "path", "attribs": { "d": "M21 4H3c-1.1 0-2 .9-2 2v12c0 1.1.9 2 2 2h18c1.1 0 2-.9 2-2V6c0-1.1-.9-2-2-2zm0 14H3V6h18v12zm-6.5-7L11 15.51 8.5 12.5 5 17h14z" }, "children": [] }] }; exports.ic_panorama_twotone = ic_panorama_twotone;
// <reponame>abh1nay/voldemort
// <filename>src/java/voldemort/store/readonly/JsonStoreBuilder.java
/*
 * Copyright 2008-2013 LinkedIn, Inc
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */

package voldemort.store.readonly;

import java.io.BufferedOutputStream;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.security.MessageDigest;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Random;
import java.util.Set;

import joptsimple.OptionParser;
import joptsimple.OptionSet;

import org.apache.log4j.Logger;

import voldemort.VoldemortException;
import voldemort.cluster.Cluster;
import voldemort.cluster.Node;
import voldemort.routing.RoutingStrategy;
import voldemort.routing.RoutingStrategyFactory;
import voldemort.serialization.DefaultSerializerFactory;
import voldemort.serialization.Serializer;
import voldemort.serialization.SerializerDefinition;
import voldemort.serialization.SerializerFactory;
import voldemort.serialization.json.EndOfFileException;
import voldemort.serialization.json.JsonReader;
import voldemort.store.StoreDefinition;
import voldemort.store.compress.CompressionStrategy;
import voldemort.store.compress.CompressionStrategyFactory;
import voldemort.utils.ByteUtils;
import voldemort.utils.CmdUtils;
import voldemort.utils.Pair;
import voldemort.utils.Utils;
import voldemort.xml.ClusterMapper;
import voldemort.xml.StoreDefinitionsMapper;

import com.google.common.base.Joiner;
import com.google.common.collect.AbstractIterator;
import com.google.common.collect.Maps;

/**
 * Build a read-only store from given input.
 *
 * Reads JSON key/value pairs, externally sorts them by MD5(key), and writes
 * per-chunk .index/.data files in one of the READONLY_V0/V1/V2 layouts.
 */
public class JsonStoreBuilder {

    private static final Logger logger = Logger.getLogger(JsonStoreBuilder.class);

    private final JsonReader reader;                 // source of key/value pairs
    private final Cluster cluster;
    private final StoreDefinition storeDefinition;
    private final RoutingStrategy routingStrategy;   // maps keys to nodes/partitions
    private final File outputDir;                    // destination of node-<id> directories
    private final File tempDir;                      // scratch space for the external sort
    private final int internalSortSize;              // pairs buffered in memory while sorting
    private final int numThreads;
    private final int numChunks;                     // chunk files per node/partition/replica
    private final int ioBufferSize;
    private final boolean gzipIntermediate;          // gzip the external sorter's spill files

    public JsonStoreBuilder(JsonReader reader,
                            Cluster cluster,
                            StoreDefinition storeDefinition,
                            RoutingStrategy routingStrategy,
                            File outputDir,
                            File tempDir,
                            int internalSortSize,
                            int numThreads,
                            int numChunks,
                            int ioBufferSize,
                            boolean gzipIntermediate) {
        // A store cannot be replicated more times than there are nodes.
        if(cluster.getNumberOfNodes() < storeDefinition.getReplicationFactor())
            throw new IllegalStateException("Number of nodes is " + cluster.getNumberOfNodes()
                                            + " but the replication factor is "
                                            + storeDefinition.getReplicationFactor() + ".");
        this.reader = reader;
        this.cluster = cluster;
        this.storeDefinition = storeDefinition;
        if(tempDir == null)
            this.tempDir = new File(Utils.notNull(System.getProperty("java.io.tmpdir")));
        else
            this.tempDir = tempDir;
        this.outputDir = outputDir;
        this.routingStrategy = routingStrategy;
        this.internalSortSize = internalSortSize;
        this.numThreads = numThreads;
        this.numChunks = numChunks;
        this.ioBufferSize = ioBufferSize;
        this.gzipIntermediate = gzipIntermediate;
    }

    /**
     * Main method to run on a input text file
     *
     * @param args see USAGE for details
     * @throws IOException
     */
    public static void main(String[] args) throws IOException {
        OptionParser parser = new OptionParser();
        parser.accepts("help", "print usage information");
        parser.accepts("cluster", "[REQUIRED] path to cluster xml config file")
              .withRequiredArg()
              .describedAs("cluster.xml");
        parser.accepts("stores", "[REQUIRED] path to stores xml config file")
              .withRequiredArg()
              .describedAs("stores.xml");
        parser.accepts("name", "[REQUIRED] store name").withRequiredArg().describedAs("store name");
        parser.accepts("buffer", "[REQUIRED] number of key/value pairs to buffer in memory")
              .withRequiredArg()
              .ofType(Integer.class);
        parser.accepts("input", "[REQUIRED] input file to read from")
              .withRequiredArg()
              .describedAs("input-file");
        parser.accepts("output", "[REQUIRED] directory to output stores to")
              .withRequiredArg()
              .describedAs("output directory");
        parser.accepts("threads", "number of threads").withRequiredArg().ofType(Integer.class);
        parser.accepts("chunks",
                       "number of chunks [per node, per partition, per partition + replica]")
              .withRequiredArg()
              .ofType(Integer.class);
        parser.accepts("io-buffer-size", "size of i/o buffers in bytes")
              .withRequiredArg()
              .ofType(Integer.class);
        parser.accepts("temp-dir", "temporary directory for sorted file pieces")
              .withRequiredArg()
              .describedAs("temp dir");
        parser.accepts("gzip", "compress intermediate chunk files");
        parser.accepts("format",
                       "read-only store format [" + ReadOnlyStorageFormat.READONLY_V0.getCode()
                               + "," + ReadOnlyStorageFormat.READONLY_V1.getCode() + ","
                               + ReadOnlyStorageFormat.READONLY_V2.getCode() + "]")
              .withRequiredArg()
              .ofType(String.class);
        OptionSet options = parser.parse(args);

        if(options.has("help")) {
            parser.printHelpOn(System.out);
            System.exit(0);
        }

        // Validate that all required options were supplied.
        Set<String> missing = CmdUtils.missing(options,
                                               "cluster",
                                               "stores",
                                               "name",
                                               "buffer",
                                               "input",
                                               "output");
        if(missing.size() > 0) {
            System.err.println("Missing required arguments: " + Joiner.on(", ").join(missing));
            parser.printHelpOn(System.err);
            System.exit(1);
        }

        String clusterFile = (String) options.valueOf("cluster");
        String storeDefFile = (String) options.valueOf("stores");
        String storeName = (String) options.valueOf("name");
        int sortBufferSize = (Integer) options.valueOf("buffer");
        String inputFile = (String) options.valueOf("input");
        File outputDir = new File((String) options.valueOf("output"));
        int numThreads = CmdUtils.valueOf(options, "threads", 2);
        int chunks = CmdUtils.valueOf(options, "chunks", 2);
        int ioBufferSize = CmdUtils.valueOf(options, "io-buffer-size", 1000000);
        ReadOnlyStorageFormat storageFormat = ReadOnlyStorageFormat.fromCode(CmdUtils.valueOf(options,
                                                                                              "format",
                                                                                              ReadOnlyStorageFormat.READONLY_V2.getCode()));
        boolean gzipIntermediate = options.has("gzip");
        File tempDir = new File(CmdUtils.valueOf(options,
                                                 "temp-dir",
                                                 System.getProperty("java.io.tmpdir")));

        try {
            JsonReader reader = new JsonReader(new BufferedReader(new FileReader(inputFile),
                                                                  ioBufferSize));
            Cluster cluster = new ClusterMapper().readCluster(new BufferedReader(new FileReader(clusterFile)));
            StoreDefinition storeDef = null;
            List<StoreDefinition> stores = new StoreDefinitionsMapper().readStoreList(new BufferedReader(new FileReader(storeDefFile)));
            for(StoreDefinition def: stores) {
                if(def.getName().equals(storeName))
                    storeDef = def;
            }

            if(storeDef == null)
                Utils.croak("No store found with name \"" + storeName + "\"");

            if(!outputDir.exists())
                Utils.croak("Directory \"" + outputDir.getAbsolutePath() + "\" does not exist.");

            RoutingStrategy routingStrategy = new RoutingStrategyFactory().updateRoutingStrategy(storeDef,
                                                                                                 cluster);

            new JsonStoreBuilder(reader,
                                 cluster,
                                 storeDef,
                                 routingStrategy,
                                 outputDir,
                                 tempDir,
                                 sortBufferSize,
                                 numThreads,
                                 chunks,
                                 ioBufferSize,
                                 gzipIntermediate).build(storageFormat);
        } catch(FileNotFoundException e) {
            Utils.croak(e.getMessage());
        }
    }

    /**
     * Builds the store in the requested on-disk format.
     *
     * @param type the read-only storage format version to produce
     * @throws IOException on any file I/O failure
     */
    public void build(ReadOnlyStorageFormat type) throws IOException {
        switch(type) {
            case READONLY_V0:
                buildVersion0();
                break;
            case READONLY_V1:
                buildVersion1();
                break;
            case READONLY_V2:
                buildVersion2();
                break;
            default:
                throw new VoldemortException("Invalid storage format " + type);
        }
    }

    /**
     * V0 layout: per node, numChunks index/data file pairs; a key goes to the
     * chunk selected by MD5(key) on each of its replica nodes.
     */
    public void buildVersion0() throws IOException {
        logger.info("Building store " + storeDefinition.getName() + " for "
                    + cluster.getNumberOfNodes() + " with " + numChunks
                    + " chunks per node and type " + ReadOnlyStorageFormat.READONLY_V0);

        // initialize nodes
        int numNodes = cluster.getNumberOfNodes();
        DataOutputStream[][] indexes = new DataOutputStream[numNodes][numChunks];
        DataOutputStream[][] datas = new DataOutputStream[numNodes][numChunks];
        int[][] positions = new int[numNodes][numChunks];
        for(Node node: cluster.getNodes()) {
            int nodeId = node.getId();
            File nodeDir = new File(outputDir, "node-" + Integer.toString(nodeId));
            nodeDir.mkdirs();

            // Create metadata file
            BufferedWriter writer = new BufferedWriter(new FileWriter(new File(nodeDir, ".metadata")));
            ReadOnlyStorageMetadata metadata = new ReadOnlyStorageMetadata();
            metadata.add(ReadOnlyStorageMetadata.FORMAT, ReadOnlyStorageFormat.READONLY_V0.getCode());
            writer.write(metadata.toJsonString());
            writer.close();

            for(int chunk = 0; chunk < numChunks; chunk++) {
                File indexFile = new File(nodeDir, chunk + ".index");
                File dataFile = new File(nodeDir, chunk + ".data");
                positions[nodeId][chunk] = 0;
                indexes[nodeId][chunk] = new DataOutputStream(new BufferedOutputStream(new FileOutputStream(indexFile),
                                                                                       ioBufferSize));
                datas[nodeId][chunk] = new DataOutputStream(new BufferedOutputStream(new FileOutputStream(dataFile),
                                                                                     ioBufferSize));
            }
        }

        logger.info("Reading items...");
        int count = 0;
        ExternalSorter<KeyValuePair> sorter = new ExternalSorter<KeyValuePair>(new KeyValuePairSerializer(),
                                                                               new KeyMd5Comparator(),
                                                                               internalSortSize,
                                                                               tempDir.getAbsolutePath(),
                                                                               ioBufferSize,
                                                                               numThreads,
                                                                               gzipIntermediate);
        JsonObjectIterator iter = new JsonObjectIterator(reader, storeDefinition);
        for(KeyValuePair pair: sorter.sorted(iter)) {
            List<Node> nodes = this.routingStrategy.routeRequest(pair.getKey());
            byte[] keyMd5 = pair.getKeyMd5();
            // Write the pair once per replica; index entry = (md5, data offset).
            for(int i = 0; i < this.storeDefinition.getReplicationFactor(); i++) {
                int nodeId = nodes.get(i).getId();
                int chunk = ReadOnlyUtils.chunk(keyMd5, numChunks);
                int numBytes = pair.getValue().length;
                datas[nodeId][chunk].writeInt(numBytes);
                datas[nodeId][chunk].write(pair.getValue());
                indexes[nodeId][chunk].write(keyMd5);
                indexes[nodeId][chunk].writeInt(positions[nodeId][chunk]);
                positions[nodeId][chunk] += numBytes + 4;
                checkOverFlow(chunk, positions[nodeId][chunk]);
            }
            count++;
        }

        logger.info(count + " items read.");

        // sort and write out
        logger.info("Closing all store files.");
        for(int node = 0; node < numNodes; node++) {
            for(int chunk = 0; chunk < numChunks; chunk++) {
                indexes[node][chunk].close();
                datas[node][chunk].close();
            }
        }
    }

    /**
     * V1 layout: chunk files are per (partition, chunk) instead of per node;
     * each partition's chunk files live under the directory of the node that
     * hosts that partition.
     */
    public void buildVersion1() throws IOException {
        logger.info("Building store " + storeDefinition.getName() + " for "
                    + cluster.getNumberOfPartitions() + " partitions with " + numChunks
                    + " chunks per partitions and type " + ReadOnlyStorageFormat.READONLY_V1);

        // initialize nodes
        int numNodes = cluster.getNumberOfNodes();
        DataOutputStream[][] indexes = new DataOutputStream[numNodes][];
        DataOutputStream[][] datas = new DataOutputStream[numNodes][];
        int[][] positions = new int[numNodes][];

        // Per-partition lookups: where its chunk files start and which node owns it.
        int[] partitionIdToChunkOffset = new int[cluster.getNumberOfPartitions()];
        int[] partitionIdToNodeId = new int[cluster.getNumberOfPartitions()];

        for(Node node: cluster.getNodes()) {
            int nodeId = node.getId();
            indexes[nodeId] = new DataOutputStream[node.getNumberOfPartitions() * numChunks];
            datas[nodeId] = new DataOutputStream[node.getNumberOfPartitions() * numChunks];
            positions[nodeId] = new int[node.getNumberOfPartitions() * numChunks];

            File nodeDir = new File(outputDir, "node-" + Integer.toString(nodeId));
            nodeDir.mkdirs();

            // Create metadata file
            BufferedWriter writer = new BufferedWriter(new FileWriter(new File(nodeDir, ".metadata")));
            ReadOnlyStorageMetadata metadata = new ReadOnlyStorageMetadata();
            metadata.add(ReadOnlyStorageMetadata.FORMAT, ReadOnlyStorageFormat.READONLY_V1.getCode());
            writer.write(metadata.toJsonString());
            writer.close();

            int globalChunk = 0;
            for(Integer partition: node.getPartitionIds()) {
                partitionIdToChunkOffset[partition] = globalChunk;
                partitionIdToNodeId[partition] = node.getId();
                for(int chunk = 0; chunk < numChunks; chunk++) {
                    File indexFile = new File(nodeDir, Integer.toString(partition) + "_"
                                                       + Integer.toString(chunk) + ".index");
                    File dataFile = new File(nodeDir, Integer.toString(partition) + "_"
                                                      + Integer.toString(chunk) + ".data");
                    positions[nodeId][globalChunk] = 0;
                    indexes[nodeId][globalChunk] = new DataOutputStream(new BufferedOutputStream(new FileOutputStream(indexFile),
                                                                                                 ioBufferSize));
                    datas[nodeId][globalChunk] = new DataOutputStream(new BufferedOutputStream(new FileOutputStream(dataFile),
                                                                                               ioBufferSize));
                    globalChunk++;
                }
            }
        }

        logger.info("Reading items...");
        int count = 0;
        ExternalSorter<KeyValuePair> sorter = new ExternalSorter<KeyValuePair>(new KeyValuePairSerializer(),
                                                                               new KeyMd5Comparator(),
                                                                               internalSortSize,
                                                                               tempDir.getAbsolutePath(),
                                                                               ioBufferSize,
                                                                               numThreads,
                                                                               gzipIntermediate);
        JsonObjectIterator iter = new JsonObjectIterator(reader, storeDefinition);
        for(KeyValuePair pair: sorter.sorted(iter)) {
            byte[] keyMd5 = pair.getKeyMd5();
            List<Integer> partitionIds = this.routingStrategy.getPartitionList(pair.getKey());
            for(Integer partitionId: partitionIds) {
                int localChunkId = ReadOnlyUtils.chunk(keyMd5, numChunks);
                int chunk = localChunkId + partitionIdToChunkOffset[partitionId];
                int nodeId = partitionIdToNodeId[partitionId];
                datas[nodeId][chunk].writeInt(pair.getValue().length);
                datas[nodeId][chunk].write(pair.getValue());
                indexes[nodeId][chunk].write(keyMd5);
                indexes[nodeId][chunk].writeInt(positions[nodeId][chunk]);
                positions[nodeId][chunk] += pair.getValue().length + 4;
                checkOverFlow(chunk, positions[nodeId][chunk]);
            }
            count++;
        }

        logger.info(count + " items read.");

        // sort and write out
        logger.info("Closing all store files.");
        for(Node node: cluster.getNodes()) {
            for(int chunk = 0; chunk < numChunks * node.getNumberOfPartitions(); chunk++) {
                indexes[node.getId()][chunk].close();
                datas[node.getId()][chunk].close();
            }
        }
    }

    /**
     * V2 layout: chunk files are per (master partition, replica type, chunk),
     * written first to a temp directory, with consecutive entries that share
     * the top 8 bytes of MD5(key) collapsed into one multi-key record; the
     * files are then moved into the directory of the node owning each replica.
     */
    public void buildVersion2() throws IOException {
        logger.info("Building store " + storeDefinition.getName() + " for "
                    + cluster.getNumberOfPartitions() + " partitions, "
                    + storeDefinition.getReplicationFactor() + " replica types, " + numChunks
                    + " chunks per partitions per replica type and type "
                    + ReadOnlyStorageFormat.READONLY_V2);

        // Initialize files
        DataOutputStream[][] indexes = new DataOutputStream[cluster.getNumberOfPartitions()][];
        DataOutputStream[][] datas = new DataOutputStream[cluster.getNumberOfPartitions()][];
        int[][] positions = new int[cluster.getNumberOfPartitions()][];

        File tempDirectory = new File(Utils.notNull(System.getProperty("java.io.tmpdir")),
                                      "tempDir-" + Integer.toString(new Random().nextInt()));
        Utils.mkdirs(tempDirectory);

        for(int partitionId = 0; partitionId < cluster.getNumberOfPartitions(); partitionId++) {
            indexes[partitionId] = new DataOutputStream[storeDefinition.getReplicationFactor()
                                                        * numChunks];
            datas[partitionId] = new DataOutputStream[storeDefinition.getReplicationFactor()
                                                      * numChunks];
            positions[partitionId] = new int[storeDefinition.getReplicationFactor() * numChunks];

            int globalChunkId = 0;
            for(int repType = 0; repType < storeDefinition.getReplicationFactor(); repType++) {
                for(int chunk = 0; chunk < numChunks; chunk++) {
                    File indexFile = new File(tempDirectory, Integer.toString(partitionId) + "_"
                                                             + Integer.toString(repType) + "_"
                                                             + Integer.toString(chunk) + ".index");
                    File dataFile = new File(tempDirectory, Integer.toString(partitionId) + "_"
                                                            + Integer.toString(repType) + "_"
                                                            + Integer.toString(chunk) + ".data");
                    positions[partitionId][globalChunkId] = 0;
                    indexes[partitionId][globalChunkId] = new DataOutputStream(new BufferedOutputStream(new FileOutputStream(indexFile),
                                                                                                        ioBufferSize));
                    datas[partitionId][globalChunkId] = new DataOutputStream(new BufferedOutputStream(new FileOutputStream(dataFile),
                                                                                                      ioBufferSize));
                    globalChunkId++;
                }
            }
        }

        logger.info("Reading items...");
        ExternalSorter<KeyValuePair> sorter = new ExternalSorter<KeyValuePair>(new KeyValuePairSerializer(),
                                                                               new KeyMd5Comparator(),
                                                                               internalSortSize,
                                                                               tempDir.getAbsolutePath(),
                                                                               ioBufferSize,
                                                                               numThreads,
                                                                               gzipIntermediate);
        JsonObjectIterator iter = new JsonObjectIterator(reader, storeDefinition);
        int count = 0;

        // Per (master partition, global chunk): the pending record not yet
        // flushed, as (first 8 md5 bytes, serialized multi-key payload).
        HashMap<Pair<Integer, Integer>, Pair<byte[], byte[]>> previousElements = Maps.newHashMap();

        for(KeyValuePair currentElement: sorter.sorted(iter)) {
            List<Integer> partitionIds = this.routingStrategy.getPartitionList(currentElement.getKey());
            int masterPartition = partitionIds.get(0);
            int localChunkId = ReadOnlyUtils.chunk(currentElement.getKeyMd5(), numChunks);

            for(int replicaType = 0; replicaType < partitionIds.size(); replicaType++) {
                int globalChunkId = (replicaType * numChunks) + localChunkId;
                Pair<Integer, Integer> key = Pair.create(masterPartition, globalChunkId);
                if(!previousElements.containsKey(key)) {
                    // First element, lets write it to map
                    previousElements.put(key,
                                         Pair.create(ByteUtils.copy(currentElement.getKeyMd5(),
                                                                    0,
                                                                    2 * ByteUtils.SIZE_OF_INT),
                                                     generateFirstElement(currentElement)));
                } else {
                    Pair<byte[], byte[]> previousElement = previousElements.get(key);

                    // If the current element is same as previous element,
                    // append it...
                    if(ByteUtils.compare(previousElement.getFirst(),
                                         currentElement.getKeyMd5(),
                                         0,
                                         2 * ByteUtils.SIZE_OF_INT) == 0) {
                        short numKeys = ByteUtils.readShort(previousElement.getSecond(), 0);
                        ByteArrayOutputStream stream = new ByteArrayOutputStream();
                        DataOutputStream valueStream = new DataOutputStream(stream);
                        valueStream.writeShort(numKeys + 1);
                        // Append the previous tuples
                        valueStream.write(ByteUtils.copy(previousElement.getSecond(),
                                                         ByteUtils.SIZE_OF_SHORT,
                                                         previousElement.getSecond().length));
                        valueStream.writeInt(currentElement.getKey().length);
                        valueStream.writeInt(currentElement.getValue().length);
                        valueStream.write(currentElement.getKey());
                        valueStream.write(currentElement.getValue());

                        valueStream.flush();

                        previousElements.put(key, Pair.create(previousElement.getFirst(),
                                                              stream.toByteArray()));
                    } else {
                        // ...else, flush the previous element to disk
                        indexes[masterPartition][globalChunkId].write(previousElement.getFirst());
                        indexes[masterPartition][globalChunkId].writeInt(positions[masterPartition][globalChunkId]);
                        datas[masterPartition][globalChunkId].write(previousElement.getSecond());
                        positions[masterPartition][globalChunkId] += previousElement.getSecond().length;

                        // ...and add current element as previous element
                        previousElements.put(key,
                                             Pair.create(ByteUtils.copy(currentElement.getKeyMd5(),
                                                                        0,
                                                                        2 * ByteUtils.SIZE_OF_INT),
                                                         generateFirstElement(currentElement)));
                    }
                }
            }
            count++;
        }

        logger.info(count + " items read.");

        // If any element still left in previous elements, flush them out to
        // files
        for(Entry<Pair<Integer, Integer>, Pair<byte[], byte[]>> entry: previousElements.entrySet()) {
            int partitionId = entry.getKey().getFirst();
            int globalChunkId = entry.getKey().getSecond();
            byte[] keyMd5 = entry.getValue().getFirst();
            byte[] value = entry.getValue().getSecond();
            indexes[partitionId][globalChunkId].write(keyMd5);
            indexes[partitionId][globalChunkId].writeInt(positions[partitionId][globalChunkId]);
            datas[partitionId][globalChunkId].write(value);
        }

        // Create node folders
        File[] nodeDirs = new File[cluster.getNumberOfNodes()];
        for(Node node: cluster.getNodes()) {
            int nodeId = node.getId();

            // Create data directory
            File nodeDir = new File(outputDir, "node-" + Integer.toString(nodeId));
            nodeDir.mkdirs();

            // Add the data directory to the array
            nodeDirs[node.getId()] = nodeDir;

            // Create metadata file
            BufferedWriter writer = new BufferedWriter(new FileWriter(new File(nodeDir, ".metadata")));
            ReadOnlyStorageMetadata metadata = new ReadOnlyStorageMetadata();
            metadata.add(ReadOnlyStorageMetadata.FORMAT, ReadOnlyStorageFormat.READONLY_V2.getCode());
            writer.write(metadata.toJsonString());
            writer.close();
        }

        // Close everything
        logger.info("Closing all store files.");
        for(int partitionId = 0; partitionId < cluster.getNumberOfPartitions(); partitionId++) {
            for(int chunk = 0; chunk < numChunks * storeDefinition.getReplicationFactor(); chunk++) {
                indexes[partitionId][chunk].close();
                datas[partitionId][chunk].close();
            }
        }

        // Start moving files over to their correct node
        RoutingStrategy strategy = new RoutingStrategyFactory().updateRoutingStrategy(storeDefinition,
                                                                                      cluster);
        Map<Integer, Integer> replicaMapping = cluster.getPartitionIdToNodeIdMap();
        for(File file: tempDirectory.listFiles()) {
            String fileName = file.getName();
            if(fileName.matches("^[\\d]+_[\\d]+_[\\d]+\\.(data|index)")) {
                // File name is <partition>_<replicaType>_<chunk>.<ext>.
                String[] props = fileName.split("_");
                int partitionId = Integer.parseInt(props[0]);
                int replicaType = Integer.parseInt(props[1]);
                int nodeId = replicaMapping.get(strategy.getReplicatingPartitionList(partitionId)
                                                        .get(replicaType));
                Utils.move(file, new File(nodeDirs[nodeId], fileName));
            }
        }
    }

    /**
     * Serializes a single key/value pair as a V2 multi-key record with
     * numKeys = 1: [short numKeys][int keyLen][int valueLen][key][value].
     */
    private byte[] generateFirstElement(KeyValuePair currentPair) throws IOException {
        ByteArrayOutputStream stream = new ByteArrayOutputStream();
        DataOutputStream valueStream = new DataOutputStream(stream);
        valueStream.writeShort(1);
        valueStream.writeInt(currentPair.getKey().length);
        valueStream.writeInt(currentPair.getValue().length);
        valueStream.write(currentPair.getKey());
        valueStream.write(currentPair.getValue());
        valueStream.flush();
        return stream.toByteArray();
    }

    /* Check if the position has exceeded Integer.MAX_VALUE (an int position
     * that wrapped past the maximum is negative) */
    private void checkOverFlow(int chunk, int position) {
        if(position < 0)
            throw new VoldemortException("Chunk overflow: chunk " + chunk + " has exceeded "
                                         + Integer.MAX_VALUE + " bytes.");
    }

    /**
     * Serializer used by the external sorter: encodes a pair as
     * [int keyLen][int valueLen][key][value] and recomputes the MD5 on read.
     */
    private static class KeyValuePairSerializer implements Serializer<KeyValuePair> {

        private final MessageDigest digest = ByteUtils.getDigest("MD5");

        public byte[] toBytes(KeyValuePair pair) {
            byte[] key = pair.getKey();
            byte[] value = pair.getValue();
            byte[] bytes = new byte[key.length + value.length + 8];
            ByteUtils.writeInt(bytes, key.length, 0);
            ByteUtils.writeInt(bytes, value.length, 4);
            System.arraycopy(key, 0, bytes, 8, key.length);
            System.arraycopy(value, 0, bytes, 8 + key.length, value.length);
            return bytes;
        }

        public KeyValuePair toObject(byte[] bytes) {
            int keySize = ByteUtils.readInt(bytes, 0);
            int valueSize = ByteUtils.readInt(bytes, 4);
            byte[] key = new byte[keySize];
            byte[] value = new byte[valueSize];
            System.arraycopy(bytes, 8, key, 0, keySize);
            System.arraycopy(bytes, 8 + keySize, value, 0, valueSize);
            byte[] md5 = digest.digest(key);
            digest.reset();
            return new KeyValuePair(key, md5, value);
        }

    }

    /**
     * Iterates key/value objects from the JSON reader, serializing (and
     * optionally compressing) each per the store definition. A key with no
     * matching value is an error.
     */
    private static class JsonObjectIterator extends AbstractIterator<KeyValuePair> {

        private final JsonReader reader;
        private final Serializer<Object> keySerializer;
        private final Serializer<Object> valueSerializer;
        private final MessageDigest digest;
        private final SerializerDefinition keySerializerDefinition;
        private final SerializerDefinition valueSerializerDefinition;
        private CompressionStrategy valueCompressor;
        private CompressionStrategy keyCompressor;

        @SuppressWarnings("unchecked")
        public JsonObjectIterator(JsonReader reader, StoreDefinition storeDefinition) {
            SerializerFactory factory = new DefaultSerializerFactory();

            this.reader = reader;
            this.digest = ByteUtils.getDigest("MD5");
            this.keySerializerDefinition = storeDefinition.getKeySerializer();
            this.valueSerializerDefinition = storeDefinition.getValueSerializer();
            this.keySerializer = (Serializer<Object>) factory.getSerializer(storeDefinition.getKeySerializer());
            this.valueSerializer = (Serializer<Object>) factory.getSerializer(storeDefinition.getValueSerializer());
            this.keyCompressor = new CompressionStrategyFactory().get(keySerializerDefinition.getCompression());
            this.valueCompressor = new CompressionStrategyFactory().get(valueSerializerDefinition.getCompression());
        }

        @Override
        protected KeyValuePair computeNext() {
            try {
                Object key = reader.read();
                Object value = null;
                try {
                    value = reader.read();
                } catch(EndOfFileException e) {
                    throw new VoldemortException("Invalid file: reached end of file with key but no matching value.",
                                                 e);
                }
                byte[] keyBytes = keySerializer.toBytes(key);
                byte[] valueBytes = valueSerializer.toBytes(value);

                // compress key and values if required
                if(keySerializerDefinition.hasCompression()) {
                    keyBytes = keyCompressor.deflate(keyBytes);
                }

                if(valueSerializerDefinition.hasCompression()) {
                    valueBytes = valueCompressor.deflate(valueBytes);
                }

                byte[] keyMd5 = digest.digest(keyBytes);
                digest.reset();

                return new KeyValuePair(keyBytes, keyMd5, valueBytes);
            } catch(EndOfFileException e) {
                return endOfData();
            } catch(IOException e) {
                throw new VoldemortException("Unable to deflate key/value pair.", e);
            }
        }

    }

    /** Orders pairs by the raw bytes of MD5(key) — the sort key of the build. */
    public static class KeyMd5Comparator implements Comparator<KeyValuePair> {

        public int compare(KeyValuePair kv1, KeyValuePair kv2) {
            return ByteUtils.compare(kv1.getKeyMd5(), kv2.getKeyMd5());
        }

    }

    /** Immutable triple of serialized key, its MD5 digest, and serialized value. */
    private static class KeyValuePair {

        private final byte[] key;
        private final byte[] keyMd5;
        private final byte[] value;

        public KeyValuePair(byte[] key, byte[] keyMd5, byte[] value) {
            this.key = key;
            this.keyMd5 = keyMd5;
            this.value = value;
        }

        public byte[] getKey() {
            return key;
        }

        public byte[] getKeyMd5() {
            return this.keyMd5;
        }

        public byte[] getValue() {
            return value;
        }

        @Override
        public String toString() {
            return new String("Key - " + new String(this.key) + " - Value - "
                              + new String(this.value) + " - KeyMD5 - " + new String(this.keyMd5));
        }

    }
}
<filename>out-tsc/src/elements/shared-animations.js
/* Bouncing animations*/
// Registers two legacy <dom-module> style holders ("fade-animations" and
// "scale-animations") whose <style> blocks declare shared @keyframes rules.
// Appending the template content to document.head makes the keyframe names
// usable by any element on the page.
import '@polymer/polymer';

const documentContainer = document.createElement('template');

// NOTE(review): the HTML below is injected verbatim at runtime; keep the
// keyframe names in sync with the components that reference them.
documentContainer.innerHTML = `<dom-module id="fade-animations">
  <template>
    <style>
      @keyframes fadeInUp {
        from {
          opacity: 0;
          transform: translate3d(0, 100%, 0);
        }
        to {
          opacity: 1;
          transform: none;
        }
      }
      @keyframes fadeInAttendeesCount {
        from {
          opacity: 0;
          transform: translate3d(200px, 170px, 0);
        }
        to {
          transform: translate3d(0, 0, 0);
        }
      }
      @keyframes fadeInSessionsCount {
        from {
          opacity: 0;
          transform: translate3d(200px, -170px, 0px);
        }
        to {
          transform: translate3d(0, 0, 0);
        }
      }
      @keyframes fadeInTracksCount {
        from {
          opacity: 0;
          transform: translate3d(180px, -170px, 0px);
        }
        to {
          transform: translate3d(0, 0, 0);
        }
      }
      @keyframes fadeInDaysCount {
        from {
          opacity: 0;
          transform: translate3d(180px, 170px, 0px);
        }
        to {
          transform: translate3d(0, 0, 0);
        }
      }
    </style>
  </template>
</dom-module><dom-module id="scale-animations">
  <template>
    <style>
      @keyframes grow {
        from {
          transform: scale(0);
        }
        to {
          transform: scale(1);
        }
      }
    </style>
  </template>
</dom-module>`;

document.head.appendChild(documentContainer.content);
//# sourceMappingURL=shared-animations.js.map
// Reports whether a real, usable DOM is available: a global `window` whose
// document can create elements (rules out bare SSR environments and partial
// DOM shims).
export const canUseDOM = () => {
  if (typeof window === "undefined") {
    return false;
  }
  const doc = window.document;
  return Boolean(doc && doc.createElement);
};

// Evaluated once at module load; true only when running in a browser.
export const isBrowser = canUseDOM();
#!/usr/bin/env bash
# Run a nice end to end test, covering all our formats, and all our transports,
# and many different schemas.
#
# This assumes a prime/router is already running and available on your local host
# This assumes batch is on a 1-minute timer.
#
# This script is very ugly and repetitive. Replace with kotlin.
#
RED='\033[0;31m'
BLUE='\033[0;34m'
GREEN='\033[0;32m'
NC='\033[0m' # No Color

# Use this variable to point to a different hostname on which your 'local' API runs
# This can be useful if you are running the end-to-end test in a container
# as opposed to on your actual localhost (e.g. the builder container)
# Default Value (i.e. if unspecified): localhost
PRIME_RS_API_ENDPOINT_HOST=${PRIME_RS_API_ENDPOINT_HOST:-localhost}

outputdir=./build/csv_test_files
starter_schema=primedatainput/pdi-covid-19
database=prime_data_hub
testfile1=$outputdir/prime1.csv
testfile2=$outputdir/prime2.csv
rows=20

printf "${BLUE}End to end test: generate 2 fake data files, split/transform to 4 schemas, in 3 formats, then merge, transport using 2 transports${NC}\n\n"

mkdir -p $outputdir

# Generate two fake data files
# Dev note: early on, I had problems with parsing output from ./prime before the file was actually created.
# So I got in the habit of grabbing the output, and parsing it separately.
printf "${BLUE}Generating fake data with $rows rows${NC}\n"
fake1="./prime data --input-fake $rows --input-schema $starter_schema --output $testfile1 --target-counties=CSV,HL7_BATCH,HL7,REDOX --target-states=PM"
echo $fake1
text=$($fake1)
printf "$text\n"

printf "${BLUE}Generating second fake data file with $rows rows${NC}\n"
fake2="./prime data --input-fake $rows --input-schema $starter_schema --output $testfile2 --target-counties=CSV,HL7_BATCH,HL7,REDOX --target-states=PM"
echo $fake2
text=$($fake2)
printf "$text\n"

printf "${BLUE}Post first fake report to prime hub${NC}\n"
boilerplate_front="curl --silent -X POST -H client:simple_report -H Content-Type:text/csv "
boilerplate_back="http://${PRIME_RS_API_ENDPOINT_HOST?}:7071/api/reports"
echo Posting $testfile1 to reports endpoint
$boilerplate_front --data-binary @$testfile1 $boilerplate_back | cat > $testfile1.json

# Get the report_id from the output
# Fix: use the parenthesized print() form so this works whether `python` is
# Python 2 or Python 3 (the bare `print x` statement is a SyntaxError on 3).
report_id1=$(cat $testfile1.json | python <( echo '
import sys, json
print(json.loads(sys.stdin.read())["id"])
' ))
if [ -z $report_id1 ] ; then
  printf "${RED}Post to prime hub failed, json response in $testfile1.json:${NC}\n"
  cat $testfile1.json
  exit 1
else
  printf "${GREEN}SUCCESS: Submitted report_id=$report_id1 ${NC}(json response in $testfile1.json)\n"
fi

printf "${BLUE}Post second fake report to prime hub${NC}\n"
echo Posting $testfile2 to reports endpoint
$boilerplate_front --data-binary @$testfile2 $boilerplate_back | cat > $testfile2.json

# Get the report_id from the output
report_id2=$(cat $testfile2.json | python <( echo '
import sys, json
print(json.loads(sys.stdin.read())["id"])
' ))
if [ -z $report_id2 ] ; then
  printf "${RED}Post to prime hub failed, json response in $testfile2.json:${NC}\n"
  cat $testfile2.json
  exit 1
else
  printf "${GREEN}SUCCESS: Submitted report_id=$report_id2 ${NC}(json response in $testfile2.json)\n"
fi

# Assume Batch step is on a 1 minute timer.
printf "${BLUE}Sleeping for 75 seconds to test Batching timer${NC}\n"
sleep 75

(( successCount = rows / 4))

# Consistency fix: use the declared $database variable instead of repeating the
# literal database name in every psql invocation below.
printf "\n\n${BLUE}Count item lineages from $report_id1 ${NC}\n"
printf "${BLUE}SUCCESS is 9 rows, all with item_count = $successCount${NC}\n"
psql $database <<EOF
select count(*) item_count, IL.child_report_id, RF.receiving_org, RF.receiving_org_svc, A.action_name, schema_name
from item_lineage as IL
join report_file as RF on IL.child_report_id = RF.report_id
join action as A on A.action_id = RF.action_id
where receiving_org_svc != 'hl7-test'
and item_lineage_id in (select item_descendants('$report_id1'))
group by IL.child_report_id, RF.receiving_org, RF.receiving_org_svc, A.action_name, schema_name
order by RF.receiving_org_svc, A.action_name;
EOF

printf "\n${BLUE}Count item lineages for hl7-test from $report_id1 ${NC}\n"
printf "${BLUE}SUCCESS is 3 rows, all with item_count = $successCount${NC}\n"
psql $database <<EOF
select count(*) item_count, A.action_name, schema_name
from item_lineage as IL
join report_file as RF on IL.child_report_id = RF.report_id
join action as A on A.action_id = RF.action_id
where receiving_org_svc = 'hl7-test'
and item_lineage_id in (select item_descendants('$report_id1'))
group by A.action_name, schema_name
order by A.action_name;
EOF

printf "\n${BLUE}Count item lineages from $report_id2 ${NC}\n"
printf "${BLUE}SUCCESS is 9 rows, all with item_count = $successCount${NC}\n"
psql $database <<EOF
select count(*) item_count, IL.child_report_id, RF.receiving_org, RF.receiving_org_svc, A.action_name, schema_name
from item_lineage as IL
join report_file as RF on IL.child_report_id = RF.report_id
join action as A on A.action_id = RF.action_id
where receiving_org_svc != 'hl7-test'
and item_lineage_id in (select item_descendants('$report_id2'))
group by IL.child_report_id, RF.receiving_org, RF.receiving_org_svc, A.action_name, schema_name
order by RF.receiving_org_svc, A.action_name;
EOF

printf "\n${BLUE}Count item lineages for hl7-test from $report_id2 ${NC}\n"
printf "${BLUE}SUCCESS is 3 rows, all with item_count = $successCount${NC}\n"
psql $database <<EOF
select count(*) item_count, A.action_name, schema_name
from item_lineage as IL
join report_file as RF on IL.child_report_id = RF.report_id
join action as A on A.action_id = RF.action_id
where receiving_org_svc = 'hl7-test'
and item_lineage_id in (select item_descendants('$report_id2'))
group by A.action_name, schema_name
order by A.action_name;
EOF

(( mergeSuccessCount = successCount * 2 ))

printf "\n${BLUE}Now show merging, by looking at level descendants for $report_id1${NC}\n"
printf "${BLUE}SUCCESS is 8 rows, all with item_count = $mergeSuccessCount${NC}\n"
psql $database <<EOF
select sum(item_count) item_count, A.action_name, receiving_org_svc AS rcvr_service, schema_name
from report_file as RF
join action as A ON A.action_id = RF.action_id
where RF.report_id in (select report_descendants('$report_id1'))
and action_name in ('send', 'batch')
group by receiving_org_svc, A.action_name, schema_name
order by receiving_org_svc, A.action_name;
EOF

printf "\n${BLUE}Now print out report level descendants for $report_id2${NC}\n"
printf "${BLUE}SUCCESS is 8 rows, all with item_count = $mergeSuccessCount${NC}\n"
psql $database <<EOF
select sum(item_count) item_count, A.action_name, receiving_org_svc AS rcvr_service, schema_name
from report_file as RF
join action as A ON A.action_id = RF.action_id
where RF.report_id in (select report_descendants('$report_id2'))
and action_name in ('send', 'batch')
group by receiving_org_svc, A.action_name, schema_name
order by receiving_org_svc, A.action_name;
EOF

exit 0
<gh_stars>10-100
require 'test_helper'

# Unit tests for the Project model's name-presence validation.
class ProjectTest < ActiveSupport::TestCase
  # A project built with no attributes must fail to save (name is required).
  test "should not save project without name" do
    project = Project.new
    assert_not project.save
  end

  # Supplying a name is sufficient for a valid record.
  # NOTE(review): '<NAME>' looks like a redacted placeholder from data
  # scrubbing — confirm the intended fixture value.
  test "should save project with name" do
    project = Project.new(name: '<NAME>')
    assert project.save
  end
end
{ "article": { "title": "", "author": { "firstName": "", "lastName": "" } } }
<gh_stars>0
# Configuration file for the Sphinx documentation builder.
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html

# -- Path setup --------------------------------------------------------------

import datetime

import sphinx_rtd_theme

# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))


# -- Project information -----------------------------------------------------

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = u'1.0'
# The full version, including alpha/beta/rc tags.
release = u'1.0'

project = 'GLPI JSON Protocol'
# Copyright year range is regenerated at build time from the current year.
thisyear = datetime.datetime.now().year
copyright = u'2016-%s, GLPI Project, Teclib\'' % thisyear
author = u'GLPI Project, Teclib\''

# -- General configuration ---------------------------------------------------

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    'sphinx_rtd_theme',
    'sphinx-prompt',
    'sphinx_substitution_extensions',
    'sphinx.ext.todo',
    'sphinx.ext.ifconfig',
]

# Render .. todo:: directives in the built docs instead of hiding them.
todo_include_todos = True

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = []

# -- Options for HTML output -------------------------------------------------

# The theme to use for HTML and HTML Help pages.  See the documentation for
# a list of builtin themes.
#
html_theme = "sphinx_rtd_theme"
html_logo = None

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']

# If true, links to the reST sources are added to the pages.
#
# html_show_sourcelink = True
html_show_sourcelink = False

html_theme_options = {
    #'style_nav_header_background': 'white',
    'logo_only': False,
    # Toc options
    'collapse_navigation': True,
    'sticky_navigation': True,
    'navigation_depth': 3,
    'includehidden': True,
    'titles_only': False,
    # Misc options
    'prev_next_buttons_location': 'both',
}

html_favicon = '_static/images/favicon.ico'

# Disable smartquotes
smartquotes = False
import React, { Component } from "react";
import Slider from "./components/navigation/Slider";
import Gallery from "./components/navigation/Gallery";
import "./App.css";
import navData from "./api/navigation.json";
import request from "superagent";

// Root component: renders the Gallery and the Slider, feeding the Slider the
// statically-imported navigation data.
// NOTE(review): the fetch in componentDidMount only logs its result — nothing
// is written into this.state.data, so the `data` state and the `request`
// import are currently unused. Confirm whether the fetch path was meant to
// replace the static navData import.
class App extends Component {
  state = {
    data: []
  };

  componentDidMount() {
    console.log(navData);

    // Resolve on 2xx responses, reject otherwise.
    function status(response) {
      if (response.status >= 200 && response.status < 300) {
        return Promise.resolve(response)
      } else {
        return Promise.reject(new Error(response.statusText))
      }
    }

    function json(response) {
      return response.json()
    }

    // Fetch the navigation data; the parsed JSON is only logged (see note
    // above) and a plain `function` is used, so `this` is not the component.
    fetch('./api/navigation.json')
      .then(status)
      .then(json)
      .then(function (data) {
        console.log('Request succeeded with JSON response', data);
      }).catch(function (error) {
        console.log('Request failed', error);
      });

    // request
    //   .get("./navigation.json")
    //   .then(response => {
    //     if (response.status == 200) {
    //       console.log(response);
    //       return response.json;
    //     } else {
    //       console.log("navData here", navData);
    //       this.setState({
    //         data: navData
    //       });
    //       console.log("navData here", this.state.data);
    //       return Promise.reject("something went wrong!");
    //     }
    //   })
    //   .then(data => console.log("data is", data))
    //   .catch(error => console.log("error is", error));
  }

  render() {
    return (
      <div className="App">
        {/* <h1>Let's Put slider Here</h1> */}
        <Gallery />
        <Slider navData={navData}/>
      </div>
    );
  }
}

export default App;
#!/bin/bash
# STIG module scaffold for V0038511 (RHEL-06-000082, IPv4 forwarding).
# NOTE(review): validation/remediation logic is not implemented yet — the
# module unconditionally records its own stigid as an error (see errors=()
# below) so it reports "Not yet implemented!".

# Module specific variables go here
#  Files: file=/path/to/file
#  Arrays: declare -a array_name
#  Strings: foo="bar"
#  Integers: x=9


###############################################
# Bootstrapping environment setup
###############################################

# Get our working directory
cwd="$(pwd)"

# Define our bootstrapper location
bootstrap="${cwd}/tools/bootstrap.sh"

# Bail if it cannot be found
if [ ! -f ${bootstrap} ]; then
  echo "Unable to locate bootstrap; ${bootstrap}" && exit 1
fi

# Load our bootstrap
source ${bootstrap}


###############################################
# Metrics start
###############################################

# Get EPOCH
s_epoch="$(gen_epoch)"

# Create a timestamp
timestamp="$(gen_date)"

# Whos is calling? 0 = singular, 1 is as group
caller=$(ps $PPID | grep -c stigadm)


###############################################
# Perform restoration
###############################################

# If ${restore} = 1 go to restoration mode
if [ ${restore} -eq 1 ]; then
  report "Not yet implemented" && exit 1
fi


###############################################
# STIG validation/remediation
###############################################

# Module specific validation code should go here
#  Errors should go in ${errors[@]} array (which on remediation get handled)
#  All inspected items should go in ${inspected[@]} array
errors=("${stigid}")

# If ${change} = 1
#if [ ${change} -eq 1 ]; then

  # Create the backup env
  #backup_setup_env "${backup_path}"

  # Create a backup (configuration output, file/folde permissions output etc
  #bu_configuration "${backup_path}" "${author}" "${stigid}" "$(echo "${array_values[@]}" | tr ' ' '\n')"
  #bu_file "${backup_path}" "${author}" "${stigid}" "${file}"
  #if [ $? -ne 0 ]; then

    # Stop, we require a backup
    #report "Unable to create backup" && exit 1
  #fi

  # Iterate ${errors[@]}
  #for error in ${errors[@]}; do

    # Work to remediate ${error} should go here
  #done
#fi

# Remove dupes
#inspected=( $(remove_duplicates "${inspected[@]}") )


###############################################
# Results for printable report
###############################################

# If ${#errors[@]} > 0
if [ ${#errors[@]} -gt 0 ]; then

  # Set ${results} error message
  #results="Failed validation"  UNCOMMENT ONCE WORK COMPLETE!
  results="Not yet implemented!"
fi

# Set ${results} passed message
[ ${#errors[@]} -eq 0 ] && results="Passed validation"


###############################################
# Report generation specifics
###############################################

# Apply some values expected for report footer
[ ${#errors[@]} -eq 0 ] && passed=1 || passed=0
[ ${#errors[@]} -gt 0 ] && failed=1 || failed=0

# Calculate a percentage from applied modules & errors incurred
percentage=$(percent ${passed} ${failed})

# If the caller was only independant
if [ ${caller} -eq 0 ]; then

  # Show failures
  [ ${#errors[@]} -gt 0 ] && print_array ${log} "errors" "${errors[@]}"

  # Provide detailed results to ${log}
  if [ ${verbose} -eq 1 ]; then

    # Print array of failed & validated items
    [ ${#inspected[@]} -gt 0 ] && print_array ${log} "validated" "${inspected[@]}"
  fi

  # Generate the report
  report "${results}"

  # Display the report
  cat ${log}
else

  # Since we were called from stigadm
  module_header "${results}"

  # Show failures
  [ ${#errors[@]} -gt 0 ] && print_array ${log} "errors" "${errors[@]}"

  # Provide detailed results to ${log}
  if [ ${verbose} -eq 1 ]; then

    # Print array of failed & validated items
    [ ${#inspected[@]} -gt 0 ] && print_array ${log} "validated" "${inspected[@]}"
  fi

  # Finish up the module specific report
  module_footer
fi


###############################################
# Return code for larger report
###############################################

# Return an error/success code (0/1)
exit ${#errors[@]}


# Date: 2018-09-18
#
# Severity: CAT-II
# Classification: UNCLASSIFIED
# STIG_ID: V0038511
# STIG_Version: SV-50312r2
# Rule_ID: RHEL-06-000082
#
# OS: Red_Hat
# Version: 6
# Architecture:
#
# Title: IP forwarding for IPv4 must not be enabled, unless the system is a router.
# Description: IP forwarding permits the kernel to forward packets from one network interface to another. The ability to forward packets between two networks is only appropriate for systems acting as routers.
<filename>src/pages/Classes/index.tsx import { useRequest } from 'ahooks'; import React from 'react'; import { useNavigate } from 'react-router-dom'; import Layout from '@/components/Layout'; import { DB } from '@/utils/apis/dbConfig'; import { getData } from '@/utils/apis/getData'; import { staleTime } from '@/utils/constant'; import { Title } from '../titleConfig'; import ClassBar from './ClassBar'; import s from './index.scss'; interface ClassType { _id: string; class: string; count: number; } const Classes: React.FC = () => { const navigate = useNavigate(); const { data, loading } = useRequest(getData, { defaultParams: [DB.Class], retryCount: 3, cacheKey: `Classes-${DB.Class}`, staleTime }); return ( <Layout title={Title.Classes} loading={loading} className={s.classBox} rows={8}> {data?.data.map((item: ClassType) => ( <ClassBar className={s.classItem} key={item._id} content={item.class} num={item.count} onClick={() => navigate(`/artDetail?class=${encodeURIComponent(item.class)}`)} /> ))} </Layout> ); }; export default Classes;
require 'grape-apiary'
public List<Number> findMaxValuesPerRow(Matrix matrix) { List<Number> maxValues = new ArrayList<>(); int rowSize = matrix.getRowSize(); int columnSize = matrix.getColumnSize(); for (int i = 0; i < rowSize; i++) { Number max = matrix.getValue(i, 0); // Initialize max with the first value in the row for (int j = 1; j < columnSize; j++) { Number currentValue = matrix.getValue(i, j); if (currentValue.doubleValue() > max.doubleValue()) { max = currentValue; // Update max if a larger value is found } } maxValues.add(max); // Add the maximum value of the current row to the list } return maxValues; }
set -eux

# Create a payment key pair and derive its address for wallet "$1" under
# ~/$BLOCKCHAIN_PREFIX, but only if the verification key does not already
# exist — re-running is a no-op (idempotent).
if [ ! -f ~/$BLOCKCHAIN_PREFIX/$1.vkey ]; then
  # Generate the verification/signing key pair.
  cardano-cli address key-gen --verification-key-file ~/$BLOCKCHAIN_PREFIX/$1.vkey --signing-key-file ~/$BLOCKCHAIN_PREFIX/$1.skey
  # Build the payment address for the target network ($BLOCKCHAIN flags).
  cardano-cli address build $BLOCKCHAIN --payment-verification-key-file ~/$BLOCKCHAIN_PREFIX/$1.vkey --out-file ~/$BLOCKCHAIN_PREFIX/$1.addr
fi
import React from 'react';

// Presentational SVG icon: a circular blue-gradient badge containing a white
// check/arrow glyph. Size and color come from props; `color` is currently
// accepted but unused (the gradient is hard-coded).
export default class ArrowDown extends React.Component {
  render() {
    const { width, height, color } = this.props;
    return (
      <svg
        width={width}
        height={height}
        viewBox="0 0 140 140"
        version="1.1"
      >
        <defs>
          {/* Vertical blue gradient used as the badge fill. */}
          <linearGradient x1="50%" y1="100%" x2="50%" y2="0%" id="linearGradient-1">
            <stop stopColor="#4482FC" offset="0%"></stop>
            <stop stopColor="#20B3FF" offset="100%"></stop>
          </linearGradient>
        </defs>
        <g id="Icons" stroke="none" strokeWidth="1" fill="none" fillRule="evenodd">
          <g transform="translate(-5553.000000, -1033.000000)" fillRule="nonzero" id="icon_Timeline_3">
            <g transform="translate(5553.000000, 1033.000000)">
              {/* Gradient-filled circular background. */}
              <g id="Group" fill="url(#linearGradient-1)">
                <circle id="椭圆_1_拷贝_5-2" cx="70" cy="70" r="70"></circle>
              </g>
              {/* White glyph paths exported from the design tool — do not edit by hand. */}
              <g id="组_1_拷贝" transform="translate(36.102500, 43.143333)" fill="#FFFFFF">
                <path d="M66.99,7.57166667 C68.1495389,8.66199685 68.807107,10.1829331 68.807107,11.7745833 C68.807107,13.3662335 68.1495389,14.8871698 66.99,15.9775 L31.8266667,49.6008333 C29.3565708,51.9232982 25.5059292,51.9232982 23.0358333,49.6008333 C21.8747209,48.5113774 21.2160393,46.9901153 21.2160393,45.3979167 C21.2160393,43.8057181 21.8747209,42.284456 23.0358333,41.195 L58.1991667,7.56583333 C60.6708034,5.24500684 64.5214455,5.24756201 66.99,7.57166667 Z" id="圆角矩形_563_拷贝"></path>
                <path d="M16.4325,26.5125 L31.815,41.2241667 C32.9761124,42.3136226 33.634794,43.8348847 33.634794,45.4270833 C33.634794,47.0192819 32.9761124,48.540544 31.815,49.63 C29.3449041,51.9524649 25.4942626,51.9524649 23.0241667,49.63 L7.64166667,34.9241667 C6.47866658,33.8345565 5.81876187,32.3120145 5.81876187,30.7183333 C5.81876187,29.1246522 6.47866658,27.6021102 7.64166667,26.5125 C10.1117626,24.1900351 13.9624041,24.1900351 16.4325,26.5125 Z" id="圆角矩形_563_拷贝_2"></path>
              </g>
            </g>
          </g>
        </g>
      </svg>
    )
  }
}
const fse = require("fs-extra");
const path = require("path");
const args = require("yargs").argv;
const TruffleConfig = require("@truffle/config");
const config = TruffleConfig.detect();
const ethers = require("ethers");

// Syncs deployed-contract metadata from truffle build artifacts into the
// react app's deployed_contracts.json, merging with any existing entries for
// the current network. Run with: node <script> --network <name>
const main = async (callback) => {
  // take contracts from artifacts and turn them into the format expected in
  // the react app file, deployed_contracts.json
  async function builtContracts(artifacts, buildDirectory, networkId) {
    const contracts = {};
    artifacts.forEach((file) => {
      const artifact = JSON.parse(fse.readFileSync(path.join(buildDirectory, file)));
      // Fix: skip artifacts that were never deployed to this network —
      // indexing into the missing `networks[networkId]` entry used to throw.
      const deployment = artifact.networks[networkId];
      if (deployment) {
        contracts[artifact.contractName] = {
          address: deployment.address,
          abi: artifact.abi,
        };
      }
    });
    return contracts;
  }

  const network = args.network;
  const provider = new ethers.providers.JsonRpcProvider(
    config.networks[network].url
  );
  const { chainId: networkId } = await provider.getNetwork();

  const buildDirectory = '../react-app/src/contracts/truffle/';
  const contractsFile = '../react-app/src/contracts/deployed_contracts.json';
  const artifacts = fse.readdirSync(buildDirectory);
  const newContracts = await builtContracts(artifacts, buildDirectory, networkId);

  if (fse.existsSync(contractsFile)) {
    const deployedContracts = JSON.parse(await fse.readFile(contractsFile));
    if ([networkId] in deployedContracts) {
      // Merge new contracts over any previously recorded ones for this network.
      const merged = {
        ...deployedContracts[networkId][network].contracts,
        ...newContracts
      };
      deployedContracts[networkId][network].contracts = merged;
      fse.writeFileSync(contractsFile, JSON.stringify(deployedContracts, null, 2));
    } else {
      // First deployment recorded for this network id.
      deployedContracts[networkId] = {
        [network]: {
          name: network,
          chainId: networkId,
          contracts: newContracts
        }
      }
      fse.writeFileSync(contractsFile, JSON.stringify(deployedContracts, null, 2));
    }
  } else {
    // No file yet: create it from scratch.
    const reactContracts = {};
    reactContracts[networkId] = {
      [network]: {
        name: network,
        chainId: networkId,
        contracts: newContracts
      }
    };
    fse.writeFileSync(contractsFile, JSON.stringify(reactContracts, null, 2));
  }
}

// Fix: surface failures instead of leaving an unhandled promise rejection.
main().catch((err) => {
  console.error(err);
  process.exitCode = 1;
});
// Copyright 2017 the V8 project authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef V8_OBJECTS_MAP_INL_H_ #define V8_OBJECTS_MAP_INL_H_ #include "src/field-type.h" #include "src/objects/map.h" // Has to be the last include (doesn't have include guards): #include "src/objects/object-macros.h" namespace v8 { namespace internal { CAST_ACCESSOR(Map) InterceptorInfo* Map::GetNamedInterceptor() { DCHECK(has_named_interceptor()); FunctionTemplateInfo* info = GetFunctionTemplateInfo(); return InterceptorInfo::cast(info->named_property_handler()); } InterceptorInfo* Map::GetIndexedInterceptor() { DCHECK(has_indexed_interceptor()); FunctionTemplateInfo* info = GetFunctionTemplateInfo(); return InterceptorInfo::cast(info->indexed_property_handler()); } bool Map::IsInplaceGeneralizableField(PropertyConstness constness, Representation representation, FieldType* field_type) { if (FLAG_track_constant_fields && FLAG_modify_map_inplace && (constness == kConst)) { // kConst -> kMutable field generalization may happen in-place. return true; } if (representation.IsHeapObject() && !field_type->IsAny()) { return true; } return false; } int NormalizedMapCache::GetIndex(Handle<Map> map) { return map->Hash() % NormalizedMapCache::kEntries; } bool NormalizedMapCache::IsNormalizedMapCache(const HeapObject* obj) { if (!obj->IsFixedArray()) return false; if (FixedArray::cast(obj)->length() != NormalizedMapCache::kEntries) { return false; } #ifdef VERIFY_HEAP if (FLAG_verify_heap) { reinterpret_cast<NormalizedMapCache*>(const_cast<HeapObject*>(obj)) ->NormalizedMapCacheVerify(); } #endif return true; } } // namespace internal } // namespace v8 #include "src/objects/object-macros-undef.h" #endif // V8_OBJECTS_MAP_INL_H_
#!/bin/bash
# Forward the Selenium hub (4444), the node's noVNC console (6080), and the
# node's Appium port (4723) from the cluster to localhost, then block forever.
# Ctrl-C (SIGINT) tears down all three background port-forward jobs.
trap 'kill %1; kill %2; kill %3' SIGINT
kubectl port-forward -n selenium svc/selenium-hub 4444:4444 & \
kubectl port-forward -n selenium svc/selenium-node-s7-droid-9-0 6080:6080 & \
#kubectl port-forward -n selenium svc/appium 4723:4723 & \
kubectl port-forward -n selenium svc/selenium-node-s7-droid-9-0 4723:4723 & \
sleep infinity
# Build the current package into an EOSIO-targeted WebAssembly binary with
# TinyGo. NOTE(review): flag semantics below are per TinyGo's docs — confirm
# against the pinned TinyGo version:
#   -gc=leaking           allocator that never frees (smallest runtime)
#   -scheduler=none       no goroutine scheduler
#   -opt 0                no optimization; -strip=false keeps debug info
#   -tags=math_big_pure_go pure-Go big-int implementation
tinygo build -gc=leaking -target eosio -wasm-abi=generic -scheduler=none -opt 0 -tags=math_big_pure_go -gen-code=true -strip=false -o test.wasm .
#!/usr/bin/env python # encoding: utf-8 # # Copyright (c) 2010 <NAME>. All rights reserved. # """Converting from degrees to radians. """ #end_pymotw_header import math print '{:^7} {:^7} {:^7}'.format('Degrees', 'Radians', 'Expected') print '{:-^7} {:-^7} {:-^7}'.format('', '', '') for deg, expected in [ ( 0, 0), ( 30, math.pi/6), ( 45, math.pi/4), ( 60, math.pi/3), ( 90, math.pi/2), (180, math.pi), (270, 3/2.0 * math.pi), (360, 2 * math.pi), ]: print '{:7d} {:7.2f} {:7.2f}'.format(deg, math.radians(deg), expected, )
package io.github.jhipster.sample.service;

import io.github.jhipster.sample.service.dto.BankAccountDTO;
import java.util.List;
import java.util.Optional;

/**
 * Service Interface for managing {@link io.github.jhipster.sample.domain.BankAccount}.
 */
public interface BankAccountService {
    /**
     * Save a bankAccount.
     *
     * @param bankAccountDTO the entity to save.
     * @return the persisted entity.
     */
    BankAccountDTO save(BankAccountDTO bankAccountDTO);

    /**
     * Partially updates a bankAccount.
     *
     * @param bankAccountDTO the entity to update partially.
     * @return the persisted entity, or an empty {@link Optional} if no entity
     *         with the given id exists.
     */
    Optional<BankAccountDTO> partialUpdate(BankAccountDTO bankAccountDTO);

    /**
     * Get all the bankAccounts.
     *
     * @return the list of entities.
     */
    List<BankAccountDTO> findAll();

    /**
     * Get the "id" bankAccount.
     *
     * @param id the id of the entity.
     * @return the entity, or an empty {@link Optional} if not found.
     */
    Optional<BankAccountDTO> findOne(Long id);

    /**
     * Delete the "id" bankAccount.
     *
     * @param id the id of the entity.
     */
    void delete(Long id);
}
<filename>index.js var path = require('path'); var eejs = require("ep_etherpad-lite/node/eejs"); var settings = require('ep_etherpad-lite/node/utils/Settings'); var fs = require("fs"); exports.eejsBlock_exportColumn = function(hook_name, args, cb) { args.content = args.content + eejs.require('./templates/exportcolumn.html', {}, module); return cb(); }; exports.eejsBlock_scripts = function (hook_name, args, cb) { args.content = args.content + eejs.require('./templates/scripts.html', {}, module); return cb(); }; exports.eejsBlock_styles = function (hook_name, args, cb) { args.content = args.content + eejs.require('./templates/styles.html', {}, module); return cb(); }; exports.eejsBlock_mySettings = function (hook_name, args, cb) { if (!settings.ep_markdown_default){ checked_state = 'unchecked'; }else{ if(settings.ep_markdown_default == true){ checked_state = 'checked'; } } args.content = args.content + eejs.require('./templates/markdown_entry.ejs', {checked: checked_state}, module); return cb(); } exports.import = function (hook_name, args ,callback){ if(args.fileEnding.indexOf(".md") === -1) return callback(); // It is Markdown file, let's go! var markdown = fs.readFileSync(args.srcFile, 'utf-8'); var showdown = require('showdown'); var converter = new showdown.Converter({completeHTMLDocument: true}); var html = converter.makeHtml(markdown); fs.writeFile(args.destFile, html, 'utf8', function(err){ if(err) callback(err, null); callback(args.destFile); }); }
<gh_stars>1-10
import unittest

from pebbles.tests.base import SeleniumBaseTestCase
from pebbles.models import Variable

from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC


# NOTE(review): uses the `find_element(s)_by_*` helper methods, which were
# removed in Selenium 4 — confirm the pinned selenium version before upgrading.
class LoginTestCase(SeleniumBaseTestCase):
    """ Tests basic login and logout functionality.
    """
    def test_login_as_admin(self):
        # Admin login should land on the admin dashboard in every browser.
        for driver in self.drivers:
            self._do_login(
                self.known_admin_eppn,
                self.known_admin_password,
                driver,
                wait_for_element_id="admin-dashboard"
            )
            elements = driver.find_elements_by_id("admin-dashboard")
            self.assertIsNotNone(elements)
            assert len(elements) >= 1

    def test_login_as_user(self):
        # Regular-user login should land on the user dashboard.
        for driver in self.drivers:
            self._do_login(
                self.known_user_eppn,
                self.known_user_password,
                driver,
            )
            elements = driver.find_elements_by_id("user-dashboard")
            self.assertIsNotNone(elements)
            assert len(elements) >= 1

    def test_login_fail_as_user(self):
        # A wrong password must surface the invalid-login notice and must not
        # reach the user dashboard.
        for driver in self.drivers:
            driver.get(self.get_server_url() + "/")
            element = driver.find_element_by_id("invalid-login")
            assert not element.is_displayed()
            self._do_login(
                self.known_user_eppn,
                "open sesame",
                driver,
                wait_for=2
            )
            element = driver.find_element_by_id("invalid-login")
            assert element.is_displayed()
            i_should_be_empty = driver.find_elements_by_id("user-dashboard")
            assert len(i_should_be_empty) == 0

    def test_login_logout_as_user(self):
        # After logging out, the user dashboard must no longer be present.
        for driver in self.drivers:
            self._do_login(
                self.known_user_eppn,
                self.known_user_password,
                driver
            )
            self._do_logout(driver)
            elements = driver.find_elements_by_id("user-dashboard")
            assert len(elements) == 0

    def test_frontpage(self):
        """ test more for the set-up of the system than any actual
            functionality. asserts that the front page can be loaded and the
            notification tag is present. It was added so that a developer
            doesn't get depressed when all the other tests fail.
        """
        driver = self.drivers[0]
        driver.get(self.get_server_url() + "/")
        wait = WebDriverWait(driver, 10)
        wait.until(EC.visibility_of_element_located((By.TAG_NAME, "pb-notifications")))
        element = driver.find_element_by_tag_name("pb-notifications")
        self.assertIsNotNone(element)

    def test_frontpage_name_description(self):
        """ Tests that the configurable installation name and description are
            present on the login page.
        """
        driver = self.drivers[0]
        driver.get(self.get_server_url() + "/")
        wait = WebDriverWait(driver, 10)
        wait.until(EC.visibility_of_element_located((By.NAME, "installation-name")))
        element = driver.find_element_by_name("installation-name")
        config = self.config
        assert config["INSTALLATION_NAME"] == element.text
        element = driver.find_element_by_name("installation-description")
        assert config["INSTALLATION_DESCRIPTION"] == element.text
        element = driver.find_element_by_name("short-description")
        assert config["SHORT_DESCRIPTION"] == element.text

    def test_frontpage_login_visibility(self):
        """ If shibboleth login is enabled, it should be the only visible way
            to log in and form should be hidden. Also vice versa.
        """
        shibboleth_enabled = \
            Variable.query.filter_by(key="ENABLE_SHIBBOLETH_LOGIN").first()
        # Remember the original value so it can be restored at the end.
        saved = shibboleth_enabled.value

        # Set Value to True
        # Show shibboleth, don't show login by default
        shibboleth_enabled.value = True
        self.db.session.commit()
        driver = self.drivers[0]
        driver.get(self.get_server_url() + "/")
        wait = WebDriverWait(driver, 10)
        wait.until(EC.visibility_of_element_located((By.NAME, "shibboleth-login")))
        element = driver.find_element_by_name("shibboleth-login")
        assert element.is_displayed()
        other_element = driver.find_element_by_name("password-login")
        assert not other_element.is_displayed()

        # Set Value to True
        # Don't show shibboleth, do show login by default
        shibboleth_enabled.value = False
        self.db.session.commit()
        driver.get(self.get_server_url() + "/")
        wait = WebDriverWait(driver, 10)
        wait.until(EC.visibility_of_element_located((By.NAME, "password-login")))
        element = driver.find_element_by_name("shibboleth-login")
        assert not element.is_displayed()
        other_element = driver.find_element_by_name("password-login")
        assert other_element.is_displayed()

        # Don't remember if live tests are run in isolation so revert original
        # value just in case
        shibboleth_enabled.value = saved
        self.db.session.commit()


if __name__ == "__main__":
    unittest.main()
import React, { useState } from 'react';

/**
 * Custom hook that owns a photo's src state and returns a ready-to-render
 * <img> element together with the setter for its source URL.
 *
 * @param initialPhotoSrc initial value for the img "src" attribute
 * @returns pair of [photoElement, setPhotoSrc]
 */
const usePhoto = (initialPhotoSrc) => {
  const [photoSrc, setPhotoSrc] = useState(initialPhotoSrc);
  // Re-created on every state change so the element always reflects photoSrc.
  const photoElement = <img src={photoSrc} />;

  return [photoElement, setPhotoSrc]
}

export default usePhoto;

// Component example: renders the hook's element and swaps its source on click.
const MyComponent = () => {
  const [photoElement, setPhotoSrc] = usePhoto('');

  return (
    <>
      {photoElement}
      <button onClick={() => setPhotoSrc('https://example.com/picture.jpg')}>
        Show Picture
      </button>
    </>
  );
};
// Skeleton quiz Activity: counts down 30 seconds and checks the answer when
// the timer fires.  Question display/answer logic is left as TODO stubs.
public class QuizApp extends Activity {
    private TextView mTimerTextView;        // shows the remaining seconds
    private CountDownTimer mCountDownTimer; // 30s countdown, 1s tick
    private int mTotalQuestions;
    private int mCurrentQuestion;
    private int mScore;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_quiz_app);

        mTimerTextView = (TextView) findViewById(R.id.timer);

        // Set timer: 30 000 ms total, ticking once per second.
        mCountDownTimer = new CountDownTimer(30000, 1000) {
            @Override
            public void onTick(long millisUntilFinished) {
                // Update the timer label with whole seconds remaining.
                mTimerTextView.setText("Time remaining: "+ millisUntilFinished / 1000);
            }

            @Override
            public void onFinish() {
                // Check answer when time runs out.
                // NOTE(review): the timer is started once in onCreate and is
                // not restarted per question here -- confirm intended flow.
                checkAnswer();
            }
        };

        mTotalQuestions = getTotalQuestions();
        mCurrentQuestion = 0;

        // Start the game
        mCountDownTimer.start();
        showNextQuestion();
    }

    private void showNextQuestion(){
        // Write code to show the next question.
    }

    private void checkAnswer(){
        // Write code to check the answer.
    }

    private int getTotalQuestions(){
        // Write code to get the total questions.
        return 0;
    }
}
"""Train and evaluate a ridge-regression model on the elevator-load dataset."""
import numpy as np
import pandas as pd
from sklearn.linear_model import Ridge
from sklearn.model_selection import train_test_split
from sklearn.metrics import mean_squared_error

# Load dataset
frame = pd.read_csv('elevator_data.csv')

# Separate the regression target from the feature columns.
features = frame.drop(labels=['elevator_load'], axis=1)
target = frame['elevator_load']

# Hold out a test split (fixed seed for reproducibility).
features_train, features_test, target_train, target_test = train_test_split(
    features, target, random_state=0)

# Model 1 - Ridge Regression
ridge = Ridge(alpha=20, random_state=0)
ridge.fit(features_train, target_train)
predictions = ridge.predict(features_test)

# Report root-mean-squared error on the held-out split.
rmse = np.sqrt(mean_squared_error(target_test, predictions))
print('RMSE:', rmse)
#! /bin/sh
# Run ./test.sh once per candidate shell that is actually installed;
# shells that cannot be resolved on PATH are reported and skipped.
set -eu

for shell in sh bash dash zsh # csh fish ksh tcsh
do
    if shell_path="$(command -v "$shell")"
    then
        ./test.sh "$shell_path"
    else
        echo "Skipping $shell"
    fi
done
import { useEffect, useState } from "react";
import produce from "immer";
import {
    Box,
    Button,
    Dialog,
    DialogActions,
    DialogContent,
    DialogTitle,
    FormControl,
    FormHelperText,
    TextField,
} from "@material-ui/core";

import RecursivePartial from "../../lib/util/recursive-partial";
import { Track } from "../store/types";

/**
 * Props for the track create/edit dialog.
 * - `track` present => edit mode ("Save"); absent => create mode ("Create").
 * - `onSubmit` receives the completed Track once all fields validate.
 * - `onChange` is notified on every partial edit.
 */
export interface TrackModalProps {
    isOpen: boolean;
    onClose: () => void;
    track?: Track;
    onSubmit?: (track: Track) => void;
    onChange?: (trackPart: RecursivePartial<Track>) => void;
}

// type ErrorsFor<T extends Record<string, any>> = {
//     [K in keyof T]: T[K] extends Array<any>
//         ? string | undefined
//         : T[K] extends Record<string, any>
//         ? ErrorsFor<T[K]>
//         : string | undefined;
// };
// type TrackErrors = ErrorsFor<Track>;

// Flat error map keyed by field path; `undefined` means the field is valid.
interface TrackErrors {
    name: string | undefined;
    "config.bpm": string | undefined;
    "config.patternLen": string | undefined;
    "config.barLen": string | undefined;
}

/**
 * Modal dialog for creating or editing a Track.  Holds a RecursivePartial
 * draft locally and only hands a full Track to `onSubmit` once every
 * required field is present.
 */
export default function TrackModal({
    isOpen,
    onClose,
    track,
    onSubmit,
    onChange,
}: TrackModalProps) {
    // Local draft, seeded from the track being edited (or empty for create).
    const [trackPart, setTrackPart] = useState<RecursivePartial<Track>>(
        track || {},
    );

    // Validation is recomputed on every render from the current draft.
    const errors: TrackErrors = {
        name: !trackPart.name ? "Name must be given" : undefined,
        "config.bpm":
            trackPart.config?.bpm === undefined
                ? "BPM number must be given"
                : undefined,
        "config.patternLen":
            trackPart.config?.patternLen === undefined
                ? "Pattern length number must be given"
                : undefined,
        "config.barLen":
            trackPart.config?.barLen === undefined
                ? "Bar length number must be given"
                : undefined,
    };
    const isValid = Object.values(errors).every((err) => !err);

    // handleSubmit is only defined when submission is allowed; the cast is
    // safe because isValid guarantees all required fields are set.
    const handleSubmit =
        onSubmit && isValid ? () => onSubmit(trackPart as Track) : undefined;
    const handleChange = onChange ? () => onChange(trackPart) : () => {};
    // NOTE(review): handleChange is a fresh closure each render and is listed
    // in the dependency array, so this effect fires on every render while
    // onChange is set -- consider useCallback; confirm intended.
    useEffect(handleChange, [trackPart, handleChange]);

    const title = track ? `Edit Track ${track.name}` : "New Track";
    const submitText = track ? "Save" : "Create";

    // Immutable update of the draft's name via immer.
    const updateName = (name: string) =>
        setTrackPart((baseState) =>
            produce(baseState, (state) => {
                state.name = name;
            }),
        );

    // Parse and store a numeric config field; non-numeric input is ignored
    // (the draft keeps its previous value).
    const updateConfigValue = (
        field: keyof Track["config"],
        valueStr: string,
    ) => {
        const valueNum = parseInt(valueStr);
        if (!Number.isNaN(valueNum)) {
            setTrackPart((baseState) =>
                produce(baseState, (state) => {
                    if (!state.config) {
                        state.config = {};
                    }
                    state.config[field] = valueNum;
                }),
            );
        }
    };

    // NOTE(review): the TextField `value` props below may start as undefined
    // (empty draft), which React treats as an uncontrolled input until the
    // first edit -- confirm acceptable or default to "".
    return (
        <Dialog open={isOpen} onClose={onClose} fullWidth>
            <DialogTitle>{title}</DialogTitle>
            <DialogContent>
                <Box display="flex" flexDirection="column" gridGap="16px">
                    <FormControl>
                        <TextField
                            label="Name"
                            value={trackPart.name}
                            onChange={(e) => updateName(e.target.value)}
                        />
                        <FormHelperText>Your track's name.</FormHelperText>
                        {errors.name && (
                            <FormHelperText error>{errors.name}</FormHelperText>
                        )}
                    </FormControl>
                    <FormControl>
                        <TextField
                            label="BPM"
                            type="number"
                            value={trackPart.config?.bpm}
                            onChange={(e) =>
                                updateConfigValue("bpm", e.target.value)
                            }
                        />
                        <FormHelperText>
                            Your track's Beats-Per-Minute.
                        </FormHelperText>
                        {errors["config.bpm"] && (
                            <FormHelperText error>
                                {errors["config.bpm"]}
                            </FormHelperText>
                        )}
                    </FormControl>
                    <FormControl>
                        <TextField
                            label="Pattern Length"
                            type="number"
                            value={trackPart.config?.patternLen}
                            onChange={(e) =>
                                updateConfigValue("patternLen", e.target.value)
                            }
                        />
                        <FormHelperText>
                            Number of beats in each pattern.
                        </FormHelperText>
                        {errors["config.patternLen"] && (
                            <FormHelperText error>
                                {errors["config.patternLen"]}
                            </FormHelperText>
                        )}
                    </FormControl>
                    <FormControl>
                        <TextField
                            label="Bar Length"
                            type="number"
                            value={trackPart.config?.barLen}
                            onChange={(e) =>
                                updateConfigValue("barLen", e.target.value)
                            }
                        />
                        <FormHelperText>
                            Number of beats in a bar.
                        </FormHelperText>
                        {errors["config.barLen"] && (
                            <FormHelperText error>
                                {errors["config.barLen"]}
                            </FormHelperText>
                        )}
                    </FormControl>
                </Box>
            </DialogContent>
            <DialogActions>
                <Button variant="contained" onClick={onClose}>
                    Cancel
                </Button>
                <Button
                    variant="contained"
                    color="secondary"
                    onClick={handleSubmit}
                    disabled={!isValid}
                >
                    {submitText}
                </Button>
            </DialogActions>
        </Dialog>
    );
}
<filename>src/index.js
// Application entry point: mounts the router inside an ApolloProvider
// (wired to the Redux store and Apollo client) onto the #root element.
import React from 'react';
import { render } from 'react-dom';
import { ApolloProvider } from 'react-apollo';
import Routes from './routes';
import store from './store';
import apolloClient from './apolloClient';

// Third-party stylesheets bundled globally.
import 'bootstrap/dist/css/bootstrap.min.css';
import 'ionicons/css/ionicons.min.css';
import 'react-datetime/css/react-datetime.css';
import 'react-select-plus/dist/react-select-plus.css';
import 'react-toggle/style.css';

// global style
import './modules/common/styles/global-styles.js';

const target = document.querySelector('#root');

render(
  <ApolloProvider store={store} client={apolloClient}>
    <Routes />
  </ApolloProvider>,
  target
);
package main.basicprogramming;

import java.util.Scanner;

/**
 * Reads one line from stdin and prints it with the case of every letter
 * toggled (lower -> upper, upper -> lower).
 */
public class ToggleString {

    /**
     * Returns a copy of {@code input} with the case of each letter flipped.
     *
     * Bug fix: the original only assigned letters into the output buffer, so
     * any non-letter character (digit, space, punctuation) was left as the
     * array default '\0' instead of being copied through.  Non-letters are
     * now preserved unchanged.
     *
     * @param input the line to transform (must not be null)
     * @return the case-toggled string, same length as the input
     */
    static String toggle(String input) {
        char[] out = input.toCharArray();
        for (int i = 0; i < out.length; i++) {
            char c = out[i];
            if (Character.isLowerCase(c)) {
                out[i] = Character.toUpperCase(c);
            } else if (Character.isUpperCase(c)) {
                out[i] = Character.toLowerCase(c);
            }
            // Non-letter characters stay as-is.
        }
        return String.valueOf(out);
    }

    public static void main(String[] args) {
        Scanner sc = new Scanner(System.in);
        System.out.println(toggle(sc.nextLine()));
    }
}
def largestElement(myList):
    """Return the largest value in *myList* (delegates to built-in ``max``,
    so an empty sequence raises ``ValueError``)."""
    return max(myList)
#!/usr/bin/env bash . ../tutaut.sh BASE=/tmp/git-tutorial rm -rf $BASE #MAX_WAIT_CHAR=0 DEBUG=1 operator master create_dir $BASE/public/project change_dir $BASE/public/project operator master2 create_dir $BASE/public/project change_dir $BASE/public/project operator master3 create_dir $BASE/public/project change_dir $BASE/public/project vi_open src1 vi_add_line Initial content first line vi_add_line Initial content second line vi_save_and_close vi_open src1 vi_search second vi_add_line feature1 vi_save_and_close vi_open src1 vi_search feature1 vi_add_line feature2 vi_save_and_close MAX_WAIT_CHAR=500 vi_open src1 vi_search feature1 vi_change_line feature1.1 vi_save_and_close MAX_WAIT_CHAR=500 print_file src1
package eu.iamgio.jrfl.api.commands;

/**
 * Represents a command alias: a Command that carries its own name but
 * delegates execution (and borrows description/usage, with the usage text
 * rewritten to show the alias name) to the original command.
 * @author Gio
 */
public class Alias extends Command {

    // The command this alias forwards to.
    private Command command;

    // Package-private: aliases are created by the commands framework.
    Alias(String name, Command command) {
        super(name, command.getDescription(), command.getUsage().replace(command.getName(), name));
        this.command = command;
    }

    /**
     * @return Original command
     */
    public Command getOriginalCommand() {
        return command;
    }

    // Delegate execution unchanged to the aliased command.
    @Override
    public void onCommand(String[] args) {
        command.onCommand(args);
    }
}
package com.jbattiste.conference.repository;

import java.util.ArrayList;
import java.util.List;

import com.jbattiste.conference.model.Speaker;

/**
 * Stub SpeakerRepository implementation: returns a single hard-coded
 * Speaker instead of querying a database.
 * NOTE(review): despite the class name, no Hibernate code is present here.
 */
public class HibernateSpeakerRepositoryImpl implements SpeakerRepository{

    public List<Speaker> findAll(){
        List<Speaker> speakers = new ArrayList<>();

        // Hard-coded placeholder record.
        Speaker speaker = new Speaker();
        speaker.setFirstName("jbattiste");
        speaker.setLastName("Dutta");

        speakers.add(speaker);
        return speakers;
    }
}
#!/bin/sh
# Thin launcher for the replibyte binary in the current directory.
#
# Fix: the original `eval "./replibyte $@"` flattened the arguments into a
# single string and re-word-split them, breaking any argument containing
# spaces or shell metacharacters.  `exec` with a quoted "$@" forwards every
# argument verbatim and replaces this shell with the child process (same
# exit status, one fewer process).
exec ./replibyte "$@"
#!/bin/bash # Copyright (c) 2014 by Michael Berlin, Zuse Institute Berlin # # Licensed under the BSD License, see LICENSE file for details. # This test runs all C++ unit tests through Valgrind which will check for # memory leaks. # # Make sure that you did run export BUILD_CLIENT_TESTS=true before running # "make client_debug". Otherwise, the unit tests won't be built. set -e function warn_missing_url() { cat <<EOF INFO: URL to XtreemFS $1 not given as $2 parameter. INFO: Tests will use the default URL at localhost and the default port. INFO: Make sure to run an XtreemFS setup on this address or specify a different URL as argument. EOF } hash valgrind 2>/dev/null || { echo "ERROR: valgrind not found, but required by this test." exit 1 } # Parse arguments. TEST_DIR=$4 if [ -z $TEST_DIR ] then TEST_DIR=/tmp/xtreemfs-cpp-valgrind if [ ! -d "$TEST_DIR" ]; then mkdir "$TEST_DIR"; fi if [ ! -d "${TEST_DIR}/log" ]; then mkdir "${TEST_DIR}/log"; fi fi export XTREEMFS_TEST_DIR="$TEST_DIR" echo "INFO: TEST_DIR: $TEST_DIR" VALGRIND_LOG_FILE="${TEST_DIR}/log/valgrind.log" if [ -n "$1" ] then XTREEMFS_DIR="$1" else # Try to guess the path of the XtreemFS repository. [ -d "cpp" ] && XTREEMFS_DIR="." [ -d "../cpp" ] && XTREEMFS_DIR=".." [ -d "../../cpp" ] && XTREEMFS_DIR="../.." if [ -n "$XTREEMFS_DIR" ] then echo "INFO: Path to XtreemFS repository auto-detected and set to: ${XTREEMFS_DIR}" else echo "ERROR: Path to XtreemFS repository not found. Set it as first parameter. Aborting." 
exit 2 fi fi if [ -n "$2" ] then export XTREEMFS_DIR_URL="$2" else warn_missing_url "DIR" "second" fi if [ -n "$3" ] then export XTREEMFS_MRC_URL="$3" else warn_missing_url "MRC" "third" fi # Run tests cd "$XTREEMFS_DIR" cd cpp/build global_rc=0 for test in test_* do # disable test_object_cache due to frequent failures (feature is not maintained actively) if [ "$test" = "test_object_cache" ] then continue fi set +e valgrind --leak-check=full --show-reachable=yes --error-exitcode=23 --suppressions="${XTREEMFS_DIR}/cpp/valgrind.supp" ./$test &>>$VALGRIND_LOG_FILE rc=$? set -e # Add some whitespace to the logfile between runs. echo -e "\n\n\n" >> $VALGRIND_LOG_FILE if [ $rc -eq 0 ] then echo "Valgrind memory-leak check PASSED for: $test" else echo "Valgrind memory-leak check FAILED for: $test" global_rc=1 fi done exit $global_rc
// ----- 类 ----- // 类声明 // class Person {} // 类表达式 // const Animal = class {}; // 注意,Class 无法像 Function 声明一样实现提升 // 函数受函数作用域限制,类受块作用域限制 /* { function FunctionDeclaration() {} class ClassDeclaration {} } console.log(FunctionDeclaration); // [Function: FunctionDeclaration] console.log(ClassDeclaration); // ReferenceError: ClassDeclaration is not defined */ // --- 类的构成 --- // 1. 构造函数方法 // 2. 实例方法 // 3. 获取函数 // 4. 设置函数 // 5. 静态类方法 // class Foo {} // class Bar { // constructor() {} // } // 获取函数 // class Baz { // get myBaz() {} // } // 静态方法 // class Qux { // static myQux() {} // } // 类表达式 // 把类表达式赋值给变量后,可以通过 name 属性取得类表达式的名称字符串(Person.name === PersonName),但不能在类表达式作用域外访问该标识符(PersonName) /* let Person = class PersonName { identify() { console.log(Person.name, PersonName.name); } }; let p = new Person(); p.identify(); // PersonName PersonName console.log(Person.name); // PersonName console.log(PersonName); // ReferenceError: PersonName is not defined */ // --- 类构造函数 --- // constructor 关键字用于在类定义块内部,创建类的构造函数 // 在使用 new 操作符创建类的新实例时会调用这个函数,若无则默认为空函数 // 使用 new 后会在内存中创建新对象,[[Prototype]]指针指向构造函数的 prototype,构造函数 this 指向新对象 // 执行构造函数内部的代码(给对象添加属性),如果构造函数有返回则返回(与类无关),无则返回刚创建的对象(类的实例) // 类实例化时传入的参数会用作构造函数的参数(可选) /* class Person { constructor(name) { console.log(arguments.length); this.name = name; // 若加 || null 则下面为 null } } let p1 = new Person; // 0 console.log(p1.name); // undefined let p2 = new Person(); // 0 console.log(p2.name); // undefined let p3 = new Person('Jake'); // 1 console.log(p3.name); // Jake */ // 调用类构造函数如果忘了 new 会抛出 TypeError,普通构造函数忘了 new 则会以全局为 this // --- 类是特殊函数 --- // ES 中没有类这个类型,ES 类就是一种特殊函数 /* class Person {} console.log(Person); // [class Person] console.log(typeof Person); // function // 有些语言实例化类可省略 new 是因为存在类的数据类型(但是类与函数不能重名) */ // 类标签符有 prototype 属性,这个原型也有一个 constructor 属性指向类自身 /* class Person {} console.log(Person.prototype); // {} console.log(Person === Person.prototype.constructor); // true // 可以使用 instanceof 检查对象是不是类的实例 let p = new Person(); 
console.log(p instanceof Person); // true // 类本身在调用 new 时就会被当作构造函数(类中的constructor则不是) console.log(p instanceof Person.constructor); // false */ // 类是一等公民,可以作为参数传递,也可以立即实例化 let p = new class Foo { constructor(x) { console.log(x); } }('bar'); // bar console.log(p); // Foo {}
#!/usr/bin/env bash
# Entry point that starts a Spark master bound to this host's own address.
#
# Fixes over the original: the first /etc/hosts entry was parsed twice with
# identical awk commands (now computed once and reused), legacy backticks are
# replaced by $(...), and the IP expansion is quoted.
export SPARK_MASTER_IP="$(awk 'NR==1 {print $1}' /etc/hosts)"
export SPARK_LOCAL_IP="$SPARK_MASTER_IP"

# Start the master daemon bound to the detected IP; extra arguments are
# forwarded verbatim.
/usr/local/spark/sbin/start-master.sh --properties-file /spark-defaults.conf -i "$SPARK_LOCAL_IP" "$@"

# Drop into an interactive shell afterwards -- presumably to keep the
# container's foreground process alive; confirm against the image's usage.
/bin/bash
<reponame>ckaratzas/tus-server-implementation<gh_stars>10-100
package com.tus.oss.server.core;

import com.tus.oss.server.openapi.OpenApiRoutePublisher;
import io.vertx.core.AbstractVerticle;
import io.vertx.ext.web.Router;
import io.vertx.ext.web.handler.BodyHandler;
import io.vertx.ext.web.handler.LoggerHandler;
import io.vertx.ext.web.handler.ResponseTimeHandler;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;

import javax.inject.Inject;

/**
 * @author ckaratza
 * The Tus Server Verticle with the route definitions.
 */
@Component
public class ServerVerticle extends AbstractVerticle {

    // Bind address/port and base path, injected from configuration.
    private final Integer port;
    private final String host;
    private final String contextPath;
    // One handler per tus protocol verb.
    private final OptionsHandler optionsHandler;
    private final HeadHandler headHandler;
    private final PostHandler postHandler;
    private final PatchHandler patchHandler;
    private final DeleteHandler deleteHandler;

    @Inject
    public ServerVerticle(@Value("${port}") Integer port, @Value("${host}") String host, @Value("${contextPath}") String contextPath,
                          OptionsHandler optionsHandler, HeadHandler headHandler, PostHandler postHandler, PatchHandler patchHandler,
                          DeleteHandler deleteHandler) {
        this.port = port;
        this.host = host;
        this.contextPath = contextPath;
        this.optionsHandler = optionsHandler;
        this.headHandler = headHandler;
        this.postHandler = postHandler;
        this.patchHandler = patchHandler;
        this.deleteHandler = deleteHandler;
    }

    @Override
    public void start() {
        Router router = Router.router(vertx);
        // Common handlers for every route: body parsing, request logging,
        // and response-time reporting.
        router.route().handler(BodyHandler.create()).handler(LoggerHandler.create()).handler(ResponseTimeHandler.create()).enable();
        // tus core protocol routes; ":uploadID" addresses a single upload.
        router.head(contextPath + ":uploadID").handler(headHandler::handleRequest);
        router.options(contextPath).handler(optionsHandler::handleRequest);
        router.post(contextPath).handler(postHandler::handleRequest);
        router.delete(contextPath + ":uploadID").handler(deleteHandler::handleRequest);
        router.patch(contextPath + ":uploadID").handler(patchHandler::handleRequestForPatch);
        //POST can replace PATCH because of buggy jre...
        router.post(contextPath + ":uploadID").handler(patchHandler::handleRequestForPost);
        // Expose the OpenAPI spec for the routes defined above.
        OpenApiRoutePublisher.publishOpenApiSpec(router, contextPath + "spec", "Tus.io Resumable File Upload Protocol Server", "1.0.0",
                "http://" + host + ":" + port + "/");
        vertx.createHttpServer().requestHandler(router::accept).listen(port, host);
    }
}
#!/usr/bin/env node
// GitHub Action entry point: looks up a pull request's node id and enables
// auto-merge on it through the GraphQL API.
const core = require("@actions/core");
const { Octokit } = require("@octokit/core");

/**
 * Resolve the GraphQL node id of a pull request from owner/repo/number.
 */
const GetPullRequestId = async (octokit, owner, repoName, pullNumber) => {
  const query = `query FindPullRequestId($owner: String!, $repoName: String!, $pullNumber: Int!) {
    repository(owner: $owner, name: $repoName) {
      pullRequest(number: $pullNumber) {
        id
      }
    }
  }
  `;

  const result = await octokit.graphql(query, { owner, repoName, pullNumber });
  return result.repository.pullRequest.id;
};

/**
 * Enable auto-merge on the pull request (by node id) with the given merge
 * method, logging the resulting state.
 */
const enableAutoMerge = async (octokit, pullRequestId, mergeMethod) => {
  const mutation = `
  mutation EnableAutoMerge($pullRequestId: ID!, $mergeMethod: PullRequestMergeMethod!) {
    enablePullRequestAutoMerge(input: { pullRequestId: $pullRequestId, mergeMethod: $mergeMethod }) {
      pullRequest {
        id
        state
      }
    }
  }
  `;

  const result = await octokit.graphql(mutation, { pullRequestId, mergeMethod });
  core.info(
    `successfully enabled auto merge on pull request. pull request id: ${
      result.enablePullRequestAutoMerge.pullRequest.id
    }. state: ${
      result.enablePullRequestAutoMerge.pullRequest.state
    }`
  );
};

// Read action inputs, resolve the PR id, then enable auto-merge; any failure
// marks the action run as failed.
async function main(octokit) {
  try {
    const owner = core.getInput("owner");
    const repoName = core.getInput("repo");
    const pullNumber = +core.getInput("pull-number");

    const pullRequestId = await GetPullRequestId(
      octokit,
      owner,
      repoName,
      pullNumber
    );
    const mergeMethod = core.getInput("merge-method");

    core.info(
      `pull request id is ${pullRequestId}. will send enablePullRequestAutoMerge on it`
    );

    await enableAutoMerge(octokit, pullRequestId, mergeMethod);
  } catch (err) {
    core.setFailed(err.message);
  }
}

main(
  new Octokit({
    auth: core.getInput("token"),
  })
);
<gh_stars>0
from rest_framework.test import APITestCase
from rest_framework_jwt.settings import api_settings

from factories.factories import ProfileFactory


class APIHeaderAuthorization(APITestCase):
    """Base class used to attach header to all request on setup."""

    def setUp(self):
        """Include an appropriate `Authorization:` header on all requests"""
        # Fresh profile per test; its user is the JWT subject.
        self.profile = ProfileFactory()
        # Build a JWT with the handlers configured in rest_framework_jwt.
        jwt_payload_handler = api_settings.JWT_PAYLOAD_HANDLER
        jwt_encode_handler = api_settings.JWT_ENCODE_HANDLER
        payload = jwt_payload_handler(self.profile.user)
        token = jwt_encode_handler(payload)
        # Every request issued through self.client now carries the token.
        self.client.credentials(HTTP_AUTHORIZATION='JWT ' + token)
/*
 * Copyright (C) 2012-2014 <NAME>
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package info.archinnov.achilles.embedded;

import java.util.Set;

// NOTE(review): FallthroughRetryPolicy is imported but not referenced in
// this class -- candidate for removal after confirming no other use.
import com.datastax.driver.core.ProtocolOptions.Compression;
import com.datastax.driver.core.policies.FallthroughRetryPolicy;
import com.datastax.driver.core.policies.Policies;
import com.google.common.collect.ImmutableSet;

import info.archinnov.achilles.type.TypedMap;

/**
 * Names of the configuration parameters accepted by the embedded Cassandra
 * runner, together with their default values and a helper that overlays
 * user-supplied parameters on top of the defaults.
 */
public class CassandraEmbeddedConfigParameters {

    /**
     * Configuration parameters (keys of the TypedMap handed to the runner)
     */
    public static final String CLEAN_CASSANDRA_DATA_FILES = "cleanCassandraDataFiles";
    public static final String CLEAN_CASSANDRA_CONFIG_FILE = "cleanCassandraConfigFile";

    // File-system locations used by the embedded node.
    public static final String DATA_FILE_FOLDER = "datafileFolder";
    public static final String COMMIT_LOG_FOLDER = "commitlogFolder";
    public static final String SAVED_CACHES_FOLDER = "savedCachesFolder";
    public static final String CONFIG_YAML_FILE = "configYamlFile";

    // Driver/cluster settings.
    public static final String CLUSTER_NAME = "clusterName";
    public static final String COMPRESSION_TYPE = "compressionType";
    public static final String LOAD_BALANCING_POLICY = "loadBalancingPolicy";
    public static final String RETRY_POLICY = "retryPolicy";
    public static final String RECONNECTION_POLICY = "reconnectionPolicy";

    // Network ports of the embedded node.
    public static final String CASSANDRA_THRIFT_PORT = "thriftPort";
    public static final String CASSANDRA_CQL_PORT = "cqlPort";
    public static final String CASSANDRA_STORAGE_PORT = "storagePort";
    public static final String CASSANDRA_STORAGE_SSL_PORT = "storageSSLPort";

    public static final String KEYSPACE_DURABLE_WRITE = "keyspaceDurableWrite";

    public static final String BUILD_NATIVE_SESSION_ONLY = "buildNativeSessionOnly";

    public static final String KEYSPACE_NAME = "keyspaceName";

    /*
     * Default values
     */
    public static final String DEFAULT_CASSANDRA_HOST = "localhost";
    public static final String DEFAULT_ACHILLES_TEST_KEYSPACE_NAME = "achilles_test";

    // Default target/ locations (wiped between test runs).
    static final String DEFAULT_ACHILLES_TEST_DATA_FOLDER = "target/cassandra_embedded/data";
    static final String DEFAULT_ACHILLES_TEST_COMMIT_LOG_FOLDER = "target/cassandra_embedded/commitlog";
    static final String DEFAULT_ACHILLES_TEST_SAVED_CACHES_FOLDER = "target/cassandra_embedded/saved_caches";
    static final String DEFAULT_ACHILLES_TEST_TRIGGERS_FOLDER = "/cassandra_triggers";

    static final Set<String> DEFAULT_ACHILLES_TEST_FOLDERS = ImmutableSet.of(DEFAULT_ACHILLES_TEST_DATA_FOLDER,
            DEFAULT_ACHILLES_TEST_COMMIT_LOG_FOLDER, DEFAULT_ACHILLES_TEST_SAVED_CACHES_FOLDER);

    static final String DEFAULT_ACHILLES_TEST_CONFIG_YAML_FILE = "target/cassandra_embedded/cassandra.yaml";

    static final String DEFAULT_CASSANDRA_EMBEDDED_CLUSTER_NAME = "Achilles Embedded Cassandra Cluster";
    static final String DEFAULT_CASSANDRA_EMBEDDED_KEYSPACE_NAME = "achilles_embedded";
    static final Boolean DEFAULT_CASSANDRA_EMBEDDED_KEYSPACE_DURABLE_WRITE = true;

    /**
     * Overlay the given parameters on top of the defaults: every key present
     * in {@code parameters} wins over the corresponding default entry.
     */
    static TypedMap mergeWithDefaultParameters(TypedMap parameters) {
        TypedMap defaultParams = new TypedMap();

        defaultParams.put(CLEAN_CASSANDRA_DATA_FILES, true);
        defaultParams.put(CLEAN_CASSANDRA_CONFIG_FILE, true);

        defaultParams.put(DATA_FILE_FOLDER, DEFAULT_ACHILLES_TEST_DATA_FOLDER);
        defaultParams.put(COMMIT_LOG_FOLDER, DEFAULT_ACHILLES_TEST_COMMIT_LOG_FOLDER);
        defaultParams.put(SAVED_CACHES_FOLDER, DEFAULT_ACHILLES_TEST_SAVED_CACHES_FOLDER);
        defaultParams.put(CONFIG_YAML_FILE, DEFAULT_ACHILLES_TEST_CONFIG_YAML_FILE);

        defaultParams.put(CLUSTER_NAME, DEFAULT_CASSANDRA_EMBEDDED_CLUSTER_NAME);
        defaultParams.put(KEYSPACE_NAME, DEFAULT_CASSANDRA_EMBEDDED_KEYSPACE_NAME);
        defaultParams.put(KEYSPACE_DURABLE_WRITE, DEFAULT_CASSANDRA_EMBEDDED_KEYSPACE_DURABLE_WRITE);

        // Driver defaults: no compression, stock driver policies.
        defaultParams.put(COMPRESSION_TYPE, Compression.NONE);
        defaultParams.put(LOAD_BALANCING_POLICY, Policies.defaultLoadBalancingPolicy());
        defaultParams.put(RETRY_POLICY, Policies.defaultRetryPolicy());
        defaultParams.put(RECONNECTION_POLICY, Policies.defaultReconnectionPolicy());

        defaultParams.put(BUILD_NATIVE_SESSION_ONLY, false);

        // User-supplied parameters override the defaults above.
        defaultParams.putAll(parameters);

        return defaultParams;
    }
}
#!/bin/bash
# Post install script for Jerakia

# Detect whether the host runs systemd (pidof exits 0 when a process named
# "systemd" exists).
#
# Fixes over the original `IS_SYSTEMD=$((pidof systemd 2>&1 > /dev/null) && ...)`:
# that form relied on bash re-parsing a failed $(( arithmetic expansion as a
# command substitution, and its `2>&1 > /dev/null` order redirected stderr to
# the original stdout so error output still leaked.  A plain `if` with the
# conventional `> /dev/null 2>&1` ordering is unambiguous and silent.
if pidof systemd > /dev/null 2>&1; then
  # Install and register the systemd unit.
  cp /opt/jerakia/ext/systemd/jerakia.service /etc/systemd/system/jerakia.service
  systemctl daemon-reload
else
  # Fall back to a SysV init script on RedHat-style non-systemd hosts.
  if [ -f "/etc/init.d/functions" ]; then
    cp /opt/jerakia/ext/init.d/jerakia /etc/init.d/jerakia
    chmod +x /etc/init.d/jerakia
  fi
fi

# Runtime directories.
mkdir -p /var/log/jerakia
mkdir -p /var/db/jerakia
mkdir -p /etc/jerakia/policy.d

# Seed the default policy and config only when absent, so upgrades keep
# user edits intact.
[ -f "/etc/jerakia/policy.d/default.rb" ] || cp /opt/jerakia/ext/jerakia/policy.skel.rb /etc/jerakia/policy.d/default.rb
[ -f "/etc/jerakia/jerakia.yaml" ] || cp /opt/jerakia/ext/jerakia/jerakia.skel.yaml /etc/jerakia/jerakia.yaml

chmod 550 /etc/jerakia /etc/jerakia/policy.d
chmod 660 /etc/jerakia/jerakia.yaml /etc/jerakia/policy.d/default.rb
import { CoreScriptBase } from '../coreScriptBase';
import { PrettierScript } from './prettier';
import { TsLintScript } from './tsLint';

/**
 * Aggregate lint entry point: runs the Prettier check followed by TSLint.
 */
export class LintScript extends CoreScriptBase {
    get name(): string {
        return 'lint';
    }

    get description(): string {
        return `Run the Prettier and TSlint validation.`;
    }

    protected async main() {
        // Run each linter sequentially, in a fixed order, so their output
        // does not interleave.
        for (const script of [PrettierScript, TsLintScript]) {
            await this.invokeScript(script, {}, {});
        }
    }
}
#!/bin/sh
# Interactive post-install setup script for Manjaro GNOME.  Each section
# prompts Y/n and applies one group of settings; helper functions below
# render centered, colored banners sized to the terminal width.

# Settings
COLUMNS=$(tput cols)

# Colors
NC='\033[0m' # No Color
INFO='\033[1;35m' # PURPLE
STEP='\033[1;36m' # CYAN
WARNING='\033[0;33m' # YELLOW

# Centered yellow banner; ${1^^} upper-cases the whole message.
warning_message() {
    echo
    echo -e ${WARNING}
    printf "%*s\n" $(((6+$COLUMNS)/2)) "(@SED)"
    echo
    printf "%*s\n" $(((11+$COLUMNS)/2)) "WARNING !!!"
    echo
    printf "%*s\n" $(((${#1}+$COLUMNS)/2)) "${1^^}...."
    echo -e ${NC}
    echo
}

# Centered cyan banner; ${1^} capitalizes only the first character.
step_message() {
    echo
    echo -e ${STEP}
    printf "%*s\n" $(((6+$COLUMNS)/2)) "(@SED)"
    echo
    printf "%*s\n" $(((${#1}+$COLUMNS)/2)) "${1^}...."
    echo -e ${NC}
    echo
}

# Centered purple banner.
info_message() {
    echo
    echo -e ${INFO}
    printf "%*s\n" $(((6+$COLUMNS)/2)) "(@SED)"
    echo
    printf "%*s\n" $(((${#1}+$COLUMNS)/2)) "${1^}"
    echo -e ${NC}
    echo
}

# Y/n confirmation prompt text (answer is read by the caller).
conf_message() {
    echo
    printf "(@SED) Do you want to $1? (Y/n)"
}

# Blocking Y/n gate between sections; "n" exits the whole script.
proceed_next() {
    echo
    printf "(@SED) Do you want to proceed to the next step? (Y/n)"
    while true; do
        read -p "" yn
        case $yn in
            [Yy]*|"" )
                clear
                break;;
            [Nn]* ) exit;;
            * ) echo "Please answer Y/y or N/n as yes or no.";;
        esac
    done
}

warning_message "before running this script first goto the 'config/personal_Info.json' file, put all of your config data and then run this. By default all of these data's are set to NULL. So if you continue without changing it, it may not configure the settings correctly"

proceed_next

info_message "
Author's information:
    Shahriar Elahi Dhruvo
    Student at Shahjalal University of Science & Technology (2017-2022)

How to configure it: (If for some reason you don't like mine)
    1.You can edit the config/extra_software_list.txt to add/remove softwares/tools of your choice
    2.Replace the sublime-text-3.zip with yours
    3.Replace the .zshrc with yours
    4.To add/remove extra gnome extensions edit the config/extra_gnome-extensions_list.txt
    5.To add/remove extra aliases edit the config/custom_aliases.txt
    6.And feel free to change any commands of this script as you like

This script is all about what will I do/change if I want to use manjaro linux as my primary Operating System
It is recommended to check this script's code before you run it. And of course I am not responsible for any of your damage"

proceed_next

# Enable AUR
step_message "enabling AUR"
sudo sed --in-place "s/#EnableAUR/EnableAUR/" "/etc/pamac.conf"

# Change Power Settings
while true; do
    info_message "
    1.Increase Blank Screen idle-time to 10 minutes
    2.Change power button action to 'suspend'
    3.Turn off Automatic Suspend"
    conf_message "change above power settings"
    read -p "" yn
    case $yn in
        [Yy]*|"" )
            step_message "applying these power management settings"
            gsettings set org.gnome.desktop.session idle-delay 600
            gsettings set org.gnome.settings-daemon.plugins.power power-button-action suspend
            gsettings set org.gnome.settings-daemon.plugins.power sleep-inactive-ac-type "nothing"
            break;;
        [Nn]* ) break;;
        * ) echo "Please answer Y/y or N/n as yes or no.";;
    esac
done

# GRUB settings
while true; do
    conf_message "change grub default timeout to 3s"
    read -p "" yn
    case $yn in
        [Yy]*|"" )
            step_message "updating grub timeout"
            sudo sed -i 's/GRUB_TIMEOUT=10/GRUB_TIMEOUT=3/g' /etc/default/grub
            sudo update-grub
            break;;
        [Nn]* ) break;;
        * ) echo "Please answer Y/y or N/n as yes or no.";;
    esac
done

# Set custom keybindings
while true; do
    info_message "
    1.gnome-terminal: ctrl + alt + T
    2.nautilus (File manager): super (windows key) + E
    3.subl (sublime-text): ctrl + alt + S"
    conf_message "add above custom keyboard shortcuts"
    read -p "" yn
    case $yn in
        [Yy]*|"" )
            step_message "defining custom keyboard shortcut"
            python helpers/add_keybindings.py 'open Terminal' 'gnome-terminal' '<Control><Alt>T'
            python helpers/add_keybindings.py 'open File manager' 'nautilus' '<Super>E'
            python helpers/add_keybindings.py 'open Sublime-text' 'subl' '<Control><Alt>S'
            break;;
        [Nn]* ) break;;
        * ) echo "Please answer Y/y or N/n as yes or no.";;
    esac
done

# Update the system & install necessary tools
while true; do
    conf_message "update the system & install necessary tools"
    read -p "" yn
    case $yn in
        [Yy]*|"" )
            step_message "upgrading the system & install necessary tools"
            sudo pacman -Syyu
            sudo pacman -S yay
            yay -S base-devel jq
            break;;
        [Nn]* ) break;;
        * ) echo "Please answer Y/y or N/n as yes or no.";;
    esac
done

# Update NVIDIA Gpu driver
while true; do
    conf_message "update NVIDIA Gpu driver (Select it only if you have NVIDIA GPU)"
    read -p "" yn
    case $yn in
        [Yy]*|"" )
            step_message "upgrading the NVIDIA Gpu driver"
            sudo mhwd -a pci nonfree 0300
            break;;
        [Nn]* ) break;;
        * ) echo "Please answer Y/y or N/n as yes or no.";;
    esac
done

# Enable TLP (only for laptops)
while true; do
    conf_message "add laptop configuration"
    read -p "" yn
    case $yn in
        [Yy]*|"" )
            yay -S tlp
            sudo tlp start
            break;;
        [Nn]* ) break;;
        * ) echo "Please answer Y/y or N/n as yes or no.";;
    esac
done

# Git configuration
while true; do
    conf_message "set up global email and username for github"
    warning_message "(Edit the 'personal_Info.json' to change your email and username for github by default it is set up as NULL)"
    read -p "" yn
    case $yn in
        [Yy]*|"" )
            git config --global user.email $(jq '.git_email' config/personal_Info.json)
            git config --global user.name $(jq '.git_username' config/personal_Info.json)
            break;;
        [Nn]* ) break;;
        * ) echo "Please answer Y/y or N/n as yes or no.";;
    esac
done

# Install extra softwares
while true; do
    clear
    cat config/extra_softwares_list.txt
    echo
    conf_message "install these softwares above"
    read -p "" yn
    case $yn in
        [Yy]*|"" )
            step_message "running update again for safety measures :)"
            yay -Syyu
            step_message "installing necessary tools"
            yay -S - < config/extra_softwares_list.txt
            break;;
        [Nn]* ) break;;
        * ) echo "Please answer Y/y or N/n as yes or no.";;
    esac
done

# Install extra gnome-extensions
while true; do
    clear
    cat config/extra_gnome-extensions_list.txt
    echo
    conf_message "install these gnome extensions above"
    read -p "" yn
    case $yn in
        [Yy]*|"" )
            step_message "installing extra gnome extensions"
            yay -S - < config/extra_gnome-extensions_list.txt

            # Installing NVIDIA GPU Stats Tool
            git clone https://github.com/ethanwharris/gnome-nvidia-extension.git
            cd gnome-nvidia-extension
            make
            make install
            cd ..
            rm -rf gnome-nvidia-extension
            break;;
        [Nn]* ) break;;
        * ) echo "Please answer Y/y or N/n as yes or no.";;
    esac
done

# Configuaring Sublime-text
while true; do
    conf_message "configure sublime-text"
    read -p "" yn
    case $yn in
        [Yy]*|"" )
            step_message "configuring sublime-text"
            mkdir ~/Documents/my_code
            touch ~/Documents/my_code/input.txt
            touch ~/Documents/my_code/output.txt
            # Replace any existing sublime config with the bundled one.
            if [ -d ~/.config/sublime-text-3 ]; then
                rm -rf ~/.config/sublime-text-3/
                step_message "sublime-text's existing config deleted"
            fi
            step_message "copying new sublime-config files"
            unzip config/sublime-text-3.zip
            mv sublime-text-3 ~/.config
            break;;
        [Nn]* ) break;;
        * ) echo "Please answer Y/y or N/n as yes or no.";;
    esac
done

# Installing & Configuaring virt-manager (virtualization softwares)
while true; do
    conf_message "install & configure virtualization"
    read -p "" yn
    case $yn in
        [Yy]*|"" )
            step_message "installing & configuring virtualization"
            yay -S virt-manager qemu vde2 ebtables dnsmasq bridge-utils openbsd-netcat
            sudo systemctl enable libvirtd.service
            sudo systemctl start libvirtd.service
            # Enable this if virt-manager says your default is not active
            # sudo virsh net-autostart --network default
            # sudo virsh net-start default
            break;;
        [Nn]* ) break;;
        * ) echo "Please answer Y/y or N/n as yes or no.";;
    esac
done

# Configuaring & Installing additional themes and fonts
while true; do
    conf_message "configure themes and fonts"
    read -p "" yn
    case $yn in
        [Yy]*|"" )
            # Date-time configuration
            step_message "changing default date-time behaviour"
            timedatectl set-ntp 1 # Automatic update date-time
            gsettings set org.gnome.desktop.interface clock-format 12h
            gsettings set org.gnome.desktop.interface clock-show-date true
            gsettings set org.gnome.desktop.interface clock-show-seconds true

            # Fonts
            step_message "copying fonts"
            unzip config/fonts.zip
            sudo mv fonts/* /usr/share/fonts/TTF/
            rm -r fonts

            # Configuaring & Installing necessary tools
            mkdir ~/.themes ~/.icons
            # yay -S mojave-gtk-theme canta-icon-theme-git
            yay -S canta-icon-theme-git

            # Theme -> Applications: Ant-Dracula
            git clone -b alt-style --single-branch https://github.com/dracula/gtk.git
            mv gtk ~/.themes/
            gsettings set org.gnome.desktop.interface gtk-theme "gtk"
            gsettings set org.gnome.desktop.wm.preferences theme "gtk"

            # Theme -> Shell: Mojave-dark
            # gsettings set org.gnome.shell.extensions.user-theme name "Mojave-dark"

            # Theme -> Shell: Matcha-dark-azul
            gsettings set org.gnome.shell.extensions.user-theme name "Matcha-dark-azul"

            # Theme -> Cursor: PearDark
            unzip config/PearDarkCursors.zip -d ~/.icons
            gsettings set org.gnome.desktop.interface cursor-theme 'PearDarkCursors'

            # Background -> Image: 1.jpg
            wallpaper_path=$(pwd)"/config/wallpapers/W1.jpg"
            gsettings set org.gnome.desktop.background picture-uri "file://$wallpaper_path"

            #Theme -> Icons: Canta
            gsettings set org.gnome.desktop.interface icon-theme 'Canta'

            #Theme -> Icons: numix-circle + canta
            # yay -S numix-circle-icon-theme-git canta-icon-theme-git
            # gsettings set org.gnome.desktop.interface icon-theme 'Numix-Circle-Light'
            # gsettings set org.gnome.desktop.interface icon-theme 'Canta'

            # Theme -> Cursor: Mcmojave-cursors
            # yay -S mcmojave-cursors
            # gsettings set org.gnome.desktop.interface cursor-theme 'mcmojave-cursors'
            break;;
        [Nn]* ) break;;
        * ) echo "Please answer Y/y or N/n as yes or no.";;
    esac
done

# Theming gnome-terminal
# NOTE(review): this pipes a remote script from https://git.io/vQgMr straight
# into bash -- inspect the script before trusting it.
while true; do
    conf_message "change theme for gnome-terminal (Select ONE-DARK)"
    read -p "" yn
    case $yn in
        [Yy]*|"" )
            bash -c "$(wget -qO- https://git.io/vQgMr)"
            break;;
        [Nn]* ) break;;
        * ) echo "Please answer Y/y or N/n as yes or no.";;
    esac
done

# # Using Manjaro's default zsh configurations
# # Installing oh-my-zsh
# while true; do
#     conf_message "intall oh-my-zsh"
#     read -p "" yn
#     case $yn in
#         [Yy]*|"" )
#             step_message "installing oh-my-zsh"
#             # sh -c "$(curl -fsSL https://raw.githubusercontent.com/ohmyzsh/ohmyzsh/master/tools/install.sh)"
#             yay -S oh-my-zsh-git
#             break;;
#         [Nn]* ) break;;
#         * ) echo "Please answer Y/y or N/n as yes or no.";;
#     esac
# done

# # Configuaring ZSH
# while true; do
#     conf_message "set custom configuration for zsh"
#     read -p "" yn
#     case $yn in
#         [Yy]*|"" )
#             step_message "configuring zsh"
#             git clone https://github.com/zsh-users/zsh-autosuggestions ${ZSH_CUSTOM:-~/.oh-my-zsh/custom}/plugins/zsh-autosuggestions
#             git clone https://github.com/zsh-users/zsh-syntax-highlighting.git ${ZSH_CUSTOM:-~/.oh-my-zsh/custom}/plugins/zsh-syntax-highlighting
#             step_message "replacing .zshrc"
#             sudo rm -rf ~/.zshrc
#             cp config/.zshrc ~/
#             # step_message "changing default shell to zsh"
#             # chsh -s /bin/zsh
#             # sudo chsh -s /bin/zsh
#             break;;
#         [Nn]* ) break;;
#         * ) echo "Please answer Y/y or N/n as yes or no.";;
#     esac
# done

# Updating database
step_message "updating database"
sudo updatedb

# This will add "most" package functionality for manuals
while true; do
    conf_message "add 'most' package functionality for manuals"
    read -p "" yn
    case $yn in
        [Yy]*|"" )
            yay -S most
            echo "export PAGER=most" >> ~/.profile
            break;;
        [Nn]* ) break;;
        * ) echo "Please answer Y/y or N/n as yes or no.";;
    esac
done

# Adding custom aliases
while true; do
    clear
    cat config/custom_aliases.txt
    echo
    conf_message "add these custom aliases above"
    read -p "" yn
    case $yn in
        [Yy]*|"" )
            cat config/custom_aliases.txt >> ~/.zshrc
            break;;
        [Nn]* ) break;;
        * ) echo "Please answer Y/y or N/n as yes or no.";;
    esac
done

# Cleanup orphan packages
while true; do
    conf_message "remove unneeded packages"
    read -p "" yn
    case $yn in
        [Yy]*|"" )
            yay -Rns manjaro-hello
            yay -Rns $(yay -Qtdq)
            break;;
        [Nn]* ) break;;
        * ) echo "Please answer Y/y or N/n as yes or no.";;
    esac
done

info_message "
Success....................................
    Everything executed without any error :)..."

proceed_next

# Change the Password
while true; do
    warning_message "It is recommended to change your current user password with a strong one"
    conf_message "change your current password"
    read -p "" yn
    case $yn in
        [Yy]*|"" )
            passwd
            break;;
        [Nn]* ) break;;
        * ) echo "Please answer Y/y or N/n as yes or no.";;
    esac
done

warning_message "You need to restart your system next. After restart check out the 'post_config/post_configuration.txt' for furthur customizations"

# Reboot the system
while true; do
    conf_message "restart the system now"
    read -p "" yn
    case $yn in
        [Yy]*|"" )
            sudo reboot
            break;;
        [Nn]* ) break;;
        * ) echo "Please answer Y/y or N/n as yes or no.";;
    esac
done
The model would be trained on a corpus of emails labeled as either spam or not spam. This data could either be labeled manually or obtained from a pre-existing dataset. First, the emails would need to be pre-processed: tokenizing the sentences, removing stopwords and punctuation, and optionally lemmatizing or stemming the words. The cleaned text would then be converted into numeric feature vectors (for example, with a bag-of-words or TF-IDF representation), and the vectorized data would be split into training and testing sets. A suitable machine learning model, such as logistic regression or a support vector machine, could then be fitted to the training data. Finally, the model would be evaluated on the test set to measure its classification accuracy.
/* * Copyright (C) 2018-2019 <NAME> (www.helger.com) * philip[at]helger[dot]com * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.helger.aufnahme.businessobj; import com.helger.photon.security.object.AbstractBusinessObjectMicroTypeConverter; import com.helger.xml.microdom.IMicroElement; import com.helger.xml.microdom.MicroElement; import javax.annotation.Nonnull; import javax.annotation.Nullable; /** * <p>Default MicroTypeConverter implementation of {@link com.helger.aufnahme.businessobj.IExTrunkSizeBO}</p> * <p>This class was initially automatically created</p> * * * @author JDMCodeGenerator */ public class ExTrunkSizeBOMicroTypeConverter extends AbstractBusinessObjectMicroTypeConverter<ExTrunkSizeBO> { private static final String ATTR_BHD = "bhd"; private static final String ATTR_HEIGHT = "height"; @Nonnull public IMicroElement convertToMicroElement(@Nonnull final ExTrunkSizeBO aValue, @Nullable final String sNamespaceURI, @Nonnull final String sTagName) { final IMicroElement aElement = new MicroElement(sNamespaceURI, sTagName); super.setObjectFields(aValue, aElement); aElement.setAttribute(ATTR_BHD, aValue.getBHD()); aElement.setAttribute(ATTR_HEIGHT, aValue.getHeight().getID()); return aElement; } @Nonnull public ExTrunkSizeBO convertToNative(@Nonnull final IMicroElement aElement) { final int nBHD = aElement.getAttributeValueAsInt(ATTR_BHD, -1); final EExTreeHeightBO eHeight = EExTreeHeightBO.getFromIDOrNull(aElement.getAttributeValue(ATTR_HEIGHT)); return new 
ExTrunkSizeBO(super.getStubObject(aElement), nBHD, eHeight); } }
class PropertyAccessor {
    // Redirect target. Declared public because the test code below assigns
    // it from outside the class — with the original 'protected' visibility
    // that external write is a fatal Error in PHP.
    public $redirectTo;

    /**
     * Return the configured redirect target, or '/home' when none is set.
     *
     * The original used property_exists(), which is always true for a
     * declared property even when its value is null — so the '/home'
     * fallback could never fire and an unconfigured instance returned null.
     * The null-coalescing operator gives the intended behavior.
     */
    public function getProperty() {
        return $this->redirectTo ?? '/home';
    }
}

// Test the implementation
$accessor = new PropertyAccessor();
$accessor->redirectTo = '/dashboard';
echo $accessor->getProperty(); // Output: /dashboard

$accessor2 = new PropertyAccessor();
echo $accessor2->getProperty(); // Output: /home
#!/usr/bin/env bash
# Build the docs/ tree from the current `master` branch in a throwaway
# worktree and force-push the resulting commit to the gh-pages branch of
# the 'gh-publish' remote.
#
# NOTE: pushd/popd are bash builtins, not POSIX sh — the original
# '#!/bin/sh' shebang broke on systems where /bin/sh is dash/ash.
set -x

BUILDDIR=.gh-pages-build
TARGET_BRANCH=gh-pages

# Bail out early if the docs source is missing.
test -f docs/index.xhtml || exit 1

# Check out the current `master` branch in a new directory
git worktree add --detach $BUILDDIR master
pushd $BUILDDIR

# Build things (runs `make` in the docs/ directory)
make DOC_TARGET=docs BROWSERIFY=../node_modules/.bin/browserify CODEMIRROR=../node_modules/codemirror

# Add and commit the new tree to the $TARGET_BRANCH branch.
# commit-tree creates the commit object directly (parent = master) without
# moving any local branch ref.
git add -f docs/*
BUILD_ID=$(git commit-tree -p master -m 'gh-pages build' $(git write-tree))
popd

# Clean up the worktree and push the build commit to the gh-publish remote
git worktree remove -f $BUILDDIR
git push -f gh-publish $BUILD_ID:$TARGET_BRANCH
<gh_stars>0 import { Prop, Schema, SchemaFactory } from '@nestjs/mongoose'; import * as mongoose from 'mongoose'; import { Product } from '../../products/schemas/product.schema'; import { Field, ID, ObjectType } from '@nestjs/graphql'; @ObjectType() @Schema() export class Cart { @Field(() => ID) _id: string; @Field(() => String) @Prop() user: string; @Field(() => Product) @Prop({ type: mongoose.Schema.Types.ObjectId, ref: 'Product' }) product: Product; @Field(() => Number) @Prop() count: number; @Field(() => Number) @Prop() price: number; } export const CartSchema = SchemaFactory.createForClass(Cart);
def invokeAPI(url, body, timeout=None):
    """POST *body* as a JSON payload to *url* and return the HTTP response.

    The original computed the response and silently discarded it, so
    callers could never observe the status code or response body; it is
    now returned. A ``timeout`` parameter is added (default ``None``
    preserves the original indefinite-wait behavior).

    Args:
        url: Target endpoint URL.
        body: JSON-serializable object sent as the request body.
        timeout: Optional seconds to wait for the server before raising
            ``requests.Timeout``; ``None`` waits indefinitely.

    Returns:
        The ``requests.Response`` object from the POST call.
    """
    import requests  # local import preserved from the original

    response = requests.post(url, json=body, timeout=timeout)
    return response