text
stringlengths
1
1.05M
package main.java.api.db;

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

import main.java.api.SaveAttendanceRequest;

/**
 * Query to insert an item of attendance into the lectureattendance table.
 *
 * @author Maria
 */
public class SubmitAttendanceQuery {

    private static final Logger LOGGER = LogManager.getLogger("SubmitAttendanceQuery");

    /** Parameterized insert; all values are bound, so no SQL built from input. */
    private static final String SQL = "INSERT INTO lectureattendance"
            + "(barcode, day, month, year, hour, minute, course) VALUES"
            + "(?,?,?,?,?,?,?)";

    private final Connection conn;

    /**
     * @param conn open JDBC connection owned by the caller (not closed here)
     * @throws SQLException kept for backward compatibility with existing callers
     */
    public SubmitAttendanceQuery(Connection conn) throws SQLException {
        this.conn = conn;
    }

    /**
     * Inserts one attendance record.
     * <p>
     * Fix: the statement is now prepared per call and closed via
     * try-with-resources — the original prepared it once in the constructor
     * and never closed it, leaking the statement handle.
     *
     * @param request attendance data; note getDate() populates the "day" column
     * @throws SQLException if the insert fails
     */
    public void executeQuery(SaveAttendanceRequest request) throws SQLException {
        try (PreparedStatement stmt = conn.prepareStatement(SQL)) {
            stmt.setString(1, request.getBarcode());
            stmt.setInt(2, request.getDate());
            stmt.setInt(3, request.getMonth());
            stmt.setInt(4, request.getYear());
            stmt.setInt(5, request.getHour());
            stmt.setInt(6, request.getMinute());
            stmt.setString(7, request.getCourse());
            LOGGER.info("Executing query to save student attendance. " + request);
            stmt.executeUpdate();
        }
    }
}
// Combines two metric-like objects under a binary operator, preserving the
// metric interface (valueAt / toString / on / shift) on the combined object.
const genericOperate = (name, operate) => (state, metric) => {
  const combined = Object.assign({}, state);
  // Point-wise application of the operator at index i.
  combined.valueAt = i => operate(state.valueAt(i), metric.valueAt(i));
  // e.g. "m1 + m2" for the '+' operator.
  combined.toString = () => `${state} ${name} ${metric}`;
  // With no listener this is a read (delegated to state); with a listener it
  // subscribes on both operands.
  combined.on = (type, listener = null) => {
    if (listener === null) {
      return state.on(type);
    }
    state.on(type, listener);
    metric.on(type, listener);
  };
  // Shifting the combination shifts both operands and recombines them.
  combined.shift = offset =>
    genericOperate(name, operate)(state.shift(offset), metric.shift(offset));
  return combined;
};

// Arithmetic API over a metric: each method combines `state` with another
// metric under the corresponding point-wise operator.
const apiOperator = state => {
  const build = (symbol, fn) => metric => genericOperate(symbol, fn)(state, metric);
  return {
    add: build('+', (a, b) => a + b),
    subtract: build('-', (a, b) => a - b),
    multiply: build('*', (a, b) => a * b),
    divide: build('/', (a, b) => a / b),
  };
};

export default apiOperator;
#!/bin/bash
# Service wrapper for "idea-server": install/uninstall/start/stop.
# ============================== vvv Do not modify vvv ==============================
# Reload environment variables.
# NOTE(review): `a || b && c && d` groups as `(a || b) && c && d` in shell —
# the ENV_FILE check runs even when `. Init ENV` succeeds; confirm intended.
. Init ENV || echo "Not initialized" && [ -z "$ENV_FILE" ] && exit 1
# Script name.
SELF_NAME=$(basename $BASH_SOURCE)
# Script absolute path.
SELF_PATH=$(cd `dirname $0` && pwd)/$SELF_NAME
# ============================== ^^^ Do not modify ^^^ ==============================
# Software name.
ZIP_NAME=idea-server
# Download link (GIT_HOST comes from the sourced environment).
DOWNLOAD_URL=$GIT_HOST/idea/$ZIP_NAME
# Install path (a single binary file, not a directory).
SETUP_PATH=$SOFTWARE_PATH/$ZIP_NAME
# Startup path.
STARTUP=$SETUP_PATH
#
# Software parameters.
PORT=1117
USER=epoch

# Kill the running server process, if any (matched by full command line).
stop(){
    PID=$(pgrep -f $STARTUP) && kill $PID 2>/dev/null
}

# (Re)start the server; installs it first when the binary is missing.
start(){
    stop
    [ -f $STARTUP ] || install || exit 1
    $STARTUP -p $PORT -u $USER 1>/dev/null 2>&1 &
}

# Stop, remove the binary, and deregister from autostart.
# NOTE(review): `startup_clear` is presumably defined by the sourced Init env.
uninstall(){
    stop
    rm -rf $SETUP_PATH
    startup_clear $SELF_NAME
}

# Fresh install: clean out any old copy, download, register autostart, start.
install(){
    uninstall
    wget -c -t 3 $DOWNLOAD_URL -O $SETUP_PATH || exit 1
    chmod +x $STARTUP
    startup $SELF_NAME
    start
    echo "install idea-server success!"
}

# ============================== vvv Main vvv ==============================
case $1 in
    stop)
        stop
        ;;
    start)
        start
        ;;
    uninstall)
        uninstall
        ;;
    install)
        install
        ;;
    *)
        echo "Usage: $SELF_NAME <start|stop|install|uninstall>"
        ;;
esac
#!/bin/bash
# Entry point for a Selenium Grid hub container: generate the hub config,
# launch the standalone server in hub role, and forward TERM/INT for a
# clean shutdown.

ROOT=/opt/selenium
CONF=$ROOT/config.json

# Regenerate the hub configuration from the container environment.
/opt/bin/generate_config >$CONF

echo "starting selenium hub with configuration:"
cat $CONF

if [ ! -z "$SE_OPTS" ]; then
  echo "appending selenium options: ${SE_OPTS}"
fi

# Forward SIGTERM to the Java process and wait for it to exit.
function shutdown {
  echo "shutting down hub.."
  kill -s SIGTERM $NODE_PID
  wait $NODE_PID
  echo "shutdown complete"
}

# JAVA_OPTS and SE_OPTS are intentionally unquoted so they word-split into
# separate arguments.
java ${JAVA_OPTS} -jar /opt/selenium/selenium-server-standalone.jar \
  -role hub \
  -hubConfig $CONF \
  ${SE_OPTS} &
NODE_PID=$!

# NOTE(review): the trap is installed after the server is launched — a signal
# arriving in that brief window is not forwarded; confirm this is acceptable.
trap shutdown SIGTERM SIGINT
wait $NODE_PID
# Example: locate a "view" photo inside a reference "screen" image and write
# a debug visualization of the match.
import screenpoint
import cv2

# Load input images (flag 0 = read as grayscale).
screen = cv2.imread('example/screen.png', 0)
view = cv2.imread('example/view.jpg', 0)

# Project centroid of the view onto the screen image; the third argument
# enables debug-image output.
# NOTE(review): assumes screenpoint.project returns (x, y, debug_image) when
# the debug flag is True — confirm against the screenpoint API.
x, y, img_debug = screenpoint.project(view, screen, True)

# Write debug image.
cv2.imwrite('example/match_debug.png', img_debug)
/* Public declarations for a ft_printf implementation (printf subset). */
#ifndef FT_PRINTF_H
# define FT_PRINTF_H

# include <stdio.h>
# include <stdarg.h>
# include <unistd.h>
# include <stdlib.h>

/* Status / sign codes shared by the parser and the printers. */
# define ERROR -1
# define OFF 0
# define ON 1
# define PLUS 2
# define MINUS -2

/* Per-conversion parsing state: variadic args, flag states, width/precision
 * and running output length. Flag fields presumably hold ON/OFF (or
 * PLUS/MINUS for sign) — confirm against the .c implementation. */
typedef struct
{
	va_list args;
	size_t total_len;   /* characters written so far */
	//char type;
	int status;
	int left_align;     /* '-' flag */
	int zero_pad;       /* '0' flag */
	// int notation;
	int hash;           /* '#' flag */
	int sign;           /* '+' flag / value sign */
	int space;          /* ' ' flag */
	// int flag;
	int width;          /* minimum field width */
	int precision;
	size_t len;         /* length of the current converted body */
} t_stock;

/* Computed segment lengths for one formatted field. */
typedef struct
{
	size_t padding;
	size_t prefix;
	size_t body;
	size_t precision;
	size_t sum;         /* total of the segments */
} t_length;

int ft_printf(const char *fmt, ...);
size_t ft_analyze_fmt(const char *fmt, t_stock *lst);
void ft_print_str(t_stock *lst);
void ft_print_char(t_stock *lst);
void ft_print_address(t_stock *lst, size_t address);
void ft_print_unsigned(t_stock *lst, unsigned int decimal);
void ft_print_decimal(t_stock *lst, int decimal);
void ft_print_hex(t_stock *lst, int decimal);
void ft_print_hex_cap(t_stock *lst, int decimal);
void ft_left_align(const char *fmt, t_stock *lst, size_t *i);
void ft_zero_pad(const char *fmt, t_stock *lst, size_t *i);
void ft_sign(const char *fmt, t_stock *lst, size_t *i);
void ft_space(const char *fmt, t_stock *lst, size_t *i);
void ft_hash(const char *fmt, t_stock *lst, size_t *i);
void ft_precision(const char *fmt, t_stock *lst, size_t *i);
void ft_width(const char *fmt, t_stock *lst, size_t *i);
void ft_print_space(t_stock *lst, int len);
void ft_print_sign(t_stock *lst);
void ft_print_left_align(t_stock *lst, int len);
void ft_print_zero_pad(t_stock *lst, int len);

#endif
import { BaseProvider, LightTheme } from "baseui" import React from "react" import IconButton from "@material-ui/core/IconButton" import ThumbUpIcon from '@mui/icons-material/ThumbUp'; import ThumbDownIcon from '@mui/icons-material/ThumbDown'; import { withStreamlitConnection, StreamlitComponentBase, Streamlit, } from "streamlit-component-lib" interface State { hasClickedLike: boolean, hasClickedDislike: boolean, } class streamlit_text_rating extends StreamlitComponentBase<State> { public state = {hasClickedLike: false,hasClickedDislike: false} public render = (): React.ReactNode => { // Streamlit sends us a theme object via props that we can use to ensure // that our component has visuals that match the active theme in a // streamlit app. const { theme } = this.props const style: React.CSSProperties = {} const text = this.props.args["text"] as string const color_text = this.props.args["color_text"] as string const color_background = this.props.args["color_background"] as string const font_size = this.props.args['font_size'] as string const font_family = this.props.args['font_family'] as string const font_weight=this.props.args['font_weight'] as number return ( <div style={{display:'flex', alignItems:'center', justifyContent:'flex-start',width:'100%',margin:'0px',}}> <p style={{color:color_text, background:color_background, fontSize:font_size, fontWeight:font_weight, marginBottom:'0px', fontFamily:font_family, padding:'-6px', borderRadius:'6px',}}> <li>{text}</li> </p> <IconButton onClick={this.onClickedLike} style={{outline:'none',paddingTop:'0px',paddingBottom:'0px'}}> <ThumbUpIcon color={this.state.hasClickedLike?'success':'disabled'}/> </IconButton> <IconButton onClick={this.onClickedDislike} style={{outline:'none',paddingTop:'0px',paddingBottom:'0px'}} > <ThumbDownIcon color={this.state.hasClickedDislike?'error':'disabled'}/> </IconButton> </div> ) } private onClickedLike = (): void => { this.setState( prevState => ({hasClickedLike: ! 
this.state.hasClickedLike}), () => {if (this.state.hasClickedLike) {Streamlit.setComponentValue('liked');} else {Streamlit.setComponentValue('None');}} ); this.setState( prevState => ({hasClickedDislike: false}) ); } private onClickedDislike = (): void => { this.setState( prevState => ({hasClickedDislike: ! this.state.hasClickedDislike}), () => {if (this.state.hasClickedDislike) {Streamlit.setComponentValue('disliked');} else {Streamlit.setComponentValue('None');}} ); this.setState( prevState => ({hasClickedLike: false}) ); } } export default withStreamlitConnection(streamlit_text_rating)
<filename>test/node.ts const path = require("path"); const { exec } = require("child_process"); describe("autoRef option", () => { const fixture = (filename) => process.execPath + " " + path.join(__dirname, "fixtures", filename); it("should stop once the timer is triggered", (done) => { exec(fixture("unref.ts"), done); }); it("should stop once the timer is triggered (even when trying to reconnect)", (done) => { exec(fixture("unref-during-reconnection.ts"), done); }); it("should stop once the timer is triggered (polling)", (done) => { exec(fixture("unref-polling-only.ts"), done); }); it("should stop once the timer is triggered (websocket)", (done) => { exec(fixture("unref-websocket-only.ts"), done); }); it("should not stop with autoUnref set to false", (done) => { const process = exec(fixture("no-unref.ts"), () => { done(new Error("should not happen")); }); setTimeout(() => { process.kill(); done(); }, 1000); }); });
#!/bin/bash
#
# SPARQL Build Script
# @author Loreto Parisi (loretoparisi at gmail dot com)
# v1.0.0
# @2018 Loreto Parisi (loretoparisi at gmail dot com)
#
# Watches the growing wikidata journal file, printing its size plus elapsed
# time, refreshed every second.

# wikidata dump volume folder
# it will contain the split folder
# and the wikidata journal file: wikidata.jnl
# example: /root/wikidata
ROOT=/root/data

convertsecs() {
 #Function used to convert seconds into HH:MM:SS
 ((h=${1}/3600))
 ((m=(${1}%3600)/60))
 ((s=${1}%60))
 printf "%02d:%02d:%02d\n" $h $m $s
}
typeset -fx convertsecs # export to sub-shells
# maybe this is implicit with your version of Bash

# Start time of the watch, in epoch seconds.
NOW=$(exec date +%s)

cmd() {
 #echo it"'"s fact of life than shell    # Replace by *your*
 #echo quoting can be a '"nightmare"'    # actual commands
 ls -hl $ROOT/wikidata.jnl
}
typeset -fx cmd

# The '"'"' sequences close/reopen quoting so that $NOW is expanded by the
# outer shell while the rest is passed literally to the bash -c sub-shell,
# which needs the exported functions above.
watch -n 1 \
'bash -c '"'"'cat <(cmd) \
<(echo Elapsed $(convertsecs $(($(exec date +%s) - '$NOW'))))'"'"
<reponame>MichalPaszkiewicz/michals-react-components import * as React from 'react'; import {ReactProps} from "./reactprops"; import {ModalProps, Modal, ModalButton} from "./modal"; export class ConfirmProps extends ReactProps{ title: string; show: boolean; onConfirm: (e?: any) => void; onReject: (e?: any) => void; } export var Confirm = (props: ConfirmProps) => { return ( <Modal title={props.title} show={props.show} onClose={props.onReject} buttons={[new ModalButton("OK", props.onConfirm)]}> {props.children} </Modal> ) }
/*
 * Copyright 2015 Textocat
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.textocat.textokit.commons.cas;

import com.google.common.base.Function;
import com.google.common.base.Objects;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import org.apache.uima.cas.*;
import org.apache.uima.jcas.JCas;
import org.apache.uima.jcas.cas.FSArray;
import org.apache.uima.jcas.cas.StringArray;

import java.util.*;

import static com.google.common.collect.Lists.newLinkedList;
import static com.google.common.collect.Sets.newHashSet;
import static java.lang.String.format;

/**
 * Static helpers for UIMA feature structures: conversions between Java
 * collections and UIMA arrays, iterator filtering, and constraint building.
 *
 * @author <NAME>
 */
public class FSUtils {

    private FSUtils() {
    }

    /** Returns the JCas of the given FS, wrapping the checked CASException. */
    public static JCas getJCas(FeatureStructure fs) {
        try {
            return fs.getCAS().getJCas();
        } catch (CASException e) {
            throw new RuntimeException(e);
        }
    }

    /** True if arr (may be null) contains an element equal to targetFS. */
    public static boolean contain(ArrayFS arr, FeatureStructure targetFS) {
        if (arr == null) {
            return false;
        }
        for (int i = 0; i < arr.size(); i++) {
            if (Objects.equal(arr.get(i), targetFS)) {
                return true;
            }
        }
        return false;
    }

    /** Copies a collection of feature structures into a new FSArray. */
    public static FSArray toFSArray(JCas cas, Collection<? extends FeatureStructure> srcCol) {
        return toFSArray(cas, srcCol, srcCol.size());
    }

    /** Varargs convenience overload of {@link #toFSArray(JCas, Collection)}. */
    public static FSArray toFSArray(JCas cas, FeatureStructure... srcArr) {
        return toFSArray(cas, Arrays.asList(srcArr), srcArr.length);
    }

    /**
     * Copies srcCol into a new FSArray of the given size.
     * Caller must ensure srcSize matches the number of elements in srcCol.
     */
    public static FSArray toFSArray(JCas cas, Iterable<? extends FeatureStructure> srcCol,
                                    int srcSize) {
        FSArray result = new FSArray(cas, srcSize);
        int i = 0;
        for (FeatureStructure fs : srcCol) {
            result.set(i, fs);
            i++;
        }
        return result;
    }

    /** Varargs convenience overload of {@link #toStringArray(JCas, Collection)}. */
    public static StringArray toStringArray(JCas cas, String... srcArr) {
        return toStringArray(cas, Arrays.asList(srcArr));
    }

    /** Copies a collection of strings into a new StringArray. */
    public static StringArray toStringArray(JCas cas, Collection<String> srcCol) {
        StringArray result = new StringArray(cas, srcCol.size());
        int i = 0;
        for (String gr : srcCol) {
            result.set(i, gr);
            i++;
        }
        return result;
    }

    /** Immutable set of the array's strings; empty set for null input. */
    public static Set<String> toSet(StringArrayFS fsArr) {
        if (fsArr == null)
            return ImmutableSet.of();
        ImmutableSet.Builder<String> resultBuilder = ImmutableSet.builder();
        for (int i = 0; i < fsArr.size(); i++) {
            resultBuilder.add(fsArr.get(i));
        }
        return resultBuilder.build();
    }

    /** Immutable list of the array's strings; empty list for null input. */
    public static List<String> toList(StringArrayFS fsArr) {
        if (fsArr == null)
            return ImmutableList.of();
        ImmutableList.Builder<String> resultBuilder = ImmutableList.builder();
        for (int i = 0; i < fsArr.size(); i++) {
            resultBuilder.add(fsArr.get(i));
        }
        return resultBuilder.build();
    }

    /** Builds a type constraint matching any of the given types. */
    public static FSTypeConstraint getTypeConstraint(Type firstType, Type... otherTypes) {
        FSTypeConstraint constr = ConstraintFactory.instance().createTypeConstraint();
        constr.add(firstType);
        for (Type t : otherTypes) {
            constr.add(t);
        }
        return constr;
    }

    /** Builds a type constraint matching any of the given type names. */
    public static FSTypeConstraint getTypeConstraint(String firstType, String... otherTypes) {
        FSTypeConstraint constr = ConstraintFactory.instance().createTypeConstraint();
        constr.add(firstType);
        for (String t : otherTypes) {
            constr.add(t);
        }
        return constr;
    }

    /** Filters srcIter by the conjunction of constraints and collects to a list. */
    public static <FST extends FeatureStructure> List<FST> filterToList(
            CAS cas, FSIterator<FST> srcIter, FSMatchConstraint... constraints) {
        FSIterator<FST> resultIter = filter(cas, srcIter, constraints);
        return toList(resultIter);
    }

    /**
     * Wraps srcIter in a filtered iterator for the conjunction of constraints;
     * returns srcIter unchanged when there are no constraints.
     */
    public static <F extends FeatureStructure> FSIterator<F> filter(
            CAS cas, FSIterator<F> srcIter, FSMatchConstraint... constraints) {
        if (constraints.length == 0) {
            return srcIter;
        }
        FSMatchConstraint resultConstr = and(constraints);
        return cas.createFilteredIterator(srcIter, resultConstr);
    }

    /**
     * Returns an unmodifiable list of srcList's elements matching every
     * constraint; an immutable copy when there are no constraints.
     */
    public static <F extends FeatureStructure> List<F> filter(List<F> srcList,
                                                             FSMatchConstraint... constraints) {
        if (constraints.length == 0) {
            return ImmutableList.copyOf(srcList);
        }
        ArrayList<F> resultList = Lists.newArrayListWithCapacity(srcList.size());
        FSMatchConstraint conj = and(constraints);
        for (F fs : srcList) {
            if (conj.match(fs)) {
                resultList.add(fs);
            }
        }
        return Collections.unmodifiableList(resultList);
    }

    /**
     * Conjunction of the given constraints.
     * @throws IllegalArgumentException if no constraints are given
     */
    public static FSMatchConstraint and(FSMatchConstraint... constraints) {
        if (constraints.length == 0) {
            throw new IllegalArgumentException("Constraints array are empty");
        }
        ConstraintFactory cf = ConstraintFactory.instance();
        FSMatchConstraint resultConstr = constraints[0];
        for (int i = 1; i < constraints.length; i++) {
            resultConstr = cf.and(resultConstr, constraints[i]);
        }
        return resultConstr;
    }

    /** Drains the iterator (from its first element) into a new list. */
    public static <FST extends FeatureStructure> List<FST> toList(FSIterator<FST> iter) {
        LinkedList<FST> result = newLinkedList();
        fill(iter, result);
        return result;
    }

    /** Drains the iterator (from its first element) into a new set. */
    public static <FST extends FeatureStructure> Set<FST> toSet(FSIterator<FST> iter) {
        HashSet<FST> result = newHashSet();
        fill(iter, result);
        return result;
    }

    /** Rewinds srcIter and appends every element to destCol. */
    public static <FST extends FeatureStructure> void fill(FSIterator<FST> srcIter,
                                                           Collection<FST> destCol) {
        srcIter.moveToFirst();
        while (srcIter.isValid()) {
            destCol.add(srcIter.get());
            srcIter.moveToNext();
        }
    }

    /*
     * Note that getIntValue will return 0 if feature value is not set.
     */
    /**
     * Minimum of intFeat over the collection.
     * @throws IllegalArgumentException if fsCollection is empty
     */
    public static int intMinBy(Iterable<? extends FeatureStructure> fsCollection,
                               Feature intFeat) {
        // Fix: primitive accumulator — the original used boxed Integer, which
        // auto-unboxed on every comparison for no benefit.
        int min = Integer.MAX_VALUE;
        boolean hasResult = false;
        for (FeatureStructure fs : fsCollection) {
            int intValue = fs.getIntValue(intFeat);
            hasResult = true;
            if (intValue < min) {
                min = intValue;
            }
        }
        if (!hasResult) {
            throw new IllegalArgumentException("fsCollection is empty");
        }
        return min;
    }

    /*
     * Note that getIntValue will return 0 if feature value is not set.
     */
    /**
     * Maximum of intFeat over the collection.
     * @throws IllegalArgumentException if fsCollection is empty
     */
    public static int intMaxBy(Iterable<? extends FeatureStructure> fsCollection,
                               Feature intFeat) {
        // Fix: primitive accumulator, as in intMinBy.
        int max = Integer.MIN_VALUE;
        boolean hasResult = false;
        for (FeatureStructure fs : fsCollection) {
            int intValue = fs.getIntValue(intFeat);
            hasResult = true;
            if (intValue > max) {
                max = intValue;
            }
        }
        if (!hasResult) {
            throw new IllegalArgumentException("fsCollection is empty");
        }
        return max;
    }

    /**
     * Extractor function for a string feature of the given type.
     * @throws IllegalStateException if the type has no such feature
     */
    public static Function<FeatureStructure, String> stringFeatureFunction(Type fsType,
                                                                           String featName) {
        final Feature feat = fsType.getFeatureByBaseName(featName);
        if (feat == null)
            throw new IllegalStateException(
                    format("%s does not have feature %s", fsType.getName(), featName));
        return new Function<FeatureStructure, String>() {
            @Override
            public String apply(FeatureStructure fs) {
                return fs.getStringValue(feat);
            }
        };
    }
}
/*
 * Copyright (C) 2018-2019 <NAME> (www.helger.com)
 * philip[at]helger[dot]com
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.helger.postpone;

import java.io.File;
import java.io.Serializable;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.time.Duration;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.time.OffsetDateTime;
import java.time.Period;
import java.time.ZonedDateTime;
import com.helger.commons.datetime.PDTFactory;
import com.helger.commons.math.MathHelper;
import com.helger.photon.app.mock.PhotonAppWebTestRule;
import com.helger.xml.serialize.read.DOMReader;
import javax.xml.namespace.QName;
import org.junit.Assert;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TestRule;
import org.w3c.dom.Element;

/**
 * This is the self-test class of JDM
 * This class was initially automatically created
 *
 *
 * @author JDMCodeGenerator
 */
public final class JDMSelfTest {
  @Rule
  public final TestRule m_aRule = new PhotonAppWebTestRule();

  /**
   * Smoke-test: creates one mock value for every type the code generator
   * supports and asserts the reference-typed ones are non-null. Generated
   * code — do not hand-tune the individual statements.
   */
  @Test
  @SuppressWarnings({ "unused", "cast" })
  public void testMockValueCreation() {
    // Generated domain types (IA/IB/IC, A/B/C, enum EE are project classes).
    IA var0;
    var0 = new A(new B(new C(EE.ONE)));
    IB var1;
    var1 = new B(new C(EE.ONE));
    BigDecimal var2;
    var2 = MathHelper.toBigDecimal("12.3456");
    Assert.assertNotNull(var2);
    BigInteger var3;
    var3 = MathHelper.toBigInteger("7890");
    Assert.assertNotNull(var3);
    Boolean var4;
    var4 = Boolean.TRUE;
    Assert.assertNotNull(var4);
    boolean var5;
    var5 = true;
    byte var6;
    var6 = 2;
    Byte var7;
    var7 = Byte.valueOf(((byte) 1));
    Assert.assertNotNull(var7);
    IC var8;
    var8 = new C(EE.ONE);
    Character var9;
    var9 = Character.valueOf(' ');
    Assert.assertNotNull(var9);
    char var10;
    var10 = 'x';
    double var11;
    var11 = 4.0;
    Double var12;
    var12 = Double.valueOf(3.0);
    Assert.assertNotNull(var12);
    Duration var13;
    var13 = Duration.ofDays(1L);
    Assert.assertNotNull(var13);
    EE var14;
    var14 = EE.ONE;
    Assert.assertNotNull(var14);
    Element var15;
    var15 = DOMReader.readXMLDOM("<item x='y'/>").getDocumentElement();
    Assert.assertNotNull(var15);
    File var16;
    var16 = new File("file.txt");
    Assert.assertNotNull(var16);
    float var17;
    var17 = 6.0F;
    Float var18;
    var18 = Float.valueOf(5.0F);
    Assert.assertNotNull(var18);
    int var19;
    var19 = 8;
    Integer var20;
    var20 = Integer.valueOf(7);
    Assert.assertNotNull(var20);
    // Date/time mocks use "current" values from PDTFactory.
    LocalDate var21;
    var21 = PDTFactory.getCurrentLocalDate();
    Assert.assertNotNull(var21);
    LocalDateTime var22;
    var22 = PDTFactory.getCurrentLocalDateTime();
    Assert.assertNotNull(var22);
    LocalTime var23;
    var23 = PDTFactory.getCurrentLocalTime();
    Assert.assertNotNull(var23);
    long var24;
    var24 = 10L;
    Long var25;
    var25 = Long.valueOf(9L);
    Assert.assertNotNull(var25);
    OffsetDateTime var26;
    var26 = PDTFactory.getCurrentOffsetDateTime();
    Assert.assertNotNull(var26);
    Period var27;
    var27 = Period.ofDays(2);
    Assert.assertNotNull(var27);
    QName var28;
    var28 = new QName("urn:example", "elem");
    Assert.assertNotNull(var28);
    Serializable var29;
    var29 = ((Serializable)"object");
    Assert.assertNotNull(var29);
    Short var30;
    var30 = Short.valueOf(((short) 11));
    Assert.assertNotNull(var30);
    short var31;
    var31 = 12;
    String var32;
    var32 = "foo";
    Assert.assertNotNull(var32);
    ZonedDateTime var33;
    var33 = PDTFactory.getCurrentZonedDateTime();
    Assert.assertNotNull(var33);
  }
}
/**
 * Collects members whose name contains `name` (case-insensitive substring).
 * When `recursive` is truthy, also searches each member's own `members`
 * array, appending nested matches after their parent.
 *
 * @param {Array<{name: string, members?: Array}>} members candidates
 * @param {string} name substring to match, case-insensitively
 * @param {boolean} recursive descend into nested `members` arrays
 * @returns {Array} matching members, parents before their descendants
 */
function findMembersMatchingName(members, name, recursive) {
  const needle = name.toLowerCase();
  const matches = [];
  for (const member of members) {
    if (member.name.toLowerCase().indexOf(needle) >= 0) {
      matches.push(member);
    }
    if (recursive && member.members) {
      matches.push(...findMembersMatchingName(member.members, name, recursive));
    }
  }
  return matches;
}

module.exports = findMembersMatchingName;
#include "queens.h"
#include "gtest/gtest.h"

using std::cout;
using std::endl;
using nq::Queens;

// Each test prints the board before/after a move so failures can be
// inspected visually, then asserts the attacking-pair count.

TEST(QueensTest, Two) {
  Queens q = Queens::Create(2);
  cout << "====(Initial)====" << endl << q << endl;
  EXPECT_EQ(2UL, q.num_attacks());
  q.Swap(0, 1);
  cout << "====Swap(0,1)====" << endl << q << endl;
  // 2x2 has no solution: every arrangement keeps 2 attacks.
  EXPECT_EQ(2UL, q.num_attacks());
}

TEST(QueensTest, Three) {
  Queens q = Queens::Create(3);
  cout << "====(Initial)====" << endl << q << endl;
  EXPECT_EQ(6UL, q.num_attacks());
  q.Swap(0, 1);
  cout << "====Swap(0,1)====" << endl << q << endl;
  EXPECT_EQ(2UL, q.num_attacks());
}

TEST(QueensTest, Four) {
  Queens q = Queens::Create(4);
  cout << "====(Initial)====" << endl << q << endl;
  EXPECT_EQ(12UL, q.num_attacks());
  q.Swap(0, 3);
  cout << "====Swap(0,3)====" << endl << q << endl;
  EXPECT_EQ(4UL, q.num_attacks());
  q.Swap(0, 1);
  cout << "====Swap(0,1)====" << endl << q << endl;
  q.Swap(2, 3);
  cout << "====Swap(2,3)====" << endl << q << endl;
  EXPECT_EQ(0UL, q.num_attacks());
}

TEST(QueensTest, PermuteFour) {
  Queens q = Queens::Create(4);
  q.Permute(0, 2);
  cout << "====Permute(0,2)====" << endl << q << endl;
  EXPECT_EQ(2UL, q.num_attacks());
  q.Swap(1, 3);
  // Fix: the banner previously said "Permute(2, 3)" although the operation
  // performed is Swap(1, 3) — the debug output was misleading.
  cout << "====Swap(1,3)====" << endl << q << endl;
  EXPECT_EQ(0UL, q.num_attacks());
}

TEST(QueensTest, Copying) {
  Queens q1 = Queens::Create(4);
  Queens q2 = q1;
  // Mutating the copy's source must not affect the copy (deep copy check).
  q1.Permute(0, 2);
  EXPECT_EQ(2UL, q1.num_attacks());
  EXPECT_EQ(12UL, q2.num_attacks());
}
#!/usr/bin/env bash
# Select the Tendermint configuration by fee schema, then start the node
# pointing at the configured ABCI application.

# "x..." prefix is a portable guard against an unset/empty FEE_SCHEMA.
if [ "x${FEE_SCHEMA}" = "xZERO_FEE" ] ; then
    echo "[Config] Fee Schema: Zero Fee"
    cp -r config-template/zerofee/* config/
else
    echo "[Config] Fee Schema: With Fee"
    cp -r config-template/fee/* config/
fi

# Echo the exact command for debugging, then run it.
echo "/usr/bin/tendermint node --proxy_app=${PROXY_APP}"
/usr/bin/tendermint node --proxy_app=${PROXY_APP}
# -----------------------------------------------------------------------------
#
# Package : thrift
# Version : v0.13.0
# Source repo : https://github.com/apache/thrift
# Tested on : UBI 8.5
# Script License: Apache License, Version 2 or later
# Maintainer : Atharv Phadnis <Atharv.Phadnis@ibm.com>
#
# Disclaimer: This script has been tested in root mode on given
# ========== platform using the mentioned version of the package.
# It may not work as expected with newer versions of the
# package and/or distribution. In such case, please
# contact "Maintainer" of this script.
#
# ----------------------------------------------------------------------------

PACKAGE_NAME=thrift
# First CLI argument overrides the version tag to build; defaults to v0.13.0.
PACKAGE_VERSION=${1:-v0.13.0}
PACKAGE_URL=https://github.com/apache/thrift

# Build prerequisites.
yum install -y git make libtool gcc-c++ libevent-devel zlib-devel openssl-devel python3 python3-devel

# Install extra packages from CentOS-8
rpm -ivh https://rpmfind.net/linux/centos/8-stream/AppStream/ppc64le/os/Packages/bison-3.0.4-10.el8.ppc64le.rpm
rpm -ivh https://rpmfind.net/linux/centos/8-stream/AppStream/ppc64le/os/Packages/flex-2.6.1-9.el8.ppc64le.rpm

# Create symlink for python
ln -s /usr/bin/python3 /usr/bin/python

HOME_DIR=`pwd`
OS_NAME=$(cat /etc/os-release | grep ^PRETTY_NAME | cut -d= -f2)

# Each stage below prints a pipe-separated status line (grepped by CI) and
# exits non-zero on failure.

# Stage 1: clone.
if ! git clone $PACKAGE_URL $PACKAGE_NAME; then
    echo "------------------$PACKAGE_NAME:clone_fails---------------------------------------"
    echo "$PACKAGE_URL $PACKAGE_NAME"
    echo "$PACKAGE_NAME | $PACKAGE_URL | $PACKAGE_VERSION | $OS_NAME | GitHub | Fail | Clone_Fails"
    exit 1
fi

cd $HOME_DIR/$PACKAGE_NAME
git checkout $PACKAGE_VERSION

# Stage 2: generate the autotools build system.
if ! ./bootstrap.sh; then
    echo "------------------$PACKAGE_NAME:bootstrap_fails-------------------------------------"
    echo "$PACKAGE_URL $PACKAGE_NAME"
    echo "$PACKAGE_NAME | $PACKAGE_URL | $PACKAGE_VERSION | $OS_NAME | GitHub | Fail | Bootstrap_Fails"
    exit 1
fi

# Stage 3: configure.
cd $HOME_DIR/$PACKAGE_NAME
if ! ./configure; then
    echo "------------------$PACKAGE_NAME:configure_fails-------------------------------------"
    echo "$PACKAGE_URL $PACKAGE_NAME"
    echo "$PACKAGE_NAME | $PACKAGE_URL | $PACKAGE_VERSION | $OS_NAME | GitHub | Fail | Configure_Fails"
    exit 1
fi

# Stage 4: build.
cd $HOME_DIR/$PACKAGE_NAME
if ! make; then
    echo "------------------$PACKAGE_NAME:make_fails-------------------------------------"
    echo "$PACKAGE_URL $PACKAGE_NAME"
    echo "$PACKAGE_NAME | $PACKAGE_URL | $PACKAGE_VERSION | $OS_NAME | GitHub | Fail | Make_Fails"
    exit 1
fi

# Stage 5: install.
cd $HOME_DIR/$PACKAGE_NAME
if ! make install; then
    echo "------------------$PACKAGE_NAME:install_fails-------------------------------------"
    echo "$PACKAGE_URL $PACKAGE_NAME"
    echo "$PACKAGE_NAME | $PACKAGE_URL | $PACKAGE_VERSION | $OS_NAME | GitHub | Fail | Install_Fails"
    exit 1
fi

# Stage 6: unit tests (-k keeps going after individual failures).
cd $HOME_DIR/$PACKAGE_NAME
if ! make -k check; then
    echo "------------------$PACKAGE_NAME:check_fails---------------------"
    echo "$PACKAGE_URL $PACKAGE_NAME"
    echo "$PACKAGE_NAME | $PACKAGE_URL | $PACKAGE_VERSION | $OS_NAME | GitHub | Fail | Check_Fails"
    exit 1
fi

# Stage 7: cross-language test suite; determines the final pass/fail status.
if ! make cross; then
    echo "------------------$PACKAGE_NAME:install_success_but_test_fails---------------------"
    echo "$PACKAGE_URL $PACKAGE_NAME"
    echo "$PACKAGE_NAME | $PACKAGE_URL | $PACKAGE_VERSION | $OS_NAME | GitHub | Fail | Install_success_but_test_Fails"
    exit 1
else
    echo "------------------$PACKAGE_NAME:install_&_test_both_success-------------------------"
    echo "$PACKAGE_URL $PACKAGE_NAME"
    echo "$PACKAGE_NAME | $PACKAGE_URL | $PACKAGE_VERSION | $OS_NAME | GitHub | Pass | Both_Install_and_Test_Success"
    exit 0
fi
/*
 * Copyright (c) Open Source Strategies, Inc.
 *
 * Opentaps is free software: you can redistribute it and/or modify it
 * under the terms of the GNU Affero General Public License as published
 * by the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * Opentaps is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU Affero General Public License for more details.
 *
 * You should have received a copy of the GNU Affero General Public License
 * along with Opentaps.  If not, see <http://www.gnu.org/licenses/>.
 */
package org.opentaps.domain.party;

import org.opentaps.foundation.service.ServiceInterface;
import org.opentaps.foundation.service.ServiceException;

/**
 * Merge parties services.
 */
public interface PartyMergeServiceInterface extends ServiceInterface {

    /**
     * Sets source party identifier.
     * @param partyId the party merged from (it is deleted after the merge)
     */
    public void setPartyIdFrom(String partyId);

    /**
     * Sets target party identifier.
     * @param partyId the surviving party merged into
     */
    public void setPartyIdTo(String partyId);

    /**
     * This attribute may disallow preliminary parties validation if equals to <code>N</code>.
     * @param s Text flag having a boolean meaning, can equals to Y or N.
     */
    public void setValidate(String s);

    /**
     * Validate two parties if they can participate in merger.
     * Ensures two parties can be merged. Returns service error if they cannot. A merge requires *_${type}_UPDATE permission
     * where type is the roleTypeId of the party, such as ACCOUNT, CONTACT, LEAD or SUPPLIER. Also, the input must be two
     * different partyIds with the same roleTypeId.
     * @throws ServiceException if the validation cannot be carried out
     */
    public void validateMergeParties() throws ServiceException;

    /**
     * Merge two parties. Checks <code>crmsfa.validateMergeParties</code> as a precaution if the validate
     * parameter is not set to N. The From party will be deleted after the merge.
     * @throws ServiceException if the merge fails
     */
    public void mergeParties() throws ServiceException;
}
import org.junit.runner.RunWith;
import org.junit.runners.Suite;

/**
 * JUnit 4 suite aggregating the project's test classes so they can be run
 * together.
 */
@RunWith(Suite.class)
@Suite.SuiteClasses({
    CliTest.class,
    ConstantParserTest.class,
    EvokeTest.class,
    ExampleEvokeTest.class,
    ExampleTest.class,
    GetValueTest.class,
    JDBCQueryTest.class })
public class AllTests {
    // This class remains empty, it is used only as a holder for the above annotations
}
def cat_identifier(image):
    """Build, train, and apply a small CNN to label one image as cat (1) or not (0).

    NOTE(review): this builds and trains a fresh model on every call, which is
    almost certainly unintended — presumably the model should be trained once
    and reused; confirm with the caller.

    :param image: image data convertible via np.array
    :return: 1 if the sigmoid score is >= 0.5, else 0
    """
    # pre-processing: convert image to numpy array
    image_np = np.array(image)

    # model building
    model = Sequential()
    model.add(Conv2D(32, (3, 3), activation='relu', input_shape=image_np.shape))
    model.add(MaxPooling2D(2, 2))
    model.add(Conv2D(64, (3, 3), activation='relu'))
    model.add(MaxPooling2D(2, 2))
    model.add(Flatten())
    model.add(Dense(128, activation='relu'))
    model.add(Dense(1, activation='sigmoid'))
    model.compile(loss='binary_crossentropy',
                  optimizer='rmsprop',
                  metrics=['accuracy'])

    # fit model
    # NOTE(review): `images_np` and `labels` are not defined in this function —
    # they must exist at module scope; verify at the call site.
    model.fit(images_np, labels)

    # prediction and classification
    # NOTE(review): Keras predict() expects a batch; this assumes image_np
    # already carries a leading batch dimension — TODO confirm.
    prediction = model.predict(image_np)
    # Fix: the original assigned to `class`, a reserved keyword (SyntaxError).
    label = 1 if prediction >= 0.5 else 0
    return label
package request

// AlipayTradePagePayMethod is the Alipay OpenAPI method name for the
// desktop web-page payment API.
const AlipayTradePagePayMethod = "alipay.trade.page.pay"

// AlipayTradePagePayRequest carries the business parameters of
// alipay.trade.page.pay; json tags mirror the Alipay parameter names.
// Optional parameters use omitempty so they are dropped when unset.
type AlipayTradePagePayRequest struct {
	OutTradeNo         string            `json:"out_trade_no"` // merchant-side order number
	ProductCode        string            `json:"product_code"`
	TotalAmount        string            `json:"total_amount"`
	Subject            string            `json:"subject"`
	Body               string            `json:"body,omitempty"`
	GoodsDetail        string            `json:"goods_detail,omitempty"`
	PassbackParams     string            `json:"passback_params,omitempty"`
	ExtendParams       *ATPPExtendParams `json:"extend_params,omitempty"`
	GoodsType          string            `json:"goods_type,omitempty"`
	TimeoutExpress     string            `json:"timeout_express,omitempty"`
	EnablePayChannels  string            `json:"enable_pay_channels,omitempty"`
	DisablePayChannels string            `json:"disable_pay_channels,omitempty"`
	AuthToken          string            `json:"auth_token,omitempty"`
	QrPayMode          string            `json:"qr_pay_mode,omitempty"`
	QrcodeWidth        string            `json:"qrcode_width,omitempty"`
}

// ATPPExtendParams holds the nested extend_params object (service provider
// id and Huabei installment settings).
type ATPPExtendParams struct {
	SysServiceProviderId string `json:"sys_service_provider_id,omitempty"`
	HbFqNum              string `json:"hb_fq_num,omitempty"`
	HbFqSellerPercent    string `json:"hb_fq_seller_percent,omitempty"`
}
import { BaseController } from "./base-controller";

/**
 * Controller for user-related routes; behavior comes from BaseController.
 *
 * Fix: removed the useless explicit constructor — an empty
 * `constructor() { super(); }` is exactly what the compiler generates
 * implicitly, so dropping it changes nothing for callers.
 */
export class UserController extends BaseController {
}
import collections.abc
import typing


def validate_functionalities(module, functionalities, type, MARA_XXX):
    """Validate that ``functionalities`` is an iterable (or dict) of ``type`` instances.

    Args:
        module: The module that declared the functionality list (used in error messages).
        functionalities: An iterable of instances, or a dict whose *values* are the instances.
        type: The expected class of every instance. (Name shadows the builtin,
            kept for interface compatibility.)
        MARA_XXX: Name of the module attribute being validated (for error messages).

    Returns:
        The functionalities as a plain list.

    Raises:
        TypeError: If ``functionalities`` is not iterable, or any element is
            not an instance of ``type``.
    """
    # isinstance() against typing.Dict / typing.Iterable is deprecated (and an
    # error on newer Pythons); check against the collections.abc ABCs instead.
    # Mapping also generalizes the check beyond plain dict.
    if isinstance(functionalities, collections.abc.Mapping):
        functionalities = functionalities.values()

    if not isinstance(functionalities, collections.abc.Iterable):
        raise TypeError(
            f'{module.__name__}.{MARA_XXX} should be or return a list or dict of {type.__name__}. Got "{functionalities}".')

    # BUG FIX: materialize before iterating. The original iterated first and
    # called list() afterwards, so a generator argument was consumed by the
    # validation loop and an empty list was returned.
    functionalities = list(functionalities)

    for functionality in functionalities:
        if not isinstance(functionality, type):
            raise TypeError(f'In {module.__name__}.{MARA_XXX}: Expected a {type.__name__}, got "{functionality}"')

    return functionalities
#!/bin/bash
# Refreshes kubectl credentials for the second GKE cluster and renames the
# auto-generated context to the bare cluster name.
# Expects PROJECT_ID, CLUSTER2 and CLUSTER2_ZONE from scripts/init.env.

source ./scripts/init.env

# Remove a stale kubectl context for the given cluster, if present.
function cleanup {
  local cluster_name=${1}
  kubectl config delete-context "${cluster_name}"
}

# Fetch GKE credentials and rename the generated context
# (gke_<project>_<zone>_<name>) to just the cluster name.
function get_credential_and_rename_context {
  local cluster_name=${1}
  local cluster_zone=${2}
  gcloud container clusters get-credentials "${cluster_name}" --zone "${cluster_zone}"
  kubectl config rename-context "gke_${PROJECT_ID}_${cluster_zone}_${cluster_name}" "${cluster_name}"
}

cleanup "${CLUSTER2}"
get_credential_and_rename_context "${CLUSTER2}" "${CLUSTER2_ZONE}"
<reponame>wetherbeei/gopar /* * Copyright © 2012 <NAME> * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package main import ( "fmt" "github.com/tones111/go-opencl/cl" ) func main() { fmt.Println("Number of Platforms:", len(cl.Platforms)) for _, platform := range cl.Platforms { fmt.Println(" Platform Profile:", platform.Property(cl.PLATFORM_PROFILE)) fmt.Println(" Platform Version:", platform.Property(cl.PLATFORM_VERSION)) fmt.Println(" Platform Name:", platform.Property(cl.PLATFORM_NAME)) fmt.Println(" Platform Vendor:", platform.Property(cl.PLATFORM_VENDOR)) fmt.Printf(" Platform Extensions: %v\n\n", platform.Property(cl.PLATFORM_EXTENSIONS)) fmt.Println(" Platform Name:", platform.Property(cl.PLATFORM_NAME)) fmt.Println("Number of devices:", len(platform.Devices)) for _, device := range platform.Devices { fmt.Println(" Device Type:", device.Property(cl.DEVICE_TYPE)) //fmt.Println(" Device ID:", "TODO") //fmt.Println(" Board name:", "TODO") fmt.Println(" Max compute units:", device.Property(cl.DEVICE_MAX_COMPUTE_UNITS)) fmt.Println(" Max work items dimensions:", device.Property(cl.DEVICE_MAX_WORK_ITEM_DIMENSIONS)) //fmt.Println(" Max work items[]", "TODO") fmt.Println(" Max work group size:", device.Property(cl.DEVICE_MAX_WORK_GROUP_SIZE)) fmt.Println(" Preferred vector width char:", device.Property(cl.DEVICE_PREFERRED_VECTOR_WIDTH_CHAR)) fmt.Println(" Preferred vector width short:", 
device.Property(cl.DEVICE_PREFERRED_VECTOR_WIDTH_SHORT)) fmt.Println(" Preferred vector width int:", device.Property(cl.DEVICE_PREFERRED_VECTOR_WIDTH_INT)) fmt.Println(" Preferred vector width long:", device.Property(cl.DEVICE_PREFERRED_VECTOR_WIDTH_LONG)) fmt.Println(" Preferred vector width float:", device.Property(cl.DEVICE_PREFERRED_VECTOR_WIDTH_FLOAT)) fmt.Println(" Preferred vector width double:", device.Property(cl.DEVICE_PREFERRED_VECTOR_WIDTH_DOUBLE)) fmt.Println(" Native vector width char:", device.Property(cl.DEVICE_NATIVE_VECTOR_WIDTH_CHAR)) fmt.Println(" Native vector width short:", device.Property(cl.DEVICE_NATIVE_VECTOR_WIDTH_SHORT)) fmt.Println(" Native vector width int:", device.Property(cl.DEVICE_NATIVE_VECTOR_WIDTH_INT)) fmt.Println(" Native vector width long:", device.Property(cl.DEVICE_NATIVE_VECTOR_WIDTH_LONG)) fmt.Println(" Native vector width float:", device.Property(cl.DEVICE_NATIVE_VECTOR_WIDTH_FLOAT)) fmt.Println(" Native vector width double:", device.Property(cl.DEVICE_NATIVE_VECTOR_WIDTH_DOUBLE)) fmt.Printf(" Max clock frequency: %dMhz\n", device.Property(cl.DEVICE_MAX_CLOCK_FREQUENCY)) fmt.Println(" Address bits:", device.Property(cl.DEVICE_ADDRESS_BITS)) fmt.Println(" Max memory allocation:", device.Property(cl.DEVICE_MAX_MEM_ALLOC_SIZE)) fmt.Println(" Image support:", device.Property(cl.DEVICE_IMAGE_SUPPORT)) fmt.Println(" Max number of images read arguments:", device.Property(cl.DEVICE_MAX_READ_IMAGE_ARGS)) fmt.Println(" Max number of images write arguments:", device.Property(cl.DEVICE_MAX_WRITE_IMAGE_ARGS)) fmt.Println(" Max image 2D width:", device.Property(cl.DEVICE_IMAGE2D_MAX_WIDTH)) fmt.Println(" Max image 2D height:", device.Property(cl.DEVICE_IMAGE2D_MAX_HEIGHT)) fmt.Println(" Max image 3D width:", device.Property(cl.DEVICE_IMAGE3D_MAX_WIDTH)) fmt.Println(" Max image 3D height:", device.Property(cl.DEVICE_IMAGE3D_MAX_HEIGHT)) fmt.Println(" Max image 3D depth:", device.Property(cl.DEVICE_IMAGE3D_MAX_DEPTH)) fmt.Println(" Max 
samplers within kernel:", device.Property(cl.DEVICE_MAX_SAMPLERS)) fmt.Println(" Max size of kernel argument:", device.Property(cl.DEVICE_MAX_PARAMETER_SIZE)) fmt.Println(" Alignment (bits) of base address:", device.Property(cl.DEVICE_MEM_BASE_ADDR_ALIGN)) fmt.Println(" Minimum alignment (bytes) for any datatype:", device.Property(cl.DEVICE_MIN_DATA_TYPE_ALIGN_SIZE)) /*fmt.Println(" Single precision floating point capability") fmt.Println(" Denorms:", "TODO") fmt.Println(" Quiet NaNs:", "TODO") fmt.Println(" Round to nearest even:", "TODO") fmt.Println(" Round to zero:", "TODO") fmt.Println(" Round to +ve and infinity:", "TODO") fmt.Println(" IEEE754-2008 fused multiply-add:", "TODO") */ //fmt.Println(" Cache type:", "TODO" /*device.Property(cl.DEVICE_GLOBAL_MEM_CACHE_TYPE)*/ ) //fmt.Println(" Cache line size:", "TODO" /*device.Property(cl.DEVICE_GLOBAL_MEM_CACHELINE_SIZE)*/ ) fmt.Println(" Cache size:", device.Property(cl.DEVICE_GLOBAL_MEM_CACHE_SIZE)) fmt.Println(" Global memory size:", device.Property(cl.DEVICE_GLOBAL_MEM_SIZE)) fmt.Println(" Constant buffer size:", device.Property(cl.DEVICE_MAX_CONSTANT_BUFFER_SIZE)) fmt.Println(" Max number of constant args:", device.Property(cl.DEVICE_MAX_CONSTANT_ARGS)) //fmt.Println(" Local memory type:", "TODO" /*device.Property(cl.DEVICE_LOCAL_MEM_TYPE)*/ ) fmt.Println(" Local memory size:", device.Property(cl.DEVICE_LOCAL_MEM_SIZE)) //fmt.Println(" Kernel Preferred work group size multiple:", "TODO") fmt.Println(" Error correction support:", device.Property(cl.DEVICE_ERROR_CORRECTION_SUPPORT)) fmt.Println(" Unified memory for Host and Device:", device.Property(cl.DEVICE_HOST_UNIFIED_MEMORY)) fmt.Println(" Profiling timer resolution:", device.Property(cl.DEVICE_PROFILING_TIMER_RESOLUTION)) fmt.Println(" Little endian:", device.Property(cl.DEVICE_ENDIAN_LITTLE)) fmt.Println(" Available:", device.Property(cl.DEVICE_AVAILABLE)) fmt.Println(" Compiler available:", device.Property(cl.DEVICE_COMPILER_AVAILABLE)) /*fmt.Println(" 
Execution capabilities:") fmt.Println(" Execute OpenCL kernels:", "TODO") fmt.Println(" Execute native function:", "TODO") */ /*fmt.Println(" Queue properties:") fmt.Println(" Out-of-Order:", "TODO") fmt.Println(" Profiling:", "TODO") */ //fmt.Println(" Platform ID:", "TODO" /* device.Property(cl.DEVICE_PLATFORM)*/ ) fmt.Println(" Name:", device.Property(cl.DEVICE_NAME)) fmt.Println(" Vendor:", device.Property(cl.DEVICE_VENDOR)) fmt.Println(" Device OpenCL C version:", device.Property(cl.DEVICE_OPENCL_C_VERSION)) fmt.Println(" Driver version:", device.Property(cl.DRIVER_VERSION)) fmt.Println(" Profile:", device.Property(cl.DEVICE_PROFILE)) fmt.Println(" Version:", device.Property(cl.DEVICE_VERSION)) fmt.Println(" Extensions:", device.Property(cl.DEVICE_EXTENSIONS)) } } }
#!/bin/bash
# Copyright (C) 2016 Nicolas Lamirault <nicolas.lamirault@gmail.com>

# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Downloads Hypriot's flash tool and writes a HypriotOS image to an SD card.
# Args: hostname ssid wifipassword host-OS(Linux|Darwin)

hostname=$1
ssid=$2
wifipassword=$3
host=$4

NO_COLOR="\033[0m"
OK_COLOR="\033[32;01m"
ERROR_COLOR="\033[31;01m"
WARN_COLOR="\033[33;01m"
DEBUG_COLOR="\033[34;01m"

HYPRIOTOS_VERSION=1.4.0

echo -e "${OK_COLOR}== Jarvis OS: Hypriot ${HYPRIOTOS_VERSION} ==${NO_COLOR}"

if [ $# -ne 4 ]; then
    echo -e "${ERROR_COLOR}Usage: $0 hostname ssid wifipassword Linux|Darwin${NO_COLOR}"
    exit 1
fi

echo -e "${DEBUG_COLOR}Download flash${NO_COLOR}"
curl -LO --progress-bar "https://raw.githubusercontent.com/hypriot/flash/master/${host}/flash"
chmod +x flash

# BUG FIX: the positional values were expanded unquoted, so an SSID or wifi
# password containing spaces was split into multiple arguments.
# ./flash --hostname "${hostname}" --ssid "${ssid}" --password "${wifipassword}" https://downloads.hypriot.com/hypriotos-rpi-v${HYPRIOTOS_VERSION}.img.zip
./flash --hostname "${hostname}" --ssid "${ssid}" --password "${wifipassword}" "https://github.com/hypriot/image-builder-rpi/releases/download/v${HYPRIOTOS_VERSION}/hypriotos-rpi-v${HYPRIOTOS_VERSION}.img.zip"

echo -e "${DEBUG_COLOR}Cleanup${NO_COLOR}"
rm ./flash

echo -e "${OK_COLOR}== Done ==${NO_COLOR}"
# Days with scheduled sessions, in chronological order.
timetable = ["Tuesday", "Wednesday", "Thursday", "Friday"]

# Walk the timetable from the last day back to the first, printing each day.
for day in timetable[::-1]:
    print(day)
/* The file contains what deals with content displayed in the HTML (injected in
the DOM), thus it must run as the last script */

/**
 * Human-readable estimate of how long submitting/clearing `ids` entries takes,
 * assuming ~12 seconds of server round-trip per entry.
 * BUG FIX: the minute threshold was `> 60`, so exactly 60 seconds was reported
 * as "60 seconds" instead of "1 minutes".
 * TODO(review): "1 minutes"/"1 seconds" are not pluralized — left as-is.
 */
const estimateTime = ids => {
  let totalSeconds = Number(ids) * 12;
  let totalMinutes = 0;
  if (totalSeconds >= 60) {
    totalMinutes = Math.floor(totalSeconds / 60);
    totalSeconds -= totalMinutes * 60;
  }
  let output;
  if (totalMinutes > 0) {
    output = `${totalMinutes} minutes`;
    if (totalSeconds > 0) {
      output += `, and ${totalSeconds} seconds`;
    }
  } else {
    output = `${totalSeconds} seconds`;
  }
  return output;
};

// Second cell of the page's layout table — everything we inject is appended here.
const originalThesesContainer = document.querySelectorAll(
  "#Table2 > tbody > tr:nth-child(2) > td:nth-child(2)"
)[1];
originalThesesContainer.appendChild(document.createElement("br"));

/* ============================================= */
/* ==================CLEAR ALL================== */
const clearAllButton = document.createElement("button");
clearAllButton.title =
  "Clears all topics submitted in the system (the blue box above).";
clearAllButton.innerText = "Clear all registered topics";
clearAllButton.classList = "button button-danger";
originalThesesContainer.appendChild(clearAllButton);

clearAllButton.addEventListener("click", async event => {
  event.preventDefault();
  const chosenIds = getChosenIds();
  if (!chosenIds || chosenIds.length === 0) {
    alert("You don't have any chosen (registered in the system) theses.");
    return;
  }
  // Clearing also discards the local sortable list — ask before losing it.
  if (
    sortableThesesList.toArray().length > 0 &&
    !confirm(
      "This will discard all the added topics in the sortable list below, are you sure?"
    )
  ) {
    return;
  }
  if (
    confirm(
      `This will remove every single entry registered on the system, you currently have (${
        chosenIds.length
      } topics), are you sure?`
    )
  ) {
    if (
      !confirm(
        `Estimated: up to ${estimateTime(
          chosenIds.length
        )}\nDON'T INTERRUPT UNTIL FINISHED.\nReady to start?`
      )
    ) {
      return;
    }
    toggleSpinner();
    await clearIds(chosenIds);
    toggleSpinner();
    alert(
      `All cleared \\_0_/\nThe page will reload now to sync with the system.`
    );
    window.location.reload();
  }
});

// The host page lays elements out with runs of non-breaking spaces; mimic that
// so the injected buttons line up with the native ones.
const spacingNode = document.createTextNode(`\u00A0`.repeat(77));
originalThesesContainer.appendChild(spacingNode);

/* ============================================= */
/* ================ADD REMAINING================ */
const addRemainingButton = document.createElement("button");
addRemainingButton.innerText = "Add remaining to sortable list";
addRemainingButton.title =
  "Adds all the topics remaining above to the sortable list, their order will be maintained (appended to the current list). Clicking will not submit to the system";
addRemainingButton.classList = "button button-primary";
originalThesesContainer.appendChild(addRemainingButton);

/* ============================================= */
/* ==================SELECTION================== */
const sortableThesesListNode = document.createElement("ul");
sortableThesesListNode.className = "sortable";
const sortableThesesList = new Sortable(sortableThesesListNode);

const header = document.createElement("h1");
header.innerText = "Sortable list";
header.style.textAlign = "center";
originalThesesContainer.appendChild(header);
originalThesesContainer.appendChild(sortableThesesListNode);

/* ============================================= */
/* ================ADD SELECTED================ */
const addSelectedThesesButton = document.createElement("button");
addSelectedThesesButton.title =
  "Adds topics in the list above to your selected topics on the system, this will not finalize your submission to the system (you will still need to commit to the system at the end)";
addSelectedThesesButton.innerText = "Submit batch";
addSelectedThesesButton.classList = "button button-success";

addSelectedThesesButton.addEventListener("click", async event => {
  event.preventDefault();
  let ids = sortableThesesList.toArray();
  if (!ids || ids.length === 0) {
    alert("Sorry, you need to choose some topics first");
    return;
  }
  if (
    confirm(
      `This will submit the currently selected theses: ${
        ids.length
      } topic, are you sure?`
    )
  ) {
    if (
      !confirm(
        `Estimated: up to ${estimateTime(
          ids.length
        )}\nDON'T INTERRUPT UNTIL FINISHED.\nReady to start?`
      )
    ) {
      return;
    }
    toggleSpinner();
    await addTheses(ids);
    toggleSpinner();
    alert(
      `All selected ${
        ids.length
      } topic have been added successfully.\nThe page will reload now to sync with the system.`
    );
    window.location.reload();
  }
});
originalThesesContainer.appendChild(addSelectedThesesButton);

// ============================================= //
// Cloning the <select> replaces it wholesale, which is the only way to drop
// the page's original inline change-listener without a reference to it.
const selectionList = document.getElementById("thesisIdLst");
const selectionListClone = selectionList.cloneNode(true);
selectionList.parentNode.appendChild(selectionListClone);
selectionList.parentNode.removeChild(selectionList);

// ================ADDITION LISTENERS================ //
// Move an <option> from the selection list into the sortable list; the "SO__"
// prefix namespaces the li ids away from the page's own element ids.
const addToSortableList = node => {
  const id = "SO__" + node.value;
  const sortableEntry = document.createElement("li");
  sortableEntry.id = id;
  sortableEntry.textContent = node.textContent;
  sortableThesesListNode.appendChild(sortableEntry);
  selectionListClone.removeChild(node);
};

selectionListClone.addEventListener("change", event => {
  const node = event.target.selectedOptions[0];
  addToSortableList(node);
});

addRemainingButton.addEventListener("click", async event => {
  event.preventDefault();
  const remaining = getRemainingNodes();
  if (!remaining || remaining.length === 0) {
    alert("You have no more remaining theses :-)");
    return;
  }
  if (
    confirm(
      `This will add the rest of the unselected theses (${
        remaining.length
      } topic) to your selection (sortable list), their order will be as seen, with the first one having the highest priority, are you sure?`
    )
  ) {
    remaining.forEach(addToSortableList);
  }
});
#!/bin/bash
# Find a shared library in the system and copy it to a destination,
# renamed to its SONAME (what applications actually link against).
# Args: LIB_NAME (library file-name prefix), INSTALL_DIR (destination dir)

LIB_NAME=$1
INSTALL_DIR=$2

# Find the binary for the library (avoid symlinks)
LIB=$(find /usr/lib/ -name "${LIB_NAME}*" -type f)

# BUG FIX: fail loudly instead of running objdump/cp with an empty path.
if [[ -z "$LIB" ]]; then
    echo "Error: no library matching '${LIB_NAME}*' found under /usr/lib/" >&2
    exit 1
fi

# Get the SO-name since this is what the applications link to,
# this is often not the same as the library binary name since
# praxis is to symlink for version compatibility.
# (grep SONAME instead of the looser "SO" to avoid false matches.)
SO_NAME=$(objdump -p "$LIB" | grep SONAME | awk '{print $2}')

# Copy the binary to the destination as the SO-name
cp "$LIB" "${INSTALL_DIR}/${SO_NAME}"
def calcAverage(arr):
    """Return the average of the strictly positive values in ``arr``.

    Entries <= 0 are ignored. Returns 0.0 when ``arr`` is empty or contains
    no positive values (the original raised ZeroDivisionError in that case).
    """
    positives = [x for x in arr if x > 0]
    if not positives:
        return 0.0
    return sum(positives) / len(positives)
// try { // const cookieJar = await boxrec.login(TUAN_fish, Password12); // // successfully logged in // } catch (e) { // console.log('error ') // } // const gennadyGolovkin = await boxrec.getPersonById(cookieJar, 356831); // console.log(gennadyGolovkin.name); // <NAME> // console.log(gennadyGolovkin.division); // middleweight // console.log(gennadyGolovkin.titlesHeld); // currently held titles // console.log(gennadyGolovkin.otherInfo); // other info that couldn't be categorized // console.log(gennadyGolovkin.suspended); // will tell if boxer is currently suspended // console.log(gennadyGolovkin.bouts); // list of bouts // console.log(gennadyGolovkin.bouts[37].opponent.name); // <NAME>
#!/usr/bin/env bash
# Training entry points for the ACE-event cross-lingual transfer experiments.
# Defines shared configuration and the train() function; test() and the
# transfer drivers live further down the file.

# Create a directory if it does not already exist.
function make_dir () {
    if [[ ! -d "$1" ]]; then
        # BUG FIX: quote the path so directories with spaces are handled.
        mkdir "$1"
    fi
}

SRC_DIR=../..
DATA_DIR=${SRC_DIR}/data
MODEL_DIR=${SRC_DIR}/tmp

SEED=1013

# Language code -> display name (currently informational only).
declare -A LANG_MAP
LANG_MAP['en']='English'
LANG_MAP['ar']='Arabic'
LANG_MAP['zh']='Chinese'

if [[ ! -d $DATA_DIR ]]; then
    echo "${DATA_DIR} does not exist"
    exit 1
fi

if [[ ! -d $MODEL_DIR ]]; then
    make_dir "$MODEL_DIR"
fi

# Pre-extracted contextual features; switch the comment to use XLM-R instead.
BERT_FEAT_DIR=${DATA_DIR}/bert_features/ace_event
#BERT_FEAT_DIR=${DATA_DIR}/xlmroberta_features/ace_event

# Optimizer configuration (SGD default; adam alternative kept commented).
OPTIM=sgd
LR=0.1
LR_DECAY=0.9
#OPTIM=adam
#LR=0.0001
#LR_DECAY=0.9

# Use BERT features XOR word embeddings.
USE_BERT=True
if [[ $USE_BERT == True ]]; then
    USE_WORD=False
else
    USE_WORD=True
fi

# train GPU_ID "LANG..." MODEL_NAME TARGET_DIR
#   $1 GPU id(s) for CUDA_VISIBLE_DEVICES
#   $2 space-separated source language list (expanded via eval into an array)
#   $3 model name, $4 output directory
function train () {

echo "============TRAINING============"

eval "LANG=($2)"
MODEL_NAME=$3
TARGET_DIR=$4
make_dir "$TARGET_DIR"

export PYTHONPATH=$SRC_DIR
export CUDA_VISIBLE_DEVICES=$1

python -W ignore ${SRC_DIR}/main/main.py \
--random_seed $SEED \
--data_workers 5 \
--language ${LANG[*]} \
--use_bert $USE_BERT \
--use_word $USE_WORD \
--data_dir ${DATA_DIR}/ace_event \
--embed_dir ${DATA_DIR}/ace_event \
--embedding_file aligned.embed.300.vec \
--bert_feats $BERT_FEAT_DIR \
--train_filename train \
--valid_filename dev \
--vocab_file vocab.txt \
--max_examples -1 \
--fix_embeddings True \
--batch_size 50 \
--test_batch_size 50 \
--num_epochs 200 \
--pos_dim 30 \
--ner_dim 30 \
--deprel_dim 30 \
--type_dim 30 \
--model_type gtn \
--embed_graph 4 \
--max_tree_dist 1 1 1 1 \
--max_src_len 1000 \
--struct_position False \
--position_dim 0 \
--max_relative_pos 0 \
--use_neg_dist True \
--tran_hid 512 \
--num_head 8 \
--d_k 64 \
--d_v 64 \
--d_ff 2048 \
--gcn_hid 0 \
--tran_layers 1 \
--gcn_layers 2 \
--pool_type max \
--mlp_layers 2 \
--dropout_emb 0.5 \
--dropout_gcn 0.5 \
--trans_drop 0.5 \
--early_stop 20 \
--prune_k 1 \
--optimizer $OPTIM \
--learning_rate $LR \
--lr_decay $LR_DECAY \
--warmup_epochs 0 \
--decay_epoch 5 \
--max_grad_norm 5.0 \
--valid_metric f1 \
--checkpoint True \
--model_dir $TARGET_DIR \
--model_name $MODEL_NAME;

}
# test GPU_ID "LANG..." MODEL_NAME TARGET_DIR
# Evaluate an already-trained model on the test split of the given language(s).
function test () {

echo "============TESTING============"

eval "LANG=($2)"
MODEL_NAME=$3
TARGET_DIR=$4

if [[ ! -d $TARGET_DIR ]]; then
    echo "${TARGET_DIR} does not exist"
    exit 1
fi

export PYTHONPATH=$SRC_DIR
export CUDA_VISIBLE_DEVICES=$1

python -W ignore ${SRC_DIR}/main/main.py \
--random_seed $SEED \
--only_test True \
--data_workers 5 \
--language ${LANG[*]} \
--data_dir ${DATA_DIR}/ace_event \
--bert_feats $BERT_FEAT_DIR \
--valid_filename test \
--test_batch_size 50 \
--model_dir $TARGET_DIR \
--model_name $MODEL_NAME;

}

# single_source_transfer GPU_IDS MODEL_SUFFIX
# Train on each language individually, then evaluate each model on every
# language. Arabic runs get the full comma-separated GPU list ($1); the other
# languages run on the first GPU only.
function single_source_transfer() {

# read the split words into an array based on comma delimiter
IFS=',' read -a GPU_IDS <<< "$1"
if [[ ${#GPU_IDS[@]} -eq 1 ]]; then
    echo "Warning: one GPU is not enough to run for Arabic dataset"
fi

TARGET_DIR=${MODEL_DIR}/$2_single
LOG_FILENAME=${TARGET_DIR}/full.log

for src_lang in ar en zh; do
    # Arabic needs all GPUs; everything else uses the first one.
    if [[ $src_lang == "ar" ]]; then gpu_id=$1; else gpu_id=${GPU_IDS[0]}; fi
    train "$gpu_id" $src_lang ${src_lang}_$2 ${TARGET_DIR}
    for tgt_lang in ar en zh; do
        if [[ $tgt_lang == "ar" ]]; then gpu_id=$1; else gpu_id=${GPU_IDS[0]}; fi
        test "$gpu_id" $tgt_lang ${src_lang}_$2 ${TARGET_DIR} |& tee -a $LOG_FILENAME
    done
done

# Collect the per-pair results into a summary table.
python -W ignore ../preparer.py --model_name $2 \
--dir ${TARGET_DIR} --multi_source False |& tee -a $LOG_FILENAME

}

# multi_source_transfer GPU_IDS MODEL_SUFFIX
# Train on each two-language source combination, then evaluate on every
# target language. Same GPU-assignment convention as single_source_transfer.
function multi_source_transfer() {

# read the split words into an array based on comma delimiter
IFS=',' read -a GPU_IDS <<< "$1"
if [[ ${#GPU_IDS[@]} -eq 1 ]]; then
    echo "Warning: one GPU is not enough to run for Arabic dataset"
fi

declare -a src_langs=("en ar" "ar zh" "en zh")
TARGET_DIR=${MODEL_DIR}/$2_multi
LOG_FILENAME=${TARGET_DIR}/full.log

for i in "${!src_langs[@]}"; do
    if [[ ${src_langs[$i]} == *"ar"* ]]; then gpu_id=$1; else gpu_id=${GPU_IDS[0]}; fi
    # e.g. "en ar" -> model name en_ar_<suffix>
    model_name=${src_langs[$i]// /_}_$2
    train "$gpu_id" "${src_langs[$i]}" ${model_name} ${TARGET_DIR}
    for tgt_lang in en ar zh; do
        if [[ $tgt_lang == *"ar"* ]]; then gpu_id=$1; else gpu_id=${GPU_IDS[0]}; fi
        test "$gpu_id" $tgt_lang ${model_name} ${TARGET_DIR} |& tee -a $LOG_FILENAME
    done
done

python -W ignore ../preparer.py --model_name $2 \
--dir ${TARGET_DIR} --multi_source True |& tee -a $LOG_FILENAME

}

# Usage: <script> GPU_IDS MODEL_SUFFIX  (e.g. "0,1 gtn")
single_source_transfer $1 $2
multi_source_transfer $1 $2
<reponame>Nelias/smashing-ui import React from 'react' import {storiesOf, addDecorator} from '@storybook/react' import {Select} from '@smashing/select' import {withA11y} from '@storybook/addon-a11y' import {SmashingThemeProvider} from '@smashing/theme' import styled from 'styled-components' const SpecimenContainer = styled.div` margin-bottom: 16px; ` addDecorator(withA11y) const optionsWithLabels = [ {value: 'abc', label: 'ABC Option'}, {value: 'bca', label: 'BCA Option'}, {value: 'cde', label: 'Longer Option'}, {value: 'def', label: 'DEF Option'} ] storiesOf('Core|Select', module) .addDecorator(story => ( <SmashingThemeProvider theme={{}}>{story()}</SmashingThemeProvider> )) .add('Simple usage', () => ( <React.Fragment> <Select options={optionsWithLabels} /> </React.Fragment> )) .add('With value', () => { const INITIAL_STATE = optionsWithLabels[1].value const ComponentWithState = () => { const [selected, setSelected] = React.useState(INITIAL_STATE) return ( <Select options={optionsWithLabels} value={selected} onChange={event => setSelected(event.target.value)} /> ) } return <ComponentWithState /> }) .add('Height', () => ( <React.Fragment> <SpecimenContainer> <Select options={optionsWithLabels} /> </SpecimenContainer> <SpecimenContainer> <Select options={optionsWithLabels} height={34} /> </SpecimenContainer> <SpecimenContainer> <Select options={optionsWithLabels} height={48} /> </SpecimenContainer> <SpecimenContainer> <Select options={optionsWithLabels} height={64} /> </SpecimenContainer> <SpecimenContainer> <Select options={optionsWithLabels} height={96} /> </SpecimenContainer> <SpecimenContainer> <Select options={optionsWithLabels} height={128} /> </SpecimenContainer> </React.Fragment> )) .add('Primary appearance', () => ( <React.Fragment> <SpecimenContainer> <Select options={optionsWithLabels} appearance="primary" intent="none" /> </SpecimenContainer> <SpecimenContainer> <Select options={optionsWithLabels} appearance="primary" intent="danger" /> 
</SpecimenContainer> <SpecimenContainer> <Select options={optionsWithLabels} appearance="primary" intent="info" /> </SpecimenContainer> <SpecimenContainer> <Select options={optionsWithLabels} appearance="primary" intent="success" /> </SpecimenContainer> <SpecimenContainer> <Select options={optionsWithLabels} appearance="primary" intent="warning" /> </SpecimenContainer> </React.Fragment> )) .add('Flat appearance', () => ( <React.Fragment> <SpecimenContainer> <Select options={optionsWithLabels} appearance="flat" intent="none" /> </SpecimenContainer> <SpecimenContainer> <Select options={optionsWithLabels} appearance="flat" intent="danger" /> </SpecimenContainer> <SpecimenContainer> <Select options={optionsWithLabels} appearance="flat" intent="info" /> </SpecimenContainer> <SpecimenContainer> <Select options={optionsWithLabels} appearance="flat" intent="success" /> </SpecimenContainer> <SpecimenContainer> <Select options={optionsWithLabels} appearance="flat" intent="warning" /> </SpecimenContainer> </React.Fragment> )) .add('Minimal appearance', () => ( <React.Fragment> <SpecimenContainer> <Select options={optionsWithLabels} appearance="minimal" intent="none" /> </SpecimenContainer> <SpecimenContainer> <Select options={optionsWithLabels} appearance="minimal" intent="danger" /> </SpecimenContainer> <SpecimenContainer> <Select options={optionsWithLabels} appearance="minimal" intent="info" /> </SpecimenContainer> <SpecimenContainer> <Select options={optionsWithLabels} appearance="minimal" intent="success" /> </SpecimenContainer> <SpecimenContainer> <Select options={optionsWithLabels} appearance="minimal" intent="warning" /> </SpecimenContainer> </React.Fragment> )) .add('Subtle appearance', () => ( <React.Fragment> <SpecimenContainer> <Select options={optionsWithLabels} appearance="subtle" intent="none" /> </SpecimenContainer> <SpecimenContainer> <Select options={optionsWithLabels} appearance="subtle" intent="danger" /> </SpecimenContainer> <SpecimenContainer> <Select 
options={optionsWithLabels} appearance="subtle" intent="info" /> </SpecimenContainer> <SpecimenContainer> <Select options={optionsWithLabels} appearance="subtle" intent="success" /> </SpecimenContainer> <SpecimenContainer> <Select options={optionsWithLabels} appearance="subtle" intent="warning" /> </SpecimenContainer> </React.Fragment> )) .add('Full', () => ( <React.Fragment> <Select full options={optionsWithLabels} /> </React.Fragment> )) .add('Disabled', () => ( <React.Fragment> <Select disabled options={optionsWithLabels} /> </React.Fragment> ))
package com.java;

import java.util.ArrayList;

/**
 * Prints a ramp of 25 integer levels, stepping by 255/25 = 10 from 10 upward
 * and clamping to 255 once the next step would overshoot.
 */
public class Class {

    public static void main(String[] args) {
        ArrayList levels = new ArrayList();

        final int stepCount = 25;          // number of ramp entries to emit
        final int stepSize = 255 / stepCount; // integer step, 10
        int level = stepSize;

        StringBuilder output = new StringBuilder();
        for (int i = 0; i < stepCount; i++) {
            levels.add(level);
            output.append(level).append("\n");
            level += stepSize;
            // Clamp: once within one step of the maximum, pin to 255.
            if (level >= 255 - stepSize) {
                level = 255;
            }
        }

        System.out.println(output.toString());
    }
}
-- Customer master data.
CREATE TABLE customers (
  CustomerID char(10) PRIMARY KEY,
  Name varchar(255) NOT NULL,
  Address varchar(255) NOT NULL,
  PhoneNumber varchar(20) NOT NULL
);

-- One row per order, linked to the placing customer.
CREATE TABLE orders (
  OrderID int PRIMARY KEY,
  OrderDate date NOT NULL,
  CustomerID char(10) NOT NULL,
  FOREIGN KEY (CustomerID) REFERENCES customers(CustomerID)
);

-- Product catalogue.
CREATE TABLE products (
  ProductID int PRIMARY KEY,
  ProductName varchar(255) NOT NULL
);

-- Order line items (order <-> product, with quantity).
-- BUG FIX: the original declared `Quantity` after the FOREIGN KEY constraints
-- with no separating comma — a syntax error; column definitions must precede
-- table constraints.
CREATE TABLE orderItems (
  OrderItemID int PRIMARY KEY,
  OrderID int NOT NULL,
  ProductID int NOT NULL,
  Quantity int NOT NULL,
  FOREIGN KEY (OrderID) REFERENCES orders(OrderID),
  FOREIGN KEY (ProductID) REFERENCES products(ProductID)
);
class NaiveBayesClassifier:
    """
    A simple multinomial Naive Bayes text classifier.

    Attributes:
        classes: distinct labels seen during fit().
        word_counts: maps (label, word) -> occurrence count in training docs.
        priors: maps label -> prior probability P(label).
    """

    def __init__(self):
        self.classes = []
        self.word_counts = {}
        self.priors = {}

    def fit(self, X, y):
        """
        Train the classifier.

        X: List of documents (whitespace-tokenized strings)
        y: List of labels, parallel to X
        """
        # compute class priors P(c) = count(c) / N
        self.classes = list(set(y))
        for c in self.classes:
            self.priors[c] = (y.count(c) / len(y))

        # compute per-(class, word) occurrence counts
        for x, c in zip(X, y):
            words = x.split()
            for word in words:
                if (c, word) not in self.word_counts:
                    self.word_counts[(c, word)] = 0
                self.word_counts[(c, word)] += 1

    def predict(self, X):
        """
        Predict a class for each document in X.

        X: List of documents
        Returns a list of predicted labels, parallel to X.
        """
        # Total word count per class, computed once instead of inside the
        # per-word loop (the original recomputed it per word — O(n^2) — and
        # indexed word_counts[(c, w)] where w is already a (class, word) key,
        # which raised KeyError on any prediction).
        class_totals = {c: 0 for c in self.classes}
        for (c, _word), count in self.word_counts.items():
            class_totals[c] += count

        # Smoothing denominator term kept as in the original formulation:
        # the number of distinct (class, word) pairs.
        vocab_term = len(self.word_counts)

        pred_y = []
        for x in X:
            pred_classes = []
            # posterior(c) ∝ P(c) * Π_w P(w | c) with add-one smoothing
            for c in self.classes:
                posterior = self.priors[c]
                for word in x.split():
                    count = self.word_counts.get((c, word), 0)
                    posterior *= (count + 1) / (class_totals[c] + vocab_term)
                pred_classes.append((c, posterior))
            # select the most probable class
            p = max(pred_classes, key=lambda item: item[1])
            pred_y.append(p[0])
        return pred_y
/* * Copyright (c) 2012 Nordic Semiconductor ASA * All rights reserved. * * Redistribution and use in source and binary forms, with or without modification, * are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, this list * of conditions and the following disclaimer. * * 2. Redistributions in binary form, except as embedded into a Nordic Semiconductor ASA * integrated circuit in a product or a software update for such product, must reproduce * the above copyright notice, this list of conditions and the following disclaimer in * the documentation and/or other materials provided with the distribution. * * 3. Neither the name of Nordic Semiconductor ASA nor the names of its contributors may be * used to endorse or promote products derived from this software without specific prior * written permission. * * 4. This software, with or without modification, must only be used with a * Nordic Semiconductor ASA integrated circuit. * * 5. Any software provided in binary or object form under this license must not be reverse * engineered, decompiled, modified and/or disassembled. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
 * */

/* Board support definitions for the Nordic PCA10000 (nRF51 USB dongle):
 * RGB-LED pin mapping and J-Link UART pins. */
#ifndef PCA10000_H
#define PCA10000_H

#ifdef __cplusplus
extern "C" {
#endif

#include "nrf_gpio.h"

// Definitions for PCA10000 v2.0.0 or higher
#if 1

#define LEDS_NUMBER    3

// there is RGB LED on this board
#define LED_RGB_RED    21
#define LED_RGB_GREEN  22
#define LED_RGB_BLUE   23

#define LED_START      LED_RGB_RED
#define BSP_LED_0      LED_RGB_RED
#define BSP_LED_1      LED_RGB_GREEN
#define BSP_LED_2      LED_RGB_BLUE
#define LED_STOP       LED_RGB_BLUE

/* LEDs are active-low on this board. */
#define LEDS_ACTIVE_STATE 0

#define BUTTONS_LIST {}
#define LEDS_LIST { LED_RGB_RED, LED_RGB_GREEN, LED_RGB_BLUE }

#define LEDS_INV_MASK  LEDS_MASK

// there are no buttons on this board
#define BUTTONS_NUMBER 0

// UART pins connected to J-Link
#define RX_PIN_NUMBER  11
#define TX_PIN_NUMBER  9
#define CTS_PIN_NUMBER 10
#define RTS_PIN_NUMBER 8
/* Hardware flow control enabled (CTS/RTS wired to J-Link). */
#define HWFC           true

// Definitions for PCA10000 v1.0
#else

#define RX_PIN_NUMBER 3
#define TX_PIN_NUMBER 1
#define CTS_PIN_NUMBER 2
#define RTS_PIN_NUMBER 0
#define HWFC           true

#endif

// Low frequency clock source to be used by the SoftDevice
#define NRF_CLOCK_LFCLKSRC      {.source        = NRF_CLOCK_LF_SRC_XTAL,            \
                                 .rc_ctiv       = 0,                                \
                                 .rc_temp_ctiv  = 0,                                \
                                 .xtal_accuracy = NRF_CLOCK_LF_XTAL_ACCURACY_20_PPM}

#ifdef __cplusplus
}
#endif

#endif
package page_home

import "net/http"

// HandleHome is the handler for the home page: it answers every request
// with the plain-text body "hello". The request itself is not inspected.
func HandleHome(w http.ResponseWriter, r *http.Request) {
	const greeting = "hello"
	_, _ = w.Write([]byte(greeting))
}
# encoding: utf-8
require 'spec_helper'

# Specs for HorizontalTable: renders an array of objects as a
# Unicode box-drawing table, one row per object, one column per
# requested attribute. Expected output is compared as an exact string.
describe HorizontalTable do
  before(:all) do
    # Simple value object used as row data in all examples.
    Person = Struct.new( :name, :address)
    @output = [
      Person.new('<NAME>', 'Sesamestreet 1'),
      Person.new('<NAME>', 'Musterstroat 1'),
    ]
  end

  it "outputs a table for an array of objects" do
    # NOTE(review): heredoc content must match the renderer's padding exactly;
    # the expected column widths below are as captured from the original file.
    HorizontalTable.run(@output, attributes: [:name, :address]).should == <<-EOF
┏━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━┓
┃ <NAME> ┃ Sesamestreet 1 ┃
┣━━━━━━━━━━━━━━━━━━━━━╊━━━━━━━━━━━━━━━━┫
┃ <NAME> ┃ Musterstroat 1 ┃
┗━━━━━━━━━━━━━━━━━━━━━┻━━━━━━━━━━━━━━━━┛
EOF
  end

  it "fails when an invalid attribute is given" do
    lambda{ HorizontalTable.run(@output, attributes: [:typo_name, :address]) }.should raise_error UnknownDataAttribute
  end

  it "could have a header row" do
    # Header cells are rendered in ANSI bold (\e[1m ... \e[0m).
    HorizontalTable.run(@output, attributes: [:name, :address], header: ["Name", "Address"]).should == <<-EOF
┏━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━┓
┃ \e[1mName \e[0m ┃ \e[1mAddress \e[0m ┃
┣━━━━━━━━━━━━━━━━━━━━━╊━━━━━━━━━━━━━━━━┫
┃ <NAME> ┃ Sesamestreet 1 ┃
┣━━━━━━━━━━━━━━━━━━━━━╊━━━━━━━━━━━━━━━━┫
┃ <NAME> ┃ Musterstroat 1 ┃
┗━━━━━━━━━━━━━━━━━━━━━┻━━━━━━━━━━━━━━━━┛
EOF
  end

  it "fails if the count of header column does not match the count of the attribute columns" do
    lambda{HorizontalTable.run(@output, attributes: [:name, :address], header: ["Name" ])}.should raise_error InvalidTableDefinition
  end

  # The examples below are disabled; they exercise internal width-calculation
  # helpers that appear to have been private or removed.
  #  describe '#calc_column_width' do
  #
  #    it 'calculates the column width based on a percentage' do
  #      ENV['COLUMNS'] = '100'
  #      calc_column_width('50%').should == 50
  #
  #      ENV['COLUMNS'] = '150'
  #      calc_column_width('50%').should == 75
  #    end
  #
  #    it 'calculates the column width based on absolute value' do
  #      ENV['COLUMNS'] = '150'
  #      calc_column_width('50').should == 50
  #    end
  #
  #  end
  #
  #  describe '#calc_relative_column_width' do
  #
  #    it 'calculates the column width based on a percentage' do
  #      ENV['COLUMNS'] = '100'
  #      calc_relative_column_width(50).should == 50
  #
  #      ENV['COLUMNS'] = '150'
  #      calc_relative_column_width(50).should == 75
  #    end
  #
  #  end
  #
  #  describe '#delete_newline' do
  #    it 'deletes newline in string' do
  #      "text\ntext".delete_newline.should == 'texttext'
  #    end
  #  end
  #
  #  describe "prepares the column widths" do
  #    it "overwrite global options with special ones" do
  #      table_options = { width: 90 }
  #
  #      column_options = { width: 45 }
  #      column_widths = %W[ 40 30 20 ]
  #
  #      prepare_column_widths( column_widths, table_options, column_options ).should == [ 40, 30, 20 ]
  #    end
  #
  #    it "fills empty options with global ones" do
  #      table_options = { width: 115 }
  #
  #      column_options = { width: 45 }
  #      column_widths = %W[ 40 30 ]
  #
  #      prepare_column_widths( column_widths, table_options, column_options ).should == [ 40, 30, 45]
  #    end
  #
  #    it "fails when table width is smaller than all column widths together" do
  #      table_options = { width: 100 }
  #
  #      column_options = { width: 45 }
  #      column_widths = %W[ 40 30 ]
  #
  #      lambda{prepare_column_widths( column_widths, table_options, column_options )}.should raise_error InvalidTableDefinition
  #
  #    end
  #
  #    it "converts string percent values in absolute ones" do
  #      column_widths = %W[ 40% 30% 30%]
  #      ENV['COLUMNS'] = '100'
  #      prepare_column_widths( column_widths, table_options, column_options ).should == [ 40, 30, 30]
  #    end
  #
  #  end
end
#!/bin/sh # # Vivado(TM) # runme.sh: a Vivado-generated Runs Script for UNIX # Copyright 1986-2017 Xilinx, Inc. All Rights Reserved. # if [ -z "$PATH" ]; then PATH=/opt/Xilinx/SDK/2017.4/bin:/opt/Xilinx/Vivado/2017.4/ids_lite/ISE/bin/lin64:/opt/Xilinx/Vivado/2017.4/bin else PATH=/opt/Xilinx/SDK/2017.4/bin:/opt/Xilinx/Vivado/2017.4/ids_lite/ISE/bin/lin64:/opt/Xilinx/Vivado/2017.4/bin:$PATH fi export PATH if [ -z "$LD_LIBRARY_PATH" ]; then LD_LIBRARY_PATH=/opt/Xilinx/Vivado/2017.4/ids_lite/ISE/lib/lin64 else LD_LIBRARY_PATH=/opt/Xilinx/Vivado/2017.4/ids_lite/ISE/lib/lin64:$LD_LIBRARY_PATH fi export LD_LIBRARY_PATH HD_PWD='/ectf/pl/proj/ip_repo/edit_birdwtch_iface_v1_0.runs/synth_1' cd "$HD_PWD" HD_LOG=runme.log /bin/touch $HD_LOG ISEStep="./ISEWrap.sh" EAStep() { $ISEStep $HD_LOG "$@" >> $HD_LOG 2>&1 if [ $? -ne 0 ] then exit fi } EAStep vivado -log birdwtch_iface_v1_0.vds -m64 -product Vivado -mode batch -messageDb vivado.pb -notrace -source birdwtch_iface_v1_0.tcl
#!/bin/bash

# This function takes no arguments.
# It tries to determine the name of this file in a programmatic way.
function _get_sourced_filename() {
  if [ -n "${BASH_SOURCE[0]}" ]; then
    basename "${BASH_SOURCE[0]}"
  elif [ -n "${(%):-%x}" ]; then
    # in zsh use prompt-style expansion to introspect the same information
    # see http://stackoverflow.com/questions/9901210/bash-source0-equivalent-in-zsh
    basename "${(%):-%x}"
  else
    echo "UNKNOWN FILE"
  fi
}

# The arguments to this are:
# 1. activation nature {activate|deactivate}
# 2. toolchain nature {build|host|ccc}
# 3. machine (should match -dumpmachine)
# 4. prefix (including any final -)
# 5+ program (or environment var comma value)
# The format for 5+ is name{,,value}. If value is specified
# then name taken to be an environment variable, otherwise
# it is taken to be a program. In this case, which is used
# to find the full filename during activation. The original
# value is stored in environment variable CONDA_BACKUP_NAME
# For deactivation, the distinction is irrelevant as in all
# cases NAME simply gets reset to CONDA_BACKUP_NAME. It is
# a fatal error if a program is identified but not present.
function _tc_activation() {
  local act_nature=$1; shift
  local tc_prefix=$1; shift
  local thing
  local newval
  local from
  local to
  local pass

  # Direction of the swap: activate backs up NAME into CONDA_BACKUP_NAME;
  # deactivate restores NAME from CONDA_BACKUP_NAME.
  if [ "${act_nature}" = "activate" ]; then
    from=""
    to="CONDA_BACKUP_"
  else
    from="CONDA_BACKUP_"
    to=""
  fi

  # Two passes: "check" validates every program exists before "apply"
  # mutates any environment variables (all-or-nothing behavior).
  for pass in check apply; do
    for thing in "$@"; do
      case "${thing}" in
        *,*)
          # name,value form: split into variable name and explicit value.
          newval=$(echo "${thing}" | sed "s,^[^\,]*\,\(.*\),\1,")
          thing=$(echo "${thing}" | sed "s,^\([^\,]*\)\,.*,\1,")
          ;;
        *)
          # bare program form: resolve to the prefixed tool in this env's bin/.
          newval="${CONDA_PREFIX}/bin/${tc_prefix}${thing}"
          if [ ! -x "${newval}" -a "${pass}" = "check" ]; then
            echo "ERROR: This cross-compiler package contains no program ${newval}"
            return 1
          fi
          ;;
      esac
      if [ "${pass}" = "apply" ]; then
        # Uppercase and sanitize the name ('+' and '-' become 'X' and '_')
        # so e.g. "c++" becomes the env var "CXX".
        thing=$(echo ${thing} | tr 'a-z+-' 'A-ZX_')
        eval oldval="\$${from}$thing"
        # Back up (or restore) the previous value, then set the new one;
        # unset instead of exporting empty strings.
        if [ -n "${oldval}" ]; then
          eval export "${to}'${thing}'=\"${oldval}\""
        else
          eval unset '${to}${thing}'
        fi
        if [ -n "${newval}" ]; then
          eval export "'${from}${thing}=${newval}'"
        else
          eval unset '${from}${thing}'
        fi
      fi
    done
  done
  return 0
}

# When people are using conda-build, assume that adding rpath during build, and pointing at
# the host env's includes and libs is helpful default behavior
if [ "${CONDA_BUILD:-0}" = "1" ]; then
  CXXFLAGS_USED="@CXXFLAGS@ -isystem ${PREFIX}/include -fdebug-prefix-map=${SRC_DIR}=/usr/local/src/conda/${PKG_NAME}-${PKG_VERSION} -fdebug-prefix-map=${PREFIX}=/usr/local/src/conda-prefix"
  DEBUG_CXXFLAGS_USED="@DEBUG_CXXFLAGS@ -isystem ${PREFIX}/include -fdebug-prefix-map=${SRC_DIR}=/usr/local/src/conda/${PKG_NAME}-${PKG_VERSION} -fdebug-prefix-map=${PREFIX}=/usr/local/src/conda-prefix"
else
  CXXFLAGS_USED="@CXXFLAGS@ -isystem ${CONDA_PREFIX}/include"
  DEBUG_CXXFLAGS_USED="@DEBUG_CXXFLAGS@ -isystem ${CONDA_PREFIX}/include"
fi

# Snapshot the environment before activation so the diff can be reported below.
if [ "${CONDA_BUILD:-0}" = "1" ]; then
  if [ -f /tmp/old-env-$$.txt ]; then
    rm -f /tmp/old-env-$$.txt || true
  fi
  env > /tmp/old-env-$$.txt
fi

# Activate the C++ cross toolchain: programs c++/g++ plus the flag variables.
# @CHOST@/@CBUILD@ are substituted at package build time.
_tc_activation \
  activate @CHOST@- \
  c++ g++ \
  "CXXFLAGS,${CXXFLAGS:-${CXXFLAGS_USED}}" \
  "DEBUG_CXXFLAGS,${DEBUG_CXXFLAGS:-${DEBUG_CXXFLAGS_USED}}" \
  "CXX_FOR_BUILD,${CONDA_PREFIX}/bin/@CBUILD@-c++"

if [ $? -ne 0 ]; then
  echo "ERROR: $(_get_sourced_filename) failed, see above for details"
  # NOTE(review): exit is deliberately disabled — a sourced activation script
  # must not terminate the caller's shell.
  #exit 1
else
  # Report the environment delta (excluding the CONDA_BACKUP_* noise).
  if [ "${CONDA_BUILD:-0}" = "1" ]; then
    if [ -f /tmp/new-env-$$.txt ]; then
      rm -f /tmp/new-env-$$.txt || true
    fi
    env > /tmp/new-env-$$.txt
    echo "INFO: $(_get_sourced_filename) made the following environmental changes:"
    diff -U 0 -rN /tmp/old-env-$$.txt /tmp/new-env-$$.txt | tail -n +4 | grep "^-.*\|^+.*" | grep -v "CONDA_BACKUP_" | sort
    rm -f /tmp/old-env-$$.txt /tmp/new-env-$$.txt || true
  fi
fi
#!/bin/sh -ex celery -A core worker -l info & celery -A core beat -l info --scheduler django_celery_beat.schedulers:DatabaseScheduler & # celery -A core beat -l info & tail -f /dev/null
import { Component, OnInit } from '@angular/core';

import { PeopleService } from '../shared/people-service';

/**
 * Home page component: displays a single person fetched from PeopleService.
 */
@Component({
  selector: 'sfeir-home',
  templateUrl: 'home.component.html',
  styleUrls: ['home.component.css']
})
export class HomeComponent implements OnInit {
  // Person currently shown in the template; starts empty until data arrives.
  person: any = {};

  constructor(private readonly peopleService: PeopleService) {}

  /**
   * OnInit implementation: fetch the full people list and show its first entry.
   */
  ngOnInit(): void {
    this.peopleService.fetch().subscribe(people => (this.person = people[0]));
  }

  /**
   * Returns random people: replaces the displayed person with a random one.
   */
  random(): void {
    this.peopleService.fetchRandom().subscribe(randomPerson => (this.person = randomPerson));
  }
}
#!/bin/bash

# Env vars required:
# - METASHARE_DIR
# - TEST_DIR
# - PYTHON

# Prints the number of nodes configured for this test run.
get_node_count() {
  local NODE_COUNT=`$PYTHON "$MSERV_DIR/get_node_count.py"`
  echo $NODE_COUNT
}

# Imports one XML file on the given node, optionally with an id file.
# Returns the exit status of the import command.
import_file_on_node() {
  local NODE_NUM="$1" ; shift
  local IMP_FILE="$1" ; shift
  local ID_FILE="$1" ; shift
  local NODE_NAME=`get_node_info $NODE_NUM NODE_NAME`
  local IMPORTS_LOG=$TEST_DIR/imp.log
  export NODE_DIR=$TEST_DIR/$NODE_NAME
  local ret_val
  echo "Import file $IMP_FILE on node $NODE_NAME"
  #cd "$METASHARE_DIR"
  if [[ "$ID_FILE" == "" ]] ; then
    $PYTHON metashare/import_xml.py "$IMP_FILE" > "$IMPORTS_LOG"
    ret_val=$?
  else
    $PYTHON metashare/import_xml.py --id-file="$ID_FILE" "$IMP_FILE" > "$IMPORTS_LOG"
    ret_val=$?
  fi
  rm -f "$IMPORTS_LOG"
  #cd "$CURRENT_DIR"
  return $ret_val
}

# Imports every file of the named fileset on the given node.
# NOTE(review): the while loop runs in a pipeline subshell, so per-file
# import failures are not propagated to the caller — confirm intended.
import_fileset_on_node() {
  local NODE_NUM="$1" ; shift
  local FSET_NAME="$1" ; shift
  get_fileset $FSET_NAME $NODE_NUM | while read LINE
  do
    import_file_on_node $NODE_NUM "$LINE"
  done
}

# The import_files accept a parameter 'type' that can have the following values:
# inner : import files only on META-SHARE Managing Nodes (aka. inner nodes)
# outer : import files only on normal META-SHARE Nodes (aka. outer nodes)
# all (default): import files on all nodes
import_files() {
  local FSET_NAME="$1" ; shift
  local TARGET_TYPE="${1:-all}"
  local NODE_COUNT=`get_node_count`
  echo "Importing files on $TARGET_TYPE nodes"
  for (( j=0; j<$NODE_COUNT; j++ ))
  do
    local DO_IMPORT=0
    if [[ "$TARGET_TYPE" == "all" ]] ; then
      DO_IMPORT=1
    else
      local NODE_TYPE=`get_node_info $j NODE_TYPE`
      if [[ "$NODE_TYPE" == "$TARGET_TYPE" ]] ; then
        DO_IMPORT=1
      fi
    fi
    if [[ $DO_IMPORT -eq 1 ]] ; then
      import_fileset_on_node $j "${FSET_NAME}"
    fi
  done
}

# Runs "manage.py synchronize" for one node; on failure reports on fd 3
# (test-harness reporting channel) and returns the sync's exit status.
synchronize_node() {
  local NODE_NUM="$1"
  local NODE_NAME=`get_node_info $NODE_NUM NODE_NAME`
  local REMOTE_DATA_FILE=$TEST_DIR/rem_$NODE_NAME
  export NODE_DIR=$TEST_DIR/$NODE_NAME
  echo "Synchronizing $NODE_NAME"
  #cd "$METASHARE_DIR"
  $PYTHON manage.py synchronize > "$REMOTE_DATA_FILE"
  local ret_val=$?
  if [[ $ret_val -ne 0 ]] ; then
    echo -n "Error in synchronizing $NODE_NAME" >&3
    return $ret_val
  fi
  rm -f "$REMOTE_DATA_FILE"
  # NOTE(review): this re-declaration captures rm's exit status, shadowing the
  # sync status checked above — presumably intentional cleanup reporting.
  local ret_val=$?
  #cd "$CURRENT_DIR"
  return $ret_val
}

# Like synchronize_node, but restricts synchronization to ids in ID_FILE.
synchronize_node_idf() {
  local NODE_NUM="$1" ; shift
  local ID_FILE="$1" ; shift
  local NODE_NAME=`get_node_info $NODE_NUM NODE_NAME`
  local REMOTE_DATA_FILE=$TEST_DIR/$NODE_NAME/rem.log
  export NODE_DIR=$TEST_DIR/$NODE_NAME
  echo "Synchronizing $NODE_NAME"
  #cd "$METASHARE_DIR"
  $PYTHON manage.py synchronize --id-file=$ID_FILE > $REMOTE_DATA_FILE
  local ret_val=$?
  #cd "$CURRENT_DIR"
  return $ret_val
}

# Synchronizes all nodes; returns the first non-zero status encountered.
synchronize_nodes() {
  local NODE_COUNT=`get_node_count`
  local ret_val=0
  local last_ret_val
  for (( j=0; j<$NODE_COUNT; j++ ))
  do
    synchronize_node $j
    last_ret_val=$?
    if [[ $ret_val -eq 0 ]] ; then
      ret_val=$last_ret_val
    fi
  done
  return $ret_val
}

# Prints the sorted resource list of a node; pass "ext" for extended info.
get_node_resource_list() {
  local NODE_NUM="$1" ; shift
  local EXT="$1" ; shift
  local NODE_NAME=`get_node_info $NODE_NUM NODE_NAME`
  #echo "NODE_NUM = $NODE_NUM , NODE_NAME = $NODE_NAME"
  export NODE_DIR=$TEST_DIR/$NODE_NAME
  #echo "Get resource list for node $NODE_NAME"
  local EXTRA_INFO=""
  if [ "$EXT" == "ext" ] ; then
    EXTRA_INFO="--extended"
  fi
  #cd "$METASHARE_DIR"
  # NOTE(review): quoting "$EXTRA_INFO" passes an empty positional argument
  # when EXT is not "ext" — confirm manage.py tolerates this.
  $PYTHON manage.py get_resource_list "$EXTRA_INFO" | sort
  #cd "$CURRENT_DIR"
}

# Extracts the digest_checksum field from a storage JSON file.
get_digest() {
  local FILENAME="$1"
  local DIG=`get_key_value "$FILENAME" digest_checksum`
  echo $DIG
}

# Extracts the publication_status field from a storage JSON file.
get_publ_status() {
  local FILENAME="$1"
  local PUB_ST=`get_key_value "$FILENAME" publication_status`
  echo $PUB_ST
}

# Pulls the value of "KEY":"value" out of a one-line JSON file via sed.
# Only lowercase alphanumeric values are matched.
get_key_value() {
  local FILENAME="$1"
  local KEY="$2"
  local VAL=`cat "$FILENAME" | sed -e "s/\(.*\)\(\"$KEY\":\)\(\"\)\([a-z0-9]*\)\(\"\)\(.*\)/\4/"`
  echo $VAL
}

# Runs "manage.py update_digests" on one node.
update_digests_on_node() {
  local NODE_NUM="$1"
  local NODE_NAME=`get_node_info $NODE_NUM NODE_NAME`
  export NODE_DIR=$TEST_DIR/$NODE_NAME
  echo "Updating digests on node $NODE_NAME"
  #cd "$METASHARE_DIR"
  $PYTHON manage.py update_digests
  local ret_val=$?
  #cd "$CURRENT_DIR"
  return $ret_val
}

# Updates digests on every node.
update_digests() {
  local NODE_COUNT=`get_node_count`
  for (( j=0; j<$NODE_COUNT; j++ ))
  do
    update_digests_on_node $j
  done
}

# The following function performs the check by using
# the folders in the storageFolder.
# It seems to not work appropriately: maybe if a resource
# is imported more than once it uses a new storage directory
# and the old one is not deleted.
# Needs more checking.
check_resources() {
  local NODES="$1" ; shift
  echo "Checking that all nodes contain the same set of published resources"
  echo "  and the corresponding resources have the same digest"
  for NODE_NUM in $NODES
  do
    local NODE_NAME=`get_node_info $NODE_NUM NODE_NAME`
    echo "Get list of resources from node $NODE_NAME"
    local DIR=$TEST_DIR/$NODE_NAME/storageFolder
    ls "$DIR" | while read LINE
    do
      echo "--> " $LINE
      local G_JSON=$DIR/$LINE/storage-global.json
      local L_JSON=$DIR/$LINE/storage-local.json
      local DIG=`get_digest "$L_JSON"`
      local PUB_ST=`get_publ_status "$G_JSON"`
      echo "  pub status = $PUB_ST"
      echo "  digest = $DIG"
      if [[ "$PUB_ST" == "p" ]] ; then
        echo "$LINE:$DIG" >> $TEST_DIR/stat-$NODE_NAME.res
      fi
    done
  done
}

# This function gets the list of resource for a given node
# by querying the database and listing all the
# published resources.
check_resources_2() {
  local NODES="$1" ; shift
  echo "Checking that all nodes contain the same set of published resources"
  echo "  and the corresponding resources have the same digest"
  local CHECK_OK=1
  local PREVIOUS_RES=""
  local PREVIOUS_NODE_NAME=""
  local RES_DETAILS=$TEST_DIR/res-details.log
  touch "$RES_DETAILS"
  for NODE_NUM in $NODES
  do
    local NODE_NAME=`get_node_info $NODE_NUM NODE_NAME`
    local RES_FILE=$TEST_DIR/stat-$NODE_NAME.res
    # Collect extended details for the failure report, then the plain list
    # used for the pairwise comparison between consecutive nodes.
    get_node_resource_list $NODE_NUM "ext" > "$RES_FILE"
    echo "Resources on node $NODE_NAME: (id:digest:source_url)" >> "$RES_DETAILS"
    cat "$RES_FILE" >> "$RES_DETAILS"
    get_node_resource_list $NODE_NUM > "$RES_FILE"
    if [[ "$PREVIOUS_RES" != "" ]] ; then
      echo "Comparing $RES_FILE and $PREVIOUS_RES."
      C=`diff "$RES_FILE" "$PREVIOUS_RES" | wc -l`
      if [[ "$C" != "0" ]] ; then
        echo "FAILED"
        CHECK_OK=0
        echo "Resource list for node $NODE_NAME:"
        cat "$RES_FILE"
        echo "Resource list for node $PREVIOUS_NODE_NAME:"
        cat "$PREVIOUS_RES"
      fi
    fi
    rm -f "$PREVIOUS_RES"
    PREVIOUS_RES=$RES_FILE
    PREVIOUS_NODE_NAME=$NODE_NAME
  done
  rm -f $PREVIOUS_RES
  if [[ "$CHECK_OK" == "1" ]] ; then
    echo "Synchronization successful"
    rm -f "$RES_DETAILS"
  else
    echo "Synchronization failed"
    echo -n "Synchronization failed" >&3
    echo "Dumping details"
    cat "$RES_DETAILS"
    rm -f "$RES_DETAILS"
    return 1
  fi
}

# Runs check_resources_2 restricted to the "inner" (managing) nodes.
check_resources_on_inner_nodes() {
  local NODE_COUNT=`get_node_count`
  local NODES=""
  for (( j=0; j<$NODE_COUNT; j++ ))
  do
    local NODE_TYPE=`get_node_info $j NODE_TYPE`
    if [[ "$NODE_TYPE" == "inner" ]] ; then
      NODES="$NODES $j"
    fi
  done
  echo "Checking resources on nodes $NODES"
  check_resources_2 "$NODES"
  local ret_val=$?
  return $ret_val
}
package br.univille.felipedacs2021.controller;

import java.util.List;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.ModelAttribute;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.servlet.ModelAndView;

import br.univille.felipedacs2021.model.Fornecedor;
import br.univille.felipedacs2021.service.FornecedorService;

/**
 * MVC controller exposing CRUD operations for {@link Fornecedor}
 * (supplier) entities under the {@code /fornecedor} path.
 */
@Controller
@RequestMapping("/fornecedor")
public class FornecedorController {

    @Autowired
    private FornecedorService fornecedorService;

    /** Renders the listing page with every registered supplier. */
    @GetMapping
    public ModelAndView index() {
        List<Fornecedor> todos = fornecedorService.getAllFornecedores();
        return new ModelAndView("fornecedor/index", "listaFornecedores", todos);
    }

    /** Renders an empty form for registering a new supplier. */
    @GetMapping("/novo")
    public ModelAndView novo(@ModelAttribute Fornecedor fornecedor) {
        return new ModelAndView("fornecedor/form");
    }

    /** Persists the submitted supplier and redirects back to the listing. */
    @PostMapping(params = "form")
    public ModelAndView save(Fornecedor fornecedor) {
        fornecedorService.save(fornecedor);
        return new ModelAndView("redirect:/fornecedor");
    }

    /** Opens the edit form pre-populated with the supplier resolved from {id}. */
    @GetMapping(value = "/alterar/{id}")
    public ModelAndView alterar(@PathVariable("id") Fornecedor fornecedor) {
        return new ModelAndView("fornecedor/form", "fornecedor", fornecedor);
    }

    /** Deletes the supplier resolved from {id} and redirects to the listing. */
    @GetMapping(value = "/delete/{id}")
    public ModelAndView delete(@PathVariable("id") Fornecedor fornecedor) {
        fornecedorService.delete(fornecedor);
        return new ModelAndView("redirect:/fornecedor");
    }
}
# Environment variables for system tests. export GCLOUD_PROJECT=your-project-id export GCP_PROJECT=$GCLOUD_PROJECT export GOOGLE_CLOUD_PROJECT=$GCLOUD_PROJECT export FIRESTORE_PROJECT= export CLOUD_STORAGE_BUCKET=$GCLOUD_PROJECT export REQUESTER_PAYS_TEST_BUCKET="${CLOUD_STORAGE_BUCKET}-requester-pays-test" export API_KEY= export BIGTABLE_CLUSTER=bigtable-test export BIGTABLE_ZONE=us-central1-c export BIGTABLE_INSTANCE= export SPANNER_INSTANCE= export COMPOSER_LOCATION=us-central1 export COMPOSER_ENVIRONMENT= export CLOUD_KMS_KEY= export MYSQL_INSTANCE= export MYSQL_USER= export MYSQL_PASSWORD= export MYSQL_DATABASE= export POSTGRES_INSTANCE= export POSTGRES_USER= export POSTGRES_PASSWORD= export POSTGRES_DATABASE= # Environment variables for App Engine Flexible system tests. export GA_TRACKING_ID= export SQLALCHEMY_DATABASE_URI=sqlite:// export PUBSUB_TOPIC=gae-mvm-pubsub-topic export PUBSUB_VERIFICATION_TOKEN=1234abc # Mailgun, Sendgrid, and Twilio config. # These aren't current used because tests do not exist for these. export MAILGUN_DOMAIN_NAME= export MAILGUN_API_KEY= export SENDGRID_API_KEY= export SENDGRID_SENDER= export TWILIO_ACCOUNT_SID= export TWILIO_AUTH_TOKEN= export TWILIO_NUMBER=
package com.twitter.finatra.json.tests.internal.caseclass.validation.validators

import com.twitter.finatra.json.internal.caseclass.validation.validators.PastTimeValidator._
import com.twitter.finatra.validation.ValidationResult._
import com.twitter.finatra.validation.{ErrorCode, PastTime, ValidationResult, ValidatorTest}
import org.joda.time.DateTime

/**
 * Specs for the @PastTime case-class field validator: a DateTime value
 * must lie in the past for validation to succeed.
 */
class PastTimeValidatorTest extends ValidatorTest {

  "past validator" should {
    "pass validation for valid datetime" in {
      // Epoch (1970) is safely in the past.
      val minDateTime = new DateTime(0)
      validate[PastExample](minDateTime) should equal(Valid)
    }

    "fail validation for invalid datetime" in {
      // A timestamp one day in the future must be rejected with TimeNotPast.
      val futureDateTime = DateTime.now().plusDays(1)
      validate[PastExample](futureDateTime) should equal(
        Invalid(
          errorMessage(messageResolver, futureDateTime),
          ErrorCode.TimeNotPast(futureDateTime)))
    }
  }

  // Runs the framework validation for field "dateTime" of case class C
  // against the @PastTime annotation.
  private def validate[C: Manifest](value: Any): ValidationResult = {
    super.validate(manifest[C].runtimeClass, "dateTime", classOf[PastTime], value)
  }
}

// Minimal fixture: a single @PastTime-annotated field.
case class PastExample(
  @PastTime dateTime: DateTime)
import { Document } from 'mongoose';

/**
 * Mongoose document type combining the User shape with Document behavior.
 * NOTE: this is a generated .d.ts declaration file — regenerate rather
 * than hand-editing.
 */
export declare type UserDocument = User & Document;

/**
 * User model fields as persisted in MongoDB.
 * NOTE(review): most fields are typed string (including dob and the
 * document timestamps) — presumably mirroring the runtime schema; confirm
 * against the source schema definition.
 */
export declare class User {
    first_name: string;
    last_name: string;
    email: string;
    country_code: string;
    phone_number: string;
    verify_otp: string;
    gender: string;
    dob: string;
    profession: string;
    profile: string;
    driving_licence: string;
    cnic: string;
    license: string;
    registration: string;
    insurance: string;
    account_status: string;
    fcm: string;
    role: string;
    wallet_amount_user: number;
    wallet_amount_driver: number;
    created_at: string;
    updated_at: string;
}

/** Compiled mongoose schema for the User model. */
export declare const UserSchema: import("mongoose").Schema<Document<User, any, any>, import("mongoose").Model<Document<User, any, any>, any, any, any>, {}>;
@app.task(name="sdc.move12", bind=True)
def task_2(self, y):
    """Celery task ``sdc.move12``: sleep two seconds, then delegate to
    ``MoveApps(":move", y).bar()`` and return its result.

    ``bind=True`` makes the task instance available as ``self``.

    NOTE(review): ``app``, ``time`` and ``MoveApps`` must be in scope at
    module level; they are not visible in this fragment -- confirm imports.
    """
    time.sleep(2)  # fixed delay before running the move -- confirm why 2s
    return MoveApps(":move", y).bar()
from source.models.service import DockerService
import subprocess
import socket
import json

HOST = 'localhost'
PORT = 65432


def check_docker_status(label: str) -> bytes:
    """Return a JSON response whose data is True iff `systemctl status docker`
    exits successfully (i.e. the docker daemon looks active)."""
    try:
        subprocess.check_output('systemctl status docker', shell=True)
        return format_response(label, True)
    # BUGFIX: was a bare `except:`, which also swallowed SystemExit and
    # KeyboardInterrupt; narrowed to Exception (CalledProcessError,
    # FileNotFoundError, ...).
    except Exception:
        return format_response(label, False)


def list_services(label: str) -> bytes:
    """Return a JSON response listing running containers, serialized via
    DockerService from the tab-separated `docker ps` format below.
    Best-effort: any failure yields an empty list."""
    try:
        ps = subprocess.check_output(
            'docker ps --format "@{{.ID}}\t@{{.Names}}\t@{{.Status}}\t@{{.Ports}}\t@{{.CreatedAt}}"',
            shell=True)
        services_data = ps.decode().strip().split('\n')
        services = [DockerService(data).serialize() for data in services_data]
        return format_response(label, services)
    except Exception:
        return format_response(label, [])


def list_images(label: str) -> bytes:
    """Return a JSON response listing image "name tag" pairs (dangling
    <none> images filtered out; first awk drops them, second keeps cols 1-2).
    Best-effort: any failure yields an empty list."""
    try:
        ps = subprocess.check_output(
            "docker image ls | awk '!/none/' | awk '{print($1, $2)}'",
            shell=True)
        images = ps.decode().strip().split('\n')[1:]  # [1:] skips the header row
        return format_response(label, images)
    except Exception:
        return format_response(label, [])


def format_response(label: str, data) -> bytes:
    """Encode {"label": ..., "data": ...} as UTF-8 JSON bytes for the socket."""
    response = {
        "label": label,
        "data": data
    }
    return json.dumps(response).encode()


if __name__ == '__main__':
    # Minimal single-client command server: accepts one connection and
    # answers STATUS / LIST / IMAGES commands until the peer disconnects.
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
        sock.bind((HOST, PORT))
        sock.listen()
        conn, addr = sock.accept()
        with conn:
            while True:
                data = conn.recv(1024)
                if not data:
                    break
                cmd = data.decode('ascii').strip().upper()
                if cmd == 'STATUS':
                    conn.sendall(check_docker_status(cmd))
                elif cmd == 'LIST':
                    conn.sendall(list_services(cmd))
                elif cmd == 'IMAGES':
                    conn.sendall(list_images(cmd))
def detect_biggest_square(matrix):
    """Return the coordinates of the biggest all-ones square in `matrix`.

    Args:
        matrix: rectangular list of lists containing 0/1 values.

    Returns:
        A tuple ``(top, left, bottom, right)`` where ``bottom``/``right`` are
        exclusive (a k-by-k square at (i, j) yields ``(i, j, i+k, j+k)``), or
        ``None`` if the matrix contains no 1 (or is empty). When several
        squares share the maximum size, the first one in row-major order is
        returned.

    BUGFIX: the original expansion loop never terminated once a 0 blocked
    growth (the inner `break` left the `while` condition unchanged), and it
    overwrote the best coordinates even for non-maximal squares. Rewritten
    with an O(rows*cols) dynamic program; also handles empty input.
    """
    if not matrix or not matrix[0]:
        return None

    rows, cols = len(matrix), len(matrix[0])

    # side[i][j] = side length of the largest all-ones square whose
    # top-left corner is (i, j); filled bottom-up / right-to-left.
    side = [[0] * cols for _ in range(rows)]
    for i in range(rows - 1, -1, -1):
        for j in range(cols - 1, -1, -1):
            if matrix[i][j] == 1:
                if i == rows - 1 or j == cols - 1:
                    side[i][j] = 1  # last row/column: square cannot grow
                else:
                    side[i][j] = 1 + min(side[i + 1][j],
                                         side[i][j + 1],
                                         side[i + 1][j + 1])

    best_size = 0
    coords = None
    # Row-major scan with a strict '>' keeps the first maximal square,
    # matching the original selection order.
    for i in range(rows):
        for j in range(cols):
            if side[i][j] > best_size:
                best_size = side[i][j]
                coords = (i, j, i + best_size, j + best_size)
    return coords
package kgo

import (
	"bytes"
	"crypto"
	"crypto/aes"
	"crypto/cipher"
	"crypto/hmac"
	"crypto/rand"
	"crypto/rsa"
	"crypto/sha1"
	"crypto/sha256"
	"crypto/sha512"
	"crypto/x509"
	"encoding/base64"
	"encoding/hex"
	"encoding/pem"
	"errors"
	"fmt"
	"golang.org/x/crypto/bcrypt"
	"hash"
	"io"
	"math/big"
	"strconv"
	"strings"
	"time"
)

// Base64Encode encodes data using MIME base64.
func (ke *LkkEncrypt) Base64Encode(str []byte) string {
	return base64.StdEncoding.EncodeToString(str)
}

// Base64Decode decodes data that was encoded with MIME base64.
// Missing '=' padding is restored before decoding.
func (ke *LkkEncrypt) Base64Decode(str string) ([]byte, error) {
	switch len(str) % 4 {
	case 2:
		str += "=="
	case 3:
		str += "="
	}

	data, err := base64.StdEncoding.DecodeString(str)
	if err != nil {
		return nil, err
	}
	return data, nil
}

// Base64UrlEncode is a URL-safe base64 encoding: '/' and '+' are replaced
// by '_' and '-', and trailing '=' padding characters are removed.
func (ke *LkkEncrypt) Base64UrlEncode(source []byte) string {
	// Base64 Url Safe is the same as Base64 but does not contain '/' and '+' (replaced by '_' and '-') and trailing '=' are removed.
	bytearr := base64.StdEncoding.EncodeToString(source)
	safeurl := strings.Replace(string(bytearr), "/", "_", -1)
	safeurl = strings.Replace(safeurl, "+", "-", -1)
	safeurl = strings.Replace(safeurl, "=", "", -1)
	return safeurl
}

// Base64UrlDecode decodes a URL-safe base64 string; missing padding is
// restored before decoding.
func (ke *LkkEncrypt) Base64UrlDecode(data string) ([]byte, error) {
	var missing = (4 - len(data)%4) % 4
	data += strings.Repeat("=", missing)
	return base64.URLEncoding.DecodeString(data)
}

// AuthCode encodes or decodes an authorization code. encode=true encrypts,
// false decrypts; expiry is the validity period in seconds (0 = no expiry).
// Returns the encrypted/decrypted string and the expiry timestamp.
func (ke *LkkEncrypt) AuthCode(str, key string, encode bool, expiry int64) (string, int64) { // DYNAMIC_KEY_LEN 动态密钥长度,相同的明文会生成不同密文就是依靠动态密钥 // 加入随机密钥,可以令密文无任何规律,即便是原文和密钥完全相同,加密结果也会每次不同,增大破解难度。 // 取值越大,密文变动规律越大,密文变化 = 16 的 DYNAMIC_KEY_LEN 次方 // 当此值为 0 时,则不产生随机密钥 if str == "" { return "", 0 } else if !encode && len(str) < DYNAMIC_KEY_LEN { return "", 0 } // 密钥 keyByte := md5Str([]byte(key), 32) // 密钥a会参与加解密 keya := md5Str(keyByte[:16], 32) // 密钥b会用来做数据完整性验证 keyb := md5Str(keyByte[16:], 32) // 密钥c用于变化生成的密文 var keyc []byte if encode == false { keyc = []byte(str[:DYNAMIC_KEY_LEN]) } else { cLen := 32 - DYNAMIC_KEY_LEN now, _ := time.Now().MarshalBinary() timeBytes := md5Str(now, 32) keyc = timeBytes[cLen:] } // 参与运算的密钥 keyd := md5Str(append(keya, keyc...), 32) cryptkey := append(keya, keyd...) keyLength := len(cryptkey) // 明文,前10位用来保存时间戳,解密时验证数据有效性,10到26位用来保存keyb(密钥b),解密时会通过这个密钥验证数据完整性 // 如果是解码的话,会从第 DYNAMIC_KEY_LEN 位开始,因为密文前 DYNAMIC_KEY_LEN 位保存 动态密钥,以保证解密正确 if encode == false { strByte, err := ke.Base64UrlDecode(str[DYNAMIC_KEY_LEN:]) if err != nil { return "", 0 } str = string(strByte) } else { if expiry != 0 { expiry = expiry + time.Now().Unix() } expMd5 := md5Str(append([]byte(str), keyb...), 16) str = fmt.Sprintf("%010d%s%s", expiry, expMd5, str) } stringLength := len(str) resdata := make([]byte, 0, stringLength) var rndkey, box [256]int // 产生密钥簿 j := 0 a := 0 i := 0 for i = 0; i < 256; i++ { rndkey[i] = int(cryptkey[i%keyLength]) box[i] = i } // 用固定的算法,打乱密钥簿,增加随机性,好像很复杂,实际上并不会增加密文的强度 for i = 0; i < 256; i++ { j = (j + box[i] + rndkey[i]) % 256 box[i], box[j] = box[j], box[i] } // 核心加解密部分 a = 0 j = 0 for i = 0; i < stringLength; i++ { a = ((a + 1) % 256) j = ((j + box[a]) % 256) box[a], box[j] = box[j], box[a] // 从密钥簿得出密钥进行异或,再转成字符 resdata = append(resdata, byte(int(str[i])^box[(box[a]+box[j])%256])) } result := string(resdata) if encode == false { //解密 // substr($result, 0, 10) == 0 验证数据有效性 // substr($result, 0, 10) - time() > 0 验证数据有效性 // substr($result, 10, 16) 
== substr(md5(substr($result, 26).$keyb), 0, 16) 验证数据完整性 // 验证数据有效性,请看未加密明文的格式 if len(result) <= 26 { return "", 0 } expTime, _ := strconv.ParseInt(result[:10], 10, 0) if (expTime == 0 || expTime-time.Now().Unix() > 0) && result[10:26] == string(md5Str(append(resdata[26:], keyb...), 16)) { return result[26:], expTime } else { return "", expTime } } else { //加密 // 把动态密钥保存在密文里,这也是为什么同样的明文,生产不同密文后能解密的原因 result = string(keyc) + ke.Base64UrlEncode(resdata) return result, expiry } } // PasswordHash 创建密码的散列值;costs为算法的cost,范围4~31,默认10;注意:值越大越耗时. func (ke *LkkEncrypt) PasswordHash(password []byte, costs ...int) ([]byte, error) { var cost int if len(costs) == 0 { cost = 10 } else { cost = costs[0] if cost < 4 { cost = 4 } else if cost > 31 { cost = 15 } } bytes, err := bcrypt.GenerateFromPassword(password, cost) return bytes, err } // PasswordVerify 验证密码是否和散列值匹配. func (ke *LkkEncrypt) PasswordVerify(password, hash []byte) bool { err := bcrypt.CompareHashAndPassword(hash, password) return err == nil } // EasyEncrypt 简单加密. // data为要加密的原字符串,key为密钥. func (ke *LkkEncrypt) EasyEncrypt(data, key string) string { dataLen := len(data) if dataLen == 0 { return "" } keyByte := md5Str([]byte(key), 32) keyLen := len(keyByte) var i, x, c int var str []byte for i = 0; i < dataLen; i++ { if x == keyLen { x = 0 } c = (int(data[i]) + int(keyByte[x])) % 256 str = append(str, byte(c)) x++ } res := string(keyByte[:DYNAMIC_KEY_LEN]) + ke.Base64UrlEncode(str) return res } // EasyDecrypt 简单解密. // val为待解密的字符串,key为密钥. 
// EasyDecrypt reverses EasyEncrypt: val is the string to decrypt, key is
// the secret key. Returns "" on any mismatch or decode failure.
func (ke *LkkEncrypt) EasyDecrypt(val, key string) string {
	if len(val) <= DYNAMIC_KEY_LEN {
		return ""
	}

	// Payload after the DYNAMIC_KEY_LEN-byte key-check prefix is URL-safe base64.
	data, err := ke.Base64UrlDecode(val[DYNAMIC_KEY_LEN:])
	if err != nil {
		return ""
	}

	// Verify the prefix matches the md5 of the supplied key (wrong key -> "").
	keyByte := md5Str([]byte(key), 32)
	if val[:DYNAMIC_KEY_LEN] != string(keyByte[:DYNAMIC_KEY_LEN]) {
		return ""
	}

	dataLen := len(data)
	keyLen := len(keyByte)
	var i, x, c int
	var str []byte
	// Inverse of EasyEncrypt's additive cipher: subtract the key byte mod 256.
	for i = 0; i < dataLen; i++ {
		if x == keyLen {
			x = 0
		}
		if data[i] < keyByte[x] {
			c = int(data[i]) + 256 - int(keyByte[x])
		} else {
			c = int(data[i]) - int(keyByte[x])
		}
		str = append(str, byte(c))
		x++
	}
	return string(str)
}

// HmacShaX computes HMAC-SHA-x of data with the given secret, where x is
// one of 1/256/512; the result is hex-encoded. Panics on any other x.
func (ke *LkkEncrypt) HmacShaX(data, secret []byte, x uint16) string {
	// Create a new HMAC by defining the hash type and the key (as byte array)
	var h hash.Hash
	switch x {
	case 1:
		h = hmac.New(sha1.New, secret)
		break
	case 256:
		h = hmac.New(sha256.New, secret)
		break
	case 512:
		h = hmac.New(sha512.New, secret)
		break
	default:
		panic("[HmacShaX] x must be in [1, 256, 512]")
	}

	// Write Data to it
	h.Write(data)

	// Get result and encode as hexadecimal string
	sha := hex.EncodeToString(h.Sum(nil))
	return sha
}

// aesEncrypt AES encryption.
// clearText is the plaintext; key must be 16/24/32 bytes long;
// mode is one of (CBC, CFB, CTR, OFB);
// paddingType is one of (PKCS_NONE, PKCS_ZERO, PKCS_SEVEN), default PKCS_SEVEN.
func (ke *LkkEncrypt) aesEncrypt(clearText, key []byte, mode string, paddingType ...LkkPKCSType) ([]byte, error) { block, err := aes.NewCipher(key) if err != nil { return nil, err } pt := PKCS_SEVEN blockSize := block.BlockSize() if len(paddingType) > 0 { pt = paddingType[0] } switch pt { case PKCS_ZERO: clearText = zeroPadding(clearText, blockSize) case PKCS_SEVEN: clearText = pkcs7Padding(clearText, blockSize, false) } cipherText := make([]byte, blockSize+len(clearText)) //初始化向量 iv := cipherText[:blockSize] _, _ = io.ReadFull(rand.Reader, iv) //if _, err := io.ReadFull(rand.Reader, iv); err != nil { // return nil, err //} switch mode { case "CBC": cipher.NewCBCEncrypter(block, iv).CryptBlocks(cipherText[blockSize:], clearText) case "CFB": cipher.NewCFBEncrypter(block, iv).XORKeyStream(cipherText[blockSize:], clearText) case "CTR": cipher.NewCTR(block, iv).XORKeyStream(cipherText[blockSize:], clearText) case "OFB": cipher.NewOFB(block, iv).XORKeyStream(cipherText[blockSize:], clearText) } return cipherText, nil } // aesDecrypt AES解密. // cipherText为密文;key为密钥,长度16/24/32; // mode为模式,枚举值(CBC,CFB,CTR,OFB); // paddingType为填充方式,枚举(PKCS_NONE,PKCS_ZERO,PKCS_SEVEN),默认PKCS_SEVEN. 
func (ke *LkkEncrypt) aesDecrypt(cipherText, key []byte, mode string, paddingType ...LkkPKCSType) ([]byte, error) { block, err := aes.NewCipher(key) if err != nil { return nil, err } pt := PKCS_SEVEN if len(paddingType) > 0 { pt = paddingType[0] } blockSize := block.BlockSize() clen := len(cipherText) if clen < blockSize { return nil, errors.New("cipherText too short") } iv := cipherText[:blockSize] cipherText = cipherText[blockSize:] switch mode { case "CBC": cipher.NewCBCDecrypter(block, iv).CryptBlocks(cipherText, cipherText) case "CFB": cipher.NewCFBDecrypter(block, iv).XORKeyStream(cipherText, cipherText) case "CTR": cipher.NewCTR(block, iv).XORKeyStream(cipherText, cipherText) case "OFB": cipher.NewOFB(block, iv).XORKeyStream(cipherText, cipherText) } clen = len(cipherText) if pt != PKCS_NONE && clen > 0 && int(cipherText[clen-1]) > clen { return nil, errors.New(fmt.Sprintf("aes [%s] decrypt failed", mode)) } var plainText []byte switch pt { case PKCS_ZERO: plainText = zeroUnPadding(cipherText) case PKCS_SEVEN: plainText = pkcs7UnPadding(cipherText, blockSize) case PKCS_NONE: plainText = cipherText } return plainText, nil } // AesCBCEncrypt AES-CBC密码分组链接(Cipher-block chaining)模式加密.加密无法并行,不适合对流数据加密. // clearText为明文;key为密钥,长16/24/32;paddingType为填充方式,枚举(PKCS_ZERO,PKCS_SEVEN),默认PKCS_SEVEN. func (ke *LkkEncrypt) AesCBCEncrypt(clearText, key []byte, paddingType ...LkkPKCSType) ([]byte, error) { return ke.aesEncrypt(clearText, key, "CBC", paddingType...) } // AesCBCDecrypt AES-CBC密码分组链接(Cipher-block chaining)模式解密. // cipherText为密文;key为密钥,长16/24/32;paddingType为填充方式,枚举(PKCS_NONE,PKCS_ZERO,PKCS_SEVEN),默认PKCS_SEVEN. func (ke *LkkEncrypt) AesCBCDecrypt(cipherText, key []byte, paddingType ...LkkPKCSType) ([]byte, error) { return ke.aesDecrypt(cipherText, key, "CBC", paddingType...) } // AesCFBEncrypt AES-CFB密文反馈(Cipher feedback)模式加密.适合对流数据加密. // clearText为明文;key为密钥,长16/24/32. 
func (ke *LkkEncrypt) AesCFBEncrypt(clearText, key []byte) ([]byte, error) {
	return ke.aesEncrypt(clearText, key, "CFB", PKCS_NONE)
}

// AesCFBDecrypt decrypts AES-CFB (cipher feedback) ciphertext.
// cipherText is the ciphertext; key is the secret key, length 16/24/32.
func (ke *LkkEncrypt) AesCFBDecrypt(cipherText, key []byte) ([]byte, error) {
	return ke.aesDecrypt(cipherText, key, "CFB", PKCS_NONE)
}

// AesCTREncrypt encrypts with AES in CTR (counter) mode.
// clearText is the plaintext; key is the secret key, length 16/24/32.
func (ke *LkkEncrypt) AesCTREncrypt(clearText, key []byte) ([]byte, error) {
	return ke.aesEncrypt(clearText, key, "CTR", PKCS_NONE)
}

// AesCTRDecrypt decrypts AES-CTR (counter) mode ciphertext.
// cipherText is the ciphertext; key is the secret key, length 16/24/32.
func (ke *LkkEncrypt) AesCTRDecrypt(cipherText, key []byte) ([]byte, error) {
	return ke.aesDecrypt(cipherText, key, "CTR", PKCS_NONE)
}

// AesOFBEncrypt encrypts with AES in OFB (output feedback) mode; suited to stream data.
// clearText is the plaintext; key is the secret key, length 16/24/32.
func (ke *LkkEncrypt) AesOFBEncrypt(clearText, key []byte) ([]byte, error) {
	return ke.aesEncrypt(clearText, key, "OFB", PKCS_NONE)
}

// AesOFBDecrypt decrypts AES-OFB (output feedback) mode ciphertext.
// cipherText is the ciphertext; key is the secret key, length 16/24/32.
func (ke *LkkEncrypt) AesOFBDecrypt(cipherText, key []byte) ([]byte, error) {
	return ke.aesDecrypt(cipherText, key, "OFB", PKCS_NONE)
}

// GenerateRsaKeys generates an RSA key pair; bits is the key size, typically 1024 or 2048.
func (ke *LkkEncrypt) GenerateRsaKeys(bits int) (private []byte, public []byte, err error) { // 生成私钥文件 var privateKey *rsa.PrivateKey privateKey, err = rsa.GenerateKey(rand.Reader, bits) if err != nil { return } derStream := x509.MarshalPKCS1PrivateKey(privateKey) block := &pem.Block{ Type: "RSA PRIVATE KEY", Bytes: derStream, } privateBuff := new(bytes.Buffer) _ = pem.Encode(privateBuff, block) // 生成公钥文件 var derPkix []byte publicKey := &privateKey.PublicKey derPkix, _ = x509.MarshalPKIXPublicKey(publicKey) block = &pem.Block{ Type: "RSA PUBLIC KEY", Bytes: derPkix, } publicBuff := new(bytes.Buffer) _ = pem.Encode(publicBuff, block) private = privateBuff.Bytes() public = publicBuff.Bytes() return } // RsaPublicEncrypt RSA公钥加密. // clearText为明文,publicKey为公钥. func (ke *LkkEncrypt) RsaPublicEncrypt(clearText, publicKey []byte) ([]byte, error) { // 解密pem格式的公钥 block, _ := pem.Decode(publicKey) if block == nil { return nil, errors.New("public key error") } // 解析公钥 pubInterface, err := x509.ParsePKIXPublicKey(block.Bytes) if err != nil { return nil, err } // 类型断言 pubKey := pubInterface.(*rsa.PublicKey) //加密 return rsa.EncryptPKCS1v15(rand.Reader, pubKey, clearText) } // RsaPrivateDecrypt RSA私钥解密.比加密耗时. // cipherText为密文,privateKey为私钥. func (ke *LkkEncrypt) RsaPrivateDecrypt(cipherText, privateKey []byte) ([]byte, error) { // 获取私钥 block, _ := pem.Decode(privateKey) if block == nil { return nil, errors.New("private key error!") } // 解析PKCS1格式的私钥 priv, err := x509.ParsePKCS1PrivateKey(block.Bytes) if err != nil { return nil, err } // 解密 return rsa.DecryptPKCS1v15(rand.Reader, priv, cipherText) } // RsaPrivateEncrypt RSA私钥加密.比解密耗时. // clearText为明文,privateKey为私钥. 
func (ke *LkkEncrypt) RsaPrivateEncrypt(clearText, privateKey []byte) ([]byte, error) { // 获取私钥 block, _ := pem.Decode(privateKey) if block == nil { return nil, errors.New("private key error!") } // 解析PKCS1格式的私钥 priv, err := x509.ParsePKCS1PrivateKey(block.Bytes) if err != nil { return nil, err } return rsa.SignPKCS1v15(nil, priv, crypto.Hash(0), clearText) } // RsaPublicDecrypt RSA公钥解密. // cipherText为密文,publicKey为公钥. func (ke *LkkEncrypt) RsaPublicDecrypt(cipherText, publicKey []byte) ([]byte, error) { // 解密pem格式的公钥 block, _ := pem.Decode(publicKey) if block == nil { return nil, errors.New("public key error") } // 解析公钥 pubInterface, err := x509.ParsePKIXPublicKey(block.Bytes) if err != nil { return nil, err } // 类型断言 pubKey := pubInterface.(*rsa.PublicKey) c := new(big.Int) m := new(big.Int) m.SetBytes(cipherText) e := big.NewInt(int64(pubKey.E)) c.Exp(m, e, pubKey.N) out := c.Bytes() olen := len(out) skip := 0 for i := 2; i < olen; i++ { if (i+1 < olen) && out[i] == 0xff && out[i+1] == 0 { skip = i + 2 break } } return out[skip:], nil }
package benchmarks.bess.bessj.Eq;

/**
 * Bessel function of the first kind J_n(x), in the style of the classic
 * Numerical Recipes "bessj" routine: upward recurrence from J0/J1 when
 * x > n, downward (Miller) recurrence with normalisation otherwise.
 */
public class oldV {

    /**
     * Computes J_n(x) for integer order n >= 2 (order passed as a double).
     * Returns -1000 as an error sentinel when n < 2, and 0.0 when x == 0.
     */
    public static double snippet(double n, double x) {
        int IEXP=2 * 1024; // rescale when the binary exponent of bj exceeds this
        double ACC=2.0;    // accuracy factor controlling the downward-recurrence start index
        boolean jsum = false;
        int j = 0;
        int k = 0;
        int m = 0;
        double ax = 0;
        double bj = 0;
        double bjm = 0;
        double bjp = 0;
        double dum = 0;
        double sum = 0;
        double tox = 0;
        double ans = 0;

        if (n < 2) return -1000; // error sentinel: routine only supports n >= 2
        ax=Math.abs(x);
        if (ax*ax <= 0) return 0.0; // x == 0 (or squared underflow): J_n(0) = 0 for n >= 1
        else if (ax > (n)) {
            // Upward recurrence J_{j+1} = (2j/x) J_j - J_{j-1}, stable for x > n.
            tox=2.0/ax;
            bjm=bessj0(ax);
            bj=bessj1(ax);
            for (j=1;j<n;j++) {
                bjp=j*tox*bj-bjm;
                bjm=bj;
                bj=bjp;
            }
            ans=bj;
        } else {
            // Downward (Miller) recurrence from an even starting index m,
            // accumulating the terms of the normalisation identity
            // J0 + 2*J2 + 2*J4 + ... = 1 (applied below as sum = 2*sum - bj).
            tox=2.0/ax;
            m=(int) (2*((n+(Math.sqrt(ACC*n)))/2));
            jsum=false;
            bjp=0.0;
            ans= 0.0;
            sum=0.0;
            bj=1.0;
            for (j=m;j>0;j--) {
                bjm=j*tox*bj-bjp;
                bjp=bj;
                bj=bjm;
                k = Math.getExponent(bj);
                // NOTE(review): dum is computed but never used, and
                // Math.pow(2, bj) (exponent equal to bj itself) looks
                // unintended — possibly a leftover from a scaling
                // experiment; confirm against the reference bessj.
                dum=bj/Math.pow(2, bj);
                if (k > IEXP) {
                    // Renormalise all accumulators to avoid overflow.
                    bj*=Math.pow(2, -IEXP);
                    bjp*=Math.pow(2, -IEXP);
                    ans*=Math.pow(2, -IEXP);
                    sum*=Math.pow(2, -IEXP);
                }
                if (jsum) sum += bj; // alternate: only even-order terms enter sum
                jsum=!jsum;
                if (j == n) ans=bjp; // capture the (unnormalised) J_n term
            }
            sum=2.0*sum-bj;
            ans /= sum;
        }
        // Sign for negative arguments. NOTE(review): J_n(-x) = (-1)^n J_n(x),
        // but this flips the sign for ALL negative x regardless of parity of n —
        // confirm this matches the intended contract.
        if (x < 0.0) return -ans ;
        else return ans ;
    }

    /** Rational/asymptotic approximation of J1(x) (Numerical Recipes bessj1). */
    private static double bessj1(double x){
        double ax,z,xx,y,ans,ans1,ans2;

        if ((ax=Math.abs(x)) < 8.0) {
            // Rational-function approximation for |x| < 8.
            y=x*x;
            ans1=x*(72362614232.0+y*(-7895059235.0+y*(242396853.1
                +y*(-2972611.439+y*(15704.48260+y*(-30.16036606))))));
            ans2=144725228442.0+y*(2300535178.0+y*(18583304.74
                +y*(99447.43394+y*(376.9991397+y*1.0))));
            ans=ans1/ans2;
        } else {
            // Asymptotic expansion for |x| >= 8.
            z=8.0/ax;
            y=z*z;
            xx=ax-2.356194491; // ax - 3*pi/4
            ans1=1.0+y*(0.183105e-2+y*(-0.3516396496e-4
                +y*(0.2457520174e-5+y*(-0.240337019e-6))));
            ans2=0.04687499995+y*(-0.2002690873e-3
                +y*(0.8449199096e-5+y*(-0.88228987e-6
                +y*0.105787412e-6)));
            ans=Math.sqrt(0.636619772/ax)*(Math.cos(xx)*ans1-z*Math.sin(xx)*ans2);
            if (x < 0.0) ans = -ans; // J1 is an odd function
        }
        return ans;
    }

    /** Rational/asymptotic approximation of J0(x) (Numerical Recipes bessj0). */
    private static double bessj0(double x){
        double ax,z,xx,y,ans,ans1,ans2;

        if ((ax=Math.abs(x)) < 8.0) {
            // Rational-function approximation for |x| < 8.
            y=x*x;
            ans1=57568490574.0+y*(-13362590354.0+y*(651619640.7
                +y*(-11214424.18+y*(77392.33017+y*(-184.9052456)))));
            ans2=57568490411.0+y*(1029532985.0+y*(9494680.718
                +y*(59272.64853+y*(267.8532712+y*1.0))));
            ans=ans1/ans2;
        } else {
            // Asymptotic expansion for |x| >= 8.
            z=8.0/ax;
            y=z*z;
            xx=ax-0.785398164; // ax - pi/4
            ans1=1.0+y*(-0.1098628627e-2+y*(0.2734510407e-4
                +y*(-0.2073370639e-5+y*0.2093887211e-6)));
            ans2 = -0.1562499995e-1+y*(0.1430488765e-3
                +y*(-0.6911147651e-5+y*(0.7621095161e-6
                -y*0.934945152e-7)));
            ans=Math.sqrt(0.636619772/ax)*(Math.cos(xx)*ans1-z*Math.sin(xx)*ans2);
        }
        return ans;
    }
}
import React, { useEffect, useState } from 'react';
import { RouteComponentProps, withRouter } from 'react-router-dom';
import BigNumber from 'bignumber.js';
import commaNumber from 'comma-number';
import { Row, Col, Icon } from 'antd';
import styled from 'styled-components';
import { connect } from 'react-redux';
import MainLayout from 'containers/Layout/MainLayout';
import * as constants from 'utilities/constants';
import { currencyFormatter, formatApy } from 'utilities/common';
import { uid } from 'react-uid';
import { Setting } from 'types';
import { useMarkets } from 'hooks/useMarkets';
import { State } from 'core/modules/initialState';

// Centers the markets table on the page.
const MarketWrapper = styled.div`
  width: 100%;
  display: flex;
  align-items: center;
  justify-content: center;
`;

// Card-style container holding the totals banner, the sortable header row and
// the per-market rows.
const TableWrapper = styled.div`
  position: relative;
  width: 100%;
  background: var(--color-bg-primary);
  border: 1px solid var(--color-bg-primary);
  box-sizing: content-box;
  box-shadow: 0 4px 4px rgba(0, 0, 0, 0.03);
  border-radius: 8px;
  margin: 20px 0;
  max-width: 1200px;

  .vai-apy {
    color: var(--color-green);
    font-size: 18px;
    margin: 0 80px;
    padding-bottom: 20px;
    font-weight: bold;
    border-bottom: 1px solid var(--color-bg-active);
  }

  @media (max-width: 768px) {
    width: 90%;
  }

  .total-info {
    background: var(--color-bg-active);
    border-radius: 16px;
    margin: 30px 80px;
    padding: 20px 40px;
    display: flex;
    justify-content: space-between;

    @media (max-width: 992px) {
      margin: 20px;
      padding: 20px;
      flex-direction: column;
    }

    .total-item {
      margin: 10px;
      width: 23%;

      @media (max-width: 992px) {
        width: 100%;
      }

      .prop {
        font-weight: 600;
        font-size: 20px;
        color: var(--color-text-secondary);
      }

      .value {
        font-weight: 600;
        font-size: 24px;
        color: var(--color-white);
        margin-top: 10px;
        overflow: hidden;
        text-overflow: ellipsis;
      }
    }
  }

  .table_header {
    padding: 20px 50px;
    border-bottom: 1px solid rgba(0, 0, 0, 0.05);

    > div {
      color: var(--color-white);
      font-weight: bold;
      cursor: pointer;

      img {
        width: 16px;
        height: 16px;
        margin: 0 10px;
      }
    }

    @media (max-width: 992px) {
      .total-supply,
      .supply-apy,
      .total-borrow,
      .borrow-apy,
      .liquidity,
      .price {
        display: none;
      }
    }
  }

  .table_content {
    padding: 0 30px;

    .table_item {
      padding: 20px;
      border-bottom: 1px solid rgba(0, 0, 0, 0.05);

      &:hover {
        background-color: var(--color-bg-active);
        border-left: 2px solid var(--color-yellow);
      }

      div {
        color: var(--color-white);
        max-width: 100%;
      }

      .mobile-label {
        display: none;

        @media (max-width: 992px) {
          font-weight: bold;
          display: block;
        }
      }

      .item-title {
        font-weight: 600;
        font-size: 16px;
        color: var(--color-white);

        &.green {
          color: #9dd562;
        }

        &.red {
          color: #f9053e;
        }
      }

      .item-value {
        font-weight: 600;
        font-size: 14px;
        color: var(--color-text-secondary);
      }

      .market {
        .highlight {
          word-break: break-all;
          white-space: break-spaces;
        }

        .asset-img {
          width: 30px;
          height: 30px;
          margin-right: 10px;
        }
      }
    }
  }
`;

// comma-number formatter: "1234567.89" -> "1,234,567.89".
const format = commaNumber.bindWith(',', '.');

interface MarketProps extends RouteComponentProps {
  settings: Setting;
}

/**
 * All-markets page: a totals banner (supply / borrow / liquidity / treasury)
 * plus a sortable table with one row per market. Clicking a row navigates to
 * that market's detail page.
 */
function Market({ history, settings }: MarketProps) {
  const [totalSupply, setTotalSupply] = useState('0');
  const [totalBorrow, setTotalBorrow] = useState('0');
  const [availableLiquidity, setAvailableLiquidity] = useState('0');
  // field: column key currently sorted by; sort: 'asc' | 'desc'.
  const [sortInfo, setSortInfo] = useState({ field: '', sort: 'desc' });
  const { markets, treasuryTotalUSDBalance } = useMarkets();

  // Aggregates the per-market USD figures into the banner totals.
  // NOTE(review): supply and liquidity also add settings.vaultVaiStaked —
  // presumably to count VAI vault stakes in the totals; confirm.
  const getTotalInfo = async () => {
    const tempTS = (markets || []).reduce(
      (accumulator, market) =>
        new BigNumber(accumulator).plus(new BigNumber(market.totalSupplyUsd)),
      new BigNumber(0),
    );
    const tempTB = (markets || []).reduce(
      (accumulator, market) =>
        new BigNumber(accumulator).plus(new BigNumber(market.totalBorrowsUsd)),
      new BigNumber(0),
    );
    const tempAL = (markets || []).reduce(
      (accumulator, market) => new BigNumber(accumulator).plus(new BigNumber(market.liquidity)),
      new BigNumber(0),
    );
    setTotalSupply(
      tempTS
        .plus(settings.vaultVaiStaked || new BigNumber(0))
        .dp(2, 1)
        .toString(10),
    );
    setTotalBorrow(tempTB.dp(2, 1).toString(10));
    setAvailableLiquidity(
      tempAL
        .plus(settings.vaultVaiStaked || new BigNumber(0))
        .dp(2, 1)
        .toString(10),
    );
  };

  // Recompute the totals whenever the markets list arrives/changes.
  // NOTE(review): settings.vaultVaiStaked is read inside getTotalInfo but is
  // not in the dependency list — kept as-is to preserve behavior; confirm.
  useEffect(() => {
    if (markets) {
      getTotalInfo();
    }
  }, [markets]);

  // Toggles desc -> asc when the same column header is clicked twice.
  const handleSort = (field: $TSFixMe) => {
    setSortInfo({
      field,
      sort: sortInfo.field === field && sortInfo.sort === 'desc' ? 'asc' : 'desc',
    });
  };

  return (
    <MainLayout title="All Markets">
      <MarketWrapper>
        <TableWrapper>
          <div className="total-info">
            <div className="total-item">
              <div className="prop">Total Supply</div>
              <div className="value" title={format(totalSupply)}>
                ${format(totalSupply)}
              </div>
            </div>
            <div className="total-item">
              <div className="prop">Total Borrow</div>
              <div className="value" title={format(totalBorrow)}>
                ${format(totalBorrow)}
              </div>
            </div>
            <div className="total-item">
              <div className="prop">Available Liquidity</div>
              <div className="value" title={format(availableLiquidity)}>
                ${format(availableLiquidity)}
              </div>
            </div>
            <div className="total-item">
              <div className="prop">Total Treasury</div>
              <div className="value" title={format(treasuryTotalUSDBalance.dp(2).toString())}>
                ${format(treasuryTotalUSDBalance.dp(2).toString())}
              </div>
            </div>
          </div>
          {settings.vaiAPY && (
            <div className="vai-apy">
              VAI Staking APY: {settings.vaiAPY}%
            </div>
          )}
          <Row className="table_header">
            <Col xs={{ span: 24 }} lg={{ span: 2 }} className="market" />
            <Col xs={{ span: 6 }} lg={{ span: 4 }} className="total-supply right">
              <span onClick={() => handleSort('total_supply')}>
                Total Supply{' '}
                {sortInfo.field === 'total_supply' && (
                  <Icon type={sortInfo.sort === 'desc' ? 'caret-down' : 'caret-up'} />
                )}
              </span>
            </Col>
            <Col xs={{ span: 6 }} lg={{ span: 3 }} className="supply-apy right">
              <span onClick={() => handleSort('supply_apy')}>
                Supply APY{' '}
                {sortInfo.field === 'supply_apy' && (
                  <Icon type={sortInfo.sort === 'desc' ? 'caret-down' : 'caret-up'} />
                )}
              </span>
            </Col>
            <Col xs={{ span: 6 }} lg={{ span: 4 }} className="total-borrow right">
              <span onClick={() => handleSort('total_borrow')}>
                Total Borrow{' '}
                {sortInfo.field === 'total_borrow' && (
                  <Icon type={sortInfo.sort === 'desc' ? 'caret-down' : 'caret-up'} />
                )}
              </span>
            </Col>
            <Col xs={{ span: 6 }} lg={{ span: 3 }} className="borrow-apy right">
              <span onClick={() => handleSort('borrow_apy')}>
                Borrow APY{' '}
                {sortInfo.field === 'borrow_apy' && (
                  <Icon type={sortInfo.sort === 'desc' ? 'caret-down' : 'caret-up'} />
                )}
              </span>
            </Col>
            <Col xs={{ span: 6 }} lg={{ span: 4 }} className="liquidity right">
              <span onClick={() => handleSort('liquidity')}>
                Liquidity{' '}
                {sortInfo.field === 'liquidity' && (
                  <Icon type={sortInfo.sort === 'desc' ? 'caret-down' : 'caret-up'} />
                )}
              </span>
            </Col>
            <Col xs={{ span: 6 }} lg={{ span: 4 }} className="price right">
              <span onClick={() => handleSort('price')}>
                Price{' '}
                {sortInfo.field === 'price' && (
                  <Icon type={sortInfo.sort === 'desc' ? 'caret-down' : 'caret-up'} />
                )}
              </span>
            </Col>
          </Row>
          <div className="table_content">
            {markets &&
              (markets || [])
                // Precompute the combined (base + XVS distribution) APYs used
                // both for display and for sorting.
                .map(market => ({
                  ...market,
                  totalSupplyApy: new BigNumber(market.supplyApy).plus(
                    new BigNumber(market.supplyVenusApy),
                  ),
                  totalBorrowApy: new BigNumber(market.borrowVenusApy).plus(
                    new BigNumber(market.borrowApy),
                  ),
                }))
                // Default order: descending total borrows.
                .sort((a, b) => {
                  if (!sortInfo.field) {
                    return +new BigNumber(b.totalBorrowsUsd)
                      .minus(new BigNumber(a.totalBorrowsUsd))
                      .toString(10);
                  }
                  if (sortInfo.field === 'total_supply') {
                    return sortInfo.sort === 'desc'
                      ? +new BigNumber(b.totalSupplyUsd)
                          .minus(new BigNumber(a.totalSupplyUsd))
                          .toString(10)
                      : +new BigNumber(a.totalSupplyUsd)
                          .minus(new BigNumber(b.totalSupplyUsd))
                          .toString(10);
                  }
                  if (sortInfo.field === 'supply_apy') {
                    return sortInfo.sort === 'desc'
                      ? b.totalSupplyApy.minus(a.totalSupplyApy).toNumber()
                      : a.totalSupplyApy.minus(b.totalSupplyApy).toNumber();
                  }
                  if (sortInfo.field === 'total_borrow') {
                    return sortInfo.sort === 'desc'
                      ? +new BigNumber(b.totalBorrowsUsd)
                          .minus(new BigNumber(a.totalBorrowsUsd))
                          .toString(10)
                      : +new BigNumber(a.totalBorrowsUsd)
                          .minus(new BigNumber(b.totalBorrowsUsd))
                          .toString(10);
                  }
                  if (sortInfo.field === 'borrow_apy') {
                    return sortInfo.sort === 'desc'
                      ? b.totalBorrowApy.minus(a.totalBorrowApy).toNumber()
                      : a.totalBorrowApy.minus(b.totalBorrowApy).toNumber();
                  }
                  if (sortInfo.field === 'liquidity') {
                    return sortInfo.sort === 'desc'
                      ? +new BigNumber(b.liquidity).minus(new BigNumber(a.liquidity)).toString(10)
                      : +new BigNumber(a.liquidity).minus(new BigNumber(b.liquidity)).toString(10);
                  }
                  if (sortInfo.field === 'price') {
                    return sortInfo.sort === 'desc'
                      ? +new BigNumber(b.tokenPrice).minus(new BigNumber(a.tokenPrice)).toString(10)
                      : +new BigNumber(a.tokenPrice)
                          .minus(new BigNumber(b.tokenPrice))
                          .toString(10);
                  }
                  return 0;
                })
                .map(item => (
                  <Row
                    className="table_item pointer"
                    key={uid(item)}
                    onClick={() => history.push(`/market/${item.underlyingSymbol}`)}
                  >
                    <Col xs={{ span: 24 }} lg={{ span: 2 }} className="flex align-center market">
                      <img
                        className="asset-img"
                        src={
                          // @ts-expect-error ts-migrate(7053) FIXME: Element implicitly has an 'any' type because expre... Remove this comment to see the full error message
                          constants.CONTRACT_TOKEN_ADDRESS[item.underlyingSymbol.toLowerCase()]
                            ? // @ts-expect-error ts-migrate(7053) FIXME: Element implicitly has an 'any' type because expre... Remove this comment to see the full error message
                              constants.CONTRACT_TOKEN_ADDRESS[item.underlyingSymbol.toLowerCase()]
                                .asset
                            : null
                        }
                        alt="asset"
                      />
                      <p className="item-title">{item.underlyingSymbol}</p>
                    </Col>
                    <Col xs={{ span: 24 }} lg={{ span: 4 }} className="total-supply right">
                      <p className="mobile-label">Total Supply</p>
                      <p className="item-title">{currencyFormatter(item.totalSupplyUsd)}</p>
                      <p className="item-value">
                        {format(
                          new BigNumber(item.totalSupplyUsd)
                            .div(new BigNumber(item.tokenPrice))
                            .dp(0, 1)
                            .toString(10),
                        )}{' '}
                        {item.underlyingSymbol}
                      </p>
                    </Col>
                    <Col xs={{ span: 24 }} lg={{ span: 3 }} className="supply-apy right">
                      <p className="mobile-label">Supply APY</p>
                      <p className="item-title green">{formatApy(item.totalSupplyApy)}</p>
                      <p className="item-value">{formatApy(item.supplyVenusApy)}</p>
                    </Col>
                    <Col xs={{ span: 24 }} lg={{ span: 4 }} className="total-borrow right">
                      <p className="mobile-label">Total Borrow</p>
                      <p className="item-title">{currencyFormatter(item.totalBorrowsUsd)}</p>
                      <p className="item-value">
                        {format(
                          new BigNumber(item.totalBorrowsUsd)
                            .div(new BigNumber(item.tokenPrice))
                            .dp(0, 1)
                            .toString(10),
                        )}{' '}
                        {item.underlyingSymbol}
                      </p>
                    </Col>
                    <Col xs={{ span: 24 }} lg={{ span: 3 }} className="borrow-apy right">
                      <p className="mobile-label">Borrow APY</p>
                      <p className={`item-title${item.totalBorrowApy.lt(0) ? ' red' : ' green'}`}>
                        {formatApy(item.totalBorrowApy)}
                      </p>
                      <p className="item-value">{formatApy(item.borrowVenusApy)}</p>
                    </Col>
                    <Col xs={{ span: 24 }} lg={{ span: 4 }} className="liquidity right">
                      <p className="mobile-label">Liquidity</p>
                      <p className="item-title">{currencyFormatter(item.liquidity)}</p>
                    </Col>
                    <Col xs={{ span: 24 }} lg={{ span: 4 }} className="price right">
                      <p className="mobile-label">Price</p>
                      <p className="item-title">{currencyFormatter(item.tokenPrice)}</p>
                      <p className="item-value" />
                    </Col>
                  </Row>
                ))}
          </div>
        </TableWrapper>
      </MarketWrapper>
    </MainLayout>
  );
}

const mapStateToProps = ({ account }: State) => ({
  settings: account.setting,
});

export default connect(mapStateToProps)(withRouter(Market));
# Launch example.Main from the dependency-bundled ("fat") jar, listening on port 8080.
java -classpath L1.3-1.0-jar-with-dependencies.jar example.Main 8080
def fibonacci_sequence(n):
    """Return the first ``n`` Fibonacci numbers (1, 1, 2, 3, 5, ...).

    Args:
        n: Number of terms to produce.

    Returns:
        A list of the first ``n`` Fibonacci numbers; empty for ``n <= 0``.
        (Bug fix: the original returned ``[1, 1]`` for ``n <= 0`` because
        both seed appends ran before the empty ``range(2, n)`` loop.)
    """
    if n <= 0:
        return []
    sequence = [1]
    if n == 1:
        return sequence
    sequence.append(1)
    for i in range(2, n):
        sequence.append(sequence[i - 1] + sequence[i - 2])
    return sequence


if __name__ == "__main__":
    # Demo: print the first 10 terms (guarded so importing has no side effects).
    n = 10
    result = fibonacci_sequence(n)
    print(result)
#!/usr/bin/env bash
# Recreate and start the dev containers using the dev env file.
set -euo pipefail

# Default DIR to this script's directory when not provided by the caller.
DIR="${DIR:-$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)}"

# Bug fix: "-force-recreate" (single dash) is not a valid docker-compose flag,
# and --force-recreate is an option of the `up` subcommand, so it must follow it.
docker-compose \
  --env-file "$DIR/dev.env" \
  up \
  --force-recreate
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.Collections;
import java.util.TreeMap;

/**
 * BOJ 1467. Reads a digit string (first line) and the digits to erase
 * (second line), then prints the largest number formable from the remaining
 * digits, keeping each chosen occurrence at its original relative position.
 * (Fix: removed a stray dataset marker before the imports that broke
 * compilation, and the unused local {@code prev}.)
 */
public class Boj1467 {

    // numbers[d]: remaining count of digit d after subtracting the erased ones.
    private static int[] numbers = new int[10];
    // index[d]: ascending positions of digit d within the target string.
    @SuppressWarnings("unchecked")
    private static ArrayList<Integer>[] index = new ArrayList[10];
    // Chosen digits keyed by original position; TreeMap yields position order.
    private static TreeMap<Integer, Integer> result = new TreeMap<>();

    public static void main(String[] args) throws Exception {
        BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
        char[] target = br.readLine().toCharArray();
        char[] erase = br.readLine().toCharArray();

        System.out.println(removed(target, erase));
    }

    /**
     * Greedily keeps the highest digits first (9 down to 1), each at its
     * earliest unused positions, then emits the kept digits in position order.
     * NOTE(review): digit 0 is never emitted by the 9..1 loop — confirm that
     * remaining zeroes are meant to be dropped for this problem.
     */
    private static String removed(char[] t, char[] e) {
        for (int i = 0; i < 10; i++) {
            index[i] = new ArrayList<>();
        }
        for (int i = 0; i < t.length; i++) {
            int value = t[i] - '0';
            numbers[value]++;
            index[value].add(i);
        }
        for (int i = 1; i < 10; i++) {
            Collections.sort(index[i]); // already ascending by construction; kept as a safety net
        }
        for (int i = 0; i < e.length; i++) {
            numbers[e[i] - '0']--;
        }

        for (int i = 9; i > 0; i--) {
            if (numbers[i] == 0) continue;
            for (int idx : index[i]) {
                if (numbers[i] == 0) break;
                result.put(idx, i);
                numbers[i]--;
            }
        }

        StringBuilder sb = new StringBuilder();
        for (int key : result.keySet()) {
            sb.append(result.get(key));
        }
        return sb.toString();
    }
}
ActiveAdmin.register User do
  # Strong-parameters whitelist for the admin User forms.
  # Fix: permit_params expects a splat of attribute names; passing a literal
  # array nested the attributes one level deep in the generated
  # params.permit call, so they were not actually permitted.
  permit_params :name, :password, :email
end
package gv
package isi
package functional

import language.higherKinds

/**
 * Type class for containers `F[_]` that can be reduced to a single value.
 *
 * `traverse` is a curried left-fold: given an initial accumulator of type `Z`
 * and a combining function `(Z, A) => Z`, it collapses an `F[A]` into one `Z`.
 * NOTE(review): despite the name, this is a fold, not the applicative
 * `traverse` found in libraries such as cats — confirm the naming is intended.
 */
trait Traversable[F[_]] extends Any {
  def traverse[A, Z]: Z ⇒ ((Z, A) ⇒ Z) ⇒ F[A] ⇒ Z
}
require 'fog/storm_on_demand'
require 'fog/compute'

module Fog
  module Compute
    # Fog service definition for the StormOnDemand compute API.
    # Declares the credentials, models/collections and request methods, plus a
    # Mock backend for tests and the Real HTTP backend.
    class StormOnDemand < Fog::Service

      API_URL = 'https://api.stormondemand.com'

      # Credentials required/recognised by Fog when building a connection.
      requires :storm_on_demand_username, :storm_on_demand_password
      recognizes :storm_on_demand_auth_url

      # Model and collection registrations (lazily loaded from model_path).
      model_path 'fog/storm_on_demand/models/compute'
      model :config
      collection :configs
      model :image
      collection :images
      model :server
      collection :servers
      model :balancer
      collection :balancers
      model :private_ip
      collection :private_ips
      model :stat
      collection :stats
      model :template
      collection :templates

      # Request method registrations (lazily loaded from request_path).
      request_path 'fog/storm_on_demand/requests/compute'
      request :clone_server
      request :delete_server
      request :reboot_server
      request :list_servers
      request :get_server
      request :create_server
      request :resize_server
      request :remove_balancer_node
      request :add_balancer_node
      request :list_balancers
      request :list_configs
      request :list_templates
      request :list_images
      request :get_stats
      request :list_private_ips

      # In-memory backend used when Fog.mock! is enabled.
      class Mock

        # Per-username fake datastore, created on first access.
        def self.data
          @data ||= Hash.new do |hash, key|
            hash[key] = {
              :last_modified => {
                :images => {},
                :servers => {}
              },
              :images => {},
              :servers => {}
            }
          end
        end

        # Drops the whole fake datastore.
        def self.reset
          @data = nil
        end

        # Drops the fake data for the given key(s) only (defaults to all).
        def self.reset_data(keys=data.keys)
          for key in [*keys]
            data.delete(key)
          end
        end

        def initialize(options={})
          @storm_on_demand_username = options[:storm_on_demand_username]
        end

        # This user's slice of the shared fake datastore.
        def data
          self.class.data[@storm_on_demand_username]
        end

        def reset_data
          self.class.data.delete(@storm_on_demand_username)
        end

      end

      # HTTP backend that talks to the real StormOnDemand API.
      class Real

        def initialize(options={})
          # Note: ||= also writes the default back into the options hash.
          uri = URI.parse(options[:storm_on_demand_auth_url] ||= API_URL)
          @connection_options = options[:connection_options] || {}
          @host = uri.host
          @path = uri.path
          @persistent = options[:persistent] || false
          @port = uri.port
          @scheme = uri.scheme
          @storm_on_demand_username = options[:storm_on_demand_username]
          @storm_on_demand_password = options[:storm_on_demand_password]
          @connection = Fog::Connection.new("#{@scheme}://#{@host}:#{@port}", @persistent, @connection_options)
        end

        def reload
          @connection.reset
        end

        # Issues an authenticated JSON POST to the API; decodes the body and
        # raises on API-level errors (bodies containing 'full_error').
        def request(params)
          begin
            response = @connection.request(params.merge!({
              :headers => {
                'Content-Type' => 'application/json',
                # HTTP Basic auth; encode64 adds a trailing newline, hence chomp.
                'Authorization' => 'Basic ' << Base64.encode64("#{@storm_on_demand_username}:#{@storm_on_demand_password}").chomp
              }.merge!(params[:headers] || {}),
              :host => @host,
              :path => "#{@path}/#{params[:path]}",
              :expects => 200,
              :method => :post
            }))
          rescue Excon::Errors::HTTPStatusError => error
            # Re-raise 404s as the service-specific NotFound; pass others through.
            raise case error
            when Excon::Errors::NotFound
              Fog::StormOnDemand::Compute::NotFound.slurp(error)
            else
              error
            end
          end
          unless response.body.empty?
            response.body = Fog::JSON.decode(response.body)
          end
          # The API can return 200 with an error payload; surface it as a failure.
          if response.body.has_key?('full_error')
            raise(Fog::Compute::StormOnDemand::Error, response.body.inspect)
          end
          response
        end

      end
    end
  end
end
/**
 * <a href="http://www.openolat.org">
 * OpenOLAT - Online Learning and Training</a><br>
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); <br>
 * you may not use this file except in compliance with the License.<br>
 * You may obtain a copy of the License at the
 * <a href="http://www.apache.org/licenses/LICENSE-2.0">Apache homepage</a>
 * <p>
 * Unless required by applicable law or agreed to in writing,<br>
 * software distributed under the License is distributed on an "AS IS" BASIS, <br>
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br>
 * See the License for the specific language governing permissions and <br>
 * limitations under the License.
 * <p>
 * Initial code contributed and copyrighted by<br>
 * frentix GmbH, http://www.frentix.com
 * <p>
 */
package org.olat.ims.qti21.resultexport;

import org.olat.core.commons.services.export.ExportManager;
import org.olat.core.commons.services.pdf.PdfModule;
import org.olat.core.gui.UserRequest;
import org.olat.core.gui.components.form.flexible.FormItem;
import org.olat.core.gui.components.form.flexible.FormItemContainer;
import org.olat.core.gui.components.form.flexible.elements.SingleSelection;
import org.olat.core.gui.components.form.flexible.elements.TextElement;
import org.olat.core.gui.components.form.flexible.impl.FormBasicController;
import org.olat.core.gui.components.form.flexible.impl.FormEvent;
import org.olat.core.gui.components.util.SelectionValues;
import org.olat.core.gui.components.util.SelectionValues.SelectionValue;
import org.olat.core.gui.control.Controller;
import org.olat.core.gui.control.Event;
import org.olat.core.gui.control.WindowControl;
import org.olat.core.id.Identity;
import org.olat.core.util.FileUtils;
import org.olat.core.util.Formatter;
import org.olat.core.util.StringHelper;
import org.olat.course.nodes.IQTESTCourseNode;
import org.olat.course.run.environment.CourseEnvironment;
import org.olat.resource.OLATResource;
import org.olat.user.UserManager;
import org.springframework.beans.factory.annotation.Autowired;

/**
 * Form controller that starts an export of QTI 2.1 test results for a set of
 * assessed identities of a course node, optionally rendering PDFs of the
 * results. On submit it queues a {@link QTI21ResultsExportTask} via the
 * {@link ExportManager}.
 *
 * Initial date: 1 févr. 2022<br>
 * @author srosse, <EMAIL>, http://www.frentix.com
 *
 */
public class QTI21NewExportController extends FormBasicController {

	// Title proposed to the user; recomputed when the with/without-PDF option changes.
	private String defaultTitle;
	// Editable export title (mandatory).
	private TextElement titleEl;
	// Card selection: export with or without PDFs (only visible when the PDF module is enabled).
	private SingleSelection withPdfEl;

	// The identities whose results are exported, plus filter metadata.
	private final IdentitiesList identities;
	private final CourseEnvironment courseEnv;
	private final IQTESTCourseNode courseNode;

	@Autowired
	private PdfModule pdfModule;
	@Autowired
	private UserManager userManager;
	@Autowired
	private ExportManager exportManager;

	public QTI21NewExportController(UserRequest ureq, WindowControl wControl,
			CourseEnvironment courseEnv, IQTESTCourseNode courseNode, IdentitiesList identities) {
		super(ureq, wControl);
		this.identities = identities;
		this.courseEnv = courseEnv;
		this.courseNode = courseNode;

		initForm(ureq);
	}

	@Override
	protected void initForm(FormItemContainer formLayout, Controller listener, UserRequest ureq) {
		setFormTitle("new.export.title");

		// "wo" = without PDFs, "with" = render result PDFs as part of the export.
		SelectionValues withPdfValues = new SelectionValues();
		withPdfValues.add(new SelectionValue("wo", translate("export.wo.option"), translate("export.wo.option.desc")));
		withPdfValues.add(new SelectionValue("with", translate("export.with.option"), translate("export.with.option.desc")));
		withPdfEl = uifactory.addCardSingleSelectHorizontal("export.with", formLayout,
				withPdfValues.keys(), withPdfValues.values(), withPdfValues.descriptions(), null);
		withPdfEl.addActionListener(FormEvent.ONCHANGE);
		withPdfEl.setElementCssClass("o_radio_cards_lg");
		withPdfEl.setVisible(pdfModule.isEnabled());
		withPdfEl.select("wo", true);

		// Static summary of how many participants are included.
		String text;
		int numOfUsers = identities.getIdentities().size();
		if(identities.isAll()) {
			text = translate("export.participants.all");
		} else if(numOfUsers <= 1) {
			text = translate("export.participants.num.singular", Integer.toString(numOfUsers));
		} else {
			text = translate("export.participants.num.plural", Integer.toString(numOfUsers));
		}
		uifactory.addStaticTextElement("export.participants", "export.participants", text, formLayout);

		defaultTitle = getDefaultTitle(ureq, false);
		titleEl = uifactory.addTextElement("export.title", 64, defaultTitle, formLayout);
		titleEl.setMandatory(true);

		uifactory.addFormSubmitButton("export", formLayout);
	}

	@Override
	protected boolean validateFormLogic(UserRequest ureq) {
		boolean allOk = super.validateFormLogic(ureq);

		// The export title is mandatory (whitespace-only is rejected).
		titleEl.clearError();
		if(!StringHelper.containsNonWhitespace(titleEl.getValue())) {
			titleEl.setErrorKey("form.legende.mandatory", null);
			allOk &= false;
		}

		return allOk;
	}

	@Override
	protected void formInnerEvent(UserRequest ureq, FormItem source, FormEvent event) {
		// Refresh the proposed title when the PDF option changes.
		// NOTE(review): this overwrites any title the user already typed — confirm intended.
		if(withPdfEl == source) {
			titleEl.setValue(getDefaultTitle(ureq, isWithPdfs()));
		}
		super.formInnerEvent(ureq, source, event);
	}

	@Override
	protected void formOK(UserRequest ureq) {
		if (!identities.isEmpty()) {
			// Queue the export as a background task on the course resource.
			boolean withPdfs = isWithPdfs();
			OLATResource resource = courseEnv.getCourseGroupManager().getCourseResource();
			String title = titleEl.getValue();
			String description = buildDescription();
			String filename = FileUtils.normalizeFilename(title);
			QTI21ResultsExportTask task = new QTI21ResultsExportTask(resource, courseNode,
					identities.getIdentities(), title, description, filename,
					identities.isWithNonParticipants(), withPdfs, getLocale());
			exportManager.startExport(task, getIdentity(), resource, courseNode.getIdent());
		} else {
			showWarning("error.no.assessed.users");
		}
		fireEvent(ureq, Event.DONE_EVENT);
	}

	/** True when the PDF option is visible (module enabled) and "with" is selected. */
	private boolean isWithPdfs() {
		return withPdfEl.isVisible() && "with".equals(withPdfEl.getSelectedKey());
	}

	/** Builds the proposed export title from participant count, date and PDF option. */
	private String getDefaultTitle(UserRequest ureq, boolean withPdf) {
		String date = Formatter.getInstance(getLocale()).formatDate(ureq.getRequestTimestamp());
		int numOfUsers = identities.getIdentities().size();
		String[] args = { Integer.toString(numOfUsers), date };

		String title;
		if(withPdf) {
			if(identities.isAll()) {
				title = translate("export.title.pdf.all", args);
			} else if (numOfUsers <= 1) {
				title = translate("export.title.pdf.participant", args);
			} else {
				title = translate("export.title.pdf.participants", args);
			}
		} else if(identities.isAll()) {
			title = translate("export.title.all", args);
		} else if (numOfUsers <= 1) {
			title = translate("export.title.participant", args);
		} else {
			title = translate("export.title.participants", args);
		}
		return title;
	}

	/**
	 * Builds the export description: participant count, the human-readable
	 * filter values (comma separated) and the display names of all participants.
	 */
	private String buildDescription() {
		int numOfIdentities = identities.getNumOfIdentities();

		StringBuilder filters = new StringBuilder(128);
		if(identities.getHumanReadableFiltersValues() != null) {
			for(String filter:identities.getHumanReadableFiltersValues()) {
				if(filters.length() > 0) {
					filters.append(", ");
				}
				filters.append(filter);
			}
		}

		StringBuilder participants = new StringBuilder(numOfIdentities + 32);
		for(Identity participant:identities.getIdentities()) {
			if(participants.length() > 0) {
				participants.append("; ");
			}
			participants.append(userManager.getUserDisplayName(participant));
		}

		String[] args = { Integer.toString(numOfIdentities), filters.toString(), participants.toString() };

		// Pick the i18n key matching filtered/unfiltered and singular/plural.
		String i18nKey;
		if(identities.getHumanReadableFiltersValues() != null && !identities.getHumanReadableFiltersValues().isEmpty()) {
			if(numOfIdentities <= 1) {
				i18nKey = "export.description.participant.filter";
			} else {
				i18nKey = "export.description.participants.filter";
			}
		} else {
			if(numOfIdentities <= 1) {
				i18nKey = "export.description.participant";
			} else {
				i18nKey = "export.description.participants";
			}
		}
		return translate(i18nKey, args);
	}
}
// Doxygen navigation-tree data for OptimizeInversePermutes.hpp:
// entries are [display name, target page, sub-entries anchor or null].
// This file is generated by Doxygen — do not edit by hand.
var _optimize_inverse_permutes_8hpp =
[
    [ "OptimizeInversePermutesImpl", "classarmnn_1_1optimizations_1_1_optimize_inverse_permutes_impl.xhtml", "classarmnn_1_1optimizations_1_1_optimize_inverse_permutes_impl" ],
    [ "OptimizeInversePermutes", "_optimize_inverse_permutes_8hpp.xhtml#aa31127c77d2117f78d43ca2958dcae19", null ],
    [ "OptimizeInverseTransposes", "_optimize_inverse_permutes_8hpp.xhtml#a2f9d1a13be2ac1c4213729a0ef181fc0", null ]
];
// Interactive build script: prompts for a client configuration, then
// initialises a Discord client, applies the selected patches, and writes
// the finished build into dist/<channel>/<platform>.
import { dirname, join } from 'path';
import { fileURLToPath } from 'url';
import { rmSync, readFileSync, readdirSync } from 'fs';

import Inquirer from 'inquirer';

import init from './init.js';
import final from './final.js';

const __dirname = dirname(fileURLToPath(import.meta.url));

const buildPath = join(__dirname, '..', 'build');
const distPath = join(__dirname, '..', 'dist');

// Patches ticked by default in the checkbox prompt below.
const defaultPatches = [ 'gooseupdate', 'portable', 'branding_files' ];
// Every file in ./patches (extension stripped) is an available patch module.
const allPatches = readdirSync(join(__dirname, 'patches')).map((x) => x.split('.').slice(0, -1).join('.'));

let { channel, platform, name, source, patches } = await Inquirer.prompt([
  {
    type: 'input',
    name: 'name',
    default: 'goosestandalone',
    message: 'Client name'
  },
  {
    type: 'list',
    loop: false,
    name: 'platform',
    default: 'linux',
    message: 'Discord platform',
    choices: [ 'linux', 'windows' ]
  },
  {
    type: 'list',
    loop: false,
    name: 'channel',
    default: 'canary',
    message: 'Discord channel',
    choices: [ 'stable', 'ptb', 'canary' ]
  },
  {
    type: 'list',
    loop: false,
    name: 'source',
    default: 'download',
    message: 'Discord client source',
    choices: [ 'download', 'local' ]
  },
  {
    type: 'checkbox',
    name: 'patches',
    message: 'Client patches',
    choices: allPatches.map((x) => ({
      checked: defaultPatches.includes(x),
      name: x
    }))
  }
]);

console.log('\nInitialising...');

const dirs = await init(platform, channel, source, buildPath);

const buildInfo = JSON.parse(readFileSync(join(dirs.basePath, 'resources', 'build_info.json'), 'utf8'));

console.log('\nPatching...');

const extraInfo = {
  channel,
  name,
  platform,
  buildInfo
};

// Each patch module exports default(dirs, extraInfo); apply them in the
// order they were selected.
for (const m of patches) {
  console.log(m);

  const exports = await import(`./patches/${m}.js`);
  await exports.default(dirs, extraInfo);
}

console.log('\n\nFinalising...');

// Windows builds live in an app-0.0.0 subdirectory; Linux builds do not.
const finalPath = join(distPath, channel, platform, platform === 'windows' ? `app-0.0.0` : '');
rmSync(finalPath, { recursive: true, force: true });

await final(dirs, extraInfo, finalPath);
#!/bin/bash
# Install development tooling (live-reload runners, debugger, pretty-printers,
# progress bars) into the local Go workspace.
#
# Stop on the first failed install so a broken fetch is not silently skipped.
set -e

# dev
go get -v -u github.com/divan/gofresh
go get -v -u github.com/gravityblast/fresh
go get -v -u github.com/derekparker/delve/cmd/dlv
# go-spew was previously fetched twice (once with -u -v, once with -u only);
# a single update-install is sufficient.
go get -v -u github.com/davecgh/go-spew/spew
go get -v github.com/smartystreets/goconvey
go get -v -u github.com/k0kubun/pp
go get -v -u github.com/gosuri/uiprogress
go get -v -u github.com/vbauerster/mpb
<filename>node_modules/@reactivex/rxjs/dist/amd/scheduler/queue.js
// Compiled AMD build artifact shipped with RxJS: exposes the shared
// QueueScheduler singleton as `queue`. Generated output — do not edit by hand.
define(["require", "exports", './QueueScheduler'], function (require, exports, QueueScheduler_1) {
    "use strict";
    exports.queue = new QueueScheduler_1.QueueScheduler();
});
//# sourceMappingURL=queue.js.map
(function () {
    'use strict'

    // Generates a syntactically valid Norwegian national identity number
    // (fødselsnummer): DDMMYY + three individual digits + two mod-11 check
    // digits. If a check digit comes out as 10 the result string is longer
    // than 11 characters, so the function retries recursively.
    var fnr = () => {
        const randomDate = randomDateBetween(new Date(1854, 1), new Date());
        const dateISO = randomDate.toISOString();
        const individsiffer = individsifferAsString(randomIndividsiffer(randomDate.getFullYear()));
        // Single digits pulled out of the ISO string "YYYY-MM-DDT…".
        const d1 = dateISO.substring(8, 9);
        const d2 = dateISO.substring(9, 10);
        const m1 = dateISO.substring(5, 6);
        const m2 = dateISO.substring(6, 7);
        const aa1 = dateISO.substring(2, 3);
        const aa2 = dateISO.substring(3, 4);
        const i1 = individsiffer.substring(0, 1);
        const i2 = individsiffer.substring(1, 2);
        const i3 = individsiffer.substring(2, 3);
        // Weighted mod-11 check digits; the single-character strings are
        // coerced to numbers by the multiplications.
        const k1 = elevenGivesZero(11 - ((3 * d1 + 7 * d2 + 6 * m1 + 1 * m2 + 8 * aa1 + 9 * aa2 + 4 * i1 + 5 * i2 + 2 * i3) % 11));
        const k2 = elevenGivesZero(11 - ((5 * d1 + 4 * d2 + 3 * m1 + 2 * m2 + 7 * aa1 + 6 * aa2 + 5 * i1 + 4 * i2 + 3 * i3 + 2 * k1) % 11));
        const ret = "" + d1 + d2 + m1 + m2 + aa1 + aa2 + i1 + i2 + i3 + k1 + k2;
        if (ret.length === 11) return ret;
        else return fnr();
    };

    // A remainder of 0 yields 11 - 0 = 11, which maps to check digit 0.
    var elevenGivesZero = (number) => {
        if (number === 11) {
            return 0;
        }
        return number;
    }

    var el = id => document.getElementById("" + id);

    var randomDateBetween = (d1, d2) => new Date(d1.getTime() + Math.random() * (d2.getTime() - d1.getTime()));

    // Individual-number range depends on the birth century.
    // NOTE(review): getRandomInt's upper bound is exclusive, so the top value
    // of each range (749/499/999) is never produced — confirm whether these
    // ranges were meant to be inclusive.
    var randomIndividsiffer = year => {
        if (year >= 1854 && year <= 1899) return getRandomInt(500, 749);
        if (year >= 1900 && year <= 1999) return getRandomInt(0, 499);
        if (year >= 2000 && year <= 2039) return getRandomInt(500, 999);
    };

    // Random integer in [min, max).
    var getRandomInt = (min, max) => {
        min = Math.ceil(min);
        max = Math.floor(max);
        return Math.floor(Math.random() * (max - min)) + min;
    }

    // Zero-pad the individual number to exactly three digits.
    var individsifferAsString = individsiffer => {
        if (individsiffer <= 9) {
            return "00" + individsiffer;
        } else if (individsiffer <= 99) {
            return "0" + individsiffer;
        } else if (individsiffer > 99) {
            return "" + individsiffer;
        }
    };

    el("fnr").innerHTML = fnr();
})();
import nltk
from collections import Counter
from nltk.tokenize import word_tokenize
from nltk.corpus import stopwords


# Function to get the trending words of a topic
def top_10_trending_words(text):
    """Return the 10 most frequent non-stopword tokens of ``text``.

    Args:
        text: The text to analyse.

    Returns:
        A list of up to 10 ``(word, count)`` tuples, most frequent first
        (ties keep first-seen order, matching the previous stable sort).

    Note: tokenisation keeps punctuation tokens and is case-sensitive,
    exactly as before.
    """
    # Tokenize into words
    tokens = word_tokenize(text)

    # Build the stopword set once; the previous version called
    # stopwords.words('english') (which re-reads the corpus) and did a
    # linear membership scan for every single token.
    stop_words = set(stopwords.words('english'))
    words = [word for word in tokens if word not in stop_words]

    # Counter replaces the manual dict-building and sorting.
    return Counter(words).most_common(10)


# Demonstration
text = "Yesterday was a beautiful day. The sky was clear with bright sunshine."
print("The top 10 trending words of the text are:", top_10_trending_words(text))
<filename>src/whoosh/minterm.py # Copyright 2021 <NAME> and <NAME>. All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY MATT CHAPUT ``AS IS'' AND ANY EXPRESS OR # IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF # MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO # EVENT SHALL MATT CHAPUT OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, # OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF # LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING # NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, # EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # # The views and conclusions contained in the software and documentation are # those of the authors and should not be interpreted as representing official # policies, either expressed or implied, of Matt Chaput. """ Minterm for term correlated based algorithms. 
""" from collections import namedtuple, defaultdict from functools import lru_cache from math import sqrt from typing import List, Set, Dict from whoosh.weighting_schema import IDF, TF Correlation = namedtuple("Correlation", "keyword cir") Minterm = namedtuple("Minterm", "correlations id") def __build_term_representation(minterm_db, minterms): term_repr = defaultdict(lambda: [0.0] * len(minterms)) normalization = defaultdict(float) count = 0 for minterm in sorted(minterms, key=lambda x: x.id): for correlation in minterm.correlations: term_repr[correlation.keyword][count] = correlation.cir normalization[correlation.keyword] += correlation.cir ** 2 count += 1 for term in term_repr: norm = normalization[term] if normalization[term] != 0 else 1.0 term_repr[term] = [score / sqrt(norm) if sqrt(norm) != 0 else 0.0 for score in term_repr[term]] minterm_db.set(term, term_repr[term]) def __build_document_to_minterm_mapping(docs_to_minterm, minterms, document_term_correlations): mtid = 0 for doc, correlations in document_term_correlations.items(): flag = True for minterm in minterms: if {correlation.keyword for correlation in minterm.correlations} == \ {correlation.keyword for correlation in correlations}: flag = False new_correlations = set() for act_cor in minterm.correlations: for oth_cor in correlations: if oth_cor.keyword == act_cor.keyword: new_correlations.add(Correlation(act_cor.keyword, act_cor.cir + oth_cor.cir)) minterms[minterms.index(minterm)] = Minterm(new_correlations, minterm.id) docs_to_minterm.set(doc, minterm.id) if flag: minterms.append(Minterm(correlations, mtid)) docs_to_minterm.set(doc, mtid) mtid += 1 def __build_term_correlations_by_document(all_docs, searcher, fieldname, tf_schema, idf_schema, doc_minterm: Dict[int, Set[Correlation]]): for docnum in all_docs: matcher = searcher.vector_as("weight", docnum, fieldname) for term, freq in matcher: max_weight = searcher.term_info(fieldname, term).max_weight() tf = tf_schema(freq, max_weight) idf = 
searcher.idf(fieldname, term, idf_schema) if docnum not in doc_minterm: doc_minterm[docnum] = set() doc_minterm[docnum].add(Correlation(term, tf * idf)) def exist_minterm() -> bool: try: import diskcache return True except ImportError: return False @lru_cache(maxsize=None) def get_minterm(mdb, term): try: import diskcache as dc minterm_db = "tmp/{}/minterm_db".format(mdb) minterm = __get_from_cache(minterm_db, term) # convert bytes to dict if not minterm: raise KeyError(f"Minterm for {term} not found.") return minterm except ImportError: raise Exception("Diskcache is required to use Minterms.extract()") @lru_cache(maxsize=None) def get_minterm_match(mdb, match_id): try: import diskcache as dc docs_to_minterm = "tmp/{}/docs_to_minterm".format(mdb) doc_idx = __get_from_cache(docs_to_minterm, match_id) # convert bytes to dict if not doc_idx: raise KeyError(f"Minterm for {match_id} not found.") return doc_idx except ImportError: raise Exception("Diskcache is required to use Minterms.extract()") @lru_cache(maxsize=None) def __get_from_cache(cache_name, term): import diskcache as dc return dc.Cache(cache_name).get(term) def index_minterms(mdb, fieldname, ix, tf_schema=TF.frequency, idf_schema=IDF.inverse_frequency): """ Build minterms for a certain fieldname. It must be executed after all indexing. 
""" try: import diskcache as dc docs_to_minterm = dc.Cache("tmp/{}/docs_to_minterm".format(mdb)) # Dict[int, int] minterm_db = dc.Cache("tmp/{}/minterm_db".format(mdb)) minterms: List[Minterm] = [] document_term_correlations: Dict[int, Set[Correlation]] = dict() with ix.searcher() as searcher: doc_ids = searcher.ixreader.all_doc_ids() # build document_term_correlations __build_term_correlations_by_document(doc_ids, searcher, fieldname, tf_schema, idf_schema, document_term_correlations) # build docs_to_minterm and minterms __build_document_to_minterm_mapping(docs_to_minterm, minterms, document_term_correlations) # build minterm_db __build_term_representation(minterm_db, minterms) except ImportError: raise Exception("Diskcache is required to use Minterms.index()")
<filename>utilities/TileSetImporterApp.java import java.util.Scanner; import java.io.FileNotFoundException; /** This program is for taking a tile set made * with Tiled and generating Java code containing * the tile set data. It is a simple command line * app. It asks the user to enter in the file path * of the tile set and then the path to generate * the code at. * @see TileSet * @see TileSetReader * */ public class TileSetImporterApp { /** The entry point of the program. * Prompts the user for two paths, * a tile set input path and a source * file output path. * */ public static void main(String[] args) { Scanner scanner = new Scanner(System.in); System.out.print("Enter path to .tsx file (tile set file): "); String tileSetPath = scanner.next(); String sourcePath = "GeneratedTileSet.java"; processTileSet(tileSetPath, sourcePath); } /** Here we run the rest of the program * after we've gotten the tile set path and * the Java source file path. * @param tileSetPath The path of the tile set file to read. * @param sourcePath The path of the Java file to generate. * @return True on success, false on failure. * */ private static boolean processTileSet(String tileSetPath, String sourcePath) { TileSet tileSet = new TileSet(); TileSetReader tileSetReader = new TileSetReader(tileSet); try { tileSetReader.readFromFile(tileSetPath); TileSetWriter writer = new TileSetWriter(sourcePath); writer.write(tileSet); } catch (FileNotFoundException e) { System.err.println(e); } catch (Exception e) { e.printStackTrace(); } return true; } }
<gh_stars>0 var path = require('path'); module.exports = function (fox) { fox.load(path.join(__dirname, '/lib')); };
// Pin the navigation bar to the top of the viewport once the page has
// scrolled past the bar's original position.
var navbar = document.getElementById("nav-bar");
var offset = navbar.offsetTop;

window.onscroll = function () {
  myFunction();
};

// Kept as a named global so existing callers/handlers still resolve it.
function myFunction() {
  // toggle(class, force) adds the class when past the threshold and
  // removes it otherwise — same effect as the former add/remove branches.
  navbar.classList.toggle("stick-on-top", window.pageYOffset >= offset);
}
echo "Conducting unit test" jest --coverage echo "Conducting end to end test" cd tests/end2end newman run bookmarks-collection.postman_collection.json cd ../../
#!/usr/bin/env python # encoding: utf-8 # # Copyright (c) 2008 <NAME> All rights reserved. # """ """ #__version__ = "$Id$" #end_pymotw_header def recurse(level): print 'recurse(%s)' % level if level: recurse(level-1) return def not_called(): print 'This function is never called.'
// NOTE(review): U256::zero() is called below but no such method is visible
// here — presumably provided elsewhere; confirm this compiles in context.
struct U256;

// Mutable interpreter state threaded through executeInstruction.
struct State {
    stack: Vec<U256>,
    return_data: Vec<u8>,
    message: Message,
}

struct Message {
    depth: u32,
    // Other message attributes
}

enum ExecutionResult {
    Success,
    DepthExceededError,
}

fn executeInstruction(state: &mut State) -> ExecutionResult {
    // NOTE(review): the stack push and return_data clear happen BEFORE the
    // depth limit check, so state is mutated even when DepthExceededError is
    // returned — confirm this ordering is intentional.
    state.stack.push(U256::zero());
    state.return_data.clear();
    // 1024 is the conventional call-depth limit; depths at or above it fail.
    if state.message.depth >= 1024 {
        return ExecutionResult::DepthExceededError;
    }
    ExecutionResult::Success
}
// Open a socket to the example endpoint and log its lifecycle events.
const ws = new WebSocket('ws://example.com/socket');

ws.onopen = function () {
  console.log('Successfully Connected');
};

ws.onmessage = function (msg) {
  console.log(msg);
};

ws.onclose = function () {
  console.log('Connection Closed');
};
# Assemble the Hildon .deb package: stage the skeleton, install the stripped
# binaries, drop Subversion metadata, and build with fakeroot.
rm -rf build-pkg
cp -r pkg-hildon build-pkg
mkdir -p build-pkg/usr/bin
mkdir -p build-pkg/usr/libexec
strip spectool_hildon usbcontrol
cp spectool_hildon build-pkg/usr/bin/spectool
cp usbcontrol build-pkg/usr/libexec/usbcontrol
# BUG FIX: the old `for x in \`find …\`; do rm -rf $x; done` word-split paths
# containing whitespace; -prune -exec deletes each .svn tree safely and
# avoids descending into directories that were just removed.
find build-pkg -name .svn -type d -prune -exec rm -rf {} +
fakeroot dpkg -b build-pkg spectool-armel.deb
"""CONFIG file for web""" # pylint: disable=too-few-public-methods import os import sys import logging import json from datetime import timedelta as td from celery.task.control import rate_limit BASE_DIR = os.path.abspath(os.path.dirname(__file__)) class Config(object): """ Sets Config defaults Attributes: SECRET_KEY: Gets the secret key for Config. TRAP_BAD_REQUEST_ERRORS: Establishes that bad request errors will be trapped, i.e. TRUE. SQLALCHEMY_COMMIT_ON_TEARDOWN: Establishes that upon teardown SQLAlchemy will commit. POSTS_PER_PAGE: Maximum posts per page. REDIS_CACHE_TIMEOUT: Time limit for the Redis cache. LOGGING_FORMAT: Establishes logging format. ERROR_LOGGING_LOCATION: Establishes where the errors are logged. ERROR_LOGGING_LEVEL: Establishes the error level as WARNING. ERROR_LOGGING_MAX_BYTES: Maximum amount of bytes for error logging. ERROR_LOGGING_BACKUP_COUNT: Number of error log backups. OAUTH_CREDENTIALS: Credentials for using Google OAuth MAIL_SERVER: Google server for sending emails MAIL_PORT: Port that emails are sent over MAIL_USE_TLS: Adds email encryption MAIL_USERNAME: Gmail username (email) MAIL_PASSWORD: <PASSWORD> BACKEND_MAIL_SUBJECT_PREFIX: Email subject prefix BACKEND_MAIL_SENDER: Email sender shown in email """ try: with open('/run/secrets/chamber_of_secrets') as secret_chamber: for line in secret_chamber: if 'FLASK_SECRET_KEY' in line: # Take the VAL part of ARG=VAL, strip newlines SECRET_KEY = line.split("=")[1].rstrip() try: SECRET_KEY except NameError: print('CANNOT FIND FLASK_SECRET_KEY IN CHAMBER') print('EXITING.') sys.exit(-1) except OSError as e: print('CANNOT FIND CHAMBER') print(e) print('EXITING.') sys.exit(-1) PREFERRED_URL_SCHEME = 'https' SESSION_COOKIE_SECURE = True CELERY_RESULT_BACKEND = 'redis://redis:6379' CELERY_TASK_RESULT_EXPIRES = td(seconds=1800) CELERY_BROKER_URL = 'redis://redis:6379' CELERY_ACKS_LATE = True CELERYD_PREFETCH_MULTIPLIER = 1 rate_limit = '4/m' TRAP_BAD_REQUEST_ERRORS = True 
SQLALCHEMY_COMMIT_ON_TEARDOWN = False # Log database requests that take a long time SQLALCHEMY_RECORD_QUERIES = True # Database query timeout in seconds SQLALCHEMY_DATABASE_QUERY_TIMEOUT = 0.05 POSTS_PER_PAGE = 20 MAX_API_DATA_PER_REQUEST = 1800 # cannot pull more than an hour for API REDIS_CACHE_TIMEOUT = 3600 * 24 * 3 LOGGING_FORMAT = ('%(asctime)s - %(name)s - %(levelname)s - %(message)s ' '[in %(pathname)s: line %(lineno)d]') ERROR_LOGGING_LOCATION = 'error_log.log' ERROR_LOGGING_LEVEL = logging.WARNING ERROR_LOGGING_MAX_BYTES = 25 * 1024 * 1024 ERROR_LOGGING_BACKUP_COUNT = 7 try: with open('/run/secrets/chamber_of_secrets') as secret_chamber: for line in secret_chamber: if 'FLASK_OAUTH_CREDENTIALS' in line: # Take the VAL part of ARG=VAL, strip newlines OAUTH_CREDENTIALS = json.loads(line.split("=")[1].rstrip()) try: OAUTH_CREDENTIALS except NameError: print('CANNOT FIND FLASK_OAUTH_CREDENTIALS IN CHAMBER') print('EXITING.') sys.exit(-1) except OSError as e: print('CANNOT FIND CHAMBER') print(e) print('EXITING.') sys.exit(-1) MAIL_SERVER = 'smtp.googlemail.com' MAIL_PORT = 587 MAIL_USE_TLS = True try: with open('/run/secrets/chamber_of_secrets') as secret_chamber: for line in secret_chamber: if 'FLASK_MAIL_USERNAME' in line: # Take the VAL part of ARG=VAL, strip newlines MAIL_USERNAME = line.split("=")[1].rstrip() try: MAIL_USERNAME except NameError: print('CANNOT FIND FLASK_MAIL_USERNAME IN CHAMBER') print('EXITING.') sys.exit(-1) except OSError as e: print('CANNOT FIND CHAMBER') print(e) print('EXITING.') sys.exit(-1) try: with open('/run/secrets/chamber_of_secrets') as secret_chamber: for line in secret_chamber: if 'FLASK_MAIL_DOMAIN' in line: # Take the VAL part of ARG=VAL, strip newlines MAIL_DOMAIN = line.split("=")[1].rstrip() try: MAIL_DOMAIN except NameError: print('CANNOT FIND FLASK_MAIL_DOMAIN IN CHAMBER') print('EXITING.') sys.exit(-1) except OSError as e: print('CANNOT FIND CHAMBER') print(e) print('EXITING.') sys.exit(-1) try: with 
open('/run/secrets/chamber_of_secrets') as secret_chamber: for line in secret_chamber: if 'FLASK_MAIL_PASSWORD' in line: # Take the VAL part of ARG=VAL, strip newlines MAIL_PASSWORD = line.split("=")[1].rstrip() try: MAIL_PASSWORD except NameError: print('CANNOT FIND FLASK_MAIL_PASSWORD IN CHAMBER') print('EXITING.') sys.exit(-1) except OSError as e: print('CANNOT FIND CHAMBER') print(e) print('EXITING.') sys.exit(-1) BACKEND_MAIL_SUBJECT_PREFIX = '[Backend]' BACKEND_MAIL_SENDER = 'Backend Admin' @staticmethod def init_app(app): """ For configuration specific initialization of the app. Args: app: The application. """ pass class LocalConfig(Config): """ Local defaults Attributes: DEBUG: Sets DEBUG status for error log while in local (True or False). SQLALCHEMY_DATABASE_URI: Sets the path to the databse. """ DEBUG = True try: with open('/run/secrets/chamber_of_secrets') as secret_chamber: for line in secret_chamber: if 'FLASK_LOCAL_DATABASE' in line: # Take the VAL part of ARG=VAL, strip newlines SQLALCHEMY_DATABASE_URI = line.split("=")[1].rstrip() try: SQLALCHEMY_DATABASE_URI except NameError: print('CANNOT FIND FLASK_LOCAL_DATABASE IN CHAMBER') print('EXITING.') sys.exit(-1) except OSError as e: print('CANNOT FIND CHAMBER') print(e) print('EXITING.') sys.exit(-1) class DevelopmentConfig(Config): """ Dev defaults Attributes: DEBUG: Sets DEBUG status for error log while in Dev (True or False). SQLALCHEMY_DATABASE_URI: Sets the path to the databse. 
""" DEBUG = True try: with open('/run/secrets/chamber_of_secrets') as secret_chamber: for line in secret_chamber: if 'FLASK_DEV_DATABASE' in line: # Take the VAL part of ARG=VAL, strip newlines SQLALCHEMY_DATABASE_URI = line.split("=")[1].rstrip() try: SQLALCHEMY_DATABASE_URI except NameError: print('CANNOT FIND FLASK_DEV_DATABASE IN CHAMBER') print('EXITING.') sys.exit(-1) except OSError as e: print('CANNOT FIND CHAMBER') print(e) print('EXITING.') sys.exit(-1) class TestingConfig(Config): """ Test defaults Attributes: TESTING: Sets TESTING status for error log while Testing (True/False). WTF_CSRF_ENABLED: Sets status for CSRF during testing (True or False). SQLALCHEMY_DATABASE_URI: Sets the path to the database. """ TESTING = True WTF_CSRF_ENABLED = False CELERY_ALWAYS_EAGER = True try: with open('/run/secrets/chamber_of_secrets') as secret_chamber: for line in secret_chamber: if 'FLASK_TEST_DATABASE' in line: # Take the VAL part of ARG=VAL, strip newlines SQLALCHEMY_DATABASE_URI = line.split("=")[1].rstrip() try: SQLALCHEMY_DATABASE_URI except NameError: print('CANNOT FIND FLASK_TEST_DATABASE IN CHAMBER') print('EXITING.') sys.exit(-1) except OSError as e: print('CANNOT FIND CHAMBER') print(e) print('EXITING.') sys.exit(-1) class ProductionConfig(Config): """ Production defaults Attributes: SQLALCHEMY_DATABASE_URI: Sets the path to the database. """ try: with open('/run/secrets/chamber_of_secrets') as secret_chamber: for line in secret_chamber: if 'FLASK_PRODUCTION_DATABASE' in line: # Take the VAL part of ARG=VAL, strip newlines SQLALCHEMY_DATABASE_URI = line.split("=")[1].rstrip() try: SQLALCHEMY_DATABASE_URI except NameError: print('CANNOT FIND FLASK_PRODUCTION_DATABASE IN CHAMBER') print('EXITING.') sys.exit(-1) except OSError as e: print('CANNOT FIND CHAMBER') print(e) print('EXITING.') sys.exit(-1) # pylint: disable=invalid-name config = { 'local': LocalConfig, 'development': DevelopmentConfig, 'testing': TestingConfig, 'production': ProductionConfig, }
import { Injectable } from '@angular/core';
import { HttpClient } from "@angular/common/http";
import { Observable } from "rxjs";

/** Thin wrapper around HttpClient for the placeholder comments endpoint. */
@Injectable({
  providedIn: 'root'
})
export class LoadDataService {

  // Endpoint queried by getApiData().
  api_url: string = "https://jsonplaceholder.typicode.com/comments";

  constructor(private http: HttpClient) { }

  /** Fetch all comments as a cold observable; subscribers trigger the GET. */
  getApiData(): Observable<any> {
    return this.http.get<any>(this.api_url);
  }
}
<gh_stars>0
// Type declarations for the admin gift-card API routes (generated .d.ts).
import "reflect-metadata";
import { GiftCard } from "../../../..";
import { DeleteResponse, PaginatedResponse } from "../../../../types/common";
// Route registrar: mounts the gift-card endpoints on the given app.
declare const _default: (app: any) => any;
export default _default;
// Default and allowed field/relation whitelists applied to gift-card queries.
export declare const defaultAdminGiftCardFields: string[];
export declare const defaultAdminGiftCardRelations: string[];
export declare const allowedAdminGiftCardFields: string[];
export declare const allowedAdminGiftCardRelations: string[];
// Response payload shapes for the single, delete, and list endpoints.
export declare type AdminGiftCardsRes = {
    gift_card: GiftCard;
};
export declare type AdminGiftCardsDeleteRes = DeleteResponse;
export declare type AdminGiftCardsListRes = PaginatedResponse & {
    gift_cards: GiftCard[];
};
export * from "./create-gift-card";
export * from "./list-gift-cards";
export * from "./update-gift-card";
import datetime
import math

from tensorflow.keras import callbacks
from tensorflow.keras.optimizers import Adam

from nn_project.data import get_data_generator, get_vocabs, MAX_ROWS_TRAIN
from nn_project.metrics import accuracy, word_error_rate
from nn_project.model import EncoderDecoder
from nn_project.utils import get_project_file


def train(
    data_limit=None,
    data_offset=None,
    validation_split=0.2,
    vocab_size=None,
    padding='post',
    cell_type='lstm',
    input_embedding_size=256,
    context_vector_size=1024,
    output_embedding_size=256,
    learning_rate=0.01,
    batch_size=128,
    epochs=1000,
    early_stopping=5,
    save_models=True,
    save_logs=True,
    save_freq=50,
    queue_size=10,
):
    """Train the EncoderDecoder translation model on the 'train' dataset.

    Splits the selected data window into training/validation generators,
    compiles the model with Adam and the project's accuracy/WER metrics,
    and fits it with the checkpoint/early-stopping/TensorBoard callbacks
    built by get_callbacks(). Returns the Keras History object.

    Args:
        data_limit / data_offset: Window of training rows to use; defaults
            cover the whole training set.
        validation_split: Fraction of the window reserved for validation.
        vocab_size: Optional cap on vocabulary size passed to get_vocabs.
        padding: Sequence padding mode forwarded to the data generators.
        cell_type: RNN cell type for the model (e.g. 'lstm').
        input_embedding_size / context_vector_size / output_embedding_size:
            Model dimensions.
        learning_rate, batch_size, epochs: Optimizer / fit settings.
        early_stopping: Patience in epochs, or None/False to disable.
        save_models, save_logs: Toggle checkpoint and TensorBoard callbacks.
        save_freq: Checkpoint/log frequency in batches (scaled by batch_size
            below because Keras counts samples for these callbacks).
        queue_size: max_queue_size for the generator pipeline.
    """
    data_offset = data_offset if data_offset is not None else 0
    data_limit = data_limit if data_limit is not None else MAX_ROWS_TRAIN
    # Never read past the end of the training set.
    data_limit = min(data_limit, MAX_ROWS_TRAIN - data_offset)

    vocab_en, vocab_cs, length_en, length_cs = get_vocabs(
        kind='train',
        limit=data_limit,
        offset=data_offset,
        vocab_size=vocab_size,
    )
    print(f'Vocabulary size: {len(vocab_en)} -> {len(vocab_cs)}')
    print(f'Sentence length: {length_en} -> {length_cs}')

    # Validation rows follow the training rows inside the same window.
    training_offset = data_offset
    training_limit = int(data_limit * (1.0 - validation_split))
    validation_offset = training_offset + training_limit
    validation_limit = data_limit - training_limit

    training_data = get_data_generator(
        kind='train',
        batch_size=batch_size,
        limit=training_limit,
        offset=training_offset,
        vocab_en=vocab_en,
        vocab_cs=vocab_cs,
        length_en=length_en,
        length_cs=length_cs,
        padding=padding,
    )
    validation_data = get_data_generator(
        kind='train',
        batch_size=batch_size,
        limit=validation_limit,
        offset=validation_offset,
        vocab_en=vocab_en,
        vocab_cs=vocab_cs,
        length_en=length_en,
        length_cs=length_cs,
        padding=padding,
    )

    model = EncoderDecoder(
        input_length=length_en,
        input_vocab_size=len(vocab_en),
        input_embedding_size=input_embedding_size,
        context_vector_size=context_vector_size,
        output_length=length_cs,
        output_vocab_size=len(vocab_cs),
        output_embedding_size=output_embedding_size,
        cell_type=cell_type,
        enable_masking=True,
    )
    model.compile(
        optimizer=Adam(learning_rate),
        loss='sparse_categorical_crossentropy',
        metrics=[accuracy, word_error_rate],
    )

    steps_per_epoch = math.ceil(training_limit / batch_size)
    validation_steps = math.ceil(validation_limit / batch_size)
    callback_list = get_callbacks(
        early_stopping=early_stopping,
        save_models=save_models,
        save_logs=save_logs,
        save_freq=save_freq*batch_size,
    )
    history = model.fit_generator(
        generator=training_data,
        steps_per_epoch=steps_per_epoch,
        epochs=epochs,
        callbacks=callback_list,
        validation_data=validation_data,
        validation_steps=validation_steps,
        max_queue_size=queue_size,
    )
    model.summary()
    return history


def get_callbacks(early_stopping, save_models, save_logs, save_freq):
    """Build the Keras callback list used by train().

    Includes (as enabled): EarlyStopping; timestamped checkpoints for the
    latest step, latest epoch, and the best val loss/accuracy/WER; and a
    TensorBoard logger. ``save_freq`` is already expressed in samples.
    """
    timestamp = datetime.datetime.now().strftime("%Y%m%d-%H%M%S")
    callback_list = []
    if early_stopping is not None and early_stopping is not False:
        callback = callbacks.EarlyStopping(patience=early_stopping)
        callback_list.append(callback)
    if save_models:
        latest = callbacks.ModelCheckpoint(
            filepath=get_project_file('models', f'{timestamp}-latest'),
            save_freq=save_freq,
        )
        latest_epoch = callbacks.ModelCheckpoint(
            filepath=get_project_file('models', f'{timestamp}-latest-epoch'),
        )
        best_loss = callbacks.ModelCheckpoint(
            filepath=get_project_file('models', f'{timestamp}-best-loss'),
            monitor='val_loss',
            save_best_only=True,
            mode='min',
        )
        best_acc = callbacks.ModelCheckpoint(
            filepath=get_project_file('models', f'{timestamp}-best-acc'),
            monitor='val_accuracy',
            save_best_only=True,
            mode='max',
        )
        best_wer = callbacks.ModelCheckpoint(
            filepath=get_project_file('models', f'{timestamp}-best-wer'),
            monitor='val_word_error_rate',
            save_best_only=True,
            mode='min',
        )
        callback_list += [latest, latest_epoch, best_loss, best_acc, best_wer]
    if save_logs:
        callback = callbacks.TensorBoard(
            log_dir=get_project_file('logs', f'{timestamp}'),
            update_freq=save_freq,
        )
        callback_list.append(callback)
    return callback_list


if __name__ == '__main__':
    train()
package com.company;

import java.util.Scanner;

public class Main {

    /**
     * Reads an integer from stdin and reports (in Turkish) whether it is a
     * perfect number, i.e. equal to the sum of its proper divisors.
     */
    public static void main(String[] args) {
        Scanner scanner = new Scanner(System.in);
        System.out.print("Bir sayı giriniz: ");
        int number = scanner.nextInt();

        // Sum every proper divisor (all divisors strictly below the number).
        int sum = 0;
        for (int divisor = 1; divisor < number; divisor++) {
            if (number % divisor == 0) {
                sum += divisor;
            }
        }

        String verdict = (sum == number)
                ? number + " mükemmel sayıdır."
                : number + " mükemmel sayı değildir.";
        System.out.println(verdict);
    }
}
package main;

/**
 * @author <NAME>
 *
 */
public class NByNMatrixOf0sAnd1s {

    public static void main(String[] args) {
        printMatrix(3);
    }

    /**
     * Prints a NxN matrix of random binary digits given the integer argument.
     *
     * @param n matrix size; must be a nonzero positive integer
     * @throws IllegalArgumentException if {@code n} is less than 1
     */
    public static void printMatrix(int n) {
        if (n < 1) {
            // BUG FIX: this used to print the error to stdout and call
            // System.exit(0) — a success exit status for a failure, and a
            // JVM kill from a reusable method. Throwing is recoverable and
            // reports the bad argument to the caller.
            throw new IllegalArgumentException("n must be a nonzero positive integer: " + n);
        }
        for (int row = 1; row <= n; row++) {
            for (int column = 1; column <= n; column++) {
                int bit = (int) (Math.random() * 2);
                System.out.print(bit + " ");
            }
            System.out.println();
        }
    }
}
// Running account balance in currency units.
// NOTE(review): double is lossy for money; real code should use BigDecimal
// or a scaled integer type.
double balance = 3500;
// Deposit 1000.
balance += 1000;
// Withdraw 500.
balance -= 500;
//balance is now 4000
import { create } from 'apisauce'
import Base64 from '../Base64'

// Builds an Open Library API client exposing a search request helper.
const initializeAPI = () => {
  const api = create({
    baseURL: 'https://openlibrary.org',
    timeout: 20000
  })

  // BUG FIX: `requestSearch` was assigned without a declaration. ES modules
  // run in strict mode, so calling initializeAPI() threw a ReferenceError
  // on this line.
  const requestSearch = (query) => {
    const params = {
      q: query
    }
    return api.get('/search.json', params)
  }

  return {
    requestSearch
  }
}

export default { initializeAPI }
import math


def triangle_area(a, b, c):
    """Compute the area of a triangle from its side lengths (Heron's formula).

    Args:
        a, b, c: Positive side lengths.

    Returns:
        The area as a float.

    Raises:
        ValueError: If any side is non-positive or the sides violate the
            triangle inequality. (Previously ``x ** 0.5`` on a negative
            intermediate silently returned a complex number.)
    """
    if a <= 0 or b <= 0 or c <= 0:
        raise ValueError("side lengths must be positive")
    # compute the semi-perimeter
    s = (a + b + c) / 2
    product = s * (s - a) * (s - b) * (s - c)
    if product < 0:
        raise ValueError(
            "side lengths %r do not form a valid triangle" % ((a, b, c),))
    # calculate the area
    return math.sqrt(product)


a = 6
b = 8
c = 10
print(triangle_area(a, b, c))  # prints 24.0
A possible solution is to use Monte Carlo integration to estimate the value of 𝜋. The algorithm samples points uniformly at random inside a square that encloses the circle and computes the fraction of sampled points that land inside the circle. That fraction approximates the ratio of the circle's area to the square's area, which for a circle inscribed in the square is exactly 𝜋/4; multiplying the observed fraction by 4 gives the estimate, and its accuracy improves as the number of sampled points grows.
import './restaurant-listings.css' import React from "react"; import {Badge} from "react-bootstrap"; import {useDispatch, useSelector} from "react-redux"; import {fetchRestaurantsByTypeAndZip} from "../../store/restaurants"; import {RestaurantCard} from "./RestaurantCard"; export const RestaurantListings = ({location}) => { console.log(location.state) console.log(location.state.categoryType) console.log(location.state.restaurantZip) const restaurants = useSelector((store) => { return store.restaurants ? store.restaurants : [] }) const dispatch = useDispatch() const initialEffects = () => { dispatch(fetchRestaurantsByTypeAndZip(location.state.categoryType, location.state.restaurantZip)) } React.useEffect(initialEffects, [dispatch]) return ( <> <div className="body"> <div className="container"> <div className="row d-flex justify-content-center"> <div className="col-lg-12 px-0"> {restaurants.map(restaurant => ( <RestaurantCard restaurant={restaurant} key={restaurant.restaurantId}/>)) } </div> </div> </div> </div> </> ) }
"use strict";
// NOTE(review): this file is TypeScript compiler output (see the
// sourceMappingURL footer). Prefer editing the RunTest.ts source; changes
// made here will be overwritten by the next build.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const chalk_1 = __importDefault(require("chalk"));
const fs = __importStar(require("fs-extra"));
const DetoxTemplate_1 = __importDefault(require("../presets/DetoxTemplate"));
const registerCommand_1 = require("../registerCommand");
// Locates the application that declares the named test.
// Returns [appName, app]; throws when no application declares it.
function findTest(config, test) {
    for (const appName in config.applications) {
        const app = config.applications[appName];
        for (const testName in app.tests) {
            if (test === testName) {
                return [appName, app];
            }
        }
    }
    throw new Error(`Couldn't find test: ${test}`);
}
// Creates the test app from its preset, builds it, runs the test, and
// optionally removes the generated app directory. Only the 'detox' preset
// is currently supported; any other preset value throws.
async function runTestAsync(config, options) {
    const [appName, app] = findTest(config, options.test);
    const test = app.tests[options.test];
    if (app.preset === 'detox') {
        console.log(`Using ${chalk_1.default.green('detox')} preset.`);
        const preset = new DetoxTemplate_1.default(app, appName, options.platform, options.configFile);
        console.log(`Creating test app in ${chalk_1.default.green(options.path)}.`);
        await preset.createApplicationAsync(options.path);
        console.log(`Building app.`);
        await preset.build(options.path, test);
        console.log(`Running tests.`);
        await preset.run(options.path, test);
        if (options.shouldBeCleaned) {
            console.log(`Cleaning.`);
            await fs.remove(options.path);
        }
    } else {
        throw new Error(`Unknown preset: ${app.preset}`);
    }
}
// Registers the `run-test` CLI command with its --test option.
exports.default = (program) => {
    registerCommand_1.registerCommand(program, 'run-test', runTestAsync).option('-t, --test [string]', 'Name of the test case to run.');
};
//# sourceMappingURL=RunTest.js.map
#!/bin/sh # # The olsr.org Optimized Link-State Routing daemon(olsrd) # Copyright (c) 2008, Hannes Gredler (hannes@gredler.at) # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions # are met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in # the documentation and/or other materials provided with the # distribution. # * Neither the name of olsr.org, olsrd nor the names of its # contributors may be used to endorse or promote products derived # from this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE # COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, # BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT # LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN # ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # # Visit http://www.olsr.org for more information. # # If you find this software useful feel free to make a donation # to the project. For more information see the website or contact # the copyright holders. # # # mk-tarball.sh # Create a release tarball based on the current VERS variable in the Makefile. 
#
# first determine the tarball name from the VERS variable in the Makefile
NAME=`grep -E "^VERS" ../Makefile | sed 's/^VERS..../olsrd-/;s/ *$//'`

# Refuse to continue with an empty name: otherwise the cleanup commands
# below would expand to "rm -rf /tmp/" and delete unrelated files.
if [ -z "$NAME" ]; then
    echo "ERROR: could not determine the version from ../Makefile" >&2
    exit 1
fi

#empty the directory in case it exists already
rm -rf "/tmp/$NAME"
mkdir "/tmp/$NAME"

# clean stuff up first (abort if we cannot reach the source root)
cd .. || exit 1
make uberclean

# sync the stuff to a working directory
rsync -a . "/tmp/$NAME/" --exclude=.hg* --exclude=*.rej --delete

cd /tmp/ || exit 1

echo "### creating /tmp/$NAME.tar.gz"
tar -czf "/tmp/$NAME.tar.gz" "$NAME"
md5sum "/tmp/$NAME.tar.gz"

echo "### creating /tmp/$NAME.tar.bz2"
tar -cjf "/tmp/$NAME.tar.bz2" "$NAME"
md5sum "/tmp/$NAME.tar.bz2"

#clean up
rm -rf "/tmp/$NAME"
const mongoose = require('mongoose');

// Shorthand for a required field of the given type.
const required = (type) => ({ type, required: true });

// Profile of a prospective adopter: matching preferences, household
// situation, contact/account details, and per-user app state.
const adopterSchema = new mongoose.Schema({
    // Matching preferences
    genderPref: required(String),
    agePref: required(Array),
    sizePref: required(Array),
    breedPref: required(Array),
    // Household situation
    outdoorSpace: required(String),
    dogs: required(Boolean),
    cats: required(Boolean),
    smallAnimals: required(Boolean),
    children: required(String),
    exercise: required(String),
    specialNeeds: required(Boolean),
    maxAlone: required(Number),
    // Contact and account details
    firstName: required(String),
    lastName: required(String),
    phone: required(String),
    email: required(String),
    password: required(String),
    // Per-user app state (optional)
    favouritesList: Array,
    dontShow: Array
})

const adopter = mongoose.model('adopter', adopterSchema);

module.exports = adopter;
-- List the name and age of every employee aged 25 or older.
SELECT name, age FROM Employees WHERE age >= 25;
package mindustry.entities;

import arc.*;
import arc.func.*;
import arc.graphics.*;
import arc.graphics.g2d.*;
import arc.math.*;
import arc.math.geom.*;
import arc.struct.*;
import arc.util.Tmp;
import arc.util.pooling.*;
import arc.z.util.ISOUtils;
import mindustry.entities.type.*;

import static z.debug.ZDebug.enable_isoInput;

/**
 * Static registry and dispatch point for visual effects and screen shake.
 *
 * <p>Effects are created through a swappable {@link EffectProvider}; the
 * default provider spawns pooled {@link EffectEntity} instances. When
 * {@code enable_isoInput} is set, incoming tile coordinates are converted
 * to world coordinates via {@link ISOUtils} before dispatch.
 */
public class Effects{
    // Reused scratch container passed to renderers; never escapes renderEffect.
    private static final EffectContainer container = new EffectContainer();
    // Global registry; every Effect constructor appends itself here.
    private static Array<Effect> effects = new Array<>();
    private static ScreenshakeProvider shakeProvider;
    // Divisor controlling distance falloff of positional screen shake.
    private static float shakeFalloff = 10000f;
    // Default provider: spawns a pooled EffectEntity at the given position.
    private static EffectProvider provider = (effect, color, x, y, rotation, data) -> {
        EffectEntity entity = Pools.obtain(EffectEntity.class, EffectEntity::new);
        entity.effect = effect;
        entity.color = color;
        entity.rotation = rotation;
        entity.data = data;
        entity.set(x, y);
        entity.add();
    };

    /** Replaces the effect creation strategy (e.g. for headless servers or tests). */
    public static void setEffectProvider(EffectProvider prov){
        provider = prov;
    }

    /** Sets the sink that receives screen-shake requests; required before shake() is used. */
    public static void setScreenShakeProvider(ScreenshakeProvider provider){
        shakeProvider = provider;
    }

    /** Renders one effect frame through the shared container, then resets draw state. */
    public static void renderEffect(int id, Effect render, Color color, float life, float rotation, float x, float y, Object data){
        container.set(id, color, life, render.lifetime, rotation, x, y, data);
        render.draw.render(container);
        Draw.reset();
    }

    /**
     * Looks up a registered effect by its id.
     *
     * @throws IllegalArgumentException if no effect with that id exists
     */
    public static Effect getEffect(int id){
        if(id >= effects.size || id < 0) throw new IllegalArgumentException("The effect with ID \"" + id + "\" does not exist!");
        return effects.get(id);
    }

    /** Returns the live registry of all effects (not a copy). */
    public static Array<Effect> all(){
        return effects;
    }

    /** Creates a white effect at (x, y) with the given rotation; null effects are ignored. */
    public static void effect(Effect effect, float x, float y, float rotation){
        if (effect == null) { // zones add code
            return;
        }
        if (enable_isoInput) {
            Vec2 pos = ISOUtils.tileToWorldCoords(x, y);
            x = pos.x;
            y = pos.y;
        }
        provider.createEffect(effect, Color.white, x, y, rotation, null);
    }

    /** Creates a white effect at (x, y) with no rotation. */
    public static void effect(Effect effect, float x, float y){
        // NOTE(review): when enable_isoInput is set, coordinates are converted
        // here AND again inside the 4-arg overload called below — this looks
        // like a double tile->world conversion; confirm against callers.
        if (enable_isoInput) {
            Vec2 pos = ISOUtils.tileToWorldCoords(x, y);
            x = pos.x;
            y = pos.y;
        }
        effect(effect, x, y, 0);
    }

    /** Creates a colored effect at (x, y) with no rotation. */
    public static void effect(Effect effect, Color color, float x, float y){
        if (enable_isoInput){
            Vec2 pos = ISOUtils.tileToWorldCoords(x, y);
            x = pos.x;
            y = pos.y;
        }
        provider.createEffect(effect, color, x, y, 0f, null);
    }

    /** Creates a white effect at the given position. */
    public static void effect(Effect effect, Position loc){
        if (enable_isoInput) {
            Vec2 pos = ISOUtils.tileToWorldCoords(loc);
            provider.createEffect(effect, Color.white, pos.getX(), pos.getY(), 0f, null);
            return;
        }
        provider.createEffect(effect, Color.white, loc.getX(), loc.getY(), 0f, null);
    }

    /** Creates a colored, rotated effect at (x, y). */
    public static void effect(Effect effect, Color color, float x, float y, float rotation){
        if (enable_isoInput){
            Vec2 pos = ISOUtils.tileToWorldCoords(x, y);
            x = pos.x;
            y = pos.y;
        }
        provider.createEffect(effect, color, x, y, rotation, null);
    }

    /** Creates a colored, rotated effect with attached data. No ISO conversion is applied here. */
    public static void effect(Effect effect, Color color, float x, float y, float rotation, Object data){
        provider.createEffect(effect, color, x, y, rotation, data);
    }

    /** Creates a white, rotated effect with attached data; null effects are ignored. */
    public static void effect(Effect effect, float x, float y, float rotation, Object data){
        if (effect == null) { // zones add code
            return;
        }
        if (enable_isoInput) {
            Vec2 pos = Tmp.v11.set(ISOUtils.tileToWorldCoords(x, y));
            x = pos.x;
            y = pos.y;
            // Remap rotation into the isometric view's angle convention.
            rotation = 360 - rotation + 45;
        }
        provider.createEffect(effect, Color.white, x, y, rotation, data);
    }

    /** Default value is 1000. Higher numbers mean more powerful shake (less falloff). */
    public static void setShakeFalloff(float falloff){
        shakeFalloff = falloff;
    }

    // Forwards a raw shake request to the configured provider.
    private static void shake(float intensity, float duration){
        if(shakeProvider == null) throw new RuntimeException("Screenshake provider is null! Set it first.");
        shakeProvider.accept(intensity, duration);
    }

    /** Shakes the screen with intensity attenuated by squared distance from the camera. */
    public static void shake(float intensity, float duration, float x, float y){
        if(Core.camera == null) return;
        if (enable_isoInput) {
            Vec2 pos = ISOUtils.tileToWorldCoords(x, y);
            x = pos.x;
            y = pos.y;
        }
        float distance = Core.camera.position.dst(x, y);
        if(distance < 1) distance = 1; // clamp to avoid divide-by-near-zero blowup
        shake(Mathf.clamp(1f / (distance * distance / shakeFalloff)) * intensity, duration);
    }

    /** Shakes the screen relative to the given position. */
    public static void shake(float intensity, float duration, Position loc){
        if (enable_isoInput) {
            Vec2 pos = ISOUtils.tileToWorldCoords(loc);
            shake(intensity, duration, pos.getX(), pos.getY());
            return;
        }
        shake(intensity, duration, loc.getX(), loc.getY());
    }

    /** Sink for screen-shake requests. */
    public interface ScreenshakeProvider{
        void accept(float intensity, float duration);
    }

    /** A registered effect: lifetime, clip size, and a renderer. Registration is automatic on construction. */
    public static class Effect{
        private static int lastid = 0;
        public final int id;
        public final EffectRenderer draw;
        public final float lifetime;
        /** Clip size. */
        public float size;

        public Effect(float life, float clipsize, EffectRenderer draw){
            this.id = lastid++;
            this.lifetime = life;
            this.draw = draw;
            this.size = clipsize;
            effects.add(this);
        }

        public Effect(float life, EffectRenderer draw){
            this(life, 28f, draw);
        }
    }

    /** Mutable state handed to renderers; reused across frames rather than allocated. */
    public static class EffectContainer implements Scaled{
        public float x, y, time, lifetime, rotation;
        public Color color;
        public int id;
        public Object data;
        // Lazily-created child container used by scaled().
        private EffectContainer innerContainer;

        public void set(int id, Color color, float life, float lifetime, float rotation, float x, float y, Object data){
            this.x = x;
            this.y = y;
            this.color = color;
            this.time = life;
            this.lifetime = lifetime;
            this.id = id;
            this.rotation = rotation;
            this.data = data;
        }

        /** Runs cons with a copy of this container rescaled to a shorter lifetime, while time is within it. */
        public void scaled(float lifetime, Cons<EffectContainer> cons){
            if(innerContainer == null) innerContainer = new EffectContainer();
            if(time <= lifetime){
                innerContainer.set(id, color, time, lifetime, rotation, x, y, data);
                cons.get(innerContainer);
            }
        }

        @Override
        public float fin(){
            return time / lifetime;
        }
    }

    /** Strategy for materializing an effect instance in the world. */
    public interface EffectProvider{
        void createEffect(Effect effect, Color color, float x, float y, float rotation, Object data);
    }

    /** Draws one frame of an effect from its container state. */
    public interface EffectRenderer{
        void render(EffectContainer effect);
    }
}
<filename>tests/update.test.ts import { cleanVersion, getBugfixVersionString, getMinorVersionString, updateCheck, updateInfo } from "../src/utils/update"; import { OnlinePackageProvider } from "../src/providers/online"; import { createMockNpmServer, IMockServer } from "./server"; describe(`Update Tests`, () => { let server: IMockServer; let provider: OnlinePackageProvider; beforeAll(async () => { server = await createMockNpmServer(); provider = new OnlinePackageProvider(`http://localhost:${server.port}`); }); afterAll(() => server.close()); test(`Clean semantic version strings`, () => { const baseVersion = `1.2.3`; const normal = cleanVersion(baseVersion); const bugfix = cleanVersion(`~${baseVersion}`); const minor = cleanVersion(`^${baseVersion}`); expect(normal).toMatch(baseVersion); expect(bugfix).toMatch(baseVersion); expect(minor).toMatch(baseVersion); }); test(`Should throw for invalid version strings`, () => { expect.assertions(1); try { cleanVersion(`adf`); } catch (e) { expect(e).toBeInstanceOf(Error); } }); test(`Should throw on unknown package`, async () => { expect.assertions(1); try { await updateCheck("doesntexist", "16.8.0", provider); } catch (e) { expect(e).toBeInstanceOf(Error); } }); test(`Should throw on unknown version`, async () => { expect.assertions(1); try { await updateCheck("react", "169.8.0", provider); } catch (e) { expect(e).toBeInstanceOf(Error); } }); test(`Should throw on missing release data`, async () => { expect.assertions(1); try { await updateCheck("missingdates", "16.8.0", provider); } catch (e) { expect(e).toBeInstanceOf(Error); } }); test(`Correctly creates bugfix version string`, () => { const baseVersion = `1.2.3`; const normal = getBugfixVersionString(baseVersion); const bugfix = getBugfixVersionString(`~${baseVersion}`); const minor = getBugfixVersionString(`^${baseVersion}`); expect(normal).toMatch(`~${baseVersion}`); expect(bugfix).toMatch(`~${baseVersion}`); expect(minor).toMatch(`~${baseVersion}`); }); 
test(`Correctly creates minor version string`, () => { const baseVersion = `1.2.3`; const normal = getMinorVersionString(baseVersion); const bugfix = getMinorVersionString(`~${baseVersion}`); const minor = getMinorVersionString(`^${baseVersion}`); expect(normal).toMatch(`^${baseVersion}`); expect(bugfix).toMatch(`^${baseVersion}`); expect(minor).toMatch(`^${baseVersion}`); }); test(`Correctly finds basic version update`, async () => { const { version } = await updateCheck("react", "16.8.0", provider); expect(version).toMatch(`16.8.0`); }); test(`Correctly finds bugifx version update`, async () => { const { version } = await updateCheck("react", "~16.8.0", provider); expect(version).toMatch(`16.8.6`); }); test(`Correctly finds minor version update`, async () => { const { version } = await updateCheck("react", "^16.0.0", provider); expect(version).toMatch(`16.8.6`); }); test(`Correctly calculates update info`, async () => { const info = await updateInfo("react", "^15.5.0", provider); expect(info.version).toMatch(`^15.5.0`); expect(info.latestOverall.version).toMatch(`16.8.6`); expect(info.latestSemanticMatch.version).toMatch(`15.6.2`); expect(info.latestBugfix.version).toMatch(`15.5.4`); expect(info.latestMinor.version).toMatch(`15.6.2`); }); });
#!/usr/bin/env bash
# Regenerate code with the project generator, then normalize imports.
# Fail fast on any error, unset variable, or pipeline failure so a broken
# generation step cannot be silently followed by goimports.
set -euo pipefail

go run ./cmd/generator/generator.go --out-dir="$PWD" --tmpl-dir="$PWD/cmd/generator"
goimports -w "$PWD"
# Start a detached devkitA64 build container named "switchdev" with the
# current directory mounted at /build/git; tail keeps it alive for exec calls.
docker run -d --name switchdev --workdir /build/git -v "${PWD}:/build/git" devkitpro/devkita64:20210306 tail -f /dev/null
# Fetch the pacman PKGBUILD helper package (resume-capable download).
curl -LOC - https://github.com/uyjulian/pacman-packages/releases/download/v2.2.3-1-pkgbuild-helpers/devkitpro-pkgbuild-helpers-2.2.3-1-any.pkg.tar.xz
# Install the helpers inside the container.
docker exec switchdev /bin/bash -c 'dkp-pacman -U --noconfirm devkitpro-pkgbuild-helpers-2.2.3-1-any.pkg.tar.xz'
# Install Python 2 (build scripts need it) inside the container.
docker exec switchdev /bin/bash -c 'apt-get -y update'
docker exec switchdev /bin/bash -c 'apt-get -y install python'
# Pull the prebuilt Switch libpython2 and unpack it into the portlibs tree.
curl -LOC - https://github.com/uyjulian/switch-libpython2/releases/download/v2.7.18-switch1/python27-switch.tar.gz
docker exec switchdev /bin/bash -c 'tar -xf python27-switch.tar.gz -C $DEVKITPRO/portlibs/switch'
<reponame>GuRuGuMaWaRu/CodeProblems function circleOfNumbers(n, firstNumber) { if (firstNumber >= n / 2) { return firstNumber - n / 2; } return firstNumber + n / 2; }
import pulsar as psr
import os

class MyPyBase(psr.TestModule):
    # Minimal Python test module used below to exercise lambda-module loading.
    def __init__(self,myid):
        super(MyPyBase,self).__init__(myid)

    def run_test_(self):
        return

def run_test():
    """Exercise the ModuleManager Python bindings end to end.

    Walks through failure modes of load_module_from_minfo (missing path,
    missing handler, bad path, missing name), then loads the compiled C++
    test module, checks registry queries, option handling, key duplication,
    and lambda-module loading. Returns the number of failed checks.
    """
    tester=psr.PyTester("Testing ModuleManager Python Bindings")
    mm=psr.ModuleManager()
    minfo=psr.ModuleInfo()
    mod_name,not_name,opt_key="my module","Not a module","Option1"
    opt=2.0
    # Declare a required float option with no default so load/test calls
    # fail until it is explicitly set further down.
    minfo.options.add_option(opt_key,psr.OptionType.Float,True,None,"",None)
    load_module=mm.load_module_from_minfo
    tester.test_return("has non-existent module",True,False,mm.has_key,mod_name)
    # Each of the following mutates one field of minfo and retries the load,
    # checking that every missing piece produces a failure in turn.
    tester.test_call("No path causes failure",False,load_module,minfo,mod_name)
    minfo.path="a/fake/path"
    tester.test_call("No handler",False,load_module,minfo,mod_name)
    minfo.language="c_module"
    tester.test_call("Not a path",False,load_module,minfo,mod_name)
    # The real shared library lives next to this test file.
    cwd=os.path.dirname(os.path.realpath(__file__))
    minfo.path=os.path.join(cwd,"CXXModule.so")
    tester.test_call("No name",False,load_module,minfo,mod_name)
    minfo.name="C++ Module"
    tester.test_call("load_module",True,load_module,minfo,mod_name)
    tester.test_return("has module",True,True,mm.has_key,mod_name)
    tester.test_return("size",True,1,mm.size)
    tester.test_return("module key info",True,minfo,mm.module_key_info,mod_name)
    tester.test_call("module key info non-existent key",False,
                     mm.module_key_info,not_name)
    # The required option is still unset here, so these must fail.
    tester.test_call("test all no required opt",False,mm.test_all)
    tester.test_call("set required opt bad key",False,mm.change_option,not_name,
                     opt_key,opt)
    tester.test_call("set required opt bad opt key",False,mm.change_option,
                     mod_name,not_name,opt)
    tester.test_call("get non-existent module",False,mm.get_module,not_name,0)
    tester.test_call("get bad parent ID",False,mm.get_module,mod_name,9)
    tester.test_call("get not required options ",False,mm.get_module,mod_name,0)
    tester.test_call("set required opt",True,mm.change_option,mod_name,opt_key,
                     opt)
    tester.test_call("get module",True,mm.get_module,mod_name,0)
    # Once a module has been handed out, its key's options are frozen.
    tester.test_call("can't change option for used key",False,mm.change_option,
                     mod_name,opt_key,opt)
    tester.test_call("can't duplicate to existing key",False,mm.duplicate_key,
                     not_name,mod_name)
    tester.test_call("can't duplicate non-existing key",False,mm.duplicate_key,
                     not_name,not_name)
    new_name="new mod name"
    tester.test_call("duplicate key",True,mm.duplicate_key,"my module",new_name)
    unq_key=mm.generate_unique_key()
    tester.test_return("unique key",True,False,mm.has_key,unq_key)
    # Lambda modules are registered from a Python class rather than a library.
    tester.test_call("add lambda module",True,mm.load_lambda_module,MyPyBase,"Py Module",unq_key)
    tester.test_call("can get module from creation funcs",True,mm.get_module,
                     unq_key,0)
    tester.print_results()
    return tester.nfailed()
use std::collections::HashMap; enum Error { PtraceError, KillError, ContextError, } fn manage_process(handle: &Handle) -> Result<(), Error> { // Close the process session if ptrace::close_session(handle.info.pid).is_err() { return Err(Error::PtraceError); } // Check and send SIGKILL if O_EXCL flag is set if handle.info.flags & O_EXCL == O_EXCL { if syscall::kill(handle.info.pid, SIGKILL).is_err() { return Err(Error::KillError); } } // Update process context let mut contexts = context::contexts(); if let Some(context) = contexts.get_mut(handle.info.pid) { let mut context = context.write(); context.ptrace_stop = false; } else { return Err(Error::ContextError); } Ok(()) }
#!/usr/bin/env node
/*
Use this script for local development against a remote API without CORS
issues. This script will launch a reverse proxy listening on localhost:5052
with all API calls forwarding to http://localhost:9000, auth calls to
http://localhost:9002, and all other requests forwarding to the GUI (the
local Angular development server at http://localhost:4200 when started with
the 'dev' argument, otherwise http://localhost:5050).
*/
var http = require('http');
var httpProxy = require('http-proxy');

const type = process.argv[2];

var GUI_URL = 'http://localhost:5050';
if (type === 'dev') {
    GUI_URL = 'http://localhost:4200';
}

var API_PATHS = ['/api'];
var API_URL = 'http://localhost:9000';
var AUTH_PATHS = ['/auth'];
var AUTH_URL = 'http://localhost:9002';

var proxy = httpProxy.createProxyServer({ changeOrigin: true });

// Register the error handler ONCE. The original attached a new listener
// inside the request handler, adding one listener per request and leaking
// until Node emits MaxListenersExceededWarning.
proxy.on('error', function (e) {
    console.log('Proxy Error:' + e);
});

var server = http.createServer(function (req, res) {
    // Default to the GUI; reroute to the API/auth backends by path prefix.
    var target = GUI_URL;
    for (var i = 0; i < API_PATHS.length; i++) {
        if (req.url.startsWith(API_PATHS[i])) {
            target = API_URL;
            break;
        }
    }
    for (var i = 0; i < AUTH_PATHS.length; i++) {
        if (req.url.startsWith(AUTH_PATHS[i])) {
            target = AUTH_URL;
            break;
        }
    }
    proxy.web(req, res, { target: target });
});

console.log('Proxy Server listening on port 5052');
server.listen(5052);
#!/bin/bash #SBATCH --account=def-dkulic #SBATCH --mem=8000M # memory per node #SBATCH --time=23:00:00 # time (DD-HH:MM) #SBATCH --output=/project/6001934/lingheng/Double_DDPG_Job_output/continuous_RoboschoolInvertedDoublePendulum-v1_ddpg_hardcopy_action_noise_seed2_run6_%N-%j.out # %N for node name, %j for jobID module load qt/5.9.6 python/3.6.3 nixpkgs/16.09 gcc/7.3.0 boost/1.68.0 cuda cudnn source ~/tf_cpu/bin/activate python ./ddpg_discrete_action.py --env RoboschoolInvertedDoublePendulum-v1 --random-seed 2 --exploration-strategy action_noise --summary-dir ../Double_DDPG_Results_no_monitor/continuous/RoboschoolInvertedDoublePendulum-v1/ddpg_hardcopy_action_noise_seed2_run6 --continuous-act-space-flag --double-ddpg-flag --target-hard-copy-flag