text
stringlengths
1
1.05M
/* * To change this template, choose Tools | Templates * and open the template in the editor. */ package it.infn.ct.security.listeners; import javax.servlet.ServletContextEvent; import javax.servlet.ServletContextListener; import org.hibernate.SessionFactory; import org.hibernate.cfg.Configuration; /** * Web application lifecycle listener. * * @author marco */ public class HibernateListener implements ServletContextListener { public void contextInitialized(ServletContextEvent sce) { SessionFactory factory; factory = new Configuration().configure().buildSessionFactory(); sce.getServletContext().setAttribute("IDPPublic.hibernatefactory", factory); } public void contextDestroyed(ServletContextEvent sce) { } }
#!/usr/bin/env bash
# ----------------------------------------------------------------------------
# (C) Copyright IBM Corp. 2021
#
# SPDX-License-Identifier: Apache-2.0
# ----------------------------------------------------------------------------

##############################################################################
# Description:
# This script is the entrypoint used by the ibm-fhir-server docker image, and
# optionally bootstraps a derby database prior to running the IBM FHIR Server.

set -e -o pipefail

##############################################################################
# The global variables used are:

# Fix: quote ${BASH_SOURCE[0]} so a path containing spaces does not break basename.
SCRIPT_NAME="$(basename "${BASH_SOURCE[0]}")"
CUR_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
FHIR_PERSISTENCE_SCHEMA_CLI_LOCATION="/opt/ibm-fhir-server/tools"

# Default to "false" when BOOTSTRAP_DB is unset or empty (replaces the two-step
# assign-then-test idiom with a single parameter expansion).
PERFORM_BOOTSTRAP_DB="${BOOTSTRAP_DB:-false}"

##############################################################################
# Helper Functions

# info - local function to echo an info message
# ARGUMENTS:
#   String of message
function info {
    echo "${SCRIPT_NAME} - [INFO]: $(date +"%Y-%m-%d_%T") - ${1}"
}

# _call_derby_db - local function to call the derby database schema CLI
# ARGUMENTS:
#   String of additional parameters
function _call_derby_db {
    # The jar path is intentionally unquoted so the version glob expands.
    /opt/java/openjdk/bin/java -jar ${FHIR_PERSISTENCE_SCHEMA_CLI_LOCATION}/fhir-persistence-schema-*-cli.jar \
        --prop "db.create=Y" \
        --prop "db.database=/output/derby/fhirDB" \
        --db-type derby \
        ${1} 2>&1
}

# _bootstrap_db - local function to perform database bootstrapping
function _bootstrap_db {
    if [ "$PERFORM_BOOTSTRAP_DB" = "true" ]
    then
        info "Performing Derby database bootstrapping"
        _call_derby_db "--update-schema"
        info "Finished Derby database bootstrapping"
    else
        info "Skipping Derby database bootstrapping"
    fi
}

##############################################################################
# Script logic:

info "Current directory: $CUR_DIR"
_bootstrap_db

# Pass it on to the Liberty entrypoint
/opt/ol/helpers/runtime/docker-server.sh "$@"
# EOF
#!/bin/sh
# Update the doas privilege-escalation utility via Portage, running the
# emerge command as root (prompts for the root password via su).
su -c 'emerge -u app-admin/doas'
<gh_stars>1-10
// Barrel file: re-exports the full public surface of node-sleuth and the
// local dubbo-injector module, plus the named `sleuth` export from ./sleuth.
export * from 'node-sleuth';
export * from './dubbo-injector';
export { sleuth } from './sleuth';
<filename>player/src/test/java/fr/unice/polytech/si3/qgl/soyouz/classes/marineland/entities/onboard/RameTest.java<gh_stars>0
package fr.unice.polytech.si3.qgl.soyouz.classes.marineland.entities.onboard;

import fr.unice.polytech.si3.qgl.soyouz.classes.types.PosOnShip;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

import static org.junit.jupiter.api.Assertions.*;

/**
 * Unit tests for {@link Rame} (oar): coordinate accessors, ship position,
 * equality/hash-code contract, and left/right side detection.
 */
class RameTest {

    private Rame oarAtOrigin;
    private Rame oarMiddle;
    private Rame oarFar;

    @BeforeEach
    void setUp() {
        oarAtOrigin = new Rame(1, 0);
        oarMiddle = new Rame(1, 1);
        oarFar = new Rame(2, 2);
    }

    @Test
    void getX() {
        assertEquals(1, oarAtOrigin.getX());
        assertEquals(1, oarMiddle.getX());
        assertEquals(2, oarFar.getX());
    }

    @Test
    void getY() {
        assertEquals(0, oarAtOrigin.getY());
        assertEquals(1, oarMiddle.getY());
        assertEquals(2, oarFar.getY());
    }

    @Test
    void getPos() {
        assertEquals(PosOnShip.of(1, 0), oarAtOrigin.getPos());
        assertEquals(PosOnShip.of(1, 1), oarMiddle.getPos());
        assertEquals(PosOnShip.of(2, 2), oarFar.getPos());
    }

    @Test
    void testEquals() {
        // Reflexive, then pairwise inequality, then inequality with a foreign type.
        assertEquals(oarAtOrigin, oarAtOrigin);
        assertNotEquals(oarAtOrigin, oarMiddle);
        assertNotEquals(oarMiddle, oarFar);
        assertNotEquals(oarAtOrigin, oarFar);
        assertFalse(oarAtOrigin.equals("Hello"));
    }

    @Test
    void testHashCode() {
        // Distinct positions are expected to hash differently.
        assertNotEquals(oarAtOrigin.hashCode(), oarMiddle.hashCode());
    }

    @Test
    void isLeft() {
        assertTrue(oarAtOrigin.isLeft());
        assertFalse(oarMiddle.isLeft());
        assertFalse(oarFar.isLeft());
    }
}
#!/bin/sh
# Status helper: print a JSON class indicating whether wf-recorder is running
# (consumed by a status-bar custom module to toggle a "recording" style).
#
# Fix: the original used `&> /dev/null`, a bashism. Under plain sh, `&>` is
# parsed as "background the command, then truncate /dev/null", so the `if`
# condition tested the (always successful) background launch and was always
# true. Use POSIX-portable redirection instead.
if pgrep wf-recorder > /dev/null 2>&1
then
    echo '{"class": "recording"}'
    exit
fi
echo '{"class": ""}'
# Homebrew Cask definition for Screenhero, a commercial screen-sharing /
# remote collaboration application for macOS.
cask 'screenhero' do
  version '2.3.6.0'
  sha256 'b89302e56554f79cb342e54273df99d2319638dc2e1fb68c6772a05eaed8d2dd'

  # Versioned download URL; the Sparkle appcast feed is polled for updates.
  url "https://secure.screenhero.com/update/screenhero/Screenhero-#{version}.dmg"
  appcast 'https://d3hb26arjl8wb7.cloudfront.net/jsherwani/public/update/mac/screenhero/sparkle.xml',
          checkpoint: '971582489e19086e79213cf7ef32dd9b2f5da8e1fa24c31d9dda71558226ba4e'
  name 'Screenhero'
  homepage 'https://screenhero.com/'
  license :commercial

  app 'Screenhero.app'
end
<filename>src/main/java/com/mx/atrium/auth/Authentication.java
/*
 * MX API
 * The MX Atrium API supports over 48,000 data connections to thousands of financial institutions. It provides secure access to your users' accounts and transactions with industry-leading cleansing, categorization, and classification. Atrium is designed according to resource-oriented REST architecture and responds with JSON bodies and HTTP response codes. Use Atrium's development environment, vestibule.mx.com, to quickly get up and running. The development environment limits are 100 users, 25 members per user, and access to the top 15 institutions. Contact MX to purchase production access.
 *
 * OpenAPI spec version: 0.1
 *
 */
package com.mx.atrium.auth;

import com.mx.atrium.Pair;

import java.util.Map;
import java.util.List;

/**
 * Strategy interface for request authentication schemes used by the generated
 * MX Atrium client (e.g. API-key headers or query-string credentials).
 */
public interface Authentication {
    /**
     * Apply authentication settings to header and query params.
     *
     * <p>NOTE(review): given the {@code void} return, implementations
     * presumably mutate the supplied collections in place — pass mutable
     * instances; confirm against concrete implementations.
     *
     * @param queryParams List of query parameters
     * @param headerParams Map of header parameters
     */
    void applyToParams(List<Pair> queryParams, Map<String, String> headerParams);
}
#!/bin/bash
# SLURM batch script: evaluate the UNITER_basic_both MMCoref checkpoint on the
# test split and then on the dev split (two back-to-back inference runs).
#SBATCH --job-name=eval_UNITER_basic_both  # job name
#SBATCH --nodes=1                          # leave as-is; see official docs for multi-node scripts
#SBATCH --ntasks=1                         # leave as-is; see official docs for multi-task scripts
#SBATCH --cpus-per-task=8                  # CPUs to request (4 is usually enough)
#SBATCH --mem=256GB                        # maximum memory
#SBATCH --time=24:00:00                    # wall-time limit
#SBATCH --mail-type=END                    # ALL / END
#SBATCH --mail-user=yh2689@nyu.edu         # where to send the completion email
#SBATCH --output=%x%A.out                  # file for normal (stdout) output
#SBATCH --error=%x%A.err                   # file for error (stderr) output
#SBATCH --gres=gpu:1                       # GPUs to request (at most 8 concurrently)
#SBATCH -p aquila                          # partition that has GPUs
#SBATCH --nodelist=agpu7

module purge                               # clear all loaded modules
module load anaconda3 cuda/11.1.1          # load anaconda (virtual env for training)

nvidia-smi
nvcc --version

cd /gpfsnyu/scratch/yh2689/MMCoref/MMCoref_cleaned  # change to the project directory

echo "START"                               # print start message

source deactivate
source /gpfsnyu/packages/anaconda3/5.2.0/bin/activate wilson  # activate the virtual env

# First run: default split (no --SPLIT flag).
python -u infer_eval.py \
    --NAME eval_UNITER_basic_both \
    --CHECKPOINT UNITER_basic_both \
    --obj_id False \
    --vis_feats_clip True \
    --vis_feats_rcnn True \
    --pos False \
    --scene_seg False \
    --obj_embs_bert False \
    --obj_embs_sbert False \
    --kb_id_bert False \
    --kb_id_sbert False

# Second run: identical configuration evaluated on the dev split.
python -u infer_eval.py \
    --NAME eval_UNITER_basic_both \
    --CHECKPOINT UNITER_basic_both \
    --obj_id False \
    --vis_feats_clip True \
    --vis_feats_rcnn True \
    --pos False \
    --scene_seg False \
    --obj_embs_bert False \
    --obj_embs_sbert False \
    --kb_id_bert False \
    --kb_id_sbert False \
    --SPLIT dev

echo "FINISH"                              # print completion message
# Set DIB_DISTRIBUTION_MIRROR and related if running in openstack gate
# This file is sourced by the image-build environment; it uses `return`,
# so it must not be executed directly.

# don't spam logs with this source
_xtrace=$(set +o | grep xtrace)
set +o xtrace

# Locate the CI mirror description; its path differs inside vs outside the chroot.
if [ -f /etc/ci/mirror_info.sh ]; then
    # outside chroot
    mirror_info=/etc/ci/mirror_info.sh
elif [ -f /tmp/in_target.d/mirror_info.sh ]; then
    # inside chroot
    mirror_info=/tmp/in_target.d/mirror_info.sh
else
    echo "No mirror file found. Not an OpenStack CI node?"
    return 0
fi
source $mirror_info

# Restore the caller's xtrace setting saved above.
$_xtrace

# note 11- is after 10- which is where DISTRO_NAME is set usually
if [[ "${DISTRO_NAME}" == "ubuntu" ]]; then
    # Ubuntu arm64 packages live on the separate "ports" mirror.
    if [[ "${ARCH}" == "arm64" ]]; then
        export DIB_DISTRIBUTION_MIRROR=${NODEPOOL_UBUNTU_PORTS_MIRROR}
    elif [[ "${ARCH}" == "amd64" ]]; then
        export DIB_DISTRIBUTION_MIRROR=$NODEPOOL_UBUNTU_MIRROR
    fi
    export DIB_DEBOOTSTRAP_EXTRA_ARGS+=" --no-check-gpg"
elif [[ "${DISTRO_NAME}" == "debian" ]]; then
    export DIB_DISTRIBUTION_MIRROR=$NODEPOOL_DEBIAN_MIRROR
    export DIB_DEBOOTSTRAP_EXTRA_ARGS+=" --no-check-gpg"
elif [[ "${DISTRO_NAME}" == "fedora" ]]; then
    export DIB_DISTRIBUTION_MIRROR=$NODEPOOL_FEDORA_MIRROR
elif [[ "${DISTRO_NAME}" == "centos" ]]; then
    if [[ "${DIB_RELEASE}" == '9-stream' ]]; then
        # NOTE(ianw) 2021-10-18 : no 9-stream mirrors, yet
        :
    else
        export DIB_DISTRIBUTION_MIRROR=$NODEPOOL_CENTOS_MIRROR
        export DIB_EPEL_MIRROR=$NODEPOOL_EPEL_MIRROR
    fi
elif [[ "${DISTRO_NAME}" == "centos7" ]]; then
    export DIB_DISTRIBUTION_MIRROR=$NODEPOOL_CENTOS_MIRROR
    export DIB_EPEL_MIRROR=$NODEPOOL_EPEL_MIRROR
elif [[ "${DISTRO_NAME}" == "opensuse" ]]; then
    export DIB_DISTRIBUTION_MIRROR=$NODEPOOL_OPENSUSE_MIRROR
fi

# Infra doesn't mirror non-free repos, so instruct to ignore these
export DIB_DISTRIBUTION_MIRROR_UBUNTU_IGNORE="(universe|multiverse)"
export DIB_DISTRIBUTION_MIRROR_UBUNTU_INSECURE=1

# These repo files are pre-created for the fedora/centos-minimal jobs
# in the gate. Not relevant inside the chroot.
if [[ -d ${DIB_OS_CI_YUM_REPOS:-/not/a/path/} ]]; then
    if [[ "${DISTRO_NAME}" == "fedora" ]]; then
        # Prefer a release-specific repo directory, falling back to "default".
        if [[ -d ${DIB_OS_CI_YUM_REPOS}/fedora-minimal/${DIB_RELEASE} ]]; then
            export DIB_YUM_MINIMAL_BOOTSTRAP_REPOS=${DIB_OS_CI_YUM_REPOS}/fedora-minimal/${DIB_RELEASE}/yum.repos.d
        else
            export DIB_YUM_MINIMAL_BOOTSTRAP_REPOS=${DIB_OS_CI_YUM_REPOS}/fedora-minimal/default/yum.repos.d
        fi
    elif [[ "${DISTRO_NAME}" == "centos" ]]; then
        export DIB_YUM_MINIMAL_BOOTSTRAP_REPOS=${DIB_OS_CI_YUM_REPOS}/centos-minimal/${DIB_RELEASE}/yum.repos.d
    elif [[ "${DISTRO_NAME}" == "openeuler" ]]; then
        export DIB_YUM_MINIMAL_BOOTSTRAP_REPOS=${DIB_OS_CI_YUM_REPOS}/openeuler-minimal/${DIB_RELEASE}/yum.repos.d
    fi
fi
package com.nextbreakpoint;

import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;

import java.util.NoSuchElementException;
import java.util.function.Function;
import java.util.function.Predicate;

import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.*;

/**
 * Unit tests for {@code Try.map(Function)}: null-argument rejection, whether
 * the mapping function is invoked for success/failure/null values, failure
 * propagation when the function throws, and interaction with {@code filter}.
 */
public class MapTest {
    @Rule
    public ExpectedException exception = ExpectedException.none();

    @Test
    public void shouldThrowNullPointerExceptionWhenFunctionInNull() {
        exception.expect(NullPointerException.class);
        Try.success("X").map(null);
    }

    @Test
    public void shouldNotCallFunctionWhenFailure() {
        Function<Object, Object> function = mock(Function.class);
        Try.failure(new Exception()).map(function).orElse(null);
        verify(function, times(0)).apply(any());
    }

    @Test
    public void shouldNotCallFunctionWhenSuccessAndValueIsNull() {
        Function<Object, Object> function = mock(Function.class);
        when(function.apply(null)).thenReturn("Y");
        Try.success(null).map(function).orElse(null);
        verify(function, times(0)).apply(any());
    }

    @Test
    public void shouldCallFunctionWhenSuccessAndValueIsNotNull() {
        Function<String, Object> function = mock(Function.class);
        when(function.apply("X")).thenReturn("Y");
        Try.success("X").map(function).get();
        verify(function, times(1)).apply("X");
    }

    @Test
    public void shouldNotCallFunctionWhenCallableThrowsException() {
        Function<Object, Object> function = mock(Function.class);
        Try.of(() -> { throw new Exception(); }).map(function).orElse(null);
        verify(function, times(0)).apply(any());
    }

    @Test
    public void shouldNotCallFunctionWhenCallableReturnsNull() {
        Function<Object, Object> function = mock(Function.class);
        Try.of(() -> null).map(function).orElse(null);
        verify(function, times(0)).apply(any());
    }

    @Test
    public void shouldCallFunctionWhenCallableReturnsValue() {
        Function<String, Object> function = mock(Function.class);
        when(function.apply("X")).thenReturn("Y");
        Try.of(() -> "X").map(function).get();
        verify(function, times(1)).apply("X");
    }

    @Test
    public void shouldReturnFailureWhenFunctionThrowsException() {
        Function<String, Object> function = mock(Function.class);
        when(function.apply(any())).thenThrow(RuntimeException.class);
        assertTrue(Try.success("X").map(function).isFailure());
    }

    @Test
    public void shouldThrowNoSuchElementExceptionWhenFunctionReturnsNull() {
        // A mapper returning null yields an empty Try, so get() must throw.
        exception.expect(NoSuchElementException.class);
        Function<String, Object> function = mock(Function.class);
        when(function.apply(any())).thenReturn(null);
        Try.success("X").map(function).get();
    }

    @Test
    public void shouldHaveValueWhenFilterReturnsTrueAndFilterIsAfterMapAndValueIsNotNull() {
        Predicate<Object> filter = mock(Predicate.class);
        when(filter.test("x")).thenReturn(true);
        assertTrue(Try.success("X").map(v -> v.toLowerCase()).filter(filter).isPresent());
    }

    @Test
    public void shouldNotHaveValueWhenFilterReturnsFalseAndFilterIsBeforeMapAndValueIsNotNull() {
        Predicate<Object> filter = mock(Predicate.class);
        when(filter.test("X")).thenReturn(false);
        assertFalse(Try.success("X").filter(filter).map(v -> v.toLowerCase()).isPresent());
    }

    @Test
    public void shouldHaveValueWhenFilterReturnsTrueAndFilterIsAfterMapAndCallableReturnsValue() {
        Predicate<Object> filter = mock(Predicate.class);
        when(filter.test("x")).thenReturn(true);
        assertTrue(Try.of(() -> "X").map(v -> v.toLowerCase()).filter(filter).isPresent());
    }

    @Test
    public void shouldNotHaveValueWhenFilterReturnsFalseAndFilterIsBeforeMapAndCallableReturnsValue() {
        Predicate<Object> filter = mock(Predicate.class);
        when(filter.test("X")).thenReturn(false);
        assertFalse(Try.of(() -> "X").filter(filter).map(v -> v.toLowerCase()).isPresent());
    }
}
#!/bin/bash
# Interactively launch a SAMap Jupyter container: prompt for a container name,
# a host directory to mount as the notebooks folder, and the notebook port.
read -e -p "Docker container name: " name
read -e -p "Mount volume path: " folder
read -e -p "Jupyter notebook port: " port

image=tarashan/samap:latest

# Expand a leading "~" in the user-supplied path to $HOME.
path="${folder/#\~/$HOME}"
parentdir="$(dirname "$path")"

# Fix: quote all expansions — the original broke on paths/names containing
# spaces and was vulnerable to word splitting/globbing.
chmod 755 "$parentdir"

docker run -d --rm --name="$name" \
    -v "$path":/jupyter/notebooks \
    -p "$port":"$port" -e PORT="$port" "$image"
package views; import models.StateModel.InventoryViewModel; import models.StateModel.PlayStateModel; import java.awt.*; import java.awt.geom.Rectangle2D; import java.awt.image.BufferedImage; /** * Implemented by <NAME> */ public class InventoryView extends View{ private final String TITLE = "Inventory"; private Font titleFont; private InventoryViewModel model; public InventoryView(int width, int height, Canvas canvas, InventoryViewModel model){ super(width,height, canvas); this.model = model; } @Override protected void render(Graphics g) { BufferedImage image = new BufferedImage(getScreenWidth(),getScreenHeight(), BufferedImage.TYPE_4BYTE_ABGR); Graphics2D g2 = image.createGraphics(); renderTitle(g2); renderSlots(g2); g.drawImage(image,(int)(getScreenWidth()*0.2),(int)( getScreenHeight()*0.2),(int)( getScreenWidth()*0.6),(int)( getScreenHeight()*0.6),null); } private void renderTitle(Graphics g) { titleFont = new Font("Serif", Font.BOLD, (int) (getScreenHeight() * .1)); g.setFont(titleFont); FontMetrics fm = g.getFontMetrics(); Rectangle2D rectangle = fm.getStringBounds(TITLE,g); int x = getScreenWidth() / 2 - (int)rectangle.getWidth() / 2; int y = (int)rectangle.getHeight(); g.setColor(Color.WHITE); g.drawString(TITLE, x, y); } private void renderSlots(Graphics g){ titleFont = new Font("Serif", Font.BOLD, (int) (getScreenHeight() * .05)); g.setFont(titleFont); FontMetrics fm = g.getFontMetrics(); Color selectedColor = Color.YELLOW; Color regularColor = Color.WHITE; g.setColor(regularColor); int Xinc = (int)(getScreenWidth()*0.16); int Yinc = (int)(getScreenHeight()*0.18); int blockW = (int)(getScreenWidth()*0.14); int blockH = (int)(getScreenHeight()*0.16); int size = model.getInventorySize(); int xStart = (int)(getScreenWidth()*0.1); int yStart = (int)(getScreenHeight()*0.2); int xpos = xStart; int ypos = yStart; for(int i = 0 ; i < size ; i++) { if (model.getCurrentIndex() == i) { g.setColor(selectedColor); g.fillRect(xpos, ypos, blockW, blockH); } else { 
g.setColor(regularColor); g.fillRect(xpos, ypos, blockW, blockH); } BufferedImage image = model.getItemImageAt(i); if (image != null) { g.drawImage(model.getItemImageAt(i), xpos, ypos, blockW, blockH, null); } xpos += Xinc; if(i%5 == 4){ xpos = xStart; ypos += Yinc ; } } } }
import pandas as pd
import numpy as np
from sklearn.metrics import mean_squared_error
from sklearn.model_selection import train_test_split
from sklearn.linear_model import LinearRegression

# Load and clean the data (drop rows with any missing values).
data = pd.read_csv('student_grades.csv')
data = data.dropna()

# Split the data into input (X) and output (y).
X = data.drop('grade', axis=1)
y = data['grade']

# Hold out 20% of the rows as a test set (fixed seed for reproducibility).
X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.2, random_state=0)

# Fit a linear regression model to the training data.
regressor = LinearRegression()
regressor.fit(X_train, y_train)

# Evaluate the model on the test data.
y_pred = regressor.predict(X_test)
mse = mean_squared_error(y_test, y_pred)

# Score each prediction.
# NOTE(review): `calculate_score`, `create_transaction`, and
# `process_transactions` are not defined or imported here — confirm they are
# provided by the execution environment before running this script.
predictions = [calculate_score(pred) for pred in y_pred]

# Build one transaction per score.
# Fix: the original wrote `for predictions in predictions:`, shadowing the
# list with its own first element and appending transactions built from the
# wrong values.
transactions = [create_transaction(prediction) for prediction in predictions]

# Process the transactions in parallel.
process_transactions(transactions)

# Print the mean squared error.
print(mse)
<gh_stars>0 import { Injectable, OnDestroy } from '@angular/core'; import { HttpClient } from '@angular/common/http'; import { Router } from '@angular/router'; import { Store } from '@ngrx/store'; import { Actions, Effect, ofType } from '@ngrx/effects'; import { of, Subject, forkJoin } from 'rxjs'; import { map, mergeMap, catchError, take, withLatestFrom } from 'rxjs/operators'; import { MatDialog } from '@angular/material'; import { MatSnackBar } from '@angular/material/snack-bar'; import { environment, API_URL } from '../../environments/environment'; import { LoggerService } from '../shared/services/logger.service'; import { SessionService } from '../shared/services/session.service'; import { CommonService } from '../shared/services/common.service'; import { DataService } from '../shared/services/data.service'; import { Settings, RTLConfiguration, ConfigSettingsNode } from '../shared/models/RTLconfig'; import { AuthenticateWith, CURRENCY_UNITS, ScreenSizeEnum } from '../shared/services/consts-enums-functions'; import { SpinnerDialogComponent } from '../shared/components/data-modal/spinner-dialog/spinner-dialog.component'; import { AlertMessageComponent } from '../shared/components/data-modal/alert-message/alert-message.component'; import { ConfirmationMessageComponent } from '../shared/components/data-modal/confirmation-message/confirmation-message.component'; import { ShowPubkeyComponent } from '../shared/components/data-modal/show-pubkey/show-pubkey.component'; import * as RTLActions from './rtl.actions'; import * as fromRTLReducer from './rtl.reducers'; import { ErrorMessageComponent } from '../shared/components/data-modal/error-message/error-message.component'; @Injectable() export class RTLEffects implements OnDestroy { dialogRef: any; CHILD_API_URL = API_URL + '/lnd'; screenSize = ''; alertWidth = '55%'; confirmWidth = '70%'; private unSubs: Array<Subject<void>> = [new Subject(), new Subject()]; constructor( private actions$: Actions, private httpClient: 
HttpClient, private store: Store<fromRTLReducer.RTLState>, private logger: LoggerService, private sessionService: SessionService, private commonService: CommonService, private dataService: DataService, public dialog: MatDialog, private snackBar: MatSnackBar, private router: Router) {} @Effect({ dispatch: false }) openSnackBar = this.actions$.pipe( ofType(RTLActions.OPEN_SNACK_BAR), map((action: RTLActions.OpenSnackBar) => { this.snackBar.open(action.payload); } )); @Effect({ dispatch: false }) openSpinner = this.actions$.pipe( ofType(RTLActions.OPEN_SPINNER), map((action: RTLActions.OpenSpinner) => { this.dialogRef = this.dialog.open(SpinnerDialogComponent, { data: { titleMessage: action.payload}}); } )); @Effect({ dispatch: false }) closeSpinner = this.actions$.pipe( ofType(RTLActions.CLOSE_SPINNER), map((action: RTLActions.CloseSpinner) => { if (this.dialogRef) { this.dialogRef.close(); } } )); @Effect({ dispatch: false }) openAlert = this.actions$.pipe( ofType(RTLActions.OPEN_ALERT), map((action: RTLActions.OpenAlert) => { action.payload.width = this.alertWidth; if(action.payload.data.component) { this.dialogRef = this.dialog.open(action.payload.data.component, action.payload); } else { this.dialogRef = this.dialog.open(AlertMessageComponent, action.payload); } } )); @Effect({ dispatch: false }) closeAlert = this.actions$.pipe( ofType(RTLActions.CLOSE_ALERT), map((action: RTLActions.CloseAlert) => { if (this.dialogRef) { this.dialogRef.close(); } } )); @Effect({ dispatch: false }) openConfirm = this.actions$.pipe( ofType(RTLActions.OPEN_CONFIRMATION), map((action: RTLActions.OpenConfirmation) => { action.payload.width = this.confirmWidth; this.dialogRef = this.dialog.open(ConfirmationMessageComponent, action.payload); }) ); @Effect({ dispatch: false }) closeConfirm = this.actions$.pipe( ofType(RTLActions.CLOSE_CONFIRMATION), take(1), map((action: RTLActions.CloseConfirmation) => { if (this.dialogRef) { this.dialogRef.close(); } this.logger.info(action.payload); 
return action.payload; } )); @Effect() showNodePubkey = this.actions$.pipe( ofType(RTLActions.SHOW_PUBKEY), withLatestFrom(this.store.select('root')), mergeMap(([action, rootData]: [RTLActions.ShowPubkey, fromRTLReducer.RootState]) => { if (!this.sessionService.getItem('token') || !rootData.nodeData.identity_pubkey) { this.snackBar.open('Node Pubkey does not exist.'); } else { this.store.dispatch(new RTLActions.OpenAlert({width: '70%', data: { information: rootData.nodeData, component: ShowPubkeyComponent }})); } return of({type: RTLActions.VOID}); })); @Effect() appConfigFetch = this.actions$.pipe( ofType(RTLActions.FETCH_RTL_CONFIG), mergeMap((action: RTLActions.FetchRTLConfig) => { this.screenSize = this.commonService.getScreenSize(); if(this.screenSize === ScreenSizeEnum.XS || this.screenSize === ScreenSizeEnum.SM) { this.alertWidth = '95%'; this.confirmWidth = '95%'; } else if(this.screenSize === ScreenSizeEnum.MD) { this.alertWidth = '80%'; this.confirmWidth = '80%'; } else { this.alertWidth = '55%'; this.confirmWidth = '60%'; } this.store.dispatch(new RTLActions.ClearEffectErrorRoot('FetchRTLConfig')); return this.httpClient.get(environment.CONF_API + '/rtlconf'); }), map((rtlConfig: RTLConfiguration) => { this.logger.info(rtlConfig); let searchNode: ConfigSettingsNode; rtlConfig.nodes.forEach(node => { node.settings.currencyUnits = [...CURRENCY_UNITS, node.settings.currencyUnit]; if(+node.index === rtlConfig.selectedNodeIndex) { searchNode = node; } }); if(searchNode) { this.store.dispatch(new RTLActions.SetSelelectedNode({lnNode: searchNode, isInitialSetup: true})) return { type: RTLActions.SET_RTL_CONFIG, payload: rtlConfig }; } else { return { type: RTLActions.VOID } } }, catchError((err) => { this.handleErrorWithoutAlert('FetchRTLConfig', err); return of({type: RTLActions.VOID}); }) )); @Effect() settingSave = this.actions$.pipe( ofType(RTLActions.SAVE_SETTINGS), mergeMap((action: RTLActions.SaveSettings) => { this.store.dispatch(new 
RTLActions.ClearEffectErrorRoot('UpdateSettings')); if(action.payload.settings && action.payload.defaultNodeIndex) { let settingsRes = this.httpClient.post<Settings>(environment.CONF_API, { updatedSettings: action.payload.settings }); let defaultNodeRes = this.httpClient.post(environment.CONF_API + '/updateDefaultNode', { defaultNodeIndex: action.payload.defaultNodeIndex }); return forkJoin([settingsRes, defaultNodeRes]); } else if(action.payload.settings && !action.payload.defaultNodeIndex) { return this.httpClient.post<Settings>(environment.CONF_API, { updatedSettings: action.payload.settings }); } else if(!action.payload.settings && action.payload.defaultNodeIndex) { return this.httpClient.post(environment.CONF_API + '/updateDefaultNode', { defaultNodeIndex: action.payload.defaultNodeIndex }); } }), map((updateStatus: any) => { this.store.dispatch(new RTLActions.CloseSpinner()); this.logger.info(updateStatus); return { type: RTLActions.OPEN_SNACK_BAR, payload: (!updateStatus.length) ? updateStatus.message + '.' : updateStatus[0].message + '.' }; }, catchError((err) => { this.store.dispatch(new RTLActions.EffectErrorRoot({ action: 'UpdateSettings', code: (!err.length) ? err.status : err[0].status, message: (!err.length) ? err.error.error : err[0].error.error })); this.handleErrorWithAlert('ERROR', 'Update Settings Failed!', environment.CONF_API, (!err.length) ? 
err : err[0]); return of({type: RTLActions.VOID}); }) )); @Effect() twoFASettingSave = this.actions$.pipe( ofType(RTLActions.TWO_FA_SAVE_SETTINGS), mergeMap((action: RTLActions.TwoFASaveSettings) => { this.store.dispatch(new RTLActions.ClearEffectErrorRoot('Update2FASettings')); return this.httpClient.post(environment.CONF_API + '/update2FA', { secret2fa: action.payload.secret2fa }); }), map((updateStatus: any) => { this.store.dispatch(new RTLActions.CloseSpinner()); this.logger.info(updateStatus); return { type: RTLActions.VOID }; }, catchError((err) => { this.store.dispatch(new RTLActions.EffectErrorRoot({ action: 'Update2FASettings', code: (!err.length) ? err.status : err[0].status, message: (!err.length) ? err.error.error : err[0].error.error })); this.handleErrorWithAlert('ERROR', 'Update 2FA Settings Failed!', environment.CONF_API, (!err.length) ? err : err[0]); return of({type: RTLActions.VOID}); }) )); @Effect() configFetch = this.actions$.pipe( ofType(RTLActions.FETCH_CONFIG), mergeMap((action: RTLActions.FetchConfig) => { this.store.dispatch(new RTLActions.ClearEffectErrorRoot('fetchConfig')); return this.httpClient.get(environment.CONF_API + '/config/' + action.payload) .pipe( map((configFile: any) => { this.store.dispatch(new RTLActions.CloseSpinner()); return { type: RTLActions.SHOW_CONFIG, payload: configFile }; }), catchError((err: any) => { this.store.dispatch(new RTLActions.EffectErrorRoot({ action: 'fetchConfig', code: err.status, message: err.error.error })); this.handleErrorWithAlert('ERROR', 'Fetch Config Failed!', environment.CONF_API + '/config/' + action.payload, err); return of({type: RTLActions.VOID}); } )); }) ); @Effect({ dispatch: false }) showLnConfig = this.actions$.pipe( ofType(RTLActions.SHOW_CONFIG), map((action: RTLActions.ShowConfig) => { return action.payload; }) ); @Effect() isAuthorized = this.actions$.pipe( ofType(RTLActions.IS_AUTHORIZED), withLatestFrom(this.store.select('root')), mergeMap(([action, store]: 
[RTLActions.IsAuthorized, any]) => { this.store.dispatch(new RTLActions.ClearEffectErrorRoot('IsAuthorized')); return this.httpClient.post(environment.AUTHENTICATE_API, { authenticateWith: (!action.payload || action.payload.trim() === '') ? AuthenticateWith.TOKEN : AuthenticateWith.PASSWORD, authenticationValue: (!action.payload || action.payload.trim() === '') ? (this.sessionService.getItem('token') ? this.sessionService.getItem('token') : '') : action.payload }) .pipe( map((postRes: any) => { this.logger.info(postRes); this.logger.info('Successfully Authorized!'); return { type: RTLActions.IS_AUTHORIZED_RES, payload: postRes }; }), catchError((err) => { this.store.dispatch(new RTLActions.EffectErrorRoot({ action: 'IsAuthorized', code: err.status, message: err.error.message })); this.handleErrorWithAlert('ERROR', 'Authorization Failed', environment.AUTHENTICATE_API, err); return of({ type: RTLActions.IS_AUTHORIZED_RES, payload: 'ERROR' }); }) ); })); @Effect({ dispatch: false }) isAuthorizedRes = this.actions$.pipe( ofType(RTLActions.IS_AUTHORIZED_RES), map((action: RTLActions.IsAuthorizedRes) => { return action.payload; }) ); setLoggedInDetails(initialPass: boolean, postRes: any, rootStore: any) { this.logger.info('Successfully Authorized!'); this.SetToken(postRes.token); rootStore.selNode.settings.currencyUnits = [...CURRENCY_UNITS, rootStore.selNode.settings.currencyUnit]; if(initialPass) { this.store.dispatch(new RTLActions.OpenSnackBar('Reset your password.')); this.router.navigate(['/settings'], { state: { loadTab: 'authSettings', initializeNodeData: true }}); } else { this.store.dispatch(new RTLActions.SetSelelectedNode({lnNode: rootStore.selNode, isInitialSetup: true})); } } @Effect({ dispatch: false }) authLogin = this.actions$.pipe( ofType(RTLActions.LOGIN), withLatestFrom(this.store.select('root')), mergeMap(([action, rootStore]: [RTLActions.Login, fromRTLReducer.RootState]) => { this.store.dispatch(new RTLActions.ClearEffectErrorLnd('FetchInfo')); 
this.store.dispatch(new RTLActions.ClearEffectErrorCl('FetchInfoCL')); this.store.dispatch(new RTLActions.ClearEffectErrorRoot('Login')); return this.httpClient.post(environment.AUTHENTICATE_API, { authenticateWith: (!action.payload.password) ? AuthenticateWith.TOKEN : AuthenticateWith.PASSWORD, authenticationValue: (!action.payload.password) ? (this.sessionService.getItem('token') ? this.sessionService.getItem('token') : '') : action.payload.password }) .pipe( map((postRes: any) => { this.logger.info(postRes); this.setLoggedInDetails(action.payload.initialPass, postRes, rootStore); }), catchError((err) => { this.logger.info('Redirecting to Login Error Page'); this.handleErrorWithAlert('ERROR', 'Authorization Failed!', environment.AUTHENTICATE_API, {status: err.status, error: err.error.error}); this.store.dispatch(new RTLActions.EffectErrorRoot({ action: 'Login', code: err.status, message: err.error.error })); if (+rootStore.appConfig.sso.rtlSSO) { this.router.navigate(['/error'], { state: { errorCode: '401', errorMessage: 'Single Sign On Failed!' 
}}); } else { this.router.navigate([rootStore.appConfig.sso.logoutRedirectLink]); } return of({type: RTLActions.VOID}); }) ); })); @Effect({ dispatch: false }) tokenVerify = this.actions$.pipe( ofType(RTLActions.VERIFY_TWO_FA), withLatestFrom(this.store.select('root')), mergeMap(([action, rootStore]: [RTLActions.VerifyTwoFA, fromRTLReducer.RootState]) => { this.store.dispatch(new RTLActions.ClearEffectErrorRoot('VerifyToken')); return this.httpClient.post(environment.AUTHENTICATE_API + '/token', {authentication2FA: action.payload.token}) .pipe( map((postRes: any) => { this.logger.info(postRes); this.logger.info('Token Successfully Verified!'); this.setLoggedInDetails(false, action.payload.authResponse, rootStore); }), catchError((err) => { this.handleErrorWithAlert('ERROR', 'Authorization Failed!', environment.AUTHENTICATE_API + '/token', {status: err.status, error: err.error.error}); this.store.dispatch(new RTLActions.EffectErrorRoot({ action: 'VerifyToken', code: err.status, message: err.error.error })); return of({type: RTLActions.VOID}); }) ); })); @Effect({ dispatch: false }) logOut = this.actions$.pipe( ofType(RTLActions.LOGOUT), withLatestFrom(this.store.select('root')), mergeMap(([action, store]) => { if (+store.appConfig.sso.rtlSSO) { window.location.href = store.appConfig.sso.logoutRedirectLink; } else { this.router.navigate(['/login']); } this.sessionService.removeItem('clUnlocked'); this.sessionService.removeItem('lndUnlocked'); this.sessionService.removeItem('token'); this.logger.warn('LOGGED OUT'); return of(); })); @Effect({ dispatch: false }) resetPassword = this.actions$.pipe( ofType(RTLActions.RESET_PASSWORD), withLatestFrom(this.store.select('root')), mergeMap(([action, rootStore]: [RTLActions.ResetPassword, fromRTLReducer.RootState]) => { this.store.dispatch(new RTLActions.ClearEffectErrorRoot('ResetPassword')); return this.httpClient.post(environment.AUTHENTICATE_API + '/reset', { currPassword: action.payload.currPassword, newPassword: 
action.payload.newPassword }) .pipe( map((postRes: any) => { this.logger.info(postRes); this.logger.info('Password Reset Successful!'); this.store.dispatch(new RTLActions.OpenSnackBar('Password Reset Successful!')); this.SetToken(postRes.token); }), catchError((err) => { this.store.dispatch(new RTLActions.EffectErrorRoot({ action: 'ResetPassword', code: err.status, message: err.error.message })); this.handleErrorWithAlert('ERROR', 'Password Reset Failed!', environment.AUTHENTICATE_API + '/reset', err.error); return of({type: RTLActions.VOID}); }) ); })); @Effect() setSelectedNode = this.actions$.pipe( ofType(RTLActions.SET_SELECTED_NODE), mergeMap((action: RTLActions.SetSelelectedNode) => { this.store.dispatch(new RTLActions.ClearEffectErrorRoot('UpdateSelNode')); return this.httpClient.post(environment.CONF_API + '/updateSelNode', { selNodeIndex: action.payload.lnNode.index }) .pipe( map((postRes: any) => { this.logger.info(postRes); this.store.dispatch(new RTLActions.CloseSpinner()); this.initializeNode(action.payload.lnNode, action.payload.isInitialSetup); return { type: RTLActions.VOID }; }), catchError((err: any) => { this.store.dispatch(new RTLActions.EffectErrorRoot({ action: 'UpdateSelNode', code: err.status, message: err.error.message })); this.handleErrorWithAlert('ERROR', 'Update Selected Node Failed!', environment.CONF_API + '/updateSelNode', err); return of({type: RTLActions.VOID}); }) ); } )); initializeNode(node: any, isInitialSetup: boolean) { const landingPage = isInitialSetup ? 
'' : 'HOME'; let selNode = {}; if(node.settings.fiatConversion && node.settings.currencyUnit) { selNode = { userPersona: node.settings.userPersona, channelBackupPath: node.settings.channelBackupPath, selCurrencyUnit: node.settings.currencyUnit, currencyUnits: [...CURRENCY_UNITS, node.settings.currencyUnit], fiatConversion: node.settings.fiatConversion, lnImplementation: node.lnImplementation, swapServerUrl: node.settings.swapServerUrl }; } else { selNode = { userPersona: node.settings.userPersona, channelBackupPath: node.settings.channelBackupPath, selCurrencyUnit: node.settings.currencyUnit, currencyUnits: CURRENCY_UNITS, fiatConversion: node.settings.fiatConversion, lnImplementation: node.lnImplementation, swapServerUrl: node.settings.swapServerUrl }; } this.store.dispatch(new RTLActions.ResetRootStore(node)); this.store.dispatch(new RTLActions.ResetLNDStore(selNode)); this.store.dispatch(new RTLActions.ResetCLStore(selNode)); if(this.sessionService.getItem('token')) { node.lnImplementation = node.lnImplementation.toUpperCase(); this.dataService.setChildAPIUrl(node.lnImplementation); if(node.lnImplementation === 'CLT') { this.CHILD_API_URL = API_URL + '/cl'; this.store.dispatch(new RTLActions.FetchInfoCL({loadPage: landingPage})); } else { this.CHILD_API_URL = API_URL + '/lnd'; this.store.dispatch(new RTLActions.FetchInfo({loadPage: landingPage})); } } } SetToken(token: string) { if (token) { this.sessionService.setItem('lndUnlocked', 'true'); this.sessionService.setItem('token', token); } else { this.sessionService.removeItem('lndUnlocked'); this.sessionService.removeItem('token'); } } handleErrorWithoutAlert(actionName: string, err: { status: number, error: any }) { this.logger.error('ERROR IN: ' + actionName + '\n' + JSON.stringify(err)); if (err.status === 401) { this.logger.info('Redirecting to Login'); this.store.dispatch(new RTLActions.Logout()); } else { this.store.dispatch(new RTLActions.EffectErrorRoot({ action: actionName, code: err.status.toString(), 
message: err.error.error })); } } handleErrorWithAlert(alertType: string, alertTitle: string, errURL: string, err: { status: number, error: any }) { this.logger.error(err); if (err.status === 401) { this.logger.info('Redirecting to Login'); this.store.dispatch(new RTLActions.Logout()); } else { this.store.dispatch(new RTLActions.CloseSpinner()); this.store.dispatch(new RTLActions.OpenAlert({data: { type: alertType, alertTitle: alertTitle, message: { code: err.status ? err.status : 'Unknown Error', message: (err.error && err.error.error) ? err.error.error : (err.error) ? err.error : 'Unknown Error', URL: errURL }, component: ErrorMessageComponent } })); } } ngOnDestroy() { this.unSubs.forEach(completeSub => { completeSub.next(); completeSub.complete(); }); } }
#!/usr/bin/env bash
# Fail fast: abort on errors, unset variables, and failed pipeline stages.
set -euo pipefail

# Enable shell tracing when the TRACE environment variable is set (non-empty).
if [[ -n ${TRACE:-} ]]; then
    set -x
fi

# Sanity-check that the lint/test toolchain is installed; `set -e` aborts the
# script if either tool is missing, and the versions are printed for CI logs.
shellcheck --version
bats --version
#!/bin/sh
#
# Copyright (c) 2018-2020, Christer Edwards <christer.edwards@gmail.com>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
#   list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
#   this list of conditions and the following disclaimer in the documentation
#   and/or other materials provided with the distribution.
#
# * Neither the name of the copyright holder nor the names of its
#   contributors may be used to endorse or promote products derived from
#   this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

# Provides the COLOR_* escape-sequence variables used for terminal output below.
. /usr/local/share/bastille/colors.pre.sh

# usage: print colored usage text for `bastille console` and exit non-zero.
usage() {
    echo -e "${COLOR_RED}Usage: bastille console TARGET [user]'.${COLOR_RESET}"
    exit 1
}

# Handle special-case commands first.
# Show usage for explicit help flags before any argument-count validation.
case "$1" in
    help|-h|--help)
        usage
        ;;
esac

# Accept exactly one (TARGET) or two (TARGET + user) arguments.
if [ $# -gt 2 ] || [ $# -lt 1 ]; then
    usage
fi

TARGET="${1}"
shift
# Optional login user; safely defaults to empty when only TARGET was given
# (the original referenced "${1}" after the shift even when absent).
USER="${1:-}"

# 'ALL' targets every running jail; otherwise match the named jail exactly.
if [ "${TARGET}" = 'ALL' ]; then
    JAILS=$(jls name)
else
    JAILS=$(jls name | grep -w "${TARGET}")
fi

# validate_user: log into the jail as USER only when the account exists and
# its login shell is listed in the jail's /etc/shells; otherwise report why.
# All ${_jail} expansions are quoted to survive unusual jail names.
validate_user() {
    if jexec -l "${_jail}" id "${USER}" >/dev/null 2>&1; then
        USER_SHELL="$(jexec -l "${_jail}" getent passwd "${USER}" | cut -d: -f7)"
        if [ -n "${USER_SHELL}" ]; then
            if jexec -l "${_jail}" grep -qwF "${USER_SHELL}" /etc/shells; then
                jexec -l "${_jail}" /usr/bin/login -f "${USER}"
            else
                echo "Invalid shell for user ${USER}"
            fi
        else
            echo "User ${USER} has no shell"
        fi
    else
        echo "Unknown user ${USER}"
    fi
}

for _jail in ${JAILS}; do
    echo -e "${COLOR_GREEN}[${_jail}]:${COLOR_RESET}"
    # `[ -n ... ]` replaces the original `[ ! -z ... ]` double negative.
    if [ -n "${USER}" ]; then
        validate_user
    else
        # No user given: open a root console in the jail.
        jexec -l "${_jail}" /usr/bin/login -f root
    fi
    echo
done
<gh_stars>0
import Controllers.ControllerAdotante;
import Controllers.ControllerAnimal;
import Controllers.ControllerPedidos;
import Controllers.ControllerUser;
import Objects.Adotante;
import Objects.Animal;
import Objects.PedidoAdocao;
import Objects.User;
import java.awt.Graphics;
import java.awt.Image;
import java.awt.image.BufferedImage;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.sql.Date;
import java.util.ArrayList;
import java.util.Base64;
import javax.imageio.ImageIO;
import javax.swing.JOptionPane;

/*
 * Click nbfs://nbhost/SystemFileSystem/Templates/Licenses/license-default.txt to change this license
 * Click nbfs://nbhost/SystemFileSystem/Templates/Classes/Class.java to edit this template
 */

/**
 * Command-line entry point that prints every adoption request
 * ({@link PedidoAdocao}) fetched through the {@link ControllerPedidos}
 * data layer.
 *
 * @author Gilberto
 */
public class PedidosAdocao {

    public static void main(String[] args) throws Exception {
        pegarPedidosAdocao();
    }

    /**
     * Fetches all adoption requests and prints each one to standard output.
     *
     * @throws Exception propagated from the controller / persistence layer
     */
    public static void pegarPedidosAdocao() throws Exception {
        ControllerPedidos controller = new ControllerPedidos();
        // Enhanced for-loop: clearer than indexed access and avoids repeated
        // size() calls on the returned list.
        for (PedidoAdocao pedido : controller.getList()) {
            System.out.println(pedido);
        }
    }
}
import tensorflow as tf


class LayerNormalization(tf.keras.layers.Layer):
    """Layer normalization over the last axis of the input.

    Normalizes each feature vector to zero mean / unit variance along the
    final dimension, then applies a learned elementwise scale (``gamma``)
    and shift (``beta``).
    """

    def __init__(self, eps=1e-6):
        super(LayerNormalization, self).__init__()
        # Small constant added to the variance to avoid division by zero.
        self.eps = eps

    def build(self, input_shape):
        # One trainable scale/shift value per feature (last axis).
        self.gamma = self.add_weight(name='gamma', shape=input_shape[-1:],
                                     initializer='ones', trainable=True)
        self.beta = self.add_weight(name='beta', shape=input_shape[-1:],
                                    initializer='zeros', trainable=True)
        super(LayerNormalization, self).build(input_shape)

    def call(self, inputs):
        # Normalize along the last axis; keepdims=True so the statistics
        # broadcast back against the input.
        mean, variance = tf.nn.moments(inputs, axes=-1, keepdims=True)
        normalized = (inputs - mean) / tf.sqrt(variance + self.eps)
        return self.gamma * normalized + self.beta

    def get_config(self):
        # Serialize eps alongside the base layer config so the layer can be
        # reconstructed from a saved model.
        config = {'eps': self.eps}
        base_config = super(LayerNormalization, self).get_config()
        # BUG FIX: the original return was missing its closing parenthesis,
        # which made this module fail to parse at import time.
        return dict(list(base_config.items()) + list(config.items()))
#!/bin/bash
# Benchmark launcher configuration for the Spark/MLlib TriangleCount workload.

# Resolve the absolute directory of this script (follows symlinks via -P).
this="${BASH_SOURCE-$0}"
bin=$(cd -P -- "$(dirname -- "$this")" && pwd -P)

# Load shared environment settings; `set -a` auto-exports every variable
# defined while env.sh is being sourced.
if [ -f "${bin}/../conf/env.sh" ]; then
    set -a
    . "${bin}/../conf/env.sh"
    set +a
fi

APP=TriangleCount
# HDFS layout for this workload (DATA_HDFS comes from env.sh).
APP_DIR=${DATA_HDFS}/${APP}
INPUT_HDFS=${DATA_HDFS}/${APP}/Input
OUTPUT_HDFS=${DATA_HDFS}/${APP}/Output

# either stand alone or yarn cluster
APP_MASTER=${SPARK_MASTER}

# Helpers defined in env.sh; presumably populate data-generation and run
# options used by the caller — TODO confirm against env.sh.
set_gendata_opt
set_run_opt

# print_config: delegate to get_config_values with the same six arguments.
function print_config(){
    get_config_values $1 $2 $3 $4 $5 $6
}

# get_config_fields: print the CSV header row for benchmark reports.
function get_config_fields(){
    local report_field=$(get_report_field_name)
    echo -n "#${report_field},AppType,nExe,driverMem,exeMem,exeCore,nPar,nIter,memoryFraction,numV,mu,sigma,reset_prob"
    echo -en "\n"
}

# get_config_values: print one CSV data row for the current configuration.
# Relies on gen_report (from env.sh) to set nexe/dmem/emem/ecore — TODO confirm.
function get_config_values(){
    gen_report $1 $2 $3 $4 $5 $6
    echo -n ",${APP}-MLlibConfig,$nexe,$dmem,$emem,$ecore,${NUM_OF_PARTITIONS},${MAX_ITERATION},${memoryFraction},${numV},${mu},${sigma},${RESET_PROB}"
    echo -en "\n"
    return 0
}
<reponame>szab100/secmgr
// Copyright 2018 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package com.google.enterprise.secmgr.common;

import junit.framework.TestCase;

/** Unit tests for {@link IdentityUtil} name/domain parsing and normalization. */
public class IdentityUtilTest extends TestCase {

  private static final String USER = "foo";
  private static final String DOMAIN = "bar.com";
  private static final String USER_DOMAIN1 = "<EMAIL>";
  private static final String USER_DOMAIN2 = "bar.com\\foo";
  private static final String USER_DOMAIN3 = "bar.com/foo";

  /** Asserts that {@code input} parses into the expected (USER, DOMAIN) pair. */
  private void checkParsesToUserAndDomain(String input) {
    String[] parsed = IdentityUtil.parseNameAndDomain(input);
    assertEquals(USER, parsed[0]);
    assertEquals(DOMAIN, parsed[1]);
  }

  public void testParseNameAndDomain() {
    // All three separator styles (email, backslash, forward slash) must
    // yield the same user/domain split.
    checkParsesToUserAndDomain(USER_DOMAIN1);
    checkParsesToUserAndDomain(USER_DOMAIN2);
    checkParsesToUserAndDomain(USER_DOMAIN3);
  }

  public void testNormalizeDomain() {
    // Empty input normalizes to null.
    assertEquals(null, IdentityUtil.normalizeDomain(""));
    // A dotted domain is truncated to its first label.
    assertEquals("foo", IdentityUtil.normalizeDomain("foo.google.com"));
    // A bare label is returned unchanged.
    assertEquals("google", IdentityUtil.normalizeDomain("google"));
  }
}
#!/bin/bash
source "$(dirname "$0")/../setup.sh"

# Fail early when skopeo is unavailable. `command -v` is the portable test
# for a tool's presence and replaces the `which` + `$?` anti-pattern
# (`which` is not guaranteed to exist and its exit status check was racy).
if ! command -v skopeo >/dev/null 2>&1; then
    echo "skopeo not available, exiting"
    exit 1
fi

OSETESTS_IMAGE="${OSETESTS_IMAGE:-quay.io/openshift/origin-tests:4.8}"

# get_ose_tests_binary: download the origin-tests image as an OCI layout and
# extract the openshift-tests binary from whichever layer contains it.
function get_ose_tests_binary {
    # As CI runs in a pod, we can't directly use the image.
    # For this reason, we download the image and fetch the binary from the right layer
    skopeo copy docker://"$OSETESTS_IMAGE" oci:osetests
    echo "Fetching openshift-tests binary from $OSETESTS_IMAGE"
    # Ensure the destination exists before the mv below (the original assumed it).
    mkdir -p _cache/tools
    for layer in osetests/blobs/sha256/*; do
        testsbin=$(tar -t -f "$layer" | grep openshift-tests)
        if [[ $testsbin ]]; then
            echo "Found $testsbin on $layer"
            tar xfv "$layer" "$testsbin"
            mv "$testsbin" _cache/tools/openshift-tests
            chmod +x _cache/tools/openshift-tests
            rm -rf osetests
            break
        fi
    done
}

# Reuse a previously extracted binary when present.
if [ -f _cache/tools/openshift-tests ]; then
    echo "openshift-tests binary already present"
else
    get_ose_tests_binary
fi

echo "Provider: $TEST_PROVIDER"
kubectl version
_cache/tools/openshift-tests run openshift/conformance/parallel --provider "${TEST_PROVIDER:-}" -o /tmp/artifacts/e2e.log --junit-dir /tmp/artifacts/junit
#!/bin/bash
# Launches the Java ETL application for a given date, with an optional row limit.

command=$(basename "$0")

# usage: print how to invoke this script.
function usage {
    echo ""
    echo "USAGE: $command <yyyy-MM-dd> <password> [<limit>]"
    echo ""
}

# Require a date and a password; the limit is optional.
if [ $# -ne 2 ] && [ $# -ne 3 ]
then
    usage
    # BUG FIX: exit non-zero so callers/CI can detect the usage error
    # (the original bare `exit` returned status 0).
    exit 1
fi

dt=$1
password=$2

limit=
if [ $# -eq 3 ]
then
    limit=$3
fi

echo "Running Java ETL application..."
# $dt and $password are quoted to survive unusual characters; $limit is
# deliberately left unquoted so that when it is empty no extra (empty)
# argument is passed to the JVM.
java -Dcom.datastax.driver.NATIVE_TRANSPORT_MAX_FRAME_SIZE_IN_MB=512 -jar target/etl-java.jar "$dt" "$password" $limit

# NOTES
# The NATIVE_TRANSPORT_MAX_FRAME_SIZE_IN_MB definition is needed to get passed this exception that occurs after reading 50K rows:
# Exception: com.datastax.driver.core.exceptions.FrameTooLongException: Response frame exceeded maximum allowed length
<filename>ze_common/test/test_random.cpp<gh_stars>10-100
// Copyright (c) 2015-2016, ETH Zurich, <NAME>, Zurich Eye
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above copyright
//       notice, this list of conditions and the following disclaimer in the
//       documentation and/or other materials provided with the distribution.
//     * Neither the name of the ETH Zurich, Wyss Zurich, Zurich Eye nor the
//       names of its contributors may be used to endorse or promote products
//       derived from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
// DISCLAIMED. IN NO EVENT SHALL ETH Zurich, <NAME>urich, Zurich Eye BE LIABLE FOR ANY
// DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
// ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include <ze/common/benchmark.hpp>
#include <ze/common/test_entrypoint.hpp>
#include <ze/common/random.hpp>
#include <ze/common/running_statistics.hpp>

// Verifies the one-shot sampling helpers: a deterministic seed (first argument
// `true`) must reproduce a fixed, known sequence, while non-deterministic
// sampling must match the requested distribution only statistically.
TEST(RandomTests, testRandomSampling)
{
  using namespace ze;

  // Deterministic sampling always yields the same series of random numbers.
  // The expected values pin the seeded generator's output exactly.
  EXPECT_EQ(sampleUniformIntDistribution<uint8_t>(true), 140u);
  EXPECT_EQ(sampleUniformIntDistribution<uint8_t>(true), 151u);
  EXPECT_EQ(sampleUniformIntDistribution<uint8_t>(true), 183u);
  EXPECT_EQ(sampleUniformIntDistribution<int>(true), 209652396);
  EXPECT_EQ(sampleUniformIntDistribution<int>(true), 398764591);
  EXPECT_EQ(sampleUniformIntDistribution<int>(true), 924231285);
  EXPECT_NEAR(sampleUniformRealDistribution<double>(true), 0.592844, 1e-5);
  EXPECT_NEAR(sampleUniformRealDistribution<double>(true), 0.844265, 1e-5);
  EXPECT_NEAR(sampleUniformRealDistribution<double>(true), 0.857945, 1e-5);
  EXPECT_NEAR(sampleNormalDistribution<double>(true, 1.0, 4.0), 5.4911797, 1e-5);
  EXPECT_NEAR(sampleNormalDistribution<double>(true, 1.0, 4.0), 1.2834369, 1e-5);
  EXPECT_NEAR(sampleNormalDistribution<double>(true, 1.0, 4.0), -4.689303, 1e-5);
  EXPECT_TRUE (flipCoin(true, 0.7));
  EXPECT_FALSE(flipCoin(true, 0.7));
  EXPECT_FALSE(flipCoin(true, 0.7));
  EXPECT_FALSE(flipCoin(true, 0.7));
  EXPECT_TRUE (flipCoin(true, 0.7));
  EXPECT_TRUE (flipCoin(true, 0.7));

  // Non-deterministic sampling, always results in different numbers:
  EXPECT_NE(sampleUniformIntDistribution<int>(false), 209652396);
  EXPECT_NE(sampleUniformIntDistribution<int>(false), 398764591);
  EXPECT_NE(sampleUniformIntDistribution<int>(false), 924231285);

  // Test mean and standard deviation of normal distribution.
  {
    RunningStatistics statistics;
    for (int i = 0; i < 10000; ++i)
    {
      statistics.addSample(sampleNormalDistribution<double>(false, 2.0, 5.0));
    }
    // Loose tolerances: 10k samples only approximate the true moments.
    EXPECT_NEAR(statistics.mean(), 2.0, 0.2);
    EXPECT_NEAR(statistics.std(), 5.0, 0.2);
  }

  // Test coin flips.
  {
    RunningStatistics statistics;
    for (int i = 0; i < 10000; ++i)
    {
      statistics.addSample(static_cast<int>(flipCoin(false, 0.2)));
    }
    EXPECT_NEAR(statistics.mean(), 0.2, 0.2);
  }
}

// Verifies the distribution-factory helpers, which return a callable that
// samples from a generator created with the given parameters.
TEST(RandomTests, testDistribution)
{
  using namespace ze;

  // Deterministic sampling always yields the same series of random numbers.
  {
    auto f = uniformDistribution<uint8_t>(true);
    EXPECT_EQ(f(), 140u);
    EXPECT_EQ(f(), 151u);
    EXPECT_EQ(f(), 183u);
  }

  // Deterministic sampling always yields the same series of random numbers.
  {
    auto f = uniformDistribution<double>(true, 1.0, 2.0);
    EXPECT_NEAR(f(), 1.59284, 1e-5);
    EXPECT_NEAR(f(), 1.84427, 1e-5);
    EXPECT_NEAR(f(), 1.85795, 1e-5);
  }

  // Deterministic sampling always yields the same series of random numbers.
  {
    auto f = normalDistribution<float>(true, 3.0, 5.0);
    EXPECT_NEAR(f(), 14.06103, 1e-5);
    EXPECT_NEAR(f(), 8.81539, 1e-5);
    EXPECT_NEAR(f(), 6.87001, 1e-5);
  }
}

// Compares the cost of per-call sampling helpers against reusing a bound
// distribution and against the raw std::mt19937 interface.
TEST(RandomTests, benchmark)
{
  using namespace ze;

  auto lambda1 = [&]()
  {
    int sum = 0;
    for (int i = 0; i < 100000; ++i)
      sum += sampleUniformIntDistribution<uint8_t>(false);
  };
  runTimingBenchmark(lambda1, 10, 10, "sampleSeparately", true);

  auto lambda2 = [&]()
  {
    int sum = 0;
    auto dist = uniformDistribution<uint8_t>(false);
    for (int i = 0; i < 100000; ++i)
      sum += dist();
  };
  runTimingBenchmark(lambda2, 10, 10, "sampleFromDistribution", true);

  auto lambda3 = [&]()
  {
    int sum = 0;
    static std::mt19937 gen_deterministic(0);
    std::uniform_int_distribution<uint8_t> distribution(0, 255);
    for (int i = 0; i < 100000; ++i)
    {
      sum += distribution(gen_deterministic);
    }
  };
  runTimingBenchmark(lambda3, 10, 10, "Using std interface", true);
}

ZE_UNITTEST_ENTRYPOINT
var assert = require("assert"),
    querystring = require("../src/querystring");

// Exercises repeated keys, mixed '&'/';' separators, empty keys and values,
// bracketed (array-style) keys, and percent-encoded nested URLs.
var sourceURL = "www.google.com/?foo=bar&foo=boo&roo=bar;bee=bop;=ghost;=ghost2;&;checkbox%5B%5D=b1;checkbox%5B%5D=b2;dd=;http=http%3A%2F%2Fw3schools.com%2Fmy%20test.asp%3Fname%3Dst%C3%A5le%26car%3Dsaab&http=http%3A%2F%2Fw3schools2.com%2Fmy%20test.asp%3Fname%3Dst%C3%A5le%26car%3Dsaab";

// Repeated keys collapse into arrays; a key with '=' but no value maps to null.
var expected = {
    "foo": ["bar", "boo"],
    "roo": "bar",
    "bee": "bop",
    "": ["ghost", "ghost2"],
    "checkbox[]": ["b1", "b2"],
    "dd": null,
    "http": [
        "http://w3schools.com/my test.asp?name=ståle&car=saab",
        "http://w3schools2.com/my test.asp?name=ståle&car=saab"
    ]
};

// BUG FIX: the original compared JSON.stringify() output of both objects,
// which makes the test depend on property insertion order. Comparing the
// structures directly with deepStrictEqual is order-independent and produces
// a readable diff on failure.
assert.deepStrictEqual(
    querystring(sourceURL),
    expected,
    "Invalid output from parsing!"
);
import { get } from "./api.service";

// Static playlist shipped with the app.
const SONGS_URL = "./songs/chillhopList.json";

/**
 * Loads the full song list.
 *
 * @returns {Promise<Array>} the songs, or an empty array when the request
 *     yields no response.
 */
export const getAll = async () => {
  const response = await get(SONGS_URL);
  if (!response) {
    return [];
  }
  return response.data;
};
<filename>src/ball_chaser/src/process_image.cpp<gh_stars>0 #include "ros/ros.h" #include "ball_chaser/DriveToTarget.h" #include <sensor_msgs/Image.h> // Camera sensor Field of view (FOV) which's defined in gazebo plugin #define HORIZONTAL_FOV 1.3962634 /* * Helper functions for converting RGB to HSL color model * HSL color is easier to identify white color if there is ambiance light involve * class RGB is the object structure of RGB * class HSL is the object structure of HSL * * function RGBToHSL to convert RGB instant and then return HSL instant * * ref.from https://www.programmingalgorithms.com/algorithm/rgb-to-hsl/cpp/ */ // RGB class for storing pixel in rgb format class RGB { public: unsigned char R; unsigned char G; unsigned char B; RGB (unsigned char r, unsigned char g, unsigned char b) { R = r; G = g; B = b; } bool Equals (RGB rgb) { return (R == rgb.R) && (G == rgb.G) && (B == rgb.B); } }; // HSL class for storing pixel in hsl format class HSL { public: int H; float S; float L; HSL (int h, float s, float l) { H = h; S = s; L = l; } bool Equals (HSL hsl) { return (H == hsl.H) && (S == hsl.S) && (L == hsl.L); } }; //RGBToHSL to convert RGB instant and then return HSL instant static HSL RGBToHSL (RGB rgb) { HSL hsl = HSL (0, 0, 0); float r = (rgb.R / 255.0f); float g = (rgb.G / 255.0f); float b = (rgb.B / 255.0f); float min_val = std::min (std::min (r, g), b); float max_val = std::max (std::max (r, g), b); float delta = max_val - min_val; hsl.L = (max_val + min_val) / 2; if (delta == 0) { hsl.H = 0; hsl.S = 0.0f; } else { hsl.S = (hsl.L <= 0.5) ? 
(delta / (max_val + min_val)) : (delta / (2 - max_val - min_val)); float hue; if (r == max_val) { hue = ((g - b) / 6) / delta; } else if (g == max_val) { hue = (1.0f / 3) + ((b - r) / 6) / delta; } else { hue = (2.0f / 3) + ((r - g) / 6) / delta; } if (hue < 0) hue += 1; if (hue > 1) hue -= 1; hsl.H = (int) (hue * 360); } return hsl; } /* * The map function is for changing one range of values into another range of values * x is input value * in_min would get mapped to out_min * in_max to out_max */ float map(float x, float in_min, float in_max, float out_min, float out_max) { return (x - in_min) * (out_max - out_min) / (in_max - in_min) + out_min; } /* * The find_white_pixel function is to convert RGB8 encoded image to HSL color * and create histogram of white pixel that can pass threshold into x_lightness[800] array */ int find_white_pixel (const sensor_msgs::Image img, float light_thres, bool verbose) { // Create array to keep histogram of white pixel int x_lightness[img.width] = { 0 }; // Tmp val of RGB int r, g, b; // Tmp instant of HSL HSL hsl_value = HSL (0, 0, 0); // For 800 loop for y axis for (int i = 0; i < img.height; i++) // For 800 loop for x axis for (int j = 0, x = 0; j < img.step; j = j + 3, x++) { // Read 3 bytes of RGB8 data store in r, g, b r = img.data[i * img.step + j]; g = img.data[i * img.step + j + 1]; b = img.data[i * img.step + j + 2]; // Create RGB instant and call function RGBToHSL then store in hsl_value hsl_value = RGBToHSL (RGB (r, g, b)); // If the pixel has a higher L value than the threshold then // x_lightness plus 1 of this particular x position if (hsl_value.L >= light_thres) { ++x_lightness[x]; } } // Vistualize white pixel histogram if (verbose) { for (int i = 0; i < img.width; i++) { // 1 score = 10 pixels int score = x_lightness[i] / 10; std::string o = ""; for (int j = 0; j < score; j++) o = o + "o"; //ROS_INFO ("[%d]:%3d %s", i, x_lightness[i], o.c_str ()); } } // Find the maximum value in x_lightness then its index int 
x_of_white = std::distance (x_lightness, std::max_element (x_lightness, x_lightness + img.width)); // If no white pixel then function return -1 if (x_lightness[x_of_white] == 0) { ROS_INFO ("White pixel's not found!"); return -1; } // Return position x of the maximum white pixel ROS_INFO ("1st maximum white pixel at X: %d", x_of_white); return x_of_white; } // Define a global client that can request services ros::ServiceClient client; // This function calls the command_robot service to drive the robot in the specified direction void drive_robot (float lin_x, float ang_z) { // TODO: Request a service and pass the velocities to it to drive the robot ball_chaser::DriveToTarget drive_cmd; drive_cmd.request.linear_x = lin_x; drive_cmd.request.angular_z = ang_z; if (!client.call (drive_cmd)) { ROS_ERROR ("Failed to execute drive command"); } // Request to drive } // This callback function continuously executes and reads the image data void process_image_callback (const sensor_msgs::Image img) { // TODO: Loop through each pixel in the image and check if there's a bright white one // Then, identify if this pixel falls in the left, mid, or right side of the image // Depending on the white ball position, call the drive_bot function and pass velocities to it // Request a stop when there's no white ball seen by the camera // Define white lightness (L) threshold of HSL, if the value is 1.0 means pure white // It can work more tolerance to ambiance light change by setting light_thres lower than 1.0 e.g. 
0.95 const float light_thres = 1.0; int x_of_white; float linear_x = 0.0, angular_z = 0.0, // Use x=0 in map function to find coff of camera sensor field of view vs -1 to 1 angular_fov_coff = map (0, -1, 1, 0, HORIZONTAL_FOV); // Call find_white_pixel to locate the maximum white pixel in the x-axis x_of_white = find_white_pixel (img, light_thres, false); // If no white pixel's found then rotate around if (x_of_white < 0) { linear_x = 0.0; angular_z = -.5; } else { // Move forward linear_x = 0.1; // Mapping x position of the white pixel to angular_z // the result of angular_x will head robot about to the center of the white ball angular_z = -map (x_of_white, 0, img.width - 1, -angular_fov_coff, angular_fov_coff); ROS_INFO ("White at x :%d, calculated angular_z:%f", x_of_white, angular_z); } drive_robot (linear_x, angular_z); } int main (int argc, char **argv) { // Initialize the process_image node and create a handle to it ros::init (argc, argv, "process_image"); ros::NodeHandle n; // Define a client service capable of requesting services from command_robot client = n.serviceClient < ball_chaser::DriveToTarget > ("/ball_chaser/command_robot"); // Subscribe to /camera/rgb/image_raw topic to read the image data inside the process_image_callback function ros::Subscriber sub1 = n.subscribe ("/camera/rgb/image_raw", 10, process_image_callback); // Handle ROS communication events ros::spin (); return 0; }
#!/bin/bash
# SPDX-License-Identifier: BSD-3-Clause
# Copyright(c) 2018-2020 Intel Corporation. All rights reserved.

# Runs the SOF component testbench for a single component, either from
# positional arguments or from a sourced configuration file (-t).

# stop on most errors
set -e

# usage: print invocation help, including a sample configuration file.
usage () {
cat <<EOFHELP
Usage: $0 <options> <comp direction bits_in bits_out fs_in fs_out input output>
Example 1: $0 volume playback 16 16 48000 48000 input.raw output.raw
Example 2: $0 -e volume_trace.txt -t volume_config.sh
Where volume_config.sh could be e.g. next. Minimal configuration need is only the COMP line.

# Volume component configuration
COMP=volume
DIRECTION=playback
BITS_IN=16
BITS_OUT=16
CHANNELS_IN=2
CHANNELS_OUT=2
FS_IN=48000
FS_OUT=48000
FN_IN=input.raw
FN_OUT=output.raw
FN_TRACE:=trace.txt # This is default value if FN_TRACE is not set via -e option
VALGRIND=yes
EOFHELP
}

# parse_args: populate configuration from defaults, command-line options,
# a sourced config file (-t), and/or eight positional arguments.
parse_args () {
    # Defaults
    DIRECTION=playback
    BITS_IN=16
    BITS_OUT=
    CHANNELS_IN=2
    CHANNELS_OUT=
    FS_IN=48000
    FS_OUT=
    VALGRIND=true
    DEBUG=
    SOURCED_CONFIG=false
    FN_TRACE=
    EXTRA_OPTS=

    while getopts ":he:t:" opt; do
        case "${opt}" in
            e)
                # Redirect testbench stderr (trace) to this file.
                FN_TRACE="${OPTARG}"
                ;;
            h)
                usage
                exit
                ;;
            t)
                # Source a configuration file instead of positional args.
                # shellcheck disable=SC1090
                source "${OPTARG}"
                SOURCED_CONFIG=true
                ;;
            *)
                usage
                exit 1
                ;;
        esac
    done
    shift $((OPTIND -1))

    # Without a sourced config, exactly eight positional arguments are required.
    if ! "$SOURCED_CONFIG"; then
        [ $# -eq 8 ] ||
        {
            usage "$0"
            exit 1
        }
        COMP="$1"
        DIRECTION="$2"
        BITS_IN="$3"
        BITS_OUT="$4"
        FS_IN="$5"
        FS_OUT="$6"
        FN_IN="$7"
        FN_OUT="$8"
    fi

    # Output format/rate/channels default to matching the input side.
    if [[ -z $BITS_OUT ]]; then
        BITS_OUT=$BITS_IN
    fi

    if [[ -z $FS_OUT ]]; then
        FS_OUT=$FS_IN
    fi

    if [[ -z $CHANNELS_OUT ]]; then
        CHANNELS_OUT=$CHANNELS_IN
    fi
}

# delete_file_check: remove the file if it exists (stale results).
delete_file_check () {
    if [ -f "$1" ]; then
        rm -f "$1"
    fi
}

# run_testbench: run the (optionally valgrind-wrapped) testbench command,
# redirecting stderr to FN_TRACE when one was requested.
run_testbench () {
    delete_file_check "$FN_OUT"
    delete_file_check "$FN_TRACE"
    if [ -z "$FN_TRACE" ]; then
        # shellcheck disable=SC2086
        $VALGRIND_CMD $CMD
    else
        # shellcheck disable=SC2086
        $VALGRIND_CMD $CMD 2> "$FN_TRACE"
    fi
}

parse_args "$@"

# Paths
HOST_ROOT=../../testbench/build_testbench
HOST_EXE=$HOST_ROOT/install/bin/testbench
HOST_LIB=$HOST_ROOT/sof_ep/install/lib
TPLG_LIB=$HOST_ROOT/sof_parser/install/lib
TPLG_DIR=../../build_tools/test/topology

# Use topology from component test topologies
INFMT=s${BITS_IN}le
OUTFMT=s${BITS_OUT}le
TPLGFN=test-${DIRECTION}-ssp5-mclk-0-I2S-${COMP}-${INFMT}-${OUTFMT}-48k-24576k-codec.tplg
TPLG=${TPLG_DIR}/${TPLGFN}

# If binary test vectors
if [ "${FN_IN: -4}" == ".raw" ]; then
    BINFMT="-b S${BITS_IN}_LE"
else
    BINFMT=""
fi

# Run command
OPTS="$DEBUG -r $FS_IN -R $FS_OUT -c $CHANNELS_IN -n $CHANNELS_OUT $BINFMT -t $TPLG"
DATA="-i $FN_IN -o $FN_OUT"
ARG="$OPTS $EXTRA_OPTS $DATA"
CMD="$HOST_EXE $ARG"
if "$VALGRIND"; then
    VALGRIND_CMD="valgrind --leak-check=yes --error-exitcode=1"
else
    VALGRIND_CMD=
fi

export LD_LIBRARY_PATH=$HOST_LIB:$TPLG_LIB

# Run test bench
echo "Command:  $HOST_EXE"
echo "Argument: $ARG"
echo "LD_LIBRARY_PATH=${LD_LIBRARY_PATH}"
run_testbench
/**
 * @private
 *
 * Behavior that lazily attaches an Ext.util.Draggable to a component and
 * keeps its lifecycle tied to the component's.
 */
Ext.define('Ext.behavior.Draggable', {
    extend: 'Ext.behavior.Behavior',

    requires: [
        'Ext.util.Draggable'
    ],

    // Creates, reconfigures, or destroys the underlying Ext.util.Draggable
    // depending on the given config (a falsy config tears it down).
    setConfig: function(config) {
        var draggable = this.draggable,
            component = this.component;

        if (config) {
            if (!draggable) {
                // First configuration: set the component's translatable before
                // creating the Draggable, which then shares that instance.
                component.setTranslatable(config.translatable);
                this.draggable = draggable = new Ext.util.Draggable(config);
                draggable.setTranslatable(component.getTranslatable());
                draggable.setElement(component.renderElement);
                draggable.on('destroy', 'onDraggableDestroy', this);

                // NOTE(review): this.listeners is presumably supplied by the
                // Behavior base class or subclass — confirm before relying on it.
                component.on(this.listeners);
            }
            else if (Ext.isObject(config)) {
                // Subsequent object configs just update the existing instance.
                draggable.setConfig(config);
            }
        }
        else if (draggable) {
            draggable.destroy();
        }

        return this;
    },

    getDraggable: function() {
        return this.draggable;
    },

    // Drop our reference once the Draggable has destroyed itself.
    onDraggableDestroy: function() {
        delete this.draggable;
    },

    // Tear the Draggable down together with the owning component.
    onComponentDestroy: function() {
        var draggable = this.draggable;

        if (draggable) {
            draggable.destroy();
        }
    }
});
<filename>internal/model/init.go
package model

import (
	"Opendulum/global"
	"Opendulum/pkg/setting"
	"fmt"

	"github.com/jinzhu/gorm"
	_ "github.com/jinzhu/gorm/dialects/mysql"
)

// NewDBEngine opens a GORM connection to the MySQL database described by
// databaseSetting and applies the configured connection-pool limits.
func NewDBEngine(databaseSetting *setting.DatabaseSettings) (*gorm.DB, error) {
	// DSN: user:pass@tcp(host)/dbname?charset=...&parseTime=...&loc=Local
	args := fmt.Sprintf("%s:%s@tcp(%s)/%s?charset=%s&parseTime=%t&loc=Local",
		databaseSetting.Username,
		databaseSetting.Password,
		databaseSetting.Host,
		databaseSetting.DBName,
		databaseSetting.Charset,
		databaseSetting.ParseTime)
	db, err := gorm.Open(databaseSetting.DBType, args)
	if err != nil {
		return nil, err
	}
	// Verbose SQL logging only in debug mode.
	if global.ServerSetting.RunMode == "debug" {
		db.LogMode(true)
	}
	// Use singular table names (struct Good -> table `good`).
	db.SingularTable(true)
	db.DB().SetMaxIdleConns(databaseSetting.MaxIdleConns)
	db.DB().SetMaxOpenConns(databaseSetting.MaxOpenConns)

	return db, nil
}

// SetupDBEngine initializes the package-global DB engine and auto-migrates
// the model tables.
func SetupDBEngine() error {
	var err error
	global.DBEngine, err = NewDBEngine(global.DatabaseSetting)
	if err != nil {
		return err
	}

	_ = global.DBEngine.AutoMigrate(&Good{}) // auto-update schema; errors intentionally ignored
	_ = global.DBEngine.AutoMigrate(&GoodJingDong{})

	return nil
}
#!/bin/bash
# Rewrites the full repository history (all branches and tags) so that every
# file lives under a new subfolder, using git filter-branch with a helper
# index filter.

if [ "$#" -ne 1 ]; then
    echo "Usage: $0 name_of_new_subfolder"
    echo "Don't forget to checkout all branches first - please use checkout_all_branches.sh for that"
    exit 1
fi

echo "Moving the whole repo to subfolder $1"
# BUG FIX: export the variable — filter-branch runs the helper as a child
# process, which presumably reads NEW_GIT_SUBFOLDER from the environment
# (an unexported variable would be invisible to it; confirm against the helper).
export NEW_GIT_SUBFOLDER=$1

# BUG FIX: quote the helper path so the command survives spaces in the
# script's directory (the original unquoted $(dirname $0) would word-split).
git filter-branch -f --index-filter "$(dirname "$0")/git_move_to_subfolder_helper.sh" --tag-name-filter cat -- --all
import React from "react";
// nodejs library that concatenates classes
import classnames from "classnames";
import { graphql } from 'gatsby'
import Layout from '../components/Layout'
// reactstrap components
import { Button, Container, Row, Col } from "reactstrap";
// core components
import DemoNavbar from "../components/Navbars/DemoNavbar.jsx";
import CardsFooter from "../components/Footers/CardsFooter.jsx";
import ServiceNavigation from "../components/ServicesNavigation";

// Static directory of practitioners rendered by <ServiceNavigation>.
// Each entry's `videos` items pair a playable file (`video`) with a
// poster image (`thumbnail`).
const practitioners = [
  {
    name: "<NAME>",
    role: "Chiropractor",
    location: "Integrate Healthcare Collective, Ottawa, ON.",
    eduTraining: " B.Sc in Human Kinetics, University of Guelph | M.Sc in Neuroscience, University of Guelph | Graduate: Canadian Memorial Chiropractic College, 2001",
    serviceProvided: "Chiropractic | Manual Therapy | Diversified adjustments and mobilizations | ART®: Active Release Techniques | Cox Flexion Distraction Technique | Neurofunctional Medical Acupuncture | Graston® Technique | Myofascial Release Techniques",
    specialization: "Evidence-based Spine Care | Musculoskeletal pain | Injury Prevention",
    experienceWithMusicians: "Treating injuries related to playing an instrument since 2001",
    language: "English",
    // NOTE(review): relative path differs from the "/img/..." pattern used by
    // every other entry — confirm this resolves correctly when deployed.
    image: "../img/geoff.jpg",
    moreInfo: [
      "https://www.integrateottawa.ca/geoff-outerbridge-msc-dc",
      "https://www.worldspinecare.org/"
    ],
    bookAppointments: "https://www.worldspinecare.org/",
    serviceCategory: "Chiropractic",
    vidPresenterName: "<NAME>",
    // NOTE(review): key is spelled "learMoreText" throughout; renaming would
    // break consumers (e.g. ServiceNavigation), so it is kept as-is.
    learMoreText: "To learn more about chiropractic and Dr. Geoff Outerbridge:",
    learnMoreLink: "https://www.integrateottawa.ca/geoff-outerbridge-msc-dc",
    videos: [
      { video: "/videos/geoff-intro.mp4", thumbnail: "/img/geoff-intro.png" },
      { video: "/videos/geoff-techniques.mp4", thumbnail: "/img/geoff-treatments.png" },
      { video: "/videos/geoff-approach.mp4", thumbnail: "/img/geoff-approach.png" },
    ]
  },
  {
    name: "<NAME>",
    role: "Physiotherapist (Venezuela)",
    location: "Ottawa, ON",
    eduTraining: " B.Sc. in Physiotherapy, Arturo’s Michelena University | M.Sc. in Human Kinetics, University of Ottawa",
    serviceProvided: "Global Postural Rehabilitation Method (GPR)\n" +
      "| Neuro-musculoskeletal manual therapy | Myofascial Induction | Craniosacral therapy",
    specialization: "Musculoskeletal pain | Biomechanics and Injury prevention",
    experienceWithMusicians: "",
    language: "English and Spanish",
    image: "/img/Dapne.jpg",
    moreInfo: "",
    bookAppointments: "",
    serviceCategory: "Physiotherapy",
    vidPresenterName: "<NAME>",
    learMoreText: "",
    learnMoreLink: "",
    videos: [
      { video: "/videos/dapne-intro.mp4", thumbnail: "/img/dapne-intro.png" },
      { video: "/videos/dapne-work-as-an-educator.mp4", thumbnail: "/img/dapne-teaching.png" },
      { video: "/videos/dapne-benefits.mp4", thumbnail: "/img/dapne-benefits.png" },
    ]
  },
  {
    name: "<NAME>",
    role: "Certified Mindfulness Based\n" +
      " Cognitive Therapy Teacher",
    location: "Ottawa, ON",
    eduTraining: "B.Sc. in Occupational Therapy | M.Sc. in Speech-Language Pathology | University Certificate in Theology | Teacher training in Mindfulness Based Cognitive Therapy, University of California San Diego, 2011",
    serviceProvided: "Mindfulness Based Cognitive Therapy | Mindfulness and meditation training",
    specialization: "Performance anxiety | Depression",
    experienceWithMusicians: "Collaborated in research on music performance anxiety",
    language: "English and French",
    image: "/img/evelyn.jpg",
    moreInfo: "https://mbctottawa.wordpress.com/evelyn-tan/",
    bookAppointments: "https://mbctottawa.wordpress.com/contact/",
    serviceCategory: "Mindfulness Training",
    vidPresenterName: "<NAME>",
    learMoreText: "To learn more about Mindfulness Training and Evelyn Tan:",
    learnMoreLink: "https://mbctottawa.wordpress.com/",
    videos: [
      { video: "/videos/evelyn-intro.mp4", thumbnail: "/img/evelyn-intro.png" },
      { video: "/videos/evelyn-anxiety-and-mindfulness.mp4", thumbnail: "/img/evelyn-mindfulness.png" },
      // FIX: video/thumbnail values were swapped (the .png was listed as the
      // video and the .mp4 as the thumbnail).
      { video: "/videos/evelyn-good-fit.mp4", thumbnail: "/img/evelyn-good-fit.png" },
    ]
  },
  {
    name: "<NAME>",
    role: "Orthopaedic Manipulative Physiotherapist",
    location: "P3 Physiotherapy Clinic (owner), Metcalfe, ON. Will provide on-site services at the MWC clinic (Pérez Hall)",
    eduTraining: "B.Sc. in Physical Therapy, Queen’s University, 1988 | Advanced Diploma in Manual and Manipulative Physiotherapy, 1997 | Fellow of the Canadian Academy of Manipulative Physiotherapy, 1997",
    serviceProvided: "Manual Therapy | Gunn technique of Intramuscular Stimulation (IMS) | Manipulation of peripheral and spinal joints",
    specialization: "Musculoskeletal pain | Injury Prevention",
    experienceWithMusicians: "Experience with professional musicians since 1998, including members of NACO",
    language: "English",
    image: "/img/patricia.jpg",
    moreInfo: "http://www.phyzio.biz/patriciaPalmer.php",
    bookAppointments: "http://www.phyzio.biz/contactUs.php",
    serviceCategory: "Physiotherapy",
    vidPresenterName: "<NAME>",
    learMoreText: "To learn more about physiotherapy and Patricia Palmer:",
    learnMoreLink: "http://www.phyzio.biz/musiciansInjuries.php",
    videos: [
      { video: "/videos/pat-intro.mp4", thumbnail: "/img/pat-intro.png" },
      { video: "/videos/pat-physio.mp4", thumbnail: "/img/pat-treatments.png" },
      { video: "/videos/pat-good-fit.mp4", thumbnail: "/img/pat-physiotherapy-treatments.png" },
    ]
  },
  {
    name: "<NAME>",
    role: "Feldenkrais Practitioner",
    location: "Ottawa, ON. Will provide on-site services at the MWC clinic (Pérez Hall). Services also available in residence in Sandy Hill",
    eduTraining: "B.Mus. in Integrated Studies, specialization in piano and composition, University of Calgary | Graduate Diploma and M.A. in Piano Pedagogy Research, University of Ottawa | Ph.D. candidate in Human Kinetics, University of Ottawa | Guild Certified Feldenkrais Practitioner (Awareness through Movement and Functional Integration)",
    serviceProvided: "Feldenkrais Method: Awareness through Movement (group classes or individual) | Functional Integration (one-on-one) | Piano technique development",
    specialization: "Motor learning skills training | Sensorimotor skills training",
    experienceWithMusicians: "Addressing playing-related physical pain - Teaching healthy habits - Mind-body awareness for musicians, including pre-performance routines and daily practice tools",
    language: "English",
    image: "/img/jillian.JPG",
    moreInfo: "https://www.movingthroughmusic.com/biography.html",
    bookAppointments: "https://www.movingthroughmusic.com/contact.html",
    serviceCategory: "Feldenkrais Method",
    vidPresenterName: "<NAME>",
    learMoreText: "To learn more about Feldenkrais Awareness through Movement and Jillian Beacon:",
    learnMoreLink: "https://www.movingthroughmusic.com/feldenkrais-method.html",
    videos: [
      { video: "/videos/jillian-intro.mp4", thumbnail: "/img/jillian-intro.png" },
      { video: "/videos/jillian-feldenkrais.mp4", thumbnail: "/img/jillian-what-is-feldenkrais.png" },
      { video: "/videos/jillian-good-fit.mp4", thumbnail: "/img/jillian-benefits-from-feldenkrais.png" },
    ]
  },
  {
    name: "<NAME>",
    role: "Yoga and QiGong Instructor",
    location: "Empower Me Yoga, Ottawa, ON",
    affiliations: "Actors Equity | ACTRA",
    eduTraining: "",
    serviceProvided: " Hatha Yoga, Restorative Yoga, and Yoga Ball\n" +
      "| Meditation techniques | QiGong",
    specialization: "Postural Restoration | Body Awareness",
    experienceWithMusicians: "35 years teaching yoga for chronic pain related to playing an instrument | 35 years teaching guitar",
    language: "English",
    image: "/img/michael.png",
    moreInfo: [
      "http://www.michaelfahey.ca/",
      "shorelinestudio.ca"
    ],
    bookAppointments: "http://www.michaelfahey.ca/yoga/about-michael-fahey.php",
    serviceCategory: "Qigong and Yoga",
    vidPresenterName: "<NAME>",
    learMoreText: "To learn more about Yoga/QiGong and Michael Fahey:",
    learnMoreLink: "http://www.michaelfahey.ca/yoga/yoga-classes.php",
    videos: [
      { video: "/videos/michael-intro.mp4", thumbnail: "/img/michael-intro.png" },
      { video: "/videos/michael-yoga.mp4", thumbnail: "/img/michael-what-is-yoga.png" },
      { video: "/videos/michael-good-fit.mp4", thumbnail: "/img/michael-benefits-from-yoga.png" },
    ]
  },
  {
    name: "<NAME>",
    role: "Mental Performance Skills Coach",
    location: "Montreal, QC. Will provide on-site services at the MWC clinic (Pérez Hall)",
    eduTraining: "B.Mus. in Flute Performance, McGill University (1975) | M.H.K. with a concentration in Intervention and Consultation, University of Ottawa, 2016",
    serviceProvided: "Workshops and private counselling for professional and student musicians | Mental skills for optimal performance under stress",
    specialization: "Mental skills for music performance excellence and public presentations (dissertations, conference speeches, etc.)",
    experienceWithMusicians: "Second Flutist, Montreal Symphony Orchestra, 1978 to 2014 | Associate Professor of Flute, McGill University 1982 | Mental Performance Skills Coach, National Youth Orchestra of Canada 2019 | Flute instructor at National Youth Orchestra of Canada, Domaine Forget, and CAMMAC, retired",
    language: "English and French",
    image: "/img/carolyn.png",
    moreInfo: "https://www.mcgill.ca/music/carolyn-christie",
    bookAppointments: "mailto:<EMAIL>",
    serviceCategory: "Mental Skills Coaching",
    vidPresenterName: "<NAME>",
    learMoreText: "To learn more about mental skills coaching for performance and <NAME>:",
    learnMoreLink: "https://www.mcgill.ca/music/carolyn-christie",
    videos: [
      { video: "/videos/carolyn-intro.mp4", thumbnail: "/img/carolyn-intro.png" },
      { video: "/videos/carolyn-coaching.mp4", thumbnail: "/img/carolyn-coaching.png" },
      { video: "/videos/carolyn-good-fit.mp4", thumbnail: "/img/carolyn-good-fit.png" },
    ]
  },
  {
    name: "<NAME>",
    role: "Optometrist",
    location: "NUVO Eye Centre - Ottawa, Ontario",
    eduTraining: "O.D. University of Montreal",
    serviceProvided: "Visual Examinations, Visual Therapy",
    specialization: "Overall ocular health care for all ages,\n" +
      "Pre/post surgery care for refractive procedures and cataracts",
    experienceWithMusicians: "",
    language: "English and French",
    image: "",
    moreInfo: "https://nuvoeyes.ca/dr-francine-gauthier/",
    bookAppointments: "https://nuvoeyes.ca/request-appointment/",
    serviceCategory: "Optometry",
    vidPresenterName: "<NAME>",
    learMoreText: "To learn more about optometry and Francine Gauthier:",
    learnMoreLink: "https://nuvoeyes.ca/dr-francine-gauthier/",
    videos: [
      { video: "/videos/Gauthier-Intro.mp4", thumbnail: "/img/Gauthier-Intro.png" },
      { video: "/videos/Gauthier-vision-therapy.mp4", thumbnail: "/img/Gauthier-vision-therapy.png" },
      { video: "/videos/Gauthier-wellness.mp4", thumbnail: "/img/Gauthier-wellness.png" },
    ]
  },
  {
    name: "<NAME>, <NAME> and the Ottawa Alexander School",
    role: "Certified Alexander Technique Teachers",
    location: "Ottawa School of The Alexander Technique (director), Ottawa, ON",
    eduTraining: "Certified: Canadian Society of Teachers of the Alexander Technique",
    serviceProvided: "<NAME>",
    specialization: "Repetitive strain injuries | Performance anxiety",
    experienceWithMusicians: "",
    language: "English and French",
    image: "/img/alexander.jpg",
    moreInfo: "https://ottawaalexandertechnique.ca/en/teachers/francis-caron/",
    bookAppointments: "https://ottawaalexandertechnique.ca/en/contact/",
    serviceCategory: "Alexander Technique",
    vidPresenterName: "",
    learMoreText: "To learn more about the Alexander Technique and <NAME>:",
    learnMoreLink: "https://ottawaalexandertechnique.ca/en/",
    videos: [
      { video: "/videos/brigitte-intro.mp4", thumbnail: "/img/brigitte-intro.png" },
      { video: "/videos/brigitte-what-is-alexander.mp4", thumbnail: "/img/brigitte-what-is-alexander.png" },
      { video: "/videos/brigitte-more-alexander.mp4", thumbnail: "/img/brigitte-how-it-works.png" }
    ]
  }
]

/**
 * Practitioners & services page: hero banner plus a ServiceNavigation
 * listing built from the `practitioners` directory above.
 */
class PractitionersAndServicesTemplate extends React.Component {
  // Modal visibility flags, toggled via toggleModal(). Some flags appear
  // unused in this render; kept because toggleModal is keyed by name.
  state = {
    exampleModal: false,
    masterProgramModal: false,
    fourCourseModal: false,
    workshopsModal: false,
    masterclassModal: false
  };

  // Flip the named boolean in component state (e.g. toggleModal("exampleModal")).
  toggleModal = state => {
    this.setState({
      [state]: !this.state[state]
    });
  };

  componentDidMount() {
    // Reset scroll position when navigating to this page.
    document.documentElement.scrollTop = 0;
    document.scrollingElement.scrollTop = 0;
  }

  render() {
    return (
      <Layout>
        <DemoNavbar/>
        <main ref="main">
          <div className="position-relative">
            {/* shape Hero */}
            {/* NOTE(review): heading/copy/Explore link below appear to be
                template leftovers ("Education", creative-tim demo URL) —
                confirm intended content for this page. */}
            <section className="section section-shaped ext-large">
              <Container className="py-lg-md d-flex">
                <div className="col px-0">
                  <Row>
                    <Col lg="6">
                      <h1 className="display-3 text-white">
                        Education{" "}
                      </h1>
                      <p className="lead text-white">
                        We offer a diverse set of educational programs taught by health
                        professionals and researchers, aimed to inform about physical,
                        mental, auditory, and visual wellness.
                      </p>
                      <div className="btn-wrapper">
                        <Button
                          className="btn-icon mb-3 mb-sm-0"
                          color="info"
                          href="https://demos.creative-tim.com/argon-design-system-react/#/documentation/alerts?ref=adsr-landing-page"
                        >
                          <span className="btn-inner--icon mr-1">
                            <i className="fa fa-code"/>
                          </span>
                          <span className="btn-inner--text">Explore</span>
                        </Button>
                      </div>
                    </Col>
                  </Row>
                </div>
              </Container>
              {/* SVG separator */}
            </section>
            {/* 1st Hero Variation */}
          </div>
          <section className="section section-lg bg-gradient-default">
            <ServiceNavigation naviItems={practitioners} />
            <Container>
              <Row className="text-center justify-content-center">
                <Col lg="10">
                  <h2 className="display-3 text-white">We offer programs related to the following topics:</h2>
                  <p className="lead text-white">
                    Basic anatomical and physiological concepts applied to artists<br/>
                    Analysis and prevention of musicians' injuries<br/>
                    Strategies for minimizing effects of performance anxiety<br/>
                    Applying mindfulness to music performance<br/>
                    Auditory health and hearing loss prevention<br/>
                    Health problems faced by musicians<br/>
                    Available health approaches to improve musicians’ wellness<br/>
                    Development of healthy habits and practice strategies
                  </p>
                </Col>
              </Row>
            </Container>
          </section>
        </main>
      </Layout>
    );
  }
}

// Page wrapper; `data` (GraphQL result) is currently unused.
const PractitionersAndServices = ({data}) => {
  return (
    <PractitionersAndServicesTemplate/>
  )
}

export default PractitionersAndServices
/* class.js * * ++[black[Atomic OS Class: HxClass]++ * * Base class for Atomic OS objects * * @author <NAME> <<EMAIL> (http://www.psema4.com) * @version 2.0.0 */ var HxClass = Class.extend({ /* @constructor * @method init * @param {Object} opts Options dictionary * */ init: function(opts) { opts = opts || {}; } });
def maxLength(sentence): words = sentence.split() max_length = 0 for word in words: if len(word) > max_length: max_length = len(word) return max_length
/////////////////////////////////////////////////////////////////////////////
// Name:        splash.cpp
// Purpose:     wxSplashScreen sample
// Author:      <NAME>
// Modified by:
// Created:     04/08/2004
// Copyright:   (c) <NAME>
// Licence:     wxWindows licence
/////////////////////////////////////////////////////////////////////////////

// ============================================================================
// declarations
// ============================================================================

// ----------------------------------------------------------------------------
// headers
// ----------------------------------------------------------------------------

// For compilers that support precompilation, includes "wx/wx.h".
#include "wx/wxprec.h"

// for all others, include the necessary headers (this file is usually all you
// need because it includes almost all "standard" wxWidgets headers)
#ifndef WX_PRECOMP
    #include "wx/wx.h"
#endif

#include "wx/image.h"
#include "wx/splash.h"
#include "wx/mediactrl.h"

// ----------------------------------------------------------------------------
// resources
// ----------------------------------------------------------------------------

// the application icon (under Windows it is in resources and even
// though we could still include the XPM here it would be unused)
#ifndef wxHAS_IMAGES_IN_RESOURCES
    #include "../sample.xpm"
#endif

// for smartphone, pda and other small screens use resized embedded image
// instead of full colour png dedicated to desktops
#include "mobile.xpm"

// ----------------------------------------------------------------------------
// private classes
// ----------------------------------------------------------------------------

// Define a new application type, each program should derive a class from wxApp
class MyApp : public wxApp
{
public:
    // override base class virtuals
    // ----------------------------

    // this one is called on application startup and is a good place for the app
    // initialization (doing it here and not in the ctor allows to have an error
    // return: if OnInit() returns false, the application terminates)
    virtual bool OnInit() wxOVERRIDE;

    // draws a banner with version/copyright info onto the splash bitmap
    void DecorateSplashScreen(wxBitmap& bmp);
};

// Define a new frame type: this is going to be our main frame
class MyFrame : public wxFrame
{
public:
    // ctor(s)
    MyFrame(const wxString& title);

    // event handlers (these functions should _not_ be virtual)
    void OnQuit(wxCommandEvent& event);
    void OnAbout(wxCommandEvent& event);

    // true when running on a small (PDA-class) screen; selects the embedded
    // XPM splash image instead of the full-colour PNG
    bool m_isPda;

private:
    // any class wishing to process wxWidgets events must use this macro
    wxDECLARE_EVENT_TABLE();
};

// ----------------------------------------------------------------------------
// constants
// ----------------------------------------------------------------------------

// IDs for the controls and the menu commands
enum
{
    Minimal_Run = wxID_HIGHEST + 1
};

// ----------------------------------------------------------------------------
// event tables and other macros for wxWidgets
// ----------------------------------------------------------------------------

// the event tables connect the wxWidgets events with the functions (event
// handlers) which process them. It can be also done at run-time, but for the
// simple menu events like this the static method is much simpler.
wxBEGIN_EVENT_TABLE(MyFrame, wxFrame)
    EVT_MENU(wxID_EXIT,  MyFrame::OnQuit)
    EVT_MENU(wxID_ABOUT, MyFrame::OnAbout)
wxEND_EVENT_TABLE()

// Create a new application object: this macro will allow wxWidgets to create
// the application object during program execution (it's better than using a
// static object for many reasons) and also implements the accessor function
// wxGetApp() which will return the reference of the right type (i.e. MyApp and
// not wxApp)
wxIMPLEMENT_APP(MyApp);

// ============================================================================
// implementation
// ============================================================================

// ----------------------------------------------------------------------------
// the application class
// ----------------------------------------------------------------------------

// 'Main program' equivalent: the program execution "starts" here
bool MyApp::OnInit()
{
    if ( !wxApp::OnInit() )
        return false;

    wxImage::AddHandler(new wxPNGHandler);

    // create the main application window
    MyFrame *frame = new MyFrame("wxSplashScreen sample application");

    wxBitmap bitmap;

    // small screens use the embedded XPM; desktops load splash.png from disk
    if (frame->m_isPda)
        bitmap = wxBitmap(mobile_xpm);

    bool ok = frame->m_isPda
                 ? bitmap.IsOk()
                 : bitmap.LoadFile("splash.png", wxBITMAP_TYPE_PNG);

    if (ok)
    {
        // we can even draw dynamic artwork onto our splashscreen
        DecorateSplashScreen(bitmap);

        // show the splashscreen (auto-destroys after the 6000 ms timeout)
        new wxSplashScreen(bitmap,
            wxSPLASH_CENTRE_ON_SCREEN|wxSPLASH_TIMEOUT,
            6000, frame, wxID_ANY, wxDefaultPosition, wxDefaultSize,
            wxSIMPLE_BORDER|wxSTAY_ON_TOP);
    }

#if !defined(__WXGTK20__)
    // we don't need it at least on wxGTK with GTK+ 2.12.9
    wxYield();
#endif

    // and show it (the frames, unlike simple controls, are not shown when
    // created initially)
    frame->Show(true);

    // success: wxApp::OnRun() will be called which will enter the main message
    // loop and the application will run. If we returned false here, the
    // application would exit immediately.
    return true;
}

// Draws artwork onto our splashscreen at runtime
void MyApp::DecorateSplashScreen(wxBitmap& bmp)
{
    // use a memory DC to draw directly onto the bitmap
    wxMemoryDC memDc(bmp);

    // draw an orange box (with black outline) at the bottom of the splashscreen.
    // this box will be 10% of the height of the bitmap, and be at the bottom.
    const wxRect bannerRect(wxPoint(0, (bmp.GetHeight() / 10)*9),
        wxPoint(bmp.GetWidth(), bmp.GetHeight()));
    wxDCBrushChanger bc(memDc, wxBrush(wxColour(255, 102, 0)));
    memDc.DrawRectangle(bannerRect);
    memDc.DrawLine(bannerRect.GetTopLeft(), bannerRect.GetTopRight());

    // dynamically get the wxWidgets version to display
    wxString description = wxString::Format("wxWidgets %s", wxVERSION_NUM_DOT_STRING);

    // create a copyright notice that uses the year that this file was compiled
    wxString year(__DATE__);
    wxString copyrightLabel = wxString::Format("%s%s wxWidgets. %s",
        wxString::FromUTF8("\xc2\xa9"),
        year.Mid(year.Length() - 4),  // __DATE__ ends with the 4-digit year
        "All rights reserved.");

    // draw the (white) labels inside of our orange box (at the bottom of the splashscreen)
    memDc.SetTextForeground(*wxWHITE);

    // draw the "wxWidget" label on the left side, vertically centered.
    // note that we deflate the banner rect a little bit horizontally
    // so that the text has some padding to its left.
    memDc.DrawLabel(description, bannerRect.Deflate(5, 0), wxALIGN_CENTRE_VERTICAL|wxALIGN_LEFT);

    // draw the copyright label on the right side
    memDc.SetFont(wxFontInfo(8));
    memDc.DrawLabel(copyrightLabel, bannerRect.Deflate(5, 0),
        wxALIGN_CENTRE_VERTICAL | wxALIGN_RIGHT);
}

// ----------------------------------------------------------------------------
// main frame
// ----------------------------------------------------------------------------

// frame constructor
MyFrame::MyFrame(const wxString& title)
       : wxFrame(NULL, wxID_ANY, title)
{
    m_isPda = (wxSystemSettings::GetScreenType() <= wxSYS_SCREEN_PDA);

    // set the frame icon
    SetIcon(wxICON(sample));

#if wxUSE_MENUS
    // create a menu bar
    wxMenu *menuFile = new wxMenu;

    // the "About" item should be in the help menu
    wxMenu *helpMenu = new wxMenu;
    helpMenu->Append(wxID_ABOUT, "&About\tF1", "Show about frame");

    menuFile->Append(wxID_EXIT, "E&xit\tAlt-X", "Quit this program");

    // now append the freshly created menu to the menu bar...
    wxMenuBar *menuBar = new wxMenuBar();
    menuBar->Append(menuFile, "&File");
    menuBar->Append(helpMenu, "&Help");

    // ... and attach this menu bar to the frame
    SetMenuBar(menuBar);
#endif // wxUSE_MENUS

#if wxUSE_STATUSBAR
    // create a status bar just for fun (with 2 panes)
    CreateStatusBar(2);
    SetStatusText("Welcome to wxWidgets!");
#endif // wxUSE_STATUSBAR
}

// event handlers

void MyFrame::OnQuit(wxCommandEvent& WXUNUSED(event))
{
    // true is to force the frame to close
    Close(true);
}

void MyFrame::OnAbout(wxCommandEvent& WXUNUSED(event))
{
    wxBitmap bitmap;

    if (m_isPda)
        bitmap = wxBitmap(mobile_xpm);

    bool ok = m_isPda
                 ? bitmap.IsOk()
                 : bitmap.LoadFile("splash.png", wxBITMAP_TYPE_PNG);

    if (ok)
    {
        wxImage image = bitmap.ConvertToImage();

        // do not scale on already small screens
        if (!m_isPda)
            image.Rescale( bitmap.GetWidth()/2, bitmap.GetHeight()/2 );

        bitmap = wxBitmap(image);
        // this splash has no timeout; it stays until clicked/dismissed
        wxSplashScreen *splash = new wxSplashScreen(bitmap,
            wxSPLASH_CENTRE_ON_PARENT | wxSPLASH_NO_TIMEOUT,
            0, this, wxID_ANY,
            wxDefaultPosition, wxDefaultSize,
            wxSIMPLE_BORDER|wxSTAY_ON_TOP);

        wxWindow *win = splash->GetSplashWindow();
#if wxUSE_MEDIACTRL
        wxMediaCtrl *media = new wxMediaCtrl( win, wxID_EXIT, "press.mpg", wxPoint(2,2));
        media->Play();
#else
        wxStaticText *text = new wxStaticText( win,
                                               wxID_EXIT,
                                               "click somewhere\non this image",
                                               wxPoint(m_isPda ? 0 : 13,
                                                       m_isPda ? 0 : 11)
                                             );
        text->SetBackgroundColour(*wxWHITE);
        text->SetForegroundColour(*wxBLACK);
        wxFont font = text->GetFont();
        font.SetFractionalPointSize(2.0*font.GetFractionalPointSize()/3.0);
        text->SetFont(font);
#endif
    }
}
<reponame>eclab/mason /* Copyright 2021 by <NAME> and <NAME> University Licensed under the Academic Free License version 3.0 See the file "LICENSE" for more information */ package sim.util.distribution; /** Scale is a set of utility functions to transform a distribution into another by multiplying its values and optionally translating them. **/ public class Scale { public static AbstractContinuousDistribution scale(AbstractContinuousDistribution dist, double multiply) { return scale(dist, multiply, 0.0); } public static AbstractContinuousDistribution scale(final AbstractContinuousDistribution dist, final double multiply, final double add) { return new AbstractContinuousDistribution() { public double nextDouble() { return dist.nextDouble() * multiply + add; } }; } public static AbstractDiscreteDistribution scale(AbstractDiscreteDistribution dist, int multiply) { return scale(dist, multiply, 0); } public static AbstractDiscreteDistribution scale(final AbstractDiscreteDistribution dist, final int multiply, final int add) { return new AbstractDiscreteDistribution() { public int nextInt() { return dist.nextInt() * multiply + add; } }; } }
<filename>test/spec/oidc/endpoints/authorize.ts /*! * Copyright (c) 2015-present, Okta, Inc. and/or its affiliates. All rights reserved. * The Okta software accompanied by this notice is provided pursuant to the Apache License, Version 2.0 (the "License.") * * You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0. * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and limitations under the License. */ describe('authorize endpoint', () => { describe('buildAuthorizeParams', () => { it('throws if no clientId', () => { }); it('throws if multiple response types are specified as string', () => { }); it('converts tokenParams to query params', () => { }); it('converts array parameters "idpScope", "responseType", and "scopes" to space-separated string', () => { }); it('throws if responseType includes id_token but scopes does not include openid', () => { }); }); });
#include <cstdint> class EEPROMDevice { public: // Constructor and other necessary methods // Method to read the contents of the status register uint8_t readStatusRegister() { // Implement the logic to send the ReadStatusRegister command (0x05) to the device and retrieve the status register contents // Example: // return sendCommand(ReadStatusRegister); } // Method to disable writes to the device void writeDisable() { // Implement the logic to send the WriteDisable command (0x04) to the device // Example: // sendCommand(WriteDisable); } // Method to enable writes to the device void writeEnable() { // Implement the logic to send the WriteEnable command (0x06) to the device // Example: // sendCommand(WriteEnable); } // Method to send a generic command to the device void sendCommand(uint8_t command) { // Implement the logic to send the specified command to the device } };
/*
 * Copyright (c) Open Source Strategies, Inc.
 *
 * Opentaps is free software: you can redistribute it and/or modify it
 * under the terms of the GNU Affero General Public License as published
 * by the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * Opentaps is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU Affero General Public License for more details.
 *
 * You should have received a copy of the GNU Affero General Public License
 * along with Opentaps.  If not, see <http://www.gnu.org/licenses/>.
 */
package org.opentaps.gwt.common.server.form;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.ofbiz.base.util.Debug;
import org.ofbiz.service.DispatchContext;
import org.ofbiz.service.GenericServiceException;
import org.opentaps.gwt.common.server.InputProviderInterface;

/**
 * This is the wrapper RPC service to call ofbiz services.
 * Lifecycle: {@link #call()} runs {@link #validate()}, throws on any
 * accumulated validation error, then delegates to the subclass's
 * {@link #callService()}.
 */
public abstract class GenericService {

    private static final String MODULE = GenericService.class.getName();

    private InputProviderInterface provider;
    // can be used to hold temporary service parameters instead of getting them from the provider
    private Map<String, Object> serviceParameters;

    // store errors related to input
    private Map<String, String> customErrors; // custom errors, used for special validation, for example a field that must have a specific format
    private List<String> missingFields; // missing field error, a required field was missing
    private List<String> extraFields;  // extra field error, an unexpected field was given

    // store service results
    private Map<String, Object> callResults;

    /**
     * Creates a <code>GenericService</code> instance from an <code>InputProviderInterface</code>.
     * @param provider an <code>InputProviderInterface</code>
     */
    public GenericService(InputProviderInterface provider) {
        this.provider = provider;
        customErrors = new HashMap<String, String>();
        extraFields = new ArrayList<String>();
        missingFields = new ArrayList<String>();
    }

    /**
     * Gets the <code>InputProviderInterface</code>.
     * @return the <code>InputProviderInterface</code>
     */
    public InputProviderInterface getProvider() {
        return this.provider;
    }

    /**
     * A placeholder method that can be overridden in derived class to implement
     * input validation complementary to the service engine validation.
     * @see #addFieldError(String, String)
     * @see #addExtraFieldError(String)
     * @see #addMissingFieldError(String)
     */
    public void validate() { }

    /**
     * An utility method that can be used in {@link #validate()} to
     *  check if a required parameter is present.
     * @param fieldName the parameter name
     * @return <code>true</code> if the parameter is present
     */
    public final boolean validateParameterPresent(String fieldName) {
        if (!provider.parameterIsPresent(fieldName)) {
            addMissingFieldError(fieldName);
            return false;
        }
        return true;
    }

    /**
     * An utility method to mark an extra (unexpected) field.
     * There is no visual clue or error message in the client UI currently.
     * @param fieldName the name of the missing field in the client form
     */
    public final void addExtraFieldError(String fieldName) {
        extraFields.add(fieldName);
    }

    /**
     * An utility method to mark a missing required field.
     * There will be a visual clue and an error message in the client UI
     *  (simply because there is no such field in the UI).
     * @param fieldName the name of the missing field in the client form
     */
    public final void addMissingFieldError(String fieldName) {
        missingFields.add(fieldName);
    }

    /**
     * An utility method to set an arbitrary error message to the client field.
     * There will be a visual clue and an error message in the client UI if the
     *  field is present in the form.
     * @param fieldName the name of the missing field in the client form
     * @param error the error message
     */
    public final void addFieldError(String fieldName, String error) {
        customErrors.put(fieldName, error);
    }

    /**
     * Checks if there is any kind of validation error.
     * @return <code>true</code> if there is any kind of validation error
     */
    public final boolean hasValidationErrors() {
        return !(customErrors.isEmpty() && missingFields.isEmpty() && extraFields.isEmpty());
    }

    /**
     * Sets the service parameters to use instead of getting them from the provider; you need to reset them to null
     *  in order to use the provider parameters again.
     * @param serviceParameters the service parameters to use in the <code>callService</code> method, <code>null</code> to use the provider parameters
     */
    public void setServiceParameters(Map<String, Object> serviceParameters) {
        this.serviceParameters = serviceParameters;
    }

    /**
     * The placeholder where derived classes make the call to the service.
     * @return the service result <code>Map</code>
     * @throws GenericServiceException if an error occurs
     * @see #call()
     */
    protected abstract Map<String, Object> callService() throws GenericServiceException;

    /**
     * Most basic implementation of <code>callService</code>.
     * Makes the service input valid from the parameters (except if some parameters are missing)
     *  with the addition of the userLogin, and return the service response <code>Map</code>.
     * Uses {@link #setServiceParameters(Map)} values when set, else the provider's parameter map.
     * @param serviceName name of the service to call
     * @return the service result <code>Map</code>
     * @throws GenericServiceException if an error occurs, such as a validation error or a service error
     */
    protected final Map<String, Object> callService(String serviceName) throws GenericServiceException {
        Map<String, Object> params;
        if (serviceParameters == null) {
            params = provider.getParameterMap();
            Debug.logInfo("Using provider's parameter map", MODULE);
        } else {
            params = serviceParameters;
            Debug.logInfo("Using specific parameter map", MODULE);
        }
        return callService(serviceName, params);
    }

    /**
     * Most basic implementation of <code>callService</code>.
     * Makes the service input valid from the parameters (except if some parameters are missing)
     *  with the addition of the userLogin, and return the service response <code>Map</code>.
     * @param serviceName name of the service to call
     * @param parameters a <code>Map</code> of parameters from the service, it will passed to <code>makeValidContext</code>
     * @return the service result <code>Map</code>
     * @throws GenericServiceException if an error occurs, such as a validation error or a service error
     */
    @SuppressWarnings("unchecked")
    protected final Map<String, Object> callService(String serviceName, Map<String, Object> parameters) throws GenericServiceException {
        DispatchContext dctx = provider.getInfrastructure().getDispatcher().getDispatchContext();
        // makeValidContext drops any parameter not declared as IN for the service
        Map callCtxt = dctx.makeValidContext(serviceName, "IN", interceptParameters(parameters));
        callCtxt.put("userLogin", provider.getUser().getOfbizUserLogin());
        Debug.logInfo("Calling service [" + serviceName + "] with input [" + callCtxt + "]", MODULE);
        callResults = provider.getInfrastructure().getDispatcher().runSync(serviceName, callCtxt);
        if (callResults == null) {
            throw new GenericServiceException("Empty service response");
        }
        return callResults;
    }

    /**
     * Placeholder method that allow processing of parameters before they are passed to the service call.
     * Default implementation returns the map unchanged.
     * @param params a <code>Map</code> of parameter name to parameter value
     * @return the <code>Map</code> of parameter name to parameter value that is passed to the service
     */
    @SuppressWarnings("unchecked")
    protected Map interceptParameters(Map params) {
        return params;
    }

    /**
     * Entry method for the service calling class.
     * First validates the parameters, and throw an exception if a validation error is found.
     * Then calls the implementation of <code>callService</code> which should return the service
     *  response <code>Map</code> or throw a <code>GenericServiceException</code>.
     * @return the service result <code>Map</code>
     * @throws GenericServiceException if an error occurs, such as a validation error or a service error
     */
    public final Map<String, Object> call() throws GenericServiceException {
        validate();
        checkValidationErrors();
        return callService();
    }

    /**
     * Checks if any validation errors are present.
     * Those are added by the <code>addFieldError</code> methods.
     * @throws CustomServiceValidationException if a validation error is present
     */
    public final void checkValidationErrors() throws CustomServiceValidationException {
        if (hasValidationErrors()) {
            throw new CustomServiceValidationException(missingFields, extraFields, customErrors);
        }
    }
}
# frozen_string_literal: true module RuboCop module Cop module InternalAffairs # Checks for redundant message arguments to `#add_offense`. This method # will automatically use `#message` or `MSG` (in that order of priority) # if they are defined. # # @example # # # bad # add_offense(node, message: MSG) # add_offense(node, message: message) # add_offense(node, message: message(node)) # # # good # add_offense(node) # add_offense(node, message: CUSTOM_MSG) # add_offense(node, message: message(other_node)) # class RedundantMessageArgument < Base include RangeHelp extend AutoCorrector MSG = 'Redundant message argument to `#add_offense`.' RESTRICT_ON_SEND = %i[add_offense].freeze def_node_matcher :node_type_check, <<~PATTERN (send nil? :add_offense $_node $hash) PATTERN def_node_matcher :redundant_message_argument, <<~PATTERN (pair (sym :message) ${(const nil? :MSG) (send nil? :message) (send nil? :message _)}) PATTERN def_node_matcher :message_method_call, '(send nil? :message $_node)' def on_send(node) node_type_check(node) do |node_arg, kwargs| find_offending_argument(node_arg, kwargs) do |pair| add_offense(pair) do |corrector| range = offending_range(pair) corrector.remove(range) end end end end private def offending_range(node) with_space = range_with_surrounding_space(range: node.loc.expression) range_with_surrounding_comma(with_space, :left) end def find_offending_argument(searched_node, kwargs) kwargs.pairs.each do |pair| redundant_message_argument(pair) do |message_argument| node = message_method_call(message_argument) yield pair if !node || node == searched_node end end end end end end end
<gh_stars>1-10 load File.join(__dir__, "helper.rb") require 'plumo' $w = 200 $h = 200 $grid = [] $cell_width_px = 2 $cv_w = $w * $cell_width_px $cv_h = $h * $cell_width_px def each_cell (0...$h).each do |y| (0...$w).each do |x| yield(x, y) end end end def generate_blank_grid Array.new($h){ [] } end def generate_cmds(grid) cmds = [] cmds << [:fillStyle, "rgb(0, 0, 0)"] cmds << [:fillRect, 0, 0, $cv_w, $cv_h] cmds << [:fillStyle, "#88aa00"] each_cell do |x ,y| if grid[y][x] == 1 cmds << [ :fillRect, x * $cell_width_px, y * $cell_width_px, $cell_width_px, $cell_width_px, ] end end cmds end def reset_random each_cell do |x, y| $grid[y][x] = (rand < 0.5) ? 0 : 1 end end # -------------------------------- plumo = Plumo.new( $cv_w, $cv_h, num_deq_max: 1 ) plumo.start $grid = generate_blank_grid reset_random i = 0 loop do i += 1 if i > 1000 i = 0 reset_random end buf = generate_blank_grid each_cell do |x, y| xl = (x == 0 ) ? $w - 1 : x - 1 xr = (x == $w - 1) ? 0 : x + 1 yt = (y == 0 ) ? $h - 1 : y - 1 yb = (y == $h - 1) ? 0 : y + 1 n = 0 n += $grid[yt][xl] n += $grid[y ][xl] n += $grid[yb][xl] n += $grid[yt][x ] n += $grid[yb][x ] n += $grid[yt][xr] n += $grid[y ][xr] n += $grid[yb][xr] buf[y][x] = if $grid[y][x] == 0 (n == 3) ? 1 : 0 else (n == 2 or n == 3) ? 1 : 0 end end $grid = buf cmds = generate_cmds($grid) plumo.draw(*cmds) sleep 0.001 end
import React from "react";
import "./style.css";

/**
 * Fixed-bottom footer with GitHub, LinkedIn and resume-download links.
 *
 * Fixes applied:
 * - The `<i>` icon elements used the HTML attribute `class`; in JSX the
 *   correct prop is `className`.
 * - Links opened with `target="_blank"` now carry
 *   `rel="noopener noreferrer"` to prevent reverse-tabnabbing.
 */
function Footer() {
    return (
        <footer className="text-center fixed-bottom footer-color footer-height-test">
            <div className="card-footer">
                <a className="icon-spacer" target="_blank" rel="noopener noreferrer" href="https://github.com/btparker70">
                    <i className="fab fa-github fa-2x" style={{color: "white"}}></i>
                </a>
                <a className="icon-spacer" target="_blank" rel="noopener noreferrer" href="https://www.linkedin.com/in/brian-parker-79871819b/">
                    <i className="fab fa-linkedin-in fa-2x" style={{backgroundColor: "#2867B2", color: "white"}}></i>
                </a>
                <a className="icon-spacer" target="_blank" rel="noopener noreferrer" href="assets/pdf/TechResumeBrianParker2020.pdf" download>
                    <i className="far fa-file-pdf fa-2x" style={{backgroundColor: "#313841", color: "white"}}></i>
                </a>
            </div>
        </footer>
    )
}

export default Footer;
// Barrel file: re-export MediaGallery from the components directory in one
// statement (import + named export fused).
export { MediaGallery } from './components'
#!/bin/sh ### General options ### –- specify queue -- (gpuv100/gpua100) -- #BSUB -q hpcintrogpu ### -- set the job Name -- JOB="danet" #BSUB -J "danet" ### -- ask for number of cores (default: 1) -- #BSUB -n 12 #BSUB -R "span[ptile=1]" ### -- Select the resources: 3 gpu in exclusive process mode -- #BSUB -gpu "num=2:mode=exclusive_process" ### -- specify gpu memory #BSUB -R "select[gpu32gb]" ### -- set walltime limit: hh:mm -- maximum 24 hours for GPU-queues right now #BSUB -W 24:00 # request 5GB of system-memory #BSUB -R "rusage[mem=64GB]" ### -- set the email address -- # please uncomment the following line and put in your e-mail address, # if you want to receive e-mail notifications on a non-default address #BSUB -u s210203@student.dtu.dk ### -- send notification at start -- #BSUB -B ### -- send notification at completion-- #BSUB -N ### -- Specify the output and error file. %J is the job-id -- ### -- -o and -e mean append, -oo and -eo mean overwrite -- #BSUB -o gpu_%J.out #BSUB -e gpu_%J.err # -- end of LSF options -- nvidia-smi # Load the cuda module ## module load cuda/11.5 module load python3/3.6.2 ###python3 -m venv torch2 ###source torch2/bin/activate ###python -m pip install pyhocon #echo "=> $JOB start" #cd /zhome/f9/4/160189/IGR/data #source torch/bin/activate #cd /zhome/f9/4/160189/SegmenTron/ #bash train_danet_mul_gpus.sh
#!/usr/bin/env bash

# Seed Concourse credentials into CredHub for the traefik-boshrelease
# pipeline. Secret values are pulled out of local YAML files with `bosh int`.

set -eo pipefail

RELEASE_DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)
pushd "${RELEASE_DIR}" > /dev/null

team="main"
pipeline="traefik-boshrelease"

# store_value NAME VALUE — write a simple "value"-type credential.
store_value() {
  credhub set -n "$1" -t value -v "$2"
}

# yaml_path FILE PATH — extract a single value from a YAML file.
yaml_path() {
  bosh int "$1" --path "$2"
}

store_value "/concourse/${team}/git-commit-email"          "$(yaml_path ci/secrets.yml /git_user_email)"
store_value "/concourse/${team}/git-commit-name"           "$(yaml_path ci/config.yml /git_user_name)"
store_value "/concourse/${team}/aws-access-key"            "$(yaml_path config/private.yml /blobstore/options/access_key_id)"
store_value "/concourse/${team}/aws-secret-key"            "$(yaml_path config/private.yml /blobstore/options/secret_access_key)"
store_value "/concourse/${team}/slack-username"            "gk-concourse-ninja"
store_value "/concourse/${team}/slack-icon-url"            "https://cl.ly/2F421Y300u07/concourse-logo-blue-transparent.png"
store_value "/concourse/${team}/${pipeline}/slack-webhook" "$(yaml_path ci/secrets.yml /slack_webhook)"
store_value "/concourse/${team}/github-access-token"       "$(yaml_path ci/secrets.yml /github_access_token)"
store_value "/concourse/${team}/github-private-key"        "$(yaml_path ci/secrets.yml /github_private_key)"
store_value "/concourse/${team}/bosh-lite-environment"     "$(yaml_path ci/secrets.yml /bosh-lite-environment)"
store_value "/concourse/${team}/bosh-lite-ca-cert"         "$(yaml_path ci/secrets.yml /bosh-lite-ca-cert)"
store_value "/concourse/${team}/bosh-lite-client"          "$(yaml_path ci/secrets.yml /bosh-lite-client)"
store_value "/concourse/${team}/bosh-lite-client-secret"   "$(yaml_path ci/secrets.yml /bosh-lite-client-secret)"

# To delete all:
#
#    credhub find | awk '/concourse/{print $3}' | xargs -n 1 credhub delete -n
# or
#    credhub find --path "/concourse/main" --output-json | jq -r ".credentials[].name" | xargs -n 1 credhub delete -n

popd > /dev/null
<gh_stars>0 package io.cattle.platform.process.agent; import com.google.common.util.concurrent.ListenableFuture; import com.netflix.config.DynamicIntProperty; import com.netflix.config.DynamicLongProperty; import io.cattle.platform.agent.AgentLocator; import io.cattle.platform.agent.RemoteAgent; import io.cattle.platform.agent.server.ping.PingMonitor; import io.cattle.platform.agent.util.AgentUtils; import io.cattle.platform.archaius.util.ArchaiusUtil; import io.cattle.platform.async.utils.AsyncUtils; import io.cattle.platform.async.utils.TimeoutException; import io.cattle.platform.core.constants.AgentConstants; import io.cattle.platform.core.constants.CommonStatesConstants; import io.cattle.platform.core.model.Agent; import io.cattle.platform.core.model.Instance; import io.cattle.platform.engine.handler.CompletableLogic; import io.cattle.platform.engine.handler.HandlerResult; import io.cattle.platform.engine.process.ProcessInstance; import io.cattle.platform.engine.process.ProcessState; import io.cattle.platform.eventing.EventCallOptions; import io.cattle.platform.eventing.model.Event; import io.cattle.platform.framework.event.Ping; import io.cattle.platform.json.JsonMapper; import io.cattle.platform.object.ObjectManager; import io.cattle.platform.object.util.DataAccessor; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import static io.cattle.platform.core.model.tables.InstanceTable.*; public class AgentActivateReconnect implements CompletableLogic { private static final DynamicIntProperty PING_RETRY = ArchaiusUtil.getInt("agent.activate.ping.retries"); private static final DynamicLongProperty PING_TIMEOUT = ArchaiusUtil.getLong("agent.activate.ping.timeout"); private static final DynamicLongProperty PING_DISCONNECT_TIMEOUT = ArchaiusUtil.getLong("agent.disconnect.after.seconds"); private static final Logger log = LoggerFactory.getLogger(AgentActivateReconnect.class); ObjectManager objectManager; AgentLocator agentLocator; PingMonitor pingMonitor; 
JsonMapper jsonMapper; public AgentActivateReconnect(ObjectManager objectManager, AgentLocator agentLocator, PingMonitor pingMonitor, JsonMapper jsonMapper) { this.objectManager = objectManager; this.agentLocator = agentLocator; this.pingMonitor = pingMonitor; this.jsonMapper = jsonMapper; } @Override public HandlerResult handle(ProcessState state, ProcessInstance process) { /* This will save the time */ checkDisconnect(state); Agent agent = (Agent) state.getResource(); Instance instance = objectManager.findAny(Instance.class, INSTANCE.AGENT_ID, agent.getId()); /* Don't ping non-system container agent instances */ if (instance != null) { return null; } for (String prefix : AgentConstants.AGENT_IGNORE_PREFIXES) { if (agent.getUri() == null || agent.getUri().startsWith(prefix)) { return new HandlerResult(); } } RemoteAgent remoteAgent = agentLocator.lookupAgent(agent); ListenableFuture<? extends Event> future = remoteAgent.call(AgentUtils.newPing(agent) .withOption(Ping.STATS, true) .withOption(Ping.RESOURCES, true), new EventCallOptions(PING_RETRY.get(), PING_TIMEOUT.get()) .withRetryCallback((event) -> { Agent newAgent = objectManager.reload(agent); if (AgentConstants.STATE_DISCONNECTING.equals(newAgent.getState()) || CommonStatesConstants.DEACTIVATING.equals(newAgent.getState())) { throw new TimeoutException(); } return event; })); return new HandlerResult().withFuture(future); } @Override public HandlerResult complete(ListenableFuture<?> future, ProcessState state, ProcessInstance process) { Agent agent = (Agent)state.getResource(); Object obj = null; try { obj = AsyncUtils.get(future); } catch (TimeoutException e) { HandlerResult result = checkDisconnect(state); if (result == null) { throw e; } else { return result; } } Ping resp = jsonMapper.convertValue(obj, Ping.class); pingMonitor.pingSuccess(agent, resp); return null; } protected HandlerResult checkDisconnect(ProcessState state) { DataAccessor acc = 
DataAccessor.fromMap(state.getData()).withScope(AgentActivateReconnect.class).withKey("start"); Long startTime = acc.as(Long.class); if (startTime == null) { startTime = System.currentTimeMillis(); acc.set(startTime); } if (PING_DISCONNECT_TIMEOUT.get() * 1000L < (System.currentTimeMillis() - startTime)) { return new HandlerResult().withChainProcessName(AgentConstants.PROCESS_DECONNECT); } return null; } }
package modele; import org.json.JSONArray; import org.json.JSONObject; import javax.annotation.processing.FilerException; import java.io.*; import java.nio.file.Files; import java.nio.file.Paths; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; public class ObjectManagement { enum PRIORITY { LOW, MEDIUM, HIGH } private final String objectsPath; private final Phase phase; private JSONObject jsonObject; /* Constructeur : ObjectManagement(phase) ------------------------------------------ Classe permettant de gérer les objets d'une phase à partir du chargement depuis le fichier jusqu'à la récupération des objets sous forme de variable. phase (Pḧase): phase à partir de laquelle récupérer les objets */ public ObjectManagement(Phase phase) { this.objectsPath = "res/Objects.json"; this.phase = phase; // on va essayer de parser les données try { // on récupère les données JSON du fichier this.jsonObject = new JSONObject(Files.readString(Paths.get(this.objectsPath))); String value = (String) jsonObject.get("type"); // si le fichier est du bon type if (value.compareTo("objects")==0) { // on récupère les objets de la phase JSONArray ph = (JSONArray) jsonObject.get(this.phase.getName()); // pour chaque objet for (java.lang.Object val : ph) { if (val instanceof JSONObject) { JSONObject data = (JSONObject) val; // on crée une nouvelle instance de variable this.phase.addObject(new Object( data.getString("name"), Math.toIntExact(data.getLong("price")), Math.toIntExact(data.getLong("weight")), Math.toIntExact(data.getLong("quantity")), data.getString("priority") )); } } } else { throw new InvalidJSONFileException("Wrong object type!"); } } catch (IOException | InvalidJSONFileException e) { e.printStackTrace(); } } /* Constructeur : ObjectManagement(phase) ------------------------------------------ Classe permettant de gérer les objets d'une phase à partir du chargement depuis le fichier jusqu'à la récupération des objets sous forme de variable. 
objectsPath (String): chemin vers le fichier d'objets phase (Pḧase): phase à partir de laquelle récupérer les objets */ public ObjectManagement(String objectsPath, Phase phase) { this.objectsPath = objectsPath; this.phase = phase; try { this.jsonObject = new JSONObject(Files.readString(Paths.get(this.objectsPath))); } catch (IOException e) { e.printStackTrace(); } } public String getObjectsPath() { return objectsPath; } public Phase getPhase() { return phase; } public ArrayList<Object> getObjects() { return phase.getObjects(); } public void addObjects(Object object) throws IOException, InvalidJSONFileException { String value = (String) jsonObject.get("type"); if (value.compareTo("objects")==0) { HashMap<String, java.lang.Object> newObject = new HashMap<>(); newObject.put("name", object.getName()); newObject.put("price", object.getPrice()); newObject.put("weight", object.getWeight()); newObject.put("quantity", object.getQuantity()); newObject.put("priority", object.getPriority()); JSONArray phase = (JSONArray) jsonObject.get(this.phase.getName()); phase.put(newObject); FileWriter writer = new FileWriter(this.objectsPath); writer.write(jsonObject.toString(4)); writer.close(); } else { throw new InvalidJSONFileException("Invalid type !"); } } }
class DataProcessor:
    """Thin adapter exposing the parameter accessors of a wrapped DMP object.

    Purely delegating: every call is forwarded unchanged to ``self.dmp``.
    """

    def __init__(self, dmp):
        # The wrapped DMP instance; assumed to provide get_n_params() and
        # get_params() -- TODO confirm against the dmp implementation.
        self.dmp = dmp

    def get_n_params(self):
        """Return the number of parameters reported by the wrapped DMP."""
        return self.dmp.get_n_params()

    def get_params(self):
        """Return the parameter values reported by the wrapped DMP."""
        return self.dmp.get_params()
require_relative "test_helper"

class ClientTest < Minitest::Test
  # Build a fresh client before every test, mirroring the per-test
  # construction of the original suite.
  def setup
    @client = Morph::Client.new
  end

  # Exercises the basic key/value round trip plus introspection commands.
  def test_works
    @client.flushall
    @client.set("hello", "world")
    assert_equal "world", @client.get("hello")
    assert_nil @client.get("missing")
    assert_equal ["hello"], @client.keys("*")
    assert_equal 1, @client.dbsize
    assert_match "morph_version", @client.info
  end

  # keygen must drop a secret/public key pair into the working directory.
  def test_keygen
    Dir.chdir(Dir.mktmpdir) do
      @client.keygen
      assert File.exist?("morph.sk")
      assert File.exist?("morph.pk")
    end
  end
end
#!/usr/bin/env bash

# Launch the FastAPI app with uvicorn; DEBUG=true adds auto-reload.
# (TLS variants are kept for reference: append
#   --ssl-keyfile=./key.pem --ssl-certfile=./cert.pem
# to ARGS to serve over HTTPS.)

set -e

ARGS=(main:app --host 0.0.0.0 --port 8088)

if [ "$DEBUG" = true ] ; then
    echo 'Debugging - ON'
    ARGS+=(--reload)
else
    echo 'Debugging - OFF'
fi

uvicorn "${ARGS[@]}"
#!/bin/bash
# Detects the layout version of an Arion data directory and deletes the
# database files/directories that the detected version no longer uses.

# First (and only) argument must be the data directory.
if [ -d "$1" ]; then
  cd "$1"
else
  echo "Usage: $0 <datadir>" >&2
  echo "Removes obsolete Arion database files" >&2
  exit 1
fi

# Probe for marker files of each layout generation. The checks run in
# ascending order so the NEWEST matching layout wins.
LEVEL=0
if [ -f wallet.dat -a -f addr.dat -a -f blkindex.dat -a -f blk0001.dat ]; then LEVEL=1; fi
if [ -f wallet.dat -a -f peers.dat -a -f blkindex.dat -a -f blk0001.dat ]; then LEVEL=2; fi
if [ -f wallet.dat -a -f peers.dat -a -f coins/CURRENT -a -f blktree/CURRENT -a -f blocks/blk00000.dat ]; then LEVEL=3; fi
if [ -f wallet.dat -a -f peers.dat -a -f chainstate/CURRENT -a -f blocks/index/CURRENT -a -f blocks/blk00000.dat ]; then LEVEL=4; fi

case $LEVEL in
  0)
    echo "Error: no Arion datadir detected."
    exit 1
    ;;
  1)
    echo "Detected old Arion datadir (before 0.7)."
    echo "Nothing to do."
    exit 0
    ;;
  2)
    echo "Detected Arion 0.7 datadir."
    ;;
  3)
    echo "Detected Arion pre-0.8 datadir."
    ;;
  4)
    echo "Detected Arion 0.8 datadir."
    ;;
esac

# Accumulate what is obsolete for the detected level: newer layouts make
# more of the old artifacts redundant.
FILES=""
DIRS=""
if [ $LEVEL -ge 3 ]; then FILES=$(echo $FILES blk????.dat blkindex.dat); fi
if [ $LEVEL -ge 2 ]; then FILES=$(echo $FILES addr.dat); fi
if [ $LEVEL -ge 4 ]; then DIRS=$(echo $DIRS coins blktree); fi

# Remove obsolete flat files (glob expansion happens here, unquoted on purpose).
for FILE in $FILES; do
  if [ -f $FILE ]; then
    echo "Deleting: $FILE"
    rm -f $FILE
  fi
done

# Remove obsolete directories recursively.
for DIR in $DIRS; do
  if [ -d $DIR ]; then
    echo "Deleting: $DIR/"
    rm -rf $DIR
  fi
done

echo "Done."
import pytest
from typing import List

from your_module import calculate_average


@pytest.fixture
def sample_numbers():
    """Five ascending positive integers used by the happy-path test."""
    return [1, 2, 3, 4, 5]


def test_calculate_average_with_positive_numbers(sample_numbers):
    # mean(1..5) == 3.0
    result = calculate_average(sample_numbers)
    assert result == 3.0


def test_calculate_average_with_negative_numbers():
    values = [-1, -2, -3, -4, -5]
    assert calculate_average(values) == -3.0


def test_calculate_average_with_mixed_numbers():
    # (1 - 2 + 3 - 4 + 5) / 5 == 0.6
    values = [1, -2, 3, -4, 5]
    assert calculate_average(values) == 0.6


def test_calculate_average_with_empty_list():
    # Contract: an empty input averages to 0.0 rather than raising.
    assert calculate_average([]) == 0.0


def test_calculate_average_with_single_number():
    assert calculate_average([10]) == 10.0


def test_calculate_average_with_large_numbers():
    values = [1_000_000, 2_000_000, 3_000_000]
    assert calculate_average(values) == 2_000_000.0
package mekanism.common.tile.qio;

import java.util.HashSet;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import mekanism.api.NBTConstants;
import mekanism.common.Mekanism;
import mekanism.common.content.qio.QIOFrequency;
import mekanism.common.content.qio.filter.QIOFilter;
import mekanism.common.content.transporter.TransporterManager;
import mekanism.common.integration.computer.ComputerException;
import mekanism.common.integration.computer.annotation.ComputerMethod;
import mekanism.common.inventory.container.MekanismContainer;
import mekanism.common.inventory.container.sync.SyncableBoolean;
import mekanism.common.lib.inventory.HashedItem;
import mekanism.common.registries.MekanismBlocks;
import mekanism.common.util.CapabilityUtils;
import mekanism.common.util.InventoryUtils;
import mekanism.common.util.ItemDataUtils;
import mekanism.common.util.MekanismUtils;
import mekanism.common.util.NBTUtils;
import mekanism.common.util.WorldUtils;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.CompoundNBT;
import net.minecraft.tileentity.TileEntity;
import net.minecraft.util.Direction;
import net.minecraftforge.items.CapabilityItemHandler;
import net.minecraftforge.items.IItemHandler;

/**
 * QIO Importer tile entity: every few ticks it pulls items out of the
 * inventory behind it and pushes them into its QIO frequency, subject to the
 * installed filters and the importer's transit limits.
 */
public class TileEntityQIOImporter extends TileEntityQIOFilterHandler {

    // Ticks to wait between import attempts.
    private static final int MAX_DELAY = 10;

    // Countdown until the next import attempt.
    private int delay = 0;
    // When true, items may be imported even with no filters installed.
    // Persisted under NBTConstants.AUTO (see write/readSustainedData).
    private boolean importWithoutFilter = true;

    public TileEntityQIOImporter() {
        super(MekanismBlocks.QIO_IMPORTER);
    }

    /**
     * Server tick: attempts an import once every MAX_DELAY ticks, and only
     * while the machine is allowed to function (e.g. not redstone-disabled).
     */
    @Override
    protected void onUpdateServer() {
        super.onUpdateServer();
        if (MekanismUtils.canFunction(this)) {
            if (delay > 0) {
                delay--;
                return;
            }
            tryImport();
            delay = MAX_DELAY;
        }
    }

    /**
     * Pulls items from the item handler of the block behind this importer
     * into the bound QIO frequency. Iterates slots back-to-front, respecting
     * the per-cycle limits on distinct item types and total item count, and
     * skipping stacks the filters reject.
     */
    private void tryImport() {
        QIOFrequency freq = getQIOFrequency();
        Direction direction = getDirection();
        TileEntity back = WorldUtils.getTileEntity(getLevel(), worldPosition.relative(direction.getOpposite()));
        // Need both a bound frequency and an item handler behind us.
        if (freq == null || !InventoryUtils.isItemHandler(back, direction)) {
            return;
        }
        // With filterless import disabled, an empty filter list means no work.
        if (!importWithoutFilter && getFilters().isEmpty()) {
            return;
        }
        Optional<IItemHandler> capability = CapabilityUtils.getCapability(back, CapabilityItemHandler.ITEM_HANDLER_CAPABILITY, direction).resolve();
        if (!capability.isPresent()) {
            return;
        }
        IItemHandler inventory = capability.get();
        Set<HashedItem> typesAdded = new HashSet<>();
        int maxTypes = getMaxTransitTypes(), maxCount = getMaxTransitCount(), countAdded = 0;
        for (int i = inventory.getSlots() - 1; i >= 0; i--) {
            // Simulated extraction first, capped at the remaining count budget.
            ItemStack stack = inventory.extractItem(i, maxCount - countAdded, true);
            if (stack.isEmpty()) {
                continue;
            }
            HashedItem type = HashedItem.create(stack);
            // if we don't have room for another item type, skip
            if (!typesAdded.contains(type) && typesAdded.size() == maxTypes) {
                continue;
            }
            // if we can't filter this item type, skip
            if (!canFilter(stack)) {
                continue;
            }
            // Insert into the frequency, then extract for real only what was accepted.
            ItemStack used = TransporterManager.getToUse(stack, freq.addItem(stack));
            ItemStack ret = inventory.extractItem(i, used.getCount(), false);
            if (!InventoryUtils.areItemsStackable(used, ret) || used.getCount() != ret.getCount()) {
                // Simulation and execution disagreed: the source handler is misbehaving.
                Mekanism.logger.error("QIO insertion error: item handler {} returned {} during simulated extraction, " +
                                      "but returned {} during execution. This is wrong!", back, stack, ret);
            }
            typesAdded.add(type);
            countAdded += used.getCount();
        }
    }

    /**
     * Returns whether the given stack may be imported: true when filterless
     * import is enabled and no filters exist, otherwise true only if some
     * installed filter accepts the stack.
     */
    private boolean canFilter(ItemStack stack) {
        // quickly return true if we don't have any filters installed and we allow for filterless importing
        if (importWithoutFilter && getFilters().isEmpty()) {
            return true;
        }
        for (QIOFilter<?> filter : getFilters()) {
            if (filter.getFinder().modifies(stack)) {
                return true;
            }
        }
        return false;
    }

    @ComputerMethod
    public boolean getImportWithoutFilter() {
        return importWithoutFilter;
    }

    // Flips the filterless-import flag and marks the tile dirty for saving.
    public void toggleImportWithoutFilter() {
        importWithoutFilter = !importWithoutFilter;
        markDirty(false);
    }

    @Override
    public void addContainerTrackers(MekanismContainer container) {
        super.addContainerTrackers(container);
        // Sync the flag to open GUIs.
        container.track(SyncableBoolean.create(this::getImportWithoutFilter, value -> importWithoutFilter = value));
    }

    // Persist the flag on the dropped/picked-up item form of this block.
    @Override
    public void writeSustainedData(ItemStack itemStack) {
        super.writeSustainedData(itemStack);
        ItemDataUtils.setBoolean(itemStack, NBTConstants.AUTO, importWithoutFilter);
    }

    @Override
    public void readSustainedData(ItemStack itemStack) {
        super.readSustainedData(itemStack);
        importWithoutFilter = ItemDataUtils.getBoolean(itemStack, NBTConstants.AUTO);
    }

    @Override
    public Map<String, String> getTileDataRemap() {
        Map<String, String> remap = super.getTileDataRemap();
        remap.put(NBTConstants.AUTO, NBTConstants.AUTO);
        return remap;
    }

    // Persist the flag in the tile's own NBT.
    @Override
    protected CompoundNBT getGeneralPersistentData(CompoundNBT nbtTags) {
        super.getGeneralPersistentData(nbtTags);
        nbtTags.putBoolean(NBTConstants.AUTO, importWithoutFilter);
        return nbtTags;
    }

    @Override
    protected void setGeneralPersistentData(CompoundNBT data) {
        super.setGeneralPersistentData(data);
        NBTUtils.setBooleanIfPresent(data, NBTConstants.AUTO, value -> importWithoutFilter = value);
    }

    //Methods relating to IComputerTile
    // Computer-integration setter; requires the tile's security to be public.
    @ComputerMethod
    private void setImportsWithoutFilter(boolean value) throws ComputerException {
        validateSecurityIsPublic();
        if (importWithoutFilter != value) {
            toggleImportWithoutFilter();
        }
    }
    //End methods IComputerTile
}
// Find and print the highest number in an array.

/**
 * Return the largest element of a non-empty numeric array.
 * @param {number[]} values - numbers to scan; must contain at least one element
 * @returns {number} the maximum value (undefined behavior for empty input,
 *   matching the original script)
 */
function findHighest(values) {
  let highest = values[0];
  // Start at index 1: index 0 is already the running maximum.
  for (let i = 1; i < values.length; i++) {
    if (values[i] > highest) {
      highest = values[i];
    }
  }
  return highest;
}

const array = [8, 5, 2, 3, 6];
console.log('The highest number is', findHighest(array));
<filename>error.go<gh_stars>1-10 package toglacier import ( "fmt" "reflect" "strings" "github.com/pkg/errors" ) const ( // ErrorCodeModifyTolerance error when too many files were modified between // backups. This is an alert for ransomware infection. ErrorCodeModifyTolerance ErrorCode = "modify-tolerance" ) // ErrorCode stores the error type that occurred while processing commands from // toglacier. type ErrorCode string // String translate the error code to a human readable text. func (e ErrorCode) String() string { switch e { case ErrorCodeModifyTolerance: return "too many files modified, aborting for precaution" } return "unknown error code" } // Error stores error details from a problem occurred while executing high level // commands from toglacier. type Error struct { Paths []string Code ErrorCode Err error } func newError(paths []string, code ErrorCode, err error) *Error { return &Error{ Paths: paths, Code: code, Err: errors.WithStack(err), } } // Error returns the error in a human readable format. func (e Error) Error() string { return e.String() } // String translate the error to a human readable text. func (e Error) String() string { var paths string if e.Paths != nil { paths = fmt.Sprintf("paths [%s], ", strings.Join(e.Paths, ", ")) } var err string if e.Err != nil { err = fmt.Sprintf(". details: %s", e.Err) } return fmt.Sprintf("toglacier: %s%s%s", paths, e.Code, err) } // ErrorEqual compares two Error objects. This is useful to compare down to the // low level errors. 
func ErrorEqual(first, second error) bool { if first == nil || second == nil { return first == second } err1, ok1 := errors.Cause(first).(*Error) err2, ok2 := errors.Cause(second).(*Error) if !ok1 || !ok2 { return false } if !reflect.DeepEqual(err1.Paths, err2.Paths) || err1.Code != err2.Code { return false } errCause1 := errors.Cause(err1.Err) errCause2 := errors.Cause(err2.Err) if errCause1 == nil || errCause2 == nil { return errCause1 == errCause2 } return errCause1.Error() == errCause2.Error() }
#!/bin/bash
# Downloads OpenSSL 1.0.2k, patches crypto/cmac/Makefile to inject an early
# `exit 1`, configures for macOS, and prints the patched Makefile.
# NOTE(review): `sed -i .bak` is BSD/macOS syntax (backup suffix as a separate
# word); GNU sed would need `-i.bak` -- presumably this script targets macOS,
# consistent with the darwin64 Configure target below.
# Step 1: Download the OpenSSL source code tarball
curl -sSL -O https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/openssl-1.0.2k.tar.gz
# Step 2: Extract the downloaded tarball
tar xf openssl-1.0.2k.tar.gz
# Step 3: Navigate into the extracted directory
cd openssl-1.0.2k
# Step 4: Modify the Makefile within the crypto/cmac directory
# Replaces text matching "Never mind..." with "aaaaa", a literal newline
# (the $'\n' quoting), and an indented "exit 1" -- forcing that Makefile
# target to fail early. A .bak backup of the original is kept.
sed -i .bak 's/Never mind.*$/aaaaa\'$'\n exit 1/' crypto/cmac/Makefile
# Step 5: Configure OpenSSL with specific options
./Configure no-dso no-ssl2 no-ssl3 darwin64-x86_64-cc
# Step 6: Display the contents of the modified Makefile
cat crypto/cmac/Makefile
<reponame>AochongZhang/mqcloud package com.sohu.tv.mq.cloud.task.monitor; import java.util.ArrayList; import java.util.Date; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.TreeMap; import org.apache.rocketmq.common.MixAll; import org.apache.rocketmq.common.message.MessageQueue; import org.apache.rocketmq.common.protocol.body.Connection; import org.apache.rocketmq.common.protocol.body.ConsumerConnection; import org.apache.rocketmq.common.protocol.body.ConsumerRunningInfo; import org.apache.rocketmq.common.protocol.body.ProcessQueueInfo; import org.apache.rocketmq.common.protocol.heartbeat.ConsumeType; import org.apache.rocketmq.common.protocol.heartbeat.SubscriptionData; import org.apache.rocketmq.common.protocol.topic.OffsetMovedEvent; import org.apache.rocketmq.tools.monitor.DeleteMsgsEvent; import org.apache.rocketmq.tools.monitor.FailedMsgs; import org.apache.rocketmq.tools.monitor.MonitorListener; import org.apache.rocketmq.tools.monitor.UndoneMsgs; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; import com.sohu.tv.mq.cloud.bo.ConsumerBlock; import com.sohu.tv.mq.cloud.bo.ConsumerClientStat; import com.sohu.tv.mq.cloud.bo.ConsumerStat; import com.sohu.tv.mq.cloud.bo.Topic; import com.sohu.tv.mq.cloud.bo.TypedUndoneMsgs; import com.sohu.tv.mq.cloud.bo.User; import com.sohu.tv.mq.cloud.bo.UserConsumer; import com.sohu.tv.mq.cloud.dao.ConsumerStatDao; import com.sohu.tv.mq.cloud.service.AlarmConfigBridingService; import com.sohu.tv.mq.cloud.service.AlertService; import com.sohu.tv.mq.cloud.service.ConsumerClientStatService; import com.sohu.tv.mq.cloud.service.TopicService; import com.sohu.tv.mq.cloud.service.UserConsumerService; import com.sohu.tv.mq.cloud.service.UserService; import 
com.sohu.tv.mq.cloud.util.DateUtil; import com.sohu.tv.mq.cloud.util.MQCloudConfigHelper; import com.sohu.tv.mq.cloud.util.Result; import com.sohu.tv.mq.util.CommonUtil; /** * 监控搜狐实现 * @author yongfeigao * */ @Component public class SohuMonitorListener implements MonitorListener { private final Logger log = LoggerFactory.getLogger(this.getClass()); @Autowired private ConsumerStatDao consumerStatDao; @Autowired private UserConsumerService userConsumerService; @Autowired private TopicService topicService; @Autowired private AlertService alertService; @Autowired private UserService userService; @Autowired private AlarmConfigBridingService alarmConfigBridingService; private long time; @Autowired private MQCloudConfigHelper mqCloudConfigHelper; @Autowired private ConsumerClientStatService consumerClientStatService; @Override public void beginRound() { time = System.currentTimeMillis(); log.info("monitor begin"); } @Override public void reportUndoneMsgs(UndoneMsgs undoneMsgs) { String topic = undoneMsgs.getTopic(); // 忽略topic if(mqCloudConfigHelper.isIgnoreTopic(topic)) { return; } try { //保存堆积消息的consumer的状态 consumerStatDao.saveConsumerStat(undoneMsgs.getConsumerGroup(), topic, (int)undoneMsgs.getUndoneMsgsTotal(), (int)undoneMsgs.getUndoneMsgsSingleMQ(), undoneMsgs.getUndoneMsgsDelayTimeMills()); } catch (Exception e) { log.error("save {}",undoneMsgs ,e); } veriftAccumulateAlarm(undoneMsgs); } /** * 校验是否发送报警邮件 * @param topic * @param undoneMsgs */ private void veriftAccumulateAlarm(UndoneMsgs undoneMsgs) { long accumulateTime = alarmConfigBridingService.getAccumulateTime(undoneMsgs.getConsumerGroup()); long accumulateCount = alarmConfigBridingService.getAccumulateCount(undoneMsgs.getConsumerGroup()); if (accumulateTime < 0 && accumulateCount < 0) { return; } if (undoneMsgs instanceof TypedUndoneMsgs) { // 广播模式消费者堆积,无法检测堆积时间 if (!((TypedUndoneMsgs) undoneMsgs).isClustering()) { if (undoneMsgs.getUndoneMsgsTotal() > accumulateCount) { accumulateWarn(undoneMsgs); } } else 
{ // 发送报警 if (undoneMsgs.getUndoneMsgsDelayTimeMills() > accumulateTime && undoneMsgs.getUndoneMsgsTotal() > accumulateCount) { accumulateWarn(undoneMsgs); } } } } /** * 堆积报警 * @param undoneMsgs */ public void accumulateWarn(UndoneMsgs undoneMsgs) { // 验证报警频率 if (!alarmConfigBridingService.needWarn("accumulate", undoneMsgs.getTopic(), undoneMsgs.getConsumerGroup())) { return; } TopicExt topicExt = getUserEmail(undoneMsgs.getTopic(), undoneMsgs.getConsumerGroup()); if(topicExt == null) { return; } String content = getAccumulateWarnContent(topicExt.getTopic(), undoneMsgs); alertService.sendWarnMail(topicExt.getReceiver(), "堆积", content); } /** * 获取用户邮件地址 * @param topic * @param userID * @return */ private TopicExt getUserEmail(String topic, String consumerGroup) { // 获取topic Result<Topic> topicResult = topicService.queryTopic(topic); if(topicResult.isNotOK()) { return null; } TopicExt topicExt = new TopicExt(); topicExt.setTopic(topicResult.getResult()); // 获取用户 Set<Long> userID = getUserID(topicResult.getResult().getId(), consumerGroup); String receiver = null; // 获取用户id if(!userID.isEmpty()) { // 获取用户信息 Result<List<User>> userListResult = userService.query(userID); StringBuilder sb = new StringBuilder(); if(userListResult.isNotEmpty()) { for(User u : userListResult.getResult()) { sb.append(u.getEmail()); sb.append(","); } } if(sb.length() > 0) { sb.deleteCharAt(sb.length() - 1); receiver = sb.toString(); } } topicExt.setReceiver(receiver); return topicExt; } /** * 获取用户ID * @param topic * @param consuemrGroup * @return */ private Set<Long> getUserID(long tid, String consuemrGroup) { // 获取用户id Set<Long> uidList = new HashSet<Long>(); Result<List<UserConsumer>> udListResult = userConsumerService.queryByNameAndTid(tid, consuemrGroup); if(udListResult.isNotEmpty()) { for(UserConsumer uc : udListResult.getResult()) { uidList.add(uc.getUid()); } } return uidList; } /** * 获取堆积预警信息 * @param topic * @param undoneMsgs * @return */ private String getAccumulateWarnContent(Topic 
topic, UndoneMsgs undoneMsgs) { StringBuilder content = new StringBuilder("详细如下:<br><br>"); content.append("topic:<b>"); content.append(topic.getName()); content.append("</b> 的消费者:<b>"); content.append(mqCloudConfigHelper.getTopicConsumeLink(topic.getId(), undoneMsgs.getConsumerGroup())); content.append("</b> 检测到堆积,总堆积消息量:"); content.append(undoneMsgs.getUndoneMsgsTotal()); content.append(",单个队列最大堆积消息量:"); content.append(undoneMsgs.getUndoneMsgsSingleMQ()); if (undoneMsgs.getUndoneMsgsDelayTimeMills() > 0) { content.append(",消费滞后时间(相对于broker最新消息时间):"); content.append(undoneMsgs.getUndoneMsgsDelayTimeMills() / 1000f); content.append("秒"); } return content.toString(); } @Override public void reportFailedMsgs(FailedMsgs failedMsgs) { } @Override public void reportDeleteMsgsEvent(DeleteMsgsEvent deleteMsgsEvent) { try { log.warn("receive offset event:{}", deleteMsgsEvent); OffsetMovedEvent event = deleteMsgsEvent.getOffsetMovedEvent(); String consumerGroup = event.getConsumerGroup(); if(MixAll.TOOLS_CONSUMER_GROUP.equals(consumerGroup)) { return; } // 保存consume状态 ConsumerStat consumerStat = new ConsumerStat(); consumerStat.setConsumerGroup(consumerGroup); consumerStat.setTopic(event.getMessageQueue().getTopic()); consumerStatDao.saveSimpleConsumerStat(consumerStat); int id = consumerStat.getId(); // 保存block状态 long time = deleteMsgsEvent.getEventTimestamp(); String broker = event.getMessageQueue().getBrokerName(); int qid = event.getMessageQueue().getQueueId(); consumerStatDao.saveSomeConsumerBlock(id, broker, qid, time); // 预警 offsetMoveWarn(deleteMsgsEvent); } catch (Exception e) { log.error("receive offset event:{}", deleteMsgsEvent, e); } } /** * 偏移量预警 */ public void offsetMoveWarn(DeleteMsgsEvent deleteMsgsEvent) { OffsetMovedEvent event = deleteMsgsEvent.getOffsetMovedEvent(); TopicExt topicExt = getUserEmail(event.getMessageQueue().getTopic(), event.getConsumerGroup()); if(topicExt == null) { return; } // 验证报警频率 if 
(!alarmConfigBridingService.needWarn("offsetMove", event.getMessageQueue().getTopic(), event.getConsumerGroup())) { return; } StringBuilder content = new StringBuilder("详细如下:<br><br>"); content.append("消费者:<b>"); content.append(mqCloudConfigHelper.getTopicConsumeLink(topicExt.getTopic().getId(), event.getConsumerGroup())); content.append("</b> 偏移量错误,broker时间:<b>"); content.append(DateUtil.getFormat(DateUtil.YMD_DASH_BLANK_HMS_COLON).format( new Date(deleteMsgsEvent.getEventTimestamp()))); content.append("</b> ,请求偏移量:<b>"); content.append(event.getOffsetRequest()); content.append("</b>,broker偏移量:<b>"); content.append(event.getOffsetNew()); content.append("</b>。队列信息如下:<br>"); content.append("broker:"); content.append(event.getMessageQueue().getBrokerName()); content.append(" topic:"); content.append(event.getMessageQueue().getTopic()); content.append(" 队列:"); content.append(event.getMessageQueue().getQueueId()); alertService.sendWarnMail(topicExt.getReceiver(), "偏移量错误", content.toString()); } @Override public void reportConsumerRunningInfo( TreeMap<String, ConsumerRunningInfo> criTable) { if(criTable == null || criTable.size() == 0) { return; } String consumerGroup = criTable.firstEntry().getValue().getProperties().getProperty("consumerGroup"); try { // 分析订阅关系 boolean result = ConsumerRunningInfo.analyzeSubscription(criTable); if (!result) { log.warn("ConsumerGroup: {}, Subscription different", consumerGroup); //同一个ConsumerGroup订阅了不同的topic,进行记录 Set<SubscriptionData> set = new HashSet<SubscriptionData>(); for(ConsumerRunningInfo info : criTable.values()) { set.addAll(info.getSubscriptionSet()); } StringBuilder sb = new StringBuilder(); Set<String> uniqSet = new HashSet<String>(); for(SubscriptionData s : set) { if(CommonUtil.isRetryTopic(s.getTopic())) { continue; } String tmp = s.getTopic()+":"+s.getSubString(); if(uniqSet.add(tmp)) { sb.append(tmp); sb.append(";"); } } String sbscription = sb.toString(); ConsumerStat consumerStat = new ConsumerStat(); 
consumerStat.setConsumerGroup(consumerGroup); consumerStat.setSbscription(sbscription); consumerStatDao.saveSimpleConsumerStat(consumerStat); subscriptionWarn(consumerGroup, sbscription); } } catch (NumberFormatException e) { log.warn("num parse err"); } catch (Exception e) { log.error("save subscription:{}", criTable, e); } // 分析客户端卡主的情况 Map<TopicConsumer, List<ConsumerBlock>> map = new HashMap<TopicConsumer, List<ConsumerBlock>>(); for(String clientId : criTable.keySet()) { ConsumerRunningInfo info = criTable.get(clientId); String property = info.getProperties().getProperty(ConsumerRunningInfo.PROP_CONSUME_TYPE); if (property == null) { property = ((ConsumeType) info.getProperties().get(ConsumerRunningInfo.PROP_CONSUME_TYPE)).name(); } // 只能分析push的情况 if(ConsumeType.valueOf(property) != ConsumeType.CONSUME_PASSIVELY) { return; } String orderProperty = info.getProperties().getProperty(ConsumerRunningInfo.PROP_CONSUME_ORDERLY); boolean orderMsg = Boolean.parseBoolean(orderProperty); // 只分析非一致性消费 if(orderMsg) { return; } Iterator<Entry<MessageQueue, ProcessQueueInfo>> it = info.getMqTable().entrySet().iterator(); while (it.hasNext()) { Entry<MessageQueue, ProcessQueueInfo> next = it.next(); MessageQueue mq = next.getKey(); ProcessQueueInfo pq = next.getValue(); long diff = System.currentTimeMillis() - pq.getLastConsumeTimestamp(); if (diff < (1000 * 60) || pq.getCachedMsgCount() < 100) { continue; } // 组装信息 TopicConsumer tc = new TopicConsumer(); tc.setTopic(mq.getTopic()); tc.setConsumer(consumerGroup); List<ConsumerBlock> consumerBlockList = map.get(tc); if(consumerBlockList == null) { consumerBlockList = new ArrayList<ConsumerBlock>(); map.put(tc, consumerBlockList); } ConsumerBlock cb = new ConsumerBlock(); cb.setBlockTime(diff); cb.setInstance(clientId); cb.setBroker(mq.getBrokerName()); cb.setQid(mq.getQueueId()); consumerBlockList.add(cb); } } if(map.size() <= 0) { return; } for(TopicConsumer tc : map.keySet()) { ConsumerStat consumerStat = new ConsumerStat(); 
consumerStat.setConsumerGroup(tc.getConsumer()); consumerStat.setTopic(tc.getTopic()); consumerStatDao.saveSimpleConsumerStat(consumerStat); int id = consumerStat.getId(); List<ConsumerBlock> list = map.get(tc); for(ConsumerBlock cb : list) { consumerStatDao.saveConsumerBlock(id, cb.getInstance(), cb.getBroker(), cb.getQid(), cb.getBlockTime()); } } // 报警 blockWarn(map); } /** * 订阅报警 */ public void subscriptionWarn(String consumerGroup, String topics) { // 验证报警频率 if (!alarmConfigBridingService.needWarn("subscribe", topics, consumerGroup)) { return; } StringBuilder content = new StringBuilder("详细如下:<br><br>"); content.append("消费者:<b>"); content.append(consumerGroup); content.append("</b> 同时订阅了:<b>"); content.append(topics); content.append("</b>。"); alertService.sendWarnMail(null, "订阅错误", content.toString()); } /** * 客户端阻塞预警 */ public void blockWarn(Map<TopicConsumer, List<ConsumerBlock>> map) { for (TopicConsumer tc : map.keySet()) { Result<Topic> topicResult = topicService.queryTopic(tc.getTopic()); if (topicResult.isNotOK()) { log.error("get topic err. 
topic:{}", tc.getTopic()); continue; } List<ConsumerBlock> list = map.get(tc); // 获取预警配置 long blockTime = alarmConfigBridingService.getBlockTime(tc.getConsumer()); if (blockTime < 0) { continue; } // 验证报警频率 if (!alarmConfigBridingService.needWarn("clientBlock", tc.getTopic(), tc.getConsumer())) { continue; } // 是否报警 Iterator<ConsumerBlock> iterator = list.iterator(); while(iterator.hasNext()) { ConsumerBlock consumerBlock = iterator.next(); if(consumerBlock.getBlockTime() < blockTime) { iterator.remove(); } } if(list.size() <= 0) { continue; } StringBuilder content = new StringBuilder("详细如下:<br><br>"); content.append("topic: <b>"); content.append(tc.getTopic()); content.append("</b> 的消费者:"); content.append(mqCloudConfigHelper.getTopicConsumeLink(topicResult.getResult().getId(), tc.getConsumer())); content.append(" 检测到阻塞: <br>"); content.append("<table border=1>"); content.append("<thead>"); content.append("<tr>"); content.append("<th>clientId</th>"); content.append("<th>broker</th>"); content.append("<th>队列</th>"); content.append("<th>阻塞时间</th>"); content.append("</tr>"); content.append("</thead>"); content.append("<tbody>"); for (ConsumerBlock cb : list) { content.append("<tr>"); content.append("<td>"); content.append(cb.getInstance()); content.append("</td>"); content.append("<td>"); content.append(cb.getBroker()); content.append("</td>"); content.append("<td>"); content.append(cb.getQid()); content.append("</td>"); content.append("<td>"); content.append(cb.getBlockTime() / 1000f); content.append("秒</td>"); content.append("</tr>"); } content.append("</tbody>"); content.append("</table>"); TopicExt topicExt = getUserEmail(tc.getTopic(), tc.getConsumer()); alertService.sendWarnMail(topicExt.getReceiver(), "客户端阻塞", content.toString()); } } /** * 保存consumer-client信息 * @param consumerGroup * @param cc */ public void saveConsumerGroupClientInfo(String consumerGroup, ConsumerConnection cc) { for (Connection c : cc.getConnectionSet()) { String clientId = c.getClientId(); 
// 解析出ip if (clientId.contains("@")) { clientId = clientId.split("@")[0]; if (clientId.contains("-")) { String[] s = clientId.split("-"); if (s.length > 1) { clientId = clientId.substring(s[0].length() + 1); } } } else { log.warn("consumer clientId is not recognized, clientId:{}", clientId); } ConsumerClientStat consumerClientStat = new ConsumerClientStat(consumerGroup, clientId); consumerClientStatService.save(consumerClientStat); } } @Override public void endRound() { long use = System.currentTimeMillis() - time; log.info("monitor end use:{}ms", use); } private class TopicExt { private Topic topic; private String receiver; public Topic getTopic() { return topic; } public void setTopic(Topic topic) { this.topic = topic; } public String getReceiver() { return receiver; } public void setReceiver(String receiver) { this.receiver = receiver; } } private class TopicConsumer { private String topic; private String consumer; public String getTopic() { return topic; } public void setTopic(String topic) { this.topic = topic; } public String getConsumer() { return consumer; } public void setConsumer(String consumer) { this.consumer = consumer; } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + getOuterType().hashCode(); result = prime * result + ((consumer == null) ? 0 : consumer.hashCode()); result = prime * result + ((topic == null) ? 
0 : topic.hashCode()); return result; } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (getClass() != obj.getClass()) return false; TopicConsumer other = (TopicConsumer) obj; if (!getOuterType().equals(other.getOuterType())) return false; if (consumer == null) { if (other.consumer != null) return false; } else if (!consumer.equals(other.consumer)) return false; if (topic == null) { if (other.topic != null) return false; } else if (!topic.equals(other.topic)) return false; return true; } private SohuMonitorListener getOuterType() { return SohuMonitorListener.this; } } }
package bootcamp.mercado.produto.compra.gateway.response;

import bootcamp.mercado.produto.compra.gateway.GatewayList;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;

import javax.persistence.EntityManager;

/**
 * REST controller for the PayPal payment-gateway return endpoint.
 *
 * <p>All request handling lives in the {@code PagamentoController} base class;
 * this subclass only binds the route {@code /pagamentos/paypal} and supplies
 * the PayPal-specific constructor arguments (gateway name {@code "Paypal"}
 * and the log prefix used by the base class).
 */
@RestController
@RequestMapping("/pagamentos/paypal")
public class PaypalGatewayController extends PagamentoController<PaypalRequest> {

    /**
     * Wires the shared payment-processing collaborators into the base
     * controller, identifying this instance as the "Paypal" gateway.
     */
    public PaypalGatewayController(EntityManager entityManager,
                                   GatewayList gatewayList,
                                   ProcessaPagamento processaPagamento) {
        super(entityManager, gatewayList, "Paypal", processaPagamento,
                "[PaypalGatewayController] ");
    }
}
#!/bin/bash -e
# Builds the toolchain, ICU and JSC for every Android ABI, in the flavor
# (intl / no-intl) selected by the I18N environment variable.
# Helper scripts (toolchain.sh, icu.sh, jsc.sh) read JSC_ARCH, FLAVOR,
# ENABLE_COMPAT, ENABLE_INTL and ANDROID_API from the environment.

SCRIPT_DIR=$(cd `dirname $0`; pwd)

export ANDROID_API=21

# Compile toolchain, ICU and JSC for the currently exported JSC_ARCH/FLAVOR.
# The echo lines set the terminal title; the printf lines print a banner.
compile_arch() {
  echo -e '\033]2;'"compiling toolchain for $JSC_ARCH $FLAVOR"'\007'
  printf "\n\n\n\t\t=================== compiling toolchain for $JSC_ARCH $FLAVOR ===================\n\n\n"
  $SCRIPT_DIR/toolchain.sh

  echo -e '\033]2;'"compiling icu for $JSC_ARCH $FLAVOR"'\007'
  printf "\n\n\n\t\t=================== compiling icu for $JSC_ARCH $FLAVOR ===================\n\n\n"
  $SCRIPT_DIR/icu.sh

  echo -e '\033]2;'"compiling jsc for $JSC_ARCH $FLAVOR"'\007'
  # FIX: the banner ended with "===n" because the final "\n\n\n" was missing
  # its first backslash.
  printf "\n\n\n\t\t=================== compiling jsc for $JSC_ARCH $FLAVOR ===================\n\n\n"
  $SCRIPT_DIR/jsc.sh
}

compile() {
  # 32-bit ABIs are built with the compatibility flag enabled.
  for arch in arm x86
  do
    export JSC_ARCH=$arch
    export ENABLE_COMPAT=1
    compile_arch
  done

  # 64-bit ABIs do not need the compatibility flag.
  for arch in arm64 x86_64
  do
    export JSC_ARCH=$arch
    export ENABLE_COMPAT=0
    compile_arch
  done
}

# I18N is expected to contain the literal command "true" or "false".
# FIX: default to "false" when unset so an empty expansion does not make
# `if` fail under `set -e`.
if ${I18N:-false}
then
  export FLAVOR=intl
  export ENABLE_INTL=1
  compile
else
  export FLAVOR=no-intl
  export ENABLE_INTL=0
  compile
fi
#include <iostream>
#include <numeric>
using namespace std;

// Running total is kept modulo this prime; each query doubles the total.
static const long long MOD = 1000000007;

int main() {
    // Read the array and accumulate its sum modulo MOD.
    int n;
    cin >> n;
    long long total = 0;
    for (int i = 0; i < n; ++i) {
        long long value;
        cin >> value;
        total = (total + value) % MOD;
    }

    // Each query doubles the running total and prints it.
    int q;
    cin >> q;
    while (q--) {
        long long t;
        cin >> t;  // query value is consumed but not otherwise used
        total = (total * 2) % MOD;
        cout << total << endl;
    }
    return 0;
}
/**
 * Digital clock widget bound to the `#clock` element.
 * Clicking the element toggles between 12- and 24-hour display; the chosen
 * mode is persisted in localStorage under the key "twentyFourMode".
 */
class Clock {
  constructor() {
    this._localStorage = window.localStorage;
    this._clock = document.querySelector('#clock');
    this._setTime = this._setTime.bind(this);
    this._twentyFourMode = false;
    this._clockUpdater = null;
    this._init();
  }

  // Left-pad values below ten with a zero (7 -> "07").
  _appendZero(k) {
    return k < 10 ? '0' + k : k;
  }

  // Render the current time into the clock element in the active mode.
  _setTime() {
    const now = new Date();
    const min = this._appendZero(now.getMinutes());
    let hour = now.getHours();

    if (this._twentyFourMode === true) {
      this._clock.innerText = `${this._appendZero(hour)}:${min}`;
      return;
    }

    // 12-hour mode: map 0 -> 12 and 13..23 -> 1..11, with AM/PM suffix.
    const midDay = hour >= 12 ? 'PM' : 'AM';
    if (hour === 0) {
      hour = 12;
    } else if (hour > 12) {
      hour -= 12;
    }
    this._clock.innerText = `${this._appendZero(hour)}:${min} ${midDay}`;
  }

  // Paint immediately, then refresh once a second.
  _startClock() {
    this._setTime();
    this._clockUpdater = setInterval(this._setTime, 1000);
  }

  // Flip the display mode, persist it, and restart the ticker.
  _updateClockMode() {
    clearInterval(this._clockUpdater);
    this._twentyFourMode = !this._twentyFourMode;
    this._localStorage.setItem('twentyFourMode', JSON.stringify(this._twentyFourMode));
    this._startClock();
  }

  _clockClickEvent() {
    this._clock.addEventListener('click', () => {
      console.log('toggle 24-hour clock mode');
      this._updateClockMode();
    });
  }

  // Restore the persisted mode (defaults to 12-hour) and start ticking.
  _init() {
    this._twentyFourMode = JSON.parse(this._localStorage.getItem('twentyFourMode')) || false;
    this._startClock();
    this._clockClickEvent();
  }
}
import heapq
import collections


def encode_huffman(data):
    """Compress a string using Huffman coding.

    Args:
        data: the string to encode.

    Returns:
        A tuple ``(encoded_data, pair_dict)`` where ``encoded_data`` is the
        concatenated bit string for ``data`` and ``pair_dict`` maps each
        character to its code, ordered by code length then value.

    Fixes over the original:
      * empty input returns ``("", {})`` instead of raising IndexError;
      * an input with a single distinct symbol gets code "0" instead of the
        empty code (which produced an undecodable "" output).
    """
    if not data:
        return "", {}

    # Min-heap of [frequency, [char, code]] entries, one per distinct symbol.
    h = []
    for ch, freq in collections.Counter(data).items():
        h.append([freq, [ch, ""]])
    heapq.heapify(h)

    if len(h) == 1:
        # Single distinct symbol: assign a one-bit code so the output is
        # non-empty and decodable.
        h[0][1][1] = "0"

    # Repeatedly merge the two least-frequent subtrees, prefixing the codes
    # in the left subtree with '0' and in the right subtree with '1'.
    while len(h) > 1:
        l = heapq.heappop(h)
        r = heapq.heappop(h)
        for pair in l[1:]:
            pair[1] = '0' + pair[1]
        for pair in r[1:]:
            pair[1] = '1' + pair[1]
        heapq.heappush(h, [l[0] + r[0]] + l[1:] + r[1:])

    # Order the code table by (code length, [char, code]) for readability.
    pair_dict = dict(sorted(heapq.heappop(h)[1:],
                            key=lambda item: (len(item[-1]), item)))
    encoded_data = ''.join(pair_dict[ch] for ch in data)
    return encoded_data, pair_dict


encode_huffman("Hello World!")
#!/bin/bash
# Generates an openvpn.conf via ovpn_genconfig and asserts that the expected
# directives (verb, fragment, management, max-clients) made it into the file.

# Function to fail
abort() { cat <<< "$@" 1>&2; exit 1; }

# check_config REQUIRED MATCHED
# Succeeds (and echoes) when MATCHED matches the REQUIRED regex; aborts the
# whole test run otherwise. Replaces four copy-pasted if/else blocks.
check_config() {
  local required="$1"
  local match="$2"
  if [[ $match =~ $required ]]
  then
    echo "==> Config match found: $required == $match"
  else
    abort "==> Config match not found: $required != $match"
  fi
}

#
# Generate openvpn.config file
#
read -d '' EXTRA_SERVER_CONF << EOF
management localhost 7505
max-clients 10
EOF

SERV_IP=$(ip -4 -o addr show scope global | awk '{print $4}' | sed -e 's:/.*::' | head -n1)

ovpn_genconfig -u udp://$SERV_IP -f 1400 -e "$EXTRA_SERVER_CONF"

#
# grep for config lines from openvpn.conf
# add more tests for more configs as required
#

# 1. verb config
CONFIG_REQUIRED_VERB="verb 3"
CONFIG_MATCH_VERB=$(busybox grep verb /etc/openvpn/openvpn.conf)

# 2. fragment config
CONFIG_REQUIRED_FRAGMENT="fragment 1400"
CONFIG_MATCH_FRAGMENT=$(busybox grep fragment /etc/openvpn/openvpn.conf)

# 3. management config
CONFIG_REQUIRED_MANAGEMENT="^management localhost 7505"
CONFIG_MATCH_MANAGEMENT=$(busybox grep management /etc/openvpn/openvpn.conf)

# 4. max-clients config
CONFIG_REQUIRED_MAX_CLIENTS="^max-clients 10"
CONFIG_MATCH_MAX_CLIENTS=$(busybox grep max-clients /etc/openvpn/openvpn.conf)

#
# Tests
#
check_config "$CONFIG_REQUIRED_VERB"        "$CONFIG_MATCH_VERB"
check_config "$CONFIG_REQUIRED_FRAGMENT"    "$CONFIG_MATCH_FRAGMENT"
check_config "$CONFIG_REQUIRED_MANAGEMENT"  "$CONFIG_MATCH_MANAGEMENT"
check_config "$CONFIG_REQUIRED_MAX_CLIENTS" "$CONFIG_MATCH_MAX_CLIENTS"
<gh_stars>0
// @flow
import { splitTrainType } from 'server/Abfahrten/Timetable';

/**
 * Registers one jest test case: splitTrainType(input) must yield the given
 * third-party operator, train type and train id.
 * Calls with fewer than four arguments expect the missing parts to be
 * undefined (see the first two cases below).
 */
function testTrainType(input, expThirdParty, expTrainType, expTrainId) {
  // $FlowFixMe
  it(`${input} to match ${expThirdParty}, ${expTrainType}, ${expTrainId}`, () => {
    const { thirdParty, trainId, trainType } = splitTrainType(input);

    expect(thirdParty).toBe(expThirdParty);
    expect(trainId).toBe(expTrainId);
    expect(trainType).toBe(expTrainType);
  });
}

describe('Abfahrten', () => {
  describe('Correct train split', () => {
    // Empty / missing input: all parts expected undefined.
    testTrainType('');
    testTrainType();
    testTrainType('RE 123', undefined, 'RE', '123');
    testTrainType('VIA RB10', 'VIA', 'RB', '10');
    // NOTE(review): several third-party "RE" inputs below expect type 'RB'
    // (e.g. 'NWB RE2' -> 'RB') — presumably splitTrainType normalizes
    // third-party RE to RB; confirm against the implementation.
    testTrainType('NWB RE2', 'NWB', 'RB', '2');
    testTrainType('WFB RE60', 'WFB', 'RB', '60');
    testTrainType('S 5X', undefined, 'S', '5X');
    testTrainType('EBx 12', 'EBx', 'RB', '12');
    testTrainType('ALX 84111', 'ALX', 'RB', '84111');
    testTrainType('M 79073', 'M', 'RB', '79073');
    testTrainType('BOB 86975', 'BOB', 'RB', '86975');
    testTrainType('BSB 88378', 'BSB', 'S', '88378');
    testTrainType('ECE 123', undefined, 'EC', '123');
    testTrainType('IRE 87488', undefined, 'IRE', '87488');
    testTrainType('ABR RB40', 'ABR', 'RB', '40');
    testTrainType('NWB RB75', 'NWB', 'RB', '75');
    testTrainType('NWB RE18', 'NWB', 'RB', '18');
    testTrainType('ERB 61', 'E', 'RB', '61');
    testTrainType('ME RE5', 'ME', 'RB', '5');
    testTrainType('ME RB61', 'ME', 'RB', '61');
    testTrainType('FLX 1807', 'FLX', 'IR', '1807');
  });
});
<reponame>team-38-codeu-summer-2019/team38 // Get ?user=XYZ parameter value const url = new URLSearchParams(window.location.search); const parameterUserID = url.get('user'); // URL must include ?user=XYZ parameter. If not, redirect to homepage. if (!parameterUserID) { window.location.replace('/'); } /** Fetches messages and add them to the page. */ function fetchReviews() { console.log("here fetch reviews") const url = '/user-reviews?user=' + parameterUserID; fetch(url) .then((response) => { return response.json(); }) .then((reviews) => { const reviewsContainer = document.getElementById('reviews-container'); if (reviews.length == 0) { reviewsContainer.innerHTML = '<p>This user has no reviews yet.</p>'; } else { reviewsContainer.innerHTML = ''; } reviews.forEach((review) => { const reviewDiv = buildReviewDiv(review); reviewsContainer.appendChild(reviewDiv); }); }); } /** * Builds an element that displays the review. * @param {Review} review * @return {Element} */ function buildReviewDiv(review) { const headerDiv = document.createElement('div'); headerDiv.classList.add('review-header'); headerDiv.appendChild(document.createTextNode( review.merchantName + ' - ' + new Date(review.timestamp))); const ratingDiv = document.createElement('div'); ratingDiv.classList.add('rating-body'); ratingDiv.innerHTML = 'Rating = ' + review.rating; const bodyDiv = document.createElement('div'); bodyDiv.classList.add('review-body'); bodyDiv.innerHTML = review.text; const reviewDiv = document.createElement('div'); reviewDiv.classList.add('review-div'); reviewDiv.appendChild(headerDiv); reviewDiv.appendChild(ratingDiv); reviewDiv.appendChild(bodyDiv); return reviewDiv; } /** Fetches data and populates the UI of the page. */ function buildReviewUI() { // setPageTitle(); fetchReviews(); }
% Sample points to fit.
xdata = [1 3 5];
ydata = [2 5 10];

% Fit the curve: least-squares fit of the quadratic a*x^2 + b*x + c.
% fit() expects column vectors, hence the transposes on xdata/ydata.
ft = fittype( 'a*x^2 + b*x + c' );
fittedmodel = fit(xdata',ydata',ft);

% Display the equation (fitted coefficients a, b, c).
disp(fittedmodel)
package mubft import ( "fmt" "math/big" "sort" "github.com/incognitochain/incognito-chain/common" "github.com/incognitochain/incognito-chain/common/base58" "github.com/incognitochain/incognito-chain/incognitokey" "github.com/incognitochain/incognito-chain/privacy" "github.com/pkg/errors" ) type bftCommittedSig struct { ValidatorsIdxR []int Sig string } type multiSigScheme struct { //user data use for sign dataToSig common.Hash personal struct { Ri []byte r []byte } //user data user for combine sig combine struct { CommitSig string R string ValidatorsIdxR []int ValidatorsIdxAggSig []int SigningCommittee []string } scheme *privacy.MultiSigScheme } func (multiSig *multiSigScheme) Init(userKeySet *incognitokey.KeySet, committee []string) { multiSig.combine.SigningCommittee = make([]string, len(committee)) copy(multiSig.combine.SigningCommittee, committee) multiSig.scheme = new(privacy.MultiSigScheme) multiSig.scheme.Init() multiSig.scheme.GetKeyset().Set(&userKeySet.PrivateKey, &userKeySet.PaymentAddress.Pk) } func (multiSig *multiSigScheme) Prepare() error { myRiECCPoint, myrBigInt := multiSig.scheme.GenerateRandom() myRi := myRiECCPoint.Compress() myr := myrBigInt.Bytes() for len(myr) < common.BigIntSize { myr = append([]byte{0}, myr...) 
} multiSig.personal.Ri = myRi multiSig.personal.r = myr return nil } func (multiSig *multiSigScheme) SignData(RiList map[string][]byte) error { numbOfSigners := len(RiList) listPubkeyOfSigners := make([]*privacy.PublicKey, numbOfSigners) listROfSigners := make([]*privacy.EllipticPoint, numbOfSigners) RCombined := new(privacy.EllipticPoint) RCombined.Set(big.NewInt(0), big.NewInt(0)) counter := 0 for szPubKey, bytesR := range RiList { pubKeyTemp, byteVersion, err := base58.Base58Check{}.Decode(szPubKey) listPubkeyOfSigners[counter] = new(privacy.PublicKey) *listPubkeyOfSigners[counter] = pubKeyTemp if (err != nil) || (byteVersion != common.ZeroByte) { return err } listROfSigners[counter] = new(privacy.EllipticPoint) err = listROfSigners[counter].Decompress(bytesR) if err != nil { return err } RCombined = RCombined.Add(listROfSigners[counter]) multiSig.combine.ValidatorsIdxR = append(multiSig.combine.ValidatorsIdxR, common.IndexOfStr(szPubKey, multiSig.combine.SigningCommittee)) counter++ } sort.Ints(multiSig.combine.ValidatorsIdxR) commitSig, err := multiSig.scheme.GetKeyset().SignMultiSig(multiSig.dataToSig.GetBytes(), listPubkeyOfSigners, listROfSigners, new(big.Int).SetBytes(multiSig.personal.r)) if err != nil { Logger.log.Error("SignData", err) return err } multiSig.combine.R = base58.Base58Check{}.Encode(RCombined.Compress(), common.ZeroByte) commitSigInBytes, err := commitSig.Bytes() if err != nil { Logger.log.Error("SignData", err) return err } multiSig.combine.CommitSig = base58.Base58Check{}.Encode(commitSigInBytes, common.ZeroByte) return nil } func (multiSig *multiSigScheme) VerifyCommitSig(validatorPk string, commitSig string, R string, validatorsIdx []int) error { RCombined := new(privacy.EllipticPoint) RCombined.Set(big.NewInt(0), big.NewInt(0)) Rbytesarr, byteVersion, err := base58.Base58Check{}.Decode(R) if (err != nil) || (byteVersion != common.ZeroByte) { return err } err = RCombined.Decompress(Rbytesarr) if err != nil { return err } 
listPubkeyOfSigners, err := GetPubKeysFromIdx(multiSig.combine.SigningCommittee, validatorsIdx) if err != nil { return err } validatorPubkey := new(privacy.PublicKey) pubKeyTemp, byteVersion, err := base58.Base58Check{}.Decode(validatorPk) if err != nil { return err } if byteVersion != common.ZeroByte { return fmt.Errorf("VerifyCommitSig byte version %+v", byteVersion) } *validatorPubkey = pubKeyTemp var valSigbytesarr []byte valSigbytesarr, byteVersion, err = base58.Base58Check{}.Decode(commitSig) if err != nil { return err } if byteVersion != common.ZeroByte { return fmt.Errorf("VerifyCommitSig byte version %+v", byteVersion) } valSig := new(privacy.SchnMultiSig) err = valSig.SetBytes(valSigbytesarr) if err != nil { return err } resValidateEachSigOfSigners, err := valSig.VerifyMultiSig(multiSig.dataToSig.GetBytes(), listPubkeyOfSigners, []*privacy.PublicKey{validatorPubkey}, RCombined) if !resValidateEachSigOfSigners || err != nil { return errors.New("Validator's sig is invalid " + validatorPk) } return nil } func (multiSig *multiSigScheme) CombineSigs(R string, commitSigs map[string]bftCommittedSig) (string, error) { var listSigOfSigners []*privacy.SchnMultiSig var validatorsIdxR []int for pubkey, valSig := range commitSigs { sig := new(privacy.SchnMultiSig) bytesSig, byteVersion, err := base58.Base58Check{}.Decode(valSig.Sig) if (err != nil) || (byteVersion != common.ZeroByte) { return "", err } sig.SetBytes(bytesSig) if err != nil { return "", err } listSigOfSigners = append(listSigOfSigners, sig) multiSig.combine.ValidatorsIdxAggSig = append(multiSig.combine.ValidatorsIdxAggSig, common.IndexOfStr(pubkey, multiSig.combine.SigningCommittee)) validatorsIdxR = valSig.ValidatorsIdxR } sort.Ints(multiSig.combine.ValidatorsIdxAggSig) multiSig.combine.R = R multiSig.combine.ValidatorsIdxR = make([]int, len(validatorsIdxR)) copy(multiSig.combine.ValidatorsIdxR, validatorsIdxR) aggregatedSig := multiSig.scheme.CombineMultiSig(listSigOfSigners) aggregatedSigInByte, err 
:= aggregatedSig.Bytes() if err != nil { Logger.log.Error("CombineSigs", err) return common.EmptyString, err } return base58.Base58Check{}.Encode(aggregatedSigInByte, common.ZeroByte), nil }
package org.slos.battle.abilities.attack;

import org.slos.battle.abilities.Ability;
import org.slos.battle.abilities.AbilityClassification;
import org.slos.battle.abilities.AbilityType;
import org.slos.battle.abilities.rule.OnRoundStartRule;
import org.slos.battle.abilities.rule.target.TargetRuleset;
import org.slos.battle.abilities.rule.turn.PoisonedRule;

/**
 * Ability representing the "poisoned" status: it is classified as an
 * on-round-start ability and applies a {@link PoisonedRule} to its own
 * holder ({@link TargetRuleset#SELF}) at the start of each round.
 */
public class IsPoisonedAbility extends Ability {

	/** Registers this ability as {@code POISONED}, evaluated on round start. */
	public IsPoisonedAbility() {
		super(AbilityType.POISONED, AbilityClassification.ON_ROUND_START);
	}

	/** @return a fresh {@link PoisonedRule} applied when the round starts */
	@Override
	public OnRoundStartRule getEffect() {
		return new PoisonedRule();
	}

	/** @return {@link TargetRuleset#SELF} — the effect targets the holder itself */
	@Override
	public TargetRuleset getTargetRuleset() {
		return TargetRuleset.SELF;
	}
}
<filename>ide/machine_list/controller.cpp<gh_stars>1-10
//
// Ryu
//
// Copyright (C) 2017 <NAME>
// All Rights Reserved.
//
// See the LICENSE file for details about the license covering
// this source code file.
//

#include <ide/ide_types.h>
#include <core/input_action.h>
#include <hardware/registry.h>
#include "controller.h"

namespace ryu::ide::machine_list {

    static logger* s_log = logger_factory::instance()->create(
        "machine_list::controller",
        logger::level::info);

    controller::controller(const std::string& name) : ryu::core::state(name) {
    }

    // Declares the three input actions for this state (leave / add / delete)
    // and gives each a default key binding unless one is already configured.
    void controller::define_actions() {
        auto leave_action = core::input_action::create_no_map(
            "machine_list_leave",
            "Internal",
            "Close the machine list and return to previous state.");
        if (!leave_action->has_bindings())
            leave_action->bind_keys({core::key_escape});

        auto add_action = core::input_action::create_no_map(
            "machine_list_add",
            "Internal",
            "Add a new machine and open the editor.");
        if (!add_action->has_bindings())
            add_action->bind_keys({core::key_f1});

        auto delete_action = core::input_action::create_no_map(
            "machine_list_delete",
            "Internal",
            "Delete the selection machine from the registry.");
        if (!delete_action->has_bindings())
            delete_action->bind_keys({core::key_delete});
    }

    // Wires the actions defined above to their handlers on this state.
    void controller::bind_events() {
        action_provider().register_handler(
            core::input_action::find_by_name("machine_list_leave"),
            [this](const core::event_data_t& data) {
                end_state();
                return true;
            });
        action_provider().register_handler(
            core::input_action::find_by_name("machine_list_add"),
            [this](const core::event_data_t& data) {
                edit_new_machine();
                return true;
            });
        action_provider().register_handler(
            core::input_action::find_by_name("machine_list_delete"),
            [this](const core::event_data_t& data) {
                delete_selected_machine();
                return true;
            });
    }

    void controller::on_initialize() {
        define_actions();
        bind_events();
    }

    // Hide the panel and clear any in-progress pick-list search on leave.
    void controller::on_deactivate() {
        _layout_panel->visible(false);
        _pick_list->reset_search();
    }

    // Creates a machine with a generated name ("Machine <count>") and
    // transitions to the machine editor for it.
    void controller::edit_new_machine() {
        auto new_machine_name = fmt::format(
            "Machine {}",
            hardware::registry::instance()->machines().size());
        transition_to(
            core::system_commands::edit_machine,
            {{"name", new_machine_name}});
    }

    // Opens the editor for the machine currently selected in the pick list;
    // does nothing when the selected key is not found in the registry.
    void controller::edit_selected_machine() {
        auto machine = hardware::registry::instance()
            ->find_machine(_pick_list->rows()[_pick_list->selected()].key);
        if (machine == nullptr)
            return;
        _pick_list->reset_search();
        transition_to(
            core::system_commands::edit_machine,
            {{"name", machine->name()}});
    }

    // Removes the selected machine from the registry and, on success,
    // drops its row from the pick list.
    void controller::delete_selected_machine() {
        core::result result;
        hardware::registry::instance()
            ->remove_machine(result, _pick_list->rows()[_pick_list->selected()].key);
        if (!result.is_failed())
            _pick_list->remove_row(_pick_list->selected());
    }

    // Loads the YAML-defined view, resolves the named child widgets and
    // hooks up all button / pick-list callbacks. Returns false on failure.
    bool controller::on_load(core::result& result) {
        _layout_panel = core::view_factory::create_loadable_view(
            this,
            "loadable-view",
            context()->font_family(),
            &context()->palette(),
            context()->prefs(),
            ide::colors::info_text,
            ide::colors::fill_color,
            result,
            "assets/views/machine-list.yaml");
        s_log->result(result);
        if (result.is_failed())
            return false;

        _add_button = _layout_panel->find_by_name<core::button>("add-button");
        _edit_button = _layout_panel->find_by_name<core::button>("edit-button");
        _delete_button = _layout_panel->find_by_name<core::button>("delete-button");

        _pick_list = _layout_panel->find_by_name<core::column_pick_list>("pick-list");
        _pick_list->on_activated([this](uint32_t row) {
            edit_selected_machine();
        });
        // Edit/delete are only enabled while a row is actually selected.
        _pick_list->on_selection_changed([this](int32_t row) {
            _edit_button->enabled(row != -1);
            _delete_button->enabled(row != -1);
        });

        _add_button->on_clicked([this]() {
            edit_new_machine();
        });
        _edit_button->on_clicked([this]() {
            edit_selected_machine();
        });
        _delete_button->on_clicked([this]() {
            delete_selected_machine();
        });

        return !result.is_failed();
    }

    void controller::on_draw(core::renderer& surface) {
        _layout_panel->draw(surface);
    }

    void controller::on_resize(const core::rect& bounds) {
        layout_engine()->size(bounds.to_size());
    }

    // Repopulates the pick list from the machine registry each time the
    // state becomes active; display falls back to "(none)" when unset.
    void controller::on_activate(const core::parameter_dict& params) {
        _pick_list->clear_rows();
        for (const auto machine : hardware::registry::instance()->machines()) {
            std::string display_name("(none)");
            if (machine->display() != nullptr)
                display_name = machine->display()->name();
            _pick_list->add_row({
                machine->id(),
                {
                    ryu::core::pick_list_column_t(machine->id()),
                    ryu::core::pick_list_column_t(machine->name()),
                    ryu::core::pick_list_column_t(machine->address_space()),
                    ryu::core::pick_list_column_t(display_name)
                }
            });
        }
        _layout_panel->visible(true);
        context()->resize();
    }

    void controller::on_update(uint32_t dt, core::pending_event_list& events) {
        _layout_panel->update(dt, events);
    }

}
#!/bin/bash
# Usage: <script> Training_File Result_File Testing_File Log_Path
# Runs get_Result.pl once per threshold (0..1000, step 10), parses the
# counters out of each comparison log, and appends one row per threshold
# to the accuracy report.

if [ $# -eq 0 ]
then
	exit 0
fi

## Testing_Path  : All testing log file will put in this folder
## Training_Path : All training txt file will put in this folder
## Result_F      : Result file with path
## Report_Path   : All report file that create by this script will put in this folder
Training_File=$1
Result_File=$2
Testing_File=$3
Log_Path=$4

# Path to the perl interpreter (renamed from the misleading "Bash").
PERL=`which perl`
Threshold=0
Accuracy_F="Accuracy_Report_Perl_"$Training_File".txt"

# FIX: the original truncated the unrelated file "Accuracy_Report_Perl.txt"
# while all output goes to $Accuracy_F; truncate the actual report instead.
cat /dev/null > "$Accuracy_F"

# NOTE(review): $Key and $Candidate are never assigned anywhere in this
# script, so the first and third columns of every data row are empty —
# confirm whether they should be arguments or parsed from the log.
echo "Key Threshold Candidate Total Expected_Wrong_Detect Wrong_Detect Wrong_Detect_NotExpected Expected_Recognition Not_Expected_Recognition Correct_Recognition Wrong_Recognition_Not_Expected " > "$Accuracy_F"

while [ $Threshold -le 1000 ]
do
	Testing_F=$Testing_File
	Training_F=$Training_File
	Output_F=$Log_Path"/ComparisonResult_"$Training_File"_S"$Threshold".log"
	Result_F=$Result_File

	## get_Result.pl Training-File Result-File Testing-Log Threshold-Value
	$PERL get_Result.pl $Training_F $Result_F $Testing_F $Threshold > $Output_F

	# Extract each counter from the comparison log by its label.
	Total_Face=`cat $Output_F | grep " Total Face : " | cut -d ":" -f 2`
	Wrong_Detect=`cat $Output_F | grep " Not Face True : " | cut -d ":" -f 2`
	Expected_Recognition=`cat $Output_F | grep " Expected True : " | cut -d ":" -f 2`
	Not_Expected_Recognition=`cat $Output_F | grep " Expected False : " | cut -d ":" -f 2`
	Correct_Recognition=`cat $Output_F | grep "Recognition True : " | cut -d ":" -f 2`
	Wrong_Recognition_Not_Expected=`cat $Output_F | grep " Recognition False (Expected False) : " | cut -d ":" -f 2`
	Wrong_Detect_NotExpected=`cat $Output_F | grep " Not Face False : " | cut -d ":" -f 2`
	Expected_Wrong_Detect=`cat $Output_F | grep " Total Not Face : " | cut -d ":" -f 2`

	echo "$Key $Threshold $Candidate $Total_Face $Expected_Wrong_Detect $Wrong_Detect $Wrong_Detect_NotExpected $Expected_Recognition $Not_Expected_Recognition $Correct_Recognition $Wrong_Recognition_Not_Expected" >> "$Accuracy_F"

	Threshold=$(($Threshold+10))
done
from typing import List


class OTLAttribuut:
    """Lightweight value object describing a single OTL attribute."""

    def __init__(self, field, naam, label, objectUri, definition, owner):
        self.field = field            # datatype / field class name
        self.naam = naam              # attribute name (Dutch: "naam")
        self.label = label            # human-readable label
        self.objectUri = objectUri    # full URI of the attribute
        self.definition = definition  # textual definition
        self.owner = owner            # owning object


def extract_attribute_names(attributes: List[OTLAttribuut]) -> List[str]:
    """Return the ``naam`` of every attribute, preserving input order."""
    names: List[str] = []
    for attribute in attributes:
        names.append(attribute.naam)
    return names


# Example usage
basisoppervlakte = OTLAttribuut(field='KwantWrdInVierkanteMeter',
                                naam='basisoppervlakte',
                                label='oppervlakte',
                                objectUri='https://wegenenverkeer.data.vlaanderen.be/ns/onderdeel#DwarseMarkeringVerschuind.basisoppervlakte',
                                definition='De basisoppervlakte van de dwarse markering in vierkante meter.',
                                owner='self')

code = OTLAttribuut(field='KlDwarseMarkeringVerschuindCode',
                    naam='code',
                    label='code',
                    objectUri='https://wegenenverkeer.data.vlaanderen.be/ns/onderdeel#DwarseMarkeringVerschuind.code',
                    definition='De (COPRO/BENOR) code van dwarse markering.',
                    owner='self')

attributes = [basisoppervlakte, code]
print(extract_attribute_names(attributes))  # Output: ['basisoppervlakte', 'code']
import { useState, useEffect } from "react";

/**
 * React hook that tracks the device's position via the browser
 * Geolocation API. Returns `null` until the first fix arrives, then an
 * object `{ latitude, longitude }` that updates on every position change.
 */
const useGeolocation = () => {
  const [location, setLocation] = useState(null);

  // Use the built-in Geolocation API to get the user's location
  useEffect(() => {
    // watchPosition keeps firing the callback as the position changes.
    const watchId = navigator.geolocation.watchPosition(position => {
      setLocation({
        latitude: position.coords.latitude,
        longitude: position.coords.longitude,
      });
    });

    // When the component unmounts, stop watching the user's location
    return () => navigator.geolocation.clearWatch(watchId);
  }, []);

  return location;
};

export default useGeolocation;
# Compile main.c into the executable "main", defining the preprocessor macro
# HELLO as 66 (equivalent to `#define HELLO 66` in the source).
gcc -DHELLO=66 main.c -o main
def mergeSort(arr):
    """Sort `arr` in place in ascending order using merge sort.

    Runs in O(n log n) time with O(n) auxiliary space. Lists of length
    0 or 1 are already sorted and are left untouched. The `<=` comparison
    in the merge keeps the sort stable (equal elements retain their
    original relative order).
    """
    if len(arr) > 1:
        mid = len(arr) // 2   # Finding the mid of the array
        L = arr[:mid]         # Dividing the array elements
        R = arr[mid:]         # into 2 halves

        mergeSort(L)          # Sorting the first half
        mergeSort(R)          # Sorting the second half

        i = j = k = 0

        # Merge the two sorted halves back into arr.
        while i < len(L) and j < len(R):
            # FIX: `<=` (was `<`) takes from the left half on ties,
            # which makes the merge — and therefore the sort — stable.
            if L[i] <= R[j]:
                arr[k] = L[i]
                i += 1
            else:
                arr[k] = R[j]
                j += 1
            k += 1

        # Copy whatever remains of the half that was not exhausted.
        while i < len(L):
            arr[k] = L[i]
            i += 1
            k += 1

        while j < len(R):
            arr[k] = R[j]
            j += 1
            k += 1
class BankAccount:
    """A minimal bank account tracking a balance and a transaction log.

    Deposits are recorded in ``transactions`` as positive amounts,
    withdrawals as negative amounts, in the order they occurred.
    """

    def __init__(self, name, balance):
        self.name = name          # account holder's name
        self.balance = balance    # current balance
        self.transactions = []    # signed amounts, chronological

    def deposit(self, amount):
        """Add ``amount`` to the balance and record the transaction."""
        self.balance = self.balance + amount
        self.transactions.append(amount)

    def withdraw(self, amount):
        """Deduct ``amount`` if funds suffice.

        Returns True on success; returns False (and leaves the account
        unchanged) when the balance is insufficient.
        """
        if self.balance < amount:
            return False
        self.balance = self.balance - amount
        self.transactions.append(-amount)
        return True
<gh_stars>0
package com.company.sales.api.handlers;

import com.microsoft.azure.functions.*;
import com.microsoft.azure.functions.annotation.*;

import java.math.BigDecimal;
import java.time.LocalDate;
import java.util.UUID;

import com.company.sales.api.models.*;

/**
 * Azure Functions handlers for customer subscriptions stored in Cosmos DB:
 * listing a customer's subscriptions, creating a new subscription, and a
 * nightly timer job that forwards the day's due subscriptions to the
 * payment queue.
 */
public class SubscriptionHandler {

    /** Cosmos DB collection that holds the subscription documents. */
    final static String CollectionName = "Subscriptions";

    /**
     * GET customers/{customerId}/subscriptions
     *
     * Returns all subscriptions for the given customer. The customer and
     * the subscription list are both resolved declaratively via Cosmos DB
     * input bindings; a missing customer yields 400 Bad Request.
     */
    @FunctionName("GetSubscriptions")
    public HttpResponseMessage getSubscriptions(
            @HttpTrigger(
                    name = "req",
                    methods = {HttpMethod.GET},
                    authLevel = AuthorizationLevel.ANONYMOUS,
                    route = "customers/{customerId}/subscriptions")
                    HttpRequestMessage<String> request,
            @CosmosDBInput(name = "subscriptionDatabase",
                    databaseName = "%DatabaseName%",
                    collectionName = CollectionName,
                    sqlQuery = "select * from s where s.customerId = {customerId}",
                    connectionStringSetting = "CosmosConnectionStr")
                    Subscription[] subscriptions,
            @CosmosDBInput(name = "customerDatabase",
                    databaseName = "%DatabaseName%",
                    collectionName = CustomerHandler.CollectionName,
                    id = "{customerId}",
                    partitionKey = "{customerId}",
                    connectionStringSetting = "CosmosConnectionStr")
                    Customer customer,
            @BindingName("customerId") final UUID customerId,
            final ExecutionContext context) {

        if (customer == null) {
            // Unknown customer id — nothing to list.
            return request.createResponseBuilder(HttpStatus.BAD_REQUEST).body("No customer with Id " + customerId).build();
        } else {
            return request.createResponseBuilder(HttpStatus.OK).body(subscriptions).build();
        }
    }

    /**
     * POST customers/{customerId}/subscriptions
     *
     * Creates a subscription for the customer. Validates that the customer
     * exists and that the price is positive; fills in the server-owned
     * fields (customerId, generated id, billing day-of-month = today) and
     * persists via the Cosmos DB output binding. Returns 201 with the
     * stored document, or 400 on validation failure.
     */
    @FunctionName("CreateSubscription")
    public HttpResponseMessage createSubscription(
            @HttpTrigger(
                    name = "req",
                    methods = {HttpMethod.POST},
                    authLevel = AuthorizationLevel.ANONYMOUS,
                    route = "customers/{customerId}/subscriptions")
                    HttpRequestMessage<Subscription> request,
            @CosmosDBInput(name = "customerDatabase",
                    databaseName = "%DatabaseName%",
                    collectionName = CustomerHandler.CollectionName,
                    id = "{customerId}",
                    partitionKey = "{customerId}",
                    connectionStringSetting = "CosmosConnectionStr")
                    Customer customer,
            @CosmosDBOutput(name = "subscriptionDatabase",
                    databaseName = "%DatabaseName%",
                    collectionName = CollectionName,
                    connectionStringSetting = "CosmosConnectionStr")
                    OutputBinding<Subscription> subscriptionOutput,
            @BindingName("customerId") final UUID customerId,
            final ExecutionContext context) {

        Subscription subscription = request.getBody();

        if (customer == null) {
            return request.createResponseBuilder(HttpStatus.BAD_REQUEST).body("No customer with Id " + customerId).build();
        } else if (subscription.price.compareTo(BigDecimal.ZERO) <= 0) {
            // Price must be strictly positive.
            return request.createResponseBuilder(HttpStatus.BAD_REQUEST).body("Price invalid.").build();
        } else {
            subscription.customerId = customerId;
            subscription.id = UUID.randomUUID();
            // Bill on the same day of the month the subscription was created.
            subscription.dayOfMonth = LocalDate.now().getDayOfMonth();

            subscriptionOutput.setValue(subscription);

            return request.createResponseBuilder(HttpStatus.CREATED).body(subscription).build();
        }
    }

    /**
     * Nightly (00:00) timer job: queries the subscriptions whose billing
     * day-of-month is today and pushes them onto the payment Service Bus
     * queue for processing.
     */
    @FunctionName("RetrieveDailySubscriptions")
    public void retrieveDailySubscriptions(
            @TimerTrigger(
                    name = "timer",
                    schedule = "0 0 0 * * *")
                    String timerInfo,
            @CosmosDBInput(name = "subscriptionDatabase",
                    databaseName = "%DatabaseName%",
                    collectionName = CollectionName,
                    sqlQuery = "SELECT * FROM s where s.dayOfMonth = DateTimePart(\"d\", GetCurrentDateTime())",
                    connectionStringSetting = "CosmosConnectionStr")
                    Subscription[] subscriptions,
            @ServiceBusQueueOutput(name = "sb",
                    queueName = "%PaymentQueue%",
                    connection = "ServiceBusConnectionStr")
                    OutputBinding<Subscription[]> subscriptionOutput,
            final ExecutionContext context) {

        subscriptionOutput.setValue(subscriptions);
    }
}
// Wires up the seasons menu: each click on the toggle button opens/closes
// the panel and rotates the button, both driven purely by CSS classes.
const openMenuBtn = document.querySelector(".toggle-menu");
const seasonsMenu = document.querySelector(".seasons");

function handleMenuToggle() {
  seasonsMenu.classList.toggle("open");
  openMenuBtn.classList.toggle("rotate");
}

openMenuBtn.addEventListener("click", handleMenuToggle);
<filename>xilinx_temp_project/skopro_project_xilinx_ise12migration/isim/main_isim_beh.exe.sim/work/a_2399776393_1516540902.c<gh_stars>1-10
/***********************************************************************/
/*  Copyright (c) 2003-2009 Xilinx, Inc.  All Right Reserved.          */
/***********************************************************************/
/* This file is designed for use with ISim build 0x2f00eba5 */
/*
 * MACHINE-GENERATED simulation kernel code produced by Xilinx ISim from
 * E:/skopro2/datapath.vhd — do not edit by hand; regenerate instead.
 * Each work_a_..._p_N function is one VHDL process; the xsi_set_current_line
 * call at the top of each names the source line it was compiled from.
 */
#define XSI_HIDE_SYMBOL_SPEC true
#include "xsi.h"
#include <memory.h>
#ifdef __GNUC__
#include <stdlib.h>
#else
#include <malloc.h>
#define alloca _alloca
#endif

/* Path of the VHDL source this unit was generated from. */
static const char *ng0 = "E:/skopro2/datapath.vhd";

/* Generated process for datapath.vhd line 72 (per xsi_set_current_line). */
static void work_a_2399776393_1516540902_p_0(char *t0)
{
    char *t1;
    char *t2;
    unsigned char t4;
    unsigned int t5;
    char *t6;
    char *t7;
    char *t8;
    char *t9;
    unsigned int t10;
    unsigned int t11;
    unsigned int t12;
    char *t13;
    char *t14;
    char *t15;
    char *t16;
    char *t17;
    char *t18;
    char *t19;
    unsigned char t21;
    unsigned int t22;
    char *t23;
    char *t24;
    char *t25;
    char *t26;
    char *t27;
    char *t28;
    char *t29;
    char *t30;
    char *t31;
    char *t32;
    char *t33;
    char *t34;
    char *t35;
    char *t36;
    char *t37;
    char *t38;

LAB0:    xsi_set_current_line(72, ng0);
    t1 = (t0 + 1052U);
    t2 = *((char **)t1);
    t1 = (t0 + 6431);
    t4 = 1;
    if (2U == 2U)
        goto LAB5;

LAB6:    t4 = 0;

LAB7:    if (t4 != 0)
        goto LAB3;

LAB4:    t18 = (t0 + 1052U);
    t19 = *((char **)t18);
    t18 = (t0 + 6433);
    t21 = 1;
    if (2U == 2U)
        goto LAB13;

LAB14:    t21 = 0;

LAB15:    if (t21 != 0)
        goto LAB11;

LAB12:
LAB19:    t31 = xsi_get_transient_memory(4U);
    memset(t31, 0, 4U);
    t32 = t31;
    memset(t32, (unsigned char)2, 4U);
    t33 = (t0 + 3880);
    t34 = (t33 + 32U);
    t35 = *((char **)t34);
    t36 = (t35 + 40U);
    t37 = *((char **)t36);
    memcpy(t37, t31, 4U);
    xsi_driver_first_trans_fast(t33);

LAB2:    t38 = (t0 + 3812);
    *((int *)t38) = 1;

LAB1:    return;

LAB3:    t8 = (t0 + 2340U);
    t9 = *((char **)t8);
    t10 = (4 - 3);
    t11 = (t10 * 1U);
    t12 = (0 + t11);
    t8 = (t9 + t12);
    t13 = (t0 + 3880);
    t14 = (t13 + 32U);
    t15 = *((char **)t14);
    t16 = (t15 + 40U);
    t17 = *((char **)t16);
    memcpy(t17, t8, 4U);
    xsi_driver_first_trans_fast(t13);
    goto LAB2;

LAB5:    t5 = 0;

LAB8:    if (t5 < 2U)
        goto LAB9;
    else
        goto LAB7;

LAB9:    t6 = (t2 + t5);
    t7 = (t1 + t5);
    if (*((unsigned char *)t6) != *((unsigned char *)t7))
        goto LAB6;

LAB10:    t5 = (t5 + 1);
    goto LAB8;

LAB11:    t25 = (t0 + 1972U);
    t26 = *((char **)t25);
    t25 = (t0 + 3880);
    t27 = (t25 + 32U);
    t28 = *((char **)t27);
    t29 = (t28 + 40U);
    t30 = *((char **)t29);
    memcpy(t30, t26, 4U);
    xsi_driver_first_trans_fast(t25);
    goto LAB2;

LAB13:    t22 = 0;

LAB16:    if (t22 < 2U)
        goto LAB17;
    else
        goto LAB15;

LAB17:    t23 = (t19 + t22);
    t24 = (t18 + t22);
    if (*((unsigned char *)t23) != *((unsigned char *)t24))
        goto LAB14;

LAB18:    t22 = (t22 + 1);
    goto LAB16;

LAB20:    goto LAB2;

}

/* Generated process for datapath.vhd line 74. */
static void work_a_2399776393_1516540902_p_1(char *t0)
{
    char *t1;
    char *t2;
    unsigned char t3;
    unsigned char t4;
    char *t5;
    unsigned int t6;
    unsigned int t7;
    unsigned int t8;
    char *t9;
    char *t10;
    char *t11;
    char *t12;
    char *t13;
    char *t14;
    char *t15;
    char *t16;
    char *t17;
    char *t18;
    char *t19;
    char *t20;

LAB0:    xsi_set_current_line(74, ng0);
    t1 = (t0 + 960U);
    t2 = *((char **)t1);
    t3 = *((unsigned char *)t2);
    t4 = (t3 == (unsigned char)3);
    if (t4 != 0)
        goto LAB3;

LAB4:
LAB5:    t14 = (t0 + 1604U);
    t15 = *((char **)t14);
    t14 = (t0 + 3916);
    t16 = (t14 + 32U);
    t17 = *((char **)t16);
    t18 = (t17 + 40U);
    t19 = *((char **)t18);
    memcpy(t19, t15, 4U);
    xsi_driver_first_trans_fast(t14);

LAB2:    t20 = (t0 + 3820);
    *((int *)t20) = 1;

LAB1:    return;

LAB3:    t1 = (t0 + 2340U);
    t5 = *((char **)t1);
    t6 = (4 - 3);
    t7 = (t6 * 1U);
    t8 = (0 + t7);
    t1 = (t5 + t8);
    t9 = (t0 + 3916);
    t10 = (t9 + 32U);
    t11 = *((char **)t10);
    t12 = (t11 + 40U);
    t13 = *((char **)t12);
    memcpy(t13, t1, 4U);
    xsi_driver_first_trans_fast(t9);
    goto LAB2;

LAB6:    goto LAB2;

}

/* Generated process for datapath.vhd line 76 (drives a port). */
static void work_a_2399776393_1516540902_p_2(char *t0)
{
    char *t1;
    char *t2;
    int t3;
    unsigned int t4;
    unsigned int t5;
    unsigned int t6;
    unsigned char t7;
    unsigned char t8;
    char *t9;
    char *t10;
    char *t11;
    char *t12;
    char *t13;
    char *t14;
    char *t15;
    char *t16;
    char *t17;
    char *t18;
    char *t19;

LAB0:    xsi_set_current_line(76, ng0);
    t1 = (t0 + 2340U);
    t2 = *((char **)t1);
    t3 = (4 - 4);
    t4 = (t3 * -1);
    t5 = (1U * t4);
    t6 = (0 + t5);
    t1 = (t2 + t6);
    t7 = *((unsigned char *)t1);
    t8 = (t7 == (unsigned char)3);
    if (t8 != 0)
        goto LAB3;

LAB4:
LAB5:    t14 = (t0 + 3952);
    t15 = (t14 + 32U);
    t16 = *((char **)t15);
    t17 = (t16 + 40U);
    t18 = *((char **)t17);
    *((unsigned char *)t18) = (unsigned char)2;
    xsi_driver_first_trans_fast_port(t14);

LAB2:    t19 = (t0 + 3828);
    *((int *)t19) = 1;

LAB1:    return;

LAB3:    t9 = (t0 + 3952);
    t10 = (t9 + 32U);
    t11 = *((char **)t10);
    t12 = (t11 + 40U);
    t13 = *((char **)t12);
    *((unsigned char *)t13) = (unsigned char)3;
    xsi_driver_first_trans_fast_port(t9);
    goto LAB2;

LAB6:    goto LAB2;

}

/* Generated process for datapath.vhd line 78 (drives a port). */
static void work_a_2399776393_1516540902_p_3(char *t0)
{
    char *t1;
    char *t2;
    unsigned int t3;
    unsigned int t4;
    unsigned int t5;
    char *t6;
    char *t7;
    char *t8;
    char *t9;
    char *t10;
    char *t11;

LAB0:    xsi_set_current_line(78, ng0);

LAB3:    t1 = (t0 + 2340U);
    t2 = *((char **)t1);
    t3 = (4 - 3);
    t4 = (t3 * 1U);
    t5 = (0 + t4);
    t1 = (t2 + t5);
    t6 = (t0 + 3988);
    t7 = (t6 + 32U);
    t8 = *((char **)t7);
    t9 = (t8 + 40U);
    t10 = *((char **)t9);
    memcpy(t10, t1, 4U);
    xsi_driver_first_trans_fast_port(t6);

LAB2:    t11 = (t0 + 3836);
    *((int *)t11) = 1;

LAB1:    return;

LAB4:    goto LAB2;

}

/* Registers the four generated processes with the ISim kernel. */
extern void work_a_2399776393_1516540902_init()
{
    static char *pe[] = {(void *)work_a_2399776393_1516540902_p_0,(void *)work_a_2399776393_1516540902_p_1,(void *)work_a_2399776393_1516540902_p_2,(void *)work_a_2399776393_1516540902_p_3};
    xsi_register_didat("work_a_2399776393_1516540902", "isim/main_isim_beh.exe.sim/work/a_2399776393_1516540902.didat");
    xsi_register_executes(pe);
}
<filename>examples/getBeachDetails.js<gh_stars>1-10 var BeachWaterQuality = require('../index'); var bwq = new BeachWaterQuality(); bwq.getBeachDetails('CSU').then(function(details) { console.log(details); }); bwq.getBeachDetails(['CSU','GEM']).then(function(details) { console.log(details); }); bwq.getBeachDetails('CSU,GEM').then(function(details) { console.log(details); });
Start by scanning the string for a character that matches the first character of the pattern. When such a match is found, compare the remaining characters of the pattern, one by one, against the substring that immediately follows the matching character in the string. Repeat this process until every character of the pattern has been matched, or until the end of the string is reached.
<gh_stars>10-100
/**
 * Copyright 2014 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.netflix.dynomitemanager.storage;

import com.google.inject.Inject;
import com.google.inject.Singleton;
import com.netflix.dynomitemanager.config.InstanceState;
import com.netflix.dynomitemanager.dynomite.DynomiteRest;
import com.netflix.dynomitemanager.dynomite.IDynomiteProcess;
import com.netflix.nfsidecar.identity.AppsInstance;
import com.netflix.nfsidecar.identity.InstanceIdentity;
import com.netflix.nfsidecar.resources.env.IEnvVariables;
import com.netflix.nfsidecar.scheduler.SimpleTimer;
import com.netflix.nfsidecar.scheduler.Task;
import com.netflix.nfsidecar.scheduler.TaskTimer;
import com.netflix.nfsidecar.tokensdb.IAppsInstanceFactory;
import com.netflix.nfsidecar.utils.Sleeper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.joda.time.DateTime;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

/**
 * Scheduled task that warm-bootstraps this node: it starts the storage (Redis)
 * process, syncs data from a local-DC peer that owns the same token range, then
 * starts Dynomite and walks it through the writes_only → resuming → normal
 * state transitions so delayed writes are flushed before normal operation.
 */
@Singleton
public class WarmBootstrapTask extends Task {

    private static final Logger logger = LoggerFactory.getLogger(WarmBootstrapTask.class);

    public static final String JOBNAME = "Bootstrap-Task";

    private final IDynomiteProcess dynProcess;
    private final StorageProxy storageProxy;
    private final IAppsInstanceFactory appsInstanceFactory;
    private final InstanceIdentity ii;
    private final InstanceState state;
    private final Sleeper sleeper;
    private final StorageProcessManager storageProcessMgr;
    private final IEnvVariables envVariables;

    @Inject
    public WarmBootstrapTask(IAppsInstanceFactory appsInstanceFactory, InstanceIdentity id, IDynomiteProcess dynProcess,
            StorageProxy storageProxy, InstanceState ss, Sleeper sleeper, StorageProcessManager storageProcessMgr,
            IEnvVariables envVariables) {
        this.dynProcess = dynProcess;
        this.storageProxy = storageProxy;
        this.appsInstanceFactory = appsInstanceFactory;
        this.ii = id;
        this.state = ss;
        this.sleeper = sleeper;
        this.storageProcessMgr = storageProcessMgr;
        this.envVariables = envVariables;
    }

    /**
     * Runs one warm-bootstrap attempt. Only acts when the storage process is not
     * already alive; otherwise the invocation is a no-op.
     */
    public void execute() throws IOException {
        logger.info("Running warmbootstrapping ...");

        this.state.setFirstBootstrap(false);
        this.state.setBootstrapTime(DateTime.now());

        // Just to be sure, test again that storage is not already up.
        if (!state.isStorageAlive()) {
            // Start the storage (Redis) process before syncing data into it.
            this.storageProcessMgr.start();
            logger.info("Redis is up ---> Starting warm bootstrap.");

            // Mark the node as bootstrapping for the duration of the sync.
            this.state.setBootstrapping(true);

            // Sleep to make sure the storage process is up before warming it.
            this.sleeper.sleepQuietly(5000);

            String[] peers = getLocalPeersWithSameTokensRange();

            // Try one node only for now.
            // TODO: if this peer is not good, try the next one until we can get the data.
            if (peers != null && peers.length != 0) {
                // Run the warm-up and inspect its terminal status.
                Bootstrap bootstrap = this.storageProxy.warmUpStorage(peers);

                // NOTE: Dynomite is started not only on success but also on the two
                // terminal failure states (expired bootstrap time / retries exhausted) —
                // preserved from the original behavior: a partially-warmed node still
                // rejoins the ring rather than staying down.
                if (bootstrap == Bootstrap.IN_SYNC_SUCCESS || bootstrap == Bootstrap.EXPIRED_BOOTSTRAPTIME_FAIL
                        || bootstrap == Bootstrap.RETRIES_FAIL) {

                    // Since we are ready, start Dynomite.
                    try {
                        this.dynProcess.start();
                    } catch (IOException ex) {
                        // Keep the cause in the log instead of dropping it.
                        logger.error("Dynomite failed to start", ex);
                    }
                    // Wait for 1 second before we check dynomite status.
                    sleeper.sleepQuietly(1000);
                    if (this.dynProcess.dynomiteCheck()) {
                        logger.info("Dynomite health check passed");
                    } else {
                        logger.error("Dynomite health check failed");
                    }

                    // Record the bootstrap outcome.
                    this.state.setBootstrapStatus(bootstrap);

                    logger.info("Set Dynomite to allow writes only!!!");
                    DynomiteRest.sendCommand("/state/writes_only");

                    logger.info("Stop Redis' Peer syncing!!!");
                    this.storageProxy.stopPeerSync();

                    logger.info("Set Dynomite to resuming state to allow writes and flush delayed writes");
                    DynomiteRest.sendCommand("/state/resuming");

                    // Sleep 15s for the flushing to catch up.
                    sleeper.sleepQuietly(15000);

                    logger.info("Set Dynomite to normal state");
                    DynomiteRest.sendCommand("/state/normal");
                } else {
                    logger.error("Warm up failed: Stop Redis' Peer syncing!!!");
                    this.storageProxy.stopPeerSync();
                }
            } else {
                logger.error("Unable to find any peer with the same token!");
            }

            /*
             * Performing a check of Dynomite after bootstrap is complete. This is
             * important as there are cases where Dynomite reaches the 1M messages limit
             * and is unaccessible after bootstrap.
             */
            if (this.dynProcess.dynomiteCheck()) {
                // Fixed log level: this is the success path and was logged at ERROR.
                logger.info("Dynomite is up since warm up succeeded");
            } else {
                logger.error("Dynomite check failed after warm bootstrap");
            }

            // Finalize bootstrap.
            this.state.setBootstrapping(false);
        }
    }

    @Override
    public String getName() {
        return JOBNAME;
    }

    /** @return timer that runs this task once every 10 minutes. */
    public static TaskTimer getTimer() {
        return new SimpleTimer(JOBNAME, 10 * 60 * 1000);
    }

    /**
     * Finds hostnames of local-DC instances in other racks that own exactly the
     * same token(s) as this node — the candidate warm-up sources.
     */
    private String[] getLocalPeersWithSameTokensRange() {
        String tokens = ii.getTokens();

        logger.info("Warming up node's own token(s) : " + tokens);
        List<AppsInstance> instances = appsInstanceFactory.getLocalDCIds(envVariables.getDynomiteClusterName(),
                envVariables.getRegion());
        List<String> peers = new ArrayList<String>();
        for (AppsInstance ins : instances) {
            logger.info("Instance's token(s); " + ins.getToken());
            // Same token, different rack: replicas of our range that we can copy from.
            if (!ins.getRack().equals(ii.getInstance().getRack()) && ins.getToken().equals(tokens)) {
                peers.add(ins.getHostName());
            }
        }
        logger.info("peers size: " + peers.size());
        return peers.toArray(new String[0]);
    }
}
#pragma once #include "Globals.h" enum class ComponentType { NO_TYPE = -1, TRANSFORM, MESHRENDERER, RENDERER2D, CAMERA }; class GameObject; class Component { public: Component(GameObject* gameObject, ComponentType type); virtual void OnInspector() {} virtual void OnTransformChanged() {} public: inline bool IsActive() const { return active; } bool IsTreeActive() const; inline void ToggleActive() { active = !active; } public: GameObject* gameObject = nullptr; protected: bool active = true; ComponentType type = ComponentType::NO_TYPE; uint UUID = 0u; };
<reponame>maheshrajamani/stargate<filename>restapi/src/main/java/io/stargate/web/docsapi/service/query/filter/operation/impl/NotNullValueFilterOperation.java
/*
 * Copyright The Stargate Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.stargate.web.docsapi.service.query.filter.operation.impl;

import io.stargate.web.docsapi.exception.ErrorCode;
import io.stargate.web.docsapi.exception.ErrorCodeRuntimeException;
import io.stargate.web.docsapi.service.query.filter.operation.ComparingValueFilterOperation;

/**
 * Base class shared by filter operations whose filter value must never be {@code null}; every
 * typed validation hook funnels into one null check.
 */
public abstract class NotNullValueFilterOperation implements ComparingValueFilterOperation {

  /** {@inheritDoc} */
  @Override
  public void validateNumberFilterInput(Number filterValue) {
    requireNonNullInput(filterValue);
  }

  /** {@inheritDoc} */
  @Override
  public void validateStringFilterInput(String filterValue) {
    requireNonNullInput(filterValue);
  }

  /** {@inheritDoc} */
  @Override
  public void validateBooleanFilterInput(Boolean filterValue) {
    requireNonNullInput(filterValue);
  }

  /**
   * Rejects {@code null} filter values with {@link ErrorCode#DOCS_API_SEARCH_FILTER_INVALID},
   * naming the offending operation in the message.
   */
  private void requireNonNullInput(Object value) {
    if (value != null) {
      return;
    }
    String msg =
        String.format("Operation %s was expecting a non-null value", getOpCode().getRawValue());
    throw new ErrorCodeRuntimeException(ErrorCode.DOCS_API_SEARCH_FILTER_INVALID, msg);
  }
}
<gh_stars>0
package mg.utils;

import org.json.JSONArray;
import org.json.JSONObject;

import java.util.Map;

/**
 * Contract for components that fetch a URL and expose the response body as
 * parsed JSON, optionally sending extra request headers.
 */
public interface JSONConsumer {

    /** Fetches {@code url} and returns its body parsed as a JSON object. */
    JSONObject getJsonObject(String url);

    /** Same as {@link #getJsonObject(String)}, sending the given request headers. */
    JSONObject getJsonObject(String url, Map<String, String> headers);

    /** Fetches {@code url} and returns its body parsed as a JSON array. */
    JSONArray getJsonArray(String url);

    /** Same as {@link #getJsonArray(String)}, sending the given request headers. */
    JSONArray getJsonArray(String url, Map<String, String> headers);
}
<filename>7-assets/past-student-repos/React-Todo-master/src/components/TodoComponents/TodoList.js // your components will all go in this `component` directory. // feel free to change this component.js into TodoList.js import React from 'react'; import ToDo from './Todo'; const List = (props) => { return ( <ul> {props.todolist.map(newToDo => { return ( <ToDo key={newToDo.id} value={newToDo.task} crossout={props.crossout} id={newToDo.id} completed={newToDo.completed} /> ) })} </ul> ) } export default List; //Must pass id=newToDo.id again because key cannot be used for anything other than setting the unique key value //to access id as a variable in Todo.js, we have to set it to id as well
#!/bin/sh
# vim:sw=4:ts=4:et
#
# Container entrypoint: when launched with the nginx binary, re-map the nginx
# user's UID to $NGINX_UID (so files on mounted volumes get the expected
# ownership) before handing control to the requested command.

set -e

# POSIX-safe test: `[ a -o b ]` is obsolescent and unreliable across shells.
if [ "$1" = "nginx" ] || [ "$1" = "nginx-debug" ]; then
    # change UID
    usermod -u "$NGINX_UID" nginx
    chown nginx:nginx /var/lib/nginx
    echo "Modify user nginx uid to $NGINX_UID"
fi

exec "$@"
#!/bin/bash
# Launches the allsky capture binary, translating every key/value pair in the
# camera settings JSON into "-key value" command-line arguments.

# Disabled ZWO-camera presence check (kept for reference):
#isPresent=$(lsusb -D $(lsusb | awk '/ 03c3:/ { bus=$2; dev=$4; gsub(/[^0-9]/,"",dev); print "/dev/bus/usb/"bus"/"dev;}') | grep -c 'iProduct .*ASI[0-9]')
#if [ $isPresent -eq 0 ]; then
#	echo ZWO Camera not found. Exiting. >&2
#	sudo systemctl stop allsky
#	exit 0
#fi

# Pulls in $CAMERA_SETTINGS, $DAYTIME and friends.
source /home/pi/allsky/config.sh
source /home/pi/allsky/scripts/filename.sh

echo "Starting allsky camera..."

cd /home/pi/allsky

# Building the arguments to pass to the capture binary
# NOTE(review): both the KEYS array and the backtick jq expansion rely on
# word splitting, and $CAMERA_SETTINGS is unquoted throughout — settings
# values or paths containing spaces would break this; confirm the settings
# file format before tightening the quoting.
ARGUMENTS=""
KEYS=( $(jq -r 'keys[]' $CAMERA_SETTINGS) )
for KEY in ${KEYS[@]}
do
	ARGUMENTS="$ARGUMENTS -$KEY `jq -r '.'$KEY $CAMERA_SETTINGS` "
done

# When using a desktop environment (Remote Desktop, VNC, HDMI output, etc), a preview of the capture can be displayed in a separate window
# The preview mode does not work if allsky.sh is started as a service or if the debian distribution has no desktop environment.
if [[ $1 == "preview" ]] ; then
	ARGUMENTS="$ARGUMENTS -preview 1"
fi

ARGUMENTS="$ARGUMENTS -daytime $DAYTIME"

echo "$ARGUMENTS"
# $ARGUMENTS is intentionally unquoted so it splits into separate argv words.
./capture $ARGUMENTS
# Bootstraps the scripts library rooted at the directory given as $1:
# sources require.sh from that directory, then initialises the require system.
init-scripts-lib() {
  local scripts_dir="$1"

  source "${scripts_dir}/require.sh"
  init-require-system "${scripts_dir}"
}

init-scripts-lib "$1"
#!/bin/sh
# Installs the build toolchain and development libraries this project needs
# (CMake/Boost/Eigen, Qt5, Python 2/3 + NumPy, graphics/physics libs,
# omniORB/CORBA, audio/GStreamer, and Lua 5.3) via apt.
# Note: comments cannot be interleaved below — every line is part of one
# backslash-continued apt-get command.
sudo apt-get -y install \
     build-essential \
     cmake-curses-gui \
     libboost-all-dev \
     libeigen3-dev \
     libeigen3-doc \
     libxfixes-dev \
     libyaml-dev \
     libfmt-dev \
     gettext \
     zlib1g-dev \
     libjpeg-dev \
     libpng-dev \
     qt5-default \
     libqt5x11extras5-dev \
     qt5-style-plugins \
     python3-dev \
     python3-numpy \
     python2.7-dev \
     python-numpy \
     libassimp-dev \
     libsdformat6-dev \
     libogre-1.9-dev \
     libode-dev \
     libomniorb4-dev \
     libcos4-dev \
     omniidl \
     omniorb-nameserver \
     python-omniorb \
     omniidl-python \
     uuid-dev \
     libpulse-dev \
     libsndfile1-dev \
     libgstreamer1.0-dev \
     libgstreamer-plugins-base1.0-dev \
     lua5.3 \
     liblua5.3-dev \
     lua-posix
#!/bin/bash
# remove non-protein non-ion molecules from pdb
#
# NOTE(review): `sed -i '.bak'` (suffix as a separate word) is BSD/macOS sed
# syntax; GNU sed needs `-i.bak`. This script appears to target macOS — confirm.
# NOTE(review): each sed call rewrites ${i}.bak, so the surviving backup only
# reflects the state before the *last* edit, not the original file.
# NOTE(review): the /A40/ pattern already deletes lines containing A405/A408,
# so those two later passes look redundant — confirm intent.

STDIN=( ${@} )

for i in "${STDIN[@]}"; do
	# remove xtal lipids/ligands/non-k ions
	sed -i '.bak' '/PC1/d' $i
	sed -i '.bak' '/40D/d' $i
	sed -i '.bak' '/ CD /d' $i
	sed -i '.bak' '/B40/d' $i
	sed -i '.bak' '/A40/d' $i
	sed -i '.bak' '/A405/d' $i
	sed -i '.bak' '/A408/d' $i
	sed -i '.bak' '/TRD/d' $i
done
import numpy as np

# matplotlib is only needed for plotting; keep the numerical core importable
# (e.g. for headless testing) when it is absent.
try:
    import matplotlib.pyplot as plt
except ImportError:
    plt = None

'''
Explicit 古典显格式,划分为102个单元的两端封闭河道,中心扩散
'''
# (Explicit classical FTCS scheme: a closed channel split into cells, with
#  diffusion starting from a concentration spike in the centre cell.)

def Explicit_method(im, it, r):
    """Advance the explicit (FTCS) diffusion scheme.

    Args:
        im: number of cells, including the two ghost boundary cells.
        it: number of time steps to advance.
        r:  diffusion number K*DT/DX**2 (scheme is stable for r <= 0.5).

    Returns:
        ndarray of shape (im, it + 1); column t holds the state after t steps.
    """
    # Initial concentration: centre cell holds 100, all others 0.
    c = np.zeros([im, it + 1])
    c[int(im/2), 0] = 100
    # Advance `it` steps so every column, including c[:, it], is filled.
    # (The previous `range(it - 1)` left the final column all zeros, so
    #  plot_conc's last profile was a flat zero line.)
    for t in range(it):    # Start to diffuse
        for i in range(1, im-1):
            c[i, t+1] = (1 - 2*r) * c[i, t] + r * c[i+1, t] + r * c[i-1, t]
        # Ghost cells mirror their inner neighbours: zero-flux (closed) ends.
        c[0, t+1] = c[1, t+1]
        c[im - 1, t+1] = c[im-2, t+1]
    return c

def plot_conc(im, c, times=(10, 100, 1000, 10000)):
    """Plot concentration profiles at the given time-step indices.

    The default indices reproduce the original hard-coded figure; pass a
    different `times` sequence to inspect other snapshots.
    """
    if plt is None:
        raise RuntimeError("matplotlib is required for plotting")
    plt.figure()
    for t in times:
        plt.plot(np.arange(im), c[:, t], label='%ddt' % t)
    plt.legend()
    plt.show()

if __name__ == '__main__':
    im = 102   # 100 interior cells + 2 ghost boundary cells
    it = 10000 # number of time steps
    DX = 100   # spatial step
    DT = 100   # time step
    K = 10     # diffusion coefficient
    r = K * DT/(DX**2)  # = 0.1, within the r <= 0.5 stability bound
    c = Explicit_method(im, it, r)
    plot_conc(im, c)
import discord, sqlite3
from discord.ext import commands
from discord_slash import cog_ext, SlashContext
from discord_slash.utils.manage_commands import create_option, create_choice

class Slash(commands.Cog):
    """Slash-command cog exposing /top: leaderboards for reputation points,
    daily credits and experience, read from local SQLite databases."""

    def __init__(self, bot):
        self.bot = bot

    @cog_ext.cog_slash(name="top", description="Voir le classement des systèmes d'économie !", options=[
        create_option(
            name="catégorie",
            description="Catégorie de classement",
            option_type=3,
            required=True,
            choices=[
                create_choice(
                    name="rep",
                    value="rep"
                ),
                create_choice(
                    name="daily",
                    value="daily"
                ),
                create_choice(
                    name="exp",
                    value="exp"
                )])])
    async def _top(self, ctx, catégorie: str):
        """Build and send the requested leaderboard embed (top `limite_max` rows).

        NOTE(review): the three branches are near-duplicates and share several
        latent issues flagged inline: `embed`/`author_rank` can be unbound when
        the result set is empty, and `connection.close()` at the end only closes
        whichever connection the taken branch opened.
        """
        # m_list: rendered rows; bs_n: newline joiner; counter_rep: current rank;
        # limite_max: number of leaderboard entries shown.
        m_list, bs_n, counter_rep, limite_max = [], "\n", 1, 10
        if catégorie == "rep":
            # Reputation leaderboard, ordered by rep_points.
            connection = sqlite3.connect("iso_card.db")
            cursor = connection.cursor()
            member_id = (f"{ctx.author.id}",)
            cursor.execute('SELECT * FROM tt_iso_card WHERE user_id = ?', member_id)
            author_values = cursor.fetchone()
            cursor.execute('SELECT * FROM tt_iso_card ORDER BY rep_points DESC')
            values = cursor.fetchall()[0:limite_max]
            if author_values != None:
                for element in values:
                    if int(element[1]) > 0:
                        member = await self.bot.fetch_user(str(element[0]))
                        if member == ctx.author:
                            author_rank = counter_rep
                            author_rep = element[1]
                            # Gold/silver/bronze embed colour for the author's rank.
                            if author_rank == 1:
                                embed = discord.Embed(title="Classement des points de réputation", description="** **", color=0xFFAC33)
                            elif author_rank == 2:
                                embed = discord.Embed(title="Classement des points de réputation", description="** **", color=0xCCD6DD)
                            elif author_rank == 3:
                                embed = discord.Embed(title="Classement des points de réputation", description="** **", color=0xFF8A3B)
                            else:
                                embed = discord.Embed(title="Classement des points de réputation", description="** **")
                            m_list.append(f"#{counter_rep} **{member.name}** : {element[1]}")
                        else:
                            # NOTE(review): overwrites `embed` on every non-author row,
                            # discarding any colour set for the author earlier.
                            embed = discord.Embed(title="Classement des points de réputation", description="** **")
                            m_list.append(f"#{counter_rep} {member.name} : {element[1]}")
                        counter_rep += 1
                # NOTE(review): `embed`/`author_rank` are unbound here if no row
                # qualified (empty table) — would raise NameError; confirm.
                embed.add_field(name=f"Ta position dans le classement est : **#{author_rank}** !\nAvec un total de **{author_rep}** point(s) de réputation !", value=f"\n{bs_n.join(m_list)}", inline=False)
            else:
                # Author is not registered: show the leaderboard plus a sign-up hint.
                embed = discord.Embed(title="Classement des points de réputation", description="** **")
                for element in values:
                    if int(element[1]) > 0:
                        member = await self.bot.fetch_user(str(element[0]))
                        m_list.append(f"#{counter_rep} {member.name} : {element[1]}")
                        counter_rep += 1
                embed.add_field(name=f"Tu n'es pas noté dans le classement car n'es pas inscrit à l'aventure ISO land...\nTu peux t'inscrire avec la commande **/start** !", value=f"\n{bs_n.join(m_list)}", inline=False)
            await ctx.send(embed=embed)
        elif catégorie == "daily":
            # Daily-credits leaderboard, ordered by the `dailies` column (index 5).
            connection = sqlite3.connect("iso_card.db")
            cursor = connection.cursor()
            member_id = (f"{ctx.author.id}",)
            cursor.execute('SELECT * FROM tt_iso_card WHERE user_id = ?', member_id)
            author_values = cursor.fetchone()
            cursor.execute('SELECT * FROM tt_iso_card ORDER BY dailies DESC')
            values = cursor.fetchall()[0:limite_max]
            if author_values != None:
                for element in values:
                    if int(element[5]) > 0:
                        member = await self.bot.fetch_user(str(element[0]))
                        if member == ctx.author:
                            author_rank = counter_rep
                            author_rep = element[5]
                            if author_rank == 1:
                                embed = discord.Embed(title="Classement des crédits", description="** **", color=0xFFAC33)
                            elif author_rank == 2:
                                embed = discord.Embed(title="Classement des crédits", description="** **", color=0xCCD6DD)
                            elif author_rank == 3:
                                embed = discord.Embed(title="Classement des crédits", description="** **", color=0xFF8A3B)
                            else:
                                embed = discord.Embed(title="Classement des crédits", description="** **")
                            m_list.append(f"#{counter_rep} **{member.name}** : {element[5]}")
                        else:
                            # NOTE(review): title says "réputation" inside the credits
                            # leaderboard — looks like a copy/paste slip; confirm.
                            embed = discord.Embed(title="Classement des points de réputation", description="** **")
                            m_list.append(f"#{counter_rep} {member.name} : {element[5]}")
                        counter_rep += 1
                embed.add_field(name=f"Ta position dans le classement est : **#{author_rank}** !\nAvec un total de **{author_rep}** crédit(s) !", value=f"\n{bs_n.join(m_list)}", inline=False)
            else:
                embed = discord.Embed(title="Classement des crédits", description="** **")
                for element in values:
                    if int(element[5]) > 0:
                        member = await self.bot.fetch_user(str(element[0]))
                        # NOTE(review): appends element[1] although this branch filters
                        # and ranks on element[5] (dailies) — likely should be element[5].
                        m_list.append(f"#{counter_rep} {member.name} : {element[1]}")
                        counter_rep += 1
                embed.add_field(name=f"Tu n'es pas noté dans le classement car n'es pas inscrit à l'aventure ISO land...\nTu peux t'inscrire avec la commande **/start** !", value=f"\n{bs_n.join(m_list)}", inline=False)
            await ctx.send(embed=embed)
        elif catégorie == "exp":
            # Experience leaderboard, stored per guild in levels.db, one table per guild.
            connection = sqlite3.connect("levels.db")
            cursor = connection.cursor()
            member_id = (f"{ctx.author.id}",)
            guild_name = "_" + str(ctx.guild.id)
            cursor.execute('SELECT * FROM {} WHERE user_id = ?'.format(guild_name), member_id)
            author_values = cursor.fetchone()
            cursor.execute('SELECT * FROM {} ORDER BY exp DESC'.format(guild_name))  # rows sorted by experience points
            values = cursor.fetchall()[0:limite_max]
            if author_values != None:
                embed = discord.Embed(title="Classement des points d'expérience", description="** **")
                author_rank = "non classé(e)"
                author_rep = int(author_values[1])
                author_level_s = int(author_values[2])
                for element in values:
                    if int(element[1]) > 0:
                        member = await self.bot.fetch_user(str(element[0]))
                        member_id = (f"{member.id}",)
                        cursor.execute('SELECT * FROM {} WHERE user_id = ?'.format(guild_name), member_id)  # per-member level lookup
                        author_level = cursor.fetchone()
                        author_level_a = int(author_level[2])
                        if member == ctx.author:
                            author_level_s = author_level_a
                            author_rank = counter_rep
                            author_rep = element[1]
                            if author_rank == 1:
                                embed = discord.Embed(title="Classement des points d'expérience", description="** **", color=0xFFAC33)
                            elif author_rank == 2:
                                embed = discord.Embed(title="Classement des points d'expérience", description="** **", color=0xCCD6DD)
                            elif author_rank == 3:
                                embed = discord.Embed(title="Classement des points d'expérience", description="** **", color=0xFF8A3B)
                            else:
                                embed = discord.Embed(title="Classement des points d'expérience", description="** **")
                            m_list.append(f"#{counter_rep} **{member.name}** : {element[1]} ({author_level_a})")
                        else:
                            m_list.append(f"#{counter_rep} {member.name} : {element[1]} ({author_level_a})")
                        counter_rep += 1
                embed.add_field(name=f"Ta position dans le classement de ce serveur est : **#{author_rank}** !\nAvoir atteint le niveau **{author_level_s}** et un total de **{author_rep}** points d'expérience !", value=f"{bs_n.join(m_list)}", inline=False)
            else:
                embed = discord.Embed(title="Classement des points d'expérience", description="** **")
                for element in values:
                    if int(element[1]) > 0:
                        member = await self.bot.fetch_user(str(element[0]))
                        # NOTE(review): `author_level_a` is never assigned on this path —
                        # reaching here with a qualifying row raises NameError; confirm.
                        m_list.append(f"#{counter_rep} {member.name} : {element[1]} ({author_level_a})")
                        counter_rep += 1
                embed.add_field(name=f"Tu n'es pas noté dans le classement car n'es pas inscrit à l'aventure ISO land...\nTu peux t'inscrire avec la commande **/start** !", value=f"\n{bs_n.join(m_list)}", inline=False)
            await ctx.send(embed=embed)
        # NOTE(review): closes only the connection opened by the branch taken above.
        connection.close()

def setup(bot):
    # Standard extension entry point: register the cog.
    bot.add_cog(Slash(bot))

def teardown(bot):
    # NOTE(review): removes a cog named "top", but the cog is registered under
    # its class name "Slash" — this teardown likely does nothing; confirm.
    bot.remove_cog("top")
source $stdenv/setup

# Nix builder: before the generic build, rewrite every CMakeLists.txt so the
# D-Bus interface files install under the Nix store prefix instead of the
# (read-only) KDE4 system location.
fixDbusInterfaceInstallDir() {
    find .. -name CMakeLists.txt | xargs sed -i -e "s@DESTINATION \${KDE4_DBUS_INTERFACES_DIR}@DESTINATION \${CMAKE_INSTALL_PREFIX}/share/dbus-1/interfaces/@"
}

patchPhase=fixDbusInterfaceInstallDir

genericBuild
#!/bin/bash
# Dumps a PostgreSQL database, optionally encrypts the dump, uploads it to S3,
# and prunes uploads older than $DELETE_OLDER_THAN.
#
# Shebang changed from /bin/sh: the script uses bash-only features
# (`set -o pipefail`), which fail outright under dash.

set -e
set -o pipefail

>&2 echo "-----"

# --- Required configuration (each value may come directly or via a *_FILE) ---

if [ "${S3_ACCESS_KEY_ID}" = "**None**" ] && [ "${S3_ACCESS_KEY_ID_FILE}" = "**None**" ]; then
  echo "You need to set the S3_ACCESS_KEY_ID environment variable."
  exit 1
fi

if [ "${S3_SECRET_ACCESS_KEY}" = "**None**" ] && [ "${S3_SECRET_ACCESS_KEY_FILE}" = "**None**" ]; then
  echo "You need to set the S3_SECRET_ACCESS_KEY environment variable."
  exit 1
fi

if [ "${S3_BUCKET}" = "**None**" ]; then
  echo "You need to set the S3_BUCKET environment variable."
  exit 1
fi

if [ "${POSTGRES_DB}" = "**None**" ] && [ "${POSTGRES_DB_FILE}" = "**None**" ]; then
  echo "You need to set the POSTGRES_DB environment variable."
  exit 1
fi

if [ "${POSTGRES_HOST}" = "**None**" ]; then
  # Fall back to legacy docker --link variables of a container named POSTGRES.
  if [ -n "${POSTGRES_PORT_5432_TCP_ADDR}" ]; then
    POSTGRES_HOST=$POSTGRES_PORT_5432_TCP_ADDR
    POSTGRES_PORT=$POSTGRES_PORT_5432_TCP_PORT
  else
    echo "You need to set the POSTGRES_HOST environment variable."
    exit 1
  fi
fi

if [ "${POSTGRES_USER}" = "**None**" ] && [ "${POSTGRES_USER_FILE}" = "**None**" ]; then
  echo "You need to set the POSTGRES_USER environment variable."
  exit 1
fi

if [ "${POSTGRES_PASSWORD}" = "**None**" ] && [ "${POSTGRES_PASSWORD_FILE}" = "**None**" ]; then
  echo "You need to set the POSTGRES_PASSWORD environment variable or link to a container named POSTGRES."
  exit 1
fi

# `==` replaced with POSIX `=` for consistency with the other tests.
if [ "${S3_ENDPOINT}" = "**None**" ]; then
  AWS_ARGS=""
else
  AWS_ARGS="--endpoint-url ${S3_ENDPOINT}"
fi

# --- Resolve values, preferring *_FILE (docker secrets) when provided ---

if [ "${POSTGRES_DB_FILE}" = "**None**" ]; then
  # Allow a comma-separated list of databases.
  POSTGRES_DB=$(echo "${POSTGRES_DB}" | tr , " ")
elif [ -r "${POSTGRES_DB_FILE}" ]; then
  POSTGRES_DB=$(cat "${POSTGRES_DB_FILE}")
else
  echo "Missing POSTGRES_DB_FILE file."
  exit 1
fi

if [ "${POSTGRES_USER_FILE}" = "**None**" ]; then
  export PGUSER="${POSTGRES_USER}"
elif [ -r "${POSTGRES_USER_FILE}" ]; then
  export PGUSER=$(cat "${POSTGRES_USER_FILE}")
else
  echo "Missing POSTGRES_USER_FILE file."
  exit 1
fi

if [ "${POSTGRES_PASSWORD_FILE}" = "**None**" ]; then
  export PGPASSWORD="${POSTGRES_PASSWORD}"
elif [ -r "${POSTGRES_PASSWORD_FILE}" ]; then
  export PGPASSWORD=$(cat "${POSTGRES_PASSWORD_FILE}")
else
  echo "Missing POSTGRES_PASSWORD_FILE file."
  exit 1
fi

if [ "${S3_ACCESS_KEY_ID_FILE}" = "**None**" ]; then
  export AWS_ACCESS_KEY_ID="${S3_ACCESS_KEY_ID}"
elif [ -r "${S3_ACCESS_KEY_ID_FILE}" ]; then
  export AWS_ACCESS_KEY_ID=$(cat "${S3_ACCESS_KEY_ID_FILE}")
else
  echo "Missing S3_ACCESS_KEY_ID_FILE file."
  exit 1
fi

if [ "${S3_SECRET_ACCESS_KEY_FILE}" = "**None**" ]; then
  export AWS_SECRET_ACCESS_KEY="${S3_SECRET_ACCESS_KEY}"
elif [ -r "${S3_SECRET_ACCESS_KEY_FILE}" ]; then
  export AWS_SECRET_ACCESS_KEY=$(cat "${S3_SECRET_ACCESS_KEY_FILE}")
else
  echo "Missing S3_SECRET_ACCESS_KEY_FILE file."
  exit 1
fi

if [ "${ENCRYPTION_PASSWORD_FILE}" = "**None**" ]; then
  ENCRYPTION_PASSWORD="${ENCRYPTION_PASSWORD}"
elif [ -r "${ENCRYPTION_PASSWORD_FILE}" ]; then
  ENCRYPTION_PASSWORD=$(cat "${ENCRYPTION_PASSWORD_FILE}")
else
  echo "Missing ENCRYPTION_PASSWORD_FILE file."
  exit 1
fi

export AWS_DEFAULT_REGION=$S3_REGION

# Intentionally unquoted at the use site so the options split into words.
POSTGRES_HOST_OPTS="-h $POSTGRES_HOST -p $POSTGRES_PORT -U $PGUSER $POSTGRES_EXTRA_OPTS"

# --- Dump, encrypt (optional), upload ---

echo "Creating dump of ${POSTGRES_DB} database from ${POSTGRES_HOST}..."
SRC_FILE=dump.sql.gz
DEST_FILE=${POSTGRES_DB}_$(date +"%Y-%m-%dT%H:%M:%SZ").sql.gz
pg_dump $POSTGRES_HOST_OPTS $POSTGRES_DB | gzip > "$SRC_FILE"

if [ "${ENCRYPTION_PASSWORD}" != "**None**" ]; then
  >&2 echo "Encrypting ${SRC_FILE}"
  # Under `set -e` a post-hoc `$?` check is dead code (the script exits first);
  # test the command directly so the diagnostic actually prints.
  if ! openssl enc -aes-256-cbc -in "$SRC_FILE" -out "${SRC_FILE}.enc" -k "$ENCRYPTION_PASSWORD"; then
    >&2 echo "Error encrypting ${SRC_FILE}"
    exit 1
  fi
  rm "$SRC_FILE"
  SRC_FILE="${SRC_FILE}.enc"
  DEST_FILE="${DEST_FILE}.enc"
fi

echo "Uploading dump to $S3_BUCKET"
cat "$SRC_FILE" | aws $AWS_ARGS s3 cp - "s3://$S3_BUCKET/$S3_PREFIX/$DEST_FILE" || exit 2

# --- Retention: delete uploads older than the configured age ---

if [ "${DELETE_OLDER_THAN}" != "**None**" ]; then
  >&2 echo "Checking for files older than ${DELETE_OLDER_THAN}"
  aws $AWS_ARGS s3 ls "s3://$S3_BUCKET/$S3_PREFIX/" | grep " PRE " -v | while read -r line; do
    fileName=$(echo "$line" | awk {'print $4'})
    created=$(echo "$line" | awk {'print $1" "$2'})
    created=$(date -d "$created" +%s)
    older_than=$(date -d "$DELETE_OLDER_THAN" +%s)
    if [ "$created" -lt "$older_than" ]; then
      # Quoted: the unquoted test was a syntax error when fileName was empty.
      if [ "$fileName" != "" ]; then
        >&2 echo "DELETING ${fileName}"
        aws $AWS_ARGS s3 rm "s3://$S3_BUCKET/$S3_PREFIX/$fileName"
      fi
    else
      >&2 echo "${fileName} not older than ${DELETE_OLDER_THAN}"
    fi
  done
fi

echo "SQL backup finished"
>&2 echo "-----"
<filename>ODFAEG/include/odfaeg/Graphics/Vulkan/instance.hpp
#pragma once

#include <vulkan/vulkan_core.h>

#include <string_view>

#include "odfaeg/Core/crtp.hpp"

namespace odfaeg {
    class VulkanWindow;

    // Owns the VkInstance and the VkSurfaceKHR created for the given window.
    // Non-copyable and non-movable (NoCopy/NoMove from crtp.hpp); the raw
    // handles are released in the destructor.
    class Instance final : public NoCopy, public NoMove {
      public:
        // Creates the Vulkan instance (with the given application name/version)
        // and a presentation surface for `window`.
        explicit Instance(const VulkanWindow &window, std::string_view application_name, std::uint32_t application_version);
        ~Instance() noexcept;

        [[nodiscard]] auto getInstance() const { return m_instance; }
        [[nodiscard]] auto getWindowSurface() const { return m_window_surface; }

      private:
        // NOTE(review): NDEBUG is defined in *release* builds, so these debug-
        // messenger members/methods are compiled only when NDEBUG is set —
        // usually one wants `#ifndef NDEBUG` (debug builds only). Confirm intent.
#ifdef NDEBUG
        [[nodiscard]] VkDebugUtilsMessengerEXT createDebugMessenger();
        void populateDebugMessenger(VkDebugUtilsMessengerCreateInfoEXT &debugInfo);
#endif
      private:
        VkInstance m_instance{nullptr};
        VkSurfaceKHR m_window_surface{nullptr};
#ifdef NDEBUG
        VkDebugUtilsMessengerEXT m_debug_messenger{nullptr};
#endif
    };
} // namespace odfaeg
<gh_stars>0 export const AUTH_SUCCESS = "AUTH_SUCCESS"; export const AUTH_ERROR = "AUTH_ERROR";
<reponame>duanduan2288/golang<gh_stars>0 // Copyright 2013 The StudyGolang Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. // http://studygolang.com // Author:polaris <EMAIL> package filter import ( "encoding/json" "fmt" "html/template" "net/http" "path/filepath" "strings" "time" "config" "github.com/gorilla/context" "github.com/studygolang/mux" "logger" "service" "util" "util/version" ) // 自定义模板函数 var funcMap = template.FuncMap{ // 获取gravatar头像 "gravatar": util.Gravatar, // 转为前端显示需要的时间格式 "formatTime": func(i interface{}) string { ctime, ok := i.(string) if !ok { return "" } t, _ := time.Parse("2006-01-02 15:04:05", ctime) return t.Format(time.RFC3339) + "+08:00" }, "substring": util.Substring, "add": func(nums ...interface{}) int { total := 0 for _, num := range nums { if n, ok := num.(int); ok { total += n } } return total }, "explode": func(s, sep string) []string { return strings.Split(s, sep) }, "noescape": func(s string) template.HTML { return template.HTML(s) }, } // 保存模板路径的key const CONTENT_TPL_KEY = "__content_tpl" // 页面展示 过滤器 type ViewFilter struct { commonHtmlFiles []string // 通用的html文件 baseTplName string // 第一个基础模板的名称 isBackView bool // 是否是后端 view 过滤器 // "继承"空实现 *mux.EmptyFilter } func NewViewFilter(isBackView bool, files ...string) *ViewFilter { viewFilter := new(ViewFilter) if len(files) == 0 { // 默认使用前端通用模板 viewFilter.commonHtmlFiles = []string{config.ROOT + "/template/common/layout.html"} viewFilter.baseTplName = "layout.html" } else { viewFilter.commonHtmlFiles = files viewFilter.baseTplName = filepath.Base(files[0]) } viewFilter.isBackView = isBackView return viewFilter } func (this *ViewFilter) PreFilter(rw http.ResponseWriter, req *http.Request) bool { logger.Debugln(req.RequestURI) // ajax请求头设置 if strings.HasSuffix(req.URL.Path, ".json") || req.FormValue("format") == "json" { setData(req, formatkey, "json") rw.Header().Set("Content-Type", "application/json; 
charset=utf-8") } else if strings.HasSuffix(req.URL.Path, ".html") { setData(req, formatkey, "ajaxhtml") } return true } // 在逻辑处理完之后,最后展示页面 func (this *ViewFilter) PostFilter(rw http.ResponseWriter, req *http.Request) bool { data := GetData(req) format := "html" if formatInter := getData(req, formatkey); formatInter != nil { format = formatInter.(string) } switch format { case "json": if len(data) != 0 { result, err := json.Marshal(data) if err != nil { logger.Errorf("json.Marshal error:[%q] %s\n", req.RequestURI, err) return false } fmt.Fprint(rw, string(result)) } case "ajaxhtml": contentHtml := req.FormValue(CONTENT_TPL_KEY) if contentHtml == "" { return true } contentHtml = "/template/admin/common_query.html," + contentHtml contentHtmls := strings.Split(contentHtml, ",") for i, contentHtml := range contentHtmls { contentHtmls[i] = config.ROOT + strings.TrimSpace(contentHtml) } tpl, err := template.New("common_query.html").Funcs(funcMap).ParseFiles(contentHtmls...) if err != nil { logger.Errorf("解析模板出错(ParseFiles):[%q] %s\n", req.RequestURI, err) return false } err = tpl.Execute(rw, data) if err != nil { logger.Errorf("执行模板出错(Execute):[%q] %s\n", req.RequestURI, err) return false } default: contentHtml := req.FormValue(CONTENT_TPL_KEY) if contentHtml == "" { return true } contentHtmls := strings.Split(contentHtml, ",") for i, contentHtml := range contentHtmls { contentHtmls[i] = config.ROOT + strings.TrimSpace(contentHtml) } if !this.isBackView { // TODO: 旧模板还未完成的页面 if strings.HasPrefix(req.RequestURI, "/wiki") { this.commonHtmlFiles = []string{config.ROOT + "/template/common/base.html"} this.baseTplName = "base.html" } else { this.commonHtmlFiles = []string{config.ROOT + "/template/common/layout.html"} this.baseTplName = "layout.html" } } // 为了使用自定义的模板函数,首先New一个以第一个模板文件名为模板名。 // 这样,在ParseFiles时,新返回的*Template便还是原来的模板实例 tpl, err := template.New(this.baseTplName).Funcs(funcMap).ParseFiles(append(this.commonHtmlFiles, contentHtmls...)...) 
if err != nil { logger.Errorf("解析模板出错(ParseFiles):[%q] %s\n", req.RequestURI, err) return false } // 如果没有定义css和js模板,则定义之 if jsTpl := tpl.Lookup("js"); jsTpl == nil { tpl.Parse(`{{define "js"}}{{end}}`) } if jsTpl := tpl.Lookup("css"); jsTpl == nil { tpl.Parse(`{{define "css"}}{{end}}`) } // 当前用户信息 me, _ := CurrentUser(req) data["me"] = me if this.isBackView { if menu1, menu2, curMenu1 := service.GetUserMenu(me["uid"].(int), req.RequestURI); menu2 != nil { data["menu1"] = menu1 data["menu2"] = menu2 data["uri"] = req.RequestURI data["cur_menu1"] = curMenu1 } } // websocket主机 data["wshost"] = config.Config["wshost"] data["build"] = map[string]string{ "version": version.Version, "date": version.Date, } err = tpl.Execute(rw, data) if err != nil { logger.Errorf("执行模板出错(Execute):[%q] %s\n", req.RequestURI, err) return false } } return true } type viewKey int const ( datakey viewKey = 0 formatkey viewKey = 1 // 存 希望返回的数据格式,如 "html", "json" 等 ) func GetData(req *http.Request) map[string]interface{} { data := getData(req, datakey) if data == nil { return make(map[string]interface{}) } return data.(map[string]interface{}) } func SetData(req *http.Request, data map[string]interface{}) { setData(req, datakey, data) } func getData(req *http.Request, viewkey viewKey) interface{} { if rv := context.Get(req, viewkey); rv != nil { // 获取之后立马删除 context.Delete(req, viewkey) return rv } return nil } func setData(req *http.Request, viewkey viewKey, data interface{}) { context.Set(req, viewkey, data) }
# Migration reshaping the boards table: drops the `name` column and links each
# board to its owning user.
# NOTE(review): the class name says "AddColumn" but the change also *removes*
# a column; the name is kept because it must match the migration file name.
class AddColumnToBoard < ActiveRecord::Migration[5.0]
  def change
    # Drop the name column from boards (the :string type is kept so the
    # migration stays reversible).
    remove_column :boards, :name, :string
    # Add a user_id reference column (with a database foreign key) to boards.
    add_reference :boards, :user, foreign_key: true
  end
end
import re

# Accepted sequence files: a stem of letters/digits/underscores, a .fasta or
# .fastq extension, and an optional trailing .gz. Compiled once at import time.
_SEQ_FILE_PATTERN = re.compile(r'[A-Za-z0-9_]+\.(?:fasta|fastq)(?:\.gz)?')


def validate_file_name(file_name):
    """Return True if *file_name* is an acceptable sequence-file name.

    Valid names look like ``sample_1.fasta``, ``reads.fastq.gz`` etc.
    Names with other characters in the stem (e.g. ``-`` or ``.``), other
    extensions, or trailing whitespace/newlines are rejected.

    Uses ``fullmatch`` so a trailing newline cannot sneak past a ``$`` anchor;
    the original's extra ``endswith`` check (which only existed to patch that
    hole) is therefore no longer needed.
    """
    return _SEQ_FILE_PATTERN.fullmatch(file_name) is not None
#!/usr/bin/env bash
# Copyright 2016 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Regenerates the kube-aggregator client code (clientsets, listers, informers,
# deepcopy and conversion functions) for the apiregistration API group using
# the k8s.io/code-generator scripts.

set -o errexit
set -o nounset
set -o pipefail

SCRIPT_ROOT=$(dirname "${BASH_SOURCE[0]}")/..
# Locate code-generator: the vendored copy if present, otherwise assume a
# sibling checkout (../code-generator). Overridable via CODEGEN_PKG.
CODEGEN_PKG=${CODEGEN_PKG:-$(cd "${SCRIPT_ROOT}"; ls -d -1 ./vendor/k8s.io/code-generator 2>/dev/null || echo ../code-generator)}

# Generate external (versioned) clients, listers, informers and deepcopy
# functions for apiregistration v1beta1 and v1.
CLIENTSET_NAME_VERSIONED=clientset \
CLIENTSET_PKG_NAME=clientset_generated \
"${CODEGEN_PKG}/generate-groups.sh" deepcopy,client,lister,informer \
  k8s.io/kube-aggregator/pkg/client k8s.io/kube-aggregator/pkg/apis \
  "apiregistration:v1beta1,v1" \
  --output-base "$(dirname "${BASH_SOURCE[0]}")/../../.." \
  --go-header-file "${SCRIPT_ROOT}/hack/boilerplate.go.txt"

# Second pass: also generate the internal clientset plus the conversion
# functions between internal and versioned API types.
CLIENTSET_NAME_VERSIONED=clientset \
CLIENTSET_PKG_NAME=clientset_generated \
CLIENTSET_NAME_INTERNAL=internalclientset \
"${CODEGEN_PKG}/generate-internal-groups.sh" deepcopy,client,lister,informer,conversion \
  k8s.io/kube-aggregator/pkg/client k8s.io/kube-aggregator/pkg/apis k8s.io/kube-aggregator/pkg/apis \
  "apiregistration:v1beta1,v1" \
  --output-base "$(dirname "${BASH_SOURCE[0]}")/../../.." \
  --go-header-file "${SCRIPT_ROOT}/hack/boilerplate.go.txt"