text stringlengths 1 1.05M |
|---|
// Shared UI theme tokens: responsive breakpoints plus matching dark and
// light colour palettes (primary/secondary, each with main/light/dark/text).
export default {
  // Viewport widths (px) at which the responsive layout switches.
  breakPoints: {
    medium: 900
  },
  // Palette applied when dark mode is active.
  dark: {
    background: '#252525',
    backgroundFadeFrom: '#373737',
    primary: {
      main: '#06b783',
      light: '#5aeab3',
      dark: '#008656',
      text: '#ebebeb'
    },
    secondary: {
      main: '#ffca28',
      light: '#fffd61',
      dark: '#c79a00',
      text: '#ebebeb'
    }
  },
  // Palette applied when light mode is active. Brand colours match the dark
  // palette; only the background and text shades differ.
  light: {
    background: '#fff',
    backgroundFadeFrom: '#ebebeb',
    primary: {
      main: '#06b783',
      light: '#5aeab3',
      dark: '#008656',
      text: '#3e3e3e'
    },
    secondary: {
      main: '#ffca28',
      light: '#fffd61',
      dark: '#c79a00',
      text: '#3e3e3e'
    }
  }
};
|
<reponame>DSP3460/7TimeApp<gh_stars>0
package com.example.wy.tickto.timingpush;
import android.app.AlarmManager;
import android.app.Notification;
import android.app.NotificationManager;
import android.app.PendingIntent;
import android.app.Service;
import android.content.Context;
import android.content.Intent;
import android.graphics.BitmapFactory;
import android.net.Uri;
import android.os.Build;
import android.os.Bundle;
import android.os.IBinder;
import android.provider.MediaStore;
import android.support.annotation.RequiresApi;
import com.example.wy.tickto.MainActivity;
import com.example.wy.tickto.R;
import java.util.Calendar;
public class AlertService extends Service {
private Context mContext;
private NotificationManager notificationManager;
private Notification.Builder mBuilder;
private Notification notification;
@Override
public IBinder onBind(Intent intent) {
return null;
}
@Override
public void onCreate() {
super.onCreate();
mContext = this;
notificationManager = (NotificationManager) mContext.getSystemService(Context.NOTIFICATION_SERVICE);
mBuilder = new Notification.Builder(mContext);
}
@RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN)
@Override
public int onStartCommand(Intent intent, int flags, int startId) {
Intent intent2=new Intent();
intent2.setClass(this, MainActivity.class);//点击通知需要跳转的activity
PendingIntent contentIntent = PendingIntent.getActivity(mContext,0, intent2,
PendingIntent.FLAG_UPDATE_CURRENT);
notification = mBuilder.setContentTitle(intent.getStringExtra("title"))
.setContentText(intent.getStringExtra("contentText"))
.setSmallIcon(R.mipmap.ic_launcher)
.setLargeIcon(BitmapFactory.decodeResource(mContext.getResources(), R.mipmap.ic_launcher))
.setContentIntent(contentIntent)
.setDefaults(Notification.DEFAULT_SOUND)
.build();
notification.flags |= Notification.FLAG_AUTO_CANCEL;
notificationManager.notify(0, notification);
return START_REDELIVER_INTENT;
}
} |
import numpy as np
from keras.models import Sequential
from keras.layers import Dense

# The original snippet called model.fit(X, ...) with `X` never defined, which
# raised a NameError. Build a small deterministic training set whose feature
# width matches input_dim=3 and whose labels match the 3 output classes.
rng = np.random.default_rng(0)
X = rng.random((30, 3))
# Integer class labels (sparse encoding, matching the sparse loss below).
y = np.repeat(np.arange(3), 10)

# Small feed-forward classifier: 3 inputs -> 3 softmax class probabilities.
model = Sequential()
model.add(Dense(64, input_dim=3, activation="relu"))
model.add(Dense(32, activation="relu"))
model.add(Dense(16, activation="relu"))
model.add(Dense(8, activation="relu"))
model.add(Dense(3, activation="softmax"))

# sparse_categorical_crossentropy expects integer labels, so `y` needs no
# one-hot encoding.
model.compile(optimizer="adam", loss="sparse_categorical_crossentropy", metrics=["accuracy"])

# validation_split=0.2 holds out the last 20% of rows for validation.
model.fit(X, y, epochs=10, batch_size=32, validation_split=0.2)
package com.ermathias.example.apicomposition.customer;
import com.ermathias.example.apicomposition.WebClientConfiguration;
import lombok.AllArgsConstructor;
import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType;
import org.springframework.stereotype.Service;
import reactor.core.publisher.Mono;
@Service
@AllArgsConstructor
public class CustomerClientService {
    // Supplies the pre-configured WebClient pointing at the customer service.
    private final WebClientConfiguration webClientConfiguration;

    /**
     * Fetches a single customer by id from the remote customer service via
     * {@code GET /customers/{id}}, expecting a JSON body.
     *
     * @param customerId id of the customer to look up
     * @return a {@link Mono} emitting the customer, or completing empty on 404
     */
    public Mono<CustomerDTO> getCustomerById(Long customerId) {
        return webClientConfiguration.getCustomerWebClient()
            .get()
            .uri("/customers/{id}", customerId)
            .accept(MediaType.APPLICATION_JSON)
            .retrieve()
            // NOTE(review): returning Mono.empty() here suppresses the default
            // WebClientResponseException for 404 and lets the pipeline continue
            // to bodyToMono, which then completes empty for an empty 404 body.
            // Verify the downstream treats "empty Mono" as "not found".
            .onStatus(HttpStatus.NOT_FOUND::equals, clientResponse -> Mono.empty())
            .bodyToMono(CustomerDTO.class)
            .log();
    }
}
|
/*
* Copyright 2017-2018 <NAME>
*
* Permission to use, copy, modify, and/or distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
* REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
* AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
* INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
* LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
* OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
* PERFORMANCE OF THIS SOFTWARE.
*/
/**
*@addtogroup SOFA_Reader
*@{
* @file saf_sofa_reader.h
* @brief Main header for the sofa reader module (#SAF_SOFA_READER_MODULE)
*
* @note This (optional) SOFA reader, requires netcdf (and its dependencies)
* and/or zlib to be linked to your project. Note that zlib is used by
* default, unless "SAF_ENABLE_NETCDF" is defined.
* Refer to docs/SOFA_READER_MODULE_DEPENDENCIES.md for more information.
*
* @author <NAME>
* @date 21.11.2017
* @license ISC
*/
#ifndef __SAF_SOFA_READER_H_INCLUDED__
#define __SAF_SOFA_READER_H_INCLUDED__
#ifdef __cplusplus
extern "C" {
#endif /* __cplusplus */
#ifdef SAF_ENABLE_SOFA_READER_MODULE
/* Include also the interface for the libmysofa reader (BSD-3-Clause license),
* which only depends on zlib.h */
#include "libmysofa/mysofa.h"
/* ========================================================================== */
/* Public Structures/Enums */
/* ========================================================================== */
/**
* SOFA container struct comprising all possible data that can be extracted
* from SOFA 1.0 files; as laid down in the GeneralFIR and SimpleFreeFieldHRIR
* specifications:
* https://www.sofaconventions.org/mediawiki/index.php/GeneralFIR
* https://www.sofaconventions.org/mediawiki/index.php/SimpleFreeFieldHRIR
*/
typedef struct _saf_sofa_container{
/* All possible SOFA variables (defaults={-1|NULL}) */
int nSources; /**< Number of source/measurement positions */
int nReceivers; /**< Number of ears/number of mics etc. */
int DataLengthIR; /**< Length of the IRs, in samples */
float* DataIR; /**< The impulse response (IR) Data;
* FLAT:nSources x nReceivers x DataLengthIR*/
float DataSamplingRate; /**< Sampling rate used to measure the IRs */
float* DataDelay; /**< Delay in samples; nReceivers x 1 */
float* SourcePosition; /**< Source positions (refer to
* SourcePositionType & SourcePositionUnits
* for the convention and units);
* FLAT: nSources x 3 */
float* ReceiverPosition; /**< Receiver positions (refer to
* ReceiverPositionType &
* ReceiverPositionUnits for the convention
* and units);
* FLAT: nReceivers x 3 */
int nListeners; /**< Number of listener positions (cannot be
* more than 1) */
int nEmitters; /**< Number of emitter positions */
float* ListenerPosition; /**< Listener position (The object
* incorporating all receivers; refer to
* ListenerPositionType &
* ListenerPositionUnits for the convention
* and units); FLAT: nListeners x 3 */
float* ListenerUp; /**< Vector pointing upwards from the listener
* position (Cartesian); 3 x 1 */
float* ListenerView; /**< Vector pointing forwards from the
* listener position (Cartesian); 3 x 1 */
float* EmitterPosition; /**< Positions of acoustic excitation used for
* the measurement (refer to
* EmitterPositionType &
* EmitterPositionUnits for the convention
* and units); FLAT: nEmitters x 3 */
/* All possible SOFA variable attributes (defaults=NULL) */
char* ListenerPositionType; /**< {'cartesian'|'spherical'} */
char* ListenerPositionUnits; /**< {'degree, degree, metre'|'metre'} */
char* ListenerViewType; /**< {'cartesian'|'spherical'} */
char* ListenerViewUnits; /**< {'degree, degree, metre'|'metre'} */
char* ReceiverPositionType; /**< {'cartesian'|'spherical'} */
char* ReceiverPositionUnits; /**< {'degree, degree, metre'|'metre'} */
char* SourcePositionType; /**< {'cartesian'|'spherical'} */
char* SourcePositionUnits; /**< {'degree, degree, metre'|'metre'} */
char* EmitterPositionType; /**< {'cartesian'|'spherical'} */
char* EmitterPositionUnits; /**< {'degree, degree, metre'|'metre'} */
char* DataSamplingRateUnits; /**< {'hertz'} */
/* All possible SOFA global attributes (defaults=NULL) */
char* Conventions; /**< {'SOFA'} */
char* Version; /**< Version number */
char* SOFAConventions; /**< {'GeneralFIR'|'GeneralTF'|
* 'SimpleFreeFieldHRIR'} */
char* SOFAConventionsVersion; /**< SOFA convention number */
char* APIName; /**< API name */
char* APIVersion; /**< API version */
char* ApplicationName; /**< Name of Application that created file */
char* ApplicationVersion; /**< Ver. of Application that created file */
char* AuthorContact; /**< Contact information */
char* Comment; /**< File comments */
char* DataType; /**< {'FIR'|'TF'} */
char* History; /**< History information */
char* License; /**< License under which file is provided */
char* Organisation;           /**< Organisation responsible for the file */
char* References; /**< References */
char* RoomType; /**< Room type (free field etc.) */
char* Origin; /**< Where this file came from */
char* DateCreated; /**< Date file was created */
char* DateModified; /**< Date file was modified */
char* Title; /**< Title of file */
char* DatabaseName; /**< Name of database this file belongs to */
char* ListenerShortName; /**< Name of the listener/dummyhead/mic etc.*/
/* libmysofa handle, which is used if SAF_ENABLE_NETCDF is not defined */
void* hLMSOFA; /**< libmysofa handle */
}saf_sofa_container;
/** SOFA loader error codes */
typedef enum{
/** None of the error checks failed */
SAF_SOFA_OK,
/** Not a SOFA file, or no such file was found in the specified location */
SAF_SOFA_ERROR_INVALID_FILE_OR_FILE_PATH,
/** Dimensions of the SOFA data were not as expected */
SAF_SOFA_ERROR_DIMENSIONS_UNEXPECTED,
/** The data-type of the SOFA data was not as expected */
SAF_SOFA_ERROR_FORMAT_UNEXPECTED,
/** NetCDF is not thread safe! */
SAF_SOFA_ERROR_NETCDF_IN_USE
} SAF_SOFA_ERROR_CODES;
/* ========================================================================== */
/* Main Functions */
/* ========================================================================== */
/**
* Fills a 'sofa_container' with data found in a SOFA file (GeneralFIR or
* SimpleFreeFieldHRIR), as detailed in the SOFA 1.0 standard [1,2,3]
*
* @warning This loader currently does not support TF SOFA files!
* @note If you encounter a SOFA file that this SOFA loader cannot load, (or it
* misses some of the data) then please send it to the developers :-)
*
* @param[in] hSOFA The sofa_container
* @param[in] sofa_filepath SOFA file path (including .sofa extension)
* @returns An error code (see #SAF_SOFA_ERROR_CODES)
*
* @see [1] <NAME>., <NAME>., <NAME>., <NAME>., <NAME>.,
* <NAME>., <NAME>., <NAME>., <NAME>., Ziegelwanger,
* H. and <NAME>., 2013, May. Spatially oriented format for
* acoustics: A data exchange format representing head-related transfer
* functions. In Audio Engineering Society Convention 134. Audio
* Engineering Society.
* @see [2] https://www.sofaconventions.org/mediawiki/index.php/GeneralFIR
* @see [3] https://www.sofaconventions.org/mediawiki/index.php/SimpleFreeFieldHRIR
*/
SAF_SOFA_ERROR_CODES saf_sofa_open(saf_sofa_container* hSOFA,
char* sofa_filepath);
/**
* Frees all SOFA data in a sofa_container
*
* @param[in] hSOFA The sofa_container
*/
void saf_sofa_close(saf_sofa_container* hSOFA);
/* ========================================================================== */
/* Deprecated Functions */
/* ========================================================================== */
/**
* A bare-bones SOFA file reader
*
* Allocates memory and copies the values of the essential data contained in a
* SOFA file to the output arguments.
*
* @warning This function is deprecated, use saf_sofa_open().
* @warning This function assumes the SOFA file comprises HRIR data! (i.e.
* not general IR measurement data).
* @note The hrirs are returned as NULL if the file does not exist.
*
* @param[in] sofa_filepath Directory/file_name of the SOFA file you wish to
* load. Optionally, you may set this as NULL, and the
* function will return the default HRIR data.
* @param[out] hrirs (&) the HRIR data;
* FLAT: N_hrir_dirs x #NUM_EARS x hrir_len
* @param[out] hrir_dirs_deg (&) the HRIR positions; FLAT: N_hrir_dirs x 2
* @param[out] N_hrir_dirs (&) number of HRIR positions
* @param[out] hrir_len (&) length of the HRIRs, in samples
* @param[out] hrir_fs (&) sampling rate of the HRIRs
*/
void loadSofaFile(/* Input Arguments */
char* sofa_filepath,
/* Output Arguments */
float** hrirs,
float** hrir_dirs_deg,
int* N_hrir_dirs,
int* hrir_len,
int* hrir_fs );
#endif /* SAF_ENABLE_SOFA_READER_MODULE */
#ifdef __cplusplus
} /* extern "C" */
#endif /* __cplusplus */
#endif /* __SAF_SOFA_READER_H_INCLUDED__ */
/**@} */ /* doxygen addtogroup SOFA_Reader */
|
#!/bin/bash
# ASTRID - k8s configuration
# author: Alex Carrega <alessandro.carrega@cnit.it>
#
# Usage: <script> <mode>
# <mode> selects the context-broker manifest: context-broker-<mode>.yaml
#
# Fixes vs. previous version: the shebang was "#!bin/bash" (missing leading
# slash, so the interpreter line never worked), $1 was expanded unquoted in
# the test, and error paths exited with status 0.
if [ -z "$1" ]; then
    echo "Error: missing mode" >&2
    exit 1
elif [ ! -f "context-broker-$1.yaml" ]; then
    echo "Error: unknown mode" >&2
    exit 1
else
    MODE=$1
    ## Create the astrid-kube namespace
    kubectl apply -f namespace.yaml
    ## Config-maps for elasticsearch and logstash setting files
    kubectl -n astrid-kube create configmap elasticsearch-config --from-file=../platform/elasticsearch/settings/7.8.0/config/ -o yaml
    kubectl -n astrid-kube create configmap logstash-config --from-file=../platform/logstash/settings/7.8.0/config/ -o yaml
    kubectl -n astrid-kube create configmap logstash-pipeline --from-file=../platform/logstash/settings/7.8.0/pipeline/ -o yaml
    ## Persistence storage for elasticsearch
    kubectl apply -f storage.yaml
    ## Service for cb-manager, elasticsearch and kafka
    kubectl apply -f service.yaml
    ## Set the context broker
    kubectl apply -f "context-broker-$MODE.yaml"
fi
|
dbName: publications
collections:
1. authors: {
"_id": "ObjectId",
"name": "String"
}
2. publications: {
"_id": "ObjectId",
"title": "String",
"authorId": "ObjectId"
} |
/**
*/
package edu.kit.ipd.sdq.kamp4hmi.model.Kamp4hmiModel.impl;
import edu.kit.ipd.sdq.kamp4hmi.model.Kamp4hmiModel.*;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.emf.ecore.EPackage;
import org.eclipse.emf.ecore.impl.EFactoryImpl;
import org.eclipse.emf.ecore.plugin.EcorePlugin;
/**
* <!-- begin-user-doc -->
* An implementation of the model <b>Factory</b>.
* <!-- end-user-doc -->
* @generated
*/
/**
 * Factory implementation for the Kamp4hmi model: instantiates the concrete
 * model classes declared in {@link Kamp4hmiModelPackage}.
 */
public class Kamp4hmiModelFactoryImpl extends EFactoryImpl implements Kamp4hmiModelFactory {

    /**
     * Returns the factory registered for this package's namespace URI,
     * falling back to a freshly constructed instance when no registration
     * exists or the registry lookup fails.
     */
    public static Kamp4hmiModelFactory init() {
        try {
            Kamp4hmiModelFactory registeredFactory =
                    (Kamp4hmiModelFactory) EPackage.Registry.INSTANCE.getEFactory(Kamp4hmiModelPackage.eNS_URI);
            if (registeredFactory != null) {
                return registeredFactory;
            }
        }
        catch (Exception exception) {
            EcorePlugin.INSTANCE.log(exception);
        }
        return new Kamp4hmiModelFactoryImpl();
    }

    /** Creates an instance of the factory. */
    public Kamp4hmiModelFactoryImpl() {
        super();
    }

    /**
     * Instantiates the model object matching the classifier id of {@code eClass}.
     *
     * @throws IllegalArgumentException if the classifier does not belong to this package
     */
    @Override
    public EObject create(EClass eClass) {
        switch (eClass.getClassifierID()) {
            case Kamp4hmiModelPackage.ACTOR_STEP: return createActorStep();
            case Kamp4hmiModelPackage.SYSTEM_STEP: return createSystemStep();
            case Kamp4hmiModelPackage.MODE: return createMode();
            case Kamp4hmiModelPackage.REPOSITORY: return createRepository();
            case Kamp4hmiModelPackage.IF: return createIf();
            case Kamp4hmiModelPackage.FOR: return createFor();
            case Kamp4hmiModelPackage.CONDITION: return createCondition();
            default:
                throw new IllegalArgumentException("The class '" + eClass.getName() + "' is not a valid classifier");
        }
    }

    /** Creates a new {@link ActorStep}. */
    public ActorStep createActorStep() {
        return new ActorStepImpl();
    }

    /** Creates a new {@link SystemStep}. */
    public SystemStep createSystemStep() {
        return new SystemStepImpl();
    }

    /** Creates a new {@link Mode}. */
    public Mode createMode() {
        return new ModeImpl();
    }

    /** Creates a new {@link Repository}. */
    public Repository createRepository() {
        return new RepositoryImpl();
    }

    /** Creates a new {@link If}. */
    public If createIf() {
        return new IfImpl();
    }

    /** Creates a new {@link For}. */
    public For createFor() {
        return new ForImpl();
    }

    /** Creates a new {@link Condition}. */
    public Condition createCondition() {
        return new ConditionImpl();
    }

    /** Returns the package this factory belongs to. */
    public Kamp4hmiModelPackage getKamp4hmiModelPackage() {
        return (Kamp4hmiModelPackage) getEPackage();
    }

    /**
     * @deprecated Use {@link Kamp4hmiModelPackage#eINSTANCE} instead.
     */
    @Deprecated
    public static Kamp4hmiModelPackage getPackage() {
        return Kamp4hmiModelPackage.eINSTANCE;
    }

} // Kamp4hmiModelFactoryImpl
|
module.exports = function blank (obj) {
const out = Object.create(null);
if (obj) {
Object.keys(obj).forEach(key => {
out[key] = obj[key];
});
}
return out;
}; |
<filename>src/dumb/editor/custom_field/CustomFieldTypeSelect.js<gh_stars>0
import React from 'react';
import {Dropdown} from "semantic-ui-react";
import {CUSTOM_FIELD_TYPES} from "../../../constants";
export const CustomFieldTypeSelect = ({
selectedTypeId,
onChange
}) => {
const options = Object.values(CUSTOM_FIELD_TYPES)
.map(type => ({
text: type.name,
value: type.id
}));
return (
<Dropdown
options={options}
value={selectedTypeId}
onChange={(_, {value}) => onChange(value)}
selection={true}
/>
);
}; |
import SwiftyJSON
import RealmSwift
/// Realm-backed news item tied to a group and an author (userId).
class News: Object {
    @objc dynamic var groupId: Int = 0
    @objc dynamic var userId: Int = 0

    /// Returns every news item authored by `authorId`, or nil when the Realm
    /// database cannot be opened (the error is logged).
    static func getNewsByAuthorId(authorId: Int) -> Results<News>? {
        do {
            let database = try Realm()
            let authoredNews = database.objects(News.self).filter("userId == %@", authorId)
            return authoredNews
        } catch {
            print("Error accessing Realm database: \(error)")
            return nil
        }
    }
}
#!/bin/bash
# adopted from https://github.com/debezium/docker-images/blob/master/connect-base/1.6/docker-entrypoint.sh
# Exit immediately if a *pipeline* returns a non-zero status. (Add -x for command tracing)
set -e
if [[ -z "$SENSITIVE_PROPERTIES" ]]; then
SENSITIVE_PROPERTIES="CONNECT_SASL_JAAS_CONFIG,CONNECT_CONSUMER_SASL_JAAS_CONFIG,CONNECT_PRODUCER_SASL_JAAS_CONFIG,CONNECT_SSL_KEYSTORE_PASSWORD,CONNECT_PRODUCER_SSL_KEYSTORE_PASSWORD,CONNECT_SSL_TRUSTSTORE_PASSWORD,CONNECT_PRODUCER_SSL_TRUSTSTORE_PASSWORD,CONNECT_SSL_KEY_PASSWORD,CONNECT_PRODUCER_SSL_KEY_PASSWORD,CONNECT_CONSUMER_SSL_TRUSTSTORE_PASSWORD,CONNECT_CONSUMER_SSL_KEYSTORE_PASSWORD,CONNECT_CONSUMER_SSL_KEY_PASSWORD"
fi
if [[ -z "$BOOTSTRAP_SERVERS" ]]; then
# Look for any environment variables set by Docker container linking. For example, if the container
# running Kafka were aliased to 'kafka' in this container, then Docker should have created several envs,
# such as 'KAFKA_PORT_9092_TCP'. If so, then use that to automatically set the 'bootstrap.servers' property.
BOOTSTRAP_SERVERS=$(env | grep .*PORT_9092_TCP= | sed -e 's|.*tcp://||' | uniq | paste -sd ,)
fi
if [[ "x$BOOTSTRAP_SERVERS" = "x" ]]; then
export BOOTSTRAP_SERVERS=0.0.0.0:9092
fi
echo "Using BOOTSTRAP_SERVERS=$BOOTSTRAP_SERVERS"
if [[ -z "$HOST_NAME" ]]; then
HOST_NAME=$(ip addr | grep 'BROADCAST' -A2 | tail -n1 | awk '{print $2}' | cut -f1 -d'/')
fi
: ${REST_PORT:=8083}
: ${REST_HOST_NAME:=$HOST_NAME}
: ${ADVERTISED_PORT:=8083}
: ${ADVERTISED_HOST_NAME:=$HOST_NAME}
: ${GROUP_ID:=1}
: ${OFFSET_FLUSH_INTERVAL_MS:=60000}
: ${OFFSET_FLUSH_TIMEOUT_MS:=5000}
: ${SHUTDOWN_TIMEOUT:=10000}
: ${KEY_CONVERTER:=org.apache.kafka.connect.json.JsonConverter}
: ${VALUE_CONVERTER:=org.apache.kafka.connect.json.JsonConverter}
: ${INTERNAL_KEY_CONVERTER:=org.apache.kafka.connect.json.JsonConverter}
: ${INTERNAL_VALUE_CONVERTER:=org.apache.kafka.connect.json.JsonConverter}
: ${ENABLE_APICURIO_CONVERTERS:=false}
: ${ENABLE_DEBEZIUM_SCRIPTING:=false}
: ${CONNECT_REST_ADVERTISED_PORT:=$ADVERTISED_PORT}
: ${CONNECT_REST_ADVERTISED_HOST_NAME:=$ADVERTISED_HOST_NAME}
: ${CONNECT_REST_PORT:=$REST_PORT}
: ${CONNECT_REST_HOST_NAME:=$REST_HOST_NAME}
: ${CONNECT_BOOTSTRAP_SERVERS:=$BOOTSTRAP_SERVERS}
: ${CONNECT_GROUP_ID:=$GROUP_ID}
: ${CONNECT_CONFIG_STORAGE_TOPIC:=$CONFIG_STORAGE_TOPIC}
: ${CONNECT_OFFSET_STORAGE_TOPIC:=$OFFSET_STORAGE_TOPIC}
if [[ -n "$STATUS_STORAGE_TOPIC" ]]; then
export CONNECT_STATUS_STORAGE_TOPIC=$STATUS_STORAGE_TOPIC
fi
: ${CONNECT_KEY_CONVERTER:=$KEY_CONVERTER}
: ${CONNECT_VALUE_CONVERTER:=$VALUE_CONVERTER}
: ${CONNECT_INTERNAL_KEY_CONVERTER:=$INTERNAL_KEY_CONVERTER}
: ${CONNECT_INTERNAL_VALUE_CONVERTER:=$INTERNAL_VALUE_CONVERTER}
: ${CONNECT_TASK_SHUTDOWN_GRACEFUL_TIMEOUT_MS:=$SHUTDOWN_TIMEOUT}
: ${CONNECT_OFFSET_FLUSH_INTERVAL_MS:=$OFFSET_FLUSH_INTERVAL_MS}
: ${CONNECT_OFFSET_FLUSH_TIMEOUT_MS:=$OFFSET_FLUSH_TIMEOUT_MS}
if [[ -n "$HEAP_OPTS" ]]; then
export KAFKA_HEAP_OPTS=$HEAP_OPTS
fi
unset HOST_NAME
unset REST_PORT
unset REST_HOST_NAME
unset ADVERTISED_PORT
unset ADVERTISED_HOST_NAME
unset GROUP_ID
unset OFFSET_FLUSH_INTERVAL_MS
unset OFFSET_FLUSH_TIMEOUT_MS
unset SHUTDOWN_TIMEOUT
unset KEY_CONVERTER
unset VALUE_CONVERTER
unset INTERNAL_KEY_CONVERTER
unset INTERNAL_VALUE_CONVERTER
unset HEAP_OPTS
unset MD5HASH
unset SCALA_VERSION
#
# Set up the classpath with all the plugins ...
#
if [ -z "$CONNECT_PLUGIN_PATH" ]; then
CONNECT_PLUGIN_PATH=$KAFKA_CONNECT_PLUGINS_DIR
fi
echo "Plugins are loaded from $CONNECT_PLUGIN_PATH"
# Link (or unlink) the optional Apicurio converter jars into every connector
# plugin directory, depending on ENABLE_APICURIO_CONVERTERS.
if [[ "${ENABLE_APICURIO_CONVERTERS}" == "true" && -n "$EXTERNAL_LIBS_DIR" && -d "$EXTERNAL_LIBS_DIR/apicurio" ]] ; then
    # CONNECT_PLUGIN_PATH is a comma-separated list; split it into an array.
    plugin_dirs=(${CONNECT_PLUGIN_PATH//,/ })
    # Iterate over ALL entries; the previous `in $plugin_dirs` only expanded
    # the first array element, silently skipping the remaining plugin paths.
    for plugin_dir in "${plugin_dirs[@]}" ; do
        for connector in $plugin_dir/*/ ; do
            ln -snf $EXTERNAL_LIBS_DIR/apicurio/* "$connector"
        done
    done
    echo "Apicurio connectors enabled!"
else
    plugin_dirs=(${CONNECT_PLUGIN_PATH//,/ })
    for plugin_dir in "${plugin_dirs[@]}" ; do
        find $plugin_dir/ -lname "$EXTERNAL_LIBS_DIR/apicurio/*" -exec rm -f {} \;
    done
fi
# Same treatment for the optional Debezium scripting jars.
# Fix: the condition previously tested `! -f "$EXTERNAL_LIBS_DIR"`, which is
# true even when the variable is empty; the intended check -- mirroring the
# Apicurio branch above -- is that the variable is set and non-empty.
if [[ "${ENABLE_DEBEZIUM_SCRIPTING}" == "true" && -n "$EXTERNAL_LIBS_DIR" && -d "$EXTERNAL_LIBS_DIR/debezium-scripting" ]] ; then
    plugin_dirs=(${CONNECT_PLUGIN_PATH//,/ })
    for plugin_dir in "${plugin_dirs[@]}" ; do
        for connector in $plugin_dir/*/ ; do
            ln -snf $EXTERNAL_LIBS_DIR/debezium-scripting/*.jar "$connector"
        done
    done
    echo "Debezium Scripting enabled!"
else
    plugin_dirs=(${CONNECT_PLUGIN_PATH//,/ })
    for plugin_dir in "${plugin_dirs[@]}" ; do
        find $plugin_dir/ -lname "$EXTERNAL_LIBS_DIR/debezium-scripting/*" -exec rm -f {} \;
    done
fi
#
# Set up the JMX options
#
# Default to unauthenticated, non-SSL JMX unless the caller overrides.
: ${JMXAUTH:="false"}
: ${JMXSSL:="false"}
# Remote JMX is only enabled when both a host and a port are supplied.
if [[ -n "$JMXPORT" && -n "$JMXHOST" ]]; then
    echo "Enabling JMX on ${JMXHOST}:${JMXPORT}"
    export KAFKA_JMX_OPTS="-Djava.rmi.server.hostname=${JMXHOST} -Dcom.sun.management.jmxremote.rmi.port=${JMXPORT} -Dcom.sun.management.jmxremote.port=${JMXPORT} -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.authenticate=${JMXAUTH} -Dcom.sun.management.jmxremote.ssl=${JMXSSL} "
fi
#
# Setup Flight Recorder
#
if [[ "$ENABLE_JFR" == "true" ]]; then
JFR_OPTS="-XX:StartFlightRecording"
opt_delimiter="="
for VAR in $(env); do
if [[ "$VAR" == JFR_RECORDING_* ]]; then
opt_name=`echo "$VAR" | sed -r "s/^JFR_RECORDING_([^=]*)=.*/\1/g" | tr '[:upper:]' '[:lower:]' | tr _ -`
opt_value=`echo "$VAR" | sed -r "s/^JFR_RECORDING_[^=]*=(.*)/\1/g"`
JFR_OPTS="${JFR_OPTS}${opt_delimiter}${opt_name}=${opt_value}"
opt_delimiter=","
fi
done
opt_delimiter=" -XX:FlightRecorderOptions="
for VAR in $(env); do
if [[ "$VAR" == JFR_OPT_* ]]; then
opt_name=`echo "$VAR" | sed -r "s/^JFR_OPT_([^=]*)=.*/\1/g" | tr '[:upper:]' '[:lower:]' | tr _ -`
opt_value=`echo "$VAR" | sed -r "s/^JFR_OPT_[^=]*=(.*)/\1/g"`
JFR_OPTS="${JFR_OPTS}${opt_delimiter}${opt_name}=${opt_value}"
opt_delimiter=","
fi
done
echo "Java Flight Recorder enabled and configured with options $JFR_OPTS"
if [[ -n "$KAFKA_OPTS" ]]; then
export KAFKA_OPTS="$KAFKA_OPTS $JFR_OPTS"
else
export KAFKA_OPTS="$JFR_OPTS"
fi
unset JFR_OPTS
fi
#
# Make sure the directory for logs exists ...
#
mkdir -p $KAFKA_HOME/data/$KAFKA_BROKER_ID
# Process the argument to this container ...
case $1 in
start)
if [[ "x$CONNECT_BOOTSTRAP_SERVERS" = "x" ]]; then
echo "The BOOTSTRAP_SERVERS variable must be set, or the container must be linked to one that runs Kafka."
exit 1
fi
if [[ "x$CONNECT_GROUP_ID" = "x" ]]; then
echo "The GROUP_ID must be set to an ID that uniquely identifies the Kafka Connect cluster these workers belong to."
echo "Ensure this is unique for all groups that work with a Kafka cluster."
exit 1
fi
if [[ "x$CONNECT_CONFIG_STORAGE_TOPIC" = "x" ]]; then
echo "The CONFIG_STORAGE_TOPIC variable must be set to the name of the topic where connector configurations will be stored."
echo "This topic must have a single partition, be highly replicated (e.g., 3x or more) and should be configured for compaction."
exit 1
fi
if [[ "x$CONNECT_OFFSET_STORAGE_TOPIC" = "x" ]]; then
echo "The OFFSET_STORAGE_TOPIC variable must be set to the name of the topic where connector offsets will be stored."
echo "This topic should have many partitions (e.g., 25 or 50), be highly replicated (e.g., 3x or more) and be configured for compaction."
exit 1
fi
if [[ "x$CONNECT_STATUS_STORAGE_TOPIC" = "x" ]]; then
echo "WARNING: it is recommended to specify the STATUS_STORAGE_TOPIC variable for defining the name of the topic where connector statuses will be stored."
echo "This topic may have multiple partitions, be highly replicated (e.g., 3x or more) and should be configured for compaction."
echo "As no value is given, the default of 'connect-status' will be used."
fi
echo "Using the following environment variables:"
echo " GROUP_ID=$CONNECT_GROUP_ID"
echo " CONFIG_STORAGE_TOPIC=$CONNECT_CONFIG_STORAGE_TOPIC"
echo " OFFSET_STORAGE_TOPIC=$CONNECT_OFFSET_STORAGE_TOPIC"
if [[ "x$CONNECT_STATUS_STORAGE_TOPIC" != "x" ]]; then
echo " STATUS_STORAGE_TOPIC=$CONNECT_STATUS_STORAGE_TOPIC"
fi
echo " BOOTSTRAP_SERVERS=$CONNECT_BOOTSTRAP_SERVERS"
echo " REST_HOST_NAME=$CONNECT_REST_HOST_NAME"
echo " REST_PORT=$CONNECT_REST_PORT"
echo " ADVERTISED_HOST_NAME=$CONNECT_REST_ADVERTISED_HOST_NAME"
echo " ADVERTISED_PORT=$CONNECT_REST_ADVERTISED_PORT"
echo " KEY_CONVERTER=$CONNECT_KEY_CONVERTER"
echo " VALUE_CONVERTER=$CONNECT_VALUE_CONVERTER"
echo " INTERNAL_KEY_CONVERTER=$CONNECT_INTERNAL_KEY_CONVERTER"
echo " INTERNAL_VALUE_CONVERTER=$CONNECT_INTERNAL_VALUE_CONVERTER"
echo " OFFSET_FLUSH_INTERVAL_MS=$CONNECT_OFFSET_FLUSH_INTERVAL_MS"
echo " OFFSET_FLUSH_TIMEOUT_MS=$CONNECT_OFFSET_FLUSH_TIMEOUT_MS"
echo " SHUTDOWN_TIMEOUT=$CONNECT_TASK_SHUTDOWN_GRACEFUL_TIMEOUT_MS"
# Copy config files if not provided in volume
cp -rn $KAFKA_HOME/config.orig/* $KAFKA_HOME/config
#
# Configure the log files ...
#
if [[ -n "$CONNECT_LOG4J_LOGGERS" ]]; then
sed -i -r -e "s|^(log4j.rootLogger)=.*|\1=${CONNECT_LOG4J_LOGGERS}|g" $KAFKA_HOME/config/log4j.properties
unset CONNECT_LOG4J_LOGGERS
fi
env | grep '^CONNECT_LOG4J' | while read -r VAR;
do
env_var=`echo "$VAR" | sed -r "s/([^=]*)=.*/\1/g"`
prop_name=`echo "$VAR" | sed -r "s/^CONNECT_([^=]*)=.*/\1/g" | tr '[:upper:]' '[:lower:]' | tr _ .`
prop_value=`echo "$VAR" | sed -r "s/^CONNECT_[^=]*=(.*)/\1/g"`
if egrep -q "(^|^#)$prop_name=" $KAFKA_HOME/config/log4j.properties; then
#note that no config names or values may contain an '@' char
sed -r -i "s@(^|^#)($prop_name)=(.*)@\2=${prop_value}@g" $KAFKA_HOME/config/log4j.properties
else
echo "$prop_name=${prop_value}" >> $KAFKA_HOME/config/log4j.properties
fi
if [[ "$SENSITIVE_PROPERTIES" = *"$env_var"* ]]; then
echo "--- Setting logging property from $env_var: $prop_name=[hidden]"
else
echo "--- Setting logging property from $env_var: $prop_name=${prop_value}"
fi
unset $env_var
done
if [[ -n "$LOG_LEVEL" ]]; then
sed -i -r -e "s|=INFO, stdout|=$LOG_LEVEL, stdout|g" $KAFKA_HOME/config/log4j.properties
sed -i -r -e "s|^(log4j.appender.stdout.threshold)=.*|\1=${LOG_LEVEL}|g" $KAFKA_HOME/config/log4j.properties
fi
export KAFKA_LOG4J_OPTS="-Dlog4j.configuration=file:$KAFKA_HOME/config/log4j.properties"
#
# Process all environment variables that start with 'CONNECT_'
#
env | while read -r VAR;
do
env_var=`echo "$VAR" | sed -r "s/([^=]*)=.*/\1/g"`
if [[ $env_var =~ ^CONNECT_ ]]; then
prop_name=`echo "$VAR" | sed -r "s/^CONNECT_([^=]*)=.*/\1/g" | tr '[:upper:]' '[:lower:]' | tr _ .`
prop_value=`echo "$VAR" | sed -r "s/^CONNECT_[^=]*=(.*)/\1/g"`
if egrep -q "(^|^#)$prop_name=" $KAFKA_HOME/config/connect-distributed.properties; then
#note that no config names or values may contain an '@' char
sed -r -i "s@(^|^#)($prop_name)=(.*)@\2=${prop_value}@g" $KAFKA_HOME/config/connect-distributed.properties
else
# echo "Adding property $prop_name=${prop_value}"
echo "$prop_name=${prop_value}" >> $KAFKA_HOME/config/connect-distributed.properties
fi
if [[ "$SENSITIVE_PROPERTIES" = *"$env_var"* ]]; then
echo "--- Setting property from $env_var: $prop_name=[hidden]"
else
echo "--- Setting property from $env_var: $prop_name=${prop_value}"
fi
fi
done
#
# Execute the Kafka Connect distributed service, replacing this shell process with the specified program ...
#
exec $KAFKA_HOME/bin/connect-distributed.sh $KAFKA_HOME/config/connect-distributed.properties
;;
esac
# Otherwise just run the specified command
exec "$@" |
#!/usr/bin/env bash
###############################################################################
# Note:
#
# This is an ERB template that is converted into a post-installation
# script run by `fpm`. It is to be used with the `fpm` arguments
# `--template-script` and `--template-value`.
#
# IT IS NOT MEANT TO BE RUN DIRECTLY!
#
# Expected variables:
#
# - ps_path : string =>
#
# The path to the Pennsieve installation, e.g. "/usr/local/opt/pennsieve",
# "C:\Program Files\pennsieve", etc.
#
# - ps_release_name : string =>
#
# The name of the binary itself ("pennsieve")
#
# - ps_version : string =>
#
# The version string of the release ("0.1.x")
#
# - ps_executable : string =>
#
# The absolute path to the Pennsieve binary, e.g
# /usr/local/opt/pennsieve/bin/${ps_release_name}
#
###############################################################################
# Per-user state directory; the <%= ... %> markers are ERB placeholders
# substituted by fpm at package-build time (see header above).
PS_HOME="$HOME/.pennsieve"
PS_PATH="<%= ps_path %>"
PS_EXECUTABLE="<%= ps_executable %>"
# Create the Pennsieve home directory, if needed:
if [ ! -d "$PS_HOME" ]; then
    mkdir "$PS_HOME"
fi
# Record what was installed, and where, for later troubleshooting.
INSTALL_LOG="$PS_HOME/install.log"
echo "Install log: $INSTALL_LOG"
echo "Installed $(date -u +"%Y-%m-%dT%H:%M:%SZ")" > $INSTALL_LOG
echo "PS_HOME=$PS_HOME" >> $INSTALL_LOG
echo "PS_PATH=<%= ps_path %>" >> $INSTALL_LOG
echo "PS_RELEASE_NAME=<%= ps_release_name %>" >> $INSTALL_LOG
echo "PS_VERSION=<%= ps_version %>" >> $INSTALL_LOG
echo "PS_EXECUTABLE=<%= ps_executable %>" >> $INSTALL_LOG
# Set the appropriate permissions:
# NOTE(review): `who | awk` yields the FIRST logged-in user (and overwrites
# the standard USER env var); on multi-user machines or when run headless
# this may pick the wrong owner -- confirm the intended target user.
USER=$(who | awk '{print $1}')
sudo chown -R $USER:$USER "$PS_HOME/"
# a+rX: world-readable, directories traversable, without marking files executable.
chmod -R a+rX "$PS_HOME"
chmod 755 "$PS_PATH"
# Create the cache directory:
if [ ! -d "$PS_HOME/cache" ]; then
    mkdir "$PS_HOME/cache"
fi
# Symlink $PS_EXECUTABLE to /usr/local/bin:
# -f replaces any stale link left behind by a previous install.
if [ -d "/usr/local/bin" ]; then
    sudo ln -s -f "$PS_EXECUTABLE" "/usr/local/bin/pennsieve"
fi
|
// UMD wrapper: exposes the icon descriptor via CommonJS or AMD when
// available, otherwise as the browser global `Reply24`.
(function (global, factory) {
  typeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory() :
  typeof define === 'function' && define.amd ? define(factory) :
  (global.Reply24 = factory());
}(this, (function () { 'use strict';

  // Declarative description of a 24x24 "reply" SVG icon: element name,
  // attributes, and child path content (rendered elsewhere).
  var _24 = {
    elem: 'svg',
    attrs: {
      xmlns: 'http://www.w3.org/2000/svg',
      viewBox: '0 0 32 32',
      width: 24,
      height: 24,
    },
    content: [
      {
        elem: 'path',
        attrs: {
          d:
            'M28.88 30a1 1 0 0 1-.88-.5A15.19 15.19 0 0 0 15 22v6a1 1 0 0 1-.62.92 1 1 0 0 1-1.09-.21l-12-12a1 1 0 0 1 0-1.42l12-12a1 1 0 0 1 1.09-.21A1 1 0 0 1 15 4v6.11a17.19 17.19 0 0 1 15 17 16.34 16.34 0 0 1-.13 2 1 1 0 0 1-.79.86zM14.5 20A17.62 17.62 0 0 1 28 26a15.31 15.31 0 0 0-14.09-14 1 1 0 0 1-.91-1V6.41L3.41 16 13 25.59V21a1 1 0 0 1 1-1h.54z',
        },
      },
    ],
    name: 'reply',
    size: 24,
  };

  return _24;

})));
|
// Returns the sum of the strictly positive entries of `arr`;
// zero, negative, and non-positive values contribute nothing.
function sumOfPositiveIntegers(arr) {
  return arr
    .filter(function (value) { return value > 0; })
    .reduce(function (total, value) { return total + value; }, 0);
}
#
# Copyright (c) 2019 ISP RAS (http://www.ispras.ru)
# Ivannikov Institute for System Programming of the Russian Academy of Sciences
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import io
import json
import zipfile
from wsgiref.util import FileWrapper
from django.urls import reverse
from django.utils.functional import cached_property
from django.utils.timezone import now
from django.utils.translation import ugettext_lazy as _
from bridge.vars import MARK_SOURCE, SAFE_VERDICTS, UNSAFE_VERDICTS
from bridge.utils import logger, BridgeException
from bridge.ZipGenerator import ZipStream, CHUNK_SIZE
from marks.models import (
MarkSafe, MarkUnsafe, MarkUnknown, SafeTag, UnsafeTag, MarkSafeTag, MarkUnsafeTag,
MarkSafeAttr, MarkUnsafeAttr, MarkUnknownAttr
)
from caches.models import ReportSafeCache, ReportUnsafeCache, ReportUnknownCache
from marks.serializers import SafeMarkSerializer, UnsafeMarkSerializer, UnknownMarkSerializer
from marks.SafeUtils import ConnectSafeMark
from marks.UnsafeUtils import ConnectUnsafeMark
from marks.UnknownUtils import ConnectUnknownMark
from caches.utils import UpdateCachesOnMarkPopulate
class MarkGeneratorBase:
    """Streams one mark as a zip archive: a 'mark.json' with the common data
    plus one 'version-<n>.json' file per mark version.

    Subclasses must set ``type`` and ``attrs_model`` and may set ``tags_model``.
    """
    type = None          # mark kind: 'safe', 'unsafe' or 'unknown'
    attrs_model = None   # model holding per-version attributes
    tags_model = None    # model holding per-version tags (None = tags unsupported)

    def __init__(self, mark):
        assert self.type is not None, 'Wrong usage'
        self.mark = mark
        # Archive name is unique per mark identifier.
        self.name = 'Mark-{}-{}.zip'.format(self.type, self.mark.identifier)
        self.stream = ZipStream()

    def common_data(self):
        # Data written once to 'mark.json'; subclasses may extend the dict.
        return {
            'type': self.type,
            'identifier': str(self.mark.identifier),
            'is_modifiable': self.mark.is_modifiable
        }

    def version_data(self, version):
        # Data for one 'version-<n>.json' file; subclasses extend the dict.
        data = {
            'comment': version.comment,
            'description': version.description,
            'attrs': self.attrs.get(version.id, []),
        }
        if self.tags is not None:
            data['tags'] = self.tags.get(version.id, [])
        return data

    @cached_property
    def attrs(self):
        # Maps version id -> ordered list of attribute dicts for this mark.
        assert self.attrs_model is not None, 'Wrong usage'
        mark_attrs = {}
        for mattr in self.attrs_model.objects.filter(mark_version__mark=self.mark).order_by('id'):
            mark_attrs.setdefault(mattr.mark_version_id, [])
            mark_attrs[mattr.mark_version_id].append({
                'name': mattr.name, 'value': mattr.value, 'is_compare': mattr.is_compare
            })
        return mark_attrs

    @cached_property
    def tags(self):
        # Maps version id -> list of tag names, or None when tags are unsupported.
        if not self.tags_model:
            return None
        all_tags = {}
        for version_id, tag_name in self.tags_model.objects.filter(mark_version__mark=self.mark) \
                .values_list('mark_version_id', 'tag__name'):
            all_tags.setdefault(version_id, [])
            all_tags[version_id].append(tag_name)
        return all_tags

    def versions_queryset(self):
        # Hook so subclasses can add select_related() etc.
        return self.mark.versions.all()

    def __iter__(self):
        # Add main mark data
        content = json.dumps(self.common_data(), ensure_ascii=False, sort_keys=True, indent=4)
        for data in self.stream.compress_string('mark.json', content):
            yield data
        # Add versions data
        for markversion in self.versions_queryset():
            content = json.dumps(self.version_data(markversion), ensure_ascii=False, sort_keys=True, indent=4)
            for data in self.stream.compress_string('version-{}.json'.format(markversion.version), content):
                yield data
        yield self.stream.close_stream()
class SafeMarkGenerator(MarkGeneratorBase):
    """Archive generator for safe marks; each version additionally carries its verdict."""
    type = 'safe'
    attrs_model = MarkSafeAttr
    tags_model = MarkSafeTag

    def version_data(self, version):
        # Extend the base version payload with the safe verdict.
        return dict(super().version_data(version), verdict=version.verdict)
class UnsafeMarkGenerator(MarkGeneratorBase):
    """Archive generator for unsafe marks; adds the error trace and comparison settings."""
    type = 'unsafe'
    attrs_model = MarkUnsafeAttr
    tags_model = MarkUnsafeTag

    def versions_queryset(self):
        # Prefetch the error-trace relation since version_data() reads its file.
        return self.mark.versions.select_related('error_trace')

    def version_data(self, version):
        data = super().version_data(version)
        # Read the stored error trace; it is decoded as UTF-8 text.
        with version.error_trace.file.file as fp:
            error_trace = fp.read().decode('utf8')
        data.update({
            'verdict': version.verdict,
            'status': version.status,
            'function': version.function,
            'error_trace': error_trace,
            'threshold': version.threshold_percentage
        })
        return data
class UnknownMarkGenerator(MarkGeneratorBase):
    """Archive generator for unknown marks (no tags model)."""
    type = 'unknown'
    attrs_model = MarkUnknownAttr

    def common_data(self):
        # Unknown marks additionally carry the component name.
        return dict(super().common_data(), component=self.mark.component)

    def version_data(self, version):
        # Extend the base payload with the problem-matching settings.
        payload = super().version_data(version)
        payload.update(
            function=version.function,
            problem_pattern=version.problem_pattern,
            is_regexp=version.is_regexp,
            link=version.link,
        )
        return payload
class SeveralMarksGenerator:
    """Streams a selection of marks as one zip containing per-mark zip archives."""

    def __init__(self, marks):
        self.marks = marks
        self.stream = ZipStream()
        self.name = 'KleverMarks.zip'

    def generate_mark(self, markgen):
        # Re-yield the inner archive, buffered into chunks of at least CHUNK_SIZE bytes.
        buf = b''
        for data in self.stream.compress_stream(markgen.name, markgen):
            buf += data
            if len(buf) > CHUNK_SIZE:
                yield buf
                buf = b''
        if len(buf) > 0:
            yield buf

    def __iter__(self):
        for mark in self.marks:
            # Pick the matching generator by mark model class.
            if isinstance(mark, MarkSafe):
                markgen = SafeMarkGenerator(mark)
            elif isinstance(mark, MarkUnsafe):
                markgen = UnsafeMarkGenerator(mark)
            elif isinstance(mark, MarkUnknown):
                markgen = UnknownMarkGenerator(mark)
            else:
                # Unsupported objects are silently skipped.
                continue
            yield from self.generate_mark(markgen)
        yield self.stream.close_stream()
class PresetMarkFile(FileWrapper):
    """Serves the latest version of a mark as a downloadable preset JSON file."""
    attrs_model = None  # set by subclasses

    def __init__(self, mark):
        self.mark = mark
        self.name = '{}.json'.format(self.mark.identifier)
        # Serialize eagerly so self.size is known before streaming starts.
        content = json.dumps(self.get_data(), indent=2, sort_keys=True).encode('utf8')
        self.size = len(content)
        super().__init__(io.BytesIO(content), 8192)

    @cached_property
    def last_version(self):
        # NOTE(review): .first() returns None if no matching version exists, which
        # would make get_data() raise AttributeError - confirm a mark always has
        # a version row matching mark.version.
        return self.mark.versions.filter(version=self.mark.version).first()

    def get_data(self):
        # Only attributes flagged is_compare are exported to presets.
        return {
            'is_modifiable': self.mark.is_modifiable,
            'description': self.last_version.description,
            'attrs': list(self.attrs_model.objects.filter(mark_version=self.last_version, is_compare=True)
                          .order_by('id').values('name', 'value', 'is_compare'))
        }
class SafePresetFile(PresetMarkFile):
    """Preset JSON for a safe mark: base data plus verdict and tag names."""
    attrs_model = MarkSafeAttr

    def get_data(self):
        tag_names = MarkSafeTag.objects.filter(mark_version=self.last_version) \
            .values_list('tag__name', flat=True)
        data = super().get_data()
        data['verdict'] = self.mark.verdict
        data['tags'] = list(tag_names)
        return data
class UnsafePresetFile(PresetMarkFile):
    """Preset JSON for an unsafe mark: adds verdict, status, comparison settings,
    tags and the decoded error trace."""
    attrs_model = MarkUnsafeAttr

    def get_data(self):
        data = super().get_data()
        data.update({
            'verdict': self.mark.verdict,
            'status': self.mark.status,
            'function': self.mark.function,
            'threshold': self.mark.threshold_percentage,
            'tags': list(MarkUnsafeTag.objects.filter(mark_version=self.last_version)
                         .values_list('tag__name', flat=True))
        })
        # The stored error trace is itself JSON; embed it as an object, not a string.
        with self.mark.error_trace.file.file as fp:
            data['error_trace'] = json.loads(fp.read().decode('utf8'))
        return data
class UnknownPresetFile(PresetMarkFile):
    """Preset JSON for an unknown mark: base data plus problem-matching settings."""
    attrs_model = MarkUnknownAttr

    def get_data(self):
        data = super().get_data()
        # Copy the problem-matching fields straight off the mark.
        for field in ('function', 'problem_pattern', 'is_regexp', 'link'):
            data[field] = getattr(self.mark, field)
        return data
class AllMarksGenerator:
    """Streams every mark in the system as one zip of per-mark zip archives."""

    def __init__(self):
        curr_time = now()
        # Archive name carries the current date as day-month-year.
        self.name = 'Marks--%s-%s-%s.zip' % (curr_time.day, curr_time.month, curr_time.year)
        self.stream = ZipStream()

    def generators(self):
        # One generator per mark, for each of the three mark kinds.
        for mark in MarkSafe.objects.all():
            yield SafeMarkGenerator(mark)
        for mark in MarkUnsafe.objects.all():
            yield UnsafeMarkGenerator(mark)
        for mark in MarkUnknown.objects.all():
            yield UnknownMarkGenerator(mark)

    def __iter__(self):
        for markgen in self.generators():
            # Buffer each inner archive into chunks of at least CHUNK_SIZE bytes.
            buf = b''
            for data in self.stream.compress_stream(markgen.name, markgen):
                buf += data
                if len(buf) > CHUNK_SIZE:
                    yield buf
                    buf = b''
            if len(buf) > 0:
                yield buf
        yield self.stream.close_stream()
class MarksUploader:
    """Recreates marks (with full version history) from uploaded mark archives
    in the format produced by the generators above."""

    def __init__(self, user):
        self._user = user
        # Tag caches, filled lazily on the first safe/unsafe mark upload.
        self._safe_tags_names = self._safe_tags_tree = None
        self._unsafe_tags_names = self._unsafe_tags_tree = None

    def get_tags(self, tags_model):
        # Returns ({tag id -> parent id}, {tag name -> tag id}) for the model.
        tags_tree = {}
        tags_names = {}
        for t_id, parent_id, t_name in tags_model.objects.values_list('id', 'parent_id', 'name'):
            tags_tree[t_id] = parent_id
            tags_names[t_name] = t_id
        return tags_tree, tags_names

    def __create_safe_mark(self, mark_data, versions_data):
        # Replay versions in ascending order; the first one creates the mark,
        # later ones update it so history is preserved.
        if self._safe_tags_names is None or self._safe_tags_tree is None:
            self._safe_tags_tree, self._safe_tags_names = self.get_tags(SafeTag)
        mark = None
        for version_number in sorted(versions_data):
            mark_version = versions_data[version_number]
            if mark is None:
                # Get identifier and is_modifiable from mark_data
                mark_version.update(mark_data)
                serializer_fields = ('identifier', 'is_modifiable', 'verdict', 'mark_version')
                save_kwargs = {'source': MARK_SOURCE[2][0], 'author': self._user}
            else:
                serializer_fields = ('verdict', 'mark_version')
                save_kwargs = {}
            serializer = SafeMarkSerializer(instance=mark, data=mark_version, context={
                'tags_names': self._safe_tags_names, 'tags_tree': self._safe_tags_tree
            }, fields=serializer_fields)
            serializer.is_valid(raise_exception=True)
            mark = serializer.save(**save_kwargs)
        # Calculate mark caches
        res = ConnectSafeMark(mark)
        UpdateCachesOnMarkPopulate(mark, res.new_links).update()
        return reverse('marks:safe', args=[mark.id])

    def __create_unsafe_mark(self, mark_data, versions_data):
        # Same replay scheme as safe marks, plus the comparison function field.
        if self._unsafe_tags_names is None or self._unsafe_tags_tree is None:
            self._unsafe_tags_tree, self._unsafe_tags_names = self.get_tags(UnsafeTag)
        mark = None
        for version_number in sorted(versions_data):
            mark_version = versions_data[version_number]
            if mark is None:
                # Get identifier and is_modifiable from mark_data
                mark_version.update(mark_data)
                serializer_fields = ('identifier', 'is_modifiable', 'verdict', 'mark_version', 'function')
                save_kwargs = {'source': MARK_SOURCE[2][0], 'author': self._user}
            else:
                serializer_fields = ('verdict', 'mark_version', 'function')
                save_kwargs = {}
            serializer = UnsafeMarkSerializer(instance=mark, data=mark_version, context={
                'tags_names': self._unsafe_tags_names, 'tags_tree': self._unsafe_tags_tree
            }, fields=serializer_fields)
            serializer.is_valid(raise_exception=True)
            mark = serializer.save(**save_kwargs)
        # Calculate mark caches
        res = ConnectUnsafeMark(mark)
        UpdateCachesOnMarkPopulate(mark, res.new_links).update()
        return reverse('marks:unsafe', args=[mark.id])

    def __create_unknown_mark(self, mark_data, versions_data):
        # Unknown marks have no tags, so no tag context is needed.
        mark = None
        for version_number in sorted(versions_data):
            mark_version = versions_data[version_number]
            if mark is None:
                # Get identifier, component and is_modifiable from mark_data
                mark_version.update(mark_data)
                serializer_fields = (
                    'identifier', 'component', 'is_modifiable', 'mark_version',
                    'function', 'is_regexp', 'problem_pattern', 'link'
                )
                save_kwargs = {'source': MARK_SOURCE[2][0], 'author': self._user}
            else:
                serializer_fields = ('mark_version', 'function', 'is_regexp', 'problem_pattern', 'link')
                save_kwargs = {}
            serializer = UnknownMarkSerializer(instance=mark, data=mark_version, fields=serializer_fields)
            serializer.is_valid(raise_exception=True)
            mark = serializer.save(**save_kwargs)
        # Calculate mark caches
        res = ConnectUnknownMark(mark)
        UpdateCachesOnMarkPopulate(mark, res.new_links).update()
        return reverse('marks:unknown', args=[mark.id])

    def upload_mark(self, archive):
        """Parses a mark archive and creates the mark.

        Returns (mark type, URL of the created mark).
        Raises BridgeException for corrupted archives and ValueError for
        unsupported payloads.
        """
        mark_data = None
        versions_data = {}
        with zipfile.ZipFile(archive, 'r') as zfp:
            for file_name in zfp.namelist():
                if file_name == 'mark.json':
                    mark_data = json.loads(zfp.read(file_name).decode('utf8'))
                elif file_name.startswith('version-'):
                    try:
                        # 'version-<n>.json' -> integer version number n.
                        version_id = int(os.path.splitext(file_name)[0].replace('version-', ''))
                        versions_data[version_id] = json.loads(zfp.read(file_name).decode('utf8'))
                    except ValueError:
                        raise BridgeException(_("The mark archive is corrupted"))
        if mark_data is None or len(versions_data) == 0:
            raise BridgeException(_("The mark archive is corrupted: it doesn't contain necessary data"))
        if not isinstance(mark_data, dict):
            raise ValueError('Unsupported mark data type: %s' % type(mark_data))
        # The 'type' key selects the creator; it is removed before serialization.
        mark_type = mark_data.pop('type', None)
        if mark_type == 'safe':
            return mark_type, self.__create_safe_mark(mark_data, versions_data)
        elif mark_type == 'unsafe':
            return mark_type, self.__create_unsafe_mark(mark_data, versions_data)
        elif mark_type == 'unknown':
            return mark_type, self.__create_unknown_mark(mark_data, versions_data)
        raise ValueError('Unsupported mark type: %s' % mark_type)
class UploadAllMarks:
    """Uploads every mark archive found in a directory, optionally deleting
    all existing marks and resetting report caches first."""

    def __init__(self, user, marks_dir, delete_all_marks):
        self._uploader = MarksUploader(user)
        if delete_all_marks:
            self.__clear_old_marks()
        # Per-type upload counters, e.g. {'safe': 2, 'unsafe': 0, 'unknown': 1, 'fail': 0}.
        self.numbers = self.__upload_all(marks_dir)

    def __clear_old_marks(self):
        MarkSafe.objects.all().delete()
        MarkUnsafe.objects.all().delete()
        MarkUnknown.objects.all().delete()
        # NOTE(review): SAFE_VERDICTS[4] / UNSAFE_VERDICTS[5] are assumed to be
        # the "unmarked" verdict entries - confirm against bridge.vars.
        ReportSafeCache.objects.update(marks_total=0, marks_confirmed=0, verdict=SAFE_VERDICTS[4][0], tags={})
        ReportUnsafeCache.objects.update(marks_total=0, marks_confirmed=0, verdict=UNSAFE_VERDICTS[5][0], tags={})
        ReportUnknownCache.objects.update(marks_total=0, marks_confirmed=0, problems={})

    def __upload_all(self, marks_dir):
        upload_result = {'safe': 0, 'unsafe': 0, 'unknown': 0, 'fail': 0}
        for file_name in os.listdir(marks_dir):
            mark_path = os.path.join(marks_dir, file_name)
            if os.path.isfile(mark_path):
                with open(mark_path, mode='rb') as fp:
                    try:
                        mark_type = self._uploader.upload_mark(fp)[0]
                    except Exception as e:
                        # Keep going on individual failures; count them instead.
                        logger.exception(e)
                        mark_type = 'fail'
                upload_result.setdefault(mark_type, 0)
                upload_result[mark_type] += 1
        return upload_result
|
#!/bin/bash
# Exit immediately if a command exits with a non-zero exit status.
set -e

# Check that a stack name argument was supplied.
if [ $# -lt 1 ]; then
    echo "Please provide stack name ! Try Again."
    echo "e.g. ./csye6225-aws-cf-create-stack.sh <STACK_NAME>"
    exit 1
fi

# echo "The following are the regions available for creating VPC : "
# REGIONS=$(aws ec2 describe-regions | jq '.Regions')
# echo $REGIONS | jq -c '.[]' | while read i; do
# REGION=$(echo $i | jq -r '.RegionName')
# echo "$REGION"
# done
# echo ""
# echo "Lets first configure your AWS account"
# aws configure

# Create the stack and wait for completion.
echo "Creating Stack $1"
response=$(aws cloudformation create-stack --stack-name "$1" --template-body file://csye6225-cf-networking.yaml --parameters file://csye-6225-cf-networking-parameters.json)
#response=$(aws cloudformation create-stack --stack-name "$1" --template-body file://csye6225-cf-networking.yaml)
echo "Waiting for Stack $1 to be created"
echo "$response"
aws cloudformation wait stack-create-complete --stack-name "$1"
echo "Stack $1 created successfully"

# Revoke the self-referencing ingress rule from the exported security group.
SECURITY_GROUP_ID=$(aws cloudformation list-exports --query "Exports[?Name=='"$1"-SGId'].Value" --no-paginate --output text)
# Bug fix: with `set -e` a failing revoke aborted the script before the old
# `if [ $? = "0" ]` check ran, so the error branch was dead code. Test the
# command directly, and exit non-zero on failure instead of a bare `exit`.
if aws ec2 revoke-security-group-ingress --group-id "$SECURITY_GROUP_ID" --protocol all --source-group "$SECURITY_GROUP_ID"
then
    echo "Revoked public access Successfully"
else
    echo "Error : Revoke public access failed"
    exit 1
fi

aws cloudformation describe-stack-resources --stack-name "$1" | jq '.StackResources' | jq -c '.[]' | jq '.PhysicalResourceId'
|
package evilcraft.items;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.item.ItemStack;
import net.minecraft.world.World;
import net.minecraftforge.fluids.FluidStack;
import evilcraft.api.config.ExtendedConfig;
import evilcraft.api.config.ItemConfig;
import evilcraft.api.config.configurable.ConfigurableDamageIndicatedItemFluidContainer;
import evilcraft.entities.item.EntityBloodPearl;
import evilcraft.fluids.Blood;
/**
* Ender pearl that runs on blood.
* @author rubensworks
*
*/
public class BloodPearlOfTeleportation extends ConfigurableDamageIndicatedItemFluidContainer {

    // Singleton instance, created once via initInstance().
    private static BloodPearlOfTeleportation _instance = null;

    /**
     * Initialise the configurable.
     * @param eConfig The config.
     */
    public static void initInstance(ExtendedConfig<ItemConfig> eConfig) {
        if(_instance == null)
            _instance = new BloodPearlOfTeleportation(eConfig);
        else
            eConfig.showDoubleInitError();
    }

    /**
     * Get the unique instance.
     * @return The instance.
     */
    public static BloodPearlOfTeleportation getInstance() {
        return _instance;
    }

    private BloodPearlOfTeleportation(ExtendedConfig<ItemConfig> eConfig) {
        // Container holds up to 1000 units of Blood (presumably mB - confirm
        // against ConfigurableDamageIndicatedItemFluidContainer's contract).
        super(eConfig, 1000, Blood.getInstance());
    }

    @Override
    public ItemStack onItemRightClick(ItemStack itemStack, World world, EntityPlayer player) {
        // First a simulated drain (doDrain == false) to check availability
        // without consuming anything.
        FluidStack fluidStack = null;
        if(itemStack != null && itemStack.stackTagCompound != null)
            fluidStack = this.drain(itemStack, 100, false);
        if(fluidStack != null && fluidStack.amount > 0) {
            if (!player.capabilities.isCreativeMode) {
                // Actually consume the 100 units outside creative mode.
                this.drain(itemStack, 100, true);
            }
            // Bow sound with a slightly randomized pitch.
            world.playSoundAtEntity(player, "random.bow", 0.5F, 0.4F / (itemRand.nextFloat() * 0.4F + 0.8F));
            if (!world.isRemote) {
                // Spawn the thrown pearl entity server-side only.
                world.spawnEntityInWorld(new EntityBloodPearl(world, player));
            }
            return itemStack;
        }
        return itemStack;
    }
}
|
/*
* Copyright (c) Microsoft. All rights reserved.
* Licensed under the MIT license. See LICENSE file in the project root for full license information.
*/
package com.microsoft.azure.eventhubs.spring;
import org.springframework.boot.context.properties.ConfigurationProperties;
/**
 * Holds the Azure Event Hub connection parameters (namespace, hub name and
 * shared access signature credentials), bound from {@code azure.eventhub.*}
 * configuration properties.
 */
@ConfigurationProperties(
        prefix = "azure.eventhub"
)
public class EventHubTemplate {

    private String serviceBusNamespaceName;
    private String eventHubName;
    private String sharedAccessSignatureKeyName;
    private String sharedAccessSignatureKey;

    /**
     * Creates a template with all required Event Hub parameters.
     *
     * @throws IllegalArgumentException if any parameter is null or empty
     */
    public EventHubTemplate(String eventHubName,
                            String serviceBusNamespaceName,
                            String sharedAccessSignatureKeyName,
                            String sharedAccessSignatureKey) throws Exception {
        // Bug fix: validation used a Java `assert`, which is silently skipped
        // unless the JVM runs with -ea; validate explicitly so a misconfigured
        // application always fails fast.
        requireNonEmpty(eventHubName, "eventHubName");
        requireNonEmpty(serviceBusNamespaceName, "serviceBusNamespaceName");
        requireNonEmpty(sharedAccessSignatureKeyName, "sharedAccessSignatureKeyName");
        requireNonEmpty(sharedAccessSignatureKey, "sharedAccessSignatureKey");
        this.eventHubName = eventHubName;
        this.serviceBusNamespaceName = serviceBusNamespaceName;
        this.sharedAccessSignatureKeyName = sharedAccessSignatureKeyName;
        this.sharedAccessSignatureKey = sharedAccessSignatureKey;
    }

    // Rejects null/empty configuration values with a descriptive message.
    private static void requireNonEmpty(String value, String name) {
        if (value == null || value.isEmpty()) {
            throw new IllegalArgumentException(
                    "Event Hub parameter '" + name + "' not properly set. Check Configuration");
        }
    }

    /** Fluent setter for the Service Bus namespace name. */
    public EventHubTemplate setServiceBusNamespaceName(String serviceBusNamespaceName) {
        this.serviceBusNamespaceName = serviceBusNamespaceName;
        return this;
    }

    /** Fluent setter for the Event Hub name. */
    public EventHubTemplate setEventHubName(String eventHubName) {
        this.eventHubName = eventHubName;
        return this;
    }

    /** Fluent setter for the shared access signature key name. */
    public EventHubTemplate setSharedAccessSignatureKeyName(String sharedAccessSignatureKeyName) {
        this.sharedAccessSignatureKeyName = sharedAccessSignatureKeyName;
        return this;
    }

    /** Fluent setter for the shared access signature key. */
    public EventHubTemplate setSharedAccessSignatureKey(String sharedAccessSignatureKey) {
        this.sharedAccessSignatureKey = sharedAccessSignatureKey;
        return this;
    }

    public String getServiceBusNamespaceName() {
        return serviceBusNamespaceName;
    }

    public String getEventHubName() {
        return eventHubName;
    }

    public String getSharedAccessSignatureKeyName() {
        return sharedAccessSignatureKeyName;
    }

    public String getSharedAccessSignatureKey() {
        return sharedAccessSignatureKey;
    }
}
|
package ru.job4j.professions.engineers.builders;
/**
 * A builder's hammer described by its mass.
 */
public class Hammer {

    // Mass value; 0 until assigned via setMass().
    private int mass;

    /**
     * @return the hammer mass (0 if it was never set)
     */
    public int getMass() {
        return mass;
    }

    /**
     * Sets the hammer mass.
     * Bug fix: {@code mass} previously had no setter or constructor argument,
     * so {@link #getMass()} could only ever return 0.
     *
     * @param mass the new mass value
     */
    public void setMass(int mass) {
        this.mass = mass;
    }
}
|
<gh_stars>1-10
package cim4j;
import java.util.Map;
import java.util.HashMap;
import java.lang.ArrayIndexOutOfBoundsException;
import java.lang.IllegalArgumentException;
/*
The enumeration defines the kinds of the limit types.
*/
/**
 * CIM enumeration wrapper for the kinds of operational limit types
 * (PATL, PATLT, TATL, TC, TCT and voltage limits).
 */
public class LimitTypeKind extends BaseClass
{
    private enum LimitTypeKind_ENUM
    {
        /**
         * The Permanent Admissible Transmission Loading (PATL) is the loading in Amps, MVA or MW that can be accepted by a network branch for an unlimited duration without any risk for the material. The duration attribute is not used and shall be excluded for the PATL limit type. Hence only one limit value exists for the PATL type.
         */
        patl,
        /**
         * Permanent Admissible Transmission Loading Threshold (PATLT) is a value in engineering units defined for PATL and calculated using percentage less than 100 of the PATL type intended to alert operators of an arising condition. The percentage should be given in the name of the OperationalLimitSet. The aceptableDuration is another way to express the severity of the limit.
         */
        patlt,
        /**
         * Temporarily Admissible Transmission Loading (TATL) which is the loading in Amps, MVA or MW that can be accepted by a branch for a certain limited duration. The TATL can be defined in different ways: Such a definition of TATL can depend on the initial operating conditions of the network element (sag situation of a line). The duration attribute can be used define several TATL limit types. Hence multiple TATL limit values may exist having different durations.
         */
        tatl,
        /**
         * Tripping Current (TC) is the ultimate intensity without any delay. It is defined as the threshold the line will trip without any possible remedial actions. The tripping of the network element is ordered by protections against short circuits or by overload protections, but in any case, the activation delay of these protections is not compatible with the reaction delay of an operator (less than one minute). The duration is always zero and the duration attribute may be left out. Hence only one limit value exists for the TC type.
         */
        tc,
        /**
         * Tripping Current Threshold (TCT) is a value in engineering units defined for TC and calculated using percentage less than 100 of the TC type intended to alert operators of an arising condition. The percentage should be given in the name of the OperationalLimitSet. The aceptableDuration is another way to express the severity of the limit.
         */
        tct,
        /**
         * Referring to the rating of the equipments, a voltage too high can lead to accelerated ageing or the destruction of the equipment. This limit type may or may not have duration.
         */
        highVoltage,
        /**
         * A too low voltage can disturb the normal operation of some protections and transformer equipped with on-load tap changers, electronic power devices or can affect the behaviour of the auxiliaries of generation units. This limit type may or may not have duration.
         */
        lowVoltage,
        MAX_LimitTypeKind_ENUM;
    }

    private LimitTypeKind_ENUM value;

    public BaseClass construct() {
        return new LimitTypeKind();
    }

    public LimitTypeKind() {}

    public LimitTypeKind(java.lang.String s) {
        setValue(s);
    }

    /**
     * Parses the trimmed string into the enum value. On an unrecognised
     * value the current value is left unchanged and the error is logged.
     * Bug fix: the previous message was mislabelled "NumberFormatException"
     * although an IllegalArgumentException is what valueOf() throws.
     */
    private void parse(java.lang.String s) {
        try
        {
            value = LimitTypeKind_ENUM.valueOf(s.trim());
        }
        catch (IllegalArgumentException iae)
        {
            System.out.println("IllegalArgumentException: " + iae.getMessage());
        }
    }

    public void setValue(java.lang.String s) {
        parse(s);
    }

    public void setAttribute(java.lang.String a, java.lang.String s) {
        // The attribute name is ignored; an enum only carries its value.
        // (Previously this duplicated the setValue() parsing code verbatim.)
        parse(s);
    }

    public void setAttribute(java.lang.String attributeName, BaseClass value) {
        throw new IllegalArgumentException("ENUM cannot set attribute: " + attributeName);
    }

    private java.lang.String debugName = "LimitTypeKind";

    public java.lang.String debugString(){
        return debugName;
    }

    public java.lang.String toString(boolean b) {
        // Guard against a value that was never set or failed to parse,
        // which previously caused a NullPointerException here.
        return "Enum (" + (value == null ? "null" : value.toString()) + ")";
    }
};
|
<filename>src/common/utils_test.cc
#include "common/utils.h"
#include "gtest/gtest.h"
#include <string>
#include <vector>
// Exercises MemCompare's three-way result (negative / zero / positive) for
// null, empty and ordered string inputs.
// NOTE(review): nullptr appears to compare equal to "" here - confirm this
// matches the documented MemCompare contract in common/utils.h.
TEST(MemCompareTest,test1) {
  EXPECT_EQ(0,MemCompare(nullptr,nullptr));
  EXPECT_EQ(0,MemCompare(nullptr,""));
  EXPECT_LT(MemCompare(nullptr,"a"),0);
  EXPECT_EQ(0,MemCompare("",nullptr));
  EXPECT_GT(MemCompare("a",nullptr),0);
  EXPECT_EQ(0,MemCompare("",""));
  EXPECT_LT(MemCompare("","A"),0);
  EXPECT_LT(0,MemCompare("A",""));
  EXPECT_EQ(0,MemCompare("0","0"));
  EXPECT_GT(0,MemCompare("0","1"));
  // Shorter strings compare less than their extensions.
  EXPECT_GT(0,MemCompare("0","00"));
  EXPECT_GT(0,MemCompare("00","01"));
  EXPECT_LT(0,MemCompare("10","01"));
  EXPECT_LT(0,MemCompare("1","0"));
  EXPECT_LT(0,MemCompare("00","0"));
}
// Splits input into lines via LineTraverse and checks that blank lines
// (leading, trailing and interior) are skipped, never reported to the callback.
TEST(LineTraverseTest,test)
{
  auto Split = [] (const char *src) -> std::vector<std::string> {
    std::vector<std::string> result;
    LineTraverse(src, [&] (const char *ptr, size_t size) -> LineTraverseReturn {
      // The callback must never observe an empty line.
      EXPECT_NE(size, 0U);
      result.emplace_back(ptr,size);
      return LineTraverseReturn::kContinue;
    });
    return result;
  };
  auto result = Split("");
  ASSERT_EQ(0U,result.size());
  result = Split("\n\n\n");
  ASSERT_EQ(0U,result.size());
  result = Split("hello");
  ASSERT_EQ(1U,result.size());
  EXPECT_EQ("hello",result[0]);
  result = Split("hello\nworld");
  ASSERT_EQ(2U,result.size());
  EXPECT_EQ("hello",result[0]);
  EXPECT_EQ("world",result[1]);
  result = Split("hello\nworld\n\n\n");
  ASSERT_EQ(2U,result.size());
  EXPECT_EQ("hello",result[0]);
  EXPECT_EQ("world",result[1]);
  result = Split("\n\n\nhello\nworld");
  ASSERT_EQ(2U,result.size());
  EXPECT_EQ("hello",result[0]);
  EXPECT_EQ("world",result[1]);
  result = Split("\n\n\nhello\nworld\n\n\n");
  ASSERT_EQ(2U,result.size());
  EXPECT_EQ("hello",result[0]);
  EXPECT_EQ("world",result[1]);
  result = Split("\n\n\nhello\n\n\nworld\n\n\n");
  ASSERT_EQ(2U,result.size());
  EXPECT_EQ("hello",result[0]);
  EXPECT_EQ("world",result[1]);
}
namespace {
}
// Counts letters in UTF-8 text; the mixed ASCII/CJK/punctuation sample is
// expected to contain 18 letters.
TEST(GetUtf8LetterNumberTest, test) {
  EXPECT_EQ(0, GetUtf8LetterNumber(""));
  EXPECT_EQ(18, GetUtf8LetterNumber("Hello,文明富强。!·.,、【】"));
}
// LoopbackTraverse visits every element starting at `ptr`, wrapping around
// to begin() after reaching end(); order of visits is checked for each start.
TEST(LoopbackTraverseTest,test) {
  auto Test = [] (const std::vector<int> &v,
          const std::vector<int>::const_iterator ptr,
          const std::vector<int> &expected) {
    std::vector<int> tmp;
    LoopbackTraverse(v.begin(), v.end(), ptr, [&] (std::vector<int>::const_iterator iter) -> int {tmp.push_back(*iter); return 0; });
    EXPECT_EQ(expected, tmp);
  };
  std::vector<int> v1 {1};
  Test(v1, v1.begin(), {1});
  std::vector<int> v2 {1, 2};
  Test(v2, v2.begin(), {1, 2});
  Test(v2, v2.begin() + 1, {2, 1});
  std::vector<int> v4 {1, 2, 4, 8};
  Test(v4, v4.begin(), {1, 2, 4, 8});
  Test(v4, v4.begin() + 1, {2, 4, 8, 1});
  Test(v4, v4.begin() + 2, {4, 8, 1, 2});
  Test(v4, v4.begin() + 3, {8, 1, 2, 4});
}
// GetSetIntersectionNumber counts the common elements of two sorted ranges.
TEST(GetSetIntersectionNumberTest, test) {
  auto DoTest = [] (const std::vector<int> &left, const std::vector<int> &right, size_t expect_val) {
    EXPECT_EQ(expect_val, GetSetIntersectionNumber(left.cbegin(), left.cend(), right.cbegin(), right.cend()));
  };
  DoTest({1, 2}, {4, 5, 6, 7, 8}, 0);
  DoTest({1, 2}, {1, 5, 6, 7, 8}, 1);
  DoTest({1, 2}, {-2, -1, 1, 2, 8}, 2);
}
|
/*******************************************************************************
* This file is part of the Symfony eclipse plugin.
*
* (c) <NAME> <<EMAIL>>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
******************************************************************************/
package com.dubture.symfony.core.util;
import org.eclipse.dltk.core.ISourceModule;
import org.eclipse.php.core.compiler.ast.nodes.ClassDeclaration;
import org.eclipse.php.core.compiler.ast.nodes.NamespaceDeclaration;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import org.json.simple.parser.JSONParser;
import org.json.simple.parser.ParseException;
import com.dubture.symfony.core.log.Logger;
import com.dubture.symfony.core.model.Bundle;
import com.dubture.symfony.core.model.Service;
/**
* Encoding / Decoding for json metadata in the SqlIndex.
*
*
*
* @author <NAME> <<EMAIL>>
*
*/
@SuppressWarnings("restriction")
public class JsonUtils {

    /**
     * Parses a JSON string with a fresh parser instance.
     * Bug fix: the class previously shared a single {@code static JSONParser}
     * across all callers; json-simple's JSONParser keeps internal parse state
     * and is not safe for concurrent use, which can corrupt results when the
     * index is accessed from multiple threads. A parser per call avoids that.
     */
    private static Object parse(String json) throws ParseException {
        return new JSONParser().parse(json);
    }

    /**
     * Encodes a template reference (element name, qualifier, view path and
     * method) as a metadata JSON string with type "reference".
     */
    @SuppressWarnings("unchecked")
    public static String createReference(String elementName, String qualifier, String viewPath, String method) {
        JSONObject data = new JSONObject();
        data.put("elementName", elementName);
        data.put("qualifier", qualifier);
        data.put("viewPath", viewPath);
        data.put("method", method);
        JSONObject header = new JSONObject();
        header.put("type", "reference");
        header.put("data", data);
        return header.toString();
    }

    /**
     * Returns the JSON description of the default synthetic services
     * (currently only the "request" service).
     */
    @SuppressWarnings("unchecked")
    public static String createDefaultSyntheticServices() {
        JSONArray data = new JSONArray();
        JSONObject request = new JSONObject();
        request.put(Service.NAME, "request");
        request.put(Service.CLASS, "Symfony\\Component\\HttpFoundation\\Request");
        data.add(request);
        return data.toString();
    }

    /**
     * Extracts the "type" field from a metadata string, or null on parse errors.
     */
    public static String getElementType(String metadata) {
        try {
            JSONObject json = (JSONObject) parse(metadata);
            String type = (String) json.get("type");
            return type;
        } catch (ParseException e) {
            Logger.logException(e);
        }
        return null;
    }

    /**
     * Extracts the "data" object from a reference metadata string,
     * or null on parse errors.
     */
    public static JSONObject getReferenceData(String metadata) {
        try {
            JSONObject header = (JSONObject) parse(metadata);
            return (JSONObject) header.get("data");
        } catch (ParseException e) {
            Logger.logException(e);
        }
        return null;
    }

    /** Builds the JSON representation of a service (name + class). */
    @SuppressWarnings("unchecked")
    public static JSONObject createService(String id, String className) {
        JSONObject service = new JSONObject();
        service.put(Service.NAME, id);
        service.put(Service.CLASS, className);
        return service;
    }

    /**
     * Parses a JSON array string; returns an empty array instead of null
     * when parsing fails.
     */
    public static JSONArray parseArray(String defaults) {
        try {
            return (JSONArray) parse(defaults);
        } catch (Exception e) {
            Logger.logException(e);
            return new JSONArray();
        }
    }

    /** Builds the JSON representation of a bundle (name, namespace, path). */
    @SuppressWarnings("unchecked")
    public static JSONObject createBundle(ISourceModule sourceModule,
            ClassDeclaration classDec, NamespaceDeclaration namespace) {
        JSONObject bundle = new JSONObject();
        bundle.put(Bundle.NAME, classDec.getName());
        bundle.put(Bundle.NAMESPACE, namespace != null ? namespace.getName() : "");
        // The bundle path is the containing directory of the source module.
        bundle.put(Bundle.PATH, sourceModule.getPath().removeLastSegments(1).toString());
        return bundle;
    }

    /**
     * Rebuilds a Bundle model object from its metadata string, or null on
     * parse errors. Note: only NAME and PATH are restored here; the
     * NAMESPACE written by createBundle() is not read back.
     */
    public static Bundle unpackBundle(String metadata) {
        try {
            JSONObject json = (JSONObject) parse(metadata);
            Bundle bundle = new Bundle(null, (String) json.get(Bundle.NAME));
            bundle.setPath((String) json.get(Bundle.PATH));
            return bundle;
        } catch (ParseException e) {
            Logger.logException(e);
        }
        return null;
    }

    /**
     * Encodes a scalar template reference (no qualifier) as a metadata
     * JSON string with type "scalar".
     */
    @SuppressWarnings("unchecked")
    public static String createScalar(String elementName, String viewPath, String method) {
        JSONObject data = new JSONObject();
        data.put("elementName", elementName);
        data.put("viewPath", viewPath);
        data.put("method", method);
        JSONObject header = new JSONObject();
        header.put("type", "scalar");
        header.put("data", data);
        return header.toString();
    }

    /**
     * Extracts the "data" object from a scalar metadata string,
     * or null on parse errors.
     */
    public static JSONObject getScalar(String metadata) {
        try {
            JSONObject header = (JSONObject) parse(metadata);
            return (JSONObject) header.get("data");
        } catch (ParseException e) {
            Logger.logException(e);
        }
        return null;
    }
}
|
# Interactive guard for commands whose first word matches "force".
# NOTE(review): the branch semantics look inverted relative to the prompt -
# answering n/N ("don't continue") returns 0 (success) while any answer other
# than y/n returns 1; confirm the intended behaviour before relying on this
# as a preexec hook.
function safetycheck () {
  if [[ $1 =~ force ]]; then
    # zsh-style read with an inline prompt string.
    read REPLY\?"Dangerous command, continue? [y/n] "
    case "$REPLY" in
      y|Y ) ;;
      n|N ) return 0;;
      * ) return 1;;
    esac
  fi
}
#autoload -Uz add-zsh-hook
#add-zsh-hook preexec safetycheck |
# =================== The following code **should** be executed inside Docker container ===================

# Fail on the first error and echo every command as it runs.
# NOTE: this was previously enabled only after the apt-get and git-clone
# steps below, so failures in those early steps went undetected.
set -ex

# Install dependencies
sudo apt-get -y update
sudo apt-get -y install expect-dev

# This is where the local pytorch install in the docker image is located
pt_checkout="/var/lib/jenkins/workspace"

# Since we're cat-ing this file, we need to escape all $'s
echo "cpp_doc_push_script.sh: Invoked with $*"

# for statements like ${1:-${DOCS_INSTALL_PATH:-docs/}}
# the order of operations goes:
#   1. Check if there's an argument $1
#   2. If no argument check for environment var DOCS_INSTALL_PATH
#   3. If no environment var fall back to default 'docs/'

# NOTE: It might seem weird to gather the second argument before gathering the first argument
#       but since DOCS_INSTALL_PATH can be derived from DOCS_VERSION it's probably better to
#       try and gather it first, just so we don't potentially break people who rely on this script

# Argument 2: What version of the Python API docs we are building.
version="${2:-${DOCS_VERSION:-master}}"
if [ -z "$version" ]; then
  echo "error: cpp_doc_push_script.sh: version (arg2) not specified"
  exit 1
fi

# Argument 1: Where to copy the built documentation for Python API to
# (pytorch.github.io/$install_path)
install_path="${1:-${DOCS_INSTALL_PATH:-docs/${DOCS_VERSION}}}"
if [ -z "$install_path" ]; then
  echo "error: cpp_doc_push_script.sh: install_path (arg1) not specified"
  exit 1
fi

is_main_doc=false
if [ "$version" == "master" ]; then
  is_main_doc=true
fi

echo "install_path: $install_path version: $version"

# ======================== Building PyTorch C++ API Docs ========================

echo "Building PyTorch C++ API docs..."

# Clone the cppdocs repo (fresh checkout each run)
rm -rf cppdocs
git clone https://github.com/pytorch/cppdocs

sudo apt-get -y install doxygen

# Generate ATen files
pushd "${pt_checkout}"
pip install -r requirements.txt
time python -m torchgen.gen \
  -s aten/src/ATen \
  -d build/aten/src/ATen

# Copy some required files
cp torch/_utils_internal.py tools/shared

# Generate PyTorch files
time python tools/setup_helpers/generate_code.py \
  --native-functions-path aten/src/ATen/native/native_functions.yaml \
  --tags-path aten/src/ATen/native/tags.yaml

# Build the docs
pushd docs/cpp
pip install -r requirements.txt
time make VERBOSE=1 html -j
popd
popd

pushd cppdocs

# Purge everything with some exceptions.
# -p so a leftover sync dir from a previous (failed) run does not abort us.
mkdir -p /tmp/cppdocs-sync
mv _config.yml README.md /tmp/cppdocs-sync/
rm -rf *

# Copy over all the newly generated HTML
cp -r "${pt_checkout}"/docs/cpp/build/html/* .

# Copy back _config.yml
rm -rf _config.yml
mv /tmp/cppdocs-sync/* .

# Make a new commit
git add . || true
git status
git config user.email "soumith+bot@pytorch.org"
git config user.name "pytorchbot"
# If there aren't changes, don't make a commit; push is no-op
git commit -m "Generate C++ docs from pytorch/pytorch@${GITHUB_SHA}" || true
git status

if [[ "${WITH_PUSH:-}" == true ]]; then
  git push -u origin
fi

popd
# =================== The above code **should** be executed inside Docker container ===================
|
#!/bin/sh
# CocoaPods "Embed Pods Frameworks" build-phase script: copies vendored
# frameworks into the app bundle, strips architectures not being built,
# and re-signs the result.
# NOTE(review): the shebang is /bin/sh but the script uses features beyond
# POSIX sh (`function`, arrays, `[[ ]]`, the ERR trap) -- presumably it is
# actually invoked with bash; confirm before changing.
set -e
set -u
set -o pipefail

# Report the script path and failing line number on any unexpected error.
function on_error {
echo "$(realpath -mq "${0}"):$1: error: Unexpected failure"
}
trap 'on_error $LINENO' ERR

if [ -z ${FRAMEWORKS_FOLDER_PATH+x} ]; then
# If FRAMEWORKS_FOLDER_PATH is not set, then there's nowhere for us to copy
# frameworks to, so exit 0 (signalling the script phase was successful).
exit 0
fi

echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

# Run codesign jobs in the background when explicitly enabled (see
# code_sign_if_enabled and the final `wait`).
COCOAPODS_PARALLEL_CODE_SIGN="${COCOAPODS_PARALLEL_CODE_SIGN:-false}"
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"

# Used as a return value for each invocation of `strip_invalid_archs` function.
STRIP_BINARY_RETVAL=0

# This protects against multiple targets copying the same framework dependency at the same time. The solution
# was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html
RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????")
# Copies and strips a vendored framework
# install_framework <framework-path>
# Resolves the framework under BUILT_PRODUCTS_DIR (by full path or by
# basename) or at the literal path given, rsyncs it into the target's
# Frameworks folder, strips architectures not in $ARCHS, re-signs it, and
# (pre-Xcode 7 only) embeds the Swift runtime dylibs it links against.
install_framework()
{
if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
local source="${BUILT_PRODUCTS_DIR}/$1"
elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
elif [ -r "$1" ]; then
local source="$1"
fi

local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

# Resolve a symlinked source to its real path before copying.
if [ -L "${source}" ]; then
echo "Symlinked..."
source="$(readlink "${source}")"
fi

# Use filter instead of exclude so missing patterns don't throw errors.
echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"

local basename
basename="$(basename -s .framework "$1")"
binary="${destination}/${basename}.framework/${basename}"

# Fall back to a bare binary (no .framework wrapper); then resolve a
# symlinked destination binary to the real file.
if ! [ -r "$binary" ]; then
binary="${destination}/${basename}"
elif [ -L "${binary}" ]; then
echo "Destination binary is symlinked..."
dirname="$(dirname "${binary}")"
binary="${dirname}/$(readlink "${binary}")"
fi

# Strip invalid architectures so "fat" simulator / device frameworks work on device
if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
strip_invalid_archs "$binary"
fi

# Resign the code if required by the build settings to avoid unstable apps
code_sign_if_enabled "${destination}/$(basename "$1")"

# Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
local swift_runtime_libs
swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u)
for lib in $swift_runtime_libs; do
echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
code_sign_if_enabled "${destination}/${lib}"
done
fi
}
# Copies and strips a vendored dSYM
# install_dsym <path-to-.framework.dSYM>
# Copies the dSYM into DERIVED_FILES_DIR, strips architectures not being
# built, then moves the result to DWARF_DSYM_FOLDER_PATH -- or touches a
# placeholder there when nothing usable was produced, so Xcode's
# input/output tracking does not re-run this script.
install_dsym() {
local source="$1"
if [ -r "$source" ]; then
# Copy the dSYM into a the targets temp dir.
echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${DERIVED_FILES_DIR}\""
rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${DERIVED_FILES_DIR}"

local basename
basename="$(basename -s .framework.dSYM "$source")"
binary="${DERIVED_FILES_DIR}/${basename}.framework.dSYM/Contents/Resources/DWARF/${basename}"

# Strip invalid architectures so "fat" simulator / device frameworks work on device
if [[ "$(file "$binary")" == *"Mach-O dSYM companion"* ]]; then
strip_invalid_archs "$binary"
fi

# STRIP_BINARY_RETVAL == 1 means strip_invalid_archs found at least one
# matching architecture; only then is the dSYM worth shipping.
if [[ $STRIP_BINARY_RETVAL == 1 ]]; then
# Move the stripped file into its final destination.
echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${DERIVED_FILES_DIR}/${basename}.framework.dSYM\" \"${DWARF_DSYM_FOLDER_PATH}\""
rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${DERIVED_FILES_DIR}/${basename}.framework.dSYM" "${DWARF_DSYM_FOLDER_PATH}"
else
# The dSYM was not stripped at all, in this case touch a fake folder so the input/output paths from Xcode do not reexecute this script because the file is missing.
touch "${DWARF_DSYM_FOLDER_PATH}/${basename}.framework.dSYM"
fi
fi
}
# Signs a framework with the provided identity
# code_sign_if_enabled <path>
# No-op unless an expanded signing identity is set and code signing is both
# required and allowed by the build settings.  When
# COCOAPODS_PARALLEL_CODE_SIGN is "true" the codesign invocation is
# backgrounded; the caller is expected to `wait` for it at the end.
code_sign_if_enabled() {
if [ -n "${EXPANDED_CODE_SIGN_IDENTITY:-}" -a "${CODE_SIGNING_REQUIRED:-}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
# Use the current code_sign_identity
echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
# Built as a string and eval'd so OTHER_CODE_SIGN_FLAGS can expand to
# multiple arguments (and so " &" can be appended for parallel signing).
local code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS:-} --preserve-metadata=identifier,entitlements '$1'"
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
code_sign_cmd="$code_sign_cmd &"
fi
echo "$code_sign_cmd"
eval "$code_sign_cmd"
fi
}
# Strip invalid architectures
# strip_invalid_archs <binary>
# Removes from <binary> (in place, via lipo) every architecture that is not
# in $ARCHS.  Communicates through the global STRIP_BINARY_RETVAL:
#   1 -- the binary had at least one architecture matching the build
#   0 -- none of its architectures match (binary left untouched, warning emitted)
strip_invalid_archs() {
binary="$1"
# Get architectures for current target binary
binary_archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | awk '{$1=$1;print}' | rev)"
# Intersect them with the architectures we are building for
intersected_archs="$(echo ${ARCHS[@]} ${binary_archs[@]} | tr ' ' '\n' | sort | uniq -d)"
# If there are no archs supported by this binary then warn the user
if [[ -z "$intersected_archs" ]]; then
echo "warning: [CP] Vendored binary '$binary' contains architectures ($binary_archs) none of which match the current build architectures ($ARCHS)."
STRIP_BINARY_RETVAL=0
return
fi
stripped=""
for arch in $binary_archs; do
if ! [[ "${ARCHS}" == *"$arch"* ]]; then
# Strip non-valid architectures in-place
lipo -remove "$arch" -output "$binary" "$binary"
stripped="$stripped $arch"
fi
done
if [[ "$stripped" ]]; then
echo "Stripped $binary of architectures:$stripped"
fi
STRIP_BINARY_RETVAL=1
}
# Embed the pod frameworks for the active configuration.  Debug and Release
# currently install the same framework set.
if [[ "$CONFIGURATION" == "Debug" ]]; then
install_framework "${BUILT_PRODUCTS_DIR}/OptimalLabelTextSize/OptimalLabelTextSize.framework"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
install_framework "${BUILT_PRODUCTS_DIR}/OptimalLabelTextSize/OptimalLabelTextSize.framework"
fi
# When signing in parallel, codesign jobs were backgrounded -- wait for them.
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
wait
fi
|
# Program: read a trip's DISTANCE and TIME from the user and print the
# average speed (original assignment text was in Portuguese).

def velocidade_media(distancia, tempo):
    """Return the average speed for a trip.

    :param distancia: distance travelled (e.g. in km)
    :param tempo: elapsed time (e.g. in hours); must be non-zero
    :return: distancia / tempo
    :raises ZeroDivisionError: when tempo is 0 (previously this crashed
        with an unhelpful traceback at module level)
    """
    if tempo == 0:
        raise ZeroDivisionError('o tempo da viagem deve ser maior que zero')
    return distancia / tempo


if __name__ == '__main__':
    # Guarded so the module can be imported without prompting for input.
    print("VELO IDEAL PARA SUA TRIP")
    distanc = float(input('Digite a distância'))
    time = float(input('Digite o tempo da viagem'))
    veloci_media = velocidade_media(distanc, time)
    print('Para a distancia {} e o tempo {} a velocidade média é {} km/h '.format(distanc,time,veloci_media))
<filename>pyvoltha/adapters/extensions/events/heartbeat_event.py
# Copyright 2017-present Adtran, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from voltha_protos.events_pb2 import AlarmEventType, AlarmEventSeverity, AlarmEventCategory
from adapter_alarms import AlarmBase
class HeartbeatAlarm(AlarmBase):
    """Alarm raised when periodic heartbeats from a device are missed.

    Registers with the alarm manager as a CRITICAL, EQUIPMENT-type alarm
    in the PON category named 'Heartbeat'.
    """

    def __init__(self, alarm_mgr, object_type='olt', heartbeat_misses=0):
        # alarm_mgr: alarm manager passed through to AlarmBase.
        # object_type: kind of device the alarm concerns (defaults to 'olt').
        # heartbeat_misses: number of heartbeats missed so far.
        super(HeartbeatAlarm, self).__init__(alarm_mgr, object_type,
                                             alarm='Heartbeat',
                                             alarm_category=AlarmEventCategory.PON,
                                             alarm_type=AlarmEventType.EQUIPMENT,
                                             alarm_severity=AlarmEventSeverity.CRITICAL)
        # Stored so the miss count can be reported as alarm context data.
        self._misses = heartbeat_misses

    def get_context_data(self):
        # Context payload attached to the raised alarm event.
        return {'heartbeats-missed': self._misses}
|
import os
from shutil import copyfile
import argparse
def create_from_and_to(TargetDir, GameModFolder, ModName, file):
    """Build the (source, destination) path pair for one file copy.

    Args:
        TargetDir: directory holding the compiled build output.
        GameModFolder: root of the game's Mods directory.
        ModName: mod name, used as the destination subfolder.
        file: file name to copy.

    Returns:
        [source_path, destination_path] for the copy operation.
    """
    # os.path.join uses the platform separator instead of a hard-coded "\",
    # which also avoids the fragile "%s\%s" escape sequences of the original.
    return [os.path.join(TargetDir, file),
            os.path.join(GameModFolder, ModName, file)]
# --- CLI-driven copy of build artifacts (dll + info.json) into the game's
# --- Mods directory.
parser = argparse.ArgumentParser(description='Perform copy operations')
parser.add_argument('--ModName', metavar='M', type=str, nargs=1, help='The name of the mod')
parser.add_argument('--TargetDir', metavar='T', type=str, nargs=1, help="The dir of the compiled results")
parser.add_argument('--GameModFolder', metavar='G', type=str, nargs=1, help="The path to the game directory")
args = parser.parse_args()
ModName = args.ModName[0]
TargetDir = args.TargetDir[0]
# None-checks use "is not None" (identity), the idiomatic Python form.
GameModFolder = args.GameModFolder[0] if args.GameModFolder is not None and len(args.GameModFolder) > 0 else None
if GameModFolder is None:
    # Raw string: the default Steam path must not be mangled by "\" escapes.
    GameModFolder = r"D:\Program Files (x86)\Steam\steamapps\common\Tyranny\Mods"
# If we cannot find the game folder we can do nothing.
if not os.path.exists(GameModFolder):
    # SystemExit works even without the site module (plain exit() may not).
    raise SystemExit(1)
files_to_copy = [
    create_from_and_to(TargetDir, GameModFolder, ModName, "%s.dll" % ModName),
    create_from_and_to(TargetDir, GameModFolder, ModName, "info.json"),
]
for src, dst in files_to_copy:
    print("%s %s" % (src, dst))
    copyfile(src, dst)
# Open an interactive shell inside the ATLAS CentOS6 CVMFS Singularity image.
# Bind mounts: GCC 4.9.3 support libs and the TAU profiler read-only (:ro),
# and the AtlasADSP / datascience project areas plus the user's home
# directory read-write (:rw) on the Theta filesystem.
singularity shell -B /lus/theta-fs0/projects/AtlasADSP/profilers/quadlibs:/afs/.cern.ch/sw/lcg/contrib/gcc/4.9.3/x86_64-slc6/lib/../lib64/:ro -B /soft/perftools/tau/tau-2.27.1:/soft/perftools/tau/tau-2.27.1:ro -B /lus/theta-fs0/projects/AtlasADSP:/lus/theta-fs0/projects/AtlasADSP:rw -B /lus/theta-fs0/projects/datascience:/lus/theta-fs0/projects/datascience:rw -B /gpfs/mira-home/parton/:/gpfs/mira-home/parton/:rw /lus/theta-fs0/projects/AtlasADSP/atlas/singularity_images/centos6-cvmfs.atlas.cern.ch.x86_64-slc6-gcc49n62.201804250020.sqsh
|
# Preamble for the GATK4 PostprocessGermlineCNVCalls run for sample P_181.
cd '/scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_06_PostprocessGermlineCNVCalls/result/P_181'
# Make the script fail if any command in a pipeline fails.
set -o pipefail
# NOTE(review): this second cd targets the same directory as the quoted cd
# above — likely generated boilerplate; confirm the duplication is intended.
cd /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_06_PostprocessGermlineCNVCalls/result/P_181
gatk --java-options "-Xmx40G" PostprocessGermlineCNVCalls \
--calls-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_001/gcc-calls --calls-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_002/gcc-calls --calls-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_003/gcc-calls --calls-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_004/gcc-calls --calls-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_005/gcc-calls --calls-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_006/gcc-calls --calls-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_007/gcc-calls --calls-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_008/gcc-calls --calls-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_009/gcc-calls --calls-shard-path 
/scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_010/gcc-calls --calls-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_011/gcc-calls --calls-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_012/gcc-calls --calls-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_013/gcc-calls --calls-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_014/gcc-calls --calls-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_015/gcc-calls --calls-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_016/gcc-calls --calls-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_017/gcc-calls --calls-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_018/gcc-calls --calls-shard-path 
/scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_019/gcc-calls --calls-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_020/gcc-calls --calls-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_021/gcc-calls --calls-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_022/gcc-calls --calls-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_023/gcc-calls --calls-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_024/gcc-calls --calls-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_025/gcc-calls --calls-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_026/gcc-calls --calls-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_027/gcc-calls --calls-shard-path 
/scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_028/gcc-calls --calls-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_029/gcc-calls --calls-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_030/gcc-calls --calls-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_031/gcc-calls --calls-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_032/gcc-calls --calls-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_033/gcc-calls --calls-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_034/gcc-calls --calls-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_035/gcc-calls --calls-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_036/gcc-calls --calls-shard-path 
/scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_037/gcc-calls --calls-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_038/gcc-calls --calls-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_039/gcc-calls --calls-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_040/gcc-calls --calls-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_041/gcc-calls --calls-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_042/gcc-calls --calls-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_043/gcc-calls --calls-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_044/gcc-calls --calls-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_045/gcc-calls --calls-shard-path 
/scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_046/gcc-calls --calls-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_047/gcc-calls --calls-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_048/gcc-calls --calls-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_049/gcc-calls --calls-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_050/gcc-calls --calls-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_051/gcc-calls --calls-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_052/gcc-calls --calls-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_053/gcc-calls --calls-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_054/gcc-calls --calls-shard-path 
/scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_055/gcc-calls --calls-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_056/gcc-calls --calls-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_057/gcc-calls --calls-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_058/gcc-calls --calls-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_059/gcc-calls --calls-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_060/gcc-calls --calls-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_061/gcc-calls --calls-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_062/gcc-calls --calls-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_063/gcc-calls --calls-shard-path 
/scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_064/gcc-calls --calls-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_065/gcc-calls --calls-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_066/gcc-calls --calls-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_067/gcc-calls --calls-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_068/gcc-calls --calls-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_069/gcc-calls --calls-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_070/gcc-calls --calls-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_071/gcc-calls --calls-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_072/gcc-calls --calls-shard-path 
/scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_073/gcc-calls --calls-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_074/gcc-calls --calls-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_075/gcc-calls --calls-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_076/gcc-calls --calls-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_077/gcc-calls --calls-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_078/gcc-calls --calls-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_079/gcc-calls --calls-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_080/gcc-calls --calls-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_081/gcc-calls --calls-shard-path 
/scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_082/gcc-calls --calls-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_083/gcc-calls --calls-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_084/gcc-calls --calls-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_085/gcc-calls --calls-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_086/gcc-calls --calls-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_087/gcc-calls --calls-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_088/gcc-calls --calls-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_089/gcc-calls --calls-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_090/gcc-calls --calls-shard-path 
/scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_091/gcc-calls --calls-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_092/gcc-calls --calls-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_093/gcc-calls --calls-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_094/gcc-calls --calls-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_095/gcc-calls --calls-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_096/gcc-calls --calls-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_097/gcc-calls --calls-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_098/gcc-calls --calls-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_099/gcc-calls --calls-shard-path 
/scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_100/gcc-calls \
--model-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_001/gcc-model --model-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_002/gcc-model --model-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_003/gcc-model --model-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_004/gcc-model --model-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_005/gcc-model --model-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_006/gcc-model --model-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_007/gcc-model --model-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_008/gcc-model --model-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_009/gcc-model --model-shard-path 
/scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_010/gcc-model --model-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_011/gcc-model --model-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_012/gcc-model --model-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_013/gcc-model --model-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_014/gcc-model --model-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_015/gcc-model --model-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_016/gcc-model --model-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_017/gcc-model --model-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_018/gcc-model --model-shard-path 
/scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_019/gcc-model --model-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_020/gcc-model --model-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_021/gcc-model --model-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_022/gcc-model --model-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_023/gcc-model --model-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_024/gcc-model --model-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_025/gcc-model --model-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_026/gcc-model --model-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_027/gcc-model --model-shard-path 
/scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_028/gcc-model --model-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_029/gcc-model --model-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_030/gcc-model --model-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_031/gcc-model --model-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_032/gcc-model --model-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_033/gcc-model --model-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_034/gcc-model --model-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_035/gcc-model --model-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_036/gcc-model --model-shard-path 
/scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_037/gcc-model --model-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_038/gcc-model --model-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_039/gcc-model --model-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_040/gcc-model --model-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_041/gcc-model --model-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_042/gcc-model --model-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_043/gcc-model --model-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_044/gcc-model --model-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_045/gcc-model --model-shard-path 
/scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_046/gcc-model --model-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_047/gcc-model --model-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_048/gcc-model --model-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_049/gcc-model --model-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_050/gcc-model --model-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_051/gcc-model --model-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_052/gcc-model --model-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_053/gcc-model --model-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_054/gcc-model --model-shard-path 
/scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_055/gcc-model --model-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_056/gcc-model --model-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_057/gcc-model --model-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_058/gcc-model --model-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_059/gcc-model --model-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_060/gcc-model --model-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_061/gcc-model --model-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_062/gcc-model --model-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_063/gcc-model --model-shard-path 
/scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_064/gcc-model --model-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_065/gcc-model --model-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_066/gcc-model --model-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_067/gcc-model --model-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_068/gcc-model --model-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_069/gcc-model --model-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_070/gcc-model --model-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_071/gcc-model --model-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_072/gcc-model --model-shard-path 
/scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_073/gcc-model --model-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_074/gcc-model --model-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_075/gcc-model --model-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_076/gcc-model --model-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_077/gcc-model --model-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_078/gcc-model --model-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_079/gcc-model --model-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_080/gcc-model --model-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_081/gcc-model --model-shard-path 
/scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_082/gcc-model --model-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_083/gcc-model --model-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_084/gcc-model --model-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_085/gcc-model --model-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_086/gcc-model --model-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_087/gcc-model --model-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_088/gcc-model --model-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_089/gcc-model --model-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_090/gcc-model --model-shard-path 
/scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_091/gcc-model --model-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_092/gcc-model --model-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_093/gcc-model --model-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_094/gcc-model --model-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_095/gcc-model --model-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_096/gcc-model --model-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_097/gcc-model --model-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_098/gcc-model --model-shard-path /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_099/gcc-model --model-shard-path 
/scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_05_GermlineCNVCaller_2_scatterCall/result/lindsay_exomeseq_3772_ITER_100/gcc-model \
--sample-index 11 \
--allosomal-contig chrX \
--allosomal-contig chrY \
--autosomal-ref-copy-number 2 \
--contig-ploidy-calls /scratch/cqs/shengq2/jennifer/20200407_lindsay_exomeseq_3772_hg38/bwa_refine_nosoftclip_gatk4_CNV_Germline_04_DetermineGermlineContigPloidyCohortMode/result/lindsay_exomeseq_3772-calls \
--output-denoised-copy-ratios P_181.denoised_copy_ratios.tsv \
--output-genotyped-intervals P_181.genotyped_intervals.vcf.gz \
--output-genotyped-segments P_181.genotyped_segments.vcf.gz
rm -rf .cache .conda .config .theano
|
# coding=utf-8
# Copyright 2020 The Tensor2Robot Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for run_collect_eval."""
import os
from absl import flags
from absl.testing import absltest
from absl.testing import parameterized
import gin
from tensor2robot.research.pose_env import pose_env
from tensor2robot.utils import continuous_collect_eval
import tensorflow.compat.v1 as tf
FLAGS = flags.FLAGS
class PoseEnvModelsTest(parameterized.TestCase):
  """Integration test: run a short collect loop in the pose environment."""

  @parameterized.parameters(
      (pose_env.PoseEnvRandomPolicy,),
  )
  def test_run_pose_env_collect(self, demo_policy_cls):
    """Runs collect_eval_loop once and checks episode records are written."""
    urdf = pose_env.get_pybullet_urdf_root()
    config_path = os.path.join(
        FLAGS.test_srcdir, 'research/pose_env/configs',
        'run_random_collect.gin')
    gin.parse_config_file(config_path)
    # Keep output per-policy so parameterized runs do not collide.
    out_root = os.path.join(absltest.get_default_test_tmpdir(),
                            str(demo_policy_cls))
    gin.bind_parameter('PoseToyEnv.urdf_root', urdf)
    gin.bind_parameter('collect_eval_loop.root_dir', out_root)
    gin.bind_parameter('run_meta_env.num_tasks', 2)
    gin.bind_parameter('run_meta_env.num_episodes_per_adaptation', 1)
    gin.bind_parameter('collect_eval_loop.policy_class', demo_policy_cls)
    continuous_collect_eval.collect_eval_loop()
    # One .tfrecord per task is expected (num_tasks == 2).
    records = tf.io.gfile.glob(
        os.path.join(out_root, 'policy_collect', '*.tfrecord'))
    self.assertLen(records, 2)
if __name__ == '__main__':
absltest.main()
|
<filename>app/controllers/onLoadControllerUsers.js<gh_stars>1-10
'use strict';
(function() {
  // Table body that receives one <tr> per user.
  var userList = document.querySelector('.user-list');

  // Attach a click handler to every element matching `selector` that calls
  // `path` with the user id (extracted by `getId`) and reloads on success.
  function bindReloadAction(selector, path, getId) {
    document.querySelectorAll(selector).forEach(function(btn) {
      btn.addEventListener('click', function(e) {
        ajaxFunctions.ajaxRequest('GET', appUrl + path + 'id=' + getId(e.target), function(msg) {
          window.location.reload(true);
        });
      });
    });
  }

  ajaxFunctions.ready(function() {
    new Promise(function(resolve, reject) {
      // Fetch the full user list from the API.
      ajaxFunctions.ajaxRequest('GET', appUrl + '/users', function(data) {
        resolve(JSON.parse(data));
      });
    }).then(function(users) {
      // Render users in ascending `order`, one row each.
      users.sort(function(a, b) {
        return a.order - b.order;
      });
      users.forEach(function(user) {
        var row = document.createElement('tr');
        row.setAttribute('class', 'bill-item');
        row.setAttribute('id', user._id);
        row.innerHTML = "<td>" + user.name + "</td><td>" + user.username + "</td><td>" + user.admin + "</td><td><button class='btn-primary'><i class='fa fa-arrow-up up' aria-hidden='true'></i></button> <button class='btn-primary '><i class='fa fa-arrow-down down' aria-hidden='true'></i></button></td>"+"<td><button class='btn2 userdel' >delete user</button></td>";
        userList.append(row);
      });
    }).then(function() {
      // Delete button: click target is the button itself (td -> tr).
      bindReloadAction('.userdel', '/user_del?', function(el) {
        return el.parentElement.parentElement.id;
      });
      // Up/down arrows: click target is the <i> icon (button -> td -> tr).
      bindReloadAction('.up', '/moveup?', function(el) {
        return el.parentElement.parentElement.parentElement.id;
      });
      bindReloadAction('.down', '/movedown?', function(el) {
        return el.parentElement.parentElement.parentElement.id;
      });
    });
  });
})();
|
<gh_stars>0
import React, { Component } from "react";
import { View, Text } from "react-native";
import { connect } from "react-redux";
import { loadingTrue, loadingFalse } from "../../store/actions/index";
import { Button } from "react-native-elements";
import ResponsiveImage from "react-native-responsive-image";
// LANGUAGES LIBRARY
import { setI18nConfig } from "../../../languages/i18n";
var i18n = setI18nConfig();
// STYLES
import ReserveConfirmedStyles from "./ReserveConfirmedStyles";
class ReserveConfirmed extends Component {
  // Header bar title for this screen, resolved through i18n.
  static navigationOptions = {
    title: i18n.t("reserve_confirmed_view_title")
  };
  state = {
    appJson: "" // NOTE(review): not read anywhere in this screen — confirm before removing
  };
  /**
   * COMPONENT DID MOUNT
   * @description Registers navigation focus/blur listeners when the
   * component loads; both handlers are currently placeholders.
   */
  async componentDidMount() {
    //Do Something
    this.subs = [
      this.props.navigation.addListener("willFocus", async () => {
        // Fired when the screen is about to gain focus.
        //Do Something
      }),
      this.props.navigation.addListener("willBlur", async () => {
        // Fired when the screen is about to lose focus.
        //Do Something
      })
    ];
  }
  /**
   * COMPONENT WILL UNMOUNT
   * @description Removes the navigation listeners registered in
   * componentDidMount so they do not leak after the screen is closed.
   */
  componentWillUnmount() {
    this.subs.forEach(sub => sub.remove());
    //Do Something
  }
  // Confirmation screen: logo, two localized confirmation texts and a
  // button that navigates back to the reservation detail view.
  render() {
    return (
      <View style={ReserveConfirmedStyles.container}>
        <View style={ReserveConfirmedStyles.view_empty}>
          <View style={ReserveConfirmedStyles.logo}>
            <ResponsiveImage
              source={require("../../../assets/img/yay-logo-rounded.png")}
              initWidth="200"
              initHeight="200"
            />
          </View>
          <Text style={ReserveConfirmedStyles.view_empty_text}>
            {i18n.t("reserve_confirmed_view_text_1")}
          </Text>
          <Text style={ReserveConfirmedStyles.view_empty_text}>
            {i18n.t("reserve_confirmed_view_text_2")}
          </Text>
          <Button
            loading={false}
            title={i18n.t("reserve_confirmed_button").toUpperCase()}
            containerStyle={
              ReserveConfirmedStyles.button_reserve_container_style
            }
            buttonStyle={ReserveConfirmedStyles.button_reserve_style}
            titleStyle={ReserveConfirmedStyles.button_title_style}
            onPress={() => {
              this.props.navigation.navigate("DetailView");
            }}
            disabled={false}
          />
        </View>
      </View>
    );
  }
}
/**
 * Selects the slices of the Redux store this screen consumes.
 * @param {*} state full Redux state tree
 * @returns props: the app JSON blob and the loading-bar flag
 */
const mapStateToProps = state => ({
  appJson: state.mainReducer.appJson,
  loading_bar: state.mainReducer.loading
});
/**
 * Binds the loading-bar action creators to dispatch.
 * @param {*} dispatch Redux dispatch function
 * @returns props exposing c_loadingTrue / c_loadingFalse callbacks
 */
const mapDispatchToProps = dispatch => ({
  c_loadingTrue: () => dispatch(loadingTrue()),
  c_loadingFalse: () => dispatch(loadingFalse())
});
// Wire the screen to the Redux store and export the connected component.
export default connect(
  mapStateToProps,
  mapDispatchToProps
)(ReserveConfirmed);
|
#!/bin/sh
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This script should be sourced into other zookeeper
# scripts to setup the env variables
# We use ZOOCFGDIR if defined,
# otherwise we use /etc/zookeeper
# or the conf directory that is
# a sibling of this script's directory.
# Or you can specify the ZOOCFGDIR using the
# '--config' option in the command line.
# Location of the ZooKeeper launcher scripts; callers may pre-set ZOOBINDIR.
ZOOBINDIR=${ZOOBINDIR:-/usr/bin}
ZOOKEEPER_PREFIX=${ZOOBINDIR}/..

#check to see if the conf dir is given as an optional argument
# ("--config <dir>" must be the first pair of arguments).
if [ $# -gt 1 ]
then
    if [ "--config" = "$1" ]
    then
        shift
        confdir=$1
        shift
        ZOOCFGDIR=$confdir
    fi
fi

# Fall back to a conf dir that is a sibling of this script's directory
# (source layout) or ../etc/zookeeper (installed layout).
if [ "x$ZOOCFGDIR" = "x" ]
then
    if [ -e "${ZOOKEEPER_PREFIX}/conf" ]; then
        ZOOCFGDIR="$ZOOBINDIR/../conf"
    else
        ZOOCFGDIR="$ZOOBINDIR/../etc/zookeeper"
    fi
fi

# Optional per-site environment overrides.
if [ -f "${ZOOCFGDIR}/zookeeper-env.sh" ]; then
    . "${ZOOCFGDIR}/zookeeper-env.sh"
fi

if [ "x$ZOOCFG" = "x" ]
then
    ZOOCFG="zoo.cfg"
fi

ZOOCFG="$ZOOCFGDIR/$ZOOCFG"

# Optional extra JVM environment (e.g. JVMFLAGS).
if [ -f "$ZOOCFGDIR/java.env" ]
then
    . "$ZOOCFGDIR/java.env"
fi

if [ "x${ZOO_LOG_DIR}" = "x" ]
then
    ZOO_LOG_DIR="."
fi

if [ "x${ZOO_LOG4J_PROP}" = "x" ]
then
    ZOO_LOG4J_PROP="INFO,CONSOLE"
fi

# Prefer the JVM from JAVA_HOME when set; otherwise rely on PATH.
if [ "$JAVA_HOME" != "" ]; then
  JAVA="$JAVA_HOME/bin/java"
else
  JAVA=java
fi

#add the zoocfg dir to classpath
CLASSPATH="$ZOOCFGDIR:$CLASSPATH"

for i in "$ZOOBINDIR"/../src/java/lib/*.jar
do
    CLASSPATH="$i:$CLASSPATH"
done

#make it work in the binary package
# BUGFIX: the glob must be unquoted to expand. The previous test,
# [ -e ".../zookeeper-*.jar" ], looked for a file literally named
# "zookeeper-*.jar" and therefore never matched; probe with ls instead.
if ls "${ZOOKEEPER_PREFIX}"/share/zookeeper/zookeeper-*.jar > /dev/null 2>&1; then
    LIBPATH="${ZOOKEEPER_PREFIX}"/share/zookeeper/*.jar
else
    #release tarball format
    for i in "$ZOOBINDIR"/../zookeeper-*.jar
    do
        CLASSPATH="$i:$CLASSPATH"
    done
    LIBPATH="${ZOOBINDIR}"/../lib/*.jar
fi

for i in ${LIBPATH}
do
    CLASSPATH="$i:$CLASSPATH"
done

#make it work for developers
for d in "$ZOOBINDIR"/../build/lib/*.jar
do
    CLASSPATH="$d:$CLASSPATH"
done

#make it work for developers
CLASSPATH="$ZOOBINDIR/../build/classes:$CLASSPATH"

# On Cygwin, convert the colon-separated UNIX classpath to Windows form.
case "`uname`" in
CYGWIN*) cygwin=true ;;
*) cygwin=false ;;
esac

if $cygwin
then
    CLASSPATH=`cygpath -wp "$CLASSPATH"`
fi

#echo "CLASSPATH=$CLASSPATH"
|
#!/bin/bash
# Install a common set of fonts. Expects $isApt to be exported by the caller
# (true = Debian/Ubuntu apt, anything else = Fedora dnf) and appends package
# manager output to $DOTFILES_LOG_FILE.
set -e

# Refuse to guess the package manager: the caller must set $isApt.
if [ -z "${isApt+x}" ]; then
  echo "[ERROR] \$isApt is not set."
  exit 1
fi

echo " [-] Installing fonts..."

if [ "$isApt" = true ]; then
  # Pre-accept the Microsoft EULA so ttf-mscorefonts-installer is non-interactive.
  echo ttf-mscorefonts-installer msttcorefonts/accepted-mscorefonts-eula select true | sudo debconf-set-selections

  sudo apt-get install -y \
    fonts-dejavu \
    fonts-droid-fallback \
    fonts-firacode \
    fonts-noto \
    fonts-roboto \
    fonts-open-sans \
    ttf-mscorefonts-installer \
    fonts-ubuntu \
    &>> "$DOTFILES_LOG_FILE"

  # Adobe Source Code Pro (not packaged on apt systems): clone the release
  # branch into the user font dir and rebuild the font cache.
  echo " [-] Installing Adobe Source Code Pro font..."
  mkdir -p "$HOME/.local/share/fonts/source-code-pro"
  git clone --depth 1 --branch release https://github.com/adobe-fonts/source-code-pro.git "$HOME/.local/share/fonts/source-code-pro" &>> "$DOTFILES_LOG_FILE"
  fc-cache -f -v "$HOME/.local/share/fonts/source-code-pro" > /dev/null
else
  sudo dnf install -y \
    adobe-source-code-pro-fonts \
    adobe-source-sans-pro-fonts \
    dejavu-sans-fonts \
    google-droid-sans-fonts \
    google-droid-sans-mono-fonts \
    google-noto-sans-fonts \
    google-noto-serif-fonts \
    google-roboto-fonts \
    mozilla-fira-mono-fonts \
    mozilla-fira-sans-fonts \
    open-sans-fonts \
    ubuntu-title-fonts \
    &>> "$DOTFILES_LOG_FILE"

  # Microsoft's Core Fonts (http://mscorefonts2.sourceforge.net/)
  sudo dnf install -y \
    https://rpmfind.net/linux/sourceforge/m/ms/mscorefonts2/rpms/msttcore-fonts-installer-2.6-1.noarch.rpm \
    &>> "$DOTFILES_LOG_FILE"
fi
|
const router = require("express").Router();
const jwt_decode = require("jwt-decode");
const Exercises = require("./exercise-model");
// ---------------------- /api/exercises ---------------------- //
// POST /api/exercises — create an exercise owned by the authenticated user.
// The owner id is taken from the JWT in the Authorization header, never
// from the request body.
router.post("/", (req, res) => {
  let exercise = req.body;
  const decoded = jwt_decode(req.headers.authorization);
  exercise.user_id = decoded.subject;
  Exercises.add(exercise)
    .then(newExercise => res.status(201).json({ newExercise }))
    .catch(err => res.status(500).json(err));
});
// GET /api/exercises — list every exercise.
router.get("/", (req, res) => {
  Exercises.find()
    .then(exercises => res.status(200).json({ exercises }))
    .catch(error => res.status(500).json(error));
});
// PUT /api/exercises/:id — apply the request body as changes to one exercise.
router.put("/:id", async (req, res) => {
  try {
    const updatedExercise = await Exercises.update(req.params.id, req.body);
    res.status(201).json({ updatedExercise });
  } catch (err) {
    res.status(500).json(err);
  }
});
// DELETE /api/exercises/:id — remove one exercise.
// Previously this reported success even when no row matched the id.
router.delete("/:id", (req, res) => {
  const id = req.params.id;
  Exercises.remove(id)
    .then(deleted => {
      // Assumes remove() resolves with the number of rows deleted
      // (knex-style) — TODO confirm against exercise-model.
      if (deleted) {
        res.status(200).json({ message: "Exercise deleted successfully" });
      } else {
        res.status(404).json({ message: "Exercise not found" });
      }
    })
    .catch(err => {
      res.status(500).json(err);
    });
});
// Expose the exercises router to be mounted by the main server.
module.exports = router;
|
<filename>client/session/api_token.go
package session
import (
"context"
"errors"
"fmt"
"net/http"
"strings"
"sync"
"time"
"github.com/dgrijalva/jwt-go/v4"
"github.com/shurcooL/graphql"
)
// If the token is about to expire, we'd rather exchange it now than risk having
// a stale one.
const timePadding = 30 * time.Second
// FromAPIToken creates a session from a ready API token.
//
// The token is parsed WITHOUT signature verification, purely to read its
// claims: the audience (used as the API endpoint) and the expiry. The
// returned closure errors when the token cannot be parsed, does not carry
// exactly one audience, or lacks an expiry claim.
func FromAPIToken(_ context.Context, client *http.Client) func(string) (Session, error) {
	return func(token string) (Session, error) {
		var claims jwt.StandardClaims
		_, _, err := (&jwt.Parser{}).ParseUnverified(token, &claims)
		// An "unverifiable token" error is expected (we supply no key); any
		// other parse error is fatal. (UnverfiableTokenError is the upstream
		// library's own spelling.)
		if unverifiable := new(jwt.UnverfiableTokenError); err != nil && !errors.As(err, &unverifiable) {
			return nil, fmt.Errorf("could not parse the API token: %w", err)
		}
		if len(claims.Audience) != 1 {
			return nil, fmt.Errorf("unexpected audience: %v", claims.Audience)
		}
		// BUGFIX: a token without an "exp" claim used to panic with a nil
		// dereference on claims.ExpiresAt.Time below.
		if claims.ExpiresAt == nil {
			return nil, errors.New("API token has no expiry (exp) claim")
		}
		return &apiToken{
			client:          client,
			endpoint:        claims.Audience[0],
			jwt:             token,
			tokenValidUntil: claims.ExpiresAt.Time,
			timer:           time.Now,
		}, nil
	}
}
// apiToken is a Session implementation backed by a bearer API token.
type apiToken struct {
	client *http.Client // HTTP client used for GraphQL requests
	endpoint string // API base URL, taken from the token's audience claim
	jwt string // current bearer token; guarded by tokenMutex
	tokenMutex sync.RWMutex // guards jwt and tokenValidUntil
	tokenValidUntil time.Time // expiry of jwt; guarded by tokenMutex
	timer func() time.Time // clock source (time.Now in production)
}
// BearerToken returns the raw JWT to present as a bearer credential. The
// context is accepted for interface compatibility but unused here.
func (a *apiToken) BearerToken(ctx context.Context) (string, error) {
	a.tokenMutex.RLock()
	token := a.jwt
	a.tokenMutex.RUnlock()
	return token, nil
}
// Endpoint returns the GraphQL endpoint derived from the token audience,
// normalizing away any trailing slashes.
func (a *apiToken) Endpoint() string {
	base := strings.TrimRight(a.endpoint, "/")
	return base + "/graphql"
}
// isFresh reports whether the token is still comfortably inside its
// validity window, keeping timePadding to spare.
func (a *apiToken) isFresh() bool {
	a.tokenMutex.RLock()
	defer a.tokenMutex.RUnlock()
	deadline := a.tokenValidUntil
	return deadline.After(a.timer().Add(timePadding))
}
// mutate runs a GraphQL mutation against the session's endpoint.
func (a *apiToken) mutate(ctx context.Context, m interface{}, variables map[string]interface{}) error {
	gqlClient := graphql.NewClient(a.Endpoint(), a.client)
	return gqlClient.Mutate(ctx, m, variables)
}
// setJWT installs a refreshed token and its expiry under the write lock.
func (a *apiToken) setJWT(user *user) {
	a.tokenMutex.Lock()
	a.jwt = user.JWT
	a.tokenValidUntil = time.Unix(user.ValidUntil, 0)
	a.tokenMutex.Unlock()
}
|
<filename>LightSentinel/src/main/java/com/resms/lightsentinel/common/handler/AbstractLightSentinelEventHandler.java
package com.resms.lightsentinel.common.handler;
import com.resms.lightsentinel.common.LightSentinelException;
import org.springframework.context.ApplicationEvent;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
/**
* LightSentinel事件插件抽象类,自定义事件需要继承此抽象基类
*
* @param <EVENT>
*
* @author sam
*/
public abstract class AbstractLightSentinelEventHandler<EVENT extends ApplicationEvent> implements LightSentinelEventHandler<EVENT> {
    // Registry this handler mounts itself into during construction.
    protected LightSentinelEventRegistry registry;
    // Concrete event type, resolved from the EVENT parameter via reflection.
    protected Class<EVENT> eventType;
    // Optional handler id; getGuid() falls back to the class name when unset.
    protected String guid;
    /**
     * Resolves the generic event type and mounts this handler on the registry.
     *
     * @param registry registry to mount into
     * @throws LightSentinelException if an equivalent handler is already
     *         mounted, or the generic event type cannot be resolved
     */
    public AbstractLightSentinelEventHandler(LightSentinelEventRegistry registry) throws LightSentinelException {
        this.registry = registry;
        initGenericClass();
        // NOTE(review): mount() is overridable and is called from the
        // constructor, so a subclass override runs before the subclass's own
        // state is initialized — confirm subclasses do not rely on it.
        if(!mount()) {
            throw new LightSentinelException("Handler already exists!");
        }
    }
    @Override
    public Class<EVENT> getEventType() {
        return eventType;
    }
    @Override
    public String getGuid() {
        // Default identity is the concrete class name.
        return guid == null?this.getClass().getName():guid;
    }
    public void setGuid(String guid) {
        this.guid = guid;
    }
    /**
     * Resolves the EVENT type argument reflectively: walks up the superclass
     * chain to the first parameterized type, and reads its first type
     * argument when that type is this base class. Otherwise falls back to
     * the generic interface's type argument.
     */
    private void initGenericClass(){
        try {
            Type parentType = this.getClass().getGenericSuperclass();
            // Climb until a parameterized superclass is found (or the chain ends).
            while (!(parentType instanceof ParameterizedType)){
                parentType = ((Class<?>)parentType).getGenericSuperclass();
                if (parentType == null || Object.class.equals(parentType.getClass())) {
                    break;
                }
            }
            if (parentType instanceof ParameterizedType){
                ParameterizedType genericParentType = (ParameterizedType)parentType;
                if (genericParentType.getRawType().equals(AbstractLightSentinelEventHandler.class)){
                    this.eventType = (Class<EVENT>)genericParentType.getActualTypeArguments()[0];
                }
            } else {
                // Not a direct subclass of this generic base class; try the
                // type argument of the implemented generic interface instead.
                if (this.getClass().equals(AbstractLightSentinelEventHandler.class)){
                    this.eventType = (Class<EVENT>)((ParameterizedType)this.getClass().getGenericInterfaces()[0]).getActualTypeArguments()[0];
                }
            }
        } catch (Exception e){
            throw new LightSentinelException("generic event type init fail!");
        }
    }
    @Override
    public boolean mount() {
        return registry.mount(this);
    }
    @Override
    public void umount(boolean force) {
        // NOTE(review): the force flag is currently ignored — confirm intent.
        registry.unmount(this);
    }
    @Override
    public String getDesc() {
        return "";
    }
    // Subclasses implement the actual event reaction.
    @Override
    public abstract void onEvent(EVENT event);
}
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
package com.azure.cosmos.implementation.patch;
/**
 * JSON property names and operation-type identifiers used when serializing
 * Cosmos DB partial-document (patch) operations.
 */
final class PatchConstants {
    // Properties: field names of a single patch operation in the payload.
    static final String PropertyNames_OperationType = "op";
    static final String PropertyNames_Path = "path";
    static final String PropertyNames_Value = "value";
    // Operations: wire identifiers for the supported patch operation types.
    static final String OperationTypeNames_Add = "add";
    static final String OperationTypeNames_Remove = "remove";
    static final String OperationTypeNames_Replace = "replace";
    static final String OperationTypeNames_Set = "set";
    static final String OperationTypeNames_Increment = "incr";

    // Constants holder — prevent instantiation.
    private PatchConstants() {
    }
}
|
import os
import unittest
from binstar_client.utils import get_binstar
from argparse import Namespace
try:
import conda_build.api
except ImportError:
import conda_build.config
from conda_build_all.build import build, upload
from conda_build_all.inspect_binstar import (distribution_exists,
distribution_exists_on_channel,
add_distribution_to_channel,
copy_distribution_to_owner)
from conda_build_all.tests.integration.test_builder import RecipeCreatingUnit
def clear_binstar(cli, owner):
    """Delete every channel and package that ``owner`` has on the server.

    The "rm -rf *" of the binstar world -- used to reset shared test state.

    Args:
        cli: binstar/anaconda.org client exposing ``list_channels``,
            ``remove_channel``, ``user_packages`` and ``remove_package``.
        owner: account name whose channels and packages are wiped.
    """
    channels = list(cli.list_channels(owner))
    for name in channels:
        cli.remove_channel(owner, name)
    package_names = [pkg['name'] for pkg in cli.user_packages(owner)]
    for name in package_names:
        cli.remove_package(owner, name)
# Account used exclusively by these integration tests.
OWNER = 'Obvious-ci-tests'
# Real anaconda.org client, authenticated via the BINSTAR_TOKEN env var.
CLIENT = get_binstar(Namespace(token=os.environ.get('BINSTAR_TOKEN', None), site=None))


@unittest.skipIf(os.environ.get('CONDA_BUILD_ALL_TEST_ANACONDA_CLOUD', False) != '1',
                 "Not testing real binstar usage as the "
                 "CONDA_BUILD_ALL_TEST_ANACONDA_CLOUD environment variable is not "
                 "set to '1'.")
class Test(RecipeCreatingUnit):
    # Note: These tests upload things to anaconda.org and are completely global. That is,
    # if somebody else in the world is running the tests at the same time anywhere on the planet,
    # they will behave in very strange ways (highly likely to fail).

    def setUp(self):
        # Start from an empty account so existence checks are meaningful.
        clear_binstar(CLIENT, OWNER)
        super(Test, self).setUp()

    def tearDown(self):
        # Leave the shared test account empty for the next run.
        clear_binstar(CLIENT, OWNER)
        super(Test, self).tearDown()

    def test_distribution_exists(self):
        # End-to-end exercise of distribution_exists, the per-channel
        # variant, add_distribution_to_channel and copy_distribution_to_owner
        # against the real anaconda.org service.
        # Build a recipe.
        meta = self.write_meta('test_recipe_1', """
            package:
                name: test_recipe_1
                version: 'determined_at_build_time'
            build:
                script: echo "v0.1.0.dev1" > __conda_version__.txt
            """)
        meta = build(meta)
        # conda-build >= 2 exposes an api module; fall back to the legacy
        # global config object otherwise.
        if hasattr(conda_build, 'api'):
            build_config = conda_build.api.Config()
        else:
            build_config = conda_build.config.config
        # Check distribution exists returns false when there is no distribution.
        self.assertFalse(distribution_exists(CLIENT, OWNER, meta))
        # upload the distribution
        upload(CLIENT, meta, OWNER, channels=['testing'], config=build_config)
        # Check the distribution exists. Notice there is no channel being supplied here.
        self.assertTrue(distribution_exists(CLIENT, OWNER, meta))
        # Check the distribution is on testing but not on main.
        self.assertTrue(distribution_exists_on_channel(CLIENT, OWNER, meta, channel='testing'))
        self.assertFalse(distribution_exists_on_channel(CLIENT, OWNER, meta, channel='main'))
        add_distribution_to_channel(CLIENT, OWNER, meta, channel='main')
        # Check that the distribution has been added.
        self.assertTrue(distribution_exists_on_channel(CLIENT, OWNER, meta, channel='main'))
        # Add the meta for a recipe known to exist on conda-forge
        meta2 = self.write_meta('conda_build_all', """
            package:
                name: conda-build-all
                version: 0.12.0
            """)
        copy_distribution_to_owner(CLIENT, 'conda-forge', OWNER, meta2, channel='main')
        self.assertTrue(distribution_exists_on_channel(CLIENT, OWNER, meta2))
# Allow running the integration tests directly (outside a test runner).
if __name__ == '__main__':
    unittest.main()
|
using System;
namespace VoteKit.Api
{
    /// <summary>Immutable API configuration: URL base path and a sentinel GUID.</summary>
    public record Config(string BasePath, Guid EmptyGuid);

    /// <summary>Prefixes incoming query strings with the configured base path.</summary>
    public class QueryProcessor
    {
        private readonly Config _config;

        public QueryProcessor(Config config) => _config = config;

        /// <summary>Returns <paramref name="input"/> prefixed by the base path.</summary>
        public string ProcessQuery(string input) => _config.BasePath + input;
    }
}
#!/bin/sh
# -*- tab-width: 4 -*- ;; Emacs
# vi: set noexpandtab :: Vi/ViM
############################################################ IDENT(1)
#
# $Title: Script to create a new package $
# $Copyright: 1999-2017 Devin Teske. All rights reserved. $
# $FrauBSD: pkgcenter-project7R/freebsd/create.sh 2019-07-21 17:32:07 -0700 freebsdfrau $
#
############################################################ INFORMATION
#
# Usage: create.sh [OPTIONS] package-dir ...
# OPTIONS:
# -h Print this message to stderr and exit.
# -f Force. Allow creation over a directory that already exists.
#
############################################################ GLOBALS
pgm="${0##*/}" # Program basename (strip leading directory components)
progdir="${0%/*}" # Program directory (strip trailing /basename)

#
# Global exit status
#
SUCCESS=0
FAILURE=1

#
# OS Glue
# UNAME_s may be preset by the caller; default to `uname -s` output.
#
: ${UNAME_s:=$( uname -s )}

#
# Command-line options
#
FORCE= # -f (non-empty allows creating over an existing directory)
############################################################ FUNCTIONS
# err FORMAT [ARGUMENT ...]
#
# Print a printf(1)-style formatted message to stderr. A missing/empty
# FORMAT is a silent no-op (returns success).
#
err()
{
	local fmt="$1"
	shift 1 # fmt
	[ "$fmt" ] || return $SUCCESS
	printf "$fmt\n" "$@" >&2
}
# die [FORMAT [ARGUMENT ...]]
#
# Optionally print a message to stderr before exiting with failure status.
#
die()
{
	err "$@" # no-op when no FORMAT given
	exit $FAILURE
}
# usage
#
# Prints a short syntax statement to stderr and exits (via die) with
# failure status.
#
usage()
{
	local optfmt="\t%-4s %s\n" # tab, left-justified flag, description
	exec >&2 # everything printed below goes to stderr
	printf "Usage: %s [OPTIONS] package-dir ...\n" "$pgm"
	printf "OPTIONS:\n"
	printf "$optfmt" "-h" \
		"Print this message to stderr and exit."
	printf "$optfmt" "-f" \
		"Force. Allow creation over a directory that already exists."
	die
}
# Provide a realpath(1) fallback on systems that lack the utility.
if ! type realpath > /dev/null 2>&1; then
	case "$UNAME_s" in
	Darwin)
		# macOS readlink(1) has no -f; canonicalize via Perl's Cwd.
		realpath()
		{
			perl -le 'use Cwd; print Cwd::abs_path(@ARGV)' -- "$@"
		}
		;;
	*)
		# GNU userland: readlink -f canonicalizes the path.
		realpath()
		{
			readlink -f "$@"
		}
	esac
fi
############################################################ MAIN

#
# Process command-line options
#
while getopts fh flag; do
	case "$flag" in
	f) FORCE=1 ;;
	*) usage # NOTREACHED
	esac
done
shift $(( $OPTIND - 1 ))

#
# Validate number of arguments
#
[ $# -gt 0 ] || usage

#
# Loop over each remaining package-dir argument(s)
#
while [ $# -gt 0 ]; do
	DEST="$1"
	shift 1

	#
	# Get the package name and proper Makefile from pathname
	# NB: a trailing slash leaves NAME empty; strip it and re-derive.
	#
	NAME="${DEST##*/}"
	if [ "$DEST" -a ! "$NAME" ]; then
		DEST="${DEST%/*}"
		NAME="${DEST##*/}"
	fi
	if [ "$NAME" ]; then
		printf "===> Creating \`%s'\n" "$DEST"
	else
		usage # NOTREACHED
	fi

	#
	# Detect older package creation (used later in Makefile fixup)
	# ar_opt mirrors tar(1) compression flags: z=gzip for RELENG_1-4,
	# j=bzip2 for RELENG_5-9, J=xz for everything newer (default).
	#
	ar_opt=J
	case "$( realpath "$DEST" )" in
	*/RELENG_[1-4][_/]*) ar_opt=z ;;
	*/RELENG_[5-9][_/]*) ar_opt=j ;;
	esac

	#
	# Make sure that the directory we are going to create doesn't already
	# exist. If it does, issue an error and skip this package (unless `-f'
	# is passed).
	#
	# Otherwise, create the destination.
	#
	printf "Creating package repository directory: "
	if [ -e "$DEST" ]; then
		printf "\n"
		err "ERROR: Directory \`%s' already exists" "$DEST"
		if [ ! "$FORCE" ]; then
			err "ERROR: Skipping package (use \`-f' to override)"
			continue
		else
			err "ERROR: Proceeding anyway (\`-f' was passed)"
		fi
	fi
	if ! mkdir -p "$DEST"; then
		printf "\n"
		err "ERROR: Could not create directory \`%s'" "$DEST"
		die "ERROR: Exiting"
	fi
	printf "%s\n" "$DEST"

	#
	# Create the `stage' directory within the package repository
	#
	printf "Creating package \`stage' directory...\n"
	if ! mkdir -p "$DEST/stage"; then
		err "ERROR: Could not create directory \`%s/stage'" "$DEST"
		die "ERROR: Exiting"
	fi

	#
	# Extract skeleton directory into package repository
	# (tar xk keeps existing files, so nothing already created is clobbered)
	#
	printf "Copying \`skel' structure into package repository...\n"
	tar co --exclude CVS -f - -C "$progdir/skel" . | tar xkvf - -C "$DEST"

	#
	# Makefile fixup: move in the appropriate Makefile
	# Only replace Makefile when it is absent or still identical to the
	# skeleton copy, so local modifications are preserved.
	#
	printf "Adjusting for archive format...\n"
	case "$ar_opt" in
	z) if [ ! -e "$DEST/Makefile" ] || \
	   cmp "$DEST/Makefile" "$progdir/skel/Makefile"; then
		mv -vf "$DEST/Makefile.old" "$DEST/Makefile"
	   else
		rm -vf "$DEST/Makefile.old"
	   fi
	   rm -vf "$DEST/Makefile.ng" "$DEST/MANIFEST"
	   ;;
	j) rm -vf "$DEST/Makefile.old" "$DEST/Makefile.ng" "$DEST/MANIFEST" ;;
	J) rm -vf "$DEST/Makefile.old"
	   if [ ! -e "$DEST/Makefile" ] ||
	      cmp "$DEST/Makefile" "$progdir/skel/Makefile"
	   then
		mv -vf "$DEST/Makefile.ng" "$DEST/Makefile"
	   else
		rm -vf "$DEST/Makefile.ng"
	   fi
	   rm -vf "$DEST/PLIST" "$DEST/stage/+COMMENT" "$DEST/stage/+DESC"
	   ;;
	esac

	#
	# That's it (onto the next, back at the top).
	#
	printf "Done.\n"
done

################################################################################
# END
################################################################################
|
<gh_stars>1-10
// Copyright 2018 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
////////////////////////////////////////////////////////////////////////////////
#include "tink/aead/aead_key_templates.h"
#include "gmock/gmock.h"
#include "gtest/gtest.h"
#include "tink/aead/aes_ctr_hmac_aead_key_manager.h"
#include "tink/aead/aes_eax_key_manager.h"
#include "tink/aead/aes_gcm_key_manager.h"
#include "tink/aead/aes_gcm_siv_key_manager.h"
#include "tink/aead/xchacha20_poly1305_key_manager.h"
#include "tink/core/key_manager_impl.h"
#include "tink/util/test_matchers.h"
#include "proto/aes_ctr_hmac_aead.pb.h"
#include "proto/aes_eax.pb.h"
#include "proto/aes_gcm.pb.h"
#include "proto/aes_gcm_siv.pb.h"
#include "proto/common.pb.h"
#include "proto/tink.pb.h"
#include "proto/xchacha20_poly1305.pb.h"
using google::crypto::tink::AesCtrHmacAeadKeyFormat;
using google::crypto::tink::AesEaxKeyFormat;
using google::crypto::tink::AesGcmKeyFormat;
using google::crypto::tink::AesGcmSivKeyFormat;
using google::crypto::tink::HashType;
using google::crypto::tink::KeyTemplate;
using google::crypto::tink::OutputPrefixType;
namespace crypto {
namespace tink {
namespace {
using ::crypto::tink::test::IsOk;
using ::testing::Eq;
using ::testing::Ref;
// Exercises the AES-EAX AEAD templates (128- and 256-bit): verifies type URL,
// output-prefix type, the serialized key-format parameters (key/IV sizes),
// singleton behavior of the accessor, and acceptance by AesEaxKeyManager.
TEST(AeadKeyTemplatesTest, testAesEaxKeyTemplates) {
  std::string type_url = "type.googleapis.com/google.crypto.tink.AesEaxKey";
  { // Test Aes128Eax().
    // Check that returned template is correct.
    const KeyTemplate& key_template = AeadKeyTemplates::Aes128Eax();
    EXPECT_EQ(type_url, key_template.type_url());
    EXPECT_EQ(OutputPrefixType::TINK, key_template.output_prefix_type());
    AesEaxKeyFormat key_format;
    EXPECT_TRUE(key_format.ParseFromString(key_template.value()));
    EXPECT_EQ(16, key_format.key_size());
    EXPECT_EQ(16, key_format.params().iv_size());
    // Check that reference to the same object is returned.
    const KeyTemplate& key_template_2 = AeadKeyTemplates::Aes128Eax();
    EXPECT_EQ(&key_template, &key_template_2);
    // Check that the template works with the key manager.
    AesEaxKeyManager key_type_manager;
    auto key_manager = internal::MakeKeyManager<Aead>(&key_type_manager);
    EXPECT_EQ(key_manager->get_key_type(), key_template.type_url());
    auto new_key_result =
        key_manager->get_key_factory().NewKey(key_template.value());
    EXPECT_TRUE(new_key_result.ok()) << new_key_result.status();
  }
  { // Test Aes256Eax().
    // Check that returned template is correct.
    const KeyTemplate& key_template = AeadKeyTemplates::Aes256Eax();
    EXPECT_EQ(type_url, key_template.type_url());
    EXPECT_EQ(OutputPrefixType::TINK, key_template.output_prefix_type());
    AesEaxKeyFormat key_format;
    EXPECT_TRUE(key_format.ParseFromString(key_template.value()));
    EXPECT_EQ(32, key_format.key_size());
    EXPECT_EQ(16, key_format.params().iv_size());
    // Check that reference to the same object is returned.
    const KeyTemplate& key_template_2 = AeadKeyTemplates::Aes256Eax();
    EXPECT_EQ(&key_template, &key_template_2);
    // Check that the template works with the key manager.
    AesEaxKeyManager key_type_manager;
    auto key_manager = internal::MakeKeyManager<Aead>(&key_type_manager);
    EXPECT_EQ(key_manager->get_key_type(), key_template.type_url());
    auto new_key_result =
        key_manager->get_key_factory().NewKey(key_template.value());
    EXPECT_TRUE(new_key_result.ok()) << new_key_result.status();
  }
}
// Aes256GcmNoPrefix: 256-bit AES-GCM template with OutputPrefixType::RAW,
// i.e. ciphertexts carry no Tink output prefix.
TEST(Aes256GcmNoPrefix, Basics) {
  EXPECT_THAT(AeadKeyTemplates::Aes256GcmNoPrefix().type_url(),
              Eq("type.googleapis.com/google.crypto.tink.AesGcmKey"));
  EXPECT_THAT(AeadKeyTemplates::Aes256GcmNoPrefix().type_url(),
              Eq(AesGcmKeyManager().get_key_type()));
}

TEST(Aes256GcmNoPrefix, OutputPrefixType) {
  EXPECT_THAT(AeadKeyTemplates::Aes256GcmNoPrefix().output_prefix_type(),
              Eq(OutputPrefixType::RAW));
}

// The accessor must return the same static instance on every call.
TEST(Aes256GcmNoPrefix, MultipleCallsSameReference) {
  EXPECT_THAT(AeadKeyTemplates::Aes256GcmNoPrefix(),
              Ref(AeadKeyTemplates::Aes256GcmNoPrefix()));
}

// The serialized key format must be accepted by the key type manager.
TEST(Aes256GcmNoPrefix, WorksWithKeyTypeManager) {
  const KeyTemplate& key_template = AeadKeyTemplates::Aes256GcmNoPrefix();
  AesGcmKeyFormat key_format;
  EXPECT_TRUE(key_format.ParseFromString(key_template.value()));
  EXPECT_THAT(AesGcmKeyManager().ValidateKeyFormat(key_format), IsOk());
}

TEST(Aes256GcmNoPrefix, CheckValues) {
  const KeyTemplate& key_template = AeadKeyTemplates::Aes256GcmNoPrefix();
  AesGcmKeyFormat key_format;
  EXPECT_TRUE(key_format.ParseFromString(key_template.value()));
  EXPECT_THAT(key_format.key_size(), Eq(32));
}
// Aes256Gcm: 256-bit AES-GCM template with the standard TINK output prefix.
TEST(Aes256Gcm, Basics) {
  EXPECT_THAT(AeadKeyTemplates::Aes256Gcm().type_url(),
              Eq("type.googleapis.com/google.crypto.tink.AesGcmKey"));
  EXPECT_THAT(AeadKeyTemplates::Aes256Gcm().type_url(),
              Eq(AesGcmKeyManager().get_key_type()));
}

TEST(Aes256Gcm, OutputPrefixType) {
  EXPECT_THAT(AeadKeyTemplates::Aes256Gcm().output_prefix_type(),
              Eq(OutputPrefixType::TINK));
}

// The accessor must return the same static instance on every call.
TEST(Aes256Gcm, MultipleCallsSameReference) {
  EXPECT_THAT(AeadKeyTemplates::Aes256Gcm(),
              Ref(AeadKeyTemplates::Aes256Gcm()));
}

// The serialized key format must be accepted by the key type manager.
TEST(Aes256Gcm, WorksWithKeyTypeManager) {
  const KeyTemplate& key_template = AeadKeyTemplates::Aes256Gcm();
  AesGcmKeyFormat key_format;
  EXPECT_TRUE(key_format.ParseFromString(key_template.value()));
  EXPECT_THAT(AesGcmKeyManager().ValidateKeyFormat(key_format), IsOk());
}

TEST(Aes256Gcm, CheckValues) {
  const KeyTemplate& key_template = AeadKeyTemplates::Aes256Gcm();
  AesGcmKeyFormat key_format;
  EXPECT_TRUE(key_format.ParseFromString(key_template.value()));
  EXPECT_THAT(key_format.key_size(), Eq(32));
}
// Aes128Gcm: 128-bit AES-GCM template with the standard TINK output prefix.
TEST(Aes128Gcm, Basics) {
  EXPECT_THAT(AeadKeyTemplates::Aes128Gcm().type_url(),
              Eq("type.googleapis.com/google.crypto.tink.AesGcmKey"));
  EXPECT_THAT(AeadKeyTemplates::Aes128Gcm().type_url(),
              Eq(AesGcmKeyManager().get_key_type()));
}

TEST(Aes128Gcm, OutputPrefixType) {
  EXPECT_THAT(AeadKeyTemplates::Aes128Gcm().output_prefix_type(),
              Eq(OutputPrefixType::TINK));
}

// The accessor must return the same static instance on every call.
TEST(Aes128Gcm, MultipleCallsSameReference) {
  EXPECT_THAT(AeadKeyTemplates::Aes128Gcm(),
              Ref(AeadKeyTemplates::Aes128Gcm()));
}

// The serialized key format must be accepted by the key type manager.
TEST(Aes128Gcm, WorksWithKeyTypeManager) {
  const KeyTemplate& key_template = AeadKeyTemplates::Aes128Gcm();
  AesGcmKeyFormat key_format;
  EXPECT_TRUE(key_format.ParseFromString(key_template.value()));
  EXPECT_THAT(AesGcmKeyManager().ValidateKeyFormat(key_format), IsOk());
}

TEST(Aes128Gcm, CheckValues) {
  const KeyTemplate& key_template = AeadKeyTemplates::Aes128Gcm();
  AesGcmKeyFormat key_format;
  EXPECT_TRUE(key_format.ParseFromString(key_template.value()));
  EXPECT_THAT(key_format.key_size(), Eq(16));
}
// Exercises the AES-GCM-SIV templates (128- and 256-bit): type URL, prefix
// type, key size, singleton behavior, and acceptance by AesGcmSivKeyManager.
TEST(AeadKeyTemplatesTest, testAesGcmSivKeyTemplates) {
  std::string type_url = "type.googleapis.com/google.crypto.tink.AesGcmSivKey";
  { // Test Aes128GcmSiv().
    // Check that returned template is correct.
    const KeyTemplate& key_template = AeadKeyTemplates::Aes128GcmSiv();
    EXPECT_EQ(type_url, key_template.type_url());
    EXPECT_EQ(OutputPrefixType::TINK, key_template.output_prefix_type());
    AesGcmSivKeyFormat key_format;
    EXPECT_TRUE(key_format.ParseFromString(key_template.value()));
    EXPECT_EQ(16, key_format.key_size());
    // Check that reference to the same object is returned.
    const KeyTemplate& key_template_2 = AeadKeyTemplates::Aes128GcmSiv();
    EXPECT_EQ(&key_template, &key_template_2);
    // Check that the template works with the key manager.
    AesGcmSivKeyManager key_type_manager;
    auto key_manager = internal::MakeKeyManager<Aead>(&key_type_manager);
    EXPECT_EQ(key_manager->get_key_type(), key_template.type_url());
    auto new_key_result =
        key_manager->get_key_factory().NewKey(key_template.value());
    EXPECT_TRUE(new_key_result.ok()) << new_key_result.status();
  }
  { // Test Aes256GcmSiv().
    // Check that returned template is correct.
    const KeyTemplate& key_template = AeadKeyTemplates::Aes256GcmSiv();
    EXPECT_EQ(type_url, key_template.type_url());
    EXPECT_EQ(OutputPrefixType::TINK, key_template.output_prefix_type());
    AesGcmSivKeyFormat key_format;
    EXPECT_TRUE(key_format.ParseFromString(key_template.value()));
    EXPECT_EQ(32, key_format.key_size());
    // Check that reference to the same object is returned.
    const KeyTemplate& key_template_2 = AeadKeyTemplates::Aes256GcmSiv();
    EXPECT_EQ(&key_template, &key_template_2);
    // Check that the template works with the key manager.
    AesGcmSivKeyManager key_type_manager;
    auto key_manager = internal::MakeKeyManager<Aead>(&key_type_manager);
    EXPECT_EQ(key_manager->get_key_type(), key_template.type_url());
    auto new_key_result =
        key_manager->get_key_factory().NewKey(key_template.value());
    EXPECT_TRUE(new_key_result.ok()) << new_key_result.status();
  }
}
// Exercises the AES-CTR + HMAC-SHA256 composite AEAD templates: AES key/IV
// sizes, HMAC key/tag sizes and hash, singleton behavior, and acceptance by
// AesCtrHmacAeadKeyManager.
TEST(AeadKeyTemplatesTest, testAesCtrHmacAeadKeyTemplates) {
  std::string type_url =
      "type.googleapis.com/google.crypto.tink.AesCtrHmacAeadKey";
  { // Test Aes128CtrHmacSha256().
    // Check that returned template is correct.
    const KeyTemplate& key_template = AeadKeyTemplates::Aes128CtrHmacSha256();
    EXPECT_EQ(type_url, key_template.type_url());
    EXPECT_EQ(OutputPrefixType::TINK, key_template.output_prefix_type());
    AesCtrHmacAeadKeyFormat key_format;
    EXPECT_TRUE(key_format.ParseFromString(key_template.value()));
    EXPECT_EQ(16, key_format.aes_ctr_key_format().key_size());
    EXPECT_EQ(16, key_format.aes_ctr_key_format().params().iv_size());
    EXPECT_EQ(32, key_format.hmac_key_format().key_size());
    EXPECT_EQ(16, key_format.hmac_key_format().params().tag_size());
    EXPECT_EQ(HashType::SHA256, key_format.hmac_key_format().params().hash());
    // Check that reference to the same object is returned.
    const KeyTemplate& key_template_2 = AeadKeyTemplates::Aes128CtrHmacSha256();
    EXPECT_EQ(&key_template, &key_template_2);
    // Check that the template works with the key manager.
    AesCtrHmacAeadKeyManager key_type_manager;
    auto key_manager = internal::MakeKeyManager<Aead>(&key_type_manager);
    EXPECT_EQ(key_manager->get_key_type(), key_template.type_url());
    auto new_key_result =
        key_manager->get_key_factory().NewKey(key_template.value());
    EXPECT_TRUE(new_key_result.ok()) << new_key_result.status();
  }
  { // Test Aes256CtrHmacSha256().
    // Check that returned template is correct.
    const KeyTemplate& key_template = AeadKeyTemplates::Aes256CtrHmacSha256();
    EXPECT_EQ(type_url, key_template.type_url());
    EXPECT_EQ(OutputPrefixType::TINK, key_template.output_prefix_type());
    AesCtrHmacAeadKeyFormat key_format;
    EXPECT_TRUE(key_format.ParseFromString(key_template.value()));
    EXPECT_EQ(32, key_format.aes_ctr_key_format().key_size());
    EXPECT_EQ(16, key_format.aes_ctr_key_format().params().iv_size());
    EXPECT_EQ(32, key_format.hmac_key_format().key_size());
    EXPECT_EQ(32, key_format.hmac_key_format().params().tag_size());
    EXPECT_EQ(HashType::SHA256, key_format.hmac_key_format().params().hash());
    // Check that reference to the same object is returned.
    const KeyTemplate& key_template_2 = AeadKeyTemplates::Aes256CtrHmacSha256();
    EXPECT_EQ(&key_template, &key_template_2);
    // Check that the template works with the key manager.
    AesCtrHmacAeadKeyManager key_type_manager;
    auto key_manager = internal::MakeKeyManager<Aead>(&key_type_manager);
    EXPECT_EQ(key_manager->get_key_type(), key_template.type_url());
    auto new_key_result =
        key_manager->get_key_factory().NewKey(key_template.value());
    EXPECT_TRUE(new_key_result.ok()) << new_key_result.status();
  }
}
// Exercises the XChaCha20-Poly1305 template: type URL, prefix type, singleton
// behavior, and acceptance by XChaCha20Poly1305KeyManager. (The key format
// proto has no parameters, so no format fields are checked.)
TEST(AeadKeyTemplatesTest, testXChaCha20Poly1305KeyTemplates) {
  std::string type_url =
      "type.googleapis.com/google.crypto.tink.XChaCha20Poly1305Key";
  // Check that returned template is correct.
  const KeyTemplate& key_template = AeadKeyTemplates::XChaCha20Poly1305();
  EXPECT_EQ(type_url, key_template.type_url());
  EXPECT_EQ(OutputPrefixType::TINK, key_template.output_prefix_type());
  // Check that reference to the same object is returned.
  const KeyTemplate& key_template_2 = AeadKeyTemplates::XChaCha20Poly1305();
  EXPECT_EQ(&key_template, &key_template_2);
  // Check that the template works with the key manager.
  XChaCha20Poly1305KeyManager key_type_manager;
  auto key_manager = internal::MakeKeyManager<Aead>(&key_type_manager);
  EXPECT_EQ(key_manager->get_key_type(), key_template.type_url());
  auto new_key_result =
      key_manager->get_key_factory().NewKey(key_template.value());
  EXPECT_TRUE(new_key_result.ok()) << new_key_result.status();
}
} // namespace
} // namespace tink
} // namespace crypto
|
/*******************************************************************************
* Copyright (c) 2015, 2016 <NAME>.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*******************************************************************************/
'use strict';

/**
 * @ngdoc service
 * @name greensopinionfinanceApp.toastService
 * @description
 * # toastService
 * Holds the text of the currently visible toast. `show` displays a message
 * and schedules it to disappear after 3.5s; the timer only clears the toast
 * if no newer message has replaced it in the meantime.
 */
angular.module('greensopinionfinanceApp')
  .service('toastService', ['$timeout', function ($timeout) {
    // Currently displayed toast text ('' when nothing is shown).
    var currentText = '';

    // Clear the toast; with an argument, only clear when the toast still
    // shows that exact text (prevents an old timer wiping a newer toast).
    function dismiss(textToMatch) {
      if (textToMatch === undefined || currentText === textToMatch) {
        currentText = '';
      }
    }

    return {
      show: function (text) {
        currentText = text;
        $timeout(function () {
          dismiss(text);
        }, 3500);
      },
      clearMessage: dismiss,
      message: function () {
        return currentText;
      }
    };
  }]);
|
/**
* <a href="http://www.openolat.org">
* OpenOLAT - Online Learning and Training</a><br>
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); <br>
* you may not use this file except in compliance with the License.<br>
* You may obtain a copy of the License at the
* <a href="http://www.apache.org/licenses/LICENSE-2.0">Apache homepage</a>
* <p>
* Unless required by applicable law or agreed to in writing,<br>
* software distributed under the License is distributed on an "AS IS" BASIS, <br>
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br>
* See the License for the specific language governing permissions and <br>
* limitations under the License.
* <p>
* Initial code contributed and copyrighted by<br>
* frentix GmbH, http://www.frentix.com
* <p>
*/
package org.olat.modules.coach.ui.curriculum.certificate;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;
import org.olat.NewControllerFactory;
import org.olat.basesecurity.BaseSecurity;
import org.olat.basesecurity.OrganisationRoles;
import org.olat.core.commons.services.mark.Mark;
import org.olat.core.commons.services.mark.MarkManager;
import org.olat.core.dispatcher.mapper.MapperService;
import org.olat.core.dispatcher.mapper.manager.MapperKey;
import org.olat.core.gui.UserRequest;
import org.olat.core.gui.components.Component;
import org.olat.core.gui.components.form.flexible.FormItem;
import org.olat.core.gui.components.form.flexible.FormItemContainer;
import org.olat.core.gui.components.form.flexible.elements.FlexiTableElement;
import org.olat.core.gui.components.form.flexible.elements.FlexiTableFilter;
import org.olat.core.gui.components.form.flexible.elements.FormLink;
import org.olat.core.gui.components.form.flexible.impl.FormBasicController;
import org.olat.core.gui.components.form.flexible.impl.FormEvent;
import org.olat.core.gui.components.form.flexible.impl.elements.table.BooleanCellRenderer;
import org.olat.core.gui.components.form.flexible.impl.elements.table.DefaultFlexiColumnModel;
import org.olat.core.gui.components.form.flexible.impl.elements.table.FlexiTableColumnModel;
import org.olat.core.gui.components.form.flexible.impl.elements.table.FlexiTableComponentDelegate;
import org.olat.core.gui.components.form.flexible.impl.elements.table.FlexiTableCssDelegate;
import org.olat.core.gui.components.form.flexible.impl.elements.table.FlexiTableDataModelFactory;
import org.olat.core.gui.components.form.flexible.impl.elements.table.FlexiTableRendererType;
import org.olat.core.gui.components.form.flexible.impl.elements.table.SelectionEvent;
import org.olat.core.gui.components.form.flexible.impl.elements.table.StaticFlexiCellRenderer;
import org.olat.core.gui.components.form.flexible.impl.elements.table.TextFlexiCellRenderer;
import org.olat.core.gui.components.form.flexible.impl.elements.table.TreeNodeFlexiCellRenderer;
import org.olat.core.gui.components.link.Link;
import org.olat.core.gui.components.progressbar.ProgressBar.BarColor;
import org.olat.core.gui.components.progressbar.ProgressBar.LabelAlignment;
import org.olat.core.gui.components.progressbar.ProgressBar.RenderSize;
import org.olat.core.gui.components.progressbar.ProgressBar.RenderStyle;
import org.olat.core.gui.components.progressbar.ProgressBarItem;
import org.olat.core.gui.components.stack.BreadcrumbPanel;
import org.olat.core.gui.components.velocity.VelocityContainer;
import org.olat.core.gui.control.Controller;
import org.olat.core.gui.control.Event;
import org.olat.core.gui.control.WindowControl;
import org.olat.core.gui.control.generic.closablewrapper.CloseableModalController;
import org.olat.core.gui.control.generic.dtabs.Activateable2;
import org.olat.core.id.Identity;
import org.olat.core.id.OLATResourceable;
import org.olat.core.id.Roles;
import org.olat.core.id.context.BusinessControlFactory;
import org.olat.core.id.context.ContextEntry;
import org.olat.core.id.context.StateEntry;
import org.olat.core.util.StringHelper;
import org.olat.core.util.Util;
import org.olat.core.util.resource.OresHelper;
import org.olat.core.util.vfs.VFSLeaf;
import org.olat.course.CorruptedCourseException;
import org.olat.course.assessment.AssessmentModule;
import org.olat.course.assessment.manager.EfficiencyStatementManager;
import org.olat.course.assessment.model.UserEfficiencyStatementLight;
import org.olat.course.certificate.CertificateLight;
import org.olat.course.certificate.CertificatesManager;
import org.olat.course.certificate.CertificatesModule;
import org.olat.course.certificate.ui.CertificateAndEfficiencyStatementListController;
import org.olat.course.certificate.ui.CertificateAndEfficiencyStatementRow;
import org.olat.course.certificate.ui.UploadExternalCertificateController;
import org.olat.modules.assessment.AssessmentEntryCompletion;
import org.olat.modules.assessment.AssessmentEntryScoring;
import org.olat.modules.assessment.AssessmentService;
import org.olat.modules.assessment.ui.AssessedIdentityListController;
import org.olat.modules.coach.RoleSecurityCallback;
import org.olat.modules.coach.ui.curriculum.certificate.CurriculumElementWithViewsDataModel.ElementViewCols;
import org.olat.modules.curriculum.Curriculum;
import org.olat.modules.curriculum.CurriculumElement;
import org.olat.modules.curriculum.CurriculumElementMembership;
import org.olat.modules.curriculum.CurriculumSecurityCallback;
import org.olat.modules.curriculum.CurriculumService;
import org.olat.modules.curriculum.model.CurriculumElementRefImpl;
import org.olat.modules.curriculum.model.CurriculumElementRepositoryEntryViews;
import org.olat.modules.curriculum.ui.CurriculumElementCalendarController;
import org.olat.modules.curriculum.ui.CurriculumListController;
import org.olat.repository.RepositoryEntry;
import org.olat.repository.RepositoryEntryMyView;
import org.olat.repository.RepositoryEntryRef;
import org.olat.repository.RepositoryManager;
import org.olat.repository.RepositoryService;
import org.olat.repository.controllers.EntryChangedEvent;
import org.olat.repository.controllers.EntryChangedEvent.Change;
import org.olat.repository.model.RepositoryEntryRefImpl;
import org.olat.repository.model.SearchMyRepositoryEntryViewParams;
import org.olat.repository.ui.PriceMethod;
import org.olat.repository.ui.RepositoryEntryImageMapper;
import org.olat.repository.ui.list.RepositoryEntryDetailsController;
import org.olat.resource.OLATResource;
import org.olat.resource.accesscontrol.ACService;
import org.olat.resource.accesscontrol.AccessControlModule;
import org.olat.resource.accesscontrol.method.AccessMethodHandler;
import org.olat.resource.accesscontrol.model.OLATResourceAccess;
import org.olat.resource.accesscontrol.model.PriceMethodBundle;
import org.olat.resource.accesscontrol.ui.PriceFormat;
import org.springframework.beans.factory.annotation.Autowired;
/**
* This is a list of curriculum elements and repository entries
* aimed to participants. The repository entries permissions
* follow the same rules as {@link org.olat.repository.ui.list.RepositoryEntryListController}<br>
* <p>
* <p>
* Initial date: 11 mai 2018<br>
*
* @author srosse, <EMAIL>, http://www.frentix.com
*/
public class CertificateAndEfficiencyStatementCurriculumListController extends FormBasicController implements FlexiTableCssDelegate, FlexiTableComponentDelegate, Activateable2 {
// --- Table / UI state ---------------------------------------------------
private FlexiTableElement tableEl;
private CurriculumElementWithViewsDataModel tableModel;
private final BreadcrumbPanel stackPanel;
private FormLink uploadCertificateButton;
// Set in the constructor from the current user's roles and the certificates
// module configuration; controls whether the upload button is shown.
private boolean canUploadExternalCertificate;
// NOTE(review): not referenced in the methods visible here — presumably a
// running suffix for unique component ids; confirm before removing.
private int counter;
private final boolean guestOnly;
private final List<Curriculum> curriculumList;
// Mapper delivering repository-entry thumbnail images.
private final MapperKey mapperThumbnailKey;
// The identity whose certificates/statements are listed (may differ from
// the logged-in user when a coach/manager is looking at somebody else).
private final Identity assessedIdentity;
private final CurriculumSecurityCallback curriculumSecurityCallback;
private final RoleSecurityCallback roleSecurityCallback;

// --- Child controllers --------------------------------------------------
private CloseableModalController cmc;
private RepositoryEntryDetailsController detailsCtrl;
private CurriculumElementCalendarController calendarsCtrl;
private UploadExternalCertificateController uploadCertificateController;

// --- Injected services --------------------------------------------------
@Autowired
private ACService acService;
@Autowired
private MarkManager markManager;
@Autowired
private MapperService mapperService;
@Autowired
private AccessControlModule acModule;
@Autowired
private RepositoryService repositoryService;
@Autowired
private CurriculumService curriculumService;
@Autowired
private RepositoryManager repositoryManager;
@Autowired
private AssessmentService assessmentService;
@Autowired
private BaseSecurity securityManager;
@Autowired
private EfficiencyStatementManager esm;
@Autowired
private CertificatesManager certificatesManager;
@Autowired
private CertificatesModule certificatesModule;
/**
 * Creates the curriculum statement/certificate list for the given assessed
 * identity.
 *
 * @param ureq current user request
 * @param wControl window control
 * @param stackPanel breadcrumb panel used to push detail views
 * @param assessedIdentity identity whose curricula/statements are shown
 * @param curriculumSecurityCallback curriculum-level permissions
 * @param roleSecurityCallback role-level permissions (columns shown)
 */
public CertificateAndEfficiencyStatementCurriculumListController(UserRequest ureq, WindowControl wControl, BreadcrumbPanel stackPanel,
		Identity assessedIdentity, CurriculumSecurityCallback curriculumSecurityCallback, RoleSecurityCallback roleSecurityCallback) {
	super(ureq, wControl, "curriculum_element_list");
	// Chain the translators of every package whose i18n keys appear in
	// this table (assessment, statement list, curriculum, repository).
	setTranslator(Util.createPackageTranslator(AssessmentModule.class, getLocale(), getTranslator()));
	setTranslator(Util.createPackageTranslator(AssessedIdentityListController.class, getLocale(), getTranslator()));
	setTranslator(Util.createPackageTranslator(CertificateAndEfficiencyStatementListController.class, getLocale(), getTranslator()));
	setTranslator(Util.createPackageTranslator(CurriculumListController.class, getLocale(), getTranslator()));
	setTranslator(Util.createPackageTranslator(RepositoryService.class, getLocale(), getTranslator()));
	setTranslator(Util.createPackageTranslator(CertificateAndEfficiencyStatementCurriculumListController.class, getLocale(), getTranslator()));
	this.stackPanel = stackPanel;
	this.curriculumSecurityCallback = curriculumSecurityCallback;
	this.roleSecurityCallback = roleSecurityCallback;
	this.assessedIdentity = assessedIdentity;
	this.curriculumList = curriculumService.getMyCurriculums(assessedIdentity);
	guestOnly = ureq.getUserSession().getRoles().isGuestOnly();
	mapperThumbnailKey = mapperService.register(null, "repositoryentryImage", new RepositoryEntryImageMapper());

	// Decide whether the current user may upload external certificates:
	// the user himself and user managers are governed by module settings;
	// line managers may upload for plain users of their organisations;
	// (system) administrators always may.
	Roles userRoles = ureq.getUserSession().getRoles();
	if (getIdentity().equals(assessedIdentity)) {
		canUploadExternalCertificate = certificatesModule.canUserUploadExternalCertificates();
	} else if (userRoles.isUserManager()) {
		canUploadExternalCertificate = certificatesModule.canUserManagerUploadExternalCertificates();
	} else if (userRoles.isLineManager()) {
		canUploadExternalCertificate = securityManager.getRoles(assessedIdentity).hasRole(userRoles.getOrganisations(), OrganisationRoles.user);
	} else if (userRoles.isAdministrator() || userRoles.isSystemAdmin()) {
		canUploadExternalCertificate = true;
	}

	initForm(ureq);
	loadModel();
}
@Override
public void activate(UserRequest ureq, List<ContextEntry> entries, StateEntry state) {
	// Deep-link activation is intentionally not supported by this list.
	//
}
@Override
protected void initForm(FormItemContainer formLayout, Controller listener, UserRequest ureq) {
	// Optional upload button, shown only when the current user may add
	// external certificates for the assessed identity (see constructor).
	if (canUploadExternalCertificate) {
		flc.contextPut("uploadCertificate", true);
		uploadCertificateButton = uifactory.addFormLink("upload.certificate", formLayout, Link.BUTTON);
		uploadCertificateButton.setIconLeftCSS("o_icon o_icon_import");
	}

	// Column model: tree column for the element name plus permission-gated
	// progress/status and statement/certificate/calendar columns.
	FlexiTableColumnModel columnsModel = FlexiTableDataModelFactory.createFlexiTableColumnModel();
	columnsModel.addFlexiColumnModel(new DefaultFlexiColumnModel(false, ElementViewCols.key));
	TreeNodeFlexiCellRenderer treeNodeRenderer = new TreeNodeFlexiCellRenderer("select");
	columnsModel.addFlexiColumnModel(new DefaultFlexiColumnModel(ElementViewCols.displayName, treeNodeRenderer));
	columnsModel.addFlexiColumnModel(new DefaultFlexiColumnModel(ElementViewCols.details));
	DefaultFlexiColumnModel elementIdentifierCol = new DefaultFlexiColumnModel(ElementViewCols.identifier, "select");
	elementIdentifierCol.setCellRenderer(new CurriculumElementCompositeRenderer("select", new TextFlexiCellRenderer()));
	columnsModel.addFlexiColumnModel(elementIdentifierCol);
	if (roleSecurityCallback.canViewCourseProgressAndStatus()) {
		columnsModel.addFlexiColumnModel(new DefaultFlexiColumnModel(ElementViewCols.passed));
		columnsModel.addFlexiColumnModel(new DefaultFlexiColumnModel(ElementViewCols.completion));
	}
	columnsModel.addFlexiColumnModel(new DefaultFlexiColumnModel(ElementViewCols.lastModification));
	columnsModel.addFlexiColumnModel(new DefaultFlexiColumnModel(ElementViewCols.lastUserModified));
	if (roleSecurityCallback.canViewEfficiencyStatements()) {
		BooleanCellRenderer efficiencyStatementRenderer = new BooleanCellRenderer(new StaticFlexiCellRenderer("openStatement", translate("table.header.show")), null);
		columnsModel.addFlexiColumnModel(new DefaultFlexiColumnModel(ElementViewCols.efficiencyStatement, efficiencyStatementRenderer));
		columnsModel.addFlexiColumnModel(new DefaultFlexiColumnModel(ElementViewCols.certificate));
		columnsModel.addFlexiColumnModel(new DefaultFlexiColumnModel(ElementViewCols.recertification));
	}
	if (roleSecurityCallback.canViewCalendar()) {
		columnsModel.addFlexiColumnModel(new DefaultFlexiColumnModel(ElementViewCols.calendars));
	}

	tableModel = new CurriculumElementWithViewsDataModel(columnsModel);
	tableEl = uifactory.addTableElement(getWindowControl(), "table", tableModel, 50, false, getTranslator(), formLayout);
	tableEl.setElementCssClass("o_curriculumtable");
	tableEl.setCustomizeColumns(true);
	tableEl.setEmptyTableMessageKey("table.curriculum.empty");
	tableEl.setCssDelegate(this);
	tableEl.setFilters("activity", getFilters(), false);
	tableEl.setSelectedFilterKey(CurriculumElementWithViewsDataModel.FilterKeys.withStatementOnly.name());

	VelocityContainer row = createVelocityContainer("curriculum_element_row");
	row.setDomReplacementWrapperRequired(false); // sets its own DOM id in velocity container
	tableEl.setRowRenderer(row, this);
	// FIX: the persisted-preferences id contained a typo ("c123oach-..."),
	// storing user table settings under a bogus key. Use the intended
	// "coach-" prefix; the "look-" suffix separates the view of somebody
	// else's statements from the user's own view.
	tableEl.setAndLoadPersistedPreferences(ureq, "coach-mentor-curriculum-"
			+ (assessedIdentity.equals(getIdentity()) ? "" : "look-"));
}
/**
 * Builds the entries of the activity filter drop-down: "active",
 * "with statement only", a separator, and a "show all" option.
 *
 * @return the filter list in display order
 */
private List<FlexiTableFilter> getFilters() {
    CurriculumElementWithViewsDataModel.FilterKeys activ = CurriculumElementWithViewsDataModel.FilterKeys.activ;
    CurriculumElementWithViewsDataModel.FilterKeys withStatementOnly = CurriculumElementWithViewsDataModel.FilterKeys.withStatementOnly;
    CurriculumElementWithViewsDataModel.FilterKeys showAll = CurriculumElementWithViewsDataModel.FilterKeys.showAll;

    List<FlexiTableFilter> filterList = new ArrayList<>(5);
    filterList.add(new FlexiTableFilter(translate(activ.i18nHeaderKey()), activ.name()));
    filterList.add(new FlexiTableFilter(translate(withStatementOnly.i18nHeaderKey()), withStatementOnly.name()));
    filterList.add(FlexiTableFilter.SPACER);
    // third constructor argument flags this entry as the "show all" filter
    filterList.add(new FlexiTableFilter(translate(showAll.i18nHeaderKey()), showAll.name(), true));
    return filterList;
}
/**
 * FlexiTableCssDelegate callback: no extra CSS class on the table wrapper.
 */
@Override
public String getWrapperCssClass(FlexiTableRendererType type) {
return null;
}
/**
 * FlexiTableCssDelegate callback: no extra CSS class on the table element.
 */
@Override
public String getTableCssClass(FlexiTableRendererType type) {
return null;
}
/**
 * Computes the CSS classes of one table row: the row kind (curriculum element,
 * repository entry or mixed), its tree depth, the element type class and the
 * repository entry status.
 *
 * @param type the active renderer (extra kind classes only for custom rendering)
 * @param pos the row index in the table model
 * @return the space-separated CSS class string
 */
@Override
public String getRowCssClass(FlexiTableRendererType type, int pos) {
StringBuilder sb = new StringBuilder(64);
CurriculumTreeWithViewsRow rowWithView = tableModel.getObject(pos);
if (type == FlexiTableRendererType.custom) {
sb.append("o_table_row ");
// one kind class per row: pure element, pure entry, or merged element+entry
if (rowWithView.isCurriculumElementOnly()) {
sb.append("o_curriculum_element");
if (rowWithView.getCurriculumElementRepositoryEntryCount() > 1) {
sb.append(" o_with_multi_repository_entries");
}
} else if (rowWithView.isRepositoryEntryOnly()) {
sb.append("o_repository_entry");
} else if (rowWithView.isCurriculumElementWithEntry()) {
sb.append("o_mixed_element");
}
}
if (rowWithView.isCurriculum() || rowWithView.getLevel() == -1) {
sb.append(" o_curriculum");
} else {
// tree depth = number of ancestors, used for the indentation class below
int count = 0;
for (CurriculumTreeWithViewsRow parent = rowWithView.getParent(); parent != null; parent = parent.getParent()) {
count++;
}
// Subtract one level for the curriculum parent
if (count > 0) {
count -= 1;
}
sb.append(" o_curriculum_element_l").append(count);
if (!rowWithView.isRepositoryEntryOnly() && rowWithView.getCurriculumElementTypeCssClass() != null) {
sb.append(" ").append(rowWithView.getCurriculumElementTypeCssClass());
}
if (rowWithView.getEntryStatus() != null) {
sb.append(" repo_status_").append(rowWithView.getEntryStatus());
}
}
return sb.toString();
}
/**
 * FlexiTableComponentDelegate callback: no additional components per row.
 */
@Override
public Iterable<Component> getComponents(int row, Object rowObject) {
return null;
}
/**
 * Rebuilds the whole table model for the assessed identity: efficiency
 * statements merged with their certificates, the curriculum tree with its
 * repository entries, member courses outside any curriculum, and orphan
 * statements without a matching course. Finally sorts all rows and resets
 * the table.
 */
private void loadModel() {
// Load efficiency statements
Map<Long, CertificateAndEfficiencyStatementRow> resourceKeyToStatments = new HashMap<>();
List<CertificateAndEfficiencyStatementRow> statements = new ArrayList<>();
List<UserEfficiencyStatementLight> efficiencyStatementsList = esm.findEfficiencyStatementsLight(assessedIdentity);
// Completion per course entry, taken from the root assessment entries
List<Long> courseEntryKeys = efficiencyStatementsList.stream()
.map(UserEfficiencyStatementLight::getCourseRepoKey)
.filter(key -> key != null)
.collect(Collectors.toList());
Map<Long, Double> courseEntryKeysToCompletion = assessmentService
.loadRootAssessmentEntriesByAssessedIdentity(assessedIdentity, courseEntryKeys).stream()
.filter(ae -> ae.getCompletion() != null)
.collect(Collectors.toMap(
AssessmentEntryScoring::getRepositoryEntryKey,
AssessmentEntryScoring::getCompletion
));
// Wrap each statement in a row, keyed by its archived resource key
for(UserEfficiencyStatementLight efficiencyStatement:efficiencyStatementsList) {
CertificateAndEfficiencyStatementRow wrapper = new CertificateAndEfficiencyStatementRow();
wrapper.setDisplayName(efficiencyStatement.getShortTitle());
wrapper.setPassed(efficiencyStatement.getPassed());
wrapper.setScore(efficiencyStatement.getScore());
wrapper.setEfficiencyStatementKey(efficiencyStatement.getKey());
wrapper.setResourceKey(efficiencyStatement.getArchivedResourceKey());
wrapper.setLastModified(efficiencyStatement.getLastModified());
wrapper.setLastUserModified(efficiencyStatement.getLastUserModified());
Double completion = courseEntryKeysToCompletion.get(efficiencyStatement.getCourseRepoKey());
wrapper.setCompletion(completion);
statements.add(wrapper);
resourceKeyToStatments.put(efficiencyStatement.getArchivedResourceKey(), wrapper);
}
// Merge the last certificates into the statement rows; certificates
// without a statement get a fresh row
List<CertificateLight> certificates = certificatesManager.getLastCertificates(assessedIdentity);
for(CertificateLight certificate:certificates) {
Long resourceKey = certificate.getOlatResourceKey();
CertificateAndEfficiencyStatementRow wrapper = resourceKeyToStatments.get(resourceKey);
if(wrapper == null) {
wrapper = new CertificateAndEfficiencyStatementRow();
wrapper.setDisplayName(certificate.getCourseTitle());
resourceKeyToStatments.put(resourceKey, wrapper);
statements.add(wrapper);
} else {
if(!StringHelper.containsNonWhitespace(wrapper.getDisplayName())) {
wrapper.setDisplayName(certificate.getCourseTitle());
}
wrapper.setResourceKey(resourceKey);
}
if(resourceKey != null && wrapper.getResourceKey() == null) {
wrapper.setResourceKey(resourceKey);
}
wrapper.setCertificate(certificate);
}
// Resolve display names for rows that only carry a resource key
for(CertificateAndEfficiencyStatementRow statment:statements) {
if(!StringHelper.containsNonWhitespace(statment.getDisplayName()) && statment.getResourceKey() != null) {
String displayName = repositoryManager.lookupDisplayNameByResourceKey(statment.getResourceKey());
statment.setDisplayName(displayName);
}
}
// Set of Olat resources with statements
Set<Long> statementEntries = statements.stream().map(CertificateAndEfficiencyStatementRow::getResourceKey).collect(Collectors.toSet());
// Set of entries, which will be added in the next step
Set<Long> alreadyAdded = new HashSet<>();
// Load Curricula
Roles roles = securityManager.getRoles(assessedIdentity);
List<CurriculumTreeWithViewsRow> allRows = new ArrayList<>();
List<CurriculumElementRepositoryEntryViews> elementsWithViewsForAll = curriculumService.getCurriculumElements(assessedIdentity, roles, curriculumList);
Map<Curriculum, List<CurriculumElementRepositoryEntryViews>> elementsMap = elementsWithViewsForAll.stream().collect(Collectors.groupingBy(row -> row.getCurriculumElement().getCurriculum(), Collectors.toList()));
for (Curriculum curriculum : curriculumList) {
CurriculumTreeWithViewsRow curriculumRow = new CurriculumTreeWithViewsRow(curriculum);
List<CurriculumElementRepositoryEntryViews> elementsWithViews = elementsMap.get(curriculum);
if (elementsWithViews != null && !elementsWithViews.isEmpty()) {
List<CurriculumTreeWithViewsRow> rows = new ArrayList<>();
Set<Long> repoKeys = new HashSet<>();
List<OLATResource> resourcesWithAC = new ArrayList<>();
// Collect entry keys plus the resources that carry a valid offer
for (CurriculumElementRepositoryEntryViews elementWithViews : elementsWithViews) {
for (RepositoryEntryMyView entry : elementWithViews.getEntries()) {
repoKeys.add(entry.getKey());
if (entry.isValidOfferAvailable()) {
resourcesWithAC.add(entry.getOlatResource());
}
}
}
List<OLATResourceAccess> resourcesWithOffer = acService.filterResourceWithAC(resourcesWithAC);
// repoKeys is reduced in place to the entries the identity is member of
repositoryService.filterMembership(assessedIdentity, repoKeys);
// One row per element; a single entry is merged into the element row,
// multiple entries become child rows below a separate element row
for (CurriculumElementRepositoryEntryViews elementWithViews : elementsWithViews) {
CurriculumElement element = elementWithViews.getCurriculumElement();
CurriculumElementMembership elementMembership = elementWithViews.getCurriculumMembership();
List<RepositoryEntryMyView> repositoryEntryMyViews = new ArrayList<>();
if (elementWithViews.getEntries() != null && !elementWithViews.getEntries().isEmpty()) {
for (RepositoryEntryMyView entry : elementWithViews.getEntries()) {
// if (statementEntries.contains(entry.getOlatResource().getKey())) {
repositoryEntryMyViews.add(entry);
// }
}
}
if (repositoryEntryMyViews == null || repositoryEntryMyViews.isEmpty()) {
CurriculumTreeWithViewsRow row = new CurriculumTreeWithViewsRow(curriculum, element, elementMembership, 0);
forgeCalendarsLink(row);
rows.add(row);
} else if (repositoryEntryMyViews.size() == 1) {
CurriculumTreeWithViewsRow row = new CurriculumTreeWithViewsRow(curriculum, element, elementMembership, elementWithViews.getEntries().get(0), true);
forge(row, repoKeys, resourcesWithOffer);
forgeCalendarsLink(row);
rows.add(row);
} else {
CurriculumTreeWithViewsRow elementRow = new CurriculumTreeWithViewsRow(curriculum, element, elementMembership, elementWithViews.getEntries().size());
forgeCalendarsLink(elementRow);
rows.add(elementRow);
for (RepositoryEntryMyView entry : repositoryEntryMyViews) {
CurriculumTreeWithViewsRow row = new CurriculumTreeWithViewsRow(curriculum, element, elementMembership, entry, false);
forge(row, repoKeys, resourcesWithOffer);
rows.add(row);
}
}
}
// Wire up parent pointers, statement markers and thumbnails
Map<CurriculumKey, CurriculumTreeWithViewsRow> keyToRow = rows.stream()
.collect(Collectors.toMap(CurriculumTreeWithViewsRow::getKey, row -> row, (row1, row2) -> row1));
rows.forEach(row -> {
row.setParent(keyToRow.get(row.getParentKey()));
if (row.getOlatResource() != null) {
alreadyAdded.add(row.getOlatResource().getKey());
if (statementEntries.contains(row.getOlatResource().getKey())){
row.setHasStatement(true);
}
VFSLeaf image = repositoryManager.getImage(row.getRepositoryEntryResourceable().getResourceableId(), row.getOlatResource());
if (image != null) {
row.setThumbnailRelPath(mapperThumbnailKey.getUrl() + "/" + image.getName());
}
}
});
removeByPermissions(rows);
forgeCurriculumCompletions(rows);
addRoot(rows, curriculumRow);
allRows.add(curriculumRow);
allRows.addAll(rows);
}
}
// Filter for entries which are already in a curriculum
Roles assessedRoles = securityManager.getRoles(assessedIdentity);
SearchMyRepositoryEntryViewParams params = new SearchMyRepositoryEntryViewParams(assessedIdentity, assessedRoles);
params.setMembershipMandatory(true);
List<RepositoryEntryMyView> courses = repositoryService.searchMyView(params, 0, 0);
courses.removeIf(course -> alreadyAdded.contains(course.getOlatResource().getKey()));
// Filter for entries which are without curriculum
CurriculumTreeWithViewsRow foreignEntryParent = new CurriculumTreeWithViewsRow(translate("curriculum.foreign.entries"));
if (!courses.isEmpty()) {
allRows.add(foreignEntryParent);
courses.forEach(course -> {
CurriculumTreeWithViewsRow row = new CurriculumTreeWithViewsRow(course);
forgeSelectLink(row);
forgeCompletion(row,row.getRepositoryEntryCompletion());
forgeDetails(row);
row.setParent(foreignEntryParent);
row.setHasStatement(true);
allRows.add(row);
alreadyAdded.add(course.getOlatResource().getKey());
});
}
// Add Statements which don't belong to any course
Set<Long> orphanCertificates = statementEntries.stream().filter(certificateResourceKey -> !alreadyAdded.contains(certificateResourceKey)).collect(Collectors.toSet());
if (!orphanCertificates.isEmpty()) {
if (!allRows.contains(foreignEntryParent)) {
allRows.add(foreignEntryParent);
}
orphanCertificates.forEach(orphan -> {
// NOTE(review): statementEntries may contain a null resource key; if a null
// orphan slips through, getResourceKey().equals(orphan) can NPE -- verify.
CertificateAndEfficiencyStatementRow statement = statements.stream().filter(certStatement -> certStatement.getResourceKey().equals(orphan)).findFirst().get();
CurriculumTreeWithViewsRow row = new CurriculumTreeWithViewsRow(statement);
row.setParent(foreignEntryParent);
row.setHasStatement(true);
allRows.add(row);
});
}
Collections.sort(allRows, new CurriculumElementViewsRowComparator(getLocale()));
tableModel.setObjects(allRows);
tableEl.reset(true, true, true);
}
/**
 * Removes all rows the assessed identity may not see: first the curriculum
 * member flag is pushed up each row's ancestor chain, then every row that
 * never received the flag is dropped.
 */
private void removeByPermissions(List<CurriculumTreeWithViewsRow> rows) {
    // Phase 1: a member row marks its whole ancestor line as member too
    for (CurriculumTreeWithViewsRow row : rows) {
        if (!row.isCurriculumMember()) {
            continue;
        }
        CurriculumTreeWithViewsRow ancestor = row.getParent();
        while (ancestor != null) {
            ancestor.setCurriculumMember(true);
            ancestor = ancestor.getParent();
        }
    }
    // Phase 2: prune every subtree without the member flag
    rows.removeIf(row -> !row.isCurriculumMember());
}
/**
 * Decorates a repository-entry row: start/book link, access-method price tags,
 * details/mark/select links and the completion progress bar. Does nothing for
 * guests or rows without a repository entry.
 *
 * @param row the row backed by a repository entry
 * @param repoKeys keys of the entries the assessed identity is a member of
 * @param resourcesWithOffer resources that carry a bookable access-control offer
 */
private void forge(CurriculumTreeWithViewsRow row, Collection<Long> repoKeys, List<OLATResourceAccess> resourcesWithOffer) {
if (row.getRepositoryEntryKey() == null || guestOnly) return;// nothing for guests
boolean isMember = repoKeys.contains(row.getRepositoryEntryKey());
row.setMember(isMember);
FormLink startLink = null;
List<PriceMethod> types = new ArrayList<>();
if (row.isAllUsers() || isMember) {
// accessible: plain "start" link
startLink = uifactory.addFormLink("start_" + (++counter), "start", "start", null, null, Link.LINK);
startLink.setElementCssClass("o_start btn-block");
startLink.setCustomEnabledLinkCSS("o_start btn-block");
startLink.setIconRightCSS("o_icon o_icon_start");
} else if (row.isBookable()) {
// collect access control method icons
OLATResource resource = row.getOlatResource();
for (OLATResourceAccess resourceAccess : resourcesWithOffer) {
if (resource.getKey().equals(resourceAccess.getResource().getKey())) {
for (PriceMethodBundle bundle : resourceAccess.getMethods()) {
String type = (bundle.getMethod().getMethodCssClass() + "_icon").intern();
String price = bundle.getPrice() == null || bundle.getPrice().isEmpty() ? "" : PriceFormat.fullFormat(bundle.getPrice());
AccessMethodHandler amh = acModule.getAccessMethodHandler(bundle.getMethod().getType());
String displayName = amh.getMethodName(getLocale());
types.add(new PriceMethod(price, type, displayName));
}
}
}
// bookable but not yet a member: the start link becomes a "book" action
startLink = uifactory.addFormLink("start_" + (++counter), "start", "book", null, null, Link.LINK);
startLink.setElementCssClass("o_start btn-block");
startLink.setCustomEnabledLinkCSS("o_book btn-block");
startLink.setIconRightCSS("o_icon o_icon_start");
}
if(startLink != null) {
startLink.setUserObject(row);
String businessPath = "[RepositoryEntry:" + row.getRepositoryEntryKey() + "]";
String startUrl = BusinessControlFactory.getInstance().getAuthenticatedURLFromBusinessPathString(businessPath);
startLink.setUrl(startUrl);
row.setStartLink(startLink, startUrl);
}
if (!row.isAllUsers() && !row.isGuests()) {
// members only always show lock icon
types.add(new PriceMethod("", "o_ac_membersonly_icon", translate("cif.access.membersonly.short")));
}
if (!types.isEmpty()) {
row.setAccessTypes(types);
}
forgeDetails(row);
forgeMarkLink(row);
forgeSelectLink(row);
forgeCompletion(row, row.getRepositoryEntryCompletion());
}
/**
 * Creates the "details" link of a row and, when the row is backed by a
 * repository entry, attaches the authenticated URL of its info page.
 */
private void forgeDetails(CurriculumTreeWithViewsRow row) {
    FormLink detailsLink = uifactory.addFormLink("details_" + (++counter), "details", "details", null, null, Link.LINK);
    detailsLink.setCustomEnabledLinkCSS("o_details");
    detailsLink.setUserObject(row);

    String detailsUrl = null;
    Long entryKey = row.getRepositoryEntryKey();
    if (entryKey != null) {
        detailsUrl = BusinessControlFactory.getInstance()
                .getAuthenticatedURLFromBusinessPathString("[RepositoryEntry:" + entryKey + "][Infos:0]");
        detailsLink.setUrl(detailsUrl);
    }
    row.setDetailsLink(detailsLink, detailsUrl);
}
/**
 * Creates the bookmark toggle link of a row; guests get none.
 */
private void forgeMarkLink(CurriculumTreeWithViewsRow row) {
    if (guestOnly) {
        return; // guests cannot bookmark
    }
    boolean marked = row.isMarked();
    FormLink markLink = uifactory.addFormLink("mark_" + (++counter), "mark", "", null, null, Link.NONTRANSLATED);
    markLink.setIconLeftCSS(marked ? Mark.MARK_CSS_LARGE : Mark.MARK_ADD_CSS_LARGE);
    markLink.setTitle(translate(marked ? "details.bookmark.remove" : "details.bookmark"));
    markLink.setUserObject(row);
    row.setMarkLink(markLink);
}
/**
 * Creates the title/select link of a repository-entry row; pure curriculum
 * element rows have nothing to select.
 */
private void forgeSelectLink(CurriculumTreeWithViewsRow row) {
    if (row.isCurriculumElementOnly()) {
        return; // structure-only rows carry no entry
    }
    String title = StringHelper.escapeHtml(row.getRepositoryEntryDisplayName());
    FormLink selectLink = uifactory.addFormLink("select_" + (++counter), "select", title, null, null, Link.NONTRANSLATED);
    if (row.isClosed()) {
        selectLink.setIconLeftCSS("o_icon o_CourseModule_icon_closed");
    }
    Long entryKey = row.getRepositoryEntryKey();
    if (entryKey != null) {
        String url = BusinessControlFactory.getInstance()
                .getAuthenticatedURLFromBusinessPathString("[RepositoryEntry:" + entryKey + "]");
        selectLink.setUrl(url);
    }
    selectLink.setUserObject(row);
    row.setSelectLink(selectLink);
}
/**
 * Creates the calendars link of a row when its curriculum element has
 * calendars enabled.
 */
private void forgeCalendarsLink(CurriculumTreeWithViewsRow row) {
    if (!row.isCalendarsEnabled()) {
        return;
    }
    FormLink calendarsLink = uifactory.addFormLink("cals_" + (++counter), "calendars", "calendars", null, null, Link.LINK);
    calendarsLink.setIconLeftCSS("o_icon o_icon-fw o_icon_timetable");
    calendarsLink.setUserObject(row);
    row.setCalendarsLink(calendarsLink);
}
/**
 * Attaches the averaged completion bar to every curriculum element row that
 * does not yet carry one from a child entry.
 */
private void forgeCurriculumCompletions(List<CurriculumTreeWithViewsRow> rows) {
    Map<Long, Double> completionByElementKey = loadCurriculumElementCompletions(rows);
    if (completionByElementKey.isEmpty()) {
        return;
    }
    for (CurriculumTreeWithViewsRow row : rows) {
        // do not override the completion already shown for a child entry
        if (row.getCompletionItem() == null && row.getCurriculumElementKey() != null) {
            forgeCompletion(row, completionByElementKey.get(row.getCurriculumElementKey()));
        }
    }
}
/**
 * Attaches an inline radial progress bar showing the given completion
 * (fraction of 1) to the row; a null completion leaves the row untouched.
 */
private void forgeCompletion(CurriculumTreeWithViewsRow row, Double completion) {
    if (completion == null) {
        return;
    }
    ProgressBarItem bar = new ProgressBarItem("completion_" + row.getKey(), 100,
            completion.floatValue(), Float.valueOf(1), null);
    bar.setWidthInPercent(true);
    bar.setLabelAlignment(LabelAlignment.none);
    bar.setRenderStyle(RenderStyle.radial);
    bar.setRenderSize(RenderSize.inline);
    bar.setBarColor(BarColor.success);
    row.setCompletionItem(bar);
}
/**
 * Hangs every parentless row below the given root row and propagates the
 * statement marker up to that root.
 */
private void addRoot(List<CurriculumTreeWithViewsRow> rows, CurriculumTreeWithViewsRow parent) {
    for (CurriculumTreeWithViewsRow row : rows) {
        if (row.getParent() != null) {
            continue;
        }
        row.setParent(parent);
        if (row.hasStatement()) {
            parent.setHasStatement(true);
        }
    }
}
/**
 * Loads the average completion per curriculum element for all rows with
 * learning progress enabled.
 *
 * @return curriculum element key mapped to its average completion (entries
 *         without a completion value are omitted)
 */
private Map<Long, Double> loadCurriculumElementCompletions(List<CurriculumTreeWithViewsRow> rows) {
    List<Long> elementKeys = new ArrayList<>();
    for (CurriculumTreeWithViewsRow row : rows) {
        if (row.isLearningProgressEnabled()) {
            Long elementKey = row.getKey().getCurriculumElement();
            if (elementKey != null) {
                elementKeys.add(elementKey);
            }
        }
    }
    List<AssessmentEntryCompletion> avgCompletions = assessmentService
            .loadAvgCompletionsByCurriculumElements(assessedIdentity, elementKeys);
    Map<Long, Double> completions = new HashMap<>();
    for (AssessmentEntryCompletion completion : avgCompletions) {
        if (completion.getCompletion() != null) {
            completions.put(completion.getKey(), completion.getCompletion());
        }
    }
    return completions;
}
/**
 * Overridden to suppress the default behavior: inner form events must not
 * mark the surrounding form container dirty.
 */
@Override
protected void propagateDirtinessToContainer(FormItem fiSrc, FormEvent event) {
// do not mark the surrounding container dirty on inner events
}
/**
 * Handles events from child controllers: when the certificate upload dialog
 * finishes successfully the model is reloaded, then the dialog is closed and
 * disposed in any case.
 */
@Override
protected void event(UserRequest ureq, Controller source, Event event) {
    if (source == uploadCertificateController) {
        if (Event.DONE_EVENT == event) {
            loadModel();
            tableEl.reset();
        }
        cmc.deactivate();
        cleanUp();
    }
    super.event(ureq, source, event);
}
/**
 * Disposes the upload dialog and its modal wrapper and clears the references.
 */
private void cleanUp() {
    removeAsListenerAndDispose(cmc);
    removeAsListenerAndDispose(uploadCertificateController);
    cmc = null;
    uploadCertificateController = null;
}
/**
 * Handles DOM-level clicks forwarded by the velocity row renderer: a
 * "select_row" parameter carries the repository entry key of the clicked row;
 * members open the course, others get the details page.
 */
@Override
public void event(UserRequest ureq, Component source, Event event) {
    if (source == mainForm.getInitialComponent() && "ONCLICK".equals(event.getCommand())) {
        String rowKeyStr = ureq.getParameter("select_row");
        if (StringHelper.isLong(rowKeyStr)) {
            try {
                Long rowKey = Long.valueOf(rowKeyStr);
                for (CurriculumTreeWithViewsRow row : tableModel.getObjects()) {
                    if (row != null && rowKey.equals(row.getRepositoryEntryKey())) {
                        if (row.isMember()) {
                            doOpen(ureq, row, null);
                        } else {
                            doOpenDetails(ureq, row);
                        }
                        // Fix: stop after the first match. Several rows can share the
                        // same repository entry key (element row plus entry row); the
                        // old loop kept scanning and triggered the action repeatedly.
                        break;
                    }
                }
            } catch (NumberFormatException e) {
                // isLong() already guards; this is belt-and-braces for odd input
                logWarn("Not a valid long: " + rowKeyStr, e);
            }
        }
    }
    super.event(ureq, source, event);
}
/**
 * Dispatches clicks on the forged row links (start/book, details, select,
 * mark, calendars), the upload-certificate button and table row selections.
 */
@Override
protected void formInnerEvent(UserRequest ureq, FormItem source, FormEvent event) {
if(uploadCertificateButton == source) {
showUploadCertificateController(ureq);
} else if (source instanceof FormLink) {
FormLink link = (FormLink) source;
if ("start".equals(link.getCmd())) {
CurriculumTreeWithViewsRow row = (CurriculumTreeWithViewsRow) link.getUserObject();
doOpen(ureq, row, null);
} else if ("details".equals(link.getCmd())) {
CurriculumTreeWithViewsRow row = (CurriculumTreeWithViewsRow) link.getUserObject();
doOpenDetails(ureq, row);
} else if ("select".equals(link.getCmd())) {
// selecting behaves like a start for members, details otherwise
CurriculumTreeWithViewsRow row = (CurriculumTreeWithViewsRow) link.getUserObject();
if (row.isMember()) {
doOpen(ureq, row, null);
} else {
doOpenDetails(ureq, row);
}
} else if ("mark".equals(link.getCmd())) {
// toggle the bookmark and refresh the link icon/tooltip in place
CurriculumTreeWithViewsRow row = (CurriculumTreeWithViewsRow) link.getUserObject();
boolean marked = doMark(ureq, row);
link.setIconLeftCSS(marked ? "o_icon o_icon_bookmark o_icon-lg" : "o_icon o_icon_bookmark_add o_icon-lg");
link.setTitle(translate(marked ? "details.bookmark.remove" : "details.bookmark"));
link.getComponent().setDirty(true);
row.setMarked(marked);
} else if ("calendars".equals(link.getCmd())) {
CurriculumTreeWithViewsRow row = (CurriculumTreeWithViewsRow) link.getUserObject();
doOpenCalendars(ureq, row);
}
} else if (source == tableEl) {
if (event instanceof SelectionEvent) {
// row selection mirrors the select link behavior
SelectionEvent se = (SelectionEvent) event;
CurriculumTreeWithViewsRow row = tableModel.getObject(se.getIndex());
if (row.isMember()) {
doOpen(ureq, row, null);
} else {
doOpenDetails(ureq, row);
}
}
}
super.formInnerEvent(ureq, source, event);
}
/**
 * Opens the external-certificate upload dialog in a modal, unless one is
 * already being shown.
 */
private void showUploadCertificateController(UserRequest ureq) {
    if (guardModalController(uploadCertificateController)) {
        return; // a dialog is already open
    }
    uploadCertificateController = new UploadExternalCertificateController(ureq, getWindowControl(), assessedIdentity);
    listenTo(uploadCertificateController);

    cmc = new CloseableModalController(getWindowControl(), null,
            uploadCertificateController.getInitialComponent(), true, translate("upload.certificate"), true);
    cmc.addControllerListener(this);
    cmc.activate();
}
@Override
protected void formOK(UserRequest ureq) {
// nothing to submit: the form only hosts the table and its links
}
/**
 * Launches the repository entry of the row, optionally at a sub path such as
 * "[Infos:0]". Corrupted courses are logged and reported to the user.
 */
private void doOpen(UserRequest ureq, CurriculumTreeWithViewsRow row, String subPath) {
    StringBuilder businessPath = new StringBuilder("[RepositoryEntry:")
            .append(row.getRepositoryEntryKey()).append("]");
    if (subPath != null) {
        businessPath.append(subPath);
    }
    try {
        NewControllerFactory.getInstance().launch(businessPath.toString(), ureq, getWindowControl());
    } catch (CorruptedCourseException e) {
        logError("Course corrupted: " + row.getKey() + " (" + row.getOlatResource().getResourceableId() + ")", e);
        showError("cif.error.corrupted");
    }
}
/**
 * Shows the info page of the row's repository entry. To be more consistent:
 * course members see the info page within the course, non-members see it
 * outside on the stack panel.
 */
private void doOpenDetails(UserRequest ureq, CurriculumTreeWithViewsRow row) {
    if (row.isMember()) {
        doOpen(ureq, row, "[Infos:0]");
        return;
    }
    removeAsListenerAndDispose(detailsCtrl);

    Long repoEntryKey = row.getRepositoryEntryKey();
    if (repoEntryKey == null) {
        showInfo("curriculum.element.empty");
        return;
    }
    // Fix: load the entry only after the null check; previously loadByKey(null)
    // was executed before the key was validated.
    RepositoryEntry entry = repositoryService.loadByKey(repoEntryKey);
    if (entry == null) {
        showWarning("repositoryentry.not.existing");
        return;
    }
    OLATResourceable ores = OresHelper.createOLATResourceableInstance("Infos", 0l);
    WindowControl bwControl = BusinessControlFactory.getInstance().createBusinessWindowControl(ores, null, getWindowControl());
    detailsCtrl = new RepositoryEntryDetailsController(ureq, bwControl, entry, false);
    listenTo(detailsCtrl);
    addToHistory(ureq, detailsCtrl);
    stackPanel.pushController(row.getRepositoryEntryDisplayName(), detailsCtrl);
}
/**
 * Opens the aggregated calendar view of the row's curriculum element: collects
 * the course entries of that element and all member child elements, then pushes
 * the calendar controller on the stack panel.
 */
private void doOpenCalendars(UserRequest ureq, CurriculumTreeWithViewsRow row) {
removeAsListenerAndDispose(calendarsCtrl);
OLATResourceable ores = OresHelper.createOLATResourceableInstance("Calendars", row.getCurriculumElementKey());
WindowControl bwControl = BusinessControlFactory.getInstance().createBusinessWindowControl(ores, null, getWindowControl());
CurriculumElement element = curriculumService
.getCurriculumElement(new CurriculumElementRefImpl(row.getCurriculumElementKey()));
List<CurriculumTreeWithViewsRow> rows = tableModel.getObjects();
Set<Long> entryKeys = new HashSet<>();
// NOTE(review): the loop variable is typed CurriculumTreeWithView while the list
// holds CurriculumTreeWithViewsRow -- presumably a supertype/interface; confirm.
for (CurriculumTreeWithView elementWithView : rows) {
if (elementWithView.isCurriculumMember()
&& !elementWithView.getEntries().isEmpty()
&& elementWithView.isParentOrSelf(row)) {
// only course entries contribute calendars
for (RepositoryEntryMyView view : elementWithView.getEntries()) {
if ("CourseModule".equals(view.getOlatResource().getResourceableTypeName())) {
entryKeys.add(view.getKey());
}
}
}
}
List<RepositoryEntry> entries = repositoryService.loadByKeys(entryKeys);
calendarsCtrl = new CurriculumElementCalendarController(ureq, bwControl, element, entries, curriculumSecurityCallback);
listenTo(calendarsCtrl);
stackPanel.pushController(translate("calendars"), calendarsCtrl);
}
/**
 * Toggles the bookmark on the row's repository entry and broadcasts the
 * change to interested listeners.
 *
 * @return true when the entry is bookmarked afterwards, false when removed
 */
private boolean doMark(UserRequest ureq, CurriculumTreeWithViewsRow row) {
    Long entryKey = row.getRepositoryEntryKey();
    OLATResourceable item = OresHelper.createOLATResourceableInstance("RepositoryEntry", entryKey);
    RepositoryEntryRef ref = new RepositoryEntryRefImpl(entryKey);

    boolean nowMarked;
    if (markManager.isMarked(item, getIdentity(), null)) {
        markManager.removeMark(item, getIdentity(), null);
        nowMarked = false;
    } else {
        markManager.setMark(item, getIdentity(), null, "[RepositoryEntry:" + item.getResourceableId() + "]");
        nowMarked = true;
    }
    Change change = nowMarked ? Change.addBookmark : Change.removeBookmark;
    EntryChangedEvent e = new EntryChangedEvent(ref, getIdentity(), change, "curriculum");
    ureq.getUserSession().getSingleUserEventCenter().fireEventToListenersOf(e, RepositoryService.REPOSITORY_EVENT_ORES);
    return nowMarked;
}
}
|
import click
from .index_selection import *
import logging
logger = logging.getLogger(__name__)
# Click sub-group "optimize": the callback only receives the shared options;
# the actual work is done by the attached index-selection subcommand.
@cli.group('optimize')
@click.option('--delay', type=int, default=0, show_default=True, expose_value=True,
help='Pause *n* seconds after optimizing an index.')
@click.option('--max_num_segments', type=int, default=2, show_default=True,
expose_value=True,
help='Merge to this number of segments per shard.')
# NOTE(review): 218600 seconds is ~2.5 days; this looks like a typo for the
# conventional 21600 (6 h) optimize timeout -- confirm before changing.
@click.option('--request_timeout', type=int, default=218600, show_default=True,
expose_value=True,
help='Allow this many seconds before the transaction times out.')
@click.pass_context
def optimize(ctx, delay, max_num_segments, request_timeout):
"""Optimize Indices"""
# The option values are received here but not used in the body; presumably
# subcommands read them via the click context -- TODO confirm.
# Attach the shared `indices` selection subcommand (from .index_selection).
optimize.add_command(indices)
|
// SPDX-License-Identifier: BSD-2-Clause
//
// Copyright (c) 2016-2019, NetApp, Inc.
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// 1. Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
#pragma once
#include <stdint.h>
#include <quant/quant.h>
#include "conn.h"
#include "quic.h" // IWYU pragma: keep
/// Signature of the public API functions tracked by the event loop.
typedef void (*func_ptr)(void);

/// Currently executing API call and its connection/stream arguments; the
/// maybe_api_return() macros compare against these to decide when to hand
/// control back to the caller.
extern func_ptr api_func;
extern void * api_conn;
extern void * api_strm;

/// Event loop lifecycle: initialize, read the cached time, break out of the
/// loop, and run it on behalf of API function @p f.
extern void loop_init(void);
extern uint64_t loop_now(void);
extern void loop_break(void);
extern void __attribute__((nonnull(1))) loop_run(struct w_engine * const w,
const func_ptr f,
struct q_conn * const c,
struct q_stream * const s);
// see https://stackoverflow.com/a/45600545/2240756
//
// Macro overloading by argument count: maybe_api_return(...) expands to
// maybe_api_return2 or maybe_api_return3 depending on how many arguments
// were passed (counting supports 1..9 arguments).
#define OVERLOADED_MACRO(M, ...) OVR(M, CNT_ARGS(__VA_ARGS__))(__VA_ARGS__)
// Paste macro name and argument count into the concrete macro name.
#define OVR(macro_name, nargs) OVR_EXPAND(macro_name, nargs)
#define OVR_EXPAND(macro_name, nargs) macro_name##nargs
// Shift-and-match trick to count the variadic arguments.
#define CNT_ARGS(...) ARG_MATCH(__VA_ARGS__, 9, 8, 7, 6, 5, 4, 3, 2, 1)
#define ARG_MATCH(_1, _2, _3, _4, _5, _6, _7, _8, _9, N, ...) N
#define maybe_api_return(...) \
__extension__(OVERLOADED_MACRO(maybe_api_return, __VA_ARGS__))
// Extra diagnostics are compiled out unless DEBUG_EXTRA is defined.
#ifdef DEBUG_EXTRA
#define DEBUG_EXTRA_warn warn
#else
#define DEBUG_EXTRA_warn(...)
#endif
/// If current API function and argument match @p func and @p arg - and @p strm
/// if it is non-zero - exit the event loop.
///
/// The GCC statement expression evaluates to @c api_func == 0, i.e. it is true
/// once the loop was exited (loop_break() presumably clears @c api_func --
/// NOTE(review): confirm in the loop implementation). No comments inside the
/// macro body: line splicing happens before comment removal, so a // comment
/// would swallow the rest of the macro.
///
/// @param func The API function to potentially return to.
/// @param conn The connection to check API activity on.
/// @param strm The stream to check API activity on.
///
/// @return True if the event loop was exited.
///
#define maybe_api_return3(func, conn, strm) \
__extension__({ \
if (unlikely(api_func == (func_ptr)(&(func)) && api_conn == (conn) && \
((strm) == 0 || api_strm == (strm)))) { \
loop_break(); \
DEBUG_EXTRA_warn(DBG, #func "(" #conn ", " #strm \
") done, exiting event loop"); \
} \
api_func == 0; \
})
/// If current API argument matches @p arg - and @p strm if it is non-zero -
/// exit the event loop (for any active API function).
///
/// Like maybe_api_return3, the statement expression's result is
/// @c api_func == 0, i.e. whether the loop was broken out of.
///
/// @param conn The connection to check API activity on.
/// @param strm The stream to check API activity on.
///
/// @return True if the event loop was exited.
///
#define maybe_api_return2(conn, strm) \
__extension__({ \
if (unlikely(api_conn == (conn) && \
((strm) == 0 || api_strm == (strm)))) { \
loop_break(); \
DEBUG_EXTRA_warn(DBG, "<any>(" #conn ", " #strm \
") done, exiting event loop"); \
} \
api_func == 0; \
})
|
// Base hosts of the ifeng sports backends.
const API_URL = 'https://api.iclient.ifeng.com';
// const API_URL = 'HTTPS://i.sports.ifeng.com';
const API_URL2 = 'https://nine.ifeng.com';
// NOTE(review): API_LIVE is declared but not referenced in this module.
const API_LIVE = 'https://sports.live.ifeng.com/API';
// Bluebird promise shim bundled with the mini program.
const Promise = require('./bluebird')
/**
 * Performs a wx.request against one of the API hosts, wrapped in a Promise.
 *
 * @param {string} type   path appended to the base URL
 * @param {Object} params optional query parameters
 * @param {*} flag        when loosely equal to true, use API_URL2 instead of API_URL
 * @returns {Promise}     resolves with the wx.request success response
 */
function fetchApi (type, params, flag) {
  const baseUrl = (flag == true) ? API_URL2 : API_URL
  return new Promise(function (resolve, reject) {
    wx.request({
      url: baseUrl + '/' + type,
      data: Object.assign({}, params),
      header: { 'Content-Type': 'json' },
      success: resolve,
      fail: reject
    })
  })
}
module.exports = {
API_URL: API_URL,
API_URL2: API_URL2,
find(type, params, flag=false) {
return fetchApi(type, params, flag)
.then(res => res.data)
},
findLive(type, params) {
return fetchApi(type, params, true)
.then(res => res.data)
},
findOne (id) {
return fetchApi('subject/' + id)
.then(res => res.data)
}
}
|
<reponame>Haarmees/azure-devops-intellij<gh_stars>10-100
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See License.txt in the project root.
package com.microsoft.alm.plugin.idea.git.extensions;
import com.intellij.openapi.project.Project;
import com.intellij.util.AuthData;
import com.microsoft.alm.plugin.authentication.AuthenticationInfo;
import com.microsoft.alm.plugin.context.ServerContext;
import com.microsoft.alm.plugin.context.ServerContextManager;
import com.microsoft.alm.plugin.idea.IdeaAbstractTest;
import com.microsoft.alm.plugin.idea.common.settings.TeamServicesSecrets;
import com.microsoft.alm.plugin.idea.git.utils.TfGitHelper;
import git4idea.repo.GitRemote;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mockito;
import org.powermock.api.mockito.PowerMockito;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;
import java.net.URI;
import java.util.Arrays;
import java.util.Collections;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.when;
@RunWith(PowerMockRunner.class)
@PrepareForTest({GitRemote.class, TeamServicesSecrets.class, TfGitHelper.class})
public class TfGitHttpAuthDataProviderTest extends IdeaAbstractTest {
// Canonical Azure DevOps organization URL shared by all tests.
private final String SERVER_URL = "https://dev.azure.com/username";
// Credentials returned by the mocked server context in setUpTest().
private final AuthenticationInfo authenticationInfo = new AuthenticationInfo(
"userName",
"password",
"serverUri",
"userNameForDisplay");
// Mocked IDE project handed to the provider under test.
private final Project project = Mockito.mock(Project.class);
// System under test, re-created for every test in setUpTest().
private TfGitHttpAuthDataProvider authDataProvider;
@Before
public void setUpTest() {
PowerMockito.mockStatic(TeamServicesSecrets.class, TfGitHelper.class);
authDataProvider = new TfGitHttpAuthDataProvider();
ServerContext context = Mockito.mock(ServerContext.class);
when(context.getUri()).thenReturn(URI.create(SERVER_URL));
when(context.getKey()).thenReturn(ServerContext.getKey(SERVER_URL));
when(context.getAuthenticationInfo()).thenReturn(authenticationInfo);
ServerContextManager.getInstance().add(context);
}
@After
public void cleanupTest() {
ServerContextManager.getInstance().remove(SERVER_URL);
}
@Test
public void httpAuthShouldWorkOnCanonicalUrl() {
AuthData authData = authDataProvider.getAuthData(project, "https://dev.azure.com/username");
assertAuthenticationInfoEquals(authenticationInfo, authData);
}
@Test
public void testAuthDataWithValidRemoteUrl() {
AuthData result = authDataProvider.getAuthData(project, "https://username@dev.azure.com");
assertNull(result);
}
@Test
public void testAuthDataWithZeroRemotes() {
when(TfGitHelper.getTfGitRemotes(any(Project.class))).thenReturn(Collections.emptyList());
AuthData result = authDataProvider.getAuthData(project, "https://dev.azure.com");
assertNull(result);
}
@Test
public void testAuthDataWithOneRemote() {
GitRemote gitRemote = PowerMockito.mock(GitRemote.class);
when(gitRemote.getFirstUrl()).thenReturn("https://dev.azure.com/username/myproject/_git/myproject");
when(TfGitHelper.getTfGitRemotes(any(Project.class))).thenReturn(Collections.singleton(gitRemote));
AuthData result = authDataProvider.getAuthData(project, "https://dev.azure.com");
assertAuthenticationInfoEquals(authenticationInfo, result);
}
@Test
public void testAuthDataWithTwoRemotesSameOrganization() {
GitRemote gitRemote1 = Mockito.mock(GitRemote.class);
when(gitRemote1.getFirstUrl()).thenReturn("https://dev.azure.com/username/myproject1/_git/myproject1");
GitRemote gitRemote2 = Mockito.mock(GitRemote.class);
when(gitRemote2.getFirstUrl()).thenReturn("https://dev.azure.com/username/myproject2/_git/myproject2");
when(TfGitHelper.getTfGitRemotes(any(Project.class))).thenReturn(Arrays.asList(gitRemote1, gitRemote2));
AuthData result = authDataProvider.getAuthData(project, "https://dev.azure.com");
assertAuthenticationInfoEquals(authenticationInfo, result);
}
@Test
public void testAuthDataWithTwoRemotesDifferentOrganizations() {
GitRemote gitRemote1 = Mockito.mock(GitRemote.class);
when(gitRemote1.getFirstUrl()).thenReturn("https://dev.azure.com/username1/myproject1/_git/myproject1");
GitRemote gitRemote2 = Mockito.mock(GitRemote.class);
when(gitRemote2.getFirstUrl()).thenReturn("https://dev.azure.com/username2/myproject2/_git/myproject2");
when(TfGitHelper.getTfGitRemotes(any(Project.class))).thenReturn(Arrays.asList(gitRemote1, gitRemote2));
AuthData result = authDataProvider.getAuthData(project, "https://dev.azure.com");
assertNull(result);
}
private static void assertAuthenticationInfoEquals(AuthenticationInfo authenticationInfo, AuthData result) {
assertNotNull(result);
assertEquals(authenticationInfo.getUserName(), result.getLogin());
assertEquals(authenticationInfo.getPassword(), result.getPassword());
}
}
|
<gh_stars>1-10
/*
* Copyright 2018 - Swiss Data Science Center (SDSC)
* A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and
* Eidgenössische Technische Hochschule Zürich (ETHZ).
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package controllers.storageBackends
import java.io._
import java.util.concurrent.TimeUnit
import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.StreamConverters
import akka.util.ByteString
import javax.inject.{ Inject, Singleton }
import models.Repository
import org.eclipse.jgit.errors.{ CorruptObjectException, PackProtocolException, UnpackException }
import org.eclipse.jgit.storage.file.FileRepositoryBuilder
import org.eclipse.jgit.transport.RefAdvertiser.PacketLineOutRefAdvertiser
import org.eclipse.jgit.transport._
import play.api.libs.concurrent.ActorSystemProvider
import play.api.libs.streams.Accumulator
import play.api.mvc.Results._
import play.api.mvc.{ RequestHeader, Result }
import play.api.{ Configuration, Logger }
import scala.concurrent.{ ExecutionContext, Future }
import scala.concurrent.duration.{ FiniteDuration, _ }
import scala.util.Try
/**
 * Git storage backend that serves bare repositories from the local file
 * system, implementing the smart-HTTP endpoints (ref advertisement,
 * upload-pack, receive-pack) via JGit's UploadPack/ReceivePack.
 *
 * Created by julien on 01/02/18.
 */
@Singleton
class LocalGitBackend @Inject() (
    configuration: Configuration,
    actorSystemProvider: ActorSystemProvider,
    implicit val ec: ExecutionContext
) extends GitBackend {

  lazy val logger: Logger = Logger( "application.storageBackends.LocalGitBackend" )

  // Directory under which all served git repositories live.
  private[this] lazy val rootDir: String = configuration.get[String]( "storage.backend.localgit.root" )

  /*
   * derived from org.eclipse.jgit.http.server.ServletUtils.consumeRequestBody to remove dependencies on servlets
   */
  // Drains and closes the request body so the HTTP connection stays usable
  // even when the git command aborted mid-stream. All IOExceptions are
  // deliberately swallowed (best-effort cleanup).
  def flushBody( in: InputStream ) {
    if ( in == null )
      return
    try {
      while ( 0 < in.skip( 1024 ) || 0 <= in.read() ) {
        // Discard until EOF.
      }
    }
    catch {
      case e: IOException => {}
      // Discard IOException during read or skip.
    }
    finally {
      try {
        in.close()
      }
      catch {
        case e: IOException => {
          // Discard IOException during close of input stream.
        }
      }
    }
  }

  /**
   * GET /info/refs: advertises the repository's refs for the requested
   * service (git-upload-pack for fetch/clone, git-receive-pack for push),
   * written in pkt-line format into a chunked response.
   */
  override def getRefs( request: RequestHeader, url: String, user: String ): Future[Result] = Future {
    val svc = request.queryString.getOrElse( "service", Seq( "" ) ).head
    val output = Try {
      // The advertisement is produced asynchronously into the OutputStream
      // that backs the chunked response body.
      StreamConverters.asOutputStream( 10.seconds ).mapMaterializedValue { os =>
        Future {
          try {
            val plo = new PacketLineOut( os )
            plo.writeString( "# service=" + svc + "\n" )
            plo.end()
            if ( svc.equals( "git-upload-pack" ) ) {
              val up = new UploadPack( FileRepositoryBuilder.create( new File( rootDir, url ) ) )
              try {
                up.setBiDirectionalPipe( false )
                up.sendAdvertisedRefs( new PacketLineOutRefAdvertiser( plo ) )
              }
              finally {
                up.getRevWalk.close()
              }
            }
            else if ( svc.equals( "git-receive-pack" ) ) {
              val rep = new ReceivePack( FileRepositoryBuilder.create( new File( rootDir, url ) ) )
              try {
                rep.sendAdvertisedRefs( new PacketLineOutRefAdvertiser( plo ) )
              }
              finally {
                rep.getRevWalk.close()
              }
            }
            os.close()
          }
          finally {
            // Always terminate the stream; a second close() is harmless.
            os.close()
          }
        }
      }
    }
    output.map( o => Ok.chunked( o ).as( "application/x-" + svc + "-advertisement" ) )
      .getOrElse( InternalServerError )
  }

  /**
   * POST /git-upload-pack: streams the request body into JGit's UploadPack
   * (fetch/clone negotiation + packfile) and streams the result back.
   */
  override def upload( req: RequestHeader, url: String, user: String ): Accumulator[ByteString, Result] = {
    implicit val actorSystem: ActorSystem = actorSystemProvider.get
    implicit val mat: ActorMaterializer = ActorMaterializer()
    Accumulator.source[ByteString].mapFuture { source =>
      Future {
        // Bridge the incoming Akka source to the blocking InputStream JGit expects.
        val inputStream = source.runWith(
          StreamConverters.asInputStream( FiniteDuration( 3, TimeUnit.SECONDS ) )
        )
        val output = Try {
          StreamConverters.asOutputStream( 10.seconds ).mapMaterializedValue { os =>
            Future {
              try {
                val up = new UploadPack( FileRepositoryBuilder.create( new File( rootDir, url ) ) )
                up.setBiDirectionalPipe( false )
                up.upload( inputStream, os, null )
                os.close()
              }
              catch {
                case e: ServiceMayNotContinueException =>
                  // JGit already wrote an error to the client when isOutput;
                  // drain the request so the connection can be reused.
                  if ( e.isOutput ) {
                    flushBody( inputStream )
                    os.close()
                  }
                  throw e
                case e: UploadPackInternalServerErrorException =>
                  // Special case exception, error message was sent to client.
                  flushBody( inputStream )
                  os.close()
              }
              finally {
                os.close()
              }
            }
          }
        }
        output.map( o => Ok.chunked( o ).as( "application/x-git-upload-pack-result" ) )
          .getOrElse( InternalServerError )
      }
    }
  }

  /**
   * POST /git-receive-pack: streams an incoming push into JGit's ReceivePack
   * and streams the status report back.
   */
  override def receive( req: RequestHeader, url: String, user: String ): Accumulator[ByteString, Result] = {
    implicit val actorSystem: ActorSystem = actorSystemProvider.get
    implicit val mat: ActorMaterializer = ActorMaterializer()
    Accumulator.source[ByteString].mapFuture { source =>
      Future {
        val inputStream = source.runWith(
          StreamConverters.asInputStream( FiniteDuration( 3, TimeUnit.SECONDS ) )
        )
        val output = Try {
          StreamConverters.asOutputStream( 10.seconds ).mapMaterializedValue { os =>
            Future {
              try {
                val rep = new ReceivePack( FileRepositoryBuilder.create( new File( rootDir, url ) ) )
                rep.setBiDirectionalPipe( false )
                rep.receive( inputStream, os, null )
                os.close()
              }
              catch {
                // Corrupt or malformed pack data: drain the request body,
                // close the response, and surface the failure.
                case e@( _: CorruptObjectException | _: UnpackException | _: PackProtocolException ) =>
                  flushBody( inputStream )
                  os.close()
                  throw e
              }
              finally {
                os.close()
              }
            }
          }
        }
        output.map( o => Ok.chunked( o ).as( "application/x-git-receive-pack-result" ) )
          .getOrElse( InternalServerError )
      }
    }
  }

  /**
   * Creates a new bare repository under rootDir at the requested path.
   * Returns the path on success, or None when the path is rejected.
   * The ".." check is a basic path-traversal guard keeping repositories
   * inside rootDir.
   */
  override def createRepo( request: Repository ): Future[Option[String]] = Future {
    if ( request.path.contains( ".." ) ) {
      None
    }
    else {
      val f = new File( rootDir, request.path )
      f.mkdirs()
      FileRepositoryBuilder.create( f ).create( true )
      Some( request.path )
    }
  }
}
|
#!/bin/sh
# Container entry point for deconzcommunity/deconz: remaps uid/gid, fixes the
# Zigbee adapter's group permissions, optionally starts a (no)VNC session,
# then launches deCONZ as the unprivileged "deconz" user.

if [ "$DECONZ_START_VERBOSE" = 1 ]; then
  set -x
fi

echo "[deconzcommunity/deconz] Starting deCONZ..."
echo "[deconzcommunity/deconz] Current deCONZ version: $DECONZ_VERSION"
echo "[deconzcommunity/deconz] Web UI port: $DECONZ_WEB_PORT"
echo "[deconzcommunity/deconz] Websockets port: $DECONZ_WS_PORT"

DECONZ_OPTS="--auto-connect=1 \
  --appdata=/opt/deCONZ \
  --dbg-info=$DEBUG_INFO \
  --dbg-aps=$DEBUG_APS \
  --dbg-zcl=$DEBUG_ZCL \
  --dbg-zdp=$DEBUG_ZDP \
  --dbg-otau=$DEBUG_OTAU \
  --dbg-error=$DEBUG_ERROR \
  --http-port=$DECONZ_WEB_PORT \
  --ws-port=$DECONZ_WS_PORT"

# Remap the container user/group to the requested host ids. Defaults are
# applied BEFORE the comparison so an unset variable no longer triggers a
# pointless usermod/groupmod (the image already ships uid/gid 1000).
echo "[deconzcommunity/deconz] Modifying user and group ID"
DECONZ_UID=${DECONZ_UID:-1000}
if [ "$DECONZ_UID" != 1000 ]; then
  sudo usermod -o -u "$DECONZ_UID" deconz
fi
DECONZ_GID=${DECONZ_GID:-1000}
if [ "$DECONZ_GID" != 1000 ]; then
  sudo groupmod -o -g "$DECONZ_GID" deconz
fi

# Locate the Zigbee adapter: honor an explicit DECONZ_DEVICE, otherwise probe
# the usual serial device nodes (the last one that exists wins).
echo "[deconzcommunity/deconz] Checking device group ID"
if [ "$DECONZ_DEVICE" != 0 ]; then
  DEVICE=$DECONZ_DEVICE
else
  if [ -e /dev/ttyUSB0 ]; then
    DEVICE=/dev/ttyUSB0
  fi
  if [ -e /dev/ttyACM0 ]; then
    DEVICE=/dev/ttyACM0
  fi
  if [ -e /dev/ttyAMA0 ]; then
    DEVICE=/dev/ttyAMA0
  fi
  if [ -e /dev/ttyS0 ]; then
    DEVICE=/dev/ttyS0
  fi
fi

# Match the container's "dialout" gid to the device's owning group so the
# deconz user can open it. Guarded with -n: `stat` on an empty path would
# otherwise fail when no adapter was found.
if [ -n "$DEVICE" ]; then
  DIALOUTGROUPID=$(stat --printf='%g' "$DEVICE")
fi
DIALOUTGROUPID=${DIALOUTGROUPID:-20}
if [ "$DIALOUTGROUPID" != 20 ]; then
  sudo groupmod -o -g "$DIALOUTGROUPID" dialout
fi

# Workaround if the group of the device doesn't have any permissions.
if [ -n "$DEVICE" ]; then
  GROUPPERMISSIONS=$(stat -c "%A" "$DEVICE" | cut -c 5-7)
  if [ "$GROUPPERMISSIONS" = "---" ]; then
    sudo chmod g+rw "$DEVICE"
  fi
fi

if [ "$DECONZ_VNC_MODE" != 0 ]; then
  if [ "$DECONZ_VNC_PORT" -lt 5900 ]; then
    echo "[deconzcommunity/deconz] ERROR - VNC port must be 5900 or greater!"
    exit 1
  fi
  # The X display number is the offset from the conventional VNC base port.
  DECONZ_VNC_DISPLAY=:$(($DECONZ_VNC_PORT - 5900))
  echo "[deconzcommunity/deconz] VNC port: $DECONZ_VNC_PORT"

  # Persist VNC state under /opt/deCONZ so it survives container recreation.
  if [ ! -e /opt/deCONZ/vnc ]; then
    mkdir /opt/deCONZ/vnc
  fi
  sudo -u deconz ln -sf /opt/deCONZ/vnc /home/deconz/.vnc
  chown deconz:deconz /opt/deCONZ -R

  # Set VNC password, optionally read from a secrets file.
  if [ "$DECONZ_VNC_PASSWORD_FILE" != 0 ] && [ -f "$DECONZ_VNC_PASSWORD_FILE" ]; then
    DECONZ_VNC_PASSWORD=$(cat "$DECONZ_VNC_PASSWORD_FILE")
  fi
  echo "$DECONZ_VNC_PASSWORD" | tigervncpasswd -f > /opt/deCONZ/vnc/passwd
  chmod 600 /opt/deCONZ/vnc/passwd
  chown deconz:deconz /opt/deCONZ/vnc/passwd

  # Cleanup previous VNC session data
  sudo -u deconz tigervncserver -kill "$DECONZ_VNC_DISPLAY"

  # Set VNC security
  sudo -u deconz tigervncserver -SecurityTypes VncAuth,TLSVnc "$DECONZ_VNC_DISPLAY"

  # Export VNC display variable
  export DISPLAY=$DECONZ_VNC_DISPLAY

  if [ "$DECONZ_NOVNC_PORT" = 0 ]; then
    echo "[deconzcommunity/deconz] noVNC Disabled"
  else
    if [ "$DECONZ_NOVNC_PORT" -lt 6080 ]; then
      echo "[deconzcommunity/deconz] ERROR - NOVNC port must be 6080 or greater!"
      exit 1
    fi

    # Assert a valid SSL certificate; regenerate when missing or expired.
    NOVNC_CERT="/opt/deCONZ/vnc/novnc.pem"
    if [ -f "$NOVNC_CERT" ]; then
      if ! openssl x509 -noout -in "$NOVNC_CERT" -checkend 0 > /dev/null; then
        echo "[deconzcommunity/deconz] The noVNC SSL certificate has expired; generating a new certificate now."
        rm "$NOVNC_CERT"
      fi
    fi
    if [ ! -f "$NOVNC_CERT" ]; then
      openssl req -x509 -nodes -newkey rsa:2048 -keyout "$NOVNC_CERT" -out "$NOVNC_CERT" -days 365 -subj "/CN=deconz"
    fi
    chown deconz:deconz "$NOVNC_CERT"

    # Start noVNC: websockify bridges the VNC display to a web socket.
    sudo -u deconz websockify -D --web=/usr/share/novnc/ --cert="$NOVNC_CERT" "$DECONZ_NOVNC_PORT" localhost:"$DECONZ_VNC_PORT"
    echo "[deconzcommunity/deconz] NOVNC port: $DECONZ_NOVNC_PORT"
  fi
else
  echo "[deconzcommunity/deconz] VNC Disabled"
  # Headless: run deCONZ with Qt's minimal platform plugin.
  DECONZ_OPTS="$DECONZ_OPTS -platform minimal"
fi

if [ "$DECONZ_DEVICE" != 0 ]; then
  DECONZ_OPTS="$DECONZ_OPTS --dev=$DECONZ_DEVICE"
fi
if [ "$DECONZ_UPNP" != 1 ]; then
  DECONZ_OPTS="$DECONZ_OPTS --upnp=0"
fi

# OTA update images live under /opt/deCONZ/otau; deCONZ expects them in ~/otau.
mkdir -p /opt/deCONZ/otau
sudo -u deconz ln -sf /opt/deCONZ/otau /home/deconz/otau
chown deconz:deconz /opt/deCONZ -R

# DECONZ_OPTS is intentionally left unquoted: it carries multiple arguments.
sudo -u deconz /usr/bin/deCONZ $DECONZ_OPTS
|
const users = require('./users/users.service.js');
const projects = require('./projects/projects.service.js');
const teams = require('./teams/teams.service.js');
const messages = require('./messages/messages.service.js');
const friends = require('./friends/friends.service.js');
module.exports = function () {
const app = this; // eslint-disable-line no-unused-vars
app.configure(users);
app.configure(projects);
app.configure(teams);
app.configure(messages);
app.configure(friends);
};
|
#!/bin/bash
# Build the Storybook static site and publish it to the gh-pages branch.
set -e

# Start from a clean output directory.
rm -rf storybook-static
STORYBOOK_ENV=1 ./node_modules/.bin/build-storybook

# build-storybook does not copy the font assets; mirror them in manually.
mkdir -p storybook-static/static/fonts/opensans storybook-static/static/fonts/rubik
cp -R static/fonts/opensans storybook-static/static/fonts
cp -R static/fonts/rubik storybook-static/static/fonts

# Publish: the output dir becomes a throwaway git repo whose single commit is
# force-pushed to gh-pages. NOTE(review): -f rewrites gh-pages history on
# every run -- intentional for a generated site, but worth confirming.
cd storybook-static
git init
git remote add origin git@github.com:LedgerHQ/ledger-live-desktop.git
git add .
git commit -m 'deploy storybook'
git push -f origin master:gh-pages
printf "deployed with success!"
|
import { Component, OnInit, Input } from '@angular/core';
import { ActivatedRoute } from '@angular/router';
import { Location } from '@angular/common';
import { TServerResponse } from '../../struct/types';
import { TRouteData } from '../../struct/types';
import { TEdition } from '../../struct/tcgtypes';
import { MainService } from '../../services/main.service';
import { SessionsService } from '../../services/sessions.service';
import { TSessionComponent } from '../../lib/session.component';
import { ObjectID } from 'bson';
@Component({
selector: 'app-edition-detail',
templateUrl: './editions.detail.html'
})
/**
 * Detail view for a single TCG edition: loads it by route id, and can
 * create (POST) or update (PUT) it via the REST API.
 */
export class EditionsDetailComponent extends TSessionComponent {
  @Input() item: TEdition;

  constructor(
    sessions: SessionsService,
    private route: ActivatedRoute,
    private service: MainService,
    private location: Location) {
    super(sessions);
  }

  /** Session hook: load the edition as soon as the session is ready. */
  OnRun() {
    this.getItem();
  }

  /** Navigate back to the previous view. */
  goBack(): void {
    this.location.back();
  }

  /**
   * Load the edition identified by the `:id` route parameter.
   * The sentinel id '0' means "create new": no request is made and a
   * fresh empty TEdition is used instead.
   */
  getItem(): void {
    const id = this.route.snapshot.paramMap.get('id');
    if (id !== '0') { // strict comparison (was loose `!=`)
      this.service.doGet('/api/edition/' + id, res => {
        if (res.status === 200) {
          this.item = res.messages[0];
        }
      });
    } else {
      this.item = new TEdition(); // explicit constructor call parentheses
    }
  }

  /**
   * Persist the edition: PUT when it already has a server-assigned _id,
   * POST to create it otherwise. On success the server's copy replaces
   * the local item.
   */
  save(): void {
    if (this.item._id) {
      this.service.doPut('/api/edition/', this.item, res => {
        if (res.status === 200) {
          this.item = res.messages[0];
        }
      });
    } else {
      this.service.doPost('/api/edition', this.item, res => {
        if (res.status === 200) {
          this.item = res.messages[0];
        }
      });
    }
  }
}

/** Route registration for this detail view (hidden from the main menu). */
export const EditionsDetailData: TRouteData = {caption: 'Editions', comp: EditionsDetailComponent, hideMenu: true, href: 'edition/:id'};
|
<reponame>davidleiva/folio_portfolio
import React from 'react'
import styled from 'styled-components'
import { Pagination as BSPagination } from 'react-bootstrap'
// Bootstrap's Pagination restyled: square 38px page cells with a light
// border, and the theme's secondary color on hover/focus.
const PaginationStyled = styled(BSPagination)`
  .page-item {
    margin: 0 5px;
  }
  .page-link {
    width: 38px;
    height: 38px;
    border-radius: 5px;
    border: 1px solid #eae9f2;
    background-color: #ffffff;
    color: ${({ theme }) => theme.colors.dark} !important;
    display: flex;
    justify-content: center;
    align-items: center;
    &:hover,
    &:focus {
      z-index: 2;
      color: #fff !important;
      text-decoration: none;
      background-color: ${({ theme }) => theme.colors.secondary};
      border-color: ${({ theme }) => theme.colors.secondary};
      box-shadow: none;
    }
  }
`
const Pagination = (props) => (
<>
<PaginationStyled {...props} />
</>
)
export default Pagination
export const PageItem = (props) => (
<>
<PaginationStyled.Item {...props} />
</>
)
|
def convertToZigzag(s: str, nRows: int) -> str:
    """Re-order *s* in zigzag fashion over *nRows* rows and read row by row.

    Equivalent to writing the characters down-and-up across the rows and
    concatenating the rows (LeetCode #6).  Degenerate layouts (a single
    row, or no more characters than rows) are returned unchanged.
    """
    if nRows == 1 or nRows >= len(s):
        return s
    rows = [[] for _ in range(nRows)]
    row, direction = 0, 1
    for ch in s:
        rows[row].append(ch)
        # Bounce off the top and bottom rows.
        if row == 0:
            direction = 1
        elif row == nRows - 1:
            direction = -1
        row += direction
    return ''.join(''.join(r) for r in rows)
// Define the Field trait.
// NOTE(review): `FieldId` and `Result` are not defined in this snippet --
// presumably imported elsewhere; confirm before compiling in isolation.
trait Field {
    /// Concrete Rust type the field's values are stored as.
    type RustType;
    /// Number of values currently stored for `field`.
    fn count(&self, field: FieldId) -> usize;
    /// Appends `value` to the field's value list.
    fn add(&mut self, field: FieldId, value: &Self::RustType);
    /// Returns all values stored for `field`.
    fn get_list(&self, field: FieldId) -> Result<Vec<Self::RustType>>;
}
// Define the IndexedField trait extending the Field trait with
// positional (random) access to a field's value list.
trait IndexedField: Field {
    /// Returns the value at `index` within the field's value list.
    fn get_index(&self, field: FieldId, index: usize) -> Result<Self::RustType>;
    /// Overwrites the value at `index` within the field's value list.
    fn set_index(&mut self, field: FieldId, index: usize, value: &Self::RustType);
}
// Implement the Field trait for a specific data structure.
// NOTE(review): placeholder scaffold -- the Field impl and struct body are
// still to be written.
struct YourDataStructure {
    // Implement the required methods for the Field trait
    // ...
}
// Implement the IndexedField trait for the data structure.
impl IndexedField for YourDataStructure {
    fn get_index(&self, field: FieldId, index: usize) -> Result<Self::RustType> {
        // Implement the logic to retrieve the value at the specified index for the field
        // ...
    }
    fn set_index(&mut self, field: FieldId, index: usize, value: &Self::RustType) {
        // Implement the logic to set the value at the specified index for the field
        // ...
    }
}
<filename>src/gameObjects/Border.ts
import { GameObject } from '../core';
import * as config from '../config';
import { BorderWall } from './BorderWall';
export class Border extends GameObject {
  /**
   * Builds the playfield border: one BorderWall per rectangle listed in
   * config.BORDER_RECTS, each positioned at its configured origin.
   */
  protected setup(): void {
    for (const { x, y, width, height } of config.BORDER_RECTS) {
      const wall = new BorderWall(width, height);
      wall.position.set(x, y);
      this.add(wall);
    }
  }
}
|
from typing import List, Tuple, Dict, Callable
def generate_reverse_codes(operations: List[Tuple[str, Callable]]) -> Dict[str, Callable]:
    """Build a code -> handler mapping from (code, handler) pairs.

    Later duplicates of the same code overwrite earlier ones, matching
    plain ``dict`` construction semantics.
    """
    return dict(operations)
<gh_stars>0
import React from 'react';
import { Padding } from '@/components/EmailEditor/components/ConfigurationPanel/components/AttributesManager/components/Padding';
import { Background } from '@/components/EmailEditor/components/ConfigurationPanel/components/AttributesManager/components/Background';
import { Stack } from '@/components/UI/Stack';
import { TextAlign } from '@/components/EmailEditor/components/ConfigurationPanel/components/AttributesManager/components/TextAlign';
import { TextField } from '@/components/core/Form';
import { useFocusIdx } from '@/hooks/useFocusIdx';
export function Panel() {
const { focusIdx } = useFocusIdx();
return (
<Stack>
<Padding />
<Background />
<TextAlign />
<Stack vertical>
<TextField
label='Border'
name={`${focusIdx}.attributes.border`}
inline
quickchange
/>
<TextField
label='Background border radius'
name={`${focusIdx}.attributes.border-radius`}
inline
quickchange
/>
</Stack>
</Stack>
);
}
|
<reponame>edwardbr/Angular-Slickgrid
import 'slickgrid/lib/jquery.jsonp-2.4.min';
import 'slickgrid/slick.remotemodel'; // SlickGrid Remote Plugin
import { Component, OnInit, OnDestroy } from '@angular/core';
import { AngularGridInstance, Column, Formatter, GridOption } from './../modules/angular-slickgrid';
declare var Slick: any;
// Cell formatter for the brand column: shows the nested brand name when
// present, otherwise an empty string.
const brandFormatter: Formatter = (row: number, cell: number, value: any, columnDef: Column, dataContext: any) => {
  return dataContext && dataContext.brand && dataContext.brand.name || '';
};

// Cell formatter for the MPN column: renders the part number as an external
// link to its Octopart page; empty when either the URL or the MPN is missing.
const mpnFormatter: Formatter = (row: number, cell: number, value: any, columnDef: Column, dataContext: any) => {
  let link = '';
  if (dataContext && dataContext.octopart_url && dataContext.mpn) {
    link = `<a href="${dataContext.octopart_url}" target="_blank">${dataContext.mpn}</a>`;
  }
  return link;
};
@Component({
  templateUrl: './grid-remote.component.html'
})
export class GridRemoteComponent implements OnDestroy, OnInit {
  // Collects SlickGrid event subscriptions so they can all be released
  // in ngOnDestroy with a single unsubscribeAll().
  private _eventHandler: any = new Slick.EventHandler();
  title = 'Example 18: Octopart Catalog Search - Remote Model Plugin';
  subTitle = `
  This example demonstrates how to use "slick.remotemodel.js" or any Remote implementation through an external Remote Service
  <ul>
    <li>
      Your browser (Chrome) might block access to the Octopart query, if you get "block content" then just unblock it
      or try with different browser like Firefox or Edge
    </li>
    <li>If the demo throws some errors, try again later (there's a limit per day).</li>
    <li>
      Uses <a href="https://github.com/6pac/SlickGrid/blob/master/slick.remotemodel.js" target="_blank">slick.remotemodel.js</a>
      which is hooked up to load search results from Octopart, but can easily be extended
      to support any JSONP-compatible backend that accepts paging parameters.
    </li>
    <li>
      This demo implements a custom DataView, however please note that you are on your own to implement all necessary DataView methods
      for Sorting, Filtering, etc...
    </li>
    <li>
      Soure code for this example is available <a href="https://github.com/ghiscoding/Angular-Slickgrid/blob/master/src/app/examples/grid-remote.component.ts" target="_blank">here</a>
    </li>
  </ul>
  `;
  angularGrid: AngularGridInstance;
  columnDefinitions: Column[];
  customDataView: any;     // the remote model's DataView, handed to the grid
  gridObj: any;            // raw SlickGrid instance, set in angularGridReady
  gridOptions: GridOption;
  dataset = [];
  loaderDataView: any;     // Slick.Data.RemoteModel instance (remote loader)
  loading = false; // spinner when loading data
  search = 'switch';       // initial search term sent to the remote model

  constructor() {
    this.loaderDataView = new Slick.Data.RemoteModel();
    this.customDataView = this.loaderDataView && this.loaderDataView.data;
  }

  /** Called by Angular-Slickgrid once the grid is created; wires default sort
   *  and triggers the first remote page load. */
  angularGridReady(angularGrid: AngularGridInstance) {
    this.angularGrid = angularGrid;
    this.gridObj = angularGrid.slickGrid; // grid object
    this.loaderDataView.setSort('score', -1);
    this.gridObj.setSortColumn('score', false);
    // notify of a change to preload the first page
    this.gridObj.onViewportChanged.notify();
  }

  ngOnDestroy() {
    // unsubscribe all SlickGrid events
    this._eventHandler.unsubscribeAll();
  }

  ngOnInit(): void {
    this.defineGrid();
    this.hookAllLoaderEvents();
    this.loaderDataView.setSearch(this.search);
  }

  /** Builds the column definitions and grid options for this demo. */
  defineGrid() {
    this.columnDefinitions = [
      { id: 'mpn', name: 'MPN', field: 'mpn', formatter: mpnFormatter, width: 100, sortable: true },
      { id: 'brand', name: 'Brand', field: 'brand.name', formatter: brandFormatter, width: 100, sortable: true },
      { id: 'short_description', name: 'Description', field: 'short_description', width: 520 },
    ];
    this.gridOptions = {
      enableAutoResize: true,
      autoResize: {
        containerId: 'demo-container',
        sidePadding: 15
      },
      enableCellNavigation: true,
      enableColumnReorder: false,
      enableGridMenu: false,
      multiColumnSort: false
    };
  }

  /** Subscribes to the remote model's load events: toggles the spinner and
   *  repaints only the rows the loader just filled in. */
  hookAllLoaderEvents() {
    if (this._eventHandler && this._eventHandler.subscribe && this.loaderDataView && this.loaderDataView.onDataLoading && this.loaderDataView.onDataLoaded) {
      this._eventHandler.subscribe(this.loaderDataView.onDataLoading, (e: Event, args: any) => {
        this.loading = true;
      });
      this._eventHandler.subscribe(this.loaderDataView.onDataLoaded, (e: Event, args: any) => {
        if (args && this.gridObj && this.gridObj.invalidateRow && this.gridObj.updateRowCount && this.gridObj.render) {
          // Invalidate just the freshly loaded row range, then re-render.
          for (let i = args.from; i <= args.to; i++) {
            this.gridObj.invalidateRow(i);
          }
          this.gridObj.updateRowCount();
          this.gridObj.render();
          this.loading = false;
        }
      });
    }
  }

  /** Grid sort handler: forwards the sort to the remote model and reloads
   *  the visible row range. */
  onSort(e, args) {
    if (this.gridObj && this.gridObj.getViewport && this.loaderDataView && this.loaderDataView.ensureData && this.loaderDataView.setSort) {
      const vp = this.gridObj.getViewport();
      if (args && args.sortCol && args.sortCol.field) {
        this.loaderDataView.setSort(args.sortCol.field, args.sortAsc ? 1 : -1);
      }
      this.loaderDataView.ensureData(vp.top, vp.bottom);
    }
  }

  /** Scroll handler: asks the remote model to fetch any rows that just
   *  scrolled into view. */
  onViewportChanged(e, args) {
    if (this.gridObj && this.gridObj.getViewport && this.loaderDataView && this.loaderDataView.ensureData) {
      const vp = this.gridObj.getViewport();
      this.loaderDataView.ensureData(vp.top, vp.bottom);
    }
  }

  /** Search box handler: pushes the new term to the remote model and reloads
   *  the visible row range. */
  searchChanged(newValue: string) {
    if (newValue && this.gridObj && this.gridObj.getViewport && this.loaderDataView && this.loaderDataView.ensureData && this.loaderDataView.setSearch) {
      const vp = this.gridObj.getViewport();
      this.loaderDataView.setSearch(newValue);
      this.loaderDataView.ensureData(vp.top, vp.bottom);
    }
  }
}
|
import UIKit
/// Describes where a list item should be anchored when scrolled into view.
class ListItemPositioning {
    /// Vertical anchor within the visible list area.
    enum Position {
        case top, center, bottom
    }

    let position: Position
    // Whether the item is currently visible on screen.
    let isItemVisible: Bool
    // Extra offset (in points) applied on top of the anchor; nil means none.
    let customOffset: CGFloat?

    init(position: Position, isItemVisible: Bool, customOffset: CGFloat? = nil) {
        self.position = position
        self.isItemVisible = isItemVisible
        self.customOffset = customOffset
    }
}

// Example usage
let listItem1 = ListItemPositioning(position: .center, isItemVisible: true)
let listItem2 = ListItemPositioning(position: .bottom, isItemVisible: false, customOffset: 20.0)
import tensorflow as tf

# Binary classifier over 9 input features: two hidden ReLU layers (64 units
# each) feeding a single sigmoid output.
# NOTE(review): train_data/train_labels/test_data/test_labels are not defined
# anywhere in this snippet -- they must be supplied by the surrounding
# context before this script can run. Confirm their source.

# Create the model
model = tf.keras.models.Sequential([
    tf.keras.layers.Dense(64, activation='relu', input_shape=(9,)),
    tf.keras.layers.Dense(64, activation='relu'),
    tf.keras.layers.Dense(1, activation='sigmoid')
])

# Compile the model: binary cross-entropy matches the sigmoid output.
model.compile(loss='binary_crossentropy',
              optimizer='adam',
              metrics=['accuracy'])

# Train the model
model.fit(train_data, train_labels, epochs=5)

# Test the model
test_loss, test_acc = model.evaluate(test_data, test_labels)
# In-memory registry of registered users (module-level state).
_registratedUsers = []
# Outbox of mail payloads queued by register_user; presumably drained by a
# mail-sending worker elsewhere -- TODO confirm.
_sendMail = []
def register_user(user):
    """Record *user* and queue a welcome e-mail to their recovery address."""
    _registratedUsers.append(user)
    welcome_mail = {
        'to': user['recoveryEmail'],
        'subject': 'Добро пожаловать в экосистему колледжа!',
        'message': getFormattedText('email/registration.mail', user),
    }
    _sendMail.append(welcome_mail)
def getFormattedText(template, user):
    """Render the *template* file with the user's data.

    :param template: path to a text file containing ``{username}`` and
        ``{college}`` placeholders.
    :param user: mapping with at least ``'username'`` and ``'college'`` keys.
    :returns: the template text with the placeholders substituted.

    The file is read as UTF-8 explicitly: the templates contain Cyrillic
    text, and without an explicit encoding ``open`` falls back to the
    platform default (e.g. cp1251/cp1252 on Windows), which raises
    UnicodeDecodeError or silently mis-decodes the template.
    """
    with open(template, 'r', encoding='utf-8') as file:
        template_content = file.read()
    return template_content.format(
        username=user['username'],
        college=user['college'],
    )
import numpy as np
import keras
from keras.models import Sequential
from keras.layers import Dense
# Convert text into numerical features
def get_features(text):
    """Encode *text* as a per-token binary vector: 1 for 'happy', else 0.

    Tokens are produced by splitting on single spaces, matching the
    original behaviour exactly (no lowercasing or punctuation stripping).
    """
    tokens = text.split(' ')
    return np.array([1 if token == 'happy' else 0 for token in tokens])
# Define model: one hidden layer over scalar inputs, sigmoid output.
# NOTE(review): input_shape=(1,) means each ELEMENT of the feature vector is
# treated as one sample of one scalar feature, while y has a single label --
# the sample counts of X (one per word) and y (1) do not match. Confirm the
# intended input representation before relying on this script.
model = Sequential([
    Dense(3, input_shape=(1,), activation='relu'),
    Dense(1, activation='sigmoid')
])

# Train model on a single hand-built example.
X = get_features("I'm feeling very happy today!")
y = np.array([1])
model.compile(loss='binary_crossentropy', optimizer='adam',
              metrics=['accuracy'])
model.fit(X, y, epochs=100, batch_size=1)

# Save model weights + architecture to an HDF5 file.
model.save('sentiment_detection.h5')
'use strict';
const dayjs = require('dayjs');
/**
 * Default configuration
 */
module.exports = (appInfo) => {
  /**
   * built-in config
   * @type {Ee.EeAppConfig}
   **/
  const config = {};

  /* Development-mode configuration: which frontend dev server the main
     process attaches to while developing. */
  config.developmentMode = {
    default: 'vue',
    mode: {
      vue: {
        hostname: 'localhost',
        port: 8080
      },
      react: {
        hostname: 'localhost',
        port: 3000
      },
      html: {
        hostname: 'localhost',
        indexPage: 'index.html'
      },
    }
  };

  /* Open Chromium developer tools on startup */
  config.openDevTools = false;

  /* Show the application's top menu bar */
  config.openAppMenu = true;

  /* Show a loading page while the app boots */
  config.loadingPage = true;

  /* Main window options (passed to Electron's BrowserWindow) */
  config.windowsOption = {
    width: 980,
    height: 650,
    minWidth: 800,
    minHeight: 650,
    webPreferences: {
      //webSecurity: false,
      contextIsolation: false, // must be false so the renderer process can use the Electron API directly
      nodeIntegration: true,
    },
    frame: true,
    //titleBarStyle: 'hidden'
  };

  /* ee-framework log files (one app/error pair per day, date-stamped) */
  config.logger = {
    appLogName: `ee-${dayjs().format('YYYY-MM-DD')}.log`,
    errorLogName: `ee-error-${dayjs().format('YYYY-MM-DD')}.log`
  }

  /* Remote web address (optional) */
  config.remoteUrl = {
    enable: false, // whether to load the remote URL instead of local content
    url: 'https://discuz.chat/' // Any web url
  };

  /* Built-in socket service */
  config.socketServer = {
    port: 7070, // default port
    isDynamic: false // false: use `port` (a random free port is picked if it is taken); true: ignore `port` and always pick a random one
  };

  /* Application auto-update (optional) */
  config.autoUpdate = {
    windows: false, // Windows platform
    macOS: false, // macOS requires a code-signed build
    linux: false, // Linux platform
    options: {
      provider: 'generic', // or github, s3, bintray
      url: 'http://kodo.qiniu.com/' // resource dir, end with '/'
    },
    force: false, // force update: while the app runs, check for a new version and download/install it in the background
  };

  /* Wake-up from the browser via a custom protocol (optional) */
  config.awakeProtocol = {
    protocol: 'ee', // custom protocol name (defaults to your app's English name)
    args: []
  };

  /* System tray (optional) */
  config.tray = {
    title: 'EE程序', // title shown by the tray
    icon: '/public/images/tray_logo.png' // tray icon
  }

  return {
    ...config
  };
}
|
<gh_stars>0
package by.kam32ar.server.logic;
/**
 * A single chat message posted to a room.
 *
 * <p>Plain mutable bean: the id is inherited from {@link AbstractObject};
 * {@code time} is an integer timestamp whose unit is defined by the callers
 * (presumably seconds since epoch -- TODO confirm); {@code nick} identifies
 * the author.</p>
 */
public class Message extends AbstractObject {
    protected String room;    // room the message was posted to
    protected String nick;    // author's nickname
    protected int time;       // timestamp; unit defined by callers
    protected String message; // message body

    /** No-arg constructor (e.g. for frameworks); the id defaults to 0. */
    public Message() {
        super(0);
    }

    /** Fully-populating constructor. */
    public Message(int id, String room, String nick, int time, String msg) {
        super(id);
        this.nick = nick;
        this.time = time;
        this.message = msg;
        this.room = room;
    }

    public String getRoom() {
        return room;
    }

    public void setRoom(String room) {
        this.room = room;
    }

    // id accessors operate on the protected field inherited from AbstractObject.
    public int getId() {
        return id;
    }

    public void setId(int id) {
        this.id = id;
    }

    public String getMessage() {
        return message;
    }

    public void setMessage(String message) {
        this.message = message;
    }

    public int getTime() {
        return time;
    }

    public void setTime(int time) {
        this.time = time;
    }

    public String getNick() {
        return nick;
    }

    public void setNick(String nick) {
        this.nick = nick;
    }
}
|
# Build the wpasupplicant container image and its macvlan network on a remote
# host. Usage: <script> <ssh-host>
# "$1" is quoted everywhere so a host spec containing shell metacharacters
# cannot trigger word splitting or globbing.
ssh "$1" 'mkdir /tmp/wpasupplicant'
scp Dockerfile.wpasupplicant "$1":/tmp/wpasupplicant/
ssh "$1" 'docker image build -t wpasupplicant -f /tmp/wpasupplicant/Dockerfile.wpasupplicant .'
#ssh "$1" 'docker network create --subnet 172.23.0.0/16 wpasupplicant-net'
ssh "$1" 'docker network create -d macvlan --subnet=172.23.0.0/16 wpasupplicant-net'
# need to enable /sys/devices/virtual/net/br0/bridge/multicast_querier
#bridge_id=`docker network inspect wpasupplicant-net -f "{{.Id}}"`
#bridge_name=br-${bridge_id:0:12}
# no permissions to set this?!
# NOTE(review): if these lines are ever re-enabled, ${bridge_name} sits inside
# single quotes and would be expanded on the REMOTE host (where it is unset),
# not locally -- the commands need rewriting before use.
#ssh "$1" 'echo 1 | sudo tee /sys/devices/virtual/net/${bridge_name}/bridge/multicast_querier'
#ssh "$1" 'echo 0 | sudo tee /sys/devices/virtual/net/${bridge_name}/bridge/multicast_snooping'
#ssh "$1" 'echo 0 | sudo tee /sys/devices/virtual/net/${bridge_name}/bridge/multicast_router'
<reponame>ctuning/ck-spack
##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by <NAME>, <EMAIL>, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Dyninst(Package):
    """API for dynamic binary instrumentation. Modify programs while they
    are executing without recompiling, re-linking, or re-executing."""
    homepage = "https://paradyn.org"
    url = "https://github.com/dyninst/dyninst/archive/v9.2.0.tar.gz"
    list_url = "http://www.dyninst.org/downloads/dyninst-8.x"
    # Git-based versions: 'develop' tracks master, tagged versions are releases.
    version('develop', git="https://github.com/dyninst/dyninst.git", branch='master')
    version('9.3.2', git="https://github.com/dyninst/dyninst.git", tag='v9.3.2')
    version('9.3.0', git="https://github.com/dyninst/dyninst.git", tag='v9.3.0')
    version('9.2.0', git="https://github.com/dyninst/dyninst.git", tag='v9.2.0')
    version('9.1.0', git="https://github.com/dyninst/dyninst.git", tag='v9.1.0')
    version('8.2.1', git="https://github.com/dyninst/dyninst.git", tag='v8.2.1')
    version('8.1.2', git="https://github.com/dyninst/dyninst.git", tag='v8.1.2')
    version('8.1.1', git="https://github.com/dyninst/dyninst.git", tag='v8.1.1')
    variant('stat_dysect', default=False,
            description="patch for STAT's DySectAPI")
    # Dyninst depends on libelf and libdwarf prior to @9.3.0
    # Dyninst depends on elfutils and libdwarf from @9.3.0 to but
    # not including @develop
    # Dyninst depends on elfutils and elfutils libdw from @develop forward
    # elf@0 is an abstraction for libelf
    # elf@1 is an abstraction for elfutils
    depends_on("elf@0", type='link', when='@:9.2.99')
    # The sorting algorithm puts numbered releases as newer than alphabetic
    # releases, but spack has special logic in place to ensure that
    # develop is considered newer than all other releases.
    # So, develop is included in the elf@1 line below.
    depends_on("elf@1", type='link', when='@9.3.0:')
    depends_on("libdwarf", when='@:9')
    depends_on("boost@1.42:")
    depends_on('cmake', type='build')
    patch('stat_dysect.patch', when='+stat_dysect')
    patch('stackanalysis_h.patch', when='@9.2.0')
    # new version uses cmake
    def install(self, spec, prefix):
        # Legacy autotools build for <=8.1 (also covered by the @when
        # multimethod below); everything newer builds with CMake.
        if spec.satisfies('@:8.1'):
            configure("--prefix=" + prefix)
            make()
            make("install")
            return
        # libelf here is the 'elf' virtual's prefix (libelf or elfutils).
        libelf = spec['elf'].prefix
        if spec.satisfies('@:9'):
            libdwarf = spec['libdwarf'].prefix
        with working_dir('spack-build', create=True):
            args = ['..',
                    '-DBoost_INCLUDE_DIR=%s' % spec['boost'].prefix.include,
                    '-DBoost_LIBRARY_DIR=%s' % spec['boost'].prefix.lib,
                    '-DBoost_NO_SYSTEM_PATHS=TRUE',
                    '-DLIBELF_INCLUDE_DIR=%s' % join_path(
                        libelf.include, 'libelf'),
                    '-DLIBELF_LIBRARIES=%s' % join_path(
                        libelf.lib, "libelf." + dso_suffix)]
            if spec.satisfies('@:9'):
                args.append('-DLIBDWARF_INCLUDE_DIR=%s' % libdwarf.include)
                args.append('-DLIBDWARF_LIBRARIES=%s' % join_path(
                    libdwarf.lib, "libdwarf." + dso_suffix))
            # For @develop + use elfutils libdw, libelf is an abstraction
            # we are really using elfutils here
            if spec.satisfies('@develop'):
                args.append('-DLIBDWARF_INCLUDE_DIR=%s' % libelf.include)
                args.append('-DLIBDWARF_LIBRARIES=%s' % join_path(
                    libelf.lib, "libdw." + dso_suffix))
            if spec.satisfies('arch=linux-redhat7-ppc64le'):
                args.append('-Darch_ppc64_little_endian=1')
            args += std_cmake_args
            cmake(*args)
            make()
            make("install")
    # Spack's @when decorator dispatches this variant for old releases.
    # NOTE(review): it duplicates the early-return branch in install() above;
    # one of the two paths is redundant — confirm against Spack's multimethod
    # resolution before removing either.
    @when('@:8.1')
    def install(self, spec, prefix):
        configure("--prefix=" + prefix)
        make()
        make("install")
|
-- Fetch basic profile and contact details for every user.
SELECT name, age, address, email, phone_number
FROM users
#!/bin/bash
# Switch to the directory containing this script,
cd "$(dirname "$0")"
# up a directory should be the main codebase.
cd ../..
mkdir -p build/1d_dust_runs
cd build/1d_dust_runs
pwd
# Paired parameter tables: box size L and amplitude A (index-matched).
declare -a Ls=('0.1' '0.12865616' '0.16552408' '0.213' '0.274' '0.352' '0.456' '0.583' '0.751' '0.966' '1.24' '1.60' '2.06' '2.65' '3.40' '4.38' '5.63' '7.25' '9.33' '12.0')
declare -a As=('4.629e-03' '3.219e-03' '2.181e-03' '1.444e-03' '9.382e-04' '6.000e-04' '3.789e-04' '2.368e-04' '1.468e-04' '9.038e-05' '5.538e-05' '3.382e-05' '2.060e-05' '1.253e-05' '7.615e-06' '4.623e-06' '2.804e-06' '1.697e-06' '1.022e-06' '6.090e-07')
# Generalized: iterate over however many entries the tables hold (was a
# hard-coded 20), so adding/removing runs only requires editing the arrays.
for ((i=0; i<${#Ls[@]}; i+=1));
do
    A=${As[$i]}
    L=${Ls[$i]}
    echo "Running for L = $L, A = $A..."
    # Run each (L, A) pair at three grid resolutions with a static geometry.
    ../../scripts/1d_runs/1d_runs.sh -C -N=0064 -A=$A -l=$L -g=Static
    ../../scripts/1d_runs/1d_runs.sh -C -N=0096 -A=$A -l=$L -g=Static
    ../../scripts/1d_runs/1d_runs.sh -C -N=0128 -A=$A -l=$L -g=Static
done
<filename>src/test/java/com/googlecode/junittoolbox/InnerTestClassesSuiteTest.java
package com.googlecode.junittoolbox;
import com.googlecode.junittoolbox.samples.LoginBeanTest;
import org.junit.Test;
import org.junit.internal.requests.ClassRequest;
import org.junit.runner.Runner;
import java.util.Collection;
import static com.googlecode.junittoolbox.TestHelper.getChildren;
import static com.googlecode.junittoolbox.TestHelper.hasItemWithTestClass;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.*;
public class InnerTestClassesSuiteTest {

    @Test
    public void test() throws Exception {
        // Obtain the runner JUnit would use for LoginBeanTest and inspect
        // which child runners (the nested test classes) it discovered.
        Runner runner = ClassRequest.aClass(LoginBeanTest.class).getRunner();
        Collection<?> suiteChildren = getChildren(runner);
        // Exactly the three inner test classes must be picked up.
        assertThat(suiteChildren.size(), is(3));
        assertThat(suiteChildren, hasItemWithTestClass(LoginBeanTest.UnitTests.class));
        assertThat(suiteChildren, hasItemWithTestClass(LoginBeanTest.IntegrationTests.class));
        assertThat(suiteChildren, hasItemWithTestClass(LoginBeanTest.TheoryTests.class));
    }
}
|
<filename>src/app/services/auth.service.ts
import { Observable, from, of } from 'rxjs';
import { AngularFireAuth } from 'angularfire2/auth';
import { Injectable } from '@angular/core';
import { User } from 'firebase';
@Injectable({
  providedIn: 'root',
})
export class AuthService {
  constructor(private fireAuth: AngularFireAuth) {}

  /** Emits the signed-in user (or null) whenever the auth state changes. */
  getCurrentUser(): Observable<User> {
    return this.fireAuth.authState;
  }

  /** Emits a fresh ID token, or an empty string when nobody is signed in. */
  getAuthToken(): Observable<string> {
    if (this.fireAuth.auth.currentUser) {
      return from(this.fireAuth.auth.currentUser.getIdToken());
    }
    return of('');
  }

  /**
   * Signs in with email/password and resolves with the signed-in user.
   * Rejects with the underlying Firebase auth error.
   */
  signIn(email: string, password: string): Promise<User> {
    // Chain on the Firebase promise directly instead of wrapping it in a
    // redundant `new Promise(...)` (promise-constructor anti-pattern).
    return this.fireAuth.auth
      .signInWithEmailAndPassword(email, password)
      .then(userCred => userCred.user);
  }

  signOut() {
    this.fireAuth.auth.signOut();
  }

  /**
   * Creates an account with email/password and resolves with the new user.
   * Rejects with the underlying Firebase auth error.
   */
  signUp(email: string, password: string): Promise<User> {
    return this.fireAuth.auth
      .createUserWithEmailAndPassword(email, password)
      .then(userCred => userCred.user);
  }

  /**
   * Updates the current user's display name.
   * Returns the underlying promise so callers can await completion
   * (the original dropped it, hiding failures).
   */
  updateName(name: string) {
    return this.fireAuth.auth.currentUser.updateProfile({
      displayName: name,
      photoURL: null,
    });
  }
}
|
from typing import Dict, List, Optional

# Built once at import time instead of being re-created on every call.
_TABLE_DATE_COLUMNS: Dict[str, List[str]] = {
    'measurement_occurrence': ['measurement_date'],
    'observation_occurrence': ['observation_date'],
    'procedure_occurrence': ['procedure_date'],
    'condition_occurrence': ['condition_start_date'],
    'drug_exposure': ['drug_exposure_start_date'],
    'visit_occurrence': ['visit_start_date'],
}


def get_date_columns(table_name: str) -> List[Optional[str]]:
    """Return the date column name(s) for the given clinical-event table.

    Unknown table names yield ``[None]``.  A fresh list is returned on
    every call, so callers may mutate the result safely (matching the
    original behavior, which rebuilt the mapping per call).
    """
    return list(_TABLE_DATE_COLUMNS.get(table_name, [None]))
<filename>src/Modules/Finance/util.ts
/** Formats a number as a Brazilian-style money string, e.g. 10.5 -> "R$10,5". */
export const toMoneyString = (n: number) => `R$${String(n).replace('.', ',')}`;
|
# Add a swapfile on the data store drive
# (rsync needs this for large file copies)
# Flip the firmware config from "no swap" to "swap enabled".
sed -i 's/SWAP=noswap/SWAP=swap/' /etc/firmware
# Write the init script verbatim: 'EOF' is quoted, so NOTHING below is
# expanded here — all variables are evaluated on the device at boot time.
cat <<'EOF' > /etc/init.d/swap
STORE_DIR=/sdcopies
CONFIG_DIR="$STORE_DIR"/config
#rm -f "$STORE_DIR"/log/swapinfo
while read device mountpoint fstype remainder; do
if [ ${device:0:7} == "/dev/sd" -a -e "$mountpoint$CONFIG_DIR" ];then
swapfile="$mountpoint$CONFIG_DIR"/swapfile
if [ ! -e "$swapfile" ]; then
dd if=/dev/zero of="$swapfile" bs=1024 count=131072
echo "$(date): Creating swapfile $swapfile" >> "$STORE_DIR"/log/swapinfo
fi
swapon "$swapfile" >> /tmp/swapinfo 2>&1
if [ $? -eq 0 ]; then
echo "$(date): Turned on swap for $swapfile" >> "$STORE_DIR"/log/swapinfo
else
echo "$(date): There was an error turning on swap" >> "$STORE_DIR"/log/swapinfo
fi
exit 0
fi
done < /proc/mounts
exit 0
EOF
|
package be.kwakeroni.parameters.client.api;
import be.kwakeroni.parameters.client.api.query.Query;
import be.kwakeroni.parameters.client.api.model.EntryType;
import java.util.Optional;
/**
 * Retrieves values of business parameters of a specific group.
 *
 * @param <ET> the entry type describing how entries of this group are queried
 */
public interface BusinessParameterGroup<ET extends EntryType> {

    /**
     * Returns the name of this parameter group.
     *
     * @return the group name
     */
    // Redundant `public` modifiers removed: interface members are public by default.
    String getName();

    /**
     * Evaluates the given query against this group.
     *
     * @param <T>   the result type produced by the query
     * @param query the query to evaluate
     * @return the resolved value, or {@link Optional#empty()} when no value is defined
     */
    <T> Optional<T> get(Query<ET, T> query);
}
|
# Wrapper around the Blizzard WoW Game Data API: acquires an OAuth token on
# construction and exposes auction/item lookups for one connected realm.
class InfoRunner
  #### Attributes ####
  # token:            parsed OAuth response hash (contains 'access_token')
  # region/namespace/locale/connectedRealmId: fixed API-call parameters
  attr_accessor :token, :region, :connectedRealmId, :namespace, :locale
  #### Instance Methods ####
  def initialize
    # get token and set major api call attributes
    @token = JSON.parse(get_token.body)
    @region = "us"
    @connectedRealmId = 1146
    @namespace = "dynamic-us"
    @locale = "en_US"
  end
  def get_token
    #### Equivalent to:
    # curl -u <client_id>:<client_secret> -d grant_type=client_credentials https://us.battle.net/oauth/token
    # (credentials redacted — supply them in the basic_auth call below)
    uri = URI.parse("https://us.battle.net/oauth/token")
    request = Net::HTTP::Post.new(uri)
    request.basic_auth("<KEY>", "<KEY>")
    request.set_form_data(
      "grant_type" => "client_credentials",
    )
    req_options = {
      use_ssl: uri.scheme == "https",
    }
    # The block's result (the HTTP response) is this method's return value.
    response = Net::HTTP.start(uri.hostname, uri.port, req_options) do |http|
      http.request(request)
    end
  end
  def get_auction_list
    # call blizzard auction api get request, parse json into hash and return
    uri = URI.parse("https://#{@region}.api.blizzard.com/data/wow/connected-realm/#{@connectedRealmId}/auctions?namespace=#{@namespace}&locale=#{@locale}&access_token=#{token['access_token']}")
    auction_list = Net::HTTP.get_response(uri)
    JSON.parse(auction_list.body)
  end
  def get_item_by_id(item_id)
    # call blizzard item info api get request using supplied item id, parse
    # the returned information and creates and returns an item hash
    uri = URI.parse("https://#{@region}.api.blizzard.com/data/wow/item/#{item_id}?namespace=static-us&locale=#{@locale}&access_token=#{token['access_token']}")
    item = Net::HTTP.get_response(uri)
    item_json = JSON.parse(item.body)
    # Hash literal is the last expression, hence the method's return value.
    item_hash = {
      :name => item_json["name"],
      :quality => item_json["quality"]["name"],
      :level => item_json["level"],
      :item_class => item_json["item_class"]["name"],
      :item_subclass => item_json["item_subclass"]["name"]
    }
  end
end
|
<filename>algorithm/0/38-countAndSay.go<gh_stars>1-10
//给定一个正整数 n ,输出外观数列的第 n 项。
//
// 「外观数列」是一个整数序列,从数字 1 开始,序列中的每一项都是对前一项的描述。
//
// 你可以将其视作是由递归公式定义的数字字符串序列:
//
//
// countAndSay(1) = "1"
// countAndSay(n) 是对 countAndSay(n-1) 的描述,然后转换成另一个数字字符串。
//
//
// 前五项如下:
//
//
//1. 1
//2. 11
//3. 21
//4. 1211
//5. 111221
//第一项是数字 1
//描述前一项,这个数是 1 即 “ 一 个 1 ”,记作 "11"
//描述前一项,这个数是 11 即 “ 二 个 1 ” ,记作 "21"
//描述前一项,这个数是 21 即 “ 一 个 2 + 一 个 1 ” ,记作 "1211"
//描述前一项,这个数是 1211 即 “ 一 个 1 + 一 个 2 + 二 个 1 ” ,记作 "111221"
//
//
// 要 描述 一个数字字符串,首先要将字符串分割为 最小 数量的组,每个组都由连续的最多 相同字符 组成。然后对于每个组,先描述字符的数量,然后描述字符,形成
//一个描述组。要将描述转换为数字字符串,先将每组中的字符数量用数字替换,再将所有描述组连接起来。
//
// 例如,数字字符串 "3322251" 的描述如下图:
//
//
//
//
//
//
// 示例 1:
//
//
//输入:n = 1
//输出:"1"
//解释:这是一个基本样例。
//
//
// 示例 2:
//
//
//输入:n = 4
//输出:"1211"
//解释:
//countAndSay(1) = "1"
//countAndSay(2) = 读 "1" = 一 个 1 = "11"
//countAndSay(3) = 读 "11" = 二 个 1 = "21"
//countAndSay(4) = 读 "21" = 一 个 2 + 一 个 1 = "12" + "11" = "1211"
//
//
//
//
// 提示:
//
//
// 1 <= n <= 30
//
// Related Topics 字符串 👍 797 👎 0
package algorithm_0
import (
"strconv"
)
// countAndSay returns the n-th term of the look-and-say sequence:
// countAndSay(1) == "1", and each later term "reads off" the runs of
// equal digits in the previous term (e.g. "21" -> "1211").
// Builds each term in a byte slice instead of repeated string
// concatenation, avoiding a fresh allocation per run.
func countAndSay(n int) (res string) {
	res = "1"
	for i := 2; i <= n; i++ {
		var next []byte
		for start := 0; start < len(res); {
			// Advance end past the run of digits equal to res[start].
			end := start
			for end < len(res) && res[end] == res[start] {
				end++
			}
			// Emit "<run length><digit>".
			next = append(next, strconv.Itoa(end-start)...)
			next = append(next, res[start])
			start = end
		}
		res = string(next)
	}
	return
}
|
import datetime  # bug fix: the snippet used datetime.* without importing it

# Examples of the core datetime types: date, time, and datetime.
today = datetime.date.today()
birthday = datetime.date(1977, 5, 4)
currenttime = datetime.datetime.now().time()
lunchtime = datetime.time(12, 00)
now = datetime.datetime.now()
epoch = datetime.datetime(1970, 1, 1)
meeting = datetime.datetime(2005, 8, 3, 15, 30)
|
package grammar
import "testing"
type testSymbolGenerator func(text string) symbol

// newTestSymbolGenerator returns a helper that resolves text to a grammar
// symbol via symTab, failing the test immediately when the text is unknown.
func newTestSymbolGenerator(t *testing.T, symTab *symbolTable) testSymbolGenerator {
	return func(text string) symbol {
		t.Helper()
		sym, found := symTab.toSymbol(text)
		if !found {
			t.Fatalf("symbol was not found: %v", text)
		}
		return sym
	}
}
type testProductionGenerator func(lhs string, rhs ...string) *production

// newTestProductionGenerator returns a helper that builds a production from
// LHS/RHS symbol names, resolving each name through genSym and failing the
// test if construction is rejected.
func newTestProductionGenerator(t *testing.T, genSym testSymbolGenerator) testProductionGenerator {
	return func(lhs string, rhs ...string) *production {
		t.Helper()
		rhsSyms := make([]symbol, 0, len(rhs))
		for _, name := range rhs {
			rhsSyms = append(rhsSyms, genSym(name))
		}
		prod, err := newProduction(genSym(lhs), rhsSyms)
		if err != nil {
			t.Fatalf("failed to create a production: %v", err)
		}
		return prod
	}
}
type testLR0ItemGenerator func(lhs string, dot int, rhs ...string) *lrItem

// newTestLR0ItemGenerator returns a helper that builds an LR(0) item with
// the dot at the given position, failing the test if the position is invalid.
func newTestLR0ItemGenerator(t *testing.T, genProd testProductionGenerator) testLR0ItemGenerator {
	return func(lhs string, dot int, rhs ...string) *lrItem {
		t.Helper()
		item, err := newLR0Item(genProd(lhs, rhs...), dot)
		if err != nil {
			t.Fatalf("failed to create a LR0 item: %v", err)
		}
		return item
	}
}
// withLookAhead adds the given look-ahead symbols to item in place
// (allocating the set lazily) and returns the same item for chaining.
func withLookAhead(item *lrItem, lookAhead ...symbol) *lrItem {
	if item.lookAhead.symbols == nil {
		item.lookAhead.symbols = make(map[symbol]struct{}, len(lookAhead))
	}
	for _, sym := range lookAhead {
		item.lookAhead.symbols[sym] = struct{}{}
	}
	return item
}
|
import { Team } from '../constant';
import axios from '../helper/axios';
// Plain action creators for the team-fetch lifecycle.
const getTeamRequest = () => ({ type: Team.GET_TEAM_REQUEST });

const getTeamSuccess = (payload) => ({ type: Team.GET_TEAM_SUCCESS, payload });

const getTeamFailed = (payload) => ({ type: Team.GET_TEAM_FAILED, payload });
// Thunk: fetches the team list and dispatches request/success/failed actions.
export const TeamAction = () => async (dispatch) => {
  try {
    dispatch(getTeamRequest());
    const res = await axios.get('teams');
    if (res.status === 200) {
      dispatch(getTeamSuccess(res.data.data));
    }
  } catch (err) {
    // Surface a generic message rather than the raw axios error.
    dispatch(getTeamFailed('oops, something went wrong'));
  }
};
|
<gh_stars>10-100
//
// Created by ooooo on 2019/12/29.
//
#ifndef CPP_0349_SOLUTION1_H
#define CPP_0349_SOLUTION1_H

#include <iostream>
#include <vector>
#include <unordered_set>

using namespace std;

class Solution {
public:
    // Returns the distinct values present in both nums1 and nums2, in the
    // order they are first encountered while scanning nums2.
    vector<int> intersection(vector<int> &nums1, vector<int> &nums2) {
        // Range constructor replaces the manual insert loop; the original
        // passed nums1.size() as a bucket-count hint, which read as if it
        // were populating the set.
        unordered_set<int> candidates(nums1.begin(), nums1.end());
        vector<int> res;
        for (const auto &num : nums2) {
            // erase() returns the number of elements removed (0 or 1), so a
            // single call both tests membership and de-duplicates the output.
            if (candidates.erase(num)) {
                res.push_back(num);
            }
        }
        return res;
    }
};

#endif //CPP_0349_SOLUTION1_H
|
/* eslint-disable no-restricted-globals */
const axios = require('axios');

/**
 * Minimal client for the botlists.com API.
 */
class Botlists {
  /**
   * Botlists stats
   * @param { string } apiKey token sent in the Authorization header
   * @param { string } botId  bot identifier used in API paths
   */
  constructor(apiKey, botId) {
    this.url = 'https://api.botlists.com';
    this.apiKey = apiKey;
    this.botId = botId;
    if (!this.botId) {
      console.log('/*------ BOT LIST API ERROR ------*\\');
      console.log('Missing Required Bot ID Param.');
    }
  }

  /**
   * Fetch this bot's stats.
   * @return {Promise<Object|undefined>} API payload, or undefined on error
   *         (errors are logged, matching the original best-effort behavior).
   */
  async stats() {
    try {
      const { data } = await axios.get(`${this.url}/bot/${this.botId}`, {
        headers: {
          Authorization: this.apiKey,
        },
      });
      return data;
    } catch (e) {
      console.log('/*------ BOT LIST API ERROR ------*\\');
      console.log(e);
    }
  }

  /**
   * post bots guild and shard count
   * @param { number } guildCount required; may be 0
   * @param { number } [shardCount] optional; defaults to 0 when omitted
   * @param { string } status
   * @return {Promise<Object>} the API response body
   */
  async postStats(guildCount, shardCount, status = 'online') {
    // Bug fixes vs. the original:
    //  - guildCount of 0 is a legal value (the old `!guildCount` rejected it)
    //  - shardCount is optional (isNaN(undefined) used to throw every time
    //    it was omitted, despite the `|| 0` fallback below)
    //  - the shardCount error message named the wrong parameter
    if (guildCount === undefined || guildCount === null) throw new Error('missing guildCount param');
    if (isNaN(guildCount)) throw new Error('guildCount param must be a number');
    if (shardCount !== undefined && isNaN(shardCount)) throw new Error('shardCount param must be a number');
    const post = await axios.patch(`${this.url}/bot/${this.botId}`, {
      stats: {
        guilds: guildCount,
        shards: shardCount || 0,
        status: status,
      },
    }, {
      headers: {
        'Content-Type': 'application/json',
        authorization: this.apiKey,
      },
    });
    return post.data;
  }
}

module.exports = Botlists;
|
alias reload!='. ~/.zshrc'
alias rm='rm -i'
alias et='exit'
alias rmold='rm -f ./*~'
# Bug fix: aliases cannot take positional arguments — $1 inside an alias
# expands to nothing and the caller's argument lands at the END of the
# pipeline (e.g. it used to reach `sort`/`grep`, not `find`/`history`).
# Converted the argument-taking aliases to functions.
function directory_hog() {
    find "$1" -type d | xargs du -sm | sort -g
}
function hpg() {
    history | grep "$1"
}
# ssh external from ORNL
alias sshout='ssh -o "ProxyCommand=corkscrew snowman.ornl.gov 3128 %h %p"'
alias scpout='scp -o "ProxyCommand=corkscrew snowman.ornl.gov 3128 %h %p"'
# Bug fix: `rsync -e "sshout"` passed the alias NAME to rsync, which spawns
# it in a non-interactive shell where aliases are not expanded; spell out
# the real ssh command instead.
function rsyncout() {
    rsync -e 'ssh -o "ProxyCommand=corkscrew snowman.ornl.gov 3128 %h %p"' "$@"
}
alias e='emacsclient -n'
alias mkd='take'
# findsrc DIR: list all C++ headers/sources (*.hh, *.cc) under DIR.
function findsrc() {
    find $1 -name \*.hh -print -o -name \*.cc -print
}
# custom cd
# Wrapper around builtin cd: no arguments goes to $HOME, and every
# successful directory change is followed by an automatic `ls`.
function cd() {
    new_directory="$*";
    if [ $# -eq 0 ]; then
        new_directory=${HOME};
    fi;
    builtin cd "${new_directory}" && ls
}
# Reuse generic GNU-style completion for the cd wrapper.
compdef _gnu_generic cd
# useful functions for parsing ATS output
# showc CYCLE FILE: print the first line reporting the given cycle number.
function showc() {
    grep "Cycle = ${1}" ${2} | head -n1
}
# lastc FILE: print the most recent cycle line in the file.
function lastc() {
    grep "Cycle = " ${1} | tail -n1
}
# timediff file1 file2 shows difference, in seconds, of file2 - file1
# (mtime via `gstat`, i.e. GNU coreutils stat — e.g. installed on macOS)
function timediff() {
    echo $((`gstat --format=%Y $2` - `gstat --format=%Y $1`))
}
|
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Fail fast: treat unset variables as errors and abort on any command failure.
set -o nounset
set -o errexit # exit script if any command exits with nonzero value
# Script identity and invocation context, captured before anything changes cwd.
readonly PROG_NAME=$(basename $0)
readonly PROG_DIR=$(dirname $(realpath $0))
readonly INVOKE_DIR=$(pwd)
readonly ARGS="$@"
# overrideable defaults
AWS=false
PARALLEL=true
MAX_PARALLEL=5
readonly USAGE="Usage: $PROG_NAME [-h | --help] [--aws [--no-parallel] [--max-parallel MAX]]"
# NOTE: this heredoc is UNquoted, so $MAX_PARALLEL is interpolated here —
# the help text always shows the default value.
readonly HELP="$(cat <<EOF
Tool to bring up a vagrant cluster on local machine or aws.
-h | --help Show this help message
--aws Use if you are running in aws
--no-parallel Bring up machines not in parallel. Only applicable on aws
--max-parallel MAX Maximum number of machines to bring up in parallel. Note: only applicable on test worker machines on aws. default: $MAX_PARALLEL
Approximately speaking, this wrapper script essentially wraps 2 commands:
vagrant up
vagrant hostmanager
The situation on aws is complicated by the fact that aws imposes a maximum request rate,
which effectively caps the number of machines we are able to bring up in parallel. Therefore, on aws,
this wrapper script attempts to bring up machines in small batches.
If you are seeing rate limit exceeded errors, you may need to use a reduced --max-parallel setting.
EOF
)"
# Print the usage line and the detailed help text, then exit successfully.
function help {
    printf '%s\n' "$USAGE" "$HELP"
    exit 0
}
# Parse command-line flags into AWS / PARALLEL / MAX_PARALLEL.
# Fix: inside [[ ]], `>` is a LEXICOGRAPHIC string comparison; use the
# numeric operator -gt for the argument count.
while [[ $# -gt 0 ]]; do
    key="$1"
    case $key in
        -h | --help)
            help
            ;;
        --aws)
            AWS=true
            ;;
        --no-parallel)
            PARALLEL=false
            ;;
        --max-parallel)
            # Flag takes a value: consume it and skip past it.
            MAX_PARALLEL="$2"
            shift
            ;;
        *)
            # unknown option
            echo "Unknown option $1"
            exit 1
            ;;
    esac
    shift # past argument or value
done
# Get a list of vagrant machines (in any state)
# Parses `vagrant status` output: header lines, a blank line, one machine
# per line, then a blank line followed by a footer.
function read_vagrant_machines {
    local ignore_state="ignore"
    local reading_state="reading"
    # Fix: removed the unused `tmp_file="tmp-$RANDOM"` local — nothing ever
    # wrote to or read from it.
    local state="$ignore_state"
    local machines=""
    while read -r line; do
        # Lines before the first empty line are ignored
        # The first empty line triggers change from ignore state to reading state
        # When in reading state, we parse in machine names until we hit the next empty line,
        # which signals that we're done parsing
        if [[ -z "$line" ]]; then
            if [[ "$state" == "$ignore_state" ]]; then
                state="$reading_state"
            else
                # all done
                echo "$machines"
                return
            fi
            continue
        fi
        # Parse machine name while in reading state
        if [[ "$state" == "$reading_state" ]]; then
            line=$(echo "$line" | cut -d ' ' -f 1)
            if [[ -z "$machines" ]]; then
                machines="$line"
            else
                machines="${machines} ${line}"
            fi
        fi
    done < <(vagrant status)
}
# Filter "list", returning a list of strings containing pattern as a substring
function filter {
    local list="$1"
    local pattern="$2"
    local result=""
    local item
    for item in $list; do
        # grep -q gives the same truth value as testing grep's output
        # for non-emptiness, without a command substitution.
        if echo "$item" | grep -q "$pattern"; then
            result="$result $item"
        fi
    done
    echo "$result"
}
# Given a list of machine names, return only test worker machines
function worker {
    local machines="$1"
    local workers
    workers=$(filter "$machines" "worker")
    # xargs with no command echoes its input with surrounding whitespace trimmed
    echo "$workers" | xargs
}
# Given a list of machine names, return only zookeeper and broker machines
function zk_broker {
    local machines="$1"
    local zk_broker_list
    zk_broker_list="$(filter "$machines" "zk") $(filter "$machines" "broker")"
    # trim leading/trailing whitespace via argument re-splitting
    echo "$zk_broker_list" | xargs
}
# Run a vagrant command on batches of machines of size $group_size
# This is annoying but necessary on aws to avoid errors due to AWS request rate
# throttling
#
# Example
# $ vagrant_batch_command "vagrant up" "m1 m2 m3 m4 m5" "2"
#
# This is equivalent to running "vagrant up" on groups of machines of size 2 or less, i.e.:
# $ vagrant up m1 m2
# $ vagrant up m3 m4
# $ vagrant up m5
function vagrant_batch_command {
    local vagrant_cmd="$1"
    local machines="$2"
    local group_size="$3"
    local count=1
    local m_group=""
    # NOTE(review): a stale comment here described a --provision flag; the
    # actual command (including any flags) is supplied by the caller in
    # $vagrant_cmd and run verbatim on each batch.
    for machine in $machines; do
        m_group="$m_group $machine"
        if [[ $(expr $count % $group_size) == 0 ]]; then
            # We've reached a full group
            # Bring up this part of the cluster
            $vagrant_cmd $m_group
            m_group=""
        fi
        ((count++))
    done
    # Take care of any leftover partially complete group
    if [[ ! -z "$m_group" ]]; then
        $vagrant_cmd $m_group
    fi
}
# We assume vagrant-hostmanager is installed, but may or may not be disabled during vagrant up
# In this fashion, we ensure we run hostmanager after machines are up, and before provisioning.
# This sequence of commands is necessary for example for bringing up a multi-node zookeeper cluster
function bring_up_local {
    # Boot first without provisioning, sync hosts entries, then provision.
    vagrant up --no-provision
    vagrant hostmanager
    vagrant provision
}
# Bring up the cluster on AWS: zookeeper/broker machines serially first,
# then worker machines (optionally batched in parallel).
function bring_up_aws {
    local parallel="$1"
    local max_parallel="$2"
    local machines="$(read_vagrant_machines)"
    # Split the machine list into the two provisioning phases.
    zk_broker_machines=$(zk_broker "$machines")
    worker_machines=$(worker "$machines")
    if [[ "$parallel" == "true" ]]; then
        if [[ ! -z "$zk_broker_machines" ]]; then
            # We still have to bring up zookeeper/broker nodes serially
            echo "Bringing up zookeeper/broker machines serially"
            vagrant up --provider=aws --no-parallel --no-provision $zk_broker_machines
            vagrant hostmanager
            vagrant provision
        fi
        if [[ ! -z "$worker_machines" ]]; then
            echo "Bringing up test worker machines in parallel"
            # Batched to stay under the AWS API request-rate limit.
            vagrant_batch_command "vagrant up --provider=aws" "$worker_machines" "$max_parallel"
            vagrant hostmanager
        fi
    else
        vagrant up --provider=aws --no-parallel --no-provision
        vagrant hostmanager
        vagrant provision
    fi
}
function main {
    # Dispatch on the --aws flag parsed earlier; defaults to a local bring-up.
    if [[ "$AWS" != "true" ]]; then
        bring_up_local
    else
        bring_up_aws "$PARALLEL" "$MAX_PARALLEL"
    fi
}
main
|
/*
Sensor Base Class
Refactored into C++ class: <NAME>
Contribution: epierre
Based on <NAME> http://davidegironi.blogspot.fr/2014/01/cheap-co2-meter-using-mq135-sensor-with.html
License: Attribution-NonCommercial-ShareAlike 3.0 Unported (CC BY-NC-SA 3.0)
*/
#ifndef Included_SensorBase_H
#define Included_SensorBase_H

#include <cmath>

// Base class for sampled analog sensors: accumulates ADC readings over a
// sampling window and supports a calibration pass.
// Fix: both constructors previously left most members uninitialized
// (only _is_sampling_complete and the three ctor parameters were set),
// so reads of e.g. _sampling_count or _sample_sum were undefined behavior.
class SensorBase {
public:
    SensorBase(int sampling_frequency, int sampling_interval_ms, int rl_value)
        : _is_sampling_complete(true),
          _sampling_interval_ms(sampling_interval_ms),
          _sampling_frequency(sampling_frequency),
          _sampling_count(0),
          _rl_value(rl_value),
          _sample_sum(0.0f),
          _sampling_average(0.0f),
          _start_time_ms(0),
          calibration_count(0),
          calibration_total(0.0f) {}

    void startSampling(unsigned long start_time_ms);
    bool isSamplingComplete();
    bool isTimeToRead(unsigned long current_time_ms);
    void setAnalogRead(int raw_adc, unsigned long current_time_ms);
    void startCalibrating();

protected:
    bool _is_sampling_complete;       // true when the current window is done
    int _sampling_interval_ms;        // delay between successive reads
    int _sampling_frequency;          // number of reads per window
    int _sampling_count;              // reads taken in the current window
    int _rl_value;                    // load resistance used in calculations
    float _sample_sum;                // running sum of the current window
    float _sampling_average;          // average of the last completed window
    unsigned long _start_time_ms;     // timestamp when sampling started
    int calibration_count;
    float calibration_total;

    // Protected default ctor for derived classes; zero-initializes everything.
    SensorBase()
        : _is_sampling_complete(true),
          _sampling_interval_ms(0),
          _sampling_frequency(0),
          _sampling_count(0),
          _rl_value(0),
          _sample_sum(0.0f),
          _sampling_average(0.0f),
          _start_time_ms(0),
          calibration_count(0),
          calibration_total(0.0f) {}

    int getPercentage(float ro, float *pcurve);
    float calibrateInCleanAir(int raw_adc, int ppm, float *pcurve);
    float getResistanceCalculation(int raw_adc);
};

#endif //Included_SensorBase_H
|
import VNode from '@/node';
import ArtRender from '@/renders/artRender';
import Cursor from '../../../cursor';
export class TableMoreTool {
moreDom: HTMLUListElement;
tableDom: HTMLTableElement;
tableNode: VNode;
thtdDom: Node;
thtdNode: VNode;
pos: { column: number, row: number };
artRender: ArtRender;
constructor() {
this.moreDom = null;
}
private createLi(text: string, fun: Function) {
let li = document.createElement('li');
li.className = 'art-floatTool-li';
li.innerHTML = text;
let _this = this;
function c() {
fun(_this);
_this.close();
}
li.onclick = c as any;
this.moreDom.appendChild(li);
}
public createDom(): HTMLUListElement {
this.moreDom = document.createElement('ul');
this.moreDom.style.display = 'none';
this.moreDom.setAttribute('class', 'art-floatTool');
this.createLi('上方插入一行', this.insertUpLine);
this.createLi('下方插入一行', this.insertDownLine);
let li = document.createElement('li');
li.className = 'art-divider';
this.moreDom.appendChild(li);
this.createLi('左边插入一列', this.insertLeftColumn);
this.createLi('右边插入一列', this.insertRightColumn);
li = document.createElement('li');
li.className = 'art-divider';
this.moreDom.appendChild(li);
this.createLi('删除行', this.deleteLine);
this.createLi('删除列', this.deleteColumn);
return this.moreDom;
}
private insertUpLine(_this: TableMoreTool) {
let refTr = _this.thtdDom.parentNode;
let selected: VNode;
let tr = new VNode("tr");
if (_this.tableDom.rows[0] == refTr) {
let tr2 = new VNode("tr"), child_th = _this.thtdNode.parent.firstChild;
for (let j = 0; j < refTr.childNodes.length && child_th; j++, child_th = child_th.next) {
let th = new VNode("th");
if (j == _this.pos.column)
selected = th;
tr.appendChild(th);
let td = new VNode("td");
let md = child_th.getMd();
if (md.length && md.charCodeAt(md.length - 1) === 10)
md = md.substring(0, md.length - 1);
td._string_content = md;
_this.artRender.interaction.parser.inlineParse(td);
tr2.appendChild(td);
let style = (<HTMLElement>refTr.childNodes[j]).getAttribute('style');
if (style) {
th._info.style = style;
td._info.style = style;
}
}
_this.artRender.operation.replace(tr, _this.tableNode.firstChild.firstChild)
_this.artRender.operation.insertBefore(tr2, _this.tableNode.lastChild.firstChild)
} else {
for (let j = 0; j < refTr.childNodes.length; j++) {
let td = new VNode("td");
if (j == _this.pos.column)
selected = td;
let style = (<HTMLElement>refTr.childNodes[j]).getAttribute('style');
if (style) {
td._info.style = style;
}
tr.appendChild(td);
}
_this.artRender.operation.insertBefore(tr, _this.thtdNode.parent)
}
_this.artRender.operation.update();
Cursor.setCursor(selected.dom, 0);
}
private insertDownLine(_this: TableMoreTool) {
let refTr = _this.thtdDom.parentNode;
let selected: VNode;
let tr = new VNode("tr");
for (let i = 0; i < refTr.childNodes.length; i++) {
let td = new VNode("td");
if (i == _this.pos.column)
selected = td;
tr.appendChild(td);
let style = (<HTMLElement>refTr.childNodes[i]).getAttribute('style');
if (style)
td._info.style = style;
}
if (_this.tableDom.rows[_this.tableDom.rows.length - 1] == refTr) {
_this.artRender.operation.appendChild(tr, _this.thtdNode.parent);
} else if (_this.tableDom.rows[0] == refTr) {
_this.artRender.operation.insertBefore(tr, _this.tableNode.lastChild.firstChild)
} else {
_this.artRender.operation.insertAfter(tr, _this.thtdNode.parent)
}
_this.artRender.operation.update();
Cursor.setCursor(selected.dom, 0);
}
private insertLeftColumn(_this: TableMoreTool) {
let selected: VNode, newNode: VNode;
let th = _this.tableNode.firstChild.firstChild.firstChild;
for (let i = 0; i < _this.pos.column; i++) {
th = th.next;
}
newNode = new VNode("th");
if (_this.pos.row === 0)
selected = newNode;
_this.artRender.operation.insertBefore(newNode, th);
let tr = _this.tableNode.lastChild.firstChild, td: VNode;
for (let j = 1; j < _this.tableDom.rows.length; j++, tr = tr.next) {
td = tr.firstChild;
for (let i = 0; i < _this.pos.column; i++) {
td = td.next;
}
newNode = new VNode("td");
if (_this.pos.row === j)
selected = newNode;
_this.artRender.operation.insertBefore(newNode, td);
}
_this.artRender.operation.update();
Cursor.setCursor(selected.dom, 0);
}
private insertRightColumn(_this: TableMoreTool) {
let selected: VNode, newNode: VNode;
let th = _this.tableNode.firstChild.firstChild.firstChild;
for (let i = 0; i < _this.pos.column; i++) {
th = th.next;
}
newNode = new VNode("th");
if (_this.pos.row === 0)
selected = newNode;
_this.artRender.operation.insertAfter(newNode, th);
let tr = _this.tableNode.lastChild.firstChild, td: VNode;
for (let j = 1; j < _this.tableDom.rows.length; j++, tr = tr.next) {
td = tr.firstChild;
for (let i = 0; i < _this.pos.column; i++) {
td = td.next;
}
newNode = new VNode("td");
if (_this.pos.row === j)
selected = newNode;
_this.artRender.operation.insertAfter(newNode, td);
}
_this.artRender.operation.update();
Cursor.setCursor(selected.dom, 0);
}
private deleteLine(_this: TableMoreTool) {
let selected: VNode, newNode: VNode;
if (_this.pos.row) {
let tr = _this.tableNode.lastChild.firstChild;
for (let i = 1; i < _this.pos.row; i++) {
tr = tr.next;
}
newNode = tr.next;
for (let i = 0; i < _this.pos.column; i++) {
newNode = newNode.next;
}
selected = newNode;
_this.artRender.operation.remove(tr);
} else {
let tr = new VNode("tr");
let td = _this.tableNode.lastChild.firstChild.firstChild;
while (td) {
let th = new VNode("th");
let style = (td.dom as HTMLElement).getAttribute('style');
if (style)
th._info.style = style;
let md = td.getMd();
if (md.length && md.charCodeAt(md.length - 1) === 10)
md = md.substring(0, md.length - 1);
th._string_content = md;
_this.artRender.interaction.parser.inlineParse(th);
tr.appendChild(th);
td = td.next;
}
_this.artRender.operation.replace(tr, _this.tableNode.firstChild.firstChild);
_this.artRender.operation.remove(_this.tableNode.lastChild.firstChild);
newNode = tr.firstChild;
for (let i = 0; i < _this.pos.column; i++) {
newNode = newNode.next;
}
selected = newNode;
}
_this.artRender.operation.update();
Cursor.setCursor(selected.dom, 0);
}
private deleteColumn(_this: TableMoreTool) {
let selected: VNode, newNode: VNode;
let th = _this.tableNode.firstChild.firstChild.firstChild;
for (let i = 0; i < _this.pos.column; i++) {
th = th.next;
}
if (_this.pos.row === 0) {
if (th.next)
selected = th.next;
else
selected = th.prev;
}
_this.artRender.operation.remove(th);
let tr = _this.tableNode.lastChild.firstChild, td: VNode;
for (let j = 1; j < _this.tableDom.rows.length; j++, tr = tr.next) {
td = tr.firstChild;
for (let i = 0; i < _this.pos.column; i++) {
td = td.next;
}
if (_this.pos.row === j) {
if (td.next)
selected = td.next;
else
selected = td.prev;
}
_this.artRender.operation.remove(td);
}
_this.artRender.operation.update();
Cursor.setCursor(selected.dom, 0);
}
/**
 * Shows the table "more" toolbar at the given coordinates and records which
 * table/cell the cursor is currently in.
 *
 * Resolves, in order:
 *  1. the DOM cell (`thtdDom`) by walking up from the selection anchor to a
 *     node whose parent is a TR;
 *  2. the table's VNode (`tableNode`) by index from the document root;
 *  3. the cell's VNode (`thtdNode`) plus its (row, column) position, first
 *     scanning the header row, then each body row.
 */
public open(artRender: ArtRender, detail: { xy: [number, number], table: HTMLTableElement }): void {
    this.artRender = artRender;
    let { anchorNode, anchorOffset } = Cursor.sel;
    Cursor.setCursor(anchorNode, anchorOffset);
    // Climb until the parent is a TR (i.e. `dom` is the TH/TD cell itself).
    let dom = anchorNode;
    while (dom.parentNode.nodeName != 'TR' && dom.parentNode != document.body) {
        dom = dom.parentNode;
    }
    if (dom.parentNode.nodeName == 'TR') {
        this.thtdDom = dom;
        // Position and reveal the toolbar.
        this.moreDom.style.display = 'inherit';
        this.moreDom.style.top = detail.xy[1].toString() + 'px';
        this.moreDom.style.left = detail.xy[0].toString() + 'px';
        // Locate the table VNode by walking `rowAnchorOffset` siblings from the root.
        let vnode = artRender.doc.firstChild, i = artRender.cursor.pos.rowAnchorOffset;
        while (--i != -1) {
            vnode = vnode.next;
        }
        this.tableNode = vnode;
        // childNodes[1] is presumably the <table> element inside the wrapper — TODO confirm.
        this.tableDom = this.tableNode.dom.childNodes[1] as HTMLTableElement;
        this.pos = { column: 0, row: 0 };
        // First try the header row (row 0).
        let th = this.tableNode.firstChild.firstChild.firstChild;
        while (th) {
            if (th.dom === artRender.cursor.pos.rowNode) {
                this.thtdNode = th;
                break;
            }
            this.pos.column++;
            th = th.next;
        }
        // Not in the header: scan body rows (row index starts at 1).
        if (!th) {
            let tr = this.tableNode.lastChild.firstChild, td: VNode;
            while (tr) {
                td = tr.firstChild;
                this.pos.row++;
                this.pos.column = 0;
                while (td) {
                    if (td.dom === artRender.cursor.pos.rowNode) {
                        this.thtdNode = td;
                        break;
                    }
                    this.pos.column++;
                    td = td.next;
                }
                if (td) {
                    break;
                }
                tr = tr.next;
            }
        }
    }
}
/** Hides the table "more" toolbar. */
public close(): void {
    this.moreDom.style.setProperty('display', 'none');
}
} |
def longest_word(words):
    """Return the first word of maximal length in *words*.

    Args:
        words: a non-empty sequence of strings.

    Returns:
        The first element whose length equals the maximum length
        (ties resolve to the earliest word, matching the original
        strict ``>`` comparison).

    Raises:
        ValueError: if *words* is empty.  (The original raised
            UnboundLocalError here because the result variable —
            which also shadowed the function's own name — was never
            assigned.)
    """
    if not words:
        raise ValueError("longest_word() arg is an empty sequence")
    # max() returns the first item with the maximal key, so tie
    # behaviour is identical to the original loop.
    return max(words, key=len)
print(longest_word([“hello”, “monday”, “class”])) |
// frontend/src/api/Board/GetBoard.tsx
import axios from "axios";
import { createContext, Dispatch, useReducer, useState } from "react";
import { useParams } from "react-router-dom";
import { useMountEffect } from "../../hooks/useMountEffect";
import BoardData from "../../pages/Project/Subpages/BoardPage";
import { Action, ActionTypes, boardContentReducer } from "../../reducers/BoardReducer";
import { NestedBoardTemplate } from "../../types/ModelContentTemplate";
// context for avoiding propagating function fetchBoard() for refreshing
// the board content to child components
// Default value is a no-op dispatch with null state; real values are
// injected by the Provider rendered in GetBoard below.
export const BoardReducerContext = createContext<{ boardState: any, dispatch: Dispatch<Action> }>({
    boardState: null,
    dispatch: () => null,
});
/**
 * Loads the content of the board identified by the `boardId` route param
 * into a reducer and renders it via BoardData, exposing the reducer's
 * dispatch to descendants through BoardReducerContext.
 */
const GetBoard = () => {
    const { boardId } = useParams<{ boardId: string }>();
    // Gate rendering until the first fetch has populated the reducer state.
    const [isLoaded, setIsLoaded] = useState<boolean>(false);
    const [boardState, dispatch] = useReducer(boardContentReducer, NestedBoardTemplate)
    // Runs once on mount only — NOTE(review): a change of `boardId` while
    // mounted will NOT refetch; confirm routing always remounts this component.
    useMountEffect(fetchBoard);
    // Fetches the nested board content and loads it into the reducer.
    function fetchBoard() {
        axios.get(`/boards/getContent/${boardId}`)
            .then(resp => {
                dispatch({ type: ActionTypes.FetchData, payload: resp.data });
                setIsLoaded(true);
            }).catch((err) => {
                console.log(err);
            });
    }
    // Persists an issue's column move on the server; presumably local state
    // is updated separately by the drag-and-drop handler — TODO confirm.
    function changeColumn(newColumnId: string, issueId: string) {
        const issueChanges = {
            columnId: newColumnId,
        }
        axios.post(`/issues/update/${issueId}`, issueChanges)
            .catch((err) => {
                console.log(err);
            });
    }
    return (
        <>
            {isLoaded &&
                <BoardReducerContext.Provider value={{ boardState, dispatch }}>
                    <BoardData board={boardState} changeColumn={changeColumn} />
                </BoardReducerContext.Provider>
            }
        </>
    );
}
export default GetBoard;
// player/src/test/java/fr/unice/polytech/si3/qgl/soyouz/classes/marineland/MarinTest.java
package fr.unice.polytech.si3.qgl.soyouz.classes.marineland;
import fr.unice.polytech.si3.qgl.soyouz.classes.marineland.entities.onboard.Gouvernail;
import fr.unice.polytech.si3.qgl.soyouz.classes.types.PosOnShip;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.*;
class MarinTest
{
Marin sailor;
@BeforeEach
void init()
{
sailor = new Marin(1, 3, 2, "<NAME>");
}
@Test
void getId()
{
assertNotEquals(0, sailor.getId());
assertEquals(1, sailor.getId());
assertNotEquals(2, sailor.getId());
}
@Test
void getX()
{
assertNotEquals(2, sailor.getX());
assertEquals(3, sailor.getX());
assertNotEquals(4, sailor.getX());
}
@Test
void setX()
{
sailor.setX(2);
assertNotEquals(1, sailor.getX());
assertEquals(2, sailor.getX());
assertNotEquals(3, sailor.getX());
}
@Test
void setY()
{
sailor.setY(2);
assertNotEquals(1, sailor.getY());
assertEquals(2, sailor.getY());
assertNotEquals(3, sailor.getY());
}
@Test
void isAbsPosReachableTest()
{
assertTrue(sailor.isAbsPosReachable(PosOnShip.of(3, 3)));
assertTrue(sailor.isAbsPosReachable(PosOnShip.of(6, 4)));
assertFalse(sailor.isAbsPosReachable(PosOnShip.of(7, 4)));
assertFalse(sailor.isAbsPosReachable(PosOnShip.of(6, 5)));
}
@Test
void numberExtraRoundsToReachEntity()
{
assertEquals(0, sailor.numberExtraRoundsToReachEntity(PosOnShip.of(3, 3)));
assertEquals(0, sailor.numberExtraRoundsToReachEntity(3, 3));
assertEquals(1, sailor.numberExtraRoundsToReachEntity(PosOnShip.of(8, 3)));
assertEquals(1, sailor.numberExtraRoundsToReachEntity(8, 3));
}
@Test
void getY()
{
assertNotEquals(1, sailor.getY());
assertEquals(2, sailor.getY());
assertNotEquals(3, sailor.getY());
}
@Test
void getPos()
{
assertNotEquals(PosOnShip.of(2, 2), sailor.getPos());
assertEquals(PosOnShip.of(3, 2), sailor.getPos());
assertNotEquals(PosOnShip.of(3, 3), sailor.getPos());
}
@Test
void getName()
{
assertEquals("<NAME>", sailor.getName());
}
@Test
void moveRelative()
{
sailor.moveRelative(PosOnShip.of(2, 1));
assertEquals(5, sailor.getX());
assertEquals(3, sailor.getY());
assertThrows(IllegalArgumentException.class,
() -> sailor.moveRelative(PosOnShip.of(4, 4))); //NOSONAR
}
@Test
void testMoveRelative()
{
sailor.moveRelative(PosOnShip.of(2, 1));
assertEquals(5, sailor.getX());
assertEquals(3, sailor.getY());
assertThrows(IllegalArgumentException.class,
() -> sailor.moveRelative(PosOnShip.of(4, 4))); //NOSONAR
}
@Test
void moveAbsolute()
{
sailor.moveAbsolute(PosOnShip.of(5, 3));
assertEquals(5, sailor.getX());
assertEquals(3, sailor.getY());
assertThrows(IllegalArgumentException.class, () -> sailor.moveAbsolute(PosOnShip.of(17,
//NOSONAR
6)));
}
@Test
void testMoveAbsolute()
{
sailor.moveAbsolute(PosOnShip.of(5, 3));
assertEquals(5, sailor.getX());
assertEquals(3, sailor.getY());
assertThrows(IllegalArgumentException.class, () -> sailor.moveAbsolute(PosOnShip.of(17,
//NOSONAR
6)));
}
@Test
void isRelPosReachable()
{
assertTrue(sailor.isRelPosReachable(PosOnShip.of(4, 1)));
assertFalse(sailor.isRelPosReachable(PosOnShip.of(4, 2)));
assertFalse(sailor.isRelPosReachable(PosOnShip.of(5, 1)));
}
@Test
void isAbsPosReachable()
{
assertTrue(sailor.isAbsPosReachable(3, 3));
assertTrue(sailor.isAbsPosReachable(6, 4));
assertFalse(sailor.isAbsPosReachable(7, 4));
assertFalse(sailor.isAbsPosReachable(6, 5));
}
@Test
void hashCodeTest()
{
assertEquals(sailor.hashCode(), sailor.hashCode());
assertNotEquals(sailor.hashCode(), new Marin(2, 3, 2, "<NAME>").hashCode());
}
@Test
void equalsTest()
{
boolean notEquals = sailor.equals(new Gouvernail(1, 2));
boolean equals = sailor.equals(sailor);
boolean notEquals2 = sailor.equals(new Marin(2, 3, 2, "<NAME>"));
assertTrue(equals);
assertFalse(notEquals);
assertFalse(notEquals2);
}
} |
package com.honeyedoak.ppksecuredjson.converter.processor;
import com.google.auto.service.AutoService;
import javax.annotation.processing.*;
import javax.lang.model.SourceVersion;
import javax.lang.model.element.Element;
import javax.lang.model.element.ElementKind;
import javax.lang.model.element.TypeElement;
import javax.lang.model.util.Elements;
import javax.lang.model.util.Types;
import javax.tools.Diagnostic;
import java.io.IOException;
import java.util.HashSet;
import java.util.Set;
import java.util.stream.Collectors;
/**
 * Annotation processor that generates code for every class annotated with
 * {@code @SecureJsonConverter}.
 *
 * <p>Fixes over the original: the repeated fully-qualified annotation name is
 * held once in {@link #ANNOTATION}; {@code getSupportedAnnotationTypes} no
 * longer builds a raw {@code Set<Class>} (an unchecked-generics smell with a
 * typo'd local) and returns an immutable singleton instead.
 */
@SupportedSourceVersion(SourceVersion.RELEASE_8)
@AutoService(Processor.class)
public class SecureJsonConverterProcessor extends AbstractProcessor {

    /** Single reference to the processed annotation type. */
    private static final Class<com.honeyedoak.securedjson.converter.annotation.SecureJsonConverter> ANNOTATION =
            com.honeyedoak.securedjson.converter.annotation.SecureJsonConverter.class;

    private Types typeUtils;
    private Elements elementUtils;
    private Filer filer;
    private Messager messager;

    @Override
    public synchronized void init(ProcessingEnvironment processingEnv) {
        super.init(processingEnv);
        typeUtils = processingEnv.getTypeUtils();
        elementUtils = processingEnv.getElementUtils();
        filer = processingEnv.getFiler();
        messager = processingEnv.getMessager();
    }

    /**
     * Processes all {@code @SecureJsonConverter}-annotated elements; only
     * classes are accepted. Always claims the annotation (returns true);
     * on any error the round is reported and processing stops.
     */
    @Override
    public boolean process(Set<? extends TypeElement> set, RoundEnvironment roundEnv) {
        for (Element annotatedElement : roundEnv.getElementsAnnotatedWith(ANNOTATION)) {
            // Only classes may carry the annotation.
            if (annotatedElement.getKind() != ElementKind.CLASS) {
                error(annotatedElement, "Only class can be annotated with @%s",
                        ANNOTATION.getSimpleName());
                return true; // Exit processing
            }
            TypeElement typeElement = (TypeElement) annotatedElement;
            try {
                SecureJsonConverterAnnotatedClass annotatedClass = new SecureJsonConverterAnnotatedClass(typeElement);
                if (!isValid(annotatedClass)) {
                    return true; // Exit processing
                }
                annotatedClass.generateCode(filer);
            } catch (IllegalArgumentException e) {
                error(annotatedElement, e.getMessage());
                return true; // Exit processing
            } catch (IOException e) {
                error(annotatedElement, "Failed to write java class generated by annotation @%s",
                        ANNOTATION.getSimpleName());
                return true; // Exit processing
            }
        }
        return true;
    }

    private boolean isValid(SecureJsonConverterAnnotatedClass annotatedClass) {
        //TODO
        return true;
    }

    /** Reports a formatted compiler error attached to element {@code e}. */
    private void error(Element e, String msg, Object... args) {
        messager.printMessage(
                Diagnostic.Kind.ERROR,
                String.format(msg, args),
                e);
    }

    @Override
    public Set<String> getSupportedAnnotationTypes() {
        // Java-8-safe immutable singleton; replaces the raw Set<Class> build-up.
        return Collections.singleton(ANNOTATION.getName());
    }
}
|
def min_max(array):
    """Return ``(minimum, maximum)`` of a non-empty sequence in one pass.

    Raises IndexError if *array* is empty (from the initial element access),
    matching the original behaviour.
    """
    smallest = largest = array[0]
    for value in array:
        if value > largest:
            largest = value
        if value < smallest:
            smallest = value
    return (smallest, largest)
package br.com.agateownz.foodsocial.modules.shared.service;
import br.com.agateownz.foodsocial.config.ApplicationProfiles;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.ActiveProfiles;
import static org.assertj.core.api.Assertions.assertThat;
@ActiveProfiles(ApplicationProfiles.TEST)
@SpringBootTest(classes = UuidService.class)
class UuidServiceTest {
@Autowired
private UuidService uuidService;
@DisplayName("getRandomUuuid should return different values each time")
@Test
public void getRandomUuidTest() {
var uuid1 = uuidService.getRandomUuuid();
var uuid2 = uuidService.getRandomUuuid();
assertThat(uuid1).isNotEqualTo(uuid2);
}
} |
# Recursively flattens an arbitrarily nested array into a flat array.
#
# Bug fixed: the original defined `flatten_array` twice (Ruby has no
# overloading, so the 2-arity definition replaced the 1-arity one), making
# the documented 1-argument call raise ArgumentError. A default parameter
# restores both call shapes with a single definition.
#
#   flatten_array([[1, 2], [3, [4]]])  #=> [1, 2, 3, 4]
def flatten_array(nest_array, flattened_array = [])
  nest_array.each do |element|
    if element.is_a?(Array)
      flatten_array(element, flattened_array)
    else
      flattened_array.push(element)
    end
  end
  flattened_array
end
multidimensional_array = [[1, 2, 3], [4, 5, 6], [7, 8, 9]]
result = flatten_array(multidimensional_array)
# `p` prints the array literal; the original `puts` printed one element per
# line, contradicting the documented output below.
p result
# Output: [1, 2, 3, 4, 5, 6, 7, 8, 9]
// src/ompl/multilevel/datastructures/src/BundleSpace.cpp
/*********************************************************************
* Software License Agreement (BSD License)
*
* Copyright (c) 2020,
* Max Planck Institute for Intelligent Systems (MPI-IS).
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution.
* * Neither the name of the MPI-IS nor the names
* of its contributors may be used to endorse or promote products
* derived from this software without specific prior written
* permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
* ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*********************************************************************/
/* Author: <NAME> */
#include <ompl/multilevel/datastructures/BundleSpace.h>
#include <ompl/multilevel/datastructures/Projection.h>
#include <ompl/base/objectives/PathLengthOptimizationObjective.h>
#include <ompl/base/goals/GoalSampleableRegion.h>
#include <ompl/control/SpaceInformation.h>
#include <ompl/base/spaces/SO2StateSpace.h>
#include <ompl/base/spaces/SO3StateSpace.h>
#include <ompl/base/spaces/RealVectorStateSpace.h>
#include <ompl/base/spaces/TimeStateSpace.h>
#include <ompl/base/spaces/DiscreteStateSpace.h>
#include <ompl/tools/config/MagicConstants.h>
#include <ompl/util/Exception.h>
#include <cmath> //to use isnan(d)
using namespace ompl::base;
using namespace ompl::multilevel;
// Monotonically increasing id source shared by all BundleSpace instances.
unsigned int BundleSpace::counter_ = 0;

/** Constructs a bundle space over \a si, optionally stacked on \a child.
 *  When a child is given, this space's base is the child's bundle space and
 *  the parent/child links plus a temporary base state are initialised. */
BundleSpace::BundleSpace(const SpaceInformationPtr &si, BundleSpace *child)
  : Planner(si, "BundleSpace"), childBundleSpace_(child), totalSpace_(si)
{
    id_ = counter_++;
    if (child)
    {
        // Base of this bundle is the child's total (bundle) space.
        baseSpace_ = childBundleSpace_->getBundle();
        childBundleSpace_->setParent(this);
        xBaseTmp_ = getBase()->allocState();
    }
    std::stringstream ss;
    ss << (*this);
    OMPL_DEBUG(ss.str().c_str());
    // Create samplers lazily, only if not already present.
    if (!Bundle_valid_sampler_)
    {
        Bundle_valid_sampler_ = getBundle()->allocValidStateSampler();
    }
    if (!Bundle_sampler_)
    {
        Bundle_sampler_ = getBundle()->allocStateSampler();
    }
    xBundleTmp_ = getBundle()->allocState();
}
BundleSpace::~BundleSpace()
{
    // Release the temporary states allocated in the constructor.
    if (hasBaseSpace() && xBaseTmp_ != nullptr)
    {
        getBase()->freeState(xBaseTmp_);
    }
    if (xBundleTmp_ != nullptr)
    {
        getBundle()->freeState(xBundleTmp_);
    }
}
bool BundleSpace::makeProjection()
{
ProjectionFactory projectionFactory;
projection_ = projectionFactory.makeProjection(getBundle(), getBase());
if (!projection_)
return false;
sanityChecks();
return true;
}
/** True once a child bundle space has supplied a base space. */
bool BundleSpace::hasBaseSpace() const
{
    return baseSpace_ != nullptr;
}
/** Default implementation: no explicit section (lift of a base path) is
 *  computed; subclasses override to provide one. */
bool BundleSpace::findSection()
{
    return false;
}
/** True when another bundle space is stacked on top of this one. */
bool BundleSpace::hasParent() const
{
    return parentBundleSpace_ != nullptr;
}
/** Whether this bundle space operates on a control-based (dynamic) system. */
bool BundleSpace::isDynamic() const
{
    return isDynamic_;
}
/** Planner setup: resets solution/first-run flags and installs a default
 *  path-length optimization objective (with infinite cost threshold, i.e.
 *  any solution is acceptable) when the problem definition has none. */
void BundleSpace::setup()
{
    BaseT::setup();
    hasSolution_ = false;
    firstRun_ = true;
    if (pdef_)
    {
        if (!pdef_->hasOptimizationObjective())
        {
            OptimizationObjectivePtr lengthObj = std::make_shared<base::PathLengthOptimizationObjective>(getBundle());
            lengthObj->setCostThreshold(base::Cost(std::numeric_limits<double>::infinity()));
            pdef_->setOptimizationObjective(lengthObj);
        }
    }
    else
    {
        OMPL_ERROR("Called without ProblemDefinitionPtr");
        // NOTE(review): throws a raw string literal rather than ompl::Exception,
        // unlike sanityChecks() below — confirm whether callers rely on this.
        throw "NoProblemDef";
    }
}
/** Returns the problem definition's goal as a sampleable region.
 *  NOTE: the static_cast assumes the goal actually is sampleable. */
GoalSampleableRegion *BundleSpace::getGoalPtr() const
{
    return static_cast<base::GoalSampleableRegion *>(pdef_->getGoal().get());
}
/** Clears planner state: resets the solution/first-run flags and removes any
 *  previously found solution paths from the problem definition. */
void BundleSpace::clear()
{
    BaseT::clear();
    hasSolution_ = false;
    firstRun_ = true;
    pdef_->clearSolutionPaths();
}
/** Verifies that bundle (and, if present, base) spaces have finite measure
 *  and that the projection dimension matches the bundle dimension.
 *  Throws ompl::Exception on violation. */
void BundleSpace::sanityChecks() const
{
    const StateSpacePtr Bundle_space = getBundle()->getStateSpace();
    checkBundleSpaceMeasure("Bundle", Bundle_space);
    if (hasBaseSpace())
    {
        const StateSpacePtr Base_space = getBase()->getStateSpace();
        checkBundleSpaceMeasure("Base", Base_space);
        if (getProjection()->getDimension() != getBundleDimension())
        {
            throw Exception("BundleSpace Dimensions are wrong.");
        }
    }
}
/** Logs dimension/measure of \a space and throws if the measure is infinite
 *  (sampling-based planning requires a finite-measure space). */
void BundleSpace::checkBundleSpaceMeasure(std::string name, const StateSpacePtr space) const
{
    OMPL_DEVMSG1("%s dimension: %d measure: %f", name.c_str(), space->getDimension(), space->getMeasure());
    if ((space->getMeasure() >= std::numeric_limits<double>::infinity()))
    {
        throw Exception("Space infinite measure.");
    }
}
/** A single bundle space is never solved directly; always throws.
 *  Use BundleSpaceSequence to solve a hierarchy of bundle spaces. */
PlannerStatus BundleSpace::solve(const PlannerTerminationCondition &)
{
    throw Exception("A Bundle-Space cannot be solved alone. \
        Use class BundleSpaceSequence to solve Bundle-Spaces.");
}
/** Forwards the problem definition to the base Planner. */
void BundleSpace::setProblemDefinition(const ProblemDefinitionPtr &pdef)
{
    BaseT::setProblemDefinition(pdef);
}
/** Resets the global id counter (useful between independent planner runs). */
void BundleSpace::resetCounter()
{
    BundleSpace::counter_ = 0;
}
/** Installs an externally created projection.
 *  Fix: validate the argument BEFORE mutating state. The original assigned
 *  first and then threw, leaving projection_ null on a failed call; now a
 *  rejected call leaves any previously installed projection intact.
 *  The thrown type (string literal) is kept for caller compatibility. */
void BundleSpace::setProjection(ProjectionPtr projection)
{
    if (projection == nullptr)
    {
        OMPL_ERROR("Projection is nullptr.");
        throw "Projection is nullptr.";
    }
    projection_ = projection;
}
/** Returns the bundle-to-base projection (may be null before makeProjection). */
ProjectionPtr BundleSpace::getProjection() const
{
    return projection_;
}
/** Writes an "identity" value into state \a s of \a space, recursing through
 *  compound spaces. Identity means: SO(2)/SO(3) identity rotation, time 0,
 *  lowest discrete value, and 0 for real vectors (or the interval midpoint
 *  when 0 lies outside the bounds). Throws on unsupported space types. */
void BundleSpace::allocIdentityState(State *s, StateSpacePtr space) const
{
    if (space->isCompound())
    {
        // Recurse into every subspace of the compound state.
        CompoundStateSpace *cspace = space->as<CompoundStateSpace>();
        const std::vector<StateSpacePtr> compounds = cspace->getSubspaces();
        for (unsigned int k = 0; k < compounds.size(); k++)
        {
            StateSpacePtr spacek = compounds.at(k);
            State *xk = s->as<CompoundState>()->as<State>(k);
            allocIdentityState(xk, spacek);
        }
    }
    else
    {
        int stype = space->getType();
        switch (stype)
        {
            case STATE_SPACE_SO3:
            {
                static_cast<SO3StateSpace::StateType *>(s)->setIdentity();
                break;
            }
            case STATE_SPACE_SO2:
            {
                static_cast<SO2StateSpace::StateType *>(s)->setIdentity();
                break;
            }
            case STATE_SPACE_TIME:
            {
                static_cast<TimeStateSpace::StateType *>(s)->position = 0;
                break;
            }
            case STATE_SPACE_DISCRETE:
            {
                // Lowest admissible discrete value serves as identity.
                DiscreteStateSpace *space_Discrete = space->as<DiscreteStateSpace>();
                int lb = space_Discrete->getLowerBound();
                static_cast<DiscreteStateSpace::StateType *>(s)->value = lb;
                break;
            }
            case STATE_SPACE_REAL_VECTOR:
            {
                RealVectorStateSpace::StateType *sRN = s->as<RealVectorStateSpace::StateType>();
                RealVectorStateSpace *RN = space->as<RealVectorStateSpace>();
                const std::vector<double> &bl = RN->getBounds().low;
                const std::vector<double> &bh = RN->getBounds().high;
                for (unsigned int k = 0; k < space->getDimension(); k++)
                {
                    double &v = sRN->values[k];
                    v = 0.0;
                    // if zero is not valid, use mid point as identity
                    if (v < bl.at(k) || v > bh.at(k))
                    {
                        v = bl.at(k) + 0.5 * (bh.at(k) - bl.at(k));
                    }
                }
                break;
            }
            default:
            {
                OMPL_ERROR("Type: %d not recognized.", stype);
                throw Exception("Type not recognized.");
            }
        }
    }
}
/** Allocates a fresh state in \a space initialised to the identity value;
 *  returns nullptr when \a space is null. Caller owns the state. */
State *BundleSpace::allocIdentityState(StateSpacePtr space) const
{
    if (space == nullptr)
    {
        return nullptr;
    }
    State *state = space->allocState();
    allocIdentityState(state, space);
    return state;
}
/** Allocates an identity state in the bundle (total) space. */
State *BundleSpace::allocIdentityStateBundle() const
{
    return allocIdentityState(getBundle()->getStateSpace());
}
/** Allocates an identity state in the base space. */
State *BundleSpace::allocIdentityStateBase() const
{
    return allocIdentityState(getBase()->getStateSpace());
}
/** Space information of the bundle (total) space. */
const SpaceInformationPtr &BundleSpace::getBundle() const
{
    return totalSpace_;
}
/** Space information of the base space (null at the bottom of the stack). */
const SpaceInformationPtr &BundleSpace::getBase() const
{
    return baseSpace_;
}
/** Dimension of the base space; zero when no base exists. */
unsigned int BundleSpace::getBaseDimension() const
{
    return getBase() ? getBase()->getStateDimension() : 0;
}
/** Dimension of the bundle (total) space. */
unsigned int BundleSpace::getBundleDimension() const
{
    return getBundle()->getStateDimension();
}
/** Fiber dimension: bundle dimension minus base dimension. */
unsigned int BundleSpace::getCoDimension() const
{
    return getBundleDimension() - getBaseDimension();
}
/** Sampler for the base space, obtained as the child's bundle sampler
 *  (the child's bundle IS this space's base). Throws when this space sits
 *  at the bottom of the hierarchy and has no base. */
const StateSamplerPtr &BundleSpace::getBaseSamplerPtr() const
{
    if (hasBaseSpace())
    {
        return getChild()->getBundleSamplerPtr();
    }
    else
    {
        OMPL_ERROR("Cannot get Base Sampler without Base Space.");
        throw Exception("Tried Calling Non-existing base space sampler");
    }
}
/** Uniform sampler for the bundle space (created in the constructor). */
const StateSamplerPtr &BundleSpace::getBundleSamplerPtr() const
{
    return Bundle_sampler_;
}
/** Default: the space is never proven infeasible; subclasses may override. */
bool BundleSpace::isInfeasible()
{
    return false;
}
/** Default: convergence is never declared; subclasses may override. */
bool BundleSpace::hasConverged()
{
    return false;
}
/** Lazily checks (and caches) whether a solution path exists. */
bool BundleSpace::hasSolution()
{
    if (hasSolution_)
    {
        return true;
    }
    PathPtr path;
    hasSolution_ = getSolution(path);
    return hasSolution_;
}
/** Child bundle space (the level below; its bundle is this space's base). */
BundleSpace *BundleSpace::getChild() const
{
    return childBundleSpace_;
}
void BundleSpace::setChild(BundleSpace *child)
{
    childBundleSpace_ = child;
}
/** Parent bundle space (the level stacked above this one). */
BundleSpace *BundleSpace::getParent() const
{
    return parentBundleSpace_;
}
void BundleSpace::setParent(BundleSpace *parent)
{
    parentBundleSpace_ = parent;
}
/** Position of this space in the bundle-space hierarchy. */
unsigned int BundleSpace::getLevel() const
{
    return level_;
}
void BundleSpace::setLevel(unsigned int level)
{
    level_ = level;
}
/** Optimization objective from the problem definition (installed in setup()). */
OptimizationObjectivePtr BundleSpace::getOptimizationObjectivePtr() const
{
    return pdef_->getOptimizationObjective();
}
/** Samples bundle states until a valid one is found or the attempt budget
 *  (magic::FIND_VALID_STATE_ATTEMPTS_WITHOUT_TERMINATION_CHECK) runs out.
 *  Always samples at least once; returns whether \a xRandom is valid. */
bool BundleSpace::sampleBundleValid(State *xRandom)
{
    bool found = false;
    unsigned int attempts = 0;
    do
    {
        sampleBundle(xRandom);
        found = getBundle()->getStateValidityChecker()->isValid(xRandom);
        attempts++;
    } while (attempts < magic::FIND_VALID_STATE_ATTEMPTS_WITHOUT_TERMINATION_CHECK && !found);
    return found;
}
/** Samples a bundle state into \a xRandom.
 *  Without a base space this is a plain uniform sample. With a base space
 *  and a non-trivial fiber, a base sample is drawn from the child's
 *  datastructure and lifted into the bundle; with a zero-codimension
 *  projection the child's sample is used directly. */
void BundleSpace::sampleBundle(State *xRandom)
{
    if (!hasBaseSpace())
    {
        Bundle_sampler_->sampleUniform(xRandom);
    }
    else
    {
        if (getProjection()->getCoDimension() > 0)
        {
            // Adjusted sampling function: Sampling in G0 x Fiber
            getChild()->sampleFromDatastructure(xBaseTmp_);
            getProjection()->lift(xBaseTmp_, xRandom);
        }
        else
        {
            getChild()->sampleFromDatastructure(xRandom);
        }
    }
}
/** Lifts a base-space state into the bundle via the projection. */
void BundleSpace::lift(const ompl::base::State *xBase, ompl::base::State *xBundle) const
{
    projection_->lift(xBase, xBundle);
}
/** Projects a bundle-space state down onto the base via the projection. */
void BundleSpace::project(const ompl::base::State *xBundle, ompl::base::State *xBase) const
{
    projection_->project(xBundle, xBase);
}
/** Streams a representation of this bundle space.
 *  NOTE(review): this inserts the ProjectionPtr (a shared_ptr); unless an
 *  operator<< overload exists for that pointer type, this prints a raw
 *  address — `*getProjection()` may have been intended. Confirm. */
void BundleSpace::print(std::ostream &out) const
{
    out << getProjection();
}
namespace ompl
{
    namespace multilevel
    {
        /** Stream insertion for BundleSpace; delegates to print(). */
        std::ostream &operator<<(std::ostream &out, const BundleSpace &bundleSpace)
        {
            bundleSpace.print(out);
            return out;
        }
    }
}
|
/**
 * Simple mutable data holder for a student record (id, name, course).
 *
 * <p>Fix: {@code toString()} now carries {@code @Override}; the output
 * format is kept byte-identical to the original.
 */
public class Student {
    private int id;
    private String name;
    private String course;

    /**
     * Creates a student.
     *
     * @param id     unique identifier
     * @param name   full name
     * @param course enrolled course
     */
    public Student(int id, String name, String course) {
        this.id = id;
        this.name = name;
        this.course = course;
    }

    public int getId() {
        return id;
    }

    public void setId(int id) {
        this.id = id;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getCourse() {
        return course;
    }

    public void setCourse(String course) {
        this.course = course;
    }

    @Override
    public String toString() {
        return "Student{" + "id=" + id + ", name=" + name +
                ", course=" + course + '}';
    }
}
// NTJBilateralCIFilteriOS/NTJBilateralCIFilteriOS.h
//
// NTJBilateralCIFilteriOS.h
// NTJBilateralCIFilter
//
// Created by <NAME> on 17/10/17.
// Copyright © 2017 nojo inc. All rights reserved.
//
#ifndef NTJBilateralCIFilteriOS_h
#define NTJBilateralCIFilteriOS_h
#import "NTJBilateralCIFilter.h"
#endif /* NTJBilateralCIFilteriOS_h */
|
// src/utils/lib/analytics/src/ga.js
import _get from 'lodash.get';
import axios from 'axios';
import analyticsWrapper from '../../../../helpers/analytics/analyticsWrapper';
// GA event types accepted by makeGAForVideoEvents.
// NOTE(review): this list also contains the AD* events — presumably video
// tracking mirrors ad milestones; confirm the overlap is intended.
const validVideoEventsMap = [
  'VIDEOREQUEST',
  'VIDEOLOADREQUEST',
  'VIDEOREADY',
  'VIDEOVIEW',
  'VIDEOCOMPLETE',
  'ADREQUEST',
  'ADLOADED',
  'ADCOMPLETE',
  'ADSKIP',
  'ADVIEW',
  'ADERROR',
];
// GA event types accepted by makeGAForAdEvents.
const validAdEventsMap = [
  'ADREQUEST',
  'ADLOADED',
  'ADCOMPLETE',
  'ADSKIP',
  'ADVIEW',
  'ADERROR',
];
// Maps the short slot keys used in event payloads to GA labels.
const adTypeConfig = {
  pre: 'Preroll',
  post: 'Postroll',
};
// Module-level mutable state: caches the metadata of the video currently
// being tracked so repeated events for the same msid skip the feed fetch.
const TimesApps = {
  playingSubsequentVideo: false,
  currentVideoData: null,
};
// Player lifecycle flags (exported for consumers of this module).
TimesApps.FLAGS = {
  READY: 'READY',
  PLAYING: 'PLAYING',
  PAUSED: 'PAUSED',
  COMPLETED: 'COMPLETED',
  AD_COMPLETED: 'AD_COMPLETED',
  REPLAY: 'REPLAY',
  NEXT_VIDEO: 'NEXT_VIDEO',
};
// Fetches video metadata for `msid` from the feeds endpoint and forwards it,
// together with the original GA call parameters, to videoDataReceivedCallback.
// NOTE(review): axios.get is invoked with three arguments; plain axios
// ignores everything after the config object, so the 'skipfeedengine' flag
// presumably targets a patched axios instance — confirm.
TimesApps.getVideoData = (msid, configObj) => {
  axios
    .get(
      `/feeds/videomediainfo_v1/msid-${msid},feedtype-json.cms`,
      {},
      'skipfeedengine',
    )
    .then(res => {
      const { item } = res && res.data;
      // Nothing to track when the feed returned no item.
      if (!item) {
        return;
      }
      TimesApps.videoDataReceivedCallback(item, configObj);
    })
    .catch(err => {
      console.log(err);
    });
};
// Caches a copy of the fetched video metadata and fires the pending GA
// event; bails out silently on a missing/empty payload.
TimesApps.videoDataReceivedCallback = (data, configObj) => {
  const isEmpty = !data || Object.keys(data).length === 0;
  if (isEmpty) return;
  TimesApps.currentVideoData = Object.assign({}, data);
  TimesApps.fireGAForVideoEvents(configObj);
};
// Entry point for video GA events. Validates the event type and the
// presence of window.ga, then either reuses the cached metadata (same msid)
// or fetches fresh metadata before firing the event.
TimesApps.makeGAForVideoEvents = ({ eventData, eventType, player }) => {
  const { msid } = eventData;
  if (!msid) return;
  // ga may live on the parent frame when running inside an iframe.
  const ga = window.ga || window.parent.ga;
  window.TimesApps = TimesApps;
  if (
    typeof ga === 'undefined' ||
    validVideoEventsMap.indexOf(eventType) === -1
  ) {
    return;
  }
  if (TimesApps.currentVideoData === null) {
    // First event ever: fetch metadata, then fire.
    TimesApps.getVideoData(msid, {
      eventData,
      eventType,
      player,
    });
  } else {
    const { msid: _msid } = TimesApps.currentVideoData;
    if (_msid !== msid) {
      // Different video: refresh the cache before firing.
      TimesApps.getVideoData(msid, {
        eventData,
        eventType,
        player,
      });
    } else {
      // Same video: fire immediately from the cache.
      TimesApps.fireGAForVideoEvents({
        eventData,
        eventType,
        player,
      });
    }
  }
};
// Sends the GA event using the cached video metadata: category = event type,
// action = the video's SEO path, label chosen by getEventLabel.
TimesApps.fireGAForVideoEvents = ({ eventType }) => {
  const newEventType = eventType ? eventType.toUpperCase() : '';
  const { seopath, title } = TimesApps.currentVideoData;
  const _title = TimesApps.getEventLabel({ eventType, title });
  analyticsWrapper('gaAndGrx', 'send', {
    hitType: 'event',
    eventCategory: newEventType,
    eventAction: seopath,
    eventLabel: _title,
  });
};
// Label for the GA hit: the video title for view/ready/complete events,
// a fixed 'amp_videoshow' marker for everything else.
TimesApps.getEventLabel = ({ eventType, title }) => {
  const titledEvents = ['VIDEOCOMPLETE', 'VIDEOREADY', 'VIDEOVIEW'];
  return titledEvents.indexOf(eventType) !== -1 ? title : 'amp_videoshow';
};
// Entry point for ad GA events: category = mapped event name, action =
// Preroll/Postroll, label = the media title read from the player store.
TimesApps.makeGAForAdEvents = ({ eventData = {}, eventType, player }) => {
  const { msid } = eventData;
  if (!msid) return;
  const title = _get(player, 'store.mediaConfig.title', {}) || '';
  // ga may live on the parent frame when running inside an iframe.
  const ga = window.ga || window.parent.ga;
  if (typeof ga === 'undefined' || validAdEventsMap.indexOf(eventType) === -1) {
    return;
  }
  const { type } = eventData;
  // Only 'pre'/'post' slots are tracked; anything else is dropped below.
  const adType = adTypeConfig[type] ? adTypeConfig[type] : '';
  const eventTypeConfig = {
    ADVIEW: 'AdView',
    ADCOMPLETE: 'AdComplete',
    ADSKIP: 'AdSkip',
    ADLOADED: 'AdLoaded',
    ADREQUEST: 'AdRequest',
    ADERROR: 'AdError',
  };
  if (adType) {
    // NOTE(review): this pipe-delimited indexOf guard is redundant — every
    // key of eventTypeConfig already appears in the string, and eventType
    // was validated against validAdEventsMap above. Presumably legacy.
    if (
      '|AdRequest|AdLoaded|AdView|AdSkip|AdComplete|AdError|'.indexOf(
        eventTypeConfig[eventType],
      ) !== -1
    ) {
      analyticsWrapper('gaAndGrx', 'send', {
        hitType: 'event',
        eventCategory: eventTypeConfig[eventType],
        eventAction: adType,
        eventLabel: title,
      });
    }
  }
};
export default TimesApps;
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.