text
stringlengths 1
1.05M
|
|---|
#!/bin/bash
# Build the CryptoAC OPA Docker image from DockerFileOPA in the current dir.
# newgrp runs the heredoc in a subshell with the 'docker' group active, so
# the build works even if this login session predates the user's addition
# to the docker group.
newgrp docker << END
docker build -t cryptoac_opa --file ./DockerFileOPA .
END
|
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# no public APIs here
# SHELLDOC-IGNORE
# this bug system handles JIRA. Personalities
# can override the following variables:
# base JIRA URL
JIRA_URL=${JIRA_URL:-"https://issues.apache.org/jira"}
# Issue regex to help identify the project
# (empty by default: issue detection is disabled until a personality sets it)
JIRA_ISSUE_RE=''
# If the issue status is matched with this pattern, the attached patch is regarded as ready to be applied
JIRA_STATUS_RE='Patch Available'
# register this plugin with the bug-system framework
add_bugsystem jira
# Simple function to set a default JIRA user after PROJECT_NAME has been set
## @description Default JIRA_USER to "<project>qa" once PROJECT_NAME is known.
## @description Does nothing when PROJECT_NAME is unset or still "unknown",
## @description and never overrides a JIRA_USER the caller already set.
function jira_set_jira_user
{
  # guard clause: bail out unless we have a real project name
  if [[ -z "${PROJECT_NAME}" || "${PROJECT_NAME}" = unknown ]]; then
    return
  fi
  JIRA_USER=${JIRA_USER:-"${PROJECT_NAME}qa"}
}
## @description Register this plugin's command-line options for --help output.
## @audience private
function jira_usage
{
# populate the JIRA_USER default so the help text can show it
jira_set_jira_user
yetus_add_option "--jira-base-url=<url>" "The URL of the JIRA server (default:'${JIRA_URL}')"
yetus_add_option "--jira-issue-re=<expr>" "Bash regular expression to use when trying to find a jira ref in the patch name (default: '${JIRA_ISSUE_RE}')"
yetus_add_option "--jira-password=<pw>" "The password for accessing JIRA"
yetus_add_option "--jira-status-re=<expr>" "Grep regular expression representing the issue status whose patch is applicable to the codebase (default: '${JIRA_STATUS_RE}')"
yetus_add_option "--jira-user=<user>" "The user to access JIRA command (default: ${JIRA_USER})"
}
## @description Consume this plugin's --jira-* options from the command line.
## @param command-line arguments
function jira_parse_args
{
declare i
# make sure the JIRA_USER default is in place before overrides are applied
jira_set_jira_user
for i in "$@"; do
case ${i} in
--jira-base-url=*)
# delete_parameter presumably removes the option from the stored
# command line (framework-defined elsewhere) -- relevant here since
# --jira-password carries a secret
delete_parameter "${i}"
JIRA_URL=${i#*=}
;;
--jira-issue-re=*)
delete_parameter "${i}"
JIRA_ISSUE_RE=${i#*=}
;;
--jira-password=*)
delete_parameter "${i}"
JIRA_PASSWD=${i#*=}
;;
--jira-status-re=*)
delete_parameter "${i}"
JIRA_STATUS_RE=${i#*=}
;;
--jira-user=*)
delete_parameter "${i}"
JIRA_USER=${i#*=}
;;
esac
done
}
## @description provides issue determination based upon the URL and more.
## @description WARNING: called from the github plugin!
## @param patch file name or URL to examine
## @return 0 when JIRA_ISSUE (and ISSUE) have been set, 1 otherwise
function jira_determine_issue
{
declare input=$1
declare patchnamechunk
declare maybeissue
# already determined on an earlier call; keep the cached value
if [[ -n ${JIRA_ISSUE} ]]; then
return 0
fi
# without a project issue regex we cannot recognize anything
if [[ -z "${JIRA_ISSUE_RE}" ]]; then
return 1
fi
# shellcheck disable=SC2016
# strip any URL/path down to its last component
patchnamechunk=$(echo "${input}" | "${AWK}" -F/ '{print $NF}')
# keep only the leading "PROJECT-NUMBER" (first two dash-separated fields)
maybeissue=$(echo "${patchnamechunk}" | cut -f1,2 -d-)
if [[ ${maybeissue} =~ ${JIRA_ISSUE_RE} ]]; then
# shellcheck disable=SC2034
ISSUE=${maybeissue}
JIRA_ISSUE=${maybeissue}
add_footer_table "JIRA Issue" "${JIRA_ISSUE}"
return 0
fi
return 1
}
## @description Fetch ${JIRA_URL}/$1 into local file $2 with curl, using
## @description JIRA_USER/JIRA_PASSWD basic auth when both are set.
## @param relative URL under JIRA_URL
## @param output filename
## @return curl's exit code (0 on success)
function jira_http_fetch
{
declare input=$1
declare output=$2
declare ec
yetus_debug "jira_http_fetch: ${JIRA_URL}/${input}"
# authenticated fetch only when both halves of the credential exist
if [[ -n "${JIRA_USER}"
&& -n "${JIRA_PASSWD}" ]]; then
"${CURL}" --silent --fail \
--user "${JIRA_USER}:${JIRA_PASSWD}" \
--output "${output}" \
--location \
"${JIRA_URL}/${input}"
else
"${CURL}" --silent --fail \
--output "${output}" \
--location \
"${JIRA_URL}/${input}"
fi
ec=$?
# translate the common curl exit codes into actionable debug hints;
# --fail above maps HTTP 4xx responses to exit code 22
case "${ec}" in
"0")
;;
"1")
yetus_debug "jira_http_fetch: Unsupported protocol. Maybe misspelled jira's url?"
;;
"3")
yetus_debug "jira_http_fetch: ${JIRA_URL}/${input} url is malformed."
;;
"6")
yetus_debug "jira_http_fetch: Could not resolve host in URL ${JIRA_URL}."
;;
"22")
yetus_debug "jira_http_fetch: ${JIRA_URL}/${input} returned 4xx status code. Maybe incorrect username/password?"
;;
*)
yetus_debug "jira_http_fetch: ${JIRA_URL}/${input} returned $ec error code. See https://ec.haxx.se/usingcurl-returns.html for details."
;;
esac
return ${ec}
}
## @description Given an issue reference, locate and download its patch.
## @description May hand off to the github plugin when the issue is only a
## @description pointer to a GitHub PR.
## @param issue reference (e.g. PROJECT-1234)
## @param output file for the patch
## @param output file for the diff (passed through to github_jira_bridge)
## @return 0 on success (PATCHURL set), 1 on failure; may exit via cleanup_and_exit
function jira_locate_patch
{
declare input=$1
declare patchout=$2
declare diffout=$3
declare jsonloc
declare relativeurl
declare retval
declare found=false
yetus_debug "jira_locate_patch: trying ${JIRA_URL}/browse/${input}"
if [[ "${OFFLINE}" == true ]]; then
yetus_debug "jira_locate_patch: offline, skipping"
return 1
fi
# the HTML issue page is needed later for status + attachment scraping
if ! jira_http_fetch "browse/${input}" "${PATCH_DIR}/jira"; then
yetus_debug "jira_locate_patch: not a JIRA."
return 1
fi
# if github is configured check to see if there is a URL in the text
# that is a github patch file or pull request
if [[ -n "${GITHUB_BASE_URL}" ]]; then
jira_determine_issue "${input}"
# Download information via REST API
jsonloc="${PATCH_DIR}/jira-json"
jira_http_fetch "rest/api/2/issue/${input}" "${jsonloc}"
# Parse the downloaded information to check if the issue is
# just a pointer to GitHub.
if github_jira_bridge "${jsonloc}" "${patchout}" "${diffout}"; then
echo "${input} appears to be a Github PR. Switching Modes."
return 0
fi
yetus_debug "jira_locate_patch: ${input} seemed like a Github PR, but there was a failure."
fi
# Not reached if there is a successful github plugin return
# status check: in robot (CI) mode a non-matching status is fatal,
# interactively it is only a warning
if [[ $("${GREP}" -c "${JIRA_STATUS_RE}" "${PATCH_DIR}/jira") == 0 ]]; then
if [[ ${ROBOT} == true ]]; then
yetus_error "ERROR: ${input} issue status is not matched with \"${JIRA_STATUS_RE}\"."
cleanup_and_exit 1
else
yetus_error "WARNING: ${input} issue status is not matched with \"${JIRA_STATUS_RE}\"."
fi
fi
# See https://jira.atlassian.com/browse/JRA-27637 as why we can't use
# the REST interface here. :(
# the assumption here is that attachment id's are given in an
# ascending order. so bigger # == newer file
#shellcheck disable=SC2016
# scrape /secure/attachment/<id>/<name> URLs out of the HTML, drop html
# attachments, and sort newest (largest id) first
tr '>' '\n' < "${PATCH_DIR}/jira" \
| "${AWK}" 'match($0,"/secure/attachment/[0-9]*/[^\"]*"){print substr($0,RSTART,RLENGTH)}' \
| "${GREP}" -v -e 'htm[l]*$' \
| "${SED}" -e 's,[ ]*$,,g' \
| sort -n -r -k4 -t/ \
| uniq \
> "${PATCH_DIR}/jira-attachments.txt"
echo "${input} patch is being downloaded at $(date) from"
# walk the candidate attachments newest-first until one downloads
while read -r relativeurl && [[ ${found} = false ]]; do
PATCHURL="${JIRA_URL}${relativeurl}"
printf " %s -> " "${PATCHURL}"
jira_http_fetch "${relativeurl}" "${patchout}"
retval=$?
if [[ ${retval} == 0 ]]; then
found=true
echo "Downloaded"
elif [[ ${retval} == 22 ]]; then
echo "404"
yetus_debug "Presuming the attachment was deleted, trying the next one (see YETUS-298)"
else
echo "Error (curl returned ${retval})"
break
fi
done < <(cat "${PATCH_DIR}/jira-attachments.txt")
if [[ "${found}" = false ]]; then
yetus_error "ERROR: ${input} could not be downloaded."
cleanup_and_exit 1
fi
# attachments not ending in .patch get a content sniff before acceptance
if [[ ! ${PATCHURL} =~ \.patch$ ]]; then
if guess_patch_file "${patchout}"; then
yetus_debug "The patch ${PATCHURL} was not named properly, but it looks like a patch file. Proceeding, but issue/branch matching might go awry."
add_vote_table_v2 0 patch "" "The patch file was not named according to ${PROJECT_NAME}'s naming conventions. Please see ${PATCH_NAMING_RULE} for instructions."
else
# this definitely isn't a patch so just bail out.
return 1
fi
fi
add_footer_table "JIRA Patch URL" "${PATCHURL}"
return 0
}
## @description Try to guess the branch being tested using a variety of heuristics
## @audience private
## @stability evolving
## @replaceable no
## @return 0 on success, with PATCH_BRANCH updated appropriately
function jira_determine_branch
{
declare patchnamechunk
declare total
declare count
declare hinttype
# try the already-resolved patch URL first, then the raw user input
for hinttype in "${PATCHURL}" "${PATCH_OR_ISSUE}"; do
if [[ -z "${hinttype}" ]]; then
continue
fi
# If one of these matches the JIRA issue regex
# then we don't want it to trigger the branch
# detection since that's almost certainly not
# intended. In other words, if ISSUE-99 is the
# name of a branch, you want to test ISSUE-99
# against main, not ISSUE-99's branch
if [[ ${hinttype} =~ ${JIRA_ISSUE_RE} ]]; then
continue
fi
yetus_debug "Determine branch: starting with ${hinttype}"
# normalize: keep the basename, strip .txt/.patch/.diff suffixes and
# collapse the leftover dots
patchnamechunk=$(echo "${hinttype}" \
| "${SED}" -e 's,.*/\(.*\)$,\1,' \
-e 's,\.txt,.,' \
-e 's,.patch,.,g' \
-e 's,.diff,.,g' \
-e 's,\.\.,.,g' \
-e 's,\.$,,g' )
# The heuristics below each slice the name a different way and accept
# the first slice that verify_valid_branch recognizes.
# ISSUE-branch-##
PATCH_BRANCH=$(echo "${patchnamechunk}" | cut -f3- -d- | cut -f1,2 -d-)
yetus_debug "Determine branch: ISSUE-branch-## = ${PATCH_BRANCH}"
if [[ -n "${PATCH_BRANCH}" ]]; then
if verify_valid_branch "${PATCH_BRANCH}"; then
return 0
fi
fi
# ISSUE-##[.##].branch
# count the dots to bound how many dot-separated fields to try,
# shrinking the candidate from the right each iteration
PATCH_BRANCH=$(echo "${patchnamechunk}" | cut -f3- -d. )
count="${PATCH_BRANCH//[^.]}"
total=${#count}
((total = total + 3 ))
until [[ ${total} -lt 3 ]]; do
PATCH_BRANCH=$(echo "${patchnamechunk}" | cut "-f3-${total}" -d.)
yetus_debug "Determine branch: ISSUE[.##].branch = ${PATCH_BRANCH}"
((total=total-1))
if [[ -n "${PATCH_BRANCH}" ]]; then
if verify_valid_branch "${PATCH_BRANCH}"; then
return 0
fi
fi
done
# ISSUE.branch.##
PATCH_BRANCH=$(echo "${patchnamechunk}" | cut -f2- -d. )
count="${PATCH_BRANCH//[^.]}"
total=${#count}
((total = total + 3 ))
until [[ ${total} -lt 2 ]]; do
PATCH_BRANCH=$(echo "${patchnamechunk}" | cut "-f2-${total}" -d.)
yetus_debug "Determine branch: ISSUE.branch[.##] = ${PATCH_BRANCH}"
((total=total-1))
if [[ -n "${PATCH_BRANCH}" ]]; then
if verify_valid_branch "${PATCH_BRANCH}"; then
return 0
fi
fi
done
# ISSUE-branch.##
PATCH_BRANCH=$(echo "${patchnamechunk}" | cut -f3- -d- | cut -f1- -d. )
count="${PATCH_BRANCH//[^.]}"
total=${#count}
((total = total + 1 ))
until [[ ${total} -lt 1 ]]; do
PATCH_BRANCH=$(echo "${patchnamechunk}" | cut -f3- -d- | cut "-f1-${total}" -d. )
yetus_debug "Determine branch: ISSUE-branch[.##] = ${PATCH_BRANCH}"
((total=total-1))
if [[ -n "${PATCH_BRANCH}" ]]; then
if verify_valid_branch "${PATCH_BRANCH}"; then
return 0
fi
fi
done
done
return 1
}
## @description Write the contents of a file to JIRA
## @param filename
## @stability stable
## @audience public
## @return exit code from posting to jira
function jira_write_comment
{
declare -r commentfile=${1}
declare retval=0
if [[ "${OFFLINE}" == true ]]; then
echo "JIRA Plugin: Running in offline, comment skipped."
return 0
fi
# posting requires both halves of the credential
if [[ -n ${JIRA_PASSWD}
&& -n ${JIRA_USER} ]]; then
# RESTify the comment
# escape backslashes then double quotes, append literal \r\n to each
# line, and flatten to one line so the payload is valid JSON
{
echo "{\"body\":\""
"${SED}" -e 's,\\,\\\\,g' \
-e 's,\",\\\",g' \
-e 's,$,\\r\\n,g' "${commentfile}" \
| tr -d '\n'
echo "\"}"
} > "${PATCH_DIR}/jiracomment.$$"
# POST the payload to the issue's REST comment endpoint
"${CURL}" -X POST \
-H "Accept: application/json" \
-H "Content-Type: application/json" \
-u "${JIRA_USER}:${JIRA_PASSWD}" \
-d @"${PATCH_DIR}/jiracomment.$$" \
--silent --location \
"${JIRA_URL}/rest/api/2/issue/${JIRA_ISSUE}/comment" \
>/dev/null
retval=$?
rm "${PATCH_DIR}/jiracomment.$$"
else
echo "JIRA Plugin: no credentials provided to write a comment."
fi
return ${retval}
}
## @description Print out the finished details to the JIRA issue
## @audience private
## @stability evolving
## @replaceable no
## @param runresult
function jira_finalreport
{
declare result=$1
declare i
declare commentfile=${PATCH_DIR}/jiracommentfile
declare comment
declare vote
declare ourstring
declare ela
declare subs
declare color
declare comment
declare calctime
declare url
declare logfile
url=$(get_artifact_url)
# start from a clean slate; ignore "no such file" on the first run
rm "${commentfile}" 2>/dev/null
# only robots post comments, and never while offline
if [[ ${ROBOT} == "false"
|| ${OFFLINE} == true ]] ; then
return 0
fi
#if [[ -z "${JIRA_ISSUE}" ]]; then
# return 0
#fi
big_console_header "Adding comment to JIRA"
# overall verdict line in JIRA wiki markup: (/) = check, (x) = cross
if [[ ${result} == 0 ]]; then
echo "| (/) *{color:green}+1 overall{color}* |" >> "${commentfile}"
else
echo "| (x) *{color:red}-1 overall{color}* |" >> "${commentfile}"
fi
echo "\\\\" >> "${commentfile}"
i=0
until [[ $i -ge ${#TP_HEADER[@]} ]]; do
printf '%s\n' "${TP_HEADER[${i}]}" >> "${commentfile}"
((i=i+1))
done
echo "\\\\" >> "${commentfile}"
echo "|| Vote || Subsystem || Runtime || Logfile || Comment ||" >> "${commentfile}"
i=0
# render each pipe-delimited vote-table row, colorized per vote value
until [[ $i -ge ${#TP_VOTE_TABLE[@]} ]]; do
ourstring=$(echo "${TP_VOTE_TABLE[${i}]}" | tr -s ' ')
vote=$(echo "${ourstring}" | cut -f2 -d\| | tr -d ' ')
subs=$(echo "${ourstring}" | cut -f3 -d\|)
ela=$(echo "${ourstring}" | cut -f4 -d\|)
calctime=$(clock_display "${ela}")
logfile=$(echo "${ourstring}" | cut -f5 -d\| | tr -d ' ')
comment=$(echo "${ourstring}" | cut -f6 -d\|)
# "H" rows are section headers, not votes
if [[ "${vote}" = "H" ]]; then
echo "|| || || || {color:brown}${comment}{color} || ||" >> "${commentfile}"
((i=i+1))
continue
fi
# summary line
if [[ -z ${vote}
&& -n ${ela} ]]; then
color="black"
elif [[ -z ${vote} ]]; then
# keep same color
true
else
# new vote line
case ${vote} in
1|"+1")
color="green"
;;
-1)
color="red"
;;
0)
color="blue"
;;
-0)
color="orange"
;;
H)
# this never gets called (see above) but this is here so others know the color is taken
color="brown"
;;
*)
color="black"
;;
esac
fi
# swap the @@BASE@@ placeholder for the real artifact URL
if [[ -n "${logfile}" ]]; then
logfile=$(echo "${logfile}" |
"${SED}" -e "s,@@BASE@@,${url},g")
else
logfile=""
fi
printf '| {color:%s}%s{color} | {color:%s}%s{color} | {color:%s}%s{color} | {color:%s}%s{color} | {color:%s}%s{color} |\n' \
"${color}" "${vote}" \
"${color}" "${subs}" \
"${color}" "${calctime}" \
"${color}" "${logfile}" \
"${color}" "${comment}" \
>> "${commentfile}"
((i=i+1))
done
# optional failed-tests table
if [[ ${#TP_TEST_TABLE[@]} -gt 0 ]]; then
{ echo "\\\\" ; echo "\\\\"; } >> "${commentfile}"
echo "|| Reason || Tests ||" >> "${commentfile}"
i=0
until [[ $i -ge ${#TP_TEST_TABLE[@]} ]]; do
printf '%s\n' "${TP_TEST_TABLE[${i}]}" >> "${commentfile}"
((i=i+1))
done
fi
# footer table (report/notes), again substituting @@BASE@@
{ echo "\\\\" ; echo "\\\\"; } >> "${commentfile}"
echo "|| Subsystem || Report/Notes ||" >> "${commentfile}"
i=0
until [[ $i -ge ${#TP_FOOTER_TABLE[@]} ]]; do
comment=$(echo "${TP_FOOTER_TABLE[${i}]}" | "${SED}" -e "s,@@BASE@@,${url},g")
printf '%s\n' "${comment}" >> "${commentfile}"
((i=i+1))
done
printf '\n\nThis message was automatically generated.\n\n' >> "${commentfile}"
# keep a copy next to the other artifacts before posting
cp "${commentfile}" "${commentfile}-jira.txt"
jira_write_comment "${commentfile}"
}
|
def recursive_function(arg, max_depth):
    """Recurse from ``arg`` up toward ``max_depth`` and return the final value.

    The original version discarded the recursive call's result, making the
    function an observable no-op; returning the reached value gives the
    recursion a result while remaining backward compatible (previous callers
    received ``None`` and therefore could not have used the return value).

    Args:
        arg: Current depth/value; incremented by 1 per recursive step.
        max_depth: Recursion stops once ``arg`` is no longer below this.

    Returns:
        ``max_depth`` when ``arg < max_depth`` initially, otherwise ``arg``
        unchanged.

    Note:
        Depth of recursion is ``max_depth - arg``; a very large gap can hit
        Python's recursion limit.
    """
    if arg < max_depth:
        return recursive_function(arg + 1, max_depth)
    return arg
|
/*******************************************************************************
* Copyright (c) 2014-2015 IBM Corp.
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* and Eclipse Distribution License v1.0 which accompany this distribution.
*
* The Eclipse Public License is available at
* http://www.eclipse.org/legal/epl-v10.html
* and the Eclipse Distribution License is available at
* http://www.eclipse.org/org/documents/edl-v10.php.
*
* Contributors:
* <NAME> - initial contribution
*******************************************************************************/
package org.pyrrha_platform.utils;
import org.pyrrha_platform.BuildConfig;
// Central string/int constants shared across the app: IoT connection
// properties, event names, and intent identifiers.
public class Constants {
// Application identifier taken from the active build flavor.
public final static String APP_ID = BuildConfig.FLAVOR_APP_ID;
// Settings namespace derived from the app id.
public final static String SETTINGS = APP_ID + ".Settings";
// IoT properties
// NOTE(review): "<PASSWORD>" looks like a redacted placeholder rather than
// a real key/value -- confirm the intended constant before relying on it.
public final static String AUTH_TOKEN = "<PASSWORD>";
public final static String DEVICE_ID = "deviceid";
public final static String ORGANIZATION = "organization";
public final static String DEVICE_TYPE = BuildConfig.FLAVOR_DEVICE_TYPE;
// IoT events and commands
public final static String ALERT_EVENT = "alert";
public final static String TEXT_EVENT = "text";
public final static String CONNECTIVITY_MESSAGE = "connectivityMessage";
// Broadcast action name announcing that a connectivity message was received.
public final static String ACTION_INTENT_CONNECTIVITY_MESSAGE_RECEIVED = Constants.APP_ID + "." + "CONNECTIVITY_MESSAGE_RECEIVED";
// Fragment intents
public final static String INTENT_LOGIN = "INTENT_LOGIN";
public final static String INTENT_IOT = "INTENT_IOT";
public final static String INTENT_LOG = "INTENT_LOG";
public final static String INTENT_PROFILES = "INTENT_PROFILES";
public final static String INTENT_DATA = "data";
// MQTT action intent data
public final static String INTENT_DATA_CONNECT = "connect";
public final static String INTENT_DATA_DISCONNECT = "disconnect";
public final static String INTENT_DATA_PUBLISHED = "publish";
public final static String INTENT_DATA_RECEIVED = "receive";
public final static String INTENT_DATA_MESSAGE = "message";
// Error code surfaced when the MQTT broker cannot be reached.
public final static int ERROR_BROKER_UNAVAILABLE = 3;
// States an MQTT-related action may be in.
public enum ActionStateStatus {
CONNECTING, DISCONNECTING, SUBSCRIBE, PUBLISH
}
}
|
/**
 * Utility that prints the maximum and minimum values of an int array.
 */
public class MaxMin {
    /**
     * Prints the largest and smallest elements of {@code array} to stdout.
     * Handles null/empty input gracefully instead of throwing
     * ArrayIndexOutOfBoundsException (the original crashed on array[0]).
     *
     * @param array the values to scan; may be null or empty
     */
    public static void findMaxMin(int[] array) {
        // Guard: the original dereferenced array[0] unconditionally.
        if (array == null || array.length == 0) {
            System.out.println("The array is empty.");
            return;
        }
        int max = array[0];
        int min = array[0];
        // Single pass; else-if is safe because an element greater than max
        // cannot simultaneously be less than min.
        for (int i = 1; i < array.length; i++) {
            if (array[i] > max) {
                max = array[i];
            } else if (array[i] < min) {
                min = array[i];
            }
        }
        System.out.println("The maximum element in the array is: " + max);
        System.out.println("The minimum element in the array is: " + min);
    }

    public static void main(String[] args) {
        int[] array = {1, 5, 2, 4, 10};
        MaxMin.findMaxMin(array);
    }
}
|
<reponame>yyzclyang/algae-ui
import './affix.scss';
|
#!/bin/sh
# Run the pipeline stages in order. Abort on the first failure so later
# stages never run against missing or stale intermediate output (the
# original silently continued when an earlier stage failed).
set -e
edgecount/run.sh
matrixbuilder/run.sh
matrixmultiplier/run.sh
|
import { RelationsEventMap } from './RelationsEventMap'
/**
 * Lookup of event maps per relation. Keys are presumably relation
 * identifiers (confirm against callers); each entry pairs the events the
 * relation listens for (`on`) with those it can `emit`.
 */
type RelationsMap = {
[key: string]: {
// events this relation subscribes to
on: RelationsEventMap
// events this relation can publish
emit: RelationsEventMap
}
}
export default RelationsMap
|
#!/bin/bash
# Conda post-link script: download the JASPAR2014 data package from the
# first working mirror, verify its md5sum, and install it into the conda
# R library. Fixes vs. the original: all expansions are quoted (paths with
# spaces/globs no longer break), the array is iterated as "${URLS[@]}" to
# avoid word-splitting, and the nested 'else if ... fi' is the idiomatic
# 'elif'. Behavior is otherwise unchanged.
FN="JASPAR2014_1.22.0.tar.gz"
URLS=(
  "https://bioconductor.org/packages/3.10/data/experiment/src/contrib/JASPAR2014_1.22.0.tar.gz"
  "https://bioarchive.galaxyproject.org/JASPAR2014_1.22.0.tar.gz"
  "https://depot.galaxyproject.org/software/bioconductor-jaspar2014/bioconductor-jaspar2014_1.22.0_src_all.tar.gz"
)
MD5="be25a9c4799095141e830e1fb5290b41"

# Use a staging area in the conda dir rather than temp dirs, both to avoid
# permission issues as well as to have things downloaded in a predictable
# manner.
STAGING="$PREFIX/share/$PKG_NAME-$PKG_VERSION-$PKG_BUILDNUM"
mkdir -p "$STAGING"
TARBALL="$STAGING/$FN"
SUCCESS=0
for URL in "${URLS[@]}"; do
  # try the next mirror if this download fails
  curl "$URL" > "$TARBALL" || continue
  # Platform-specific md5sum checks.
  if [[ $(uname -s) == "Linux" ]]; then
    # NOTE(review): GNU md5sum -c conventionally expects two spaces between
    # hash and filename -- confirm this line verifies as intended.
    if md5sum -c <<<"$MD5 $TARBALL"; then
      SUCCESS=1
      break
    fi
  elif [[ $(uname -s) == "Darwin" ]]; then
    if [[ $(md5 "$TARBALL" | cut -f4 -d " ") == "$MD5" ]]; then
      SUCCESS=1
      break
    fi
  fi
done

if [[ $SUCCESS != 1 ]]; then
  echo "ERROR: post-link.sh was unable to download any of the following URLs with the md5sum $MD5:"
  printf '%s\n' "${URLS[@]}"
  exit 1
fi

# Install and clean up
R CMD INSTALL --library="$PREFIX/lib/R/library" "$TARBALL"
rm "$TARBALL"
rmdir "$STAGING"
|
-- Most frequent customer city: count rows per city (COUNT(city) skips NULL
-- cities), sort counts descending, keep only the top row.
-- NOTE: with only city_count in ORDER BY, ties are broken arbitrarily.
SELECT city, COUNT(city) AS city_count
FROM customers
GROUP BY city
ORDER BY city_count DESC
LIMIT 1;
|
<reponame>mjochab/Inzynierski_projekt_zespolowy_2018_gr3<filename>src/main/java/patron/mains/bootstraps/BootstrapConfigurator.java
package patron.mains.bootstraps;
import com.appscharles.libs.aller.exceptions.AllerException;
import com.appscharles.libs.databaser.exceptions.DatabaserException;
import com.appscharles.libs.dialoger.factories.AlertFactory;
import com.appscharles.libs.fxer.exceptions.FxerException;
import com.appscharles.libs.ioer.services.FileWriter;
import com.appscharles.libs.logger.configurators.Log4j2ConsoleFileRoller;
import com.appscharles.libs.logger.services.LoggerConfigurator;
import com.appscharles.libs.proper.exceptions.ProperException;
import com.sun.javafx.application.PlatformImpl;
import javafx.application.Platform;
import javafx.scene.control.Alert;
import jdk.nashorn.internal.objects.Global;
import org.apache.logging.log4j.Level;
import patron.auth.configurations.DefaultAdminConfiguration;
import patron.auth.globals.UserGlobal;
import patron.auth.guis.login.LoginViewFactory;
import patron.mains.guis.popups.CommonPopup;
import patron.mains.managers.app.AppManager;
import patron.mains.managers.app.AppManagerConfiguration;
import patron.mains.managers.database.DatabaseManager;
import patron.mains.managers.database.DatabaseManagerConfiguration;
import java.io.File;
import java.io.IOException;
/**
* The type Bootstrap configurator.
*/
public class BootstrapConfigurator {
/**
* Config.
*
* <p>Starts the JavaFX toolkit, initializes the application and logging
* managers, configures the database, and probes that the working
* directory is writable before finishing database initialization.</p>
*
* @throws ProperException the proper exception
* @throws IOException the io exception
* @throws DatabaserException the databaser exception
* @throws FxerException the fxer exception
*/
public void config() throws ProperException, IOException, DatabaserException, FxerException {
// boot the JavaFX toolkit without showing any stage yet
PlatformImpl.startup(() -> {
});
AppManager.setConfiguration(new AppManagerConfiguration());
AppManager.init();
// log to console plus rolling files under <appDir>/Logs at INFO level
LoggerConfigurator.config(new Log4j2ConsoleFileRoller(Level.INFO).setLogsDir(new File(AppManager.getDirApp(), "Logs")));
DatabaseManager.setConfiguration(new DatabaseManagerConfiguration(AppManager.getDirApp(), AppManager.getName(), AppManager.getDatabasePort(), AppManager.getSaltPassword()));
// writability probe: create a marker file; on failure show a (Polish)
// permissions error dialog and terminate the JVM
try{
File f = new File("app/pro.settings");
f.getParentFile().mkdirs();
FileWriter.write(f, "t");
}catch (Exception e){
PlatformImpl.runAndWait(()->{
AlertFactory.create(Alert.AlertType.ERROR, "Brak uprawnień. Nie można utworzyć pliku.", "Aby program działał poprawnie, należy przenieść go do innego folderu lub uruchomić z uprawnieniami administratora.").build().showAndWait();
});
Platform.exit();
System.exit(0);
}
DatabaseManager.init();
// Event ticket = new Event("Title", "Description", 23.33, false, EventType.FLIGHT, UserGlobal.getUser());
// DBOperator.save(ticket);
// DBOperator.save(UserGlobal.getUser());
}
}
|
#Update this path to your virtual environment
source /env/cons2label/bin/activate
TEST_NAME="test"
INPUT=../dataset/ctb/ctb-$TEST_NAME.seq_lu
TEST_PATH=../CTB_pred_tags/$TEST_NAME"_ch.trees"
USE_GPU=True
EVALB=../EVALB/evalb
OUTPUT=../output/
MODELS=../models/
LOGS=../logs/
#ENRICHED
taskset --cpu-list 1 \
python ../baselines.py \
--test $INPUT \
--gold $TEST_PATH \
--model $MODELS/ctb/enriched/mlp/ctb-2.2 \
--baseline emlp \
--status test \
--gpu $USE_GPU \
--output_decode $OUTPUT/ctb-2.2.emlp.enriched.$TEST_NAME.txt \
--evalb $EVALB > $LOGS/ctb.enriched.emlp.cores=1.nogpu=$USE_GPU.$TEST_NAME.log 2>&1
#RETAGGER
taskset --cpu-list 1 \
python ../baselines.py \
--test $INPUT \
--gold $TEST_PATH \
--model $MODELS/ctb/retagger/mlp/ctb-2.2-rt \
--baseline emlp \
--status test \
--gpu $USE_GPU \
--output_decode $OUTPUT/ctb-2.2.emlp.retagger.$TEST_NAME.txt \
--retagger \
--evalb $EVALB > $LOGS/ctb.retagger.emlp.cores=1.nogpu=$USE_GPU.$TEST_NAME.log 2>&1
|
/*
* == BSD2 LICENSE ==
* Copyright (c) 2016, Tidepool Project
*
* This program is free software; you can redistribute it and/or modify it under
* the terms of the associated License, which is identical to the BSD 2-Clause
* License as published by the Open Source Initiative at opensource.org.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the License for more details.
*
* You should have received a copy of the License along with this program; if
* not, you can obtain one from Tidepool Project at tidepool.org.
* == BSD2 LICENSE ==
*/
/* eslint-disable max-len */
import React from 'react';
import { mount } from 'enzyme';
import { formatClassesAsSelector } from '../../helpers/cssmodules';
import colors from '../../../src/styles/colors.css';
import SMBGTooltip from '../../../src/components/daily/smbgtooltip/SMBGTooltip';
import styles from '../../../src/components/daily/smbgtooltip/SMBGTooltip.css';
// Blood-glucose classification bounds (mg/dL) shared by every tooltip test.
const bgPrefs = {
bgClasses: {
'very-high': { boundary: 600 },
high: { boundary: 300 },
target: { boundary: 180 },
low: { boundary: 70 },
'very-low': { boundary: 54 },
},
bgUnits: 'mg/dL',
};
// Plain smbg fixtures, one per bg classification band.
const target = {
type: 'smbg',
units: 'mg/dL',
value: 100,
};
const low = {
type: 'smbg',
units: 'mg/dL',
value: 65,
};
const high = {
type: 'smbg',
units: 'mg/dL',
value: 200,
};
// Out-of-range fixtures carry a bg/out-of-range annotation with the
// meter's clamping threshold.
const veryHigh = {
type: 'smbg',
units: 'mg/dL',
value: 601,
annotations: [
{
code: 'bg/out-of-range',
value: 'high',
threshold: 600,
},
],
};
const veryLow = {
type: 'smbg',
units: 'mg/dL',
value: 39,
annotations: [
{
code: 'bg/out-of-range',
value: 'low',
threshold: 40,
},
],
};
// subType distinguishes manually-entered vs. meter-linked readings.
const manual = {
type: 'smbg',
units: 'mg/dL',
value: 100,
subType: 'manual',
};
const linked = {
type: 'smbg',
units: 'mg/dL',
value: 100,
subType: 'linked',
};
// Medtronic 600-series fixtures: annotations record how the user responded
// to the remote-BG confirmation screen (accepted / rejected / timed out),
// optionally combined with a subType and calibration annotations.
const medT600accepted = {
type: 'smbg',
units: 'mg/dL',
value: 100,
annotations: [
{ code: 'medtronic600/smbg/user-accepted-remote-bg' },
],
};
const medT600rejected = {
type: 'smbg',
units: 'mg/dL',
value: 100,
annotations: [
{ code: 'medtronic600/smbg/user-rejected-remote-bg' },
],
};
const medT600timeout = {
type: 'smbg',
units: 'mg/dL',
value: 100,
annotations: [
{ code: 'medtronic600/smbg/remote-bg-acceptance-screen-timeout' },
],
};
const medT600acceptedManual = {
type: 'smbg',
units: 'mg/dL',
value: 100,
subType: 'manual',
annotations: [
{ code: 'medtronic600/smbg/user-accepted-remote-bg' },
],
};
const medT600rejectedLinked = {
type: 'smbg',
units: 'mg/dL',
value: 100,
subType: 'linked',
annotations: [
{ code: 'medtronic600/smbg/user-rejected-remote-bg' },
],
};
const medT600timeoutManual = {
type: 'smbg',
units: 'mg/dL',
value: 100,
subType: 'manual',
annotations: [
{ code: 'medtronic600/smbg/remote-bg-acceptance-screen-timeout' },
],
};
const medT600calibManual = {
type: 'smbg',
units: 'mg/dL',
value: 100,
subType: 'manual',
annotations: [
{ code: 'medtronic600/smbg/bg-sent-for-calib' },
],
};
const medT600noncalibManual = {
type: 'smbg',
units: 'mg/dL',
value: 100,
subType: 'manual',
annotations: [
{ code: 'medtronic600/smbg/user-rejected-sensor-calib' },
],
};
const medT600acceptedNoncalibManual = {
type: 'smbg',
units: 'mg/dL',
value: 100,
subType: 'manual',
annotations: [
{ code: 'medtronic600/smbg/user-accepted-remote-bg' },
{ code: 'medtronic600/smbg/user-rejected-sensor-calib' },
],
};
// Common props passed to every mounted SMBGTooltip.
const props = {
position: { top: 200, left: 200 },
timePrefs: { timezoneAware: false },
bgPrefs,
};
// CSS-module selectors for the tooltip's value cells.
const bgValueSelector = `${formatClassesAsSelector(styles.confirmBg)} ${formatClassesAsSelector(styles.value)}`;
const sourceValueSelector = `${formatClassesAsSelector(styles.source)} ${formatClassesAsSelector(styles.value)}`;
const glucoseValueSelector = `${formatClassesAsSelector(styles.bg)} ${formatClassesAsSelector(styles.value)}`;
describe('SMBGTooltip', () => {
it('should render without issue when all properties provided', () => {
const wrapper = mount(<SMBGTooltip {...props} smbg={target} />);
expect(wrapper.find(formatClassesAsSelector(styles.bg))).to.have.length(1);
});
// Color tests: tail and border color must track the bg classification band.
it('should render "target" color for target bg', () => {
const wrapper = mount(<SMBGTooltip {...props} smbg={target} />);
expect(wrapper.find('Tooltip').instance().props.tailColor).to.equal(colors.target);
expect(wrapper.find('Tooltip').instance().props.borderColor).to.equal(colors.target);
});
it('should render "high" color for high bg', () => {
const wrapper = mount(<SMBGTooltip {...props} smbg={high} />);
expect(wrapper.find('Tooltip').instance().props.tailColor).to.equal(colors.high);
expect(wrapper.find('Tooltip').instance().props.borderColor).to.equal(colors.high);
});
it('should render "veryHigh" color for very high bg', () => {
const wrapper = mount(<SMBGTooltip {...props} smbg={veryHigh} />);
expect(wrapper.find('Tooltip').instance().props.tailColor).to.equal(colors.veryHigh);
expect(wrapper.find('Tooltip').instance().props.borderColor).to.equal(colors.veryHigh);
});
it('should render "low" color for low bg', () => {
const wrapper = mount(<SMBGTooltip {...props} smbg={low} />);
expect(wrapper.find('Tooltip').instance().props.tailColor).to.equal(colors.low);
expect(wrapper.find('Tooltip').instance().props.borderColor).to.equal(colors.low);
});
it('should render "veryLow" color for very low bg', () => {
const wrapper = mount(<SMBGTooltip {...props} smbg={veryLow} />);
expect(wrapper.find('Tooltip').instance().props.tailColor).to.equal(colors.veryLow);
expect(wrapper.find('Tooltip').instance().props.borderColor).to.equal(colors.veryLow);
});
// Source row: "Manual" vs "Linked" driven by the smbg subType.
it('should render "manual" for a manual smbg', () => {
const wrapper = mount(<SMBGTooltip {...props} smbg={manual} />);
expect(wrapper.find(formatClassesAsSelector(styles.source))).to.have.length(1);
expect(wrapper.find(sourceValueSelector).text()).to.equal('Manual');
});
it('should render "linked" for a linked smbg', () => {
const wrapper = mount(<SMBGTooltip {...props} smbg={linked} />);
expect(wrapper.find(formatClassesAsSelector(styles.source))).to.have.length(1);
expect(wrapper.find(sourceValueSelector).text()).to.equal('Linked');
});
// Medtronic 600-series confirm-BG row: Yes / No / Timed Out depending on
// how the user answered the remote-BG screen.
it('should render "Yes" for a confirmed medtronic 600 series smbg', () => {
const wrapper = mount(<SMBGTooltip {...props} smbg={medT600accepted} />);
expect(wrapper.find(formatClassesAsSelector(styles.confirmBg))).to.have.length(1);
expect(wrapper.find(bgValueSelector).text()).to.equal('Yes');
});
it('should render "No" for a rejected medtronic 600 series smbg', () => {
const wrapper = mount(<SMBGTooltip {...props} smbg={medT600rejected} />);
expect(wrapper.find(formatClassesAsSelector(styles.confirmBg))).to.have.length(1);
expect(wrapper.find(bgValueSelector).text()).to.equal('No');
});
it('should render "Timed Out" for a timed out medtronic 600 series smbg', () => {
const wrapper = mount(<SMBGTooltip {...props} smbg={medT600timeout} />);
expect(wrapper.find(formatClassesAsSelector(styles.confirmBg))).to.have.length(1);
expect(wrapper.find(bgValueSelector).text()).to.equal('Timed Out');
});
it('should render "Yes" and "Manual" for a confirmed manual medtronic 600 series smbg', () => {
const wrapper = mount(<SMBGTooltip {...props} smbg={medT600acceptedManual} />);
expect(wrapper.find(formatClassesAsSelector(styles.confirmBg))).to.have.length(1);
expect(wrapper.find(bgValueSelector).text()).to.equal('Yes');
expect(wrapper.find(formatClassesAsSelector(styles.source))).to.have.length(1);
expect(wrapper.find(sourceValueSelector).text()).to.equal('Manual');
});
it('should render "No" and "Linked" for a rejected linked medtronic 600 series smbg', () => {
const wrapper = mount(<SMBGTooltip {...props} smbg={medT600rejectedLinked} />);
expect(wrapper.find(formatClassesAsSelector(styles.confirmBg))).to.have.length(1);
expect(wrapper.find(bgValueSelector).text()).to.equal('No');
expect(wrapper.find(formatClassesAsSelector(styles.source))).to.have.length(1);
expect(wrapper.find(sourceValueSelector).text()).to.equal('Linked');
});
it('should render "Timed Out" and "Manual" for a timed out manual medtronic 600 series smbg', () => {
const wrapper = mount(<SMBGTooltip {...props} smbg={medT600timeoutManual} />);
expect(wrapper.find(formatClassesAsSelector(styles.confirmBg))).to.have.length(1);
expect(wrapper.find(bgValueSelector).text()).to.equal('Timed Out');
expect(wrapper.find(formatClassesAsSelector(styles.source))).to.have.length(1);
expect(wrapper.find(sourceValueSelector).text()).to.equal('Manual');
});
// Calibration-annotated fixtures must NOT render a calibration row.
it('should render "Manual" for a manual medtronic 600 series smbg', () => {
const wrapper = mount(<SMBGTooltip {...props} smbg={medT600calibManual} />);
expect(wrapper.find(formatClassesAsSelector(styles.calibration))).to.have.length(0);
expect(wrapper.find(formatClassesAsSelector(styles.source))).to.have.length(1);
expect(wrapper.find(sourceValueSelector).text()).to.equal('Manual');
});
it('should render "Manual" for a non-calibration manual medtronic 600 series smbg', () => {
const wrapper = mount(<SMBGTooltip {...props} smbg={medT600noncalibManual} />);
expect(wrapper.find(formatClassesAsSelector(styles.calibration))).to.have.length(0);
expect(wrapper.find(formatClassesAsSelector(styles.source))).to.have.length(1);
expect(wrapper.find(sourceValueSelector).text()).to.equal('Manual');
});
it('should render "Yes" and "Manual" for an accepted non-calibration manual medtronic 600 series smbg', () => {
const wrapper = mount(<SMBGTooltip {...props} smbg={medT600acceptedNoncalibManual} />);
expect(wrapper.find(formatClassesAsSelector(styles.confirmBg))).to.have.length(1);
expect(wrapper.find(bgValueSelector).text()).to.equal('Yes');
expect(wrapper.find(formatClassesAsSelector(styles.calibration))).to.have.length(0);
expect(wrapper.find(formatClassesAsSelector(styles.source))).to.have.length(1);
expect(wrapper.find(sourceValueSelector).text()).to.equal('Manual');
});
// Out-of-range values display the clamped label plus an annotation.
it('should render "High" and an annotation for a "very-high" smbg', () => {
const wrapper = mount(<SMBGTooltip {...props} smbg={veryHigh} />);
expect(wrapper.find(formatClassesAsSelector(styles.annotation))).to.have.length(1);
expect(wrapper.find(glucoseValueSelector).text()).to.equal('High');
});
it('should render "Low" and an annotation for a "very-low" smbg', () => {
const wrapper = mount(<SMBGTooltip {...props} smbg={veryLow} />);
expect(wrapper.find(formatClassesAsSelector(styles.annotation))).to.have.length(1);
expect(wrapper.find(glucoseValueSelector).text()).to.equal('Low');
});
});
|
import requests
from bs4 import BeautifulSoup
def get_all_links(url, timeout=10):
    """Fetch ``url`` and return the href targets of all its anchor tags.

    Args:
        url: Address of the page to scrape.
        timeout: Seconds to wait for the HTTP response (default 10); without
            it, ``requests.get`` can block forever on an unresponsive host.

    Returns:
        List of href strings; anchors without an href attribute are skipped.

    Raises:
        requests.HTTPError: If the server answers with a 4xx/5xx status.
        requests.Timeout: If no response arrives within ``timeout`` seconds.
    """
    r = requests.get(url, timeout=timeout)
    # Fail loudly on HTTP errors instead of silently scraping an error page.
    r.raise_for_status()
    soup = BeautifulSoup(r.text, 'html.parser')
    links = []
    for link in soup.find_all('a'):
        href = link.get('href')
        # <a> tags without an href (e.g. named anchors) yield None — skip them
        # instead of polluting the result list.
        if href is not None:
            links.append(href)
    return links
# Testing the function
# NOTE(review): this runs at import time and performs a live network request —
# presumably intended as a quick manual check; consider guarding with
# `if __name__ == "__main__":` (TODO confirm intent).
print(get_all_links('https://www.example.com'))
|
// file : xsde/cxx/parser/validating/gmonth-day.hxx
// author : <NAME> <<EMAIL>>
// copyright : Copyright (c) 2005-2011 Code Synthesis Tools CC
// license : GNU GPL v2 + exceptions; see accompanying LICENSE file
#ifndef XSDE_CXX_PARSER_VALIDATING_GMONTH_DAY_HXX
#define XSDE_CXX_PARSER_VALIDATING_GMONTH_DAY_HXX
#include <xsde/cxx/string.hxx>
#include <xsde/cxx/parser/validating/xml-schema-pskel.hxx>
namespace xsde
{
namespace cxx
{
namespace parser
{
namespace validating
{
        // Parser implementation ("pimpl") for the XML Schema gMonthDay type.
        // Raw character data is accumulated in str_ during parsing and decoded
        // into month/day plus an optional timezone by the post callbacks.
#ifdef XSDE_REUSE_STYLE_MIXIN
        struct gmonth_day_pimpl: virtual gmonth_day_pskel
#else
        struct gmonth_day_pimpl: gmonth_day_pskel
#endif
        {
          // Reset accumulated state before parsing a new value.
          virtual void
          _pre ();
          // Accumulate raw character data into str_.
          virtual void
          _characters (const ro_string&);
          // Validate and decode the accumulated text.
          virtual void
          _post ();
          // Return the parsed gMonthDay value.
          virtual gmonth_day
          post_gmonth_day ();
        protected:
          // Raw text of the gMonthDay literal as seen in the document.
          string str_;
          unsigned short month_, day_;
          // z_: whether a timezone was present; zh_/zm_: its hour/minute offset.
          bool z_;
          short zh_, zm_;
        };
}
}
}
}
#endif // XSDE_CXX_PARSER_VALIDATING_GMONTH_DAY_HXX
|
<reponame>lionelpa/openvalidation
/*
* Copyright 2019 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.openvalidation.common.unittesting.astassertion;
import io.openvalidation.common.ast.ASTModel;
import io.openvalidation.common.ast.operand.ASTOperandStaticString;
/**
 * Fluent assertion helper for {@link ASTOperandStaticString} nodes, providing
 * checks on the literal string value and typed navigation to the parent
 * {@link ArrayAssertion}.
 */
public class StaticStringAssertion extends StaticAssertion<ASTOperandStaticString> {

  public StaticStringAssertion(ASTOperandStaticString item, ASTModel ast, ASTAssertionBase parent) {
    super(item, ast, parent);
  }

  /** Asserts that the wrapped static string operand holds exactly {@code value}. */
  public StaticStringAssertion hasValue(String value) {
    String actualValue = model.getValue();
    shouldEquals(actualValue, value, "STRING VALUE");
    return this;
  }

  /** Asserts that the parent assertion is an {@link ArrayAssertion} and returns it. */
  public ArrayAssertion parentArray() {
    shouldBeInstanceOf(this.parent(), ArrayAssertion.class, "PARENT Array");
    return (ArrayAssertion) parent();
  }
}
|
/*
*
*/
package net.community.chest.io.output;
import java.io.IOException;
import java.io.Writer;
import java.nio.channels.Channel;
/**
* <P>Copyright as per GPLv2</P>
*
* <P>Accumulates all written data into a work buffer and calls the actual
* writing method only when LF detected</P>
* @author <NAME>.
* @since Jun 30, 2009 12:15:01 PM
*/
public abstract class LineWriter extends Writer implements Channel, LineLevelAppender {
    protected LineWriter ()
    {
        super();
    }

    // Lazily allocated accumulation buffer - see getWorkBuffer(int)
    private StringBuilder _workBuf;
    /**
     * Called in order to retrieve a work buffer. <B>Note:</B> the call occurs
     * every time data is to be appended. It is up to the implementor to "reset"
     * the work buffer instance after actual write takes place.
     * @param reqSize Minimum size of requested buffer size - should be used
     * in order to make a smart allocation
     * @return The {@link StringBuilder} instance to be used as the work buffer.
     * The accumulated line data is appended to it - except for the CR/LF. Once
     * end of line is detected this instance is passed to actual write method.
     * If <code>null</code> then same as write disabled.
     */
    protected StringBuilder getWorkBuffer (final int reqSize)
    {
        // never allocate below 127 chars to avoid repeated re-growth
        final int effSize=Math.max(reqSize, Byte.MAX_VALUE);
        if (null == _workBuf)
            _workBuf = new StringBuilder(effSize);
        else
            _workBuf.ensureCapacity(effSize);
        return _workBuf;
    }
    /**
     * Empties the current work buffer (if any) without writing its contents.
     * @return The (possibly <code>null</code>) work buffer instance
     */
    protected StringBuilder clearWorkBuffer ()
    {
        if ((_workBuf != null) && (_workBuf.length() > 0))
            _workBuf.setLength(0);
        return _workBuf;
    }
    /**
     * Invokes <code>writeLineData</code> if the accumulated data ends in LF.
     * The reported length excludes the line terminator (CR/LF pair or lone LF).
     * @param sb The accumulated data - may be <code>null</code>/empty
     * @throws IOException If failed to write the line data
     */
    protected void processAccumulatedMessage (final StringBuilder sb) throws IOException
    {
        int dLen=(null == sb) ? 0 : sb.length();
        if (dLen <= 0)
            return;
        // check if data buffer ends in line separator pattern
        if (sb.charAt(dLen - 1) != '\n')
            return;
        if ((dLen > 1) && (sb.charAt(dLen - 2) == '\r'))
            dLen -= 2;
        else
            dLen--;
        writeLineData(sb, dLen);
    }

    private boolean _closed /* =false */;
    /*
     * @see java.nio.channels.Channel#isOpen()
     */
    @Override
    public boolean isOpen ()
    {
        return !_closed;
    }

    public void setOpen (boolean s)
    {
        _closed = !s;
    }
    /*
     * @see java.io.Writer#append(java.lang.CharSequence, int, int)
     */
    @Override
    public Writer append (CharSequence csq, int start, int end)
        throws IOException
    {
        // NOTE(review): a null csq reaches csq.charAt below and throws NPE;
        // Writer's contract suggests treating null as "null" - confirm callers.
        if (!isOpen())
            throw new IOException("append(" + csq.subSequence(start, end) + ") not open");
        if (!isWriteEnabled())
        {
            clearWorkBuffer();
            return this;
        }
        final int wLen=end - start;
        if (wLen <= 0)
            return this;
        final StringBuilder sb=getWorkBuffer(wLen);
        int lOffset=start;
        for (int cOffset=lOffset; cOffset < end; cOffset++)
        {
            final char c=csq.charAt(cOffset);
            // if not part of the line separator then skip it
            if (c != '\n')
                continue;
            if (lOffset < cOffset)
                sb.append(csq, lOffset, cOffset + 1 /* including this character */);
            else
                sb.append(c);
            processAccumulatedMessage(sb);
            lOffset = cOffset + 1;    // skip current character
        }
        // check if have any leftovers
        if (lOffset < end)    // the leftover(s) have no line separator characters for sure
            sb.append(csq, lOffset, end);
        return this;
    }
    /*
     * @see java.io.Writer#append(char)
     */
    @Override
    public Writer append (final char c) throws IOException
    {
        if (!isOpen())
            throw new IOException("append(char=" + String.valueOf(c) + ") not open");
        if (!isWriteEnabled())
        {
            clearWorkBuffer();
            return this;
        }
        final StringBuilder sb=getWorkBuffer(1);
        sb.append(c);
        if (c == '\n')
            processAccumulatedMessage(sb);
        return this;
    }
    /*
     * @see java.io.Writer#append(java.lang.CharSequence)
     */
    @Override
    public Writer append (final CharSequence csq) throws IOException
    {
        if (null == csq)
            return append("null");
        return append(csq, 0, csq.length());
    }
    /*
     * @see java.io.Writer#write(char[], int, int)
     */
    @Override
    public void write (char[] cbuf, int off, int len) throws IOException
    {
        if (!isOpen())
            throw new IOException("write(buf=" + String.valueOf(cbuf, off, len) + ") not open");
        if (len <= 0)
            return;
        if (!isWriteEnabled())
        {
            clearWorkBuffer();
            return;
        }
        final StringBuilder sb=getWorkBuffer(len);
        int lOffset=off, maxOffset=off + len;
        for (int cOffset=lOffset; cOffset < maxOffset; cOffset++)
        {
            final char c=cbuf[cOffset];
            // if not part of the line separator then skip it
            if (c != '\n')
                continue;
            final int cLen=cOffset - lOffset;
            if (cLen > 0)
                sb.append(cbuf, lOffset, cLen + 1 /* including this character */);
            else
                sb.append(c);
            processAccumulatedMessage(sb);
            lOffset = cOffset + 1;    // skip current character
        }
        // check if have any leftovers
        final int remLen=maxOffset - lOffset;
        if (remLen > 0)    // the leftover(s) have no line separator characters for sure
            sb.append(cbuf, lOffset, remLen);
    }
    /*
     * @see java.io.Writer#write(char[])
     */
    @Override
    public void write (char[] cbuf) throws IOException
    {
        write(cbuf, 0, cbuf.length);
    }
    /*
     * @see java.io.Writer#write(int)
     */
    @Override
    public void write (int c) throws IOException
    {
        append((char) c);
    }
    /*
     * @see java.io.Writer#write(java.lang.String, int, int)
     */
    @Override
    public void write (String str, int off, int len) throws IOException
    {
        // Writer#write(String,int,int) receives an OFFSET+LENGTH pair, while
        // append(CharSequence,int,int) expects START+END indices. The previous
        // code passed "len" as the end index, which dropped or duplicated
        // characters whenever off > 0 - convert the length to an end index.
        append(str, off, off + len);
    }
    /*
     * @see java.io.Writer#write(java.lang.String)
     */
    @Override
    public void write (String str) throws IOException
    {
        if (null == str)
            append("null");
        else
            append(str, 0, str.length());
    }
    /*
     * @see java.io.Writer#close()
     */
    @Override
    public void close () throws IOException
    {
        if (isOpen())
            setOpen(false);
    }
    /*
     * @see java.io.Writer#flush()
     */
    @Override
    public void flush () throws IOException
    {
        if (!isOpen())
            throw new IOException("flush() - not open");
    }
    /*
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString ()
    {
        final CharSequence    o=getWorkBuffer(0);
        if ((null == o) || (o.length() <= 0))
            return "";
        else
            return o.toString();
    }
}
|
<filename>packages/ux-core/src/index.ts
// Public entry point of the package: re-export the App class directly.
export { default as App } from './lib/app'
|
import random
import numpy as np
# Parameters
N_POPULATION = 1000 # number of individuals
N_GENERATIONS = 100 # number of generations
P_MUTATION = 0.1 # probability of mutation
# Objective function
def f(x, y):
return x**2 + 2 * y**2 - 2 * x + 2 * y
# Initialization
population = np.random.rand(N_POPULATION, 2)
# Genetic algorithm loop
for _ in range(N_GENERATIONS):
# Fitness
fitness = np.array([f(*ind) for ind in population])
# Selection
fitness_normalized = fitness - min(fitness)
fitness_normalized /= sum(fitness_normalized)
parents_idx = np.random.choice(range(N_POPULATION),
size=N_POPULATION,
p=fitness_normalized,
replace=True)
parents = population[parents_idx]
# Crossover
children = np.empty((N_POPULATION, 2))
for i in range(0, N_POPULATION, 2):
child_1 = np.array(parents[i])
child_2 = np.array(parents[i+1])
r = random.random()
for idx in range(2):
if r < 0.5:
child_1[idx] = parents[i][idx]
child_2[idx] = parents[i+1][idx]
else:
child_1[idx] = parents[i+1][idx]
child_2[idx] = parents[i][idx]
children[i] = child_1
children[i+1] = child_2
# Mutation
for idx, child in enumerate(children):
if P_MUTATION > random.random():
r = random.random()
if r < 0.5:
children[idx][0] += random.gauss(0, 0.2)
else:
children[idx][1] += random.gauss(0, 0.2)
# Replacement
population = children
# Final result
fitness = np.array([f(*ind) for ind in population])
optimal_individual = population[np.argmin(fitness)]
print('Optimal individual: {}'.format(optimal_individual))
|
def bubble_sort(arr):
    """Sort ``arr`` in place in ascending order using bubble sort.

    Args:
        arr: Mutable list of mutually comparable items; may be empty.

    Returns:
        None. The input list is reordered in place.
    """
    n = len(arr)
    for i in range(n):
        # Early-exit optimization: a full pass with no swaps means the list
        # is already sorted, so remaining passes can be skipped.
        swapped = False
        for j in range(0, n - i - 1):
            if arr[j] > arr[j + 1]:
                arr[j], arr[j + 1] = arr[j + 1], arr[j]
                swapped = True
        if not swapped:
            break
# Quick demonstration: sort a small sample list and show the result.
sample = [5, 1, 4, 2, 8]
bubble_sort(sample)
print("Sorted list is:", sample)
|
/**
 * Validate a password-change request.
 *
 * Checks that the old password is present, the new password is present and
 * at least 8 characters long, and that the confirmation matches.
 *
 * @return array ['valid' => true] on success, otherwise
 *               ['valid' => false, 'errors' => [field => message, ...]]
 */
function validatePasswordChangeForm($oldPassword, $newPassword, $confirmPassword) {
    // Accumulate one message per failing form field.
    $validationErrors = [];

    if (empty($oldPassword)) {
        $validationErrors['old_password'] = 'Old password is required.';
    }

    if (empty($newPassword)) {
        $validationErrors['new_password'] = 'New password is required.';
    } elseif (strlen($newPassword) < 8) {
        $validationErrors['new_password'] = 'New password must be at least 8 characters long.';
    }

    if ($newPassword !== $confirmPassword) {
        $validationErrors['confirm_password'] = 'Confirm password does not match the new password.';
    }

    if (count($validationErrors) > 0) {
        return ['valid' => false, 'errors' => $validationErrors];
    }

    return ['valid' => true];
}
// Example usage:
// Read submitted fields defensively: a missing POST key must not raise an
// "undefined index" notice, so fall back to the empty string.
$input = [
    'old_password' => isset($_POST['old_password']) ? $_POST['old_password'] : '',
    'new_password' => isset($_POST['new_password']) ? $_POST['new_password'] : '',
    'confirm_password' => isset($_POST['confirm_password']) ? $_POST['confirm_password'] : ''
];
$result = validatePasswordChangeForm($input['old_password'], $input['new_password'], $input['confirm_password']);
if ($result['valid']) {
    // Proceed with password change
} else {
    // Display error messages to the user.
    // Escape before echoing: these values end up in HTML output, and echoing
    // them raw would allow markup/script injection (XSS).
    foreach ($result['errors'] as $field => $error) {
        echo 'Error in ' . htmlspecialchars($field) . ': ' . htmlspecialchars($error) . '<br>';
    }
}
|
Objective function: Maximize profit = 80A+90O+20B+30P+200M+100W+50S+60C+70K
Subject to:
A+O+B+P+M+W+S+C+K <= 10 # storage-capacity constraint: at most 10 pieces of fruit in total
A<=10 # constraint of maximum pieces of apples
O<=10 # constraint of maximum pieces of oranges
B<=10 # constraint of maximum pieces of bananas
P<=10 # constraint of maximum pieces of pineapples
M<=10 # constraint of maximum pieces of mangos
W<=10 # constraint of maximum pieces of watermelons
S<=10 # constraint of maximum pieces of strawberries
C<=10 # constraint of maximum pieces of cherries
K<=10 # constraint of maximum pieces of kiwis
A,O,B,P,M,W,S,C,K >= 0 # non-negative constraints
|
/*
* Copyright 2015-present Open Networking Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.openflow.controller.impl;
import static org.slf4j.LoggerFactory.getLogger;
import java.util.concurrent.TimeUnit;
import org.slf4j.Logger;
import io.netty.channel.ChannelDuplexHandler;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelPromise;
/**
* Trigger a timeout if a switch fails to complete handshake soon enough.
*/
public class HandshakeTimeoutHandler
    extends ChannelDuplexHandler {
    private static final Logger log = getLogger(HandshakeTimeoutHandler.class);
    // Handler whose isHandshakeComplete() decides when monitoring can stop.
    final OFChannelHandler channelHandler;
    // Configured timeout; 0 or negative disables deadline refresh on activation.
    final long timeoutMillis;
    // Wall-clock instant (ms) after which the handshake is considered timed out.
    volatile long deadline;

    /**
     * @param channelHandler the OpenFlow channel handler being monitored
     * @param timeoutSeconds handshake timeout in seconds (converted to ms)
     */
    public HandshakeTimeoutHandler(OFChannelHandler channelHandler,
                                   long timeoutSeconds) {
        super();
        this.channelHandler = channelHandler;
        this.timeoutMillis = TimeUnit.SECONDS.toMillis(timeoutSeconds);
        this.deadline = System.currentTimeMillis() + timeoutMillis;
    }

    @Override
    public void channelActive(ChannelHandlerContext ctx) throws Exception {
        if (timeoutMillis > 0) {
            // set Handshake deadline
            deadline = System.currentTimeMillis() + timeoutMillis;
        }
        super.channelActive(ctx);
    }

    // Every pipeline event below re-checks the deadline; the check also
    // self-removes this handler once the handshake has completed.
    @Override
    public void read(ChannelHandlerContext ctx) throws Exception {
        checkTimeout(ctx);
        super.read(ctx);
    }

    @Override
    public void write(ChannelHandlerContext ctx, Object msg,
                      ChannelPromise promise)
            throws Exception {
        checkTimeout(ctx);
        super.write(ctx, msg, promise);
    }

    @Override
    public void userEventTriggered(ChannelHandlerContext ctx,
                                   Object evt)
            throws Exception {
        // expecting idle event
        checkTimeout(ctx);
        super.userEventTriggered(ctx, evt);
    }

    // Fires a HandshakeTimeoutException down the pipeline when the deadline
    // has passed and the handshake is still incomplete. NOTE(review): if the
    // channel stays active past the deadline, this may fire on every
    // subsequent event until the exception handler closes the channel —
    // presumably intentional; confirm against the pipeline's exception logic.
    void checkTimeout(ChannelHandlerContext ctx) {
        if (channelHandler.isHandshakeComplete()) {
            // handshake complete, Handshake monitoring timeout no-longer needed
            ctx.channel().pipeline().remove(this);
            return;
        }
        if (!ctx.channel().isActive()) {
            return;
        }
        if (System.currentTimeMillis() > deadline) {
            log.info("Handshake time out {}", channelHandler);
            ctx.fireExceptionCaught(new HandshakeTimeoutException());
        }
    }
}
|
#!/bin/bash
# Require a target path and refuse to overwrite an existing project.
if [ $# -lt 1 ]; then
  echo "You must provide the name of the project as the first argument"
  echo "Usage: ./new-framework.sh <dir-path>/<framework-name>"
  echo "Example: ./new-framework.sh frameworks/myframework"
  exit 1
elif [ -d "$1" ]; then
  echo "A project with the given name '$1' already exists. Choose a different name"
  exit 1
fi

# Split the argument into its directory part and the framework name.
# All expansions of "$1" are quoted so paths containing spaces work.
PROJECT_NAME=$(basename "$1")
PROJECT_PATH=$(dirname "$1")
mkdir -p "$PROJECT_PATH"

# Seed the new project from the template, dropping stale build artifacts.
cp -R frameworks/template "$1"
rm -rf "$1/build"
rm -rf "$1"/cli/dcos-*/*.whl
rm -rf "$1"/cli/dcos-*/dcos-*
rm -rf "$1"/cli/python/{build,dist}

# Rename template-specific directories to the new project name.
mv "$1/cli/dcos-template" "$1/cli/dcos-$PROJECT_NAME"
mv "$1/src/main/java/com/mesosphere/sdk/template/" "$1/src/main/java/com/mesosphere/sdk/$PROJECT_NAME/"
mv "$1/src/test/java/com/mesosphere/sdk/template/" "$1/src/test/java/com/mesosphere/sdk/$PROJECT_NAME/"
if [ ! -d "$1" ]; then
  echo "Failed to create new project"
  exit 1
fi

# Substitute lower- and upper-case occurrences of "template" in every file
# (the original sed expression repeated the lower-case substitution twice).
UPPER_CASE_PROJECT_NAME=$(echo "$PROJECT_NAME" | awk '{print toupper($0)}')
find "$1" -type f -exec sed -i.bak "s/template/$PROJECT_NAME/g; s/TEMPLATE/$UPPER_CASE_PROJECT_NAME/g" {} \;
sed -i.bak -e '25,$ d' "$1/src/main/dist/svc.yml"
# Quote the pattern so find receives '*.bak' literally instead of the shell
# expanding it against .bak files in the current directory.
find "$1" -type f -name '*.bak' -exec rm {} \;

# NOTE(review): "$?" only reflects the preceding find/rm, not the whole setup.
if [ $? -eq 0 ]; then
  echo "" >> settings.gradle
  echo "include '$1'" >> settings.gradle
  echo "project(\":$1\").name = \"$PROJECT_NAME\"" >> settings.gradle
  echo "New project created successfully"
else
  echo "Failed to create new project"
  exit 1
fi
|
def copyArray(fromList, toList):
    """Append every element of ``fromList`` onto the end of ``toList``."""
    # Iterate over a snapshot so the source may safely alias the destination.
    snapshot = list(fromList)
    for element in snapshot:
        toList.append(element)
|
<gh_stars>0
// @flow
import ReactDOM from 'react-dom';
import * as React from 'react';
import { Component } from 'react-simplified';
import { TextInput } from '../../widgets/textInput/textInput';
import { Button } from '../../widgets/button/button';
import { history } from '../../app';
import style from './css/header.module.css';
//header at the top of the page
// Renders the site masthead: title banner, logo button (home), one menu
// button per section route, and a live search field.
// NOTE(review): the label "REGISTER SAK" below looks like a typo for the
// Norwegian "REGISTRER SAK" (the route is '/registrer/ny') — confirm before
// changing user-facing text.
export class Header extends Component {
  render() {
    return (
      <div className={style.header}>
        <div className={style.title}>
          <p>Tulleposten - vi publiserer dine saker!</p>
        </div>
        <button className={style.logo} onClick={() => history.push('/')}>
          TP
        </button>
        <Button.Menu className={style.news} onClick={() => history.push('/nyheter')}>
          NYHETER
        </Button.Menu>
        <Button.Menu className={style.culture} onClick={() => history.push('/kultur')}>
          KULTUR
        </Button.Menu>
        <Button.Menu className={style.sport} onClick={() => history.push('/sport')}>
          SPORT
        </Button.Menu>
        <Button.Menu className={style.celeb} onClick={() => history.push('/kjendis')}>
          KJENDIS
        </Button.Menu>
        <Button.Menu className={style.register} onClick={() => history.push('/registrer/ny')}>
          REGISTER SAK
        </Button.Menu>
        <TextInput.Dark className={style.search} placeholder="Søk" onChange={this.search} required={false} />
      </div>
    );
  }
  //searches for keyword in articles
  // Routes on every keystroke: empty input returns to the front page,
  // otherwise navigates to the search-results route for the keyword.
  search = (e: Event): void => {
    if (!(e.target instanceof HTMLInputElement)) return;
    let keyword = e.target.value;
    if (!keyword) history.push('/');
    //go to front page if no keyword
    else history.push('/artikkel/søk/' + keyword); //show results otherwise
  };
}
|
#!/bin/bash
#### debug model prod
#nohup java -Xdebug -Xrunjdwp:server=y,transport=dt_socket,address=8000,suspend=n -jar -Dspring.profiles.active=dev UserService.jar > user.log 2>&1 &
#### normal prod model
#nohup java -jar -Dspring.profiles.active=prod UserService.jar > user.log 2>&1 &
#### normal test model
#nohup java -jar -Dspring.profiles.active=test UserService.jar > user.log 2>&1 &
#### normal dev model
# Launch the service detached with the "dev" Spring profile; stdout and
# stderr both go to user.log.
nohup java -jar -Dspring.profiles.active=dev UserService.jar > user.log 2>&1 &
# Follow the log, starting from its last 100 lines. "-n 100 -f" replaces the
# obsolescent "tail -100f" option syntax, which modern tail implementations
# may reject.
tail -n 100 -f user.log
|
# -*- coding: iso-8859-1 -*-
"""
Created on December 9 2019
Description: This routine performs LSD analysis of SPIRou spectro-polarimetric data.
@author: <NAME> <<EMAIL>>
Institut d'Astrophysique de Paris, France.
Simple usage example:
GamEqu:
python ~/spirou-tools/spirou-polarimetry/spirou_lsd.py --input=2329699p.fits --lsdmask=$PATH/lsd_masks/marcs_t5000g50_all --output=2329699_lsd.fits -p
"""
__version__ = "1.0"
__copyright__ = """
Copyright (c) ... All rights reserved.
"""
from optparse import OptionParser
import os,sys
import spirouPolar, polar_param
import spirouLSD

# ----- Command-line interface -----
parser = OptionParser()
parser.add_option("-i", "--input", dest="input", help="Input SPIRou spectro-polarimetry file",type='string',default="")
parser.add_option("-m", "--lsdmask", dest="lsdmask", help="LSD mask",type='string', default="")
parser.add_option("-o", "--output", dest="output", help="Output LSD file",type='string', default="")
parser.add_option("-p", action="store_true", dest="plot", help="plot", default=False)
parser.add_option("-v", action="store_true", dest="verbose", help="verbose", default=False)

try:
    options,args = parser.parse_args(sys.argv[1:])
except SystemExit:
    # optparse exits via SystemExit both for "-h" and for its own error
    # reporting; the previous bare "except:" swallowed these and printed a
    # misleading usage error even after a successful "-h". Let them propagate.
    raise
except Exception:
    print("Error: check usage with -h spirou_lsd.py")
    sys.exit(1)

if options.verbose:
    print('Input SPIRou spectro-polarimetry file: ', options.input)
    if options.lsdmask != "" :
        print('LSD mask: ', options.lsdmask)
    print('Output file: ', options.output)

# set up data storage
loc = {}
# define polarimetry parameters
p = polar_param.load_polar_parameters()
# set input polarimetry file
loc['POL_FITS_FILE'] = options.input
# load files
loc = spirouPolar.load_pol_fits(options.input, loc)
# select LSD mask file
if options.lsdmask != "" :
    # set lsd mask file from input
    loc['LSD_MASK_FILE'] = options.lsdmask
else :
    # select an lsd mask file from repositories
    loc['LSD_MASK_FILE'] = spirouLSD.select_lsd_mask(p, loc)
    print('LSD mask (auto selection): ', loc['LSD_MASK_FILE'])
# ------------------------------------------------------------------
# Run LSD Analysis
# ------------------------------------------------------------------
loc = spirouLSD.lsd_analysis_wrapper(p, loc, options.verbose)
# save LSD data to fits
if options.output != "" :
    spirouLSD.save_lsd_fits(options.output, loc, p)
if options.plot :
    # plot LSD analysis
    spirouLSD.polar_lsd_plot(p, loc)
|
function Vector3(a, b, c)
{
this.x = 0;
this.y = 0;
this.z = 0;
if (a != undefined)
{
if (a.constructor == Number)
{
this.x = a;
this.y = b;
this.z = c;
}
else if (a.constructor == Array)
{
if (a.length != 3)
{
throw new Error("Trying to create 3 vector from array of length: " + a.length);
}
this.x = a[0];
this.y = a[1];
this.z = a[2];
}
}
this.copy = function ()
{
return new Vector3(this.x, this.y, this.z);
};
this.assign = function (x, y, z)
{
if (x instanceof Vector3)
{
this.x = x.x;
this.y = x.y;
this.z = x.z;
return;
}
this.x = x;
this.y = y;
this.z = z;
}
this.length = function ()
{
return Math.sqrt(this.x*this.x + this.y*this.y + this.z*this.z);
}
/**
* Normalize the vector in place, i.e., map it to the corresponding unit vector.
*/
this.normalize = function ()
{
var norm = this.length();
this.x = this.x / norm;
this.y = this.y / norm;
this.z = this.z / norm;
}
/**
* Scale the vector in place.
*/
this.scale = function (scale)
{
this.x = this.x * scale;
this.y = this.y * scale;
this.z = this.z * scale;
}
this.toFloatArray = function ()
{
return [x, y, z];
}
this.equals = function (object)
{
if (!(object instanceof Vector3))
{
return false;
}
// float equals is a bit of a dodgy concept
return other.x == this.x && other.y == this.y && other.z == this.z;
}
}
|
<reponame>Polidea/SiriusObfuscator<filename>SymbolExtractorAndRenamer/lldb/include/lldb/Interpreter/OptionValueString.h<gh_stars>100-1000
//===-- OptionValueString.h -------------------------------------*- C++ -*-===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
#ifndef liblldb_OptionValueString_h_
#define liblldb_OptionValueString_h_
// C Includes
// C++ Includes
#include <string>
// Other libraries and framework includes
// Project includes
#include "lldb/Core/Flags.h"
#include "lldb/Interpreter/OptionValue.h"
namespace lldb_private {
// String-valued OptionValue with an optional validation hook: every mutation
// goes through the (optional) ValidatorCallback before being accepted.
class OptionValueString : public OptionValue {
public:
  // Validation hook invoked before a new value is stored; returns an Error
  // describing why the candidate string was rejected.
  typedef Error (*ValidatorCallback)(const char *string, void *baton);

  enum Options { eOptionEncodeCharacterEscapeSequences = (1u << 0) };

  OptionValueString()
      : OptionValue(), m_current_value(), m_default_value(), m_options(),
        m_validator(), m_validator_baton() {}

  OptionValueString(ValidatorCallback validator, void *baton = nullptr)
      : OptionValue(), m_current_value(), m_default_value(), m_options(),
        m_validator(validator), m_validator_baton(baton) {}

  // A single initial value serves as both current and default.
  OptionValueString(const char *value)
      : OptionValue(), m_current_value(), m_default_value(), m_options(),
        m_validator(), m_validator_baton() {
    if (value && value[0]) {
      m_current_value.assign(value);
      m_default_value.assign(value);
    }
  }

  OptionValueString(const char *current_value, const char *default_value)
      : OptionValue(), m_current_value(), m_default_value(), m_options(),
        m_validator(), m_validator_baton() {
    if (current_value && current_value[0])
      m_current_value.assign(current_value);
    if (default_value && default_value[0])
      m_default_value.assign(default_value);
  }

  OptionValueString(const char *value, ValidatorCallback validator,
                    void *baton = nullptr)
      : OptionValue(), m_current_value(), m_default_value(), m_options(),
        m_validator(validator), m_validator_baton(baton) {
    if (value && value[0]) {
      m_current_value.assign(value);
      m_default_value.assign(value);
    }
  }

  OptionValueString(const char *current_value, const char *default_value,
                    ValidatorCallback validator, void *baton = nullptr)
      : OptionValue(), m_current_value(), m_default_value(), m_options(),
        m_validator(validator), m_validator_baton(baton) {
    if (current_value && current_value[0])
      m_current_value.assign(current_value);
    if (default_value && default_value[0])
      m_default_value.assign(default_value);
  }

  ~OptionValueString() override = default;

  //---------------------------------------------------------------------
  // Virtual subclass pure virtual overrides
  //---------------------------------------------------------------------

  OptionValue::Type GetType() const override { return eTypeString; }

  void DumpValue(const ExecutionContext *exe_ctx, Stream &strm,
                 uint32_t dump_mask) override;

  Error
  SetValueFromString(llvm::StringRef value,
                     VarSetOperationType op = eVarSetOperationAssign) override;
  // Deleted so raw C-string callers are forced through the StringRef overload.
  Error
  SetValueFromString(const char *,
                     VarSetOperationType = eVarSetOperationAssign) = delete;

  // Reverts to the default value and clears the "explicitly set" flag.
  bool Clear() override {
    m_current_value = m_default_value;
    m_value_was_set = false;
    return true;
  }

  lldb::OptionValueSP DeepCopy() const override;

  //---------------------------------------------------------------------
  // Subclass specific functions
  //---------------------------------------------------------------------

  Flags &GetOptions() { return m_options; }

  const Flags &GetOptions() const { return m_options; }

  // Assignment from a raw C string; returns the stored value's c_str().
  const char *operator=(const char *value) {
    SetCurrentValue(llvm::StringRef::withNullAsEmpty(value));
    return m_current_value.c_str();
  }

  const char *GetCurrentValue() const { return m_current_value.c_str(); }
  llvm::StringRef GetCurrentValueAsRef() const { return m_current_value; }

  const char *GetDefaultValue() const { return m_default_value.c_str(); }
  llvm::StringRef GetDefaultValueAsRef() const { return m_default_value; }

  Error SetCurrentValue(const char *) = delete;
  Error SetCurrentValue(llvm::StringRef value);

  Error AppendToCurrentValue(const char *value);

  void SetDefaultValue(const char *value) {
    if (value && value[0])
      m_default_value.assign(value);
    else
      m_default_value.clear();
  }

  bool IsCurrentValueEmpty() const { return m_current_value.empty(); }

  bool IsDefaultValueEmpty() const { return m_default_value.empty(); }

protected:
  std::string m_current_value;
  std::string m_default_value;
  Flags m_options;
  // Optional validation hook plus its opaque user context.
  ValidatorCallback m_validator;
  void *m_validator_baton;
};
} // namespace lldb_private
#endif // liblldb_OptionValueString_h_
|
#!/usr/bin/env bats
load helpers
# Sanity-check the default table output: header row plus one line matching
# the standard test image.
@test "podman images - basic output" {
    run_podman images -a

    is "${lines[0]}" "REPOSITORY *TAG *IMAGE ID *CREATED *SIZE" "header line"
    is "${lines[1]}" "$PODMAN_TEST_IMAGE_REGISTRY/$PODMAN_TEST_IMAGE_USER/$PODMAN_TEST_IMAGE_NAME *$PODMAN_TEST_IMAGE_TAG *[0-9a-f]\+" "podman images output"
}

# Each table row is "<--format args> | <expected regex>"; parse_table feeds
# them through run_podman and matches the output.
@test "podman images - custom formats" {
    tests="
--format {{.ID}}                  |        [0-9a-f]\\\{12\\\}
--format {{.ID}} --no-trunc       | sha256:[0-9a-f]\\\{64\\\}
--format {{.Repository}}:{{.Tag}} | $PODMAN_TEST_IMAGE_FQN
"

    parse_table "$tests" | while read fmt expect; do
        run_podman images $fmt
        is "$output" "$expect\$" "podman images $fmt"
    done
}

# Verify individual fields of the JSON representation via jq lookups.
@test "podman images - json" {
    tests="
names[0]        | $PODMAN_TEST_IMAGE_FQN
id              |       [0-9a-f]\\\{64\\\}
digest          | sha256:[0-9a-f]\\\{64\\\}
created         | [0-9-]\\\+T[0-9:]\\\+\\\.[0-9]\\\+Z
size            | [0-9]\\\+
"

    run_podman images -a --format json

    parse_table "$tests" | while read field expect; do
        actual=$(echo "$output" | jq -r ".[0].$field")
        dprint "# actual=<$actual> expect=<$expect}>"
        is "$actual" "$expect" "jq .$field"
    done
}
# vim: filetype=sh
|
<reponame>andromeda/mir
new Uint16Array();
|
# Print every item of the list, one per line. A direct for-loop replaces the
# manual index/while bookkeeping, which is error-prone and unidiomatic.
list_items = ['apple', 'banana', 'pear', 'strawberry']
for item in list_items:
    print(item)
|
# (C) Copyright 1988- ECMWF.
#
# This software is licensed under the terms of the Apache Licence Version 2.0
# which can be obtained at http://www.apache.org/licenses/LICENSE-2.0.
# In applying this licence, ECMWF does not waive the privileges and immunities
# granted to it by virtue of its status as an intergovernmental organisation
# nor does it submit to any jurisdiction.
# Source me to get the correct configure/build/run environment
# Store tracing and disable (module is *way* too verbose)
{ tracing_=${-//[^x]/}; set +x; } 2>/dev/null
# Load a module, echoing the command. With the ECBUILD_CONFIGURE_ONLY marker,
# the module is only loaded when ECBUILD_CONFIGURE is set in the environment.
module_load() {
  if [ "$2" == "ECBUILD_CONFIGURE_ONLY" ]; then
    if [ -n "${ECBUILD_CONFIGURE}" ]; then
      echo "+ module load $1"
      module load $1
    else
      echo " WARNING: Module $1 not loaded (only during configuration)"
    fi
  else
    echo "+ module load $1"
    module load $1
  fi
}
# Unload a module, echoing the command for traceability.
module_unload() {
  echo "+ module unload $1"
  module unload $1
}

# Unload to be certain
# NOTE(review): boost appears twice in both the unload and load lists below —
# presumably harmless duplication; confirm before removing.
module_unload cmake
module_unload python
module_unload python3
module_unload boost
module_unload ecbuild
module_unload ifs-support
module_unload cdt
module_unload boost
module_unload PrgEnv-cray
module_unload PrgEnv-intel
module_unload intel
module_unload gcc

export EC_CRAYPE_INTEGRATION=off

prgenvswitchto intel

# Load modules
module_unload intel
module_load intel/2021.1.1
module_load python3
module_load boost
module_load ninja
module_load cmake
module_load boost

set -x

# This is used to download binary test data
export http_proxy="http://slb-proxy-web.ecmwf.int:3333/"

# Restore tracing to stored setting
{ if [[ -n "$tracing_" ]]; then set -x; else set +x; fi } 2>/dev/null

export ECBUILD_TOOLCHAIN="./toolchain.cmake"
|
import React, { useMemo } from 'react';
import { TextField } from '@/components/core/Form';
import { Stack } from '@/components/UI/Stack';
import { useFocusIdx } from '@/hooks/useFocusIdx';
import { TextStyle } from '@/components/UI/TextStyle';
export function Margin() {
const { focusIdx } = useFocusIdx();
return useMemo(() => {
return (
<Stack vertical spacing='extraTight'>
<TextStyle size='large'>Margin</TextStyle>
<Stack wrap={false}>
<Stack.Item fill>
<TextField
label='Top'
quickchange
name={`${focusIdx}.attributes.marginTop`}
inline
/>
</Stack.Item>
<Stack.Item fill>
<TextField
label='Bottom'
quickchange
name={`${focusIdx}.attributes.marginBottom`}
inline
/>
</Stack.Item>
</Stack>
<Stack wrap={false}>
<Stack.Item fill>
<TextField
label='Left'
quickchange
name={`${focusIdx}.attributes.marginLeft`}
inline
/>
</Stack.Item>
<Stack.Item fill>
<TextField
label='Right'
quickchange
name={`${focusIdx}.attributes.marginRight`}
inline
/>
</Stack.Item>
</Stack>
</Stack>
);
}, [focusIdx]);
}
|
#include<bits/stdc++.h>
int t1, t2; // Two machines' clocks (zero-initialized at file scope)

/*
 * Synchronize the two clocks: whichever clock is ahead wins, so after the
 * call t1 == t2 == max(original t1, original t2).
 */
void syncClocks() {
    if (t1 > t2)
        t2 = t1; // "t2 + (t1 - t2)" simplifies to a plain assignment
    else
        t1 = t2; // likewise for "t1 + (t2 - t1)"
}
// Demo driver: both globals start at 0, so syncClocks() leaves them equal.
int main() {
    syncClocks();
    return 0;
}
|
# SPDX-License-Identifier: GPL-2.0+
# Copyright (c) 2016 Google, Inc
# Written by <NAME> <<EMAIL>>
#
# Handle various things related to ELF images
#
from collections import namedtuple, OrderedDict
import io
import os
import re
import shutil
import struct
import tempfile
from patman import command
from patman import tools
from patman import tout
# Whether the optional 'pyelftools' dependency could be imported; callers are
# expected to check this flag before relying on ELFFile features.
ELF_TOOLS = True
try:
    from elftools.elf.elffile import ELFFile
    from elftools.elf.sections import SymbolTableSection
except: # pragma: no cover
    ELF_TOOLS = False

# One objdump symbol-table entry: section name, address, size and weak flag.
Symbol = namedtuple('Symbol', ['section', 'address', 'size', 'weak'])

# Information about an ELF file:
# data: Extracted program contents of ELF file (this would be loaded by an
#     ELF loader when reading this file
# load: Load address of code
# entry: Entry address of code
# memsize: Number of bytes in memory occupied by loading this ELF file
ElfInfo = namedtuple('ElfInfo', ['data', 'load', 'entry', 'memsize'])
def GetSymbols(fname, patterns):
    """Get the symbols from an ELF file by parsing 'objdump -t' output.

    Args:
        fname: Filename of the ELF file to read
        patterns: List of regex patterns to search for, each a string; an
            empty/None list disables filtering

    Returns:
        None, if the file does not exist, or Dict (ordered by address):
            key: Name of symbol
            value: Symbol namedtuple (section, address, size, weak flag)
    """
    stdout = tools.Run('objdump', '-t', fname)
    lines = stdout.splitlines()
    if patterns:
        re_syms = re.compile('|'.join(patterns))
    else:
        re_syms = None
    syms = {}
    syms_started = False
    for line in lines:
        # Skip everything until the 'SYMBOL TABLE' marker, and blank lines.
        if not line or not syms_started:
            if 'SYMBOL TABLE' in line:
                syms_started = True
            line = None # Otherwise code coverage complains about 'continue'
            continue
        if re_syms and not re_syms.search(line):
            continue
        # objdump line format: '<value> <flags(7 chars)> <section> <size> [name]'
        space_pos = line.find(' ')
        value, rest = line[:space_pos], line[space_pos + 1:]
        flags = rest[:7]
        parts = rest[7:].split()
        section, size = parts[:2]
        if len(parts) > 2:
            # '.hidden' may precede the actual symbol name - skip over it.
            name = parts[2] if parts[2] != '.hidden' else parts[3]
            # flags[1] == 'w' marks a weak symbol in objdump output.
            syms[name] = Symbol(section, int(value, 16), int(size,16),
                                flags[1] == 'w')

    # Sort dict by address
    return OrderedDict(sorted(syms.items(), key=lambda x: x[1].address))
def GetSymbolAddress(fname, sym_name):
    """Get the value of a single symbol from an ELF file

    FIX: the previous docstring documented a nonexistent 'patterns'
    parameter; this function takes a single symbol name.

    Args:
        fname: Filename of the ELF file to read
        sym_name: Name of the symbol to look up

    Returns:
        Symbol value (as an integer) or None if not found
    """
    syms = GetSymbols(fname, [sym_name])
    sym = syms.get(sym_name)
    return sym.address if sym else None
def LookupAndWriteSymbols(elf_fname, entry, section):
    """Replace all symbols in an entry with their correct values

    The entry contents is updated so that values for referenced symbols will be
    visible at run time. This is done by finding out the symbols offsets in the
    entry (using the ELF file) and replacing them with values from binman's data
    structures.

    Args:
        elf_fname: Filename of ELF image containing the symbol information for
            entry
        entry: Entry to process
        section: Section which can be used to lookup symbol values

    Raises:
        ValueError: if a symbol lies outside the entry's contents, or has a
            size other than 4 or 8 bytes
    """
    fname = tools.GetInputFilename(elf_fname)
    # Only symbols whose table line mentions 'image' or 'binman' are relevant
    syms = GetSymbols(fname, ['image', 'binman'])
    if not syms:
        return
    # All patch offsets are computed relative to __image_copy_start
    base = syms.get('__image_copy_start')
    if not base:
        return
    for name, sym in syms.items():
        if name.startswith('_binman'):
            msg = ("Section '%s': Symbol '%s'\n in entry '%s'" %
                   (section.GetPath(), name, entry.GetPath()))
            offset = sym.address - base.address
            # The symbol must fall entirely within the entry's contents
            if offset < 0 or offset + sym.size > entry.contents_size:
                raise ValueError('%s has offset %x (size %x) but the contents '
                                 'size is %x' % (entry.GetPath(), offset,
                                                 sym.size, entry.contents_size))
            # Only 32-bit and 64-bit little-endian values are supported
            if sym.size == 4:
                pack_string = '<I'
            elif sym.size == 8:
                pack_string = '<Q'
            else:
                raise ValueError('%s has size %d: only 4 and 8 are supported' %
                                 (msg, sym.size))
            # Look up the symbol in our entry tables.
            value = section.GetImage().LookupImageSymbol(name, sym.weak, msg,
                                                         base.address)
            if value is None:
                value = -1
                # Switch to the signed pack format so -1 can be encoded
                pack_string = pack_string.lower()
            value_bytes = struct.pack(pack_string, value)
            tout.Debug('%s:\n insert %s, offset %x, value %x, length %d' %
                       (msg, name, offset, value, len(value_bytes)))
            # Splice the packed value into the entry data at the symbol offset
            entry.data = (entry.data[:offset] + value_bytes +
                          entry.data[offset + sym.size:])
def MakeElf(elf_fname, text, data):
    """Make an elf file with the given data in a single section

    The output file has a several section including '.text' and '.data',
    containing the info provided in arguments.

    The file is produced by writing an assembly source file and a linker
    script into a temporary directory, then invoking the target C compiler.

    Args:
        elf_fname: Output filename
        text: Text (code) to put in the file's .text section
        data: Data to put in the file's .data section
    """
    outdir = tempfile.mkdtemp(prefix='binman.elf.')
    s_file = os.path.join(outdir, 'elf.S')
    # Split the text into two parts so that we can make the entry point two
    # bytes after the start of the text section
    text_bytes1 = ['\t.byte\t%#x' % byte for byte in text[:2]]
    text_bytes2 = ['\t.byte\t%#x' % byte for byte in text[2:]]
    data_bytes = ['\t.byte\t%#x' % byte for byte in data]
    # Emit the requested bytes verbatim from an assembly source file
    with open(s_file, 'w') as fd:
        print('''/* Auto-generated C program to produce an ELF file for testing */
.section .text
.code32
.globl _start
.type _start, @function
%s
_start:
%s
.ident "comment"
.comm fred,8,4
.section .empty
.globl _empty
_empty:
.byte 1
.globl ernie
.data
.type ernie, @object
.size ernie, 4
ernie:
%s
''' % ('\n'.join(text_bytes1), '\n'.join(text_bytes2), '\n'.join(data_bytes)),
              file=fd)
    lds_file = os.path.join(outdir, 'elf.lds')
    # Use a linker script to set the alignment and text address.
    with open(lds_file, 'w') as fd:
        print('''/* Auto-generated linker script to produce an ELF file for testing */
PHDRS
{
text PT_LOAD ;
data PT_LOAD ;
empty PT_LOAD FLAGS ( 6 ) ;
note PT_NOTE ;
}
SECTIONS
{
. = 0xfef20000;
ENTRY(_start)
.text . : SUBALIGN(0)
{
*(.text)
} :text
.data : {
*(.data)
} :data
_bss_start = .;
.empty : {
*(.empty)
} :empty
/DISCARD/ : {
*(.note.gnu.property)
}
.note : {
*(.comment)
} :note
.bss _bss_start (OVERLAY) : {
*(.bss)
}
}
''', file=fd)
    # Compiler/linker flags:
    # -static: Avoid requiring any shared libraries
    # -nostdlib: Don't link with C library
    # -Wl,--build-id=none: Don't generate a build ID, so that we just get the
    #     text section at the start
    # -m32: Build for 32-bit x86
    # -T...: Specifies the link script, which sets the start address
    cc, args = tools.GetTargetCompileTool('cc')
    args += ['-static', '-nostdlib', '-Wl,--build-id=none', '-m32', '-T',
             lds_file, '-o', elf_fname, s_file]
    # NOTE(review): compiler output is captured but unused; failures
    # presumably surface as an exception from command.Output() - confirm.
    stdout = command.Output(cc, *args)
    # Clean up the temporary build directory
    shutil.rmtree(outdir)
def DecodeElf(data, location):
    """Decode an in-memory ELF file and return information about it

    FIX: removed stray C-style trailing semicolons; logic is unchanged.

    Args:
        data: Contents of the ELF file, as bytes
        location: Start address of data to return

    Returns:
        ElfInfo namedtuple holding the extracted program data, the load
        (start) address, the physical entry address and the number of bytes
        occupied in memory
    """
    file_size = len(data)
    with io.BytesIO(data) as fd:
        elf = ELFFile(fd)

        # First pass over the loadable segments: find the physical address
        # range they cover and the virtual-to-physical address offset
        data_start = 0xffffffff
        data_end = 0
        mem_end = 0
        virt_to_phys = 0
        for i in range(elf.num_segments()):
            segment = elf.get_segment(i)
            if segment['p_type'] != 'PT_LOAD' or not segment['p_memsz']:
                skipped = 1  # To make code coverage see this line
                continue
            start = segment['p_paddr']
            mend = start + segment['p_memsz']
            rend = start + segment['p_filesz']
            data_start = min(data_start, start)
            data_end = max(data_end, rend)
            mem_end = max(mem_end, mend)
            if not virt_to_phys:
                virt_to_phys = segment['p_paddr'] - segment['p_vaddr']

        # Second pass: copy each segment's file-backed bytes into the output
        # buffer, skipping anything before the requested location
        output = bytearray(data_end - data_start)
        for i in range(elf.num_segments()):
            segment = elf.get_segment(i)
            if segment['p_type'] != 'PT_LOAD' or not segment['p_memsz']:
                skipped = 1  # To make code coverage see this line
                continue
            start = segment['p_paddr']
            offset = 0
            if start < location:
                offset = location - start
                start = location
            # A legal ELF file can have a program header with non-zero length
            # but zero-length file size and a non-zero offset which, added
            # together, are greater than input->size (i.e. the total file size).
            # So we need to not even test in the case that p_filesz is zero.
            # Note: All of this code is commented out since we don't have a test
            # case for it.
            size = segment['p_filesz']
            #if not size:
                #continue
            #end = segment['p_offset'] + segment['p_filesz']
            #if end > file_size:
                #raise ValueError('Underflow copying out the segment. File has %#x bytes left, segment end is %#x\n',
                                 #file_size, end)
            output[start - data_start:start - data_start + size] = (
                segment.data()[offset:])
        return ElfInfo(output, data_start, elf.header['e_entry'] + virt_to_phys,
                       mem_end - data_start)
|
#!/usr/bin/env python
# encoding: utf-8
#
# Copyright (c) 2008 <NAME> All rights reserved.
#
"""
"""
__version__ = "$Id$"
#end_pymotw_header
import tempfile

# FIX: the original used Python 2 print statements, which are a syntax
# error on any supported (Python 3) interpreter.  Use the print() function.
print('gettempdir():', tempfile.gettempdir())
print('gettempprefix():', tempfile.gettempprefix())
|
var util = require("util");
var nodemailer = require('nodemailer');
var smtpTransport = require('nodemailer-smtp-transport');
var wellknown = require('nodemailer-wellknown');

// Set up the Nodemailer transport.
// FIX: smtpTrans was previously assigned without `var`, leaking an implicit
// global; it is now module-scoped (still visible to the functions below).
// NOTE(review): credentials are hard-coded placeholders - load them from
// environment variables or config before deploying.
var smtpTrans = nodemailer.createTransport(smtpTransport({
    service: 'Gmail',
    auth: {
        user: "<EMAIL>",
        pass: "<PASSWORD>"
    }
}));
// send mail
exports.send = function(contacts, subject, text, callback) {
for (var i = 0; i < contacts.length; i++) {
mailOpts = createMail(contacts[i], subject, text);
smtpTrans.sendMail(mailOpts, function(error, response) {
if (error) {
doError(error);
}
else {
callback('Yay! Email sent!');
}
});
}
}
// Build the nodemailer message options for a single contact, substituting
// the contact's name into the message body.
function createMail(contactInfo, subjectname, body) {
    // FIX: editedMessage and mailOpts were implicit globals; declare locally.
    var editedMessage = replaceParams(body, contactInfo);
    var mailOpts = {
        from: '<NAME> <<EMAIL>>',
        to: contactInfo.email,
        subject: subjectname,
        html: editedMessage
    };
    return mailOpts;
}
/**
 * Substitute <<<firstname>>> / <<<lastname>>> placeholders in a message with
 * the user's names.  Returns fallback text when the message is undefined.
 *
 * FIX: String.prototype.replace with a string pattern only replaces the
 * FIRST occurrence; use split/join so every occurrence is substituted.
 * Also removed the debug console.log calls.
 *
 * @param {string|undefined} message Message template.
 * @param {Object} params Object with `firstname` and `lastname` properties.
 * @return {string} The message with placeholders filled in.
 */
function replaceParams(message, params) {
    if (typeof message === 'undefined') {
        return "This message has no content. Sorry!";
    }
    var edited = message;
    if (edited.indexOf("<<<firstname>>>") >= 0) {
        edited = edited.split("<<<firstname>>>").join(params.firstname);
    }
    if (edited.indexOf("<<<lastname>>>") >= 0) {
        edited = edited.split("<<<lastname>>>").join(params.lastname);
    }
    return edited;
}
// Log an error to stderr and rethrow it as an Error so callers fail loudly.
// FIX: util.debug() has been deprecated since Node 0.12; use console.error.
var doError = function(e) {
    console.error("ERROR: " + e);
    throw new Error(e);
};
|
#!/bin/bash
set -e # Exit on error

# Main storage directory. You'll need disk space to dump the WHAM mixtures and the wsj0 wav
# files if you start from sphere files.
storage_dir=
# If you start from the sphere files, specify the path to the directory and start from stage 0
sphere_dir= # Directory containing sphere files
# If you already have wsj0 wav files, specify the path to the directory here and start from stage 1
wsj0_wav_dir=
# If you already have the WHAM mixtures, specify the path to the directory here and start from stage 2
wham_wav_dir=
# After running the recipe a first time, you can run it from stage 3 directly to train new models.

# Path to the python you'll use for the experiment. Defaults to the current python
# You can run ./utils/prepare_python_env.sh to create a suitable python environment, paste the output here.
python_path=python

# Example usage
# ./run.sh --stage 3 --tag my_tag --task sep_noisy --id 0,1

# General
stage=0 # Controls from which stage to start
tag="" # Controls the directory name associated to the experiment
# You can ask for several GPUs using id (passed to CUDA_VISIBLE_DEVICES)
id=

# Data
task=sep_clean # Specify the task here (sep_clean, sep_noisy, enh_single, enh_both)
sample_rate=8000
mode=min
nondefault_src= # If you want to train a network with 3 output streams for example.

# Training
batch_size=8
num_workers=8
#optimizer=adam
lr=0.001
epochs=200

# Architecture
n_blocks=8
n_repeats=3
mask_nonlinear=relu

# Evaluation
eval_use_gpu=1

# Parse --option value command-line overrides for the variables above
. utils/parse_options.sh

# Derive data directory names from the sample rate (e.g. wav8k) and mode
sr_string=$(($sample_rate/1000))
suffix=wav${sr_string}k/$mode
dumpdir=data/$suffix # directory to put generated json file

# Train / validation / test json directories
train_dir=$dumpdir/tr
valid_dir=$dumpdir/cv
test_dir=$dumpdir/tt

if [[ $stage -le 0 ]]; then
  echo "Stage 0: Converting sphere files to wav files"
  . local/convert_sphere2wav.sh --sphere_dir $sphere_dir --wav_dir $wsj0_wav_dir
fi

if [[ $stage -le 1 ]]; then
  echo "Stage 1: Generating 8k and 16k WHAM dataset"
  . local/prepare_data.sh --wav_dir $wsj0_wav_dir --out_dir $wham_wav_dir --python_path $python_path
fi
if [[ $stage -le 2 ]]; then
  # Make json directories with min/max modes and sampling rates
  echo "Stage 2: Generating json files including wav path and duration"
  # FIX: this loop previously reused $sr_string (and $mode_option shadowed
  # nothing, but $sr_string is derived from --sample_rate above and is used
  # again later to build the experiment tag).  Reusing it left sr_string=16
  # after the loop, corrupting the tag.  Use dedicated loop variables.
  for loop_sr in 8 16; do
    for loop_mode in min max; do
      tmp_dumpdir=data/wav${loop_sr}k/$loop_mode
      echo "Generating json files in $tmp_dumpdir"
      [[ ! -d $tmp_dumpdir ]] && mkdir -p $tmp_dumpdir
      local_wham_dir=$wham_wav_dir/wav${loop_sr}k/$loop_mode/
      $python_path local/preprocess_wham.py --in_dir $local_wham_dir --out_dir $tmp_dumpdir
    done
  done
fi
# Generate a random ID for the run if no tag is specified
uuid=$($python_path -c 'import uuid, sys; print(str(uuid.uuid4())[:8])')
if [[ -z ${tag} ]]; then
  tag=${task}_${sr_string}k${mode}_${uuid}
fi

# Experiment directory; the run uuid is appended so reruns stay traceable
expdir=exp/train_convtasnet_${tag}
mkdir -p $expdir && echo $uuid >> $expdir/run_uuid.txt
echo "Results from the following experiment will be stored in $expdir"

if [[ $stage -le 3 ]]; then
  echo "Stage 3: Training"
  mkdir -p logs
  # Train on the GPUs in $id; log both to the console and to a file
  CUDA_VISIBLE_DEVICES=$id $python_path train.py \
  --train_dir $train_dir \
  --valid_dir $valid_dir \
  --task $task \
  --sample_rate $sample_rate \
  --lr $lr \
  --epochs $epochs \
  --batch_size $batch_size \
  --num_workers $num_workers \
  --mask_act $mask_nonlinear \
  --n_blocks $n_blocks \
  --n_repeats $n_repeats \
  --exp_dir ${expdir}/ | tee logs/train_${tag}.log
  cp logs/train_${tag}.log $expdir/train.log
fi

if [[ $stage -le 4 ]]; then
  echo "Stage 4 : Evaluation"
  # Evaluate the trained model on the test set
  CUDA_VISIBLE_DEVICES=$id $python_path eval.py \
  --task $task \
  --test_dir $test_dir \
  --use_gpu $eval_use_gpu \
  --exp_dir ${expdir} | tee logs/eval_${tag}.log
  cp logs/eval_${tag}.log $expdir/eval.log
fi
|
<filename>app/src/main/java/sample/sadashiv/examplerealmmvp/presenter/DetailPresenter.java
package sample.sadashiv.examplerealmmvp.presenter;

import sample.sadashiv.examplerealmmvp.model.realm.RealmService;
import sample.sadashiv.examplerealmmvp.ui.detail.DetailView;

/**
 * Presenter for the book detail screen.  Fetches a single book from the
 * {@link RealmService} by its id and pushes it to the {@link DetailView}.
 */
public class DetailPresenter extends BasePresenter<DetailView> {
    /** Id of the book this presenter displays; fixed for the presenter's lifetime. */
    private final String mBookId;

    /**
     * @param view         the detail view to drive
     * @param realmService data source used to load the book
     * @param bookId       id of the book to display
     */
    public DetailPresenter(final DetailView view, final RealmService realmService, final String bookId) {
        super(view, realmService);
        mBookId = bookId;
    }

    /** Load the book from the Realm service and hand it to the view. */
    public void showBookDetails() {
        mView.showBookDetails(mRealmService.getBook(mBookId));
    }
}
|
<reponame>totemkevin/poc
const express = require("express");
const path = require("path");
const history = require("connect-history-api-fallback");

// Serve the static Storybook build; the history fallback rewrites deep
// links to index.html so client-side routing works.
const app = express();
app.use(history());
app.use(express.static(path.join(__dirname, "storybook-static")));

const PORT = 3000;
app.listen(PORT, function () {
  console.log('server start');
});
|
<filename>src/main/java/com/ipec/trazactivo/repository/EspecialidadAcademicaDao.java<gh_stars>0
package com.ipec.trazactivo.repository;

import com.ipec.trazactivo.model.EspecialidadAcademica;
import org.springframework.data.jpa.repository.JpaRepository;

/**
 * Spring Data JPA repository for {@link EspecialidadAcademica} entities,
 * keyed by their {@code Integer} id.  CRUD and paging methods are inherited
 * from {@link JpaRepository}; no custom queries are defined.
 */
public interface EspecialidadAcademicaDao extends JpaRepository<EspecialidadAcademica, Integer> {
}
|
<reponame>drkitty/cyder
from django import forms
from cyder.base.eav.forms import get_eav_form
from cyder.base.mixins import UsabilityFormMixin
from cyder.cydhcp.range.models import Range, RangeAV
from cyder.cydns.forms import ViewChoiceForm
class RangeForm(ViewChoiceForm, UsabilityFormMixin):
    """Form for creating and editing a Range, with DNS view selection."""

    class Meta:
        model = Range
        # FIX: 'exclude' was previously assigned twice; the second assignment
        # (the bare string 'range_usage') silently replaced the tuple of raw
        # bound columns, and Django's "f.name in exclude" test would then do
        # *substring* matching on the string.  Merge into a single tuple.
        exclude = ('start_upper', 'start_lower', 'end_upper', 'end_lower',
                   'range_usage')
        fields = ('name', 'network', 'ip_type', 'range_type', 'start_str',
                  'end_str', 'domain', 'is_reserved', 'allow', 'views',
                  'dhcpd_raw_include', 'dhcp_enabled', 'description')
        widgets = {'views': forms.CheckboxSelectMultiple,
                   'range_type': forms.RadioSelect,
                   'ip_type': forms.RadioSelect}

    def __init__(self, *args, **kwargs):
        super(RangeForm, self).__init__(*args, **kwargs)
        # Relabel the raw-include textarea and keep it hidden by default
        self.fields['dhcpd_raw_include'].label = "DHCP Config Extras"
        self.fields['dhcpd_raw_include'].widget.attrs.update(
            {'cols': '80',
             'style': 'display: none;width: 680px'})
        self.fields['network'].widget.attrs.update({'class': 'networkWizard'})

    def filter_by_ctnr_all(self, ctnr):
        # Filter everything by container except the network field
        super(RangeForm, self).filter_by_ctnr_all(ctnr, skip='network')


RangeAVForm = get_eav_form(RangeAV, Range)
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
from amaasutils.random_utils import random_string, random_date
import random
from amaascore.assets.enums import WINE_CLASSIFICATIONS, WINE_PACKING_TYPE
from amaascore.assets.wine import Wine
from amaascore.core.comment import Comment
from amaascore.tools.generate_asset import generate_common
def generate_wine(asset_manager_id=None, asset_id=None):
    """Create a Wine asset populated with plausible random attribute values.

    Args:
        asset_manager_id: Optional asset manager id, forwarded to
            generate_common.
        asset_id: Optional asset id, forwarded to generate_common.

    Returns:
        A Wine instance with randomized wine-specific attributes plus the
        common asset attributes.
    """
    common_attrs = generate_common(asset_manager_id=asset_manager_id,
                                   asset_id=asset_id)
    # Wine-specific attributes.  Evaluation order matches the original
    # implementation so seeded randomness stays reproducible.
    wine_attrs = dict(
        year=random_date(start_year=1950, end_year=2016),
        producer=random_string(5),
        region=random.choice(['Bordeaux', 'Burgundy', 'Tuscany', 'Napa Valley']),
        appellation=random.choice(
            [None] * 3 + ['Côtes du Rhône', 'Graves', 'Saint-Émilion']),
        classification=random.choice(list(WINE_CLASSIFICATIONS)),
        color=random.choice(['Red', 'White']),
        bottle_size=random.choice(['0.75L'] * 3 + ['1.5L']),
        bottle_in_cellar=random.choice([True] * 3 + [False]),
        bottle_location=random_string(20),
        storage_cost=None,
        rating_type='Parker',
        rating_value=random.randint(93, 100),
        packing_type=random.choice(list(WINE_PACKING_TYPE)),
        to_drink_start=random_date(start_year=2000),
        to_drink_end=random_date(end_year=2050),
        comments={'DrinkingNotes': Comment(comment_value=random_string(100))},
    )
    # NOTE(review): assumes generate_common keys don't overlap the wine
    # attributes (the original would raise TypeError on a duplicate kwarg).
    wine_attrs.update(common_attrs)
    return Wine(**wine_attrs)
|
# This shell script executes Slurm jobs for thresholding
# predictions of NTT-like convolutional
# neural network on BirdVox-70k full audio
# with logmelspec input.
# Augmentation kind: none.
# Test unit: unit02.
# Trial ID: 5.

# Submit one job per prediction unit, in the original order.
for predict_unit in unit02 unit10 unit01; do
    sbatch "042_aug-none_test-unit02_predict-${predict_unit}_trial-5.sbatch"
done
|
/**
* @license
* Copyright 2014 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
/**
* @fileoverview Creates a multi-user pond (duck page).
* @author <EMAIL> (<NAME>)
*/
'use strict';

goog.provide('Pond.Duck');

goog.require('Blockly.FlyoutButton');
goog.require('Blockly.utils.Coordinate');
goog.require('Blockly.utils.dom');
goog.require('Blockly.ZoomControls');
goog.require('BlocklyAce');
goog.require('BlocklyDialogs');
goog.require('BlocklyGames');
goog.require('BlocklyInterface');
goog.require('Pond');
goog.require('Pond.Battle');
goog.require('Pond.Blocks');
goog.require('Pond.Duck.soy');
goog.require('Pond.Visualization');

BlocklyGames.NAME = 'pond-duck';

/**
 * Array of editor tabs (Blockly and ACE).
 * @type Array.<!Element>
 */
Pond.Duck.editorTabs = null;

/**
 * ACE editor fires change events even on programmatically caused changes.
 * This property is used to signal times when a programmatic change is made.
 * Starts true so editor writes during startup are ignored; init/changeAvatar
 * clear it once setup is complete.
 * @type {boolean}
 */
Pond.Duck.ignoreEditorChanges_ = true;

/**
 * Currently selected avatar.
 * @type Pond.Avatar
 */
Pond.currentAvatar = null;

/**
 * Array of duck data that was loaded separately.
 * @type Array.<!Object>
 */
Pond.Duck.duckData = null;

/**
 * Indices of tabs.  BLOCKS shows the Blockly workspace, JAVASCRIPT shows the
 * ACE editor; changeTab() switches between them.
 */
Pond.Duck.tabIndex = {
  BLOCKS: 0,
  JAVASCRIPT: 1
};
/**
 * Initialize Ace and the pond. Called on page load.
 */
Pond.Duck.init = function() {
  // Duck programs are loaded by a separate script that defines window['DUCKS'].
  Pond.Duck.duckData = window['DUCKS'];
  // Render the Soy template.
  document.body.innerHTML = Pond.Duck.soy.start({}, null,
      {lang: BlocklyGames.LANG,
       html: BlocklyGames.IS_HTML});
  Pond.init();
  // Setup the tabs.
  function tabHandler(selectedIndex) {
    return function() {
      // Clicks on a disabled tab (e.g. blocks after hand-editing JS) are inert.
      if (Blockly.utils.dom.hasClass(tabs[selectedIndex], 'tab-disabled')) {
        return;
      }
      Pond.Duck.changeTab(selectedIndex);
    };
  }
  var tabs = Array.prototype.slice.call(
      document.querySelectorAll('#editorBar>.tab'));
  Pond.Duck.editorTabs = tabs;
  for (var i = 0; i < tabs.length; i++) {
    BlocklyGames.bindClick(tabs[i], tabHandler(i));
  }
  // Position the tab bar and both editors relative to the visualization;
  // recomputed on every scroll/resize.
  var rtl = BlocklyGames.isRtl();
  var visualization = document.getElementById('visualization');
  var tabDiv = document.getElementById('tabarea');
  var blocklyDiv = document.getElementById('blockly');
  var editorDiv = document.getElementById('editor');
  var divs = [blocklyDiv, editorDiv];
  var onresize = function(e) {
    var top = visualization.offsetTop;
    tabDiv.style.top = (top - window.pageYOffset) + 'px';
    tabDiv.style.left = rtl ? '10px' : '420px';
    tabDiv.style.width = (window.innerWidth - 440) + 'px';
    var divTop =
        Math.max(0, top + tabDiv.offsetHeight - window.pageYOffset) + 'px';
    var divLeft = rtl ? '10px' : '420px';
    var divWidth = (window.innerWidth - 440) + 'px';
    for (var i = 0, div; (div = divs[i]); i++) {
      div.style.top = divTop;
      div.style.left = divLeft;
      div.style.width = divWidth;
    }
  };
  window.addEventListener('scroll', function() {
    onresize(null);
    Blockly.svgResize(BlocklyInterface.workspace);
  });
  window.addEventListener('resize', onresize);
  onresize(null);
  // Inject JS editor.
  var session = BlocklyAce.makeAceSession();
  session['on']('change', Pond.Duck.editorChanged);
  // Lazy-load the ESx-ES5 transpiler.
  BlocklyAce.importBabel();
  // Prevent duck programs from shadowing the pond API names.
  Blockly.JavaScript.addReservedWords('scan,cannon,drive,swim,stop,speed,' +
      'damage,health,loc_x,getX,loc_y,getY,log');
  // Starting positions for the avatars.
  // NOTE(review): assumes duckData has at most four entries - TODO confirm.
  var coordinates = [
    new Blockly.utils.Coordinate(20, 80),
    new Blockly.utils.Coordinate(80, 80),
    new Blockly.utils.Coordinate(20, 20),
    new Blockly.utils.Coordinate(80, 20)
  ];
  var avatarSelect = document.getElementById('avatar-select');
  for (var duckData, i = 0; (duckData = Pond.Duck.duckData[i]); i++) {
    // A null name marks the player's own duck.
    if (duckData['name'] === null) {
      duckData['name'] = BlocklyGames.getMsg('Pond_myName');
    }
    var option = new Option(duckData['name'], duckData['id']);
    avatarSelect.add(option);
    var avatar = new Pond.Avatar(duckData['name'], coordinates[i], 0,
        duckData['editable'], Pond.Battle);
    // Ducks with neither blocks nor JS source fall back to their compiled JS.
    if (duckData['blockly'] === undefined && duckData['js'] === undefined) {
      duckData['js'] = duckData['compiled'];
    }
    avatar.setCode(duckData['blockly'], duckData['js'], duckData['compiled']);
  }
  avatarSelect.addEventListener('change', Pond.Duck.changeAvatar);
  Pond.reset();
  if (avatarSelect) {
    Pond.Duck.changeAvatar();
    // BUG: Setting the background colour of a select element causes the arrow
    // to disappear in Firefox:
    // https://bugzilla.mozilla.org/show_bug.cgi?id=1298510
    avatarSelect.style.width = '0';
    setTimeout(function() {avatarSelect.style.width = 'auto';}, 0);
  }
};
/**
 * Called by the avatar selector when changed.
 */
Pond.Duck.changeAvatar = function() {
  // Suppress editorChanged while editor contents are swapped programmatically.
  Pond.Duck.ignoreEditorChanges_ = true;
  var avatarSelect = document.getElementById('avatar-select');
  var i = avatarSelect.selectedIndex;
  if (Pond.currentAvatar == Pond.Battle.AVATARS[i]) {
    // NOTE(review): this early return leaves ignoreEditorChanges_ stuck at
    // true, muting later editor change handling - confirm this is intended.
    return;
  }
  // Persist the outgoing avatar's code before switching.
  Pond.saveAvatar();
  Pond.currentAvatar = Pond.Battle.AVATARS[i];
  // Tint the selector with the avatar's colour.
  avatarSelect.style.backgroundColor =
      Pond.Visualization.getColour(Pond.currentAvatar);
  if (Pond.currentAvatar.blockly !== undefined) {
    // Avatar has block code: rebuild a fresh Blockly workspace and load it.
    if (BlocklyInterface.workspace) {
      BlocklyInterface.workspace.dispose();
    }
    BlocklyInterface.injectBlockly(
        {'rtl': false,
         'trashcan': true,
         'readOnly': !Pond.currentAvatar.editable,
         'zoom': {'controls': true, 'wheel': true}});
    var xml = Blockly.Xml.textToDom(Pond.currentAvatar.blockly);
    Blockly.Xml.domToWorkspace(xml, BlocklyInterface.workspace);
    BlocklyInterface.workspace.clearUndo();
    Pond.Duck.setBlocksDisabled(false);
    Pond.Duck.changeTab(Pond.Duck.tabIndex.BLOCKS);
  }
  if (Pond.currentAvatar.js !== undefined) {
    // Avatar has JS code: show it in ACE and disable the blocks tab.
    BlocklyInterface.editor['setValue'](Pond.currentAvatar.js, -1);
    Pond.Duck.setBlocksDisabled(true);
    Pond.Duck.changeTab(Pond.Duck.tabIndex.JAVASCRIPT);
  }
  BlocklyInterface.editor['setReadOnly'](!Pond.currentAvatar.editable);
  Pond.Duck.ignoreEditorChanges_ = false;
};
/**
 * Called by the tab bar when a tab is selected.
 * @param {number} index Which tab is now active (0-1).
 */
Pond.Duck.changeTab = function(index) {
  // Change highlighting.
  for (var i = 0; i < Pond.Duck.editorTabs.length; i++) {
    if (index == i) {
      Blockly.utils.dom.addClass(Pond.Duck.editorTabs[i], 'tab-selected');
    } else {
      Blockly.utils.dom.removeClass(Pond.Duck.editorTabs[i], 'tab-selected');
    }
  }
  // Show the correct tab contents.
  var names = ['blockly', 'editor'];
  for (var i = 0, name; (name = names[i]); i++) {
    var div = document.getElementById(name);
    div.style.visibility = (i == index) ? 'visible' : 'hidden';
  }
  Blockly.hideChaff(false);
  // Synchronize the documentation popup.
  document.getElementById('docsButton').disabled = false;
  // NOTE(review): levels 11/12 appear to select the blocks vs JS variant of
  // the docs page - confirm against pond/docs.html.
  BlocklyGames.LEVEL = (index == Pond.Duck.tabIndex.BLOCKS) ? 11 : 12;
  if (Pond.isDocsVisible_) {
    var frame = document.getElementById('frameDocs');
    frame.src = 'pond/docs.html?lang=' + BlocklyGames.LANG +
        '&mode=' + BlocklyGames.LEVEL;
  }
  // Synchronize the JS editor.
  if (!Pond.Duck.ignoreEditorChanges_ && !BlocklyInterface.blocksDisabled &&
      index == Pond.Duck.tabIndex.JAVASCRIPT) {
    // Regenerate the JS from the blocks without triggering editorChanged.
    var code = Blockly.JavaScript.workspaceToCode(BlocklyInterface.workspace);
    Pond.Duck.ignoreEditorChanges_ = true;
    BlocklyInterface.editor['setValue'](code, -1);
    Pond.Duck.ignoreEditorChanges_ = false;
  }
};
/**
 * Change event for JS editor. Warn the user, then disconnect the link from
 * blocks to JavaScript.
 */
Pond.Duck.editorChanged = function() {
  // Programmatic edits (tab sync, avatar switch) must not break the link.
  if (Pond.Duck.ignoreEditorChanges_) {
    return;
  }
  var code = BlocklyInterface.getJsCode();
  if (BlocklyInterface.blocksDisabled) {
    if (!code.trim()) {
      // Reestablish link between blocks and JS.
      BlocklyInterface.workspace.clear();
      Pond.Duck.setBlocksDisabled(false);
    }
  } else {
    if (!BlocklyInterface.workspace.getTopBlocks(false).length ||
        confirm(BlocklyGames.getMsg('Games_breakLink'))) {
      // Break link between blocks and JS.
      Pond.Duck.setBlocksDisabled(true);
    } else {
      // Abort change, preserve link.
      // NOTE(review): the revert is deferred via setTimeout, presumably so
      // ACE finishes dispatching this change event first - confirm.
      Pond.Duck.ignoreEditorChanges_ = true;
      setTimeout(function() {
        BlocklyInterface.editor['setValue'](code, -1);
        Pond.Duck.ignoreEditorChanges_ = false;
      }, 0);
    }
  }
};
/**
 * Enable or disable the ability to use Blockly.
 * @param {boolean} disabled True if Blockly is disabled and JS is to be used.
 */
Pond.Duck.setBlocksDisabled = function(disabled) {
  BlocklyInterface.blocksDisabled = disabled;
  var blocksTab = Pond.Duck.editorTabs[Pond.Duck.tabIndex.BLOCKS];
  var dom = Blockly.utils.dom;
  // Toggle the 'tab-disabled' class on the blocks tab to match the flag.
  (disabled ? dom.addClass : dom.removeClass).call(dom, blocksTab,
      'tab-disabled');
};
(function() {
  // Equivalent of:
  // <script type="text/javascript" src="pond/duck/default-ducks.js"></script>
  var duckScript = document.createElement('script');
  duckScript.type = 'text/javascript';
  duckScript.src = 'pond/duck/default-ducks.js';
  document.head.appendChild(duckScript);
})();

window.addEventListener('load', Pond.Duck.init);
|
package bigtablestore
import (
"context"
"fmt"
"reflect"
"strconv"
"testing"
"time"
"cloud.google.com/go/bigtable"
"google.golang.org/protobuf/types/known/durationpb"
feast "github.com/feast-dev/feast/sdk/go"
"github.com/feast-dev/feast/sdk/go/protos/feast/serving"
"github.com/feast-dev/feast/sdk/go/protos/feast/types"
"github.com/gojek/merlin/pkg/transformer/spec"
"github.com/golang/protobuf/proto"
"github.com/linkedin/goavro/v2"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/mock"
)
// TestEncoder_Encode checks that feast online-feature requests are translated
// into BigTable row queries: row keys are built from the entity values (joined
// with '#' for composite entities) and the family filter comes from the
// feature table name.
func TestEncoder_Encode(t *testing.T) {
	tests := []struct {
		name          string
		want          RowQuery
		req           *feast.OnlineFeaturesRequest
		featureTables []*spec.FeatureTable
		metadata      []*spec.FeatureTableMetadata
	}{
		{
			name: "multiple entities, single feature table",
			want: RowQuery{
				table: "default__driver_id",
				entityKeys: []*spec.Entity{
					{
						Name:      "driver_id",
						ValueType: types.ValueType_STRING.String(),
					},
				},
				rowList:   &bigtable.RowList{"2", "1"},
				rowFilter: bigtable.FamilyFilter("driver_trips"),
			},
			req: &feast.OnlineFeaturesRequest{
				Features: []string{"driver_trips:trips_today"},
				Entities: []feast.Row{
					{
						"driver_id": feast.Int64Val(2),
					},
					{
						"driver_id": feast.Int64Val(1),
					},
				},
				Project: "default",
			},
			featureTables: []*spec.FeatureTable{
				{
					Project: "default",
					Entities: []*spec.Entity{
						{
							Name:      "driver_id",
							ValueType: types.ValueType_STRING.String(),
						},
					},
					Features: []*spec.Feature{
						{
							Name:      "driver_trips:trips_today",
							ValueType: types.ValueType_INT32.String(),
						},
					},
					TableName: "driver_trips",
				},
			},
			metadata: []*spec.FeatureTableMetadata{
				{
					Name:    "driver_trips",
					Project: "default",
				},
			},
		},
		{
			name: "composite entities",
			want: RowQuery{
				table: "default__driver_id_customer_id",
				entityKeys: []*spec.Entity{
					{
						Name:      "driver_id",
						ValueType: types.ValueType_INT32.String(),
					},
					{
						Name:      "customer_id",
						ValueType: types.ValueType_INT64.String(),
					},
				},
				rowList:   &bigtable.RowList{"2#9", "1#8"},
				rowFilter: bigtable.FamilyFilter("driver_customer_interaction"),
			},
			req: &feast.OnlineFeaturesRequest{
				Features: []string{"driver_customer_interaction:rating"},
				Entities: []feast.Row{
					{
						"driver_id":   feast.Int32Val(2),
						"customer_id": feast.Int64Val(9),
					},
					{
						"driver_id":   feast.Int32Val(1),
						"customer_id": feast.Int64Val(8),
					},
				},
				Project: "default",
			},
			featureTables: []*spec.FeatureTable{
				{
					Project: "default",
					Entities: []*spec.Entity{
						{
							Name:      "driver_id",
							ValueType: types.ValueType_INT32.String(),
						},
						{
							Name:      "customer_id",
							ValueType: types.ValueType_INT64.String(),
						},
					},
					Features: []*spec.Feature{
						{
							Name:      "driver_customer_interaction:rating",
							ValueType: types.ValueType_INT32.String(),
						},
					},
					TableName: "driver_customer_interaction",
				},
			},
			metadata: []*spec.FeatureTableMetadata{
				{
					Name:    "driver_customer_interaction",
					Project: "default",
				},
			},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			registry := &CachedCodecRegistry{}
			encoder, _ := newEncoder(registry, tt.featureTables, tt.metadata)
			rowQuery, err := encoder.Encode(tt.req)
			// FIX: previously this panicked on error, aborting the whole test
			// binary; fail only this subtest instead.
			if err != nil {
				t.Fatalf("Encode() returned unexpected error: %v", err)
			}
			if !reflect.DeepEqual(rowQuery.rowList, tt.want.rowList) {
				t.Errorf("expected %v, actual %v", tt.want.rowList, rowQuery.rowList)
			}
		})
	}
}
// TestEncoder_Decode verifies that BigTable rows are decoded into Feast
// online-feature responses, covering present values, null values, missing
// rows, stale (outside max-age) values, codec-registry cache misses, and
// storage failures while fetching a schema.
func TestEncoder_Decode(t *testing.T) {
	// Avro schema of the stored feature record: a nullable array of doubles
	// ("lats") and a nullable string ("login_type").
	avroSchema := "{\"type\":\"record\",\"name\":\"topLevelRecord\"," +
		"\"fields\":[" +
		"{\"name\":\"lats\",\"type\":[{\"type\":\"array\",\"items\":[\"double\",\"null\"]},\"null\"]}," +
		"{\"name\":\"login_type\",\"type\":[\"string\",\"null\"]}]}"
	// First bytes of each stored cell value act as the schema reference that
	// the codec registry resolves to an Avro codec.
	schemaRefBytes := []byte{92, 57, 144, 80}
	// Feature table shared by every test case below.
	featureTable := []*spec.FeatureTable{{
		Project: "project",
		Entities: []*spec.Entity{
			{
				Name:      "customer_phone",
				ValueType: types.ValueType_STRING.String(),
			},
			{
				Name:      "resource_type",
				ValueType: types.ValueType_INT64.String(),
			},
		},
		Features: []*spec.Feature{
			{
				Name:      "login_requests:login_type",
				ValueType: types.ValueType_STRING.String(),
			},
			{
				Name:      "login_requests:lats",
				ValueType: types.ValueType_DOUBLE_LIST.String(),
			},
		},
		TableName: "login_requests",
	}}
	entityKeys := []*spec.Entity{
		{
			Name:      "customer_phone",
			ValueType: types.ValueType_STRING.String(),
		},
		{
			Name:      "resource_type",
			ValueType: types.ValueType_INT64.String(),
		},
	}
	codec, err := goavro.NewCodec(avroSchema)
	if err != nil {
		panic(err)
	}
	// A record where both features carry concrete (non-null) values.
	avroRecord := map[string]interface{}{
		"login_type": map[string]interface{}{
			"string": "OTP",
		},
		"lats": map[string]interface{}{
			"array": []interface{}{
				map[string]interface{}{
					"double": 2.0,
				},
				map[string]interface{}{
					"double": 1.0,
				},
			},
		},
	}
	avroValue, err := codec.BinaryFromNative(nil, avroRecord)
	if err != nil {
		panic(err)
	}
	// A record where both features are explicitly null.
	avroRecordWithNullValue := map[string]interface{}{
		"login_type": map[string]interface{}{
			"null": nil,
		},
		"lats": map[string]interface{}{
			"null": nil,
		},
	}
	avroNullValue, err := codec.BinaryFromNative(nil, avroRecordWithNullValue)
	if err != nil {
		panic(err)
	}
	// Pre-populated registry cache keyed by the schema reference bytes.
	defaultCodecs := map[string]*goavro.Codec{
		string(schemaRefBytes): codec,
	}
	tests := []struct {
		name              string
		want              *feast.OnlineFeaturesResponse
		req               *feast.OnlineFeaturesRequest
		rows              []bigtable.Row
		registryFn        func() *CachedCodecRegistry
		featureValues     []map[string]interface{}
		featureTimestamps []map[string]time.Time
		metadata          []*spec.FeatureTableMetadata
		err               string
	}{
		{
			name: "features with non null values",
			want: &feast.OnlineFeaturesResponse{
				RawResponse: &serving.GetOnlineFeaturesResponse{
					FieldValues: []*serving.GetOnlineFeaturesResponse_FieldValues{
						{
							Fields: map[string]*types.Value{
								"customer_phone":            feast.StrVal("1234"),
								"resource_type":             feast.Int64Val(1),
								"login_requests:login_type": feast.StrVal("OTP"),
								"login_requests:lats": {Val: &types.Value_DoubleListVal{
									DoubleListVal: &types.DoubleList{Val: []float64{2.0, 1.0}},
								}},
							},
							Statuses: map[string]serving.GetOnlineFeaturesResponse_FieldStatus{
								"customer_phone":            serving.GetOnlineFeaturesResponse_PRESENT,
								"resource_type":             serving.GetOnlineFeaturesResponse_PRESENT,
								"login_requests:login_type": serving.GetOnlineFeaturesResponse_PRESENT,
								"login_requests:lats":       serving.GetOnlineFeaturesResponse_PRESENT,
							},
						},
					},
				},
			},
			req: &feast.OnlineFeaturesRequest{
				Features: []string{"login_requests:login_type", "login_requests:lats"},
				Entities: []feast.Row{
					{
						"customer_phone": feast.StrVal("1234"),
						"resource_type":  feast.Int64Val(1),
					},
				},
				Project: "project",
			},
			rows: []bigtable.Row{
				{
					"login_requests": []bigtable.ReadItem{
						{
							Row:       "1234#1",
							Timestamp: bigtable.Time(time.Now()),
							Value:     append(schemaRefBytes, avroValue...),
						},
					},
				},
			},
			registryFn: func() *CachedCodecRegistry {
				return &CachedCodecRegistry{codecs: defaultCodecs}
			},
			metadata: []*spec.FeatureTableMetadata{{
				Name:    "login_requests",
				Project: "project",
				MaxAge:  nil,
			}},
		},
		{
			name: "features with non null values - registry doesn't have codec yet",
			want: &feast.OnlineFeaturesResponse{
				RawResponse: &serving.GetOnlineFeaturesResponse{
					FieldValues: []*serving.GetOnlineFeaturesResponse_FieldValues{
						{
							Fields: map[string]*types.Value{
								"customer_phone":            feast.StrVal("1234"),
								"resource_type":             feast.Int64Val(1),
								"login_requests:login_type": feast.StrVal("OTP"),
								"login_requests:lats": {Val: &types.Value_DoubleListVal{
									DoubleListVal: &types.DoubleList{Val: []float64{2.0, 1.0}},
								}},
							},
							Statuses: map[string]serving.GetOnlineFeaturesResponse_FieldStatus{
								"customer_phone":            serving.GetOnlineFeaturesResponse_PRESENT,
								"resource_type":             serving.GetOnlineFeaturesResponse_PRESENT,
								"login_requests:login_type": serving.GetOnlineFeaturesResponse_PRESENT,
								"login_requests:lats":       serving.GetOnlineFeaturesResponse_PRESENT,
							},
						},
					},
				},
			},
			req: &feast.OnlineFeaturesRequest{
				Features: []string{"login_requests:login_type", "login_requests:lats"},
				Entities: []feast.Row{
					{
						"customer_phone": feast.StrVal("1234"),
						"resource_type":  feast.Int64Val(1),
					},
				},
				Project: "project",
			},
			rows: []bigtable.Row{
				{
					"login_requests": []bigtable.ReadItem{
						{
							Row:       "1234#1",
							Timestamp: bigtable.Time(time.Now()),
							Value:     append(schemaRefBytes, avroValue...),
						},
					},
				},
			},
			// Cache miss: the registry must fetch the schema row from storage
			// and build the codec on demand.
			registryFn: func() *CachedCodecRegistry {
				loginRequestStorage := &storageMock{}
				schema := bigtable.Row{"metadata": []bigtable.ReadItem{
					{
						Value: []byte(avroSchema),
					},
				}}
				loginRequestStorage.On("readRow", mock.Anything, fmt.Sprintf("schema#%s", schemaRefBytes)).Return(schema, nil)
				tables := map[string]storage{"project__customer_phone__resource_type": loginRequestStorage}
				return newCachedCodecRegistry(tables)
			},
			metadata: []*spec.FeatureTableMetadata{{
				Name:    "login_requests",
				Project: "project",
				MaxAge:  nil,
			}},
		},
		{
			name: "features with non null values - registry doesn't have codec yet got error when fetching schema metadata",
			want: nil,
			err:  "bigtable is down",
			req: &feast.OnlineFeaturesRequest{
				Features: []string{"login_requests:login_type", "login_requests:lats"},
				Entities: []feast.Row{
					{
						"customer_phone": feast.StrVal("1234"),
						"resource_type":  feast.Int64Val(1),
					},
				},
				Project: "project",
			},
			rows: []bigtable.Row{
				{
					"login_requests": []bigtable.ReadItem{
						{
							Row:       "1234#1",
							Timestamp: bigtable.Time(time.Now()),
							Value:     append(schemaRefBytes, avroValue...),
						},
					},
				},
			},
			// Storage failure while resolving the schema must surface as an error.
			registryFn: func() *CachedCodecRegistry {
				loginRequestStorage := &storageMock{}
				loginRequestStorage.On("readRow", mock.Anything, fmt.Sprintf("schema#%s", schemaRefBytes)).Return(nil, fmt.Errorf("bigtable is down"))
				tables := map[string]storage{"project__customer_phone__resource_type": loginRequestStorage}
				return newCachedCodecRegistry(tables)
			},
			metadata: []*spec.FeatureTableMetadata{{
				Name:    "login_requests",
				Project: "project",
				MaxAge:  nil,
			}},
		},
		{
			name: "features with null values",
			want: &feast.OnlineFeaturesResponse{
				RawResponse: &serving.GetOnlineFeaturesResponse{
					FieldValues: []*serving.GetOnlineFeaturesResponse_FieldValues{
						{
							Fields: map[string]*types.Value{
								"customer_phone":            feast.StrVal("1234"),
								"resource_type":             feast.Int64Val(1),
								"login_requests:login_type": {},
								"login_requests:lats":       {},
							},
							Statuses: map[string]serving.GetOnlineFeaturesResponse_FieldStatus{
								"customer_phone":            serving.GetOnlineFeaturesResponse_PRESENT,
								"resource_type":             serving.GetOnlineFeaturesResponse_PRESENT,
								"login_requests:login_type": serving.GetOnlineFeaturesResponse_NULL_VALUE,
								"login_requests:lats":       serving.GetOnlineFeaturesResponse_NULL_VALUE,
							},
						},
					},
				},
			},
			req: &feast.OnlineFeaturesRequest{
				Features: []string{"login_requests:login_type", "login_requests:lats"},
				Entities: []feast.Row{
					{
						"customer_phone": feast.StrVal("1234"),
						"resource_type":  feast.Int64Val(1),
					},
				},
				Project: "project",
			},
			rows: []bigtable.Row{
				{
					"login_requests": []bigtable.ReadItem{
						{
							Row:       "1234#1",
							Timestamp: bigtable.Time(time.Now()),
							Value:     append(schemaRefBytes, avroNullValue...),
						},
					},
				},
			},
			registryFn: func() *CachedCodecRegistry {
				return &CachedCodecRegistry{codecs: defaultCodecs}
			},
			metadata: []*spec.FeatureTableMetadata{{
				Name:    "login_requests",
				Project: "project",
				MaxAge:  nil,
			}},
		},
		{
			// No BigTable row at all for the entity: statuses become NOT_FOUND.
			name: "missing key",
			want: &feast.OnlineFeaturesResponse{
				RawResponse: &serving.GetOnlineFeaturesResponse{
					FieldValues: []*serving.GetOnlineFeaturesResponse_FieldValues{
						{
							Fields: map[string]*types.Value{
								"customer_phone":            feast.StrVal("1234"),
								"resource_type":             feast.Int64Val(1),
								"login_requests:login_type": {},
								"login_requests:lats":       {},
							},
							Statuses: map[string]serving.GetOnlineFeaturesResponse_FieldStatus{
								"customer_phone":            serving.GetOnlineFeaturesResponse_PRESENT,
								"resource_type":             serving.GetOnlineFeaturesResponse_PRESENT,
								"login_requests:login_type": serving.GetOnlineFeaturesResponse_NOT_FOUND,
								"login_requests:lats":       serving.GetOnlineFeaturesResponse_NOT_FOUND,
							},
						},
					},
				},
			},
			req: &feast.OnlineFeaturesRequest{
				Features: []string{"login_requests:login_type", "login_requests:lats"},
				Entities: []feast.Row{
					{
						"customer_phone": feast.StrVal("1234"),
						"resource_type":  feast.Int64Val(1),
					},
				},
				Project: "project",
			},
			rows: []bigtable.Row{},
			registryFn: func() *CachedCodecRegistry {
				return &CachedCodecRegistry{codecs: defaultCodecs}
			},
			metadata: []*spec.FeatureTableMetadata{{
				Name:    "login_requests",
				Project: "project",
				MaxAge:  nil,
			}},
		},
		{
			// Row timestamp is older than the table's max age: OUTSIDE_MAX_AGE.
			name: "stale features",
			want: &feast.OnlineFeaturesResponse{
				RawResponse: &serving.GetOnlineFeaturesResponse{
					FieldValues: []*serving.GetOnlineFeaturesResponse_FieldValues{
						{
							Fields: map[string]*types.Value{
								"customer_phone":            feast.StrVal("1234"),
								"resource_type":             feast.Int64Val(1),
								"login_requests:login_type": {},
								"login_requests:lats":       {},
							},
							Statuses: map[string]serving.GetOnlineFeaturesResponse_FieldStatus{
								"customer_phone":            serving.GetOnlineFeaturesResponse_PRESENT,
								"resource_type":             serving.GetOnlineFeaturesResponse_PRESENT,
								"login_requests:login_type": serving.GetOnlineFeaturesResponse_OUTSIDE_MAX_AGE,
								"login_requests:lats":       serving.GetOnlineFeaturesResponse_OUTSIDE_MAX_AGE,
							},
						},
					},
				},
			},
			req: &feast.OnlineFeaturesRequest{
				Features: []string{"login_requests:login_type", "login_requests:lats"},
				Entities: []feast.Row{
					{
						"customer_phone": feast.StrVal("1234"),
						"resource_type":  feast.Int64Val(1),
					},
				},
				Project: "project",
			},
			rows: []bigtable.Row{
				{
					"login_requests": []bigtable.ReadItem{
						{
							Row:       "1234#1",
							Timestamp: bigtable.Time(time.Now().Add(-1 * time.Second)),
							Value:     append(schemaRefBytes, avroValue...),
						},
					},
				},
			},
			registryFn: func() *CachedCodecRegistry {
				return &CachedCodecRegistry{codecs: defaultCodecs}
			},
			metadata: []*spec.FeatureTableMetadata{{
				Name:    "login_requests",
				Project: "project",
				MaxAge:  durationpb.New(1 * time.Second),
			}},
		},
		{
			name: "features some of feature table doesn't have record in bigtable",
			want: &feast.OnlineFeaturesResponse{
				RawResponse: &serving.GetOnlineFeaturesResponse{
					FieldValues: []*serving.GetOnlineFeaturesResponse_FieldValues{
						{
							Fields: map[string]*types.Value{
								"customer_phone":            feast.StrVal("1234"),
								"resource_type":             feast.Int64Val(1),
								"login_requests:login_type": feast.StrVal("OTP"),
								"login_requests:lats": {Val: &types.Value_DoubleListVal{
									DoubleListVal: &types.DoubleList{Val: []float64{2.0, 1.0}},
								}},
								"user_stat:num_force_logout": {},
							},
							Statuses: map[string]serving.GetOnlineFeaturesResponse_FieldStatus{
								"customer_phone":             serving.GetOnlineFeaturesResponse_PRESENT,
								"resource_type":              serving.GetOnlineFeaturesResponse_PRESENT,
								"login_requests:login_type":  serving.GetOnlineFeaturesResponse_PRESENT,
								"login_requests:lats":        serving.GetOnlineFeaturesResponse_PRESENT,
								"user_stat:num_force_logout": serving.GetOnlineFeaturesResponse_NOT_FOUND,
							},
						},
					},
				},
			},
			req: &feast.OnlineFeaturesRequest{
				Features: []string{"login_requests:login_type", "login_requests:lats", "user_stat:num_force_logout"},
				Entities: []feast.Row{
					{
						"customer_phone": feast.StrVal("1234"),
						"resource_type":  feast.Int64Val(1),
					},
				},
				Project: "project",
			},
			rows: []bigtable.Row{
				{
					"login_requests": []bigtable.ReadItem{
						{
							Row:       "1234#1",
							Timestamp: bigtable.Time(time.Now()),
							Value:     append(schemaRefBytes, avroValue...),
						},
					},
				},
			},
			registryFn: func() *CachedCodecRegistry {
				return &CachedCodecRegistry{codecs: defaultCodecs}
			},
			metadata: []*spec.FeatureTableMetadata{
				{
					Name:    "login_requests",
					Project: "project",
					MaxAge:  nil,
				},
				{
					Name:    "user_stat",
					Project: "project",
					MaxAge:  nil,
				},
			},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			registry := tt.registryFn()
			encoder, _ := newEncoder(registry, featureTable, tt.metadata)
			response, err := encoder.Decode(context.Background(), tt.rows, tt.req, entityKeys)
			if err != nil {
				assert.EqualError(t, err, tt.err)
			} else {
				// Compare protos semantically rather than with reflect.DeepEqual.
				if !proto.Equal(response.RawResponse, tt.want.RawResponse) {
					t.Errorf("expected %s, actual %s", tt.want.RawResponse, response.RawResponse)
				}
			}
		})
	}
}
// TestAvroToValueConversion verifies conversion from decoded Avro union values
// (map keyed by the Avro type name) into Feast proto Values, including lenient
// coercion from strings/ints and the error returned for non-numeric strings.
func TestAvroToValueConversion(t *testing.T) {
	testCases := []struct {
		desc        string
		avroValue   interface{}
		featureType string
		wantValue   *types.Value
		err         error
	}{
		{
			desc: "string type",
			avroValue: map[string]interface{}{
				"string": "OTP",
			},
			featureType: types.ValueType_STRING.String(),
			wantValue: &types.Value{
				Val: &types.Value_StringVal{
					StringVal: "OTP",
				},
			},
			err: nil,
		},
		{
			desc: "feature type string, but avro value in int format",
			avroValue: map[string]interface{}{
				"string": 1000,
			},
			featureType: types.ValueType_STRING.String(),
			wantValue: &types.Value{
				Val: &types.Value_StringVal{
					StringVal: "1000",
				},
			},
			err: nil,
		},
		{
			desc: "int type",
			avroValue: map[string]interface{}{
				"int": 32,
			},
			featureType: types.ValueType_INT32.String(),
			wantValue: &types.Value{
				Val: &types.Value_Int32Val{
					Int32Val: 32,
				},
			},
			err: nil,
		},
		{
			desc: "feature type int32, but avro value in numeric string",
			avroValue: map[string]interface{}{
				"int": "2200",
			},
			featureType: types.ValueType_INT32.String(),
			wantValue: &types.Value{
				Val: &types.Value_Int32Val{
					Int32Val: 2200,
				},
			},
			err: nil,
		},
		{
			// Non-numeric strings surface the underlying strconv error.
			desc: "feature type int32, but avro value in non-numeric string",
			avroValue: map[string]interface{}{
				"int": "randomVal",
			},
			featureType: types.ValueType_INT32.String(),
			wantValue:   nil,
			err: &strconv.NumError{
				Func: "Atoi",
				Num:  "randomVal",
				Err:  strconv.ErrSyntax,
			},
		},
		{
			desc: "int64 type",
			avroValue: map[string]interface{}{
				"long": 100000000000,
			},
			featureType: types.ValueType_INT64.String(),
			wantValue: &types.Value{
				Val: &types.Value_Int64Val{
					Int64Val: 100000000000,
				},
			},
			err: nil,
		},
		{
			desc: "feature type int64, avro value in numeric string",
			avroValue: map[string]interface{}{
				"long": "100000000000",
			},
			featureType: types.ValueType_INT64.String(),
			wantValue: &types.Value{
				Val: &types.Value_Int64Val{
					Int64Val: 100000000000,
				},
			},
			err: nil,
		},
		{
			desc: "feature type int64, avro value in non-numeric string",
			avroValue: map[string]interface{}{
				"long": "nonnumeric",
			},
			featureType: types.ValueType_INT64.String(),
			wantValue:   nil,
			err: &strconv.NumError{
				Func: "ParseInt",
				Num:  "nonnumeric",
				Err:  strconv.ErrSyntax,
			},
		},
		{
			desc: "bool type",
			avroValue: map[string]interface{}{
				"boolean": true,
			},
			featureType: types.ValueType_BOOL.String(),
			wantValue: &types.Value{
				Val: &types.Value_BoolVal{
					BoolVal: true,
				},
			},
			err: nil,
		},
		{
			desc: "float type",
			avroValue: map[string]interface{}{
				"float": float32(1.2),
			},
			featureType: types.ValueType_FLOAT.String(),
			wantValue: &types.Value{
				Val: &types.Value_FloatVal{
					FloatVal: 1.2,
				},
			},
			err: nil,
		},
		{
			desc: "feature type float, avro type is int",
			avroValue: map[string]interface{}{
				"float": 2000,
			},
			featureType: types.ValueType_FLOAT.String(),
			wantValue: &types.Value{
				Val: &types.Value_FloatVal{
					FloatVal: 2000,
				},
			},
			err: nil,
		},
		{
			desc: "feature type float, avro type is numeric string",
			avroValue: map[string]interface{}{
				"float": "0.5",
			},
			featureType: types.ValueType_FLOAT.String(),
			wantValue: &types.Value{
				Val: &types.Value_FloatVal{
					FloatVal: 0.5,
				},
			},
			err: nil,
		},
		{
			desc: "feature type float, avro type is non-numeric string",
			avroValue: map[string]interface{}{
				"float": "string",
			},
			featureType: types.ValueType_FLOAT.String(),
			wantValue:   nil,
			err: &strconv.NumError{
				Func: "ParseFloat",
				Num:  "string",
				Err:  strconv.ErrSyntax,
			},
		},
		{
			desc: "double type",
			avroValue: map[string]interface{}{
				"double": float64(1.2),
			},
			featureType: types.ValueType_DOUBLE.String(),
			wantValue: &types.Value{
				Val: &types.Value_DoubleVal{
					DoubleVal: 1.2,
				},
			},
			err: nil,
		},
		{
			desc: "feature type double, avro value is numeric string",
			avroValue: map[string]interface{}{
				"double": "1.2",
			},
			featureType: types.ValueType_DOUBLE.String(),
			wantValue: &types.Value{
				Val: &types.Value_DoubleVal{
					DoubleVal: 1.2,
				},
			},
			err: nil,
		},
		{
			desc: "feature type double, avro value is non-numeric string",
			avroValue: map[string]interface{}{
				"double": "string",
			},
			featureType: types.ValueType_DOUBLE.String(),
			wantValue:   nil,
			err: &strconv.NumError{
				Func: "ParseFloat",
				Num:  "string",
				Err:  strconv.ErrSyntax,
			},
		},
		{
			// List values arrive as Avro arrays of per-element unions.
			desc: "string list type",
			avroValue: map[string]interface{}{
				"array": []interface{}{
					map[string]interface{}{
						"string": "var1",
					},
					map[string]interface{}{
						"string": "var2",
					},
				},
			},
			featureType: types.ValueType_STRING_LIST.String(),
			wantValue: &types.Value{
				Val: &types.Value_StringListVal{
					StringListVal: &types.StringList{
						Val: []string{
							"var1", "var2",
						},
					},
				},
			},
			err: nil,
		},
		{
			desc: "int32 list type",
			avroValue: map[string]interface{}{
				"array": []interface{}{
					map[string]interface{}{
						"int": int32(1),
					},
					map[string]interface{}{
						"int": int32(2),
					},
				},
			},
			featureType: types.ValueType_INT32_LIST.String(),
			wantValue: &types.Value{
				Val: &types.Value_Int32ListVal{
					Int32ListVal: &types.Int32List{
						Val: []int32{
							1, 2,
						},
					},
				},
			},
			err: nil,
		},
		{
			desc: "int64 list type",
			avroValue: map[string]interface{}{
				"array": []interface{}{
					map[string]interface{}{
						"long": int64(10000000000),
					},
					map[string]interface{}{
						"long": int64(10000000000),
					},
				},
			},
			featureType: types.ValueType_INT64_LIST.String(),
			wantValue: &types.Value{
				Val: &types.Value_Int64ListVal{
					Int64ListVal: &types.Int64List{
						Val: []int64{
							10000000000, 10000000000,
						},
					},
				},
			},
			err: nil,
		},
		{
			desc: "float list type",
			avroValue: map[string]interface{}{
				"array": []interface{}{
					map[string]interface{}{
						"float": float32(0.1),
					},
					map[string]interface{}{
						"float": float32(0.2),
					},
				},
			},
			featureType: types.ValueType_FLOAT_LIST.String(),
			wantValue: &types.Value{
				Val: &types.Value_FloatListVal{
					FloatListVal: &types.FloatList{
						Val: []float32{
							0.1, 0.2,
						},
					},
				},
			},
			err: nil,
		},
		{
			desc: "double list type",
			avroValue: map[string]interface{}{
				"array": []interface{}{
					map[string]interface{}{
						"double": float64(0.1),
					},
					map[string]interface{}{
						"double": float64(0.2),
					},
				},
			},
			featureType: types.ValueType_DOUBLE_LIST.String(),
			wantValue: &types.Value{
				Val: &types.Value_DoubleListVal{
					DoubleListVal: &types.DoubleList{
						Val: []float64{
							0.1, 0.2,
						},
					},
				},
			},
			err: nil,
		},
	}
	for _, tC := range testCases {
		t.Run(tC.desc, func(t *testing.T) {
			got, err := avroToValueConversion(tC.avroValue, tC.featureType)
			assert.Equal(t, tC.err, err)
			assert.Equal(t, tC.wantValue, got)
		})
	}
}
// TestEntityKeysToBigTable verifies how a project name plus entity key names
// are combined into a BigTable table name: keys are sorted and joined with
// "__", and over-long names are truncated with a hash suffix.
func TestEntityKeysToBigTable(t *testing.T) {
	testCases := []struct {
		desc       string
		project    string
		entityKeys []*spec.Entity
		want       string
	}{
		{
			desc:    "concatenation string of project and entityKeys less than 50 characters",
			project: "default",
			entityKeys: []*spec.Entity{
				{
					Name: "driver_id",
				},
				{
					Name: "geohash",
				},
			},
			want: "default__driver_id__geohash",
		},
		{
			// Names longer than 50 chars are truncated and end with a hash
			// suffix to keep them unique.
			desc:    "concatenation string of project and entityKeys more than 50 characters",
			project: "default-project-mobility-nationwide",
			entityKeys: []*spec.Entity{
				{
					Name: "driver_geohash",
				},
				{
					Name: "driver_id",
				},
			},
			want: "default-project-mobility-nationwide__drivede1619bb",
		},
		{
			// Entity key order in the input must not affect the result.
			desc:    "sort entity keys",
			project: "default",
			entityKeys: []*spec.Entity{
				{
					Name: "driver_id",
				},
				{
					Name: "driver_geohash",
				},
			},
			want: "default__driver_geohash__driver_id",
		},
	}
	for _, tC := range testCases {
		t.Run(tC.desc, func(t *testing.T) {
			got := entityKeysToBigTable(tC.project, entitiesToEntityNames(tC.entityKeys))
			assert.Equal(t, tC.want, got)
		})
	}
}
|
<gh_stars>1-10
package cucumber.runtime.android;
import cucumber.runtime.ClassFinder;
import cucumber.runtime.CucumberException;
import dalvik.system.DexFile;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Enumeration;
import java.util.List;
/**
* Android specific implementation of {@link cucumber.runtime.ClassFinder} which loads classes contained in the provided {@link dalvik.system.DexFile}.
*/
public class DexClassFinder implements ClassFinder {

    /**
     * Symbol name of the manifest class.
     */
    private static final String MANIFEST_CLASS_NAME = "Manifest";

    /**
     * Symbol name of the resource class.
     */
    private static final String RESOURCE_CLASS_NAME = "R";

    /**
     * Symbol name prefix of any inner class of the resource class.
     */
    private static final String RESOURCE_INNER_CLASS_NAME_PREFIX = "R$";

    /**
     * The file name separator.
     */
    private static final String FILE_NAME_SEPARATOR = ".";

    /**
     * The class loader to actually load the classes specified by the {@link dalvik.system.DexFile}.
     */
    private static final ClassLoader CLASS_LOADER = DexClassFinder.class.getClassLoader();

    /**
     * The "symbol" representing the default package.
     */
    private static final String DEFAULT_PACKAGE = "";

    /**
     * The {@link dalvik.system.DexFile} to load classes from
     */
    private final DexFile dexFile;

    /**
     * Creates a new instance for the given parameter.
     *
     * @param dexFile the {@link dalvik.system.DexFile} to load classes from
     */
    public DexClassFinder(final DexFile dexFile) {
        this.dexFile = dexFile;
    }

    /**
     * Returns all classes in the dex file that live under {@code packageName} and are strict
     * subtypes of {@code parentType}, skipping generated Android classes (Manifest, R, R$*).
     *
     * @param parentType  the supertype to match; the type itself is excluded from the result
     * @param packageName package prefix to search in (empty string matches the default package)
     * @throws CucumberException if a matching class name cannot be loaded
     */
    @Override
    public <T> Collection<Class<? extends T>> getDescendants(final Class<T> parentType, final String packageName) {
        final List<Class<? extends T>> result = new ArrayList<Class<? extends T>>();
        final Enumeration<String> entries = dexFile.entries();
        while (entries.hasMoreElements()) {
            final String className = entries.nextElement();
            if (isInPackage(className, packageName) && !isGenerated(className)) {
                try {
                    final Class<? extends T> clazz = loadClass(className);
                    // Exclude parentType itself; only proper descendants are collected.
                    if (clazz != null && !parentType.equals(clazz) && parentType.isAssignableFrom(clazz)) {
                        result.add(clazz.asSubclass(parentType));
                    }
                } catch (ClassNotFoundException e) {
                    throw new CucumberException(e);
                }
            }
        }
        return result;
    }

    /**
     * Loads (without initializing) the named class through this class's own class loader.
     * The cast is unavoidably unchecked because {@link Class#forName(String, boolean, ClassLoader)}
     * returns a raw {@code Class<?>}; callers are responsible for the requested type.
     */
    @SuppressWarnings("unchecked")
    @Override
    public <T> Class<? extends T> loadClass(final String className) throws ClassNotFoundException {
        return (Class<? extends T>) Class.forName(className, false, CLASS_LOADER);
    }

    /**
     * Checks whether {@code className} belongs to {@code packageName} or one of its sub-packages.
     */
    private boolean isInPackage(final String className, final String packageName) {
        final int lastDotIndex = className.lastIndexOf(FILE_NAME_SEPARATOR);
        final String classPackage = lastDotIndex == -1 ? DEFAULT_PACKAGE : className.substring(0, lastDotIndex);
        return classPackage.startsWith(packageName);
    }

    /**
     * Checks whether {@code className} is an Android build-generated class
     * (Manifest, R, or an inner class of R) that should never be scanned.
     */
    private boolean isGenerated(final String className) {
        final int lastDotIndex = className.lastIndexOf(FILE_NAME_SEPARATOR);
        final String shortName = lastDotIndex == -1 ? className : className.substring(lastDotIndex + 1);
        return shortName.equals(MANIFEST_CLASS_NAME) || shortName.equals(RESOURCE_CLASS_NAME) || shortName.startsWith(RESOURCE_INNER_CLASS_NAME_PREFIX);
    }
}
|
<gh_stars>0
"use strict";
/**
 * AngularJS controller for an 8x8 step-sequencer drum machine.
 * Each grid cell's `value` (mod 4) selects how the instrument for that row
 * fires on that step: 0 = silent, 1 = on the beat, 2 = beat + off-beat,
 * 3 = off-beat only.
 */
class DrumMachineController {
  constructor($timeout) {
    this.$timeout = $timeout;
    // One Audio element per row, row index == instrument index.
    this.instruments = [
      new Audio("components/drum-machine/audio/Note_Low.wav"),
      new Audio("components/drum-machine/audio/Clap.wav"),
      new Audio("components/drum-machine/audio/Tom_Hi.wav"),
      new Audio("components/drum-machine/audio/Tom_Low.wav"),
      new Audio("components/drum-machine/audio/HiHat_Open.wav"),
      new Audio("components/drum-machine/audio/HiHat_Closed.wav"),
      new Audio("components/drum-machine/audio/Snare.wav"),
      new Audio("components/drum-machine/audio/Bass.wav"),
    ];
    this.bpm = 60;
    // Step interval in ms (30000/bpm = half of a 60000/bpm beat — presumably
    // eighth-note steps; TODO confirm intended resolution).
    this.ms = 30000 / this.bpm;
    this.loop = true;
    // 8x8 grid of cells; `cell` is the flat index, `value` the fire mode,
    // `focus` marks the step currently being played.
    this.grid = Array.from({ length: 8 }, (_, rowIndex) =>
      Array.from({ length: 8 }, (_, cellIndex) => ({
        cell: cellIndex + rowIndex * 8,
        value: 0,
        focus: false,
      })),
    );
  }

  /**
   * Plays one sequencer step and re-schedules itself while `loop` is true.
   * @param {number} iterations monotonically increasing step counter
   */
  cycle(iterations) {
    for (let i = 0; i < this.grid.length; i++) {
      this.grid[i].forEach((item, index) => {
        this.playAudio(item, index, iterations, i);
        // Highlight only the column for the current step.
        item.focus = iterations % 8 === index;
      });
    }
    if (this.loop) {
      this.$timeout(() => {
        this.cycle(iterations + 1);
      }, this.ms);
    }
  }

  /**
   * Triggers instrument `i` for one cell according to its fire mode.
   */
  playAudio(item, index, iterations, i) {
    if (item.value % 4 === 1 && index === iterations % 8) {
      this.resetAndPlay(i);
    }
    if (item.value % 4 === 2 && index === iterations % 8) {
      // Beat plus an off-beat hit half a step later.
      this.resetAndPlay(i);
      this.$timeout(() => {
        this.resetAndPlay(i);
      }, this.ms / 2);
    }
    if (item.value % 4 === 3 && index === iterations % 8) {
      this.$timeout(() => {
        this.resetAndPlay(i);
      }, this.ms / 2);
    }
  }

  /**
   * Plays a single instrument immediately (e.g. for previewing).
   * Fixed: the original used `number & 8`, which evaluates to 0 or 8 —
   * and 8 is out of bounds for the 8-element instruments array.
   */
  playOnce(number) {
    this.resetAndPlay(number % 8);
  }

  /** Rewinds instrument `n` and plays it from the start. */
  resetAndPlay(n) {
    this.instruments[n].pause();
    this.instruments[n].currentTime = 0;
    this.instruments[n].play();
  }

  toggleLoop() {
    this.loop = !this.loop;
  }

  /**
   * Recomputes the step interval after a tempo change.
   * NOTE(review): the `bpm` argument is ignored and `this.bpm` is used —
   * presumably the template binds the same value; confirm against the view.
   */
  changeMs(bpm) {
    this.ms = 30000 / this.bpm;
  }
}
// Explicit DI annotation so minification does not break injection.
DrumMachineController.$inject = ["$timeout"];
// Register the grid component on the existing "drumMachine" module.
angular.module("drumMachine").component("drumMachineGrid", {
  templateUrl: "components/drum-machine/drum-machine.template.html",
  controller: DrumMachineController,
});
|
import argparse
import logging
import pickle
def process_file(input_filepath):
    """Read a book file as raw bytes and return an uppercased copy.

    Args:
        input_filepath: Path of the file to read.

    Returns:
        The file contents uppercased (bytes), or None if the file is
        missing or any other error occurs (the error is logged).
    """
    try:
        with open(input_filepath, mode='rb') as input_file:
            raw_bytes = input_file.read()
        # Example processing step: convert the contents to uppercase.
        return raw_bytes.upper()
    except FileNotFoundError:
        logging.error(f"Input file not found: {input_filepath}")
        return None
    except Exception as e:
        logging.error(f"Error processing file: {e}")
        return None
def main():
    """Parse CLI arguments, process the input book file, and pickle the result.

    Expects two positional arguments: the input file path and the output
    file path. On processing failure nothing is written.
    """
    parser = argparse.ArgumentParser(description='Process book information')
    parser.add_argument('input_file_path', type=str, help='Input file path')
    parser.add_argument('output_file_path', type=str, help='Output file path')
    args = parser.parse_args()

    logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
    logging.info('Working on book: {}'.format(args.input_file_path))

    book_list = process_file(args.input_file_path)
    if book_list:
        try:
            # Serialize with the standard `pickle` module; the original
            # `cpickle` module does not exist (cPickle was Python 2 only,
            # and this script already requires Python 3 f-strings).
            with open(args.output_file_path, mode='wb') as pickle_file:
                pickle.dump(book_list, pickle_file)
            logging.info('Processed data saved to: {}'.format(args.output_file_path))
        except Exception as e:
            logging.error(f"Error saving processed data: {e}")

if __name__ == "__main__":
    main()
|
# funding.py
from src.shared.entity import Base
from sqlalchemy import Column, Integer, String
class Funding(Base):
    """ORM model for a funding record, mapped to the (French-named)
    ``financement`` table."""
    __tablename__ = 'financement'
    # Surrogate primary key.
    id = Column(Integer, primary_key=True)
    # Funding name, limited to 50 characters.
    name = Column(String(50))
    # Funded amount; unit/currency not specified here — TODO confirm.
    amount = Column(Integer)
|
<gh_stars>0
-- Per-event time spent by session &sid, excluding idle and parallel-query
-- housekeeping waits (both the pre- and post-8i event names are listed).
select event, time_waited as time_spent
from v$session_event
where sid = &sid
and event not in (
'Null event',
'client message',
'KXFX: Execution Message Dequeue - Slave',
'PX Deq: Execution Msg',
'KXFQ: kxfqdeq - normal deqeue',
'PX Deq: Table Q Normal',
'Wait for credit - send blocked',
'PX Deq Credit: send blkd',
'Wait for credit - need buffer to send',
'PX Deq Credit: need buffer',
'Wait for credit - free buffer',
'PX Deq Credit: free buffer',
'parallel query dequeue wait',
'PX Deque wait',
'Parallel Query Idle Wait - Slaves',
'PX Idle Wait',
'slave wait',
'dispatcher timer',
'virtual circuit status',
'pipe get',
'rdbms ipc message',
'rdbms ipc reply',
'pmon timer',
'smon timer',
'PL/SQL lock timer',
'SQL*Net message from client',
'WMON goes to sleep')
union all
-- Append the session's CPU usage in the same two-column shape so the
-- report shows CPU alongside the wait events.
select b.name, a.value
from v$sesstat a, v$statname b
where a.statistic# = b.statistic#
and b.name = 'CPU used when call started'
and a.sid = &sid;
|
import styled, {createGlobalStyle} from 'styled-components'
// Fixed-width dark side panel.
export const Sidebar = styled.div`
 width: 300px;
 height: 100%;
 background-color: rgba(70, 70, 70, 1);
 padding: 20px;
`
// Bold section heading used inside the sidebar.
export const Title = styled.div`
 font-size: 18px;
 font-weight: bold;
 margin-bottom: 15px;
`
// App-wide base styles: resets body margin, sets the font stack, and
// applies border-box sizing to every element.
export const GlobalStyles = createGlobalStyle`
 body {
 color: #FFF;
 margin: 0;
 font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', 'Roboto', 'Oxygen',
 'Ubuntu', 'Cantarell', 'Fira Sans', 'Droid Sans', 'Helvetica Neue',
 sans-serif;
 -webkit-font-smoothing: antialiased;
 -moz-osx-font-smoothing: grayscale;
 }
 * {
 box-sizing: border-box;
 }
`
|
import { JsonResponse } from './shared/utils';
/**
 * Cloudflare Pages middleware: times the downstream handler chain, reports
 * the elapsed milliseconds in an `x-response-timing` header, and converts
 * any thrown error into a JSON 500 response (details hidden in production).
 */
export const onRequest: PagesFunction<{
  NODE_ENV: string;
}> = async function onRequest(context) {
  const { env } = context;
  const startedAt = Date.now();
  const res = await context.next().catch((e) => {
    const result = {
      status: 0,
      // NOTE(review): `e` is cast to string as in the original; if handlers
      // throw Error objects this serializes the object itself — confirm.
      error: env.NODE_ENV === 'production' ? 'Server Error!' : (e as string)
    };
    return JsonResponse(result, 500);
  });
  const delta = Date.now() - startedAt;
  // Headers.set() requires a string value; the original passed a number,
  // which fails TypeScript type-checking.
  res.headers.set('x-response-timing', String(delta));
  return res;
};
|
# Termux build recipe for gdk-pixbuf (image loading/manipulation library).
TERMUX_PKG_HOMEPAGE=https://developer.gnome.org/gdk-pixbuf/
TERMUX_PKG_DESCRIPTION="Library for image loading and manipulation"
TERMUX_PKG_LICENSE="LGPL-2.1"
TERMUX_PKG_VERSION=2.40.0
# Source tarball path is derived from the major.minor part of the version.
TERMUX_PKG_SRCURL=ftp://ftp.gnome.org/pub/gnome/sources/gdk-pixbuf/${TERMUX_PKG_VERSION:0:4}/gdk-pixbuf-${TERMUX_PKG_VERSION}.tar.xz
TERMUX_PKG_SHA256=1582595099537ca8ff3b99c6804350b4c058bb8ad67411bbaae024ee7cead4e6
TERMUX_PKG_DEPENDS="glib, libpng, libtiff, libjpeg-turbo"
# This package replaces the former split -dev package.
TERMUX_PKG_BREAKS="gdk-pixbuf-dev"
TERMUX_PKG_REPLACES="gdk-pixbuf-dev"
# Meson options: disable GIO content sniffing, GObject introspection and X11.
TERMUX_PKG_EXTRA_CONFIGURE_ARGS="
-Dgio_sniffing=false
-Dgir=false
-Dx11=false
"
|
# Print the even numbers from 0 through 20, one per line.
i = 0
while i < 21:
    if i % 2 == 0:
        print(i)
    i = i + 1
|
<reponame>MMaus/zettelfix<filename>vue/src/shims-vue.d.ts
// see https://github.com/vuejs/vue-test-utils-next/issues/194#issuecomment-695333180
// This is supposed to get the unit tests working
/* eslint-disable */
// Ambient module declaration so TypeScript accepts imports of .vue files.
declare module "*.vue" {
  import type { DefineComponent } from "vue";
  // const component: DefineComponent; //<{}, {}, any>
  // Props, raw bindings and data are intentionally left untyped.
  const component: DefineComponent<{}, {}, any>;
  export default component;
}
// declare module '*.vue' {
// import type { DefineComponent } from 'vue'
// const component: DefineComponent<{}, {}, any>
// export default component
// }
|
import java.util.HashSet;
import java.util.Set;
/**
 * Demo that repeatedly expands a set of strings: starting from {"a"}, each
 * level keeps every element, adds a parenthesized copy "(e)" of each element,
 * and adds a pair "(x,y)" for every ordered pair of elements, then prints the
 * resulting set and its size for levels 1 through 4.
 */
public class DsSet {
    public static void main(String[] args) {
        for (int i = 1; i <= 4; i++) {
            set(i);
        }
    }

    /**
     * Builds and prints the expanded set after {@code level} expansion rounds.
     *
     * @param level number of expansion rounds to apply to the seed set {"a"}
     */
    private static void set(int level) {
        Set<String> set = new HashSet<>();
        set.add("a");
        for (int i = 0; i < level; i++) {
            // Copy-construct instead of element-by-element copying.
            Set<String> newSet = new HashSet<>(set);
            for (String elem : set) {
                newSet.add(String.format("(%s)", elem));
            }
            for (String x : set) {
                for (String y : set) {
                    newSet.add(String.format("(%s,%s)", x, y));
                }
            }
            set = newSet;
        }
        System.out.println("set:= " + set);
        System.out.println("size = " + set.size());
    }
}
|
#!/usr/bin/env bash
# Generate a C header ($1) that embeds the PEM public key read from $2
# as a string constant named OTHER_ENCLAVE_PUBLIC_KEY.
destfile="$1"
pubkey_file="$2"
# Write the header preamble and opening include guard.
cat > "$destfile" << EOF
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
#ifndef SAMPLES_REMOTE_ATTESTATION_PUBKEY_H
#define SAMPLES_REMOTE_ATTESTATION_PUBKEY_H
EOF
printf 'static const char OTHER_ENCLAVE_PUBLIC_KEY[] =' >> "$destfile"
# Emit each line of the key file as a quoted, newline-terminated C string
# fragment; the `|| [ -n "$p" ]` handles a final line without a newline.
while IFS="" read -r p || [ -n "$p" ]
do
 printf '\n \"%s\\n\"' "$p" >> "$destfile"
done < "$pubkey_file"
printf ';\n' >> "$destfile"
# Close the statement and the include guard.
cat >> "$destfile" << EOF
#endif /* SAMPLES_REMOTE_ATTESTATION_PUBKEY_H */
EOF
|
import React, { Component } from 'react';
import { View, Text } from 'react-native';
export default class App extends Component {
state = {
quote: ''
}
componentDidMount() {
fetch('https://quotes.rest/qod?language=en').then(res => res.json()).then(res => {
this.setState({
quote: res.contents.quotes[0].quote
})
});
}
render() {
return (
<View style={{ flex: 1, justifyContent: 'center', alignItems: 'center' }}>
<Text>{this.state.quote}</Text>
</View>
);
}
}
|
<reponame>FreddieSanchez/ChoreApp<filename>back-end/src/main/scala/io/github/freddiesanchez/chore/repository/ChoreRepository.scala<gh_stars>0
package io.github.freddiesanchez.chore.repository
import doobie.imports._
import doobie.util.transactor.Transactor
import fs2.interop.cats._
import io.github.freddiesanchez.chore.models._
import scala.util.Random
/**
 * Doobie-backed repository for Chore rows.
 *
 * All public methods return ConnectionIO programs; `run` executes one
 * synchronously against the supplied transactor.
 */
class ChoreRepository(xa: Transactor[IOLite]) {

  // Map Rating to/from its plain-text column representation.
  private implicit val RatingMeta: Meta[Rating] = Meta[String].xmap(Rating.unsafeFromString, _.rating)

  /** Execute a ConnectionIO program against the transactor, blocking for the result. */
  def run[A](query:ConnectionIO[A]):A = {
    query.transact(xa).unsafePerformIO
  }

  /** DDL: create the chore table. Returns the update count. */
  val createChoreTable:ConnectionIO[Int] =
    sql"""CREATE TABLE chore
     (id SERIAL PRIMARY KEY,
      name TEXT NOT NULL,
      description TEXT NOT NULL,
      rating TEXT NOT NULL)""".update.run

  /** Fetch a chore by primary key; None if absent. */
  def getChore(id:Long):ConnectionIO[Option[Chore]] = {
    sql"SELECT id, name, description, rating FROM chore WHERE id = ${id}".query[Chore].option
  }

  /** DDL: drop the chore table. */
  def dropChoreTable: ConnectionIO[Int]=
    sql"""DROP TABLE chore""".update.run

  /** Insert a chore and return it re-read with its generated id. */
  def addChore(newChore:Chore):ConnectionIO[Option[Chore]] = {
    for {
      id <- sql"""INSERT INTO chore (name, description, rating)
              VALUES (
               ${newChore.name},
               ${newChore.description},
               ${newChore.rating})""".update.withUniqueGeneratedKeys[Long]("id")
      chore <- getChore(id)
    } yield (chore)
  }

  /**
   * Update a chore (which must already have an id) and return it re-read.
   *
   * Bug fix: `update.run` yields the AFFECTED ROW COUNT, not the row id.
   * The original bound that count and passed it to `getChore`, looking up
   * the wrong row. We now discard the count and re-read by the chore's own id.
   */
  def updateChore(chore:Chore):ConnectionIO[Option[Chore]] = {
    for {
      _ <- sql"""UPDATE chore
             SET name = ${chore.name},
                 description = ${chore.description},
                 rating = ${chore.rating}
             WHERE id = ${chore.id.get}""".update.run
      updated <- getChore(chore.id.get)
    } yield (updated)
  }
}
|
<reponame>JoshuaGross/yprof<gh_stars>1-10
/**
* yprof hasher.
*
* @copyright (c) 2015, Yahoo Inc. Code licensed under the MIT license. See LICENSE file for terms.
*/
module.exports = {
description: 'Get the hash of a given source file, for finding a compiled version of code in a cache directory.',
setupOptions: function (commander) {
commander.usage('<filename>');
},
validate: function (commander) {
if (commander.args.length !== 1) {
throw new Error('No filename given');
}
},
execute: function (commander) {
var fs = require('fs');
var xxhash = require('xxhashjs');
var filename = commander.args[0];
var source = fs.readFileSync(filename).toString();
var hash = xxhash(source, commander.hashSeed).toString(16);
console.log(hash);
}
};
|
<reponame>grupodesoft/appwoori<filename>assets/vue/appvue/appgrupo.js
// Configuration is injected via data-* attributes on this script's own tag.
var this_js_script = $('script[src*=appgrupo]');
var my_var_1 = this_js_script.attr('data-my_var_1');
if (typeof my_var_1 === "undefined") {
    // FIX: assign instead of redeclaring with `var` inside the if block.
    my_var_1 = 'some_default_value';
}
var my_var_2 = this_js_script.attr('data-my_var_2');
if (typeof my_var_2 === "undefined") {
    my_var_2 = 'some_default_value';
}
Vue.config.devtools = true;
// Root Vue instance managing the "Grupos" CRUD screen.
var v = new Vue({
    el: '#app',
    // FIX: `directives` is a Vue component OPTION, not reactive state.
    // It was previously declared inside `data`, so `columnSortable` was
    // never registered with this instance and v-column-sortable bindings
    // in the template did nothing.
    directives: {columnSortable},
    data: {
        url: my_var_1,
        idniveleducativo: my_var_2,
        addModal: false,
        editModal: false,
        cargando: false,
        error: false,
        //deleteModal:false,
        grupos: [],
        niveles: [],
        especialidades: [],
        turnos: [],
        search: {text: ''},
        emptyResult: false,
        newGrupo: {
            idespecialidad: '',
            idnivelestudio: '',
            idturno: '',
            nombregrupo: '',
            smserror: ''},
        chooseGrupo: {},
        formValidate: [],
        successMSG: '',
        //pagination
        currentPage: 0,
        rowCountPage: 10,
        totalGrupos: 0,
        pageRange: 2
    },
    created() {
        this.showAll();
        this.showAllTurnos();
        this.showAllNiveles();
        this.showAllEspecialidades();
    },
    methods: {
        // Sort the loaded page of grupos in place.
        orderBy(sortFn) {
            this.grupos.sort(sortFn);
        },
        abrirAddModal() {
            $('#addRegister').modal('show');
        },
        abrirEditModal() {
            $('#editRegister').modal('show');
        },
        // Load every grupo (unfiltered).
        showAll() {
            axios.get(this.url + "Grupo/showAll").then(function (response) {
                if (response.data.grupos == null) {
                    v.noResult()
                } else {
                    v.getData(response.data.grupos);
                }
            })
        },
        showAllEspecialidades() {
            axios.get(this.url + "Grupo/showAllEspecialidades/")
                .then(response => (this.especialidades = response.data.especialidades));
        },
        showAllTurnos() {
            axios.get(this.url + "Grupo/showAllTurnos/")
                .then(response => (this.turnos = response.data.turnos));
        },
        showAllNiveles() {
            axios.get(this.url + "Grupo/showAllNiveles/")
                .then(response => (this.niveles = response.data.niveles));
        },
        // Server-side search driven by `search.text`.
        searchGrupo() {
            var formData = v.formData(v.search);
            axios.post(this.url + "Grupo/searchGrupo", formData).then(function (response) {
                if (response.data.grupos == null) {
                    v.noResult()
                } else {
                    v.getData(response.data.grupos);
                }
            })
        },
        addGrupo() {
            v.cargando = true;
            v.error = false;
            var formData = v.formData(v.newGrupo);
            axios.post(this.url + "Grupo/addGrupo", formData).then(function (response) {
                if (response.data.error) {
                    v.formValidate = response.data.msg;
                    v.error = true;
                    v.cargando = false;
                } else {
                    swal({
                        position: 'center',
                        type: 'success',
                        title: 'Exito!',
                        showConfirmButton: false,
                        timer: 3000
                    });
                    v.clearAll();
                    v.clearMSG();
                }
            })
        },
        updateGrupo() {
            v.cargando = true;
            v.error = false;
            var formData = v.formData(v.chooseGrupo);
            axios.post(this.url + "Grupo/updateGrupo", formData).then(function (response) {
                if (response.data.error) {
                    v.formValidate = response.data.msg;
                    v.error = true;
                    v.cargando = false;
                } else {
                    //v.successMSG = response.data.success;
                    swal({
                        position: 'center',
                        type: 'success',
                        title: 'Modificado!',
                        showConfirmButton: false,
                        timer: 3000
                    });
                    v.clearAll();
                    v.clearMSG();
                }
            })
        },
        // Confirm with the user, then delete by id.
        deleteGrupo(id) {
            Swal.fire({
                title: '¿Eliminar Grupo?',
                text: "Realmente desea eliminar el Grupo.",
                type: 'question',
                showCancelButton: true,
                confirmButtonColor: '#3085d6',
                cancelButtonColor: '#d33',
                confirmButtonText: 'Eliminar',
                cancelButtonText: 'Cancelar'
            }).then((result) => {
                if (result.value) {
                    axios.get(this.url + "Grupo/deleteGrupo", {
                        params: {
                            idgrupo: id
                        }
                    }).then(function (response) {
                        if (response.data.error == false) {
                            //v.noResult()
                            swal({
                                position: 'center',
                                type: 'success',
                                title: 'Eliminado!',
                                showConfirmButton: false,
                                timer: 3000
                            });
                            v.clearAll();
                            v.clearMSG();
                        } else {
                            swal("Información", response.data.msg.msgerror, "info")
                        }
                    }).catch((error) => {
                        swal("Información", "No se puede eliminar el Grupo", "info")
                    })
                }
            })
        },
        // Convert a plain object into multipart FormData for the API.
        formData(obj) {
            var formData = new FormData();
            for (var key in obj) {
                formData.append(key, obj[key]);
            }
            return formData;
        },
        getData(grupos) {
            v.emptyResult = false; // become false if has a record
            v.totalGrupos = grupos.length //get total of user
            v.grupos = grupos.slice(v.currentPage * v.rowCountPage, (v.currentPage * v.rowCountPage) + v.rowCountPage); //slice the result for pagination
            // if the record is empty, go back a page
            if (v.grupos.length == 0 && v.currentPage > 0) {
                v.pageUpdate(v.currentPage - 1)
                v.clearAll();
            }
        },
        selectGrupo(grupo) {
            v.chooseGrupo = grupo;
        },
        clearMSG() {
            setTimeout(function () {
                v.successMSG = ''
            }, 3000); // disappearing message success in 3 sec
        },
        // Reset modals, form state and flags, then reload the list.
        clearAll() {
            $('#editRegister').modal('hide');
            $('#addRegister').modal('hide');
            v.newGrupo = {
                idnivelestudio: '',
                idespecialidad: '',
                idturno: '',
                nombregrupo: '',
                smserror: ''};
            v.formValidate = false;
            v.addModal = false;
            v.editModal = false;
            v.cargando = false;
            v.error = false;
            v.refresh();
        },
        noResult() {
            v.emptyResult = true; // become true if the record is empty, print 'No Record Found'
            v.grupos = null;
            v.totalGrupos = 0;//remove current page if is empty
        },
        pageUpdate(pageNumber) {
            v.currentPage = pageNumber; //receive currentPage number came from pagination template
            v.refresh();
        },
        refresh() {
            v.search.text ? v.searchGrupo() : v.showAll(); //for preventing
        }
    }
})
|
#!/bin/sh
# Send SIGINT to the running node server process(es), if any.
pids=$(ps aux | grep node | grep server | grep -v grep | awk '{print $2}')
# FIX: `kill` invoked with an empty PID list exits with a usage error;
# only signal when a matching process was actually found.
if [ -n "$pids" ]; then
    sudo kill -s INT $pids
fi
|
<reponame>Bahroel0/Web-Nikah-IMK
// Page-wide Materialize widget initialization (carousel, modals, selects,
// date and time pickers).
$(document).ready(function(){
    // NOTE(review): the second argument evaluates setTimeout(...) IMMEDIATELY,
    // scheduling the first auto-advance 4.5s after load; it is not a carousel
    // option and its return value is ignored by the plugin — confirm intent.
    $('.carousel.carousel-slider').carousel({fullWidth: true, padding:200},setTimeout(autoplay, 4500));
    // Advance the carousel, then reschedule itself every 5 seconds.
    function autoplay() {
        $('.carousel').carousel('next');
        setTimeout(autoplay, 5000);
    }
    // the "href" attribute of the modal trigger must specify the modal ID that wants to be triggered
    $('.modal').modal();
    // select for form
    $('select').material_select();
    // date
    $('.datepicker').pickadate({
        selectMonths: true, // Creates a dropdown to control month
        selectYears: 15, // Creates a dropdown of 15 years to control year,
        today: 'Today',
        clear: 'Clear',
        close: 'Ok',
        closeOnSelect: false // Close upon selecting a date,
    });
    // time
    $('.timepicker').pickatime({
        default: 'now', // Set default time: 'now', '1:30AM', '16:30'
        fromnow: 0, // set default time to * milliseconds from now (using with default = 'now')
        twelvehour: false, // Use AM/PM or 24-hour format
        donetext: 'OK', // text for done-button
        cleartext: 'Clear', // text for clear-button
        canceltext: 'Cancel', // Text for cancel-button
        autoclose: false, // automatic close timepicker
        ampmclickable: true, // make AM PM clickable
        aftershow: function(){} //Function for after opening timepicker
    });
});
|
<reponame>waleedmashaqbeh/freequartz<gh_stars>10-100
/* Copyright 2010 Smartmobili SARL
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef CGIMAGEPRIV_H_
#define CGIMAGEPRIV_H_
#include <CoreFoundation/CFRuntime.h>
#include <CoreGraphics/CGPath.h>
#include "CGMacros.h"
#include "CGColorSpacePriv.h"
CF_EXTERN_C_BEGIN
/* Private per-image flag bits stored in CGImage.flags. */
enum {
    kCGImageInterpolate = 0x01,
    kCGImageMask = 0x10,
};
/* forward declarations */
typedef struct CGImage CGImage, *CGImageRef;
typedef struct CGImageJPEGRep CGImageJPEGRep, *CGImageJPEGRepRef;
typedef struct CGImageEPSRep CGImageEPSRep, *CGImageEPSRepRef;
/* Manually reference-counted wrapper around a JPEG data provider. */
struct CGImageJPEGRep {
    signed long refcount; //0x00
    CGDataProviderRef provider; //0x04
};
/* Manually reference-counted wrapper around an EPS data provider plus
 * its rendered image. */
struct CGImageEPSRep {
    signed long refcount; //0x00
    CGDataProviderRef provider; //0x04
    CGImageRef image; //0x08
};
/* Private image object layout. The trailing //0xNN comments record field
 * byte offsets — presumably mirroring Apple's binary layout; do not
 * reorder fields without confirming. */
struct CGImage {
    CFRuntimeBase obj; //0x00
    CFTypeID ID; //0x08
    size_t width; //0x0C
    size_t height; //0x10
    size_t bitsPerComponent; //0x14
    size_t bitsPerPixel; //0x18
    size_t bytesPerRow; //0x1C
    CGColorSpaceRef space; //0x20
    CGBitmapInfo bitmapInfo; //0x24
    CGDataProviderRef provider; //0x28
    CGColorRenderingIntent intent; //0x2C
    CGFloat* decode; //0x30
    uint32_t flags; //0x34
    CGFloat* components; //0x38
    CGImageRef refs; //0x3C
    CGFloat* components2; //0x40
    CGImageJPEGRepRef imageJPEGRep; //0x44
    CGImageEPSRepRef imageEPSRep; //0x48
    CGPathRef clipPath; //0x4C
    int64_t sharedID; //0x50
};
/* CFRuntime finalizer for CGImage instances. */
void CGImageDestroy(CFTypeRef ctf);
/* Check that a color space is compatible with the given bitmap layout. */
Boolean valid_image_colorspace(CGColorSpaceRef space, CGBitmapInfo bitmapInfo);
CGImageRef CGImageCreateCopyWithJPEGSource(CGImageRef image, CGDataProviderRef provider);
CGImageJPEGRepRef CGImageJPEGRepRetain(CGImageJPEGRepRef imageJPEG);
void CGImageJPEGRepRelease(CGImageJPEGRepRef imageJPEG);
CGImageJPEGRepRef CGImageJPEGRepCreate(CGDataProviderRef provider);
CGImageEPSRepRef CGImageEPSRepRetain(CGImageEPSRepRef imageEPS);
void CGImageEPSRepRelease(CGImageEPSRepRef imageEPS);
CGPathRef CGImageGetClipPath(CGImageRef image);
int64_t CGImageGetSharedIdentifier(CGImageRef image);
CF_EXTERN_C_END
#endif /* CGIMAGEPRIV_H_ */
|
class Query:
    """Thin wrapper around a DB-API 2.0 connection that returns query
    results as a list of column-name -> value dictionaries."""

    def __init__(self, db_connection):
        # Any DB-API 2.0 connection (sqlite3, psycopg2, ...) — it must
        # provide .cursor() returning a cursor with execute/description.
        self.db_connection = db_connection

    def execute(self, sql_query):
        """Run `sql_query` and return rows as a list of dicts.

        Returns an empty list on any error (best-effort behavior is
        preserved from the original implementation; the error is printed).
        """
        cursor = None
        try:
            cursor = self.db_connection.cursor()
            cursor.execute(sql_query)
            columns = [col[0] for col in cursor.description]
            return [dict(zip(columns, row)) for row in cursor.fetchall()]
        except Exception as e:
            # Handle any specific exceptions or errors based on the database library used
            # Log the error or raise a custom exception as per the application's error handling strategy
            print(f"Error occurred during query execution: {e}")
            return []  # Return an empty list to indicate query failure
        finally:
            # FIX: the cursor was previously never closed, leaking a cursor
            # (and on some drivers, server-side resources) per call.
            if cursor is not None:
                cursor.close()
|
// Angular + angular-meteor controller for the tweets list.
// NOTE: "<%= appName %>" is a generator template placeholder — this file is
// presumably rendered by a scaffolding tool before use; confirm.
angular.module("<%= appName %>").controller("TweetsListCtrl", ['$scope', '$meteor',
  function($scope, $meteor){
    // Newest-first ordering by creation time.
    $scope.sort = {createdAt: -1};
    // Re-subscribe whenever the reactive `search` value changes.
    $meteor.autorun($scope, function() {
      $meteor.subscribe('tweets', {}, $scope.getReactively('search')).then(function(){
        console.log('Got tweets');
      });
    });
    // Reactive collection bound to the current sort order.
    $scope.tweets = $meteor.collection(function() {
      return Tweets.find({}, {
        sort : $scope.getReactively('sort')
      });
    });
    // CRUD operations delegate to server-side Meteor methods.
    $scope.add = function(tweet){
      $meteor.call('add', tweet);
    };
    $scope.remove = function(tweet){
      $meteor.call('remove', tweet);
    };
    $scope.update = function(tweet) {
      $meteor.call('update', tweet);
    };
  }]);
|
<filename>ocpp-v2_0/src/main/java/eu/chargetime/ocpp/model/basic/BootNotificationRequest.java
package eu.chargetime.ocpp.model.basic;
/*
ChargeTime.eu - Java-OCA-OCPP
MIT License
Copyright (C) 2018 <NAME> <<EMAIL>>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/
import eu.chargetime.ocpp.model.Request;
import eu.chargetime.ocpp.model.basic.types.BootReasonEnumType;
import eu.chargetime.ocpp.model.basic.types.ChargingStationType;
import eu.chargetime.ocpp.model.validation.RequiredValidator;
import eu.chargetime.ocpp.utilities.MoreObjects;
import java.util.Objects;
/**
* Sent by the Charging Station to the CSMS.
*/
public class BootNotificationRequest implements Request {
    private transient RequiredValidator validator = new RequiredValidator();
    private BootReasonEnumType reason;
    private ChargingStationType chargingStation;

    public BootNotificationRequest() {
    }

    /**
     * This contains the reason for sending this message to the CSMS.
     *
     * @return {@link BootReasonEnumType}
     */
    public BootReasonEnumType getReason() {
        return reason;
    }

    /**
     * Required. This contains the reason for sending this message to the CSMS.
     *
     * @param reason {@link BootReasonEnumType}
     */
    public void setReason(BootReasonEnumType reason) {
        validator.validate(reason);
        this.reason = reason;
    }

    /**
     * Identifies the Charging Station.
     *
     * @return {@link ChargingStationType}
     */
    public ChargingStationType getChargingStation() {
        return chargingStation;
    }

    /**
     * Required. Identifies the Charging Station.
     *
     * @param chargingStation {@link ChargingStationType}
     */
    public void setChargingStation(ChargingStationType chargingStation) {
        // FIX: this field is documented as Required, but unlike setReason the
        // setter never validated its argument; made consistent.
        validator.validate(chargingStation);
        this.chargingStation = chargingStation;
    }

    @Override
    public boolean validate() {
        return validator.safeValidate(reason) &&
                validator.safeValidate(chargingStation) &&
                chargingStation.validate();
    }

    @Override
    public boolean transactionRelated() {
        return false;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        BootNotificationRequest that = (BootNotificationRequest) o;
        // FIX: hashCode() combines reason AND chargingStation, but equals()
        // previously compared only reason — violating the equals/hashCode
        // contract (equal-by-equals objects could land in different hash
        // buckets and unequal ones could collide as "equal").
        return Objects.equals(reason, that.reason) &&
                Objects.equals(chargingStation, that.chargingStation);
    }

    @Override
    public int hashCode() {
        return Objects.hash(reason, chargingStation);
    }

    @Override
    public String toString() {
        return MoreObjects.toStringHelper(this)
                .add("reason", reason)
                .add("chargingStation", chargingStation)
                .toString();
    }
}
|
<gh_stars>1-10
import * as React from 'react';
import RouteWrapper from "@core/RouteWrapper";
import { apPathAddons } from '@paths/ap/addons';
import AddonsWizard from '@containers/ap/AddonsWizard';
import PageWrapper from '@core/PageWrapper';
import { setAPImage } from '@util/set-bg-image';
import { apiw as welcomeAPI } from "@async/member-welcome-ap";
import { getNoGP, getNoDW } from '@containers/ap/HomePageActionsAP';
import { apBasePath } from '@paths/ap/_base';
import FactaLoadingPage from '@facta/FactaLoadingPage';
// Async props resolved from the member-welcome endpoint: which of the two
// addon prompts (GP / DW) still apply to this member.
type AddonsProps = {
	noGP: boolean,
	noDW: boolean
}
// Route for the AP addons wizard. If neither addon action is pending, the
// user is redirected to the AP base path instead of seeing the wizard.
export const apAddonsPageRoute = new RouteWrapper(true, apPathAddons, history => <PageWrapper
	key="addons"
	history={history}
	component={(urlProps: {}, async: AddonsProps) => <AddonsWizard
		history={history}
		noGP={async.noGP}
		noDW={async.noDW}
	/>}
	urlProps={{}}
	getAsyncProps={() => {
		return welcomeAPI.send(null).then(res => {
			if (res.type == "Success") {
				const noGP = getNoGP(res.success.actions);
				const noDW = getNoDW(res.success.actions);
				if (!noGP && !noDW) {
					history.push(apBasePath.getPathFromArgs({}))
					return null;
				} else {
					return {
						type: "Success",
						success: {
							noGP,
							noDW
						}
					}
				}
			}
			// NOTE(review): when res.type is not "Success" this promise
			// resolves to undefined (no else branch) — confirm PageWrapper
			// tolerates that, or handle it explicitly.
		}).catch(err => Promise.resolve(null)); // TODO: handle failure
	}}
	shadowComponent={<FactaLoadingPage setBGImage={setAPImage} />}
/>);
|
<gh_stars>0
import logging
import time
from shakenfist_ci import base
logging.basicConfig(level=logging.INFO, format='%(message)s')
LOG = logging.getLogger()
class TestStateChanges(base.BaseNamespacedTestCase):
    """CI test exercising instance lifecycle operations: soft/hard reboot,
    power off/on, and pause/unpause, verifying connectivity via ping."""

    def __init__(self, *args, **kwargs):
        # All resources created by this test are grouped under the
        # 'statechanges' namespace prefix.
        kwargs['namespace_prefix'] = 'statechanges'
        super(TestStateChanges, self).__init__(*args, **kwargs)

    def setUp(self):
        # Allocate a dedicated /24 test network for the instances below.
        super(TestStateChanges, self).setUp()
        self.net = self.test_client.allocate_network(
            '192.168.242.0/24', True, True, '%s-net-one' % self.namespace)

    def test_lifecycle_events(self):
        """Boot an instance and drive it through lifecycle transitions,
        pinging after each one to verify expected reachability."""
        # Start our test instance
        inst = self.test_client.create_instance(
            'cirros', 1, 1024,
            [
                {
                    'network_uuid': self.net['uuid']
                },
            ],
            [
                {
                    'size': 8,
                    'base': 'cirros',
                    'type': 'disk'
                }
            ], None, None)
        ip = self.test_client.get_instance_interfaces(inst['uuid'])[0]['ipv4']
        LOG.info('Started test instance %s', inst['uuid'])
        # We need to start a second instance on the same node / network so that
        # the network doesn't get torn down during any of the tests.
        self.test_client.create_instance(
            'cirros', 1, 1024,
            [
                {
                    'network_uuid': self.net['uuid']
                },
            ],
            [
                {
                    'size': 8,
                    'base': 'cirros',
                    'type': 'disk'
                }
            ], None, None, force_placement=inst['node'])
        LOG.info('Started keep network alive instance')
        # Wait for our test instance to boot
        self.assertIsNotNone(inst['uuid'])
        self._await_login_prompt(inst['uuid'])
        LOG.info(' ping test...')
        # The network can be slow to start and may not be available after the
        # instance "login prompt" event. We are willing to forgive a few fails
        # while the network starts.
        self._test_ping(inst['uuid'], self.net['uuid'], ip, True, 10)
        # Soft reboot
        LOG.info('Instance Soft reboot')
        self.test_client.reboot_instance(inst['uuid'])
        self._await_login_prompt(inst['uuid'], after=time.time())
        LOG.info(' ping test...')
        self._test_ping(inst['uuid'], self.net['uuid'], ip, True, 10)
        # Hard reboot
        LOG.info('Instance Hard reboot')
        self.test_client.reboot_instance(inst['uuid'], hard=True)
        self._await_login_prompt(inst['uuid'], after=time.time())
        LOG.info(' ping test...')
        self._test_ping(inst['uuid'], self.net['uuid'], ip, True, 10)
        # NOTE(review): this early return makes everything below (power
        # off/on and pause/unpause checks) unreachable dead code — confirm
        # whether it is a leftover from debugging and should be removed.
        return
        # Power off
        LOG.info('Power off')
        self.test_client.power_off_instance(inst['uuid'])
        self._await_power_off(inst['uuid'])
        LOG.info(' ping test...')
        self._test_ping(inst['uuid'], self.net['uuid'], ip, False)
        # Power on
        LOG.info('Instance Power on')
        self.test_client.power_on_instance(inst['uuid'])
        self._await_login_prompt(inst['uuid'], after=time.time())
        LOG.info(' ping test...')
        self._test_ping(inst['uuid'], self.net['uuid'], ip, True, 10)
        # Pause
        LOG.info('Instance Pause')
        self.test_client.pause_instance(inst['uuid'])
        LOG.info(' ping test...')
        self._test_ping(inst['uuid'], self.net['uuid'], ip, False, 10)
        # Unpause
        LOG.info('Instance Unpause')
        self.test_client.unpause_instance(inst['uuid'])
        # No new login prompt after unpause, so just forgive a few fails while
        # the instance is un-paused.
        LOG.info(' ping test...')
        self._test_ping(inst['uuid'], self.net['uuid'], ip, True, 10)
|
<reponame>JasonLiu798/javautil
package com.atjl.log.util;
import com.atjl.log.api.LogUtil;
import com.atjl.util.collection.CollectionUtil;
import com.atjl.util.json.JSONFastJsonUtil;
import java.io.PrintWriter;
import java.io.StringWriter;
public class LogCommonUtil {
    private LogCommonUtil() {
        super();
    }

    /**
     * Serialize each log parameter to a JSON string prefixed with "[J]".
     *
     * @param params values to serialize; returned unchanged when null/empty
     * @return array of "[J]"-prefixed JSON strings, same length as input
     */
    public static Object[] filter2Json(Object... params) {
        if (CollectionUtil.isEmpty(params)) {
            return params;
        }
        Object[] strParam = new Object[params.length];
        for (int i = 0; i < params.length; i++) {
            // Previously assigned null first and immediately overwrote it;
            // a "[S]" + toString() variant was also left commented out here.
            strParam[i] = "[J]" + JSONFastJsonUtil.objectToJson(params[i]);
        }
        return strParam;
    }

    /**
     * Render a throwable's stack trace as a string.
     *
     * @param t the throwable; may be null
     * @return stack trace text, "exception null" for null input, or a
     *         diagnostic message when rendering itself fails
     */
    public static String exception2str(Throwable t) {
        if (t == null) {
            return "exception null";
        }
        StringWriter sw = new StringWriter();
        PrintWriter pw = new PrintWriter(sw);
        String res = "get stack trace fail,throwable " + t;
        try {
            t.printStackTrace(pw);
            res = sw.toString();
            return res;
        } catch (Exception e) {
            res = res + "," + e.getMessage();
            if (LogUtil.isDebugEnabled()) {
                LogUtil.debug("exception2str {}", e);
            }
        } finally {
            // PrintWriter over a StringWriter holds no OS resources, but
            // close() flushes and keeps the contract explicit.
            pw.close();
        }
        return res;
    }
}
|
#!/usr/bin/env bash
# Install a given docker-compose release into /opt/bin on CoreOS Container Linux.
V="1.0.0" # The version number of this script.
PROG=$0
COMPOSEVERSION=""

# Installing into /opt/bin requires root.
if [[ $EUID -ne 0 ]]; then
  echo "This script must be run as root."
  exit 1
fi

# Parse command-line options, then run the installation.
main() {
  while getopts ":hVv:" opt; do
    case ${opt} in
      h )
        usage
        exit 0
        ;;
      V )
        version
        exit 0
        ;;
      v )
        COMPOSEVERSION=$OPTARG # set the docker compose version.
        ;;
      \? )
        echo "Invalid option: -$OPTARG" 1>&2
        echo
        usage
        exit 1
        ;;
      : )
        echo "Invalid option: -$OPTARG requires an argument" 1>&2
        echo
        usage
        exit 1
        ;;
    esac
  done
  shift $((OPTIND -1))
  run
}

usage() {
  echo "Usage: $PROG"
  echo " Install docker-compose on CoreOS Container Linux."
  echo " -v"
  echo " Supply the docker-compose version number to install."
  echo " -h"
  echo " Display this help message."
  echo " -V"
  echo " Display this script's version number and exit."
}

version() {
  echo "$PROG version $V"
}

# Download the requested release and make it executable.
run() {
  if [[ -z "$COMPOSEVERSION" ]]; then
    echo "You must supply a docker-compose version number to install."
    echo "See https://github.com/docker/compose/releases for available releases."
    echo
    usage
    exit 1
  fi
  mkdir -p /opt/bin
  echo "Downloading docker-compose version ${COMPOSEVERSION}..."
  echo "https://github.com/docker/compose/releases/download/${COMPOSEVERSION}/docker-compose-$(uname -s)-$(uname -m)"
  # FIX: without -f, curl happily saves GitHub's 404 page as the "binary"
  # when the version does not exist; fail loudly instead.
  if ! curl -f -s -L "https://github.com/docker/compose/releases/download/${COMPOSEVERSION}/docker-compose-$(uname -s)-$(uname -m)" -o /opt/bin/docker-compose; then
    echo "ERROR: download failed; check the version number." 1>&2
    exit 1
  fi
  chmod +x /opt/bin/docker-compose
  docker -v
  docker-compose -v
}

# FIX: quote "$@" so arguments containing spaces are passed intact.
main "$@"
|
-- Employees earning more than the company-wide average salary.
SELECT *
FROM employees
WHERE salary > (
    SELECT AVG(salary)
    FROM employees
);
|
#!/bin/bash
# Run_MultiCore2.sh
# Check Environment
# FIX: quote the expansion so the -z test is well-formed even when the
# variable is unset or contains spaces.
if [ -z "${IMPERAS_HOME}" ]; then
    echo "IMPERAS_HOME not set. Please check environment setup."
    # FIX: a bare `exit` here returned 0 (the echo's status), so callers
    # could not detect the configuration error; exit non-zero instead.
    exit 1
fi
# Launch the ISS with two V850 cores sharing the given memory window;
# extra arguments are forwarded verbatim.
${IMPERAS_ISS} --verbose --output imperas.log \
    --program ../../../Applications/multicore2/multicore2.V850-O1-g.elf \
    --processorvendor renesas.ovpworld.org --processorname v850 --variant V850 \
    --numprocessors 2 --hishare 0x0010ffff --loshare 0x00100000 \
    "$@"
|
//
// Created by ooooo on 2020/1/16.
//
#ifndef CPP_1160__SOLUTION1_H_
#define CPP_1160__SOLUTION1_H_

#include <array>
#include <iostream>
#include <string>
#include <vector>
#include <unordered_map>

using namespace std;

// LeetCode 1160: sum the lengths of all words that can be formed from the
// letters of `chars`, using each letter at most as many times as it appears.
class Solution {
 public:
    int countCharacters(vector<string> &words, string chars) {
        // FIX: the previous version called std::for_each without including
        // <algorithm> (it only compiled via transitive includes). Inputs are
        // lowercase a-z per the problem, so fixed 26-slot count arrays are
        // both simpler and cheaper than per-word unordered_maps.
        array<int, 26> avail{};
        for (char c : chars) avail[c - 'a']++;

        int total = 0;
        for (const auto &word : words) {
            array<int, 26> need{};
            bool ok = true;
            for (char c : word) {
                // Early exit as soon as a letter exceeds its budget.
                if (++need[c - 'a'] > avail[c - 'a']) {
                    ok = false;
                    break;
                }
            }
            if (ok) total += static_cast<int>(word.size());
        }
        return total;
    }
};

#endif //CPP_1160__SOLUTION1_H_
|
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
# FIX: LinearRegression was used below but never imported, so the script
# crashed with NameError before the regression line could be computed.
from sklearn.linear_model import LinearRegression

# Read Boston housing data in
boston_df = pd.read_csv(
    'https://raw.githubusercontent.com/PacktWorkshops/The-Data-Science-Workshop/master/Chapter02/Dataset/boston_house_prices.csv'
)
# Visualize data: median home value vs. average rooms per dwelling
plt.style.use('ggplot')
plt.scatter(boston_df.RM, boston_df.MEDV, color='black')
plt.xlabel('Average number of rooms per dwelling (RM)')
plt.ylabel('Median Value of Homes in $1000s (MEDV)')
plt.title("Modeling Simple Linear Regression in Boston Housing Data")
# Calculate regression line (single feature RM predicting MEDV)
X = boston_df[['RM']]
y = boston_df['MEDV']
model = LinearRegression()
model.fit(X, y)
# Calculate predicted value
predicted_MEDV = model.predict(X)
# Plot regression line over the scatter plot
plt.plot(boston_df.RM, predicted_MEDV, color='blue', linewidth=2)
plt.show()
|
import { DEFAULT_VIDEO_CONSTRAINTS } from '../../../constants';
import { useCallback, useState } from 'react';
import Video, { LocalVideoTrack, LocalAudioTrack, CreateLocalTrackOptions } from 'twilio-video';
import { useHasAudioInputDevices, useHasVideoInputDevices } from '../../../hooks/deviceHooks/deviceHooks';
import { useMutex } from "react-context-mutex";
import LocalStorage_TwilioVideo from '../../../../../../classes/LocalStorage/TwilioVideo';
// React hook managing the local Twilio audio/video tracks: acquisition,
// replacement, and removal, remembering the last-used devices.
export default function useLocalTracks() {
    const [audioTrack, setAudioTrack] = useState<LocalAudioTrack>();
    const [videoTrack, setVideoTrack] = useState<LocalVideoTrack>();
    // True while createLocalTracks() is in flight.
    const [isAcquiringLocalTracks, setIsAcquiringLocalTracks] = useState(false);
    const hasAudio = useHasAudioInputDevices();
    const hasVideo = useHasVideoInputDevices();
    // Acquire a new local audio track, optionally pinned to a device id.
    const getLocalAudioTrack = useCallback((deviceId?: string) => {
        const options: CreateLocalTrackOptions = {};
        if (deviceId) {
            options.deviceId = { exact: deviceId };
        }
        console.log("Running getLocalAudioTrack");
        return Video.createLocalAudioTrack(options).then(newTrack => {
            setAudioTrack(newTrack);
            return newTrack;
        });
    }, []);
    // Acquire a new local video track; callers can override the defaults.
    const getLocalVideoTrack = useCallback((newOptions?: CreateLocalTrackOptions) => {
        const options: CreateLocalTrackOptions = {
            ...(DEFAULT_VIDEO_CONSTRAINTS as {}),
            // Unique name so successive camera tracks are distinguishable.
            name: `camera-${Date.now()}`,
            ...newOptions,
        };
        console.log("Running getLocalVideoTrack");
        return Video.createLocalVideoTrack(options).then(newTrack => {
            setVideoTrack(newTrack);
            return newTrack;
        });
    }, []);
    // Stop the camera and clear the stored track.
    const removeLocalVideoTrack = useCallback(() => {
        if (videoTrack) {
            videoTrack.stop();
            setVideoTrack(undefined);
        }
    }, [videoTrack]);
    const MutexRunner = useMutex();
    // NOTE(review): a new MutexRunner is constructed on every render of this
    // hook — confirm react-context-mutex keys mutexes by the
    // "getAudioAndVideoTracks" name so concurrent calls are truly serialized.
    const getAudioAndVideoTracksMutex = new MutexRunner("getAudioAndVideoTracks");
    // Acquire both tracks at once, reusing the last camera/mic from local
    // storage. No-op when tracks already exist or no devices are present.
    const getAudioAndVideoTracks = async () => {
        return getAudioAndVideoTracksMutex.run(async () => {
            getAudioAndVideoTracksMutex.lock();
            try {
                if (!hasAudio && !hasVideo) return Promise.resolve();
                if (audioTrack || videoTrack) return Promise.resolve();
                console.log("Running getAudioAndVideoTracks");
                setIsAcquiringLocalTracks(true);
                const tracks = await Video.createLocalTracks({
                    video: hasVideo && {
                        ...(DEFAULT_VIDEO_CONSTRAINTS as {}),
                        name: `camera-${Date.now()}`,
                        deviceId: LocalStorage_TwilioVideo.twilioVideoLastCamera ?? undefined,
                    },
                    audio: hasAudio && {
                        deviceId: LocalStorage_TwilioVideo.twilioVideoLastMic ?? undefined,
                    },
                });
                const _videoTrack = tracks.find(track => track.kind === 'video');
                const _audioTrack = tracks.find(track => track.kind === 'audio');
                if (_videoTrack) {
                    console.log("Running getAudioAndVideoTracks:setVideoTrack");
                    setVideoTrack(_videoTrack as LocalVideoTrack);
                }
                if (_audioTrack) {
                    console.log("Running getAudioAndVideoTracks:setAudioTrack");
                    setAudioTrack(_audioTrack as LocalAudioTrack);
                }
            }
            finally {
                // Always clear the busy flag and release the mutex, even when
                // createLocalTracks rejects (e.g. permission denied).
                setIsAcquiringLocalTracks(false);
                getAudioAndVideoTracksMutex.unlock();
            }
        }, () => {
        });
    };
    return {
        audioTrack,
        videoTrack,
        getLocalVideoTrack,
        getLocalAudioTrack,
        isAcquiringLocalTracks,
        removeLocalVideoTrack,
        getAudioAndVideoTracks,
    };
}
|
#!/bin/sh
# Upload all local HTML man pages to the public manpages directory on sveinbjorn.org.
scp *.html root@sveinbjorn.org:/www/sveinbjorn/html/files/manpages/
|
<filename>src/main/java/com/assist/watchnext/service/UserService.java
package com.assist.watchnext.service;
import com.assist.watchnext.model.User;
import com.assist.watchnext.repository.UserRepository;
import org.passay.CharacterData;
import org.passay.CharacterRule;
import org.passay.EnglishCharacterData;
import org.passay.PasswordGenerator;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.mail.SimpleMailMessage;
import org.springframework.mail.javamail.JavaMailSender;
import org.springframework.security.crypto.password.PasswordEncoder;
import org.springframework.stereotype.Service;
import java.util.Arrays;
import java.util.regex.Pattern;
@Service
public class UserService {
    @Autowired
    private UserRepository repository;
    @Autowired
    private PasswordEncoder passwordEncoder;
    @Autowired
    private JavaMailSender javaMailSender;

    /** Look up a user by e-mail address; null when not found (per repository contract — confirm). */
    public User getUserByEmail(String email) {
        return repository.findByEmail(email);
    }

    /** Hash a plain-text password with the configured encoder. */
    public String encodePassword(String password) {
        return passwordEncoder.encode(password);
    }

    public void save(User user)
    {
        repository.save(user);
    }

    /** Send a simple plain-text e-mail. */
    public void sendEmail(String receiver, String message, String subject) {
        SimpleMailMessage msg = new SimpleMailMessage();
        msg.setTo(receiver);
        msg.setSubject(subject);
        msg.setText(message);
        javaMailSender.send(msg);
    }

    /**
     * Generate a random 10-character password containing at least two
     * lowercase letters, two uppercase letters and two digits.
     */
    public String generatePassayPassword() {
        PasswordGenerator gen = new PasswordGenerator();
        CharacterData lowerCaseChars = EnglishCharacterData.LowerCase;
        CharacterRule lowerCaseRule = new CharacterRule(lowerCaseChars);
        lowerCaseRule.setNumberOfCharacters(2);
        CharacterData upperCaseChars = EnglishCharacterData.UpperCase;
        CharacterRule upperCaseRule = new CharacterRule(upperCaseChars);
        upperCaseRule.setNumberOfCharacters(2);
        CharacterData digitChars = EnglishCharacterData.Digit;
        CharacterRule digitRule = new CharacterRule(digitChars);
        digitRule.setNumberOfCharacters(2);
        return gen.generatePassword(10, Arrays.asList(lowerCaseRule,upperCaseRule,digitRule));
    }

    /**
     * Check a raw password against its stored hash.
     *
     * @param hashed   the stored (encoded) password
     * @param password the raw password supplied by the user
     */
    public boolean isMatching(String hashed, String password) {
        // FIX: PasswordEncoder.matches(rawPassword, encodedPassword) expects
        // the RAW password FIRST. The arguments were previously passed
        // reversed, so valid passwords could never match.
        return passwordEncoder.matches(password, hashed);
    }

    /** Validate an e-mail address against a simple RFC-style pattern. */
    public boolean isValid(String email) {
        Pattern ptr = Pattern.compile("^[\\w!#$%&'*+/=?`{|}~^-]+(?:\\.[\\w!#$%&'*+/=?`{|}~^-]+)*@(?:[a-zA-Z0-9-]+\\.)+[a-zA-Z]{2,6}$");
        // Redundant `? true : false` removed.
        return ptr.matcher(email).matches();
    }
}
|
package cn.airpassport.lib;
import cn.airpassport.lib.mail.LibMail;
import org.junit.Test;
import org.junit.jupiter.api.Assertions;
import static org.junit.jupiter.api.Assertions.assertEquals;
public class LibMailTests
{
    // NOTE(review): this class mixes JUnit 4's @Test with JUnit 5's
    // Assertions; consider migrating fully to org.junit.jupiter.api.Test.
    @Test
    public void testEMailAddressesCase1()
    {
        // Use assertFalse/assertTrue instead of assertEquals(boolean, ...)
        // for clearer intent and failure messages; one consistent
        // Assertions-qualified style throughout.
        Assertions.assertFalse( LibMail.isValidEMail( "CN" ) );
        Assertions.assertFalse( LibMail.isValidEMail( "" ) );
        Assertions.assertFalse( LibMail.isValidEMail( null ) );
        Assertions.assertTrue( LibMail.isValidEMail( "<EMAIL>" ) );
    }
}
|
package flagutil
import (
"flag"
"testing"
"github.com/google/go-cmp/cmp"
)
func TestCommaStringSlice(t *testing.T) {
	// Register a comma-separated string-slice flag on a throwaway flag set.
	flagSet := flag.NewFlagSet("test", flag.ContinueOnError)
	var values []string
	flagSet.Var((*CommaStringSliceFlag)(&values), "ss", "string slice flag")

	// The second occurrence replaces the first: only "d,e,f" survives.
	if err := flagSet.Parse([]string{"-ss", "a,b,c", "-ss", "d,e,f"}); err != nil {
		t.Fatal(err)
	}
	if !cmp.Equal([]string{"d", "e", "f"}, values) {
		t.Fatal("not correct output")
	}
}
func TestRepeatingStringSlice(t *testing.T) {
	flagSet := flag.NewFlagSet("test", flag.ContinueOnError)
	var values []string
	flagSet.Var((*RepeatingStringSliceFlag)(&values), "ss", "string slice flag")

	if err := flagSet.Parse([]string{"-ss", "a,b,c", "-ss", "d,e,f"}); err != nil {
		t.Fatal(err)
	}

	// Each occurrence is kept verbatim; commas are not split.
	if !cmp.Equal(values, []string{"a,b,c", "d,e,f"}) {
		t.Fatal("not correct output")
	}

	// String() renders the accumulated occurrences joined by " AND ".
	visited := false
	flagSet.Visit(func(f *flag.Flag) {
		if got := f.Value.String(); !cmp.Equal(got, "a,b,c AND d,e,f") {
			t.Fatal("not expected", got)
		}
		visited = true
	})
	if !visited {
		t.Fatal("did not parse")
	}
}
|
package dev.rudrecciah.admincore.staffmode.menus;
import dev.rudrecciah.admincore.staffmode.menus.providers.MainProvider;
import fr.minuskube.inv.SmartInventory;
import org.bukkit.entity.Player;
import org.bukkit.event.inventory.InventoryType;
import static dev.rudrecciah.admincore.Main.plugin;
public class MainMenu {
    // Singleton inventory definition for the one-row staff-mode chest menu.
    // NOTE(review): the public field deliberately (?) shadows the class name;
    // renaming it would break external references, so it is documented as-is.
    public static final SmartInventory MainMenu = SmartInventory.builder()
            .provider(new MainProvider())
            .id("staffmodeMain")
            .title("Staffmode Menu")
            .type(InventoryType.CHEST)
            .size(1, 9)
            .closeable(true)
            .manager(plugin.getInvManager())
            .build();

    /** Open the staff-mode menu for the given player. */
    public static void openMenu(Player p) {
        MainMenu.open(p);
    }

    /** Close whatever inventory the player currently has open. */
    public static void closeMenu(Player p) {
        p.closeInventory();
    }
}
|
#!/bin/bash -e
##-------------------------------------------------------------------
## @copyright 2017 DennyZhang.com
## Licensed under MIT
## https://www.dennyzhang.com/wp-content/mit_license.txt
##
## File : docker_pylint.sh
## Author : Denny <contact@dennyzhang.com>
## Description :
## --
## Created : <2017-05-12>
## Updated: Time-stamp: <2017-09-07 21:35:49>
##-------------------------------------------------------------------
code_dir=${1?""}
preinstall_pip_packages=${2-""}
ignore_file_list=${3-""}
image_name="denny/pylintcheck:1.0"
check_filename="/enforce_pylint.py"
current_filename=$(basename "$0")
test_id="${current_filename%.sh}_$$"
container_name="$test_id"
ignore_file="$test_id"
function remove_container() {
container_name=${1?}
if docker ps -a | grep "$container_name" 1>/dev/null 2>&1; then
echo "Destroy container: $container_name"
docker stop "$container_name"; docker rm "$container_name"
fi
}
# Exit-trap handler: report overall pass/fail from the exit status of the
# last command, then clean up the temp ignore file and the test container.
# Must be first in the function so $? still holds the triggering status.
function shell_exit() {
    errcode=$?
    if [ $errcode -eq 0 ]; then
        echo "Test has passed."
    else
        echo "ERROR: Test has failed."
    fi
    echo "Remove tmp file: $ignore_file"
    rm -rf "/tmp/$ignore_file"
    remove_container "$container_name"
    # Re-raise the original status so callers see the true result.
    exit $errcode
}
################################################################################
# Clean up on interruption and on normal exit (trap on 0).
trap shell_exit SIGHUP SIGINT SIGTERM 0

echo "Generate the ignore file for code check"
echo "$ignore_file_list" > "/tmp/$ignore_file"

echo "Start container"
# Remove any leftover container from a previous run, then start a fresh one
# with the target code mounted at /code.
remove_container "$container_name"
docker run -t -d --privileged -v "${code_dir}:/code" --name "$container_name" --entrypoint=/bin/sh "$image_name"

echo "Copy ignore file"
docker cp "/tmp/$ignore_file" "$container_name:/$ignore_file"

echo "Install pip packages before testing"
# Comma-separated list -> space-separated, installed one at a time.
package_list=${preinstall_pip_packages//,/ }
for pip_package in $package_list; do
    echo "pip install $pip_package"
    docker exec -t "$container_name" pip install "$pip_package"
done

# The check's exit status propagates to the trap handler via set -e.
echo "Run code check: python $check_filename --code_dir /code --check_ignore_file /${ignore_file}"
docker exec -t "$container_name" python "$check_filename" --code_dir /code --check_ignore_file "/${ignore_file}"
## File : docker_pylint.sh ends
|
module ONCCertificationG10TestKit
  # Shared helper for initiating a bulk-data group $export kick-off request.
  module ExportKickOffPerformer
    # Sends the asynchronous group-level $export kick-off request to the bulk
    # server. When use_token is true, the test is skipped unless a bearer
    # token is configured, and an Authorization header is attached.
    def perform_export_kick_off_request(use_token: true)
      skip_if use_token && bearer_token.blank?, 'Could not verify this functionality when bearer token is not set'

      request_headers = { accept: 'application/fhir+json', prefer: 'respond-async' }
      request_headers[:authorization] = "Bearer #{bearer_token}" if use_token

      get("Group/#{group_id}/$export", client: :bulk_server, name: :export, headers: request_headers)
    end
  end
end
|
// Copyright (c) 2021-present, <NAME>
// Licensed under the MIT license whose full text can be found at http://opensource.org/licenses/MIT
let parse=exports
const fsp=require("fs").promises
// Emit a CSV report (dataset,pub,error,url) of every error/notice line in
// ../logs.txt, attributed to the dataset most recently being downloaded.
// Cleanup vs. original: removed unused locals (datatype, version, restore)
// and merged the two identical print branches.
parse.run_error = async function () {
    console.log("dataset,pub,error,url")
    let data = await fsp.readFile(__dirname + "/../logs.txt", "utf8")
    let lines = data.split("\n")
    let data_json = await fsp.readFile(__dirname + "/../packages.json", "utf8")
    let j = JSON.parse(data_json)
    let slug
    let url
    let orgid
    // Map dataset slug -> publishing organization id, from packages.json.
    let slugs_to_orgid = {}
    Object.getOwnPropertyNames(j).forEach(function (s) {
        let org = j[s].organization
        if (org && org["name"]) {
            slugs_to_orgid[s] = org["name"]
        }
    })
    for (let line of lines) {
        let aa = line.split(" ")
        if (aa[0] == "Downloading") {
            // "Downloading <slug> ... <url>" establishes the context that
            // subsequent error lines are attributed to.
            slug = aa[1]
            url = aa[3]
            orgid = slugs_to_orgid[slug]
        }
        // Error/notice lines are reported verbatim as the "error" column.
        if (aa[0] == "Converting" || aa[0] == "dflat:" || aa[0] == "curl:" || aa[0] == "restored") {
            console.log(`${slug},${orgid},${line},"${url}"`)
        }
    }
}
// Emit a CSV report (dataset,pub,type,num,url) of the "found N <type>" and
// "restored N <type>" counts in ../logs.txt.
// BUG FIX: `url` and `num` were never declared in the original and leaked as
// implicit globals (a ReferenceError under strict mode / ES modules); both
// are now proper locals. Unused locals (count, restore) removed.
parse.run_count = async function () {
    console.log("dataset,pub,type,num,url")
    let data = await fsp.readFile(__dirname + "/../logs.txt", "utf8")
    let lines = data.split("\n")
    let data_json = await fsp.readFile(__dirname + "/../packages.json", "utf8")
    let j = JSON.parse(data_json)
    let slug
    let url
    let num
    let datatype
    let orgid
    // Map dataset slug -> publishing organization id, from packages.json.
    let slugs_to_orgid = {}
    Object.getOwnPropertyNames(j).forEach(function (s) {
        let org = j[s].organization
        if (org && org["name"]) {
            slugs_to_orgid[s] = org["name"]
        }
    })
    for (let line of lines) {
        let aa = line.split(" ")
        if (aa[0] == "Downloading") {
            slug = aa[1]
            url = aa[3]
            orgid = slugs_to_orgid[slug]
        }
        if (aa[0] == "found" || aa[0] == "restored") {
            num = aa[1]
            datatype = aa[2]
            console.log(`${slug},${orgid},${datatype},${num},"${url}"`)
        }
    }
}
|
<gh_stars>0
package com.leetcode;
import junit.framework.TestCase;
public class Solution_405Test extends TestCase {
    /**
     * Verifies toHex against known values. The original test only printed the
     * result and asserted nothing, so it could never fail.
     */
    public void testToHex() {
        Solution_405 solution_405 = new Solution_405();
        // -1 is 0xFFFFFFFF in 32-bit two's complement (LeetCode 405 expects
        // lowercase hex with no leading zeros).
        assertEquals("ffffffff", solution_405.toHex(-1));
        assertEquals("1a", solution_405.toHex(26));
        assertEquals("0", solution_405.toHex(0));
    }
}
|
/*
* Copyright 2014-2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language
* governing permissions and limitations under the License.
*/
package org.dbflute.s2dao.identity;
import java.sql.Connection;
import java.sql.SQLException;
import java.sql.Statement;
import javax.sql.DataSource;
import org.dbflute.bhv.exception.SQLExceptionResource;
import org.dbflute.jdbc.StatementFactory;
import org.dbflute.s2dao.sqlhandler.TnBasicUpdateHandler;
/**
 * Handler that executes an identity-adjustment SQL (e.g. toggling identity
 * insert) as a plain JDBC Statement rather than a PreparedStatement.
 * @author modified by jflute (originated in S2Dao)
 */
public class TnIdentityAdjustmentSqlHandler extends TnBasicUpdateHandler {

    /**
     * @param dataSource The data source to borrow connections from.
     * @param statementFactory The factory of JDBC statements.
     * @param sql The identity-adjustment SQL to execute.
     */
    public TnIdentityAdjustmentSqlHandler(DataSource dataSource, StatementFactory statementFactory, String sql) {
        super(dataSource, statementFactory, sql);
    }

    /**
     * Executes the fixed identity-adjustment SQL; arguments are only logged.
     * @return The updated count of the statement execution.
     */
    @Override
    protected Object doExecute(Connection conn, Object[] args, Class<?>[] argTypes) {
        logSql(args, argTypes);
        Statement st = null;
        try {
            // PreparedStatement is not used here
            // because SQLServer do not work by PreparedStatement
            // but it do work well by Statement
            st = conn.createStatement();
            return st.executeUpdate(_sql);
        } catch (SQLException e) {
            final SQLExceptionResource resource = createSQLExceptionResource();
            resource.setNotice("Failed to execute the SQL to adjust identity.");
            handleSQLException(e, resource);
            return 0; // unreachable
        } finally {
            close(st);
        }
    } // fix: removed the stray ';' that followed the method body
}
|
#!/usr/bin/env python
import click as ck
import numpy as np
import pandas as pd
from tensorflow.keras.models import load_model
from subprocess import Popen, PIPE
import time
from utils import Ontology
from aminoacids import to_onehot
# Maximum sequence length considered by the model pipeline.
MAXLEN = 2000


@ck.command()
@ck.option(
    '--filters-file', '-ff', default='data/filters.txt',
    help='File with filters')
@ck.option(
    '--interpro-file', '-if', default='data/Pfam-A.seed',
    help='InterPRO domain sequences')
def main(filters_file, interpro_file):
    """Load the filter list (one filter per line) from ``filters_file``.

    Bug fixes vs. the original:
    - ``main`` now accepts ``interpro_file``: click passes one keyword
      argument per declared ``--option``, so the old single-parameter
      signature raised ``TypeError`` on every invocation.
    - the ``if __name__ == '__main__'`` guard was indented inside ``main``
      (dead code); it now lives at module level so the command actually runs.
    - the filters file is opened with a context manager to avoid leaking
      the file handle.
    """
    with open(filters_file) as f:
        filters = f.read().splitlines()
    # NOTE(review): interpro_file is accepted but not yet used here —
    # presumably consumed by logic beyond this chunk; confirm.


if __name__ == '__main__':
    main()
|
// Re-export the ManagementTabs component as this directory's default export.
export { default } from "./ManagementTabs";
|
'use strict';

// Count from the most recent greaterThan() call; kept for the
// console.log below, which reads it after the call.
var total = 0;

/**
 * Count how many elements of myDynamicArr are strictly greater than num.
 * BUG FIX: the original incremented the module-level `total` without ever
 * resetting it, so repeated calls accumulated counts across invocations
 * (the commented-out experiments in the original show the confusion).
 * @param {number[]} myDynamicArr - values to inspect
 * @param {number} num - threshold to compare against
 * @returns {number} the count for this call only
 */
function greaterThan(myDynamicArr, num) {
  var count = 0;
  for (var i = 0; i < myDynamicArr.length; i++) {
    if (myDynamicArr[i] > num) {
      count++;
    }
  }
  total = count; // keep the global in step for code that reads it afterwards
  return count;
}

greaterThan([2, 4, 8], 8);
console.log(total);
|
# == Schema Information
#
# Table name: places
#
# id :integer not null, primary key
# code :string(255) not null
# name :string(255) not null
# created_at :datetime not null
# updated_at :datetime not null
# state :boolean
# active :boolean
# blurb :text(65535)
#
# A donation location identified by a unique code and a display name.
class Place < ApplicationRecord
  has_many :donations

  validates :code, presence: true, uniqueness: true
  validates :name, presence: true

  # Short textual representation of this place: its unique code.
  def repr
    code
  end
end
|
#!/bin/sh
# (fix: the first line was "#/bin/sh" — missing the "!", so it was an
# ordinary comment and the script ran under whatever shell invoked it.)

# Build docs_source.
bazel build //ll:docs
chmod 644 bazel-bin/ll/*.md
cp bazel-bin/ll/*.md docs_source

# Rebuild the Sphinx documentation.
# -rf instead of -rd: do not error when ./docs does not exist yet.
rm -rf docs
sphinx-build -b html docs_source docs

# Rerun the pre-commit hooks so that we do not need to stage everything twice.
git add docs
pre-commit run --all-files
git add docs

# Technically unnecessary, but we want to show users whether all tests pass.
echo ""
echo "**************************"
echo "RERUNNING PRE-COMMIT HOOKS"
echo "**************************"
echo ""
pre-commit run --all-files
|
<reponame>jelly/patternfly-react
import React from 'react';
import { render } from '@testing-library/react';
import { SelectGroup } from '../SelectGroup';
import { SelectProvider } from '../selectConstants';
describe('SelectGroup', () => {
  // Snapshot test: SelectGroup requires a SelectProvider context, so supply
  // one with no-op callbacks and minimal config, then pin the rendered DOM.
  test('renders with children successfully', () => {
    const { asFragment } = render(
      <SelectProvider
        value={{
          onSelect: () => {},
          onFavorite: () => {},
          onClose: () => {},
          variant: 'single',
          inputIdPrefix: '',
          shouldResetOnSelect: true
        }}
      >
        <SelectGroup label="test">
          <div>child</div>
        </SelectGroup>
      </SelectProvider>
    );
    expect(asFragment()).toMatchSnapshot();
  });
});
|
import time
import requests
def monitor_health_check(url: str, interval: int, timeout: float = 10.0) -> None:
    """Poll ``url`` every ``interval`` seconds until a health check fails.

    A check fails (printing a message and ending the loop) when the response
    status is not 200, the 200-response body does not contain ``"ok"``, or
    the request raises a :class:`requests.RequestException`.

    :param url: health-check endpoint to poll
    :param interval: seconds to sleep between successful checks
    :param timeout: per-request timeout in seconds (new, backward-compatible
        parameter; without it the original could block forever on an
        unresponsive server)
    """
    while True:
        try:
            response = requests.get(url, timeout=timeout)
            if response.status_code == 200:
                if "ok" not in response.text:
                    print("Health check failed: Service is not OK")
                    break
            else:
                print(f"Health check failed: Unexpected status code {response.status_code}")
                break
        except requests.RequestException as e:
            print(f"Health check failed: {e}")
            break
        time.sleep(interval)


# Example usage; guarded so merely importing this module does not start an
# endless polling loop (the original called it unconditionally on import).
if __name__ == "__main__":
    monitor_health_check("http://localhost:8080/healthcheck", 60)  # every 60s
|
#!/bin/sh
# Scrape each guild member's profile stats from swgoh.gg and write them as
# time-series points into a local InfluxDB ("swgoh" database). Every point
# in one run shares a single timestamp so they align in queries.
set -e
set -u

debug=0                   # 1 = echo the curl commands instead of running them
now=$(date -u +"%s%N")    # nanosecond timestamp shared by all data points
guild="guild-name"
count=0
# The member loop below runs inside a pipeline subshell, so its results
# cannot reach the parent shell through variables; these temp files carry
# them out instead (see the "hacky" count handling near the end).
hacky=$(mktemp)
ships=$(mktemp)
toons=$(mktemp)

if [ $debug -eq 1 ]; then
    curl="/bin/echo -- curl"
else
    curl="/usr/bin/curl"
fi

# Fetch the guild roster page and extract each member's profile URL.
wget -q -O- 'guild-swgoh.gg-url' | \
    egrep -o 'href="/u/.+"' | \
    sed -e 's#href="#https://swgoh.gg#' -e 's#"##g' | \
    while read url; do
        sleep 1
        # Profile URLs look like https://swgoh.gg/u/<user>/ — field 5 is the name.
        user=$(echo $url | awk -F/ '{print $5}')
        /path/to/swgoh.py ${user}
        sleep 1
        # Scrape the profile page's "pull-right" stat cells down to
        # "<stat name> <value>" lines (commas stripped from numbers).
        wget -q -O- "${url}" | \
            grep 'class="pull-right"' | \
            awk -F\> '{print $2 "#" $3}' | \
            awk -F\< '{print $1 "#" $2}' | \
            awk -F\# '{print $1 " " $3}' | \
            sed 's/,//g' | \
            egrep -v '^Guild $|^Joined|^Ally' | \
            while read stat; do
                # Map each scraped stat line to an InfluxDB field/measurement.
                # '?' matches the single-character separators in the scraped text.
                case $stat in
                    Galactic?Power??Characters?*)
                        stat_name="character-gp"
                        stat_value=$(echo $stat | awk '{print $4}')
                        measure="galactic-power"
                        # Stash character GP for the total-GP sum below.
                        echo ${stat_value} > ${toons}
                        ;;
                    Galactic?Power??Ships?*)
                        stat_name="ship-gp"
                        stat_value=$(echo $stat | awk '{print $4}')
                        measure="galactic-power"
                        # Stash ship GP for the total-GP sum below.
                        echo ${stat_value} > ${ships}
                        ;;
                    PVE?Battles?Won*)
                        stat_name="pve"
                        stat_value=$(echo $stat | awk '{print $4}')
                        measure="battles"
                        ;;
                    PVE?Hard?Battles?Won*)
                        stat_name="pve-hard"
                        stat_value=$(echo $stat | awk '{print $5}')
                        measure="battles"
                        ;;
                    Galactic?War?Battles?Won?*)
                        stat_name="gw"
                        stat_value=$(echo $stat | awk '{print $5}')
                        measure="battles"
                        ;;
                    Arena?Battles?Won*)
                        stat_name="arena"
                        stat_value=$(echo $stat | awk '{print $4}')
                        measure="battles"
                        ;;
                    Ship?Battles?Won*)
                        stat_name="ship-battles"
                        stat_value=$(echo $stat | awk '{print $4}')
                        measure="battles"
                        ;;
                    *)
                        # Any other stat line is ignored.
                        continue
                        ;;
                esac
                # One InfluxDB point per stat, tagged with guild and user.
                ${curl} -s -XPOST 'http://localhost:8086/write?db=swgoh' --data-binary "${measure},guild=${guild},user=${user} ${stat_name}=${stat_value} ${now}"
            done
        # Total GP = ship GP + character GP, read back from the temp files
        # written by the inner (subshell) loop.
        tot_gp=$(($(awk '{print $1}' ${ships})+$(awk '{print $1}' ${toons})))
        ${curl} -s -XPOST 'http://localhost:8086/write?db=swgoh' --data-binary "galactic-power,guild=${guild},user=${user} total_gp=${tot_gp} ${now}"
        # Track the member count in a file: this loop is a subshell, so a
        # plain variable increment would be lost when the pipeline ends.
        count=$((${count}+1))
        echo "count ${count}" > ${hacky}
    done

# Recover the member count written by the subshell and publish it.
count=$(grep ^count ${hacky} | awk '{print $2}')
${curl} -s -XPOST 'http://localhost:8086/write?db=swgoh' --data-binary "users,guild=${guild} count=${count} ${now}"

rm -f ${hacky}
rm -f ${ships}
rm -f ${toons}
exit 0
|
<gh_stars>0
#include "gameAction.hpp"
using namespace odfaeg::network;
using namespace odfaeg::core;
namespace sorrok {
    // Asks the server (request "CANUSE*<heroId>*<itemType>") whether the hero
    // may use this item; applies the item's effect locally only on "OK".
    void GameAction::operator()(Item& item, Hero* hero) {
        SymEncPacket packet;
        std::string request = "CANUSE*"+conversionIntString(hero->getId())+"*"+conversionIntString(item.getType());
        packet<<request;
        Network::sendTcpPacket(packet);
        // NOTE(review): appears to block until the server's CANUSE reply
        // arrives — confirm callers tolerate this synchronous wait.
        std::string response = Network::waitForLastResponse("CANUSE");
        if (response == "OK") {
            item.applyBehaviour();
        }
    }
    // Same round-trip for skills ("CANLAUNCH*<heroId>*<skillName>"): the
    // skill's behaviour is applied only when the server answers "OK".
    void GameAction::operator()(Skill& skill, Hero* hero) {
        SymEncPacket packet;
        std::string request = "CANLAUNCH*"+conversionIntString(hero->getId())+"*"+skill.getName();
        packet<<request;
        Network::sendTcpPacket(packet);
        std::string response = Network::waitForLastResponse("CANLAUNCH");
        std::cout<<"response : "<<response<<std::endl;
        if (response == "OK") {
            std::cout<<"apply skill behaviour"<<std::endl;
            skill.applySkillBehaviour();
            std::cout<<"skill behaviour updated"<<std::endl;
        }
    }
}
|
#!/bin/bash -l
#PBS -l walltime=23:59:00,nodes=1:ppn=24:gpus=2,mem=16gb
#PBS -m abe
#PBS -N 120019732_pgml_sparse
#PBS -o 120019732_pgml_sparse.stdout
#PBS -q k40
# PBS job: train the sparse PGDL model for site id 120019732 on one k40
# node (24 cores, 2 GPUs, 16 GB RAM, ~24 h walltime); mail on abort/begin/end.

# Load site-specific environment, then the conda env with the dependencies.
source takeme_source.sh
source activate mtl_env

python train_PGDL_custom_sparse.py 120019732
|
import EmberObject from '@ember/object';
import FormSubmissionUtilsMixin from 'ember-cli-text-support-mixins/mixins/form-submission-utils';
import { module, test } from 'qunit';
module('Unit | Mixin | form submission utils', function() {
  // Smoke test: the mixin can be applied to a plain EmberObject and an
  // instance can be created without error.
  test('it works', function (assert) {
    let FormSubmissionUtilsObject = EmberObject.extend(FormSubmissionUtilsMixin);
    let subject = FormSubmissionUtilsObject.create();
    assert.ok(subject);
  });
});
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.