text stringlengths 1 1.05M |
|---|
/**
 * Returns the first character that occurs exactly once in {@code s},
 * or '\0' when no such character exists (including for the empty string).
 *
 * Fix: the original indexed a 26-slot array with (c - 'a'), which threw
 * ArrayIndexOutOfBoundsException for any character outside 'a'..'z'
 * (uppercase letters, spaces, digits) — including the snippet's own
 * example input "Hello World".
 */
public static char uniqueChar(String s) {
    // One counter per possible char value, so any BMP character is safe.
    int[] counts = new int[Character.MAX_VALUE + 1];
    for (int i = 0; i < s.length(); i++) {
        counts[s.charAt(i)]++;
    }
    for (int i = 0; i < s.length(); i++) {
        if (counts[s.charAt(i)] == 1) {
            return s.charAt(i);
        }
    }
    return '\0';
}
char c = uniqueChar("Hello World");
System.out.println(c); // 'H' |
#!/bin/bash
# Run the ImageJ AddPoissonNoiseMap filter plugin in Docker, binding the
# repository's ../../../data directory to /data inside the container.
version=$(<VERSION)
datapath=$(readlink --canonicalize ../../../data)
# Inputs (paths are container-side, under the /data bind mount)
opName='AddPoissonNoiseMap'
inpDir=/data/input
outDir=/data/output
# Note: the original also set an unused duplicate `out=/data/output`; removed.
# Expansions are quoted so paths containing spaces cannot word-split.
docker run --mount type=bind,source="${datapath}",target=/data/ \
            polusai/imagej-filter-addpoissonnoise-plugin:"${version}" \
            --opName "${opName}" \
            --inpDir "${inpDir}" \
            --outDir "${outDir}"
|
//
// UserLocationAnnotationExample.h
// Examples
//
// Created by <NAME> on 4/21/17.
// Copyright © 2017 Mapbox. All rights reserved.
//
#import <UIKit/UIKit.h>
// Example view controller (no additional public API beyond UIViewController).
// NOTE(review): the behavior lives in the corresponding .m file, which is not
// visible here — presumably it demonstrates a custom user-location annotation.
@interface UserLocationAnnotationExample : UIViewController
@end
|
#!/bin/sh -l
# Run the `ec` checker, print its report, expose it as a step output, and
# exit with the checker's own status code.
output=$(ec)
# Capture immediately; renamed from `exit` (shadowing the builtin's name).
exit_code=$?
echo "$output"
# ::set-output is deprecated by GitHub but kept for consumer compatibility.
# $output is deliberately unquoted: the workflow command must be one line, so
# word splitting collapses any newlines to spaces.
echo ::set-output name=output::$output
exit "$exit_code"
|
#!/bin/bash
# Build melody training (and optional eval) datasets for Magenta from a
# TFRecord of NoteSequence protos.
# TFRecord file containing NoteSequence protocol buffers from convert_midi_dir_to_note_sequences.py.
SEQUENCES_TFRECORD=/tmp/notesequences.tfrecord
# TFRecord file that TensorFlow's SequenceExample protos will be written to. This is the training dataset.
TRAIN_DATA=/tmp/training_melodies.tfrecord
# Optional evaluation dataset. Also, a TFRecord file containing SequenceExample protos.
EVAL_DATA=/tmp/evaluation_melodies.tfrecord
# Fraction of input data that will be written to the eval dataset (if eval_output flag is set).
EVAL_RATIO=0.10
# Name of the encoder to use. See magenta/lib/encoders.py.
ENCODER=basic_one_hot_encoder
# Expansions quoted so paths with spaces cannot word-split.
bazel run //magenta:convert_sequences_to_melodies -- \
  --input="$SEQUENCES_TFRECORD" \
  --train_output="$TRAIN_DATA" \
  --eval_output="$EVAL_DATA" \
  --eval_ratio="$EVAL_RATIO" \
  --encoder="$ENCODER"
|
#!/bin/bash
# Copyright (c) 2016 The DigiByte Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
### This script attempts to download the signature file SHA256SUMS.asc from
### digibytecore.org and digibyte.org and compares them.
### It first checks if the signature passes, and then downloads the files specified in
### the file, and checks if the hashes of these files match those that are specified
### in the signature file.
### The script returns 0 if everything passes the checks. It returns 1 if either the
### signature check or the hash check doesn't pass. If an error occurs the return value is 2
# Remove each argument as a file, silently ignoring missing ones.
# Fix: iterate over "$@" (quoted) instead of the original unquoted $*, so
# filenames containing whitespace are removed correctly.
function clean_up {
    for file in "$@"
    do
        rm "$file" 2> /dev/null
    done
}
# Fixed configuration for the verification run.
WORKINGDIR="/tmp/digibyte_verify_binaries"
TMPFILE="hashes.tmp"
SIGNATUREFILENAME="SHA256SUMS.asc"
RCSUBDIR="test"
HOST1="https://digibytecore.org"
# NOTE(review): the file header mentions digibyte.org, but HOST2 points at
# digibyte.io — confirm which mirror is actually intended.
HOST2="https://digibyte.io"
BASEDIR="/bin/"
VERSIONPREFIX="digibyte-core-"
RCVERSIONSTRING="rc"
if [ ! -d "$WORKINGDIR" ]; then
mkdir "$WORKINGDIR"
fi
cd "$WORKINGDIR" || exit 1
#test if a version number has been passed as an argument
if [ -n "$1" ]; then
#let's also check if the version number includes the prefix 'digibyte-',
# and add this prefix if it doesn't
if [[ $1 == "$VERSIONPREFIX"* ]]; then
VERSION="$1"
else
VERSION="$VERSIONPREFIX$1"
fi
# Strip the final "-suffix"; used below to decide whether the argument also
# carried an rc marker and/or a platform name.
STRIPPEDLAST="${VERSION%-*}"
#now let's see if the version string contains "rc" or a platform name (e.g. "osx")
if [[ "$STRIPPEDLAST-" == "$VERSIONPREFIX" ]]; then
BASEDIR="$BASEDIR$VERSION/"
else
# let's examine the last part to see if it's rc and/or platform name
STRIPPEDNEXTTOLAST="${STRIPPEDLAST%-*}"
if [[ "$STRIPPEDNEXTTOLAST-" == "$VERSIONPREFIX" ]]; then
# Exactly one suffix present: classify it as either the rc marker or a platform.
LASTSUFFIX="${VERSION##*-}"
VERSION="$STRIPPEDLAST"
if [[ $LASTSUFFIX == *"$RCVERSIONSTRING"* ]]; then
RCVERSION="$LASTSUFFIX"
else
PLATFORM="$LASTSUFFIX"
fi
else
# Two suffixes present: next-to-last is the rc marker, last is the platform.
RCVERSION="${STRIPPEDLAST##*-}"
PLATFORM="${VERSION##*-}"
VERSION="$STRIPPEDNEXTTOLAST"
fi
BASEDIR="$BASEDIR$VERSION/"
# Release candidates live under a "test.<rc>" subdirectory on the server.
if [[ $RCVERSION == *"$RCVERSIONSTRING"* ]]; then
BASEDIR="$BASEDIR$RCSUBDIR.$RCVERSION/"
fi
fi
else
echo "Error: need to specify a version on the command line"
exit 2
fi
#first we fetch the file containing the signature
WGETOUT=$(wget -N "$HOST1$BASEDIR$SIGNATUREFILENAME" 2>&1)
#and then see if wget completed successfully
if [ $? -ne 0 ]; then
echo "Error: couldn't fetch signature file. Have you specified the version number in the following format?"
echo "[$VERSIONPREFIX]<version>-[$RCVERSIONSTRING[0-9]] (example: ${VERSIONPREFIX}0.10.4-${RCVERSIONSTRING}1)"
echo "wget output:"
echo "$WGETOUT"|sed 's/^/\t/g'
exit 2
fi
# Fetch the same signature file from the second host for cross-checking.
WGETOUT=$(wget -N -O "$SIGNATUREFILENAME.2" "$HOST2$BASEDIR$SIGNATUREFILENAME" 2>&1)
if [ $? -ne 0 ]; then
# NOTE(review): message names digibyte.org/digibytecore.org while HOST2 is
# digibyte.io — confirm the intended host names.
echo "digibyte.org failed to provide signature file, but digibytecore.org did?"
echo "wget output:"
echo "$WGETOUT"|sed 's/^/\t/g'
clean_up $SIGNATUREFILENAME
exit 3
fi
# The two mirrors must serve byte-identical signature files.
SIGFILEDIFFS="$(diff $SIGNATUREFILENAME $SIGNATUREFILENAME.2)"
if [ "$SIGFILEDIFFS" != "" ]; then
echo "digibyte.org and digibytecore.org signature files were not equal?"
clean_up $SIGNATUREFILENAME $SIGNATUREFILENAME.2
exit 4
fi
#then we check it
GPGOUT=$(gpg --yes --decrypt --output "$TMPFILE" "$SIGNATUREFILENAME" 2>&1)
#return value 0: good signature
#return value 1: bad signature
#return value 2: gpg error
# gpg's status is captured once, immediately, so later commands can't clobber it.
RET="$?"
if [ $RET -ne 0 ]; then
if [ $RET -eq 1 ]; then
#and notify the user if it's bad
echo "Bad signature."
elif [ $RET -eq 2 ]; then
#or if a gpg error has occurred
echo "gpg error. Do you have the DigiByte Core binary release signing key installed?"
fi
echo "gpg output:"
echo "$GPGOUT"|sed 's/^/\t/g'
clean_up $SIGNATUREFILENAME $SIGNATUREFILENAME.2 $TMPFILE
exit "$RET"
fi
# Optionally restrict verification to files matching the requested platform.
if [ -n "$PLATFORM" ]; then
grep $PLATFORM $TMPFILE > "$TMPFILE-plat"
TMPFILESIZE=$(stat -c%s "$TMPFILE-plat")
if [ $TMPFILESIZE -eq 0 ]; then
echo "error: no files matched the platform specified" && exit 3
fi
mv "$TMPFILE-plat" $TMPFILE
fi
#here we extract the filenames from the signature file
FILES=$(awk '{print $2}' "$TMPFILE")
#and download these one by one
for file in $FILES
do
echo "Downloading $file"
wget --quiet -N "$HOST1$BASEDIR$file"
done
#check hashes
DIFF=$(diff <(sha256sum $FILES) "$TMPFILE")
# Fix: capture diff's exit status exactly once. The original re-tested $?
# inside the elif, by which point $? held the result of the preceding
# [ ] command — so diff errors (exit status > 1) were silently ignored.
DIFFRET=$?
if [ $DIFFRET -eq 1 ]; then
echo "Hashes don't match."
echo "Offending files:"
echo "$DIFF"|grep "^<"|awk '{print "\t"$3}'
exit 1
elif [ $DIFFRET -gt 1 ]; then
echo "Error executing 'diff'"
exit 2
fi
# A second command-line argument (any non-empty value) requests deletion of
# the downloaded binaries after successful verification.
if [ -n "$2" ]; then
echo "Clean up the binaries"
clean_up $FILES $SIGNATUREFILENAME $SIGNATUREFILENAME.2 $TMPFILE
else
echo "Keep the binaries in $WORKINGDIR"
clean_up $TMPFILE
fi
echo -e "Verified hashes of \n$FILES"
exit 0
|
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -e -x

## 1. Provision the conda-based Python environments.
dev/lint-python.sh -s py_env
PY_ENV_DIR=`pwd`/dev/.conda/envs
py_env=("3.5" "3.6" "3.7")

## 2. Install build dependencies into every environment.
for version in "${py_env[@]}"; do
  ${PY_ENV_DIR}/${version}/bin/pip install -r dev/dev-requirements.txt
done

## 3. Build one wheel per environment.
for version in "${py_env[@]}"; do
  if [[ "$(uname)" != "Darwin" ]]; then
    # On Linux, force the compiler to bind against older glibc symbols so the
    # resulting wheel stays broadly compatible.
    export CFLAGS="-I. -include dev/glibc_version_fix.h"
  fi
  ${PY_ENV_DIR}/${version}/bin/python setup.py bdist_wheel
done

## 4. On Linux, convert the linux_x86_64 wheels to manylinux1.
if [[ "$(uname)" != "Darwin" ]]; then
  source `pwd`/dev/.conda/bin/activate
  # patchelf + auditwheel perform the actual wheel repair.
  conda install -c conda-forge patchelf=0.11 -y
  pip install auditwheel==3.2.0
  for wheel_file in dist/*.whl; do
    auditwheel repair ${wheel_file} -w dist
    rm -f ${wheel_file}
  done
  source deactivate
fi

## Show what was produced.
ls -al dist/
|
#!/bin/sh
# Emit a Nix fetchurl attribute for every <Pack .../> element found in the
# given .gitdeps.xml manifest, keyed by the pack's content hash. Output is a
# fragment of a Nix attribute set written to stdout.
go() {
file="$1"
# Split command-substitution output on newlines only.
# NOTE(review): $'\n' is not POSIX sh — this works only when sh is actually
# bash/ksh; confirm the target shell for the #!/bin/sh shebang.
IFS=$'\n'
for pack in $(perl -n -e '/(<Pack .*\/>)/ && print "$1\n"' $file); do
remotepath=$(echo "$pack" | perl -n -e '/RemotePath="([^"]*)"/ && print $1')
hash=$(echo "$pack" | perl -n -e '/Hash="([^"]*)"/ && print $1')
url="http://cdn.unrealengine.com/dependencies/$remotepath/$hash"
# Retry until nix-prefetch-url succeeds (e.g. across transient network errors).
until sha256=$(nix-prefetch-url $url --type sha256); do
true
done
cat <<EOF
"$hash" = fetchurl {
url = $url;
sha256 = "$sha256";
};
EOF
done
}
# Emit the head of the generated Nix expression, one entry per dependency
# pack from each manifest, then the closing brace.
cat <<EOF
{ fetchurl }:
{
EOF
go Engine/Build/Commit.gitdeps.xml
go Engine/Build/Promoted.gitdeps.xml
cat <<EOF
}
EOF
|
<reponame>jmetzen/skgp
#!/usr/bin/python
# -*- coding: utf-8 -*-
r"""
==========================================================
Comparing Bayesian Inference and MAP estimation
==========================================================
"""
print(__doc__)
# Author: <NAME> <<EMAIL>>
# Licence: BSD 3 clause
import numpy as np
import matplotlib.pyplot as plt
from sklearn.learning_curve import learning_curve
from skgp.estimators import GaussianProcess, BayesianGaussianProcess
np.random.seed(1)
def f(X):
    """Target function for GPR: tanh(2*x0 - x1 - x2) per row of X.

    Only the first three columns of X contribute; column X[:, 3] is
    deliberately irrelevant so ARD can learn to ignore it. Furthermore,
    x, x + alpha*(1, 2, 0) and x + beta*(1, 0, 2) (in the first three
    coordinates) map to the same value for all alpha and beta, which FAD
    can exploit.
    """
    signal = 2 * X[:, 0] - X[:, 1] - X[:, 2]
    return np.tanh(signal)
# 100 training points uniformly drawn from [-1, 1]^6.
Xtrain = np.random.random((100, 6)) * 2 - 1
ytrain = f(Xtrain)

plt.figure()
colors = ['r', 'g', 'b', 'c', 'm']
labels = {True: "Bayesian GP", False: "Standard GP"}
for i, bayesian in enumerate(labels.keys()):
    # Base MAP estimator with 12 hyperparameters and broad bounds.
    model = GaussianProcess(corr='squared_exponential',
                            theta0=[1.0] * 12, thetaL=[1e-4] * 12,
                            thetaU=[1e2] * 12)
    if bayesian:
        # Wrap the MAP model with posterior sampling.
        model = BayesianGaussianProcess(model, n_posterior_samples=25,
                                        n_burnin=250, n_sampling_steps=25)
    train_sizes, train_scores, test_scores = \
        learning_curve(model, Xtrain, ytrain, scoring="mean_squared_error",
                       cv=10, n_jobs=1)
    # learning_curve reports negated MSE; flip the sign back.
    test_scores = -test_scores
    mean_curve = np.mean(test_scores, axis=1)
    lower = np.min(test_scores, axis=1)
    upper = np.max(test_scores, axis=1)
    plt.plot(train_sizes, mean_curve, label=labels[bayesian],
             color=colors[i])
    plt.fill_between(train_sizes, lower, upper,
                     alpha=0.2, color=colors[i])
plt.legend(loc="best")
plt.title("Learning Curve")
plt.xlabel("Training examples")
plt.ylabel("Mean Squared Error")
plt.yscale("symlog", linthreshy=1e-10)
plt.show()
|
// return a random item from an array
export default (arr = []) => {
if (arr.length === 0) return -1;
return (arr.length * Math.random()) << 0;
} |
app.controller('HomeController', function ($scope, $http, $timeout) {
    $scope.posts = [];
    $scope.loading = true;
    $scope.errorLoading = false;

    // Load the post list.
    // Fix: the original assigned the $http promise itself to $scope.posts
    // ($scope.posts = $http(...)), replacing the array with a promise object
    // until the request settled; ng-repeat cannot iterate a promise.
    $http({
        method: 'GET',
        url: '/Clone/public/post/getPosts'
    })
        .then(function (response) {
            $scope.posts = response.data;
            $scope.loading = false;
        })
        .catch(function (err) {
            $scope.errorLoading = true;
            $scope.loading = false;
        });

    // Like the given post and sync the local copy from the server's response.
    $scope.likePost = function (id) {
        $http({
            method: 'POST',
            url: '/Clone/public/like/like',
            data: {id: id}
        })
            .then(function (response) {
                for (var i = 0; i < $scope.posts.length; i++) {
                    var post = $scope.posts[i];
                    if (post.id == id) {
                        $scope.posts[i].liked = response.data.liked;
                        $scope.posts[i].likesCount = response.data.likesCount;
                        return;
                    }
                }
            })
            .catch(function (error) {
                alert(error);
            });
    };
});
<gh_stars>0
/*
* This file is part of Visual Illusions Minecraft Plugin Base Library.
*
* Copyright © 2013-2015 Visual Illusions Entertainment
*
* Visual Illusions Minecraft Plugin Base Library is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License,
* or (at your option) any later version.
*
* Visual Illusions Minecraft Plugin Base Library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License along with Visual Illusions Minecraft Plugin Base Library.
* If not, see http://www.gnu.org/licenses/lgpl.html.
*/
package net.visualillusionsent.minecraft.plugin.canary;
import net.canarymod.chat.MessageReceiver;
import net.visualillusionsent.minecraft.plugin.ModMessageReceiver;
/**
* Canary Message Receiver
*
* @author Jason (darkdiplomat)
*/
public class CanaryMessageReceiver implements ModMessageReceiver<MessageReceiver> {

    /** Wrapped Canary receiver that every call is delegated to. */
    private final MessageReceiver receiver;

    public CanaryMessageReceiver(MessageReceiver receiver) {
        this.receiver = receiver;
    }

    /** Forwards the message to the wrapped receiver. */
    @Override
    public final void message(String message) {
        receiver.message(message);
    }

    /** @return the wrapped receiver's name */
    @Override
    public final String getName() {
        return receiver.getName();
    }

    /** @return the underlying Canary {@link MessageReceiver} */
    @Override
    public final MessageReceiver unwrap() {
        return receiver;
    }

    @Override
    public final int hashCode() {
        return receiver.hashCode();
    }

    /**
     * Equal when the other object is a CanaryMessageReceiver wrapping an
     * equal receiver, or is itself a MessageReceiver equal to the wrapped one.
     */
    @Override
    public final boolean equals(Object obj) {
        if (obj instanceof CanaryMessageReceiver) {
            return receiver.equals(((CanaryMessageReceiver) obj).unwrap());
        }
        if (obj instanceof MessageReceiver) {
            return receiver.equals((MessageReceiver) obj);
        }
        return false;
    }

    @Override
    public final String toString() {
        return "CanaryMessageReceiver{" + receiver.toString() + "}";
    }
}
|
import React, { Component } from 'react';
import './App.css';
class App extends Component {
state = {
users: []
}
componentDidMount() {
const data = {
"users": [
{
"name": "Alice",
"score": 100
},
{
"name": "Bob",
"score": 90
},
{
"name": "Charlie",
"score": 80
},
{
"name": "David",
"score": 70
},
{
"name": "Eve",
"score": 60
},
{
"name": "Frank",
"score": 50
},
{
"name": "Grace",
"score": 40
},
{
"name": "Heidi",
"score": 30
},
{
"name": "Ivan",
"score": 20
},
{
"name": "Jenna",
"score": 10
}
]
};
this.setState({
users: data.users
});
}
render() {
const { users } = this.state;
return (
<div className="App">
<h1>Leaderboard</h1>
<ul>
{ users.map((user, index) => (
<li key={index}>
{user.name} - Score: {user.score}
</li>
))}
</ul>
</div>
);
}
}
export default App; |
package org.sunbird.learner.util;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.math.BigInteger;
import java.util.*;
import java.util.stream.Collectors;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.collections.MapUtils;
import org.apache.commons.lang3.StringUtils;
import org.sunbird.actor.background.BackgroundOperations;
import org.sunbird.cassandra.CassandraOperation;
import org.sunbird.common.ElasticSearchHelper;
import org.sunbird.common.exception.ProjectCommonException;
import org.sunbird.common.factory.EsClientFactory;
import org.sunbird.common.inf.ElasticSearchService;
import org.sunbird.common.models.response.Response;
import org.sunbird.common.models.util.HttpClientUtil;
import org.sunbird.common.models.util.JsonKey;
import org.sunbird.common.models.util.LoggerUtil;
import org.sunbird.common.models.util.ProjectUtil;
import org.sunbird.common.models.util.ProjectUtil.OrgStatus;
import org.sunbird.common.models.util.PropertiesCache;
import org.sunbird.common.models.util.datasecurity.DataMaskingService;
import org.sunbird.common.models.util.datasecurity.DecryptionService;
import org.sunbird.common.models.util.datasecurity.EncryptionService;
import org.sunbird.common.models.util.url.URLShortner;
import org.sunbird.common.models.util.url.URLShortnerImpl;
import org.sunbird.common.quartz.scheduler.SchedulerManager;
import org.sunbird.common.request.Request;
import org.sunbird.common.request.RequestContext;
import org.sunbird.common.responsecode.ResponseCode;
import org.sunbird.common.util.KeycloakRequiredActionLinkUtil;
import org.sunbird.dto.SearchDTO;
import org.sunbird.helper.CassandraConnectionManager;
import org.sunbird.helper.CassandraConnectionMngrFactory;
import org.sunbird.helper.ServiceFactory;
import org.sunbird.notification.sms.provider.ISmsProvider;
import org.sunbird.notification.utils.SMSFactory;
import scala.concurrent.Future;
/**
* Utility class for actors
*
* @author arvind .
*/
public final class Util {
private static LoggerUtil logger = new LoggerUtil(Util.class);
public static final Map<String, DbInfo> dbInfoMap = new HashMap<>();
private static PropertiesCache propertiesCache = PropertiesCache.getInstance();
public static final int DEFAULT_ELASTIC_DATA_LIMIT = 10000;
public static final String KEY_SPACE_NAME = "sunbird";
private static Properties prop = new Properties();
private static Map<String, String> headers = new HashMap<>();
private static Map<Integer, List<Integer>> orgStatusTransition = new HashMap<>();
private static final String SUNBIRD_WEB_URL = "sunbird_web_url";
private static CassandraOperation cassandraOperation = ServiceFactory.getInstance();
private static EncryptionService encryptionService =
org.sunbird.common.models.util.datasecurity.impl.ServiceFactory.getEncryptionServiceInstance(
null);
private static DecryptionService decService =
org.sunbird.common.models.util.datasecurity.impl.ServiceFactory.getDecryptionServiceInstance(
null);
private static DataMaskingService maskingService =
org.sunbird.common.models.util.datasecurity.impl.ServiceFactory.getMaskingServiceInstance(
null);
private static ObjectMapper mapper = new ObjectMapper();
private static ElasticSearchService esService = EsClientFactory.getInstance(JsonKey.REST);
// One-time class initialisation: status-transition table, DB metadata,
// default EkStep HTTP headers, and the background scheduler.
static {
initializeOrgStatusTransition();
initializeDBProperty(); // EkStep HttpClient headers init
headers.put("content-type", "application/json");
headers.put("accept", "application/json");
// Started on a separate thread so class loading is not blocked by scheduler startup.
new Thread(() -> SchedulerManager.getInstance()).start();
}
/** Utility class — not instantiable. */
private Util() {}
/**
 * Builds the map of valid organization status transitions: each key is a
 * current status and its value lists every status it may legally move to.
 * A {@code null} key covers organizations that have no status assigned yet.
 */
private static void initializeOrgStatusTransition() {
    // ACTIVE may move to any status (or stay ACTIVE).
    orgStatusTransition.put(
        OrgStatus.ACTIVE.getValue(),
        Arrays.asList(
            OrgStatus.ACTIVE.getValue(),
            OrgStatus.INACTIVE.getValue(),
            OrgStatus.BLOCKED.getValue(),
            OrgStatus.RETIRED.getValue()));
    // INACTIVE may only be re-activated (or stay INACTIVE).
    orgStatusTransition.put(
        OrgStatus.INACTIVE.getValue(),
        Arrays.asList(OrgStatus.ACTIVE.getValue(), OrgStatus.INACTIVE.getValue()));
    // BLOCKED may be re-activated, stay BLOCKED, or be RETIRED.
    orgStatusTransition.put(
        OrgStatus.BLOCKED.getValue(),
        Arrays.asList(
            OrgStatus.ACTIVE.getValue(),
            OrgStatus.BLOCKED.getValue(),
            OrgStatus.RETIRED.getValue()));
    // RETIRED is terminal.
    orgStatusTransition.put(
        OrgStatus.RETIRED.getValue(), Arrays.asList(OrgStatus.RETIRED.getValue()));
    // No status assigned yet: any initial status is allowed.
    orgStatusTransition.put(
        null,
        Arrays.asList(
            OrgStatus.ACTIVE.getValue(),
            OrgStatus.INACTIVE.getValue(),
            OrgStatus.BLOCKED.getValue(),
            OrgStatus.RETIRED.getValue()));
}
/**
 * Populates {@link #dbInfoMap} with the Cassandra keyspace/table pair for
 * every logical DB used by this service; the map is consulted on each
 * Cassandra interaction.
 */
private static void initializeDBProperty() {
    dbInfoMap.put(JsonKey.USER_DB, getDbInfoObject(KEY_SPACE_NAME, "user"));
    dbInfoMap.put(JsonKey.ORG_DB, getDbInfoObject(KEY_SPACE_NAME, "organisation"));
    dbInfoMap.put(JsonKey.ROLE, getDbInfoObject(KEY_SPACE_NAME, "role"));
    dbInfoMap.put(JsonKey.URL_ACTION, getDbInfoObject(KEY_SPACE_NAME, "url_action"));
    dbInfoMap.put(JsonKey.ACTION_GROUP, getDbInfoObject(KEY_SPACE_NAME, "action_group"));
    dbInfoMap.put(JsonKey.USER_ACTION_ROLE, getDbInfoObject(KEY_SPACE_NAME, "user_action_role"));
    dbInfoMap.put(JsonKey.ROLE_GROUP, getDbInfoObject(KEY_SPACE_NAME, "role_group"));
    dbInfoMap.put(JsonKey.USER_ORG_DB, getDbInfoObject(KEY_SPACE_NAME, "user_organisation"));
    dbInfoMap.put(JsonKey.BULK_OP_DB, getDbInfoObject(KEY_SPACE_NAME, "bulk_upload_process"));
    dbInfoMap.put(JsonKey.USER_NOTES_DB, getDbInfoObject(KEY_SPACE_NAME, "user_notes"));
    dbInfoMap.put(
        JsonKey.TENANT_PREFERENCE_DB, getDbInfoObject(KEY_SPACE_NAME, "tenant_preference"));
    dbInfoMap.put(JsonKey.SYSTEM_SETTINGS_DB, getDbInfoObject(KEY_SPACE_NAME, "system_settings"));
    dbInfoMap.put(JsonKey.USER_CERT, getDbInfoObject(KEY_SPACE_NAME, JsonKey.USER_CERT));
    dbInfoMap.put(JsonKey.USER_FEED_DB, getDbInfoObject(KEY_SPACE_NAME, JsonKey.USER_FEED_DB));
    dbInfoMap.put(
        JsonKey.USR_DECLARATION_TABLE,
        getDbInfoObject(KEY_SPACE_NAME, JsonKey.USR_DECLARATION_TABLE));
    dbInfoMap.put(
        JsonKey.TENANT_PREFERENCE_V2, getDbInfoObject(KEY_SPACE_NAME, "tenant_preference_v2"));
    dbInfoMap.put(JsonKey.USER_LOOKUP, getDbInfoObject(KEY_SPACE_NAME, "user_lookup"));
    dbInfoMap.put(JsonKey.LOCATION, getDbInfoObject(KEY_SPACE_NAME, JsonKey.LOCATION));
    dbInfoMap.put(JsonKey.USER_ROLES, getDbInfoObject(KEY_SPACE_NAME, JsonKey.USER_ROLES));
}
/**
 * Checks whether an organization may move from {@code currentState} to
 * {@code nextState} according to {@link #orgStatusTransition}.
 *
 * @param currentState current status code (null means "no status assigned yet")
 * @param nextState proposed status code
 * @return true when the transition is allowed, false otherwise (including
 *     when the current state is unknown to the transition table)
 */
public static boolean checkOrgStatusTransition(Integer currentState, Integer nextState) {
    // Typed lookup replaces the original raw List + @SuppressWarnings("rawtypes");
    // the map is declared as Map<Integer, List<Integer>>, so no cast is needed.
    List<Integer> allowed = orgStatusTransition.get(currentState);
    return allowed != null && allowed.contains(nextState);
}
/**
 * Establishes the Cassandra connection. Hosts are taken from the
 * SUNBIRD_CASSANDRA_IP environment variable (comma-separated) when set,
 * falling back to "localhost" otherwise.
 */
public static void checkCassandraDbConnections() {
    CassandraConnectionManager connectionManager =
        CassandraConnectionMngrFactory.getInstance();
    String nodes = System.getenv(JsonKey.SUNBIRD_CASSANDRA_IP);
    String[] hosts =
        StringUtils.isNotBlank(nodes) ? nodes.split(",") : new String[] {"localhost"};
    connectionManager.createConnection(hosts);
}
/**
 * Returns the value for {@code key} from the local {@link Properties}
 * instance, or null when absent.
 * NOTE(review): no code visible in this chunk ever loads {@code prop};
 * confirm it is populated elsewhere, otherwise this always returns null.
 */
public static String getProperty(String key) {
    return prop.getProperty(key);
}
/** Creates a {@link DbInfo} holding the given keyspace/table pair. */
private static DbInfo getDbInfoObject(String keySpace, String table) {
    DbInfo info = new DbInfo();
    info.setKeySpace(keySpace);
    info.setTableName(table);
    return info;
}
/** Simple holder for a Cassandra keyspace/table pair. */
public static class DbInfo {
    // Keyspace the table lives in.
    private String keySpace;
    // Table name within the keyspace.
    private String tableName;
    /** No-arg constructor */
    DbInfo() {}
    public String getKeySpace() {
        return keySpace;
    }
    public void setKeySpace(String keySpace) {
        this.keySpace = keySpace;
    }
    public String getTableName() {
        return tableName;
    }
    public void setTableName(String tableName) {
        this.tableName = tableName;
    }
}
/**
 * Converts a raw search-query map into a {@link SearchDTO}. Recognized keys
 * (query, fields, filters, sort, paging, …) are copied over; paging values
 * are clamped to the ElasticSearch result-window limit.
 *
 * @param searchQueryMap Map&lt;String, Object&gt; of raw query parameters
 * @return populated SearchDTO
 */
@SuppressWarnings("unchecked")
public static SearchDTO createSearchDto(Map<String, Object> searchQueryMap) {
    SearchDTO search = new SearchDTO();
    if (searchQueryMap.containsKey(JsonKey.QUERY)) {
        search.setQuery((String) searchQueryMap.get(JsonKey.QUERY));
    }
    if (searchQueryMap.containsKey(JsonKey.QUERY_FIELDS)) {
        search.setQueryFields((List<String>) searchQueryMap.get(JsonKey.QUERY_FIELDS));
    }
    if (searchQueryMap.containsKey(JsonKey.FACETS)) {
        search.setFacets((List<Map<String, String>>) searchQueryMap.get(JsonKey.FACETS));
    }
    if (searchQueryMap.containsKey(JsonKey.FIELDS)) {
        search.setFields((List<String>) searchQueryMap.get(JsonKey.FIELDS));
    }
    if (searchQueryMap.containsKey(JsonKey.FILTERS)) {
        search.getAdditionalProperties().put(JsonKey.FILTERS, searchQueryMap.get(JsonKey.FILTERS));
    }
    if (searchQueryMap.containsKey(JsonKey.EXISTS)) {
        search.getAdditionalProperties().put(JsonKey.EXISTS, searchQueryMap.get(JsonKey.EXISTS));
    }
    if (searchQueryMap.containsKey(JsonKey.NOT_EXISTS)) {
        search
            .getAdditionalProperties()
            .put(JsonKey.NOT_EXISTS, searchQueryMap.get(JsonKey.NOT_EXISTS));
    }
    if (searchQueryMap.containsKey(JsonKey.SORT_BY)) {
        search
            .getSortBy()
            .putAll((Map<? extends String, ? extends String>) searchQueryMap.get(JsonKey.SORT_BY));
    }
    // OFFSET/LIMIT may arrive as Integer or BigInteger depending on how the
    // request JSON was parsed; both are handled.
    if (searchQueryMap.containsKey(JsonKey.OFFSET)) {
        if ((searchQueryMap.get(JsonKey.OFFSET)) instanceof Integer) {
            search.setOffset((int) searchQueryMap.get(JsonKey.OFFSET));
        } else {
            search.setOffset(((BigInteger) searchQueryMap.get(JsonKey.OFFSET)).intValue());
        }
    }
    if (searchQueryMap.containsKey(JsonKey.LIMIT)) {
        if ((searchQueryMap.get(JsonKey.LIMIT)) instanceof Integer) {
            search.setLimit((int) searchQueryMap.get(JsonKey.LIMIT));
        } else {
            search.setLimit(((BigInteger) searchQueryMap.get(JsonKey.LIMIT)).intValue());
        }
    }
    // Clamp limit, and then limit+offset, to the elastic data window.
    if (search.getLimit() > DEFAULT_ELASTIC_DATA_LIMIT) {
        search.setLimit(DEFAULT_ELASTIC_DATA_LIMIT);
    }
    if (search.getLimit() + search.getOffset() > DEFAULT_ELASTIC_DATA_LIMIT) {
        search.setLimit(DEFAULT_ELASTIC_DATA_LIMIT - search.getOffset());
    }
    if (searchQueryMap.containsKey(JsonKey.GROUP_QUERY)) {
        search
            .getGroupQuery()
            .addAll(
                (Collection<? extends Map<String, Object>>) searchQueryMap.get(JsonKey.GROUP_QUERY));
    }
    if (searchQueryMap.containsKey(JsonKey.SOFT_CONSTRAINTS)) {
        search.setSoftConstraints(
            (Map<String, Integer>) searchQueryMap.get(JsonKey.SOFT_CONSTRAINTS));
    }
    return search;
}
/**
 * @param obj Object to test
 * @return true when {@code obj} is non-null
 */
public static boolean isNotNull(Object obj) {
    // Direct boolean expression replaces the redundant `? true : false`.
    return null != obj;
}
/**
 * Fetches a user record from Cassandra by user id.
 *
 * @param userId id of the user to look up
 * @param context request context propagated to the DB call
 * @return the user's DB record as a map, or null when no user exists
 */
@SuppressWarnings("unchecked")
public static Map<String, Object> getUserbyUserId(String userId, RequestContext context) {
    // Use the class-level CassandraOperation; the original created a local
    // instance via ServiceFactory.getInstance() that shadowed the static field.
    Util.DbInfo userdbInfo = Util.dbInfoMap.get(JsonKey.USER_DB);
    Response result =
        cassandraOperation.getRecordById(
            userdbInfo.getKeySpace(), userdbInfo.getTableName(), userId, context);
    List<Map<String, Object>> list = (List<Map<String, Object>>) result.get(JsonKey.RESPONSE);
    return list.isEmpty() ? null : list.get(0);
}
/**
 * Registers an organization as a channel with the EkStep content service.
 *
 * @param req request data; reads JsonKey.ID, CHANNEL, DESCRIPTION and
 *     optionally LICENSE and DEFAULT_FRAMEWORK
 * @param context request context used for logging and trace propagation
 * @return true when the EkStep response body contains "OK"; false on any
 *     failure (exceptions are logged, not rethrown — best-effort call)
 */
public static boolean registerChannel(Map<String, Object> req, RequestContext context) {
    Map<String, String> headerMap = new HashMap<>();
    // Authorization: env var wins (with Bearer prefix), else property file.
    String header = System.getenv(JsonKey.EKSTEP_AUTHORIZATION);
    if (StringUtils.isBlank(header)) {
        header = PropertiesCache.getInstance().getProperty(JsonKey.EKSTEP_AUTHORIZATION);
    } else {
        header = JsonKey.BEARER + header;
    }
    headerMap.put(JsonKey.AUTHORIZATION, header);
    headerMap.put("Content-Type", "application/json");
    headerMap.put("user-id", "");
    ProjectUtil.setTraceIdInHeader(headerMap, context);
    String reqString = "";
    String regStatus = "";
    try {
        logger.info(
            context, "start call for registering the channel for org id ==" + req.get(JsonKey.ID));
        String ekStepBaseUrl = System.getenv(JsonKey.EKSTEP_BASE_URL);
        if (StringUtils.isBlank(ekStepBaseUrl)) {
            ekStepBaseUrl = PropertiesCache.getInstance().getProperty(JsonKey.EKSTEP_BASE_URL);
        }
        // Payload shape: { request: { channel: { name, description, code, ... } } }
        Map<String, Object> map = new HashMap<>();
        Map<String, Object> reqMap = new HashMap<>();
        Map<String, Object> channelMap = new HashMap<>();
        channelMap.put(JsonKey.NAME, req.get(JsonKey.CHANNEL));
        channelMap.put(JsonKey.DESCRIPTION, req.get(JsonKey.DESCRIPTION));
        channelMap.put(JsonKey.CODE, req.get(JsonKey.ID));
        if (req.containsKey(JsonKey.LICENSE)
            && StringUtils.isNotBlank((String) req.get(JsonKey.LICENSE))) {
            channelMap.put(JsonKey.DEFAULT_LICENSE, req.get(JsonKey.LICENSE));
        }
        String defaultFramework = (String) req.get(JsonKey.DEFAULT_FRAMEWORK);
        if (StringUtils.isNotBlank(defaultFramework))
            channelMap.put(JsonKey.DEFAULT_FRAMEWORK, defaultFramework);
        reqMap.put(JsonKey.CHANNEL, channelMap);
        map.put(JsonKey.REQUEST, reqMap);
        reqString = mapper.writeValueAsString(map);
        logger.info(
            context, "Util:registerChannel: Channel registration request data = " + reqString);
        regStatus =
            HttpClientUtil.post(
                (ekStepBaseUrl
                    + PropertiesCache.getInstance().getProperty(JsonKey.EKSTEP_CHANNEL_REG_API_URL)),
                reqString,
                headerMap);
        logger.info(context, "end call for channel registration for org id ==" + req.get(JsonKey.ID));
    } catch (Exception e) {
        // Best-effort: failures are logged and surfaced via the boolean return.
        logger.error(
            context, "Exception occurred while registering channel in ekstep." + e.getMessage(), e);
    }
    return regStatus.contains("OK");
}
/**
 * Updates an existing channel registration in the EkStep content service.
 *
 * @param req request data; reads JsonKey.HASHTAGID, CHANNEL, DESCRIPTION,
 *     optionally LICENSE, and JsonKey.ID for the URL path
 * @param context request context used for logging and trace propagation
 * @return true when the EkStep response body contains "OK"; false on any
 *     failure (exceptions are logged, not rethrown — best-effort call)
 */
public static boolean updateChannel(Map<String, Object> req, RequestContext context) {
    Map<String, String> headerMap = new HashMap<>();
    // Authorization: env var wins (with Bearer prefix), else property file.
    String header = System.getenv(JsonKey.EKSTEP_AUTHORIZATION);
    if (StringUtils.isBlank(header)) {
        header = PropertiesCache.getInstance().getProperty(JsonKey.EKSTEP_AUTHORIZATION);
    } else {
        header = JsonKey.BEARER + header;
    }
    headerMap.put(JsonKey.AUTHORIZATION, header);
    headerMap.put("Content-Type", "application/json");
    headerMap.put("user-id", "");
    // Bug fix: the original passed the class-level static `headers` map here,
    // so the trace id was never attached to this request's own headerMap
    // (registerChannel does this correctly).
    ProjectUtil.setTraceIdInHeader(headerMap, context);
    String reqString = "";
    String regStatus = "";
    try {
        logger.info(
            context, "start call for updateChannel for hashTag id ==" + req.get(JsonKey.HASHTAGID));
        String ekStepBaseUrl = System.getenv(JsonKey.EKSTEP_BASE_URL);
        if (StringUtils.isBlank(ekStepBaseUrl)) {
            ekStepBaseUrl = PropertiesCache.getInstance().getProperty(JsonKey.EKSTEP_BASE_URL);
        }
        // Payload shape: { request: { channel: { name, description, code, ... } } }
        Map<String, Object> map = new HashMap<>();
        Map<String, Object> reqMap = new HashMap<>();
        Map<String, Object> channelMap = new HashMap<>();
        channelMap.put(JsonKey.NAME, req.get(JsonKey.CHANNEL));
        channelMap.put(JsonKey.DESCRIPTION, req.get(JsonKey.DESCRIPTION));
        channelMap.put(JsonKey.CODE, req.get(JsonKey.HASHTAGID));
        String license = (String) req.get(JsonKey.LICENSE);
        if (StringUtils.isNotBlank(license)) {
            channelMap.put(JsonKey.DEFAULT_LICENSE, license);
        }
        reqMap.put(JsonKey.CHANNEL, channelMap);
        map.put(JsonKey.REQUEST, reqMap);
        reqString = mapper.writeValueAsString(map);
        regStatus =
            HttpClientUtil.patch(
                (ekStepBaseUrl
                    + PropertiesCache.getInstance()
                        .getProperty(JsonKey.EKSTEP_CHANNEL_UPDATE_API_URL))
                    + "/"
                    + req.get(JsonKey.ID),
                reqString,
                headerMap);
        logger.info(
            context, "end call for channel update for org id ==" + req.get(JsonKey.HASHTAGID));
    } catch (Exception e) {
        // Best-effort: failures are logged and surfaced via the boolean return.
        logger.error(
            context, "Exception occurred while updating channel in ekstep. " + e.getMessage(), e);
    }
    return regStatus.contains("OK");
}
/**
 * Seeds the request context with environment and request-type info and, for
 * user-actor calls, a telemetry rollup keyed by the requesting user's root
 * organisation id.
 *
 * @param request incoming request whose context map is mutated in place
 * @param env environment name; blank values are normalised to ""
 */
public static void initializeContext(Request request, String env) {
    Map<String, Object> requestContext = request.getContext();
    env = StringUtils.isNotBlank(env) ? env : "";
    requestContext.put(JsonKey.ENV, env);
    requestContext.put(JsonKey.REQUEST_TYPE, JsonKey.API_CALL);
    // Rollup is only derived when the actor is a user with a known id.
    if (JsonKey.USER.equalsIgnoreCase((String) request.getContext().get(JsonKey.ACTOR_TYPE))) {
        String requestedByUserId = (String) request.getContext().get(JsonKey.REQUESTED_BY);
        if (StringUtils.isNotBlank(requestedByUserId)) {
            Util.DbInfo usrDbInfo = dbInfoMap.get(JsonKey.USER_DB);
            Response userResponse =
                cassandraOperation.getRecordById(
                    usrDbInfo.getKeySpace(),
                    usrDbInfo.getTableName(),
                    (String) request.getContext().get(JsonKey.REQUESTED_BY),
                    request.getRequestContext());
            List<Map<String, Object>> userList =
                (List<Map<String, Object>>) userResponse.get(JsonKey.RESPONSE);
            if (CollectionUtils.isNotEmpty(userList)) {
                Map<String, Object> result = userList.get(0);
                if (result != null) {
                    String rootOrgId = (String) result.get(JsonKey.ROOT_ORG_ID);
                    if (StringUtils.isNotBlank(rootOrgId)) {
                        // "l1" is the top level of the telemetry rollup hierarchy.
                        Map<String, String> rollup = new HashMap<>();
                        rollup.put("l1", rootOrgId);
                        requestContext.put(JsonKey.ROLLUP, rollup);
                    }
                }
            }
        }
    }
}
/**
 * Builds the tenant-specific web URL: the base Sunbird web URL (taken from the
 * environment variable when set, otherwise from the properties cache) plus the
 * "/&lt;slug&gt;" of the user's root organisation when one can be resolved.
 *
 * @param userMap user attributes; ROOT_ORG_ID is read to look up the org slug
 * @return the tenant web URL, without a slug suffix when none is available
 */
public static String getSunbirdWebUrlPerTenent(Map<String, Object> userMap) {
  // Environment variable wins; the properties cache is only a fallback.
  String baseUrl = System.getenv(SUNBIRD_WEB_URL);
  if (StringUtils.isBlank(baseUrl)) {
    baseUrl = propertiesCache.getProperty(SUNBIRD_WEB_URL);
  }
  StringBuilder webUrl = new StringBuilder(baseUrl);
  String rootOrgId = (String) userMap.get(JsonKey.ROOT_ORG_ID);
  String slug = "";
  if (!StringUtils.isBlank(rootOrgId)) {
    Map<String, Object> orgMap = getOrgDetails(rootOrgId, null);
    slug = (String) orgMap.get(JsonKey.SLUG);
  }
  if (!StringUtils.isBlank(slug)) {
    webUrl.append("/").append(slug);
  }
  return webUrl.toString();
}
/**
 * As per requirement this page need to be redirect to /resources always.
 *
 * @return url of login page (base web URL with "/resources" appended)
 */
public static String getSunbirdLoginUrl() {
  // Environment variable wins; the properties cache is only a fallback.
  String baseUrl = System.getenv(SUNBIRD_WEB_URL);
  if (StringUtils.isBlank(baseUrl)) {
    baseUrl = propertiesCache.getProperty(SUNBIRD_WEB_URL);
  }
  // Always land users on the /resources page after login.
  return baseUrl + "/resources";
}
/**
 * Reads an organisation record from Cassandra by its id.
 *
 * @param identifier organisation id; blank input short-circuits the lookup
 * @param context request context used for tracing; may be null
 * @return the first matching row, or an immutable empty map when not found
 */
public static Map<String, Object> getOrgDetails(String identifier, RequestContext context) {
  if (StringUtils.isNotBlank(identifier)) {
    DbInfo orgDbInfo = Util.dbInfoMap.get(JsonKey.ORG_DB);
    Response response =
        cassandraOperation.getRecordById(
            orgDbInfo.getKeySpace(), orgDbInfo.getTableName(), identifier, context);
    List<Map<String, Object>> res = (List<Map<String, Object>>) response.get(JsonKey.RESPONSE);
    if (null != res && !res.isEmpty()) {
      return res.get(0);
    }
  }
  // Note: Collections.emptyMap() is immutable — callers must not mutate it.
  return Collections.emptyMap();
}
/**
 * Encrypts the given value with the configured encryption service.
 *
 * @param value plain text to encrypt
 * @return the encrypted representation of {@code value}
 * @throws ProjectCommonException (userDataEncryptionError / SERVER_ERROR)
 *     wrapping any failure from the underlying service
 */
public static String encryptData(String value) {
  try {
    return encryptionService.encryptData(value, null);
  } catch (Exception e) {
    // Hide the low-level cause behind the project's standard error contract.
    throw new ProjectCommonException(
        ResponseCode.userDataEncryptionError.getErrorCode(),
        ResponseCode.userDataEncryptionError.getErrorMessage(),
        ResponseCode.SERVER_ERROR.getResponseCode());
  }
}
/**
 * This method will search in ES for user with given search query.
 *
 * @param searchQueryMap Query filters as Map.
 * @param context request context used for tracing the ES call
 * @return matching user rows (the CONTENT section of the ES response);
 *     empty list when the search returns nothing
 */
public static List<Map<String, Object>> searchUser(
    Map<String, Object> searchQueryMap, RequestContext context) {
  List<Map<String, Object>> searchResult = new ArrayList<>();
  // Wrap the caller's filters in the standard search-request envelope.
  Map<String, Object> searchRequestMap = new HashMap<>();
  searchRequestMap.put(JsonKey.FILTERS, searchQueryMap);
  SearchDTO searchDto = Util.createSearchDto(searchRequestMap);
  Future<Map<String, Object>> resultf =
      esService.search(searchDto, ProjectUtil.EsType.user.getTypeName(), context);
  // Blocks until the asynchronous ES search completes.
  Map<String, Object> result =
      (Map<String, Object>) ElasticSearchHelper.getResponseFromFuture(resultf);
  if (MapUtils.isNotEmpty(result)) {
    searchResult = (List<Map<String, Object>>) result.get(JsonKey.CONTENT);
  }
  return searchResult;
}
/**
 * Builds a login id: "&lt;username&gt;@&lt;channel&gt;" when a channel is
 * present, otherwise just the username.
 *
 * @param userMap user attributes carrying USERNAME and optionally CHANNEL
 * @return the derived login id
 */
public static String getLoginId(Map<String, Object> userMap) {
  String username = (String) userMap.get(JsonKey.USERNAME);
  String channel = (String) userMap.get(JsonKey.CHANNEL);
  return StringUtils.isNotBlank(channel) ? username + "@" + channel : username;
}
/**
 * Inserts a fresh user-organisation membership row.
 *
 * <p>Reads ID, ORGANISATION_ID, ASSOCIATION_TYPE and optionally HASHTAGID
 * from {@code userMap}; the row gets a newly generated id and the current
 * date as the join date. Insert failures are logged and swallowed.
 *
 * @param userMap source attributes for the membership row
 * @param context request context used for tracing
 */
public static void registerUserToOrg(Map<String, Object> userMap, RequestContext context) {
  Map<String, Object> reqMap = new WeakHashMap<>();
  reqMap.put(JsonKey.ID, ProjectUtil.getUniqueIdFromTimestamp(1));
  reqMap.put(JsonKey.USER_ID, userMap.get(JsonKey.ID));
  reqMap.put(JsonKey.ORGANISATION_ID, userMap.get(JsonKey.ORGANISATION_ID));
  reqMap.put(JsonKey.ORG_JOIN_DATE, ProjectUtil.getFormattedDate());
  reqMap.put(JsonKey.IS_DELETED, false);
  reqMap.put(JsonKey.ASSOCIATION_TYPE, userMap.get(JsonKey.ASSOCIATION_TYPE));
  if (StringUtils.isNotEmpty((String) userMap.get(JsonKey.HASHTAGID))) {
    reqMap.put(JsonKey.HASHTAGID, userMap.get(JsonKey.HASHTAGID));
  }
  Util.DbInfo usrOrgDb = Util.dbInfoMap.get(JsonKey.USER_ORG_DB);
  try {
    cassandraOperation.insertRecord(
        usrOrgDb.getKeySpace(), usrOrgDb.getTableName(), reqMap, context);
  } catch (Exception e) {
    // Best-effort insert: failures are logged, not propagated to the caller.
    logger.error(context, e.getMessage(), e);
  }
}
/**
 * Looks up the channel of the given root organisation.
 *
 * @param rootOrgId id of the root organisation
 * @param context request context used for tracing
 * @return the org's channel, or null when the org record does not exist
 */
public static String getChannel(String rootOrgId, RequestContext context) {
  Util.DbInfo orgDbInfo = Util.dbInfoMap.get(JsonKey.ORG_DB);
  Response resultFrRootOrg =
      cassandraOperation.getRecordById(
          orgDbInfo.getKeySpace(), orgDbInfo.getTableName(), rootOrgId, context);
  List<Map<String, Object>> rows =
      (List<Map<String, Object>>) resultFrRootOrg.get(JsonKey.RESPONSE);
  if (CollectionUtils.isEmpty(rows)) {
    return null;
  }
  return (String) rows.get(0).get(JsonKey.CHANNEL);
}
/**
 * Upserts a user-organisation membership: when a row already exists for the
 * (user, organisation) pair it is updated (roles, association type, hashtag,
 * un-deleted, timestamps refreshed); otherwise a new row is inserted via
 * {@link #registerUserToOrg(Map, RequestContext)}.
 *
 * @param userMap carries ID, ORGANISATION_ID and the fields to update
 * @param context request context used for tracing
 */
@SuppressWarnings("unchecked")
public static void upsertUserOrgData(Map<String, Object> userMap, RequestContext context) {
  Util.DbInfo usrOrgDb = Util.dbInfoMap.get(JsonKey.USER_ORG_DB);
  // Look up the existing membership row by (userId, organisationId).
  Map<String, Object> map = new LinkedHashMap<>();
  map.put(JsonKey.USER_ID, userMap.get(JsonKey.ID));
  map.put(JsonKey.ORGANISATION_ID, userMap.get(JsonKey.ORGANISATION_ID));
  Response response =
      cassandraOperation.getRecordsByProperties(
          usrOrgDb.getKeySpace(), usrOrgDb.getTableName(), map, context);
  List<Map<String, Object>> resList = (List<Map<String, Object>>) response.get(JsonKey.RESPONSE);
  if (!resList.isEmpty()) {
    Map<String, Object> res = resList.get(0);
    Map<String, Object> reqMap = new WeakHashMap<>();
    reqMap.put(JsonKey.ID, res.get(JsonKey.ID));
    // Roles are only overwritten when the caller supplies them.
    if (null != userMap.get(JsonKey.ROLES)) {
      reqMap.put(JsonKey.ROLES, userMap.get(JsonKey.ROLES));
    }
    reqMap.put(JsonKey.UPDATED_BY, userMap.get(JsonKey.UPDATED_BY));
    reqMap.put(JsonKey.ASSOCIATION_TYPE, userMap.get(JsonKey.ASSOCIATION_TYPE));
    // Re-activating the membership if it had been soft-deleted.
    reqMap.put(JsonKey.IS_DELETED, false);
    reqMap.put(JsonKey.UPDATED_DATE, ProjectUtil.getFormattedDate());
    if (StringUtils.isNotEmpty((String) userMap.get(JsonKey.HASHTAGID))) {
      reqMap.put(JsonKey.HASHTAGID, userMap.get(JsonKey.HASHTAGID));
    }
    try {
      cassandraOperation.updateRecord(
          usrOrgDb.getKeySpace(), usrOrgDb.getTableName(), reqMap, context);
    } catch (Exception e) {
      // Best-effort update: failures are logged, not propagated.
      logger.error(context, "Util:upsertUserOrgData exception : " + e.getMessage(), e);
    }
  } else {
    registerUserToOrg(userMap, context);
  }
}
/**
 * Loads a user's full profile for ES sync: the base record from Cassandra
 * enriched with organisations, root-org name, roles, decrypted/masked
 * contact details and deserialized profileLocation / profileUserType JSON.
 *
 * @param userId id of the user to load
 * @param context request context used for tracing
 * @return the enriched user map, or null when no user record exists
 */
@SuppressWarnings("unchecked")
public static Map<String, Object> getUserDetails(String userId, RequestContext context) {
  logger.info(context, "get user profile method call started user Id : " + userId);
  DbInfo userDbInfo = dbInfoMap.get(JsonKey.USER_DB);
  Response response = null;
  List<Map<String, Object>> userList = null;
  Map<String, Object> userDetails = null;
  try {
    response =
        cassandraOperation.getRecordById(
            userDbInfo.getKeySpace(), userDbInfo.getTableName(), userId, context);
    userList = (List<Map<String, Object>>) response.getResult().get(JsonKey.RESPONSE);
    logger.info(
        context, "Util:getUserProfile: collecting user data to save for userId : " + userId);
  } catch (Exception e) {
    logger.error(context, e.getMessage(), e);
  }
  String username = "";
  if (CollectionUtils.isNotEmpty(userList)) {
    userDetails = userList.get(0);
    username = (String) userDetails.get(JsonKey.USERNAME);
    logger.info(context, "Util:getUserDetails: userId = " + userId);
    userDetails.put(JsonKey.ORGANISATIONS, getUserOrgDetails(userId, context));
    Map<String, Object> orgMap =
        getOrgDetails((String) userDetails.get(JsonKey.ROOT_ORG_ID), context);
    if (!MapUtils.isEmpty(orgMap)) {
      userDetails.put(JsonKey.ROOT_ORG_NAME, orgMap.get(JsonKey.ORG_NAME));
    } else {
      userDetails.put(JsonKey.ROOT_ORG_NAME, "");
    }
    // store alltncaccepted as Map Object in ES
    Map<String, Object> allTncAccepted =
        (Map<String, Object>) userDetails.get(JsonKey.ALL_TNC_ACCEPTED);
    if (MapUtils.isNotEmpty(allTncAccepted)) {
      convertTncJsonStringToMapObject(allTncAccepted);
    }
    // save masked email and phone number
    addMaskEmailAndPhone(userDetails);
    userDetails.remove(JsonKey.PASSWORD);
    addEmailAndPhone(userDetails);
    checkEmailAndPhoneVerified(userDetails);
    // profileLocation is stored as a JSON string; expose it as a List.
    List<Map<String, String>> userLocList = new ArrayList<>();
    String profLocation = (String) userDetails.get(JsonKey.PROFILE_LOCATION);
    if (StringUtils.isNotBlank(profLocation)) {
      try {
        userLocList = mapper.readValue(profLocation, List.class);
      } catch (Exception e) {
        logger.info(
            context,
            "Exception occurred while converting profileLocation to List<Map<String,String>>.");
      }
    }
    userDetails.put(JsonKey.PROFILE_LOCATION, userLocList);
    // profileUserType is stored as a JSON string; expose it as a Map.
    Map<String, Object> userTypeDetail = new HashMap<>();
    String profUserType = (String) userDetails.get(JsonKey.PROFILE_USERTYPE);
    if (StringUtils.isNotBlank(profUserType)) {
      try {
        userTypeDetail = mapper.readValue(profUserType, Map.class);
      } catch (Exception e) {
        logger.info(
            context,
            "Exception occurred while converting profileUserType to Map<String,String>.");
      }
    }
    userDetails.put(JsonKey.PROFILE_USERTYPE, userTypeDetail);
    List<Map<String, Object>> userRoleList = getUserRoles(userId, context);
    userDetails.put(JsonKey.ROLES, userRoleList);
  } else {
    logger.info(
        context,
        "Util:getUserProfile: User data not available to save in ES for userId : " + userId);
  }
  // BUGFIX: userDetails is still null when no user record was found; the
  // previous unconditional put() threw a NullPointerException in that case.
  if (userDetails != null) {
    userDetails.put(JsonKey.USERNAME, username);
  }
  return userDetails;
}
/**
 * Loads the role rows of a user and deserializes each row's JSON "scope"
 * column (when present) into a List in place.
 *
 * @param userId id of the user whose roles are fetched
 * @param context request context used for tracing
 * @return the (possibly mutated) role rows from the user-roles table
 */
public static List<Map<String, Object>> getUserRoles(String userId, RequestContext context) {
  DbInfo userRoleDbInfo = dbInfoMap.get(JsonKey.USER_ROLES);
  List<String> userIds = new ArrayList<>();
  userIds.add(userId);
  Response result =
      cassandraOperation.getRecordsByPrimaryKeys(
          userRoleDbInfo.getKeySpace(),
          userRoleDbInfo.getTableName(),
          userIds,
          JsonKey.USER_ID,
          context);
  List<Map<String, Object>> userRoleList =
      (List<Map<String, Object>>) result.get(JsonKey.RESPONSE);
  userRoleList
      .stream()
      .forEach(
          userRole -> {
            try {
              // scope is persisted as a JSON string; expose it as a List.
              String dbScope = (String) userRole.get(JsonKey.SCOPE);
              if (StringUtils.isNotBlank(dbScope)) {
                List<Map<String, String>> scope = mapper.readValue(dbScope, ArrayList.class);
                userRole.put(JsonKey.SCOPE, scope);
              }
            } catch (Exception e) {
              // A malformed scope leaves the raw string in place and is logged.
              logger.error(
                  context,
                  "Exception because of mapper read value" + userRole.get(JsonKey.SCOPE),
                  e);
            }
          });
  return userRoleList;
}
/**
 * Convert Json String tnc format to object to store in Elastic.
 *
 * <p>Each entry's value is expected to be a JSON string; it is replaced in
 * place by the parsed Map. Unparseable entries are logged and left unchanged.
 *
 * @param allTncAccepted map of tnc-type to JSON-string detail, mutated in place
 */
private static void convertTncJsonStringToMapObject(Map<String, Object> allTncAccepted) {
  for (Map.Entry<String, Object> tncAccepted : allTncAccepted.entrySet()) {
    String tncType = tncAccepted.getKey();
    Map<String, String> tncAcceptedDetailMap = new HashMap<>();
    try {
      tncAcceptedDetailMap = mapper.readValue((String) tncAccepted.getValue(), Map.class);
      // Safe during iteration: put() on an existing key does not
      // structurally modify the map.
      allTncAccepted.put(tncType, tncAcceptedDetailMap);
    } catch (JsonProcessingException e) {
      logger.error("Json Parsing Exception", e);
    }
  }
}
/**
 * Variant of getUserDetails used when the caller already holds the user's
 * user-org row: enriches {@code userDetails} in place (organisations list,
 * root-org name, masked contact details, deserialized profileLocation and
 * profileUserType JSON) and returns it.
 *
 * @param userDetails user attributes, mutated in place
 * @param orgMap the user's organisation membership row
 * @param context request context used for tracing
 * @return the same {@code userDetails} map, enriched
 */
public static Map<String, Object> getUserDetails(
    Map<String, Object> userDetails, Map<String, Object> orgMap, RequestContext context) {
  String userId = (String) userDetails.get(JsonKey.USER_ID);
  logger.info(context, "get user profile method call started user Id : " + userId);
  List<Map<String, Object>> orgList = new ArrayList<Map<String, Object>>();
  orgList.add(orgMap);
  logger.info(context, "Util:getUserDetails: userId = " + userId);
  userDetails.put(JsonKey.ORGANISATIONS, orgList);
  Map<String, Object> rootOrg =
      getOrgDetails((String) userDetails.get(JsonKey.ROOT_ORG_ID), context);
  if (!MapUtils.isEmpty(rootOrg)) {
    // BUGFIX: the root-org name must come from the freshly fetched root-org
    // record, not from the caller-supplied user-org map (copy/paste slip —
    // the guard above already checks rootOrg, not orgMap).
    userDetails.put(JsonKey.ROOT_ORG_NAME, rootOrg.get(JsonKey.ORG_NAME));
  } else {
    userDetails.put(JsonKey.ROOT_ORG_NAME, "");
  }
  // save masked email and phone number
  addMaskEmailAndPhone(userDetails);
  userDetails.remove(JsonKey.PASSWORD);
  addEmailAndPhone(userDetails);
  checkEmailAndPhoneVerified(userDetails);
  // profileLocation is stored as a JSON string; expose it as a List.
  List<Map<String, String>> userLocList = new ArrayList<>();
  String profLocation = (String) userDetails.get(JsonKey.PROFILE_LOCATION);
  if (StringUtils.isNotBlank(profLocation)) {
    try {
      userLocList = mapper.readValue(profLocation, List.class);
    } catch (Exception e) {
      logger.info(
          context,
          "Exception occurred while converting profileLocation to List<Map<String,String>>.");
    }
  }
  userDetails.put(JsonKey.PROFILE_LOCATION, userLocList);
  // profileUserType is stored as a JSON string; expose it as a Map.
  Map<String, Object> userTypeDetail = new HashMap<>();
  String profUserType = (String) userDetails.get(JsonKey.PROFILE_USERTYPE);
  if (StringUtils.isNotBlank(profUserType)) {
    try {
      userTypeDetail = mapper.readValue(profUserType, Map.class);
    } catch (Exception e) {
      logger.info(
          context, "Exception occurred while converting profileUserType to Map<String,String>.");
    }
  }
  userDetails.put(JsonKey.PROFILE_USERTYPE, userTypeDetail);
  return userDetails;
}
/**
 * Moves the encrypted contact entries to the plain keys: ENC_PHONE becomes
 * PHONE and ENC_EMAIL becomes EMAIL (the ENC_* keys are removed).
 *
 * @param userDetails user map mutated in place
 */
public static void addEmailAndPhone(Map<String, Object> userDetails) {
  Object encPhone = userDetails.remove(JsonKey.ENC_PHONE);
  Object encEmail = userDetails.remove(JsonKey.ENC_EMAIL);
  userDetails.put(JsonKey.PHONE, encPhone);
  userDetails.put(JsonKey.EMAIL, encEmail);
}
/**
 * Expands the packed integer FLAGS_VALUE into its individual boolean flag
 * entries (via UserFlagUtil) and merges them into the user map.
 *
 * @param userDetails user map mutated in place; no-op when FLAGS_VALUE is absent
 */
public static void checkEmailAndPhoneVerified(Map<String, Object> userDetails) {
  if (null != userDetails.get(JsonKey.FLAGS_VALUE)) {
    int flagsValue = Integer.parseInt(userDetails.get(JsonKey.FLAGS_VALUE).toString());
    Map<String, Boolean> userFlagMap = UserFlagUtil.assignUserFlagValues(flagsValue);
    userDetails.putAll(userFlagMap);
  }
}
/**
 * Preserves the stored (encrypted) phone/email under ENC_PHONE / ENC_EMAIL
 * and replaces PHONE / EMAIL with masked, decrypted display values.
 *
 * @param userMap user map mutated in place
 */
public static void addMaskEmailAndPhone(Map<String, Object> userMap) {
  String phone = (String) userMap.get(JsonKey.PHONE);
  String email = (String) userMap.get(JsonKey.EMAIL);
  // Keep the original encrypted values under the ENC_* keys.
  userMap.put(JsonKey.ENC_PHONE, phone);
  userMap.put(JsonKey.ENC_EMAIL, email);
  if (!StringUtils.isBlank(phone)) {
    userMap.put(JsonKey.PHONE, maskingService.maskPhone(decService.decryptData(phone, null)));
  }
  if (!StringUtils.isBlank(email)) {
    userMap.put(JsonKey.EMAIL, maskingService.maskEmail(decService.decryptData(email, null)));
  }
}
/**
 * Loads a user's active (non-deleted) organisation memberships and decorates
 * each with the organisation's display name; the per-org ROLES entry is
 * dropped. Any failure is logged and the rows collected so far are returned.
 *
 * @param userId id of the user
 * @param context request context used for tracing
 * @return active membership rows, possibly empty
 */
public static List<Map<String, Object>> getUserOrgDetails(String userId, RequestContext context) {
  List<Map<String, Object>> userOrgList = new ArrayList<>();
  List<Map<String, Object>> userOrgDataList;
  try {
    List<String> userIds = new ArrayList<>();
    userIds.add(userId);
    DbInfo orgUsrDbInfo = dbInfoMap.get(JsonKey.USER_ORG_DB);
    Response result =
        cassandraOperation.getRecordsByPrimaryKeys(
            orgUsrDbInfo.getKeySpace(),
            orgUsrDbInfo.getTableName(),
            userIds,
            JsonKey.USER_ID,
            context);
    userOrgDataList = (List<Map<String, Object>>) result.get(JsonKey.RESPONSE);
    // Keep only rows explicitly flagged as not deleted.
    userOrgDataList
        .stream()
        .forEach(
            (dataMap) -> {
              if (null != dataMap.get(JsonKey.IS_DELETED)
                  && !((boolean) dataMap.get(JsonKey.IS_DELETED))) {
                userOrgList.add(dataMap);
              }
            });
    if (CollectionUtils.isNotEmpty(userOrgList)) {
      List<String> organisationIds =
          userOrgList
              .stream()
              .map(m -> (String) m.get(JsonKey.ORGANISATION_ID))
              .distinct()
              .collect(Collectors.toList());
      List<String> fields = Arrays.asList(JsonKey.ORG_NAME, JsonKey.ID);
      DbInfo orgDbInfo = dbInfoMap.get(JsonKey.ORG_DB);
      Response orgResult =
          cassandraOperation.getPropertiesValueById(
              orgDbInfo.getKeySpace(),
              orgDbInfo.getTableName(),
              organisationIds,
              fields,
              context);
      List<Map<String, Object>> orgDataList =
          (List<Map<String, Object>>) orgResult.get(JsonKey.RESPONSE);
      Map<String, Map<String, Object>> orgInfoMap = new HashMap<>();
      orgDataList.stream().forEach(org -> orgInfoMap.put((String) org.get(JsonKey.ID), org));
      for (Map<String, Object> userOrg : userOrgList) {
        Map<String, Object> orgMap = orgInfoMap.get(userOrg.get(JsonKey.ORGANISATION_ID));
        // BUGFIX: a membership can reference an org id with no org record;
        // previously orgMap.get(...) threw an NPE that aborted the whole
        // decoration loop via the outer catch.
        if (orgMap != null) {
          userOrg.put(JsonKey.ORG_NAME, orgMap.get(JsonKey.ORG_NAME));
        }
        userOrg.remove(JsonKey.ROLES);
      }
    }
  } catch (Exception e) {
    logger.error(e.getMessage(), e);
  }
  return userOrgList;
}
/**
 * Builds the welcome ("onboarding") email request for a new user.
 *
 * @param emailTemplateMap template values; must contain EMAIL for a mail to
 *     be built, and is mutated with subject, body, recipients and links
 * @return a background email-service Request, or null when no EMAIL is
 *     present or when no required-action link could be generated
 */
public static Request sendOnboardingMail(Map<String, Object> emailTemplateMap) {
  Request request = null;
  if ((StringUtils.isNotBlank((String) emailTemplateMap.get(JsonKey.EMAIL)))) {
    String envName = ProjectUtil.getConfigValue(JsonKey.SUNBIRD_INSTALLATION_DISPLAY_NAME);
    String welcomeSubject = propertiesCache.getProperty(JsonKey.ONBOARDING_MAIL_SUBJECT);
    emailTemplateMap.put(JsonKey.SUBJECT, ProjectUtil.formatMessage(welcomeSubject, envName));
    List<String> reciptientsMail = new ArrayList<>();
    reciptientsMail.add((String) emailTemplateMap.get(JsonKey.EMAIL));
    emailTemplateMap.put(JsonKey.RECIPIENT_EMAILS, reciptientsMail);
    emailTemplateMap.put(
        JsonKey.BODY, propertiesCache.getProperty(JsonKey.ONBOARDING_WELCOME_MAIL_BODY));
    emailTemplateMap.put(JsonKey.NOTE, propertiesCache.getProperty(JsonKey.MAIL_NOTE));
    emailTemplateMap.put(JsonKey.ORG_NAME, envName);
    String welcomeMessage = propertiesCache.getProperty(JsonKey.ONBOARDING_MAIL_MESSAGE);
    emailTemplateMap.put(
        JsonKey.WELCOME_MESSAGE, ProjectUtil.formatMessage(welcomeMessage, envName));
    emailTemplateMap.put(JsonKey.EMAIL_TEMPLATE_TYPE, "welcome");
    // Populates LINK / SET_PW_LINK from the generated required-action link.
    setRequiredActionLink(emailTemplateMap);
    if (StringUtils.isBlank((String) emailTemplateMap.get(JsonKey.SET_PASSWORD_LINK))
        && StringUtils.isBlank((String) emailTemplateMap.get(JsonKey.VERIFY_EMAIL_LINK))) {
      logger.info("Util:sendOnboardingMail: Email not sent as generated link is empty");
      return null;
    }
    request = new Request();
    request.setOperation(BackgroundOperations.emailService.name());
    request.put(JsonKey.EMAIL_REQUEST, emailTemplateMap);
  }
  return request;
}
/**
 * Copies whichever required-action link is present into LINK, preferring the
 * set-password link over the verify-email link, and flags SET_PW_LINK so
 * templates can tell which kind of link they carry.
 *
 * @param templateMap template values mutated in place
 */
private static void setRequiredActionLink(Map<String, Object> templateMap) {
  String setPasswordLink = (String) templateMap.get(JsonKey.SET_PASSWORD_LINK);
  if (StringUtils.isNotBlank(setPasswordLink)) {
    templateMap.put(JsonKey.LINK, setPasswordLink);
    templateMap.put(JsonKey.SET_PW_LINK, "true");
    return;
  }
  String verifyEmailLink = (String) templateMap.get(JsonKey.VERIFY_EMAIL_LINK);
  if (StringUtils.isNotBlank(verifyEmailLink)) {
    templateMap.put(JsonKey.LINK, verifyEmailLink);
    templateMap.put(JsonKey.SET_PW_LINK, null);
  }
}
/**
 * Generates a Keycloak required-action link for the user: an update-password
 * link when no password is set, otherwise a verify-email link. The link is
 * stored in {@code templateMap} (SET_PASSWORD_LINK or VERIFY_EMAIL_LINK) and
 * also returned.
 *
 * @param templateMap carries USERNAME, optional PASSWORD and REDIRECT_URI;
 *     mutated with the generated link
 * @param isUrlShortRequired whether to run the link through the URL shortener
 * @param context request context used for tracing
 * @return the (possibly shortened) link
 */
public static String getUserRequiredActionLink(
    Map<String, Object> templateMap, boolean isUrlShortRequired, RequestContext context) {
  URLShortner urlShortner = new URLShortnerImpl();
  String redirectUri =
      StringUtils.isNotBlank((String) templateMap.get(JsonKey.REDIRECT_URI))
          ? ((String) templateMap.get(JsonKey.REDIRECT_URI))
          : null;
  logger.info(context, "Util:getUserRequiredActionLink redirectURI = " + redirectUri);
  if (StringUtils.isBlank((String) templateMap.get(JsonKey.PASSWORD))) {
    String url =
        KeycloakRequiredActionLinkUtil.getLink(
            (String) templateMap.get(JsonKey.USERNAME),
            redirectUri,
            KeycloakRequiredActionLinkUtil.UPDATE_PASSWORD,
            context);
    // BUGFIX: shorten once and reuse — the old code called shortUrl() twice,
    // doing duplicate work and risking a different short URL in the map
    // than the one returned.
    String link = isUrlShortRequired ? urlShortner.shortUrl(url) : url;
    templateMap.put(JsonKey.SET_PASSWORD_LINK, link);
    return link;
  } else {
    String url =
        KeycloakRequiredActionLinkUtil.getLink(
            (String) templateMap.get(JsonKey.USERNAME),
            redirectUri,
            KeycloakRequiredActionLinkUtil.VERIFY_EMAIL,
            context);
    String link = isUrlShortRequired ? urlShortner.shortUrl(url) : url;
    templateMap.put(JsonKey.VERIFY_EMAIL_LINK, link);
    return link;
  }
}
/**
 * Convenience overload of
 * {@link #getUserRequiredActionLink(Map, boolean, RequestContext)} that
 * always shortens the generated URL; the link is delivered via templateMap.
 */
public static void getUserRequiredActionLink(
    Map<String, Object> templateMap, RequestContext context) {
  getUserRequiredActionLink(templateMap, true, context);
}
/**
 * Sends the welcome SMS to a newly created user's phone number.
 *
 * <p>Requires PHONE in {@code userMap}; silently returns when no
 * required-action link could be generated. Uses the user's COUNTRY_CODE or
 * the configured default. Success/failure is only logged.
 *
 * @param userMap user attributes; mutated by setRequiredActionLink
 */
public static void sendSMS(Map<String, Object> userMap) {
  if (StringUtils.isNotBlank((String) userMap.get(JsonKey.PHONE))) {
    String envName = ProjectUtil.getConfigValue(JsonKey.SUNBIRD_INSTALLATION_DISPLAY_NAME);
    // Populates LINK / SET_PW_LINK from the generated required-action link.
    setRequiredActionLink(userMap);
    if (StringUtils.isBlank((String) userMap.get(JsonKey.SET_PASSWORD_LINK))
        && StringUtils.isBlank((String) userMap.get(JsonKey.VERIFY_EMAIL_LINK))) {
      logger.info("Util:sendSMS: SMS not sent as generated link is empty");
      return;
    }
    Map<String, String> smsTemplate = new HashMap<>();
    smsTemplate.put("instanceName", envName);
    smsTemplate.put(JsonKey.LINK, (String) userMap.get(JsonKey.LINK));
    smsTemplate.put(JsonKey.SET_PW_LINK, (String) userMap.get(JsonKey.SET_PW_LINK));
    String sms = ProjectUtil.getSMSBody(smsTemplate);
    // Fall back to the default welcome text when templating yields nothing.
    if (StringUtils.isBlank(sms)) {
      sms = PropertiesCache.getInstance().getProperty(JsonKey.SUNBIRD_DEFAULT_WELCOME_MSG);
    }
    logger.info("SMS text : " + sms);
    String countryCode = "";
    if (StringUtils.isBlank((String) userMap.get(JsonKey.COUNTRY_CODE))) {
      countryCode =
          PropertiesCache.getInstance().getProperty(JsonKey.SUNBIRD_DEFAULT_COUNTRY_CODE);
    } else {
      countryCode = (String) userMap.get(JsonKey.COUNTRY_CODE);
    }
    ISmsProvider smsProvider = SMSFactory.getInstance();
    logger.info("SMS text : " + sms + " with phone " + (String) userMap.get(JsonKey.PHONE));
    boolean response = smsProvider.send((String) userMap.get(JsonKey.PHONE), countryCode, sms);
    logger.info("Response from smsProvider : " + response);
    if (response) {
      logger.info("Welcome Message sent successfully to ." + (String) userMap.get(JsonKey.PHONE));
    } else {
      logger.info("Welcome Message failed for ." + (String) userMap.get(JsonKey.PHONE));
    }
  }
}
/**
 * Builds the reset-password notification request: an email when EMAIL is
 * present, otherwise an SMS when PHONE is present.
 *
 * @param emailTemplateMap template values; must already contain a non-blank
 *     SET_PASSWORD_LINK, and is mutated with subject/recipients or SMS body
 * @return a background email-service Request, or null when the link is
 *     missing or neither email nor phone is available
 */
public static Request sendResetPassMail(Map<String, Object> emailTemplateMap) {
  Request request = null;
  // The reset link must have been generated before calling this method.
  if (StringUtils.isBlank((String) emailTemplateMap.get(JsonKey.SET_PASSWORD_LINK))) {
    logger.info("Util:sendResetPassMail: Email not sent as generated link is empty");
    return null;
  } else if ((StringUtils.isNotBlank((String) emailTemplateMap.get(JsonKey.EMAIL)))) {
    String envName = ProjectUtil.getConfigValue(JsonKey.SUNBIRD_INSTALLATION_DISPLAY_NAME);
    String welcomeSubject = propertiesCache.getProperty(JsonKey.SUNBIRD_RESET_PASS_MAIL_SUBJECT);
    emailTemplateMap.put(JsonKey.SUBJECT, ProjectUtil.formatMessage(welcomeSubject, envName));
    List<String> reciptientsMail = new ArrayList<>();
    reciptientsMail.add((String) emailTemplateMap.get(JsonKey.EMAIL));
    emailTemplateMap.put(JsonKey.RECIPIENT_EMAILS, reciptientsMail);
    emailTemplateMap.put(JsonKey.ORG_NAME, envName);
    emailTemplateMap.put(JsonKey.EMAIL_TEMPLATE_TYPE, "resetPassword");
    setRequiredActionLink(emailTemplateMap);
  } else if (StringUtils.isNotBlank((String) emailTemplateMap.get(JsonKey.PHONE))) {
    // No email available — deliver the link via SMS instead.
    emailTemplateMap.put(
        JsonKey.BODY,
        ProjectUtil.formatMessage(
            propertiesCache.getProperty("sunbird_reset_pass_msg"),
            (String) emailTemplateMap.get(JsonKey.SET_PASSWORD_LINK)));
    emailTemplateMap.put(JsonKey.MODE, "SMS");
    List<String> phoneList = new ArrayList<String>();
    phoneList.add((String) emailTemplateMap.get(JsonKey.PHONE));
    emailTemplateMap.put(JsonKey.RECIPIENT_PHONES, phoneList);
  } else {
    logger.info("Util:sendResetPassMail: requested data is neither having email nor phone ");
    return null;
  }
  request = new Request();
  request.setOperation(BackgroundOperations.emailService.name());
  request.put(JsonKey.EMAIL_REQUEST, emailTemplateMap);
  return request;
}
/**
 * Returns the default user-attribute map used when syncing user records:
 * legacy/optional fields default to null, while EMAIL_VERIFIED and
 * PHONE_VERIFIED default to true.
 *
 * @return a fresh mutable map of default user attributes
 */
public static Map<String, Object> getUserDefaultValue() {
  Map<String, Object> user = new HashMap<>();
  // Legacy/optional fields that default to "not set".
  String[] nullableKeys = {
    "avatar", "gender", "grade", "language", "lastLoginTime", "location",
    "profileSummary", "profileVisibility", "tempPassword", "thumbnail",
    "registryId", "accesscode", "subject", "webPages", "currentLoginTime",
    "password", "loginId"
  };
  for (String key : nullableKeys) {
    user.put(key, null);
  }
  user.put(JsonKey.EMAIL_VERIFIED, true);
  user.put(JsonKey.PHONE_VERIFIED, true);
  return user;
}
/**
 * Returns the default organisation-attribute map used when syncing org
 * records; every legacy/optional field defaults to null.
 *
 * @return a fresh mutable map of default organisation attributes
 */
public static Map<String, Object> getOrgDefaultValue() {
  Map<String, Object> org = new HashMap<>();
  for (String key :
      new String[] {
        "dateTime", "preferredLanguage", "approvedBy", "addressId", "approvedDate",
        "communityId", "homeUrl", "imgUrl", "isApproved", "locationId", "noOfMembers",
        "orgCode", "theme", "thumbnail", "isDefault", "parentOrgId", "orgTypeId", "orgType"
      }) {
    org.put(key, null);
  }
  return org;
}
}
|
#! /usr/bin/env bash
# Stops the execution of this script if an error occurs.
set -e
# ANSI color codes used by the logging helpers below.
GREEN="\033[0;32m"
RED="\033[0;31m"
YELLOW="\033[1;33m"
BLUE="\033[1;34m"
NC="\033[0m" # reset / "no color"
# Image used when the caller does not pass one on the command line.
DEFAULT_DOCKER_IMAGE="horizonrobotics/alf:0.0.6-pytorch1.8-python3.7"
# Print the command-line usage text. %b interprets the embedded ANSI escape
# sequences, matching the behavior of `echo -e`.
function box::cli_help() {
  printf '%b\n' "Usage: ${BLUE}./box.sh${NC} [DOCKER_IMAGE]"
  printf '%b\n' ""
  printf '%b\n' "  Start an ephemeral standard docker container as Alf development environment"
  printf '%b\n' ""
  printf '%b\n' "Options:"
  printf '%b\n' "  DOCKER_IMAGE: specify the docker image, or otherwise the default will be used."
}
# ---- Logging Helpers ----
# Print a success message.
function box::ok() {
  echo -e "[ ${GREEN}ok${NC} ] $1"
}
# Print an error message and abort the script with a non-zero status.
function box::fail() {
  echo -e "[${RED}FAIL${NC}] $1"
  # BUGFIX: `exit -1` is not a valid exit status (bash reports it as 255);
  # use the conventional 1 for failure.
  exit 1
}
# Print a warning message.
function box::warn() {
  echo -e "[${YELLOW}WARN${NC}] $1"
}
# Print an informational message.
function box::info() {
  echo -e "[${BLUE}info${NC}] $1"
}
# ---- Actual Implementation ----
# Echo the absolute directory containing this script, regardless of the
# caller's working directory (resolved from BASH_SOURCE).
function box::get_script_dir() {
  echo "$(cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd)"
}
# Echo the Alf repository root, which is two directory levels above this
# script (<alf>/tools/<dir>/box.sh).
function box::get_alf_dir() {
  local here
  here="$(box::get_script_dir)"
  echo "$(dirname "$(dirname "${here}")")"
}
# Create (or reuse) the alf-dev-box development container.
#   $1: absolute path of the Alf repository (must live under $HOME)
#   $2: directory containing this script (provides the rc-file overrides)
#   $3: docker image to launch
function box::init_container() {
  local alf_dir=$1
  local script_dir=$2
  local image=$3
  # Make sure that docker is installed.
  if ! [ -x "$(command -v docker)" ]; then
    box::fail "Command 'docker' not found. Please install docker first."
  fi
  # This tests whether ${alf_dir} is under the user's home directory. This is
  # an assumption through out this script.
  if [[ "${alf_dir##${HOME}}" = "${alf_dir}" ]]; then
    box::fail "Alf directory ${alf_dir} is not under your home directory ${HOME}"
  else
    box::ok "Confirmed that alf directory is ${alf_dir}, which is under ${HOME}"
  fi
  local container=$(docker ps -a -q -f name=alf-dev-box)
  if [ ! -z "${container}" ]; then
    local exited=$(docker ps -aq -f status=exited -f name=alf-dev-box)
    if [ ! -z "${exited}" ]; then
      # The existing container alf-dev-box has exited, so we can safely remove it.
      docker rm "${exited}"
      box::ok "Deprecated the already exited alf-dev-box"
    else
      box::info "There is an active container named alf-dev-box, and it will be used."
      return 0
    fi
  fi
  box::info "Launching docker container from ${image} ..."
  # BUGFIX: bash does not set $GID, so the original `--user $UID:$GID`
  # silently passed an empty group; resolve the primary group via `id -g`.
  # BUGFIX: the X11 socket directory is /tmp/.X11-unix (the original mounted
  # the non-existent /tmp.X11-unix onto a lowercase target).
  docker run -dit \
    --name "alf-dev-box" \
    --user "${UID}:$(id -g)" \
    -v "/etc/passwd:/etc/passwd:ro" \
    -v "/etc/group:/etc/group:ro" \
    -v "/etc/shadow:/etc/shadow:ro" \
    -v "/home/${USER}:/home/${USER}" \
    -v "${script_dir}/bashrc.override:/home/${USER}/.bashrc:ro" \
    -v "${script_dir}/inputrc.override:/home/${USER}/.inputrc:ro" \
    -v "/tmp/.X11-unix:/tmp/.X11-unix:ro" \
    --workdir "${alf_dir}" \
    --network host \
    ${image} /bin/bash
  container=$(docker ps -a -q -f name=alf-dev-box)
  # rsync is needed inside the box; install it as root, then install Alf
  # in editable mode as the regular user.
  docker exec -u 0 alf-dev-box /bin/bash -c "apt update && apt install -y rsync"
  docker exec alf-dev-box /bin/bash -c "pip3 install -e ${alf_dir}"
  box::ok "Successfully launched alf-dev-box with id ${container}"
}
# Attach an interactive shell to the running alf-dev-box container.
function box::enter_container() {
  box::ok "Entering the container"
  docker exec -it alf-dev-box /bin/bash
}
# Entry point: parse the single optional argument (docker image or --help),
# then launch/reuse the container and drop the user into it.
function box::main() {
  local argument="${DEFAULT_DOCKER_IMAGE}"
  if [ "$#" -eq 1 ]; then
    argument="$1"
  fi
  case "$argument" in
    --help)
      box::cli_help
      ;;
    *)
      local alf_dir="$(box::get_alf_dir)"
      local script_dir="$(box::get_script_dir)"
      box::init_container "${alf_dir}" "${script_dir}" "${argument}"
      local success=$?
      # NOTE: with `set -e` a failing init_container already aborts the
      # script, so this check is belt-and-braces.
      if [ ! "${success}" -eq "0" ]; then
        # BUGFIX: was "box:fail" (single colon), which would have died with
        # "command not found" instead of printing the intended error.
        box::fail "Running into error in launching the container."
      fi
      box::enter_container
      # At this point the user finished operating in the container.
      box::ok "Leaving the alf-dev-box container. The container remains active."
      ;;
  esac
}
box::main "$@"
|
#!/bin/sh
# For every shard script matching *_*.sh (excluding anything under ./_*),
# refresh its trailing " N=..." parameter from the matching line in ./cache.txt.
for i in $(find . -name '*_*.sh' | grep -E -v -e '^./_'); do
  # Strip any ":<suffix>" decoration from the path.
  j=$(echo "$i" | sed -e 's#:[^:]*##g')
  # Look up the cached line for this script (was a useless `cat | grep`).
  k=$(grep "$j" ./cache.txt)
  # Reduce the cached line to its "N=..." tail.
  m=$(echo "$k" | sed -e 's#[^:]*: ##g;s#.* N=#N=#g')
  # Splice the cached parameters into the script in place.
  sed -i "s# N=.*# $m#" "$i"
  echo "loaded $i"
done
|
-- Fetch the 3 most recent rows, newest first, ordered by the time_stamp column.
SELECT * FROM table_name
ORDER BY time_stamp DESC
LIMIT 3;
// repo: benwhitehair/pinkslipproperty.com.au
import React from 'react';
import '../css/background.css';
// CTA3: full-width pink call-to-action banner with a heading and a
// "More info" button linking to the in-page #services section.
const CTA3 = () => (
  <section className="background bg-pink px-4 py-16 text-center text-white">
    <div className="flex flex-wrap items-center justify-around leading-none max-w-2xl mx-auto">
      <h2 className="font-condensed mb-4 md:mb-0 md:px-4 text-4xl uppercase">
        Call us to find out more about pink slips
      </h2>
      <div className="flex items-center justify-center shadow">
        <a
          className="bg-indigo-darker font-bold font-condensed inline-block px-16 py-6 no-underline text-white"
          href="#services"
        >
          More info
        </a>
      </div>
    </div>
  </section>
);
export default CTA3;
|
/**
* Copyright 2009 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS-IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import("sqlbase.sqlobj");
import("stringutils");
import("etherpad.pro.pro_accounts");
import("etherpad.pro.pro_accounts.getSessionProAccount");
jimport("java.lang.System.out.println");
// Name of the browser cookie that carries the auto-sign-in token.
var _COOKIE_NAME = "PUAS";
// Debug logger for this module; flip the constant to true to enable output.
function dmesg(m) {
  if (false) {
    println("[pro-account-auto-sign-in]: "+m);
  }
}
// If no account is currently signed in, look for an auto-sign-in cookie and,
// when it matches an unexpired pro_accounts_auto_signin record, sign that
// account in and redirect through the sign-in page back to the requested URL.
function checkAutoSignin() {
  dmesg("checking auto sign-in...");
  if (pro_accounts.isAccountSignedIn()) {
    dmesg("account already signed in...");
    // don't mess with already signed-in account
    return;
  }
  var cookie = request.cookies[_COOKIE_NAME];
  if (!cookie) {
    dmesg("no auto-sign-in cookie found...");
    return;
  }
  var record = sqlobj.selectSingle('pro_accounts_auto_signin', {cookie: cookie}, {});
  if (!record) {
    return;
  }
  var now = +(new Date);
  if (+record.expires < now) {
    // Expired token: remove both the DB record and the browser cookie.
    sqlobj.deleteRows('pro_accounts_auto_signin', {id: record.id});
    response.deleteCookie(_COOKIE_NAME);
    dmesg("deleted expired record...");
    return;
  }
  // do auto-signin (bypasses normal security)
  dmesg("Doing auto sign in...");
  var account = pro_accounts.getAccountById(record.accountId);
  pro_accounts.signInSession(account);
  response.redirect('/ep/account/sign-in?cont='+encodeURIComponent(request.url));
}
// Records (or clears) the "remember me" state for the signed-in account:
// always sets the insecure ASIE marker cookie, and when rememberMe is true
// also stores a random token in the DB and a secure PUAS cookie for 30 days.
function setAutoSigninCookie(rememberMe) {
  if (!pro_accounts.isAccountSignedIn()) {
    return; // only call this function after account is already signed in.
  }
  var accountId = getSessionProAccount().id;
  // delete any existing auto-signins for this account.
  sqlobj.deleteRows('pro_accounts_auto_signin', {accountId: accountId});
  // set this insecure cookie just to indicate that account is auto-sign-in-able
  response.setCookie({
    name: "ASIE",
    value: (rememberMe ? "T" : "F"),
    path: "/",
    domain: request.domain,
    expires: new Date(32503708800000), // year 3000
  });
  if (!rememberMe) {
    return;
  }
  // Random opaque token links the browser cookie to the DB record.
  var cookie = stringutils.randomHash(16);
  var now = +(new Date);
  var expires = new Date(now + 1000*60*60*24*30); // 30 days
  //var expires = new Date(now + 1000 * 60 * 5); // 5 minutes (debug value)
  sqlobj.insert('pro_accounts_auto_signin', {cookie: cookie, accountId: accountId, expires: expires});
  // Cookie itself never expires; the server-side record enforces the 30 days.
  response.setCookie({
    name: _COOKIE_NAME,
    value: cookie,
    path: "/ep/account/",
    domain: request.domain,
    expires: new Date(32503708800000), // year 3000
    secure: true
  });
}
|
# gh_stars: 0
# Return the given text prefixed with "Classic ".
# (concat is the method form of the << append operator used originally.)
def concatenate_example(text)
  "Classic ".concat(text)
end
# Return a greeting of the form "Hello <name>!".
def concatenate(name)
  greeting = "Hello " + name
  greeting + "!"
end
# Return the first four characters of +text+ (fewer if it is shorter).
def substrings(text)
  text.slice(0, 4)
end
# Return a copy of +text+ with the first character upcased and the
# remainder downcased (String#capitalize semantics).
def capitalize(text)
  text.capitalize
end
# Return a fully upper-cased copy of +text+.
def uppercase(text)
  text.upcase
end
# Return a fully lower-cased copy of +text+.
def downcase(text)
  text.downcase
end
# Return true when +text+ has no characters at all.
def empty_string(text)
  text.size == 0
end
# Return the number of characters in +text+ (size is an alias of length).
def string_length(text)
  text.size
end
# Return a character-wise reversed copy of +text+.
def reverse(text)
  text.chars.reverse.join
end
# Return +text+ with every whitespace run removed (split on whitespace,
# then rejoin — equivalent to gsub(/\s+/, "")).
def space_remover(text)
  text.split(/\s+/).join
end
|
// gh_stars: 100-1000
//index.js
//获取应用实例
// Global app instance: provides globalData (API key, plate info, cached results).
var app = getApp()
// Page showing a vehicle's traffic-violation query results.
Page({
  data: {
    windowHeigh:0, // usable window height in px (set in onLoad)
    hphm:"", // license plate number
    score:"", // total demerit points of unhandled violations
    listCount:0, // total number of violation records
    moneyCount:0, // total fine amount of unhandled violations
    queryWZUrl:"https://api.chinadatapay.com/government/traffic/167/3", // violation-records API endpoint
    resultData:[
      {
        "time": "2016-07-08 07:16:32",
        "address": "[西湖区]长江路_长江路竞舟北路口(长江路)11111111",
        "content": "不按规定停放影响其他车辆和行人通行的1111111111111",
        "legalnum": "7003",
        "price": "150",
        "score": "0",
        "number": "",
        "illegalid": "4821518"
      }
    ] // placeholder/sample data shown before the real query returns
  },
  onLoad: function () {
    // NOTE(review): throws if globalData.resultData is null; the null case is
    // only handled further below — confirm this log line is safe to keep.
    console.log(app.globalData.resultData.lists)
    var that = this
    // Fetch global user info via the app instance's helper.
    app.getUserInfo(function(userInfo){
      // Update page data with the fetched user info.
      that.setData({
        userInfo:userInfo
      })
    })
    wx.getSystemInfo({
      success: function(res) {
        that.setData({
          windowHeigh:res.windowHeight
        })
      }
    })
    // Use the cached result when available; otherwise query the API.
    if(app.globalData.resultData == null){
      that.queryData();
    }else{
      that.setResultData(app.globalData.resultData);
    }
  },
  // Query the remote violation-records API with the vehicle identifiers
  // stored in globalData, then render the result (or the error message).
  queryData:function(){
    var that = this;
    var cphm = app.globalData.cphm; // plate number
    var cjh = app.globalData.cjh; // VIN / frame number
    var fdjh = app.globalData.fdjh; // engine number
    var lstype = app.globalData.lstype; // plate type
    var lsprefix = app.globalData.lsprefix; // plate prefix (province code)
    var carorg = app.globalData.carorg; // registration region
    wx.request({
      url: this.data.queryWZUrl,
      data: {
        key: app.globalData.appKey,
        lsprefix:lsprefix,
        lsnum:cphm,
        lstype:lstype,
        frameno:cjh,
        engineno:fdjh,
        carorg:carorg
      },
      header: {
        'content-type': 'application/json'
      },
      success: function(res) {
        console.log(res.data);
        // "10000" is the provider's success code.
        if(res.data.code == "10000"){
          that.setResultData(res.data.data.list);
        }else{
          that.setData({
            errorTip:res.data.message
          })
        }
      }
    })
  },
  // Aggregate the raw violation list into page data: total score, count and
  // fines of unhandled records, and a human-readable "handled" label per row.
  setResultData:function(resultData){
    var that = this;
    console.log("resultData="+resultData);
    var hphm = null;
    var score = 0;
    var listCount = resultData.length;
    var moneyCount = 0;
    hphm = app.globalData.hphm;
    var myDate = new Date();
    for(var i=0;i<resultData.length;i++){
      if(resultData[i].handled != "1"){ // not yet handled
        score = score+parseInt(resultData[i].score);
        moneyCount = moneyCount+ parseInt(resultData[i].price);
      }
      // Map the provider's "handled" flag to a display label
      // ("未处理" = unhandled, "已处理" = handled).
      if(resultData[i].handled == "0"){
        resultData[i].handled="未处理";
      }else if(resultData[i].handled == "1"){
        resultData[i].handled="已处理";
      }else{
        resultData[i].handled="未处理";
      }
    }
    that.setData({
      hphm:hphm,
      score:score,
      listCount:listCount,
      moneyCount:moneyCount,
      resultData:resultData
    });
    console.log("hphm="+that.data.hphm);
    console.log("score="+that.data.score);
    console.log("listCount="+that.data.listCount);
    console.log("moneyCount="+that.data.moneyCount);
  },
  // Start a phone call to the support hotline.
  call:function(){
    wx.makePhoneCall({
      phoneNumber: '13563955627'
    })
  }
})
|
#!/bin/bash
# Rebuild the webapp from scratch: drop downloaded front-end dependencies,
# clear the bower cache, then run a clean Maven build.
rm -rf src/main/webapp/bower_components
bower cache clean
mvn clean install
<reponame>tailorcai/lang
/*
* Copyright 2017 The Mifos Initiative
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.mifos.core.lang.validation;
import io.mifos.core.lang.DateConverter;
import io.mifos.core.lang.validation.constraints.ValidLocalDateTimeString;
import org.junit.Assert;
import org.junit.Test;
import javax.annotation.Nullable;
import javax.validation.constraints.NotNull;
import java.time.LocalDateTime;
import static io.mifos.core.lang.validation.TestHelper.isValid;
/**
* @author <NAME>
*/
/**
 * Unit tests for the {@code @ValidLocalDateTimeString} constraint.
 *
 * @author <NAME>
 */
public class ValidLocalDateTimeStringTest {

  /** A freshly generated ISO date-time string must pass validation. */
  @Test
  public void now()
  {
    final AnnotatedClassNullable subject =
        new AnnotatedClassNullable(DateConverter.toIsoString(LocalDateTime.now()));
    Assert.assertTrue(isValid(subject));
  }

  /** Arbitrary non-date text must be rejected. */
  @Test
  public void invalidString()
  {
    final AnnotatedClassNullable subject =
        new AnnotatedClassNullable("This is not a date time.");
    Assert.assertFalse(isValid(subject));
  }

  /** null is accepted when the field is only annotated {@code @Nullable}. */
  @Test
  public void nullLocalDateTimeStringAllowed()
  {
    final AnnotatedClassNullable subject = new AnnotatedClassNullable(null);
    Assert.assertTrue(isValid(subject));
  }

  /** null is rejected when the field is additionally annotated {@code @NotNull}. */
  @Test
  public void nullLocalDateTimeStringNotAllowed()
  {
    final AnnotatedClassNotNull subject = new AnnotatedClassNotNull(null);
    Assert.assertFalse(isValid(subject));
  }

  /** Fixture whose annotated field may be null. */
  private static class AnnotatedClassNullable {
    @Nullable
    @ValidLocalDateTimeString()
    String localDateTimeString;

    AnnotatedClassNullable(final String value) {
      this.localDateTimeString = value;
    }
  }

  /** Fixture whose annotated field must be non-null. */
  private static class AnnotatedClassNotNull {
    @NotNull
    @ValidLocalDateTimeString()
    String localDateTimeString;

    AnnotatedClassNotNull(final String value) {
      this.localDateTimeString = value;
    }
  }
}
|
<gh_stars>0
/*
* Copyright 2016 Joyent, Inc.
*/
var exec = require('child_process').exec;
var fs = require('fs');
var path = require('path');
var libuuid = require('uuid');
var MemoryStream = require('readable-stream/passthrough.js');
var bunyan = require('bunyan');
var format = require('util').format;
var vasync = require('vasync');
var logging = require('./lib/logging');
var manta = require('../lib');
/*
 * Globals
 */

var log = logging.createLogger();
var ROOT = '/' + (process.env.MANTA_USER || 'admin') + '/stor';
var PUBLIC = '/' + (process.env.MANTA_USER || 'admin') + '/public';
// Per-run scratch directory so concurrent test runs don't collide.
var TSTDIR = ROOT + '/node-manta-test-muntar-' + libuuid.v4().split('-')[0];

/*
 * Helper functions
 */

// Register `testfunc` as a nodeunit test under `name`.
function test(name, testfunc) {
    module.exports[name] = testfunc;
}

/*
 * Pre- and Post-test actions
 */

// Build a Manta client before each test. Uses MANTA_KEY_ID when present,
// otherwise derives a key id by fingerprinting the local SSH private key.
module.exports.setUp = function (cb) {
    var self = this;
    var url = process.env.MANTA_URL || 'http://localhost:8080';
    var user = process.env.MANTA_USER || 'admin';

    function createClient(signer) {
        self.client = manta.createClient({
            connectTimeout: 1000,
            log: log,
            rejectUnauthorized: (process.env.MANTA_TLS_INSECURE ?
                false : true),
            sign: signer,
            url: url,
            user: user
        });
        cb();
    }

    if (process.env.MANTA_KEY_ID) {
        createClient(manta.cliSigner({
            user: user,
            keyId: process.env.MANTA_KEY_ID
        }));
    } else {
        var f = process.env.SSH_KEY || process.env.HOME + '/.ssh/id_rsa';
        // `ssh-keygen -l` prints "<bits> <fingerprint> ..."; awk grabs the
        // fingerprint, which serves as the signing key id.
        var cmd = 'ssh-keygen -l -f ' +
            f + ' ' +
            '| awk \'{print $2}\'';
        fs.readFile(f, 'utf8', function (err, key) {
            if (err) {
                cb(err);
                return;
            }
            exec(cmd, function (err2, stdout, stderr) {
                if (err2) {
                    (cb(err2));
                    return;
                }
                createClient(manta.privateKeySigner({
                    key: key,
                    keyId: stdout.replace('\n', ''),
                    user: user
                }));
                return;
            });
            return;
        });
    }
};

// Close the client created in setUp after each test.
module.exports.tearDown = function (cb) {
    if (this.client) {
        this.client.close();
        delete this.client;
    }
    cb();
};

/*
 * Tests
 */

// muntar tests
// Each case untars a fixture archive into TSTDIR, then verifies the
// uploaded objects' content type, size and md5 via client.info().
var cases = [
    {
        tarpath: 'corpus/tar1.tar',
        checks: [
            {
                path: 'subdir1/',
                type: 'application/x-json-stream; type=directory'
            },
            {
                path: 'subdir1/test.txt',
                type: 'text/plain',
                size: 24,
                md5: 'jio1WnSoM7CbsXjNHfTqwg=='
            },
            {
                path: 'test.txt',
                type: 'text/plain',
                size: 20,
                md5: 'c6scKv46Y7irTX2ipN2zUQ=='
            }
        ]
    },
    {
        // Skipping, see <https://github.com/joyent/node-manta/issues/259>
        skip: true,
        tarpath: 'corpus/259-emptydir.tar',
        checks: [
            { path: 'emptydir/', type: 'directory' }
        ]
    }
];

cases.forEach(function (c, i) {
    if (c.skip) {
        return;
    }
    var name = format('muntar case %d: %s', i, c.tarpath);
    var cmd = format('%s -f %s %s', path.resolve(__dirname, '../bin/muntar'),
        path.resolve(__dirname, c.tarpath), TSTDIR);
    log.debug({caseName: name, cmd: cmd}, 'run case');

    test(name, function (t) {
        var self = this;
        exec(cmd, function (err, stdout, stderr) {
            t.ifError(err);
            // Verify each expected entry sequentially.
            vasync.forEachPipeline({
                func: function checkOne(check, cb) {
                    var mpath = path.join(TSTDIR, check.path);
                    self.client.info(mpath, function (err2, info) {
                        t.ifError(err2, err2);
                        if (!err2) {
                            t.equal(info.type, check.type, format(
                                '%s is expected type (%s): %s',
                                mpath, check.type, info.type));
                            if (check.size) {
                                t.equal(info.size, check.size, format(
                                    '%s is expected size (%s): %s',
                                    mpath, check.size, info.size));
                            }
                            if (check.md5) {
                                t.equal(info.md5, check.md5, format(
                                    '%s is expected md5 (%s): %s',
                                    mpath, check.md5, info.md5));
                            }
                        }
                        cb();
                    });
                },
                inputs: c.checks
            }, function (err3, results) {
                // Always remove the scratch directory, pass or fail.
                self.client.rmr(TSTDIR, function (rmErr) {
                    t.ifError(rmErr, rmErr);
                    t.done();
                });
            });
        });
    });
});
|
<!-- Read-only list of employees showing name, contact and role. -->
<template>
  <div>
    <h1>Employee List</h1>
    <ul>
      <!-- :key on the stable id keeps list diffing predictable -->
      <li v-for="employee in employees" :key="employee.id">
        <h2>{{ employee.name }}</h2>
        <p>{{ employee.contact }}</p>
        <p>{{ employee.role }}</p>
      </li>
    </ul>
  </div>
</template>
<script>
export default {
  data() {
    // Static demo data; no backend call is made in this component.
    return {
      employees: [
        { id: 1, name: 'John Doe', contact: 'john@email.com', role: 'developer' },
        { id: 2, name: 'Jane Doe', contact: 'jane@email.com', role: 'designer' }
      ]
    };
  }
}
</script>
/*
* Copyright © 2017 Atomist, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import "mocha";
import * as assert from "power-assert";
import { EventFired, HandlerContext } from "@atomist/automation-client/Handlers";
import { NotifyOnPush } from "../../src/events/NotifyOnPush";
import { PushWithRepo } from "../../src/typings/types";
describe("NotifyOnPush", () => {

    const nop = new NotifyOnPush();
    const sha = "ad31a1182a194c09b960d75b0f4002be1bbca288";
    const channels: PushWithRepo.Channels[] = [
        { name: "10-day" },
        { name: "acid-rap" },
        { name: "coloring-book" },
    ];

    /**
     * Build a Push event fixture whose repo is mapped to the given channels.
     * Factored out: all three tests previously duplicated this literal.
     */
    function pushEventWith(repoChannels: PushWithRepo.Channels[]): EventFired<PushWithRepo.Subscription> {
        return {
            data: {
                Push: [{
                    after: {
                        sha,
                    },
                    repo: {
                        channels: repoChannels,
                    },
                }],
            },
        } as EventFired<PushWithRepo.Subscription>;
    }

    /**
     * Create a HandlerContext whose message client records the last
     * addressChannels() call into the returned `captured` object.
     */
    function recordingContext(): { ctx: HandlerContext, captured: { message?: string, channels?: string[] } } {
        const captured: { message?: string, channels?: string[] } = {};
        const ctx = {
            messageClient: {
                addressChannels(msg: string, toChannels: string[]): Promise<any> {
                    captured.message = msg;
                    captured.channels = toChannels;
                    return Promise.resolve(msg);
                },
            },
        } as HandlerContext;
        return { ctx, captured };
    }

    it("should send a notification to a channel", done => {
        const { ctx, captured } = recordingContext();
        nop.handle(pushEventWith([channels[0]]), ctx).then(result => {
            assert(result.code === 0);
            // Message mentions the pushed sha and targets exactly the mapped channel.
            assert(captured.message.indexOf(sha) > 0);
            assert(captured.channels.length === 1);
            assert(captured.channels[0] === channels[0].name);
        }).then(done, done);
    });

    it("should send a notification to all repo channels", done => {
        const { ctx, captured } = recordingContext();
        nop.handle(pushEventWith(channels), ctx).then(result => {
            assert(result.code === 0);
            assert(captured.message.indexOf(sha) > 0);
            assert.deepEqual(captured.channels, channels.map(c => c.name));
        }).then(done, done);
    });

    it("should send no notifications if no channels", done => {
        const { ctx, captured } = recordingContext();
        nop.handle(pushEventWith([]), ctx).then(result => {
            assert(result.code === 0);
            // The message client must never have been invoked.
            assert(captured.message === undefined);
            assert(captured.channels === undefined);
        }).then(done, done);
    });

});
|
//
// BrandModel.h
// MyCar
//
// Created by 🐵 on 16-5-30.
// Copyright (c) 2016年 MC. All rights reserved.
//
#import <Foundation/Foundation.h>
/// Plain data model describing a car brand.
@interface BrandModel : NSObject

/// Brand introduction / description text.
@property (nonatomic,copy) NSString * introduction;
/// Explanation of the brand logo's meaning.
@property (nonatomic,copy) NSString * logoMeaning;
/// Identifier of the parent (master) brand — presumably a server-side id; verify against the API.
@property (nonatomic,copy) NSString * masterId;
/// Display name of the parent (master) brand.
@property (nonatomic,copy) NSString * masterName;

@end
|
from flask_wtf import FlaskForm
from wtforms import StringField, DateField, DecimalField, SelectField, IntegerField
from wtforms.validators import DataRequired, Length, NumberRange
from datetime import datetime, timedelta
class AddFixedDepositForm(FlaskForm):
    """Form for opening a fixed-deposit account.

    Collects the account number, start date, interest rate/scheme and
    deposit period; the end date is derived from start date + period.
    """

    # (value, label) pairs for the interest payout scheme selector.
    ischemes = [('Monthly', 'Monthly'),
                ('Quarterly', 'Quarterly'),
                ('Yearly', 'Yearly')]

    ac_no = StringField('Acc No', validators=[DataRequired(), Length(min=5, max=50, message="Length range from 5 to 50")])
    start_date = DateField('Start Date dd/mm/yyyy', validators=[DataRequired()], format='%d/%m/%Y')
    end_date = StringField('End Date dd/mm/yyyy')
    interest_rate = DecimalField('Interest Rate (%)', validators=[DataRequired(), NumberRange(min=0, max=100, message="Please enter percentage range 0 to 100%")])
    interest_scheme = SelectField('Interest Scheme', choices=ischemes, validators=[DataRequired()])
    period = IntegerField('Deposit time period (days for now)', validators=[DataRequired(), NumberRange(min=1, message="Please enter a positive integer")])

    def __init__(self, *args, **kwargs):
        super(AddFixedDepositForm, self).__init__(*args, **kwargs)
        # Pre-fill the account number only when the field is still empty.
        # The previous unconditional assignment clobbered values already
        # bound from submitted form data (POST), breaking re-validation.
        if not self.ac_no.data:
            self.ac_no.data = self.get_next_account_number()

    def get_next_account_number(self):
        """Return the next auto-generated account number.

        Placeholder: returns None until a real generator is wired in.
        """
        pass

    def calculate_end_date(self):
        """Set ``end_date`` to ``start_date + period`` days, formatted dd/mm/yyyy.

        Leaves ``end_date`` untouched when either input is missing.
        """
        if self.start_date.data and self.period.data:
            end_date = self.start_date.data + timedelta(days=self.period.data)
            self.end_date.data = end_date.strftime('%d/%m/%Y')
public static boolean isPalindrome(int [] arr) {
int startIndex = 0;
int lastIndex = arr.length - 1;
while (lastIndex > startIndex) {
if (arr[startIndex] != arr[lastIndex]) {
return false;
}
++startIndex;
--lastIndex;
}
return true;
} |
#! /usr/bin/bash
####! /users/PGS0218/zpeng01/local/bin/zsh
####! /usr/local/bin/zsh
####! /bin/bash
# Run the single-query top-M search profiler on SIFT1M and post-process
# the raw output into a formatted table.
if [ $# -ne 2 ]; then
    echo "Usage: $0 <data_directory> <tag>"
    exit
fi
data_dir=$1
tag=$2
#set -x

## OpenMP Affinity for ICC
#export KMP_AFFINITY="verbose,granularity=fine,compact,1,0"
#export KMP_AFFINITY="verbose,granularity=core,compact,1,0"
#export KMP_AFFINITY="verbose,granularity=fine,proclist=[0,4,8,6,2,12,16,18,14,10,20,24,28,26,22,32,36,38,34,30,1,5,9,7,3,13,17,19,15,11,21,25,29,27,23,33,37,39,35,31],explicit"
# For Pitzer
#export KMP_AFFINITY="granularity=fine,compact,0,0"
# For Skylake and KNL
export KMP_AFFINITY="granularity=core,compact,1,0"
## KNL and Pitzer are different

# NOTE(review): assumes the binary was already built into ../cmake-build-release.
cd ../cmake-build-release || exit
bin="./profile_seq_single_query_top_m_search_scale_m"

# Truncate the raw output file for this tag before appending results.
:> output.${tag}.raw.txt
M=8
#for ((mid_iter = 0; mid_iter < 12; ++mid_iter)); do
#${bin} ${data_dir}/sift1m/sift_base.fvecs ${data_dir}/sift1m/sift_query.fvecs ${data_dir}/sift1m/sift.nsg 200 200 output.ivecs ${M} ${data_dir}/sift1m/sift.true-100_NN.q-10000.binary ${mid_iter} | tee -a output.${tag}.raw.txt
#done
${bin} ${data_dir}/sift1m/sift_base.fvecs ${data_dir}/sift1m/sift_query.fvecs ${data_dir}/sift1m/sift.nsg 200 200 output.ivecs ${M} ${data_dir}/sift1m/sift.true-100_NN.q-10000.binary | tee -a output.${tag}.raw.txt

# Reshape the raw log into rows, then keep the per-row minimum.
python3 ../scripts/output_format.py output.${tag}.raw.txt output.${tag}.row.txt 2:4 10 12:226;
#python3 ../scripts/output_format.py output.${tag}.raw.txt output.${tag}.row.txt 2:4 10 12:29;
python3 ../scripts/output_row_minimum.py output.${tag}.row.txt output.${tag}.table.txt 2 0;
|
<reponame>cingireh/stellar
package httpjson
import (
"bytes"
"encoding/json"
"testing"
)
// TestRawObjectMarshaler verifies that a zero-value RawObject serializes
// as the empty JSON object, both standalone and as a struct field.
func TestRawObjectMarshaler(t *testing.T) {
	var empty RawObject
	encoded, err := json.Marshal(empty)
	if err != nil {
		t.Fatal(err)
	}
	if expected := []byte("{}"); !bytes.Equal(encoded, expected) {
		t.Errorf("got: %s, want: %s", string(encoded), string(expected))
	}

	var wrapper struct {
		Input RawObject `json:"input"`
	}
	encoded, err = json.Marshal(wrapper)
	if err != nil {
		t.Fatal(err)
	}
	if expected := []byte(`{"input":{}}`); !bytes.Equal(encoded, expected) {
		t.Errorf("got: %s, want: %s", string(encoded), string(expected))
	}
}
// TestRawObjectUnmarshaler checks that a RawObject field accepts only a
// JSON object (or null) and rejects every other JSON value kind with
// ErrNotJSONObject.
func TestRawObjectUnmarshaler(t *testing.T) {
	table := []struct {
		payload []byte
		wantErr bool
	}{
		{[]byte(`{"input":{}}`), false},              // empty object
		{[]byte(`{"input":{"key":"value"}}`), false}, // object
		{[]byte(`{"input":null}`), false},            // null
		{[]byte(`{"input":[]}`), true},               // empty array
		{[]byte(`{"input":"json string"}`), true},    // string
		{[]byte(`{"input":10}`), true},               // positive number
		{[]byte(`{"input":-10}`), true},              // negative number
		{[]byte(`{"input":false}`), true},            // boolean
		{[]byte(`{"input":true}`), true},             // boolean
	}
	for _, entry := range table {
		var target struct {
			Input RawObject `json:"input"`
		}
		err := json.Unmarshal(entry.payload, &target)
		if entry.wantErr {
			if err != ErrNotJSONObject {
				t.Errorf("case %s wanted error but did not", string(entry.payload))
			}
			continue
		}
		if err != nil {
			t.Errorf("case %s got error %v but shouldn't", string(entry.payload), err)
		}
	}
}
// TestOptStringUnmarshaler checks the IsSet/Valid flags of OptString:
// absent -> unset, JSON null -> set but invalid, string -> set and valid.
func TestOptStringUnmarshaler(t *testing.T) {
	table := []struct {
		payload []byte
		isSet   bool
		valid   bool
	}{
		{[]byte(`{}`), false, false},
		{[]byte(`{"input":null}`), true, false},
		{[]byte(`{"input":"a string"}`), true, true},
	}
	for _, entry := range table {
		var target struct {
			Input OptString `json:"input"`
		}
		if err := json.Unmarshal(entry.payload, &target); err != nil {
			t.Errorf("case %s got error %v but shouldn't", string(entry.payload), err)
			continue
		}
		if target.Input.IsSet != entry.isSet {
			t.Errorf("case %s got IsSet: %t, want: %t ", entry.payload, target.Input.IsSet, entry.isSet)
		}
		if target.Input.Valid != entry.valid {
			t.Errorf("case %s got Valid: %t, want: %t ", entry.payload, target.Input.Valid, entry.valid)
		}
	}
}
|
def manipulate_list(src, m):
    """Multiply each element of ``src`` by ``m`` and return the products as strings.

    Args:
        src: iterable of numbers (the demo below passes character codes).
        m: multiplier applied to every element.

    Returns:
        list[str]: stringified products, in input order.
    """
    # Bug fix: the original applied ord() to each element, which raises
    # TypeError for the integer inputs used below — the elements are
    # already numeric, so multiply them directly.
    return [str(value * m) for value in src]


# Test the function with the given example
src = [65, 66, 67]
m = 2
output = manipulate_list(src, m)
print(output)  # Output: ['130', '132', '134']
<filename>migrations/005_saved_images.rb<gh_stars>0
# Adds a flag tracking whether a post's image has been saved locally.
# NOTE(review): no `down` block — the migration is irreversible as written.
Sequel.migration do
  up do
    alter_table :posts do
      # Non-nullable boolean; existing rows default to "not saved".
      add_column :img_saved, TrueClass, :null => false, :default => false
    end
  end
end
|
// A binary tree node
class Node {
    int data;          // value stored at this node
    Node left, right;  // child links; null when the child is absent

    // Create a leaf node holding the given value.
    Node(int item)
    {
        data = item;
        left = right = null;
    }
}
// Lowest-common-ancestor computation via root-to-node paths.
// NOTE(review): relies on java.util.ArrayList being imported elsewhere
// in the file — verify the import exists.
class BinaryTree {
    Node root;

    // Finds the path from root node to given root of the tree, Stores the
    // path in a vector path[], returns true if path exists otherwise false
    boolean findPath(Node root, ArrayList<Integer> path, int k)
    {
        // base case
        if (root == null)
            return false;

        // Store this node . The node will be removed if
        // not in path from root to k
        path.add(root.data);

        if (root.data == k)
            return true;

        // Check if k is found in left or right sub-tree
        if (root.left != null && findPath(root.left, path, k))
            return true;
        if (root.right != null && findPath(root.right, path, k))
            return true;

        // If not present in subtree rooted with root, remove root from
        // path[] and return false
        path.remove(path.size()-1);
        return false;
    }

    // Returns LCA if node n1 , n2 are present in the given binary tree,
    // otherwise return -1
    // NOTE(review): assumes node values are unique; with duplicates the
    // path comparison may pick the wrong branch.
    int findLCA(int n1, int n2)
    {
        // to store paths to n1 and n2 from the root
        ArrayList<Integer> path1 = new ArrayList<Integer>();
        ArrayList<Integer> path2 = new ArrayList<Integer>();

        // Find paths from root to n1 and root to n2.
        // If either n1 or n2 is not present , return -1
        if (!findPath(root, path1, n1) || !findPath(root, path2, n2))
            return -1;

        /* Compare the paths to get the first different value */
        int i;
        for (i = 0; i < path1.size() && i < path2.size(); i++)
        {
            // Log.d("bb", path1.get(i) + " " + path2.get(i));
            if (!path1.get(i).equals(path2.get(i)))
                break;
        }
        // The last common entry before the divergence is the LCA.
        return path1.get(i-1);
    }

    // Driver code
    public static void main(String[] args)
    {
        BinaryTree tree = new BinaryTree();
        int n1 = 7, n2 = 4;
        tree.root = new Node(2);
        tree.root.left = new Node(7);
        tree.root.right = new Node(4);
        tree.root.left.left = new Node(9);
        tree.root.left.right = new Node(1);
        tree.root.right.left = new Node(3);
        tree.root.right.right = new Node(6);
        System.out.println("LCA(7, 4) = " +
                tree.findLCA(n1, n2));
    }
}
#!/bin/bash
# Fail fast on errors and on use of unset variables.
set -eu

# Check if EOPEN_ROOT environment variable is set
if [ -z "${EOPEN_ROOT-}" ]; then
    echo "Error: EOPEN_ROOT environment variable is not set." >&2
    exit 1
fi

# Source the path.sh script
if ! source "$EOPEN_ROOT/cygwin/path.sh"; then
    echo "Error: Failed to source path.sh script." >&2
    exit 1
fi

# Source the open.sh script
if ! source "$EOPEN_ROOT/share/open.sh"; then
    echo "Error: Failed to source open.sh script." >&2
    exit 1
fi

# Open the specific file using the sourced functions or variables
# NOTE(review): open_specific_file is presumably defined by one of the
# sourced scripts above — verify it exists there.
if ! open_specific_file; then
    echo "Error: Failed to open the specific file." >&2
    exit 1
fi

echo "File opened successfully."
#!/bin/sh
set -e

# Print the commit log for the given range (whole history when the
# argument is empty) in a compact "===hash;date;email" form, stripping
# diff noise lines so only headers and hunk content remain.
log() {
	git log --reverse --date=iso --pretty="===%h;%ad;%ae" --no-merges -U0 $1 \
		|grep -v -e '^diff ' -e '^new file ' -e '^index ' -e '^@@ ' -e '^--- ' -e '^$'
}

PROJECT_NAME="$1"
PROJECT_URL="$2"
BRANCH="$3"

BASE_DIR=$(cd $(dirname $0)/..; pwd)
PROJECT_DIR=${BASE_DIR}/tmp/${PROJECT_NAME}
REPO_DIR=${PROJECT_DIR}/repo
LAST_COMMIT_FILE=${PROJECT_DIR}/last-commit.txt
LOG_FILE=${PROJECT_DIR}/log.txt

mkdir -p "${PROJECT_DIR}"

# Clone on first run, then always sync to the remote branch head.
if [ ! -d "${REPO_DIR}" ]; then
	git clone "${PROJECT_URL}" "${REPO_DIR}"
fi
cd "${REPO_DIR}"
git fetch
git reset --hard origin/${BRANCH}

LAST_COMMIT=$(git log -n 1 --format=%h)
if [ -f "${LAST_COMMIT_FILE}" ]; then
	PREVIOUS_COMMIT=$(cat "${LAST_COMMIT_FILE}")
else
	PREVIOUS_COMMIT=""
fi

# Bug fix: POSIX test(1) uses '=' for string comparison; '==' is a
# bashism that fails under /bin/sh on e.g. dash (and set -e aborts).
if [ "${PREVIOUS_COMMIT}" = "${LAST_COMMIT}" ]; then
	exit 0
fi

if [ -z "${PREVIOUS_COMMIT}" ]; then
	# First run: write the full history.
	log "" > "${LOG_FILE}"
else
	# Incremental run: append only the new commits.
	log ${PREVIOUS_COMMIT}...HEAD >> "${LOG_FILE}"
fi

echo ${LAST_COMMIT} > "${LAST_COMMIT_FILE}"
exit 0
|
// Copyright 2017-2019 @polkadot/app-contracts authors & contributors
// This software may be modified and distributed under the terms
// of the Apache-2.0 license. See the LICENSE file for details.
import { AppProps, I18nProps } from '@polkadot/ui-app/types';
import { TabItem } from '@polkadot/ui-app/Tabs';
import { ComponentProps, LocationProps } from './types';
import React from 'react';
import { Route, Switch } from 'react-router';
import { HelpOverlay, Tabs } from '@polkadot/ui-app';
import introMd from './md/intro.md';
import store from './store';
import translate from './translate';
import Call from './Call';
import Code from './Code';
import Instantiate from './Instantiate';
type Props = AppProps & I18nProps;
type State = {
  tabs: Array<TabItem>,   // tab definitions rendered in the header
  updated: number         // timestamp bumped to force a re-render on store events
};

/**
 * Entry point of the contracts app: renders the tab header and routes
 * between the Call / Instantiate / Code screens, hiding tabs whose
 * prerequisites (stored code / contracts) are missing.
 */
class App extends React.PureComponent<Props, State> {
  state: State;

  constructor (props: Props) {
    super(props);

    const { t } = props;

    // Re-render whenever code or contracts are added to the local store.
    store.on('new-code', this.triggerUpdate);
    store.on('new-contract', this.triggerUpdate);

    // since we have a dep on the async API, we load here
    store.loadAll().catch(() => {
      // noop, handled internally
    });

    this.state = {
      tabs: [
        {
          name: 'call',
          text: t('Call')
        },
        {
          name: 'instantiate',
          text: t('Instance')
        },
        {
          name: 'code',
          text: t('Code')
        }
      ],
      updated: 0
    };
  }

  render () {
    const { basePath } = this.props;
    const { tabs } = this.state;
    // Hide the tabs that cannot be used yet: 'call' needs at least one
    // contract, 'instantiate' needs at least one uploaded code blob.
    const hidden = store.hasContracts
      ? []
      : ['call'];

    if (!store.hasCode) {
      hidden.push('instantiate');
    }

    return (
      <main className='contracts--App'>
        <HelpOverlay md={introMd} />
        <header>
          <Tabs
            basePath={basePath}
            hidden={hidden}
            items={tabs}
          />
        </header>
        <Switch>
          <Route path={`${basePath}/instantiate`} render={this.renderComponent(Instantiate)} />
          <Route path={`${basePath}/code`} render={this.renderComponent(Code)} />
          <Route
            render={
              hidden.includes('call')
                ? (
                  hidden.includes('instantiate')
                    ? this.renderComponent(Code)
                    : this.renderComponent(Instantiate)
                )
                : this.renderComponent(Call)
            }
          />
        </Switch>
      </main>
    );
  }

  // Build a render-prop that forwards routing info and the status callback.
  private renderComponent (Component: React.ComponentType<ComponentProps>) {
    return ({ match }: LocationProps) => {
      const { basePath, location, onStatusChange } = this.props;

      return (
        <Component
          basePath={basePath}
          location={location}
          match={match}
          onStatusChange={onStatusChange}
        />
      );
    };
  }

  // Store event handler: bump `updated` so PureComponent re-renders.
  private triggerUpdate = (): void => {
    this.setState({ updated: Date.now() });
  }
}

export default translate(App);
|
<reponame>tOverney/ADA-Project<filename>web/backend/backend_django/apps/capacity/migrations/0004_auto_20170125_0658.py<gh_stars>0
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Add uniqueness constraints to capacity and path records."""

    dependencies = [
        ('capacity', '0003_capacity_stop'),
    ]

    operations = [
        # A capacity measurement is unique per (trip, stop_time, service_date).
        migrations.AlterUniqueTogether(
            name='capacity',
            unique_together=set([('trip', 'stop_time', 'service_date')]),
        ),
        # A path entry is unique per (trip, stop).
        migrations.AlterUniqueTogether(
            name='path',
            unique_together=set([('trip', 'stop')]),
        ),
    ]
|
#!/usr/bin/env bash
# NOTE(review): $basedir is expected to be set by the calling script — verify.
cd "$basedir/"

echo "Including maven..."
# If mvn is not already on PATH, download a local Maven and add it to PATH.
if ! mvn -v 2> /dev/null; then
    echo "mvn missing"
    if [ ! -d "maven" ]; then
        echo "Downloading maven"
        curl -s http://apache.mirror.iphh.net/maven/maven-3/3.6.3/binaries/apache-maven-3.6.3-bin.tar.gz | tar -xzf -
        mv apache-maven-* maven
    fi
    export PATH="$PATH:$basedir/maven/bin"
    echo "Exported mvn: $basedir/maven/bin"
fi
|
#!/usr/bin/env sh
# Import Drupal configuration from the configured directory via Drush.
# Expects DRUSH and DRUPAL_CONFIGURATION_DIR to be provided by the environment.
${DRUSH} config-import --source=${DRUPAL_CONFIGURATION_DIR}
import "angular";
import LazyModule from "../lazy/LazyModule";
import { ILazyStateProvider } from "../lazy/ILazyStateProvider";
import { NAME as ControllerName } from "./ITasksController";
// Routing module for the tasks feature: registers a lazily-loaded state
// that pulls in TasksModule on first navigation to /tasks.
let module = angular.module("tasks.routing", [LazyModule.name])

module.config(["lazyStateProvider", (lazyStateProvider : ILazyStateProvider) => {
  lazyStateProvider.lazyState("/tasks", {
    url: "/tasks",
    templateUrl: "tasks/view.tpl.html",
    controllerAs: "vm",
    controller: ControllerName,
    module: "tasks/TasksModule", // module path loaded on demand
    resolve: {
      // NOTE(review): hard-coded stub user — presumably a placeholder for a
      // real auth lookup; verify before relying on it.
      user: () => {
        return {
          name: "Janek"
        }
      }
    }
  });
}]);

export default module;
|
<reponame>dakimura/pymarketstore
from .client import Client, Params  # noqa

# alias — backwards-compatible singular name for Params
Param = Params  # noqa

from .stream import StreamConn  # noqa

# Package version string.
__version__ = '0.16'
|
import random

# Seed text whose characters are sampled to extend the output.
seed_text = "Hello world"
# Number of random characters to append after the seed.
length = 30

# Start from the seed followed by a space, then append `length` characters
# drawn uniformly (with replacement) from the seed text.
generated_text = seed_text + ' '
for _ in range(length):
    generated_text += random.choice(seed_text)

print(generated_text)
#!/bin/sh
# script that should be executed on the slave after all files are copied

# Installing System Tools
echo ""
echo ""
echo "*****************************************************************************"
echo "* Updating System Tools *"
echo "*****************************************************************************"
echo ""
echo ""

# Updating to latest packages
sudo apt-get update

# Installing xz-utils
echo ""
echo ""
echo "Installing xz-utills........................................,"
echo ""
echo ""
sudo apt-get -y install xz-utils

# Installing python-pip
echo ""
echo ""
echo "Installing pip..............................................,"
echo ""
echo ""
sudo apt-get -y install python-pip
echo ""
echo ""
sudo pip --version
echo ""
echo ""

# Installing build-essential
echo ""
echo ""
echo "Installing build-essential..................................,"
echo ""
echo ""
sudo apt-get -y install build-essential python-dev
echo ""
echo ""

# Installing mkdocs
echo ""
echo ""
echo "Installing mkdocs..............................................,"
echo ""
echo ""
sudo pip install mkdocs && mkdocs --version
echo ""
echo ""

# Installing mkdocs-material
echo ""
echo ""
echo "Installing mkdocs-material....................................,"
echo ""
echo ""
sudo pip install mkdocs-material
echo ""
echo ""

# Creating Directories
cd /build/jenkins-home
# Bug fix: the JDK extraction below uses `tar -C .../software/java`, but
# tar does not create the target directory — previously only
# software/nodejs was created, so the JDK step failed. Pre-create all
# extraction targets (mkdir -p is idempotent).
mkdir -p software/nodejs software/java software/maven
cd

# unzip installation file
#unzip nodejs files
echo ""
echo "*****************************************************************************"
echo "* Extracting NodeJS files *"
echo "*****************************************************************************"
echo ""
tar -xf /build/jenkins-home/slaveSetupFile/node-v6.10.0-linux-x64.tar.xz -C /build/jenkins-home/software/nodejs
echo "Extracted NodeJS files"
echo ""

#unzip nodejs files
echo ""
echo "*****************************************************************************"
echo "* Extracting JDK *"
echo "*****************************************************************************"
echo ""
tar -zxvf /build/jenkins-home/slaveSetupFile/jdk-8u144-linux-x64.tar.gz -C /build/jenkins-home/software/java
echo "Extracted jdk8.144"
echo ""

#unzip maven
echo ""
echo "*****************************************************************************"
echo "* Extracting Maven files *"
echo "*****************************************************************************"
echo ""
#tar -zxvf /build/jenkins-home/slaveSetupFile/apache-maven-3.2.2-bin.tar.gz -C /build/jenkins-home/software/maven
unzip -o /build/jenkins-home/slaveSetupFile/apache-maven-3.3.9-bin.zip -d /build/jenkins-home/software/maven

#copying gpg-keys
echo ""
echo ""
echo "*****************************************************************************"
echo "* Extracting GPG files *"
echo "*****************************************************************************"
echo ""
echo ""
#unzip -o /build/jenkins-home/slaveSetupFile/jce_policy-8.zip -d /build/jenkins-home/software/jce
#rename policy.jar
#copying gpg-keys
mv /build/jenkins-home/slaveSetupFile/gpg-keys /build/
echo "Copied gpg-keys to /build"
#mv /build/jenkins-home/software/java/jdk1.8.0_45/jre/lib/security/US_export_policy.jar /build/jenkins-home/software/java/jdk1.8.0_45/jre/lib/security/US_export_policy-original.jar
#copy jce files
#cp /build/jenkins-home/software/jce/UnlimitedJCEPolicyJDK8/local_policy.jar /build/jenkins-home/software/java/jdk1.8.0_45/jre/lib/security/
#cp /build/jenkins-home/software/jce/UnlimitedJCEPolicyJDK8/US_export_policy.jar /build/jenkins-home/software/java/jdk1.8.0_45/jre/lib/security/

#reboot node
# sudo reboot
|
<reponame>evpobr/libfoo<gh_stars>0
#include "foo/foo.h"
/* Backing definition for the opaque foo_object handle declared in the
 * public header; carries no state yet beyond a placeholder member
 * (C requires at least one struct member). */
struct foo_object
{
    int dummy; /* placeholder only; unused */
};
|
#!/bin/bash
# Abort on the first failing command.
set -e

# Build a static musl release binary, strip symbols to shrink it,
# then bake it into the Docker image.
cargo build --release --target=x86_64-unknown-linux-musl
strip -s target/x86_64-unknown-linux-musl/release/rust-https-redirect
sudo docker build -t willwill/https-redirect .
|
#
# This file is part of KwarqsDashboard.
#
# KwarqsDashboard is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, version 3.
#
# KwarqsDashboard is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with KwarqsDashboard. If not, see <http://www.gnu.org/licenses/>.
#
import math
import sys
import cv2
import numpy as np
from common import settings
# using python 2.7, get some python 3 builtins
from future_builtins import zip
import logging
logger = logging.getLogger(__name__)
class ImagePreprocessor(object):
    """Thresholds camera frames into a binary mask of target-colored pixels.

    Converts each BGR frame to HSV (or LUV), thresholds every channel with a
    positive/negative threshold pair, ANDs the results, and closes holes with
    binary morphology. The threshold attributes (thresh_hue_p/n,
    thresh_sat_p/n, thresh_val_p/n) are set externally before use.
    """

    def __init__(self):
        # size of the last processed frame; work buffers are reallocated
        # whenever the incoming frame size changes
        self.size = None
        self.colorspace = cv2.cv.CV_BGR2HSV

        # debug settings
        self.show_hue = False
        self.show_sat = False
        self.show_val = False
        self.show_bin = False
        self.show_bin_overlay = False

    def set_colorspace(self, colorspace):
        # Select the working colorspace: 'LUV', or HSV for anything else.
        if colorspace == 'LUV':
            self.colorspace = cv2.cv.CV_BGR2Luv
        else:
            self.colorspace = cv2.cv.CV_BGR2HSV

    # thresholds are not initialized here, someone else does it

    def process_image(self, img):
        '''
        Processes an image and thresholds it. Returns the original
        image, and a binary version of the image indicating the area
        that was filtered

        :returns: img, bin
        '''

        # reinitialize any time the image size changes
        if self.size is None or self.size[0] != img.shape[0] or self.size[1] != img.shape[1]:
            h, w = img.shape[:2]
            self.size = (h, w)

            # these are preallocated so we aren't allocating all the time
            self.bin = np.empty((h, w, 1), dtype=np.uint8)
            self.hsv = np.empty((h, w, 3), dtype=np.uint8)
            self.hue = np.empty((h, w, 1), dtype=np.uint8)
            self.sat = np.empty((h, w, 1), dtype=np.uint8)
            self.val = np.empty((h, w, 1), dtype=np.uint8)

            # for overlays
            self.zeros = np.zeros((h, w, 1), dtype=np.bool)

            # these settings should be adjusted according to the image size
            # and noise characteristics

            # TODO: What's the optimal setting for this? For smaller images, we
            # cannot morph as much, or the features blend into each other.

            # TODO: tune kMinWidth

            # Note: if you set k to an even number, the detected
            # contours are offset by some N pixels. Sometimes.

            if w <= 320:
                k = 2
                offset = (0,0)
                self.kHoleClosingIterations = 1 # originally 9
                self.kMinWidth = 2

                # drawing
                self.kThickness = 1
                self.kTgtThickness = 1

                # accuracy of polygon approximation
                self.kPolyAccuracy = 10.0
            elif w <= 480:
                k = 2
                offset = (0,0)
                self.kHoleClosingIterations = 2 # originally 9
                self.kMinWidth = 5

                # drawing
                self.kThickness = 1
                self.kTgtThickness = 2

                # accuracy of polygon approximation
                self.kPolyAccuracy = 15.0
            else:
                k = 3
                offset = (1,1)
                self.kHoleClosingIterations = 6 # originally 9
                self.kMinWidth = 10

                # drawing
                self.kThickness = 1
                self.kTgtThickness = 2

                # accuracy of polygon approximation
                self.kPolyAccuracy = 20.0

            self.morphKernel = cv2.getStructuringElement(cv2.MORPH_RECT, (k,k), anchor=offset)

            logging.info("New image size: %sx%s, morph size set to %s, %s iterations", w,h,k, self.kHoleClosingIterations)

        # get this outside the loop
        ih, iw = self.size
        centerOfImageY = ih/2.0

        # convert to target colorspace
        # TODO: change all the variable names..
        cv2.cvtColor(img, self.colorspace, self.hsv)
        cv2.split(self.hsv, [self.hue, self.sat, self.val])

        # Threshold each component separately
        # Each channel keeps the band [thresh_*_p, thresh_*_n] by ANDing a
        # BINARY threshold with a BINARY_INV threshold.

        # Hue
        cv2.threshold(self.hue, self.thresh_hue_p, 255, type=cv2.THRESH_BINARY, dst=self.bin)
        cv2.threshold(self.hue, self.thresh_hue_n, 255, type=cv2.THRESH_BINARY_INV, dst=self.hue)
        cv2.bitwise_and(self.hue, self.bin, self.hue)

        if self.show_hue:
            # overlay green where the hue threshold is non-zero
            img[np.dstack((self.zeros, self.hue != 0, self.zeros))] = 255

        # Saturation
        cv2.threshold(self.sat, self.thresh_sat_p, 255, type=cv2.THRESH_BINARY, dst=self.bin)
        cv2.threshold(self.sat, self.thresh_sat_n, 255, type=cv2.THRESH_BINARY_INV, dst=self.sat)
        cv2.bitwise_and(self.sat, self.bin, self.sat)

        if self.show_sat:
            # overlay blue where the sat threshold is non-zero
            img[np.dstack((self.sat != 0, self.zeros, self.zeros))] = 255

        # Value
        cv2.threshold(self.val, self.thresh_val_p, 255, type=cv2.THRESH_BINARY, dst=self.bin)
        cv2.threshold(self.val, self.thresh_val_n, 255, type=cv2.THRESH_BINARY_INV, dst=self.val)
        cv2.bitwise_and(self.val, self.bin, self.val)

        if self.show_val:
            # overlay red where the val threshold is non-zero
            img[np.dstack((self.zeros, self.zeros, self.val != 0))] = 255

        # Combine the results to obtain our binary image which should for the most
        # part only contain pixels that we care about
        cv2.bitwise_and(self.hue, self.sat, self.bin)
        cv2.bitwise_and(self.bin, self.val, self.bin)

        # Fill in any gaps using binary morphology
        cv2.morphologyEx(self.bin, cv2.MORPH_CLOSE, self.morphKernel, dst=self.bin, iterations=self.kHoleClosingIterations)

        #print 'bin',self.show_bin
        if self.show_bin:
            cv2.imshow('bin', self.bin)

        # overlay the binarized image on the displayed image, instead of a separate picture
        if self.show_bin_overlay:
            img[np.dstack((self.bin, self.bin, self.bin)) != 0] = 255

        return img, self.bin
|
from pyspark.sql import SparkSession
from pyspark.ml.feature import Tokenizer
from pyspark.sql.functions import col, udf
from pyspark.ml.feature import StopWordsRemover
from pyspark.ml.feature import HashingTF
from pyspark.ml.feature import IDF
from pyspark.ml.classification import LogisticRegression
# Build the Spark Session
spark = SparkSession.builder.appName("SentimentAnalysis").getOrCreate()

# Read in the dataset.
# inferSchema is required: without it every CSV column is loaded as a
# string, and LogisticRegression.fit() rejects a non-numeric label column.
df = spark.read.csv("data/sentiment_data.csv", header=True, inferSchema=True)

# Explicitly cast the label to double as a safety net in case schema
# inference still produced a string/integer label.
df = df.withColumn("label", col("label").cast("double"))

# Tokenize the text into a "words" array column.
tokenizer = Tokenizer(inputCol="text", outputCol="words")
wordsData = tokenizer.transform(df)

# Remove StopWords
remover = StopWordsRemover(inputCol="words", outputCol="filtered")
filteredData = remover.transform(wordsData)

# Perform the hashing term frequency
hashingTF = HashingTF(inputCol="filtered", outputCol="rawFeatures")
tf_data = hashingTF.transform(filteredData)

# Perform the inverse document frequency
idf = IDF(inputCol="rawFeatures", outputCol="features")
idfModel = idf.fit(tf_data)
tfidf_data = idfModel.transform(tf_data)

# Split the data into training and test sets (seed fixed for reproducibility)
training_data, test_data = tfidf_data.randomSplit([0.8, 0.2], seed=1234)

# Train the logistic regression model
lr = LogisticRegression(maxIter=20, regParam=0.2, elasticNetParam=0)
lrModel = lr.fit(training_data)

# Test the model on the test dataset
predictions = lrModel.transform(test_data)

# Evaluate the model's performance; guard against an empty test split.
correct = predictions.filter(col("label") == col("prediction")).count()
total = predictions.count()
accuracy = correct / float(total) if total else 0.0
print("Model Accuracy:", accuracy)

spark.stop()
<filename>src/app/manual-entry/components/meeting-request/meeting-request.component.spec.ts
import { async, ComponentFixture, TestBed } from '@angular/core/testing';

import { MeetingRequestComponent } from './meeting-request.component';

// Unit tests for MeetingRequestComponent.
// Fix: the suite was named 'TableListComponent' — a copy-paste leftover
// from another spec — which mislabels failures in test reports.
describe('MeetingRequestComponent', () => {
  let component: MeetingRequestComponent;
  let fixture: ComponentFixture<MeetingRequestComponent>;

  // Compile the component's template once per spec.
  beforeEach(async(() => {
    TestBed.configureTestingModule({
      declarations: [ MeetingRequestComponent ]
    })
    .compileComponents();
  }));

  beforeEach(() => {
    fixture = TestBed.createComponent(MeetingRequestComponent);
    component = fixture.componentInstance;
    fixture.detectChanges();
  });

  it('should create', () => {
    expect(component).toBeTruthy();
  });
});
|
#!/bin/bash
# Slurm batch script: launch distributed mmdetection training on a
# single node with 8 GPUs.
#SBATCH -N 1
#SBATCH --gres=gpu:8
#SBATCH --ntasks-per-node=10
# Log the visible GPUs for debugging the allocation.
nvidia-smi
cd /home/lishengqi_0902170602/container/mmdetection
# Make sure the distributed-launch helper is executable.
chmod 777 ./tools/dist_train.sh
###
## PORT=7080 ./tools/dist_train.sh configs/faster_carafe/coco_faster_r50_1x_carafe_3_exp.py 8
## mask fpn & maskhead
# Active run: CARAFE++ mask head config on 8 GPUs; alternative configs
# above/below are kept commented for reference.
PORT=7081 ./tools/dist_train.sh configs/mask_carafe/carafeppp_coco_mask_r50_1x_3_exp__MH.py 8
## PORT=7082 ./tools/dist_train.sh configs/mask_carafe/carafeppp_coco_mask_r50_1x_3_exp__FPN.py 8
## PORT=7083 ./tools/dist_train.sh configs/mask_carafe/carafeppp_coco_mask_r50_1x_3_exp__FPN_MH.py 8
|
<reponame>tenebrousedge/ruby-packer
require 'timeout'

# Print a counter once per second for +n+ seconds, then report completion.
# Used as a long-running workload so the surrounding timeouts fire first.
# (The original incremented the block-local +i+ by hand, which is a no-op
# with Integer#times; removed.)
def progress(n = 5)
  n.times {|i| print i; STDOUT.flush; sleep 1}
  puts "never reach"
end

# Fix: the bare Kernel#timeout method has long been deprecated and was
# removed from the timeout library; Timeout.timeout is the supported API.
p Timeout.timeout(5) {
  45
}

p Timeout.timeout(5, Timeout::Error) {
  45
}

# nil and 0 both mean "no timeout": the block simply runs to completion.
p Timeout.timeout(nil) {
  54
}

p Timeout.timeout(0) {
  54
}

begin
  Timeout.timeout(5) {progress}
rescue => e
  puts e.message
end

# The outer (shorter) timeout fires while the inner one is still pending;
# Timeout arranges for the inner rescue not to swallow the interrupt.
begin
  Timeout.timeout(3) {
    begin
      Timeout.timeout(5) {progress}
    rescue => e
      puts "never reach"
    end
  }
rescue => e
  puts e.message
end

# A custom exception class can be raised instead of Timeout::Error.
class MyTimeout < StandardError
end

begin
  Timeout.timeout(2, MyTimeout) {progress}
rescue MyTimeout => e
  puts e.message
end
|
#!/bin/bash
# Generator Script for NGinx Ultimate Bad Bot Blocker
# REPO: https://github.com/mitchellkrogza/nginx-ultimate-bad-bot-blocker
# Copyright Mitchell Krog - mitchellkrog@gmail.com

# Fail early with a clear message if the CI build directory is missing;
# previously an unset TRAVIS_BUILD_DIR silently produced paths at "/".
if [ -z "${TRAVIS_BUILD_DIR}" ]; then
    echo "ERROR: TRAVIS_BUILD_DIR is not set" >&2
    exit 1
fi

# *******************************
# Input and Output File Variables
# *******************************
outputfile="${TRAVIS_BUILD_DIR}/_generator_lists/bad-ip-addresses.list"

# **************
# Temp Variables
# **************
tmp="${TRAVIS_BUILD_DIR}/_generator_lists/tmp.list"

# **********************************************
# Fetch our feed(s) and append to our input file
# **********************************************
fetch () {
    sudo wget https://raw.githubusercontent.com/mitchellkrogza/Suspicious.Snooping.Sniffing.Hacking.IP.Addresses/master/ips.list -O "${outputfile}"
}

# *************************************************
# Prepare our input lists and remove any duplicates
# *************************************************
initiate () {
    # Prepare Feed 1: sort/dedupe, drop blank lines, normalize line endings.
    # All path expansions are quoted so paths with spaces cannot word-split.
    sudo sort -u "${outputfile}" -o "${outputfile}"
    grep '[^[:blank:]]' < "${outputfile}" > "${tmp}"
    sudo mv "${tmp}" "${outputfile}"
    sudo dos2unix "${outputfile}"
}

# *****************
# Trigger Functions
# *****************
fetch
initiate

# **********************
# Exit With Error Number
# **********************
exit ${?}
|
from typing import Union
def extract_directory_path(input_str: str) -> Union[str, None]:
    """Return the token that follows the ``--ref_embed_dir`` flag.

    The input is split on whitespace; the value after the first occurrence
    of the flag is returned. Returns None when the flag is absent or is
    the final token (i.e. has no value after it).
    """
    tokens = input_str.split()
    for position, token in enumerate(tokens[:-1]):
        if token == "--ref_embed_dir":
            return tokens[position + 1]
    return None
#!/bin/bash
set -e
# ---------------------------
# settings
# ---------------------------
DEBUG=0
notls=0
# ---------------------------
# solc call, need solc command line
# ---------------------------
# Compile Solidity source with solc and post-process the output.
#   usage: solc_compile <file.sol> [-q] [-all] [-bin|-abi|-func|<raw solc flags>]
#   -q    suppress solc's stderr
#   -all  shorthand for "--bin --abi --hashes"
# With no extra options, only the optimized hex bytecode lines are printed.
function solc_compile(){
    local code="$1"
    local suppress_error=0
    local opts=""
    shift
    # Fold the remaining arguments into the solc option string.
    while [[ $# -gt 0 ]]; do
        case "$1" in
            -q) suppress_error=1; shift ;;
            -all) opts="--bin --abi --hashes"; shift;;
            *) opts+=" $1"; shift;;
        esac
    done
    if [ "$opts" == "" ]; then
        # Default: optimized bytecode only (awk keeps the hex lines).
        solc --optimize --bin "$code" 2>/dev/null | awk /^[0-9]/
        return
    elif [ "$opts" == " -bin" ] ;then # by-default, out cleanup binary
        # Same bytecode, wrapped into a {"binary": ...} JSON object.
        solc --optimize --bin "$code" 2>/dev/null | awk /^[0-9]/ |jq -R {"binary":.}
        return
    elif [ "$opts" == " -abi" ];then
        # The ABI JSON is the last line of solc --abi output.
        solc --abi "$code" 2>/dev/null | tail -n 1 |jq .
        return
    elif [ "$opts" == " -func" ];then
        # Turn "sighash: name(args)" lines into {name, sign} JSON objects.
        solc --hashes "$code" 2>/dev/null |awk /^[0-9a-f]/|jq -R '.|split(": ")|{name:.[1],sign: .[0]}'
        return
    fi;
    #echo "debug opts=$opts s_err=$suppress_error"
    # Fallthrough: pass the collected options straight to solc.
    if [[ $suppress_error -eq 0 ]]; then
        solc $opts "$code"
    else
        solc $opts "$code" 2>/dev/null
    fi
}
# ---------------------------
# All jsonrpc calls
# ---------------------------
# qitmeer
# Fetch a block by its order number via the getBlockByOrder JSON-RPC call.
#   $1 order    (required)
#   $2 verbose  (default "true")
#   $3 inclTx   (default "true")
#   $4 fullTx   (default "true")
function get_block(){
    local order=$1
    local verbose=$2
    if [ "$verbose" == "" ]; then
        verbose="true"
    fi
    local inclTx=$3
    if [ "$inclTx" == "" ]; then
        inclTx="true"
    fi
    local fullTx=$4
    if [ "$fullTx" == "" ]; then
        fullTx="true"
    fi
    local data='{"jsonrpc":"2.0","method":"getBlockByOrder","params":['$order','$verbose','$inclTx','$fullTx'],"id":1}'
    get_result "$data"
}
function get_block_by_id(){
local id=$1
local verbose=$2
if [ "$verbose" == "" ]; then
verbose="true"
fi
local inclTx=$3
if [ "$inclTx" == "" ]; then
inclTx="true"
fi
local fullTx=$4
if [ "$fullTx" == "" ]; then
fullTx="true"
fi
local data='{"jsonrpc":"2.0","method":"getBlockByID","params":['$id','$verbose','$inclTx','$fullTx'],"id":1}'
get_result "$data"
}
function get_block_number(){
local data='{"jsonrpc":"2.0","method":"getBlockCount","params":[],"id":1}'
get_result "$data"
}
# qitmeer mempool
function get_mempool(){
local type=$1
local verbose=$2
if [ "$type" == "" ]; then
type="regular"
fi
if [ "$verbose" == "" ]; then
verbose="false"
fi
local data='{"jsonrpc":"2.0","method":"getMempool","params":["'$type'",'$verbose'],"id":1}'
get_result "$data"
}
# return block by hash
# func (s *PublicBlockChainAPI) GetBlockByHash(ctx context.Context, blockHash common.Hash, fullTx bool) (map[string]interface{}, error)
function get_block_by_hash(){
local block_hash=$1
local verbose=$2
if [ "$verbose" == "" ]; then
verbose="true"
fi
local data='{"jsonrpc":"2.0","method":"getBlock","params":["'$block_hash'",'$verbose'],"id":1}'
get_result "$data"
}
function get_blockheader_by_hash(){
local block_hash=$1
local verbose=$2
if [ "$verbose" == "" ]; then
verbose="true"
fi
local data='{"jsonrpc":"2.0","method":"getBlockHeader","params":["'$block_hash'",'$verbose'],"id":1}'
get_result "$data"
}
# return tx by hash
function get_tx_by_hash(){
local tx_hash=$1
local verbose=$2
if [ "$verbose" == "" ]; then
verbose="true"
fi
local data='{"jsonrpc":"2.0","method":"getRawTransaction","params":["'$tx_hash'",'$verbose'],"id":1}'
get_result "$data"
}
# return info about UTXO
function get_utxo() {
local tx_hash=$1
local vout=$2
local include_mempool=$3
if [ "$include_mempool" == "" ]; then
include_mempool="true"
fi
local data='{"jsonrpc":"2.0","method":"getUtxo","params":["'$tx_hash'",'$vout','$include_mempool'],"id":1}'
get_result "$data"
}
function tx_sign(){
local private_key=$1
local raw_tx=$2
local data='{"jsonrpc":"2.0","method":"txSign","params":["'$private_key'","'$raw_tx'"],"id":1}'
get_result "$data"
}
#
function create_raw_tx(){
local input=$1
local data='{"jsonrpc":"2.0","method":"createRawTransaction","params":['$input'],"id":1}'
get_result "$data"
}
function decode_raw_tx(){
local input=$1
local data='{"jsonrpc":"2.0","method":"decodeRawTransaction","params":["'$input'"],"id":1}'
get_result "$data"
}
function send_raw_tx(){
local input=$1
local allow_high_fee=$2
if [ "$allow_high_fee" == "" ]; then
allow_high_fee="false"
fi
local data='{"jsonrpc":"2.0","method":"sendRawTransaction","params":["'$input'",'$allow_high_fee'],"id":1}'
get_result "$data"
}
# Ask the node's miner to generate <count> blocks (miner_generate RPC).
# NOTE(review): the second argument (block_num) is accepted and defaulted
# to "latest" but never used in the RPC payload — confirm whether it can
# be dropped or was meant to be part of params.
function generate() {
    local count=$1
    local block_num=$2
    if [ "$block_num" == "" ]; then
        block_num="latest"
    fi
    local data='{"jsonrpc":"2.0","method":"miner_generate","params":['$count'],"id":null}'
    get_result "$data"
}
function get_blockhash(){
local blk_num=$1
local data='{"jsonrpc":"2.0","method":"getBlockhash","params":['$blk_num'],"id":null}'
get_result "$data"
}
function is_on_mainchain(){
local block_hash=$1
local data='{"jsonrpc":"2.0","method":"isOnMainChain","params":["'$block_hash'"],"id":1}'
get_result "$data"
}
function get_block_template(){
local capabilities=$1
local data='{"jsonrpc":"2.0","method":"getBlockTemplate","params":[["'$capabilities'"]],"id":1}'
get_result "$data"
}
function get_mainchain_height(){
local data='{"jsonrpc":"2.0","method":"getMainChainHeight","params":[],"id":1}'
get_result "$data"
}
function get_block_weight(){
local block_hash=$1
local data='{"jsonrpc":"2.0","method":"getBlockWeight","params":["'$block_hash'"],"id":1}'
get_result "$data"
}
# Return the block hashes for the range [blk_num0, blk_num1].
# When the second argument is omitted, the range collapses to one block.
# BUGFIX: the original tested and assigned the misspelled variable
# "block_num1" (always empty) and assigned the literal string "blk_num0"
# instead of its value, so single-argument calls sent an invalid payload
# (an empty second param).
function get_blockhash_range(){
    local blk_num0=$1
    local blk_num1=$2
    if [ "$blk_num1" == "" ]; then
        blk_num1=$blk_num0
    fi
    local data='{"jsonrpc":"2.0","method":"getBlockhashByRange","params":['$blk_num0','$blk_num1'],"id":null}'
    get_result "$data"
}
function get_node_info(){
local data='{"jsonrpc":"2.0","method":"getNodeInfo","params":[],"id":null}'
get_result "$data"
}
function get_peer_info(){
local data='{"jsonrpc":"2.0","method":"getPeerInfo","params":[],"id":null}'
get_result "$data"
}
function get_orphans_total(){
local data='{"jsonrpc":"2.0","method":"getOrphansTotal","params":[],"id":null}'
get_result "$data"
}
function get_rawtxs(){
local address=$1
local param2=$2
local param3=$3
local param4=$4
local param5=$5
local param6=$6
if [ "$param2" == "" ]; then
param2=false
fi
if [ "$param3" == "" ]; then
param3=100
fi
if [ "$param4" == "" ]; then
param4=0
fi
if [ "$param5" == "" ]; then
param5=false
fi
if [ "$param6" == "" ]; then
param6=true
fi
local data='{"jsonrpc":"2.0","method":"getRawTransactions","params":["'$address'",'$param2','$param3','$param4','$param5','$param6'],"id":null}'
get_result "$data"
}
# POST a JSON-RPC payload ($1) to the configured node and print .result.
# Reads globals: notls, host, port, DEBUG (defaults set at the top of the
# script); host/port default to 127.0.0.1:1234, credentials are test/test.
function get_result(){
    local proto="https"
    if [ $notls -eq 1 ]; then
        proto="http"
    fi
    if [ -z "$host" ]; then
        host=127.0.0.1
    fi
    if [ -z "$port" ]; then
        port=1234
    fi
    local user="test"
    local pass="test"
    local data=$1
    # Quote "$data" and "$current_result": the unquoted originals were
    # word-split by the shell, corrupting any payload or response that
    # contains whitespace.
    local current_result=$(curl -s -k -u "$user:$pass" -X POST -H 'Content-Type: application/json' --data "$data" $proto://$host:$port)
    local result=$(echo "$current_result"|jq -r -M '.result')
    if [ $DEBUG -gt 0 ]; then
        local current_cmd="curl -s -k -u "$user:$pass" -X POST -H 'Content-Type: application/json' --data '"$data"' $proto://$host:$port"
        echo "$current_cmd" >> "./cli.debug"
        echo "$current_result" >> "./cli.debug"
    fi
    echo "$result"
}
# -------------------------
# util functions
# -------------------------
# Echo the argument with a "0x" prefix, adding it only when missing.
function pad_hex_prefix(){
    local value=$1
    case "$value" in
        0x*) echo "$value" ;;
        *)   echo "0x$value" ;;
    esac
}
# Format a numeric argument (decimal or 0x-prefixed) as 0x-prefixed hex.
function to_hex() {
    local n=$1
    printf "0x%x\n" $n
}
# Format a numeric argument (decimal or 0x-prefixed) as plain decimal.
function to_dec() {
    local n=$1
    printf "%d\n" $n
}
# Decode a hex string (no 0x prefix) to raw bytes and base64-encode them.
function to_base64() {
    echo -n $1|xxd -r -p|base64
}
# Print the supported sub-commands, grouped by area.
function usage(){
    echo "chain :"
    echo " nodeinfo"
    echo " peerinfo"
    echo " main <hash>"
    echo "block :"
    echo " block <num|hash>"
    echo " blockid <id>"
    echo " block_count"
    echo " blockrange <start,end>"
    echo " mainHeight"
    echo " weight <hash>"
    echo " orphanstotal"
    echo "tx :"
    echo " tx <hash>"
    echo " createRawTx"
    echo " txSign <rawTx>"
    echo " sendRawTx <signedRawTx>"
    echo " getrawtxs <address>"
    echo "utxo :"
    echo " getutxo <tx_id> <index> <include_mempool,default=true>"
    echo "miner :"
    echo " template"
    echo " generate <num>"
}
# -------------------
# level 2 functions
# -------------------
# Start mining if it is not already running.
# NOTE(review): get_mining, miner_start and get_mining_status are NOT
# defined anywhere in this script — these helpers look like they belong
# to a different (geth-style) CLI; confirm they exist before relying on
# this code path. Also note "$(miner_start)" runs the command's OUTPUT
# as a command — presumably unintentional; verify.
function start_mining(){
    check_api_error_stop miner
    local mining=$(get_mining)
    if [ "$mining" == "false" ]; then
        echo start mining ...
        $(miner_start)
        get_mining
    else
        echo already stated
        get_mining_status
    fi
}
# Stop mining if it is currently running.
# NOTE(review): get_mining, miner_stop and get_mining_status are not
# defined in this script either — see the note on start_mining.
function stop_mining(){
    check_api_error_stop miner
    local mining=$(get_mining)
    if [ "$mining" == "true" ]; then
        echo stop mining ...
        miner_stop
    else
        echo already stopped
        get_mining_status
    fi
}
function check_api_error_stop(){
if [ "$(check_api $1)" == "" ]; then
echo "$1 api not find, need to enable the management APIs"
echo "For example: geth --rpcapi eth,web3,miner --rpc"
exit
fi
}
function check_api() {
local api=$1
if ! [ "$api" == "" ]; then
local api_ver=$(get_rpc_modules|jq .$api -r)
if ! [ "$api_ver" == "null" ]; then
echo $api_ver
fi
fi
}
function get_mining_status(){
local mining=$(get_mining)
local hashrate=$(get_hashrate)
local gasprice=$(get_gasprice)
local gasprice_dec=$(printf "%d" $gasprice)
echo "mining : $mining"
echo "hashRate : $hashrate"
echo "gasPrice : $gasprice -> ($gasprice_dec wei/$(wei_to_ether $gasprice) ether)"
}
function get_modules(){
get_rpc_modules|jq . -r
}
function get_status() {
#echo "debug get_status $@"
if [ "$1" == "" ]; then
get_modules
elif [ "$1" == "-module" ]; then
get_modules
elif [ "$1" == "-mining" ]; then
get_mining_status
elif [ "$1" == "-hashrate" ]; then
get_hashrate
elif [ "$1" == "-work" ]; then
get_work|jq .
elif [ "$1" == "-txpool" ]; then
txpool -status|jq .
elif [ "$1" == "-all" ]; then
echo "modules : $(get_modules|jq -c -M .)"
get_mining_status
else
echo "unknown opt $@"
fi
}
function get_current_block_num(){
get_syncing $@|jq .currentBlock -r|xargs printf "%d\n"
}
function call_get_block() {
# echo "debug call_get_block $@"
local blknum=""
local blkhash=""
local show=""
local show_opt=""
local verbose="true"
if [[ $# -eq 0 ]]; then
# echo "get lastet block"
local latest_num=$(get_block_number|xargs printf "%d")
echo "the lastet block is $latest_num"
exit
fi
if ! [ "${1:0:1}" == "-" ]; then
if [ "${1:0:2}" == "0x" ] && [[ ${#1} -eq 66 ]] ; then # 64
blkhash=$1
else
blknum=$1
fi
shift
fi
while [[ $# -gt 0 ]]; do
case "$1" in
-n|-num) shift; blknum=$1; shift ;;
-h|-hash) shift; blkhash=$1; shift ;;
-show) shift; show=${1%%=*}; show_opt="${1#*=}"; shift ;;
-v|-verbose|--verbose) shift; verbose="true"; ;;
*) echo '{ "error" : "unkown option: '$1'"}'|jq .; exit -1;;
esac
done
if [ "$show_opt" == "$show" ]; then show_opt=""; fi;
#echo "debug: blknum=$blknum,blkhash=$blkhash,show=$show;show_opt=$show_opt"
if [ "$show" == "rlp" ]; then
if ! [ "$blknum" == "" ]; then
if [ "$show_opt" == "dump" ];then
block_rlp $blknum|rlpdump
else
block_rlp $blknum
fi
return
else
echo '{ "error" : "show rlp only support by using blknum."}'|jq .; exit -1;
fi
else #default show all
if ! [ "$blknum" == "" ]; then
block_result=$(get_block "$blknum" "$verbose")
elif ! [ "$blkhash" == "" ]; then
block_result=$(get_block_by_hash "$blkhash" "$verbose")
if [ "$verbose" != "true" ]; then
block_result='{ "hex" : "'$block_result'"}'
fi
else
echo '{ "error" : "need to provide blknum or blkhash"}'; exit -1;
fi
fi
if [ "$show" == "" ]; then
echo $block_result|jq '.'
elif [ "$show" == "tx" ]; then
tx=$show_opt
if [ "${tx:0:2}" == "0x" ];then
echo $block_result|jq '.transactions|.[]|select(.transactionIndex == "'$tx'")'
else
echo $block_result|jq '.transactions['$tx']'
fi
elif [ "$show" == "txcount" ]; then
echo $block_result|jq '.transactions|length'
elif [ "$show" == "blocktime" ];then
echo $block_result|jq '.timestamp'|hex2dec|timestamp
elif [ "$show" == "stroot" ];then
echo $block_result|jq '.stateRoot'
elif [ "$show" == "txroot" ];then
echo $block_result|jq '.transactionsRoot'
elif [ "$show" == "rcroot" ];then
echo $block_result|jq '.receiptsRoot'
elif [ "$show" == "roots" ]; then
echo $block_result|jq '{"stroot":.stateRoot, "txroot":.transactionsRoot, "rcroot":.receiptsRoot}'
else
echo '{ "error" : "unkown option: '$show'"}'; exit -1;
fi
}
# main logic
if [ $? != 0 ]; then
echo "Usage: -h [host] -p [port] "
exit;
fi
#echo "$@"
while [ $# -gt 0 ] ;do
case "$1" in
-notls)
notls=1
;;
-h)
host=$2
#echo "host is $host"
shift;;
-p)
port=$2
#echo "port is $port"
shift;;
-D)
DEBUG=1
;;
*)
cmd="$@"
#echo "cmd is $cmd"
break;;
esac
shift
done
## Block
if [ "$1" == "block" ]; then
shift
if [ "$1" == "latest" ]; then
shift
call_get_block $(get_block_number)
else
call_get_block $@
fi
elif [ "$1" == "blockid" ]; then
shift
get_block_by_id $@
elif [ "$1" == "block_count" ]; then
shift
get_block_number
elif [ "$1" == "get_syncing_info" ]; then
shift
get_syncing $@
elif [ "$1" == "get_current_block" ]; then
shift
get_current_block_num
elif [ "$1" == "get_current_block2" ]; then
shift
blocknum=$(get_current_block_num)
cmd_get_block $blocknum $@
elif [ "$1" == "get_highest_block" ]; then
shift
get_syncing $@|jq .highestBlock -r|xargs printf "%d\n"
elif [ "$1" == "blockhash" ]; then
shift
get_blockhash $1
elif [ "$1" == "header" ]; then
shift
get_blockheader_by_hash $@
elif [ "$1" == "main" ]; then
shift
is_on_mainchain $1
elif [ "$1" == "template" ]; then
shift
get_block_template $1 | jq .
elif [ "$1" == "mainHeight" ]; then
shift
get_mainchain_height
elif [ "$1" == "weight" ]; then
shift
get_block_weight $1
elif [ "$1" == "blockrange" ]; then
shift
get_blockhash_range $@
elif [ "$1" == "nodeinfo" ]; then
shift
get_node_info | jq .
elif [ "$1" == "peerinfo" ]; then
shift
get_peer_info | jq .
elif [ "$1" == "orphanstotal" ]; then
shift
get_orphans_total | jq .
## Tx
elif [ "$1" == "tx" ]; then
shift
if [ "$2" == "false" ]; then
get_tx_by_hash $@
else
get_tx_by_hash $@|jq .
fi
elif [ "$1" == "createRawTx" ]; then
shift
create_raw_tx $@
elif [ "$1" == "decodeRawTx" ]; then
shift
decode_raw_tx $@|jq .
elif [ "$1" == "sendRawTx" ]; then
shift
send_raw_tx $@
elif [ "$1" == "getrawtxs" ]; then
shift
get_rawtxs $@
elif [ "$1" == "get_tx_by_block_and_index" ]; then
shift
# note: the input is block number & tx index in hex
get_tx_by_blocknum_and_index_hex $@
## MemPool
elif [ "$1" == "mempool" ]; then
shift
get_mempool $@|jq .
elif [ "$1" == "txSign" ]; then
shift
tx_sign $@
echo $@
## UTXO
elif [ "$1" == "getutxo" ]; then
shift
get_utxo $@|jq .
## Accounts
elif [ "$1" == "newaccount" ]; then
shift
new_account "$@"
elif [ "$1" == "accounts" ]; then
shift
accounts=$(get_accounts)
if [ -z "$1" ]; then
echo $accounts|jq '.'
else
echo $accounts|jq '.['$1']' -r
fi
elif [ "$1" == "balance" ]; then
shift
addr=$(pad_hex_prefix $1)
shift
if [ ! -z "$1" ]; then
num=$(to_hex $1)
shift
fi
# echo "debug get_balance $addr $num"
balance=$(get_balance $addr $num $@)
echo $balance
# echo "debug get_balance $addr $num --> $balance"
elif [ "$1" == "get_tx_count" ]; then
shift
addr=$1
shift
if [ "$1" == "-h" ]; then
get_tx_count_by_addr $(pad_hex_prefix $addr) $@|xargs printf "%d\n"
else
get_tx_count_by_addr $(pad_hex_prefix $addr) $@
fi
elif [ "$1" == "get_code" ]; then
shift
addr=$(pad_hex_prefix $1)
shift
if [ ! -z "$1" ]; then
num=$(to_hex $1)
shift
fi
get_code $addr $num
elif [ "$1" == "get_storage" ]; then
shift
addr=$(pad_hex_prefix $1)
shift
if [ ! -z "$1" ]; then
at=$(to_hex $1)
shift
fi
if [ ! -z "$1" ]; then
num=$(to_hex $1)
shift
fi
get_storage $addr $at $num
## Mining
elif [ "$1" == "get_coinbase" ]; then
shift
get_coinbase
elif [ "$1" == "start_mining" ]; then
shift
start_mining
elif [ "$1" == "stop_mining" ]; then
shift
stop_mining
elif [ "$1" == "mining" ]; then
shift
start_mining
sleep $1
stop_mining
elif [ "$1" == "generate" ]; then
shift
generate $1|jq .
## INFO & STATUS
elif [ "$1" == "status" ] || [ "$1" == "info" ] || [ "$1" == "get_status" ] || [ "$1" == "get_info" ]; then
shift
get_status $@
## Execute
elif [ "$1" == "compile" ]; then
shift
solc_compile "$@"
elif [ "$1" == "call" ]; then
shift
qitmeer_call $@
elif [ "$1" == "send_tx" ]; then
shift
send_tx $@
elif [ "$1" == "receipt" ]; then
shift
get_receipt $@ |jq .
elif [ "$1" == "contractaddr" ]; then
shift
get_receipt $@ |jq -r .contractAddress
## TXPOOL
elif [ "$1" == "txpool" ]; then
shift
txpool $@ |jq .
## DEBUG Moduls
elif [ "$1" == "dump_state" ]; then
shift
dump_block $@|jq .
elif [ "$1" == "rlp_block" ]; then
shift
block_rlp $@
elif [ "$1" == "trace_block" ]; then
shift
trace_block $@|jq .
elif [ "$1" == "trace_tx" ]; then
shift
trace_tx $@|jq .
## UTILS
elif [ "$1" == "to_hex" ]; then
shift
to_hex $1
elif [ "$1" == "to_base64" ]; then
shift
to_base64 $1
elif [ "$1" == "list_command" ]; then
usage
else
echo "Unkown cmd : $1"
usage
exit -1
fi
# debug info
if [ $DEBUG -gt 0 ]; then
echo -e "\nDebug info:"
cat ./cli.debug
rm ./cli.debug
fi |
<reponame>ourcade/phaser3-typescript-examples<filename>src/camera/follow-sprite/FollowSprite.ts
import Phaser from 'phaser'
// Demo scene: four cameras follow the same sprite with different lerp
// settings, showing how the follow smoothing factor affects camera lag.
export default class FollowSprite extends Phaser.Scene
{
    // The sprite every camera follows.
    private clown!: Phaser.GameObjects.Image
    // Angle (radians) driving the clown's circular path; starts near pi.
    private accumulator = 3.14

    preload()
    {
        this.load.image('CherilPerils', '/assets/tests/camera/CherilPerils.png')
        this.load.image('clown', '/assets/sprites/clown.png')
    }

    create()
    {
        this.add.image(0, 0, 'CherilPerils').setOrigin(0)

        // Tile four 400x300 cameras into a 2x2 grid.
        this.cameras.main.setSize(400, 300)
        const cam2 = this.cameras.add(400, 0, 400, 300)
        const cam3 = this.cameras.add(0, 300, 400, 300)
        const cam4 = this.cameras.add(400, 300, 400, 300)

        this.clown = this.add.image(450 + Math.cos(this.accumulator) * 200, 510 + Math.sin(this.accumulator) * 200, 'clown')

        // Same target everywhere; the two trailing args are the follow
        // lerp factors — smaller values make the camera trail further behind.
        this.cameras.main.startFollow(this.clown)
        cam2.startFollow(this.clown, false, 0.5, 0.5)
        cam3.startFollow(this.clown, false, 0.1, 0.1)
        cam4.startFollow(this.clown, false, 0.05, 0.05)
    }

    update()
    {
        // Advance the clown along a circle of radius 200 centred at (450, 510).
        this.clown.x = 450 + Math.cos(this.accumulator) * 200
        this.clown.y = 510 + Math.sin(this.accumulator) * 200
        this.accumulator += 0.02
    }
}
|
#! /usr/bin/env bash
# Regression test for quex bug 3106424: --set-by-expression with an
# inverse character set failed in 0.55.2. Each section echoes the exact
# command line and then executes it, so the logged output is reproducible.
bug=3106424
if [[ $1 == "--hwut-info" ]]; then
    echo "timdawborn: $bug 0.55.2 --set-by-expression with inverse fails"
    exit
fi

echo "--------------------------------------------------------------------"
echo "quex --set-by-expression [^a] --intervals"
echo
quex --set-by-expression '[^a]' --intervals

echo "--------------------------------------------------------------------"
echo "quex --set-by-expression [^a] --intervals --numeric"
echo
quex --set-by-expression '[^a]' --intervals --numeric

echo "--------------------------------------------------------------------"
echo "quex --set-by-property Script=Greek --intervals"
echo
quex --set-by-property Script=Greek --intervals

echo "--------------------------------------------------------------------"
echo "quex --set-by-property Script=Greek --intervals --numeric"
echo
quex --set-by-property Script=Greek --intervals --numeric

echo "--------------------------------------------------------------------"
echo "quex --set-by-property Script=Greek "
echo
quex --set-by-property Script=Greek

echo "--------------------------------------------------------------------"
# BUGFIX: the echoed label previously advertised '[\1-\U10FFFE]' while the
# executed command used '[\4-\U10FFFE]'; the label is aligned with what
# actually runs so the logged command matches the produced output.
echo quex --set-by-expression '[:complement([\4-\U10FFFE]):]' --intervals --numeric
echo
quex --set-by-expression '[:complement([\4-\U10FFFE]):]' --intervals --numeric
|
'use strict';

// Shareable ESLint configuration for Vue projects: layers the Vue
// "essential" ruleset on top of the local recommended/jest/imports/
// prettier configs.
module.exports = {
  extends: [
    './recommended.js',
    './jest.js',
    './imports.js',
    './prettier.js',
    'plugin:vue/essential',
  ],
  plugins: ['vue'],
  env: {
    es6: true,
    browser: true,
    node: true,
  },
  parserOptions: {
    // Parser used for <script> blocks inside .vue files.
    parser: 'babel-eslint',
    ecmaVersion: 9,
    sourceType: 'module',
  },
  // Rule levels: 1 = warn, 2 = error.
  rules: {
    'vue/attribute-hyphenation': 1,
    'vue/component-name-in-template-casing': 1,
    'vue/name-property-casing': 1,
    'vue/no-template-shadow': 2,
    'vue/prop-name-casing': 1,
    'vue/require-prop-types': 2,
    'vue/v-bind-style': 1,
    'vue/v-on-style': 1,
  },
};
|
#!/bin/bash
# Build the C helper functions that test programs produced by the
# compiler-under-test link against. The heredoc body is fed to gcc on
# stdin (-xc -) and compiled into tmp2.o.
cat <<EOF | gcc -xc -c -o tmp2.o -
#include <stdlib.h>
#include <stdio.h>
#include <stdarg.h>
int ret3() { return 3; }
int ret5() { return 5; }
int add(int x, int y) { return x+y; }
int sub(int x, int y) { return x-y; }
int mul(int x, int y) { return x*y; }
int add3(int a,int b, int c){
  return a+b+c;
}
int add6(int a, int b, int c, int d, int e, int f) {
  return a+b+c+d+e+f;
}
int alloc4(int **p, int x,int y,int z , int a) {
  *p = malloc(sizeof(int)*4);
  (*p)[0] = x; (*p)[1] = y; (*p)[2] = z; (*p)[3] = a;
  return 1;
}
int add_all(int n, ...) {
  va_list ap;
  va_start(ap, n);
  int sum = 0;
  for (int i = 0; i < n; i++)
    sum += va_arg(ap, int);
  return sum;
}
EOF
# 1
four_op() {
assert 0 ' int main ( ) {return 0;}'
assert 4 'int main(){ return 4;}'
assert 10 "int main() {return 4+9-3; }"
assert 91 "int main(){return 4 + 90 -3; }"
assert 47 'int main(){return 5+6*7;}'
assert 15 'int main(){ return 5*(9-6);}'
assert 4 'int main(){return ( 3 + 5 )/2 ;}'
assert 10 'int main(){return -10 + 20 ;}'
assert 100 'int main(){return -(- 40) + 60;}'
}
# 2
eq() {
assert 1 'int main(){return 0==0;}'
assert 1 'int main(){return -39 == -39;}'
assert 0 'int main(){return -210 == 932;}'
}
# 3
neq() {
assert 1 'int main(){return 321!=4442;}'
assert 0 'int main(){return 33!=33;}'
}
# 4
greater() {
assert 1 'int main(){return 2 > 1 ; }'
assert 0 'int main(){return 40 > 200;}'
assert 0 'int main(){return 40>40;}'
}
# 5
lesser() {
assert 1 'int main(){return 4<200;}'
assert 0 'int main(){return 4000 < 500;}'
assert 0 'int main(){return -40<-40;}'
}
# 6
leq() {
assert 1 'int main(){return 0<=1;}'
assert 1 'int main(){return 0 <= 0;}'
assert 0 'int main(){return 4<= 0;}'
}
# 7
geq() {
assert 1 'int main() {return 0>=0;}'
assert 1 'int main() {return -11>=-11;}'
assert 1 'int main() {return 100 >= 3;}'
assert 0 'int main() {return 3 >= 100;}'
assert 0 'int main() {return -100 >= 30;}'
}
# 8
single_char_variable() {
assert 3 'int main(){int a;return a=3;}'
assert 1 'int main(){int a;a = -4; int b;b= 5; return a+b;}'
assert 2 'int main(){int a;a=1;int b;b=1;return a+b;}'
assert 14 'int main(){int a; a =3 ;int b; b = 5*6-8; return a+b/2;}'
assert 2 'int main(){int z; int h; int s;z=h=s=1;return z*(h+s);}'
}
# 9
multi_char_variable() {
assert 2 'int main(){int foo;foo=1;int bar;bar=1;return foo+bar;}'
assert 63 'int main(){int foo; int bar; foo = 13 ; bar = 50 ;return foo + bar ;}'
assert 10 'int main(){int foo; int bar;foo = -1 ; bar = 9;return foo*bar+bar*2+foo*-1;}'
assert 18 'int main(){int foo; int bar; foo = -1 ; bar = 9; foo = foo +bar; return foo +10;}'
}
# 10
return_stmt() {
assert 1 'int main(){return 1;}'
assert 11 'int main(){int foo; foo = 1;int bar; bar = 10 ; return foo + bar;}'
assert 11 'int main(){int foo; foo = 1;int bar; bar = 10 ; return foo + bar; int hoge;hoge = 20;}'
}
# 11
control_stmt() {
assert 10 'int main(){if ( 1 ==1 ) return 10;}'
assert 20 'int main(){int foo; foo = 10;int bar; bar = 20; if (foo == bar ) return foo; else return bar;}'
assert 10 'int main(){int i; i = 0; while(i <10) i = i + 1; return i;}'
assert 8 'int main(){int i; i = 1; while (i <=1024) i = i + i; return i/256;}'
assert 57 'int main(){int foo;int i; foo = 12;for(i = 0;i<10;i = i+1)foo = foo+i;return foo; }'
assert 50 'int main(){int result; int i;result = 0;for(i=1;i<=100;i=i+1) result = result+i;return result/101;}'
}
# 12
block_stmt() {
assert 4 'int main(){int foo; foo=1;{foo= foo+foo;foo=foo+foo;}return foo;}'
assert 233 'int main(){int n ;n=13;int current; current = 0; int next; next = 1;int i; i = 0; int tmp; tmp = 0; while ( i < n ) { tmp = current; current = next; next = next + tmp; i=i+1;} return current;}'
assert 233 'int main(){int n; int current; int next; int i;int tmp;n=13; current = 0;next = 1; for(i =0;i<n;i=i+1){tmp=current;current = next;next = next +tmp;}return current;}'
}
# 13
func_call() {
assert 3 'int ret3(); int main(){return ret3();}'
assert 3 'int ret3(); int main(){return ret3();}'
assert 5 'int ret5(); int main(){return ret5();}'
assert 8 'int add(int x, int y); int main(){return add(3, 5);}'
assert 2 'int sub(int x, int y); int main(){return sub(5, 3);}'
assert 10 'int mul(int x, int y); int main(){return mul(2, 5);}'
assert 6 'int add3(int x, int y, int z); int main(){return add3(1,2,3);}'
assert 21 'int add6(int a, int b, int c ,int d, int e ,int f); int main(){return add6(1,2,3,4,5,6);}'
}
# 14
zero_arity_func_def() {
assert 3 'int myfunc(){return 3;}int main(){return myfunc();}'
assert 33 'int myfunc(){int a; int b;a = 1; b =2; return a+b;} int main(){int a; int b;a = 10; b = 20; return a + b + myfunc();}'
# assert 8 'int main(){int foo; foo = 10; int bar; bar = 20; return -1 - foo + bar + myfunc();} int myfunc () {int foo; foo = -1; return foo;}'
}
# 15
six_arity_func_def() {
assert 11 'int myfunc(int x) {return x +1;}int main(){return myfunc(10);}'
assert 15 'int myfunc(int x,int y,int z){int foo; foo=10;return x*2+y+z+foo;} int main(){int foo; foo = 1;return foo+myfunc(foo,foo,foo);}'
assert 55 'int fib(int n){if (n == 0) {return 0;} else if (n == 1) {return 1;}else {return fib(n-1)+fib(n-2);}} int main(){return fib(10);}'
assert 1 'int test(int a, long b){return a;} int main(){return test(1,2);}'
assert 2 'int test(int a, long b){return b;} int main(){return test(1,2);}'
assert 11 'int test(char a, long b, short c, int d){return a-b+c*d;} int main(){return test(1,2,3,4);}'
}
# 16
unary_deref_addr() {
assert 1 'int main(){int foo; int *bar; foo=1; bar = &foo; return *bar;}'
assert 2 'int main(){int foo; int *bar; foo=1; bar = &foo; return *bar+1;}'
assert 3 'int main() {int x; x=3; return *&x; }'
assert 3 'int main() {int x; x=3; int *y;y=&x; int **z;z=&y; return **z; }'
assert 5 'int main() { int x; int y; x=3; y=5; return *(&x-1); }' # コンパイラ依存
assert 3 'int main() { int x; int y; x=3; y=5; return *(&y+1); }' # コンパイラ依存
assert 5 'int main() { int x; int *y; x=3; y=&x; *y=5; return x; }'
assert 7 'int main() { int x; int y; x=3; y=5; *(&x-1)=7; return y; }' # コンパイラ依存
assert 7 'int main() { int x; int y; x=3; y=5; *(&y+1)=7; return x; }' # コンパイラ依存
}
# 17
int_keyword() {
assert 1 'int foo(int x) {int intx; return x;} int main() { return foo(1);}'
assert 10 'int main(){int *a; int x; x = 10; a = &x; return *a; }'
# assert 127 'int foo(int x){int x; return x;}' this cause already defined error
}
# 18
pointer_type() {
assert 3 'int main(){int x; int *y; y = &x; *y = 3; return x;}'
assert 3 'int main() {int x; int *y; int **z; x = 3; y = &x; z = &y; return **z;}'
assert 11 'int main(){int x; int *y; x = 1; y = &x; return *y + 10;}'
}
# 19
pointer_operation() {
assert 1 'int alloc4(int *p,int x,int y, int z,int a);int main(){int *p; alloc4(&p,1,2,4,8); return *p;}'
assert 1 'int alloc4(int *p,int x,int y, int z,int a);int main(){int *p; alloc4(&p,1,2,4,8); int *q; q = p;return *q;}'
assert 4 'int alloc4(int *p,int x,int y, int z,int a);int main(){int *p; alloc4(&p,1,2,4,8); int *q; q = p+2;return *q;}'
assert 8 'int alloc4(int *p,int x,int y, int z,int a);int main(){int *p; alloc4(&p,1,2,4,8); int *q; q = p+3;return *q;}'
}
# 20
sizeof() {
assert 4 'int main(){return sizeof(1);}'
assert 8 'int main(){int *p; return sizeof(p);}'
assert 4 'int main() {return sizeof (1+2);} '
assert 8 'int main(){int *p; int x ; x = 8; p = &x; return sizeof (p +2);}'
assert 4 'int echo(int n){return n;} int main(){return sizeof(echo(1)); }'
assert 4 'int main(){int *y; return sizeof *y;}'
}
# 21
array() {
assert 1 'int main(){int a[1]; *a = 1; return *a;}'
assert 1 'int main(){ int y[2]; *y = 10; int x; x = 1; return x;}'
assert 10 'int main(){int x[10]; *x = 1; *(x+9) = 10; return *(x+9); }' # intのサイズは8だけどポインタ演算は4なので変になってる
assert 2 'int main(){int a[2]; *a = 1; *(a+1) = 2; int *p ;p =a; return *(p+1);}'
assert 1 'int main(){int x ; x = 1; int y[2]; *(y+1) = 10; return x;}'
assert 11 'int main(){int x ; x = 1; int y[2]; *(y+1) = 10; return *(y+1) + x;}'
assert 8 'int main(){int x; x = 1; int y[10]; int i; for(i =0; i<10; i = i+1){*(y+i)=i;} int z ; z = 20; return x + *(y+7) ; }'
assert 12 'int main(){int x[3]; return sizeof x;}'
assert 24 'int main(){int *x[3]; return sizeof x;}'
}
# 22
array_idx() {
assert 1 'int main(){int a[10]; a[1] = 1; return a[1];}'
assert 32 'int main(){int a[10]; int i; i = 2; a[0]= 10; a[9] = 20; return i+ a[0] + a[9]; } '
assert 45 'int main(){int a[10]; int i; for(i=0;i<10;i=i+1){a[i] = i;} int result; result = 0; for (i = 0;i<10;i = i+1){result = result + a[i]; }return result ; } '
assert 10 'int main(){int hoge[2]; int x; x = 2; hoge[x-1] = 10; return hoge[1];}'
}
# 23
global_variable() {
assert 2 'int main(){int a; a=2; return a;}'
assert 1 'int a; int main(){a = 1; return 1;}'
assert 1 ' int a[10]; int main(){a[0] = 1; return a[0];}'
assert 45 'int a[10];int main(){int i; for(i=0;i<10;i=i+1){a[i] = i;} int result; result = 0; for (i = 0;i<10;i = i+1){result = result + a[i]; }return result ; } '
assert 10 'int hoge[2]; int main(){ int x; x = 2; hoge[x-1] = 10; return hoge[1];}'
assert 3 'int a; int b; int add_a_b(){a = 1; b = 2; return a+b;} int main(){add_a_b(); return a + b;} '
assert 5 'int a; int b; int add_a_b(){a = 1; b = 2; return a+b;} int main(){ int a ; a = add_a_b(); return a + b ;}'
}
# 24
char() {
assert 1 'int main(){char a; a = 1; return a;}'
assert 2 'int main(){char a; int b; a =1; b =a +1; return b;}'
assert 10 'int main(){char hoge[10]; hoge[9] = 10; return hoge[9];}'
assert 3 'int main(){char x[3]; x[0] = -1; x[1] = 2; int y; y = 4; return x[0] + y;}'
assert 5 'int main(){char x[3]; x[0] = -1; x[1] = 2; int y; y = 4; return y - x[0];}'
assert 10 'char hoge[2]; int main(){hoge[0] =1; hoge[hoge[0]]= 10; return hoge[1];} '
assert 97 "int main(){char p = 'a';return p; }"
assert 10 "int main(){return '\\n';}"
}
# 25
string() {
assert 97 'int main(){return "abc"[0];}'
assert 97 'int main() { return "abc"[0]; }'
assert 98 'int main() { return "abc"[1]; }'
assert 99 'int main() { return "abc"[2]; }'
assert 100 'int main() { return "abcd"[3]; }'
assert 4 'int main() { return sizeof("abc"); }'
assert 12 'int printf(char *x); int main(){return printf("hello world!"); }'
assert 6 'int printf(char *x); int main(){printf("hello world!\n");return printf(" oops\\"); }'
assert 6 'int main(){char p[] = "hello"; return sizeof p;}'
}
# 26
init() {
assert 1 'int main(){int x = 1; return x;}'
assert 1 'int main(){int x = 1; int *y = &x; return *y;}'
assert 3 'int main(){int x[2] = {1,2}; return x[0]+x[1];} '
assert 19 'int main(){int x[10] = {10,9}; int result = 0; int i=0; for ( i ; i< 10; i = i+1){result = result +x[i];}return result;}'
assert 0 'int main(){int x[2] = {}; return x[0]+x[1];}'
assert 99 'int printf(char *x); int main(){char p[10] = "cello";return p[0]; }'
assert 111 'int main(){char *p = "hello"; return p[4];}'
assert 3 'int three() { return 3; }int arity(int x) { return x; }int main() { return arity(three()); }'
assert 0 'int printf(char *x); int main(){char p[10] = "cello";return p[9]; }'
assert 5 'int printf(char *x); int main(){char p[10] = "hello";return printf(p); }'
assert 19 'int main(){int x[] = {10,9}; int result = 0; int i=0; for ( i ; i< 2; i = i+1){result = result +x[i];}return result;}'
assert 5 'int printf(char *x); int main(){char p[] = "hello";return printf(p); }'
assert 19 'int main(){int x[] = {10,9}; int result = 0; int i=0; for ( i ; i< 2; i = i+1){result = result +x[i];}return result;}'
assert 8 'int main(){int x[] = {1,2}; return sizeof (x);}'
assert 19 'int main(){int x[] = {10,9}; int result = 0; int i=0; for ( i ; i< sizeof(x)/4; i = i+1){result = result +x[i];}return result;}'
assert 19 'int main(){int x[] = {10,9}; int result = 0; for ( int i = 0 ; i< sizeof(x)/4; i = i+1){result = result +x[i];}return result;}'
assert 10 'int a = 10; int main(){return a;}'
# assert 10 'int a ; int y = a; int main(){return 1;}' # err
assert 5 'int main(){int a = 5; int *b = &a; return *b;}'
assert 3 'int a[]= {1,2}; int main(){return a[1]+a[0];}'
assert 3 'int a[3]= {1,2}; int main(){return a[1]+a[0]+a[2];}'
assert 13 'int a[3]= {1,2}; int main(){a[2]=10;return a[1]+a[0]+a[2];}'
assert 20 'int a =20; int *b = &a; int main(){return *b;}'
assert 104 'char p[]="hello"; int main(){return p[0];}'
assert 104 'char *p = "hello"; int main(){return p[0];}'
}
# 27
stmt_expr() {
assert 0 'int main() { return ({ 0; }); }'
assert 2 'int main() { return ({ 0; 1; 2; }); }'
assert 1 'int main() { ({ 0; return 1; 2; }); return 3; }'
assert 3 'int main() { return ({ int x=3; x; }); }'
assert 1 'int main(){ return 0 + ({int x = 1; x;}); }'
}
# 28
var_scope() {
assert 2 'int main(){int x =1; return ({int x = 2; x; }); }'
assert 2 'int main() { int x=2; { int x=3; } return x; }'
assert 2 'int main() { int x=2; { int x=3; } { int y=4; return x; }}'
assert 3 'int main() { int x=2; { x=3; } return x; }'
}
# 29
multi_dimension_arr() {
assert 0 'int main() { int x[2][3]; int *y=x; *y=0; return **x; }'
assert 1 'int main() { int x[2][3]; int *y=x; *(y+1)=1; return *(*x+1); }'
assert 2 'int main() { int x[2][3]; int *y=x; *(y+2)=2; return *(*x+2); }'
assert 3 'int main() { int x[2][3]; int *y=x; *(y+3)=3; return **(x+1); }'
assert 4 'int main() { int x[2][3]; int *y=x; *(y+4)=4; return *(*(x+1)+1); }'
assert 5 'int main() { int x[2][3]; int *y=x; *(y+5)=5; return *(*(x+1)+2); }'
assert 6 'int main() { int x[2][3]; int *y=x; *(y+6)=6; return **(x+2); }'
assert 11 'int main(){int hoge[2][3]; hoge[0][0]=1;hoge[1][2]= 10;return hoge[0][0]+hoge[1][2];}'
assert 72 'int main() {int hoge[2][3][4]; for(int i = 0; i < 2; i=i+1){for (int j = 0; j < 3; j = j+1){for (int k = 0;k<4;k=k+1){hoge[i][j][k]=i+k+j;}}} int result = 0;for(int i = 0; i < 2; i=i+1){for (int j = 0; j < 3; j = j+1){for (int k = 0;k<4;k=k+1){result = result + hoge[i][j][k];}}} return result; }'
assert 96 'int main(){int hoge[2][3][4]; return sizeof hoge;}'
assert 48 'int main(){int hoge[2][3][4]; return sizeof hoge[0];}'
assert 16 'int main(){int hoge[2][3][4]; return sizeof hoge[0][0];}'
assert 4 'int main(){int hoge[2][3][4]; return sizeof hoge[0][0][0];}'
}
# 30
struct() {
assert 8 'int main(){struct square {int x; int y;} square; return sizeof square;}'
assert 3 'int main(){struct square {int x; int y;} square; square.x = 3; square.y = 2; return square.x;}'
assert 2 'int main(){struct square {int x; int y;} square; square.x = 3; square.y = 2; return square.y;}'
assert 6 'int main(){struct square {int x; int y;} square; square.x = 3; square.y = 2; return square.y *square.x;}'
assert 6 'int main(){struct {int x; int y;} square; square.x = 3; square.y = 2; return square.y *square.x;}'
assert 80 'int main(){struct subject {int math[10]; int English[10];} subject; return sizeof(subject);}'
assert 1 'int main(){struct subject {int math[10]; int English[10];} subject; subject.math[0]=1; return subject.math[0];}'
assert 90 'int main(){struct subject {int math[10]; int English[10];} subject; for(int i = 0; i < 10; i = i+1){subject.math[i]= i; subject.English[9-i]=i;} int result = 0;for(int i = 0;i<10;i=i+1){result = result + subject.math[i] + subject.English[i];} return result;}'
assert 32 'int main(){ struct hoge {struct {int a; int b[10]; }hoge; int a; } hoge; hoge.hoge.a = 19; hoge.hoge.b[0] = 1; hoge.hoge.b[2]= 2; hoge.hoge.b[9]=10;return hoge.hoge.a + hoge.hoge.b[0]+hoge.hoge.b[2] +hoge.hoge.b[9];}'
assert 12 'int main(){struct hoge{int a; int b;}hoge[10]; hoge[1].a = 2; hoge[2].b = 10; return hoge[1].a + hoge[2].b;}'
assert 8 'int main(){struct {char a; int b;}hoge; return sizeof(hoge);}'
assert 16 'int main(){struct {char a; int b; char c; }hoge; return sizeof(hoge);}'
assert 30 'int main(){struct hoge {int x; int y;} *obj; struct hoge a; obj = &a;(*obj).x = 10;(*obj).y = 20; return a.x+a.y;}'
assert 30 'int main(){struct hoge {int x; int y;} *obj; struct hoge a; obj = &a;obj->x = 10;obj->y = 20; return a.x+a.y;}'
}
# 31
typedef() {
assert 1 'int main(){typedef int INT; INT x = 1; return x;}'
assert 1 'int main(){ struct hoge {int a;}; typedef struct hoge HOGE; HOGE x; x.a = 1; return x.a;}'
assert 1 'int main(){typedef struct hoge {int a;} HOGE; HOGE x; x.a = 1; return x.a;}'
assert 1 'int main(){typedef int t; t t = 1; return t;}'
assert 2 'int main(){typedef struct {int a;} t; { typedef int t; } t x; x.a=2; return x.a; }'
}
# 32
short_long() {
assert 2 'int main(){short a = 2; return a;}'
assert 10 'int main(){long a = 10; return a;}'
assert 2 'int main(){short a; return sizeof(a);}'
assert 8 'int main(){long a; return sizeof(a);}'
assert 20 'int main(){short a[10]; return sizeof a;}'
assert 80 'int main(){long a[10]; return sizeof a;}'
assert 1 'short sub_short(short a, short c) {return a-c;} int main(){return sub_short(4,3);}'
assert 1 'long sub_long(long a, long c) {return a-c;} int main(){return sub_long(4,3);}'
assert 1 'short rt_short(short a){return a;} int rt_int(int a){return a;}int main(){return rt_int(({rt_short(1);}));}'
assert 1 'short sub_short(short a, short c) {return a-c;} int main(){return sub_short(4,3);}'
assert 20 'int test(int a, int b, int c){return c;} short ttt(){return 1;} int main(){return test(10,ttt(),20);}'
assert 1 'short test(short a){return a;} int main(){return test(1)==1;}'
}
# 33
complex_type() {
assert 24 'int main(){int *x[3]; return sizeof(x);}'
assert 8 'int main(){int (*x)[3]; return sizeof(x);}'
assert 3 'int main(){int *x[3]; int y; x[0]=&y; y=3; return x[0][0];}'
assert 4 'int main(){int x[3]; int (*y)[3]=x; y[0][0]=4;return y[0][0];}'
assert 1 'int g = 1; int *test(){return &g;} int main(){return *test();}'
}
# 34
bool() {
assert 0 'int main(){_Bool x = 0; return x;}'
assert 1 'int main(){_Bool x = 1; return x;}'
assert 1 'int main(){_Bool x = 2; return x;}'
assert 1 'int main(){_Bool x = 2==2; return x;}'
assert 0 'int main(){_Bool x = 2==3; return x;}'
assert 1 'int main(){_Bool x = 1; return sizeof (_Bool);}'
}
# 35
complex_type2() {
assert 1 'int main(){char x = 1; return sizeof x;}'
assert 2 'int main(){short int x = 1; return sizeof(x);}'
assert 2 'int main(){int short x = 1; return sizeof(x);}'
assert 4 'int main(){int x = 1; return sizeof(x);}'
assert 8 'int main(){long x = 1; return sizeof(x);}'
assert 8 'int main(){long int x = 1; return sizeof(x);}'
assert 8 'int main(){int long x = 1; return sizeof(x);}'
assert 1 'int main(){char typedef CHAR; CHAR x = 1; return sizeof x;}'
assert 4 'int main(){typedef A ; A x = 1; return sizeof x;}'
}
# 36
sizeof2() {
assert 1 'int main(){return sizeof(char);}'
assert 2 'int main(){return sizeof(short);}'
assert 2 'int main(){return sizeof(short int);}'
assert 2 'int main(){return sizeof(int short);}'
assert 4 'int main(){return sizeof(int);}'
assert 8 'int main(){return sizeof(long);}'
assert 8 'int main(){return sizeof(long int);}'
assert 8 'int main(){return sizeof(int long);}'
assert 4 'int main(){return sizeof(0);}'
assert 8 'int main(){return sizeof(4294967297);}'
}
# 37
cast() {
assert 1 'int main(){ return (char)8590066177;}'
assert 1 'int main(){ return (_Bool)1;}'
assert 1 'int main(){ return (_Bool)2;}'
assert 0 'int main(){ return (_Bool)(char)256;}'
assert 1 'int main(){ return (long)1;}'
assert 0 'int main(){ return (long)&*(int *)0;}'
assert 5 'int main(){ int x=5; long y=(long)&x; return *(int*)y;}'
assert 2 'int main(){int x; int *y = &x; char *z; char aa = 2; z = &aa; *y = *(int *)z; return x;}'
}
# 38
enum() {
assert 0 'int main(){enum {zero,one,two}; return zero;}'
assert 1 'int main(){enum {zero,one,two}; return one;}'
assert 2 'int main(){enum {zero,one,two}; return two;}'
assert 5 'int main(){enum {five=5,six,seven,}; return five;}'
assert 6 'int main(){enum {five=5,six,seven,}; return six;}'
assert 7 'int main(){enum {five=5,six,seven,}; return seven;}'
assert 0 'int main(){enum{zero, ten = 10 , five = 5}; return zero;}'
assert 10 'int main(){enum{zero, ten = 10 , five = 5}; return ten;}'
assert 5 'int main(){enum{zero, ten = 10 , five = 5}; return five;}'
assert 4 'int main(){enum hoge {zero} x; return sizeof(x);}'
assert 4 'int main(){enum hoge {zero} ; enum hoge x; return sizeof(x);}'
}
# 39
static() {
assert 1 'int count(){static int cnt; cnt = cnt+1; return cnt;} int main(){return count(); }'
assert 3 'int count(){static int cnt; cnt = cnt+1; return cnt;} int main(){count();count();return count(); }'
assert 2 'int count(){static int cnt=1; cnt = cnt+1; return cnt;} int main(){return count(); }'
assert 5 'int count(){static int cnt=2; cnt = cnt+1; return cnt;} int main(){count();count();return count(); }'
}
# 40
comma() {
assert 3 'int main(){return (1,2,3);}'
}
# 41
pp_mm() {
assert 1 'int main(){int i =1; return i++;}'
assert 2 'int main(){int i =1; return ++i;}'
assert 1 'int main(){int i =1; return i--;}'
assert 0 'int main(){int i =1; return --i;}'
assert 2 'int main(){int i =1; i++; return i;}'
assert 2 'int main(){int i =1; ++i; return i;}'
assert 0 'int main(){int i =1; i--; return i;}'
assert 0 'int main(){int i =1; --i; return i;}'
assert 3 'int main(){int a[] = {1,3,5};int *p = a+1; return *p++;}'
assert 4 'int main(){int a[] = {1,3,5};int *p = a+1; return ++*p;}'
assert 3 'int main(){int a[] = {1,3,5};int *p = a+1; return *p--;}'
assert 2 'int main(){int a[] = {1,3,5};int *p = a+1; return --*p;}'
assert 5 'int main(){int a[] = {1,3,5};int *p = a+1; *p++; return *p;}'
assert 1 'int main(){int a[] = {1,3,5};int *p = a+1; *--p; return *p;}'
}
# 42
cpx_assign() {
assert 6 'int main(){int i = 3; i+=3; return i;}'
assert 0 'int main(){int i = 3; i-=3; return i;}'
assert 9 'int main(){int i = 3; i*=3; return i;}'
assert 1 'int main(){int i = 3; i/=3; return i;}'
assert 6 'int main(){int i = 3;return i+=3; }'
assert 0 'int main(){int i = 3;return i-=3; }'
assert 9 'int main(){int i = 3;return i*=3; }'
assert 1 'int main(){int i = 3;return i/=3; }'
assert 45 'int main(){int result = 0;for (int i =0;i<10 ;i++){result +=i;}return result;}'
}
# 43
not() {
assert 1 'int main(){int i = 0; return !i;}'
assert 0 'int main(){int i = 0; return !1;}'
assert 0 'int main(){int i = 0; return !9;}'
assert 1 'int main(){int i = 0; return !0;}'
}
# 44
bitnot() {
assert 11 'int main(){int i =-12; return ~i;}'
assert 1 'int main(){return ~~1;}'
}
# 45
bit_op() {
assert 1 'int main(){return 1|0;}'
assert 3 'int main(){return 2|1;}'
assert 3 'int main(){return 3|1;}'
assert 0 'int main(){return 1&0;}'
assert 0 'int main(){return 2&1;}'
assert 1 'int main(){return 3&1;}'
assert 1 'int main(){return 1^0;}'
assert 3 'int main(){return 2^1;}'
assert 0 'int main(){return 0^0;}'
assert 0 'int main(){return 5^5;}'
assert 1 'int main(){return 1|1^2&0;}'
}
# 46
log_and_or() {
assert 1 'int main(){return 1||0;}'
assert 0 'int main(){return 0||0;}'
assert 1 'int main(){return 1||(1-1)||0;}'
assert 0 'int main(){return 0||(1-1)||0;}'
assert 1 'int main(){return 2&&2;}'
assert 0 'int main(){return 0&&2;}'
assert 0 'int main(){return 2&&0;}'
assert 0 'int main(){return 1&&(2-2)&&2;}'
}
# 47
fn_param_arr() {
assert 0 'int arr_param(int x[]){return x[0];} int main(){int x[2] ={}; return arr_param(x);}'
assert 3 'int arr_param(int x[]){return x[2];} int main(){int x[] ={1,2,3}; return arr_param(x);}'
}
# 48
incomplete_struct() {
assert 8 'int main(){struct *foo; return sizeof foo;}'
assert 8 'int main(){struct T *foo; struct T {int x;} ; return sizeof (struct T); }'
assert 1 'int main(){struct T { struct T *next; int x; } a; struct T b; b.x=1; a.next=&b; return a.next->x;}'
}
# 49
break_fn() {
assert 3 'int main(){int i = 0;for (; i<10; i++){if (i==3){break;}} return i;}'
assert 1 'int main(){int i = 0; return i++ == 0;}'
assert 0 'int main(){int i = 0; return ++i == 0;}'
assert 0 'int main(){int i =0; if (i++ == 0){return 0;}else {return 1;}}'
assert 4 'int main(){int i = 0;int j = 0; while(j<10) {if (i++==3)break; j++;} return i;}'
assert 0 'int main(){int i = 0; for (;;)break; return i;}'
assert 3 'int main(){int i = 0; for(;i<10;i++) { for (;;) break; if (i == 3) break; } return i;}'
assert 1 'int main(){int i =0; for (;;){for (;;) break; i++; break;}return i;}'
assert 4 'int main(){int i = 0; while(1) { while(1) break; if (i++==3)break;} return i;}'
}
# 50
_continue() {
assert 10 'int main(){int i = 0; for (;i<10;i++){if (i==3)continue; if (i==3){return i;}} return i;}'
assert 10 'int main(){int i =0; int j =0;for(;i<10;i++){if(i>5)continue;j++; }return i;}'
assert 6 'int main(){int i =0; int j =0;for(;i<10;i++){if(i>5)continue;j++; }return j;}'
assert 10 'int main(){int i=0; int j=0; for(;!i;) { for (;j!=10;j++) continue; break; } return j;}'
assert 10 'int main(){int i = 0; while(i<10){if (i==3){i++;continue;} if (i==3){break;} i++;} return i;}'
assert 11 'int main(){int i=0; int j=0; while (i++<10) { if (i>5) continue; j++; } return i;}'
assert 5 'int main(){int i=0; int j=0; while (i++<10) { if (i>5) continue; j++; } return j;}'
assert 11 'int main(){int i=0; int j=0; while(!i) { while (j++!=10) continue; break; } return j;}'
}
# 51
goto() {
assert 3 'int main(){int i =0; goto a; a: i++; b: i++; c: i++; return i;}'
assert 2 'int main(){int i =0; goto e; d: i++; e: i++; f: i++; return i;}'
assert 1 'int main(){int i =0; goto j; g: i++; h: i++; j: i++; return i;}'
assert 1 'int test(){a:return 0;} int main(){a:return 1;}'
}
# 52
switch() {
assert 1 'int main(){int i = 0; switch(0){case 0: i = 1;break; case 1: i = 2;break; case 3: i=3;break;} return i;}'
assert 6 'int main(){int i=0; switch(1) { case 0:i=5;break; case 1:i=6;break; case 2:i=7;break; } return i;}'
assert 7 'int main(){int i=0; switch(2) { case 0:i=5;break; case 1:i=6;break; case 2:i=7;break; } return i;}'
assert 1 'int main(){int i=1; switch(3) { case 0:i=5;break; case 1:i=6;break; case 2:i=7;break; } return i;}'
assert 5 'int main(){int i=0; switch(0) { case 0:i=5;break; default:i=7; } return i;}'
assert 7 'int main(){int i=0; switch(1) { case 0:i=5;break; default:i=7; } return i;}'
assert 2 'int main(){int i = 0;switch(0){case 0: i++; case 1: i++;} return i;}'
assert 20 'int main(){int i=0; switch(1) { case 0:i=5;break; default:i=7; switch(i){case 0: i = 11; default: i = 20;} } return i;}'
assert 11 'int main(){int i = 0; switch(1){default: i = 10; case 0: i++;}return i;}'
assert 9 'int main(){int i = 0; int j = 0;for(;i<10;i++){switch(i){case 5: break; default: j++;break;} if (j==5){ break;} } return i+j;}'
assert 5 'int main(){int i =0; switch(0){case 2:case 3:case 0:i=5;break;default: i=10;break;}return i;}'
}
# 53
void_fn() {
assert 0 'void void_fn(){} int main(){void_fn(); return 0;}'
}
# 54
_shift() {
assert 1 'int main(){return 1<<0;}'
assert 8 'int main(){return 1<<3;}'
assert 10 'int main(){return 5<<1;}'
assert 2 'int main(){return 5>>1;}'
assert 1 'int main(){int i =1; i<<= 0; return i;}'
assert 8 'int main(){int i =1; i<<= 3; return i;}'
assert 10 'int main(){int i =5; i<<= 1; return i;}'
assert 2 'int main(){int i =5; i>>= 1; return i;}'
}
# 55
ternary() {
assert 2 'int main(){return 0?1:2;}'
assert 1 'int main(){return 1?1:2;}'
}
# 56
const_expression() {
assert 10 'int main(){enum { ten=1+2+3+4, }; return ten;}'
assert 1 'int main(){int i=0; switch(3) { case 5-2+0*3: i++; } return i;}'
assert 8 'int main(){int x[1+1]; return sizeof(x);}'
assert 2 'int main(){char x[1?2:3]; return sizeof(x);}'
assert 3 'int main(){char x[0?2:3]; return sizeof(x);}'
}
# 57
lvar_initialize() {
assert 1 'int main(){int x[3]={1,2,3}; return x[0];}'
assert 2 'int main(){int x[3]={1,2,3}; return x[1];}'
assert 3 'int main(){int x[3]={1,2,3}; return x[2];}'
assert 3 'int main(){int x[3]={1,2,3,}; return x[2];}'
assert 2 'int main(){int x[2][3]={{1,2,3},{4,5,6}}; return x[0][1];}'
assert 4 'int main(){int x[2][3]={{1,2,3},{4,5,6}}; return x[1][0];}'
assert 6 'int main(){int x[2][3]={{1,2,3},{4,5,6}}; return x[1][2];}'
}
# 58
arr_zero_ini() {
assert 2 'int main(){int x[2][3]={{1,2}}; return x[0][1];}'
assert 0 'int main(){int x[2][3]={{1,2}}; return x[1][0];}'
assert 0 'int main(){int x[2][3]={{1,2}}; return x[1][2];}'
}
# 59
string_arr_ini() {
assert 104 'int main(){char p[6] = "hello"; return p[0];}'
assert 108 'int main(){char p[6] = "hello"; return p[3];}'
assert 0 'int main(){char p[6] = "hello"; return p[5];}'
assert 97 'int main(){char x[2][4]={"abc","def"};return x[0][0];}'
assert 0 'int main(){char x[2][4]={"abc","def"};return x[0][3];}'
assert 100 'int main(){char x[2][4]={"abc","def"};return x[1][0];}'
assert 102 'int main(){char x[2][4]={"abc","def"};return x[1][2];}'
}
# 60
unsized_arr() {
assert 3 'int main(){int x[]={1,2,3}; return x[2];}'
assert 16 'int main(){int x[]={1,2,3,4}; return sizeof x;}'
assert 6 'int main(){char p[] = "Hello"; return sizeof p;}'
}
# 61
struct_ini() {
assert 1 'int main(){ struct {int a; int b; int c;} x={1,2,3}; return x.a;}'
assert 2 'int main(){ struct {int a; int b; int c;} x={1,2,3}; return x.b;}'
assert 3 'int main(){ struct {int a; int b; int c;} x={1,2,3}; return x.c;}'
assert 1 'int main(){struct {int a; int b; int c;} x={1}; return x.a;}'
assert 0 'int main(){struct {int a; int b; int c;} x={1}; return x.b;}'
assert 0 'int main(){struct {int a; int b; int c;} x={1}; return x.c;}'
assert 1 'int main(){struct {int a; int b;} x[2]={{1,2},{3,4}}; return x[0].a;}'
assert 2 'int main(){struct {int a; int b;} x[2]={{1,2},{3,4}}; return x[0].b;}'
assert 3 'int main(){struct {int a; int b;} x[2]={{1,2},{3,4}}; return x[1].a;}'
assert 4 'int main(){struct {int a; int b;} x[2]={{1,2},{3,4}}; return x[1].b;}'
assert 0 'int main(){struct {int a; int b;} x[2]={{1,2}}; return x[1].b; }'
}
# 62
gvar_scalar_ini() {
assert 0 'int x; int y; int main(){return x;}'
assert 3 'char g = 3; int main(){return g;}'
assert 3 'short g = 3; int main(){return g;}'
assert 3 'int g = 3; int main(){return g;}'
assert 3 'long g = 3; int main(){return g;}'
assert 3 'int g1 = 3;int *g = &g1; int main(){return *g;}'
assert 97 ' char *g = "abc"; int main(){return g[0];}'
}
# 63
gvar_arr_ini() {
assert 2 'int g[3] = {0,1,2}; int main(){return g[2];}'
assert 98 'char *g[] = {"foo","bar"}; int main(){return g[1][0];}'
assert 3 'struct {char a; int b;}g[2]={{1,2},{3,4}}; int main(){return g[1].a;}'
assert 4 'struct {int a[2];}g[2] = {{{1,2}}, {{3,4}}}; int main(){return g[1].a[1];}'
}
# 64
omit_paran() {
assert 1 'struct {int a[2];}g[2] = {{1,2},3,4}; int main(){return g[0].a[0];}'
assert 2 'struct {int a[2];}g[2] = {{1,2},3,4}; int main(){return g[0].a[1];}'
assert 3 'struct {int a[2];}g[2] = {{1,2},3,4}; int main(){return g[1].a[0];}'
assert 4 'struct {int a[2];}g[2] = {{1,2},3,4}; int main(){return g[1].a[1];}'
assert 1 'struct {int a[2];}g[2] = {1,2,3,4}; int main(){return g[0].a[0];}'
assert 2 'struct {int a[2];}g[2] = {1,2,3,4}; int main(){return g[0].a[1];}'
assert 3 'struct {int a[2];}g[2] = {1,2,3,4}; int main(){return g[1].a[0];}'
assert 4 'struct {int a[2];}g[2] = {1,2,3,4}; int main(){return g[1].a[1];}'
assert 102 'char *g = {"foo"}; int main(){return g[0];}'
assert 102 "char g[][4] = {'f', 'o', 'o', 0, 'b', 'a', 'r', 0}; int main(){return g[0][0];}"
assert 0 "char g[][4] = {'f', 'o', 'o', 0, 'b', 'a', 'r', 0}; int main(){return g[0][3];}"
assert 98 "char g[][4] = {'f', 'o', 'o', 0, 'b', 'a', 'r', 0}; int main(){return g[1][0];}"
assert 0 "char g[][4] = {'f', 'o', 'o', 0, 'b', 'a', 'r', 0}; int main(){return g[1][3];}"
}
# 65
gvar_string() {
assert 104 'char g[] = "hello"; int main(){return g[0];}'
assert 0 'char g[] = "hello"; int main(){return g[5];}'
assert 6 'char g[] = "hello"; int main(){return sizeof g;}'
assert 10 'char g[10] = "hello";int main(){return sizeof g;}'
assert 104 'char g[10] = "hello";int main(){return g[0];}'
assert 0 'char g[10] = "hello";int main(){return g[9];}'
assert 3 'char g[3] = "hello";int main(){return sizeof g;}'
assert 104 'char g[3] = "hello";int main(){return g[0];}'
assert 108 'char g[3] = "hello";int main(){return g[2];}'
}
# 66
gvar_addend() {
assert 104 'char g[]="hello"; char *g2=g+0; int main(){return g2[0];}'
assert 101 'char g[]="hello"; char *g2=g+1; int main(){return g2[0];}'
assert 111 'char g[]="hello"; char *g2=g+4; int main(){return g2[0];}'
assert 104 'char g[]="hello"; char *g2=g-3; int main(){return g2[3];}'
}
# 67
global_typedef() {
assert 1 'typedef int INT; INT main(){INT a = 1; return a;}'
assert 1 'typedef struct node_t{struct node_t *next; int val;}node; int main(){node a ; a.val = 1; node b; b.next = &a; node c;c.next = &b; return c.next->next->val;}'
}
# 68
return_only() {
assert 1 'int printf(char *p);void ret_none(){printf("hello\n");return ;} int main(){ret_none(); return 1;}'
}
# 69
do_while() {
assert 7 'int main(){int i = 0; int j =0; do{j++;}while(i++ < 6); return j;}'
assert 4 'int main(){int i = 0; int j =0; int k = 0; do{if (++j > 3)break; continue; k++;}while(1); return j;}'
}
# 70
read_variadic_fn() {
assert 6 'int add_all(int n,...);int main(){return add_all(3,1,2,3);}'
assert 5 'int add_all(int n,...);int main(){return add_all(4,1,2,3,-1);}'
assert 3 'int printf(char *p,...);int main(){int i = 10; return printf("%d\n", i);}'
}
# 71
copy_struct() {
assert 6 'int main(){struct a{int a; int b; int c;} c={1,2,3}; struct a d; d = c; return d.a+d.b+d.c;}'
}
anonymous_struct() {
assert 3 'int main(){struct a{int a;}; struct b{int b;}; struct a a = {1}; struct b b ={2}; return a.a + b.b;}'
}
bitassign() {
assert 2 "int main(){int i=6; i&=3; return i;}"
assert 7 "int main(){int i=6; i|=3; return i;}"
assert 10 "int main(){int i = 15; i^=5; return i;}"
}
compound_literal() {
assert 1 'int main(){return (int){1}; }'
assert 2 'int main(){ return ((int[3]){0,1,2})[2]; }'
assert 2 'int main(){ return ((int[]){0,1,2})[2]; }'
assert 3 'int main(){ return ((struct {char a; int b;}){98, 3}).b; }'
assert 3 'int main(){ return ({ int x=3; (int){x}; }); }'
}
# Build and run the three compiled C regression binaries (test.c, extern.c,
# variadic.c) inside /rs9cc/bin, then clean up. Every step is chained with &&
# so the first failure aborts the chain and its non-zero status propagates to
# the caller.
# NOTE(review): this function shadows the shell builtin `test`; the rest of
# this script only uses the `[ ... ]` form, so the shadowing is harmless here.
test() {
cd /rs9cc/bin &&
make test.exe &&
echo "test.c" &&
./test.exe &&
make extern.exe &&
echo " " &&
echo "extern.c" &&
./extern.exe &&
make variadic.exe &&
echo " " &&
echo "variadic.c" &&
./variadic.exe &&
make clean &&
cd -
}
# assert EXPECTED SOURCE
# Compile SOURCE with rs9cc, link the generated assembly against tmp2.o, run
# the binary and compare its exit status with EXPECTED. Exits the whole
# script with status 1 on the first mismatch.
assert() {
expected="$1"
input="$2"
local bin="./target/debug/rs9cc"
if [ -n "$RS9CC_ON_WORKFLOW" ]; then
# CI currently uses the same binary path; kept as a hook for overrides.
bin="./target/debug/rs9cc"
fi
echo "$input" >test.c
# Quote the compiler path and arguments so paths containing spaces survive
# word splitting (the original invoked $bin unquoted).
"$bin" "test.c" >tmp.s
cc -no-pie -o tmp tmp.s tmp2.o
./tmp
actual="$?"
if [ "$actual" = "$expected" ]; then
echo "$input => $actual"
else
echo "$input => $expected expected, but got $actual"
exit 1
fi
}
# With no CLI arguments at all, run the compiled C binary suite (the `test`
# function above, not the builtin) and exit with its status.
if [ $# -eq 0 ]; then
test
__my_code=$?
exit $__my_code
fi
# A leading "sh" argument selects the shell-driven assert suites below.
if [ "$1" == "sh" ]; then
shift
fi
# After the optional "sh" was consumed: with no selectors left, run the whole
# regression suite in its canonical order. The first failing assert exits 1.
if [ $# -eq 0 ]; then
for tc in \
four_op eq neq greater lesser leq geq \
single_char_variable multi_char_variable return_stmt control_stmt \
block_stmt func_call zero_arity_func_def six_arity_func_def \
unary_deref_addr int_keyword pointer_type pointer_operation sizeof \
array array_idx global_variable char string init stmt_expr \
var_scope multi_dimension_arr struct typedef short_long \
complex_type bool complex_type2 sizeof2 cast enum static comma \
pp_mm cpx_assign not bitnot bit_op log_and_or fn_param_arr \
incomplete_struct break_fn _continue goto switch void_fn _shift \
ternary const_expression lvar_initialize arr_zero_ini \
string_arr_ini unsized_arr struct_ini gvar_scalar_ini gvar_arr_ini \
omit_paran gvar_string gvar_addend global_typedef return_only \
do_while read_variadic_fn copy_struct anonymous_struct bitassign \
compound_literal
do
"$tc"
done
fi
# Dispatch numeric selectors "1".."74" to their suite functions, one per CLI
# argument. Anything that is not a plain 1-2 digit number in range (e.g. "0",
# "01", "75", words) is silently ignored, exactly like the original case table.
all_cases=( \
four_op eq neq greater lesser leq geq \
single_char_variable multi_char_variable return_stmt control_stmt \
block_stmt func_call zero_arity_func_def six_arity_func_def \
unary_deref_addr int_keyword pointer_type pointer_operation sizeof \
array array_idx global_variable char string init stmt_expr \
var_scope multi_dimension_arr struct typedef short_long \
complex_type bool complex_type2 sizeof2 cast enum static comma \
pp_mm cpx_assign not bitnot bit_op log_and_or fn_param_arr \
incomplete_struct break_fn _continue goto switch void_fn _shift \
ternary const_expression lvar_initialize arr_zero_ini \
string_arr_ini unsized_arr struct_ini gvar_scalar_ini gvar_arr_ini \
omit_paran gvar_string gvar_addend global_typedef return_only \
do_while read_variadic_fn copy_struct anonymous_struct bitassign \
compound_literal )
while [ $# -ne 0 ]; do
case $1 in
[1-9]|[1-9][0-9])
if [ "$1" -le "${#all_cases[@]}" ]; then
"${all_cases[$(($1 - 1))]}"
fi
;;
esac
shift
done
echo OK
|
<filename>src/utils/enums.js
/*
* @Descripttion: 枚举类型
* @version: 1.0.0
* @Author: LSC
* @Date: 2021-07-01 11:07:02
* @LastEditors: LSC
* @LastEditTime: 2021-08-30 16:27:52
*/
// Enumeration of the supported numeric property data-type codes.
// `isThisType` is attached so callers can ask
// `propertyDatatype.isThisType(v)` whether `v` is one of the declared codes
// (the helper below scans the enumerable values of `this`).
export const propertyDatatype = {
int32: 1,
float: 2,
double: 3,
isThisType
}
// Walk every enumerable property hung off `this` and report whether any of
// them stores exactly `val` (strict equality).
function isThisType(val) {
  let matched = false
  for (const key in this) {
    if (this[key] === val) {
      matched = true
      break
    }
  }
  return matched
}
/**
 * @description: Format a date-ish input as "M-D" (month-day, no zero padding).
 * @param {*} str anything accepted by `new Date(...)` (ISO string, epoch ms, Date)
 * @return {string} e.g. "6-11"
 *
 * NOTE(review): this helper is not exported and has no caller in this module;
 * the commented-out year/time/weekday formatting was removed as dead noise.
 */
function setTime(str) {
let D = new Date(str)
let month = D.getMonth() + 1 // getMonth() is 0-based, so +1 for the calendar month
let day = D.getDate() // day of month (1-31)
let now_time = month + '-' + day
return now_time
}
// Map `item.calculationType` (1..7) onto its comparison label and store it on
// `item.condition`; the label (or '') is also returned.
// Codes: 1 小于, 2 小于等于, 3 等于, 4 大于, 5 大于等于, 6 不等于, 7 在之间.
export function matchType(item) {
  const condition = ['小于', '小于等于', '等于', '大于', '大于等于', '不等于', '在之间']
  // Guard out-of-range codes: the original indexed the table unconditionally,
  // so e.g. calculationType = 8 crashed with "cannot read .name of undefined".
  const label = condition[item.calculationType - 1]
  item.condition = item.calculationType && label ? label : ''
  return item.condition
}
// Fill `item.showValue` for display: enum (4), bool (5) and date (7)
// properties use the pre-rendered `propertyShowValue`; every other type shows
// the raw value with its unit (if any) appended.
export function matchContent(item) {
  const preRendered = [4, 5, 7]
  if (preRendered.includes(item.propertyDatatype)) {
    item.showValue = item.propertyShowValue
    return
  }
  const unit = item.propertyUnit ? item.propertyUnit : ''
  item.showValue = `${item.propertyValue}${unit}`
}
// Generate an RFC 4122 version-4 style random UUID string.
export function generateId() {
  const s = []
  const hexDigits = '0123456789abcdef'
  for (let i = 0; i < 36; i++) {
    s[i] = hexDigits.charAt(Math.floor(Math.random() * 0x10))
  }
  // Version nibble: bits 12-15 of time_hi_and_version = 0100 (version 4).
  s[14] = '4'
  // Variant: bits 6-7 of clock_seq_hi_and_reserved = 10.
  // BUG FIX: the original computed `s[19] & 0x3` on a *string*; hex letters
  // coerced to NaN -> 0, so the low bits were discarded and the nibble was
  // always '8'. Parse the hex digit before masking.
  s[19] = hexDigits.charAt((parseInt(s[19], 16) & 0x3) | 0x8)
  s[8] = s[13] = s[18] = s[23] = '-'
  const uuid = s.join('')
  return uuid
}
// Convert a plain object into FormData; array values are appended once per
// element under the same key, everything else is appended as a single entry.
export function objectToFormData(obj) {
  const formData = new FormData()
  for (const key of Object.keys(obj)) {
    const value = obj[key]
    if (value instanceof Array) {
      value.forEach((entry) => formData.append(key, entry))
    } else {
      formData.append(key, value)
    }
  }
  return formData
}
|
// Print every multiple of 4 between 0 and 10 inclusive (0, 4, 8).
for (int i = 0; i <= 10; i++) {
if (i % 4 == 0) {
System.out.println(i);
}
}
Output:
0
4
8 |
#!/usr/bin/env bash
set -o nounset
export KUBECONFIG=${KUBECONFIG:-$HOME/.kube/config}
# print_log MESSAGE — echo MESSAGE prefixed with this script's basename.
function print_log() {
echo "$(basename $0): $1"
}
# url_reachable URL — succeed iff URL answers within 4 seconds.
function url_reachable() {
    # Quote the URL so values containing '&', '?' or spaces are not
    # word-split/globbed (the original passed $1 unquoted). The function's
    # status is curl's status; an explicit `return $?` is redundant.
    curl -s "$1" --max-time 4 > /dev/null
}
# spawn_port_forwarding_command SERVICE_NAME EXTERNAL_PORT
# Generate an xinetd redirect config forwarding EXTERNAL_PORT on this host to
# the service's NodePort inside the minikube VM, install it under
# /etc/xinetd.d and (re)load xinetd.
function spawn_port_forwarding_command() {
service_name=$1
external_port=$2
cat << EOF > build/xinetd-${service_name}
service ${service_name}
{
flags = IPv4
bind = 0.0.0.0
type = UNLISTED
socket_type = stream
protocol = tcp
user = root
wait = no
redirect = $(minikube ip) $(kubectl --kubeconfig=${KUBECONFIG} get svc/${service_name} -n assisted-installer -o=jsonpath='{.spec.ports[0].nodePort}')
port = ${external_port}
only_from = 0.0.0.0/0
per_source = UNLIMITED
}
EOF
sudo mv build/xinetd-${service_name} /etc/xinetd.d/${service_name} --force
sudo systemctl start xinetd
sudo systemctl reload xinetd
}
# run_in_background CMD — detach CMD from this shell (nohup, output discarded).
function run_in_background() {
bash -c "nohup $1 >/dev/null 2>&1 &"
}
# Stop xinetd, tearing down every port forwarding it serves.
function kill_all_port_forwardings() {
sudo systemctl stop xinetd
}
# Print the primary (default-route) IPv4 address of this host.
function get_main_ip() {
echo "$(ip route get 1 | sed 's/^.*src \([^ ]*\).*$/\1/;q')"
}
# wait_for_url_and_run URL COMMAND
# Invoke COMMAND up to 15 times, sleeping 30s between attempts, until URL
# becomes reachable; exit the whole script with status 1 if it never does.
function wait_for_url_and_run() {
RETRIES=15
RETRIES=$((RETRIES))  # NOTE(review): no-op re-assignment, kept verbatim
STATUS=1
url_reachable "$1" && STATUS=$? || STATUS=$?
until [ $RETRIES -eq 0 ] || [ $STATUS -eq 0 ]; do
RETRIES=$((RETRIES-1))
echo "Running given function"
$2
echo "Sleeping for 30 seconds"
sleep 30s
echo "Verifying URL and port are accessible"
url_reachable "$1" && STATUS=$? || STATUS=$?
done
if [ $RETRIES -eq 0 ]; then
echo "Timeout reached, URL $1 not reachable"
exit 1
fi
}
# Remove the firewall openings previously created for ports 6000 and 6008.
function close_external_ports() {
sudo firewall-cmd --zone=public --remove-port=6000/tcp
sudo firewall-cmd --zone=public --remove-port=6008/tcp
}
# Dispatch: the first CLI argument names the function to run; the remaining
# arguments are passed through to it.
"$@"
|
<reponame>Swordce/client-master
package com.lepao.ydcgkf.ocr.bean;
import java.util.List;
/**
*
* @author Android(JiaWei)
* @date 2017/8/24
*/
/**
 * JSON mapping for the OCR id-card recognition response.
 */
public class OCRIdCardResultJson {
    // Top-level list of recognition outputs returned by the OCR service.
    private List<OCROutputs> outputs;
    public List<OCROutputs> getOutputs() {
        return outputs;
    }
    public void setOutputs(List<OCROutputs> outputs) {
        this.outputs = outputs;
    }
    /** One labelled output entry of the OCR response. */
    public static class OCROutputs {
        private String outputLabel;
        private OCROutputMulti outputMulti;
        private OCROutputValue outputValue;
        public String getOutputLabel() {
            return outputLabel;
        }
        public void setOutputLabel(String outputLabel) {
            this.outputLabel = outputLabel;
        }
        public OCROutputMulti getOutputMulti() {
            return outputMulti;
        }
        public void setOutputMulti(OCROutputMulti outputMulti) {
            this.outputMulti = outputMulti;
        }
        public OCROutputValue getOutputValue() {
            return outputValue;
        }
        public void setOutputValue(OCROutputValue outputValue) {
            this.outputValue = outputValue;
        }
    }
    /** Placeholder for the (currently empty) multi-value payload. */
    public static class OCROutputMulti {
    }
    /** Typed scalar payload of an output entry (type code plus raw value). */
    public static class OCROutputValue {
        private int dataType;
        private String dataValue;
        public int getDataType() {
            return dataType;
        }
        public void setDataType(int dataType) {
            this.dataType = dataType;
        }
        public String getDataValue() {
            return dataValue;
        }
        public void setDataValue(String dataValue) {
            this.dataValue = dataValue;
        }
    }
}
|
#!/bin/bash
set -e          # exit immediately on error
set -o nounset  # abort on unbound variable
set -o pipefail # don't hide errors within pipes
# set -x # for debugging, trace what is being executed.

# Work from the repository root (one level above this script).
cd "$(dirname "$0")/.."

# Drop every generated artifact directory.
for artifact in dist declarations build coverage; do
    rm -rf "./${artifact}"
done
#!/bin/bash
# ========== Experiment Seq. Idx. 2701 / 52.2.1.0 / N. 0 - _S=52.2.1.0 D1_N=39 a=-1 b=1 c=1 d=1 e=-1 f=-1 D3_N=4 g=1 h=-1 i=-1 D4_N=1 j=1 D5_N=0 ==========
set -u
# Prints header
echo -e '\n\n========== Experiment Seq. Idx. 2701 / 52.2.1.0 / N. 0 - _S=52.2.1.0 D1_N=39 a=-1 b=1 c=1 d=1 e=-1 f=-1 D3_N=4 g=1 h=-1 i=-1 D4_N=1 j=1 D5_N=0 ==========\n\n'
# Prepares all environment variables
JBHI_DIR="$HOME/jbhi-special-issue"
RESULTS_DIR="$JBHI_DIR/results"
if [[ "No" == "Yes" ]]; then
SVM_SUFFIX="svm"
PREDICTIONS_FORMAT="isbi"
else
SVM_SUFFIX="nosvm"
PREDICTIONS_FORMAT="titans"
fi
RESULTS_PREFIX="$RESULTS_DIR/deep.39.layer.4.test.1.index.2701.$SVM_SUFFIX"
RESULTS_PATH="$RESULTS_PREFIX.results.txt"
# ...variables expected by jbhi-checks.include.sh and jbhi-footer.include.sh
SOURCES_GIT_DIR="$JBHI_DIR/jbhi-special-issue"
LIST_OF_INPUTS="$RESULTS_PREFIX.finish.txt"
# ...this experiment is a little different --- only one master procedure should run, so there's only a master lock file
METRICS_TEMP_PATH="$RESULTS_DIR/this_results.anova.txt"
METRICS_PATH="$RESULTS_DIR/all_results.anova.txt"
START_PATH="$METRICS_PATH.start.txt"
FINISH_PATH="-"
LOCK_PATH="$METRICS_PATH.running.lock"
LAST_OUTPUT="$METRICS_PATH"
mkdir -p "$RESULTS_DIR"
#
# Assumes that the following environment variables where initialized
# SOURCES_GIT_DIR="$JBHI_DIR/jbhi-special-issue"
# LIST_OF_INPUTS="$DATASET_DIR/finish.txt:$MODELS_DIR/finish.txt:"
# START_PATH="$OUTPUT_DIR/start.txt"
# FINISH_PATH="$OUTPUT_DIR/finish.txt"
# LOCK_PATH="$OUTPUT_DIR/running.lock"
# LAST_OUTPUT="$MODEL_DIR/[[[:D1_MAX_NUMBER_OF_STEPS:]]].meta"
EXPERIMENT_STATUS=1
STARTED_BEFORE=No
# Checks if code is stable, otherwise alerts scheduler
pushd "$SOURCES_GIT_DIR" >/dev/null
GIT_STATUS=$(git status --porcelain)
GIT_COMMIT=$(git log | head -n 1)
popd >/dev/null
if [ "$GIT_STATUS" != "" ]; then
echo 'FATAL: there are uncommitted changes in your git sources file' >&2
echo ' for reproducibility, experiments only run on committed changes' >&2
echo >&2
echo ' Git status returned:'>&2
echo "$GIT_STATUS" >&2
exit 162
fi
# The experiment is already finished - exits with special code so scheduler won't retry
if [[ "$FINISH_PATH" != "-" ]]; then
if [[ -e "$FINISH_PATH" ]]; then
echo 'INFO: this experiment has already finished' >&2
exit 163
fi
fi
# The experiment is not ready to run due to dependencies - alerts scheduler
if [[ "$LIST_OF_INPUTS" != "" ]]; then
    # BUG FIX: the original `IFS=':' tokens_of_input=( $LIST_OF_INPUTS )`
    # permanently changed IFS for the rest of the script (both words are
    # assignments, so the IFS change is not command-scoped) and the unquoted
    # expansion was subject to globbing. `read` keeps the IFS change local.
    IFS=':' read -r -a tokens_of_input <<< "$LIST_OF_INPUTS"
    input_missing=No
    for input_to_check in "${tokens_of_input[@]}"; do
        if [[ ! -e "$input_to_check" ]]; then
            echo "ERROR: input $input_to_check missing for this experiment" >&2
            input_missing=Yes
        fi
    done
    if [[ "$input_missing" != No ]]; then
        exit 164
    fi
fi
# Sets trap to return error code if script is interrupted before successful finish
LOCK_SUCCESS=No
FINISH_STATUS=161
# EXIT trap: release the lock (only if this process took it), log a line
# matching FINISH_STATUS, and re-exit with it so the scheduler can tell apart
# success (160), lock contention (165) and failures (anything else).
function finish_trap {
if [[ "$LOCK_SUCCESS" == "Yes" ]]; then
rmdir "$LOCK_PATH" &> /dev/null
fi
if [[ "$FINISH_STATUS" == "165" ]]; then
echo 'WARNING: experiment discontinued because other process holds its lock' >&2
else
if [[ "$FINISH_STATUS" == "160" ]]; then
echo 'INFO: experiment finished successfully' >&2
else
# On failure, drop the finish marker so a retry is not skipped as "done".
[[ "$FINISH_PATH" != "-" ]] && rm -f "$FINISH_PATH"
echo 'ERROR: an error occurred while executing the experiment' >&2
fi
fi
exit "$FINISH_STATUS"
}
trap finish_trap EXIT
# While running, locks experiment so other parallel threads won't attempt to run it too
if mkdir "$LOCK_PATH" --mode=u=rwx,g=rx,o=rx &>/dev/null; then
LOCK_SUCCESS=Yes
else
echo 'WARNING: this experiment is already being executed elsewhere' >&2
FINISH_STATUS="165"
exit
fi
# If the experiment was started before, do any cleanup necessary
if [[ "$START_PATH" != "-" ]]; then
if [[ -e "$START_PATH" ]]; then
echo 'WARNING: this experiment is being restarted' >&2
STARTED_BEFORE=Yes
fi
#...marks start
date -u >> "$START_PATH"
echo GIT "$GIT_COMMIT" >> "$START_PATH"
fi
if [[ "$STARTED_BEFORE" == "Yes" ]]; then
# If the experiment was started before, do any cleanup necessary
echo -n
else
echo "D1_N;D3_N;D4_N;a;b;c;d;e;f;g;h;i;j;m_ap;m_auc;m_tn;m_fp;m_fn;m_tp;m_tpr;m_fpr;k_ap;k_auc;k_tn;k_fp;k_fn;k_tp;k_tpr;k_fpr;isbi_auc" > "$METRICS_PATH"
fi
python \
"$SOURCES_GIT_DIR/etc/compute_metrics.py" \
--metadata_file "$SOURCES_GIT_DIR/data/all-metadata.csv" \
--predictions_format "$PREDICTIONS_FORMAT" \
--metrics_file "$METRICS_TEMP_PATH" \
--predictions_file "$RESULTS_PATH"
EXPERIMENT_STATUS="$?"
echo -n "39;4;1;" >> "$METRICS_PATH"
echo -n "-1;1;1;1;-1;-1;1;-1;-1;1;" >> "$METRICS_PATH"
tail "$METRICS_TEMP_PATH" -n 1 >> "$METRICS_PATH"
#
#...starts training
if [[ "$EXPERIMENT_STATUS" == "0" ]]; then
if [[ "$LAST_OUTPUT" == "" || -e "$LAST_OUTPUT" ]]; then
if [[ "$FINISH_PATH" != "-" ]]; then
date -u >> "$FINISH_PATH"
echo GIT "$GIT_COMMIT" >> "$FINISH_PATH"
fi
FINISH_STATUS="160"
fi
fi
|
<reponame>sozdayka/RealEstateAgency.Web
import { Component } from '@angular/core';
@Component({
selector: 'header-menu',
templateUrl: './header.component.html'
})
export class HeaderComponent {
// Whether a user is currently logged in (drives login/logout UI state).
public LoginStatus:boolean = false;
// Property-category tiles shown in the header menu: display name, preview
// image and the filter route for that category id.
public categorys: {name: string, img: string, link: string} [] = [
{
name: '<NAME>',
img: "images/content/pa1.jpg",
link: "filter?category=1"
},
{
name: 'Офисы',
img: "images/content/pa2.jpg",
link: "filter?category=2"
},
{
name: 'Квартиры в новострое',
img: "images/content/pa3.jpg",
link: "filter?category=3"
},
{
name: '<NAME>',
img: "images/content/pa3.jpg",
link: "filter?category=4"
},
{
name: 'Пенхаусы',
img: "images/content/pa5.jpg",
link: "filter?category=5"
},
{
name: 'Участки',
img: "images/content/pa6.jpg",
link: "filter?category=6"
},
{
name: 'Гаражи',
img: "images/content/pa7.jpg",
link: "filter?category=7"
},
{
name: 'Другое',
img: "images/content/pa8.jpg",
link: "filter?category=8"
}
];
// CSS class toggling the Bootstrap navbar: 'collapse' = closed, '' = open.
collapse: string = 'collapse';
// Toggle the navbar between its open and collapsed states.
collapseNavbar(): void {
if (this.collapse.length > 1) {
this.collapse = '';
} else {
this.collapse = 'collapse';
}
}
// Force-close the navbar (e.g. after a menu item was clicked).
collapseMenu() {
this.collapse = 'collapse';
}
}
|
package providers
import (
"bufio"
"bytes"
"encoding/xml"
"errors"
"fmt"
"os/exec"
"path/filepath"
"strconv"
"strings"
"time"
"fence-executor/utils"
"os"
)
type RHAgentProvider struct {
config *RHAgentProviderConfig
Agents map[string]*RHAgent
}
type RHAgentProviderConfig struct {
Glob string
}
type RHAgent struct {
Command string
*utils.Agent
}
func newRHAgent() *RHAgent {
return &RHAgent{Agent: utils.NewAgent()}
}
const (
defaultGlob = "/usr/sbin/fence_*"
)
var defaultConfig = &RHAgentProviderConfig{Glob: defaultGlob}
func CreateRHProvider(config *RHAgentProviderConfig) *RHAgentProvider {
p := &RHAgentProvider{Agents: make(map[string]*RHAgent)}
if config != nil {
p.config = config
} else {
p.config = defaultConfig
}
return p
}
type RHResourceAgent struct {
Command string
XMLName xml.Name `xml:"resource-agent"`
Name string `xml:"name,attr"`
ShortDesc string `xml:"shortdesc,attr"`
LongDesc string `xml:"longdesc"`
VendorUrl string `xml:"vendor-url"`
Parameters []RHParameter `xml:"parameters>parameter"`
Actions []RHAction `xml:"actions>action"`
}
type RHParameter struct {
Name string `xml:"name,attr"`
Unique bool `xml:"unique,attr"`
Required bool `xml:"required,attr"`
ShortDesc string `xml:"shortdesc"`
Content RHContent `xml:"content"`
}
type RHContent struct {
ContentType string `xml:"type,attr"`
Default string `xml:"default,attr"`
Options []RHContentOptions `xml:"option"`
}
type RHContentOptions struct {
Value string `xml:"value,attr"`
}
type RHAction struct {
Name string `xml:"name,attr"`
OnTarget string `xml:"on_target,attr"`
Automatic string `xml:"automatic,attr"`
}
// ParseMetadata unmarshals the XML a fence agent prints for "-o metadata"
// into an RHResourceAgent description.
func ParseMetadata(mdxml []byte) (*RHResourceAgent, error) {
	agent := &RHResourceAgent{}
	if err := xml.Unmarshal(mdxml, agent); err != nil {
		return nil, err
	}
	return agent, nil
}
// ToResourceAgent converts raw XML metadata into an RHAgent, translating the
// special "action", "port" and "separator" parameters into agent-level flags
// instead of ordinary parameters.
func (r *RHResourceAgent) ToResourceAgent() (*RHAgent, error) {
	a := newRHAgent()
	a.Command = r.Command
	a.Name = r.Name
	a.ShortDesc = r.ShortDesc
	a.LongDesc = r.LongDesc
	for _, mdp := range r.Parameters {
		// If "action" parameter ignore it and set agent's DefaultAction
		if mdp.Name == "action" && mdp.Content.Default != "" {
			fa, err := utils.StringToAction(mdp.Content.Default)
			if err != nil {
				// Ignore bad default action
			} else {
				a.DefaultAction = fa
			}
			continue
		}
		// The "port" parameter is not exposed as a parameter; its presence
		// only flags that the agent can drive multiple ports. (The previous
		// comment here wrongly said it set DefaultAction - copy/paste.)
		if mdp.Name == "port" {
			a.MultiplePorts = true
			continue
		}
		// "separator" is purely an output-formatting option; skip it.
		if mdp.Name == "separator" {
			continue
		}
		// TODO. All the metadatas reports unique = "0" but I think they should be unique...
		p := &utils.Parameter{Name: mdp.Name, Unique: mdp.Unique, Required: mdp.Required, Desc: mdp.ShortDesc}
		switch mdp.Content.ContentType {
		case "boolean":
			p.ContentType = utils.Boolean
			if mdp.Content.Default != "" {
				value, err := strconv.ParseBool(mdp.Content.Default)
				if err != nil {
					return nil, err
				}
				p.Default = value
			}
		case "string":
			p.ContentType = utils.String
			if mdp.Content.Default != "" {
				p.Default = mdp.Content.Default
			}
		case "select":
			p.HasOptions = true
			p.ContentType = utils.String
			if mdp.Content.Default != "" {
				p.Default = mdp.Content.Default
			}
		default:
			return nil, fmt.Errorf("Agent: %s, parameter: %s. Wrong content type: %s", a.Name, p.Name, mdp.Content.ContentType)
		}
		for _, o := range mdp.Content.Options {
			p.Options = append(p.Options, o.Value)
		}
		a.Parameters[p.Name] = p
	}
	for _, mda := range r.Actions {
		// "on" doubles as the unfence action when flagged automatic/on_target.
		if mda.Name == "on" {
			if mda.Automatic == "1" {
				a.UnfenceAction = utils.On
			}
			if mda.OnTarget == "1" {
				a.UnfenceOnTarget = true
			}
		}
		fa, err := utils.StringToAction(mda.Name)
		if err != nil {
			// Ignore unknown action
			continue
		}
		a.Actions = append(a.Actions, fa)
	}
	return a, nil
}
// LoadAgents rebuilds the provider's agent map by globbing for fence agent
// executables and querying each for its metadata. timeout bounds the *total*
// wall-clock budget across all agents (0 = unlimited). Agents whose metadata
// cannot be loaded or parsed are skipped silently (best effort).
func (p *RHAgentProvider) LoadAgents(timeout time.Duration) error {
	p.Agents = make(map[string]*RHAgent)
	files, err := filepath.Glob(p.config.Glob)
	if err != nil {
		return err
	}
	t := time.Now()
	nexttimeout := 0 * time.Second
	// TODO Detected duplicate agents? (agents returning the same name in metadata)
	for _, file := range files {
		if timeout != 0 {
			// Shrink the remaining budget by the time already spent.
			nexttimeout = timeout - time.Since(t)
			if nexttimeout < 0 {
				return errors.New("timeout")
			}
		}
		a, err := p.LoadAgent(file, nexttimeout)
		if err != nil {
			// Best effort: one broken agent must not block the others.
			continue
		}
		p.Agents[a.Name] = a
	}
	return nil
}
// LoadAgent runs `<file> -o metadata`, parses the XML the agent prints and
// returns its description; timeout 0 waits indefinitely for the process.
func (p *RHAgentProvider) LoadAgent(file string, timeout time.Duration) (*RHAgent, error) {
	cmd := exec.Command(file, "-o", "metadata")
	var b bytes.Buffer
	cmd.Stdout = &b
	err := cmd.Start()
	if err != nil {
		return nil, err
	}
	if timeout == time.Duration(0) {
		err = cmd.Wait()
	} else {
		err = utils.WaitTimeout(cmd, timeout)
	}
	if err != nil {
		return nil, err
	}
	mdxml := b.Bytes()
	mda, err := ParseMetadata(mdxml)
	if err != nil {
		return nil, err
	}
	a, err := mda.ToResourceAgent()
	if err != nil {
		return nil, fmt.Errorf("Agent \"%s\": %s", mda.Name, err)
	}
	// Remember which executable produced this metadata so it can be run later.
	a.Command = file
	return a, nil
}
// getRHAgent looks up a previously loaded agent by name, including its
// RH-specific fields (notably the executable path).
func (p *RHAgentProvider) getRHAgent(name string) (*RHAgent, error) {
	a, ok := p.Agents[name]
	if !ok {
		return nil, fmt.Errorf("Unknown agent: %s", name)
	}
	return a, nil
}
// GetAgents exposes all loaded agents as a generic utils.Agents map keyed by
// agent name.
func (p *RHAgentProvider) GetAgents() (utils.Agents, error) {
	result := make(utils.Agents, len(p.Agents))
	for _, agent := range p.Agents {
		result[agent.Name] = agent.Agent
	}
	return result, nil
}
// GetAgent returns the generic description of the named agent, or an error
// if it was not loaded. Delegates to getRHAgent so the "Unknown agent" error
// is produced in exactly one place (the original duplicated the lookup).
func (p *RHAgentProvider) GetAgent(name string) (*utils.Agent, error) {
	a, err := p.getRHAgent(name)
	if err != nil {
		return nil, err
	}
	return a.Agent, nil
}
// run executes the named agent with --action=<action> plus every configured
// parameter as a name=value argument, and returns the agent's stdout.
// timeout bounds the process runtime (0 = wait forever).
//
// BUG FIX: the original ignored the timeout parameter and called os.Exit(1)
// on any execution error, killing the whole process and making the
// *exec.ExitError handling in Status/Monitor unreachable; errors are now
// returned to the caller instead.
func (p *RHAgentProvider) run(ac *utils.AgentConfig, action string, timeout time.Duration) ([]byte, error) {
	a, err := p.getRHAgent(ac.Name)
	if err != nil {
		return nil, err
	}
	cmdArgs := []string{fmt.Sprintf("--action=%s", action)}
	for name, values := range ac.Parameters {
		for _, value := range values {
			cmdArgs = append(cmdArgs, fmt.Sprintf("%s=%s", name, value))
		}
	}
	cmd := exec.Command(a.Command, cmdArgs...)
	var out bytes.Buffer
	cmd.Stdout = &out
	if err := cmd.Start(); err != nil {
		return nil, err
	}
	if timeout == 0 {
		err = cmd.Wait()
	} else {
		err = utils.WaitTimeout(cmd, timeout)
	}
	if err != nil {
		// Surface the failure to the caller instead of exiting the process.
		fmt.Fprintln(os.Stderr, "fence agent execution failed:", err)
		return nil, err
	}
	return out.Bytes(), nil
}
// checkAction runs an informational agent action and maps the outcome to a
// DeviceStatus: clean exit -> Ok; non-zero agent exit -> Ko with nil error;
// any other failure -> Ko plus the underlying error. Shared by Status and
// Monitor, which previously duplicated this logic.
func (p *RHAgentProvider) checkAction(ac *utils.AgentConfig, action string, timeout time.Duration) (utils.DeviceStatus, error) {
	if _, err := p.run(ac, action, timeout); err != nil {
		if _, ok := err.(*exec.ExitError); ok {
			// Process exited with exit code != 0: the device reports failure.
			return utils.Ko, nil
		}
		return utils.Ko, err
	}
	return utils.Ok, nil
}

// Status reports the device status via the agent's "status" action.
func (p *RHAgentProvider) Status(ac *utils.AgentConfig, timeout time.Duration) (utils.DeviceStatus, error) {
	return p.checkAction(ac, "status", timeout)
}

// Monitor reports the device status via the agent's "monitor" action.
func (p *RHAgentProvider) Monitor(ac *utils.AgentConfig, timeout time.Duration) (utils.DeviceStatus, error) {
	return p.checkAction(ac, "monitor", timeout)
}
// List runs the agent's "list" action and parses its stdout into a PortList.
// Each line is "name" or "name,alias"; any other shape is an error.
func (p *RHAgentProvider) List(ac *utils.AgentConfig, timeout time.Duration) (utils.PortList, error) {
	out, err := p.run(ac, "list", timeout)
	if err != nil {
		return nil, err
	}
	portList := make(utils.PortList, 0)
	// Scan the output directly; the original wrapped it in an extra
	// bufio.Reader, which bufio.Scanner does not need.
	scanner := bufio.NewScanner(bytes.NewReader(out))
	for scanner.Scan() {
		var portName utils.PortName
		line := scanner.Text()
		split := strings.Split(line, ",")
		switch len(split) {
		case 1:
			portName = utils.PortName{Name: split[0]}
		case 2:
			portName = utils.PortName{Name: split[0], Alias: split[1]}
		default:
			return nil, fmt.Errorf("Wrong list format")
		}
		portList = append(portList, portName)
	}
	// BUG FIX: the original discarded scanner errors (e.g. over-long lines).
	if err := scanner.Err(); err != nil {
		return nil, err
	}
	return portList, nil
}
// Run executes the given action against the configured agent. utils.None is
// translated into an empty --action= value so the agent falls back to its
// own default action.
func (p *RHAgentProvider) Run(ac *utils.AgentConfig, action utils.Action, timeout time.Duration) error {
	// Specify action only if action != utils.None,
	// elsewhere the agent will run the default action
	var actionstr string
	if action != utils.None {
		actionstr = utils.ActionToString(action)
	}
	_, err := p.run(ac, actionstr, timeout)
	return err
}
/**
 * Round a number to the nearest integer; ties round toward +Infinity,
 * exactly as Math.round does.
 */
function roundNumber(num) {
  const rounded = Math.round(num);
  return rounded;
}
<filename>src/main/java/net/logstash/logback/composite/loggingevent/JsonMessageJsonProvider.java
/**
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.logstash.logback.composite.loggingevent;
import java.io.IOException;
import org.slf4j.Marker;
import com.fasterxml.jackson.core.JsonGenerator;
import ch.qos.logback.classic.spi.ILoggingEvent;
import net.logstash.logback.composite.AbstractFieldJsonProvider;
import net.logstash.logback.marker.Markers;
/**
 * Legacy provider that appends the event's argument array under the
 * "json_message" field whenever the deprecated "JSON" marker is attached to
 * the logging event.
 *
 * @deprecated Use the {@link LogstashMarkersJsonProvider}, and log events with {@link Markers} instead.
 */
@Deprecated
public class JsonMessageJsonProvider extends AbstractFieldJsonProvider<ILoggingEvent> {

    /**
     * Name of the {@link Marker} that indicates that the log event arguments
     * should be appended to the logstash json as an array with field value
     * "json_message".
     *
     * @deprecated When logging, prefer using a {@link Markers#appendArray(String, Object...)} marker
     *             with fieldName = "json_message" and objects = an array of arguments instead.
     */
    @Deprecated
    public static final String JSON_MARKER_NAME = "JSON";

    public JsonMessageJsonProvider() {
        super();
        setFieldName("json_message");
    }

    @Override
    public void writeTo(JsonGenerator generator, ILoggingEvent event) throws IOException {
        final Marker marker = event.getMarker();
        if (marker == null || !marker.contains(JSON_MARKER_NAME)) {
            // Nothing to emit unless the JSON marker is attached to the event.
            return;
        }
        generator.writeFieldName(getFieldName());
        generator.writeObject(event.getArgumentArray());
    }
}
|
import pytest
import datetime as dt
import os
import gpx
TEST_PATH = os.path.dirname(__file__)
def test_load_file():
    """A well-formed GPX file loads successfully."""
    route = gpx.Gpx(f'{TEST_PATH}/test_cases/basic_sample.gpx')
    assert route._load_file()


def test_load_file_big():
    """Files over the size limit are rejected with LoadGpxError."""
    with pytest.raises(gpx.LoadGpxError):
        gpx.Gpx(f'{TEST_PATH}/test_cases/over_10mb.gpx')
def test_load_file_no_permission():
    """An unreadable file raises LoadGpxError."""
    file = f'{TEST_PATH}/test_cases/no_read_permission.gpx'
    # Strip all read bits, leaving execute-only permissions. The original
    # used the magic decimal 37449 (== 0o111111), whose effective chmod mask
    # is the same execute-only permission plus a stray sticky bit.
    os.chmod(file, 0o111)
    with pytest.raises(gpx.LoadGpxError):
        gpx.Gpx(file)
def test_to_dict():
    """to_dict() exposes lat/lon/ele/time sequences matching the GPX file."""
    route = gpx.Gpx(f'{TEST_PATH}/test_cases/basic_sample.gpx')
    route_dict = route.to_dict()
    # Extract data to check
    first = [route_dict['lat'][0], route_dict['lon'][0],
             route_dict['ele'][0]]
    last = [route_dict['lat'][-1], route_dict['lon'][-1],
            route_dict['ele'][-1]]
    # Insert time in an easy-to-compare format (drop tz/microseconds noise)
    first_time = route_dict['time'][0]
    first.append(dt.datetime(first_time.year, first_time.month,
                             first_time.day, first_time.hour,
                             first_time.minute, first_time.second))
    last_time = route_dict['time'][-1]
    last.append(dt.datetime(last_time.year, last_time.month,
                            last_time.day, last_time.hour,
                            last_time.minute, last_time.second))
    # Reference data taken from basic_sample.gpx
    first_ref = [46.2406490, 6.0342000, 442.0,
                 dt.datetime(2015, 7, 24, 6, 44, 14)]
    last_ref = [46.2301180, 6.0525330, 428.2,
                dt.datetime(2015, 7, 24, 6, 52, 24)]
    # Compare lists with reference and read data
    assert all([a == b for a, b in zip(first + last, first_ref + last_ref)])


def test_to_pandas():
    """to_pandas() yields a DataFrame whose first/last rows match the file."""
    route = gpx.Gpx(f'{TEST_PATH}/test_cases/basic_sample.gpx')
    route_df = route.to_pandas()
    assert route_df.iloc[0].lat == pytest.approx(46.240649)
    assert route_df.iloc[0].lon == pytest.approx(6.0342)
    assert route_df.iloc[0].ele == pytest.approx(442.0)
    assert route_df.iloc[-1].lat == pytest.approx(46.230118)
    assert route_df.iloc[-1].lon == pytest.approx(6.052533)
    assert route_df.iloc[-1].ele == pytest.approx(428.2)
|
#!/bin/bash
# Exercise the /anyplace/mapping/floor/all endpoint against both backends
# (MongoDB and CouchDB) with the same JSON payload, then diff the results.
source ../functions.sh
jsonObj=$(cat input.json)
exec_curl_mdb "/anyplace/mapping/floor/all" $jsonObj
# call script.python (make it pretty)
exec_curl_cdb "/anyplace/mapping/floor/all" $jsonObj
# call script.python (make it pretty)
python3 compare.py "floorplans"
|
<reponame>Bielwenass/unchained
const yearnApproval = {
txid: '0x72f780a8c46f20f59b9a62dd7032d02d8f284444f49238b4b957a395423ba820',
vin: [
{
n: 0,
addresses: ['0x1399D13F3A0aaf08f7C5028D81447a311e4760c4'],
isAddress: true,
},
],
vout: [
{
value: '0',
n: 0,
addresses: ['0x514910771AF9Ca656af840dff83E8264EcF986CA'],
isAddress: true,
},
],
blockHash: '0x1f2328b83caf22a839e7142118516532f04ef88285e86ce3458dcde9dcd06c57',
blockHeight: 14033422,
confirmations: 217826,
blockTime: 1642560983,
value: '0',
fees: '4519526097650998',
ethereumSpecific: {
status: 1,
nonce: 1,
gasLimit: 46643,
gasUsed: 46643,
gasPrice: '96896127986',
data:
'0x095ea7b30000000000000000000000006a1e73f12018d8e5f966ce794aa2921941feb17e00000000000000000fffffffffffffffffffffffffffffffffffffffffffffff',
},
}
export default {
tx: yearnApproval,
txMempool: { ...yearnApproval, blockHeight: -1, confirmations: 0, fee: 0, ethereumSpecific: undefined },
}
|
package jframe.pay.wx.http.util;
import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;
import java.util.HashMap;
import java.util.Map;
import jframe.pay.wx.domain.WxConfig;
public class WxCore {

    /** Key/value separator in a query string ("="). */
    public static final String QSTRING_EQUAL = "=";

    /** Pair separator in a query string ("&"). */
    public static final String QSTRING_SPLIT = "&";

    /**
     * Parse a query string ("a=1&b=2") into a key/value map, URL-decoding
     * each value with the charset configured in {@code WxConfig}. A key with
     * no '=' is stored with an empty value.
     *
     * @throws RuntimeException if an empty key is encountered ("=x", "a&&b")
     */
    public static Map<String, String> parseQString(String str)
            throws UnsupportedEncodingException {
        Map<String, String> map = new HashMap<String, String>();
        int len = str.length();
        StringBuilder temp = new StringBuilder();
        char curChar;
        String key = null;
        boolean isKey = true;

        for (int i = 0; i < len; i++) {// walk the whole input string
            curChar = str.charAt(i);// current character
            if (curChar == '&') {// reached a pair separator
                putKeyValueToMap(temp, isKey, key, map);
                temp.setLength(0);
                isKey = true;
            } else {
                if (isKey) {// currently accumulating a key
                    if (curChar == '=') {// reached the key/value separator
                        key = temp.toString();
                        temp.setLength(0);
                        isKey = false;
                    } else {
                        temp.append(curChar);
                    }
                } else {// currently accumulating a value
                    temp.append(curChar);
                }
            }
        }
        // Flush the trailing pair (the input does not end with '&').
        putKeyValueToMap(temp, isKey, key, map);
        return map;
    }

    /**
     * Store one parsed pair into {@code map}. When {@code isKey} is true the
     * buffer holds a key without a value ("flag" style); otherwise
     * {@code key} holds the key and the buffer the still-encoded value.
     */
    private static void putKeyValueToMap(StringBuilder temp, boolean isKey,
            String key, Map<String, String> map)
            throws UnsupportedEncodingException {
        if (isKey) {
            key = temp.toString();
            if (key.length() == 0) {
                throw new RuntimeException("QString format illegal");
            }
            map.put(key, "");
        } else {
            if (key.length() == 0) {
                throw new RuntimeException("QString format illegal");
            }
            map.put(key,
                    URLDecoder.decode(temp.toString(),
                            WxConfig.getConf(WxConfig.KEY_CHARSET)));
        }
    }
}
|
class VersionManager:
    """Tracks a current version string and compares dotted version strings."""

    def __init__(self, current_version):
        # The version this manager considers "current" (e.g. "1.2.3").
        self.version = current_version

    def compare_versions(self, version1, version2):
        """Compare two dotted numeric version strings.

        Returns 1 if version1 > version2, -1 if version1 < version2 and 0
        when they are equal. Shorter versions are padded with zeros, so
        "1.2" compares equal to "1.2.0".
        """
        v1_parts = list(map(int, version1.split('.')))
        # BUG FIX: the original line below was missing its closing
        # parenthesis, which made the whole class a syntax error.
        v2_parts = list(map(int, version2.split('.')))
        # Generalized from a hard-coded range(3): pad to a common length
        # instead of assuming exactly three components.
        width = max(len(v1_parts), len(v2_parts))
        v1_parts += [0] * (width - len(v1_parts))
        v2_parts += [0] * (width - len(v2_parts))
        for a, b in zip(v1_parts, v2_parts):
            if a > b:
                return 1
            elif a < b:
                return -1
        return 0
#!/bin/bash
# Copyright JS Foundation and other contributors, http://js.foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
ENGINE="$1"
PATH_TO_TEST262="$2"
OUTPUT_DIR=`dirname $ENGINE`
REPORT_PATH="${OUTPUT_DIR}/test262.report"
TIMEOUT="90s"
TIMEOUT_CMD=`which timeout`
if [ $? -ne 0 ]
then
TIMEOUT_CMD=`which gtimeout`
fi
if [ $# -lt 2 ]
then
echo "This script performs parallel test262 compliance testing of the specified engine."
echo ""
echo "Usage:"
echo " 1st parameter: JavaScript engine to be tested."
echo " 2nd parameter: path to the directory with official test262 testsuite."
echo ""
echo "Example:"
echo " ./run-test-suite-test262.sh <engine> <test262_dir>"
exit 1
fi
if [ ! -d "${PATH_TO_TEST262}/.git" ]
then
git clone https://github.com/tc39/test262.git -b es5-tests "${PATH_TO_TEST262}"
fi
rm -rf "${PATH_TO_TEST262}/test/suite/bestPractice"
rm -rf "${PATH_TO_TEST262}/test/suite/intl402"
# TODO: Enable these tests after daylight saving calculation is fixed.
rm -f "${PATH_TO_TEST262}/test/suite/ch15/15.9/15.9.3/S15.9.3.1_A5_T1.js"
rm -f "${PATH_TO_TEST262}/test/suite/ch15/15.9/15.9.3/S15.9.3.1_A5_T2.js"
rm -f "${PATH_TO_TEST262}/test/suite/ch15/15.9/15.9.3/S15.9.3.1_A5_T3.js"
rm -f "${PATH_TO_TEST262}/test/suite/ch15/15.9/15.9.3/S15.9.3.1_A5_T4.js"
rm -f "${PATH_TO_TEST262}/test/suite/ch15/15.9/15.9.3/S15.9.3.1_A5_T5.js"
rm -f "${PATH_TO_TEST262}/test/suite/ch15/15.9/15.9.3/S15.9.3.1_A5_T6.js"
echo "Starting test262 testing for ${ENGINE}. Running test262 may take a several minutes."
python2 "${PATH_TO_TEST262}"/tools/packaging/test262.py --command "${TIMEOUT_CMD} ${TIMEOUT} ${ENGINE}" \
--tests="${PATH_TO_TEST262}" --summary \
&> "${REPORT_PATH}"
TEST262_EXIT_CODE=$?
# Abort (with the harness's own exit code) if the test262 driver itself failed.
if [ $TEST262_EXIT_CODE -ne 0 ]
then
  # BUG FIX: the message used to read "Failed to run test2626" (typo).
  echo -e "\nFailed to run test262\n"
  echo "$0: see ${REPORT_PATH} for details about failures"
  exit $TEST262_EXIT_CODE
fi
grep -A3 "=== Summary ===" "${REPORT_PATH}"
FAILURES=`sed -n '/Failed tests/,/^$/p' "${REPORT_PATH}"`
EXIT_CODE=0
if [ -n "$FAILURES" ]
then
echo -e "\n$FAILURES\n"
echo "$0: see ${REPORT_PATH} for details about failures"
EXIT_CODE=1
fi
FAILURES=`sed -n '/Expected to fail but passed/,/^$/p' "${REPORT_PATH}"`
if [ -n "$FAILURES" ]
then
echo -e "\n$FAILURES\n"
echo "$0: see ${REPORT_PATH} for details about failures"
EXIT_CODE=1
fi
exit $EXIT_CODE
|
def get_notification_description(notification_code: str) -> str:
    """Return the human-readable description for a notification code.

    Falls back to a generic label when the code is not found in
    TYPE_OF_NOTI (a sequence of (code, description) pairs).
    """
    matches = (desc for code, desc in TYPE_OF_NOTI if code == notification_code)
    return next(matches, "Unknown notification type")
// @flow
/** Represents a GraphQL query */
export type GraphQLQuery = string

/** An API Token ( or maybe a JWT in the future? ) */
export type APIToken = string

/**
 * The fb/artsy user context for receiving/sending messages.
 * If you have a `userID`, there should be a `userToken`.
 * No functions, needs to be storable in db.
 */
export interface MitosisUser {
  /** Guest Xapp token */
  xappToken?: APIToken,
  /** To re-auth with a user-token we need to re-use this key */
  artsyOauthAppCode?: string,
  /** Logged in user OAuth2 token */
  userToken?: APIToken,
  /** the Facebook chat sender ID */
  fbSenderID: string,
  /** the corresponding Artsy User ID */
  artsyUserID?: string,
  /** Does the user want to get articles sent every 2 days? */
  subscribeToArticlesBiDaily: boolean,
  /** What time, in GMT should we send them articles? */
  renderedGMTTimeForArticles?: number,
  /** Name to use _occasionally_ - I personally get annoyed if a service always uses my name */
  firstName: string,
  /** Timezone from facebook */
  timezoneOffset: number,
  /** last City chosen */
  favouriteCitySlug?: string,
  /** Artsy location city */
  artsyLocationCitySlug?: string
}
|
<gh_stars>0
import React, { useState, useEffect } from 'react';
import useInfiniteScroll from "./useInfiniteScroll";
const List = () => {
const [listItems, setListItems] = useState(Array.from(Array(30).keys(), n => n + 1));
const [isFetching, setIsFetching] = useInfiniteScroll(fetchMoreListItems);
useEffect(() => {
window.addEventListener('scroll', handleScroll);
return () => window.removeEventListener('scroll', handleScroll);
}, []);
useEffect(() => {
if (!isFetching) return;
fetchMoreListItems();
}, [isFetching]);
function handleScroll() {
if (window.innerHeight + document.documentElement.scrollTop !== document.documentElement.offsetHeight || isFetching) return;
setIsFetching(true);
}
function fetchMoreListItems() {
setTimeout(() => {
setListItems(prevState => ([...prevState, ...Array.from(Array(20).keys(), n => n + prevState.length + 1)]));
setIsFetching(false);
}, 2000);
}
return (
<>
<ul className="list-group mb-2">
{listItems.map(listItem => <li className="list-group-item">List Item {listItem}</li>)}
</ul>
{isFetching && 'Fetching more list items...'}
</>
);
};
export default List; |
# Fixed: the module imported the `Request` class but the view uses the
# `request` proxy, which raised NameError at request time.
from flask import Flask, jsonify, request

app = Flask(__name__)


@app.route('/user', methods=['POST'])
def get_user():
    """Return (as JSON) the user identified by the POSTed body's 'id' field."""
    data = request.get_json()
    user = get_user_from_db(data['id'])
    return jsonify(user)


def get_user_from_db(id):
    """Fetch a user record by id from the database.

    Placeholder implementation; returns None until the DB layer is wired up.
    """
    # implementation details
    # Fixed: the function previously had no body at all, which is a syntax
    # error in Python.
    pass


if __name__ == '__main__':
    app.run()
# *****************************************************************************
# *****************************************************************************
#
# lmsDomTs.bash
#
# *****************************************************************************
#
# @author Jay Wheeler.
# @version 0.1.1
# @copyright © 2016, 2017. EarthWalk Software.
# @license Licensed under the Academic Free License version 3.0
# @package Linux Management Scripts
# @subpackage lmsDomToStr
#
# *****************************************************************************
#
# Copyright © 2016, 2017. EarthWalk Software
# Licensed under the Academic Free License, version 3.0.
#
# Refer to the file named License.txt provided with the source,
# or from
#
# http://opensource.org/licenses/academic.php
#
# *****************************************************************************
#
# Version 0.0.1 - 07-24-2016.
# 0.0.2 - 09-06-2016.
# 0.0.3 - 09-15-2016.
# 0.1.0 - 01-15-2017.
# 0.1.1 - 02-10-2017.
#
# *****************************************************************************
# *****************************************************************************
declare -r lmslib_lmsDomToStr="0.1.1"	# version of this library

# *******************************************************
# *******************************************************

# Accumulates the generated DOM string across the traversal functions below.
declare    lmsdts_buffer

# Name of the stack used to remember branch nodes during traversal.
declare    lmsdts_stackName="DTSBranches"
# ****************************************************************************
#
# lmsDomTsFmtIndent
#
# Add spaces (indentation) to the buffer
#
# Parameters:
# stackIndex = how many blocks to indent
# blockSize = (optional) number of spaces in a block
#
# Returns:
# 0 = no error
#
# ****************************************************************************
# Append <indentSize> spaces to the global output buffer (no-op when zero).
# The optional blockSize parameter documented above is currently unused.
function lmsDomTsFmtIndent()
{
	local -i indentSize=${1:-0}

	if (( indentSize > 0 ))
	then
		printf -v lmsdts_buffer "%s%*s" "${lmsdts_buffer}" ${indentSize}
	fi

	return 0
}
# ****************************************************************************
#
# lmsDomTsAddAtt
#
# Parameters:
# aUid = uid of the node
# attIndent = columns to indent
#
# Returns:
# 0 = no error
# non-zero = error code
#
# ****************************************************************************
# Append every attribute of node <aUid> to the output buffer as " key=value".
function lmsDomTsAddAtt()
{
	local aUid=${1}
	local attIndent=${2}		# NOTE(review): accepted but never used in this body

	local attName="lmsdom_${aUid}_att"
	local attValue
	local attKey

	# NOTE(review): a failed reset reports success (return 0) — presumably
	# "node has no attributes" is not an error; confirm lmsDynnReset semantics.
	lmsDynnReset $attName
	[[ $? -eq 0 ]] || return 0

	lmsDynnValid ${attName} lmsdom_nodeValid
	[[ $? -eq 0 ]] || return 1

	# Walk the attribute list, appending "key=value" pairs to the buffer.
	while [[ ${lmsdom_nodeValid} -eq 1 ]]
	do
		lmsDynnMap ${attName} attValue attKey
		[[ $? -eq 0 ]] || return 1

		printf -v lmsdts_buffer "%s %s=%s" "${lmsdts_buffer}" ${attKey} "${attValue}"

		lmsDynnNext ${attName}
		lmsDynnValid ${attName} lmsdom_nodeValid
	done

	return 0
}
# ****************************************************************************
#
# lmsDomTsFmtOut
#
# Add node info to the buffer
#
# Parameters:
# uid = node uid to add
#
# Returns:
# 0 = no error
# non-zero = error code
#
# ****************************************************************************
# Format one DOM node (tag, attributes, content) into the output buffer,
# indented by the current branch-stack depth. Returns 0 on success, 1 on
# any lookup failure.
function lmsDomTsFmtOut()
{
	local uid=${1}

	local node="lmsdom_${uid}_node"
	local tagName=""
	local attcount=0
	local content=""	# fixed: was undeclared and leaked into the global scope

	lmsDynaGetAt $node "tagname" tagName
	[[ $? -eq 0 ]] || return 1

	lmsDynaGetAt $node "attcount" attcount
	[[ $? -eq 0 ]] || return 1

	local stackIndent
	lmsStackSize ${lmsdts_stackName} stackIndent
	[[ $? -eq 0 ]] || return 1

	lmsUtilIndent $stackIndent lmsdts_buffer
	printf -v lmsdts_buffer "%s%s" "${lmsdts_buffer}" "${tagName}"

	(( stackIndent++ ))

	# Append attributes, if the node has any.
	[[ ${attcount} -eq 0 ]] ||
	 {
		lmsDomTsAddAtt ${uid} ${stackIndent}
		[[ $? -eq 0 ]] || return 1
	 }

	lmsDynaGetAt $node "content" content
	[[ $? -eq 0 ]] || return 1

	[[ -n "${content}" ]] && printf -v lmsdts_buffer "%s content=\"%s\"" "${lmsdts_buffer}" "${content}"

	printf -v lmsdts_buffer "%s\n" "${lmsdts_buffer}"
	return 0
}
# ****************************************************************************
#
# lmsDomTsTraverse
#
# Parameters:
# branch = branch node name to traverse
#
# Returns:
# 0 = no error
# non-zero = error code
#
# ****************************************************************************
# Recursively format the branch <branch> and all of its descendants,
# using the branch stack to restore position after each recursion.
function lmsDomTsTraverse()
{
	local branch=${1}
	local branchName="lmsdom_${branch}"
	local limbs=0
	local limb

	lmsStackWrite ${lmsdts_stackName} ${branch}
	[[ $? -eq 0 ]] || return 1

	lmsDynnReset "$branchName"
	[[ $? -eq 0 ]] || return 1

	lmsDynnValid "$branchName" lmsdom_nodeValid

	while [[ ${lmsdom_nodeValid} -eq 1 ]]
	do
		lmsDynnMap "$branchName" limb
		[[ $? -eq 0 ]] || return 1

		lmsDomTsFmtOut ${limb}
		[[ $? -eq 0 ]] || return 1

		lmsDomTsTraverse ${limb}
		[[ $? -eq 0 ]] || break

		# Recursion moved the iterator; restore our branch position.
		lmsStackPeek "${lmsdts_stackName}" branch
		[[ $? -eq 0 ]] || return 1

		branchName="lmsdom_${branch}"

		lmsDynnNext "$branchName"
		lmsDynnValid "$branchName" lmsdom_nodeValid
	done

	lmsStackRead ${lmsdts_stackName} branch
	local readStatus=$?

	# Fixed: the original tested $? again after an intervening [[ ]] command
	# had already overwritten it, so the error branch could never fire.
	# A status of 2 from lmsStackRead is the only fatal case here.
	[[ ${readStatus} -eq 2 ]] && return 1

	return 0
}
# ****************************************************************************
#
# lmsDomToStr
#
# Parameters:
# returnString = place to put the generated string
#
# Returns:
# 0 = no error
# non-zero = error code
#
# ****************************************************************************
# Serialize the parsed DOM tree (rooted at ${lmsdom_docTree}) into a string
# and store it in the variable named by $1. Distinct non-zero return codes
# identify which step failed (1..5).
function lmsDomToStr()
{
	local stackUid

	lmsdts_buffer=""

	# A parsed DOM tree must exist.
	[[ -z "${lmsdom_docTree}" ]] && return 1

	# Recreate the traversal branch stack from scratch.
	lmsStackLookup "${lmsdts_stackName}" stackUid
	[[ $? -eq 0 ]] && lmsStackDestroy ${lmsdts_stackName}

	lmsStackCreate ${lmsdts_stackName} stackUid 12
	[[ $? -eq 0 ]] || return 2

	# Emit the root node, then walk its descendants.
	lmsDomTsFmtOut ${lmsdom_docTree}
	[[ $? -eq 0 ]] || return 3

	lmserr_result=0
	lmsDomTsTraverse ${lmsdom_docTree}
	[[ $? -eq 0 ]] || return 4

	# Copy the accumulated buffer into the caller-supplied variable name.
	lmsDeclareStr ${1} "${lmsdts_buffer}"
	[[ $? -eq 0 ]] || return 5

	return 0
}
|
#!/bin/bash
#SBATCH -J Act_maxsig_1
#SBATCH --mail-user=eger@ukp.informatik.tu-darmstadt.de
#SBATCH --mail-type=FAIL
#SBATCH -e /work/scratch/se55gyhe/log/output.err.%j
#SBATCH -o /work/scratch/se55gyhe/log/output.out.%j
#SBATCH -n 1 # Number of cores
#SBATCH --mem-per-cpu=6000
#SBATCH -t 23:59:00 # Hours, minutes and seconds, or '#SBATCH -t 10' -only mins
#module load intel python/3.5
# Run the PE sequence-tagging experiment with the "maxsig" activation.
# Positional args appear to be: activation, seed/id, optimizer, depth,
# two tuned hyperparameters, weight init, dropout — confirm against PE-my.py.
python3 /home/se55gyhe/Act_func/sequence_tagging/arg_min/PE-my.py maxsig 263 sgd 3 0.37534246970479646 0.010914214951220788 he_uniform 0.3
|
const express = require('express');
const bodyParser = require('body-parser');

const app = express();

// Body-parsing middleware: form-encoded and JSON payloads.
app.use(bodyParser.urlencoded({ extended: false }));
app.use(bodyParser.json());

// Registration endpoint.
app.post('/register', (req, res) => {
  const { email } = req.body;
  // perform any necessary validation
  // register user
  // return success
  return res.json({ success: true });
});

// start server
app.listen(3000);
# Copyright (c) 2014-2015 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
export LC_ALL=C

# Quote $0/$FRAMEDIR so paths containing spaces don't word-split.
FRAMEDIR=$(dirname "$0")

# Render 36 frames of the spinner, each rotated 10 degrees further.
for i in {0..35}
do
    frame=$(printf "%03d" "$i")
    angle=$((i * 10))
    convert "$FRAMEDIR/../src/spinner_gray.png" -background "rgba(0,0,0,0.0)" -distort SRT "$angle" "$FRAMEDIR/spinner-$frame.png"
done
|
require('redcarpet/render_strip')
# Controller for community (category) browsing and moderation requests.
class CommunitiesController < ApplicationController
  before_action(:authenticate_user!, except: %i[index show])

  # Renders the request form (view only).
  def new
  end

  # Creates a moderation request from the submitted form parameters.
  def create
    if params.key?('request')
      Administrate.create(user: params['request']['user'],
                          category: params['request']['category'],
                          answered: false)
      # Fixed grammar in the user-facing notice: "a sido" -> "ha sido".
      redirect_back(fallback_location: root_path(), notice: '¡Tu solicitud ha sido enviada!')
    end
  end

  # Lists every community (backed by Category records).
  def index
    @communities = Category.all()
  end

  # Shows one community and its posts, ordered by the requested criterion
  # ('tiempo' = by date, 'puntos' = by points, default = trending).
  def show
    @category = Category.find(params[:id])
    @posts = Post.where(category: @category)
    @posts = if params['order'] == 'tiempo'
               Post.sort_by_date(@posts)
             elsif params['order'] == 'puntos'
               Post.sort_by_points(@posts)
             else
               Post.sort_by_trends(@posts)
             end
  end

  # Moderator dashboard (view only).
  def moderator
  end
end
|
<gh_stars>0
#include <lua-exports/B2WorldProxy.h>
#include <iostream>
#include <UUIDGenerator.h>
#include <tmx/MapLoader.h>
#include <tmx/tmx2box2d.h>
// Private implementation (pimpl) wrapping a Box2D world reference.
class B2WorldProxy::B2WorldProxyImpl{
public:
    B2WorldProxyImpl(b2World& box2dWorld) : m_box2dWorld(box2dWorld){
    }
    ~B2WorldProxyImpl(){
    }
    b2Joint* rope;           // last rope joint created by ropeJoint()
    b2World& m_box2dWorld;   // wrapped Box2D world (not owned)
    b2RopeJointDef ropeDef;  // reused definition for rope joints

    // Create a body with a polygon fixture, tagging it with a fresh UUID.
    // NOTE(review): the new std::string user data is never deleted here —
    // confirm ownership/cleanup happens elsewhere.
    b2Body* createNewBody(b2BodyDef& bodyDef, b2PolygonShape& shape, b2FixtureDef& fixture){
        fixture.shape = &shape;
        b2Body* body = m_box2dWorld.CreateBody(&bodyDef);
        body->CreateFixture(&fixture);
        body->SetUserData(new std::string(UUIDGenerator::createUuid())); //TODO Manage this better, just here to get things working
        return body;
    }

    // Same as above but for an edge shape.
    b2Body* createNewBody(b2BodyDef& bodyDef, b2EdgeShape& shape, b2FixtureDef& fixture){
        fixture.shape = &shape;
        b2Body* body = m_box2dWorld.CreateBody(&bodyDef);
        body->CreateFixture(&fixture);
        body->SetUserData(new std::string(UUIDGenerator::createUuid())); //TODO Manage this better, just here to get things working
        return body;
    }

    // Create a revolute joint in the world and return it.
    b2Joint* createJoint(b2RevoluteJointDef& joint) {
        b2Joint* d = m_box2dWorld.CreateJoint(&joint);
        return d;
    }

    // Create a prismatic joint in the world and return it.
    b2PrismaticJoint* createJoint(b2PrismaticJointDef& joint) {
        return static_cast<b2PrismaticJoint*>(m_box2dWorld.CreateJoint(&joint));
    }

    // Build a rope joint from selected fields of `joint`, storing the result
    // in the `rope` member. Always returns NULL.
    b2Joint* ropeJoint(b2RopeJointDef& joint) {
        ropeDef.localAnchorA = joint.localAnchorA;
        ropeDef.localAnchorB.SetZero();
        float32 extraLength = 0.01f; // NOTE(review): computed but never used
        ropeDef.maxLength = joint.maxLength;
        ropeDef.bodyB = joint.bodyB;
        ropeDef.bodyA = joint.bodyA;
        rope = m_box2dWorld.CreateJoint(&ropeDef);
        return NULL;
    }

    // Return the body's UUID, lazily assigning one when the body has no
    // user data yet.
    std::string UuidOf(b2Body* body) {
        void* userData = body->GetUserData();
        if(userData){
            std::string* uuid = static_cast<std::string*>(userData);
            std::string s = *uuid;
            return s;
        }else{
            std::string uuid = UUIDGenerator::createUuid();
            body->SetUserData(new std::string(uuid)); //TODO Manage this better, just here to get things working
            return uuid;
        }
    }

    // Attach an additional polygon fixture to an existing body.
    b2Fixture* addFixture(b2Body* body, b2PolygonShape& shape, b2FixtureDef& fixture) {
        fixture.shape = & shape;
        return body->CreateFixture(&fixture);
    }

    // Build a body from a TMX map object via the tmx2box2d helper.
    b2Body* createBodyFromMapObject(tmx::MapObject& mapObject, b2BodyType bodyType){
        return tmx::BodyCreator::Add(mapObject, m_box2dWorld, bodyType);
    }
};
// Public facade: forwards every call to the pimpl.
B2WorldProxy::B2WorldProxy(b2World& box2dWorld) : m_impl(new B2WorldProxyImpl(box2dWorld)){
}

// NOTE(review): if m_impl is a raw pointer (declared in the header), this
// empty destructor leaks the impl — confirm m_impl's declared type.
B2WorldProxy::~B2WorldProxy() {
}

b2Body* B2WorldProxy::createNewBody(b2BodyDef& bodyDef, b2PolygonShape& shape, b2FixtureDef& fixture) {
    return m_impl->createNewBody(bodyDef, shape, fixture);
}

// NOTE(review): the created joint pointer is discarded here even though the
// impl returns it — confirm callers never need the handle.
void B2WorldProxy::createJoint(b2RevoluteJointDef& joint) {
    m_impl->createJoint(joint);
}

void B2WorldProxy::ropeJoint(b2RopeJointDef& joint) {
    m_impl->ropeJoint(joint);
}

std::string B2WorldProxy::UuidOf(b2Body* body) {
    return m_impl->UuidOf(body);
}

b2Fixture* B2WorldProxy::addFixture(b2Body* body, b2PolygonShape& shape, b2FixtureDef& fixture) {
    return m_impl->addFixture(body, shape, fixture);
}

b2PrismaticJoint* B2WorldProxy::createPrismaticJoint(b2PrismaticJointDef& joint) {
    return m_impl->createJoint(joint);
}

b2Body* B2WorldProxy::createEdgeNewBody(b2BodyDef& bodyDef, b2EdgeShape& shape,
        b2FixtureDef& fixture) {
    return m_impl->createNewBody(bodyDef, shape, fixture);
}

b2Body* B2WorldProxy::createBodyFromMapObject(tmx::MapObject& mapObject,
        b2BodyType bodyType) {
    return m_impl->createBodyFromMapObject(mapObject, bodyType);
}
|
package pathutil
import (
"fmt"
"path/filepath"
"strings"
)
// ChangeParent changes the parent of srcPath from srcDir to dstDir, and returns the dstPath.
// If srcPath is not a child of srcDir, it returns an error.
//
// For example
//	ChangeParent("/home/user/dir/file.txt", "/home", "/root") // -> "/root/user/dir/file.txt"
//	ChangeParent("/home/user/file.txt", "/home/user", "/home/user/dir") // -> "/home/user/dir/file.txt"
func ChangeParent(srcPath string, srcDir string, dstDir string) (dstPath string, err error) {
	relativePath, err := filepath.Rel(srcDir, srcPath)
	if err != nil {
		err = fmt.Errorf("failed to make relative path: %w", err)
		return
	}
	// filepath.Rel succeeds even when srcPath lies outside srcDir, yielding a
	// "../..." result; reject that case to honor the documented contract.
	if relativePath == ".." || strings.HasPrefix(relativePath, ".."+string(filepath.Separator)) {
		err = fmt.Errorf("%q is not a child of %q", srcPath, srcDir)
		return
	}
	dstPath = filepath.Join(dstDir, relativePath)
	return
}
// MustChangeParent calls ChangeParent and panics if an error occurs.
// It is safe to use MustChangeParent in, for example, os.WalkDir.
func MustChangeParent(srcPath string, srcDir string, dstDir string) string {
	dstPath, err := ChangeParent(srcPath, srcDir, dstDir)
	if err != nil {
		panic(err)
	}
	return dstPath
}
const slash = "/"

// Splits breaks path into its slash-separated components. An absolute root
// or a Windows volume name (with or without the trailing slash) becomes the
// first element of the result.
func Splits(path string) (parts []string) {
	path = filepath.ToSlash(path)
	volume := filepath.VolumeName(path)
	switch {
	case filepath.IsAbs(path) && volume != "":
		prefix := volume + slash
		parts = append(parts, prefix)
		path = strings.TrimPrefix(path, prefix)
	case filepath.IsAbs(path):
		parts = append(parts, slash)
		path = strings.TrimPrefix(path, slash)
	case volume != "":
		parts = append(parts, volume)
		path = strings.TrimPrefix(path, volume)
	}
	parts = append(parts, strings.Split(path, slash)...)
	return
}
|
# convert days
def convert_days(days):
    """Convert a day count into (years, months, days), using 365-day years
    and 30-day months."""
    years, remainder = divmod(days, 365)
    months, days = divmod(remainder, 30)
    return (years, months, days)
# test
total_days = 900
years, months, days = convert_days(total_days)
print("Number of years =", years)
print("Number of months =", months)
print("Number of days =", days)
package bar
import (
"fmt"
"goalgotrade/consts/frequency"
"time"
lg "goalgotrade/logger"
"go.uber.org/zap"
)
// Bar is a single OHLCV record for one instrument at one point in time.
// When adjusted values are enabled (SetUseAdjustedValue), the price
// accessors report split/dividend-adjusted figures.
type Bar interface {
	Open() float64
	High() float64
	Low() float64
	Close() float64
	Volume() float64
	AdjClose() float64
	Frequency() frequency.Frequency
	Price() float64
	UseAdjValue() bool
	SetUseAdjustedValue(useAdjusted bool) error
	Time() *time.Time
}
// basicBar is the default Bar implementation holding one OHLCV record.
type basicBar struct {
	barTime          *time.Time          // bar timestamp
	open             float64             // opening price
	high             float64             // highest price
	low              float64             // lowest price
	close            float64             // closing price
	adjClose         float64             // adjusted close
	volume           float64             // traded volume
	frequency        frequency.Frequency // sampling frequency
	useAdjustedValue bool                // report adjusted values when true
}
// NewBasicBar validates the OHLC relationships and returns a new bar with
// adjusted values disabled. An error is returned when high is not the upper
// bound, or low is not the lower bound, of open/close.
func NewBasicBar(barTime time.Time, o, h, l, c, v, adjClose float64, freq frequency.Frequency) (Bar, error) {
	// zap's structured Logger does not printf-format its message, so the old
	// " on %s" suffix was emitted literally; the time is already attached as
	// a structured field. Also dropped the stray trailing space from the
	// first error message.
	if h < l {
		lg.Logger.Error("high < low", zap.Time("barTime", barTime))
		return nil, fmt.Errorf("high < low")
	} else if h < o {
		lg.Logger.Error("high < open", zap.Time("barTime", barTime))
		return nil, fmt.Errorf("high < open")
	} else if h < c {
		lg.Logger.Error("high < close", zap.Time("barTime", barTime))
		return nil, fmt.Errorf("high < close")
	} else if l > o {
		lg.Logger.Error("low > open", zap.Time("barTime", barTime))
		return nil, fmt.Errorf("low > open")
	} else if l > c {
		lg.Logger.Error("low > close", zap.Time("barTime", barTime))
		return nil, fmt.Errorf("low > close")
	}
	tmpTime := barTime // copy so the returned bar owns its own time value
	return &basicBar{
		barTime:          &tmpTime,
		open:             o,
		high:             h,
		low:              l,
		close:            c,
		adjClose:         adjClose,
		volume:           v,
		frequency:        freq,
		useAdjustedValue: false,
	}, nil
}
// SetUseAdjustedValue toggles whether the price accessors return
// adjusted values; always returns nil.
func (b *basicBar) SetUseAdjustedValue(useAdjusted bool) error {
	b.useAdjustedValue = useAdjusted
	return nil
}

// UseAdjValue reports whether adjusted values are enabled.
func (b *basicBar) UseAdjValue() bool {
	return b.useAdjustedValue
}

// Time returns the bar's timestamp.
func (b *basicBar) Time() *time.Time {
	return b.barTime
}
// Open returns the opening price, scaled by adjClose/close when
// adjusted values are enabled.
func (b *basicBar) Open() float64 {
	if b.useAdjustedValue {
		return b.adjClose * b.open / b.close
	}
	return b.open
}

// High returns the highest price, scaled by adjClose/close when
// adjusted values are enabled.
func (b *basicBar) High() float64 {
	if b.useAdjustedValue {
		return b.adjClose * b.high / b.close
	}
	return b.high
}

// Low returns the lowest price, scaled by adjClose/close when
// adjusted values are enabled.
func (b *basicBar) Low() float64 {
	if b.useAdjustedValue {
		return b.adjClose * b.low / b.close
	}
	return b.low
}
// Close returns the close price; when adjusted values are enabled the
// adjusted close is returned instead.
func (b *basicBar) Close() float64 {
	if b.useAdjustedValue {
		return b.adjClose
	}
	// Fixed: previously returned b.open, so Close() reported the opening
	// price whenever adjusted values were disabled.
	return b.close
}
// Volume returns the traded volume.
func (b *basicBar) Volume() float64 {
	return b.volume
}

// AdjClose returns the adjusted close price.
func (b *basicBar) AdjClose() float64 {
	return b.adjClose
}

// Frequency returns the bar's sampling frequency.
func (b *basicBar) Frequency() frequency.Frequency {
	return b.frequency
}

// Price returns a representative price for the bar.
// NOTE(review): with adjusted values enabled this returns the adjusted
// CLOSE, but otherwise it returns the OPEN price — confirm that the
// asymmetry is intentional.
func (b *basicBar) Price() float64 {
	if b.useAdjustedValue {
		return b.adjClose
	}
	return b.open
}
// Bars is a collection of Bar values keyed by instrument name.
type Bars interface {
	Instruments() []string
	Frequencies() []frequency.Frequency
	Time() *time.Time
	Bar(instrument string) Bar
	AddBarList(instrument string, barList []Bar) error
}

type bars struct {
	barList map[string]Bar // one bar per instrument
	barTime *time.Time     // earliest bar time seen so far
}

// NewBars returns an empty Bars collection.
func NewBars() Bars {
	return &bars{
		barList: map[string]Bar{},
	}
}
// Instruments returns the instrument names currently stored
// (iteration order is unspecified).
func (b *bars) Instruments() []string {
	names := make([]string, 0, len(b.barList))
	for name := range b.barList {
		names = append(names, name)
	}
	return names
}
// Frequencies returns the distinct bar frequencies present in the
// collection (order unspecified).
func (b *bars) Frequencies() []frequency.Frequency {
	seen := map[frequency.Frequency]struct{}{}
	for _, candidate := range b.barList {
		seen[candidate.Frequency()] = struct{}{}
	}
	distinct := []frequency.Frequency{}
	for freq := range seen {
		distinct = append(distinct, freq)
	}
	return distinct
}
// Time returns the earliest timestamp among the stored bars
// (nil when the collection is empty).
func (b *bars) Time() *time.Time {
	return b.barTime
}

// Bar returns the bar stored for instrument, or nil when absent.
func (b *bars) Bar(instrument string) Bar {
	if val, ok := b.barList[instrument]; ok {
		return val
	}
	return nil
}
// addSingleBar stores bar under instrument, rejecting duplicates, and keeps
// barTime pointing at the earliest bar time seen so far.
func (b *bars) addSingleBar(instrument string, bar Bar) error {
	if _, ok := b.barList[instrument]; ok {
		lg.Logger.Error("instrument exists already", zap.String("instrument", instrument))
		return fmt.Errorf("instrument exists already %s", instrument)
	}
	b.barList[instrument] = bar
	if b.barTime == nil || bar.Time().Before(*b.barTime) {
		b.barTime = bar.Time()
	}
	return nil
}
// AddBarList adds every bar in barList under instrument, stopping at the
// first failure. NOTE(review): addSingleBar rejects duplicates, so any
// list with more than one element fails on the second — confirm intent.
func (b *bars) AddBarList(instrument string, barList []Bar) error {
	for _, candidate := range barList {
		if err := b.addSingleBar(instrument, candidate); err != nil {
			return err
		}
	}
	return nil
}
|
#!/usr/bin/env bash
apt-get update
# -y added throughout so the script can run unattended (a few lines
# already used it; the rest would block on a confirmation prompt).
apt-get install -y build-essential software-properties-common
apt-get install -y libssl-dev libffi-dev python-dev
apt-get install -y automake autoconf gfortran subversion
apt-get install -y python3-venv
apt-get install -y gcc g++
apt-get install -y flac swig
# NOTE(review): "autoconfig" is not a Debian package name (autoconf is
# already installed above) — confirm the intended package.
apt-get install -y sox automake autoconfig unzip
apt-get install -y python-setuptools python3-pip
apt-get install -y curl git libc-dev libreadline-dev libtool make ncurses-dev pkg-config
# Fixed: the "install" subcommand was missing on this line, so apt-get
# rejected it as an invalid operation.
apt-get install -y wavpack wget zlib1g-dev
apt-get install -y realpath
apt-get install -y coreutils
apt-get install -y autotools-dev
apt-get update
|
<gh_stars>10-100
#include "vm.h"
#include "conf.h"
#include "lib.h"
#include "mem.h"
#include "table.h"
#include <inttypes.h>
#include <math.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
// Print a fatal VM error to stderr and abort the process.
static void err(const char *msg) {
    fprintf(stderr, "riff: [vm] %s\n", msg);
    exit(1);
}

static rf_htab globals;                // global variable table
static rf_tab  argv;                   // command-line arguments table ("arg")
static rf_tab  fldv;                   // regex capture fields table
static rf_iter *iter;                  // innermost active loop iterator (linked via ->p)
static rf_stack stack[VM_STACK_SIZE];  // the VM value stack

// Coerce string to int unconditionally
inline rf_int str2int(rf_str *s) {
    char *end;
    return u_str2i64(s->str, &end, 0);
}

// Coerce string to float unconditionally
inline rf_flt str2flt(rf_str *s) {
    char *end;
    return u_str2d(s->str, &end, 0);
}
// Integer arithmetic (Bitwise ops)
#define int_arith(l,r,op) \
    if (is_int(l) && is_int(r)) { \
        l->u.i = (l->u.i op r->u.i); \
    } else { \
        set_int(l, (intval(l) op intval(r))); \
    }

// Floating-point arithmetic (div)
#define flt_arith(l,r,op) \
    set_flt(l, (numval(l) op numval(r)))

// "Polymorphic" arithmetic (add, sub, mul)
#define num_arith(l,r,op) \
    if (is_int(l) && is_int(r)) { \
        l->u.i = (l->u.i op r->u.i); \
    } else { \
        set_flt(l, (numval(l) op numval(r))); \
    }

// Return boolean result of value (0/1)
static inline int test(rf_val *v) {
    switch (v->type) {
    case TYPE_INT: return !!(v->u.i);
    case TYPE_FLT: return !!(v->u.f);
    // If entire string is a numeric value, return logical result of
    // the number. Otherwise, return whether the string is longer than
    // 0.
    case TYPE_STR: {
        char *end;
        rf_flt f = u_str2d(v->u.s->str, &end, 0);
        if (!*end) {
            // Check for literal '0' character in string
            // NOTE(review): a zero-valued numeric string with no literal '0'
            // character falls through to the length test — confirm intent.
            if (f == 0.0) {
                for (int i = 0; i < v->u.s->l; ++i) {
                    if (v->u.s->str[i] == '0')
                        return 0;
                }
            } else {
                return !!f;
            }
        }
        return !!v->u.s->l;
    }
    case TYPE_TAB: return !!t_length(v->u.t);
    // Regexes, ranges and functions are always truthy.
    case TYPE_RE: case TYPE_RNG:
    case TYPE_RFN: case TYPE_CFN:
        return 1;
    default: return 0;
    }
}
// Declare unary/binary VM operations on rf_vals; results are written in
// place into *v / *l.
#define Z_UOP(op)   static inline void z_##op(rf_val *v)
#define Z_BINOP(op) static inline void z_##op(rf_val *l, rf_val *r)

Z_BINOP(add) { num_arith(l,r,+); }
Z_BINOP(sub) { num_arith(l,r,-); }
Z_BINOP(mul) { num_arith(l,r,*); }

// Language comparison for division by zero:
//   0/0 = nan; 1/0 = inf : lua, mawk
//   error: pretty much all others
Z_BINOP(div) { flt_arith(l,r,/); }

// Language comparison for modulus by zero:
//   `nan`: mawk
//   error: pretty much all others
Z_BINOP(mod) {
    rf_flt res = fmod(numval(l), numval(r));
    // Negative remainders are shifted so the result takes the divisor's sign.
    set_flt(l, res < 0 ? res + numval(r) : res);
}

Z_BINOP(pow) { set_flt(l, pow(fltval(l), fltval(r))); }
Z_BINOP(and) { int_arith(l,r,&); }
Z_BINOP(or)  { int_arith(l,r,|); }
Z_BINOP(xor) { int_arith(l,r,^); }
Z_BINOP(shl) { int_arith(l,r,<<); }
Z_BINOP(shr) { int_arith(l,r,>>); }

// Coerce to a numeric value in place; strings are parsed as floats, other
// non-numeric types become integer 0.
Z_UOP(num) {
    switch (v->type) {
    case TYPE_INT:
    case TYPE_FLT:
        break;
    case TYPE_STR:
        set_flt(v, str2flt(v->u.s));
        break;
    default:
        set_int(v, 0);
        break;
    }
}

// Arithmetic negation in place (strings coerced to floats first).
Z_UOP(neg) {
    switch (v->type) {
    case TYPE_INT:
        v->u.i = -v->u.i;
        break;
    case TYPE_FLT:
        v->u.f = -v->u.f;
        break;
    case TYPE_STR:
        set_flt(v, -str2flt(v->u.s));
        break;
    default:
        set_int(v, 0);
        break;
    }
}

// Bitwise NOT; operand is coerced to an integer.
Z_UOP(not) { set_int(v, ~intval(v)); }
// == and != operators
// Strings compare by hash when both sides are strings; a numeric string
// compares numerically against a number, a non-numeric one is never equal.
#define cmp_eq(l,r,op) \
    if (is_int(l) && is_int(r)) { \
        l->u.i = (l->u.i op r->u.i); \
    } else if (is_null(l) ^ is_null(r)) { \
        set_int(l, !(0 op 0)); \
    } else if (is_str(l) && is_str(r)) { \
        if (!l->u.s->hash) l->u.s->hash = u_strhash(l->u.s->str); \
        if (!r->u.s->hash) r->u.s->hash = u_strhash(r->u.s->str); \
        set_int(l, (l->u.s->hash op r->u.s->hash)); \
    } else if (is_str(l) && !is_str(r)) { \
        if (!l->u.s->l) { \
            set_int(l, !(0 op 0)); \
            return; \
        } \
        char *end; \
        rf_flt f = u_str2d(l->u.s->str, &end, 0); \
        if (*end) { \
            set_int(l, 0); \
        } else { \
            set_int(l, (f op numval(r))); \
        } \
    } else if (!is_str(l) && is_str(r)) { \
        if (!r->u.s->l) { \
            set_int(l, !(0 op 0)); \
            return; \
        } \
        char *end; \
        rf_flt f = u_str2d(r->u.s->str, &end, 0); \
        if (*end) { \
            set_int(l, 0); \
        } else { \
            set_int(l, (numval(l) op f)); \
        } \
    } else { \
        num_arith(l,r,op); \
    }

// >, <, >= and <= operators
#define cmp_rel(l,r,op) \
    if (is_int(l) && is_int(r)) { \
        l->u.i = (l->u.i op r->u.i); \
    } else { \
        set_int(l, (numval(l) op numval(r))); \
    }

Z_BINOP(eq) { cmp_eq(l,r,==); }
Z_BINOP(ne) { cmp_eq(l,r,!=); }
Z_BINOP(gt) { cmp_rel(l,r,>); }
Z_BINOP(ge) { cmp_rel(l,r,>=); }
Z_BINOP(lt) { cmp_rel(l,r,<); }
Z_BINOP(le) { cmp_rel(l,r,<=); }

// Logical NOT based on truthiness.
Z_UOP(lnot) { set_int(v, !test(v)); }

// Length operator (#): digit count for ints, formatted length for floats,
// byte length for strings, element count for tables, bytecode size for
// riff functions.
Z_UOP(len) {
    rf_int l = 0;
    switch (v->type) {
    // For integers:
    //   #x = ⌊log10(x)⌋ + 1 for x > 0
    //        ⌊log10(-x)⌋ + 2 for x < 0
    case TYPE_INT:
        if (v->u.i == INT64_MIN) {
            l = 20;
        } else {
            l = v->u.i > 0 ? (rf_int) log10(v->u.i) + 1 :
                v->u.i < 0 ? (rf_int) log10(-v->u.i) + 2 : 1;
        }
        v->u.i = l;
        return;
    case TYPE_FLT:
        l = (rf_int) snprintf(NULL, 0, "%g", v->u.f);
        break;
    case TYPE_STR: l = v->u.s->l; break;
    case TYPE_TAB: l = t_length(v->u.t); break;
    case TYPE_RFN: l = v->u.fn->code->n; break; // # of bytes
    case TYPE_RE:  // TODO - extract something from PCRE pattern?
    case TYPE_RNG: // TODO
    case TYPE_CFN:
        l = 1;
        break;
    default: break;
    }
    set_int(v, l);
}
// Concatenation: both operands are stringified (numbers via u_*2str, other
// types become the empty string) and joined into a new string in *l.
Z_BINOP(cat) {
    char *lhs, *rhs;
    char temp_lhs[32];
    char temp_rhs[32];
    if (!is_str(l)) {
        switch (l->type) {
        case TYPE_INT: u_int2str(l->u.i, temp_lhs); break;
        case TYPE_FLT: u_flt2str(l->u.f, temp_lhs); break;
        default: temp_lhs[0] = '\0'; break;
        }
        lhs = temp_lhs;
    } else {
        lhs = l->u.s->str;
    }
    if (!is_str(r)) {
        switch (r->type) {
        case TYPE_INT: u_int2str(r->u.i, temp_rhs); break;
        case TYPE_FLT: u_flt2str(r->u.f, temp_rhs); break;
        default: temp_rhs[0] = '\0'; break;
        }
        rhs = temp_rhs;
    } else {
        rhs = r->u.s->str;
    }
    set_str(l, s_newstr_concat(lhs, rhs, 0));
}

// Pattern-match helper shared by the match/non-match operators. The RHS is
// compiled to a regex when it isn't one already; only a string RHS enables
// capture-group collection.
// NOTE(review): re_compile's errcode is never checked here — confirm
// failures are impossible or handled inside re_compile.
static rf_int match(rf_val *l, rf_val *r) {
    // Common case: LHS string, RHS regex
    if (is_str(l) && is_re(r))
        return re_match(l->u.s->str, l->u.s->l, r->u.r, 1);
    char *lhs;
    size_t len = 0;
    char temp_lhs[32];
    char temp_rhs[32];
    if (!is_str(l)) {
        switch (l->type) {
        case TYPE_INT: len = u_int2str(l->u.i, temp_lhs); break;
        case TYPE_FLT: len = u_flt2str(l->u.f, temp_lhs); break;
        default: temp_lhs[0] = '\0'; break;
        }
        lhs = temp_lhs;
    } else {
        lhs = l->u.s->str;
        len = l->u.s->l;
    }
    if (!is_re(r)) {
        rf_re *temp_re;
        rf_int res;
        int errcode;
        int capture = 0;
        switch (r->type) {
        case TYPE_INT: u_int2str(r->u.i, temp_rhs); break;
        case TYPE_FLT: u_flt2str(r->u.f, temp_rhs); break;
        case TYPE_STR:
            capture = 1;
            temp_re = re_compile(r->u.s->str, r->u.s->l, 0, &errcode);
            goto do_match;
        default: temp_rhs[0] = '\0'; break;
        }
        temp_re = re_compile(temp_rhs, PCRE2_ZERO_TERMINATED, 0, &errcode);
do_match:
        res = re_match(lhs, len, temp_re, capture);
        re_free(temp_re);
        return res;
    } else {
        return re_match(lhs, len, r->u.r, 1);
    }
}

Z_BINOP(match)  { set_int(l, match(l, r)); }
Z_BINOP(nmatch) { set_int(l, !match(l, r)); }
// Indexing operator: numbers are stringified and indexed as strings
// (single char or range slice), tables are looked up, and indexing a riff
// function yields the raw bytecode byte at that offset.
Z_BINOP(idx) {
    char temp[32];
    switch (l->type) {
    case TYPE_INT: {
        u_int2str(l->u.i, temp);
        if (is_rng(r)) {
            set_str(l, s_substr(temp, r->u.q->from, r->u.q->to, r->u.q->itvl));
        } else {
            rf_int r1 = intval(r);
            rf_int len = (rf_int) strlen(temp);
            // Negative indices count from the end; out-of-range yields null.
            if (r1 < 0)
                r1 += len;
            if (r1 > len - 1 || r1 < 0)
                set_null(l);
            else
                set_str(l, s_newstr(temp + r1, 1, 0));
        }
        break;
    }
    case TYPE_FLT: {
        u_flt2str(l->u.f, temp);
        if (is_rng(r)) {
            set_str(l, s_substr(temp, r->u.q->from, r->u.q->to, r->u.q->itvl));
        } else {
            rf_int r1 = intval(r);
            rf_int len = (rf_int) strlen(temp);
            if (r1 < 0)
                r1 += len;
            if (r1 > len - 1 || r1 < 0)
                set_null(l);
            else
                set_str(l, s_newstr(temp + r1, 1, 0));
        }
        break;
    }
    case TYPE_STR: {
        if (is_rng(r)) {
            l->u.s = s_substr(l->u.s->str, r->u.q->from, r->u.q->to, r->u.q->itvl);
        } else {
            rf_int r1 = intval(r);
            rf_int len = (rf_int) l->u.s->l;
            if (r1 < 0)
                r1 += len;
            if (r1 > len - 1 || r1 < 0)
                set_null(l);
            else
                l->u.s = s_newstr(&l->u.s->str[r1], 1, 0);
        }
        break;
    }
    case TYPE_TAB:
        *l = *t_lookup(l->u.t, r, 0);
        break;
    case TYPE_RFN: {
        rf_int r1 = intval(r);
        if (r1 > l->u.fn->code->n - 1 || r1 < 0)
            set_null(l);
        else
            set_int(l, l->u.fn->code->code[r1]);
        break;
    }
    default:
        set_null(l);
        break;
    }
}
// Push a new loop iterator for `set` onto the iterator stack (iter).
static inline void new_iter(rf_val *set) {
    rf_iter *new = malloc(sizeof(rf_iter));
    new->p = iter;
    iter = new;
    switch (set->type) {
    case TYPE_FLT:
        set->u.i = (rf_int) set->u.f;
        // Fall-through
    case TYPE_INT:
        iter->keys = NULL;
        iter->t = LOOP_RNG;
        iter->st = 0;
        if (set->u.i >= 0) {
            iter->n = set->u.i + 1; // Inclusive
            iter->set.itvl = 1;
        } else {
            iter->n = -(set->u.i) + 1; // Inclusive
            iter->set.itvl = -1;
        }
        break;
    case TYPE_STR:
        iter->t = LOOP_STR;
        iter->n = set->u.s->l;
        iter->keys = NULL;
        iter->set.str = set->u.s->str;
        break;
    case TYPE_RE:
        err("cannot iterate over regular expression");
    case TYPE_RNG:
        iter->keys = NULL;
        iter->t = LOOP_RNG;
        iter->set.itvl = set->u.q->itvl;
        if (iter->set.itvl > 0)
            iter->on = (set->u.q->to - set->u.q->from) + 1;
        else
            iter->on = (set->u.q->from - set->u.q->to) + 1;
        if (iter->on <= 0)
            iter->n = UINT64_MAX; // TODO "Infinite" loop
        else
            iter->n = (rf_uint) ceil(fabs(iter->on / (double) iter->set.itvl));
        iter->st = set->u.q->from;
        break;
    case TYPE_TAB:
        iter->t = LOOP_TAB;
        iter->on = iter->n = t_length(set->u.t);
        iter->keys = t_collect_keys(set->u.t);
        iter->set.tab = set->u.t;
        break;
    case TYPE_RFN:
        iter->t = LOOP_FN;
        iter->n = set->u.fn->code->n;
        iter->keys = NULL;
        iter->set.code = set->u.fn->code->code;
        break;
    case TYPE_CFN:
        err("cannot iterate over C function");
    default: break;
    }
}

// Pop the current iterator; table loops also free the collected key array
// (rewinding the key pointer by however far the loop advanced it).
static inline void destroy_iter(void) {
    rf_iter *old = iter;
    iter = iter->p;
    if (old->t == LOOP_TAB) {
        if (!(old->n + 1)) // Loop completed?
            free(old->keys - old->on);
        else
            free(old->keys - (old->on - old->n));
    }
    free(old);
}

// Populate the `arg` table from the process argument vector; entries are
// inserted at index i - arg0 (presumably so the script name lands at
// index 0 — confirm against the caller's arg0 semantics).
static inline void init_argv(rf_tab *t, rf_int arg0, int rf_argc, char **rf_argv) {
    t_init(t);
    for (rf_int i = 0; i < rf_argc; ++i) {
        // TODO force parameter should not be set
        rf_val v = (rf_val) {
            TYPE_STR,
            .u.s = s_newstr(rf_argv[i], strlen(rf_argv[i]), 0)
        };
        t_insert_int(t, i-arg0, &v, 1, 1);
    }
}
static int exec(uint8_t *, rf_val *, rf_stack *, rf_stack *);

// VM entry point/initialization: sets up the global table, the `arg`
// table, standard file handles, library builtins and user functions, then
// runs the main chunk on a fresh stack.
int z_exec(rf_env *e) {
    h_init(&globals);
    iter = NULL;
    t_init(&fldv);
    re_register_fldv(&fldv);
    init_argv(&argv, e->arg0, e->argc, e->argv);
    h_insert(&globals, s_newstr("arg", 3, 1), &(rf_val){TYPE_TAB, .u.t = &argv}, 1);
    h_insert(&globals, s_newstr("stdin", 5, 1), &(rf_val){TYPE_FH, .u.fh = &(rf_fh){stdin, FH_STD}}, 1);
    h_insert(&globals, s_newstr("stdout", 6, 1), &(rf_val){TYPE_FH, .u.fh = &(rf_fh){stdout, FH_STD}}, 1);
    h_insert(&globals, s_newstr("stderr", 6, 1), &(rf_val){TYPE_FH, .u.fh = &(rf_fh){stderr, FH_STD}}, 1);
    l_register(&globals);
    // Add user-defined functions to the global hash table
    for (int i = 0; i < e->nf; ++i) {
        // Don't add anonymous functions to globals (rf_str should not
        // have a computed hash)
        if (!e->fn[i]->name->hash)
            continue;
        rf_val *fn = malloc(sizeof(rf_val));
        *fn = (rf_val) {TYPE_RFN, .u.fn = e->fn[i]};
        h_insert(&globals, e->fn[i]->name, fn, 1);
    }
    return exec(e->main.code->code, e->main.code->k, stack, stack);
}

// Reentry point for eval(): reuses the existing global state and runs the
// new chunk at the caller-supplied frame pointer.
// NOTE(review): the registration loop duplicates z_exec's — a shared
// helper would keep the two in sync.
int z_exec_reenter(rf_env *e, rf_stack *fp) {
    // Add user-defined functions to the global hash table
    for (int i = 0; i < e->nf; ++i) {
        // Don't add anonymous functions to globals (rf_str should not
        // have a computed hash)
        if (!e->fn[i]->name->hash)
            continue;
        rf_val *fn = malloc(sizeof(rf_val));
        *fn = (rf_val) {TYPE_RFN, .u.fn = e->fn[i]};
        h_insert(&globals, e->fn[i]->name, fn, 1);
    }
    return exec(e->main.code->code, e->main.code->k, fp, fp);
}
#ifndef COMPUTED_GOTO
#ifdef __GNUC__
#define COMPUTED_GOTO
#endif
#endif
// VM interpreter loop.
//   ep: entry point into the bytecode to execute
//   k:  constants table for the current code object
//   sp: stack pointer on entry
//   fp: frame pointer (base of the current call frame)
// Returns the number of values left for the caller (0 or 1).
// Two fixes in the OP_IDXV handler are marked with FIX below.
static int exec(uint8_t *ep, rf_val *k, rf_stack *sp, rf_stack *fp) {
    if (sp - stack >= VM_STACK_SIZE)
        err("stack overflow");
    rf_stack *retp = sp;            // Save original SP
    rf_val   *tp;                   // Temp pointer
    register uint8_t *ip = ep;
#ifndef COMPUTED_GOTO
// Use standard while loop with switch/case if computed goto is
// disabled or unavailable
#define z_case(l) case OP_##l:
#define z_break   break
    while (1) { switch (*ip) {
#else
#include "labels.h"
    dispatch();
#endif

// Unconditional jumps
#define j8  (ip += (int8_t) ip[1])
#define j16 (ip += (int16_t) ((ip[1] << 8) + ip[2]))
    z_case(JMP8)  j8;  z_break;
    z_case(JMP16) j16; z_break;

// Conditional jumps (pop stack unconditionally)
#define jc8(x)  (x ? j8  : (ip += 2)); --sp;
#define jc16(x) (x ? j16 : (ip += 3)); --sp;
    z_case(JNZ8)  jc8(test(&sp[-1].v));   z_break;
    z_case(JNZ16) jc16(test(&sp[-1].v));  z_break;
    z_case(JZ8)   jc8(!test(&sp[-1].v));  z_break;
    z_case(JZ16)  jc16(!test(&sp[-1].v)); z_break;

// Conditional jumps (pop stack if jump not taken)
#define xjc8(x)  if (x) j8;  else {--sp; ip += 2;}
#define xjc16(x) if (x) j16; else {--sp; ip += 3;}
    z_case(XJNZ8)  xjc8(test(&sp[-1].v));   z_break;
    z_case(XJNZ16) xjc16(test(&sp[-1].v));  z_break;
    z_case(XJZ8)   xjc8(!test(&sp[-1].v));  z_break;
    z_case(XJZ16)  xjc16(!test(&sp[-1].v)); z_break;

    // Initialize/cycle current iterator
    z_case(LOOP8) z_case(LOOP16) {
        int jmp16 = *ip == OP_LOOP16;
        if (!iter->n--) {
            if (jmp16)
                ip += 3;
            else
                ip += 2;
            z_break;
        }
        switch (iter->t) {
        case LOOP_RNG:
            if (is_null(iter->v))
                *iter->v = (rf_val) {TYPE_INT, .u.i = iter->st};
            else
                iter->v->u.i += iter->set.itvl;
            break;
        case LOOP_STR:
            if (iter->k != NULL) {
                if (is_null(iter->k)) {
                    set_int(iter->k, 0);
                } else {
                    iter->k->u.i += 1;
                }
            }
            if (!is_null(iter->v)) {
                m_freestr(iter->v->u.s);
                iter->v->u.s = s_newstr(iter->set.str++, 1, 0);
            } else {
                *iter->v = (rf_val) {TYPE_STR, .u.s = s_newstr(iter->set.str++, 1, 0)};
            }
            break;
        case LOOP_TAB:
            if (iter->k != NULL) {
                *iter->k = *iter->keys;
            }
            *iter->v = *t_lookup(iter->set.tab, iter->keys, 0);
            iter->keys++;
            break;
        case LOOP_FN:
            if (iter->k != NULL) {
                if (is_null(iter->k)) {
                    set_int(iter->k, 0);
                } else {
                    iter->k->u.i += 1;
                }
            }
            if (is_null(iter->v)) {
                *iter->v = (rf_val) {TYPE_INT, .u.i = *iter->set.code++};
            } else {
                iter->v->u.i = *iter->set.code++;
            }
            break;
        default: break;
        }
        // Treat byte(s) following OP_LOOP as unsigned since jumps
        // are always backward
        if (jmp16)
            ip -= (ip[1] << 8) + ip[2];
        else
            ip -= ip[1];
        z_break;
    }

    // Destroy the current iterator struct
    z_case(POPL) {
        destroy_iter();
        ++ip;
        z_break;
    }

    // Create iterator and jump to the corresponding OP_LOOP
    // instruction for initialization
    z_case(ITERV) z_case(ITERKV) {
        int k = *ip == OP_ITERKV;
        new_iter(&sp[-1].v);
        --sp;
        set_null(&sp++->v);
        // Reserve extra stack slot for k,v iterators
        if (k) {
            set_null(&sp++->v);
            iter->k = &sp[-2].v;
        } else {
            iter->k = NULL;
        }
        iter->v = &sp[-1].v;
        j16;
        z_break;
    }

// Unary operations
// sp[-1].v is assumed to be safe to overwrite
#define unop(x) \
    z_##x(&sp[-1].v); \
    ++ip;
    z_case(LEN)  unop(len);  z_break;
    z_case(LNOT) unop(lnot); z_break;
    z_case(NEG)  unop(neg);  z_break;
    z_case(NOT)  unop(not);  z_break;
    z_case(NUM)  unop(num);  z_break;

// Standard binary operations
// sp[-2].v and sp[-1].v are assumed to be safe to overwrite
#define binop(x) \
    z_##x(&sp[-2].v, &sp[-1].v); \
    --sp; \
    ++ip;
    z_case(ADD)    binop(add);    z_break;
    z_case(SUB)    binop(sub);    z_break;
    z_case(MUL)    binop(mul);    z_break;
    z_case(DIV)    binop(div);    z_break;
    z_case(MOD)    binop(mod);    z_break;
    z_case(POW)    binop(pow);    z_break;
    z_case(AND)    binop(and);    z_break;
    z_case(OR)     binop(or);     z_break;
    z_case(XOR)    binop(xor);    z_break;
    z_case(SHL)    binop(shl);    z_break;
    z_case(SHR)    binop(shr);    z_break;
    z_case(EQ)     binop(eq);     z_break;
    z_case(NE)     binop(ne);     z_break;
    z_case(GT)     binop(gt);     z_break;
    z_case(GE)     binop(ge);     z_break;
    z_case(LT)     binop(lt);     z_break;
    z_case(LE)     binop(le);     z_break;
    z_case(CAT)    binop(cat);    z_break;
    z_case(MATCH)  binop(match);  z_break;
    z_case(NMATCH) binop(nmatch); z_break;

// Pre-increment/decrement
// sp[-1].a is address of some variable's rf_val.
// Increment/decrement this value directly and replace the stack
// element with a copy of the value.
#define pre(x) \
    switch (sp[-1].a->type) { \
    case TYPE_INT: sp[-1].a->u.i += x; break; \
    case TYPE_FLT: sp[-1].a->u.f += x; break; \
    case TYPE_STR: \
        set_flt(sp[-1].a, str2flt(sp[-1].a->u.s) + x); \
        break; \
    default: \
        set_int(sp[-1].a, x); \
        break; \
    } \
    sp[-1].v = *sp[-1].a; \
    ++ip;
    z_case(PREINC) pre(1);  z_break;
    z_case(PREDEC) pre(-1); z_break;

// Post-increment/decrement
// sp[-1].a is address of some variable's rf_val. Create a copy of
// the raw value, then increment/decrement the rf_val at the given
// address. Replace the stack element with the previously made copy
// and coerce to a numeric value if needed.
#define post(x) \
    tp = sp[-1].a; \
    sp[-1].v = *tp; \
    switch (tp->type) { \
    case TYPE_INT: tp->u.i += x; break; \
    case TYPE_FLT: tp->u.f += x; break; \
    case TYPE_STR: \
        set_flt(tp, str2flt(tp->u.s) + x); \
        break; \
    default: \
        set_int(tp, x); \
        break; \
    } \
    unop(num);
    z_case(POSTINC) post(1);  z_break;
    z_case(POSTDEC) post(-1); z_break;

// Compound assignment operations
// sp[-2].a is address of some variable's rf_val. Save the address
// and place a copy of the value in sp[-2].v. Perform the binary
// operation x and assign the result to the saved address.
#define cbinop(x) \
    tp = sp[-2].a; \
    sp[-2].v = *tp; \
    binop(x); \
    *tp = sp[-1].v;
    z_case(ADDX) cbinop(add); z_break;
    z_case(SUBX) cbinop(sub); z_break;
    z_case(MULX) cbinop(mul); z_break;
    z_case(DIVX) cbinop(div); z_break;
    z_case(MODX) cbinop(mod); z_break;
    z_case(CATX) cbinop(cat); z_break;
    z_case(POWX) cbinop(pow); z_break;
    z_case(ANDX) cbinop(and); z_break;
    z_case(ORX)  cbinop(or);  z_break;
    z_case(SHLX) cbinop(shl); z_break;
    z_case(SHRX) cbinop(shr); z_break;
    z_case(XORX) cbinop(xor); z_break;

    // Simple pop operation
    z_case(POP) --sp; ++ip; z_break;
    // Pop IP+1 values from stack
    z_case(POPI) sp -= ip[1]; ip += 2; z_break;
    // Push null literal on stack
    z_case(NULL) set_null(&sp++->v); ++ip; z_break;

// Push immediate
// Assign integer value x to the top of the stack.
#define imm(x) set_int(&sp++->v, x);
    z_case(IMM8)  imm(ip[1]); ip += 2; z_break;
    z_case(IMM16) imm((ip[1]<<8) + ip[2]); ip += 3; z_break;
    z_case(IMM0)  imm(0); ++ip; z_break;
    z_case(IMM1)  imm(1); ++ip; z_break;
    z_case(IMM2)  imm(2); ++ip; z_break;

// Push constant
// Copy constant x from code object's constant table to the top of the
// stack.
#define pushk(x) sp++->v = k[(x)];
    z_case(PUSHK)  pushk(ip[1]); ip += 2; z_break;
    z_case(PUSHK0) pushk(0); ++ip; z_break;
    z_case(PUSHK1) pushk(1); ++ip; z_break;
    z_case(PUSHK2) pushk(2); ++ip; z_break;

// Push global address
// Assign the address of global variable x's rf_val in the globals
// table.
// h_lookup() will create an entry if needed, accommodating
// undeclared/uninitialized variable usage.
// Parser signals for this opcode for assignment or pre/post ++/--.
#define gbla(x) sp++->a = h_lookup(&globals, k[(x)].u.s, 1);
    z_case(GBLA)  gbla(ip[1]); ip += 2; z_break;
    z_case(GBLA0) gbla(0); ++ip; z_break;
    z_case(GBLA1) gbla(1); ++ip; z_break;
    z_case(GBLA2) gbla(2); ++ip; z_break;

// Push global value
// Copy the value of global variable x to the top of the stack.
// h_lookup() will create an entry if needed, accommodating
// undeclared/uninitialized variable usage.
// Parser signals for this opcode to be used when only needing the
// value, e.g. arithmetic.
#define gblv(x) sp++->v = *h_lookup(&globals, k[(x)].u.s, 0);
    z_case(GBLV)  gblv(ip[1]); ip += 2; z_break;
    z_case(GBLV0) gblv(0); ++ip; z_break;
    z_case(GBLV1) gblv(1); ++ip; z_break;
    z_case(GBLV2) gblv(2); ++ip; z_break;

// Push local address
// Push the address of FP[x] to the top of the stack.
#define lcla(x) sp++->a = &fp[(x)].v;
    z_case(LCLA)  lcla(ip[1]) ip += 2; z_break;
    z_case(LCLA0) lcla(0); ++ip; z_break;
    z_case(LCLA1) lcla(1); ++ip; z_break;
    z_case(LCLA2) lcla(2); ++ip; z_break;

// Push local value
// Copy the value of FP[x] to the top of the stack.
#define lclv(x) sp++->v = fp[(x)].v;
    z_case(LCLV)  lclv(ip[1]) ip += 2; z_break;
    z_case(LCLV0) lclv(0); ++ip; z_break;
    z_case(LCLV1) lclv(1); ++ip; z_break;
    z_case(LCLV2) lclv(2); ++ip; z_break;

    // Tailcalls
    // Recycle current call frame
    z_case(TCALL) {
        int nargs = ip[1] + 1;
        if (!is_fn(&sp[-nargs].v))
            err("attempt to call non-function value");
        if (is_rfn(&sp[-nargs].v)) {
            sp -= nargs;
            rf_fn *fn = sp->v.u.fn;
            int ar1 = sp - fp - 1;  // Current frame's "arity"
            int ar2 = fn->arity;    // Callee's arity
            // Recycle call frame
            for (int i = 0; i <= ar2; ++i) {
                fp[i].v = sp[i].v;
            }
            // Increment SP without nullifying slots (preserving
            // values) if number of arguments exceeds the frame's
            // current "arity"
            if (nargs-1 > ar1) {
                sp += (nargs - ar1 - 1);
                ar1 = nargs - 1;
            }
            // In the case of direct recursion and no call frame
            // adjustments needed, quickly reset IP and dispatch
            // control
            if (ep == fn->code->code && ar1 == ar2) {
                ip = ep;
                z_break;
            }
            // If callee's arity is larger than the current frame,
            // create stack space and nullify slots
            if (ar2 > ar1) {
                while (ar1++ < ar2)
                    set_null(&sp++->v);
            }
            // Else, if the current frame is too large for the next
            // call, decrement SP
            else if (ar2 < ar1) {
                sp -= ar1 - ar2;
            }
            // Else else, if the size of the call frame is fine, but
            // the user didn't provide enough arguments, create stack
            // space and nullify slots
            else if (nargs <= ar2) {
                while (nargs++ <= ar2)
                    set_null(&sp++->v);
            }
            ip = ep = fn->code->code;
            k = fn->code->k;
            z_break;
        }
        // Fall-through to OP_CALL for C function calls
    }

    // Calling convention Arguments are pushed in-order following the
    // rf_val containing a pointer to the function to be called.
    // Caller sets SP and FP to appropriate positions and cleans up
    // stack afterward. Callee returns from exec() the number of
    // values to be returned to the caller.
    z_case(CALL) {
        int nargs = ip[1];
        if (!is_fn(&sp[-nargs-1].v))
            err("attempt to call non-function value");
        int arity, nret;
        // User-defined functions
        if (is_rfn(&sp[-nargs-1].v)) {
            rf_fn *fn = sp[-nargs-1].v.u.fn;
            arity = fn->arity;
            // If user called function with too few arguments,
            // nullify stack slots and increment SP.
            if (nargs < arity) {
                for (int i = nargs; i < arity; ++i) {
                    set_null(&sp++->v);
                }
            }
            // If user called function with too many arguments,
            // decrement SP so it points to the appropriate slot
            // for control transfer.
            else if (nargs > arity) {
                sp -= (nargs - arity);
            }
            // Pass SP-arity-1 as the FP for the succeeding call
            // frame. Since the function is already at this location
            // in the stack, the compiler can reserve the slot to
            // accommodate any references a named function makes to
            // itself without any other work required from the VM
            // here. This is completely necessary for local named
            // functions, but globals benefit as well.
            nret = exec(fn->code->code, fn->code->k, sp, sp - arity - 1);
            sp -= arity;
            // Copy the function's return value to the stack top -
            // this should be where the caller pushed the original
            // function.
            sp[-1].v = sp[arity].v;
        }
        // Built-in/C functions
        else {
            c_fn *fn = sp[-nargs-1].v.u.cfn;
            arity = fn->arity;
            // Most library functions are somewhat variadic; their
            // arity refers to the minimum number of arguments
            // they require.
            if (arity && nargs < arity) {
                // If user called function with too few arguments,
                // nullify stack slots.
                for (int i = nargs; i < arity; ++i) {
                    set_null(&sp[i].v);
                }
            }
            // Decrement SP to serve as the FP for the function
            // call. Library functions assign their own return
            // values to SP-1.
            sp -= nargs;
            nret = fn->fn(&sp->v, nargs);
        }
        ip += 2;
        // Nullify stack slot if callee returns nothing
        if (!nret) {
            set_null(&sp[-1].v);
        }
        z_break;
    }

    z_case(RET) return 0;

    // Caller expects return value to be at its original SP +
    // arity of the function. "clean up" any created locals by
    // copying the return value to the appropriate slot.
    z_case(RET1)
        retp->v = sp[-1].v;
        return 1;

// Create a sequential table of x elements from the top
// of the stack. Leave the table rf_val on the stack.
// Tables index at 0 by default.
#define new_tab(x) \
    tp = v_newtab(); \
    for (int i = (x) - 1; i >= 0; --i) { \
        --sp; \
        t_insert_int(tp->u.t, i, &sp->v, 1, 1); \
    } \
    sp++->v = *tp;
    z_case(TAB0) new_tab(0); ++ip; z_break;
    z_case(TAB)  new_tab(ip[1]) ip += 2; z_break;
    z_case(TABK)
        new_tab(k[ip[1]].u.i);
        ip += 2;
        z_break;

    z_case(IDXV)
        for (int i = -ip[1] - 1; i < -1; ++i) {
            if (sp[i].t <= TYPE_CFN) {
                z_idx(&sp[i].v, &sp[i+1].v);
                sp[i+1].v = sp[i].v;
                continue;
            }
            switch (sp[i].a->type) {
            // Create array if sp[i].a is an uninitialized variable
            case TYPE_NULL:
                // FIX: create the table in the slot being subscripted
                // (sp[i].a); previously wrote to sp[i+1].a, clobbering
                // the key slot and leaving sp[i].a null for the
                // fall-through lookup below (cf. OP_IDXA/OP_IDXV1).
                *sp[i].a = *v_newtab();
                // Fall-through
            case TYPE_TAB:
                sp[i+1].v = *t_lookup(sp[i].a->u.t, &sp[i+1].v, 0);
                break;
            // Dereference and call z_idx().
            case TYPE_INT: case TYPE_FLT:
            case TYPE_STR: case TYPE_RFN:
                // FIX: dereference sp[i].a (was sp[-i].a, which indexed
                // above the stack top since i is negative in this loop).
                sp[i].v = *sp[i].a;
                z_idx(&sp[i].v, &sp[i+1].v);
                sp[i+1].v = sp[i].v;
                break;
            case TYPE_CFN:
                err("attempt to subscript a C function");
            default:
                break;
            }
        }
        sp -= ip[1];
        sp[-1].v = sp[ip[1] - 1].v;
        ip += 2;
        z_break;

    z_case(IDXA)
        for (int i = -ip[1] - 1; i < -1; ++i) {
            if (sp[i].t <= TYPE_CFN)
                tp = &sp[i].v;
            else
                tp = sp[i].a;
            switch (tp->type) {
            // Create array if sp[i].a is an uninitialized variable
            case TYPE_NULL:
                *tp = *v_newtab();
                // Fall-through
            case TYPE_TAB:
                sp[i+1].a = t_lookup(tp->u.t, &sp[i+1].v, 1);
                break;
            // IDXA is invalid for all other types
            default:
                err("invalid assignment");
            }
        }
        sp -= ip[1];
        sp[-1].a = sp[ip[1] - 1].a;
        ip += 2;
        z_break;

    // IDXA
    // Perform the lookup and leave the corresponding element's
    // rf_val address on the stack.
    z_case(IDXA1)
        // Accommodate OP_IDXA calls when SP-2 is a raw value
        if (sp[-2].t <= TYPE_CFN)
            tp = &sp[-2].v;
        else
            tp = sp[-2].a;
        switch (tp->type) {
        // Create array if sp[-2].a is an uninitialized variable
        case TYPE_NULL:
            *tp = *v_newtab();
            // Fall-through
        case TYPE_TAB:
            sp[-2].a = t_lookup(tp->u.t, &sp[-1].v, 1);
            break;
        // IDXA is invalid for all other types
        default:
            err("invalid assignment");
        }
        --sp;
        ++ip;
        z_break;

    // IDXV
    // Perform the lookup and leave a copy of the corresponding
    // element's value on the stack.
    z_case(IDXV1)
        // All expressions e.g. x[y] are compiled to push the address
        // of the identifier being subscripted. However OP_IDXV is
        // emitted for all expressions not requiring the address of
        // the set element to be left on the stack. In the event the
        // instruction is OP_IDXV and SP-2 contains a raw value (not a
        // pointer), the high order 64 bits will be the type tag of
        // the rf_val instead of a memory address. When that happens,
        // defer to z_idx().
        if (sp[-2].t <= TYPE_CFN) {
            binop(idx);
            z_break;
        }
        switch (sp[-2].a->type) {
        // Create array if sp[-2].a is an uninitialized variable
        case TYPE_NULL:
            *sp[-2].a = *v_newtab();
            // Fall-through
        case TYPE_TAB:
            sp[-2].v = *t_lookup(sp[-2].a->u.t, &sp[-1].v, 0);
            --sp;
            ++ip;
            break;
        // Dereference and call z_idx().
        case TYPE_INT: case TYPE_FLT: case TYPE_STR: case TYPE_RFN:
            sp[-2].v = *sp[-2].a;
            binop(idx);
            break;
        case TYPE_CFN:
            err("attempt to subscript a C function");
        default:
            break;
        }
        z_break;

    z_case(FLDA)
        sp[-1].a = t_lookup(&fldv, &sp[-1].v, 1);
        ++ip;
        z_break;

    z_case(FLDV)
        sp[-1].v = *t_lookup(&fldv, &sp[-1].v, 0);
        ++ip;
        z_break;

// Create a new "range" value.
// There are 8 different valid forms of a range; each has their own
// instruction.
//   rng:   x..y    SP[-2]..SP[-1]
//   rngf:  x..     SP[-1]..INT_MAX
//   rngt:  ..y     0..SP[-1]
//   rnge:  ..      0..INT_MAX
//   srng:  x..y:z  SP[-3]..SP[-2]:SP[-1]
//   srngf: x..:z   SP[-2]..INT_MAX:SP[-1]
//   srngt: ..y:z   0..SP[-2]:SP[-1]
//   srnge: ..:z    0..INT_MAX:SP[-1]
// If `z` is not provided, the interval is set to -1 if x > y
// (downward ranges). Otherwise, the interval is set to 1 (upward
// ranges).
#define z_rng(f,t,i,s) { \
    rf_rng *rng = malloc(sizeof(rf_rng)); \
    rf_int from = rng->from = (f); \
    rf_int to   = rng->to   = (t); \
    rf_int itvl = (i); \
    rng->itvl = itvl ? itvl : from > to ? -1 : 1; \
    s = (rf_val) {TYPE_RNG, .u.q = rng}; \
}
    // x..y
    z_case(RNG)
        z_rng(intval(&sp[-2].v),
              intval(&sp[-1].v),
              0,
              sp[-2].v);
        --sp;
        ++ip;
        z_break;
    // x..
    z_case(RNGF)
        z_rng(intval(&sp[-1].v),
              INT64_MAX,
              0,
              sp[-1].v);
        ++ip;
        z_break;
    // ..y
    z_case(RNGT)
        z_rng(0,
              intval(&sp[-1].v),
              0,
              sp[-1].v);
        ++ip;
        z_break;
    // ..
    z_case(RNGE)
        ++sp;
        z_rng(0,
              INT64_MAX,
              0,
              sp[-1].v);
        ++ip;
        z_break;
    // x..y:z
    z_case(SRNG)
        z_rng(intval(&sp[-3].v),
              intval(&sp[-2].v),
              intval(&sp[-1].v),
              sp[-3].v);
        sp -= 2;
        ++ip;
        z_break;
    // x..:z
    z_case(SRNGF)
        z_rng(intval(&sp[-2].v),
              INT64_MAX,
              intval(&sp[-1].v),
              sp[-2].v);
        --sp;
        ++ip;
        z_break;
    // ..y:z
    z_case(SRNGT)
        z_rng(0,
              intval(&sp[-2].v),
              intval(&sp[-1].v),
              sp[-2].v);
        --sp;
        ++ip;
        z_break;
    // ..:z
    z_case(SRNGE)
        z_rng(0,
              INT64_MAX,
              intval(&sp[-1].v),
              sp[-1].v);
        ++ip;
        z_break;

    // Simple assignment
    // copy SP[-1] to *SP[-2] and leave value on stack.
    z_case(SET)
        sp[-2].v = *sp[-2].a = sp[-1].v;
        --sp;
        ++ip;
        z_break;
#ifndef COMPUTED_GOTO
    } }
#endif
    return 0;
}
|
import React, { useState } from 'react';
const App = () => {
const [time, setTime] = useState('');
const handleClick = () => {
const date = new Date();
const hh = date.getHours();
const mm = date.getMinutes();
const ss = date.getSeconds();
setTime(`${hh}:${mm}:${ss}`);
};
return (
<div>
<input type="text" value={time} />
<button onClick={handleClick}>Update Time</button>
</div>
);
};
export default App; |
# archiveable_id - integer
# archiveable_type - string
# archive_number - string
# archived_at - datetime
# Model with a polymorphic +archiveable+ association (see the column list
# above: archiveable_id/archiveable_type plus acts_as_archival's
# archive_number/archived_at columns).
class Poly < ActiveRecord::Base
  # Adds archive!/unarchive! behavior backed by archive_number/archived_at.
  acts_as_archival
  belongs_to :archiveable, :polymorphic => true
end
|
module.exports = function () {
var child_process = require('child_process');
var spawn = child_process.spawn;
var execFile = child_process.execFile;
var fontCommandRegistry = {
"xfce": [{command: "xfconf-query", params: ["-c", "xsettings", "-p", "/Gtk/FontName"]}],
"mate": [{command: "dconf", params: ["read", "/org/mate/desktop/interface/font-name"]}],
"cinamon": [{command: "dconf", params: ["read", "/org/cinnamon/desktop/interface/font-name"]},
{command: "gsettings", params: ["get", "org.cinnamon.desktop.interface", "font-name"]}],
"gnome": [{command: "gsettings", params: ["get", "org.gnome.desktop.interface", "font-name"]}],
"ubuntu": [{command: "gsettings", params: ["get", "org.gnome.desktop.interface", "font-name"]}]
}
var defaultFamily = "Sans";
var defaultWeight = 400;
var defaultStyle = "normal";
var defaultSize = "10pt";
var fontNameReader = function(registry, callback, index) {
var child = spawn(registry.command, registry.params);
var tmp = "";
child.stdout.on("data", function(chunk) {
tmp += chunk.toString();
});
child.stdout.on("close", function() {
callback(tmp, index);
});
};
var fontProcessor = function(fontName, callback) {
if (fontName) {
fontName = fontName.trim();
}
var family = defaultFamily;
var weight = defaultWeight;
var style = defaultStyle;
var size = defaultSize;
if (fontName.match(/^'(.+)'$/)) fontName = RegExp.$1;
if (fontName.match(/^(.+) ([0-9]+)$/)) {
size = RegExp.$2 + "pt";
fontName = RegExp.$1.trim();
if (fontName.match(/^(.+) Italic$/i)) {
style = "italic";
fontName = RegExp.$1.trim();
}
if (fontName.match(/^(.+) (([a-z]+\-)?(thin|light|bold|black|heavy))$/i)) {
var styleName = RegExp.$4.toLowerCase();
if (styleName == "thin") weight = "100";
if (styleName == "light") weight = "300";
if (styleName == "bold") weight = "700";
if (styleName == "black" || styleName == "heavy") weight = "900";
fontName = RegExp.$1.trim();
}
family = fontName;
}
callback({
family: family,
weight: weight,
style: style,
size: size
});
};
var platformHandlers = {
linux: function (callback) {
var d = process.env.DESKTOP_SESSION;
if (/ubuntu/ig.exec(d)) {
d = "ubuntu";
}
var fontRegistry = fontCommandRegistry[d];
if (!fontRegistry) {
console.error("Coud not found font command registry for ", d);
callback({
family: defaultFamily,
weight: defaultWeight,
style: defaultStyle,
size: defaultSize
});
} else {
var fontNames = [];
var processFontName = function(finish) {
if (finish && fontNames.length == 0) {
callback({
family: defaultFamily,
weight: defaultWeight,
style: defaultStyle,
size: defaultSize
});
return;
}
var fontName = fontNames[0];
fontProcessor(fontName, callback);
}
for (var i = 0; i < fontRegistry.length; i++) {
var registry = fontRegistry[i];
fontNameReader(registry, function(fontName, index) {
if (fontName != null && fontName.length > 0) {
fontNames.push(fontName);
}
processFontName(index == fontRegistry.length -1);
}, i);
}
}
},
win32: function(callback) {
var family = "Microsoft Sans Serif",
size = "9",
style = "normal",
weight = "400";
function returnDefault() {
callback({
family: family,
weight: weight,
style: style,
size: size + "pt"
});
}
//var child = execFile(__dirname + '/lib/fontconfig/fontconfig.exe');
var child = execFile(__dirname + '/lib/fontconfig/fontconfig.exe', [], (error, stdout, stderr) => {
if (error) {
return returnDefault();
}
var data = stdout.trim();
console.log('System Font:', data);
// TODO: use default fontsize
size = 11;
if (/FontName:\s?(.*)/i.exec(data)) {
family = RegExp.$1;
}
if (/FontSize:\s?(.*)/i.exec(data)) {
size = parseInt(RegExp.$1, 10);
}
if (/FontWeight:\s?(.*)/i.exec(data)) {
weight = RegExp.$1;
}
if (/FontStyle:\s?(.*)/i.exec(data)) {
style = RegExp.$1;
}
size = 11;
callback({
family: family,
weight: weight,
style: style,
size: (parseInt(size) - 2) + "pt"
});
});
},
darwin: function (callback) {
callback({
family: "Helvetica Neue",
weight: "400",
style: "normal",
size: "10pt"
});
}
}
function getDesktopFontConfig(callback) {
var platform = process.platform;
var handler = platformHandlers[platform];
if (!handler) {
callback({
family: defaultFamily,
weight: defaultWeight,
style: defaultStyle,
size: defaultSize
});
return;
}
handler(callback);
}
return {
getDesktopFontConfig: getDesktopFontConfig
}
}();
|
const express = require('express');
const bodyParser = require('body-parser');
const session = require('express-session');
const crypto = require('crypto');

const app = express();
app.use(bodyParser.json());
app.use(session({
  // NOTE(review): load the secret from the environment in production; a
  // hard-coded session secret defeats session security.
  secret: process.env.SESSION_SECRET || 'secret-key',
  resave: false,
  saveUninitialized: false,
}));

// In-memory user store (demo only; not persistent).
let users = [];

// Hash a password with a per-user random salt. An unsalted fast hash (the
// original used bare SHA-256) is vulnerable to rainbow-table attacks.
// NOTE(review): prefer a slow KDF (crypto.scrypt / bcrypt) for production.
function hashPassword(password, salt) {
  return crypto.createHash('sha256').update(salt + password).digest('hex');
}

// Constant-time comparison of two hex digests to avoid timing side-channels.
function hashesEqual(a, b) {
  const bufA = Buffer.from(a);
  const bufB = Buffer.from(b);
  return bufA.length === bufB.length && crypto.timingSafeEqual(bufA, bufB);
}

app.post('/register', (req, res) => {
  const { username, password } = req.body;
  if (!username || !password) {
    return res.status(400).send({message: 'Username and password required'});
  }
  if (users.some(user => user.username === username)) {
    return res.status(409).send({message: 'Username already taken'});
  }
  const salt = crypto.randomBytes(16).toString('hex');
  // FIX: the original spread req.body into the stored record, persisting
  // the PLAINTEXT password next to its hash. Strip it before storing.
  const { password: _plaintext, ...profile } = req.body;
  users.push({
    ...profile,
    salt,
    passwordHash: hashPassword(password, salt),
  });
  res.send({message: 'User registered'});
});

app.post('/login', (req, res) => {
  let user = users.find(user => user.username === req.body.username);
  if (user) {
    const candidate = hashPassword(req.body.password, user.salt);
    if (hashesEqual(candidate, user.passwordHash)) {
      // Store only what later requests need; keeping the full record
      // (including the hash) in the session leaks it to session storage.
      req.session.user = { username: user.username };
      res.send({message: 'Logged in'});
    } else {
      res.status(401).send({message: 'Invalid username or password'});
    }
  } else {
    res.status(401).send({message: 'Invalid username or password'});
  }
});

app.post('/password/request', (req, res) => {
  let user = users.find(user => user.username === req.body.username);
  if (user) {
    // Send an email to the user's email address with a link for resetting their password
    res.send({message: 'Email sent'});
  } else {
    res.status(404).send({message: 'User not found'});
  }
});

app.post('/password/reset', (req, res) => {
  // SECURITY(review): this endpoint lets ANYONE reset ANY user's password.
  // It must verify a single-use token issued by /password/request before
  // accepting the new password.
  let user = users.find(user => user.username === req.body.username);
  if (user) {
    const salt = crypto.randomBytes(16).toString('hex');
    user.salt = salt;
    user.passwordHash = hashPassword(req.body.password, salt);
    res.send({message: 'Password reset'});
  } else {
    res.status(404).send({message: 'User not found'});
  }
});

app.get('/protected', (req, res) => {
  if (req.session.user) {
    res.send({message: 'You are authorized'});
  } else {
    res.status(401).send({message: 'You are not authorized'});
  }
});

app.listen(3000, () => console.log('Server started'));
#include <Eigen/Dense>
#include <sstream>
#include <stdexcept>
// Parse a whitespace-separated list of doubles from `str` into a fixed-size
// Eigen column vector of length N.
// Throws std::invalid_argument when the string holds more or fewer than N
// numbers, and std::runtime_error when a token fails to parse after exactly
// N values were consumed.
template <std::size_t N>
Eigen::Matrix<double, N, 1> toVectorNd(const std::string& str) {
    Eigen::Matrix<double, N, 1> vec;
    std::istringstream stream(str);
    std::size_t idx = 0;
    for (double item; stream >> item; ++idx) {
        if (idx >= N) {
            throw std::invalid_argument("Input string contains more values than the specified vector size");
        }
        vec(idx, 0) = item;
    }
    if (idx != N) {
        throw std::invalid_argument("Input string does not contain enough values for the specified vector size");
    }
    if (!stream.eof()) {
        throw std::runtime_error("Error parsing input string");
    }
    return vec;
}
#! /bin/bash
# Shell script for running the various stages of the legacyhalos code using
# MPI+shifter at NERSC. Required arguments:
#   {1} stage [coadds, pipeline-coadds, ellipse, htmlplots]
#   {2} ncores [should match the resources requested.]
# Example: build the coadds using 16 MPI tasks with 8 cores per node (and therefore 16*8/32=4 nodes)
#salloc -N 8 -C haswell -A desi -L cfs,SCRATCH -t 04:00:00 --qos interactive --image=legacysurvey/legacyhalos:v0.1
#srun -n 64 -c 4 shifter --module=mpich-cle6 $LEGACYHALOS_CODE_DIR/bin/manga/manga-mpi.sh coadds 4 > coadds.log.1 2>&1 &
#srun -n 64 -c 4 shifter --module=mpich-cle6 $LEGACYHALOS_CODE_DIR/bin/manga/manga-mpi.sh ellipse 4 > ellipse.log.1 2>&1 &
#srun -n 64 -c 1 shifter --module=mpich-cle6 $LEGACYHALOS_CODE_DIR/bin/manga/manga-mpi.sh htmlplots 1 > htmlplots.log.1 2>&1 &

# Grab the input arguments--
stage="$1"
ncores="$2"

# FIX: fail fast on missing arguments; unquoted empty $stage previously made
# every `[ $stage = ... ]` test a syntax error.
if [ -z "$stage" ] || [ -z "$ncores" ]; then
    echo "Usage: $0 <stage> <ncores>"
    exit 1
fi

source "$LEGACYHALOS_CODE_DIR"/bin/manga/manga-env

maxmem=134217728 # Cori/Haswell = 128 GB (limit the memory per job).
grep -q "Xeon Phi" /proc/cpuinfo && maxmem=100663296 # Cori/KNL = 98 GB
# NOTE(review): usemem is computed but never referenced below -- confirm
# whether a ulimit or srun flag was meant to consume it.
let usemem=${maxmem}*${ncores}/32

if [ "$stage" = "test" ]; then
    time python "$LEGACYHALOS_CODE_DIR"/bin/manga/manga-mpi --help
elif [ "$stage" = "coadds" ]; then
    time python "$LEGACYHALOS_CODE_DIR"/bin/manga/manga-mpi --coadds --nproc "$ncores" --mpi --verbose
elif [ "$stage" = "pipeline-coadds" ]; then
    time python "$LEGACYHALOS_CODE_DIR"/bin/manga/manga-mpi --pipeline-coadds --nproc "$ncores" --mpi --verbose
elif [ "$stage" = "ellipse" ]; then
    time python "$LEGACYHALOS_CODE_DIR"/bin/manga/manga-mpi --ellipse --nproc "$ncores" --mpi --verbose
elif [ "$stage" = "htmlplots" ]; then
    time python "$LEGACYHALOS_CODE_DIR"/bin/manga/manga-mpi --htmlplots --nproc "$ncores" --mpi --verbose
else
    echo "Unrecognized stage $stage"
fi
<reponame>geowatson/DSA
/**
 * Queue adapter over a project-local LinkedList.
 * NOTE(review): enqueue() prepends and dequeue() pops; this is FIFO only if
 * LinkedList.pop() removes from the END opposite prepend() -- verify against
 * the LinkedList implementation.
 *
 * @param <Type> element type stored in the queue
 */
public class LinkedQueue<Type> {
// Backing list; elements are prepended on enqueue and popped on dequeue.
private LinkedList<Type> queue;
LinkedQueue() {
queue = new LinkedList<>();
}
/** @return true when the queue holds no elements */
public boolean isEmpty() {
return queue.isEmpty();
}
/** @return number of elements currently queued */
public int size() {
return queue.size();
}
/** Adds an item to the queue. */
public void enqueue(Type item) {
queue.prepend(item);
}
/** Removes and returns the next item (see FIFO note on the class). */
public Type dequeue() {
return queue.pop();
}
/**
 * Debugging item - converts queue to an array
 * @param type runtime class used to allocate the typed array
 * @return the queue's contents as an array
 */
public Type[] asArray(Class<Type> type) {
return queue.asArray(type);
}
}
|
#include <iostream>
#include <array>
using namespace std;
// Return the first pair of indices (i, k) with i < k whose elements sum to
// `target`, scanning pairs in lexicographic order. Returns {-1, -1} when no
// such pair exists.
std::array<int, 2> indicesOfSumUpToTarget(int arr[], int size, int target) {
    for (int first = 0; first + 1 < size; ++first) {
        for (int second = first + 1; second < size; ++second) {
            if (arr[first] + arr[second] == target) {
                return {first, second};
            }
        }
    }
    return {-1, -1};
}
int main() {
int arr[] = {2, 5, 9, 8};
int target = 11;
array<int, 2> result = indicesOfSumUpToTarget(arr, 4, target);
cout << result[0] << " " << result[1] << endl;
return 0;
} |
// 4631. Symmetric Order
// 2019.09.05
// 구현, 영어문제
#include<iostream>
#include<vector>
#include<string>
#include<deque>
using namespace std;
// Reads sets of names until a count of 0 (or end of input) and prints each
// set in "symmetric order": the input (assumed sorted shortest-first) is
// rearranged so names fan out from both ends toward the middle.
int main()
{
    int cnt = 1;
    while (1)
    {
        int n;
        // FIX: also stop on stream failure/EOF; the original looped forever
        // when input ended without a terminating 0.
        if (!(cin >> n) || n == 0)
        {
            break;
        }
        vector<string> v(n);
        for (int i = 0; i < n; i++)
        {
            cin >> v[i];
        }
        // Walk indices from last to first: even indices go to the front,
        // odd indices to the back of the deque.
        deque<string> dq;
        if (n % 2 == 0)
        {
            for (int i = n - 1; i >= 0; i--)
            {
                if (i % 2 == 0)
                {
                    dq.push_front(v[i]);
                }
                else
                {
                    dq.push_back(v[i]);
                }
            }
        }
        else
        {
            // Odd count: the final (even-indexed) name belongs at the back,
            // so seed it before applying the even/odd rule to the rest.
            dq.push_back(v[n - 1]);
            for (int i = n - 2; i >= 0; i--)
            {
                if (i % 2 == 0)
                {
                    dq.push_front(v[i]);
                }
                else
                {
                    dq.push_back(v[i]);
                }
            }
        }
        cout << "SET " << cnt << endl;
        // FIX: use size_t to avoid a signed/unsigned comparison warning.
        for (size_t i = 0; i < dq.size(); i++)
        {
            cout << dq[i] << endl;
        }
        cnt++;
    }
    return 0;
}
|
#!/bin/bash
#by mikejdelro (2019)
#---------#
#Functions

#Asks the user yes or no
function ask {
    # call with a prompt string or use a default
    read -r -p "${1:-Are you sure? [y/N]} " response
    case "$response" in
        [yY][eE][sS]|[yY])
            true
            ;;
        *)
            false
            ;;
    esac
}

#Asks user to press any key to continue
# FIX: renamed from `continue`, which shadowed the shell's loop builtin.
function pause {
    read -p "Press any key to continue or CTRL+C to abort...";
}

#---------#
#Initial Update/Upgrade
# FIX: refresh package lists (update) BEFORE upgrading; the original upgraded
# first, operating on stale package metadata.
ask "Would you like to update your version of Linux?" && sudo apt-get update -y
clear
ask "Would you like to upgrade your version of Linux?" && sudo apt-get upgrade -y
clear

#Install software-properties-common, in case it isn't installed
#Will hopefully install python3, which I will try and use moving forward.
#https://packages.ubuntu.com/bionic/software-properties-common
ask "Install software-properties-common?" && sudo apt-get install software-properties-common -y
clear

#---------#
#Docker
#OS Requirements
#Provide context of OS requirements
echo "To install Docker CE, you need the 64-bit version of one of these Ubuntu versions: Cosmic 18.10; Bionic 18.04 (LTS); Xenial 16.04 (LTS)"
#Python script to provide current platform
echo "You are currently running:" && python3 -c "import platform;print(platform.platform())"
pause

#Install Requirements via Apt
# FIX: run in the foreground -- backgrounding this in a screen session raced
# with the apt-get invocations below for the dpkg lock.
sudo apt-get install apt-transport-https ca-certificates curl gnupg-agent software-properties-common -y

#Uninstall previous versions of Docker
ask "The script will now try to uninstall previous versions of Docker previously installed on your system. Continue?" && sudo apt-get remove docker docker-engine docker.io containerd runc

#Add Docker's official GPG key
curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo apt-key add -

#Add Docker's repository
sudo add-apt-repository "deb [arch=amd64] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable" && sudo apt-get update

#Install the latest version of Docker CE and containerd
ask "The script will now install docker-ce docker-ce-cli and containerd. Continue?" && sudo apt-get install docker-ce docker-ce-cli containerd.io -y
clear

#End of Docker script
echo "The script has finished installing the latest version of Docker"
pause

#---------#
#Installing Falco
ask "Install Falco?" && sudo docker pull falcosecurity/falco
screen -S falco -d -m bash -c "docker run -i -t --name falco --privileged -v /var/run/docker.sock:/host/var/run/docker.sock -v /dev:/host/dev -v /proc:/host/proc:ro -v /boot:/host/boot:ro -v /lib/modules:/host/lib/modules:ro -v /usr:/host/usr:ro falcosecurity/falco"

#Install event-generator
ask "Install Event Generator?" && sudo docker pull sysdig/falco-event-generator
screen -S eventgen -d -m bash -c "docker run -it --name falco-event-generator sysdig/falco-event-generator"
<filename>packages/console/src/HomePage.tsx
import { formatSearchQuery, parseSearchDefinition } from '@medplum/core';
import { Button, SearchControl } from '@medplum/ui';
import React from 'react';
import { useLocation } from 'react-router';
import { history } from './history';
// Landing page: renders toolbar buttons plus a searchable resource list
// driven by the current URL's search definition.
export function HomePage() {
  const location = useLocation();
  const search = parseSearchDefinition(location);

  // Default to searching patients when the URL names no resource type.
  if (!search.resourceType) {
    search.resourceType = 'Patient';
  }

  return (
    <>
      <div style={{ background: 'white', padding: '2px 0 4px 4px' }}>
        <Button size="small">Fields</Button>
        <Button size="small">Filters</Button>
        <Button size="small">Export</Button>
      </div>
      <SearchControl
        checkboxesEnabled={true}
        search={search}
        onClick={e => {
          // Navigate to the detail page of the clicked resource.
          const { resourceType, id } = e.resource;
          history.push(`/${resourceType}/${id}`);
        }}
        onChange={e => {
          // Reflect the updated search definition in the URL.
          history.push({
            pathname: `/${e.definition.resourceType}`,
            search: formatSearchQuery(e.definition),
          });
        }}
      />
    </>
  );
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.