text stringlengths 1 1.05M |
|---|
<filename>utils/src/main/java/com/qtimes/utils/android/MoneyUtils.java
package com.qtimes.utils.android;
import androidx.annotation.NonNull;
import android.text.TextUtils;
import java.math.BigDecimal;
import java.text.DecimalFormat;
/**
* Created by gufei on 2016/8/22 0022.
* 货币转换
*/
/**
 * Currency conversion helpers between "yuanbao" (元宝) and "longbi" (龙币).
 * Conversion ratio: 1 yuanbao = 100 longbi.
 *
 * Created by gufei on 2016/8/22 0022.
 */
public class MoneyUtils {

    private static final int defType = MoneyType.LONGBI; // default target currency: longbi
    private static final int scaleLongbi = 100;          // 1 yuanbao : 100 longbi

    /** Formats {@code yuanbao} using the default currency type, keeping decimals. */
    public static String formatYuanBao(Object yuanbao) {
        return formatYuanBao(defType, yuanbao, true);
    }

    /** Formats {@code yuanbao} using the default currency type. */
    public static String formatYuanBao(Object yuanbao, boolean isdecimal) {
        return formatYuanBao(defType, yuanbao, isdecimal);
    }

    /**
     * Converts a yuanbao amount to the requested currency type.
     *
     * @param type      target currency, see {@link MoneyType}
     * @param yuanbao   amount; any object whose string form parses as a double
     * @param isdecimal whether to keep two decimal places in the result
     * @return formatted amount, or "0" for null/empty input
     */
    public static String formatYuanBao(int type, Object yuanbao, boolean isdecimal) {
        String money = "0";
        if (yuanbao == null || yuanbao.toString().isEmpty()) return money;
        switch (type) {
            case MoneyType.YUANBAO:
                // NOTE(review): YUANBAO always falls through and returns "0" regardless
                // of input — looks unfinished; confirm this is intentional.
                break;
            case MoneyType.LONGBI:
                double value = (yuanbao instanceof Double)
                        ? (Double) yuanbao
                        : Double.valueOf(yuanbao.toString());
                // BigDecimal.valueOf uses the double's decimal string form; the old
                // `new BigDecimal(double)` constructor captured binary noise
                // (e.g. 0.29 -> 0.2899999...).  Multiply exactly, then round HALF_UP
                // to two places (RoundingMode replaces deprecated ROUND_HALF_UP).
                BigDecimal scaled = BigDecimal.valueOf(value)
                        .multiply(BigDecimal.valueOf(scaleLongbi))
                        .setScale(2, java.math.RoundingMode.HALF_UP);
                money = isdecimal
                        ? String.valueOf(scaled.doubleValue())
                        : String.valueOf((int) scaled.doubleValue()); // truncate, as intValue() did
                break;
        }
        return money;
    }

    /**
     * Converts a post-gift balance to longbi, dropping the fractional part.
     *
     * @param balance yuanbao balance returned by the server
     * @return whole-longbi amount, or "0" for null input
     */
    public static String formatBalance(Double balance) {
        if (balance == null) return "0";
        // Exact decimal multiply avoids 0.29 * 100 == 28.999999999999996.
        BigDecimal longbi = BigDecimal.valueOf(balance).multiply(BigDecimal.valueOf(scaleLongbi));
        return new DecimalFormat("###########").format(longbi);
    }

    /**
     * Converts longbi to a yuanbao amount, keeping only significant digits.
     * <p>
     * e.g. 100.0 --> 1; 112 --> 1.12
     */
    public static String longBiToYuanBao(String longbi) {
        try {
            double value = (longbi != null && !longbi.isEmpty()) ? Double.parseDouble(longbi) : 0.0;
            return new DecimalFormat("#.##").format(value / 100.0);
        } catch (Exception e) {
            e.printStackTrace();
            return ""; // unparsable input
        }
    }

    /**
     * Parses a balance string to a double; returns 0 when unparsable.
     */
    public static double yuanbaoParseInt(String userBalance) {
        try {
            return Double.parseDouble(userBalance);
        } catch (Exception e) {
            return 0;
        }
    }

    /**
     * Formats a numeric string keeping at most two decimal places.
     */
    public static String numberFormat(String number) {
        try {
            double value = (number != null && !number.isEmpty()) ? Double.parseDouble(number) : 0.0;
            return new DecimalFormat("#.##").format(value);
        } catch (Exception e) {
            e.printStackTrace();
            return "";
        }
    }

    /**
     * Converts a whole-yuan money string to longbi.
     * <p>
     * e.g. "3" --> "300"
     */
    public static String money2longBi(String money) {
        try {
            int value = (money != null && !money.isEmpty()) ? Integer.parseInt(money) : 0;
            return String.valueOf(value * scaleLongbi); // was hard-coded 100; use the shared ratio
        } catch (Exception e) {
            e.printStackTrace();
            return "";
        }
    }

    /** Currency type constants. */
    public interface MoneyType {
        int YUANBAO = 0; // 元宝
        int LONGBI = 1;  // 龙币
    }

    /**
     * Parses a yuan-denominated numeric string to int; returns 0 for
     * null/empty/unparsable input.
     */
    public static int numStrToInt(String numStr) {
        try {
            return (numStr != null && !numStr.isEmpty()) ? Integer.parseInt(numStr) : 0;
        } catch (Exception e) {
            e.printStackTrace();
            return 0;
        }
    }
}
|
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.List;
// ...
public List<Product> getProducts() {
List<Product> products = new ArrayList<>();
try (Connection connection = this.getConnection()) {
String query = "SELECT * FROM products";
Statement statement = connection.createStatement();
ResultSet resultSet = statement.executeQuery(query);
while (resultSet.next()) {
int id = resultSet.getInt("id");
String name = resultSet.getString("name");
double price = resultSet.getDouble("price");
String description = resultSet.getString("description");
products.add(new Product(id, name, price, description));
}
} catch (SQLException e) {
// log error
}
return products;
} |
#!/bin/bash
# Start the VNC environment, install Squish (retrying while the license server
# is unreachable), then run the GUI test suite with the same retry policy.
export USER=headless
/dockerstartup/vnc_startup.sh &

# Maximum time to wait for licenses (before installing squish + before running tests).
runtime="30 minute"
endtime=$(date -ud "$runtime" +%s)

result=1
echo "installing squish"
# Retry installing squish if there is an issue connecting to the license server.
while [[ $result -ne 0 ]]
do
    if [[ $(date -u +%s) -gt $endtime ]]
    then
        echo "timeout waiting for license server"
        exit 1
    fi
    /opt/squish.run unattended=1 ide=0 targetdir="${HOME}/squish" licensekey="$LICENSEKEY"
    result=$?
    if [[ $result -ne 0 ]]
    then
        echo "sleeping waiting for license server"
        sleep $((1 + RANDOM % 30))
    fi
done

cp "${HOME}/squish/etc/paths.ini" "${HOME}/squish/etc/paths.ini-backup"
cp /dockerstartup/paths.ini "${HOME}/squish/etc/"
mkdir -p "${HOME}/.squish/ver1/"
cp "${SERVER_INI}" "${HOME}/.squish/ver1/server.ini"

# Set allowed core dump size to an unlimited value, needed for backtracing.
ulimit -c unlimited
# Turn off the Squish crash handler by setting this environment variable.
export SQUISH_NO_CRASHHANDLER=1
/home/headless/squish/bin/squishserver &

# squishrunner waits itself for a license to become available, but fails with
# error 37 if it cannot connect to the license server.
LICENSE_ERROR_RESULT_CODE=37
# Fix: was `result=LICENSE_ERROR_RESULT_CODE` (assigned the literal string,
# not the value of the variable).
result=$LICENSE_ERROR_RESULT_CODE
echo "starting tests"
while true
do
    if [[ $(date -u +%s) -gt $endtime ]]
    then
        echo "timeout waiting for license server"
        exit 1
    fi
    # SQUISH_PARAMETERS is intentionally unquoted: it may carry several arguments.
    ~/squish/bin/squishrunner --testsuite "${CLIENT_REPO}/test/gui/" ${SQUISH_PARAMETERS} --exitCodeOnFail 1
    result=$?
    if [[ $result -eq $LICENSE_ERROR_RESULT_CODE ]]
    then
        echo "sleeping waiting for license server"
        sleep $((1 + RANDOM % 30))
    else
        exit $result
    fi
done
#!/bin/bash
# beakerlib test suite: verifies `tmt` test selection — by name, by --filter,
# by --condition, duplicate handling, and the `--name .` shortcut.
# Runs against the fmf metadata under ./data (pushed in setup).
. /usr/share/beakerlib/beakerlib.sh || exit 1

rlJournalStart
    rlPhaseStartSetup
        rlRun "output=\$(mktemp)" 0 "Create output file"
        rlRun "set -o pipefail"
        rlRun "pushd data"
    rlPhaseEnd

    # Select by name
    for tmt in 'tmt test ls' 'tmt test show'; do
        rlPhaseStartTest "$tmt"
            rlRun "$tmt | tee $output"
            rlAssertGrep "/tests/enabled/default" $output
            rlAssertGrep "/tests/tag/default" $output
            rlAssertGrep "/tests/tier/default" $output
        rlPhaseEnd
        rlPhaseStartTest "$tmt <name>"
            rlRun "$tmt tier | tee $output"
            rlAssertNotGrep "/tests/enabled/default" $output
            rlAssertNotGrep "/tests/tag/default" $output
            rlAssertGrep "/tests/tier/default" $output
        rlPhaseEnd
        rlPhaseStartTest "$tmt non-existent"
            rlRun "$tmt non-existent | tee $output"
            rlRun "[[ $(wc -l <$output) == 0 ]]" 0 "Check no output"
        rlPhaseEnd
    done

    for name in '-n' '--name'; do
        rlPhaseStartTest "tmt run test $name <name>"
            tmt='tmt run -rv discover finish'
            # Existing
            rlRun "$tmt test $name enabled | tee $output"
            rlAssertGrep "/tests/enabled/default" $output
            rlAssertNotGrep "/tests/enabled/disabled" $output
            rlAssertNotGrep "/tests/tag/default" $output
            rlAssertNotGrep "/tests/tier/default" $output
            # Missing
            rlRun "$tmt test $name non-existent | tee $output"
            rlAssertGrep "No tests found" $output
            # Using 'test --name' overrides 'test' in discover
            rlRun "$tmt test $name tier/one | tee $output"
            rlAssertGrep "/tests/tier/one" $output
            rlAssertNotGrep "/tests/tier/two" $output
        rlPhaseEnd
    done

    rlPhaseStartTest "Select tests using a filter"
        # Enabled
        for bool in True true; do
            rlRun "tmt test ls --filter enabled:$bool | tee $output"
            rlAssertGrep '/tests/enabled/default' $output
            rlAssertGrep '/tests/enabled/defined' $output
            rlAssertNotGrep '/tests/enabled/disabled' $output
        done
        for bool in False false; do
            rlRun "tmt test ls --filter enabled:False | tee $output"
            rlAssertNotGrep '/tests/enabled/default' $output
            rlAssertNotGrep '/tests/enabled/defined' $output
            rlAssertGrep '/tests/enabled/disabled' $output
        done
        for tmt in 'tmt test ls' 'tmt run -rv discover finish test' \
                'tmt run -rv plans --name /plans/filtered discover finish test'; do
            # Tag
            rlRun "$tmt --filter tag:slow | tee $output"
            rlAssertNotGrep '/tests/tag/default' $output
            rlAssertGrep '/tests/tag/defined' $output
            rlAssertNotGrep '/tests/tag/empty' $output
            rlRun "$tmt --filter tag:-slow | tee $output"
            rlAssertGrep '/tests/enabled/default' $output
            rlAssertNotGrep '/tests/tag/defined' $output
            rlAssertGrep '/tests/tag/empty' $output
            # Tier
            rlRun "$tmt --filter tier:1 | tee $output"
            rlAssertGrep '/tests/tier/one' $output
            rlAssertNotGrep '/tests/tier/two' $output
            rlAssertNotGrep '/tests/tier/default' $output
            rlRun "$tmt --filter tier:-1 | tee $output"
            rlAssertNotGrep '/tests/tier/one' $output
            rlAssertGrep '/tests/tier/two' $output
            rlAssertGrep '/tests/tier/default' $output
            rlRun "$tmt --filter tier:1,2 | tee $output"
            rlAssertGrep '/tests/tier/one' $output
            rlAssertGrep '/tests/tier/two' $output
            rlAssertNotGrep '/tests/tier/default' $output
            rlRun "$tmt -f tier:-1 -f tier:-2 | tee $output"
            rlAssertNotGrep '/tests/tier/one' $output
            rlAssertNotGrep '/tests/tier/two' $output
            rlAssertGrep '/tests/tier/default' $output
        done
    rlPhaseEnd

    rlPhaseStartTest "Select tests using a condition"
        # Enabled
        rlRun "tmt test ls --condition 'enabled == True' | tee $output"
        rlAssertGrep '/tests/enabled/default' $output
        rlAssertGrep '/tests/enabled/defined' $output
        rlAssertNotGrep '/tests/enabled/disabled' $output
        rlRun "tmt test ls --condition 'enabled == False' | tee $output"
        rlAssertNotGrep '/tests/enabled/default' $output
        rlAssertNotGrep '/tests/enabled/defined' $output
        rlAssertGrep '/tests/enabled/disabled' $output
        for tmt in 'tmt test ls' 'tmt run -rv discover finish test'; do
            # Tag
            rlRun "$tmt --condition '\"slow\" in tag' | tee $output"
            rlAssertNotGrep '/tests/tag/default' $output
            rlAssertGrep '/tests/tag/defined' $output
            rlAssertNotGrep '/tests/tag/empty' $output
            rlRun "$tmt --condition '\"slow\" not in tag' | tee $output"
            rlAssertGrep '/tests/enabled/default' $output
            rlAssertNotGrep '/tests/tag/defined' $output
            rlAssertGrep '/tests/tag/empty' $output
            # Tier
            rlRun "$tmt --condition 'tier is not None' | tee $output"
            rlAssertGrep '/tests/tier/one' $output
            rlAssertGrep '/tests/tier/two' $output
            rlAssertNotGrep '/tests/tier/default' $output
            rlRun "$tmt -c 'tier and int(tier) > 1' | tee $output"
            rlAssertNotGrep '/tests/tier/one' $output
            rlAssertGrep '/tests/tier/two' $output
            rlAssertNotGrep '/tests/tier/default' $output
        done
    rlPhaseEnd

    rlPhaseStartTest "Select duplicate tests preserving tests ordering"
        # 'tmt test ls' lists test name once
        rlRun "tmt tests ls tier | tee $output"
        rlAssertGrep '/tests/tier/two' $output
        rlAssertEquals "/tests/tier/two is listed only once" 1 $( grep -c 'tier/two' $output )
        rlRun "tmt tests ls tier/two tier/two | tee $output"
        rlAssertGrep '/tests/tier/two' $output
        rlAssertEquals "/tests/tier/two is listed only once" 1 $( grep -c 'tier/two' $output )
        # 'tmt test show' lists test name once
        rlRun "tmt tests show tier | tee $output"
        rlAssertGrep '/tests/tier/two' $output
        rlAssertEquals "/tests/tier/two is listed only once" 1 $( grep -c 'tier/two' $output )
        # Prepare run dir and common command line
        run=$(mktemp -d)
        tmt="tmt run --id $run --scratch plans --name duplicate discover -v"
        # 'tmt run discover' lists duplicate test names preserving order
        rlRun "$tmt | tee $output"
        rlAssertGrep 'tests: /tier/two, /tier/one and /tier/two' $output
        rlAssertGrep 'summary: 3 tests selected' $output
        rlRun "grep -A 1 summary $output | tail -1 | grep '/tests/tier/two'"
        rlRun "grep -A 2 summary $output | tail -1 | grep '/tests/tier/one'"
        rlRun "grep -A 3 summary $output | tail -1 | grep '/tests/tier/two'"
        # tests --name filters discovered test names (/two is discovered twice)
        rlRun "$tmt -h fmf tests --name two | tee $output"
        rlAssertGrep 'tests: /tier/two, /tier/one and /tier/two' $output
        rlAssertGrep 'summary: 2 tests selected' $output
        rlRun "grep -A 1 summary $output | tail -1 | grep '/tests/tier/two'"
        rlRun "grep -A 2 summary $output | tail -1 | grep '/tests/tier/two'"
        # tests --name doesn't effect order of discovered tests
        rlRun "$tmt -h fmf tests --name one --name two | tee $output"
        rlAssertGrep 'tests: /tier/two, /tier/one and /tier/two' $output
        rlAssertGrep 'summary: 3 tests selected' $output
        rlRun "grep -A 1 summary $output | tail -1 | grep '/tests/tier/two'"
        rlRun "grep -A 2 summary $output | tail -1 | grep '/tests/tier/one'"
        rlRun "grep -A 3 summary $output | tail -1 | grep '/tests/tier/two'"
        # discover --test redefines duplicate plan so two is discovered just once
        rlRun "$tmt -h fmf --test two | tee $output"
        rlAssertGrep 'tests: two' $output
        rlAssertGrep 'summary: 1 test selected' $output
        rlRun "grep -A 1 summary $output | tail -1 | grep '/tests/tier/two'"
        # redefine --test via command line same as was in the plan
        rlRun "$tmt -h fmf --test two --test two | tee $output"
        rlAssertGrep 'tests: two and two' $output
        rlAssertGrep 'summary: 2 tests selected' $output
        rlRun "grep -A 1 summary $output | tail -1 | grep '/tests/tier/two'"
        rlRun "grep -A 2 summary $output | tail -1 | grep '/tests/tier/two'"
        # Clean up the run
        rlRun "rm -rf $run" 0 "Clean up run"
    rlPhaseEnd

    rlPhaseStartTest "Select by test --name . "
        rlRun "pushd subdir"
        run=$(mktemp -d)
        rlRun "tmt -c subdir=1 run --id $run discover tests --name ."
        # only /subdir test is selected by /plans/all and /plans/filtered
        for plan in all filtered; do
            rlAssertEquals "just /subdir in $plan" \
                "$(grep '^/' $run/plans/$plan/discover/tests.yaml)" "/subdir:"
        done
        # other two plans don't select any test
        for plan in duplicate selected; do
            rlAssertEquals "no test selected in $plan" \
                "$(cat $run/plans/$plan/discover/tests.yaml)" "{}"
        done
        rlRun "rm -rf $run" 0 "Clean up run"
        rlRun "popd"
    rlPhaseEnd

    rlPhaseStartCleanup
        rlRun "popd"
        rlRun "rm $output" 0 "Remove output file"
    rlPhaseEnd
rlJournalEnd
|
//
// INDCollectionTimelineEndpointMarker.h
// <NAME>
//
// Created by <NAME> on 2014-04-09.
// Copyright (c) 2014 <NAME>. All rights reserved.
//
/**
 * Draws a marker that shows the start or end of a timeline.
 *
 * NOTE(review): subclasses UIKit's UIView; the UIKit #import is expected to
 * come from elsewhere (e.g. a prefix header) — confirm.
 */
@interface INDCollectionTimelineEndpointMarker : UIView
@end
|
package com.arhscube.gameofcode.datapublic;
import java.net.SocketTimeoutException;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.nodes.TextNode;
import org.jsoup.select.Elements;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Crawls dataset listings from data.public.lu and extracts metadata
 * (title, description, author, keywords, distributions) via jsoup.
 */
public class Crawler {
    private static final int TIMEOUT = 5 * 1000; // jsoup connect/read timeout, ms

    static Logger log = LoggerFactory.getLogger(Crawler.class);

    /**
     * Crawls one listing page and resolves each dataset found on it.
     *
     * @param page      1-based listing page index
     * @param stopQuick stop after the first dataset (used by tests)
     * @return datasets found, empty list on timeout
     */
    public static List<OpenDataset> crawlAPage(int page, boolean stopQuick) throws Exception {
        List<OpenDataset> ret = new ArrayList<>();
        URL url = new URL("https://data.public.lu/en/datasets/?page=" + page);
        try {
            Document doc = Jsoup.parse(url, TIMEOUT);
            log.info("Doc '{}' parsed from '{}'", doc.title(), url);
            Elements datasets = doc.select("li[class~=dataset-result]");
            for (Element dataset : datasets) {
                Element a = dataset.getElementsByTag("a").first();
                if (a == null) {
                    // Guard: malformed entry without a link previously caused an NPE.
                    log.warn("Skipping dataset entry without link on page {}", page);
                    continue;
                }
                String link = a.attr("href");
                URL datasetUrl = new URL(url, link);
                String title = a.attr("title");
                Element descEl = dataset.getElementsByClass("result-description").first();
                String description = descEl != null ? descEl.text() : "";
                log.info("crawled {}\n {}\n href='{}'", title, description, link);
                log.info("Could get more from '{}'", datasetUrl);
                OpenDataset ds = getDataset(datasetUrl);
                if (ds != null) {
                    // getDataset returns null on timeout; don't add nulls to the result.
                    ret.add(ds);
                }
                // Just get one for the test...
                if (stopQuick)
                    break;
            }
        } catch (SocketTimeoutException e) {
            log.error("Url {} timed out", url);
            return new ArrayList<>();
        }
        return ret;
    }

    /**
     * Fetches a single dataset page and extracts its itemprop-annotated metadata.
     *
     * @return the populated dataset, or {@code null} on timeout
     */
    public static OpenDataset getDataset(URL url) throws Exception {
        OpenDataset ret = new OpenDataset();
        ret.origin = url;
        try {
            Document doc = Jsoup.parse(url, TIMEOUT);
            ret.title = doc.title();
            log.info("Doc '{}' parsed from '{}'", doc.title(), url);
            // Description: first <p> inside the itemprop="description" container.
            // Both lookups are guarded — either may be missing on malformed pages.
            Element descContainer = doc.getElementsByAttributeValue("itemprop", "description").first();
            Element firstPara = descContainer != null ? descContainer.getElementsByTag("p").first() : null;
            if (firstPara != null) {
                StringBuilder description = new StringBuilder();
                for (TextNode descr : firstPara.textNodes()) {
                    description.append(descr.text());
                    description.append(" ");
                }
                ret.description = description.toString();
                log.info("Extracted description :'{}'", description);
            }
            String author = getItemPropAttribute(doc, "author", "title");
            ret.author = author;
            log.info("Extracted provider :'{}'", author);
            for (Element keywordElement : doc.getElementsByAttributeValue("itemprop", "keywords")) {
                String keyword = keywordElement.attr("content");
                log.info("Keyword extracted {}", keyword);
                ret.keywords.add(keyword);
            }
            log.debug("End of Eurovoc extraction");
            for (Element dist : doc.getElementsByAttributeValue("itemprop", "distribution")) {
                Element a = dist.getElementsByAttributeValue("itemprop", "url").first();
                if (a == null) {
                    // Guard: distribution without a url element previously NPE'd.
                    log.warn("Skipping distribution without url on {}", url);
                    continue;
                }
                String href = a.attr("href");
                Source src = new Source();
                src.format = getItemPropContent(dist, "encodingFormat");
                src.description = getItemPropContent(dist, "description");
                src.createdOn = getItemPropContent(dist, "dateCreated");
                src.size = getItemPropContent(dist, "contentSize");
                src.modifiedOn = getItemPropContent(dist, "dateModified");
                src.link = new URL(href);
                ret.sources.add(src);
                log.info("Distro {} ", src);
            }
        } catch (SocketTimeoutException e) {
            log.error("Url {} timed out", url);
            return null;
        }
        return ret;
    }

    /** Shorthand for {@link #getItemPropAttribute} reading the "content" attribute. */
    private static String getItemPropContent(Element el, String property) {
        return getItemPropAttribute(el, property, "content");
    }

    /**
     * Returns the given attribute of the first descendant with
     * itemprop=property, or {@code null} when absent.
     */
    private static String getItemPropAttribute(Element el, String property, String attribute) {
        Elements els = el.getElementsByAttributeValue("itemprop", property);
        if (els != null && els.size() > 0) {
            return els.first().attr(attribute);
        }
        return null;
    }
}
|
#!/bin/bash
# Seed Concourse team- and pipeline-level secrets into Vault or CredHub.
# set the path for the secrets below to be created in vault or credhub
export concourse_root_secrets_path="/concourse"
export concourse_team_name="team-name"
export concourse_pipeline_name="pks-kubectl-image"
# VAULT or CREDHUB - targeted secrets management system
export targeted_system="VAULT"

# This script assumes that:
# 1) the credhub or vault CLI is installed
# 2) you setup your vault or credhub target and login commands prior to invoking it
# e.g. for VAULT
#   export VAULT_ADDR=https://myvaultdomain:8200
#   export VAULT_SKIP_VERIFY=true
#   export VAULT_TOKEN=vault-token
#
# e.g. for CREDHUB
#   credhub login -s credhub-server-uri -u username -p password --skip-tls-validation

##
## TEAM level secrets (shared by all pipelines in that team)
## Each entry is "key"::"value".
##
export team_secrets=(
    "dockerhub-username"::"dockerhub-username"
    "dockerhub-password"::"dockerhub-password"
    "pivnet_token"::"pivotal-network-token"
)

##
## PIPELINE LEVEL secrets (specific to the pipeline)
##
export pipeline_secrets=(
    # "^1\.0\..*$"
    "pks_product_version"::"^.*$"
    "pks_product_slug"::"pivotal-container-service"
)

main () {
    # team level secrets
    concourse_team_level_secrets_path="${concourse_root_secrets_path}/${concourse_team_name}"
    writeCredentials "${concourse_team_level_secrets_path}" "${team_secrets[*]}"
    # pipeline level secrets
    concourse_pipeline_level_secrets_path="${concourse_team_level_secrets_path}/${concourse_pipeline_name}"
    writeCredentials "${concourse_pipeline_level_secrets_path}" "${pipeline_secrets[*]}"
}

# writeCredentials <secrets-path> <space-separated "key"::"value" entries>
# NOTE(review): the array is flattened with [*] and re-split word by word, so
# keys/values must not contain whitespace — confirm this constraint is acceptable.
writeCredentials () {
    secretsPath=${1}
    # Intentional unquoted expansion: re-splits the flattened entries into an array.
    secretsObject=(${2})
    for i in "${secretsObject[@]}"
    do
        KEY="${i%%::*}"     # text before the first '::'
        VALUE="${i##*::}"   # text after the last '::'
        echo "Creating secret for [$KEY]"
        if [[ $targeted_system == "VAULT" ]]; then
            vault write "${secretsPath}/${KEY}" value="${VALUE}"
        else # CREDHUB
            credhub set -n "${secretsPath}/${KEY}" -v "${VALUE}"
        fi
    done
}

main
|
<reponame>fancyfx/fx-theme-blog<gh_stars>0
function sortPages(pages) {
if(!pages.length) return []
const _pages = pages.filter((element) => {
if(typeof element.frontmatter.home === "object" && element.frontmatter.home)
return false
if(element.regularPath === '/')
return false
return true
}).sort((a,b) => {
if( typeof b.lastUpdated === "object"
&& typeof a.lastUpdated === "object"
&& typeof a.lastUpdated.timestamp === "number"
&& typeof b.lastUpdated.timestamp === "number")
{
return b.lastUpdated.timestamp - a.lastUpdated.timestamp
}
else
return 0
})
return _pages
}
// VuePress client enhancement: registers the infinite-scroll plugin
// (browser only) and exposes the sorted article list on every component.
export default ({
  Vue,      // the Vue constructor VuePress is using
  options,  // options attached to the root instance
  router,   // the router instance of the current app
  siteData, // site metadata
  isServer
}) => {
  if (!isServer) {
    // Dynamic import keeps the DOM-dependent plugin out of the SSR bundle.
    import('./components/vue-infinite-scroll').then(infiniteScroll => {
      Vue.use(infiniteScroll.default)
    })
  }
  Vue.mixin({
    computed: {
      // All listed pages, newest first (see sortPages above).
      $articleLsts() {
        return sortPages(siteData.pages)
      }
    }
  })
}
<filename>core/authentication/provider_userdetails.go<gh_stars>0
// Copyright (C) 2019-2021, <NAME>.
// @author xiongfa.li
// @version V1.0
// Description:
package authentication
import (
"github.com/xfali/neve-security/core"
"github.com/xfali/neve-security/core/authority"
"github.com/xfali/neve-security/core/userdetails"
)
// Keys accepted by UserDetailsAuthenticationProvider.Set to inject
// optional collaborators.
const (
	KeyProviderPreChecker      = "authentication.provider.prechecker.set"
	KeyProviderPostChecker     = "authentication.provider.postchecker.set"
	KeyProviderAuthorityMapper = "authentication.provider.authority.mapper.set"
)

// UserDetailsAuthenticationChecker loads user details and performs the
// credential check for a username/password authentication attempt.
type UserDetailsAuthenticationChecker interface {
	Retrieve(username string, authentication *UsernamePasswordAuthentication) (userdetails.UserDetails, error)
	AdditionalAuthenticationCheck(username string, authentication *UsernamePasswordAuthentication) error
}

// UserDetailsAuthenticationProvider authenticates username/password
// credentials via an embedded UserDetailsAuthenticationChecker, with
// pre/post checks and authority mapping.
type UserDetailsAuthenticationProvider struct {
	UserDetailsAuthenticationChecker

	preChecker  userdetails.Checker // runs before the credential check
	postChecker userdetails.Checker // runs after the credential check
	mapper      authority.Mapper    // maps granted authorities on success
}
// NewUserDetailsAuthenticationProvider builds a provider with default
// pre/post checkers and authority mapper, then applies the given options
// (see ProviderOptSet*).
func NewUserDetailsAuthenticationProvider(opts ...core.Opt) *UserDetailsAuthenticationProvider {
	ret := &UserDetailsAuthenticationProvider{
		preChecker:  userdetails.NewDefaultChecker(),
		postChecker: userdetails.NewDefaultChecker(),
		mapper:      authority.NewDefaultMapper(),
	}
	for _, opt := range opts {
		opt(ret)
	}
	return ret
}
// Set injects a collaborator keyed by one of the KeyProvider* constants.
// NOTE(review): unchecked type assertions panic on a wrong value type, and
// unknown keys are silently ignored — confirm both are intended.
func (p *UserDetailsAuthenticationProvider) Set(key string, value interface{}) {
	switch key {
	case KeyProviderPreChecker:
		p.preChecker = value.(userdetails.Checker)
	case KeyProviderPostChecker:
		p.postChecker = value.(userdetails.Checker)
	case KeyProviderAuthorityMapper:
		p.mapper = value.(authority.Mapper)
	}
}
// Authenticate validates a username/password authentication attempt:
// retrieve user details, run the pre-check, verify credentials, run the
// post-check, then build the success authentication with mapped authorities.
// Panics if auth is not a *UsernamePasswordAuthentication (matching the
// original behavior).
func (p *UserDetailsAuthenticationProvider) Authenticate(auth Authentication) (Authentication, error) {
	userPwAuth, ok := auth.(*UsernamePasswordAuthentication)
	if !ok {
		panic("Support UsernamePasswordAuthentication only")
	}
	username := userPwAuth.GetName()
	// Renamed local from "userdetails" to "details": the old name shadowed
	// the imported userdetails package inside this function.
	details, err := p.Retrieve(username, userPwAuth)
	if err != nil {
		return nil, err
	}
	if err = p.preChecker.Check(details); err != nil {
		return nil, err
	}
	if err = p.AdditionalAuthenticationCheck(username, userPwAuth); err != nil {
		return nil, err
	}
	if err = p.postChecker.Check(details); err != nil {
		return nil, err
	}
	return p.createSuccessAuthentication(auth, details, details), nil
}
// createSuccessAuthentication builds the post-login authentication, carrying
// the original credentials, the authenticated principal, and the user's
// authorities run through the configured mapper.
func (p *UserDetailsAuthenticationProvider) createSuccessAuthentication(auth Authentication, principal interface{}, user userdetails.UserDetails) Authentication {
	return NewUsernamePasswordAuthentication(auth.GetCredentials(), principal, p.mapper.MapAuthorities(user.GetAuthorities()))
}

// Support is not implemented for this provider and always panics.
// NOTE(review): callers relying on a Support check will crash — confirm.
func (p *UserDetailsAuthenticationProvider) Support(o interface{}) bool {
	panic("Not support")
}
// ProviderOptSetPreChecker overrides the checker that runs before the
// credential check.
func ProviderOptSetPreChecker(checker userdetails.Checker) core.Opt {
	return func(setter core.Setter) {
		setter.Set(KeyProviderPreChecker, checker)
	}
}

// ProviderOptSetPostChecker overrides the checker that runs after the
// credential check.
func ProviderOptSetPostChecker(checker userdetails.Checker) core.Opt {
	return func(setter core.Setter) {
		setter.Set(KeyProviderPostChecker, checker)
	}
}

// ProviderOptSetAuthorityMapper overrides the mapper applied to the user's
// authorities on successful authentication.
func ProviderOptSetAuthorityMapper(mapper authority.Mapper) core.Opt {
	return func(setter core.Setter) {
		setter.Set(KeyProviderAuthorityMapper, mapper)
	}
}
|
import os
import pickle
import numpy as np
import torch.nn as nn
import torch
import math
import random
NEAR_INF = 1e20
NEAR_INF_FP16 = 65504
def set_random_seed(random_seed):
    '''
    Seed every RNG source used in this project (python `random`, numpy,
    torch CPU and CUDA) and force deterministic cuDNN kernels.
    '''
    for seed_fn in (random.seed, np.random.seed, torch.manual_seed, torch.cuda.manual_seed):
        seed_fn(random_seed)
    torch.backends.cudnn.deterministic = True
def save(toBeSaved, filename, mode='wb'):
    '''
    Pickle ``toBeSaved`` into ``filename``, creating parent directories
    as needed.

    :param toBeSaved: any picklable object
    :param filename: destination path (parent dirs created if missing)
    :param mode: file open mode, normally ``'wb'``
    '''
    dirname = os.path.dirname(filename)
    # Guard against a bare filename: dirname is then '' and the old
    # `os.makedirs('')` raised FileNotFoundError. exist_ok also removes
    # the check-then-create race of the previous os.path.exists test.
    if dirname:
        os.makedirs(dirname, exist_ok=True)
    # Context manager guarantees the handle is closed even if dump fails.
    with open(filename, mode) as file:
        # protocol 4 allows large size objects; it's the default since python 3.8
        pickle.dump(toBeSaved, file, protocol=4)
def load(filename, mode='rb'):
    '''
    Read back and return the object pickled in ``filename``.
    '''
    with open(filename, mode) as handle:
        return pickle.load(handle)
def pad_sents(sents, pad_token=0, max_len=512):
    '''
    Pad (and truncate) token-id lists to a common length.

    The common length is min(longest sentence, max_len); sentences longer
    than that are truncated, shorter ones are right-padded with pad_token.

    :param sents: list of lists of token ids
    :param pad_token: filler value for padding positions
    :param max_len: hard cap on the padded length
    :return: (padded sentences, effective length of each sentence)
    '''
    # Guard: the original crashed on empty input via max([]).
    if not sents:
        return [], []
    lens = [len(sent) for sent in sents]
    limit = min(max(lens), max_len)
    sents_padded = []
    new_len = []
    for sent, l in zip(sents, lens):
        l = min(l, limit)  # truncate over-long sentences
        new_len.append(l)
        sents_padded.append(sent[:l] + [pad_token] * (limit - l))
    return sents_padded, new_len
def get_mask(sents, unmask_idx=1, mask_idx=0, max_len=512):
    '''
    Build an attention-style mask matching the layout produced by pad_sents:
    unmask_idx over real tokens, mask_idx over padding, width
    min(longest sentence, max_len).

    :param sents: list of lists of token ids
    :param unmask_idx: value marking real-token positions
    :param mask_idx: value marking padding positions
    :param max_len: hard cap on the mask width
    :return: list of mask rows, one per sentence
    '''
    # Guard: the original crashed on empty input via max([]).
    if not sents:
        return []
    lens = [len(sent) for sent in sents]
    limit = min(max(lens), max_len)
    mask = []
    for l in lens:
        l = min(l, limit)  # truncated sentences are fully unmasked
        mask.append([unmask_idx] * l + [mask_idx] * (limit - l))
    return mask
def get_lens(sents):
    '''Return the length of each sentence in ``sents``.'''
    return list(map(len, sents))
def get_max_len(sents):
    '''Return the length of the longest sentence in ``sents``.'''
    return max(map(len, sents))
package io.ejf.asyncfutures;
import org.apache.http.client.fluent.Content;
import org.apache.http.impl.bootstrap.HttpServer;
import java.util.concurrent.Future;
/**
 * Demonstrates blocking on two concurrently-submitted HTTP requests with
 * {@link Future#get()} and measuring the total blocked time.
 */
public class MultiBlockingExample {
    static final SharedMethods.Log log_ = new SharedMethods.Log(MultiBlockingExample.class);

    /**
     * Submits two requests to the shared executor, blocks on both futures,
     * and joins the bodies as "&lt;a&gt;/&lt;appended&gt;/&lt;b&gt;".
     *
     * @param appended text placed between the two response bodies
     * @return the joined string, or "" if either future fails
     */
    public static String doBlockingRequest(String appended) {
        log_.log("doBlockingRequest");
        long startTime = System.currentTimeMillis();
        // Both requests run concurrently; total wait is roughly the slower one.
        Future<Content> futureA = SharedMethods.requestService.submit(() -> SharedMethods.request());
        Future<Content> futureB = SharedMethods.requestService.submit(() -> SharedMethods.request());
        String result = "";
        try {
            // get() blocks until each response is available.
            result = futureA.get().asString() + "/" + appended + "/" + futureB.get().asString();
        } catch (Exception e) {
            e.printStackTrace();
        }
        long blockedTime = (System.currentTimeMillis() - startTime);
        log_.log("blockedTime: " + blockedTime + "ms");
        return result;
    }

    /** Starts the demo server, runs two chained request rounds, tears down. */
    public static void main(String[] args) {
        log_.log("main");
        HttpServer server = SharedMethods.server();
        String result = MultiBlockingExample.doBlockingRequest("0");
        log_.log("request content: " + result);
        // Second round feeds the first round's output back in.
        result = MultiBlockingExample.doBlockingRequest(result);
        log_.log("request content: " + result);
        SharedMethods.teardown(server);
        log_.log("finish");
    }
}
|
import React from "react";
import { Comment } from "semantic-ui-react";
import { useRouteMatch } from "react-router-dom";
import CommentEntry from "./CommentEntry";
// Transcript renders the sound clips as a semantic-ui comment thread.
// When the URL matches /clip/:userId/:soundId, the matching entry is
// highlighted and scrolled into view.
const Transcript = ({ users, sounds }) => {
  // useRouteMatch returns null off the clip route; fall back to empty params.
  const match = useRouteMatch({
    path: `/clip/:userId/:soundId`,
  }) || { params: {} };
  const { userId, soundId } = match.params;
  React.useEffect(() => {
    if (sounds.length > 0 && soundId) {
      // Scroll the DOM child at the clip's index into view once sounds load.
      // NOTE(review): assumes sounds order matches the rendered children and
      // that findIndex succeeds (-1 would index from the end) — confirm.
      const childIndex = sounds.findIndex((sound) => sound._id === soundId);
      document
        .getElementsByClassName("transcript")[0]
        .children[childIndex].scrollIntoView({ behavior: "smooth" });
    }
  }, [sounds, soundId]);
  return (
    <Comment.Group className="transcript">
      {sounds.map((sound) => (
        <CommentEntry link key={sound._id} {...sound} users={users} highlight={soundId === sound._id} />
      ))}
    </Comment.Group>
  );
};

export default Transcript;
|
def sum_squares(nums):
    """
    Return the sum of the squares of the numbers in ``nums``.

    :param nums: iterable of numbers
    :return: sum of each element squared; 0 for empty input
    """
    # Built-in sum over a generator replaces the manual accumulator loop.
    return sum(num ** 2 for num in nums)
# Demo: 2**2 + 3**2 + 4**2 == 29
nums = [2, 3, 4]
print(sum_squares(nums))
def detectCycle(graph):
    """
    Return True if the directed graph contains a cycle, else False.

    :param graph: adjacency dict mapping node -> iterable of successor nodes
    """
    visited = set()
    path = []
    return any(
        node not in visited and dfsc(node, graph, visited, path)
        for node in graph.keys()
    )


def dfsc(v, graph, visited, stack):
    """
    DFS helper: explore from ``v``; report True as soon as an edge points
    back onto the current recursion path (``stack``).
    """
    visited.add(v)
    stack.append(v)
    found = False
    for successor in graph[v]:
        on_path = successor in stack
        if on_path or (successor not in visited and dfsc(successor, graph, visited, stack)):
            found = True
            break
    if not found:
        stack.pop()
    return found
#!/bin/sh
# Deploy the Rally/Mojio ARM template into a resource group.
# Usage: ./deploy.sh <resource-group> [region]   (region defaults to westus2)
#PARAMETERS_FILE=$1
RESOURCE_GROUP=$1
default='westus2'
# NOTE(review): ${2-$default} only substitutes when $2 is unset; an empty
# second argument passes through — use ${2:-$default} if that matters.
REGION=${2-$default}
#az group deployment create --template-file uniqueString.json --resource-group $RESOURCE_GROUP --output table
#python deployment_ms.py parameters/granular_rally_mojio.yaml
#az group create --name $RESOURCE_GROUP --location $REGION --output table
az group deployment create -n RallyDeployment --mode Incremental --verbose --template-file generatedTemplate_rally_mojio.json --parameters @parameters/parameters.json --resource-group $RESOURCE_GROUP --output table
<gh_stars>100-1000
# frozen_string_literal: true

# Gem specification for Gemsmith; identity (name/version) comes from
# lib/gemsmith/identity.
require_relative "lib/gemsmith/identity"

Gem::Specification.new do |spec|
  spec.name = Gemsmith::Identity::NAME
  spec.version = Gemsmith::Identity::VERSION
  spec.platform = Gem::Platform::RUBY
  spec.authors = ["<NAME>"]
  spec.email = ["<EMAIL>"]
  spec.homepage = "https://www.alchemists.io/projects/gemsmith"
  spec.summary = "A command line interface for smithing Ruby gems."
  spec.license = "Apache-2.0"

  spec.metadata = {
    "bug_tracker_uri" => "https://github.com/bkuhlmann/gemsmith/issues",
    "changelog_uri" => "https://www.alchemists.io/projects/gemsmith/changes.html",
    "documentation_uri" => "https://www.alchemists.io/projects/gemsmith",
    "source_code_uri" => "https://github.com/bkuhlmann/gemsmith"
  }

  # Gem signing: uses the local default key/cert when present.
  spec.signing_key = Gem.default_key_path
  spec.cert_chain = [Gem.default_cert_path]

  spec.required_ruby_version = "~> 3.0"
  spec.add_dependency "git_plus", "~> 0.6"
  spec.add_dependency "milestoner", "~> 12.0"
  spec.add_dependency "pragmater", "~> 9.0"
  spec.add_dependency "refinements", "~> 8.5"
  spec.add_dependency "rubocop", "~> 1.20"
  spec.add_dependency "runcom", "~> 7.0"
  spec.add_dependency "thor", "~> 0.20"
  spec.add_dependency "tocer", "~> 12.1"
  spec.add_dependency "versionaire", "~> 9.0"
  spec.add_dependency "zeitwerk", "~> 2.5"

  # FNM_DOTMATCH also packages dotfiles under lib/.
  spec.files = Dir.glob "lib/**/*", File::FNM_DOTMATCH
  spec.extra_rdoc_files = Dir["README*", "LICENSE*"]
  spec.executables << "gemsmith"
  spec.require_paths = ["lib"]
end
|
# Launch training on GPU 0, detached via nohup, with stdout+stderr captured
# to train_log/mobilenet.file. NOTE(review): the config name suggests a
# MobileNetV3+FPN+DB-head text-detection setup on ICDAR2015 — confirm.
CUDA_VISIBLE_DEVICES=0 nohup python3 tools/train.py --config_file "config/icdar2015_mobilenetv3_FPN_DBhead_polyLR.yaml" > train_log/mobilenet.file 2>&1 &
|
<gh_stars>1-10
//
// SGDatePickerSheetView.h
// SGPickerView_DatePickerExample
//
// Created by Sorgle on 16/9/23.
// Copyright © 2016年 Sorgle. All rights reserved.
//
// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - //
//
// - - 如在使用中, 遇到什么问题或者有更好建议者, 请于<EMAIL>邮箱联系 - - - - - //
// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
// - - GitHub下载地址 https://github.com/kingsic/SGPickerView_DatePicker.git - - //
//
// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - //
#import <UIKit/UIKit.h>

/** Bottom-sheet view wrapping a UIDatePicker with cancel/confirm buttons. */
@interface SGDatePickerSheetView : UIView

@property (weak, nonatomic) IBOutlet UIDatePicker *datePicker;

/** Attach a target/action for the cancel button tap. */
- (void)addTargetCancelBtn:(id)target action:(SEL)action;

/** Attach a target/action for the confirm button tap. */
- (void)addTargetSureBtn:(id)target action:(SEL)action;

@end
|
#!/usr/bin/env bash
# Lint configuration: flags calls to locale-dependent C/C++ functions.
# Forces the C locale so the linter itself behaves deterministically.
export LC_ALL=C

# Grep patterns ("file:regex") for existing call sites that are known and
# tolerated; new matches outside this list should fail the lint.
KNOWN_VIOLATIONS=(
    "src/base58.cpp:.*isspace"
    "src/bitcoin-tx.cpp.*stoul"
    "src/bitcoin-tx.cpp.*trim_right"
    "src/bitcoin-tx.cpp:.*atoi"
    "src/core_read.cpp.*is_digit"
    "src/dbwrapper.cpp.*stoul"
    "src/dbwrapper.cpp:.*vsnprintf"
    "src/httprpc.cpp.*trim"
    "src/init.cpp:.*atoi"
    "src/netbase.cpp.*to_lower"
    "src/qt/rpcconsole.cpp:.*atoi"
    "src/qt/rpcconsole.cpp:.*isdigit"
    "src/rest.cpp:.*strtol"
    "src/rpc/server.cpp.*to_upper"
    "src/test/dbwrapper_tests.cpp:.*snprintf"
    "src/test/getarg_tests.cpp.*split"
    "src/torcontrol.cpp:.*atoi"
    "src/torcontrol.cpp:.*strtol"
    "src/uint256.cpp:.*isspace"
    "src/uint256.cpp:.*tolower"
    "src/util.cpp:.*atoi"
    "src/util.cpp:.*fprintf"
    "src/util.cpp:.*tolower"
    "src/utilmoneystr.cpp:.*isdigit"
    "src/utilmoneystr.cpp:.*isspace"
    "src/utilstrencodings.cpp:.*atoi"
    "src/utilstrencodings.cpp:.*isspace"
    "src/utilstrencodings.cpp:.*strtol"
    "src/utilstrencodings.cpp:.*strtoll"
    "src/utilstrencodings.cpp:.*strtoul"
    "src/utilstrencodings.cpp:.*strtoull"
    "src/utilstrencodings.h:.*atoi"
    # Dash
    "src/rpc/masternode.cpp:.*atoi*"
    "src/wallet/wallet.cpp:.*atoi*"
    "src/spork.cpp:.*ctime*"
    "src/governance-validators.cpp:.*tolower*"
    "src/init.cpp:.*tolower*"
    # Megacoin
    "src/bignum.h:.*isspace"
    "src/bignum.h:.*isxdigit"
    "src/bignum.h:.*tolower"
)

# Vendored/third-party code is excluded from the scan entirely.
REGEXP_IGNORE_EXTERNAL_DEPENDENCIES="^src/(crypto/ctaes/|leveldb/|secp256k1/|tinyformat.h|univalue/)"
LOCALE_DEPENDENT_FUNCTIONS=(
alphasort # LC_COLLATE (via strcoll)
asctime # LC_TIME (directly)
asprintf # (via vasprintf)
atof # LC_NUMERIC (via strtod)
atoi # LC_NUMERIC (via strtol)
atol # LC_NUMERIC (via strtol)
atoll # (via strtoll)
atoq
btowc # LC_CTYPE (directly)
ctime # (via asctime or localtime)
dprintf # (via vdprintf)
fgetwc
fgetws
fold_case # boost::locale::fold_case
fprintf # (via vfprintf)
fputwc
fputws
fscanf # (via __vfscanf)
fwprintf # (via __vfwprintf)
getdate # via __getdate_r => isspace // __localtime_r
getwc
getwchar
is_digit # boost::algorithm::is_digit
is_space # boost::algorithm::is_space
isalnum # LC_CTYPE
isalpha # LC_CTYPE
isblank # LC_CTYPE
iscntrl # LC_CTYPE
isctype # LC_CTYPE
isdigit # LC_CTYPE
isgraph # LC_CTYPE
islower # LC_CTYPE
isprint # LC_CTYPE
ispunct # LC_CTYPE
isspace # LC_CTYPE
isupper # LC_CTYPE
iswalnum # LC_CTYPE
iswalpha # LC_CTYPE
iswblank # LC_CTYPE
iswcntrl # LC_CTYPE
iswctype # LC_CTYPE
iswdigit # LC_CTYPE
iswgraph # LC_CTYPE
iswlower # LC_CTYPE
iswprint # LC_CTYPE
iswpunct # LC_CTYPE
iswspace # LC_CTYPE
iswupper # LC_CTYPE
iswxdigit # LC_CTYPE
isxdigit # LC_CTYPE
localeconv # LC_NUMERIC + LC_MONETARY
mblen # LC_CTYPE
mbrlen
mbrtowc
mbsinit
mbsnrtowcs
mbsrtowcs
mbstowcs # LC_CTYPE
mbtowc # LC_CTYPE
mktime
normalize # boost::locale::normalize
# printf # LC_NUMERIC
putwc
putwchar
scanf # LC_NUMERIC
setlocale
snprintf
sprintf
sscanf
stod
stof
stoi
stol
stold
stoll
stoul
stoull
strcasecmp
strcasestr
strcoll # LC_COLLATE
# strerror
strfmon
strftime # LC_TIME
strncasecmp
strptime
strtod # LC_NUMERIC
strtof
strtoimax
strtol # LC_NUMERIC
strtold
strtoll
strtoq
strtoul # LC_NUMERIC
strtoull
strtoumax
strtouq
strxfrm # LC_COLLATE
swprintf
to_lower # boost::locale::to_lower
to_title # boost::locale::to_title
to_upper # boost::locale::to_upper
tolower # LC_CTYPE
toupper # LC_CTYPE
towctrans
towlower # LC_CTYPE
towupper # LC_CTYPE
trim # boost::algorithm::trim
trim_left # boost::algorithm::trim_left
trim_right # boost::algorithm::trim_right
ungetwc
vasprintf
vdprintf
versionsort
vfprintf
vfscanf
vfwprintf
vprintf
vscanf
vsnprintf
vsprintf
vsscanf
vswprintf
vwprintf
wcrtomb
wcscasecmp
wcscoll # LC_COLLATE
wcsftime # LC_TIME
wcsncasecmp
wcsnrtombs
wcsrtombs
wcstod # LC_NUMERIC
wcstof
wcstoimax
wcstol # LC_NUMERIC
wcstold
wcstoll
wcstombs # LC_CTYPE
wcstoul # LC_NUMERIC
wcstoull
wcstoumax
wcswidth
wcsxfrm # LC_COLLATE
wctob
wctomb # LC_CTYPE
wctrans
wctype
wcwidth
wprintf
)
# Join all arguments after the first with the first argument as separator.
# Works because "$*" expands with the first character of IFS between words.
function join_array {
    local sep="$1"
    shift
    local IFS="$sep"
    echo "$*"
}
# Build one big alternation of all whitelisted (known-violation) patterns.
REGEXP_IGNORE_KNOWN_VIOLATIONS=$(join_array "|" "${KNOWN_VIOLATIONS[@]}")
# Invoke "git grep" only once in order to minimize run-time
REGEXP_LOCALE_DEPENDENT_FUNCTIONS=$(join_array "|" "${LOCALE_DEPENDENT_FUNCTIONS[@]}")
# Match each function name (optionally with a _r/_s suffix) as a whole token:
# the character classes on both sides reject identifier characters and quotes.
GIT_GREP_OUTPUT=$(git grep -E "[^a-zA-Z0-9_\`'\"<>](${REGEXP_LOCALE_DEPENDENT_FUNCTIONS}(|_r|_s))[^a-zA-Z0-9_\`'\"<>]" -- "*.cpp" "*.h")
EXIT_CODE=0
for LOCALE_DEPENDENT_FUNCTION in "${LOCALE_DEPENDENT_FUNCTIONS[@]}"; do
    # Re-filter the single git-grep result per function; drop lines that are
    # comments/strings and fprintf calls that target stdout/stderr.
    MATCHES=$(grep -E "[^a-zA-Z0-9_\`'\"<>]${LOCALE_DEPENDENT_FUNCTION}(|_r|_s)[^a-zA-Z0-9_\`'\"<>]" <<< "${GIT_GREP_OUTPUT}" | \
        grep -vE "\.(c|cpp|h):\s*(//|\*|/\*|\").*${LOCALE_DEPENDENT_FUNCTION}" | \
        grep -vE 'fprintf\(.*(stdout|stderr)')
    # Exclude vendored third-party code.
    if [[ ${REGEXP_IGNORE_EXTERNAL_DEPENDENCIES} != "" ]]; then
        MATCHES=$(grep -vE "${REGEXP_IGNORE_EXTERNAL_DEPENDENCIES}" <<< "${MATCHES}")
    fi
    # Exclude grandfathered violations.
    if [[ ${REGEXP_IGNORE_KNOWN_VIOLATIONS} != "" ]]; then
        MATCHES=$(grep -vE "${REGEXP_IGNORE_KNOWN_VIOLATIONS}" <<< "${MATCHES}")
    fi
    # Anything left is a new locale-dependent call: report it and fail.
    if [[ ${MATCHES} != "" ]]; then
        echo "The locale dependent function ${LOCALE_DEPENDENT_FUNCTION}(...) appears to be used:"
        echo "${MATCHES}"
        echo
        EXIT_CODE=1
    fi
done
if [[ ${EXIT_CODE} != 0 ]]; then
    echo "Unnecessary locale dependence can cause bugs that are very"
    echo "tricky to isolate and fix. Please avoid using locale dependent"
    echo "functions if possible."
    echo
    echo "Advice not applicable in this specific case? Add an exception"
    echo "by updating the ignore list in $0"
fi
exit ${EXIT_CODE}
|
<gh_stars>0
const lineReader = require('line-reader');
const fs = require("fs");
const inputPath='./day3.txt'
const rowSz = 31;
const getTreeCount = (rowSize, input, xSlope, ySlope) => {
let y = ySlope;
let x = xSlope;
let result = 0;
const lines = fs.readFileSync(input, "utf-8")
.split("\n");
while (1) {
const currentRow = Array.from(lines[y].trim());
if(currentRow[x] === "#"){
result++;
}
if (!lines[y+1]){
break;
}
// Horizontal incrementing
x = (x + xSlope) % rowSize;
// Vertical incrementing
y= y + ySlope;
}
console.log(result);
return result;
}
// Part 1: trees hit on the (right 3, down 1) slope.
getTreeCount(rowSz, inputPath, 3, 1);
// Part 2: product of the tree counts over the five required slopes.
console.log(
    getTreeCount(rowSz, inputPath, 1, 1)
    * getTreeCount(rowSz, inputPath, 3, 1)
    * getTreeCount(rowSz, inputPath, 5, 1)
    * getTreeCount(rowSz, inputPath, 7, 1)
    * getTreeCount(rowSz, inputPath, 1, 2));
|
<reponame>nibble-4bits/AdventOfCode2020
const fs = require('fs');
const path = require('path');
const INPUT_FILE = 'input.txt';
const BUFFER = fs.readFileSync(path.join(__dirname, INPUT_FILE));
// Parse lines shaped like "1-3 a: abcde" into {min, max, letter, password}.
// The ':' is stripped first, so splitting on spaces yields the three fields.
const PASSWORD_POLICIES = BUFFER.toString()
    .split('\n')
    .map((line) => {
        const [minMax, letter, password] = line.replace(':', '').split(' ');
        const [min, max] = minMax.split('-').map(Number);
        return {
            min,
            max,
            letter,
            password,
        };
    });
// Part 1: a password is valid when its policy letter occurs between
// policy.min and policy.max times (inclusive).
// FIX: the original built `new RegExp(policy.letter, 'g')` per policy, which
// would misbehave if the letter were ever a regex metacharacter and allocates
// a regex per entry; counting characters directly is both safe and cheaper.
function day2Part1() {
    let validCount = 0;
    for (const policy of PASSWORD_POLICIES) {
        let matchCount = 0;
        for (const ch of policy.password) {
            if (ch === policy.letter) matchCount++;
        }
        if (matchCount >= policy.min && matchCount <= policy.max) {
            validCount++;
        }
    }
    return validCount;
}
// Part 2: exactly one of the two 1-indexed positions (min, max) must hold
// the policy letter — i.e. an XOR of the two position checks.
function day2Part2() {
    let validCount = 0;
    for (const policy of PASSWORD_POLICIES) {
        const atFirst = policy.password[policy.min - 1] === policy.letter;
        const atSecond = policy.password[policy.max - 1] === policy.letter;
        if (atFirst !== atSecond) {
            validCount++;
        }
    }
    return validCount;
}
module.exports = [day2Part1, day2Part2];
|
#include <bits/stdc++.h>
using namespace std;
// print all combination of size r in arr[] of size n
// Print every combination of size r drawn from arr[0..n-1], one per line,
// and increment `count` once per combination. Classic include/exclude
// recursion: data[0..index-1] holds the partial combination.
void combinationUtil(int arr[], int n, int r, int index,
                     int data[], int i, int& count)
{
    // A full combination of length r is ready: emit it.
    if (index == r) {
        count++;
        for (int j = 0; j < r; j++)
            std::cout << data[j] << " ";
        std::cout << std::endl;
        return;
    }
    // No more elements are available to fill the remaining slots.
    if (i >= n)
        return;
    // Include arr[i] at the current slot.
    data[index] = arr[i];
    combinationUtil(arr, n, r, index + 1, data, i + 1, count);
    // Exclude arr[i]: advance i but keep the slot open.
    combinationUtil(arr, n, r, index, data, i + 1, count);
}

// Entry point: prints all combinations of size r from arr of size n and
// accumulates how many were printed into `count`.
// FIX: `int data[r]` was a variable-length array — a GCC extension, not
// standard C++. std::vector provides portable, exception-safe scratch space.
void printCombination(int arr[], int n, int r, int& count)
{
    std::vector<int> data(r);
    combinationUtil(arr, n, r, 0, data.data(), 0, count);
}
// Demo driver: prints all C(3,3)=1 combinations of {2,3,4} and discards the
// resulting count.
int main()
{
    int arr[] = {2, 3, 4};
    int n = sizeof(arr) / sizeof(arr[0]);
    int r = 3; // Size of a combination
    int count = 0; // Receives the number of combinations printed.
    printCombination(arr, n, r, count);
    return 0;
}
<gh_stars>0
import { NavLink as Nav } from 'rebass'
import styled from 'styled-components'
import Link from 'gatsby-link'
import { fonts, fontSizes, colors } from '../../theme'
// Styled Gatsby <Link> for top navigation: theme font/colors, uppercase
// label, smooth color transition, highlighted on hover.
const NavLink = styled(Link)`
  font-family: ${fonts.primary};
  font-weight: 400;
  font-size: ${fontSizes[2] + 'px'};
  text-transform: uppercase;
  transition: all 0.3s linear;
  color: ${colors.black};
  text-decoration: none;
  padding: 0.5rem;
  margin: 0 10px;
  &:hover {
    color: ${colors.primary}
  }
`
export default NavLink;
<gh_stars>0
from .loading import *
from .utils_dict_list import *
from .get_logger import get_neptune_logger, get_tensorboard_logger
from .map_dict import Map,DotDict |
<reponame>Bobobert/RoLas
from .base import Agent
from rofl.functions.const import DEVICE_DEFT, F_TDTYPE_DEFT, I_TDTYPE_DEFT, UI_NDTYPE_DEFT
from rofl.functions.functions import clipReward, np, torch, rnd, no_grad, ceil
from rofl.functions.torch import array2Tensor
from rofl.functions.coach import singlePathRollout
from rofl.utils.memory import episodicMemory
from rofl.utils.openCV import imgResize
class pgAgent(Agent):
    """Policy-gradient agent for gym-style environments.

    Collects single-path rollouts into an episodic memory and serves them
    back as training episodes.
    """
    name = "pg gym agent"

    def initAgent(self, **kwargs):
        config = self.config
        # Reward clipping magnitude; 0 presumably disables clipping inside
        # the clipReward helper — TODO confirm in rofl.functions.functions.
        self.clipReward = abs(config["agent"].get("clip_reward", 0))
        # Max rollout length; -1 (default) means no forced length.
        self.nstep = config['agent'].get('nstep', -1)
        self.forceLen = True if self.nstep > 0 else False
        # Action dtype depends on whether the policy's action space is discrete.
        keys = [('action', I_TDTYPE_DEFT)] if self.policy.discrete else [('action', F_TDTYPE_DEFT)]
        self.memory = episodicMemory(config, *keys)

    def processReward(self, reward, **kwargs):
        # Delegate to the shared clipping helper (uses self.clipReward).
        return clipReward(self, reward)

    def processObs(self, obs, reset = False):
        # Convert the raw observation to a tensor on the agent's device.
        return array2Tensor(obs, device = self.device)

    def getBatch(self, size: int, proportion: float = 1, random=False,
                 device=DEVICE_DEFT, progBar: bool = False):
        # Pure pass-through to the base class; kept for an explicit signature.
        return super().getBatch(size, proportion=proportion, random=random, device=device, progBar=progBar)

    def getEpisode(self, random = False, device = None):
        """Run one rollout into a fresh memory and return it as an episode batch."""
        memory = self.memory
        memory.reset()
        singlePathRollout(self, maxLength = self.nstep, memory = memory, random = random, forceLen = self.forceLen)
        device = self.device if device is None else device
        return memory.getEpisode(device, self.keysForBatches)
class pgFFAgent(pgAgent):
    """Policy-gradient agent for the forest-fire environment.

    Keeps a rolling stack of the last `lhist` resized frames and packs
    frame / position / time into a dict of tensors for the policy.
    """
    name = "forestFire_pgAgent"

    def __init__(self, config, policy, envMaker, tbw = None):
        super(pgFFAgent, self).__init__(config, policy, envMaker, tbw)
        self.isAtari = config["env"]["atari"]
        obsShape, lhist = config["env"]["obs_shape"], config["agent"]["lhist"]
        #self.memory = MemoryFF(config)
        # Stacked observation shape: lhist most-recent frames, newest first.
        self.obsShape = (lhist, *obsShape)
        self.frameSize = obsShape
        self.frameStack, self.lastObs, self.prevFrame = np.zeros(self.obsShape, dtype = UI_NDTYPE_DEFT), None, None

    def processObs(self, obs, reset: bool = False): # TODO: pass this to a function that uses lHistObsProcess
        # with reward type, compose the outputs as a tensor alone always.
        """Convert a raw env observation into policy-ready tensors.

        obs is a dict with "frame", "position" and optionally "time".
        Returns {"frame", "position", "time"} as batched float tensors on
        self.device; the frame stack is scaled to [0, 1] via div(255), which
        assumes uint8 pixel data — TODO confirm against the env.
        """
        frame, pos, tm = obs["frame"], obs["position"], obs.get("time", 0)
        if reset:
            # New episode: clear history instead of rolling it.
            self.frameStack.fill(0)
        else:
            # Shift history so index 0 is free for the newest frame.
            self.frameStack = np.roll(self.frameStack, 1, axis = 0)
        self.prevFrame = imgResize(frame, size = self.frameSize)
        self.frameStack[0] = self.prevFrame
        # NOTE(review): prevFrame is immediately rebound from the resized
        # frame to a dict holding frame/position/time — looks intentional
        # (snapshot of the last processed obs), but verify downstream users.
        self.prevFrame = {"frame":self.prevFrame, "position":pos, "time":tm}
        newObs = torch.from_numpy(self.frameStack).to(self.device).unsqueeze(0).float().div(255)
        Tpos = torch.as_tensor(pos).to(self.device).float().unsqueeze(0)
        Ttm = torch.as_tensor([tm]).to(self.device).float().unsqueeze(0)
        return {"frame": newObs, "position":Tpos, "time":Ttm}
|
<filename>lc0542_01_matrix.py
"""Leetcode 542. 01 Matrix
Medium
URL: https://leetcode.com/problems/01-matrix/
Given a matrix consists of 0 and 1,
find the distance of the nearest 0 for each cell.
The distance between two adjacent cells is 1.
Example 1:
Input:
[[0,0,0],
[0,1,0],
[0,0,0]]
Output:
[[0,0,0],
[0,1,0],
[0,0,0]]
Example 2:
Input:
[[0,0,0],
[0,1,0],
[1,1,1]]
Output:
[[0,0,0],
[0,1,0],
[1,2,1]]
Note:
- The number of elements of the given matrix will not exceed 10,000.
- There are at least one 0 in the given matrix.
- The cells are adjacent in only four directions: up, down, left and right.
"""
from typing import List
class SolutionDFSRecur:
    def _dfs(self, r: int, c: int, distance: int, mat: List[List[int]]) -> None:
        """Relax cell (r, c) down to `distance` and propagate to neighbors.

        Stops when out of bounds or when the cell already holds a strictly
        smaller distance than the candidate.
        """
        # Base cases: out of boundary or longer distance.
        if (r < 0 or r >= len(mat)
                or c < 0 or c >= len(mat[0])
                or mat[r][c] < distance):
            return None
        # Update the shortest distance.
        mat[r][c] = distance
        # Apply recursive DFS: visit up/down/left/right.
        dirs = [(r - 1, c), (r + 1, c), (r, c - 1), (r, c + 1)]
        for (r_next, c_next) in dirs:
            self._dfs(r_next, c_next, distance + 1, mat)

    def updateMatrix(self, mat: List[List[int]]) -> List[List[int]]:
        """Distance-to-nearest-zero via repeated DFS relaxation (in place).

        NOTE(review): a previous docstring claimed O(m*n) time, but cells can
        be relaxed and revisited many times, so the worst case is well above
        O(m*n); recursion depth can also reach O(m*n). Prefer the BFS or DP
        solutions below for large inputs.
        """
        # Edge cases.
        if not mat or not mat[0]:
            return mat
        n_rows, n_cols = len(mat), len(mat[0])
        # Collect 0 cells as DFS start points and update 1 cell values to inf.
        zero_cells = []
        for r in range(n_rows):
            for c in range(n_cols):
                if mat[r][c] == 0:
                    zero_cells.append((r, c))
                else:
                    mat[r][c] = float('inf')
        # Iterate through start points and update distance to them.
        for (r, c) in zero_cells:
            distance = 0
            self._dfs(r, c, distance, mat)
        return mat
class SolutionBFS:
    def updateMatrix(self, mat: List[List[int]]) -> List[List[int]]:
        """Distance to the nearest 0 for every cell, via multi-source BFS.

        All 0 cells are seeded into the queue at distance 0; the BFS wave
        then relaxes neighbors level by level. Mutates and returns `mat`.

        Time: O(m*n). Space: O(m*n) for the queue.
        """
        from collections import deque
        if not mat or not mat[0]:
            return mat
        rows, cols = len(mat), len(mat[0])
        # Seed the frontier with every 0 cell; mark unknown distances as inf.
        frontier = deque()
        for i in range(rows):
            for j in range(cols):
                if mat[i][j] == 0:
                    frontier.appendleft((i, j))
                else:
                    mat[i][j] = float('inf')
        # Expand outward; each improvement re-enqueues the improved cell.
        while frontier:
            i, j = frontier.pop()
            for ni, nj in ((i - 1, j), (i + 1, j), (i, j - 1), (i, j + 1)):
                if 0 <= ni < rows and 0 <= nj < cols and mat[ni][nj] > mat[i][j] + 1:
                    mat[ni][nj] = mat[i][j] + 1
                    frontier.appendleft((ni, nj))
        return mat
class SolutionDPTopLeftBottomRight:
    def updateMatrix(self, mat: List[List[int]]) -> List[List[int]]:
        """Distance to the nearest 0 via two dynamic-programming sweeps.

        Pass 1 (top-left -> bottom-right) accounts for the nearest 0 above or
        to the left; pass 2 (bottom-right -> top-left) folds in zeros below
        or to the right. Mutates and returns `mat`.

        Time: O(m*n). Space: O(1) extra.
        """
        if not mat or not mat[0]:
            return mat
        rows, cols = len(mat), len(mat[0])
        INF = float('inf')
        # Forward sweep: best distance coming from up/left.
        for i in range(rows):
            for j in range(cols):
                if mat[i][j] == 0:
                    continue
                best = INF
                if i > 0:
                    best = min(best, mat[i - 1][j])
                if j > 0:
                    best = min(best, mat[i][j - 1])
                mat[i][j] = best + 1
        # Backward sweep: fold in the best distance coming from down/right.
        for i in reversed(range(rows)):
            for j in reversed(range(cols)):
                if mat[i][j] == 0:
                    continue
                if i + 1 < rows:
                    mat[i][j] = min(mat[i][j], mat[i + 1][j] + 1)
                if j + 1 < cols:
                    mat[i][j] = min(mat[i][j], mat[i][j + 1] + 1)
        return mat
def main():
    """Ad-hoc check: run all three solutions on the two LeetCode examples and
    print the results with rough wall-clock timings."""
    import copy
    import time

    # Expected output:
    # [[0,0,0],
    #  [0,1,0],
    #  [0,0,0]]
    mat = [[0,0,0],
           [0,1,0],
           [0,0,0]]
    # Every solution mutates its input, so each run gets a deep copy.
    start_time = time.time()
    print(SolutionDFSRecur().updateMatrix(copy.deepcopy(mat)))
    print("DFS:", time.time() - start_time)
    start_time = time.time()
    print(SolutionBFS().updateMatrix(copy.deepcopy(mat)))
    print("BFS:", time.time() - start_time)
    start_time = time.time()
    print(SolutionDPTopLeftBottomRight().updateMatrix(copy.deepcopy(mat)))
    print("DP:", time.time() - start_time)

    # Expected output:
    # [[0,0,0],
    #  [0,1,0],
    #  [1,2,1]]
    mat = [[0,0,0],
           [0,1,0],
           [1,1,1]]
    start_time = time.time()
    print(SolutionDFSRecur().updateMatrix(copy.deepcopy(mat)))
    print("DFS:", time.time() - start_time)
    start_time = time.time()
    print(SolutionBFS().updateMatrix(copy.deepcopy(mat)))
    print("BFS:", time.time() - start_time)
    start_time = time.time()
    print(SolutionDPTopLeftBottomRight().updateMatrix(copy.deepcopy(mat)))
    print("DP:", time.time() - start_time)

if __name__ == '__main__':
    main()
|
#!/bin/bash -x
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# The only prerequisite should be homebrew. If something doesn't work out of
# the box with just homebrew, let's fix it.
# fail fast
set -e
BUILD_DIR=${BUILD_DIR:-_build}
# brew install alias
# Install the given formula; if it is already installed, upgrade it instead.
brew_install() {
    if ! brew install "$@"; then
        brew upgrade "$@"
    fi
}
# install deps
# Install (or upgrade) every Homebrew package folly needs to build.
install_deps() {
    # folly deps
    dependencies=(
        boost
        cmake
        double-conversion
        gflags
        glog
        jemalloc
        libevent
        lz4
        openssl
        pkg-config
        snappy
        xz
    )
    # fetch deps
    for dependency in "${dependencies[@]}"; do
        brew_install "${dependency}"
    done
}
install_deps

# Allows this script to be invoked from anywhere in the source tree but the
# BUILD_DIR we create will always be in the top level folly directory
TOP_LEVEL_DIR="$(cd "$(dirname -- "$0")"/../.. ; pwd)" # folly
cd "$TOP_LEVEL_DIR"

mkdir -p "${BUILD_DIR}"
cd "${BUILD_DIR}"

# Point cmake at brew's OpenSSL headers (keg-only, not in default paths).
OPENSSL_INCLUDES=$(brew --prefix openssl)/include
cmake \
    -DOPENSSL_INCLUDE_DIR="${OPENSSL_INCLUDES}" \
    -DFOLLY_HAVE_WEAK_SYMBOLS=ON \
    "$@" \
    ..

# fetch googletest, if doesn't exist
# NOTE(review): gtest is downloaded into the build dir but nothing below
# references it — presumably the cmake-generated test targets expect this
# directory to exist. Confirm before removing.
GTEST_VER=1.8.0
GTEST_DIR=gtest-${GTEST_VER}
if [ ! -d ${GTEST_DIR} ]; then
    mkdir ${GTEST_DIR}
    curl -SL \
        https://github.com/google/googletest/archive/release-${GTEST_VER}.tar.gz | \
        tar -xvzf - --strip-components=1 -C ${GTEST_DIR}
fi

# make, test, install
make
make install
|
def my_function(input_array):
    """Return a new set of the unique elements of ``input_array``.

    Equivalent to the original ``set().union(input_array)``, but the empty
    intermediate set added nothing — constructing the set directly is clearer.

    Args:
        input_array: Any iterable of hashable elements.

    Returns:
        set: the distinct elements of ``input_array``.
    """
    return set(input_array)
require 'test_helper'
# Functional tests for SchoolsController CRUD actions, plus a CanCan-style
# ability check that staff users may not delete schools.
class SchoolsControllerTest < ActionController::TestCase
  include Devise::Test::ControllerHelpers

  # Use the :tuggle fixture school and sign in fixture user :one before each test.
  setup do
    @school = schools(:tuggle)
    sign_in users(:one)
  end

  test "should get index" do
    get :index
    assert_response :success
    assert_not_nil assigns(:schools)
  end

  test "should get new" do
    get :new
    assert_response :success
  end

  test "should create school" do
    # Appending 'test' keeps the name unique relative to the fixture.
    assert_difference('School.count') do
      post :create, params: { school: { name: @school.name + 'test' } }
    end
    assert_redirected_to school_path(assigns(:school))
  end

  test "should show school" do
    get :show, params: { id: @school }
    assert_response :success
  end

  test "should get edit" do
    get :edit, params: { id: @school }
    assert_response :success
  end

  test "should update school" do
    patch :update, params: { id: @school, school: { name: @school.name + 'test' } }
    assert_redirected_to school_path(assigns(:school))
  end

  test "should destroy school" do
    assert_difference('School.count', -1) do
      delete :destroy, params: { id: @school }
    end
    assert_redirected_to schools_path
  end

  test "staff user shouldn't be able to delete school" do
    # Authorization is checked directly through the Ability model rather
    # than the controller action.
    user = User.create!(email: '<EMAIL>', staff: true, password: '<PASSWORD>')
    ability = Ability.new(user)
    assert ability.cannot? :delete, @school
  end
end
|
<gh_stars>0
module Fast
  # Process-wide registry of CLI commands plus user configuration loaded
  # from ~/.fast_config (YAML).
  class Config
    DEFAULT_CONFIG_PATH = File.expand_path('~/.fast_config')

    class << self
      attr_reader :commands
    end

    @commands = {}
    @config = {}

    # Register a command keyed by its usage string; duplicates are an error.
    def self.register_command(command)
      raise FastError, "Duplicate command `#{command.usage}'" if @commands.key?(command.usage)
      @commands[command.usage] = command
    end

    # Load the YAML config file when present; otherwise start with an empty hash.
    def self.init(file_path = DEFAULT_CONFIG_PATH)
      @config = File.exist?(file_path) ? YAML.load_file(file_path) : {}
    end

    # Raw access to a top-level config value.
    def self.[](k)
      @config[k]
    end

    # GitHub-specific settings; falls back to an empty hash when absent.
    def self.github
      @config["github"] || {}
    end
  end
end
|
<gh_stars>1-10
import debug from 'debug';
import React, {forwardRef} from 'react';
import {MaskedInput, MaskedInputProps} from '../../inputs/MaskedInput';

const d = debug('thx.controls.date.LocalDatePicker.MaskedDateInput');

export interface MaskedDateInputProps {
	name?: string;
	// Emits a synthetic DOM-like change event: {target: {value}}.
	onChange?: (value: {target: {value: string}}) => void;
}

/**
 * Text input masked as mm/dd/yyyy. Adapts MaskedInput's plain-string
 * onChange to the {target: {value}} event shape expected by form libraries.
 */
// eslint-disable-next-line no-unused-vars,@typescript-eslint/no-unused-vars
function MaskedDateInputInner(props: MaskedDateInputProps & Omit<MaskedInputProps, 'onChange'>, ref: any) {
	const {onChange, name, ...rest} = props;
	return (
		<MaskedInput
			{...rest}
			name={name}
			mask={{alias: 'datetime', inputFormat: 'mm/dd/yyyy'}}
			onChange={value => {
				// Null/undefined values are normalized to the empty string.
				if (onChange) onChange({target: {value: value || ''}});
			}}
		/>
	);
}

export const MaskedDateInput = forwardRef(MaskedDateInputInner);
|
import React, {Component} from "react"
import PropTypes from 'prop-types'
import {bindActionCreators} from 'redux'
import {connect} from 'react-redux'
import {Link} from 'react-router'
import bodyContainer from '../containers'
// import MuiThemeProvider from 'material-ui/styles/MuiThemeProvider'
import getMuiTheme from 'material-ui/styles/getMuiTheme'
import darkBaseTheme from 'material-ui/styles/baseThemes/darkBaseTheme'
import {green100, green500, green700} from 'material-ui/styles/colors'
import AppBar from 'material-ui/AppBar'
import Login from './login'
import Logged from './logged'
import Toggle from 'material-ui/Toggle'
import FlatButton from 'material-ui/FlatButton'
import ToDos from '../containers/todos'
// Needed for onTouchTap, REMOVE WHEN REACT HAS THIS FEATURE !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
// It's a mobile friendly onClick() alternative for all components in Material-UI
// http://stackoverflow.com/a/34015469/988941
import injectTapEventPlugin from 'react-tap-event-plugin'
injectTapEventPlugin()
// Creating custom theme or manipulating the current one.
// And also setting userAgent for server rendering.
const muiTheme = getMuiTheme({
// setting userAgent for server rendering !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
// userAgent: req.headers['user-agent'],
/* palette: {
primary1Color: green500,
primary2Color: green700,
primary3Color: green100,
}, */
// Customising specifically a component
/* avatar: {
borderColor: null,
},
appBar: {
height: 50,
}, */
})
class Body extends Component {
constructor(props) {
super(props)
this.state = {
muiTheme: muiTheme,
logged: true
}
this.changeTheme = this.changeTheme.bind(this)
this.handleLoggedToggle = this.handleLoggedToggle.bind(this)
}
getChildContext() {
// ALSO USE CONTEXT FOR LANGUAGE and ACCOUNT/USER DATA !!!!!!!!!!!!!!!!!!!!
// Should i store lang in cookie like theme info ???
return {
muiTheme: this.state.muiTheme
}
}
/* componentDidMount() {
// Get user theme info from cookie and apply !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
this.changeTheme(userT)
this.accountAndUserData = $.ajax({
url: this.props.url,
dataType: 'json',
cache: false,
success: function(data) {
this.setState({aAndUData: data})
}.bind(this),
error: function(xhr, status, err) {
console.error(this.props.url, status, err.toString())
}.bind(this)
})
} */
changeTheme() {
if (this.state.muiTheme === muiTheme) {
this.setState({muiTheme: getMuiTheme(darkBaseTheme)})
} else {
this.setState({muiTheme: muiTheme})
}
}
handleLoggedToggle() {
this.setState({logged: !this.state.logged})
}
render() {
const {user} = this.props.params
{user === "MerinEREN" && console.log(user)}
return (
<div
style={{backgroundColor:
this.state.muiTheme.palette.canvasColor}}
>
<AppBar
title="User Name"
iconElementRight={this.state.logged ? <Logged /> :
<Login />}
/>
<FlatButton
label="Change Theme"
primary={true}
onTouchTap={this.changeTheme}
/>
<Toggle
label="Logged"
defaultToggled={true}
onToggle={this.handleLoggedToggle}
labelPosition="right"
style={{margin: 20}}
/>
{user !== "MerinEREN" &&
<Link
style={{color: this.state.muiTheme.
palette.textColor}}
to="MerinEREN"
activeStyle={{
textDecoration: 'none',
color: 'black'
}}
>
Navigate to Merin
</Link>
}
<ToDos />
</div>
)
}
/* componentWillUnmount() {
this.accountAndUserData.abort()
} */
}
Body.childContextTypes = {
muiTheme: PropTypes.object.isRequired
}
export default connect(bodyContainer)(Body)
|
CREATE TABLE /*$wgDBprefix*/user (
user_id int NOT NULL IDENTITY(1,1),
user_name varchar(255) NOT NULL default '',
user_real_name varchar(255) NOT NULL default '',
user_password text NOT NULL,
user_newpassword text NOT NULL,
user_newpass_time varchar(5) NULL,
user_email text NOT NULL,
user_options text NOT NULL,
user_touched varchar(5) NOT NULL default '',
user_token varchar(10) NOT NULL default '',
user_email_authenticated varchar(5) NULL,
user_email_token varchar(10) NULL,
user_email_token_expires varchar(5) NULL,
user_registration varchar(5) NULL,
user_editcount int,
PRIMARY KEY (user_id)
);
CREATE TABLE /*$wgDBprefix*/user_groups (
ug_user int NOT NULL default '0',
ug_group varchar(5) NOT NULL default '',
PRIMARY KEY (ug_user,ug_group)
);
CREATE TABLE /*$wgDBprefix*/user_newtalk (
user_id int NOT NULL default '0',
user_ip varchar(13) NOT NULL default '',
user_last_timestamp varchar(5) NOT NULL default ''
);
CREATE TABLE /*$wgDBprefix*/page (
page_id int NOT NULL IDENTITY(1,1),
page_namespace int NOT NULL,
page_title varchar(255) NOT NULL,
page_restrictions text NOT NULL,
page_counter bigint NOT NULL default '0',
page_is_redirect tinyint NOT NULL default '0',
page_is_new tinyint NOT NULL default '0',
page_random real NOT NULL,
page_touched varchar(5) NOT NULL default '',
page_latest int NOT NULL,
page_len int NOT NULL,
PRIMARY KEY (page_id)
);
CREATE TABLE /*$wgDBprefix*/revision (
rev_id int NOT NULL IDENTITY(1,1),
rev_page int NOT NULL,
rev_text_id int NOT NULL,
rev_comment text NOT NULL,
rev_user int NOT NULL default '0',
rev_user_text varchar(255) NOT NULL default '',
rev_timestamp varchar(5) NOT NULL default '',
rev_minor_edit tinyint NOT NULL default '0',
rev_deleted tinyint NOT NULL default '0',
rev_len int,
rev_parent_id int default NULL,
PRIMARY KEY (rev_page, rev_id)
);
CREATE TABLE /*$wgDBprefix*/text (
old_id int NOT NULL IDENTITY(1,1),
old_text text NOT NULL,
old_flags text NOT NULL,
PRIMARY KEY (old_id)
);
CREATE TABLE /*$wgDBprefix*/archive (
ar_namespace int NOT NULL default '0',
ar_title varchar(255) NOT NULL default '',
ar_text text NOT NULL,
ar_comment text NOT NULL,
ar_user int NOT NULL default '0',
ar_user_text varchar(255) NOT NULL,
ar_timestamp varchar(5) NOT NULL default '',
ar_minor_edit tinyint NOT NULL default '0',
ar_flags text NOT NULL,
ar_rev_id int,
ar_text_id int,
ar_deleted tinyint NOT NULL default '0',
ar_len int,
ar_page_id int,
ar_parent_id int default NULL
);
CREATE TABLE /*$wgDBprefix*/pagelinks (
pl_from int NOT NULL default '0',
pl_namespace int NOT NULL default '0',
pl_title varchar(255) NOT NULL default ''
);
CREATE TABLE /*$wgDBprefix*/templatelinks (
tl_from int NOT NULL default '0',
tl_namespace int NOT NULL default '0',
tl_title varchar(255) NOT NULL default ''
);
CREATE TABLE /*$wgDBprefix*/imagelinks (
il_from int NOT NULL default '0',
il_to varchar(255) NOT NULL default ''
);
CREATE TABLE /*$wgDBprefix*/categorylinks (
cl_from int NOT NULL default '0',
cl_to varchar(255) NOT NULL default '',
cl_sortkey varchar(70) NOT NULL default '',
cl_timestamp timestamp NOT NULL
);
CREATE TABLE /*$wgDBprefix*/category (
cat_id int NOT NULL IDENTITY(1,1),
cat_title varchar(255) NOT NULL,
cat_pages int NOT NULL default 0,
cat_subcats int NOT NULL default 0,
cat_files int NOT NULL default 0,
cat_hidden tinyint NOT NULL default 0,
PRIMARY KEY (cat_id)
);
CREATE TABLE /*$wgDBprefix*/externallinks (
el_from int NOT NULL default '0',
el_to text NOT NULL,
el_index text NOT NULL
);
CREATE TABLE /*$wgDBprefix*/langlinks (
ll_from int NOT NULL default '0',
ll_lang varchar(7) NOT NULL default '',
ll_title varchar(255) NOT NULL default ''
);
CREATE TABLE /*$wgDBprefix*/site_stats (
ss_row_id int NOT NULL,
ss_total_views bigint default '0',
ss_total_edits bigint default '0',
ss_good_articles bigint default '0',
ss_total_pages bigint default '-1',
ss_users bigint default '-1',
ss_admins int default '-1',
ss_images int default '0'
);
CREATE TABLE /*$wgDBprefix*/hitcounter (
hc_id int NOT NULL
);
CREATE TABLE /*$wgDBprefix*/ipblocks (
ipb_id int NOT NULL IDENTITY(1,1),
ipb_address text NOT NULL,
ipb_user int NOT NULL default '0',
ipb_by int NOT NULL default '0',
ipb_by_text varchar(255) NOT NULL default '',
ipb_reason text NOT NULL,
ipb_timestamp varchar(5) NOT NULL default '',
ipb_auto bit NOT NULL default 0,
ipb_anon_only bit NOT NULL default 0,
ipb_create_account bit NOT NULL default 1,
ipb_enable_autoblock bit NOT NULL default '1',
ipb_expiry varchar(5) NOT NULL default '',
ipb_range_start text NOT NULL,
ipb_range_end text NOT NULL,
ipb_deleted bit NOT NULL default 0,
ipb_block_email bit NOT NULL default 0,
PRIMARY KEY (ipb_id)
);
-- Metadata for the current version of each uploaded file.
-- FIX: string defaults used double quotes ("unknown"), which T-SQL treats as
-- quoted identifiers, not string literals; single quotes are required.
CREATE TABLE /*$wgDBprefix*/image (
  img_name varchar(255) NOT NULL default '',
  img_size int NOT NULL default '0',
  img_width int NOT NULL default '0',
  img_height int NOT NULL default '0',
  img_metadata text NOT NULL,
  img_bits int NOT NULL default '0',
  img_media_type TEXT default NULL,
  img_major_mime TEXT NOT NULL default 'unknown',
  img_minor_mime varchar(10) NOT NULL default 'unknown',
  img_description text NOT NULL,
  img_user int NOT NULL default '0',
  img_user_text varchar(255) NOT NULL,
  img_timestamp varchar(5) NOT NULL default '',
  img_sha1 varchar(10) NOT NULL default '',
  PRIMARY KEY (img_name)
);
-- Superseded versions of files in `image`; one row per old upload.
-- FIX: double-quoted string defaults ("unknown") replaced with single-quoted
-- string literals, as required by T-SQL.
CREATE TABLE /*$wgDBprefix*/oldimage (
  oi_name varchar(255) NOT NULL default '',
  oi_archive_name varchar(255) NOT NULL default '',
  oi_size int NOT NULL default 0,
  oi_width int NOT NULL default 0,
  oi_height int NOT NULL default 0,
  oi_bits int NOT NULL default 0,
  oi_description text NOT NULL,
  oi_user int NOT NULL default '0',
  oi_user_text varchar(255) NOT NULL,
  oi_timestamp varchar(5) NOT NULL default '',
  oi_metadata text NOT NULL,
  oi_media_type TEXT default NULL,
  oi_major_mime TEXT NOT NULL default 'unknown',
  oi_minor_mime varchar(10) NOT NULL default 'unknown',
  oi_deleted tinyint NOT NULL default '0',
  oi_sha1 varchar(10) NOT NULL default ''
);
-- Deleted files, retained so they can be undeleted later.
-- FIX: double-quoted string defaults ("unknown") replaced with single-quoted
-- string literals, as required by T-SQL.
CREATE TABLE /*$wgDBprefix*/filearchive (
  fa_id int NOT NULL IDENTITY(1,1),
  fa_name varchar(255) NOT NULL default '',
  fa_archive_name varchar(255) NULL default '',
  fa_storage_group varchar(5) NULL,
  fa_storage_key varchar(17) NULL default '',
  fa_deleted_user int,
  fa_deleted_timestamp varchar(5) NULL default '',
  fa_deleted_reason text,
  fa_size int default '0',
  fa_width int default '0',
  fa_height int default '0',
  fa_metadata text,
  fa_bits int default '0',
  fa_media_type TEXT default NULL,
  fa_major_mime TEXT default 'unknown',
  fa_minor_mime varchar(10) NULL default 'unknown',
  fa_description text,
  fa_user int default '0',
  fa_user_text varchar(255) NULL,
  fa_timestamp varchar(5) NULL default '',
  fa_deleted tinyint NOT NULL default '0',
  PRIMARY KEY (fa_id)
);
CREATE TABLE /*$wgDBprefix*/recentchanges (
rc_id int NOT NULL IDENTITY(1,1),
rc_timestamp varchar(5) NOT NULL default '',
rc_cur_time varchar(5) NOT NULL default '',
rc_user int NOT NULL default '0',
rc_user_text varchar(255) NOT NULL,
rc_namespace int NOT NULL default '0',
rc_title varchar(255) NOT NULL default '',
rc_comment varchar(255) NOT NULL default '',
rc_minor tinyint NOT NULL default '0',
rc_bot tinyint NOT NULL default '0',
rc_new tinyint NOT NULL default '0',
rc_cur_id int NOT NULL default '0',
rc_this_oldid int NOT NULL default '0',
rc_last_oldid int NOT NULL default '0',
rc_type tinyint NOT NULL default '0',
rc_moved_to_ns tinyint NOT NULL default '0',
rc_moved_to_title varchar(255) NOT NULL default '',
rc_patrolled tinyint NOT NULL default '0',
rc_ip varchar(13) NOT NULL default '',
rc_old_len int,
rc_new_len int,
rc_deleted tinyint NOT NULL default '0',
rc_logid int NOT NULL default '0',
rc_log_type varchar(17) NULL default NULL,
rc_log_action varchar(17) NULL default NULL,
rc_params text NULL,
PRIMARY KEY (rc_id)
);
CREATE TABLE /*$wgDBprefix*/watchlist (
wl_user int NOT NULL,
wl_namespace int NOT NULL default '0',
wl_title varchar(255) NOT NULL default '',
wl_notificationtimestamp varchar(5) NULL
);
CREATE TABLE /*$wgDBprefix*/math (
math_inputhash varchar(5) NOT NULL,
math_outputhash varchar(5) NOT NULL,
math_html_conservativeness tinyint NOT NULL,
math_html text,
math_mathml text
);
CREATE TABLE /*$wgDBprefix*/searchindex (
si_page int NOT NULL,
si_title varchar(255) NOT NULL default '',
si_text text NOT NULL
);
CREATE TABLE /*$wgDBprefix*/interwiki (
iw_prefix varchar(32) NOT NULL,
iw_url text NOT NULL,
iw_local bit NOT NULL,
iw_trans tinyint NOT NULL default 0
);
CREATE TABLE /*$wgDBprefix*/querycache (
qc_type varchar(10) NOT NULL,
qc_value int NOT NULL default '0',
qc_namespace int NOT NULL default '0',
qc_title varchar(255) NOT NULL default ''
);
CREATE TABLE /*$wgDBprefix*/objectcache (
keyname varchar(17) NOT NULL default '',
value text,
exptime datetime
);
CREATE TABLE /*$wgDBprefix*/transcache (
tc_url varchar(17) NOT NULL,
tc_contents text,
tc_time int NOT NULL
);
CREATE TABLE /*$wgDBprefix*/logging (
log_id int NOT NULL IDENTITY(1,1),
log_type varchar(4) NOT NULL default '',
log_action varchar(4) NOT NULL default '',
log_timestamp varchar(5) NOT NULL default '19700101000000',
log_user int NOT NULL default 0,
log_namespace int NOT NULL default 0,
log_title varchar(255) NOT NULL default '',
log_comment varchar(255) NOT NULL default '',
log_params text NOT NULL,
log_deleted tinyint NOT NULL default '0',
PRIMARY KEY (log_id)
);
CREATE TABLE /*$wgDBprefix*/trackbacks (
tb_id int IDENTITY(1,1),
tb_page int REFERENCES /*$wgDBprefix*/page(page_id) ON DELETE CASCADE,
tb_title varchar(255) NOT NULL,
tb_url text NOT NULL,
tb_ex text,
tb_name varchar(255) NULL,
PRIMARY KEY (tb_id)
);
CREATE TABLE /*$wgDBprefix*/job (
job_id int NOT NULL IDENTITY(1,1),
job_cmd varchar(17) NOT NULL default '',
job_namespace int NOT NULL,
job_title varchar(255) NOT NULL,
job_params text NOT NULL,
PRIMARY KEY (job_id)
);
CREATE TABLE /*$wgDBprefix*/querycache_info (
qci_type varchar(10) NOT NULL default '',
qci_timestamp varchar(5) NOT NULL default '19700101000000'
);
CREATE TABLE /*$wgDBprefix*/redirect (
rd_from int NOT NULL default '0',
rd_namespace int NOT NULL default '0',
rd_title varchar(255) NOT NULL default '',
PRIMARY KEY (rd_from)
);
CREATE TABLE /*$wgDBprefix*/querycachetwo (
qcc_type varchar(10) NOT NULL,
qcc_value int NOT NULL default '0',
qcc_namespace int NOT NULL default '0',
qcc_title varchar(255) NOT NULL default '',
qcc_namespacetwo int NOT NULL default '0',
qcc_titletwo varchar(255) NOT NULL default ''
);
CREATE TABLE /*$wgDBprefix*/page_restrictions (
pr_page int NOT NULL,
pr_type varchar(17) NOT NULL,
pr_level varchar(17) NOT NULL,
pr_cascade tinyint NOT NULL,
pr_user int NULL,
pr_expiry varchar(5) NULL,
pr_id int NOT NULL IDENTITY(1,1),
PRIMARY KEY (pr_page,pr_type)
);
CREATE TABLE /*$wgDBprefix*/protected_titles (
pt_namespace int NOT NULL,
pt_title varchar(255) NOT NULL,
pt_user int NOT NULL,
pt_reason text,
pt_timestamp varchar(5) NOT NULL,
pt_expiry varchar(5) NOT NULL default '',
pt_create_perm varchar(17) NOT NULL,
PRIMARY KEY (pt_namespace,pt_title)
);
CREATE TABLE /*$wgDBprefix*/page_props (
pp_page int NOT NULL,
pp_propname varchar(17) NOT NULL,
pp_value text NOT NULL,
PRIMARY KEY (pp_page,pp_propname)
);
CREATE TABLE /*$wgDBprefix*/updatelog (
ul_key varchar(255) NOT NULL,
PRIMARY KEY (ul_key)
);
|
#!/usr/bin/env bash
# Bootstrap the autotools build: regenerate configure scripts, then configure.
./autogen.sh && ./configure
package one.microproject.rpi.hardware.gpio.tests;
import one.microproject.rpi.hardware.gpio.sensors.BMP180;
/**
 * Smoke test for the BMP180 I2C sensor driver: reads temperature, pressure
 * and altitude once and prints them to stdout. Requires the sensor hardware
 * to be attached; any driver failure propagates out of main.
 */
public class BMP180Test {

    public static void main(String[] args) throws Exception {
        System.out.println("BMP180Test started ...");
        BMP180 bmp180 = new BMP180();
        final float temperature = bmp180.readTemperature();
        final float pressure = bmp180.readPressure();
        final double altitude = bmp180.readAltitude();
        // Pressure is reported by the driver in Pa; divide to print kPa.
        System.out.println("temperature: " + temperature + " C");
        System.out.println("pressure : " + (pressure / 1000) + " kPa");
        System.out.println("altitude : " + altitude + " m");
        System.out.println("BMP180Test done.");
    }
}
|
<filename>endpoint/websocket.go
package endpoint
import "strconv"
// Gateway returns the Discord gateway endpoint URL for API version v by
// concatenating the package-level discordAPI, version and gateway constants
// (defined elsewhere in this package) around the version number.
func Gateway(v int) string {
	return discordAPI + version + strconv.Itoa(v) + gateway
}
|
def max_unique_characters(s):
    """Return the number of distinct characters in ``s``.

    The original loop maintained a seen-set and incremented a counter for
    each first occurrence; that count is exactly the size of the set, so
    the whole loop collapses to ``len(set(s))``.

    Args:
        s: An iterable of characters (typically a string).

    Returns:
        int: Number of distinct characters (0 for an empty string).
    """
    return len(set(s))
robot_NQJ=4
robot_NJ=4
robot_name="S4PPPR2"
robot_NL=5
robot_NKP=5
robot_KP="a2 a3 a4 d4 theta2"
robot_kinconstr_exist=0
robot_NKCP=0
robot_KCP=""
robot_NMPVFIXB=6
robot_NMPVFLOATB=NOTDEFINED
robot_NTAUJFIXBREGNN=13
|
#include <iostream>
using namespace std;
int main() {
    // Kadane's algorithm: maximum contiguous subarray sum.
    // An all-negative array yields 0 (the empty subarray), as in the original.
    const int values[] = {-2, -3, 4, -1, -2, 1, 5, -3};
    int best = 0;
    int running = 0;
    for (const int v : values) {
        running += v;
        if (running > best) {
            best = running;
        }
        if (running < 0) {
            running = 0;  // a negative prefix can never help; restart here
        }
    }
    std::cout << best << std::endl;
    return 0;
}
<reponame>wolfchinaliu/gameCenter
package com.shiliu.game.domain;
import java.util.ArrayList;
import java.util.List;
public class BallotExample {
protected String orderByClause;
protected boolean distinct;
protected List<Criteria> oredCriteria;
/** Creates an example with an empty list of OR'ed criteria groups. */
public BallotExample() {
    oredCriteria = new ArrayList<Criteria>();
}

/** Sets the raw ORDER BY clause appended to generated queries. */
public void setOrderByClause(String orderByClause) {
    this.orderByClause = orderByClause;
}

/** Returns the raw ORDER BY clause, or null if none was set. */
public String getOrderByClause() {
    return orderByClause;
}

/** Enables or disables SELECT DISTINCT in the generated query. */
public void setDistinct(boolean distinct) {
    this.distinct = distinct;
}

/** Whether the generated query uses SELECT DISTINCT. */
public boolean isDistinct() {
    return distinct;
}

/** Returns all criteria groups; groups are combined with OR in the query. */
public List<Criteria> getOredCriteria() {
    return oredCriteria;
}

/** Adds an existing criteria group, OR'ed with previously added groups. */
public void or(Criteria criteria) {
    oredCriteria.add(criteria);
}

/** Creates a new criteria group, registers it (OR'ed) and returns it. */
public Criteria or() {
    Criteria criteria = createCriteriaInternal();
    oredCriteria.add(criteria);
    return criteria;
}

/**
 * Creates a criteria group; per MyBatis-generator convention it is only
 * auto-registered when it is the first group of this example.
 */
public Criteria createCriteria() {
    Criteria criteria = createCriteriaInternal();
    if (oredCriteria.size() == 0) {
        oredCriteria.add(criteria);
    }
    return criteria;
}

/** Factory hook for criteria instances (overridable by subclasses). */
protected Criteria createCriteriaInternal() {
    Criteria criteria = new Criteria();
    return criteria;
}

/** Resets the example to its initial empty state for reuse. */
public void clear() {
    oredCriteria.clear();
    orderByClause = null;
    distinct = false;
}
protected abstract static class GeneratedCriteria {
protected List<Criterion> criteria;
protected GeneratedCriteria() {
super();
criteria = new ArrayList<Criterion>();
}
public boolean isValid() {
return criteria.size() > 0;
}
public List<Criterion> getAllCriteria() {
return criteria;
}
public List<Criterion> getCriteria() {
return criteria;
}
/** Adds a value-less condition fragment (e.g. "col is null"). */
protected void addCriterion(String condition) {
    if (condition == null) {
        throw new RuntimeException("Value for condition cannot be null");
    }
    criteria.add(new Criterion(condition));
}

/** Adds a single-value condition (e.g. "col ="); the value must be non-null. */
protected void addCriterion(String condition, Object value, String property) {
    if (value == null) {
        throw new RuntimeException("Value for " + property + " cannot be null");
    }
    criteria.add(new Criterion(condition, value));
}

/** Adds a two-value (BETWEEN) condition; both bounds must be non-null. */
protected void addCriterion(String condition, Object value1, Object value2, String property) {
    if (value1 == null || value2 == null) {
        throw new RuntimeException("Between values for " + property + " cannot be null");
    }
    criteria.add(new Criterion(condition, value1, value2));
}
public Criteria andBallotIdIsNull() {
addCriterion("ballot_id is null");
return (Criteria) this;
}
public Criteria andBallotIdIsNotNull() {
addCriterion("ballot_id is not null");
return (Criteria) this;
}
public Criteria andBallotIdEqualTo(String value) {
addCriterion("ballot_id =", value, "ballotId");
return (Criteria) this;
}
public Criteria andBallotIdNotEqualTo(String value) {
addCriterion("ballot_id <>", value, "ballotId");
return (Criteria) this;
}
public Criteria andBallotIdGreaterThan(String value) {
addCriterion("ballot_id >", value, "ballotId");
return (Criteria) this;
}
public Criteria andBallotIdGreaterThanOrEqualTo(String value) {
addCriterion("ballot_id >=", value, "ballotId");
return (Criteria) this;
}
public Criteria andBallotIdLessThan(String value) {
addCriterion("ballot_id <", value, "ballotId");
return (Criteria) this;
}
public Criteria andBallotIdLessThanOrEqualTo(String value) {
addCriterion("ballot_id <=", value, "ballotId");
return (Criteria) this;
}
public Criteria andBallotIdLike(String value) {
addCriterion("ballot_id like", value, "ballotId");
return (Criteria) this;
}
public Criteria andBallotIdNotLike(String value) {
addCriterion("ballot_id not like", value, "ballotId");
return (Criteria) this;
}
public Criteria andBallotIdIn(List<String> values) {
addCriterion("ballot_id in", values, "ballotId");
return (Criteria) this;
}
public Criteria andBallotIdNotIn(List<String> values) {
addCriterion("ballot_id not in", values, "ballotId");
return (Criteria) this;
}
public Criteria andBallotIdBetween(String value1, String value2) {
addCriterion("ballot_id between", value1, value2, "ballotId");
return (Criteria) this;
}
public Criteria andBallotIdNotBetween(String value1, String value2) {
addCriterion("ballot_id not between", value1, value2, "ballotId");
return (Criteria) this;
}
public Criteria andGameIdIsNull() {
addCriterion("game_id is null");
return (Criteria) this;
}
public Criteria andGameIdIsNotNull() {
addCriterion("game_id is not null");
return (Criteria) this;
}
public Criteria andGameIdEqualTo(String value) {
addCriterion("game_id =", value, "gameId");
return (Criteria) this;
}
public Criteria andGameIdNotEqualTo(String value) {
addCriterion("game_id <>", value, "gameId");
return (Criteria) this;
}
public Criteria andGameIdGreaterThan(String value) {
addCriterion("game_id >", value, "gameId");
return (Criteria) this;
}
public Criteria andGameIdGreaterThanOrEqualTo(String value) {
addCriterion("game_id >=", value, "gameId");
return (Criteria) this;
}
public Criteria andGameIdLessThan(String value) {
addCriterion("game_id <", value, "gameId");
return (Criteria) this;
}
public Criteria andGameIdLessThanOrEqualTo(String value) {
addCriterion("game_id <=", value, "gameId");
return (Criteria) this;
}
public Criteria andGameIdLike(String value) {
addCriterion("game_id like", value, "gameId");
return (Criteria) this;
}
public Criteria andGameIdNotLike(String value) {
addCriterion("game_id not like", value, "gameId");
return (Criteria) this;
}
public Criteria andGameIdIn(List<String> values) {
addCriterion("game_id in", values, "gameId");
return (Criteria) this;
}
public Criteria andGameIdNotIn(List<String> values) {
addCriterion("game_id not in", values, "gameId");
return (Criteria) this;
}
public Criteria andGameIdBetween(String value1, String value2) {
addCriterion("game_id between", value1, value2, "gameId");
return (Criteria) this;
}
public Criteria andGameIdNotBetween(String value1, String value2) {
addCriterion("game_id not between", value1, value2, "gameId");
return (Criteria) this;
}
public Criteria andStartTimeIsNull() {
addCriterion("start_time is null");
return (Criteria) this;
}
public Criteria andStartTimeIsNotNull() {
addCriterion("start_time is not null");
return (Criteria) this;
}
public Criteria andStartTimeEqualTo(String value) {
addCriterion("start_time =", value, "startTime");
return (Criteria) this;
}
public Criteria andStartTimeNotEqualTo(String value) {
addCriterion("start_time <>", value, "startTime");
return (Criteria) this;
}
public Criteria andStartTimeGreaterThan(String value) {
addCriterion("start_time >", value, "startTime");
return (Criteria) this;
}
public Criteria andStartTimeGreaterThanOrEqualTo(String value) {
addCriterion("start_time >=", value, "startTime");
return (Criteria) this;
}
public Criteria andStartTimeLessThan(String value) {
addCriterion("start_time <", value, "startTime");
return (Criteria) this;
}
public Criteria andStartTimeLessThanOrEqualTo(String value) {
addCriterion("start_time <=", value, "startTime");
return (Criteria) this;
}
public Criteria andStartTimeLike(String value) {
addCriterion("start_time like", value, "startTime");
return (Criteria) this;
}
public Criteria andStartTimeNotLike(String value) {
addCriterion("start_time not like", value, "startTime");
return (Criteria) this;
}
public Criteria andStartTimeIn(List<String> values) {
addCriterion("start_time in", values, "startTime");
return (Criteria) this;
}
public Criteria andStartTimeNotIn(List<String> values) {
addCriterion("start_time not in", values, "startTime");
return (Criteria) this;
}
public Criteria andStartTimeBetween(String value1, String value2) {
addCriterion("start_time between", value1, value2, "startTime");
return (Criteria) this;
}
public Criteria andStartTimeNotBetween(String value1, String value2) {
addCriterion("start_time not between", value1, value2, "startTime");
return (Criteria) this;
}
public Criteria andEndTimeIsNull() {
addCriterion("end_time is null");
return (Criteria) this;
}
public Criteria andEndTimeIsNotNull() {
addCriterion("end_time is not null");
return (Criteria) this;
}
public Criteria andEndTimeEqualTo(String value) {
addCriterion("end_time =", value, "endTime");
return (Criteria) this;
}
public Criteria andEndTimeNotEqualTo(String value) {
addCriterion("end_time <>", value, "endTime");
return (Criteria) this;
}
public Criteria andEndTimeGreaterThan(String value) {
addCriterion("end_time >", value, "endTime");
return (Criteria) this;
}
public Criteria andEndTimeGreaterThanOrEqualTo(String value) {
addCriterion("end_time >=", value, "endTime");
return (Criteria) this;
}
public Criteria andEndTimeLessThan(String value) {
addCriterion("end_time <", value, "endTime");
return (Criteria) this;
}
public Criteria andEndTimeLessThanOrEqualTo(String value) {
addCriterion("end_time <=", value, "endTime");
return (Criteria) this;
}
public Criteria andEndTimeLike(String value) {
addCriterion("end_time like", value, "endTime");
return (Criteria) this;
}
public Criteria andEndTimeNotLike(String value) {
addCriterion("end_time not like", value, "endTime");
return (Criteria) this;
}
public Criteria andEndTimeIn(List<String> values) {
addCriterion("end_time in", values, "endTime");
return (Criteria) this;
}
public Criteria andEndTimeNotIn(List<String> values) {
addCriterion("end_time not in", values, "endTime");
return (Criteria) this;
}
public Criteria andEndTimeBetween(String value1, String value2) {
addCriterion("end_time between", value1, value2, "endTime");
return (Criteria) this;
}
public Criteria andEndTimeNotBetween(String value1, String value2) {
addCriterion("end_time not between", value1, value2, "endTime");
return (Criteria) this;
}
public Criteria andTitleIsNull() {
addCriterion("title is null");
return (Criteria) this;
}
public Criteria andTitleIsNotNull() {
addCriterion("title is not null");
return (Criteria) this;
}
public Criteria andTitleEqualTo(String value) {
addCriterion("title =", value, "title");
return (Criteria) this;
}
public Criteria andTitleNotEqualTo(String value) {
addCriterion("title <>", value, "title");
return (Criteria) this;
}
public Criteria andTitleGreaterThan(String value) {
addCriterion("title >", value, "title");
return (Criteria) this;
}
public Criteria andTitleGreaterThanOrEqualTo(String value) {
addCriterion("title >=", value, "title");
return (Criteria) this;
}
public Criteria andTitleLessThan(String value) {
addCriterion("title <", value, "title");
return (Criteria) this;
}
public Criteria andTitleLessThanOrEqualTo(String value) {
addCriterion("title <=", value, "title");
return (Criteria) this;
}
public Criteria andTitleLike(String value) {
addCriterion("title like", value, "title");
return (Criteria) this;
}
public Criteria andTitleNotLike(String value) {
addCriterion("title not like", value, "title");
return (Criteria) this;
}
public Criteria andTitleIn(List<String> values) {
addCriterion("title in", values, "title");
return (Criteria) this;
}
public Criteria andTitleNotIn(List<String> values) {
addCriterion("title not in", values, "title");
return (Criteria) this;
}
public Criteria andTitleBetween(String value1, String value2) {
addCriterion("title between", value1, value2, "title");
return (Criteria) this;
}
public Criteria andTitleNotBetween(String value1, String value2) {
addCriterion("title not between", value1, value2, "title");
return (Criteria) this;
}
public Criteria andOpenIdIsNull() {
addCriterion("open_id is null");
return (Criteria) this;
}
public Criteria andOpenIdIsNotNull() {
addCriterion("open_id is not null");
return (Criteria) this;
}
public Criteria andOpenIdEqualTo(String value) {
addCriterion("open_id =", value, "openId");
return (Criteria) this;
}
public Criteria andOpenIdNotEqualTo(String value) {
addCriterion("open_id <>", value, "openId");
return (Criteria) this;
}
public Criteria andOpenIdGreaterThan(String value) {
addCriterion("open_id >", value, "openId");
return (Criteria) this;
}
public Criteria andOpenIdGreaterThanOrEqualTo(String value) {
addCriterion("open_id >=", value, "openId");
return (Criteria) this;
}
public Criteria andOpenIdLessThan(String value) {
addCriterion("open_id <", value, "openId");
return (Criteria) this;
}
public Criteria andOpenIdLessThanOrEqualTo(String value) {
addCriterion("open_id <=", value, "openId");
return (Criteria) this;
}
public Criteria andOpenIdLike(String value) {
addCriterion("open_id like", value, "openId");
return (Criteria) this;
}
public Criteria andOpenIdNotLike(String value) {
addCriterion("open_id not like", value, "openId");
return (Criteria) this;
}
public Criteria andOpenIdIn(List<String> values) {
addCriterion("open_id in", values, "openId");
return (Criteria) this;
}
public Criteria andOpenIdNotIn(List<String> values) {
addCriterion("open_id not in", values, "openId");
return (Criteria) this;
}
public Criteria andOpenIdBetween(String value1, String value2) {
addCriterion("open_id between", value1, value2, "openId");
return (Criteria) this;
}
public Criteria andOpenIdNotBetween(String value1, String value2) {
addCriterion("open_id not between", value1, value2, "openId");
return (Criteria) this;
}
public Criteria andLab1IsNull() {
addCriterion("lab1 is null");
return (Criteria) this;
}
public Criteria andLab1IsNotNull() {
addCriterion("lab1 is not null");
return (Criteria) this;
}
public Criteria andLab1EqualTo(String value) {
addCriterion("lab1 =", value, "lab1");
return (Criteria) this;
}
public Criteria andLab1NotEqualTo(String value) {
addCriterion("lab1 <>", value, "lab1");
return (Criteria) this;
}
public Criteria andLab1GreaterThan(String value) {
addCriterion("lab1 >", value, "lab1");
return (Criteria) this;
}
public Criteria andLab1GreaterThanOrEqualTo(String value) {
addCriterion("lab1 >=", value, "lab1");
return (Criteria) this;
}
public Criteria andLab1LessThan(String value) {
addCriterion("lab1 <", value, "lab1");
return (Criteria) this;
}
public Criteria andLab1LessThanOrEqualTo(String value) {
addCriterion("lab1 <=", value, "lab1");
return (Criteria) this;
}
public Criteria andLab1Like(String value) {
addCriterion("lab1 like", value, "lab1");
return (Criteria) this;
}
public Criteria andLab1NotLike(String value) {
addCriterion("lab1 not like", value, "lab1");
return (Criteria) this;
}
public Criteria andLab1In(List<String> values) {
addCriterion("lab1 in", values, "lab1");
return (Criteria) this;
}
public Criteria andLab1NotIn(List<String> values) {
addCriterion("lab1 not in", values, "lab1");
return (Criteria) this;
}
public Criteria andLab1Between(String value1, String value2) {
addCriterion("lab1 between", value1, value2, "lab1");
return (Criteria) this;
}
public Criteria andLab1NotBetween(String value1, String value2) {
addCriterion("lab1 not between", value1, value2, "lab1");
return (Criteria) this;
}
public Criteria andLab2IsNull() {
addCriterion("lab2 is null");
return (Criteria) this;
}
public Criteria andLab2IsNotNull() {
addCriterion("lab2 is not null");
return (Criteria) this;
}
public Criteria andLab2EqualTo(String value) {
addCriterion("lab2 =", value, "lab2");
return (Criteria) this;
}
public Criteria andLab2NotEqualTo(String value) {
addCriterion("lab2 <>", value, "lab2");
return (Criteria) this;
}
public Criteria andLab2GreaterThan(String value) {
addCriterion("lab2 >", value, "lab2");
return (Criteria) this;
}
public Criteria andLab2GreaterThanOrEqualTo(String value) {
addCriterion("lab2 >=", value, "lab2");
return (Criteria) this;
}
public Criteria andLab2LessThan(String value) {
addCriterion("lab2 <", value, "lab2");
return (Criteria) this;
}
public Criteria andLab2LessThanOrEqualTo(String value) {
addCriterion("lab2 <=", value, "lab2");
return (Criteria) this;
}
public Criteria andLab2Like(String value) {
addCriterion("lab2 like", value, "lab2");
return (Criteria) this;
}
public Criteria andLab2NotLike(String value) {
addCriterion("lab2 not like", value, "lab2");
return (Criteria) this;
}
public Criteria andLab2In(List<String> values) {
addCriterion("lab2 in", values, "lab2");
return (Criteria) this;
}
public Criteria andLab2NotIn(List<String> values) {
addCriterion("lab2 not in", values, "lab2");
return (Criteria) this;
}
public Criteria andLab2Between(String value1, String value2) {
addCriterion("lab2 between", value1, value2, "lab2");
return (Criteria) this;
}
public Criteria andLab2NotBetween(String value1, String value2) {
addCriterion("lab2 not between", value1, value2, "lab2");
return (Criteria) this;
}
}
/** Concrete criteria group; all behavior lives in GeneratedCriteria. */
public static class Criteria extends GeneratedCriteria {
    protected Criteria() {
        super();
    }
}

/**
 * One SQL condition fragment plus its operand(s). Exactly one of the
 * noValue / singleValue / betweenValue / listValue flags is set, telling
 * the mapper how to render the fragment.
 */
public static class Criterion {
    private String condition;      // SQL fragment, e.g. "ballot_id ="
    private Object value;          // first (or only) operand
    private Object secondValue;    // upper bound for BETWEEN conditions
    private boolean noValue;       // condition has no operand (is null / is not null)
    private boolean singleValue;   // condition has exactly one operand
    private boolean betweenValue;  // condition has two operands (BETWEEN)
    private boolean listValue;     // operand is a List (IN / NOT IN)
    private String typeHandler;    // optional MyBatis type handler name

    public String getCondition() {
        return condition;
    }

    public Object getValue() {
        return value;
    }

    public Object getSecondValue() {
        return secondValue;
    }

    public boolean isNoValue() {
        return noValue;
    }

    public boolean isSingleValue() {
        return singleValue;
    }

    public boolean isBetweenValue() {
        return betweenValue;
    }

    public boolean isListValue() {
        return listValue;
    }

    public String getTypeHandler() {
        return typeHandler;
    }

    /** Value-less condition (e.g. "col is null"). */
    protected Criterion(String condition) {
        super();
        this.condition = condition;
        this.typeHandler = null;
        this.noValue = true;
    }

    /** Single-value condition; a List value marks it as an IN-style condition. */
    protected Criterion(String condition, Object value, String typeHandler) {
        super();
        this.condition = condition;
        this.value = value;
        this.typeHandler = typeHandler;
        if (value instanceof List<?>) {
            this.listValue = true;
        } else {
            this.singleValue = true;
        }
    }

    protected Criterion(String condition, Object value) {
        this(condition, value, null);
    }

    /** Two-value (BETWEEN) condition. */
    protected Criterion(String condition, Object value, Object secondValue, String typeHandler) {
        super();
        this.condition = condition;
        this.value = value;
        this.secondValue = secondValue;
        this.typeHandler = typeHandler;
        this.betweenValue = true;
    }

    protected Criterion(String condition, Object value, Object secondValue) {
        this(condition, value, secondValue, null);
    }
}
} |
<gh_stars>0
import glob
import os
import subprocess
import time
import click
class Downloader(object):
    """Sequentially downloads videos listed in a batch file via youtube-dl.

    The batch file holds whitespace-separated URLs; finished URLs are
    commented out with a leading '#' so an interrupted run can resume
    where it left off.
    """

    def __init__(self, batch_file):
        # Path to the URL list; may be None when only download_video() is used.
        self.batch_file = batch_file

    def batch_download(self):
        """Download every uncommented URL, commenting each out once done."""
        with open(self.batch_file) as f:
            urls = f.read().split()
        for url in urls:
            if url == "" or url.startswith("#"):
                continue  # blank entry or already finished
            self.download_video(url)
            self.update_batch_file(url)

    def download_video(self, url):
        """Run youtube-dl for one URL, retrying up to 10 times with linear backoff."""
        click.echo(click.style(f"Start downloading {url}", fg="green"))
        for retry in range(10):
            try:
                # check=True raises CalledProcessError on a non-zero exit.
                subprocess.run(["youtube-dl", url], check=True)
                break
            except subprocess.CalledProcessError:
                # Was a bare `except:`, which also swallowed KeyboardInterrupt;
                # now only download failures trigger a retry (0s, 10s, 20s, ...).
                time.sleep(10 * retry)

    def update_batch_file(self, finished_url):
        """Rewrite the batch file with the finished URL commented out."""
        with open(self.batch_file) as f:
            urls = f.read().split()
        with open(self.batch_file, "w") as f:
            for url in urls:
                if url == finished_url:
                    url = "#" + url
                f.write(url)
                f.write("\n")
@click.command()
@click.option("-a", "--batch-file")
@click.option("-u", "--url")
def cli(batch_file, url):
    """Download a single --url, or every URL listed in --batch-file.

    Bug fixed: the original raised BadParameter("Invalid url: ...")
    unconditionally whenever --url was given, making the single-URL path
    always fail and leaving its download_video() call unreachable.
    """
    if batch_file is None and url is None:
        raise click.BadParameter(
            "You must provide either the --batch-file option or the --url option"
        )
    dl = Downloader(batch_file)
    if url is not None:
        dl.download_video(url)
    else:
        dl.batch_download()
if __name__ == "__main__":
cli()
|
#!/usr/bin/env bash
# Serve the WSGI app (wsgi:app) with gunicorn, using settings from gconfig.py.
gunicorn wsgi:app -c gconfig.py
import React, { Component } from 'react';
import { LOGO_URL, GITHUB_RIBBON_URL, RESPOSITORY_URL, GITHUB_PAGE } from '../../configs/configs';
import './AsideHeader.css';
/**
 * Static sidebar header: logo, GitHub "fork me" ribbon, page title and a
 * short intro line. All URLs come from configs/configs.js; the component
 * holds no state and takes no props.
 */
class AsideHeader extends Component {
    render() {
        return (
            <header className="aside-header">
                <img src={ LOGO_URL } alt="Logo" width="200" />
                {/* NOTE(review): target="_blank" without rel="noopener noreferrer"
                    exposes window.opener to the linked page — consider adding
                    rel; rendered output deliberately left unchanged here. */}
                <a href={ RESPOSITORY_URL } target="_blank">
                    <img className="github-ribbon" src={ GITHUB_RIBBON_URL } alt="Fork me on GitHub" />
                </a>
                <h1>Github Emojis Cheatsheet</h1>
                <p>Hey! I'm <a href={ GITHUB_PAGE } target="_blank">@KutieKat</a> and this is the most awesome cheatsheet I've written <img src="https://assets-cdn.github.com/images/icons/emoji/unicode/1f61c.png" width="15" /></p>
            </header>
        );
    }
}
export default AsideHeader; |
package de.mirkosertic.ddd.validation;
import de.mirkosertic.ddd.annotation.ValueObject;
import javax.annotation.processing.AbstractProcessor;
import javax.annotation.processing.RoundEnvironment;
import javax.lang.model.SourceVersion;
import javax.lang.model.element.Element;
import javax.lang.model.element.Modifier;
import javax.lang.model.element.TypeElement;
import javax.lang.model.type.DeclaredType;
import javax.lang.model.type.TypeKind;
import javax.lang.model.type.TypeMirror;
import javax.tools.Diagnostic;
import java.util.HashSet;
import java.util.Set;
import static de.mirkosertic.ddd.validation.ModelHelper.*;
/**
 * Annotation processor validating classes annotated with {@code @ValueObject}:
 * every field must be final, and every field whose type is not from the JDK
 * ({@code java.*}) and not an enum must itself be a {@code @ValueObject}.
 * Violations are reported as compiler errors.
 */
public class ValueObjectValidator extends AbstractProcessor {

    @Override
    public SourceVersion getSupportedSourceVersion() {
        // Accept whatever source level the running compiler offers.
        return SourceVersion.latestSupported();
    }

    @Override
    public Set<String> getSupportedAnnotationTypes() {
        Set<String> theResult = new HashSet<>();
        theResult.add(ValueObject.class.getName());
        return theResult;
    }

    @Override
    public boolean process(Set<? extends TypeElement> aAnnotations, RoundEnvironment aEnvironment) {
        // Validate every element carrying one of the supported annotations.
        for (TypeElement theAnnotation : aAnnotations) {
            for (Element theElement : aEnvironment.getElementsAnnotatedWith(theAnnotation)) {
                validate(theElement, aEnvironment);
            }
        }
        // false: the annotation is not claimed, so other processors may run too.
        return false;
    }

    /**
     * Emits a compiler error for each contract violation on {@code aElement}.
     * java.* field types are trusted as-is; enums are accepted; any other
     * declared type must carry {@code @ValueObject} itself.
     */
    private void validate(Element aElement, RoundEnvironment aEnvironment) {
        if (!isClass(aElement)) {
            processingEnv.getMessager().printMessage(Diagnostic.Kind.ERROR, "Only classes can be annotated that way");
        }
        for (Element theEnclosedElement : getFields(aElement)) {
            if (!hasModifier(theEnclosedElement, Modifier.FINAL)) {
                processingEnv.getMessager().printMessage(Diagnostic.Kind.ERROR, "Field " + theEnclosedElement.getSimpleName()+" is not final!");
            }
            TypeMirror theTypeMirror = theEnclosedElement.asType();
            if (theTypeMirror.toString().startsWith("java.")) {
                // JDK types (String, BigDecimal, ...) are accepted without inspection.
                continue;
            } else {
                if (TypeKind.DECLARED == theTypeMirror.getKind()) {
                    DeclaredType theDeclaredType = (DeclaredType) theTypeMirror;
                    Element theDeclaredElement = theDeclaredType.asElement();
                    if (!isEnum(theDeclaredElement)) {
                        if (theDeclaredElement.getAnnotation(ValueObject.class) == null) {
                            processingEnv.getMessager().printMessage(Diagnostic.Kind.ERROR,
                                "Field " + theEnclosedElement.getSimpleName() + " is not a ValueObject!");
                        }
                    }
                }
            }
        }
    }
}
|
#!/usr/bin/env bash
# Process image reels 221..260 through processBNIImages.py, one reel at a time.
# set -e aborts the whole run as soon as any reel fails.
set -e
cur_reel=221
end_reel=260
while [ $cur_reel -le $end_reel ]; do
  echo "Starting Reel : $cur_reel"
  # NOTE(review): --next=000$cur_reel prepends three literal zeros (e.g. 000221,
  # six digits) rather than zero-padding to a fixed width — confirm the tool
  # expects exactly this format.
  python processBNIImages.py --source=/home/imaging/Mike_ready_for_input/$cur_reel --target=/mnt/amazons3fs --bni=/home/imaging/tmp_output/bni-bni --lib=/home/imaging/tmp_output/bni-lib --next=000$cur_reel
  cur_reel=$(($cur_reel+1))
done
|
/******************************************************************************
* Copyright 2011 Kitware Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*****************************************************************************/
#include "mdoAssetstore.h"
namespace mdo
{

/** Constructs a local assetstore with deterministic defaults.
 *  Fix: m_Id and m_Enabled were previously left uninitialized, so GetId()
 *  and IsEnabled() returned indeterminate values until the setters ran. */
Assetstore::Assetstore()
  : m_Type(ASSETSTORE_LOCAL)
{
  m_Id = 0;          // 0 = "no server id assigned yet"
  m_Enabled = false; // disabled until the server says otherwise
}

Assetstore::~Assetstore()
{
}

// Set/Get the server-side id of the assetstore
void Assetstore::SetId(int id)
{
  m_Id = id;
}

int Assetstore::GetId()
{
  return m_Id;
}

// Set/Get whether this assetstore is enabled on the server
void Assetstore::SetEnabled(bool val)
{
  m_Enabled = val;
}

bool Assetstore::IsEnabled()
{
  return m_Enabled;
}

// Set/Get the human-readable name of the assetstore
void Assetstore::SetName(std::string name)
{
  m_Name = name;
}

std::string & Assetstore::GetName()
{
  return m_Name;
}

// Set/Get the path or URL of the assetstore
void Assetstore::SetPath(std::string path)
{
  m_Path = path;
}

std::string Assetstore::GetPath()
{
  return m_Path;
}

// Set/Get the type of the assetstore
void Assetstore::SetType(AssetstoreType type)
{
  m_Type = type;
}

Assetstore::AssetstoreType Assetstore::GetType()
{
  return m_Type;
}

} // end namespace mdo
|
import re
from typing import Dict, Set
from gwv.dump import Dump
import gwv.filters as filters
from gwv.helper import cjk_sources
from gwv.helper import GWGroupLazyLoader
from gwv.helper import load_package_data
from gwv.helper import RE_REGIONS
from gwv.kagedata import KageData
from gwv.validators import ErrorCodes
from gwv.validators import Validator
# Validation error codes for the "j" validator; the inline notes translate
# the original Japanese descriptions.
error_codes = ErrorCodes(
    J_NOMARK_DIFFERENT="0",  # entity of uxxxx-j/ja/jv differs from the unsuffixed glyph's entity
    J_JV_COEXISTENT="1",  # uxxxx-j(a) and uxxxx-jv coexist
    NONJV_PART="2",  # uses a part whose shape is not allowed in -jv glyphs
    JV_HAS_JSOURCE="30",  # named -jv although a J source exists
    KV_HAS_KSOURCE="31",  # named -kv although a K source exists
    NO_SOURCE="40",  # region suffix with no corresponding source
    NO_SOURCE_HENKA="41",  # region suffix with no corresponding source (henka variant)
    JV_SOURCE_SEPARATION="5",  # -jv used on a glyph in the source-separation group
)

# Glyph group "原規格分離" (source separation), lazily loaded as a set.
source_separation = GWGroupLazyLoader("原規格分離", isset=True)

# Matches a region suffix with an optional two-digit henka (variation) number.
_re_region_opthenka = re.compile(r"^(" + RE_REGIONS + r")(\d{2})?$")
class JValidator(Validator):
    """Validates J-region naming conventions (-j/-ja/-jv suffixes) of glyphs."""

    name = "j"

    def __init__(self):
        Validator.__init__(self)
        # Maps each disallowed-in-jv part (and its aliases) to the part to use instead.
        self.jv_no_use_part_replacement: Dict[str, str] = {}
        # Parts (and aliases) whose presence exempts a glyph from -jv checks.
        self.jv_no_apply_parts: Set[str] = set()

    def setup(self, dump: Dump):
        """Builds the lookup tables from data/jv.yaml against the given dump."""
        jv_data = load_package_data("data/jv.yaml")
        self.jv_no_use_part_replacement = {
            no_use_alias: use
            for use, no_uses in jv_data["no-use-part"].items()
            for no_use in no_uses
            for no_use_alias in dump.get_alias_of(no_use)
        }
        re_no_apply_jv = re.compile(
            r"(" + r"|".join(jv_data["no-apply-jv"]) +
            r")(-(" + RE_REGIONS + r")(\d{2})?$|(-\d{2})?(-var-\d{3})?)$")
        self.jv_no_apply_parts = {
            part_alias
            for part in dump if re_no_apply_jv.match(part)
            for part_alias in dump.get_alias_of(part)
        }

    def checkJV(self, kage: KageData):
        """Checks a glyph's referenced parts (stroke type 99) for -jv validity.

        Returns False when OK, or [NONJV_PART, part, replacement] when a part
        disallowed in -jv glyphs is used.
        """
        used_parts = [kageline.part_name.split("@")[0]
                      for kageline in kage.lines if kageline.stroke_type == 99]
        if any(part in self.jv_no_apply_parts for part in used_parts):
            return False  # shape specific to simplified Chinese; jv rules do not apply
        for part in used_parts:
            if part in self.jv_no_use_part_replacement:
                # A part whose shape must not be used in -jv glyphs is referenced.
                return [
                    error_codes.NONJV_PART,
                    part, self.jv_no_use_part_replacement[part]
                ]
        return False

    @filters.check_only(+filters.is_of_category({
        "togo", "togo-var", "gokan-var", "ext", "bsh"}))
    def is_invalid(self, name: str, related: str, kage: KageData, gdata: str,
                   dump: Dump):
        """Returns an error-code list for a J-convention violation, else False."""
        splitname = name.split("-")
        if len(splitname) == 3 and splitname[:2] == ["unstable", "bsh"]:
            return self.checkJV(kage.get_entity(dump))
        if len(splitname) > 2:
            return False
        if splitname[0] in ("irg2015", "irg2017"):
            # irg2015-, irg2017- glyphs have no J source
            return self.checkJV(kage.get_entity(dump))
        # uXXXX, uXXXX-...
        ucs = splitname[0]
        jsource = cjk_sources.get(ucs, cjk_sources.COLUMN_J)
        if len(splitname) == 1:  # unsuffixed glyph
            if jsource is None and ucs not in self.jv_no_apply_parts and \
                    ucs not in source_separation.get_data():
                return self.checkJV(kage.get_entity(dump))
            return False
        m = _re_region_opthenka.match(splitname[1])
        if not m:
            return False
        region = m.group(1)
        isHenka = m.group(2) is not None
        # Check sources
        if region == "jv":
            if jsource is not None:
                return [error_codes.JV_HAS_JSOURCE, jsource]  # -jv despite a J source
            if ucs in source_separation.get_data():
                return [error_codes.JV_SOURCE_SEPARATION]  # -jv on a source-separation glyph
        elif region == "kv":
            ksource = cjk_sources.get(ucs, cjk_sources.COLUMN_K)
            if ksource is not None:
                return [error_codes.KV_HAS_KSOURCE, ksource]  # -kv despite a K source
        elif region in ("gv", "tv", "vv"):
            # TODO
            return False
        else:  # not a virtual-shape region
            if region in ("j", "ja"):
                if jsource is None:
                    # region suffix with no corresponding source
                    return [error_codes.NO_SOURCE_HENKA
                            if isHenka else error_codes.NO_SOURCE]
            elif region in cjk_sources.region2index:
                source = cjk_sources.get(ucs, cjk_sources.region2index[region])
                if source is None:
                    # region suffix with no corresponding source
                    return [error_codes.NO_SOURCE_HENKA
                            if isHenka else error_codes.NO_SOURCE]
            else:  # -i, -us, -js
                return False
        if region not in ("j", "ja", "jv"):
            return False
        # Compare the glyph's entity against the unsuffixed glyph's entity.
        if kage.is_alias:
            entity_name = kage.lines[0].part_name
        else:
            entity_name = name
        if ucs not in dump:
            return False  # unsuffixed glyph not found
        nomark_kage = KageData(dump[ucs][1])
        if nomark_kage.is_alias:
            nomark_entity_name = nomark_kage.lines[0].part_name
        else:
            nomark_entity_name = ucs
        if entity_name != nomark_entity_name and not isHenka:
            # entity of uxxxx-j/ja/jv differs from the unsuffixed glyph's entity
            return [error_codes.J_NOMARK_DIFFERENT]
        if region != "jv":
            return False
        if (ucs + "-j") in dump:
            # uxxxx-jv and uxxxx-j coexist
            return [error_codes.J_JV_COEXISTENT, "j"]
        if (ucs + "-ja") in dump:
            # uxxxx-jv and uxxxx-ja coexist
            return [error_codes.J_JV_COEXISTENT, "ja"]
        if ucs not in self.jv_no_apply_parts:
            return self.checkJV(kage.get_entity(dump))
        return False
|
/*
* Copyright 2019 Wultra s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.getlime.security.powerauth.lib.dataadapter.model.response;
import io.getlime.security.powerauth.lib.dataadapter.model.enumeration.CertificateAuthenticationMode;
/**
* Response for initialization of an authentication method.
*
* @author <NAME>, <EMAIL>
*/
public class InitAuthMethodResponse {

    // How the client certificate is to be used during authentication.
    private CertificateAuthenticationMode certificateAuthenticationMode;
    // URL the client should call to verify the certificate; may be null.
    private String certificateVerificationUrl;

    /**
     * Default constructor.
     */
    public InitAuthMethodResponse() {
    }

    /**
     * Constructor with certificate authentication mode parameter.
     * @param certificateAuthenticationMode Certificate authentication mode.
     */
    public InitAuthMethodResponse(CertificateAuthenticationMode certificateAuthenticationMode) {
        this.certificateAuthenticationMode = certificateAuthenticationMode;
    }

    /**
     * Constructor with all parameters.
     * @param certificateAuthenticationMode Certificate authentication mode.
     * @param certificateVerificationUrl Certificate verification URL.
     */
    public InitAuthMethodResponse(CertificateAuthenticationMode certificateAuthenticationMode, String certificateVerificationUrl) {
        this.certificateAuthenticationMode = certificateAuthenticationMode;
        this.certificateVerificationUrl = certificateVerificationUrl;
    }

    /**
     * Get the certificate verification mode.
     * @return Certificate verification mode.
     */
    public CertificateAuthenticationMode getCertificateAuthenticationMode() {
        return certificateAuthenticationMode;
    }

    /**
     * Set the certificate verification mode.
     * @param certificateAuthenticationMode Certificate verification mode.
     */
    public void setCertificateAuthenticationMode(CertificateAuthenticationMode certificateAuthenticationMode) {
        this.certificateAuthenticationMode = certificateAuthenticationMode;
    }

    /**
     * Get the certificate verification URL.
     * @return Certificate verification URL.
     */
    public String getCertificateVerificationUrl() {
        return certificateVerificationUrl;
    }

    /**
     * Set the certificate verification URL.
     * @param certificateVerificationUrl Certificate verification URL.
     */
    public void setCertificateVerificationUrl(String certificateVerificationUrl) {
        this.certificateVerificationUrl = certificateVerificationUrl;
    }
}
from bs4 import BeautifulSoup
from typing import List
def extract_js_paths(html: str) -> List[str]:
    """Return the unique ``src`` values of all ``<script>`` tags in ``html``.

    Args:
        html: Raw HTML document text.

    Returns:
        De-duplicated script source paths, in first-seen document order.
    """
    soup = BeautifulSoup(html, 'html.parser')
    script_tags = soup.find_all('script', src=True)
    # dict.fromkeys de-duplicates while keeping first-seen order; the previous
    # list(set(...)) returned the paths in nondeterministic order.
    return list(dict.fromkeys(tag['src'] for tag in script_tags))
<filename>core/api/src/test/java/org/onosproject/net/topology/LinkWeigherAdapter.java
/*
* Copyright 2018-present Open Networking Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.net.topology;
import org.onlab.graph.ScalarWeight;
import org.onlab.graph.Weight;
/**
 * Testing adapter that implements {@link LinkWeigher} by assigning the same
 * constant scalar weight to every topology edge.
 */
public class LinkWeigherAdapter implements LinkWeigher {
    // Constant weight reported for every edge.
    final double weight;

    /**
     * Creates an adapter that weighs every edge with the given value.
     *
     * @param weight constant edge weight; auto-unboxed, so a null argument
     *               results in a NullPointerException
     */
    public LinkWeigherAdapter(Double weight) {
        this.weight = weight;
    }

    /** Returns the configured constant weight, regardless of the edge. */
    @Override
    public Weight weight(TopologyEdge edge) {
        return ScalarWeight.toWeight(weight);
    }

    /** Returns a zero scalar as the initial weight. */
    @Override
    public Weight getInitialWeight() {
        return ScalarWeight.toWeight(0.0);
    }

    /** Returns a zero scalar as the non-viable weight. */
    @Override
    public Weight getNonViableWeight() {
        return ScalarWeight.toWeight(0.0);
    }
}
|
package resolver
import (
"blocky/util"
"testing"
"github.com/miekg/dns"
"github.com/sirupsen/logrus"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/mock"
)
// Test_Resolve_WithStats resolves a query through the stats resolver backed
// by a mocked downstream resolver, then prints the accumulated statistics.
func Test_Resolve_WithStats(t *testing.T) {
	sut := NewStatsResolver()

	// Downstream resolver mock that answers every query with a fixed A record.
	mockResolver := &resolverMock{}
	answer, err := util.NewMsgWithAnswer("example.com. 300 IN A 172.16.31.10")
	assert.NoError(t, err)
	mockResolver.On("Resolve", mock.Anything).Return(&Response{Res: answer, Reason: "reason"}, nil)
	sut.Next(mockResolver)

	req := &Request{
		Req: util.NewMsgWithQuestion("example.com.", dns.TypeA),
		Log: logrus.NewEntry(logrus.New()),
	}
	_, err = sut.Resolve(req)
	assert.NoError(t, err)
	mockResolver.AssertExpectations(t)

	sut.(*StatsResolver).printStats()
}
// Test_Configuration_StatsResolver verifies that the stats resolver reports a
// non-trivial configuration. (Renamed from "Test_Configuration_StatsResolverg"
// to fix the trailing-"g" typo; Go discovers tests by the "Test" prefix, so
// the rename is safe.)
func Test_Configuration_StatsResolver(t *testing.T) {
	sut := NewStatsResolver()
	c := sut.Configuration()
	assert.True(t, len(c) > 1)
}
|
-- Names of employees whose salary exceeds that of their direct manager.
SELECT emp.name
FROM Employees AS emp
JOIN Employees AS mgr
  ON emp.manager_id = mgr.id
WHERE emp.salary > mgr.salary;
import { ReactDOMSelector } from "./selectors"
// bait typescript into thinking this is not reactDOM so no circular dependency.
window.ReactDOM = (window["Reac"+"tDOM"] || // If in Lightcord
(()=>{ // If in Standard BetterDiscord
try{
return window.BdApi.ReactDOM
}catch(e){
return null
}
})() ||
(()=>{ // If in Powercord
try{
const webpack = require("powercord/webpack")
return webpack.ReactDOM
}catch(e){
return null
}
})() ||
(()=>{ // If in EnhancedDiscord
try{
return window.EDApi.ReactDOM
}catch(e){
return null
}
})()) as typeof import("react-dom")
export = (window.ReactDOM as any) |
#!/bin/bash
# Load a YCSB workload into Redis.
# Usage: <script> <data-size-workload-file>
#   $1 names a workload file under workloads/ that sizes the data set.
if [ -z "$1" ]; then
    echo "please specify data size"
else
    # Abort rather than running ycsb from the wrong directory if the cd fails.
    cd /home/epi/YCSB || exit 1
    DATA_SIZE=$1
    bin/ycsb load redis -s -P workloads/workloada -P "workloads/$DATA_SIZE" -p "redis.host=numa-04" -p "redis.port=6379"
fi
|
<reponame>frgomes/sri-mobile-examples
package sri.mobile.examples.uiexplorer.components
import sri.core._
import sri.universal.components._
import sri.universal.styles.InlineStyleSheetUniversal
import scala.scalajs.js
/**
 * UIExplorer page demonstrating the Switch component: basic, disabled,
 * colored, and event-driven variants.
 */
object SwitchExample extends UIExample {

  // Shared two-switch state used by the basic and colored examples.
  case class BasicState(trueSwitchIsOn: Boolean = true,
                        falseSwitchIsOn: Boolean = false)

  object BasicSwitchExample {
    class Component extends ComponentS[BasicState] {
      initialState(BasicState())
      def render() = ViewC(
        Switch(onValueChange = handleFalseSwitch _,
               style = styles.basicFalseSwitch,
               value = state.falseSwitchIsOn),
        Switch(onValueChange = handleTrueSwitch _,
               value = state.trueSwitchIsOn)
      )
      def handleFalseSwitch(value: Boolean) = {
        setState((state: BasicState) => state.copy(falseSwitchIsOn = value))
      }
      def handleTrueSwitch(value: Boolean) = {
        setState((state: BasicState) => state.copy(trueSwitchIsOn = value))
      }
    }
    def apply(key: String = null, ref: js.Function1[Component, Unit] = null) =
      // Fix: forward `ref` — it was previously accepted but silently dropped,
      // unlike the sibling examples below.
      CreateElementNoProps[Component](key = key, ref = ref)
  }

  object DisabledSwitchExample {
    class Component extends ComponentNoPS {
      def render() = ViewC(
        Switch(disabled = true, style = styles.basicFalseSwitch, value = true),
        Switch(disabled = true, value = false)
      )
    }
    def apply(key: String = null, ref: js.Function1[Component, Unit] = null) =
      CreateElementNoProps[Component](key = key, ref = ref)
  }

  object ColorSwitchExample {
    class Component extends ComponentS[BasicState] {
      initialState(BasicState())
      def render() = ViewC(
        Switch(onValueChange = handleFalseSwitch _,
               style = styles.basicFalseSwitch,
               onTintColor = "#00ff00",
               tintColor = "#ff0000",
               value = state.falseSwitchIsOn),
        Switch(onValueChange = handleTrueSwitch _,
               onTintColor = "#00ff00",
               tintColor = "#ff0000",
               value = state.trueSwitchIsOn)
      )
      def handleFalseSwitch(value: Boolean) = {
        setState((state: BasicState) => state.copy(falseSwitchIsOn = value))
      }
      def handleTrueSwitch(value: Boolean) = {
        setState((state: BasicState) => state.copy(trueSwitchIsOn = value))
      }
    }
    def apply(key: String = null, ref: js.Function1[Component, Unit] = null) =
      CreateElementNoProps[Component](key = key, ref = ref)
  }

  object EventSwitchExample {
    // Two pairs of linked switches demonstrate that change events propagate
    // to every control bound to the same state field.
    case class State(eventSwitchIsOn: Boolean = false,
                     eventSwitchRegressionIsOn: Boolean = true)
    class Component extends ComponentS[State] {
      initialState(State())
      def render() = View(style = styles.eventsContainer)(
        ViewC(
          Switch(onValueChange = handleEventSwitch _,
                 style = styles.basicFalseSwitch,
                 value = state.eventSwitchIsOn),
          Switch(onValueChange = handleEventSwitch _,
                 style = styles.basicFalseSwitch,
                 value = state.eventSwitchIsOn),
          TextC(if (state.eventSwitchIsOn) "On" else "Off")
        ),
        ViewC(
          Switch(onValueChange = handleEventSwitchRegression _,
                 style = styles.basicFalseSwitch,
                 value = state.eventSwitchRegressionIsOn),
          Switch(onValueChange = handleEventSwitchRegression _,
                 style = styles.basicFalseSwitch,
                 value = state.eventSwitchRegressionIsOn),
          TextC(if (state.eventSwitchRegressionIsOn) "On" else "Off")
        )
      )
      def handleEventSwitch(value: Boolean) = {
        setState((state: State) => state.copy(eventSwitchIsOn = value))
      }
      def handleEventSwitchRegression(value: Boolean) = {
        setState(
          (state: State) => state.copy(eventSwitchRegressionIsOn = value))
      }
    }
    def apply(key: String = null, ref: js.Function1[Component, Unit] = null) =
      // Fix: forward `ref` — it was previously accepted but silently dropped.
      CreateElementNoProps[Component](key = key, ref = ref)
  }

  val Component = () => {
    UIExplorerPage(
      UIExplorerBlock("Basic Switch")(
        BasicSwitchExample()
      ),
      UIExplorerBlock("Disabled Switches")(
        DisabledSwitchExample()
      ),
      UIExplorerBlock("Colored Switches")(
        ColorSwitchExample()
      ),
      UIExplorerBlock("Change events can be detected")(
        EventSwitchExample()
      )
    )
  }

  val component = () => CreateElementSFNoP(Component)

  object styles extends InlineStyleSheetUniversal {
    import dsl._
    val basicFalseSwitch = style(marginBottom := 10)
    val eventsContainer =
      style(flexDirection.row, justifyContent.spaceAround)
  }

  override def title: String = "Switch"

  override def description: String = "Native boolean input"
}
|
# /*******************************************************
# *
# * Copyright (C) 2015-2016 Kyriakos Naziris <kyriakos@naziris.co.uk>
# * This is a thesis project of University of Portsmouth.
# *
# * This file is part of HomeSecPi.
# *
# * Feel free to use and modify the source code as long as
# * as you give credit to the original author of the
# * project (Kyriakos Naziris - kyriakos@naziris.co.uk).
# *
# *******************************************************/
#!/bin/sh
# NOTE(review): this shebang is not on line 1 of the file (a comment header
# precedes it), so it has no effect; the script runs under the invoking shell.
# launcher.sh
# Wait for the local web service to come up, then start the main script.
# Abort if the project directory is missing instead of running from the wrong cwd.
cd /home/pi/HomeSecPi || exit 1
# Poll the local HTTP endpoint (1s timeout per attempt) until it responds.
while ! curl http://127.0.0.1:8068 -m1 -o/dev/null -s ; do
  sleep 0.1
  echo "Still loading"
done
sudo python startup.py
echo "Startup Initialization done. System Ready!"
|
package io.quarkuscoffeeshop.counter.domain;
/**
 * Lifecycle states of a counter order, from initial placement through
 * fulfillment.
 */
public enum OrderStatus {
    /** The order has been received but preparation has not started. */
    PLACED,
    /** The order is currently being prepared. */
    IN_PROGRESS,
    /** The order has been completed. */
    FULFILLED
}
|
#!/usr/bin/env bash
# Copyright 2021 Couchbase, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Don't set euo pipefail - we want this script to be as resilient as possible
set -x
echo "Starting collect-information.sh..."
tmpdir=${TEMPORARY_DIRECTORY:-$(mktemp -d)}
exec &> >(tee -a "$tmpdir/collect-information.sh.log")
echo "$@" > "$tmpdir/collection-command.txt"
# NOTE(review): most images ship /etc/*-release (without a .txt suffix) -
# confirm the .txt suffix is correct for this container image.
cp /etc/*-release.txt "$tmpdir"
# Environment
env > "$tmpdir/env.txt"
# Running processes
# NOTE(review): bare "ps" only lists processes attached to this terminal;
# confirm whether "ps aux" (all processes) was intended.
ps > "$tmpdir/ps.txt"
# Configuration
mkdir -p "$tmpdir/config"
while IFS= read -r dir; do
  cp -r "$dir" "$tmpdir/config/$(basename "$dir")/"
done <<EOF
/etc/alertmanager/
/etc/prometheus/
/etc/grafana/
/etc/jaeger/
/etc/loki/
/etc/nginx/
/etc/promtail/
EOF
# Grab various overridden paths
mkdir -p "$tmpdir/dynamic-config"
for override_var in PROMETHEUS_CONFIG_FILE PROMETHEUS_CONFIG_TEMPLATE_FILE JAEGER_CONFIG_FILE LOKI_CONFIG_FILE ALERTMANAGER_CONFIG_FILE PROMTAIL_CONFIG_FILE; do
  cp "${!override_var}" "$tmpdir/dynamic-config/$(basename "${!override_var}")"
done
# Entry points
mkdir -p "$tmpdir/entrypoints"
cp /entrypoints/* "$tmpdir/entrypoints/"
# Misc scripts
mkdir -p "$tmpdir/scripts"
cp -r /scripts "$tmpdir/scripts/"
cp /run.sh "$tmpdir/scripts/"
cp /collect-information.sh "$tmpdir/scripts/"
# Grafana plugins
mkdir -p "$tmpdir/grafana-plugins"
for d in /var/lib/grafana/plugins/*; do
  cp "$d/plugin.json" "$tmpdir/grafana-plugins/$(basename "$d").json"
done
# Prometheus stats snapshot
snapshot_file_name=$(curl -sS -X POST http://localhost:9090/prometheus/api/v1/admin/tsdb/snapshot | jq -r '.data.name')
cp -r "$PROMETHEUS_STORAGE_PATH/snapshots/$snapshot_file_name" "$tmpdir/prometheus-snapshot"
# Prom/Loki dynamic endpoints
curl -sS -o "$tmpdir/grafana-frontend-settings.json" "http://localhost:3000/grafana/api/frontend/settings"
curl -sS -o "$tmpdir/loki-buildinfo.json" "http://localhost:3100/loki/api/v1/status/buildinfo"
curl -sS -o "$tmpdir/loki-metrics.json" "http://localhost:3100/metrics"
curl -sS -o "$tmpdir/loki-config.yml" "http://localhost:3100/config"
curl -sS -o "$tmpdir/prom-buildinfo.json" "http://localhost:9090/prometheus/api/v1/status/buildinfo"
curl -sS -o "$tmpdir/prom-runtimeinfo.json" "http://localhost:9090/prometheus/api/v1/status/runtimeinfo"
curl -sS -o "$tmpdir/prom-flags.json" "http://localhost:9090/prometheus/api/v1/status/flags"
curl -sS -o "$tmpdir/prom-tsdb-status.json" "http://localhost:9090/prometheus/api/v1/status/tsdb"
curl -sS -o "$tmpdir/prom-config.json" "http://localhost:9090/prometheus/api/v1/status/config"
curl -sS -o "$tmpdir/prom-targets.json" "http://localhost:9090/prometheus/api/v1/targets"
curl -sS -o "$tmpdir/promtail-targets.json" "http://localhost:9080/targets"
curl -sS -o "$tmpdir/promtail-metrics.json" "http://localhost:9080/metrics"
curl -sS -o "$tmpdir/promtail-config.yml" "http://localhost:9080/config"
# Important Prometheus series
curl -sS -o "$tmpdir/prom-series.json" "http://localhost:9090/prometheus/api/v1/series?match[]=multimanager_cluster_checker_status&match[]=multimanager_node_checker_status&match[]=multimanager_bucket_checker_status&match[]=cm_rest_request_enters_total&match[]=cbnode_up"
# These ones use `curl -v` instead, because the actual endpoints don't give us much info
curl -sv "http://localhost:3100/ready" > "$tmpdir/loki-health.txt" 2>&1
curl -sv "http://localhost:9090/prometheus/-/healthy" > "$tmpdir/prom-health.txt" 2>&1
curl -sv "http://localhost:9093/alertmanager/-/healthy" > "$tmpdir/am-health.txt" 2>&1
curl -sv "http://localhost:14269" > "$tmpdir/jaeger-health.txt" 2>&1
curl -sv "http://localhost:3000/grafana/api/health" > "$tmpdir/grafana-health.txt" 2>&1
curl -sv "http://localhost:8080/_meta/status" > "$tmpdir/nginx-status.txt" 2>&1
curl -sv "http://localhost:9080/ready" > "$tmpdir/promtail-health.txt" 2>&1
# Cluster Monitor endpoints
if [ -f "/bin/cbmultimanager" ]; then
  curl -sS -u "$CB_MULTI_ADMIN_USER:$CB_MULTI_ADMIN_PASSWORD" -o "$tmpdir/couchbase-cluster-monitor-self.json" "http://localhost:7196/api/v1/self"
  curl -sS -u "$CB_MULTI_ADMIN_USER:$CB_MULTI_ADMIN_PASSWORD" -o "$tmpdir/couchbase-clusters.json" "http://localhost:7196/api/v1/clusters"
  curl -sS -u "$CB_MULTI_ADMIN_USER:$CB_MULTI_ADMIN_PASSWORD" -o "$tmpdir/couchbase-checkers.json" "http://localhost:7196/api/v1/checkers"
  curl -sS -u "$CB_MULTI_ADMIN_USER:$CB_MULTI_ADMIN_PASSWORD" -o "$tmpdir/couchbase-dismissals.json" "http://localhost:7196/api/v1/dismissals"
else
  touch "$tmpdir/no-cluster-monitor"
fi
# Copy over all logs
# Do this at the end, so anything logged because of what we do is captured
mkdir -p "$tmpdir/logs"
cp /logs/* "$tmpdir/logs/"
# Do not copy /var/log/nginx/*, because they get mapped to stdout/stderr and cp will hang forever
# shellcheck disable=SC2043
for var_log in grafana; do
  for f in /var/log/"$var_log"/*; do cp "$f" "$tmpdir/logs/$var_log.$(basename "$f")"; done
done
# Tar it up and copy it to /support
# Make sure the destination directory exists; tar fails outright otherwise.
mkdir -p /tmp/support
output="/tmp/support/cmosinfo-$(date -u +"%Y-%m-%dT%H-%M-%SZ").tar"
tar -cvf "$output" -C "$tmpdir" .
tar_exitcode=$?
set +x
if [ "$tar_exitcode" -eq 0 ]; then
  # Ensure we are good citizens and clean up after ourselves
  rm -rf "$tmpdir"
  echo "Collected support information at $output."
  echo "If the CMOS web server is enabled, it can also be downloaded from http://<cmos-host>:8080/support/$(basename "$output")."
  echo
  echo "!!! WARNING !!!"
  echo "Currently, NO REDACTION is performed on the collected files."
  echo "We recommend you inspect them and remove any sensitive information before sending to Couchbase Support."
else
  echo "An error occurred and the diagnostics archive could not be collected."
  echo "Please inspect the output above for details."
  echo "All collected data will still be available (cleanup manually as required) in $tmpdir."
fi
<gh_stars>0
// Register Babel first so that every subsequently required module is
// transpiled on the fly, then start the webpack dev server.
require('./babel.register');
require('../webpack/webpack.dev.server');
|
#!/bin/bash
# Upgrade pre-check script: verifies disk space, reclaimable docker space,
# connectivity, services and certificates before a Turbonomic upgrade.
echo " "
RED=`tput setaf 1`
WHITE=`tput setaf 7`
GREEN=`tput setaf 2`
BLUE=`tput setaf 4`
NC=`tput sgr0` # No Color
echo "${GREEN}Starting Upgrade Pre-check..."
echo " "
echo "${WHITE}Checking for free disk space..."
df -h | egrep -v "overlay|shm"
echo "${GREEN}Please verify disk space above - ${RED}ensure that /var has at least 15GB free - if not please remove un-used docker images to clear enough space"
echo "${WHITE}***************************"
echo " "
echo "${GREEN}Reclaimable space by deleting un-used docker images${WHITE}"
docker system df
echo "${GREEN}To reclaim space from un-used docker images above you need to confirm the previous version of Turbonomic images installed"
echo "Run the command ${WHITE}'docker images | grep turbonomic/auth'${GREEN} to find the previous versions"
# Fix: backticks and $-signs are escaped so the example command is PRINTED
# literally; previously the backquoted "sudo docker images ..." pipeline was
# executed while building this message and $i/$3 were expanded away.
echo "Run the command ${WHITE}'for i in \`sudo docker images | grep 7.22.0 | awk '{print \$3}'\`; do sudo docker rmi \$i;done'${GREEN} replacing ${WHITE}'7.22.0'${GREEN} with the old previous versions of the docker images installed to be removed to clear up the required disk space"
echo "${WHITE}***************************"
echo " "
# Ask whether a proxy is needed, then probe every endpoint required for an
# ONLINE upgrade. The probe logic is shared via check_endpoint instead of
# being duplicated per-branch.
read -p "${GREEN}Are you using a proxy to connect to the internet on this Turbonomic instance (y/n)? " CONT
if [[ "$CONT" =~ ^([yY][eE][sS]|[yY])$ ]]
then
  read -p "${WHITE}What is the proxy name or IP and port you use?....example https://proxy.server.com:8080 " P_NAME_PORT
  echo " "
  echo "${WHITE}Checking endpoints for ONLINE upgrade ONLY using proxy provided..."
  CURL_PROXY_OPTS=(--proxy "$P_NAME_PORT")
else
  echo "${WHITE}Checking endpoints for ONLINE upgrade ONLY..."
  CURL_PROXY_OPTS=()
fi
# check_endpoint <url> <display-name>: probe <url> (through the proxy when one
# was supplied) and print the success/failure message for <display-name>.
check_endpoint () {
  curl "${CURL_PROXY_OPTS[@]}" "$1" --max-time 3 -s -f -o /dev/null && echo "${GREEN}SUCCESSFULLY reached $2" || echo "${RED}CANNOT REACH $2 - DO NOT PROCEED WITH ONLINE UPGRADE UNTIL THIS IS RESOLVED"
}
check_endpoint https://index.docker.io index.docker.io
check_endpoint https://auth.docker.io auth.docker.io
check_endpoint https://registry-1.docker.io registry-1.docker.io
check_endpoint https://production.cloudflare.docker.com production.cloudflare.docker.com
check_endpoint https://raw.githubusercontent.com raw.githubusercontent.com
check_endpoint https://github.com github.com
check_endpoint https://download.vmturbo.com/appliance/download/updates/8.1.4/onlineUpgrade.sh download.vmturbo.com
check_endpoint https://yum.mariadb.org https://yum.mariadb.org
check_endpoint https://packagecloud.io https://packagecloud.io
check_endpoint https://download.postgresql.org https://download.postgresql.org
echo "${WHITE}****************************"
echo " "
# --- MariaDB service and version check ---
echo "Checking MariaDB status..."
echo "${GREEN}Checking if the MariaDB service is running...${WHITE}"
MSTATUS="$(systemctl is-active mariadb)"
if [ "${MSTATUS}" = "active" ]; then
  echo "MariaDB service is running"
  echo "${GREEN}Checking MariaDB version${WHITE}"
  systemctl list-units --all -t service --full --no-legend "mariadb.service"
  echo "If the version of MariaDB is below version 10.5.9 you will also need to upgrade it post Turbonomic upgrade following the steps in the install guide"
elif [ "${MSTATUS}" = "unknown" ]; then
  echo "MariaDB service is not installed, precheck skipped"
else
  echo "${RED}MariaDB service is not running....please resolve before upgrading"
fi
#sudo systemctl status mariadb | grep Active
# --- Kubernetes (kubelet) service check ---
echo "${GREEN}Checking if the Kubernetes service is running...${WHITE}"
CSTATUS="$(systemctl is-active kubelet)"
if [ "${CSTATUS}" = "active" ]; then
  echo "Kubernetes service is running..."
else
  echo "${RED}Kubernetes service is not running....please resolve before upgrading"
fi
#sudo systemctl status kubelet | grep Active
echo "${GREEN}Please ensure the services above are running, ${RED}if they are not active (running) please resolve before proceeding"
echo "${WHITE}****************************"
echo " "
# --- Kubernetes certificate expiry checks ---
echo "Checking for expired Kubernetes certificates..."
echo "${GREEN}Checking apiserver-kubelet-client.crt file expiry date...${WHITE}"
openssl x509 -noout -enddate -in /etc/kubernetes/ssl/apiserver-kubelet-client.crt
echo "${GREEN}Checking apiserver.crt file expiry date...${WHITE}"
openssl x509 -noout -enddate -in /etc/kubernetes/ssl/apiserver.crt
echo "${GREEN}Checking front-proxy-client.crt file expiry date...${WHITE}"
openssl x509 -noout -enddate -in /etc/kubernetes/ssl/front-proxy-client.crt
echo "${GREEN}Please validate the expiry dates above, ${RED}if expiry dates listed above is before current date please run the script kubeNodeCertUpdate.sh in /opt/local/bin to resolve the issue before upgrading"
echo "${WHITE}*****************************"
echo " "
# --- root account password expiry check ---
echo "Checking if root password is expired or set to expire..."
echo "${GREEN}root account details below${WHITE}"
sudo chage -l root
echo "${GREEN}Please validate the expiry dates above, ${RED}if expired or not set please set/reset the password before proceeding"
echo "${WHITE}*****************************"
echo " "
# --- NTP / time synchronization checks ---
# NOTE(review): newer systemd versions label these fields "NTP service" /
# "System clock synchronized"; confirm the grep patterns still match on the
# appliance's OS version.
echo "${GREEN}Checking if NTP is enabled for timesync...${WHITE}"
timedatectl | grep "NTP enabled"
echo "${GREEN}Checking if NTP is synchronized for timesync...${WHITE}"
timedatectl | grep "NTP sync"
echo "${GREEN}Checking if Chronyd is running for NTP timesync...${WHITE}"
sudo systemctl status chronyd | grep Active
echo "${GREEN}Checking list of NTP servers being used for timesync (if enabled and running)...${WHITE}"
cat /etc/chrony.conf | grep server
echo "${GREEN}Current date, time and timezone configured (default is UTC time)...${WHITE}"
date
echo "${GREEN}Please validate NTP, TIME and DATE configuration above if it is required, ${RED}if not enabled or correct and it is required please resolve by reviewing the Install Guide for steps to Sync Time"
echo "${WHITE}*****************************"
echo " "
# --- Pod readiness check: list pods whose ready count != desired count ---
echo "${GREEN}Checking for any Turbonomic pods not ready and running...${WHITE}"
kubectl get pod -n turbonomic | grep -Pv '\s+([1-9]+)\/\1\s+' | grep -v "NAME"
kubectl get pod -n default | grep -Pv '\s+([1-9]+)\/\1\s+' | grep -v "NAME"
echo "${GREEN}Please resolve issues with the pods listed above (if any), ${RED}if you cannot resolve on your own **please contact support**"
echo "${WHITE}*****************************"
echo " "
echo "${GREEN}Please take time to review and resolve any issues above before proceeding with the upgrade, ${RED}if you cannot resolve **please contact support**"
echo " "
echo "${GREEN}End of Upgrade Pre-Check${WHITE}"
|
#!/bin/bash
# Environment setup for OSSIM builds: resolves the source/build/install paths
# and exports the feature toggles consumed by the cmake configuration script.
# Every export below honors a pre-existing value, so callers can override any
# setting before sourcing this script.

# Resolve the absolute directory this script lives in.
pushd `dirname $0` >/dev/null
export SCRIPT_DIR=`pwd -P`
popd >/dev/null
. $SCRIPT_DIR/git-prompt.sh
# Current git branch; used below to pick SNAPSHOT vs RELEASE tagging.
if [ -z $OSSIM_GIT_BRANCH ] ; then
export OSSIM_GIT_BRANCH=`__git_ps1 "%s"`
fi
# Under CI, $WORKSPACE points at the checkout; otherwise derive the dev home
# from this script's location (two levels up).
if [ -z $WORKSPACE ] ; then
if [ -z "$OSSIM_DEV_HOME" ]; then
pushd $SCRIPT_DIR/../.. >/dev/null
export OSSIM_DEV_HOME=$PWD
popd >/dev/null
fi
else
export OSSIM_DEV_HOME=$WORKSPACE
fi
if [ -z "$OSSIM_MAKE_JOBS" ]; then
export OSSIM_MAKE_JOBS=4
fi
if [ -z "$OSSIM_INSTALL_PREFIX" ]; then
export OSSIM_INSTALL_PREFIX=$OSSIM_DEV_HOME/install
fi
if [ -z "$OSSIM_BUILD_DIR" ]; then
export OSSIM_BUILD_DIR=$OSSIM_DEV_HOME/build
fi
export CMAKE_CONFIG_SCRIPT=$OSSIM_DEV_HOME/ossim/cmake/scripts/ossim-cmake-config.sh
# Setup JAVA Home
#
# If not explicitly set then try to set. Add more for other OS's
# this should work with OpenJDK installation.
#
if [ -z $JAVA_HOME ] ; then
if [ -d "/usr/lib/jvm/java" ] ; then
export JAVA_HOME="/usr/lib/jvm/java"
elif [ -f "/usr/libexec/java_home" ] ; then
export JAVA_HOME=`/usr/libexec/java_home`
fi
fi
# for packaging and general version number
#
if [ -z $OSSIM_VERSION ] ; then
export OSSIM_VERSION=1.9.0
fi
if [ -z $OSSIM_VERSION_TAG ] ; then
if [ "${OSSIM_GIT_BRANCH}" == "dev" ] ; then
export OSSIM_VERSION_TAG="SNAPSHOT"
else
export OSSIM_VERSION_TAG="RELEASE"
fi
fi
# For RPM packaging
#
if [ -z $OSSIM_BUILD_RELEASE ] ; then
export OSSIM_BUILD_RELEASE=1
fi
if [ -z $BUILD_OSSIM_APPS ] ; then
export BUILD_OSSIM_APPS=ON
fi
if [ -z $BUILD_OSSIM_CURL_APPS ] ; then
export BUILD_OSSIM_CURL_APPS=OFF
fi
# Optional components: each is enabled only when its source directory exists.
if [ -d $OSSIM_DEV_HOME/ossim-video ] ; then
if [ -z $BUILD_OSSIM_VIDEO ] ; then
export BUILD_OSSIM_VIDEO=ON
fi
else
# NOTE(review): this else branch sets ON even though the ossim-video source
# directory is absent; every sibling component below defaults to OFF in the
# same situation. Confirm whether this should be OFF.
export BUILD_OSSIM_VIDEO=ON
fi
if [ -d $OSSIM_DEV_HOME/ossim-oms ] ; then
if [ -z $BUILD_OMS ] ; then
export BUILD_OMS=ON
fi
else
export BUILD_OMS=OFF
fi
if [ -d $OSSIM_DEV_HOME/ossim-gui ] ; then
if [ -z $BUILD_OSSIM_GUI ] ; then
export BUILD_OSSIM_GUI=ON
fi
else
export BUILD_OSSIM_GUI=OFF
fi
if [ -d $OSSIM_DEV_HOME/ossim-planet ] ; then
if [ -z $BUILD_OSSIM_PLANET ] ; then
export BUILD_OSSIM_PLANET=ON
fi
else
export BUILD_OSSIM_PLANET=OFF
fi
if [ -d $OSSIM_DEV_HOME/ossim-wms ] ; then
if [ -z $BUILD_OSSIM_WMS ] ; then
export BUILD_OSSIM_WMS=ON
fi
else
export BUILD_OSSIM_WMS=OFF
fi
# Plugin toggles; only relevant when the ossim-plugins checkout is present.
if [ -d $OSSIM_DEV_HOME/ossim-plugins ] ; then
if [ -z $BUILD_CNES_PLUGIN ] ; then
export BUILD_CNES_PLUGIN=ON
fi
if [ -z $BUILD_CSM_PLUGIN ] ; then
export BUILD_CSM_PLUGIN=OFF
fi
if [ -z $BUILD_WEB_PLUGIN ] ; then
export BUILD_WEB_PLUGIN=OFF
fi
if [ -z $BUILD_SQLITE_PLUGIN ] ; then
export BUILD_SQLITE_PLUGIN=OFF
fi
if [ -z $BUILD_KAKADU_PLUGIN ] ; then
export BUILD_KAKADU_PLUGIN=OFF
fi
if [ -z $BUILD_KML_PLUGIN ] ; then
export BUILD_KML_PLUGIN=OFF
fi
if [ -z $BUILD_GDAL_PLUGIN ] ; then
export BUILD_GDAL_PLUGIN=OFF
fi
#if [ -z $BUILD_HDF5_PLUGIN ] ; then
# export BUILD_HDF5_PLUGIN=ON
#fi
if [ -z $BUILD_POTRACE_PLUGIN ] ; then
export BUILD_POTRACE_PLUGIN=OFF
fi
if [ -z $BUILD_FFTW3_PLUGIN ] ; then
export BUILD_FFTW3_PLUGIN=OFF
fi
if [ -z $BUILD_GEOPDF_PLUGIN ] ; then
export BUILD_GEOPDF_PLUGIN=OFF
fi
if [ -z $BUILD_OPENCV_PLUGIN ] ; then
export BUILD_OPENCV_PLUGIN=OFF
fi
if [ -z $BUILD_OPENJPEG_PLUGIN ] ; then
export BUILD_OPENJPEG_PLUGIN=OFF
fi
if [ -z $BUILD_PNG_PLUGIN ] ; then
export BUILD_PNG_PLUGIN=OFF
fi
if [ -z $BUILD_JPEG12_PLUGIN ] ; then
export BUILD_JPEG12_PLUGIN=ON
fi
if [ -z $BUILD_OSSIM_HDF5_SUPPORT ] ; then
export BUILD_OSSIM_HDF5_SUPPORT=OFF
fi
fi
# Private add-on: build the kakadu jpip server alongside when it is checked out.
if [ -z $OSSIM_BUILD_ADDITIONAL_DIRECTORIES ] ; then
if [ -d $OSSIM_DEV_HOME/ossim-private/ossim-kakadu-jpip-server ]; then
export OSSIM_BUILD_ADDITIONAL_DIRECTORIES=$OSSIM_DEV_HOME/ossim-private/ossim-kakadu-jpip-server
fi
fi
# Resolve Kakadu SDK paths when the plugin or the jpip server is enabled.
# Fix: the first operand was written as "$VAR"="ON" (no spaces), which test(1)
# treats as a single non-empty string - i.e. always true. Binary "=" needs
# surrounding spaces to actually compare the variable against "ON".
if [ \( "${BUILD_KAKADU_PLUGIN}" = "ON" \) -o \( -d "$OSSIM_DEV_HOME/ossim-private/ossim-kakadu-jpip-server" \) ] ; then
if [ -d "${OSSIM_DEV_HOME}/kakadu-${KAKADU_VERSION}" ] ; then
if [ -z $KAKADU_ROOT_SRC ] ; then
export KAKADU_ROOT_SRC="${OSSIM_DEV_HOME}/kakadu-${KAKADU_VERSION}"
fi
if [ -d "${KAKADU_ROOT_SRC}/lib/Linux-x86-64-gcc" ] ; then
if [ -z $KAKADU_LIBRARY ] ; then
export KAKADU_LIBRARY="${KAKADU_ROOT_SRC}/lib/Linux-x86-64-gcc/libkdu_v75R.so"
fi
if [ -z $KAKADU_AUX_LIBRARY ] ; then
export KAKADU_AUX_LIBRARY="${KAKADU_ROOT_SRC}/lib/Linux-x86-64-gcc/libkdu_a75R.so"
fi
fi
fi
fi
|
#!/usr/bin/env bash
# Provisions a Homestead-style Vagrant box on Ubuntu 16.04 (xenial):
# system update, locale, third-party package repositories, base packages.
export DEBIAN_FRONTEND=noninteractive
# Update Package List
apt-get update
# Update System Packages
apt-get -y upgrade
# Force Locale
echo "LC_ALL=en_US.UTF-8" >> /etc/default/locale
locale-gen en_US.UTF-8
# Install Some PPAs
apt-get install -y software-properties-common curl
apt-add-repository ppa:nginx/development -y
apt-add-repository ppa:chris-lea/redis-server -y
apt-add-repository ppa:ondrej/php -y
# Microsoft repository (SQL Server tooling): signing key + source list.
curl https://packages.microsoft.com/keys/microsoft.asc | apt-key add -
curl https://packages.microsoft.com/config/ubuntu/16.04/prod.list > /etc/apt/sources.list.d/mssql-release.list
# gpg: key 5072E1F5: public key "MySQL Release Engineering <mysql-build@oss.oracle.com>" imported
# apt-key adv --keyserver ha.pool.sks-keyservers.net --recv-keys 5072E1F5
# sh -c 'echo "deb http://repo.mysql.com/apt/ubuntu/ xenial mysql-5.7" >> /etc/apt/sources.list.d/mysql.list'
# PostgreSQL, Blackfire and NodeSource 8.x repositories.
echo 'deb http://apt.postgresql.org/pub/repos/apt/ xenial-pgdg main' >> /etc/apt/sources.list.d/pgdg.list
wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | sudo apt-key add -
curl -s https://packagecloud.io/gpg.key | apt-key add -
echo "deb http://packages.blackfire.io/debian any main" | tee /etc/apt/sources.list.d/blackfire.list
curl --silent --location https://deb.nodesource.com/setup_8.x | bash -
# Update Package Lists
apt-get update
# Install Some Basic Packages
apt-get install -y build-essential dos2unix gcc git libmcrypt4 libpcre3-dev ntp unzip \
make python2.7-dev python-pip re2c supervisor unattended-upgrades whois vim libnotify-bin \
pv cifs-utils mcrypt bash-completion zsh
# Set My Timezone
ln -sf /usr/share/zoneinfo/UTC /etc/localtime
# Install PHP Stuffs
# Four parallel PHP versions are installed (7.2 / 7.1 / 7.0 / 5.6) so projects
# can pick per-site; 7.2 is made the system default further below.
# Current PHP
apt-get install -y --allow-downgrades --allow-remove-essential --allow-change-held-packages \
php7.2-cli php7.2-dev \
php7.2-pgsql php7.2-sqlite3 php7.2-gd \
php7.2-curl php7.2-memcached \
php7.2-imap php7.2-mysql php7.2-mbstring \
php7.2-xml php7.2-zip php7.2-bcmath php7.2-soap \
php7.2-intl php7.2-readline \
php-xdebug php-pear
# PHP 7.1
apt-get install -y --allow-downgrades --allow-remove-essential --allow-change-held-packages \
php7.1-cli php7.1-dev \
php7.1-pgsql php7.1-sqlite3 php7.1-gd \
php7.1-curl php7.1-memcached \
php7.1-imap php7.1-mysql php7.1-mbstring \
php7.1-xml php7.1-zip php7.1-bcmath php7.1-soap \
php7.1-intl php7.1-readline
# PHP 7.0
apt-get install -y --allow-downgrades --allow-remove-essential --allow-change-held-packages \
php7.0-cli php7.0-dev \
php7.0-pgsql php7.0-sqlite3 php7.0-gd \
php7.0-curl php7.0-memcached \
php7.0-imap php7.0-mysql php7.0-mbstring \
php7.0-xml php7.0-zip php7.0-bcmath php7.0-soap \
php7.0-intl php7.0-readline
# PHP 5.6
apt-get install -y --allow-downgrades --allow-remove-essential --allow-change-held-packages \
php5.6-cli php5.6-dev \
php5.6-pgsql php5.6-sqlite3 php5.6-gd \
php5.6-curl php5.6-memcached \
php5.6-imap php5.6-mysql php5.6-mbstring \
php5.6-xml php5.6-zip php5.6-bcmath php5.6-soap \
php5.6-intl php5.6-readline php5.6-mcrypt
# Make PHP 7.2 the default `php` on the CLI.
update-alternatives --set php /usr/bin/php7.2
# Install Composer
curl -sS https://getcomposer.org/installer | php
mv composer.phar /usr/local/bin/composer
# Install Laravel Envoy & Installer
# Run as the vagrant user so the global packages land in its composer home.
sudo su vagrant <<'EOF'
/usr/local/bin/composer global require "laravel/envoy=~1.0"
/usr/local/bin/composer global require "laravel/installer=~2.0"
/usr/local/bin/composer global require "drush/drush=~8"
EOF
# Set Some PHP CLI Settings
# The same four ini tweaks are applied to every installed PHP version's CLI SAPI.
sudo sed -i "s/error_reporting = .*/error_reporting = E_ALL/" /etc/php/7.2/cli/php.ini
sudo sed -i "s/display_errors = .*/display_errors = On/" /etc/php/7.2/cli/php.ini
sudo sed -i "s/memory_limit = .*/memory_limit = 512M/" /etc/php/7.2/cli/php.ini
sudo sed -i "s/;date.timezone.*/date.timezone = UTC/" /etc/php/7.2/cli/php.ini
sudo sed -i "s/error_reporting = .*/error_reporting = E_ALL/" /etc/php/7.1/cli/php.ini
sudo sed -i "s/display_errors = .*/display_errors = On/" /etc/php/7.1/cli/php.ini
sudo sed -i "s/memory_limit = .*/memory_limit = 512M/" /etc/php/7.1/cli/php.ini
sudo sed -i "s/;date.timezone.*/date.timezone = UTC/" /etc/php/7.1/cli/php.ini
sudo sed -i "s/error_reporting = .*/error_reporting = E_ALL/" /etc/php/7.0/cli/php.ini
sudo sed -i "s/display_errors = .*/display_errors = On/" /etc/php/7.0/cli/php.ini
sudo sed -i "s/memory_limit = .*/memory_limit = 512M/" /etc/php/7.0/cli/php.ini
sudo sed -i "s/;date.timezone.*/date.timezone = UTC/" /etc/php/7.0/cli/php.ini
sudo sed -i "s/error_reporting = .*/error_reporting = E_ALL/" /etc/php/5.6/cli/php.ini
sudo sed -i "s/display_errors = .*/display_errors = On/" /etc/php/5.6/cli/php.ini
sudo sed -i "s/memory_limit = .*/memory_limit = 512M/" /etc/php/5.6/cli/php.ini
sudo sed -i "s/;date.timezone.*/date.timezone = UTC/" /etc/php/5.6/cli/php.ini
# Install Nginx & PHP-FPM
apt-get install -y --allow-downgrades --allow-remove-essential --allow-change-held-packages \
nginx php7.1-fpm php7.2-fpm php7.0-fpm php5.6-fpm
# Drop the distro's default vhost; site configs are provisioned elsewhere.
rm /etc/nginx/sites-enabled/default
rm /etc/nginx/sites-available/default
service nginx restart
# Setup Some PHP-FPM Options
# Enable remote xdebug (connect-back on port 9000) and instant opcache
# revalidation for every installed PHP version.
echo "xdebug.remote_enable = 1" >> /etc/php/7.2/mods-available/xdebug.ini
echo "xdebug.remote_connect_back = 1" >> /etc/php/7.2/mods-available/xdebug.ini
echo "xdebug.remote_port = 9000" >> /etc/php/7.2/mods-available/xdebug.ini
echo "xdebug.max_nesting_level = 512" >> /etc/php/7.2/mods-available/xdebug.ini
echo "opcache.revalidate_freq = 0" >> /etc/php/7.2/mods-available/opcache.ini
echo "xdebug.remote_enable = 1" >> /etc/php/7.1/mods-available/xdebug.ini
echo "xdebug.remote_connect_back = 1" >> /etc/php/7.1/mods-available/xdebug.ini
echo "xdebug.remote_port = 9000" >> /etc/php/7.1/mods-available/xdebug.ini
echo "xdebug.max_nesting_level = 512" >> /etc/php/7.1/mods-available/xdebug.ini
echo "opcache.revalidate_freq = 0" >> /etc/php/7.1/mods-available/opcache.ini
echo "xdebug.remote_enable = 1" >> /etc/php/7.0/mods-available/xdebug.ini
echo "xdebug.remote_connect_back = 1" >> /etc/php/7.0/mods-available/xdebug.ini
echo "xdebug.remote_port = 9000" >> /etc/php/7.0/mods-available/xdebug.ini
echo "xdebug.max_nesting_level = 512" >> /etc/php/7.0/mods-available/xdebug.ini
echo "opcache.revalidate_freq = 0" >> /etc/php/7.0/mods-available/opcache.ini
echo "xdebug.remote_enable = 1" >> /etc/php/5.6/mods-available/xdebug.ini
echo "xdebug.remote_connect_back = 1" >> /etc/php/5.6/mods-available/xdebug.ini
echo "xdebug.remote_port = 9000" >> /etc/php/5.6/mods-available/xdebug.ini
echo "xdebug.max_nesting_level = 512" >> /etc/php/5.6/mods-available/xdebug.ini
echo "opcache.revalidate_freq = 0" >> /etc/php/5.6/mods-available/opcache.ini
# Development-friendly FPM php.ini for each version: verbose errors, large
# memory/upload limits, UTC timezone, and cgi.fix_pathinfo=0 (path-info
# hardening for nginx + FPM setups).
sed -i "s/error_reporting = .*/error_reporting = E_ALL/" /etc/php/7.2/fpm/php.ini
sed -i "s/display_errors = .*/display_errors = On/" /etc/php/7.2/fpm/php.ini
sed -i "s/;cgi.fix_pathinfo=1/cgi.fix_pathinfo=0/" /etc/php/7.2/fpm/php.ini
sed -i "s/memory_limit = .*/memory_limit = 512M/" /etc/php/7.2/fpm/php.ini
sed -i "s/upload_max_filesize = .*/upload_max_filesize = 100M/" /etc/php/7.2/fpm/php.ini
sed -i "s/post_max_size = .*/post_max_size = 100M/" /etc/php/7.2/fpm/php.ini
sed -i "s/;date.timezone.*/date.timezone = UTC/" /etc/php/7.2/fpm/php.ini
printf "[openssl]\n" | tee -a /etc/php/7.2/fpm/php.ini
printf "openssl.cainfo = /etc/ssl/certs/ca-certificates.crt\n" | tee -a /etc/php/7.2/fpm/php.ini
printf "[curl]\n" | tee -a /etc/php/7.2/fpm/php.ini
printf "curl.cainfo = /etc/ssl/certs/ca-certificates.crt\n" | tee -a /etc/php/7.2/fpm/php.ini
sed -i "s/error_reporting = .*/error_reporting = E_ALL/" /etc/php/7.1/fpm/php.ini
sed -i "s/display_errors = .*/display_errors = On/" /etc/php/7.1/fpm/php.ini
sed -i "s/;cgi.fix_pathinfo=1/cgi.fix_pathinfo=0/" /etc/php/7.1/fpm/php.ini
sed -i "s/memory_limit = .*/memory_limit = 512M/" /etc/php/7.1/fpm/php.ini
sed -i "s/upload_max_filesize = .*/upload_max_filesize = 100M/" /etc/php/7.1/fpm/php.ini
sed -i "s/post_max_size = .*/post_max_size = 100M/" /etc/php/7.1/fpm/php.ini
sed -i "s/;date.timezone.*/date.timezone = UTC/" /etc/php/7.1/fpm/php.ini
printf "[openssl]\n" | tee -a /etc/php/7.1/fpm/php.ini
printf "openssl.cainfo = /etc/ssl/certs/ca-certificates.crt\n" | tee -a /etc/php/7.1/fpm/php.ini
printf "[curl]\n" | tee -a /etc/php/7.1/fpm/php.ini
printf "curl.cainfo = /etc/ssl/certs/ca-certificates.crt\n" | tee -a /etc/php/7.1/fpm/php.ini
sed -i "s/error_reporting = .*/error_reporting = E_ALL/" /etc/php/7.0/fpm/php.ini
sed -i "s/display_errors = .*/display_errors = On/" /etc/php/7.0/fpm/php.ini
sed -i "s/;cgi.fix_pathinfo=1/cgi.fix_pathinfo=0/" /etc/php/7.0/fpm/php.ini
sed -i "s/memory_limit = .*/memory_limit = 512M/" /etc/php/7.0/fpm/php.ini
sed -i "s/upload_max_filesize = .*/upload_max_filesize = 100M/" /etc/php/7.0/fpm/php.ini
sed -i "s/post_max_size = .*/post_max_size = 100M/" /etc/php/7.0/fpm/php.ini
sed -i "s/;date.timezone.*/date.timezone = UTC/" /etc/php/7.0/fpm/php.ini
printf "[curl]\n" | tee -a /etc/php/7.0/fpm/php.ini
printf "curl.cainfo = /etc/ssl/certs/ca-certificates.crt\n" | tee -a /etc/php/7.0/fpm/php.ini
sed -i "s/error_reporting = .*/error_reporting = E_ALL/" /etc/php/5.6/fpm/php.ini
sed -i "s/display_errors = .*/display_errors = On/" /etc/php/5.6/fpm/php.ini
sed -i "s/;cgi.fix_pathinfo=1/cgi.fix_pathinfo=0/" /etc/php/5.6/fpm/php.ini
sed -i "s/memory_limit = .*/memory_limit = 512M/" /etc/php/5.6/fpm/php.ini
sed -i "s/upload_max_filesize = .*/upload_max_filesize = 100M/" /etc/php/5.6/fpm/php.ini
sed -i "s/post_max_size = .*/post_max_size = 100M/" /etc/php/5.6/fpm/php.ini
sed -i "s/;date.timezone.*/date.timezone = UTC/" /etc/php/5.6/fpm/php.ini
printf "[curl]\n" | tee -a /etc/php/5.6/fpm/php.ini
printf "curl.cainfo = /etc/ssl/certs/ca-certificates.crt\n" | tee -a /etc/php/5.6/fpm/php.ini
# Disable XDebug On The CLI
sudo phpdismod -s cli xdebug
# Copy fastcgi_params to Nginx because they broke it on the PPA
# (heredoc below is written verbatim; the \$ escapes keep the nginx variables
# literal instead of being expanded by the shell)
cat > /etc/nginx/fastcgi_params << EOF
fastcgi_param QUERY_STRING \$query_string;
fastcgi_param REQUEST_METHOD \$request_method;
fastcgi_param CONTENT_TYPE \$content_type;
fastcgi_param CONTENT_LENGTH \$content_length;
fastcgi_param SCRIPT_FILENAME \$request_filename;
fastcgi_param SCRIPT_NAME \$fastcgi_script_name;
fastcgi_param REQUEST_URI \$request_uri;
fastcgi_param DOCUMENT_URI \$document_uri;
fastcgi_param DOCUMENT_ROOT \$document_root;
fastcgi_param SERVER_PROTOCOL \$server_protocol;
fastcgi_param GATEWAY_INTERFACE CGI/1.1;
fastcgi_param SERVER_SOFTWARE nginx/\$nginx_version;
fastcgi_param REMOTE_ADDR \$remote_addr;
fastcgi_param REMOTE_PORT \$remote_port;
fastcgi_param SERVER_ADDR \$server_addr;
fastcgi_param SERVER_PORT \$server_port;
fastcgi_param SERVER_NAME \$server_name;
fastcgi_param HTTPS \$https if_not_empty;
fastcgi_param REDIRECT_STATUS 200;
EOF
# Set The Nginx & PHP-FPM User
# Everything runs as the `vagrant` user so shared-folder permissions line up.
sed -i "s/user www-data;/user vagrant;/" /etc/nginx/nginx.conf
sed -i "s/# server_names_hash_bucket_size.*/server_names_hash_bucket_size 64;/" /etc/nginx/nginx.conf
sed -i "s/user = www-data/user = vagrant/" /etc/php/7.2/fpm/pool.d/www.conf
sed -i "s/group = www-data/group = vagrant/" /etc/php/7.2/fpm/pool.d/www.conf
sed -i "s/user = www-data/user = vagrant/" /etc/php/7.1/fpm/pool.d/www.conf
sed -i "s/group = www-data/group = vagrant/" /etc/php/7.1/fpm/pool.d/www.conf
sed -i "s/listen\.owner.*/listen.owner = vagrant/" /etc/php/7.1/fpm/pool.d/www.conf
sed -i "s/listen\.group.*/listen.group = vagrant/" /etc/php/7.1/fpm/pool.d/www.conf
sed -i "s/;listen\.mode.*/listen.mode = 0666/" /etc/php/7.1/fpm/pool.d/www.conf
sed -i "s/user = www-data/user = vagrant/" /etc/php/7.0/fpm/pool.d/www.conf
sed -i "s/group = www-data/group = vagrant/" /etc/php/7.0/fpm/pool.d/www.conf
sed -i "s/listen\.owner.*/listen.owner = vagrant/" /etc/php/7.0/fpm/pool.d/www.conf
sed -i "s/listen\.group.*/listen.group = vagrant/" /etc/php/7.0/fpm/pool.d/www.conf
sed -i "s/;listen\.mode.*/listen.mode = 0666/" /etc/php/7.0/fpm/pool.d/www.conf
sed -i "s/user = www-data/user = vagrant/" /etc/php/5.6/fpm/pool.d/www.conf
sed -i "s/group = www-data/group = vagrant/" /etc/php/5.6/fpm/pool.d/www.conf
sed -i "s/listen\.owner.*/listen.owner = vagrant/" /etc/php/5.6/fpm/pool.d/www.conf
sed -i "s/listen\.group.*/listen.group = vagrant/" /etc/php/5.6/fpm/pool.d/www.conf
sed -i "s/;listen\.mode.*/listen.mode = 0666/" /etc/php/5.6/fpm/pool.d/www.conf
service nginx restart
service php7.2-fpm restart
service php7.1-fpm restart
service php7.0-fpm restart
service php5.6-fpm restart
# Add Vagrant User To WWW-Data
usermod -a -G www-data vagrant
id vagrant
groups vagrant
# Install Node
# (nodesource repo was added earlier, so this is Node 8.x)
apt-get install -y nodejs
/usr/bin/npm install -g npm
/usr/bin/npm install -g gulp-cli
/usr/bin/npm install -g bower
/usr/bin/npm install -g yarn
/usr/bin/npm install -g grunt-cli
# Install SQLite
apt-get install -y sqlite3 libsqlite3-dev
# Install MySQL
# Pre-seed the root password so the install stays non-interactive.
echo "mysql-server mysql-server/root_password password secret" | debconf-set-selections
echo "mysql-server mysql-server/root_password_again password secret" | debconf-set-selections
apt-get install -y mysql-server
# Configure MySQL Password Lifetime
echo "default_password_lifetime = 0" >> /etc/mysql/mysql.conf.d/mysqld.cnf
# Configure MySQL Remote Access
# Dev box only: listens on all interfaces with a well-known password.
sed -i '/^bind-address/s/bind-address.*=.*/bind-address = 0.0.0.0/' /etc/mysql/mysql.conf.d/mysqld.cnf
mysql --user="root" --password="secret" -e "GRANT ALL ON *.* TO root@'0.0.0.0' IDENTIFIED BY 'secret' WITH GRANT OPTION;"
service mysql restart
mysql --user="root" --password="secret" -e "CREATE USER 'homestead'@'0.0.0.0' IDENTIFIED BY 'secret';"
mysql --user="root" --password="secret" -e "GRANT ALL ON *.* TO 'homestead'@'0.0.0.0' IDENTIFIED BY 'secret' WITH GRANT OPTION;"
mysql --user="root" --password="secret" -e "GRANT ALL ON *.* TO 'homestead'@'%' IDENTIFIED BY 'secret' WITH GRANT OPTION;"
mysql --user="root" --password="secret" -e "FLUSH PRIVILEGES;"
mysql --user="root" --password="secret" -e "CREATE DATABASE homestead character set UTF8mb4 collate utf8mb4_bin;"
service mysql restart
# Add Timezone Support To MySQL
mysql_tzinfo_to_sql /usr/share/zoneinfo | mysql --user=root --password=secret mysql
# Install Postgres
apt-get install -y postgresql-10
# Configure Postgres Remote Access
sed -i "s/#listen_addresses = 'localhost'/listen_addresses = '*'/g" /etc/postgresql/10/main/postgresql.conf
# 10.0.2.2 is the VirtualBox NAT host address.
echo "host    all             all             10.0.2.2/32               md5" | tee -a /etc/postgresql/10/main/pg_hba.conf
sudo -u postgres psql -c "CREATE ROLE homestead LOGIN PASSWORD 'secret' SUPERUSER INHERIT NOCREATEDB NOCREATEROLE NOREPLICATION;"
sudo -u postgres /usr/bin/createdb --echo --owner=homestead homestead
service postgresql restart
# Install Blackfire
apt-get install -y blackfire-agent blackfire-php
# Install Zend Z-Ray (for FPM only, not CLI)
sudo wget http://repos.zend.com/zend-server/early-access/ZRay-Homestead/zray-standalone-php72.tar.gz -O - | sudo tar -xzf - -C /opt
sudo ln -sf /opt/zray/zray.ini /etc/php/7.2/fpm/conf.d/zray.ini
sudo ln -sf /opt/zray/lib/zray.so /usr/lib/php/20170718/zray.so
sudo chown -R vagrant:vagrant /opt/zray
# Install The Chrome Web Driver & Dusk Utilities
apt-get -y install libxpm4 libxrender1 libgtk2.0-0 \
libnss3 libgconf-2-4 chromium-browser \
xvfb gtk2-engines-pixbuf xfonts-cyrillic \
xfonts-100dpi xfonts-75dpi xfonts-base \
xfonts-scalable imagemagick x11-apps
# Install Memcached & Beanstalk
apt-get install -y redis-server memcached beanstalkd
# Configure Beanstalkd
sed -i "s/#START=yes/START=yes/" /etc/default/beanstalkd
/etc/init.d/beanstalkd start
# Install & Configure MailHog
wget --quiet -O /usr/local/bin/mailhog https://github.com/mailhog/MailHog/releases/download/v0.2.1/MailHog_linux_amd64
chmod +x /usr/local/bin/mailhog
# Fix: systemd does not interpret shell redirections or '&' in ExecStart — the
# original "> /dev/null 2>&1 &" was passed to mailhog as literal arguments.
# systemd manages backgrounding and output itself, so the bare binary suffices.
sudo tee /etc/systemd/system/mailhog.service <<EOL
[Unit]
Description=Mailhog
After=network.target
[Service]
User=vagrant
ExecStart=/usr/local/bin/mailhog
[Install]
WantedBy=multi-user.target
EOL
systemctl daemon-reload
systemctl enable mailhog
# Configure Supervisor
systemctl enable supervisor.service
service supervisor start
# Install ngrok
wget https://bin.equinox.io/c/4VmDzA7iaHb/ngrok-stable-linux-amd64.zip
unzip ngrok-stable-linux-amd64.zip -d /usr/local/bin
rm -rf ngrok-stable-linux-amd64.zip
# Install Flyway
wget https://repo1.maven.org/maven2/org/flywaydb/flyway-commandline/4.2.0/flyway-commandline-4.2.0-linux-x64.tar.gz
tar -zxvf flyway-commandline-4.2.0-linux-x64.tar.gz -C /usr/local
chmod +x /usr/local/flyway-4.2.0/flyway
ln -s /usr/local/flyway-4.2.0/flyway /usr/local/bin/flyway
rm -rf flyway-commandline-4.2.0-linux-x64.tar.gz
# Install wp-cli
curl -O https://raw.githubusercontent.com/wp-cli/builds/gh-pages/phar/wp-cli.phar
chmod +x wp-cli.phar
mv wp-cli.phar /usr/local/bin/wp
# Install oh-my-zsh
# Fix: GitHub no longer serves the unauthenticated git:// protocol; clone over https.
git clone https://github.com/robbyrussell/oh-my-zsh.git /home/vagrant/.oh-my-zsh
cp /home/vagrant/.oh-my-zsh/templates/zshrc.zsh-template /home/vagrant/.zshrc
printf "\nsource ~/.bash_aliases\n" | tee -a /home/vagrant/.zshrc
printf "\nsource ~/.profile\n" | tee -a /home/vagrant/.zshrc
chown -R vagrant:vagrant /home/vagrant/.oh-my-zsh
chown vagrant:vagrant /home/vagrant/.zshrc
# Install Golang
wget https://dl.google.com/go/go1.10.linux-amd64.tar.gz
tar -C /usr/local -xzf go1.10.linux-amd64.tar.gz
printf "\nPATH=\"/usr/local/go/bin:\$PATH\"\n" | tee -a /home/vagrant/.profile
# One last upgrade check
apt-get -y upgrade
# Clean Up
apt-get -y autoremove
apt-get -y clean
chown -R vagrant:vagrant /home/vagrant
# Add Composer Global Bin To Path
printf "\nPATH=\"$(sudo su - vagrant -c 'composer config -g home 2>/dev/null')/vendor/bin:\$PATH\"\n" | tee -a /home/vagrant/.profile
# Enable Swap Memory
/bin/dd if=/dev/zero of=/var/swap.1 bs=1M count=1024
/sbin/mkswap /var/swap.1
/sbin/swapon /var/swap.1
apt-get -y autoremove;
apt-get -y clean;
|
/* Copyright 2007-2015 QReal Research Group
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License. */
#include "startPosition.h"
#include "twoDModel/engine/model/constants.h"
using namespace twoDModel::items;
// Visual size of the start-position cross marker, in scene units.
const QSizeF size = QSizeF(13, 13);
// Pen width used to draw the cross.
const int lineWidth = 3;
// Constructs the marker offset to the robot's center (half the robot size in
// each axis) and initializes the base rotation machinery.
StartPosition::StartPosition(const QSizeF &robotSize, QGraphicsItem *parent)
	: RotateItem(parent)
	, mRobotSize(robotSize)
{
	setX(mRobotSize.width() / 2);
	setY(mRobotSize.height() / 2);
	RotateItem::init();
}
// The marker is a `size`-sized square centered on the item's local origin.
QRectF StartPosition::boundingRect() const
{
	const qreal w = size.width();
	const qreal h = size.height();
	return QRectF(-w / 2, -h / 2, w, h);
}
// Paints the marker: a red "X" whose two strokes are the diagonals of
// boundingRect().  Painter state is saved/restored around the pen change.
void StartPosition::drawItem(QPainter *painter, const QStyleOptionGraphicsItem *option, QWidget *widget)
{
	Q_UNUSED(option)
	Q_UNUSED(widget)
	painter->save();
	QPen pen(Qt::red);
	pen.setWidth(lineWidth);
	painter->setPen(pen);
	painter->drawLine(-size.width() / 2, -size.height() / 2, size.width() / 2, size.height() / 2);
	painter->drawLine(-size.width() / 2, size.height() / 2, size.width() / 2, -size.height() / 2);
	painter->restore();
}
// Serializes this item as a <startPosition> element under `parent`, storing
// scene coordinates ("x"/"y") and the rotation angle ("direction").
QDomElement StartPosition::serialize(QDomElement &parent) const
{
	QDomElement startPositionElement = RotateItem::serialize(parent);
	startPositionElement.setTagName("startPosition");
	startPositionElement.setAttribute("x", QString::number(scenePos().x()));
	startPositionElement.setAttribute("y", QString::number(scenePos().y()));
	startPositionElement.setAttribute("direction", QString::number(rotation()));
	return startPositionElement;
}
// Restores position and rotation from a <startPosition> element written by
// serialize().  Missing attributes deserialize as 0.0 (QString::toDouble).
void StartPosition::deserialize(const QDomElement &startPositionElement)
{
	setX(startPositionElement.attribute("x").toDouble());
	setY(startPositionElement.attribute("y").toDouble());
	setRotation(startPositionElement.attribute("direction").toDouble());
}
// Restores the start position from `robotElement`, supporting both formats:
// newer saves carry a dedicated <startPosition> child; older ones only stored
// the robot's own "position"/"direction" attributes, so we derive the start
// position from those, offset to the robot's center.
void StartPosition::deserializeCompatibly(const QDomElement &robotElement)
{
	const QDomElement startPositionElement = robotElement.firstChildElement("startPosition");
	if (startPositionElement.isNull()) {
		// Legacy "x:y" robot position; anything malformed falls back to "0".
		const QStringList robotPositionParts = robotElement.attribute("position").split(":");
		const QString robotX = robotPositionParts.count() != 2 ? "0" : robotPositionParts[0];
		const QString robotY = robotPositionParts.count() != 2 ? "0" : robotPositionParts[1];
		setX(robotX.toDouble() + mRobotSize.width() / 2);
		setY(robotY.toDouble() + mRobotSize.height() / 2);
		setRotation(robotElement.attribute("direction").toDouble());
	} else {
		deserialize(startPositionElement);
	}
}
// The marker is not resizable, so no resize field is drawn; intentional no-op.
void StartPosition::drawFieldForResizeItem(QPainter *painter)
{
	Q_UNUSED(painter)
}
// Suppresses resize-by-drag: regardless of the cursor position, the drag
// state is forced to None.
void StartPosition::changeDragState(qreal x, qreal y)
{
	Q_UNUSED(x)
	Q_UNUSED(y)
	setDragState(None);
}
// The marker offers no context menu; the event is deliberately swallowed.
void StartPosition::contextMenuEvent(QGraphicsSceneContextMenuEvent *event)
{
	Q_UNUSED(event)
}
|
#!/bin/bash
# Builds an hour-of-day distribution of failed logins as Google Charts
# data.addRow(...) statements, then wraps it with the html_components
# header/footer via wrap_contents.sh.
#***********CONSTANTS
DIRECTORY_NAME=$1
HERE=$(pwd)
#************MAIN
cd "$DIRECTORY_NAME" || exit
# Extracting the appropriate column to grab the hour using awk
# I am using $3 here judging by the readme on GitHub to grab the hour
# I assume that third field is the hour and not the year as the year would most
# Likely be in MM/DD/YYYY (USA) or YYYY/MM/DD (EU) format
# (awk reads the files directly — the original piped them in via a useless cat)
awk ' {print $3} ' ./*/failed_login_data.txt \
    | sort | uniq -c \
    | awk ' { print "data.addRow([\x27"$2"\x27, "$1"]);"}' \
    > temp_hours_dist.html
# Using wrap_contents.sh to add footer and header to usernames
cd "$HERE" || exit
./bin/wrap_contents.sh "$DIRECTORY_NAME"/temp_hours_dist.html html_components/hours_dist \
"$DIRECTORY_NAME"/hours_dist.html
# clean up temp files
rm "$DIRECTORY_NAME"/temp_hours_dist.html
|
// lib/components/CardTenDay.js
import React, { Component } from 'react';
const CardTenDay = ({ cardForWeather }) => {
return (
<section id='tenDay-test'>
{cardForWeather[0].forecast.simpleforecast.forecastday.map((eachDay, index) => {
return (
<section id='card-tenday' key = {index}>
<h3 className='ten-day'> { eachDay.date.weekday_short } </h3>
<div><img src ={eachDay.icon_url.toString()} /></div>
<h3 id ='high' className='ten-day'> { eachDay.high.fahrenheit }°F </h3>
<h3 id='low' className='ten-day'> { eachDay.low.fahrenheit }°F </h3>
</section>
);
})}
</section>
);
};
export default CardTenDay;
|
#!/bin/bash
# Compile test.c with debug info, run the binary, and delete it on success.
clang -g test.c -o test && ./test && rm ./test
const addToArray = (arr, item) => {
arr.push(item);
return arr;
};
// testing
console.log(addToArray(["apples", "oranges"], "bananas")) // Output: ["apples", "oranges", "bananas"] |
// (repository metadata artifact removed: gh_stars=0)
import { Component, OnInit } from '@angular/core';
import { ActivatedRoute, Router } from '@angular/router';
import { Book, UserBook, AuthenticatorData } from '../../models/models';
import { ApplicationToastService } from '../../shared-services/toast.service';
import { UserService } from '../../http-services/shared-services/user.service';
import { AuthService } from '../../http-services/authenticator/auth.service';
import { AdminService } from '../../http-services/shared-services/admin.service';
@Component({
  selector: 'book-users',
  templateUrl: './book-users.page.html',
  styleUrls: ['./book-users.page.scss'],
})
/**
 * Admin page showing a book and the users currently holding it.
 * Deletion is only enabled once no users hold the book.
 */
export class BookUsersPage implements OnInit {
  // Book being administered; loaded by getBookDetails().
  book: Book = {} as any;
  // Route parameter "id"; null when the route carried no id.
  bookId: any;
  // User ids extracted from `users`, used to batch-fetch their profiles.
  idList: string[] = [];
  // NOTE(review): original spelling "Dailog" kept — the template may bind to it.
  isDeleteDailog = true;
  users: UserBook[] = [];
  bookUsers: AuthenticatorData[] = [];
  // True only when no users still hold the book.
  deleteButtonEnabled = false;
  isLoading = false;
  isLoadingBar = false;

  constructor( private route: ActivatedRoute,
               private applicationServe: ApplicationToastService,
               private userServe: UserService,
               public authServe: AuthService,
               private adminServe: AdminService,
               private router: Router) {
    this.bookId = this.route.snapshot.paramMap.get('id');
  }

  ngOnInit() {
    this.getBookDetails();
  }

  /** Loads the book plus its holder list, then resolves holder profiles. */
  getBookDetails(): void {
    this.isLoading = true;
    if (this.bookId) {
      this.userServe.getBookUserDetails(this.bookId).subscribe((val) => {
        if (val && val != null) {
          // (debug console.log removed)
          this.book = val.post;
          this.users = val.bookUsers;
          this.getUsers();
        }
        this.isLoading = false;
        this.applicationServe.presentToastWithOptions(val.message, 'primary');
      }, (error) => {
        this.isLoading = false;
        this.applicationServe.presentToastWithOptions(error.message, 'red');
        this.authServe.callAuthChecker(error.status);
      });
    }
  }

  /** Fetches holder profiles; enables the delete button when none remain. */
  getUsers() {
    this.isLoading = true;
    this.idList = [];
    this.users.forEach(element => {
      this.idList.push(element.userId);
    });
    if (this.idList.length > 0) {
      this.deleteButtonEnabled = false;
      this.adminServe.searchUsers(this.idList).subscribe(val => {
        this.bookUsers = val.posts;
        this.isLoading = false;
      }, (error) => {
        // Fix: always clear the spinner — it previously stayed on for non-401 errors.
        this.isLoading = false;
        if (error.status === 401) {
          this.authServe.logOut401();
        }
      });
    } else {
      this.isLoading = false;
      this.deleteButtonEnabled = true;
    }
  }

  onNoClick() {
    this.router.navigate(['/admin', 'tabs', 'admin']);
  }

  /** Deletes the book and returns to the admin tab on success. */
  deleteBook() {
    this.isLoadingBar = true;
    this.adminServe.deleteBook(this.bookId).subscribe((val) => {
      this.isLoadingBar = false;
      this.applicationServe.presentToastWithOptions(val.message, 'primary');
      this.router.navigate(['/admin', 'tabs', 'admin']);
    }, (error) => {
      // Fix: always clear the progress bar — it previously stayed on for non-401 errors.
      this.isLoadingBar = false;
      if (error.status === 401) {
        this.authServe.logOut401();
      }
    });
  }
}
|
<filename>7-assets/past-student-repos/LambdaSchool-master/m2/23d1/src/comparison.js
import React, { useState, useEffect } from 'react';
import { withFormik, Form, Field } from 'formik';
import axios from 'axios';
import * as Yup from 'yup';
import './App.css';
// Sign-up form rendered through Formik's withFormik HOC (FormikUserForm below
// supplies values/touched/errors/status).  `status` carries the API response
// of the latest successful submit; each one is appended to the local `user`
// list and rendered under the form.
const UserForm = ({values, touched, errors, status}) =>{
    const [user, setUser] = useState([]);
    // Append the newly created user whenever handleSubmit publishes a status.
    useEffect(() =>{
        console.log(status);
        status && setUser(user => [...user, status]);
    }, [status]);
    return (
        <div className='user-form'>
            <Form>
                <h1>Sign Up</h1>
                <Field type='text' name='name' placeholder='Name' />
                {touched.name && errors.name && (
                    <p className='error'>{errors.name}</p>
                )}
                <Field type='text' name='email' placeholder='Email'/>
                <Field type='text' name='password' placeholder='Password' />
                {touched.password && errors.password && (
                    <p className='error'>{errors.password}</p>
                )}
                <label className="checkbox-container">
                    Terms of Service
                    <Field type='checkbox' name = 'tos' checked={values.tos} />
                    <span className='checkmark' />
                </label>
                <button type='submit'>Submit</button>
            </Form>
            {user.map(user =>(
                <ul key={user.id}>
                    <li>Name: </li>
                    <li>Email: </li>
                    <li>Password: </li>
                </ul>
            ))}
        </div>
    );
};
// Wires UserForm to Formik: initial values from props, Yup validation, and a
// submit handler that POSTs to the reqres.in demo API, publishes the created
// user via `status`, and clears the form.
const FormikUserForm = withFormik({
    mapPropsToValues({name, email, password, tos}) {
        return {
            tos: tos || false,
            name: name || '',
            email: email || '',
            password: password || ''
        };
    },
    validationSchema: Yup.object().shape({
        name: Yup.string().required('Name is required'),
        // Fix: the message was the placeholder artifact '<PASSWORD>'.
        password: Yup.string().required('Password is required')
    }),
    handleSubmit(values, { setStatus, resetForm}) {
        axios
            .post('https://reqres.in/api/users/', values)
            .then(resp =>{
                console.log(resp);
                setStatus(resp.data);
                resetForm();
            })
            // Fix: `err.responce` was a typo — axios errors expose `response`.
            .catch(err => console.log(err.response));
    }
}) (UserForm);
export default FormikUserForm;
import logging
import os
import re
import sys
import click
from click.testing import CliRunner
import rasterio
from rasterio.rio import features
# logging.basicConfig(stream=sys.stderr, level=logging.DEBUG)
# GeoJSON fixtures fed to `rio rasterize` via stdin in the tests below.
# A single lon/lat polygon feature with a "val" property.
TEST_FEATURES = """{
"geometry": {
"coordinates": [
[
[-110, 40],
[-100, 40],
[-100, 45],
[-105, 45],
[-110, 40]
]
],
"type": "Polygon"
},
"properties": {
"val": 15
},
"type": "Feature"
}"""
# > rio shapes tests/data/shade.tif --mask --sampling 500 --projected --precision 0
# A two-feature FeatureCollection in Web Mercator coordinates, used to
# exercise rasterize's CRS handling.
TEST_MERC_FEATURECOLLECTION = """{
"bbox": [-11858135.0, 4803914.0, -11848351.0, 4813698.0],
"features": [{
"bbox": [-11853357.504145855, 4808920.97837715,
-11848580.189878704, 4813698.2926443005],
"geometry": {
"coordinates": [
[
[-11853357.504145855, 4813698.2926443005],
[-11853357.504145855, 4808920.97837715],
[-11848580.189878704, 4808920.97837715],
[-11848580.189878704, 4813698.2926443005],
[-11853357.504145855, 4813698.2926443005]
]
],
"type": "Polygon"
},
"properties": {
"val": 2
},
"type": "Feature"
}, {
"bbox": [-11858134.818413004, 4804143.66411,
-11853357.504145855, 4808920.97837715],
"geometry": {
"coordinates": [
[
[-11858134.818413004, 4808920.97837715],
[-11858134.818413004, 4804143.66411],
[-11853357.504145855, 4804143.66411],
[-11853357.504145855, 4808920.97837715],
[-11858134.818413004, 4808920.97837715]
]
],
"type": "Polygon"
},
"properties": {
"val": 3
},
"type": "Feature"
}],
"type": "FeatureCollection"
}"""
def test_err():
    """Requesting a band that does not exist (--bidx 4) must exit with 1."""
    # NOTE(review): this test builds its own CliRunner instead of taking the
    # `runner` fixture used by the other tests.
    runner = CliRunner()
    result = runner.invoke(
        features.shapes, ['tests/data/shade.tif', '--bidx', '4'])
    assert result.exit_code == 1
def test_shapes(runner):
    """`rio shapes` on shade.tif yields one FeatureCollection of 232 features."""
    result = runner.invoke(features.shapes, ['tests/data/shade.tif'])
    assert result.exit_code == 0
    output = result.output
    assert output.count('"FeatureCollection"') == 1
    assert output.count('"Feature"') == 232
def test_shapes_sequence(runner):
    """--sequence emits bare Features with no FeatureCollection wrapper."""
    args = ['tests/data/shade.tif', '--sequence']
    result = runner.invoke(features.shapes, args)
    assert result.exit_code == 0
    output = result.output
    assert output.count('"FeatureCollection"') == 0
    assert output.count('"Feature"') == 232
def test_shapes_sequence_rs(runner):
    """--rs prefixes every sequence item with an RS (U+001E) separator."""
    args = ['tests/data/shade.tif', '--sequence', '--rs']
    result = runner.invoke(features.shapes, args)
    assert result.exit_code == 0
    output = result.output
    assert output.count('"FeatureCollection"') == 0
    assert output.count('"Feature"') == 232
    assert output.count(u'\u001e') == 232
def test_shapes_with_nodata(runner):
    """--with-nodata also vectorizes nodata regions (288 features vs 232)."""
    args = ['tests/data/shade.tif', '--with-nodata']
    result = runner.invoke(features.shapes, args)
    assert result.exit_code == 0
    output = result.output
    assert output.count('"FeatureCollection"') == 1
    assert output.count('"Feature"') == 288
def test_shapes_indent(runner):
    """--indent pretty-prints; the total newline count pins the formatting."""
    args = ['tests/data/shade.tif', '--indent', '2']
    result = runner.invoke(features.shapes, args)
    assert result.exit_code == 0
    output = result.output
    assert output.count('"FeatureCollection"') == 1
    assert output.count('\n') == 70139
def test_shapes_compact(runner):
    """--compact drops the spaces after ',' and ':' in the JSON output."""
    result = runner.invoke(features.shapes, ['tests/data/shade.tif', '--compact'])
    assert result.exit_code == 0
    output = result.output
    assert output.count('"FeatureCollection"') == 1
    assert output.count(', ') == 0
    assert output.count(': ') == 0
def test_shapes_sampling(runner):
    """--sampling decimates the raster, producing fewer features (124)."""
    args = ['tests/data/shade.tif', '--sampling', '10']
    result = runner.invoke(features.shapes, args)
    assert result.exit_code == 0
    output = result.output
    assert output.count('"FeatureCollection"') == 1
    assert output.count('"Feature"') == 124
def test_shapes_precision(runner):
    """--precision 1 limits every coordinate to a single decimal place."""
    args = ['tests/data/shade.tif', '--precision', '1']
    result = runner.invoke(features.shapes, args)
    assert result.exit_code == 0
    assert result.output.count('"FeatureCollection"') == 1
    # Find no numbers with 2+ decimal places.
    assert re.search(r'\d*\.\d{2,}', result.output) is None
def test_shapes_mask(runner):
    """--mask vectorizes the dataset mask of RGB.byte.tif (9 features)."""
    args = ['tests/data/RGB.byte.tif', '--mask']
    result = runner.invoke(features.shapes, args)
    assert result.exit_code == 0
    output = result.output
    assert output.count('"FeatureCollection"') == 1
    assert output.count('"Feature"') == 9
def test_rasterize_err(tmpdir, runner):
    """Each malformed rasterize input must fail with a non-zero exit code.

    Exit code 2 is the usage-error code; -1 signals an unhandled exception
    in the command.
    """
    output = str(tmpdir.join('test.tif'))
    # Test invalid stdin
    result = runner.invoke(features.rasterize, [output], input='BOGUS')
    assert result.exit_code == -1
    # Test invalid GeoJSON
    result = runner.invoke(features.rasterize, [output],
                           input='{"foo": "bar"}')
    assert result.exit_code == 2
    # Test invalid res
    result = runner.invoke(features.rasterize, [output], input=TEST_FEATURES)
    assert result.exit_code == 2
    # Test invalid CRS for bounds
    result = runner.invoke(features.rasterize, [output, '--res', 1],
                           input=TEST_MERC_FEATURECOLLECTION)
    assert result.exit_code == 2
    # Test invalid CRS value
    result = runner.invoke(features.rasterize, [output,
                                                '--res', 1,
                                                '--src_crs', 'BOGUS'],
                           input=TEST_MERC_FEATURECOLLECTION)
    assert result.exit_code == 2
def test_rasterize(tmpdir, runner):
    """Rasterize via --dimensions, --dimensions+--bounds, and --res."""
    # Test dimensions
    output = str(tmpdir.join('test.tif'))
    result = runner.invoke(features.rasterize,
                           [output, '--dimensions', 20, 10],
                           input=TEST_FEATURES)
    assert result.exit_code == 0
    assert os.path.exists(output)
    with rasterio.open(output) as out:
        assert out.count == 1
        assert out.meta['width'] == 20
        assert out.meta['height'] == 10
        # Burned-in cells are 1, background is 0; counts pin the geometry.
        data = out.read_band(1, masked=False)
        assert (data == 0).sum() == 55
        assert (data == 1).sum() == 145

    # Test dimensions and bounds
    output = str(tmpdir.join('test2.tif'))
    result = runner.invoke(features.rasterize,
                           [output,
                            '--dimensions', 40, 20,
                            '--bounds', -120, 30, -90, 50
                            ], input=TEST_FEATURES)
    assert result.exit_code == 0
    assert os.path.exists(output)
    with rasterio.open(output) as out:
        assert out.count == 1
        assert out.meta['width'] == 40
        assert out.meta['height'] == 20
        data = out.read_band(1, masked=False)
        assert (data == 0).sum() == 748
        assert (data == 1).sum() == 52

    # Test resolution: --res 0.5 should reproduce the 20x10 grid above.
    output = str(tmpdir.join('test3.tif'))
    result = runner.invoke(features.rasterize,
                           [output, '--res', 0.5], input=TEST_FEATURES)
    assert result.exit_code == 0
    assert os.path.exists(output)
    with rasterio.open(output) as out:
        assert out.count == 1
        assert out.meta['width'] == 20
        assert out.meta['height'] == 10
        data = out.read_band(1, masked=False)
        assert (data == 0).sum() == 55
        assert (data == 1).sum() == 145
def test_rasterize_existing_output(tmpdir, runner):
    """Rasterizing into an existing raster burns new features on top of it."""
    output = str(tmpdir.join('test.tif'))
    result = runner.invoke(features.rasterize,
                           [output, '--res', 0.5], input=TEST_FEATURES)
    assert result.exit_code == 0
    assert os.path.exists(output)

    # A polygon overlapping part of the already-burned area.
    geojson = """{
        "geometry": {
            "coordinates": [
                [
                    [-102, 40],
                    [-98, 40],
                    [-98, 45],
                    [-100, 45],
                    [-102, 40]
                ]
            ],
            "type": "Polygon"
        },
        "type": "Feature"
    }"""
    result = runner.invoke(features.rasterize, [output, '--default_value', 2],
                           input=geojson)
    with rasterio.open(output) as out:
        assert out.count == 1
        data = out.read_band(1, masked=False)
        # 20 cells of the original value-1 area were overwritten with 2.
        assert (data == 0).sum() == 55
        assert (data == 1).sum() == 125
        assert (data == 2).sum() == 20
def test_rasterize_like(tmpdir, runner):
    """--like copies transform/shape/CRS from a template raster."""
    output = str(tmpdir.join('test.tif'))
    result = runner.invoke(features.rasterize,
                           [output, '--like', 'tests/data/shade.tif'],
                           input=TEST_MERC_FEATURECOLLECTION)
    assert result.exit_code == 0
    assert os.path.exists(output)
    with rasterio.open(output) as out:
        assert out.count == 1
        data = out.read_band(1, masked=False)
        assert (data == 0).sum() == 548576
        assert (data == 1).sum() == 500000

    # Test invalid like raster: a nonexistent path is a usage error (2).
    output = str(tmpdir.join('test2.tif'))
    result = runner.invoke(features.rasterize,
                           [output, '--like', 'foo.tif'], input=TEST_FEATURES)
    assert result.exit_code == 2
def test_rasterize_property_value(tmpdir, runner):
    """--property burns each feature's named property value instead of 1."""
    # Test feature collection property values
    output = str(tmpdir.join('test.tif'))
    result = runner.invoke(features.rasterize,
                           [output,
                            '--res', 1000,
                            '--property', 'val',
                            '--src_crs', 'EPSG:3857'
                            ],
                           input=TEST_MERC_FEATURECOLLECTION)
    assert result.exit_code == 0
    assert os.path.exists(output)
    with rasterio.open(output) as out:
        assert out.count == 1
        data = out.read_band(1, masked=False)
        # Two features with val 2 and 3 over a background of 0.
        assert (data == 0).sum() == 50
        assert (data == 2).sum() == 25
        assert (data == 3).sum() == 25

    # Test feature property values
    output = str(tmpdir.join('test2.tif'))
    result = runner.invoke(features.rasterize,
                           [output, '--res', 0.5, '--property', 'val'],
                           input=TEST_FEATURES)
    assert result.exit_code == 0
    assert os.path.exists(output)
    with rasterio.open(output) as out:
        assert out.count == 1
        data = out.read_band(1, masked=False)
        assert (data == 0).sum() == 55
        assert (data == 15).sum() == 145
def test_rasterize_out_of_bounds(tmpdir, runner):
    """Features outside the target raster's extent are a usage error (2)."""
    output = str(tmpdir.join('test.tif'))

    # Test out of bounds of --like raster
    result = runner.invoke(features.rasterize,
                           [output, '--like', 'tests/data/shade.tif'],
                           input=TEST_FEATURES)
    assert result.exit_code == 2

    # Test out of bounds of existing output raster (first have to create one)
    result = runner.invoke(features.rasterize,
                           [output,
                            '--res', 1000,
                            '--property', 'val',
                            '--src_crs', 'EPSG:3857'
                            ],
                           input=TEST_MERC_FEATURECOLLECTION)
    assert result.exit_code == 0
    assert os.path.exists(output)
    result = runner.invoke(features.rasterize, [output], input=TEST_FEATURES)
    assert result.exit_code == 2
|
#!/bin/bash
# automatically create the debian package
#
# Stages the Python sources and resources into a versioned build directory
# and produces the .orig tarball expected by the Debian tooling.
set -euo pipefail  # abort on any failure; without this a failed cd/cp
                   # would let tar run against the wrong/partial tree

build_dir=./packaging/archive/itunes-remote-applet_1.0-beta-1

rm -rf packaging/
mkdir -p "$build_dir"

cp src/*.py "$build_dir"
cp -R resources/* "$build_dir"

cd ./packaging/archive/
tar cfz ../itunes-remote-applet-1.0-beta-1.orig.tar.gz ./itunes-remote-applet_1.0-beta-1/ --exclude=.svn

# tar -xf itunes-remote-applet-1.0-beta-1.orig.tar.gz
# cd itunes-remote-applet
# dh_make -c apache -s -b -p itunes-remote-applet_1.0-beta-1
# cd debian
# rm *.ex *.EX
|
import smtplib

# NOTE(review): credentials are hardcoded here; move them to environment
# variables or a secrets store before real use.
sender_email = 'example@gmail.com'
password = 'example_password'

subject = 'Automated Email from Program'
message = 'Message body'
body = 'Subject: ' + subject + '\n\n' + message

# Open ONE SMTP session and reuse it for every recipient; the original
# opened and tore down a fresh TLS connection per recipient and also
# called quit() inside the `with` block, which the context manager
# already does on exit.
# NOTE(review): `recipient_emails` must be defined before this point —
# it is not defined in this file; confirm where it comes from.
with smtplib.SMTP('smtp.gmail.com', 587) as server:
    server.ehlo()
    server.starttls()
    server.ehlo()
    server.login(sender_email, password)
    for email in recipient_emails:
        server.sendmail(sender_email, email, body)

print('Email sent successfully.')
<gh_stars>0
module Puxico
  # Parses and re-serializes a radio configuration dump.
  #
  # The input is CRLF-delimited text with a fixed layout (assumed — TODO
  # confirm against the radio's export format):
  #   line 0        : preamble
  #   lines 1..128  : one channel definition per line (128 channels)
  #   lines 129..   : remaining settings, kept verbatim
  class RadioConfig
    attr_reader :preamble, :chans, :settings

    # io: any object responding to #read returning the full config text.
    def initialize(io)
      lines = io.read.split("\r\n")
      @preamble = lines.first
      # Each channel line is parsed by the Chan hierarchy.
      @chans = lines[1..128].collect {|c| Chan::Base.from_conf c }
      @settings = lines[129..-1]
    end

    # Serializes back to the original CRLF format, with a trailing CRLF.
    def to_s
      [preamble, chans, settings, "\r\n"].flatten.collect(&:to_s).join("\r\n")
    end
  end
end
|
#!/bin/bash
# Compute a whitespace-normalized md5 fingerprint of a (possibly compressed)
# CNF file: comment ('c') and problem ('p') lines are dropped, runs of
# whitespace are collapsed, and the clause stream is hashed.
if [ $# -lt 1 ]; then
    echo "Usage: $0 [cnf-file]"
    exit 1   # original fell through and ran `cat` with no file argument
fi

file=$1

# Pick a decompressor based on the file extension.
command="cat"
if [[ $file == *.gz ]]; then
    command="zcat"
elif [[ $file == *.bz2 ]]; then
    command="bzcat"
elif [[ $file == *.xz ]]; then
    command="xzcat"
fi

# sed -E is required: in basic regex mode '+' is a literal character, so
# the original 's/[[:space:]]+/ /g' never collapsed whitespace runs.
$command "$file" | sed -E 's/[[:space:]]+/ /g;s/^[[:space:]]//g;s/[[:space:]]$//g' | grep -v '^c\|^p' | tr -s '\n\r' ' ' | md5sum
# this adds trainling zero if missing (but buffer-overruns for large files due to missing line-breaks after above tr-command): sed 's/[^0]$/& 0/'
|
<reponame>smagill/opensphere-desktop
package io.opensphere.csvcommon.ui.columndefinition.ui;
import java.util.Observable;
import java.util.Observer;
import org.apache.commons.lang3.StringUtils;
import io.opensphere.core.util.DefaultValidatorSupport;
import io.opensphere.core.util.ValidationStatus;
import io.opensphere.csvcommon.ui.columndefinition.model.ColumnDefinitionModel;
/**
* Sends any changes to the model's warning messages, to the UI.
*/
public class ColumnDefinitionBinder implements Observer
{
    /** The validator used to communicate validation messages to the UI. */
    private final DefaultValidatorSupport myValidator;

    /** The model containing the validation messages to display. */
    private final ColumnDefinitionModel myModel;

    /**
     * Constructs a new column definition binder and registers it as an
     * observer of the model.
     *
     * @param validator The validator.
     * @param model The model.
     */
    public ColumnDefinitionBinder(DefaultValidatorSupport validator, ColumnDefinitionModel model)
    {
        myModel = model;
        myValidator = validator;
        myModel.addObserver(this);
    }

    @Override
    public void update(Observable o, Object arg)
    {
        boolean messageChanged = ColumnDefinitionModel.WARNING_MESSAGE_PROPERTY.equals(arg)
                || ColumnDefinitionModel.ERROR_MESSAGE_PROPERTY.equals(arg);
        if (messageChanged)
        {
            updateValidationMessages();
        }
    }

    /**
     * Pushes the model's current message to the validator. An error message
     * takes precedence over a warning; with neither set, the state is VALID.
     */
    private void updateValidationMessages()
    {
        String errorMessage = myModel.getErrorMessage();
        String warningMessage = myModel.getWarningMessage();

        if (StringUtils.isNotEmpty(errorMessage))
        {
            myValidator.setValidationResult(ValidationStatus.ERROR, errorMessage);
            return;
        }

        if (StringUtils.isNotEmpty(warningMessage))
        {
            myValidator.setValidationResult(ValidationStatus.WARNING, warningMessage);
            return;
        }

        myValidator.setValidationResult(ValidationStatus.VALID, null);
    }

    /**
     * Removes itself as a listener to the model.
     */
    public void close()
    {
        myModel.deleteObserver(this);
    }
}
|
<gh_stars>1-10
// Copyright (C) 2014 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
Gerrit.install(function(self) {
  if (window.Polymer) { return; }
  // The code below is only used by the GWT-UI

  // Decorates image links found in a newly rendered comment, using the
  // user's plugin preferences (cached in a cookie for one day).
  function onComment(e) {
    var prefs = getPrefsFromCookie();
    if (prefs !== null) {
      convertImageLinks(getLinks(e), prefs);
    } else {
      Gerrit.get('/accounts/self/' + self.getPluginName() + '~preference', function(prefs) {
        storePrefsInCookie(prefs);
        convertImageLinks(getLinks(e), prefs);
      });
    }
  }

  function getLinks(e) {
    return e.getElementsByTagName("a");
  }

  // Caches the preferences JSON in a cookie for 1 day.
  function storePrefsInCookie(prefs) {
    var date = new Date();
    date.setTime(date.getTime() + (1 * 24 * 60 * 60 * 1000)); // 1 day
    document.cookie = getCookieName()
        + "="
        + JSON.stringify(prefs)
        + "; expires=" + date.toGMTString()
        + "; path=/";
  }

  // Returns the cached preferences object, or null when no cookie is set.
  // NOTE(review): values are written unescaped but read via unescape();
  // works only while the JSON contains no ';' or '=' — confirm.
  function getPrefsFromCookie() {
    var cookie = document.cookie;
    if (cookie.length > 0) {
      var cookieName = getCookieName();
      var start = cookie.indexOf(cookieName + "=");
      if (start != -1) {
        start = start + cookieName.length + 1;
        var end = document.cookie.indexOf(";", start);
        if (end == -1) {
          end = document.cookie.length;
        }
        return JSON.parse(unescape(document.cookie.substring(start, end)));
      }
    }
    return null;
  }

  function getCookieName() {
    return self.getPluginName() + "~prefs";
  }

  // Applies the configured decoration to every link matching prefs.pattern.
  function convertImageLinks(l, prefs) {
    if (!prefs.pattern) {
      return;
    }
    if ('TOOLTIP' === prefs.link_decoration) {
      addTooltips(l, prefs.pattern);
    } else if ('INLINE' === prefs.link_decoration) {
      inlineImages(l, prefs.pattern);
    }
  }

  // Replaces each matching link with an inline <img> wrapped in a link.
  function inlineImages(l, pattern) {
    for(var i = 0; i < l.length; i++) {
      if (l[i].href.match(pattern)) {
        var a = document.createElement('a');
        a.setAttribute('href', l[i].href);
        var img = document.createElement('img');
        img.setAttribute('src', l[i].href);
        img.setAttribute('style', 'border: 1px solid #B3B2B2;');
        a.appendChild(img);
        l[i].parentNode.replaceChild(a, l[i]);
      }
    }
  }

  // Shows a hover preview image below each matching link.
  // BUG FIX: was declared as addTooltips(pattern) but called as
  // addTooltips(l, prefs.pattern); the body then read an undefined `l`,
  // throwing a ReferenceError. The signature now matches the call site.
  function addTooltips(l, pattern) {
    for(var i = 0; i < l.length; i++) {
      if (l[i].href.match(pattern)) {
        l[i].onmouseover = function (evt) {
          var img = document.createElement('img');
          img.setAttribute('src', this.href);
          img.setAttribute('style', 'border: 1px solid #B3B2B2; position: absolute; top: ' + (this.offsetTop + this.offsetHeight) + 'px');
          this.parentNode.insertBefore(img, this);
          this.onmouseout = function (evt) {
            this.parentNode.removeChild(this.previousSibling);
          }
        }
      }
    }
  }

  Gerrit.on('comment', onComment);
});
|
def process_ambient_temperature(data, sensorId):
    """Extract the ambient-temperature reading for one sensor.

    Returns a pair ``(reading, modified_data)`` where ``reading`` holds the
    'ambientTemperature' value under that key (empty dict when the sensor or
    the reading is missing) and ``modified_data`` is a copy of ``data`` with
    the reading removed. The caller's ``data`` is left untouched.

    :param data: nested mapping, expected shape
        ``{'sensors': {sensorId: {'ambientTemperature': ..., ...}}}``
        (assumed from usage — TODO confirm with callers).
    :param sensorId: key identifying the sensor inside ``data['sensors']``.
    :return: ``(reading, modified_data)`` tuple.
    """
    import copy

    reading = {}
    # BUG FIX: the original used data.copy(), a *shallow* copy — the nested
    # sensor dicts were shared, so the `del` below mutated the caller's data
    # despite the comment claiming otherwise. deepcopy isolates the copy.
    modified_data = copy.deepcopy(data)
    try:
        reading['ambientTemperature'] = \
            modified_data['sensors'][sensorId]['ambientTemperature']
        del modified_data['sensors'][sensorId]['ambientTemperature']
    except KeyError:
        pass  # No ambient-temperature reading for this sensor; return as-is.
    return reading, modified_data
package web
import (
"github.com/elko-dev/spawn/applications"
log "github.com/sirupsen/logrus"
)
// WebType struct to create an application type
type WebType struct {
	Client         applications.Project // frontend project to create
	Server         applications.Project // backend project; only used when includeBackend is true
	includeBackend bool                 // when false, Create stops after the client
}
// Create sets up a new application: it always creates the client project,
// and additionally the backend project when the type was constructed with
// includeBackend set. Returns the first error encountered.
func (webType WebType) Create() error {
	log.WithFields(log.Fields{}).Debug("Creating client app")
	if err := webType.Client.Create(); err != nil {
		return err
	}
	if webType.includeBackend {
		log.WithFields(log.Fields{}).Debug("Creating backend app")
		return webType.Server.Create()
	}
	return nil
}
// NewWebType init constructor: bundles the client and (optional) server
// projects into a WebType value.
func NewWebType(client applications.Project, server applications.Project, includeBackend bool) WebType {
	return WebType{
		Client:         client,
		Server:         server,
		includeBackend: includeBackend,
	}
}
|
def bubble_sort(nums):
    """
    Implement the Bubble sort algorithm to order a given array of integers
    in ascending order.

    The list is sorted in place; nothing is returned. Each pass bubbles the
    largest remaining element toward the end; the sweep stops early as soon
    as a full pass performs no swaps (the list is already sorted), which
    makes the best case O(n) instead of the original's unconditional O(n^2).

    :param nums: list of comparable items, modified in place.
    """
    n = len(nums)
    for _ in range(n - 1):
        swapped = False
        for i in range(n - 1):
            if nums[i] > nums[i + 1]:
                nums[i], nums[i + 1] = nums[i + 1], nums[i]
                swapped = True
        if not swapped:
            break  # no swaps this pass: already sorted
if __name__ == '__main__':
    # test data
    nums = [7, 2, 5, 0, 1, 9]
    print('Before sorting: {}'.format(nums))
    # sort (in place; bubble_sort returns None)
    bubble_sort(nums)
    print('After sorting: {}'.format(nums))
#!/usr/bin/env bash

# PLEASE NOTE: This script has been automatically generated by conda-smithy. Any changes here
# will be lost next time ``conda smithy rerender`` is run. If you would like to make permanent
# changes to this script, consider a proposal to conda-smithy so that other feedstocks can also
# benefit from the improvement.

# Fail fast, treat unset variables as errors, and echo each command.
set -xeuo pipefail
export PYTHONUNBUFFERED=1

# Write the channel/build configuration consumed by conda inside the container.
cat >~/.condarc <<CONDARC

channels:
 - loopbio
 - conda-forge
 - defaults

conda-build:
 root-dir: /home/conda/feedstock_root/build_artifacts
show_channel_urls: true

CONDARC

# A lock sometimes occurs with incomplete builds. The lock file is stored in build_artifacts.
conda clean --lock

conda install --yes --quiet conda-forge-ci-setup=1 conda-build
source run_conda_forge_build_setup

# Build the recipe with the variant config for this CI job, then upload
# (or verify absence of) the package on the loopbio channel.
conda build /home/conda/recipe_root -m /home/conda/feedstock_root/.ci_support/${CONFIG}.yaml --quiet
upload_or_check_non_existence /home/conda/recipe_root loopbio --channel=main -m /home/conda/feedstock_root/.ci_support/${CONFIG}.yaml

# Marker file checked by the CI harness to confirm the build completed.
touch "/home/conda/feedstock_root/build_artifacts/conda-forge-build-done-${CONFIG}"
#!/usr/bin/env sh
# Copy the repo's scripts into ~/scripts, echoing each command.
set -x

mkdir -p ~/scripts/
# NOTE(review): -i prompts before overwriting, so this will block when run
# non-interactively and a destination file exists — confirm that is intended.
cp -riv ./scripts/* ~/scripts/
|
def draw_track_id(im_draw, track_id, pos, track_id_color=None):
    """Render a numeric track ID onto an image and return the image.

    :param im_draw: image (numpy array) to draw on, modified in place.
    :param track_id: integer identifier to render.
    :param pos: (x, y) position passed to cv2.putText.
    :param track_id_color: BGR/RGB tuple; when None, a color is picked
        from a small fixed palette by track_id modulo palette size.
    :return: the same image object, with the ID drawn.
    """
    import cv2  # Assuming the usage of OpenCV for drawing on the image

    # Fixed fallback palette used when the caller supplies no color.
    palette = [(255, 0, 0), (0, 255, 0), (0, 0, 255)]
    color = track_id_color if track_id_color is not None else palette[track_id % len(palette)]

    cv2.putText(
        im_draw,
        str(track_id),
        pos,
        cv2.FONT_HERSHEY_SIMPLEX,
        1,
        color,
        2,
        cv2.LINE_AA,
    )
    return im_draw
<reponame>divvydev/rippled-network-crawler
'use strict';
var src = require('./program');
var Promise = require('bluebird');
// Recursively walks crawls backwards: fetches the crawl prior to
// `lastCrawl`, then schedules itself (via process.nextTick, to avoid
// growing the stack) with the newly fetched crawl's data. Recursion only
// stops when src.prior rejects, at which point the error is logged.
function callPrior(dbUrl, commander, lastCrawl) {
  src
  .prior(dbUrl, commander, lastCrawl)
  .then(function(crawl) {
    process.nextTick(function() {
      callPrior(dbUrl, commander, crawl.data);
    });
  })
  .catch(function(err) {
    // NOTE(review): errors (including "no prior crawl") are only logged,
    // never propagated — confirm this is the intended terminal condition.
    console.log(err);
  });
}
// Entry point: starts a crawl at `ipp` (with results stored to dbUrl via
// the -s flag), then walks all prior crawls with callPrior.
// NOTE(review): `resolve` is never invoked, so the returned promise can
// only reject (via src.enter failing) or stay pending forever — confirm
// callers expect a never-resolving promise.
module.exports = function(ipp, dbUrl, commander) {
  return new Promise(function(resolve, reject) {
    commander.store = dbUrl; // turning on -s dbUrl flag.
    src
    .enter(ipp, commander)
    .then(function(crawl) {
      callPrior(dbUrl, commander, crawl.data);
    })
    .catch(reject);
  });
};
|
from keras.models import load_model
def load_first_available_model(model_paths):
for path in model_paths:
if not path.startswith('#'): # Check if the path is not commented out
try:
model = load_model(path.strip()) # Load the model
return model # Return the loaded model
except:
pass # Ignore any loading errors and continue to the next path
return "No available models found" # Return this message if no models can be loaded |
/*
* This file is generated by jOOQ.
*/
package io.cattle.platform.core.model.tables.records;
import io.cattle.platform.core.model.Subnet;
import io.cattle.platform.core.model.tables.SubnetTable;
import io.cattle.platform.db.jooq.utils.TableRecordJaxb;
import java.util.Date;
import java.util.Map;
import javax.annotation.Generated;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.Table;
import org.jooq.Field;
import org.jooq.Record1;
import org.jooq.Record17;
import org.jooq.Row17;
import org.jooq.impl.UpdatableRecordImpl;
/**
 * This class is generated by jOOQ.
 */
@Generated(
    value = {
        "http://www.jooq.org",
        "jOOQ version:3.9.3"
    },
    comments = "This class is generated by jOOQ"
)
@SuppressWarnings({ "all", "unchecked", "rawtypes" })
@Entity
@Table(name = "subnet", schema = "cattle")
public class SubnetRecord extends UpdatableRecordImpl<SubnetRecord> implements TableRecordJaxb, Record17<Long, String, String, String, String, String, Date, Date, Date, Map<String,Object>, String, Integer, String, String, String, Long, Long>, Subnet {

    // NOTE(review): this file is machine-generated by jOOQ from the
    // cattle.subnet table; do not hand-edit — regenerate instead.
    private static final long serialVersionUID = -120830156;

    /**
     * Setter for <code>cattle.subnet.id</code>.
     */
    @Override
    public void setId(Long value) {
        set(0, value);
    }

    /**
     * Getter for <code>cattle.subnet.id</code>.
     */
    @Id
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    @Column(name = "id", unique = true, nullable = false, precision = 19)
    @Override
    public Long getId() {
        return (Long) get(0);
    }

    /**
     * Setter for <code>cattle.subnet.name</code>.
     */
    @Override
    public void setName(String value) {
        set(1, value);
    }

    /**
     * Getter for <code>cattle.subnet.name</code>.
     */
    @Column(name = "name", length = 255)
    @Override
    public String getName() {
        return (String) get(1);
    }

    /**
     * Setter for <code>cattle.subnet.kind</code>.
     */
    @Override
    public void setKind(String value) {
        set(2, value);
    }

    /**
     * Getter for <code>cattle.subnet.kind</code>.
     */
    @Column(name = "kind", nullable = false, length = 255)
    @Override
    public String getKind() {
        return (String) get(2);
    }

    /**
     * Setter for <code>cattle.subnet.uuid</code>.
     */
    @Override
    public void setUuid(String value) {
        set(3, value);
    }

    /**
     * Getter for <code>cattle.subnet.uuid</code>.
     */
    @Column(name = "uuid", unique = true, nullable = false, length = 128)
    @Override
    public String getUuid() {
        return (String) get(3);
    }

    /**
     * Setter for <code>cattle.subnet.description</code>.
     */
    @Override
    public void setDescription(String value) {
        set(4, value);
    }

    /**
     * Getter for <code>cattle.subnet.description</code>.
     */
    @Column(name = "description", length = 1024)
    @Override
    public String getDescription() {
        return (String) get(4);
    }

    /**
     * Setter for <code>cattle.subnet.state</code>.
     */
    @Override
    public void setState(String value) {
        set(5, value);
    }

    /**
     * Getter for <code>cattle.subnet.state</code>.
     */
    @Column(name = "state", nullable = false, length = 128)
    @Override
    public String getState() {
        return (String) get(5);
    }

    /**
     * Setter for <code>cattle.subnet.created</code>.
     */
    @Override
    public void setCreated(Date value) {
        set(6, value);
    }

    /**
     * Getter for <code>cattle.subnet.created</code>.
     */
    @Column(name = "created")
    @Override
    public Date getCreated() {
        return (Date) get(6);
    }

    /**
     * Setter for <code>cattle.subnet.removed</code>.
     */
    @Override
    public void setRemoved(Date value) {
        set(7, value);
    }

    /**
     * Getter for <code>cattle.subnet.removed</code>.
     */
    @Column(name = "removed")
    @Override
    public Date getRemoved() {
        return (Date) get(7);
    }

    /**
     * Setter for <code>cattle.subnet.remove_time</code>.
     */
    @Override
    public void setRemoveTime(Date value) {
        set(8, value);
    }

    /**
     * Getter for <code>cattle.subnet.remove_time</code>.
     */
    @Column(name = "remove_time")
    @Override
    public Date getRemoveTime() {
        return (Date) get(8);
    }

    /**
     * Setter for <code>cattle.subnet.data</code>.
     */
    @Override
    public void setData(Map<String,Object> value) {
        set(9, value);
    }

    /**
     * Getter for <code>cattle.subnet.data</code>.
     */
    @Column(name = "data", length = 16777215)
    @Override
    public Map<String,Object> getData() {
        return (Map<String,Object>) get(9);
    }

    /**
     * Setter for <code>cattle.subnet.network_address</code>.
     */
    @Override
    public void setNetworkAddress(String value) {
        set(10, value);
    }

    /**
     * Getter for <code>cattle.subnet.network_address</code>.
     */
    @Column(name = "network_address", length = 255)
    @Override
    public String getNetworkAddress() {
        return (String) get(10);
    }

    /**
     * Setter for <code>cattle.subnet.cidr_size</code>.
     */
    @Override
    public void setCidrSize(Integer value) {
        set(11, value);
    }

    /**
     * Getter for <code>cattle.subnet.cidr_size</code>.
     */
    @Column(name = "cidr_size", precision = 10)
    @Override
    public Integer getCidrSize() {
        return (Integer) get(11);
    }

    /**
     * Setter for <code>cattle.subnet.start_address</code>.
     */
    @Override
    public void setStartAddress(String value) {
        set(12, value);
    }

    /**
     * Getter for <code>cattle.subnet.start_address</code>.
     */
    @Column(name = "start_address", length = 255)
    @Override
    public String getStartAddress() {
        return (String) get(12);
    }

    /**
     * Setter for <code>cattle.subnet.end_address</code>.
     */
    @Override
    public void setEndAddress(String value) {
        set(13, value);
    }

    /**
     * Getter for <code>cattle.subnet.end_address</code>.
     */
    @Column(name = "end_address", length = 255)
    @Override
    public String getEndAddress() {
        return (String) get(13);
    }

    /**
     * Setter for <code>cattle.subnet.gateway</code>.
     */
    @Override
    public void setGateway(String value) {
        set(14, value);
    }

    /**
     * Getter for <code>cattle.subnet.gateway</code>.
     */
    @Column(name = "gateway", length = 255)
    @Override
    public String getGateway() {
        return (String) get(14);
    }

    /**
     * Setter for <code>cattle.subnet.network_id</code>.
     */
    @Override
    public void setNetworkId(Long value) {
        set(15, value);
    }

    /**
     * Getter for <code>cattle.subnet.network_id</code>.
     */
    @Column(name = "network_id", precision = 19)
    @Override
    public Long getNetworkId() {
        return (Long) get(15);
    }

    /**
     * Setter for <code>cattle.subnet.cluster_id</code>.
     */
    @Override
    public void setClusterId(Long value) {
        set(16, value);
    }

    /**
     * Getter for <code>cattle.subnet.cluster_id</code>.
     */
    @Column(name = "cluster_id", nullable = false, precision = 19)
    @Override
    public Long getClusterId() {
        return (Long) get(16);
    }

    // -------------------------------------------------------------------------
    // Primary key information
    // -------------------------------------------------------------------------

    /**
     * {@inheritDoc}
     */
    @Override
    public Record1<Long> key() {
        return (Record1) super.key();
    }

    // -------------------------------------------------------------------------
    // Record17 type implementation
    // -------------------------------------------------------------------------

    /**
     * {@inheritDoc}
     */
    @Override
    public Row17<Long, String, String, String, String, String, Date, Date, Date, Map<String,Object>, String, Integer, String, String, String, Long, Long> fieldsRow() {
        return (Row17) super.fieldsRow();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Row17<Long, String, String, String, String, String, Date, Date, Date, Map<String,Object>, String, Integer, String, String, String, Long, Long> valuesRow() {
        return (Row17) super.valuesRow();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Field<Long> field1() {
        return SubnetTable.SUBNET.ID;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Field<String> field2() {
        return SubnetTable.SUBNET.NAME;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Field<String> field3() {
        return SubnetTable.SUBNET.KIND;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Field<String> field4() {
        return SubnetTable.SUBNET.UUID;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Field<String> field5() {
        return SubnetTable.SUBNET.DESCRIPTION;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Field<String> field6() {
        return SubnetTable.SUBNET.STATE;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Field<Date> field7() {
        return SubnetTable.SUBNET.CREATED;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Field<Date> field8() {
        return SubnetTable.SUBNET.REMOVED;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Field<Date> field9() {
        return SubnetTable.SUBNET.REMOVE_TIME;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Field<Map<String,Object>> field10() {
        return SubnetTable.SUBNET.DATA;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Field<String> field11() {
        return SubnetTable.SUBNET.NETWORK_ADDRESS;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Field<Integer> field12() {
        return SubnetTable.SUBNET.CIDR_SIZE;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Field<String> field13() {
        return SubnetTable.SUBNET.START_ADDRESS;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Field<String> field14() {
        return SubnetTable.SUBNET.END_ADDRESS;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Field<String> field15() {
        return SubnetTable.SUBNET.GATEWAY;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Field<Long> field16() {
        return SubnetTable.SUBNET.NETWORK_ID;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Field<Long> field17() {
        return SubnetTable.SUBNET.CLUSTER_ID;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Long value1() {
        return getId();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public String value2() {
        return getName();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public String value3() {
        return getKind();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public String value4() {
        return getUuid();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public String value5() {
        return getDescription();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public String value6() {
        return getState();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Date value7() {
        return getCreated();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Date value8() {
        return getRemoved();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Date value9() {
        return getRemoveTime();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Map<String,Object> value10() {
        return getData();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public String value11() {
        return getNetworkAddress();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Integer value12() {
        return getCidrSize();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public String value13() {
        return getStartAddress();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public String value14() {
        return getEndAddress();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public String value15() {
        return getGateway();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Long value16() {
        return getNetworkId();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Long value17() {
        return getClusterId();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public SubnetRecord value1(Long value) {
        setId(value);
        return this;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public SubnetRecord value2(String value) {
        setName(value);
        return this;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public SubnetRecord value3(String value) {
        setKind(value);
        return this;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public SubnetRecord value4(String value) {
        setUuid(value);
        return this;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public SubnetRecord value5(String value) {
        setDescription(value);
        return this;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public SubnetRecord value6(String value) {
        setState(value);
        return this;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public SubnetRecord value7(Date value) {
        setCreated(value);
        return this;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public SubnetRecord value8(Date value) {
        setRemoved(value);
        return this;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public SubnetRecord value9(Date value) {
        setRemoveTime(value);
        return this;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public SubnetRecord value10(Map<String,Object> value) {
        setData(value);
        return this;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public SubnetRecord value11(String value) {
        setNetworkAddress(value);
        return this;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public SubnetRecord value12(Integer value) {
        setCidrSize(value);
        return this;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public SubnetRecord value13(String value) {
        setStartAddress(value);
        return this;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public SubnetRecord value14(String value) {
        setEndAddress(value);
        return this;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public SubnetRecord value15(String value) {
        setGateway(value);
        return this;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public SubnetRecord value16(Long value) {
        setNetworkId(value);
        return this;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public SubnetRecord value17(Long value) {
        setClusterId(value);
        return this;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public SubnetRecord values(Long value1, String value2, String value3, String value4, String value5, String value6, Date value7, Date value8, Date value9, Map<String,Object> value10, String value11, Integer value12, String value13, String value14, String value15, Long value16, Long value17) {
        value1(value1);
        value2(value2);
        value3(value3);
        value4(value4);
        value5(value5);
        value6(value6);
        value7(value7);
        value8(value8);
        value9(value9);
        value10(value10);
        value11(value11);
        value12(value12);
        value13(value13);
        value14(value14);
        value15(value15);
        value16(value16);
        value17(value17);
        return this;
    }

    // -------------------------------------------------------------------------
    // FROM and INTO
    // -------------------------------------------------------------------------

    /**
     * {@inheritDoc}
     */
    @Override
    public void from(Subnet from) {
        setId(from.getId());
        setName(from.getName());
        setKind(from.getKind());
        setUuid(from.getUuid());
        setDescription(from.getDescription());
        setState(from.getState());
        setCreated(from.getCreated());
        setRemoved(from.getRemoved());
        setRemoveTime(from.getRemoveTime());
        setData(from.getData());
        setNetworkAddress(from.getNetworkAddress());
        setCidrSize(from.getCidrSize());
        setStartAddress(from.getStartAddress());
        setEndAddress(from.getEndAddress());
        setGateway(from.getGateway());
        setNetworkId(from.getNetworkId());
        setClusterId(from.getClusterId());
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public <E extends Subnet> E into(E into) {
        into.from(this);
        return into;
    }

    // -------------------------------------------------------------------------
    // Constructors
    // -------------------------------------------------------------------------

    /**
     * Create a detached SubnetRecord
     */
    public SubnetRecord() {
        super(SubnetTable.SUBNET);
    }

    /**
     * Create a detached, initialised SubnetRecord
     */
    public SubnetRecord(Long id, String name, String kind, String uuid, String description, String state, Date created, Date removed, Date removeTime, Map<String,Object> data, String networkAddress, Integer cidrSize, String startAddress, String endAddress, String gateway, Long networkId, Long clusterId) {
        super(SubnetTable.SUBNET);

        set(0, id);
        set(1, name);
        set(2, kind);
        set(3, uuid);
        set(4, description);
        set(5, state);
        set(6, created);
        set(7, removed);
        set(8, removeTime);
        set(9, data);
        set(10, networkAddress);
        set(11, cidrSize);
        set(12, startAddress);
        set(13, endAddress);
        set(14, gateway);
        set(15, networkId);
        set(16, clusterId);
    }
}
|
<gh_stars>0
/** @jsxImportSource https://esm.sh/preact */
import { RenderableProps } from "https://esm.sh/preact@10.6.4";
export type {
ComponentChildren,
ComponentType,
JSX,
// RenderableProps,
// ComponentProps,
// Component,
VNode,
} from "https://esm.sh/preact@10.6.4";
// Props shared by all extendable components: an optional CSS class hook.
interface ExtendableAdditions {
  className?: string;
}

// Component props that always accept `className` (and children, via
// RenderableProps) on top of the component-specific props T.
export type ExtendableProps<T = null> = RenderableProps<
  ExtendableAdditions & T
>;

// Re-exported preact runtime API, pinned to 10.6.4 via esm.sh.
export {
  createContext,
  h,
  hydrate,
  render,
  toChildArray,
} from "https://esm.sh/preact@10.6.4";
export { useContext, useState } from "https://esm.sh/preact@10.6.4/hooks";
export type { StateUpdater } from "https://esm.sh/preact@10.6.4/hooks";
export { default as renderToString } from "https://esm.sh/preact-render-to-string?deps=preact@10.6.4";
// NOTE(review): deno.land/std imports below are unversioned — pin a std
// version to avoid breakage when std updates.
export { debounce } from "https://deno.land/std/async/mod.ts";
export { join as pathJoin } from "https://deno.land/std/path/mod.ts";

// Teach TypeScript that the runtime's Crypto exposes randomUUID.
declare global {
  interface Crypto {
    randomUUID: () => string;
  }
}
|
<reponame>lananh265/social-network
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.ic_music_note_twotone = void 0;
var ic_music_note_twotone = {
"viewBox": "0 0 24 24",
"children": [{
"name": "path",
"attribs": {
"d": "M0 0h24v24H0V0z",
"fill": "none"
},
"children": []
}, {
"name": "circle",
"attribs": {
"cx": "10.01",
"cy": "17",
"opacity": ".3",
"r": "2"
},
"children": []
}, {
"name": "path",
"attribs": {
"d": "M12 3l.01 10.55c-.59-.34-1.27-.55-2-.55C7.79 13 6 14.79 6 17s1.79 4 4.01 4S14 19.21 14 17V7h4V3h-6zm-1.99 16c-1.1 0-2-.9-2-2s.9-2 2-2 2 .9 2 2-.9 2-2 2z"
},
"children": []
}]
};
exports.ic_music_note_twotone = ic_music_note_twotone; |
#!/bin/bash
# This utility file contains functions that format test output to be parsed into jUnit XML
# os::test::junit::declare_suite_start prints a message declaring the start of a test suite
# Any number of suites can be in flight at any time, so there is no failure condition for this
# script based on the number of suites in flight.
#
# Globals:
# - JUNIT_REPORT_OUTPUT
# - NUM_OS_JUNIT_SUITES_IN_FLIGHT
# Arguments:
# - 1: the suite name that is starting
# Returns:
# - increment NUM_OS_JUNIT_SUITES_IN_FLIGHT
function os::test::junit::declare_suite_start() {
    local suite="$1"
    local in_flight="${NUM_OS_JUNIT_SUITES_IN_FLIGHT:-0}"
    # Emit the suite-start marker for the report parser, then bump the counter.
    echo "=== BEGIN TEST SUITE github.com/openshift/origin/test/${suite} ===" >> "${JUNIT_REPORT_OUTPUT:-/dev/null}"
    export NUM_OS_JUNIT_SUITES_IN_FLIGHT=$(( in_flight + 1 ))
}
readonly -f os::test::junit::declare_suite_start
# os::test::junit::declare_suite_end prints a message declaring the end of a test suite
# If there aren't any suites in flight, this function will fail.
#
# Globals:
# - JUNIT_REPORT_OUTPUT
# - NUM_OS_JUNIT_SUITES_IN_FLIGHT
# Arguments:
# - 1: the suite name that is starting
# Returns:
# - export/decrement NUM_OS_JUNIT_SUITES_IN_FLIGHT
function os::test::junit::declare_suite_end() {
    local in_flight="${NUM_OS_JUNIT_SUITES_IN_FLIGHT:-0}"
    # Refuse to close a suite when none are open.
    if (( in_flight < 1 )); then
        echo "[ERROR] jUnit suite marker could not be placed, expected suites in flight, got ${in_flight}"
        return 1
    fi
    echo "=== END TEST SUITE ===" >> "${JUNIT_REPORT_OUTPUT:-/dev/null}"
    export NUM_OS_JUNIT_SUITES_IN_FLIGHT=$(( in_flight - 1 ))
}
readonly -f os::test::junit::declare_suite_end
# os::test::junit::declare_test_start prints a message declaring the start of a test case
# If there is already a test marked as being in flight, this function will fail.
#
# Globals:
# - JUNIT_REPORT_OUTPUT
# - NUM_OS_JUNIT_TESTS_IN_FLIGHT
# Arguments:
# None
# Returns:
# - increment NUM_OS_JUNIT_TESTS_IN_FLIGHT
function os::test::junit::declare_test_start() {
    local tests_open="${NUM_OS_JUNIT_TESTS_IN_FLIGHT:-0}"
    # A new test case may only start when no other test is in flight...
    if (( tests_open != 0 )); then
        echo "[ERROR] jUnit test marker could not be placed, expected no tests in flight, got ${tests_open}"
        return 1
    fi
    local suites_open="${NUM_OS_JUNIT_SUITES_IN_FLIGHT:-0}"
    # ...and at least one suite must be open to contain it.
    if (( suites_open < 1 )); then
        echo "[ERROR] jUnit test marker could not be placed, expected suites in flight, got ${suites_open}"
        return 1
    fi
    echo "=== BEGIN TEST CASE ===" >> "${JUNIT_REPORT_OUTPUT:-/dev/null}"
    export NUM_OS_JUNIT_TESTS_IN_FLIGHT=$(( tests_open + 1 ))
}
readonly -f os::test::junit::declare_test_start
# os::test::junit::declare_test_end prints a message declaring the end of a test case
# If there is no test marked as being in flight, this function will fail.
#
# Globals:
# - JUNIT_REPORT_OUTPUT
# - NUM_OS_JUNIT_TESTS_IN_FLIGHT
# Arguments:
# None
# Returns:
# - decrement NUM_OS_JUNIT_TESTS_IN_FLIGHT
function os::test::junit::declare_test_end() {
    local tests_open="${NUM_OS_JUNIT_TESTS_IN_FLIGHT:-0}"
    # Exactly one test must be in flight to be closable.
    if (( tests_open != 1 )); then
        echo "[ERROR] jUnit test marker could not be placed, expected one test in flight, got ${tests_open}"
        return 1
    fi
    echo "=== END TEST CASE ===" >> "${JUNIT_REPORT_OUTPUT:-/dev/null}"
    export NUM_OS_JUNIT_TESTS_IN_FLIGHT=$(( tests_open - 1 ))
}
readonly -f os::test::junit::declare_test_end
# os::test::junit::check_test_counters checks that we do not have any test suites or test cases in flight
# This function should be called at the very end of any test script using jUnit markers to make sure no error in
# marking has occurred.
#
# Globals:
# - NUM_OS_JUNIT_SUITES_IN_FLIGHT
# - NUM_OS_JUNIT_TESTS_IN_FLIGHT
# Arguments:
# None
# Returns:
# None
function os::test::junit::check_test_counters() {
    # Both counters must be back at zero when testing ends; anything else
    # means a begin/end marker pair was mismatched somewhere.
    if [[ "${NUM_OS_JUNIT_SUITES_IN_FLIGHT-}" -ne "0" ]]; then
        echo "[ERROR] Expected no test suites to be marked as in-flight at the end of testing, got ${NUM_OS_JUNIT_SUITES_IN_FLIGHT-}"
        return 1
    fi
    if [[ "${NUM_OS_JUNIT_TESTS_IN_FLIGHT-}" -ne "0" ]]; then
        echo "[ERROR] Expected no test cases to be marked as in-flight at the end of testing, got ${NUM_OS_JUNIT_TESTS_IN_FLIGHT-}"
        return 1
    fi
}
readonly -f os::test::junit::check_test_counters
# os::test::junit::reconcile_output appends the necessary suite and test end statements to the jUnit output file
# in order to ensure that the file is in a consistent state to allow for parsing
#
# Globals:
# - NUM_OS_JUNIT_SUITES_IN_FLIGHT
# - NUM_OS_JUNIT_TESTS_IN_FLIGHT
# Arguments:
# None
# Returns:
# None
function os::test::junit::reconcile_output() {
    # Close a dangling test case, if any, then close every open suite so the
    # raw output file can be parsed.
    if [[ "${NUM_OS_JUNIT_TESTS_IN_FLIGHT:-0}" = "1" ]]; then
        os::test::junit::declare_test_end
    fi
    # FIX: declare_suite_end decrements NUM_OS_JUNIT_SUITES_IN_FLIGHT itself,
    # so the old counting loop (i < ${NUM_OS_JUNIT_SUITES_IN_FLIGHT:-0})
    # re-read a shrinking bound and could stop before all suites were closed.
    # Loop until the counter reaches zero instead.
    while [[ "${NUM_OS_JUNIT_SUITES_IN_FLIGHT:-0}" -gt "0" ]]; do
        os::test::junit::declare_suite_end
    done
}
readonly -f os::test::junit::reconcile_output
# os::test::junit::generate_report determines which type of report is to
# be generated and does so from the raw output of the tests.
#
# Globals:
# - JUNIT_REPORT_OUTPUT
# - ARTIFACT_DIR
# Arguments:
# None
# Returns:
# None
function os::test::junit::generate_report() {
    # Nothing to do when no report path is configured or the file is empty.
    if [[ -z "${JUNIT_REPORT_OUTPUT:-}" ||
          -n "${JUNIT_REPORT_OUTPUT:-}" && ! -s "${JUNIT_REPORT_OUTPUT:-}" ]]; then
        # we can't generate a report
        return
    fi
    # The test-case end marker is only written by the os::cmd flavored suites;
    # its presence distinguishes oscmd output from raw `go test` output.
    if grep -q "=== END TEST CASE ===" "${JUNIT_REPORT_OUTPUT}"; then
        os::test::junit::reconcile_output
        os::test::junit::check_test_counters
        os::test::junit::internal::generate_report "oscmd"
    else
        os::test::junit::internal::generate_report "gotest"
    fi
}
# os::test::junit::internal::generate_report generats an XML jUnit
# report for either `os::cmd` or `go test`, based on the passed
# argument. If the `junitreport` binary is not present, it will be built.
#
# Globals:
# - JUNIT_REPORT_OUTPUT
# - ARTIFACT_DIR
# Arguments:
# - 1: specify which type of tests command output should junitreport read
# Returns:
# export JUNIT_REPORT_NUM_FAILED
function os::test::junit::internal::generate_report() {
    local report_type="$1"
    # Build the junitreport tool if it is not already present.
    os::util::ensure::built_binary_exists 'junitreport'
    local report_file
    # mktemp creates the unique stem; the .xml suffix is appended afterwards.
    report_file="$( mktemp "${ARTIFACT_DIR}/${report_type}_report_XXXXX" ).xml"
    os::log::info "jUnit XML report placed at $( os::util::repository_relative_path ${report_file} )"
    junitreport --type "${report_type}" \
                --suites nested \
                --roots github.com/openshift/origin \
                --output "${report_file}" \
                <"${JUNIT_REPORT_OUTPUT}"
    local summary
    summary=$( junitreport summarize <"${report_file}" )
    # Expose the failure count (e.g. "3 failed") for callers to act on.
    JUNIT_REPORT_NUM_FAILED="$( grep -oE "[0-9]+ failed" <<<"${summary}" )"
    export JUNIT_REPORT_NUM_FAILED
    echo "${summary}"
}
import json
import os
def extract_customer_id(file_path: str) -> str:
    """Read ``customerId`` from a JSON file and delete the file on success.

    Returns the customer id, or a human-readable error string when the file
    is missing, contains invalid JSON, lacks the key, or any other error
    occurs (the file is only removed after the id has been read).
    """
    try:
        with open(file_path, "r") as handle:
            payload = json.load(handle)
            identifier = payload["customerId"]
            os.remove(file_path)
        return identifier
    except FileNotFoundError:
        return "File not found"
    except json.JSONDecodeError:
        return "Invalid JSON format"
    except KeyError:
        return "Customer ID not found in JSON"
    except Exception as exc:
        return f"Error occurred: {str(exc)}"
#!/bin/bash
###
### Script to capture the steps required when open sourcing the web layer for a single exemplar
### Note that you need to set the GITLAB_ROOT environment variable before you can run this script
###
# Include the common functions
. common.sh
# Prompt the operator for the exemplar name; result lands in COMPONENT_NAME.
enterExemplarName() {
    # FIX: -r stops read from treating backslashes in the typed name as escapes.
    read -r -p "Type the exemplar name to open source:" COMPONENT_NAME
}
# Repeatedly prompt for an exemplar name until the operator confirms it.
# Sets COMPONENT_NAME; relies on answerYesNo (from common.sh) to set ANS_YN.
getExemplarName() {
    echo "Enter the exemplar name"
    FINISHED="False"
    while [ "${FINISHED}" == "False" ]; do
        enterExemplarName
        echo "Open sourcing exemplar $COMPONENT_NAME. Do you want to continue?"
        answerYesNo
        # FIX: quoted expansions — an empty ANS_YN previously made the
        # unquoted [ ${ANS_YN} == "y" ] test a syntax error.
        if [ "${ANS_YN}" == "y" ] || [ "${ANS_YN}" == "Y" ]; then
            FINISHED="True"
        fi
    done
}
# Summarise what is about to be pushed so the operator can sanity-check it.
showInfo() {
    printf 'Exemplar name: %s\n' "$COMPONENT_NAME"
    printf 'Release tag: %s\n' "$RELEASE_TAG"
    printf 'Directory: %s\n' "$DIRECTORY"
}
# --- main script sequence; all helpers below come from common.sh -------------
checkEnvironmentVariables
echo "This script will open source the web layer code for a single exemplar"
echo "Please turn on your VPN connection so we can access GitLab"
# Gather inputs: exemplar name, release tag, and a working directory.
getExemplarName
getRelease
createOpenSourceDirectory
# Pull from GitLab and merge with the GitHub state, pausing for review.
pullGitLabMergeGitHub
confirmCanContinue
reviewCodeDifferences
confirmCanContinue
# Build/test happens off-VPN so the open-source toolchain is exercised.
turnOffVpnBuildAndTest
confirmCanContinue
echo "The next step will be to push the code to GitHub so it will be open source"
showInfo
confirmCanContinue
pushToGitHub
echo "Now wait for travis to build. Once it is green the next step will be to tag the release and push that tag"
confirmCanContinue
tagOpenSourceRelease
echo "Done."
-- BigQuery JS UDF wrapping the h3-js library: returns the parent H3 cell of
-- `h3_index` at the given (coarser) `resolution`.
-- The h3-js bundle must be staged at the GCS path listed under OPTIONS.
CREATE OR REPLACE FUNCTION your_project_id.your_dataset.h3_to_parent(h3_index STRING, resolution NUMERIC)
RETURNS STRING
LANGUAGE js AS
"""
return h3.h3ToParent(h3_index,resolution);
"""
OPTIONS (
  library=['gs://file_path']
);
|
# Launch the Torch training entry point with depth supervision, writing
# results under the "depthscratch" experiment directory.
th main.lua \
    -dirName depthscratch \
    -supervision depth
|
list = ["apple","orange","banana","grapes","Mango"]
# function to search elements in given array
def searchElement(arr, val):
    """Return the index of the first occurrence of val in arr, or -1."""
    for idx, item in enumerate(arr):
        if item == val:
            return idx
    return -1
# function to sort array in ascending order
def sortArray(arr):
    """Sort arr in ascending order, in place.

    Replaces the hand-rolled O(n^2) bubble sort with list.sort(), which is
    also stable and in-place but O(n log n) — same observable result.
    """
    arr.sort()
# function to filter even elements
def filterElement(arr):
    """Remove all even elements from arr, in place.

    FIX: the original removed items while iterating over range(len(arr));
    shrinking the list mid-loop skips elements after each removal and can
    raise IndexError once the precomputed range outruns the new length.
    Rebuilding via slice assignment filters correctly in a single pass
    while keeping the in-place mutation callers rely on.
    """
    arr[:] = [item for item in arr if item % 2 != 0]
# Driver Code: demonstrate search, sort, and filter on the sample list.
val = 'grapes'
index = searchElement(list, val)
if index != -1:
    print("%s is present at index %d" %(val, index))
else:
    print("%s is not present" %val)
print("Unsorted array:", list)
sortArray(list)
print("Sorted array:", list)
# NOTE(review): filterElement applies `% 2` to the elements, but this list
# holds strings — this call raises TypeError at runtime; the even-filter
# only makes sense for a list of integers. Verify intended input.
filterElement(list)
print("Filtered array:", list)
<reponame>ZaiwangLi/SA-SSD
from .bbox_head import BBoxHead
from .convfc_bbox_head import ConvFCBBoxHead, SharedFCBBoxHead
from ..single_stage_heads import PSWarpHead
# Public re-export surface of the bbox-head package.
__all__ = ['BBoxHead', 'ConvFCBBoxHead', 'SharedFCBBoxHead', 'PSWarpHead']
|
<filename>employeetracker.sql
-- Recreate the employee-tracker database from scratch.
DROP DATABASE IF EXISTS employeeTracker_DB;
CREATE DATABASE employeeTracker_DB;
USE employeeTracker_DB;
-- Departments: one row per org unit.
CREATE TABLE department(
    id INT NOT NULL AUTO_INCREMENT,
    dept_name VARCHAR(30) NOT NULL,
    PRIMARY KEY (id)
);
-- Roles: job title + salary; dept_id points at department.id
-- (no FK constraint is declared, so referential integrity is by convention).
CREATE TABLE role(
    id INT NOT NULL AUTO_INCREMENT,
    dept_title VARCHAR(30) NOT NULL,
    salary INT NOT NULL,
    dept_id INT NOT NULL,
    PRIMARY KEY (id)
);
-- Employees: role_id points at role.id; manager_id is a self-reference to
-- employee.id and is nullable for the top of the org chart.
CREATE TABLE employee(
    id INT NOT NULL AUTO_INCREMENT,
    first_name VARCHAR(30) NOT NULL,
    last_name VARCHAR(30) NOT NULL,
    role_id INT NOT NULL,
    manager_id INT,
    PRIMARY KEY (id)
);
-- DEPARTMENT SEEDS ----- (auto-increment ids: 1..4 in insertion order)
INSERT INTO department (dept_name)
VALUES ("Executive");
INSERT INTO department (dept_name)
VALUES ("Sales");
INSERT INTO department (dept_name)
VALUES ("Accounting");
INSERT INTO department (dept_name)
VALUES ("Legal");
-- EMPLOYEE ROLE SEEDS -------
-- FIX: dept_id now references the four departments actually inserted above
-- (1=Executive, 2=Sales, 3=Accounting, 4=Legal). The original seed data used
-- dept_id values 5-7, which do not exist in the department table.
INSERT INTO role (dept_title, salary, dept_id)
VALUES ("CEO", 20000, 1);
INSERT INTO role (dept_title, salary, dept_id)
VALUES ("Sales Manager", 12500, 2);
INSERT INTO role (dept_title, salary, dept_id)
VALUES ("Lead Bookkeeper", 12500, 3);
INSERT INTO role (dept_title, salary, dept_id)
VALUES ("In House Counsel", 17500, 4);
INSERT INTO role (dept_title, salary, dept_id)
VALUES ("Sales Associate", 75000, 2);
INSERT INTO role (dept_title, salary, dept_id)
VALUES ("COO", 19000, 1);
INSERT INTO role (dept_title, salary, dept_id)
VALUES ("In House Counsel", 19000, 4);
-- EMPLOYEE SEEDS ---- (manager_id is NULL for top-level employees)
INSERT INTO employee (first_name, last_name, manager_id, role_id)
VALUES ("Jonathan", "Gardner", NULL, 1);
INSERT INTO employee (first_name, last_name, manager_id, role_id)
VALUES ("Phil", "Katz", NULL, 2);
INSERT INTO employee (first_name, last_name, manager_id, role_id)
VALUES ("Roger", "West", NULL, 3);
INSERT INTO employee (first_name, last_name, manager_id, role_id)
VALUES ("Zao", "Cheng", 1, 4);
INSERT INTO employee (first_name, last_name, manager_id, role_id)
VALUES ("James", "Stewart", 4, 5);
INSERT INTO employee (first_name, last_name, manager_id, role_id)
VALUES ("Fred", "Astair", 1, 6);
INSERT INTO employee (first_name, last_name, manager_id, role_id)
VALUES ("Rita", "Hayworth", 2, 7);
|
<filename>json_test.go
package json_test
import (
"testing"
"github.com/stretchr/testify/assert"
j "jsonparser"
"fmt"
)
// InvalidJson is malformed input used to exercise NewJSON's error path.
const InvalidJson = `invalidJSon`

// SimpleJson is a minimal valid JSON object used by the happy-path tests.
const SimpleJson = `{"name":"json"}`
func TestBuildJsonFailsForInvalidJson(t *testing.T) {
jsonStr := []byte(InvalidJson)
_, err := j.NewJSON(jsonStr)
assert.Error(t, err)
assert.Contains(t, err.Error(), "failed to unmarshal json :")
}
func TestBuildJsonReturnsJSONForValidJson(t *testing.T) {
jsonBytes := []byte(SimpleJson)
expectedType := j.JSON{}
json, err := j.NewJSON(jsonBytes)
assert.NoError(t, err)
assert.NotNil(t, json)
assert.IsType(t, expectedType, json)
}
func TestKeyReturnsNilIfKeyDoesNotExist(t *testing.T) {
jsonBytes := []byte(SimpleJson)
json, _ := j.NewJSON(jsonBytes)
result, jsonType, _ := json.Key("non_existent_key")
assert.Nil(t, result)
assert.Equal(t, j.Type(0), jsonType)
}
// TestKeyReturnsValueWithTypeJSONObject: an object value decodes to a
// map[string]interface{} and reports Type(1).
func TestKeyReturnsValueWithTypeJSONObject(t *testing.T) {
	payload := fmt.Sprintf(`{"key":%s}`, `{"firstlevel":"jsonObjectValue"}`)
	parsed, _ := j.NewJSON([]byte(payload))
	value, valueType, _ := parsed.Key("key")
	assert.NotNil(t, value)
	assert.Equal(t, map[string]interface{}{"firstlevel": "jsonObjectValue"}, value)
	assert.Equal(t, j.Type(1), valueType)
}
// TestKeyReturnsValueWithTypeJSONArray: an array value decodes to
// []interface{} and reports Type(2).
func TestKeyReturnsValueWithTypeJSONArray(t *testing.T) {
	payload := fmt.Sprintf(`{"key":%s}`, `["1","2","3","4"]`)
	parsed, _ := j.NewJSON([]byte(payload))
	value, valueType, _ := parsed.Key("key")
	assert.NotNil(t, value)
	assert.Equal(t, []interface{}{"1", "2", "3", "4"}, value)
	assert.Equal(t, j.Type(2), valueType)
}
// TestKeyReturnsValueWithTypeString: a string value round-trips unchanged
// and reports Type(3).
func TestKeyReturnsValueWithTypeString(t *testing.T) {
	const want = `json value`
	parsed, _ := j.NewJSON([]byte(fmt.Sprintf(`{"key":"%s"}`, want)))
	value, valueType, _ := parsed.Key("key")
	assert.NotNil(t, value)
	assert.Equal(t, want, value)
	assert.Equal(t, j.Type(3), valueType)
}
// TestKeyReturnsValueWithTypeInteger: JSON numbers decode as float64 (the
// encoding/json default) and report Type(4).
func TestKeyReturnsValueWithTypeInteger(t *testing.T) {
	parsed, _ := j.NewJSON([]byte(`{"key":123}`))
	value, valueType, _ := parsed.Key("key")
	assert.NotNil(t, value)
	assert.Equal(t, float64(123), value)
	assert.Equal(t, j.Type(4), valueType)
}
// TestKeyReturnsErrorWhenJsonNotObject: Key() on a top-level array fails
// with nil value and the zero Type.
func TestKeyReturnsErrorWhenJsonNotObject(t *testing.T) {
	parsed, _ := j.NewJSON([]byte(`["1","2","3","4"]`))
	value, valueType, err := parsed.Key("key")
	assert.Error(t, err)
	assert.Nil(t, value)
	assert.Equal(t, j.Type(0), valueType)
}
//func TestNextReturnsNextItemInJson(t *testing.T) {
//
// str := `{"key1":"value1","key2":"value2"}`
// jsonBytes := []byte(str)
//
// json, _ := j.NewJSON(jsonBytes)
// nextItem := json.Next()
//
//}
|
def replace_character(string, character, replacement):
    """Return a copy of ``string`` with every char equal to ``character``
    replaced by ``replacement``.

    Comparison is per-character, so a multi-character ``character`` never
    matches and the string is returned unchanged (matching the original
    behavior; str.replace would differ here). Uses str.join instead of
    repeated ``+=`` concatenation, which is quadratic in the worst case.
    """
    return "".join(replacement if ch == character else ch for ch in string)
import {
useForm,
useSelect,
Edit,
Form,
Input,
Select,
Upload,
RcFile,
} from "@pankod/refine-antd";
import { useApiUrl, useDataProvider } from "@pankod/refine-core";
import { getValueProps, MediaConfig, mediaUploadMapper, useDirectusUpload } from "@tspvivek/refine-directus";
import { directusClient } from "src/directusClient";
import { IPost } from "src/interfaces";
// Edit form for a Post, including two Directus-backed media fields:
// a single "image" and a multi-file "gallery".
export const PostEdit: React.FC = () => {
    // Fetch the post with its media relations expanded so existing files
    // can be shown in the upload widgets.
    const { formProps, saveButtonProps, queryResult } = useForm<IPost>({ metaData: { fields: ['*', 'image.*', 'gallery.*.*'] } });
    const apiUrl = useApiUrl();
    // Category dropdown options, loaded from the "categories" resource.
    const { selectProps: categorySelectProps } = useSelect<IPost>({
        resource: "categories",
        optionLabel: "name",
        optionValue: "id",
    });
    // List of media fields and their config. "gallery" rows are stored via a
    // junction record, hence the normalize into { directus_files_id }.
    const mediaConfigList: MediaConfig[] = [
        { name: "image", multiple: false, maxCount: 1 },
        { name: "gallery", multiple: true, maxCount: 5, normalize: (id) => ({ directus_files_id: id }) },
    ];
    const getUploadProps = useDirectusUpload(mediaConfigList, directusClient);
    return (
        <Edit saveButtonProps={saveButtonProps}>
            {/* mediaUploadMapper rewrites the upload widget values into the
                shape the Directus data provider expects before submit. */}
            <Form {...formProps} layout="vertical"
                onFinish={(values) => {
                    return (
                        formProps.onFinish &&
                        formProps.onFinish(mediaUploadMapper(values, mediaConfigList))
                    );
                }}
            >
                <Form.Item label="Title" name="title">
                    <Input />
                </Form.Item>
                <Form.Item label="Status" name="status">
                    <Select
                        options={[
                            {
                                label: "Published",
                                value: "published",
                            },
                            {
                                label: "Draft",
                                value: "draft",
                            },
                            {
                                label: "Rejected",
                                value: "rejected",
                            },
                        ]}
                    />
                </Form.Item>
                <Form.Item label="Category" name={"category"}>
                    <Select {...categorySelectProps} />
                </Form.Item>
                {/* Single-image field: getValueProps maps the stored Directus
                    file reference into antd's fileList shape. */}
                <Form.Item label="Images">
                    <Form.Item
                        name="image"
                        valuePropName="fileList"
                        getValueProps={(data) => getValueProps({
                            data,
                            imageUrl: apiUrl
                        })}
                        noStyle
                    >
                        <Upload.Dragger
                            name="file"
                            listType="picture"
                            multiple
                            onChange={(info) => { return info }}
                            {...getUploadProps("image")}
                        >
                            <p className="ant-upload-text">
                                Drag & drop a file in this area
                            </p>
                        </Upload.Dragger>
                    </Form.Item>
                </Form.Item>
                {/* Gallery field: custom URL/title getters unwrap the junction
                    record's directus_files_id relation.
                    NOTE(review): label duplicates "Images" above — probably
                    intended to read "Gallery"; confirm before changing. */}
                <Form.Item label="Images">
                    <Form.Item
                        name="gallery"
                        valuePropName="fileList"
                        getValueProps={(data) => {
                            return getValueProps({
                                data,
                                imageUrl: apiUrl,
                                getFileUrl: (item) => { return apiUrl + "assets/" + item.directus_files_id.id; },
                                getFileTitle: (item) => { return item.directus_files_id.title; },
                            })
                        }}
                        noStyle
                    >
                        <Upload.Dragger
                            name="file"
                            listType="picture"
                            multiple
                            onChange={(info) => { return info }}
                            {...getUploadProps("gallery")}
                        >
                            <p className="ant-upload-text">
                                Drag & drop a file in this area
                            </p>
                        </Upload.Dragger>
                    </Form.Item>
                </Form.Item>
            </Form>
        </Edit>
    );
};
|
#!/bin/bash
# The ESA landcover dataset must be downloaded manually
# Convert the (manually downloaded) ESA landcover NetCDF to GeoTIFF, split it
# into 100 36x18-degree tiles, polygonize each tile, and load the polygons
# into the esa_landcover_import table.
import_esa_landcover() {
    echo "Import ESA Landcover"
    # Extract the lccs_class band from the NetCDF into a working GeoTIFF.
    gdal_translate NETCDF:"C3S-LC-L4-LCCS-Map-300m-P1Y-2019-v2.1.1.nc":lccs_class esa_landcover.tiff
    rm -f esa_landcover-*
    # FIX: the original `i = 0` (with spaces) is not an assignment in shell —
    # it tries to run a command named `i`, leaving the counter unset.
    i=0
    for x in {0..9}
    do
        # Longitude window: 36 degrees per column, starting at -180.
        xmin=$(((x*36)-180))
        xmax=$((xmin+36))
        for y in {0..9}
        do
            # Latitude window: 18 degrees per row, starting at -90.
            ymin=$(((y*18)-90))
            ymax=$((ymin+18))
            echo "---------"
            echo "Part" $i
            gdalwarp \
                -te $xmin $ymin $xmax $ymax \
                esa_landcover.tiff \
                esa_landcover-$i.tiff
            gdal_polygonize.py \
                esa_landcover-$i.tiff \
                -f PostgreSQL \
                PG:"host=${HOST} port=${PORT} dbname=${POSTGRES_DB} user=${POSTGRES_USER} password=${POSTGRES_PASSWORD}" \
                esa_landcover_import
            rm esa_landcover-$i.tiff
            i=$((i+1))
        done
    done
}
if [ "${1}" != "--source-only" ]; then
import_esa_landcover "${@}"
fi |
// BOJ 1547: Ball (cup-shuffle game)
// 2019.05.18
// Simulation
#include<iostream>
using namespace std;
// Track three balls through n swaps and print which ball ends at position 1.
int main()
{
	int n;
	cin >> n;
	// ball[j] = the ball currently at position j; positions/balls are 1..3,
	// index 0 is an unused placeholder.
	int ball[4] = { 0,1,2,3 };
	for (int i = 0; i < n; i++)
	{
		int x, y;
		cin >> x >> y;
		// FIX: a and b were uninitialized; if a value were ever absent from
		// ball[1..3] (e.g. malformed input), swap(ball[a], ball[b]) would be
		// undefined behavior. Default both to the unused slot 0.
		int a = 0, b = 0;
		// Find the positions currently holding balls x and y.
		for (int j = 1; j < 4; j++)
		{
			if (ball[j] == x)
			{
				a = j;
			}
			if (ball[j] == y)
			{
				b = j;
			}
		}
		swap(ball[a], ball[b]);
	}
	cout << ball[1] << endl;
	return 0;
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.