text
stringlengths 184
4.48M
|
|---|
<template>
  <!-- Elevator simulator: a shaft with a moving cab plus five call buttons. -->
  <div class="wrapper">
    <div class="elevator-shaft">
      <!--
        Cab position is driven purely by CSS classes:
        'elevator-doors<N>' translates the cab to floor N and
        'wait' blinks the cab while it is idling at a requested floor.
        FIX: `this.` must not be used inside template expressions —
        bindings resolve against the component instance implicitly
        (and `this` is not available in Vue 3 templates).
      -->
      <div :class="['elevator-doors', 'elevator-doors' + currFloor, wait]">
        <p><strong>{{ currFloor }} {{ direction }}</strong></p>
      </div>
    </div>
    <!-- Floor call buttons, rendered top (5) down to bottom (1). -->
    <div class="floors">
      <div class="floor" v-for="n in [5, 4, 3, 2, 1]" :key="n">
        <input type="button" class="btn" :value="n" v-on:click="setFloor(n)"/>
      </div>
    </div>
  </div>
</template>
<script>
// Elevator state machine. Floor requests are queued in `floor`; a 1 s
// "movement" interval steps currFloor toward followingFloor, and a 4 s
// interval detects arrival, blinks the cab and dequeues the next request.
export default {
  name: 'ElevatorShaft',
  data() {
    return {
      direction: null,      // arrow shown in the cab: "↑", "↓" or ""
      timer: null,          // guard so start() only wires the intervals once
      floor: [],            // FIFO queue of requested floors
      followingFloor: 1,    // floor the cab is currently travelling to
      currFloor : 1,        // floor the cab is currently at (drives CSS class)
      wait: "",             // set to "wait" to trigger the blink animation
    }
  },
  methods: {
    // Lazily wires the two polling intervals on the first request.
    // NOTE(review): the setInterval handles are never stored, so they can
    // never be cleared — they keep running for the component's lifetime.
    start() {
      if (this.timer) {
        return;
      }
      this.timer = setTimeout(() => {
        // Arrival poll: when the cab has reached its target, blink for a
        // while and, 3 s later, pull the next request from the queue.
        setInterval(() => {
          if (this.currFloor === this.followingFloor) {
            setTimeout(() => {
              this.nextFloor();
            },3000)
            this.wait = "wait"
            this.direction = ""
          }}, 4000);
        // Movement poll: one floor per second toward the target.
        setInterval(() => {
          if (this.currFloor < this.followingFloor) {
            this.upfloor();
            this.wait = ""
            this.direction = "↑"
          }
          if (this.currFloor > this.followingFloor) {
            this.downfloor();
            this.wait = ""
            this.direction = "↓"
          }
        }, 1000)
      }, 1000);
    },
    // Button handler: enqueue the requested floor and ensure polling runs.
    setFloor(num) {
      this.floor.push(num);
      this.start();
    },
    // Dequeue the next requested floor, if any, and make it the target.
    nextFloor() {
      if (this.floor.length) {
        this.followingFloor = this.floor.shift();
        console.log ("Движемся на " + this.followingFloor + " этаж");
      }
    },
    // Move the cab one floor up while below the target.
    upfloor() {
      if (this.currFloor < this.followingFloor) {
        this.currFloor += 1;
        console.log(this.currFloor)
      }
    },
    // Move the cab one floor down while above the target.
    downfloor() {
      if (this.currFloor > this.followingFloor) {
        this.currFloor -= 1;
        console.log(this.currFloor)
      }
    },
  }
}
</script>
<style scoped>
.wrapper {
  display: flex;
  flex-direction: row
}
.btn{
  display:block;
  height: 20px;
  width: 20px;
  border-radius: 50%;
  padding: 0;
}
.floors {
  width: 100vh;
  display: flex;
  flex-direction: column;
}
.floor {
  width: 100vh;
  height: 100px;
  border: 0.5px solid rgb(158, 158, 158);
  display: flex;
  flex-wrap: wrap;
  align-content: center;
}
.floor input {
  margin-left: 20px;
}
/* The cab. transition-duration animates the translate() jumps between
   the per-floor classes below, producing the 1 s travel effect. */
.elevator-doors {
  width: 100px;
  height: 100px;
  background: #cdcbcd;
  border: 0.5px solid rgb(158, 158, 158);
  position: sticky;
  transition-duration: 1s;
  text-align: center;
}
/* One vertical offset per floor; floor 5 is the top (no offset).
   Offsets are floor height (100px) + 0.5px borders accumulated. */
.elevator-doors1 {
  transform: translate(0,406px);
}
.elevator-doors2 {
  transform: translate(0,305px);
}
.elevator-doors3 {
  transform: translate(0,203px);
}
.elevator-doors4 {
  transform: translate(0,101px);
}
.elevator-doors5 {
  transform: translate(0,0px);
}
.elevator-shaft {
  border: 0.5px solid rgb(158, 158, 158);
}
/* Blink while the cab is "waiting" at a requested floor. */
.wait {
  animation-name: blink;
  animation-timing-function: linear;
  animation-duration: 1.5s;
  animation-iteration-count: infinite;
}
@keyframes blink {
  50% {
    opacity: 50%;
    background-color: greenyellow;
  }
}
</style>
|
<%@ page language="java" contentType="text/html; charset=UTF-8"
pageEncoding="UTF-8"%>
<%@ taglib prefix="c" uri="http://java.sun.com/jsp/jstl/core"%>
<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<html>
<head>
<title>Insert title here</title>
<meta name="viewport" content="width=device-width,minimum-scale=1, maximum-scale=1,initial-scale=1,user-scalable=no">
<link rel="stylesheet" type="text/css" href="css/cart.css">
<link rel="stylesheet" type="text/css" href="css/paging.css">
</head>
<body>
	<jsp:include page="cartHeader.jsp" />
	<div>
		<p class="title">식권 구매 내역</p>
		<p class="rr">※클릭하시면 QR코드 화면으로 이동됩니다.</p>
		<ul class="list">
			<c:forEach items="${list}" var="ProductVO">
				<a href="qrcode.jsp">
					<li class="box">
						<img class="img" src="upload${ProductVO.file}" width="100%" height="100%">
						<ul class="listbox">
							<li>메뉴 | ${ProductVO.ticketName}</li>
							<li>날짜 | ${ProductVO.date}</li>
							<li>가격 | ${ProductVO.price}</li>
						</ul>
					</li>
				</a>
			</c:forEach>
		</ul>
	</div>
	<div class="paging">
		<ul class="pagination">
			<%-- FIX: "<" and ">" are markup characters and must be escaped as
			     entities when used as link text; raw "<<" inside an <a> is
			     invalid HTML and parser-dependent. Rendered text is unchanged. --%>
			<li><a href="javascript:PageMove(${paging.firstPageNo})">&lt;&lt;</a></li>
			<li><a href="javascript:PageMove(${paging.prevPageNo})">&lt;</a></li>
			<c:forEach var="i" begin="${paging.startPageNo}" end="${paging.endPageNo}" step="1">
				<c:choose>
					<c:when test="${i eq paging.pageNo}">
						<li class="active"><a href="javascript:PageMove(${i})">${i}</a></li>
					</c:when>
					<c:otherwise>
						<li><a href="javascript:PageMove(${i})">${i}</a></li>
					</c:otherwise>
				</c:choose>
			</c:forEach>
			<li><a href="javascript:PageMove(${paging.nextPageNo})">&gt;</a></li>
			<li><a href="javascript:PageMove(${paging.finalPageNo})">&gt;&gt;</a></li>
		</ul>
	</div>
	<script type="text/javascript">
		// Reloads the cart page at the requested page number.
		function PageMove(page){
			location.href = "hiroad?cmd=cart&page="+page;
		}
	</script>
</body>
</html>
|
#############################################################
########### Day 3: ANOVA / Design of Experiments ############
#############################################################
### External data required ###
# - Benefit.csv
# - oathTab_Result.csv
# Place them in the "data" folder under the working directory.
###################### Prerequisite ######################
# Resolve the data path.
root_dir <- getwd()
root_dir # Confirm this points at the project directory.
csvPATH <- "./data/Benefit.csv"
#################### Day3-Exercise1 ANOVA ####################
#### Running an analysis of variance (ANOVA)
####### Task 1 #######
iris # Fisher's iris data, the best-known sample data set.
str(iris)
summary(iris)
# Species is a categorical variable: 3 levels, 50 observations per level.
# Scatterplot matrix
plot(iris[,1:4])
plot(iris[,1:4], col=as.numeric(iris$Species)) # coloured by Species
# Histograms
par(mfrow=c(2,2))
with(iris, hist(Sepal.Length))
with(iris, hist(Sepal.Width ))
with(iris, hist(Petal.Length))
with(iris, hist(Petal.Width))
par(mfrow=c(1,1))
# Inspect each variable's distribution per Species with boxplots.
par(mfrow=c(2,2))
with(iris, boxplot(Sepal.Length~Species, main="Sepal.Length"))
with(iris, boxplot(Sepal.Width~Species, main="Sepal.Width"))
with(iris, boxplot(Petal.Length~Species, main="Petal.Length"))
with(iris, boxplot(Petal.Width~Species, main="Petal.Width"))
par(mfrow=c(1,1))
# Medians: the thick bars differ between levels => the groups look different.
# Spread: Petal.Length / Petal.Width show varying box+whisker widths.
# Mean and variance per Species level,
# using Sepal.Width as the example variable.
with(iris, by(Sepal.Width, INDICES=Species, FUN=mean)) # means
with(iris, by(Sepal.Width, INDICES=Species, FUN=var)) # variances
# `by` applies a function per factor level (like SQL GROUP BY).
### ANOVA in R
## Model formulas are usually written y ~ x.
## Several equivalent entry points exist:
# (1) lm() (also used for regression)
anova(lm(Sepal.Width ~ Species, data=iris)) # rejects H0 (no mean difference in Sepal.Width across Species)
# (2) aov()
summary(aov(Sepal.Width ~ Species, data=iris))
# (3) oneway.test()
oneway.test(Sepal.Width ~ Species, data=iris, var.equal=TRUE)
## Pairwise comparisons across the 3 levels, one t-test per pair.
# Sepal.Width
with(iris, pairwise.t.test(Sepal.Width, Species, p.adjust.method="none"))
# Petal.Length
with(iris, pairwise.t.test(Petal.Length, Species, p.adjust.method="none"))
####### Task 2 #######
benefit <- read.csv(file=csvPATH, header=T)
str(benefit)
summary(benefit)
hist(benefit$Satisfaction) # histogram of satisfaction scores
#plot(benefit$Benefit, benefit$Age) # benefit vs age
par(mfrow=c(1,2))
with(benefit, boxplot(Satisfaction ~ Benefit ))
with(benefit, boxplot(Satisfaction ~ Age ))
par(mfrow=c(1,1))
# Observe the spread of satisfaction per group.
# model 1 (Benefit only)
model1 <- lm(Satisfaction ~ Benefit, data=benefit)
anova(model1)
# Benefit is significant.
# model 2 (Benefit and Age)
model2 <- lm(Satisfaction ~ Benefit + Age, data=benefit)
anova(model2)
# Age is not significant.
# model 3 (Benefit, Age and their interaction)
model3 <- lm(Satisfaction ~ Benefit + Age + Benefit * Age, data=benefit)
anova(model3)
# The Benefit:Age interaction is also significant.
## Interaction plots
interaction.plot(benefit$Benefit, benefit$Age, benefit$Satisfaction)
interaction.plot(benefit$Age, benefit$Benefit, benefit$Satisfaction)
## Model comparison
## Note: model comparison is covered in detail on Day 4.
# Multiple R-squared: closer to 1 is better.
summary(model1)$r.squared # model1
summary(model2)$r.squared # model2
summary(model3)$r.squared # model3
# AIC: smaller is better.
AIC(model1) # model1
AIC(model2) # model2
AIC(model3) # model3
############## Day3-Exercise2 Multiple testing ##############
#### Simulation of the multiple-comparison problem.
#### Simulation 1 ####
# 15 groups of 20 observations, all drawn from the SAME distribution,
# so every "significant" pairwise difference is a false positive.
n_group <- 15
n_obs <- 20
#set.seed(123) # change the seed and repeat the simulation
value <- rnorm(n_group*n_obs)
group <- as.factor(rep(1:n_group, each=n_obs))
df_sim1 <- data.frame(group, value)
str(df_sim1)
summary(df_sim1)
with(df_sim1, boxplot(value ~ group )) # boxplots
with(df_sim1, by(value, INDICES=group, FUN=mean)) # means
with(df_sim1, by(value, INDICES=group, FUN=var)) # variances
## Unadjusted pairwise comparisons
res <- with(df_sim1, pairwise.t.test(value, group, p.adjust.method="none"))
res
## Number of pairwise tests
choose(n_group, 2) # number of ways to pick 2 groups out of n_group
## Number of tests with p <= 0.05 (all false positives by construction)
sum(res$p.value<0.05, na.rm=TRUE)
## Multiple-comparison adjustment (Bonferroni).
## FIX: the option was previously passed to with() instead of
## pairwise.t.test() — `with(..., pairwise.t.test(value, group), p.adjust.method=...)`
## silently applied NO adjustment (the default "holm" was used instead).
res_adj <- with(df_sim1, pairwise.t.test(value, group, p.adjust.method="bonferroni"))
res_adj
sum(res_adj$p.value<0.05, na.rm=TRUE)
#### Simulation 2 ####
### Experiment with the normal distribution.
## setting: alpha = 0.05
alpha <- 0.05
test <- function(a, n){ return(1 - (1-a)^n) } # P(at least one type-I error); a=alpha, n=number of tests
### Number of tests: n=1
n <- 1
p_vals <- vector(mode="numeric",1e4) # initialise a numeric vector; 1e4 = 10^4 (scientific notation)
length(p_vals) # vector of length 10,000
for(i in 1:1e4) # 10,000 simulation runs:
{
  set.seed(i) # seed with 1:1e4 for reproducibility (note: NOT a single fixed value),
  x <- rnorm(100) # draw 100 samples from the standard normal,
  p_vals[i] <- t.test(x=x, mu=0)$p.value # one-sample t-test (H0: mean of the sampled values is 0); keep the p-value
}
mean(p_vals < 0.05) # <observed> rejection rate over 10,000 runs: 0.0498
test(alpha,1) # <theory> alpha was set to 0.05, so 0.05
### Number of tests: n=5
n <- 5
p_vals5 <- matrix(NA, nrow=1e4, ncol=n) # initialise the matrix
nrow(p_vals5) # 10,000 rows
ncol(p_vals5) # 5 columns
for(j in 1:1e4)
{
  set.seed(j)
  X <- replicate(n=n, expr=rnorm(100)) # draw 100*5 samples from the standard normal
  p_vals5[j, ] <- apply(X=X, MARGIN=2, FUN=function(x){
    t.test(x=x, mu=0)$p.value # t-test each of the 5 columns; returns 5 p-values
  })
}
# sapply applies the same function to each element of a vector/list.
# Here: the share of runs in which ANY of the 5 tests came out below 0.05.
mean(sapply(X=1:nrow(p_vals5), FUN=function(i){ any(p_vals5[i, ] < 0.05) })) # observed: 0.2282
# <observed> over 10,000 runs of 5 tests each, at least one significant: 0.2282
test(alpha,n) # 0.226
# => stacking tests inflates the family-wise error rate. Hence:
## Bonferroni adjustment: divide the threshold by the number of tests.
mean(sapply(X=1:nrow(p_vals5), FUN=function(i){ any(p_vals5[i, ] < 0.05 / n) }))
# kept down to 0.0504
########## Day3-Exercise3 Power and sample size ##########
#### Computing required sample sizes with power.prop.test().
## (i) alpha=0.1, power 1-Beta=0.8, effect = 0.1 (10% improvement on 20%)
improve <- 0.2*0.1
prop1 <- power.prop.test(n=NULL, p1=0.2, p2=0.2-improve, sig.level=0.10, power=0.8)
prop1
ceiling(prop1$n) # subjects per group
## (ii) alpha=0.1, power 1-Beta=0.8, effect = 0.15 (15% improvement on 20%)
## FIX: this case had no code — added following the same pattern as (i).
improve <- 0.2*0.15
prop2 <- power.prop.test(n=NULL, p1=0.2, p2=0.2-improve, sig.level=0.10, power=0.8)
prop2
ceiling(prop2$n) # subjects per group
## (iii) alpha=0.1, power 1-Beta=0.8, effect = 0.2 (20% improvement on 20%)
improve <- 0.2*0.2
## FIX: sig.level was 0.05 here although this case (like i and ii) states
## alpha=0.1; 0.10 keeps alpha constant so only the effect size varies.
prop3 <- power.prop.test(n=NULL, p1=0.2, p2=0.2-improve, sig.level=0.10, power=0.8)
prop3
ceiling(prop3$n) # subjects per group
# => the larger the expected effect, the fewer subjects are needed.
## (iiii) alpha=0.05, power 1-Beta=0.95, effect = 0.1 (10% improvement on 20%)
improve <- 0.2*0.1
prop4 <- power.prop.test(n=NULL, p1=0.2, p2=0.2-improve, sig.level=0.05, power=0.95)
prop4
ceiling(prop4$n) # subjects per group
# => stricter alpha and higher power require many more subjects.
########## Day3-Exercise4 Orthogonal-array experiment ##########
#install.packages("DoE.base")
library(DoE.base) # Design of Experiments package
### Building the orthogonal array (planning stage)
## Define each factor and its levels.
tutor <- c("Y", "M")
msg <- c("人工知能", "AI")
description <- c("仕組みを学べる", "最高の講師から学べる")
colour <- c("White", "Blue")
# 4 factors with 2 levels each => 2^4 = 16 combinations in total.
## Full factorial design: 16 runs
full <- oa.design(nlevels=c(2,2,2,2), nruns=16, randomize=FALSE, seed=12345)
full
## Fractional factorial design: 8 runs
## FIX: `nruns=` was left empty (a parse error); the half fraction is 8 runs.
frac <- oa.design(nlevels=c(2,2,2,2), nruns=8, randomize=FALSE, seed=12345)
frac
## Same design with the real level names attached.
oathTab <- oa.design(
  nlevels=c(2,2,2,2),
  factor.names=list(tutor=tutor, msg=msg, description=description, colour=colour),
  #columns="min34",
  nruns=8,
  randomize=FALSE,
  seed=12345
)
oathTab
# Save the design; this is what would actually be implemented.
write.csv(oathTab, file="./data/oathTab_Design.csv", row.names=FALSE)
### After the experiment
## Results are assumed to be stored in the `registration` column of oathTab_Result.csv.
oath_result <- read.csv('./data/oathTab_Result.csv')
str(oath_result)
summary(oath_result)
## Per-factor summary: mean registration per level of each factor.
for(i in 1:4){
  print(
    aggregate(oath_result$registration, by=list(oath_result[,i]), FUN="mean")
  )
}
## Fit the model.
#exp_result <- aov(registration~., data=oath_result)
exp_result <- lm(registration~., data=oath_result)
# ANOVA table
anova(exp_result)
# Factor coefficients and goodness of fit
summary(exp_result)
# Multiple R-squared 0.9536 / Adjusted R-squared 0.8916 => the fit is good.
# tutor is significant at the 0.01 level.
# description is significant at the 0.1 level.
## Fitting a model lets us identify and predict the best combination.
# Best combination, judged from summary(exp_result):
best.levels <- c("Y","AI","最高の講師から学べる","Blue")
# Build a one-row data.frame with that combination
# (FIX: local variable renamed from the typo `bast_data`).
best_data <- head(oath_result,0)[,c(1,2,3,4)]
best_data[1,] <- best.levels
best_data
# Prediction, including the prediction interval.
predict(exp_result, newdata=best_data, interval="prediction", level=0.95)
# Predictions for the observed runs.
predict(exp_result)
## best.levels was never actually run, yet it yields the highest prediction.
|
import axios from "axios";
import { useEffect, useState } from "react";
import './style.css'
import { Link ,useNavigate } from "react-router-dom";
function Login() {
const [id, setId] = useState("");
const [email, setEmail] = useState("");
const [password, setPass] = useState("");
// const [users, setUsers] = useState([]);
const navigation=useNavigate()
async function login(event) {
event.preventDefault();
await axios.post("https://localhost:7030/api/UserAuth/login", {
email: email,
password: password,
}).then((result)=>{
if (result.data == 'Invalid') {
//
alert("Login Failed Email or password false");
setId("");
setEmail("");
setPass("");
}
else{
localStorage.setItem('token',result.data)
alert("Giriş Başarılı");
navigation('/home')
}
});
}
return(
<div className="login template d-flex justify-content-center align-items-center vh-100 bg-primary">
<div className="form_container p-5 rounded bg-white">
<form>
<h3 className="text-center">Giriş Yap</h3>
<div>
<input
type="text"
class="form-control"
id="id"
hidden
value={id}
onChange={(event) => {
setId(event.target.value);
}}
/>
<label htmlFor="email">Email</label>
<input type="email" placeholder="E-mail Giriniz" className="form-control"
id="email"
value={email}
onChange={(event) => {
setEmail(event.target.value);
}}></input>
</div>
<div>
<label htmlFor="password">Password</label>
<input type="password" placeholder="Şifreyi Giriniz" className="form-control"
id="password"
value={password}
onChange={(event) => {
setPass(event.target.value);
}}></input>
</div>
<div className="d-grid">
<button className="btn btn-primary"onClick={login}>Giriş Yap</button>
</div>
<p className="text-end mt-2">
<Link to="/signup" className="ms-2">Kaydol</Link>
</p>
</form>
</div>
</div>
)
}
export default Login;
|
import {
MiddlewareConsumer,
Module,
NestModule,
RequestMethod,
} from '@nestjs/common';
import { AppController } from './app.controller';
import { AppService } from './app.service';
import { logger, LoggerMiddleware } from './middleware/logger.middleware';
import { NormalProvider } from './provider';
@Module({
  imports: [],
  controllers: [AppController],
  providers: [AppService, NormalProvider],
})
export class AppModule implements NestModule {
  // Registers both the class-based and the functional logger middleware
  // for every route and HTTP method.
  configure(consumer: MiddlewareConsumer) {
    consumer
      .apply(LoggerMiddleware, logger)
      // To limit the middleware to specific routes (URL wildcards supported):
      // .forRoutes('/app/:id');
      .forRoutes({
        path: '*',
        method: RequestMethod.ALL,
      });
  }
}
|
package ru.venidiktov.collection.set;
import java.util.Comparator;
import java.util.Set;
import java.util.TreeSet;
/***
 * TreeSet основан на TreeMap: значения всех элементов заменены на некое константное значение.
 * Как и в TreeMap, в TreeSet не может быть null в качестве ключа.
 * В TreeSet, как и в TreeMap, для сортировки элементов и поиска объекты должны реализовывать Comparable.
 * В TreeSet, как и в TreeMap, для поиска по значению объекты значений должны реализовывать equals().
 */
public class TreeSetEx {
    // Demonstrates TreeSet ordering: natural (Comparable) vs. an external Comparator.
    public static void main(String[] args) {
        Set<Integer> treeSet = new TreeSet<>();
        treeSet.add(5);
        treeSet.add(3);
        treeSet.add(8);
        treeSet.add(1);
        treeSet.add(9);
        // treeSet.add(null); adding null is not allowed
        System.out.println("tree set = " + treeSet);
        /***
         * To be usable in a TreeSet without a Comparator, the element class
         * must implement the Comparable interface.
         */
        TreeSet<Student> treeSet2 = new TreeSet<>();
        var st1 = new Student("Ivan", "Ivanov", 3);
        var st2 = new Student("Maria", "Ivanova", 4);
        var st2_2 = new Student("Maria", "Ivanova", 5); // Not added here: compareTo (name+surname) sees it as a duplicate of st2
        var st3 = new Student("Maria", "Zaeva", 2);
        var st4 = new Student("Yak", "Zaev", 1);
        treeSet2.add(st3);
        treeSet2.add(st2);
        treeSet2.add(st2_2);
        treeSet2.add(st1);
        treeSet2.add(st4);
        System.out.println("TreeSet of Student implements Comparable = " + treeSet2);
        /***
         * Alternatively, a Comparator can be supplied at construction time;
         * the TreeSet then uses it to compare elements instead of compareTo.
         */
        TreeSet<Student> treeSet3 = new TreeSet<>(Comparator.comparingInt(o -> o.course));
        treeSet3.add(st3);
        treeSet3.add(st2);
        treeSet3.add(st2_2);
        treeSet3.add(st1);
        treeSet3.add(st4);
        System.out.println("TreeSet of Student with external Comparator = " + treeSet3);
        // first()
        System.out.println("First student by course = " + treeSet3.first());
        // last()
        System.out.println("Last student by course = " + treeSet3.last());
        // headSet(): elements strictly below the given element (by course here)
        var oleg = new Student("Oleg", "Olegov", 3);
        System.out.println("Students by course from had to < Oleg course = " + treeSet3.headSet(oleg));
        // tailSet(): elements >= the given element
        System.out.println("Students by course form >= Oleg course to tail = " + treeSet3.tailSet(oleg));
        // subSet(): range [from, to)
        var lena = new Student("Elena", "ELenova", 1);
        System.out.println("Students from <= Elena course to > Oleg course = " + treeSet3.subSet(lena, oleg));
    }
    // Simple value class; natural order is lexicographic by name+surname.
    private static class Student implements Comparable<Student> {
        private String name;
        private String surname;
        private int course;
        public Student(String name, String surname, int course) {
            this.name = name;
            this.surname = surname;
            this.course = course;
        }
        @Override
        public int compareTo(Student student) {
            // NOTE(review): concatenation-based comparison means ("ab","c")
            // and ("a","bc") compare equal — acceptable for this demo.
            return (name + surname).compareTo(student.name + student.surname);
        }
        @Override
        public String toString() {
            return "Student{" +
                    "name='" + name + '\'' +
                    ", surname='" + surname + '\'' +
                    ", course=" + course +
                    '}';
        }
    }
}
|
/*
* Copyright 2023 Frans Jacobs.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package jcs.ui.layout.pathfinding.astar;
import java.awt.Point;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
import jcs.entities.enums.AccessoryValue;
import jcs.ui.layout.tiles.Tile;
import jcs.ui.layout.tiles.Block;
import org.tinylog.Logger;
/**
 * A* search node wrapping a layout {@link Tile}. Tracks the path cost g,
 * the heuristic h, the predecessor on the current best path, the outgoing
 * edges, and — for junction tiles — the accessory (switch) state the route
 * requires.
 */
public class Node implements Comparable<Node> {
  private final Tile tile;
  private String suffix;
  private AccessoryValue accessoryState;
  private double g;
  private double h;
  private Node previousNode;
  private final Set<Edge> edges = new HashSet<>();

  public Node(Tile tile) {
    this.tile = tile;
  }

  public Tile getTile() {
    return tile;
  }

  /** The wrapped tile's id doubles as the node id. */
  public String getId() {
    return tile.getId();
  }

  public int getX() {
    return this.tile.getCenterX();
  }

  public int getY() {
    return this.tile.getCenterY();
  }

  /** X of the block's neighbor point for the given suffix; the tile center otherwise. */
  public int getX(String suffix) {
    if (isBlock()) {
      return ((Block) this.tile).getNeighborPoint(suffix).x;
    } else {
      return getX();
    }
  }

  /** Y of the block's neighbor point for the given suffix; the tile center otherwise. */
  public int getY(String suffix) {
    if (isBlock()) {
      return ((Block) this.tile).getNeighborPoint(suffix).y;
    } else {
      return getY();
    }
  }

  /** Alternative point of a block for the given suffix; the tile center otherwise. */
  public Point getAltPoint(String suffix) {
    if (isBlock()) {
      return ((Block) this.tile).getAltPoint(suffix);
    } else {
      return this.tile.getCenter();
    }
  }

  public boolean isBlock() {
    return this.tile.isBlock();
  }

  public boolean isJunction() {
    return this.tile.isJunction();
  }

  public boolean isDirectional() {
    return this.tile.isDirectional();
  }

  /** Cost of the path from the start node to this node. */
  public double getG() {
    return g;
  }

  void setG(double g) {
    this.g = g;
  }

  /** Heuristic estimate from this node to the target. */
  public double getH() {
    return h;
  }

  void setH(double h) {
    this.h = h;
  }

  public Node getPreviousNode() {
    return previousNode;
  }

  public void setPreviousNode(Node previousNode) {
    this.previousNode = previousNode;
  }

  public String getSuffix() {
    return suffix;
  }

  public void setSuffix(String suffix) {
    this.suffix = suffix;
  }

  public AccessoryValue getAccessoryState() {
    return accessoryState;
  }

  public void setAccessoryState(AccessoryValue accessoryState) {
    this.accessoryState = accessoryState;
  }

  /** All edges of this node. */
  public Set<Edge> getEdges() {
    return getEdges(null);
  }

  /**
   * Edges filtered by suffix (matching either end); all edges when suffix is null.
   */
  public Set<Edge> getEdges(String suffix) {
    if (suffix != null) {
      return edges.stream().filter(e -> (suffix.equals(e.getFromSuffix()) || suffix.equals(e.getToSuffix()))).collect(Collectors.toSet());
    } else {
      return edges;
    }
  }

  public void addEdge(Edge edge) {
    edges.add(edge);
  }

  /** Total estimated cost f = g + h used for priority ordering. */
  public double getF() {
    return g + h;
  }

  /**
   * Determines the switch setting needed to travel from {@code from} to
   * {@code to} when {@code from} is a junction with a known predecessor.
   * Returns OFF when the move is not possible through the junction or when
   * no junction is involved.
   */
  AccessoryValue getAccessoryStatus(Node from, Node to) {
    if (from == null || to == null) {
      return AccessoryValue.OFF;
    }
    if (from.getPreviousNode() != null && from.getTile().isJunction()) {
      boolean isParentOnSwitchSide = from.getTile().isSwitchSide(from.getPreviousNode().getTile());
      boolean isParentOnStraightSide = from.getTile().isStraightSide(from.getPreviousNode().getTile());
      boolean isParentOnDivergingSide = from.getTile().isDivergingSide(from.getPreviousNode().getTile());
      boolean isToOnSwitchSide = from.getTile().isSwitchSide(to.getTile());
      boolean isToOnStraightSide = from.getTile().isStraightSide(to.getTile());
      boolean isToOnDivergingSide = from.getTile().isDivergingSide(to.getTile());
      if (isParentOnSwitchSide && (isToOnDivergingSide || isToOnStraightSide)) {
        // Entering via the switch side: RED selects the diverging leg, GREEN the straight leg.
        return (isToOnDivergingSide ? AccessoryValue.RED : AccessoryValue.GREEN);
      } else if (isParentOnStraightSide && isToOnSwitchSide) {
        return AccessoryValue.GREEN;
      } else if (isParentOnDivergingSide && isToOnSwitchSide) {
        return AccessoryValue.RED;
      } else {
        // e.g. straight -> diverging without passing the switch side.
        Logger.trace("Path from " + from.getPreviousNode().getId() + " via " + from.getId() + " to " + to.getId() + " is NOT possible");
        return AccessoryValue.OFF;
      }
    } else {
      return AccessoryValue.OFF;
    }
  }

  /**
   * Appends the start-to-this path to {@code path} (recursively, root first)
   * and records the required accessory state on each junction predecessor.
   */
  public void retrievePath(List<Node> path) {
    if (previousNode != null) {
      previousNode.retrievePath(path);
    }
    path.add(this);
    if (previousNode != null && previousNode.isJunction()) {
      previousNode.accessoryState = this.getAccessoryStatus(previousNode, this);
    }
  }

  /** Orders nodes by f; FIX: use Double.compare instead of a hand-rolled sign check. */
  @Override
  public int compareTo(Node o) {
    return Double.compare(getF(), o.getF());
  }

  @Override
  public String toString() {
    return "Node id: " + getId() + ", g: " + g + ", h: " + h + ", prevId: " + (previousNode != null ? previousNode.getId() : "") + (accessoryState != null ? " [" + accessoryState + "]" : "");
  }
}
|
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package com.wordpress.salaboy.services;
import com.wordpress.salaboy.acc.HospitalDistanceCalculator;
import com.wordpress.salaboy.model.Procedure;
import com.wordpress.salaboy.model.events.EmergencyEndsEvent;
import com.wordpress.salaboy.model.events.VehicleHitsHospitalEvent;
import com.wordpress.salaboy.model.events.VehicleHitsEmergencyEvent;
import com.wordpress.salaboy.services.workitemhandlers.ProcedureReportWorkItemHandler;
import com.wordpress.salaboy.workitemhandlers.DispatchVehicleWorkItemHandler;
import com.wordpress.salaboy.workitemhandlers.NotifyEndOfProcedureWorkItemHandler;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.apache.commons.io.IOUtils;
import org.drools.KnowledgeBase;
import org.drools.KnowledgeBaseConfiguration;
import org.drools.KnowledgeBaseFactory;
import org.drools.KnowledgeBaseFactoryService;
import org.drools.builder.KnowledgeBuilder;
import org.drools.builder.KnowledgeBuilderConfiguration;
import org.drools.builder.KnowledgeBuilderError;
import org.drools.builder.KnowledgeBuilderErrors;
import org.drools.builder.KnowledgeBuilderFactory;
import org.drools.builder.KnowledgeBuilderFactoryService;
import org.drools.builder.ResourceType;
import org.drools.builder.conf.AccumulateFunctionOption;
import org.drools.conf.EventProcessingOption;
import org.drools.grid.ConnectionFactoryService;
import org.drools.grid.GridConnection;
import org.drools.grid.GridNode;
import org.drools.grid.GridServiceDescription;
import org.drools.grid.conf.GridPeerServiceConfiguration;
import org.drools.grid.conf.impl.GridPeerConfiguration;
import org.drools.grid.impl.GridImpl;
import org.drools.grid.service.directory.Address;
import org.drools.grid.service.directory.WhitePages;
import org.drools.grid.service.directory.impl.CoreServicesLookupConfiguration;
import org.drools.grid.service.directory.impl.GridServiceDescriptionImpl;
import org.drools.grid.service.directory.impl.WhitePagesRemoteConfiguration;
import org.drools.io.impl.ByteArrayResource;
import org.drools.io.impl.ClassPathResource;
import org.drools.runtime.StatefulKnowledgeSession;
import org.drools.runtime.process.ProcessInstance;
import org.jbpm.task.service.hornetq.CommandBasedHornetQWSHumanTaskHandler;
/**
*
* @author salaboy
*/
/**
 * Heart-attack emergency procedure backed by a Drools/jBPM knowledge session.
 * The session can be built locally or obtained from a remote Drools Grid node
 * (white pages lookup on localhost:8000), depending on useLocalKSession.
 */
public class DefaultHeartAttackProcedureImpl implements DefaultHeartAttackProcedure {
    private String emergencyId;
    private StatefulKnowledgeSession internalSession;
    private String procedureName;
    private boolean useLocalKSession;
    private boolean logInFile; // NOTE(review): never read in this class — presumably used elsewhere or dead
    public DefaultHeartAttackProcedureImpl() {
        this.procedureName = "com.wordpress.salaboy.bpmn2.DefaultHeartAttackProcedure";
    }
    /**
     * Builds the knowledge session for this emergency: compiles the BPMN2
     * processes and DRL rules, with STREAM event processing and a custom
     * "hospitalDistanceCalculator" accumulate function registered.
     * Throws IllegalStateException when the knowledge assets fail to compile.
     */
    private StatefulKnowledgeSession createDefaultHeartAttackProcedureSession(String emergencyId) throws IOException {
        System.out.println(">>>> I'm creating the "+"DefaultHeartAttackProcedure"+" procedure for emergencyId = "+emergencyId);
        GridNode remoteN1 = null;
        KnowledgeBuilder kbuilder = null;
        KnowledgeBase kbase = null;
        if (useLocalKSession) {
            // Local build: everything lives in this JVM.
            KnowledgeBuilderConfiguration kbuilderConf = KnowledgeBuilderFactory.newKnowledgeBuilderConfiguration();
            kbuilderConf.setOption(AccumulateFunctionOption.get("hospitalDistanceCalculator", new HospitalDistanceCalculator()));
            kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder(kbuilderConf);
            KnowledgeBaseConfiguration kbaseConf = KnowledgeBaseFactory.newKnowledgeBaseConfiguration();
            kbaseConf.setOption(EventProcessingOption.STREAM);
            kbase = KnowledgeBaseFactory.newKnowledgeBase(kbaseConf);
        }else{
            // Remote build: locate grid node "n1" via the white-pages service
            // on localhost:8000 and create builder/base on that node.
            Map<String, GridServiceDescription> coreServicesMap = new HashMap<String, GridServiceDescription>();
            GridServiceDescriptionImpl gsd = new GridServiceDescriptionImpl(WhitePages.class.getName());
            Address addr = gsd.addAddress("socket");
            addr.setObject(new InetSocketAddress[]{new InetSocketAddress("localhost", 8000)});
            coreServicesMap.put(WhitePages.class.getCanonicalName(), gsd);
            GridImpl grid = new GridImpl(new ConcurrentHashMap<String, Object>());
            GridPeerConfiguration conf = new GridPeerConfiguration();
            GridPeerServiceConfiguration coreSeviceConf = new CoreServicesLookupConfiguration(coreServicesMap);
            conf.addConfiguration(coreSeviceConf);
            GridPeerServiceConfiguration wprConf = new WhitePagesRemoteConfiguration();
            conf.addConfiguration(wprConf);
            conf.configure(grid);
            GridServiceDescription<GridNode> n1Gsd = grid.get(WhitePages.class).lookup("n1");
            GridConnection<GridNode> conn = grid.get(ConnectionFactoryService.class).createConnection(n1Gsd);
            remoteN1 = conn.connect();
            KnowledgeBuilderConfiguration kbuilderConf = remoteN1.get(KnowledgeBuilderFactoryService.class).newKnowledgeBuilderConfiguration();
            kbuilderConf.setOption(AccumulateFunctionOption.get("hospitalDistanceCalculator", new HospitalDistanceCalculator()));
            kbuilder = remoteN1.get(KnowledgeBuilderFactoryService.class).newKnowledgeBuilder(kbuilderConf);
            KnowledgeBaseConfiguration kbaseConf = remoteN1.get(KnowledgeBaseFactoryService.class).newKnowledgeBaseConfiguration();
            kbaseConf.setOption(EventProcessingOption.STREAM);
            kbase = remoteN1.get(KnowledgeBaseFactoryService.class).newKnowledgeBase(kbaseConf);
        }
        // Compile processes and rules from the classpath.
        kbuilder.add(new ByteArrayResource(IOUtils.toByteArray(new ClassPathResource("processes/procedures/MultiVehicleProcedure.bpmn").getInputStream())), ResourceType.BPMN2);
        kbuilder.add(new ByteArrayResource(IOUtils.toByteArray(new ClassPathResource("processes/procedures/DefaultHeartAttackProcedure.bpmn").getInputStream())), ResourceType.BPMN2);
        kbuilder.add(new ByteArrayResource(IOUtils.toByteArray(new ClassPathResource("rules/select_hospital.drl").getInputStream())), ResourceType.DRL);
        kbuilder.add(new ByteArrayResource(IOUtils.toByteArray(new ClassPathResource("rules/defaultHeartAttackProcedureEventHandling.drl").getInputStream())), ResourceType.DRL);
        KnowledgeBuilderErrors errors = kbuilder.getErrors();
        if (errors != null && errors.size() > 0) {
            for (KnowledgeBuilderError error : errors) {
                System.out.println(">>>>>>> Error: " + error.getMessage());
            }
            throw new IllegalStateException("Failed to parse knowledge!");
        }
        kbase.addKnowledgePackages(kbuilder.getKnowledgePackages());
        StatefulKnowledgeSession session = kbase.newStatefulKnowledgeSession();
        if (!useLocalKSession){
            // Register the session on the grid node so other peers can find it.
            remoteN1.set("DefaultHeartAttackProcedureSession" + emergencyId, session);
        }
        return session;
    }
    // Wires the work item handlers the BPMN processes expect.
    private void setWorkItemHandlers(StatefulKnowledgeSession session) {
        session.getWorkItemManager().registerWorkItemHandler("Report", new ProcedureReportWorkItemHandler());
        session.getWorkItemManager().registerWorkItemHandler("DispatchSelectedVehicle", new DispatchVehicleWorkItemHandler());
        session.getWorkItemManager().registerWorkItemHandler("NotifyEndOfProcedure", new NotifyEndOfProcedureWorkItemHandler());
        session.getWorkItemManager().registerWorkItemHandler("Human Task", new CommandBasedHornetQWSHumanTaskHandler(session));
    }
    // Forwards the "vehicle reached hospital" event into the session as a signal.
    @Override
    public void patientAtHospitalNotification(VehicleHitsHospitalEvent event) {
        internalSession.signalEvent("com.wordpress.salaboy.model.events.PatientAtHospitalEvent", event);
    }
    // Inserts the pickup event as a fact; the DRL rules react to it.
    @Override
    public void patientPickUpNotification(VehicleHitsEmergencyEvent event) {
        internalSession.insert(event);
    }
    // Intentionally a no-op for this procedure.
    @Override
    public void procedureEndsNotification(EmergencyEndsEvent event) {
    }
    /**
     * Creates the session, registers handlers, starts rule firing on a
     * background thread and launches the MultiVehicleProcedure process.
     * NOTE(review): if session creation throws IOException it is only logged,
     * leaving internalSession null — the following calls would then NPE.
     */
    @Override
    public void configure( String emergencyId, Procedure procedure, Map<String, Object> parameters) {
        this.emergencyId = emergencyId;
        try {
            internalSession = createDefaultHeartAttackProcedureSession(this.emergencyId);
        } catch (IOException ex) {
            Logger.getLogger(DefaultHeartAttackProcedureImpl.class.getName()).log(Level.SEVERE, null, ex);
        }
        setWorkItemHandlers(internalSession);
        new Thread(new Runnable() {
            public void run() {
                internalSession.fireUntilHalt();
            }
        }).start();
        parameters.put("concreteProcedureId", this.procedureName);
        parameters.put("procedure", procedure);
        ProcessInstance pi = internalSession.startProcess("com.wordpress.salaboy.bpmn2.MultiVehicleProcedure",parameters);
        internalSession.insert(pi);
        procedure.setProcessInstanceId(pi.getId());
    }
    public boolean isUseLocalKSession() {
        return useLocalKSession;
    }
    public void setUseLocalKSession(boolean useLocalKSession) {
        this.useLocalKSession = useLocalKSession;
    }
}
|
import React from "react";
import { connect } from "react-redux";
import * as R from "ramda";
import { Link } from "react-router";
import { getBasketPhonesWithCount, getTotalBasketPrice } from "selectors";
import { removePhoneFromBasket, cleanBasket, basketCheckout } from "actions";
const Basket = ({
phones,
totalPrice,
basketCheckout,
cleanBasket,
removePhoneFromBasket
}) => {
const isBasketEmpty = R.isEmpty(phones);
const renderContent = () => {
return (
<div>
{isBasketEmpty && <div>Your shopping cart is empty</div>}
<div className="table-responsive">
<table className="table-bordered table-striped table-condensed cf">
<tbody>
{phones.map((phone, index) => (
<tr key={index} className="item-checout">
<td className="first-column-checkout">
<img
src={phone.image}
alt={phone.name}
className="img-thumbnail"
/>
</td>
<td>{phone.name}</td>
<td>${phone.price}</td>
<td>{phone.count}</td>
<td>
<span
className="delete-cart"
onClick={() => removePhoneFromBasket(phone.id)}
/>
</td>
</tr>
))}
</tbody>
</table>
</div>
{R.not(isBasketEmpty) && (
<div className="row">
<div className="pull-right total-user-checkout">
<b>Total:</b>${totalPrice}
</div>
</div>
)}
</div>
);
};
const renderSideBar = () => (
<div>
<Link to="/" className="btn btn-info">
<span className="glyphicon glyphicon-info-sign" />
<span>Continue shopping!</span>
</Link>
{R.not(isBasketEmpty) && (
<div>
<button onClick={cleanBasket} className="btn btn-danger">
<span className="glyphicon glyphicon-trash" />
Clear cart
</button>
<button
onClick={() => basketCheckout(phones)}
className="btn btn-success"
>
<span className="glyphicon glyphicon-envelope" />
Checkout
</button>
</div>
)}
</div>
);
return (
<div className="view-container">
<div className="container">
<div className="row">
<div className="col-md-9 btn-user-checkout">{renderContent()}</div>
<div className="col-md-3">{renderSideBar()}</div>
</div>
</div>
</div>
);
};
// Slices of global state consumed by the basket page.
const mapStateToProps = state => ({
  phones: getBasketPhonesWithCount(state),
  totalPrice: getTotalBasketPrice(state)
});

// Action creators bound to dispatch via react-redux's object shorthand.
const mapDispatchToProps = {
  removePhoneFromBasket,
  cleanBasket,
  basketCheckout
};

export default connect(mapStateToProps, mapDispatchToProps)(Basket);
|
<template>
<div>
<el-button type="success" @click="detailsValue()">详情</el-button>
<el-dialog title="提示" v-model="dialogVisible" width="60%">
<el-descriptions direction="vertical" :column="5" border>
<el-descriptions-item label="用户名" align="center">{{
ActivityDetailsValue.username
}}</el-descriptions-item>
<el-descriptions-item label="负责人" align="center">{{
ActivityDetailsValue.name
}}</el-descriptions-item>
<el-descriptions-item label="企业名称" align="center">{{
ActivityDetailsValue.nickname
}}</el-descriptions-item>
<el-descriptions-item label="头像" align="center">
<el-image
:src="imagesValue + ActivityDetailsValue.headimage"
:preview-src-list="[imagesValue + ActivityDetailsValue.headimage]"
style="width: 40px; height: 40px"
class="stylecss"
/>
</el-descriptions-item>
<el-descriptions-item label="企业电话" align="center">{{
ActivityDetailsValue.tel
}}</el-descriptions-item>
<el-descriptions-item label="封面" align="center">
<el-image
:src="imagesValue + ActivityDetailsValue.thumb"
:preview-src-list="[imagesValue + ActivityDetailsValue.thumb]"
style="width: 40px; height: 40px"
class="stylecss"
/>
</el-descriptions-item>
<el-descriptions-item label="省" align="center">{{
ActivityDetailsValue.province
}}</el-descriptions-item>
<el-descriptions-item label="市" align="center">{{
ActivityDetailsValue.city
}}</el-descriptions-item>
<el-descriptions-item label="区" align="center">{{
ActivityDetailsValue.area
}}</el-descriptions-item>
<el-descriptions-item label="地址" align="center">{{
ActivityDetailsValue.detail
}}</el-descriptions-item>
<el-descriptions-item label="经度" align="center">{{
ActivityDetailsValue.lng
}}</el-descriptions-item>
<el-descriptions-item label="纬度" align="center">{{
ActivityDetailsValue.lat
}}</el-descriptions-item>
<el-descriptions-item label="账号认证状态" align="center">
  <!-- Use loose equality consistently with the sibling checks and with
       filterAuth(): the original strict `=== 2` never matched when the
       API returned auth as a string, so the green class was skipped
       while yellow/red still applied. -->
  <div
    :class="{
      green: ActivityDetailsValue.auth == 2,
      yellow: ActivityDetailsValue.auth == 1,
      red: ActivityDetailsValue.auth == 3,
    }"
  >
    {{ filterAuth(ActivityDetailsValue.auth) }}
  </div>
</el-descriptions-item>
<el-descriptions-item label="负责人电话" align="center">{{
ActivityDetailsValue.tel_f
}}</el-descriptions-item>
<el-descriptions-item label="身份证号码" align="center">{{
ActivityDetailsValue.card_no
}}</el-descriptions-item>
<el-descriptions-item label="身份证正面照片" align="center">
<el-image
:src="imagesValue + ActivityDetailsValue.card_z"
:preview-src-list="[imagesValue + ActivityDetailsValue.card_z]"
style="width: 40px; height: 40px"
/>
</el-descriptions-item>
<el-descriptions-item label="身份证反面照片" align="center">
<el-image
:src="imagesValue + ActivityDetailsValue.card_f"
:preview-src-list="[imagesValue + ActivityDetailsValue.card_f]"
style="width: 40px; height: 40px"
/>
</el-descriptions-item>
<el-descriptions-item label="店铺照片" align="center">
<el-image
:src="imagesValue + ActivityDetailsValue.shop_img"
:preview-src-list="[imagesValue + ActivityDetailsValue.shop_img]"
style="width: 40px; height: 40px"
/>
</el-descriptions-item>
<el-descriptions-item label="营业执照" align="center">
<el-image
:src="imagesValue + ActivityDetailsValue.license"
:preview-src-list="[imagesValue + ActivityDetailsValue.license]"
style="width: 40px; height: 40px"
/>
</el-descriptions-item>
</el-descriptions>
<span>
<el-button @click="dialogVisible = false">取 消</el-button>
<el-button type="primary" @click="dialogVisible = false"
>确 定</el-button
>
</span>
</el-dialog>
</div>
</template>
<script>
import { businessAuthBusinessApi } from "@/urls/busomessUrl.js";
import { postD } from "@/api";
import { imgUrl } from "@/assets/js/modifyStyle";
export default {
  // Row object for the business whose details should be displayed;
  // only `.id` is read here.
  props: ["detilsSusinesser"],
  data() {
    return {
      // Base URL prepended to every image path returned by the API.
      imagesValue: "",
      // Controls visibility of the details dialog.
      dialogVisible: false,
      // Request payload for the detail endpoint.
      ActivityDetailsId: {
        id: "",
      },
      // Detail record returned by the API. The template reads object
      // properties (username, name, ...), so this must start as an
      // object — the original `[]` rendered as all-undefined fields
      // until the request resolved.
      ActivityDetailsValue: {},
    };
  },
  methods: {
    // Opens the dialog and loads the details of the selected business.
    detailsValue() {
      this.dialogVisible = true;
      this.ActivityDetailsId.id = this.detilsSusinesser.id;
      postD(businessAuthBusinessApi(), this.ActivityDetailsId)
        .then((res) => {
          this.ActivityDetailsValue = res.data;
          this.imagesValue = imgUrl();
        })
        .catch((err) => {
          // Surface load failures instead of an unhandled rejection.
          console.error("Failed to load business details:", err);
        });
    },
    // Maps the numeric auth status to a human-readable label.
    filterAuth(val) {
      if (val == 1) {
        return "已认证待审核";
      } else if (val == 2) {
        return "审核通过";
      } else if (val == 3) {
        return "审核不通过";
      } else {
        return "待认证";
      }
    },
  },
};
</script>
|
package com.cjx.entity;
import com.baomidou.mybatisplus.annotation.IdType;
import com.baomidou.mybatisplus.annotation.TableField;
import com.baomidou.mybatisplus.annotation.TableId;
import com.baomidou.mybatisplus.annotation.TableName;
import java.io.Serializable;
import java.util.Date;
import com.fasterxml.jackson.annotation.JsonFormat;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.experimental.Accessors;
import org.springframework.format.annotation.DateTimeFormat;
/**
* @TableName user
*/
@TableName(value ="user")
@Data
@AllArgsConstructor
@NoArgsConstructor
@Accessors(chain = true)
public class User implements Serializable {
    // Primary key, auto-incremented by the database.
    @TableId(type = IdType.AUTO)
    private Integer id;
    // Display name.
    private String name;
    // Avatar image URL/path.
    private String avatar;
    // Short self-introduction / bio.
    private String intro;
    // Phone number.
    private String phone;
    // Whether a phone number is linked; serialized as "phone_linked".
    @JsonProperty("phone_linked")
    private Integer phoneLinked;
    // WeChat openid.
    private String openid;
    // Whether a WeChat account is linked; serialized as "wechat_linked".
    @JsonProperty("wechat_linked")
    private Integer wechatLinked;
    // NOTE(review): unlike the *_linked fields above, the two counters
    // below have no @JsonProperty, so they serialize in camelCase —
    // confirm that is intentional.
    private Integer followingCount;
    private Integer followersCount;
    // Record timestamps; deletedAt presumably marks soft deletion —
    // TODO confirm against the mapper configuration.
    private Date createdAt;
    private Date updatedAt;
    private Date deletedAt;
    private static final long serialVersionUID = 1L;
}
|
import os
import torch
from torch.utils.data import DataLoader
from torchvision import datasets
from project import CNN
from project.logger.logger import LoggerConfigurator
logger_configurator = LoggerConfigurator("Tests")
logger = logger_configurator.get_logger()
def _fail(message):
    """Log an assertion failure and return the message.

    ``logger.error`` returns ``None``, so using it directly as an assert
    message logged the error but produced ``AssertionError: None``.
    Routing through this helper keeps the log call and gives the
    AssertionError a meaningful message.
    """
    logger.error(message)
    return message


def test_data():
    """Sanity-check the train/test/validation image splits.

    For each split this verifies the number of classes, the split
    length, the shape of one batch, and basic value ranges for the
    images and targets.
    """
    model = CNN()
    for name, length in zip(["train", "test", "validation"], [7350, 1050, 2100]):
        path = os.path.join(
            "Data", "landuse-scene-classification", "images_train_test_val", name
        )
        dataset = datasets.ImageFolder(path, transform=model.image_transform())
        data_loader = DataLoader(dataset, batch_size=model.batch_size, shuffle=True)
        # Pull a single batch to validate shapes and value ranges.
        images, target = next(iter(data_loader))
        assert len(dataset.classes) == 21, _fail(
            f"Unexpected number of classes in the dataset. Got: {dataset.classes}, but expected 21"
        )
        assert len(dataset) == length, _fail(
            f"The length of the dataset split was not as expected for split: {name}"
        )
        assert len(target) == model.batch_size, _fail(
            f"Unexpected batch size, Got: {target} but expected: {model.batch_size}"
        )
        assert images.shape == torch.Size(
            [model.batch_size, model.channels, model.img_dim, model.img_dim]
        ), _fail(
            "Unexpected size of image. Got: {}, expected: {}".format(
                images.shape,
                torch.Size([model.batch_size, model.channels, model.img_dim, model.img_dim]),
            )
        )
        assert (torch.sum(images < 0) == 0).item(), _fail(
            "Possible negative RGB value contained in image. This is unexpected"
        )
        # Bug fix: the original combined the two range checks with `&`
        # (both `target > length` AND `target < 0`), which can never be
        # true simultaneously, so the check always passed. `|` flags
        # either violation.
        assert (torch.sum((target > length) | (target < 0)) == 0).item(), _fail(
            "Unexpected output of target. Might contain negative values or exceed expected values"
        )
        logger.info(f"All tests passed for split: {name}")
    logger.info("All tests passed for data")
|
import Product from "../models/product.model.js";
// Get all products
/**
 * GET all products. Responds 200 with the full product list, or 500
 * with the error message on failure.
 */
export const getProducts = async (req, res) => {
  try {
    const allProducts = await Product.find();
    res.status(200).json(allProducts);
  } catch (err) {
    res.status(500).json({ message: err.message });
  }
};
// Get product by id
/**
 * GET one product by id (route param `id`), with its business and the
 * business owner's public profile populated.
 *
 * Responds 404 when the product does not exist, 200 with a flattened
 * response object otherwise, and 500 on unexpected errors.
 */
export const getProductById = async (req, res) => {
  try {
    const { id } = req.params;
    const product = await Product.findById(id)
      .populate({
        path: "business",
        select: "name owner",
        populate: {
          path: "owner",
          select: "_id username profilePic", // Include username and profilePic fields
        },
      })
      .select("name price images description"); // Include description field
    if (!product) {
      return res.status(404).json({ message: "Product not found" });
    }
    // Guard against products whose business (or its owner) was deleted:
    // the original dereferenced product.business.owner unconditionally
    // and crashed with a TypeError -> generic 500.
    const business = product.business;
    const owner = business ? business.owner : null;
    const response = {
      _id: product._id,
      name: product.name,
      price: product.price,
      images: product.images,
      description: product.description,
      business: business
        ? {
            _id: business._id,
            name: business.name,
            owner: owner
              ? {
                  username: owner.username,
                  profilePic: owner.profilePic,
                  id: owner._id,
                }
              : null,
          }
        : null,
    };
    res.status(200).json(response);
  } catch (error) {
    res.status(500).json({ message: error.message });
  }
};
// Get products by business
/**
 * GET all products belonging to one business (route param `id`).
 * Responds 200 with the (possibly empty) list, or 500 on failure.
 */
export const getProductsByBusiness = async (req, res) => {
  try {
    const { id } = req.params;
    // Removed the stray debug console.log of the business id that was
    // left in from development.
    const products = await Product.find({ business: id });
    res.status(200).json(products);
  } catch (error) {
    res.status(500).json({ message: error.message });
  }
};
// Create product
/**
 * POST a new product. Builds a Product from the request body, persists
 * it, and responds 201 with the saved document; 500 on failure.
 */
export const createProduct = async (req, res) => {
  try {
    const { name, description, price, business, images } = req.body;
    // Build and persist the new product in one step.
    const product = new Product({ name, description, price, business, images });
    await product.save();
    res.status(201).json({ message: "Product created successfully", product });
  } catch (error) {
    console.error("Error creating product:", error);
    res.status(500).json({ message: error.message });
  }
};
// // Update product
// export const updateProduct = async (req, res) => {
// try {
// const { id } = req.params;
// const { name, description, price, business } = req.body;
// const product = await Product.findByIdAndUpdate(
// id,
// {
// name,
// description,
// price,
// business,
// },
// { new: true }
// );
// res.status(200).json(product);
// } catch (error) {
// res.status(500).json({ message: error.message });
// }
// };
|
<template>
<div>
<div>
<Logo class="w-auto h-16 mx-auto" />
<h2 class="mt-6 text-3xl font-extrabold text-center text-gray-900">Sign in to your account</h2>
</div>
<form class="mt-8 space-y-4" action="#" method="POST">
<div
v-if="loginErrors"
class="w-full p-2 mx-auto text-white bg-red-400 border-red-600 rounded"
>
<ul class="text-sm">
  <!-- Parenthesized destructuring for the (value, key) iterator and
       explicit :key bindings so Vue does not patch rows in place when
       the error set changes. -->
  <template v-for="(errors, field) in loginErrors" :key="field">
    <li v-for="error in errors" :key="error">{{ error }}</li>
  </template>
</ul>
</div>
<input type="hidden" name="remember" value="true" />
<div class="-space-y-px rounded-md shadow-sm">
<div>
<label for="email-address" class="sr-only">Email address</label>
<input
id="email-address"
name="email"
type="email"
autocomplete="email"
required
v-model="email"
class="relative block w-full px-3 py-2 text-gray-900 placeholder-gray-500 border border-gray-300 rounded-none appearance-none rounded-t-md focus:outline-none focus:ring-red-500 focus:border-red-500 focus:z-10 sm:text-sm"
:class="{ 'border-red-500': loginErrors?.email }"
placeholder="Email address"
/>
</div>
<div>
  <label for="password" class="sr-only">Password</label>
  <input
    id="password"
    name="password"
    type="password"
    autocomplete="current-password"
    required
    v-model="password"
    class="relative block w-full px-3 py-2 text-gray-900 placeholder-gray-500 border border-gray-300 rounded-none appearance-none rounded-b-md focus:outline-none focus:ring-red-500 focus:border-red-500 focus:z-10 sm:text-sm"
    :class="{ 'border-red-500': loginErrors?.password }"
    placeholder="Password"
  />
</div>
</div>
<div class="flex items-center justify-between">
<div class="flex items-center">
<input
id="remember-me"
name="remember-me"
type="checkbox"
v-model="remember"
class="w-4 h-4 text-red-600 border-gray-300 rounded focus:ring-red-500"
/>
<label for="remember-me" class="block ml-2 text-sm text-gray-900">Remember me</label>
</div>
<div class="text-sm">
<a href="#" class="font-medium text-red-600 hover:text-red-500">Forgot your password?</a>
</div>
</div>
<div>
<button
type="submit"
class="relative flex justify-center w-full px-4 py-2 text-sm font-medium text-white bg-red-600 border border-transparent rounded-md disabled:cursor-not-allowed disabled:bg-red-200 group hover:bg-red-700 focus:outline-none focus:ring-2 focus:ring-offset-2 focus:ring-red-500"
@click.prevent="logIn"
:disabled="!email || !password"
>
<span class="absolute inset-y-0 left-0 flex items-center pl-3">
<lock-closed-icon
class="w-5 h-5 text-red-500 group-hover:text-red-400"
aria-hidden="true"
/>
</span>
Sign in
</button>
</div>
</form>
</div>
</template>
<script lang=ts>
// Authentication form modes. NOTE(review): not referenced anywhere in
// this component's visible code — confirm it is used by a consumer
// before removing.
enum MODES {
  Login,
  Register,
};
</script>
<script setup lang="ts">
import { ref, computed } from 'vue';
import { LockClosedIcon } from '@heroicons/vue/solid';
import { useStore } from '../../services/store/store';
import { ActionTypes } from '../../services/store/actions';
import Logo from '../../assets/logo.svg?component';
const store = useStore();

// Validation errors from the most recent login attempt (empty when none).
const loginErrors = computed(() => store.state.loginValidationErrors);

// Form field state.
const email = ref('');
const password = ref('');
const remember = ref(false);

// Dispatches the login action with the current form values.
function logIn() {
  const credentials = {
    email: email.value,
    password: password.value,
    remember: remember.value,
  };
  store.dispatch(ActionTypes.Login, credentials);
}
</script>
|
<!--
* @Date : 2020-03-05 17:14:48
* @LastEditors : HaoJie
* @LastEditTime : 2020-06-12 14:44:29
* @FilePath : \src\pages\home\home.vue
-->
<script lang="ts">
import { Component, Vue, Model, Prop } from "vue-property-decorator";
import { getModule } from "vuex-module-decorators";
import HomeStore from "store/modules/home/HomeStore";
// Home shell component: renders the top navigation, the company/project
// cascader, and routes between the company and project homepages.
@Component({})
export default class Home extends Vue {
  // Home Vuex module accessor (resolved in the constructor).
  private store: any;
  public userName: string | null = sessionStorage.getItem("userName");
  public list: Array<any> = [];
  // Initial route: restore the stored path if any; otherwise choose the
  // homepage matching the department type kept in sessionStorage.
  private path: string | null = sessionStorage.getItem("path")
    ? sessionStorage.getItem("path")
    : sessionStorage.getItem("deptTypeNo") === "project"
    ? "/home/projectHomepage"
    : "/home/homepage";
  // Toggled off and back on to force the view to re-render after the
  // cascader selection changes (see companyChange).
  private isShow = true;
  // Currently selected cascader path (list of uuids).
  private company: Array<any> = [];
  // Cascader options built from the company tree.
  private companyList: Array<any> = [];
  // Name of the homepage route matching the current department type.
  private homepage: string =
    sessionStorage.getItem("deptTypeNo") === "project"
      ? "projectHomepage"
      : "homepage";
  // Whether the current selection is a company or a project.
  private deptTypeNo: string | null = sessionStorage.getItem("deptTypeNo");
  constructor() {
    super();
    this.store = getModule(HomeStore);
  }
  mounted() {
    this.getCompanyList();
    // Default the "viewed project" id to the session's project id.
    const seeProjectId = sessionStorage.getItem("projectId") || ""
    sessionStorage.setItem("seeProjectId", seeProjectId);
    // Leave the project homepage if the session is not project-scoped.
    if (
      this.$route.path === "/home/projectHomepage" &&
      sessionStorage.getItem("deptTypeNo") !== "project"
    ) {
      this.$router.push("/home/homepage");
    }
  }
  // Fetch the company list for the current project.
  getCompanyList() {
    this.store
      .getCompanyList({ projectId: sessionStorage.getItem("projectId") })
      .then((res: any) => {
        if (res.code === 0) {
          // Pre-select the first company and reshape the tree for the
          // cascader.
          this.company = [res.data[0].uuid];
          this.companyList = this.processing(res.data);
        }
      });
  }
  // Reshape company tree nodes into cascader options (label/value) in
  // place, recursing into children; empty children arrays are nulled so
  // the cascader treats those nodes as leaves.
  processing(list: any[]) {
    list.forEach(item => {
      item.label = item.name;
      item.value = item.uuid;
      if (item.children.length) {
        this.processing(item.children);
        return;
      }
      item.children = null;
    });
    return list;
  }
  // Called when the cascader selection changes: records the selection,
  // switches between company/project homepages when needed, and forces
  // the view to re-render.
  companyChange(val: any[]) {
    this.isShow = false;
    sessionStorage.setItem("seeProjectId", val[val.length - 1]);
    this.getDeptTypeNo(this.companyList, val);
    this.homepage =
      this.deptTypeNo === "project" ? "projectHomepage" : "homepage";
    if (
      this.deptTypeNo === "project" &&
      this.$route.path === "/home/homepage"
    ) {
      this.$router.push("/home/projectHomepage");
    }
    if (
      this.deptTypeNo !== "project" &&
      this.$route.path === "/home/projectHomepage"
    ) {
      this.$router.push("/home/homepage");
    }
    this.$nextTick(() => {
      this.isShow = true;
      this.path = this.$route.path;
    });
  }
  // Walk the company tree along the selected uuid path and record the
  // deptTypeNo of the final selected node.
  getDeptTypeNo(list: any[], val: any[], index = 0) {
    list.forEach(a => {
      if (a.uuid === val[index] && index === val.length - 1) {
        this.deptTypeNo = a.deptTypeNo;
        return;
      }
      if (a.uuid === val[index]) {
        this.getDeptTypeNo(a.children, val, index + 1);
      }
    });
  }
  // Log out: clear the session and return to the login page.
  quit() {
    sessionStorage.clear();
    this.$router.push({ path: "/login" });
  }
  // Menu click handler: remember the selected route.
  handleSelect(key: string) {
    // console.log(key, keyPath);
    this.path = key;
  }
  // Whether the company/project cascader should be shown for the
  // current path (currently always true; exclusions are commented out).
  get showSelect() {
    const path = this.path;
    switch (path) {
      // case "/home/cultivate":
      // case "/home/securityLog":
      // return false
      // break;
      default:
        return true;
    }
  }
}
</script>
<template lang="pug" src="views/home.pug" />
<style scoped lang="stylus" src='styles/home.stylus' />
<style lang="stylus">
#home-wrap
.body
.el-menu
.el-submenu__title
font-size: 0.2rem;
border-bottom: 0.01rem solid #98c8e7;
i
color: #fff !important;
.el-submenu__title:hover
background-color: #2d83bb !important;
/deep/ .el-menu-item-group__title
display none !important
</style>
|
import { useDispatch, useSelector } from "@app/hooks";
import { useNavigation } from "@react-navigation/native";
import {render, fireEvent } from '@testing-library/react-native';
import { Box, Center, theme } from "native-base";
import React from "react";
import TestingWrapperNavigation from '@app/testingWrapperWithNavigation';
import configureStore from "redux-mock-store";
import FollowListDisplay from "./FollowListDisplay";
import thunk from "redux-thunk";
// --- shared jest mocks -------------------------------------------------
// Dispatch spy returned by the mocked useDispatch.
const mockedDispatch = jest.fn();
const mockedUseSelector = useSelector as jest.Mock;
const mockedUseNavigation = useNavigation as jest.Mock;
// Default return values; the selector mock is re-implemented per test in
// beforeEach below.
mockedUseSelector.mockReturnValue({
  profile: {
    userId: "123",
  },
});
mockedUseNavigation.mockReturnValue({
  push: jest.fn(),
});
// NOTE: jest.mock calls are hoisted above the imports by babel-jest, so
// these factories apply before the component module is loaded.
jest.mock('../../../app/hooks', () => ({
  useSelector: jest.fn(),
  useDispatch: () => mockedDispatch,
}));
jest.mock('@react-navigation/native', () => ({
  ...jest.requireActual('@react-navigation/native'),
  useNavigation: jest.fn(),
}));

describe('FollowListDisplay', () => {
  // Minimal redux state the component under test reads.
  const mockState = {
    profile: {
      profile: {
        userId: '123',
        name: 'Test User',
        followerCount: 10,
        followingCount: 20,
        biography: 'Test Biography',
      },
    },
  };
  const mockStore = configureStore([thunk])(mockState);
  let wrapper: any;
  beforeEach(() => {
    // Route useSelector through the mock store's state for every test.
    (useSelector as jest.Mock).mockImplementation((selector) => selector(mockStore.getState()));
    wrapper = render(
      <TestingWrapperNavigation store={mockStore}>
        <FollowListDisplay followListMode={"follower"} followCount={10} />
      </TestingWrapperNavigation>
    );
  });
  it("renders correctly", () => {
    expect(wrapper).toMatchSnapshot();
  });
  it("should push navigation to followList when handlePress is called", () => {
    // Pressing the display should navigate to the follower list and
    // dispatch (the component presumably refreshes the list — TODO
    // confirm which action is dispatched).
    const followListButton = wrapper.getByTestId('follow-list-display');
    fireEvent.press(followListButton);
    expect(mockedUseNavigation().push).toHaveBeenCalledWith("FollowList", {followListMode: "follower"});
    expect(mockedDispatch).toHaveBeenCalled();
    expect(wrapper).toMatchSnapshot();
  });
});
|
import 'package:dial_zero/models/dialled_model.dart';
import 'package:flutter/material.dart';
import 'package:flutter_riverpod/flutter_riverpod.dart';
/// In-call screen: shows the dialled number, a grid of call-control
/// buttons and an end-call button.
class CallScreenPageWidget extends ConsumerWidget {
  /// Action buttons rendered in the control grid, in display order.
  final List<String> buttons = [
    "RECORD",
    "HOLD",
    "ADD_CALL",
    "MUTE",
    "SWITCH_TO_VIDEO",
    "AUDIO_SOURCE"
  ];

  @override
  Widget build(BuildContext context, WidgetRef ref) {
    String dialledNumber = ref.read(dialledProvider);
    return Scaffold(
      body: SafeArea(
        child: Column(
          children: [
            Text(
              dialledNumber,
              style: TextStyle(
                fontSize: 40.0
              ),
            ),
            GridView.count(
              crossAxisCount: 3,
              // A GridView placed directly in a Column has unbounded
              // height and throws a flex layout error at runtime;
              // shrink-wrap it so it sizes to its children, and disable
              // its own scrolling since the grid is small and static.
              shrinkWrap: true,
              physics: const NeverScrollableScrollPhysics(),
              children: buttons.map(
                (e) => CallScreenButton(type: e)
              ).toList(),
            ),
            Row(
              mainAxisAlignment: MainAxisAlignment.center,
              mainAxisSize: MainAxisSize.max,
              children: [
                CallScreenButton(type: "END_CALL")
              ],
            )
          ],
        ),
      )
    );
  }
}
/// Single call-control button; renders an icon + label matching [type].
class CallScreenButton extends ConsumerWidget {
  CallScreenButton({
    required this.type
  }): super(key: UniqueKey());

  /// Which in-call action this button represents (e.g. "MUTE",
  /// "END_CALL"). Unknown values render an empty container.
  final String type;

  @override
  Widget build(BuildContext context, WidgetRef ref) {
    switch(type) {
      case "RECORD":
        return TextButton.icon(
          onPressed: () {},
          icon: Icon(Icons.record_voice_over),
          label: Text("Record")
        );
      case "HOLD":
        return TextButton.icon(
          onPressed: () {},
          icon: Icon(Icons.pause),
          label: Text("Hold")
        );
      case "ADD_CALL":
        return TextButton.icon(
          onPressed: () {},
          icon: Icon(Icons.add_call),
          label: Text("Add Call")
        );
      case "MUTE":
        return TextButton.icon(
          onPressed: () {},
          icon: Icon(Icons.mic_off),
          label: Text("Mute")
        );
      case "SWITCH_TO_VIDEO":
        return TextButton.icon(
          onPressed: () {},
          icon: Icon(Icons.video_call),
          label: Text("Video")
        );
      case "AUDIO_SOURCE":
        return TextButton.icon(
          onPressed: () {},
          icon: Icon(Icons.speaker),
          label: Text("Speaker")
        );
      case "END_CALL":
        return TextButton.icon(
          onPressed: () {},
          style: ButtonStyle(
            backgroundColor: MaterialStatePropertyAll(Colors.red),
            iconColor: MaterialStatePropertyAll(Colors.white)
          ),
          // Hang-up glyph: the original used Icons.call, which is the
          // answer-call icon.
          icon: Icon(Icons.call_end),
          label: Text("End Call")
        );
      default:
        return Container();
    }
  }
}
|
'use client';
import FormFooter from '@/src/components/layouts/formFooter';
import apiRoutes from '@/src/config/api.config';
import { HttpService } from '@/src/services';
import { showNotificationOnRes } from '@/src/utils/notificationUtils';
import { SimpleGrid, TextInput } from '@mantine/core';
import { useForm } from '@mantine/form';
import { useParams } from 'next/navigation';
import { useEffect } from 'react';
// Shape of the supplier form's fields.
export interface SupplierFromValue {
  name: string;
  address: string;
  phone: string;
}

// Payload handed to the submit callback: the current form values plus
// the route params (`id` identifies an existing supplier when editing).
export interface FormProps {
  values: SupplierFromValue;
  params: { id: string };
}
/**
 * Create/edit form for a supplier.
 *
 * When the route has an `id` param the existing supplier is fetched and
 * the fields are pre-filled; otherwise the form starts empty and is
 * reset after a successful create.
 */
export const SupplierFrom = ({
  submitTitle,
  handleFormSubmit,
}: {
  submitTitle: string;
  // The callback's result is awaited and inspected below, so the return
  // type must allow a promise — the original `=> void` contradicted
  // `const response = await handleFormSubmit(...)`.
  handleFormSubmit: ({
    values, // eslint-disable-line no-unused-vars
    params, // eslint-disable-line no-unused-vars
  }: FormProps) => void | Promise<any>;
}) => {
  const params = useParams<{ id: string }>();
  const form = useForm<SupplierFromValue>({
    initialValues: {
      name: '',
      address: '',
      phone: '',
    },
    validate: {
      name: (value) =>
        value.length < 2 ? 'Name must have at least 2 letters' : null,
      address: (value) =>
        value.length < 2 ? 'Address must have at least 2 letters' : null,
      phone: (value) =>
        value.length < 7 ? 'Phone must have at least 7 letters' : null,
    },
  });

  // Load the existing supplier's data when editing.
  const getFieldData = async () => {
    const http = new HttpService();
    const response: any = await http
      .service()
      .get(apiRoutes.suppliers.suppliersById(params.id));
    if (response?.status === 200) {
      form.setValues(response.data);
    }
  };

  // Submit handler: forwards values to the caller, resets the form
  // after a successful create, and shows a notification.
  const handleLocalFormSubmit = async () => {
    const response: any = await handleFormSubmit({
      values: form.values,
      params,
    });
    if (response?.status === 200 && !params.id) {
      form.reset();
    }
    showNotificationOnRes(response);
  };

  useEffect(() => {
    if (params.id) {
      getFieldData();
    }
    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, [params.id]);

  return (
    <div>
      <form onSubmit={form.onSubmit(handleLocalFormSubmit)}>
        <SimpleGrid cols={{ base: 2, sm: 2, lg: 2 }}>
          <TextInput
            label="Name"
            placeholder="Enter your name"
            required
            withAsterisk
            {...form.getInputProps('name')}
          />
          <TextInput
            label="Address"
            placeholder="Enter your address"
            required
            withAsterisk
            {...form.getInputProps('address')}
          />
          <TextInput
            label="Phone"
            placeholder="Enter your phone number"
            required
            withAsterisk
            {...form.getInputProps('phone')}
          />
        </SimpleGrid>
        <FormFooter title={submitTitle}></FormFooter>
      </form>
    </div>
  );
};
|
import { IconBadge } from "@/components/icon-badge";
import { LucideIcon } from "lucide-react";
interface InfoCardProps {
  variant?: "default" | "success";
  numberOfItems: number;
  icon: LucideIcon;
  label: string;
}

/**
 * Small stat card: an icon badge next to a label and a pluralized
 * course count.
 */
const InfoCard = ({
  variant,
  numberOfItems,
  icon: Icon,
  label,
}: InfoCardProps) => {
  // "Course" for exactly one item, "Courses" otherwise.
  const unit = numberOfItems === 1 ? "Course" : "Courses";
  return (
    <div className="border rounded-md flex items-center gap-x-2 p-3">
      <IconBadge variant={variant} icon={Icon} />
      <div>
        <p className="font-medium">{label}</p>
        <p className="text-gray-500 text-sm">
          {numberOfItems} {unit}
        </p>
      </div>
    </div>
  );
};

export default InfoCard;
|
from typing import Optional
import imgviz
import numpy
import torch
from torch import nn, Tensor
from torch.nn import functional as F
from timm.models.layers import trunc_normal_
from detectron2.layers import Conv2d
import fvcore.nn.weight_init as weight_init
from .utils.utils import rand_sample, prepare_features
from .utils.attn import MultiheadAttention
from .utils.attention_data_struct import AttentionDataStruct
from .registry import register_decoder
from ...utils import configurable
from ...modules import PositionEmbeddingSine
from einops import rearrange
from PIL import Image
import numpy as np
def save_colored_mask(mask, save_path):
    """Save an integer label mask as a palette ("P" mode) image.

    Args:
        mask: array of integer class labels (converted to uint8).
        save_path: destination file path.
    """
    palette_image = Image.fromarray(mask.astype(np.uint8), mode='P')
    palette_image.putpalette(imgviz.label_colormap().flatten())
    palette_image.save(save_path)
class SelfAttentionLayer(nn.Module):
    """Transformer self-attention block with residual connection.

    Supports both post-norm (default) and pre-norm ordering, selected by
    ``normalize_before``.
    """

    def __init__(self, d_model, nhead, dropout=0.0,
                 activation="relu", normalize_before=False):
        super().__init__()
        self.self_attn = MultiheadAttention(d_model, nhead, dropout=dropout)
        self.norm = nn.LayerNorm(d_model)
        self.dropout = nn.Dropout(dropout)
        # NOTE(review): self.activation is stored but not used inside this
        # class — confirm whether it can be removed.
        self.activation = _get_activation_fn(activation)
        self.normalize_before = normalize_before
        self._reset_parameters()

    def _reset_parameters(self):
        # Xavier-initialize all weight matrices; 1-D params (biases,
        # LayerNorm scales) keep their default initialization.
        for p in self.parameters():
            if p.dim() > 1:
                nn.init.xavier_uniform_(p)

    def with_pos_embed(self, tensor, pos: Optional[Tensor]):
        # Add positional embeddings when provided.
        return tensor if pos is None else tensor + pos

    def forward_post(self, tgt,
                     tgt_mask: Optional[Tensor] = None,
                     tgt_key_padding_mask: Optional[Tensor] = None,
                     query_pos: Optional[Tensor] = None):
        # Post-norm variant: attention -> residual add -> LayerNorm.
        # Queries/keys carry the positional embedding; values do not.
        q = k = self.with_pos_embed(tgt, query_pos)
        tgt2 = self.self_attn(q, k, value=tgt, attn_mask=tgt_mask,
                              key_padding_mask=tgt_key_padding_mask)[0]
        tgt = tgt + self.dropout(tgt2)
        tgt = self.norm(tgt)
        return tgt

    def forward_pre(self, tgt,
                    tgt_mask: Optional[Tensor] = None,
                    tgt_key_padding_mask: Optional[Tensor] = None,
                    query_pos: Optional[Tensor] = None):
        # Pre-norm variant: LayerNorm -> attention -> residual add.
        tgt2 = self.norm(tgt)
        q = k = self.with_pos_embed(tgt2, query_pos)
        tgt2 = self.self_attn(q, k, value=tgt2, attn_mask=tgt_mask,
                              key_padding_mask=tgt_key_padding_mask)[0]
        tgt = tgt + self.dropout(tgt2)
        return tgt

    def forward(self, tgt,
                tgt_mask: Optional[Tensor] = None,
                tgt_key_padding_mask: Optional[Tensor] = None,
                query_pos: Optional[Tensor] = None):
        """Dispatch to the pre- or post-norm variant."""
        if self.normalize_before:
            return self.forward_pre(tgt, tgt_mask,
                                    tgt_key_padding_mask, query_pos)
        return self.forward_post(tgt, tgt_mask,
                                 tgt_key_padding_mask, query_pos)
class CrossAttentionLayer(nn.Module):
    """Transformer cross-attention block with residual connection.

    Attends from ``tgt`` (queries) to ``memory`` (keys/values) and
    returns both the updated target and the averaged attention weights.
    Supports post-norm (default) and pre-norm ordering via
    ``normalize_before``.
    """

    def __init__(self, d_model, nhead, dropout=0.0,
                 activation="relu", normalize_before=False):
        super().__init__()
        self.multihead_attn = nn.MultiheadAttention(d_model, nhead, dropout=dropout)
        self.norm = nn.LayerNorm(d_model)
        self.dropout = nn.Dropout(dropout)
        # NOTE(review): self.activation is stored but not used inside this
        # class — confirm whether it can be removed.
        self.activation = _get_activation_fn(activation)
        self.normalize_before = normalize_before
        self._reset_parameters()

    def _reset_parameters(self):
        # Xavier-initialize all weight matrices; 1-D params keep defaults.
        for p in self.parameters():
            if p.dim() > 1:
                nn.init.xavier_uniform_(p)

    def with_pos_embed(self, tensor, pos: Optional[Tensor]):
        # Add positional embeddings when provided.
        return tensor if pos is None else tensor + pos

    def forward_post(self, tgt, memory,
                     memory_mask: Optional[Tensor] = None,
                     memory_key_padding_mask: Optional[Tensor] = None,
                     pos: Optional[Tensor] = None,
                     query_pos: Optional[Tensor] = None):
        # Post-norm: cross-attention -> residual add -> LayerNorm.
        tgt2, avg_attn = self.multihead_attn(query=self.with_pos_embed(tgt, query_pos),
                                             key=self.with_pos_embed(memory, pos),
                                             value=memory, attn_mask=memory_mask,
                                             key_padding_mask=memory_key_padding_mask)
        tgt = tgt + self.dropout(tgt2)
        tgt = self.norm(tgt)
        return tgt, avg_attn

    def forward_pre(self, tgt, memory,
                    memory_mask: Optional[Tensor] = None,
                    memory_key_padding_mask: Optional[Tensor] = None,
                    pos: Optional[Tensor] = None,
                    query_pos: Optional[Tensor] = None):
        # Pre-norm: LayerNorm(tgt) -> cross-attention -> residual add.
        tgt2 = self.norm(tgt)
        tgt2, avg_attn = self.multihead_attn(query=self.with_pos_embed(tgt2, query_pos),
                                             key=self.with_pos_embed(memory, pos),
                                             value=memory, attn_mask=memory_mask,
                                             key_padding_mask=memory_key_padding_mask)
        tgt = tgt + self.dropout(tgt2)
        return tgt, avg_attn

    def forward(self, tgt, memory,
                memory_mask: Optional[Tensor] = None,
                memory_key_padding_mask: Optional[Tensor] = None,
                pos: Optional[Tensor] = None,
                query_pos: Optional[Tensor] = None):
        """Dispatch to the pre- or post-norm variant; returns (tgt, attn)."""
        if self.normalize_before:
            return self.forward_pre(tgt, memory, memory_mask,
                                    memory_key_padding_mask, pos, query_pos)
        return self.forward_post(tgt, memory, memory_mask,
                                 memory_key_padding_mask, pos, query_pos)
class FFNLayer(nn.Module):
    """Transformer feed-forward block with a residual connection.

    A two-layer MLP (d_model -> dim_feedforward -> d_model) applied with
    either pre- or post-layer-normalization, selected by
    ``normalize_before``.
    """

    def __init__(self, d_model, dim_feedforward=2048, dropout=0.0,
                 activation="relu", normalize_before=False):
        super().__init__()
        # Implementation of Feedforward model
        self.linear1 = nn.Linear(d_model, dim_feedforward)
        self.dropout = nn.Dropout(dropout)
        self.linear2 = nn.Linear(dim_feedforward, d_model)
        self.norm = nn.LayerNorm(d_model)
        self.activation = _get_activation_fn(activation)
        self.normalize_before = normalize_before
        self._reset_parameters()

    def _reset_parameters(self):
        # Xavier-initialize every weight matrix; 1-D params keep defaults.
        for param in self.parameters():
            if param.dim() > 1:
                nn.init.xavier_uniform_(param)

    def with_pos_embed(self, tensor, pos: Optional[Tensor]):
        # Add a positional embedding when one is supplied.
        return tensor if pos is None else tensor + pos

    def forward_post(self, tgt):
        # Post-norm: MLP -> residual add -> LayerNorm.
        hidden = self.linear2(self.dropout(self.activation(self.linear1(tgt))))
        return self.norm(tgt + self.dropout(hidden))

    def forward_pre(self, tgt):
        # Pre-norm: LayerNorm -> MLP -> residual add.
        normed = self.norm(tgt)
        hidden = self.linear2(self.dropout(self.activation(self.linear1(normed))))
        return tgt + self.dropout(hidden)

    def forward(self, tgt):
        if self.normalize_before:
            return self.forward_pre(tgt)
        return self.forward_post(tgt)
def _get_activation_fn(activation):
"""Return an activation function given a string"""
if activation == "relu":
return F.relu
if activation == "gelu":
return F.gelu
if activation == "glu":
return F.glu
raise RuntimeError(F"activation should be relu/gelu, not {activation}.")
class MLP(nn.Module):
    """Very simple multi-layer perceptron (also called FFN).

    Applies ``num_layers`` linear layers with ReLU between them; the
    final layer has no activation.
    """

    def __init__(self, input_dim, hidden_dim, output_dim, num_layers):
        super().__init__()
        self.num_layers = num_layers
        # Chain of layer widths: input -> hidden * (n-1) -> output.
        widths = [input_dim] + [hidden_dim] * (num_layers - 1) + [output_dim]
        self.layers = nn.ModuleList(
            nn.Linear(n_in, n_out) for n_in, n_out in zip(widths[:-1], widths[1:])
        )

    def forward(self, x):
        last = self.num_layers - 1
        for idx, layer in enumerate(self.layers):
            x = layer(x)
            if idx < last:
                x = F.relu(x)
        return x
class MultiScaleMaskedTransformerDecoder(nn.Module):
_version = 2
@configurable
def __init__(
    self,
    lang_encoder: nn.Module,
    in_channels,
    mask_classification=True,
    *,
    hidden_dim: int,
    dim_proj: int,
    num_queries: int,
    contxt_len: int,
    nheads: int,
    dim_feedforward: int,
    dec_layers: int,
    pre_norm: bool,
    mask_dim: int,
    task_switch: dict,
    enforce_input_project: bool,
    max_spatial_len: int,
    attn_arch: dict,
):
    """Build the multi-scale masked transformer decoder.

    Args:
        lang_encoder: language encoder module (used for output FFNs).
        in_channels: channels of the input feature maps.
        mask_classification: must be True; only mask classification is
            supported.
        hidden_dim: transformer embedding dimension.
        dim_proj: dimension of the class-embedding projection.
        num_queries: number of learned object queries.
        contxt_len: text context length.
        nheads: number of attention heads.
        dim_feedforward: FFN hidden dimension.
        dec_layers: number of decoder layers.
        pre_norm: use pre-norm ordering in the transformer layers.
        mask_dim: mask-embedding output dimension.
        task_switch: flags enabling per-task heads (e.g. 'mask',
            'spatial').
        enforce_input_project: always add 1x1 input projections even when
            in_channels == hidden_dim.
        max_spatial_len: maximum spatial sequence length.
        attn_arch: attention-architecture config for AttentionDataStruct.
    """
    super().__init__()
    assert mask_classification, "Only support mask classification model"
    self.mask_classification = mask_classification
    # Sine positional encoding split across x/y halves of hidden_dim.
    N_steps = hidden_dim // 2
    self.pe_layer = PositionEmbeddingSine(N_steps, normalize=True)
    self.layer_norm = nn.LayerNorm(1024)
    # define Transformer decoder here
    self.num_heads = nheads
    self.num_layers = dec_layers
    self.contxt_len = contxt_len
    self.transformer_self_attention_layers = nn.ModuleList()
    self.transformer_cross_attention_layers = nn.ModuleList()
    self.transformer_ffn_layers = nn.ModuleList()
    # Per-scale 1024 -> 512 projections for reference and source features.
    self.linear_list = nn.ModuleList()
    self.src_linear_list = nn.ModuleList()
    for _ in range(3):
        self.linear_list.append(
            nn.Sequential(nn.Linear(1024, 512))
        )
    for _ in range(3):
        self.src_linear_list.append(
            nn.Sequential(nn.Linear(1024, 512))
        )
    # Final 5-class prediction head over fused features (+5 for the
    # concatenated intermediate predictions — TODO confirm in forward).
    self.final_predict = nn.Sequential(
        nn.BatchNorm2d(1024+5),
        nn.Conv2d(1024+5, 5, 3, 1, 1))
    # Fuses the three 1024-channel scales back into one 1024-channel map.
    self.final_fuse = nn.Sequential(
        nn.Conv2d(1024 * 3, 1024, 3, 1, padding=1),
        nn.BatchNorm2d(1024),
        nn.ReLU())
    # One (self-attn, cross-attn, FFN) triple per decoder layer.
    for _ in range(self.num_layers):
        self.transformer_self_attention_layers.append(
            SelfAttentionLayer(
                d_model=hidden_dim,
                nhead=nheads,
                dropout=0.0,
                normalize_before=pre_norm,
            )
        )
        self.transformer_cross_attention_layers.append(
            CrossAttentionLayer(
                d_model=hidden_dim,
                nhead=nheads,
                dropout=0.0,
                normalize_before=pre_norm,
            )
        )
        self.transformer_ffn_layers.append(
            FFNLayer(
                d_model=hidden_dim,
                dim_feedforward=dim_feedforward,
                dropout=0.0,
                normalize_before=pre_norm,
            )
        )
    self.decoder_norm = nn.LayerNorm(hidden_dim)
    self.num_queries = num_queries
    # Learned query features and their positional embeddings.
    self.query_feat = nn.Embedding(num_queries, hidden_dim)
    self.query_embed = nn.Embedding(num_queries, hidden_dim)
    # Positive/negative indicator embeddings.
    self.pn_indicator = nn.Embedding(2, hidden_dim)
    self.num_feature_levels = 3
    self.level_embed = nn.Embedding(self.num_feature_levels, hidden_dim)
    # Optional per-level 1x1 input projections to hidden_dim.
    self.input_proj = nn.ModuleList()
    for _ in range(self.num_feature_levels):
        if in_channels != hidden_dim or enforce_input_project:
            self.input_proj.append(Conv2d(in_channels, hidden_dim, kernel_size=1))
            weight_init.c2_xavier_fill(self.input_proj[-1])
        else:
            self.input_proj.append(nn.Sequential())
    self.task_switch = task_switch
    self.query_index = {}
    # output FFNs
    self.lang_encoder = lang_encoder
    if self.task_switch['mask']:
        self.mask_embed = MLP(hidden_dim, hidden_dim, mask_dim, 3)
    self.class_embed = nn.Parameter(torch.empty(hidden_dim, dim_proj))
    trunc_normal_(self.class_embed, std=.02)
    if task_switch['spatial']:
        # Per-scale spatial mask-embedding projections.
        self.mask_sptial_embed = nn.ParameterList(
            [nn.Parameter(torch.empty(hidden_dim, hidden_dim)) for x in range(3)])
        trunc_normal_(self.mask_sptial_embed[0], std=.02)
        trunc_normal_(self.mask_sptial_embed[1], std=.02)
        trunc_normal_(self.mask_sptial_embed[2], std=.02)
        self.max_spatial_len = max_spatial_len
        num_spatial_memories = attn_arch['SPATIAL_MEMORIES']
        self.spatial_embed = nn.Embedding(num_spatial_memories, hidden_dim)
        self.spatial_featured = nn.Embedding(num_spatial_memories, hidden_dim)
    # build AttentionDataStruct
    attn_arch['NUM_LAYERS'] = self.num_layers
    self.attention_data = AttentionDataStruct(attn_arch, task_switch)
@classmethod
def from_config(cls, cfg, in_channels, lang_encoder, mask_classification, extra):
    """Translate the nested config mapping into constructor kwargs.

    Args:
        cfg: nested configuration dict (reads MODEL / ATTENTION_ARCH sections).
        in_channels: channel count of incoming pixel features, passed through.
        lang_encoder: language encoder instance, passed through unchanged.
        mask_classification: passed through unchanged.
        extra: dict providing 'task_switch'.

    Returns:
        dict of keyword arguments for ``__init__``.
    """
    enc_cfg = cfg['MODEL']['ENCODER']
    dec_cfg = cfg['MODEL']['DECODER']
    assert dec_cfg['DEC_LAYERS'] >= 1

    return {
        "lang_encoder": lang_encoder,
        "in_channels": in_channels,
        "mask_classification": mask_classification,
        "hidden_dim": dec_cfg['HIDDEN_DIM'],
        "dim_proj": cfg['MODEL']['DIM_PROJ'],
        "num_queries": dec_cfg['NUM_OBJECT_QUERIES'],
        "contxt_len": cfg['MODEL']['TEXT']['CONTEXT_LENGTH'],
        # Transformer parameters:
        "nheads": dec_cfg['NHEADS'],
        "dim_feedforward": dec_cfg['DIM_FEEDFORWARD'],
        # one is subtracted from DEC_LAYERS (kept exactly as in the original mapping)
        "dec_layers": dec_cfg['DEC_LAYERS'] - 1,
        "pre_norm": dec_cfg['PRE_NORM'],
        "enforce_input_project": dec_cfg['ENFORCE_INPUT_PROJ'],
        "mask_dim": enc_cfg['MASK_DIM'],
        "task_switch": extra['task_switch'],
        "max_spatial_len": dec_cfg['MAX_SPATIAL_LEN'],
        # attn data struct
        "attn_arch": cfg['ATTENTION_ARCH'],
    }
def forward(self, ref_information, query_information, extra={}, task='seg'):
    """Cross-attend multi-scale query features to reference features and
    produce per-level mask transfers, then run the prediction heads.

    Args:
        ref_information: tuple ``(ref_multiscale_feature, ref_mask)``; the
            per-level reference features plus a mask holder whose
            ``.tensor`` attribute is read as a (bs, c, h, w) tensor.
        query_information: sequence of per-level query feature maps; its
            length must equal ``self.num_feature_levels``.
        extra: optional dict of task-specific inputs, inspected for keys
            such as 'spatial_query_pos_mask', 'grounding_tokens', etc.
            NOTE(review): mutable default argument — shared across calls;
            only read here, but confirm no caller relies on mutation.
        task: task tag; 'refimg' also enables the spatial branch.

    Returns:
        The dict produced by ``forward_prediction_heads``.
    """
    query_multi_scale = query_information
    ref_multiscale_feature, ref_mask = ref_information
    assert len(query_multi_scale) == self.num_feature_levels;

    # Flags describing which optional inputs are present; used to reset the
    # attention bookkeeping structure for this pass.
    spatial_extra_flag = 'spatial_query_pos_mask' in extra.keys() or task == 'refimg'
    grounding_extra_flag = 'grounding_tokens' in extra.keys()
    visual_extra_flag = 'visual_query_pos' in extra.keys()
    audio_extra_flag = 'audio_tokens' in extra.keys()
    spatial_memory_flag = 'prev_mask' in extra.keys()
    flags = {"spatial": spatial_extra_flag, "grounding": grounding_extra_flag,
             "memories_spatial": spatial_memory_flag, "visual": visual_extra_flag, "audio": audio_extra_flag}
    self.attention_data.reset(flags, task, extra)

    support_list = []
    src = []
    out_predict_list = []

    # Downsample the reference mask to a fixed 32x32 grid and flatten it to
    # (bs, 1024, c) so it can be mixed through attention weights below.
    bs, c, h, w = ref_mask.tensor.shape
    ref_mask_scale = F.interpolate(ref_mask.tensor, (32, 32), mode='nearest')
    ref_mask_scale = ref_mask_scale.reshape(bs, c, -1).permute(0, 2, 1)

    # Flatten each level of reference/query features to the sequence-first
    # (N, B, D) layout consumed by the attention layers.
    for i in range(len(query_multi_scale)):
        ref_feature = ref_multiscale_feature[i]
        bs, d, _, _ = ref_feature.shape
        ref_feature = ref_feature.view(bs, d, -1).permute(0, 2, 1) ### bs, n, d
        # support_sets = self.linear_list[i](ref_feature)
        support_sets = ref_feature
        support_sets = support_sets.permute(1, 0, 2) ##### N, B, D
        support_list.append(support_sets)
        query_feature = query_multi_scale[i].view(bs, d, -1).permute(0, 2, 1)
        src.append(query_feature.permute(1, 0, 2))
        # src.append(self.src_linear_list[i](query_feature).permute(1, 0, 2))
        # ref_norm = ref_feature / torch.norm(ref_feature, dim=-1, keepdim=True)
        # query_norm = query_feature / torch.norm(query_feature, dim=-1, keepdim=True)
        # avg_atten = (ref_norm @ query_norm.transpose(-1, -2)).softmax(dim=-1)
        # print(avg_atten.max())

    # Round-robin over feature levels: cross-attend query tokens to the
    # matching reference tokens, then apply that layer's FFN.
    for i in range(self.num_layers):
        level_index = i % self.num_feature_levels
        src_mask_features = src[level_index]
        spatial_tokens = support_list[level_index]
        output_pos, _ = self.transformer_cross_attention_layers[i](
            src_mask_features, spatial_tokens,
            memory_mask=None,
            memory_key_padding_mask=None,
            pos=None, query_pos=None
        )
        # output_pos = self.layer_norm(output_pos)
        y = self.transformer_ffn_layers[i](output_pos)
        src[level_index] = y

    # Cosine-similarity attention between refined query tokens and reference
    # tokens transfers the downsampled reference mask onto the query grid.
    # The 1e-10 terms guard against division by zero for all-zero tokens.
    for i in range(len(src)):
        src_mask_features = src[i].permute(1, 0, 2)
        spatial_tokens = support_list[i].permute(1, 0, 2)
        src_norm = src_mask_features / (torch.norm(src_mask_features, dim=-1, keepdim=True) + 1e-10)
        spatial_norm = spatial_tokens / (torch.norm(spatial_tokens, dim=-1, keepdim=True) + 1e-10)
        avg_atten = (src_norm @ spatial_norm.transpose(-1, -2))
        avg_atten = avg_atten.softmax(dim=-1)
        out_predict = avg_atten @ ref_mask_scale
        out_predict_list.append(out_predict)

    results = self.forward_prediction_heads(src, out_predict_list)
    return results
def forward_prediction_heads(self, src, out_predict_list):
    """Fuse the three per-level feature maps and mask estimates into the
    final mask prediction.

    Assumes each level holds a square token grid (``num_i == side**2``);
    the inline comments indicate 32x32 per level — TODO confirm upstream.
    NOTE(review): the reshape to (bs, 32, 32, 5) hard-codes 5 mask
    channels and a 32x32 grid; verify against ``final_predict``'s config.

    Args:
        src: list of three (N, bs, dim) token tensors.
        out_predict_list: list of three (bs, N, c) per-level mask estimates.

    Returns:
        dict with key "predictions_mask".
    """
    num_1, bs, dim = src[0].shape
    num_2, _, _ = src[1].shape
    num_3, _, _ = src[2].shape

    # Restore each token sequence to a (bs, dim, H, W) spatial map.
    feature_1 = src[0].permute(1, 2, 0).reshape(bs, dim, int(numpy.sqrt(num_1)),
                                                int(numpy.sqrt(num_1))) ####(32, 32)
    feature_2 = src[1].permute(1, 2, 0).reshape(bs, dim, int(numpy.sqrt(num_2)),
                                                int(numpy.sqrt(num_2))) ####(32, 32)
    feature_3 = src[2].permute(1, 2, 0).reshape(bs, dim, int(numpy.sqrt(num_3)),
                                                int(numpy.sqrt(num_3))) ####(32, 32)

    # Channel-concatenate the three levels and fuse them.
    final_fuse = self.final_fuse(torch.cat((feature_1, feature_2, feature_3), dim=1))
    # Average the three per-level mask estimates.
    out_predict = 1 / 3 * (out_predict_list[0] + out_predict_list[1] + out_predict_list[2])
    outputs_mask = self.final_predict(torch.cat((final_fuse, out_predict.reshape(bs, 32, 32, 5).permute(0, 3, 1, 2)), dim=1))
    results = {
        "predictions_mask": outputs_mask
    }
    return results
@register_decoder
def get_masked_transformer_decoder(cfg, in_channels, lang_encoder, mask_classification, extra):
    """Registry factory: build a MultiScaleMaskedTransformerDecoder from config."""
    decoder = MultiScaleMaskedTransformerDecoder(
        cfg, in_channels, lang_encoder, mask_classification, extra
    )
    return decoder
|
require 'set' # Enumerable#to_set is only defined once the set library is loaded

# Nearest-neighbour search over a collection of token arrays, ranked by
# Jaccard similarity.
class Searcher
  # collection: array of token arrays to search within.
  def initialize(collection)
    @collection = collection
  end

  # Jaccard similarity of two token arrays: |A ∩ B| / |A ∪ B|.
  # Returns a Float in 0.0..1.0. Two empty inputs are identical sets, so
  # they score 1.0 (previously 0/0 produced Float NaN).
  def jaccard_similarity(instance_a, instance_b)
    # instanceA and instanceB are both arrays of tokens
    set_a = instance_a.to_set
    set_b = instance_b.to_set
    union = set_a.union(set_b)
    return 1.0 if union.empty? # guard the 0/0 division
    set_a.intersection(set_b).size.to_f / union.size.to_f
  end

  # Returns the indices (into the collection) of the top_n items most
  # similar to +instance+, best match first.
  def search(instance, top_n: 3)
    jaccard_distances = []
    @collection.each do |item|
      jaccard_distances << jaccard_similarity(instance, item)
    end
    # Pair each similarity with its index, sort ascending, take indices,
    # then reverse so the highest-similarity indices come first.
    jaccard_distances.each_with_index.sort.map(&:last).reverse[0..top_n - 1]
  end
end
|
//Demo can be found here: https://youtu.be/zCAI8u63GBw
//Adding Libraries
#include <MPU6050_tockn.h>
#include <Wire.h>
#include <Adafruit_NeoPixel.h>
//Defining which pin does the NeoPixel strip use and how many LEDs are there in the strip
#define PIN 5
#define n 8
// Strip driver: n LEDs on PIN, GRB colour order, 800 kHz signalling.
Adafruit_NeoPixel strip = Adafruit_NeoPixel(n,PIN, NEO_GRB + NEO_KHZ800);
// IMU driven over the I2C bus (Wire).
MPU6050 mpu6050(Wire);
// Colour components recomputed every loop() from the tilt angle.
int red, blue;
// One-time hardware initialisation.
void setup() {
  //initializing Serial Port, NeoPixel Strip and MPU6050
  Serial.begin(9600);
  Wire.begin();
  strip.begin();
  mpu6050.begin();
  // Samples the gyro to compute offsets — presumably blocks briefly;
  // keep the board still while it runs (TODO confirm in library docs).
  mpu6050.calcGyroOffsets();
}
// Main loop: read tilt, map it to a red/blue blend, paint the strip.
void loop() {
  // Fix: update the sensor BEFORE reading. getAngleX() returns the value
  // cached by the most recent update(), so the original order reported a
  // one-cycle-stale angle (and zeros on the first pass).
  mpu6050.update();

  //Getting X angle info from MPU6050
  int angleX = mpu6050.getAngleX();

  //Mapping X angle degrees to 0, 255 for red and 255, 0 for blue
  red = map(angleX, -100, 100, 0, 255);
  blue = map(angleX, -100, 100, 255, 0);
  /* The reason why we reversed blue's value is because
     we want red LEDs to flash when we tilt MPU6050 to left
     and blue LEDs to flash when we tilt it to the right. */

  //Printing values for X angle, red and blue
  Serial.print("X: ");
  Serial.print(angleX);
  Serial.print(" Red: ");
  Serial.print(red);
  Serial.print(" Blue: ");
  Serial.println(blue);

  //Coloring all LEDs in the strip with the values
  for(int i = 0; i < n; i++){
    strip.setPixelColor(i, strip.Color(red, 0, blue));
    strip.show();
    delay(10);
  }
  delay(10);
}
|
import React from 'react';
import s from './MyPosts.module.css';
import Post from "./Post/Post";
import {MyPostsPropsType} from "./MyPostsContainer";
import {Field, InjectedFormProps, reduxForm} from "redux-form";
import {maxLength, required} from "../../../utils/validators/validators";
import {Textarea} from "../../common/FormsControls/FormsControls";
// Renders the user's post list plus the add-post form.
const MyPosts = (props: MyPostsPropsType) => {
    const handleAddPost = (formData: FormDataType) => {
        props.addPost(formData.newPostText)
    }

    const renderedPosts = props.posts.map(p => (
        <Post key={p.id} id={p.id} message={p.message} likesCount={p.likesCount}/>
    ))

    return (
        <div className='content'>
            <h3>My posts</h3>
            <AddNewPostReduxForm onSubmit={handleAddPost}/>
            <div className='posts'>
                {renderedPosts}
            </div>
        </div>
    )
}
export default MyPosts;

// Shape of the values managed by the add-post redux-form.
type FormDataType = {
    newPostText: string
}

// redux-form validator instance: rejects text longer than 30 characters.
const maxLength30 = maxLength(30)
// Presentational form; redux-form injects handleSubmit and field state.
const AddNewPostForm: React.FC<InjectedFormProps<FormDataType>> = ({handleSubmit}) => (
    <form onSubmit={handleSubmit}>
        <Field
            name={"newPostText"}
            component={Textarea}
            className={s.textarea}
            placeholder={'Write your message'}
            validate={[required, maxLength30]}
        />
        <button>Add post</button>
    </form>
);
const AddNewPostReduxForm = reduxForm<FormDataType>({form: 'addPost'})(AddNewPostForm)
|
import java.util.*;
//VarExp:import//
// <exp>:VarExp ::= <VAR>
/**
 * AST node for a variable-reference expression: a single <VAR> token.
 */
public class VarExp extends Exp {

    /** The variable token this expression refers to. */
    public Token var;

    public VarExp(Token var) {
        this.var = var;
    }

    /**
     * Parses one <VAR> token from the scanner and wraps it in a VarExp.
     * When tracing is enabled, records the nonterminal at the current line.
     */
    public static VarExp parse(Scan scn$, Trace trace$) {
        if (trace$ != null)
            trace$ = trace$.nonterm("<exp>:VarExp", scn$.lno);
        Token var = scn$.match(Token.Val.VAR, trace$);
        return new VarExp(var);
    }

    @Override
    public String toString() {
        return var.toString();
    }

    /** Evaluates the variable by looking up its value in the environment. */
    @Override
    public Val eval( Env env ) {
        return env.applyEnv( var.toString() );
    }

    /**
     * Variables are bound to references in the environment, so to
     * get a reference from a variable, just look it up.
     */
    @Override
    public Ref evalRef( Env env ) {
        return env.applyEnvRef( var.toString() );
    }
}
|
import 'package:cloud_firestore/cloud_firestore.dart';
import '../../../domain/entities/status_entity.dart';
import '../../../domain/entities/status_image_entity.dart';
/// Firestore-backed data model for a user's status, extending the domain
/// [StatusEntity] with (de)serialisation helpers.
class StatusModel extends StatusEntity {
  final String? statusId;
  final String? imageUrl;
  final String? uid;
  final String? username;
  final String? profileUrl;
  final Timestamp? createdAt;
  final String? email;
  final String? caption;
  final List<StatusImageEntity>? stories;

  const StatusModel(
      {this.statusId,
      this.imageUrl,
      this.uid,
      this.username,
      this.profileUrl,
      this.createdAt,
      this.email,
      this.caption,
      this.stories})
      : super(
            statusId: statusId,
            imageUrl: imageUrl,
            uid: uid,
            username: username,
            profileUrl: profileUrl,
            createdAt: createdAt,
            email: email,
            caption: caption,
            stories: stories);

  /// Builds a [StatusModel] from a Firestore document snapshot.
  ///
  /// Fix: a missing/null `stories` field now yields an empty list instead
  /// of throwing a cast error on the unconditional `as List`.
  factory StatusModel.fromSnapshot(DocumentSnapshot snapshot) {
    final snap = snapshot.data() as Map<String, dynamic>;
    final rawStories = snap['stories'] as List? ?? const [];
    final List<StatusImageEntity> storiesData = rawStories
        .map((element) => StatusImageEntity.fromJson(element))
        .toList();
    return StatusModel(
        stories: storiesData,
        statusId: snap['statusId'],
        username: snap['username'],
        email: snap['email'],
        createdAt: snap['createdAt'],
        uid: snap['uid'],
        profileUrl: snap['profileUrl'],
        imageUrl: snap['imageUrl'],
        caption: snap['caption']);
  }

  /// Serialises this model into a Firestore-writable map.
  Map<String, dynamic> toDocument() => {
        "stories": stories?.map((story) => story.toJson()).toList(),
        "statusId": statusId,
        "username": username,
        "email": email,
        "createdAt": createdAt,
        "uid": uid,
        "profileUrl": profileUrl,
        "imageUrl": imageUrl,
        "caption": caption,
      };
}
|
import mongoose from "mongoose";
import { MongoMemoryServer } from "mongodb-memory-server";
import app from "../../src/server";
import request from "supertest";
import Tracking from "../../src/models/Tracking";
import { TrackingStatusEnum } from "../../src/services/shipment/types";
// Creates an admin user and a sample shipment, then returns the shipment's
// tracking code so tests can exercise the public tracking endpoint.
const setupUserAndShipment = async (): Promise<string> => {
  const signupResponse = await request(app).post("/auth/signup").send({
    name: "Test User",
    email: "test@example.com",
    password: "password",
    role: "admin",
  });
  const token = signupResponse.body.token;

  const shipmentResponse = await request(app)
    .post("/shipment/create")
    .send({
      senderName: "John Doe",
      senderAddress: "123 Main St",
      recipientName: "Jane Smith",
      recipientAddress: "456 Elm St",
      description: "Sample shipment description",
    })
    .auth(token, { type: "bearer" });

  const trackingDetails = await Tracking.findOne({
    _id: shipmentResponse.body.trackingId,
  });
  return trackingDetails!.trackingCode;
};
describe("Authentication", () => {
let mongod: MongoMemoryServer;
let trackingCode: string;
beforeAll(async () => {
mongod = await MongoMemoryServer.create();
const mongoUri = mongod.getUri();
await mongoose.connect(mongoUri);
trackingCode = await setupUserAndShipment();
});
afterAll(async () => {
await mongoose.disconnect();
await mongod.stop();
});
it("user should able to track shipment", async () => {
const response = await request(app).get("/track").query({
trackingCode,
});
expect(response.status).toBe(200);
expect(response.body).toHaveProperty("status");
expect(response.body).toHaveProperty("status", TrackingStatusEnum.PENDING);
});
});
|
"""Basic Module for Semantic Segmentation"""
from mindspore import nn, ops
__all__ = ['_ConvBNPReLU', '_ConvBN', '_BNPReLU', '_ConvBNReLU', '_DepthwiseConv', 'InvertedResidual']
class _ConvBNReLU(nn.Cell):
    """Conv2d -> BatchNorm -> ReLU (or ReLU6 when ``relu6=True``).

    Fix: the ``pad_mode`` parameter was accepted but ignored — the conv was
    always built with a hard-coded ``pad_mode='pad'``. It is now passed
    through; the default ('pad') preserves the previous behaviour.
    """

    def __init__(self, in_channels, out_channels, kernel_size, stride=1, pad_mode='pad', padding=0,
                 dilation=1, groups=1, relu6=False, norm_layer=nn.BatchNorm2d, **kwargs):
        super(_ConvBNReLU, self).__init__()
        self.conv = nn.Conv2d(in_channels, out_channels, kernel_size, stride, pad_mode=pad_mode,
                              padding=padding, dilation=dilation, group=groups, has_bias=False)
        self.bn = norm_layer(out_channels)
        self.relu = nn.ReLU6() if relu6 else nn.ReLU()

    def construct(self, x):
        x = self.conv(x)
        x = self.bn(x)
        x = self.relu(x)
        return x
class _ConvBNPReLU(nn.Cell):
    """Conv2d -> BatchNorm -> PReLU activation."""

    def __init__(self, in_channels, out_channels, kernel_size, stride=1, padding=0,
                 dilation=1, groups=1, norm_layer=nn.BatchNorm2d, **kwargs):
        super(_ConvBNPReLU, self).__init__()
        self.conv = nn.Conv2d(in_channels, out_channels, kernel_size, stride, pad_mode='pad',
                              padding=padding, dilation=dilation, group=groups, has_bias=False)
        self.bn = norm_layer(out_channels)
        self.prelu = nn.PReLU(out_channels)

    def construct(self, x):
        # Convolution, normalisation, then activation, as one chained call.
        return self.prelu(self.bn(self.conv(x)))
class _ConvBN(nn.Cell):
    """Conv2d followed by batch normalisation (no activation)."""

    def __init__(self, in_channels, out_channels, kernel_size, stride=1, padding=0,
                 dilation=1, groups=1, norm_layer=nn.BatchNorm2d, **kwargs):
        super(_ConvBN, self).__init__()
        self.conv = nn.Conv2d(in_channels, out_channels, kernel_size, stride, pad_mode='pad',
                              padding=padding, dilation=dilation, group=groups, has_bias=False)
        self.bn = norm_layer(out_channels)

    def construct(self, x):
        return self.bn(self.conv(x))
class _BNPReLU(nn.Cell):
    """Batch normalisation followed by a PReLU activation."""

    def __init__(self, out_channels, norm_layer=nn.BatchNorm2d, **kwargs):
        super(_BNPReLU, self).__init__()
        self.bn = norm_layer(out_channels)
        self.prelu = nn.PReLU(out_channels)

    def construct(self, x):
        return self.prelu(self.bn(x))
# -----------------------------------------------------------------
# For PSPNet
# -----------------------------------------------------------------
class _PSPModule(nn.Cell):
    """Pyramid pooling module (PSPNet): pools the input to several grid
    sizes, projects each with a 1x1 conv, upsamples back, and concatenates
    everything with the original features.

    Fixes relative to the original:
    - ``self.avgpool.append`` referenced a non-existent attribute
      (the list is ``self.avgpools``) and raised AttributeError.
    - ``for (avgpool, conv) in enumerate(zip(...))`` unpacked the enumerate
      index into ``avgpool`` and the (pool, conv) tuple into ``conv``;
      plain ``zip`` is what was intended.
    - ``x.size()[2:]`` is a PyTorch idiom; MindSpore tensors expose the
      spatial dims via ``x.shape``.
    """

    def __init__(self, in_channels, sizes=(1, 2, 3, 6), **kwargs):
        super(_PSPModule, self).__init__()
        out_channels = int(in_channels / 4)
        self.avgpools = nn.CellList()
        self.convs = nn.CellList()
        for size in sizes:
            self.avgpools.append(nn.AdaptiveAvgPool2d(size))
            self.convs.append(_ConvBNReLU(in_channels, out_channels, 1, **kwargs))

    def construct(self, x):
        size = x.shape[2:]
        feats = [x]
        for avgpool, conv in zip(self.avgpools, self.convs):
            # Pool -> project -> upsample back to the input's spatial size.
            feats.append(ops.interpolate(conv(avgpool(x)), size, mode='bilinear', align_corners=True))
        return ops.cat(feats, axis=1)
# -----------------------------------------------------------------
# For MobileNet
# -----------------------------------------------------------------
class _DepthwiseConv(nn.Cell):
    """conv_dw in MobileNet: depthwise 3x3 followed by pointwise 1x1."""

    def __init__(self, in_channels, out_channels, stride, norm_layer=nn.BatchNorm2d, **kwargs):
        super(_DepthwiseConv, self).__init__()
        depthwise = _ConvBNReLU(in_channels, in_channels, 3, stride, pad_mode='pad', padding=1,
                                groups=in_channels, norm_layer=norm_layer)
        pointwise = _ConvBNReLU(in_channels, out_channels, 1, norm_layer=norm_layer)
        self.conv = nn.SequentialCell(depthwise, pointwise)

    def construct(self, x):
        return self.conv(x)
# -----------------------------------------------------------------
# For MobileNetV2
# -----------------------------------------------------------------
class InvertedResidual(nn.Cell):
    """MobileNetV2 inverted-residual block: optional pointwise expansion,
    depthwise conv, then a linear pointwise projection; a residual shortcut
    is used when stride is 1 and the channel count is unchanged."""

    def __init__(self, in_channels, out_channels, stride, expand_ratio, norm_layer=nn.BatchNorm2d, **kwargs):
        super(InvertedResidual, self).__init__()
        assert stride in [1, 2]
        self.use_res_connect = stride == 1 and in_channels == out_channels
        inter_channels = int(round(in_channels * expand_ratio))

        blocks = []
        if expand_ratio != 1:
            # pw: pointwise expansion
            blocks.append(_ConvBNReLU(in_channels, inter_channels, 1, relu6=True, norm_layer=norm_layer))
        # dw: depthwise convolution
        blocks.append(_ConvBNReLU(inter_channels, inter_channels, 3, stride, pad_mode='pad', padding=1,
                                  groups=inter_channels, relu6=True, norm_layer=norm_layer))
        # pw-linear: projection without activation
        blocks.append(nn.Conv2d(inter_channels, out_channels, 1, has_bias=False))
        blocks.append(norm_layer(out_channels))
        self.conv = nn.SequentialCell(*blocks)

    def construct(self, x):
        out = self.conv(x)
        if self.use_res_connect:
            return x + out
        return out
if __name__ == '__main__':
    # Smoke test: run a strided inverted-residual block on a random input
    # and print the model structure.
    sample = ops.randn(1, 32, 64, 64)
    block = InvertedResidual(32, 64, 2, 1)
    result = block(sample)
    print(block)
|
import React, { useEffect } from "react";
import { LinkContainer } from "react-router-bootstrap";
import { Table, Button } from "react-bootstrap";
import { useSelector, useDispatch } from "react-redux";
import Loader from "../component/Loader.js";
import Message from "../component/Message.js";
import {ListAdminOrder } from "../action/orderAction.js";
const OrderListScreens = ({ history }) => {
const dispatch = useDispatch();
const orderList = useSelector((state) => state.orderList);
const { loading, error, ordersAdmin } = orderList;
const userLogin = useSelector((state) => state.userLogin);
const { userInfo } = userLogin;
useEffect(() => {
if(userInfo && userInfo.isAdmin){
dispatch(ListAdminOrder());
}else{
history.push('/login');
}
}, [dispatch,userInfo,history]);
return (
<>
<h1>Orders</h1>
{loading ? (
<Loader />
) : error ? (
<Message variant="danger">{error}</Message>
) : (
<Table striped bordered hover responsive className="table-sm">
<thead>
<tr>
<th>ID</th>
<th>USER</th>
<th>DATE</th>
<th>TOTAL PRICE</th>
<th>PAID</th>
<th>DELIVERED</th>
<th>DETAILS</th>
</tr>
</thead>
<tbody>
{ordersAdmin.map(order => (
<tr key={order._id}>
<td>{order._id}</td>
<td>{order.user && order.user.name}</td>
<td>
{order.createdAt.substring(0,10)}
</td>
<td>${order.totalPrice}</td>
<td>
{order.isPaid ? (
order.paidAt.substring(0,10)
) : (
<i className="fas fa-times" style={{ color: "red" }}></i>
)}
</td>
<td>
{order.isDelivered ? (
order.DeliverAt.substring(0,10)
) : (
<i className="fas fa-times" style={{ color: "red" }}></i>
)}
</td>
<td>
<LinkContainer to={`/order/${order._id}`}>
<Button variant="light" className="btn-sm">
Details
</Button>
</LinkContainer>
</td>
</tr>
))}
</tbody>
</Table>
)}
</>
);
};
export default OrderListScreens;
|
pragma solidity ^0.8.17;
pragma experimental ABIEncoderV2;
// SPDX-License-Identifier:MIT
/// Minimal ERC-20 standard interface (functions + events).
interface IERC20 {
    /// Total number of tokens in existence.
    function totalSupply() external view returns (uint256);

    /// Token balance of `account`.
    function balanceOf(address account) external view returns (uint256);

    /// Moves `amount` tokens from the caller to `recipient`.
    function transfer(address recipient, uint256 amount)
        external
        returns (bool);

    /// Remaining tokens `spender` may spend on behalf of `owner`.
    function allowance(address owner, address spender)
        external
        view
        returns (uint256);

    /// Sets `spender`'s allowance over the caller's tokens to `amount`.
    function approve(address spender, uint256 amount) external returns (bool);

    /// Moves `amount` from `sender` to `recipient` using the caller's allowance.
    function transferFrom(
        address sender,
        address recipient,
        uint256 amount
    ) external returns (bool);

    /// Emitted when `value` tokens move from `from` to `to`.
    event Transfer(address indexed from, address indexed to, uint256 value);

    /// Emitted when `owner` sets `spender`'s allowance to `value`.
    event Approval(
        address indexed owner,
        address indexed spender,
        uint256 value
    );
}
/// Wrappers around msg.sender / msg.data for use by derived contracts.
abstract contract Context {
    function _msgSender() internal view virtual returns (address payable) {
        return payable(msg.sender);
    }

    function _msgData() internal view virtual returns (bytes memory) {
        this; // silence state mutability warning without generating bytecode - see https://github.com/ethereum/solidity/issues/2691
        return msg.data;
    }
}
/// Single-owner access control via the onlyOwner modifier.
/// NOTE(review): the constructor hard-codes the owner to a fixed address
/// rather than msg.sender — confirm this is intentional.
contract Ownable is Context {
    address payable private _owner;
    // _previousOwner and _lockTime are not referenced by any visible function.
    address payable private _previousOwner;
    uint256 private _lockTime;

    event OwnershipTransferred(
        address indexed previousOwner,
        address indexed newOwner
    );

    constructor() {
        _owner = payable(0x44492C089a11D4d60B8D0EB3f8eC0EFA14F88f6b);
        emit OwnershipTransferred(address(0), _owner);
    }

    /// Current owner address.
    function owner() public view returns (address) {
        return _owner;
    }

    /// Restricts the decorated function to the owner.
    modifier onlyOwner() {
        require(_owner == _msgSender(), "Ownable: caller is not the owner");
        _;
    }

    /// Permanently gives up ownership (owner becomes the zero address).
    function renounceOwnership() public virtual onlyOwner {
        emit OwnershipTransferred(_owner, address(0));
        _owner = payable(address(0));
    }

    /// Transfers ownership to `newOwner` (must be non-zero).
    function transferOwnership(address payable newOwner)
        public
        virtual
        onlyOwner
    {
        require(
            newOwner != address(0),
            "Ownable: new owner is the zero address"
        );
        emit OwnershipTransferred(_owner, newOwner);
        _owner = newOwner;
    }
}
/// PancakeSwap (UniswapV2-style) pair factory interface.
interface IPancakeFactory {
    event PairCreated(
        address indexed token0,
        address indexed token1,
        address pair,
        uint256
    );

    function feeTo() external view returns (address);

    function feeToSetter() external view returns (address);

    /// Pair address for a token pair (zero address when none exists).
    function getPair(address tokenA, address tokenB)
        external
        view
        returns (address pair);

    function allPairs(uint256) external view returns (address pair);

    function allPairsLength() external view returns (uint256);

    /// Deploys and returns the pair contract for tokenA/tokenB.
    function createPair(address tokenA, address tokenB)
        external
        returns (address pair);

    function setFeeTo(address) external;

    function setFeeToSetter(address) external;
}
/// PancakeSwap (UniswapV2-style) liquidity pair interface.
interface IPancakePair {
    // ---- ERC-20 surface of the LP token ----
    event Approval(
        address indexed owner,
        address indexed spender,
        uint256 value
    );

    event Transfer(address indexed from, address indexed to, uint256 value);

    function name() external pure returns (string memory);

    function symbol() external pure returns (string memory);

    function decimals() external pure returns (uint8);

    function totalSupply() external view returns (uint256);

    function balanceOf(address owner) external view returns (uint256);

    function allowance(address owner, address spender)
        external
        view
        returns (uint256);

    function approve(address spender, uint256 value) external returns (bool);

    function transfer(address to, uint256 value) external returns (bool);

    function transferFrom(
        address from,
        address to,
        uint256 value
    ) external returns (bool);

    // ---- EIP-2612-style permit (gasless approvals) ----
    function DOMAIN_SEPARATOR() external view returns (bytes32);

    function PERMIT_TYPEHASH() external pure returns (bytes32);

    function nonces(address owner) external view returns (uint256);

    function permit(
        address owner,
        address spender,
        uint256 value,
        uint256 deadline,
        uint8 v,
        bytes32 r,
        bytes32 s
    ) external;

    // ---- AMM pool events ----
    event Mint(address indexed sender, uint256 amount0, uint256 amount1);

    event Burn(
        address indexed sender,
        uint256 amount0,
        uint256 amount1,
        address indexed to
    );

    event Swap(
        address indexed sender,
        uint256 amount0In,
        uint256 amount1In,
        uint256 amount0Out,
        uint256 amount1Out,
        address indexed to
    );

    event Sync(uint112 reserve0, uint112 reserve1);

    // ---- pool state & actions ----
    function MINIMUM_LIQUIDITY() external pure returns (uint256);

    function factory() external view returns (address);

    function token0() external view returns (address);

    function token1() external view returns (address);

    function getReserves()
        external
        view
        returns (
            uint112 reserve0,
            uint112 reserve1,
            uint32 blockTimestampLast
        );

    function price0CumulativeLast() external view returns (uint256);

    function price1CumulativeLast() external view returns (uint256);

    function kLast() external view returns (uint256);

    function mint(address to) external returns (uint256 liquidity);

    function burn(address to)
        external
        returns (uint256 amount0, uint256 amount1);

    function swap(
        uint256 amount0Out,
        uint256 amount1Out,
        address to,
        bytes calldata data
    ) external;

    function skim(address to) external;

    function sync() external;

    function initialize(address, address) external;
}
/// PancakeSwap (UniswapV2-style) router v1: liquidity management, swaps,
/// and pricing helpers.
interface IPancakeRouter01 {
    function factory() external pure returns (address);

    function WETH() external pure returns (address);

    // ---- liquidity management ----
    function addLiquidity(
        address tokenA,
        address tokenB,
        uint256 amountADesired,
        uint256 amountBDesired,
        uint256 amountAMin,
        uint256 amountBMin,
        address to,
        uint256 deadline
    )
        external
        returns (
            uint256 amountA,
            uint256 amountB,
            uint256 liquidity
        );

    function addLiquidityETH(
        address token,
        uint256 amountTokenDesired,
        uint256 amountTokenMin,
        uint256 amountETHMin,
        address to,
        uint256 deadline
    )
        external
        payable
        returns (
            uint256 amountToken,
            uint256 amountETH,
            uint256 liquidity
        );

    function removeLiquidity(
        address tokenA,
        address tokenB,
        uint256 liquidity,
        uint256 amountAMin,
        uint256 amountBMin,
        address to,
        uint256 deadline
    ) external returns (uint256 amountA, uint256 amountB);

    function removeLiquidityETH(
        address token,
        uint256 liquidity,
        uint256 amountTokenMin,
        uint256 amountETHMin,
        address to,
        uint256 deadline
    ) external returns (uint256 amountToken, uint256 amountETH);

    function removeLiquidityWithPermit(
        address tokenA,
        address tokenB,
        uint256 liquidity,
        uint256 amountAMin,
        uint256 amountBMin,
        address to,
        uint256 deadline,
        bool approveMax,
        uint8 v,
        bytes32 r,
        bytes32 s
    ) external returns (uint256 amountA, uint256 amountB);

    function removeLiquidityETHWithPermit(
        address token,
        uint256 liquidity,
        uint256 amountTokenMin,
        uint256 amountETHMin,
        address to,
        uint256 deadline,
        bool approveMax,
        uint8 v,
        bytes32 r,
        bytes32 s
    ) external returns (uint256 amountToken, uint256 amountETH);

    // ---- swaps ----
    function swapExactTokensForTokens(
        uint256 amountIn,
        uint256 amountOutMin,
        address[] calldata path,
        address to,
        uint256 deadline
    ) external returns (uint256[] memory amounts);

    function swapTokensForExactTokens(
        uint256 amountOut,
        uint256 amountInMax,
        address[] calldata path,
        address to,
        uint256 deadline
    ) external returns (uint256[] memory amounts);

    function swapExactETHForTokens(
        uint256 amountOutMin,
        address[] calldata path,
        address to,
        uint256 deadline
    ) external payable returns (uint256[] memory amounts);

    function swapTokensForExactETH(
        uint256 amountOut,
        uint256 amountInMax,
        address[] calldata path,
        address to,
        uint256 deadline
    ) external returns (uint256[] memory amounts);

    function swapExactTokensForETH(
        uint256 amountIn,
        uint256 amountOutMin,
        address[] calldata path,
        address to,
        uint256 deadline
    ) external returns (uint256[] memory amounts);

    function swapETHForExactTokens(
        uint256 amountOut,
        address[] calldata path,
        address to,
        uint256 deadline
    ) external payable returns (uint256[] memory amounts);

    // ---- pricing helpers ----
    function quote(
        uint256 amountA,
        uint256 reserveA,
        uint256 reserveB
    ) external pure returns (uint256 amountB);

    function getAmountOut(
        uint256 amountIn,
        uint256 reserveIn,
        uint256 reserveOut
    ) external pure returns (uint256 amountOut);

    function getAmountIn(
        uint256 amountOut,
        uint256 reserveIn,
        uint256 reserveOut
    ) external pure returns (uint256 amountIn);

    function getAmountsOut(uint256 amountIn, address[] calldata path)
        external
        view
        returns (uint256[] memory amounts);

    function getAmountsIn(uint256 amountOut, address[] calldata path)
        external
        view
        returns (uint256[] memory amounts);
}
/// Router v2 additions: variants that support fee-on-transfer tokens.
interface IPancakeRouter02 is IPancakeRouter01 {
    function removeLiquidityETHSupportingFeeOnTransferTokens(
        address token,
        uint256 liquidity,
        uint256 amountTokenMin,
        uint256 amountETHMin,
        address to,
        uint256 deadline
    ) external returns (uint256 amountETH);

    function removeLiquidityETHWithPermitSupportingFeeOnTransferTokens(
        address token,
        uint256 liquidity,
        uint256 amountTokenMin,
        uint256 amountETHMin,
        address to,
        uint256 deadline,
        bool approveMax,
        uint8 v,
        bytes32 r,
        bytes32 s
    ) external returns (uint256 amountETH);

    function swapExactTokensForTokensSupportingFeeOnTransferTokens(
        uint256 amountIn,
        uint256 amountOutMin,
        address[] calldata path,
        address to,
        uint256 deadline
    ) external;

    function swapExactETHForTokensSupportingFeeOnTransferTokens(
        uint256 amountOutMin,
        address[] calldata path,
        address to,
        uint256 deadline
    ) external payable;

    function swapExactTokensForETHSupportingFeeOnTransferTokens(
        uint256 amountIn,
        uint256 amountOutMin,
        address[] calldata path,
        address to,
        uint256 deadline
    ) external;
}
contract Steven is Context, IERC20, Ownable {
// ---- balances & allowances (reflection model: r-space vs t-space) ----
mapping(address => uint256) private _rOwned;
mapping(address => uint256) private _tOwned;
mapping(address => mapping(address => uint256)) private _allowances;
// ---- address classification flags ----
mapping(address => bool) private _isBlacklisted;
mapping(address => bool) private _antiBot;
mapping(address => bool) private _isExcludedFromFee;
mapping(address => bool) private _isExcluded;
address[] private _excluded;
// ---- supply ----
uint256 private constant MAX = ~uint256(0);
uint256 private _tTotal = 1_000_000_000 ether; // 1 billion total supply
uint256 private _rTotal = (MAX - (MAX % _tTotal));
uint256 private _tFeeTotal;
// ---- token identity ----
string private _name = " Steven Token"; // token name
string private _symbol = "SJT"; // token ticker
uint8 private _decimals = 18; // token decimals
// ---- AMM wiring & fee wallets ----
IPancakeRouter02 public pancakeRouter;
address public pancakePair;
address payable public wheelWallet;
address payable public creatorWallet;
address payable public burnwallet;
uint256 minTokenNumberToSell = 10000 ether; // 10000 max tx amount will trigger swap and add liquidity
// Fee values below are expressed in tenths of a percent (150 => 15%).
uint256 public maxFee = 150; // 15% max fees limit per transaction
uint256 public maxWhaleFee = 490; // 49% max fees limit per transaction for whales
uint256 public maxTxAmountBuy = (_tTotal * 5) / 100; // 5% max transaction amount for buy
uint256 public maxTxAmountSell = (_tTotal * 5) / 1000; // 0.5% max transaction amount for sell
uint256 public launchedAt = block.timestamp;
uint256 public lastBurn = block.timestamp;
uint256 public burned = 0;
uint256 public burnDuration = 730 days; // 2 years
uint256 public burnRate = 1 ether; // 1 token per second
bool public swapAndLiquifyEnabled = false; // should be true to turn on to liquidate the pool
bool public reflectionFeesdiabled = false; // should be false to charge fee
bool inSwapAndLiquify = false;
// buy tax fee
uint256 public reflectionFeeOnBuying = 50; // 5% will be distributed among holder as token divideneds
uint256 public liquidityFeeOnBuying = 50; // 5% will be added to the liquidity pool
uint256 public wheelWalletFeeOnBuying = 40; // 4% will go to the wheelWallet address
uint256 public creatorwalletFeeOnBuying = 10; // 1% will go to the creatorWallet address
// sell tax fee
uint256 public reflectionFeeOnSelling = 50; // 5% will be distributed among holder as token divideneds
uint256 public liquidityFeeOnSelling = 50; // 5% will be added to the liquidity pool
uint256 public wheelWalletFeeOnSelling = 40; // 4% will go to the market address
uint256 public creatorwalletFeeOnSelling = 10; // 1% will go to the creatorWallet address
// whale tax fee
uint256 public reflectionFeeOnWhale = 50; // 5% will be distributed among holder as token divideneds
uint256 public liquidityFeeOnWhale = 350; // 35% will be added to the liquidity pool
uint256 public wheelWalletFeeOnWhale = 40; // 4% will go to the wheelWallet address
uint256 public creatorwalletFeeOnWhale = 10; // 1% will go to the creatorWallet address
// normal tax fee
uint256 public reflectionFee = 0; // 0% will be distributed among holder as token divideneds
uint256 public liquidityFee = 0; // 0% will be added to the liquidity pool
uint256 public wheelWalletFee = 0; // 0% will go to the market address
uint256 public creatorwalletFee = 0; // 0% will go to the creatorWallet address
// for smart contract use: the fee set currently in effect for a transfer
uint256 private _currentreflectionFee;
uint256 private _currentLiquidityFee;
uint256 private _currentwheelWalletFee;
uint256 private _currentcreatorwalletFee;

event SwapAndLiquifyEnabledUpdated(bool enabled);
event SwapAndLiquify(
    uint256 tokensSwapped,
    uint256 ethReceived,
    uint256 tokensIntoLiqudity
);

// Reentrancy guard around swap-and-liquify operations.
modifier lockTheSwap() {
    inSwapAndLiquify = true;
    _;
    inSwapAndLiquify = false;
}
constructor() {
    // Assign the entire reflected supply to the owner set in Ownable.
    _rOwned[owner()] = _rTotal;
    wheelWallet = payable(0xe6ea3Cde2d567993E80cc44Ce0c308Da94965F3f);
    creatorWallet = payable(0x990c8121ec42C9b7a3049Df585352759c2eA149e);
    burnwallet = payable(0xa0c219817Cc44fff6dc31E7879bbB7a8b2e0A483);
    // IPancakeRouter02 _pancakeRouter = IPancakeRouter02(0x10ED43C718714eb63d5aA57B78B54704E256024E);//mainnet
    // NOTE(review): the TESTNET router address is active below — switch to
    // the mainnet line above before production deployment.
    IPancakeRouter02 _pancakeRouter = IPancakeRouter02(
        0x9Ac64Cc6e4415144C455BD8E4837Fea55603e5c3
    ); //testnet
    // Create a pancake pair for this new token
    pancakePair = IPancakeFactory(_pancakeRouter.factory()).createPair(
        address(this),
        _pancakeRouter.WETH()
    );
    // set the rest of the contract variables
    pancakeRouter = _pancakeRouter;
    //exclude owner and this contract from fee
    _isExcludedFromFee[owner()] = true;
    _isExcludedFromFee[address(this)] = true;
    emit Transfer(address(0), owner(), _tTotal);
}
/// ERC-20 token name.
function name() external view returns (string memory) {
    return _name;
}
/// ERC-20 token ticker symbol.
function symbol() external view returns (string memory) {
    return _symbol;
}
/// ERC-20 decimal places.
function decimals() external view returns (uint8) {
    return _decimals;
}
/// Total token (t-space) supply.
function totalSupply() external view override returns (uint256) {
    return _tTotal;
}
/// Balance of `account`: stored directly for reward-excluded accounts,
/// otherwise derived from the reflected (r-space) balance.
function balanceOf(address account) public view override returns (uint256) {
    if (_isExcluded[account]) return _tOwned[account];
    return tokenFromReflection(_rOwned[account]);
}
function transfer(address recipient, uint256 amount)
public
override
returns (bool)
{
_transfer(_msgSender(), recipient, amount);
return true;
}
function allowance(address owner, address spender)
public
view
override
returns (uint256)
{
return _allowances[owner][spender];
}
function approve(address spender, uint256 amount)
public
override
returns (bool)
{
_approve(_msgSender(), spender, amount);
return true;
}
/**
 * @dev ERC20 transferFrom: moves `amount` from `sender` to `recipient` using
 * the caller's allowance, then writes back the reduced allowance.
 */
function transferFrom(
    address sender,
    address recipient,
    uint256 amount
) public override returns (bool) {
    uint256 currentAllowance = _allowances[sender][_msgSender()];
    // Explicit check gives a descriptive revert reason instead of a bare
    // Solidity 0.8.x arithmetic-underflow panic when allowance is too small.
    require(
        currentAllowance >= amount,
        "ERC20: transfer amount exceeds allowance"
    );
    _transfer(sender, recipient, amount);
    _approve(sender, _msgSender(), currentAllowance - amount);
    return true;
}
/// @dev Atomically increases the caller's allowance for `spender`.
function increaseAllowance(address spender, uint256 addedValue)
    public
    virtual
    returns (bool)
{
    _approve(
        _msgSender(),
        spender,
        _allowances[_msgSender()][spender] + (addedValue)
    );
    return true;
}
/// @dev Atomically decreases the caller's allowance for `spender`.
/// Reverts with a descriptive reason (instead of a bare underflow panic)
/// when `subtractedValue` exceeds the current allowance.
function decreaseAllowance(address spender, uint256 subtractedValue)
    public
    virtual
    returns (bool)
{
    uint256 currentAllowance = _allowances[_msgSender()][spender];
    require(
        currentAllowance >= subtractedValue,
        "ERC20: decreased allowance below zero"
    );
    _approve(_msgSender(), spender, currentAllowance - subtractedValue);
    return true;
}
// True if `account` is excluded from reflection rewards.
function isExcludedFromReward(address account) public view returns (bool) {
return _isExcluded[account];
}
// Total tokens ever collected as reflection fees.
function totalFees() public view returns (uint256) {
return _tFeeTotal;
}
// Voluntarily donates `tAmount` of the caller's balance to the reflection pool,
// distributing it pro-rata among holders. Excluded accounts cannot call this
// because their balances are not tracked in reflected units.
function deliver(uint256 tAmount) public {
address sender = _msgSender();
require(
!_isExcluded[sender],
"Excluded addresses cannot call this function"
);
uint256 rAmount = tAmount * (_getRate());
_rOwned[sender] = _rOwned[sender] - (rAmount);
_rTotal = _rTotal - (rAmount);
_tFeeTotal = _tFeeTotal + (tAmount);
}
// Converts a token amount to reflected units at the current rate, optionally
// net of the currently-active transfer fee.
function reflectionFromToken(uint256 tAmount, bool deductTransferFee)
public
view
returns (uint256)
{
require(tAmount <= _tTotal, "Amount must be less than supply");
if (!deductTransferFee) {
uint256 rAmount = tAmount * (_getRate());
return rAmount;
} else {
uint256 rAmount = tAmount * (_getRate());
uint256 rTransferAmount = rAmount -
(totalFeePerTx(tAmount) * (_getRate()));
return rTransferAmount;
}
}
// Converts a reflected-unit amount back to tokens at the current rate.
function tokenFromReflection(uint256 rAmount)
public
view
returns (uint256)
{
require(
rAmount <= _rTotal,
"Amount must be less than total reflections"
);
uint256 currentRate = _getRate();
return rAmount / (currentRate);
}
// Excludes `account` from reflection rewards, snapshotting its current balance
// into _tOwned. NOTE(review): _excluded grows with each call and is iterated by
// _getCurrentSupply on every transfer; a long list raises gas for everyone.
function excludeFromReward(address account) public onlyOwner {
require(!_isExcluded[account], "Account is already excluded");
if (_rOwned[account] > 0) {
_tOwned[account] = tokenFromReflection(_rOwned[account]);
}
_isExcluded[account] = true;
_excluded.push(account);
}
// Re-includes a previously reward-excluded account.
// NOTE(review): _rOwned is recomputed as _tOwned * current rate rather than
// restored, so the account's effective balance can shift relative to what it
// held before exclusion — confirm this matches the intended reflection design.
function includeInReward(address account) external onlyOwner {
    // FIX: the revert reason previously read "Account is already excluded",
    // which is the message for the opposite condition (see excludeFromReward).
    require(_isExcluded[account], "Account is not excluded");
    for (uint256 i = 0; i < _excluded.length; i++) {
        if (_excluded[i] == account) {
            // Swap-and-pop removal from the excluded list.
            _excluded[i] = _excluded[_excluded.length - 1];
            _rOwned[account] = _tOwned[account] * (_getRate());
            _tOwned[account] = 0;
            _isExcluded[account] = false;
            _excluded.pop();
            break;
        }
    }
}
// Exempts `account` from transfer fees.
function excludeFromFee(address account) public onlyOwner {
_isExcludedFromFee[account] = true;
}
// Blocks the given accounts from sending or receiving tokens.
// NOTE(review): owner-controlled blacklisting is a strong centralization lever;
// transfers can be frozen per-address at any time.
function blacklist(address[] memory accounts) public onlyOwner {
for (uint256 i = 0; i < accounts.length; i++) {
_isBlacklisted[accounts[i]] = true;
}
}
// Flags accounts as bots; flagged sellers are charged the whale fee set.
function SnipeBot(address[] memory accounts) public onlyOwner {
for (uint256 i = 0; i < accounts.length; i++) {
_antiBot[accounts[i]] = true;
}
}
// Clears the bot flag for the given accounts.
function removeBot(address[] memory accounts) public onlyOwner {
for (uint256 i = 0; i < accounts.length; i++) {
_antiBot[accounts[i]] = false;
}
}
// Removes the given accounts from the blacklist.
function unBlacklist(address[] memory accounts) public onlyOwner {
for (uint256 i = 0; i < accounts.length; i++) {
_isBlacklisted[accounts[i]] = false;
}
}
// Re-enables transfer fees for `account`.
function includeInFee(address account) public onlyOwner {
_isExcludedFromFee[account] = false;
}
// Contract token balance threshold that triggers swap-and-liquify.
function setMinTokenNumberToSell(uint256 _amount) public onlyOwner {
minTokenNumberToSell = _amount;
}
// Buys above this amount are charged the whale fee set.
function setmaxTxAmountBuy(uint256 _amount) public onlyOwner {
maxTxAmountBuy = _amount;
}
// Sells above this amount are charged the whale fee set.
function setmaxTxAmountSell(uint256 _amount) public onlyOwner {
maxTxAmountSell = _amount;
}
// Toggles automatic swap-and-liquify.
function setSwapAndLiquifyEnabled(bool _state) public onlyOwner {
swapAndLiquifyEnabled = _state;
emit SwapAndLiquifyEnabledUpdated(_state);
}
// When true, ALL transfer fees are skipped in _transfer (flag name carries a
// typo, "reflectionFeesdiabled"; it is declared elsewhere so cannot be renamed here).
function setReflectionFees(bool _state) external onlyOwner {
reflectionFeesdiabled = _state;
}
// Updates the marketing ("wheel") wallet address.
function setwheelWallet(address payable _wheelWallet) external onlyOwner {
require(
_wheelWallet != address(0),
"Market wallet cannot be address zero"
);
wheelWallet = _wheelWallet;
}
// Points the contract at a new DEX router and pair.
// FIX: both revert messages previously misspelled "address" as "adress".
function setRoute(IPancakeRouter02 _router, address _pair)
    external
    onlyOwner
{
    require(
        address(_router) != address(0),
        "Router address cannot be address zero"
    );
    require(_pair != address(0), "Pair address cannot be address zero");
    pancakeRouter = _router;
    pancakePair = _pair;
}
// Owner rescue: withdraws BNB held by the contract.
function withdrawBNB(uint256 _amount) external onlyOwner {
require(address(this).balance >= _amount, "Invalid Amount");
payable(msg.sender).transfer(_amount);
}
// Owner rescue: withdraws arbitrary ERC20 tokens held by the contract.
// NOTE(review): the transfer return value is ignored; tokens that return false
// instead of reverting would fail silently here.
function withdrawToken(IERC20 _token, uint256 _amount) external onlyOwner {
require(_token.balanceOf(address(this)) >= _amount, "Invalid Amount");
_token.transfer(msg.sender, _amount);
}
//to receive BNB from pancakeRouter when swapping
receive() external payable {}
// Total fee for a transfer of `tAmount` under the currently-active fee set.
// Fee values are parts-per-thousand, hence the division by 1e3.
function totalFeePerTx(uint256 tAmount) internal view returns (uint256) {
uint256 percentage = (tAmount *
(_currentreflectionFee +
(_currentLiquidityFee) +
(_currentwheelWalletFee) +
(_currentcreatorwalletFee))) / (1e3);
return percentage;
}
// Distributes the reflection fee by shrinking _rTotal, which raises every
// non-excluded holder's token-denominated balance at the next rate lookup.
function _reflectFee(uint256 tAmount) private {
uint256 tFee = (tAmount * (_currentreflectionFee)) / (1e3);
uint256 rFee = tFee * (_getRate());
_rTotal = _rTotal - (rFee);
_tFeeTotal = _tFeeTotal + (tFee);
}
// Current reflected-units-per-token conversion rate.
function _getRate() private view returns (uint256) {
(uint256 rSupply, uint256 tSupply) = _getCurrentSupply();
return rSupply / (tSupply);
}
// Supply totals net of reward-excluded accounts; falls back to the raw totals
// whenever the subtraction would produce an inconsistent (too small) supply.
function _getCurrentSupply() private view returns (uint256, uint256) {
uint256 rSupply = _rTotal;
uint256 tSupply = _tTotal;
for (uint256 i = 0; i < _excluded.length; i++) {
if (
_rOwned[_excluded[i]] > rSupply ||
_tOwned[_excluded[i]] > tSupply
) return (_rTotal, _tTotal);
rSupply = rSupply - (_rOwned[_excluded[i]]);
tSupply = tSupply - (_tOwned[_excluded[i]]);
}
if (rSupply < _rTotal / (_tTotal)) return (_rTotal, _tTotal);
return (rSupply, tSupply);
}
// Credits the liquidity portion of the fee to the contract itself (later
// converted into LP by swapAndLiquify).
// NOTE(review): the Transfer event uses _msgSender() as `from`; during a
// transferFrom that is the spender, not the token sender — confirm intended.
function _takeLiquidityPoolFee(uint256 tAmount, uint256 currentRate)
internal
{
uint256 tPoolFee = (tAmount * (_currentLiquidityFee)) / (1e3);
uint256 rPoolFee = tPoolFee * (currentRate);
_rOwned[address(this)] = _rOwned[address(this)] + (rPoolFee);
if (_isExcluded[address(this)])
_tOwned[address(this)] = _tOwned[address(this)] + (tPoolFee);
emit Transfer(_msgSender(), address(this), tPoolFee);
}
// Credits the marketing ("wheel") portion of the fee to wheelWallet.
function _takeWheelFee(uint256 tAmount, uint256 currentRate) internal {
uint256 tWheelFee = (tAmount * (_currentwheelWalletFee)) / (1e3);
uint256 rWheelFee = tWheelFee * (currentRate);
_rOwned[wheelWallet] = _rOwned[wheelWallet] + (rWheelFee);
if (_isExcluded[wheelWallet])
_tOwned[wheelWallet] = _tOwned[wheelWallet] + (tWheelFee);
emit Transfer(_msgSender(), wheelWallet, tWheelFee);
}
// Credits the creator portion of the fee to creatorWallet.
function _takecreatorFee(uint256 tAmount, uint256 currentRate) internal {
uint256 tcreatorFee = (tAmount * (_currentcreatorwalletFee)) / (1e3);
uint256 rcreatorFee = tcreatorFee * (currentRate);
_rOwned[creatorWallet] = _rOwned[creatorWallet] + (rcreatorFee);
if (_isExcluded[creatorWallet])
_tOwned[creatorWallet] = _tOwned[creatorWallet] + (tcreatorFee);
emit Transfer(_msgSender(), creatorWallet, tcreatorFee);
}
// --- Per-transaction fee-set selectors used by _transfer ---
// Fee-free transfer (excluded parties or fees globally disabled).
function removeAllFee() private {
_currentreflectionFee = 0;
_currentLiquidityFee = 0;
_currentwheelWalletFee = 0;
_currentcreatorwalletFee = 0;
}
// Activates the buy-side fee set.
function setBuyFee() private {
_currentreflectionFee = reflectionFeeOnBuying;
_currentLiquidityFee = liquidityFeeOnBuying;
_currentwheelWalletFee = wheelWalletFeeOnBuying;
_currentcreatorwalletFee = creatorwalletFeeOnBuying;
}
// Activates the sell-side fee set.
function setSellFee() private {
_currentreflectionFee = reflectionFeeOnSelling;
_currentLiquidityFee = liquidityFeeOnSelling;
_currentwheelWalletFee = wheelWalletFeeOnSelling;
_currentcreatorwalletFee = creatorwalletFeeOnSelling;
}
// Activates the whale / anti-bot fee set.
function setWhaleFee() private {
_currentreflectionFee = reflectionFeeOnWhale;
_currentLiquidityFee = liquidityFeeOnWhale;
_currentwheelWalletFee = wheelWalletFeeOnWhale;
_currentcreatorwalletFee = creatorwalletFeeOnWhale;
}
// Activates the plain wallet-to-wallet fee set.
function setNormalFee() private {
_currentreflectionFee = reflectionFee;
_currentLiquidityFee = liquidityFee;
_currentwheelWalletFee = wheelWalletFee;
_currentcreatorwalletFee = creatorwalletFee;
}
//only owner can change BuyFeePercentages any time after deployment
// The sum of the four buy fees must not exceed maxFee. Note the require runs
// AFTER assignment; a revert rolls back the writes, so the ordering is safe.
function setBuyFeePercent(
uint256 _reflectionFee,
uint256 _liquidityFee,
uint256 _wheelWalletFee,
uint256 _creatorwalletFee
) external onlyOwner {
reflectionFeeOnBuying = _reflectionFee;
liquidityFeeOnBuying = _liquidityFee;
wheelWalletFeeOnBuying = _wheelWalletFee;
creatorwalletFeeOnBuying = _creatorwalletFee;
require(
reflectionFeeOnBuying +
(liquidityFeeOnBuying) +
(wheelWalletFeeOnBuying) +
(creatorwalletFeeOnBuying) <=
maxFee,
"ERC20: Can not be greater than max fee"
);
}
//only owner can change SellFeePercentages any time after deployment
// Sum of the four sell fees is capped at maxFee.
function setSellFeePercent(
uint256 _reflectionFee,
uint256 _liquidityFee,
uint256 _wheelWalletFee,
uint256 _creatorwalletFee
) external onlyOwner {
reflectionFeeOnSelling = _reflectionFee;
liquidityFeeOnSelling = _liquidityFee;
wheelWalletFeeOnSelling = _wheelWalletFee;
creatorwalletFeeOnSelling = _creatorwalletFee;
require(
reflectionFeeOnSelling +
(liquidityFeeOnSelling) +
(creatorwalletFeeOnSelling) +
(wheelWalletFeeOnSelling) <=
maxFee,
"ERC20: Can not be greater than max fee"
);
}
//only owner can change WhaleFeePercentages any time after deployment
// Sum of the four whale fees is capped at maxWhaleFee (a separate, typically
// higher, limit than maxFee).
function setWhaleFeePercent(
uint256 _reflectionFee,
uint256 _liquidityFee,
uint256 _wheelWalletFee,
uint256 _creatorwalletFee
) external onlyOwner {
reflectionFeeOnWhale = _reflectionFee;
liquidityFeeOnWhale = _liquidityFee;
wheelWalletFeeOnWhale = _wheelWalletFee;
creatorwalletFeeOnWhale = _creatorwalletFee;
require(
reflectionFeeOnWhale +
(liquidityFeeOnWhale) +
(creatorwalletFeeOnWhale) +
(wheelWalletFeeOnWhale) <=
maxWhaleFee,
"ERC20: Can not be greater than max fee"
);
}
//only owner can change NormalFeePercent any time after deployment
// Sum of the four wallet-to-wallet fees is capped at maxFee.
function setNormalFeePercent(
uint256 _reflectionFee,
uint256 _liquidityFee,
uint256 _wheelWalletFee,
uint256 _creatorwalletFee
) external onlyOwner {
reflectionFee = _reflectionFee;
liquidityFee = _liquidityFee;
wheelWalletFee = _wheelWalletFee;
creatorwalletFee = _creatorwalletFee;
require(
reflectionFee +
(liquidityFee) +
(wheelWalletFee) +
(creatorwalletFee) <=
maxFee,
"ERC20: Can not be greater than max fee"
);
}
// True if `account` is exempt from transfer fees.
function isExcludedFromFee(address account) public view returns (bool) {
return _isExcludedFromFee[account];
}
// True if `account` is blocked from transferring.
function isBlacklisted(address account) public view returns (bool) {
return _isBlacklisted[account];
}
// Internal allowance writer; validates both endpoints and emits Approval.
function _approve(
address owner,
address spender,
uint256 amount
) private {
require(owner != address(0), "ERC20: approve from the zero address");
require(spender != address(0), "ERC20: approve to the zero address");
_allowances[owner][spender] = amount;
emit Approval(owner, spender, amount);
}
// Core transfer hook: validates endpoints, runs swap-and-liquify and the drip
// burn, then selects the fee set (buy / sell / whale / normal / none) based on
// which side of the pancake pair is involved before moving tokens.
function _transfer(
address from,
address to,
uint256 amount
) private {
require(from != address(0), "ERC20: transfer from the zero address");
require(to != address(0), "ERC20: transfer to the zero address");
require(amount > 0, "ERC20: Transfer amount must be greater than zero");
require(!_isBlacklisted[from], "ERC20: Sender is blacklisted");
require(!_isBlacklisted[to], "ERC20: Recipient is blacklisted");
// swap and liquify
swapAndLiquify(from, to);
//indicates if fee should be deducted from transfer
bool takeFee = true;
// Drip burn piggybacks on every transfer (see getburnableamount / burn).
uint256 burnableAmount = getburnableamount();
if(burnableAmount > 0){
burn(burnableAmount);
}
//if any account belongs to _isExcludedFromFee account then remove the fee
if (
_isExcludedFromFee[from] ||
_isExcludedFromFee[to] ||
reflectionFeesdiabled
) {
takeFee = false;
}
if (!takeFee) {
removeAllFee();
}
// buying handler
else if (from == pancakePair) {
if (amount > maxTxAmountBuy) {
setWhaleFee();
} else {
setBuyFee();
}
}
// selling handler
else if (to == pancakePair) {
//anti Dump
if (_antiBot[from]) {
setWhaleFee();
} else {
if (amount > maxTxAmountSell) {
setWhaleFee();
} else {
setSellFee();
}
}
}
// normal transaction handler
else {
setNormalFee();
}
//transfer amount, it will take tax
_tokenTransfer(from, to, amount);
}
//this method is responsible for taking all fee, if takeFee is true
// Dispatches to the bookkeeping variant matching the reward-exclusion status
// of each endpoint (excluded accounts are tracked in _tOwned as well).
function _tokenTransfer(
address sender,
address recipient,
uint256 amount
) private {
if (_isExcluded[sender] && !_isExcluded[recipient]) {
_transferFromExcluded(sender, recipient, amount);
} else if (!_isExcluded[sender] && _isExcluded[recipient]) {
_transferToExcluded(sender, recipient, amount);
} else if (_isExcluded[sender] && _isExcluded[recipient]) {
_transferBothExcluded(sender, recipient, amount);
} else {
_transferStandard(sender, recipient, amount);
}
}
// Transfer where neither endpoint is reward-excluded.
// CONSISTENCY FIX: fees are now applied in the same order as the other three
// _transfer* variants (liquidity, wheel, creator, then reflection). The
// reflection step reads a fresh rate via _getRate(), so running it BEFORE the
// wallet fees (as this path previously did) made it compute a slightly
// different reflection amount than its siblings for identical inputs.
function _transferStandard(
    address sender,
    address recipient,
    uint256 tAmount
) private {
    uint256 currentRate = _getRate();
    uint256 tTransferAmount = tAmount - (totalFeePerTx(tAmount));
    uint256 rAmount = tAmount * (currentRate);
    uint256 rTransferAmount = rAmount -
        (totalFeePerTx(tAmount) * (currentRate));
    _rOwned[sender] = _rOwned[sender] - (rAmount);
    _rOwned[recipient] = _rOwned[recipient] + (rTransferAmount);
    if (_currentLiquidityFee > 0) {
        _takeLiquidityPoolFee(tAmount, currentRate);
    }
    if (_currentwheelWalletFee > 0) {
        _takeWheelFee(tAmount, currentRate);
    }
    if (_currentcreatorwalletFee > 0) {
        _takecreatorFee(tAmount, currentRate);
    }
    if (_currentreflectionFee > 0) {
        _reflectFee(tAmount);
    }
    emit Transfer(sender, recipient, tTransferAmount);
}
// Transfer into a reward-excluded recipient: recipient's _tOwned is updated in
// addition to the reflected bookkeeping.
function _transferToExcluded(
address sender,
address recipient,
uint256 tAmount
) private {
uint256 currentRate = _getRate();
uint256 tTransferAmount = tAmount - (totalFeePerTx(tAmount));
uint256 rAmount = tAmount * (currentRate);
uint256 rTransferAmount = rAmount -
(totalFeePerTx(tAmount) * (currentRate));
_rOwned[sender] = _rOwned[sender] - (rAmount);
_tOwned[recipient] = _tOwned[recipient] + (tTransferAmount);
_rOwned[recipient] = _rOwned[recipient] + (rTransferAmount);
if (_currentLiquidityFee > 0) {
_takeLiquidityPoolFee(tAmount, currentRate);
}
if (_currentwheelWalletFee > 0) {
_takeWheelFee(tAmount, currentRate);
}
if (_currentcreatorwalletFee > 0) {
_takecreatorFee(tAmount, currentRate);
}
if (_currentreflectionFee > 0) {
_reflectFee(tAmount);
}
emit Transfer(sender, recipient, tTransferAmount);
}
// Transfer from a reward-excluded sender: sender's _tOwned is reduced in
// addition to the reflected bookkeeping.
function _transferFromExcluded(
address sender,
address recipient,
uint256 tAmount
) private {
uint256 currentRate = _getRate();
uint256 tTransferAmount = tAmount - (totalFeePerTx(tAmount));
uint256 rAmount = tAmount * (currentRate);
uint256 rTransferAmount = rAmount -
(totalFeePerTx(tAmount) * (currentRate));
_tOwned[sender] = _tOwned[sender] - (tAmount);
_rOwned[sender] = _rOwned[sender] - (rAmount);
_rOwned[recipient] = _rOwned[recipient] + (rTransferAmount);
if (_currentLiquidityFee > 0) {
_takeLiquidityPoolFee(tAmount, currentRate);
}
if (_currentwheelWalletFee > 0) {
_takeWheelFee(tAmount, currentRate);
}
if (_currentcreatorwalletFee > 0) {
_takecreatorFee(tAmount, currentRate);
}
if (_currentreflectionFee > 0) {
_reflectFee(tAmount);
}
emit Transfer(sender, recipient, tTransferAmount);
}
// Transfer where both endpoints are reward-excluded: both sides update _tOwned
// and the reflected bookkeeping.
function _transferBothExcluded(
address sender,
address recipient,
uint256 tAmount
) private {
uint256 currentRate = _getRate();
uint256 tTransferAmount = tAmount - (totalFeePerTx(tAmount));
uint256 rAmount = tAmount * (currentRate);
uint256 rTransferAmount = rAmount -
(totalFeePerTx(tAmount) * (currentRate));
_tOwned[sender] = _tOwned[sender] - (tAmount);
_rOwned[sender] = _rOwned[sender] - (rAmount);
_tOwned[recipient] = _tOwned[recipient] + (tTransferAmount);
_rOwned[recipient] = _rOwned[recipient] + (rTransferAmount);
if (_currentLiquidityFee > 0) {
_takeLiquidityPoolFee(tAmount, currentRate);
}
if (_currentwheelWalletFee > 0) {
_takeWheelFee(tAmount, currentRate);
}
if (_currentcreatorwalletFee > 0) {
_takecreatorFee(tAmount, currentRate);
}
if (_currentreflectionFee > 0) {
_reflectFee(tAmount);
}
emit Transfer(sender, recipient, tTransferAmount);
}
// Manual burn, callable only by the dedicated burn wallet.
// NOTE(review): _rTotal is not reduced when burning from a non-excluded
// burnwallet even though _rOwned is — confirm the reflection accounting stays
// consistent after burns. A 0.8.x underflow reverts if the wallet lacks funds.
function Burn(uint256 burnAmount) external {
require(msg.sender == burnwallet, "You are not the burn wallet");
require(burnAmount > 0, "nothing to burn");
if (_isExcluded[burnwallet]) {
_tOwned[burnwallet] = _tOwned[burnwallet] - (burnAmount);
} else {
_rOwned[burnwallet] = _rOwned[burnwallet] - (burnAmount * _getRate());
}
_tTotal = _tTotal - (burnAmount);
emit Transfer(burnwallet, address(0), burnAmount);
}
// Automatic drip burn invoked from _transfer; deducts from burnwallet and
// records cumulative burn progress for getburnableamount.
// NOTE(review): no balance check here — once burnwallet runs dry, the
// underflow would revert every transfer that still computes a positive
// burnable amount; verify burnwallet is funded for the full burnDuration.
function burn(uint256 burnAmount) internal{
if (_isExcluded[burnwallet]) {
_tOwned[burnwallet] = _tOwned[burnwallet] - (burnAmount);
} else {
_rOwned[burnwallet] = _rOwned[burnwallet] - (burnAmount * _getRate());
}
_tTotal = _tTotal - (burnAmount);
burned = burned + (burnAmount);
lastBurn = block.timestamp;
emit Transfer(burnwallet, address(0), burnAmount);
}
// Linear drip schedule: burnRate tokens per second since lastBurn, capped by
// the remaining burn budget (burnDuration * burnRate).
// NOTE(review): the balance cap reads balanceOf(owner()) although burn()
// deducts from burnwallet — one of the two addresses looks wrong; confirm.
function getburnableamount() public view returns (uint256) {
uint256 timeElapsed = block.timestamp - lastBurn;
uint256 burnAmount = timeElapsed * burnRate;
if (burnAmount > balanceOf(owner())) {
burnAmount = balanceOf(owner());
}
uint256 maxBurn = burnDuration * burnRate;
if (burnAmount + burned >= maxBurn) {
burnAmount = maxBurn - burned;
}
return burnAmount;
}
// Converts accumulated liquidity-fee tokens into LP: swaps half for BNB, then
// pairs it with the other half on Pancake. LP tokens go to the owner.
// NOTE(review): the lockTheSwap modifier is declared above but NOT applied
// here, so inSwapAndLiquify is checked but never set — confirm whether the
// "swap 1 time" condition alone is sufficient reentry protection.
function swapAndLiquify(address from, address to) private {
// is the token balance of this contract address over the min number of
// tokens that we need to initiate a swap + liquidity lock?
// also, don't get caught in a circular liquidity event.
// also, don't swap & liquify if sender is pancake pair.
uint256 contractTokenBalance = balanceOf(address(this));
bool shouldSell = contractTokenBalance >= minTokenNumberToSell;
if (
!inSwapAndLiquify &&
shouldSell &&
from != pancakePair &&
swapAndLiquifyEnabled &&
!(from == address(this) && to == address(pancakePair)) // swap 1 time
) {
// only sell for minTokenNumberToSell, decouple from _maxTxAmount
// split the contract balance into 4 pieces
contractTokenBalance = minTokenNumberToSell;
// approve contract
_approve(
address(this),
address(pancakeRouter),
contractTokenBalance
);
// add liquidity
// split the contract balance into 2 pieces
uint256 otherPiece = contractTokenBalance / (2);
uint256 tokenAmountToBeSwapped = contractTokenBalance -
(otherPiece);
uint256 initialBalance = address(this).balance;
// now is to lock into staking pool
Utils.swapTokensForEth(
address(pancakeRouter),
tokenAmountToBeSwapped
);
// how much BNB did we just swap into?
// capture the contract's current BNB balance.
// this is so that we can capture exactly the amount of BNB that the
// swap creates, and not make the liquidity event include any BNB that
// has been manually sent to the contract
uint256 bnbToBeAddedToLiquidity = address(this).balance -
(initialBalance);
// add liquidity to pancake
Utils.addLiquidity(
address(pancakeRouter),
owner(),
otherPiece,
bnbToBeAddedToLiquidity
);
emit SwapAndLiquify(
tokenAmountToBeSwapped,
bnbToBeAddedToLiquidity,
otherPiece
);
}
}
}
// Stateless PancakeSwap helpers. All functions are `internal`, so they are
// inlined into the calling contract and `address(this)` resolves to it.
library Utils {
// Swaps `tokenAmount` of the calling contract's token for BNB, delivered back
// to the contract. Accepts any output amount (minOut = 0).
// NOTE(review): zero slippage protection exposes these swaps to sandwich/MEV.
function swapTokensForEth(address routerAddress, uint256 tokenAmount)
internal
{
IPancakeRouter02 pancakeRouter = IPancakeRouter02(routerAddress);
// generate the pancake pair path of token -> weth
address[] memory path = new address[](2);
path[0] = address(this);
path[1] = pancakeRouter.WETH();
// make the swap (5-minute deadline)
pancakeRouter.swapExactTokensForETHSupportingFeeOnTransferTokens(
tokenAmount,
0, // accept any amount of BNB
path,
address(this),
block.timestamp + 300
);
}
// Swaps `ethAmount` of BNB for the contract's token, delivered to `recipient`.
// Accepts any output amount (minOut = 0).
function swapETHForTokens(
address routerAddress,
address recipient,
uint256 ethAmount
) internal {
IPancakeRouter02 pancakeRouter = IPancakeRouter02(routerAddress);
// generate the pancake pair path of token -> weth
address[] memory path = new address[](2);
path[0] = pancakeRouter.WETH();
path[1] = address(this);
// make the swap (5-minute deadline)
pancakeRouter.swapExactETHForTokensSupportingFeeOnTransferTokens{
value: ethAmount
}(
0, // accept any amount of BNB
path,
address(recipient),
block.timestamp + 300
);
}
// Adds `tokenAmount` + `ethAmount` as liquidity; LP tokens go to `owner`.
// NOTE(review): zero amountMin values accept unlimited slippage.
function addLiquidity(
address routerAddress,
address owner,
uint256 tokenAmount,
uint256 ethAmount
) internal {
IPancakeRouter02 pancakeRouter = IPancakeRouter02(routerAddress);
// add the liquidity (5-minute deadline)
pancakeRouter.addLiquidityETH{value: ethAmount}(
address(this),
tokenAmount,
0, // slippage is unavoidable
0, // slippage is unavoidable
owner,
block.timestamp + 300
);
}
}
|
//
// Copyright 2023 Stacklok, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// NOTE: This file is for stubbing out client code for proof of concept
// purposes. It will / should be removed in the future.
// Until then, it is not covered by unit tests and should not be used
// It does make a good example of how to use the generated client code
// for others to use as a reference.
package db
import (
"context"
"testing"
"time"
"github.com/stretchr/testify/require"
"github.com/stacklok/mediator/internal/util"
)
// createRandomGroup inserts a group with a random name under the given
// organization, asserts the inserted row is fully populated, and returns it.
func createRandomGroup(t *testing.T, org int32) Group {
	t.Helper()

	params := CreateGroupParams{
		OrganizationID: org,
		Name:           util.RandomName(time.Now().UnixNano()),
	}

	created, err := testQueries.CreateGroup(context.Background(), params)
	require.NoError(t, err)
	require.NotEmpty(t, created)

	require.Equal(t, params.OrganizationID, created.OrganizationID)
	require.Equal(t, params.Name, created.Name)
	require.NotZero(t, created.ID)
	require.NotZero(t, created.CreatedAt)
	require.NotZero(t, created.UpdatedAt)

	return created
}
// TestGroup verifies that a group can be created under a fresh organization.
func TestGroup(t *testing.T) {
	t.Parallel()

	owner := createRandomOrganization(t)
	_ = createRandomGroup(t, owner.ID)
}
// TestGetGroup verifies that a stored group can be fetched by ID and that its
// fields round-trip unchanged.
func TestGetGroup(t *testing.T) {
	t.Parallel()

	owner := createRandomOrganization(t)
	created := createRandomGroup(t, owner.ID)

	fetched, err := testQueries.GetGroupByID(context.Background(), created.ID)
	require.NoError(t, err)
	require.NotEmpty(t, fetched)

	require.Equal(t, created.OrganizationID, fetched.OrganizationID)
	require.Equal(t, created.Name, fetched.Name)
	require.NotZero(t, fetched.ID)
	require.NotZero(t, fetched.CreatedAt)
	require.NotZero(t, fetched.UpdatedAt)
}
// TestListGroups seeds ten groups under one organization and verifies that
// pagination (limit 5, offset 5) returns exactly the second page.
func TestListGroups(t *testing.T) {
t.Parallel()
org := createRandomOrganization(t)
for i := 0; i < 10; i++ {
createRandomGroup(t, org.ID)
}
arg := ListGroupsParams{
OrganizationID: org.ID,
Limit: 5,
Offset: 5,
}
groups, err := testQueries.ListGroups(context.Background(), arg)
require.NoError(t, err)
require.Len(t, groups, 5)
for _, group := range groups {
require.NotEmpty(t, group)
}
}
// TestUpdateGroup renames an existing group and verifies the persisted row
// carries the new name while keeping its identity and timestamp fields set.
func TestUpdateGroup(t *testing.T) {
t.Parallel()
seed := time.Now().UnixNano()
org := createRandomOrganization(t)
group1 := createRandomGroup(t, org.ID)
arg := UpdateGroupParams{
ID: group1.ID,
OrganizationID: org.ID,
Name: util.RandomName(seed),
}
group2, err := testQueries.UpdateGroup(context.Background(), arg)
require.NoError(t, err)
require.NotEmpty(t, group2)
require.Equal(t, arg.OrganizationID, group2.OrganizationID)
require.Equal(t, arg.Name, group2.Name)
require.NotZero(t, group2.ID)
require.NotZero(t, group2.CreatedAt)
require.NotZero(t, group2.UpdatedAt)
}
// TestDeleteGroup verifies that a deleted group can no longer be fetched.
func TestDeleteGroup(t *testing.T) {
	t.Parallel()

	owner := createRandomOrganization(t)
	created := createRandomGroup(t, owner.ID)

	require.NoError(t, testQueries.DeleteGroup(context.Background(), created.ID))

	// A subsequent lookup must fail and return an empty row.
	fetched, err := testQueries.GetGroupByID(context.Background(), created.ID)
	require.Error(t, err)
	require.Empty(t, fetched)
}
// TestListGroupsByOrganization seeds two organizations with ten groups each and
// verifies that listing with org1's ID returns only org1's groups (the query
// filters by OrganizationID, which the per-row assertion below confirms).
func TestListGroupsByOrganization(t *testing.T) {
t.Parallel()
org1 := createRandomOrganization(t)
org2 := createRandomOrganization(t)
for i := 0; i < 10; i++ {
createRandomGroup(t, org1.ID)
createRandomGroup(t, org2.ID)
}
arg := ListGroupsParams{
OrganizationID: org1.ID,
Limit: 5,
Offset: 5,
}
groups, err := testQueries.ListGroups(context.Background(), arg)
require.NoError(t, err)
require.Len(t, groups, 5)
for _, group := range groups {
require.NotEmpty(t, group)
require.Equal(t, org1.ID, group.OrganizationID)
}
}
|
/*
* This is a manifest file that'll be compiled into application.css, which will include all the files
* listed below.
*
* Any CSS and SCSS file within this directory, lib/assets/stylesheets, vendor/assets/stylesheets,
* or any plugin's vendor/assets/stylesheets directory can be referenced here using a relative path.
*
* You're free to add application-wide styles to this file and they'll appear at the bottom of the
* compiled file so the styles you add here take precedence over styles defined in any other CSS/SCSS
* files in this directory. Styles in this file should be added after the last require_* statement.
* It is generally better to create a new file per style scope.
*
*= require_tree .
*= require_self
*/
@import 'bootstrap-sprockets';
@import 'bootstrap';
@import 'font-awesome-sprockets';
@import 'font-awesome';

// Typography: headings share the display font.
h1,
h2,
h3 {
  font-family: 'Open Sans', sans-serif;
}

// Shared orange gradient background with a flat-colour fallback.
.navbar,
.home-callout {
  background: #FF512F; /* fallback for old browsers */
  background: -webkit-linear-gradient(to left, #FF512F, #F09819); /* Chrome 10-25, Safari 5.1-6 */
  background: linear-gradient(to left, #FF512F, #F09819); /* W3C, IE 10+/ Edge, Firefox 16+, Chrome 26+, Opera 12+, Safari 7+ */
}

// Navigation bar.
.navbar {
  border: none;
}

.navbar-default .navbar-brand,
.navbar-default .navbar-nav > li > a {
  color: white;

  &:hover {
    color: white;
    opacity: 0.75;
  }
}

.navbar-brand {
  font-family: 'Open Sans', sans-serif;
  font-weight: 800;
}

.navbar-default .navbar-toggle .icon-bar {
  background-color: white;
}

.navbar-default .navbar-toggle {
  border: 2px solid white;

  &:hover,
  &:focus {
    background-color: #00c6ff;
  }
}

// Homepage hero callout.
.home-callout {
  color: white;
  margin-top: -20px;
  padding-top: 50px;
  padding-bottom: 100px;
}

// Hide the "devise" notice flash message.
.alert-notice {
  display: none;
}

.home-signup-links {
  margin-top: 25px;
}
|
package core
import (
"reflect"
"testing"
appsv1alpha1 "github.com/openkruise/kruise/apis/apps/v1alpha1"
"github.com/openkruise/kruise/pkg/util/inplaceupdate"
)
// Test_CommonControl_GetUpdateOptions checks the UpdateOptions produced for
// each CloneSet update-strategy type.
func Test_CommonControl_GetUpdateOptions(t *testing.T) {
	type fields struct {
		CloneSet *appsv1alpha1.CloneSet
	}
	defaultOps := &inplaceupdate.UpdateOptions{}
	ignoreVCTHashOps := &inplaceupdate.UpdateOptions{IgnoreVolumeClaimTemplatesHashDiff: true}
	tests := []struct {
		name   string
		fields fields
		want   *inplaceupdate.UpdateOptions
	}{
		{
			name: "inplace only update type",
			fields: fields{
				&appsv1alpha1.CloneSet{
					Spec: appsv1alpha1.CloneSetSpec{
						UpdateStrategy: appsv1alpha1.CloneSetUpdateStrategy{
							Type: appsv1alpha1.InPlaceOnlyCloneSetUpdateStrategyType,
						},
					},
				},
			},
			want: ignoreVCTHashOps,
		},
		{
			name: "inplace if possible update type",
			fields: fields{
				&appsv1alpha1.CloneSet{
					Spec: appsv1alpha1.CloneSetSpec{
						UpdateStrategy: appsv1alpha1.CloneSetUpdateStrategy{
							Type: appsv1alpha1.InPlaceIfPossibleCloneSetUpdateStrategyType,
						},
					},
				},
			},
			want: defaultOps,
		},
		{
			// unexpected case: the method should not be called with recreate update
			// strategy type, but it must still fall back to the default options.
			// FIX: this case previously reused InPlaceIfPossibleCloneSetUpdateStrategyType,
			// duplicating the case above instead of exercising the recreate type.
			name: "recreate update type",
			fields: fields{
				&appsv1alpha1.CloneSet{
					Spec: appsv1alpha1.CloneSetSpec{
						UpdateStrategy: appsv1alpha1.CloneSetUpdateStrategy{
							Type: appsv1alpha1.RecreateCloneSetUpdateStrategyType,
						},
					},
				},
			},
			want: defaultOps,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			c := &commonControl{
				CloneSet: tt.fields.CloneSet,
			}
			if got := c.GetUpdateOptions(); !reflect.DeepEqual(got, tt.want) {
				t.Errorf("GetUpdateOptions() = %v, want %v", got, tt.want)
			}
		})
	}
}
|
"use client";
import { Content, asImageSrc, isFilled } from "@prismicio/client";
import Link from "next/link";
import React, { useEffect, useReducer, useRef, useState } from "react";
import { MdArrowOutward } from "react-icons/md";
import { gsap } from "gsap";
import { ScrollTrigger } from "gsap/ScrollTrigger";
gsap.registerPlugin(ScrollTrigger);

type ContentListProps = {
  items: Content.BlogPostDocument[] | Content.ProjectDocument[];
  contentType: Content.ContentIndexSlice["primary"]["content_type"];
  fallbackItemImage: Content.ContentIndexSlice["primary"]["fallback_item_image"];
  viewMoreText: Content.ContentIndexSlice["primary"]["view_more_text"];
};

/**
 * Renders a hoverable list of blog posts or projects; a floating preview image
 * follows the cursor while a row is hovered.
 */
export default function ContentList({
  items,
  contentType,
  fallbackItemImage,
  viewMoreText = "read More",
}: ContentListProps) {
  const component = useRef(null);
  const [currentItem, setCurrentItem] = useState<null | number>(null);
  const [hovering, setHovering] = useState(false);
  const revealRef = useRef(null);
  const urlPrefix = contentType === "Blog" ? "/blog" : "/project";
  const lastMousePos = useRef({ x: 0, y: 0 });
  const itemsRef = useRef<Array<HTMLLIElement | null>>([]);

  // Animate list items in with a stagger as they scroll into view.
  useEffect(() => {
    const ctx = gsap.context(() => {
      itemsRef.current.forEach((item) => {
        gsap.fromTo(
          item,
          {
            opacity: 0,
            y: 20,
          },
          {
            opacity: 1,
            y: 0,
            duration: 1.3,
            ease: "elastic.out(1,0.3)",
            stagger: 0.2,
            scrollTrigger: {
              trigger: item,
              start: "top bottom-=100px",
              end: "bottom center",
              toggleActions: "play none none none",
            },
          },
        );
      });
    }, component);
    // BUG FIX: the cleanup was previously returned from *inside* the
    // gsap.context callback, where React never sees it. It must be returned
    // from the effect itself so animations are reverted on unmount.
    return () => ctx.revert();
  }, []);

  // Move/tilt the hover-reveal image toward the cursor while a row is hovered.
  useEffect(() => {
    const handleMouseMove = (e: MouseEvent) => {
      const mousePos = { x: e.clientX, y: e.clientY + window.scrollY };
      // Horizontal cursor speed drives the tilt of the reveal image.
      const speed = Math.sqrt(Math.pow(mousePos.x - lastMousePos.current.x, 2));
      if (currentItem !== null) {
        const maxY = window.scrollY + window.innerHeight - 350;
        const maxX = window.innerWidth - 250;
        gsap.to(revealRef.current, {
          x: gsap.utils.clamp(0, maxX, mousePos.x - 110),
          y: gsap.utils.clamp(0, maxY, mousePos.y - 160),
          rotation: speed * (mousePos.x > lastMousePos.current.x ? 1 : -1), // Apply rotation based on speed and direction
          ease: "back.out(2)",
          duration: 1.3,
          opacity: 1,
        });
      }
      lastMousePos.current = mousePos;
    };
    // BUG FIX: the original created a brand-new gsap.context on EVERY
    // mousemove event and "returned" its cleanup from the event handler, so
    // contexts accumulated and were never reverted. A plain tween needs no
    // context here; the listener removal below is the only cleanup required.
    window.addEventListener("mousemove", handleMouseMove);
    return () => {
      window.removeEventListener("mousemove", handleMouseMove);
    };
  }, [currentItem, hovering]);

  // Resolve a cropped preview URL per item, falling back when no hover image.
  const contentImages = items.map((item) => {
    const image = isFilled.image(item.data.hover_image)
      ? item.data.hover_image
      : fallbackItemImage;
    return asImageSrc(image, {
      fit: "crop",
      w: 220,
      h: 320,
      exp: -10,
    });
  });

  // Pre-load the hover images so the reveal is instant on first hover.
  useEffect(() => {
    contentImages.forEach((url) => {
      if (!url) return;
      const img = new Image();
      img.src = url;
    });
  }, [contentImages]);

  const onMouseEnter = (index: number) => {
    setCurrentItem(index);
  };
  const onMouseLeave = () => {
    setCurrentItem(null);
  };

  return (
    <div>
      <ul
        onMouseLeave={onMouseLeave}
        className="grid border-b border-b-slate-100"
      >
        {items.map((item, index) => (
          // BUG FIX: the list key must live on the outermost element returned
          // from the map callback (the fragment), not on the inner <li>;
          // a keyless fragment triggers React's missing-key warning.
          <React.Fragment key={index}>
            {isFilled.keyText(item.data.title) && (
              <li
                className="list-item opacity-0"
                onMouseEnter={() => onMouseEnter(index)}
                ref={(el) => (itemsRef.current[index] = el)}
              >
                <Link
                  className="flex flex-col justify-between border-t border-t-slate-100 py-10 text-slate-200 md:flex-row "
                  href={urlPrefix + "/" + item.uid}
                >
                  <div className="flex flex-col">
                    <span className="text-3xl font-bold">{item.data.title}</span>
                    <div className="flex gap-3 text-yellow-400">
                      {item.tags.map((tag: string | number | boolean | React.ReactElement<any, string | React.JSXElementConstructor<any>> | Iterable<React.ReactNode> | React.ReactPortal | React.PromiseLikeOfReactNode | null | undefined, tagIndex: React.Key | null | undefined) => (
                        <span key={tagIndex} className="text-lg font-bold">{tag}</span>
                      ))}
                    </div>
                  </div>
                  <span className="ml-auto flex items-center gap-2 text-xl font-medium md:ml-0">{viewMoreText} <MdArrowOutward /> </span>
                </Link>
              </li>
            )}
          </React.Fragment>
        ))}
      </ul>
      {/* Hover element: positioned by the mousemove effect above */}
      <div
        className="hover-reveal pointer-events-none absolute left-0 top-0 -z-10 h-[320px] w-[220px] rounded-lg bg-cover bg-center opacity-0 transition-[background] duration-300"
        style={{
          backgroundImage:
            currentItem !== null ? `url(${contentImages[currentItem]})` : "",
        }}
        ref={revealRef}
      ></div>
    </div>
  );
}
|
<!-- Task create/edit form. `isEdit` switches between create and edit layouts;
     `isEditFields.*` flags drive click-to-edit behavior for individual fields. -->
<molecules-breadcrumb [breadcrumbItems]="breadcrumbItems"> </molecules-breadcrumb>
<templates-crud-wrapper>
  <!-- Tasks require a parent project, so warn when none exist yet.
       Both submit buttons below are disabled under the same condition. -->
  <div *ngIf="!projects.length" class="alert alert-warning" role="alert">
    {{ 'Please add at least one project before.' | translate }}
  </div>
  <form [formGroup]="form">
    <!-- Edit mode: the title renders as a heading and turns into an input on
         click; Enter closes the inline editor again. -->
    <div *ngIf="isEdit">
      <h2 *ngIf="!isEditFields.title" style="cursor: pointer" (click)="editField('title')">{{ form.value.title }}</h2>
      <atoms-input
        *ngIf="isEditFields.title"
        inputLabel="Task title"
        formControlName="title"
        errorMessages
        [required]="true"
        (onEnter)="editField('title', false)"
      >
      </atoms-input>
    </div>
    <div class="row">
      <div class="col-md-5">
        <!-- Project selector; options render as "CODE - title". -->
        <molecules-select
          [required]="true"
          [items]="projects"
          [optionTemplate]="projectOptionTemplate"
          [labelTemplate]="projectOptionTemplate"
          bindLabel="title"
          bindValue="id"
          label="Project"
          formControlName="projectId"
          errorMessages
        >
          <ng-template #projectOptionTemplate let-item="item"> {{ item.code }} - {{ item.title }} </ng-template>
        </molecules-select>
      </div>
      <div class="w-100"></div>
      <div class="col-md-5">
        <!-- Task type selector: icon + translated label. -->
        <molecules-select
          [required]="true"
          [items]="types"
          [optionTemplate]="typeOptionTemplate"
          [labelTemplate]="typeOptionTemplate"
          bindLabel="text"
          bindValue="id"
          label="Type"
          formControlName="typeId"
          errorMessages
        >
          <ng-template #typeOptionTemplate let-item="item">
            <atoms-task-icon [lovType]="LovType.TASK_TYPE" [lov]="item"></atoms-task-icon>
            {{ item.text | translate }}
          </ng-template>
        </molecules-select>
      </div>
      <div class="w-100"></div>
      <div class="col-md-5">
        <!-- Priority selector: icon + translated label. -->
        <molecules-select
          [required]="true"
          [items]="priorityItems"
          [optionTemplate]="priorityOptionTemplate"
          [labelTemplate]="priorityOptionTemplate"
          bindLabel="text"
          bindValue="id"
          label="Priority"
          formControlName="priorityId"
          errorMessages
        >
          <ng-template #priorityOptionTemplate let-item="item">
            <atoms-task-icon [lovType]="LovType.TASK_PRIORITY" [lov]="item"></atoms-task-icon>
            {{ item.text | translate }}
          </ng-template>
        </molecules-select>
      </div>
      <div class="w-100"></div>
      <div class="col-md-5 mb-2">
        <molecules-select
          [required]="true"
          [items]="users"
          bindLabel="fullName"
          bindValue="id"
          label="Assignee"
          formControlName="assigneeId"
          errorMessages
        >
        </molecules-select>
        <a class="small" href="javascript:void(0)" (click)="assignToMe()">{{ 'Assign to me' | translate }}</a>
      </div>
      <div class="w-100"></div>
      <!-- Create mode: title input lives at the bottom of the column layout. -->
      <!-- NOTE(review): this input is still gated by isEditFields.title even in
           create mode — confirm the flag defaults to true for new tasks,
           otherwise the field never shows. -->
      <div class="col-md-5" *ngIf="!isEdit">
        <atoms-input
          *ngIf="isEditFields.title"
          [required]="true"
          inputLabel="Task title"
          formControlName="title"
          errorMessages
        >
        </atoms-input>
      </div>
    </div>
    <!-- Rich-text description: editable (Quill) vs read-only (rendered HTML). -->
    <div class="form-group" *ngIf="isEditFields.description">
      <label class="label">{{ 'Task Description' | translate }}</label>
      <quill-editor placeholder="" class="content-editor" formControlName="description" [modules]="editorConfig">
      </quill-editor>
    </div>
    <div *ngIf="!isEditFields.description">
      <h6>
        {{ 'Description' | translate }}
        <span style="cursor: pointer" (click)="editField('description')">
          <fa-icon [icon]="faPen" size="xs"></fa-icon
        ></span>
      </h6>
      <div class="ml-2">
        <p [innerHTML]="form.value.description"></p>
      </div>
    </div>
    <button *ngIf="isEdit" type="submit" class="btn btn-primary" [disabled]="!projects.length" (click)="updateTask()">
      {{ 'Save' | translate }}
    </button>
    <!-- NOTE(review): "crateTask" looks like a typo for "createTask", but the
         name must match the component method — rename in both places together. -->
    <button *ngIf="!isEdit" type="submit" class="btn btn-primary" [disabled]="!projects.length" (click)="crateTask()">
      {{ 'Create' | translate }}
    </button>
  </form>
</templates-crud-wrapper>
|
import React from 'react'
import { Menu, Transition } from "@headlessui/react";
import { BsPerson } from "react-icons/bs";
import { NavLink } from 'react-router-dom';
import { useNavigate } from "react-router-dom";
import { CgLogOff } from "react-icons/cg";
import { UserAuth } from "../../contexts/AuthContext";
const Profil = ({ user }) => {
const { contextToken } = UserAuth();
const [token, setToken] = contextToken;
const Navigate = useNavigate();
const handleClear = () => {
localStorage.removeItem("token");
setToken(null);
Navigate("/login");
};
return (
<div className="flex items-center justify-center z-50">
<div className="relative inline-block text-left">
<Menu>
{({ open }) => (
<>
<span className="rounded-md shadow-sm">
<Menu.Button className="inline-flex justify-center w-full p-2 text-sm font-medium leading-5 text-gray-700 transition duration-150 ease-in-out bg-white border border-gray-300 rounded-md hover:text-gray-500 focus:outline-none focus:border-blue-300 focus:shadow-outline-blue active:bg-gray-50 active:text-gray-800">
<BsPerson className="w-6 h-6" aria-hidden="true" />
<span className="ml-3 flex items-center text-sm font-medium whitespace-nowrap">
{!user
? "Mon compte"
: user?.firstname + " " + user?.lastname}
</span>
</Menu.Button>
</span>
<Transition
show={open}
enter="transition ease-out duration-100"
enterFrom="transform opacity-0 scale-95"
enterTo="transform opacity-100 scale-100"
leave="transition ease-in duration-75"
leaveFrom="transform opacity-100 scale-100"
leaveTo="transform opacity-0 scale-95"
>
<Menu.Items
static
className="absolute right-0 w-56 mt-2 origin-top-right bg-white border border-gray-200 divide-y divide-gray-100 rounded-md shadow-lg outline-none"
>
<div className="px-4 py-3">
<p className="text-sm leading-5">Connecté en tant que </p>
<p className="text-sm font-medium leading-5 text-gray-900 truncate">
{user?.email}
</p>
</div>
<div className="py-1">
<Menu.Item>
{({ active }) => (
<NavLink
to="/dashboard"
className={`${
active
? "bg-gray-100 text-gray-900"
: "text-gray-700"
} flex justify-between w-full px-4 py-2 text-sm leading-5 text-left`}
>
Gérer mes articles
</NavLink>
)}
</Menu.Item>
<Menu.Item>
{({ active }) => (
<NavLink
to="#support"
className={`${
active
? "bg-gray-100 text-gray-900"
: "text-gray-700"
} flex justify-between w-full px-4 py-2 text-sm leading-5 text-left`}
>
Parametres du compte
</NavLink>
)}
</Menu.Item>
<Menu.Item
as="span"
disabled
className="flex justify-between w-full px-4 py-2 text-sm leading-5 text-left text-gray-700 cursor-not-allowed opacity-50"
>
New feature (soon)
</Menu.Item>
<Menu.Item>
{({ active }) => (
<button
onClick={handleClear}
className={`${
active
? "bg-gray-100 text-gray-900"
: "text-gray-700"
} flex justify-around items-center w-full px-4 py-2 text-sm leading-5 text-left`}
>
{" "}
<CgLogOff className="w-6 h-6 text-red-600" /> Deconnexion
</button>
)}
</Menu.Item>
</div>
</Menu.Items>
</Transition>
</>
)}
</Menu>
</div>
</div>
);
}
export default Profil
|
import math, copy
import numpy as np
# Load the (tiny) training set: two houses.
# Units inferred from the prediction printout below: x is size in 1000s of
# sqft and y is price in $1000s — TODO confirm against the source lab.
x_train = np.array([1.0, 2.0]) #features
y_train = np.array([300.0, 500.0]) #target value
def compute_cost(x, y, w, b):
    """Squared-error cost for the linear model f(x) = w*x + b.

    Args:
        x (ndarray): m training inputs.
        y (ndarray): m target values.
        w, b (scalar): model parameters.

    Returns:
        float: (1 / (2m)) * sum of squared residuals over the training set.
    """
    m = x.shape[0]
    squared_error_sum = 0
    for xi, yi in zip(x, y):
        residual = (w * xi + b) - yi
        squared_error_sum = squared_error_sum + residual ** 2
    return 1 / (2 * m) * squared_error_sum
def compute_gradient(x, y, w, b):
    """Gradient of the squared-error cost w.r.t. w and b.

    Args:
        x (ndarray): m training inputs.
        y (ndarray): m target values.
        w, b (scalar): current model parameters.

    Returns:
        tuple: (dJ/dw, dJ/db), each averaged over the m examples.
    """
    m = x.shape[0]
    grad_w = 0
    grad_b = 0
    for i in range(m):
        # Residual of the current prediction for example i.
        err = (w * x[i] + b) - y[i]
        grad_w += err * x[i]
        grad_b += err
    return grad_w / m, grad_b / m
def gradient_descent(x, y, w_in, b_in, alpha, num_iters, cost_function, gradient_function):
    """Run `num_iters` steps of batch gradient descent.

    Args:
        x, y: training inputs/targets, forwarded to the callbacks.
        w_in, b_in (scalar): initial parameter values.
        alpha (float): learning rate.
        num_iters (int): number of update steps.
        cost_function: callable (x, y, w, b) -> scalar cost.
        gradient_function: callable (x, y, w, b) -> (dJ/dw, dJ/db).

    Returns:
        tuple: (w, b, cost_log, param_log) — final parameters plus the
        per-iteration history of cost and [w, b] pairs.
    """
    cost_log = []
    param_log = []
    w, b = w_in, b_in
    for i in range(num_iters):
        # Both gradients are evaluated at the current (w, b) before either update.
        dj_dw, dj_db = gradient_function(x, y, w, b)
        w, b = w - alpha * dj_dw, b - alpha * dj_db
        if i < 100000:  # cap the history to avoid unbounded memory on huge runs
            cost_log.append(cost_function(x, y, w, b))
            param_log.append([w, b])
        if i % math.ceil(num_iters / 10) == 0:  # log ~10 progress lines total
            print(f"Iteration {i:4}: Cost {cost_log[-1]:0.2e} ",
                  f"dj_dw: {dj_dw: 0.3e}, dj_db: {dj_db: 0.3e} ",
                  f"w: {w: 0.3e}, b:{b: 0.5e}")
    return w, b, cost_log, param_log
# initialize parameters (start from the origin)
w_init = 0
b_init = 0
# some gradient descent settings: 1e-2 is a stable learning rate for this
# tiny two-point dataset
iterations = 10000
tmp_alpha = 1.0e-2
# run gradient descent and keep the cost/parameter history for inspection
w_final, b_final, J_hist, p_hist = gradient_descent(x_train ,y_train, w_init, b_init, tmp_alpha,
                                                    iterations, compute_cost, compute_gradient)
print(f"(w,b) found by gradient descent: ({w_final:8.4f},{b_final:8.4f})")
# Predictions use x in 1000s of sqft; outputs are in $1000s (see labels below).
print(f"1000 sqft house prediction {w_final*1.0 + b_final:0.1f} Thousand dollars")
print(f"1200 sqft house prediction {w_final*1.2 + b_final:0.1f} Thousand dollars")
|
using System;
using System.Collections.Generic;
using System.Data.Entity;
using System.Data.SqlClient;
using System.Linq;
using System.Linq.Expressions;
namespace WebSite.DAL.SingletonPattern
{
/// <summary>
/// Generic Entity Framework data-access base class.
/// NOTE(review): AddEntity/DeleteEntity/EditEntity only stage state changes on
/// the context and always return true — no SaveChanges() is called here, so
/// persistence is assumed to happen elsewhere (unit-of-work style); confirm
/// against callers before relying on the boolean results.
/// </summary>
public abstract class BaseDal<T> where T : class, new()
{
    // Context obtained from the project's factory; one per DAL instance.
    DbContext m_dBContext = DbContextFactory.CreateDbContext();
    /// <summary>
    /// Stages <paramref name="entity"/> for insertion.
    /// </summary>
    /// <param name="entity">Entity to add.</param>
    /// <returns>Always true; does not indicate a completed database write.</returns>
    public bool AddEntity(T entity)
    {
        m_dBContext.Set<T>().Add(entity);
        return true;
    }
    /// <summary>
    /// Marks <paramref name="entity"/> as deleted on the context.
    /// </summary>
    /// <param name="entity">Entity to delete; must be attachable to the context.</param>
    /// <returns>Always true; does not indicate a completed database write.</returns>
    public bool DeleteEntity(T entity)
    {
        m_dBContext.Entry(entity).State = EntityState.Deleted;
        return true;
    }
    /// <summary>
    /// Marks <paramref name="entity"/> as modified so all its columns are updated.
    /// </summary>
    /// <param name="entity">Entity to update.</param>
    /// <returns>Always true; does not indicate a completed database write.</returns>
    public bool EditEntity(T entity)
    {
        m_dBContext.Entry(entity).State = EntityState.Modified;
        return true;
    }
    /// <summary>
    /// Returns a deferred (not yet executed) filtered query over the entity set.
    /// </summary>
    /// <param name="whereLambda">Filter predicate translated to SQL by EF.</param>
    /// <returns>An IQueryable that executes when enumerated.</returns>
    public IQueryable<T> LoadEntities(Expression<Func<T, bool>> whereLambda)
    {
        return m_dBContext.Set<T>().Where(whereLambda);
    }
    /// <summary>
    /// Pages a filtered, ordered query. Executes one COUNT query for
    /// <paramref name="totalCount"/>; the returned page query is deferred.
    /// </summary>
    /// <typeparam name="S">Type of the ordering key.</typeparam>
    /// <param name="pageIndex">1-based page number.</param>
    /// <param name="pageSize">Rows per page.</param>
    /// <param name="totalCount">Total rows matching the filter (out).</param>
    /// <param name="whereLambda">Filter predicate.</param>
    /// <param name="orderByLambda">Ordering key selector.</param>
    /// <param name="isAsc">True for ascending order, false for descending.</param>
    /// <returns>The requested page as a deferred query.</returns>
    public IQueryable<T> LoadPageEntities<S>(int pageIndex, int pageSize, out int totalCount, Expression<Func<T, bool>> whereLambda, Expression<Func<T, S>> orderByLambda, bool isAsc)
    {
        IQueryable<T> temp = LoadEntities(whereLambda);
        totalCount = temp.Count();
        if (isAsc)// ascending
        {
            temp = temp.OrderBy(orderByLambda).Skip((pageIndex - 1) * pageSize).Take(pageSize);
        }
        else
        {
            temp = temp.OrderByDescending(orderByLambda).Skip((pageIndex - 1) * pageSize).Take(pageSize);
        }
        return temp;
    }
    /// <summary>
    /// Executes a non-query SQL command.
    /// </summary>
    /// <param name="sql">SQL statement text.</param>
    /// <param name="pars">Command parameters.</param>
    /// <returns>True when at least one row was affected.</returns>
    public bool ExecuteSql(string sql, params object[] pars)
    {
        return m_dBContext.Database.ExecuteSqlCommand(sql, pars) > 0;
    }
    /// <summary>
    /// Executes a SQL query and returns the first result materialized as
    /// <typeparamref name="M"/>, or default(M) when the query yields no rows.
    /// </summary>
    /// <param name="sql">SQL query text.</param>
    /// <param name="pars">Query parameters.</param>
    /// <returns>First result row, or default(M) when empty.</returns>
    public M ExecuteQuery<M>(string sql, params SqlParameter[] pars)
    {
        M result = default(M);
        var dbRawSqlQuery = m_dBContext.Database.SqlQuery(typeof(M), sql, pars).AsQueryable();
        // Take only the first materialized row, if any.
        foreach (var item in dbRawSqlQuery)
        {
            result = (M)item;
            break;
        }
        return result;
    }
    /// <summary>
    /// Executes a SQL query and returns all rows as a queryable sequence of
    /// <typeparamref name="M"/>.
    /// </summary>
    /// <param name="sql">SQL query text.</param>
    /// <param name="pars">Query parameters.</param>
    /// <returns>Query results as IQueryable.</returns>
    public IQueryable<M> ExecuteQueryList<M>(string sql, params SqlParameter[] pars)
    {
        return m_dBContext.Database.SqlQuery<M>(sql, pars).AsQueryable();
    }
}
}
|
import React, {Component} from "react";
import {SocialLink} from "@/data/socialLinks";
import Link from "next/link";
// Props for one social link entry (destination, accessible name, icon).
interface SocialLinkItemProps {
  link: SocialLink;
}

/**
 * Renders a single social link as an icon-only anchor. The link's name is
 * exposed to screen readers via a visually hidden paragraph, while the icon
 * component (from the link data) provides the visual.
 */
class SocialLinkItem extends Component<SocialLinkItemProps> {
  // The original constructor only forwarded props to super(), which React
  // provides implicitly, so it has been removed.
  render() {
    const { href, name, icon } = this.props.link;
    return (
      <Link
        href={href}
        className="text-white hover:text-blue-500">
        <div>
          <p className="sr-only">{name}</p>
          {React.createElement(icon, {className: "h-8 w-8"})}
        </div>
      </Link>
    );
  }
}

export default SocialLinkItem;
|
unit Dialogs.CustomDialog;
interface
uses
  Winapi.Windows,
  Winapi.Messages,
  System.SysUtils,
  System.Variants,
  System.Classes,
  Vcl.Graphics,
  Vcl.Controls,
  Vcl.Forms,
  Vcl.Dialogs,
  Vcl.StdCtrls,
  Vcl.ExtCtrls;
type
  /// Basic type of all dialog forms, please do not instantiate.
  /// Inheritance tree: TCustomDialog --> TForm
  /// Concrete descendants must override the abstract ShowDlg class functions.
  TCustomDialog = class(TForm)
    ButtonsBgPanel: TPanel;   // background strip hosting the button row
    BtnBevel: TBevel;
    ButtonsPanel: TPanel;     // holds the actual buttons; centered in FormShow
    InformationPanel: TPanel;
    MessageLabel: TLabel;     // displays MessageText
    procedure FormShow(Sender: TObject);
  private
    { Private declarations }
    function GetMessageText: String;
    procedure SetMessageText(const Value: String);
  public
    { Public declarations }
    // Abstract factory-style entry points: show the dialog modally with the
    // given message and optional explicit size; implemented by descendants.
    class function ShowDlg(const AMessage: String): TModalResult; overload; virtual; abstract;
    class function ShowDlg(const AMessage: String; const AHeight: Integer): TModalResult; overload; virtual; abstract;
    class function ShowDlg(const AMessage: String; const AHeight, AWidth: Integer): TModalResult; overload; virtual; abstract;
    // Text shown in MessageLabel.
    property MessageText: String read GetMessageText write SetMessageText;
  end;
implementation
{$R *.dfm}
{ TCustomDialog }
// Center the button panel horizontally inside its background panel each time
// the dialog is shown (handles dialogs resized by descendants).
procedure TCustomDialog.FormShow(Sender: TObject);
begin
  ButtonsPanel.Left := (ButtonsBgPanel.ClientWidth - ButtonsPanel.Width) div 2;
end;
// Property accessor: MessageText mirrors the label caption.
function TCustomDialog.GetMessageText: String;
begin
  Result := MessageLabel.Caption;
end;
procedure TCustomDialog.SetMessageText(const Value: String);
begin
  MessageLabel.Caption := Value;
end;
end.
|
<template>
  <!-- App shell: dark navbar with auth-aware links, plus the routed view. -->
  <div id="app">
    <nav class="navbar navbar-expand-lg navbar-dark bg-dark">
      <div class="container">
        <router-link :to="{ name: 'main' }" class="navbar-brand">Main Page</router-link>
        <button class="navbar-toggler" type="button" data-toggle="collapse" data-target="#navbarNav"
          aria-controls="navbarNav" aria-expanded="false" aria-label="Toggle navigation">
          <span class="navbar-toggler-icon"></span>
        </button>
        <div class="collapse navbar-collapse" id="navbarNav">
          <ul class="navbar-nav ml-auto">
            <li class="nav-item">
              <router-link :to="{ name: 'search' }" class="nav-link">Search</router-link>
            </li>
            <li class="nav-item">
              <router-link :to="{ name: 'about' }" class="nav-link">About</router-link>
            </li>
            <!-- NOTE(review): Favorites is shown only when NO user is logged in,
                 yet the logged-in dropdown below also links to Favorites —
                 confirm this v-if isn't inverted. -->
            <li v-if="!$root.store.username" class="nav-item">
              <router-link :to="{ name: 'favorites' }" class="nav-link">Favorites</router-link>
            </li>
            <li v-if="!$root.store.username" class="nav-item">
              <router-link :to="{ name: 'register' }" class="nav-link">Register</router-link>
            </li>
            <li v-if="!$root.store.username" class="nav-item">
              <router-link :to="{ name: 'login' }" class="nav-link">Login</router-link>
            </li>
            <!-- Personal dropdown menu (logged-in users only); open state is
                 tracked locally in dropdownOpen rather than by Bootstrap JS. -->
            <li v-else class="nav-item dropdown">
              <a class="nav-link dropdown-toggle" href="#" role="button" @click="toggleDropdown" aria-haspopup="true" aria-expanded="false">
                Personal
              </a>
              <div class="dropdown-menu" :class="{ show: dropdownOpen }" aria-labelledby="navbarDropdown">
                <router-link :to="{ name: 'favorites' }" class="dropdown-item">My Favorites</router-link>
                <div class="dropdown-divider"></div>
                <router-link :to="{ name: 'myrecipes' }" class="dropdown-item">My Recipes</router-link>
                <div class="dropdown-divider"></div>
                <router-link :to="{ name: 'family' }" class="dropdown-item">My Family Recipes</router-link>
                <div class="dropdown-divider"></div>
                <!-- Opens the RecipeModal below via BootstrapVue's modal directive. -->
                <b-button v-b-modal.modal-prevent-closing class="dropdown-item">Create Recipe</b-button>
              </div>
            </li>
            <!-- User info and logout -->
            <li v-if="$root.store.username" class="nav-item user-info">
              <span class="navbar-text">Logged in as: {{ $root.store.username }}</span>
              <button class="btn btn-primary ml-2" @click="logout">Logout</button>
            </li>
          </ul>
        </div>
        <RecipeModal></RecipeModal>
      </div>
    </nav>
    <div class="container py-4">
      <router-view />
    </div>
  </div>
</template>
<script>
import RecipeModal from "./components/RecipeModal.vue";
// Root component: navbar + routed views. Auth state is read from the root
// instance's store ($root.store.username) rather than from local state.
export default {
  name: "App",
  components: {
    RecipeModal,
  },
  data() {
    return {
      // Whether the "Personal" dropdown is expanded.
      dropdownOpen: false,
    };
  },
  methods: {
    // Method to toggle the dropdown
    toggleDropdown() {
      this.dropdownOpen = !this.dropdownOpen;
    },
    // NOTE(review): logout is an empty stub here — the Logout button in the
    // template will do nothing until the real implementation is restored.
    logout() {
      // ... Existing logout method ...
    },
  },
};
</script>
<style lang="scss" scoped>
/* Dark-themed background */
html {
background-color: #1a1a1a;
color: #fff;
}
/* Navbar */
.navbar {
background-color: #222;
}
/* Navbar brand and links */
.navbar-brand,
.navbar-nav .nav-link {
color: #fff;
}
.navbar-brand:hover,
.navbar-nav .nav-link:hover {
color: #00bfff;
}
/* Active link */
.navbar-nav .active > .nav-link {
color: #00bfff;
}
/* Personal dropdown menu */
.personal-dropdown-button {
color: #fff;
background-color: #222;
border-color: #00bfff;
}
/* Divider */
.navbar-divider {
border-color: #00bfff;
}
/* User info and logout */
.user-info .btn {
background-color: #00bfff;
border-color: #00bfff;
color: #fff;
}
.user-info .btn:hover {
background-color: #0099cc;
border-color: #0099cc;
color: #fff;
}
/* Recipe Modal */
.modal-content {
background-color: #222;
color: #fff;
}
</style>
|
---
title: UNICEF AI4D Relative Wealth
---
Funded by [UNICEF as part of the AI for Development (AI4D) program](https://stories.thinkingmachin.es/unicef-ai4d-research-bank/), this project explored the use of purely open datasets and low-cost Machine Learning models for poverty mapping estimation across 9 Southeast Asian countries:
- Cambodia
- Myanmar
- Philippines
- Timor-Leste
- Indonesia
- Laos
- Malaysia
- Thailand
- Vietnam
We hope that our work can serve as a baseline for other Data Scientists to further AI x Development research in the region.
# Quick links
- [Web Map Demo](https://studio.foursquare.com/public/590c4ab2-b855-463a-90dd-9917b97b8b57) of our wealth estimates.
- [GitHub Repo](https://github.com/thinkingmachines/unicef-ai4d-poverty-mapping) containing all code needed to replicate model training and rollout for all 9 SEA countries
- [Google Drive Folder](https://drive.google.com/drive/u/0/folders/1QX0xJc6MHxY7dzIsVMDm5TH0F-NwXhBW) that contains all output data and intermediate artifacts (e.g. training datasets, models)
- *Note: These artifacts are also hosted on the [AI for Development (AI4D) ML Web Catalog](https://thinkingmachines.github.io/unicef-ai4d-research-bank) website.*
# Replicating our work
If you want to replicate the final models and wealth estimates, go to the following notebook folders:
- [2023-02-21-single-country-rollouts](https://github.com/thinkingmachines/unicef-ai4d-poverty-mapping/tree/main/notebooks/2023-02-21-single-country-rollouts)
- [2023-02-21-cross-country-rollouts](https://github.com/thinkingmachines/unicef-ai4d-poverty-mapping/tree/main/notebooks/2023-02-21-cross-country-rollouts)
These provide the code necessary to replicate our work end-to-end. The earlier notebooks are more experimental as we were developing the methodology.
Note that to run them properly, you need to get DHS data yourself and set up your local environment. Just follow the instructions in our [repo's README](https://github.com/thinkingmachines/unicef-ai4d-poverty-mapping).
# Using Docker
We have provided a [docker image](https://github.com/butchtm/unicef-ai4d-poverty-mapping/pkgs/container/povmap-jupyter) to make it easy to run the Jupyter environment we used to build the models, as well as to roll out the country-wide results.
You can view our instructions on how to run the docker images [here](https://github.com/thinkingmachines/unicef-ai4d-poverty-mapping/tree/main#running-in-docker).
<!-- ## Methodology Summary
The exact method and details are better seen in the notebooks. But this diagram summarizes it conceptually. Essentially, we train a model and we use it to estimate wealth across a whole country. -->
# Summary of Findings
This section contains a brief summary of our overall main findings. The other pages in this site contain documentation for each of our sprints, reporting on our progress as they happened.
## DHS Countries: Cambodia, Myanmar, Philippines, Timor-Leste
Training models and using them to produce wealth estimates for countries with DHS survey data are relatively straightforward. We generate features to characterize the neighborhood of the DHS clusters, which we then use to train a model.
The table below shows R^2 values after Repeated 5-fold Cross Validation (5 Repeats; 25 runs in total).
| Country | # Samples | Repeated 5-fold CV |
|-------------|----------:|-------------------:|
| Cambodia | 611 | 0.70 (±0.06) |
| Myanmar | 441 | 0.50 (±0.09) |
| Philippines | 1,213 | 0.57 (±0.03) |
| Timor-Leste | 455 | 0.60 (±0.07) |
| Average | | 0.59 |
## Non-DHS Countries: Indonesia, Laos, Malaysia, Thailand, Vietnam
For countries with no usable DHS data for training (either data doesn't exist, no lat/lon coordinates, or data is too old), it wasn't as straightforward to train models for these.
The main challenges were:
### How can we utilize data from the 4 DHS countries to train a model that we can use for these 5 non-DHS countries?
The first idea might be to simply combine all the data from each country into one big dataset.
The problem here is that both the target variable (the wealth index) and the feature values are not directly comparable between countries:
- Wealth Index
- DHS-calculated relative wealth score based on asset ownership
- The first principal component after running PCA on asset-related columns/values (e.g. # of cars, # of rooms, etc). In effect, this is a single score that summarizes the other columns and represents the "asset wealth" of a household.
- The PCA procedure to calculate this is based on different asset-related columns/values for different countries.
- Features
- Features may be interpreted differently from country to country.
- For example, 5mbps internet speed might be considered slow for Country A, while it might be considered fast for Country B.
- If we used raw values directly, the model might be confused because the mapping of features to relative wealth is different. That is, 5mbps might be correlated with relatively poorer households in Country A, while it might be correlated with relatively wealthier households in Country B.
We found that a simple solution was to normalize the features and the wealth index through a MinMaxScaler. The table below shows results that support this hypothesis. To validate this methodology, we performed leave-one-country-out cross-validation wherein we utilize one country as the test set and use the other 3 as the train set.
| Test Country | # Samples | Raw Features & Wealth Index | StandardScaler for Features & Wealth Index | MinMaxScaler for Features & Wealth Index |
|--------------|:---------:|:---------------------------:|--------------------------------------------|------------------------------------------|
| Cambodia | 611 | -0.19 | 0.48 | 0.53 |
| Myanmar | 441 | 0.62 | 0.58 | 0.49 |
| Philippines | 1,213 | 0.37 | 0.54 | 0.36 |
| Timor-Leste | 455 | 0.46 | 0.51 | 0.49 |
| Average | | 0.32 | 0.52 | 0.47 |
Note that we tried other scalers and other methods like re-calculating the wealth indices, but the MinMaxScaler proved to be the best approach in the end. Read more about it in our [Sprint 2](https://thinkingmachines.github.io/unicef-ai4d-poverty-mapping/cross-country-report.html) and [Sprint 3](https://thinkingmachines.github.io/unicef-ai4d-poverty-mapping/sprint-3-report.html) reports.
### How do we validate if the produced wealth estimates for these 5 countries are sensible?
Our leave-one-country-out cross-validation procedure simulates the scenario of training on multiple countries and utilizing it on one that it hasn't seen during training.
Our next step is to train a final model on all 4 countries' data, and use it on the 5 non-DHS countries. However, how do we check if the results are actually sensible in the absence of ground truth?
(a) Qualitatively, we can see that the results intuitively match known wealthier areas in the countries (e.g. the capitals and major cities).

(b) Quantitatively, we calculate the agreement between province/city rankings according to our model estimates and some reference wealth-related data. These are not apple-to-apple comparisons since the wealth data are not exactly the same, but they should give us some level of confidence that our estimates aren't in total disagreement with other government data.
| Country | Reference Data | Admin Level | N | Spearman Rank Correlation |
|-----------|---------------------------------------|:---------------:|----:|--------------------------:|
| Indonesia | 2018-2019 SUSENAS-derived Relative Wealth Index | City / District | 514 | 0.72 |
| Laos | 2017 UNICEF MICS-derived International Wealth Index | Province | 18 | 0.84 |
| Malaysia | 2016 Household Expenditure Survey (Mean) | State | 16 | 0.76 |
| Average | | | | 0.77 |
|
import 'package:carousel_slider/carousel_slider.dart';
import 'package:flutter/cupertino.dart';
import 'package:flutter/material.dart';
import 'package:flutter/widgets.dart';
import 'package:get/get.dart';
import 'home_controller.dart';
import '../../data/announcement_data.dart';
/// Root screen of the credential wallet app: an announcement carousel followed
/// by the same announcements rendered as a flat card list.
class HomeScreen extends StatefulWidget {
  const HomeScreen({super.key});
  @override
  State<HomeScreen> createState() => _HomeScreenState();
}
/// State for [HomeScreen]; owns the scrollable layout and the (currently
/// disabled) search bar behavior.
class _HomeScreenState extends State<HomeScreen> {
  @override
  Widget build(BuildContext context) {
    return Scaffold(
        appBar: AppBar(
          title: Text("Credential Wallet"),
        ),
        body: SingleChildScrollView(
            child: Column(
          children: [
            //searchBar(context),
            announcement(context),
            SizedBox(height: 20),
            prefersCards(context),
          ],
        )));
  }

  /// Search field with a Cancel button that appears while a query is active.
  /// Currently unused — see the commented-out call in [build].
  Widget searchBar(BuildContext context) {
    final homeController = Get.put(HomeController());
    final colorScheme = Theme.of(context).colorScheme;
    return Row(
      children: [
        Expanded(
          child: Padding(
            padding: const EdgeInsets.all(8.0),
            child: TextField(
              onChanged: (value) {
                // Any non-empty query switches the controller into "active"
                // mode, which reveals the Cancel button below.
                if (value.isNotEmpty) {
                  homeController.setSearchActive(true);
                } else {
                  homeController.setSearchActive(false);
                }
                print('searchActive: ${homeController.searchActive}');
              },
              decoration: InputDecoration(
                filled: true,
                fillColor: colorScheme.surfaceVariant,
                contentPadding:
                    const EdgeInsets.symmetric(horizontal: 16, vertical: 8),
                //isDense: true,
                hintText: 'Search',
                prefixIcon: const Icon(Icons.search),
                border: OutlineInputBorder(
                  borderRadius: BorderRadius.all(Radius.circular(12)),
                  borderSide: BorderSide.none,
                ),
              ),
            ),
          ),
        ),
        GetX<HomeController>(
          builder: (controller) {
            return Visibility(
              visible: controller.searchActive,
              child: TextButton(
                onPressed: () {
                  controller.setSearchActive(false);
                  print('searchActive: ${controller.searchActive}');
                },
                child: Text("Cancel"),
              ),
            );
          },
        ),
      ],
    );
  }

  /// Auto-playing carousel of announcements. Card colors cycle through a
  /// fixed palette keyed by item index. (Renamed from the misspelled
  /// "annoucement"; sole caller is [build], updated above.)
  Widget announcement(BuildContext context) {
    const fgColor = Colors.white;
    final colors = <Color>[
      Colors.blue.shade400,
      Colors.green.shade400,
      Colors.purple.shade400,
    ];
    final textTheme = Theme.of(context).textTheme;
    return CarouselSlider(
      options: CarouselOptions(
          height: 200.0,
          autoPlay: true,
          autoPlayInterval: const Duration(seconds: 10)),
      items: [
        // Index-based coloring: the original shared a mutable counter across
        // the lazy Builder closures, making colors depend on builder
        // invocation order; indexing each item is deterministic.
        for (var idx = 0; idx < announcements.length; idx++)
          Builder(
            builder: (BuildContext context) {
              final item = announcements[idx];
              final color = colors[idx % colors.length];
              return Container(
                  width: MediaQuery.of(context).size.width,
                  margin: EdgeInsets.symmetric(horizontal: 5.0),
                  decoration: BoxDecoration(
                    color: color,
                    //image: DecorationImage(
                    //  image: NetworkImage(item.imageUrl!),
                    //  fit: BoxFit.cover,
                    //),
                    borderRadius: BorderRadius.circular(24),
                  ),
                  child: Padding(
                    padding: const EdgeInsets.all(16.0),
                    child: Column(
                      crossAxisAlignment: CrossAxisAlignment.start,
                      children: [
                        Text(
                          '${item.title}',
                          style: TextStyle(
                              fontSize: textTheme.titleMedium!.fontSize,
                              color: fgColor),
                        ),
                        SizedBox(height: 8),
                        Text(
                          '${item.description}',
                          style: TextStyle(
                              fontSize: textTheme.bodySmall!.fontSize,
                              color: fgColor),
                        ),
                      ],
                    ),
                  ));
            },
          ),
      ],
    );
  }

  /// Flat list of the same announcements as Material cards.
  Widget prefersCards(BuildContext context) {
    return Column(
      children: announcements.map((i) {
        return Padding(
          padding: const EdgeInsets.symmetric(horizontal: 16.0),
          child: Card(
            child: ListTile(
              title: Text(i.title),
              subtitle: Text(i.description),
            ),
          ),
        );
      }).toList(),
    );
  }
}
|
import { useEffect, useState } from 'react'
import Formulario from "./components/Formulario"
import Header from "./components/Header"
import ListadoPacientes from "./components/ListadoPacientes"
function App() {
const [patients, setPatients] = useState([])
const [patient, setPatient] = useState({})
useEffect(() => {
if(localStorage.getItem('patients')) {
setPatients(JSON.parse(localStorage.getItem('patients')))
}
}, [])
useEffect(() => {
localStorage.setItem('patients', JSON.stringify(patients))
}, [patients])
const deletePatient = (id) => {
setPatients(patients.filter(patientState => patientState.id !== id))
}
return (
<div className="container mx-auto mt-20">
<Header />
<div className="mt-12 md:flex">
<Formulario patients={patients} patient={patient} setPatients= {setPatients} />
<ListadoPacientes patients={patients} setPatient={setPatient} deletePatient={deletePatient} />
</div>
</div>
)
}
export default App
|
## Teradata SQL Driver for R
This package enables R applications to connect to the Teradata Database.
This package implements the [DBI Specification](https://dbi.r-dbi.org/).
This package requires 64-bit R 3.4.3 or later, and runs on Windows, macOS, and Linux. 32-bit R is not supported.
For community support, please visit [Teradata Community](https://support.teradata.com/community).
For Teradata customer support, please visit [Teradata Customer Service](https://support.teradata.com/).
Please note, this driver may contain beta/preview features ("Beta Features"). As such, by downloading and/or using the driver, in addition to agreeing to the licensing terms below, you acknowledge that the Beta Features are experimental in nature and that the Beta Features are provided "AS IS" and may not be functional on any machine or in any environment.
Copyright 2023 Teradata. All Rights Reserved.
### Table of Contents
* [Features](#Features)
* [Limitations](#Limitations)
* [Installation](#Installation)
* [License](#License)
* [Documentation](#Documentation)
* [Sample Programs](#SamplePrograms)
* [Using the Driver](#Using)
* [Connection Parameters](#ConnectionParameters)
* [COP Discovery](#COPDiscovery)
* [Stored Password Protection](#StoredPasswordProtection)
* [Client Attributes](#ClientAttributes)
* [User STARTUP SQL Request](#UserStartup)
* [Transaction Mode](#TransactionMode)
* [Auto-Commit](#AutoCommit)
* [Data Types](#DataTypes)
* [Null Values](#NullValues)
* [Character Export Width](#CharacterExportWidth)
* [Constructors](#Constructors)
* [Driver Methods](#DriverMethods)
* [Connection Methods](#ConnectionMethods)
* [Result Methods](#ResultMethods)
* [Escape Syntax](#EscapeSyntax)
* [FastLoad](#FastLoad)
* [FastExport](#FastExport)
* [CSV Batch Inserts](#CSVBatchInserts)
* [CSV Export Results](#CSVExportResults)
* [Change Log](#ChangeLog)
<a name="Features"></a>
### Features
The *Teradata SQL Driver for R* is a DBI Driver that enables R applications to connect to the Teradata Database. The driver implements the [DBI Specification](https://dbi.r-dbi.org/).
The driver is a young product that offers a basic feature set. We are working diligently to add features to the driver, and our goal is feature parity with the Teradata JDBC Driver.
At the present time, the driver offers the following features.
* Supported for use with Teradata Database 16.10 and later releases.
* COP Discovery.
* Laddered Concurrent Connect.
* HTTPS/TLS connections with Teradata SQL Engine 16.20.53.30 and later.
* Encrypted logon using the `TD2`, `JWT`, `LDAP`, `KRB5` (Kerberos), or `TDNEGO` logon mechanisms.
* Data encryption governed by central administration, or enabled via the `encryptdata` connection parameter.
* Unicode character data transferred via the UTF8 session character set.
* Auto-commit for ANSI and TERA transaction modes.
* 1 MB rows supported with Teradata Database 16.0 and later.
* Multi-statement requests that return multiple result sets.
* Most JDBC escape syntax.
* Parameterized SQL requests with question-mark parameter markers.
* Parameterized batch SQL requests with multiple rows of data bound to question-mark parameter markers.
* Complex data types such as `XML`, `JSON`, `DATASET STORAGE FORMAT AVRO`, and `DATASET STORAGE FORMAT CSV`.
* ElicitFile protocol support for DDL commands that create external UDFs or stored procedures and upload a file from client to database.
* `CREATE PROCEDURE` and `REPLACE PROCEDURE` commands.
* Stored Procedure Dynamic Result Sets.
* FastLoad and FastExport.
<a name="Limitations"></a>
### Limitations
* The UTF8 session character set is always used. The `charset` connection parameter is not supported.
* No support yet for Recoverable Network Protocol and Redrive.
* Monitor partition support is not available yet.
<a name="Installation"></a>
### Installation
The driver contains binary code and cannot be offered from [CRAN](https://cran.r-project.org/). The driver is available from Teradata's R package repository.
The driver depends on the `bit64`, `DBI`, `digest`, and `hms` packages which are available from CRAN.
To download and install dependencies automatically, specify the Teradata R package repository and CRAN in the `repos` argument for `install.packages`.
Rscript -e "install.packages('teradatasql',repos=c('https://r-repo.teradata.com','https://cloud.r-project.org'))"
<a name="License"></a>
### License
Use of the driver is governed by the *License Agreement for the Teradata SQL Driver for R*.
When the driver is installed, the `LICENSE` and `THIRDPARTYLICENSE` files are placed in the `teradatasql` directory under your R library directory. The following command prints the location of the `teradatasql` directory.
Rscript -e "find.package('teradatasql')"
In addition to the license terms, the driver may contain beta/preview features ("Beta Features"). As such, by downloading and/or using the driver, in addition to the licensing terms, you acknowledge that the Beta Features are experimental in nature and that the Beta Features are provided "AS IS" and may not be functional on any machine or in any environment.
<a name="Documentation"></a>
### Documentation
When the driver is installed, the `README.md` file is placed in the `teradatasql` directory under your R library directory. This permits you to view the documentation offline, when you are not connected to the Internet. The following command prints the location of the `teradatasql` directory.
Rscript -e "find.package('teradatasql')"
The `README.md` file is a plain text file containing the documentation for the driver. While the file can be viewed with any text file viewer or editor, your viewing experience will be best with an editor that understands Markdown format.
<a name="SamplePrograms"></a>
### Sample Programs
Sample programs are provided to demonstrate how to use the driver. When the driver is installed, the sample programs are placed in the `teradatasql/samples` directory under your R library directory.
The sample programs are coded with a fake database hostname `whomooz`, username `guest`, and password `please`. Substitute your actual database hostname and credentials before running a sample program.
Program | Purpose
--------------------------------------------------------------------------------------------------- | ---
[batchinsertcsv.R](https://github.com/Teradata/r-driver/blob/master/samples/batchinsertcsv.R) | Demonstrates how to insert a batch of rows from a CSV file
[charpadding.R](https://github.com/Teradata/r-driver/blob/master/samples/charpadding.R) | Demonstrates the database's *Character Export Width* behavior
[commitrollback.R](https://github.com/Teradata/r-driver/blob/master/samples/commitrollback.R) | Demonstrates dbBegin, dbCommit, and dbRollback methods
[exportcsvresult.R](https://github.com/Teradata/r-driver/blob/master/samples/exportcsvresult.R) | Demonstrates how to export a query result set to a CSV file
[exportcsvresults.R](https://github.com/Teradata/r-driver/blob/master/samples/exportcsvresults.R) | Demonstrates how to export multiple query result sets to CSV files
[fakeexportcsvresults.R](https://github.com/Teradata/r-driver/blob/master/samples/fakeexportcsvresults.R) | Demonstrates how to export multiple query result sets with the metadata to CSV files
[fakeresultsetcon.R](https://github.com/Teradata/r-driver/blob/master/samples/fakeresultsetcon.R) | Demonstrates connection parameter for fake result sets
[fakeresultsetesc.R](https://github.com/Teradata/r-driver/blob/master/samples/fakeresultsetesc.R) | Demonstrates escape function for fake result sets
[fastexportcsv.R](https://github.com/Teradata/r-driver/blob/master/samples/fastexportcsv.R) | Demonstrates how to FastExport rows from a table to a CSV file
[fastexporttable.R](https://github.com/Teradata/r-driver/blob/master/samples/fastexporttable.R) | Demonstrates how to FastExport rows from a table
[fastloadbatch.R](https://github.com/Teradata/r-driver/blob/master/samples/fastloadbatch.R) | Demonstrates how to FastLoad batches of rows
[fastloadcsv.R](https://github.com/Teradata/r-driver/blob/master/samples/fastloadcsv.R) | Demonstrates how to FastLoad batches of rows from a CSV file
[fetchmsr.R](https://github.com/Teradata/r-driver/blob/master/samples/fetchmsr.R) | Demonstrates fetching results from a multi-statement request
[fetchperftest.R](https://github.com/Teradata/r-driver/blob/master/samples/fetchperftest.R) | Measures time to fetch rows from a large result set
[fetchsp.R](https://github.com/Teradata/r-driver/blob/master/samples/fetchsp.R) | Demonstrates fetching results from a stored procedure
[insertdate.R](https://github.com/Teradata/r-driver/blob/master/samples/insertdate.R) | Demonstrates how to insert R Date values into a temporary table
[insertdifftime.R](https://github.com/Teradata/r-driver/blob/master/samples/insertdifftime.R) | Demonstrates how to insert R difftime values into a temporary table
[inserthms.R](https://github.com/Teradata/r-driver/blob/master/samples/inserthms.R) | Demonstrates how to insert R hms values into a temporary table
[insertinteger.R](https://github.com/Teradata/r-driver/blob/master/samples/insertinteger.R) | Demonstrates how to insert R integer values into a temporary table
[insertnumeric.R](https://github.com/Teradata/r-driver/blob/master/samples/insertnumeric.R) | Demonstrates how to insert R numeric values into a temporary table
[insertposixct.R](https://github.com/Teradata/r-driver/blob/master/samples/insertposixct.R) | Demonstrates how to insert R POSIXct values into a temporary table
[insertposixlt.R](https://github.com/Teradata/r-driver/blob/master/samples/insertposixlt.R) | Demonstrates how to insert R POSIXlt values into a temporary table
[insertraw.R](https://github.com/Teradata/r-driver/blob/master/samples/insertraw.R) | Demonstrates how to insert R raw values into a temporary table
[inserttime.R](https://github.com/Teradata/r-driver/blob/master/samples/inserttime.R) | Demonstrates how to insert teradatasql TimeWithTimeZone, Timestamp, and TimestampWithTimeZone values into a temporary table
[insertxml.R](https://github.com/Teradata/r-driver/blob/master/samples/insertxml.R) | Demonstrates how to insert and retrieve XML values
[TJEncryptPassword.R](https://github.com/Teradata/r-driver/blob/master/samples/TJEncryptPassword.R) | Creates encrypted password files
<a name="Using"></a>
### Using the Driver
Your R script calls the `DBI::dbConnect` function to open a connection to the database.
You may specify connection parameters as a JSON string, as named arguments, or using a combination of the two approaches. The `DBI::dbConnect` function's first argument is an instance of `teradatasql::TeradataDriver`. The `DBI::dbConnect` function's second argument is an optional JSON string. The `DBI::dbConnect` function's third and subsequent arguments are optional named arguments.
Connection parameters specified only as named arguments:
con <- DBI::dbConnect(teradatasql::TeradataDriver(), host="whomooz", user="guest", password="please")
Connection parameters specified only as a JSON string:
con <- DBI::dbConnect(teradatasql::TeradataDriver(), '{"host":"whomooz","user":"guest","password":"please"}')
Connection parameters specified using a combination:
con <- DBI::dbConnect(teradatasql::TeradataDriver(), '{"host":"whomooz"}', user="guest", password="please")
When a combination of parameters are specified, connection parameters specified as named arguments take precedence over same-named connection parameters specified in the JSON string.
<a name="ConnectionParameters"></a>
### Connection Parameters
The following table lists the connection parameters currently offered by the driver. Connection parameter values are case-sensitive unless stated otherwise.
Our goal is consistency for the connection parameters offered by this driver and the Teradata JDBC Driver, with respect to connection parameter names and functionality. For comparison, Teradata JDBC Driver connection parameters are [documented here](https://downloads.teradata.com/doc/connectivity/jdbc/reference/current/jdbcug_chapter_2.html#BGBHDDGB).
Parameter | Default | Type | Description
----------------------- | ----------- | -------------- | ---
`account` | | string | Specifies the database account. Equivalent to the Teradata JDBC Driver `ACCOUNT` connection parameter.
`browser` | | string | Specifies the command to open the browser for Browser Authentication, when `logmech` is `BROWSER`. Browser Authentication is supported for Windows and macOS. Equivalent to the Teradata JDBC Driver `BROWSER` connection parameter.<br/>The specified command must include a placeholder token, literally specified as `PLACEHOLDER`, which the driver will replace with the Identity Provider authorization endpoint URL. The `PLACEHOLDER` token is case-sensitive and must be specified in uppercase.<br/>• On Windows, the default command is `cmd /c start "title" "PLACEHOLDER"`. Windows command syntax requires the quoted title to precede the quoted URL.<br/>• On macOS, the default command is `open PLACEHOLDER`. macOS command syntax does not allow the URL to be quoted.
`browser_tab_timeout` | `"5"` | quoted integer | Specifies the number of seconds to wait before closing the browser tab after Browser Authentication is completed. The default is 5 seconds. The behavior is under the browser's control, and not all browsers support automatic closing of browser tabs. Typically, the tab used to log on will remain open indefinitely, but the second and subsequent tabs will be automatically closed. Specify `0` (zero) to close the tab immediately. Specify `-1` to turn off automatic closing of browser tabs. Browser Authentication is supported for Windows and macOS. Equivalent to the Teradata JDBC Driver `BROWSER_TAB_TIMEOUT` connection parameter.
`browser_timeout` | `"180"` | quoted integer | Specifies the number of seconds that the driver will wait for Browser Authentication to complete. The default is 180 seconds (3 minutes). Browser Authentication is supported for Windows and macOS. Equivalent to the Teradata JDBC Driver `BROWSER_TIMEOUT` connection parameter.
`column_name` | `"false"` | quoted boolean | Controls the `name` column returned by `DBI::dbColumnInfo`. Equivalent to the Teradata JDBC Driver `COLUMN_NAME` connection parameter. False specifies that the returned `name` column provides the AS-clause name if available, or the column name if available, or the column title. True specifies that the returned `name` column provides the column name if available, but has no effect when StatementInfo parcel support is unavailable.
`connect_failure_ttl` | `"0"` | quoted integer | Specifies the time-to-live in seconds to remember the most recent connection failure for each IP address/port combination. The driver subsequently skips connection attempts to that IP address/port for the duration of the time-to-live. The default value of zero disables this feature. The recommended value is half the database restart time. Equivalent to the Teradata JDBC Driver `CONNECT_FAILURE_TTL` connection parameter.
`connect_function` | `"0"` | quoted integer | Specifies whether the database should allocate a Logon Sequence Number (LSN) for this session, or associate this session with an existing LSN. Specify `0` for a session with no LSN (the default). Specify `1` to allocate a new LSN for the session. Specify `2` to associate the session with the existing LSN identified by the `logon_sequence_number` connection parameter. The database only permits sessions for the same user to share an LSN. Equivalent to the Teradata JDBC Driver `CONNECT_FUNCTION` connection parameter.
`connect_timeout` | `"10000"` | quoted integer | Specifies the timeout in milliseconds for establishing a TCP socket connection. Specify `0` for no timeout. The default is 10 seconds (10000 milliseconds).
`cop` | `"true"` | quoted boolean | Specifies whether COP Discovery is performed. Equivalent to the Teradata JDBC Driver `COP` connection parameter.
`coplast` | `"false"` | quoted boolean | Specifies how COP Discovery determines the last COP hostname. Equivalent to the Teradata JDBC Driver `COPLAST` connection parameter. When `coplast` is `false` or omitted, or COP Discovery is turned off, then no DNS lookup occurs for the coplast hostname. When `coplast` is `true`, and COP Discovery is turned on, then a DNS lookup occurs for a coplast hostname.
`database` | | string | Specifies the initial database to use after logon, instead of the user's default database. Equivalent to the Teradata JDBC Driver `DATABASE` connection parameter.
`dbs_port` | `"1025"` | quoted integer | Specifies the database port number. Equivalent to the Teradata JDBC Driver `DBS_PORT` connection parameter.
`encryptdata` | `"false"` | quoted boolean | Controls encryption of data exchanged between the driver and the database. Equivalent to the Teradata JDBC Driver `ENCRYPTDATA` connection parameter.
`error_query_count` | `"21"` | quoted integer | Specifies how many times the driver will attempt to query FastLoad Error Table 1 after a FastLoad operation. Equivalent to the Teradata JDBC Driver `ERROR_QUERY_COUNT` connection parameter.
`error_query_interval` | `"500"` | quoted integer | Specifies how many milliseconds the driver will wait between attempts to query FastLoad Error Table 1. Equivalent to the Teradata JDBC Driver `ERROR_QUERY_INTERVAL` connection parameter.
`error_table_1_suffix` | `"_ERR_1"` | string | Specifies the suffix for the name of FastLoad Error Table 1. Equivalent to the Teradata JDBC Driver `ERROR_TABLE_1_SUFFIX` connection parameter.
`error_table_2_suffix` | `"_ERR_2"` | string | Specifies the suffix for the name of FastLoad Error Table 2. Equivalent to the Teradata JDBC Driver `ERROR_TABLE_2_SUFFIX` connection parameter.
`error_table_database` | | string | Specifies the database name for the FastLoad error tables. By default, FastLoad error tables reside in the same database as the destination table being loaded. Equivalent to the Teradata JDBC Driver `ERROR_TABLE_DATABASE` connection parameter.
`fake_result_sets` | `"false"` | quoted boolean | Controls whether a fake result set containing statement metadata precedes each real result set.
`field_quote` | `"\""` | string | Specifies a single character string used to quote fields in a CSV file.
`field_sep` | `","` | string | Specifies a single character string used to separate fields in a CSV file. Equivalent to the Teradata JDBC Driver `FIELD_SEP` connection parameter.
`govern`                | `"true"`    | quoted boolean | Controls FastLoad and FastExport throttling by Teradata workload management rules. When set to `true` (the default), workload management rules may delay a FastLoad or FastExport. When set to `false`, workload management rules will reject rather than delay a FastLoad or FastExport. Equivalent to the Teradata JDBC Driver `GOVERN` connection parameter.
`host` | | string | Specifies the database hostname.
`https_port` | `"443"` | quoted integer | Specifies the database port number for HTTPS/TLS connections. Equivalent to the Teradata JDBC Driver `HTTPS_PORT` connection parameter.
`immediate` | `"true"` | quoted boolean | Controls whether `DBI::dbSendQuery` and `DBI::dbSendStatement` execute the SQL request when the `params` and `immediate` arguments are omitted.
`lob_support` | `"true"` | quoted boolean | Controls LOB support. Equivalent to the Teradata JDBC Driver `LOB_SUPPORT` connection parameter.
`log` | `"0"` | quoted integer | Controls debug logging. Somewhat equivalent to the Teradata JDBC Driver `LOG` connection parameter. This parameter's behavior is subject to change in the future. This parameter's value is currently defined as an integer in which the 1-bit governs function and method tracing, the 2-bit governs debug logging, the 4-bit governs transmit and receive message hex dumps, and the 8-bit governs timing. Compose the value by adding together 1, 2, 4, and/or 8.
`logdata` | | string | Specifies extra data for the chosen logon authentication method. Equivalent to the Teradata JDBC Driver `LOGDATA` connection parameter.
`logmech` | `"TD2"` | string | Specifies the logon authentication method. Equivalent to the Teradata JDBC Driver `LOGMECH` connection parameter. Possible case-insensitive values are `TD2` (the default), `BROWSER`, `JWT`, `LDAP`, `KRB5` for Kerberos, or `TDNEGO`. Browser Authentication is supported for Windows and macOS.
`logon_sequence_number` | | quoted integer | Associates this session with an existing Logon Sequence Number (LSN) when `connect_function` is `2`. The database only permits sessions for the same user to share an LSN. An LSN groups multiple sessions together for workload management. Using an LSN is a three-step process. First, establish a control session with `connect_function` as `1`, which allocates a new LSN. Second, obtain the LSN from the control session using the escape function `{fn teradata_logon_sequence_number}`. Third, establish an associated session with `connect_function` as `2` and the logon sequence number. Equivalent to the Teradata JDBC Driver `LOGON_SEQUENCE_NUMBER` connection parameter.
`logon_timeout` | `"0"` | quoted integer | Specifies the logon timeout in seconds. Zero means no timeout.
`manage_error_tables` | `"true"` | quoted boolean | Controls whether the driver manages the FastLoad error tables.
`max_message_body` | `"2097000"` | quoted integer | Specifies the maximum Response Message size in bytes. Equivalent to the Teradata JDBC Driver `MAX_MESSAGE_BODY` connection parameter.
`oidc_scope` | `"openid"` | string | Specifies the OpenID Connect (OIDC) scope to use for Browser Authentication. Beginning with Teradata Database 17.20.03.11, the default scope can be specified in the database's `TdgssUserConfigFile.xml` file, using the `IdPConfig` element's `Scope` attribute. Browser Authentication is supported for Windows and macOS. Equivalent to the Teradata JDBC Driver `OIDC_SCOPE` connection parameter.
`oidc_token` | `"access_token"` | string | Specifies the kind of OIDC token to use for Browser Authentication. Specify `id_token` to use the id_token instead of the access_token. Browser Authentication is supported for Windows and macOS. Equivalent to the Teradata JDBC Driver `OIDC_TOKEN` connection parameter.
`partition` | `"DBC/SQL"` | string | Specifies the database partition. Equivalent to the Teradata JDBC Driver `PARTITION` connection parameter.
`password` | | string | Specifies the database password. Equivalent to the Teradata JDBC Driver `PASSWORD` connection parameter.
`posixlt` | `"false"` | quoted boolean | Controls whether `POSIXlt` subclasses are used for certain result set column value types. Refer to the [Data Types](#DataTypes) table below for details.
`request_timeout` | `"0"` | quoted integer | Specifies the timeout for executing each SQL request. Zero means no timeout.
`runstartup` | `"false"` | quoted boolean | Controls whether the user's `STARTUP` SQL request is executed after logon. For more information, refer to [User STARTUP SQL Request](#UserStartup). Equivalent to the Teradata JDBC Driver `RUNSTARTUP` connection parameter.
`sessions` | | quoted integer | Specifies the number of data transfer connections for FastLoad or FastExport. The default (recommended) lets the database choose the appropriate number of connections. Equivalent to the Teradata JDBC Driver `SESSIONS` connection parameter.
`sip_support` | `"true"` | quoted boolean | Controls whether StatementInfo parcel is used. Equivalent to the Teradata JDBC Driver `SIP_SUPPORT` connection parameter.
`sslca` | | string | Specifies the file name of a PEM file that contains Certificate Authority (CA) certificates for use with `sslmode` values `VERIFY-CA` or `VERIFY-FULL`. Equivalent to the Teradata JDBC Driver `SSLCA` connection parameter.
`sslcapath` | | string | Specifies a directory of PEM files that contain Certificate Authority (CA) certificates for use with `sslmode` values `VERIFY-CA` or `VERIFY-FULL`. Only files with an extension of `.pem` are used. Other files in the specified directory are not used. Equivalent to the Teradata JDBC Driver `SSLCAPATH` connection parameter.
`sslcipher` | | string | Specifies the TLS cipher for HTTPS/TLS connections. Equivalent to the Teradata JDBC Driver `SSLCIPHER` connection parameter.
`sslcrc` | `"ALLOW"` | string | Controls TLS certificate revocation checking for HTTPS/TLS connections when `sslmode` is `VERIFY-FULL`. This parameter is ignored unless `sslmode` is `VERIFY-FULL`. Equivalent to the Teradata JDBC Driver `SSLCRC` connection parameter. Values are case-insensitive.<br/>• `ALLOW` provides "soft fail" behavior such that communication failures are ignored during certificate revocation checking.<br/>• `REQUIRE` mandates that certificate revocation checking must succeed.
`sslmode` | `"PREFER"` | string | Specifies the mode for connections to the database. Equivalent to the Teradata JDBC Driver `SSLMODE` connection parameter. Values are case-insensitive.<br/>• `DISABLE` disables HTTPS/TLS connections and uses only non-TLS connections.<br/>• `ALLOW` uses non-TLS connections unless the database requires HTTPS/TLS connections.<br/>• `PREFER` uses HTTPS/TLS connections unless the database does not offer HTTPS/TLS connections.<br/>• `REQUIRE` uses only HTTPS/TLS connections.<br/>• `VERIFY-CA` uses only HTTPS/TLS connections and verifies that the server certificate is valid and trusted.<br/>• `VERIFY-FULL` uses only HTTPS/TLS connections, verifies that the server certificate is valid and trusted, and verifies that the server certificate matches the database hostname.
`sslprotocol` | `"TLSv1.2"` | string | Specifies the TLS protocol for HTTPS/TLS connections. Equivalent to the Teradata JDBC Driver `SSLPROTOCOL` connection parameter.
`teradata_values` | `"true"` | quoted boolean | Controls whether `character` or a more specific R data type is used for certain result set column value types. Refer to the [Data Types](#DataTypes) table below for details.
`tmode` | `"DEFAULT"` | string | Specifies the transaction mode. Equivalent to the Teradata JDBC Driver `TMODE` connection parameter. Possible values are `DEFAULT` (the default), `ANSI`, or `TERA`.
`user` | | string | Specifies the database username. Equivalent to the Teradata JDBC Driver `USER` connection parameter.
<a name="COPDiscovery"></a>
### COP Discovery
The driver provides Communications Processor (COP) discovery behavior when the `cop` connection parameter is `true` or omitted. COP Discovery is turned off when the `cop` connection parameter is `false`.
A database system can be composed of multiple database nodes. One or more of the database nodes can be configured to run the database Gateway process. Each database node that runs the database Gateway process is termed a Communications Processor, or COP. COP Discovery refers to the procedure of identifying all the available COP hostnames and their IP addresses. COP hostnames can be defined in DNS, or can be defined in the client system's `hosts` file. Teradata strongly recommends that COP hostnames be defined in DNS, rather than the client system's `hosts` file. Defining COP hostnames in DNS provides centralized administration, and enables centralized changes to COP hostnames if and when the database is reconfigured.
The `coplast` connection parameter specifies how COP Discovery determines the last COP hostname.
* When `coplast` is `false` or omitted, or COP Discovery is turned off, then the driver will not perform a DNS lookup for the coplast hostname.
* When `coplast` is `true`, and COP Discovery is turned on, then the driver will first perform a DNS lookup for a coplast hostname to obtain the IP address of the last COP hostname before performing COP Discovery. Subsequently, during COP Discovery, the driver will stop searching for COP hostnames when either an unknown COP hostname is encountered, or a COP hostname is encountered whose IP address matches the IP address of the coplast hostname.
Specifying `coplast` as `true` can improve performance with DNS that is slow to respond for DNS lookup failures, and is necessary for DNS that never returns a DNS lookup failure.
When performing COP Discovery, the driver starts with cop1, which is appended to the database hostname, and then proceeds with cop2, cop3, ..., copN. The driver supports domain-name qualification for COP Discovery and the coplast hostname. Domain-name qualification is recommended, because it can improve performance by avoiding unnecessary DNS lookups for DNS search suffixes.
The following table illustrates the DNS lookups performed for a hypothetical three-node database system named "whomooz".
| No domain name qualification | With domain name qualification<br/>(Recommended)
------ | ---------------------------- | ---
Application-specified<br/>database hostname | `whomooz` | `whomooz.domain.com`
Default: COP Discovery turned on, and `coplast` is `false` or omitted,<br/>perform DNS lookups until unknown COP hostname is encountered | `whomoozcop1`→`10.0.0.1`<br/>`whomoozcop2`→`10.0.0.2`<br/>`whomoozcop3`→`10.0.0.3`<br/>`whomoozcop4`→undefined | `whomoozcop1.domain.com`→`10.0.0.1`<br/>`whomoozcop2.domain.com`→`10.0.0.2`<br/>`whomoozcop3.domain.com`→`10.0.0.3`<br/>`whomoozcop4.domain.com`→undefined
COP Discovery turned on, and `coplast` is `true`,<br/>perform DNS lookups until COP hostname is found whose IP address matches the coplast hostname, or unknown COP hostname is encountered | `whomoozcoplast`→`10.0.0.3`<br/>`whomoozcop1`→`10.0.0.1`<br/>`whomoozcop2`→`10.0.0.2`<br/>`whomoozcop3`→`10.0.0.3` | `whomoozcoplast.domain.com`→`10.0.0.3`<br/>`whomoozcop1.domain.com`→`10.0.0.1`<br/>`whomoozcop2.domain.com`→`10.0.0.2`<br/>`whomoozcop3.domain.com`→`10.0.0.3`
COP Discovery turned off and round-robin DNS,<br/>perform one DNS lookup that returns multiple IP addresses | `whomooz`→`10.0.0.1`, `10.0.0.2`, `10.0.0.3` | `whomooz.domain.com`→`10.0.0.1`, `10.0.0.2`, `10.0.0.3`
Round-robin DNS rotates the list of IP addresses automatically to provide load distribution. Round-robin is only possible with DNS, not with the client system `hosts` file.
The driver supports the definition of multiple IP addresses for COP hostnames and non-COP hostnames.
For the first connection to a particular database system, the driver generates a random number to index into the list of COPs. For each subsequent connection, the driver increments the saved index until it wraps around to the first position. This behavior provides load distribution across all discovered COPs.
The driver masks connection failures to down COPs, thereby hiding most connection failures from the client application. An exception is thrown to the application only when all the COPs are down for that database. If a COP is down, the next COP in the sequence (including a wrap-around to the first COP) receives extra connections that were originally destined for the down COP. When multiple IP addresses are defined in DNS for a COP, the driver will attempt to connect to each of the COP's IP addresses, and the COP is considered down only when connection attempts fail to all of the COP's IP addresses.
If COP Discovery is turned off, or no COP hostnames are defined in DNS, the driver connects directly to the hostname specified in the `host` connection parameter. This permits load distribution schemes other than the COP Discovery approach. For example, round-robin DNS or a TCP/IP load distribution product can be used. COP Discovery takes precedence over simple database hostname lookup. To use an alternative load distribution scheme, either ensure that no COP hostnames are defined in DNS, or turn off COP Discovery with `cop` as `false`.
<a name="StoredPasswordProtection"></a>
### Stored Password Protection
#### Overview
Stored Password Protection enables an application to provide a connection password in encrypted form to the driver.
An encrypted password may be specified in the following contexts:
* A login password specified as the `password` connection parameter.
* A login password specified within the `logdata` connection parameter.
If the password, however specified, begins with the prefix `ENCRYPTED_PASSWORD(` then the specified password must follow this format:
`ENCRYPTED_PASSWORD(file:`*PasswordEncryptionKeyFileName*`,file:`*EncryptedPasswordFileName*`)`
Each filename must be preceded by the `file:` prefix. The *PasswordEncryptionKeyFileName* must be separated from the *EncryptedPasswordFileName* by a single comma.
The *PasswordEncryptionKeyFileName* specifies the name of a file that contains the password encryption key and associated information. The *EncryptedPasswordFileName* specifies the name of a file that contains the encrypted password and associated information. The two files are described below.
Stored Password Protection is offered by this driver, the Teradata JDBC Driver, and the Teradata SQL Driver for Python. These drivers use the same file format.
#### Program TJEncryptPassword
`TJEncryptPassword.R` is a sample program to create encrypted password files for use with Stored Password Protection. When the driver is installed, the sample programs are placed in the `teradatasql/samples` directory under your R library directory.
This program works in conjunction with Stored Password Protection offered by the driver. This program creates the files containing the password encryption key and encrypted password, which can be subsequently specified via the `ENCRYPTED_PASSWORD(` syntax.
You are not required to use this program to create the files containing the password encryption key and encrypted password. You can develop your own software to create the necessary files. You may also use the [`TJEncryptPassword.py`](https://github.com/Teradata/python-driver/blob/master/samples/TJEncryptPassword.py) sample program that is available with the Teradata SQL Driver for Python. You may also use the [`TJEncryptPassword.java`](https://downloads.teradata.com/doc/connectivity/jdbc/reference/current/samp/TJEncryptPassword.java.txt) sample program that is available with the [Teradata JDBC Driver Reference](https://downloads.teradata.com/doc/connectivity/jdbc/reference/current/frameset.html). The only requirement is that the files must match the format expected by the driver, which is documented below.
This program encrypts the password and then immediately decrypts the password, in order to verify that the password can be successfully decrypted. This program mimics the password decryption of the driver, and is intended to openly illustrate its operation and enable scrutiny by the community.
The encrypted password is only as safe as the two files. You are responsible for restricting access to the files containing the password encryption key and encrypted password. If an attacker obtains both files, the password can be decrypted. The operating system file permissions for the two files should be as limited and restrictive as possible, to ensure that only the intended operating system userid has access to the files.
The two files can be kept on separate physical volumes, to reduce the risk that both files might be lost at the same time. If either or both of the files are located on a network volume, then an encrypted wire protocol can be used to access the network volume, such as sshfs, encrypted NFSv4, or encrypted SMB 3.0.
This program accepts eight command-line arguments:
Argument | Example | Description
----------------------------- | -------------------- | ---
Transformation | `AES/CBC/NoPadding` | Specifies the transformation in the form *Algorithm*`/`*Mode*`/`*Padding*. Supported transformations are listed in a table below.
KeySizeInBits | `256` | Specifies the algorithm key size, which governs the encryption strength.
MAC | `HmacSHA256` | Specifies the message authentication code (MAC) algorithm `HmacSHA1` or `HmacSHA256`.
PasswordEncryptionKeyFileName | `PassKey.properties` | Specifies a filename in the current directory, a relative pathname, or an absolute pathname. The file is created by this program. If the file already exists, it will be overwritten by the new file.
EncryptedPasswordFileName | `EncPass.properties` | Specifies a filename in the current directory, a relative pathname, or an absolute pathname. The filename or pathname must differ from the PasswordEncryptionKeyFileName. The file is created by this program. If the file already exists, it will be overwritten by the new file.
Hostname | `whomooz` | Specifies the database hostname.
Username | `guest` | Specifies the database username.
Password | `please` | Specifies the database password to be encrypted. Unicode characters in the password can be specified with the `\u`*XXXX* escape sequence.
#### Example Command
The TJEncryptPassword program uses the driver to log on to the specified database using the encrypted password, so the driver must already be installed.
The following command assumes that the `TJEncryptPassword.R` program file is located in the current directory. When the driver is installed, the sample programs are placed in the `teradatasql/samples` directory under your R library directory. Change your current directory to the `teradatasql/samples` directory under your R library directory.
The following example command illustrates using a 256-bit AES key, and using the HmacSHA256 algorithm.
Rscript TJEncryptPassword.R AES/CBC/NoPadding 256 HmacSHA256 PassKey.properties EncPass.properties whomooz guest please
#### Password Encryption Key File Format
You are not required to use the TJEncryptPassword program to create the files containing the password encryption key and encrypted password. You can develop your own software to create the necessary files, but the files must match the format expected by the driver.
The password encryption key file is a text file in Java Properties file format, using the ISO 8859-1 character encoding.
The file must contain the following string properties:
Property | Description
------------------------------------------------- | ---
`version=1` | The version number must be `1`. This property is required.
`transformation=`*Algorithm*`/`*Mode*`/`*Padding* | Specifies the transformation in the form *Algorithm*`/`*Mode*`/`*Padding*. Supported transformations are listed in a table below. This property is required.
`algorithm=`*Algorithm* | This value must correspond to the *Algorithm* portion of the transformation. This property is required.
`match=`*MatchValue* | The password encryption key and encrypted password files must contain the same match value. The match values are compared to ensure that the two specified files are related to each other, serving as a "sanity check" to help avoid configuration errors. This property is required.
`key=`*HexDigits* | This value is the password encryption key, encoded as hex digits. This property is required.
`mac=`*MACAlgorithm* | Specifies the message authentication code (MAC) algorithm `HmacSHA1` or `HmacSHA256`. Stored Password Protection performs Encrypt-then-MAC for protection from a padding oracle attack. This property is required.
`mackey=`*HexDigits* | This value is the MAC key, encoded as hex digits. This property is required.
The TJEncryptPassword program uses a timestamp as a shared match value, but a timestamp is not required. Any shared string can serve as a match value. The timestamp is not related in any way to the encryption of the password, and the timestamp cannot be used to decrypt the password.
#### Encrypted Password File Format
The encrypted password file is a text file in Java Properties file format, using the ISO 8859-1 character encoding.
The file must contain the following string properties:
Property | Description
------------------------------------------------- | ---
`version=1` | The version number must be `1`. This property is required.
`match=`*MatchValue* | The password encryption key and encrypted password files must contain the same match value. The match values are compared to ensure that the two specified files are related to each other, serving as a "sanity check" to help avoid configuration errors. This property is required.
`password=`*HexDigits* | This value is the encrypted password, encoded as hex digits. This property is required.
`params=`*HexDigits* | This value contains the cipher algorithm parameters, if any, encoded as hex digits. Some ciphers need algorithm parameters that cannot be derived from the key, such as an initialization vector. This property is optional, depending on whether the cipher algorithm has associated parameters.
`hash=`*HexDigits* | This value is the expected message authentication code (MAC), encoded as hex digits. After encryption, the expected MAC is calculated using the ciphertext, transformation name, and algorithm parameters if any. Before decryption, the driver calculates the MAC using the ciphertext, transformation name, and algorithm parameters if any, and verifies that the calculated MAC matches the expected MAC. If the calculated MAC differs from the expected MAC, then either or both of the files may have been tampered with. This property is required.
While `params` is technically optional, an initialization vector is required by all three block cipher modes `CBC`, `CFB`, and `OFB` that are supported by the driver. ECB (Electronic Codebook) does not require `params`, but ECB is not supported by the driver.
#### Transformation, Key Size, and MAC
A transformation is a string that describes the set of operations to be performed on the given input, to produce transformed output. A transformation specifies the name of a cryptographic algorithm such as DES or AES, followed by a feedback mode and padding scheme.
The driver supports the following transformations and key sizes.
However, `TJEncryptPassword.R` only supports AES with CBC or CFB, as indicated below.
Transformation | Key Size | TJEncryptPassword.R
--------------------------- | -------- | ---
`DES/CBC/NoPadding` | 64 |
`DES/CBC/PKCS5Padding` | 64 |
`DES/CFB/NoPadding` | 64 |
`DES/CFB/PKCS5Padding` | 64 |
`DES/OFB/NoPadding` | 64 |
`DES/OFB/PKCS5Padding` | 64 |
`DESede/CBC/NoPadding` | 192 |
`DESede/CBC/PKCS5Padding` | 192 |
`DESede/CFB/NoPadding` | 192 |
`DESede/CFB/PKCS5Padding` | 192 |
`DESede/OFB/NoPadding` | 192 |
`DESede/OFB/PKCS5Padding` | 192 |
`AES/CBC/NoPadding` | 128 | Yes
`AES/CBC/NoPadding` | 192 | Yes
`AES/CBC/NoPadding` | 256 | Yes
`AES/CBC/PKCS5Padding` | 128 | Yes
`AES/CBC/PKCS5Padding` | 192 | Yes
`AES/CBC/PKCS5Padding` | 256 | Yes
`AES/CFB/NoPadding` | 128 | Yes
`AES/CFB/NoPadding` | 192 | Yes
`AES/CFB/NoPadding` | 256 | Yes
`AES/CFB/PKCS5Padding` | 128 | Yes
`AES/CFB/PKCS5Padding` | 192 | Yes
`AES/CFB/PKCS5Padding` | 256 | Yes
`AES/OFB/NoPadding` | 128 |
`AES/OFB/NoPadding` | 192 |
`AES/OFB/NoPadding` | 256 |
`AES/OFB/PKCS5Padding` | 128 |
`AES/OFB/PKCS5Padding` | 192 |
`AES/OFB/PKCS5Padding` | 256 |
Stored Password Protection uses a symmetric encryption algorithm such as DES or AES, in which the same secret key is used for encryption and decryption of the password. Stored Password Protection does not use an asymmetric encryption algorithm such as RSA, with separate public and private keys.
CBC (Cipher Block Chaining) is a block cipher encryption mode. With CBC, each ciphertext block is dependent on all plaintext blocks processed up to that point. CBC is suitable for encrypting data whose total byte count exceeds the algorithm's block size, and is therefore suitable for use with Stored Password Protection.
Stored Password Protection hides the password length in the encrypted password file by extending the length of the UTF-8-encoded password with trailing null bytes. The length is extended to the next 512-byte boundary.
* A block cipher with no padding, such as `AES/CBC/NoPadding`, may only be used to encrypt data whose byte count after extension is a multiple of the algorithm's block size. The 512-byte boundary is compatible with many block ciphers. AES, for example, has a block size of 128 bits (16 bytes), and is therefore compatible with the 512-byte boundary.
* A block cipher with padding, such as `AES/CBC/PKCS5Padding`, can be used to encrypt data of any length. However, CBC with padding is vulnerable to a "padding oracle attack", so Stored Password Protection performs Encrypt-then-MAC for protection from a padding oracle attack. MAC algorithms `HmacSHA1` and `HmacSHA256` are supported.
* The driver does not support block ciphers used as byte-oriented ciphers via modes such as `CFB8` or `OFB8`.
The strength of the encryption depends on your choice of cipher algorithm and key size.
* AES uses a 128-bit (16 byte), 192-bit (24 byte), or 256-bit (32 byte) key.
* DESede uses a 192-bit (24 byte) key. The driver does not support a 128-bit (16 byte) key for DESede.
* DES uses a 64-bit (8 byte) key.
#### Sharing Files with the Teradata JDBC Driver
This driver and the Teradata JDBC Driver can share the files containing the password encryption key and encrypted password, if you use a transformation, key size, and MAC algorithm that is supported by both drivers.
* Recommended choices for compatibility are `AES/CBC/NoPadding` and `HmacSHA256`.
* Use a 256-bit key if your Java environment has the Java Cryptography Extension (JCE) Unlimited Strength Jurisdiction Policy Files from Oracle.
* Use a 128-bit key if your Java environment does not have the Unlimited Strength Jurisdiction Policy Files.
* Use `HmacSHA1` for compatibility with JDK 1.4.2.
#### File Locations
For the `ENCRYPTED_PASSWORD(` syntax of the driver, each filename must be preceded by the `file:` prefix.
The *PasswordEncryptionKeyFileName* must be separated from the *EncryptedPasswordFileName* by a single comma. The files can be located in the current directory, specified with a relative path, or specified with an absolute path.
Example for files in the current directory:
ENCRYPTED_PASSWORD(file:JohnDoeKey.properties,file:JohnDoePass.properties)
Example with relative paths:
ENCRYPTED_PASSWORD(file:../dir1/JohnDoeKey.properties,file:../dir2/JohnDoePass.properties)
Example with absolute paths on Windows:
ENCRYPTED_PASSWORD(file:c:/dir1/JohnDoeKey.properties,file:c:/dir2/JohnDoePass.properties)
Example with absolute paths on Linux:
ENCRYPTED_PASSWORD(file:/dir1/JohnDoeKey.properties,file:/dir2/JohnDoePass.properties)
#### Processing Sequence
The two filenames specified for an encrypted password must be accessible to the driver and must conform to the properties file formats described above. The driver signals an error if the file is not accessible, or the file does not conform to the required file format.
The driver verifies that the match values in the two files are present, and match each other. The driver signals an error if the match values differ from each other. The match values are compared to ensure that the two specified files are related to each other, serving as a "sanity check" to help avoid configuration errors. The TJEncryptPassword program uses a timestamp as a shared match value, but a timestamp is not required. Any shared string can serve as a match value. The timestamp is not related in any way to the encryption of the password, and the timestamp cannot be used to decrypt the password.
Before decryption, the driver calculates the MAC using the ciphertext, transformation name, and algorithm parameters if any, and verifies that the calculated MAC matches the expected MAC. The driver signals an error if the calculated MAC differs from the expected MAC, to indicate that either or both of the files may have been tampered with.
Finally, the driver uses the decrypted password to log on to the database.
<a name="ClientAttributes"></a>
### Client Attributes
Client Attributes record a variety of information about the client system and client software in the system tables `DBC.SessionTbl` and `DBC.EventLog`. Client Attributes are intended to be a replacement for the information recorded in the `LogonSource` column of the system tables `DBC.SessionTbl` and `DBC.EventLog`.
The Client Attributes are recorded at session logon time. Subsequently, the system views `DBC.SessionInfoV` and `DBC.LogOnOffV` can be queried to obtain information about the client system and client software on a per-session basis. Client Attribute values may be recorded in the database in either mixed-case or in uppercase, depending on the session character set and other factors. Analysis of recorded Client Attributes must flexibly accommodate either mixed-case or uppercase values.
Warning: The information in this section is subject to change in future releases of the driver. Client Attributes can be "mined" for information about client system demographics; however, any applications that parse Client Attribute values must be changed if Client Attribute formats are changed in the future.
Client Attributes are not intended to be used for workload management. Instead, query bands are intended for workload management. Any use of Client Attributes for workload management may break if Client Attributes are changed, or augmented, in the future.
Client Attribute | Source | Description
--------------------------- | -------- | ---
`MechanismName` | database | The connection's logon mechanism; for example, TD2, LDAP, etc.
`ClientIpAddress` | database | The client IP address, as determined by the database
`ClientTcpPortNumber` | database | The connection's client TCP port number, as determined by the database
`ClientIPAddrByClient` | driver | The client IP address, as determined by the driver
`ClientPortByClient` | driver | The connection's client TCP port number, as determined by the driver
`ClientProgramName` | driver | The client program name, followed by a streamlined call stack
`ClientSystemUserId` | driver | The client user name
`ClientOsName` | driver | The client operating system name
`ClientProcThreadId` | driver | The client process ID
`ClientVmName` | driver | R language runtime information
`ClientTdHostName` | driver | The database hostname as specified by the application, without any COP suffix
`ClientCOPSuffixedHostName` | driver | The COP-suffixed database hostname chosen by the driver
`ServerIPAddrByClient` | driver | The database node's IP address, as determined by the driver
`ServerPortByClient` | driver | The destination port number of the TCP connection to the database node, as determined by the driver
`ClientConfType` | driver | The confidentiality type, as determined by the driver<br/>`V` - TLS used for encryption, with full certificate verification<br/>`C` - TLS used for encryption, with Certificate Authority (CA) verification<br/>`R` - TLS used for encryption, with no certificate verification<br/>`E` - TLS was not attempted, and TDGSS used for encryption<br/>`U` - TLS was not attempted, and TDGSS encryption depends on central administration<br/>`F` - TLS was attempted, but the TLS handshake failed, so this is a fallback to using TDGSS for encryption<br/>`H` - SSLMODE was set to PREFER, but a non-TLS connection was made, and TDGSS encryption depends on central administration
`ServerConfType` | database | The confidentiality type, as determined by the database<br/>`T` - TLS used for encryption<br/>`E` - TDGSS used for encryption<br/>`U` - Data transfer is unencrypted
`ClientConfVersion` | database | The TLS version as determined by the database, if this is an HTTPS/TLS connection
`ClientConfCipherSuite` | database | The TLS cipher as determined by the database, if this is an HTTPS/TLS connection
`ClientAttributesEx` | driver | Additional Client Attributes are available in this column as a list of name=value pairs, each terminated by a semicolon. Individual values can be accessed using the `NVP` system function.<br/>`R` - The R language version<br/>`TZ` - The R current time zone<br/>`GO` - The Go version<br/>`SCS` - The session character set<br/>`CCS` - The client character set<br/>`LOB` - Y/N indicator for LOB support<br/>`SIP` - Y/N indicator for StatementInfo parcel support<br/>`TM` - The transaction mode indicator A (ANSI) or T (TERA)<br/>`ENC` - Y/N indicator for `encryptdata` connection parameter<br/>`DP` - The `dbs_port` connection parameter<br/>`HP` - The `https_port` connection parameter<br/>`SSL` - Numeric level corresponding to `sslmode`<br/>`SSLM` - The `sslmode` connection parameter<br/>`CERT` - The TLS certificate status<br/>`BA` - Y/N indicator for Browser Authentication<br/><br/>The `CERT` attribute indicates the TLS certificate status for an HTTPS/TLS connection. 
When the `CERT` attribute indicates the TLS certificate is valid (`V`) or invalid (`I`), then additional TLS certificate status details are provided as a series of comma-separated two-letter codes.<br/>`U` - the TLS certificate status is unavailable<br/>`V` - the TLS certificate status is valid<br/>`I` - the TLS certificate status is invalid<br/>`PU` - sslca PEM file is unavailable for server certificate verification<br/>`PA` - server certificate was verified using sslca PEM file<br/>`PR` - server certificate was rejected using sslca PEM file<br/>`DU` - sslcapath PEM directory is unavailable for server certificate verification<br/>`DA` - server certificate was verified using sslcapath PEM directory<br/>`DR` - server certificate was rejected using sslcapath PEM directory<br/>`SA` - server certificate was verified by the system<br/>`SR` - server certificate was rejected by the system<br/>`CY` - server certificate passed VERIFY-CA check<br/>`CN` - server certificate failed VERIFY-CA check<br/>`HU` - server hostname is unavailable for server certificate matching, because database IP address was specified<br/>`HY` - server hostname matches server certificate<br/>`HN` - server hostname does not match server certificate<br/>`RU` - resolved server hostname is unavailable for server certificate matching, because database IP address was specified<br/>`RY` - resolved server hostname matches server certificate<br/>`RN` - resolved server hostname does not match server certificate<br/>`IY` - IP address matches server certificate<br/>`IN` - IP address does not match server certificate<br/>`FY` - server certificate passed VERIFY-FULL check<br/>`FN` - server certificate failed VERIFY-FULL check
#### LogonSource Column
The `LogonSource` column is obsolete and has been superseded by Client Attributes. The `LogonSource` column may be deprecated and subsequently removed in future releases of the database.
When the driver establishes a connection to the database, the driver composes a string value that is stored in the `LogonSource` column of the system tables `DBC.SessionTbl` and `DBC.EventLog`. The `LogonSource` column is included in system views such as `DBC.SessionInfoV` and `DBC.LogOnOffV`. All `LogonSource` values are recorded in the database in uppercase.
The driver follows the format documented in the Teradata Data Dictionary, section "System Views Columns Reference", for network-attached `LogonSource` values. Network-attached `LogonSource` values have eight fields, separated by whitespace. The database composes fields 1 through 3, and the driver composes fields 4 through 8.
Field | Source | Description
----- | -------- | ---
1 | database | The string `(TCP/IP)` to indicate the connection type
2 | database | The connection's client TCP port number, in hexadecimal
3 | database | The client IP address, as determined by the database
4 | driver | The database hostname as specified by the application, without any COP suffix
5 | driver | The client process ID
6 | driver | The client user name
7 | driver | The client program name
8 | driver | The string `01 LSS` to indicate the `LogonSource` string version `01`
<a name="UserStartup"></a>
### User STARTUP SQL Request
`CREATE USER` and `MODIFY USER` commands provide `STARTUP` clauses for specifying SQL commands to establish initial session settings. The following table lists several of the SQL commands that may be used to establish initial session settings.
Category | SQL command
------------------------ | ---
Diagnostic settings | `DIAGNOSTIC` ... `FOR SESSION`
Session query band | `SET QUERY_BAND` ... `FOR SESSION`
Unicode Pass Through | `SET SESSION CHARACTER SET UNICODE PASS THROUGH ON`
Transaction isolation | `SET SESSION CHARACTERISTICS AS TRANSACTION ISOLATION LEVEL`
Collation sequence | `SET SESSION COLLATION`
Temporal qualifier | `SET SESSION CURRENT VALIDTIME AND CURRENT TRANSACTIONTIME`
Date format | `SET SESSION DATEFORM`
Function tracing | `SET SESSION FUNCTION TRACE`
Session time zone | `SET TIME ZONE`
For example, the following command sets a `STARTUP` SQL request for user `susan` to establish read-uncommitted transaction isolation after logon.
MODIFY USER susan AS STARTUP='SET SESSION CHARACTERISTICS AS TRANSACTION ISOLATION LEVEL RU'
The driver's `runstartup` connection parameter must be `true` to execute the user's `STARTUP` SQL request after logon. The default for `runstartup` is `false`. If the `runstartup` connection parameter is omitted or `false`, then the user's `STARTUP` SQL request will not be executed.
<a name="TransactionMode"></a>
### Transaction Mode
The `tmode` connection parameter enables an application to specify the transaction mode for the connection.
* `"tmode":"ANSI"` provides American National Standards Institute (ANSI) transaction semantics. This mode is recommended.
* `"tmode":"TERA"` provides legacy Teradata transaction semantics. This mode is only recommended for legacy applications that require Teradata transaction semantics.
* `"tmode":"DEFAULT"` provides the default transaction mode configured for the database, which may be either ANSI or TERA mode. `"tmode":"DEFAULT"` is the default when the `tmode` connection parameter is omitted.
While ANSI mode is generally recommended, please note that every application is different, and some applications may need to use TERA mode. The following differences between ANSI and TERA mode might affect a typical user or application:
1. Silent truncation of inserted data occurs in TERA mode, but not ANSI mode. In ANSI mode, the database returns an error instead of truncating data.
2. Tables created in ANSI mode are `MULTISET` by default. Tables created in TERA mode are `SET` tables by default.
3. For tables created in ANSI mode, character columns are `CASESPECIFIC` by default. For tables created in TERA mode, character columns are `NOT CASESPECIFIC` by default.
4. In ANSI mode, character literals are `CASESPECIFIC`. In TERA mode, character literals are `NOT CASESPECIFIC`.
The last two behavior differences, taken together, may cause character data comparisons (such as in `WHERE` clause conditions) to be case-insensitive in TERA mode, but case-sensitive in ANSI mode. This, in turn, can produce different query results in ANSI mode versus TERA mode. Comparing two `NOT CASESPECIFIC` expressions is case-insensitive regardless of mode, and comparing a `CASESPECIFIC` expression to another expression of any kind is case-sensitive regardless of mode. You may explicitly `CAST` an expression to be `CASESPECIFIC` or `NOT CASESPECIFIC` to obtain the character data comparison required by your application.
The Teradata Reference / *SQL Request and Transaction Processing* recommends that ANSI mode be used for all new applications. The primary benefit of using ANSI mode is that inadvertent data truncation is avoided. In contrast, when using TERA mode, silent data truncation can occur when data is inserted, because silent data truncation is a feature of TERA mode.
A drawback of using ANSI mode is that you can only call stored procedures that were created using ANSI mode, and you cannot call stored procedures that were created using TERA mode. It may not be possible to switch over to ANSI mode exclusively, because you may have some legacy applications that require TERA mode to work properly. You can work around this drawback by creating your stored procedures twice, in two different users/databases, once using ANSI mode, and once using TERA mode.
Refer to the Teradata Reference / *SQL Request and Transaction Processing* for complete information regarding the differences between ANSI and TERA transaction modes.
<a name="AutoCommit"></a>
### Auto-Commit
The driver provides auto-commit on and off functionality for both ANSI and TERA mode.
When a connection is first established, it begins with the default auto-commit setting, which is on. When auto-commit is on, the driver is solely responsible for managing transactions, and the driver commits each SQL request that is successfully executed. An application should not execute any transaction management SQL commands when auto-commit is on. An application should not call the `dbCommit` method or the `dbRollback` method when auto-commit is on.
An application can manage transactions itself by calling the `dbBegin` method to turn off auto-commit.
DBI::dbBegin(con)
When auto-commit is off, the driver leaves the current transaction open after each SQL request is executed, and the application is responsible for committing or rolling back the transaction by calling the `dbCommit` or the `dbRollback` method, respectively.
Auto-commit remains turned off until the application calls `dbCommit` or `dbRollback`, at which point auto-commit is turned back on.
Best practices recommend that an application avoid executing database-vendor-specific transaction management commands such as `BT`, `ET`, `ABORT`, `COMMIT`, or `ROLLBACK`, because such commands differ from one vendor to another. (They even differ between Teradata's two modes ANSI and TERA.) Instead, best practices recommend that an application only call the standard methods `dbCommit` and `dbRollback` for transaction management.
1. When auto-commit is on in ANSI mode, the driver automatically executes `COMMIT` after every successful SQL request.
2. When auto-commit is off in ANSI mode, the driver does not automatically execute `COMMIT`. When the application calls the `dbCommit` method, then the driver executes `COMMIT`.
3. When auto-commit is on in TERA mode, the driver does not execute `BT` or `ET`, unless the application explicitly executes `BT` or `ET` commands itself, which is not recommended.
4. When auto-commit is off in TERA mode, the driver executes `BT` before submitting the application's first SQL request of a new transaction. When the application calls the `dbCommit` method, then the driver executes `ET` until the transaction is complete.
As part of the wire protocol between the database and Teradata client interface software (such as this driver), each message transmitted from the database to the client has a bit designated to indicate whether the session has a transaction in progress or not. Thus, the client interface software is kept informed as to whether the session has a transaction in progress or not.
In TERA mode with auto-commit off, when the application uses the driver to execute a SQL request, if the session does not have a transaction in progress, then the driver automatically executes `BT` before executing the application's SQL request. Subsequently, in TERA mode with auto-commit off, when the application uses the driver to execute another SQL request, and the session already has a transaction in progress, then the driver has no need to execute `BT` before executing the application's SQL request.
In TERA mode, `BT` and `ET` pairs can be nested, and the database keeps track of the nesting level. The outermost `BT`/`ET` pair defines the transaction scope; inner `BT`/`ET` pairs have no effect on the transaction because the database does not provide actual transaction nesting. To commit the transaction, `ET` commands must be repeatedly executed until the nesting is unwound. The Teradata wire protocol bit (mentioned earlier) indicates when the nesting is unwound and the transaction is complete. When the application calls the `dbCommit` method in TERA mode, the driver repeatedly executes `ET` commands until the nesting is unwound and the transaction is complete.
In rare cases, an application may not follow best practices and may explicitly execute transaction management commands. Such an application must turn off auto-commit before executing transaction management commands such as `BT`, `ET`, `ABORT`, `COMMIT`, or `ROLLBACK`. The application is responsible for executing the appropriate commands for the transaction mode in effect. TERA mode commands are `BT`, `ET`, and `ABORT`. ANSI mode commands are `COMMIT` and `ROLLBACK`. An application must take special care when opening a transaction in TERA mode with auto-commit off. In TERA mode with auto-commit off, when the application executes a SQL request, if the session does not have a transaction in progress, then the driver automatically executes `BT` before executing the application's SQL request. Therefore, the application should not begin a transaction by executing `BT`.
# TERA mode example showing undesirable BT/ET nesting
DBI::dbBegin(con)
DBI::dbExecute(con, "BT") # BT automatically executed by the driver before this, and produces a nested BT
DBI::dbExecute(con, "insert into mytable1 values(1, 2)")
DBI::dbExecute(con, "insert into mytable2 values(3, 4)")
DBI::dbExecute(con, "ET") # unwind nesting
DBI::dbExecute(con, "ET") # complete transaction
# TERA mode example showing how to avoid BT/ET nesting
DBI::dbBegin(con)
DBI::dbExecute(con, "insert into mytable1 values(1, 2)") # BT automatically executed by the driver before this
DBI::dbExecute(con, "insert into mytable2 values(3, 4)")
DBI::dbExecute(con, "ET") # complete transaction
Please note that neither previous example shows best practices. Best practices recommend that an application only call the standard methods `dbCommit` and `dbRollback` for transaction management.
# Example showing best practice
DBI::dbBegin(con)
DBI::dbExecute(con, "insert into mytable1 values(1, 2)")
DBI::dbExecute(con, "insert into mytable2 values(3, 4)")
DBI::dbCommit(con)
<a name="DataTypes"></a>
### Data Types
The table below lists the database data types supported by the driver, and indicates the corresponding R data type returned in result set rows. Note that `teradata_values` as `false` takes precedence over `posixlt` as `true`.
Database data type | Result set R data type | With `posixlt` as `true` | With `teradata_values` as `false`
---------------------------------- | ---------------------- | ------------------------------------ | ---
`BIGINT` | `bit64::integer64` | |
`BLOB` | `raw` | |
`BYTE` | `raw` | |
`BYTEINT`                           | `integer`              |                                      |
`CHAR` | `character` | |
`CLOB` | `character` | |
`DATE` | `Date` | | `character`
`DECIMAL` | `double` | | `character`
`FLOAT` | `double` | |
`INTEGER` | `integer` | |
`INTERVAL YEAR` | `character` | |
`INTERVAL YEAR TO MONTH` | `character` | |
`INTERVAL MONTH` | `character` | |
`INTERVAL DAY` | `character` | |
`INTERVAL DAY TO HOUR` | `character` | |
`INTERVAL DAY TO MINUTE` | `character` | |
`INTERVAL DAY TO SECOND` | `character` | |
`INTERVAL HOUR` | `character` | |
`INTERVAL HOUR TO MINUTE` | `character` | |
`INTERVAL HOUR TO SECOND` | `character` | |
`INTERVAL MINUTE` | `character` | |
`INTERVAL MINUTE TO SECOND` | `character` | |
`INTERVAL SECOND` | `character` | |
`NUMBER` | `double` | | `character`
`PERIOD(DATE)` | `character` | |
`PERIOD(TIME)` | `character` | |
`PERIOD(TIME WITH TIME ZONE)` | `character` | |
`PERIOD(TIMESTAMP)` | `character` | |
`PERIOD(TIMESTAMP WITH TIME ZONE)` | `character` | |
`SMALLINT` | `integer` | |
`TIME` | `hms::hms` | | `character`
`TIME WITH TIME ZONE` | `character` | `teradatasql::TimeWithTimeZone` | `character`
`TIMESTAMP` | `POSIXct` | `teradatasql::Timestamp` | `character`
`TIMESTAMP WITH TIME ZONE` | `character` | `teradatasql::TimestampWithTimeZone` | `character`
`VARBYTE` | `raw` | |
`VARCHAR` | `character` | |
`XML` | `character` | |
The table below lists the parameterized SQL bind-value R data types supported by the driver, and indicates the corresponding database data type transmitted to the server.
Bind-value R data type | Database data type
------------------------------------ | ---
`bit64::integer64` | `BIGINT`
`character` | `VARCHAR`
`Date` | `DATE`
`difftime` | `VARCHAR` format compatible with `INTERVAL DAY TO SECOND`
`double` | `FLOAT`
`integer` | `INTEGER`
`hms::hms` | `TIME`
`POSIXct` | `TIMESTAMP`
`POSIXlt` without `$gmtoff` | `TIMESTAMP`
`POSIXlt` with `$gmtoff` | `TIMESTAMP WITH TIME ZONE`
`raw` | `VARBYTE`
`teradatasql::TimeWithTimeZone` | `TIME WITH TIME ZONE`
`teradatasql::Timestamp` | `TIMESTAMP`
`teradatasql::TimestampWithTimeZone` | `TIMESTAMP WITH TIME ZONE`
The `tzone` attribute of `POSIXct` and `POSIXlt` is ignored. The `$gmtoff` vector of `POSIXlt` holds the time zone portion of `TIME WITH TIME ZONE` and `TIMESTAMP WITH TIME ZONE` values.
Transforms are used for SQL `ARRAY` data values, and they can be transferred to and from the database as `VARCHAR` values.
Transforms are used for structured UDT data values, and they can be transferred to and from the database as `VARCHAR` values.
<a name="NullValues"></a>
### Null Values
SQL `NULL` values received from the database are returned in result set rows as R `NA` values.
An R `NA` value bound to a question-mark parameter marker is transmitted to the database as a `NULL` `VARCHAR` value.
The database does not provide automatic or implicit conversion of a `NULL` `VARCHAR` value to a different destination data type.
* For `NULL` column values in a batch, the driver will automatically convert the `NULL` values to match the data type of the non-`NULL` values in the same column.
* For solitary `NULL` values, your application may need to explicitly specify the data type with the `teradata_parameter` escape function, in order to avoid database error 3532 for non-permitted data type conversion.
Given a table with a destination column of `BYTE(4)`, the database would reject the following SQL with database error 3532 "Conversion between BYTE data and other types is illegal."
DBI::dbExecute(con, "update mytable set bytecolumn = ?", data.frame (bytecolumn = NA)) # fails with database error 3532
To avoid database error 3532 in this situation, your application must use the `teradata_parameter` escape function to specify the data type for the question-mark parameter marker.
DBI::dbExecute(con, "{fn teradata_parameter(1, BYTE(4))}update mytable set bytecolumn = ?", data.frame (bytecolumn = NA))
<a name="CharacterExportWidth"></a>
### Character Export Width
The driver always uses the UTF8 session character set, and the `charset` connection parameter is not supported. Be aware of the database's *Character Export Width* behavior that adds trailing space padding to fixed-width `CHAR` data type result set column values when using the UTF8 session character set.
The database `CHAR(`_n_`)` data type is a fixed-width data type (holding _n_ characters), and the database reserves a fixed number of bytes for the `CHAR(`_n_`)` data type in response spools and in network message traffic.
UTF8 is a variable-width character encoding scheme that requires a varying number of bytes for each character. When the UTF8 session character set is used, the database reserves the maximum number of bytes that the `CHAR(`_n_`)` data type could occupy in response spools and in network message traffic. When the UTF8 session character set is used, the database appends padding characters to the tail end of `CHAR(`_n_`)` values smaller than the reserved maximum size, so that the `CHAR(`_n_`)` values all occupy the same fixed number of bytes in response spools and in network message traffic.
Work around this drawback by using `CAST` or `TRIM` in SQL `SELECT` statements, or in views, to convert fixed-width `CHAR` data types to `VARCHAR`.
Given a table with fixed-width `CHAR` columns:
`CREATE TABLE MyTable (c1 CHAR(10), c2 CHAR(10))`
Original query that produces trailing space padding:
`SELECT c1, c2 FROM MyTable`
Modified query with either `CAST` or `TRIM` to avoid trailing space padding:
`SELECT CAST(c1 AS VARCHAR(10)), TRIM(TRAILING FROM c2) FROM MyTable`
Or wrap query in a view with `CAST` or `TRIM` to avoid trailing space padding:
`CREATE VIEW MyView (c1, c2) AS SELECT CAST(c1 AS VARCHAR(10)), TRIM(TRAILING FROM c2) FROM MyTable`
`SELECT c1, c2 FROM MyView`
This technique is also demonstrated in sample program `charpadding.R`.
<a name="Constructors"></a>
### Constructors
`teradatasql::TeradataDriver()`
Creates an instance of the driver to be specified as the first argument to `DBI::dbConnect`.
---
`teradatasql::TimeWithTimeZone(` *CharacterVector* `)`
Creates and returns a `TimeWithTimeZone` value subclass of `POSIXlt`. The `$gmtoff` vector of `POSIXlt` holds the time zone portion. The *CharacterVector* must contain string values in the database `TIME WITH TIME ZONE` format.
* `HH:MM:SS+HH:MM` The time zone suffix specifies positive or negative offset from GMT
* `HH:MM:SS-HH:MM`
* `HH:MM:SS.SSSSSS+HH:MM` Optional 1 to 6 digits of fractional seconds
* `HH:MM:SS.SSSSSS-HH:MM`
---
`teradatasql::Timestamp(` *CharacterVector* `)`
Creates and returns a `Timestamp` value subclass of `POSIXlt`. The *CharacterVector* must contain string values in the database `TIMESTAMP` format.
* `YYYY-MM-DD HH:MM:SS`
* `YYYY-MM-DD HH:MM:SS.SSSSSS` Optional 1 to 6 digits of fractional seconds
---
`teradatasql::TimestampWithTimeZone(` *CharacterVector* `)`
Creates and returns a `TimestampWithTimeZone` value subclass of `POSIXlt`. The `$gmtoff` vector of `POSIXlt` holds the time zone portion. The *CharacterVector* must contain string values in the database `TIMESTAMP WITH TIME ZONE` format.
* `YYYY-MM-DD HH:MM:SS+HH:MM` The time zone suffix specifies positive or negative offset from GMT
* `YYYY-MM-DD HH:MM:SS-HH:MM`
* `YYYY-MM-DD HH:MM:SS.SSSSSS+HH:MM` Optional 1 to 6 digits of fractional seconds
* `YYYY-MM-DD HH:MM:SS.SSSSSS-HH:MM`
<a name="DriverMethods"></a>
### Driver Methods
`DBI::dbCanConnect(teradatasql::TeradataDriver(),` *JSONConnectionString* `)`
Returns `TRUE` or `FALSE` to indicate whether a connection to the database can be created. Specify connection parameters as a JSON string.
---
`DBI::dbConnect(teradatasql::TeradataDriver(),` *JSONConnectionString* `)`
Creates a connection to the database and returns a Connection object.
The first parameter is an instance of `teradatasql::TeradataDriver`. The second parameter is an optional JSON string that defaults to `NA`. The third and subsequent arguments are optional named arguments. Specify connection parameters as a JSON string, as named arguments, or a combination of the two.
When a combination of parameters is specified, connection parameters specified as named arguments take precedence over same-named connection parameters specified in the JSON string.
---
`DBI::dbDataType(teradatasql::TeradataDriver(),` *obj* `)`
Returns a string giving the SQL type name for *obj*.
---
`DBI::dbGetInfo(teradatasql::TeradataDriver())`
Returns a list with names `driver.version` and `client.version`.
---
`DBI::dbIsReadOnly(teradatasql::TeradataDriver())`
Returns `FALSE`.
---
`DBI::dbIsValid(teradatasql::TeradataDriver())`
Returns `TRUE`.
<a name="ConnectionMethods"></a>
### Connection Methods
`DBI::dbAppendTable(` *conn* `,` *name* `,` *value* `)`
Inserts rows contained in `data.frame` *value* into an existing table with *name*.
The `data.frame` column names must match the destination table column names.
---
`DBI::dbBegin(` *conn* `)`
Begins a transaction by turning off auto-commit.
---
`DBI::dbCommit(` *conn* `)`
Commits the current transaction and turns on auto-commit.
---
`DBI::dbCreateTable(` *conn* `,` *name* `,` *fields* `, temporary = FALSE)`
Creates a table with *name*.
If *fields* is a `data.frame`, column names and column types are derived from the `data.frame`.
If *fields* is a named `character` vector, the names specify column names, and the values specify column types.
If `temporary` is `FALSE` (the default), a permanent table is created.
If `temporary` is `TRUE`, a volatile table is created.
---
`DBI::dbDataType(` *conn* `,` *obj* `)`
Returns a string giving the SQL type name for *obj*.
---
`DBI::dbDisconnect(` *conn* `)`
Closes the connection.
---
`DBI::dbExecute(` *conn* `,` *statement* `, params = NULL)`
Executes the SQL request *statement* and returns the number of rows affected by the statement.
Parameterized SQL bind values can be specified as a `list` or `data.frame` for *params*.
Parameter values are bound to question-mark parameter markers in column order, not by name.
Single row or multiple row *params* may be specified.
---
`DBI::dbExistsTable(` *conn* `,` *name* `)`
Returns `TRUE` if a table with *name* exists. Returns `FALSE` otherwise.
---
`DBI::dbGetInfo(` *conn* `)`
Returns a list with names `db.version`, `dbname`, `username`, `host`, and `port`.
---
`DBI::dbGetQuery(` *conn* `,` *statement* `, params = NULL)`
Executes the SQL query *statement* and returns a `data.frame` containing the result set.
Parameterized SQL bind values can be specified as a `list` or `data.frame` for *params*.
Parameter values are bound to question-mark parameter markers in column order, not by name.
---
`DBI::dbIsReadOnly(` *conn* `)`
Returns `FALSE`.
---
`DBI::dbIsValid(` *conn* `)`
Returns `TRUE` if the connection is usable.
Returns `FALSE` otherwise.
---
`DBI::dbListFields(` *conn* `,` *name* `)`
Returns a `character` vector containing the column names of the table with *name*.
---
`DBI::dbListObjects(` *conn* `, prefix = NULL)`
Returns a `data.frame` containing column 1 `table` with data type `list` of `DBI::Id`, and column 2 `is_prefix` with data type `logical`.
Returns the list of databases in the system when `prefix` is `NULL`. Column 2 `is_prefix` will be all `TRUE` in this case.
Returns the list of tables in the specified database when `prefix` is a string or a `DBI::Id` with a `schema` component. Column 2 `is_prefix` will be all `FALSE` in this case.
Only returns information about databases, permanent tables, and views. Does not return any information about volatile tables or global temporary tables.
---
`DBI::dbListTables(` *conn* `)`
Returns a `character` vector containing the names of the tables and views in the current database.
---
`DBI::dbQuoteIdentifier(` *conn* `,` *x* `)`
Returns *x* quoted and escaped as a SQL identifier: the value is enclosed in double-quote characters ( `"` ) and any embedded double-quote characters are doubled.
---
`DBI::dbQuoteString(` *conn* `,` *x* `)`
Returns *x* quoted and escaped as a SQL character literal: the value is enclosed in single-quote characters ( `'` ) and any embedded single-quote characters are doubled.
---
`DBI::dbReadTable(` *conn* `,` *name* `)`
Returns a `data.frame` containing all the rows from the table with *name*.
---
`DBI::dbRemoveTable(` *conn* `,` *name* `, fail_if_missing = TRUE)`
Drops the table with *name*.
If `fail_if_missing` is `TRUE` (the default), stops with an error when the specified table does not exist.
If `fail_if_missing` is `FALSE`, ignores a missing table.
---
`DBI::dbRollback(` *conn* `)`
Rolls back the current transaction and turns on auto-commit.
---
`DBI::dbSendQuery(` *conn* `,` *statement* `, params = NULL, immediate = NA)`
Prepares or executes the SQL query *statement* and returns a `DBI::DBIResult`.
Parameterized SQL bind values can be specified as a `list` or `data.frame` for *params*.
Parameter values are bound to question-mark parameter markers in column order, not by name.
Single row or multiple row *params* may be specified.
* When bound parameter values are specified with *params*, the `immediate` argument is ignored, and the SQL request is executed immediately.
* When no bound parameter values are specified, and `immediate = NA` is specified (the default), then the behavior is controlled by the `immediate` connection parameter. When connection parameter `immediate` is `true` (the default), then the SQL request is executed immediately. When connection parameter `immediate` is `false`, then the SQL request is prepared but not executed.
* When no bound parameter values are specified, and `immediate = TRUE` is specified, then the SQL request is executed immediately.
* When no bound parameter values are specified, and `immediate = FALSE` is specified, then the SQL request is prepared but not executed.
---
`DBI::dbSendStatement(` *conn* `,` *statement* `, params = NULL, immediate = NA)`
Prepares or executes the SQL request *statement* and returns a `DBI::DBIResult`.
Parameterized SQL bind values can be specified as a `list` or `data.frame` for *params*.
Parameter values are bound to question-mark parameter markers in column order, not by name.
Single row or multiple row *params* may be specified.
* When bound parameter values are specified with *params*, the `immediate` argument is ignored, and the SQL request is executed immediately.
* When no bound parameter values are specified, and `immediate = NA` is specified (the default), then the behavior is governed by the `immediate` connection parameter. When connection parameter `immediate` is `true` (the default), then the SQL request is executed immediately. When connection parameter `immediate` is `false`, then the SQL request is prepared but not executed.
* When no bound parameter values are specified, and `immediate = TRUE` is specified, then the SQL request is executed immediately.
* When no bound parameter values are specified, and `immediate = FALSE` is specified, then the SQL request is prepared but not executed.
---
`DBI::dbWithTransaction(` *conn* `,` *code* `)`
Not implemented yet.
---
`DBI::dbWriteTable(` *conn* `,` *name* `,` *value* `, row.names = FALSE, overwrite = FALSE, append = FALSE, field.types = NULL, temporary = FALSE)`
Creates, replaces, or uses a table with *name* and inserts into the table the rows contained in the `list` or `data.frame` *value*.
If `row.names` is `NULL` or `FALSE` (the default), row names are ignored.
If `row.names` is `TRUE`, custom or natural row names are inserted into a column named `row_names`.
If `row.names` is `NA`, custom row names are inserted into a column named `row_names`, but natural row names are ignored.
If `row.names` is a string, then it specifies the name of the column that custom or natural row names are inserted into.
If `overwrite` is `TRUE`, replaces an existing table with *name*.
If `append` is `TRUE`, creates table with *name* if it does not exist.
Stops with an error if both `overwrite` and `append` are `TRUE`, because they are mutually exclusive.
Stops with an error if both `overwrite` and `append` are `FALSE` (the default) and the table does not exist.
To override the column names or column types derived from *value*, specify `field.types` as a named `character` vector whose names specify column names, and values specify column types.
If `temporary` is `FALSE` (the default), a permanent table is created.
If `temporary` is `TRUE`, a volatile table is created.
<a name="ResultMethods"></a>
### Result Methods
`DBI::dbBind(` *res* `,` *params* `)`
Binds values to parameter markers and executes the prepared SQL request.
Parameterized SQL bind values are specified as a `list` or `data.frame` for *params*.
Parameter values are bound to question-mark parameter markers in column order, not by name.
Single row or multiple row *params* may be specified.
---
`DBI::dbClearResult(` *res* `)`
Closes the result.
---
`DBI::dbColumnInfo(` *res* `)`
Returns a `data.frame` containing result column metadata, in which each row describes one result column.
The returned `data.frame` has columns `name` (type `character`), `Sclass` (R data type), `type` (type `character`), `len` (type `integer`), `precision` (type `integer`), `scale` (type `integer`), and `nullOK` (type `logical`).
---
`DBI::dbFetch(` *res* `, n = -1)`
Fetches rows from the result after the SQL request is executed.
Fetches all remaining rows when `n` is `Inf` or `-1` (the default).
Fetches *n* rows at most when *n* is a non-negative whole number.
Stops with an error when *n* is something other than `Inf`, `-1`, or a non-negative whole number.
---
`DBI::dbGetInfo(` *res* `)`
Returns a list with names `statement`, `row.count`, `rows.affected`, and `has.completed`.
---
`DBI::dbGetRowCount(` *res* `)`
Returns the number of rows fetched from this result by the `dbFetch` method.
If the row count exceeds the maximum numeric value, returns the maximum numeric value and provides a warning to indicate the actual row count in the warning message.
Database row counts have an upper limit of 9,223,372,036,854,775,807 (hexadecimal 7FFF FFFF FFFF FFFF) and can exceed the maximum numeric value 9,007,199,254,740,991 (hexadecimal 1F FFFF FFFF FFFF).
---
`DBI::dbGetRowsAffected(` *res* `)`
Returns the number of rows affected by the SQL statement.
If the row count exceeds the maximum numeric value, returns the maximum numeric value and provides a warning to indicate the actual row count in the warning message.
---
`DBI::dbGetStatement(` *res* `)`
Returns the SQL request text.
---
`DBI::dbHasCompleted(` *res* `)`
Returns `TRUE` if all rows have been fetched from the result.
Returns `FALSE` otherwise.
---
`DBI::dbIsReadOnly(` *res* `)`
Returns `FALSE`.
---
`DBI::dbIsValid(` *res* `)`
Returns `TRUE` to indicate that the result is usable.
Returns `FALSE` otherwise.
---
`teradatasql::dbNextResult(` *res* `)`
Advances to the next result returned by a multi-statement request.
Returns `TRUE` to indicate that the next result is available.
Returns `FALSE` otherwise.
<a name="EscapeSyntax"></a>
### Escape Syntax
The driver accepts most of the JDBC escape clauses offered by the Teradata JDBC Driver.
#### Date and Time Literals
Date and time literal escape clauses are replaced by the corresponding SQL literal before the SQL request text is transmitted to the database.
Literal Type | Format
------------ | ------
Date | `{d '`*yyyy-mm-dd*`'}`
Time | `{t '`*hh:mm:ss*`'}`
Timestamp | `{ts '`*yyyy-mm-dd hh:mm:ss*`'}`
Timestamp | `{ts '`*yyyy-mm-dd hh:mm:ss.f*`'}`
For timestamp literal escape clauses, the decimal point and fractional digits may be omitted, or 1 to 6 fractional digits *f* may be specified after a decimal point.
#### Scalar Functions
Scalar function escape clauses are replaced by the corresponding SQL expression before the SQL request text is transmitted to the database.
Numeric Function | Returns
-------------------------------------- | ---
`{fn ABS(`*number*`)}` | Absolute value of *number*
`{fn ACOS(`*float*`)}` | Arccosine, in radians, of *float*
`{fn ASIN(`*float*`)}` | Arcsine, in radians, of *float*
`{fn ATAN(`*float*`)}` | Arctangent, in radians, of *float*
`{fn ATAN2(`*y*`,`*x*`)}` | Arctangent, in radians, of *y* / *x*
`{fn CEILING(`*number*`)}` | Smallest integer greater than or equal to *number*
`{fn COS(`*float*`)}` | Cosine of *float* radians
`{fn COT(`*float*`)}` | Cotangent of *float* radians
`{fn DEGREES(`*number*`)}` | Degrees in *number* radians
`{fn EXP(`*float*`)}` | *e* raised to the power of *float*
`{fn FLOOR(`*number*`)}` | Largest integer less than or equal to *number*
`{fn LOG(`*float*`)}` | Natural (base *e*) logarithm of *float*
`{fn LOG10(`*float*`)}` | Base 10 logarithm of *float*
`{fn MOD(`*integer1*`,`*integer2*`)}` | Remainder for *integer1* / *integer2*
`{fn PI()}` | The constant pi, approximately equal to 3.14159...
`{fn POWER(`*number*`,`*integer*`)}` | *number* raised to *integer* power
`{fn RADIANS(`*number*`)}` | Radians in *number* degrees
`{fn RAND(`*seed*`)}` | A random float value such that 0 ≤ value < 1, and *seed* is ignored
`{fn ROUND(`*number*`,`*places*`)}` | *number* rounded to *places*
`{fn SIGN(`*number*`)}` | -1 if *number* is negative; 0 if *number* is 0; 1 if *number* is positive
`{fn SIN(`*float*`)}` | Sine of *float* radians
`{fn SQRT(`*float*`)}` | Square root of *float*
`{fn TAN(`*float*`)}` | Tangent of *float* radians
`{fn TRUNCATE(`*number*`,`*places*`)}` | *number* truncated to *places*
String Function | Returns
-------------------------------------------------------------- | ---
`{fn ASCII(`*string*`)}` | ASCII code of the first character in *string*
`{fn CHAR(`*code*`)}` | Character with ASCII *code*
`{fn CHAR_LENGTH(`*string*`)}` | Length in characters of *string*
`{fn CHARACTER_LENGTH(`*string*`)}` | Length in characters of *string*
`{fn CONCAT(`*string1*`,`*string2*`)}` | String formed by concatenating *string1* and *string2*
`{fn DIFFERENCE(`*string1*`,`*string2*`)}` | A number from 0 to 4 that indicates the phonetic similarity of *string1* and *string2* based on their Soundex codes, such that a larger return value indicates greater phonetic similarity; 0 indicates no similarity, 4 indicates strong similarity
`{fn INSERT(`*string1*`,`*position*`,`*length*`,`*string2*`)}` | String formed by replacing the *length*-character segment of *string1* at *position* with *string2*, available beginning with Teradata Database 15.0
`{fn LCASE(`*string*`)}` | String formed by replacing all uppercase characters in *string* with their lowercase equivalents
`{fn LEFT(`*string*`,`*count*`)}` | Leftmost *count* characters of *string*
`{fn LENGTH(`*string*`)}` | Length in characters of *string*
`{fn LOCATE(`*string1*`,`*string2*`)}` | Position in *string2* of the first occurrence of *string1*, or 0 if *string2* does not contain *string1*
`{fn LTRIM(`*string*`)}` | String formed by removing leading spaces from *string*
`{fn OCTET_LENGTH(`*string*`)}` | Length in octets (bytes) of *string*
`{fn POSITION(`*string1*` IN `*string2*`)}` | Position in *string2* of the first occurrence of *string1*, or 0 if *string2* does not contain *string1*
`{fn REPEAT(`*string*`,`*count*`)}` | String formed by repeating *string* *count* times, available beginning with Teradata Database 15.0
`{fn REPLACE(`*string1*`,`*string2*`,`*string3*`)}` | String formed by replacing all occurrences of *string2* in *string1* with *string3*
`{fn RIGHT(`*string*`,`*count*`)}` | Rightmost *count* characters of *string*, available beginning with Teradata Database 15.0
`{fn RTRIM(`*string*`)}` | String formed by removing trailing spaces from *string*
`{fn SOUNDEX(`*string*`)}` | Soundex code for *string*
`{fn SPACE(`*count*`)}` | String consisting of *count* spaces
`{fn SUBSTRING(`*string*`,`*position*`,`*length*`)}` | The *length*-character segment of *string* at *position*
`{fn UCASE(`*string*`)}` | String formed by replacing all lowercase characters in *string* with their uppercase equivalents
System Function | Returns
--------------------------------------- | ---
`{fn DATABASE()}` | Current default database name
`{fn IFNULL(`*expression*`,`*value*`)}` | *expression* if *expression* is not NULL, or *value* if *expression* is NULL
`{fn USER()}` | Logon user name, which may differ from the current authorized user name after `SET QUERY_BAND` sets a proxy user
Time/Date Function | Returns
------------------------------------------------------------------ | ---
`{fn CURDATE()}` | Current date
`{fn CURRENT_DATE()}` | Current date
`{fn CURRENT_TIME()}` | Current time
`{fn CURRENT_TIMESTAMP()}` | Current date and time
`{fn CURTIME()}` | Current time
`{fn DAYOFMONTH(`*date*`)}` | Integer from 1 to 31 indicating the day of month in *date*
`{fn EXTRACT(YEAR FROM `*value*`)}` | The year component of the date and/or time *value*
`{fn EXTRACT(MONTH FROM `*value*`)}` | The month component of the date and/or time *value*
`{fn EXTRACT(DAY FROM `*value*`)}` | The day component of the date and/or time *value*
`{fn EXTRACT(HOUR FROM `*value*`)}` | The hour component of the date and/or time *value*
`{fn EXTRACT(MINUTE FROM `*value*`)}` | The minute component of the date and/or time *value*
`{fn EXTRACT(SECOND FROM `*value*`)}` | The second component of the date and/or time *value*
`{fn HOUR(`*time*`)}` | Integer from 0 to 23 indicating the hour of *time*
`{fn MINUTE(`*time*`)}` | Integer from 0 to 59 indicating the minute of *time*
`{fn MONTH(`*date*`)}` | Integer from 1 to 12 indicating the month of *date*
`{fn NOW()}` | Current date and time
`{fn SECOND(`*time*`)}` | Integer from 0 to 59 indicating the second of *time*
`{fn TIMESTAMPADD(SQL_TSI_YEAR,`*count*`,`*timestamp*`)}` | Timestamp formed by adding *count* years to *timestamp*
`{fn TIMESTAMPADD(SQL_TSI_MONTH,`*count*`,`*timestamp*`)}` | Timestamp formed by adding *count* months to *timestamp*
`{fn TIMESTAMPADD(SQL_TSI_DAY,`*count*`,`*timestamp*`)}` | Timestamp formed by adding *count* days to *timestamp*
`{fn TIMESTAMPADD(SQL_TSI_HOUR,`*count*`,`*timestamp*`)}` | Timestamp formed by adding *count* hours to *timestamp*
`{fn TIMESTAMPADD(SQL_TSI_MINUTE,`*count*`,`*timestamp*`)}` | Timestamp formed by adding *count* minutes to *timestamp*
`{fn TIMESTAMPADD(SQL_TSI_SECOND,`*count*`,`*timestamp*`)}` | Timestamp formed by adding *count* seconds to *timestamp*
`{fn TIMESTAMPDIFF(SQL_TSI_YEAR,`*timestamp1*`,`*timestamp2*`)}` | Number of years by which *timestamp2* exceeds *timestamp1*
`{fn TIMESTAMPDIFF(SQL_TSI_MONTH,`*timestamp1*`,`*timestamp2*`)}` | Number of months by which *timestamp2* exceeds *timestamp1*
`{fn TIMESTAMPDIFF(SQL_TSI_DAY,`*timestamp1*`,`*timestamp2*`)}` | Number of days by which *timestamp2* exceeds *timestamp1*
`{fn TIMESTAMPDIFF(SQL_TSI_HOUR,`*timestamp1*`,`*timestamp2*`)}` | Number of hours by which *timestamp2* exceeds *timestamp1*
`{fn TIMESTAMPDIFF(SQL_TSI_MINUTE,`*timestamp1*`,`*timestamp2*`)}` | Number of minutes by which *timestamp2* exceeds *timestamp1*
`{fn TIMESTAMPDIFF(SQL_TSI_SECOND,`*timestamp1*`,`*timestamp2*`)}` | Number of seconds by which *timestamp2* exceeds *timestamp1*
`{fn YEAR(`*date*`)}` | The year of *date*
#### Conversion Functions
Conversion function escape clauses are replaced by the corresponding SQL expression before the SQL request text is transmitted to the database.
Conversion Function | Returns
--------------------------------------------------------------- | ---
`{fn CONVERT(`*value*`, SQL_BIGINT)}` | *value* converted to SQL `BIGINT`
`{fn CONVERT(`*value*`, SQL_BINARY(`*size*`))}` | *value* converted to SQL `BYTE(`*size*`)`
`{fn CONVERT(`*value*`, SQL_CHAR(`*size*`))}` | *value* converted to SQL `CHAR(`*size*`)`
`{fn CONVERT(`*value*`, SQL_DATE)}` | *value* converted to SQL `DATE`
`{fn CONVERT(`*value*`, SQL_DECIMAL(`*precision*`,`*scale*`))}` | *value* converted to SQL `DECIMAL(`*precision*`,`*scale*`)`
`{fn CONVERT(`*value*`, SQL_DOUBLE)}` | *value* converted to SQL `DOUBLE PRECISION`, a synonym for `FLOAT`
`{fn CONVERT(`*value*`, SQL_FLOAT)}` | *value* converted to SQL `FLOAT`
`{fn CONVERT(`*value*`, SQL_INTEGER)}` | *value* converted to SQL `INTEGER`
`{fn CONVERT(`*value*`, SQL_LONGVARBINARY)}` | *value* converted to SQL `VARBYTE(64000)`
`{fn CONVERT(`*value*`, SQL_LONGVARCHAR)}` | *value* converted to SQL `LONG VARCHAR`
`{fn CONVERT(`*value*`, SQL_NUMERIC)}` | *value* converted to SQL `NUMBER`
`{fn CONVERT(`*value*`, SQL_SMALLINT)}` | *value* converted to SQL `SMALLINT`
`{fn CONVERT(`*value*`, SQL_TIME(`*scale*`))}` | *value* converted to SQL `TIME(`*scale*`)`
`{fn CONVERT(`*value*`, SQL_TIMESTAMP(`*scale*`))}` | *value* converted to SQL `TIMESTAMP(`*scale*`)`
`{fn CONVERT(`*value*`, SQL_TINYINT)}` | *value* converted to SQL `BYTEINT`
`{fn CONVERT(`*value*`, SQL_VARBINARY(`*size*`))}` | *value* converted to SQL `VARBYTE(`*size*`)`
`{fn CONVERT(`*value*`, SQL_VARCHAR(`*size*`))}` | *value* converted to SQL `VARCHAR(`*size*`)`
#### LIKE Predicate Escape Character
Within a `LIKE` predicate's *pattern* argument, the characters `%` (percent) and `_` (underscore) serve as wildcards.
To interpret a particular wildcard character literally in a `LIKE` predicate's *pattern* argument, the wildcard character must be preceded by an escape character, and the escape character must be indicated in the `LIKE` predicate's `ESCAPE` clause.
`LIKE` predicate escape character escape clauses are replaced by the corresponding SQL clause before the SQL request text is transmitted to the database.
`{escape '`*EscapeCharacter*`'}`
The escape clause must be specified immediately after the `LIKE` predicate that it applies to.
#### Outer Joins
Outer join escape clauses are replaced by the corresponding SQL clause before the SQL request text is transmitted to the database.
`{oj `*TableName* *OptionalCorrelationName* `LEFT OUTER JOIN `*TableName* *OptionalCorrelationName* `ON `*JoinCondition*`}`
`{oj `*TableName* *OptionalCorrelationName* `RIGHT OUTER JOIN `*TableName* *OptionalCorrelationName* `ON `*JoinCondition*`}`
`{oj `*TableName* *OptionalCorrelationName* `FULL OUTER JOIN `*TableName* *OptionalCorrelationName* `ON `*JoinCondition*`}`
#### Stored Procedure Calls
Stored procedure call escape clauses are replaced by the corresponding SQL clause before the SQL request text is transmitted to the database.
`{call `*ProcedureName*`}`
`{call `*ProcedureName*`(`*CommaSeparatedParameterValues...*`)}`
#### Native SQL
When a SQL request contains the native SQL escape clause, all escape clauses are replaced in the SQL request text, and the modified SQL request text is returned to the application as a result set containing a single row and a single VARCHAR column. The SQL request text is not transmitted to the database, and the SQL request is not executed. The native SQL escape clause mimics the functionality of the JDBC API `Connection.nativeSQL` method.
`{fn teradata_nativesql}`
#### Connection Functions
The following table lists connection function escape clauses that are intended for use with the native SQL escape clause `{fn teradata_nativesql}`.
These functions provide information about the connection, or control the behavior of the connection.
Functions that provide information return locally-cached information and avoid a round-trip to the database.
Connection function escape clauses are replaced by the returned information before the SQL request text is transmitted to the database.
Connection Function | Returns
--------------------------------------------- | ---
`{fn teradata_amp_count}` | Number of AMPs of the database system
`{fn teradata_database_version}` | Version number of the database
`{fn teradata_driver_version}` | Version number of the driver
`{fn teradata_get_errors}` | Errors from the most recent batch operation
`{fn teradata_get_warnings}` | Warnings from an operation that completed with warnings
`{fn teradata_getloglevel}` | Current log level
`{fn teradata_go_runtime}` | Go runtime version for the Teradata GoSQL Driver
`{fn teradata_logon_sequence_number}` | Session's Logon Sequence Number, if available
`{fn teradata_program_name}` | Executable program name
`{fn teradata_provide(config_response)}` | Config Response parcel contents in JSON format
`{fn teradata_provide(connection_id)}` | Connection's unique identifier within the process
`{fn teradata_provide(default_connection)}` | `false` indicating this is not a stored procedure default connection
`{fn teradata_provide(gateway_config)}` | Gateway Config parcel contents in JSON format
`{fn teradata_provide(governed)}` | `true` or `false` indicating the `govern` connection parameter setting
`{fn teradata_provide(host_id)}` | Session's host ID
`{fn teradata_provide(java_charset_name)}` | `UTF8`
`{fn teradata_provide(lob_support)}` | `true` or `false` indicating this connection's LOB support
`{fn teradata_provide(local_address)}` | Local address of the connection's TCP socket
`{fn teradata_provide(local_port)}` | Local port of the connection's TCP socket
`{fn teradata_provide(original_hostname)}` | Original specified database hostname
`{fn teradata_provide(redrive_active)}` | `true` or `false` indicating whether this connection has Redrive active
`{fn teradata_provide(remote_address)}` | Hostname (if available) and IP address of the connected database node
`{fn teradata_provide(remote_port)}` | TCP port number of the database
`{fn teradata_provide(rnp_active)}` | `true` or `false` indicating whether this connection has Recoverable Network Protocol active
`{fn teradata_provide(session_charset_code)}` | Session character set code `191`
`{fn teradata_provide(session_charset_name)}` | Session character set name `UTF8`
`{fn teradata_provide(sip_support)}` | `true` or `false` indicating this connection's StatementInfo parcel support
`{fn teradata_provide(transaction_mode)}` | Session's transaction mode, `ANSI` or `TERA`
`{fn teradata_provide(uses_check_workload)}` | `true` or `false` indicating whether this connection uses `CHECK WORKLOAD`
`{fn teradata_session_number}` | Session number
#### Request-Scope Functions
The following table lists request-scope function escape clauses that are intended for use with preparing or executing a SQL request.
These functions control the behavior of the prepare or execute operation, and are limited in scope to the particular SQL request in which they are specified.
Request-scope function escape clauses are removed before the SQL request text is transmitted to the database.
Request-Scope Function | Effect
------------------------------------------------------ | ---
`{fn teradata_clobtranslate(`*Option*`)}` | Executes the SQL request with CLOB translate *Option* `U` (unlocked) or the default `L` (locked)
`{fn teradata_error_query_count(`*Number*`)}` | Specifies how many times the driver will attempt to query FastLoad Error Table 1 after a FastLoad operation. Takes precedence over the `error_query_count` connection parameter.
`{fn teradata_error_query_interval(`*Milliseconds*`)}` | Specifies how many milliseconds the driver will wait between attempts to query FastLoad Error Table 1. Takes precedence over the `error_query_interval` connection parameter.
`{fn teradata_error_table_1_suffix(`*Suffix*`)}` | Specifies the suffix to append to the name of FastLoad error table 1. Takes precedence over the `error_table_1_suffix` connection parameter.
`{fn teradata_error_table_2_suffix(`*Suffix*`)}` | Specifies the suffix to append to the name of FastLoad error table 2. Takes precedence over the `error_table_2_suffix` connection parameter.
`{fn teradata_error_table_database(`*DbName*`)}` | Specifies the parent database name for FastLoad error tables 1 and 2. Takes precedence over the `error_table_database` connection parameter.
`{fn teradata_failfast}` | Reject ("fail fast") this SQL request rather than delay by a workload management rule or throttle
`{fn teradata_fake_result_sets}` | A fake result set containing statement metadata precedes each real result set. Takes precedence over the `fake_result_sets` connection parameter.
`{fn teradata_fake_result_sets_off}` | Turns off fake result sets for this SQL request. Takes precedence over the `fake_result_sets` connection parameter.
`{fn teradata_field_quote(`*String*`)}` | Specifies a single-character string used to quote fields in a CSV file. Takes precedence over the `field_quote` connection parameter.
`{fn teradata_field_sep(`*String*`)}` | Specifies a single-character string used to separate fields in a CSV file. Takes precedence over the `field_sep` connection parameter.
`{fn teradata_govern_off}` | Teradata workload management rules will reject rather than delay a FastLoad or FastExport. Takes precedence over the `govern` connection parameter.
`{fn teradata_govern_on}` | Teradata workload management rules may delay a FastLoad or FastExport. Takes precedence over the `govern` connection parameter.
`{fn teradata_lobselect(`*Option*`)}` | Executes the SQL request with LOB select *Option* `S` (spool-scoped LOB locators), `T` (transaction-scoped LOB locators), or the default `I` (inline materialized LOB values)
`{fn teradata_manage_error_tables_off}` | Turns off FastLoad error table management for this request. Takes precedence over the `manage_error_tables` connection parameter.
`{fn teradata_manage_error_tables_on}` | Turns on FastLoad error table management for this request. Takes precedence over the `manage_error_tables` connection parameter.
`{fn teradata_parameter(`*Index*`,`*DataType*`)}` | Transmits parameter *Index* bind values as *DataType*
`{fn teradata_posixlt_off}` | Does not use `POSIXlt` subclasses for result set column value types.
`{fn teradata_posixlt_on}` | Uses `POSIXlt` subclasses for certain result set column value types.
`{fn teradata_provide(request_scope_column_name_off)}` | Provides the default column name behavior for this SQL request. Takes precedence over the `column_name` connection parameter.
`{fn teradata_provide(request_scope_lob_support_off)}` | Turns off LOB support for this SQL request. Takes precedence over the `lob_support` connection parameter.
`{fn teradata_provide(request_scope_refresh_rsmd)}` | Executes the SQL request with the default request processing option `B` (both)
`{fn teradata_provide(request_scope_sip_support_off)}` | Turns off StatementInfo parcel support for this SQL request. Takes precedence over the `sip_support` connection parameter.
`{fn teradata_read_csv(`*CSVFileName*`)}` | Executes a batch insert using the bind parameter values read from the specified CSV file for either a SQL batch insert or a FastLoad
`{fn teradata_request_timeout(`*Seconds*`)}` | Specifies the timeout for executing the SQL request. Zero means no timeout. Takes precedence over the `request_timeout` connection parameter.
`{fn teradata_require_fastexport}` | Specifies that FastExport is required for the SQL request
`{fn teradata_require_fastload}` | Specifies that FastLoad is required for the SQL request
`{fn teradata_rpo(`*RequestProcessingOption*`)}` | Executes the SQL request with *RequestProcessingOption* `S` (prepare), `E` (execute), or the default `B` (both)
`{fn teradata_sessions(`*Number*`)}` | Specifies the *Number* of data transfer connections for FastLoad or FastExport. Takes precedence over the `sessions` connection parameter.
`{fn teradata_try_fastexport}` | Tries to use FastExport for the SQL request
`{fn teradata_try_fastload}` | Tries to use FastLoad for the SQL request
`{fn teradata_untrusted}` | Marks the SQL request as untrusted; not implemented yet
`{fn teradata_values_off}` | Turns off `teradata_values` for this SQL request. Takes precedence over the `teradata_values` connection parameter. Refer to the [Data Types](#DataTypes) table for details.
`{fn teradata_values_on}` | Turns on `teradata_values` for this SQL request. Takes precedence over the `teradata_values` connection parameter. Refer to the [Data Types](#DataTypes) table for details.
`{fn teradata_write_csv(`*CSVFileName*`)}` | Exports one or more result sets from a SQL request or a FastExport to the specified CSV file or files
The `teradata_field_sep` and `teradata_field_quote` escape functions have a single-character string argument. The string argument must follow SQL literal syntax. The string argument may be enclosed in single-quote (`'`) characters or double-quote (`"`) characters.
To represent a single-quote character in a string enclosed in single-quote characters, you must repeat the single-quote character.
{fn teradata_field_quote('''')}
To represent a double-quote character in a string enclosed in double-quote characters, you must repeat the double-quote character.
{fn teradata_field_quote("""")}
<a name="FastLoad"></a>
### FastLoad
The driver offers FastLoad, which opens multiple database connections to transfer data in parallel.
Please be aware that this is an early release of the FastLoad feature. Think of it as a beta or preview version. It works, but does not yet offer all the features that JDBC FastLoad offers. FastLoad is still under active development, and we will continue to enhance it in subsequent builds.
FastLoad has limitations and cannot be used in all cases as a substitute for SQL batch insert:
* FastLoad can only load into an empty permanent table.
* FastLoad cannot load additional rows into a table that already contains rows.
* FastLoad cannot load into a volatile table or global temporary table.
* FastLoad cannot load duplicate rows into a `MULTISET` table with a primary index.
* Do not use FastLoad to load only a few rows, because FastLoad opens extra connections to the database, which is time consuming.
* Only use FastLoad to load many rows (at least 100,000 rows) so that the row-loading performance gain exceeds the overhead of opening additional connections.
* FastLoad does not support all database data types. For example, `BLOB` and `CLOB` are not supported.
* FastLoad requires StatementInfo parcel support to be enabled.
* FastLoad locks the destination table.
* If Online Archive encounters a table being loaded with FastLoad, online archiving of that table will be bypassed.
Your application can bind a single row of data for FastLoad, but that is not recommended because the overhead of opening additional connections causes FastLoad to be slower than a regular SQL `INSERT` for a single row.
How to use FastLoad:
* Auto-commit should be turned off before beginning a FastLoad.
* FastLoad is intended for binding many rows at a time. Each batch of rows must be able to fit into memory.
* When auto-commit is turned off, your application can insert multiple batches in a loop for the same FastLoad.
* Each column's data type must be consistent across every row in every batch over the entire FastLoad.
* The column values of the first row of the first batch dictate what the column data types must be in all subsequent rows and all subsequent batches of the FastLoad.
FastLoad opens multiple data transfer connections to the database. FastLoad evenly distributes each batch of rows across the available data transfer connections, and uses overlapped I/O to send and receive messages in parallel.
To use FastLoad, your application must prepend one of the following escape functions to the `INSERT` statement:
* `{fn teradata_try_fastload}` tries to use FastLoad for the `INSERT` statement, and automatically executes the `INSERT` as a regular SQL statement when the `INSERT` is not compatible with FastLoad.
* `{fn teradata_require_fastload}` requires FastLoad for the `INSERT` statement, and fails with an error when the `INSERT` is not compatible with FastLoad.
Your application can prepend other optional escape functions to the `INSERT` statement:
* `{fn teradata_sessions(`n`)}` specifies the number of data transfer connections to be opened, and is capped at the number of AMPs. The default is the smaller of 8 or the number of AMPs. We recommend avoiding this function to let the driver ask the database how many data transfer connections should be used.
* `{fn teradata_error_table_1_suffix(`suffix`)}` specifies the suffix to append to the name of FastLoad error table 1. The default suffix is `_ERR_1`.
* `{fn teradata_error_table_2_suffix(`suffix`)}` specifies the suffix to append to the name of FastLoad error table 2. The default suffix is `_ERR_2`.
* `{fn teradata_error_table_database(`dbname`)}` specifies the parent database name for FastLoad error tables 1 and 2. By default, the FastLoad error tables reside in the same database as the destination table.
* `{fn teradata_govern_on}` or `{fn teradata_govern_off}` specifies whether Teradata workload management rules may delay or reject the FastLoad. Takes precedence over the `govern` connection parameter.
After beginning a FastLoad, your application can obtain the Logon Sequence Number (LSN) assigned to the FastLoad by prepending the following escape functions to the `INSERT` statement:
* `{fn teradata_nativesql}{fn teradata_logon_sequence_number}` returns the string form of an integer representing the Logon Sequence Number (LSN) for the FastLoad. Returns an empty string if the request is not a FastLoad.
FastLoad does not stop for data errors such as constraint violations or unique primary index violations. After inserting each batch of rows, your application must obtain warning and error information by prepending the following escape functions to the `INSERT` statement:
* `{fn teradata_nativesql}{fn teradata_get_warnings}` returns in one string all warnings generated by FastLoad for the request.
* `{fn teradata_nativesql}{fn teradata_get_errors}` returns in one string all data errors observed by FastLoad for the most recent batch. The data errors are obtained from FastLoad error table 1, for problems such as constraint violations, data type conversion errors, and unavailable AMP conditions.
Your application ends FastLoad by committing or rolling back the current transaction. After commit or rollback, your application must obtain warning and error information by prepending the following escape functions to the `INSERT` statement:
* `{fn teradata_nativesql}{fn teradata_get_warnings}` returns in one string all warnings generated by FastLoad for the commit or rollback. The warnings are obtained from FastLoad error table 2, for problems such as duplicate rows.
* `{fn teradata_nativesql}{fn teradata_get_errors}` returns in one string all data errors observed by FastLoad for the commit or rollback. The data errors are obtained from FastLoad error table 2, for problems such as unique primary index violations.
Warning and error information remains available until the next batch is inserted or until the commit or rollback. Each batch execution clears the prior warnings and errors. Each commit or rollback clears the prior warnings and errors.
<a name="FastExport"></a>
### FastExport
The driver offers FastExport, which opens multiple database connections to transfer data in parallel.
Please be aware that this is an early release of the FastExport feature. Think of it as a beta or preview version. It works, but does not yet offer all the features that JDBC FastExport offers. FastExport is still under active development, and we will continue to enhance it in subsequent builds.
FastExport has limitations and cannot be used in all cases as a substitute for SQL queries:
* FastExport cannot query a volatile table or global temporary table.
* FastExport supports single-statement SQL `SELECT`, and supports multi-statement requests composed of multiple SQL `SELECT` statements only.
* FastExport supports question-mark parameter markers in `WHERE` clause conditions. However, the database does not permit the equal `=` operator for primary or unique secondary indexes, and will return database error 3695 "A Single AMP Select statement has been issued in FastExport".
* Do not use FastExport to fetch only a few rows, because FastExport opens extra connections to the database, which is time consuming.
* Only use FastExport to fetch many rows (at least 100,000 rows) so that the row-fetching performance gain exceeds the overhead of opening additional connections.
* FastExport does not support all database data types. For example, `BLOB` and `CLOB` are not supported.
* For best efficiency, do not use `GROUP BY` and `ORDER BY` clauses with FastExport.
* FastExport's result set ordering behavior may differ from a regular SQL query. In particular, a query containing an ordered analytic function may not produce an ordered result set. Use an `ORDER BY` clause to guarantee result set order.
FastExport opens multiple data transfer connections to the database. FastExport uses overlapped I/O to send and receive messages in parallel.
To use FastExport, your application must prepend one of the following escape functions to the query:
* `{fn teradata_try_fastexport}` tries to use FastExport for the query, and automatically executes the query as a regular SQL query when the query is not compatible with FastExport.
* `{fn teradata_require_fastexport}` requires FastExport for the query, and fails with an error when the query is not compatible with FastExport.
Your application can prepend other optional escape functions to the query:
* `{fn teradata_sessions(`n`)}` specifies the number of data transfer connections to be opened, and is capped at the number of AMPs. The default is the smaller of 8 or the number of AMPs. We recommend avoiding this function to let the driver ask the database how many data transfer connections should be used.
* `{fn teradata_govern_on}` or `{fn teradata_govern_off}` specifies whether Teradata workload management rules may delay or reject the FastExport. Takes precedence over the `govern` connection parameter.
After beginning a FastExport, your application can obtain the Logon Sequence Number (LSN) assigned to the FastExport by prepending the following escape functions to the query:
* `{fn teradata_nativesql}{fn teradata_logon_sequence_number}` returns the string form of an integer representing the Logon Sequence Number (LSN) for the FastExport. Returns an empty string if the request is not a FastExport.
<a name="CSVBatchInserts"></a>
### CSV Batch Inserts
The driver can read batch insert bind values from a CSV (comma separated values) file. This feature can be used with SQL batch inserts and with FastLoad.
To specify batch insert bind values in a CSV file, the application prepends the escape function `{fn teradata_read_csv(`*CSVFileName*`)}` to the `INSERT` statement.
The application can specify batch insert bind values in a CSV file, or specify bind parameter values, but not both together. The driver returns an error if both are specified together.
Considerations when using a CSV file:
* Each record is on a separate line of the CSV file. Records are delimited by line breaks (CRLF). The last record in the file may or may not have an ending line break.
* The first line of the CSV file is a header line. The header line lists the column names separated by the field separator (e.g. `col1,col2,col3`).
* The field separator defaults to the comma character (`,`). You can specify a different field separator character with the `field_sep` connection parameter or with the `teradata_field_sep` escape function. The specified field separator character must match the actual separator character used in the CSV file.
* Each field can optionally be enclosed by the field quote character, which defaults to the double-quote character (e.g. `"abc",123,efg`). You can specify a different field quote character with the `field_quote` connection parameter or with the `teradata_field_quote` escape function. The field quote character must match the actual field quote character used in the CSV file.
* The field separator and field quote characters cannot be set to the same value. The field separator and field quote characters must be legal UTF-8 characters and cannot be line feed (`\n`) or carriage return (`\r`).
* Field quote characters are only permitted in fields enclosed by field quote characters. Field quote characters must not appear inside unquoted fields (e.g. `ab"cd"ef,1,abc` is not allowed).
* To include a field quote character in a quoted field, the field quote character must be repeated (e.g. `"abc""efg""dh",123,xyz`).
* Line breaks, field quote characters, and field separators may be included in a quoted field (e.g. `"abc,efg\ndh",123,xyz`).
* Specify a `NULL` value in the CSV file with an empty value between commas (e.g. `1,,456`).
* A zero-length quoted string specifies a zero-length non-`NULL` string, not a `NULL` value (e.g. `1,"",456`).
* Not all data types are supported. For example, `BLOB`, `BYTE`, and `VARBYTE` are not supported.
* A field length greater than 64KB is transmitted to the database as a `DEFERRED CLOB` for a SQL batch insert. A field length greater than 64KB is not supported with FastLoad.
Limitations when using CSV batch inserts:
* Bound parameter values cannot be specified in the execute method when using the escape function `{fn teradata_read_csv(`*CSVFileName*`)}`.
* The CSV file must contain at least one valid record in addition to the header line containing the column names.
* For FastLoad, the insert operation will fail if the CSV file is improperly formatted and a parser error occurs.
* For SQL batch insert, some records may be inserted before a parsing error occurs. A list of the parser errors will be returned. Each parser error will include the line number (starting at line 1) and the column number (starting at zero).
* Using a CSV file with FastLoad has the same limitations and is used the same way as described in the [FastLoad](#FastLoad) section.
<a name="CSVExportResults"></a>
### CSV Export Results
The driver can export query results to CSV files. This feature can be used with SQL query results, with calls to stored procedures, and with FastExport.
To export a result set to a CSV file, the application prepends the escape function `{fn teradata_write_csv(`*CSVFileName*`)}` to the SQL request text.
If the query returns multiple result sets, each result set will be written to a separate file. The file name is varied by inserting the string "_N" between the specified file name and file type extension (e.g. `fileName.csv`, `fileName_1.csv`, `fileName_2.csv`). If no file type extension is specified, then the suffix "_N" is appended to the end of the file name (e.g. `fileName`, `fileName_1`, `fileName_2`).
A stored procedure call that produces multiple dynamic result sets behaves like other SQL requests that return multiple result sets. The stored procedure's output parameter values are exported as the first CSV file.
Example of a SQL request that returns multiple results:
`{fn teradata_write_csv(myFile.csv)}select 'abc' ; select 123`
CSV File Name | Content
------------- | ---
myFile.csv | First result set
myFile_1.csv | Second result set
To obtain the metadata for each result set, use the escape function `{fn teradata_fake_result_sets}`. A fake result set containing the metadata will be written to a file preceding each real result set.
Example of a query that returns multiple result sets with metadata:
`{fn teradata_fake_result_sets}{fn teradata_write_csv(myFile.csv)}select 'abc' ; select 123`
CSV File Name | Content
------------- | ---
myFile.csv | Fake result set containing the metadata for the first result set
myFile_1.csv | First result set
myFile_2.csv | Fake result set containing the metadata for the second result set
myFile_3.csv | Second result set
Exported CSV files have the following characteristics:
* Each record is on a separate line of the CSV file. Records are delimited by line breaks (CRLF).
* Column values are separated by the field separator character, which defaults to the comma character (`,`). You can specify a different field separator character with the `field_sep` connection parameter or with the `teradata_field_sep` escape function.
* The first line of the CSV file is a header line. The header line lists the column names separated by the field separator (e.g. `col1,col2,col3`).
* When necessary, column values are enclosed by the field quote character, which defaults to the double-quote character (`"`). You can specify a different field quote character with the `field_quote` connection parameter or with the `teradata_field_quote` escape function.
* The field separator and field quote characters cannot be set to the same value. The field separator and field quote characters must be legal UTF-8 characters and cannot be line feed (`\n`) or carriage return (`\r`).
* If a column value contains line breaks, field quote characters, and/or field separators in a field, the value is quoted with the field quote character.
* If a column value contains a field quote character, the value is quoted and the field quote character is repeated. For example, column value `abc"def` is exported as `"abc""def"`.
* A `NULL` value is exported to the CSV file as an empty value between field separators (e.g. `123,,456`).
* A non-`NULL` zero-length character value is exported as a zero-length quoted string (e.g. `123,"",456`).
Limitations when exporting to CSV files:
* When the application chooses to export results to a CSV file, the results are not available for the application to fetch in memory.
* A warning is returned if the application specifies an export CSV file for a SQL statement that does not produce a result set.
* Exporting a CSV file with FastExport has the same limitations and is used the same way as described in the [FastExport](#FastExport) section.
* Not all data types are supported. For example, `BLOB`, `BYTE`, and `VARBYTE` are not supported; if one of these column types is present in the result set, an error will be returned.
* `CLOB`, `XML`, `JSON`, and `DATASET STORAGE FORMAT CSV` data types are supported for SQL query results and are exported as string values, but these data types are not supported by FastExport.
<a name="ChangeLog"></a>
### Change Log
`17.20.0.24` - May 23, 2023
* GOSQL-41 escape function teradata_provide(request_scope_column_name_off)
* GOSQL-124 runstartup connection parameter
* RDBI-105 Timestamp without time zone interoperability with R 4.3.0
`17.20.0.23` - May 19, 2023
* GOSQL-157 logon_timeout connection parameter
`17.20.0.22` - May 16, 2023
* Build DLL and shared library with Go 1.19.9
`17.20.0.21` - May 15, 2023
* GOSQL-128 use OIDC scope from Gateway Config parcel
`17.20.0.20` - May 5, 2023
* GOSQL-155 teradata_provide(gateway_config) teradata_provide(governed) teradata_provide(uses_check_workload)
`17.20.0.19` - March 30, 2023
* GOSQL-129 TLS certificate revocation checking with CRL
* GOSQL-130 TLS certificate revocation checking with OCSP (not OCSP stapling)
* GOSQL-132 sslcrc connection parameter
`17.20.0.18` - March 27, 2023
* GOSQL-146 FastLoad and FastExport Unicode Pass Through support
`17.20.0.17` - March 24, 2023
* Build DLL and shared library with Go 1.19.7
* GOSQL-136 escape functions teradata_manage_error_tables_off and teradata_manage_error_tables_on
* GOSQL-139 allow alternate LOCATOR(type) syntax for teradata_parameter escape function
* GOSQL-145 connection parameters error_query_count, error_query_interval, error_table_1_suffix, error_table_2_suffix, error_table_database, manage_error_tables, sessions
`17.20.0.16` - February 21, 2023
* GOSQL-138 avoid panic for aborted session
`17.20.0.15` - February 16, 2023
* GOSQL-137 limit the max number of records in a CSV batch
`17.20.0.14` - January 19, 2023
* GOSQL-24 Asynchronous abort SQL request execution
* GOSQL-134 escape function teradata_request_timeout
* GOSQL-135 connection parameter request_timeout
`17.20.0.13` - January 17, 2023
* GOSQL-133 return FastLoad errors for FastLoad with teradata_read_csv
`17.20.0.12` - December 2, 2022
* Build DLL and shared library with Go 1.19.3
* GOSQL-126 escape functions teradata_values_off and teradata_values_on
`17.20.0.11` - November 1, 2022
* GOSQL-125 FastLoad FastExport govern support for fake_result_sets=true
* GOSQL-127 substitute dash for unavailable program name in Client Attributes
`17.20.0.10` - October 27, 2022
* GOSQL-67 FastLoad FastExport workload management
* GOSQL-120 govern connection parameter
* GOSQL-123 conditional use of Statement Independence depending on database setting
`17.20.0.9` - October 25, 2022
* Build DLL and shared library with Go 1.18.7
`17.20.0.8` - October 19, 2022
* GOSQL-122 case-insensitive VERIFY-FULL
`17.20.0.7` - September 27, 2022
* Avoid Kerberos logon failure
`17.20.0.6` - September 19, 2022
* Build DLL and shared library with Go 1.18.6
* Change package to opt out of StagedInstall
`17.20.0.5` - September 15, 2022
* Additional changes for GOSQL-119 avoid nil pointer dereference for FastExport CSV error
`17.20.0.4` - September 14, 2022
* GOSQL-87 support Mac ARM without Rosetta
* GOSQL-119 avoid nil pointer dereference for FastExport CSV error
`17.20.0.3` - September 6, 2022
* GOSQL-118 teradata_write_csv support for queries containing commas
`17.20.0.2` - August 23, 2022
* GOSQL-117 browser_tab_timeout connection parameter
`17.20.0.1` - August 11, 2022
* Build DLL and shared library with Go 1.18.5
`17.20.0.0` - June 16, 2022
* GOSQL-74 FastLoad support for connection parameter fake_result_sets=true
`17.10.0.16` - June 6, 2022
* GOSQL-106 indicate unavailable TLS certificate status with ClientAttributesEx CERT=U
`17.10.0.15` - June 2, 2022
* Switch to TeraGSS 17.20.00.04 and OpenSSL 1.1.1l
* Requires macOS 10.14 Mojave or later and ends support for older versions of macOS
`17.10.0.14` - May 18, 2022
* GOSQL-104 FastExport reports invalid CSV path name for first query but not subsequent
* GOSQL-105 Avoid driver failure when database warning length is invalid
`17.10.0.13` - May 16, 2022
* GOSQL-53 browser and logmech=BROWSER connection parameters
* GOSQL-56 Implement Federated Authentication feature in GoSQL Driver
* RDBI-65 Implement Federated Authentication feature in R driver
`17.10.0.12` - April 26, 2022
* Teradata R public repository URL change
`17.10.0.11` - April 15, 2022
* GOSQL-71 TLS certificate verification
* GOSQL-98 remove escape function teradata_setloglevel
`17.10.0.10` - April 7, 2022
* GOSQL-82 Escape functions teradata_field_sep and teradata_field_quote
* GOSQL-97 FastLoad/FastExport accommodate extra whitespace in SQL request
`17.10.0.9` - March 24, 2022
* GOSQL-95 case-insensitive sslmode connection parameter values
* GOSQL-96 avoid CVE security vulnerabilities present in Go 1.17 and earlier
* Build DLL and shared library with Go 1.18
* Requires macOS 10.13 High Sierra or later and ends support for older versions of macOS
`17.10.0.8` - March 18, 2022
* GOSQL-94 thread-safe connect failure cache
`17.10.0.7` - March 9, 2022
* GOSQL-84 accommodate 64-bit Activity Count
* GOSQL-92 FastLoad returns error 512 when first column value is NULL
`17.10.0.6` - February 23, 2022
* GOSQL-91 Avoid Error 8019 by always sending Config Request message
`17.10.0.5` - February 4, 2022
* GOSQL-26 provide stored procedure creation errors
* GOSQL-73 Write CSV files
* GOSQL-88 Append streamlined client call stack to ClientProgramName
`17.10.0.4` - January 10, 2022
* GOSQL-86 provide UDF compilation errors
`17.10.0.3` - December 13, 2021
* GOSQL-20 TLS support
* GOSQL-29 Laddered Concurrent Connect
* RDBI-17 Implement Laddered Concurrent Connect - R driver
* Build DLL and shared library with Go 1.15.15
`17.10.0.2` - November 30, 2021
* GOSQL-12 Centralized administration for data encryption
* GOSQL-25 Assign Response message error handling
* GOSQL-27 Enhance checks for missing logon parameters
* GOSQL-65 improve terasso error messages
* GOSQL-66 transmit Client Attributes to DBS during logon
* RDBI-19 Centralized administration (from database) of Data Encryption
`17.10.0.1` - July 2, 2021
* GOSQL-35 Statement Independence
* GOSQL-72 Read CSV files
* RDBI-57 JSON, CSV, and Avro data type support
* RDBI-75 Preserve the integer64 values when the first integer64 column value returned is NULL
`17.10.0.0` - June 8, 2021
* GOSQL-75 trim whitespace from SQL request text
`17.0.0.8` - December 18, 2020
* GOSQL-13 add support for FastExport protocol
`17.0.0.7` - October 9, 2020
* GOSQL-68 cross-process COP hostname load distribution
`17.0.0.6` - September 28, 2020
* RDBI-70 change hms::as.hms to hms::as_hms
`17.0.0.5` - August 26, 2020
* GOSQL-64 improve error checking for FastLoad escape functions
`17.0.0.4` - August 18, 2020
* GOSQL-62 prevent nativesql from executing FastLoad
* GOSQL-63 prevent FastLoad panic
`17.0.0.3` - July 30, 2020
* Build DLL and shared library with Go 1.14.6
* Sample program `fetchperftest.R`
`17.0.0.2` - June 10, 2020
* GOSQL-60 CLOBTranslate=Locked workaround for DBS DR 194293
`17.0.0.1` - June 4, 2020
* GOSQL-61 FastLoad accommodate encryptdata true
`16.20.0.38` - May 12, 2020
* GOSQL-58 support multiple files for Elicit File protocol
* GOSQL-59 FastLoad accommodate dbscontrol change of COUNT(*) return type
`16.20.0.37` - Apr 30, 2020
* GOSQL-57 Deferred LOB values larger than 1MB
`16.20.0.36` - Mar 27, 2020
* GOSQL-22 enable insert of large LOB values over 64KB
* GOSQL-52 teradata_try_fastload consider bind value data types
* GOSQL-54 enforce Decimal value maximum precision 38
* RDBI-56 Teradata data types up to TD 14.10
`16.20.0.35` - Jan 8, 2020
* GOSQL-51 FastLoad fails when table is dropped and recreated
`16.20.0.34` - Dec 10, 2019
* GOSQL-50 provide FastLoad duplicate row errors with auto-commit on
* RDBI-62 allow NA in bound list of raw values
`16.20.0.33` - Nov 26, 2019
* GOSQL-15 add database connection parameter
`16.20.0.32` - Nov 25, 2019
* RDBI-9 TJEncryptPassword.R sample program
`16.20.0.31` - Nov 21, 2019
* GOSQL-49 FastLoad support for additional connection parameters
`16.20.0.30` - Nov 20, 2019
* GOSQL-33 CALL to stored procedure INOUT and OUT parameter output values
* RDBI-54 Implement method dbNextResult
`16.20.0.29` - Nov 19, 2019
* RDBI-53 Implement method dbListObjects
`16.20.0.28` - Nov 15, 2019
* GOSQL-36 segment and iterate parameter batches per batch row limit
* GOSQL-43 segment and iterate parameter batches per request message size limit for FastLoad
`16.20.0.27` - Oct 23, 2019
* RDBI-61 improve performance for batch bind values
`16.20.0.26` - Oct 16, 2019
* GOSQL-46 LDAP password special characters
`16.20.0.25` - Oct 3, 2019
* GOSQL-45 FastLoad interop with Stored Password Protection
`16.20.0.24` - Sep 6, 2019
* GOSQL-14 add support for FastLoad protocol
* GOSQL-34 negative scale for Number values
* RDBI-16 Data Transfer - FastLoad Protocol
`16.20.0.23` - Aug 27, 2019
* GOSQL-40 Skip executing empty SQL request text
* RDBI-59 dbConnect named arguments as connection parameters
`16.20.0.22` - Aug 16, 2019
* GOSQL-39 COP Discovery interop with Kerberos
`16.20.0.21` - Aug 12, 2019
* GOSQL-38 timestamp prefix log messages
`16.20.0.20` - Aug 7, 2019
* GOSQL-4 Support COP discovery
* RDBI-10 COP Discovery
`16.20.0.19` - Jul 29, 2019
* GOSQL-18 Auto-commit
* RDBI-58 dbBegin dbCommit dbRollback methods
`16.20.0.18` - May 13, 2019
* RDBI-52 dbWriteTable field.types column subset
`16.20.0.17` - Apr 25, 2019
* RDBI-51 immediate connection parameter
`16.20.0.16` - Apr 23, 2019
* RDBI-50 dbSendStatement and dbSendQuery immediate parameter
`16.20.0.15` - Apr 17, 2019
* RDBI-49 difftime bind values
`16.20.0.14` - Apr 15, 2019
* RDBI-48 posixlt connection parameter
`16.20.0.13` - Apr 11, 2019
* RDBI-47 hms package dependency
`16.20.0.12` - Apr 9, 2019
* RDBI-38 Result TIME and TIMESTAMP values as R data types
* RDBI-46 Move required packages from Imports to Depends
`16.20.0.11` - Apr 2, 2019
* RDBI-45 NaN bind value
`16.20.0.10` - Mar 26, 2019
* RDBI-43 Implement dbFetch row count argument
* RDBI-44 dbAppendTable and dbWriteTable POSIXlt bind values
`16.20.0.9` - Mar 25, 2019
* RDBI-37 NA value insertion to database as NULL
* RDBI-40 POSIXct value insertion to database as TIMESTAMP without time zone
* RDBI-41 POSIXlt value insertion to database with gmtoff specify time zone
* RDBI-42 Remove support for binding AsIs list of POSIXct values
`16.20.0.8` - Mar 22, 2019
* RDBI-39 dbWriteTable accept NULL row.names
`16.20.0.7` - Mar 14, 2019
* RDBI-36 POSIXct vector bind value
`16.20.0.6` - Mar 12, 2019
* RDBI-35 dbWriteTable accept and ignore field.types with append = TRUE
`16.20.0.5` - Mar 8, 2019
* RDBI-26 Implement method dbGetInfo
* RDBI-30 Implement method dbListFields
`16.20.0.4` - Mar 6, 2019
* RDBI-20 Parameterized SQL with bind values
* RDBI-21 New behavior to execute non-parameterized SQL requests
* RDBI-23 Implement method dbIsValid for connection
* RDBI-25 Implement method dbWriteTable
* RDBI-27 Implement method dbExistsTable
* RDBI-29 Implement method dbRemoveTable
* RDBI-34 BYTE BLOB VARBYTE result set values
`16.20.0.3` - Feb 26, 2019
* RDBI-28 Implement method dbListTables
* RDBI-32 Implement method dbGetRowsAffected
`16.20.0.2` - Feb 8, 2019
* GOSQL-11 JWT authentication method
* GOSQL-16 tmode connection parameter
* GOSQL-17 commit and rollback functions
* RDBI-7 Teradata Vantage User Logon mechanisms
* RDBI-14 Implement support for JWT logon mechanism
* RDBI-24 Empty result sets
`16.20.0.1` - Jan 3, 2019
* RDBI-2 OS Platforms (added Mac compatibility)
`16.20.0.0` - Nov 28, 2018
* RDBI-1 R driver connectivity
* RDBI-4 R Language Version
* RDBI-6 Teradata Vantage version support
* RDBI-8 Data Security
|
package br.org.generation.lojadegames.controller;
import java.util.List;
import javax.validation.Valid;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.CrossOrigin;
import org.springframework.web.bind.annotation.DeleteMapping;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.PutMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import br.org.generation.lojadegames.model.Categoria;
import br.org.generation.lojadegames.repository.CategoriaRepository;
// REST controller exposing CRUD endpoints for the Categoria entity under /categorias.
@RestController
@RequestMapping("/categorias")
@CrossOrigin(origins = "*", allowedHeaders = "*") // NOTE(review): wide-open CORS — confirm this is intended for production
public class CategoriaController {

    @Autowired
    private CategoriaRepository categoriaRepository;

    // GET /categorias — list every category.
    // Fixed: the handler was declared 'private'; Spring MVC handler methods should be public.
    @GetMapping
    public ResponseEntity<List<Categoria>> findAllCategoria() {
        return ResponseEntity.ok(categoriaRepository.findAll());
    }

    // GET /categorias/{id} — 200 with the category, or 404 when the id does not exist.
    @GetMapping("/{id}")
    public ResponseEntity<Categoria> findById(@PathVariable long id) {
        return categoriaRepository.findById(id)
                .map(resp -> ResponseEntity.ok(resp))
                .orElse(ResponseEntity.notFound().build());
    }

    // GET /categorias/genero/{genero} — case-insensitive substring search on 'genero'.
    // (Method name kept as findByTitulo for compatibility, although it filters by genero.)
    @GetMapping("/genero/{genero}")
    public ResponseEntity<List<Categoria>> findByTitulo(@PathVariable String genero) {
        return ResponseEntity.ok(categoriaRepository.findAllByGeneroContainingIgnoreCase(genero));
    }

    // POST /categorias — persist a new category; responds 201 with the saved entity.
    @PostMapping
    public ResponseEntity<Categoria> postCategoria(@Valid @RequestBody Categoria categoria) {
        return ResponseEntity.status(HttpStatus.CREATED).body(categoriaRepository.save(categoria));
    }

    // PUT /categorias — update an existing category; 400 when the id is unknown.
    @PutMapping
    public ResponseEntity<Categoria> putCategoria(@Valid @RequestBody Categoria categoria) {
        return categoriaRepository.findById(categoria.getId())
                .map(resposta -> ResponseEntity.status(HttpStatus.OK).body(categoriaRepository.save(categoria)))
                .orElse(ResponseEntity.status(HttpStatus.BAD_REQUEST).build());
    }

    // DELETE /categorias/{id} — 204 on success, 404 when the id does not exist.
    // Fixed: previously deleted blindly and returned void, so a missing id surfaced
    // as a 500 (EmptyResultDataAccessException) instead of a 404.
    @DeleteMapping("/{id}")
    public ResponseEntity<?> deleteCategoria(@PathVariable long id) {
        if (!categoriaRepository.existsById(id)) {
            return ResponseEntity.notFound().build();
        }
        categoriaRepository.deleteById(id);
        return ResponseEntity.noContent().build();
    }
}
|
import java.util.Scanner;
// Checked exception thrown when the user supplies an array whose length is odd
// (each search thread scans exactly half of the array, so the size must be even).
class OddArraySizeException extends Exception {
    // message: human-readable description reported to the user.
    public OddArraySizeException(String message) {
        super(message);
    }
}
// Thread that searches the FIRST half of an int array (indices 0 .. length/2 - 1)
// for a target value. Call run()/start(), join, then query isFound().
class FirstHalf extends Thread {
    int arr[], i;          // array under search; i = index reached by the scan
    int element;           // target value
    boolean found = false; // true once element was seen in the first half

    FirstHalf(int arr[], int element) {
        this.arr = arr;
        this.element = element;
    }

    // Scan the first half; stop at the first match, leaving i at the match index.
    public void run() {
        for (i = 0; i < arr.length / 2; i++) {
            if (arr[i] == element) {
                found = true;
                break;
            }
        }
    }

    // Returns the 1-based position of the match, or -1 when not found.
    // Fixed: previously returned ++i, mutating the index so a second call
    // reported a different (wrong) position; now idempotent.
    public int isFound() {
        return found ? i + 1 : -1;
    }
}
// Thread that searches the SECOND half of an int array (indices length/2 .. length-1)
// for a target value. Call run()/start(), join, then query isFound().
class SecondHalf extends Thread {
    int arr[], i;          // array under search; i = index reached by the scan
    int element;           // target value
    boolean found = false; // true once element was seen in the second half

    SecondHalf(int arr[], int element) {
        this.arr = arr;
        this.element = element;
    }

    // Scan the second half; stop at the first match, leaving i at the match index.
    public void run() {
        for (i = arr.length / 2; i < arr.length; i++) {
            if (arr[i] == element) {
                found = true;
                break;
            }
        }
    }

    // Returns the 1-based position of the match within the whole array, or -1
    // when not found. Fixed: previously returned ++i, mutating the index so a
    // second call reported a different (wrong) position; now idempotent.
    public int isFound() {
        return found ? i + 1 : -1;
    }
}
// Driver: reads an even-sized int array from stdin, then searches for a value
// using two threads — one per array half — and reports where it was found.
public class Q6 {
    public static void main(String[] args) {
        Scanner scanner = new Scanner(System.in);
        int n;
        int element;
        System.out.println("Enter the size of the array:");
        n = scanner.nextInt();
        // Reject odd sizes up front: each thread scans exactly half the array.
        try {
            if (n % 2 != 0) {
                throw new OddArraySizeException("Odd size array not allowed.");
            }
        } catch (OddArraySizeException e) {
            System.out.println("Error: " + e.getMessage());
            System.exit(0);
        }
        int[] arr = new int[n];
        for (int i = 0; i < n; i++) {
            System.out.println("Enter element " + (i + 1) + ":");
            arr[i] = scanner.nextInt();
        }
        System.out.println("Enter the element to be found:");
        element = scanner.nextInt();
        // Launch both half-searches concurrently and wait for both to finish
        // before reading their results (the join makes the reads safe).
        FirstHalf firstHalfThread = new FirstHalf(arr, element);
        SecondHalf secondHalfThread = new SecondHalf(arr, element);
        firstHalfThread.start();
        secondHalfThread.start();
        try {
            firstHalfThread.join();
            secondHalfThread.join();
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
        // -1 means "not found in that half"; otherwise a 1-based position.
        int ffound = firstHalfThread.isFound();
        int sfound = secondHalfThread.isFound();
        if (ffound == -1 && sfound == -1) {
            System.out.println("Element not found in the first or second half");
        }
        else if (ffound > -1 && sfound == -1) {
            System.out.println("Found in first half at position " + ffound + ", array location = " + ffound);
        }
        else if (ffound == -1 && sfound > -1) {
            // Position is relative to the second half; location is absolute.
            System.out.println("Found in second half at position " + (sfound - arr.length / 2) + ", array location = " + sfound);
        }
        else {
            System.out.println("Found in first and second half");
        }
    }
}
|
# Copyright 2017 Battelle Energy Alliance, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Created on Nov 1, 2019
@author: mandd
"""
#External Modules---------------------------------------------------------------
import pandas as pd
import numpy as np
#External Modules End-----------------------------------------------------------
#Internal Modules---------------------------------------------------------------
from ravenframework.PluginBaseClasses.PostProcessorPluginBase import PostProcessorPluginBase
from ravenframework.utils import InputData, InputTypes
#Internal Modules End-----------------------------------------------------------
class MCSImporter(PostProcessorPluginBase):
  """
    PostProcessor that imports Minimal Cut Sets (MCSs) into RAVEN as a PointSet
  """
  def __init__(self):
    """
      Constructor
      @ In, None
      @ Out, None
    """
    super().__init__()
    self.printTag = 'POSTPROCESSOR MCS IMPORTER'
    self.expand = None        # option that controls the structure of the ET. If True, the tree is expanded so that
                              # all possible sequences are generated. Sequence label is maintained according to the
                              # original tree
    self.beListColumn = None  # column of the BElist file holding the basic events (read only when expand is True);
                              # fix: previously never initialized in the constructor
    self.validDataType = ['PointSet'] # The list of accepted types of DataObject
    ## Currently, we have used both DataObject.addRealization and DataObject.load to
    ## collect the PostProcessor returned outputs. DataObject.addRealization is used to
    ## collect single realization, while DataObject.load is used to collect multiple realizations
    ## However, the DataObject.load can not be directly used to collect single realization
    self.outputMultipleRealizations = True
    self.fileFrom = None      # the source of the provided MCSs file, i.e., saphire

  @classmethod
  def getInputSpecification(cls):
    """
      Method to get a reference to a class that specifies the input data for the class cls.
      @ In, cls, the class for which we are retrieving the specification
      @ Out, inputSpecification, InputData.ParameterInput, class to use for
        specifying input of cls.
    """
    inputSpecification = super().getInputSpecification()
    inputSpecification.addSub(InputData.parameterInputFactory("expand", contentType=InputTypes.BoolType))
    inputSpecification.addSub(InputData.parameterInputFactory("BElistColumn", contentType=InputTypes.StringType))
    inputSpecification.addSub(InputData.parameterInputFactory("fileFrom", contentType=InputTypes.StringType))
    return inputSpecification

  def _handleInput(self, paramInput):
    """
      Method that handles PostProcessor parameter input block.
      @ In, paramInput, ParameterInput, the already parsed input.
      @ Out, None
    """
    super()._handleInput(paramInput)
    expand = paramInput.findFirst('expand')
    if expand is None:
      # robustness fix: a missing <expand> node previously raised a bare AttributeError
      self.raiseAnError(IOError, 'MCSImporterPostProcessor Post-Processor ' + self.name + ', The <expand> node is required but was not found')
    self.expand = expand.value
    fileFrom = paramInput.findFirst('fileFrom')
    if fileFrom is not None:
      self.fileFrom = fileFrom.value
    if self.expand:
      beListColumn = paramInput.findFirst('BElistColumn')
      self.beListColumn = beListColumn.value

  def run(self, inputIn):
    """
      This method executes the PostProcessor action.
      @ In, inputIn, dict, dictionary contains the input data and input files, i.e.,
        {'Data':[DataObjects.asDataset('dict')], 'Files':[FileObject]}, only 'Files'
        will be used by this PostProcessor
      @ Out, mcsPointSet, dict, dictionary of outputs, i.e.,
        {'data':dict of realizations, 'dim':{}}
    """
    inputs = inputIn['Files']
    mcsFileFound = False
    beFileFound = False
    for file in inputs:
      if file.getType() == "MCSlist":
        if mcsFileFound:
          self.raiseAnError(IOError, 'MCSImporterPostProcessor Post-Processor ' + self.name + ', Multiple files with type=MCSlist have been found')
        else:
          mcsListFile = file
          mcsFileFound = True
      if file.getType() == "BElist":
        if self.expand == False:
          self.raiseAnError(IOError, 'MCSImporterPostProcessor Post-Processor ' + self.name + ', A file with type=BElist has been found but expand is set to False')
        if beFileFound:
          self.raiseAnError(IOError, 'MCSImporterPostProcessor Post-Processor ' + self.name + ', Multiple files with type=BElist have been found')
        else:
          BElistFile = file
          beFileFound = True
    # robustness fix: previously a missing MCSlist file caused an UnboundLocalError below
    if not mcsFileFound:
      self.raiseAnError(IOError, 'MCSImporterPostProcessor Post-Processor ' + self.name + ', No file with type=MCSlist has been found')
    if beFileFound == False and self.expand == True:
      # message fix: this branch fires when expand is True (not False) and the BElist file is missing
      self.raiseAnError(IOError, 'MCSImporterPostProcessor Post-Processor ' + self.name + ', Expand is set to True but no file with type=BElist has been found')
    self.mcsIDs, self.probability, self.mcsList, self.beList = mcsReader(mcsListFile.getFilename(), type=self.fileFrom)
    if self.expand:
      beData = pd.read_csv(BElistFile.getFilename())
      self.beList = beData[self.beListColumn].values.tolist()
    mcsPointSet = {}
    # MCS Input variables
    mcsPointSet['probability'] = self.probability
    mcsPointSet['MCS_ID'] = self.mcsIDs
    mcsPointSet['out'] = np.ones((self.probability.size))
    # MCS Output variables: one 0/1 indicator column per basic event
    for be in self.beList:
      mcsPointSet[be] = np.zeros(self.probability.size)
    counter = 0
    for mcs in self.mcsList:
      for be in mcs:
        mcsPointSet[be][counter] = 1.0
      counter = counter + 1
    mcsPointSet = {'data': mcsPointSet, 'dims': {}}
    return mcsPointSet
def mcsReader(mcsListFile, type=None):
  """
    Function designed to read a file containing the set of MCSs and to save it as list of list
    @ In, mcsListFile, string, name of the file containing the set of MCSs
    @ In, type, string, the type of MCS file: None for a user-provided csv file
      (ID,probability,BE1,BE2,...), or 'saphire' for a Saphire-generated file
    @ Out, mcsIDs, np array, array containing the ID associated to each MCS
    @ Out, probability, np array, array containing the probability associated to each MCS
    @ Out, mcsList, list, list of MCS, each element is also a list containing the basic events of each MCS
    @ Out, beList, set, set of all basic events contained in the MCSs
  """
  mcsList = []
  beList = set()
  probability = np.zeros((0))
  mcsIDs = np.zeros((0))
  # construct the list of MCSs and the list of BE
  with open(mcsListFile, 'r') as file:
    next(file)  # skip header
    lines = file.read().splitlines()
  if type is None:
    for l in lines:
      elementsList = l.split(',')
      # skip empty lines (mirrors the saphire branch)
      if elementsList[0].strip() == '':
        continue
      mcsIDs = np.append(mcsIDs, elementsList[0])
      probability = np.append(probability, float(elementsList[1]))
      # fix: previously the raw (unstripped) tokens were added to beList while
      # mcsList stored the stripped ones, so the two could disagree on the
      # same basic event when the csv contained surrounding whitespace
      mcs = list(element.strip() for element in elementsList[2:])
      mcsList.append(mcs)
      beList.update(mcs)
  elif type.lower() == 'saphire':
    lines = lines[1:]  # skip additional description line
    for l in lines:
      elementsList = l.split(',')
      # skip empty line
      if elementsList[0].strip() == '':
        continue
      mcsIDs = np.append(mcsIDs, elementsList[0])
      probability = np.append(probability, float(elementsList[1]))
      # skip column 3 which is the fraction to the total
      mcs = list(element.strip() for element in elementsList[3:])
      mcsList.append(mcs)
      beList.update(mcs)
  # NOTE(review): any other non-None type silently returns empty results —
  # confirm whether an unknown type should raise instead
  return mcsIDs, probability, mcsList, beList
|
import Box from "@mui/material/Box";
import FormControl from "@mui/material/FormControl";
import InputLabel from "@mui/material/InputLabel";
import MenuItem from "@mui/material/MenuItem";
import Select from "@mui/material/Select";
import { Dispatch, SetStateAction } from "react";
import { Topic } from "../../../types";
import { addTransparency } from "../filterAnimations";
// Controlled single-select dropdown used by the filter bar. Renders a default
// "no filter" entry plus one MenuItem per topic; choosing the default entry
// clears the filter (sets it to undefined).
export const SingleSelectFilterField = ({
  label,
  defaultValue,
  filter,
  setFilter,
  dropDownData,
  highlightColor,
}: {
  filter: Topic | undefined;
  setFilter: Dispatch<SetStateAction<Topic | undefined>>;
  dropDownData: Topic[];
  label: string;
  defaultValue: string;
  highlightColor: string;
}) => {
  // NOTE(review): typed as `any` because MUI's SelectChangeEvent is not
  // imported here; only event.target.value is read.
  const handleChange = (event: any) => {
    const {
      target: { value },
    } = event;
    // Selecting the default entry means "no filter".
    setFilter(value === defaultValue ? undefined : value);
  };
  // Hover/selected background tints derived from the highlight color.
  const dropDownStylingOverrides = {
    "&:hover": {
      backgroundColor: `${addTransparency(highlightColor, 0.4)} !important`,
    },
    "&.Mui-selected": {
      backgroundColor: addTransparency(highlightColor, 0.2),
    },
  };
  return (
    <Box
      sx={{
        m: 2,
        width: 200,
        paddingTop: "20px",
      }}>
      <FormControl fullWidth>
        <InputLabel
          sx={{
            "&.Mui-focused": {
              color: highlightColor,
            },
          }}>
          {/* Fixed: label text was hard-coded to "Topic", so it disagreed with
              the `label` prop passed to <Select> for any other label value. */}
          {label}
        </InputLabel>
        <Select
          value={filter ? filter : defaultValue}
          label={label}
          onChange={handleChange}
          sx={{
            "&.Mui-focused": {
              "&& fieldset": {
                borderColor: highlightColor,
              },
            },
          }}>
          <MenuItem value={defaultValue} sx={dropDownStylingOverrides}>
            {defaultValue}
          </MenuItem>
          {dropDownData.map((item, index) => {
            return (
              <MenuItem key={index} value={item} sx={dropDownStylingOverrides}>
                {item}
              </MenuItem>
            );
          })}
        </Select>
      </FormControl>
    </Box>
  );
};
|
<?php
namespace App\Validator\Constraints;
use Symfony\Component\Validator\Constraint;
use Symfony\Component\Validator\ConstraintValidator;
/**
 * @Annotation
 *
 * Constraint requiring a password with at least 8 characters, one lowercase
 * letter, one uppercase letter, one digit and one special symbol
 * (enforced by PasswordRegexValidator).
 */
class PasswordRegex extends Constraint {
    // Fixed typo "numer" -> "number"; message now also mentions the
    // 8-character minimum that the validator's regex enforces.
    public $message = 'The password must contain at least 8 characters, 1 lowercase letter, 1 capital letter, 1 number and 1 special symbol';
}
/**
 * Validates a value against the PasswordRegex constraint: minimum 8 characters
 * with at least one uppercase letter, one lowercase letter, one digit and one
 * special symbol from the set #?!@$+%^&*-.
 */
class PasswordRegexValidator extends ConstraintValidator {
    public function validate($value, Constraint $constraint) {
        // Robustness fix: skip null/empty values (that is NotBlank's job) and
        // avoid passing null to preg_match(), deprecated since PHP 8.1.
        if (null === $value || '' === $value) {
            return;
        }
        if (!preg_match('/^(?=.*?[A-Z])(?=.*?[a-z])(?=.*?[0-9])(?=.*?[#?!@$+%^&*-]).{8,}$/', $value)) {
            $this->context->buildViolation($constraint->message)
                ->setParameter('{{value}}', $value)
                ->addViolation();
        }
    }
}
|
import { IsBoolean, IsNumber, IsOptional, IsString } from 'class-validator';
// Payload for creating a category; runtime-validated by class-validator decorators.
export class CreateCategoryDto {
  // Display name of the category (required).
  @IsString()
  name: string;
  // Optional free-text description.
  @IsString()
  @IsOptional()
  description?: string;
  // Whether items in this category require payment.
  @IsBoolean()
  isPayable: boolean;
  // Position of the category in ordered listings.
  @IsNumber()
  orderNumber: number;
}
// Payload for partially updating a category. Every field is optional at
// runtime (@IsOptional), so the TypeScript types are now optional too —
// previously name/isPayable/orderNumber were declared required, forcing
// callers to supply fields the validator does not actually require.
export class UpdateCategoryDto {
  // New display name, if changing.
  @IsOptional()
  @IsString()
  name?: string;
  // New free-text description, if changing.
  @IsString()
  @IsOptional()
  description?: string;
  // New payable flag, if changing.
  @IsOptional()
  @IsBoolean()
  isPayable?: boolean;
  // New ordering position, if changing.
  @IsOptional()
  @IsNumber()
  orderNumber?: number;
}
|
#include "TimeoutItem.h"
#include "PinBase.h"
// High-level gestures reported to the button callback.
enum BTN_Action {
  ACTION_SINGLE_CLICK,
  ACTION_DOUBLE_CLICK,
  ACTION_PRESS_ACTIVE,  // long press still held (fires periodically while pressed)
  ACTION_PRESS_END      // long press released
};
// Internal debounce/click state machine states.
// NOTE(review): "BNT" looks like a typo for "BTN", but the names are part of
// the public interface (used in the callback signature), so they are kept.
enum BNT_State {
  BUTTON_PULSE_LO,      // released after the first press
  BUTTON_PULSE_HI,      // first press detected
  BUTTON_PULSE_LO2,     // released after the second press
  BUTTON_PULSE_HI2,     // second press detected
  BUTTON_HOLD_BEGIN,    // long press in progress
  BUTTON_END            // idle / gesture finished
};
// Hold-duration buckets reported while a long press is active.
enum BNT_Hold {
  HOLD_5_SEC,           // held longer than 5 s
  HOLD_15_SEC,          // held longer than 15 s
  HOLD_25_SEC,          // held longer than 25 s
  HOLD_TRANSITION       // below 5 s, or no bucket change since last report
};
// Debounced push-button gesture detector: distinguishes single click, double
// click, and long press (with 5/15/25 s hold buckets). Call setup() once,
// then run() from the main loop; gestures are reported through the callback.
class MyButton: private PinReadable {
  BNT_State state = BUTTON_END;
  TimeoutItem debounce_timer;  // suppresses contact bounce between edges
  TimeoutItem click_timer;     // window in which a click/double-click must finish
  TimeoutItem press_timer;     // threshold for long-press detection
  BNT_Hold holdStatus = HOLD_TRANSITION;
  // Fixed: the callback pointer was never initialized, so the nullptr guard in
  // run() compared against indeterminate memory (undefined behavior) until
  // setup() was called.
  std::function<void(BTN_Action, BNT_Hold, uint32_t)> *callback = nullptr;

  // Restart the debounce/click timers and enter newState; a fresh press
  // (HI states) also restarts the long-press timer.
  void reset_timers(BNT_State newState) {
    debounce_timer.reset();
    click_timer.reset();
    state = newState;
    if (state == BUTTON_PULSE_HI || state == BUTTON_PULSE_HI2) {
      press_timer.reset();
    }
  }

  // Report a finished gesture and return the machine to idle.
  void Handle_ButtonEnd(BTN_Action action, uint32_t elapse) {
    (*callback)(action, HOLD_TRANSITION, elapse);
    state = BUTTON_END;
  }

  // Map the elapsed press time onto a hold bucket.
  BNT_Hold currentHoldStatus() {
    uint32_t elapse = press_timer.elapsed();
    if (elapse > 25000) { return HOLD_25_SEC; }
    else if (elapse > 15000) { return HOLD_15_SEC; }
    else if (elapse > 5000) { return HOLD_5_SEC; }
    else { return HOLD_TRANSITION; }
  }

public:
  MyButton(): PinReadable() {}

  // Configure the input pin and timing (all in milliseconds) and register the
  // gesture callback. No-op when the pin is invalid.
  void setup(uint8_t pin, std::function<void(BTN_Action, BNT_Hold, uint32_t)> *cb, uint32_t debounceTicks = 50,
             uint32_t clickTicks = 200, uint32_t longPressTicks = 2000) {
    pin_setup(pin, true);
    if (!isValid()) { return; }
    debounce_timer.load(debounceTicks);
    // click window starts counting after the debounce interval
    click_timer.load(clickTicks - debounceTicks);
    press_timer.load(longPressTicks);
    callback = cb;
  }

  // Advance the state machine; call frequently from the main loop.
  // The pin is active-low (pressed reads LOW, hence the negation).
  void run() {
    if (callback == nullptr) { return; }
    bool read = !pin_read();
    if (read && state == BUTTON_END) {
      reset_timers(BUTTON_PULSE_HI);
    }
    else if (state == BUTTON_PULSE_HI && debounce_timer.check()) {
      if (read) {
        // still held past the long-press threshold -> hold mode
        if (press_timer.check()) { reset_timers(BUTTON_HOLD_BEGIN); }
      } else {
        reset_timers(BUTTON_PULSE_LO);
      }
    }
    else if (state == BUTTON_PULSE_LO && debounce_timer.check()) {
      if (!read) {
        //! Single Click: click window expired with no second press
        if (click_timer.check()) {
          Handle_ButtonEnd(ACTION_SINGLE_CLICK, 0);
          Serial.println("[Button] SingleClick");
        }
      } else {
        reset_timers(BUTTON_PULSE_HI2);
      }
    }
    else if (state == BUTTON_PULSE_HI2 && debounce_timer.check()) {
      if (read) {
        // second press held past the long-press threshold -> hold mode
        if (press_timer.check()) { reset_timers(BUTTON_HOLD_BEGIN); }
      } else {
        reset_timers(BUTTON_PULSE_LO2);
      }
    }
    else if (state == BUTTON_PULSE_LO2 && debounce_timer.check()) {
      if (read) {
        // pressed again after a double pulse -> treat as hold when long enough
        if (press_timer.check()) { reset_timers(BUTTON_HOLD_BEGIN); }
      } else {
        //! Double Click: second release inside the click window
        if (click_timer.check()) {
          Handle_ButtonEnd(ACTION_DOUBLE_CLICK, 0);
          Serial.println("[Button] DoubleClick");
        }
      }
    }
    else if (state == BUTTON_HOLD_BEGIN) {
      if (read) {
        if (press_timer.check()) {
          //! Press Active
          // only report a holdStatus change once per bucket
          BNT_Hold _holdOutput = currentHoldStatus();
          if (holdStatus == _holdOutput) {
            _holdOutput = HOLD_TRANSITION;
          } else {
            holdStatus = _holdOutput;
          }
          // DO NOT call Handle_ButtonEnd: the press is still in progress
          (*callback)(ACTION_PRESS_ACTIVE, _holdOutput, press_timer.elapsed());
        }
      } else {
        //! Press Ended
        // Fixed: reuse the captured elapsed value for the callback too, so
        // the reported duration matches the one logged below.
        uint32_t elapse = press_timer.elapsed();
        Handle_ButtonEnd(ACTION_PRESS_END, elapse);
        Serial.print("[Button] PressEnded"); Serial.println(elapse);
      }
    }
  }
};
|
import { BrowserModule } from '@angular/platform-browser';
import { NgModule } from '@angular/core';
import { AppRoutingModule } from './app-routing.module';
import { AppComponent } from './app.component';
import { BrowserAnimationsModule } from '@angular/platform-browser/animations';
import { MaterialModule } from '@blox/material';
import { HomeComponent } from './home/home.component';
import { MatButtonModule, MatFormFieldModule, MatIconModule, MatInputModule } from '@angular/material';
import { HomeBodyComponent } from './home-body/home-body.component';
import { ReactiveFormsModule } from '@angular/forms';
import { MailboxComponent } from './mailbox/mailbox.component';
import { SocialMediaIconsComponent } from './social-media-icons/social-media-icons.component';
// Root Angular module: declares the application's components and wires up
// routing, browser animations, and the Material modules used by the templates.
@NgModule({
  declarations: [
    // Components rendered by this application.
    AppComponent,
    HomeComponent,
    HomeBodyComponent,
    MailboxComponent,
    SocialMediaIconsComponent
  ],
  imports: [
    BrowserModule,
    AppRoutingModule,
    BrowserAnimationsModule,
    // @blox/material wrapper module plus the individual Angular Material
    // modules (icons, reactive form fields, inputs, buttons).
    MaterialModule,
    MatIconModule,
    ReactiveFormsModule,
    MatFormFieldModule,
    MatInputModule,
    MatButtonModule
  ],
  providers: [],
  // AppComponent is the root component bootstrapped into index.html.
  bootstrap: [AppComponent]
})
export class AppModule { }
|
/*************** <auto-copyright.pl BEGIN do not edit this line> **************
*
* VR Juggler is (C) Copyright 1998-2011 by Iowa State University
*
* Original Authors:
* Allen Bierbaum, Christopher Just,
* Patrick Hartling, Kevin Meinert,
* Carolina Cruz-Neira, Albert Baker
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
* Boston, MA 02110-1301, USA.
*
*************** <auto-copyright.pl END do not edit this line> ***************/
#ifndef _VRJ_OPENGL_DRAW_AXES_FUNCTORS_H_
#define _VRJ_OPENGL_DRAW_AXES_FUNCTORS_H_
#include <vrj/Draw/OpenGL/Config.h>

#include <cmath>

#include <vrj/Draw/OpenGL/DrawManager.h>
#include <vrj/Draw/OpenGL/DrawObjectFunctor.h>
namespace vrj
{
namespace opengl
{
/** \class vrj::opengl::DrawAxesFunctor DrawAxesFunctors.h vrj/Draw/OpenGL/DrawAxesFunctors.h
*
* Draw basic coordinate system axes using colored lines
*
*/
class DrawAxesFunctor : public vrj::opengl::DrawObjectFunctor
{
public:
DrawAxesFunctor();
virtual ~DrawAxesFunctor()
{
/* Do nothing. */ ;
}
/** Called to initialize any context-specific information. */
virtual void contextInit();
/**
* Callback function for drawing. Called when the object should be drawn.
* @pre GL context is set and ready to go.
*/
virtual void draw(vrj::UserPtr user);
void setScaleFactor(float scaleFactor)
{
mScaleFactor = abs(scaleFactor);
}
float getScaleFactor()
{
return mScaleFactor;
}
private:
gmtl::Vec3f mOrigin, mX_Axis, mY_Axis, mZ_Axis;
float mScaleFactor;
};
/**
 * Core-profile variant of DrawAxesFunctor: draws the axes through a shader
 * program and vertex buffer objects instead of fixed-function GL calls.
 * All GL handles start at 0 and are created in contextInit().
 */
class DrawAxesCoreFunctor : public DrawAxesFunctor
{
public:
   DrawAxesCoreFunctor()
      : mProgram(0)
      , muMVMatrixHandle(0)
      , muPMatrixHandle(0)
      , maVertexCoordHandle(0)
      , maVertexColorHandle(0)
      , muScaleFactorHandle(0)
      , mVertexArrayBufferID(0)
      , mVertexCoordBufferID(0)
      , mVertexColorBufferID(0)
      , mIndexBufferID(0)
   {
      /* Do nothing. */ ;
   }

   virtual ~DrawAxesCoreFunctor()
   {
      /* Do nothing. */ ;
   }

   /** Called to initialize any context specific information. */
   virtual void contextInit();

   /**
    * Callback function for drawing. Called when the object should be drawn.
    * @pre GL context is set and ready to go.
    */
   virtual void draw(vrj::UserPtr user);

private:
   unsigned int mProgram;             // linked shader program handle
   // Shader uniform/attribute locations resolved at contextInit() time.
   int muMVMatrixHandle;              // modelview matrix uniform
   int muPMatrixHandle;               // projection matrix uniform
   int maVertexCoordHandle;           // per-vertex position attribute
   int maVertexColorHandle;           // per-vertex color attribute
   int muScaleFactorHandle;           // axis scale factor uniform
   // Buffer object handles for the axis geometry.
   unsigned int mVertexArrayBufferID;
   unsigned int mVertexCoordBufferID;
   unsigned int mVertexColorBufferID;
   unsigned int mIndexBufferID;
};
} // End of opengl namespace
} // End of vrj namespace
#endif /* _VRJ_OPENGL_DRAW_AXES_FUNCTORS_H_ */
|
import { createSlice, PayloadAction, createAsyncThunk } from '@reduxjs/toolkit';
import { setError } from './errorSlice';
import axios from 'axios';
// Base URL of the invoices REST API.
// NOTE(review): hard-coded localhost — confirm this should come from env config.
const INVOICES_API_URL = 'http://localhost:3000/invoices';
// Shape of an invoice record as returned by the API.
interface Invoice {
  id: string;
  due_date: string;
  amount: number;
  description: string;
  user_id: number;
  // Embedded owner summary.
  user: {
    name: string;
  }
}
// Redux state for this slice: the cached list, the invoice currently being
// viewed, and the last fetch error (if any).
interface InvoiceState {
  currentInvoice: Invoice | null;
  invoices: Invoice[];
  error: string | null;
}
const initialState: InvoiceState = {
  currentInvoice: null,
  invoices: [],
  error: null
};
// Thunk: GET all invoices. On failure, dispatches the message to the global
// error slice and rejects so the slice below can record it.
export const fetchInvoices = createAsyncThunk(
  'invoices/fetchInvoices',
  async (_, { dispatch, rejectWithValue }) => {
    try {
      const response = await axios.get<Invoice[]>(INVOICES_API_URL);
      return response.data;
    } catch (error) {
      if (axios.isAxiosError(error) && error.response) {
        dispatch(setError(error.response.data.message));
        return rejectWithValue(error.response.data);
      } else {
        // Fixed typo: "occured" -> "occurred" (now consistent with the
        // rejectWithValue message below).
        dispatch(setError('An unknown error occurred'));
        return rejectWithValue('An unknown error occurred');
      }
    }
  }
);
// Thunk: GET a single invoice by id. Mirrors fetchInvoices' error handling.
export const fetchInvoice = createAsyncThunk(
  'invoices/fetchInvoice',
  async (invoiceId: string | undefined, { dispatch, rejectWithValue }) => {
    try {
      const response = await axios.get<Invoice>(INVOICES_API_URL + '/' + invoiceId);
      return response.data;
    } catch (error) {
      if (axios.isAxiosError(error) && error.response) {
        dispatch(setError(error.response.data.message));
        return rejectWithValue(error.response.data);
      } else {
        // Fixed typo: "occured" -> "occurred" (now consistent with the
        // rejectWithValue message below).
        dispatch(setError('An unknown error occurred'));
        return rejectWithValue('An unknown error occurred');
      }
    }
  }
);
// Slice holding the invoice list and the currently viewed invoice.
// All state changes come from the async thunks above (no synchronous reducers).
export const invoiceSlice = createSlice({
  name: 'invoice',
  initialState,
  reducers: {},
  extraReducers: (builder) => {
    builder
      // Replace the cached list on a successful bulk fetch.
      .addCase(fetchInvoices.fulfilled, (state, action: PayloadAction<Invoice[]>) => {
        state.invoices = action.payload;
      })
      .addCase(fetchInvoices.rejected, (state, action) => {
        state.error = action.payload as string;
      })
      // Store the single invoice being viewed.
      .addCase(fetchInvoice.fulfilled, (state, action: PayloadAction<Invoice>) => {
        state.currentInvoice = action.payload;
      })
      .addCase(fetchInvoice.rejected, (state, action) => {
        state.error = action.payload as string;
      });
  },
});
export default invoiceSlice.reducer;
|
// Plain recursive Fibonacci. Each call fans out into two more calls, giving
// O(2^n) time — far worse than an iterative loop or the memoized version.
const recursiveFib = (num) =>
  num <= 1 ? num : recursiveFib(num - 1) + recursiveFib(num - 2);

console.log(recursiveFib(4));
// [0, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55]
// Memoized ("top-down" dynamic programming) Fibonacci — think of the call
// graph as a tree whose repeated subtrees are cached, giving O(n) time.
// Fixes: fib(0) previously returned 1 (should be 0, matching the sequence
// above), and a leftover console.log(memo) debug statement was removed.
const fib = (n, memo = []) => {
  if (n <= 0) return 0;                        // base case: fib(0) = 0
  if (n <= 2) return 1;                        // base cases: fib(1) = fib(2) = 1
  if (memo[n] !== undefined) return memo[n];   // already computed
  const res = fib(n - 1, memo) + fib(n - 2, memo);
  memo[n] = res;
  return res;
};

fib(50)
|
import IndexLayout from "@/Layouts/IndexLayout";
import styles from "@/styles/news.module.scss";
import Link from "next/link";
import { useEffect, useState } from "react";
import { useRouter } from "next/router";
import { previewTextMaker } from "../../utils/helpers";
import { useTranslation } from 'next-i18next';
import { serverSideTranslations } from 'next-i18next/serverSideTranslations';
import { useSelector } from "react-redux";
import { getNewsState } from "@/store/news/news.slice";
import {yearsLinksVocabData} from "@/common/Constantas"
/**
 * News listing page.
 * Renders a tab bar of years plus the selected year's news items (newest
 * first). News data comes from the Redux store via getNewsState; the year
 * tabs are seeded from the static yearsLinksVocabData vocabulary.
 */
export default function News() {
  const router = useRouter();
  const {t} = useTranslation('common');
  const newsData = useSelector(getNewsState);
  // The selected year's entry ({id, news}), with news reversed for display.
  const [currentYearNewsData, setCurrentYearNewsData] = useState({id: undefined, news: []});
  // Year tabs; each entry carries {year, selected} — one tab selected at a time.
  const [yearsLinksVocab, setYearsLinksVocab] = useState(yearsLinksVocabData);
  // Find the selected year's entry in `data` and store it newest-first.
  const getCurrentYearNewsData = (data) => {
    const currentYear = yearsLinksVocab.filter((year) => year.selected)[0];
    let currentYearNews = data?.filter(
      (n) => n.id === currentYear?.year
    )[0];
    // NOTE(review): if no data entry matches the selected year,
    // currentYearNews is undefined and the spread below throws — presumably
    // data ids always align with the vocab years; confirm against
    // yearsLinksVocabData and the store contents.
    let reverseNews = [...currentYearNews?.news].reverse()
    setCurrentYearNewsData({...currentYearNews, news: reverseNews});
  };
  // Mark the clicked tab (index carried in data-id) as the only selected one.
  const yearSelectedTabHandler = (e) => {
    const idx = parseInt(e.target.dataset.id);
    const updatedYearsLinksVocab = yearsLinksVocab.map((year, index) => {
      return { ...year, selected: index === idx };
    });
    setYearsLinksVocab(updatedYearsLinksVocab);
  };
  // If the page is entered with a ?year= query (e.g. back-navigation from an
  // article), select that year's tab, then strip the query from the URL.
  useEffect(() => {
    if (!router.query.year) return;
    const { year: prevRouteYear } = router.query;
    const updatedYearsLinksVocab = yearsLinksVocab.map((year) => {
      return { ...year, selected: year.year === prevRouteYear };
    });
    setYearsLinksVocab(updatedYearsLinksVocab);
    router.push({
      pathname: `/news`,
      query: {},
    });
  }, [router.isReady]);
  // Recompute the displayed news whenever the data or the selected tab changes.
  useEffect(() => {
    if (newsData.length < 1) return
    getCurrentYearNewsData(newsData);
  }, [newsData, yearsLinksVocab]);
  return (
    <IndexLayout>
      <main className={styles.newsListBlock}>
        <div className={styles.newsListWrapper}>
          <h1 className="page-title">{t('news.title')}</h1>
          {/* Year tab bar */}
          <div className={styles.yearlyLinksBlock}>
            {yearsLinksVocab.map((year, idx) => {
              return (
                <div key={idx}>
                  <span
                    data-id={idx}
                    onClick={(e) => yearSelectedTabHandler(e)}
                    className={[
                      styles.yearlyLink,
                      year.selected ? styles.active : "",
                    ]
                      .filter(Boolean)
                      .join(" ")}
                  >
                    {year.year}
                  </span>
                </div>
              );
            })}
          </div>
          {/* Selected year's news, newest first; locale picks lt/en fields */}
          <div className={styles.newsList}>
            <div className="news">
              {newsData.length > 0 && currentYearNewsData?.news?.map((item, idx) => {
                return (
                  <div className={styles.singleNewsPreviewBlock} key={idx}>
                    <div className={styles.newsDate}>{item.date}</div>
                    <div className={styles.newsTitle}>{router.locale === "lt" ? item.title : item.titleEn}</div>
                    <div className={styles.newsPreviewText}>
                      {previewTextMaker(router.locale === "lt" ? item.text: item.textEn, 50) + " ..."}
                    </div>
                    <div>
                      <Link
                        className={styles.readMoreLink}
                        href={`news/${currentYearNewsData?.id}-${idx}`}
                      >
                        {t('news.readMore')} >
                      </Link>
                    </div>
                  </div>
                );
              })}
            </div>
          </div>
        </div>
      </main>
    </IndexLayout>
  );
}
// Pre-render the page with the locale's "common" translation bundle.
export async function getStaticProps({ locale }) {
  const translations = await serverSideTranslations(locale, ['common']);
  return {
    props: {
      ...translations,
    },
  };
}
|
package Chapter3
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.must.Matchers
import org.scalatest.matchers.should.Matchers.convertToAnyShouldWrapper
/** Unit tests for the Chapter3 Recursive helpers: sum, count and max over lists. */
class RecursiveSpec extends AnyFlatSpec with Matchers {
  "Recursive sum function" should "return correct sum" in {
    Recursive.sum(List(1, 2, 3)) shouldBe 6
  }
  it should "return 0 for empty list" in {
    Recursive.sum(List.empty) shouldBe 0
  }
  it should "return the same number for a list with single element" in {
    Recursive.sum(List(3)) shouldBe 3
  }
  "Recursive count function" should "return the correct count" in {
    Recursive.count(List(1, 2, 3)) shouldBe 3
  }
  it should "return 0 for empty list" in {
    Recursive.count(List.empty) shouldBe 0
  }
  it should "return the correct count value of a list with single element" in {
    Recursive.count(List(3)) shouldBe 1
  }
  "Recursive max function" should "return the correct max value" in {
    Recursive.max(List(1, 3, 2)) shouldBe 3
  }
  "Recursive max function" should "return the correct max value for a longer list" in {
    Recursive.max(List(1, 3, 2, 4, 5, 7, 6, 9, -1)) shouldBe 9
  }
  "Recursive max function" should "return 0 for empty list" in {
    Recursive.max(List()) shouldBe 0
  }
  "Recursive max function" should "return the same value for list with one item" in {
    // Bug fix: the spec text promises the element itself, but the assertion
    // expected 0 (a copy-paste of the empty-list expectation).
    Recursive.max(List(1)) shouldBe 1
  }
  "Recursive max function" should "return the correct max value when duplicate entries exist" in {
    Recursive.max(List(1, 3, 2, 4, 5, 7, 6, 9, -1, 9, 2)) shouldBe 9
  }
}
|
<?php
namespace AlexWells\GoodReflection\Reflector\Reflection;
use AlexWells\GoodReflection\Definition\TypeDefinition\EnumTypeDefinition;
use AlexWells\GoodReflection\Definition\TypeDefinition\MethodDefinition;
use AlexWells\GoodReflection\Reflector\Reflection\Attributes\HasAttributes;
use AlexWells\GoodReflection\Reflector\Reflection\Attributes\HasNativeAttributes;
use AlexWells\GoodReflection\Reflector\Reflector;
use AlexWells\GoodReflection\Type\Template\TypeParameterMap;
use AlexWells\GoodReflection\Type\Type;
use Illuminate\Support\Collection;
use ReflectionEnum;
use TenantCloud\Standard\Lazy\Lazy;
use function TenantCloud\Standard\Lazy\lazy;
/**
 * Reflection wrapper for an enum described by an EnumTypeDefinition.
 *
 * Method collections are computed lazily and cached; native attributes are
 * read through PHP's own ReflectionEnum.
 *
 * @template-covariant T
 *
 * @extends TypeReflection<T>
 */
class EnumReflection extends TypeReflection implements HasAttributes
{
	/** @var Lazy<Collection<int, MethodReflection<$this>>> */
	private Lazy $declaredMethods;

	/** @var Lazy<Collection<int, MethodReflection<$this>>> */
	private Lazy $methods;

	/** Native reflection handle; only used here to read the enum's attributes. */
	private readonly ReflectionEnum $nativeReflection;

	private readonly HasNativeAttributes $nativeAttributes;

	public function __construct(
		private readonly EnumTypeDefinition $definition,
		private readonly Reflector $reflector
	) {
		// Methods declared on the enum itself; wrapped lazily so the
		// MethodReflection objects are only built on first access.
		$this->declaredMethods = lazy(
			fn () => $this->definition
				->methods
				->map(fn (MethodDefinition $method) => new MethodReflection($method, $this, TypeParameterMap::empty()))
		);
		// Full method set: methods gathered from implemented interfaces and
		// used traits, then the enum's own declarations appended. keyBy()
		// keeps the LAST entry per method name, so declared methods override
		// inherited ones with the same name.
		$this->methods = lazy(
			fn () => collect([
				...$this->implements(),
				...$this->uses(),
			])
				->filter()
				->flatMap(function (Type $type) {
					$reflection = $this->reflector->forNamedType($type);
					return match (true) {
						$reflection instanceof ClassReflection,
						$reflection instanceof InterfaceReflection,
						$reflection instanceof TraitReflection => $reflection->methods(),
						default => [],
					};
				})
				->concat($this->declaredMethods())
				->keyBy(fn (MethodReflection $method) => $method->name())
				->values()
		);
		$this->nativeReflection = new ReflectionEnum($this->definition->qualifiedName);
		$this->nativeAttributes = new HasNativeAttributes(fn () => $this->nativeReflection->getAttributes());
	}

	/** Fully qualified name of the enum. */
	public function qualifiedName(): string
	{
		return $this->definition->qualifiedName;
	}

	/** File the enum is defined in, if the definition carries one. */
	public function fileName(): ?string
	{
		return $this->definition->fileName;
	}

	/**
	 * Instantiated native attributes of the enum.
	 *
	 * @return Collection<int, object>
	 */
	public function attributes(): Collection
	{
		return $this->nativeAttributes->attributes();
	}

	/**
	 * Interfaces the enum implements.
	 *
	 * @return Collection<int, Type>
	 */
	public function implements(): Collection
	{
		return $this->definition->implements;
	}

	/**
	 * Traits the enum uses.
	 *
	 * @return Collection<int, Type>
	 */
	public function uses(): Collection
	{
		return $this->definition->uses;
	}

	/**
	 * Methods declared directly on the enum.
	 *
	 * @return Collection<int, MethodReflection<$this>>
	 */
	public function declaredMethods(): Collection
	{
		return $this->declaredMethods->value();
	}

	/**
	 * Declared plus inherited methods; declared ones win on name clashes.
	 *
	 * @return Collection<int, MethodReflection<$this>>
	 */
	public function methods(): Collection
	{
		return $this->methods->value();
	}

	/** Whether the definition is flagged as built-in. */
	public function isBuiltIn(): bool
	{
		return $this->definition->builtIn;
	}
}
|
{{globalize|area = 中國}}
{{Expand German|ga=yes}}
{{Refimprove|time=2021-09-08}}
{{noteTA
|G1=Transport
|G2=Unit
|1=zh-hans:调车场;zh-hant:調車場;zh-cn:调车场;zh-hk:編組場
}}
[[File:Godorf_Station_at_Dusk,_May_2018.jpg|thumb]]
[[File:201805_Nanxiang_Station_Yard_III.jpg|thumb]]
[[File:Yard.jpg|thumb|[[羅湖編組站]]]]
[[File:ZVBf-Rollberg-02.webm|thumb]]
'''調車場''',也称'''编组站''',是[[鐵路车站|鐵路车站]]集中處理大量[[列車|列車]]到達、解體、編組、出發等列車作業,並為此設有比較完善的調車作業设施的固定作业区域。
==简介==
除列车在车站的到达、出发、通过及在区间内运行外,凡机车车辆进行一切有目的的移动(包括在站内或区间),统称为'''调车'''。
很多较大的火车站,既有客运设施(站舍、站台、天桥、人行地道、售票检票等),也有货运设施(货场、货物线、货物站台、仓库等),同时还有调车编组场地与设备。分别由车站的客运车间、货运车间、'''运转车间'''来负责。
其主要任務是根據列車編組計劃的要求,大量處理貨物列車的解體和編組作業。對貨物列車中的車輛進行技術檢修和貨運檢查整理工作,並按照列車運行時間表,準時接發列車。
編組場一般設有專用的到達场、出发场和調車場,以及駝峰調車設備。通常設在鐵路交換點,或有大量列車集散的工業區或港口。
调车工作“九固定”是指:固定作业区域、线路使用、调车机车、人员、班次、交接班时间、交接班地点、工具数量及其存放地点。
调车场的各条轨道线路,称为'''调车线'''(classification yard )。调车场头部的溜放部分,称为 ''classification bowl''<ref>{{cite journal| title=ABC's of Railroading: Terms of the trade| journal=[[Trains_(magazine)|Trains]] |date=June 1991| pages=p 22| publisher=Kalmbach Publishing| location=Waukesha, Wisconsin| ISSN= 0041-0934}}</ref>。在欧洲与中国,调车场通常用20-40条调车线,划分为“线束”(fans of tracks或者balloons of tracks),通常每组减速器后为8条调车线。在美国,调车场通常有40甚至72条调车线,分为6至10组线束。
车辆从驼峰峰顶至调车线时的溜放(bowl)速度,一般在5公里/小时左右,并且受调车场速度控制设备或现场的制动员人工放置铁鞋(skate)或手闸来控制,既不能太慢被后续溜放的车组追上(保持适当的间隔使得调车场头部的道岔能及时动作),也不能太快与调车线上的存车猛烈撞击。因此,对于“难行车”、向“难行线”溜放的车组要加速;对于“易行车”、向“易行线”溜放的车组要适当减速。
== 结构 ==
由于编组站[[驼峰|驼峰]]工作方向是单向、固定的,故一个完整的编组站至少包含三个站场,即(峰前)'''[[到达场|到达场]]'''、'''[[编组场|编组场]]'''(或称'''调车场''')和'''[[出发场|出发场]]'''。到达场是待解编列车停留的车场,由[[调车机车|调车机车]]將列车推上驼峰;在编组场,正在解编的列车自驼峰顶端依[[重力|重力]]溜至各[[股道|股道]],各股道車輛连接编组为新的列车駛離,目的地一般爲下一编组站或本[[鐵路樞紐|鐵路樞紐]]内小运转,也可能是空车、清洗车或待修理车的股道。在出发场,调车机车將编组场内已编好的列车拖出,转移至出发场股道上等待调度行车指令,然后挂上货运机车駛離出发场前往目的地。
; 直通场
专门办理无改编中转通过列车技术作业的车场。只进行到达接车、更换机车、列检、商检、出发发车等技术作业。一般在大型编组站或无改编中转通过列车作业量较大的编组站才设置专门的通过场。其他的编组站可在到达场或出发场办理无改编中转列车通过作业。
; 交换场
是上下行间交换车辆停留的车场,一般用于“折角车流”。
; 到发场、编发场
是分别將到达场与发车场、调车场与发车场合二为一。
; 尾部辅助调车场
用于编组小运转'''站顺'''列车,即將相同到站车辆编为相邻一组,各车组按到站顺序连挂。辅助调车场既可使用调车场若干条相邻调车线的尾部,亦可於调车场旁侧另建辅助调车场专用线;可共用尾部牵出线以平面调车,亦可建设小能力驼峰来调车。辅助调车场使用“箭翎线”编组站顺小运转列车比使用传统的“下落调车表”法更为高效。“箭翎线”指相邻3条调车线以一组[[三开道岔|三开道岔]]连接,故3条调车线以三组三开道岔分为9段,中间线路前两段供调车机车推送列车,如此可实现对7个到站的站顺小运转列车的调车编组任务。{{來源請求|据统计,中国铁路各编组站始发小运转站顺列车中96%到站不超过7个。}}对更多到站的情形可用5条线甚至7条线的“箭翎线”。
; 驼峰牵出线
是驼峰调车机將要推峰的列车自到达场牵出时走行的线路。
; 尾部牵出线
是峰尾调车机將编组完畢列车自调车场牵出,将进入出发场股道时走行的线路。
; 机车回转线
是在编组站[[机务段|机务段]]对向的到达场迂回立交绕过编组站引入机务段的线路。
; 峰下机走线
由于驼峰高出车站地面4米左右,且爲到达场与调车场间各股道收束之处,适宜建设峰下跨线桥供顺驼峰方向本务机车出入机务段走行,較绕行驼峰牵出线或尾部牵出线环线走行距离較短,建设费用較大,适合运量大的编组站。
; 禁溜车停留线
货物列车中往往混有禁溜车,为此驼峰信号有“倒退”显示,將推峰列车倒回,將禁溜车送入禁溜车停留线以绕过驼峰,由尾部调车机车在尾部牵出线实行平面调车。
调车场除有调车设备,一般还有货运机车整备、车辆检修等设备,因此往往设有机务段(或机务折返段、机务车间)、[[车辆段|车辆段]](车辆段修)、列检所、站修所、货洗所(清洗货车车辆)等机构。货运设备包括加冰线、倒装线、牲畜车、鱼苗车、蜂箱车的上水换水。客运设备有客运站房[[站台|站台]]或[[乘降所|客运乘降所]]。
== 人员配备 ==
调车场人员配备上,一般是四班运转,每班工作12小时,早六时与晚十八时交接班。一个白班后,休息24小时,再做一个夜班,然后休息48小时。依次循环。每班人员设:
*值班主任一人,领导、管理全班工作;
*值班员一人或两人,也称值班站长,负责车站的行车运转指挥;
**助理值班员若干人,负责外勤接发列车;
*调度员一人,即“站调”,负责本班调车计划的安排、实施、领导;
*信号长一人或多人,负责不同方向的接发车信号的实施指挥。<ref>{{Cite news|url=http://sd.people.com.cn/n/2014/0502/c166192-21118188.html |publisher=人民网-山东频道|date=2014-05-02|title=探秘济南火车站神秘信号楼的劳动者}}</ref>
**信号员多人,根据信号长的口令操作信号设备接发车。
*调车区长多人,负责所管'''调车区'''的全部工作
**调车长多人,领导指挥调车组的工作
***制动长多人,领导制动小组工作
****制动员多人,负责给溜放的车皮适时下'''铁鞋'''
***连接员多人,负责给待解体的列车在车厢连接处摘解“提钩”,在溜放集结后的车厢连挂
**扳道长多人,领导扳道小组工作
***扳道员多人,实施扳道工作
在調車場工作的工務人員,需要通過考試方能成為其中一員。以[[中華民國|中華民國]]([[臺灣|臺灣]])的[[交通部臺灣鐵路管理局|交通部臺灣鐵路管理局]]為例,欲進入其調車場工作,需先通過[[考試院|考試院]][[考選部|考選部]][[中華民國國家考試|國家考試]]中的[[特種考試|特種考試]]鐵路人員考試(簡稱'''鐵路特考'''),方符合資格,且最起碼也得通過佐級考試,才有其資格。其晉升管道是佐級晉員級、員級晉高員級(仿造日本鐵路)。
==纪录==
*世界上最大的调车场:美国[[内布拉斯加州|内布拉斯加州]]的[[联合太平洋铁路|联合太平洋铁路公司]]的{{tsl|en|Bailey Yard|贝利编组站}}占地11.5 km<sup>2</sup>,长度超过13 km,宽度3.2 km。200条线路总长507 km,985组道岔,766组渡线,17条到达线,16条出发线。 雇员近2600人。平均日办理139趟列车,超过14 000车辆通过,双向驼峰平均日改编3000辆。东行驼峰高10米,西行驼峰高6.1米。东行调车场65条线,西行调车场49条线。有东行机车整备所、西行机车整备所、机车站修车间、车辆站修所。
*欧洲最大的调车场是德国汉堡附近的{{tsl|en|Maschen Marshalling Yard|马申编组站}}。占地280公顷,长7000米,最大宽700米。<ref>Zahlenangaben bei Wiesmüller, Lawrenz: ''Die Hamburger Rangier- und Güterbahnhöfe'', p. 131ff.</ref>线路总长300km。有6座{{tsl|en|signal box|信号楼}},825座道岔,100个主体信号,115个距离信号,688个调车信号。作为双向[[编组站|编组站]],有两座驼峰与两个编组场(formation yard)。北-南向有16条到达线,48条编发线;南-北向有17条到达线与64条编发线。[[车辆段|车辆段]]有8条修车线。 机务段有一座2条线的检修库与若干条室外存车线。在1970年代成为全自动驼峰调车系统,日改编11000台车辆能力。<ref>Wiesmüller, Lawrenz: ''Die Hamburger Rangier- und Güterbahnhöfe'', p. 132ff.</ref>1985年12月11日创造了日改编作业量纪录:8400辆。1990年代初期平均日改编8000辆;2009年日改编4000辆。
== 另見 ==
*[[維修廠|維修廠]]
*[[駝峰|駝峰]]
== 参考文献 ==
{{reflist}}
== 外部链接 ==
* [http://www.kepu.net.cn/gb/technology/railway/railway_station/200401170022.html 编组站的布置和类型]
{{鐵路}}
{{中国铁路编组站}}
{{Railway track layouts}}
{{Portal bar|交通|鐵路}}
[[Category:鐵路|Category:鐵路]]
[[Category:鐵路車站|Category:鐵路車站]]
[[Category:鐵路基礎設施|Category:鐵路基礎設施]]
[[Category:機務段|Category:機務段]]
|
from scapy.all import ARP, Ether, srp
def discover_hosts(ip_range):
    """Return a list of {'ip', 'mac'} dicts for hosts answering ARP on ip_range."""
    # Build an ARP who-has request and wrap it in an Ethernet broadcast frame
    # so every host on the segment receives it.
    probe = Ether(dst="ff:ff:ff:ff:ff:ff") / ARP(pdst=ip_range)
    # srp() sends at layer 2 and returns (answered, unanswered); keep the answers.
    answered = srp(probe, timeout=2, verbose=0)[0]
    # Each reply carries the responder's IP (psrc) and MAC (hwsrc).
    return [{'ip': reply.psrc, 'mac': reply.hwsrc} for _sent, reply in answered]
# Demo: discover hosts on the given range and print them.
# Guarded so that merely importing this module does not trigger a network scan.
if __name__ == "__main__":
    ip_range = "192.168.0.1/24"  # Adjust this to the desired IP range
    active_hosts = discover_hosts(ip_range)
    for host in active_hosts:
        print("IP:", host['ip'], "MAC:", host['mac'])
|
Camera Settings Reset
The Camera Settings Reset command can be issued on a connected camera using the camera_settings_reset() method.
In the Camera Remote SDK, the API call to issue a command for camera settings reset is `SendCommand <../../_static/rcsdk_api.pdf#page=109>`_ .
Details about the Camera Settings Reset can be found at `CameraSettingsReset <../../_static/rcsdk_api.pdf#page=225>`_.
.. code-block:: python
import sony_cr
import io
import sys
import time
cm = sony_cr.CameraManager()
cameras = cm.find_cameras()
print("Found {} camera(s)".format(len(cameras)))
for i, cam in enumerate(cameras):
print("[{}]: {} {}".format(i+1, cam['model'], cam['id']))
if len(cameras) == 0:
sys.exit(0)
cam = cm.get_camera(cameras[0]['id'])
cam.connect()
cam.camera_settings_reset()
time.sleep(2)
Running the Sample
++++++++++++++++++
Copy the code snippet above into a file called camera-settings-reset.py and execute using the command:
.. code-block:: console
python camera-settings-reset.py
|
import './Navbar.css';
import Search from './Search';
import React, { useState, useEffect, useRef } from 'react';
function Navbar({ onToggleCart, toggleDarkMode, iconColor, bgColor, darkBG, toggleSidebarWidth, textColor}) {
const [isMenuVisible, setMenuVisibility] = useState(false);
const toggleMenuVisibility = () => {
setMenuVisibility(prevState => !prevState);
};
const [isSearchOpen, setIsSearchOpen] = useState(false);
const searchRef = useRef(null);
useEffect(() => {
const handleClickOutside = (event) => {
if (searchRef.current && !searchRef.current.contains(event.target)) {
setIsSearchOpen(false);
}
};
document.addEventListener('mousedown', handleClickOutside);
return () => {
document.removeEventListener('mousedown', handleClickOutside);
};
}, []);
const handleSearchClick = () => {
setIsSearchOpen(!isSearchOpen);
};
return (
<nav className='navbar p-0' style={{backgroundColor: darkBG}}>
<div className='nav-items'>
<ul className='nav-list-left nav-list pt-3'>
<button onClick={toggleSidebarWidth} className='sidebar-toggle-btn'><i class="fa-solid fa-bars p-2 shadow-effect" style={{color: iconColor}}></i></button>
<div className='typewriter-div'>
<p className='typewriter-text'>Hi, Abdul Wahab</p>
</div>
</ul>
<form className="search-bar form-inline my-2 my-lg-0 pt-2 pb-2">
<div className='search-container d-flex align-items-center'>
<input onClick={handleSearchClick} style={{backgroundColor: bgColor}} className="search-input mr-sm-2 pt-1 pb-1" type="search" placeholder="Search" aria-label="Search"/>
</div>
</form>
<ul className='nav-list-right nav-list pt-3'>
{/* <button className="shadow-effect" onClick={toggleDarkMode}><i class="dark-mode-icon fa-regular fa-face-smile p-2" style={{color: iconColor}}></i></button> */}
<button className="shadow-effect" onClick={toggleDarkMode}><i className="dark-mode-icon fa-solid fa-lightbulb p-2" style={{color: iconColor}}></i></button>
</ul>
{isSearchOpen && (
<Search darkBG={darkBG} bgColor={bgColor} searchRef={searchRef} textColor={textColor}/>
)}
<div id='nav-btn-div'>
<button onClick={toggleMenuVisibility} className='nav-btn'><a><i className="fa-solid fa-bars"> </i></a></button>
</div>
</div>
{isMenuVisible && (
<div id='nav-btn-menu'>
<ul className='nav-menu-list pt-3 pb-2'>
<li key="home"><a href='#'>HOME</a></li>
<hr/>
<li key="products"><a href='#products'>PRODUCTS</a></li>
<hr/>
<li key="cart"><a href='#blog'>BLOG</a></li>
<hr/>
<li onClick={onToggleCart} key="cart"><a>CART</a></li>
<hr/>
</ul>
</div>
)}
</nav>
)
}
export default Navbar;
|
package com.gameplat.admin.model.dto;
import com.fasterxml.jackson.annotation.JsonFormat;
import com.gameplat.common.group.Groups;
import lombok.Data;
import javax.validation.constraints.NotEmpty;
import java.math.BigDecimal;
import java.util.Date;
/** Red envelope (red packet) configuration DTO. */
@Data
public class RedEnvelopeConfigDTO {
  /** Primary key id. */
  private Long id;

  /** Red envelope name; required on both insert and update. */
  @NotEmpty(
      groups = {Groups.INSERT.class, Groups.UPDATE.class},
      message = "红包名称不能为空")
  private String redName;

  /** Red envelope amount. */
  private BigDecimal amount;

  /** Required wagering (turnover) multiple. */
  private Double multiple;

  /** Whether the envelope has a validity window. */
  private Integer isAging;

  /** Claim window start time (GMT+8). */
  @JsonFormat(locale = "zh", timezone = "GMT+8", pattern = "yyyy-MM-dd HH:mm:ss")
  private Date receiveStartTime;

  /** Claim window end time (GMT+8). */
  @JsonFormat(locale = "zh", timezone = "GMT+8", pattern = "yyyy-MM-dd HH:mm:ss")
  private Date receiveEndTime;

  /** Required deposit amount. */
  private BigDecimal rechargeAmount;

  /** Required wagering amount. */
  private BigDecimal chipRequire;

  /** Envelope state: 0 = disabled, 1 = enabled, 2 = expired. */
  private Integer state;

  /** Claim method: 1 = manual claim, 2 = auto-credited. */
  private Integer receiveMethod;

  /** Envelope image URL. */
  private String imgUrl;

  /** Display location of the envelope. */
  private String location;

  /** Remarks. */
  private String remark;

  /** Creator. */
  private String createBy;

  /** Last updater. */
  private String updateBy;
}
|
import { CheckCircle, Lock } from 'phosphor-react'
import { isPast, format } from 'date-fns'
import ptBR from 'date-fns/locale/pt-BR'
import { Link, useParams } from 'react-router-dom';
import classNames from 'classnames'
// Props for a single lesson card.
interface LessonProps{
  title: string;
  slug: string;
  // NOTE(review): property name is misspelled ("availabe") — it is part of the
  // component's public API, so renaming would break every call site.
  availabeAt: Date;
  type: 'live' | 'class';
}
export function Lesson(props: LessonProps) {
const { slug } = useParams<{ slug: string}>()
const isLessonAvailable = isPast(props.availabeAt);
const availableDateFormatted = format(props.availabeAt, "EEEE' • 'd' de 'MMMM' • 'k'h'mm", {
locale: ptBR,
})
const isActiveLesson = (slug == props.slug)
return (
<a href={`/event/lesson/${props.slug}`} className='group'>
<span className="text-gray-300">
{availableDateFormatted}
</span>
<div className={classNames(`rounded border border-gray-500 p-4 mt-2 group-hover:border-green-500`, {'bg-green-500': isActiveLesson})}>
<header className="flex items-center justify-between" >
{isLessonAvailable ? (
<span className={classNames('text-sm font-medium flex items-center gap-2',{'text-white': isActiveLesson, 'text-blue-500': !isActiveLesson,})}>
<CheckCircle size={20}/>
Conteúdo liberado
</span>
) : (
<span className="text-sm text-orange-500 font-medium flex items-center gap-2">
<Lock size={20} />
Em Breve
</span>
)}
<span className={classNames("text-xs rounded px-2 py-[0.125rem] text-white border", {'border-white': isActiveLesson, 'border-green-300': !isActiveLesson,})}>
{props.type == 'live' ? 'AO VIVO' : 'AULA PRÁTICA'}
</span>
</header>
<strong className={classNames('mt-5 block', {'text-white': isActiveLesson, 'text-gray-200': !isActiveLesson,})}>
{props.title}
</strong>
</div>
</a>
)
}
|
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Cors;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc;
using Microsoft.EntityFrameworkCore;
using WebAPI.Models;
using WebAPI.Repository;
namespace WebAPI.Controllers
{
    /// <summary>
    /// REST CRUD endpoints for <see cref="Position"/> entities, backed by EF Core.
    /// </summary>
    [Route("api/[controller]")]
    [ApiController]
    [EnableCors("Policy")]
    public class PositionsController : ControllerBase
    {
        private readonly DataContext _context;

        public PositionsController(DataContext context)
        {
            _context = context;
        }

        // GET: api/Positions
        /// <summary>Returns every position.</summary>
        [HttpGet]
        public async Task<ActionResult<IEnumerable<Position>>> GetPositions()
        {
            return await _context.Positions.ToListAsync();
        }

        // GET: api/Positions/5
        /// <summary>Returns one position, or 404 when the id is unknown.</summary>
        [HttpGet("{id}")]
        public async Task<ActionResult<Position>> GetPosition(int id)
        {
            var position = await _context.Positions.FindAsync(id);
            if (position == null)
            {
                return NotFound();
            }
            return position;
        }

        // PUT: api/Positions/5
        /// <summary>
        /// Replaces a position. The route id must match the body id (400 otherwise).
        /// On a concurrency conflict: 404 when the row no longer exists, rethrow otherwise.
        /// </summary>
        [HttpPut("{id}")]
        public async Task<IActionResult> PutPosition(int id, Position position)
        {
            if (id != position.Id)
            {
                return BadRequest();
            }
            _context.Entry(position).State = EntityState.Modified;
            try
            {
                await _context.SaveChangesAsync();
            }
            catch (DbUpdateConcurrencyException)
            {
                if (!PositionExists(id))
                {
                    return NotFound();
                }
                else
                {
                    throw;
                }
            }
            return NoContent();
        }

        // POST: api/Positions
        /// <summary>Creates a position and returns 201 with a Location header.</summary>
        [HttpPost]
        public async Task<ActionResult<Position>> PostPosition(Position position)
        {
            _context.Positions.Add(position);
            await _context.SaveChangesAsync();
            return CreatedAtAction("GetPosition", new { id = position.Id }, position);
        }

        // DELETE: api/Positions/5
        /// <summary>Deletes a position and echoes the deleted entity; 404 if unknown.</summary>
        [HttpDelete("{id}")]
        public async Task<ActionResult<Position>> DeletePosition(int id)
        {
            var position = await _context.Positions.FindAsync(id);
            if (position == null)
            {
                return NotFound();
            }
            _context.Positions.Remove(position);
            await _context.SaveChangesAsync();
            return position;
        }

        // True when a position with the given id exists.
        private bool PositionExists(int id)
        {
            return _context.Positions.Any(e => e.Id == id);
        }
    }
}
|
import { CssBaseline, ThemeProvider } from "@material-ui/core";
import * as React from "react";
import * as ReactDOM from "react-dom";
import { BrowserRouter } from "react-router-dom";
import { Route, Switch } from "react-router";
import TopBar from "./TopBar";
import ChooseNameModal from "./pages/ChooseName";
import Home from "./pages/Home";
import NotFound from "./pages/NotFound";
import Profile from "./pages/Profile";
import { Store, StoreProvider } from "./Store";
import theme from "./shared/theme";
import Game from "./pages/Game";
/**
 * Root component: applies the (possibly dark) theme kept in the cookie-backed
 * store, shows the name-selection modal until a name is chosen, and routes
 * between Home, Game (UUID paths), Profile and NotFound.
 */
const App = () => {
  const { state } = React.useContext(Store);
  return (
    <ThemeProvider theme={theme(state.cookie.darkTheme)}>
      <CssBaseline />
      <BrowserRouter>
        <>
          <TopBar />
          {state.cookie.name ? null : <ChooseNameModal />}
          <Switch>
            <Route path="/" exact component={Home} />
            {/* Game rooms are addressed by a UUID path segment. */}
            <Route
              path="/:guid([0-9A-Fa-f]{8}-[0-9A-Fa-f]{4}-[0-9A-Fa-f]{4}-[0-9A-Fa-f]{4}-[0-9A-Fa-f]{12})"
              render={({ match }: any) => <Game guid={match.params.guid} />}
            />
            <Route path="/profile" component={Profile} />
            <Route component={NotFound} />
          </Switch>
        </>
      </BrowserRouter>
    </ThemeProvider>
  );
};

// Mount the app. StoreProvider must wrap App so React.useContext(Store) resolves.
ReactDOM.render(
  <StoreProvider>
    <App />
  </StoreProvider>,
  document.getElementById("root")
);
|
// DSL is mini language embedded in Rust macro
// Completely valid Rust because macro expands into normal Rust constructs, but looks like small language
// Allows you to define concise or intuitive syntax for special functionality (within bounds)
// Suppose I want to define calculator API. Would like to supply expression and have output printed to console
//
// Usage: `calculate! { eval <expr> }` evaluates <expr> as a usize and prints
// "<expr> = <value>", where stringify! echoes the expression's source text.
macro_rules! calculate {
    (eval $e:expr) => {
        {
            let val: usize = $e; // Force types to be unsigned integers
            println!("{} = {}", stringify!{$e}, val);
        }
    };
}
fn main() {
    // Prints "1 + 2 = 3".
    calculate! {
        eval 1 + 2
    }
    // Note: `3 / 4` is usize division and truncates to 0,
    // so this prints "(1 + 2) * (3 / 4) = 0".
    calculate! {
        eval (1 + 2) * (3 / 4)
    }
}
|
import React, { useState } from 'react';
import { Table, TableBody, TableCell, TableHead, TableRow, Button, Paper } from '@mui/material';
import UserDialog from './UserDialog';
import { v4 as uuidv4 } from 'uuid';
const UserTable = ({ users: initialUsers, onUserChange, onUserCreation, onUserDeletion, expenses, memoExpenses }) => {
const [users, setUsers] = useState(initialUsers);
const [selectedUser, setSelectedUser] = useState(null);
const [isDialogOpen, setDialogOpen] = useState(false);
const handleAddUser = (selectedUser) => {
const firstName = selectedUser.firstName;
const lastName = selectedUser.lastName;
const newUser = {
firstName,
lastName,
total_expenses: 0,
id: uuidv4(), // generates a unique ID
};
setUsers(prevUsers => [...prevUsers, newUser]);
onUserCreation(newUser);
handleDialogClose();
};
const handleUpdateUser = (selectedUser) => {
setUsers(prevUsers => {
return prevUsers.map(user => {
if (user.id === selectedUser.id) {
return selectedUser;
}
return user;
});
});
onUserChange(selectedUser)
handleDialogClose();
};
const handleDeleteUser = (id) => {
setUsers(prevUsers => prevUsers.filter(user => user.id !== id));
onUserDeletion(id)
setSelectedUser(null);
};
const handleEditClick = (id) => {
const userToEdit = users.find(user => user.id === id);
setSelectedUser(userToEdit);
setDialogOpen(true);
};
const handleDialogClose = () => {
setDialogOpen(false);
setSelectedUser(null);
};
return (
<Paper className="user-table">
<Table>
<TableHead>
<TableRow>
<TableCell>First Name</TableCell>
<TableCell>Last Name</TableCell>
<TableCell>Total Expenses</TableCell>
<TableCell>Actions</TableCell>
</TableRow>
</TableHead>
<TableBody>
{users.map(user => (
<TableRow key={user.id}>
<TableCell>{user.firstName}</TableCell>
<TableCell>{user.lastName}</TableCell>
<TableCell>${memoExpenses[user.id] || 0}</TableCell>
<TableCell>
<Button onClick={() => handleEditClick(user.id)}>Edit</Button>
<Button onClick={() => handleDeleteUser(user.id)}>Delete</Button>
</TableCell>
</TableRow>
))}
</TableBody>
</Table>
<div>
<Button variant="contained" color="primary" onClick={() => setDialogOpen(true)}>Add User</Button>
</div>
<UserDialog
isOpen={isDialogOpen}
user={selectedUser}
onClose={() => setDialogOpen(false)}
onSave={selectedUser ? handleUpdateUser : handleAddUser}
/>
</Paper>
);
}
export default UserTable;
|
import React, { useState, useEffect } from "react";
import "../index.css";
import Book from "./Book";
import { Link } from "react-router-dom";
import axios from "axios";
import { URI } from "../constants";
/**
* Function used to retrieve the last 4 borrowings
*
* @author Anna Briançon
*/
function LatestBorrow({ token }) {
token = localStorage.getItem("token");
const [books, setBooks] = useState([]);
let bookCount = 0;
useEffect(() => {
axios
.get(`${URI}/borrows`, {
headers: {
Authorization: "Bearer " + token,
},
})
.then((response) => {
setBooks(response.data.borrows);
})
.catch((error) => console.error(error));
}, []);
return (
<div className="relative max-w-10xl mx-auto px-2 sm:px-4">
<div className="max-w-3xl mx-auto text-center md:pb-2">
<h2>Vos derniers livres empruntés</h2>
</div>
<div className="max-w-sm mx-auto grid gap-6 md:grid-cols-2 lg:grid-cols-4 items-start md:max-w-2xl lg:max-w-none">
{books.length > 0 ? (
books.map((book) => (
<div
key={bookCount++}
className="relative flex flex-row items-center p-6 max-w-xs overflow-hidden bg-cover bg-no-repeat"
>
<Link to={"/book/info?id=" + book.book.id}>
<div className="max-w-xs transition duration-800 ease-in-out hover:scale-110">
<Book
book={{
title: book.book.name,
image: { thumbnail: book.book.image },
publishedDate: book.book.publishedDate,
authors: book.book.authors,
id: book.book.id,
}}
/>
</div>
</Link>
</div>
))
) : (
<div className="items-center">
<p className="items-center">Vous n'avez pas de livre empruntés</p>
</div>
)}
</div>
</div>
);
}
export default LatestBorrow;
|
import styles from './recent-activity.module.css'
import ActivityItem from '../activity-item/activity-item'
import { useCallback } from 'react'
// Sample activity fixtures for manually exercising ActivityItem rendering;
// not referenced by the RecentActivity component.
const testItem = {
  type: 'request',
  from: 'UserX',
  amount: 25,
  note: "McDonalds"
}
const testItem2 = {
  type: 'payment',
  from: 'UserY',
  amount: 30,
  note: "Gas"
}
export default function RecentActivity(props) {
const parseTransactions = useCallback(() => {
// Fix user phone number
let curUser = props.curUser
if (curUser.startsWith("+1")) {
curUser = curUser.substring(2, 12)
}
const parsedTransactions = props.transactions.map((transaction) => {
const parsed = {}
parsed.amount = transaction.amount;
parsed.note = transaction.notes;
parsed.id = transaction.id;
parsed.cancelled = !transaction.status
console.log(transaction.pn_from)
if (curUser === transaction.pn_from) {
if (transaction.type === "req") {
parsed.type = "requestTo"
} else if (transaction.type === 'pay') {
parsed.type = "paymentTo"
}
parsed.user = transaction.pn_to
} else if (curUser === transaction.pn_to) {
if (transaction.type === "req") {
parsed.type = "requestFrom"
} else if (transaction.type === 'pay') {
parsed.type = "paymentFrom"
}
parsed.user = transaction.pn_from
}
return parsed
})
return parsedTransactions.map((obj) => <ActivityItem key={obj.id} item={obj} />)
}, [props.curUser, props.transactions])
return (
<div className={styles.container}>
{parseTransactions()}
</div>
)
}
|
import { useDispatch } from 'react-redux'
import { AppDispatch } from '../../../redux/store'
import { deleteProductThunk } from '../../../redux/actions/products'
import Icon from '../../global/Icon'
// Props for one row of the admin products table.
type ProductRowProps = {
  id: string
  name: string
  categories: string[]
  price: number
  colors: string[]
  sizes: number[]
  // Callback invoked with the product id when the Edit button is clicked.
  edit: (id: string) => void
}
/**
 * Single row of the admin products table: shows the product's fields and
 * exposes Edit (delegated to the parent via `edit`) and Delete (dispatched
 * straight to the store) actions.
 */
const ProductRow = ({ id, name, categories, price, colors, sizes, edit }: ProductRowProps) => {
  const dispatch = useDispatch<AppDispatch>()
  // Renders the row's cells; kept as a helper so the grid wrapper stays flat.
  const showInfo = () => {
    return (
      <>
        <span className="grid place-items-center text-accent font-bold">{id}</span>
        <span className="grid place-items-center">{name}</span>
        {/* NOTE(review): categories are concatenated with no separator, while
            colors get a trailing ", " — presumably a join(', ') was intended
            for both; confirm the desired formatting. */}
        <span className="grid place-items-center">{categories.map((category) => category)}</span>
        <span className="grid place-items-center text-accent text-xl font-bold">{price}.00€</span>
        <span className="grid place-items-center font-bold">
          {colors.map((color) => `${color}, `)}
        </span>
        <span className="grid place-items-center font-bold">{sizes.map((size) => `${size} `)}</span>
        <div className="flex justify-around space-x-3 font-bold">
          <button
            className="flex space-x-2 items-center text-xl
            bg-secondary py-2 px-4 rounded-sm duration-300 hover:text-accent"
            onClick={() => edit(id)}>
            <Icon iconRef="mdi-pencil" />
            <span>Edit</span>
          </button>
          <button
            className="flex space-x-2 items-center text-xl
            bg-secondary py-2 px-4 rounded-sm duration-300 hover:text-red-500"
            onClick={() => dispatch(deleteProductThunk(id))}>
            <span className="mdi mdi-trash-can-outline"></span>
            <span>Delete</span>
          </button>
        </div>
      </>
    )
  }
  return (
    <div
      className="grid grid-cols-[1fr_repeat(6,_1fr)]
      p-3 list-none border-t-2 border-b-2 border-secondary
      text-third place-items-center">
      {showInfo()}
    </div>
  )
}
export default ProductRow
|
/** @jsx DOMcreateElement */
/** @jsxFrag DOMcreateFragment */
import { DOMcreateElement } from 'jsxFactory'
import { Children } from '../../types/commonTypes'
import './Button.module.scss'
type Button = {
  // More than two variants are planned for `variant`.
  variant?: 'blue' | 'default'
  className?: string
  // Native button type ("button" | "submit" | "reset"), passed straight through.
  type?: string
  onClick?: (e: Event) => void
}

/**
 * Basic button for the custom JSX runtime (DOMcreateElement).
 * Note: children arrive as the factory's second argument, not via props.
 */
const Button = (
  { className = '', variant = 'default', type, onClick }: Button,
  children: Children,
) => {
  const isBlue = variant === 'blue'
  return (
    <button
      onClick={(e: Event) => (onClick ? onClick(e) : {})}
      type={type}
      className={`${isBlue ? 'blue' : ''} ${className}`}
    >
      {children}
    </button>
  )
}
export default Button
|
<?php
namespace App\Http\Controllers;
use App\Http\Requests\CreateTaskRequest;
use App\Models\Task;
use Illuminate\Support\Facades\DB;
/**
 * CRUD controller for Task resources.
 */
class TaskController extends Controller
{
    /**
     * Dashboard: paginated task list plus the tasks created today.
     */
    public function index()
    {
        $todayTasks = Task::whereDate('created_at', today())->get();
        return view('task.index', [
            'tasks' => DB::table('tasks')->simplePaginate(7),
            'todayTasks' => $todayTasks
        ]);
    }

    /**
     * Full paginated task list.
     */
    public function all()
    {
        return view('task.all', [
            'tasks' => DB::table('tasks')->simplePaginate(7),
        ]);
    }

    /**
     * Show the creation form with an empty task for the shared form partial.
     */
    public function create()
    {
        $task = new Task();
        return view('task.create', [
            'task' => $task
        ]);
    }

    /**
     * Persist a new task; only validated fields are mass-assigned.
     * (Dropped an unused local that captured the created model.)
     */
    public function store(CreateTaskRequest $request)
    {
        Task::create($request->validated());
        return redirect()->route('index')->with('success', "Task created successfully");
    }

    /**
     * Show the edit form for the given task.
     */
    public function edit(Task $task)
    {
        return view('task.edit', [
            'task' => $task
        ]);
    }

    /**
     * Apply validated changes to the given task.
     */
    public function update(Task $task, CreateTaskRequest $request)
    {
        $task->update($request->validated());
        return redirect()->route('index')->with('success', "Task updated successfully");
    }

    /**
     * Delete the given task.
     */
    public function destroy(Task $task)
    {
        $task->delete();
        return redirect()->route('index')->with('success', 'Task deleted successfully');
    }
}
|
#ifndef AIR_QUALITY_H
#define AIR_QUALITY_H
#include "Particle.h"
#if (PLATFORM_ID != PLATFORM_XENON) && (PLATFORM_ID != PLATFORM_ARGON) && (PLATFORM_ID != PLATFORM_BORON)
#error The Air Quality Wing Library only supports Xenon, Argon and Boron.
#endif
#include "si7021.h"
#include "ccs811.h"
#include "hpma115.h"
#include "stdbool.h"
// #ifdef HAS_SGP30
// #include "sgp30.h"
// #endif
// #ifdef HAS_BME680
// #include "bsec.h"
// #endif
// Delay and timing related contsants
#define MEASUREMENT_DELAY_S 120
#define MEASUREMENT_DELAY_MS (MEASUREMENT_DELAY_S * 1000)
#define MIN_MEASUREMENT_DELAY_MS 10000
#define HPMA_TIMEOUT_MS 10000
typedef enum {
success = 0,
hpma115_error,
ccs811_error,
si7021_error
} AirQualityWingError_t;
// Structure for holding data.
typedef struct {
struct {
bool hasData;
ccs811_data_t data;
} ccs811;
struct {
bool hasData;
si7021_data_t data;
} si7021;
struct {
bool hasData;
hpma115_data_t data;
} hpma115;
} AirQualityWingData_t;
typedef struct {
uint32_t interval;
bool hasHPMA115;
bool hasCCS811;
bool hasSi7021;
uint8_t ccs811Address;
uint8_t ccs811IntPin;
uint8_t ccs811RstPin;
uint8_t ccs811WakePin;
uint8_t hpma115IntPin;
} AirQualityWingSettings_t;
// Handler definition. The handler fires when a round of data has completed
// successfully (see attachHandler below).
typedef std::function<void()> AirQualityWingHandler_t;
// Air quality class. Only create one of these!
class AirQualityWing
{
  private:
    // Private data used to store latest values
    AirQualityWingHandler_t handler_;    // callback invoked when a data round completes
    AirQualityWingSettings_t settings_;  // configuration captured in setup()
    // Sensor objects
    Si7021 si7021;
    CCS811 ccs811;
    HPMA115 hpma115;
    // Internal event hook for the CCS811 sensor.
    void ccs811Event();
    // Optional sensors, currently disabled:
    // #ifdef HAS_SGP30
    // static SGP30 sgp30 = SGP30();
    // #endif
    // #ifdef HAS_BME680
    // static Bsec bsec = Bsec();
    // #endif
    // Variables
    // TODO: init these guys
    Timer *measurementTimer;  // drives the periodic measurement cycle
    Timer *hpmaTimer;         // guards HPMA115 reads with HPMA_TIMEOUT_MS
    // Static measurement timer event function
    void hpmaEvent();
    void measureTimerEvent();
    void hpmaTimerEvent();
    // Static var
    // State flags toggled by the timer/sensor events above.
    bool measurementStart;
    bool measurementComplete;
    bool hpmaMeasurementComplete;
    bool hpmaError;
    // Data
    // Latest readings from all enabled sensors.
    AirQualityWingData_t data;
    // #ifdef HAS_SGP30
    // Timer sgp30_timer(SGP30_READ_INTERVAL, sgp30_timer_handler);
    // #endif
  public:
    // Using defaults
    AirQualityWing();
    // Inits the devices
    AirQualityWingError_t setup(AirQualityWingHandler_t handler, AirQualityWingSettings_t settings);
    // Begins data collection
    AirQualityWingError_t begin();
    // Stops data collection, de-inits
    void end();
    // Prints out a string representation in JSON of the data
    String toString();
    // Returns a copy of the full data structure
    AirQualityWingData_t getData();
    // Attaches event handler. Event handler fires when a round of data has completed successfully
    void attachHandler(AirQualityWingHandler_t handler);
    // Deattaches event handler. The only way to fetch new data is using the `data()` method
    void deattachHandler();
    // Process method is required to process data correctly. Place in `loop()` function
    AirQualityWingError_t process();
    // Set measurement interval.
    // Accepts intervals from 20 seconds
    void setInterval(uint32_t interval);
};
#endif
|
package zsdev.work.lib.support.network.interceptor;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import okhttp3.Interceptor;
import okhttp3.Request;
import okhttp3.Response;
/**
* Created: by 2023-09-05 23:13
* Description: 构建Header拦截器
* Author: 张松
*/
public class HeadersInterceptor implements InterceptorHandler {

    /**
     * Header name/value pairs appended to every outgoing request.
     */
    private final Map<String, String> headersMap;

    /**
     * Stores the headers to apply on each request.
     *
     * @param headersMap headers to append (may be null or empty, in which
     *                   case requests pass through unchanged)
     */
    public HeadersInterceptor(Map<String, String> headersMap) {
        this.headersMap = headersMap;
    }

    /**
     * Adds every configured header to the outgoing request before it is sent.
     *
     * @param request the request being intercepted
     * @param chain   interceptor chain
     * @return the request with all configured headers applied
     */
    @Override
    public Request onBeforeRequest(Request request, Interceptor.Chain chain) {
        // Build on the request that was passed in. The original ignored the
        // `request` parameter and re-read chain.request() instead.
        Request.Builder builder = request.newBuilder();
        if (headersMap != null && !headersMap.isEmpty()) {
            // Iterate entries directly instead of a keySet + get() round trip.
            for (Map.Entry<String, String> header : headersMap.entrySet()) {
                // Only add the header here; the original called build() on
                // every iteration and discarded the intermediate Request.
                builder.addHeader(header.getKey(), Objects.requireNonNull(header.getValue()));
            }
        }
        return builder.build();
    }

    /**
     * Pass-through: this interceptor only adds request headers, so the
     * response is returned untouched.
     *
     * @param response the response being intercepted
     * @param chain    interceptor chain
     * @return the response, unmodified
     */
    @Override
    public Response onAfterRequest(Response response, Interceptor.Chain chain) {
        return response;
    }
}
|
import { Analytics } from '@vercel/analytics/react'
import type { Metadata } from 'next'
import { manrope, jetBrainsMono } from './fonts'
import Providers from './providers'
import BaseLayout from '~layouts/BaseLayout'
import '~styles/index.css'
// Site-wide metadata: default/templated page titles plus an RSS alternate feed.
export const metadata: Metadata = {
  title: { default: 'Zach Schnackel', template: '%s | Zach Schnackel' },
  description: 'Full-stack/motion developer',
  alternates: {
    types: {
      'application/rss+xml': '/rss.xml',
    },
  },
}

// Root layout for the Next.js app router. Exposes the two font CSS variables
// on <html>, applies global body styling, and wraps all pages in the shared
// providers and base layout.
export default function RootLayout({
  children,
}: {
  children: React.ReactNode
}) {
  return (
    <html lang="en" className={`${manrope.variable} ${jetBrainsMono.variable}`}>
      <body className="overflow-x-hidden overflow-y-scroll bg-slate-1 font-medium text-slate-12 antialiased selection:bg-primary-5 selection:text-slate-12">
        <Providers>
          <BaseLayout>{children}</BaseLayout>
        </Providers>
        {/* Vercel page-view analytics */}
        <Analytics />
      </body>
    </html>
  )
}
|
//! Tool acting like systemd's Watchdog for Docker containers.
//! Signals that the Docker container should restart if `WatchDock::notify` was not called within a certain period.
use {
log::{error, info},
shiplift::{Container, ContainerListOptions, Docker},
std::time::Duration,
thiserror::Error as ThisError,
tokio::{
sync::mpsc::{unbounded_channel, UnboundedSender},
task::{spawn, JoinError},
time::timeout,
},
};
/// Errors produced by `WatchDock`.
#[derive(Debug, ThisError)]
pub enum Error {
    /// Wrapped error from the shiplift Docker client.
    #[error("Docker error")]
    Docker(#[from] shiplift::Error),
    /// No container id starts with the given prefix.
    #[error("Container not found {0}")]
    ContainerNotFound(String),
    /// Error joining a spawned task.
    #[error("Join error")]
    Join(#[from] JoinError),
    /// The watchdog channel is closed (the background task's receiver
    /// has been dropped), so `notify` cannot deliver.
    #[error("Send error")]
    Send,
}
/// Tool acting like systemd's Watchdog for Docker containers.
/// Signals that the Docker container should restart if `WatchDock::notify` was not called within a certain period.
pub struct WatchDock {
    // Channel used by `notify` to reset the timeout in the background task.
    sender: UnboundedSender<()>,
}
impl WatchDock {
    /// Get Docker container.
    ///
    /// Lists all containers (including stopped ones, via `all()`) and returns
    /// a handle to the first one whose id starts with `container_id`, or
    /// `Error::ContainerNotFound` when no id matches the prefix.
    async fn get_container<'docker>(
        docker: &'docker Docker,
        container_id: &str,
    ) -> Result<Container<'docker>, Error> {
        let mut list_builder = ContainerListOptions::builder();
        // Include non-running containers so a stopped target is still found.
        list_builder.all();
        let options = list_builder.build();
        docker
            .containers()
            .list(&options)
            .await?
            .iter()
            // Prefix match lets callers pass a shortened container id.
            .find(|&container| container.id.starts_with(container_id))
            .map_or_else(
                || Err(Error::ContainerNotFound(container_id.into())),
                |container| Ok(docker.containers().get(&container.id)),
            )
    }
    /// Notify that the container should keep running.
    ///
    /// Returns `Error::Send` if the background watchdog task is no longer
    /// receiving (its receiver was dropped).
    pub fn notify(&self) -> Result<(), Error> {
        self.sender.send(()).map_err(|_| Error::Send)
    }
    /// Create a new `WatchDock` instance where `max_time` is the maximum time to wait before restarting the given `container_name`
    /// if `WatchDock::notify` was not called during that time.
    ///
    /// To access the Docker API from the container, the Docker socket path must be passed as a shared volume
    /// when building the container, e.g:
    /// `$ docker run -v /var/run/docker.sock:/var/run/docker.sock container:latest`
    pub async fn new<I>(max_time: Duration, container_id: I) -> Result<Self, Error>
    where
        I: AsRef<str> + Send + Sync + 'static,
    {
        let (sender, mut receiver) = unbounded_channel();
        spawn(async move {
            let docker = Docker::new();
            if let Ok(container) = Self::get_container(&docker, container_id.as_ref()).await {
                loop {
                    // recv() resolves whenever notify() sends; hitting the
                    // timeout means no notification arrived within max_time.
                    if timeout(max_time, receiver.recv()).await.is_err() {
                        info!("Timeout reached. Restarting container...");
                        // NOTE(review): expect() panics the watchdog task if
                        // the restart call fails — confirm that is acceptable.
                        container
                            .restart(None)
                            .await
                            .expect("Cannot restart container");
                    }
                }
            } else {
                // Container lookup failed: log and let the task end; later
                // notify() calls will then return Error::Send.
                error!(
                    "Cannot find container: {:?}. Terminating WatchDock.",
                    container_id.as_ref()
                );
            }
        });
        Ok(Self { sender })
    }
}
|
//Mesh reconstruction tool
//Qiaosong Wang
//University of Delaware
//qiaosong@udel.edu
#include <pcl/io/pcd_io.h>
#include <pcl/io/vtk_io.h>
#include <pcl/ros/conversions.h>
#include <pcl/point_cloud.h>
#include <pcl/point_types.h>
#include <pcl/point_types.h>
#include <pcl/filters/voxel_grid.h>
#include <pcl/surface/gp3.h>
#include <pcl/kdtree/kdtree_flann.h>
#include <pcl/features/normal_3d.h>
#include <pcl/filters/voxel_grid.h>
#include <pcl/sample_consensus/method_types.h>
#include <pcl/sample_consensus/model_types.h>
#include <pcl/sample_consensus/sac_model_plane.h>
#include <pcl/segmentation/sac_segmentation.h>
#include <pcl/ModelCoefficients.h>
#include <pcl/kdtree/kdtree_flann.h>
#include <pcl/surface/mls.h>
// Pipeline: load PCD -> voxel-grid downsample -> estimate normals ->
// greedy projection triangulation -> save VTK mesh.
int
main (int argc, char** argv)
{
  // Load input file into a PointCloud<T> with an appropriate type
  pcl::PointCloud<pcl::PointXYZ>::Ptr cloud (new pcl::PointCloud<pcl::PointXYZ>);
  sensor_msgs::PointCloud2 cloud_blob;
  sensor_msgs::PointCloud2::Ptr input (new sensor_msgs::PointCloud2 ());
  sensor_msgs::PointCloud2::Ptr output (new sensor_msgs::PointCloud2 ());
  pcl::PCDReader reader;
  // Replace the path below with the path where you saved your file
  reader.read ("./input.pcd", *input); // Remember to download the file first!
  // Downsample with a 1 cm voxel grid to reduce point count before meshing.
  pcl::VoxelGrid<sensor_msgs::PointCloud2> sor;
  sor.setInputCloud (input);
  sor.setLeafSize (0.01, 0.01, 0.01);
  sor.filter (*output);
  pcl::PCDWriter writer;
  writer.write ("./downsampled.pcd", *output,
           Eigen::Vector4f::Zero (), Eigen::Quaternionf::Identity (), false);
  printf("Downsampling completed!\n");
  /*
  Optional MLS smoothing pass, currently disabled:
  pcl::io::loadPCDFile ("./downsampled.pcd", *cloud);
  // Create a KD-Tree
  pcl::search::KdTree<pcl::PointXYZ>::Ptr tree (new pcl::search::KdTree<pcl::PointXYZ>);
  // Output has the PointNormal type in order to store the normals calculated by MLS
  pcl::PointCloud<pcl::PointNormal> mls_points;
  // Init object (second point type is for the normals, even if unused)
  pcl::MovingLeastSquares<pcl::PointXYZ, pcl::PointNormal> mls;
  mls.setComputeNormals (true);
  // Set parameters
  mls.setInputCloud (cloud);
  mls.setPolynomialFit (true);
  mls.setSearchMethod (tree);
  mls.setSearchRadius (1);
  // Reconstruct
  mls.process (mls_points);
  // Save output
  pcl::io::savePCDFile ("./smoothed.pcd", mls_points);
  */
  // Reload the downsampled cloud and convert the blob to a typed cloud.
  pcl::io::loadPCDFile ("./downsampled.pcd", cloud_blob);
  pcl::fromROSMsg (cloud_blob, *cloud);
  //* the data should be available in cloud
  // Normal estimation*
  pcl::NormalEstimation<pcl::PointXYZ, pcl::Normal> n;
  pcl::PointCloud<pcl::Normal>::Ptr normals (new pcl::PointCloud<pcl::Normal>);
  pcl::search::KdTree<pcl::PointXYZ>::Ptr tree2 (new pcl::search::KdTree<pcl::PointXYZ>);
  tree2->setInputCloud (cloud);
  n.setInputCloud (cloud);
  n.setSearchMethod (tree2);
  // Use the 20 nearest neighbors for each point's normal.
  n.setKSearch (20);
  n.compute (*normals);
  //* normals should now contain the point normals + surface curvatures
  // Concatenate the XYZ and normal fields*
  pcl::PointCloud<pcl::PointNormal>::Ptr cloud_with_normals (new pcl::PointCloud<pcl::PointNormal>);
  pcl::concatenateFields (*cloud, *normals, *cloud_with_normals);
  //* cloud_with_normals = cloud + normals
  // Create search tree*
  pcl::search::KdTree<pcl::PointNormal>::Ptr tree3 (new pcl::search::KdTree<pcl::PointNormal>);
  tree3->setInputCloud (cloud_with_normals);
  // Initialize objects
  pcl::GreedyProjectionTriangulation<pcl::PointNormal> gp3;
  pcl::PolygonMesh triangles;
  // Set the maximum distance between connected points (maximum edge length)
  gp3.setSearchRadius (3);
  // Set typical values for the parameters
  gp3.setMu (3);
  gp3.setMaximumNearestNeighbors (300);
  gp3.setMaximumSurfaceAngle(M_PI/4); // 45 degrees
  gp3.setMinimumAngle(M_PI/18); // 10 degrees
  gp3.setMaximumAngle(2*M_PI/3); // 120 degrees
  gp3.setNormalConsistency(false);
  // Get result
  gp3.setInputCloud (cloud_with_normals);
  gp3.setSearchMethod (tree3);
  gp3.reconstruct (triangles);
  // Additional vertex information
  std::vector<int> parts = gp3.getPartIDs();
  std::vector<int> states = gp3.getPointStates();
  // Write the reconstructed mesh for viewing (e.g. in ParaView).
  pcl::io::saveVTKFile("mesh.vtk",triangles);
  // Finish
  return (0);
}
|
import { HttpClient } from '@angular/common/http';
import { Component } from '@angular/core';
import { ActivatedRoute, Router } from '@angular/router';
import { UserModel } from '@core/models/users.model';
import { environment } from 'src/environments/environment';
import { UserService } from '../services/user.service';
import { map } from 'rxjs';
@Component({
  selector: 'app-users-detail',
  templateUrl: './users-detail.component.html',
  styleUrls: ['./users-detail.component.css'],
})
export class UsersDetailComponent {
  private readonly URL = environment.api;

  userlist: Array<UserModel> = [];
  // The displayed user, wrapped in an array for the template's iteration.
  user: Array<UserModel> = [];
  status: string = '';

  constructor(
    private route: ActivatedRoute,
    private userService: UserService,
    private httpClient: HttpClient,
    private router: Router
  ) {}

  ngOnInit(): void {
    // this.getUsers()
    this.getUserById();
  }

  /** Loads every user and keeps only the one matching the :id route param. */
  getUsers() {
    // We are getting the user id from the params
    const routeParams = this.route.snapshot.paramMap;
    // cleaning it to get only the number
    const userIdParams = Number(routeParams.get('id'));
    this.userService.getAllUsers$().subscribe((res) => {
      // find() stops at the first match; the original abused map() for
      // side effects and scanned the entire list.
      const match = res.find((r: any) => r.id === userIdParams);
      if (match) {
        this.user = [match];
      }
    });
  }

  /** Fetches a single user by the :id route param. */
  getUserById() {
    const routeParams = this.route.snapshot.paramMap;
    const userIdParams = Number(routeParams.get('id'));
    // We are calling the API by user id and unwrapping the `data` envelope
    const call = this.httpClient.get(`${this.URL}users/${userIdParams}`).pipe(
      map(({ data }: any) => data)
    );
    call.subscribe((res) => {
      this.user = [res];
    });
  }

  /** Deletes the current user, then navigates back to the user list. */
  deleteUserById() {
    const routeParams = this.route.snapshot.paramMap;
    const userIdParams = Number(routeParams.get('id'));
    // We are sending the delete request to the api
    this.httpClient.delete(`${this.URL}users/${userIdParams}`).subscribe({
      next: (data) => {
        // Faked user deleted as in this api users cannot be deleted
        this.status = `User with ID => [${userIdParams}] Deleted successfully`;
        console.log(this.status);
        // Navigate only once the request has settled. The original navigated
        // immediately after subscribing, racing the HTTP call.
        this.router.navigate(['/users']);
      },
      error: (error) => {
        this.status = error.message;
        console.log('Error', this.status);
        this.router.navigate(['/users']);
      },
    });
  }
}
|
<!DOCTYPE html>
<html lang="en">
<head>
  <meta charset="utf-8">
  <meta name="viewport" content="width=device-width, initial-scale=1">
  <title>Perla Ordonez</title>
  <!-- Reset CSS -->
  <link rel="stylesheet" href="reset.css">
  <!-- Style CSS -->
  <link rel="stylesheet" href="style.css">
  <!-- Bootstrap -->
  <link rel="stylesheet" href="https://stackpath.bootstrapcdn.com/bootstrap/4.2.1/css/bootstrap.min.css" integrity="sha384-GJzZqFGwb1QTTN6wy59ffF1BuGJpLSa9DkKMp0DgiMDm4iYMj70gZWKYbI706tWS" crossorigin="anonymous">
  <!-- Font Awesome icons (the original linked this stylesheet twice) -->
  <link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/4.7.0/css/font-awesome.min.css">
  <!-- Lazy Sizes -->
  <script src="https://afarkas.github.io/lazysizes/lazysizes.min.js" async></script>
</head>
<body id="topPage">
  <nav class="navbar navbar-light sticky-top bg-light">
    <a class="navbar-brand" href="#topPage">Perla Ordonez</a>
    <ul class="nav nav-pills">
      <li class="nav-item">
        <a class="nav-link" href="#about">About</a>
      </li>
      <li class="nav-item">
        <a class="nav-link" href="#skills">Skills</a>
      </li>
      <li class="nav-item">
        <a class="nav-link" href="#portfolio">Portfolio</a>
      </li>
      <li class="nav-item">
        <!-- TODO: add the resume URL — an empty href just reloads the page -->
        <a class="nav-link" href="" target="_blank" rel="noopener noreferrer">Resume</a>
      </li>
    </ul>
  </nav>
  <!-- Main Container for all Sections -->
  <div class="container" data-spy="scroll" data-target="#navbar-example2" data-offset="50">
    <header id="about" class="main-header d-flex">
      <div class="container my-auto">
        <div class="row">
          <div class="col-sm-12 text-center">
            <h1 class="section-heading">Aspiring Full Stack Web Developer</h1>
          </div>
        </div>
        <div class="row">
          <div class="col-sm-6 text-center">
            <h2 class="section-heading">My Background</h2>
            <p>Sed libero justo, lacinia at efficitur non, porta et ligula. Quisque varius odio ac dictum pretium. Quisque mollis posuere quam dictum maximus. Nam porttitor lorem mauris, eu feugiat quam feugiat eget. Pellentesque at sollicitudin sapien, nec viverra metus. Nam nunc metus, convallis id urna in, viverra elementum turpis. Vivamus ipsum erat, dictum sit amet nunc nec, finibus dictum justo.</p>
          </div>
          <div class="col-sm-6 text-center">
            <!-- alt added: the original image had no alt attribute -->
            <img src="assets/Images/profile-image.jpg" class="rounded-circle" width="250" height="250" alt="Portrait of Perla Ordonez">
          </div>
        </div>
      </div>
    </header>
    <!-- Skills Section -->
    <section id="skills">
      <div class="row">
        <div class="col-sm-12 text-center">
          <h2 class="section-heading">Programming Languages and Tools</h2>
        </div>
      </div>
      <br>
      <!-- Logos for skills learned -->
      <div class="row">
        <div class="col-sm-12 margin-top text-center">
          <img class="img-fluid" src="assets/Images/html.png" alt="HTML" style="height: 150px;">
          <img class="img-fluid" src="assets/Images/css.png" alt="CSS" style="height: 150px;">
          <img class="img-fluid" src="assets/Images/javascript.png" alt="JavaScript" style="height: 150px;">
          <img class="img-fluid" src="assets/Images/node.png" alt="Node" style="height: 150px;">
        </div>
      </div>
    </section>
    <!-- Portfolio Section -->
    <!-- TODO: all four project cards link to the same unit-4-game URL; verify each card's link -->
    <section id="portfolio">
      <div class="row">
        <div class="col-md-4 col-sm-6 margin-top">
          <div class="image-thumbnail" style="height: 350px;">
            <div class="img-caption">
              <h3 class="text-center">Crystal Collector</h3>
              <a href="https://pordonez93.github.io/unit-4-game/" target="_blank" rel="noopener noreferrer" class="btn btn-primary btn-block">
                View project<i class="fa fa-chevron-circle-right fa-align-right"></i>
              </a>
            </div>
            <img class="lazyload" data-sizes="auto" data-src="assets/Images/crystal-collector.jpg" alt="Crystal Collector">
          </div>
        </div>
        <div class="col-md-4 col-sm-6 margin-top">
          <div class="image-thumbnail" style="height: 350px;">
            <div class="img-caption">
              <h3 class="text-center">GifTastic</h3>
              <a href="https://pordonez93.github.io/unit-4-game/" target="_blank" rel="noopener noreferrer" class="btn btn-primary btn-block">
                View project<i class="fa fa-chevron-circle-right fa-align-right"></i>
              </a>
            </div>
            <img class="lazyload" data-sizes="auto" data-src="assets/Images/GifTastic.jpg" alt="GifTastic">
          </div>
        </div>
        <div class="col-md-4 col-sm-6 margin-top">
          <div class="image-thumbnail" style="height: 350px;">
            <div class="img-caption">
              <h3 class="text-center">Train Scheduler</h3>
              <a href="https://pordonez93.github.io/unit-4-game/" target="_blank" rel="noopener noreferrer" class="btn btn-primary btn-block">
                View project<i class="fa fa-chevron-circle-right fa-align-right"></i>
              </a>
            </div>
            <img class="lazyload" data-sizes="auto" data-src="assets/Images/train-schedule.jpg" alt="Train Scheduler">
          </div>
        </div>
        <div class="col-md-4 col-sm-6 margin-top">
          <div class="image-thumbnail" style="height: 350px;">
            <div class="img-caption">
              <h3 class="text-center">Right Side of the Bed</h3>
              <a href="https://pordonez93.github.io/unit-4-game/" target="_blank" rel="noopener noreferrer" class="btn btn-primary btn-block">
                View project<i class="fa fa-chevron-circle-right fa-align-right"></i>
              </a>
            </div>
            <img class="lazyload" src="assets/Images/right-side-of-the-bed.jpg" alt="Right Side of the Bed">
          </div>
        </div>
      </div>
    </section>
  </div>
<!-- Contact Section -->
<footer>
<div class="container text-center">
<!-- icons with external links -->
<div class="row">
<div class="col-sm-12">
<a href="https://www.linkedin.com/in/perla-ordonez/" target="_blank" class="icons">
<i class="fa fa-linkedin-square" style="font-size:38px"></i>
</a>
<a href="https://github.com/pordonez93" target="_blank" class="icons">
<i class="fa fa-github-square" style="font-size:38px"></i>
</a>
<a href="https://twitter.com/ordonezperla" target="_blank" class="icons">
<i class="fa fa-twitter-square" style="font-size:38px"></i>
</a>
<a href="mailto:perla.ordonez07@gmail.com" target="_blank" class="icons">
<i class="fa fa-envelope" style="font-size:38px"></i>
</a>
</div>
</div>
<div class="row">
<div class="col-sm-12">
<div class="triangle img-center">
<a href="#topPage" class="top-page" title="Back to top">
<i class="fa fa-arrow-up"></i>
</a>
</div>
</div>
</div>
</div>
</footer>
<!-- Bootstrap JS -->
<script src="https://code.jquery.com/jquery-3.3.1.slim.min.js" integrity="sha384-q8i/X+965DzO0rT7abK41JStQIAqVgRVzpbzo5smXKp4YfRvH+8abtTE1Pi6jizo" crossorigin="anonymous"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/popper.js/1.14.6/umd/popper.min.js" integrity="sha384-wHAiFfRlMFy6i5SRaxvfOCifBUQy1xHdJ/yoi7FRNXMRBu5WHdZYu1hA6ZOblgut" crossorigin="anonymous"></script>
<script src="https://stackpath.bootstrapcdn.com/bootstrap/4.2.1/js/bootstrap.min.js" integrity="sha384-B0UglyR+jN6CkvvICOB2joaf5I4l3gm9GU6Hc1og6Ls7i6U/mkkaduKaBhlAXv9k" crossorigin="anonymous"></script>
<!-- Jquery -->
<script src="https://cdnjs.cloudflare.com/ajax/libs/jquery/3.2.1/jquery.min.js"></script>
</body>
</html>
|
import React from "react";
import { Tabs, Tab, Box, Card } from "@mui/material";
import HeaderAdmin from "@/components/headers/HeaderAdmin";
import Container from "@/components/layouts/Container";
import { usePathname, useRouter } from "next/navigation";
export default function DashboardLayout({
  children,
}: {
  children: React.ReactNode;
}) {
  const router = useRouter();
  const pathname = usePathname();
  // Second path segment (e.g. "blog" in /admin/blog/list) selects the tab.
  const route = React.useMemo(() => pathname.split("/")[2], [pathname]);

  // Maps a tab value to its list page; anything else falls back to products.
  const handleChangeTab = (_event: React.SyntheticEvent, tabValue: string) => {
    const tabRoutes = new Map<string, string>([
      ["blog", "/admin/blog/list"],
      ["transaction", "/admin/transaction/list"],
      ["contact", "/admin/contact/list"],
    ]);
    router.push(tabRoutes.get(tabValue) ?? "/admin/product/list");
  };

  return (
    <Box className="tw-min-h-screen tw-bg-purple-100">
      <HeaderAdmin />
      <Tabs className="tw-bg-white" value={route} onChange={handleChangeTab}>
        <Tab label="Product" value="product" />
        <Tab label="Blog" value="blog" />
        <Tab label="Transaction" value="transaction" />
        <Tab label="Contact" value="contact" />
      </Tabs>
      <Container className="tw-py-4">
        <Card className="tw-p-4">{children}</Card>
      </Container>
    </Box>
  );
}
|
import { Injectable } from '@angular/core';
import { InMemoryDbService } from 'angular-in-memory-web-api';
import { Quiz } from './quiz';
@Injectable({
  providedIn: 'root',
})
export class InMemoryDataService implements InMemoryDbService {
  /** Seeds the in-memory database with the AWS certification quiz list. */
  createDb() {
    const quizzes = [
      { id: 1, name: 'AWS Certified Cloud Practitioner (CLF-C01)' },
      { id: 2, name: 'AWS Certified Developer - Associate (DVA-C02)' },
      { id: 3, name: 'AWS Certified Solutions Architect - Associate (SAA-C03)' },
      { id: 4, name: 'AWS Certified SysOps Administrator - Associate (SOA-C02)' },
      { id: 5, name: 'AWS Certified DevOps Engineer - Professional exam (DOP-C02)' },
      { id: 6, name: 'AWS Certified Solutions Architect - Professional exam (SAP-C02)' },
      { id: 7, name: 'AWS Certified Advanced Networking - Specialty exam (ANS-C01)' },
      { id: 8, name: 'AWS Certified Data Analytics - Specialty exam (DAS-C01)' },
      { id: 9, name: 'AWS Certified Database - Specialty exam (DBS-C01)' },
      { id: 10, name: 'AWS Certified Machine Learning - Specialty exam (MLS-C01)' },
      { id: 11, name: 'AWS Certified Security – Specialty (SCS-C01)' },
      { id: 12, name: 'AWS Certified: SAP on AWS - Specialty exam (PAS-C01)' }
    ];
    return { quizzes };
  }

  /**
   * Generates an id for a new quiz so that a quiz always has one:
   * one higher than the current maximum, or the initial number 11
   * when the quizzes array is empty.
   */
  genId(quizzes: Quiz[]): number {
    if (quizzes.length === 0) {
      return 11;
    }
    const highestId = Math.max(...quizzes.map((quiz) => quiz.id));
    return highestId + 1;
  }
}
|
package com.hexaware.controller;
import java.time.LocalDate;
import java.util.Scanner;
import com.hexaware.dao.AdminDAO;
import com.hexaware.exception.AdminNotFoundException;
import com.hexaware.model.Admin;
public class AdminService implements IAdminService{

    // Working Admin instance populated during registration.
    Admin admi = new Admin();
    // Console input shared by every operation.
    Scanner sc = new Scanner(System.in);
    // Data-access object handling Admin persistence.
    AdminDAO adminDAO = new AdminDAO();

    /**
     * Prompts for every Admin field on the console and persists the new
     * record through the DAO.
     */
    public void registerAdmin() {
        // Fresh instance so values from a previous registration cannot leak in.
        admi = new Admin();
        System.out.println("--Please enter your details--\n");
        System.out.print("First Name: ");
        admi.setFirstName(sc.next());
        System.out.print("Last Name: ");
        admi.setLastName(sc.next());
        System.out.print("Email-Id: ");
        admi.setEmail(sc.next());
        System.out.print("Mobile number: ");
        admi.setPhoneNumber(sc.next());
        System.out.print("Username: ");
        admi.setUsername(sc.next());
        System.out.print("Password: ");
        admi.setPassword(sc.next());
        System.out.print("Enter Role: ");
        admi.setRole(sc.next());
        System.out.print("Date of joining (YYYY-mm-dd): ");
        // LocalDate.parse throws DateTimeParseException on malformed input.
        admi.setJoinDate(LocalDate.parse(sc.next()));
        // Persist first, then report success (the original printed the success
        // message before calling the DAO).
        adminDAO.registerAdmin(admi);
        System.out.println("Admin details were added successfully!");
    }

    /**
     * Looks an Admin up by numeric id and prints the details, or an error
     * message when the id does not resolve.
     */
    @Override
    public void getAdminById() {
        System.out.println("enter Admin id");
        int adminID = sc.nextInt();
        try {
            Admin adminById = adminDAO.getAdminById(adminID);
            if (adminById != null) {
                // Label fixed: this prints an Admin, not a Customer.
                System.out.println("\n---Admin Details---\n" + adminById);
            } else {
                // Message fixed: the original said "Vehicle not found."
                System.out.println("Admin not found.");
            }
        } catch (AdminNotFoundException ex) {
            System.out.println("Error: " + ex.getMessage());
        }
    }

    /**
     * Looks an Admin up by username and prints the details; reports a miss
     * instead of printing "null" like the original.
     */
    @Override
    public void getAdminByUserName() {
        System.out.println("enter Admin UserName");
        String adminuserName = sc.next();
        Admin adminByUserName = adminDAO.getAdminByUserName(adminuserName);
        if (adminByUserName != null) {
            System.out.println("\n---Admin Details---\n" + adminByUserName);
        } else {
            System.out.println("Admin not found.");
        }
    }

    /**
     * Re-reads every editable field for an existing Admin and saves it.
     *
     * @throws AdminNotFoundException when the id does not resolve to an Admin
     */
    @Override
    public void updateAdmin() throws AdminNotFoundException {
        System.out.print("Enter Admin ID to update: ");
        int updateAdminId = sc.nextInt();
        sc.nextLine(); // Consume the newline left behind by nextInt()
        Admin updatedAdmin = adminDAO.getAdminById(updateAdminId);
        if (updatedAdmin != null) {
            System.out.print("Enter new First Name: ");
            updatedAdmin.setFirstName(sc.nextLine());
            System.out.print("Enter new Last Name: ");
            updatedAdmin.setLastName(sc.nextLine());
            System.out.print("Enter new Email: ");
            updatedAdmin.setEmail(sc.nextLine());
            System.out.print("Enter new Phone Number: ");
            updatedAdmin.setPhoneNumber(sc.nextLine());
            System.out.print("Enter new Role: ");
            updatedAdmin.setRole(sc.nextLine());
            adminDAO.updateAdmin(updatedAdmin);
            System.out.println("Admin updated successfully");
        } else {
            System.out.println("Admin not found");
        }
    }

    /**
     * Deletes the Admin with the entered id via the DAO.
     */
    @Override
    public void removeAdmin() {
        System.out.print("Enter Admin ID to remove: ");
        int removeAdminId = sc.nextInt();
        adminDAO.removeAdmin(removeAdminId);
        System.out.println("Admin removed successfully");
    }
}
|
package com.example.packitupandroid.data.database.dao
import androidx.room.Dao
import androidx.room.Query
import androidx.room.Transaction
import com.example.packitupandroid.data.model.QuerySummary
import kotlinx.coroutines.flow.Flow
@Dao
interface SummaryDao {
    /**
     * Emits an aggregate snapshot across the three tables: box, collection
     * and item counts, the rounded total item value, and whether any item
     * is flagged fragile.
     */
    @Query("""
        WITH boxesData AS (
            SELECT COUNT(b.id) AS count
            FROM boxes b
        ),
        collectionsData AS (
            SELECT COUNT(c.id) as count
            FROM collections c
        ),
        itemsData AS (
            SELECT COUNT(i.id) as count, ROUND(SUM(i.value)) AS value, MAX(CASE WHEN i.is_fragile = 1 THEN 1 ELSE 0 END) AS is_fragile
            FROM items i
        )
        SELECT
            boxesData.count AS box_count,
            collectionsData.count AS collection_count,
            itemsData.count AS item_count,
            itemsData.value AS value,
            itemsData.is_fragile AS is_fragile
        FROM boxesData, collectionsData, itemsData
    """)
    fun getSummary(): Flow<QuerySummary>

    /**
     * Clears all three tables in one transaction: items first, then boxes,
     * then collections.
     */
    @Transaction
    suspend fun clearAllSummary() {
        deleteItems()
        deleteBoxes()
        deleteCollections()
    }

    /** Deletes every row from the `items` table. */
    @Query("DELETE FROM items")
    fun deleteItems()

    /** Deletes every row from the `boxes` table. */
    @Query("DELETE FROM boxes")
    fun deleteBoxes()

    /** Deletes every row from the `collections` table. */
    @Query("DELETE FROM collections")
    fun deleteCollections()
}
|
package serejka.telegram.behold.resources;
import java.util.Comparator;
import java.util.List;
import java.util.stream.Collectors;
import lombok.AccessLevel;
import lombok.RequiredArgsConstructor;
import lombok.experimental.FieldDefaults;
import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.ResponseStatus;
import org.springframework.web.bind.annotation.RestController;
import serejka.telegram.behold.models.Stats;
import serejka.telegram.behold.service.StatisticsService;
@RestController
@RequestMapping("/api/statistics")
@RequiredArgsConstructor
@FieldDefaults(makeFinal = true, level = AccessLevel.PRIVATE)
public class StatResource {

    StatisticsService statisticsService;

    /**
     * Returns per-command usage statistics, ordered by ascending
     * invocation count.
     */
    @ResponseStatus(HttpStatus.OK)
    @GetMapping(produces = MediaType.APPLICATION_JSON_VALUE)
    public List<Stats> statisticsByCommands() {
        Comparator<Stats> byCount = Comparator.comparingInt(Stats::getCount);
        List<Stats> sortedStats = statisticsService.findAllStatsCommand()
                .stream()
                .sorted(byCount)
                .collect(Collectors.toList());
        return sortedStats;
    }
}
|
//
// SloganTableViewController.swift
// Sloganon
//
// Created by Sylvia Wake-Hood on 1/1/23.
//
import UIKit
import RealmSwift
class SloganTableViewController: UITableViewController, UITextFieldDelegate, SSOAViewControllerDelegate{
var slogans: Array<SloganSayingOrAcronym>?
var sayings: Array<SloganSayingOrAcronym>?
var acronyms: Array<SloganSayingOrAcronym>?
var favorites: Array<SloganSayingOrAcronym>?
let sections = [
K.HeaderText.sloganOfTheDay,
K.HeaderText.sloganSayingsOrAcronyms,
K.HeaderText.favorites
]
let sloganSayingsOrAcronymsSection = [
K.SloganSayingorAcronymCells.slogans,
K.SloganSayingorAcronymCells.sayings,
K.SloganSayingorAcronymCells.acronyms
]
override func viewDidLoad() {
super.viewDidLoad()
//For dynamic row height
self.tableView.estimatedRowHeight = 100
self.tableView.rowHeight = UITableView.automaticDimension
self.tableView.register(DailySloganTableViewCell.self, forCellReuseIdentifier: DailySloganTableViewCell.identifier)
self.tableView.register(SloganonChoicesTableViewCell.self, forCellReuseIdentifier: SloganonChoicesTableViewCell.identifier)
self.tableView.register(FavoritesTableViewCell.self, forCellReuseIdentifier: FavoritesTableViewCell.identifier)
self.view.backgroundColor = K.sloganVCbackground
slogans = SloganSayingOrAcronym.getSlogans()
sayings = SloganSayingOrAcronym.getSayings()
acronyms = SloganSayingOrAcronym.getAcronyms()
favorites = SloganSayingOrAcronym.getFavorites()
//we need to periodically update the slogan of the day.
//Set a timer that reloads the tableView
Timer.scheduledTimer(withTimeInterval: 300, repeats: true) { (_) in
self.tableView.reloadData()
}
}
// MARK: - Table view data source
override func numberOfSections(in tableView: UITableView) -> Int {
return sections.count
}
override func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int {
if section == K.SectionNumber.sloganOfTheDay {
return 1
} else if section == K.SectionNumber.slogansSayingsOrAcronyms {
return sloganSayingsOrAcronymsSection.count
} else {
if let favs = self.favorites {
return favs.count == 0 ? 1 : favs.count
} else {
return 0
}
}
}
// MARK: - Configure TableView Rows with delegate methods
override func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell {
let section = indexPath.section
let percentageBy = UIColor.getPercentBy(row: indexPath.row, repeatEvery: 12)
if section == K.SectionNumber.sloganOfTheDay {
let cell = tableView.dequeueReusableCell(withIdentifier: DailySloganTableViewCell.identifier, for: indexPath) as! DailySloganTableViewCell
cell.title = SloganSayingOrAcronym.getSloganOfTheDay()
return cell
} else if section == K.SectionNumber.slogansSayingsOrAcronyms {
let cell = tableView.dequeueReusableCell(withIdentifier: SloganonChoicesTableViewCell.identifier, for: indexPath) as! SloganonChoicesTableViewCell
cell.title = sloganSayingsOrAcronymsSection[indexPath.row]
cell.setColors(color: UIColor.ssoa)
cell.darkenColor(byPercentage: percentageBy)
return cell
} else {
let cell = tableView.dequeueReusableCell(withIdentifier: FavoritesTableViewCell.identifier, for: indexPath) as! FavoritesTableViewCell
cell.title = favorites?.count == 0 ? "You don't have any favorites yet. \n\n\u{2764} your favorite Slogans, Sayings or Acronyms!! " : favorites![indexPath.row].text
cell.setColors(color: UIColor.favorites)
cell.darkenColor(byPercentage: percentageBy)
return cell
}
}
// MARK: - Configure TableView Headers with delegate methods
override func tableView(_ tableView: UITableView, titleForHeaderInSection section: Int) -> String? {
if section == K.SectionNumber.sloganOfTheDay {
return K.HeaderText.sloganOfTheDay
} else if section == K.SectionNumber.slogansSayingsOrAcronyms {
return K.HeaderText.sloganSayingsOrAcronyms
} else {
return K.HeaderText.favorites
}
}
override func tableView(_ tableView: UITableView, viewForHeaderInSection section: Int) -> UIView? {
let headerView = UIView().headerViewWithLabel(title: self.tableView(tableView, titleForHeaderInSection: section) ?? "")
headerView.backgroundColor = UIColor.clear
return headerView
}
override func tableView(_ tableView: UITableView, heightForHeaderInSection section: Int) -> CGFloat {
return K.HeaderHeight
}
//Mark: - Tableview Delegate Methods
override func tableView(_ tableView: UITableView, didSelectRowAt indexPath: IndexPath) {
// tableView.deselectRow(at: indexPath, animated: true)
if indexPath.section == 1 {
performSegue(withIdentifier: K.newSegueIdentifier, sender: self)
}
}
    /// Configures the destination controller with the data set that matches
    /// the selected row, then clears the selection.
    override func prepare(for segue: UIStoryboardSegue, sender: Any?) {
        if let indexPath = tableView.indexPathForSelectedRow {
            if indexPath.section == 1 {
                // Force-cast: this segue is only wired to SSOAViewController.
                let destinationVC = segue.destination as! SSOAViewController
                destinationVC.delegate = self
                // Row order: 0 = slogans, 1 = sayings, 2 = acronyms.
                switch indexPath.row {
                case 0:
                    destinationVC.arrayOfSelectedType = slogans
                    destinationVC.selectedTitle = K.HeaderText.slogans
                case 1:
                    destinationVC.arrayOfSelectedType = sayings
                    destinationVC.selectedTitle = K.HeaderText.sayings
                case 2:
                    destinationVC.arrayOfSelectedType = acronyms
                    destinationVC.selectedTitle = K.HeaderText.acronyms
                default:
                    // This section only ever has three rows.
                    assertionFailure()
                }
            }
            // Deselect here (not in didSelectRowAt) so the index path is
            // still available while the segue is being configured.
            tableView.deselectRow(at: indexPath, animated: true)
        }
    }
    /// Re-fetches every backing data set from `SloganSayingOrAcronym` and
    /// refreshes the table so all sections reflect the latest state.
    func updateDataAndTableview() {
        slogans = SloganSayingOrAcronym.getSlogans()
        sayings = SloganSayingOrAcronym.getSayings()
        acronyms = SloganSayingOrAcronym.getAcronyms()
        favorites = SloganSayingOrAcronym.getFavorites()
        tableView.reloadData()
    }
}
|
"use client";
import { createUser } from "@/actions/user.actions";
import { FormInput } from "@/components/form-input";
import { Button } from "@/components/ui/button";
import { Form } from "@/components/ui/form";
import { useToast } from "@/components/ui/use-toast";
import { RegisterFormData, RegisterSchema } from "@/zodSchemas/user.schema";
import { zodResolver } from "@hookform/resolvers/zod";
import { signIn } from "next-auth/react";
import { useRouter } from "next/navigation";
import { useForm } from "react-hook-form";
/**
 * Registration form: creates the user, then signs them in with the same
 * credentials and redirects to the dashboard on success. Errors from either
 * step surface as destructive toasts.
 */
export const SignUpForm = () => {
  const router = useRouter();
  const { toast } = useToast();

  const form = useForm<RegisterFormData>({
    resolver: zodResolver(RegisterSchema),
    defaultValues: { email: "", password: "", password2: "" },
  });

  const handleRegister = async (values: RegisterFormData) => {
    const result = await createUser(values);

    // Guard clause: registration failed — show the server message and bail.
    if (!result.success) {
      toast({
        title: "Error",
        description: `${result.data}`,
        variant: "destructive",
      });
      return;
    }

    // Registration succeeded: sign in without a redirect so we can handle
    // the outcome ourselves.
    const signInResult = await signIn("credentials", {
      email: values.email,
      password: values.password,
      redirect: false,
    });

    if (signInResult?.ok && !signInResult.error) {
      toast({ title: "Success", description: "User Created successfully" });
      router.push("/dashboard");
    } else {
      toast({
        title: "Error",
        description: "Invalid Credentials",
        variant: "destructive",
      });
    }
  };

  return (
    <Form {...form}>
      <form className="space-y-6 mt-8" onSubmit={form.handleSubmit(handleRegister)}>
        <FormInput name="email" label="Email Address" type="email" />
        <FormInput name="password" label="Password" type="password" />
        <FormInput name="password2" label="Confirm Password" type="password" />
        <Button
          isLoading={form.formState.isSubmitting}
          type="submit"
          className="w-full"
        >
          Submit
        </Button>
      </form>
    </Form>
  );
};
|
import 'package:flutter_ink_web_check_in/core/core.dart';
import 'package:freezed_annotation/freezed_annotation.dart';
part 'assign_seat_response.freezed.dart';
part 'assign_seat_response.g.dart';
/// Response payload of the seat-assignment API call.
///
/// `errors` maps the JSON `error` array and defaults to an empty list;
/// `seatNumber` maps `seat_number` and is null when absent in the payload.
@freezed
class AssignSeatResponse with _$AssignSeatResponse {
  const factory AssignSeatResponse({
    @Default([]) @JsonKey(name: 'error') List<CommonError> errors,
    @JsonKey(name: 'seat_number') String? seatNumber,
  }) = _AssignSeatResponse;

  // Delegates to the code generated in assign_seat_response.g.dart.
  factory AssignSeatResponse.fromJson(Map<String, dynamic> json) =>
      _$AssignSeatResponseFromJson(json);
}
|
#include <iostream>
#include <vector>
using namespace std;
// Prints the matrix to stdout, one row per line, elements separated by a
// single space, preceded by the "Matriks:" banner.
void tampilkanMatriks(const std::vector<std::vector<double> >& matriks) {
    std::cout << "Matriks:\n";
    for (std::size_t i = 0; i < matriks.size(); ++i) {
        for (std::size_t j = 0; j < matriks[i].size(); ++j) {
            std::cout << matriks[i][j] << " ";
        }
        std::cout << std::endl;
    }
}
// Exchanges two rows of the matrix (0-based indices).
// Member swap exchanges the row buffers in O(1) without copying elements.
void tukarBaris(std::vector<std::vector<double> >& matriks, int baris1, int baris2) {
    matriks[baris1].swap(matriks[baris2]);
}
// Exchanges two columns of the matrix (0-based indices), row by row.
void tukarKolom(std::vector<std::vector<double> >& matriks, int kolom1, int kolom2) {
    for (std::size_t i = 0; i < matriks.size(); ++i) {
        std::swap(matriks[i][kolom1], matriks[i][kolom2]);
    }
}
// Multiplies every element of one row (0-based index) by the scalar, in place.
void kaliBarisDenganSkalar(std::vector<std::vector<double> >& matriks, int baris, double skalar) {
    std::vector<double>& target = matriks[baris];
    for (std::size_t j = 0; j < target.size(); ++j) {
        target[j] *= skalar;
    }
}
// Multiplies every element of one column (0-based index) by the scalar, in place.
void kaliKolomDenganSkalar(std::vector<std::vector<double> >& matriks, int kolom, double skalar) {
    for (std::size_t i = 0; i < matriks.size(); ++i) {
        matriks[i][kolom] *= skalar;
    }
}
// Builds a baris x kolom matrix, prompting the user on stdout and reading
// each element from stdin (prompts use 1-based indices).
vector<vector<double> > buatMatriksDinamis(int baris, int kolom) {
    vector<vector<double> > matriks(baris, vector<double>(kolom));
    for (int i = 0; i < baris; ++i) {
        for (int j = 0; j < kolom; ++j) {
            cout << "Masukkan elemen [" << i+1 << "][" << j+1 << "]: ";
            cin >> matriks[i][j];
        }
    }
    return matriks;
}
// Interactive driver: reads a matrix, asks for one elementary transformation
// (row swap, column swap, row/column scaling), applies it and prints the
// result. Returns 1 on invalid input, 0 on success.
int main() {
    int baris, kolom;
    cout << "Masukkan jumlah baris: ";
    cin >> baris;
    cout << "Masukkan jumlah kolom: ";
    cin >> kolom;
    // Validate dimensions up front; non-positive sizes would make every
    // later index unusable.
    if (baris <= 0 || kolom <= 0) {
        cout << "Ukuran matriks tidak valid.\n";
        return 1;
    }
    vector<vector<double> > matriks = buatMatriksDinamis(baris, kolom);
    tampilkanMatriks(matriks);
    string jenisTransformasi; // read as string so two-letter codes (SB/SK) work
    cout << "Apakah Anda ingin melakukan pertukaran baris (B), kolom (K), kali baris dengan skalar (SB), atau kali kolom dengan skalar (SK)? ";
    cin >> ws; // skip leftover whitespace still in the input buffer
    getline(cin, jenisTransformasi);
    if (jenisTransformasi == "B" || jenisTransformasi == "b") {
        int baris1, baris2;
        cout << "Masukkan nomor baris pertama untuk ditukar: ";
        cin >> baris1;
        cout << "Masukkan nomor baris kedua untuk ditukar: ";
        cin >> baris2;
        // Bounds check: out-of-range rows would index past the vector (UB).
        if (baris1 < 1 || baris1 > baris || baris2 < 1 || baris2 > baris) {
            cout << "Nomor baris di luar jangkauan.\n";
            return 1;
        }
        tukarBaris(matriks, baris1 - 1, baris2 - 1);
    } else if (jenisTransformasi == "K" || jenisTransformasi == "k") {
        int kolom1, kolom2;
        cout << "Masukkan nomor kolom pertama untuk ditukar: ";
        cin >> kolom1;
        cout << "Masukkan nomor kolom kedua untuk ditukar: ";
        cin >> kolom2;
        if (kolom1 < 1 || kolom1 > kolom || kolom2 < 1 || kolom2 > kolom) {
            cout << "Nomor kolom di luar jangkauan.\n";
            return 1;
        }
        tukarKolom(matriks, kolom1 - 1, kolom2 - 1);
    } else if (jenisTransformasi == "SB" || jenisTransformasi == "sb") {
        int barisTarget;
        double skalar;
        cout << "Masukkan nomor baris untuk perkalian skalar: ";
        cin >> barisTarget;
        cout << "Masukkan nilai skalar: ";
        cin >> skalar;
        if (barisTarget < 1 || barisTarget > baris) {
            cout << "Nomor baris di luar jangkauan.\n";
            return 1;
        }
        kaliBarisDenganSkalar(matriks, barisTarget - 1, skalar);
    } else if (jenisTransformasi == "SK" || jenisTransformasi == "sk") {
        int kolomTarget;
        double skalar;
        cout << "Masukkan nomor kolom untuk perkalian skalar: ";
        cin >> kolomTarget;
        cout << "Masukkan nilai skalar: ";
        cin >> skalar;
        if (kolomTarget < 1 || kolomTarget > kolom) {
            cout << "Nomor kolom di luar jangkauan.\n";
            return 1;
        }
        kaliKolomDenganSkalar(matriks, kolomTarget - 1, skalar);
    } else {
        cout << "Jenis transformasi tidak valid.\n";
        return 1;
    }
    cout << "Setelah transformasi:\n";
    tampilkanMatriks(matriks);
    return 0;
}
|
# Async Utils
A TypeScript library providing utility classes and functions to simplify working with asynchronous operations and promises.
## Installation
```bash
yarn add github:worph/async-utils#main
```
## Features
- **Lazy<T>**: A utility type for lazy initialization of asynchronous operations.
- **ListenerCleaner**: Manages cleanup functions, allowing for easy resource management.
- **PromiseQueue**: A queue system for promises, allowing for sequential execution and cancellation.
- **MultiQueue**: Manages multiple `PromiseQueue` instances, distributing tasks to optimize concurrency.
- **Id Generation**: Utility function for generating random IDs.
## Usage
### Lazy Initialization
```typescript
import { Lazy } from "@worph/async-utils";
const lazyValue: Lazy<number> = async () => {
// some asynchronous operation
return 42;
};
// Usage
lazyValue().then(value => console.log(value));
```
### Listener Cleaner
```typescript
import { ListenerCleaner } from "@worph/async-utils";
const cleaner = new ListenerCleaner();
cleaner.add(() => console.log("Cleanup action"));
// Trigger all cleanup actions
cleaner.cleanUp();
```
### Promise Queue
```typescript
import { PromiseQueue } from "@worph/async-utils";
const queue = new PromiseQueue<number>();
queue.add(async () => {
// some asynchronous task
return 1;
});
// Wait for the queue to be empty
queue.awaitQueueEmpty().then(() => console.log("Queue empty"));
```
### Multi Queue
```typescript
import { MultiQueue } from "@worph/async-utils";
const multiQueue = new MultiQueue<number>(2); // 2 concurrent tasks
multiQueue.add(async () => {
// some asynchronous task
return 1;
});
// Add more tasks...
// Wait for all queues to be empty
multiQueue.awaitQueueEmpty().then(() => console.log("All queues empty"));
```
### Generating an ID
```typescript
import { makeid } from "@worph/async-utils";
const id = makeid(10); // Generates a random 10 character string
console.log(id);
```
## Contributing
Contributions are welcome! Please submit a pull request or open an issue for discussion.
## License
This project is licensed under the MIT License - see the LICENSE file for details.
|
package com.example.springapp.Aruthracontroller;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RestController;
import java.util.*;
import com.example.springapp.Aruthramodel.AruthraPerson;
import com.example.springapp.Aruthraservice.AruthraPersonService;
/**
 * REST endpoints for creating {@link AruthraPerson} records and searching
 * them by name prefix/suffix.
 */
@RestController
public class AruthraPersonController {

    // Service layer performing persistence and search.
    @Autowired
    private AruthraPersonService ser;

    /**
     * Creates a new person.
     *
     * @param person the request body to persist
     * @return 201 with the saved entity, or 500 when the service rejects it
     */
    @PostMapping("/person")
    public ResponseEntity<AruthraPerson> post(@RequestBody AruthraPerson person) {
        if (ser.post(person)) {
            return new ResponseEntity<>(person, HttpStatus.CREATED);
        }
        return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR);
    }

    /**
     * Lists people whose name starts with the given value.
     *
     * @return 200 with matches, 404 when there are none
     */
    @GetMapping("/person/startsWithName/{value}")
    public ResponseEntity<List<AruthraPerson>> getAll(@PathVariable String value) {
        List<AruthraPerson> li = ser.start(value);
        if (!li.isEmpty()) {
            return new ResponseEntity<>(li, HttpStatus.OK);
        }
        return new ResponseEntity<>(HttpStatus.NOT_FOUND);
    }

    /**
     * Lists people whose name ends with the given value.
     * NOTE(review): the method name says "byAge" but it delegates to
     * {@code ser.end(value)} (ends-with search); name kept to avoid breaking
     * callers — consider renaming in a coordinated change.
     *
     * @return 200 with matches, 404 when there are none
     */
    @GetMapping("/person/endsWithName/{value}")
    public ResponseEntity<List<AruthraPerson>> getbyAge(@PathVariable String value) {
        List<AruthraPerson> li = ser.end(value);
        if (!li.isEmpty()) {
            return new ResponseEntity<>(li, HttpStatus.OK);
        }
        return new ResponseEntity<>(HttpStatus.NOT_FOUND);
    }
}
|
/* This example requires Tailwind CSS v2.0+ */
import { useEffect, useState } from 'react'
import { useUI } from '@components/ui/context'
import Link from 'next/link'
import axios from 'axios'
import { NEXT_GET_ORDER_DETAILS } from '@components/utils/constants'
const defaultModel: any = {}
import { LoadingDots } from '@components/ui'
import { removeItem } from '@components/utils/localStorage'
import {
BTN_BACK_TO_HOME,
GENERAL_ADDRESSES,
GENERAL_BILLING_ADDRESS,
GENERAL_DELIVERED_BY,
GENERAL_ITEMS,
GENERAL_ON_THE_WAY,
GENERAL_ORDER_WILL_BE_WITH_YOU_SOON,
GENERAL_PAYMENT,
GENERAL_PAYMENT_METHOD,
GENERAL_PRICE,
GENERAL_QUANTITY,
GENERAL_SHIPPING,
GENERAL_SHIPPING_ADDRESS,
GENERAL_SHIPPING_METHOD,
GENERAL_SUMMARY,
GENERAL_THANK_YOU,
GENERAL_TOTAL,
GENERAL_YOUR_ORDER,
LOADING_YOUR_ORDERS,
NO_ORDER_PROVIDED,
SUBTOTAL_INCLUDING_TAX,
YOUR_INFORMATION,
} from '@components/utils/textVariables'
import { ELEM_ATTR, ORDER_CONFIRMATION_AFTER_PROGRESS_BAR_ELEM_SELECTORS } from '@framework/content/use-content-snippet'
/**
 * Order-confirmation page: fetches the order identified by the UI context's
 * orderId, shows a loading state while fetching, and renders items,
 * addresses, payment/shipping info and totals once loaded.
 */
export default function OrderConfirmation() {
  const [order, setOrderData] = useState(defaultModel)
  const [isLoading, setIsLoading] = useState(true)
  const { setOrderId, orderId } = useUI()
  useEffect(() => {
    const fetchOrder = async () => {
      const { data }: any = await axios.post(NEXT_GET_ORDER_DETAILS, {
        id: orderId,
      })
      setOrderData(data.order)
      setIsLoading(false)
    }
    // Stale checkout artifacts are no longer needed once we land here.
    removeItem('orderResponse')
    removeItem('orderModelPayment')
    if (orderId) fetchOrder()
    if (!orderId) setIsLoading(false)
    // NOTE(review): orderId is read here but absent from the dependency
    // list — confirm the empty deps (run-once on mount) are intentional.
    return function cleanup() {
      setOrderId('')
    }
  }, [])
  if (isLoading) {
    return (
      <main className="bg-white px-4 pt-16 pb-24 sm:px-6 sm:pt-24 lg:px-8 lg:py-32">
        <h1 className="text-5xl text-center w-full font-extrabold uppercase tracking-light text-gray-600">
          {LOADING_YOUR_ORDERS}
        </h1>
        <div className="mt-10 flex justify-center items-center text-gray-900 w-full">
          <LoadingDots />
        </div>
      </main>
    )
  }
  return (
    <>
      <main className="bg-white px-4 pt-16 pb-24 sm:px-6 sm:pt-24 lg:px-8 lg:py-32">
        <div className="max-w-3xl mx-auto">
          <div className="max-w-xl">
            <h1 className="text-sm font-semibold uppercase tracking-wide text-indigo-600">
              {order.orderNo ? GENERAL_THANK_YOU : null}
            </h1>
            <p className="mt-2 text-4xl text-black font-extrabold tracking-tight sm:text-5xl">
              {order.orderNo ? GENERAL_ON_THE_WAY : NO_ORDER_PROVIDED}
            </p>
            {order.orderNo ? (
              <p className="mt-2 text-black text-gray-500">
                {GENERAL_YOUR_ORDER} {order.orderNo}{' '}
                {GENERAL_ORDER_WILL_BE_WITH_YOU_SOON}
              </p>
            ) : null}
          </div>
          {order.orderNo ? (
            <section
              aria-labelledby="order-heading"
              className="mt-10 border-t border-gray-200"
            >
              <h2 id="order-heading" className="sr-only">
                {GENERAL_YOUR_ORDER}
              </h2>
              <h3 className="sr-only">{GENERAL_ITEMS}</h3>
              {order.items.map((product: any) => (
                <div
                  key={product.id}
                  className="py-10 border-b border-gray-200 flex space-x-6"
                >
                  <img
                    src={product.image}
                    alt={product.name}
                    className="flex-none w-20 h-20 object-center object-cover bg-gray-100 rounded-lg sm:w-40 sm:h-40"
                  />
                  <div className="flex-auto flex flex-col">
                    <div>
                      <h4 className="font-medium text-gray-900">
                        <Link href={`/${product.slug}`}>
                          <a>{product.name}</a>
                        </Link>
                      </h4>
                      <div
                        dangerouslySetInnerHTML={{
                          __html: product.shortDescription,
                        }}
                        className="mt-2 text-sm text-gray-500"
                      />
                    </div>
                    <div className="mt-6 flex-1 flex items-end">
                      <dl className="flex text-sm divide-x divide-gray-200 space-x-4 sm:space-x-6">
                        <div className="flex">
                          <dt className="font-medium text-gray-900">
                            {GENERAL_QUANTITY}
                          </dt>
                          <dd className="ml-2 text-gray-700">{product.qty}</dd>
                        </div>
                        <div className="pl-4 flex sm:pl-6">
                          <dt className="font-medium text-gray-900">
                            {GENERAL_PRICE}
                          </dt>
                          <dd className="ml-2 text-gray-700">
                            {product.price.formatted.withTax}
                          </dd>
                        </div>
                      </dl>
                    </div>
                  </div>
                </div>
              ))}
              <div className="sm:ml-40 sm:pl-6">
                <h3 className="sr-only">{YOUR_INFORMATION}</h3>
                <h4 className="sr-only">{GENERAL_ADDRESSES}</h4>
                <dl className="grid grid-cols-2 gap-x-6 text-sm py-10">
                  <div>
                    <dt className="font-medium text-gray-900">
                      {GENERAL_SHIPPING_ADDRESS}
                    </dt>
                    <dd className="mt-2 text-gray-700">
                      <address className="not-italic">
                        <span className="block">{`${order.shippingAddress.firstName} ${order.shippingAddress.lastName}`}</span>
                        <span className="block">{`${order.shippingAddress.phoneNo}`}</span>
                        <span className="block">{`${order.shippingAddress.address1}`}</span>
                        <span className="block">{`${order.shippingAddress.address2}`}</span>
                        <span className="block">{`${order.shippingAddress.city} ${order.shippingAddress.countryCode} ${order.shippingAddress.postCode}`}</span>
                      </address>
                    </dd>
                  </div>
                  <div>
                    <dt className="font-medium text-gray-900">
                      {GENERAL_BILLING_ADDRESS}
                    </dt>
                    <dd className="mt-2 text-gray-700">
                      <address className="not-italic">
                        <span className="block">{`${order.billingAddress.firstName} ${order.billingAddress.lastName}`}</span>
                        <span className="block">{`${order.shippingAddress.phoneNo}`}</span>
                        <span className="block">{`${order.billingAddress.address1}`}</span>
                        <span className="block">{`${order.billingAddress.address2}`}</span>
                        <span className="block">{`${order.billingAddress.city} ${order.billingAddress.countryCode} ${order.billingAddress.postCode}`}</span>
                      </address>
                    </dd>
                  </div>
                </dl>
                <h4 className="sr-only">{GENERAL_PAYMENT}</h4>
                <dl className="grid grid-cols-2 gap-x-6 border-t border-gray-200 text-sm py-10">
                  {order.payments && (
                    <div>
                      <dt className="font-medium text-gray-900">
                        {GENERAL_PAYMENT_METHOD}
                      </dt>
                      <dd className="mt-2 text-gray-700">
                        <p>{order.payments[0]?.paymentMethod}</p>
                        {/* <p>{order.payments[0]?.paymentGateway}</p> */}
                      </dd>
                    </div>
                  )}
                  <div>
                    <dt className="font-medium text-gray-900">
                      {GENERAL_SHIPPING_METHOD}
                    </dt>
                    <dd className="mt-2 text-gray-700">
                      <p>{order.shipping.displayName}</p>
                      <p>
                        {GENERAL_DELIVERED_BY}:{' '}
                        {new Date(
                          order.shipping.expectedDeliveryDate
                        ).toLocaleDateString()}
                      </p>
                    </dd>
                  </div>
                </dl>
                <h3 className="sr-only">{GENERAL_SUMMARY}</h3>
                <dl className="space-y-6 border-t border-gray-200 text-sm pt-10">
                  <div className="flex justify-between">
                    <dt className="font-medium text-gray-900">
                      {SUBTOTAL_INCLUDING_TAX}
                    </dt>
                    <dd className="text-gray-700">
                      {order.subTotal?.formatted?.withTax}
                    </dd>
                  </div>
                  <div className="flex justify-between">
                    <dt className="font-medium text-gray-900">
                      {GENERAL_SHIPPING}
                    </dt>
                    <dd className="text-gray-700">
                      {/* Optional chaining for consistency with subTotal /
                          grandTotal above and below — avoids a crash when
                          shippingCharge is absent on the payload. */}
                      {order.shippingCharge?.formatted?.withTax}
                    </dd>
                  </div>
                  <div className="flex justify-between">
                    <dt className="font-medium text-gray-900">{GENERAL_TOTAL}</dt>
                    <dd className="text-gray-900">
                      {order.grandTotal?.formatted?.withTax}
                    </dd>
                  </div>
                </dl>
              </div>
            </section>
          ) : null}
          <div className="max-w-xl">
            <Link href={`/`} passHref>
              <a
                href="/"
                className="text-indigo-600 font-medium hover:text-indigo-500"
              >
                {BTN_BACK_TO_HOME}
              </a>
            </Link>
          </div>
        </div>
      </main>
      {/* Placeholder for order confirmation after progress bar snippet */}
      <div className={`${ELEM_ATTR}${ORDER_CONFIRMATION_AFTER_PROGRESS_BAR_ELEM_SELECTORS[0]}`}></div>
    </>
  );
}
|
//
// MapListView.swift
// MazeA*
//
// Created by Imen Ksouri on 26/04/2023.
//
import SwiftUI
/// Lists all saved mazes and lets the user preview, load, delete, or clear
/// them. Layout of the preview screen adapts to the size classes.
struct MapListView: View {
    @Environment(\.verticalSizeClass) var verticalSizeClass: UserInterfaceSizeClass?
    @Environment(\.horizontalSizeClass) var horizontalSizeClass: UserInterfaceSizeClass?
    // Shared maze model updated when a saved map is loaded.
    @EnvironmentObject var viewModel: Maze
    // Loads/persists the saved maps shown in the list.
    @StateObject var mapService = MapService()
    // Set to false to dismiss this view and return to the maze.
    @Binding var isLoadingMaze: Bool
    @State private var showAlert = false
    var body: some View {
        NavigationStack {
            GeometryReader { geometry in
                List {
                    // Sorted by key so the list order is stable across reloads.
                    ForEach(mapService.maps.sorted(by: <), id: \.key) { file, map in
                        NavigationLink {
                            ZStack {
                                Color.teal
                                    .opacity(0.7)
                                    .ignoresSafeArea(.container, edges: .bottom)
                                // Portrait on compact widths: button above the preview.
                                if horizontalSizeClass == .compact && verticalSizeClass == .regular {
                                    VStack {
                                        Spacer()
                                        Button {
                                            isLoadingMaze = false
                                            viewModel.loadMaze(file)
                                            UserDefaults.standard.setValue(file, forKey: "fileSystem")
                                        } label: {
                                            Label("Select", systemImage: "hand.tap")
                                                .font(.title2)
                                        }
                                        .padding(.bottom)
                                        .buttonStyle(.borderedProminent)
                                        Spacer()
                                        TemporaryMazeView(map: map, rows: .constant("\(map.rows)"), columns: .constant("\(map.columns)"), startPoint: .constant(nil), goalPoint: .constant(nil), selection: .constant(nil)) {}
                                        Spacer()
                                    }
                                } else {
                                    // Landscape / regular widths: preview beside the button.
                                    // NOTE(review): unlike the compact branch, this branch does
                                    // not persist the file name to UserDefaults ("fileSystem")
                                    // — confirm whether that is intentional.
                                    HStack {
                                        Spacer()
                                        TemporaryMazeView(map: map, rows: .constant("\(map.rows)"), columns: .constant("\(map.columns)"), startPoint: .constant(nil), goalPoint: .constant(nil), selection: .constant(nil)) {}
                                        Spacer()
                                        Button {
                                            isLoadingMaze = false
                                            viewModel.loadMaze(file)
                                        } label: {
                                            Image(systemName: "hand.tap")
                                                .font(.title2)
                                                .bold()
                                                .frame(width: 40, height: 40)
                                        }
                                        .buttonStyle(.borderedProminent)
                                    }
                                }
                            }
                        } label: {
                            MapRow(map: map)
                        }
                    }
                    .onDelete(perform: { offsets in
                        withAnimation {
                            delete(at: offsets)
                        }
                    })
                    .navigationTitle("Saved Mazes")
                    .navigationBarTitleDisplayMode(.inline)
                }
                .onAppear {
                    mapService.load()
                }
                .toolbar {
                    ToolbarItem(placement: .navigationBarLeading) {
                        HStack {
                            EditButton()
                            // "Clear" asks for confirmation before wiping everything.
                            Button {
                                if !mapService.maps.isEmpty {
                                    showAlert = true
                                }
                            } label: {
                                Text("Clear")
                            }
                            .alert(isPresented: $showAlert) {
                                Alert(title: Text("Do you really want to proceed ?"), message: Text("This action will cause the deletion of data related to all the saved mazes."), primaryButton: .destructive(Text("Delete")) {
                                    mapService.clear()
                                    mapService.maps.removeAll()
                                },
                                      secondaryButton: .cancel())
                            }
                        }
                    }
                    ToolbarItem(placement: .navigationBarTrailing) {
                        // Close button: dismisses the list without loading a maze.
                        Button {
                            isLoadingMaze = false
                        } label: {
                            Image(systemName: "xmark")
                                .bold()
                                .padding(.top, 10)
                        }
                    }
                }
                .frame(width: geometry.size.width, height: geometry.size.height)
            }
        }
    }
    /// Removes the swiped-away map from storage.
    /// NOTE(review): only the first index in `offsets` is handled — multi-row
    /// deletion in edit mode would drop only one row; confirm this is intended.
    private func delete(at offsets: IndexSet){
        if let ndx = offsets.first {
            let item = mapService.maps.sorted(by: <)[ndx]
            mapService.remove(item.key)
        }
    }
}
/// Xcode canvas preview backed by the sample map data.
struct MapListView_Previews: PreviewProvider {
    static var previews: some View {
        MapListView(isLoadingMaze: .constant(true))
            .environmentObject(Maze(map: Map.sampleData))
    }
}
/// A single list row summarizing one saved map: id, dimensions, wall count,
/// and 1-based start/goal coordinates.
struct MapRow: View {
    var map: Map

    /// Total number of wall cells, rendered as a string.
    var walls: String {
        let wallCount = map.cells.flatMap { $0 }.filter { $0.isWall }.count
        return "\(wallCount)"
    }

    /// Start point as "(x, y)" with 1-based coordinates.
    var start: String {
        let x = Int(map.startPoint.x) + 1
        let y = Int(map.startPoint.y) + 1
        return "(\(x), \(y))"
    }

    /// Goal point as "(x, y)" with 1-based coordinates.
    var goal: String {
        let x = Int(map.goalPoint.x) + 1
        let y = Int(map.goalPoint.y) + 1
        return "(\(x), \(y))"
    }

    var body: some View {
        VStack(alignment: .leading) {
            Text("id: \(map.id)")
                .font(.caption)
                .bold()
                .foregroundColor(.secondary)
                .padding(.bottom, 1)
            Text("\(map.rows) rows / \(map.columns) columns")
            Text("\(walls) walls")
            Text("Start coordinates: \(start)")
            Text("Goal coordinates: \(goal)")
        }
    }
}
|
/*! normalize.css v8.0.1 | MIT License | github.com/necolas/normalize.css */
/* Document
/**
* 1. Correct the line height in all browsers.
* 2. Prevent adjustments of font size after orientation changes in iOS.
*/
html {
line-height: 1.15;
/* 1 */
-webkit-text-size-adjust: 100%;
/* 2 */
}
/* Sections
/**
* Remove the margin in all browsers.
*/
body {
margin: 0;
}
/**
* Render the `main` element consistently in IE.
*/
main {
display: block;
}
/**
* Correct the font size and margin on `h1` elements within `section` and
* `article` contexts in Chrome, Firefox, and Safari.
*/
h1 {
font-size: 2em;
margin: 0.67em 0;
}
/* Grouping content
/**
* 1. Add the correct box sizing in Firefox.
* 2. Show the overflow in Edge and IE.
*/
hr {
-webkit-box-sizing: content-box;
box-sizing: content-box;
/* 1 */
height: 0;
/* 1 */
overflow: visible;
/* 2 */
}
/**
* 1. Correct the inheritance and scaling of font size in all browsers.
* 2. Correct the odd `em` font sizing in all browsers.
*/
pre {
font-family: monospace, monospace;
/* 1 */
font-size: 1em;
/* 2 */
}
/* Text-level semantics
/**
* Remove the gray background on active links in IE 10.
*/
a {
background-color: transparent;
}
/**
* 1. Remove the bottom border in Chrome 57-
* 2. Add the correct text decoration in Chrome, Edge, IE, Opera, and Safari.
*/
abbr[title] {
border-bottom: none;
/* 1 */
text-decoration: underline;
/* 2 */
-webkit-text-decoration: underline dotted;
text-decoration: underline dotted;
/* 2 */
}
/**
* Add the correct font weight in Chrome, Edge, and Safari.
*/
b,
strong {
font-weight: bolder;
}
/**
* 1. Correct the inheritance and scaling of font size in all browsers.
* 2. Correct the odd `em` font sizing in all browsers.
*/
code,
kbd,
samp {
font-family: monospace, monospace;
/* 1 */
font-size: 1em;
/* 2 */
}
/**
* Add the correct font size in all browsers.
*/
small {
font-size: 80%;
}
/**
* Prevent `sub` and `sup` elements from affecting the line height in
* all browsers.
*/
sub,
sup {
font-size: 75%;
line-height: 0;
position: relative;
vertical-align: baseline;
}
sub {
bottom: -0.25em;
}
sup {
top: -0.5em;
}
/* Embedded content
/**
* Remove the border on images inside links in IE 10.
*/
img {
border-style: none;
}
/* Forms
/**
* 1. Change the font styles in all browsers.
* 2. Remove the margin in Firefox and Safari.
*/
button,
input,
optgroup,
select,
textarea {
font-family: inherit;
/* 1 */
font-size: 100%;
/* 1 */
line-height: 1.15;
/* 1 */
margin: 0;
/* 2 */
}
/**
* Show the overflow in IE.
* 1. Show the overflow in Edge.
*/
button,
input {
/* 1 */
overflow: visible;
}
/**
* Remove the inheritance of text transform in Edge, Firefox, and IE.
* 1. Remove the inheritance of text transform in Firefox.
*/
button,
select {
/* 1 */
text-transform: none;
}
/**
* Correct the inability to style clickable types in iOS and Safari.
*/
button,
[type="button"],
[type="reset"],
[type="submit"] {
-webkit-appearance: button;
}
/**
* Remove the inner border and padding in Firefox.
*/
button::-moz-focus-inner,
[type="button"]::-moz-focus-inner,
[type="reset"]::-moz-focus-inner,
[type="submit"]::-moz-focus-inner {
border-style: none;
padding: 0;
}
/**
* Restore the focus styles unset by the previous rule.
*/
button:-moz-focusring,
[type="button"]:-moz-focusring,
[type="reset"]:-moz-focusring,
[type="submit"]:-moz-focusring {
outline: 1px dotted ButtonText;
}
/**
* Correct the padding in Firefox.
*/
fieldset {
padding: 0.35em 0.75em 0.625em;
}
/**
* 1. Correct the text wrapping in Edge and IE.
* 2. Correct the color inheritance from `fieldset` elements in IE.
* 3. Remove the padding so developers are not caught out when they zero out
* `fieldset` elements in all browsers.
*/
legend {
-webkit-box-sizing: border-box;
box-sizing: border-box;
/* 1 */
color: inherit;
/* 2 */
display: table;
/* 1 */
max-width: 100%;
/* 1 */
padding: 0;
/* 3 */
white-space: normal;
/* 1 */
}
/**
* Add the correct vertical alignment in Chrome, Firefox, and Opera.
*/
progress {
vertical-align: baseline;
}
/**
* Remove the default vertical scrollbar in IE 10+.
*/
textarea {
overflow: auto;
}
/**
* 1. Add the correct box sizing in IE 10.
* 2. Remove the padding in IE 10.
*/
[type="checkbox"],
[type="radio"] {
-webkit-box-sizing: border-box;
box-sizing: border-box;
/* 1 */
padding: 0;
/* 2 */
}
/**
* Correct the cursor style of increment and decrement buttons in Chrome.
*/
[type="number"]::-webkit-inner-spin-button,
[type="number"]::-webkit-outer-spin-button {
height: auto;
}
/**
* 1. Correct the odd appearance in Chrome and Safari.
* 2. Correct the outline style in Safari.
*/
[type="search"] {
-webkit-appearance: textfield;
/* 1 */
outline-offset: -2px;
/* 2 */
}
/**
* Remove the inner padding in Chrome and Safari on macOS.
*/
[type="search"]::-webkit-search-decoration {
-webkit-appearance: none;
}
/**
* 1. Correct the inability to style clickable types in iOS and Safari.
* 2. Change font properties to `inherit` in Safari.
*/
::-webkit-file-upload-button {
-webkit-appearance: button;
/* 1 */
font: inherit;
/* 2 */
}
/* Interactive
/*
* Add the correct display in Edge, IE 10+, and Firefox.
*/
details {
display: block;
}
/*
* Add the correct display in all browsers.
*/
summary {
display: list-item;
}
/* Misc
/**
* Add the correct display in IE 10+.
*/
template {
display: none;
}
/**
* Add the correct display in IE 10.
*/
[hidden] {
display: none;
}
/* FONTS */
/* GENERAL */
html {
-webkit-box-sizing: border-box;
box-sizing: border-box;
height: 100%;
scroll-behavior: smooth;
}
*,
*::before,
*::after {
-webkit-box-sizing: inherit;
box-sizing: inherit;
}
body {
display: -webkit-box;
display: -ms-flexbox;
display: flex;
-webkit-box-orient: vertical;
-webkit-box-direction: normal;
-ms-flex-direction: column;
flex-direction: column;
height: 100%;
padding: 0;
margin: 0;
overflow-x: hidden;
font-family: "Arial", sans-serif;
background-color: #fff;
}
img {
max-width: 100%;
height: auto;
}
ul, ol {
padding: 0;
margin: 0;
list-style: none;
}
i {
pointer-events: none;
}
::-webkit-scrollbar {
background: 0 0;
border: 0 none #75cea6;
border-radius: 50px;
width: 5px;
height: 7px;
}
::-webkit-scrollbar-thumb {
background: #75cea6;
border: 0 none #fff;
border-radius: 0;
}
.col {
-webkit-box-flex: 1;
-ms-flex: 1 0 0%;
flex: 1 0 0%;
}
.col-1 {
-webkit-box-flex: 0;
-ms-flex: 0 0 auto;
flex: 0 0 auto;
width: 8.33333333%;
}
.col-2 {
-webkit-box-flex: 0;
-ms-flex: 0 0 auto;
flex: 0 0 auto;
width: 16.66666667%;
}
.col-3 {
-webkit-box-flex: 0;
-ms-flex: 0 0 auto;
flex: 0 0 auto;
width: 25%;
}
.col-4 {
-webkit-box-flex: 0;
-ms-flex: 0 0 auto;
flex: 0 0 auto;
width: 33.33333333%;
}
.col-5 {
-webkit-box-flex: 0;
-ms-flex: 0 0 auto;
flex: 0 0 auto;
width: 41.66666667%;
}
.col-6 {
-webkit-box-flex: 0;
-ms-flex: 0 0 auto;
flex: 0 0 auto;
width: 50%;
}
.col-7 {
-webkit-box-flex: 0;
-ms-flex: 0 0 auto;
flex: 0 0 auto;
width: 58.33333333%;
}
.col-8 {
-webkit-box-flex: 0;
-ms-flex: 0 0 auto;
flex: 0 0 auto;
width: 66.66666667%;
}
.col-9 {
-webkit-box-flex: 0;
-ms-flex: 0 0 auto;
flex: 0 0 auto;
width: 75%;
}
.col-10 {
-webkit-box-flex: 0;
-ms-flex: 0 0 auto;
flex: 0 0 auto;
width: 83.33333333%;
}
.col-11 {
-webkit-box-flex: 0;
-ms-flex: 0 0 auto;
flex: 0 0 auto;
width: 91.66666667%;
}
.col-12 {
-webkit-box-flex: 0;
-ms-flex: 0 0 auto;
flex: 0 0 auto;
width: 100%;
}
@media (min-width: 576px) {
.col-sm {
-webkit-box-flex: 1;
-ms-flex: 1 0 0%;
flex: 1 0 0%;
}
.col-sm-1 {
-webkit-box-flex: 0;
-ms-flex: 0 0 auto;
flex: 0 0 auto;
width: 8.33333333%;
}
.col-sm-2 {
-webkit-box-flex: 0;
-ms-flex: 0 0 auto;
flex: 0 0 auto;
width: 16.66666667%;
}
.col-sm-3 {
-webkit-box-flex: 0;
-ms-flex: 0 0 auto;
flex: 0 0 auto;
width: 25%;
}
.col-sm-4 {
-webkit-box-flex: 0;
-ms-flex: 0 0 auto;
flex: 0 0 auto;
width: 33.33333333%;
}
.col-sm-5 {
-webkit-box-flex: 0;
-ms-flex: 0 0 auto;
flex: 0 0 auto;
width: 41.66666667%;
}
.col-sm-6 {
-webkit-box-flex: 0;
-ms-flex: 0 0 auto;
flex: 0 0 auto;
width: 50%;
}
.col-sm-7 {
-webkit-box-flex: 0;
-ms-flex: 0 0 auto;
flex: 0 0 auto;
width: 58.33333333%;
}
.col-sm-8 {
-webkit-box-flex: 0;
-ms-flex: 0 0 auto;
flex: 0 0 auto;
width: 66.66666667%;
}
.col-sm-9 {
-webkit-box-flex: 0;
-ms-flex: 0 0 auto;
flex: 0 0 auto;
width: 75%;
}
.col-sm-10 {
-webkit-box-flex: 0;
-ms-flex: 0 0 auto;
flex: 0 0 auto;
width: 83.33333333%;
}
.col-sm-11 {
-webkit-box-flex: 0;
-ms-flex: 0 0 auto;
flex: 0 0 auto;
width: 91.66666667%;
}
.col-sm-12 {
-webkit-box-flex: 0;
-ms-flex: 0 0 auto;
flex: 0 0 auto;
width: 100%;
}
}
@media (min-width: 768px) {
.col-md {
-webkit-box-flex: 1;
-ms-flex: 1 0 0%;
flex: 1 0 0%;
}
.col-md-1 {
-webkit-box-flex: 0;
-ms-flex: 0 0 auto;
flex: 0 0 auto;
width: 8.33333333%;
}
.col-md-2 {
-webkit-box-flex: 0;
-ms-flex: 0 0 auto;
flex: 0 0 auto;
width: 16.66666667%;
}
.col-md-3 {
-webkit-box-flex: 0;
-ms-flex: 0 0 auto;
flex: 0 0 auto;
width: 25%;
}
.col-md-4 {
-webkit-box-flex: 0;
-ms-flex: 0 0 auto;
flex: 0 0 auto;
width: 33.33333333%;
}
.col-md-5 {
-webkit-box-flex: 0;
-ms-flex: 0 0 auto;
flex: 0 0 auto;
width: 41.66666667%;
}
.col-md-6 {
-webkit-box-flex: 0;
-ms-flex: 0 0 auto;
flex: 0 0 auto;
width: 50%;
}
.col-md-7 {
-webkit-box-flex: 0;
-ms-flex: 0 0 auto;
flex: 0 0 auto;
width: 58.33333333%;
}
.col-md-8 {
-webkit-box-flex: 0;
-ms-flex: 0 0 auto;
flex: 0 0 auto;
width: 66.66666667%;
}
.col-md-9 {
-webkit-box-flex: 0;
-ms-flex: 0 0 auto;
flex: 0 0 auto;
width: 75%;
}
.col-md-10 {
-webkit-box-flex: 0;
-ms-flex: 0 0 auto;
flex: 0 0 auto;
width: 83.33333333%;
}
.col-md-11 {
-webkit-box-flex: 0;
-ms-flex: 0 0 auto;
flex: 0 0 auto;
width: 91.66666667%;
}
.col-md-12 {
-webkit-box-flex: 0;
-ms-flex: 0 0 auto;
flex: 0 0 auto;
width: 100%;
}
}
@media (min-width: 992px) {
.col-lg {
-webkit-box-flex: 1;
-ms-flex: 1 0 0%;
flex: 1 0 0%;
}
.col-lg-1 {
-webkit-box-flex: 0;
-ms-flex: 0 0 auto;
flex: 0 0 auto;
width: 8.33333333%;
}
.col-lg-2 {
-webkit-box-flex: 0;
-ms-flex: 0 0 auto;
flex: 0 0 auto;
width: 16.66666667%;
}
.col-lg-3 {
-webkit-box-flex: 0;
-ms-flex: 0 0 auto;
flex: 0 0 auto;
width: 25%;
}
.col-lg-4 {
-webkit-box-flex: 0;
-ms-flex: 0 0 auto;
flex: 0 0 auto;
width: 33.33333333%;
}
.col-lg-5 {
-webkit-box-flex: 0;
-ms-flex: 0 0 auto;
flex: 0 0 auto;
width: 41.66666667%;
}
.col-lg-6 {
-webkit-box-flex: 0;
-ms-flex: 0 0 auto;
flex: 0 0 auto;
width: 50%;
}
.col-lg-7 {
-webkit-box-flex: 0;
-ms-flex: 0 0 auto;
flex: 0 0 auto;
width: 58.33333333%;
}
.col-lg-8 {
-webkit-box-flex: 0;
-ms-flex: 0 0 auto;
flex: 0 0 auto;
width: 66.66666667%;
}
.col-lg-9 {
-webkit-box-flex: 0;
-ms-flex: 0 0 auto;
flex: 0 0 auto;
width: 75%;
}
.col-lg-10 {
-webkit-box-flex: 0;
-ms-flex: 0 0 auto;
flex: 0 0 auto;
width: 83.33333333%;
}
.col-lg-11 {
-webkit-box-flex: 0;
-ms-flex: 0 0 auto;
flex: 0 0 auto;
width: 91.66666667%;
}
.col-lg-12 {
-webkit-box-flex: 0;
-ms-flex: 0 0 auto;
flex: 0 0 auto;
width: 100%;
}
}
@media (min-width: 1200px) {
.col-xl-1 {
-webkit-box-flex: 0;
-ms-flex: 0 0 auto;
flex: 0 0 auto;
width: 8.33333333%;
}
.col-xl-2 {
-webkit-box-flex: 0;
-ms-flex: 0 0 auto;
flex: 0 0 auto;
width: 16.66666667%;
}
.col-xl-3 {
-webkit-box-flex: 0;
-ms-flex: 0 0 auto;
flex: 0 0 auto;
width: 25%;
}
.col-xl-4 {
-webkit-box-flex: 0;
-ms-flex: 0 0 auto;
flex: 0 0 auto;
width: 33.33333333%;
}
.col-xl-5 {
-webkit-box-flex: 0;
-ms-flex: 0 0 auto;
flex: 0 0 auto;
width: 41.66666667%;
}
.col-xl-6 {
-webkit-box-flex: 0;
-ms-flex: 0 0 auto;
flex: 0 0 auto;
width: 50%;
}
.col-xl-7 {
-webkit-box-flex: 0;
-ms-flex: 0 0 auto;
flex: 0 0 auto;
width: 58.33333333%;
}
.col-xl-8 {
-webkit-box-flex: 0;
-ms-flex: 0 0 auto;
flex: 0 0 auto;
width: 66.66666667%;
}
.col-xl-9 {
-webkit-box-flex: 0;
-ms-flex: 0 0 auto;
flex: 0 0 auto;
width: 75%;
}
.col-xl-10 {
-webkit-box-flex: 0;
-ms-flex: 0 0 auto;
flex: 0 0 auto;
width: 83.33333333%;
}
.col-xl-11 {
-webkit-box-flex: 0;
-ms-flex: 0 0 auto;
flex: 0 0 auto;
width: 91.66666667%;
}
.col-xl-12 {
-webkit-box-flex: 0;
-ms-flex: 0 0 auto;
flex: 0 0 auto;
width: 100%;
}
}
@media (min-width: 1400px) {
  /* Auto column: grows equally with its siblings. */
  .col-xxl {
    -webkit-box-flex: 1;
    -ms-flex: 1 0 0%;
    flex: 1 0 0%;
  }
  /* Fixed-width xxl columns share one sizing model: no grow, no shrink,
     basis auto — the explicit width below sets the span. */
  .col-xxl-1,
  .col-xxl-2,
  .col-xxl-3,
  .col-xxl-4,
  .col-xxl-5,
  .col-xxl-6,
  .col-xxl-7,
  .col-xxl-8,
  .col-xxl-9,
  .col-xxl-10,
  .col-xxl-11,
  .col-xxl-12 {
    -webkit-box-flex: 0;
    -ms-flex: 0 0 auto;
    flex: 0 0 auto;
  }
  /* Column spans as fractions of the 12-column grid. */
  .col-xxl-1 { width: 8.33333333%; }
  .col-xxl-2 { width: 16.66666667%; }
  .col-xxl-3 { width: 25%; }
  .col-xxl-4 { width: 33.33333333%; }
  .col-xxl-5 { width: 41.66666667%; }
  .col-xxl-6 { width: 50%; }
  .col-xxl-7 { width: 58.33333333%; }
  .col-xxl-8 { width: 66.66666667%; }
  .col-xxl-9 { width: 75%; }
  .col-xxl-10 { width: 83.33333333%; }
  .col-xxl-11 { width: 91.66666667%; }
  .col-xxl-12 { width: 100%; }
}
/* Pill-shaped outline button in the theme green. */
.btn {
  font-size: 14px;
  border-radius: 2em;
  padding: 0.75em 1.5em;
  cursor: pointer;
  background: none;
  letter-spacing: 1px;
  color: #75cea6;
  /* Single border declaration; the earlier bare `border: 1px solid;`
     was dead code, fully overridden by this one. */
  border: #75cea6 1px solid;
  -webkit-transition: 250ms ease-out;
  transition: 250ms ease-out;
  text-transform: capitalize;
}
/* Invert to a filled button on hover; animated by the transition above. */
.btn:hover {
  background-color: #75cea6;
  color: #fff;
}
/* Todo card container: vertical flex column, form pinned to the top and
   submit buttons to the bottom via space-between, rounded with a soft
   shadow. Prefixed declarations precede each standard one for legacy
   flexbox engines. */
.todo {
  height: 50vh;
  display: -webkit-box;
  display: -ms-flexbox;
  display: flex;
  -webkit-box-orient: vertical;
  -webkit-box-direction: normal;
  -ms-flex-direction: column;
  flex-direction: column;
  -webkit-box-align: center;
  -ms-flex-align: center;
  align-items: center;
  -webkit-box-pack: justify;
  -ms-flex-pack: justify;
  justify-content: space-between;
  border-radius: 1em;
  background: #fff;
  overflow: hidden;
  -webkit-box-shadow: 0 0 5px rgba(25, 25, 25, 0.25);
  box-shadow: 0 0 5px rgba(25, 25, 25, 0.25);
}
/* New-item form row at the top of the card. */
.todo__form {
  width: 100%;
  padding: 1.5rem 1rem 0 1rem;
  display: -webkit-box;
  display: -ms-flexbox;
  display: flex;
}
/* Text input styled to match the pill buttons (same radius/padding). */
.todo__form-input {
  width: 100%;
  font-size: 14px;
  margin: 0 .5em;
  border-radius: 2em;
  padding: 0.75em 1.5em;
  background: none;
  border: #e3e3e3 1px solid;
  outline: none;
}
/* Scrollable item list; flex: 1 lets it absorb the card's free height.
   The fixed 400px width is relaxed to 100% in the max-width: 576px
   media query further down the file. */
.todo__list {
  width: 400px;
  height: 350px;
  overflow-y: scroll;
  overflow-x: hidden;
  padding: 0 1rem;
  -webkit-box-flex: 1;
  -ms-flex: 1;
  flex: 1;
  margin: 20px 0;
}
/* One todo row: text on the left, action buttons on the right, underlined
   in the theme green. --animate-duration tunes the animate.css entry/exit
   speed — presumably the items use animate.css classes; confirm in markup. */
.todo__item {
  display: -webkit-box;
  display: -ms-flexbox;
  display: flex;
  -webkit-box-pack: justify;
  -ms-flex-pack: justify;
  justify-content: space-between;
  -webkit-box-align: center;
  -ms-flex-align: center;
  align-items: center;
  padding: .5em;
  margin-bottom: .5em;
  color: #75cea6;
  border-bottom: 1px solid #75cea6;
  --animate-duration: 0.3s;
}
/* Clickable item text; positioned so the ::after underline can anchor to it. */
.todo__item-content {
  cursor: pointer;
  position: relative;
}
.todo__item-content:hover {
  color: #19570d;
}
/* Animated underline: starts at zero width and grows to 100% on hover. */
.todo__item-content::after {
  content: '';
  width: 0;
  height: 2px;
  position: absolute;
  left: 0;
  bottom: -3px;
  background-color: #19570d;
  -webkit-transition: all 0.3s ease;
  transition: all 0.3s ease;
}
.todo__item-content:hover::after {
  width: 100%;
}
/* Wrapper centering the per-item action buttons. */
.todo__item-buttons {
  display: -webkit-box;
  display: -ms-flexbox;
  display: flex;
  -webkit-box-pack: center;
  -ms-flex-pack: center;
  justify-content: center;
  -webkit-box-align: center;
  -ms-flex-align: center;
  align-items: center;
}
/* Borderless transparent input shown while an item is being edited. */
.todo__edit-input {
  background-color: transparent;
  border: none;
  outline: none;
  width: 100%;
}
/* Round 30x30 icon button in the theme green.
   The original `border: 0;` reset was dead code — fully overridden by
   `border: 1px solid;` below in the same rule — so it has been removed. */
.todo__item-btn {
  padding: 0;
  background-color: transparent;
  width: 30px;
  height: 30px;
  border: 1px solid;
  border-radius: 50%;
  display: -webkit-box;
  display: -ms-flexbox;
  display: flex;
  -webkit-box-pack: center;
  -ms-flex-pack: center;
  justify-content: center;
  -webkit-box-align: center;
  -ms-flex-align: center;
  align-items: center;
  margin-left: 5px;
  color: #75cea6;
  border-color: #75cea6;
  -webkit-transition: 200ms;
  transition: 200ms;
  cursor: pointer;
}
/* Edit button: filled green variant. */
.todo__item-btn.pencil {
  background-color: #75cea6;
  color: #fff;
}
/* Delete button: filled red variant. */
.todo__item-btn.delete {
  background-color: #ff5252;
  color: #fff;
  border-color: #ff5252;
}
/* Bottom row of the card holding the submit buttons, spread edge to edge.
   The original rule declared `justify-content: center` (plus its two
   prefixed twins) and then immediately overrode it with `space-between`;
   the dead center declarations have been removed. */
.todo__submit-buttons {
  width: 100%;
  display: -webkit-box;
  display: -ms-flexbox;
  display: flex;
  -webkit-box-align: center;
  -ms-flex-align: center;
  align-items: center;
  padding: 0 1rem 1.5rem 1rem;
  -webkit-box-pack: justify;
  -ms-flex-pack: justify;
  justify-content: space-between;
}
/* VISUALLY-HIDDEN
   Hides content visually while keeping it available to screen readers.
   Added `white-space: nowrap` (prevents hidden text wrapping into a
   1px-wide column and being read word-by-word) and the modern `clip-path`
   alongside the deprecated-but-still-supported `clip`, per the standard
   accessible-hiding pattern. */
.visually-hidden {
  position: absolute;
  width: 1px;
  height: 1px;
  margin: -1px;
  border: 0;
  padding: 0;
  clip: rect(0 0 0 0);
  clip-path: inset(50%);
  overflow: hidden;
  white-space: nowrap;
}
/* Full-viewport green backdrop that centers the todo card both ways. */
.main {
  width: 100%;
  height: 100vh;
  display: -webkit-box;
  display: -ms-flexbox;
  display: flex;
  -webkit-box-pack: center;
  -ms-flex-pack: center;
  justify-content: center;
  -webkit-box-align: center;
  -ms-flex-align: center;
  align-items: center;
  background-color: #75cea6;
}
/* Responsive .container caps.
   NOTE(review): these are descending max-width queries, so at narrow
   viewports several blocks match simultaneously and the LAST one in source
   order (the narrowest breakpoint) wins — the source order is load-bearing;
   do not reorder these blocks. */
@media only screen and (max-width: 1400px) {
  .container {
    width: 100%;
    max-width: 1320px;
    padding-left: 20px;
    padding-right: 20px;
    margin-left: auto;
    margin-right: auto;
  }
}
@media only screen and (max-width: 1200px) {
  .container {
    width: 100%;
    max-width: 1140px;
    padding-left: 20px;
    padding-right: 20px;
    margin-left: auto;
    margin-right: auto;
  }
}
@media only screen and (max-width: 992px) {
  .container {
    width: 100%;
    max-width: 960px;
    padding-left: 20px;
    padding-right: 20px;
    margin-left: auto;
    margin-right: auto;
  }
}
@media only screen and (max-width: 768px) {
  .container {
    width: 100%;
    max-width: 720px;
    padding-left: 20px;
    padding-right: 20px;
    margin-left: auto;
    margin-right: auto;
  }
}
@media only screen and (max-width: 576px) {
  .container {
    width: 100%;
    max-width: 540px;
    padding-left: 20px;
    padding-right: 20px;
    margin-left: auto;
    margin-right: auto;
  }
  /* Phone layout: let the todo card and its list fill the viewport width. */
  .todo {
    width: 100%;
    margin: 0 30px;
  }
  .todo__list {
    width: 100%;
  }
  /* Stack the submit buttons vertically on phones. */
  .todo__submit-buttons {
    -webkit-box-orient: vertical;
    -webkit-box-direction: normal;
    -ms-flex-direction: column;
    flex-direction: column;
  }
  .todo__submit-buttons .submit-btn__item {
    width: 100%;
    margin-bottom: 15px;
  }
}
/*# sourceMappingURL=main.css.map */
|
import React from 'react';
import { createRoot } from 'react-dom/client';
import { Provider } from 'react-redux';
import { createStore, combineReducers, applyMiddleware } from 'redux';
import reduxThunk from 'redux-thunk';
import { App } from './Components/App/app';
import { ticketReducer } from './Reducers/get-ticket-reducer';
const rootReducer = combineReducers({
ticketReducer,
});
const loggerMiddleware = (store) => (next) => (action) => {
const result = next(action);
console.log('Middleware', store.getState());
return result;
};
export const store = createStore(rootReducer, applyMiddleware(loggerMiddleware, reduxThunk));
const container = document.getElementById('root');
const root = createRoot(container);
root.render(
<Provider store={store}>
<App />
</Provider>
);
|
# pidSpryC, E. coli DH5alpha ftsz gene, guides 2 & 5-9
# Growth-curve analysis of a 96-well plate run (2023-06-13) with gcplyr:
# import -> tidy -> annotate with plate design -> plot OD over time.

# load libraries
library(gcplyr)
library(dplyr)
library(ggplot2)
library(lubridate)

# import data as wide format (rows = timepoints, columns = wells)
wide_data_0613 <- read_wides("061323_ftsZpidSpryC_cleandata.csv")
str(wide_data_0613)
head(wide_data_0613)

# transform to tidy format (one row per well per timepoint)
tidy_data_0613 <- trans_wide_to_tidy(
  wides = wide_data_0613,
  id_cols = c("file", "Time", "Temp"))
str(tidy_data_0613)
head(tidy_data_0613)

# design info: annotate the 8x12 plate with guide identity, ATC dose and
# replicate number
design_0613 <- make_design(
  output_format = "tidy", # set to "blocks" to check that it's correct, then save as "tidy"
  nrows = 8, ncols = 12, lookup_tbl_start = "A",
  Guide = list(
    c("empty", "2A", "5", "6", "7", "8", "9", "none"), # values
    1:8, # rows to apply to (those left out will be filled in with NA)
    1:12, # columns to apply to
    "ABCDEFGH", # pattern of the values
    FALSE # indicate whether filled by row (TRUE) or by column (FALSE)
  ), # set design for guide: one guide per plate row
  ATC_uM = list(
    c(0,0,0,0.1,0.1,0.1,0.4,0.4,0.4,1,1,1),
    1:8,
    1:12,
    "ABCDEFGHIJKL",
    TRUE
  ), # set design for ATC concentration: triplicate dose blocks across columns
  replicates = list(
    c(1,2,3,1,2,3,1,2,3,1,2,3),
    1:8,
    1:12,
    "ABCDEFGHIJKL",
    TRUE
  )
)
head(design_0613)

# merge data and design data frames
data_design_merged_0613 <- merge_dfs(tidy_data_0613, design_0613)
str(data_design_merged_0613)
head(data_design_merged_0613)

# convert time to numeric hours (from the instrument's HH:MM:SS strings)
data_design_merged_0613$Time <- time_length(hms(data_design_merged_0613$Time), unit = "hour")
head(data_design_merged_0613)

# reorder Well and Guide levels so facets appear in plate order
data_design_merged_0613$Well <- factor(data_design_merged_0613$Well,
  levels=paste(rep(LETTERS[1:8], each=12), 1:12, sep=""))
data_design_merged_0613$Guide <- factor(data_design_merged_0613$Guide,
  levels=c("empty", "2A", "5", "6", "7", "8", "9", "none"))
str(data_design_merged_0613)

# plot data: per-well curves laid out like the physical plate
ggplot(data = data_design_merged_0613, aes(x=Time, y=Measurements, color=ATC_uM)) +
  geom_line() +
  facet_wrap(~Well, nrow = 8, ncol = 12)
# guide x replicate grid, colored by ATC dose
ggplot(data = data_design_merged_0613, aes(x=Time, y=Measurements, color=ATC_uM)) +
  geom_line() +
  facet_grid(rows=vars(Guide), cols = vars(replicates))
# guide x ATC-dose grid, colored by replicate
ggplot(data = data_design_merged_0613, aes(x=Time, y=Measurements, color=replicates)) +
  geom_line() +
  facet_grid(rows=vars(Guide), cols = vars(ATC_uM))
|
import React from 'react';
import { render, screen, fireEvent, waitFor } from '@testing-library/react';
import ProfilePictureUpload from './ProfilePictureUpload';
import { uploadProfilePicture } from '../api/api';

// Mock the uploadProfilePicture API call so no network request is made.
jest.mock('../api/api', () => ({
  uploadProfilePicture: jest.fn(),
}));

describe('ProfilePictureUpload', () => {
  const mockUserId = 'testUserId';
  const mockToken = 'mockToken';

  // Reset mock call history and seed the auth token before each test;
  // the component presumably reads the token from localStorage — confirm
  // against ProfilePictureUpload's implementation.
  beforeEach(() => {
    jest.clearAllMocks();
    localStorage.setItem('token', mockToken);
  });

  afterEach(() => {
    localStorage.removeItem('token');
  });

  test('renders upload form', () => {
    render(<ProfilePictureUpload userId={mockUserId} />);
    expect(screen.getByText('Upload')).toBeInTheDocument();
  });

  // Submitting with no file selected should surface a validation message.
  test('displays error if no file is selected', async () => {
    render(<ProfilePictureUpload userId={mockUserId} />);
    fireEvent.submit(screen.getByText('Upload'));
    await waitFor(() => {
      expect(screen.getByText('Please select a file to upload.')).toBeInTheDocument();
    });
  });

  // Happy path: file chosen, API resolves, success message shown.
  test('uploads file successfully', async () => {
    const mockResponse = { data: { message: 'Profile picture uploaded successfully' } };
    uploadProfilePicture.mockResolvedValue(mockResponse);
    render(<ProfilePictureUpload userId={mockUserId} />);
    const file = new File(['dummy content'], 'example.png', { type: 'image/png' });
    fireEvent.change(screen.getByLabelText(/choose file/i), {
      target: { files: [file] },
    });
    fireEvent.submit(screen.getByText('Upload'));
    await waitFor(() => {
      expect(uploadProfilePicture).toHaveBeenCalledWith(
        mockUserId,
        expect.any(FormData),
        mockToken
      );
      expect(screen.getByText('Profile picture uploaded successfully.')).toBeInTheDocument();
    });
  });

  // Failure path: API rejects, the error message includes the cause.
  test('displays error if upload fails', async () => {
    const mockError = new Error('Upload failed');
    uploadProfilePicture.mockRejectedValue(mockError);
    render(<ProfilePictureUpload userId={mockUserId} />);
    const file = new File(['dummy content'], 'example.png', { type: 'image/png' });
    fireEvent.change(screen.getByLabelText(/choose file/i), {
      target: { files: [file] },
    });
    fireEvent.submit(screen.getByText('Upload'));
    await waitFor(() => {
      expect(uploadProfilePicture).toHaveBeenCalledWith(
        mockUserId,
        expect.any(FormData),
        mockToken
      );
      expect(
        screen.getByText('Error uploading profile picture: Upload failed')
      ).toBeInTheDocument();
    });
  });
});
|
function Xobj = setDesignOfExperiments(Xobj,varargin)
%SETDESIGNOFEXPERIMENTS Define the realizations (samples) of the design of
%experiments on the Input object.
%
% Required property/value pairs:
%   'CSnames'  - cell array of variable names to be set; must be a subset
%                of the names available in the Input object
%   'Msamples' - matrix of sample values ('Mvalues' accepted as alias),
%                one column per variable listed in CSnames
%
% Author:
% Institute for Risk and Uncertainty, University of Liverpool, UK
% email address: openengine@cossan.co.uk
% Website: http://www.cossan.co.uk
% This file is part of openCOSSAN. The open general purpose matlab
% toolbox for numerical analysis, risk and uncertainty quantification.
%
% openCOSSAN is free software: you can redistribute it and/or modify
% it under the terms of the GNU General Public License as published by
% the Free Software Foundation, either version 3 of the License.
%
% openCOSSAN is distributed in the hope that it will be useful,
% but WITHOUT ANY WARRANTY; without even the implied warranty of
% MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
% GNU General Public License for more details.
%
% You should have received a copy of the GNU General Public License
% along with openCOSSAN. If not, see <http://www.gnu.org/licenses/>.

OpenCossan.validateCossanInputs(varargin{:});

if isempty(varargin)
    error('COSSAN:Input:setDesignOfExperiments',...
        'The set method makes no sense without arguments');
end

% Parse the name/value argument pairs
for k=1:2:length(varargin)
    switch lower(varargin{k})
        case {'csnames'}
            Cnames=varargin{k+1};
            assert(all(ismember(Cnames,Xobj.Cnames)),...
                'COSSAN:Input:setDesignOfExperiments',...
                ['Name of the design variable does not match with the names of the design variable present in the input object\n',...
                '\nAvailable DesignVariables: ' sprintf('\n* "%s"',Xobj.Cnames{:}), ...
                '\nRequired DesignVariables: ' sprintf('\n* "%s"',Cnames{:})])
        case {'msamples','mvalues'}
            Msamples=varargin{k+1};
        otherwise
            error('COSSAN:Input:setDesignOfExperiments', ...
                'The PropertyName %s is not valid',varargin{k});
    end
end

% Fail early with a clear message if a mandatory argument was omitted;
% without these guards the checks below would raise an obscure
% "Undefined function or variable" error instead.
assert(logical(exist('Cnames','var')),...
    'COSSAN:Input:setDesignOfExperiments',...
    'The mandatory property CSnames has not been provided');
assert(logical(exist('Msamples','var')),...
    'COSSAN:Input:setDesignOfExperiments',...
    'The mandatory property Msamples (or Mvalues) has not been provided');

assert(size(Msamples,2)==length(Cnames),...
    'COSSAN:Input:setDesignOfExperiments',...
    'Number of columns of Msamples is %i, number of variables %i ',size(Msamples,2),length(Cnames))

%% DO SET
% Map each requested name onto the design-variable / random-variable columns
VmappingDV=find(ismember(Cnames,Xobj.CnamesDesignVariable));
VmappingRV=find(ismember(Cnames,Xobj.CnamesRandomVariable));

% Set samples for the design of experiments and the random variables,
% reusing the existing Samples object when one is already attached.
if isempty(Xobj.Xsamples)
    Xsmp=Samples('Xinput',Xobj, ...
        'Msamplesdoedesignvariables',Msamples(:,VmappingDV),...
        'MsamplesPhysicalSpace',Msamples(:,VmappingRV));
else
    Xsmp=Xobj.Xsamples.set('Msamplesdoedesignvariables',Msamples(:,VmappingDV),...
        'MsamplesPhysicalSpace',Msamples(:,VmappingRV));
end

Xobj.Xsamples=Xsmp;
end
|
using System;
using System.Collections.Generic;
using System.Threading.Tasks;
using Core;
using Microsoft.AspNetCore.Mvc;
using Microsoft.Extensions.Logging;
using Moq;
using Syst.Controllers;
using Xunit;
namespace Syst.Tests;
/// <summary>
/// Unit tests for CandidatesController. Repository and logger are mocked,
/// so only the controller's status-code / payload behavior is exercised.
/// Async tests return <see cref="Task"/> instead of void so the xUnit
/// runner can await them and report failures/exceptions correctly.
/// </summary>
public class CandidateControllerTests
{
    // Shared fixture objects used across the tests
    static readonly QuizDTO quiz1 = new QuizDTO
    {
        Id = 20,
        Name = "Systematic Festival Quiz",
        Questions = null!,
        Events = null!,
        Candidates = null!
    };

    static readonly CandidateDTO candidate1 = new CandidateDTO {
        Id = 1,
        Name = "Bob Jensen",
        Email = "bobj@dtu.dk",
        StudyProgram = "Datalogi",
        University = "DTU",
        GraduationDate = "20-06-2024",
        Events = null!,
        Quiz = quiz1,
        IsUpvoted = false
    };

    static readonly CandidateDTO candidate2 = new CandidateDTO
    {
        Id = 2,
        Name = "Ib Hansen",
        Email = "ibha@ruc.dk",
        StudyProgram = "Informatik",
        University = "RUC",
        GraduationDate = "27-05-2023",
        Events = null!,
        Quiz = quiz1,
        IsUpvoted = false
    };

    // NOTE(review): event1 and ValidUniversities are referenced only by the
    // commented-out GraphData test at the bottom; kept for when it is revived.
    static readonly EventDTO event1 = new EventDTO{
        Id = 1,
        Name = "TechBBQ",
        Date = "03-21-2022",
        Location = "Copenhagen",
        Candidates = null!,
        Quiz = quiz1,
        Rating = 3.5,
    };

    static readonly HashSet<string> ValidUniversities = new HashSet<string> {
        "Aalborg University",
        "Aarhus University",
        "Copenhagen Business School",
        "IT-University of Copenhagen",
        "Roskilde University",
        "Technical University of Denmark",
        "University of Copenhagen",
        "University of Southern Denmark"
    };

    // Testing starts here

    [Fact]
    public void Get_all_returns_all_candidates() {
        // Arrange
        var logger = new Mock<ILogger<CandidatesController>>();
        var repository = new Mock<ICandidateRepository>();
        var candidates = new List<CandidateDTO> { candidate1, candidate2 };
        repository.Setup(m => m.ReadAll()).ReturnsAsync(candidates);
        var controller = new CandidatesController(logger.Object, repository.Object);
        // Act
        var response = controller.Get();
        // Assert
        Assert.Equal(candidates, response.Result);
    }

    [Fact]
    public async Task Get_existing_id_return_Candidate() {
        // Arrange
        var logger = new Mock<ILogger<CandidatesController>>();
        var repository = new Mock<ICandidateRepository>();
        var expected = candidate1;
        repository.Setup(m => m.Read(1)).ReturnsAsync((Status.Found, expected));
        var controller = new CandidatesController(logger.Object, repository.Object);
        // Act
        var response = await controller.Get(1);
        // Assert
        Assert.Equal(expected, response.Value);
    }

    [Fact]
    public async Task Get_non_existing_id_return_NotFound() {
        // Arrange
        var logger = new Mock<ILogger<CandidatesController>>();
        var repository = new Mock<ICandidateRepository>();
        repository.Setup(m => m.Read(99)).ReturnsAsync((Status.NotFound, default(CandidateDTO)));
        var controller = new CandidatesController(logger.Object, repository.Object);
        // Act
        var response = await controller.Get(99);
        // Assert
        Assert.IsType<NotFoundResult>(response.Result);
    }

    [Fact]
    public async Task Post_adds_candidate_to_repository() {
        // Arrange
        var logger = new Mock<ILogger<CandidatesController>>();
        var repository = new Mock<ICandidateRepository>();
        var candidate3 = new CreateCandidateDTO(3, "Hanne Nielsen", "hani@itu.dk","BSc" , "SWU", "ITU", "25-01-2023", true, new DateTime(2022, 05, 30));
        var candidates = new List<CreateCandidateDTO> { candidate3 };
        var createdCandidate = new CreateCandidateDTO(4, "Sanne Pedersen", "sape@itu.dk","BSc", "GBI", "ITU", "25-01-2023", false, new DateTime(2022, 05, 30));
        // Mirror the repository side effect so the list reflects the create call
        repository.Setup(m => m.Create(createdCandidate)).Callback(() => candidates.Add(createdCandidate));
        var controller = new CandidatesController(logger.Object, repository.Object);
        // Act
        var response = await controller.Post(createdCandidate);
        // Assert
        Assert.IsType<CreatedAtActionResult>(response);
        Assert.Equal(2, candidates.Count);
    }

    [Fact]
    public async Task Post_existing_id_returns_StatusConflict() {
        // Arrange
        var logger = new Mock<ILogger<CandidatesController>>();
        var repository = new Mock<ICandidateRepository>();
        var candidates = new List<CandidateDTO> {candidate1};
        var createdCandidate = new CreateCandidateDTO(1, "Sanne Pedersen", "sape@itu.dk", "BSc", "GBI", "ITU", "25-01-2023", false, new DateTime(2022, 05, 30));
        repository.Setup(m => m.Create(createdCandidate)).ReturnsAsync(() => (Status.Conflict, 1));
        var controller = new CandidatesController(logger.Object, repository.Object);
        // Act
        var response = await controller.Post(createdCandidate);
        // Assert
        Assert.IsType<ConflictObjectResult>(response);
        Assert.Equal(1, candidates.Count);
    }

    [Fact]
    public async Task Post_null_DTO_returns_StatusConflict()
    {
        // Arrange
        var logger = new Mock<ILogger<CandidatesController>>();
        var repository = new Mock<ICandidateRepository>();
        var newCandidate = default(CreateCandidateDTO);
        repository.Setup(m => m.Create(newCandidate)).ReturnsAsync(() => (Status.Conflict, 0));
        var controller = new CandidatesController(logger.Object, repository.Object);
        // Act
        var response = await controller.Post(newCandidate);
        // Assert
        Assert.IsType<ConflictObjectResult>(response);
    }

    [Fact]
    public async Task Delete_non_existing_id_return_NotFound() {
        // Arrange
        var logger = new Mock<ILogger<CandidatesController>>();
        var repository = new Mock<ICandidateRepository>();
        repository.Setup(m => m.Delete(90)).ReturnsAsync(Status.NotFound);
        var controller = new CandidatesController(logger.Object, repository.Object);
        // Act
        var response = await controller.Delete(90);
        // Assert
        Assert.IsType<NotFoundObjectResult>(response);
    }

    [Fact]
    public async Task Delete_existing_id_return_NoContent() {
        // Arrange
        var logger = new Mock<ILogger<CandidatesController>>();
        var repository = new Mock<ICandidateRepository>();
        repository.Setup(m => m.Delete(1)).ReturnsAsync(Status.Deleted);
        var controller = new CandidatesController(logger.Object, repository.Object);
        // Act
        var response = await controller.Delete(1);
        // Assert
        Assert.IsType<NoContentResult>(response);
    }

    [Fact]
    public async Task Put_returns_status_updated_when_given_existing_id()
    {
        // Arrange
        var logger = new Mock<ILogger<CandidatesController>>();
        var repository = new Mock<ICandidateRepository>();
        var newCandidate = new CandidateDTO(4, "Sanne Pedersen", "sape@itu.dk", "BSc", "GBI", "ITU", "25-01-2023", 0.0,null!, quiz1, false, new DateTime(2022, 05, 30));
        repository.Setup(m => m.Update(2, newCandidate)).ReturnsAsync(Status.Updated);
        var controller = new CandidatesController(logger.Object, repository.Object);
        // Act
        var response = await controller.Put(2, newCandidate);
        // Assert
        Assert.IsType<NoContentResult>(response);
    }

    [Fact]
    public async Task Put_returns_status_notFound_when_given_nonexisting_idAsync()
    {
        // Arrange
        var logger = new Mock<ILogger<CandidatesController>>();
        var repository = new Mock<ICandidateRepository>();
        var newCandidate = new CandidateDTO(4, "Sanne Pedersen", "sape@itu.dk", "BSc", "GBI", "ITU", "25-01-2023", 0.0,null!, quiz1, false, new DateTime(2022, 05, 30));
        repository.Setup(m => m.Update(99, newCandidate)).ReturnsAsync(Status.NotFound);
        var controller = new CandidatesController(logger.Object, repository.Object);
        // Act
        var response = await controller.Put(99, newCandidate);
        // Assert
        Assert.IsType<NotFoundObjectResult>(response);
    }

    [Fact]
    public async Task UpvotePut_returns_status_upvote_updated_when_given_existing_id()
    {
        // Arrange
        var logger = new Mock<ILogger<CandidatesController>>();
        var repository = new Mock<ICandidateRepository>();
        repository.Setup(m => m.UpdateUpVote(2)).ReturnsAsync(Status.Updated);
        var controller = new CandidatesController(logger.Object, repository.Object);
        // Act
        var response = await controller.PutUpVote(2);
        // Assert
        Assert.IsType<NoContentResult>(response);
    }

    [Fact]
    public async Task UpvotePut_returns_status_notFound_when_given_nonexisting_idAsync()
    {
        // Arrange
        var logger = new Mock<ILogger<CandidatesController>>();
        var repository = new Mock<ICandidateRepository>();
        repository.Setup(m => m.UpdateUpVote(99)).ReturnsAsync(Status.NotFound);
        var controller = new CandidatesController(logger.Object, repository.Object);
        // Act
        var response = await controller.PutUpVote(99);
        // Assert
        Assert.IsType<NotFoundObjectResult>(response);
    }

    [Fact]
    public async Task Post_answer_to_candidate() {
        // Arrange
        var logger = new Mock<ILogger<CandidatesController>>();
        var repository = new Mock<ICandidateRepository>();
        var answer = new AnswersDTO(1, 20, 1, new string[] {"Answer 1", "Answer 2", "Answer 3"});
        repository.Setup(m => m.AddAnswer(1, answer)).ReturnsAsync(() => (Status.Updated));
        var controller = new CandidatesController(logger.Object, repository.Object);
        // Act
        var response = await controller.PostAnswer(1, answer);
        // Assert
        Assert.IsType<NoContentResult>(response);
    }

    [Fact]
    public async Task PostAnswer_returns_status_notFound_when_given_nonexisting_idAsync(){
        // Arrange
        var logger = new Mock<ILogger<CandidatesController>>();
        var repository = new Mock<ICandidateRepository>();
        var answer = new AnswersDTO(1, 20, 1, new string[] {"Answer 1", "Answer 2", "Answer 3"});
        repository.Setup(m => m.AddAnswer(40, answer)).ReturnsAsync(() => Status.NotFound);
        var controller = new CandidatesController(logger.Object, repository.Object);
        // Act
        var response = await controller.PostAnswer(40, answer);
        // Assert
        Assert.IsType<NotFoundObjectResult>(response);
    }

    /*[Fact]
    public void Get_graphData_when_having_a_list_of_universities() {
        //Arrange
        var logger = new Mock<ILogger<CandidatesController>>();
        var repository = new Mock<ICandidateRepository>();
        repository.Setup(m => m.GraphData(ValidUniversities)).Returns(() => (int[]));
        var controller = new CandidatesController(logger.Object, repository.Object);
        //Act
        var response = controller.GraphData(ValidUniversities);
        //Assert
        Assert.IsType<int[]>(response);
    }*/
}
|
package com.jz.quoteoperation.common.util;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.commons.lang3.StringUtils;
import java.beans.BeanInfo;
import java.beans.Introspector;
import java.beans.PropertyDescriptor;
import java.lang.reflect.Method;
import java.util.HashMap;
import java.util.Map;
/**
* @author ck
* @date 2016/3/16
*/
/**
 * Utility methods for converting between JavaBeans, Maps and JSON strings.
 *
 * <p>All methods are null-tolerant and swallow conversion errors, returning
 * {@code null} (or {@code ""} for JSON output) instead of throwing.</p>
 *
 * @author ck
 * @date 2016/3/16
 */
public class BeanUtil {

    /**
     * Shared mapper instance. A default-configured {@link ObjectMapper} is
     * documented as thread-safe, and constructing one per call (as the
     * original code did) is needlessly expensive.
     */
    private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

    /**
     * Converts a JavaBean into a Map keyed by property name, using
     * Introspector-based reflection.
     *
     * @param obj the bean to convert; may be {@code null}
     * @return a map of property name to value, or {@code null} if
     *         {@code obj} is {@code null}; on reflection failure the
     *         properties collected so far are returned
     */
    public static Map<String, Object> transBean2Map(Object obj) {
        if (obj == null) {
            return null;
        }
        Map<String, Object> map = new HashMap<String, Object>();
        try {
            BeanInfo beanInfo = Introspector.getBeanInfo(obj.getClass());
            PropertyDescriptor[] propertyDescriptors = beanInfo.getPropertyDescriptors();
            for (PropertyDescriptor property : propertyDescriptors) {
                String key = property.getName();
                // Skip the implicit "class" property exposed by Object#getClass()
                if (!key.equals("class")) {
                    Method getter = property.getReadMethod();
                    // Write-only properties have no read method; skip them
                    // instead of aborting the whole loop with an NPE.
                    if (getter != null) {
                        Object value = getter.invoke(obj);
                        map.put(key, value);
                    }
                }
            }
        } catch (Exception e) {
            // Best-effort: report and return whatever was collected so far
            System.out.println("transBean2Map Error " + e);
        }
        return map;
    }

    /**
     * Deserializes a JSON string into an instance of the given class.
     *
     * @param jsons JSON input; may be {@code null} or empty
     * @param clazz target type
     * @return the deserialized object, or {@code null} on empty input or
     *         parse failure
     */
    public static <T> T jsonToObject(String jsons, Class<T> clazz) {
        try {
            if (StringUtils.isEmpty(jsons)) {
                return null;
            }
            return OBJECT_MAPPER.readValue(jsons, clazz);
        } catch (Exception e) {
            e.printStackTrace();
            return null;
        }
    }

    /**
     * Serializes an object to its JSON representation.
     *
     * @param obj object to serialize; may be {@code null}
     * @return the JSON string, or {@code ""} on {@code null} input or
     *         serialization failure
     */
    public static String objectToJson(Object obj) {
        try {
            if (obj == null) {
                return "";
            }
            return OBJECT_MAPPER.writeValueAsString(obj);
        } catch (Exception e) {
            e.printStackTrace();
            return "";
        }
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.