text
stringlengths 184
4.48M
|
|---|
<!DOCTYPE html>
<html lang="pt-BR">
<head>
<meta charset="UTF-8">
<title>Orientação a Objetos (OO)</title>
<style>
body {
font-family: Arial, Helvetica, sans-serif;
}
article {
margin: 5px;
padding: 5px;
border: 3px solid seagreen;
}
</style>
</head>
<body>
<h1>JavaScript OO</h1>
<section>
<article>
<h2>Classe simples</h2>
<script>
// Models a person with a first name, a surname and a birth year.
class Pessoa {
    constructor(nome, sobrenome, ano) {
        this.nome = nome;
        this.sobrenome = sobrenome;
        this.ano = ano;
    }

    // Full name: "<nome> <sobrenome>".
    nomeCompleto() {
        return `${this.nome} ${this.sobrenome}`;
    }

    // Short self-introduction sentence (pt-BR).
    apresentarSe() {
        return `Oi, eu sou o(a) ${this.nomeCompleto()} e nasci no ano ${this.ano}.`;
    }
}
// Instantiate three Pessoa objects and write each introduction to the page.
const pessoa = new Pessoa("Renan", "Soares", 1997);
const pessoa2 = new Pessoa("Maria", "Silva", 1990);
const pessoa3 = new Pessoa("João", "Souza", 1995);
for (const p of [pessoa, pessoa2, pessoa3]) {
document.write(p.apresentarSe() + "<br>");
}
</script>
</article>
<article>
<h2>Herança</h2>
<script>
// Professor specializes Pessoa with a university and the list of courses taught.
class Professor extends Pessoa {
    constructor(nome, sobrenome, ano, universidade, cursos) {
        super(nome, sobrenome, ano);
        this.universidade = universidade;
        this.cursos = cursos;
    }

    // Extends the base introduction with the teaching details.
    apresentarSe() {
        const base = super.apresentarSe();
        return base + " Eu dou aula nos cursos " + this.cursos + ", na universidade " + this.universidade + ".";
    }
}
// Build a professor, log the raw object, and write the introduction to the page.
const professor = new Professor("Maria", "da Mata", 1990, "Mackenzie", ["Química", "Matemática", "Biologia"]);
console.log(professor);
document.write(professor.apresentarSe());
</script>
</article>
<article>
<h2>Getters e Setters</h2>
<script>
// Binary arithmetic over (a, b), selected by an operator symbol.
// Unsupported operators yield a configurable error message instead of a number.
class Operacao {
    constructor(a, b, operacao) {
        this.a = a;
        this.b = b;
        this.operacao = operacao;
        this._mensagemDeErro = "Erro padrão.";
    }

    // Computed result; falls back to the error message for unknown operators.
    get resultado() {
        const { a, b, operacao } = this;
        if (operacao === "+") return a + b;
        if (operacao === "-") return a - b;
        if (operacao === "*") return a * b;
        if (operacao === "/") return a / b;
        return this._mensagemDeErro;
    }

    // Current error message.
    get erro() {
        return this._mensagemDeErro;
    }

    // Overrides the default error message.
    set erro(valor) {
        this._mensagemDeErro = valor;
    }
}
// "**" is not a supported operator, so `resultado` yields the error message.
// Note the custom message is set only after the first document.write.
const soma = new Operacao(10, 10, "**");
document.write(soma.erro);
soma.erro = "Mensagem customizada de erro!";
document.write(soma.resultado);
</script>
</article>
<article>
<h2>Funções estáticas de classes</h2>
<script>
// Describes an animal by species, leg count, locomotion and diet.
class Animal {
    constructor(especie, qtdPatas, locomocao, alimentacao) {
        this.especie = especie;
        this.qtdPatas = qtdPatas;
        this.locomocao = locomocao;
        this.alimentacao = alimentacao;
    }

    // Static describer: builds a pt-BR sentence from any Animal-like object.
    static apresentarAnimal(animal) {
        const partes = [
            "Este animal é da espécie " + animal.especie,
            ", possui " + animal.qtdPatas + " patas",
            ", locomove-se " + animal.locomocao,
            " e é " + animal.alimentacao + ".",
        ];
        return partes.join("");
    }
}
// Describe a concrete animal via the static helper (no instance state is read).
const leao = new Animal("felino", 4, "andando", "carnivoro");
document.write(Animal.apresentarAnimal(leao));
</script>
</article>
<article>
<h2>Prática</h2>
<script>
// A polygon described only by the lengths of its sides.
class FormaGeometrica {
    constructor(lados) {
        // Array of side lengths.
        this.lados = lados;
    }

    // Sum of all side lengths (0 for an empty array).
    get perimetro() {
        return this.lados.reduce((total, lado) => total + lado, 0);
    }

    // Human-readable shape name derived from the side count.
    // Fewer than 3 sides is not a polygon, hence "Erro".
    get nome() {
        const porLados = { 3: "Triângulo", 4: "Quadrado", 5: "Pentagono", 6: "Hexagono" };
        const quantidade = this.lados.length;
        if (quantidade <= 2) {
            return "Erro";
        }
        return porLados[quantidade] || "Poligono";
    }
}
// Demo: perimeter and name for a triangle and a square (4 equal sides).
const triangulo = new FormaGeometrica([10, 10, 10]);
document.write(triangulo.perimetro + "<br>");
document.write(triangulo.nome + "<br>");
const quadrado = new FormaGeometrica([10, 10, 10, 10]);
document.write(quadrado.perimetro + "<br>");
document.write(quadrado.nome + "<br>");
</script>
</article>
</section>
</body>
</html>
|
use crate::{Chainflip, FundingInfo};
use frame_support::Never;
use sp_runtime::{
traits::{CheckedSub, Zero},
Saturating,
};
use sp_std::{collections::btree_map::BTreeMap, marker::PhantomData};
use super::{MockPallet, MockPalletStorage};
/// Mock implementation of [`FundingInfo`] backed by `MockPalletStorage`.
pub struct MockFundingInfo<T>(PhantomData<T>);
impl<T> MockPallet for MockFundingInfo<T> {
const PREFIX: &'static [u8] = b"MockFundingInfo";
}
/// Storage key under which the per-account balance map is kept.
const BALANCES: &[u8] = b"BALANCES";
impl<T: Chainflip> MockFundingInfo<T> {
	/// Adds `amount` to the account's balance, creating the storage map and/or
	/// the account entry on first use. Addition saturates instead of overflowing.
	pub fn credit_funds(account_id: &T::AccountId, amount: T::Amount) {
		<Self as MockPalletStorage>::mutate_value(
			BALANCES,
			|storage: &mut Option<BTreeMap<T::AccountId, T::Amount>>| {
				storage
					.get_or_insert_with(BTreeMap::new)
					.entry(account_id.clone())
					.or_default()
					.saturating_accrue(amount);
				Ok::<_, Never>(())
			},
		)
		.unwrap();
	}

	/// Attempts to debit `amount` from the account, returning the remaining
	/// balance on success, or `None` when the account is unknown or the
	/// balance is insufficient. A zero debit always succeeds without touching
	/// storage.
	pub fn try_debit_funds(account_id: &T::AccountId, amount: T::Amount) -> Option<T::Amount> {
		if amount.is_zero() {
			return Some(amount)
		}
		<Self as MockPalletStorage>::mutate_value(
			BALANCES,
			|storage: &mut Option<BTreeMap<T::AccountId, T::Amount>>| {
				let balance = storage.as_mut()?.get_mut(account_id)?;
				let remainder = balance.checked_sub(&amount)?;
				*balance = remainder;
				Some(remainder)
			},
		)
	}

	/// Replaces the entire balances map with the provided entries.
	pub fn set_balances(balances: impl IntoIterator<Item = (T::AccountId, T::Amount)>) {
		<Self as MockPalletStorage>::mutate_value(BALANCES, |storage| {
			*storage = Some(BTreeMap::from_iter(balances));
			Ok::<_, Never>(())
		})
		.unwrap();
	}
}
impl<T: Chainflip> FundingInfo for MockFundingInfo<T> {
	type AccountId = T::AccountId;
	type Balance = T::Amount;

	/// Balance of a single account; zero if the map or the account is missing.
	fn total_balance_of(account_id: &Self::AccountId) -> Self::Balance {
		let balances: Option<BTreeMap<Self::AccountId, Self::Balance>> =
			<Self as MockPalletStorage>::get_value(BALANCES);
		balances
			.and_then(|map| map.get(account_id).cloned())
			.unwrap_or_default()
	}

	/// Sum of every account balance; zero when no balances were ever stored.
	fn total_onchain_funds() -> Self::Balance {
		let balances: Option<BTreeMap<Self::AccountId, Self::Balance>> =
			<Self as MockPalletStorage>::get_value(BALANCES);
		balances
			.map(|map| map.values().cloned().sum())
			.unwrap_or_default()
	}
}
|
# Importing packages -------------------------------------------------------
library(dplyr)
library(ggplot2)
library(showtext)
library(srvyr)
library(survey)
library(rlang)
library(margins)
library(convey)
library(xtable)
library(stargazer)
# Register the Inter font from Google Fonts and render all plot text with it.
font_add_google(name = "Inter", family = "custom")
showtext_auto()
# Econometric estimation ---------------------------------------------------
## Create the variables needed for the estimation
## (sample restricted to household heads).
pnadc <- pnadc_clean |>
dplyr::filter(chefe_domicilio == "Sim") |>
dplyr::mutate(
renda_domiciliar_pc = VD5008,
# Poverty dummies: half the minimum wage, and the mean household income line.
pobreza_sm = ifelse(VD5008 < 998 / 2, 1, 0),
pobreza_rm = ifelse(VD5008 < renda_media$renda_media, 1, 0),
sexo_cf = ifelse(V2007 == 1, 1, 0),
idade_cf = V2009,
# FIX: `raca_cf` was assigned twice; the second definition (V2010 == 2,
# "preta" only) silently overwrote the first (V2010 %in% c(2, 4),
# "preta" or "parda"). The dead first assignment was removed to keep the
# effective behavior; NOTE(review): confirm which definition is intended.
raca_cf = ifelse(V2010 == 2, 1, 0),
ano_estudo_cf = VD3005,
local_dom = ifelse(V1022 == 1, 1, 0),
num_membros = V2001,
trab_agric = ifelse(VD4010 == 1, 1, 0),
regiao_dom = ifelse(regiao %in% c("Norte", "Nordeste"), 1, 0)
)
## Declare the PNADC complex sampling design (PSUs, strata, calibrated weights).
## nest = TRUE: PSU ids repeat across strata and must be treated as nested.
pnadc <- pnadc |>
srvyr::as_survey_design(
ids = UPA,
strata = Estrato,
weights = V1032,
nest = TRUE
)
## Econometric estimation using 50% of the minimum wage as the poverty line.
## Survey-weighted probit; quasibinomial avoids the non-integer-successes
## warning that binomial raises with sampling weights.
modelo_sm <- survey::svyglm(
formula = pobreza_sm ~ sexo_cf + idade_cf + raca_cf + ano_estudo_cf + local_dom + num_membros + trab_agric + regiao_dom,
family = quasibinomial(link = "probit"),
design = pnadc
)
## Model summary and joint significance test of the estimated coefficients.
## FIX: jtools exports summ(), not j_summ() (j_summ is the long-removed old
## name); the original call fails with "'j_summ' is not an exported object".
jtools::summ(modelo_sm)
survey::regTermTest(modelo_sm, test.terms = ~ sexo_cf + idade_cf + raca_cf + ano_estudo_cf + local_dom + num_membros + trab_agric + regiao_dom)
## Pseudo-R2 Cox-Snell and Nagelkerke
pseudor2cs_sm <- survey::psrsq(modelo_sm, method = "Cox-Snell") |> round(2)
pseudor2ng_sm <- survey::psrsq(modelo_sm, method = "Nagelkerke") |> round(2)
## Estimation using mean household income per capita as the poverty line.
modelo_rm <- survey::svyglm(
formula = pobreza_rm ~ sexo_cf + idade_cf + raca_cf + ano_estudo_cf + local_dom + num_membros + trab_agric + regiao_dom,
family = quasibinomial(link = "probit"),
design = pnadc
)
## Model summary and joint significance test of the estimated coefficients.
## FIX: jtools exports summ(), not j_summ(); the original call errors out.
jtools::summ(modelo_rm)
survey::regTermTest(modelo_rm, test.terms = ~ sexo_cf + idade_cf + raca_cf + ano_estudo_cf + local_dom + num_membros + trab_agric + regiao_dom)
## Pseudo-R2 Cox-Snell and Nagelkerke
pseudor2cs_rm <- survey::psrsq(modelo_rm, method = "Cox-Snell") |> round(2)
pseudor2ng_rm <- survey::psrsq(modelo_rm, method = "Nagelkerke") |> round(2)
## Export the estimated parameters of both models in \LaTeX.
## NOTE(review): type = "latex" with out = "misc/output.html" writes LaTeX
## source into an .html file — confirm the intended output path/format.
## NOTE(review): `summary = TRUE` only affects data-frame inputs; it is inert
## for fitted models and could be dropped.
stargazer::stargazer(
modelo_sm, modelo_rm,
add.lines = list(
c("Pseudo $R^2$ Cox-Snell", pseudor2cs_sm, pseudor2cs_rm),
c("Pseudo $R^2$ Nagelkerke", pseudor2ng_sm, pseudor2ng_rm)),
column.sep.width = "3pt",
header = FALSE,
single.row = FALSE,
no.space = TRUE,
font.size = "small",
summary = TRUE,
model.names = TRUE,
model.numbers = TRUE,
keep.stat = c("rsq", "n"),
type = "latex",
out = "misc/output.html",
decimal.mark = ",",
covariate.labels = c("sexo", "idade", "raca", "anosEstudo", "localDom", "numMembros", "trabAgric", "regiao")
)
# Marginal effects ---------------------------------------------------------
## Compute average marginal effects for both models.
## NOTE(review): `data = pnadc` passes the srvyr design object itself;
## margins() documents a separate `design =` argument for svyglm models —
## confirm the data are extracted as intended.
ef_modelo_sm <- margins::margins(model = modelo_sm, data = pnadc)
ef_modelo_rm <- margins::margins(model = modelo_rm, data = pnadc)
## Export the marginal effects as .xlsx (and optionally \LaTeX via xtable).
## Marginal effects, model 1
broom::tidy(ef_modelo_sm) |>
dplyr::arrange(desc(estimate)) |>
writexl::write_xlsx("tables/ef_modelo_sm.xlsx") # |>
#xtable::xtable()
## Marginal effects, model 2
broom::tidy(ef_modelo_rm) |>
dplyr::arrange(desc(estimate)) |>
writexl::write_xlsx("tables/ef_modelo_rm.xlsx") # |>
#xtable::xtable()
# Estimation of the `conditional predicted average marginal effects`
## The function below computes the conditional marginal effects for both
## poverty lines and saves the resulting plot.
## NOTE: relies on `modelo_sm` and `modelo_rm` existing in the calling
## environment; returns the ggplot object and writes a PNG under plots/.
estimate_margins <- function(var_name = "",
xlabel = "",
ylabel = "",
plot_name = ""
){
# Model 1 (minimum-wage poverty line)
cplot_sm <- margins::cplot(modelo_sm, var_name, draw = FALSE)
# Model 2 (mean-income poverty line)
cplot_rm <- margins::cplot(modelo_rm, var_name, draw = FALSE)
# Stack the two data frames, keeping the poverty line as the facet id
cplot_binned <- dplyr::bind_rows(
"Linha de pobreza: Salário mínimo" = cplot_sm,
"Linha de pobreza: Renda domiciliar *Per capita*" = cplot_rm,
.id = "id"
)
# Build the plot: point estimate in black, confidence bounds dashed in red
gen_plot <- cplot_binned |>
dplyr::group_by(id) |>
ggplot2::ggplot(aes(x = xvals))+
ggplot2::geom_line(aes(y = yvals), color = "black", linewidth = 1) +
ggplot2::geom_line(aes(y = upper), linetype = 2, color = "red", linewidth = 1) +
ggplot2::geom_line(aes(y = lower), linetype = 2, color = "red", linewidth = 1) +
ggplot2::geom_hline(yintercept = 0)+
ggplot2::facet_wrap(~id)+
ggplot2::theme_classic()+
ggplot2::theme(
strip.text = ggtext::element_markdown(size = 18),
strip.background = element_blank(),
axis.text = element_text(color = "black", family = "custom", size = 14),
axis.title = element_text(color = "black", family = "custom", size = 16)
)+
ggplot2::ylim(0, 1)+
ggplot2::labs(
color = "Linha de pobreza",
x = xlabel,
y = ylabel
)
# Save the plot to plots/<plot_name>
ggplot2::ggsave(
plot = gen_plot,
filename = plot_name,
width = 14,
height = 8,
dpi = 200,
bg = "white",
path = "plots/",
units = "in"
)
return(gen_plot)
}
# Generate the marginal-effects plots --------------------------------------
# Each call below produces and saves one conditional-marginal-effects plot
# (both poverty lines side by side) for a single covariate.
## Household head: years of schooling
estimate_margins(var_name = "ano_estudo_cf",
xlabel = "Anos de estudo",
ylabel = "Probabilidade",
plot_name = "efeitos_marginais_anos_estudo.png"
)
## Household head: sex
estimate_margins(var_name = "sexo_cf",
xlabel = "Sexo",
ylabel = "Probabilidade",
plot_name = "efeitos_marginais_sexo_cf.png"
)
## Household head: age
estimate_margins(var_name = "idade_cf",
xlabel = "Idade",
ylabel = "Probabilidade",
plot_name = "efeitos_marginais_idade_cf.png"
)
## Household head: race
estimate_margins(var_name = "raca_cf",
xlabel = "Raça",
ylabel = "Probabilidade",
plot_name = "efeitos_marginais_raca_cf.png"
)
## Number of household members
estimate_margins(var_name = "num_membros",
xlabel = "Número de membros da família",
ylabel = "Probabilidade",
plot_name = "efeitos_marginais_num_membros.png"
)
## Works in agriculture
estimate_margins(var_name = "trab_agric",
xlabel = "Trabalha na agricultura",
ylabel = "Probabilidade",
plot_name = "efeitos_marginais_trab_agric.png"
)
## Household location (urban/rural zone)
estimate_margins(var_name = "local_dom",
xlabel = "Localização do domicílio (Zona)",
ylabel = "Probabilidade",
plot_name = "efeitos_marginais_local_dom.png"
)
## Household macro-region (North/Northeast vs rest)
estimate_margins(var_name = "regiao_dom",
xlabel = "Região de localização do domicílio (Região)",
ylabel = "Probabilidade",
plot_name = "efeitos_marginais_regiao_dom.png"
)
|
title: 双链表
author: 熊 超
tags:
- 数据结构
- 算法
categories:
- 数据结构
date: 2019-10-24 14:45:00
---
<!-- more -->
### 一、概念

#### 单向链表
- 只能从头遍历到尾或者从尾遍历到头(一般从头到尾),也就是链表相连的过程是 单向 的。
- 实现的原理是上一个链表中有一个指向下一个的引用。
单链表的缺点:
- 我们可以轻松的到达下一个节点,但是回到前一个节点是很难的。
但是,在实际开发中,经常会遇到需要回到上一个节点的情况。
- 举个例子:
假设一个文本编辑用链表来存储文本。每一行用一个 String 对象存储在链表的一个节点中。
当编辑器用户向下移动光标时,链表直接操作到下一个节点即可。但是当用于将光标向上移动呢?
这个时候为了回到上一个节点,我们可能需要从first开始,依次走到想要的节点上。
#### 双向链表
- 既可以从头遍历到尾,又可以从尾遍历到头。也就是链表的相连过程是双向的,
- 一个节点既有向前连接的引用,也有一个向后连接的引用。
缺点:
1. 每次在插入或删除某个节点时,需要处理四个引用,而不是两个。实现起来要困难一些。
2. 相对于单向链表,占用的内存空间更大一些。
#### 结构模型
> head -> { prev, data, next } -> { prev, data, next } -> { prev, data, next(tail) } -> null;
- head 指向头部节点;
- tail 指向尾部节点;
- prev 指向上一个节点;
- next 指向下一个节点;
- data 当前节点数据
第一个节点的 prev 是 null
最后一个节点的 next 是 null
### 二、属性和方法
链表的属性:
- head: 头部节点
- node: 节点
- data: 节点数据
- prev:指向上一个节点的
- next:指向下一个节点的
- tail: 尾部节点
- length: 链表长度
链表的方法(6个):
1. append(data):向链表尾部添加一个新的项
2. insert(position, data):向链表的特定位置 插入一个新的项
3. get(position):获取对应位置的元素
4. indexOf(data):返回元素在链表中的索引。如果链表中没有该元素,则返回-1
5. update(position, data):修改某个位置的元素
6. removeAt(position):从链表的特定位置删除一项
7. remove(data):从列表中删除一项
8. getHead():获取头部节点
9. forwardString():返回向前遍历的节点字符串形式
10. backwordString():返回向后遍历的节点字符串形式
11. getTail():获取尾部节点
12. isEmpty():如果链表中不包含任何元素,返回true;否则返回false
13. size():返回链表中包含的元素个数
14. toString():将链表中的内容,转换成字符串形式
具体实现:
```js
/*************** Doubly linked list ***********/
// Each node keeps prev/next references; the list tracks head, tail and length.
// Methods are (re)assigned to the prototype inside the constructor, mirroring
// the original design, so they exist only after the first `new`.
function DoublyLinkedList() {
    // Internal node class.
    function Node(data) {
        this.data = data;
        this.prev = null;
        this.next = null;
    }
    // Properties
    this.head = null;   // first node, or null when empty
    this.tail = null;   // last node, or null when empty
    this.length = 0;    // number of nodes

    // Append a new item at the tail. O(1).
    DoublyLinkedList.prototype.append = function (data) {
        const node = new Node(data);
        if (this.length === 0) {
            // First node: it is both head and tail.
            this.head = node;
            this.tail = node;
        } else {
            node.prev = this.tail;
            this.tail.next = node;
            this.tail = node;
        }
        this.length++;
    }

    // Insert a new item at `position` (0..length); position === length appends.
    // Returns false when out of range, true otherwise.
    DoublyLinkedList.prototype.insert = function (position, data) {
        // 1. Bounds check
        if (position < 0 || position > this.length) {
            return false;
        }
        // 2. Create the node
        const node = new Node(data);
        // 3. Empty list?
        if (this.length === 0) {
            this.head = node;
            this.tail = node;
        } else {
            if (position === 0) {
                // 3.1 New head
                this.head.prev = node;
                node.next = this.head;
                this.head = node;
            } else if (position === this.length) {
                // 3.2 New tail
                this.tail.next = node;
                node.prev = this.tail;
                this.tail = node;
            } else {
                // 3.3 Middle: walk to the node currently at `position`
                // and splice the new node just before it.
                let currentNode = this.head;
                let index = 0;
                while (index++ < position) {
                    currentNode = currentNode.next;
                }
                node.next = currentNode;
                node.prev = currentNode.prev;
                currentNode.prev.next = node;
                currentNode.prev = node;
            }
        }
        this.length++;
        return true;
    }

    // Return the data at `position`, or null when out of range. O(n).
    DoublyLinkedList.prototype.get = function (position) {
        if (position < 0 || position >= this.length) {
            return null;
        }
        let currentNode = this.head;
        let index = 0;
        while (index++ < position) {
            currentNode = currentNode.next;
        }
        return currentNode.data;
    }

    // Index of the first node whose data strictly equals `data`, or -1.
    DoublyLinkedList.prototype.indexOf = function (data) {
        let currentNode = this.head;
        let index = 0;
        while (index < this.length) {
            if (currentNode.data === data) {
                return index;
            }
            currentNode = currentNode.next;
            index++;
        }
        return -1;
    }

    // Overwrite the data at `position`. Returns false when out of range.
    DoublyLinkedList.prototype.update = function (position, data) {
        if (position < 0 || position >= this.length) {
            return false;
        }
        let currentNode = this.head;
        let index = 0;
        while (index++ < position) {
            currentNode = currentNode.next;
        }
        currentNode.data = data;
        return true;
    }

    // Remove and return the data at `position`, or null when out of range.
    DoublyLinkedList.prototype.removeAt = function (position) {
        if (position < 0 || position >= this.length) {
            return null;
        }
        let currentNode = this.head;
        if (this.length === 1) {
            // Sole node: the list becomes empty.
            this.head = null;
            this.tail = null;
        } else {
            if (position === 0) {
                // Drop the head.
                this.head.next.prev = null;
                this.head = this.head.next;
            } else if (position === this.length - 1) {
                // Drop the tail.
                currentNode = this.tail;
                this.tail.prev.next = null;
                this.tail = this.tail.prev;
            } else {
                // Middle: walk to the node and bridge its neighbours.
                let index = 0;
                while (index++ < position) {
                    currentNode = currentNode.next;
                }
                currentNode.prev.next = currentNode.next;
                currentNode.next.prev = currentNode.prev;
            }
        }
        this.length--;
        return currentNode.data;
    }

    // Remove the first node holding `data`; returns its data, or null
    // when the element is not in the list.
    DoublyLinkedList.prototype.remove = function (data) {
        // 1. Locate the element
        const position = this.indexOf(data);
        // 2. Delete by position (removeAt handles -1 by returning null)
        return this.removeAt(position);
    }

    // Data of the head node.
    // FIX: originally threw a TypeError on an empty list; now returns null.
    DoublyLinkedList.prototype.getHead = function () {
        return this.head === null ? null : this.head.data;
    }

    // Data of the tail node.
    // FIX: originally threw a TypeError on an empty list; now returns null.
    DoublyLinkedList.prototype.getTail = function () {
        return this.tail === null ? null : this.tail.data;
    }

    // True when the list holds no elements.
    DoublyLinkedList.prototype.isEmpty = function () {
        return this.length === 0;
    }

    // Number of elements in the list.
    DoublyLinkedList.prototype.size = function () {
        return this.length;
    }

    // String form (head-to-tail), delegating to backwordString().
    DoublyLinkedList.prototype.toString = function () {
        return this.backwordString();
    }

    // Tail-to-head traversal; items separated by (and ending with) a space.
    DoublyLinkedList.prototype.forwardString = function () {
        let current = this.tail;
        let listString = '';
        while (current) {
            listString += current.data + ' ';
            current = current.prev;
        }
        return listString;
    }

    // Head-to-tail traversal. NOTE: the "backword" spelling is kept because
    // the method name is part of the public interface (used by toString()).
    DoublyLinkedList.prototype.backwordString = function () {
        let current = this.head;
        let listString = '';
        while (current) {
            listString += current.data + ' ';
            current = current.next;
        }
        return listString;
    }
}
```
|
```java
class Solution {
    /**
     * Returns the smallest letter strictly greater than target by scanning the
     * sorted array left to right; wraps around to letters[0] when every letter
     * is <= target. O(n) time, O(1) space.
     */
    public char nextGreatestLetter(char[] letters, char target) {
        for (char letter : letters) {
            if (letter > target) {
                return letter;
            }
        }
        return letters[0];
    }
}
```
```java
// my
class Solution {
public char nextGreatestLetter(char[] letters, char target) {
int s =0;
int e = letters.length-1;
char ans = '\0';
while(s<=e){
int mid = s+(e-s)/2;
if(letters[mid]>target){
ans = letters[mid];
e = mid-1;
}else{
s = mid+1;
}
}
return s==letters.length?letters[0]:ans;
}
}
```
```java
class Solution {
    /**
     * Upper-bound binary search: finds the first index whose letter is
     * strictly greater than target; wraps to letters[0] when none exists.
     * O(log n) time, O(1) space.
     */
    public char nextGreatestLetter(char[] letters, char target) {
        int lo = 0, hi = letters.length - 1;
        while (lo <= hi) {
            int mid = lo + (hi - lo) / 2;   // overflow-safe midpoint
            if (letters[mid] <= target) {
                lo = mid + 1;
            } else {
                hi = mid - 1;
            }
        }
        return lo < letters.length ? letters[lo] : letters[0];
    }
}
```
```java
class Solution {
/**
 * Alphabet-scan approach: mark which of the 26 letters occur, then walk
 * cyclically from target+1 until a present letter is found — the wrap-around
 * naturally handles the "return letters[0]" case because letters is sorted.
 *
 * The final {@code return ' '} is reachable only when no letter other than
 * target exists; the problem guarantees at least two distinct letters, so a
 * letter is always found in practice. O(n + 26) time, O(26) space.
 */
public char nextGreatestLetter(char[] letters, char target) {
boolean c[]=new boolean[26];
for(char ch:letters)
c[ch-'a']=true;
// Start just after target (mod 26) and scan the whole alphabet cyclically.
int index=(target-'a'+1)%26;
while(index!=(target-'a')){
if(c[index]==true)
return (char)(index+'a');
index=(index+1)%26;
}
// Unreachable under the stated constraints (>= 2 distinct letters).
return ' ';
}
}
```
You are given an array of characters letters that is sorted in non-decreasing order, and a character target. There are at least two different characters in letters.
Return the smallest character in letters that is lexicographically greater than target. If such a character does not exist, return the first character in letters.
Example 1:
Input: letters = ["c","f","j"], target = "a"
Output: "c"
Explanation: The smallest character that is lexicographically greater than 'a' in letters is 'c'.
Example 2:
Input: letters = ["c","f","j"], target = "c"
Output: "f"
Explanation: The smallest character that is lexicographically greater than 'c' in letters is 'f'.
Example 3:
Input: letters = ["x","x","y","y"], target = "z"
Output: "x"
Explanation: There are no characters in letters that is lexicographically greater than 'z' so we return letters[0].
Constraints:
2 <= letters.length <= 10^4
letters[i] is a lowercase English letter.
letters is sorted in non-decreasing order.
letters contains at least two different characters.
target is a lowercase English letter.
|
package utils
import (
"crypto"
"crypto/ecdsa"
"crypto/elliptic"
"crypto/rand"
"crypto/rsa"
"crypto/x509"
"crypto/x509/pkix"
"encoding/pem"
"math/big"
"os"
"time"
"github.com/pkg/errors"
)
// GeneratePrivateKey generates an ECDSA private key on the P-256 curve.
//
// FIX: the original discarded the error from ecdsa.GenerateKey and could
// silently return a nil key. GenerateKey only fails when the entropy source
// fails, which is unrecoverable here, so panic instead of returning nil.
func GeneratePrivateKey() (key *ecdsa.PrivateKey) {
	var err error
	key, err = ecdsa.GenerateKey(elliptic.P256(), rand.Reader)
	if err != nil {
		panic(err)
	}
	return
}
// GenerateRootCA builds the certificate template for the root CA: a ten-year,
// path-length-1 CA certificate allowed to sign certificates and CRLs.
// The serial number is derived from the current Unix time.
func GenerateRootCA() *x509.Certificate {
	now := time.Now()
	return &x509.Certificate{
		Version:      3,
		SerialNumber: big.NewInt(now.Unix()),
		Subject: pkix.Name{
			Country:            []string{"银河系"},
			Province:           []string{"地球"},
			Locality:           []string{"地球"},
			Organization:       []string{"类型安全"},
			OrganizationalUnit: []string{"银河系类型安全公司"},
			CommonName:         "银河系类型安全公司根证书",
		},
		NotBefore:             now,
		NotAfter:              now.AddDate(10, 0, 0),
		BasicConstraintsValid: true,
		IsCA:                  true,
		MaxPathLen:            1,
		MaxPathLenZero:        false,
		KeyUsage:              x509.KeyUsageDigitalSignature | x509.KeyUsageCertSign | x509.KeyUsageCRLSign,
	}
}
// LoadOrCreateCA returns the CA certificate and signing key stored at the
// given paths. When the certificate file does not exist, a fresh self-signed
// P-256 CA is generated and persisted first (cert 0644, key 0400, PKCS#8).
//
// NOTE(review): only `crt` is checked for existence; if the cert exists but
// the key file is missing, the ReadFile below returns the error — confirm
// that is the desired behavior.
func LoadOrCreateCA(crt, key string) (*x509.Certificate, crypto.Signer, error) {
	if !pathExists(crt) {
		// File missing: generate a self-signed root CA on the fly.
		rootCA := GenerateRootCA()
		priv := GeneratePrivateKey()
		cert, err := x509.CreateCertificate(rand.Reader, rootCA, rootCA, priv.Public(), priv)
		if err != nil {
			return nil, nil, err
		}
		if err := os.WriteFile(crt, pem.EncodeToMemory(
			&pem.Block{Type: "CERTIFICATE", Bytes: cert}), 0644); err != nil {
			return nil, nil, err
		}
		privDER, err := x509.MarshalPKCS8PrivateKey(priv)
		if err != nil {
			return nil, nil, err
		}
		if err := os.WriteFile(key, pem.EncodeToMemory(
			&pem.Block{Type: "PRIVATE KEY", Bytes: privDER}), 0400); err != nil {
			return nil, nil, err
		}
	}
	// Read back whatever is on disk (freshly written or pre-existing).
	certPEMBlock, err := os.ReadFile(crt)
	if err != nil {
		return nil, nil, err
	}
	keyPEMBlock, err := os.ReadFile(key)
	if err != nil {
		return nil, nil, err
	}
	return ParseCertAndPrivateKey(certPEMBlock, keyPEMBlock)
}
// SignCertWithCA issues a leaf certificate for the given DNS names, signed by
// rootCA with privateKey, and returns (certPEM, keyPEM). isClient selects
// client-auth vs server-auth extended key usage and the CommonName.
//
// NOTE(review): the leaf embeds the CA's own public key (privateKey.Public())
// and the CA's private key is re-encoded as the returned keyPEM — normally a
// fresh leaf key pair would be generated; confirm the key reuse is intended.
// NOTE(review): the serial number is the Unix time in seconds, so issuing
// more than one certificate per second produces duplicate serials.
func SignCertWithCA(rootCA *x509.Certificate, privateKey crypto.Signer, isClient bool, domains ...string) ([]byte, []byte, error) {
	// Certificates last for 2 years and 3 months, which is always less than
	// 825 days, the limit that macOS/iOS apply to all certificates,
	// including custom roots. See https://support.apple.com/en-us/HT210176.
	expiration := time.Now().AddDate(2, 3, 0)
	var csr = &x509.Certificate{
		Version:      3,
		SerialNumber: big.NewInt(time.Now().Unix()),
		// Subject copied from the issuing CA; CommonName overridden below.
		Subject: pkix.Name{
			Country:            rootCA.Subject.Country,
			Province:           rootCA.Subject.Province,
			Locality:           rootCA.Subject.Locality,
			Organization:       rootCA.Subject.Organization,
			OrganizationalUnit: rootCA.Subject.OrganizationalUnit,
			CommonName:         rootCA.Subject.CommonName,
		},
		IPAddresses:           nil,
		DNSNames:              domains,
		NotBefore:             time.Now(),
		NotAfter:              expiration,
		BasicConstraintsValid: true,
		IsCA:                  false,
		KeyUsage:              x509.KeyUsageDigitalSignature | x509.KeyUsageKeyEncipherment,
		ExtKeyUsage:           []x509.ExtKeyUsage{x509.ExtKeyUsageServerAuth},
	}
	if isClient {
		// Client certs replace (not extend) the EKU with client auth.
		csr.Subject.CommonName = "银河系类型安全公司客户端证书"
		csr.ExtKeyUsage = []x509.ExtKeyUsage{x509.ExtKeyUsageClientAuth}
	} else {
		csr.Subject.CommonName = "银河系类型安全公司服务端证书"
	}
	der, err := x509.CreateCertificate(rand.Reader, csr, rootCA, privateKey.Public(), privateKey)
	if err != nil {
		return nil, nil, err
	}
	// Re-parse so we can inspect the final certificate's key algorithm.
	cert, err := x509.ParseCertificate(der)
	if err != nil {
		return nil, nil, err
	}
	certPEM := pem.EncodeToMemory(&pem.Block{Type: "CERTIFICATE", Bytes: cert.Raw})
	// Encode the signing key in the PEM flavor matching its algorithm.
	var privPEM []byte
	switch cert.PublicKeyAlgorithm {
	case x509.RSA:
		privDER := x509.MarshalPKCS1PrivateKey(privateKey.(*rsa.PrivateKey))
		privPEM = pem.EncodeToMemory(&pem.Block{Type: "RSA PRIVATE KEY", Bytes: privDER})
	case x509.ECDSA:
		privDER, err := x509.MarshalECPrivateKey(privateKey.(*ecdsa.PrivateKey))
		if err != nil {
			return nil, nil, err
		}
		privPEM = pem.EncodeToMemory(&pem.Block{Type: "EC PRIVATE KEY", Bytes: privDER})
	default:
		return nil, nil, errors.New("failed to sign cert, unsupported algorithm:" + cert.PublicKeyAlgorithm.String())
	}
	return certPEM, privPEM, nil
}
// pathExists reports whether `path` can be stat'ed. Note that ANY stat error
// (permission denied included, not just "not found") yields false.
func pathExists(path string) bool {
	if _, err := os.Stat(path); err != nil {
		return false
	}
	return true
}
// ParseCertAndPrivateKey decodes a PEM certificate and a PEM private key
// (PKCS#1 RSA, SEC1 EC, or PKCS#8) and returns them as a certificate plus
// crypto.Signer pair.
//
// NOTE(review): the final switch asserts the key's concrete type based on the
// certificate's algorithm; if the key file does not match the certificate
// (e.g. a PKCS#8 Ed25519 key with an ECDSA cert) the type assertion panics
// instead of returning an error — confirm whether that should be hardened.
func ParseCertAndPrivateKey(certBytes, privateKeyBytes []byte) (*x509.Certificate, crypto.Signer, error) {
	certDERBlock, _ := pem.Decode(certBytes)
	if certDERBlock == nil || certDERBlock.Type != "CERTIFICATE" {
		return nil, nil, errors.New("failed to read the CA certificate: unexpected content")
	}
	caCert, err := x509.ParseCertificate(certDERBlock.Bytes)
	if err != nil {
		return nil, nil, err
	}
	keyDERBlock, _ := pem.Decode(privateKeyBytes)
	if keyDERBlock == nil {
		return nil, nil, errors.New("failed to read the CA key: unexpected content")
	}
	// Dispatch on the PEM header to pick the right key parser.
	var caKey interface{}
	switch keyDERBlock.Type {
	case "RSA PRIVATE KEY":
		caKey, err = x509.ParsePKCS1PrivateKey(keyDERBlock.Bytes)
		if err != nil {
			return nil, nil, err
		}
	case "EC PRIVATE KEY":
		caKey, err = x509.ParseECPrivateKey(keyDERBlock.Bytes)
		if err != nil {
			return nil, nil, err
		}
	case "PRIVATE KEY":
		caKey, err = x509.ParsePKCS8PrivateKey(keyDERBlock.Bytes)
		if err != nil {
			return nil, nil, err
		}
	default:
		return nil, nil, errors.New("failed to read the CA key, unsupported type:" + keyDERBlock.Type)
	}
	// Narrow the parsed key to a concrete signer type matching the cert.
	switch caCert.PublicKeyAlgorithm {
	case x509.RSA:
		return caCert, caKey.(*rsa.PrivateKey), nil
	case x509.ECDSA:
		return caCert, caKey.(*ecdsa.PrivateKey), nil
	default:
		return nil, nil, errors.New("failed to read the CA key, unsupported algorithm:" + caCert.PublicKeyAlgorithm.String())
	}
}
|
using Backend.Models;
using Microsoft.EntityFrameworkCore;
using BCrypt.Net;
namespace Backend.Data
{
// EF Core DbContext for the calendar application: exposes the entity sets and
// seeds baseline users plus event/venue category lookup tables.
public class CalendarDbContext : DbContext
{
public CalendarDbContext(DbContextOptions<CalendarDbContext> options) : base(options)
{
}
// Entity sets
public DbSet<User> Users { get; set; }
public DbSet<Attending> Attendings { get; set; }
public DbSet<Event> Events { get; set; }
public DbSet<EventCategory> EventCategories { get; set; }
public DbSet<EventMedia> EventMedias { get; set; }
public DbSet<EventReview> EventReviews { get; set; }
public DbSet<Venue> Venues { get; set; }
public DbSet<VenueCategory> VenueCategories { get; set; }
public DbSet<VenueMedia> VenueMedias { get; set; }
public DbSet<VenueReview> VenueReviews { get; set; }
protected override void OnConfiguring(DbContextOptionsBuilder optionsBuilder)
{
// NOTE(review): UseNpgsql() is called without a connection string —
// presumably the string arrives via the injected DbContextOptions;
// confirm this override is still needed at all.
optionsBuilder.UseNpgsql();
}
protected override void OnModelCreating(ModelBuilder modelBuilder)
{
// Seed a regular user and an admin.
// NOTE(review): BCrypt.HashPassword(...) and DateTime.UtcNow produce a
// different value on every model build, so EF Core migrations will flag
// these seed rows as changed each time. HasData expects static values —
// consider hard-coding a hash and a fixed DOB; TODO confirm.
modelBuilder.Entity<User>().HasData(
new User
{
Id = 1,
UserName = "TestUser",
Password = BCrypt.Net.BCrypt.HashPassword("Test123!"),
ContactName = "",
OrganizationName = "",
IsBanned = false,
Email = "User@Test.com",
DOB = DateTime.UtcNow.AddYears(-18),
Role = User.Roles.user
},
new User
{
Id = 2,
UserName = "TestAdmin",
Password = BCrypt.Net.BCrypt.HashPassword("Test123!"),
ContactName = "",
OrganizationName = "",
IsBanned = false,
Email = "Admin@Test.com",
DOB = DateTime.UtcNow.AddYears(-18),
Role = User.Roles.admin
});
// Seed the event category lookup table.
modelBuilder.Entity<EventCategory>().HasData(
new EventCategory
{
Id = 1,
CategoryName= "Concert",
Descr = "Live music"
},
new EventCategory
{
Id = 2,
CategoryName = "Play",
Descr = "Theatrical presentation"
},
new EventCategory
{
Id = 3,
CategoryName = "Art Exhibition",
Descr = "Viewing of art pieces"
},
new EventCategory
{
Id = 4,
CategoryName = "Festival",
Descr = "Thematic celebration, often over multiple days"
},
new EventCategory
{
Id = 5,
CategoryName = "Party",
Descr = "General gathering of people"
},
new EventCategory
{
Id = 6,
CategoryName = "Fundraiser",
Descr = "Event aimed at collecting money for a cause"
});
// Seed the venue category lookup table.
modelBuilder.Entity<VenueCategory>().HasData(
new VenueCategory
{
Id = 1,
CategoryName = "Bar",
Descr = "Drinking establishment"
},
new VenueCategory
{
Id = 2,
CategoryName = "Theatre",
Descr = "Area for dramatic performances"
},
new VenueCategory
{
Id = 3,
CategoryName = "Gallery",
Descr = "Venue for displaying art"
},
new VenueCategory
{
Id = 4,
CategoryName = "Park",
Descr = "Open green space for recreation"
},
new VenueCategory
{
Id = 5,
CategoryName = "Stadium",
Descr = "Large capacity arena for popular events"
},
new VenueCategory
{
Id = 6,
CategoryName = "Restaurant",
Descr = "Location for dining"
},
new VenueCategory
{
Id = 7,
CategoryName = "Hall",
Descr = "Large multi-purpose gathering space"
});
// All tables live in the default "public" schema (PostgreSQL).
modelBuilder.HasDefaultSchema("public");
base.OnModelCreating(modelBuilder);
}
}
}
|
#pragma once
#include <stddef.h>
/* Growable, type-erased array with a built-in sequential read cursor. */
typedef struct vector {
void *data; // Pointer to the data
size_t size; // Number of elements in the vector
size_t capacity; // Number of elements the vector can hold
size_t element_size; // Size of each element in bytes
size_t read_index; // index of the element to read (left to right)
} vector;
/* Initial capacity used by vector_create(). */
#define VECTOR_DEFAULT_CAPACITY 42
/* Allocate a vector able to hold `capacity` elements of `element_size` bytes. */
vector *vector_create_with(size_t capacity, size_t element_size);
/* Allocate a vector with VECTOR_DEFAULT_CAPACITY. */
vector *vector_create(size_t element_size);
/* Release the vector and its storage. */
void vector_destroy(vector *v);
/* Copy `element` (element_size bytes) onto the end, growing as needed. */
void vector_push(vector *v, const void *element);
/* Drop the last element. */
void vector_pop(vector *v);
/* Pointer to the last element — presumably NULL when empty; TODO confirm. */
void *vector_peek(vector *v);
/* Element at read_index, presumably advancing the cursor; TODO confirm. */
void *vector_read(vector *v);
/* Element at `index`; TODO confirm out-of-bounds policy. */
void *vector_read_at(vector *v, size_t index);
/* Pointer to the last element; TODO confirm how it differs from vector_peek. */
void *vector_back(vector *v);
/* Reset read_index so reading restarts from the beginning. */
void vector_rewind(vector *v);
|
import httpStatus from "http-status";
import catchAsync from "../../utils/catchAsync";
import sendResponse from "../../utils/sendResponse";
import { offeredCourseServices } from "./offeredCourse.service";
// Controller: create a new offered course.
// NOTE(review): reads req.body?.body — presumably a validation middleware
// wraps the payload under `body`; confirm against the route definition.
const createOfferedCourse = catchAsync(async (req, res) => {
    const payload = req.body?.body;
    const result = await offeredCourseServices.createOfferedCourseIntoDB(payload);
    sendResponse(res, {
        statusCode: httpStatus.OK,
        message: "Offered course is created successfully",
        data: result,
    });
});
// Controller: list offered courses, forwarding query-string filters/pagination.
const getAllOfferedCourses = catchAsync(async (req, res) => {
    const filters = req.query;
    const result = await offeredCourseServices.getAllOfferedCoursesFromDB(filters);
    sendResponse(res, {
        statusCode: httpStatus.OK,
        message: "Offered courses are retrieved successfully!",
        data: result,
    });
});
// Controller: fetch one offered course by its route id.
const getSingleOfferedCourse = catchAsync(async (req, res) => {
    const { id } = req.params;
    const result = await offeredCourseServices.getSingleOfferedCoursesFromDB(id);
    sendResponse(res, {
        statusCode: httpStatus.OK,
        // FIX: the original message said "Semester registration is retrieved
        // successfully" — copied from another controller; this endpoint
        // returns an offered course.
        message: "Offered course is retrieved successfully",
        data: result,
    });
});
// Controller: update an offered course by id.
// NOTE(review): reads req.body?.body — presumably set by validation
// middleware; confirm against the route definition.
const updateOfferedCourse = catchAsync(async (req, res) => {
    const { id } = req.params;
    const payload = req.body?.body;
    const result = await offeredCourseServices.updateOfferedCoursesIntoDB(id, payload);
    sendResponse(res, {
        statusCode: httpStatus.OK,
        message: "Offered course is updated successfully",
        data: result,
    });
});
// Controller: delete an offered course by id.
const deleteOfferedCourse = catchAsync(async (req, res) => {
    const { id } = req.params;
    const result = await offeredCourseServices.deleteOfferedCoursesFromDB(id);
    sendResponse(res, {
        statusCode: httpStatus.OK,
        message: "Offered course is deleted successfully",
        data: result,
    });
});
// Aggregated controller surface consumed by the offeredCourse route module.
export const OfferedCourseControllers = {
    createOfferedCourse,
    getAllOfferedCourses,
    getSingleOfferedCourse,
    updateOfferedCourse,
    deleteOfferedCourse,
};
|
Goldbach's conjecture
Goldbach's conjecture
thumb=Goldbach_partitions_of_the_even_integers_from_4_to_28_300px.png|300px|The even integers from 4 to 28 as sums of two primes: Even integers correspond to horizontal lines. For each odd prime, there are two oblique lines, one red and one blue. The sums of two primes are the intersections of one red and one blue line, marked by a circle. Thus the circles on a given horizontal line give all partitions of the corresponding even integer into the sum of two primes.
Goldbach's conjecture is one of the oldest and best-known unsolved problems in number theory and in all of mathematics. It states:
Every even integer greater than 2 can be expressed as the sum of two primes.1
The conjecture has been shown to hold for all integers up to 4 × 10^18, but remains unproven despite considerable effort.
Goldbach number
(Figure)
The number of ways an even number can be represented as the sum of two primes.3
A Goldbach number is a positive integer that can be expressed as the sum of two odd primes.4 Therefore, another statement of Goldbach's conjecture is that all even integers greater than 4 are Goldbach numbers.
The expression of a given even number as a sum of two primes is called a Goldbach partition of that number. The following are examples of Goldbach partitions for some even numbers:
4 = 2 + 2
6 = 3 + 3
8 = 3 + 5
10 = 3 + 7 = 5 + 5
12 = 5 + 7
...
100 = 3 + 97 = 11 + 89 = 17 + 83 = 29 + 71 = 41 + 59 = 47 + 53
...
The number of ways in which 2n can be written as the sum of two primes (for n starting at 1) is:
0, 1, 1, 1, 2, 1, 2, 2, 2, 2, 3, 3, 3, 2, 3, 2, 4, 4, 2, 3, 4, 3, 4, 5, 4, 3, 5, 3, 4, 6, 3, 5, 6, 2, 5, 6, 5, 5, 7, 4, 5, 8, 5, 4, 9, 4, 5, 7, 3, 6, 8, 5, 6, 8, 6, 7, 10, 6, 6, 12, 4, 5, 10, 3, ... .
Origins
On 7 June 1742, the German mathematician Christian Goldbach wrote a letter to Leonhard Euler (letter XLIII)5 in which he proposed the following conjecture:
Every integer which can be written as the sum of two primes, can also be written as the sum of as many primes as one wishes, until all terms are units.
He then proposed a second conjecture in the margin of his letter:
Every integer greater than 2 can be written as the sum of three primes.
He considered 1 to be a prime number, a convention subsequently abandoned.6 The two conjectures are now known to be equivalent, but this did not seem to be an issue at the time. A modern version of Goldbach's marginal conjecture is:
Every integer greater than 5 can be written as the sum of three primes.
Euler replied in a letter dated 30 June 1742, and reminded Goldbach of an earlier conversation they had (), in which Goldbach remarked his original (and not marginal) conjecture followed from the following statement
Every even integer greater than 2 can be written as the sum of two primes,
which is, thus, also a conjecture of Goldbach. In the letter dated 30 June 1742, Euler stated:
("That … every even integer is a sum of two primes, I regard as a completely certain theorem, although I cannot prove it.")78
Goldbach's third version (equivalent to the two other versions) is the form in which the conjecture is usually expressed today. It is also known as the "strong", "even", or "binary" Goldbach conjecture, to distinguish it from a weaker corollary. The strong Goldbach conjecture implies the conjecture that all odd numbers greater than 7 are the sum of three odd primes, which is known today variously as the "weak" Goldbach conjecture, the "odd" Goldbach conjecture, or the "ternary" Goldbach conjecture. While the weak Goldbach conjecture appears to have been finally proved in 2013,910 the strong conjecture has remained unsolved.
Verified results
For small values of n, the strong Goldbach conjecture (and hence the weak Goldbach conjecture) can be verified directly. For instance, Nils Pipping in 1938 laboriously verified the conjecture up to n ≤ 10^5. With the advent of computers, many more values of n have been checked; T. Oliveira e Silva is running a distributed computer search that has verified the conjecture for n ≤ 4 × 10^18 (and double-checked up to 4 × 10^17). One record from this search is that 3325581707333960528 is the smallest number that has no Goldbach partition with a prime below 9781.
Heuristic justification
Statistical considerations that focus on the probabilistic distribution of prime numbers present informal evidence in favour of the conjecture (in both the weak and strong forms) for sufficiently large integers: the greater the integer, the more ways there are available for that number to be represented as the sum of two or three other numbers, and the more "likely" it becomes that at least one of these representations consists entirely of primes.
A very crude version of the heuristic probabilistic argument (for the strong form of the Goldbach conjecture) is as follows. The prime number theorem asserts that an integer m selected at random has roughly a
chance of being prime. Thus if n is a large even integer and m is a number between 3 and n/2, then one might expect the probability of m and n − m simultaneously being prime to be
. If one pursues this heuristic, one might expect the total number of ways to write a large even integer n as the sum of two odd primes to be roughly
Since this quantity goes to infinity as n increases, we expect that every large even integer has not just one representation as the sum of two primes, but in fact has very many such representations.
This heuristic argument is actually somewhat inaccurate, because it assumes that the events of m and n − m being prime are statistically independent of each other. For instance, if m is odd then n − m is also odd, and if m is even, then n − m is even, a non-trivial relation because, besides the number 2, only odd numbers can be prime. Similarly, if n is divisible by 3, and m was already a prime distinct from 3, then n − m would also be coprime to 3 and thus be slightly more likely to be prime than a general number. Pursuing this type of analysis more carefully, Hardy and Littlewood in 1923 conjectured (as part of their famous Hardy–Littlewood prime tuple conjecture) that for any fixed c ≥ 2, the number of representations of a large integer n as the sum of c primes
with
should be asymptotically equal to
where the product is over all primes p, and
is the number of solutions to the equation
in modular arithmetic, subject to the constraints
. This formula has been rigorously proven to be asymptotically valid for c ≥ 3 from the work of Vinogradov, but is still only a conjecture when
. In the latter case, the above formula simplifies to 0 when n is odd, and to
when n is even, where
is the twin prime constant
This is sometimes known as the extended Goldbach conjecture. The strong Goldbach conjecture is in fact very similar to the twin prime conjecture, and the two conjectures are believed to be of roughly comparable difficulty.
The Goldbach partition functions shown here can be displayed as histograms which informatively illustrate the above equations. See Goldbach's comet.13
Rigorous results
The strong Goldbach conjecture is much more difficult than the weak Goldbach conjecture. Using Vinogradov's method, Chudakov,14 Van der Corput,15 and Estermann16 showed that almost all even numbers can be written as the sum of two primes (in the sense that the fraction of even numbers which can be so written tends towards 1). In 1930, Lev Schnirelmann proved1718 that any natural number greater than 1 can be written as the sum of not more than C prime numbers, where C is an effectively computable constant, see Schnirelmann density. Schnirelmann's constant is the lowest number C with this property. Schnirelmann himself obtained C
Chen Jingrun showed in 1973 using the methods of sieve theory that every sufficiently large even number can be written as the sum of either two primes, or a prime and a semiprime (the product of two primes).19 See Chen's theorem for more.
In 1975, Hugh Montgomery and Robert Charles Vaughan showed that "most" even numbers were expressible as the sum of two primes. More precisely, they showed that there existed positive constants c and C such that for all sufficiently large numbers N, every even number less than N is the sum of two primes, with at most
exceptions. In particular, the set of even integers which are not the sum of two primes has density zero.
Linnik proved in 1951 the existence of a constant K such that every sufficiently large even number is the sum of two primes and at most K powers of 2. Roger Heath-Brown and Jan-Christoph Schlage-Puchta in 2002 found that K = 13 works.20 This was improved to K=8 by Pintz and Ruzsa in 2003.21
As with many famous conjectures in mathematics, there are a number of purported proofs of the Goldbach conjecture, none accepted by the mathematical community.
Considerable work has been done on Goldbach's weak conjecture, culminating in a 2013 claim by Harald Helfgott22232425 to fully prove the conjecture for all odd integers greater than 7 (rather than the much larger
that was implied by previous results).
Similar questions
One can pose similar questions when primes are replaced by other special sets of numbers, such as the squares.
It was proven by Lagrange that every positive integer is the sum of four squares. See Waring's problem and the related Waring–Goldbach problem on sums of powers of primes.
Hardy and Littlewood listed as their Conjecture I: "Every large odd number (n > 5) is the sum of a prime and the double of a prime." (Mathematics Magazine, 66.1 (1993): 45-47.) This conjecture is known as Lemoine's conjecture (also called Levy's conjecture).
The Goldbach conjecture for practical numbers, a prime-like sequence of integers, was stated by Margenstern in 1984,26 and proved by Melfi in 1996: every even number is a sum of two practical numbers.
References
Further reading
Terence Tao proved that all odd numbers are at most the sum of five primes
State of the art
External links
Goldbach's original letter to Euler — PDF format (in German and Latin)
Goldbach's conjecture, part of Chris Caldwell's Prime Pages.
Goldbach conjecture verification, Tomás Oliveira e Silva's distributed computer search.
"
Category:Additive number theory Category:Analytic number theory Category:Conjectures about prime numbers Category:Hilbert's problems
"Goldbach conjecture verification"
"Goldbach's Conjecture" by Hector Zenil, Wolfram Demonstrations Project, 2007.
http://www.math.dartmouth.edu/~euler/correspondence/letters/OO0765.pdf
Pipping, Nils (1890-1982), "Die Goldbachsche Vermutung und der Goldbach-Vinogradovsche Satz." Acta. Acad. Aboensis, Math. Phys. 11, 4–25, 1938.
Tomás Oliveira e Silva, Goldbach conjecture verification. Retrieved 20 July 2013
Schnirelmann, L.G. (1930). "On the additive properties of numbers", first published in "Proceedings of the Don Polytechnic Institute in Novocherkassk" (in Russian), vol XIV (1930), pp. 3-27, and reprinted in "Uspekhi Matematicheskikh Nauk" (in Russian), 1939, no. 6, 9–25.
Schnirelmann, L.G. (1933). First published as "Über additive Eigenschaften von Zahlen" in "Mathematische Annalen" (in German), vol 107 (1933), 649-690, and reprinted as "On the additive properties of numbers" in "Uspekhi Matematicheskikh Nauk" (in Russian), 1940, no. 7, 7–46.
http://www.truthiscool.com/prime-numbers-the-271-year-old-puzzle-resolved
Proof that an infinite number of primes are paired - physics-math - 14 May 2013. New Scientist. Retrieved on 2014-05-11.
|
/* eslint-disable react-hooks/rules-of-hooks */
import { GetServerSideProps } from 'next'
import Head from 'next/head'
import { useRouter } from 'next/router'
import { useEffect, useState } from 'react'
import Button from '../../components/Button'
import Header from '../../components/Header'
import { Icon } from '../../components/Icon'
import { useAppContext } from '../../contexts/app'
import { useApi } from '../../libs/useApi'
import styles from '../../styles/ForgetSucess.module.css'
import { Tenant } from '../../types/Tenant'
// "Check your e-mail" confirmation screen shown after requesting a
// password-recovery e-mail. Receives the tenant resolved server-side.
const ForgetSucess = (data: Props) => {
    const { tenant, setTenant } = useAppContext()
    const router = useRouter()

    // Push the server-resolved tenant into the app context once on mount.
    useEffect(() => {
        setTenant(data.tenant)
    }, [])

    // Sends the user to the tenant-scoped login screen.
    const handleSubmit = () => {
        router.push(`/${tenant?.slug}/login`)
    }

    const title = `Esqueci a Senha | ${tenant?.name}`

    return (
        <div className={styles.container}>
            <Head>
                <title>{title}</title>
            </Head>
            <Header
                color={tenant?.primaryColor as string}
                // Fixed: the href previously lacked the leading slash, producing a
                // relative path (cf. router.push(`/${tenant?.slug}/login`) above).
                backHref={`/${tenant?.slug}/forget`}
            />
            <div className={styles.iconArea}>
                {/* NOTE(review): icon key 'mainSent' looks like a typo of 'mailSent'
                    — confirm against the Icon component's key set before changing. */}
                <Icon icon='mainSent' color={tenant?.primaryColor as string} width={99} height={81}/>
            </div>
            <div className={styles.title}>
                Verifique seu e-mail
            </div>
            <div
                className={styles.subtitle}
            >
                Enviamos as instruções para recuperação de senha para o seu e-mail
            </div>
            <div className={styles.inputArea}>
                <Button
                    color={tenant?.primaryColor as string}
                    label="Fazer Login"
                    onClick={handleSubmit}
                    fill
                />
            </div>
        </div>
    )
}
export default ForgetSucess
// Server-side props delivered to the page by getServerSideProps below.
type Props = {
    tenant: Tenant
}
// Resolves the tenant from the route slug before rendering; unknown slugs
// are redirected to the site root.
export const getServerSideProps: GetServerSideProps = async (context) => {
    const tenantSlug = context.query.tenant as string
    const api = useApi(tenantSlug)

    const tenant = await api.getTenant()
    if (!tenant) {
        // No such tenant — bounce to the landing page.
        return { redirect: { destination: '/', permanent: false } }
    }

    return { props: { tenant } }
}
|
import React from 'react'
import InputField from '../../components/form/InputField'
import MultiSelectField from '../../components/form/MultiSelectField'
import SelectField from '../../components/form/SelectField'
// Onboarding step that collects the basics about a new business:
// its name, one category, and one or more operation types.
export default function GettingStartedStore() {
  // Selectable business categories (id/name pairs consumed by SelectField).
  const category_list = ['Hospital', 'Gym', 'Consultancy', 'Garage', 'Store', 'Other']
    .map((name, index) => ({ id: index + 1, name }));

  // Ways the business operates (consumed by MultiSelectField).
  const type_list = ['Appointments', 'Ecommerce', 'Bookings']
    .map((name, index) => ({ id: index + 1, name }));

  return (
    <>
      <form className="sm:mx-auto sm:w-full">
        <div className="space-y-8">
          {/* NOTE(review): value='' with no onChange makes this read-only
              unless InputField manages its own state internally — confirm. */}
          <InputField
            id='business_name'
            type='text'
            label='Business Name'
            placeholder='Ex: Business Inc'
            required={true}
            value=''
          />
          <SelectField
            id='business_category'
            label='Business Category'
            items_array={category_list}
            required={true}
            placeholder='Select Category'
          />
          <MultiSelectField
            id='business_type'
            label='Business Type'
            required={true} value={[]}
            items_array={type_list}
            placeholder='Ex: Selling, Online/Offline Services'
          />
        </div>
      </form>
    </>
  )
}
|
import { ReactNode } from 'react';
import { ButtonDropdownProps } from '../ButtonDropdown';
/**
 * MAIN
 */
/** The three sorting states a column/table can be in. */
export type SortingOptionType = 'DEFAULT' | 'ASC' | 'DESC';
/**
 * Props for the sorting-options dropdown: a ButtonDropdown whose children,
 * items and item-click handling are managed internally.
 */
export type TableSettingsSortingOptionsProps = Omit<
  ButtonDropdownProps,
  'children' | 'onItemClick' | 'items'
> & {
  /**
   * size of button
   * @default medium
   */
  size?: ButtonDropdownProps['size'];
  /**
   * the total number of selected item
   */
  total?: number;
  /**
   * custom text to be displayed
   * @default { default: 'Default', asc: 'Ascending', desc: 'Descending' }
   */
  text?: { default: string; asc: string; desc: string };
  /**
   * icon
   */
  icon?: ReactNode;
  /**
   * selected option
   */
  selected: SortingOptionType;
  /**
   * callback invoked when a sorting option is selected
   * (was mis-documented as "selecting density" — a copy-paste from another component)
   */
  onChange?: (item: any) => void;
};
/**
 * EXPORTS
 */
export default TableSettingsSortingOptionsProps;
|
const {onRequest} = require("firebase-functions/v2/https");
const {getFirestore} = require('firebase-admin/firestore');
/**
 * Register a new user.
 *
 * Expects a POST body with `name`, `numberplate` and `phone`;
 * rejects requests whose phone number is already registered.
 * @param {object} req - The request object
 * @param {object} res - The response object
 */
const addUser = onRequest(async (req, res) => {
  // Only POST carries a registration payload.
  if (req.method !== 'POST') {
    return res.status(400).send('Please send a POST request with user data');
  }

  const { name, numberplate, phone} = req.body;

  // Basic validation — all three fields are mandatory.
  if (!phone || !name || !numberplate) {
    console.log('Phone number, name, and number plate are required');
    return res.status(400).send('Phone number, name, and number plate are required');
  }

  try {
    const db = getFirestore();
    const usersRef = db.collection('users');

    // Reject duplicate phone numbers.
    // NOTE(review): check-then-insert is racy under concurrent requests; a
    // transaction or a phone-keyed document id would make this atomic.
    const querySnapshot = await usersRef.where('phone', '==', phone).get();
    if (!querySnapshot.empty) {
      console.log('A user with this phone number already exists');
      return res.status(400).send('A user with this phone number already exists');
    }

    // Add new user if phone number doesn't exist.
    const newUser = {
      name,
      numberplate,
      phone
    };
    const newUserRef = await usersRef.add(newUser);
    return res.status(200).send(`New user added with ID: ${newUserRef.id}`);
  } catch (error) {
    // Previously any Firestore failure became an unhandled promise rejection;
    // surface it as a 500 instead of leaving the request hanging.
    console.error('Failed to add user:', error);
    return res.status(500).send('Internal server error');
  }
});
module.exports = addUser;
|
############################################################################################
去噪扩散隐式模型(Denoising Diffusion Implicit Models,DDIM)
############################################################################################
在 `DDPM` 中,生成过程被定义为马尔可夫扩散过程的反向过程,在逆向采样过程的每一步,模型预测噪声
`DDIM` 的作者发现,扩散过程并不是必须遵循马尔科夫链,
在之后的基于分数的扩散模型以及基于随机微分等式的理论都有相同的结论。
基于此,`DDIM` 的作者重新定义了扩散过程和逆过程,并提出了一种新的采样技巧,
可以大幅减少采样的步骤,极大的提高了图像生成的效率,代价是牺牲了一定的多样性,
图像质量略微下降,但在可接受的范围内。
扩散模型的回顾
############################################################################################
首先回顾一下 DDPM 模型的机制,在 DDPM 中,真实世界的图像数据用随机变量 :math:`x_0`
表示,它的概率密度记作 :math:`q(x_0)`,
然而它的真实分布未知的,即 :math:`q(x_0)` 具体形式是未知的,
所以我们没办法直接从 :math:`q(x_0)` 采样生成新的图片。
幸运的是,我们有很多 :math:`x_0` 的观测样本,
因此我们可以想办法利用这些观测样本估计出 :math:`q(x_0)` 的一个近似表示,
然后从这个近似表示中抽样生成样本。
核心的思想是,构建一个马尔科夫链式结构,逐步的向 :math:`x_0`
上添加高斯随机噪声,并最终令其演变成一个纯高斯数据(标准正态分布),
把这个过程称为加噪过程,或者前向过程。
它的逆向过程就是逐步降噪的过程,
主要估计出逆向过程中每一步的降噪转换核 :math:`p(x_{t-1}|x_{t})`,
就可以从一个标准正态分布的高斯噪声数据 :math:`x_T`,
逐步的降噪生成一张图片数据。
整个网络的联合概率分布可以表示为 :math:`p(x_{0:T})`,
根据联合概率的链式法则,前向扩散过程的链式分解
.. math::
:label: eq_ddim_211
p(x_{0:T}) = q(x_0) \prod_{t=1}^T q(x_t|x_{t-1})
然而 :math:`q(x_0)` 是具体形式是未知的,但是我们有它的观测样本,
有了 :math:`x_0` 的观测样本之后,相当于 :math:`x_0` 的值已知,
此时可以写成已知 :math:`x_0` 的条件概率分布,
.. math::
:label: eq_ddim_213
q(x_{1:T}|x_0) = q(x_1|x_0) \prod_{t=2}^T q(x_t|x_{t-1}) = \prod_{t=1}^T q(x_t|x_{t-1})
前向扩散过程的转换核 :math:`q(x_t|x_{t-1})` 是一个条件高斯分布,
它的概率密度为
.. math::
:label: eq_ddim_214
q(x_t|x_{t-1}) = \mathcal{N} (\sqrt{\alpha_t} \ x_{t-1}, (1- \alpha_t ) \textit{I} )
:math:`q(x_t|x_{t-1})` 代表着在 :math:`x_{t-1}` 的基础上添加一个高斯噪声得到 :math:`x_{t}`,
有了这个转换(加噪)核,就可以逐步的把一张有意义的图像数据 :math:`x_0`
转换为一个纯高斯噪声数据 :math:`x_T`。
然而这个加噪计算过程需要一步一步计算,计算效率比较差,
这时可以利用条件高斯分布的一个计算技巧,直接从 :math:`x_{0}` 一步计算得到任意时刻的 :math:`x_t`,
这个过程可以表示为条件概率分布 :math:`q(x_t|x_{0})` :
.. math::
:label: eq_ddim_215
q(x_t|x_{0}) = \int q(x_{1:t}|x_0) d x_{1:t-1}
\sim \mathcal{N}(\sqrt{\bar{ \alpha}_t } \ x_0, (1- \bar{ \alpha}_t) \textit{I})
:math:`q(x_t|x_{0})` 的直接算数计算等式为:
.. math::
:label: eq_ddim_216
x_t &= \sqrt{\bar{\alpha}_t } \ x_0 + \sqrt{1- \bar{ \alpha}_t } \ \epsilon_t \ \ \ ,
\bar{\alpha}_t = \prod_{i=1}^t \alpha_i ,\ \ \epsilon_t \sim \mathcal{N}(0,\textit{I})
&\sim \mathcal{N}(\sqrt{\bar{ \alpha}_t } \ x_0, (1- \bar{ \alpha}_t) \textit{I})
加噪过程的逆过程称为降噪过程,降噪过程是对联合概率 :math:`p(x_{0:T})`
按照反向过程进行链式分解:
.. math::
:label: eq_ddim_217
p(x_{0:T}) = p(x_T) \prod_{t=T}^1 p(x_{t-1}|x_{t})
其中 :math:`p(x_T) \sim \mathcal{N}(0,\textit{I})` 是一个标准正态分布,
:math:`p(x_{t-1}|x_{t})` 是逆向过程每一步的转换核(条件分布),
他表示在 :math:`x_{t}` 的基础上去掉一部分高斯噪声得到 :math:`x_{t-1}`
,所以称为降噪过程。
我们的关键就是估计出 :math:`p(x_{t-1}|x_{t})` 的一个近似表示。
根据最大似然估计理论,我们需要极大化观测数据的对数似然 :math:`\ln p(x_0)`
，然而整个网络中随机变量共有 :math:`T+1` 个,可是只有 :math:`x_0`
有观测样本,这时就需要对这些变量的联合概率 :math:`p(x_{0:T})`
进行边际化,进而得到 :math:`x_0` 的边缘概率 :math:`p(x_0)`,
如下式所示,
.. math::
:label: eq_ddim_218
\ln p(x_0) = \ln \int p(x_{0:T} ) d x_{1:T}
没有观测样本的变量 :math:`x_{1:T}` 称为隐变量,
这是一个典型的含有隐变量模型的估计问题。
如 :eq:`eq_ddim_218` 所示,隐变量的存在(需要边际化消除)导致
对数似然函数存在了积分操作,这导致对数函数无法分解成简单的形式,
进而难以求解其梯度,无法直接极大化。
这时可以利用 `Jensen` 不等式,得到对数似然函数的一个下界函数(ELBO),
当满足一定条件时,极大化这个下界函数和极大化对数似然是等价的。
.. math::
:label: eq_ddim_219
\EE[q_{x_0}]{\ln p(x_0)}
& \geq {\mathbb{E}_{q(x_{1:T}|x_0)}\left[\ln \frac{p(x_{0:T})}{q(x_{1:T}|x_0)}\right]}
& \Rightarrow \mathbb{E}_{q(x_{1}|x_0)}\left[\ln p_{{\theta}}(x_0|x_1)\right]
- \sum_{t=2}^{T} \mathbb{E}_{q(x_{t}|x_0)}\left[\KL{q(x_{t-1}|x_t, x_0)}{p_{{\theta}}(x_{t-1}|x_t)}\right]
代入各项之后,最后的目标函数是一个简单的均方误差,这里记作 :math:`L_{\gamma}`,
其中 :math:`\gamma_t` 表示一些常数项,它不影响极大化的结果。
.. math::
:label: eq_ddim_220
L_{\gamma} := \sum_{t=1}^T \gamma_t \EE[q(x_t|x_0)]{ \left\lVert\epsilon_t - {\hat\epsilon}_{{\theta}}(x_t, t)\right\rVert_2^2 }
\ \ , \epsilon_t \sim \mathcal{N}(0,\textit{I})
非马尔科夫前向过程
############################################################################################
重点观察下 DDPM 最终的目标函数 :eq:`eq_ddim_219` 和 :eq:`eq_ddim_220`,
目标函数中最关键的是 KL 散度的项, 这一项是 :math:`q(x_{t-1}|x_t, x_0)` 和 :math:`p_{{\theta}}(x_{t-1}|x_t)`
的 KL 散度。其中 :math:`q(x_{t-1}|x_t, x_0)` 是逆过程的转换核,它是逆过程(图像生成过程,图像采样过程)的核心,
只要有了它就能完成生成过程。:math:`p_{{\theta}}(x_{t-1}|x_t)` 代表模型,我们的目标就是训练 :math:`p_{{\theta}}(x_{t-1}|x_t)`
令其尽量与 :math:`q(x_{t-1}|x_t, x_0)` 相似,它作为 :math:`q(x_{t-1}|x_t, x_0)` 的一个近似解。
单纯的看 :math:`q(x_{t-1}|x_t, x_0)`,它是逆向过程的转换核,貌似和前向过程无关,但这是个条件概率分布,它的条件变量是 :math:`x_t`
和 :math:`x_0`,也就说它依赖 :math:`x_t` 和 :math:`x_0`,那这两项怎么来的呢?注意这里关注的是训练阶段怎么来,
再预测阶段(图像生成阶段) :math:`x_0` 是模型预测得到的,:math:`x_t` 本身就是利用 :math:`q(x_{t-1}|x_t, x_0)` 一步步迭代得到的。
- 先看 :math:`x_0`,在训练阶段 :math:`x_0` 是观测样本,本身就是已知的。
- 再看 :math:`x_t`,最直接的它通过 :math:`q(x_t|x_{t-1})` 计算得到,然而我们利用线性高斯的特性,可以直接通过 :math:`q(x_t|x_0)` 计算,
绕过了 :math:`q(x_t|x_{t-1})` ,这意味这 :math:`x_t` 可以不依赖 :math:`x_{t-1}`。
看下 :math:`q(x_t|x_0)` 是怎么来的,
它是在联合概率的 :math:`q(x_{1:t}|x_0)` 的基础上边际化(消除变量 :math:`x_{1:t-1}` )得到的,
如下式所示,
.. math::
:label: eq_ddim_221
q(x_t|x_0) = \int q(x_{1:t}|x_0) d x_{1:t-1}
在原始的 DDPM 模型中,对联合概率 :math:`q(x_{1:t})` 的进一步分解是按照马尔科夫链的形式分解的,
即
.. math::
:label: eq_ddim_222
q(x_t|x_0) &= \int q(x_{1:t}|x_0) d x_{1:t-1}
&= \int \prod_{i=1}^t q(x_{i}|x_{i-1}) d x_{1:t-1}
然而,从概率计算规则上讲,不管 :math:`q(x_{1:t})` 怎么分解,最终都是要把它通过积分消除掉,它可以有很多种不同的分解方式的,
如何分解它并不影响积分后的结果,即不论 :math:`q(x_{1:t})` 的分解方式是什么,最后 :math:`q(x_t|x_0)` 的结果都是一样的。
也就是说,这种马尔科夫链式分解的方式不是必须的。
**如果放弃了马尔科夫链式结构的假设,那就得到了一个非马尔科夫的前向模型**。
从以上的分析结果看,**如果想放弃马尔科夫的假设,但又想得到和 DDPM 等价的模型(即不改变DDPM的目标函数)**,
**只需要确保** :math:`q(x_t|x_0)` **和** :math:`q(x_{t-1}|x_t, x_0)` **的表达式与DDPM一样就可以了**。
接下来就尝试构建一个非马尔科夫假设下的模型。
现在,我们重新定义 :math:`q(x_{1:T}|x_0)` 的分解方式,
在这过程中引入一个人工定义的自由参数 :math:`\sigma^2`,它代表 :math:`q_{\sigma}(x_{t-1}|x_t,x_0)` 的方差,
它的具体值稍后会讨论。
定义 :math:`q(x_{1:T}|x_0)` 的分解方式为
.. math::
:label: eq_ddim_225
q_\sigma (x_{1:T}|x_0) := q_{\sigma}(x_T|x_0) \prod_{t=2}^T q_{\sigma}(x_{t-1}|x_t,x_0)
其中 :math:`q_\sigma(x_T|x_{0})` 维持与DDPM一样,
.. math::
:label: eq_ddim_226
q_\sigma(x_T|x_{0})
\sim \mathcal{N}(\sqrt{\bar{ \alpha}_T } \ x_0, (1- \bar{ \alpha}_T) \textit{I})
对任意 :math:`t>1`,定义 :math:`q_{\sigma}(x_{t-1}|x_t,x_0)` 的分布为
.. math::
:label: eq_ddim_227
q_{\sigma}(x_{t-1}|x_t,x_0) \sim \mathcal{N} \left(
\underbrace{
\sqrt{\bar{\alpha}_{t-1}} \ x_0
+ \sqrt{1-\bar{\alpha}_{t-1}-\sigma_t^2} \cdot \frac{x_t - \sqrt{\bar{\alpha}_t} \ x_0 }{\sqrt{1-\bar{\alpha}_t}}
}_{\text{期望}}
, \underbrace{ \sigma_t^2 \textit{I} }_{\text{方差}}
\right )
定义完成后,还有一个问题,就是 :eq:`eq_ddim_226` 是否对任意的 :math:`1\le t \le T` 成立,
在 DDIM 的原论文 :footcite:`song2022denoising` 中给出了证明,这里不再赘述证明过程,有兴趣的可以查看原论文的附录B。
结论是:如下分布对任意的 :math:`1\le t \le T` 都是成立的。
.. math::
:label: eq_ddim_228
q_\sigma(x_t|x_{0})
\sim \mathcal{N}(
\sqrt{\bar{ \alpha}_t } \ x_0 ,
(1- \bar{ \alpha}_t)\textit{I})
新的分解方式( :eq:`eq_ddim_225`)下, 没有了马尔科夫链式结构的假设。
其中 :math:`q_{\sigma}(x_{t-1}|x_t,x_0)` 仍然是逆过程的转换核,在逆过程中
,:math:`x_{t-1}` 同时依赖 :math:`x_t` 和 :math:`x_0`。
在原始的DPM模型中,训练的模型就是直接预测 :math:`x_0` 进而得到 :math:`q_{\sigma}(x_{t-1}|x_t,x_0)` 近似表示。
在后来的 DDPM 改进中,利用了 :math:`x_0,x_t,\epsilon_t` 三者的关系式( :eq:`eq_ddim_216`),
用 :math:`x_t,\epsilon_t` 替换了 :math:`x_0`,进而令模型去预测 :math:`\epsilon_t`。
现在 :math:`q_{\sigma}(x_{t-1}|x_t,x_0)` 的( :eq:`eq_ddim_227`)期望又直接依赖 :math:`x_0` 了,
**兜兜转转居然又回到了最初**。
**在这里为了能利用上已经训练好的 DDPM 模型(预测噪声的模型)**,
再一次利用关系式 :eq:`eq_ddim_216` 得到预测的 :math:`\hat{x}_0`
,如下式所示
.. math::
:label: eq_ddim_229
\hat{x}_0 =f_{\theta}^{(t)}(x_t)= \frac{x_t -\sqrt{1- \bar{ \alpha}_t } \ \hat{\epsilon}_t (x_t,t)}{ \sqrt{\bar{\alpha}_t } }
这样我们利用上已经训练好的 DDPM模型 :math:`\hat{\epsilon}_t (x_t,t)` ,
不需要再重新训练一个模型。利用 :eq:`eq_ddim_229` 得到 :math:`\hat{x}_0`
,代入到 :eq:`eq_ddim_227` 进而就得到了逆向转换核 :math:`q_{\sigma}(x_{t-1}|x_t,x_0)` 的(近似)分布
.. math::
:label: eq_ddim_230
p_{{\theta},\sigma}(x_{t-1}|x_t) &\sim \mathcal{N} \left(
\sqrt{\bar{\alpha}_{t-1}} \ \hat{x}_0
+ \sqrt{1-\bar{\alpha}_{t-1}-\sigma_t^2} \cdot \frac{x_t - \sqrt{\bar{\alpha}_t} \ \hat{x}_0 }{\sqrt{1-\bar{\alpha}_t}}
,\sigma_t^2 \textit{I}
\right )
&\approx q_{\sigma}(x_{t-1}|x_t,x_0)
我们整理下整个逆向生成过程,
对于 :math:`x_T`,
.. math::
:label: eq_ddim_231
p(x_T) = \mathcal{N}(0, \textit{I})
对于 :math:`p(x_{t-1}|x_t)`,
.. math::
:label: eq_ddim_232
p(x_{t-1}|x_t) = \left \{ \begin{array}{rcl}
&\mathcal{N}(\hat{x}_0(x_1,t=1), \sigma^2_1 \textit{I} \ ) &\mbox{if}\quad t =1\\
&q_{\sigma}(x_{t-1}|x_t,\hat{x}_0(x_t,t)) &\mbox{if}\quad 1 \lt t \le T
\end{array} \right .
根据 :eq:`eq_ddim_230`,:math:`x_{t-1}` 具体地计算(采样)公式为:
.. math::
:label: eq_ddim_233
x_{t-1} &= \sqrt{\bar{\alpha}_{t-1}} \ \hat{x}_0
+ \sqrt{1-\bar{\alpha}_{t-1}-\sigma_t^2} \cdot \frac{x_t - \sqrt{\bar{\alpha}_t} \ \hat{x}_0 }{\sqrt{1-\bar{\alpha}_t}}
+ \sigma_t \epsilon_t^*
&=\sqrt{\bar{\alpha}_{t-1}} \underbrace{ \left (
\frac{x_t -\sqrt{1- \bar{ \alpha}_t } \ \hat{\epsilon}_t (x_t,t)}{ \sqrt{\bar{\alpha}_t } } \right )
}_{\text{predict } x_0}
+ \underbrace{\sqrt{1-\bar{\alpha}_{t-1}-\sigma_t^2} \quad \hat{\epsilon}_t (x_t,t)}_{\text{direction pointing to }x_t}
+ \underbrace{\sigma_t \epsilon_t^{*}}_{\text{random noise}}
& \text{where}\quad \epsilon_t^{*} \sim \mathcal{N}(0,\textit{I})
在这个新的定义中,前向过程没有了马尔科夫的假设,
并且逆向转换核 :math:`q_{\sigma}(x_{t-1}|x_t,x_0)` 也不再满足马尔科夫链的定义(仅依赖上一个状态),
因此本论文作者称之为非马尔科夫扩散过程。
同时可以直接利用之前已经训练好的 DDPM 预测噪声的模型,不用再重新训练一个新模型。
加速采样
########################################################
回顾下 :eq:`eq_ddim_227`、 :eq:`eq_ddim_232`、 :eq:`eq_ddim_233`,多了一个方差参数 :math:`\sigma^2`
,它代表的是 :math:`q_{\sigma}(x_{t-1}|x_t,x_0)` 的方差。
实际上,它并不是在 DDIM 中额外新增的,现在回顾一下 DDPM 的内容,在 DDPM 中这个方差也是存在的( :eq:`eq_ddpm_036`)
,只是在 DDPM 中 :math:`q_{\sigma}(x_{t-1}|x_t,x_0)` 是直接通过贝叶斯定理推导出来的( :eq:`eq_ddpm_036`),
推导的结果中这个方差有一个固定的表达式,即
.. math::
:label: eq_ddim_234
\sigma^2 = \frac{(1 - \alpha_t)(1 - \bar\alpha_{t-1})}{ 1 -\bar\alpha_{t}}
在 DDIM 中,把 :math:`\sigma^2` 当做一个可以人工调整的超参数,
这样就可以通过调整方差 :math:`\sigma^2` 而得到不一样的效果。
而在 DDIM 中,如果令 :eq:`eq_ddim_234` 成立,那么 DDIM 就退化成了 DDPM。
这里给出推导的过程。
令 :eq:`eq_ddim_234` 成立,并将其代入到 :math:`q_{\sigma}(x_{t-1}|x_t,x_0)` 的期望中,有
.. math::
:label: eq_ddim_235
\EE{q_{\sigma}(x_{t-1}|x_t,x_0)} &=\sqrt{\bar{\alpha}_{t-1}} \ x_0
+ \sqrt{1-\bar{\alpha}_{t-1}-\sigma_t^2} \cdot \frac{x_t - \sqrt{\bar{\alpha}_t} \ x_0 }{\sqrt{1-\bar{\alpha}_t}}
&= \sqrt{\bar{\alpha}_{t-1}} \ x_0
+ \sqrt{1-\bar{\alpha}_{t-1}- \frac{(1 - \alpha_t)(1 - \bar\alpha_{t-1})}{ 1 -\bar\alpha_{t}} } \cdot \frac{x_t - \sqrt{\bar{\alpha}_t} \ x_0 }{\sqrt{1-\bar{\alpha}_t}}
&= \sqrt{\bar{\alpha}_{t-1}} \ x_0
+ \sqrt{ \frac{(1-\bar{\alpha}_{t-1})(1 -\bar\alpha_{t})- (1 - \alpha_t)(1 - \bar\alpha_{t-1})}{ 1 -\bar\alpha_{t} } } \cdot \frac{x_t - \sqrt{\bar{\alpha}_t} \ x_0 }{\sqrt{1-\bar{\alpha}_t}}
&= \sqrt{\bar{\alpha}_{t-1}} \ x_0
+ \sqrt{ \frac{(1-\bar{\alpha}_{t-1})(\alpha_{t} -\bar\alpha_{t}) }{ 1 -\bar\alpha_{t} } } \cdot \frac{x_t - \sqrt{\bar{\alpha}_t} \ x_0 }{\sqrt{1-\bar{\alpha}_t}}
&= \sqrt{\bar{\alpha}_{t-1}} \ x_0
+ \sqrt{ \frac{\alpha_{t}(1-\bar{\alpha}_{t-1})^2 }{ 1 -\bar\alpha_{t} } } \cdot \frac{x_t - \sqrt{\bar{\alpha}_t} \ x_0 }{\sqrt{1-\bar{\alpha}_t}}
&= \sqrt{\bar{\alpha}_{t-1}} \ x_0
+ \sqrt{ \frac{\alpha_{t}(1-\bar{\alpha}_{t-1})^2 }{ (1 -\bar\alpha_{t})^2 } } \cdot (x_t - \sqrt{ \bar{\alpha}_t} \ x_0 )
&= \sqrt{\bar{\alpha}_{t-1}} \ x_0
+ \frac{ \sqrt{\alpha_{t} } (1-\bar{\alpha}_{t-1}) (x_t - \sqrt{ \bar{\alpha}_t} \ x_0 ) }{ (1 -\bar\alpha_{t}) }
&= \frac{ (1 -\bar\alpha_{t}) \sqrt{\bar{\alpha}_{t-1}} \ x_0 }{(1 -\bar\alpha_{t})}
+ \frac{ \sqrt{\alpha_{t} } (1-\bar{\alpha}_{t-1}) x_t - \sqrt{\alpha_{t} } (1-\bar{\alpha}_{t-1}) \sqrt{ \bar{\alpha}_t} \ x_0 }{ (1 -\bar\alpha_{t}) }
&=\frac{ (1 -\bar\alpha_{t}) \sqrt{\bar{\alpha}_{t-1}} \ x_0
+ \sqrt{\alpha_{t} } (1-\bar{\alpha}_{t-1}) x_t
- (1-\bar{\alpha}_{t-1}) \sqrt{ \alpha_{t} \bar{\alpha}_t } \ x_0 }
{ (1 -\bar\alpha_{t}) }
&=\frac{
\sqrt{\alpha_{t} } (1-\bar{\alpha}_{t-1}) x_t
+(1 -\bar\alpha_{t}) \sqrt{\bar{\alpha}_{t-1}} \ x_0
- (1-\bar{\alpha}_{t-1}) \sqrt{ \alpha_{t}^2 \bar{\alpha}_{t-1} } \ x_0 }
{ (1 -\bar\alpha_{t}) }
&=\frac{
\sqrt{\alpha_{t} } (1-\bar{\alpha}_{t-1}) x_t
+(1 -\bar\alpha_{t}) \sqrt{\bar{\alpha}_{t-1}} \ x_0
- (1-\bar{\alpha}_{t-1}) \alpha_{t}\sqrt{ \bar{\alpha}_{t-1} } \ x_0 }
{ (1 -\bar\alpha_{t}) }
&=\frac{
\sqrt{\alpha_{t} } (1-\bar{\alpha}_{t-1}) x_t
+[(1 -\bar\alpha_{t})
- (1-\bar{\alpha}_{t-1}) \alpha_{t}] \sqrt{ \bar{\alpha}_{t-1} } \ x_0 }
{ (1 -\bar\alpha_{t}) }
&=\frac{
\sqrt{\alpha_{t} } (1-\bar{\alpha}_{t-1}) x_t
+(1 -\bar\alpha_{t}
- \alpha_{t} +\bar{\alpha}_{t}) \sqrt{ \bar{\alpha}_{t-1} } \ x_0 }
{ (1 -\bar\alpha_{t}) }
&=\frac{
\sqrt{\alpha_{t} } (1-\bar{\alpha}_{t-1}) x_t
+ \sqrt{ \bar{\alpha}_{t-1} }(1 - \alpha_{t} ) \ x_0 }
{ (1 -\bar\alpha_{t}) }
可以看到,这和上一章 DDPM 中 :math:`q_{\sigma}(x_{t-1}|x_t,x_0)` 的期望( :eq:`eq_ddpm_036`)是完全一样的,
可以得出:**DDIM 可以看做是 DDPM 的扩展, DDPM 是 DDIM 的一个特例**。
可以想到,:math:`\sigma^2` 另一个特殊的选择是令 :math:`\sigma^2=0`
,这意味 :math:`q_{\sigma}(x_{t-1}|x_t,x_0)` **的方差为** :math:`0`
。最直接的 :eq:`eq_ddim_233` 中的随机噪声项 :math:`\sigma_t \epsilon_t^{*}` 没了,
相当于 :math:`x_{t-1}` 直接等于了 :math:`q_{\sigma}(x_{t-1}|x_t,x_0)` 的期望。
想一想这意味着什么?
1. 从随机采样的角度看, :math:`x_{t-1}` 不再是从 :math:`q_{\sigma}(x_{t-1}|x_t,x_0)` 进行随机采样,
而是直接选择 :math:`q_{\sigma}(x_{t-1}|x_t,x_0)` 的期望,而又由于 :math:`q_{\sigma}(x_{t-1}|x_t,x_0)`
是高斯分布,它的期望就是它概率密度最大的点,这又相当于 :math:`x_{t-1}` 每次都是选取的 :math:`q_{\sigma}(x_{t-1}|x_t,x_0)`
概率密度最大的点,相当于最大概率采样。
2. 从数值计算角度看,没有了随机项 :math:`\sigma_t \epsilon_t^{*}`,成了确定性等式计算,不再具有随机性。
DDIM 原论文中阐述方差为 :math:`0` 时可以加速采样(图像生成)过程,
它从子序列的角度解释,不是很容易理解。这里我们就从随机性的直观角度理解。
- 方差不为 :math:`0` 时, :math:`x_t` 到 :math:`x_{t-1}` 的每一步是随机性采样,随机性导致不可控,会走很多弯路。就好比你从起点 :math:`x_T`
走到终点 :math:`x_0`,每一步你不好好走,虽然指向目的地的方向(期望)确定了,但是总是在这个方向上加上一个随机量(方差)作为你的步进,
自然你会走很多弯路绕路,走的不是直线。这样你到达目的地 :math:`x_0` 的步数(时间)就变长了。
- 方差为 :math:`0` 时, :math:`x_t` 到 :math:`x_{t-1}` 的每一步,你就专注的向着目的地笔直前进(沿着期望的方向),不搞幺蛾子,自然到达目的地就快了很多。
回顾一下,我们讲的扩散模型的三个等价表示( :numref:`ch_ddpm_score_based`),
其中一个就是基于分数的等价模型,所谓分数(score),不就是 :math:`p(x_t)` 的梯度么,
想想梯度下降法,梯度是什么?就是指向 :math:`p(x_0)` 的方向,
:math:`x_t` 到 :math:`x_{t-1}` 的每一步就是在沿着 :math:`p(x_t)` 的梯度向着 :math:`p(x_0)` 前进。
根据 :numref:`ch_ddpm_score_based` 的内容,梯度 :math:`\nabla\log p(x_t)`
和预测噪声 :math:`\hat{\epsilon}_t(x_t,t)` 的关系为
.. math::
:label: eq_ddim_236
\nabla\log p(x_t) = -\frac{1}{\sqrt{1 - \bar\alpha_t}} \hat{\epsilon}_t(x_t,t)
:math:`\hat{x}_0` 可以用梯度替换
.. math::
:label: eq_ddim_237
\hat{x}_0 = \frac{x_t + (1 - \bar\alpha_t)\nabla\log p(x_t)}{\sqrt{\bar\alpha_t}}
把 :eq:`eq_ddim_237` 代入到 :math:`x_{t-1}` 的迭代公式 :eq:`eq_ddim_233` 中
.. math::
:label: eq_ddim_238
x_{t-1} &= \sqrt{\bar{\alpha}_{t-1}} \ \hat{x}_0
+ \sqrt{1-\bar{\alpha}_{t-1}-\sigma_t^2} \cdot \frac{x_t - \sqrt{\bar{\alpha}_t} \ \hat{x}_0 }{\sqrt{1-\bar{\alpha}_t}}
+ \sigma_t \epsilon_t^*
&= \sqrt{\bar{\alpha}_{t-1}} \frac{x_t + (1 - \bar\alpha_t)\nabla\log p(x_t)}{\sqrt{\bar\alpha_t}}
& \quad + \sqrt{1-\bar{\alpha}_{t-1}-\sigma_t^2} \cdot
\left [ \frac{x_t - \sqrt{\bar{\alpha}_t} }{\sqrt{1-\bar{\alpha}_t}}
- \frac{\sqrt{\bar{\alpha}_t}}{\sqrt{1-\bar{\alpha}_t}} \cdot \frac{x_t + (1 - \bar\alpha_t)\nabla\log p(x_t)}{\sqrt{\bar\alpha_t}}
\right ]
&\quad + \sigma_t \epsilon_t^*
&= \frac{x_t \sqrt{\bar{\alpha}_{t-1}} + \sqrt{\bar{\alpha}_{t-1}} (1 - \bar\alpha_t)\nabla\log p(x_t)}{\sqrt{\bar\alpha_t}}
& \quad + \sqrt{1-\bar{\alpha}_{t-1}-\sigma_t^2} \cdot
\left [ \frac{x_t\sqrt{\bar{\alpha}_t} - \bar{\alpha}_t }{\sqrt{\bar{\alpha}_t} \sqrt{1-\bar{\alpha}_t}}
- \frac{x_t\sqrt{\bar{\alpha}_t} + \sqrt{\bar{\alpha}_t}(1 - \bar\alpha_t)\nabla\log p(x_t)}{\sqrt{\bar\alpha_t} \sqrt{1-\bar{\alpha}_t}}
\right ]
& \quad + \sigma_t \epsilon_t^*
&= \frac{x_t \sqrt{\bar{\alpha}_{t-1}}}{\sqrt{\bar\alpha_t}}
+ \frac{\sqrt{\bar{\alpha}_{t-1}} (1 - \bar\alpha_t)\nabla\log p(x_t)}{ \sqrt{\bar\alpha_t}}
& \quad + \sqrt{1-\bar{\alpha}_{t-1}-\sigma_t^2} \cdot
\frac{-\bar{\alpha}_t - \sqrt{\bar{\alpha}_t}(1 - \bar\alpha_t)\nabla\log p(x_t)}{\sqrt{\bar\alpha_t} \sqrt{1-\bar{\alpha}_t}}
& \quad + \sigma_t \epsilon_t^*
&= \frac{x_t \sqrt{\bar{\alpha}_{t-1}} }{ \sqrt{\bar\alpha_t}}
+ \frac{\sqrt{\bar{\alpha}_{t-1}} (1 - \bar\alpha_t)\nabla\log p(x_t)}{ \sqrt{\bar\alpha_t} }
& \quad - \sqrt{1-\bar{\alpha}_{t-1}-\sigma_t^2} \cdot
\frac{\bar{\alpha}_t + \sqrt{\bar{\alpha}_t}(1 - \bar\alpha_t)\nabla\log p(x_t)}{\sqrt{\bar\alpha_t} \sqrt{1-\bar{\alpha}_t}}
& \quad + \sigma_t \epsilon_t^*
&= \frac{x_t \sqrt{\bar{\alpha}_{t-1}} }{ \sqrt{\bar\alpha_t}}
+ \frac{\sqrt{\bar{\alpha}_{t-1}} (1 - \bar\alpha_t)\nabla\log p(x_t)}{ \sqrt{\bar\alpha_t}}
& \quad -
\frac{ \sqrt{1-\bar{\alpha}_{t-1}-\sigma_t^2} \sqrt{\bar{\alpha}_t}(1 - \bar\alpha_t)\nabla\log p(x_t)}{\sqrt{\bar\alpha_t} \sqrt{1-\bar{\alpha}_t}}
& \quad + \frac{\bar{\alpha}_t \sqrt{1-\bar{\alpha}_{t-1}-\sigma_t^2} }{\sqrt{\bar\alpha_t} \sqrt{1-\bar{\alpha}_t}}
& \quad + \sigma_t \epsilon_t^*
&= \frac{x_t }{ \sqrt{\alpha_t}}
+ \frac{\sqrt{\bar{\alpha}_{t-1}} (1 - \bar\alpha_t)\nabla\log p(x_t)}{ \sqrt{\bar\alpha_t}}
& \quad -
\frac{ \sqrt{1-\bar{\alpha}_{t-1}-\sigma_t^2} \sqrt{\bar{\alpha}_t }\sqrt{(1 - \bar\alpha_t)}\nabla\log p(x_t)}{\sqrt{\bar\alpha_t} }
& \quad + \frac{\sqrt{\bar{\alpha}_t} \sqrt{1-\bar{\alpha}_{t-1}-\sigma_t^2} }{ \sqrt{1-\bar{\alpha}_t}}
& \quad + \sigma_t \epsilon_t^*
&= \frac{x_t }{ \sqrt{\alpha_t}}
&\quad + \frac{ \left [ \sqrt{\bar{\alpha}_{t-1}} (1 - \bar\alpha_t)
- \sqrt{1-\bar{\alpha}_{t-1}-\sigma_t^2} \sqrt{\bar{\alpha}_t }\sqrt{(1 - \bar\alpha_t)} \right ]
\nabla\log p(x_t)}{ \sqrt{\bar\alpha_t}}
& \quad + \frac{\sqrt{\bar{\alpha}_t} \sqrt{1-\bar{\alpha}_{t-1}-\sigma_t^2} }{ \sqrt{1-\bar{\alpha}_t}}
& \quad + \sigma_t \epsilon_t^*
& := A x_t + B \nabla\log p(x_t) + C + \sigma_t \epsilon_t^*
现在看明白了吧,逆向生成过程中 :math:`x_t` 的迭代(降噪)过程,其实就是沿着 :math:`x_t` 的梯度在前进。
**放开你的想象力,这和利用梯度下降法求解最优参数是不是一样的**!!!
**那是不是意味着我可以把各种高级的优化器算法、自适应性学习率算法等等用在这里**,比如 ``adam`` 算法等等。
如果令 :math:`\sigma_t \neq 0`,意味着保留了随机项,生成的多样性更好,但是收敛速度会下降。
反之,令 :math:`\sigma_t = 0`,去掉随机项,收敛速度加快,但会损失多样性。
**能不能兼顾速度和多样性呢?** 显然是可以的,设计一个动态自适应的调度算法,开始一段时刻,
令 :math:`\sigma_t = 0` 加快前进收敛速度,最后一段(个)时刻,再令 :math:`\sigma_t \neq 0`
,增加一点多样性,可以线性控制,也可以余弦控制,总之可以把自适应调度那套玩法借鉴过来。
同样系数 :math:`A` 和 :math:`B` 类似于学习率作用,同样可以采用自适应学习率那套玩法。
在 DDIM 论文中,虽然提出了自由方差参数 :math:`\sigma_t`,并做一些实验,
但论文中把只有 :math:`\sigma_t = 0` 的情况定义成 DDIM 模型,
意为 `隐式(implicit)扩散模型`。
关于算法的实现代码,可以参考论文作者的实现 https://github.com/ermongroup/ddim.git
或者 ``huggingface`` 开发的扩散模型工具包 ``diffusers`` 的源码,
建议看 ``diffusers`` 的源码,代码写的非常清晰工整,并且在关键点上有注释。
参考文献
########################################################
.. footbibliography::
.. meta::
:description lang=zh_CN: 潜在扩散模型
:keywords: ddim,隐式扩散模型,Denoising Diffusion Implicit Models,DDIM,DDPM,DPM,扩散模型
|
import React, { useContext } from 'react';
import { Navigate, useLocation } from 'react-router-dom';
import { AuthContext } from '../Providors/AuthProvider';
import { Dna } from 'react-loader-spinner';
const PrivetRoute = ({ children }) => {
const location = useLocation();
const { user, loading } = useContext(AuthContext);
if (loading) {
return (
<div className="h-80 mt-20 flex items-center justify-center">
<Dna
visible={true}
height="80"
width="80"
ariaLabel="dna-loading"
wrapperStyle={{}}
wrapperClass="dna-wrapper"
/>
</div>
);
}
if (user) {
return children;
}
return <Navigate to="/login" state={{ from: location }} replace></Navigate>
};
export default PrivetRoute;
|
"""Problem
This problem was asked by Netflix.
Given an array of integers, determine whether it contains a Pythagorean triplet. Recall that a Pythagorean triplet (a, b, c) is defined by the equation a^2 + b^2 = c^2.
Solution
One simple solution involves looping through the array three times, trying each possible combination of three numbers to see if the Pythagorean property holds true. We can make this a little faster by precomputing the squares of each number."""
def triplet(array):
    """Return True if ``array`` contains a Pythagorean triplet.

    A triplet (a, b, c) must satisfy a^2 + b^2 = c^2 with a, b and c taken
    from three *distinct* positions in the array.  The original version
    iterated over values and could reuse the same element three times, so
    e.g. ``[0]`` incorrectly returned True (0 + 0 == 0).

    Brute force: O(N^3) time after precomputing the squares once (O(N)
    extra space).
    """
    squares = [x ** 2 for x in array]
    n = len(squares)
    for i in range(n):
        for j in range(i + 1, n):
            for k in range(n):
                # k indexes the candidate hypotenuse; it must be a third,
                # distinct position (values may still coincide).
                if k == i or k == j:
                    continue
                if squares[i] + squares[j] == squares[k]:
                    return True
    return False
"""However, with this algorithm we cannot get around the fact that using three loops corresponds to O(N^3) time complexity, where N is the total number of integers.
A better way to go about this is to first sort the squared array. Without loss of generality, we can assume that a < b < c, and so the elements of our triplet appear in that order in our new array. Given this information, we can apply the following algorithm.
First, let us assume that c is the last element in the array. Then the lowest possible value of a will be the first element, and the highest possible value of b will be the second-to-last element. Now we repeatedly compare a + b against c, and perform the following:
If a + b < c, move the index of a up in the list, to make our squared total higher.
If a + b > c, move the index of b down in the list, to make our squared total lower.
If a + b = c, return True, as we have found a solution.
Once a and b cross paths, we know there cannot be any more solutions with our current value of c, so we decrement c and try again. If we check all values of c and fail to find a solution, there cannot be a triplet."""
def triplet(array):
    """Decide whether ``array`` contains a Pythagorean triplet.

    Works on the sorted squares: fix the largest candidate square as the
    hypotenuse and sweep two pointers toward each other looking for a pair
    of smaller squares that sums to it.  O(N^2) time after an O(N log N)
    sort.
    """
    squares = sorted(x * x for x in array)
    for hi in range(len(squares) - 1, 1, -1):
        target = squares[hi]
        lo, mid = 0, hi - 1
        while lo < mid:
            pair_sum = squares[lo] + squares[mid]
            if pair_sum == target:
                return True
            if pair_sum < target:
                lo += 1  # sum too small: raise the lower leg
            else:
                mid -= 1  # sum too large: lower the upper leg
    return False
"""With this algorithm, we only need to iterate through the array once for each value of c, so the runtime will be O(N^2)."""
|
# 初心者視点メモ [2022.03 青木]
## なぜ git が怖いのか
- 簡単な操作(ブランチ作成、クローン、コミット、プッシュくらい)は「なんとなく」の理解でできてしまい、そもそも自分の git への理解があっているのか、どこまで理解すれば十分なのかがわかっていないまま使ってるから
- 大事なもの(リモートリポジトリのもの)を誤った操作で消してしまいそう
- ローカルで行った変更がどこかに消えてしまう(上書きされてしまう)不安がある
- 自分が今触っているものがどのような管理下におかれたものかがいまいちわからない(立ち位置がわからない的なことかな?)
- 細かくコミット記録などが残るので緊張する
## 現在どのような git の使い方をしているか
### GitHub(チーム開発で利用。CDK のソースコード管理など)
- ツールとしては Sourcetree を利用
- ほぼ私一人が修正をいれるリポジトリを使用中(そのため通常はコンフリクト等は起こらない)
1. 目的別にブランチを作成(GitHub のブラウザにて)し、Sourcetree でブランチごとにクローンしてきて別フォルダ(ローカルリポジトリ)を作成
2. それぞれのコーディング作業は VS Code で行い、Sourcetree 上のファイルステータスで差分を確認
3. Sourcetree で「インデックスに追加」後、コミット
4. 「プッシュ」に印がつくのでそこからリモートブランチにプッシュ
5. ブラウザの GitHub のページから PR を作成(Reviewer を指定)
6. レビューのコメントをもらったらコメント返信。ソースは VS Code で編集して Sourcetree から 2-4 の手順で修正をプッシュまで行う
### gitlab(環境管理用の設定ファイルなど)
- 部分的な変更、ファイル追加が多いため、ローカルにクローンせずに直接 gitlab のブラウザ画面上ですべて操作
- develop ブランチにプッシュすることもないため(ブランチ内で完結する作業がほとんど)MR もほとんど利用していない
## git で躓いたところ、モヤモヤするところ
- git を操作するのに何のツールを使うのがいいの?(最初 Git GUI を使ってみたが上手く使えず、数か月前に Sourcetree に変えた)それともすべてターミナルのコマンドで操作すべき?
- ローカルリポジトリはリモートリポジトリと 1:1 にすべき?ローカルリポジトリをどう扱っていくのかが腑に落ちていない(そのため現状ブランチごとにローカルリポジトリを作ってしまっているような状態かと…)
- 複数のブランチを同時に作業したい場合の良い方法がわからない(それぞれ同じソースを別の目的で並行して編集したい)
- origin/HEAD がよくわからない
- git 管理されているファイル(フォルダ)はローカルで勝手にコピーとかして使うのはまずい?
|
import { FC } from 'react'
import cn from 'clsx'
import Link from 'next/link'
import { Logo, Container } from '@components/ui'
import s from './Footer.module.css'
import Newsletter from './Newsletter'
import { Link as LinkType, useMenu } from '@lib/api/page/menu'
import { useTranslation } from 'react-i18next'
import { CMS } from 'cms'
// Props accepted by the Footer component.
interface Props {
  className?: string // extra classes merged into the footer root element
  children?: any // accepted for API symmetry; not rendered by Footer
}
// Site footer: CMS-driven main and legal menus, logo link, newsletter
// signup, social links, and the copyright/credit strip.
const Footer: FC<Props> = ({ className }) => {
  const rootClassName = cn(s.root, className)
  // Menu entries maintained in the CMS; `main` fills the primary menu
  // column, `legal` the legal-links column.
  const { main, legal } = useMenu()
  const { t } = useTranslation()
  return (
    <footer className={rootClassName}>
      <Container>
        <div className="grid grid-cols-1 lg:grid-cols-12 gap-4 md:gap-8 border-b border-accent-0 py-12 text-accent-0 transition-colors duration-150">
          {/* Left half: menus + logo */}
          <div className="flex flex-col justify-between col-span-1 lg:col-span-6 gap-y-12">
            <div className="grid grid-cols-1 md:grid-flow-col max-w-lg gap-y-6">
              <div className="grid grid-cols-1 md:grid-cols-2 grid-rows-4 gap-x-4">
                <h3 className="uppercase font-bold md:col-span-2 ">
                  <CMS blockId="footer__menu_title" single={CMS.Text} />
                </h3>
                {main.map(renderLink)}
              </div>
            </div>
            <div className="grid grid-cols-1 md:grid-rows-4 gap-x-4">
              <h3 className="uppercase font-bold">
                <CMS blockId="footer__legal_menu_title" single={CMS.Text} />
              </h3>
              {legal.map(renderLink)}
            </div>
            <div>
              <Link href="/">
                <a className="flex flex-initial items-center font-bold md:mr-24 my-4">
                  <span className="mr-2">
                    <Logo />
                  </span>
                </a>
              </Link>
            </div>
          </div>
          {/* Right half: newsletter signup + social links */}
          <div className="col-span-1 lg:col-span-6">
            <div className="py-4 border-b-2">
              <Newsletter />
            </div>
            <div className="py-4">
              <CMS blockId="footer__socials" single={CMS.Socials} />
            </div>
          </div>
        </div>
        {/* Bottom strip: localized copyright + agency credit */}
        <div className="pt-6 pb-10 flex flex-col md:flex-row justify-between items-center space-y-4 text-secondary text-sm">
          <div>
            <span>{t('footer.copyright')}</span>
          </div>
          <div className="flex items-center text-primary text-sm">
            <span className="text-secondary">Created by</span>
            <a
              rel="noopener noreferrer"
              href="https://jana.wtf"
              aria-label="Mlocco link"
              target="_blank"
              className="text-secondary"
            >
              <h3 className="pl-2">MLOCCOCO AGENCY</h3>
            </a>
          </div>
        </div>
      </Container>
    </footer>
  )
}
const renderLink = (link: LinkType) => (
<span key={link.href} className="py-3 md:py-0 md:pb-4">
<Link href={link.href}>
<a className="text-accent-0 hover:text-secondary transition ease-in-out duration-150">
{link.label}
</a>
</Link>
</span>
)
export default Footer
|
import {
collection,
doc,
getFirestore,
onSnapshot,
runTransaction,
} from 'firebase/firestore';
import { v4 as uuidv4 } from 'uuid';
import { obtenerFechaActual } from '../helpers/dateToday';
import { Unsubscribe } from 'redux';
// One AFIP entry to accumulate into the monthly per-alias totals.
interface AfipDetailProps {
  monto: number; // amount to add for this entry
  aliasCuenta: string; // bank-account alias the amount is attributed to
}
// Atomically adds `afipDetail.monto` to the running total for the given
// bank-account alias in the Firestore document afip/{year}/{month}/datos.
// Returns a freshly generated id on success; rethrows any Firestore error.
// NOTE(review): the returned afipId is generated but never written to the
// document — confirm callers actually need it.
export const UploadAfip = async (
  afipDetail: AfipDetailProps
): Promise<string> => {
  const afipId = uuidv4();
  const firestore = getFirestore();
  const fechaFormateada = obtenerFechaActual();
  // Assumes obtenerFechaActual() returns "dd/mm/yyyy" — TODO confirm.
  const [, mes, anio] = fechaFormateada.split('/');
  const pedidosCollectionRef = collection(firestore, 'afip', anio, mes);
  const pedidoDocRef = doc(pedidosCollectionRef, 'datos');
  try {
    // Transaction: read-modify-write so concurrent uploads for the same
    // month don't lose updates.
    await runTransaction(firestore, async (transaction) => {
      const docSnapshot = await transaction.get(pedidoDocRef);
      const existingData = docSnapshot.exists() ? docSnapshot.data() : {};
      // Accumulated totals per bank-account alias for this month.
      const montosPorAlias = existingData.montosPorAlias || {};
      // Add this entry's amount to the alias's existing total (0 if new).
      montosPorAlias[afipDetail.aliasCuenta] =
        (montosPorAlias[afipDetail.aliasCuenta] || 0) + afipDetail.monto;
      // Merge-write so unrelated fields on the document are preserved.
      transaction.set(
        pedidoDocRef,
        {
          montosPorAlias,
        },
        { merge: true }
      );
    });
    console.log('Pedido subido correctamente');
    return afipId;
  } catch (error) {
    console.error('Error al subir el pedido:', error);
    throw error;
  }
};
export const obtenerMontosPorAlias = (
anio: string,
mes: string,
callback: (montos: Record<string, number>) => void
): Unsubscribe => {
const firestore = getFirestore();
const pedidosCollectionRef = collection(firestore, 'afip', anio, mes);
const pedidoDocRef = doc(pedidosCollectionRef, 'datos');
// Escuchar cambios en el documento
return onSnapshot(
pedidoDocRef,
(docSnapshot) => {
if (docSnapshot.exists()) {
const data = docSnapshot.data();
if (data && data.montosPorAlias) {
// Si el documento existe y contiene montos por alias, llamar al callback con esos montos
callback(data.montosPorAlias as Record<string, number>);
} else {
// Si el documento existe pero no contiene montos por alias, llamar al callback con un objeto vacío
callback({});
}
} else {
// Si el documento no existe, llamar al callback con un objeto vacío
callback({});
}
},
(error) => {
console.error('Error al obtener los montos por alias:', error);
}
);
};
|
import React, { Component, useState } from 'react';
import Installer from './installer';
import { InformationCircleIcon } from '@heroicons/react/solid';
import DefaultProfilePicture from '../profile/defaultProfilePicture.png';
import {
ACTION_TYPE_USERS,
ACTION_TYPE_POPUPWINDOWRESET,
ACTION_TYPE_POPUPWINDOWPROGRESS,
ACTION_TYPE_POPUPWINDOWSUCCESS,
ACTION_TYPE_POPUPWINDOWERROR
} from '../../redux/redux';
// Admin settings screen: shows the SCM (GitHub) integration status, lets
// admins create/delete users and API keys, and displays the dashboard
// version. Local state mirrors the redux store and is kept in sync via a
// store subscription.
export default class Settings extends Component {
  constructor(props) {
    super(props);
    // default state
    let reduxState = this.props.store.getState();
    this.state = {
      application: reduxState.application,
      settings: reduxState.settings,
      users: reduxState.users,
      login: reduxState.user.login,
      input: "", // API key name currently typed by the admin
      saveButtonTriggered: false, // disables the Create button while a save is in flight
    }
    // handling API and streaming state changes
    this.props.store.subscribe(() => {
      let reduxState = this.props.store.getState();
      this.setState({ application: reduxState.application });
      this.setState({ settings: reduxState.settings });
      this.setState({ users: reduxState.users });
      this.setState({ login: reduxState.user.login });
    });
    this.deleteUser = this.deleteUser.bind(this)
  }
  // Creates a new user/API key named after `input`, unless a user with
  // that login already exists. Progress/success/error are surfaced via
  // popup-window redux actions.
  save() {
    this.props.store.dispatch({
      type: ACTION_TYPE_POPUPWINDOWPROGRESS, payload: {
        header: "Saving..."
      }
    });
    this.setState({ saveButtonTriggered: true });
    if (!this.state.users.some(user => user.login === this.state.input)) {
      this.props.gimletClient.saveUser(this.state.input)
        .then(saveUserResponse => {
          this.setTimeOutForButtonTriggeredAndPopupWindow();
          this.setState({ input: "" });
          // Append the newly created user (with its one-time token) to the list.
          this.props.store.dispatch({
            type: ACTION_TYPE_USERS,
            payload: [...this.state.users, {
              login: saveUserResponse.login,
              token: saveUserResponse.token,
              admin: false
            }]
          });
          this.props.store.dispatch({
            type: ACTION_TYPE_POPUPWINDOWSUCCESS, payload: {
              header: "Success",
              message: "User saved"
            }
          });
        }, err => {
          this.setTimeOutForButtonTriggeredAndPopupWindow();
          this.props.store.dispatch({
            type: ACTION_TYPE_POPUPWINDOWERROR, payload: {
              header: "Error",
              message: err.statusText
            }
          });
        })
    } else {
      // Duplicate login: reject without calling the backend.
      this.setTimeOutForButtonTriggeredAndPopupWindow();
      this.props.store.dispatch({
        type: ACTION_TYPE_POPUPWINDOWERROR, payload: {
          header: "Error",
          message: "User already exists"
        }
      });
    }
  }
  // After 3s, re-enables the Create button and clears any popup window.
  setTimeOutForButtonTriggeredAndPopupWindow() {
    setTimeout(() => {
      this.setState({ saveButtonTriggered: false })
      this.props.store.dispatch({
        type: ACTION_TYPE_POPUPWINDOWRESET
      });
    }, 3000);
  }
  // Sorts users by login. NOTE(review): Array.prototype.sort mutates the
  // array passed in (here, the state-held array).
  sortAlphabetically(users) {
    return users.sort((a, b) => a.login.localeCompare(b.login));
  }
  // Deletes a user on the backend, then refreshes the user list from the
  // API; progress and outcome are surfaced via popup-window actions.
  deleteUser(login) {
    this.props.store.dispatch({
      type: ACTION_TYPE_POPUPWINDOWPROGRESS, payload: {
        header: "Deleting..."
      }
    });
    this.props.gimletClient.deleteUser(login)
      .then(() => {
        this.props.store.dispatch({
          type: ACTION_TYPE_POPUPWINDOWSUCCESS, payload: {
            header: "Success",
            message: "User deleted"
          }
        });
        this.props.gimletClient.getUsers()
          .then(data => {
            this.props.store.dispatch({
              type: ACTION_TYPE_USERS,
              payload: data
            });
          }, () => {/* Generic error handler deals with it */
          });
      }, err => {
        this.props.store.dispatch({
          type: ACTION_TYPE_POPUPWINDOWERROR, payload: {
            header: "Error",
            message: err.statusText
          }
        });
        this.setTimeOutForButtonTriggeredAndPopupWindow();
      });
  }
  render() {
    const { settings, application, users, login, input, saveButtonTriggered } = this.state;
    const sortedUsers = this.sortAlphabetically(users);
    return (
      <div>
        <main>
          <div className="max-w-7xl mx-auto sm:px-6 lg:px-8">
            <div className="px-4 sm:px-0">
              <div>
                {/* Provider card: GitHub app settings, or the installer when no provider is configured */}
                {settings.provider === "github" &&
                  githubAppSettings(application)
                }
                {(!settings.provider || settings.provider === "") &&
                  gimletInstaller()
                }
                <div className="my-4 bg-white overflow-hidden shadow rounded-lg divide-y divide-gray-200">
                  <div className="px-4 py-5 sm:px-6">
                    <h3 className="text-lg leading-6 font-medium text-gray-900">
                      Users and API Keys
                    </h3>
                  </div>
                  <Users
                    users={sortedUsers}
                    login={login}
                    scmUrl={settings.scmUrl}
                    deleteUser={this.deleteUser}
                  />
                  <div className="px-4 py-5 sm:px-6">
                    <h3 className="text-lg leading-6 font-medium text-gray-900">Create an API key</h3>
                    <div>
                      <input
                        onChange={e => this.setState({ input: e.target.value })}
                        className="shadow appearance-none border rounded w-full my-4 py-2 px-3 text-gray-700 leading-tight focus:outline-none focus:shadow-outline"
                        id="environment"
                        type="text"
                        value={input}
                        placeholder="Please enter an API key name" />
                      <div className="p-0 flow-root">
                        <span className="inline-flex rounded-md shadow-sm gap-x-3 float-right">
                          <button
                            disabled={input === "" || saveButtonTriggered}
                            onClick={() => this.save()}
                            className={(input === "" || saveButtonTriggered ? "bg-gray-600 cursor-not-allowed" : "bg-green-600 hover:bg-green-500 focus:outline-none focus:border-green-700 focus:shadow-outline-indigo active:bg-green-700") + " inline-flex items-center px-6 py-3 border border-transparent text-base leading-6 font-medium rounded-md text-white transition ease-in-out duration-150"}>
                            Create
                          </button>
                        </span>
                      </div>
                    </div>
                  </div>
                </div>
                {dashboardVersion(application)}
              </div>
            </div>
          </div>
        </main >
      </div >
    )
  }
}
// Card with external links to the GitHub application's settings and
// installation pages; renders nothing until the app name is known.
function githubAppSettings(application) {
  if (application.name === "") {
    return null
  }
  return (
    <div className="bg-white overflow-hidden shadow rounded-lg divide-y divide-gray-200 my-4">
      <div className="px-4 py-5 sm:px-6">
        <h3 className="text-lg leading-6 font-medium text-gray-900">
          Github Integration
        </h3>
      </div>
      <div className="px-4 py-5 sm:px-6">
        <div className="inline-grid">
          {/* Opens the GitHub App settings page in a new window */}
          <span
            onClick={() => window.open(application.appSettingsURL)}
            className="mt-1 text-sm text-gray-500 hover:text-gray-600 cursor-pointer">
            Github Application
            <svg xmlns="http://www.w3.org/2000/svg"
              className="inline fill-current text-gray-500 hover:text-gray-700 ml-1" width="12" height="12"
              viewBox="0 0 24 24">
              <path d="M0 0h24v24H0z" fill="none" />
              <path
                d="M19 19H5V5h7V3H5c-1.11 0-2 .9-2 2v14c0 1.1.89 2 2 2h14c1.1 0 2-.9 2-2v-7h-2v7zM14 3v2h3.59l-9.83 9.83 1.41 1.41L19 6.41V10h2V3h-7z" />
            </svg>
          </span>
          {/* Link to where the app can be installed on further repos/orgs */}
          <span>
            <a
              href={application.installationURL}
              rel="noreferrer"
              target="_blank"
              className="mt-1 text-sm text-gray-500 hover:text-gray-600">
              Application installation
              <svg xmlns="http://www.w3.org/2000/svg"
                className="inline fill-current text-gray-500 hover:text-gray-700 ml-1" width="12" height="12"
                viewBox="0 0 24 24">
                <path d="M0 0h24v24H0z" fill="none" />
                <path
                  d="M19 19H5V5h7V3H5c-1.11 0-2 .9-2 2v14c0 1.1.89 2 2 2h14c1.1 0 2-.9 2-2v-7h-2v7zM14 3v2h3.59l-9.83 9.83 1.41 1.41L19 6.41V10h2V3h-7z" />
              </svg>
            </a>
          </span>
        </div>
      </div>
    </div>
  )
}
// Card shown when no SCM provider is configured yet: hosts the Installer
// flow for setting up the GitHub integration.
function gimletInstaller() {
  return (
    <div className="bg-white overflow-hidden shadow rounded-lg divide-y divide-gray-200 my-4">
      <div className="px-4 py-5 sm:px-6">
        <h3 className="text-lg leading-6 font-medium text-gray-900">
          Github Integration
        </h3>
      </div>
      <div className="px-4 py-5 sm:p-6">
        <Installer />
      </div>
    </div>
  )
}
// Lists users with their avatar, login, API token (if any), and — for every
// user other than the one currently logged in — a confirm-then-delete button.
function Users({ users, login, scmUrl, deleteUser }) {
  return (
    <div className="px-4 py-5 sm:px-6">
      {users.map(user => (
        <div key={user.login} className="flex justify-between p-2 hover:bg-gray-100 rounded">
          <div className="inline-flex items-center">
            {/* Avatar from the SCM host; falls back to a bundled default image */}
            <img
              className="h-8 w-8 rounded-full text-2xl font-medium text-gray-900"
              src={`${scmUrl}/${user.login}.png?size=128`}
              onError={(e) => { e.target.src = DefaultProfilePicture }}
              alt={user.login} />
            <div className="ml-4">{user.login}</div>
          </div>
          <TokenInfo
            token={user.token}
          />
          {/* The logged-in user cannot delete themselves */}
          {user.login !== login &&
            <div className="flex items-center">
              <svg xmlns="http://www.w3.org/2000/svg"
                onClick={() => {
                  // eslint-disable-next-line no-restricted-globals
                  confirm(`Are you sure you want to delete ${user.login}?`) &&
                    deleteUser(user.login);
                }}
                className="items-center cursor-pointer inline text-red-400 hover:text-red-600 opacity-70 h-4 w-4" fill="none" viewBox="0 0 24 24" stroke="currentColor">
                <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M19 7l-.867 12.142A2 2 0 0116.138 21H7.862a2 2 0 01-1.995-1.858L5 7m5 4v6m4-6v6m1-10V4a1 1 0 00-1-1h-4a1 1 0 00-1 1v3M4 7h16" />
              </svg>
            </div>
          }
        </div>
      ))}
    </div>
  )
}
// Displays an API token with a copy-to-clipboard button and a transient
// "Copied!" tooltip; renders nothing when there is no token.
function TokenInfo({ token }) {
  const [isCopied, setIsCopied] = useState(false);
  // Shows the "Copied!" tooltip for 2 seconds after a click.
  const handleCopyClick = () => {
    setIsCopied(true);
    setTimeout(() => {
      setIsCopied(false);
    }, 2000);
  };
  if (!token) {
    return null
  }
  return (
    <div className="rounded-md bg-blue-50 p-4 w-5/6">
      <div className="flex">
        <div className="flex-shrink-0">
          <InformationCircleIcon className="h-5 w-5 text-blue-400" aria-hidden="true" />
        </div>
        <div className="ml-3">
          <h3 className="text-sm font-medium text-blue-800">API key:</h3>
          <div className="mt-2 text-sm text-blue-700">
            <div className="flex items-center">
              <span className="text-xs font-mono bg-blue-100 text-blue-500 font-medium px-1 py-1 rounded break-all">{token}</span>
              <div className="relative ml-3 cursor-pointer" onClick={() => {
                copyToClipboard(token);
                handleCopyClick();
              }}>
                <svg xmlns="http://www.w3.org/2000/svg" className="h-6 w-6 text-blue-400 hover:text-blue-500" fill="none" viewBox="0 0 24 24" stroke="currentColor" strokeWidth={2}>
                  <path strokeLinecap="round" strokeLinejoin="round" d="M8 16H6a2 2 0 01-2-2V6a2 2 0 012-2h8a2 2 0 012 2v2m-6 12h8a2 2 0 002-2v-8a2 2 0 00-2-2h-8a2 2 0 00-2 2v8a2 2 0 002 2z" />
                </svg>
                {isCopied && (
                  <div className="absolute top-6 right-0">
                    <div className="p-2 bg-indigo-600 select-none text-white inline-block rounded">
                      Copied!
                    </div>
                  </div>
                )}
              </div>
            </div>
          </div>
        </div>
      </div>
    </div>)
};
// Card showing the running dashboard's version string; renders nothing
// when the version is unknown.
function dashboardVersion(application) {
  if (!application.dashboardVersion) {
    return null
  }
  return (
    <div className="my-4 bg-white overflow-hidden shadow rounded-lg divide-y divide-gray-200">
      <div className="px-4 py-5 sm:px-6">
        <h3 className="text-lg leading-6 font-medium text-gray-900">
          Dashboard version
        </h3>
      </div>
      <div className="px-4 py-5 sm:px-6">
        <div className="inline-grid">
          <span
            className="mt-1 text-sm text-gray-500">
            {application.dashboardVersion}
          </span>
        </div>
      </div>
    </div>
  )
}
// Copies text to the clipboard, preferring the asynchronous Clipboard API;
// outside secure contexts it falls back to the legacy textarea hack.
export function copyToClipboard(copyText) {
  const clipboardApiUsable = Boolean(navigator.clipboard) && window.isSecureContext;
  if (!clipboardApiUsable) {
    unsecuredCopyToClipboard(copyText);
    return;
  }
  navigator.clipboard.writeText(copyText);
}
// Legacy clipboard fallback: place the text in an invisible, fixed-position
// textarea, select it, and invoke the deprecated execCommand('copy').
function unsecuredCopyToClipboard(text) {
  const scratch = document.createElement("textarea");
  scratch.value = text;
  // Fixed + transparent so the element neither scrolls the page nor flashes.
  scratch.style.position = "fixed";
  scratch.style.opacity = "0";
  document.body.appendChild(scratch);
  scratch.select();
  try {
    document.execCommand('copy');
  } catch (err) {
    console.error('Unable to copy to clipboard', err);
  }
  document.body.removeChild(scratch);
}
|
/* eslint-disable max-classes-per-file */
/* eslint-disable import/prefer-default-export */
/**
* Represents a basic Game Loop based on `requestAnimationFrame()`.
*
* The implementation of this class depends on another class: `Game`. This
* means that, if you use this class, you need to either have a `Game` class
* that exactly implements the three methods `processInput()`, `update(elapsed)`
* and `render()` or change the code in the `step()` method of this class so it
* represents your own game methods.
*
* @see https://gameprogrammingpatterns.com/game-loop.html
* @author BugSlayer
*
* @version 1.5.0
*/
// Placeholder for the Game contract described above: a concrete game is
// expected to supply processInput(), update(elapsed) and render().
export class Game {
}
export class GameLoop {
    /**
     * Construct a new instance of this class.
     *
     * @param game the game to animate; must provide `processInput()`,
     *   `update(elapsed)` (returning a falsy value to request a stop) and
     *   `render()`
     * @param mode OPTIONAL, the mode of the gameloop. It defaults to
     *   GameLoop.NORMAL_MODE, which is fine for simple games
     */
    constructor(game, mode = GameLoop.NORMAL_MODE) {
        this.state = GameLoop.STATE_IDLE;
        this.mode = mode;
        this.game = game;
        // BUGFIX: bind once so `requestAnimationFrame(this.step)` keeps
        // `this` pointing at this loop. The original passed the method
        // unbound, so `this` was lost on every animation frame.
        this.step = this.step.bind(this);
    }
    /**
     * Start the game loop. Does nothing unless the loop is idle.
     */
    start() {
        if (this.state === GameLoop.STATE_IDLE) {
            this.state = GameLoop.STATE_STARTING;
            this.gameStart = performance.now();
            this.frameEnd = this.gameStart;
            this.previousElapsed = this.gameStart;
            this.gameTime = 0;
            this.frameCount = 0;
            requestAnimationFrame(this.step);
        }
    }
    /**
     * Requests to gracefully stop the gameloop; the loop goes idle after
     * the next animation frame.
     */
    stop() {
        this.state = GameLoop.STATE_STOPPING;
    }
    /**
     * Returns `true` if the given state exactly matches the current state of
     * this object
     *
     * @param state the state to check
     * @returns `true` if the given state exactly matches the current state of
     *   this object
     */
    isInState(state) {
        return this.state === state;
    }
    /**
     * Advances the game by one animation frame. Bound to this instance in
     * the constructor, so it can safely be handed to
     * `requestAnimationFrame()` as a plain callback.
     *
     * @param timestamp a `DOMHighResTimeStamp` similar to the one returned by
     *   `performance.now()`, indicating the point in time when
     *   `requestAnimationFrame()` starts to execute callback functions
     */
    step(timestamp) {
        // Handle first animation frame
        if (this.isInState(GameLoop.STATE_STARTING)) {
            this.state = GameLoop.STATE_RUNNING;
        }
        this.game.processInput();
        // Let the game update itself
        let shouldStop = false;
        if (this.mode === GameLoop.PLAY_CATCH_UP) {
            // Fixed 1ms updates until simulated time catches up with real time.
            const step = 1;
            while (this.previousElapsed < timestamp && !shouldStop) {
                shouldStop = !this.game.update(step);
                this.previousElapsed += step;
            }
        }
        else {
            const elapsed = timestamp - this.previousElapsed;
            shouldStop = !this.game.update(elapsed);
            this.previousElapsed = timestamp;
        }
        // Let the game render itself
        this.game.render();
        // BUGFIX: only request the next frame while the game wants to
        // continue AND no stop was requested. The original condition
        // (`!shouldStop || STOPPING`) kept animating forever after stop().
        if (!shouldStop && !this.isInState(GameLoop.STATE_STOPPING)) {
            requestAnimationFrame(this.step);
        }
        else {
            this.state = GameLoop.STATE_IDLE;
        }
        // Handle time measurement and analysis
        const now = performance.now();
        // BUGFIX: time spent inside this step is now - timestamp; the
        // original `timestamp - now` was always negative, making `load`
        // meaningless.
        const stepTime = now - timestamp;
        const frameTime = now - this.frameEnd;
        this.fps = Math.round(1000 / frameTime);
        // Fraction of the frame interval spent doing work in this step.
        this.load = stepTime / frameTime;
        this.frameEnd = now;
        this.gameTime = now - this.gameStart;
        this.frameCount += 1;
    }
}
// Loop lifecycle states and modes (attached as class constants).
GameLoop.STATE_IDLE = 0;
GameLoop.STATE_STARTING = 1;
GameLoop.STATE_RUNNING = 2;
GameLoop.STATE_STOPPING = 3;
GameLoop.NORMAL_MODE = 0;
GameLoop.PLAY_CATCH_UP = 1;
//# sourceMappingURL=GameLoop.js.map
|
<?php
namespace Drupal\tbo_services\Services\Rest;
use Drupal\adf_core\Util\UtilMessage;
use Drupal\Core\Session\AccountProxyInterface;
use Drupal\rest\ResourceResponse;
use Drupal\tbo_api\TboApiClientInterface;
use Symfony\Component\HttpKernel\Exception\AccessDeniedHttpException;
/**
* Class NotificationRestService.
*
* @package Drupal\tbo_services\Services\Rest
*/
class NotificationRestService {

  /**
   * The TBO API client used to forward verification emails.
   */
  protected $api;

  /**
   * The user making the current request.
   */
  protected $currentUser;

  /**
   * The Segment PHP analytics client.
   */
  protected $segment;

  /**
   * NotificationRestService constructor.
   *
   * @param \Drupal\tbo_api\TboApiClientInterface $api
   *   The api interface.
   */
  public function __construct(TboApiClientInterface $api) {
    $this->api = $api;
    $service = \Drupal::service('adf_segment');
    $service->segmentPhpInit();
    $this->segment = $service->getSegmentPhp();
  }

  /**
   * Responds to POST requests.
   *
   * Handles notification opt-ins and re-sending of the TigoID account
   * verification email. Access is denied when the requested notification id
   * is not in the caller's allowed list (stored in the private tempstore).
   *
   * @param \Drupal\Core\Session\AccountProxyInterface $currentUser
   *   The user making the request.
   * @param array $params
   *   Request data; uses 'notification_id' and 'send_verified'.
   *
   * @return \Drupal\rest\ResourceResponse
   *   The data response.
   *
   * @throws \Symfony\Component\HttpKernel\Exception\AccessDeniedHttpException
   *   When the user lacks 'access content' or the notification id is not
   *   allowed for this user.
   */
  public function post(AccountProxyInterface $currentUser, array $params) {
    $this->currentUser = $currentUser;
    // This response is user-specific: never serve it from the page cache.
    \Drupal::service('page_cache_kill_switch')->trigger();
    // Use current user after pass authentication to validate access.
    if (!$this->currentUser->hasPermission('access content')) {
      throw new AccessDeniedHttpException();
    }
    // Get user mail.
    $mail = $currentUser->getEmail();
    // notification_id 0 + send_verified 0 means "just log the consultation".
    if ($params['notification_id'] == 0 && $params['send_verified'] == 0) {
      // Save audit log alerts.
      $this->saveAuditLog(1);
      return new ResourceResponse("OK");
    }
    // Validate notification_id against the ids this user may touch,
    // previously stashed in the private tempstore.
    $tempStore = \Drupal::service('user.private_tempstore')->get('tbo_notifications');
    $notifications_allowed = $tempStore->get('tbo_notifications_allowed_' . $currentUser->id());
    if (empty($notifications_allowed)) {
      throw new AccessDeniedHttpException();
    }
    $notification_id = $params['notification_id'];
    if (empty($notification_id)) {
      throw new AccessDeniedHttpException();
    }
    $notification_id = (int) $notification_id;
    if (!in_array($notification_id, $notifications_allowed)) {
      throw new AccessDeniedHttpException();
    }
    // Get tool service.
    $notification_service = \Drupal::service('tbo_services.tools_notifications');
    $uid = $currentUser->id();
    // Get repository.
    $repository = \Drupal::service('tbo_services.tbo_services_repository');
    // Check whether the user/notification relation already exists.
    $exist = $repository->getNotificationDetail($notification_id, $uid);
    // Validate Action.
    if ($params['send_verified'] == 1) {
      // Re-send the TigoID account verification email through the API.
      $jsonBody = [
        'email' => $mail,
      ];
      $jsonBody = json_encode($jsonBody);
      $params = [
        'query' => [
          'email_type' => 'verification_email',
        ],
        'body' => $jsonBody,
      ];
      try {
        $send = $this->api->forwardingVerificationEmail($params);
      }
      catch (\Exception $e) {
        // Save audit log (failed to re-send).
        $this->saveAuditLog(2, $mail);
        return new ResourceResponse(UtilMessage::getMessage($e));
      }
      if (empty($exist)) {
        // Create relation in notification.
        $create_relation = $notification_service->createRelationNotification($uid, $notification_id, TRUE);
      }
      // Add session var to verified send.
      $_SESSION['notification_verified']['tbo_notifications_verified_send_' . $currentUser->id()] = TRUE;
      // Save audit log (successful re-send).
      $this->saveAuditLog(0, $mail);
      return new ResourceResponse("OK");
    }
    elseif ($params['send_verified'] == 0) {
      // Opt-in only: create the relation if it does not exist yet.
      if (empty($exist)) {
        // Create relation in notification.
        $create_relation = $notification_service->createRelationNotification($uid, $notification_id);
      }
      return new ResourceResponse("OK");
    }
    // BUGFIX: response message was misspelled 'Faild'.
    return new ResourceResponse('Failed');
  }

  /**
   * Writes an entry to the audit log.
   *
   * @param int $type
   *   Log variant: 0 = verification email re-sent, 1 = alerts consulted,
   *   2 = verification email re-send failed.
   * @param string $mail
   *   The email address involved, when relevant.
   */
  public function saveAuditLog($type = 0, $mail = '') {
    // Save audit log.
    $service = \Drupal::service('tbo_core.audit_log_service');
    $service->loadName();
    // Create array data_log (defaults describe the re-send success case).
    $data_log = [
      'companyName' => isset($_SESSION['company']['name']) ? $_SESSION['company']['name'] : '',
      'companyDocument' => isset($_SESSION['company']['nit']) ? $_SESSION['company']['nit'] : '',
      'companySegment' => isset($_SESSION['company']['segment']) ? $_SESSION['company']['segment'] : '',
      'event_type' => 'Cuenta',
      'description' => t('Usuario reenvía correo de verificación de cuenta'),
      'details' => t('Usuario @userName con @mail re envía correo de verificación de cuenta de TigoID',
        [
          '@userName' => $service->getName(),
          '@mail' => $mail,
        ]
      ),
      'old_value' => 'No disponible',
      'new_value' => 'No disponible',
    ];
    if ($type == 1) {
      $data_log['event_type'] = 'Notificaciones';
      $data_log['description'] = t('Usuario consulta alertas y notificaciones');
      $data_log['details'] = t('Usuario @userName consulto alertas y notificaciones',
        [
          '@userName' => $service->getName(),
        ]
      );
    }
    if ($type == 2) {
      $data_log['event_type'] = 'Cuenta';
      $data_log['description'] = t('Usuario no puede reenviar correo de verificación de TigoID');
      $data_log['details'] = t('Usuario @userName no pudo reenviar correo de
      verificación de cuenta de TigoID al correo registrado @mail',
        [
          '@userName' => $service->getName(),
          '@mail' => $mail,
        ]
      );
    }
    // Save audit log.
    $service->insertGenericLog($data_log);
  }

}
|
package ExceptionHanding;
import java.util.HashMap;
import java.util.Map;
import java.util.Scanner;
// Thrown when an order references a product id that is not in the inventory.
// NOTE(review): name misspells "Invalid" but is part of the existing API.
class InVaidProductIDException extends Exception{
    public InVaidProductIDException(String message) {
        super(message);
    }
}
// Thrown when the requested quantity is invalid (negative).
class InValidQuantityException extends Exception{
    public InValidQuantityException(String message) {
        super(message);
    }
}
// Thrown when payment processing fails.
// NOTE(review): name misspells "PaymentException" and breaks UpperCamelCase,
// but renaming would change the existing API.
class paymentExcetion extends Exception{
    public paymentExcetion(String message) {
        super(message);
    }
}
// Thrown when fewer units are in stock than an order requests.
class OutOfStockException extends Exception{
    public OutOfStockException(String message) {
        super(message);
    }
}
// Wrapper exception reported to callers when any step of order processing
// fails (invalid id/quantity, out of stock, or payment failure).
class OrderProcessedException extends Exception{
    public OrderProcessedException(String message) {
        super(message);
    }
}
class OrderProcessor{
    // Maps product id -> units currently in stock.
    private Map<Integer,Integer> productInventory;

    // Seeds the inventory with two demo products.
    public OrderProcessor(){
        this.productInventory = new HashMap<>();
        productInventory.put(123,20);
        productInventory.put(234,90);
    }

    /**
     * Validates the product id, quantity, stock and payment, then reports
     * the order as processed. Any validation failure is wrapped in an
     * OrderProcessedException with the original exception attached as its
     * cause, so callers can inspect getCause().
     */
    public void processOrder(int productId,int requestQuantity) throws OrderProcessedException {
        try {
            validateProductId(productId);
            validateRequestQuantity(requestQuantity);
            checkStockAvailability(productId, requestQuantity);
            processPayment();
            System.out.println("Order Processed!");
        }catch (InVaidProductIDException | InValidQuantityException | OutOfStockException | paymentExcetion e){
            // BUGFIX: chain the original exception; previously the cause was
            // discarded and callers reading getCause() always saw null.
            OrderProcessedException wrapped =
                    new OrderProcessedException("Failed to process Order : " + e.getMessage());
            wrapped.initCause(e);
            throw wrapped;
        }
    }

    // Rejects product ids that are not present in the inventory.
    private void validateProductId(int productId) throws InVaidProductIDException {
        if(!productInventory.containsKey(productId))
            throw new InVaidProductIDException("Product is InValid");
    }

    // Rejects negative quantities (zero is allowed, matching prior behavior).
    private void validateRequestQuantity(int requestQuantity) throws InValidQuantityException {
        if(requestQuantity<0)
            throw new InValidQuantityException("Quantities cannot be negative");
    }

    // Fails only when fewer units are available than requested.
    private void checkStockAvailability(int productID, int requestQuantity) throws OutOfStockException {
        // BUGFIX: ordering exactly the remaining stock must succeed; the old
        // check `!(stock > requested)` rejected that case. Also fixed the
        // missing spaces in the error message.
        if(productInventory.get(productID) < requestQuantity)
            throw new OutOfStockException("Product " + productID + " is out of stock");
    }

    // Placeholder payment step; always succeeds in this example.
    private void processPayment() throws paymentExcetion {
        boolean paymentStatus = true;
        if(!paymentStatus){
            throw new paymentExcetion("Payment Failed!");
        }
    }
}
public class Example2 {
    /**
     * Reads product id / quantity pairs from stdin in a loop and processes
     * each order; on failure prints the error and its full cause chain,
     * then closes the scanner.
     */
    public static void main(String[] arg){
        Scanner scanner = new Scanner(System.in);
        try {
            OrderProcessor orderProcessor = new OrderProcessor();
            while (true)
            {
                System.out.println("Enter the product Id :");
                int productId = scanner.nextInt();
                System.out.println("Enter the quantity(Negative quantities are not allowed : ");
                int requestQuantity = scanner.nextInt();
                orderProcessor.processOrder(productId,requestQuantity);
            }
        } catch (OrderProcessedException e) {
            System.err.println("Error : " + e.getMessage());
            // BUGFIX: walk the cause chain. The original loop never advanced
            // `cause`, so any non-null cause printed forever; it also printed
            // cause.getCause() (the grandparent) instead of the cause itself.
            Throwable cause = e.getCause();
            while (cause != null) {
                System.err.println("Caused by : " + cause);
                cause = cause.getCause();
            }
        }finally {
            scanner.close();
            System.out.println("Scanner is closed!");
        }
    }
}
|
module Lib.AST where
import Data.List
import Control.Monad
import Lib.Monads
{- AST.hs
- This file includes the data type of the AST of the lambda terms you should use,
- and the data type of the CES code you should use as well.
-
- Moreover, this includes the function ``termPrettyPrint`` for pretty showing
- the data type of the lambda term.
-}
-- A type alias for a user-inputted lambda term: single binders and
-- variables are plain names, and a multi-binder is its first bound name
-- together with the remaining ones.
type LambdaTerm = Term String (String, [String]) String
-- A type alias for the De Bruijn indexed form of a lambda term: binder
-- names are erased and variables become integer indices.
type DeBruijnTerm = Term () () Int
-- | Term singlebinder multibinder var
-- The type of a lambda term.
-- Some notes on the type definition -- for ``Term singlebinder multibinder var``,
-- - the type variable ``singlebinder`` is the type of an argument
-- which binds exactly one variable
-- - the type variable ``multibinder`` is the type of an argument
-- which binds one or more variables.
-- - the type variable ``var`` is the type of a variable used in the body of
-- a lambda term
data Term singlebinder multibinder var
-- A lambda abstraction
= Abs multibinder (Term singlebinder multibinder var)
-- An application
| App (Term singlebinder multibinder var) (Term singlebinder multibinder var)
-- A variable
| Var var
-- A constant value
| Const Int
-- Primitive addition
| Add (Term singlebinder multibinder var) (Term singlebinder multibinder var)
-- Primitive multiplication
| Mul (Term singlebinder multibinder var) (Term singlebinder multibinder var)
-- Primitive if
| BIf (Term singlebinder multibinder var) (Term singlebinder multibinder var) (Term singlebinder multibinder var)
-- Primitive Leq
| BLeq (Term singlebinder multibinder var) (Term singlebinder multibinder var)
-- Primitive True
| BTrue
-- Primitive False
| BFalse
-- fixed point combinators for a function which takes 0 arguments
-- and 1 argument.
-- (see the pg.19 of ces.pdf for more details).
-- This corresponds to Fixc:
| FFix0 singlebinder (Term singlebinder multibinder var)
-- This corresponds to Fix:
| FFix1 (singlebinder, singlebinder) (Term singlebinder multibinder var)
-- Primitive list
| LNil
-- Primitive cons
| LCons (Term singlebinder multibinder var) (Term singlebinder multibinder var) --
-- Primitive list case
| LCase
-- (term to pattern match on)
(Term singlebinder multibinder var)
-- (what to output in the LNil case)
(Term singlebinder multibinder var)
-- ( a : b, term to output where ``a`` and ``b`` are bound to the constructor)
((singlebinder, singlebinder), Term singlebinder multibinder var)
deriving (Show, Eq)
-- Type aliases for the ``Code``, ``Env``, and ``Stack``
-- of a CES machine respectively..
type Code = [Instr]
type Env = [Instr]
type Stack = [Instr]
-- Type alias for a CES machine: (code, environment, stack).
type CES = (Code, Env, Stack)
-- The ``Instr`` type is the instructions used for the
-- CES machine..
-- See pg 17 of ``ces.pdf``
data Instr
  = IClo (Code, Env) -- ( code, environment )
  | IApp
  -- NOTE(review): IAccess's Int is presumably a De Bruijn index into the
  -- environment — confirm against ces.pdf.
  | IAccess Int
  | IRet
  | IConst Int
  | IAdd
  | IMul
  | ILeq
  | ITrue
  | IFalse
  | IIf (Code, Code)
  -- 0 arity fixc
  | IFFix0 (Code, Env) -- ( code, environment )
  -- 1 arity fix
  | IFFix1 (Code, Env) -- ( code, environment )
  | ICons (Maybe (Instr, Instr))
  | INil
  | ICase (Code, Code) -- (the [] case, the (_:_) case)
  deriving (Show, Eq)
-- | Function for specializing pretty printing for a lambda term
-- (just ``termPrettyPrint`` pinned to the named-variable representation).
lambdaTermPrettyPrint :: LambdaTerm -> String
lambdaTermPrettyPrint = termPrettyPrint
-- | Function for specializing pretty printing for a De Bruijn term.
-- Bug fix: the signature previously said ``LambdaTerm -> String``, which
-- made this an exact duplicate of ``lambdaTermPrettyPrint`` and rejected
-- actual ``DeBruijnTerm`` values; it now pins the De Bruijn representation.
deBruijnTermPrettyPrint :: DeBruijnTerm -> String
deBruijnTermPrettyPrint = termPrettyPrint
--------------------------------------
-- Everything past here, you don't need to worry about..
--------------------------------------
-- | TermPrettyPrinter is a pretty printer for a ``Term a b``.
-- Write
-- `` termPrettyPrint (someTerm :: LambdaTerm)``
-- or
-- `` termPrettyPrint (someTerm :: DeBruijnTerm)``
-- to get the pretty printed string
-- If you are in GHCI, you can just print it by typing:
-- `` putStrLn $ termPrettyPrint (someTerm :: LambdaTerm)``
-- or
-- `` putStrLn $ termPrettyPrint (someTerm :: DeBruijnTerm)``
-- which will print the escaped characters as they are meant to be
-- seen.
-- Please report bugs!
--
-- As a future TODO, this code is a bit messy and needs to be cleaned
-- up... ``TermPrettyPrinter" is really just used for pretty printing
-- multiple function args (and the whole thing), and ``TermValuePrinter"
-- should be used for printing individual args (very loosely how this
-- should be written)...
-- | Pretty printer for a whole printable thing (multi-arg binders and
-- complete terms).
class TermPrettyPrinter a where
  termPrettyPrint :: a -> String
-- | Printer for a single argument / variable occurrence.
class TermValuePrinter a where
  termValuePrint :: a -> String
instance TermValuePrinter Char where
  termValuePrint = pure
instance TermValuePrinter a => TermValuePrinter [a] where
  termValuePrint = concatMap termValuePrint
-- The erased binder of a De Bruijn term prints as an underscore.
instance TermValuePrinter () where
  termValuePrint = const "_"
-- This is only called when calling a DeBruijn index
instance TermValuePrinter Int where
  termValuePrint n = "#"++ show n
instance TermValuePrinter a => TermPrettyPrinter [a] where
  termPrettyPrint = intercalate " " . map termValuePrint
instance TermPrettyPrinter () where
  termPrettyPrint = const ""
-- We only use tuples of (a,b) as a multiarg binder..
-- Honestly, this is a bit of a weird hack that could be fixed with
-- type families -- but alas, we will avoid using GHC extensions..
-- Also, some strange hacks to get the spacing a bit prettier..
instance (TermValuePrinter a, TermPrettyPrinter b) => TermPrettyPrinter (a, b) where
  termPrettyPrint (a, b) = termValuePrint a ++ b'
    where
      -- Only emit the separating space when the tail actually rendered text.
      b' = case termPrettyPrint b of
        [] -> []
        pb -> ' ' : pb
instance (TermValuePrinter a, TermPrettyPrinter b) => TermValuePrinter (a, b) where
  termValuePrint (a,b) = termValuePrint a ++ b'
    where
      b' = case termPrettyPrint b of
        [] -> []
        pb -> ' ' : pb
-- functions for inserting the brackets in
-- the right spots..

-- | Wrap an already-rendered term in parentheses.
bAssocParen :: String -> String
bAssocParen str = '(' : str ++ ")"
-- Parenthesisation stages. Each stage inspects the head constructor of an
-- already-rendered child (a ``Cofree`` node carrying its string): returning
-- the node directly short-circuits the ``ContT`` pipeline (no parentheses
-- needed), while calling the continuation ``k`` hands the node to the next
-- stage — ultimately ``assocParen``, which adds the brackets.
appAssocL lt@(l :< lp) = ContT $ \k -> case lp of
  VarF _ -> return lt
  ConstF _ -> return lt
  BTrueF -> return lt
  BFalseF -> return lt
  LNilF -> return lt
  AppF _ _ -> return lt
  _ -> k lt
appAssocR rt@(r :< rp) = ContT $ \k -> case rp of
  VarF _ -> return rt
  ConstF _ -> return rt
  BTrueF -> return rt
  BFalseF -> return rt
  LNilF -> return rt
  -- AbsF _ _ -> return rt
  _ -> k rt
-- ``*`` is left associative, so a Mul child on the left needs no brackets.
mulAssocL rt@(r :< rp) = ContT $ \k -> case rp of
  MulF _ _ -> return rt
  _ -> k rt
mulAssocR rt@(r :< rp) = ContT $ \k -> case rp of
  AppF _ _ -> return rt
  _ -> k rt
-- ``+`` is left associative, so an Add child on the left needs no brackets.
addAssocL rt@(r :< rp) = ContT $ \k -> case rp of
  AddF _ _ -> return rt
  _ -> k rt
addAssocR rt@(r :< rp) = ContT $ \k -> case rp of
  MulF _ _ -> return rt
  _ -> k rt
-- ``:`` is right associative: the left child always continues to the next
-- stage, while cons/add on the right bind tightly enough to skip brackets.
consAssocL rt@(r :< rp) = ContT $ \k -> case rp of
  _ -> k rt
consAssocR rt@(r :< rp) = ContT $ \k -> case rp of
  LConsF _ _ -> return rt
  AddF _ _ -> return rt
  _ -> k rt
-- ``<=`` is non-associative: never short-circuits on its own.
leqAssoc rt@(r :< rp) = ContT $ \k -> case rp of
  _ -> k rt
-- Final stage: actually wrap the rendered string in parentheses.
assocParen (a :< as) = return $ concat ["(",a,")"] :< as
-- | Pretty printer for full terms. Implemented as a histomorphism so that
-- each node sees its children already rendered (with their constructors
-- preserved in the ``Cofree`` annotation) and can decide parenthesisation
-- by running the ``*Assoc*`` pipelines above.
instance
  ( TermPrettyPrinter a
  , TermValuePrinter a
  , TermPrettyPrinter b
  , TermValuePrinter b
  , TermValuePrinter c ) =>
  TermPrettyPrinter (Term a b c) where
  termPrettyPrint = histo f
    where
      -- Run a parenthesisation pipeline and keep only the rendered string.
      runAssocParen ct = case runCont ct id of
        str :< _ -> str
      f ::
        ( TermPrettyPrinter a
        , TermValuePrinter a
        , TermPrettyPrinter b
        , TermValuePrinter b
        , TermValuePrinter c
        ) =>
        TermF a b c (Cofree (TermF a b c) String) -> String
      f term = case term of
        AbsF arg (bdy :< _) -> concat
          [ "\\"
          -- , intercalate " " $ map termPrettyPrint $ uncurry (:) arg
          , termValuePrint arg
          , " -> "
          , bdy
          ]
        {- infixity copies Haskell, so we have:
        infix 4 <=
        infixr 5 :
        infixl 6 +
        infixl 7 *
        and function application has the highest precedence.
        -}
        AppF l r -> concat
          [ runAssocParen (appAssocL >=> assocParen $ l)
          , " "
          , runAssocParen (appAssocR >=> assocParen $ r)
          ]
        MulF l r -> concat
          [ runAssocParen (appAssocL >=> mulAssocL >=> assocParen $ l)
          , " * "
          , runAssocParen (appAssocR >=> mulAssocR >=> assocParen $ r)
          ]
        AddF l r -> concat
          [ runAssocParen (appAssocL >=> mulAssocL >=> addAssocL >=> assocParen $ l)
          , " + "
          , runAssocParen (appAssocR >=> mulAssocR >=> addAssocR >=> assocParen $ r)
          ]
        LConsF l r -> concat
          [ runAssocParen (appAssocL >=> mulAssocL >=> addAssocL >=> consAssocL >=> assocParen $ l)
          , " : "
          , runAssocParen (appAssocR >=> mulAssocR >=> addAssocR >=> consAssocR >=> assocParen $ r)
          ]
        BLeqF l r -> concat
          [ runAssocParen (appAssocL >=> mulAssocL >=> addAssocL >=> consAssocL >=> leqAssoc >=> assocParen $ l)
          , " <= "
          , runAssocParen (appAssocR >=> mulAssocR >=> addAssocR >=> consAssocR >=> leqAssoc >=> assocParen $ r)
          ]
        VarF v -> termValuePrint v
        -- just show constants.. No need to use
        -- to use ``termValuePrint`` since ``termValuePrint``
        -- would expect a DeBruijn index to print not just an
        -- Int
        ConstF v -> show v
        BIfF (cond :< _) (thenc :< _) (elsec :< _) -> intercalate " "
          [ "if"
          , cond
          , "then"
          , thenc
          , "else"
          , elsec
          ]
        BTrueF -> "True"
        BFalseF -> "False"
        FFix1F (a, b) (bdy :< _) -> concat
          [ "fix"
          , " ("
          , "\\"
          , termValuePrint a
          , " "
          , termValuePrint b
          , " -> "
          , bdy
          , ")"
          ]
        FFix0F a (bdy :< _) -> concat
          [ "fix"
          , " ("
          , "\\"
          , termValuePrint a
          , " -> "
          , bdy
          , ")"
          ]
        LNilF -> "[]"
        LCaseF (caseon :< _) (nilc :< _) ((arg0,arg1), consc :< _) -> intercalate " "
          [ "lcase"
          , caseon
          , "of"
          , "[]"
          , "->"
          , nilc
          , ";"
          , termValuePrint arg0
          , ":"
          , termValuePrint arg1
          , "->"
          , consc
          ]
----------------------------
-- Everything past here is just some helper functions for working with
-- recursive types in general. You don't need to worry about this :D
----------------------------
-- | Catamorphism: a plain bottom-up fold over 'Term'.
cata :: (TermF a b c z -> z) -> Term a b c -> z
cata f = f . fmap g . project
  where
    g term = cata f term
-- | Paramorphism: like 'cata', but each step also receives the original
-- (unfolded) subterm alongside the folded result.
para :: (TermF a b c (Term a b c, z) -> z) -> Term a b c -> z
para f = f . fmap g . project
  where
    g term = (term, para f term)
-- | Cofree comonad over a functor: a tree shaped like ``f`` with an ``a``
-- annotation at every node.
data Cofree f a = a :< f (Cofree f a)
instance Functor f => Functor (Cofree f) where
  fmap f (a :< as) = f a :< fmap (fmap f) as
-- | Histomorphism: a fold where each step may inspect the fully annotated
-- (already-folded) subtrees, not just their final values.
histo :: (TermF a b c (Cofree (TermF a b c) z) -> z) -> Term a b c -> z
histo f = g . h
  where
    -- Extract the annotation at the root.
    g (v :< _) = v
    -- Annotate every node with the fold of its subtree.
    h term = uncurry (:<)
      $ (\termf -> (f termf, termf))
      $ fmap h (project term)
-- | Unroll one layer of a 'Term' into its base functor 'TermF'
-- (inverse of 'embed'). Purely mechanical constructor renaming.
project :: Term a b c -> TermF a b c (Term a b c)
project (Abs binder body) = AbsF binder body
project (App fn arg) = AppF fn arg
project (Var v) = VarF v
project (Const n) = ConstF n
project (Add l r) = AddF l r
project (Mul l r) = MulF l r
project (BIf c t e) = BIfF c t e
project (BLeq l r) = BLeqF l r
project BTrue = BTrueF
project BFalse = BFalseF
project (FFix0 binder body) = FFix0F binder body
project (FFix1 binders body) = FFix1F binders body
project LNil = LNilF
project (LCons h t) = LConsF h t
project (LCase scrut nilCase consCase) = LCaseF scrut nilCase consCase
-- | Roll one 'TermF' layer back into a 'Term' (inverse of 'project').
embed :: TermF a b c (Term a b c) -> Term a b c
embed (AbsF binder body) = Abs binder body
embed (AppF fn arg) = App fn arg
embed (VarF v) = Var v
embed (ConstF n) = Const n
embed (AddF l r) = Add l r
embed (MulF l r) = Mul l r
embed (BIfF c t e) = BIf c t e
embed (BLeqF l r) = BLeq l r
embed BTrueF = BTrue
embed BFalseF = BFalse
embed (FFix0F binder body) = FFix0 binder body
embed (FFix1F binders body) = FFix1 binders body
embed LNilF = LNil
embed (LConsF h t) = LCons h t
embed (LCaseF scrut nilCase consCase) = LCase scrut nilCase consCase
-- | Base functor of 'Term': identical shape, except recursive positions are
-- replaced by the type variable ``rec``. (Type variables renamed from the
-- machine-generated originals; the constructors are unchanged.)
data TermF singlebinder multibinder var rec
  = AbsF multibinder rec
  | AppF rec rec
  | VarF var
  | ConstF Int
  | AddF rec rec
  | MulF rec rec
  | BIfF rec rec rec
  | BLeqF rec rec
  | BTrueF
  | BFalseF
  | FFix0F singlebinder rec
  | FFix1F (singlebinder, singlebinder) rec
  | LNilF
  | LConsF rec rec
  | LCaseF rec rec ((singlebinder, singlebinder), rec)
-- | Map over the recursive positions of a 'TermF' layer; binders, variables
-- and constants are untouched. (Hand-written replacement for the generated
-- instance: the identity lambdas are dropped.)
instance Functor (TermF a b c) where
  fmap f termf = case termf of
    AbsF binder body -> AbsF binder (f body)
    AppF fn arg -> AppF (f fn) (f arg)
    VarF v -> VarF v
    ConstF n -> ConstF n
    AddF l r -> AddF (f l) (f r)
    MulF l r -> MulF (f l) (f r)
    BIfF c t e -> BIfF (f c) (f t) (f e)
    BLeqF l r -> BLeqF (f l) (f r)
    BTrueF -> BTrueF
    BFalseF -> BFalseF
    FFix0F binder body -> FFix0F binder (f body)
    FFix1F binders body -> FFix1F binders (f body)
    LNilF -> LNilF
    LConsF h t -> LConsF (f h) (f t)
    LCaseF scrut nilCase (binders, consBody) ->
      LCaseF (f scrut) (f nilCase) (binders, f consBody)
|
using System.Collections.Generic;
using UnityEngine;
using UnityEditor;
using System;
// Modal editor window that asks the user for an asset type name and passes
// it to a callback when OK is pressed.
public class InputPrompt : EditorWindow
{
    // Text currently typed by the user; re-read by OnGUI every repaint.
    public string InputValue = "";
    // Invoked with the typed value when OK is pressed.
    private Action<string> _callback;

    // Opens (or focuses) the prompt as a utility window and registers the callback.
    public static void ShowWindow(Action<string> callback)
    {
        InputPrompt window = (InputPrompt)EditorWindow.GetWindow(typeof(InputPrompt), true, "Specify Asset Type", true);
        window._callback = callback;
        window.Show();
    }

    private void OnGUI()
    {
        GUILayout.Label("Enter the name of the asset type you want to introspect:", EditorStyles.wordWrappedLabel);
        InputValue = EditorGUILayout.TextField("Asset Type:", InputValue);
        if (GUILayout.Button("OK"))
        {
            // Robustness fix: the editor can recreate this window (e.g. after a
            // domain reload) without ShowWindow running, leaving _callback null;
            // the old code threw a NullReferenceException in that case.
            _callback?.Invoke(InputValue);
            this.Close();
        }
        if (GUILayout.Button("Cancel"))
        {
            this.Close();
        }
    }
}
// Editor menu commands that enumerate assets under Resources/ by type.
public class IntrospectAssets
{
    [MenuItem("Tools/Introspect All Asset Types in Resources")]
    private static void ListAssetsByType()
    {
        // Load all assets from the Resources folder
        UnityEngine.Object[] allAssets = Resources.LoadAll<UnityEngine.Object>("");
        Dictionary<string, int> assetTypeCounts = new Dictionary<string, int>();
        foreach (var asset in allAssets)
        {
            string typeName = asset.GetType().Name;
            // Idiom fix: one TryGetValue lookup instead of ContainsKey + indexer.
            assetTypeCounts.TryGetValue(typeName, out int count);
            assetTypeCounts[typeName] = count + 1;
        }
        Debug.Log($"Total asset types in Resources: {assetTypeCounts.Count}");
        foreach (var pair in assetTypeCounts)
        {
            Debug.Log($"Asset Type: {pair.Key}, Count: {pair.Value}");
        }
    }

    [MenuItem("Tools/Introspect Specific Asset Type in Resources")]
    private static void ListSpecificAssetType()
    {
        InputPrompt.ShowWindow(assetType =>
        {
            if (string.IsNullOrEmpty(assetType))
            {
                Debug.Log("No asset type specified.");
                return;
            }
            // Robustness fix: Type.GetType returns null for unknown type names;
            // the old code passed that null straight into Resources.LoadAll.
            var type = System.Type.GetType($"UnityEngine.{assetType}, UnityEngine.CoreModule");
            if (type == null)
            {
                Debug.LogWarning($"Unknown UnityEngine type: {assetType}");
                return;
            }
            // Load all assets of the specified type from the Resources folder
            var allAssets = Resources.LoadAll("", type);
            Debug.Log($"Total {assetType} assets in Resources: {allAssets.Length}");
            foreach (var asset in allAssets)
            {
                Debug.Log($"Asset Name: {asset.name}");
            }
        });
    }
}
|
<!-- Dog create/edit form. Each field shows its ng-messages validation block
     only after the control has been touched (ng-if guards on !$pristine). -->
<div class="content" flex="40">
    <form ng-cloak name="dogEditForm" ng-submit="submitForm(dogEditForm)">
        <div layout="column">
            <!-- Name: required, max 30 characters. -->
            <md-input-container>
                <label>Name</label>
                <input type="text"
                       name="name"
                       ng-model="dog.name"
                       md-maxlength="30"
                       required>
                <div ng-if="dogEditForm.name.$invalid && !dogEditForm.name.$pristine"
                     ng-messages="dogEditForm.name.$error">
                    <div ng-message="required">This is required.</div>
                    <div ng-message="md-maxlength">Must be less than 30 characters long.</div>
                </div>
            </md-input-container>
            <!-- Breed: required, options come from the scope's `breeds` list. -->
            <md-input-container>
                <label>Breed</label>
                <md-select name="breed"
                           ng-model="dog.breed"
                           required>
                    <md-option ng-repeat="breed in breeds" ng-value="breed">{{breed}}</md-option>
                </md-select>
                <div ng-if="dogEditForm.breed.$invalid && !dogEditForm.breed.$pristine"
                     ng-messages="dogEditForm.breed.$error">
                    <div ng-message="required">This is required.</div>
                </div>
            </md-input-container>
<md-input-container>
<label>Gender</label>
<md-select name="gender"
ng-model="dog.gender"
required>
<md-option ng-value="'male'">Male</md-option>
<md-option ng-value="'female'">Female</md-option>
</md-select>
<div ng-messages="dogEditForm.gender.$error && !dogEditForm.gender.$pristine">
<div ng-message="required">This is required.</div>
</div>
</md-input-container>
            <!-- Prices: buy price and mating price, side by side. -->
            <div layout="row">
                <div flex="50">
                    <md-input-container style="width: 100%">
                        <label>Buy Price</label>
                        <input type="number"
                               name="price"
                               ng-model="dog.price"
                               required>
                        <div ng-if="dogEditForm.price.$invalid && !dogEditForm.price.$pristine"
                             ng-messages="dogEditForm.price.$error">
                            <div ng-message="required">This is required.</div>
                        </div>
                    </md-input-container>
                </div>
                <div flex="50">
                    <md-input-container style="width: 100%">
                        <label>Mate Price</label>
                        <input type="number"
                               name="matePrice"
                               ng-model="dog.matePrice"
                               required>
                        <div ng-if="dogEditForm.matePrice.$invalid && !dogEditForm.matePrice.$pristine"
                             ng-messages="dogEditForm.matePrice.$error">
                            <div ng-message="required">This is required.</div>
                        </div>
                    </md-input-container>
                </div>
            </div>
            <!-- Free-text description. -->
            <md-input-container>
                <label>Info</label>
                <textarea name="info"
                          ng-model="dog.info"
                          required
                          rows="4">
                </textarea>
                <div ng-if="dogEditForm.info.$invalid && !dogEditForm.info.$pristine"
                     ng-messages="dogEditForm.info.$error">
                    <div ng-message="required">This is required.</div>
                </div>
            </md-input-container>
            <!-- Optional parents, selected by Composer resource identifier. -->
            <div layout="row">
                <div flex="50">
                    <md-input-container style="width: 100%">
                        <label>Mother</label>
                        <md-select name="mother"
                                   ng-model="dog.mother">
                            <md-option ng-repeat="parent in femaleDogs"
                                       ng-value="'resource:org.acme.mynetwork.Dog#' + parent.dogId">{{parent.name}}
                            </md-option>
                        </md-select>
                    </md-input-container>
                </div>
                <div flex="50">
                    <md-input-container style="width: 100%">
                        <label>Father</label>
                        <md-select name="father"
                                   ng-model="dog.father">
                            <md-option ng-repeat="parent in maleDogs"
                                       ng-value="'resource:org.acme.mynetwork.Dog#' + parent.dogId">{{parent.name}}
                            </md-option>
                        </md-select>
                    </md-input-container>
                </div>
            </div>
            <!-- Listing flags. -->
            <div layout="row">
                <md-input-container>
                    <md-checkbox ng-model="dog.forSale">For Sale</md-checkbox>
                </md-input-container>
                <md-input-container>
                    <md-checkbox ng-model="dog.forMate">For Mate</md-checkbox>
                </md-input-container>
            </div>
            <!-- Photo upload: plain file input read by the uploadImage() handler. -->
            <input type="file" id="photo" placeholder="Image">
            <span style="color: #72e47f">{{photoUploadMessage}}</span>
            <div layout="row">
                <md-button ng-click="uploadImage()">
                    Upload Image
                </md-button>
            </div>
        </div>
        <!-- Save is disabled until the form is both valid and modified. -->
        <div layout="row">
            <span flex></span>
            <md-button type="submit" ng-disabled="dogEditForm.$invalid || dogEditForm.$pristine" md-autofocus>
                Save
            </md-button>
            <md-button ng-click="goToDogs()">
                Cancel
            </md-button>
        </div>
    </form>
</div>
|
package dk.itu.raven.join;
import java.util.List;
public abstract class JoinFilterFunctions {
/**
 * Filter that lets every raster value range pass: all ranges are "within"
 * and no range is "outside".
 */
public static IRasterFilterFunction acceptAll() {
    return new IRasterFilterFunction() {
        @Override
        public boolean containsWithin(long lo, long hi) {
            return true;
        }
        @Override
        public boolean containsOutside(long lo, long hi) {
            return false;
        }
    };
}
/**
 * Filter for values inside the inclusive interval [lo, hi].
 */
public static IRasterFilterFunction rangeFilter(long lo, long hi) {
    return new IRasterFilterFunction() {
        @Override
        public boolean containsWithin(long lo2, long hi2) {
            // True iff [lo2, hi2] overlaps [lo, hi] at all.
            return lo2 <= hi && lo <= hi2;
        }
        @Override
        public boolean containsOutside(long lo2, long hi2) {
            // True iff [lo2, hi2] contains at least one value outside [lo, hi].
            return lo2 < lo || hi < hi2;
        }
    };
}
/**
 * Filter for values that pack several fixed-width samples into one long.
 * {@code ranges} holds a (min, max) pair per sample at indices 2*i and
 * 2*i+1, {@code sampleSize} the bit width of each sample, and
 * {@code totalBits} the total packed width.
 *
 * Bug fix: the sample masks were built with an int shift ({@code 1 << n}),
 * which overflows for sample widths of 31 bits or more even though all the
 * arithmetic is done in longs; they now use {@code 1L << n}. Behavior is
 * identical for widths below 31.
 */
public static IRasterFilterFunction multiSampleRangeFilter(List<Long> ranges, int[] sampleSize, int totalBits) {
    return new IRasterFilterFunction() {
        @Override
        public boolean containsWithin(long lo, long hi) {
            boolean lowerMet = false; // signifies that we are allowed to set all further lom's to 0
            boolean higherMet = false; // signifies that we are allowed to set all further him's to their maximum
                                       // possible value
            boolean eitherMet = false; // signifies that we can have either the higherMet OR the lowerMet set to
                                       // true, but not both
            int bitsRemaining = totalBits;
            for (int i = 0; i < sampleSize.length; i++) {
                long mask = (1L << sampleSize[i]) - 1; // all 1s, the length of it is equal to sampleSize[i]
                long lom = lowerMet ? 0 : (lo >> (bitsRemaining - sampleSize[i])) & mask;
                long him = higherMet ? mask : (hi >> (bitsRemaining - sampleSize[i])) & mask;
                long min = ranges.get(2 * i);
                long max = ranges.get(2 * i + 1);
                // the first check is for when we can set the value of either the lom or the him
                // and one of them is satisfied anyways
                // the second check is for when we don't have eitherMet set to true. In that
                // case, we need to either have higherMet set to true or be able to satisfy it
                // anyways. Similarly, we also need to have lowerMet set to true or be able to
                // satisfy it anyway.
                if (!((eitherMet && (lom <= max || min <= him))
                        || ((lom <= max || lowerMet) && (min <= him || higherMet)))) {
                    return false;
                } else if (!((lom <= max || lowerMet) && (min <= him || higherMet))) {
                    // this is for when we are forced to choose one of the options given by
                    // eitherMet (namely we need to choose between setting lowerMet and higherMet)
                    eitherMet = false;
                    if (lom > max) {
                        lowerMet = true;
                    } else {
                        higherMet = true;
                    }
                } else if (eitherMet && (him - 1 >= min || lom + 1 <= max)) {
                    // here, we have eitherMet set to true, and we are able to satisfy at least one
                    // of the conditions necessary to set one of lowerMet or higherMet. In this
                    // case, we can set them both to true (one because its condition is met and the
                    // other because of eitherSet)
                    eitherMet = false;
                    lowerMet = true;
                    higherMet = true;
                }
                // recompute the lom and him, as the lowerMet and higherMet flags might have
                // changed.
                lom = lowerMet ? 0 : (lo >> (bitsRemaining - sampleSize[i])) & mask;
                him = higherMet ? mask : (hi >> (bitsRemaining - sampleSize[i])) & mask;
                if (him - lom >= 2 && him - 1 >= min && lom + 1 <= max) {
                    // we have enough room from min and max that we can set both flags to true
                    eitherMet = false;
                    lowerMet = true;
                    higherMet = true;
                } else if (him - lom == 1 && him - 1 >= min && lom + 1 <= max) {
                    // we can only set exactly one of them to true
                    if (lowerMet) {
                        higherMet = true;
                    } else if (higherMet) {
                        lowerMet = true;
                    } else {
                        eitherMet = true;
                    }
                } else if (him > lom && him - 1 >= min) {
                    // we can only set exactly higherMet, not lowerMet
                    higherMet = true;
                } else if (him > lom && lom + 1 <= max) {
                    // we can only set exactly lowerMet, not higherMet
                    lowerMet = true;
                }
                bitsRemaining -= sampleSize[i];
            }
            return true;
        }
        @Override
        public boolean containsOutside(long lo, long hi) {
            boolean differenceBefore = false; // signifies that an earlier sample has satisfied lom < him. This
                                              // means that both all 0s and all 1s are possible for the rest of the
                                              // samples.
            int bitsRemaining = totalBits;
            for (int i = 0; i < sampleSize.length; i++) {
                long mask = (1L << sampleSize[i]) - 1;
                long lom = (lo >> (bitsRemaining - sampleSize[i])) & mask;
                long him = (hi >> (bitsRemaining - sampleSize[i])) & mask;
                long min = ranges.get(2 * i);
                long max = ranges.get(2 * i + 1);
                // first part means that the range within the currect sample has some member not
                // overlapping with the target range.
                // second part means that there was an earlier sample where lom < him. this
                // means that unless the target range for the current sample accepts both all 0s
                // and all 1s, there is some number that is non-overlapping.
                if (lom < min || him > max || (differenceBefore && (min != 0 || max != mask)))
                    return true;
                if (lom < him)
                    differenceBefore = true;
                bitsRemaining -= sampleSize[i];
            }
            return false;
        }
    };
}
}
|
import json
from objects.functions_for_object import fct_get_external_urls_spotify, fct_get_duration_ms_into_string
# albums.json() --> tracks
class TrackObject:
    """One track as found in an album's ``tracks`` payload.

    ``album`` and ``artists`` are relations that stay None/empty until the
    caller attaches the corresponding objects after construction.
    """

    def __init__(self, available_markets, disc_number, duration_ms, explicit,
                 external_urls, href, id_, is_local, name, preview_url, track_number,
                 type_, uri):
        self.album = None   # parent album object, attached later by the caller
        self.artists = []   # artist objects, attached later by the caller
        self.available_markets = available_markets
        self.disc_number = disc_number
        self.duration_ms = duration_ms
        self.explicit = explicit
        self.external_urls = external_urls
        self.href = href
        self.id = id_
        self.is_local = is_local
        self.name = name
        self.preview_url = preview_url
        self.track_number = track_number
        self.type = type_
        self.uri = uri

    def toJSON(self):
        """Serialize to JSON, temporarily replacing the album back-reference
        by its id to avoid a circular-reference exception."""
        tempo = self.album
        # Bug fix: the album relation may still be None (it is only attached
        # after construction); the old code crashed on ``None.id``.
        self.album = self.album.id if self.album is not None else None
        json_str = json.dumps(self, default=lambda o: o.__dict__, sort_keys=True, indent=0)
        self.album = tempo
        return json_str

    def is_featuring(self):
        """True when the track credits more than one artist."""
        return len(self.artists) > 1

    def is_album_of_artist(self, artist):
        """True when the parent release is an album by ``artist``.

        NOTE(review): assumes ``self.album`` has already been attached;
        raises AttributeError otherwise — confirm callers guarantee this.
        """
        return self.album.album_type.lower() == "album" and artist in self.album.artists

    def is_track_of_artist(self, artist):
        """True when ``artist`` (matched by id) is credited on this track."""
        # Idiom fix: any() replaces the manual flag-and-break loop.
        return any(artist.id == art.id for art in self.artists)

    def get_external_urls_spotify(self):
        """Delegate to the shared helper extracting the Spotify external URL."""
        return fct_get_external_urls_spotify(self.external_urls)

    def get_duration_string(self):
        """Delegate to the shared helper formatting duration_ms as a string."""
        return fct_get_duration_ms_into_string(self.duration_ms)
# playlist().json --> tracks --> track
# ATTENTION: self.episode and self.track are booleans here
class PlaylistTracksObject_track(TrackObject):
    """Track variant found inside a playlist payload; extends TrackObject
    with playlist-only fields (popularity, external ids, boolean flags)."""

    def __init__(self, available_markets, disc_number, duration_ms, episode, explicit, external_ids,
                 external_urls, href, id_, is_local, name, popularity, preview_url, track, track_number, type_, uri):
        super().__init__(available_markets, disc_number, duration_ms, explicit,
                         external_urls, href, id_, is_local, name, preview_url, track_number,
                         type_, uri)
        self.episode = episode  # bool: entry is an episode (see note above)
        self.external_ids = external_ids
        self.popularity = popularity
        self.track = track      # bool: entry is a track (see note above)
# playlist().json --> tracks
class PlaylistTracksObject:
    """Wrapper around one entry of a playlist's ``tracks`` collection."""

    def __init__(self, added_at, added_by, is_local, primary_color, video_thumbnail):
        # The track object itself is attached later by the caller.
        self.track = None
        self.added_at = added_at
        self.added_by = added_by
        self.is_local = is_local
        self.primary_color = primary_color
        self.video_thumbnail = video_thumbnail

    def toJSON(self):
        """Dump every attribute as sorted, minimally indented JSON."""
        as_dict = lambda o: o.__dict__
        return json.dumps(self, default=as_dict, sort_keys=True, indent=0)
def generate_track_object(track):
    """Build the right track wrapper from a raw API dict.

    Playlist entries carry a ``popularity`` field and map to
    PlaylistTracksObject_track; plain album tracks map to TrackObject.
    """
    if "popularity" not in track:
        return TrackObject(track["available_markets"], track["disc_number"], track["duration_ms"],
                           track["explicit"], track["external_urls"], track["href"], track["id"],
                           track["is_local"], track["name"], track["preview_url"],
                           track["track_number"], track["type"], track["uri"])
    return PlaylistTracksObject_track(track["available_markets"], track["disc_number"], track["duration_ms"],
                                      track["episode"], track["explicit"], track["external_ids"],
                                      track["external_urls"], track["href"], track["id"], track["is_local"],
                                      track["name"], track["popularity"], track["preview_url"], track["track"],
                                      track["track_number"], track["type"], track["uri"])
def generate_playlistTrack_object(playlist_track):
    """Build a PlaylistTracksObject from a raw playlist-entry dict."""
    fields = ("added_at", "added_by", "is_local", "primary_color", "video_thumbnail")
    return PlaylistTracksObject(*(playlist_track[key] for key in fields))
|
// Basic bank account: balance is protected so only the hierarchy mutates it.
class CuentaBancaria
{
    public int NumeroCuenta { get; set; }
    public string TitularCuenta { get; set; }
    protected decimal Saldo { get; set; }
    public int Pin { get; set; }

    public CuentaBancaria(int numeroCuenta, string titularCuenta, decimal saldoInicial, int pin)
    {
        NumeroCuenta = numeroCuenta;
        TitularCuenta = titularCuenta;
        Saldo = saldoInicial;
        Pin = pin;
    }

    // Returns the current balance.
    public decimal ConsultarSaldo()
    {
        return Saldo;
    }

    // NOTE(review): deposits are not validated — a negative amount silently
    // reduces the balance. Left unchanged because the console menu does not
    // catch exceptions around deposits; confirm desired policy before adding
    // a guard here.
    public void Depositar(decimal cantidad)
    {
        Saldo += cantidad;
    }

    // Withdraws from the balance.
    // Bug fix: a negative amount used to pass the balance check and
    // *increase* the balance (Saldo -= negative). Withdrawals must now be
    // strictly positive; callers already catch InvalidOperationException.
    public virtual void Retirar(decimal cantidad)
    {
        if (cantidad <= 0)
        {
            throw new InvalidOperationException("La cantidad a retirar debe ser positiva.");
        }
        if (cantidad > Saldo)
        {
            throw new InvalidOperationException("Saldo insuficiente");
        }
        Saldo -= cantidad;
    }

    // Changes the PIN; the new PIN must differ from the current one.
    public void CambiarPIN(int nuevoPin)
    {
        if (nuevoPin == Pin)
        {
            throw new InvalidOperationException("El nuevo PIN no puede ser igual al PIN actual.");
        }
        Pin = nuevoPin;
    }
}
// ATM wrapper over a bank account that caps withdrawal amounts.
// NOTE(review): despite the name, LimiteRetiroDiario limits each single
// withdrawal — no daily running total is tracked; confirm intended behavior.
class CajeroAutomatico : CuentaBancaria
{
    private decimal LimiteRetiroDiario { get; set; }

    public CajeroAutomatico(int numeroCuenta, string titularCuenta, decimal saldoInicial, int pin, decimal limiteRetiroDiario)
        : base(numeroCuenta, titularCuenta, saldoInicial, pin)
    {
        LimiteRetiroDiario = limiteRetiroDiario;
    }

    public override void Retirar(decimal cantidad)
    {
        // Enforce the ATM limit first, then fall through to the base
        // balance check.
        if (cantidad > LimiteRetiroDiario)
        {
            throw new InvalidOperationException("La cantidad de retiro excede el límite diario.");
        }
        base.Retirar(cantidad);
    }
}
// Console ATM driver: PIN check, then a menu loop until the user exits.
class Program
{
    static void Main(string[] args)
    {
        CajeroAutomatico cajero = new CajeroAutomatico(123456, "Usuario Ejemplo", 1000, 1234, 2207);
        Console.WriteLine("Ingresar PIN ");
        // Robustness fix: Convert.ToInt32 crashed on non-numeric input;
        // treat unparseable input the same as a wrong PIN.
        if (!int.TryParse(Console.ReadLine(), out int pin) || pin != cajero.Pin)
        {
            Console.WriteLine("Contraseña Incorrecta.....");
            return;
        }
        while (true)
        {
            Console.WriteLine("1. Consultar saldo");
            Console.WriteLine("2. Depositar fondos");
            Console.WriteLine("3. Retirar efectivo");
            Console.WriteLine("4. Cambiar PIN");
            Console.WriteLine("5. Salir");
            Console.Write("Seleccione una opción: ");
            // Robustness fix: int.Parse crashed on non-numeric menu input.
            if (!int.TryParse(Console.ReadLine(), out int opcion))
            {
                Console.WriteLine("Opción inválida.");
                Console.WriteLine();
                continue;
            }
            switch (opcion)
            {
                case 1:
                    Console.WriteLine($"Saldo actual: {cajero.ConsultarSaldo()}");
                    break;
                case 2:
                    Console.Write("Ingrese la cantidad a depositar: ");
                    if (decimal.TryParse(Console.ReadLine(), out decimal deposito))
                    {
                        cajero.Depositar(deposito);
                        Console.WriteLine("Depósito exitoso.");
                    }
                    else
                    {
                        Console.WriteLine("Cantidad inválida.");
                    }
                    break;
                case 3:
                    Console.Write("Ingrese la cantidad a retirar: ");
                    if (decimal.TryParse(Console.ReadLine(), out decimal retiro))
                    {
                        try
                        {
                            cajero.Retirar(retiro);
                            Console.WriteLine("Retiro exitoso.");
                        }
                        catch (InvalidOperationException e)
                        {
                            Console.WriteLine($"Error: {e.Message}");
                        }
                    }
                    else
                    {
                        Console.WriteLine("Cantidad inválida.");
                    }
                    break;
                case 4:
                    Console.Write("Ingrese el nuevo PIN: ");
                    if (int.TryParse(Console.ReadLine(), out int nuevoPin))
                    {
                        // Bug fix: CambiarPIN throws when the new PIN equals
                        // the old one; that exception used to crash the app.
                        try
                        {
                            cajero.CambiarPIN(nuevoPin);
                            Console.WriteLine("PIN cambiado exitosamente.");
                        }
                        catch (InvalidOperationException e)
                        {
                            Console.WriteLine($"Error: {e.Message}");
                        }
                    }
                    else
                    {
                        Console.WriteLine("PIN inválido.");
                    }
                    break;
                case 5:
                    Console.WriteLine("Gracias por usar nuestro cajero automático.");
                    return;
                default:
                    Console.WriteLine("Opción inválida.");
                    break;
            }
            Console.WriteLine();
        }
    }
}
|
import { createSlice } from "@reduxjs/toolkit";
// Shape of the auth slice state.
// NOTE(review): `currentUser.currentUser` mirrors the API response, which
// apparently nests the user object next to the JWT inside the auth payload —
// confirm against the backend response shape.
interface User {
  login: {
    currentUser: {
      currentUser: {
        id: number,
        username: string,
        email: string,
        provider: string,
        confirmed : boolean,
        blocked : boolean,
        createdAt: string,
        updateAt: string
      },
      jwt: string
    },
    isFetching: boolean,  // a login request is in flight
    error: boolean        // the last login attempt failed
  },
  // `logout.currentUser` holds the empty placeholder that loginSuccess's
  // counterpart (logoutSuccess) copies back into `login.currentUser`.
  logout: {
    currentUser: {
      currentUser: {
        id: number,
        username: string,
        email: string,
        provider: string,
        confirmed : boolean,
        blocked : boolean,
        createdAt: string,
        updateAt: string
      },
      jwt: string
    },
    isFetching: boolean,
  }
}
// Initial auth state: both branches start with an all-empty user payload.
const initialState: User = {
  login: {
    currentUser: {
      currentUser: {
        id: 0,
        username: '',
        email: '',
        provider: '',
        confirmed : false,
        blocked : false,
        createdAt: '',
        updateAt: ''
      },
      jwt: ''
    },
    isFetching: false,
    error: false,
  },
  // Kept as the pristine "logged out" payload; see logoutSuccess.
  logout: {
    currentUser: {
      currentUser: {
        id: 0,
        username: '',
        email: '',
        provider: '',
        confirmed : false,
        blocked : false,
        createdAt: '',
        updateAt: ''
      },
      jwt: ''
    },
    isFetching: false,
  }
}
// Auth slice: Immer-backed reducers mutate the draft state directly.
const authSlice = createSlice({
  name: "auth",
  initialState,
  reducers: {
    loginStart: (state) => {
      state.login.isFetching = true
    },
    loginSuccess: (state, action) => {
      // Bug fix: these statements were chained with comma operators
      // (`a = b, c = d`), which only worked by accident; they are now
      // ordinary statements.
      state.login.isFetching = false
      state.login.currentUser = action.payload
      state.login.error = false
    },
    loginFailed: (state) => {
      state.login.isFetching = false
      state.login.error = true
    },
    logoutStart: (state) => {
      state.logout.isFetching = true
    },
    logoutSuccess: (state) => {
      // Bug fix: logout.isFetching was never reset after logoutStart,
      // leaving the flag stuck at true.
      state.logout.isFetching = false
      // Restore the empty placeholder user kept under `logout`.
      state.login.currentUser = state.logout.currentUser
    }
  }
})
export const {
  loginStart,
  loginSuccess,
  loginFailed,
  logoutStart,
  logoutSuccess
} = authSlice.actions;
export default authSlice.reducer;
|
# Acoustic well log data imputation

## Imputing Petrophysical Well Log Data with Supervised Machine Learning Techniques
This repository contains the code and resources related to the study titled "Imputing Petrophysical Well Log Data with Supervised Machine Learning Techniques: A Comparative Analysis of Acoustic Logs." This study focuses on forecasting missing acoustic well log values using two machine learning algorithms, namely Random Forest and XGBoost. The comparison between these algorithms aims to provide decision-makers with insights for informed decision-making and enhancing the reliability of log data.
## Study Approach
Dataset Description (Taranaki Basin, New Zealand)
Data Cleaning and Preprocessing
Feature Selection and Correlation Analysis
Model Training (Random Forest and XGBoost)
Performance Evaluation Metrics (R2 Score)
Results and Discussion
Comparative Analysis of Random Forest and XGBoost
Correlation Plots and Feature Importance Analysis
Scatter Plots and KDE plots
Residual Analysis
Conclusion
## Requirements and Dependencies
1. XGBoost (1.7.3)
2. scikit-learn (1.0.1)
3. numpy (1.20.3)
4. pandas (1.3.4)
5. matplotlib (3.5.0)
6. seaborn (0.11.2)
7. missingno (0.5.1)
## Acknowledgments
We extend our heartfelt appreciation to the New Zealand Petroleum & Minerals (NZPAM) and GNS Science for generously providing essential open-source geological and energy-related data through their online platforms. The accessibility to such data profoundly enriched our research, enabling us to conduct a comprehensive analysis and draw meaningful conclusions. Their commitment to open data sharing has been pivotal in advancing our insights in the field of energy studies.
## Getting Started
To replicate the experiments and analyses presented in this study, follow these steps:
Clone this repository to your local machine.
Install the required dependencies listed in the Requirements and Dependencies section above.
Explore the dataset and preprocessing steps in the Jupyter Notebook files.
Run the training scripts for both Random Forest and XGBoost models.
Evaluate model performance using provided evaluation scripts.
Interpret and visualize the results using the provided visualization scripts.
Contribute to the repository by implementing improvements or extensions.
## Contact Information
For inquiries, questions, or collaborations, please contact ***Sarthak Singh*** at sarthaks66@gmail.com

|
// Test code for Adafruit GPS modules using MTK3329/MTK3339 driver
//
// This code shows how to listen to the GPS module in an interrupt
// which allows the program to have more 'freedom' - just parse
// when a new NMEA sentence is available! Then access data when
// desired.
//
// Tested and works great with the Adafruit Ultimate GPS module
// using MTK33x9 chipset
// ------> http://www.adafruit.com/products/746
// Pick one up today at the Adafruit electronics shop
// and help support open source hardware & software! -ada
#include <Adafruit_GPS.h>
#include <SoftwareSerial.h>
// Wiring:
// Connect the GPS Power pin to 5V
// Connect the GPS Ground pin to ground
// Connect the GPS TX (transmit) pin to Digital 8
// Connect the GPS RX (receive) pin to Digital 7
// you can change the pin numbers to match your wiring:
SoftwareSerial mySerial(8, 7);  // RX = 8 (reads GPS TX), TX = 7 (drives GPS RX)
Adafruit_GPS GPS(&mySerial);    // GPS driver bound to the software serial port
// Set GPSECHO to 'false' to turn off echoing the GPS data to the Serial console
// Set to 'true' if you want to debug and listen to the raw GPS sentences
#define GPSECHO false
// One-time initialization: serial console, GPS baud rate, NMEA sentence
// selection and update rate.
void setup()
{
  // Connect at 115200 so we can read the GPS fast enough and echo without
  // dropping chars; also spit it out.
  Serial.begin(115200);
  delay(5000);
  Serial.println("Adafruit GPS library basic parsing test!");
  // 9600 NMEA is the default baud rate for Adafruit MTK GPS's - some use 4800.
  GPS.begin(9600);
  // Turn on RMC (recommended minimum) and GGA (fix data) including altitude.
  // loop() reports altitude/fixquality/satellites, which only arrive in GGA
  // sentences, so RMC+GGA must stay enabled.
  GPS.sendCommand(PMTK_SET_NMEA_OUTPUT_RMCGGA);
  // BUG FIX: the original also sent PMTK_SET_NMEA_OUTPUT_RMCONLY right after,
  // which overrode the RMC+GGA setting and silently disabled the GGA fields
  // printed in loop(). Re-enable it only if you need RMC alone.
  // GPS.sendCommand(PMTK_SET_NMEA_OUTPUT_RMCONLY);
  // 1 Hz update rate: the parsing/printing code can't keep up reliably above that.
  GPS.sendCommand(PMTK_SET_NMEA_UPDATE_1HZ);
  // Request updates on antenna status, uncomment for diagnostics.
  // GPS.sendCommand(PGCMD_ANTENNA);
  delay(1000);
  // Ask for firmware version.
  // mySerial.println(PMTK_Q_RELEASE);
}
// Timer used to rate-limit the JSON status output from loop().
uint32_t timer = millis();

// Main loop: feed the NMEA parser on every pass and, roughly every two
// seconds, emit one JSON object with the current fix (or an error entry).
void loop() // run over and over again
{
  char c = GPS.read();
  // If you want to debug, this is a good time to do it!
  if ((c) && (GPSECHO))
    Serial.write(c);
  // If a sentence is received, we can check the checksum and parse it.
  if (GPS.newNMEAreceived()) {
    // Printing the raw sentence here would make us miss other sentences,
    // so be very wary if using OUTPUT_ALLDATA and trying to print out data.
    if (!GPS.parse(GPS.lastNMEA())) // this also sets the newNMEAreceived() flag to false
      return; // we can fail to parse a sentence in which case we should just wait for another
  }
  // Approximately every 2 seconds or so, print out the current stats.
  if (millis() - timer > 2000) {
    timer = millis(); // reset the timer
    Serial.print("{");
    if (GPS.fix)
    {
      // ISO 8601 format: date and time in Coordinated Universal Time (UTC).
      Serial.print("\"timestamp\":\"");
      // GPS.year is the two-digit year; pad it so e.g. 2009 prints "2009".
      Serial.print("20");
      if (GPS.year < 10) { Serial.print('0'); }
      Serial.print(GPS.year, DEC); Serial.print("-");
      if (GPS.month < 10) { Serial.print('0'); }
      Serial.print(GPS.month, DEC); Serial.print("-");
      if (GPS.day < 10) { Serial.print('0'); }
      Serial.print(GPS.day, DEC);
      Serial.print("T");
      if (GPS.hour < 10) { Serial.print('0'); }
      Serial.print(GPS.hour, DEC); Serial.print(':');
      if (GPS.minute < 10) { Serial.print('0'); }
      Serial.print(GPS.minute, DEC); Serial.print(':');
      if (GPS.seconds < 10) { Serial.print('0'); }
      // BUG FIX: ISO 8601 separates whole seconds from the fractional part
      // with '.', not the ':' the original printed.
      Serial.print(GPS.seconds, DEC); Serial.print('.');
      // Zero-pad milliseconds to three digits.
      if (GPS.milliseconds < 10) {
        Serial.print("00");
      } else if (GPS.milliseconds < 100) {
        Serial.print("0");
      }
      // BUG FIX: the original emitted only the padding and never the
      // milliseconds value itself, producing timestamps like "...:42:00Z".
      Serial.print(GPS.milliseconds, DEC);
      Serial.print("Z\",");
      // latitude_fixed/longitude_fixed are degrees * 1e7; convert to decimal degrees.
      Serial.print("\"latitude\":"); Serial.print(GPS.latitude_fixed/10000000.0, 8); Serial.print(",");
      Serial.print("\"longitude\":"); Serial.print(GPS.longitude_fixed/10000000.0, 8); Serial.print(",");
      Serial.print("\"altitude\":"); Serial.print(GPS.altitude); Serial.print(",");
      Serial.print("\"speed\":"); Serial.print(GPS.speed); Serial.print(",");
      Serial.print("\"angle\":"); Serial.print(GPS.angle); Serial.print(",");
      Serial.print("\"fix\":"); Serial.print((int)GPS.fix); Serial.print(",");
      Serial.print("\"fixquality\":"); Serial.print((int)GPS.fixquality); Serial.print(",");
      Serial.print("\"satellites\":"); Serial.print((int)GPS.satellites); Serial.print(",");
      Serial.print("\"antenna\":"); Serial.print((int)GPS.antenna); Serial.print("");
    }
    else
    {
      Serial.print("\"err\":\"no gps fix\"");
    }
    Serial.println("}");
  }
}
|
@model WebQuanAoAI.Models.ProductModel
@{
    ViewData["Title"] = "Edit";
}
<h1>Edit</h1>
<div class="col-9">
    <a asp-action="Index"><button class="btn btn-dark">Back to product list</button></a>
    @* multipart/form-data is required so the ImageUpload file reaches the controller *@
    <form asp-action="Edit" enctype="multipart/form-data">
        <div asp-validation-summary="ModelOnly" class="text-danger"></div>
        <div class="form-group">
            <label>Name:</label>
            <input required asp-for="Name" class="form-control" />
            <span asp-validation-for="Name" class="text-danger"></span>
        </div>
        <div class="form-group">
            <label>Description:</label>
            <textarea required asp-for="Description" class="form-control"></textarea>
            <span asp-validation-for="Description" class="text-danger"></span>
        </div>
        <div class="form-group">
            <label>Price:</label>
            <input required asp-for="Price" class="form-control" />
            <span asp-validation-for="Price" class="text-danger"></span>
        </div>
        <div class="form-group form-select" aria-label="Default select example">
            <label>Category:</label>
            <select required asp-for="CategoryId" asp-items="ViewBag.Categories">
            </select>
            @* BUG FIX: validation message must target the bound property (CategoryId,
               not Category) or server-side model errors never show here. *@
            <span asp-validation-for="CategoryId" class="text-danger"></span>
        </div>
        <div class="form-group form-select" aria-label="Default select example">
            <label>Brand:</label>
            <select required asp-for="BrandId" asp-items="ViewBag.Brands">
            </select>
            @* BUG FIX: same as above — target BrandId, not Brand. *@
            <span asp-validation-for="BrandId" class="text-danger"></span>
        </div>
        <div class="form-group">
            <label>Image:</label>
            @* NOTE(review): `required` forces users to re-upload an image on every
               edit — confirm this is intended for the Edit form. *@
            <input required asp-for="ImageUpload" class="form-control-file" />
            <span asp-validation-for="ImageUpload" class="text-danger"></span>
            <img src="~/media/products/@Model.Image" width="120" />
        </div>
        <div class="form-group">
            <button type="submit" class="btn btn-primary">Update</button>
        </div>
    </form>
</div>
<div>
    <a asp-action="Index">Back to List</a>
</div>
@section Scripts {
    @{await Html.RenderPartialAsync("_ValidationScriptsPartial");}
}
|
import * as React from 'react';
import Grid from '@material-ui/core/Grid';
import moment from 'moment';
import { DateField } from '@pec/aion-ui-form/components/DateField';
import { dateIsOnOrBefore, maxLength, required } from '@pec/aion-ui-core/validators';
import { Field } from 'react-final-form';
import { FieldState } from 'final-form';
import { GridContainer } from '@pec/aion-ui-components/components/GridContainer';
import { IAccreditationForm } from 'interfaces/accreditationForm';
import { TextField } from '@pec/aion-ui-form/components/TextField';
// Combines field-level validators into one, returning the first error found.
// Hoisted to module scope so a fresh function isn't allocated on every render.
const composeValidators = (...validators: any[]) => (
  value: any,
  allValues: IAccreditationForm,
  meta: FieldState<any>
) => validators.reduce((error, validator) => error || validator(value, allValues, meta), undefined);

// Shared accreditation form fields: name, issue date and external ID.
export const CommonAccreditationFormFields: React.FC = () => {
  // Current UTC timestamp, recomputed per render so the issue-date upper
  // bound stays accurate during long-lived sessions.
  const today = moment()
    .utc()
    .format();
  return (
    <GridContainer justify="center">
      <Grid item xs={12}>
        <GridContainer>
          <Grid item xs={12}>
            <Field<string>
              name="name"
              label="Accreditation Name"
              component={TextField}
              fullWidth
              variant="filled"
              required
              validate={composeValidators(maxLength(50), required)}
            />
          </Grid>
          {/* CONSISTENCY FIX: was md={12}; siblings use xs={12}, and an
              md-only prop left this item auto-sized below the md breakpoint. */}
          <Grid item xs={12}>
            <Field<string>
              name="issueDateUtc"
              label="Issue Date"
              inputVariant="filled"
              component={DateField}
              fullWidth
              required
              validate={composeValidators(dateIsOnOrBefore(today), required)}
              maxDate={today}
            />
          </Grid>
          <Grid item xs={12}>
            <Field<string>
              name="accreditationId"
              label="ID"
              component={TextField}
              fullWidth
              variant="filled"
              required
              validate={composeValidators(maxLength(50), required)}
            />
          </Grid>
        </GridContainer>
      </Grid>
    </GridContainer>
  );
};
|
const express = require('express')
const app = express()
// Loads variables from .env into process.env (npm i dotenv).
const dotenv = require("dotenv").config()
const PORT = process.env.PORT || 8080
// Requiring the config establishes the database connection as a side effect.
const db = require('./config/db.config')
const cookieParser = require('cookie-parser')
const { isSeller, authMiddleware } = require('./middleware/auth.middleware.js')
const { getSingleData, getProduct, getHome, getHomeCategory, getSlider, getHomeSlider, getSnaks } = require('./controller/product.controller.js')
const { getCart } = require('./controller/cart.controller.js')

// Body parsing, cookie parsing and the EJS view engine.
app.use(express.json())
app.use(express.urlencoded({ extended: true }))
app.use(cookieParser())
app.set('view engine', "ejs")

// Home page.
app.get('/', getHome)

// Cart: authenticated sub-router, then the cart page controller.
// BUG FIX: removed a duplicate `app.get('/cart', ...)` handler that read
// `req.session.cart` — no session middleware is configured, so `req.session`
// was undefined, the handler threw on every request, and it shadowed the
// getCart handler registered further down.
app.use('/cart', authMiddleware, require("./routes/cart.router.js"))
app.get('/cart', getCart)

// Static category/content pages.
app.get('/category', (req, res) => {
    res.render("category.ejs")
})
app.get('/address', (req, res) => {
    res.render("addressform.ejs")
})
app.get('/features', (req, res) => {
    res.render("features.ejs")
})
app.get('/blog', (req, res) => {
    res.render("blog.ejs")
})
app.get('/vegitables', (req, res) => {
    res.render("vegitables.ejs")
})
app.get('/fruits', (req, res) => {
    res.render("fruits.ejs")
})
app.get('/diaryProduct', (req, res) => {
    res.render("diaryProduct.ejs")
})
app.get('/meat', (req, res) => {
    res.render("meat.ejs")
})
app.get('/bakingGoods', (req, res) => {
    res.render("bakingGoods.ejs")
})
// BUG FIX (next three routes): res.render resolves view names relative to the
// views directory; the original leading '/' (e.g. "/desserts.ejs") made
// Express treat them as absolute paths and fail to locate the templates.
app.get('/desserts', (req, res) => {
    res.render("desserts.ejs")
})
app.get('/dryFruits', (req, res) => {
    res.render("dryFruits.ejs")
})
app.get('/other', (req, res) => {
    res.render("other.ejs")
})

// Feature routers.
app.use('/user', require('./routes/user.routes.js'))
app.use('/product', require('./routes/product.routes.js'))
app.use('/order', require('./routes/order.router.js'))
app.use('/seller', authMiddleware, require('./routes/seller.routes.js'))
// app.get('/singlePage/:id', getSingleData)

app.listen(PORT, () => {
    console.log(`server on ${PORT}`)
})
|
extends CardState
# Minimum time (seconds) a card must be held before a click can confirm the
# drag; guards against accidental instant releases.
# (The "TRESHHOLD"/"elasped" spellings are kept as-is: other scripts may
# reference these names.)
const DRAG_MINIMUM_TRESHHOLD := 0.05
# Set true by a one-shot timer in enter() once the minimum drag time passes.
var minimum_drag_time_elasped := false
# We need to reparent the CardUI from our Hand/Hbox
# So that we can freely move our Card UI
# Called when the card enters the dragging state.
# Reparents the CardUI out of its Hand container so it can move freely, and
# starts the one-shot timer that gates the confirm input in on_input().
func enter() -> void:
	# Grab the BattleUI via its "ui_layer" group rather than walking up the
	# parent chain (Hand -> BattleUI); relying on the exact scene layout would
	# break as soon as the hierarchy changes.
	var ui_layer := get_tree().get_first_node_in_group("ui_layer")
	if ui_layer:
		card_ui_node.reparent(ui_layer)
	# Debugging visuals: tint the card and show the active state name.
	card_ui_node.color.color = Color.NAVY_BLUE
	card_ui_node.state.text = "DRAGGING STATE"
	minimum_drag_time_elasped = false
	# One-shot scene-tree timer; the `false` argument makes it pause when the
	# scene tree is paused.
	var treshold_timer := get_tree().create_timer(DRAG_MINIMUM_TRESHHOLD,false)
	# When the timer fires, the drag becomes confirmable.
	treshold_timer.timeout.connect(func(): minimum_drag_time_elasped = true)
# Transitioning to Release OR Base state.
# Handles input while the card is dragged: follows the cursor on mouse motion,
# cancels back to BASE on right click, and confirms a transition to RELEASED
# on left click once the minimum drag time has elapsed.
func on_input(event: InputEvent) -> void:
	var is_mouse_motion := event is InputEventMouseMotion
	var wants_cancel = event.is_action_pressed("right_mouse")
	var wants_confirm = event.is_action_released("left_mouse") or event.is_action_pressed("left_mouse")
	if is_mouse_motion:
		# Follow the cursor; subtracting the pivot offset keeps the card
		# anchored where it was grabbed.
		card_ui_node.global_position = card_ui_node.get_global_mouse_position() - card_ui_node.pivot_offset
	if wants_cancel:
		# Right click aborts the drag and returns to the base state.
		transition_requested.emit(self, CardState.State.BASE)
	elif minimum_drag_time_elasped and wants_confirm:
		# Mark the event handled so the same click can't immediately re-grab
		# the card, then hand off to the released state.
		get_viewport().set_input_as_handled()
		transition_requested.emit(self, CardState.State.RELEASED)
|
import React, { useState } from "react";
import { Link, useNavigate } from "react-router-dom";
import { UserAuth } from "../../context/AuthContext";
const Register = () => {
const [error, setError] = useState("");
const [email, setEmail] = useState("");
const [password, setPassword] = useState("");
const history = useNavigate();
// eslint-disable-next-line
const { user, signUp } = UserAuth();
const handleSubmit = async (e) => {
e.preventDefault();
try {
await signUp(email, password);
history("/");
} catch (error) {
setError(error);
}
};
return (
<>
<div className="w-full h-screen">
<img
className="hidden sm:block absolute w-full h-full object-cover"
src="https://assets.nflxext.com/ffe/siteui/vlv3/5ea364b1-8e59-4693-8ad8-f0eaee32d1bf/fe7046a2-cca7-45d7-b041-ab4a43ac971e/NL-nl-20220530-popsignuptwoweeks-perspective_alpha_website_large.jpg"
alt="/"
/>
<div className="fixed bg-black/80 top-0 left-0 w-full h-screen"></div>
<div className="fixed w-full px-4 py-24 z-50">
<div className="max-w-[450px] h-[600px] mx-auto bg-black/75 text-white">
<div className="max-w-[320px] mx-auto py-16">
<h1 className="text-3xl font-bold">Register to Netflix</h1>
<form
onSubmit={handleSubmit}
className="w-full flex flex-col py-4 gap-3"
>
<input
className="p-3 bg-gray-700 rounded "
type="email"
placeholder="Email"
autoComplete="email"
onChange={(e) => setEmail(e.target.value)}
/>
<input
className="p-3 bg-gray-700 rounded "
type="password"
placeholder="Password"
autoComplete="current-password"
onChange={(e) => setPassword(e.target.value)}
/>
<button className="bg-red-600 py-3 my-6 rounded font-bold">
Sign Up
</button>
<div className="flex justify-between items-center text-sm text-gray-400">
<p>
<input className="mr-2" type="checkbox" /> Remember Me!
</p>
<p>Need Help!</p>
</div>
<p className="py-4">
<span>Already subscribed to Netflix?</span>
{" "}
<Link to="/login" className="text-white">
Sign In
</Link>
</p>
</form>
<p>{error}</p>
</div>
</div>
</div>
</div>
</>
);
};
export default Register;
|
const mongoose = require('mongoose');
const Joi = require('joi');
// Book Schema — persisted shape of a book document (timestamps auto-managed).
const BookSchema = new mongoose.Schema({
title: {
type: String,
required: true,
trim: true,
minlength: 3,
maxlength: 250
},
author: {
type: mongoose.Schema.Types.ObjectId,
required: true,
// Reference to the Author collection (populate target).
ref: 'Author',
// NOTE(review): minlength is a String validator and has no effect on an
// ObjectId field — presumably a leftover; confirm before removing.
minlength: 5
},
description: {
type: String,
required: true,
trim: true
},
price: {
type: Number,
required: true,
// Prices may not be negative.
min: 0
},
cover: {
type: String,
required: true,
// Only these two cover types are accepted.
enum: ['soft cover', "hard cover"]
}
}, {timestamps: true});
// Book Model
const Book = mongoose.model('Book', BookSchema);
// Validate Create Book
// Validates a create-book payload; every field is required. Returns Joi's
// { value, error } result object.
function validateCreateBook(obj) {
    // Fixed the misspelled local `shcema`, and added .trim() to description so
    // create-validation matches both the update validator and the schema's
    // own trim option.
    const schema = Joi.object({
        title: Joi.string().trim().min(3).max(250).required(),
        author: Joi.string().required(),
        description: Joi.string().trim().min(5).required(),
        price: Joi.number().min(0).required(),
        cover: Joi.string().valid('soft cover', "hard cover").required()
    });
    return schema.validate(obj);
}
// Validate Update Book
function validateUpdateBook(obj) {
const shcema = Joi.object({
title: Joi.string().trim().min(3).max(250),
author: Joi.string(),
description: Joi.string().trim().min(5),
price: Joi.number().min(0),
cover: Joi.string().valid('soft cover', "hard cover")
});
return shcema.validate(obj);
}
module.exports = {
Book,
validateCreateBook,
validateUpdateBook
}
|
## Problem Minimum island
url: https://www.structy.net/problems/minimum-island
Write a function, minimum_island, that takes in a grid containing Ws and Ls. W represents water and L represents land. The function should return the size of the smallest island. An island is a vertically or horizontally connected region of land.
You may assume that the grid contains at least one island.
grid = [
['W', 'L', 'W', 'W', 'W'],
['W', 'L', 'W', 'W', 'W'],
['W', 'W', 'W', 'L', 'W'],
['W', 'W', 'L', 'L', 'W'],
['L', 'W', 'W', 'L', 'L'],
['L', 'L', 'W', 'W', 'W'],
]
minimum_island(grid) # -> 2
## Solution
Summary: Scan every cell of the grid and run a recursive DFS from each one, counting 1 for every reachable land cell; the base cases (out of bounds, water, already visited) return 0. Track the minimum positive island size seen.
```python
def minimum_island(grid):
    """Return the size of the smallest island in ``grid``.

    An island is a vertically/horizontally connected region of 'L' cells.
    Assumes the grid contains at least one island (otherwise inf is returned).
    """
    # Removed a leftover debug print(r, c) that ran for every cell.
    min_size = float('inf')
    visited = set()
    for r in range(len(grid)):
        for c in range(len(grid[r])):
            # explore returns 0 for water/visited cells, so only a positive
            # result is a newly discovered island.
            size = explore(r, c, grid, visited)
            if size > 0:
                min_size = min(min_size, size)
    return min_size
def explore(r, c, grid, visited):
    """Flood-fill from (r, c) and return the size of that land region.

    Returns 0 for out-of-bounds cells, water ('W') cells, and cells that were
    already visited; marks every counted cell in ``visited``.
    """
    if not (0 <= r < len(grid) and 0 <= c < len(grid[0])):
        return 0
    if grid[r][c] == "W" or (r, c) in visited:
        return 0
    visited.add((r, c))
    # This cell plus its four orthogonal neighbours.
    neighbours = [(r + 1, c), (r - 1, c), (r, c + 1), (r, c - 1)]
    return 1 + sum(explore(nr, nc, grid, visited) for nr, nc in neighbours)
grid = [
['W', 'L', 'W', 'W', 'W'],
['W', 'L', 'W', 'W', 'W'],
['W', 'W', 'W', 'L', 'W'],
['W', 'W', 'L', 'L', 'W'],
['L', 'W', 'W', 'L', 'L'],
['L', 'L', 'W', 'W', 'W'],
]
minimum_island(grid) # -> 2
```
|
import { Injectable, Inject } from "@angular/core";
import { HttpHeaders, HttpClient } from "@angular/common/http";
import { APP_CONFIG, AppConfig } from "../providers/app-config.module";
import { ORMChartModuleSettingSave } from "../models/setting/ORMChartModuleSettingSave";
import { ORMChartModuleTempSettingSave } from "../models/setting/ORMChartModuleTempSettingSave";
import { ORMDeleteRecord } from "../models/general/orm-delete-record";
import { ORMSaveReferral } from "../models/setting/ORMSaveReferral";
import { ORMRSaveoleAdministrationModule } from "../models/setting/ORMRSaveoleAdministrationModule";
import { ORMRoleCDSRules } from "../models/setting/ORMRoleCDSRules";
import { SearchCriteria } from "../models/common/search-criteria";
import { ORMinsurancesetup } from "../models/setting/orm-insurance-setup";
import { ORMSaveSetupFacility } from "../models/setting/ORMSaveSetupFacility";
import { ORMSaveSetupLocation } from "../models/setting/ORMSaveSetupLocation";
import { ORMSaveSetupBillingProvider } from "../models/setting/ORMSaveSetupBillingProvider";
import { ORMSaveSetupProvider } from "../models/setting/ORMSaveSetupProvider";
import { ORMSaveSetupLabCategoryDetail } from "../models/setting/ORMSaveSetupLabCategoryDetail";
import { ORMSaveSetupSubLabCategory } from "../models/setting/ORMSaveSetupSubLabCategory";
import { ORMSaveSetupLabCategory } from "../models/setting/ORMSaveSetupLabCategory";
import { ORMSaveSuperBillCategoryDetail } from "../models/setting/ORMSaveSuperBillCategoryDetail";
import { ORMSaveSuperBillCategory } from "../models/setting/ORMSaveSuperBillCategory";
import { ORMSaveSuperBill } from "../models/setting/ORMSaveSuperBill";
import { ORMSaveGuarantor } from "../models/setting/ORMSaveGuarantor";
import { ORMTemplate } from "../models/setting/ORMTemplate";
import { ORMtemplateprovider } from "../models/setting/ORMtemplateprovider";
import { ORMMyListICD } from "../models/setting/ORMMyListICD";
import { ORMMyListCPT } from "../models/setting/ORMMyListCPT";
import { ORMAppSettingsSave } from "../models/setting/orm-app-settings-save";
import { ORMRestrictedCode } from "../models/billing/ORMRestrictedCode";
import { ORMAdjustmentReasonCodes } from "../models/billing/ORMAdjustmentReasonCodes";
import { ORMProcedureSetup } from "../models/setting/ORMProcedureSetup";
import { ORMDiagnosis } from "../models/setting/ORMDiagnosis";
import { ORMWriteOffCodes } from "../models/billing/ORMWriteOffCodes";
import { ORMPractice } from "../models/setting/ORMPractice";
import { ORMPracticeServices } from "../models/setting/ORMPracticeServices";
import { ORMProviderTemplateSave } from "../models/setting/ORMProviderTemplateSave";
import { ORM_template_provider } from "../models/setting/ORM_template_provider";
@Injectable()
export class SetupService {
// Shared HTTP options: every setup endpoint exchanges JSON payloads.
private httpOptions = {
headers: new HttpHeaders({ 'Content-Type': 'application/json' })
};
// apiEndpoint (base URL for all calls) comes from the injected app config.
constructor(private http: HttpClient,
@Inject(APP_CONFIG) private config: AppConfig) { }
// --- Attorney / referring physician / guarantor lookups and persistence ---
getSetupAttorney(practice_id: string) {
return this.http.get(this.config.apiEndpoint + 'setup/getSetupAttorney/' + practice_id, this.httpOptions);
}
getSetupReferringPhysician(practice_id: string) {
// NOTE(review): the route segment 'getSetupRefferingPhysician' is misspelled
// ("Reffering"); presumably the server route uses the same spelling — confirm
// before correcting it on either side.
return this.http.get(this.config.apiEndpoint + 'setup/getSetupRefferingPhysician/' + practice_id, this.httpOptions);
}
getSetupGuarantor(practice_id: string) {
return this.http.get(this.config.apiEndpoint + 'setup/getSetupGuarantor/' + practice_id, this.httpOptions);
}
saveGuarantor(obj: ORMSaveGuarantor) {
return this.http
.post(this.config.apiEndpoint + 'setup/saveGuarantor', obj, this.httpOptions);
}
deleteGuarantor(obj: ORMDeleteRecord) {
return this.http
.post(this.config.apiEndpoint + 'setup/deleteGuarantor', obj, this.httpOptions);
}
// --- Super bill queries and chart-module / encounter setting persistence ---
getSuperBillSetup(practice_id: string) {
return this.http.get(this.config.apiEndpoint + 'setup/getSuperBillSetup/' + practice_id, this.httpOptions);
}
getSuperBillCategorySetup(bill_id: string) {
return this.http.get(this.config.apiEndpoint + 'setup/getSuperBillCategorySetup/' + bill_id, this.httpOptions);
}
getSuperBillCategoryDetailSetup(category_id: string) {
return this.http.get(this.config.apiEndpoint + 'setup/getSuperBillCategoryDetailSetup/' + category_id, this.httpOptions);
}
getSetupChartModuleSetting(practice_id: string) {
return this.http.get(this.config.apiEndpoint + 'setup/getSetupChartModuleSetting/' + practice_id, this.httpOptions);
}
getSetupChartModuleSettingsDetail(setting_id: string) {
return this.http.get(this.config.apiEndpoint + 'setup/getSetupChartModuleSettingsDetail/' + setting_id, this.httpOptions);
}
getSetupChartModuleAll() {
return this.http.get(this.config.apiEndpoint + 'setup/getSetupChartModuleAll/', this.httpOptions);
}
saveEncounterSetting(objSettingSave: ORMChartModuleSettingSave) {
return this.http.post(this.config.apiEndpoint + 'setup/saveEncounterSetting/', objSettingSave, this.httpOptions);
}
saveEncounterSettingDetail(lstSave: Array<ORMChartModuleTempSettingSave>) {
return this.http.post(this.config.apiEndpoint + 'setup/saveEncounterSettingDetail/', lstSave, this.httpOptions);
}
deleteEncounterSetting(obj: ORMDeleteRecord) {
return this.http
.post(this.config.apiEndpoint + 'setup/deleteEncounterSetting', obj, this.httpOptions);
}
deleteEncounterSettingModule(obj: ORMDeleteRecord) {
return this.http
.post(this.config.apiEndpoint + 'setup/deleteEncounterSettingModule', obj, this.httpOptions);
}
deleteEncounterSettingPage(obj: ORMDeleteRecord) {
return this.http
.post(this.config.apiEndpoint + 'setup/deleteEncounterSettingPage', obj, this.httpOptions);
}
// --- Referrals, role administration, CDS rules, payers and insurances ---
saveReferral(obj: ORMSaveReferral) {
return this.http
.post(this.config.apiEndpoint + 'setup/saveReferral', obj, this.httpOptions);
}
deleteReferral(obj: ORMDeleteRecord) {
return this.http
.post(this.config.apiEndpoint + 'setup/deleteReferral', obj, this.httpOptions);
}
getRoleAdministrationModules(practice_id: string, role_id: string) {
return this.http.get(this.config.apiEndpoint + 'setup/getRoleAdministrationModules/' + practice_id + '/' + role_id, this.httpOptions);
}
// NOTE(review): PascalCase method names (SaveRoleAdministrationModules,
// SaveRoleCDS below) break the camelCase convention used elsewhere; renaming
// would require updating all callers.
SaveRoleAdministrationModules(lstSave: Array<ORMRSaveoleAdministrationModule>) {
return this.http
.post(this.config.apiEndpoint + 'setup/SaveRoleAdministrationModules', lstSave, this.httpOptions);
}
getPracticeRoleCDSRules(practice_id: string, role_id: string) {
return this.http.get(this.config.apiEndpoint + 'setup/getPracticeRoleCDSRules/' + practice_id + '/' + role_id, this.httpOptions);
}
SaveRoleCDS(lstSave: Array<ORMRoleCDSRules>) {
return this.http
.post(this.config.apiEndpoint + 'setup/SaveRoleCDS', lstSave, this.httpOptions);
}
getClientPayerId(practice_id: string) {
return this.http.get(this.config.apiEndpoint + 'setup/getClientPayerId/' + practice_id, this.httpOptions);
}
getInsurances(searchCriteria: SearchCriteria) {
return this.http.post(this.config.apiEndpoint + 'setup/getInsurances/', searchCriteria, this.httpOptions);
}
saveInsurance(obj: ORMinsurancesetup) {
return this.http.post(this.config.apiEndpoint + 'setup/saveInsurance', obj, this.httpOptions);
}
// --- Payer type, insurance search, super bill and lab category persistence ---
getPayerType(practice_id: string) {
return this.http.get(this.config.apiEndpoint + 'setup/getPayerType/' + practice_id, this.httpOptions);
}
searchInsuranceSetup(searchCriteria: SearchCriteria) {
return this.http.post(this.config.apiEndpoint + 'setup/searchInsuranceSetup/', searchCriteria, this.httpOptions);
}
editWebsite(searchCriteria: SearchCriteria) {
return this.http.post(this.config.apiEndpoint + 'setup/editWebsite', searchCriteria, this.httpOptions);
}
saveSuperBill(obj: ORMSaveSuperBill) {
return this.http.post(this.config.apiEndpoint + 'setup/saveSuperBill', obj, this.httpOptions);
}
deleteSuperBill(obj: ORMDeleteRecord) {
return this.http
.post(this.config.apiEndpoint + 'setup/deleteSuperBill', obj, this.httpOptions);
}
saveSuperBillCategory(obj: ORMSaveSuperBillCategory) {
return this.http.post(this.config.apiEndpoint + 'setup/saveSuperBillCategory', obj, this.httpOptions);
}
deleteSuperBillCategory(obj: ORMDeleteRecord) {
return this.http
.post(this.config.apiEndpoint + 'setup/deleteSuperBillCategory', obj, this.httpOptions);
}
saveSuperBillCategoryDetail(lstSave: Array<ORMSaveSuperBillCategoryDetail>) {
return this.http.post(this.config.apiEndpoint + 'setup/saveSuperBillCategoryDetail', lstSave, this.httpOptions);
}
deleteSuperBillCategoryDetail(obj: ORMDeleteRecord) {
return this.http
.post(this.config.apiEndpoint + 'setup/deleteSuperBillCategoryDetail', obj, this.httpOptions);
}
getSetupLabCategory(practice_id: string) {
return this.http.get(this.config.apiEndpoint + 'setup/getSetupLabCategory/' + practice_id, this.httpOptions);
}
saveSetupLabCategory(obj: ORMSaveSetupLabCategory) {
return this.http.post(this.config.apiEndpoint + 'setup/saveSetupLabCategory', obj, this.httpOptions);
}
deleteSetupLabCategory(obj: ORMDeleteRecord) {
return this.http
.post(this.config.apiEndpoint + 'setup/deleteSetupLabCategory', obj, this.httpOptions);
}
getSetupSubLabCategory(category_id: string) {
return this.http.get(this.config.apiEndpoint + 'setup/getSetupSubLabCategory/' + category_id, this.httpOptions);
}
saveSetupSubLabCategory(obj: ORMSaveSetupSubLabCategory) {
return this.http.post(this.config.apiEndpoint + 'setup/saveSetupSubLabCategory', obj, this.httpOptions);
}
deleteSetupLabCategorysub(obj: ORMDeleteRecord) {
return this.http
.post(this.config.apiEndpoint + 'setup/deleteSetupLabCategorysub', obj, this.httpOptions);
}
getSetupLabCategoryDetail(category_id: string) {
return this.http.get(this.config.apiEndpoint + 'setup/getSetupLabCategoryDetail/' + category_id, this.httpOptions);
}
saveSetupLabCategoryDetail(lstSave: Array<ORMSaveSetupLabCategoryDetail>) {
return this.http.post(this.config.apiEndpoint + 'setup/saveSetupLabCategoryDetail', lstSave, this.httpOptions);
}
deleteSetupLabCategoryDetail(obj: ORMDeleteRecord) {
return this.http
.post(this.config.apiEndpoint + 'setup/deleteSetupLabCategoryDetail', obj, this.httpOptions);
}
// --- Providers, billing providers, locations, facilities and templates ---
getSetupProvider(practice_id: string) {
return this.http.get(this.config.apiEndpoint + 'setup/getSetupProvider/' + practice_id, this.httpOptions);
}
saveSetupProvider(objSave: ORMSaveSetupProvider) {
return this.http.post(this.config.apiEndpoint + 'setup/saveSetupProvider', objSave, this.httpOptions);
}
deleteSetupProvider(obj: ORMDeleteRecord) {
return this.http
.post(this.config.apiEndpoint + 'setup/deleteSetupProvider', obj, this.httpOptions);
}
getSetupBillingProvider(practice_id: string) {
return this.http.get(this.config.apiEndpoint + 'setup/getSetupBillingProvider/' + practice_id, this.httpOptions);
}
saveSetupBillingProvider(objSave: ORMSaveSetupBillingProvider) {
return this.http.post(this.config.apiEndpoint + 'setup/saveSetupBillingProvider', objSave, this.httpOptions);
}
deleteSetupBillingProvider(obj: ORMDeleteRecord) {
return this.http
.post(this.config.apiEndpoint + 'setup/deleteSetupBillingProvider', obj, this.httpOptions);
}
getSetupLocation(practice_id: string) {
return this.http.get(this.config.apiEndpoint + 'setup/getSetupLocation/' + practice_id, this.httpOptions);
}
saveSetupLocation(objSave: ORMSaveSetupLocation) {
return this.http.post(this.config.apiEndpoint + 'setup/saveSetupLocation', objSave, this.httpOptions);
}
deleteSetupLocation(obj: ORMDeleteRecord) {
return this.http
.post(this.config.apiEndpoint + 'setup/deleteSetupLocation', obj, this.httpOptions);
}
getSetupFacility(practice_id: string) {
return this.http.get(this.config.apiEndpoint + 'setup/getSetupFacility/' + practice_id, this.httpOptions);
}
saveSetupFacility(objSave: ORMSaveSetupFacility) {
return this.http.post(this.config.apiEndpoint + 'setup/saveSetupFacility', objSave, this.httpOptions);
}
deleteSetupFacility(obj: ORMDeleteRecord) {
return this.http
.post(this.config.apiEndpoint + 'setup/deleteSetupFacility', obj, this.httpOptions);
}
// type selects which kind of template list to fetch (server-defined values).
getTemplate(practice_id: string, type: string) {
return this.http.get(this.config.apiEndpoint + 'setup/getTemplate/' + practice_id + '/' + type, this.httpOptions);
}
saveTemplateSetup(obj: ORMTemplate) {
return this.http.post(this.config.apiEndpoint + 'setup/saveTemplateSetup', obj, this.httpOptions);
}
DeleteTemplateProvider(searchCriteria: SearchCriteria) {
return this.http.post(this.config.apiEndpoint + 'setup/DeleteTemplateProvider', searchCriteria, this.httpOptions);
}
saveSelectedProvider(obj: ORMtemplateprovider) {
return this.http.post(this.config.apiEndpoint + 'setup/saveSelectedProvider', obj, this.httpOptions);
}
getSetupMyListICD(practice_id: string, provider_id: string) {
return this.http.get(this.config.apiEndpoint + 'setup/getSetupMyListICD/' + practice_id + '/' + provider_id, this.httpOptions);
}
saveSetupMyListICD(lstSave: Array<ORMMyListICD>) {
return this.http.post(this.config.apiEndpoint + 'setup/saveSetupMyListICD', lstSave, this.httpOptions);
}
deleteSetupMyListICD(obj: ORMDeleteRecord) {
return this.http
.post(this.config.apiEndpoint + 'setup/deleteSetupMyListICD', obj, this.httpOptions);
}
getSetupMyListCPT(practice_id: string, provider_id: string) {
return this.http.get(this.config.apiEndpoint + 'setup/getSetupMyListCPT/' + practice_id + '/' + provider_id, this.httpOptions);
}
// POST: save a batch of CPT favourites entries.
saveSetupMyListCPT(lstSave: Array<ORMMyListCPT>) {
return this.http.post(this.config.apiEndpoint + 'setup/saveSetupMyListCPT', lstSave, this.httpOptions);
}
// POST: delete one CPT favourites entry.
deleteSetupMyListCPT(obj: ORMDeleteRecord) {
return this.http
.post(this.config.apiEndpoint + 'setup/deleteSetupMyListCPT', obj, this.httpOptions);
}
// GET: full details for a single template of a practice.
getTemplateDetails(practice_id: string, template_id: string) {
return this.http.get(this.config.apiEndpoint + 'setup/getTemplateDetails/' + practice_id + '/' + template_id, this.httpOptions);
}
// GET: user-configurable application settings for a practice.
getUserConfigurableAppSettings(practice_id: number) {
return this.http.get(this.config.apiEndpoint + 'setup/getUserConfigurableAppSettings/' + practice_id, this.httpOptions);
}
// POST: save a batch of application settings.
// NOTE(review): 'savetConfigurableAppSettings' looks like a typo for
// 'saveConfigurableAppSettings', but the string must match the server route —
// confirm against the API before renaming either side.
saveAppSettings(lstSave: Array<ORMAppSettingsSave>) {
return this.http.post(this.config.apiEndpoint + 'setup/savetConfigurableAppSettings', lstSave, this.httpOptions);
}
// POST: delete the selected template.
// NOTE(review): 'Seleted' is a typo for 'Selected' in both the method name and
// the endpoint; both are load-bearing (callers + server route), so left as-is.
deleteSeletedTemplate(searchCriteria: SearchCriteria) {
return this.http.post(this.config.apiEndpoint + 'setup/deleteSeletedTemplate', searchCriteria, this.httpOptions);
}
// POST: update the category/code-type mapping matched by the criteria.
updateCategoryCodeType(searchCriteria: SearchCriteria) {
return this.http.post(this.config.apiEndpoint + 'setup/updateCategoryCodeType', searchCriteria, this.httpOptions);
}
// GET: restricted codes configured for a practice.
getRestrictedcode(practice_id: number) {
return this.http.get(this.config.apiEndpoint + 'setup/getRestrictedcode/' + practice_id, this.httpOptions);
}
// POST: create or update a restricted code.
saveRestrictedCode(objSave: ORMRestrictedCode) {
return this.http.post(this.config.apiEndpoint + 'setup/saveRestrictedCode', objSave, this.httpOptions);
}
// POST: delete a restricted code.
deleteRestrictedCode(obj: ORMDeleteRecord) {
return this.http
.post(this.config.apiEndpoint + 'setup/deleteRestrictedCode', obj, this.httpOptions);
}
// GET: adjustment reason codes for a practice.
getAdjustcode(practice_id: number) {
return this.http.get(this.config.apiEndpoint + 'setup/getAdjustcode/' + practice_id, this.httpOptions);
}
// POST: create or update an adjustment reason code.
saveAdjustCode(objSave: ORMAdjustmentReasonCodes) {
return this.http.post(this.config.apiEndpoint + 'setup/saveAdjustCode', objSave, this.httpOptions);
}
// POST: delete an adjustment reason code.
deleteAdjustCode(obj: ORMDeleteRecord) {
return this.http
.post(this.config.apiEndpoint + 'setup/deleteAdjustCode', obj, this.httpOptions);
}
// POST: search procedures by the given criteria.
getprocedures(searchCriteria: SearchCriteria) {
return this.http.post(this.config.apiEndpoint + 'setup/getprocedures', searchCriteria, this.httpOptions);
}
// POST: add a procedure definition.
addProcedure(ormProc: ORMProcedureSetup) {
return this.http.post(this.config.apiEndpoint + 'setup/addProcedure', ormProc, this.httpOptions);
}
// POST: update an existing procedure definition.
updateProcedure(ormProc: ORMProcedureSetup) {
return this.http.post(this.config.apiEndpoint + 'setup/updateProcedure', ormProc, this.httpOptions);
}
// POST: delete a procedure definition.
deleteProcedure(obj: ORMDeleteRecord) {
return this.http
.post(this.config.apiEndpoint + 'setup/deleteProcedure', obj, this.httpOptions);
}
// POST: search diagnoses by the given criteria.
getDiagnosis(searchCriteria: SearchCriteria) {
return this.http.post(this.config.apiEndpoint + 'setup/getDiagnosis', searchCriteria, this.httpOptions);
}
// POST: add a diagnosis definition.
addDiagnosis(ormProc: ORMDiagnosis) {
return this.http.post(this.config.apiEndpoint + 'setup/addDiagnosis', ormProc, this.httpOptions);
}
// POST: update an existing diagnosis definition.
updateDiagnosis(ormProc: ORMDiagnosis) {
return this.http.post(this.config.apiEndpoint + 'setup/updateDiagnosis', ormProc, this.httpOptions);
}
// POST: delete a diagnosis definition.
deleteDiagnosis(obj: ORMDeleteRecord) {
return this.http
.post(this.config.apiEndpoint + 'setup/deleteDiagnosis', obj, this.httpOptions);
}
// GET: write-off codes for a practice.
getWriteOffcode(practice_id: number) {
return this.http.get(this.config.apiEndpoint + 'setup/getWriteOffcode/' + practice_id, this.httpOptions);
}
// POST: create or update a write-off code.
saveWriteOffcode(objSave: ORMWriteOffCodes) {
return this.http.post(this.config.apiEndpoint + 'setup/saveWriteOffcode', objSave, this.httpOptions);
}
// POST: delete a write-off code.
deleteWriteOffcode(obj: ORMDeleteRecord) {
return this.http
.post(this.config.apiEndpoint + 'setup/deleteWriteOffcode', obj, this.httpOptions);
}
// GET: every practice visible to the current user.
// NOTE(review): the URL has a trailing slash unlike sibling endpoints — it must
// match the server route, so it is preserved.
getAllPractices() {
return this.http.get(this.config.apiEndpoint + 'setup/getAllPractices/', this.httpOptions);
}
// GET: services enabled for a practice.
// NOTE(review): PascalCase name is inconsistent with camelCase siblings; left
// unchanged to avoid breaking callers.
GetPracticeServices(practice_id: string) {
return this.http.get(this.config.apiEndpoint + 'setup/GetPracticeServices/' + practice_id, this.httpOptions);
}
// POST: create or update a practice record.
saveupdatePractices(obj: ORMPractice) {
return this.http.post(this.config.apiEndpoint + 'setup/saveupdatePractices', obj, this.httpOptions);
}
// POST: create or update the set of services for a practice.
saveupdatePracticesServices(obj: Array<ORMPracticeServices>) {
return this.http.post(this.config.apiEndpoint + 'setup/saveupdatePracticesServices', obj, this.httpOptions);
}
// POST: search template/provider associations by the given criteria.
getTemplateProvider(searchCriteria: SearchCriteria) {
return this.http.post(this.config.apiEndpoint + 'setup/getTemplateProvider', searchCriteria, this.httpOptions);
}
// POST: save a provider-template setup.
saveProvTemplateSetup(obj: ORMProviderTemplateSave){
return this.http.post(this.config.apiEndpoint + 'setup/saveProvTemplateSetup', obj, this.httpOptions);
}
// POST: add or edit a provider-template setup record.
addEditProvTemplateSetup(obj: ORM_template_provider){
return this.http.post(this.config.apiEndpoint + 'setup/addEditProvTemplateSetup', obj, this.httpOptions);
}
// POST: delete the selected provider template.
// NOTE(review): 'Seleted' is a typo for 'Selected' in both the method name and
// the endpoint; both must stay as-is to match callers and the server route.
deleteSeletedProvTemplate(searchCriteria: SearchCriteria) {
return this.http.post(this.config.apiEndpoint + 'setup/deleteSeletedProvTemplate', searchCriteria, this.httpOptions);
}
}
|
// Copyright 2023 Turing Machines
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use super::UsbBoot;
use crate::{usb_boot::UsbBootError, utils::get_device_path};
use async_trait::async_trait;
use std::{fmt::Display, time::Duration};
use tokio::time::sleep;
// USB vendor/product ID pair advertised by the BCM2711 boot ROM
// (Broadcom VID 0x0a5c, PID 0x2711).
const VID_PID: (u16, u16) = (0x0a5c, 0x2711);
// `UsbBoot` backend that boots a Raspberry Pi as a USB mass-storage device
// via the `rustpiboot` crate.
pub struct RpiBoot;
#[async_trait]
impl UsbBoot for RpiBoot {
    /// A node is supported when its USB VID/PID matches the BCM2711 boot ROM.
    fn is_supported(&self, vid_pid: &(u16, u16)) -> bool {
        *vid_pid == VID_PID
    }

    /// Boot the Pi into mass-storage mode, then resolve the device file that
    /// the kernel creates for it.
    async fn load_as_block_device(
        &self,
        _device: &rusb::Device<rusb::GlobalContext>,
    ) -> Result<std::path::PathBuf, UsbBootError> {
        load_rpi_boot().await?;
        log::info!("Checking for presence of a device file ('RPi-MSD-.*')...");
        let lookup = get_device_path(&["RPi-MSD-"]).await;
        lookup.map_err(UsbBootError::internal_error)
    }
}
impl Display for RpiBoot {
    /// Human-readable backend name shown in logs and status output.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str("rustpiboot")
    }
}
/// Reboot the attached Raspberry Pi as a USB mass-storage device, then give
/// the kernel a few seconds to enumerate it.
async fn load_rpi_boot() -> Result<(), UsbBootError> {
    // NOTE(review): `delay` appears to be 500 ms expressed in microseconds —
    // confirm the unit against the rustpiboot documentation.
    let mut options = rustpiboot::Options::default();
    options.delay = 500 * 1000;

    if let Err(err) = rustpiboot::boot(options) {
        return Err(UsbBootError::internal_error(format!(
            "Failed to reboot {:?} as USB MSD: {:?}",
            VID_PID, err
        )));
    }

    sleep(Duration::from_secs(3)).await;
    Ok(())
}
|
library(ggplot2)
library(dplyr)
library(tidyverse)
library(readr)
library(readxl)
library(ggcorrplot)
library(igraph)
library(ggrepel)
library(corrr)
library(mgcv)
library(patchwork)
library(gt)
# ---- Data import and initial sanity checks ----
# Color palette definition
color_palette <- "Paired"
# Import main data file
data.initial <- read_delim("../data/leaderboard_deduped.csv",
delim = ";", escape_double = FALSE, trim_ws = TRUE)
# Import data details
# NOTE(review): the filename spells "detailes" — it must match the file on disk,
# so it is left unchanged; verify before renaming the file.
data.details <- read_delim("../data/leaderboard_repo_detailes.csv",
delim = ";", escape_double = FALSE, trim_ws = TRUE) |> select(-1)
# Check if we have duplicates in data.details
data.details |> duplicated() |> sum()
# Remove duplicates from data.details
data.details[-(data.details |> duplicated() |> which()),] -> data.details
# Join data items with corresponding details
data.initial |> left_join(data.details, by = c("model_name_for_query" = "name")) -> data
# Import dataset maps
dataset.maps <- read_delim("../data/leaderboard_datasets.csv",
delim = ";", escape_double = FALSE, trim_ws = TRUE)
# Rename the first column so it matches the column in data
colnames(dataset.maps)[1] <- "model_name_for_query"
# Import base models
base.models <- read_delim("../data/leaderboard_base_models.csv",
delim = ";", escape_double = FALSE, trim_ws = TRUE)
# Rename the first column so it matches the column in data
colnames(base.models)[1] <- "model_name_for_query"
# Import author activity
author.activity <- read_delim("../data/leaderboard_repo_activity.csv",
delim = ";", escape_double = FALSE, trim_ws = TRUE)
# Import commit data (comma-delimited, unlike the other inputs)
commit.data <- read_delim("../data/leaderboard_commit_data.csv",
delim = ",", escape_double = FALSE, trim_ws = TRUE)
# Remove the first column which is row numbers
commit.data[,-1] -> commit.data
# Check if we have missing values in Average accuracy
sum(is.na(data$`Average ⬆️`))
# Check if data we created has same number of rows as initial data
data.initial$model_name_for_query |> unique() |> length() == data$model_name_for_query |> unique() |> length()
# Check if we have duplicates in final data
data[data |> duplicated(),]
# Check number of flagged, merged MoE models
data$Flagged |> sum()
data$Merged |> sum()
data$MoE |> sum()
# FOR SOME REASON MERGED, FLAGGED AND MOE ARE INVERTED TRUE AND FALSE (this was checked manually on the HF website)
# NOTE(review): the ifelse below compares against the *string* "FALSE", which
# assumes these columns were read as character, not logical — confirm.
data |> mutate(Flagged = ifelse(Flagged == "FALSE", TRUE, FALSE), Merged = ifelse(Merged == "FALSE", TRUE, FALSE), MoE = ifelse(MoE == "FALSE", TRUE, FALSE)) -> data
# ---- Column cleanup and feature derivation ----
# Arrange data by Average accuracy and rename columns that have special characters
data |> arrange(desc(`Average ⬆️`)) |> rename(Average = `Average ⬆️`, HubLikes = `Hub ❤️`,Params = "#Params (B)") -> data
# Remove spaces from colnames
colnames(data) <- gsub(" ", "", colnames(data))
# Extract date of creation and last modification while saving exact time in new column
data |> mutate(created_at_time = created_at, last_modified_time = last_modified) -> data
data |> mutate(created_at = as.Date(created_at), last_modified = as.Date(last_modified)) -> data
# Remove emoji and empty space at the beginning of TypeString.
# FIX: the end position previously used length(TypeString), which is the number
# of rows in the data frame (vector length), not the number of characters; it
# only worked because str_sub() clamps out-of-range ends. Omitting the end
# argument (default -1) takes everything from the 3rd character to the end of
# each string, which is the actual intent.
data |> mutate(TypeString = str_sub(TypeString, 3)) -> data
# Arrange authors by number of repositories
author.activity |> arrange(desc(num_of_repos)) -> author.activity
# Extract important architectures that have at least 50 instances in the dataset
data$Architecture |> tolower() |> table() |> sort() |> as.data.frame() |> arrange(desc(Freq)) |> dplyr::filter(Freq > 50 & Var1 != "?" & Var1 != "unknown") |> pull(Var1) |> droplevels() |> as.character()-> important_architectures
# ---- Architecture grouping and date fixes ----
# Create new column Architecture_new that contains only important architectures all other are grouped under "Other"
data |> mutate(Architecture_new = Architecture |> tolower()) |> mutate(Architecture_new = ifelse(Architecture_new %in% important_architectures, Architecture_new, "Other")) -> data
# Rename some architectures to more readable names.
# NOTE(review): any "important" architecture not listed below falls into the
# TRUE ~ "Other" branch — confirm the list covers every value in
# important_architectures.
data <- data |> mutate(Architecture_new = case_when(
Architecture_new == "llamaforcausallm" ~ "LLama",
Architecture_new == "mixtralforcausallm" ~ "Mixtral",
Architecture_new == "qwen2forcausallm" ~ "Qwen2",
Architecture_new == "mistralforcausallm" ~ "Mistral",
Architecture_new == "gemmaforcausallm" ~ "Gemma",
Architecture_new == "phiforcausallm" ~ "Phi",
Architecture_new == "optforcausallm" ~ "Opt",
Architecture_new == "gpt2lmheadmodel" ~ "GPT2",
Architecture_new == "gptneoxforcausallm" ~ "GPT-NeoX",
TRUE ~ "Other"
))
# Extract date while keeping exact time of first and last commit
commit.data |> mutate(first_commit_time = first_commit, last_commit_time = last_commit, first_commit = as.Date(first_commit), last_commit = as.Date(last_commit)) -> commit.data
# Total days between first and last commit
commit.data |> mutate(total_time_days = as.numeric(difftime(as.Date(last_commit), as.Date(first_commit), units = "days"))) -> commit.data
# Create numeric value for first commit so it can be used in correlation matrix
commit.data |> select(first_commit) |> mutate(first_commit = as.numeric(first_commit) - min(as.numeric(first_commit))) |> pull() -> commit.data$first_commit_numeric
# Drop flagged models entirely
data |> dplyr::filter(Flagged == F) -> data
# Manual correction: Meta-Llama-3-8B creation date (hard-coded fix)
data[data$ModelHub == "https://huggingface.co/meta-llama/Meta-Llama-3-8B",]$created_at <- as.Date("2024-04-17")
data[data$ModelHub == "https://huggingface.co/meta-llama/Meta-Llama-3-8B",]$created_at_time <- ymd_hms("2024-04-17 23:16:24")
# Drop bot accounts (author names ending in "bot")
author.activity |>dplyr::filter(str_detect(author_name, "bot$") == F) -> author.activity
# ---- Fine-tuned models: map each model to its unique base model ----
# NOTE: the bot filter below duplicates the one at the end of the previous
# section in the original script; kept to preserve behavior (it is idempotent).
author.activity |>dplyr::filter(str_detect(author_name, "bot$") == F) -> author.activity
# Filter data for fine-tuned models
data |> dplyr::filter(TypeString == "fine-tuned on domain-specific datasets") -> data.fine.tuned
# Filter base models that are present in fine-tuned data
base.models |> dplyr::filter(model_name_for_query %in% data.fine.tuned$model_name_for_query) -> base.models.fine.tuned
# Calculate column sums for the filtered base models (excluding the first column)
base.models.fine.tuned[,-1] |> colSums() -> colsums
# Identify columns with non-zero sums
colsums > 0 -> idx
idx <- c(TRUE, idx) # Keep the first column (model names)
# Keep only the columns with non-zero sums
base.models.fine.tuned[,idx] -> base.models.fine.tuned
# Calculate row sums again after filtering columns
base.models.fine.tuned[,-1] |> rowSums() -> rowsums
# Keep only the rows where the row sum is 1 (indicating a unique mapping)
base.models.fine.tuned[which(rowsums == 1),] -> base.models.fine.tuned
# Initialize an empty list to store models and their corresponding base models
models.and.base.models.list <- list()
# Loop through each row to extract the base model names
for (i in 1:nrow(base.models.fine.tuned)){
base.models.fine.tuned[i, which(base.models.fine.tuned[i, 1:ncol(base.models.fine.tuned)] != 0)] |> colnames() |> (\(x) x[-1])() -> names
models.and.base.models.list[[base.models.fine.tuned[i,]$model_name_for_query]] <- names
}
# Convert the list to a data frame
models.and.base.model.df <- data.frame(model_name_for_query = names(models.and.base.models.list), base_model = I(models.and.base.models.list) |> unlist())
# Reset row names.
# FIX: rewritten from `models.and.base.model.df |> rownames() <- NULL` — the
# pipe-on-the-left-of-assignment form relies on parse-time expansion and reads
# ambiguously; the plain replacement-function call is equivalent and explicit.
rownames(models.and.base.model.df) <- NULL
# Display the frequency of base model occurrences
models.and.base.models.list |> sapply(length) |> table() |> as.data.frame() |> arrange(desc(Freq)) |> select(-1)
# Save the data frame to a CSV file
models.and.base.model.df |> write.csv("../data/models_and_base_models.csv")
# Calculate the maximum average for fine-tuned models and arrange them.
# NOTE(review): summarise(base_model, ...) assumes `data` carries a base_model
# column from the details join — confirm against the input schema.
data |> dplyr::filter(TypeString == "fine-tuned on domain-specific datasets" & model_name_for_query %in% names(models.and.base.models.list)) |> group_by(model_name_for_query) |> summarise(base_model, Average = max(Average)) |> arrange(Average) |> unique() -> average.fine.tuned.df
# Merge the model and base model data with the average fine-tuned data
models.and.base.model.df |> inner_join(average.fine.tuned.df, by = c("model_name_for_query")) |> select(model_name_for_query, base_model = base_model.x, Average) -> fine.tuned.models.and.base.model.df
# Calculate the maximum average for base models and arrange them
data |> dplyr::filter(model_name_for_query %in% fine.tuned.models.and.base.model.df$base_model) |> group_by(model_name_for_query) |> reframe(base_model = model_name_for_query, Average_base_model = max(Average)) |> arrange(Average_base_model) |> select(-model_name_for_query) -> average.base.models.df
# Display the merged data frame of fine-tuned models and base models.
# FIX: as.tibble() is deprecated (removed from current tibble releases);
# as_tibble() is the supported equivalent.
fine.tuned.models.and.base.model.df |> as_tibble()
# Merge the fine-tuned models data with base model averages and arrange by difference in averages
fine.tuned.models.and.base.model.df |> inner_join(average.base.models.df, by = c("base_model")) |> group_by(model_name_for_query, base_model) |> reframe(Average = max(Average), Average_base_model) |> arrange(Average_base_model - Average) |> unique() -> fine.tuned.models.and.base.model.df
# Save the final data frame to a CSV file
write.csv(fine.tuned.models.and.base.model.df, "../data/finetuned_models_and_base_models_with_scores.csv")
# Group the final data frame by model name for further analysis
fine.tuned.models.and.base.model.df |> group_by(model_name_for_query)
# ---- Chat models: map each model to its unique base model (mirrors the
# fine-tuned section above) ----
# Filter data for chat models
data |> dplyr::filter(TypeString == "chat models (RLHF, DPO, IFT, ...)") -> data.chat.model
# Filter base models that are present in chat model data
base.models |> dplyr::filter(model_name_for_query %in% data.chat.model$model_name_for_query) -> base.models.chat.model
# Calculate column sums for the filtered base models (excluding the first column)
base.models.chat.model[,-1] |> colSums() -> colsums
# Identify columns with non-zero sums
colsums > 0 -> idx
# Keep the first column (model names)
idx <- c(TRUE, idx)
# Keep only the columns with non-zero sums
base.models.chat.model[,idx] -> base.models.chat.model
# Calculate row sums again after filtering columns
base.models.chat.model[,-1] |> rowSums() -> rowsums
# Display the frequency of row sums
table(rowsums)
# Keep only the rows where the row sum is 1 (indicating a unique mapping)
base.models.chat.model[which(rowsums == 1),] -> base.models.chat.model
# Initialize an empty list to store models and their corresponding base models
models.and.base.models.list <- list()
# Loop through each row to extract the base model names
for (i in 1:nrow(base.models.chat.model)){
base.models.chat.model[i, which(base.models.chat.model[i, 1:ncol(base.models.chat.model)] != 0)] |> colnames() |> (\(x) x[-1])() -> names
models.and.base.models.list[[base.models.chat.model[i,]$model_name_for_query]] <- names
}
# Convert the list to a data frame
chat.models.and.base.model.df <- data.frame(model_name_for_query = names(models.and.base.models.list), base_model = I(models.and.base.models.list) |> unlist())
# Reset row names (pipe-LHS assignment expands to rownames(df) <- NULL at parse time)
chat.models.and.base.model.df |> rownames() <- NULL
# Display the frequency of base model occurrences
models.and.base.models.list |> sapply(length) |> table() |> as.data.frame() |> arrange(desc(Freq))
# Save the data frame to a CSV file
chat.models.and.base.model.df |> write.csv("../data/chat_models_and_base_models.csv")
# Calculate the maximum average for chat models and arrange them
data |> dplyr::filter(TypeString == "chat models (RLHF, DPO, IFT, ...)" & model_name_for_query %in% names(models.and.base.models.list)) |> group_by(model_name_for_query) |> reframe(base_model, Average = max(Average)) |> unique() |> arrange(Average) -> average.chat.model.df
# Merge the model and base model data with the average chat model data
chat.models.and.base.model.df |> inner_join(average.chat.model.df, by = c("model_name_for_query")) |> select(model_name_for_query, base_model = base_model.x, Average) -> chat.models.and.base.model.df
# Calculate the maximum average for base models and arrange them
data |> dplyr::filter(model_name_for_query %in% chat.models.and.base.model.df$base_model) |> group_by(model_name_for_query) |> reframe(base_model = model_name_for_query, Average_base_model = max(Average)) |> arrange(Average_base_model) |> select(-model_name_for_query) -> chat.average.base.models.df
# Merge the chat models data with base model averages and arrange by difference in averages
chat.models.and.base.model.df |> inner_join(chat.average.base.models.df, by = c("base_model")) |> group_by(model_name_for_query, base_model) |> reframe(Average = max(Average), Average_base_model) |> arrange(Average_base_model - Average) |> unique() -> chat.models.and.base.model.df
# Save the final data frame to a CSV file
write.csv(chat.models.and.base.model.df, "../data/chat_models_and_base_models_with_scores.csv")
# Group the final data frame by model name for further analysis
chat.models.and.base.model.df |> group_by(model_name_for_query)
# ---- Fine-tuned models: attach training datasets and score deltas ----
# Filter dataset maps for fine-tuned models
dataset.maps |> dplyr::filter(model_name_for_query %in% fine.tuned.models.and.base.model.df$model_name_for_query) -> dataset.finetuned
# Calculate column sums for the filtered datasets (excluding the first column)
dataset.finetuned[,-1] |> colSums() -> colsums1
# Identify columns with non-zero sums
colsums1 > 0 -> idx1
# Keep the first column (model names)
idx1 <- c(TRUE, idx1)
# Calculate row sums for the filtered datasets (excluding the first column)
# NOTE(review): this result is discarded — likely an interactive inspection.
dataset.finetuned[,-1] |> rowSums()
# Keep only the columns with non-zero sums
dataset.finetuned[,idx1] -> dataset.finetuned
# Calculate row sums again after filtering columns
dataset.finetuned[,-1] |> rowSums() -> rowsums1
# Display the filtered dataset
dataset.finetuned
# Initialize an empty list to store models and their corresponding datasets
dataset.finetuned.list <- list()
# Loop through each row to extract the dataset names
for (i in 1:nrow(dataset.finetuned)){
dataset.finetuned[i, which(dataset.finetuned[i, 1:ncol(dataset.finetuned)] != 0)] |> colnames() |> (\(x) x[-1])() |> paste(collapse = ",") -> names
dataset.finetuned.list[[dataset.finetuned[i,]$model_name_for_query]] <- names
}
# Convert the list to a data frame
fine.tuned.models.and.datasets.df <- data.frame(model_name_for_query = names(dataset.finetuned.list), datasets = I(dataset.finetuned.list) |> unlist())
# Reset row names (pipe-LHS assignment expands to rownames(df) <- NULL at parse time)
fine.tuned.models.and.datasets.df |> rownames() <- NULL
# Save the data frame to a CSV file
fine.tuned.models.and.datasets.df |> write.csv("../data/fine.tuned.models.and.datasets.csv")
# Merge the fine-tuned model and base model data with the dataset data
fine.tuned.models.and.base.model.df |> inner_join(fine.tuned.models.and.datasets.df, by = c("model_name_for_query")) -> complete.df.finetuned
# Calculate the average difference and create a histogram
complete.df.finetuned |> mutate(Average_diff = Average - Average_base_model) |> ggplot(aes(x = Average_diff)) + geom_histogram(binwidth = 1) + labs(title = "Average difference", x = "Average difference", y = "Count") + scale_x_continuous(breaks = seq(-1000, 2222, 1))
# Summarize average differences per dataset and create a bar plot
complete.df.finetuned |> group_by(datasets) |> summarise(n = n(), average_diff = mean(Average - Average_base_model)) |> arrange(desc(average_diff)) |> dplyr::filter(n > 2) |> ggplot(aes(x = datasets, y = average_diff)) + geom_bar(stat = "identity") + labs(title = "Average difference per dataset", x = "Dataset", y = "Average difference") + theme(axis.text.x = element_text(angle = 90, hjust = 1))
# Add a column for the number of datasets and save the updated data frame to a CSV file
complete.df.finetuned |> mutate(number_of_datasets = str_count(datasets, ",") + 1, Average_diff = Average - Average_base_model) -> complete.df.finetuned
complete.df.finetuned |> write.csv("../data/complete_df_finetuned.csv")
# Summarize the data by the number of datasets
complete.df.finetuned |> group_by(number_of_datasets) |> summarise(n = n())
# Check correlation between change in Average score and number of datasets
complete.df.finetuned |> select(Average_diff, number_of_datasets) |> cor()
# ---- Chat models: attach training datasets and score deltas (mirrors the
# fine-tuned section above) ----
# Filter dataset maps for chat models
dataset.maps |> dplyr::filter(model_name_for_query %in% chat.models.and.base.model.df$model_name_for_query) -> dataset.chat
# Calculate column sums for the filtered datasets (excluding the first column)
dataset.chat[,-1] |> colSums() -> colsums2
# Identify columns with non-zero sums
colsums2 > 0 -> idx2
# Keep the first column (model names)
idx2 <- c(TRUE, idx2)
# Calculate row sums for the filtered datasets (excluding the first column)
# NOTE(review): this result is discarded — likely an interactive inspection.
dataset.chat[,-1] |> rowSums()
# Keep only the columns with non-zero sums
dataset.chat[,idx2] -> dataset.chat
# Calculate row sums again after filtering columns
dataset.chat[,-1] |> rowSums() -> rowsums2
# Display the filtered dataset
dataset.chat
# Initialize an empty list to store models and their corresponding datasets
dataset.chat.list <- list()
# Loop through each row to extract the dataset names
for (i in 1:nrow(dataset.chat)){
dataset.chat[i, which(dataset.chat[i, 1:ncol(dataset.chat)] != 0)] |> colnames() |> (\(x) x[-1])() |> paste(collapse = ",") -> names
dataset.chat.list[[dataset.chat[i,]$model_name_for_query]] <- names
}
# Convert the list to a data frame
chat.models.and.datasets.df <- data.frame(model_name_for_query = names(dataset.chat.list), datasets = I(dataset.chat.list) |> unlist())
# Reset row names (pipe-LHS assignment expands to rownames(df) <- NULL at parse time)
chat.models.and.datasets.df |> rownames() <- NULL
# Save the data frame to a CSV file
chat.models.and.datasets.df |> write.csv("../data/chat.models.and.datasets.csv")
# Merge the chat model and base model data with the dataset data
chat.models.and.base.model.df |> inner_join(chat.models.and.datasets.df, by = c("model_name_for_query")) -> complete.df.chat
# Calculate the average difference and create a histogram
complete.df.chat |> mutate(Average_diff = Average - Average_base_model) |> ggplot(aes(x = Average_diff)) + geom_histogram(binwidth = 1) + labs(title = "Average difference", x = "Average difference", y = "Count") + scale_x_continuous(breaks = seq(-1000, 2222, 1)) + theme(axis.text.x = element_text(angle = 90, hjust = 1))
# Summarize average differences per dataset and create a bar plot
complete.df.chat |> group_by(datasets) |> summarise(n = n(), average_diff = mean(Average - Average_base_model)) |> arrange(desc(average_diff)) |> dplyr::filter(n > 2) |> ggplot(aes(x = datasets, y = average_diff)) + geom_bar(stat = "identity") + labs(title = "Average difference per dataset", x = "Dataset", y = "Average difference") + theme(axis.text.x = element_text(angle = 90, hjust = 1))
# Add a column for the number of datasets and save the updated data frame to a CSV file
complete.df.chat |> mutate(number_of_datasets = str_count(datasets, ",") + 1, Average_diff = Average - Average_base_model) -> complete.df.chat
complete.df.chat |> write.csv("../data/complete_df_chat.csv")
# Summarize average differences per dataset and create a bar plot (duplicate, appears intentional)
complete.df.chat |> group_by(datasets) |> summarise(n = n(), average_diff = mean(Average - Average_base_model)) |> arrange(desc(average_diff)) |> dplyr::filter(n > 2) |> ggplot(aes(x = datasets, y = average_diff)) + geom_bar(stat = "identity") + labs(title = "Average difference per dataset", x = "Dataset", y = "Average difference") + theme(axis.text.x = element_text(angle = 90, hjust = 1))
# Identify unique datasets that are present in both chat and fine-tuned models
complete.df.chat$datasets[complete.df.chat$datasets %in% complete.df.finetuned$datasets] |> unique()
# Add a new column 'Type' with value 'chat' to the complete.df.chat data frame and 'finetuned' to the complete.df.finetuned data frame
# Bind rows of both to create final data frame
complete.df.chat |> mutate(Type = "chat") |> bind_rows(complete.df.finetuned |> mutate(Type = "finetuned")) -> complete.chat.finetuned.df
# Save the combined data frame to a CSV file
complete.chat.finetuned.df |> write.csv("../data/complete_chat_finetuned.csv")
# ---- Author activity and overall distributions (interactive exploration) ----
# Table of the 400 most active authors
author.activity |> select(author_name, num_of_repos) |> head(400) |>
gt()
# Total repositories held by the top-10 authors
author.activity |> select(author_name, num_of_repos) |> head(10) |> pull(num_of_repos) |> sum()
# Total repositories across all authors
author.activity |> select(author_name, num_of_repos) |> pull(num_of_repos) |> sum()
# Base-graphics histogram of repositories per author
hist(author.activity$num_of_repos, breaks = 100, main = "Number of Repositories per Author", xlab = "Number of Repositories", ylab = "Count", col = "lightblue")
# ggplot version of the same distribution
author.activity |> ggplot(aes(x = num_of_repos)) + geom_histogram(binwidth = 2) + labs(title = "Number of Repositories per Author", x = "Number of Repositories", y = "Count") + theme(axis.text.x = element_text(angle = 90, hjust = 1))
# Number of (non-bot) authors
author.activity |> nrow()
# Histogram of authors per repository
commit.data$num_of_authors |> hist(main = "Number of Authors per Repository", xlab = "Number of Authors", ylab = "Count", col = "lightblue")
# Frequency table of the grouped architectures
data$Architecture_new |> table() |> as.data.frame() |> gt()
# Plot a histogram of the Average variable with a bin width of 0.3
data |> ggplot(aes(x = Average)) + geom_histogram(binwidth = 0.3) + labs(title = "Average Distribution", x = "Average", y = "Count")
# Print summary statistics of the data
summary(data)
# ---- Correlation analyses across score, popularity and size ----
# Calculate the correlation matrix for selected variables and plot it using ggcorrplot
data |> select(Average, HubLikes, downloads, Params) |> na.omit() |> cor() |>
ggcorrplot::ggcorrplot(hc.order = TRUE, type = "lower", outline.color = "black", lab = TRUE) +
labs(title = "Correlation Plot for All Data")
# Calculate the correlation matrix for the top 1000 rows (by Average) and plot it using ggcorrplot
data |> arrange(desc(Average)) |> head(1000) |> select(Average, HubLikes, downloads, Params) |> na.omit() |> cor() |>
ggcorrplot::ggcorrplot(hc.order = TRUE, type = "lower", outline.color = "black", lab = TRUE) +
labs(title = "Correlation Plot for Top 1000 Data")
# Calculate the correlation matrix for the top 100 rows (by Average) and plot it using ggcorrplot
data |> arrange(desc(Average)) |> head(100) |> select(Average, HubLikes, downloads, Params) |> na.omit() |> cor() |>
ggcorrplot::ggcorrplot(hc.order = TRUE, type = "lower", outline.color = "black", lab = TRUE) +
labs(title = "Correlation Plot for Top 100 Data")
# Calculate the correlation matrix for the top 50 rows (by Average) and plot it using ggcorrplot
data |> arrange(desc(Average)) |> head(50) |> select(Average, HubLikes, downloads, Params) |> na.omit() |> cor() |>
ggcorrplot::ggcorrplot(hc.order = TRUE, type = "lower", outline.color = "black", lab = TRUE) +
labs(title = "Correlation Plot for Top 50 Data")
# Select the model_name_for_query and Average columns from the data
data |> select(model_name_for_query, Average) |>
# Perform an inner join with dataset.maps on the model_name_for_query column
inner_join(dataset.maps, by = c("model_name_for_query")) |>
# Remove the model_name_for_query column
select(-model_name_for_query) |>
# Calculate the correlation matrix
cor() -> cor.score.dataset
# Set the margins for the plot
par(mar = c(12, 5, 4, 5))
# Sort the correlations with the Average column (excluding the first row, which is Average itself),
# take the top 10 correlations, and create a bar plot
cor.score.dataset[2:nrow(cor.score.dataset),1] |>
sort(decreasing = TRUE) |>
head(10) |>
barplot(main = "Correlation with Average", col = "blue", las = 2, cex.names = 0.6)
# Set the margins for the plot
par(mar = c(10, 5, 4, 5))
# Calculate the column sums of dataset.maps excluding the first column,
# sort the sums in decreasing order, convert to a data frame, and select the top 100 datasets
dataset.maps[,-1] |> colSums() |> sort(decreasing = TRUE) |> data.frame() |> head(100) -> top100.datasets
# Rename the column to "count"
colnames(top100.datasets) <- "count"
# Create a bar plot of the top 100 datasets
top100.datasets |> ggplot(aes(x = rownames(top100.datasets), y = count)) +
geom_bar(stat = "identity") +
theme(axis.text.x = element_text(angle = 90, hjust = 1)) +
labs(title = "Top 100 Datasets", x = "Dataset", y = "Count")
# Save the plot as a PDF (ggsave writes the most recently displayed ggplot)
ggsave("../out/top100datasets.pdf", width = 20, height = 10)
# Select the model_name_for_query and Average columns from the data
data |> select(model_name_for_query, Average) |>
# Perform an inner join with base.models on the model_name_for_query column
inner_join(base.models, by = c("model_name_for_query")) |>
# Remove the model_name_for_query column
select(-model_name_for_query) |>
# Calculate the correlation matrix
cor() -> cor.score.base.model
# Set the margins for the plot
par(mar = c(10, 5, 4, 5))
# Sort the correlations with the Average column (excluding the first row, which is Average itself),
# take the top 10 correlations, and create a bar plot
cor.score.base.model[2:nrow(cor.score.base.model),1] |>
sort(decreasing = TRUE) |>
head(10) |>
barplot(main = "Correlation Base model with Average", col = "blue", las = 2, cex.names = 0.6)
# Correlation among Average score, author count and commit count for the
# first 100 rows of `data`, joined to commit metadata by model name.
top100.cor <- data[1:100,] |>
  select(model_name_for_query, Average) |>
  inner_join(commit.data, by = c("model_name_for_query" = "name")) |>
  select(Average, num_of_authors, num_of_commits) |>
  cor()
# Lower-triangle correlation heatmap with value labels
ggcorrplot::ggcorrplot(top100.cor,
                       hc.order = TRUE,
                       type = "lower",
                       outline.color = "black",
                       lab = TRUE) +
  labs(title = "Correlation Plot for Top 100 Data")
# Mean Average score per architecture, best-scoring architecture first
data |>
  group_by(Architecture_new) |>
  summarise(Average = mean(Average)) |>
  arrange(desc(Average))
# Mean Average score per model type, best-scoring type first
data |>
  group_by(TypeString) |>
  summarise(Average = mean(Average)) |>
  arrange(desc(Average))
# Reshape to one row per (model, author): split the stringified author list
# on commas, expand it, and strip the surrounding brackets/quotes that the
# original export left in the strings.
commit.data.long <- commit.data |>
  mutate(authors = strsplit(as.character(authors), ",\\s*")) |>
  unnest(authors) |>
  mutate(authors = gsub("\\[|\\]|'", "", authors))
# Correlation between commit count and project duration (days),
# drawn as a labelled lower-triangle heatmap with a centered title.
commits.vs.days <- commit.data.long |>
  select(num_of_commits, total_time_days) |>
  cor()
ggcorrplot::ggcorrplot(commits.vs.days,
                       hc.order = TRUE,
                       type = "lower",
                       outline.color = "black",
                       lab = TRUE) +
  labs(title = "Correlation Plot: Number of Commits and Total Time in Days") +
  theme(plot.title = element_text(hjust = 0.5))
# Per-author activity profile: number of models (rows), plus mean commit
# count, mean project duration and mean collaborator count, most prolific
# authors first.
commit.data.long |>
  group_by(authors) |>
  summarise(total_models = n(),
            across(c(num_of_commits, total_time_days, num_of_authors),
                   mean,
                   .names = "average_{.col}")) |>
  arrange(desc(total_models))
# Authors with their first-commit date, earliest first
arrange(select(commit.data, authors, first_commit), first_commit)
# 100 most active authors by number of rows (models) in the long table
commit.data.long |>
  count(authors, name = "num_of_commits") |>
  arrange(desc(num_of_commits)) |>
  head(100)
# Earliest commit date observed for each author
first.commit.of.each.author <- commit.data.long |>
  group_by(authors) |>
  summarise(first_commit = min(as.Date(first_commit)))
# Daily mean Average score over time, as points with a smoothed trend line
data |>
  group_by(created_at) |>
  summarise(Average = mean(Average)) |>
  ggplot(aes(x = created_at, y = Average)) +
  geom_point() +
  geom_smooth() +
  labs(title = "Average vs Date", x = "Date", y = "Average score")
# Running total of distinct authors keyed by their first-commit date
total.authors.over.time <- first.commit.of.each.author |>
  group_by(first_commit) |>
  summarise(new_authors = n()) |>
  mutate(total_authors = cumsum(new_authors))
# Keep only the first occurrence of each author and add columns for month, week, and year of the first commit
# (arrange() runs before distinct(), so distinct() retains each author's earliest commit row)
commit.data.long |> select(authors, first_commit) |> arrange(first_commit) |> distinct(authors, .keep_all = TRUE) |> mutate(month = floor_date(first_commit,"month"), week = floor_date(first_commit,"week"), year = floor_date(first_commit, "year")) -> author.list
# Plot the total number of authors over time, distinguishing between new authors and total authors
# Monthly buckets: count of authors whose first commit fell in that month, plus the cumulative total
ggplot(author.list |> group_by(month) |> summarise(new_authors = n()) |> mutate(total_authors = cumsum(new_authors)), aes(x = month)) +
geom_point(aes(y = new_authors, color = "New Authors"), size = 2, alpha = 0.7) +
geom_point(aes(y = total_authors, color = "Total Authors"), size = 2, alpha = 0.7) +
# Smoothed trends for both series, colored to match the point layers
geom_smooth(aes(y = new_authors), color = "darkblue") +
geom_smooth(aes(y = total_authors), color = "#b5162b") +
scale_color_manual(values = c("New Authors" = "darkblue", "Total Authors" = "#b5162b"),
labels = c("New Authors", "Total Authors")) +
labs(title = "Total and new authors over time", x = "First commit", y = "Authors", color = "Legend") +
scale_x_date(date_labels = "%b %Y", date_breaks = "3 month") +
theme_minimal() +
theme(axis.text.x = element_text(angle = 90, hjust = 1), legend.position = "bottom") +
scale_y_continuous(breaks = seq(0, 3000, 200))
# Save the plot as a PDF
ggsave("../out/total_authors_over_time.pdf", width = 10, height = 6)
# Monthly summary keyed by a first-of-month Date: the best Average score
# seen in each (year, month) bucket and the number of models created then.
average.vs.total.per.month <- data |>
  mutate(year = year(created_at), month = month(created_at)) |>
  group_by(year, month) |>
  summarise(Average = max(Average, na.rm = TRUE), total = n(), .groups = 'drop') |>
  mutate(date = make_date(year, month)) |>
  ungroup()
# Define the y-intercept for placing labels
y.intercept <- 1000
# Create a plot showing the total models per month with notable model release dates
# Bars are shifted by 15 days so each month's column is centered in its month.
# NOTE(review): geom_text(aes(...)) with constant x/y/label draws one copy of
# each label per data row — overplotted but visually identical to one label.
average.vs.total.per.month |>
ggplot(aes(x = date + days(15), y = total, fill = Average)) +
geom_col() +
labs(title = "Total models per month", x = "Month", y = "Total models") +
theme_minimal() +
theme(legend.position = "bottom") +
scale_x_date(date_breaks = "1 month", date_labels = "%b %Y", expand = expansion(mult = c(0.01, 0))) +
theme(axis.text.x = element_text(angle = 90, hjust = 1)) +
#MODEL RELEASE DATES (dashed vertical lines mark notable base-model releases)
#LAMMA (presumably "LLaMA")
geom_vline(xintercept = as.Date("2024-04-18"), linetype = "dashed", color = "red") +
geom_text(aes(x = as.Date("2024-04-18"), y = y.intercept, label = "LLama3"), angle = 90, hjust = 0.8, vjust = -0.8, color = "red") +
geom_vline(xintercept = as.Date("2023-07-18"), linetype = "dashed", color = "blue") +
geom_text(aes(x = as.Date("2023-07-18"), y = y.intercept, label = "LLama2"), angle = 90, hjust = 0.8, vjust = -0.8, color = "blue") +
geom_vline(xintercept = as.Date("2023-02-24"), linetype = "dashed", color = "darkgreen") +
geom_text(aes(x = as.Date("2023-02-24"), y = y.intercept, label = "LLama1"), angle = 90, hjust = 0.8, vjust = -0.8, color = "darkgreen") +
#GEMINI
geom_vline(xintercept = as.Date("2023-12-6"), linetype = "dashed", color = "black") +
geom_text(aes(x = as.Date("2023-12-6"), y = y.intercept, label = "Gemini1.0"), angle = 90, hjust = 0.8, vjust = -0.8, color = "black") +
geom_vline(xintercept = as.Date("2024-02-15"), linetype = "dashed", color = "purple") +
geom_text(aes(x = as.Date("2024-02-15"), y = y.intercept, label = "Gemini1.5"), angle = 90, hjust = 0.8, vjust = -0.8, color = "purple") +
#MISTRAL
geom_vline(xintercept = as.Date("2023-9-20"), linetype = "dashed", color = "orange") +
geom_text(aes(x = as.Date("2023-9-20"), y = y.intercept, label = "Mistral7b-v0.1"), angle = 90, hjust = 0.8, vjust = -0.8, color = "orange") +
geom_vline(xintercept = as.Date("2024-5-22"), linetype = "dashed", color = "green") +
geom_text(aes(x = as.Date("2024-5-22"), y = y.intercept, label = "Mistral7b-v0.3"), angle = 90, hjust = 0.8, vjust = -0.8, color = "green") +
geom_vline(xintercept = as.Date("2024-4-10"), linetype = "dashed", color = "pink") +
geom_text(aes(x = as.Date("2024-4-10"), y = y.intercept, label = "Mixtral 8x22B"), angle = 90, hjust = 0.8, vjust = -0.8, color = "pink") +
geom_vline(xintercept = as.Date("2023-12-11"), linetype = "dashed", color = "lightgreen") +
geom_text(aes(x = as.Date("2023-12-11"), y = y.intercept, label = "Mistral7b-v0.2 and Mixtral 8x7B"), angle = 90, hjust = 0.8, vjust = -0.8, color = "lightgreen") +
#PHI
geom_vline(xintercept = as.Date("2023-10-17"), linetype = "dashed", color = "cyan") +
geom_text(aes(x = as.Date("2023-10-17"), y = y.intercept, label = "Phi1 and Phi1.5"), angle = 90, hjust = 0.8, vjust = -0.8, color = "cyan") +
geom_vline(xintercept = as.Date("2024-1-11"), linetype = "dashed", color = "grey") +
geom_text(aes(x = as.Date("2024-1-11"), y = y.intercept, label = "Phi2"), angle = 90, hjust = 0.8, vjust = -0.8, color = "grey")+
geom_vline(xintercept = as.Date("2024-4-22"), linetype = "dashed", color = "lightblue") +
geom_text(aes(x = as.Date("2024-4-22"), y = y.intercept, label = "Phi3-mini"), angle = 90, hjust = 0.8, vjust = -0.8, color = "lightblue")+
#FALCON
geom_vline(xintercept = as.Date("2023-4-24"), linetype = "dashed", color = "purple") +
geom_text(aes(x = as.Date("2023-4-24"), y = y.intercept, label = "Falcon-7B"), angle = 90, hjust = 0.8, vjust = -0.8, color = "purple") +
#BLOOM
geom_vline(xintercept = as.Date("2022-10-21"), linetype = "dashed", color = "darkgrey") +
geom_text(aes(x = as.Date("2022-10-21"), y = y.intercept, label = "Bloom"), angle = 90, hjust = 0.8, vjust = -0.8, color = "darkgrey")
# Save the plot as a PDF
ggsave("../out/total_models_per_month.pdf", width = 30, height = 8)
# Set the y-intercept for placing labels
y.intercept <- 100
# Create a plot showing the total models per day with notable model release dates
# Daily buckets from October 2022 on; bar height = models created that day,
# fill = best Average score of that day (binned viridis scale at the end).
# NOTE(review): geom_text(aes(...)) with constant x/y/label draws one copy of
# each label per data row — overplotted but visually identical to one label.
data |>
dplyr::filter(!is.na(created_at) & created_at > "2022-10-1") |>
group_by(created_at) |>
summarise(Average = max(Average, na.rm = TRUE), total = n(), .groups = 'drop') |>
ggplot(aes(x = created_at, y = total, fill = Average)) +
geom_col() +
geom_smooth() +
labs(title = "Total models per date", x = "Date", y = "Total models per date") +
theme_minimal() +
theme(legend.position = "bottom") +
#MODEL RELEASE DATES (dashed vertical lines mark notable base-model releases)
#LAMMA (presumably "LLaMA")
geom_vline(xintercept = as.Date("2024-04-18"), linetype = "dashed", color = "red") +
geom_text(aes(x = as.Date("2024-04-18"), y = y.intercept, label = "LLama3"), angle = 90, hjust = 0.8, vjust = -0.8, color = "red") +
geom_vline(xintercept = as.Date("2023-07-18"), linetype = "dashed", color = "blue") +
geom_text(aes(x = as.Date("2023-07-18"), y = y.intercept, label = "LLama2"), angle = 90, hjust = 0.8, vjust = -0.8, color = "blue") +
geom_vline(xintercept = as.Date("2023-02-24"), linetype = "dashed", color = "darkgreen") +
geom_text(aes(x = as.Date("2023-02-24"), y = y.intercept, label = "LLama1"), angle = 90, hjust = 0.8, vjust = -0.8, color = "darkgreen") +
#GEMINI
geom_vline(xintercept = as.Date("2023-12-6"), linetype = "dashed", color = "black") +
geom_text(aes(x = as.Date("2023-12-6"), y = y.intercept, label = "Gemini1.0"), angle = 90, hjust = 0.8, vjust = -0.8, color = "black") +
geom_vline(xintercept = as.Date("2024-02-15"), linetype = "dashed", color = "purple") +
geom_text(aes(x = as.Date("2024-02-15"), y = y.intercept, label = "Gemini1.5"), angle = 90, hjust = 0.8, vjust = -0.8, color = "purple") +
#MISTRAL
geom_vline(xintercept = as.Date("2023-9-20"), linetype = "dashed", color = "orange") +
geom_text(aes(x = as.Date("2023-9-20"), y = y.intercept, label = "Mistral7b-v0.1"), angle = 90, hjust = 0.8, vjust = -0.8, color = "orange") +
geom_vline(xintercept = as.Date("2024-5-22"), linetype = "dashed", color = "green") +
geom_text(aes(x = as.Date("2024-5-22"), y = y.intercept, label = "Mistral7b-v0.3"), angle = 90, hjust = 0.8, vjust = -0.8, color = "green") +
geom_vline(xintercept = as.Date("2024-4-10"), linetype = "dashed", color = "pink") +
geom_text(aes(x = as.Date("2024-4-10"), y = y.intercept, label = "Mixtral 8x22B"), angle = 90, hjust = 0.8, vjust = -0.8, color = "pink") +
geom_vline(xintercept = as.Date("2023-12-11"), linetype = "dashed", color = "lightgreen") +
geom_text(aes(x = as.Date("2023-12-11"), y = y.intercept, label = "Mistral7b-v0.2 and Mixtral 8x7B"), angle = 90, hjust = 0.8, vjust = -0.8, color = "lightgreen") +
#PHI
geom_vline(xintercept = as.Date("2023-10-17"), linetype = "dashed", color = "cyan") +
geom_text(aes(x = as.Date("2023-10-17"), y = y.intercept, label = "Phi1 and Phi1.5"), angle = 90, hjust = 0.8, vjust = -0.8, color = "cyan") +
geom_vline(xintercept = as.Date("2024-1-11"), linetype = "dashed", color = "grey") +
geom_text(aes(x = as.Date("2024-1-11"), y = y.intercept, label = "Phi2"), angle = 90, hjust = 0.8, vjust = -0.8, color = "grey")+
geom_vline(xintercept = as.Date("2024-4-22"), linetype = "dashed", color = "lightblue") +
geom_text(aes(x = as.Date("2024-4-22"), y = y.intercept, label = "Phi3-mini"), angle = 90, hjust = 0.8, vjust = -0.8, color = "lightblue")+
#FALCON
geom_vline(xintercept = as.Date("2023-4-24"), linetype = "dashed", color = "purple") +
geom_text(aes(x = as.Date("2023-4-24"), y = y.intercept, label = "Falcon-7B"), angle = 90, hjust = 0.8, vjust = -0.8, color = "purple") +
#BLOOM
geom_vline(xintercept = as.Date("2022-10-21"), linetype = "dashed", color = "darkgrey") +
geom_text(aes(x = as.Date("2022-10-21"), y = y.intercept, label = "Bloom"), angle = 90, hjust = 0.8, vjust = -0.8, color = "darkgrey")+
scale_fill_binned(type = "viridis")
# Save the plot as a PDF
ggsave("../out/total_models_per_date.pdf", width = 25, height = 8)
# Monthly score-efficiency summary, keyed by a first-of-month Date:
# the best Average/Params ratio, the smallest Params/Average ratio,
# and the number of models observed each month (Params == 0 excluded).
average.vs.params.per.month <- data |>
  dplyr::filter(Params > 0) |>
  mutate(year = year(created_at),
         month = month(created_at),
         Average.vs.Params = Average/Params,
         Params.vs.Average = Params/Average) |>
  group_by(year, month) |>
  summarise(Average.vs.Params = max(Average.vs.Params, na.rm = TRUE),
            Params.vs.Average = min(Params.vs.Average, na.rm = TRUE),
            total = n(), .groups = 'drop') |>
  mutate(date = make_date(year, month)) |>
  ungroup()
# Filter months with at least 30 total models and create a plot showing the average/params per month
# (threshold matches the Params/Average plot below; an earlier comment said 100, but the code uses 30)
average.vs.params.per.month |>
dplyr::filter(total >= 30) |>
ggplot(aes(x = date, y = Average.vs.Params, fill = total)) +
geom_col(position = "dodge", linewidth = 2) +
labs(title = "Average/Params by Month", x = "Month", y = "Average/Params") +
theme_minimal() +
theme(legend.position = "bottom") +
scale_x_date(date_breaks = "1 month", date_labels = "%b %Y", expand = expansion(mult = c(0, 0))) +
theme(axis.text.x = element_text(angle = 90, hjust = 1))
# y-intercept for placing labels
y.intercept <- 0.04
# Filter months with at least 30 total models and create a plot showing the params/average per month with notable dates of release of models
# Bars are shifted by 15 days so each month's column is centered in its month.
# NOTE(review): geom_text(aes(...)) with constant x/y/label draws one copy of
# each label per data row — overplotted but visually identical to one label.
average.vs.params.per.month |>
dplyr::filter(total >= 30) |>
ggplot(aes(x = date + days(15), y = Params.vs.Average, fill = total)) +
geom_col(position = "dodge") +
labs(title = "Params/Average by Month", x = "Month", y = "Params/Average") +
theme_minimal() +
theme(legend.position = "bottom") +
scale_x_date(date_breaks = "1 month", date_labels = "%b %Y",expand = expansion(mult = c(0, 0))) +
theme(axis.text.x = element_text(angle = 90, hjust = 1)) +
#LAMMA (presumably "LLaMA")
geom_vline(xintercept = as.Date("2024-04-18"), linetype = "dashed", color = "red") +
geom_text(aes(x = as.Date("2024-04-18"), y = y.intercept, label = "LLama3"), angle = 90, hjust = 0.8, vjust = -0.8, color = "red") +
geom_vline(xintercept = as.Date("2023-07-18"), linetype = "dashed", color = "blue") +
geom_text(aes(x = as.Date("2023-07-18"), y = y.intercept, label = "LLama2"), angle = 90, hjust = 0.8, vjust = -0.8, color = "blue") +
#GEMINI
geom_vline(xintercept = as.Date("2023-12-6"), linetype = "dashed", color = "black") +
geom_text(aes(x = as.Date("2023-12-6"), y = y.intercept, label = "Gemini1.0"), angle = 90, hjust = 0.8, vjust = -0.8, color = "black") +
geom_vline(xintercept = as.Date("2024-02-15"), linetype = "dashed", color = "purple") +
geom_text(aes(x = as.Date("2024-02-15"), y = y.intercept, label = "Gemini1.5"), angle = 90, hjust = 0.8, vjust = -0.8, color = "purple") +
#MISTRAL
geom_vline(xintercept = as.Date("2023-9-27"), linetype = "dashed", color = "orange") +
geom_text(aes(x = as.Date("2023-9-27"), y = y.intercept, label = "Mistral7B"), angle = 90, hjust = 0.8, vjust = -0.8, color = "orange") +
geom_vline(xintercept = as.Date("2024-4-10"), linetype = "dashed", color = "pink") +
geom_text(aes(x = as.Date("2024-4-10"), y = y.intercept, label = "Mixtral 8x22B"), angle = 90, hjust = 0.8, vjust = -0.8, color = "pink") +
geom_vline(xintercept = as.Date("2023-12-11"), linetype = "dashed", color = "lightgreen") +
geom_text(aes(x = as.Date("2023-12-11"), y = y.intercept, label = "Mixtral 8x7B"), angle = 90, hjust = 0.8, vjust = -0.8, color = "lightgreen") +
#PHI
geom_vline(xintercept = as.Date("2023-10-17"), linetype = "dashed", color = "cyan") +
geom_text(aes(x = as.Date("2023-10-17"), y = y.intercept, label = "Phi1 and Phi1.5"), angle = 90, hjust = 0.8, vjust = -0.8, color = "cyan") +
geom_vline(xintercept = as.Date("2024-1-11"), linetype = "dashed", color = "grey") +
geom_text(aes(x = as.Date("2024-1-11"), y = y.intercept, label = "Phi2"), angle = 90, hjust = 0.8, vjust = -0.8, color = "grey")+
geom_vline(xintercept = as.Date("2024-4-22"), linetype = "dashed", color = "lightblue") +
geom_text(aes(x = as.Date("2024-4-22"), y = y.intercept, label = "Phi3-mini"), angle = 90, hjust = 0.8, vjust = -0.8, color = "lightblue")+
#FALCON
geom_vline(xintercept = as.Date("2023-4-24"), linetype = "dashed", color = "purple") +
geom_text(aes(x = as.Date("2023-4-24"), y = y.intercept, label = "Falcon-7B"), angle = 90, hjust = 0.8, vjust = -0.8, color = "purple") +
scale_fill_binned(type = "gradient")
# Save plot as PDF
ggsave("../out/params.vs.average.per.month.pdf", width = 30, height = 12)
# y-intercept for placing labels
y.intercept <- 250
# Create a plot showing the total models per week with notable model release dates
# Weekly buckets stacked by architecture; all release annotations use a single
# neutral grey (#5d6066) so the fill palette stays readable.
# NOTE(review): geom_text(aes(...)) with constant x/y/label draws one copy of
# each label per data row — overplotted but visually identical to one label.
data |>
select(Average, Params, created_at, Architecture_new) |>
na.omit() |>
arrange(created_at) |>
dplyr::filter(Params > 0 & created_at >= "2023-01-01") |>
mutate(week_start = floor_date(created_at, "week")) |>
group_by(week_start,Architecture_new) |>
summarise(Average = max(Average, na.rm = TRUE), total = n(), .groups = 'drop') |>
ggplot(aes(x = week_start, y = total, fill = Architecture_new)) +
geom_col() +
labs(title = "Total models per date", x = "Date", y = "Total models per date") +
theme_minimal() +
theme(legend.position = "bottom") +
#MODEL RELEASE DATES
#LAMMA (presumably "LLaMA")
geom_vline(xintercept = as.Date("2024-04-18"), linetype = "dashed", color = "#5d6066") +
geom_text(aes(x = as.Date("2024-04-18"), y = y.intercept, label = "LLama3"), angle = 90, hjust = 0.8, vjust = -0.8, color = "#5d6066", size = 3) +
geom_vline(xintercept = as.Date("2023-07-18"), linetype = "dashed", color = "#5d6066") +
geom_text(aes(x = as.Date("2023-07-18"), y = y.intercept, label = "LLama2"), angle = 90, hjust = 0.8, vjust = -0.8, color = "#5d6066", size = 3) +
geom_vline(xintercept = as.Date("2023-02-24"), linetype = "dashed", color = "#5d6066") +
geom_text(aes(x = as.Date("2023-02-24"), y = y.intercept, label = "LLama1"), angle = 90, hjust = 0.8, vjust = -0.8, color = "#5d6066", size = 3) +
#GEMINI
geom_vline(xintercept = as.Date("2023-12-6"), linetype = "dashed", color = "#5d6066") +
geom_text(aes(x = as.Date("2023-12-6"), y = y.intercept, label = "Gemini1.0"), angle = 90, hjust = 0.8, vjust = -0.8, color = "#5d6066", size = 3) +
geom_vline(xintercept = as.Date("2024-02-15"), linetype = "dashed", color = "#5d6066") +
geom_text(aes(x = as.Date("2024-02-15"), y = y.intercept, label = "Gemini1.5"), angle = 90, hjust = 0.8, vjust = -0.8, color = "#5d6066", size = 3) +
#MISTRAL
geom_vline(xintercept = as.Date("2023-9-20"), linetype = "dashed", color = "#5d6066") +
geom_text(aes(x = as.Date("2023-9-20"), y = y.intercept, label = "Mistral7b-v0.1"), angle = 90, hjust = 0.8, vjust = -0.8, color = "#5d6066", size = 3) +
geom_vline(xintercept = as.Date("2024-5-22"), linetype = "dashed", color = "#5d6066") +
geom_text(aes(x = as.Date("2024-5-22"), y = y.intercept, label = "Mistral7b-v0.3"), angle = 90, hjust = 0.8, vjust = -0.8, color = "#5d6066", size = 3) +
geom_vline(xintercept = as.Date("2024-4-10"), linetype = "dashed", color = "#5d6066") +
geom_text(aes(x = as.Date("2024-4-10"), y = y.intercept, label = "Mixtral 8x22B"), angle = 90, hjust = 0.8, vjust = -0.8, color = "#5d6066", size = 3) +
geom_vline(xintercept = as.Date("2023-12-11"), linetype = "dashed", color = "#5d6066") +
geom_text(aes(x = as.Date("2023-12-11"), y = y.intercept, label = "Mistral7b-v0.2 and Mixtral 8x7B"), angle = 90, hjust = 0.8, vjust = -0.8, color = "#5d6066", size = 3) +
#PHI
geom_vline(xintercept = as.Date("2023-10-17"), linetype = "dashed", color = "#5d6066") +
geom_text(aes(x = as.Date("2023-10-17"), y = y.intercept, label = "Phi1 and Phi1.5"), angle = 90, hjust = 0.8, vjust = -0.8, color = "#5d6066", size = 3) +
geom_vline(xintercept = as.Date("2024-1-11"), linetype = "dashed", color = "#5d6066") +
geom_text(aes(x = as.Date("2024-1-11"), y = y.intercept, label = "Phi2"), angle = 90, hjust = 0.8, vjust = -0.8, color = "#5d6066", size = 3)+
geom_vline(xintercept = as.Date("2024-4-22"), linetype = "dashed", color = "#5d6066") +
geom_text(aes(x = as.Date("2024-4-22"), y = y.intercept, label = "Phi3-mini"), angle = 90, hjust = 0.8, vjust = -0.8, color = "#5d6066", size = 3)+
#FALCON
geom_vline(xintercept = as.Date("2023-4-24"), linetype = "dashed", color = "#5d6066") +
geom_text(aes(x = as.Date("2023-4-24"), y = y.intercept, label = "Falcon-7B"), angle = 90, hjust = 0.8, vjust = -0.8, color = "#5d6066", size = 3) +
scale_fill_brewer(palette = color_palette)
# Save the plot as a PDF
ggsave("../out/total_models_per_week.jpg", width = 30, height = 10)
# Lookup table of notable base-model release dates, used to annotate timelines.
release.date.strings <- c("2024-04-18", "2023-07-18", "2023-02-24", "2023-12-06",
                          "2024-02-15", "2023-09-20", "2024-05-22", "2024-04-10",
                          "2023-12-11", "2023-10-17", "2024-01-11", "2024-04-22",
                          "2023-04-24")
release.date.labels <- c("LLama3", "LLama2", "LLama1", "Gemini1.0",
                         "Gemini1.5", "Mistral7b-v0.1", "Mistral7b-v0.3", "Mixtral 8x22B",
                         "Mistral7b-v0.2 and Mixtral 8x7B", "Phi1 and Phi1.5", "Phi2", "Phi3-mini", "Falcon-7B")
release_dates <- data.frame(
  date = as.Date(release.date.strings),
  label = release.date.labels
)
# Weekly counts of new models per model type (NA types mapped to "unknown"),
# with a per-type running total and the week's overall total of new models.
# Output columns: week_start, TypeString, new_models, total_models,
# total_new_models_that_week — one row per (week, type).
data |>
  mutate(week_start = floor_date(created_at, "week")) |>
  mutate(TypeString = ifelse(is.na(TypeString), "unknown", TypeString)) |>
  group_by(TypeString, week_start) |>
  summarise(new_models = n()) |>
  # summarise() drops only the last grouping level, so the result is still
  # grouped by TypeString and cumsum() gives a per-type running total.
  mutate(total_models = cumsum(new_models)) |>
  group_by(week_start) |>
  # reframe() (already used elsewhere in this file) replaces the deprecated
  # multiple-rows-per-group summarise(): it returns every (week, type) row
  # plus the week's total, without the dplyr >= 1.1 deprecation warning.
  reframe(TypeString, new_models, total_models, total_new_models_that_week = sum(new_models)) -> total.models.vs.date.df
# Plot new models vs. date, grouped by TypeString, with a title, limited y-axis, and customized theme
# Scale x-axis to display dates from January 1, 2023, to June 1, 2024, with monthly breaks
# Manually set fill colors for different TypeString categories
# The outer parentheses force the assigned ggplot object to print as well.
# NOTE(review): the !is.na(TypeString) filter looks redundant here — NAs were
# already mapped to "unknown" when total.models.vs.date.df was built; verify.
(total.models.vs.date.df |>
dplyr::filter(!is.na(TypeString)) |>
ggplot(aes(x = week_start, y = new_models, fill = TypeString)) +
geom_col(color = "black", linewidth = 0.15) +
labs(title = "New models per week", x = "", y = "Week") +
theme(legend.position = "none", axis.text.x = element_blank(), panel.background = element_blank(), panel.grid = element_blank()) +
scale_x_date(date_labels = "%Y-%m-%d", limits = c(as.Date("2023-01-01"), as.Date("2024-06-01")), breaks = seq(as.Date("2023-01-01"), as.Date("2024-06-01"), by = "1 month")) +
scale_fill_brewer(palette = "Set2") -> p1)
# Plot percentage of new models vs. date, grouped by TypeString, with a customized theme
# Scale x-axis to display dates from January 1, 2023, to June 1, 2024, with monthly breaks
# Manually set fill colors for different TypeString categories
# percentage = each type's share of that week's new models (stacks to 1).
(total.models.vs.date.df |>
dplyr::filter(!is.na(TypeString)) |>
group_by(week_start) |>
mutate(percentage = new_models/total_new_models_that_week) |>
ggplot(aes(x = week_start, y = percentage, fill = TypeString)) +
geom_col(color = "black", linewidth = 0.15) +
labs(title = "", x = "Date", y = "Percentage") +
theme(legend.position = "bottom", panel.background = element_blank()) +
scale_x_date(date_labels = "%Y-%m-%d", limits = c(as.Date("2023-01-01"), as.Date("2024-06-01")), breaks = seq(as.Date("2023-01-01"), as.Date("2024-06-01"), by = "1 month")) +
scale_fill_brewer(palette = "Set2") +
theme(axis.text.x = element_text(angle = 90, hjust = 1), legend.text = element_text(size = 8), axis.title.x = element_text(margin = margin(t = 20,
r = 50, # Right margin
b = 40, # Bottom margin
l = 10))) -> p2)
# Arrange plots p1 and p2 vertically, save the combined plot as a PDF
gridExtra::grid.arrange(p1, p2, ncol = 1) -> p3
# NOTE(review): this patchwork composition duplicates the grid.arrange above
# and its result is discarded (only displayed) — presumably leftover; verify.
p1 + p2 + plot_layout(ncol = 1)
ggsave("../out/new_models_vs_date_combined.pdf", p3, width = 20, height = 16)
# Plot author activity over time, showing the evolution of author activity by TypeString
# Filter author.activity, arrange by num_of_repos, select top 10, join with data, and select relevant columns
# Group by author_name, TypeString, and created_at, then plot using ggplot
# NOTE(review): [-1,] drops the first row of author.activity — presumably a
# header/aggregate row; verify. The dplyr group_by() before ggplot() has no
# effect on the plot itself (ggplot ignores dplyr grouping).
(author.activity[-1,] |> arrange(desc(num_of_repos)) |> head(10) |>
inner_join(data, by = c("author_name" = "author")) |>
select(author_name, TypeString, Average, created_at) |>
group_by(author_name, TypeString, created_at) |>
ggplot(aes(x = created_at, y = author_name, fill = TypeString, color = author_name)) +
geom_line(linetype = "solid", linewidth = 1.3) +
geom_tile(color = "black") +
labs(title = "Author activity", x = "", y = "Total") +
theme_minimal() +
scale_color_brewer(palette = color_palette) + # Use a color palette for line colors
scale_fill_brewer(palette = color_palette) + # Use a color palette for fill colors
theme(axis.text.x = element_text(angle = 90, hjust = 1), legend.position = "top", axis.text.y = element_blank()) -> p11)
# Save the author activity plot as a PDF
ggsave("../out/author_activity.pdf", width = 20, height = 20)
# Plot average author activity over time, showing the average score of authors' activities
# Filter author.activity, select top 10, join with data, select relevant columns, group, and reframe data
# Plot using ggplot
# NOTE(review): unlike p11 this takes the FIRST ten rows without re-sorting by
# num_of_repos — confirm whether the same top-10 set is intended.
(author.activity[-1,] |> head(10) |>
inner_join(data, by = c("author_name" = "author")) |>
select(author_name, TypeString, Average, created_at) |>
group_by(author_name, TypeString, created_at) |>
reframe(Average = mean(Average)) |>
ggplot(aes(x = created_at, y = Average, color = author_name)) +
geom_line(linewidth = 1.3) +
labs(title = "Author activity", x = "Date", y = "Average score") +
theme_minimal() +
scale_color_brewer(palette = color_palette) + # Use a color palette for line colors
theme(axis.text.x = element_text(angle = 90, hjust = 1), legend.position = "none") -> p12)
# Arrange the author activity plots vertically and save as a combined PDF
gridExtra::grid.arrange(p11, p12, ncol = 1) -> p13
ggsave("../out/author_activity_combined.pdf", p13, width = 20, height = 12)
# Select unique dates and arrange them from smallest to largest
unique_dates <- sort(unique(data$created_at))
# Select every 5th date (downsamples the x axis for the cumulative-type plot)
selected_dates <- unique_dates[seq(1, length(unique_dates), by = 5)]
# Prepare filtered data for plotting: per-type cumulative model counts,
# forward-filled over a complete daily calendar, then thinned to the
# selected dates so gaps between observations carry the last known total.
data |>
dplyr::filter(!is.na(TypeString)) |>
group_by(created_at, TypeString) |>
summarise(total = n(), .groups = 'drop') |>
group_by(TypeString) |>
arrange(created_at, .by_group = TRUE) |>
mutate(total_cumsum = cumsum(total)) |>
ungroup() |>
# Complete the data frame with missing dates (one row per type per day)
complete(TypeString, created_at = seq(min(selected_dates), max(selected_dates), by = "day")) |>
# Fill missing cumulative sums (last observation carried forward, per type)
group_by(TypeString) |>
fill(total_cumsum, .direction = "down") |>
dplyr::filter(created_at %in% selected_dates) |>
ungroup() -> filtered_data
# Plot Type vs. Average over time (outer parentheses print the assigned plot)
(data |> dplyr::filter(!is.na(TypeString)) |>
ggplot(aes(x = created_at, y = Average, color = TypeString)) +
geom_point(alpha = 0.65, position = position_dodge2()) +
geom_smooth(se = FALSE, linewidth = 1.5) +
labs(title = "Type vs Average", x = "", y = "Average") +
scale_color_brewer(palette = color_palette) +
theme(legend.position = "none", panel.background = element_blank(), panel.grid = element_blank(), axis.text.x = element_blank()) -> p_type_vs_average)
# Plot Type vs. Total combined: cumulative model count per type over time,
# using the downsampled, forward-filled filtered_data prepared above.
# NOTE(review): position = "dodge" on geom_line is unusual and likely a no-op
# here — confirm it is intentional.
(filtered_data |>
ggplot(aes(x = created_at, y = total_cumsum, color = TypeString)) +
geom_line(aes(group = TypeString), position = "dodge", linewidth = 1.5, alpha = 0.8) +
labs(title = "", x = "Date", y = "Total") +
scale_color_brewer(palette = color_palette) +
theme(legend.position = "bottom", panel.background = element_blank(), panel.grid = element_blank()) +
scale_x_date(date_labels = "%b %Y", limits = c(as.Date("2023-01-01"), as.Date("2024-06-01")), breaks = seq(as.Date("2023-01-01"), as.Date("2024-06-01"), by = "1 month")) +
theme(axis.text.x = element_text(angle = 90, hjust = 1)) -> p_type_vs_total)
# Arrange the plots vertically and save as a combined PDF
gridExtra::grid.arrange(p_type_vs_average, p_type_vs_total, ncol = 1) -> p_type_combined
ggsave("../out/type_combined.pdf", p_type_combined, width = 18, height = 8)
# Datasets that occur in both the chat and the fine-tuned model tables
datasets_common <- unique(
  complete.df.chat$datasets[complete.df.chat$datasets %in% complete.df.finetuned$datasets]
)
# Among the shared datasets, keep those where BOTH model types contribute
# more than three models: after filtering n > 3, sum(n >= 4) == 2 means the
# chat AND fine-tuned groups each survived.
datasets_common2 <- complete.chat.finetuned.df |>
  group_by(datasets, Type) |>
  reframe(n = n(), mean_diff = mean(Average_diff)) |>
  arrange(desc(n)) |>
  dplyr::filter(datasets %in% datasets_common) |>
  dplyr::filter(n > 3) |>
  group_by(datasets) |>
  reframe(ntotal = sum(n >= 4)) |>
  dplyr::filter(ntotal == 2) |>
  pull(datasets)
# Dodged bars of the mean Average difference per dataset, one bar per type
complete.chat.finetuned.df |>
  group_by(datasets, Type) |>
  reframe(n = n(), mean_diff = mean(Average_diff)) |>
  arrange(desc(n)) |>
  dplyr::filter(datasets %in% datasets_common2) |>
  dplyr::filter(n > 3) |>
  ggplot(aes(x = datasets, y = mean_diff, fill = Type)) +
  geom_bar(stat = "identity", position = position_dodge2()) +
  labs(title = "Average difference per dataset", x = "Dataset", y = "Average difference") +
  theme(axis.text.x = element_text(angle = 90, hjust = 1))
# Save the plot as a PDF
ggsave("../out/average_diff_per_dataset.pdf", width = 5, height = 5)
# Best daily Average score over time for models in an (exclusive) parameter
# range, as points with a smoothed trend. Factors out the plot that was
# previously copy-pasted three times with different bounds.
#   df    — data frame with Params, created_at and Average columns
#   lower — exclusive lower bound on Params
#   upper — exclusive upper bound on Params
# Returns a ggplot object (auto-printed when called at top level interactively).
plot_best_average_by_params <- function(df, lower, upper) {
  df |>
    dplyr::filter(Params > lower & Params < upper) |>
    group_by(created_at) |>
    reframe(Average = max(Average)) |>
    ggplot(aes(created_at, Average)) +
    geom_point() +
    geom_smooth() +
    labs(title = paste0("Average vs Date for Params between ", lower, " and ", upper))
}
# Plot for Params between 65 and 75
plot_best_average_by_params(data, 65, 75)
# Plot for Params between 30 and 40
plot_best_average_by_params(data, 30, 40)
# Plot for Params between 1 and 15
plot_best_average_by_params(data, 1, 15)
# For every distinct parameter count, take the best Average score observed
# and draw it as a scatter with a smoothed trend line.
per.param.best <- data |>
  group_by(Params) |>
  summarise(n = n(), average = max(Average))
ggplot(per.param.best, aes(x = Params, y = average)) +
  geom_point() +
  geom_smooth() +
  labs(title = "Params vs Average", x = "Params", y = "Average")
# Plot Params density over the distinct parameter counts, with dashed lines
# and labels marking the median (red) and mean (blue) of data$Params.
library(DescTools) # NOTE(review): not obviously used in this block — presumably needed later; verify
data |>
  group_by(Params) |> # Group data by Params value
  summarise(n = n(), average = max(Average)) |> # One row per distinct Params value
  ggplot(aes(Params)) +
  geom_density(fill = "lightgreen") +
  labs(title = "Params density", x = "Params", y = "Density") +
  geom_vline(xintercept = median(data$Params), color = "red", linetype = "dashed") +
  geom_vline(xintercept = mean(data$Params), color = "blue", linetype = "dashed") +
  # annotate() draws each label exactly once; the previous
  # geom_label(aes(...)) with constant coordinates drew one identical label
  # per data row (heavy overplotting with the same visual result).
  annotate("label", x = median(data$Params), y = 0.0125,
           label = paste("Median =", median(data$Params)),
           color = "red", fill = "white") +
  annotate("label", x = mean(data$Params), y = 0.014,
           label = paste("Mean =", round(mean(data$Params), 2)),
           color = "blue", fill = "white") +
  theme_minimal()
# Save Params density plot
ggsave("../out/params_density.pdf", width = 10, height = 6)
# Histogram of parameter counts (1B-wide bins), bars filled by architecture.
# Outer parentheses print the plot while also assigning it.
(plot_params_vs_architecture <- data |>
  dplyr::filter(Params > 0) |> # Keep only models with a known parameter count
  ggplot(aes(Params, fill=Architecture_new)) +
  # `linewidth` replaces the deprecated `size` for border width
  # (ggplot2 >= 3.4); matches its use elsewhere in this file.
  geom_histogram(binwidth = 1, color = "black", linewidth = .1) +
  labs(title = "Params histogram", x = "Params", y = "Count") +
  theme_minimal() +
  scale_x_continuous(breaks = seq(0, 300, 5)) + # Tick every 5 params
  scale_y_continuous(breaks = seq(0, 4000, 200)) + # Tick every 200 models
  scale_fill_brewer(palette = color_palette) +
  theme(legend.position = "none", axis.text.x = element_text(angle = 90, hjust = 1)))
# Save Params histogram plot
ggsave("../out/params_histogram.pdf", width = 10, height = 6)
# Calculate total specific architecture per size of Params
temp <- data |>
dplyr::filter(Params > 0) |> # Filter data where Params > 0
group_by(Params, Architecture_new) |> # Group data by Params and Architecture_new
summarise(TotalSpesificPerSize = n()) # Summarise data to get total specific architecture per size of Params
# Create plot showing percentage of specific architecture per number of parameters
plot_params_vs_architecture_percentage <- data |>
dplyr::filter(Params > 0) |> # Filter data where Params > 0
group_by(Params) |> # Group data by Params
summarise(TotalModelsPerSize = n()) |> # Summarise data to get total models per size of Params
inner_join(temp, by = "Params") |> # Inner join with temp to get total specific architecture per size of Params
mutate(Percentage = TotalSpesificPerSize / TotalModelsPerSize) |> # Calculate percentage of specific architecture per size of Params
ggplot(aes(x = Params, y = Percentage, fill = Architecture_new)) + # Define aesthetics for the plot
geom_col(position = "stack", color = "black", size = 0.1) + # Add stacked column plot with black border
labs(title = "Percentage of specific architecture per number of parameters", x = "Params", y = "Percentage") + # Set plot titles and axis labels
theme_minimal() + # Apply minimal theme
scale_fill_brewer(palette = color_palette) + # Set color palette for fill
scale_x_continuous(breaks = seq(0, 300, 5)) + # Set x-axis breaks
theme(legend.position = "bottom", axis.text.x = element_text(angle = 90, hjust = 1)) # Adjust legend position and x-axis text
# Create plot showing Params vs Average with color gradient by Average
plot_params_vs_average <- data |>
dplyr::filter(Params > 0) |> # Filter data where Params > 0
group_by(Params) |> # Group data by Params
summarise(Average = max(Average), TotalModelsPerSize = n()) |> # Summarise data to get maximum Average and total models per size of Params
ggplot(aes(x = Params, color = Average)) + # Define aesthetics for the plot
geom_segment(aes(xend = Params, y = 0, yend = 100), size = 2) + # Add line segments to represent the maximum Average
labs(title = "Params vs Average", x = "Params", y = "Average") + # Set plot titles and axis labels
scale_x_continuous(breaks = seq(0, 300, 5)) + # Set x-axis breaks
scale_y_continuous(breaks = seq(0, 100, 5)) + # Set y-axis breaks
theme_minimal() + # Apply minimal theme
theme(axis.text.x = element_text(angle = 90, hjust = 1), legend.position = "bottom") + # Adjust x-axis text and legend position
scale_color_continuous(type = "viridis") # Set color gradient using viridis palette
# Create plot showing Params vs Date vs Average
(plot_params_vs_date_vs_average <- data |>
dplyr::filter(Params > 0) |> # Filter data where Params > 0
group_by(Params, created_at) |> # Group data by Params and created_at
summarise(Average = max(Average), .groups = "drop") |> # Summarise data to get maximum Average per Params per created_at
group_by(Params) |> # Group data by Params
dplyr::filter(Average == max(Average) & !is.na(created_at)) |> # Filter rows where Average is maximum and created_at is not NA
distinct(Params, .keep_all = TRUE) |> # Keep only distinct rows
ungroup() |> # Ungroup data
ggplot(aes(x = Params, y = Average, fill = created_at)) + # Define aesthetics for the plot
geom_col(color = "black", size = 0.1) + # Add column plot with black border
labs(title = "Params vs Max Average", y = "Average", x = "Params") + # Set plot titles and axis labels
theme_minimal() + # Apply minimal theme
scale_x_continuous(breaks = seq(0, 300, 5)) + # Set x-axis breaks
theme(axis.text.x = element_text(angle = 90, hjust = 1), axis.title.y = element_text(margin =unit(c(0,0.3,0,0), "cm")), legend.position = "bottom") + # Adjust x-axis text, y-axis title, and legend position
scale_fill_date(date_labels = "%Y-%m-%d", low = "purple", high = "lightgreen", guide = "legend")) # Set fill color gradient using date labels
# Combine plots into a single grid arrangement
gridExtra::grid.arrange(plot_params_vs_architecture, plot_params_vs_architecture_percentage, plot_params_vs_date_vs_average, ncol = 1) -> plot_params_combined
# Save combined plot as a PDF
ggsave("../out/params_combined.pdf", plot_params_combined, width = 20, height = 24)
# --- Build and render the model / base-model dependency graphs (igraph) ---
# Read the CSV file and create a directed graph
graph <- read.csv("chat_models_and_base_models.csv") |>
  select(-X) |> # drop the row-index column that write.csv added
  graph_from_data_frame(directed = TRUE, vertices = NULL)
# Set the size of vertices
vertex_size <- 1
# Create a PDF file for graph output
pdf("graph_output.pdf", width = 50, height = 30)
# Plot the graph with specified parameters
plot(graph,
     vertex.size = vertex_size,
     vertex.label.cex = 0.7,
     vertex.label.dist = 1,
     edge.arrow.size = 0.1,
     asp = 0) # asp=0 allows for automatic aspect ratio adjustment
dev.off() # Close the PDF device
# Pivot the data to long format and filter rows where value equals 1
models_and_basemodels_all <- base.models |>
  pivot_longer(cols = -model_name_for_query, names_to = "basemodel", values_to = "value") |>
  subset(value == 1, select = -value) # keep only (model, basemodel) pairs that are actually linked
# Write the resulting data to a CSV file
write.csv(models_and_basemodels_all, "../data/models_and_basemodels.csv")
# Create a directed graph from the data frame
graph <- models_and_basemodels_all |>
  graph_from_data_frame(directed = TRUE)
# Set the size of vertices
vertex_size <- 0.5
# Create a PDF file for graph output
# NOTE(review): this reuses "graph_output.pdf" and silently overwrites the PDF written above — confirm intended
pdf("graph_output.pdf", width = 50, height = 20)
# Plot the graph with specified parameters
plot(graph,
     vertex.size = vertex_size,
     vertex.label.cex = 0.4,
     vertex.label.dist = 1,
     edge.arrow.size = 0.1,
     asp = 0) # asp=0 allows for automatic aspect ratio adjustment
dev.off() # Close the PDF device
# Select the first few rows of the 'basemodel' column
# NOTE(review): selected_models is not referenced again in this section
selected_models <- models_and_basemodels_all$basemodel |> head()
# Collect every edge reachable from `vertex` by following outgoing edges
# (depth-first), returning them as a data.frame of vertex names.
#
# Args:
#   graph:           an igraph object.
#   vertex:          starting vertex index (as produced by which(V(graph)$name == ...)).
#   edges_collected: optional data.frame(from, to) prepended to the result
#                    (kept for backward compatibility with the recursive version).
#
# Returns: data.frame with character columns `from` and `to`, or
#   `edges_collected` unchanged (possibly NULL) when nothing is reachable.
#
# Fix vs. original: the recursion tracked no visited set, so it re-explored
# shared descendants (exponential blow-up on DAGs) and never terminated on
# cyclic graphs; it also grew the result with rbind() inside the loop
# (quadratic). This version expands each vertex exactly once with an explicit
# stack. Callers that unique() the result (as this script does) obtain the
# same edge set as before.
explore_graph_recursively <- function(graph, vertex, edges_collected = NULL) {
  stack <- vertex
  visited <- character(0)
  from <- character(0)
  to <- character(0)
  while (length(stack) > 0) {
    current <- stack[length(stack)]
    stack <- stack[-length(stack)]
    current_name <- V(graph)[current]$name
    if (current_name %in% visited) next # already expanded: stay linear and cycle-safe
    visited <- c(visited, current_name)
    for (neighbor in neighbors(graph, current, mode = "out")) {
      from <- c(from, current_name)
      to <- c(to, V(graph)[neighbor]$name)
      stack <- c(stack, neighbor)
    }
  }
  if (length(from) == 0) {
    return(edges_collected) # preserve original behaviour: input (possibly NULL) when no edges found
  }
  rbind(edges_collected, data.frame(from = from, to = to))
}
# --- Explore fine-tune lineages from selected starting models ---
# Initialize an empty data frame to collect all edges
all_edges_collected <- data.frame(from = character(0), to = character(0))
# Define your starting vertices
data |> dplyr::filter(TypeString == "fine-tuned on domain-specific datasets") |> dplyr::filter(model_name_for_query %in% models_and_basemodels_all$model_name_for_query) |> pull(model_name_for_query) |> unique() |> head(10) -> start_vertices
# Explore the graph starting from each defined vertex
for (v in start_vertices) {
  vertex_index <- which(V(graph)$name == v)
  edges_collected <- explore_graph_recursively(graph, vertex_index)
  all_edges_collected <- rbind(all_edges_collected, edges_collected)
}
# Remove duplicate edges
all_edges_collected <- unique(all_edges_collected)
# All vertex names touched by any collected edge
all_edges_collected$from |> c(all_edges_collected$to) |> unique() -> all_vertices
# Pull score and upload date for every touched model
data |> dplyr::filter(model_name_for_query %in% all_vertices) |> select(model_name_for_query,Average, created_at) |> unique() -> vertex_scores
# Create a subgraph containing only the collected edges
sub_g <- graph_from_data_frame(all_edges_collected, directed = TRUE)
# Re-order the scores so rows line up with the subgraph's vertex order
vertex_scores[match(V(sub_g)$name, vertex_scores$model_name_for_query), c("Average","created_at")]-> vertex_scores
# Use a layout algorithm to improve the visualization
#vertex_scores$Average[is.na(vertex_scores$Average)] <- mean(vertex_scores$Average, na.rm = TRUE)
layout <- layout_with_fr(sub_g)
# NOTE(review): no na.rm here, so any NA Average makes all sizes NA — the NA-safe rescale further below handles that case
vertex_sizes <- (vertex_scores$Average - min(vertex_scores$Average)) / (max(vertex_scores$Average) - min(vertex_scores$Average))
vertex_dates <- vertex_scores$created_at
# Define vertex colors, setting starting vertices to green
vertex_colors <- ifelse(V(sub_g)$name %in% start_vertices, "green", "orange")
# Per-vertex and per-edge frames for the ggplot timelines below
vertex_data <- data.frame(name = V(sub_g)$name, score = vertex_scores$Average, date = vertex_dates)
edge_data <- all_edges_collected
edge_data$from_date <- vertex_data$date[match(edge_data$from, vertex_data$name)]
edge_data$to_date <- vertex_data$date[match(edge_data$to, vertex_data$name)]
# Plot the graph over time with segments representing edges and points representing vertices
ggplot() +
  geom_segment(data = edge_data, aes(x = to_date, xend = from_date, y = to, yend = from),
               color = "grey", arrow = arrow(length = unit(0.5, "cm"), type = "closed")) +
  geom_point(data = vertex_data, aes(x = date, y = name, size = score, color = name %in% start_vertices)) +
  scale_color_manual(values = c("FALSE" = "orange", "TRUE" = "green")) +
  theme_minimal() +
  labs(title = "Graph Plot Over Time", x = "Date", y = "Vertices", size = "Score") +
  theme(legend.position = "bottom")
# Plot the graph over time with segments representing edges, points representing vertices, and labels for vertices
# NOTE(review): indexing vertex_data$... inside aes() bypasses ggplot's data masking — fragile but works because match() keeps lengths aligned
ggplot() +
  geom_segment(data = edge_data, aes(x = to_date, xend = from_date, y = vertex_data$score[match(to, vertex_data$name)], yend = vertex_data$score[match(from, vertex_data$name)]),
               color = "grey", arrow = arrow(length = unit(0.5, "cm"), type = "closed")) +
  geom_point(data = vertex_data, aes(x = date, y = score, size = score, color = name %in% start_vertices)) +
  geom_text(data = vertex_data, aes(x = date, y = score, label = name), hjust = 1, vjust = 1, size = 3) +
  scale_color_manual(values = c("FALSE" = "orange", "TRUE" = "green")) +
  theme_minimal() +
  labs(title = "Graph Plot Over Time", x = "Date", y = "Score", size = "Score") +
  theme(legend.position = "bottom")
# Save the plots as a PDF file (only the most recent plot is saved)
ggsave("../out/graph_plot_over_time.pdf", width = 20, height = 40)
# Use a layout algorithm to improve the visualization
#vertex_scores$Average[is.na(vertex_scores$Average)] <- mean(vertex_scores$Average, na.rm = TRUE)
layout <- layout_with_fr(sub_g)
# NA-safe rescale of sizes to [0, 1]; unresolved NAs default to 0
vertex_sizes <- (vertex_scores$Average - min(vertex_scores$Average, na.rm = T)) / (max(vertex_scores$Average, na.rm = T) - min(vertex_scores$Average, na.rm = T))
vertex_sizes[is.na(vertex_sizes)] <- 0
pdf("subgraph_output.pdf", 29, 20) # Specify the file name and dimensions
# Plot the subgraph with the collected edges using the chosen layout
plot(sub_g, layout = layout, vertex.color = vertex_colors, vertex.size = vertex_sizes * 5 + 1, vertex.label.cex = 0.6,
     vertex.label.dist = 0, vertex.label.degree = -pi/2, # Positions labels directly above the vertices
     edge.color = "red", main = "Subgraph with Collected Edges",
     vertex.label = V(sub_g)$name, edge.arrow.size = 1, asp = 0, axes = T)
dev.off() # Close the PDF device
# Reverse all edge directions and plot the subgraph again
sub_g |> reverse_edges() -> sub_g_reversed
pdf("subgraph_output_reversed.pdf", 29, 20) # Specify the file name and dimensions
# Plot the subgraph with the collected edges using the chosen layout
plot(sub_g_reversed, layout = layout, vertex.color = vertex_colors, vertex.size = vertex_sizes * 4 + 1, vertex.label.cex = 0.6,
     vertex.label.dist = 0, vertex.label.degree = -pi/2, # Positions labels directly above the vertices
     edge.color = "red", main = "Subgraph with Collected Edges",
     vertex.label = V(sub_g_reversed)$name, edge.arrow.size = 0.6)
dev.off() # Close the PDF device
# --- Weekly change in per-architecture model uploads ---
# Get unique architecture names and sort them
# NOTE(review): unique_architectures is not referenced again in this section
unique_architectures <- data$Architecture_new |> unique() |> sort()
# Calculate change in popularity of models over time compared to the previous week
data |>
  select(Params, created_at, Architecture_new) |>
  na.omit() |>
  arrange(created_at) |>
  mutate(week_start = floor_date(created_at, "week")) |>
  group_by(week_start, Architecture_new) |>
  summarise(total_that_week = n(), .groups = "drop") |>
  complete(week_start = seq(min(week_start), max(week_start), by = "week"), Architecture_new, fill = list(total_that_week = 0)) |> # fill missing weeks with zero counts
  group_by(Architecture_new) |>
  mutate(change = total_that_week - dplyr::lag(total_that_week, default = 0)) -> change_in_popularity
# Plot change in popularity of models over time compared to the previous week
change_in_popularity |>
  ggplot(aes(x = week_start, y = change, fill = Architecture_new)) +
  stat_smooth(method = "loess", se = FALSE, geom = "area", alpha = 0.6) +
  labs(title = "Change in Popularity of Models Over Time", x = "Date", y = "Change in Popularity") +
  theme_minimal() +
  theme(legend.position = "bottom") +
  scale_fill_brewer(palette = color_palette) +
  scale_x_date(date_breaks = "1 month", date_labels = "%b %Y", expand = c(0,0)) +
  theme(axis.text.x = element_text(angle = 90, hjust = 1))
ggsave("../out/change_in_popularity_compared_to_previous_week.pdf", width = 20, height = 10)
# Calculate change in popularity of models over time compared to the average model popularity
change_in_popularity |>
  group_by(week_start) |>
  # NOTE(review): carrying bare columns through summarise() (multiple rows per group)
  # relies on deprecated dplyr behaviour — prefer mutate() or reframe()
  summarise(average_that_week = mean(total_that_week), total_that_week, Architecture_new) |>
  mutate(change = total_that_week - average_that_week) |>
  ggplot(aes(x = week_start, y = change, fill = Architecture_new)) +
  stat_smooth(method = "loess", se = FALSE, geom = "area", alpha = 0.6) +
  labs(title = "Change in Popularity of Models Over Time Compared to Average Model Popularity", x = "Date", y = "Change in Popularity") +
  theme_minimal() +
  theme(legend.position = "bottom") +
  scale_fill_brewer(palette = color_palette) +
  scale_x_date(date_breaks = "1 month", date_labels = "%b %Y", expand = c(0,0)) +
  theme(axis.text.x = element_text(angle = 90, hjust = 1)) +
  geom_smooth(aes(week_start,average_that_week), method = "loess", se = FALSE, color = "#464d49", size = 0.3) # overlay the weekly average itself
ggsave("../out/change_in_popularity_compared_to_average.pdf", width = 20, height = 10)
# Keep only the rows whose Average strictly exceeds every earlier Average
# (the "running record holders"), re-expressing Average relative to the
# first row's value.
#
# Args:
#   df: data.frame/tibble with an `Average` column, already sorted in the
#       order records should be evaluated (the caller sorts by created_at_time).
#
# Returns: the record-setting rows of `df`, with `Average` replaced by
#   `Average - df$Average[1]`. An empty input yields an empty frame.
#
# Fix vs. original: the row-by-row loop with rbind() was O(n^2) and crashed
# on an empty data frame (df[1,]$Average); this vectorized version is O(n),
# guards the empty case, and skips NA Averages instead of erroring.
# As before, the first row only qualifies when its Average exceeds 0
# (the running maximum is seeded at 0).
find_increasing_maximums <- function(df) {
  if (nrow(df) == 0) {
    return(df) # robustness: nothing to scan
  }
  reference <- df$Average[1]               # baseline subtracted from every kept row
  running_max <- cummax(df$Average)        # maximum seen up to and including each row
  prev_max <- c(0, running_max[-length(running_max)]) # maximum seen BEFORE each row, seeded at 0
  keep <- !is.na(df$Average) & df$Average > prev_max  # strict new records only
  plotdf <- df[keep, , drop = FALSE]
  plotdf$Average <- plotdf$Average - reference
  return(plotdf) # Return dataframe with increasing maximums
}
# Build the "record-holder" timeline for one model family.
#
# Filters `data` down to non-merge models whose (normalized) name matches
# `pattern`, optionally restricted to a parameter-count window, then keeps the
# rows that set a new best Average over time (via find_increasing_maximums).
#
# Args:
#   data:        the leaderboard data frame.
#   pattern:     regex matched against the normalized model name
#                (lower-cased, "-"/"_"/"." removed, org prefix before "/" stripped).
#   params:      optional lower bound for Params; NULL means "any size".
#   param_range: width of the accepted Params window above `params` (default 0).
#
# Returns: the processed rows with days_since_release, Average (relative to the
#   family's first model), Average_original and a Model label column.
#   NOTE(review): when no row matches, the frame is returned WITHOUT the
#   days_since_release/Model columns — callers only call n() on it, so this is
#   tolerated, but confirm before relying on the schema.
#
# Fix vs. original: the select()/mutate(original_name = ...) pair was
# duplicated verbatim (a harmless but confusing copy-paste), the three
# str_remove_all() calls are folded into one "[-_.]" character class, and the
# pointless c() wrapper around the single gsub() pattern is dropped.
# Behaviour is unchanged.
process_data <- function(data, pattern, params = NULL, param_range = 0) {
  # Exclude merged models; str_detect(tags, ...) %in% c(NA, FALSE) keeps rows
  # whose tags are missing or do not mention "merge".
  processed_data <- data |>
    dplyr::filter(TypeString != "base merges and moerges" & str_detect(tags,"merge") %in% c(NA,FALSE) & Merged == FALSE)
  # Optional parameter-count window
  if (!is.null(params)) {
    processed_data <- processed_data |>
      dplyr::filter(Params >= params & Params <= params + param_range)
  }
  processed_data <- processed_data |>
    select(Architecture, model_name_for_query, Params, Average, created_at, created_at_time) |> # Select relevant columns
    mutate(original_name = model_name_for_query) |> # Keep the unmodified name for labels
    mutate(model_name_for_query = tolower(str_remove_all(model_name_for_query, "[-_.]"))) |> # Normalize: lowercase, drop "-", "_", "."
    mutate(model_name_for_query = gsub("^.*?/", "", model_name_for_query)) |> # Strip the "org/" prefix
    dplyr::filter(str_detect(model_name_for_query, pattern) & !is.na(created_at)) |> # Filter based on pattern and non-missing created_at values
    arrange(created_at_time) |> # Arrange data by created_at_time
    mutate(total_models = n(), total_days_model = as.numeric(max(created_at) - min(created_at)), cumulative_models = cumsum(!is.na(model_name_for_query))) # Family-level totals
  # Only derive the timeline when something matched
  if (nrow(processed_data) > 0) {
    release_date <- min(processed_data$created_at) # First upload in the family
    processed_data <- processed_data |>
      mutate(days_since_release = as.integer(difftime(created_at, release_date, units = "days")), Average_original = Average) |> # Days since family release; keep raw Average
      find_increasing_maximums() |> # Keep only record-setting rows (Average becomes relative)
      mutate(Model = str_c(pattern,params)) # Label, e.g. "llama38"
  }
  return(processed_data) # Return processed data
}
# --- Ad-hoc exploration of merge filtering (results printed, not assigned) ---
# Filter data excluding "base merges and moerges" in TypeString and rows with "merge" in tags
data |>
  dplyr::filter(TypeString != "base merges and moerges") |>
  dplyr::filter(!str_detect(tags,"merge"))
# Display unique values in TypeString column
data$TypeString |> unique()
# Filter data excluding "base merges and moerges" in TypeString, rows with "merge" in tags, and Merged is FALSE
# Convert model_name_for_query to lowercase, remove hyphens and underscores, filter based on "llama3" in model_name_for_query, arrange by created_at_time,
# calculate total_models, total_days_model, and cumulative_models, and then filter Params equal to 8
# Exploratory check: non-merge llama3 models at 8B params (result is printed).
# Fixes vs. original:
#  * gsub() only honours the first element of a length-2 pattern vector (with
#    a warning), so "_" was never actually removed; a single "[-_]" character
#    class removes both.
#  * `!str_detect(tags, "merge") %in% c(NA, FALSE)` kept ONLY merge-tagged
#    rows (`%in%` binds tighter than `!`), contradicting the stated intent and
#    the equivalent filter inside process_data(); the stray `!` is dropped.
data |>
  dplyr::filter(TypeString != "base merges and moerges") |>
  dplyr::filter(str_detect(tags,"merge") %in% c(NA,FALSE) & Merged == F) |>
  mutate(model_name_for_query = tolower(gsub("[-_]", "", model_name_for_query))) |> # lowercase, drop "-" and "_"
  mutate(model_name_for_query = tolower(gsub("^.*?/", "", model_name_for_query))) |> # strip the "org/" prefix
  dplyr::filter(str_detect(model_name_for_query, "llama3") & !is.na(created_at)) |>
  arrange(created_at_time) |>
  mutate(total_models = n(), total_days_model = as.numeric(max(created_at) - min(created_at)), cumulative_models = cumsum(!is.na(model_name_for_query))) |>
  dplyr::filter(Params == 8)
# Process data for "llama3" model with 8 parameters (printed sanity check)
process_data(data, "llama3", 8)
# Exploratory print: any model whose name contains "meta"
data |> dplyr::filter(str_detect(model_name_for_query, "meta"))
# Filter data based on conditions related to merges and model_name_for_query
# Store model_name_for_query values in list_of_merges
data |>
  dplyr::filter(TypeString == "base merges and moerges" | Merged == TRUE | str_detect(tags, "merge") == T | str_detect(tolower(model_name_for_query), "merge") == T) |>
  pull(model_name_for_query) -> list_of_merges
# Store initial length of list_of_merges
(prev_len <- length(list_of_merges))
# Create a copy of data as data.no.mereges
# NOTE(review): "mereges" is a typo kept throughout for consistency with later references
data.no.mereges <- data
# Iteratively remove merges AND everything derived from a merge:
# each pass drops models named after a known merge, then promotes models whose
# base_model references a known merge into the next pass's merge list.
# Loop until length of list_of_merges becomes 0
while (prev_len > 0) {
  # Filter data.no.mereges excluding rows with model_name_for_query containing values in list_of_merges
  data.no.mereges |>
    dplyr::filter(!str_detect(model_name_for_query, paste(list_of_merges, collapse = "|"))) -> data.no.mereges
  # Filter data.no.mereges based on base_model containing values in list_of_merges
  # Store model_name_for_query values in list_of_merges
  data.no.mereges |>
    dplyr::filter(str_detect(base_model, paste(list_of_merges, collapse = "|")) %in% c(T)) |> pull(model_name_for_query)-> list_of_merges
  # Update prev_len with the new length of list_of_merges
  prev_len <- length(list_of_merges)
  print(prev_len) # progress: remaining merge descendants per pass
}
# Process each model family: record-holder timeline plus the family's size.
# NOTE(review): this repetitive pattern could be a single lapply over
# (pattern, params) pairs — kept as-is to preserve the named variables.
llamma38b_data <- process_data(data.no.mereges, "llama3", 8)
llamma38b_data |> mutate(total_improved_models = n()) -> llamma38b_data
mistral7b_data <- process_data(data.no.mereges, "mistral7b", 7)
mistral7b_data |> mutate(total_improved_models = n()) -> mistral7b_data
mixtral8x7b_data <- process_data(data.no.mereges, "mixtral8x7b", 46)
mixtral8x7b_data |> mutate(total_improved_models = n()) -> mixtral8x7b_data
llamma27b_data <- process_data(data.no.mereges, "llama27b", 7)
llamma27b_data |> mutate(total_improved_models = n()) -> llamma27b_data
phi3mini_data <- process_data(data.no.mereges, "phi3mini")
phi3mini_data |> mutate(total_improved_models = n()) -> phi3mini_data
phi2_data <- process_data(data.no.mereges, "phi2")
phi2_data |> mutate(total_improved_models = n()) -> phi2_data
process_data(data.no.mereges, "llama3", 70) -> llama370b_data
llama370b_data |> mutate(total_improved_models = n()) -> llama370b_data
process_data(data.no.mereges, "llama", 30) -> llama30b_data
llama30b_data |> mutate(total_improved_models = n()) -> llama30b_data
process_data(data.no.mereges, "llama7b",7) -> llama7b_data
llama7b_data |> mutate(total_improved_models = n()) -> llama7b_data
llama270b_data <- process_data(data.no.mereges, "llama2", 70)
llama270b_data |> mutate(total_improved_models = n()) -> llama270b_data
process_data(data.no.mereges, "phi3medium") -> phi3medium_data
phi3medium_data |> mutate(total_improved_models = n()) -> phi3medium_data
# Combine all data
combined_data <- bind_rows(llamma38b_data, mistral7b_data, mixtral8x7b_data, llamma27b_data, phi3mini_data, phi2_data, llama370b_data, llama30b_data, llama7b_data, llama270b_data, phi3medium_data)
# Calculate normalized average popularity and store the result in combined_data
combined_data |> mutate(Average_norm_popular = round(Average / total_models,2)) -> combined_data
# Exploratory print: "meta" models remaining after merge removal
data.no.mereges |> dplyr::filter(str_detect(model_name_for_query, "meta"))
# Plot the combined data: Model Comparison Over Time
ggplot(combined_data, aes(x = days_since_release, y = Average, color = Model)) +
  geom_line() +
  geom_point() +
  geom_label_repel(aes(label = original_name), max.overlaps = 50, size = 2) +
  labs(title = "Model Comparison Over Time", x = "Days since release", y = "Average") +
  theme_minimal() +
  theme(legend.position = "bottom") +
  scale_x_continuous(breaks = seq(0, max(combined_data$days_since_release), by = 5)) +
  theme(axis.text.x = element_text(angle = 90, hjust = 1)) +
  scale_color_discrete()# Adjust color scale if necessary
ggsave("../out/model_comparison_over_time.pdf", width = 30, height = 10)
# Plot the combined data: Model Comparison Over Time Normalized by Total Number of Models
ggplot(combined_data, aes(x = days_since_release, y = Average_norm_popular, color = Model)) +
  geom_line() +
  geom_point() +
  #geom_label_repel(aes(label = original_name), max.overlaps = 31, size = 2) +
  labs(title = "Model Comparison Over Time Normalized by Total Number of Models", x = "Days since release", y = "Average increase normalized") +
  theme_minimal() +
  theme(legend.position = "bottom") +
  scale_x_continuous(breaks = seq(0, max(combined_data$days_since_release), by = 5)) +
  theme(axis.text.x = element_text(angle = 90, hjust = 1)) # Adjust color scale if necessary
ggsave("../out/model_comparison_over_time_normalized.pdf", width = 15, height = 10)
# Plot the combined data: Model Popularity Index vs. Average
ggplot(combined_data |> mutate(Model_popularity_index = cumulative_models / (days_since_release + 1)), aes(x = Model_popularity_index, y = Average, color = Model)) +
  geom_point() +
  #geom_label_repel(aes(label = original_name), max.overlaps = 20, size = 3) +
  labs(title = "Model Popularity Index vs. Average", x = "Model Popularity Index", y = "Average") +
  theme_minimal() +
  scale_color_brewer(palette = "Set3") # Adjust color scale if necessary
# Compute the popularity index and average by popularity index for each model
combined_data |>
  mutate(popularity_index = total_models / total_days_model, Average_by_popularity_index = Average / popularity_index) -> combined_data
# Plot the average by popularity index over time for models with minimum and maximum average
combined_data |>
  group_by(Model) |>
  dplyr::filter(Average == min(Average) | Average == max(Average)) |>
  ggplot(aes(y = Average_by_popularity_index , x = days_since_release, color = Model)) +
  geom_line()
# Plot the average by popularity index over time for models with minimum and maximum average
# NOTE(review): days_since_release can be 0 here, making Model_popularity_index infinite — confirm acceptable
combined_data |>
  group_by(Model) |>
  dplyr::filter(Average == min(Average) | Average == max(Average)) |>
  mutate(Model_popularity_index = cumulative_models / (days_since_release), Average2 = Average / Model_popularity_index) |>
  ggplot(aes(y = Average2, x= days_since_release, color = Model)) +
  geom_line()
# Plot the comparison of average over time for each model
ggplot(combined_data, aes(x = days_since_release, y = Average, color = Model)) +
  geom_line() +
  geom_point() +
  geom_label_repel(aes(label = original_name), max.overlaps = 20, size = 2) +
  labs(title = "Model Comparison Over Time Normalized by Total Number of Models", x = "Days", y = "Average") +
  theme_minimal() +
  theme(legend.position = "bottom") +
  scale_fill_binned(type = "viridis") +
  theme(axis.text.x = element_text(angle = 90, hjust = 1)) +
  scale_x_continuous(breaks = seq(0, max(combined_data$days_since_release), by = 5))
# Plot the comparison of average over time normalized by popularity index for each model
combined_data |>
  mutate(Model_popularity_index = cumulative_models / (days_since_release+1), Average2 = Average / (Model_popularity_index+1)) |>
  ggplot(aes(x = days_since_release, y = Average2, color = Model)) +
  geom_line() +
  geom_point() +
  geom_label_repel(aes(label = model_name_for_query), max.overlaps = 30, size = 2) +
  labs(title = "Model Comparison Over Time Normalized by Total Number of Models", x = "Days since release of first model", y = "Average normalized per model") +
  theme_minimal() +
  theme(legend.position = "bottom") +
  scale_fill_binned(type = "viridis") +
  theme(axis.text.x = element_text(angle = 90, hjust = 1)) +
  scale_x_continuous(breaks = seq(0, max(combined_data$days_since_release), by = 5))
# Save the plot as a PDF
ggsave("../out/model_comparison_over_time_normalized_by_popularity_index.pdf", width = 15, height = 10)
# --- Predict author growth from commit history ---
# New authors per first-commit date, with a running total, from 2023 onward
commit.data.long |> select(authors, first_commit) |> arrange(first_commit) |> distinct(authors, .keep_all = TRUE) |> group_by(first_commit) |> summarise(new_authors = n()) |> mutate(total_authors = cumsum(new_authors)) |> dplyr::filter(first_commit >= "2023-01-01") -> prediction.data
# Split data chronologically: first 80% train, last 20% test (not random — the data is ordered by date)
train.data <- prediction.data[1:round(nrow(prediction.data) * 0.8),]
test.data <- prediction.data[(round(nrow(prediction.data) * 0.8) + 1):nrow(prediction.data),]
# Fit a linear model to the data
# NOTE(review): new_authors is part of how total_authors is built (cumsum) — confirm using it as a predictor is intended
model <- lm(total_authors ~ first_commit + new_authors, data = train.data)
predict(model, newdata = test.data) |> round(0) -> predictions
# Plot the predicted number of users
ggplot(prediction.data, aes(x = first_commit, y = total_authors)) +
  geom_line() +
  geom_point() +
  geom_point(data = test.data, aes(x = first_commit, y = predictions), color = "red") +
  labs(title = "Predicted Number of Users Over Time", x = "Month", y = "Total Users") +
  theme_minimal() +
  theme(legend.position = "bottom") +
  scale_x_date(date_breaks = "1 month", date_labels = "%b %Y", expand = c(0,0)) +
  theme(axis.text.x = element_text(angle = 90, hjust = 1))
# Simpler model: date only
lm(total_authors ~ first_commit, data=train.data) -> model
predict(model, newdata = test.data) |> round(0) -> predictions
# Plot the predicted number of users
ggplot(prediction.data, aes(x = first_commit, y = total_authors)) +
  geom_line() +
  geom_point() +
  geom_point(data = test.data, aes(x = first_commit, y = predictions), color = "red") +
  labs(title = "Predicted Number of Users Over Time", x = "Month", y = "Total Users") +
  theme_minimal() +
  theme(legend.position = "bottom") +
  scale_x_date(date_breaks = "1 month", date_labels = "%b %Y", expand = c(0,0)) +
  theme(axis.text.x = element_text(angle = 90, hjust = 1))
# Extrapolate the linear model to a single future date
predict(model, newdata = data.frame(first_commit = as.Date("2025-01-01"))) |> round(0)
# Create sequence of monthly dates from 2024-06-01 to 2029-06-01
dates <- seq(as.Date("2024-06-01"), as.Date("2029-06-01"), by = "month")
# GAM with a smooth over the (numeric) date — dates must be numeric for s()
mgcv::gam(total_authors ~ s(first_commit |> as.numeric()), data = train.data) -> model_gam
# Predict the number of users for each date
predictions <- data.frame(first_commit = dates) |> mutate(prediction = predict(model_gam, newdata = data.frame(first_commit = dates))) |> mutate_if(is.numeric, round)
# Plot the predicted number of users
predictions |> ggplot(aes(x = first_commit, y = prediction)) + geom_point() + labs(title = "Predicted Number of Users Over Time", x = "Month", y = "Total Users") + theme_minimal() + theme(legend.position = "bottom") + scale_x_date(date_breaks = "1 month", date_labels = "%b %Y") + theme(axis.text.x = element_text(angle = 90, hjust = 1)) + scale_y_continuous(breaks = seq(0, 10000, by = 200))
# Actual totals with a GAM smooth overlaid for comparison
prediction.data |> ggplot(aes(first_commit, total_authors)) + geom_line() + geom_point() + labs(title = "Total Users Over Time", x = "Month", y = "Total Users") + theme_minimal() + theme(legend.position = "bottom") + scale_x_date(date_breaks = "1 month", date_labels = "%b %Y", expand = c(0,0)) + theme(axis.text.x = element_text(angle = 90, hjust = 1)) + geom_smooth(method = "gam", color = "red")
# GAM predictions on the held-out test period
predict(model_gam, newdata = test.data) |> round(0) -> predictions_gam
# Plot the predicted number of users
ggplot(prediction.data, aes(x = first_commit, y = total_authors)) +
  geom_line() +
  geom_point() +
  geom_point(data = test.data, aes(x = first_commit, y = predictions_gam), color = "red") +
  labs(title = "Predicted Number of Users Over Time", x = "Month", y = "Total Users") +
  theme_minimal() +
  theme(legend.position = "bottom") +
  scale_x_date(date_breaks = "1 month", date_labels = "%b %Y", expand = c(0,0)) +
  theme(axis.text.x = element_text(angle = 90, hjust = 1))
# Top-20 models per individual benchmark (printed), then export the frames
data |> select(model_name_for_query, Average, ARC, created_at, Architecture_new, Params, TypeString) |>
  arrange(desc(ARC)) |> head(20)
data |> select(model_name_for_query, Average, HellaSwag, created_at, Architecture_new, Params, TypeString) |>
  arrange(desc(HellaSwag)) |> head(20)
data |> select(model_name_for_query, TruthfulQA, Average, created_at, Architecture_new, Params, TypeString) |>
  arrange(desc(TruthfulQA)) |> head(20)
data |> select(model_name_for_query, Average,Winogrande, created_at, Architecture_new, Params, TypeString) |>
  arrange(desc(Winogrande)) |> head(20)
data |> write.csv("../data/data.csv", row.names = FALSE)
author.activity |> write.csv("../data/author_activity.csv", row.names = FALSE)
combined_data |> write.csv("../data/data_for_model_evolution.csv", row.names = FALSE)
|
import 'package:flutter/material.dart';
import 'package:flutter_bloc/flutter_bloc.dart';
import 'package:recipe_hub/services/bloc/recipe/chip/chip_bloc.dart';
import 'package:recipe_hub/services/bloc/recipe/chip/chip_event.dart';
import 'package:recipe_hub/services/bloc/recipe/chip/chip_state.dart';
import 'package:recipe_hub/services/bloc/recipe/filter/filter_bloc.dart';
import 'package:recipe_hub/services/bloc/recipe/filter/filter_event.dart';
/// A horizontally scrollable strip of filter chips.
///
/// [filterList] supplies the chip titles; the widget's state wires each chip
/// to the surrounding filter blocs.
class ScrollFilter extends StatefulWidget {
  /// Titles rendered as selectable chips, in order.
  final List<String> filterList;

  const ScrollFilter({required this.filterList, super.key});

  @override
  State<ScrollFilter> createState() {
    return _ScrollFilterState();
  }
}
class _ScrollFilterState extends State<ScrollFilter> {
  @override
  Widget build(BuildContext context) {
    // Horizontal strip of FilterChips, one per entry in widget.filterList.
    // Each chip gets its own ChipBloc (local selection state) and reports
    // selections to the shared FilterBloc that owns the active filter set.
    return SizedBox(
      height: 50,
      child: ListView(
        padding: const EdgeInsets.only(bottom: 15),
        scrollDirection: Axis.horizontal,
        children: widget.filterList.map((String title) {
          return Padding(
            padding: const EdgeInsets.symmetric(vertical: 0, horizontal: 6),
            child: BlocProvider<ChipBloc>(
              create: (context) => ChipBloc(),
              // Builder gives us a context below the BlocProvider so the
              // ChipBloc can be looked up.
              child: Builder(builder: (context) {
                // Rebuilds whenever the FilterBloc's filter set changes.
                final filters =
                    context.select((FilterBloc bloc) => bloc.filters);
                final ChipBloc chip = BlocProvider.of<ChipBloc>(context);
                // NOTE(review): dispatching FilterEventAdd during build
                // re-adds an already-active filter on every rebuild — confirm
                // FilterBloc treats this as an idempotent re-registration of
                // the chip, otherwise this is a side effect in build.
                if (filters.contains(title)) {
                  context
                      .read<FilterBloc>()
                      .add(FilterEventAdd(value: title, chip: chip));
                }
                return BlocBuilder<ChipBloc, ChipState>(
                  builder: (context, state) {
                    return FilterChip(
                      checkmarkColor: Colors.white,
                      selectedColor: Theme.of(context).colorScheme.primary,
                      // Selection visuals are driven by the FilterBloc's set,
                      // not by the ChipBloc state.
                      labelStyle: TextStyle(
                          color: filters.contains(title)
                              ? Colors.white
                              : Colors.black),
                      label: Text(title),
                      selected: filters.contains(title),
                      onSelected: (bool selected) {
                        // Keep both blocs in sync: FilterBloc tracks the set
                        // of active titles, ChipBloc mirrors this chip's flag.
                        if (selected) {
                          context
                              .read<FilterBloc>()
                              .add(FilterEventAdd(value: title, chip: chip));
                          context
                              .read<ChipBloc>()
                              .add(const ChipEventUpdate(state: true));
                        } else {
                          context
                              .read<FilterBloc>()
                              .add(FilterEventRemove(value: title, chip: chip));
                          context
                              .read<ChipBloc>()
                              .add(const ChipEventUpdate(state: false));
                        }
                      },
                    );
                  },
                );
              }),
            ),
          );
        }).toList(),
      ),
    );
  }
}
|
import axios from "axios"
import React from "react"
import { ProgressBar, Tab, Tabs } from "react-bootstrap"
import CountUp from "react-countup"
import { graphql } from "gatsby"
import Layout from "../../components/Layout"
import MasonryCardGrid from "../../components/Card/MasonryCardGrid"
import { heading } from "./publications.module.css"
// Publications page: seeds card data from GraphQL, then augments it with
// live publication records fetched from DBLP in componentDidMount.
class Publications extends React.Component {
constructor(props) {
super(props)
this.state = {
// Static cards from allCardsJson, later merged with parsed DBLP entries.
data: props.data.allCardsJson.nodes,
// Staff rows (name, DBLP id, join/leave dates) from allStaffCsv.
professors: props.data["allStaffCsv"]["nodes"],
// Number of DBLP fetches completed; drives the render() progress bar.
loadedCount: 1,
conferences: 0,
journals: 0,
}
this.year = new Date().getFullYear()
}
componentDidMount () {
for (const prof of this.state.professors) {
let joinDate = prof["DOJ"]
let leaveDate = prof["DOR"]
joinDate = joinDate.toString()
if (joinDate.length < 4) joinDate = "20" + joinDate.split("-")[2]
if (leaveDate && leaveDate !== "")
leaveDate = "20" + leaveDate.split("-")[2]
else leaveDate = (this.year + 10).toString()
joinDate = "20" + joinDate.split("-")[2]
axios.get("https://dblp.org/pid/" + prof["DBLP"] + ".xml").then(res => {
const parser = require("xml2js")
parser.parseString(res.data, (err, rest) => {
rest = rest["dblpperson"]["r"]
rest = rest
.map(x =>
this.parse(
x,
joinDate,
leaveDate,
this.state.professors,
prof["name"]
)
)
.filter(x => {
return x !== {}
})
this.setState(prevState => ({
data: [...rest, ...prevState.data],
loadedCount: prevState.loadedCount + 1,
}))
})
})
}
}
// Convert one raw DBLP record into a card object; returns {} as a sentinel
// (filtered out by the caller) when the record should be skipped.
// element — raw xml2js node; joinDate/leaveDate — "20yy" tenure bounds;
// pl — full professor list; nm — the professor this fetch belongs to.
parse(element, joinDate, leaveDate, pl, nm) {
const type = Object.keys(element)[0]
element = element[type][0]
// Outside this professor's tenure window.
if (element["year"][0] < joinDate || element["year"][0] > leaveDate)
return {}
// Only conference papers and journal articles are listed.
if (type !== "inproceedings" && type !== "article") return {}
const result = {}
result["date"] = element["year"][0] + "-02-02"
result["subtitle"] = ""
let author_final = ""
if (element.author !== undefined)
for (const temp of element.author)
author_final = author_final + temp["_"] + ", "
// Deduplicate across professors: keep the entry only under the first
// listed co-author who is also on staff.
for (const pln of pl) {
if (author_final.includes(pln["name"])) {
if (pln["name"] != nm) return {}
else break
}
}
author_final = author_final.slice(0, author_final.length - 2)
// Strip DBLP's numeric disambiguation suffixes from author names.
result["description"] = author_final.replace(/[0-9]/g, "")
result["title"] = element.title[0]
if (typeof result["title"] !== "string") return {}
// Prefer the electronic-edition link, then the DBLP url, then the key.
if (element["ee"]) {
if (element["ee"][0]["_"]) result["link"] = element["ee"][0]["_"]
else result["link"] = element["ee"][0]
} else if (element.url)
result["link"] = "https://dblp.org/" + element.url[0]
else if (element["$"].key)
result["link"] = "https://dblp.org/rec/" + element["$"].key
result["category"] = ["Publications"]
if (element.journal) result["subtitle"] += element.journal + " "
if (element.booktitle) result["subtitle"] += element.booktitle + " "
if (element.year) result["subtitle"] += element.year + " "
// Skip preprints (CoRR/ePrint) and edited volumes.
if (
result["subtitle"].includes("CoRR") ||
result["subtitle"].includes("ePrint") ||
result["subtitle"].includes("Editor") ||
result["subtitle"].includes("editor")
)
return {}
// NOTE(review): calling setState from inside parse() (invoked in a map
// during componentDidMount) couples counting to parsing — confirm intended.
if (type === "inproceedings")
this.setState(prevState => ({
conferences: prevState.conferences + 1,
}))
if (type === "article")
this.setState(prevState => ({
journals: prevState.journals + 1,
}))
return result
}
render() {
if (this.state.loadedCount < this.state.professors.length)
return (
<Layout mainClass="publications" title="Publications">
{"Loading"}
<ProgressBar
animated
label={Math.floor(
((this.state.loadedCount + 1) * 100) /
this.state.professors.length
)}
now={
((this.state.loadedCount + 1) * 100) /
this.state.professors.length
}
/>
</Layout>
)
else {
const year_list = [
this.year.toString(),
(this.year - 1).toString(),
(this.year - 2).toString(),
(this.year - 3).toString(),
(this.year - 4).toString(),
(this.year - 5).toString(),
(this.year - 6).toString(),
(this.year - 7).toString(),
(this.year - 8).toString(),
(this.year - 9).toString(),
(this.year - 10).toString(),
]
return (
<Layout mainClass="publications" title="Publications">
<h1 className="section-heading">Publications</h1>
<div className={"float-right text-end"}>
<div className="h4" style={{ color: "#3fada8" }}>
<strong>Statistics</strong>
</div>{" "}
<br />
<div className={heading}>
Total Conference Papers:{" "}
<CountUp
duration={4}
end={this.state.conferences}
useEasing={true}
/>
<br />
Total Journal Articles:{" "}
<CountUp
duration={4}
end={this.state.journals}
useEasing={true}
/>
</div>
</div>
<Tabs defaultActiveKey={year_list[0]} className="mb-3">
<Tab eventKey={year_list[0]} title={year_list[0]}>
<MasonryCardGrid
data={this.state.data.filter(x => {
return (
x.date &&
x.date.split("-")[0] === year_list[0] &&
x["category"].includes("Publications")
)
})}
/>
</Tab>
<Tab eventKey={year_list[1]} title={year_list[1]}>
<MasonryCardGrid
data={this.state.data.filter(x => {
return (
x.date &&
x.date.split("-")[0] === year_list[1] &&
x["category"].includes("Publications")
)
})}
/>
</Tab>
<Tab eventKey={year_list[2]} title={year_list[2]}>
<MasonryCardGrid
data={this.state.data.filter(x => {
return (
x.date &&
x.date.split("-")[0] === year_list[2] &&
x["category"].includes("Publications")
)
})}
/>
</Tab>
<Tab eventKey={year_list[3]} title={year_list[3]}>
<MasonryCardGrid
data={this.state.data.filter(x => {
return (
x.date &&
x.date.split("-")[0] === year_list[3] &&
x["category"].includes("Publications")
)
})}
/>
</Tab>
<Tab eventKey={year_list[4]} title={year_list[4]}>
<MasonryCardGrid
data={this.state.data.filter(x => {
return (
x.date &&
x.date.split("-")[0] === year_list[4] &&
x["category"].includes("Publications")
)
})}
/>
</Tab>
<Tab eventKey={year_list[5]} title={year_list[5]}>
<MasonryCardGrid
data={this.state.data.filter(x => {
return (
x.date &&
x.date.split("-")[0] === year_list[5] &&
x["category"].includes("Publications")
)
})}
/>
</Tab>
<Tab eventKey={year_list[6]} title={year_list[6]}>
<MasonryCardGrid
data={this.state.data.filter(x => {
return (
x.date &&
x.date.split("-")[0] === year_list[6] &&
x["category"].includes("Publications")
)
})}
/>
</Tab>
<Tab eventKey={year_list[7]} title={year_list[7]}>
<MasonryCardGrid
data={this.state.data.filter(x => {
return (
x.date &&
x.date.split("-")[0] === year_list[7] &&
x["category"].includes("Publications")
)
})}
/>
</Tab>
<Tab eventKey={year_list[8]} title={year_list[8]}>
<MasonryCardGrid
data={this.state.data.filter(x => {
return (
x.date &&
x.date.split("-")[0] === year_list[8] &&
x["category"].includes("Publications")
)
})}
/>
</Tab>
<Tab eventKey={year_list[9]} title={year_list[9]}>
<MasonryCardGrid
data={this.state.data.filter(x => {
return (
x.date &&
x.date.split("-")[0] === year_list[9] &&
x["category"].includes("Publications")
)
})}
/>
</Tab>
<Tab eventKey={year_list[10]} title={year_list[10]}>
<MasonryCardGrid
data={this.state.data.filter(x => {
return (
x.date &&
x.date.split("-")[0] === year_list[10] &&
x["category"].includes("Publications")
)
})}
/>
</Tab>
</Tabs>
</Layout>
)
}
}
}
export default Publications
export const query = graphql`
{
allCardsJson(filter: {}) {
nodes {
category
description
title
subtitle
link
image
}
}
allStaffCsv(filter: { DBLP: { ne: "" } }) {
nodes {
name
DBLP
DOJ
DOR
}
}
}
`
|
import { computed, defineComponent, inject, ref, type Ref } from 'vue';
import { useI18n } from 'vue-i18n';
import { useRoute, useRouter } from 'vue-router';
import { useVuelidate } from '@vuelidate/core';
import CidadesMySuffixService from './cidades-my-suffix.service';
import { useValidation } from '@/shared/composables';
import { useAlertService } from '@/shared/alert/alert.service';
import { type ICidadesMySuffix, CidadesMySuffix } from '@/shared/model/cidades-my-suffix.model';
import { DiariaLocalidadeEnum } from '@/shared/model/enumerations/diaria-localidade-enum.model';
import { LocalidadeEnum } from '@/shared/model/enumerations/localidade-enum.model';
export default defineComponent({
compatConfig: { MODE: 3 },
name: 'CidadesMySuffixUpdate',
setup() {
const cidadesService = inject('cidadesService', () => new CidadesMySuffixService());
const alertService = inject('alertService', () => useAlertService(), true);
const cidades: Ref<ICidadesMySuffix> = ref(new CidadesMySuffix());
const diariaLocalidadeEnumValues: Ref<string[]> = ref(Object.keys(DiariaLocalidadeEnum));
const localidadeEnumValues: Ref<string[]> = ref(Object.keys(LocalidadeEnum));
const isSaving = ref(false);
const currentLanguage = inject('currentLanguage', () => computed(() => navigator.language ?? 'pt-br'), true);
const route = useRoute();
const router = useRouter();
const previousState = () => router.go(-1);
const retrieveCidadesMySuffix = async cidadesId => {
try {
const res = await cidadesService().find(cidadesId);
cidades.value = res;
} catch (error) {
alertService.showHttpError(error.response);
}
};
if (route.params?.cidadesId) {
retrieveCidadesMySuffix(route.params.cidadesId);
}
const initRelationships = () => {};
initRelationships();
const { t: t$ } = useI18n();
const validations = useValidation();
const validationRules = {
cidade: {},
valorLocalidade: {},
missao: {},
};
const v$ = useVuelidate(validationRules, cidades as any);
v$.value.$validate();
return {
cidadesService,
alertService,
cidades,
previousState,
diariaLocalidadeEnumValues,
localidadeEnumValues,
isSaving,
currentLanguage,
v$,
t$,
};
},
created(): void {},
methods: {
save(): void {
this.isSaving = true;
if (this.cidades.id) {
this.cidadesService()
.update(this.cidades)
.then(param => {
this.isSaving = false;
this.previousState();
this.alertService.showInfo(this.t$('iceaApp.cidades.updated', { param: param.id }));
})
.catch(error => {
this.isSaving = false;
this.alertService.showHttpError(error.response);
});
} else {
this.cidadesService()
.create(this.cidades)
.then(param => {
this.isSaving = false;
this.previousState();
this.alertService.showSuccess(this.t$('iceaApp.cidades.created', { param: param.id }).toString());
})
.catch(error => {
this.isSaving = false;
this.alertService.showHttpError(error.response);
});
}
},
},
});
|
/QUESTION/
Given a string containing digits from 2-9 inclusive, return all possible letter combinations that
the number could represent. Return the answer in any order.
A mapping of digits to letters (just like on the telephone buttons) is given below.
Note that 1 does not map to any letters.
/C++ CODE IMPLEMENTATION/
// Recursively build every letter combination.
// idx  — current digit position in arr
// temp — combination built so far (const ref: avoids a copy per frame;
//        callers passing temporaries/literals still bind fine)
// res  — output vector of completed combinations
// arr  — per-digit letter sets (arr[i] = letters for digit i)
void solve(int idx, const string &temp, vector<string> &res, vector<string> &arr)
{
    // All digits consumed: temp is a complete combination.
    if (idx == (int)arr.size())
    {
        res.push_back(temp);
        return;
    }
    // Branch on every letter the current digit can represent.
    for (int i = 0; i < (int)arr[idx].size(); i++)
    {
        solve(idx + 1, temp + arr[idx][i], res, arr);
    }
}
// Phone-keypad letter combinations for a digit string (digits 2-9).
// Returns all combinations in keypad order; empty input yields {}.
vector<string> letterCombinations(string digits) {
    if (digits.empty()) return {};
    // Letters printed on each phone key.
    unordered_map<char, string> keypad =
    {
        {'2', "abc"}, {'3', "def"}, {'4', "ghi"},
        {'5', "jkl"}, {'6', "mno"}, {'7', "pqrs"},
        {'8', "tuv"}, {'9', "wxyz"}
    };
    // Translate each digit into its letter set.
    vector<string> letters;
    for (char digit : digits)
        letters.push_back(keypad[digit]);
    // Depth-first expansion over the letter sets.
    vector<string> combos;
    solve(0, "", combos, letters);
    return combos;
}
|
import { Route, Switch } from "react-router-dom";
import Login from "../pages/Login";
import Register from "../pages/Register";
import Dashboard from "../pages/Dashboard";
import { useEffect, useState } from "react";
// Top-level route table. `auth` mirrors whether a KenzieHub token exists in
// localStorage; child pages receive it (and setAuth) to drive redirects.
const Routes = () => {
const [auth, setAuth] = useState(false);
useEffect(() => {
// NOTE(review): JSON.parse throws if the stored token is a raw JWT string
// rather than JSON — confirm how Login persists "@KenzieHub:token".
const token = JSON.parse(localStorage.getItem("@KenzieHub:token"));
if (token) {
return setAuth(true);
}
}, [auth]);
return (
<Switch>
<Route exact path="/">
<Login auth={auth} setAuth={setAuth} />
</Route>
<Route path="/register">
<Register auth={auth} />
</Route>
<Route path="/dashboard">
<Dashboard auth={auth} setAuth={setAuth} />
</Route>
</Switch>
);
};
export default Routes;
|
<?php
require './Traits/LangFlag.php';
/**
 * Title description: plot text, language code and genre list.
 * Validation failures are caught in the constructor and echoed rather than
 * propagated to the caller.
 */
class Description{
use LangFlag;
// Instance variables
public $plot;
public $language;
public $genre;
// Constructor
public function __construct(string $_plot,string $_language,array $_genre){
try{
$this->setPlot($_plot);
$this->setLanguage($_language);
$this->genre= $_genre;
}catch(Exception $e){
echo "Errore: " . $e->getMessage();
}
}
private function setPlot($_plot){
// Reject plots of 50 characters or more.
// NOTE(review): the message also mentions a missing plot ("inesistente"),
// but an empty string passes this check — confirm whether a minimum
// length was intended.
if(strlen($_plot)<50){
$this->plot = $_plot;
}else{
throw new UnexpectedValueException("La trama è troppo lunga o inesistente.");
}
}
private function setLanguage($_language){
// Validate the language against the supported codes.
$supportedLanguages = ['en', 'it', 'es', 'fr', 'de', 'ja'];
if (in_array($_language, $supportedLanguages)) {
$this->language = $_language;
} else {
throw new UnexpectedValueException("Lingua non supportata.");
}
}
}
|
let socket = io.connect("http://localhost:3500");
let connectedUserElement = document.getElementById("connected-users");
let welcomeMessageElement = document.getElementById("welcome-user-name-id");
let inputField = document.getElementById("input-text");
let inputButton = document.getElementById("input-button");
let chatBox = document.getElementById("chat-id");
let logout = document.getElementById("logout");
let username;
let image;
inputField.addEventListener("keypress", function (event) {
if (event.key === "Enter") {
event.preventDefault();
inputButton.click();
}
});
// Fetch the logged-in user's details (auth token from localStorage) and
// announce them to the server via the "connected-user" socket event.
// On failure the stored session is cleared and the user is sent back to
// the login page.
// NOTE(review): not invoked anywhere in this file — confirm it is called
// from the page that loads this script.
async function fetchUserInfo() {
try {
const res = localStorage.getItem("token");
const response = await fetch("http://localhost:3500/api/userdetails", {
method: "GET",
headers: {
"Content-Type": "application/json",
Authorization: res,
},
});
// await mixed with .then()/.catch(): any parse/auth failure is treated
// as an invalid session.
await response
.json()
.then((data) => {
socket.emit("connected-user", data);
})
.catch((err) => {
// console.log(err);
localStorage.clear();
window.location.replace("/index.html");
});
} catch (error) {
console.log(error);
}
}
let timer;
socket.on("getUsers", (users) => {
users.filter((item, index) => users.indexOf(item) === index);
if (users.length > 0) {
users.forEach((user) => {
if (user.username != userInfo.username) {
let li = document.createElement("li");
li.setAttribute("class", "online-user");
li.setAttribute("id", "online-user-" + user.username);
li.innerHTML = ` <div class="green-dot-wrapper"> <div class="green-dot"></div> </div> <div class="online-user-name">${user.username}</div> `;
connectedUserElement.appendChild(li);
const numOfOnlineUsers = document.querySelectorAll("li").length;
document.getElementById("online-user-id").innerText =
"Online Users " + numOfOnlineUsers;
}
});
}
});
socket.on("current_user", (user) => {
userInfo = user;
welcomeMessageElement.innerText = "Welcome " + userInfo.username + "!";
});
socket.on("load-messages", (chats) => {
chatBox.innerHTML = "";
chats.forEach((chat) => {
if (chat.username == userInfo.username) {
let d = new Date(chat.timeStamp);
let time = d.getHours() + ":" + d.getMinutes();
let messageElement = document.createElement("div");
messageElement.setAttribute("class", "chat-content user");
messageElement.innerHTML = `<div class="img-container"><img src="${chat.image}" alt="" class="user-image"></div> <div class="user-text"> <div class="user-metadata"><div class="user-time">${time}</div> <div class="user-name">${chat.username}</div></div> <div class="user-content">${chat.message}</div></div> `;
chatBox.append(messageElement);
} else {
let d = new Date(chat.timeStamp);
let time = d.getHours() + ":" + d.getMinutes();
let messageElement = document.createElement("div");
messageElement.setAttribute("class", "chat-content sender");
messageElement.innerHTML = `<div class="img-container"><img src="${chat.image}" alt="" class="user-image"></div> <div class="sender-text"> <div class="sender-metadata"><div class="sender-time">${time}</div> <div class="sender-name">${chat.username}</div></div><div class="sender-content">${chat.message}</div></div> `;
chatBox.append(messageElement);
}
chatBox.scrollTop = chatBox.scrollHeight;
});
});
socket.on("joined_user", (user) => {
if (user) {
let li = document.createElement("li");
li.setAttribute("class", "online-user");
li.setAttribute("id", "online-user-" + user.username);
li.innerHTML = ` <div class="green-dot-wrapper"> <div class="green-dot"></div> </div> <div class="online-user-name">${user.username}</div> `;
connectedUserElement.appendChild(li);
const numOfOnlineUsers = document.querySelectorAll("li").length;
document.getElementById("online-user-id").innerText =
"Online Users " + numOfOnlineUsers;
}
});
inputField.addEventListener("keydown", (event) => {
socket.emit("typing-user", userInfo.username);
});
socket.on("typing", (user) => {
if (timer) {
clearTimeout(timer);
}
document.getElementById("sender-status-id").innerText =
user + " is typing...";
timer = setTimeout(() => {
document.getElementById("sender-status-id").innerText = "";
}, 1000);
});
inputButton.addEventListener("click", (event) => {
let message = inputField.value;
let d = new Date();
let time = d.getHours() + ":" + d.getMinutes();
if (message) {
socket.emit("send_chat_msg", userInfo, message, time);
let messageElement = document.createElement("div");
messageElement.setAttribute("class", "chat-content user");
messageElement.innerHTML = `<div class="img-container"><img src="${userInfo.image}" alt="" class="user-image"></div> <div class="user-text"> <div class="user-metadata"><div class="user-time">${time}</div> <div class="user-name">${userInfo.username}</div></div> <div class="user-content">${message}</div></div> `;
chatBox.append(messageElement);
inputField.value = "";
chatBox.scrollTop = chatBox.scrollHeight;
}
});
socket.on("receive_chat_msg", (user, message, time) => {
if (user.username == userInfo.username) {
let messageElement = document.createElement("div");
messageElement.setAttribute("class", "chat-content user");
messageElement.innerHTML = `<div class="img-container"><img src="${user.image}" alt="" class="user-image"></div> <div class="user-text"> <div class="user-metadata"><div class="user-time">${time}</div> <div class="user-name">${user.username}</div></div> <div class="user-content">${message}</div></div> `;
chatBox.append(messageElement);
chatBox.scrollTop = chatBox.scrollHeight;
} else {
let messageElement = document.createElement("div");
messageElement.setAttribute("class", "chat-content sender");
messageElement.innerHTML = `<div class="img-container"><img src="${user.image}" alt="" class="user-image"></div> <div class="sender-text"> <div class="sender-metadata"><div class="sender-time">${time}</div> <div class="sender-name">${user.username}</div></div><div class="sender-content">${message}</div></div> `;
chatBox.append(messageElement);
chatBox.scrollTop = chatBox.scrollHeight;
}
});
logout.addEventListener("click", (event) => {
localStorage.clear();
window.location.replace("/index.html");
});
socket.on("disconnected_user", (user) => {
document.getElementById("online-user-" + user.username).remove();
});
|
const hre = require("hardhat");
const fs = require('fs');
// Deploys OrderBook + CranklessOrderBook against a forked network, funds the
// deployer with WBTC/USDC from impersonated whale accounts, and writes the
// deployed addresses to config.json for downstream scripts.
async function main() {
  // Token contracts and whale holders to pull funds from.
  const wbtcAddress = "0x2f2a2543B76A4166549F7aaB2e75Bef0aefC5B0f";
  const wbtcDecimals = 8;
  const wbtcShark = "0x7546966122e636a601a3ea4497d3509f160771d8";
  const usdcAddress = "0xaf88d065e77c8cC2239327C5EDb3A432268e5831";
  const usdcDecimals = 6;
  const usdcShark = "0x3dd1d15b3c78d6acfd75a254e857cbe5b9ff0af2";
  const [deployer] = await hre.ethers.getSigners();
  const userAddress = deployer.address;
  // Deploy OrderBook contract.
  const OrderBook = await hre.ethers.getContractFactory("OrderBook");
  const orderBook = await OrderBook.deploy(wbtcAddress, wbtcDecimals, usdcAddress, usdcDecimals);
  await orderBook.deployed();
  const orderBookAddress = orderBook.address;
  console.log("OrderBook deployed to:", orderBookAddress);
  // Deploy CranklessOrderBook contract.
  const CranklessOrderBook = await hre.ethers.getContractFactory("CranklessOrderBook");
  const cranklessOrderBook = await CranklessOrderBook.deploy(wbtcAddress, wbtcDecimals, usdcAddress, usdcDecimals);
  await cranklessOrderBook.deployed();
  const cranklessOrderBookAddress = cranklessOrderBook.address;
  console.log("CranklessOrderBook deployed to:", cranklessOrderBookAddress);
  // Impersonate the WBTC whale and fund the deployer.
  await hre.network.provider.request({
    method: "hardhat_impersonateAccount",
    params: [wbtcShark],
  });
  const wbtcSigner = await hre.ethers.getSigner(wbtcShark);
  const wbtcContractShark = await hre.ethers.getContractAt("contracts/libraries/IERC20.sol:IERC20", wbtcAddress, wbtcSigner);
  let receipt = await wbtcContractShark.transfer(
    userAddress,
    2 * 10 ** 10,
  );
  // Wait for inclusion before reading the balance. (The original only
  // awaited the USDC receipt, so this read could race the WBTC transfer.)
  await receipt.wait();
  console.log("wbtc balance: ", (await wbtcContractShark.balanceOf(userAddress)).toString())
  // Impersonate the USDC whale and fund the deployer.
  await hre.network.provider.request({
    method: "hardhat_impersonateAccount",
    params: [usdcShark],
  });
  const usdcSigner = await hre.ethers.getSigner(usdcShark);
  const usdcContractShark = await hre.ethers.getContractAt("contracts/libraries/IERC20.sol:IERC20", usdcAddress, usdcSigner);
  receipt = await usdcContractShark.transfer(
    userAddress,
    2 * 10 ** 10,
  );
  await receipt.wait();
  console.log("usdc balance: ", (await usdcContractShark.balanceOf(userAddress)).toString())
  // Persist deployed addresses for downstream scripts/tests.
  const config = {
    "usdcAddress": usdcAddress,
    "wbtcAddress": wbtcAddress,
    "orderBookAddress": orderBookAddress,
    "cranklessOrderBookAddress": cranklessOrderBookAddress
  }
  fs.writeFileSync("config.json", JSON.stringify(config));
}
main().catch((error) => {
console.error(error);
process.exitCode = 1;
});
|
const QueueImpl = require('./queueImplementation')
test('First item to enter is the first to leave', () => {
let queue = new QueueImpl(5)
let expected = "olá"
queue.push(expected)
queue.push("mundo")
let front = queue.front()
expect(front).toBe(expected)
let poppedItem = queue.pop()
expect(poppedItem).toBe(expected)
})
test('Front is correct', () => {
let queue = new QueueImpl(3)
queue.push("ola")
queue.push(",")
queue.push("mundo")
queue.push("!")
expect("ola").toBe(queue.front())
})
test('Rear is correct', () => {
let queue = new QueueImpl(3)
queue.push("ola")
queue.push(",")
queue.push("mundo")
queue.push("!")
expect("mundo").toBe(queue.rear())
})
test('Push item beyond limit', () => {
let queue = new QueueImpl(3)
queue.push("ola")
queue.push(",")
queue.push("mundo")
queue.push("!")
expect(3).toBe(queue.size())
expect(3).toBe(queue.maxSize())
expect("mundo").toBe(queue.rear())
expect("ola").toBe(queue.front())
})
test('Pop beyond 0', () => {
let queue = new QueueImpl(3)
queue.push("ola")
queue.push(",")
queue.push("mundo")
queue.pop()
queue.pop()
queue.pop()
queue.pop()
queue.pop()
queue.pop()
expect(0).toBe(queue.size())
expect(3).toBe(queue.maxSize())
expect(null).toBe(queue.rear())
})
test('IsEmpty returns true', () => {
let queue = new QueueImpl(3)
queue.push("ola")
queue.push(",")
queue.push("mundo")
queue.pop()
queue.pop()
queue.pop()
queue.pop()
queue.pop()
queue.pop()
expect(true).toBe(queue.isEmpty())
})
test('IsEmpty returns false', () => {
let queue = new QueueImpl(3)
queue.push("ola")
expect(false).toBe(queue.isEmpty())
queue.pop()
queue.push("ola")
queue.push("ola")
expect(false).toBe(queue.isEmpty())
queue.pop()
queue.push("ola")
queue.push("ola")
queue.push("ola")
expect(false).toBe(queue.isEmpty())
})
test('Size is correct', () => {
let queue = new QueueImpl(3)
queue.push("ola")
expect(1).toBe(queue.size())
queue.pop()
expect(0).toBe(queue.size())
queue.pop()
queue.push("ola")
queue.push("ola")
queue.push("ola")
expect(3).toBe(queue.size())
})
|
import { NgModule } from '@angular/core';
import { BrowserModule } from '@angular/platform-browser';
import { AppRoutingModule } from './app-routing.module';
import { AppComponent } from './app.component';
import { HomeComponent } from './home/home.component';
import { AboutComponent } from './about/about.component';
import { SkillsComponent } from './skills/skills.component';
import { EducationComponent } from './education/education.component';
import { ExperienceComponent } from './experience/experience.component';
import { Router,RouterModule } from '@angular/router';
import { FooterComponent } from './footer/footer.component';
import { NavbarComponent } from './navbar/navbar.component';
import { FontAwesomeModule } from '@fortawesome/angular-fontawesome';
import { ReactiveFormsModule,FormsModule} from '@angular/forms';
import { ContactnewComponent } from './contactnew/contactnew.component';
@NgModule({
  declarations: [
    AppComponent,
    HomeComponent,
    AboutComponent,
    SkillsComponent,
    EducationComponent,
    ExperienceComponent,
    FooterComponent,
    NavbarComponent,
    ContactnewComponent,
  ],
  imports: [
    BrowserModule,
    AppRoutingModule,
    RouterModule,
    FontAwesomeModule,
    ReactiveFormsModule,
    FormsModule
  ],
  // Router must NOT be listed in `providers`: it is already provided by
  // RouterModule (via AppRoutingModule), and re-providing it creates a
  // second Router instance that breaks navigation.
  providers: [],
  bootstrap: [AppComponent]
})
export class AppModule { }
|
//Given a year and a day-of-year, print the corresponding year, month and day.
//Example: input 2000 and 61 -> output 2000-3-1.
void month_day(int year, int yearday, int *pmonth,int *pday);
/* Read a year and a day-of-year from stdin, convert via month_day(), and
   print the date as "year-month-day". */
int main (void)
{   int day, month, year, yearday;
    /* Fixed: the original literal used typographic quotes (“ ”), which are
       not valid C string delimiters and fail to compile. */
    printf ("input year and yearday: ");
    scanf ("%d%d", &year, &yearday );
    month_day (year, yearday, &month, &day );
    printf ("%d-%d-%d \n", year, month, day );
    return 0;
}
/* Convert (year, day-of-year) into a calendar month (*pmonth) and
   day-of-month (*pday). yearday is 1-based (1 = Jan 1). */
void month_day ( int year, int yearday, int * pmonth, int * pday)
{
    /* Days per month; row 1 applies in leap years. Index 0 is padding so
       months are 1-based. */
    static const int days_in_month[2][13] = {
        {0, 31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31},
        {0, 31, 29, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31}
    };
    /* Gregorian leap-year rule. */
    int leap = (year % 4 == 0 && year % 100 != 0) || year % 400 == 0;
    /* Walk forward through the months, consuming whole months from yearday. */
    int month = 1;
    while (yearday > days_in_month[leap][month]) {
        yearday -= days_in_month[leap][month];
        month++;
    }
    *pmonth = month;
    *pday = yearday;
}
|
const { Review } = require("../../models/review");
const { HttpError, ctrlWrapper } = require("../../helpers");
// GET — paginated list of all reviews (defaults: page 1, 20 per page).
// NOTE(review): page/limit arrive as strings from req.query; Mongoose
// coerces them for skip/limit — confirm validation happens upstream.
async function getAll(req, res) {
const { page = 1, limit = 20 } = req.query;
const skip = (page - 1) * limit;
const reviewsAll = await Review.find({}, {}, { skip, limit });
res.status(200).json(reviewsAll);
}
// GET — the authenticated user's own review; 404 when none exists.
async function getOwnReview(req, res) {
  const { _id: owner } = req.user;
  const ownReview = await Review.find({ owner });
  // Model.find resolves to an array, which is always truthy, so the
  // original `!ownReview` guard was dead code and the 404 was never sent.
  // Check emptiness instead (matching the length check in addReview).
  if (!ownReview || ownReview.length === 0) {
    throw HttpError(404, "Not found");
  }
  res.status(200).json(ownReview);
}
// POST — create the authenticated user's review. A user may have at most
// one review, so an existing one yields 409 Conflict.
async function addReview(req, res) {
const { _id: owner, name, avatarURL } = req.user;
const ownReview = await Review.find({ owner });
if (ownReview && ownReview.length !== 0) {
throw HttpError(409, "Conflict");
}
// Owner identity fields are taken from the authenticated user, not the body.
const newReview = await Review.create({
...req.body,
owner,
name,
avatarURL,
});
res.status(201).json(newReview);
}
// PATCH/PUT — update the authenticated user's review in place; 404 when
// the user has no review.
// NOTE(review): responds 201 on update; 200 would be conventional —
// confirm clients do not depend on the current status code.
async function updateReview(req, res) {
const { _id: owner, avatarURL } = req.user;
const updatedReview = await Review.findOneAndUpdate(
{ owner },
{ ...req.body, avatarURL },
{
// Return the post-update document instead of the original.
new: true,
}
);
if (!updatedReview) {
throw HttpError(404, "Not found");
}
res.status(201).json(updatedReview);
}
// DELETE — remove the authenticated user's review; 404 when none exists.
async function deleteReview(req, res) {
  const { _id: owner } = req.user;
  const removed = await Review.findOneAndDelete({ owner });
  if (!removed) {
    throw HttpError(404, "Not found");
  }
  res.status(200).json({ message: "review deleted" });
}
module.exports = {
getAll: ctrlWrapper(getAll),
addReview: ctrlWrapper(addReview),
getOwnReview: ctrlWrapper(getOwnReview),
deleteReview: ctrlWrapper(deleteReview),
updateReview: ctrlWrapper(updateReview),
};
|
const jwt = require('jsonwebtoken');
/**
* this function called without arguments returns a middleware
* that parses user-data from JWT token.
* If any role is given and current user role
* is not equales to a given role, its throws an error
*
* this middleware will be linked to any request that needs
* a special user role for access like POST '/api/user/device'
*/
module.exports = function(role){
return function(req, res, next){
if(req.method === "OPTIONS"){
next();
}
try{
const token = req.headers.authorization.split(' ')[1]; // 'JWT dsdf9sdf9sd0f'
if(!token){
res.status(401).json({message: 'No authorized'})
}
const decoded = jwt.verify(token, process.env.SECRET_KEY);
if(role && decoded.role !== role){
next(res.status(403).json({message: 'No access'}))
}
req.user = decoded;
next(); // calling next middleware
} catch(e){
res.status(401).json({message: 'No authorized'})
}
}
}
|
let score = JSON.parse(localStorage.getItem('score')) || {
wins: 0,
losses: 0,
ties: 0
};
updateScore();
document.body.addEventListener('keydown', (event) => {
if (event.key === 'r'){
playGame('rock');
} else if (event.key === 'p') {
playGame('paper');
}else if (event.key === 's') {
playGame('scissors');
} else if (event.key === 'a') {
playGame(autoPlay());
}
})
const rockButton = document.querySelector('.js-rock');
rockButton.addEventListener('click', () => {
playGame('rock');
});
const paperButton = document.querySelector('.js-paper');
paperButton.addEventListener('click', () => {
playGame('paper');
})
const scissorsButton = document.querySelector('.js-scissors');
scissorsButton.addEventListener('click', () => {
playGame('scissors');
})
// Play one round: pick the computer's move, decide the outcome, update the
// persisted score, and render moves + result into the page.
function playGame(playerMove) {
const computerMove = pickComputerMove();
let result = '';
// Outcome table: each player move vs each computer move.
if (playerMove === 'rock') {
if (computerMove === 'rock') {
result = 'Tie';
}else if (computerMove === 'paper') {
result = 'You Loss';
}else if (computerMove === 'scissors') {
result = 'You Win';
}
} else if (playerMove === 'paper') {
if (computerMove === 'rock') {
result = 'You Win';
}else if (computerMove === 'paper') {
result = 'Tie';
}else if (computerMove === 'scissors') {
result = 'You Loss';
}
}else if ( playerMove === 'scissors') {
if (computerMove === 'rock') {
result = 'You Loss';
} else if (computerMove === 'paper') {
result = 'You Win';
}else if (computerMove === 'scissors') {
result = 'Tie';
}
}
// Tally the outcome on the shared score object.
if ( result === 'You Win') {
score.wins++;
}else if (result === 'You Loss') {
score.losses+= 1;
}else if (result === 'Tie') {
score.ties+= 1;
}
// Persist and re-render the score, then show this round's moves and result.
localStorage.setItem('score', JSON.stringify(score));
updateScore();
document.querySelector('.js-moves')
.innerHTML = `You <img src="images/${playerMove}.png" alt=""> Computer <img src="images/${computerMove}.png" alt="">`;
document.querySelector('.js-result').innerHTML = `${result}`;
}
function resetScore() {
score = {
wins: 0,
losses: 0,
ties: 0
};
localStorage.removeItem('score');
updateScore();
}
function updateScore(){
document.querySelector('.js-score').innerHTML = `Wins: ${score.wins}, Losses: ${score.losses}, Ties: ${score.ties}`;
}
// Uniformly pick one of the three moves for the computer by splitting
// [0, 1) into equal thirds.
function pickComputerMove() {
  const roll = Math.random();
  let computerMove = '';
  if (roll < 1 / 3) {
    computerMove = 'rock';
  } else if (roll < 2 / 3) {
    computerMove = 'paper';
  } else {
    computerMove = 'scissors';
  }
  return computerMove;
}
// Auto-play state: toggled by autoPlay(); intervalId tracks the active loop.
let isAutoPlaying = false;
let intervalId;
// Toggle auto-play: when off, start playing a random move every second;
// when already on, stop the loop.
function autoPlay() {
if (!isAutoPlaying) {
intervalId = setInterval(function(){
// Reuse the computer's random picker for the player's move.
const playerMove = pickComputerMove();
playGame(playerMove);
},1000);
isAutoPlaying = true;
} else {
clearInterval(intervalId);
isAutoPlaying = false;
}
}
|
import { ComponentFixture, TestBed } from '@angular/core/testing';
import { RouterTestingModule } from '@angular/router/testing';
import { AppComponent } from './app.component';
import { AngularMaterialModule } from './shared/modules/angular-material/angular-material.module';
describe('AppComponent', () => {
  let app: AppComponent;
  let fixture: ComponentFixture<AppComponent>;

  // Compile the component with its routing and Material dependencies
  // before every spec.
  beforeEach(async () => {
    await TestBed.configureTestingModule({
      imports: [RouterTestingModule, AngularMaterialModule],
      declarations: [AppComponent],
    }).compileComponents();
  });

  // Instantiate a fresh component and run initial change detection.
  beforeEach(() => {
    fixture = TestBed.createComponent(AppComponent);
    app = fixture.componentInstance;
    fixture.detectChanges();
  });

  it('should create the app', () => {
    expect(app).toBeTruthy();
  });

  it(`should have as title 'star-wars-characters'`, () => {
    expect(app.title).toEqual('star-wars-characters');
  });

  it('should render title', () => {
    // A dedicated fixture so this spec controls its own change detection.
    const renderFixture = TestBed.createComponent(AppComponent);
    renderFixture.detectChanges();
    const element = renderFixture.nativeElement as HTMLElement;
    expect(element.querySelector('.app__title')?.textContent).toContain(
      app.title
    );
  });
});
|
<section class="restaurant-main-page py-2 mt-5">
<button type="button" class="btn btn-outline-primary btn-sm" data-bs-toggle="modal" data-bs-target="#add-restaurant" data-bs-whatever="@mdo">Add Restaurant</button>
<div class="resto-index my-3">
<table class="table caption-top">
<!-- <caption>List of restaurants</caption> -->
<thead class="table-thead">
<tr>
<th scope="col">#</th>
<th scope="col"><b>Name</b></th>
<th scope="col"><b>Adress</b></th>
<th scope="col"><b>Open On</b></th>
<th scope="col"><b>Open Time</b></th>
<th scope="col"><b>Close Time</b></th>
<th scope="col"><b>Actions</b></th>
</tr>
</thead>
<tbody class="resto-body-tr">
<% @restaurants.each_with_index do |restaurant, index|%>
<tr style="height: 20px;">
<th scope="row"><%= index+1%></th>
<td><%= restaurant.name%></td>
<td><%= restaurant.address%></td>
<td><%= restaurant.opened_on%></td>
<td><%= restaurant.open_time.strftime("%I:%M%p")%></td>
<td><%= restaurant.close_time.strftime("%I:%M%p")%></td>
<td>
<button type="button" class="edit-button-resto" data-bs-toggle="modal" data-bs-target="#edit-restaurant-<%= restaurant.id%>" title="edit">
<i class="fa-solid fa-pen-to-square fa-shake" style="color: #ada00e;"></i>
</button>
<button type="button" class="edit-button-resto" data-bs-toggle="modal" data-bs-target="#show-restaurant-<%= restaurant.id%>" title="show">
<i class="fa-solid fa-eye fa-beat" style="color: #032663;"></i>
</button>
<%= link_to "<i class='fa-solid fa-trash fa-bounce' style='color: red;'></i>".html_safe, restaurant_owner_path(restaurant), method: :delete, data: { confirm: "Are you sure?"}, title:"delete"%>
<button type="button" class="btn btn-outline-primary btn-sm menu-add" title="add menu" data-id="<%= restaurant.id%>">
Add Menu
</button>
<%= link_to "items", restaurant_owner_path(restaurant), class:"btn btn-sm btn-primary"%>
</td>
</tr>
<!-- edit restaurant details-->
<div class="modal fade" id="edit-restaurant-<%= restaurant.id%>" tabindex="-1" aria-labelledby="restaurantLable" aria-hidden="true">
<div class="modal-dialog">
<%= render "restaurant/owners/form", restaurant: restaurant, path: restaurant_owner_url(restaurant)%>
</div>
</div>
<!-- show restaurant details -->
<div class="modal fade" id="show-restaurant-<%= restaurant.id%>" tabindex="-1" aria-labelledby="restaurantLable" aria-hidden="true">
<div class="modal-dialog">
<%= render "restaurant/owners/show", restaurant: restaurant%>
</div>
</div>
<!-- Add menus for restaurant -->
<div class="card add-menu-resto mx-2 px-2 d-none" id="add-menu-resto-<%= restaurant.id%>">
<h5 class="text-center py-2"><%= restaurant.name%></h5>
<%= render "restaurant/owners/add_menu", restaurant: restaurant, menu: Menu.new, path: restaurant_menus_path%>
</div>
<%end%>
</tbody>
</table>
</div>
</section>
<!-- Add new restaurant -->
<div class="modal fade" id="add-restaurant" tabindex="-1" aria-labelledby="restaurantLable" aria-hidden="true">
<div class="modal-dialog">
<%= render "restaurant/owners/form", restaurant: Restaurant.new, path: restaurant_owners_url%>
</div>
</div>
<style type="text/css">
i{
font-size: 20px;
padding: 2px 8px;
}
.edit-button-resto{
background: none;
color: inherit;
border: none;
padding: 0;
font: inherit;
cursor: pointer;
outline: inherit;
}
.add-menu-resto{
width: 30%;
box-shadow: 0px 0px 10px #00000026;
background: #253893;
color: white;
font-weight: 600;
}
.table{
/* display: block;*/
border-collapse: collapse;
border-radius: 10px !important;
overflow: hidden;
box-shadow: 0px 0px 10px #00000026;
}
/* thead{
background: #253893 !important;
}
thead th{
color: white !important;
}*/
.add-flex{
display: flex;
flex-direction: row-reverse !important;
gap: 20px;
}
</style>
<script type="text/javascript">
$(".menu-add").click(function(){
$(".add-menu-resto").each(function( index ) {
$(`.add-menu-resto`).addClass('d-none')
});
var id = $(this).attr('data-id')
$(".resto-index").addClass('add-flex')
$(`#add-menu-resto-${id}`).addClass('d-block')
$(`#add-menu-resto-${id}`).removeClass('d-none')
})
</script>
|
<template>
<div class="page-container" id="top-container">
<div class="product-intro">
<div class="header-bg header-ipf"></div>
<div class="header-overlay"></div>
<div class="content p-4">
<div class="col is-flex">
<div class="title-wrapper">
<h1 class="title">Insurance Premium Financing</h1>
<h1 class="subtitle">#KeepTheMomentum</h1>
<p>
Dial
<a href="tel:*674#" class="underline hover:text-white">*674#</a> to get a quote
</p>
<div class="mt-8">
<a
href="#"
class="btn lg font-bold uppercase text-white"
v-scroll-to="'#after-header'"
>Learn More</a>
</div>
</div>
</div>
<div class="col">
<form
class="calculator-form calculator-form-updated"
id="insurance-premium-form"
@submit.prevent="submitForm()"
>
<div class="form-title-wrapper">
<h5>SUBMIT DETAILS</h5>
</div>
<div class="input-row pt-6 pb-4">
<div class="input-item">
<label for="vehicleType">Vehicle Class</label>
<select v-model="form.vehicleType" id="vehicleType">
<option
:value="index"
v-for="(vehicle, index) in vehicleClass"
:key="index"
>{{vehicle}}</option>
</select>
</div>
<div class="input-item">
<label for="typeOfCove">Type of Cover</label>
<select v-model="form.coverType" id="typeOfCover">
<option value="Comprehensive">Comprehensive</option>
<option value="Third Party Only">Third Party Only</option>
</select>
</div>
</div>
<div class="input-group px-6 py-2 hidden">
<div class="bg-white">
<button
class="p-4 py-3 pointer block w-full border relative flex justify-center items-center"
type="button"
@click="showAddOns = !showAddOns"
>
Add Ons
<span :class="{'icon-caret':true, 'up':showAddOns}"></span>
</button>
<div class="p-4" v-if="showAddOns">
<div class="flex items-center mb-2">
<input
type="checkbox"
v-model="form.excessWaiver"
id="excess-waiver"
class="mr-3"
/>
<label for="excess-waiver">Excess Waiver/Protector</label>
</div>
<div class="flex items-center mb-2">
<input type="checkbox" v-model="form.pvts" id="pvts" class="mr-3" />
<label for="pvts">PVTS</label>
</div>
<div class="flex items-center mb-2">
<input type="checkbox" v-model="form.roadRescue" id="road-rescue" class="mr-3" />
<label for="road-rescue">Road Rescue</label>
</div>
<div class="flex items-center mb-2">
<input
type="checkbox"
id="personal-accident"
v-model="form.personalAccident"
class="mr-3"
/>
<label for="personal-accident">Personal Accident</label>
</div>
<div class="flex items-center mb-2">
<input
type="checkbox"
id="courtesy-car"
style="pointer-events: none;"
readonly
:checked="form.courtesyCar == '10' || form.courtesyCar == '21'"
class="mr-3"
/>
<label for="courtesy-car" class="mr-3">Courtesy Car</label>
<select v-model="form.courtesyCar" class="border border-gray-600 rounded-sm">
<option value="No">No</option>
<option value="10">10 Days</option>
<option value="21">21 Days</option>
</select>
</div>
</div>
</div>
</div>
<div class="input-group">
<div class="input-container">
<label>Sum Insured</label>
<div>
<button
type="button"
@click="form.sumInsured > 0 ? form.sumInsured = form.sumInsured - 10000 : 0"
>
<img src="~/assets/img/input-minus.png" alt="Minus" />
</button>
<div class="input-text">
<input type="text" v-model="amount" required @keyup="amountUpdated($event)" />
<span>KES</span>
</div>
<button
type="button"
@click="form.sumInsured < 10000000 ? form.sumInsured = form.sumInsured + 10000 : 0"
>
<img src="~/assets/img/input-add.png" alt="Add" />
</button>
</div>
</div>
<div class="input-range-container">
<no-ssr>
<vue-range-slider
:max="10000000"
:min="0"
:step="10000"
:bg-style="rangeBackgroundStyle"
:tooltip-style="rangeTooltipStyle"
:process-style="rangeProgressStyle"
:piecewise-style="rangePiecewiseStyle"
v-model="form.sumInsured"
></vue-range-slider>
</no-ssr>
</div>
</div>
<div class="input-group pt-6">
<div class="input-container">
<label>Number of Instalments</label>
<div>
<button
type="button"
@click="form.numberOfInstalments > 0 ? form.numberOfInstalments = --form.numberOfInstalments : 0"
>
<img src="~/assets/img/input-minus.png" alt="Minus" />
</button>
<div class="input-text">
<input type="text" v-model="form.numberOfInstalments" required />
<span>{{activePaymentFrequnecy}}</span>
</div>
<button
type="button"
@click="form.numberOfInstalments < activeMaxLoanTerms ? form.numberOfInstalments = ++form.numberOfInstalments : 0"
>
<img src="~/assets/img/input-add.png" alt="Add" />
</button>
</div>
</div>
<div class="input-range-container">
<no-ssr>
<vue-range-slider
:max="activeMaxLoanTerms"
:min="0"
:step="1"
:bg-style="rangeBackgroundStyle"
:tooltip-style="rangeTooltipStyle"
:process-style="rangeProgressStyle"
:piecewise-style="rangePiecewiseStyle"
v-model="form.numberOfInstalments"
></vue-range-slider>
</no-ssr>
</div>
</div>
<div class="input-group pt-6">
<div class="input-container">
<label class="calc-results-label">Basic Premium</label>
<div>
<div class="input-text results">
<input readonly type="text" value="0.00" required v-model="form.basicPremium" />
<span>KES</span>
</div>
</div>
</div>
</div>
<div class="input-group">
<div class="input-container">
<label class="calc-results-label">Instalment</label>
<div>
<div class="input-text results">
<input
readonly
type="text"
value="0.00"
required
v-model="form.amountPerInstalment"
/>
<span>KES</span>
</div>
</div>
</div>
</div>
<div class="input-group">
<div class="input-container">
<label class="calc-results-label">Deposit</label>
<div>
<div class="input-text results">
<input readonly type="text" value="0.00" required v-model="form.deposit" />
<span>KES</span>
</div>
</div>
</div>
</div>
<div class="input-row pt-6">
<div class="input-item w-full">
<label>Name</label>
<input
type="text"
v-model="form.user_name"
required
placeholder="Name"
class="w-full"
/>
</div>
</div>
<div class="input-row pt-6 pb-6">
<div class="input-item">
<label>Phone</label>
<input
type="tel"
v-model="form.user_phone"
required
placeholder="Phone"
class="w-full"
@focus="phoneIsInvalid = false"
/>
</div>
<div class="input-item">
<label>Email</label>
<input
type="email"
v-model="form.user_email"
required
placeholder="Email"
class="w-full"
/>
</div>
</div>
<span
v-if="phoneIsInvalid"
class="input-error-message text-sm pt-1 pl-4 text-red-700 block text-center"
>Phone number is invalid!</span>
<div class="p-1 pb-6 px-6 text-center" v-if="formSubmitted">
<div class="text-sm text-green-700">
Thank you for your request. One of our representative will get back to you as soon as possible.
<!-- Your quote is ready to download. Click <a href="#" class="text-yellow">here</a> to download. -->
</div>
</div>
<div class="p-6 text-center" v-if="!formSubmitted">
<button type="submit" class="uppercase btn submit-btn">
<strong class="px-6 text-white">GET A QUOTE</strong>
</button>
</div>
</form>
</div>
</div>
</div>
<section class="icons-section hidden">
<div class="section-container">
<div class="icons-container cols-3 title-only">
<div>
<img
src="~/assets/img/money-bag-icon@2x.png"
alt="Only 20% deposit & flexible repayments up to 11 months"
/>
<h4>Only 20% deposit & flexible repayments up to 11 months</h4>
</div>
<div>
<img
src="~/assets/img/calendar-icon@2x.png"
alt="Market beating premium rates & Interest rates as low as 2% per month"
/>
<h4>Market beating premium rates & Interest rates as low as 2% per month</h4>
</div>
<div>
<img src="~/assets/img/clock-icon@2x.png" alt="Same day processing" />
<h4>Same day processing</h4>
</div>
</div>
</div>
</section>
<section class="text-image-grid" id="after-header">
<div class="section-container">
<div class="text-col">
<h3 class="section-title">Insurance Premium Financing</h3>
<p
class="mb-4"
>Deposit 20% and secure insurance premium financing with flexible repayments of up to 11 months with competitive premium rates and affordable interests. All that is required is an upfront deposit of 20% and the insurance certificate is processed on the same day. We offer flexible repayments of up to 11 months and competitive premium rates at affordable interest rates.</p>
<nuxt-link
to="/application/insurance-premium-financing"
class="btn lg font-bold uppercase text-white"
>Apply Now</nuxt-link>
</div>
<div class="img-col">
<img src="~/assets/img/ipf-page-image@2x.png" alt="Insurance Premium Financing<" />
</div>
</div>
</section>
<section class="card-stripe-section hidden">
<div class="section-container">
<div class="card">
<h4>
You can also Top-up loans by dialling;
<strong>*674#</strong>.
</h4>
</div>
</div>
</section>
<section class="icons-section hidden">
<div class="section-container">
<h3 class="section-title text-center">Why Choose Us</h3>
<div class="icons-container cols-4">
<div>
<img src="~/assets/img/speed-clock-icon@2x.png" alt="Fast" />
<h4>Fast</h4>
<p>We have a turnaround time promise of 24 hours on all our products and services.</p>
</div>
<div>
<img src="~/assets/img/handshake-icon@2x.png" alt="Reliable" />
<h4>Reliable</h4>
<p>We strive to always deliver on our promises.</p>
</div>
<div>
<img src="~/assets/img/puzzle-icon@2x.png" alt="Solution Oriented" />
<h4>Solution Oriented</h4>
<p>We believe in structuring effective solutions to meet your needs.</p>
</div>
<div>
<img src="~/assets/img/magnify-glass-icon@2x.png" alt="Transparent" />
<h4>Transparent</h4>
<p>There are no hidden charges in our fee structures.</p>
</div>
</div>
</div>
</section>
<section class="icons-section">
<div class="section-container">
<h3 class="section-title text-center">Benefits of Insurance Premium Financing</h3>
<div class="icons-container cols-4 title-only">
<div>
<img
src="~/assets/img/car-icon@2x.png"
alt="Only 20% deposit & flexible repayments of up to 11 months."
/>
<h4>Only 20% deposit & flexible repayments of up to 11 months</h4>
</div>
<div>
<img
src="~/assets/img/hand-cash-icon@2x.png"
alt="Market beating premium rates & Interest rates as low as 2% per month"
/>
<h4>Market beating premium rates & Interest rates as low as 2% per month</h4>
</div>
<div>
<img src="~/assets/img/badge-icon@2x.png" alt="Same day processing" />
<h4>Same day processing</h4>
</div>
<div>
<img src="~/assets/img/settlement-fee-2.png" alt="No early settlement fees" />
<h4>No early settlement fees</h4>
</div>
</div>
</div>
</section>
<section class="m-blue-bg">
<div class="section-container">
<h3 class="section-title text-center">Basic Requirements</h3>
<div class="card-grid">
<div class="card-wrapper">
<div class="card">
<h4>Individual Application</h4>
<ul>
<li>
<img src="~/assets/img/star@2x.png" alt />
<span>Copy of Logbook</span>
</li>
<li>
<img src="~/assets/img/star@2x.png" alt />
<span>Copy of ID</span>
</li>
<li>
<img src="~/assets/img/star@2x.png" alt />
<span>KRA Pin</span>
</li>
<li>
<img src="~/assets/img/star@2x.png" alt />
<span>Insurance Premium Quote</span>
</li>
</ul>
</div>
</div>
<div class="card-wrapper">
<div class="card">
<h4>Corporate Application</h4>
<ul>
<li>
<img src="~/assets/img/star@2x.png" alt />
<span>Copy of Logbook</span>
</li>
<li class>
<img src="~/assets/img/star@2x.png" alt />
<span>Certificate of registration/Incorporation</span>
</li>
<li>
<img src="~/assets/img/star@2x.png" alt />
<span>KRA Pin</span>
</li>
<li>
<img src="~/assets/img/star@2x.png" alt />
<span>Insurance Premium Quote</span>
</li>
</ul>
</div>
</div>
</div>
<div class="text-center">
<!-- <nuxt-link
to="/application/insurance-premium-financing"
class="btn secondary lg font-bold text-white uppercase"
>Apply Now</nuxt-link> -->
<a
href="#"
class="btn secondary lg font-bold text-white uppercase"
v-scroll-to="'#top-container'"
>Apply Now</a>
</div>
</div>
</section>
<section class="bg-white">
<div class="section-container">
<h3 class="section-title text-center">FAQs</h3>
<div class="faq-container">
<div class="row">
<div class="faq large-6">
<a href="#" class="quiz" @click.prevent="faq = 1">What is Insurance Premium Financing?</a>
<transition name="fade" mode="out-in">
<div class="ans" v-if="faq == 1">
<p>
Insurance premium financing is a facility that allows you to make the required lump sum premium to an
insurance company while giving you the benefit of paying-off the amount in instalments to your financier.
</p>
</div>
</transition>
</div>
<div class="faq large-6">
<a href="#" class="quiz" @click.prevent="faq = 2">Do you finance all types of policies?</a>
<transition name="fade" mode="out-in">
<div class="ans" v-if="faq == 2">
<p>
Being a pioneer product we currently only finance motor vehicles and we are open to exploring the others
in the future.
</p>
</div>
</transition>
</div>
<div class="faq large-6">
<a
href="#"
class="quiz"
@click.prevent="faq = 3"
>Will you pay the money to me so that I can forward it to the Insurance Company?</a>
<transition name="fade" mode="out-in">
<div class="ans" v-if="faq == 3">
<p>
No. The funds are paid directly to the Insurance Company. Your agent is informed immediately this is done
and the same communication is forwarded to you.
</p>
</div>
</transition>
</div>
<div class="faq large-6">
<a href="#" class="quiz" @click.prevent="faq = 4">Do you charge any extra fees?</a>
<transition name="fade" mode="out-in">
<div class="ans" v-if="faq == 4">
<p>Yes, a processing fee of 4%.</p>
</div>
</transition>
</div>
<div class="faq large-6">
<a href="#" class="quiz" @click.prevent="faq = 5">Will you finance 100% of my premium?</a>
<transition name="fade" mode="out-in">
<div class="ans" v-if="faq == 5">
<p>No. We finance 80% to a minimum of Kshs. 2,500 and you’ll be required to pay a 20% deposit as a commitment.</p>
</div>
</transition>
</div>
<div class="faq large-6">
<a
href="#"
class="quiz"
@click.prevent="faq = 6"
>What’s the minimum and maximum amount that you can finance?</a>
<transition name="fade" mode="out-in">
<div class="ans" v-if="faq == 6">
<p>We finance a minimum of Kshs. 2,500, and a maximum of Kshs. 2,000,000.</p>
</div>
</transition>
</div>
<div class="faq large-6">
<a href="#" class="quiz" @click.prevent="faq = 7">How do I know how much I’ll be paying?</a>
<transition name="fade" mode="out-in">
<div class="ans" v-if="faq == 7">
<p>Once you get in touch with one of our agents, you will receive a quotation with the estimated instalments.</p>
</div>
</transition>
</div>
<div class="faq large-6">
<a
href="#"
class="quiz"
@click.prevent="faq = 8"
>What happens if I do not know the market value of my motor vehicle?</a>
<transition name="fade" mode="out-in">
<div class="ans" v-if="faq == 8">
<p>Your agent will assist you to conduct the valuation with a registered valuer of your preferred insurance company.</p>
</div>
</transition>
</div>
<div class="faq large-6">
<a
href="#"
class="quiz"
@click.prevent="faq = 9"
>Do I require security for the facility?</a>
<transition name="fade" mode="out-in">
<div class="ans" v-if="faq == 9">
<p>No. The initial 20% deposit paid will be your 'security'</p>
</div>
</transition>
</div>
<div class="faq large-6">
<a href="#" class="quiz" @click.prevent="faq = 10">What is used to secure the facility?</a>
<transition name="fade" mode="out-in">
<div class="ans" v-if="faq == 10">
<p>The facility is unsecured. No property is required to cover the borrowing.</p>
</div>
</transition>
</div>
<div class="faq large-6">
<a
href="#"
class="quiz"
@click.prevent="faq = 11"
>How long will it take to process the Insurance Certificate/Sticker?</a>
<transition name="fade" mode="out-in">
<div class="ans" v-if="faq == 11">
<p>Within 2 hours, subject to payment of the 20% deposit and submission of all the required documentation</p>
</div>
</transition>
</div>
<div class="faq large-6">
<a href="#" class="quiz" @click.prevent="faq = 12">What happens if I default?</a>
<transition name="fade" mode="out-in">
<div class="ans" v-if="faq == 12">
<p>
We will give you a seven days’ notice of cancellation of the insurance policies financed upon which we will
instruct the insurance company to cancel the policy.
</p>
</div>
</transition>
</div>
<div class="faq large-6">
<a
href="#"
class="quiz"
@click.prevent="faq = 13"
>Can my policy be reinstated once cancelled?</a>
<transition name="fade" mode="out-in">
<div class="ans" v-if="faq == 13">
<p>Yes, the policy can only be reinstated after seven days of cancellation.</p>
</div>
</transition>
</div>
<div class="faq large-6">
<a
href="#"
class="quiz"
@click.prevent="faq = 14"
>What charges are associated with reinstatement?</a>
<transition name="fade" mode="out-in">
<div class="ans" v-if="faq == 14">
<p>
You will be required to clear all arrears, make an advance payment of one instalment and a reinstatement
fee of Kshs. 1,000.
</p>
</div>
</transition>
</div>
</div>
<!-- FAQs -->
</div>
</div>
</section>
</div>
</template>
<script>
export default {
name: "IPF",
head: {
title: "Momentum Credit – Insurance Premium Financing",
meta: [
{ charset: "utf-8" },
{ name: "viewport", content: "width=device-width, initial-scale=1" },
{
hid: "keywords",
name: "keywords",
content:
"momentum,credit,smes,IPF,insurance,kenya,nairobi,premiums,borrow,lend,financing,loans,payments",
},
{
hid: "title",
name: "title",
content: "Momentum Credit – Insurance Premium Financing",
},
{
hid: "description",
name: "description",
content:
"Deposit 20% and secure insurance premium financing with flexible repayments of up to 11 months with competitive premium rates and affordable interests. ",
},
{
hid: "og:type",
property: "og:type",
content: "website",
},
{
hid: "og:url",
property: "og:url",
content: "https://momentumcredit.co.ke/",
},
{
hid: "og:title",
property: "og:title",
content: "Momentum Credit – Insurance Premium Financing",
},
{
hid: "og:description",
property: "og:description",
content:
"Deposit 20% and secure insurance premium financing with flexible repayments of up to 11 months with competitive premium rates and affordable interests. ",
},
{
hid: "og:image",
property: "og:image",
content: "https://momentumcredit.co.ke/img/cover.jpg",
},
{
hid: "twitter:card",
property: "twitter:card",
content: "summary_large_image",
},
{
hid: "og:url",
property: "og:url",
content: "https://momentumcredit.co.ke/",
},
{
hid: "twitter:title",
property: "twitter:title",
content: "Momentum Credit – Insurance Premium Financing",
},
{
hid: "twitter:description",
property: "twitter:description",
content:
"Deposit 20% and secure insurance premium financing with flexible repayments of up to 11 months with competitive premium rates and affordable interests. ",
},
{
hid: "twitter:image",
property: "twitter:image",
content: "https://momentumcredit.co.ke/img/cover.jpg",
},
],
link: [
{ rel: "icon", href: "/img/favicon-196.png" },
{
rel: "apple-touch-icon-precomposed",
href: "/img/apple-touch-icon-precomposed.png",
},
{
rel: "stylesheet",
fref:
"https://fonts.googleapis.com/css2?family=Lato:wght@300;400;700;900&display=swap",
},
],
},
// Component state. The per-vehicle-class arrays below are PARALLEL lookup
// tables indexed by `form.vehicleType`:
//   0 = Motor Private, 1 = Taxi, 2 = Motor Commercial Own Goods,
//   3 = Motor Commercial General Cartage, 4 = Tuk Tuk, 5 = Boda Boda.
data() {
return {
phoneIsInvalid: false, // toggles the inline phone-validation error message
amount: null, // thousand-separated display mirror of form.sumInsured
faq: 1, // index of the currently expanded FAQ entry
formSubmitted: false, // swaps the submit button for a thank-you note
showAddOns: false, // expands the (currently hidden) add-ons panel
// Style hooks for vue-range-slider; populated in created().
rangePiecewiseStyle: {},
rangeBackgroundStyle: {},
rangeTooltipStyle: {},
rangeProgressStyle: {},
vehicleClass: [
"Motor Private",
"Taxi",
"Motor Commercial Own Goods",
"Motor Commercial General Cartage",
"Tuk Tuk",
"Boda Boda",
],
// Excess-waiver premium as a fraction of the sum insured, per class.
// NOTE(review): only 4 entries for 6 classes — indices 4/5 (Tuk Tuk,
// Boda Boda) would read undefined; confirm whether that is intended.
excessWaiverRates: [0.0025, 0.005, 0.005, 0.005],
// Minimum excess-waiver charge per class (KES).
excesWaiverMin: [2500, 5000, 5000, 5000, 0, 0],
// Flat road-rescue add-on per class (KES); only Motor Private is charged.
roadRescue: [6960, 0, 0, 0, 0, 0],
// Flat personal-accident add-on per class (KES).
personalAccident: [500, 500, 500, 500, 500, 500],
// Instalment cadence per class; drives the unit label next to the input.
paymentFrequnecy: [
"Monthly",
"Weekly",
"Monthly",
"Monthly",
"Weekly",
"Weekly",
],
// Maximum number of instalments selectable per class (slider upper bound).
maxLoanTerms: [11, 52, 11, 11, 27, 27],
// Periods-per-year divisor used when converting instalments to years.
calcMaxLoanTerms: [12, 52, 12, 12, 52, 52],
// Per-period interest rates used in the PMT instalment calculation.
ratesByBrian: [
0.03475,
0.00811370348,
0.04304,
0.03475,
0.009685082,
0.009685082,
],
// Comprehensive premium rate (fraction of sum insured) per class.
premiumRates: [0.035, 0.05, 0.045, 0.05, 0.05, 0.025],
// Minimum comprehensive premium per class (KES).
premiumMin: [20000, 30175, 30175, 30000, 15108, 0],
// Flat Third Party Only premium per class (KES).
tpoRates: [6000, 7574, 6500, 0, 7574, 6943],
// Active values for the currently selected class (kept in sync by the
// form.vehicleType watcher); defaults match Motor Private.
activeMaxLoanTerms: 11,
activePaymentFrequnecy: "Monthly",
activePremiumRates: 0.035,
// The quote form; basicPremium/amountPerInstalment/deposit are outputs
// written by hesabu() as formatted strings.
form: {
source: "Insurance Premium Financing",
user_name: "",
user_phone: "",
user_email: "",
vehicleType: 0,
coverType: "Comprehensive",
sumInsured: 50000,
numberOfInstalments: 11,
basicPremium: 0,
amountPerInstalment: 0,
deposit: 0,
excessWaiver: false,
pvts: false,
roadRescue: false,
personalAccident: false,
courtesyCar: "No",
},
};
},
// Watchers: every quote-affecting form field triggers a recalculation via
// hesabu(), after refreshing the thousand-separated display amount.
watch: {
"form.courtesyCar": function () {
this.amount = this.thousandSeprator(this.form.sumInsured);
this.hesabu();
},
"form.personalAccident": function () {
this.amount = this.thousandSeprator(this.form.sumInsured);
this.hesabu();
},
"form.roadRescue": function () {
this.amount = this.thousandSeprator(this.form.sumInsured);
this.hesabu();
},
"form.pvts": function () {
this.amount = this.thousandSeprator(this.form.sumInsured);
this.hesabu();
},
"form.excessWaiver": function () {
this.amount = this.thousandSeprator(this.form.sumInsured);
this.hesabu();
},
// Changing the vehicle class also swaps in that class's loan-term
// ceiling, payment cadence and premium rate, and resets the
// instalment slider before recalculating.
"form.vehicleType": function (index) {
this.amount = this.thousandSeprator(this.form.sumInsured);
//first we reset the range
this.form.numberOfInstalments = 1;
this.activeMaxLoanTerms = this.maxLoanTerms[index];
this.activePaymentFrequnecy = this.paymentFrequnecy[index];
this.activePremiumRates = this.premiumRates[index];
this.hesabu();
},
"form.coverType": function () {
this.amount = this.thousandSeprator(this.form.sumInsured);
this.hesabu();
},
"form.sumInsured": function () {
this.amount = this.thousandSeprator(this.form.sumInsured);
this.hesabu();
},
"form.numberOfInstalments": function () {
this.amount = this.thousandSeprator(this.form.sumInsured);
this.hesabu();
},
},
created() {
let self = this;
self.rangeBackgroundStyle = {
backgroundColor: "#435C5F",
};
self.rangeTooltipStyle = {
backgroundColor: "#E5BA5B",
borderColor: "#E5BA5B",
};
self.rangeProgressStyle = {
backgroundColor: "#E5BA5B",
};
self.rangePiecewiseStyle = {
backgroundColor: "#E5BA5B",
};
},
mounted() {
let self = this;
//always close the mobile menu
this.$store.commit("settings/closeMobileMenu");
//set default currency
this.amount = this.thousandSeprator(this.form.sumInsured);
self.hesabu();
},
methods: {
validatePhone(phone) {
if (
/^(?:254|\+254|0)?(7(?:(?:[123456789][0-9])|(?:0[0-8])|(4[0-1]))[0-9]{6})$/.test(
phone
)
) {
return true;
} else {
return false;
}
},
// Excel-style PMT: the fixed payment per period for a loan/annuity.
//   rate_per_period    — interest rate per period.
//   number_of_payments — total number of periods.
//   present_value      — principal today (sign follows Excel convention:
//                        pass a negative value to get a positive payment).
//   future_value       — balance desired after the last payment
//                        (optional, defaults to 0).
//   type               — 0 = payment due at period end (default),
//                        1 = payment due at period start.
// With a zero rate it falls back to straight-line division; with zero
// payments and zero rate it returns 0.
pmt(
rate_per_period,
number_of_payments,
present_value,
future_value,
type
) {
future_value = typeof future_value !== "undefined" ? future_value : 0;
type = typeof type !== "undefined" ? type : 0;
if (rate_per_period != 0.0) {
// Interest rate exists
var q = Math.pow(1 + rate_per_period, number_of_payments);
return (
-(rate_per_period * (future_value + q * present_value)) /
((-1 + q) * (1 + rate_per_period * type))
);
} else if (number_of_payments != 0.0) {
// No interest rate, but number of payments exists
return -(future_value + present_value) / number_of_payments;
}
return 0;
},
hesabu() {
let self = this;
let vehicleClassIndex = self.form.vehicleType;
let premiumCharged,
levies,
estimatedPremium,
premiumBeforeLeviesStampDuty,
totalBasicPremium,
actualPremiumSummary,
monthlyInstallments,
processingFee,
pmtRate,
nPer,
totalLoanAmount,
loanAmount,
estimatedInstalments,
depositAmount,
termOfLoanInYears,
periodsPerYear;
let stampDuty = 40;
let annualInterestRate = 41.7031 / 100;
let loanTerm = 1;
//AddOns
let withExcesssWaiver = 0;
let withPVTS = 0;
let withRoadRescue = 0;
let withPersonalAccident = 0;
let withCourtesyCar = 0;
//calculate the premium charged
if (self.form.coverType === "Comprehensive") {
premiumCharged = Math.max(
self.premiumRates[vehicleClassIndex] * self.form.sumInsured,
self.premiumMin[vehicleClassIndex]
);
} else {
//Third Party Only
premiumCharged = self.tpoRates[vehicleClassIndex];
}
console.log(`Vehicle Class ${self.vehicleClass[vehicleClassIndex]}`);
console.log(`Cover Type ${self.form.coverType}`);
console.log(`Sum Insured ${self.form.sumInsured}`);
console.log(`Premium Charged ${premiumCharged}`);
//Here is where we add the addons
//excessWaiver
if (self.form.excessWaiver) {
//get max value between h16*g20 and Vl (g14 a3:h7, 2)
withExcesssWaiver = Math.max(
self.form.sumInsured * self.excessWaiverRates[vehicleClassIndex],
self.excesWaiverMin[vehicleClassIndex]
);
console.log(`Excess Waiver is ${withExcesssWaiver}`);
}
//pvts
if (self.form.pvts) {
withPVTS = 0.0025 * self.form.sumInsured;
console.log(`PVTS is ${withPVTS}`);
}
//road rescue
if (self.form.roadRescue) {
withRoadRescue = self.roadRescue[vehicleClassIndex];
console.log(`Road Rescue is ${withRoadRescue}`);
}
//personal accident
if (self.form.personalAccident) {
withPersonalAccident = self.personalAccident[vehicleClassIndex];
console.log(`Personal Accident is ${withPersonalAccident}`);
}
//Coutesy Car
if (self.form.courtesyCar == "10") {
withCourtesyCar = 3000;
} else if (self.form.courtesyCar == "21") {
withCourtesyCar = 6000;
}
/* if it's third-party, just work with the premium charged, else, let's do some maths */
if (self.form.coverType !== "Third Party Only") {
if (self.vehicleClass[vehicleClassIndex] === "Boda Boda") {
//Bodaboda
estimatedPremium = premiumCharged;
} else if (self.vehicleClass[vehicleClassIndex] === "Motor Private") {
estimatedPremium =
parseInt(premiumCharged) +
parseInt(withExcesssWaiver) +
parseInt(withPVTS) +
parseInt(withRoadRescue) +
parseInt(withPersonalAccident) +
parseInt(withCourtesyCar);
} else if (self.vehicleClass[vehicleClassIndex] === "Taxi") {
//if taxi add 2000 (Passanger Legal iability)
estimatedPremium =
parseInt(premiumCharged) +
parseInt(withExcesssWaiver) +
parseInt(withPVTS) +
parseInt(withRoadRescue) +
parseInt(withPersonalAccident) +
parseInt(withCourtesyCar) +
parseInt(2000);
} else if (
self.vehicleClass[vehicleClassIndex] === "Motor Commercial Own Goods"
) {
estimatedPremium =
parseInt(premiumCharged) +
parseInt(withExcesssWaiver) +
parseInt(withPVTS) +
parseInt(withPersonalAccident);
} else if (
self.vehicleClass[vehicleClassIndex] ===
"Motor Commercial General Cartage"
) {
parseInt(premiumCharged) +
parseInt(withExcesssWaiver) +
parseInt(withPVTS) +
parseInt(withPersonalAccident);
} else if (self.vehicleClass[vehicleClassIndex] === "Tuk Tuk") {
//if tuk tuk add 1500 (Passanger Legal iability)
estimatedPremium =
parseInt(premiumCharged) +
parseInt(withExcesssWaiver) +
parseInt(withPVTS) +
parseInt(withPersonalAccident) +
parseInt(1500);
}
} else {
//rest is inclusive
estimatedPremium = premiumCharged;
}
//Here is where we add windscreen & R/C
premiumBeforeLeviesStampDuty = estimatedPremium;
levies = (0.45 / 100) * premiumBeforeLeviesStampDuty;
totalBasicPremium =
parseInt(premiumBeforeLeviesStampDuty) +
parseInt(levies) +
parseInt(stampDuty);
console.log(`Levies ${levies}`);
console.log(`Estimated Premium ${estimatedPremium}`);
console.log(
`Premium Before Levies/Stamp Duty ${premiumBeforeLeviesStampDuty}`
);
console.log(`Total Basic Premium ${totalBasicPremium}`);
//Actual Premium Summary
if (self.vehicleClass[vehicleClassIndex] == "Boda Boda") {
//if Boda Boda
actualPremiumSummary =
parseInt(premiumBeforeLeviesStampDuty) +
parseInt(levies) +
parseInt(stampDuty);
} else if (
self.vehicleClass[vehicleClassIndex] ==
"Motor Commercial General Cartage"
) {
//if Motor Commercial General Cartage
actualPremiumSummary =
parseInt(premiumBeforeLeviesStampDuty) +
parseInt(levies) +
parseInt(stampDuty);
} else {
actualPremiumSummary = totalBasicPremium;
}
console.log(
`Actual Premium Summary, Premium Amount ${actualPremiumSummary}`
);
processingFee = actualPremiumSummary * 0.04;
totalLoanAmount =
parseInt(actualPremiumSummary) + parseInt(processingFee);
//set the basic premium
self.form.basicPremium = totalLoanAmount.toLocaleString();
console.log(`Processing Fee ${processingFee}`);
console.log(`Total Loan Amount ${totalLoanAmount}`);
//
pmtRate = annualInterestRate / self.form.numberOfInstalments;
nPer = loanTerm * self.form.numberOfInstalments;
loanAmount = parseInt(actualPremiumSummary) + parseInt(processingFee);
console.log(`PMT Rate ${pmtRate}`);
console.log(`nPer ${nPer}`);
console.log(`Loan Amount ${loanAmount}`);
estimatedInstalments = self.pmt(pmtRate, nPer, -loanAmount);
console.log(`Estimated Instalments ${estimatedInstalments}`);
//20% of basic premium
depositAmount = (20 / 100) * loanAmount;
//set the deposit amount
self.form.deposit = parseInt(depositAmount.toFixed()).toLocaleString();
//calc term of loan in years
termOfLoanInYears =
self.form.numberOfInstalments /
self.calcMaxLoanTerms[vehicleClassIndex];
if (self.paymentFrequnecy[vehicleClassIndex] === "Monthly") {
periodsPerYear = 12;
} else if (self.paymentFrequnecy[vehicleClassIndex] === "Weekly") {
periodsPerYear = 52;
}
let schedule_nPer = termOfLoanInYears * periodsPerYear;
monthlyInstallments =
self.pmt(
self.ratesByBrian[vehicleClassIndex],
schedule_nPer,
loanAmount
) * -1;
console.log(`Monthly Instalment ${monthlyInstallments}`);
if (self.form.numberOfInstalments <= 0) {
self.form.amountPerInstalment = 0;
} else {
self.form.amountPerInstalment = parseInt(
monthlyInstallments.toFixed(0)
).toLocaleString();
}
},
submitForm() {
let self = this;
if (self.form.sumInsured <= 0) {
return;
}
if (!self.validatePhone(self.form.user_phone)) {
self.phoneIsInvalid = true;
return;
}
self.form.vehicleClassLabel = this.vehicleClass[this.form.vehicleType];
self.sendDataToServer();
},
async sendDataToServer() {
let self = this;
this.loading = true;
let data = this.form;
const config = {
headers: {
Authorization: "Bearer " + process.env.API_API,
},
};
try {
await this.$axios.post(
"https://momentumcredit.co.ke/leads/data.php",
data,
config
);
self.formSubmitted = true;
this.loading = false;
} catch (error) {
console.log(`Request Error ${error}`);
this.loading = false;
}
self.$gtm.push({ event: "onIPFFormSubmitted" });
},
amountUpdated(e) {
let self = this;
let str = e.target.value;
if (str.length < 1) {
return;
}
//let's make there are no commas
let strSafe = str.replace(/,/g, "");
//let's make sure it's an int
let value = parseInt(strSafe);
if (value <= 0 || value == null) {
return;
}
setTimeout(function () {
self.form.sumInsured = value;
self.amount = self.thousandSeprator(value);
}, 300);
},
thousandSeprator(amount) {
if (
amount !== "" ||
amount !== undefined ||
amount !== 0 ||
amount !== "0" ||
amount !== null
) {
return amount.toString().replace(/\B(?=(\d{3})+(?!\d))/g, ",");
} else {
return amount;
}
},
},
};
</script>
<style>
/* Sample `apply` at-rules with Tailwind CSS
.container {
@apply min-h-screen flex justify-center items-center text-center mx-auto;
}
*/
</style>
<style lang="scss" scoped>
.faq-container .faq.large-6{
margin-right: 10px;
width: 49%;
border: #e5ba5b solid 2px;
border-radius: 10px;
.quiz{
//border-bottom: #e5ba5b solid 2px;
// background-color:white;
border-radius: 0px;
}
.ans{
padding: 10px;
}
}
</style>
|
#!/usr/bin/env python
# basic_classes.py
class Circle():
    """A circle described by a color and a radius."""

    def __init__(self, color, radius):
        self.color = color    # color name, e.g. "red"
        self.radius = radius  # numeric radius

    def diameter(self):
        """Return the diameter of the circle (twice the radius)."""
        return 2 * self.radius

    def circumference(self):
        """Return the circumference, using 3.14 as an approximation of pi."""
        return 2 * 3.14 * self.radius

    def isRed(self):
        """Return True if the circle is red, False otherwise.

        Bug fix: the original did ``return color`` (an undefined name, so it
        raised NameError for red circles) and implicitly returned None for
        any other color.
        """
        return self.color == "red"
# Demo: a blue circle of radius 10.
circle_1 = Circle("blue", 10)
# Bug fix: `print expr` is a Python-2-only statement and a SyntaxError under
# Python 3; the parenthesized call works in both interpreters.
print(circle_1.circumference())
class GraduateStudent():
    """A graduate student with a name, matriculation year and major."""

    def __init__(self, first_name, last_name, year, major):
        self.first_name = first_name
        self.last_name = last_name
        self.year = year    # matriculation year, e.g. 2017
        self.major = major

    def year_matriculated(self, current_year=2020):
        """Return the number of years since matriculation.

        Doc fix: the original docstring claimed this returns "the
        matriculation year", but it actually returns the elapsed years.
        Generalization: the reference year was hard-coded to 2020; it is now
        a parameter defaulting to 2020 for backward compatibility.
        """
        return current_year - self.year
# Demo instance. NOTE(review): the variable holds a GraduateStudent, not a
# year — the name is kept for compatibility with any later references.
# "Business" fixes the "Buisness" typo in the original sample data.
matriculation_year = GraduateStudent("Nancy", "Smith", 2017, "Business")
# Bug fix: parenthesized print works under both Python 2 and 3.
print(matriculation_year.year_matriculated())
|
'''
source: https://github.com/deepak112/Keras-SRGAN/blob/master/Utils_model.py
also:
C. Ledig et al., “Photo-Realistic Single Image Super-Resolution Using a Generative Adversarial Network,” in 2017 IEEE Conference on Computer Vision and Pattern Recognition (CVPR), Honolulu, HI, 2017, pp. 105–114.
'''
#title :Network.py
#description :Architecture file(Generator and Discriminator)
#author :Deepak Birla
#date :2018/10/30
#usage :from Network import Generator, Discriminator
#python_version :3.5.4
# Modules
from keras.layers import Dense, Lambda
from keras.layers.core import Activation
from keras.layers.normalization import BatchNormalization
from keras.layers.convolutional import UpSampling2D
from keras.layers.core import Flatten
from keras.layers import Input
from keras.layers.convolutional import Conv2D, Conv2DTranspose
from keras.models import Model
from keras.layers.advanced_activations import LeakyReLU, PReLU
from keras.layers import add
import keras
import math
import Utils
import tensorflow as tf
# Residual block
def res_block_gen(model, kernal_size, filters, strides):
    """Residual block: Conv-BN-PReLU-Conv-BN plus an identity skip connection."""
    shortcut = model
    out = Conv2D(filters=filters, kernel_size=kernal_size, strides=strides, padding="same")(model)
    out = BatchNormalization(momentum=0.5)(out)
    # Parametric ReLU, with the learned alpha shared across the spatial axes
    out = PReLU(alpha_initializer='zeros', alpha_regularizer=None, alpha_constraint=None, shared_axes=[1, 2])(out)
    out = Conv2D(filters=filters, kernel_size=kernal_size, strides=strides, padding="same")(out)
    out = BatchNormalization(momentum=0.5)(out)
    return add([shortcut, out])
def up_sampling_block(model, kernal_size, filters, strides):
    """Upsample the feature map 2x via a subpixel (pixel-shuffle) convolution
    followed by a LeakyReLU activation.

    NOTE(review): kernal_size, filters and strides are kept for interface
    compatibility but are not used by the subpixel implementation; the
    original Conv2D/UpSampling2D/Conv2DTranspose alternatives were removed
    as dead commented-out code.
    """
    input_shape = keras.backend.shape(model)
    out = Utils.SubpixelConv2D(input_shape, scale=2)(model)
    return LeakyReLU(alpha=0.2)(out)
def discriminator_block(model, filters, kernel_size, strides):
    """Conv-BN-LeakyReLU block used throughout the discriminator."""
    out = Conv2D(filters=filters, kernel_size=kernel_size, strides=strides, padding="same")(model)
    out = BatchNormalization(momentum=0.5)(out)
    return LeakyReLU(alpha=0.2)(out)
# Network Architecture is same as given in Paper https://arxiv.org/pdf/1609.04802.pdf
# Network Architecture is same as given in Paper https://arxiv.org/pdf/1609.04802.pdf
class Generator(object):
    """Builds the SRGAN generator (Ledig et al., 2017)."""

    def __init__(self, noise_shape, scaling_factor):
        self.noise_shape = noise_shape        # shape of the low-res input
        self.scaling_factor = scaling_factor  # upscale factor (power of 2)

    def generator(self):
        """Assemble and return the generator as a keras Model."""
        gen_input = Input(shape=self.noise_shape)

        head = Conv2D(filters=64, kernel_size=9, strides=1, padding="same")(gen_input)
        head = PReLU(alpha_initializer='zeros', alpha_regularizer=None, alpha_constraint=None, shared_axes=[1, 2])(head)

        # 16 residual blocks, with a long skip connection back to `head`
        out = head
        for _ in range(16):
            out = res_block_gen(out, 3, 64, 1)
        out = Conv2D(filters=64, kernel_size=3, strides=1, padding="same")(out)
        out = BatchNormalization(momentum=0.5)(out)
        out = add([head, out])

        # one subpixel upsampling block per factor of 2
        for _ in range(int(math.log2(self.scaling_factor))):
            out = up_sampling_block(out, 3, 256, 1)

        out = Conv2D(filters=3, kernel_size=9, strides=1, padding="same")(out)
        out = Activation('tanh')(out)
        return Model(inputs=gen_input, outputs=out)
# Network Architecture is same as given in Paper https://arxiv.org/pdf/1609.04802.pdf
# Network Architecture is same as given in Paper https://arxiv.org/pdf/1609.04802.pdf
class Discriminator(object):
    """Builds the SRGAN discriminator (Ledig et al., 2017)."""

    def __init__(self, image_shape):
        self.image_shape = image_shape  # shape of the (high-res) input image

    def discriminator(self):
        """Assemble and return the discriminator as a keras Model."""
        dis_input = Input(shape=self.image_shape)

        out = Conv2D(filters=64, kernel_size=3, strides=1, padding="same")(dis_input)
        out = LeakyReLU(alpha=0.2)(out)

        # (filters, strides) schedule from the paper; kernel size is 3 throughout
        for block_filters, block_strides in [(64, 2), (128, 1), (128, 2),
                                             (256, 1), (256, 2),
                                             (512, 1), (512, 2)]:
            out = discriminator_block(out, block_filters, 3, block_strides)

        out = Flatten()(out)
        out = Dense(512)(out)
        out = LeakyReLU(alpha=0.2)(out)
        out = Dense(1)(out)
        out = Activation('sigmoid')(out)
        return Model(inputs=dis_input, outputs=out)
|
@can('peptide_psm_create')
<div style="margin-bottom: 10px;" class="row">
<div class="col-lg-12">
<a class="btn btn-success" href="{{ route('admin.peptide-psms.create') }}">
{{ trans('global.add') }} {{ trans('cruds.peptidePsm.title_singular') }}
</a>
</div>
</div>
@endcan
<div class="card">
<div class="card-header">
{{ trans('cruds.peptidePsm.title_singular') }} {{ trans('global.list') }}
</div>
<div class="card-body">
<div class="table-responsive">
<table class="table table-bordered table-striped table-hover datatable datatable-psmPeptidePsms">
<thead>
<tr>
<th width="10">
</th>
<th>
{{ trans('cruds.peptidePsm.fields.id') }}
</th>
<th>
{{ trans('cruds.peptidePsm.fields.name') }}
</th>
<th>
{{ trans('cruds.peptidePsm.fields.peptide') }}
</th>
<th>
{{ trans('cruds.peptidePsm.fields.psm') }}
</th>
<th>
</th>
</tr>
</thead>
<tbody>
@foreach ($peptidePsms as $key => $peptidePsm)
<tr data-entry-id="{{ $peptidePsm->id }}">
<td>
</td>
<td>
{{ $peptidePsm->id ?? '' }}
</td>
<td>
{{ $peptidePsm->name ?? '' }}
</td>
<td>
{{ $peptidePsm->peptide->sequence ?? '' }}
</td>
<td>
{{ $peptidePsm->psm->spectra ?? '' }}
</td>
<td>
@can('peptide_psm_show')
<a class="btn btn-xs btn-primary"
href="{{ route('admin.peptide-psms.show', $peptidePsm->id) }}">
{{ trans('global.view') }}
</a>
@endcan
@can('peptide_psm_edit')
<a class="btn btn-xs btn-info"
href="{{ route('admin.peptide-psms.edit', $peptidePsm->id) }}">
{{ trans('global.edit') }}
</a>
@endcan
@can('peptide_psm_delete')
<form action="{{ route('admin.peptide-psms.destroy', $peptidePsm->id) }}"
method="POST" onsubmit="return confirm('{{ trans('global.areYouSure') }}');"
style="display: inline-block;">
<input type="hidden" name="_method" value="DELETE">
<input type="hidden" name="_token" value="{{ csrf_token() }}">
<input type="submit" class="btn btn-xs btn-danger"
value="{{ trans('global.delete') }}">
</form>
@endcan
</td>
</tr>
@endforeach
</tbody>
</table>
</div>
</div>
</div>
@section('scripts')
@parent
<script>
$(function() {
let dtButtons = $.extend(true, [], $.fn.dataTable.defaults.buttons)
@can('peptide_psm_delete')
let deleteButtonTrans = '{{ trans('global.datatables.delete') }}'
let deleteButton = {
text: deleteButtonTrans,
url: "{{ route('admin.peptide-psms.massDestroy') }}",
className: 'btn-danger',
action: function(e, dt, node, config) {
var ids = $.map(dt.rows({
selected: true
}).nodes(), function(entry) {
return $(entry).data('entry-id')
});
if (ids.length === 0) {
alert('{{ trans('global.datatables.zero_selected') }}')
return
}
if (confirm('{{ trans('global.areYouSure') }}')) {
$.ajax({
headers: {
'x-csrf-token': _token
},
method: 'POST',
url: config.url,
data: {
ids: ids,
_method: 'DELETE'
}
})
.done(function() {
location.reload()
})
}
}
}
dtButtons.push(deleteButton)
@endcan
$.extend(true, $.fn.dataTable.defaults, {
orderCellsTop: true,
order: [
[1, 'desc']
],
pageLength: 100,
});
let table = $('.datatable-psmPeptidePsms:not(.ajaxTable)').DataTable({
buttons: dtButtons
})
$('a[data-toggle="tab"]').on('shown.bs.tab click', function(e) {
$($.fn.dataTable.tables(true)).DataTable()
.columns.adjust();
});
})
</script>
@endsection
|
import { Route, createBrowserRouter, createRoutesFromElements } from "react-router-dom";
import MainLayout from "../layouts/main";
import AuthLayout from "../layouts/auth";
import SignIn from "../pages/auth/SignIn";
import SignUp from "../pages/auth/SingUp";
import Dashboard from "../pages/main/Dashboard";
import Orders from "../pages/main/Orders";
import LayoutWrapper from "../components/atoms/LayoutWrapper";
import Activity from "../pages/main/Activity";
import MealDetail from "../pages/main/MealDetail";
// Application route table (react-router data router).
// LayoutWrapper is the root element; application pages are nested under
// /main (MainLayout) and the authentication screens under /auth (AuthLayout).
export const router = createBrowserRouter(
  createRoutesFromElements(
    <>
      <Route path='/' element={<LayoutWrapper />}>
        {/* Main application shell */}
        <Route path='/main' element={<MainLayout />}>
          <Route path='dashboard' element={<Dashboard />} />
          <Route path='orders' element={<Orders />} />
          <Route path='activity' element={<Activity />} />
          {/* Meal detail page, addressed by slug */}
          <Route path='meal/:slug' element={<MealDetail />} />
        </Route>
        {/* Authentication flow */}
        <Route path='/auth' element={<AuthLayout />}>
          <Route path='login' element={<SignIn />} />
          <Route path='register' element={<SignUp />} />
        </Route>
      </Route>
    </>
  )
);
|
// Bootstrap theme override — must be declared before the Bootstrap import.
$primary: #fff;
@import './bootstrap/bootstrap';

// Colors
$soft-red: hsl(7, 99%, 70%);
$yellow: hsl(51, 100%, 49%);
$dark-cyan-1: hsl(167, 40%, 24%); // graphic design text
$dark-blue-1: hsl(198, 62%, 26%); // photography text
$dark-cyan-2: hsl(168, 34%, 41%); // footer (duplicate declaration removed)
$dark-cyan-3: #90D4C5; // footer

// Neutral
$dark-blue-2: hsl(212, 27%, 19%);
$dark-blue-3: hsl(213, 9%, 39%);
$dark-blue-4: hsl(232, 10%, 55%);
$grayish-blue: hsl(210, 4%, 67%);
$white: hsl(0, 0%, 100%);

// Layout
$mobile: '375px';
$desktop: '1440px';
$mobile-screen: "only screen and (max-width: #{$mobile})";

// Typography
$font-size: '18px';

// Font families (stray semicolons after the mixin bodies removed)
@import url('https://fonts.googleapis.com/css2?family=Barlow:wght@600&display=swap');
@mixin barlow() {
  font-family: 'Barlow', sans-serif;
}

@import url('https://fonts.googleapis.com/css2?family=Fraunces:wght@700;900&display=swap');
@mixin fraunces() {
  font-family: 'Fraunces', sans-serif;
}
body {
@include barlow();
font-size: $font-size;
font-weight: 500;
color: $dark-blue-3;
line-height: 1.7;
@media #{$mobile-screen} {
width: 100vw;
font-size: 14px;
}
}
a, a:hover, a:focus {
text-decoration: none;
color: inherit;
}
h1, h2, h3, h4, h5, h6, a {
@include fraunces;
}
.btn,
.navbar-toggler {
border: none;
&:focus {
box-shadow: none;
}
}
.header {
min-height: 100vh;
background: url(../images/desktop/image-header.jpg) no-repeat center center;
background-size: cover;
}
.navbar {
background: transparent;
padding: 1.5em 0;
color: $white;
.container {
position: relative;
}
&-brand {
@include barlow();
font-weight: 700;
font-size: 2em;
}
&-toggler {
color: $white !important;
}
&-nav {
@media (max-width: 576px) {
display: none;
}
.nav-item {
margin: 0 1em;
&:last-child {
margin-left: 0.5em;
margin-right: 0;
}
}
.nav-link {
@include barlow();
color: $white !important;
font-weight: 500;
font-size: 1.1em;
&:hover, &:focus {
color: $white !important;
}
}
}
.btn {
margin-left: 1rem;
padding: 1.3em 2.6em;
text-transform: uppercase;
@include fraunces();
font-size: 0.8em;
font-weight: 600;
&-primary:hover, &-primary:focus {
background-image: #fff4;
background: #fff4;
border-color: #fff4;
color: $white;
}
&-warning {
background-image: $yellow;
background: $yellow;
color: $dark-blue-2;
}
}
&-mobile {
@media (min-width: 576px) {
display: none;
}
.dropdown-menu {
width: 92vw;
margin-top: 40px;
border: 0;
text-align: center;
padding-top: .8rem;
padding-bottom: .8rem;
border-radius: 0;
&::after {
content: '';
position: absolute;
top: -20px;
right: 0;
width: 0;
height: 0;
border: 20px solid transparent;
border-right: 0;
border-top: 0;
border-bottom-color: $white;
clear: both;
}
}
.dropdown-item {
padding-top: .8rem;
padding-bottom: .8rem;
&:hover {
background: inherit;
}
a {
@include barlow();
font-weight: 500;
font-size: 1.1em;
color: $dark-blue-4;
}
}
}
}
.hero {
color: $white;
h1 {
text-transform: uppercase;
font-size: 3rem;
font-weight: 900;
letter-spacing: .1em;
padding-top: .8em;
margin-bottom: 1.2em;
}
}
.features {
.container {
max-width: 100vw;
.row {
padding: 0 !important;
}
}
.col-md-6 {
margin: 0;
padding: 0;
}
&-1 .feature-img {
background: url(../images/desktop/image-transform.jpg);
}
&-2 .feature-img {
background: url(../images/desktop/image-stand-out.jpg);
}
.feature {
position: relative;
min-height: 600px;
margin-left: 0;
margin-right: 0;
&-img {
background-repeat: no-repeat;
background-position: center center;
background-size: cover;
}
&-text {
width: 83%;
height: 100%;
padding: 0 3rem;
display: flex;
flex-direction: column;
justify-content: center;
align-items: flex-start;
}
h2 {
font-size: 2.6em;
font-weight: 900;
color: $dark-blue-2;
margin-bottom: 0.8em;
}
h3 {
font-size: 1.6em;
font-weight: 900;
margin-bottom: .9em;
}
p {
color: $dark-blue-4;
}
a {
margin-top: 1.2em;
position: relative;
text-transform: uppercase;
font-size: 0.9em;
font-weight: 700;
color: $dark-blue-2;
&::after {
content: '';
position: absolute;
left: 0;
right: 0;
bottom: 3px;
width: 110%;
height: 8px;
border-radius: 4px;
transform: translateX(-5%);
background: $yellow;
z-index: -1;
}
}
&-text-wrap {
max-width: 350px;
height: 100%;
display: flex;
flex-direction: column;
justify-content: flex-end;
text-align: center;
margin-left: auto;
margin-right: auto;
padding-bottom: 3rem;
}
&-design {
background: url(../images/desktop/image-graphic-design.jpg) no-repeat center center;
background-size: cover;
h3, p {
color: $dark-cyan-1;
}
}
&-photography {
background: url(../images/desktop/image-photography.jpg) no-repeat center center;
h3, p {
color: $dark-blue-1;
}
}
}
&-2 {
.feature a::after {
background: $soft-red;
}
}
}
.testimonials {
.container {
text-align: center;
padding: 8rem 0;
}
&-title {
margin-bottom: 2em;
font-size: 1.2em;
font-weight: 700;
text-transform: uppercase;
letter-spacing: .2em;
color: $dark-blue-4;
}
.testimonial {
padding: 1.5em 5em;
img {
width: 75px;
margin-bottom: 3rem;
}
p {
font-size: .9em;
}
h5 {
margin-top: 3rem;
font-weight: 700;
font-size: 1.2em;
color: $dark-blue-2;
}
small {
font-size: 0.8em;
color: $dark-blue-4;
}
}
}
.gallery {
.container {
max-width: 100vw;
.row {
padding: 0;
& > * {
padding: 0;
margin: 0;
}
}
}
}
.footer {
background: $dark-cyan-3;
color: $dark-cyan-1;
.container {
text-align: center;
padding: 4rem 0;
}
&-brand {
@include barlow();
font-weight: 700;
font-size: 2em;
}
a {
transition: color .2s ease-in-out;
&:hover, &:focus {
color: $white;
}
}
&-nav {
margin: 2rem 0 3.5rem 0;
a {
@include barlow();
margin: 0.5em 1em;
}
}
&-social {
a {
margin: 0 0.3em;
font-size: 1.4em;
}
}
}
|
#include <iostream>
// a non-member function for operator overloading is essentially a global function.
// When overloading operators as non-member functions, you create a function outside the class, and it takes two (or more) parameters explicitly.
// The left operand is passed as the first parameter, and the right operand is passed as the second parameter.
// A minimal complex-number value type used to demonstrate operator
// overloading via a non-member (friend) function.
class ComplexNumber {
private:
    double real;      // real part
    double imaginary; // imaginary part
public:
    ComplexNumber(double r, double i) : real(r), imaginary(i) {}
    // Print the number as "a + bi" (no trailing newline).
    void display() const {
        std::cout << real << " + " << imaginary << "i";
    }
    // Declared friend so the non-member operator+ can read the private
    // real/imaginary members of both operands.
    friend ComplexNumber operator+(const ComplexNumber& num1, const ComplexNumber& num2);
};
// Define the non-member function for operator overloading
// Component-wise addition of two complex numbers. Returns a new value;
// neither operand is modified.
ComplexNumber operator+(const ComplexNumber& num1, const ComplexNumber& num2) {
    return ComplexNumber(num1.real + num2.real, num1.imaginary + num2.imaginary);
}
int main() {
ComplexNumber num1(2.0, 3.0);
ComplexNumber num2(1.5, 2.5);
// Using the overloaded '+' operator as a non-member function
ComplexNumber sum = num1 + num2;
// Display the result
std::cout << "Sum: ";
sum.display();
std::cout << std::endl;
return 0;
}
|
"""
Module holding the (base) classes that can be used by the user of the OpenApiLibCore
to implement custom mappings for dependencies between resources in the API under
test and constraints / restrictions on properties of the resources.
"""
from abc import ABC
from copy import deepcopy
from dataclasses import dataclass, fields
from logging import getLogger
from random import choice, shuffle
from typing import Any, Dict, List, Optional, Union
from uuid import uuid4
from OpenApiLibCore import value_utils
logger = getLogger(__name__)
NOT_SET = object()
SENTINEL = object()
def resolve_schema(schema: Dict[str, Any]) -> Dict[str, Any]:
    """
    Helper function to resolve allOf, anyOf and oneOf instances in a schema.

    The schemas are used to generate values for headers, query parameters and json
    bodies to be able to make requests.
    """
    # Schema is mutable, so deepcopy to prevent mutation of original schema argument
    resolved_schema = deepcopy(schema)
    # allOf / anyOf / oneOf may be nested, so recursively resolve the dict-typed values
    for key, value in resolved_schema.items():
        if isinstance(value, dict):
            resolved_schema[key] = resolve_schema(value)
    # When handling allOf there should no duplicate keys, so the schema parts can
    # just be merged after resolving the individual parts
    if schema_parts := resolved_schema.pop("allOf", None):
        for schema_part in schema_parts:
            resolved_part = resolve_schema(schema_part)
            resolved_schema = merge_schemas(resolved_schema, resolved_part)
    # Handling anyOf and oneOf requires extra logic to deal with the "type" information.
    # Some properties / parameters may be of different types and each type may have its
    # own restrictions e.g. a parameter that accepts an enum value (string) or an
    # integer value within a certain range.
    # Since the library needs all this information for different purposes, the
    # schema_parts cannot be merged, so a helper property / key "types" is introduced.
    any_of = resolved_schema.pop("anyOf", [])
    one_of = resolved_schema.pop("oneOf", [])
    # anyOf and oneOf are handled identically; at most one is expected per schema
    schema_parts = any_of if any_of else one_of
    for schema_part in schema_parts:
        resolved_part = resolve_schema(schema_part)
        if isinstance(resolved_part, dict) and "type" in resolved_part.keys():
            if "types" in resolved_schema.keys():
                resolved_schema["types"].append(resolved_part)
            else:
                resolved_schema["types"] = [resolved_part]
        else:
            # parts without "type" information can safely be merged in place
            resolved_schema = merge_schemas(resolved_schema, resolved_part)
    return resolved_schema
def merge_schemas(first: Dict[str, Any], second: Dict[str, Any]) -> Dict[str, Any]:
    """Merge `second` into a deepcopy of `first` and return the result.

    For keys present in both schemas: dict values are merged one level deep
    (via dict.update — NOTE(review): this is shallow, not fully recursive as
    the original docstring suggested), list values are concatenated, and
    conflicting values of other types keep the value from `first` (a warning
    is logged). Keys only present in `second` are copied over.
    """
    merged_schema = deepcopy(first)
    for key, value in second.items():
        # for existing keys, merge dict and list values, leave others unchanged
        if key in merged_schema.keys():
            if isinstance(value, dict):
                # if the key holds a dict, merge the values (e.g. 'properties');
                # nested dicts from `first` are overwritten per-key, not merged
                merged_schema[key].update(value)
            elif isinstance(value, list):
                # if the key holds a list, extend the values (e.g. 'required')
                merged_schema[key].extend(value)
            else:
                logger.warning(
                    f"key '{key}' with value '{merged_schema[key]}' not "
                    f"updated to '{value}'"
                )
        else:
            merged_schema[key] = value
    return merged_schema
class ResourceRelation(ABC):  # pylint: disable=too-few-public-methods
    """ABC for all resource relations or restrictions within the API."""

    # name of the property / parameter the relation or restriction applies to
    property_name: str
    # HTTP status code expected when the relation / restriction is violated
    error_code: int
@dataclass
class PathPropertiesConstraint(ResourceRelation):
    """The resolved path for the endpoint."""

    path: str
    property_name: str = "id"
    error_code: int = 404


@dataclass
class PropertyValueConstraint(ResourceRelation):
    """The allowed values for property_name."""

    property_name: str
    values: List[Any]
    # optional explicit value expected to trigger invalid_value_error_code
    invalid_value: Any = NOT_SET
    invalid_value_error_code: int = 422
    error_code: int = 422


@dataclass
class IdDependency(ResourceRelation):
    """The path where a valid id for the property_name can be gotten (using GET)."""

    property_name: str
    get_path: str
    operation_id: Optional[str] = None
    error_code: int = 422


@dataclass
class IdReference(ResourceRelation):
    """The path where a resource that needs this resource's id can be created (using POST)."""

    property_name: str
    post_path: str
    error_code: int = 422


@dataclass
class UniquePropertyValueConstraint(ResourceRelation):
    """The value of the property must be unique within the resource scope."""

    property_name: str
    value: Any
    error_code: int = 422


# Union of all concrete relation types, used in annotations throughout.
Relation = Union[
    IdDependency,
    IdReference,
    PathPropertiesConstraint,
    PropertyValueConstraint,
    UniquePropertyValueConstraint,
]
@dataclass
class Dto(ABC):
    """Base class for the Dto class."""

    @staticmethod
    def get_parameter_relations() -> List[Relation]:
        """Return the list of Relations for the header and query parameters."""
        return []

    def get_parameter_relations_for_error_code(self, error_code: int) -> List[Relation]:
        """Return the list of Relations associated with the given error_code."""
        # a relation matches if its error_code matches, or if it defines an
        # invalid_value whose invalid_value_error_code matches
        relations: List[Relation] = [
            r
            for r in self.get_parameter_relations()
            if r.error_code == error_code
            or (
                getattr(r, "invalid_value_error_code", None) == error_code
                and getattr(r, "invalid_value", None) != NOT_SET
            )
        ]
        return relations

    @staticmethod
    def get_relations() -> List[Relation]:
        """Return the list of Relations for the (json) body."""
        return []

    def get_relations_for_error_code(self, error_code: int) -> List[Relation]:
        """Return the list of Relations associated with the given error_code."""
        # same matching rule as get_parameter_relations_for_error_code, but
        # applied to the (json) body relations
        relations: List[Relation] = [
            r
            for r in self.get_relations()
            if r.error_code == error_code
            or (
                getattr(r, "invalid_value_error_code", None) == error_code
                and getattr(r, "invalid_value", None) != NOT_SET
            )
        ]
        return relations

    def get_invalidated_data(
        self,
        schema: Dict[str, Any],
        status_code: int,
        invalid_property_default_code: int,
    ) -> Dict[str, Any]:
        """Return a data set with one of the properties set to an invalid value or type.

        The candidate properties are those with a Relation for status_code;
        when status_code equals invalid_property_default_code, every property
        in the schema is a candidate. Raises ValueError when no property can
        be invalidated for the requested status_code.
        """
        properties: Dict[str, Any] = self.as_dict()
        schema = resolve_schema(schema)
        relations = self.get_relations_for_error_code(error_code=status_code)
        # filter PathPropertiesConstraints since in that case no data can be invalidated
        relations = [
            r for r in relations if not isinstance(r, PathPropertiesConstraint)
        ]
        property_names = [r.property_name for r in relations]
        if status_code == invalid_property_default_code:
            # add all properties defined in the schema, including optional properties
            property_names.extend((schema["properties"].keys()))
            # remove duplicates
            property_names = list(set(property_names))
        if not property_names:
            raise ValueError(
                f"No property can be invalidated to cause status_code {status_code}"
            )
        # shuffle the property_names so different properties on the Dto are invalidated
        # when rerunning the test
        shuffle(property_names)
        for property_name in property_names:
            # if possible, invalidate a constraint but send otherwise valid data
            id_dependencies = [
                r
                for r in relations
                if isinstance(r, IdDependency) and r.property_name == property_name
            ]
            if id_dependencies:
                # a random uuid hex is effectively guaranteed to be an unknown id
                invalid_value = uuid4().hex
                logger.debug(
                    f"Breaking IdDependency for status_code {status_code}: replacing "
                    f"{properties[property_name]} with {invalid_value}"
                )
                properties[property_name] = invalid_value
                return properties
            invalid_value_from_constraint = [
                r.invalid_value
                for r in relations
                if isinstance(r, PropertyValueConstraint)
                and r.property_name == property_name
                and r.invalid_value_error_code == status_code
            ]
            if (
                invalid_value_from_constraint
                and invalid_value_from_constraint[0] is not NOT_SET
            ):
                properties[property_name] = invalid_value_from_constraint[0]
                logger.debug(
                    f"Using invalid_value {invalid_value_from_constraint[0]} to "
                    f"invalidate property {property_name}"
                )
                return properties
            value_schema = schema["properties"][property_name]
            value_schema = resolve_schema(value_schema)
            # Filter "type": "null" from the possible types since this indicates an
            # optional / nullable property that can only be invalidated by sending
            # invalid data of a non-null type
            if value_schemas := value_schema.get("types"):
                if len(value_schemas) > 1:
                    value_schemas = [
                        schema for schema in value_schemas if schema["type"] != "null"
                    ]
                value_schema = choice(value_schemas)
            # there may not be a current_value when invalidating an optional property
            current_value = properties.get(property_name, SENTINEL)
            if current_value is SENTINEL:
                # the current_value isn't very relevant as long as the type is correct
                # so no logic to handle Relations / objects / arrays here
                property_type = value_schema["type"]
                if property_type == "object":
                    current_value = {}
                elif property_type == "array":
                    current_value = []
                else:
                    current_value = value_utils.get_valid_value(value_schema)
            values_from_constraint = [
                r.values[0]
                for r in relations
                if isinstance(r, PropertyValueConstraint)
                and r.property_name == property_name
            ]
            invalid_value = value_utils.get_invalid_value(
                value_schema=value_schema,
                current_value=current_value,
                values_from_constraint=values_from_constraint,
            )
            properties[property_name] = invalid_value
            logger.debug(
                f"Property {property_name} changed to {invalid_value} (received from "
                f"get_invalid_value)"
            )
            return properties
        # unreachable in practice: the loop always returns on its first iteration
        logger.warning("get_invalidated_data returned unchanged properties")
        return properties  # pragma: no cover

    def as_dict(self) -> Dict[Any, Any]:
        """Return the dict representation of the Dto."""
        result = {}
        for field in fields(self):
            field_name = field.name
            if field_name not in self.__dict__:
                continue
            # NOTE(review): assumes every dataclass field declares
            # metadata["original_property_name"]; raises KeyError otherwise —
            # confirm against the Dto factory that constructs these fields.
            original_name = field.metadata["original_property_name"]
            result[original_name] = getattr(self, field_name)
        return result
|
---
layout: handbook-page-toc
title: "SAST analyzer deprecation and removal instructions"
---
### Analyzer Conversion Lifecycle
Many of the SAST analyzers are in the process of being replaced by semgrep. This involves having semgrep takeover the functionality of the legacy analyzer.
The steps to achieve this are:
1. [Migrate Rules to sast-rules](https://gitlab.com/gitlab-org/security-products/sast-rules/-/blob/main/docs/update-rule-process.md)
1. Audit Rules and review licensing
1. Deprecate and remove analyzers
This document is concerned with the `Deprecate and remove analyzers` step. All the deprecation steps must be completed before removal can commence.
### Analyzer Deprecation
#### 1. Deprecate the analyzer job in `SAST.latest.gitlab-ci.yml`
Submit an MR to update the [`SAST.latest.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/lib/gitlab/ci/templates/Jobs/SAST.latest.gitlab-ci.yml) file by substituting the analyzer's entry with a placeholder. The placeholder message should provide a notice about when the analyzer's deprecation was announced and when it was removed. Be sure to include a hyperlink directing users to the corresponding deprecation issue for further details. Additonally, ensure the placeholder never runs by adding `when: never` to `rules`.
Always change the existing job to this new "placeholder" rather than deleting the job entirely.
If we delete the entire job, this can break customer pipelines if they have provided custom variables or otherwise referenced the name of the deleted job.
```yaml
script:
- echo "This job was deprecated in GitLab 14.8 and removed in GitLab 15.3"
- echo "For more information see https://gitlab.com/gitlab-org/gitlab/-/issues/352554"
- exit 1
rules:
- when: never
```
#### 2. Deprecation Notification
We're required to publish notices in advance of potentially-breaking changes.
See [Deprecations, removals, and breaking changes](https://handbook.gitlab.com/handbook/product/gitlab-the-product/#deprecations-removals-and-breaking-changes) for details on the required process.
An example of a previous SAST analyzer deprecation notice is [the notice from 14.8](https://docs.gitlab.com/ee/update/deprecations.html#sast-analyzer-consolidation-and-cicd-template-changes).
The Product Manager and Engineering Manager for SAST are responsible for publishing this announcement.
Engineers implementing an analyzer removal are responsible for adhering to the stated scope of the change notice, and informing the PM/EM of any important changes to how customers will be affected by the change.
### Analyzer Removal
#### 1. Remove analyzer from documentation
All references to removed analyzers should be deleted from the SAST documentation. https://docs.gitlab.com/ee/user/application_security/sast/
Example of analyzer removal from documentation: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/97451
#### 2. Add analyzer to "End of supported analyzers" section of SAST documentation
All analyzers that are no longer supported should be listed in the [end-of-supported-analyzers](https://docs.gitlab.com/ee/user/application_security/sast/#end-of-supported-analyzers) section of the SAST documentation.
#### 3. Update the analyzer projects README.md
To further communicate that an analyzer is no longer supported, the following header should be added to its `README.md`.
```
[Maintenance Notice](link-to-removal-notice):
This analyzer is currently in terminal maintenance mode. No new major versions will be released.
We've migrated this analyzer's scanning coverage to the GitLab SAST [Semgrep-based analyzer](https://gitlab.com/gitlab-org/security-products/analyzers/semgrep).
```
#### 4. Remove the analyzer job in `SAST.gitlab-ci.yml`
Submit an MR to update the [`SAST.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/lib/gitlab/ci/templates/Jobs/SAST.gitlab-ci.yml) file by substituting the analyzer's entry with a placeholder. The placeholder message should provide a notice about when the analyzer's deprecation was announced and when it was removed. Additionally, be sure to include a hyperlink directing users to the corresponding deprecation issue for further details.
You can use the previous update to [`SAST.latest.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/lib/gitlab/ci/templates/Jobs/SAST.latest.gitlab-ci.yml) as a guide, but be sure to change the "removed in X" version number to match when the removal happened in the stable (`SAST`) template, not the latest (`SAST.latest`) template.
Example analyzer removal MR: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/97216
#### 5. Resolve all current vulnerabilities
Vulnerabilities created by an analyzer that has been removed should have their state set to resolved.
|
/*
thot package for statistical machine translation
Copyright (C) 2013 Daniel Ortiz-Mart\'inez
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public License
as published by the Free Software Foundation; either version 3
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public License
along with this program; If not, see <http://www.gnu.org/licenses/>.
*/
/**
* @file LogCount.h
*
* @brief Defines the LogCount class to work with logarithms of counts.
*/
#pragma once
#include "nlp_common/Count.h"
#include "nlp_common/MathFuncs.h"
#include <iomanip>
#include <iostream>
/**
 * @brief Wraps a float that holds the natural logarithm of a count.
 *
 * The stored value `x` is a log, initialized to SMALL_LG_NUM (the log of a
 * count that is effectively zero). Use set_count/incr_count to work with raw
 * counts and set_logcount/incr_logcount to work directly in log space.
 *
 * NOTE(review): the arithmetic operators below combine the *stored log
 * values* directly (e.g. operator+ adds logs, which corresponds to
 * multiplying the underlying counts; operator* multiplies logs). Callers
 * must be aware of which semantics they need — incr_count/incr_logcount
 * perform true log-space addition of counts via lns_sumlog.
 */
class LogCount
{
private:
  // Natural logarithm of the count.
  float x;

public:
  // Default: log of an effectively-zero count.
  LogCount()
  {
    x = SMALL_LG_NUM;
  }
  // Numeric constructors take a value that is already a logarithm.
  LogCount(double y) : x((float)y)
  {
  }
  LogCount(float y) : x(y)
  {
  }
  LogCount(int y) : x((float)y)
  {
  }
  // Implicit conversions expose the raw stored log value.
  operator double() const
  {
    return (double)x;
  }
  operator float() const
  {
    return x;
  }
  LogCount operator*=(double y)
  {
    x *= (float)y;
    return *this;
  }
  LogCount operator*=(LogCount y)
  {
    x *= y.x;
    return *this;
  }
  LogCount operator/=(double y)
  {
    x /= (float)y;
    return *this;
  }
  LogCount operator/=(LogCount y)
  {
    x /= y.x;
    return *this;
  }
  LogCount operator+=(double y)
  {
    x += (float)y;
    return *this;
  }
  LogCount operator+=(LogCount y)
  {
    x += y.x;
    return *this;
  }
  // const added: these do not modify *this (matches the const operator*).
  LogCount operator+(double y) const
  {
    return x + (float)y;
  }
  LogCount operator+(LogCount y) const
  {
    return x + y.x;
  }
  LogCount operator-=(double y)
  {
    x -= (float)y;
    return *this;
  }
  LogCount operator-=(LogCount y)
  {
    x -= y.x;
    return *this;
  }
  LogCount operator-(double y) const
  {
    return x - (float)y;
  }
  LogCount operator-(LogCount y) const
  {
    return x - y.x;
  }
  LogCount operator*(double y) const
  {
    return x * (float)y;
  }
  LogCount operator*(LogCount y) const
  {
    return x * y.x;
  }
  // Comparisons order by the stored logs, which matches the ordering of
  // the underlying counts because log is monotonic.
  bool operator<(LogCount y) const
  {
    return x < y.x;
  }
  bool operator>(LogCount y) const
  {
    return x > y.x;
  }
  bool operator<=(LogCount y) const
  {
    return x <= y.x;
  }
  bool operator>=(LogCount y) const
  {
    return x >= y.x;
  }
  // set_count(float/double/int): interpret the argument as an
  // already-logged value and store it as-is (mirrors the constructors).
  void set_count(float y)
  {
    x = y;
  }
  void set_count(double y)
  {
    x = (float)y;
  }
  void set_count(int y)
  {
    x = (float)y;
  }
  // set_count(Count): the argument is a raw count; store its log.
  void set_count(Count y)
  {
    x = (float)log((double)y);
  }
  void set_logcount(float y)
  {
    x = y;
  }
  void set_logcount(double y)
  {
    x = (float)y;
  }
  // incr_count: add a raw count y to the stored count, in log space
  // (log-sum-exp via lns_sumlog), without leaving log representation.
  void incr_count(float y)
  {
    x = (float)MathFuncs::lns_sumlog(x, log((double)y));
  }
  void incr_count(double y)
  {
    x = (float)MathFuncs::lns_sumlog(x, log((double)y));
  }
  void incr_count(int y)
  {
    x = (float)MathFuncs::lns_sumlog(x, log((double)y));
  }
  void incr_count(Count y)
  {
    x = (float)MathFuncs::lns_sumlog(x, log((double)y));
  }
  // incr_logcount: same as incr_count, but y is already a logarithm.
  void incr_logcount(float y)
  {
    x = (float)MathFuncs::lns_sumlog(x, (double)y);
  }
  void incr_logcount(double y)
  {
    x = (float)MathFuncs::lns_sumlog(x, (double)y);
  }
  // get_c_*: the count itself (exp of the stored log).
  // NOTE(review): _s/_st presumably distinguish source vs source-target
  // counts at call sites — the implementations are identical; confirm
  // against callers.
  float get_c_s(void) const
  {
    return (float)exp(x);
  }
  float get_c_st(void) const
  {
    return (float)exp(x);
  }
  // get_lc_*: the stored log-count.
  float get_lc_s(void) const
  {
    return x;
  }
  float get_lc_st(void) const
  {
    return x;
  }
  // Streams read/write the raw log value, not the count.
  friend std::ostream& operator<<(std::ostream& outS, const LogCount& p)
  {
    outS << (double)p.x;
    return outS;
  }
  friend std::istream& operator>>(std::istream& is, LogCount& p)
  {
    is >> p.x;
    return is;
  }
};
// Strict-weak-ordering functor that ranks LogCount values from largest to
// smallest (e.g. as a comparator for sorted containers or sorting).
class greaterLogCount
{
public:
  bool operator()(const LogCount& a, const LogCount& b) const
  {
    return static_cast<double>(a) > static_cast<double>(b);
  }
};
|
import unittest
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import Select
class TestDropDownOptions(unittest.TestCase):
    """Verify the 'superheros' dropdown on letcode.in exposes all 29 options."""

    # Shared WebDriver instance for every test in this class.
    driver = None

    @classmethod
    def setUpClass(cls):
        # Launch Chrome once for the whole class (cheaper than per-test).
        cls.driver = webdriver.Chrome()
        # Maximize so the dropdown is not hidden by responsive layouts.
        cls.driver.maximize_window()

    @classmethod
    def tearDownClass(cls):
        # Shut the browser down after all tests have run.
        cls.driver.quit()

    def test_drop_down_options(self):
        # Navigate to the page under test.
        self.driver.get("https://letcode.in/dropdowns")
        # Locate the dropdown and wrap it in Select to access its options.
        dropdown_element = self.driver.find_element(By.CSS_SELECTOR, "#superheros")
        drop_down_options = Select(dropdown_element).options
        # Log the visible labels to aid debugging when the assertion fails.
        for option in drop_down_options:
            print(option.text)
        # Use the unittest assertion instead of a bare `assert`: it is not
        # stripped under `python -O` and produces a readable diff message.
        self.assertEqual(len(drop_down_options), 29)
# Run the suite when this module is executed directly
# (`python this_file.py`); under a test runner this is a no-op.
if __name__ == "__main__":
    unittest.main()
|
"""
Tutorial 1: Search Chaining
In chapter 2, we learnt how to perform lens modeling using a non-linear search. In all of the tutorials, we fitted the
data using just one non-linear search. In this chapter, we introduce a technique called 'non-linear search chaining',
which fits a lens model using a sequence of non-linear searches. The initial searches fit simpler lens models whose parameter
spaces can be more accurately and efficiently sampled. The results of this search are then passed to later searches
which fit lens models of gradually increasing complexity.
Lets think back to tutorial 4 of chapter 2. We learnt there were three approaches one could take fitting a lens model
accurately if we found that a model fit failed. These were:
1) Tuning our priors to the strong lens we're fitting.
2) Making our lens model less complex.
3) Searching non-linear parameter space for longer.
However, each of the above approaches has disadvantages. The more we tune our priors, the less we can generalize our
analysis to a different strong lens. The less complex we make our model, the less realistic it is. And if we rely too
much on searching parameter space for longer, we could end up with search`s that take days, weeks or months to run.
In this tutorial, we are going to show how search chaining combines these 3 approaches such that we can fit
complex and realistic lens models in a way that that can be generalized to many different strong lenses. To do this,
we'll run 2 searches, and chain the lens model inferred in the first search to the priors of the second search`s lens
model.
Our first search will make the same light-traces-mass assumption we made in the previous tutorial. We saw that this
gives a reasonable lens model. However, we'll make a couple of extra simplifying assumptions, to really try and bring
our lens model complexity down and get the non-linear search running fast.
The model we infer above will therefore be a lot less realistic. But it does not matter, because in the second search
we are going to relax these assumptions and fit the more realistic lens model. The beauty is that, by running the first
search, we can use its results to tune the priors of our second search. For example:
1) The first search should give us a pretty good idea of the lens galaxy's light and mass profiles, for example its
intensity, effective radius and einstein radius.
2) It should also give us a pretty good fit to the lensed source galaxy. This means we'll already know where in
source-plane it is located and what its intensity and effective radius are.
"""
# %matplotlib inline
# from pyprojroot import here
# workspace_path = str(here())
# %cd $workspace_path
# print(f"Working Directory has been set to `{workspace_path}`")
import numpy as np
from os import path
import autolens as al
import autolens.plot as aplt
import autofit as af
"""
__Initial Setup__
we'll use the same strong lensing data as the previous tutorial, where:
- The lens galaxy's light is an `Sersic`.
- The lens galaxy's total mass distribution is an `Isothermal` and `ExternalShear`.
- The source galaxy's light is an `Exponential`.
"""
# Load the example imaging dataset (image, noise map and PSF) from the
# workspace `dataset` folder, sampled at 0.1 arcsec / pixel.
dataset_name = "lens_sersic"
dataset_path = path.join("dataset", "imaging", dataset_name)
dataset = al.Imaging.from_fits(
    data_path=path.join(dataset_path, "data.fits"),
    noise_map_path=path.join(dataset_path, "noise_map.fits"),
    psf_path=path.join(dataset_path, "psf.fits"),
    pixel_scales=0.1,
)
# Restrict the fit to a circular region of radius 2.6" around the lens;
# pixels outside the mask are ignored by the likelihood evaluation.
mask = al.Mask2D.circular(
    shape_native=dataset.shape_native, pixel_scales=dataset.pixel_scales, radius=2.6
)
dataset = dataset.apply_mask(mask=mask)
# Plot the masked dataset as a visual sanity check before modeling.
dataset_plotter = aplt.ImagingPlotter(
    dataset=dataset, visuals_2d=aplt.Visuals2D(mask=mask)
)
dataset_plotter.subplot_dataset()
"""
__Model__
As we've alluded to before, one can look at an image and immediately identify the centre of the lens galaxy. It's
that bright blob of light in the middle! Given that we know we're going to make the lens model more complex in the
next search, lets take a more liberal approach than before and fix the lens centre to $(y,x)$ = (0.0", 0.0").
Now, you might be thinking, doesn`t this prevent our search from generalizing to other strong lenses? What if the
centre of their lens galaxy isn't at (0.0", 0.0")?
Well, this is true if our dataset reduction centres the lens galaxy somewhere else. But we get to choose where we
centre it when we make the image. Therefore, I`d recommend you always centre the lens galaxy at the same location,
and (0.0", 0.0") seems the best choice!
"""
bulge = af.Model(al.lp_linear.Sersic)
mass = af.Model(al.mp.Isothermal)
"""
You haven't actually seen a line like this one before. By setting a parameter to a number (and not a prior) it is
removed from non-linear parameter space and always fixed to that value. Pretty neat, huh?
"""
bulge.centre_0 = 0.0
bulge.centre_1 = 0.0
mass.centre_0 = 0.0
mass.centre_1 = 0.0
"""
Lets use the same approach of making the ellipticity of the mass trace that of the bulge.
"""
mass.ell_comps = bulge.ell_comps
"""
We also discussed that the Sersic index of most lens galaxies is around 4. Lets fix it to 4 this time.
"""
bulge.sersic_index = 4.0
"""
We now compose the model with these components that have had their priors customized.
We have not done anything to the source model, but use an `Exponential` which will become the more complex
`Sersic` in the second search.
"""
lens = af.Model(
al.Galaxy, redshift=0.5, bulge=bulge, mass=mass, shear=al.mp.ExternalShear
)
source = af.Model(al.Galaxy, redshift=1.0, bulge=al.lp_linear.Exponential)
model_1 = af.Collection(galaxies=af.Collection(lens=lens, source=source))
"""
The `info` attribute shows the model in a readable format.
"""
print(model_1.info)
"""
__Search + Analysis__
Now lets create the search and analysis.
"""
search_1 = af.Nautilus(
path_prefix=path.join("howtolens", "chapter_3"),
name="tutorial_1_search_chaining_1",
unique_tag=dataset_name,
n_live=100,
number_of_cores=1,
)
analysis_1 = al.AnalysisImaging(dataset=dataset)
"""
__Run Time__
It is good practice to always check the `log_likelihood_function` run time before starting the non-linear search.
It will be similar to the value we saw in the previous chapter.
"""
run_time_dict, info_dict = analysis_1.profile_log_likelihood_function(
instance=model_1.random_instance()
)
print(f"Log Likelihood Evaluation Time (second) = {run_time_dict['fit_time']}")
print(
"Estimated Run Time Upper Limit (seconds) = ",
(run_time_dict["fit_time"] * model_1.total_free_parameters * 10000)
/ search_1.number_of_cores,
)
"""
Lets run the search, noting that our liberal approach to reducing the lens model complexity has reduced it to just
11 parameters.
"""
print(
"The non-linear search has begun running - checkout the workspace/output/5_chaining_searches"
" folder for live output of the results, images and lens model."
" This Jupyter notebook cell with progress once search has completed - this could take some time!"
)
result_1 = search_1.fit(model=model_1, analysis=analysis_1)
print("Search has finished run - you may now continue the notebook.")
"""
__Result__
The results are summarized in the `info` attribute.
"""
print(result_1.info)
"""
And indeed, we get a reasonably good model and fit to the data, in a much shorter space of time!
"""
fit_plotter = aplt.FitImagingPlotter(fit=result_1.max_log_likelihood_fit)
fit_plotter.subplot_fit()
"""
__Prior Passing__
Now all we need to do is look at the results of search 1 and pass the results as priors for search 2. Lets setup
a custom search that does exactly that.
`GaussianPrior`'s are a nice way to pass priors. They tell the non-linear search where to look, but leave open the
possibility that there might be a better solution nearby. In contrast, `UniformPrior`'s put hard limits on what values a
parameter can or can`t take. It makes it more likely we will accidentally cut-out the global maxima solution.
"""
bulge = af.Model(al.lp_linear.Sersic)
mass = af.Model(al.mp.Isothermal)
shear = af.Model(al.mp.ExternalShear)
source_bulge = af.Model(al.lp_linear.Sersic)
"""
What I've done below is looked at the results of search 1 and manually specified a prior for every parameter. If a
parameter was fixed in the previous search, its prior is based around the previous value. Don't worry about the sigma
values for now, I've chosen values that I know will ensure reasonable sampling, but we'll cover this later.
__LENS LIGHT PRIORS:__
"""
bulge.centre.centre_0 = af.GaussianPrior(
mean=0.0, sigma=0.1, lower_limit=-np.inf, upper_limit=np.inf
)
bulge.centre.centre_1 = af.GaussianPrior(
mean=0.0, sigma=0.1, lower_limit=-np.inf, upper_limit=np.inf
)
bulge.ell_comps.ell_comps_0 = af.GaussianPrior(
mean=0.05, sigma=0.15, lower_limit=-1.0, upper_limit=1.0
)
bulge.ell_comps.ell_comps_1 = af.GaussianPrior(
mean=0.0, sigma=0.2, lower_limit=-1.0, upper_limit=1.0
)
bulge.effective_radius = af.GaussianPrior(
mean=0.72, sigma=0.2, lower_limit=0.0, upper_limit=np.inf
)
bulge.sersic_index = af.GaussianPrior(
mean=4.0, sigma=2.0, lower_limit=0.0, upper_limit=np.inf
)
"""
__LENS MASS PRIORS:__
"""
mass.centre.centre_0 = af.GaussianPrior(
mean=0.0, sigma=0.1, lower_limit=-np.inf, upper_limit=np.inf
)
mass.centre.centre_1 = af.GaussianPrior(
mean=0.0, sigma=0.1, lower_limit=-np.inf, upper_limit=np.inf
)
mass.ell_comps.ell_comps_0 = af.GaussianPrior(
mean=0.05, sigma=0.15, lower_limit=-1.0, upper_limit=1.0
)
mass.ell_comps.ell_comps_1 = af.GaussianPrior(
mean=0.0, sigma=0.2, lower_limit=-1.0, upper_limit=1.0
)
mass.einstein_radius = af.GaussianPrior(
mean=1.6, sigma=0.1, lower_limit=0.0, upper_limit=np.inf
)
shear.gamma_1 = af.GaussianPrior(mean=0.05, sigma=0.05)
shear.gamma_2 = af.GaussianPrior(mean=0.05, sigma=0.05)
"""
__SOURCE LIGHT PRIORS:__
"""
source_bulge.centre.centre_0 = af.GaussianPrior(
mean=0.0, sigma=0.1, lower_limit=-np.inf, upper_limit=np.inf
)
source_bulge.centre.centre_1 = af.GaussianPrior(
mean=0.0, sigma=0.1, lower_limit=-np.inf, upper_limit=np.inf
)
source_bulge.ell_comps.ell_comps_0 = af.GaussianPrior(
mean=0.08, sigma=0.15, lower_limit=-1.0, upper_limit=1.0
)
source_bulge.ell_comps.ell_comps_1 = af.GaussianPrior(
mean=-0.06, sigma=0.2, lower_limit=-1.0, upper_limit=1.0
)
source_bulge.effective_radius = af.GaussianPrior(
mean=0.1, sigma=0.2, lower_limit=0.0, upper_limit=np.inf
)
source_bulge.sersic_index = af.GaussianPrior(
mean=1.0, sigma=1.0, lower_limit=0.0, upper_limit=np.inf
)
"""
We now compose the model with these components that have had their priors customized.
"""
lens = af.Model(al.Galaxy, redshift=0.5, bulge=bulge, mass=mass, shear=shear)
source = af.Model(al.Galaxy, redshift=1.0, bulge=source_bulge)
model_2 = af.Collection(galaxies=af.Collection(lens=lens, source=source))
"""
The `info` attribute shows the model, including the priors specified above.
"""
print(model_2.info)
"""
Lets setup and run the search. As expected, it gives us the correct lens model. However, it does so significantly
faster than we are used to!
"""
search_2 = af.Nautilus(
path_prefix=path.join("howtolens", "chapter_3"),
name="tutorial_1_search_chaining_2",
unique_tag=dataset_name,
n_live=150,
number_of_cores=1,
)
analysis_2 = al.AnalysisImaging(dataset=dataset)
"""
__Run Time__
Whilst the run-time of the log likelihood function is pretty much unchanged from the first search, the overall run-time
of the search should decrease.
This is because via prior passing we have informed the search of where to look in parameter space, meaning it
should spend far fewer than ~10000 iterations per free parameter.
"""
run_time_dict, info_dict = analysis_2.profile_log_likelihood_function(
instance=model_2.random_instance()
)
print(f"Log Likelihood Evaluation Time (second) = {run_time_dict['fit_time']}")
print(
"Estimated Run Time Upper Limit (seconds) = ",
(run_time_dict["fit_time"] * model_2.total_free_parameters * 10000)
/ search_2.number_of_cores,
)
"""
Run the search.
"""
print(
"The non-linear search has begun running - checkout the workspace/output/5_chaining_searches"
" folder for live output of the results, images and lens model."
" This Jupyter notebook cell with progress once search has completed - this could take some time!"
)
result_2 = search_2.fit(model=model_2, analysis=analysis_2)
print("Search has finished run - you may now continue the notebook.")
"""
__Result__
We can again inspect the results via the `info` attribute.
"""
print(result_2.info)
"""
And a plot of the image shows we get a good model again!
"""
fit_plotter = aplt.FitImagingPlotter(fit=result_2.max_log_likelihood_fit)
fit_plotter.subplot_fit()
"""
__Wrap Up__
Chaining two searches together was a huge success. We managed to fit a complex and realistic model, but were able to
begin by making simplifying assumptions that eased our search of non-linear parameter space. We could apply search 1 to
pretty much any strong lens and therefore get ourselves a decent lens model with which to tune search 2`s priors.
You are probably thinking though that there is one huge, giant, glaring flaw in all of this that I've not mentioned.
Search 2 can`t be generalized to another lens, because its priors are tuned to the image we fitted. If we had a lot
of lenses, we`d have to write a new search for every single one. This isn't ideal, is it?
Fortunately, we can pass priors in **PyAutoLens** without specifying the specific values. The API for this technique,
called prior passing, is the topic of the next tutorial.
"""
|
#Curve function 'f'
def f(x):
    """Radioactive-decay curve: 10 * e**(ln(0.5)/5.27 * x), half-life 5.27."""
    import math
    return 10 * math.exp(math.log(0.5) / 5.27 * x)
def radiationExposure(start, stop, step):
    '''
    Computes and returns the amount of radiation exposed
    to between the start and stop times, as a left Riemann sum of
    rectangles of width `step`. Calls the function f (defined for
    you in the grading script) to obtain the value of the function
    at any point.

    start: integer, the time at which exposure begins
    stop: integer, the time at which exposure ends
    step: float, the width of each rectangle. You can assume that
          the step size will always partition the space evenly.

    returns: float, the amount of radiation exposed to
             between start and stop times.
    '''
    # BUG FIX: the original iterated `range(start, stop)`, which walks in
    # steps of 1 regardless of `step`, giving wrong results for any
    # non-unit step (e.g. test case 4 with step=1.5). Instead, compute
    # the number of rectangles; round() guards against float error such
    # as (100 - 40) / 1.5 landing just below an integer.
    num_steps = round((stop - start) / step)
    total = 0.0
    for k in range(num_steps):
        # Height sampled at the left edge of the k-th rectangle.
        total += step * f(start + k * step)
    return total
# Smoke checks: the expected value of each call is recorded in the
# comment beneath it. The return values are intentionally discarded
# here — the grader compares against the commented results.
radiationExposure(0, 5, 1)
#testCase 1 result: 39.10318784326239
radiationExposure(5, 11, 1)
#testCase 2 result: 22.94241041057671
radiationExposure(0, 11, 1)
#testCase 3 result: 62.0455982538
radiationExposure(40, 100, 1.5)
#testCase 4 result: 0.434612356115
#Your answers should be within 0.01 of the correct answer.
|
import React, {Component} from 'react'
import './cardmodal.css'
export default class Cardmodal extends Component {
constructor (props) {
super()
this.onClose = this.onClose.bind(this)
this.onKeyUp = this.onKeyUp.bind(this)
this.onSubmission = this.onSubmission.bind(this)
}
onClose () {
this.props.cardmodalclose()
}
onKeyUp (e) {
if (e.which === 27) {
this.props.cardmodalclose()
}
}
componentDidMount () {
document.addEventListener('keyup', this.onKeyUp)
}
componentWillUnmount () {
document.removeEventListener('keyup', this.onKeyUp)
}
onSubmission (e) {
e.preventDefault()
this.props.newcard(
{
listid: e.target.id,
title: e.target.cardtitle.value,
desc: e.target.description.value
}
)
this.props.cardmodalclose()
}
render () {
return (
<form onSubmit={this.onSubmission} id={this.props.id}>
<div className='modal fade show' id='myModal'style={{display: 'block'}}>
<div className='modal-dialog'>
<div className='modal-content'>
<div className='modal-header'>
<input className='titleinput' name='cardtitle' type='text' placeholder='Card Heading' required />
<button className='close' type='button' onClick={this.onClose}>×</button>
</div>
<div className='modal-body'>
<textarea className='descriptioninput' name='description' placeholder='Add description' />
</div>
<div className='modal-footer'>
<button className='btn btn-light' type='submit'>Save</button>
<button className='btn btn-danger' type='button'>Close</button>
</div>
</div>
</div>
</div>
<div className='modal-backdrop fade show' />
</form>
)
}
}
|
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<title>Matador de Monstros</title>
<link rel="stylesheet" href="style.css">
<script src="https://unpkg.com/vue"></script>
    <link rel="preconnect" href="https://fonts.googleapis.com">
    <link rel="preconnect" href="https://fonts.gstatic.com" crossorigin>
    <link href="https://fonts.googleapis.com/css2?family=Lato:ital,wght@1,&display=swap" rel="stylesheet">
</head>
<body>
<div id="app">
<div id="wallpaper">
<div class="panel scores">
<div class="score">
<h1 class="h1player">Jogador</h1>
<div class="life-bar">
<div class="life"
:class="{danger: playerLife < 20}"
:style="{width: playerLife + '%'}"></div>
</div>
<div style="background-color: rgba(0, 0, 0, 0.651); color: rgb(255, 255, 255);">{{ playerLife }}%</div>
</div>
<div class="score">
<h1 class="h1monster">Monstro</h1>
<div class="life-bar">
<div class="life"
:class="{danger: monsterLife < 20}"
:style="{width: monsterLife + '%'}"></div>
</div>
<div style="background-color: rgba(0, 0, 0, 0.664); color: rgb(255, 255, 255); text-transform: uppercase;">{{ monsterLife }}%</div>
</div>
</div>
<div v-if="hasResult" class="panel result">
<div v-if="monsterLife == 0" class="win">Você venceu!!! :)</div>
<div v-else class="lose">Você perdeu!!! :(</div>
</div>
<div class="panel buttons">
<template v-if="running">
<button @click="attack(false)"
class="btn attack">Ataque</button>
<button @click="attack(true)"
class="btn especial-attack">Ataque Especial</button>
<button class="btn heal">Curar</button>
<button @click="running = false" class="btn give-up">Desistir</button>
</template>
<button v-else @click="startGame"
                        class="btn new-game">Iniciar Jogo</button>
</div>
<div class="panel logs"></div>
</div>
</div>
<script src="app.js"></script>
</body>
</html>
|
import 'dart:async';
import 'package:bloc/bloc.dart';
import 'package:meta/meta.dart';
import 'package:ui_layout/model/customer_model.dart';
import 'package:ui_layout/service/customer_service.dart';
part 'customer_event.dart';
part 'customer_state.dart';
/// Bloc that loads customer lists (all / unpaid / paid) from
/// [CustomerService] and publishes them as [LoadedCustomerState].
class CustomerBloc extends Bloc<CustomerEvent, CustomerState> {
  /// Service used to fetch customer data.
  final CustomerService customerService;

  CustomerBloc(this.customerService) : super(CustomerInitial()) {
    on<LoadAllCustomerEvent>(_loadAllCustomers);
    on<LoadUnpaidCustomerEvent>(_loadUnpaidCustomers);
    on<LoadPaidCustomerEvent>(_loadPaidCustomers);
  }

  // The `emit` parameters are now explicitly typed as
  // Emitter<CustomerState>; the original left them untyped (dynamic),
  // losing static checking on every emit() call.

  /// Emits the full customer list.
  Future<void> _loadAllCustomers(
      LoadAllCustomerEvent event, Emitter<CustomerState> emit) async {
    emit(LoadedCustomerState(await customerService.getAllCustomers()));
  }

  /// Emits only customers with outstanding payments.
  Future<void> _loadUnpaidCustomers(
      LoadUnpaidCustomerEvent event, Emitter<CustomerState> emit) async {
    emit(LoadedCustomerState(await customerService.getUnpaidCustomer()));
  }

  /// Emits only customers who have paid.
  Future<void> _loadPaidCustomers(
      LoadPaidCustomerEvent event, Emitter<CustomerState> emit) async {
    emit(LoadedCustomerState(await customerService.getPaidCustomer()));
  }
}
|
#include <iostream>
#include <chrono>
using namespace std::literals;
// Demonstrates <chrono> duration typedefs, construction, and literal suffixes.
int main()
{
    // 1. Predefined typedefs: a coarse duration converts implicitly to any
    //    finer-grained one (hours -> minutes -> ... -> nanoseconds).
    std::chrono::hours oneHour(1);                // duration<int, ratio<3600, 1>>
    std::chrono::minutes asMinutes = oneHour;     // duration<int, ratio<60, 1>>
    std::chrono::seconds asSeconds = oneHour;     // duration<int, ratio<1, 1>>
    std::chrono::milliseconds asMillis = oneHour; // duration<int, ratio<1, 1000>>
    std::chrono::microseconds asMicros = oneHour;
    std::chrono::nanoseconds asNanos = oneHour;

    std::cout << oneHour.count() << std::endl;
    std::cout << asMinutes.count() << std::endl;
    std::cout << asSeconds.count() << std::endl;
    std::cout << asMillis.count() << std::endl;
    std::cout << asMicros.count() << std::endl;
    std::cout << asNanos.count() << std::endl;

    // 2. Two ways to construct a duration object.
    std::chrono::seconds byTypeName(3); // spell out the type
    auto byLiteral = 3s;                // seconds operator""s(3)

    // 3. Literal suffixes (h, min, s, ms, us, ns) require
    //    "using namespace std::literals" (declared at file scope).
    std::chrono::seconds total = 1min;
    total = 1min + 3s;
    total += 40s;
    std::cout << total.count() << std::endl;

    std::chrono::seconds total2 = 1min + 3s;
    std::cout << total2.count() << std::endl;
}
|
//
// DetailsViewModel.swift
// StarWars-SHIFT
//
// Created by Ivan Semenov on 02.02.2023.
//
import Foundation
/// View model for the details screen: exposes the item's text fields and,
/// when requested, loads optional species/homeworld info via the repository.
final class DetailsViewModel {
    typealias Factory = GuideItemsRepositoryFactory
    // MARK: - Public properties
    // View-binding callbacks: the view assigns these closures and the view
    // model invokes them to drive UI updates.
    var showContent: (() -> Void)?
    // NOTE(review): "Homeworlnfo"/"Specieslnfo" look like typos for
    // "HomeworldInfo"/"SpeciesInfo"; renaming would break existing callers,
    // so they are left unchanged here.
    var showHomeworlnfo: (() -> Void)?
    var showSpecieslnfo: (() -> Void)?
    var showLoadingIndicator: (() -> Void)?
    // Receives a human-readable error description for display.
    var showReceivedError: ((String) -> Void)?
    var applyModalWindowAppearance: (() -> Void)?
    // Receives (species name, homeworld name) once both lookups settle.
    var updateDetailsData: ((String, String) -> Void)?
    var didPresentOptionDetails: ((DetailsViewModelData) -> Void)?
    // Body text shown on the details screen.
    var detailText: String {
        detailsData.details
    }
    // Display name with '/' characters stripped (presumably to sanitize
    // names used in titles — confirm against the data source).
    var nameItem: String {
        detailsData.name.filter { $0 != "/" }
    }
    var speciesTitle: String {
        "Species: "
    }
    var homeworldTitle: String {
        "Homeworld: "
    }
    // True when at least one of the optional sections should be shown.
    var isContainsOptionalInfo: Bool {
        detailsData.shouldShowSpecies || detailsData.shouldShowHomeWorld
    }
    // MARK: - Private properties
    private let factory: Factory
    // Whether this screen was presented as an optional (modal) detail view.
    private let isOptional: Bool
    private let detailsData: DetailsViewModelData
    private var speciesItem: DetailsViewModelData?
    // Setting the homeworld pushes both resolved names to the view; note
    // that only this property's didSet triggers the update (the species
    // result alone does not).
    private var homeworldItem: DetailsViewModelData? {
        didSet {
            updateDetailsData?(speciesItem?.name ?? "unknown",
                               homeworldItem?.name ?? "unknown")
        }
    }
    // Built lazily so the factory is only consulted on first use.
    private lazy var guideItemsRepository = factory.makeGuideItemsRepository()
    // MARK: - Init
    init(factory: Factory, detailsData: DetailsViewModelData, isOptional: Bool) {
        self.factory = factory
        self.isOptional = isOptional
        self.detailsData = detailsData
    }
}
// MARK: - Public methods
extension DetailsViewModel {
func isUnknown(_ name: String?) -> Bool {
guard let name = name else { return false }
return name == "unknown"
}
func showSpeciesDetails() {
guard let speciesItem = speciesItem else { return }
didPresentOptionDetails?(speciesItem)
}
func showHomeworldDetails() {
guard let homeworldItem = homeworldItem else { return }
didPresentOptionDetails?(homeworldItem)
}
func viewDidLoad() {
if isOptional {
applyModalWindowAppearance?()
}
guard detailsData.shouldShowSpecies || detailsData.shouldShowHomeWorld else {
showContent?()
return
}
showLoadingIndicator?()
if detailsData.shouldShowSpecies {
getSpecies()
showSpecieslnfo?()
}
if detailsData.shouldShowHomeWorld {
getHomeworld()
showHomeworlnfo?()
}
}
}
// MARK: - Private methods
private extension DetailsViewModel {
func getSpecies() {
guard let speciesURL = detailsData.speciesURL?.first else { return }
guideItemsRepository.getSpeciesDetails(speciesURL) { result in
switch result {
case .success(let value):
self.speciesItem = value.toSpecies()
case .failure(let error):
self.showReceivedError?(error.localizedDescription)
}
}
}
func getHomeworld() {
guard let homeworldURL = detailsData.homeworldURL else { return }
guideItemsRepository.getHomeworldDetails(homeworldURL) { result in
switch result {
case .success(let value):
self.homeworldItem = value.toPlanet()
case .failure(let error):
self.showReceivedError?(error.localizedDescription)
}
}
}
}
|
---
title: Docker 介绍
tags:
- Docker
categories:
- 服务&组件
date: 2022-07-01 12:01:01
thumbnail:
---
> [docker 库](https://hub.docker.com) | [docker 官方文档](https://docs.docker.com/)
## 1. 简介
Docker是一个开源的引擎,可以轻松的为任何应用创建一个轻量级的、可移植的、自给自足的容器。开发者在笔记本上编译测试通过的容器可以批量地在生产环境中部署,包括VMs(虚拟机)、 [bare metal](http://www.whatis.com.cn/word_5275.htm)、OpenStack 集群和其他的基础应用平台。
## 2. 适用场景
Docker通常用于如下场景:
- web应用的自动化打包和发布;
- 自动化测试和持续集成、发布;
- 在服务型环境中部署和调整数据库或其他的后台应用;
- 从头编译或者扩展现有的OpenShift或Cloud Foundry平台来搭建自己的PaaS环境。
## 3. 安装
### 3.1. 使用yum安装
```shell
yum update # 更新yum
yum install docker # 安装docker
systemctl start docker.service # 启动docker
docker version # 查看docker版本,验证是否安装成功
sudo systemctl enable docker # 设置开机自启动
# 结束
```
### 3.2. 使用brew安装
```sh
brew install --cask --appdir=/Applications docker
```
> mac的安装,执行完命令后需要打开docker的App,授权后才能使用。
|
<!DOCTYPE html>
<html>
<head>
<!-- 文字コードにUTF-8を使用することを明示 -->
<meta charset="utf-8">
<!-- 表示を端末の幅に合わせて調整するように指示 -->
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>content center</title>
<!-- Alpine.jsをCDNインポートで使う -->
<script src="//unpkg.com/alpinejs" defer></script>
<!-- UAStyleSheetを一気にリセットする -->
<link rel="stylesheet" href="reset.css">
<!-- このページのCSSを書く -->
<style>
* {
/* ボックスモデルの判定をボーダーとマージンの境界に持たせる */
box-sizing: border-box;
}
html, body {
height: 100%;
width: 100%;
/* カラー指定をしやすくするために css variables を設定する */
--color-primary: #3CDEF2;
--color-secondary: #3C83F2;
--color-accent: #F23C83;
--color-text: lightgray;
--color-text-on-primary: #AB3CF2;
--color-background: midnightblue;
}
.button {
padding: 8px 16px;
/* border-radius(要素の角を丸くする半径)は%指定だとイマイチになることが多いのでvhという単位で指定すると困ることが少ない */
border-radius: 100vh;
      /* CSS variables を利用して色を指定するときは var() を用いる */
color: var(--color-text-on-primary);
background-color: var(--color-primary);
}
.button.outlined {
border: 2px solid var(--color-secondary);
color: var(--color-text);
background-color: initial;
}
.background {
width: 100%;
height: 100%;
background-color: var(--color-background);
display: flex;
/* flex の向きを指定する(columnは垂直方向) */
flex-direction: column;
/* flexの向きに対して垂直に交わる向きを中央寄せする */
/* この場合はページの水平方向 */
align-items: center;
/* flexの向きに対して平行な向きを中央寄せする */
/* この場合はページの垂直方向 */
justify-content: center;
}
.title {
margin: 32px;
color: var(--color-primary);
font-size: larger;
font-weight: bolder;
}
.panel {
width: 100%;
display: flex;
flex-direction: column;
align-items: center;
}
.panel .button {
margin-bottom: 16px;
width: fit-content;
height: fit-content;
}
.panel .button:last-child {
margin-bottom: 0;
}
</style>
</head>
<body>
<div class="background">
<div class="title">This is Title.</div>
<div class="panel">
<a class="button">default button</a>
<a class="button outlined">outlined button</a>
</div>
</div>
</body>
</html>
|
#include <algorithm>
#include <iostream>
#include <map>
#include <stack>
#include <stdexcept>
#include <string>
#include "input.h"
// A rock platform: a grid of '.', 'O' (round stone), and '#' (cube rock).
using problem = std::vector<std::vector<char>>;

// Converts the raw input lines into a mutable character grid.
problem parse_input(const std::vector<std::string_view> &input) {
  problem grid;
  grid.reserve(input.size());
  for (const auto &line : input) {
    grid.emplace_back(line.begin(), line.end());
  }
  return grid;
}
void print(problem p) {
for (auto row : p) {
for (char c : row) {
std::cout << c;
}
std::cout << "\n";
}
}
// Rolls every round stone ('O') as far north (up) as it can go.
// `free` tracks the northernmost empty row a stone in this column can
// reach; cube rocks ('#') reset it to the row just below them.
// The grid is mutated in place; the return value is a copy of it.
problem tilt_north(problem &p) {
  for (size_t col{0}; col < p.at(0).size(); ++col) {
    size_t free{0};
    for (size_t row{0}; row < p.size(); ++row) {
      switch (p[row][col]) {
        case 'O':
          // Vacate the current cell and drop the stone into the
          // northernmost free slot of this column.
          p[row][col] = '.';
          p[free++][col] = 'O';
          break;
        case '#':
          // Stones cannot roll past a cube rock.
          free = row + 1;
          break;
        default:
          break;
      }
    }
  }
  return p;
}
// Rolls every round stone ('O') as far south (down) as it can go.
// Mirror image of tilt_north: scans each column bottom-up, tracking the
// southernmost free row; '#' cube rocks block rolling.
// The grid is mutated in place; the return value is a copy of it.
problem tilt_south(problem &p) {
  for (size_t col{0}; col < p.at(0).size(); ++col) {
    size_t free_row{p.size() - 1};
    for (size_t row{p.size() - 1};; --row) {
      switch (p[row][col]) {
        case 'O':
          p[row][col] = '.';
          p[free_row--][col] = 'O';
          break;
        case '#':
          // Next free slot is the row just above the cube rock. (When
          // row == 0 this wraps, but the loop exits below before
          // free_row is used again.)
          free_row = row - 1;
          break;
        default:
          break;
      }
      // Avoid overflow: size_t would wrap below zero, so exit manually
      // instead of using `row >= 0` as the loop condition.
      if (row == 0) break;
    }
  }
  return p;
}
// Rolls every round stone ('O') as far west (left) as it can go.
// Same scheme as tilt_north, but scanning each row left-to-right and
// tracking the leftmost free column.
// The grid is mutated in place; the return value is a copy of it.
problem tilt_west(problem &p) {
  for (size_t row{0}; row < p.size(); ++row) {
    size_t free_col{0};
    for (size_t col{0}; col < p.at(0).size(); ++col) {
      switch (p[row][col]) {
        case 'O':
          // Vacate the current cell and slide the stone into the
          // leftmost free slot of this row.
          p[row][col] = '.';
          p[row][free_col++] = 'O';
          break;
        case '#':
          // Stones cannot roll past a cube rock.
          free_col = col + 1;
          break;
        default:
          break;
      }
    }
  }
  return p;
}
// Rolls every round stone ('O') as far east (right) as it can go.
// Now takes the grid by reference for consistency with the other tilt_*
// helpers — the original pass-by-value forced a full grid copy per call
// (and meant tilt_east, uniquely, did not mutate the caller's grid;
// every call site assigns the return value back, so behavior at the
// call sites is unchanged).
problem tilt_east(problem &p) {
  for (size_t row{0}; row < p.size(); ++row) {
    // Rightmost free column a rolling stone may land in.
    size_t free_col{p.at(0).size() - 1};
    for (size_t col{p.at(0).size() - 1};; --col) {
      switch (p[row][col]) {
        case 'O':
          // Vacate the current cell and slide the stone into the
          // rightmost free slot of this row.
          p[row][col] = '.';
          p[row][free_col--] = 'O';
          break;
        case '#':
          // Next free slot is just left of the cube rock. (Wraps when
          // col == 0, but the loop exits below before free_col is used.)
          free_col = col - 1;
          break;
        default:
          break;
      }
      // Avoid overflow: size_t would wrap below zero, so exit manually.
      if (col == 0) break;
    }
  }
  return p;
}
// Computes the total load on the north support beam: each round stone
// ('O') contributes its distance from the south edge (bottom row = 1).
size_t get_north_beam_load(const problem &p) {
  size_t sum{0};
  for (size_t row{0}, distance_to_south{p.size()}; row < p.size();
       ++row, --distance_to_south) {
    // Borrow the row — the original `auto stones = p[row]` deep-copied
    // every row on every call.
    const auto &stones = p[row];
    for (char stone : stones) {
      if (stone == 'O') {
        sum += distance_to_south;
      }
    }
  }
  return sum;
}
// Part 1: tilt the platform north once and report the north-beam load.
// `p` is taken by value, so the caller's grid is left untouched.
size_t part1(problem p) {
  return get_north_beam_load(tilt_north(p));
}
// Part 2: run 1,000,000,000 spin cycles (tilt N, W, S, E) and report the
// north-beam load, using cycle detection on grid states to skip ahead
// instead of simulating every spin.
size_t part2(problem p) {
  // One full spin cycle; each tilt mutates p in place and the result is
  // assigned back (tilt_east may pass by value — see its definition).
  auto spin = [&p]() {
    p = tilt_north(p);
    p = tilt_west(p);
    p = tilt_south(p);
    p = tilt_east(p);
  };
  // Maps a grid state to the index of the spin after which it first
  // appeared.
  std::map<problem, size_t> seen{};
  for (size_t i{0}; i < 1000000000; ++i) {
    spin();
    auto it = seen.find(p);  // single lookup (original searched twice)
    if (it != seen.end()) {
      // State repeats: spins (it->second, i] form a cycle. After spin i
      // we have completed i + 1 spins, so 999999999 - i remain; only the
      // remainder modulo the cycle length changes the final state.
      size_t cycle_length{i - it->second};
      size_t remaining{(999999999 - i) % cycle_length};
      for (size_t k{0}; k < remaining; ++k) {
        spin();
      }
      return get_north_beam_load(p);
    }
    seen.insert({p, i});
  }
  return get_north_beam_load(p);
}
// Parses the puzzle input once and prints both answers.
int main() {
  const problem p = parse_input(input);
  std::cout << "Part 1: " << part1(p) << "\n";
  std::cout << "Part 2: " << part2(p) << "\n";
}
|
//
// PortfolioViewModel.swift
// IBClientAPISwiftUI
//
// Created by Danil Poletaev on 18.03.2022.
//
import Foundation
/// View model backing the portfolio screen.
///
/// Kicks off four independent requests (positions, allocation, account
/// summary, PnL) and clears the aggregate `isLoading` flag once all four
/// have completed.
final class PortfolioViewModel: ObservableObject {
    @Published var positions: [Position] = []
    @Published var assetClass: AssetClass? = nil
    @Published var accountSummary: AccountSummary = [:]
    @Published var dailyPnL: CorePnLModel? = nil
    /// True until every request below has finished at least once.
    @Published var isLoading = true
    @Published var isPositionsLoading = true
    @Published var isAccountPerformanceLoading = true
    @Published var isAccountSummaryLoading = true
    @Published var isPnlLoading = true

    private let repository: PortfolioRepositoryProtocol

    /// - Parameter repository: injectable for tests; falls back to the
    ///   default `PortfolioRepository` when nil.
    init(repository: PortfolioRepositoryProtocol?) {
        self.repository = repository ?? PortfolioRepository(portfolioApiService: nil, accountApiService: nil, tickerApiService: nil)
    }

    /// Clears `isLoading` once every per-request flag has been cleared.
    /// Replaces the four hand-maintained "check the other three flags"
    /// conditions the callbacks previously duplicated.
    private func updateOverallLoadingState() {
        if !isPositionsLoading && !isAccountPerformanceLoading && !isAccountSummaryLoading && !isPnlLoading {
            isLoading = false
        }
    }

    // NOTE(review): the closures below assign @Published properties; confirm
    // the repository calls back on the main thread (or dispatch explicitly).
    private func fetchPositions() {
        repository.fetchPositions { [weak self] positions in
            guard let self = self else { return }
            self.positions = positions
            self.isPositionsLoading = false
            self.updateOverallLoadingState()
        }
    }

    private func fetchAccountPerformance() {
        repository.fetchAccountAllocation { [weak self] accountPerformance in
            guard let self = self else { return }
            self.assetClass = accountPerformance.assetClass
            self.isAccountPerformanceLoading = false
            self.updateOverallLoadingState()
        }
    }

    private func fetchAccountSummary() {
        repository.getAccountSummary { [weak self] accountSummary in
            guard let self = self else { return }
            self.accountSummary = accountSummary
            self.isAccountSummaryLoading = false
            self.updateOverallLoadingState()
        }
    }

    private func getPnL() {
        repository.getPnL { [weak self] coreModel in
            guard let self = self else { return }
            self.dailyPnL = coreModel
            self.isPnlLoading = false
            self.updateOverallLoadingState()
        }
    }

    /// Entry point called from the view. Positions are only fetched the
    /// first time; the other three requests refresh on every appearance.
    func onAppear() {
        if positions.isEmpty {
            fetchPositions()
        }
        fetchAccountSummary()
        fetchAccountPerformance()
        getPnL()
    }
}
|
<!DOCTYPE html>
<html lang="en" xmlns:th="http://www.thymeleaf.org">
<head>
    <meta charset="UTF-8">
    <title>Login_REGISTER</title>
    <link href="//maxcdn.bootstrapcdn.com/bootstrap/4.1.1/css/bootstrap.min.css" rel="stylesheet" id="bootstrap-css">
    <link th:href="@{/form/login.css}" rel="stylesheet" >
    <!-- jQuery must be loaded before Bootstrap's JS, which depends on it -->
    <script src="//cdnjs.cloudflare.com/ajax/libs/jquery/3.2.1/jquery.min.js"></script>
    <script src="//maxcdn.bootstrapcdn.com/bootstrap/4.1.1/js/bootstrap.min.js"></script>
    <script th:src="@{/form/login.js}"></script>
</head>
<body>
<div class="container">
    <div class="row">
        <!-- Mixins-->
        <!-- Pen Title-->
        <div class="pen-title">
        </div>
        <div class="container">
            <div class="card"></div>
            <div class="card">
                <h1 class="title">Login User</h1>
                <form th:action="@{/client/login}" method="post" th:object="${user}">
                    <!-- Error feedback uses the danger style (was alert-success with inline red text) -->
                    <div th:if="${message} != null" class="alert alert-danger text-center" >
                        <h6 th:text="${message}"></h6>
                    </div>
                    <div th:if="${messageSuccessfully} != null" class="alert alert-success text-center" >
                        <h6 th:text="${messageSuccessfully}"></h6>
                    </div>
                    <div class="input-container">
                        <input type="text" id="LoginEmail" th:field="*{email}" required="required" />
                        <label for="LoginEmail">Username</label>
                        <div class="bar"></div>
                    </div>
                    <div class="input-container">
                        <input type="password" id="LoginPassword" th:field="*{password}" required="required"/>
                        <label for="LoginPassword">Password</label>
                        <div class="bar"></div>
                    </div>
                    <div class="button-container">
                        <button type="submit"><span>Go</span></button>
                        <!-- type="button" so Cancel does not submit the form -->
                        <button type="button" onclick="cancelForm()">Cancel</button>
                    </div>
                </form>
            </div>
            <div class="card alt">
                <div class="toggle"></div>
                <h1 class="title">Register
                    <div class="close"></div>
                </h1>
                <form th:action="@{/client/users/save}" method="post" th:object="${user}">
                    <input type="hidden" th:field="*{userId}" id="typeId" >
                    <div class="input-container">
                        <input type="text" id="FirstName" th:field="*{firstName}"/>
                        <label for="FirstName">First Name</label>
                        <div class="bar"></div>
                    </div>
                    <div class="input-container">
                        <input type="text" id="LastName" th:field="*{lastName}" />
                        <label for="LastName">Last Name</label>
                        <div class="bar"></div>
                    </div>
                    <div class="input-container">
                        <input type="text" id="Email" required="required" th:field="*{email}"/>
                        <label for="Email">Email</label>
                        <div class="bar"></div>
                    </div>
                    <div class="input-container">
                        <input type="password" id="Password" required="required" th:field="*{password}"/>
                        <label for="Password">Password</label>
                        <div class="bar"></div>
                    </div>
                    <div class="button-container">
                        <button type="submit"><span>Next</span></button>
                        <button type="button" onclick="cancelForm()">Cancel</button>
                    </div>
                </form>
            </div>
        </div>
        <!-- Portfolio--><a id="portfolio" th:href="@{/client}" title="View my portfolio!"><i class="fa fa-link"></i></a>
    </div>
</div>
<!-- th:inline="javascript" is required for Thymeleaf to evaluate the [[...]]
     expression; without it the browser received the literal text. -->
<script type="text/javascript" th:inline="javascript">
    function cancelForm(){
        window.location = /*[[@{/client}]]*/ '/client';
    }
</script>
</body>
</html>
|
import React, {memo, useCallback, useEffect, useState} from 'react';
import {useSelector} from "react-redux";
import {classNames} from '@/shared/lib/classNames/classNames';
import {VStack} from '@/shared/ui/component/Stack';
import cls from './Sidebar.module.scss';
import {useAppDispatch} from "@/shared/lib/hooks/useAppDispatch/useAppDispatch";
import {Button} from "@/shared/ui/component/Button";
import {Flex} from "@/shared/ui/component/Stack/Flex/Flex";
import {getUserAuthData, getUserWorkersData} from "@/entities/User";
import {Text} from "@/shared/ui/component/Text";
import {useNavigate, useParams} from "react-router-dom";
import {AvatarDropdown} from "@/features/avatarDropdown";
import {getRouteTaskByUser} from "@/shared/const/router";
import {LoginModal} from "@/features/AuthByUsername";
interface SidebarProps {
    className?: string;
}

/**
 * Right-hand sidebar: avatar menu, a "Tasks" shortcut and one button per
 * employee of the logged-in user. Highlights the employee whose tasks are
 * currently shown (derived from the URL).
 */
export const SidebarRight = memo(({className}: SidebarProps) => {
    const [isAuthModal, setIsAuthModal] = useState(false);
    const currentUser = useSelector(getUserAuthData);
    const navigate = useNavigate();
    const [selectedUserId, setSelectedUserId] = useState<string | null>(null);

    const onCloseModal = useCallback(() => {
        setIsAuthModal(false);
    }, []);

    const onShowModal = useCallback(() => {
        setIsAuthModal(true);
    }, []);

    // Keep the highlighted employee in sync with the /tasks/:userId URL.
    useEffect(() => {
        const userIdFromUrl = location.pathname.split('/').pop();
        if (userIdFromUrl) {
            setSelectedUserId(userIdFromUrl);
        }
    }, [location.pathname]);

    const handleButtonClick = (userId: string) => {
        navigate(`tasks/${userId}`);
        setSelectedUserId(userId);
    };

    return (
        <>
            <aside
                className={classNames(
                    cls.Sidebar,
                    {},
                    [className],
                )}
            >
                <Flex className={cls.padding} gap="4" max justify="start"
                      direction="column">
                    <AvatarDropdown onShowModal={onShowModal} />
                </Flex>
                {currentUser &&
                    <VStack role="navigation" gap="8" className={cls.items}>
                        <Text title="All list of tasks"/>
                        <Button
                            fullWidth
                            color={'success'}
                            variant={'filled'}
                            className={cls.links}
                            onClick={() => navigate(getRouteTaskByUser())}
                        >
                            Tasks
                        </Button>
                        {currentUser?.workers && currentUser.workers.length > 0 && <Text title="List of employees"/>}
                        {currentUser?.workers && currentUser.workers.length > 0 && currentUser.workers.map((item) =>
                            <Button
                                key={item.id}
                                fullWidth
                                color={selectedUserId === item.id ? 'error' : 'success'}
                                variant={selectedUserId === item.id ? 'outline' : 'filled'}
                                onClick={() => handleButtonClick(item.id)}
                            >
                                {item.username}
                            </Button>
                        )}
                    </VStack>
                }
            </aside>
            {isAuthModal && (
                <LoginModal isOpen={isAuthModal} onClose={onCloseModal}/>
            )}
        </>
    );
});
|
package com.develsystems.smartid.smartid_flutter
import android.content.Context
import androidx.annotation.NonNull
import io.flutter.embedding.engine.plugins.FlutterPlugin
import io.flutter.plugin.common.MethodCall
import io.flutter.plugin.common.MethodChannel
import io.flutter.plugin.common.MethodChannel.MethodCallHandler
import io.flutter.plugin.common.MethodChannel.Result
import com.develsystems.smartid.SmartId
import com.develsystems.smartid.models.Account
import com.develsystems.smartid.models.AccountTo
import com.develsystems.smartid.models.CreditCard
import com.develsystems.smartid.models.CreditTo
import com.develsystems.smartid.models.DebitFrom
import com.develsystems.smartid.models.Device
import com.develsystems.smartid.models.Model
import com.develsystems.smartid.models.Operation
import com.develsystems.smartid.models.Order
import com.develsystems.smartid.models.Transaction
import java.util.HashMap
/** SmartidFlutterPlugin */
// Flutter plugin bridging Dart method-channel calls ("smartid_flutter") to
// the native SmartId Android SDK: init, link/unlink and operation creation.
class SmartidFlutterPlugin: FlutterPlugin, MethodCallHandler {
  /// The MethodChannel that handles the communication between Flutter and native Android.
  ///
  /// This local reference serves to register the plugin with the Flutter Engine and unregister it
  /// when the Flutter Engine is detached from the Activity
  // Application context captured in onAttachedToEngine; used by the SDK calls below.
  private var appContext: Context? = null;
  private lateinit var channel : MethodChannel
  override fun onAttachedToEngine(@NonNull flutterPluginBinding: FlutterPlugin.FlutterPluginBinding) {
    this.appContext = flutterPluginBinding.applicationContext;
    channel = MethodChannel(flutterPluginBinding.binaryMessenger, "smartid_flutter")
    channel.setMethodCallHandler(this)
  }
  // Dispatches incoming channel calls to the handler methods below.
  override fun onMethodCall(@NonNull call: MethodCall, @NonNull result: Result) {
    when(call.method){
      "initInstance" -> {
        initSmartId(call, result)
      }
      "link" -> {
        linkSmartId(call, result)
      }
      "unlink" -> {
        unlinkSmartId(call, result)
      }
      "createOperation" -> {
        createOperationSmartId(call, result)
      }
      else -> result.notImplemented()
    }
  }
  // Initialises the SmartId SDK singleton from the call arguments.
  // NOTE(review): appContext!! throws if called before onAttachedToEngine;
  // missing "isProduction" also throws via !! — confirm Dart side always sends it.
  fun initSmartId(call: MethodCall, result: Result) {
    var instance = SmartId.initInstance(
      appContext!!,
      call.argument<String>("license"),
      call.argument<String>("username"),
      call.argument<Boolean>("isProduction")!!
    )
    result.success("init Success")
  }
  // Links the current device/account via the SDK.
  fun linkSmartId(call: MethodCall, result: Result) {
    SmartId.getInstance().Link(
      call.argument<String>("channel"),
      call.argument<String>("session")
    )
    result.success("link success")
  }
  // Unlinks the current device/account via the SDK.
  fun unlinkSmartId(call: MethodCall, result: Result) {
    SmartId.getInstance().UnLink(
      call.argument<String>("channel"),
      call.argument<String>("session")
    )
    result.success("unlink success")
  }
  // Builds an Operation from the "operation" map and submits it to the SDK.
  fun createOperationSmartId(call: MethodCall, result: Result) {
    val license = call.argument<String>("license")!!
    val isProduction = call.argument<Boolean>("isProduction")!!
    val operationMap = call.argument<Map<String, Any>>("operation")!!
    val operationModel = convertMapToOperation(operationMap)
    SmartId.getInstance().CreateOperation(
      appContext,
      license,
      operationModel,
      isProduction
    )
    result.success("Create operation success")
  }
  // ---- Map -> SDK model converters ----------------------------------------
  // NOTE(review): every converter uses unchecked `as` casts; a missing or
  // mistyped key from Dart will throw ClassCastException/NullPointerException
  // at runtime. Confirm the Dart side always supplies the full payload.
  private fun convertMapToOperation(map: Map<String, Any>): Operation {
    val operation = Operation()
    operation.model = convertMapToModel(map)
    return operation
  }
  private fun convertMapToModel(map: Map<String, Any>): Model? {
    val model = Model()
    model.channelId = (map["channelId"] as String).toInt()
    model.device = convertMapToDevice(map["device"] as Map<String, Any>)
    model.transaction = convertMapToTransaction(map["transaction"] as Map<String, Any>)
    model.account = convertMapToAccount(map["account"] as Map<String, Any>)
    model.accountTo = convertMapToAccountTo(map["accountTo"] as Map<String, Any>)
    model.debitFrom = convertMapToDebitFrom(map["debitFrom"] as Map<String, Any>)
    model.creditTo = convertMapToCreditTo(map["creditTo"] as Map<String, Any>)
    model.creditCard = convertMapToCreditCard(map["creditCard"] as Map<String, Any>)
    model.order = convertMapToOrder(map["order"] as Map<String, Any>)
    return model
  }
  private fun convertMapToDevice(map: Map<String, Any>): Device? {
    val device = Device()
    device.smartId = map["smartId"] as String
    device.ipAddress = map["ipAddress"] as String
    return device
  }
  private fun convertMapToTransaction(map: Map<String, Any>): Transaction? {
    val transaction = Transaction()
    transaction.category = map["category"] as String
    transaction.type = map["type"] as String
    transaction.description = map["description"] as String
    transaction.reference = map["reference"] as String
    transaction.date = map["date"] as String
    transaction.details = map["details"] as HashMap<String, String>
    return transaction
  }
  private fun convertMapToAccount(map: Map<String, Any>): Account? {
    val account = Account()
    account.client = map["client"] as String
    account.clientRefId = map["clientRefId"] as Int
    account.clientRefIdStr = map["clientRefIdStr"] as String
    account.email = map["email"] as String
    account.phoneNumber = map["phoneNumber"] as String
    account.session = map["session"] as String
    account.accountNumber = map["accountNumber"] as String
    account.bank = map["bank"] as String
    return account
  }
  // Same field set as convertMapToAccount, but targeting the AccountTo model.
  private fun convertMapToAccountTo(map: Map<String, Any>): AccountTo? {
    val accountTo = AccountTo()
    accountTo.client = map["client"] as String
    accountTo.clientRefId = map["clientRefId"] as Int
    accountTo.clientRefIdStr = map["clientRefIdStr"] as String
    accountTo.email = map["email"] as String
    accountTo.phoneNumber = map["phoneNumber"] as String
    accountTo.session = map["session"] as String
    accountTo.accountNumber = map["accountNumber"] as String
    accountTo.bank = map["bank"] as String
    return accountTo
  }
  private fun convertMapToDebitFrom(map: Map<String, Any>): DebitFrom? {
    val debitFrom = DebitFrom()
    debitFrom.account = map["account"] as String
    debitFrom.bank= map["bank"] as String
    debitFrom.currency = map["currency"] as String
    return debitFrom
  }
  private fun convertMapToCreditTo(map: Map<String, Any>): CreditTo? {
    val creditTo = CreditTo()
    creditTo.account = map["account"] as String
    creditTo.bank= map["bank"] as String
    creditTo.currency = map["currency"] as String
    return creditTo
  }
  private fun convertMapToCreditCard(map: Map<String, Any>): CreditCard? {
    val creditCard = CreditCard()
    creditCard.bin = map["bin"] as String
    creditCard.hash = map["hash"] as String
    creditCard.last4Digits = map["last4Digits"] as String
    creditCard.token = map["token"] as String
    return creditCard
  }
  private fun convertMapToOrder(map: Map<String, Any>): Order? {
    val order = Order()
    order.amount = map["amount"] as Double
    order.currency = map["currency"] as String
    return order
  }
  override fun onDetachedFromEngine(@NonNull binding: FlutterPlugin.FlutterPluginBinding) {
    channel.setMethodCallHandler(null)
  }
}
|
import { Injectable } from '@angular/core';
import {HttpClient, HttpErrorResponse, HttpHeaders} from "@angular/common/http";
import {catchError, Observable, tap, throwError} from "rxjs";
import {Key} from "../enum/key";
import {AccountType, CustomHttpResponse, Page, Profile} from "../interface/appStates";
import {User} from "../interface/user";
import {JwtHelperService} from "@auth0/angular-jwt";
import {Biodata} from "../interface/biodata";
import {Stats} from "../interface/stats";
@Injectable({
  providedIn: 'root'
})
/**
 * HTTP client for all /user endpoints: authentication, profile management,
 * password flows, MFA toggling and user search.
 *
 * Each `xyz$` member is an arrow-function property returning a cold
 * Observable; every request logs its response and funnels failures through
 * `handleError`.
 */
export class UserService {
  // NOTE(review): hard-coded dev URL — consider moving to environment config.
  private readonly server: string= 'http://localhost:8080';
  private jwtHelper = new JwtHelperService();
  constructor(private http: HttpClient) { }
  /** Authenticates with email/password. */
  login$ = (email: string, password: string) => <Observable<CustomHttpResponse<Profile>>>
    this.http.post<CustomHttpResponse<Profile>>
    (`${this.server}/user/login`, {email, password})
      .pipe(
        tap(console.log),
        catchError(this.handleError)
      );
  /** Registers a new user. */
  save$ = (user: User) => <Observable<CustomHttpResponse<Profile>>>
    this.http.post<CustomHttpResponse<Profile>>
    (`${this.server}/user/register`, user)
      .pipe(
        tap(console.log),
        catchError(this.handleError)
      );
  /** Verifies a one-time login code (MFA). */
  verifyCode$ = (email: string, code: string) => <Observable<CustomHttpResponse<Profile>>>
    this.http.get<CustomHttpResponse<Profile>>
    (`${this.server}/user/verify/code/${email}/${code}`)
      .pipe(
        tap(console.log),
        catchError(this.handleError)
      );
  /** Fetches the current user's profile. */
  profile$ = () => <Observable<CustomHttpResponse<Profile>>>
    this.http.get<CustomHttpResponse<Profile>>
    (`${this.server}/user/profile`)
      .pipe(
        tap(console.log),
        catchError(this.handleError)
      );
  /** Updates profile details. */
  update$ = (user: User) => <Observable<CustomHttpResponse<Profile>>>
    this.http.patch<CustomHttpResponse<Profile>>
    (`${this.server}/user/update`, user)
      .pipe(
        tap(console.log),
        catchError(this.handleError)
      );
  /**
   * Exchanges the stored refresh token for fresh tokens and replaces both
   * entries in localStorage on success.
   */
  refreshToken$ = () => <Observable<CustomHttpResponse<Profile>>>
    this.http.get<CustomHttpResponse<Profile>>
    (`${this.server}/user/refresh/token`, {headers: {Authorization: `Bearer ${localStorage.getItem(Key.REFRESH_TOKEN)}`}})
      .pipe(
        tap(response => {
          console.log(response);
          localStorage.removeItem(Key.TOKEN);
          localStorage.removeItem(Key.REFRESH_TOKEN);
          localStorage.setItem(Key.TOKEN, response.data.access_token);
          localStorage.setItem(Key.REFRESH_TOKEN, response.data.refresh_token);
        }),
        catchError(this.handleError)
      );
  /** Changes the password of the logged-in user. */
  updatePassword$ = (form: {
    currentPassword: string,
    newPassword: string,
    confirmNewPassword: string
  }) => <Observable<CustomHttpResponse<Profile>>>
    this.http.patch<CustomHttpResponse<Profile>>
    (`${this.server}/user/update/password`, form)
      .pipe(
        tap(console.log),
        catchError(this.handleError)
      );
  /**
   * Maps an HttpErrorResponse to a user-readable message string.
   * Safe to pass unbound to catchError: it never references `this`.
   */
  private handleError(error: HttpErrorResponse): Observable<never> {
    console.log(error);
    let errorMessage: string;
    if (error.error instanceof ErrorEvent) {
      errorMessage = `A client error occurred - ${error.error.message}`;
    } else {
      if (error.error.reason) {
        errorMessage = error.error.reason;
        console.log(errorMessage);
      } else {
        errorMessage = `An error occurred - Error status ${error.status}`;
      }
    }
    return throwError(() => errorMessage);
  }
  /** Changes the user's role to the given role name. */
  updateRoles$ = (roleName: string) => <Observable<CustomHttpResponse<Profile>>>
    this.http.patch<CustomHttpResponse<Profile>>
    (`${this.server}/user/update/role/${roleName}`, {})
      .pipe(
        tap(console.log),
        catchError(this.handleError)
      );
  /** Updates the enabled / not-locked account flags. */
  updateAccountSettings$ = (settings: { enabled: boolean, notLocked: boolean }) => <Observable<CustomHttpResponse<Profile>>>
    this.http.patch<CustomHttpResponse<Profile>>
    (`${this.server}/user/update/settings`, settings)
      .pipe(
        tap(console.log),
        catchError(this.handleError)
      );
  /** Toggles multi-factor authentication for the current user. */
  toggleMfa$ = () => <Observable<CustomHttpResponse<Profile>>>
    this.http.patch<CustomHttpResponse<Profile>>
    (`${this.server}/user/togglemfa`, {})
      .pipe(
        tap(console.log),
        catchError(this.handleError)
      );
  /** Uploads a new profile image (multipart form data). */
  updateImage$ = (formData: FormData) => <Observable<CustomHttpResponse<Profile>>>
    this.http.patch<CustomHttpResponse<Profile>>
    (`${this.server}/user/update/image`, formData)
      .pipe(
        tap(console.log),
        catchError(this.handleError)
      );
  // True when a decodable, unexpired JWT is present in localStorage.
  isAuthenticated = (): boolean => (this.jwtHelper.decodeToken<string>(localStorage.getItem(Key.TOKEN)) && !this.jwtHelper.isTokenExpired(localStorage.getItem(Key.TOKEN))) ? true : false;
  /** Clears both tokens from localStorage (client-side logout only). */
  logOut() {
    localStorage.removeItem(Key.TOKEN);
    localStorage.removeItem(Key.REFRESH_TOKEN);
  }
  /** Starts the password-reset flow by emailing a reset link. */
  requestPasswordReset$ = (email: string) => <Observable<CustomHttpResponse<Profile>>>
    this.http.get<CustomHttpResponse<Profile>>
    (`${this.server}/user/resetpassword/${email}`)
      .pipe(
        tap(console.log),
        catchError(this.handleError)
      );
  /** Verifies an account/password key of the given type. */
  verify$ = (key: string, type: AccountType) => <Observable<CustomHttpResponse<Profile>>>
    this.http.get<CustomHttpResponse<Profile>>
    (`${this.server}/user/verify/${type}/${key}`)
      .pipe(
        tap(console.log),
        catchError(this.handleError)
      );
  /** Sets a new password after a successful reset verification. */
  renewPassword$ = (form: { userId: number, password: string, confirmPassword: string }) => <Observable<CustomHttpResponse<Profile>>>
    this.http.put<CustomHttpResponse<Profile>>
    (`${this.server}/user/new/password`, form)
      .pipe(
        tap(console.log),
        catchError(this.handleError)
      );
  /** Paged user search by (partial) name. */
  searchUsers$ = (name: string = '', page: number = 0) => <Observable<CustomHttpResponse<Page<User> & User>>>
    this.http.get<CustomHttpResponse<Page<User> & User>>
    (`${this.server}/user/search?name=${name}&page=${page}`)
      .pipe(
        tap(console.log),
        catchError(this.handleError)
      );
}
|
package org.springframework.social.partnercenter.serialization;
import static org.assertj.core.api.Assertions.assertThat;
import java.time.Instant;
import java.time.LocalDateTime;
import java.time.OffsetDateTime;
import java.time.ZonedDateTime;
import org.junit.Test;
/**
 * Round-trip test: serializing {@link Something} with {@code Json.toJson}
 * and reading it back must preserve all four java.time field values.
 */
public class JsonTest {
	@Test
	public void testTimeSerialization(){
		Something something = new Something();
		String s = Json.toJson(something);
		// NOTE(review): the variable holds a whole Something, not just an
		// Instant — "roundTripped" would be a clearer name.
		Something instant = Json.fromJson(s, Something.class);
		assertThat(instant.instant).isEqualByComparingTo(something.instant);
		// Zone/offset types are compared via toInstant(): the zone itself may
		// legitimately differ after a round trip, the moment in time may not.
		assertThat(instant.zonedDateTime.toInstant()).as("ZonedDateTime").isEqualByComparingTo(something.zonedDateTime.toInstant());
		assertThat(instant.localDateTime).as("LocalDateTime").isEqualTo(something.localDateTime);
		assertThat(instant.offsetDateTime.toInstant()).as("OffsetDateTime").isEqualByComparingTo(something.offsetDateTime.toInstant());
	}
	/** Fixture holding one value of each java.time type under test. */
	public static class Something {
		private Instant instant;
		private ZonedDateTime zonedDateTime;
		private LocalDateTime localDateTime;
		private OffsetDateTime offsetDateTime;
		public Something() {
			instant = Instant.now();
			zonedDateTime = ZonedDateTime.now();
			localDateTime = LocalDateTime.now();
			offsetDateTime = OffsetDateTime.now();
		}
		public Instant getInstant() {
			return instant;
		}
		public ZonedDateTime getZonedDateTime() {
			return zonedDateTime;
		}
		public LocalDateTime getLocalDateTime() {
			return localDateTime;
		}
		public OffsetDateTime getOffsetDateTime() {
			return offsetDateTime;
		}
		public void setInstant(Instant instant) {
			this.instant = instant;
		}
		public void setZonedDateTime(ZonedDateTime zonedDateTime) {
			this.zonedDateTime = zonedDateTime;
		}
		public void setLocalDateTime(LocalDateTime localDateTime) {
			this.localDateTime = localDateTime;
		}
		public void setOffsetDateTime(OffsetDateTime offsetDateTime) {
			this.offsetDateTime = offsetDateTime;
		}
	}
}
|
import 'package:flutter/material.dart';
import 'package:go_router/go_router.dart';
import 'package:gradproject/app/constants/routes_constants.dart';
import 'package:gradproject/app/global_functions.dart';
import 'package:gradproject/domain/classes/trainings/training.dart';
import 'package:url_launcher/url_launcher.dart';
import 'package:video_player/video_player.dart';
// --- App color palette ---------------------------------------------------
// NOTE(review): the original comment labelled backgroundColor as "#176b87",
// but 0xff27374D renders as #27374D; #176b87 is backGroundPhoto below.
const backgroundColor = Color(0xff27374D); // dark slate #27374D, full opacity
const textColor = Color.fromARGB(255, 32, 56, 50);
const backGroundPhoto = Color(0xff176b87);
const buttonColor = Color(0xD9FA9A85); // salmon, ~85% opacity
const buttonTextColor = Colors.white;
const loginText = Color(0xCCFA9A85); // salmon, ~80% opacity
const borderColor = Color(0xffDDE6ED);
const backBtnColor = Color(0xffFBC6BA);
// Static catalogue of training programmes, grouped by body area. Each entry
// bundles the exercises (image, optional video / 3D model, prescription
// text) shown on the training screens.
// NOTE(review): several description strings contain typos ("s sets",
// "posteri oly") — they are user-visible; fix the data when convenient.
List<Training> trainings = [
  Training(
    category: "Knee Pain",
    exercises: [
      TrainingExercise(
        exerciseImg: 'assets/images/side_lying_hip_abduction.png',
        haveVideo: false,
        exerciseName: "Long ARC Quads",
        have3DModel: true,
        modelUrl: "assets/3dModels/Long_ARC_Quads_try5.glb",
        description: "15-20 reps , 3 sets",
        exerciseDuration: "1 min",
      ),
      TrainingExercise(
        exerciseImg: 'assets/images/side_lying_hip_abduction.png',
        haveVideo: true,
        exerciseName: "side lying hip abduction",
        have3DModel: true,
        modelUrl: 'assets/3dModels/side_lying_hip_abduction.glb',
        description:
            "lifting your heel up towards don't lead your toe and let your hip flexors feel kind of more posterioly more in the butt than in the front feel more posteriorly in glute 10 reps / 3 sets",
        exerciseDuration: "1 min",
      ),
      TrainingExercise(
        exerciseImg: 'assets/images/Short_ARC_Quads.png',
        haveVideo: true,
        exerciseName: "Short ARC Quads",
        have3DModel: true,
        modelUrl: 'assets/3dModels/Short_ARC_Quads.glb',
        description: "15-20 reps ,3 set",
        exerciseDuration: "1 min",
      ),
      TrainingExercise(
        exerciseImg: 'assets/images/Short_ARC_Quads.png',
        haveVideo: true,
        exerciseName: "Straight Leg Raise",
        have3DModel: true,
        modelUrl: 'assets/3dModels/Straight_Leg_Raise.glb',
        description: "10-15 reps , 3 sets",
        exerciseDuration: "1 min",
      ),
    ],
    imageUrl: 'assets/images/Knee.png',
  ),
  Training(
    category: "Lower Back",
    exercises: [
      TrainingExercise(
        exerciseImg: 'assets/images/Lumbar_rotation_stretch.png',
        haveVideo: true,
        videoUrl: 'https://www.youtube.com/watch?v=1rUFz5RzGmc',
        exerciseName: "Lumbar rotation stretch",
        have3DModel: true,
        modelUrl: "assets/3dModels/Lumbar_rotation_stretch.glb",
        description: "20 Seconds per side 3 Reps",
        exerciseDuration: "1 min",
      ),
    ],
    imageUrl: 'assets/images/lower-back-pain.png',
  ),
  Training(
      category: "Upper Back",
      exercises: [
        TrainingExercise(
          exerciseImg: 'assets/images/swimmers.png',
          haveVideo: true,
          exerciseName: "Swimmers",
          have3DModel: true,
          modelUrl: 'assets/3dModels/swimmer.glb',
          videoUrl:
              'https://www.youtube.com/watch?v=bAHLexn6Ruk&list=PLT4Yite3Tx5n-rPIsiqWzVgQI78FCmyHR&index=3',
          description: "10 rep/ s sets",
          exerciseDuration: "1 min",
        ),
        TrainingExercise(
          exerciseImg: 'assets/images/Prone_Ts.png',
          haveVideo: true,
          exerciseName: "Prone T's",
          modelUrl: 'assets/3dModels/Prone_Ts.glb',
          videoUrl:
              'https://www.youtube.com/watch?v=bAHLexn6Ruk&list=PLT4Yite3Tx5n-rPIsiqWzVgQI78FCmyHR&index=4',
          have3DModel: true,
          description: "10 repetitions 3 sets",
          exerciseDuration: "1 min",
        ),
      ],
      imageUrl: 'assets/images/upper-back.png'),
  Training(
    category: "Hip pain",
    exercises: [
      TrainingExercise(
        exerciseImg: 'assets/images/hip_flexor_stretch.png',
        haveVideo: true,
        exerciseName: "hip flexor stretch",
        have3DModel: true,
        modelUrl: "assets/3dModels/hip_flexor_stretch.glb",
        description: "20 secs / 3 rep",
        videoUrl:
            'https://www.youtube.com/watch?v=FOYaffW5zfA&list=PLT4Yite3Tx5kNj8FZobTBZyT-b2_cuh5P&index=6',
        exerciseDuration: "1 min",
      ),
      TrainingExercise(
        exerciseImg: 'assets/images/side_lying_hip_abduction.png',
        haveVideo: true,
        exerciseName: "side lying hip abduction",
        have3DModel: true,
        modelUrl: 'assets/3dModels/side_lying_hip_abduction.glb',
        description:
            "lifting your heel up towards don't lead your toe and let your hip flexors feel kind of more posteri oly more in the butt than in the front feel more posteriorly in glute 10 reps / 3 sets",
        exerciseDuration: "1 min",
        videoUrl: 'https://www.youtube.com/watch?v=ikt6NME0k9E',
      ),
    ],
    imageUrl: 'assets/images/hip_pain.png',
  ),
  Training(
    category: "VR Meta",
    exercises: [
      TrainingExercise(
        exerciseImg: 'assets/images/hip_flexor_stretch.png',
        haveVideo: true,
        exerciseName: "VR trial",
        have3DModel: true,
        modelUrl: "assets/3dModels/orsaTry.glb",
        description: "30 secs / 3 rep",
        videoUrl:
            'https://www.youtube.com/watch?v=FOYaffW5zfA&list=PLT4Yite3Tx5kNj8FZobTBZyT-b2_cuh5P&index=6',
        exerciseDuration: "30 sec",
      ),
    ],
    imageUrl: 'assets/images/vr.jpg',
  ),
];
/// Tappable card for one exercise: full-bleed image with a gradient overlay
/// and the exercise name at the bottom. Tapping navigates to the exercise
/// details route with the [TrainingExercise] as the route payload.
class ExerciseCard extends StatelessWidget {
  final double cardWidth;
  // NOTE(review): "cardHight" is a typo for "cardHeight", but it is part of
  // the public constructor API — renaming would break callers.
  final double cardHight;
  // Caption shown over the bottom of the image.
  final String text;
  // Asset path of the card's background image.
  final String imageUrl;
  final TrainingExercise exerciseTraining;
  const ExerciseCard({
    super.key,
    required this.exerciseTraining,
    required this.cardWidth,
    required this.cardHight,
    required this.text,
    required this.imageUrl,
  });
  @override
  Widget build(BuildContext context) {
    return SizedBox(
      width: cardWidth,
      height: cardHight,
      child: InkWell(
        onTap: () {
          securePrint("[orsa1] ${exerciseTraining.description}");
          context.pushNamed(RoutesName.exerciseDetails,
              extra: exerciseTraining);
        },
        child: Stack(
          alignment: Alignment.bottomCenter,
          children: [
            SizedBox(
              width: double.infinity,
              height: double.infinity,
              child: ClipRRect(
                  borderRadius: BorderRadius.circular(10.0),
                  child: Image.asset(
                    imageUrl,
                    fit: BoxFit.fill,
                  )),
            ),
            // Bottom 40% of the card: dark-to-transparent gradient so the
            // caption stays readable on any image.
            Container(
              width: double.infinity,
              height: cardHight * 0.4,
              decoration: const BoxDecoration(
                borderRadius: BorderRadius.only(
                    bottomLeft: Radius.circular(10),
                    bottomRight: Radius.circular(10)),
                gradient: LinearGradient(
                  colors: [
                    // Color.fromARGB(255, 13, 136, 161),
                    // Color.fromARGB(199, 19, 161, 189),
                    // Color.fromARGB(149, 32, 193, 225),
                    // Color.fromARGB(99, 42, 198, 229),
                    // Color.fromARGB(49, 69, 218, 248),
                    // Color.fromARGB(0, 85, 162, 230),
                    Color.fromARGB(255, 18, 35, 49),
                    Color.fromARGB(190, 110, 135, 158),
                    Color.fromARGB(10, 181, 192, 202),
                  ],
                  begin: Alignment.bottomCenter,
                  end: Alignment.topCenter,
                ),
              ),
              child: Column(
                mainAxisAlignment: MainAxisAlignment.end,
                children: [
                  Text(
                    text,
                    style: const TextStyle(
                      color: Colors.white,
                      fontSize: 15,
                      fontWeight: FontWeight.bold,
                    ),
                  ),
                  SizedBox(
                    height: cardHight * 0.06,
                  )
                ],
              ),
            ),
          ],
        ),
      ),
    );
  }
}
/// Renders the three difficulty chips (Beginner / Intermediate / Advanced)
/// with "Intermediate" shown as the selected, filled option — matching the
/// previous hard-coded layout, but without triplicating the chip markup.
///
/// NOTE(review): the selection is still hard-coded; wire it to real exercise
/// data if difficulty ever varies per exercise.
Widget getExerciseDifficulty() {
  return Row(
    mainAxisAlignment: MainAxisAlignment.spaceBetween,
    children: [
      _difficultyChip('Beginner', selected: false),
      _difficultyChip('Intermediate', selected: true),
      _difficultyChip('Advanced', selected: false),
    ],
  );
}

/// One pill-shaped difficulty label. When [selected], the pill is filled
/// purple with white text; otherwise it is outlined with black text.
Widget _difficultyChip(String label, {required bool selected}) {
  const accent = Color(0xFF93469F);
  return Container(
    padding: const EdgeInsets.symmetric(horizontal: 15, vertical: 5),
    decoration: BoxDecoration(
      color: selected ? accent : null,
      border: selected ? null : Border.all(color: accent),
      borderRadius: BorderRadius.circular(20),
    ),
    child: Text(
      label,
      style: TextStyle(
        fontSize: 15,
        color: selected ? Colors.white : Colors.black,
      ),
    ),
  );
}
/// Icon button that opens [videoUrl] in an external application
/// (typically the YouTube app or the browser).
class OpenYouTubeVideo extends StatelessWidget {
  final String videoUrl;

  const OpenYouTubeVideo({super.key, required this.videoUrl});

  /// Launches [url] externally; throws if no installed app accepts it.
  Future<void> _launchURL(String url) async {
    final launched = await launchUrl(
      Uri.parse(url),
      mode: LaunchMode.externalApplication,
    );
    if (!launched) {
      throw Exception('Could not launch $url');
    }
  }

  @override
  Widget build(BuildContext context) {
    return IconButton(
      icon: const Icon(
        Icons.image_search,
        color: Colors.white,
      ),
      onPressed: () => _launchURL(videoUrl),
    );
  }
}
/// Full-screen player for a bundled asset video with tap-to-toggle
/// play/pause controls (see [_OpenVideoState]).
class OpenVideo extends StatefulWidget {
  const OpenVideo({super.key});
  @override
  State<OpenVideo> createState() => _OpenVideoState();
}
/// State for [OpenVideo]: owns the [VideoPlayerController] lifecycle and
/// renders the player with an overlay and a play/pause FAB.
class _OpenVideoState extends State<OpenVideo> {
  late VideoPlayerController _controller;

  @override
  void initState() {
    super.initState();
    _controller = VideoPlayerController.asset(
      'assets/videos/video.mp4', // Replace with your video URL or asset path
    )..initialize().then((_) {
        // Rebuild so the first frame is shown once initialization completes.
        setState(() {});
      });
  }

  @override
  void dispose() {
    // Release the native player before tearing down the State; Flutter's
    // convention is that super.dispose() is called last (the original
    // called it first).
    _controller.dispose();
    super.dispose();
  }

  @override
  Widget build(BuildContext context) {
    return Scaffold(
      body: Center(
        child: _controller.value.isInitialized
            ? AspectRatio(
                aspectRatio: _controller.value.aspectRatio,
                child: Stack(
                  children: [
                    VideoPlayer(_controller),
                    _ControlsOverlay(controller: _controller),
                    VideoProgressIndicator(_controller, allowScrubbing: true),
                  ],
                ),
              )
            : const CircularProgressIndicator(), // Spinner until initialized
      ),
      floatingActionButton: FloatingActionButton(
        onPressed: () {
          setState(() {
            if (_controller.value.isPlaying) {
              _controller.pause();
            } else {
              _controller.play();
            }
          });
        },
        child: Icon(
          _controller.value.isPlaying ? Icons.pause : Icons.play_arrow,
        ),
      ),
    );
  }
}
/// Overlay placed on top of the video: tap-to-toggle play/pause, a big play
/// icon while paused, and popup menus for caption offset (top-left) and
/// playback speed (top-right).
class _ControlsOverlay extends StatelessWidget {
  const _ControlsOverlay({required this.controller});
  // Caption-offset choices offered in the top-left popup menu.
  static const List<Duration> _exampleCaptionOffsets = <Duration>[
    Duration(seconds: -10),
    Duration(seconds: -3),
    Duration(seconds: -1, milliseconds: -500),
    Duration(milliseconds: -250),
    Duration.zero,
    Duration(milliseconds: 250),
    Duration(seconds: 1, milliseconds: 500),
    Duration(seconds: 3),
    Duration(seconds: 10),
  ];
  // Playback-speed choices offered in the top-right popup menu.
  static const List<double> _examplePlaybackRates = <double>[
    0.25,
    0.5,
    1.0,
    1.5,
    2.0,
    3.0,
    5.0,
    10.0,
  ];
  final VideoPlayerController controller;
  @override
  Widget build(BuildContext context) {
    return Stack(
      children: <Widget>[
        // Dimmed backdrop + big play icon, shown only while paused.
        AnimatedSwitcher(
          duration: const Duration(milliseconds: 50),
          reverseDuration: const Duration(milliseconds: 200),
          child: controller.value.isPlaying
              ? const SizedBox.shrink()
              : const ColoredBox(
                  color: Colors.black26,
                  child: Center(
                    child: Icon(
                      Icons.play_arrow,
                      color: Colors.white,
                      size: 100.0,
                      semanticLabel: 'Play',
                    ),
                  ),
                ),
        ),
        // Transparent full-area tap target toggling play/pause.
        GestureDetector(
          onTap: () {
            controller.value.isPlaying ? controller.pause() : controller.play();
          },
        ),
        Align(
          alignment: Alignment.topLeft,
          child: PopupMenuButton<Duration>(
            initialValue: controller.value.captionOffset,
            tooltip: 'Caption Offset',
            onSelected: (Duration delay) {
              controller.setCaptionOffset(delay);
            },
            itemBuilder: (BuildContext context) {
              return <PopupMenuItem<Duration>>[
                for (final Duration offsetDuration in _exampleCaptionOffsets)
                  PopupMenuItem<Duration>(
                    value: offsetDuration,
                    child: Text('${offsetDuration.inMilliseconds}ms'),
                  )
              ];
            },
            child: Padding(
              padding: const EdgeInsets.symmetric(
                // Using less vertical padding as the text is also longer
                // horizontally, so it feels like it would need more spacing
                // horizontally (matching the aspect ratio of the video).
                vertical: 12,
                horizontal: 16,
              ),
              child: Text('${controller.value.captionOffset.inMilliseconds}ms'),
            ),
          ),
        ),
        Align(
          alignment: Alignment.topRight,
          child: PopupMenuButton<double>(
            initialValue: controller.value.playbackSpeed,
            tooltip: 'Playback speed',
            onSelected: (double speed) {
              controller.setPlaybackSpeed(speed);
            },
            itemBuilder: (BuildContext context) {
              return <PopupMenuItem<double>>[
                for (final double speed in _examplePlaybackRates)
                  PopupMenuItem<double>(
                    value: speed,
                    child: Text('${speed}x'),
                  )
              ];
            },
            child: Padding(
              padding: const EdgeInsets.symmetric(
                // Using less vertical padding as the text is also longer
                // horizontally, so it feels like it would need more spacing
                // horizontally (matching the aspect ratio of the video).
                vertical: 12,
                horizontal: 16,
              ),
              child: Text('${controller.value.playbackSpeed}x'),
            ),
          ),
        ),
      ],
    );
  }
}
|
/**
* @file fraction.h
* @author Bharath.G ()
* @brief Interface for fraction class
* @version 0.1
* @date 2021-12-08
*
* @copyright Copyright (c) 2021
*
*/
#ifndef FRACTION_H_
#define FRACTION_H_
// Rational number represented by an integer numerator and denominator.
class fraction
{
private:
    int numerator;      // top of the ratio
    int denominator;    // bottom of the ratio -- presumably non-zero; TODO confirm invariant
    // int* ptr;
public:
    fraction();                             // default constructor
    fraction(int, int);                     // construct from (numerator, denominator)
    fraction(const fraction&);              // copy constructor
    ~fraction();
    void set_numerator(int);
    void set_denominator(int);
    int get_numerator()const;
    int get_denominator()const;
    //friend fraction operator+(fraction ppf1, fraction ppf2);
    fraction operator+(const fraction&);    // member addition: *this + rhs
    fraction& operator++();                 // pre-increment
    fraction operator++(int );              // post-increment (dummy int tag)
};
#endif
|
import 'package:flutter/material.dart';
import 'package:note/src/features/auth/set_password.dart';
import 'package:provider/provider.dart';
import '../../../service/app_localizations.dart';
import '../../view/provider.dart';
/// Screen where the user enters their e-mail address to start the
/// password-reset flow; on successful validation it navigates to
/// [SetPassword].
class ForgotPage extends StatefulWidget {
  const ForgotPage({Key? key}) : super(key: key);
  @override
  State<ForgotPage> createState() => _ForgotPageState();
}
class _ForgotPageState extends State<ForgotPage> {
  // Dedicated form key so validation state is scoped to this page only.
  final _formKey3 = GlobalKey<FormState>();

  @override
  Widget build(BuildContext context) {
    return Scaffold(
      resizeToAvoidBottomInset: false,
      body: SafeArea(
        child: Form(
          key: _formKey3,
          child: Padding(
            padding: const EdgeInsets.only(left: 20, right: 20, top: 25),
            child: Column(
              crossAxisAlignment: CrossAxisAlignment.start,
              children: [
                Text(
                  AppLocalizations.of(context).forgotpassword,
                  style: const TextStyle(
                    fontSize: 30,
                    fontWeight: FontWeight.w600,
                  ),
                ),
                const Spacer(),
                TextFormField(
                  style: const TextStyle(color: Colors.white),
                  // E-mail validation is delegated to the shared provider.
                  validator: (value) =>
                      Provider.of<ProFunc>(context, listen: false)
                          .emailUp(value /*, box*/),
                  decoration: InputDecoration(
                    filled: true,
                    hintStyle: const TextStyle(color: Colors.grey),
                    // const added: compile-time constant colors
                    // (prefer_const_constructors lint).
                    fillColor: const Color(0xFF5B6262),
                    hintText: AppLocalizations.of(context).emailAddress,
                    border: OutlineInputBorder(
                      borderRadius: BorderRadius.circular(15),
                    ),
                  ),
                ),
                const Spacer(),
                Container(
                  width: double.infinity,
                  height: 60,
                  decoration: BoxDecoration(
                    borderRadius: BorderRadius.circular(60),
                  ),
                  child: ElevatedButton(
                    style: ElevatedButton.styleFrom(
                      backgroundColor: const Color(0xFF36BFFA),
                    ),
                    onPressed: () {
                      // Only navigate when the e-mail passes validation.
                      if (_formKey3.currentState!.validate()) {
                        Navigator.push(
                          context,
                          MaterialPageRoute(
                            builder: (BuildContext context) =>
                                const SetPassword(),
                          ),
                        );
                      }
                    },
                    child: Text(AppLocalizations.of(context).next),
                  ),
                ),
                const SizedBox(height: 40),
              ],
            ),
          ),
        ),
      ),
    );
  }
}
|
# 复习语法
# 用栈实现队列[力扣232](https://leetcode.cn/problems/implement-queue-using-stacks/)
**题目描述**:

**重点或思路总结**:
## 方法一:用两个栈实现队列

在push数据的时候,只要数据放进输入栈就好,**但在pop的时候,操作就复杂一些,输出栈如果为空,就把进栈数据全部导入进来(注意是全部导入)**,再从出栈弹出数据,如果输出栈不为空,则直接从出栈弹出数据就可以了。
最后如何判断队列为空呢?**如果进栈和出栈都为空的话,说明模拟的队列为空了。**
```python
class MyQueue:
    """FIFO queue built from two LIFO stacks (amortised O(1) per op)."""

    def __init__(self):
        self.in_stack = []   # receives every push
        self.out_stack = []  # serves pops/peeks in FIFO order

    def push(self, x: int) -> None:
        """Append x to the back of the queue."""
        self.in_stack.append(x)

    def _drain(self) -> None:
        """Move all pending input elements onto the output stack, reversing them."""
        while self.in_stack:
            self.out_stack.append(self.in_stack.pop())

    def pop(self) -> int:
        """Remove and return the front element; None when the queue is empty."""
        if self.empty():
            return None
        if not self.out_stack:
            self._drain()
        return self.out_stack.pop()

    def peek(self) -> int:
        """Return the front element without removing it; None when empty."""
        if self.empty():
            return None
        if not self.out_stack:
            self._drain()
        return self.out_stack[-1]

    def empty(self) -> bool:
        """True when neither stack holds an element."""
        return not self.in_stack and not self.out_stack
# Your MyQueue object will be instantiated and called as such:
# obj = MyQueue()
# obj.push(x)
# param_2 = obj.pop()
# param_3 = obj.peek()
# param_4 = obj.empty()
```
## 复用一部分
pop和peek可以复用一部分,功能相近的函数要抽象出来,不要大量的复制粘贴,很容易出问题!
```python
class MyQueue:
    """Queue via two stacks; peek() reuses pop() and pushes the value back."""

    def __init__(self):
        self.in_stack = []
        self.out_stack = []

    def push(self, x: int) -> None:
        self.in_stack.append(x)

    def pop(self) -> int:
        """Remove and return the front element, or None when empty."""
        if self.empty():
            return None
        if not self.out_stack:
            while self.in_stack:
                self.out_stack.append(self.in_stack.pop())
        return self.out_stack.pop()

    def peek(self) -> int:
        """Return the front element without removing it, or None when empty.

        Bug fix: the original restored the popped value only when it was
        truthy (`if ans:`), so a front value of 0 was silently dropped
        from the queue.
        """
        ans = self.pop()
        if ans is not None:
            self.out_stack.append(ans)
        return ans

    def empty(self) -> bool:
        return (not self.in_stack) and (not self.out_stack)
# Your MyQueue object will be instantiated and called as such:
# obj = MyQueue()
# obj.push(x)
# param_2 = obj.pop()
# param_3 = obj.peek()
# param_4 = obj.empty()
```
# 用队列实现栈[力扣225](https://leetcode.cn/problems/implement-stack-using-queues/)

## 两个deque
两个deque,deque的pop方法:popleft()

```python
class MyStack:
    """Stack on two deques: rotate all but the last element into the helper."""

    def __init__(self):
        self.main_que = deque()
        self.aid_que = deque()

    def push(self, x: int) -> None:
        self.main_que.append(x)

    def pop(self) -> int:
        """Remove and return the top element, or None when empty."""
        if self.empty():
            return None
        while len(self.main_que) != 1:
            self.aid_que.append(self.main_que.popleft())
        ans = self.main_que.popleft()
        # The helper now holds the surviving elements; swap the roles back.
        self.aid_que, self.main_que = self.main_que, self.aid_que
        return ans

    def top(self) -> int:
        """Return the top element without removing it, or None when empty.

        Bug fix: the original unconditionally pushed the popped value back,
        so calling top() on an empty stack appended None to the queue and
        corrupted empty().
        """
        ans = self.pop()
        if ans is not None:
            self.push(ans)
        return ans

    def empty(self) -> bool:
        return not self.main_que
# Your MyStack object will be instantiated and called as such:
# obj = MyStack()
# obj.push(x)
# param_2 = obj.pop()
# param_3 = obj.top()
# param_4 = obj.empty()
```
## 用单队列实现
一个队列在模拟栈弹出元素的时候只要将队列头部的元素(除了最后一个元素外) 重新添加到队列尾部,此时再去弹出元素就是栈的顺序了。
```python
class MyStack:
    """Stack on a single deque: rotating the first n-1 elements to the back
    leaves the most recently pushed element at the front."""

    def __init__(self):
        self.que = deque()

    def push(self, x: int) -> None:
        self.que.append(x)

    def pop(self) -> int:
        """Remove and return the top element, or None when empty."""
        if not self.que:
            return None
        for _ in range(len(self.que) - 1):
            self.que.append(self.que.popleft())
        return self.que.popleft()

    def top(self) -> int:
        """Return the top element without removing it, or None when empty.

        Bug fix: the original pushed the popped value back even when it was
        None (empty stack), appending None to the queue so the stack no
        longer reported empty.
        """
        ans = self.pop()
        if ans is not None:
            self.push(ans)
        return ans

    def empty(self) -> bool:
        return not self.que
# Your MyStack object will be instantiated and called as such:
# obj = MyStack()
# obj.push(x)
# param_2 = obj.pop()
# param_3 = obj.top()
# param_4 = obj.empty()
```
# 有效的括号[力扣20](https://leetcode.cn/problems/valid-parentheses/)

## 使用栈
其实偷偷用两个列表实现了哈希表

```python
class Solution:
    def isValid(self, s: str) -> bool:
        """Check that every bracket in s is closed by the matching bracket
        in the correct (stack / LIFO) order."""
        openers = ['(', '[', '{']
        closers = [')', ']', '}']
        stack = []
        for ch in s:
            if ch in openers:
                stack.append(ch)
            if ch in closers:
                if not stack:
                    return False  # closing bracket with nothing open
                expected = closers[openers.index(stack.pop())]
                if ch != expected:
                    return False  # mismatched pair, e.g. "(]"
        # Valid only if no opener is left dangling.
        return not stack
```
## 哈希表+栈
```python
class Solution:
    def isValid(self, s: str) -> bool:
        """Validate bracket nesting using a stack plus an opener->closer map."""
        pairs = {'[': ']', '{': '}', '(': ')'}
        stack = []
        for ch in s:
            if ch in pairs:
                # Opening bracket: remember it.
                stack.append(ch)
            elif ch in pairs.values():
                # Closing bracket: must match the most recent opener.
                if not stack or ch != pairs[stack.pop()]:
                    return False
        return not stack
```
# 删除字符串中的所有相邻重复项[力扣1047](https://leetcode.cn/problems/remove-all-adjacent-duplicates-in-string/)

## 栈

P.S:
**递归的实现就是:每一次递归调用都会把函数的局部变量、参数值和返回地址等压入调用栈中,**然后递归返回的时候,从栈顶弹出上一次递归的各项参数,所以这就是递归为什么可以返回上一层位置的原因。
相信大家应该遇到过一种错误就是栈溢出,系统输出的异常是Segmentation fault(当然不是所有的Segmentation fault 都是栈溢出导致的) ,如果你使用了递归,就要想一想是不是无限递归了,那么系统调用栈就会溢出。
而且在企业项目开发中,尽量不要使用递归!在项目比较大的时候,由于参数多,全局变量等等,使用递归很容易判断不充分return的条件,非常容易无限递归(或者递归层级过深),造成栈溢出错误(这种问题还不好排查!)
```python
class Solution:
    def removeDuplicates(self, s: str) -> str:
        """Repeatedly delete adjacent equal character pairs; return what remains."""
        kept = []
        for ch in s:
            if kept and kept[-1] == ch:
                # ch cancels out the previous character.
                kept.pop()
            else:
                kept.append(ch)
        return ''.join(kept)
```
## 双指针
原地修改 双指针
使用双指针模拟栈,如果不让用栈可以作为备选方法
记得修改返回的列表长度
```python
class Solution:
    def removeDuplicates(self, s: str) -> str:
        """Two-pointer, in-place simulation of the stack solution: buf[:slow]
        plays the role of the stack."""
        buf = list(s)
        slow = 0
        for fast in range(len(buf)):
            buf[slow] = buf[fast]
            if slow > 0 and buf[slow - 1] == buf[fast]:
                slow -= 1  # cancel the adjacent pair
            else:
                slow += 1
        return ''.join(buf[:slow])
```
# 逆波兰表达式求值

## 使用栈

**针对问题:**
Python 中没有一个函数可以判断一个字符串是否为合理的整数(包括正、负数)。str.isdigit() 可以判断正数,但是无法判断负数。
**解决方案:**
使用 int() 函数,并做 try-except 。
- 如果是整数,那么可以用 int() 转成数字;
- 如果是运算符,那么 int() 会报错,从而进入 except 中。
不用try-except版本:
```python
class Solution:
    def evalRPN(self, tokens: List[str]) -> int:
        """Evaluate a Reverse Polish Notation expression with an operand stack."""
        operators = ('+', '-', '*', '/')
        stack = []
        for tok in tokens:
            if tok in operators:
                if not stack:
                    return False  # malformed-input guard kept from original
                right = stack.pop()
                left = stack.pop()
                stack.append(self.evaluate(right, tok, left))
            else:
                stack.append(int(tok))
        return stack[0]

    def evaluate(self, num2, op, num1) -> int:
        """Apply op to num1 (left operand) and num2 (right operand);
        division truncates toward zero via int()."""
        if op == '+':
            return int(num1 + num2)
        if op == '-':
            return int(num1 - num2)
        if op == '*':
            return int(num1 * num2)
        return int(num1 / num2)
```
使用try-except的版本:
```python
class Solution:
    def evalRPN(self, tokens: List[str]) -> int:
        """Evaluate RPN, classifying each token by attempting int() parsing.

        Fixes over the original: the bare ``except:`` (which would also
        swallow KeyboardInterrupt/SystemExit) is narrowed to ValueError --
        the only exception int() raises for an operator token -- and the
        unused ``cal_list`` variable is removed.
        """
        stack = []
        for tok in tokens:
            try:
                stack.append(int(tok))
            except ValueError:  # not a number, so it must be an operator
                num2 = stack.pop()  # right operand
                num1 = stack.pop()  # left operand
                stack.append(self.evaluate(num2, tok, num1))
        return stack[0]

    def evaluate(self, num2, op, num1) -> int:
        """Apply op to num1 (left) and num2 (right); '/' truncates toward zero."""
        if op == '+':
            return int(num1 + num2)
        if op == '-':
            return int(num1 - num2)
        if op == '*':
            return int(num2 * num1)
        else:
            return int(num1 / num2)
```
## 很简洁,用 lambda
```python
class Solution:
    def evalRPN(self, tokens: List[str]) -> int:
        """Evaluate RPN using a token -> lambda dispatch table."""
        ops = {
            '+': lambda a, b: a + b,
            '-': lambda a, b: b - a,
            '*': lambda a, b: a * b,
            '/': lambda a, b: int(b / a),  # truncate toward zero
        }
        stack = []
        for tok in tokens:
            if tok in ops:
                a = int(stack.pop())  # right operand (popped first)
                b = int(stack.pop())  # left operand
                stack.append(ops[tok](a, b))
            else:
                # Numbers stay as strings until popped, as in the original.
                stack.append(tok)
        return int(stack[0])
```
# 滑动窗口最大值 [力扣 239](https://leetcode.cn/problems/sliding-window-maximum/)
**题目描述**

## 暴力法 但是超时了
```python
class Solution:
    def maxSlidingWindow(self, nums: List[int], k: int) -> List[int]:
        """Brute force: max of every length-k slice. O(n*k) -- times out on
        large inputs, kept for reference."""
        return [max(nums[i - k + 1:i + 1]) for i in range(k - 1, len(nums))]
```
## 有序队列
此时我们需要一个队列,这个队列呢,放进去窗口里的元素,然后随着窗口的移动,队列也一进一出,每次移动之后,队列告诉我们里面的最大值是什么
这个队列需要定义几个函数:pop,push,returnMax
其实队列没有必要维护窗口里的所有元素,只需要维护有可能成为窗口里最大值的元素就可以了,同时保证队列里的元素数值是由大到小的。
那么这个维护元素单调递减的队列就叫做单调队列,即单调递减或单调递增的队列。C++中没有直接支持单调队列,需要我们自己来实现一个单调队列

不要以为实现的单调队列就是 对窗口里面的数进行排序,如果排序的话,那和优先级队列又有什么区别了呢。
设计单调队列的时候,pop,和push操作要保持如下规则:
1. pop(value):如果窗口移除的元素value等于单调队列的出口元素,那么队列弹出元素,否则不用任何操作
2. push(value):如果push的元素value大于入口元素的数值,那么就将队列入口的元素弹出,直到push元素的数值小于等于队列入口元素的数值为止
保持如上规则,每次窗口移动的时候,只要问que.front()就可以返回当前窗口的最大值。
为了更直观的感受到单调队列的工作过程,以题目示例为例,输入: nums = [1,3,-1,-3,5,3,6,7], 和 k = 3,动画如下:

```python
class my_queue:
    """Monotonically decreasing deque: the front always holds the current
    window maximum."""

    def __init__(self):
        self.queue = deque()

    def pop(self, value):
        """Drop value from the front only if it is the element that just
        left the window (otherwise it was already evicted by push)."""
        if self.queue and self.queue[0] == value:
            self.queue.popleft()

    def push(self, value):
        """Append value after evicting every smaller element from the tail,
        preserving the decreasing order."""
        while self.queue and value > self.queue[-1]:
            self.queue.pop()
        self.queue.append(value)

    def front(self):
        """Current maximum of the window."""
        return self.queue[0]
class Solution:
    def maxSlidingWindow(self, nums: List[int], k: int) -> List[int]:
        """O(n) sliding-window maximum driven by the monotonic my_queue."""
        que = my_queue()
        ans = []
        # Prime the queue with the first window and record its maximum.
        for i in range(k):
            que.push(nums[i])
        ans.append(que.front())
        # Slide: evict the element leaving the window, admit the new one.
        for j in range(k,len(nums)):
            que.pop(nums[j-k])
            que.push(nums[j])
            ans.append(que.front())
        return ans
```
## 大顶堆
对于「最大值」,我们可以想到一种非常合适的数据结构,那就是优先队列(堆),其中的大根堆可以帮助我们实时维护一系列元素中的最大值。
对于本题而言,初始时,我们将数组nums的前 k 个元素放入优先队列中。每当我们向右移动窗口时,我们就可以把一个新的元素放入优先队列中,此时堆顶的元素就是堆中所有元素的最大值。然而这个最大值可能并不在滑动窗口中,在这种情况下,这个值在数组 nums中的位置出现在滑动窗口左边界的左侧。因此,当我们后续继续向右移动窗口时,这个值就永远不可能出现在滑动窗口中了,我们可以将其永久地从优先队列中移除。
我们不断地移除堆顶的元素,直到其确实出现在滑动窗口中。此时,堆顶元素就是滑动窗口中的最大值。为了方便判断堆顶元素与滑动窗口的位置关系,我们可以在优先队列中存储二元组 (num,index)表示元素num 在数组中的下标为index。
```python
class Solution:
    def maxSlidingWindow(self, nums: List[int], k: int) -> List[int]:
        """Sliding-window maximum via a lazy-deletion max-heap.

        heapq is a min-heap, so values are stored negated together with
        their index; entries whose index has fallen out of the window are
        discarded only when they surface at the heap top.
        """
        heap = [(-value, idx) for idx, value in enumerate(nums[:k])]
        heapq.heapify(heap)
        result = [-heap[0][0]]
        for idx in range(k, len(nums)):
            heapq.heappush(heap, (-nums[idx], idx))
            # Purge stale maxima that lie left of the window start.
            while heap[0][1] <= idx - k:
                heapq.heappop(heap)
            result.append(-heap[0][0])
        return result
```
## 数据分段预处理


```python
class Solution:
    def maxSlidingWindow(self, nums: List[int], k: int) -> List[int]:
        """O(n) block preprocessing: split nums into blocks of size k and
        precompute prefix maxima (left-to-right within a block) and suffix
        maxima (right-to-left). A window [i, i+k-1] spans at most two
        blocks, so its maximum is max(suf[i], pre[i+k-1])."""
        n = len(nums)
        pre = [0] * n  # max over the block prefix ending at i
        suf = [0] * n  # max over the block suffix starting at i
        for i, value in enumerate(nums):
            pre[i] = value if i % k == 0 else max(pre[i - 1], value)
        for i in range(n - 1, -1, -1):
            if i == n - 1 or (i + 1) % k == 0:
                suf[i] = nums[i]
            else:
                suf[i] = max(nums[i], suf[i + 1])
        return [max(suf[i], pre[i + k - 1]) for i in range(n - k + 1)]
```
# 前 K 个高频元素 [力扣 347](https://leetcode.cn/problems/top-k-frequent-elements/)
**题目描述**

## 偷偷用collection
```python
class Solution:
    def topKFrequent(self, nums: List[int], k: int) -> List[int]:
        """Delegate counting and top-k selection entirely to Counter."""
        freq = collections.Counter(nums)
        return [value for value, _ in freq.most_common(k)]
```
## 先哈希表统计次数,然后用小顶堆排序
哈希表的时候key是数字,value是统计次数,其中获取已有次数(缺省为 0)可以使用 dic.get(nums[i], 0)
小顶堆中,依据freq排序,即push进去(freq, num),对于小顶堆的处理可以用push进去-freq
```python
class Solution:
    def topKFrequent(self, nums: List[int], k: int) -> List[int]:
        """Count frequencies by hand, then pull the k most frequent values
        off a heap keyed on negated frequency (heapq is a min-heap)."""
        counts = {}
        for num in nums:
            # get(): existing count or 0 on first sight.
            counts[num] = counts.get(num, 0) + 1
        heap = [(-freq, value) for value, freq in counts.items()]
        heapq.heapify(heap)
        return [heapq.heappop(heap)[1] for _ in range(k)]
```
或者,控制一下小顶堆的节点数,只维护 k 个节点:每次入堆后,若堆中元素超过 k 个,就弹出堆顶(频率最小)的元素
```python
class Solution:
    def topKFrequent(self, nums: List[int], k: int) -> List[int]:
        """Top-k frequent elements with the heap bounded to k entries.

        Fix: the original block was a verbatim copy of the previous
        (unbounded-heap) version and never limited the heap size, despite
        the surrounding text describing a k-bounded heap. Here a min-heap
        keyed on positive frequency holds at most k entries: whenever it
        overflows, the least frequent entry is evicted.
        """
        dic = {}
        for num in nums:
            dic[num] = dic.get(num, 0) + 1
        pri_que = []  # min-heap of (freq, value), size capped at k
        for key, freq in dic.items():
            heapq.heappush(pri_que, (freq, key))
            if len(pri_que) > k:
                heapq.heappop(pri_que)  # evict the current least-frequent
        # Pops come least-frequent first; reverse for most-frequent first.
        ans = []
        while pri_que:
            ans.append(heapq.heappop(pri_que)[1])
        ans.reverse()
        return ans
```
## 一个看起来有点厉害的,手写了堆的实现的代码
有缘的时候再学习一下吧[解题链接](https://leetcode.cn/problems/top-k-frequent-elements/solution/python-dui-pai-xu-by-xxinjiee/)
```python
class Solution:
    def topKFrequent(self, nums: List[int], k: int) -> List[int]:
        # Hand-rolled binary min-heap of size k+1; index 0 is a (0,0)
        # sentinel so child indices are root*2 and root*2+1. Entries are
        # (value, frequency) pairs ordered by frequency (index [1]).
        def sift_down(arr, root, k):
            """Sift down in O(log k): while the new root is greater than a child, keep moving it down."""
            val = arr[root] # assignment-style swap, as in insertion sort
            while root<<1 < k:
                child = root << 1
                # pick the smaller of the two children to exchange with the parent
                if child|1 < k and arr[child|1][1] < arr[child][1]:
                    child |= 1
                # if the child < the new value, move it up; stop once ordered
                if arr[child][1] < val[1]:
                    arr[root] = arr[child]
                    root = child
                else:
                    break
            arr[root] = val
        def sift_up(arr, child):
            """Sift up in O(log k): while the newly added node is smaller than its parent, keep moving it up."""
            val = arr[child]
            while child>>1 > 0 and val[1] < arr[child>>1][1]:
                arr[child] = arr[child>>1]
                child >>= 1
            arr[child] = val
        stat = collections.Counter(nums)
        stat = list(stat.items())
        heap = [(0,0)]
        # build a heap of size k+1: append each new element at the tail, then sift up
        for i in range(k):
            heap.append(stat[i])
            sift_up(heap, len(heap)-1)
        # maintain the size-(k+1) heap: if an element beats the top, replace it and sift down
        for i in range(k, len(stat)):
            if stat[i][1] > heap[1][1]:
                heap[1] = stat[i]
                sift_down(heap, 1, k+1)
        return [item[0] for item in heap[1:]]
```
|
<?php
// Theme breadcrumb trail: first resolves a fallback title for the current
// query context, then echoes a Home link followed by context-specific
// crumbs (WooCommerce, single posts, categories, pages, tags, date/author
// archives, pagination, search, 404).
// NOTE(review): several values are echoed into HTML attributes/content
// without esc_html()/esc_attr() (post titles, tag/category names, the
// search query) -- confirm they are safe in this theme or add escaping.
// Get the query & post information
global $post, $wp_query;
$category = get_the_category();
// Settings
$separator = '>';
$id = 'breadcrumbs';
$class = 'breadcrumbs';
$home_title = esc_html__('Home', 'snshadona');
// Fallback title -- only printed by the final else branch at the bottom.
$breadcrumbs_title = '';
if(is_home()){
    $breadcrumbs_title = esc_html__('blog', 'snshadona');
}elseif(class_exists('WooCommerce') &&is_shop()){
    $breadcrumbs_title = esc_html__('shop', 'snshadona');
}elseif( class_exists('WooCommerce') && is_woocommerce() ){
    if(is_product()){
        // Use the product's first category as the title.
        $product_cats = wp_get_post_terms( get_the_ID(), 'product_cat' );
        $single_cat = array_shift( $product_cats );
        $breadcrumbs_title = $single_cat->name;
    }else{
        $breadcrumbs_title = get_cat_name($wp_query->get_queried_object()->term_id);
    }
}elseif (is_single() && isset($category[0])){
    $breadcrumbs_title = $category[0]->cat_name;
}else if ( is_category() ) {
    $breadcrumbs_title = $category[0]->cat_name;
}else if ( is_page() ) {
    $breadcrumbs_title = get_the_title();
}else if ( is_tag() ) {
    // Tag page
    // Get tag information
    $term_id = get_query_var('tag_id');
    $taxonomy = 'post_tag';
    $args ='include=' . $term_id;
    $terms = get_terms( $taxonomy, $args );
    $breadcrumbs_title = $terms[0]->name;
} else if ( is_author() ) {
    // Get the author information
    global $author;
    $userdata = get_userdata( $author );
    $breadcrumbs_title = $userdata->display_name;
}
// Build the breadcrums
echo '<div id="' . $id . '" class="' . $class . '">';
// Home page
echo '<a class="home" href="' . esc_url( home_url('/') ) . '" title="' . $home_title . '">'.$home_title.'</a>';
echo '<span class="navigation-pipe">' . $separator . '</span>';
if ( class_exists('WooCommerce') && is_woocommerce() ) {
    // Delegate the remainder of the trail to WooCommerce's breadcrumb walker.
    $args = '';
    $args = wp_parse_args( $args, apply_filters( 'woocommerce_breadcrumb_defaults', array(
        'delimiter' => '<span class="navigation-pipe">' . $separator . '</span>',
        'wrap_before' => '',
        'wrap_after' => '',
        'before' => '',
        'after' => '',
        'home' => ''
    ) ) );
    $breadcrumbs = new WC_Breadcrumb();
    if ( $args['home'] ) {
        $breadcrumbs->add_crumb( $args['home'], apply_filters( 'woocommerce_breadcrumb_home_url', esc_url( home_url('/') ) ) );
    }
    $args['breadcrumb'] = $breadcrumbs->generate();
    wc_get_template( 'global/breadcrumb.php', $args );
}
elseif ( is_single() && isset($category[0])) {
    // Single post (Only display the first category)
    echo '<a class="bread-cat bread-cat-' . $category[0]->term_id . ' bread-cat-' . $category[0]->category_nicename . '" href="' . esc_url( get_category_link($category[0]->term_id ) ) . '" title="' . $category[0]->cat_name . '">' . $category[0]->cat_name . '</a>';
    echo '<span class="navigation-pipe">' . $separator . '</span>';
    echo '<span class="item-current item-' . $post->ID . '">' . get_the_title() . '</span>';
} else if ( is_category() ) {
    // Category page
    echo '<span class="bread-current bread-cat-' . $category[0]->term_id . ' bread-cat-' . $category[0]->category_nicename . '">' . $category[0]->cat_name . '</span>';
} else if ( is_page() ) {
    // Standard page
    if( $post->post_parent ){
        $parents = '';
        // If child page, get parents
        $anc = get_post_ancestors( $post->ID );
        // Get parents in the right order
        $anc = array_reverse($anc);
        // Parent page loop
        foreach ( $anc as $ancestor ) {
            $parents .= '<a class="bread-parent bread-parent-' . $ancestor . '" href="' . esc_url( get_permalink($ancestor) ) . '" title="' . get_the_title($ancestor) . '">' . get_the_title($ancestor) . '</a>';
            $parents .= '<span class="navigation-pipe">' . $separator . '</span>';
        }
        // Display parent pages
        echo $parents;
        // Current page
        echo '<span title="' . get_the_title() . '"> ' . get_the_title() . '</span>';
    } else {
        // Just display current page if not parents
        echo '<span class="bread-current bread-' . $post->ID . '"> ' . get_the_title() . '</span>';
    }
} else if ( is_tag() ) {
    // Tag page
    // Get tag information
    $term_id = get_query_var('tag_id');
    $taxonomy = 'post_tag';
    $args ='include=' . $term_id;
    $terms = get_terms( $taxonomy, $args );
    // Display the tag name
    echo '<span class="bread-current bread-tag-' . $terms[0]->term_id . ' bread-tag-' . $terms[0]->slug . '">' . $terms[0]->name . '</span>';
} elseif ( is_day() ) {
    // Day archive
    // Year link
    echo '<a class="bread-year bread-year-' . get_the_time('Y') . '" href="' . esc_url( get_year_link( get_the_time('Y') ) ) . '" title="' . get_the_time('Y') . '">' . get_the_time('Y') . ' Archives</a>';
    echo '<span class="navigation-pipe">' . $separator . ' </span>';
    // Month link
    echo '<a class="bread-month bread-month-' . get_the_time('m') . '" href="' . esc_url( get_month_link( get_the_time('Y'), get_the_time('m') ) ) . '" title="' . get_the_time('M') . '">' . get_the_time('M') . ' Archives</a>';
    // NOTE(review): this separator span is never closed (missing '</span>') -- confirm intended markup.
    echo '<span class="navigation-pipe">' . $separator . '';
    // Day display
    echo '<span class="bread-current bread-' . get_the_time('j') . '"> ' . get_the_time('jS') . ' ' . get_the_time('M') . ' Archives</span>';
} else if ( is_month() ) {
    // Month Archive
    // Year link
    echo '<a class="bread-year bread-year-' . get_the_time('Y') . '" href="' . esc_url( get_year_link( get_the_time('Y') ) ) . '" title="' . get_the_time('Y') . '">' . get_the_time('Y') . ' Archives</a>';
    echo '<span class="navigation-pipe">' . $separator . '</span>';
    // Month display
    echo '<span class="bread-month bread-month-' . get_the_time('m') . '" title="' . get_the_time('M') . '">' . get_the_time('M') . ' Archives</span>';
} else if ( is_year() ) {
    // Display year archive
    echo '<span class="bread-current bread-current-' . get_the_time('Y') . '" title="' . get_the_time('Y') . '">' . get_the_time('Y') . ' Archives</span>';
} else if ( is_author() ) {
    // Auhor archive
    // Get the author information
    global $author;
    $userdata = get_userdata( $author );
    // Display author name
    echo '<span class="bread-current bread-current-' . $userdata->user_nicename . '" title="' . $userdata->display_name . '">' . 'Author: ' . $userdata->display_name . '</span>';
} else if ( get_query_var('paged') ) {
    // Paginated archives
    echo '<span class="bread-current bread-current-' . get_query_var('paged') . '" title="Page ' . get_query_var('paged') . '">'.esc_html__('Page', 'snshadona') . ' ' . get_query_var('paged') . '</span>';
} else if ( is_search() ) {
    // Search results page
    echo '<span class="bread-current bread-current-' . get_search_query() . '" title="Search results for: ' . get_search_query() . '">Search results for: ' . get_search_query() . '</span>';
} elseif ( is_404() ) {
    // 404 page
    echo '<span>' . 'Error 404' . '</span>';
}else{
    // Fallback: print the title resolved at the top of the file.
    echo $breadcrumbs_title;
}
echo '</div>';
?>
|
import React, { useContext } from "react";
import { Text, Vibration } from "react-native";
import { Icon, Tooltip } from "react-native-elements";
import { colorTheme, ThemeContext } from "../../common/context/ThemeContext";
// React Navigation screen-options factory for the Post screen: themed
// title showing the post id, themed header background, and a header-right
// edit icon with a long-press tooltip.
// NOTE(review): useContext/useRef/useEffect are called inside the
// headerTitle/headerRight option callbacks; this relies on React
// Navigation rendering them as components -- confirm for the navigator
// version in use.
export const postOptions = ({ navigation, route }) => ({
  headerTitle: () => {
    const { theme, setTheme } = useContext(ThemeContext);
    return (
      <Text
        style={{
          backgroundColor: colorTheme[theme].primary,
          fontWeight: "bold",
          fontSize: 24,
          color: colorTheme[theme].secondary,
        }}
      >
        Post {route?.params?.props?.id}
      </Text>
    );
  },
  headerStyle: {
    backgroundColor: colorTheme[route?.params?.theme].primary,
  },
  headerRight: () => {
    const tooltipRef = React.useRef(null);
    // Mirror the route's props into the navigator options whenever they change.
    React.useEffect(() => {
      navigation.setOptions({ props: route?.params?.props });
    }, [route?.params?.props]);
    const { theme, setTheme } = useContext(ThemeContext);
    return (
      <>
        <Tooltip
          ref={tooltipRef}
          toggleAction="onLongPress"
          backgroundColor={colorTheme[theme].secondary}
          highlightColor={colorTheme[theme].primary}
          pointerColor={colorTheme[theme].primary}
          overlayColor="rgba(255, 255, 255, 0)"
          popover={
            <Text style={{ color: colorTheme[theme].primary }}>Edit Post</Text>
          }
        >
          <Icon
            size={34}
            style={{ alignContent: "center" }}
            name="create-outline"
            type="ionicon"
            onPress={() => {
              navigation.navigate("EditPost", {
                props: route?.params?.props,
                theme,
              });
            }}
            onLongPress={() => {
              // Long press: show the tooltip with haptic feedback, then
              // auto-dismiss it after two seconds.
              tooltipRef.current?.toggleTooltip();
              Vibration.vibrate(20);
              setTimeout(() => {
                tooltipRef.current?.toggleTooltip();
              }, 2000);
            }}
          ></Icon>
        </Tooltip>
      </>
    );
  },
});
|
import { NgModule } from '@angular/core';
import { Routes, RouterModule } from '@angular/router';
// Import Containers
import { DefaultLayoutComponent } from './containers';
import { P404Component } from './views/error/404.component';
// Top-level route table: the bare root redirects into the admin shell,
// which lazy-loads the admin feature module; any unknown URL shows a 404.
export const routes: Routes = [
  {
    path: '',
    redirectTo: 'admin',
    pathMatch: 'full',
  },
  {
    path: 'admin',
    component: DefaultLayoutComponent,
    data: {
      title: 'Admin',
    },
    children: [
      {
        path: '',
        // Lazy-loaded: the admin bundle is fetched on first navigation.
        loadChildren: () =>
          import('./views/admin/admin.module').then(
            (m) => m.AdminModule
          ),
      },
    ],
  },
  // Catch-all for unmatched URLs.
  { path: '**', component: P404Component },
];
@NgModule({
  imports: [
    RouterModule.forRoot(routes, {
      // Re-run navigation (guards/resolvers) even when the target URL is
      // the current URL.
      onSameUrlNavigation: 'reload',
    }),
  ],
  exports: [RouterModule],
})
export class AppRoutingModule {}
|
import React, { FC, useEffect, useState } from "react"
import "./MainPage.scss"
import axios from "axios"
// Shape of one article returned by the NewsAPI /v2/everything endpoint.
interface NewsItem {
  source: {
    // NOTE(review): NewsAPI documents source.id as frequently null and
    // string-valued when present -- confirm this `number` typing.
    id: number
    name: string
  }
  author: string
  title: string
  description: string
  url: string
  urlToImage: string
  publishedAt: string
  content: string
}
// NOTE(review): the API key is hard-coded and committed to source control;
// move it to an environment variable. It is also duplicated inside `api`,
// leaving the `apikey` constant unused.
const apikey = "64569a53da124124b14e7cbbb71d210a"
const api =
  "https://newsapi.org/v2/everything?q=tesla&from=2022-05-10&sortBy=publishedAt&apiKey=64569a53da124124b14e7cbbb71d210a"
/**
 * Landing page: hero section with auth buttons plus a news feed fetched
 * once from NewsAPI on mount.
 *
 * Fixes: `news.length && ...` rendered a literal "0" while the list was
 * empty (a number is a valid React node); and `key={el.source.id}` used
 * NewsAPI's source id, which is often null/duplicated -- the article URL
 * is unique per item.
 */
const MainPage: FC = () => {
  const [news, setNews] = useState<NewsItem[]>([])

  useEffect(() => {
    // Fetch once on mount. NOTE(review): rejections are unhandled, as in
    // the original -- consider a .catch with user-visible error state.
    axios.get(api).then((res) => setNews(res.data.articles))
  }, [])

  return (
    <div className="mainpage">
      <section className="mainpage__landing landing">
        <h1 className="landing__title">Стань волонтером прямо сейчас</h1>
        <div className="landing__buttons">
          <button className="landing__button">Регистрация</button>
          <button className="landing__button">Войти</button>
        </div>
      </section>
      <section className="mainpage__news news">
        {news.length > 0 &&
          news.map((el) => (
            <div key={el.url} className="news__item">
              <div className="news__left">
                <img
                  src={el.urlToImage || ""}
                  alt="news img"
                  className="news__img"
                />
              </div>
              <div className="news__right">
                <h2 className="news__title">{el.title}</h2>
              </div>
            </div>
          ))}
      </section>
    </div>
  )
}
|
/*!
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*
* \file FaultOwner.h
* \brief Base class for objects implementing the IFaultOwner interface
*
*
* \details
*
*/
#ifndef FAULTOWNER_H_
#define FAULTOWNER_H_
#include <stdexcept>
#include <memory>
#include <vector>
#include <set>
#include "Loggable.h"
#include "IFault.h"
#include "IFaultsList.h"
#include "IFaultOwner.h"
namespace Mplane {
/**
 * Base implementation of IFaultOwner: tracks an owner name and the set of
 * fault names this owner manages.
 */
class FaultOwner : public IFaultOwner
{
public:
    /**
     * Create an IFaultOwner with a name and a list of the faults that this owner manages
     * @param name
     * @param faultNames
     */
    FaultOwner(const std::string& name, const std::set<std::string>& faultNames) ;
    /**
     * Create a simple IFaultOwner with a name let it decide what it should own later
     * @param name
     */
    explicit FaultOwner(const std::string& name) ;
    virtual ~FaultOwner() ;
    /**
     * Called by a service or any other regular task. Causes the IFaultOwner to update it's owned faults
     */
    virtual void updateFaults() override ;
    /**
     * The pure virtual method that all owners must implement.
     *
     * It is called when the fault integrator reaches the raise or clear
     * threshold.
     *
     * If the owner has no use of this callback then their implementation
     * will just leave the method implementation empty.
     *
     * @param fault The fault object instance
     */
    virtual void faultCallback(IFault& fault) override;
    /**
     * The pure virtual method that all users must implement.
     *
     * It is called as part of the fault handling restart sequence.
     *
     * All fault owners will need to implement this callback and to handle
     * the restarting of their faults.
     */
    virtual void resetFaults() override;
    /**
     * Gets the fault owner name text string
     *
     * @return The fault owner name string
     */
    virtual std::string getName() const override;
    /**
     * Get the list of fault names that this IFaultOwner owns
     * @return list of names
     */
    virtual std::set<std::string> getFaultNames() const override ;
    /**
     * The show method, displays its current status.
     */
    virtual void show(std::ostream& os) const override;
    /**
     * Hook provided for derived objects. They can use this to support clearing out things like hardware sticky registers
     * from which the fault is derived.
     */
    virtual void clearFaults() override ;
    /**
     * Add a single fault to the set
     */
    virtual void addFault(const std::string& name) override ;
    /**
     * Add a set of faults to the set
     */
    virtual void addFault(const std::set<std::string>& names) override ;
    /**
     * Helper method used by make_faultOwner(). Does the additional work after the alarm shared pointer has been created
     */
    static void makeFaultOwner(std::shared_ptr<IFaultOwner> owner) ;
private:
    const std::string mName ;              //!< owner name reported by getName()
    std::set<std::string> mFaultNames ;    //!< names of the faults this owner manages
} ;
//-------------------------------------------------------------------------------------------------------------
/**
* All fault owners *MUST* be created via this helper template function. This ensures the shared pointer is created
* and registered to the IFaultsList. Expects T to be derived from IFaultOwner
*
* @return shared pointer to fault owner
*/
template <typename T>
inline std::shared_ptr<T> make_faultOwner()
{
	// create a new fault owner instance (assumes T is derived from IFaultOwner)
	std::shared_ptr<T> faultOwner(std::make_shared<T>()) ;
	// Finish off: register the new owner with the IFaultsList
	FaultOwner::makeFaultOwner(faultOwner) ;
	return faultOwner ;
}
}
#endif /* FAULTOWNER_H_ */
|
/**
* @description: ATENÇÃO, esta classe não deve ser instânciada diretamente, use um dos métodos Left ou Right
*/
module.exports = class Either {
constructor(left, right) {
this.left = left;
this.right = right;
}
static Left(left) {
return new Either(left, null);
}
static Right(right) {
return new Either(null, right);
}
static valorJaCadastrado(valor) {
return { message: `${valor} já cadastrado.` };
}
fold(leftFn, rightFn) {
return this.left !== null ? leftFn(this.left) : rightFn(this.right);
}
static dataRetornoMenorQueDataSaida = {
message: 'Data de retorno menor que a data de saída'
};
static livroComISBNJaEmprestadoPendenteUsuario = {
message: 'Livro com ISBN já emprestado ao usuário e ainda não devolvido'
};
};
|
package com.example.imagecrop
import android.content.Context
import android.graphics.Bitmap
import android.os.Bundle
import androidx.activity.ComponentActivity
import androidx.activity.compose.setContent
import androidx.compose.foundation.layout.fillMaxSize
import androidx.compose.material3.MaterialTheme
import androidx.compose.material3.Surface
import androidx.compose.material3.Text
import androidx.compose.runtime.Composable
import androidx.compose.runtime.MutableState
import androidx.compose.runtime.mutableStateOf
import androidx.compose.ui.Modifier
import androidx.compose.ui.geometry.Offset
import androidx.compose.ui.graphics.ImageBitmap
import androidx.compose.ui.graphics.asAndroidBitmap
import androidx.compose.ui.platform.LocalContext
import androidx.compose.ui.res.imageResource
import androidx.compose.ui.tooling.preview.Preview
import androidx.lifecycle.ViewModel
import com.example.imagecrop.ui.theme.ImageCropTheme
import kotlinx.coroutines.flow.MutableStateFlow
import kotlinx.coroutines.flow.StateFlow
import kotlinx.coroutines.flow.asStateFlow
// Single-activity entry point: installs the Compose content tree wrapped
// in the app theme.
class MainActivity : ComponentActivity() {
    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        setContent {
            ImageCropTheme {
                // A surface container using the 'background' color from the theme
                Surface(
                    modifier = Modifier.fillMaxSize(),
                    color = MaterialTheme.colorScheme.background
                ) {
                    imageCrop()
                }
            }
        }
    }
}
// Loads the bundled cat drawable and hands it to the crop UI at scale 1.0.
// NOTE(review): UiViewModel is constructed on every recomposition here;
// consider viewModel()/remember so state survives recomposition -- confirm
// intended behaviour.
@Composable
fun imageCrop(modifier: Modifier = Modifier) {
    var context = LocalContext.current
    var uiViewModel = UiViewModel(true, context = context)
    var imageBitmap = ImageBitmap.imageResource(R.drawable.cat)
    var bitmap = imageBitmap.asAndroidBitmap()
    crop(1.0f,bitmap,uiViewModel)
}
// Holds the crop screen's observable state, exposed read-only as a StateFlow.
// NOTE(review): a `data class` extending ViewModel (equals/copy semantics on
// a lifecycle object) and holding a Context is unusual -- confirm intended.
data class UiViewModel(var openDialog: Boolean, var context: Context): ViewModel() {
    private val _uiState = MutableStateFlow(UiState(context = context))
    val uiState: StateFlow<UiState> = _uiState.asStateFlow()
}
// Mutable state bag backing the crop screen.
data class UiState constructor(
    var allowCropBool: MutableState<Boolean> = mutableStateOf(false),   // cropping currently permitted
    var cropResultReady: MutableState<Boolean> = mutableStateOf(false), // a crop result is available
    var errorDialog: MutableState<Boolean> = mutableStateOf(false),     // show the error dialog
    var cropResult: MutableState<Bitmap?> = mutableStateOf(null),       // the cropped bitmap, once produced
    var context: Context,
    // Which edge of the crop square is being dragged.
    var westFlag: MutableState<Boolean> = mutableStateOf(false),
    var northFlag: MutableState<Boolean> = mutableStateOf(false),
    var eastFlag: MutableState<Boolean> = mutableStateOf(false),
    var southFlag: MutableState<Boolean> = mutableStateOf(false),
    // Drag offset and crop-square size/drag thresholds -- presumably in
    // pixels; TODO confirm units against the crop() implementation.
    var offsetX: MutableState<Float> = mutableStateOf(0f),
    var offsetY: MutableState<Float> = mutableStateOf(0f),
    var cropSquareX: MutableState<Int> = mutableStateOf(400),
    var cropSquareY: MutableState<Int> = mutableStateOf(400),
    var thresholdX: MutableState<Int> = mutableStateOf(65),
    var thresholdY: MutableState<Int> = mutableStateOf(65),
)
|
<?php
namespace Tests\Feature\Api;
use App\Models\Level;
use App\Models\Period;
use App\Models\Student;
use App\Models\User;
use Database\Seeders\RoleSeeder;
use Database\Seeders\UserSeeder;
use Illuminate\Foundation\Testing\RefreshDatabase;
use Illuminate\Foundation\Testing\WithFaker;
use Laravel\Sanctum\Sanctum;
use Tests\TestCase;
class RegistrationTest extends TestCase
{
    use RefreshDatabase;

    /** @var User Authenticated admin user shared by every test in this class. */
    private $user;

    /**
     * Seed roles/users and authenticate as the seeded admin via Sanctum
     * so every request in this class runs with admin privileges.
     */
    public function setUp(): void
    {
        parent::setUp();
        $this->seed([
            RoleSeeder::class,
            UserSeeder::class
        ]);
        $this->user = User::where('email', 'admin@gmail.com')->first();
        Sanctum::actingAs($this->user);
    }

    /**
     * Registering several students at once must create one registration
     * row per student for the given period and level.
     */
    public function test_admin_can_create_new_registration()
    {
        $period = Period::factory()->create();
        $level = Level::factory()->create();
        $student_1 = Student::factory()->create();
        $student_2 = Student::factory()->create();
        $student_3 = Student::factory()->create();

        $this->postJson('/api/registrations', [
            'period_id' => $period->id,
            'level_id' => $level->id,
            'student_ids' => [
                $student_1->id,
                $student_2->id,
                $student_3->id,
            ]
        ])->assertStatus(201);

        // Assert a row per student — the original only checked the first one.
        foreach ([$student_1, $student_2, $student_3] as $student) {
            $this->assertDatabaseHas('registrations', [
                'period_id' => $period->id,
                'level_id' => $level->id,
                'student_id' => $student->id,
            ]);
        }
    }

    /**
     * Deleting a subset of registrations must remove exactly those rows
     * and leave the remaining student's registration intact.
     */
    public function test_admin_can_delete_existing_registration()
    {
        $period = Period::factory()->create();
        $level = Level::factory()->create();
        $student_1 = Student::factory()->create();
        $student_2 = Student::factory()->create();
        $student_3 = Student::factory()->create();

        // Register all three students first.
        $this->postJson('/api/registrations', [
            'period_id' => $period->id,
            'level_id' => $level->id,
            'student_ids' => [
                $student_1->id,
                $student_2->id,
                $student_3->id,
            ]
        ]);

        // Delete the registrations of the first two students only.
        $this->deleteJson('/api/registrations', [
            'period_id' => $period->id,
            'level_id' => $level->id,
            'student_ids' => [
                $student_1->id,
                $student_2->id,
            ]
        ])->assertStatus(200);

        // The database assertions take scalar column values: passing an array as
        // 'student_id' (as the original did) does not assert each id individually,
        // so check one row at a time.
        foreach ([$student_1, $student_2] as $student) {
            $this->assertDatabaseMissing('registrations', [
                'period_id' => $period->id,
                'level_id' => $level->id,
                'student_id' => $student->id,
            ]);
        }
        $this->assertDatabaseHas('registrations', [
            'period_id' => $period->id,
            'level_id' => $level->id,
            'student_id' => $student_3->id,
        ]);
    }
}
|
import { BrowserModule } from "@angular/platform-browser";
import { NgModule } from "@angular/core";
import { AppComponent } from "./app.component";
import { HeroeComponent } from "./pages/heroe/heroe.component";
import { HeroesComponent } from "./pages/heroes/heroes.component";
// importar el appRoutingModule para utilizar las rutas
import { AppRoutingModule } from "./app-routing.module";
// para trabajar con formularios a nivel template hay que hacer este import:
import { FormsModule } from "@angular/forms";
// para trabajar con peticiones http
import { HttpClientModule } from "@angular/common/http";
/**
 * Root Angular module: declares the app's components and wires in routing
 * (AppRoutingModule), template-driven forms (FormsModule) and HTTP access
 * (HttpClientModule). Bootstraps AppComponent.
 */
@NgModule({
declarations: [AppComponent, HeroeComponent, HeroesComponent],
imports: [BrowserModule, AppRoutingModule, FormsModule, HttpClientModule],
providers: [],
bootstrap: [AppComponent]
})
export class AppModule {}
|
package com.example.todo_list.ReminderSort;
import static org.junit.Assert.*;
import com.example.todo_list.Reminder.Sort.SortByDateStrategy;
import com.example.todo_list.Reminder.Task;
import org.junit.Test;
import java.util.ArrayList;
import java.util.List;
/**
* This test class is for {@link SortByDateStrategy}.
* This class contains tests to verify that the SortByDateStrategy correctly sorts a list of {@link Task} objects.
*/
public class SortByDateStrategyTest {

    /**
     * Tests sorting of tasks by their date and time. Tasks are sorted in descending
     * date order: e.g. 12/05/2024 comes first, 10/05/2024 second, 9/05/2024 last.
     * It creates a list of tasks with different dates and adds them to a list.
     * After sorting the list using {@link SortByDateStrategy}, it verifies the order.
     *
     * <p>Expected order based on dates:</p>
     * <ul>
     *     <li>Task with date "12/05/2024" should come first.</li>
     *     <li>Task with date "10/05/2024" should come second.</li>
     *     <li>Task with date "9/05/2024" should come last.</li>
     * </ul>
     */
    @Test
    public void sortTaskBydateandTime(){
        Task task1 = new Task("1", "10/05/2024", "t1", "abcd", "15:16", 993498369);
        Task task2 = new Task("2", "12/05/2024", "t2", "abcd", "15:20", 993498369);
        Task task3 = new Task("3", "9/05/2024", "t3", "abcd", "14:16", 993498369);
        List<Task> tasks = new ArrayList<>();
        tasks.add(task1);
        tasks.add(task2);
        tasks.add(task3);
        SortByDateStrategy sorting = new SortByDateStrategy();
        sorting.sort(tasks);
        assertEquals("t2", tasks.get(0).getTitle());
        assertEquals("t1", tasks.get(1).getTitle());
        assertEquals("t3", tasks.get(2).getTitle());
    }

    /**
     * Tests sorting of tasks that share the same date but have different times,
     * verifying that {@link SortByDateStrategy} breaks date ties by time.
     *
     * <p>Expected order (per the assertions below): for the same date the later
     * time "15:20" comes first, then "15:16", then "14:16".</p>
     */
    @Test
    public void sortTaskBySamedateandDiffTime(){
        // Same date, different times.
        Task task1 = new Task("1", "10/05/2024", "t1", "abcd", "15:16", 993498369);
        Task task2 = new Task("2", "10/05/2024", "t2", "abcd", "15:20", 993498369);
        Task task3 = new Task("3", "10/05/2024", "t3", "abcd", "14:16", 993498369);
        List<Task> tasks = new ArrayList<>();
        tasks.add(task1);
        tasks.add(task2);
        tasks.add(task3);
        SortByDateStrategy sorting = new SortByDateStrategy();
        sorting.sort(tasks);
        assertEquals("t2", tasks.get(0).getTitle());
        assertEquals("t1", tasks.get(1).getTitle());
        assertEquals("t3", tasks.get(2).getTitle());
    }

    /**
     * Sorting tasks with identical dates and times must neither drop nor duplicate
     * elements: all three tasks remain present and the list size is unchanged.
     */
    @Test
    public void sortIdenticalDatesAndTimes(){
        Task task1 = new Task("1", "10/05/2024", "t1", "abcd", "15:16", 993498369);
        Task task2 = new Task("2", "10/05/2024", "t2", "abcd", "15:16", 993498369);
        Task task3 = new Task("3", "10/05/2024", "t3", "abcd", "15:16", 993498369);
        List<Task> tasks = new ArrayList<>();
        tasks.add(task1);
        tasks.add(task2);
        tasks.add(task3);
        SortByDateStrategy sorting = new SortByDateStrategy();
        sorting.sort(tasks);
        // The original `assertTrue(tasks.indexOf(task1) != 3)` was vacuously true —
        // a 3-element list only has indices 0..2 — so assert something meaningful.
        assertEquals(3, tasks.size());
        assertTrue(tasks.contains(task1));
        assertTrue(tasks.contains(task2));
        assertTrue(tasks.contains(task3));
    }

    /** Sorting a single-element list must leave it unchanged. */
    @Test
    public void sortSingleElementList(){
        Task task1 = new Task("2", "10/05/2024", "t1", "Content", "12:02", 993498369);
        List<Task> tasks = new ArrayList<>();
        tasks.add(task1);
        SortByDateStrategy sorting = new SortByDateStrategy();
        sorting.sort(tasks);
        assertEquals(1, tasks.size());
        assertEquals("12:02", tasks.get(0).getTime());
        assertNotEquals("12/02", tasks.get(0).getTime());
    }

    /** Sorting an empty list must not throw and must leave it empty. */
    @Test
    public void sortEmptyListByDate() {
        List<Task> tasks = new ArrayList<>();
        SortByDateStrategy sorting = new SortByDateStrategy();
        sorting.sort(tasks);
        assertTrue(tasks.isEmpty());
    }

    /* NOTE(review): disabled test kept for reference — it uses an outdated 5-argument
     * Task constructor and would not compile against the current 6-argument Task.
    @Test
    public void SortwithNullDate(){
        Task task1=new Task("1",null,"t1","abcd","15:16");
        Task task2=new Task("2","10/05/2024","t2","abcd","15:16");
        Task task3=new Task("3","10/05/2024","t3","abcd","15:16");
        List<Task> tasks=new ArrayList<>();
        tasks.add(task1);
        tasks.add(task2);
        tasks.add(task3);
        SortByDateStrategy sorting=new SortByDateStrategy();
        sorting.sort(tasks);
        assertEquals("t2",tasks.get(0).getTitle());
        assertEquals("t1",tasks.get(2).getTitle());
        assertEquals("t3",tasks.get(1).getTitle());
    }*/

    /**
     * Stress test: 1000 tasks inserted in reverse year order ("10/05/3024" down to
     * "10/05/2025") must come out with the latest year first and the earliest last.
     */
    @Test
    public void sortLargeNumberOfTasks() {
        List<Task> tasks = new ArrayList<>();
        for (int i = 1000; i > 0; i--) {
            tasks.add(new Task(String.valueOf(i), "10/05/" + (2024 + i), "t" + i, "Content " + i, "15:" + (i % 60), 993498369));
        }
        SortByDateStrategy sorting = new SortByDateStrategy();
        sorting.sort(tasks);
        assertEquals("t1000", tasks.get(0).getTitle());
        assertEquals("t1", tasks.get(tasks.size() - 1).getTitle());
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.