text stringlengths 184 4.48M |
|---|
from xbot.framework.utils import assertx
from lib.testcase import TestCase
class tc_eg_pass_get_values_from_testbed(TestCase):
    """
    Get information from the testbed and perform checks.

    Each step reads one value from the testbed configuration via
    ``self.testbed.get`` and asserts the expected result. The lookup
    strings use a JMESPath-like syntax: dotted keys, quoted keys,
    list indices and ``[?field=='value']`` filters —
    TODO(review): confirm against the xbot testbed documentation.
    """
    # Maximum runtime of the case, in seconds (presumably enforced by
    # the TestCase base class — confirm).
    TIMEOUT = 60
    # Abort remaining steps after the first failing step.
    FAILFAST = True
    # Labels used to select this case when filtering test runs.
    TAGS = ['tag1']

    def setup(self):
        """
        Prepare test environment.

        Nothing to set up for this example case.
        """
        pass

    def step1(self):
        """
        Expect the value of `example.key1` to be `value1`.
        """
        value1 = self.testbed.get('example.key1')
        assertx(value1, '==', 'value1')

    def step2(self):
        """
        Expect the value of `example.key2."key2-1"` to be `value2-1`.
        """
        value2 = self.testbed.get('example.key2."key2-1"')
        assertx(value2, '==', 'value2-1')

    def step3(self):
        """
        Expect the value of `example.key3[1]` to be `value3-2`.
        """
        value3 = self.testbed.get('example.key3[1]')
        assertx(value3, '==', 'value3-2')

    def step4(self):
        """
        Expect the value of `example.key4[?name=='jack']` to be
        `[{'name': 'jack', 'age': 20}]`.
        """
        value4 = self.testbed.get("example.key4[?name=='jack']")
        assertx(value4, '==', [{'name': 'jack', 'age': 20}])

    def step5(self):
        """
        Expect the value of `example.key5[?name=='lily']` to be `None`
        (the key does not exist in the testbed).

        NOTE(review): the original docstring referenced `example.key4`
        while the code queries `example.key5`; the docstring was aligned
        with the code — confirm which was intended.
        """
        value5 = self.testbed.get("example.key5[?name=='lily']")
        assertx(value5, '==', None)

    def teardown(self):
        """
        Clean up test environment.
        """
        self.sleep(1)
import { useState, useEffect } from "react"
import { useSelector, useDispatch } from "react-redux"
import GridViewProducts from "./GridViewProducts"
import ListViewProducts from "./ListViewProducts"
import Loading from "./Loading"
import { updatePage } from "../features/filter/filterSlice"
import CartModal from "./CartModal"
const ProductsList = () => {
const [reminderShow, setRemindShow] = useState(false)
const { isLoading } = useSelector((store) => store.products)
const {
filteredProducts: products,
gridView,
page,
productsPerPage,
} = useSelector((store) => store.filter)
const dispatch = useDispatch()
// for scroll to show more product effect
useEffect(() => {
const event = window.addEventListener("scroll", () => {
if (
!isLoading &&
window.innerHeight + window.scrollY >= document.body.scrollHeight
)
dispatch(updatePage())
})
return () => window.removeEventListener("scroll", event)
}, [])
// for mobile version pop up modal tell user swipe right to open filter menu
useEffect(() => {
let timer
if (!isLoading) {
setRemindShow(true)
timer = setTimeout(() => {
setRemindShow(false)
}, 2000)
}
return () => clearTimeout(timer)
}, [isLoading])
if (isLoading) {
return (
<div className="section-center">
<Loading />
</div>
)
}
if (products.length < 1) {
return (
<div className="section-center">
<h3 style={{ marginTop: "10rem" }}>
Sorry, we can't find anything match your search
</h3>
</div>
)
}
if (gridView)
return (
<>
{reminderShow ? (
<div className="modal mobile">
<CartModal message="Swipe right to open the filter menu" />
</div>
) : null}
<GridViewProducts
products={products.slice(0, productsPerPage * (page + 1))}
/>
</>
)
return (
<ListViewProducts
products={products.slice(0, productsPerPage * (page + 1))}
/>
)
}
export default ProductsList |
import { Dialog, DialogContent, DialogTrigger } from '@/components/ui/dialog'
import React from 'react'
import CardDetails from './CardDetails'
import { Draggable, Droppable } from '@hello-pangea/dnd'
import { getContrastTextColor } from '@/lib/utils';
// Props accepted by CardsContainer.
interface props {
    // Card records of this list; TODO(review): replace `any` with a Card type.
    cards: any[],
    // Droppable id of the enclosing list (also forwarded to CardDetails).
    id: string;
    // Title of the enclosing list, shown inside the card-details dialog.
    listTitle: string
}
// Renders the draggable cards of a single board list. Each card opens a
// dialog with its details. When the list is empty, an invisible dummy
// Draggable keeps the list registered as a valid drop target.
const CardsContainer = ({ cards, id, listTitle }: props) => {
    return (
        <Droppable droppableId={id} type="card">
            {(provided) => (
                <div className={`w-full ${cards && cards.length > 0 && 'grid gap-2.5 my-4'}`}
                    ref={provided.innerRef}
                    {...provided.droppableProps}>
                    {cards && cards.length > 0 ? cards.map((item, index) => {
                        return (
                            <Draggable draggableId={item.id} index={index} key={item.id}>
                                {(provided) => (
                                    <Dialog>
                                        <DialogTrigger>
                                            {/* Drag handle and draggable props live on the card body itself. */}
                                            <div className='text-left border-2 border-transparent hover:border-gray-600 py-2 px-3 bg-white rounded-md shadow-sm cursor-pointer '
                                                {...provided.draggableProps}
                                                {...provided.dragHandleProps}
                                                ref={provided.innerRef}>
                                                {/* Label chip is rendered only when both text and color exist. */}
                                                {item.label && item.labelColor && (
                                                    <span className={`text-xs font-medium px-2 py-0.5 rounded-sm`} style={{ color: getContrastTextColor(item.labelColor), background: item.labelColor || "#000" }}>{item.label || 'Add label'}</span>
                                                )}
                                                <p className={`font-medium truncate text-sm ${item.label && item.labelColor && 'mt-1'}`}>{item.title}</p>
                                            </div>
                                        </DialogTrigger>
                                        <DialogContent className='max-w-2xl'>
                                            <CardDetails data={item} listId={id} listTitle={listTitle} />
                                        </DialogContent>
                                    </Dialog>
                                )}
                            </Draggable>
                        )
                    }) : (
                        // NOTE(review): dummy draggable so the empty list still
                        // accepts drops — confirm this is required by the dnd lib.
                        <Draggable draggableId={"1"} index={1} key={1}>
                            {(provided) => (
                                <div className='h-2.5'
                                    {...provided.draggableProps}
                                    {...provided.dragHandleProps}
                                    ref={provided.innerRef}>
                                </div>
                            )}
                        </Draggable>
                    )}
                    {provided.placeholder}
                </div>
            )}
        </Droppable>
    )
}
export default CardsContainer
# Welcome to the Personal Project - [OptiMarket: Uncovering the Ultimate Marketing Approach](https://ayumu0622.github.io/OptiMarket_Uncovering_the_Ultimate_Marketing_Approach/)
## Access this project from [here](https://ayumu0622.github.io/OptiMarket_Uncovering_the_Ultimate_Marketing_Approach/)
Hello! I'm Ayumu Justin Ueda, a Data Science student at UC Berkeley. You can find my profile on [LinkedIn](https://www.linkedin.com/in/ayumu-ueda-ab1879224/).
In this project, as a data scientist at a car insurance company, I've been provided with last year's data documenting the outcomes of marketing campaigns involving various types of strategies (a combination of insurance renewal plans - Offer1 and Offer2, and different sales channels - Agent, Branch, Call Center, Email).
## Objective:
My objective is to enhance the KPI, specifically the acceptance rate of offers, in the upcoming marketing phase by identifying the most effective combinations of customer segments and marketing strategies (a combination of offer type and sales channel).
<br/><br/>
## Project Overview:
### 1. Customer segmentation for pinpointing optimal marketing strategies in each category:
**Skills Applied:**
* Parametric Statistical Inference(Test of Homogeneity, Chi Square Test)
* K-means Clustering
* Principal Component Analysis (PCA)
**Conclusion for this section:**
| | cluster | best marketing strategy |
|---:|----------:|:--------------------------|
| 0 | 0 | Offer2_Agent |
| 1 | 1 | Offer1_Email |
| 2 | 2 | Offer2_Branch |
| 3 | 3 | Offer2_Agent |
| 4 | 4 | Offer2_Call Center |
| 5 | 5 | Offer2_Agent |
| 6 | 6 | Offer2_Email |
| 7 | 7 | Offer2_Email |
| 8 | 8 | Offer2_Call Center |
| 9 | 9 | Offer2_Agent |
The best combination of marketing strategies is as outlined above.
For future marketing initiatives, we can utilize customer information such as gender, education, etc., categorizing them into one of the ten groups mentioned. This approach allows us to employ the most effective marketing strategy for each group, ultimately increasing the number of car insurance renewals and enhancing the profitability of our insurance department.
### 2. Optimizing Email Sales through A/B Testing:
**Skills Applied:**
* Parametric Statistical Inference (ANOVA, one-sided test)
* Non-Parametric Statistical Inference(Kruskal-Wallis test)
* A/B test (Bootstrap)
**Conclusion for this section:**
After conducting A/B testing on the marketing campaign, it's evident that Promotion 1 is the most effective in increasing sales.
I have decided to implement Promotion 1 for the email marketing campaign. |
<template>
<div class="table-card">
<h2>Liste des Trajets</h2>
<div class="table-wrapper">
<table>
<thead>
<tr>
<th>Date début</th>
<th>Date fin</th>
<th>Type Trajet</th>
<th>Code Trajet</th>
<th>Voiture</th>
<th>Libellé</th>
<th>Lieu de départ</th>
<th>Lieu d'arrivé</th>
<th>Heure de départ</th>
<th>Heure d'arrivé</th>
<th>Distance</th>
<th>Employés</th>
<th>Actions</th>
</tr>
</thead>
<tbody>
<tr v-for="trajet in trajets" :key="trajet.id">
<td>{{ trajet.dateDebut }}</td>
<td>{{ trajet.dateFin }}</td>
<td>{{ trajet.typeTrajet }}</td>
<td>{{ trajet.codeTrajet }}</td>
<td>{{ trajet.voiture }}</td>
<td>{{ trajet.libelle }}</td>
<td>{{ trajet.lieuDepart }}</td>
<td>{{ trajet.lieuArrivee }}</td>
<td>{{ trajet.heureDepart }}</td>
<td>{{ trajet.heureArrivee }}</td>
<td>{{ trajet.distance }}</td>
<td>
<span v-if="trajet.employes.length" style="display: flex; flex-direction: column; align-items: center; gap: 8px;">
<span v-for="employees in trajet.employes" :key="employees">{{ employees }}</span></span>
<span v-else>Aucun</span>
</td>
<td>
<button class="btn update" @click="$router.push('/trajform/'+trajet.id)">✏️</button>
<button class="btn delete" @click="deleteTrajet(trajet.id)">🗑️</button>
</td>
</tr>
</tbody>
</table>
</div>
<router-link to="/trajform/new" class="btn add">Ajouter +</router-link>
</div>
</template>
<script>
import { ref } from 'vue';

export default {
  name: 'TableTrajet',
  setup() {
    // Reactive list of trips, seeded from localStorage.
    const trajets = ref(getTrajet());

    // Placeholder for a future edit action; navigation to the edit form is
    // currently handled inline in the template.
    const updateTrajet = () => {
    };

    // Remove a trip, keeping the reactive list and localStorage in sync.
    // BUG FIX: the previous version only wrote to localStorage and then
    // forced a full page reload (window.location.reload()) to refresh the
    // table; mutating the ref updates the UI reactively without a reload.
    const deleteTrajet = (id) => {
      trajets.value = trajets.value.filter((trajet) => trajet.id !== id);
      localStorage.setItem("trajets", JSON.stringify(trajets.value));
    };

    return {
      trajets,
      updateTrajet,
      deleteTrajet
    };
  }
};

// Load trips from localStorage; returns an empty list when none are stored.
function getTrajet() {
  const storedTrajets = localStorage.getItem("trajets");
  if (storedTrajets) {
    return JSON.parse(storedTrajets);
  }
  return [];
}
</script>
<style scoped>
.table-card {
background: #FFFFFF;
border-radius: 8px;
box-shadow: 0 2px 4px rgba(0, 0, 0, 0.1);
margin: 2rem;
padding: 1rem;
}
/* Add your global table styling here */
.table-container {
padding: 1em;
}
.table-wrapper {
overflow-x: auto;
}
table {
width: 100%;
border-collapse: collapse;
margin-bottom: 1em;
}
th,
td {
text-align: left;
padding: 0.5em;
border-bottom: 1px solid #ddd;
}
th {
background-color: #eee;
}
tbody tr:nth-child(odd) {
background-color: #f9f9f9;
}
.btn {
padding: 0.5em 1em;
margin-right: 0.5em;
cursor: pointer;
border: none;
border-radius: 0.25em;
color: white;
}
h2 {
text-align: center;
}
.btn.add {
background-color: #9b59b6;
color: white;
text-decoration: none;
padding: 0.5em 1em;
margin: 1em 0;
display: inline-block;
}
.btn:hover {
background-color: #8e44ad;
}
</style> |
#include "binary_trees.h"
/**
 * binary_tree_height - finds height of the tree
 *
 * @tree: pointer to the root node of the tree to measure the height
 * Return: height of the tree (a NULL tree or a lone leaf has height 0)
 */
size_t binary_tree_height(const binary_tree_t *tree)
{
	size_t lh, rh;

	/* empty tree and leaf nodes both measure 0 */
	if (tree == NULL || (tree->left == NULL && tree->right == NULL))
		return (0);
	lh = binary_tree_height(tree->left);
	rh = binary_tree_height(tree->right);
	return ((lh > rh ? lh : rh) + 1);
}
# 大型项目管理不善
> 原文:<https://dev.to/jonrimmer/megaproject-mismanagement-2lic>
接下来是由[教授本特·弗吕布杰格](https://en.wikipedia.org/wiki/Bent_Flyvbjerg)撰写的[关于大型项目你应该知道什么,为什么要知道](https://arxiv.org/pdf/1409.0003.pdf)的总结,以及对超大型项目所面临的困难的分析。对于那些在技术领域从事过大型 IT 项目的人来说,这些问题太熟悉了。所有的见解都是 Flyvbjerg 和他的来源和合作者的。任何错误都是我的。
# 好人
“大型项目”是极其庞大、复杂的项目,预算高达数十亿或更多,交付时间长达数年。它们的规模使它们不同于常规项目,并且它们需要不同的方法来成功管理。典型的大型项目是非常大的建筑、桥梁、公共交通系统和太空任务。
大型项目在公共和私营部门越来越受欢迎,保守估计其市场规模为每年 6-9 万亿美元。他们拥有四个“优点”,这使他们对不同类型的决策者具有吸引力:
* 工程师——他们的技术雄心和突破边界的能力。
* 政治家——作为他们和他们事业的纪念碑。
* 企业和工会——他们提供的金钱和工作岗位的数量。
* 设计师和设计爱好者——他们的美丽和标志性规模。
政策制定者普遍支持大型项目,因为它们在增加就业、国内消费、提供更好的服务以及改善环境的潜力方面有明显的好处。
# 坏了
尽管大型项目被认为有好处,但它们有着糟糕的表现历史。它们的规模使它们具有内在的风险,难以进行正确的长期规划或领导。乐观偏见会影响对成本和可能收益的预测,而它们几乎总是错的。实际成本和时间超出预算,需求低于预期。ICT 大型项目尤其糟糕。
只有千分之一的大型项目在预算、时间和收益方面得以实现。数量如此之少,以至于没有足够的人去做适当的统计分析,来解释为什么他们在大多数人失败的地方取得了成功。随着时间的推移,成功项目与不成功项目的比率似乎没有提高。
大多数大型项目遵循“中断-修复”模式。实施者不知道如何成功,但无论如何都要坚持下去,直到危机时刻,也就是“突破”,当现实击中要害的时候。此时,通常会暂停一下,通过重组和再融资来“修复”项目。
一些人认为,规划失误是帮助有价值项目启动的必要之恶。这种想法通常由内部人士表达,但很少公开。赫希曼认为,对问题的低估可能与对创造性解决相同问题的能力的低估相匹配。他称之为“藏手”原则。
这些理论已经被证明是受欢迎的,尤其是在那些对看到大型项目发生有既得利益的人中。这造就了一种战略误导的文化,在这种文化中,推广者为了启动项目,愿意进行过于乐观的预测。
# 丑陋
然而,这些理论是有缺陷的。支撑它们的研究是基于不充分和有偏见的数据。事实上,乐观偏见适用于成本和收益的所有层面,问题往往会阻碍项目,而不是被创造性地解决。
对大型项目的欺骗性宣传导致帕累托低效率,错配了本可以更好地用于其他地方的资源。公共或私营部门的个人在努力推广项目时故意误导他人也是不道德的,有时甚至是非法的。
事实上,非常大的项目可以有一个正的收益成本比,但问题是如何将这些项目与大多数糟糕的提案区分开来,猖獗的不诚实行为使这变得更加困难。存在一种“最不适合者生存”的现象,即那些在理论上对收益和成本最不切实际的项目最有可能被选中,尽管同样的不诚实会导致更大的问题。
# 未来
有一些积极的迹象。例如,奥巴马政府愿意指出美国公共采购中的问题。此外,现在的项目规模如此之大,以至于一个项目就能拖垮一名高管或整个公司,这刺激了更好的监管。
英国政府成立了重大项目管理局(Major Projects Authority),试图控制项目超支,其他一些国家也纷纷效仿,推出了类似的计划,这是继安然(Enron)等公司丑闻之后加强治理的总体趋势。资本和养恤基金私人资金的更多参与,也通过对项目预测进行更独立的审计,带来了更好的监督。
最后,对大型项目管理的研究有所改进,这源于对乐观偏见和战略错误陈述如何导致它们首先出错的更好理解。这项研究的成果已经开始影响实践。尽管现在断言改革是否会成功还为时过早。 |
import math
import os

import numpy as np
from keras.preprocessing.image import ImageDataGenerator
from tensorflow.keras.callbacks import ModelCheckpoint

from .model import PredictorModel
from .plot import CountPlotter
from .sample import Sample
class Trainer:
    """Trains and evaluates an image classifier (NORMAL vs PNEUMONIA).

    Wraps a ``Sample`` (the dataset), a Keras ``ImageDataGenerator`` for
    on-the-fly augmentation, and a ``ModelCheckpoint`` callback that keeps
    the best weights (by ``val_accuracy``) seen during training.
    """

    def __init__(self, sample: Sample):
        self.sample = sample
        self.datagen = self.create_data_generator()
        self.checkpoint = self.create_checkpoint()
        # Set by train(); the evaluate helpers fall back to it when no
        # model is passed explicitly. (BUG FIX: the original evaluate
        # methods referenced a never-assigned ``self.model``.)
        self.model = None

    def train(self, model: PredictorModel):
        """Fit *model* on the full sample with data augmentation.

        Stores the trained model on ``self.model`` and hands the Keras
        history object to ``model.set_history``.
        """
        self.sample.load()
        images = self.sample.get_concatenated_images()
        tags = self.sample.get_concatenated_tags()
        sample_count = len(images)
        batch_size = 32
        self.datagen.fit(images)
        history = model.fit(
            self.datagen.flow(images, tags, batch_size=batch_size),
            # ceil() so the final partial batch is not dropped
            # (the original passed a float here).
            steps_per_epoch=math.ceil(sample_count / batch_size),
            epochs=10,
            callbacks=[self.checkpoint],
            verbose=1,
        )
        model.set_history(history)
        self.model = model

    def create_data_generator(self):
        """Return the augmentation pipeline used during training."""
        return ImageDataGenerator(
            featurewise_center=False,
            samplewise_center=False,
            featurewise_std_normalization=False,
            samplewise_std_normalization=False,
            zca_whitening=False,
            rotation_range=30,
            zoom_range=0.2,
            width_shift_range=0.1,
            height_shift_range=0.1,
            horizontal_flip=True,
            vertical_flip=False,
        )

    def create_checkpoint(self):
        """Return a checkpoint callback saving the best-val-accuracy weights."""
        return ModelCheckpoint(
            "modelo_normal_neumonia.hdf5",
            monitor="val_accuracy",
            verbose=1,
            save_best_only=True,
        )

    def get_sample_count_plotter(self, sample: Sample = None):
        """Build a CountPlotter showing the class balance of *sample*.

        Defaults to the trainer's own sample when none is given.
        """
        sample = sample or self.sample
        normal_count = len(sample.normal_images)
        pneumo_count = len(sample.pneumo_images)
        plotter = CountPlotter(
            title=f"Distribucion de clases en la muestra {sample.name}",
            xlabel="Cantidad de Muestras",
            ylabel="Clase",
        )
        plotter.set_data_class(
            name="NORMAL", color="skyblue", count=normal_count
        )
        plotter.set_data_class(
            name="PNEUMONIA", color="thistle", count=pneumo_count
        )
        return plotter

    def evaluate_sample(self, sample: Sample = None, model: PredictorModel = None):
        """Evaluate a trained model on *sample*; return ``(loss, accuracy)``.

        BUG FIX: the original body referenced undefined names
        (``sample_images``/``sample_tags``) and the unassigned
        ``self.model``, so it always raised NameError. The images and
        tags are now taken from the sample, and the model defaults to
        the one trained via :meth:`train`.
        """
        sample = sample or self.sample
        model = model or self.model
        if model is None:
            raise ValueError("no model available: call train() first or pass model=")
        sample.load()
        images = sample.get_concatenated_images()
        tags = sample.get_concatenated_tags()
        loss, accuracy = model.evaluate(images, tags)
        return loss, accuracy

    def evaluate_model(self, sample_images, sample_tags, model: PredictorModel = None):
        """Re-fit on the given arrays and return per-epoch (loss, accuracy).

        BUG FIX: the original passed the ModelCheckpoint callback as
        ``validation_data`` (a Keras type error) and used the unassigned
        ``self.model``; the checkpoint now goes in ``callbacks`` exactly
        as in :meth:`train`.
        """
        model = model or self.model
        if model is None:
            raise ValueError("no model available: call train() first or pass model=")
        sample_count = len(sample_images)
        batch_size = 32
        history = model.fit(
            self.datagen.flow(sample_images, sample_tags,
                              batch_size=batch_size),
            steps_per_epoch=math.ceil(sample_count / batch_size),
            epochs=10,
            callbacks=[self.checkpoint],
            verbose=1,
        ).history
        return history["loss"], history["accuracy"]
package ent;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import connect.OracleConnectUtil;
public class EntDao {
private static EntDao entDao = new EntDao();
public EntDao() { }
public static EntDao getEntDao() {
return entDao;
}
// insert
public void insert(Ent vo) {
Connection conn = OracleConnectUtil.connect();
System.out.println("main 메소드 확인용 : " + conn);
String sql = "INSERT INTO TEAMA.ENT (ENT_CODE, ENT_NAME, TEL, CEO, ADDRESS, F_DATE)\r\n" +
"VALUES(?, ?, ?, ?, ?, ?)";
PreparedStatement pstmt;
try {
pstmt = conn.prepareStatement(sql);
pstmt.setInt(1, vo.getEnt_code());
pstmt.setString(2, vo.getEnt_name());
pstmt.setString(3, vo.getTel());
pstmt.setString(4, vo.getCeo());
pstmt.setString(5, vo.getAddress());
pstmt.setDate(6, vo.getF_date());
pstmt.execute("기업정보가 등록되었습니다.");
pstmt.close();
System.out.println("");
} catch (SQLException e) {
System.out.println("SQL 실행 오류 : " + e.getMessage());
}
OracleConnectUtil.close(conn);
}
// select
// 모든 기업 조회
public static List<Ent> selectAll() {
Connection conn = OracleConnectUtil.connect();
String sql = "SELECT * FROM ENT";
PreparedStatement pstmt;
ResultSet rs;
List<Ent> ents = new ArrayList<>();
try {
pstmt = conn.prepareStatement(sql);
rs = pstmt.executeQuery();
while (rs.next()) {
ents.add(new Ent(rs.getInt(1),
rs.getString(2),
rs.getString(3),
rs.getString(4),
rs.getString(5),
rs.getDate(6)));
}
} catch (SQLException e) {
System.out.println("SQL 실행 오류 : " + e.getMessage());
}
return ents;
}
// 기업 코드별 조회
public static Ent selectEnt(int ent_code) {
Connection conn = OracleConnectUtil.connect();
String sql = "select * from ent where ent_code = ?";
PreparedStatement pstmt = null;
ResultSet rs = null;
Ent ent = new Ent();
try {
pstmt = conn.prepareStatement(sql);
pstmt.setInt (1, ent_code);
rs = pstmt.executeQuery();
if (rs.next()) {
ent.setEnt_code(rs.getInt(1));
ent.setEnt_name(rs.getString(2));
ent.setTel(rs.getString(3));
ent.setCeo(rs.getString(4));
ent.setAddress(rs.getString(5));
ent.setF_date(rs.getDate(6));
}
} catch (SQLException e) {
System.out.println("SQL 실행 오류 : " + e.getMessage());
}
return ent;
}
public static String selectTeamCode(String ent_name) {
Connection conn = OracleConnectUtil.connect();
String sql = "select ent_name from ent where ent_name = ?";
PreparedStatement pstmt = null;
ResultSet rs = null;
String ents = "";
try {
pstmt = conn.prepareStatement(sql);
pstmt.setString(1, ent_name);
rs = pstmt.executeQuery();
while (rs.next()) {
ents = rs.getString(1);
}
} catch (SQLException e) {
System.out.println("SQL 실행 오류 : " + e.getMessage());
}
return ents;
}
// update
public void update(Ent vo) {
Connection conn = OracleConnectUtil.connect();
PreparedStatement pstmt;
String sql = "UPDATE TEAMA.ENT SET ADDRESS=?" +
"WHERE ENT_NAME=?";
try {
pstmt = conn.prepareStatement(sql);
pstmt.setString(1, vo.getAddress());
pstmt.setString(1, vo.getEnt_name());
pstmt.execute();
pstmt.close();
System.out.println("회사주소가 변경되었습니다.");
} catch (SQLException e) {
System.out.println("SQL 실행오류 : " + e.getMessage());
}
OracleConnectUtil.close(conn);
}
// delete
public void delete(String ent_code) {
Connection conn = OracleConnectUtil.connect();
PreparedStatement pstmt;
String sql = "DELETE FROM TEAMA.ENT WHERE ENT_CODE=?";
try {
pstmt = conn.prepareStatement(sql);
pstmt.setString(1, ent_code);
pstmt.execute();
pstmt.close();
System.out.println("기업 삭제가 완료되었습니다.");
} catch (SQLException e) {
System.out.println("SQL 실행오류 : " + e.getMessage());
}
OracleConnectUtil.close(conn);
}
} |
# Security - Sistema de Autenticação e Autorização
# Introdução ao codigo
Esse é um codigo que foi desenvolvido para autenticar e autorizar diferentes tipos de usuarios, dando permissões e restringindo acesso a diferentes funcionalidades. É uma pagina Web (Java SpringBoot) e que faz requisições a um banco de dados (Mongo DB) e utiliza RestApi e TokensJWT. Abaixo explicarei cada funcionalidade das minhas classes.
## PACKAGE APPLICATION
### CLASSE JwtRestapiApplication(APPLICATION)
<img src="https://github.com/Mairondc21/Java_Spring_JWT_Users/blob/main/Java_Spring_JWT_Users/img/JwtRestapiApplication.png"/>
*Descrição*:
- *Anotação @SpringBootApplication*: Marca a classe como uma aplicação Spring Boot.
- *Anotação @EnableMongoRepositories*: Habilita a detecção de repositórios do MongoDB.
- *Método main*: Ponto de entrada da aplicação.
## PACKAGE CONFIG
### CLASSE AppConfig(CONFIG)
<img src="https://github.com/RickRamosss/Sistema-de-Autentica-o-e-Autoriza-o/blob/main/Security/img/AppConfig.png"/>
*Descrição*:
- *Anotação @Configuration*: Marca a classe como uma fonte de definições de beans.
- *Anotação @EnableWebSecurity*: Habilita a segurança web do Spring Security.
- *Método securityFilterChain(HttpSecurity http)*: Configura as regras de segurança HTTP.
- *csrf(AbstractHttpConfigurer::disable)*: Desabilita a proteção CSRF.
- *authorizeHttpRequests(request -> request...)*: Define regras de autorização para diferentes endpoints.
- *addFilterBefore(jwtAuthenticationFilter, UsernamePasswordAuthenticationFilter.class)*: Adiciona o filtro de autenticação JWT antes do filtro de autenticação padrão.
- *Método passwordEncoder()*: Define o bean para o codificador de senhas BCrypt.
## PACKAGE CONTROLLER
### CLASSE AuthController(CONTROLLER)
<img src="https://github.com/RickRamosss/Sistema-de-Autentica-o-e-Autoriza-o/blob/main/Security/img/AuthController.png"/>
*Descrição*:
- *Anotação @RestController*: Marca a classe como um controlador REST.
- *Método register(@RequestBody User user)*: Registra um novo usuário.
- *Método login(@RequestParam String username, @RequestParam String password)*: Autentica o usuário e retorna um token JWT.
- *Método extractRole(@PathVariable String token)*: Extrai a role do token JWT.
## PACKAGE MODEL
### CLASSE User(MODEL)
<img src="https://github.com/RickRamosss/Sistema-de-Autentica-o-e-Autoriza-o/blob/main/Security/img/user.png"/>
*Descrição*:
- *Anotação @Document*: Marca a classe como um documento MongoDB.
- *Atributos*: id, username, password, role - representam os campos do documento de usuário.
- *Getters e Setters*: Métodos para acessar e modificar os atributos.
## PACKAGE REPOSITORY
### CLASSE UserRepository(REPOSITORY)
<img src="https://github.com/RickRamosss/Sistema-de-Autentica-o-e-Autoriza-o/blob/main/Security/img/UserRepository.png"/>
*Descrição*:
- *Anotação @Repository*: Marca a interface como um repositório do Spring Data.
- *Interface MongoRepository<User, String>*: Extende a interface MongoRepository fornecida pelo Spring Data, especificando a entidade User e o tipo do ID (String).
- *Método findByUsername(String username)*: Declaração de um método personalizado para encontrar um usuário pelo nome de usuário.
## PACKAGE SECURITY
### CLASSE JwtAuthenticationFilter(SECURITY)
<img src="https://github.com/RickRamosss/Sistema-de-Autentica-o-e-Autoriza-o/blob/main/Security/img/JwtAuthenticationFilter.png"/>
*Descrição*:
- *Anotação @Component*: Marca a classe como um componente do Spring.
- *Método doFilterInternal*: Filtra as requisições HTTP para validar o token JWT e autenticar o usuário.
- *Autenticação do Usuário*: Extração do nome de usuário do token JWT e configuração do contexto de segurança.
### CLASSE JwtUtil(SECURITY)
<img src="https://github.com/RickRamosss/Sistema-de-Autentica-o-e-Autoriza-o/blob/main/Security/img/JwtUtil.png"/>
*Descrição*:
- *@Component*: Marca a classe como um bean gerenciado pelo Spring.
- *Construtor JwtUtil(@Value("${jwt.secret}") String secret)*: Inicializa a chave secreta usada para assinar os tokens JWT.
- *generateToken(String username, String role)*: Gera um token JWT com o nome de usuário e a role.
- *extractUsername(String token)*: Extrai o nome de usuário do token JWT.
- *extractRole(String token)*: Extrai a role do token JWT.
## PACKAGE SERVICE
### CLASSE AuthService(SERVICE)
<img src="https://github.com/RickRamosss/Sistema-de-Autentica-o-e-Autoriza-o/blob/main/Security/img/AuthService.png"/>
*Descrição*:
- *@Service*: Marca a classe como um serviço do Spring.
- *authenticateUser(String username, String password)*: Autentica o usuário verificando as credenciais e gera um token JWT.
- *extractUsername(String token)*: Extrai o nome de usuário do token JWT.
- *extractRole(String token)*: Extrai a role do token JWT.
### CLASSE UserService(SERVICE)
<img src="https://github.com/RickRamosss/Sistema-de-Autentica-o-e-Autoriza-o/blob/main/Security/img/UserService.png"/>
*Descrição*:
- *@Service*: Marca a classe como um serviço do Spring.
- *registerUser(User user)*: Registra um novo usuário após verificar se o nome de usuário já existe e codificar a senha.
## PACKAGE TEST
### CLASSE SecretKeyGenerator(TEST)
<img src="https://github.com/RickRamosss/Sistema-de-Autentica-o-e-Autoriza-o/blob/main/Security/img/SecretKeyGenerator.png"/>
*Descrição*:
- *Classe para Gerar Chave Secreta*: Utilizada para gerar uma chave secreta para assinatura de tokens JWT.
- *Método main*: Gera e imprime uma chave secreta codificada em base64.
# DIAGRAMA
<img src="https://github.com/RickRamosss/Sistema-de-Autentica-o-e-Autoriza-o/blob/main/Security/img/Diagrama.png"/>
### Explicação Passo a Passo do Diagrama
1. *JwtRestapiApplication*: Ponto de entrada da aplicação Spring Boot.
2. *AppConfig*: Configura as regras de segurança, define os beans de segurança e adiciona o filtro JWT.
3. *AuthController*: Controlador responsável por gerenciar as requisições de registro, login e extração de roles. Chama o UserService para registrar usuários e o AuthService para autenticar e extrair roles.
4. *AuthService*: Contém a lógica de autenticação e interação com o repositório de usuários. Utiliza o JwtUtil para gerar e validar tokens JWT.
5. *UserService*: Gerencia o registro de usuários, verificando a existência de nomes de usuário e codificando senhas antes de salvar.
6. *UserRepository*: Repositório que interage com o banco de dados MongoDB para operações de CRUD de usuários. Implementa métodos personalizados como findByUsername.
7. *User*: Modelo que representa um usuário no sistema, com atributos id, username, password e role.
8. *JwtUtil*: Utilitário que gerencia a geração, extração e validação de tokens JWT.
9. *JwtAuthenticationFilter*: Filtra as requisições HTTP para validar tokens JWT e autenticar usuários.
10. *SecretKeyGenerator*: Classe utilizada para gerar uma chave secreta para assinatura de tokens JWT.
11. *Web Application*: Consiste em páginas HTML (login.html, register.html, home.html) que interagem com o backend para registro, login e acesso a diferentes seções baseadas na role do usuário.
# INICIAÇÃO DO CÓDIGO
Para se iniciar a aplicação execute primeiro a classe de *SecretKeyGenerator* e gere uma token, logo depois vá até a classe *Application.Properties* e dentro do metodo jwt.secret= coloque a chave gerada. Depois execute a classe JwtRestapiApplication e na sua URL digite http://localhost:8080/register.html e faça um novo registro.
# TELAS DA MINHA APLICAÇÃO
## Register.html
<img src="https://github.com/RickRamosss/Sistema-de-Autentica-o-e-Autoriza-o/blob/main/Security/img/register-html.png"/>
- *register.html*: Página de registro que permite ao usuário criar uma nova conta.
- *Formulário de Registro*: Envia uma requisição POST para /register com o nome de usuário, senha e role.
- *Script JavaScript*: Processa a resposta do servidor e redireciona o usuário para a página de login.
<img src="https://github.com/RickRamosss/Sistema-de-Autentica-o-e-Autoriza-o/blob/main/Security/img/Insercao_MongoDB.png"/>
- Depois de registrar pela aba de /register.html, o usuario é cadastrado no banco de dados
## Login.html
<img src="https://github.com/RickRamosss/Sistema-de-Autentica-o-e-Autoriza-o/blob/main/Security/img/login-html.png"/>
- *login.html*: Página de login que permite ao usuário inserir seu nome de usuário e senha.
- *Formulário de Login*: Envia uma requisição GET para /login com o nome de usuário e senha.
- *Script JavaScript*: Processa a resposta do servidor e armazena o token JWT no localStorage.
<img src="https://github.com/RickRamosss/Sistema-de-Autentica-o-e-Autoriza-o/blob/main/Security/img/insomnia_login.png"/>
- Usando o Insomnia consigo fazer uma requisição GET diretamente pro meu Banco de Dados e ele recupera o registro adiconado do usuario e seu codigo JWT
## Home.html
- *home.html*: Página inicial que mostra informações diferentes com base na role do usuário.
- *Seções de Usuário, Moderador e Admin*: Seções diferentes visíveis com base na role do usuário.
- *Funções de Adicionar e Remover Campos*: Funções JavaScript para adicionar e remover campos que salvam as alterações no localStorage.
- *Funções de Salvar e Carregar Campos Dinâmicos*: Funções JavaScript para salvar e carregar campos dinâmicos do localStorage.
## Home.html/ADMIN
<img src="https://github.com/RickRamosss/Sistema-de-Autentica-o-e-Autoriza-o/blob/main/Security/img/Home-Admin.png"/>
- Essa aba da página so é acessada quem tem a role == Admin
- ele poderá adicionar/remover/visuzalizar campos inputs na Página
## Home.html/MODERADOR
<img src="https://github.com/RickRamosss/Sistema-de-Autentica-o-e-Autoriza-o/blob/main/Security/img/Home-Moderador.png"/>
- Essa aba da página so é acessada quem tem a role == Moderador
- ele poderá adicionar/visuzalizar campos inputs na Página
## Home.html/USER
<img src="https://github.com/RickRamosss/Sistema-de-Autentica-o-e-Autoriza-o/blob/main/Security/img/Home-Usere.png"/>
- Essa aba da página so é acessada quem tem a role == User
- ele poderá somente visuzalizar campos inputs na Página |
// TypeScript ES5 emit helpers (compiler-generated — do not edit by hand).
// __decorate applies a list of decorator functions to a class, property or
// parameter, delegating to Reflect.decorate when available; __metadata
// records design-time type info when Reflect.metadata is available.
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
    if (typeof Reflect === "object" && typeof Reflect.decorate === "function") return Reflect.decorate(decorators, target, key, desc);
    switch (arguments.length) {
        case 2: return decorators.reduceRight(function(o, d) { return (d && d(o)) || o; }, target);
        case 3: return decorators.reduceRight(function(o, d) { return (d && d(target, key)), void 0; }, void 0);
        case 4: return decorators.reduceRight(function(o, d) { return (d && d(target, key, o)) || o; }, desc);
    }
};
var __metadata = (this && this.__metadata) || function (k, v) {
    if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(k, v);
};
// CommonJS requires for the Angular 2 (beta-era) framework and app service.
var core_1 = require('angular2/core');
var common_1 = require('angular2/common');
var router_1 = require('angular2/router');
var spotifyService_1 = require("../services/spotifyService");
// Compiled (TypeScript -> ES5) Angular 2 component showing one Spotify
// artist. NOTE(review): generated output — prefer editing the original
// .ts source over this file.
var ArtistComponent = (function () {
    // spotify: service used to fetch artist data;
    // routeParams: provides the ':id' route parameter;
    // locationStrategy: used for browser-history "back" navigation.
    function ArtistComponent(spotify, routeParams, locationStrategy) {
        this.spotify = spotify;
        this.routeParams = routeParams;
        this.locationStrategy = locationStrategy;
        this.id = this.routeParams.get('id');
    }
    // Fetch the artist once the component initializes.
    ArtistComponent.prototype.ngOnInit = function () {
        var _this = this;
        this.spotify
            .getArtist(this.id)
            .subscribe(function (res) { return _this.renderArtist(res); });
    };
    // Store the API response on the component so the template can render it.
    ArtistComponent.prototype.renderArtist = function (res) {
        this.artist = res;
        console.log(this.artist);
    };
    // Handler for the template's (click)="back()" link.
    ArtistComponent.prototype.back = function () {
        this.locationStrategy.back();
    };
    ArtistComponent = __decorate([
        core_1.Component({
            selector: 'artist',
            directives: [common_1.CORE_DIRECTIVES],
            template: "\n <div *ngIf=\"artist\">\n <h1>{{ artist.name }}</h1>\n\n <p>\n <img src=\"{{ artist.images[0].url }}\">\n </p>\n\n <p><a href (click)=\"back()\">Back</a></p>\n </div>\n "
        }),
        __metadata('design:paramtypes', [spotifyService_1.SpotifyService, router_1.RouteParams, router_1.LocationStrategy])
    ], ArtistComponent);
    return ArtistComponent;
})();
exports.ArtistComponent = ArtistComponent;
//# sourceMappingURL=ArtistComponent.js.map |
import {
Image,
ScrollView,
StyleSheet,
View,
TouchableOpacity,
Text,
} from 'react-native';
import DetailContent from '../components/atoms/DetailContent';
import CategoryRow from '../components/molecules/CategoryRow';
import {useNavigation} from '@react-navigation/native';
import {Badge} from 'react-native-paper';
import Icon from 'react-native-vector-icons/FontAwesome';
import React, {useState, useEffect, useContext} from 'react';
import firestore from '@react-native-firebase/firestore';
import {Context} from '../context/Context';
const Product = ({route}) => {
const {cartCount, favCount} = useContext(Context);
const [cartItems, setCartItems] = cartCount;
const [fav, setFav] = favCount;
const navigation = useNavigation();
const [product, setProduct] = useState({});
const data = route.params.title;
useEffect(() => {
const fetchProduct = async () => {
try {
await getProduct();
} catch (error) {
console.error('Error fetching product:', error);
}
};
fetchProduct();
}, [data]);
const getProduct = async () => {
const productCollection = await firestore()
.collection('Products')
.where('productTitle', '==', route.params.title)
.get();
setProduct(productCollection.docs[0].data());
};
useEffect(() => {
navigation.setOptions({
headerTitle: () => <Text style={headerStyle.headerTitle}>Product</Text>,
headerTitleAlign: 'center',
headerBackVisible: false,
headerLeft: () => (
<TouchableOpacity onPress={() => navigation.goBack()}>
<Icon
name="arrow-left"
size={30}
color="#2D4990"
style={headerStyle.icon}
/>
</TouchableOpacity>
),
headerRight: () => (
<View style={headerStyle.headerRight}>
<TouchableOpacity onPress={() => navigation.navigate('Cart')}>
<Badge>{cartItems}</Badge>
<Icon
name="shopping-cart"
size={30}
color="#2D4990"
style={headerStyle.icon}
/>
</TouchableOpacity>
<TouchableOpacity onPress={() => navigation.navigate('Favourites')}>
<Badge>{fav}</Badge>
<Icon
name="heart-o"
size={30}
color="#2D4990"
style={headerStyle.icon}
/>
</TouchableOpacity>
</View>
),
contentStyle: {
backgroundColor: 'white',
borderTopWidth: 2,
borderTopColor: '#D4A065',
},
headerShadowVisible: false,
});
}, [navigation, cartItems, fav]);
return (
<ScrollView>
{product != null ? (
<View style={styles.container}>
<View style={styles.banner}>
{product.image != null ? (
<Image
style={{width: '100%', height: 250}}
borderradius="10"
resizemode="cover"
source={{uri: product.image}}
/>
) : null}
</View>
<DetailContent
title={product.productTitle}
price={product.productPrice}
description={product.productDescription}
/>
{product.productCateogory != undefined ? (
<CategoryRow title={product.productCateogory} />
) : null}
</View>
) : null}
</ScrollView>
);
};
// Layout styles for the product detail body.
const styles = StyleSheet.create({
  container: {
    flex: 1,
    marginTop: 20,
    justifyContent: 'flex-start',
  },
  // Bordered frame around the product image.
  banner: {
    borderWidth: 2,
    borderColor: '#D4A065',
  },
});
export default Product;
// Styles for the custom navigation header configured by Product.
const headerStyle = StyleSheet.create({
  // NOTE(review): `container` appears unused by the header options above —
  // confirm before removing.
  container: {
    flex: 1,
    backgroundColor: '#E29500',
    justifyContent: 'flex-start',
    marginTop: 25,
  },
  headerTitle: {
    fontSize: 32,
    fontWeight: 'bold',
    color: '#2D4990',
    padding: 25,
  },
  // Lays the cart and favourites buttons out side by side.
  headerRight: {
    flexDirection: 'row',
  },
  icon: {
    marginHorizontal: 10,
  },
});
import { createSlice } from "@reduxjs/toolkit";
import utils from "../../utils/utils";
/**
 * Profile slice: holds the current user's id and display nickname under
 * `state.profile`.
 *
 * Fix: the reducers previously wrote `state.userId` / `state.nickname`
 * on the slice root, but the declared initialState nests both fields
 * under `profile` — so selectors reading `state.profile.*` never saw the
 * updates. All reducers now target `state.profile`. `updateUserId` was
 * also defined but never exported; it is now exported alongside the rest.
 */
const profileSlice = createSlice({
  name: "profileSlice",
  initialState: {
    profile: {
      userId: null,
      nickname: null,
    },
  },
  reducers: {
    // Initialise the profile: payload carries the user id; a random
    // nickname is generated as the starting display name.
    setProfile(state, action) {
      state.profile.userId = action.payload;
      state.profile.nickname = utils.nickname.getRandomNickName();
    },
    updateNickname(state, action) {
      state.profile.nickname = action.payload;
    },
    updateUserId(state, action) {
      state.profile.userId = action.payload;
    },
  },
});
export const { setProfile, updateNickname, updateUserId } = profileSlice.actions;
export const profileReducer = profileSlice.reducer;
'''IP calculation functions
- Original by Kailash Joshi.
- Source: https://github.com/kailashjoshi/Ipcalculator/
'''
import sys
def _dec_to_binary(ip_address):
return list(map(lambda x: bin(x)[2:].zfill(8), ip_address))
def _negation_mask(net_mask):
wild = list()
for i in net_mask:
wild.append(255 - int(i))
return wild
class IPv4Address(object):
    '''IPv4 address calculator.

    Performs miscellaneous calculations on IPv4 addresses: netmask,
    network/broadcast addresses, host ranges and host counts.

    :param str ip_address: IP address, optionally with a ``/prefix``
        suffix (e.g. ``'192.168.1.10/24'``)
    :param int cdir: CIDR prefix length, used only when *ip_address*
        carries no ``/prefix`` suffix.  (The parameter name is a
        historical typo for "cidr", kept for backward compatibility.)
    '''

    def __init__(self, ip_address, cdir=24):
        if '/' in ip_address:
            # CIDR notation in the address overrides the `cdir` argument.
            self._address_val, cidr_part = ip_address.split('/')
            # Normalise the prefix to int once (it was previously a str
            # here but an int in the other branch).
            self._cidr = int(cidr_part)
            self._address = [int(octet) for octet in self._address_val.split('.')]
        else:
            self._address = [int(octet) for octet in ip_address.split('.')]
            self._cidr = int(cdir)
        # Normalise shorthand / overlong dotted forms to exactly 4 octets.
        if len(self._address) > 4:
            self._address = self._address[:4]
        elif len(self._address) < 4:
            # Spread the trailing component over the remaining octets,
            # least-significant byte last (e.g. '10.257' -> 10.0.1.1).
            num = self._address.pop()
            while len(self._address) < 4:
                self._address.append(0)
            pos = 3
            while num > 0 and pos >= 0:
                self._address[pos] = num % 256
                num >>= 8
                pos -= 1
        # Build the dotted-decimal subnet mask from the prefix length.
        self.mask = [0, 0, 0, 0]
        for bit in range(self._cidr):
            self.mask[bit // 8] += 1 << (7 - bit % 8)
        # Binary-string views of address, mask and wildcard mask
        # (kept as public attributes for backward compatibility).
        self.binary_IP = [format(octet, '08b') for octet in self._address]
        self.binary_Mask = [format(octet, '08b') for octet in self.mask]
        self.negation_Mask = [format(255 - octet, '08b') for octet in self.mask]
        # Network address: IP AND mask, octet by octet.
        self.network = [int(ip_bits, 2) & int(mask_bits, 2)
                        for ip_bits, mask_bits in zip(self.binary_IP, self.binary_Mask)]
        # Broadcast address: IP OR wildcard mask, octet by octet.
        self.broadcast = [int(ip_bits, 2) | int(wild_bits, 2)
                          for ip_bits, wild_bits in zip(self.binary_IP, self.negation_Mask)]

    def __str__(self):
        '''Informal string representation, e.g. ``IPv4(192.168.1.10/24)``.'''
        return 'IPv4({addr}/{cidr})'.format(addr=".".join(map(str, self._address)), cidr=self._cidr)

    def net_mask(self):
        '''Return the dotted-decimal netmask, e.g. ``'255.255.255.0'``.

        :returns str: string showing the netmask of object
        '''
        return '.'.join(map(str, self.mask))

    def network_ip(self):
        '''Returns the network IP
        :returns str: network IP'''
        return '.'.join(map(str, self.network))

    def broadcast_ip(self):
        '''Returns broadcast IP address
        :returns str: broadcast IP address'''
        return '.'.join(map(str, self.broadcast))

    def host_range(self):
        '''Return the first and last usable host addresses.

        Not meaningful for /31 and /32 networks, which have no
        conventional host range.

        :returns str,str: strings with initial to last IP address'''
        first = list(self.network)
        first[-1] += 1
        last = list(self.broadcast)
        last[-1] -= 1
        return ".".join(map(str, first)), ".".join(map(str, last))

    def number_of_host(self):
        '''Calculate the max number of hosts in this IP subnet
        :returns int: count of max hosts'''
        host_bits = sum(bits.count('1') for bits in self.negation_Mask)
        return (2 ** host_bits) - 2

    def host_ip(self, ipoff):
        '''Calculate a host IP at offset *ipoff* from the network address.

        :param int ipoff: offset into the host range (0 yields the
            network address itself)
        :returns str: IP address of host
        :raises ValueError: if *ipoff* is negative or exceeds the
            subnet's host count
        '''
        if ipoff < 0 or ipoff > self.number_of_host():
            # Fixed: the old message claimed "greater than 0" although the
            # check accepts 0 and rejects values above the host count.
            raise ValueError('Offset must be between 0 and {}'.format(self.number_of_host()))
        address = list(self.network)
        octet = 3
        while ipoff > 0:
            address[octet] += ipoff
            if address[octet] < 256:
                break
            # Carry the overflow into the next-more-significant octet.
            address[octet] %= 256
            ipoff >>= 8
            octet -= 1
        return '.'.join(map(str, address))

    def prefix(self):
        '''Return prefix size
        :returns int: number of bits in prefix
        '''
        return self._cidr
if __name__ == '__main__':
    # Require exactly one argument: the address (optionally with /prefix);
    # exit quietly when none is given, matching the original behaviour.
    if len(sys.argv) < 2:
        sys.exit(0)
    ip = IPv4Address(sys.argv[1])
    print('Calculation for: {}'.format(ip))
    print('Prefix: {}'.format(ip.prefix()))
    print('Netmask: {}'.format(ip.net_mask()))
    print('Network ID: {}'.format(ip.network_ip()))
    print('Broadcast address: {}'.format(ip.broadcast_ip()))
    print("Host range: {}".format(ip.host_range()))
    print('Max number of hosts: {}'.format(ip.number_of_host()))
"use server";
import { db } from "../_lib/db";
import { v4 as uuidV4 } from "uuid";
import { verifyEmail } from "../(auth)/verifyEmail";
import { PasswordResetToken } from "../_types/types";
const forgotPassword = async ({ email }: { email: string }) => {
const verificationToken = uuidV4();
const userEmails = await db`
SELECT * FROM users
WHERE email =${email}
`;
const userEmail = userEmails[0];
if (!userEmail) {
return {
success: false,
status: 500,
message: "Email does not exist",
};
}
// Checks if existing verification email object exist in db, deletes if existing match found.
const selectExistingResetToken = await db`
SELECT * FROM password_reset_token
WHERE email = ${email}
`;
const existingResetToken =
selectExistingResetToken[0] as PasswordResetToken;
if (existingResetToken) {
await db`
DELETE FROM password_reset_token
WHERE id = ${[existingResetToken.id]}
`;
}
// Generates verification reset object in database
try {
await db`
INSERT INTO password_reset_token (email, expires, token)
VALUES(${email}, ${new Date(
new Date().getTime() + 3600 * 1000
)}, ${verificationToken})
`;
} catch (error) {
console.error("Error creating password reset token:", error);
return {
success: false,
status: 500,
message:
"Could not generate password reset token. Contact support or try again later.",
};
}
// Sends usesr reset link via email
try {
await verifyEmail({
userName: userEmail.userName as string,
token: verificationToken,
subject: "MemoMake Password reset",
email: userEmail.email as string,
purpose: "resetPassword",
});
} catch (error) {
console.log("Could not send Email via Resend");
return {
success: false,
status: 500,
message: "Could not send verification email",
};
}
return {
success: true,
status: 200,
message: "Password reset sent to email address",
};
};
export default forgotPassword; |
<?php
namespace App\Controller;
use App\Controller\AppController;
use Cake\Routing\Router;
use \Cake\Datasource\Exception\RecordNotFoundException;
use \Cake\Datasource\ConnectionManager;
/**
* GrupoUsuarios Controller
*
* @property \App\Model\Table\GrupoUsuariosTable $GrupoUsuarios
*
* @method \App\Model\Entity\GrupoUsuario[] paginate($object = null, array $settings = [])
*/
class GrupoUsuariosController extends AppController {

    // Breadcrumb trail shared by every action; seeded in initialize() and
    // extended per-action before being handed to the view.
    private $_crumbs;

    public function initialize() {
        parent::initialize();
        // Search.Prg converts POSTed search forms into GET redirects for
        // the index action (Post/Redirect/Get pattern).
        $this->loadComponent('Search.Prg', [
            'actions' => 'index',
        ]);
        $this->_crumbs = [
            'Painel' => Router::url(['controller' => 'usuarios', 'action' => 'dashboard'], true),
            'Grupos de Usuários' => Router::url(['action' => 'index'])
        ];
    }

    /**
     * Index method — paginated, searchable listing of user groups.
     *
     * @return \Cake\Http\Response|null
     */
    public function index()
    {
        // Build the finder from the query-string search parameters.
        $query = $this->GrupoUsuarios
            ->find('search', ['search' => $this->request->getQueryParams()])
        ;
        $this->paginate = ['limit' => 20];
        $grupoUsuarios = $this->paginate($query);
        $this->set(compact('grupoUsuarios'));
        $this->set('_serialize', ['grupoUsuarios']);
        $this->set('crumbs', $this->_crumbs);
    }

    /**
     * View method — show a single user group with its users.
     *
     * @param string|null $id Grupo Usuario id.
     * @return \Cake\Http\Response|null
     * @throws \Cake\Datasource\Exception\RecordNotFoundException When record not found.
     */
    public function view($id = null)
    {
        $grupoUsuario = $this->GrupoUsuarios->get($id, [
            'contain' => ['Usuarios']
        ]);
        $this->set('grupoUsuario', $grupoUsuario);
        $this->set('_serialize', ['grupoUsuario']);
        $this->_crumbs['Visualização'] = Router::url(['action' => 'view']);
        $this->set('crumbs', $this->_crumbs);
    }

    /**
     * Add method — create a new user group.
     *
     * @return \Cake\Http\Response|null Redirects on successful add, renders view otherwise.
     */
    public function add()
    {
        $grupoUsuario = $this->GrupoUsuarios->newEntity();
        if ($this->request->is('post')) {
            $grupoUsuario = $this->GrupoUsuarios->patchEntity($grupoUsuario, $this->request->getData());
            if ($this->GrupoUsuarios->save($grupoUsuario)) {
                $this->Flash->success(__('Registro salvo com sucesso.'));
                return $this->redirect(['action' => 'index']);
            }
            $this->Flash->error(__('Erro ao salvar o registro. Por favor tente novamente.'));
        }
        $this->set(compact('grupoUsuario'));
        $this->set('_serialize', ['grupoUsuario']);
        $this->_crumbs['Cadastro'] = Router::url(['action' => 'add']);
        $this->set('crumbs', $this->_crumbs);
    }

    /**
     * Edit method — update an existing user group.
     *
     * @param string|null $id Grupo Usuario id.
     * @return \Cake\Http\Response|null Redirects on successful edit, renders view otherwise.
     * @throws \Cake\Network\Exception\NotFoundException When record not found.
     */
    public function edit($id = null)
    {
        $grupoUsuario = $this->GrupoUsuarios->get($id, [
            'contain' => []
        ]);
        if ($this->request->is(['patch', 'post', 'put'])) {
            $grupoUsuario = $this->GrupoUsuarios->patchEntity($grupoUsuario, $this->request->getData());
            if ($this->GrupoUsuarios->save($grupoUsuario)) {
                $this->Flash->success(__('Registro salvo com sucesso.'));
                return $this->redirect(['action' => 'index']);
            }
            $this->Flash->error(__('Erro ao salvar o registro. Por favor tente novamente.'));
        }
        $this->set(compact('grupoUsuario'));
        $this->set('_serialize', ['grupoUsuario']);
        $this->_crumbs['Edição'] = Router::url(['action' => 'edit']);
        $this->set('crumbs', $this->_crumbs);
    }

    /**
     * Delete method — accepts a single id via URL or multiple ids via the
     * POSTed 'ids' field.
     *
     * @param string|null $id Grupo Usuario id.
     * @return \Cake\Http\Response|null Redirects to index.
     * @throws \Cake\Datasource\Exception\RecordNotFoundException When record not found.
     */
    public function delete($id = null)
    {
        $this->request->allowMethod(['post', 'delete']);
        // If an $id was passed in the URL
        if (!empty($id)) {
            $this->_handleDelete($id);
        } else {
            // Otherwise fall back to the list of ids POSTed by the bulk form.
            if ($this->request->getData('ids') !== null) {
                $this->_handleDelete($this->request->getData('ids'));
            } else {
                throw new RecordNotFoundException('Registro não encontrado!');
            }
        }
        return $this->redirect(['action' => 'index']);
    }

    /**
     * Handle delete method — deletes one or many groups inside a single
     * transaction; any failure rolls the whole batch back.
     *
     * @param int|array $ids GrupoUsuarios ids.
     * @throws \Cake\Datasource\Exception\RecordNotFoundException When a record is not found.
     */
    private function _handleDelete($ids) {
        if (!is_array($ids)) {
            $ids = [$ids];
        }
        $conn = ConnectionManager::get($this->GrupoUsuarios->defaultConnectionName());
        $conn->begin();
        try {
            foreach ($ids as $id) {
                $grupoUsuario = $this->GrupoUsuarios->get($id);
                if (!$this->GrupoUsuarios->delete($grupoUsuario)) {
                    throw new \Exception();
                }
            }
            $conn->commit();
            $this->Flash->success(__('Registro(s) excluído com sucesso.'));
        } catch (\PDOException $e) {
            // A PDO error here is a foreign-key constraint violation.
            $conn->rollback();
            $this->Flash->error(__('Não foi possível excluir pois um dos registros selecionados já está relacionado a outro registro.'));
        } catch (\Exception $e) {
            $conn->rollback();
            $this->Flash->error(__('Erro ao excluir o(s) registro(s)! Por favor tente novamente.'));
        }
    }
}
import React, { useState, useRef } from "react";
import styled from "@emotion/styled";
import { Link, useNavigate } from "react-router-dom";
import { useAuthContext } from "../contexts/AuthContext";
import Header from "../components/Header";
// Emotion styled primitives for the login page.
const Container = styled.div({
  textAlign: "center",
});
// Centers the form fields in a vertical column.
const Wrapper = styled.div({
  display: "flex",
  justifyContent: "center",
  alignItems: "center",
  flexDirection: "column",
});
// Text input; width narrows at tablet/desktop breakpoints.
const Input = styled.input({
  backgroundColor: "#dedede",
  width: "85vw",
  height: "35px",
  marginTop: "0.2rem",
  marginBottom: "1rem",
  borderRadius: "5px",
  paddingLeft: "5px",
  border: "none",
  textAlign: "center",
  "&:focus": {
    outline: "none",
  },
  "@media screen and (min-width: 600px)": {
    width: "40vw",
  },
  "@media screen and (min-width: 1024px)": {
    width: "25vw",
  },
});
const Button = styled.button({
  background: "linear-gradient(to right, #76b582, #368a46)",
  width: "100px",
  height: "35px",
  borderRadius: "5px",
  color: "white",
  textAlign: "center",
  border: "none",
  marginBottom: "1rem",
  cursor: "pointer",
});
// Router link styled as plain small text.
const SignUpLink = styled(Link)({
  cursor: "pointer",
  textDecoration: "none",
  color: "black",
  fontSize: "0.8rem",
  "@media screen and (min-width: 1024px)": {
    fontSize: "0.9rem",
  },
});
const LoginPage = () => {
// Ref to email
const emailRef = useRef();
// Ref to password
const passwordRef = useRef();
const [error, setError] = useState(null);
// Login function from user context
const { login } = useAuthContext();
const navigate = useNavigate();
const handleSubmit = async (e) => {
e.preventDefault();
setError(null);
try {
// Gives login function email and password
await login(emailRef.current.value, passwordRef.current.value);
// Navigates to homepage
navigate("/");
// Catches error and sets message to error message
} catch (e) {
setError(e.message);
}
};
return (
<Container>
<Header title={"LOG IN "} />
{error && <h2>{error}</h2>}
<form onSubmit={handleSubmit}>
<Wrapper>
<label>Email</label>
<Input type="email" ref={emailRef} required={true} />
<label>Password</label>
<div>
<Input type="password" ref={passwordRef} required={true} />
</div>
<Button type="submit">LOG IN</Button>
</Wrapper>
</form>
<SignUpLink to="/signup">Don't have an account? Sign up here</SignUpLink>
</Container>
);
};
export default LoginPage; |
<!DOCTYPE html>
<html lang="pt-br">
<head>
    <meta charset="UTF-8">
    <meta http-equiv="X-UA-Compatible" content="IE=edge">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <!-- Page styles plus third-party assets: Font Awesome icons, jQuery and SweetAlert2 dialogs -->
    <link rel="stylesheet" href="./css/styleLogin.css">
    <link rel="shortcut icon" href="assets/imagotipo.svg" type="image/x-icon">
    <script src="https://kit.fontawesome.com/2780876011.js" crossorigin="anonymous"></script>
    <script src="https://code.jquery.com/jquery-3.6.3.js"></script>
    <script src="https://cdn.jsdelivr.net/npm/sweetalert2@11"></script>
    <script src="./js/funcoes.js"></script>
    <title> BeautyCurls - Entrar </title>
</head>
<body>
    <!-- Login screen: logo panel on one side, sign-in form on the other.
         Submission is handled by entrarConta() in the inline script below. -->
    <main>
        <section class="card-logo">
            <div class="logo">
                <a href="./index.html">
                    <img src="./assets/logotipo.svg">
                </a>
            </div>
        </section>
        <section class="card-sign-up">
            <div class="content">
                <div class="text">
                    <h1 class="title"> Entrar </h1>
                    <div class="subtitle">
                        Olá, bem-vindo de volta! Entre na sua conta e tenha acesso ao nosso <b> blog </b>.
                    </div>
                </div>
                <div class="form">
                    <div class="form-item">
                        <h3> Email </h3>
                        <input type="email" id="input_email" placeholder="Seu Email" required>
                    </div>
                    <div class="form-item">
                        <h3> Senha </h3>
                        <input type="password" id="input_senha" placeholder="Sua Senha" required>
                    </div>
                    <div class="form-item">
                        <button class="btn-background" onclick="entrarConta()">
                            Entrar
                        </button>
                    </div>
                </div>
                <div class="redirect">
                    <span class="text-redirect">
                        Não possui uma conta? <a href="./cadastro.html"> Cadastre-se </a>
                    </span>
                </div>
            </div>
        </section>
    </main>
</body>
</html>
<script>
    // Sign-in handler: validates the form, POSTs the credentials to
    // /usuarios/autenticar and, on success, stores the returned profile in
    // sessionStorage before redirecting to the blog.
    // NOTE: this script sits after </html>; browsers tolerate it, but it
    // belongs inside <body>.
    function entrarConta() {
        var emailVar = input_email.value;
        var senhaVar = input_senha.value;
        if (emailVar == "" || senhaVar == "") {
            Swal.fire({
                icon: 'error',
                title: 'Oops...',
                text: 'Preencha todos os campos!',
                confirmButtonColor: "#FF5E81",
                iconColor: '#FF5E81',
            })
        } else {
            // aguardar()/finalizarAguardar() — presumably loading-spinner
            // helpers from ./js/funcoes.js; confirm.
            aguardar();
            console.log("FORM LOGIN: ", emailVar);
            console.log("FORM SENHA: ", senhaVar);
            fetch("/usuarios/autenticar", {
                method: "POST",
                headers: {
                    "Content-Type": "application/json"
                },
                body: JSON.stringify({
                    emailServer: emailVar,
                    senhaServer: senhaVar
                })
            }).then(function (resposta) {
                console.log("ESTOU NO THEN DO entrar()!")
                if (resposta.ok) {
                    console.log(resposta);
                    resposta.json().then(json => {
                        console.log(json);
                        console.log(JSON.stringify(json));
                        // SECURITY NOTE(review): the raw password (SENHA) is
                        // stored in sessionStorage and the credentials are
                        // logged to the console above — both expose secrets
                        // client-side. Other pages appear to read these keys;
                        // confirm before removing.
                        sessionStorage.ID = json.idUsuario;
                        sessionStorage.NOME = json.nome;
                        sessionStorage.SOBRENOME = json.sobrenome;
                        sessionStorage.USER = json.nomeUser;
                        sessionStorage.EMAIL = json.email;
                        sessionStorage.SENHA = json.senha;
                        sessionStorage.FK_CURVATURA = json.fkCurvaturaCabelo;
                        sessionStorage.CURVATURA = json.nomeCurvatura;
                        sessionStorage.FOTO_PERFIL = json.url;
                        // Success toast in the top-right corner; pauses while hovered.
                        const Toast = Swal.mixin({
                            toast: true,
                            position: 'top-end',
                            showConfirmButton: false,
                            timer: 2500,
                            timerProgressBar: true,
                            didOpen: (toast) => {
                                toast.addEventListener('mouseenter', Swal.stopTimer)
                                toast.addEventListener('mouseleave', Swal.resumeTimer)
                            }
                        })
                        Toast.fire({
                            icon: 'success',
                            title: 'Login realizado com sucesso'
                        })
                        setTimeout(() => {
                            window.location = "./blog/index.html";
                        }, 2500); // apenas para exibir o loading
                    });
                } else {
                    console.log("Houve um erro ao tentar realizar o login!");
                    resposta.text().then(texto => {
                        console.error(texto);
                        finalizarAguardar(texto);
                    });
                }
            }).catch(function (erro) {
                console.log(erro);
            })
            return false;
        }
    }
</script>
PUT-ENVIRONMENT-BLUEPRINT-CONFIGURATION()
NAME
       put-environment-blueprint-configuration - Writes the configuration for
       the specified environment blueprint in Amazon DataZone.
DESCRIPTION
Writes the configuration for the specified environment blueprint in
Amazon DataZone.
See also: AWS API Documentation
SYNOPSIS
put-environment-blueprint-configuration
--domain-identifier <value>
--enabled-regions <value>
--environment-blueprint-identifier <value>
[--manage-access-role-arn <value>]
[--provisioning-role-arn <value>]
[--regional-parameters <value>]
[--cli-input-json <value>]
[--generate-cli-skeleton <value>]
[--debug]
[--endpoint-url <value>]
[--no-verify-ssl]
[--no-paginate]
[--output <value>]
[--query <value>]
[--profile <value>]
[--region <value>]
[--version <value>]
[--color <value>]
[--no-sign-request]
[--ca-bundle <value>]
[--cli-read-timeout <value>]
[--cli-connect-timeout <value>]
OPTIONS
--domain-identifier (string)
The identifier of the Amazon DataZone domain.
--enabled-regions (list)
Specifies the enabled Amazon Web Services Regions.
(string)
Syntax:
"string" "string" ...
--environment-blueprint-identifier (string)
The identifier of the environment blueprint.
--manage-access-role-arn (string)
The ARN of the manage access role.
--provisioning-role-arn (string)
The ARN of the provisioning role.
--regional-parameters (map)
The regional parameters in the environment blueprint.
key -> (string)
value -> (map)
key -> (string)
value -> (string)
Shorthand Syntax:
          KeyName1={KeyName1=string,KeyName2=string},KeyName2={KeyName1=string,KeyName2=string}
JSON Syntax:
{"string": {"string": "string"
...}
...}
--cli-input-json (string) Performs service operation based on the JSON
string provided. The JSON string follows the format provided by --gen-
erate-cli-skeleton. If other arguments are provided on the command
line, the CLI values will override the JSON-provided values. It is not
possible to pass arbitrary binary values using a JSON-provided value as
the string will be taken literally.
--generate-cli-skeleton (string) Prints a JSON skeleton to standard
output without sending an API request. If provided with no value or the
value input, prints a sample input JSON that can be used as an argument
for --cli-input-json. If provided with the value output, it validates
the command inputs and returns a sample output JSON for that command.
GLOBAL OPTIONS
--debug (boolean)
Turn on debug logging.
--endpoint-url (string)
Override command's default URL with the given URL.
--no-verify-ssl (boolean)
By default, the AWS CLI uses SSL when communicating with AWS services.
For each SSL connection, the AWS CLI will verify SSL certificates. This
option overrides the default behavior of verifying SSL certificates.
--no-paginate (boolean)
Disable automatic pagination.
--output (string)
The formatting style for command output.
o json
o text
o table
--query (string)
A JMESPath query to use in filtering the response data.
--profile (string)
Use a specific profile from your credential file.
--region (string)
The region to use. Overrides config/env settings.
--version (string)
Display the version of this tool.
--color (string)
Turn on/off color output.
o on
o off
o auto
--no-sign-request (boolean)
Do not sign requests. Credentials will not be loaded if this argument
is provided.
--ca-bundle (string)
The CA certificate bundle to use when verifying SSL certificates. Over-
rides config/env settings.
--cli-read-timeout (int)
The maximum socket read time in seconds. If the value is set to 0, the
socket read will be blocking and not timeout. The default value is 60
seconds.
--cli-connect-timeout (int)
The maximum socket connect time in seconds. If the value is set to 0,
the socket connect will be blocking and not timeout. The default value
is 60 seconds.
OUTPUT
createdAt -> (timestamp)
The timestamp of when the environment blueprint was created.
domainId -> (string)
The identifier of the Amazon DataZone domain.
enabledRegions -> (list)
Specifies the enabled Amazon Web Services Regions.
(string)
environmentBlueprintId -> (string)
The identifier of the environment blueprint.
manageAccessRoleArn -> (string)
The ARN of the manage access role.
provisioningRoleArn -> (string)
The ARN of the provisioning role.
regionalParameters -> (map)
The regional parameters in the environment blueprint.
key -> (string)
value -> (map)
key -> (string)
value -> (string)
updatedAt -> (timestamp)
The timestamp of when the environment blueprint was updated.
PUT-ENVIRONMENT-BLUEPRINT-CONFIGURATION() |
const _ = require('lodash');
const express = require('express');
const bodyParser = require('body-parser');
const { ObjectID } = require('mongodb');
var config = require('./config/config');
var { mongoose } = require('./db/mongoose');
var { Todo } = require('./models/todo');
var { User } = require('./models/user');
var { authenticate } = require('./middleware/authenticate');
var app = express();

// Port comes from the environment (set up by ./config/config above).
const port = process.env.PORT;

app.use(bodyParser.json());

// POST /todos — create a todo owned by the authenticated user.
app.post('/todos', authenticate, (req, res) => {
  var todo = new Todo({
    text: req.body.text,
    _creator: req.user._id
  });

  todo.save().then(
    doc => {
      res.send(doc);
    },
    err => {
      res.status(400).send(err);
    }
  );
});

// GET /todos — list the authenticated user's todos.
app.get('/todos', authenticate, (req, res) => {
  Todo.find({ _creator: req.user._id }).then(
    todos => {
      res.send({ todos });
    },
    err => {
      // Fixed: a query failure is a bad request, not a missing resource
      // (was 404) — now consistent with the other handlers.
      res.status(400).send(err);
    }
  );
});

// GET /todos/:id — fetch one todo; 404 when the id is invalid or not owned
// by the caller.
app.get('/todos/:id', authenticate, (req, res) => {
  var id = req.params.id;

  if (!ObjectID.isValid(id)) {
    return res.status(404).send();
  }

  Todo.findOne({
    _id: id,
    _creator: req.user._id
  }).then(
    todo => {
      if (!todo) {
        return res.status(404).send();
      }
      res.send({ todo });
    },
    err => {
      // Fixed: query errors now report 400, keeping 404 for "not found".
      res.status(400).send(err);
    }
  );
});

// DELETE /todos/:id — remove one todo owned by the caller.
app.delete('/todos/:id', authenticate, (req, res) => {
  var id = req.params.id;

  if (!ObjectID.isValid(id)) {
    return res.status(404).send();
  }

  Todo.findOneAndRemove({
    _id: id,
    _creator: req.user._id
  }).then(
    todo => {
      if (!todo) {
        return res.status(404).send();
      }
      res.send({ todo });
    },
    err => {
      // Fixed: previously sent the error with an implicit 200 status.
      res.status(400).send(err);
    }
  );
});

// PATCH /todos/:id — update text/completed; completedAt is managed here,
// never taken from the client.
app.patch('/todos/:id', authenticate, (req, res) => {
  var id = req.params.id;
  // Whitelist updatable fields.
  var body = _.pick(req.body, ['text', 'completed']);

  if (!ObjectID.isValid(id)) {
    return res.status(404).send();
  }

  if (_.isBoolean(body.completed) && body.completed) {
    body.completedAt = new Date().getTime();
  } else {
    body.completed = false;
    body.completedAt = null;
  }

  Todo.findOneAndUpdate(
    {
      _id: id,
      _creator: req.user._id
    },
    {
      $set: body
    },
    {
      new: true
    }
  )
    .then(todo => {
      if (!todo) {
        return res.status(404).send();
      }
      res.send({ todo });
    })
    .catch(err => res.status(400).send());
});

// POST /users — sign up; responds with the user and an x-auth token header.
app.post('/users', (req, res) => {
  var body = _.pick(req.body, ['email', 'password']);
  var user = new User(body);

  user
    .save()
    .then(() => {
      return user.generateAuthToken();
    })
    .then(token => {
      res.header('x-auth', token).send(user);
    })
    .catch(e => res.status(400).send(e));
});

// GET /users/me — return the authenticated user.
app.get('/users/me', authenticate, (req, res) => {
  res.send(req.user);
});

// POST /users/login — authenticate and issue a fresh token.
app.post('/users/login', (req, res) => {
  var body = _.pick(req.body, ['email', 'password']);

  User.findByCredentials(body.email, body.password)
    .then(user => {
      return user.generateAuthToken().then(token => {
        res.header('x-auth', token).send(user);
      });
    })
    .catch(e => {
      res.status(400).send();
    });
});

// DELETE /users/me/token — log out by revoking the current token.
app.delete('/users/me/token', authenticate, (req, res) => {
  req.user.removeToken(req.token).then(
    () => {
      res.status(200).send();
    },
    () => {
      res.status(400).send();
    }
  );
});

// Start listening after all routes are registered.
app.listen(port, () => {
  // Fixed: log the actual port (the message was hard-coded to 3000).
  console.log(`Started on port ${port}`);
});

module.exports = { app };
using System.Reflection;
using Oxx.Backend.Generators.PocoSchema.Core.Attributes;
using Oxx.Backend.Generators.PocoSchema.Core.Configuration;
using Oxx.Backend.Generators.PocoSchema.Core.Configuration.Events;
using Oxx.Backend.Generators.PocoSchema.Core.Extensions;
using Oxx.Backend.Generators.PocoSchema.Core.Models.Pocos;
using Oxx.Backend.Generators.PocoSchema.Core.Models.Pocos.Contracts;
using Oxx.Backend.Generators.PocoSchema.Core.Models.Types;
namespace Oxx.Backend.Generators.PocoSchema.Core.Logic.PocoExtraction;
/// <summary>
/// Base class for extractors that walk CLR types and collect the POCO
/// structures (objects, enums, atoms) a schema generator should emit,
/// classifying each candidate type as supported or unsupported.
/// </summary>
public abstract class ConfiguredPocoStructureExtractor<TSchemaConfiguration, TSchemaEvents, TDirectoryOutputConfiguration> : IPocoStructureExtractor
	where TSchemaConfiguration: ISchemaConfiguration<TSchemaEvents, TDirectoryOutputConfiguration>
	where TSchemaEvents : ISchemaEvents
	where TDirectoryOutputConfiguration: IDirectoryOutputConfiguration
{
	// Schema configuration supplied by the concrete generator.
	protected readonly TSchemaConfiguration Configuration;

	protected ConfiguredPocoStructureExtractor(TSchemaConfiguration configuration)
	{
		Configuration = configuration;
	}

	#region Interface implementations

	/// <summary>Extracts structures for the requested types, optionally following member dependencies.</summary>
	public abstract IReadOnlyCollection<IPocoStructure> Get(IEnumerable<Type> requestedTypes, bool includeDependencies = true);

	/// <summary>Extracts structures for every discoverable type.</summary>
	public abstract IReadOnlyCollection<IPocoStructure> GetAll();

	#endregion

	// Classifies `type` into `supported` (keyed by the structure kind from
	// its SchemaTypeAttribute) or `unsupported`, recursing into generic
	// arguments and, optionally, member dependencies. Already-classified
	// types are skipped.
	private void CheckSupport(Type type, bool includeDependencies, TypeCollectionTypeDictionary supported, ICollection<UnsupportedType> unsupported)
	{
		if (unsupported.Any(x => x.Type == type))
		{
			return;
		}

		// if is generic, check if all generic arguments are supported
		if (type.IsGenericType)
		{
			CheckSupportForGenerics(type, includeDependencies, supported, unsupported);
		}

		// Types without a SchemaTypeAttribute are silently ignored.
		var schemaTypeAttribute = type.GetCustomAttribute<SchemaTypeAttribute>();
		if (schemaTypeAttribute is null)
		{
			return;
		}

		var pocoStructure = schemaTypeAttribute.UnderlyingType;
		if (!supported.ContainsKey(pocoStructure))
		{
			supported.Add(pocoStructure, new List<Type>());
		}

		if (supported[pocoStructure].Contains(type))
		{
			return;
		}

		if (includeDependencies)
		{
			CheckSupportForDependencies(type, includeDependencies, supported, unsupported);
		}

		// NOTE(review): IsSupported returns an exception when the type is
		// NOT supported, so an attributed generic type always lands in
		// `unsupported` here even though its generic arguments were visited
		// above — confirm this is intentional.
		if (IsSupported(type) is { } exception)
		{
			unsupported.Add(new UnsupportedType(type, exception));
			// _configuration.Events.UnsupportedTypeFound?.Invoke(this, new UnsupportedTypeFoundEventArgs(type, exception));
			return;
		}

		supported[pocoStructure].Add(type);
	}

	// Recurses into every schema-relevant member of `type`.
	private void CheckSupportForDependencies(Type type, bool includeDependencies, TypeCollectionTypeDictionary supported, ICollection<UnsupportedType> unsupported)
	{
		foreach (var member in type.GetValidSchemaMembers())
		{
			CheckSupport(member.Type, includeDependencies, supported, unsupported);
		}
	}

	// Recurses into every generic argument of `type`.
	private void CheckSupportForGenerics(Type type, bool includeDependencies, TypeCollectionTypeDictionary supported, ICollection<UnsupportedType> unsupported)
	{
		var genericArguments = type.GetGenericArguments();
		foreach (var genericArgument in genericArguments)
		{
			CheckSupport(genericArgument, includeDependencies, supported, unsupported);
		}
	}

	// Runs the classification over `types`, reusing caller-provided
	// accumulators when given, and returns both partitions.
	protected TypeSupport GetTypeSchemaDictionary(IEnumerable<Type> types,
		bool includeDependencies,
		TypeCollectionTypeDictionary? supported = null,
		ICollection<UnsupportedType>? unsupported = null)
	{
		supported ??= new TypeCollectionTypeDictionary();
		unsupported ??= new List<UnsupportedType>();

		foreach (var type in types)
		{
			CheckSupport(type, includeDependencies, supported, unsupported);
		}

		return new TypeSupport(supported, unsupported);
	}

	// Returns null when the type is supported, otherwise the exception
	// describing why it is not. (The name reads inverted — see the review
	// note in CheckSupport.)
	private Exception? IsSupported(Type type)
	{
		if (type.IsGenericType)
		{
			return new ArgumentException("Generic types are not supported");
		}

		return null;
	}

	// Materialises IPocoStructure instances (atoms, enums, objects) from the
	// classified type dictionary; missing kinds default to empty lists.
	protected static IReadOnlyCollection<IPocoStructure> ParseStructures(TypeCollectionTypeDictionary typesCollection)
	{
		var objectTypes = typesCollection
			.FirstOrDefault(x => x.Key == typeof(PocoObject), new KeyValuePair<Type, List<Type>>(default!, new List<Type>()))
			.Value;
		var enumTypes = typesCollection
			.FirstOrDefault(x => x.Key == typeof(PocoEnum), new KeyValuePair<Type, List<Type>>(default!, new List<Type>()))
			.Value;
		var atomTypes = typesCollection
			.FirstOrDefault(x => x.Key == typeof(PocoAtom), new KeyValuePair<Type, List<Type>>(default!, new List<Type>()))
			.Value;

		var atoms = atomTypes
			.Select(t => new PocoAtom(t))
			.Cast<IPocoStructure>()
			.ToArray();

		var enums = enumTypes
			.Select(t => new PocoEnum(t))
			.Cast<IPocoStructure>()
			.ToArray();

		var objects = objectTypes
			.Select(t =>
			{
				var validSchemaMembers = t.GetValidSchemaMembers();
				return new PocoObject(t, validSchemaMembers);
			})
			.Cast<IPocoStructure>()
			.ToArray();

		return atoms
			.Concat(enums)
			.Concat(objects)
			.ToArray();
	}
}
import os
import argparse
from transformers import (
AutoModelForCausalLM,
default_data_collator,
AutoTokenizer,
set_seed,
)
from datasets import load_from_disk
import torch
from huggingface_hub import HfFolder
from transformers import Trainer, TrainingArguments
def parse_arge():
    """Parse command-line arguments for the training job.

    Returns the tuple from ``parser.parse_known_args()``:
    ``(namespace, unknown_args)``.
    """

    def _str2bool(value):
        # Fixed: `type=bool` is an argparse trap — bool("False") is True, so
        # `--bf16 False` used to enable bf16. Accept the usual spellings
        # while staying CLI-compatible.
        return str(value).lower() in ("true", "1", "yes", "y")

    parser = argparse.ArgumentParser()
    # Model id and dataset path arguments.
    parser.add_argument("--model_id", type=str, default="google/flan-t5-xl", help="Model id to use for training.")
    parser.add_argument("--train_dataset_path", type=str, help="Path to processed dataset stored by sagemaker.")
    parser.add_argument(
        "--repository_id", type=str, default=None, help="Hugging Face Repository id for uploading models"
    )
    # Training hyperparameters: epochs, batch sizes, optimizer, learning rate, seed.
    parser.add_argument("--epochs", type=int, default=3, help="Number of epochs to train for.")
    parser.add_argument("--per_device_train_batch_size", type=int, default=8, help="Batch size to use for training.")
    parser.add_argument("--per_device_eval_batch_size", type=int, default=8, help="Batch size to use for testing.")
    # Fixed: the help texts for --optimizer and --max_steps were copy-pasted
    # from other options and described the wrong thing.
    parser.add_argument("--optimizer", type=str, default="adamw_hf", help="Optimizer to use for training.")
    parser.add_argument("--lr", type=float, default=3e-3, help="Learning rate to use for training.")
    parser.add_argument("--seed", type=int, default=42, help="Seed to use for training.")
    parser.add_argument("--deepspeed", type=str, default=None, help="Path to deepspeed config file.")
    parser.add_argument("--gradient_checkpointing", type=_str2bool, default=True, help="Whether to use gradient checkpointing.")
    parser.add_argument("--access_token", type=str, default=None)
    parser.add_argument("--max_steps", type=int, default=None, help="Maximum number of training steps (overrides epochs when set).")
    parser.add_argument(
        "--bf16",
        type=_str2bool,
        # Fixed: probe device capability only when CUDA is available — the
        # old default expression crashed on CPU-only machines.
        default=torch.cuda.is_available() and torch.cuda.get_device_capability()[0] == 8,
        help="Whether to use bf16.",
    )
    parser.add_argument(
        "--hf_token",
        type=str,
        default=HfFolder.get_token(),
        help="Token to use for uploading models to Hugging Face Hub.",
    )
    args = parser.parse_known_args()
    print(args)
    return args
def training_function(args):
    """Run fine-tuning: load data and model, train with the HF Trainer, save artifacts.

    Args:
        args: Parsed command line namespace from ``parse_arge()``.
    """
    # set seed
    set_seed(args.seed)
    # Persist the HF token so downstream hub calls (model download) are authenticated.
    from huggingface_hub.hf_api import HfFolder;
    HfFolder.save_token(args.access_token)
    dataset = load_from_disk(args.train_dataset_path)
    # load dataset from disk and tokenizer
    tokenizer = AutoTokenizer.from_pretrained(args.model_id)
    # load model from the hub
    # NOTE(review): AutoModelForCausalLM with a flan-t5 default model id —
    # T5 is a seq2seq architecture; confirm the intended model family.
    model = AutoModelForCausalLM.from_pretrained(
        args.model_id,
        #token=args.access_token,
        cache_dir="/opt/ml/sagemaker/warmpoolcache",
        use_cache=False if args.gradient_checkpointing else True, # this is needed for gradient checkpointing
    )
    # Define compute metrics function
    # Define training args
    # SM_OUTPUT_DATA_DIR is set by the SageMaker runtime.
    output_dir = os.environ["SM_OUTPUT_DATA_DIR"]
    training_args = TrainingArguments(
        output_dir=output_dir,
        per_device_train_batch_size=args.per_device_train_batch_size,
        per_device_eval_batch_size=args.per_device_eval_batch_size,
        bf16=args.bf16,  # Use BF16 if available
        learning_rate=args.lr,
        num_train_epochs=args.epochs,
        deepspeed=args.deepspeed,
        gradient_checkpointing=args.gradient_checkpointing,
        # logging strategies
        logging_dir=f"{output_dir}/logs",
        logging_strategy="steps",
        logging_steps=500,
        # NOTE(review): save_strategy="no" means save_total_limit has no effect
        # during training; the final model is saved explicitly below.
        save_strategy="no",
        save_total_limit=2,
        optim=args.optimizer,
        max_steps=args.max_steps,
        # push to hub parameters
    )
    # Create Trainer instance
    trainer = Trainer(
        model=model,
        args=training_args,
        train_dataset=dataset["train"],
        eval_dataset=dataset["validation"],
        data_collator=default_data_collator
    )
    # Start training
    trainer.train()
    # Save our tokenizer and create model card
    tokenizer.save_pretrained(output_dir)
    # Saves the model to s3 uses os.environ["SM_MODEL_DIR"] to make sure checkpointing works
    trainer.save_model(os.environ["SM_MODEL_DIR"])
    tokenizer.save_pretrained(os.environ["SM_MODEL_DIR"])
def main():
    """Entry point: parse CLI args, report torchrun topology, and launch training."""
    parsed_args, _unknown = parse_arge()
    # torchrun/torch.distributed export these variables; int() fails fast if absent.
    topology = {name: int(os.environ[name]) for name in ("LOCAL_RANK", "RANK", "WORLD_SIZE")}
    print(
        "local rank {LOCAL_RANK} global rank {RANK} world size {WORLD_SIZE}".format(**topology)
    )
    training_function(parsed_args)


if __name__ == "__main__":
    main()
//Detector PMT Header
#ifndef DetectorPMT_h
#define DetectorPMT_h

#include "globals.hh"
#include "G4LogicalVolume.hh"
#include "G4AssemblyVolume.hh"
// NOTE(review): G4Material is presumably pulled in transitively via
// G4NistManager.hh — consider including G4Material.hh explicitly.
#include "G4NistManager.hh"

namespace CeBr3
{
    // Geant4 description of the detector's photomultiplier tube (PMT).
    // Usage: configure dimensions/materials with the setters, call
    // ConstructPMT(), then retrieve the assembly via GetPMT().
    class DetectorPMT
    {
        public:
            DetectorPMT();
            ~DetectorPMT();

            //Set Functions (geometry, in Geant4 length units)
            void SetWidth(G4double w) {width = w;};
            void SetLength(G4double l) {length = l;};
            void SetThickness(G4double t) {thickness = t;};
            void SetCathodeThick(G4double t) {cathodeThick = t;};
            //Set Functions (materials)
            void SetGlassMat(G4Material* m) {glassMat = m;};
            void SetGasMat(G4Material* m) {gasMat = m;};
            void SetCathodeMat(G4Material* m) {cathodeMat = m;};

            //Get Functions
            G4double GetWidth() {return width;};
            G4double GetLength() {return length;};
            G4double GetThickness() {return thickness;};
            G4double GetCathodeThick() {return cathodeThick;};
            G4Material* GetGlassMat() {return glassMat;};
            G4Material* GetGasMat() {return gasMat;};
            G4Material* GetCathodeMat() {return cathodeMat;};
            // Returns the assembled PMT; valid only after ConstructPMT().
            G4AssemblyVolume* GetPMT() {return pmt;};

            //Construct — builds the assembly from the parameters above.
            void ConstructPMT();

        private:
            //Parameters
            G4double width;          // PMT housing width
            G4double length;         // PMT housing length
            G4double thickness;      // housing wall thickness
            G4double cathodeThick;   // photocathode thickness
            G4Material* glassMat;    // window/envelope material
            G4Material* gasMat;      // internal (vacuum/gas) material
            G4Material* cathodeMat;  // photocathode material

            //Assembly produced by ConstructPMT()
            G4AssemblyVolume* pmt;
    };
}

#endif
---
title: حماية صف معين في ورقة عمل Excel
linktitle: حماية صف معين في ورقة عمل Excel
second_title: Aspose.Cells لمرجع .NET API
description: قم بحماية صف معين في Excel باستخدام Aspose.Cells لـ .NET. دليل خطوة بخطوة لتأمين بياناتك السرية.
type: docs
weight: 90
url: /ar/net/protect-excel-file/protect-specific-row-in-excel-worksheet/
---
تعد حماية البيانات السرية في جدول بيانات Excel أمرًا ضروريًا لضمان أمن المعلومات. يقدم Aspose.Cells for .NET حلاً قويًا لحماية صفوف معينة في جدول بيانات Excel. سيرشدك هذا الدليل إلى كيفية حماية صف معين في ورقة عمل Excel باستخدام كود مصدر C# المتوفر. اتبع هذه الخطوات البسيطة لإعداد حماية الصف في ملفات Excel الخاصة بك.
## الخطوة 1: استيراد المكتبات المطلوبة
للبدء، تأكد من تثبيت Aspose.Cells for .NET على نظامك. تحتاج أيضًا إلى إضافة المراجع المناسبة في مشروع C# الخاص بك لتتمكن من استخدام وظيفة Aspose.Cells. إليك الكود لاستيراد المكتبات المطلوبة:
```csharp
// أضف المراجع اللازمة
using Aspose.Cells;
```
## الخطوة 2: إنشاء مصنف Excel وجدول البيانات
بعد استيراد المكتبات المطلوبة، يمكنك إنشاء مصنف Excel جديد وورقة عمل جديدة. هيريس كيفية القيام بذلك:
```csharp
//المسار إلى دليل المستندات.
string dataDir = "YOUR DOCUMENTS DIRECTORY";
// قم بإنشاء دليل إذا لم يكن موجودًا بالفعل.
bool IsExists = System.IO.Directory.Exists(dataDir);
if (!IsExists)
System.IO.Directory.CreateDirectory(dataDir);
// إنشاء مصنف جديد.
Workbook wb = new Workbook();
// قم بإنشاء كائن جدول بيانات واحصل على الورقة الأولى.
Worksheet sheet = wb.Worksheets[0];
```
## الخطوة 3: تحديد النمط وعلامة النمط
سنقوم الآن بتعيين نمط الخلية وعلامة النمط لفتح جميع الأعمدة في ورقة العمل. هنا هو الكود الضروري:
```csharp
// قم بتعيين كائن النمط.
Style style;
// قم بتعيين كائن styleflag.
StyleFlag flag;
// قم بالمرور عبر كافة الأعمدة في ورقة العمل وقم بإلغاء قفلها.
for (int i = 0; i <= 255; i++)
{
style = sheet.Cells.Columns[(byte)i].Style;
style. IsLocked = false;
flag = new StyleFlag();
flag. Locked = true;
sheet.Cells.Columns[(byte)i].ApplyStyle(style, flag);
}
```
## الخطوة 4: حماية الخط المحدد
الآن سنقوم بحماية الصف المحدد في ورقة العمل. سنقوم بقفل الصف الأول لمنع أي تعديل. إليك الطريقة:
```csharp
// احصل على نمط السطر الأول.
style = sheet.Cells.Rows[0].Style;
// أغلق.
style. IsLocked = true;
//إنشاء مثيل للعلم.
flag = new StyleFlag();
// قم بتعيين معلمة القفل.
flag. Locked = true;
// تطبيق النمط على السطر الأول.
sheet.Cells.ApplyRowStyle(0, style, flag);
```
## الخطوة 5: حماية ورقة العمل
وأخيرًا، سنقوم بحماية ورقة عمل Excel بأكملها لمنع التعديل غير المصرح به. إليك الطريقة:
```csharp
// حماية ورقة العمل.
sheet.Protect(ProtectionType.All);
```
## الخطوة 6: احفظ ملف Excel المحمي
بمجرد الانتهاء من حماية صف معين في ورقة عمل Excel، يمكنك حفظ ملف Excel المحمي على نظامك. إليك الطريقة:
```csharp
// احفظ ملف إكسل.
wb.Save(dataDir + "output.out.xls", SaveFormat.Excel97To2003);
```
بعد اتباع هذه الخطوات، ستكون قد نجحت في حماية صف معين في جدول بيانات Excel الخاص بك باستخدام Aspose.Cells for .NET.
### نموذج التعليمات البرمجية المصدر لحماية صف معين في ورقة عمل Excel باستخدام Aspose.Cells لـ .NET
```csharp
//المسار إلى دليل المستندات.
string dataDir = "YOUR DOCUMENT DIRECTORY";
// قم بإنشاء الدليل إذا لم يكن موجودًا بالفعل.
bool IsExists = System.IO.Directory.Exists(dataDir);
if (!IsExists)
System.IO.Directory.CreateDirectory(dataDir);
// إنشاء مصنف جديد.
Workbook wb = new Workbook();
// قم بإنشاء كائن ورقة عمل واحصل على الورقة الأولى.
Worksheet sheet = wb.Worksheets[0];
// تحديد كائن النمط.
Style style;
// تحديد كائن styleflag.
StyleFlag flag;
// قم بالمرور عبر كافة الأعمدة الموجودة في ورقة العمل وقم بإلغاء تأمينها.
for (int i = 0; i <= 255; i++)
{
style = sheet.Cells.Columns[(byte)i].Style;
style.IsLocked = false;
flag = new StyleFlag();
flag.Locked = true;
sheet.Cells.Columns[(byte)i].ApplyStyle(style, flag);
}
// احصل على نمط الصف الأول.
style = sheet.Cells.Rows[0].Style;
// أغلق.
style.IsLocked = true;
//إنشاء مثيل للعلم.
flag = new StyleFlag();
// اضبط إعداد القفل.
flag.Locked = true;
// قم بتطبيق النمط على الصف الأول.
sheet.Cells.ApplyRowStyle(0, style, flag);
// حماية الورقة.
sheet.Protect(ProtectionType.All);
// احفظ ملف الاكسل.
wb.Save(dataDir + "output.out.xls", SaveFormat.Excel97To2003);
```
## خاتمة
تعد حماية البيانات في ملفات Excel أمرًا ضروريًا لمنع الوصول غير المصرح به أو التعديل غير المرغوب فيه. باستخدام مكتبة Aspose.Cells لـ .NET، يمكنك بسهولة حماية صفوف معينة في جدول بيانات Excel باستخدام كود مصدر C# المتوفر. اتبع هذا الدليل خطوة بخطوة لإضافة طبقة إضافية من الأمان إلى ملفات Excel الخاصة بك.
### الأسئلة الشائعة
#### هل تعمل حماية صف معين في كافة إصدارات Excel؟
نعم، تعمل حماية صف معين باستخدام Aspose.Cells for .NET في كافة إصدارات Excel المدعومة.
#### هل يمكنني حماية عدة صفوف محددة في جدول بيانات Excel؟
نعم، يمكنك حماية عدة صفوف محددة باستخدام طرق مشابهة موضحة في هذا الدليل.
#### كيف يمكنني فتح صف معين في جدول بيانات Excel؟
لفتح صف معين، يجب عليك تعديل الكود المصدري وفقًا لذلك عن طريق ضبط الخاصية `IsLocked` في كائن `Style` على القيمة false ثم تطبيق النمط على الصف.
from datetime import datetime
from typing import List, Optional
from uuid import UUID, uuid4
from applications.models import SupportTicket, TicketStatus, Operation
class SupportService:
    """Service-layer helpers around SupportTicket rows."""

    def submit_ticket(self, user_id, issue):
        """Create a new OPEN ticket for ``user_id``.

        Returns the created SupportTicket, or None (implicit) when
        ``user_id`` is missing or ``issue`` is blank — callers must
        handle the None case.
        """
        if user_id is not None and issue.strip() != '':
            ticket = SupportTicket.objects.create(user_id=user_id, issue=issue, status=TicketStatus.OPEN.value)
            return ticket

    def view_open_tickets(self):
        """Return a queryset of all tickets still in the OPEN state."""
        return SupportTicket.objects.filter(status=TicketStatus.OPEN.value)

    def notify_user(self, ticket_id, message):
        """Move the ticket to IN_PROGRESS and "notify" its user (stdout stub).

        Returns the updated ticket, or None when no ticket matches ``ticket_id``.
        """
        try:
            ticket = SupportTicket.objects.get(id=ticket_id)
            ticket.status = TicketStatus.IN_PROGRESS.value
            ticket.save()
            # Placeholder notification channel: real implementation would
            # send an email/push message instead of printing.
            print(f"Notification sent to user with ID {ticket.user_id}: {message}")
            return ticket
        except SupportTicket.DoesNotExist:
            return None

    def export_all_tickets(self):
        """Return a queryset of every ticket regardless of status."""
        return SupportTicket.objects.all()
class OperationService:
    """Persistence helpers for Operation rows."""

    def create_operation(self, done: bool = False, result: Optional[str] = None):
        """Create and return a new Operation row (the instance, not its id)."""
        return Operation.objects.create(done=done, result=result)

    def get_operation_by_id(self, operation_id: UUID):
        """Return the Operation with the given primary key.

        Raises Operation.DoesNotExist when no row matches.
        """
        return Operation.objects.get(id=operation_id)

    def set_operation_done(self, operation_id: UUID):
        """Mark the operation identified by ``operation_id`` as done and return it."""
        operation = self.get_operation_by_id(operation_id)
        operation.done = True
        operation.save()
        return operation
class ApplicationService:
    """Coordinates application processing and tracks it via Operation records."""

    def __init__(self):
        # Composition: delegate all Operation persistence to OperationService.
        self.operation_service = OperationService()

    def process_application(self, application_data):
        """Process an application and return the id of the tracking operation.

        A new Operation row is created up-front, the (placeholder) business
        logic runs, and the operation is then marked as done.
        """
        # Bug fix: create_operation returns the Operation INSTANCE, not its id;
        # the instance was previously passed straight into id-based lookups
        # (Operation.objects.get(id=...)). Extract the primary key instead, so
        # the return value also matches get_operation_info(operation_id: UUID).
        operation = self.operation_service.create_operation()
        # Processing the application (real business logic) would go here,
        # e.g. a lengthy computation or an asynchronous task.
        self.operation_service.set_operation_done(operation.id)
        return operation.id

    def get_operation_info(self, operation_id: UUID):
        """Return the Operation row identified by ``operation_id``."""
        return self.operation_service.get_operation_by_id(operation_id)

    def export_data(self):
        """Return a fresh random UUID (placeholder export token)."""
        return uuid4()
import { useState } from 'react';
import Button from 'react-bootstrap/Button';
import Modal from 'react-bootstrap/Modal';
import { base_api_url } from '../shared';
import { BsPlusLg } from 'react-icons/bs';
/**
 * Modal dialog for composing and posting a new tweet (text + optional file).
 * Expects `props.user` with at least a `username` field.
 */
function CreateTweetModal(props) {
    // user instance
    const {user} = props

    const [show, setShow] = useState(false);
    const [input, setInput] = useState("")
    const [file, setFile] = useState("")

    const handleClose = () => setShow(false);
    const handleShow = () => setShow(true);

    // API request: post the tweet content (and optional attachment) as
    // multipart form data.
    const create_tweet = async () => {
        const uploadedFile = new FormData()
        // append fields as key/value pairs
        uploadedFile.append("content", input)
        // also send the selected image/file
        uploadedFile.append("attachment", file)
        const request = await fetch(`${base_api_url}/tweets/create`, {
            method: "POST",
            headers: {
                "Authorization": "Bearer " + localStorage.getItem('access_token')
            },
            body: uploadedFile
        })
        const response = await request.json()
        console.log("SVDEN GELEN TWEET VERİ:", response)
        // close the modal
        handleClose()
        // NOTE(review): full page reload on success — presumably to refresh
        // the feed; consider updating local state instead.
        if (request.status === 201)
            window.location.reload()
    }

    return (
        <>
            <Button className='ms-auto btn btn-success' onClick={handleShow}>
                <BsPlusLg></BsPlusLg>
            </Button>

            <Modal show={show} onHide={handleClose}>
                <Modal.Header closeButton>
                    <Modal.Title>Tweet Oluştur</Modal.Title>
                </Modal.Header>
                <Modal.Body>
                    <textarea
                        className='form-control'
                        placeholder={`@${user.username} insanlar ile düşüncelerini paylaş.`}
                        value={input}
                        onChange={(e) => setInput(e.target.value)}
                    > </textarea>
                    <div className="mt-3">
                        <input className='form-control' type="file"
                            onChange={e => setFile(e.target.files[0])}
                        />
                    </div>
                </Modal.Body>
                <Modal.Footer>
                    <Button variant="link" onClick={create_tweet}>
                        Tweetle
                    </Button>
                </Modal.Footer>
            </Modal>
        </>
    );
}
export default CreateTweetModal;
<?php
namespace App\DTOs\Components\Filters\Dropdowns;
use App\DTOs\BaseDTO;
use App\DTOs\Filters\Items\FilterItemDTO;
use App\Enums\Filters\DealTypes;
use App\Enums\Filters\Queries;
use Spatie\TypeScriptTransformer\Attributes\LiteralTypeScriptType;
use Spatie\TypeScriptTransformer\Attributes\RecordTypeScriptType;
use Spatie\TypeScriptTransformer\Attributes\TypeScriptType;
/** @typescript */
abstract class BaseFilterMultipleChoiceDropdownComponentDTO extends BaseDTO
{
    /**
     * Items currently selected via the request query string, or null when the
     * query parameter is absent or not an array.
     */
    #[LiteralTypeScriptType('App.DTOs.Filters.Items.FilterItemDTO[] | null')]
    public ?array $queryItems;

    /**
     * @param DealTypes $dealType Deal type this dropdown filters on
     * @param Queries $query Query-string key backing this dropdown
     * @param array<FilterItemDTO> $defaultItems Items pre-selected by default
     * @param array<FilterItemDTO> $items All selectable items
     */
    public function __construct(
        public DealTypes $dealType,
        public Queries $query,
        #[LiteralTypeScriptType('App.DTOs.Filters.Items.FilterItemDTO[]')]
        public array $defaultItems,
        public array $items
    )
    {
        // Resolve the current selection from the incoming request eagerly so
        // the DTO is fully populated before serialization.
        $this->queryItems = $this->getQueryItems();
    }

    /**
     * Returns the items matching the current request's query-string values,
     * preserving the order in which the values appear in the query string.
     *
     * @return array<FilterItemDTO>|null null when the parameter is missing/invalid
     */
    protected function getQueryItems(): ?array
    {
        $query = $this->getQuery();
        $queryValues = request()->query($query->value);

        $items = [];
        if (!empty($queryValues) && is_array($queryValues)) {
            foreach ($queryValues as $value) {
                // Only known items are accepted; unknown values are ignored.
                foreach ($this->getItems() as $item) {
                    if ($item->value !== $value) continue;
                    $items[] = $item;
                }
            }
            return $items;
        }
        return null;
    }

    /**
     * Returns the query-string key for this dropdown
     *
     * @return Queries
     */
    abstract protected function getQuery(): Queries;

    /**
     * Returns the default items
     *
     * @return array<FilterItemDTO>
     */
    abstract protected function getDefaultItems(): array;

    /**
     * Returns all selectable items
     *
     * @return array<FilterItemDTO>
     */
    abstract protected function getItems(): array;
}
import React from "react";
/* Hook that returns true when the window currently matches the given media
   query string, false otherwise (null before the first measurement). */
const useMedia = (media) => {
  const [match, setMatch] = React.useState(null);

  /* Re-subscribe whenever the media query string changes */
  React.useEffect(() => {
    const updateMatch = () => {
      setMatch(window.matchMedia(media).matches);
    };

    // Measure immediately, then keep the value in sync on every resize.
    updateMatch();
    window.addEventListener("resize", updateMatch);
    return () => window.removeEventListener("resize", updateMatch);
  }, [media]);

  return match;
};

export default useMedia;
/**
* @file Utility related to static Atelier status
* @author Zebullon
*/
const _ = require('underscore');
/**
* Mapping between atelier status ID and their description as should be mirrored in atelierStatusRef DB table
* @type {{Assigned: number, ErrorOnAccept: number, InAuction: number, Complete: number, ErrorOnCreate: number, InProgress: number, ErrorOnMux: number, Cancelled: number, Deleted: number, Created: number, ErrorUnknown: number}}
*/
const atelierStatus = {
Created: 0,
InAuction: 10,
Assigned: 20,
InProgress: 30,
Complete: 50,
Cancelled: 60,
Deleted: 70,
ErrorOnCreate: 700,
ErrorOnMux: 800,
ErrorOnAccept: 900,
ErrorUnknown: 999,
};
module.exports.atelierStatus = atelierStatus;
/**
* Translate the DB atelier status code into their description
* Note: This does not look like a very fast method, use with care
* @param {string} statusCode - Numerical string representing the atelier status code
* @return {string} The description of the atelier status
*/
module.exports.atelierStatusIDToDescription = (statusCode) => {
const statusCodeIdx = _.indexOf(_.values(atelierStatus), statusCode);
if (statusCodeIdx === -1) {
console.error('Could not find status description from ID', statusCode);
return undefined;
}
return _.keys(atelierStatus)[statusCodeIdx];
};
/**
* Check if an atelier is still in progress
* @param {number} atelierStatusID - Atelier numerical ID
* @return {boolean} True if the atelier is in progress, False otherwise
*/
const isAtelierInProgress = (atelierStatusID) => atelierStatusID === atelierStatus.InProgress;
module.exports.isAtelierInProgress = isAtelierInProgress;
/**
* Check if an atelier is completed
* @param {number} atelierStatusID - Atelier numerical ID
* @return {boolean} True if the atelier is complete, False otherwise
*/
const isAtelierComplete = (atelierStatusID) => atelierStatusID === atelierStatus.Complete;
module.exports.isAtelierComplete = isAtelierComplete;
/**
* Check if an atelier is cancelled
* @param {number} atelierStatusID - Atelier numerical ID
* @return {boolean} True if the atelier is cancelled, False otherwise
*/
const isAtelierCancelled = (atelierStatusID) => atelierStatusID === atelierStatus.Cancelled;
module.exports.isAtelierCancelled = isAtelierCancelled;
const isAtelierDeleted = (atelierStatusID) => atelierStatusID === atelierStatus.Deleted;
module.exports.isAtelierDeleted = isAtelierDeleted;
const isAtelierInAuction = (atelierStatusID) => atelierStatusID === atelierStatus.InAuction;
module.exports.isAtelierInAuction = isAtelierInAuction;
/**
* Check if an atelier is closed (complete or cancelled)
* @param {number} atelierStatusID - Atelier numerical ID
* @return {boolean} True if the atelier is closed, False otherwise or if an error occurred
*/
const isAtelierClosed = (atelierStatusID) => isAtelierCancelled(atelierStatusID) || isAtelierComplete(atelierStatusID)
|| isAtelierDeleted(atelierStatusID);
module.exports.isAtelierClosed = isAtelierClosed; |
#include<iostream>
using namespace std;
// Wraps a single user-supplied integer and computes its factorial.
class Number
{
    private:
        int iNo;    // value supplied by the user (encapsulated state)

    public:     // accessible from outside the class
        // Bug fix: iNo was previously uninitialised, so calling Display()
        // or Factorial() before Accept() read an indeterminate value.
        Number() : iNo(0)
        {
        }

        // Setter: reads the value from standard input.
        void Accept()
        {
            cout<<"Enter the value:" <<endl;
            cin>>this->iNo;
        }

        // Getter: prints the stored value to standard output.
        void Display()
        {
            cout<<"Value is:" <<this->iNo<<endl;
        }

        // Returns iNo! (returns 1 for any value < 1, including 0).
        // NOTE(review): the result overflows int for iNo > 12 — widen to
        // long long if larger inputs are expected.
        int Factorial()
        {
            int iFact = 1;
            for(int iCnt = 1; iCnt <= iNo; iCnt++)
            {
                iFact = iFact * iCnt;
            }
            return iFact;
        }
};
int main()
{
Number nobj1;
Number nobj2;
int iRet = 0;
nobj1.Accept();
nobj1.Display();
iRet = nobj1.Factorial();
cout<<"Factorial is:"<<iRet<<endl;
nobj2.Accept();
nobj2.Display();
iRet = nobj2.Factorial();
cout<<"Factorial is:"<<iRet<<endl;
return 0;
} |
<template>
<div id="app">
<div class="container">
<div class="text-center">
<h2 class="text-center mt-5">Trending Movies 🍿</h2>
<p>Keep up with the hottest movies that are trending this week.</p>
</div>
<div class="my-4">
<a href="#" @click="getTrendingMovies('day')" class="mx-3 h4">
Trending today</a
>
<a href="#" @click="getTrendingMovies('week')" class="mx-3 h4"
>This week</a
>
</div>
<div class="row" v-if="movies.length > 0">
<div class="col-md-3" v-for="(movie, i) in movies" :key="i">
<movie-card :movie="movie" />
</div>
</div>
</div>
</div>
</template>
<script>
import { ref } from '@vue/reactivity';
import MovieCard from "./components/MovieCard.vue";
import { onMounted } from '@vue/runtime-core';
export default {
setup() {
const movies = ref([]);
const apiKey = ref("9d295bfac80df197274af26d7eb81087");
const getTrendingMovies = async (category) => {
return fetch(
`https://api.themoviedb.org/3/trending/movie/${category}?api_key=9d295bfac80df197274af26d7eb81087`
)
.then((response) => response.json())
.then((data) => {
movies.value = data.results;
});
};
onMounted(async () => {
await getTrendingMovies("day");
});
return {
movies,
apiKey,
getTrendingMovies,
}
},
name: 'App',
components: {
MovieCard,
}
}
</script>
<style>
#app {
font-family: Avenir, Helvetica, Arial, sans-serif;
-webkit-font-smoothing: antialiased;
-moz-osx-font-smoothing: grayscale;
text-align: center;
color: #2c3e50;
margin-top: 60px;
}
</style> |
mod error;
mod function;
mod notification;
mod result;
mod serial;
mod stack;
mod types;
use std::ffi::{c_char, CStr};
use base64::engine::general_purpose::STANDARD;
use base64_serde::base64_serde_type;
pub use error::*;
pub use function::*;
pub use notification::*;
pub use result::*;
pub use stack::*;
use tonlib_sys::*;
pub use types::*;
use self::serial::*;
base64_serde_type!(Base64Standard, STANDARD);
/// Wrapper around ton client with support for TL data types
pub struct TlTonClient {
    // Raw handle returned by tonlib_client_json_create; owned by this struct
    // and released in Drop.
    ptr: *mut ::std::os::raw::c_void,
    // Label used to distinguish this client's log lines.
    tag: String,
}
impl TlTonClient {
    /// Creates a new tonlib client.
    ///
    /// `tag` is only used to prefix trace output so concurrent clients can be
    /// told apart in the logs.
    pub fn new(tag: &str) -> TlTonClient {
        let client: TlTonClient = unsafe {
            // Allocates the underlying tonlib JSON client; freed in Drop.
            let ptr = tonlib_client_json_create();
            TlTonClient {
                ptr,
                tag: tag.into(),
            }
        };
        client
    }

    /// Returns the tag this client was created with.
    pub fn get_tag(&self) -> &str {
        self.tag.as_str()
    }

    /// Synchronously executes `function` and deserializes the response.
    pub fn execute(&self, function: &TonFunction) -> Result<TonResult, TlError> {
        let f_str = serialize_function(function)?;
        log::trace!(
            "[{}] execute: {}",
            self.tag,
            f_str.to_str().unwrap_or("<Error decoding string as UTF-8>")
        );
        let result = unsafe {
            let c_str = tonlib_client_json_execute(self.ptr, f_str.as_ptr());
            log::trace!(
                "[{}] result: {}",
                self.tag,
                CStr::from_ptr(c_str)
                    .to_str()
                    .unwrap_or("<Error decoding string as UTF-8>")
            );
            deserialize_result(c_str)
        };
        #[allow(clippy::let_and_return)]
        result
    }

    /// Sends `function` asynchronously; the reply (correlated via `extra`)
    /// must be collected later through [`TlTonClient::receive`].
    pub fn send(&self, function: &TonFunction, extra: &str) -> Result<(), TlError> {
        let f_str = serialize_function_extra(function, extra)?;
        log::trace!(
            "[{}] send: {}",
            self.tag,
            f_str.to_str().unwrap_or("<Error decoding string as UTF-8>")
        );
        unsafe { tonlib_client_json_send(self.ptr, f_str.as_ptr()) };
        Ok(())
    }

    /// Polls for the next incoming result/notification, waiting up to
    /// `timeout` seconds. Returns `None` on timeout; otherwise the
    /// deserialized result plus the `extra` correlation id, if any.
    pub fn receive(&self, timeout: f64) -> Option<(Result<TonResult, TlError>, Option<String>)> {
        let c_str = unsafe { tonlib_client_json_receive(self.ptr, timeout) };
        if c_str.is_null() {
            None
        } else {
            let c_str_slice = unsafe { CStr::from_ptr(c_str) };
            if let Ok(c_str_str) = c_str_slice.to_str() {
                log::trace!("[{}] receive: {}", self.tag, c_str_str);
            } else {
                log::trace!("[{}] receive: <Error decoding string as UTF-8>", self.tag);
            }
            let c_str_bytes = c_str_slice.to_bytes();
            let (result, extra) =
                unsafe { deserialize_result_extra(c_str_bytes.as_ptr() as *const c_char) };
            Some((result, extra))
        }
    }

    /// Sets tonlib's global log verbosity. Associated function: this affects
    /// every client in the process, not just one instance.
    pub fn set_log_verbosity_level(verbosity_level: u32) {
        unsafe { tonlib_sys::tonlib_client_set_verbosity_level(verbosity_level) }
    }
}
impl Drop for TlTonClient {
    fn drop(&mut self) {
        unsafe {
            // Guard against double-free: destroy only once, then null the ptr.
            if !self.ptr.is_null() {
                tonlib_client_json_destroy(self.ptr);
                self.ptr = std::ptr::null_mut();
            }
        }
    }
}

// SAFETY(review): sharing the raw client pointer across threads presumably
// relies on tonlib_client_json_* being thread-safe — confirm against the
// tonlib headers before relying on these impls.
unsafe impl Send for TlTonClient {}
unsafe impl Sync for TlTonClient {}
#[cfg(test)]
mod tests {
    use crate::tl::function::TonFunction;
    use crate::tl::TlTonClient;

    // Smoke test: setting the global verbosity level must not panic.
    #[test]
    fn set_log_verbosity_level_works() -> anyhow::Result<()> {
        let level = 1;
        TlTonClient::set_log_verbosity_level(level);
        Ok(())
    }

    // Smoke test: a freshly created client can execute a simple query.
    #[test]
    fn it_executes_functions() -> anyhow::Result<()> {
        let client = TlTonClient::new("test");
        let get_logging = TonFunction::GetLogVerbosityLevel {};
        let _ = client.execute(&get_logging)?;
        Ok(())
    }
}
###########################################################
New Step
###########################################################
prompt_log: Sample Prompt:
You are confronted with a task in which a 1-dimensional input sequence of pixels should be transformed into a corresponding output sequence. The input and output sequences have values from 1 to 9 representing different pixel colors, and 0 representing the background color. Adjacent pixels of the same color are designated as objects. For example [0, 2, 2, 0, 3] represents a pixel sequence with the following objects: Object_1: {color: 2, start_index: 1, end_index: 2, size: 2}, Object_2: {color: 3, start_index: 4, end_index: 4, size: 1}, with zero-indexing for the position.
The transformation from input to output follows a certain pattern with logical rules that might refer to concepts as follows:
- Objects:
- transformations, such as move, hollow, scale, remove, copy, recolor.
- relations between objects, such as distance, alignment, overlap, containment.
- Noise pixels.
- Arithmetics based on objects: Counting, sorting.
- Conditions: rules might be conditional.
This list is not exhaustive.
You are to infer the simplest possible relation between input and output. Then, your task is to transform the test input sequence into its test output sequence.
You are to output only the following in json format, fill the values as described: {'description': {'Example_1': 'regarding the first example, describe the difference between the input and output sequence, be precise and say WHAT changed HOW!', 'Example_2': '...'}, 'overall_pattern': 'describe the input-output relationship for all input-output pairs', 'instructions': 'describe the needed transformation actions to transform a new input into its output, think step by step', 'transformation': {'input': 'copy the test case input sequence from the task. Mind the sequence length!', 'object_description': 'regarding the test input, describe the objects in the input sequences, focusing on size, position, color', 'transformed_objects': 'Describe how the objects should look like in the test output sequence, focusing on size, position, color', 'output': 'create the resulting test output sequence. Mind the sequence length!'}, 'test_output': 'Return the created test output sequence in numpy array format. Mind the sequence length!'}. Do not use quotation marks ' or " within the fields.
The following input-output pairs are examples and share the same underlying transformation pattern.
Example_1:
input: [0, 0, 0, 0, 0, 4, 5, 5, 5, 5, 5, 0, 0, 0, 0, 0, 0]
output: [0, 0, 0, 0, 0, 5, 5, 5, 5, 5, 4, 0, 0, 0, 0, 0, 0]
Example_2:
input: [0, 0, 0, 0, 1, 2, 2, 2, 2, 2, 2, 2, 0, 0, 0, 0, 0]
output: [0, 0, 0, 0, 2, 2, 2, 2, 2, 2, 2, 1, 0, 0, 0, 0, 0]
Example_3:
input: [0, 0, 5, 3, 3, 3, 3, 3, 3, 0, 0, 0, 0, 0, 0, 0, 0]
output: [0, 0, 3, 3, 3, 3, 3, 3, 5, 0, 0, 0, 0, 0, 0, 0, 0]
Test case:
input: [0, 0, 0, 0, 0, 0, 0, 0, 7, 1, 1, 1, 1, 1, 1, 1, 0]
###########################################################
Sample Outputs:
output: [0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0]
"""
import numpy as np
from PIL import Image
import matplotlib.pyplot as plt
def get_image(img):
img = Image.open(img)
return img
def get_array(img):
arr = np.asarray(img)
return arr
def get_pixels(arr):
pixels = []
for i in range(len(arr)):
for j in range(len(arr[i])):
if arr[i][j]:
pixels.append([i, j])
return pixels
def get_colors(pixels):
colors = {}
for px in pixels:
if px[0],px[1] in colors:
colors[(px[0],px[1])].append(arr[px[0]][px[1]])
else:
colors[(px[0],px[1])] = [arr[px[0]][px[1]]]
return colors
def get_objects(colors):
objects = []
for k,v in colors.items():
if len(set(v)) == 1:
objects.append({'start_index':k[0],'end_index':k[0]+1,'size':1})
elif len(set(v)) > 1:
objects.append({'start_index':min(k),'end_index':max(k)+1,'size':max(k)-min(k)})
return objects
def get_noises(objects,pixels):
noises = []
for o in objects:
for p in pixels:
if (o['start_index'] <= p[0] < o['end_index']) and (o['start_index'],o['end_index'])!= p:
noises.append((p[0],p[1]))
return noises
def get_relationships(objects):
relationships = []
for o1 in objects:
for o2 in objects:
if o1['start_index'] >= o2['start_index'] and o1['end_index'] <= o2['end_index']:
relationships.append('contain')
elif o1['start_index'] <= o2['start_index'] and o1['end_index'] >= o2['end_index']:
relationships.append('contained')
elif o1['start_index'] == o2['start_index'] and o1['end_index'] == o2['end_index']:
relationships.append('equal')
elif o1['start_index'] == o2['start_index'] + 1 and o1['end_index'] == o2['end_index'] - 1:
relationships.append('adjacent')
elif o1['start_index'] == o2['start_index'] - 1 and o1['end_index'] == o2['end_index'] + 1:
relationships.append('opposite')
elif o1['start_index'] == o2['start_index'] - 1 and o1['end_index'] == o2['end_index'] - 1:
relationships.append('left')
elif o1['start_index'] == o2['start_index'] + 1 and o1['end_index'] == o2['end_index'] + 1:
relationships.append('right')
elif o1['start_index'] == o2['start_index'] and o1['end_index'] == o2['end_index'] + 1:
relationships.append('above')
elif o1['start_index'] == o2['start_index'] and o1['end_index'] == o2['end_index'] - 1:
relationships.append('below')
return relationships
def get_conditions(relationships):
conditions = []
for r in relationships:
if r == 'contain':
conditions.append("if object_1 contains object_2")
elif r == 'contained':
conditions.append("if object_1 is contained by object_2")
elif r == 'equal':
conditions.append("if object_1 equals object_2")
elif r == 'adjacent':
conditions.append("if object_1 is adjacent to object_2")
elif r == 'opposite':
conditions.append("if object_1 is opposite to object_2")
elif r == 'left':
conditions.append("if object_1 is left to object_2")
elif r == 'right':
conditions.append("if object_1 is right to object_2")
elif r == 'above':
conditions.append("if object_1 is above object_2")
elif r == 'below':
conditions.append("if object_1 is below object_2")
return conditions
def get_actions(objects,relationships):
actions = []
for o1 in objects:
for o2 in objects:
if o1['start_index'] >= o2['start_index'] and o1['end_index'] <= o2['end_index']:
actions.append('move')
elif o1['start_index'] <= o2['start_index'] and o1['end_index'] >= o2['end_index']:
actions.append('hollow')
elif o1['start_index'] == o2['start_index'] and o1['end_index'] == o2['end_index']:
actions.append('scale')
elif o1['start_index'] == o2['start_index'] + 1 and o1['end_index'] == o2['end_index'] - 1:
actions.append('remove')
elif o1['start_index'] == o2['start_index'] - 1 and o1['end_index'] == o2['end_index'] + 1:
actions.append('copy')
elif o1['start_index'] == o2['start_index'] - 1 and o1['end_index'] == o2['end_index'] - 1:
actions.append('recolor')
return actions
def get_results(objects,relationships,actions):
results = []
for o1 in objects:
for o2 in objects:
if o1['start_index'] >= o2['start_index'] and o1['end_index'] <= o2['end_index']:
results.append(f'{o1["start_index"]},{o1["end_index"]}')
elif o1['start_index'] <= o2['start_index'] and o1['end_index'] >= o2['end_index']:
results.append(f'{o2["start_index"]},{o2["end_index"]}')
elif o1['start_index'] == o2['start_index'] and o1['end_index'] == o2['end_index']:
results.append(f'{o1["start_index"]},{o1["end_index"]}')
elif o1['start_index'] == o2['start_index'] + 1 and o1['end_index'] == o2['end_index'] - 1:
results.append(f'{o2["start_index"]},{o2["end_index"]}')
elif o1['start_index'] == o2['start_index'] - 1 and o1['end_index'] == o2['end_index'] + 1:
results.append(f'{o1["start_index"]},{o1["end_index"]}')
elif o1['start_index'] == o2['start_index'] - 1 and o1['end_index'] == o2['end_index'] - 1:
results.append(f'{o2["start_index"]},{o2["end_index"]}')
return results
def get_result(objects,relationships,actions):
    """Singular variant of ``get_results`` covering only the containment case.

    NOTE(review): this function builds ``result`` but never returns it and
    stops after the first branch of ``get_results`` — it looks like truncated
    generated code. Confirm against the generator before relying on it.
    """
    result = []
    for o1 in objects:
        for o2 in objects:
            # Same "o1 contained in o2" test as the first branch of get_results.
            if o1['start_index'] >= o2['start_index'] and o1['end_index'] <= o2['end_index']:
                result.append(f'{o1["start_index"]},{o1["end_index"]}')
###########################################################
Result:
solution: [0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 7, 0]
success: 0
tries: 1
success_rate: 0.18012422360248448
cat_success_cnt: 2
cat_success_rate: 0.18181818181818182 |
<!-- Links -->
<div th:fragment="links">
    <!-- Script Links -->
    <script src="https://cdn.jsdelivr.net/npm/@popperjs/core@2.10.2/dist/umd/popper.min.js"
        integrity="sha384-7+zCNj/IqJ95wo16oMtfsKbZ9ccEh31eOz1HGyDuCQ6wgnyJNSYdrPa03rtR1zdB"
        crossorigin="anonymous"></script>
    <script src="https://cdn.jsdelivr.net/npm/bootstrap@5.1.3/dist/js/bootstrap.min.js"
        integrity="sha384-QJHtvGhmr9XOIpI6YVutG+2QOK9T+ZnN4kzFN1RtK3zEFEIsxhlmWl5/YESvpZ13"
        crossorigin="anonymous"></script>
    <!-- Font Awesome Icons-->
    <link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.0.0-beta3/css/all.min.css"
        integrity="sha512-Fo3rlrZj/k7ujTnHg4CGR2D7kSs0v4LLanw2qksYuRlEzO+tcaEPQogQ0KaoGN26/zrn20ImR1DfuLWnOo7aBA=="
        crossorigin="anonymous" referrerpolicy="no-referrer" />
    <!-- Favicon-->
    <link rel="icon" type="image/x-icon" href="assets/favicon.ico" />
    <!-- Bootstrap icons-->
    <link href="https://cdn.jsdelivr.net/npm/bootstrap-icons@1.5.0/font/bootstrap-icons.css" rel="stylesheet" />
    <!-- NOTE(review): Bootstrap 4.3.1 CSS is loaded below while Bootstrap 5.1.3 JS is
         loaded above — v4 CSS and v5 JS use incompatible data attributes
         (data-toggle vs data-bs-toggle). Confirm which version the markup targets. -->
    <link rel="stylesheet" href="https://stackpath.bootstrapcdn.com/bootstrap/4.3.1/css/bootstrap.min.css" integrity="sha384-ggOyR0iXCbMQv3Xipma34MD+dH/1fQ784/j6cY/iJTQUOhcWr7x9JvoRxT2MZw1T" crossorigin="anonymous">
    <link rel="stylesheet" href="/css/basic.css">
</div>
<!-- Navbar -->
<!-- Shared navigation bar fragment: admin dropdown, login/signup triggers,
     and the authenticated-user menu (edit profile / logout). -->
<div th:fragment="navBar">
    <nav class="navbar navbar-expand-lg">
        <div class="container">
            <a class="navbar-brand" th:href="@{/}">Home</a>
            <button class="navbar-toggler" type="button" data-bs-toggle="collapse"
                data-bs-target="#navbarSupportedContent" aria-controls="navbarSupportedContent" aria-expanded="false"
                aria-label="Toggle navigation"><span class="navbar-toggler-icon"><i
                        class="fa fa-bars"></i></span></button>
            <div class="collapse navbar-collapse" id="navbarSupportedContent">
                <ul class="navbar-nav me-auto mb-2 mb-lg-0 ms-lg-4">
                    <li class="nav-item dropdown" sec:authorize="hasRole('ROLE_ADMIN')">
                        <!-- shown when logged in as admin -->
                        <a class="nav-link dropdown-toggle" id="navbarDropdown" href="#" role="button"
                            data-bs-toggle="dropdown" aria-expanded="false">USERS</a>
                        <ul class="dropdown-menu" aria-labelledby="navbarDropdown">
                            <li class="nav-item"><a class="dropdown-item cta-admin">New Admin</a></li>
                            <li class="nav-item">
                                <a class="dropdown-item cta">New Client</a>
                            </li>
                        </ul>
                    </li>
                </ul>
                <ul class="navbar-nav mb-lg-0">
                    <!-- NOTE(review): the next two <li> elements use id="nav-item" /
                         id="nav-link" where Bootstrap expects the class "nav-item";
                         if this fragment is included more than once the ids also
                         stop being unique. Confirm whether these should be classes. -->
                    <li id="nav-item" class="login-btn" sec:authorize="!isAuthenticated()">
                        <!-- shown when not logged in (or after logout) -->
                        <a class="nav-link login">LOGIN</a>
                    </li>
                    <li id="nav-link" class="login-btn" sec:authorize="!isAuthenticated()">
                        <!-- shown when not logged in (or after logout) -->
                        <a class="nav-link cta">SIGN UP</a>
                    </li>
                    <li class="dropdown" sec:authorize="isAuthenticated()">
                        <!-- shown when logged in -->
                        <a class="btn btn-outline-dark dropdown-toggle login-btn" href="#" role="button"
                            id="dropdownMenuLink" data-bs-toggle="dropdown" aria-expanded="false">
                            <span sec:authentication="name"></span>
                        </a>
                        <ul class="dropdown-menu" aria-labelledby="dropdownMenuLink">
                            <li>
                                <!-- GET so the edit page can be bookmarked; id travels as a query param -->
                                <form th:action="@{/editUser}" method="GET">
                                    <button class="dropdown-item" type="submit" name="id"
                                        th:value="${session.usuario.id}">Edit Profile</button>
                                </form>
                            </li>
                            <li>
                                <!-- POST logout as required by Spring Security's CSRF handling -->
                                <form th:action="@{/logout}" method="POST">
                                    <button class="dropdown-item" type="submit">Logout</button>
                                </form>
                            </li>
                        </ul>
                    </li>
                </ul>
            </div>
        </div>
    </nav>
</div>
<!-- Modal User-->
<!-- Client sign-up modal: posts to /saveClient. Reused both by anonymous
     visitors (Sign Up) and by admins creating a new client. -->
<div th:fragment="modal-user" class="modal-container-signup">
    <div class="modal-signup modal-close-signup">
        <p class="close-signup">X</p>
        <img class="admin-features" sec:authorize="hasRole('ROLE_ADMIN')" src="/img/admin-icon.png" alt="">
        <img sec:authorize="!isAuthenticated()" src="/img/welcomeCats.svg" alt="">
        <div id="user-sign-up" class="model-textos">
            <form id="contact-signup" action="/saveClient" method="POST">
                <div class="form-group-signup">
                    <!-- Client-side validation messages are injected here by script. -->
                    <p id="error" style="color:rgb(231, 15, 15)"></p>
                    <h2 sec:authorize="hasRole('ROLE_ADMIN')">Create a new Client</h2>
                    <h2 sec:authorize="!isAuthenticated()">Sign Up!</h2>
                    <!-- NOTE(review): every label below points at for="exampleInputTitulo",
                         an id that does not exist in this fragment — the labels are empty
                         and do nothing for accessibility. Confirm whether they can be
                         removed or should reference the adjacent inputs. -->
                    <label for="exampleInputTitulo"></label>
                    <input type="text" class="form-control" name="name" id="name" placeholder="Name"
                        th:value="${name}">
                    <label for="exampleInputTitulo"></label>
                    <input type="number" class="form-control" name="dni" id="dni" placeholder="DNI"
                        th:value="${dni}">
                    <label for="exampleInputTitulo"></label>
                    <input type="email" class="form-control" name="mail" id="mail" placeholder="Mail"
                        th:value="${mail}">
                    <label for="exampleInputTitulo"></label>
                    <input type="tel" class="form-control" name="phone" id="phone" placeholder="Phone"
                        th:value="${phone}">
                    <label for="exampleInputTitulo"></label>
                    <input type="text" class="form-control" name="address" id="address" placeholder="Address"
                        th:value="${address}">
                    <label for="exampleInputTitulo"></label>
                    <input type="password" class="form-control" name="password1" id="password"
                        placeholder="Password">
                    <label for="exampleInputTitulo"></label>
                    <input type="password" class="form-control" name="password2" placeholder="Repeat password">
                    <!-- Hidden fixed fields consumed by /saveClient. -->
                    <input type="text" name="role" id="role" value="ROLE_CLIENT"
                        style="visibility: hidden; display: none;">
                    <input type="text" name="active" id="active" value="true"
                        style="visibility: hidden; display: none;">
                </div>
                <button name="submit" type="submit" id="contact-submit-1" class="btn btn-outline-dark"
                    data-submit="...Sending">Sign Up</button>
            </form>
        </div>
    </div>
</div>
<!-- Modal Admin -->
<!-- Admin sign-up modal: posts to /saveAdmin. Structure mirrors modal-user.
     Fixes: removed a duplicated "Repeat password" input (two inputs shared
     name="password2", so the field was submitted twice), and moved the stray
     </div> that closed #admin-sign-up inside the <form>, so the submit button
     now sits inside the form like in modal-user. -->
<div th:fragment="modal-admin" class="modal-container-admin">
    <div class="modalA modal-close-admin">
        <p class="close-admin">X</p>
        <img class="admin-features" sec:authorize="hasRole('ROLE_ADMIN')" src="/img/admin-icon.png" alt="">
        <img sec:authorize="!isAuthenticated()" src="/img/welcomeCats.svg" alt="">
        <div id="admin-sign-up" class="model-textos">
            <h2>Formulario de Registro</h2>
            <form id="contact-admin" action="/saveAdmin" method="POST">
                <div class="form-group-admin">
                    <!-- NOTE(review): id="error2" is also used by modal-login;
                         ids must be unique per page — confirm which scripts target it. -->
                    <p id="error2" style="color:rgb(231, 15, 15)"></p>
                    <label for="exampleInputTitulo"></label>
                    <input type="text" class="form-control" name="name" id="name1" placeholder="Name">
                    <label for="exampleInputTitulo"></label>
                    <input type="number" class="form-control" name="dni" id="dni1" placeholder="DNI">
                    <label for="exampleInputTitulo"></label>
                    <input type="email" class="form-control" name="mail" id="mail1" placeholder="Mail">
                    <label for="exampleInputTitulo"></label>
                    <input type="tel" class="form-control" name="phone" id="phone1" placeholder="Phone">
                    <label for="exampleInputTitulo"></label>
                    <input type="password" class="form-control" name="password1" id="password2"
                        placeholder="Password">
                    <label for="exampleInputTitulo"></label>
                    <input type="password" class="form-control" name="password2" placeholder="Repeat password">
                    <!-- NOTE(review): this form posts to /saveAdmin but the hidden role
                         is ROLE_CLIENT — confirm whether it should be ROLE_ADMIN. -->
                    <input type="text" name="role" id="role" value="ROLE_CLIENT"
                        style="visibility: hidden; display: none;">
                    <input type="text" name="active" id="active" value="true"
                        style="visibility: hidden; display: none;">
                </div>
                <br><br>
                <button name="submit" type="submit" id="contact-submit-2" class="btn btn-outline-dark"
                    data-submit="...Sending">Sign Up</button>
            </form>
        </div>
    </div>
</div>
<!-- Modal login -->
<!-- Login modal: posts credentials to /logincheck (Spring Security form login).
     Field names "username"/"password" match the default security filter names. -->
<div th:fragment="modal-login" class="modal-container-login">
    <div class="modal-login modal-close-login">
        <p class="close-login">X</p>
        <!-- <img src="welcomeCats.svg" alt=""> -->
        <div id="user-login" class="model-textos">
            <h2>Login</h2>
            <form id="contact-login" action="/logincheck" method="POST">
                <div class="form-group-login">
                    <!-- NOTE(review): id="error2" duplicates the one in modal-admin —
                         ids must be unique per page; confirm which scripts target it. -->
                    <p id="error2" style="color:rgb(231, 15, 15)"></p>
                    <label for="exampleInputTitulo"></label>
                    <input type="email" class="form-control" name="username" id="username" placeholder="Mail"
                        required autofocus>
                    <label for="exampleInputTitulo"></label>
                    <input type="password" class="form-control" name="password" id="password1"
                        placeholder="Password" required>
                </div>
                <button name="submit" type="submit" id="contact-submit">Login</button>
            </form>
        </div>
    </div>
</div>
package util
import (
"os"
"testing"
"time"
)
// TestDirExists verifies DirExists for both an existing and a missing directory.
func TestDirExists(t *testing.T) {
	dirPath := "./testDir"
	// Create the test directory.
	if err := os.Mkdir(dirPath, os.ModePerm); err != nil {
		t.Fatalf("Не удалось создать тестовую директорию: %s", err)
	}
	defer os.Remove(dirPath) // Remove the test directory when done.
	// The directory just created must be reported as existing.
	exists, err := DirExists(dirPath)
	if err != nil {
		t.Fatalf("Ошибка при вызове DirExists: %s", err)
	}
	if !exists {
		t.Fatalf("DirExists вернула false, ожидалось true")
	}
	// A path that was never created must be reported as absent (and not an error).
	exists, err = DirExists(dirPath + "_not_exist")
	if err != nil {
		t.Fatalf("Ошибка при вызове DirExists для несуществующей директории: %s", err)
	}
	if exists {
		t.Fatalf("DirExists вернула true для несуществующей директории, ожидалось false")
	}
}
// TestMakeDir verifies MakeDir creates a directory that DirExists then reports.
func TestMakeDir(t *testing.T) {
	dirPath := "./testMakeDir"
	// Create the directory via the helper under test.
	if err := MakeDir(dirPath); err != nil {
		t.Fatalf("Не удалось создать директорию: %s", err)
	}
	defer os.Remove(dirPath) // Remove the directory when done (it is empty).
	// Cross-check with DirExists that the directory really exists.
	exists, err := DirExists(dirPath)
	if err != nil {
		t.Fatalf("Ошибка при проверке существования директории: %s", err)
	}
	if !exists {
		t.Fatalf("Директория не создана")
	}
}
// TestGetMD5Hash checks GetMD5Hash against a known digest.
func TestGetMD5Hash(t *testing.T) {
	text := "Hello, world!"
	expectedHash := "6cd3556deb0da54bca060b4c39479839" // known MD5 hash of "Hello, world!"
	hash := GetMD5Hash(text)
	if hash != expectedHash {
		t.Fatalf("GetMD5Hash вернула неправильный хеш: получено %s, ожидалось %s", hash, expectedHash)
	}
}
// TestCreateExpireDate checks that CreateExpireDate returns roughly
// time.Now().Add(expireTime).
func TestCreateExpireDate(t *testing.T) {
	now := time.Now()
	expireTime := 5 * time.Minute
	expireDate := CreateExpireDate(expireTime)
	// Compare with a one-second tolerance instead of the previous
	// truncate-to-seconds equality: some wall time elapses between capturing
	// `now` and the time.Now() call inside CreateExpireDate, so an exact
	// comparison is timing-sensitive.
	diff := expireDate.Sub(now)
	if diff < expireTime || diff > expireTime+time.Second {
		t.Fatalf("CreateExpireDate вернула неправильную дату истечения срока")
	}
}
// TestCompareDate verifies CompareDate for future, equal and past second arguments.
// From these cases CompareDate(a, b) behaves as "b is not before a".
func TestCompareDate(t *testing.T) {
	now := time.Now()
	future := now.Add(5 * time.Minute)
	past := now.Add(-5 * time.Minute)
	if !CompareDate(now, future) {
		t.Fatal("CompareDate вернула false для даты в будущем")
	}
	if !CompareDate(now, now) {
		t.Fatal("CompareDate вернула false для одинаковых дат")
	}
	if CompareDate(future, past) {
		t.Fatal("CompareDate вернула true для даты в прошлом")
	}
}
/* ************************************************************************** */
/* */
/* ::: :::::::: */
/* ft_itoa.c :+: :+: :+: */
/* +:+ +:+ +:+ */
/* By: mijung <mijung@student.42seoul.kr> +#+ +:+ +#+ */
/* +#+#+#+#+#+ +#+ */
/* Created: 2022/07/11 15:54:22 by mijung #+# #+# */
/* Updated: 2022/07/11 15:54:58 by mijung ### ########.fr */
/* */
/* ************************************************************************** */
#include <stdlib.h>
/*
** Count the characters needed to print n in base 10,
** including the '-' sign for negative numbers. 0 needs one character.
*/
static int	ft_finddigit(int n)
{
	int	count;

	if (n == 0)
		return (1);
	if (n < 0)
		count = 1;
	else
		count = 0;
	while (n != 0)
	{
		count++;
		n /= 10;
	}
	return (count);
}
/*
** Allocate and return the base-10 string representation of n.
** Widening to long before negating makes INT_MIN safe.
** Returns NULL if the allocation fails.
*/
char	*ft_itoa(int n)
{
	long	value;
	int		len;
	char	*out;

	len = ft_finddigit(n);
	out = (char *)malloc(len + 1);
	if (!out)
		return (NULL);
	value = (long)n;
	out[len] = '\0';
	if (n == 0)
		out[0] = '0';
	else if (n < 0)
	{
		value = -value;
		out[0] = '-';
	}
	len--;
	while (value > 0)
	{
		out[len--] = (char)(value % 10 + '0');
		value /= 10;
	}
	return (out);
}
# Copyright (c) 2023 Robert Bosch GmbH
#
# This program and the accompanying materials are made available under the
# terms of the Apache License, Version 2.0 which is available at
# https://www.apache.org/licenses/LICENSE-2.0.
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# SPDX-License-Identifier: Apache-2.0
from typing import Any, List
def generate_nodeport(port: int) -> int:
    """Derive a Kubernetes NodePort (30000-32767) from ``port``.

    The last four digits of ``port`` are kept; if prefixing them with "3"
    would exceed 32767, only the last three digits are kept instead.

    Args:
        port: The port to be used to generate the nodeport.

    Returns:
        A port number in the NodePort range 30000-32767.
    """
    suffix = port % 10000
    if suffix > 2767:
        # Four digits would push the result past 32767; drop to three.
        suffix %= 1000
    # Equivalent to int(f"3{suffix:04d}"): "3" + zero-padded 4 digits.
    return 30000 + suffix
def create_cluster_ip_spec(service_id: str, ports: List[dict]) -> dict[str, Any]:
    """Build a v1 ClusterIP Service manifest for the given service id.

    Args:
        service_id: The id of the service to create cluster ip for; used as
            both the resource name and the ``app`` label/selector.
        ports: Port entries to expose, already in Kubernetes ``ports`` format.

    Returns:
        A dict representing the Service resource.
    """
    app_label = {"app": service_id}
    spec = {
        "type": "ClusterIP",
        "selector": app_label,
        "ports": ports,
    }
    return {
        "apiVersion": "v1",
        "kind": "Service",
        "metadata": {"name": service_id, "labels": app_label},
        "spec": spec,
    }
import Foundation
/// A dot position parsed from the puzzle input.
struct Coord { var x, y : Int }
/// A fold instruction; exactly one of `x` / `y` is non-nil per instruction.
struct Fold { var x, y : Int? }
/// Row-major boolean grid of dots (`grid[c + r * cols]`).
struct Grid {
    var grid: [Bool]
    var rows, cols: Int
}
var coords : [Coord] = []
var folds : [Fold] = []
// One regex per input line shape: "x,y" dots and "fold along x=.."/"y=..".
let coordMatch = try! NSRegularExpression(pattern: "(\\d+),(\\d+)")
let foldXMatch = try! NSRegularExpression(pattern: "fold along x=(\\d+)")
let foldYMatch = try! NSRegularExpression(pattern: "fold along y=(\\d+)")
// Read stdin to EOF, classifying each line by whichever regex matches.
// NOTE(review): NSRange lengths should be UTF-16 code-unit counts; `s.count`
// counts Characters. They coincide for this ASCII input — confirm if the
// input could ever contain non-ASCII text.
while let s = readLine() {
    if let m = coordMatch.firstMatch(
        in: s, range: NSRange(location: 0, length: s.count)),
        let xr = Range(m.range(at: 1), in: s),
        let yr = Range(m.range(at: 2), in: s),
        let x = Int(s[xr]),
        let y = Int(s[yr]) {
        coords.append(Coord(x: x, y: y))
    }
    if let m = foldXMatch.firstMatch(
        in: s, range: NSRange(location: 0, length: s.count)),
        let r = Range(m.range(at: 1), in: s),
        let x = Int(s[r]) {
        folds.append(Fold(x: x))
    }
    if let m = foldYMatch.firstMatch(
        in: s, range: NSRange(location: 0, length: s.count)),
        let r = Range(m.range(at: 1), in: s),
        let y = Int(s[r]) {
        folds.append(Fold(y: y))
    }
}
/// Build a Grid just large enough to hold all coords, with those cells set.
func makeGrid(coords: [Coord]) -> Grid {
    // Grid extents are the maximum x/y seen, inclusive.
    let maxes = coords.reduce(Coord(x: 0, y: 0)) {
        (accum: Coord, nextVal: Coord) -> Coord in
        return Coord(x: max(accum.x, nextVal.x),
                     y: max(accum.y, nextVal.y))
    }
    var out = Grid(grid: [],
                   rows: 1 + maxes.y,
                   cols: 1 + maxes.x)
    let N = out.rows * out.cols
    // Fix: the previous `for _ in 0...N` appended N+1 cells (one extra
    // trailing cell, harmless but wrong). Allocate exactly rows * cols.
    out.grid = Array(repeating: false, count: N)
    coords.forEach { coord in
        // Row-major index.
        let ind = coord.x + coord.y * out.cols
        out.grid[ind] = true
    }
    return out
}
/// Copy the top-left rows x cols sub-grid of `orig` into a fresh Grid.
func copy(orig: Grid, rows: Int, cols: Int) -> Grid {
    var out = Grid(grid: [], rows: rows, cols: cols)
    // Half-open ranges: identical iteration for positive sizes, but safe for
    // rows == 0 or cols == 0 where the original closed ranges 0...(rows-1)
    // would trap at runtime.
    for r in 0..<rows {
        for c in 0..<cols {
            out.grid.append(orig.grid[c + r * orig.cols])
        }
    }
    return out
}
/// Fold the grid along the given line: cells beyond the fold line are
/// mirrored onto their counterparts and OR-ed in. Returns `orig` unchanged
/// if the Fold has neither axis set.
/// NOTE(review): `1...x` / `1...y` trap when the fold line is 0 — confirm the
/// input never folds along coordinate 0.
func fold(orig: Grid, fold: Fold) -> Grid {
    if let x = fold.x {
        // Vertical fold: keep columns [0, x), mirror column x+dc onto x-dc.
        let newcols = x
        var out = copy(orig: orig, rows: orig.rows, cols : newcols)
        for r in 0...(orig.rows-1) {
            for dc in 1...x {
                let ca = x - dc
                let cb = x + dc
                if cb < orig.cols {
                    out.grid[ca + r * out.cols] =
                      out.grid[ca + r * out.cols] ||
                      orig.grid[cb + r * orig.cols]
                }
            }
        }
        return out
    }
    if let y = fold.y {
        // Horizontal fold: keep rows [0, y), mirror row y+dr onto y-dr.
        let newcols = orig.cols
        var out = copy(orig: orig, rows: y, cols: orig.cols)
        for dr in 1...y {
            let ra = y - dr
            let rb = y + dr
            if rb < orig.rows {
                for c in 0...(newcols-1) {
                    out.grid[c + ra * out.cols] = out.grid[c + ra * out.cols]
                      || orig.grid[c + rb * orig.cols]
                }
            }
        }
        return out
    }
    return orig
}
/// Number of set (true) cells in the grid.
func countActive(g: Grid) -> Int {
    return g.grid.filter { $0 }.count
}
/// Render one cell: "#" for a dot, "." for empty.
func gridSymbol(b: Bool) -> String {
    return b ? "#" : "."
}
/// Print the grid: a header with dimensions and dot count, then one
/// row per line prefixed with its row index and a tab.
func dumpGrid(g: Grid) -> () {
    print("G(\(g.cols) x \(g.rows) = \(countActive(g: g)))")
    for r in 0...(g.rows-1) {
        let row = (0...(g.cols-1))
            .map { c in gridSymbol(b: g.grid[c + g.cols * r]) }
            .joined()
        print("\(r)\t\(row)")
    }
}
// Part 1: apply only the first fold instruction and report the dot count.
var grid = makeGrid(coords: coords)
if folds.count > 0 {
    let newg = fold(orig: grid, fold: folds[0])
    print(countActive(g: newg))
}
import React from 'react';
import useAdvanceSearch from '../../../Hooks/useAdvanceSearch'
const AdvanceSearchLg = () => {
const { SearchValues, handleChange, selectValues, handleChangeState, onSubmit } = useAdvanceSearch();
return (
<div className='AdvanceSearchLg'>
<div className="col-lg-2 px-1">
<div className="mb-3">
<input type="text" className=' form-control' name='id' value={SearchValues.id} onChange={handleChange} placeholder='Account No.' />
</div>
<div className="mb-3 d-grid">
<input type="button" className='btn btn-primary' value="Search" onClick={onSubmit} />
</div>
</div>
<div className="col-lg-10 d-flex flex-wrap">
<div className="col-lg-2 px-1">
<div className="mb-3">
<input type="text" className=' form-control' placeholder='First Name' value={SearchValues.first_name} name='first_name' onChange={handleChange} />
</div>
</div>
<div className="col-lg-2 px-1">
<div className="mb-3">
<input type="text" className=' form-control' placeholder='Last Name' value={SearchValues.last_name} name='last_name' onChange={handleChange} />
</div>
</div>
<div className="col-lg-2 px-1">
<div className="mb-3">
<input type="text" className=' form-control' placeholder='Nick Name' value={SearchValues.nick_name} name='nick_name' onChange={handleChange} />
</div>
</div>
<div className="col-lg-2 px-1">
<div className="mb-3">
<input type="date" className=' form-control' name='birthday' value={SearchValues.birthday} onChange={handleChange} />
</div>
</div>
<div className="col-lg-2 px-1">
<div className="mb-3">
<select className='form-control text-capitalize' name='memorial_for' value={SearchValues.memorial_for} onChange={handleChange}>
<option value="" disabled>Wall For</option>
<option value="Pet">Pet</option>
<option value="Individual">Individual</option>
<option value="Others">Others</option>
</select>
</div>
</div>
<div className="col-lg-2 px-1">
<div className="mb-3">
<select className='form-control text-capitalize' name='state' value={SearchValues.state} onChange={(e) => { handleChange(e); handleChangeState(e); }}>
<option value="" disabled>State</option>
{
selectValues.state.map((val, index) => {
return (
<option value={val.id} key={index}>{val.name}</option>
)
})
}
</select>
</div>
</div>
<div className="col-lg-2 px-1">
<div className="mb-3">
<select className='form-control' name='city' value={SearchValues.city} onChange={handleChange}>
<option value="" disabled>City</option>
{
selectValues.city.map((val, index) => {
return (
<option value={val.id} key={index}>{val.name}</option>
)
})
}
</select>
</div>
</div>
<div className="col-lg-2 px-1">
<div className="mb-3">
<select className='form-control' name='wall' value={SearchValues.wall} onChange={handleChange}>
<option value="community">Community Wall</option>
<option value="new">New Wall</option>
<option value="existing">Existing Wall</option>
</select>
</div>
</div>
<div className="col-lg-8 px-1">
<div className="mb-3 d-grid">
<input type="button" className='btn btn-primary' value="Search" onClick={onSubmit} />
</div>
</div>
</div>
</div>
)
}
export default AdvanceSearchLg |
// main-------------------------------------
// @file : testRe.go
// @author : Autumn
// @contact : rainy-autumn@outlook.com
// @time : 2024/5/27 18:56
// -------------------------------------------
package main
import (
"fmt"
"github.com/Autumn-27/ScopeSentry-Scan/pkg/system"
"github.com/Autumn-27/ScopeSentry-Scan/pkg/types"
"github.com/dlclark/regexp2"
"net/http"
_ "net/http/pprof"
"sync"
"time"
)
// main wires up pprof, loads the sensitive rules, and runs one Scan2 pass
// while waiting on the WaitGroup.
func main() {
	// Expose pprof on :6060 for profiling the regex scan.
	go func() {
		_ = http.ListenAndServe("0.0.0.0:6060", nil)
	}()
	testMsg := ""
	system.InitDb()
	system.UpdateSensitive()
	var wg sync.WaitGroup
	fmt.Println("begin test")
	for i := 1; i <= 1; i++ {
		wg.Add(1)
		go func() {
			defer wg.Done()
			// Fix: previously this body launched `go Scan2(...)` and returned
			// immediately, so wg.Done() fired before the scan ran and
			// wg.Wait() did not actually wait for it. Call Scan2 directly.
			Scan2("ddd", testMsg)
		}()
	}
	time.Sleep(5 * time.Second)
	wg.Wait()
}
// Scan runs every enabled sensitive-data rule sequentially over resp,
// collecting matches and printing per-rule and total timings.
// This is the serial baseline that Scan2 parallelizes.
func Scan(url string, resp string) {
	defer system.RecoverPanic("sensitiveMode")
	// Lazily load the rule set on first use.
	if len(system.SensitiveRules) == 0 {
		system.UpdateSensitive()
	}
	chunkSize := 5120
	overlapSize := 100
	NotificationMsg := "SensitiveScan Result:\n"
	allstart := time.Now()
	Sresults := []types.SensitiveResult{}
	for _, rule := range system.SensitiveRules {
		start := time.Now()
		if rule.State {
			r, err := regexp2.Compile(rule.Regular, 0)
			if err != nil {
				// A malformed rule regex is logged and skipped, not fatal.
				system.SlogError(fmt.Sprintf("Error compiling sensitive regex pattern: %s - %s", err, rule.ID))
				continue
			}
			// Stream matches chunk-by-chunk to bound regex input size.
			resultChan, errorChan := processInChunks(r, resp, chunkSize, overlapSize)
			for matches := range resultChan {
				if len(matches) != 0 {
					tmpResult := types.SensitiveResult{Url: url, SID: rule.ID, Match: matches, Body: resp, Time: system.GetTimeNow(), Color: rule.Color}
					Sresults = append(Sresults, tmpResult)
					NotificationMsg += fmt.Sprintf("%v\n%v:%v", url, rule.Name, matches)
				}
			}
			// errorChan is buffered (size 1) and closed by the producer, so
			// this receive never blocks once resultChan is drained.
			if err := <-errorChan; err != nil {
				system.SlogError(fmt.Sprintf("Error processing chunks: %s", err))
			}
		}
		elapsed := time.Since(start)
		fmt.Printf("%s Regex performance: %s\n", rule.Name, elapsed)
	}
	time.Sleep(5 * time.Second)
	allelapsed := time.Since(allstart)
	fmt.Printf("all Regex performance: %s\n", allelapsed)
}
// Scan2 is the concurrent variant of Scan: each enabled rule runs in its own
// goroutine, throttled to 10 concurrent rules by a semaphore channel, with
// shared result/notification state guarded by a mutex.
func Scan2(url string, resp string) {
	defer system.RecoverPanic("sensitiveMode")
	// Lazily load the rule set on first use.
	if len(system.SensitiveRules) == 0 {
		system.UpdateSensitive()
	}
	chunkSize := 5120
	overlapSize := 100
	NotificationMsg := "SensitiveScan Result:\n"
	Sresults := []types.SensitiveResult{}
	sem := make(chan struct{}, 10)
	var wg sync.WaitGroup
	mu := sync.Mutex{}
	allstart := time.Now()
	for _, rule := range system.SensitiveRules {
		if rule.State {
			wg.Add(1)
			// rule is passed by value so each goroutine sees its own copy.
			go func(rule types.SensitiveRule) {
				defer wg.Done()
				sem <- struct{}{} // acquire a token
				defer func() {
					<-sem
				}() // release the token
				start := time.Now()
				r, err := regexp2.Compile(rule.Regular, 0)
				if err != nil {
					system.SlogError(fmt.Sprintf("Error compiling sensitive regex pattern: %s - %s", err, rule.ID))
					return
				}
				resultChan, errorChan := processInChunks(r, resp, chunkSize, overlapSize)
				for matches := range resultChan {
					if len(matches) != 0 {
						tmpResult := types.SensitiveResult{Url: url, SID: rule.ID, Match: matches, Body: resp, Time: system.GetTimeNow(), Color: rule.Color}
						// Both accumulators are shared across rule goroutines.
						mu.Lock()
						Sresults = append(Sresults, tmpResult)
						NotificationMsg += fmt.Sprintf("%v\n%v:%v", url, rule.Name, matches)
						mu.Unlock()
					}
				}
				// Buffered and closed by the producer: never blocks here.
				if err := <-errorChan; err != nil {
					system.SlogError(fmt.Sprintf("Error processing chunks: %s", err))
				}
				elapsed := time.Since(start)
				fmt.Printf("%s Regex performance: %s\n", rule.Name, elapsed)
			}(rule)
		}
	}
	wg.Wait()
	time.Sleep(5 * time.Second)
	allelapsed := time.Since(allstart)
	fmt.Printf("all Regex performance: %s\n", allelapsed)
}
// processInChunks streams regex matches over text in chunks of chunkSize
// bytes, extending each chunk by overlapSize bytes so matches that straddle
// a chunk boundary (up to overlapSize long) are still found.
// Both returned channels are closed by the producer goroutine; errorChan is
// buffered so the single error send never blocks.
// NOTE(review): offsets are byte-based, so multi-byte UTF-8 sequences can be
// split at chunk edges, and a match inside an overlap region can be reported
// twice (once per adjacent chunk) — confirm whether callers de-duplicate.
func processInChunks(regex *regexp2.Regexp, text string, chunkSize int, overlapSize int) (chan []string, chan error) {
	resultChan := make(chan []string, 100)
	errorChan := make(chan error, 1)
	go func() {
		defer close(resultChan)
		defer close(errorChan)
		for start := 0; start < len(text); start += chunkSize {
			end := start + chunkSize
			if end > len(text) {
				end = len(text)
			}
			// Extend the window by the overlap when there is room.
			chunkEnd := end
			if end+overlapSize < len(text) {
				chunkEnd = end + overlapSize
			}
			matches, err := findMatchesInChunk(regex, text[start:chunkEnd])
			if err != nil {
				errorChan <- err
				return
			}
			if len(matches) > 0 {
				resultChan <- matches
			}
		}
	}()
	return resultChan, errorChan
}
func findMatchesInChunk(regex *regexp2.Regexp, text string) ([]string, error) {
var matches []string
m, _ := regex.FindStringMatch(text)
for m != nil {
matches = append(matches, m.String())
m, _ = regex.FindNextMatch(m)
}
return matches, nil
} |
// Copyright 2024 RisingWave Labs
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Converts between arrays and Apache Arrow arrays.
//!
//! This file acts as a template file for conversion code between
//! arrays and different version of Apache Arrow.
//!
//! The conversion logic will be implemented for the arrow version specified in the outer mod by
//! `super::arrow_xxx`, such as `super::arrow_array`.
//!
//! When we want to implement the conversion logic for an arrow version, we first
//! create a new mod file, and rename the corresponding arrow package name to `arrow_xxx`
//! using the `use` clause, and then declare a sub-mod and set its file path with attribute
//! `#[path = "./arrow_impl.rs"]` so that the code in this template file can be embedded to
//! the new mod file, and the conversion logic can be implemented for the corresponding arrow
//! version.
//!
//! Example can be seen in `arrow_default.rs`, which is also as followed:
//! ```ignore
//! use {arrow_array, arrow_buffer, arrow_cast, arrow_schema};
//!
//! #[allow(clippy::duplicate_mod)]
//! #[path = "./arrow_impl.rs"]
//! mod arrow_impl;
//! ```
use std::fmt::Write;
use std::sync::Arc;
use chrono::{NaiveDateTime, NaiveTime};
use itertools::Itertools;
// This is important because we want to use the arrow version specified by the outer mod.
use super::{arrow_array, arrow_buffer, arrow_cast, arrow_schema};
// Other import should always use the absolute path.
use crate::array::*;
use crate::buffer::Bitmap;
use crate::types::*;
use crate::util::iter_util::ZipEqFast;
/// Converts RisingWave array to Arrow array with the schema.
/// This function will try to convert the array if the type is not same with the schema.
pub fn to_record_batch_with_schema(
    schema: arrow_schema::SchemaRef,
    chunk: &DataChunk,
) -> Result<arrow_array::RecordBatch, ArrayError> {
    // Recurse once on a compacted clone so the columns carry no hidden rows.
    if !chunk.is_compacted() {
        let c = chunk.clone();
        return to_record_batch_with_schema(schema, &c.compact());
    }
    let columns: Vec<_> = chunk
        .columns()
        .iter()
        .zip_eq_fast(schema.fields().iter())
        .map(|(column, field)| {
            let column: arrow_array::ArrayRef = column.as_ref().try_into()?;
            // Cast only when the naturally-converted type differs from the
            // schema's declared field type.
            if column.data_type() == field.data_type() {
                Ok(column)
            } else {
                arrow_cast::cast(&column, field.data_type()).map_err(ArrayError::from_arrow)
            }
        })
        .try_collect::<_, _, ArrayError>()?;
    // Set the row count explicitly; it matters for zero-column batches.
    let opts = arrow_array::RecordBatchOptions::default().with_row_count(Some(chunk.capacity()));
    arrow_array::RecordBatch::try_new_with_options(schema, columns, &opts)
        .map_err(ArrayError::to_arrow)
}
// Implement bi-directional `From` between `DataChunk` and `arrow_array::RecordBatch`.
impl TryFrom<&DataChunk> for arrow_array::RecordBatch {
    type Error = ArrayError;

    /// Converts a chunk into a record batch, synthesizing the schema from the
    /// converted columns themselves (no external schema is supplied).
    fn try_from(chunk: &DataChunk) -> Result<Self, Self::Error> {
        // Recurse once on a compacted clone so the columns carry no hidden rows.
        if !chunk.is_compacted() {
            let c = chunk.clone();
            return Self::try_from(&c.compact());
        }
        let columns: Vec<_> = chunk
            .columns()
            .iter()
            .map(|column| column.as_ref().try_into())
            .try_collect::<_, _, Self::Error>()?;
        let fields: Vec<_> = columns
            .iter()
            .map(|array: &Arc<dyn arrow_array::Array>| {
                // Nullability is derived from the data actually present, not
                // from the column's declared type; fields are left unnamed ("").
                // NOTE(review): consumers that rely on schema nullability or
                // field names should use `to_record_batch_with_schema` instead.
                let nullable = array.null_count() > 0;
                let data_type = array.data_type().clone();
                arrow_schema::Field::new("", data_type, nullable)
            })
            .collect();
        let schema = Arc::new(arrow_schema::Schema::new(fields));
        let opts =
            arrow_array::RecordBatchOptions::default().with_row_count(Some(chunk.capacity()));
        arrow_array::RecordBatch::try_new_with_options(schema, columns, &opts)
            .map_err(ArrayError::to_arrow)
    }
}
impl TryFrom<&arrow_array::RecordBatch> for DataChunk {
    type Error = ArrayError;

    /// Converts an Arrow record batch back into a `DataChunk`, column by
    /// column; the chunk's cardinality is taken from the batch's row count.
    fn try_from(batch: &arrow_array::RecordBatch) -> Result<Self, Self::Error> {
        let mut columns = Vec::with_capacity(batch.num_columns());
        for array in batch.columns() {
            let column = Arc::new(array.try_into()?);
            columns.push(column);
        }
        Ok(DataChunk::new(columns, batch.num_rows()))
    }
}
/// Provides the default conversion logic for RisingWave array to Arrow array with type info.
///
/// Each `*_to_arrow` method receives the target Arrow `DataType` so that
/// implementors can override individual conversions; the defaults here
/// ignore the type hint and use a fixed mapping.
pub trait ToArrowArrayWithTypeConvert {
    /// Dispatches on the concrete RisingWave array variant.
    fn to_arrow_with_type(
        &self,
        data_type: &arrow_schema::DataType,
        array: &ArrayImpl,
    ) -> Result<arrow_array::ArrayRef, ArrayError> {
        match array {
            ArrayImpl::Int16(array) => self.int16_to_arrow(data_type, array),
            ArrayImpl::Int32(array) => self.int32_to_arrow(data_type, array),
            ArrayImpl::Int64(array) => self.int64_to_arrow(data_type, array),
            ArrayImpl::Float32(array) => self.float32_to_arrow(data_type, array),
            ArrayImpl::Float64(array) => self.float64_to_arrow(data_type, array),
            ArrayImpl::Utf8(array) => self.utf8_to_arrow(data_type, array),
            ArrayImpl::Bool(array) => self.bool_to_arrow(data_type, array),
            ArrayImpl::Decimal(array) => self.decimal_to_arrow(data_type, array),
            ArrayImpl::Int256(array) => self.int256_to_arrow(data_type, array),
            ArrayImpl::Date(array) => self.date_to_arrow(data_type, array),
            ArrayImpl::Timestamp(array) => self.timestamp_to_arrow(data_type, array),
            ArrayImpl::Timestamptz(array) => self.timestamptz_to_arrow(data_type, array),
            ArrayImpl::Time(array) => self.time_to_arrow(data_type, array),
            ArrayImpl::Interval(array) => self.interval_to_arrow(data_type, array),
            ArrayImpl::Struct(array) => self.struct_to_arrow(data_type, array),
            ArrayImpl::List(array) => self.list_to_arrow(data_type, array),
            ArrayImpl::Bytea(array) => self.bytea_to_arrow(data_type, array),
            ArrayImpl::Jsonb(array) => self.jsonb_to_arrow(data_type, array),
            ArrayImpl::Serial(array) => self.serial_to_arrow(data_type, array),
        }
    }

    #[inline]
    fn int16_to_arrow(
        &self,
        _data_type: &arrow_schema::DataType,
        array: &I16Array,
    ) -> Result<arrow_array::ArrayRef, ArrayError> {
        Ok(Arc::new(arrow_array::Int16Array::from(array)))
    }

    #[inline]
    fn int32_to_arrow(
        &self,
        _data_type: &arrow_schema::DataType,
        array: &I32Array,
    ) -> Result<arrow_array::ArrayRef, ArrayError> {
        Ok(Arc::new(arrow_array::Int32Array::from(array)))
    }

    #[inline]
    fn int64_to_arrow(
        &self,
        _data_type: &arrow_schema::DataType,
        array: &I64Array,
    ) -> Result<arrow_array::ArrayRef, ArrayError> {
        Ok(Arc::new(arrow_array::Int64Array::from(array)))
    }

    #[inline]
    fn float32_to_arrow(
        &self,
        _data_type: &arrow_schema::DataType,
        array: &F32Array,
    ) -> Result<arrow_array::ArrayRef, ArrayError> {
        Ok(Arc::new(arrow_array::Float32Array::from(array)))
    }

    #[inline]
    fn float64_to_arrow(
        &self,
        _data_type: &arrow_schema::DataType,
        array: &F64Array,
    ) -> Result<arrow_array::ArrayRef, ArrayError> {
        Ok(Arc::new(arrow_array::Float64Array::from(array)))
    }

    #[inline]
    fn utf8_to_arrow(
        &self,
        _data_type: &arrow_schema::DataType,
        array: &Utf8Array,
    ) -> Result<arrow_array::ArrayRef, ArrayError> {
        Ok(Arc::new(arrow_array::StringArray::from(array)))
    }

    #[inline]
    fn bool_to_arrow(
        &self,
        _data_type: &arrow_schema::DataType,
        array: &BoolArray,
    ) -> Result<arrow_array::ArrayRef, ArrayError> {
        Ok(Arc::new(arrow_array::BooleanArray::from(array)))
    }

    // Decimal values are stored as ASCII text representation in a large binary array.
    #[inline]
    fn decimal_to_arrow(
        &self,
        _data_type: &arrow_schema::DataType,
        array: &DecimalArray,
    ) -> Result<arrow_array::ArrayRef, ArrayError> {
        Ok(Arc::new(arrow_array::LargeBinaryArray::from(array)))
    }

    #[inline]
    fn int256_to_arrow(
        &self,
        _data_type: &arrow_schema::DataType,
        array: &Int256Array,
    ) -> Result<arrow_array::ArrayRef, ArrayError> {
        Ok(Arc::new(arrow_array::Decimal256Array::from(array)))
    }

    #[inline]
    fn date_to_arrow(
        &self,
        _data_type: &arrow_schema::DataType,
        array: &DateArray,
    ) -> Result<arrow_array::ArrayRef, ArrayError> {
        Ok(Arc::new(arrow_array::Date32Array::from(array)))
    }

    // Timestamps map to microsecond precision; the timezone-less variant
    // below stays naive, while timestamptz is tagged UTC.
    #[inline]
    fn timestamp_to_arrow(
        &self,
        _data_type: &arrow_schema::DataType,
        array: &TimestampArray,
    ) -> Result<arrow_array::ArrayRef, ArrayError> {
        Ok(Arc::new(arrow_array::TimestampMicrosecondArray::from(
            array,
        )))
    }

    #[inline]
    fn timestamptz_to_arrow(
        &self,
        _data_type: &arrow_schema::DataType,
        array: &TimestamptzArray,
    ) -> Result<arrow_array::ArrayRef, ArrayError> {
        Ok(Arc::new(
            arrow_array::TimestampMicrosecondArray::from(array).with_timezone_utc(),
        ))
    }

    #[inline]
    fn time_to_arrow(
        &self,
        _data_type: &arrow_schema::DataType,
        array: &TimeArray,
    ) -> Result<arrow_array::ArrayRef, ArrayError> {
        Ok(Arc::new(arrow_array::Time64MicrosecondArray::from(array)))
    }

    #[inline]
    fn interval_to_arrow(
        &self,
        _data_type: &arrow_schema::DataType,
        array: &IntervalArray,
    ) -> Result<arrow_array::ArrayRef, ArrayError> {
        Ok(Arc::new(arrow_array::IntervalMonthDayNanoArray::from(
            array,
        )))
    }

    #[inline]
    fn struct_to_arrow(
        &self,
        _data_type: &arrow_schema::DataType,
        array: &StructArray,
    ) -> Result<arrow_array::ArrayRef, ArrayError> {
        Ok(Arc::new(arrow_array::StructArray::try_from(array)?))
    }

    #[inline]
    fn list_to_arrow(
        &self,
        _data_type: &arrow_schema::DataType,
        array: &ListArray,
    ) -> Result<arrow_array::ArrayRef, ArrayError> {
        Ok(Arc::new(arrow_array::ListArray::try_from(array)?))
    }

    #[inline]
    fn bytea_to_arrow(
        &self,
        _data_type: &arrow_schema::DataType,
        array: &BytesArray,
    ) -> Result<arrow_array::ArrayRef, ArrayError> {
        Ok(Arc::new(arrow_array::BinaryArray::from(array)))
    }

    // JSON values are stored as text representation in a large string array.
    #[inline]
    fn jsonb_to_arrow(
        &self,
        _data_type: &arrow_schema::DataType,
        array: &JsonbArray,
    ) -> Result<arrow_array::ArrayRef, ArrayError> {
        Ok(Arc::new(arrow_array::LargeStringArray::from(array)))
    }

    // Serial has no Arrow mapping yet; this default panics via todo!().
    #[inline]
    fn serial_to_arrow(
        &self,
        _data_type: &arrow_schema::DataType,
        _array: &SerialArray,
    ) -> Result<arrow_array::ArrayRef, ArrayError> {
        todo!("serial type is not supported to convert to arrow")
    }
}
/// Provides the default conversion logic for a RisingWave array to an Arrow
/// array, dispatching on the `ArrayImpl` variant alone (no target Arrow type
/// is consulted). Implementors may override any per-type hook to customize a
/// single conversion while inheriting the rest.
pub trait ToArrowArrayConvert {
    /// Dispatches `array` to the per-type conversion hook for its variant.
    fn to_arrow(&self, array: &ArrayImpl) -> Result<arrow_array::ArrayRef, ArrayError> {
        match array {
            ArrayImpl::Int16(array) => self.int16_to_arrow(array),
            ArrayImpl::Int32(array) => self.int32_to_arrow(array),
            ArrayImpl::Int64(array) => self.int64_to_arrow(array),
            ArrayImpl::Float32(array) => self.float32_to_arrow(array),
            ArrayImpl::Float64(array) => self.float64_to_arrow(array),
            ArrayImpl::Utf8(array) => self.utf8_to_arrow(array),
            ArrayImpl::Bool(array) => self.bool_to_arrow(array),
            ArrayImpl::Decimal(array) => self.decimal_to_arrow(array),
            ArrayImpl::Int256(array) => self.int256_to_arrow(array),
            ArrayImpl::Date(array) => self.date_to_arrow(array),
            ArrayImpl::Timestamp(array) => self.timestamp_to_arrow(array),
            ArrayImpl::Timestamptz(array) => self.timestamptz_to_arrow(array),
            ArrayImpl::Time(array) => self.time_to_arrow(array),
            ArrayImpl::Interval(array) => self.interval_to_arrow(array),
            ArrayImpl::Struct(array) => self.struct_to_arrow(array),
            ArrayImpl::List(array) => self.list_to_arrow(array),
            ArrayImpl::Bytea(array) => self.bytea_to_arrow(array),
            ArrayImpl::Jsonb(array) => self.jsonb_to_arrow(array),
            ArrayImpl::Serial(array) => self.serial_to_arrow(array),
        }
    }

    #[inline]
    fn int16_to_arrow(&self, array: &I16Array) -> Result<arrow_array::ArrayRef, ArrayError> {
        Ok(Arc::new(arrow_array::Int16Array::from(array)))
    }

    #[inline]
    fn int32_to_arrow(&self, array: &I32Array) -> Result<arrow_array::ArrayRef, ArrayError> {
        Ok(Arc::new(arrow_array::Int32Array::from(array)))
    }

    #[inline]
    fn int64_to_arrow(&self, array: &I64Array) -> Result<arrow_array::ArrayRef, ArrayError> {
        Ok(Arc::new(arrow_array::Int64Array::from(array)))
    }

    #[inline]
    fn float32_to_arrow(&self, array: &F32Array) -> Result<arrow_array::ArrayRef, ArrayError> {
        Ok(Arc::new(arrow_array::Float32Array::from(array)))
    }

    #[inline]
    fn float64_to_arrow(&self, array: &F64Array) -> Result<arrow_array::ArrayRef, ArrayError> {
        Ok(Arc::new(arrow_array::Float64Array::from(array)))
    }

    #[inline]
    fn utf8_to_arrow(&self, array: &Utf8Array) -> Result<arrow_array::ArrayRef, ArrayError> {
        Ok(Arc::new(arrow_array::StringArray::from(array)))
    }

    #[inline]
    fn bool_to_arrow(&self, array: &BoolArray) -> Result<arrow_array::ArrayRef, ArrayError> {
        Ok(Arc::new(arrow_array::BooleanArray::from(array)))
    }

    // Decimal values are stored as ASCII text representation in a large binary array.
    #[inline]
    fn decimal_to_arrow(&self, array: &DecimalArray) -> Result<arrow_array::ArrayRef, ArrayError> {
        Ok(Arc::new(arrow_array::LargeBinaryArray::from(array)))
    }

    #[inline]
    fn int256_to_arrow(&self, array: &Int256Array) -> Result<arrow_array::ArrayRef, ArrayError> {
        Ok(Arc::new(arrow_array::Decimal256Array::from(array)))
    }

    #[inline]
    fn date_to_arrow(&self, array: &DateArray) -> Result<arrow_array::ArrayRef, ArrayError> {
        Ok(Arc::new(arrow_array::Date32Array::from(array)))
    }

    #[inline]
    fn timestamp_to_arrow(
        &self,
        array: &TimestampArray,
    ) -> Result<arrow_array::ArrayRef, ArrayError> {
        Ok(Arc::new(arrow_array::TimestampMicrosecondArray::from(
            array,
        )))
    }

    // Timestamptz maps to a microsecond timestamp tagged with the UTC timezone.
    #[inline]
    fn timestamptz_to_arrow(
        &self,
        array: &TimestamptzArray,
    ) -> Result<arrow_array::ArrayRef, ArrayError> {
        Ok(Arc::new(
            arrow_array::TimestampMicrosecondArray::from(array).with_timezone_utc(),
        ))
    }

    #[inline]
    fn time_to_arrow(&self, array: &TimeArray) -> Result<arrow_array::ArrayRef, ArrayError> {
        Ok(Arc::new(arrow_array::Time64MicrosecondArray::from(array)))
    }

    #[inline]
    fn interval_to_arrow(
        &self,
        array: &IntervalArray,
    ) -> Result<arrow_array::ArrayRef, ArrayError> {
        Ok(Arc::new(arrow_array::IntervalMonthDayNanoArray::from(
            array,
        )))
    }

    // Nested conversions are fallible because the element types may not be convertible.
    #[inline]
    fn struct_to_arrow(&self, array: &StructArray) -> Result<arrow_array::ArrayRef, ArrayError> {
        Ok(Arc::new(arrow_array::StructArray::try_from(array)?))
    }

    #[inline]
    fn list_to_arrow(&self, array: &ListArray) -> Result<arrow_array::ArrayRef, ArrayError> {
        Ok(Arc::new(arrow_array::ListArray::try_from(array)?))
    }

    #[inline]
    fn bytea_to_arrow(&self, array: &BytesArray) -> Result<arrow_array::ArrayRef, ArrayError> {
        Ok(Arc::new(arrow_array::BinaryArray::from(array)))
    }

    // JSON values are stored as text representation in a large string array.
    #[inline]
    fn jsonb_to_arrow(&self, array: &JsonbArray) -> Result<arrow_array::ArrayRef, ArrayError> {
        Ok(Arc::new(arrow_array::LargeStringArray::from(array)))
    }

    // NOTE: serial currently has no Arrow representation; calling this panics.
    #[inline]
    fn serial_to_arrow(&self, _array: &SerialArray) -> Result<arrow_array::ArrayRef, ArrayError> {
        todo!("serial type is not supported to convert to arrow")
    }
}
/// Provides the default mapping from a RisingWave `DataType` to the
/// corresponding Arrow `DataType`. Implementors may override individual
/// `*_type_to_arrow` hooks to customize single mappings.
pub trait ToArrowTypeConvert {
    /// Dispatches `value` to the per-type hook. Only struct and list types
    /// can fail (their element types are converted recursively).
    fn to_arrow_type(&self, value: &DataType) -> Result<arrow_schema::DataType, ArrayError> {
        match value {
            DataType::Boolean => Ok(self.bool_type_to_arrow()),
            DataType::Int16 => Ok(self.int16_type_to_arrow()),
            DataType::Int32 => Ok(self.int32_type_to_arrow()),
            DataType::Int64 => Ok(self.int64_type_to_arrow()),
            DataType::Int256 => Ok(self.int256_type_to_arrow()),
            DataType::Float32 => Ok(self.float32_type_to_arrow()),
            DataType::Float64 => Ok(self.float64_type_to_arrow()),
            DataType::Date => Ok(self.date_type_to_arrow()),
            DataType::Timestamp => Ok(self.timestamp_type_to_arrow()),
            DataType::Timestamptz => Ok(self.timestamptz_type_to_arrow()),
            DataType::Time => Ok(self.time_type_to_arrow()),
            DataType::Interval => Ok(self.interval_type_to_arrow()),
            DataType::Varchar => Ok(self.varchar_type_to_arrow()),
            DataType::Jsonb => Ok(self.jsonb_type_to_arrow()),
            DataType::Bytea => Ok(self.bytea_type_to_arrow()),
            DataType::Decimal => Ok(self.decimal_type_to_arrow()),
            DataType::Serial => Ok(self.serial_type_to_arrow()),
            DataType::Struct(fields) => self.struct_type_to_arrow(fields),
            DataType::List(datatype) => self.list_type_to_arrow(datatype),
        }
    }

    #[inline]
    fn bool_type_to_arrow(&self) -> arrow_schema::DataType {
        arrow_schema::DataType::Boolean
    }

    #[inline]
    fn int32_type_to_arrow(&self) -> arrow_schema::DataType {
        arrow_schema::DataType::Int32
    }

    #[inline]
    fn int64_type_to_arrow(&self) -> arrow_schema::DataType {
        arrow_schema::DataType::Int64
    }

    #[inline]
    fn int16_type_to_arrow(&self) -> arrow_schema::DataType {
        arrow_schema::DataType::Int16
    }

    // Int256 is encoded as a Decimal256 with maximum precision and scale 0.
    #[inline]
    fn int256_type_to_arrow(&self) -> arrow_schema::DataType {
        arrow_schema::DataType::Decimal256(arrow_schema::DECIMAL256_MAX_PRECISION, 0)
    }

    #[inline]
    fn float32_type_to_arrow(&self) -> arrow_schema::DataType {
        arrow_schema::DataType::Float32
    }

    #[inline]
    fn float64_type_to_arrow(&self) -> arrow_schema::DataType {
        arrow_schema::DataType::Float64
    }

    #[inline]
    fn date_type_to_arrow(&self) -> arrow_schema::DataType {
        arrow_schema::DataType::Date32
    }

    #[inline]
    fn timestamp_type_to_arrow(&self) -> arrow_schema::DataType {
        arrow_schema::DataType::Timestamp(arrow_schema::TimeUnit::Microsecond, None)
    }

    // Timestamptz carries an explicit UTC timezone tag.
    #[inline]
    fn timestamptz_type_to_arrow(&self) -> arrow_schema::DataType {
        arrow_schema::DataType::Timestamp(
            arrow_schema::TimeUnit::Microsecond,
            Some("+00:00".into()),
        )
    }

    #[inline]
    fn time_type_to_arrow(&self) -> arrow_schema::DataType {
        arrow_schema::DataType::Time64(arrow_schema::TimeUnit::Microsecond)
    }

    #[inline]
    fn interval_type_to_arrow(&self) -> arrow_schema::DataType {
        arrow_schema::DataType::Interval(arrow_schema::IntervalUnit::MonthDayNano)
    }

    #[inline]
    fn varchar_type_to_arrow(&self) -> arrow_schema::DataType {
        arrow_schema::DataType::Utf8
    }

    // JSON values are transported as their text representation.
    #[inline]
    fn jsonb_type_to_arrow(&self) -> arrow_schema::DataType {
        arrow_schema::DataType::LargeUtf8
    }

    #[inline]
    fn bytea_type_to_arrow(&self) -> arrow_schema::DataType {
        arrow_schema::DataType::Binary
    }

    // Unconstrained decimals are transported as their ASCII representation.
    #[inline]
    fn decimal_type_to_arrow(&self) -> arrow_schema::DataType {
        arrow_schema::DataType::LargeBinary
    }

    // NOTE: serial currently has no Arrow representation; calling this panics.
    #[inline]
    fn serial_type_to_arrow(&self) -> arrow_schema::DataType {
        todo!("serial type is not supported to convert to arrow")
    }

    #[inline]
    fn list_type_to_arrow(
        &self,
        datatype: &DataType,
    ) -> Result<arrow_schema::DataType, ArrayError> {
        Ok(arrow_schema::DataType::List(Arc::new(
            arrow_schema::Field::new("item", datatype.try_into()?, true),
        )))
    }

    #[inline]
    fn struct_type_to_arrow(
        &self,
        fields: &StructType,
    ) -> Result<arrow_schema::DataType, ArrayError> {
        Ok(arrow_schema::DataType::Struct(
            fields
                .iter()
                .map(|(name, ty)| Ok(arrow_schema::Field::new(name, ty.try_into()?, true)))
                .try_collect::<_, _, ArrayError>()?,
        ))
    }
}
/// Zero-sized type carrying the unmodified default RisingWave -> Arrow
/// array conversions; used by the blanket `TryFrom` impl below.
struct DefaultArrowConvert;
impl ToArrowArrayConvert for DefaultArrowConvert {}
/// Implement bi-directional `From` between `ArrayImpl` and `arrow_array::ArrayRef`.
///
/// Each `{ ArrowType, ArrowPattern, ArrayImplPattern }` triple maps one Arrow
/// array type (matched by its `DataType` pattern) to one `ArrayImpl` variant.
/// Cases that need extra logic (timezone-tagged timestamps, `Decimal128`)
/// are handled by hand-written match arms after the generated ones.
macro_rules! converts_generic {
    ($({ $ArrowType:ty, $ArrowPattern:pat, $ArrayImplPattern:path }),*) => {
        // RisingWave array -> Arrow array: delegate to the default converter.
        impl TryFrom<&ArrayImpl> for arrow_array::ArrayRef {
            type Error = ArrayError;
            fn try_from(array: &ArrayImpl) -> Result<Self, Self::Error> {
                DefaultArrowConvert{}.to_arrow(array)
            }
        }
        // Arrow array -> RisingWave array: dispatch on the Arrow data type,
        // then downcast to the concrete Arrow array and convert.
        impl TryFrom<&arrow_array::ArrayRef> for ArrayImpl {
            type Error = ArrayError;
            fn try_from(array: &arrow_array::ArrayRef) -> Result<Self, Self::Error> {
                use arrow_schema::DataType::*;
                use arrow_schema::IntervalUnit::*;
                use arrow_schema::TimeUnit::*;
                match array.data_type() {
                    $($ArrowPattern => Ok($ArrayImplPattern(
                        array
                            .as_any()
                            .downcast_ref::<$ArrowType>()
                            .unwrap()
                            .try_into()?,
                    )),)*
                    // Any timezone tag maps to Timestamptz (values are UTC-based).
                    Timestamp(Microsecond, Some(_)) => Ok(ArrayImpl::Timestamptz(
                        array
                            .as_any()
                            .downcast_ref::<arrow_array::TimestampMicrosecondArray>()
                            .unwrap()
                            .try_into()?,
                    )),
                    Timestamp(Millisecond, Some(_)) => Ok(ArrayImpl::Timestamptz(
                        array
                            .as_any()
                            .downcast_ref::<arrow_array::TimestampMillisecondArray>()
                            .unwrap()
                            .try_into()?,
                    )),
                    // This arrow decimal type is used by iceberg source to read iceberg decimal into RW decimal.
                    Decimal128(_, _) => Ok(ArrayImpl::Decimal(
                        array
                            .as_any()
                            .downcast_ref::<arrow_array::Decimal128Array>()
                            .unwrap()
                            .try_into()?,
                    )),
                    t => Err(ArrayError::from_arrow(format!("unsupported data type: {t:?}"))),
                }
            }
        }
    };
}
// The canonical Arrow type <-> ArrayImpl variant pairs. Keep this list in
// sync with `ToArrowArrayConvert` above and the `From<&arrow_schema::DataType>`
// impl below.
converts_generic! {
    { arrow_array::Int16Array, Int16, ArrayImpl::Int16 },
    { arrow_array::Int32Array, Int32, ArrayImpl::Int32 },
    { arrow_array::Int64Array, Int64, ArrayImpl::Int64 },
    { arrow_array::Float32Array, Float32, ArrayImpl::Float32 },
    { arrow_array::Float64Array, Float64, ArrayImpl::Float64 },
    { arrow_array::StringArray, Utf8, ArrayImpl::Utf8 },
    { arrow_array::BooleanArray, Boolean, ArrayImpl::Bool },
    // Arrow doesn't have a data type to represent unconstrained numeric (`DECIMAL` in RisingWave and
    // Postgres). So we pick a special type `LargeBinary` for it.
    // Values stored in the array are the string representation of the decimal. e.g. b"1.234", b"+inf"
    { arrow_array::LargeBinaryArray, LargeBinary, ArrayImpl::Decimal },
    { arrow_array::Decimal256Array, Decimal256(_, _), ArrayImpl::Int256 },
    { arrow_array::Date32Array, Date32, ArrayImpl::Date },
    { arrow_array::TimestampMicrosecondArray, Timestamp(Microsecond, None), ArrayImpl::Timestamp },
    { arrow_array::Time64MicrosecondArray, Time64(Microsecond), ArrayImpl::Time },
    { arrow_array::IntervalMonthDayNanoArray, Interval(MonthDayNano), ArrayImpl::Interval },
    { arrow_array::StructArray, Struct(_), ArrayImpl::Struct },
    { arrow_array::ListArray, List(_), ArrayImpl::List },
    { arrow_array::BinaryArray, Binary, ArrayImpl::Bytea },
    { arrow_array::LargeStringArray, LargeUtf8, ArrayImpl::Jsonb } // we use LargeUtf8 to represent Jsonb in arrow
}
// Arrow Datatype -> Risingwave Datatype
//
// NOTE: this is the inverse of `ToArrowTypeConvert`, with extra leniency:
// millisecond timestamps also map to Timestamp/Timestamptz, and `Decimal128`
// (used by the iceberg source) maps to Decimal. Unsupported types panic via
// `todo!` — callers must pre-filter with the `TryFrom` direction if needed.
impl From<&arrow_schema::DataType> for DataType {
    fn from(value: &arrow_schema::DataType) -> Self {
        use arrow_schema::DataType::*;
        use arrow_schema::IntervalUnit::*;
        use arrow_schema::TimeUnit::*;
        match value {
            Boolean => Self::Boolean,
            Int16 => Self::Int16,
            Int32 => Self::Int32,
            Int64 => Self::Int64,
            Float32 => Self::Float32,
            Float64 => Self::Float64,
            // LargeBinary carries the textual representation of unconstrained decimals.
            LargeBinary => Self::Decimal,
            Decimal256(_, _) => Self::Int256,
            Date32 => Self::Date,
            Time64(Microsecond) => Self::Time,
            Timestamp(Microsecond, None) => Self::Timestamp,
            Timestamp(Microsecond, Some(_)) => Self::Timestamptz,
            Timestamp(Millisecond, None) => Self::Timestamp,
            Timestamp(Millisecond, Some(_)) => Self::Timestamptz,
            Interval(MonthDayNano) => Self::Interval,
            Binary => Self::Bytea,
            Utf8 => Self::Varchar,
            // LargeUtf8 carries the textual representation of JSON values.
            LargeUtf8 => Self::Jsonb,
            Struct(fields) => Self::Struct(fields.into()),
            List(field) => Self::List(Box::new(field.data_type().into())),
            Decimal128(_, _) => Self::Decimal,
            _ => todo!("Unsupported arrow data type: {value:?}"),
        }
    }
}
/// Builds a RisingWave `StructType` from an Arrow field list, converting
/// each field's data type recursively.
impl From<&arrow_schema::Fields> for StructType {
    fn from(fields: &arrow_schema::Fields) -> Self {
        let mut pairs = Vec::with_capacity(fields.len());
        for field in fields.iter() {
            pairs.push((field.name().clone(), field.data_type().into()));
        }
        Self::new(pairs)
    }
}
/// Builds an Arrow field list from a RisingWave `StructType`. All fields are
/// emitted as nullable; fails if any field type has no Arrow equivalent.
impl TryFrom<&StructType> for arrow_schema::Fields {
    type Error = ArrayError;
    fn try_from(struct_type: &StructType) -> Result<Self, Self::Error> {
        struct_type
            .iter()
            .map(|(name, ty)| Ok(arrow_schema::Field::new(name, ty.try_into()?, true)))
            .try_collect()
    }
}
/// Owned-value convenience wrapper around the `&arrow_schema::DataType`
/// conversion above.
impl From<arrow_schema::DataType> for DataType {
    fn from(value: arrow_schema::DataType) -> Self {
        Self::from(&value)
    }
}
/// Zero-sized type carrying the unmodified default RisingWave -> Arrow
/// type mapping; used by the blanket `TryFrom` impls below.
struct DefaultArrowTypeConvert;
impl ToArrowTypeConvert for DefaultArrowTypeConvert {}
/// Converts a RisingWave data type to its Arrow equivalent using the
/// default mapping.
impl TryFrom<&DataType> for arrow_schema::DataType {
    type Error = ArrayError;
    fn try_from(value: &DataType) -> Result<Self, Self::Error> {
        DefaultArrowTypeConvert {}.to_arrow_type(value)
    }
}
/// Owned-value convenience wrapper around the `&DataType` conversion above.
impl TryFrom<DataType> for arrow_schema::DataType {
    type Error = ArrayError;

    fn try_from(value: DataType) -> Result<Self, Self::Error> {
        Self::try_from(&value)
    }
}
/// Converts a RisingWave validity bitmap into an Arrow null buffer.
/// A set bit marks a valid (non-null) element in both representations.
impl From<&Bitmap> for arrow_buffer::NullBuffer {
    fn from(bitmap: &Bitmap) -> Self {
        Self::from_iter(bitmap.iter())
    }
}
/// Implement bi-directional `From` between concrete array types.
///
/// The plain form works for element types whose scalar representation is
/// identical on both sides and simply `collect`s the iterators. The `@map`
/// form routes every scalar through the `FromIntoArrow` trait, for element
/// types whose encodings differ (dates, times, intervals, ordered floats).
/// Both forms also implement `From<&[ArrowType]>` to concatenate several
/// Arrow arrays into one RisingWave array.
macro_rules! converts {
    ($ArrayType:ty, $ArrowType:ty) => {
        impl From<&$ArrayType> for $ArrowType {
            fn from(array: &$ArrayType) -> Self {
                array.iter().collect()
            }
        }
        impl From<&$ArrowType> for $ArrayType {
            fn from(array: &$ArrowType) -> Self {
                array.iter().collect()
            }
        }
        impl From<&[$ArrowType]> for $ArrayType {
            fn from(arrays: &[$ArrowType]) -> Self {
                arrays.iter().flat_map(|a| a.iter()).collect()
            }
        }
    };
    // convert values using FromIntoArrow
    ($ArrayType:ty, $ArrowType:ty, @map) => {
        impl From<&$ArrayType> for $ArrowType {
            fn from(array: &$ArrayType) -> Self {
                array.iter().map(|o| o.map(|v| v.into_arrow())).collect()
            }
        }
        impl From<&$ArrowType> for $ArrayType {
            fn from(array: &$ArrowType) -> Self {
                array
                    .iter()
                    .map(|o| {
                        o.map(|v| {
                            <<$ArrayType as Array>::RefItem<'_> as FromIntoArrow>::from_arrow(v)
                        })
                    })
                    .collect()
            }
        }
        impl From<&[$ArrowType]> for $ArrayType {
            fn from(arrays: &[$ArrowType]) -> Self {
                arrays
                    .iter()
                    .flat_map(|a| a.iter())
                    .map(|o| {
                        o.map(|v| {
                            <<$ArrayType as Array>::RefItem<'_> as FromIntoArrow>::from_arrow(v)
                        })
                    })
                    .collect()
            }
        }
    };
}
converts!(BoolArray, arrow_array::BooleanArray);
converts!(I16Array, arrow_array::Int16Array);
converts!(I32Array, arrow_array::Int32Array);
converts!(I64Array, arrow_array::Int64Array);
converts!(F32Array, arrow_array::Float32Array, @map);
converts!(F64Array, arrow_array::Float64Array, @map);
converts!(BytesArray, arrow_array::BinaryArray);
converts!(Utf8Array, arrow_array::StringArray);
converts!(DateArray, arrow_array::Date32Array, @map);
converts!(TimeArray, arrow_array::Time64MicrosecondArray, @map);
converts!(TimestampArray, arrow_array::TimestampMicrosecondArray, @map);
converts!(TimestamptzArray, arrow_array::TimestampMicrosecondArray, @map);
// TimestamptzArray <- TimestampMillisecondArray cannot go through the macro
// (it would conflict with the microsecond impl above), so it is hand-written below.
converts!(IntervalArray, arrow_array::IntervalMonthDayNanoArray, @map);
/// Converts a millisecond-resolution Arrow timestamp array into a
/// `TimestamptzArray` (stored in microseconds).
///
/// Panics if a value is outside the range representable by `Timestamptz`
/// (`from_millis` returning `None`).
impl From<&arrow_array::TimestampMillisecondArray> for TimestamptzArray {
    fn from(array: &arrow_array::TimestampMillisecondArray) -> Self {
        array
            .iter()
            .map(|o| o.map(|v| Timestamptz::from_millis(v).unwrap()))
            .collect()
    }
}
/// Converts RisingWave value from and into Arrow value.
///
/// Implemented for scalar types whose physical encoding differs between the
/// two systems (ordered floats, dates, times, intervals); plain integers and
/// strings convert directly and do not need this trait.
trait FromIntoArrow {
    /// The corresponding element type in the Arrow array.
    type ArrowType;
    /// Decodes an Arrow-encoded value into the RisingWave representation.
    fn from_arrow(value: Self::ArrowType) -> Self;
    /// Encodes this value into the Arrow representation.
    fn into_arrow(self) -> Self::ArrowType;
}
impl FromIntoArrow for F32 {
type ArrowType = f32;
fn from_arrow(value: Self::ArrowType) -> Self {
value.into()
}
fn into_arrow(self) -> Self::ArrowType {
self.into()
}
}
impl FromIntoArrow for F64 {
type ArrowType = f64;
fn from_arrow(value: Self::ArrowType) -> Self {
value.into()
}
fn into_arrow(self) -> Self::ArrowType {
self.into()
}
}
/// `Date` <-> Arrow `Date32` (days since the Unix epoch, negative for
/// pre-1970 dates). Delegates both directions to arrow-rs helpers.
impl FromIntoArrow for Date {
    type ArrowType = i32;
    fn from_arrow(value: Self::ArrowType) -> Self {
        Date(arrow_array::types::Date32Type::to_naive_date(value))
    }
    fn into_arrow(self) -> Self::ArrowType {
        arrow_array::types::Date32Type::from_naive_date(self.0)
    }
}
/// `Time` <-> Arrow `Time64(Microsecond)` (microseconds since midnight).
///
/// Valid times are always in `[0, 86_400_000_000)`, so the plain `/` and `%`
/// split into seconds and nanoseconds is safe here (unlike for timestamps,
/// which can be negative).
impl FromIntoArrow for Time {
    type ArrowType = i64;
    fn from_arrow(value: Self::ArrowType) -> Self {
        Time(
            NaiveTime::from_num_seconds_from_midnight_opt(
                (value / 1_000_000) as _,
                (value % 1_000_000 * 1000) as _,
            )
            .unwrap(),
        )
    }
    fn into_arrow(self) -> Self::ArrowType {
        self.0
            .signed_duration_since(NaiveTime::default())
            .num_microseconds()
            .unwrap()
    }
}
/// `Timestamp` <-> Arrow `Timestamp(Microsecond, None)` (microseconds since
/// the Unix epoch; negative for pre-1970 instants).
impl FromIntoArrow for Timestamp {
    type ArrowType = i64;

    fn from_arrow(value: Self::ArrowType) -> Self {
        // Split microseconds into (seconds, nanoseconds) with Euclidean
        // division so that pre-epoch (negative) values yield a non-negative
        // nanosecond component. Plain `/` and `%` truncate toward zero: for
        // e.g. value = -1 they produce a negative remainder that wraps when
        // cast to the `u32` nanosecond argument, making `from_timestamp_opt`
        // return `None` and the `unwrap` below panic.
        let secs = value.div_euclid(1_000_000);
        let nsecs = value.rem_euclid(1_000_000) * 1000;
        Timestamp(NaiveDateTime::from_timestamp_opt(secs, nsecs as _).unwrap())
    }

    fn into_arrow(self) -> Self::ArrowType {
        self.0
            .signed_duration_since(NaiveDateTime::default())
            .num_microseconds()
            .unwrap()
    }
}
/// `Timestamptz` <-> Arrow timezone-tagged microsecond timestamp. Both sides
/// store microseconds since the Unix epoch, so the conversion is a plain
/// wrap/unwrap of the integer value.
impl FromIntoArrow for Timestamptz {
    type ArrowType = i64;

    fn from_arrow(value: Self::ArrowType) -> Self {
        Self::from_micros(value)
    }

    fn into_arrow(self) -> Self::ArrowType {
        self.timestamp_micros()
    }
}
/// `Interval` <-> Arrow `IntervalMonthDayNano`, packed as an `i128` with
/// months in bits 0..32, days in bits 32..64 and nanoseconds in bits 64..128.
impl FromIntoArrow for Interval {
    type ArrowType = i128;
    fn from_arrow(value: Self::ArrowType) -> Self {
        // XXX: the arrow-rs decoding is incorrect, so the fields are unpacked
        // by hand with the layout described above.
        // let (months, days, ns) = arrow_array::types::IntervalMonthDayNanoType::to_parts(value);
        let months = value as i32;
        let days = (value >> 32) as i32;
        let ns = (value >> 64) as i64;
        // RisingWave stores microseconds, Arrow stores nanoseconds.
        Interval::from_month_day_usec(months, days, ns / 1000)
    }
    fn into_arrow(self) -> Self::ArrowType {
        // XXX: the arrow-rs encoding is incorrect, so the fields are packed
        // by hand, mirroring `from_arrow` above.
        // arrow_array::types::IntervalMonthDayNanoType::make_value(
        //     self.months(),
        //     self.days(),
        //     // TODO: this may overflow and we need `try_into`
        //     self.usecs() * 1000,
        // )
        let m = self.months() as u128 & u32::MAX as u128;
        let d = (self.days() as u128 & u32::MAX as u128) << 32;
        let n = ((self.usecs() * 1000) as u128 & u64::MAX as u128) << 64;
        (m | d | n) as i128
    }
}
/// Serializes decimals into a `LargeBinaryArray` as their ASCII text
/// representation (e.g. b"1.234", b"+inf"); see `converts_generic!` above.
impl From<&DecimalArray> for arrow_array::LargeBinaryArray {
    fn from(array: &DecimalArray) -> Self {
        // 8 bytes per value is a capacity heuristic, not a limit; the
        // builder grows as needed.
        let mut builder =
            arrow_array::builder::LargeBinaryBuilder::with_capacity(array.len(), array.len() * 8);
        for value in array.iter() {
            builder.append_option(value.map(|d| d.to_string()));
        }
        builder.finish()
    }
}
// This arrow decimal type is used by iceberg source to read iceberg decimal into RW decimal.
/// Converts an Arrow `Decimal128Array` into a `DecimalArray`.
///
/// Three sentinel `i128` values encode the special decimals:
/// `i128::MIN + 1` -> NaN, `i128::MAX` -> +inf, `i128::MIN` -> -inf;
/// everything else is a normalized value scaled by the array's scale.
/// Negative scales are rejected.
impl TryFrom<&arrow_array::Decimal128Array> for DecimalArray {
    type Error = ArrayError;
    fn try_from(array: &arrow_array::Decimal128Array) -> Result<Self, Self::Error> {
        if array.scale() < 0 {
            bail!("support negative scale for arrow decimal")
        }
        let from_arrow = |value| {
            const NAN: i128 = i128::MIN + 1;
            let res = match value {
                NAN => Decimal::NaN,
                i128::MAX => Decimal::PositiveInf,
                i128::MIN => Decimal::NegativeInf,
                _ => Decimal::Normalized(
                    rust_decimal::Decimal::try_from_i128_with_scale(value, array.scale() as u32)
                        .map_err(ArrayError::internal)?,
                ),
            };
            Ok(res)
        };
        array
            .iter()
            .map(|o| o.map(from_arrow).transpose())
            .collect::<Result<Self, Self::Error>>()
    }
}
/// Parses decimals back out of their ASCII text representation in a
/// `LargeBinaryArray` (inverse of the `From<&DecimalArray>` impl above).
/// Fails on non-UTF-8 bytes or unparsable decimal text.
impl TryFrom<&arrow_array::LargeBinaryArray> for DecimalArray {
    type Error = ArrayError;
    fn try_from(array: &arrow_array::LargeBinaryArray) -> Result<Self, Self::Error> {
        array
            .iter()
            .map(|o| {
                o.map(|s| {
                    let s = std::str::from_utf8(s)
                        .map_err(|_| ArrayError::from_arrow(format!("invalid decimal: {s:?}")))?;
                    s.parse()
                        .map_err(|_| ArrayError::from_arrow(format!("invalid decimal: {s:?}")))
                })
                .transpose()
            })
            .try_collect()
    }
}
/// Serializes JSON values into a `LargeStringArray` as their text
/// representation.
impl From<&JsonbArray> for arrow_array::LargeStringArray {
    fn from(array: &JsonbArray) -> Self {
        // 16 bytes per value is a capacity heuristic, not a limit.
        let mut builder =
            arrow_array::builder::LargeStringBuilder::with_capacity(array.len(), array.len() * 16);
        for value in array.iter() {
            match value {
                Some(jsonb) => {
                    // `write!` streams the JSON text into the builder's value
                    // buffer; the empty `append_value` then terminates the
                    // current element without copying the text again.
                    write!(&mut builder, "{}", jsonb).unwrap();
                    builder.append_value("");
                }
                None => builder.append_null(),
            }
        }
        builder.finish()
    }
}
/// Parses JSON values back out of a `LargeStringArray` (inverse of the
/// impl above). Fails on unparsable JSON text.
impl TryFrom<&arrow_array::LargeStringArray> for JsonbArray {
    type Error = ArrayError;
    fn try_from(array: &arrow_array::LargeStringArray) -> Result<Self, Self::Error> {
        array
            .iter()
            .map(|o| {
                o.map(|s| {
                    s.parse()
                        .map_err(|_| ArrayError::from_arrow(format!("invalid json: {s}")))
                })
                .transpose()
            })
            .try_collect()
    }
}
/// Arrow `i256` -> `Int256`, round-tripped through the big-endian byte
/// representation shared by both types.
impl From<arrow_buffer::i256> for Int256 {
    fn from(value: arrow_buffer::i256) -> Self {
        Int256::from_be_bytes(value.to_be_bytes())
    }
}
/// `Int256Ref` -> Arrow `i256`, round-tripped through the big-endian byte
/// representation shared by both types.
impl<'a> From<Int256Ref<'a>> for arrow_buffer::i256 {
    fn from(val: Int256Ref<'a>) -> Self {
        arrow_buffer::i256::from_be_bytes(val.to_be_bytes())
    }
}
/// Converts an `Int256Array` into an Arrow `Decimal256Array` (scale 0).
impl From<&Int256Array> for arrow_array::Decimal256Array {
    fn from(array: &Int256Array) -> Self {
        array
            .iter()
            .map(|o| o.map(arrow_buffer::i256::from))
            .collect()
    }
}
/// Converts an Arrow `Decimal256Array` back into an `Int256Array`.
///
/// The owned `Int256` values are materialized into a `Vec` first because the
/// array builder collects from scalar *references* (`Int256Ref`), which must
/// borrow from somewhere.
impl From<&arrow_array::Decimal256Array> for Int256Array {
    fn from(array: &arrow_array::Decimal256Array) -> Self {
        let values = array.iter().map(|o| o.map(Int256::from)).collect_vec();
        values
            .iter()
            .map(|i| i.as_ref().map(|v| v.as_scalar_ref()))
            .collect()
    }
}
/// Converts a RisingWave `ListArray` into an Arrow `ListArray` by replaying
/// the flattened child array into a typed Arrow builder, element type by
/// element type.
impl TryFrom<&ListArray> for arrow_array::ListArray {
    type Error = ArrayError;
    fn try_from(array: &ListArray) -> Result<Self, Self::Error> {
        use arrow_array::builder::*;
        // Replays every element of the flattened child array `a` into
        // `builder`, using `array.offsets` to delimit the sublists and the
        // list-level null bitmap for validity. `append` must be passed the
        // per-element conversion matching the builder's value type.
        fn build<A, B, F>(
            array: &ListArray,
            a: &A,
            builder: B,
            mut append: F,
        ) -> arrow_array::ListArray
        where
            A: Array,
            B: arrow_array::builder::ArrayBuilder,
            F: FnMut(&mut B, Option<A::RefItem<'_>>),
        {
            let mut builder = ListBuilder::with_capacity(builder, a.len());
            for i in 0..array.len() {
                // Copy the i-th sublist's elements from the flattened child.
                for j in array.offsets[i]..array.offsets[i + 1] {
                    append(builder.values(), a.value_at(j as usize));
                }
                // `append(true)` closes a valid list, `append(false)` a null one.
                builder.append(!array.is_null(i));
            }
            builder.finish()
        }
        Ok(match &*array.value {
            ArrayImpl::Int16(a) => build(array, a, Int16Builder::with_capacity(a.len()), |b, v| {
                b.append_option(v)
            }),
            ArrayImpl::Int32(a) => build(array, a, Int32Builder::with_capacity(a.len()), |b, v| {
                b.append_option(v)
            }),
            ArrayImpl::Int64(a) => build(array, a, Int64Builder::with_capacity(a.len()), |b, v| {
                b.append_option(v)
            }),
            // Ordered floats are unwrapped to their raw f32/f64 payloads.
            ArrayImpl::Float32(a) => {
                build(array, a, Float32Builder::with_capacity(a.len()), |b, v| {
                    b.append_option(v.map(|f| f.0))
                })
            }
            ArrayImpl::Float64(a) => {
                build(array, a, Float64Builder::with_capacity(a.len()), |b, v| {
                    b.append_option(v.map(|f| f.0))
                })
            }
            ArrayImpl::Utf8(a) => build(
                array,
                a,
                StringBuilder::with_capacity(a.len(), a.data().len()),
                |b, v| b.append_option(v),
            ),
            // Int256 is encoded as Decimal256 with scale 0, matching
            // `ToArrowTypeConvert::int256_type_to_arrow`.
            ArrayImpl::Int256(a) => build(
                array,
                a,
                Decimal256Builder::with_capacity(a.len()).with_data_type(
                    arrow_schema::DataType::Decimal256(arrow_schema::DECIMAL256_MAX_PRECISION, 0),
                ),
                |b, v| b.append_option(v.map(Into::into)),
            ),
            ArrayImpl::Bool(a) => {
                build(array, a, BooleanBuilder::with_capacity(a.len()), |b, v| {
                    b.append_option(v)
                })
            }
            // Decimals travel as their ASCII text, same as at the top level.
            ArrayImpl::Decimal(a) => build(
                array,
                a,
                LargeBinaryBuilder::with_capacity(a.len(), a.len() * 8),
                |b, v| b.append_option(v.map(|d| d.to_string())),
            ),
            ArrayImpl::Interval(a) => build(
                array,
                a,
                IntervalMonthDayNanoBuilder::with_capacity(a.len()),
                |b, v| b.append_option(v.map(|d| d.into_arrow())),
            ),
            ArrayImpl::Date(a) => build(array, a, Date32Builder::with_capacity(a.len()), |b, v| {
                b.append_option(v.map(|d| d.into_arrow()))
            }),
            ArrayImpl::Timestamp(a) => build(
                array,
                a,
                TimestampMicrosecondBuilder::with_capacity(a.len()),
                |b, v| b.append_option(v.map(|d| d.into_arrow())),
            ),
            ArrayImpl::Timestamptz(a) => build(
                array,
                a,
                TimestampMicrosecondBuilder::with_capacity(a.len()),
                |b, v| b.append_option(v.map(|d| d.into_arrow())),
            ),
            ArrayImpl::Time(a) => build(
                array,
                a,
                Time64MicrosecondBuilder::with_capacity(a.len()),
                |b, v| b.append_option(v.map(|d| d.into_arrow())),
            ),
            ArrayImpl::Jsonb(a) => build(
                array,
                a,
                LargeStringBuilder::with_capacity(a.len(), a.len() * 16),
                |b, v| b.append_option(v.map(|j| j.to_string())),
            ),
            ArrayImpl::Serial(_) => todo!("list of serial"),
            // Structs cannot go through the generic builder path: the child
            // struct array is converted wholesale and wrapped with the
            // original offsets and null bitmap instead.
            ArrayImpl::Struct(a) => {
                let values = Arc::new(arrow_array::StructArray::try_from(a)?);
                arrow_array::ListArray::new(
                    Arc::new(arrow_schema::Field::new(
                        "item",
                        a.data_type().try_into()?,
                        true,
                    )),
                    arrow_buffer::OffsetBuffer::new(arrow_buffer::ScalarBuffer::from(
                        array
                            .offsets()
                            .iter()
                            .map(|o| *o as i32)
                            .collect::<Vec<i32>>(),
                    )),
                    values,
                    Some(array.null_bitmap().into()),
                )
            }
            ArrayImpl::List(_) => todo!("list of list"),
            ArrayImpl::Bytea(a) => build(
                array,
                a,
                BinaryBuilder::with_capacity(a.len(), a.data().len()),
                |b, v| b.append_option(v),
            ),
        })
    }
}
/// Converts an Arrow `ListArray` back into a RisingWave `ListArray`,
/// reconstructing the flattened child array, validity bitmap and offsets.
impl TryFrom<&arrow_array::ListArray> for ListArray {
    type Error = ArrayError;
    fn try_from(array: &arrow_array::ListArray) -> Result<Self, Self::Error> {
        use arrow_array::Array;
        Ok(ListArray {
            value: Box::new(ArrayImpl::try_from(array.values())?),
            bitmap: match array.nulls() {
                // A missing null buffer means every list is valid.
                Some(nulls) => nulls.iter().collect(),
                None => Bitmap::ones(array.len()),
            },
            offsets: array.offsets().iter().map(|o| *o as u32).collect(),
        })
    }
}
/// Converts a RisingWave `StructArray` into an Arrow `StructArray` by
/// converting the field schema and every child column, preserving the
/// struct-level validity bitmap.
impl TryFrom<&StructArray> for arrow_array::StructArray {
    type Error = ArrayError;
    fn try_from(array: &StructArray) -> Result<Self, Self::Error> {
        Ok(arrow_array::StructArray::new(
            array.data_type().as_struct().try_into()?,
            array
                .fields()
                .map(|arr| arr.as_ref().try_into())
                .try_collect::<_, _, ArrayError>()?,
            Some(array.null_bitmap().into()),
        ))
    }
}
impl TryFrom<&arrow_array::StructArray> for StructArray {
type Error = ArrayError;
fn try_from(array: &arrow_array::StructArray) -> Result<Self, Self::Error> {
use arrow_array::Array;
let arrow_schema::DataType::Struct(fields) = array.data_type() else {
panic!("nested field types cannot be determined.");
};
Ok(StructArray::new(
fields.into(),
array
.columns()
.iter()
.map(|a| ArrayImpl::try_from(a).map(Arc::new))
.try_collect()?,
(0..array.len()).map(|i| !array.is_null(i)).collect(),
))
}
}
#[cfg(test)]
mod tests {
    //! Round-trip tests: each test converts a RisingWave array to its Arrow
    //! counterpart and back, asserting the result equals the original
    //! (including null slots and, where relevant, special values).
    use super::arrow_array::Array as _;
    use super::*;
    #[test]
    fn bool() {
        let array = BoolArray::from_iter([None, Some(false), Some(true)]);
        let arrow = arrow_array::BooleanArray::from(&array);
        assert_eq!(BoolArray::from(&arrow), array);
    }
    #[test]
    fn i16() {
        let array = I16Array::from_iter([None, Some(-7), Some(25)]);
        let arrow = arrow_array::Int16Array::from(&array);
        assert_eq!(I16Array::from(&arrow), array);
    }
    #[test]
    fn f32() {
        let array = F32Array::from_iter([None, Some(-7.0), Some(25.0)]);
        let arrow = arrow_array::Float32Array::from(&array);
        assert_eq!(F32Array::from(&arrow), array);
    }
    #[test]
    fn date() {
        // Covers dates both after and before the Unix epoch.
        let array = DateArray::from_iter([
            None,
            Date::with_days(12345).ok(),
            Date::with_days(-12345).ok(),
        ]);
        let arrow = arrow_array::Date32Array::from(&array);
        assert_eq!(DateArray::from(&arrow), array);
    }
    #[test]
    fn time() {
        // The last representable microsecond of the day.
        let array = TimeArray::from_iter([None, Time::with_micro(24 * 3600 * 1_000_000 - 1).ok()]);
        let arrow = arrow_array::Time64MicrosecondArray::from(&array);
        assert_eq!(TimeArray::from(&arrow), array);
    }
    #[test]
    fn timestamp() {
        let array =
            TimestampArray::from_iter([None, Timestamp::with_micros(123456789012345678).ok()]);
        let arrow = arrow_array::TimestampMicrosecondArray::from(&array);
        assert_eq!(TimestampArray::from(&arrow), array);
    }
    #[test]
    fn interval() {
        // Exercises the hand-rolled i128 packing with both signs.
        let array = IntervalArray::from_iter([
            None,
            Some(Interval::from_month_day_usec(
                1_000_000,
                1_000,
                1_000_000_000,
            )),
            Some(Interval::from_month_day_usec(
                -1_000_000,
                -1_000,
                -1_000_000_000,
            )),
        ]);
        let arrow = arrow_array::IntervalMonthDayNanoArray::from(&array);
        assert_eq!(IntervalArray::from(&arrow), array);
    }
    #[test]
    fn string() {
        let array = Utf8Array::from_iter([None, Some("array"), Some("arrow")]);
        let arrow = arrow_array::StringArray::from(&array);
        assert_eq!(Utf8Array::from(&arrow), array);
    }
    #[test]
    fn decimal() {
        // Includes all three special decimals, which round-trip as text.
        let array = DecimalArray::from_iter([
            None,
            Some(Decimal::NaN),
            Some(Decimal::PositiveInf),
            Some(Decimal::NegativeInf),
            Some(Decimal::Normalized("123.4".parse().unwrap())),
            Some(Decimal::Normalized("123.456".parse().unwrap())),
        ]);
        let arrow = arrow_array::LargeBinaryArray::from(&array);
        assert_eq!(DecimalArray::try_from(&arrow).unwrap(), array);
    }
    #[test]
    fn jsonb() {
        let array = JsonbArray::from_iter([
            None,
            Some("null".parse().unwrap()),
            Some("false".parse().unwrap()),
            Some("1".parse().unwrap()),
            Some("[1, 2, 3]".parse().unwrap()),
            Some(r#"{ "a": 1, "b": null }"#.parse().unwrap()),
        ]);
        let arrow = arrow_array::LargeStringArray::from(&array);
        assert_eq!(JsonbArray::try_from(&arrow).unwrap(), array);
    }
    #[test]
    fn int256() {
        // Spans small values up to the 256-bit extremes.
        let values = [
            None,
            Some(Int256::from(1)),
            Some(Int256::from(i64::MAX)),
            Some(Int256::from(i64::MAX) * Int256::from(i64::MAX)),
            Some(Int256::from(i64::MAX) * Int256::from(i64::MAX) * Int256::from(i64::MAX)),
            Some(
                Int256::from(i64::MAX)
                    * Int256::from(i64::MAX)
                    * Int256::from(i64::MAX)
                    * Int256::from(i64::MAX),
            ),
            Some(Int256::min_value()),
            Some(Int256::max_value()),
        ];
        let array =
            Int256Array::from_iter(values.iter().map(|r| r.as_ref().map(|x| x.as_scalar_ref())));
        let arrow = arrow_array::Decimal256Array::from(&array);
        assert_eq!(Int256Array::from(&arrow), array);
    }
    #[test]
    fn struct_array() {
        // Empty array - risingwave to arrow conversion.
        let test_arr = StructArray::new(StructType::empty(), vec![], Bitmap::ones(0));
        assert_eq!(
            arrow_array::StructArray::try_from(&test_arr).unwrap().len(),
            0
        );
        // Empty array - arrow to risingwave conversion.
        let test_arr_2 = arrow_array::StructArray::from(vec![]);
        assert_eq!(StructArray::try_from(&test_arr_2).unwrap().len(), 0);
        // Struct array with primitive types. arrow to risingwave conversion.
        let test_arrow_struct_array = arrow_array::StructArray::try_from(vec![
            (
                "a",
                Arc::new(arrow_array::BooleanArray::from(vec![
                    Some(false),
                    Some(false),
                    Some(true),
                    None,
                ])) as arrow_array::ArrayRef,
            ),
            (
                "b",
                Arc::new(arrow_array::Int32Array::from(vec![
                    Some(42),
                    Some(28),
                    Some(19),
                    None,
                ])) as arrow_array::ArrayRef,
            ),
        ])
        .unwrap();
        let actual_risingwave_struct_array =
            StructArray::try_from(&test_arrow_struct_array).unwrap();
        let expected_risingwave_struct_array = StructArray::new(
            StructType::new(vec![("a", DataType::Boolean), ("b", DataType::Int32)]),
            vec![
                BoolArray::from_iter([Some(false), Some(false), Some(true), None]).into_ref(),
                I32Array::from_iter([Some(42), Some(28), Some(19), None]).into_ref(),
            ],
            [true, true, true, true].into_iter().collect(),
        );
        assert_eq!(
            expected_risingwave_struct_array,
            actual_risingwave_struct_array
        );
    }
    #[test]
    fn list() {
        // Null list, non-empty list and empty (but valid) list.
        let array = ListArray::from_iter([None, Some(vec![0, -127, 127, 50]), Some(vec![0; 0])]);
        let arrow = arrow_array::ListArray::try_from(&array).unwrap();
        assert_eq!(ListArray::try_from(&arrow).unwrap(), array);
    }
}
package uz.john.domain.model.person.details
import uz.john.data.remote.model.person.details.MovieCrewCreditData
import uz.john.util.formatDate
import uz.john.util.roundToOneDecimal
/**
 * Domain model for one crew credit of a person: a movie entry together with
 * the crew role (department/job) the person held on it.
 * Mapped from the remote DTO by [MovieCrewCreditData.toDomain].
 */
data class MovieCrewCredit(
    val adult: Boolean,
    val backdropPath: String?,   // nullable: not every movie has a backdrop image
    val genreIds: List<Int>,
    val id: Int,
    val mediaType: String?,
    val originalLanguage: String,
    val originalTitle: String,
    val overview: String,
    val popularity: Double,
    val posterPath: String?,     // nullable: not every movie has a poster image
    val releaseDate: String?,    // pre-formatted for display by toDomain()
    val title: String,
    val video: Boolean,
    val voteAverage: Double,     // rounded to one decimal place by toDomain()
    val voteCount: Int,
    val creditId: String,
    val department: String,
    val job: String
)
/**
 * Maps the remote [MovieCrewCreditData] DTO to the [MovieCrewCredit] domain
 * model, formatting the release date and rounding the vote average on the way.
 */
fun MovieCrewCreditData.toDomain() = MovieCrewCredit(
    adult = adult,
    backdropPath = backdropPath,
    genreIds = genreIds,
    id = id,
    mediaType = mediaType,
    originalLanguage = originalLanguage,
    originalTitle = originalTitle,
    overview = overview,
    popularity = popularity,
    posterPath = posterPath,
    // Normalize presentation concerns once, at the mapping boundary.
    releaseDate = releaseDate?.formatDate(),
    title = title,
    video = video,
    voteAverage = voteAverage.roundToOneDecimal(),
    voteCount = voteCount,
    creditId = creditId,
    department = department,
    job = job
)
import 'dart:async';
import 'package:flutter/material.dart';
import 'package:flutter_svg/flutter_svg.dart';
import 'package:google_fonts/google_fonts.dart';
import 'package:kidneyscan/bars/navbar.dart';
import 'package:kidneyscan/constants/colors/app_colors.dart';
import 'package:kidneyscan/keys/app_keys.dart';
import 'package:kidneyscan/screens/login_screen.dart';
import 'package:kidneyscan/utils/switch_screen.dart';
import 'package:responsive_sizer/responsive_sizer.dart';
import 'package:shared_preferences/shared_preferences.dart';
/// Entry screen shown briefly at startup while the app decides whether to
/// route the user to the main [NavBar] or to the [LoginScreen].
class SplashScreen extends StatefulWidget {
  const SplashScreen({super.key});

  @override
  State<SplashScreen> createState() => _SplashScreenState();
}
class _SplashScreenState extends State<SplashScreen> {
  // NOTE(review): currently unused; kept for interface parity — confirm
  // whether any caller still reads it before removing.
  bool gettingLogin = false;

  /// Touches the stored theme flag.
  ///
  /// NOTE(review): the value read here is discarded — presumably the theme
  /// is applied elsewhere; confirm whether this call is still needed.
  Future<void> getTheme() async {
    SharedPreferences prefs = await SharedPreferences.getInstance();
    prefs.getBool(AppKeys.drawerKey);
  }

  /// Reads the persisted login flag and, after a short splash delay,
  /// replaces this screen with either the main [NavBar] or the [LoginScreen].
  Future<void> checkLoginStatus() async {
    SharedPreferences prefs = await SharedPreferences.getInstance();
    bool isLoggedIn = prefs.getBool(AppKeys.loginKey) ?? false;
    bool isThemeSet = prefs.getBool(AppKeys.drawerKey) ?? false;
    // debugPrint is stripped from release builds, unlike print.
    debugPrint("Login key is set to: $isLoggedIn");
    debugPrint("Theme key is set to: $isThemeSet");

    // Navigate after a short delay so the splash screen stays visible.
    Timer(
      const Duration(seconds: 2),
      () {
        // Fix: the timer fires after an async gap — the widget may already
        // have been disposed, in which case using `context` would throw.
        if (!mounted) return;
        if (isLoggedIn) {
          SwitchScreen().pushReplace(
            context,
            NavBar(),
          );
        } else {
          SwitchScreen().pushReplace(
            context,
            const LoginScreen(),
          );
        }
      },
    );
  }

  @override
  void initState() {
    // Fix: call super.initState() first, per the Flutter State contract.
    super.initState();
    checkLoginStatus();
    getTheme();
  }

  @override
  Widget build(BuildContext context) {
    return Scaffold(
      backgroundColor: AppColors().primaryColor,
      body: Center(
        child: Column(
          mainAxisAlignment: MainAxisAlignment.center,
          crossAxisAlignment: CrossAxisAlignment.center,
          children: [
            SvgPicture.asset("assets/svgs/splash.svg"),
            Text(
              'Kidney Scan',
              style: GoogleFonts.pacifico(
                  fontSize: 35,
                  color: Colors.white,
                  fontWeight: FontWeight.normal),
            ),
            SizedBox(
              height: 2.h,
            ),
          ],
        ),
      ),
    );
  }
}
<template>
<view class="carlife">
<!-- <view
class="status-bar"
:style="{ height: custom.top + 'px' }"
/>
<view class="u-flex">
<view class="u-padding-left-22">
<u-icon
name="arrow-left"
size="34"
color="#000"
/>
</view>
<view class="tabbar-tab u-padding-left-40">
<u-tabs
:list="list"
:current="current"
active-color="#000"
gutter="40"
inactive-color="#000000c9"
font-size="34"
@change="handleTab"
/>
</view>
</view> -->
<view class="m-store-content">
<view class="u-flex u-text-center" style="overflow-x: scroll;">
<!-- <scroll-view scroll-x class="model-scroll" :scroll-into-view="'toView_' + tabCurrent"> -->
<view
v-for="(item, index) in tabIcon"
:key="index"
class="u-flex-1 u-margin-right-40"
@click="changeSearchType(item.id)"
>
<view v-if="tabCurrent == item.id" class="m-on-view">
<view class="m-icon-on">
<u-image
width="60rpx"
height="60rpx"
:src="item.icon_on"
/>
</view>
<view class="u-font-24">
<text>{{ item.name }}</text>
</view>
</view>
<view v-if="tabCurrent != item.id" class="m-off-view">
<view class="m-icon-off">
<u-image
width="60rpx"
height="60rpx"
:src="item.icon_off"
/>
</view>
<view class="u-font-24">
<text>{{ item.name }}</text>
</view>
</view>
</view>
<!-- </scroll-view> -->
</view>
<view class="m-store-list">
<u-gap
height="50"
bg-color="#FFFFFF"
/>
<view v-if="list && list.length == 0">
<u-empty
text="暂无门店列表"
mode="list"
/>
</view>
<view
v-for="(item, index) in list"
:key="index"
class="item"
@click="handleDetail(item)"
>
<view class="u-flex">
<view class="">
<view class="title">
<view class="name">
{{ item.name }}
</view>
</view>
<view class="address">
{{ item.address }}
</view>
</view>
<view class="u-flex">
{{ item.telephone }}
</view>
</view>
</view>
</view>
</view>
<u-loadmore
v-show="page > 1"
:status="status"
:load-text="loadText"
/>
</view>
</template>
<script>
const app = getApp();

export default {
  data() {
    return {
      custom: app.globalData.custom,
      userInfo: null,
      // Currently selected service tab (an id from `tabIcon`).
      tabCurrent: 1,
      tabIcon: [
        {id: 1, name: '加油', icon_off: 'https://image.51cheyaoshi.com/xcx/app/static/carlife/tab1_off.png', icon_on: 'https://image.51cheyaoshi.com/xcx/app/static/carlife/tab1_on.png'},
        {id: 2, name: '充电', icon_off: 'https://image.51cheyaoshi.com/xcx/app/static/carlife/tab2_off.png', icon_on: 'https://image.51cheyaoshi.com/xcx/app/static/carlife/tab2_on.png'},
        {id: 3, name: '洗车', icon_off: 'https://image.51cheyaoshi.com/xcx/app/static/carlife/tab3_off.png', icon_on: 'https://image.51cheyaoshi.com/xcx/app/static/carlife/tab3_on.png'},
        {id: 4, name: '美容', icon_off: 'https://image.51cheyaoshi.com/xcx/app/static/carlife/tab4_off.png', icon_on: 'https://image.51cheyaoshi.com/xcx/app/static/carlife/tab4_on.png'},
        {id: 5, name: '保养', icon_off: 'https://image.51cheyaoshi.com/xcx/app/static/carlife/tab5_off.png', icon_on: 'https://image.51cheyaoshi.com/xcx/app/static/carlife/tab5_on.png'},
        {id: 6, name: '检修', icon_off: 'https://image.51cheyaoshi.com/xcx/app/static/carlife/tab6_off.png', icon_on: 'https://image.51cheyaoshi.com/xcx/app/static/carlife/tab6_on.png'},
      ],
      // NOTE(review): placeholder seed rows — getStoreList() appends real
      // records on top of these; confirm whether they should be cleared.
      list: [
        { address: '徐汇区龙华中路', area: '徐汇区', city: '上海市', name: '中国石化加油站(望京新城站)', province: '上海市', telephone: '021-60718488' },
        { address: '徐汇区龙华中路', area: '徐汇区', city: '上海市', name: '中国石化加油站(望京新城站)', province: '上海市', telephone: '021-60718488' },
        { address: '徐汇区龙华中路', area: '徐汇区', city: '上海市', name: '中国石化加油站(望京新城站)', province: '上海市', telephone: '021-60718488' },
      ],
      status: 'loadmore', // load-more indicator state
      page: 0, // current page number (as reported back by the server)
      totalPages: 0, // total page count reported by the server
      loadText: {
        loadmore: '轻轻上拉',
        loading: '努力加载中',
        nomore: '实在没有了'
      }
    }
  },
  onShow(){
    // Reset paging and refresh the cached user info each time the page shows.
    this.page = 0
    this.totalPages = 0
    app.globalData.getUserInfo((data) => {
      this.userInfo = data;
      this.userId = data.id;
    })
  },
  onLoad() {
    // Resolve the device location once, then load the nearby store list.
    this.getLocat()
  },
  methods: {
    // Ask the platform for the current position (gcj02 so the coordinates
    // can be fed straight into uni.openLocation) and load stores around it.
    getLocat() {
      const pageThis = this
      uni.getLocation({
        type: 'gcj02',
        success: function (res) {
          console.log(res)
          // Fix: the fallback coordinates were previously assigned to
          // shadowed `let` locals and then unconditionally overwritten,
          // so they never took effect.
          let lat = res.latitude
          let lng = res.longitude
          if (res.errMsg != 'getLocation:ok') {
            lat = 31.18826
            lng = 121.43687
          }
          pageThis.getStoreList(lat, lng)
        },
      })
    },
    // Fetch one page of gas stations near (lat, lng) and append it to the list.
    async getStoreList(lat, lng) {
      const res = await this.$getRequest(this.$url.getGasStation, "POST", {
        page: this.page,
        "lat": lat,
        "lng": lng
      });
      if (res.data && res.data.records) {
        this.list = this.list.concat(res.data.records)
        this.totalPages = res.data.pages
        this.page = res.data.current
        this.status = 'loadmore'
      }
    },
    // Switch the active service tab. Per-tab data loading is still TODO.
    changeSearchType(id) {
      uni.showLoading({
        title: '加载中'
      })
      console.log(id)
      this.tabCurrent = id
      // TODO: reload the list for the selected service type.
      uni.hideLoading()
    }
  },
  onReachBottom() {
    // TODO: re-enable load-more paging once the backend list is wired up.
  },
}
</script>
<style lang="scss" scoped>
.carlife {
  .m-store-content {
    padding: 40rpx;

    // Active service tab: dark label over a tinted icon tile.
    .m-on-view {
      text {
        font-weight: 500;
        font-size: 12px;
        color: #0A0F2D;
      }
    }
    .m-icon-on {
      width: 96rpx;
      height: 96rpx;
      background: rgba(10, 15, 45, 0.08);
      box-shadow: 0px -2px 32px rgba(10, 15, 45, 0.02), 0px 8px 24px rgba(10, 15, 45, 0.04);
      border-radius: 12px;
      display: flex;
      justify-content: center;
      align-items: center;
      margin: 0 auto;
      margin-bottom: 16rpx;
    }

    // Inactive service tab: muted label over a plain white icon tile.
    .m-off-view {
      text {
        font-weight: 400;
        font-size: 12px;
        color: rgba(10, 15, 45, 0.5);
      }
    }
    .m-icon-off {
      width: 96rpx;
      height: 96rpx;
      background: #FFFFFF;
      box-shadow: 0px -2px 32px rgba(10, 15, 45, 0.02), 0px 8px 24px rgba(10, 15, 45, 0.04);
      border-radius: 12px;
      display: flex;
      justify-content: center;
      align-items: center;
      margin: 0 auto;
      margin-bottom: 16rpx;
    }
  }

  // One store card in the result list.
  .item {
    background: #FFFFFF;
    box-shadow: 0px -2px 32px rgba(10, 15, 45, 0.02), 0px 8px 24px rgba(10, 15, 45, 0.04);
    border-radius: 24rpx;
    margin-top: 24rpx;
    padding: 36rpx 30rpx;
    position: relative;
    .title {
      font-weight: 500;
      font-size: 15px;
      // Fix: "colos" was a typo, so this declaration was silently ignored.
      color: rgba(10, 15, 45, 0.8);
    }
    .address {
      font-weight: 400;
      font-size: 12px;
      color: rgba(10, 15, 45, 0.5);
    }
  }
}
</style> |
/* eslint-disable react/jsx-no-duplicate-props */
import { Grid, IconButton, InputAdornment } from '@material-ui/core';
import Button from '@material-ui/core/Button';
import Paper from '@material-ui/core/Paper';
import TextField from '@material-ui/core/TextField';
import Typography from '@material-ui/core/Typography';
import RemoveCircle from '@material-ui/icons/RemoveCircle';
import React, { useState } from 'react';
import { Mutation } from 'react-apollo';
import useStyles from '../../../formStyles';
import SubmitFormButton from '../../../SubmitFromButton';
import { useDynamicArrayForm, validate } from '../../../utils';
import { SET_METTING_AGENDA } from '../../mutations';
import validationSchema from './validation';
const MettingAgendaForm = ({ activeSession, history }) => {
const classes = useStyles();
const mettingAgenda = useDynamicArrayForm('', 2, 10);
const [errors, setErrors] = useState({});
const handleSubmit = (setMettingAgenda, client) => async event => {
event.preventDefault();
if (!activeSession) return;
const sessionID = activeSession._id;
setErrors({});
const validationRes = validate(validationSchema, mettingAgenda);
if (validationRes.errors) {
setErrors(validationRes.errors);
return;
}
const input = { mettingAgenda: validationRes.validateInput.array, sessionID };
await setMettingAgenda({ variables: { input } });
history.push('/');
};
return (
<Mutation mutation={SET_METTING_AGENDA}>
{(setMettingAgenda, { data, loading, client }) => (
<main className={classes.root}>
<Paper className={classes.paper}>
<Typography align="center" variant="h6" className={classes.title}>
Metting Agenda
</Typography>
<form onSubmit={handleSubmit(setMettingAgenda, client)} noValidate autoComplete="off">
<Grid container spacing={3}>
{mettingAgenda.array.map((mettingAgendaTopic, index) => (
<Grid item xs={12} key={'juryMember' + index}>
<TextField
id={`mettingAgendaTopic#${index + 1}`}
name={`mettingAgendaTopic#${index + 1}`}
label={`Metting Agenda Topic #${index + 1}`}
inputProps={{ 'data-index': index }}
error={Boolean(errors.Team)}
helperText={errors.Team}
onChange={mettingAgenda.handleElementChange}
value={mettingAgendaTopic}
variant="outlined"
fullWidth
InputProps={{
endAdornment: (
<InputAdornment position="end">
<IconButton
aria-label="Remove Team Member"
onClick={mettingAgenda.handleRemoveElement(index)}
>
<RemoveCircle color="error" />
</IconButton>
</InputAdornment>
),
}}
/>
</Grid>
))}
<Grid item xs={12} sm={6}></Grid>
<Grid item xs={12} sm={6}>
<Button onClick={mettingAgenda.handleAddElement} variant="contained" color="default" fullWidth>Add A Topic</Button>
</Grid>
<Grid item xs={12}>
<SubmitFormButton />
</Grid>
</Grid>
</form>
</Paper>
</main >
)}
</Mutation>
);
};
export default MettingAgendaForm; |
package services
import (
"fmt"
"net/http"
"strconv"
enc "github.com/Elessar1802/api/src/v1/internal/encoder"
"github.com/Elessar1802/api/src/v1/internal/err"
"github.com/Elessar1802/api/src/v1/internal/passwd"
repo "github.com/Elessar1802/api/src/v1/repository"
"github.com/go-pg/pg/v10"
)
// GetUser looks up a single user by its string id and returns it wrapped in
// an encoder response: 400 for a non-numeric id, 404 for a missing row.
func GetUser(db *pg.DB, id string) enc.Response {
	numericID, convErr := strconv.Atoi(id)
	if convErr != nil {
		return err.BadRequestResponse("Provided Id is malformed")
	}
	user := repo.User{Id: numericID}
	if selErr := db.Model(&user).WherePK().Select(); selErr != nil {
		return err.NotFoundErrorResponse()
	}
	return enc.Response{Code: http.StatusOK, Payload: user}
}
// GetUsers returns every user, optionally filtered by a case-insensitive
// substring match on the name when query is non-empty.
func GetUsers(db *pg.DB, query string) enc.Response {
	var users []repo.User
	var er error
	if query != "" {
		// Fix: the filter value was previously interpolated into the SQL
		// string with fmt.Sprintf, which allowed SQL injection through
		// `query`. Build only the LIKE pattern in Go and bind it with a
		// `?` placeholder so go-pg quotes it safely.
		pattern := fmt.Sprintf("%%%v%%", query)
		er = db.Model(&users).Where("name ilike ?", pattern).Select()
	} else {
		er = db.Model(&users).Select()
	}
	if er != nil {
		// we shouldn't get any errors here unless the table users doesn't exist
		return err.NotFoundErrorResponse(er.Error())
	}
	return enc.Response{Code: http.StatusOK, Payload: users}
}
// GetUsersCount reports the total number of rows in the users table.
func GetUsersCount(db *pg.DB) enc.Response {
	total, countErr := db.Model(&repo.User{}).Count()
	if countErr != nil {
		// Only reachable when the users table itself is missing.
		return err.NotFoundErrorResponse()
	}
	return enc.Response{Code: http.StatusOK, Payload: total}
}
// DeleteUser removes the user with the given string id: 400 for a
// non-numeric id, 404 when the delete fails.
func DeleteUser(db *pg.DB, id string) enc.Response {
	numericID, convErr := strconv.Atoi(id)
	if convErr != nil {
		return err.BadRequestResponse("Provided Id is malformed")
	}
	target := repo.User{Id: numericID}
	if _, delErr := db.Model(&target).WherePK().Delete(); delErr != nil {
		return err.NotFoundErrorResponse()
	}
	return enc.Response{Code: http.StatusOK}
}
// UpdateUser persists changes to a user's name and/or phone; all other
// columns are deliberately left untouched.
func UpdateUser(db *pg.DB, u repo.User) enc.Response {
	_, upErr := db.Model(&u).Column("name", "phone").WherePK().UpdateNotZero()
	if upErr != nil {
		// No row matched the primary key.
		return err.NotFoundErrorResponse(upErr.Error())
	}
	return enc.Response{Code: http.StatusOK}
}
func AddUser(db *pg.DB, u repo.User) (enc.Response) {
// NOTE: the following command allows us to get atomicity with transactions from Postgresql
tx, er := db.Begin()
defer tx.Close()
x, er := tx.Model(&u).Insert()
if er != nil {
_ = tx.Rollback()
return err.BadRequestResponse(er.Error())
}
if u.Role == "student" {
s := repo.Student{Id: u.Id, Class: u.Class}
_, er := tx.Model(&s).Insert()
if er != nil {
// this should never fail unless students table doesn't exist
_ = tx.Rollback()
return err.InternalServerErrorResponse(er.Error())
}
}
// the default password is their respective phone number
password := passwd.GetHash(u.Phone)
c := repo.Credential{Id: strconv.Itoa(u.Id), Password: password, Role: u.Role}
_, er = tx.Model(&c).Insert()
if er != nil {
_ = tx.Rollback()
// this should never fail unless credentials table doesn't exist
return err.InternalServerErrorResponse(er.Error())
}
if er := tx.Commit(); er != nil {
panic(er.Error())
}
return enc.Response{Code: http.StatusCreated, Payload: x}
} |
# Day 5 (System Security Part 2 & Scalability)
## Authentication and Authorization
1. Strong Authentication Mechanisms: Implement multi-factor authentication (MFA) to provide an additional layer of security beyond just passwords.
2. Least Privilege Principle: Ensure that users have the minimum levels of access (or permissions) needed to perform their tasks, reducing the risk of unauthorized access to sensitive information.
3. Role-Based Access Control (RBAC) and its role in internal and external access control.
## Incident Response and Monitoring
1. Continuous Monitoring: Implement continuous monitoring tools to detect and respond to security incidents in real-time.
2. Incident Response Plan: Have a clear and tested incident response plan in place to quickly address any security breaches or compliance issues.
3. Backup & Restore. RPO (Recovery Point Objective) & RTO (Recovery Time Objective). Defining SLAs
## User Education and Awareness
Training: Regularly train employees on the importance of security and compliance, including how to recognize phishing attempts and other common cyber threats, use stronger passwords, enable 2FA, etc.
## Scalability
1. Deploying a Highly Available Application
2. Explain Load Balancers and their role in deploying HA
3. Infrastructure Design of a High Available System (AWS, Local)
4. Two types: Horizontal & Vertical
Exercise Collaborative work to build a scalable end-to-end system that has security and development best practices in place.
Highly Available Backend using Docker
`docker-compose.yml` Example:
```
version: '3'
services:
backend1:
image: eddsnx3/astro-backend
container_name: backend1
build: .
networks:
- backend
ports:
- "3000"
env_file:
- .env
backend2:
image: eddsnx3/astro-backend
container_name: backend2
build: .
networks:
- backend
ports:
- "3000"
env_file:
- .env
backend3:
image: eddsnx3/astro-backend
container_name: backend3
build: .
networks:
- backend
ports:
- "3000"
env_file:
- .env
caddy:
image: caddy
container_name: caddy
restart: unless-stopped
networks:
- backend
ports:
- 80:80
- 443:443
env_file:
- .env
environment:
- BACKEND_URL=${BACKEND_URL:-backend.local}
- IP_WHITELIST=${IP_WHITELIST:-0.0.0.0/0}
volumes:
- caddy_data:/data
- caddy_config:/config
- .Caddyfile:/etc/caddy/Caddyfile
volumes:
caddy_data:
caddy_config:
networks:
backend:
```
`Caddyfile` Example:
```
{$BACKEND_URL} {
route {
@allowed {
path /*
remote_ip {$IP_WHITELIST}
}
reverse_proxy @allowed {
to backend1:3000 backend2:3000 backend3:3000
lb_policy round_robin
health_uri /health
health_interval 5s
health_timeout 2s
health_status 200
}
respond 403
}
}
``` |
use std::collections::BTreeMap;
use lazy_static::lazy_static;
use std::sync::Mutex;
use salvo::prelude::*;
mod handlers;
mod alumno;
mod db;
// Process-wide shared state, wrapped in mutexes because handlers may run
// concurrently under the async runtime.
lazy_static! {
    // In-memory student list keyed by id.
    static ref ALUMNOS: Mutex<alumno::ListaAlumnos> = Mutex::new(alumno::ListaAlumnos {alumnos: BTreeMap::new()});
    // Running student counter — presumably used by handlers to assign ids;
    // confirm in the handlers module.
    static ref CANT_ALUMNOS: Mutex<i32> = Mutex::new(0);
    // Single shared database connection.
    static ref DB: Mutex<db::Db> = Mutex::new(db::Db::new_connection());
}
#[tokio::main]
async fn main() {
    // Initialise stdout logging for the tracing ecosystem.
    tracing_subscriber::fmt().init();
    // Make sure the backing table exists before accepting requests.
    DB.lock().unwrap().create_table();
    //let router = Router::new().get(health_check);
    // REST routing: a bare GET health check at the root, plus CRUD-style
    // endpoints under /alumnos (list, fetch by id, create, update by id).
    let router = Router::new()
        .push(Router::new().get(handlers::health_check))
        .push(Router::new().path("alumnos").get(handlers::get_alumnos))
        .push(Router::new().path("alumnos/<id>").get(handlers::get_alumno))
        .push(Router::new().path("alumnos").post(handlers::post_alumno))
        .push(Router::new().path("alumnos/<id>").put(handlers::put_alumno));
    // Bind to loopback only — the service is not exposed to other hosts.
    let acceptor = TcpListener::new("127.0.0.1:5800").bind().await;
    Server::new(acceptor).serve(router).await;
}
---
title: "Create team from group"
description: "Create a new team from a group."
author: "nkramer"
ms.localizationpriority: high
ms.prod: "microsoft-teams"
doc_type: apiPageType
---
# Create team from group
Namespace: microsoft.graph
[!INCLUDE [beta-disclaimer](../../includes/beta-disclaimer.md)]
Create a new [team](../resources/team.md) from a [group](../resources/group.md).
In order to create a team, the group must have at least one owner.
If the group was created less than 15 minutes ago, the call to create a team might fail with a `404` error code because of ongoing replication delays. The recommended pattern is to retry the Create team call up to three times, with a 10-second delay between calls.
> **Note:** SharePoint provisioning doesn't occur in real time; it's a background process, and its completion can't be determined.
[!INCLUDE [national-cloud-support](../../includes/all-clouds.md)]
## Permissions
Choose the permission or permissions marked as least privileged for this API. Use a higher privileged permission or permissions [only if your app requires it](/graph/permissions-overview#best-practices-for-using-microsoft-graph-permissions). For details about delegated and application permissions, see [Permission types](/graph/permissions-overview#permission-types). To learn more about these permissions, see the [permissions reference](/graph/permissions-reference).
<!-- { "blockType": "permissions", "name": "team_put_teams" } -->
[!INCLUDE [permissions-table](../includes/permissions/team-put-teams-permissions.md)]
> **Note**: This API supports admin permissions. Global admins and Microsoft Teams service admins can access groups that they are not a member of.
## HTTP request
<!-- { "blockType": "ignored" } -->
```http
PUT /groups/{id}/team
```
## Request headers
| Header | Value |
|:---------------|:--------|
| Authorization | Bearer {token}. Required. |
| Content-Type | application/json |
## Request body
In the request body, supply a JSON representation of a [team](../resources/team.md) object.
## Response
If successful, this method should return a `201 Created` response code and a [team](../resources/team.md) object in the response body.
## Example
#### Request
The following example shows a request.
# [HTTP](#tab/http)
<!-- {
"blockType": "request",
"name": "create_team"
}-->
```http
PUT https://graph.microsoft.com/beta/groups/{id}/team
Content-type: application/json
{
"memberSettings": {
"allowCreateUpdateChannels": true
},
"messagingSettings": {
"allowUserEditMessages": true,
"allowUserDeleteMessages": true
},
"funSettings": {
"allowGiphy": true,
"giphyContentRating": "strict"
},
"discoverySettings": {
"showInTeamsSearchAndSuggestions": true
}
}
```
# [C#](#tab/csharp)
[!INCLUDE [sample-code](../includes/snippets/csharp/create-team-csharp-snippets.md)]
[!INCLUDE [sdk-documentation](../includes/snippets/snippets-sdk-documentation-link.md)]
# [CLI](#tab/cli)
[!INCLUDE [sample-code](../includes/snippets/cli/create-team-cli-snippets.md)]
[!INCLUDE [sdk-documentation](../includes/snippets/snippets-sdk-documentation-link.md)]
# [Go](#tab/go)
[!INCLUDE [sample-code](../includes/snippets/go/create-team-go-snippets.md)]
[!INCLUDE [sdk-documentation](../includes/snippets/snippets-sdk-documentation-link.md)]
# [Java](#tab/java)
[!INCLUDE [sample-code](../includes/snippets/java/create-team-java-snippets.md)]
[!INCLUDE [sdk-documentation](../includes/snippets/snippets-sdk-documentation-link.md)]
# [JavaScript](#tab/javascript)
[!INCLUDE [sample-code](../includes/snippets/javascript/create-team-javascript-snippets.md)]
[!INCLUDE [sdk-documentation](../includes/snippets/snippets-sdk-documentation-link.md)]
# [PHP](#tab/php)
[!INCLUDE [sample-code](../includes/snippets/php/create-team-php-snippets.md)]
[!INCLUDE [sdk-documentation](../includes/snippets/snippets-sdk-documentation-link.md)]
# [PowerShell](#tab/powershell)
[!INCLUDE [sample-code](../includes/snippets/powershell/create-team-powershell-snippets.md)]
[!INCLUDE [sdk-documentation](../includes/snippets/snippets-sdk-documentation-link.md)]
# [Python](#tab/python)
[!INCLUDE [sample-code](../includes/snippets/python/create-team-python-snippets.md)]
[!INCLUDE [sdk-documentation](../includes/snippets/snippets-sdk-documentation-link.md)]
---
#### Response
The following example shows the response.
>**Note:** The response object shown here might be shortened for readability.
<!-- {
"blockType": "response",
"truncated": true,
"@odata.type": "microsoft.graph.team"
} -->
```http
HTTP/1.1 201 Created
Content-type: application/json
{
"memberSettings": {
"allowCreateUpdateChannels": true,
"allowDeleteChannels": true,
"allowAddRemoveApps": true,
"allowCreateUpdateRemoveTabs": true,
"allowCreateUpdateRemoveConnectors": true
},
"guestSettings": {
"allowCreateUpdateChannels": true,
"allowDeleteChannels": true
},
"messagingSettings": {
"allowUserEditMessages": true,
"allowUserDeleteMessages": true,
"allowOwnerDeleteMessages": true,
"allowTeamMentions": true,
"allowChannelMentions": true
},
"funSettings": {
"allowGiphy": true,
"giphyContentRating": "strict",
"allowStickersAndMemes": true,
"allowCustomMemes": true
},
"discoverySettings": {
"showInTeamsSearchAndSuggestions": true
}
}
```
<!-- uuid: 8fcb5dbc-d5aa-4681-8e31-b001d5168d79
2015-10-25 14:57:30 UTC -->
<!--
{
"type": "#page.annotation",
"description": "Create Team",
"keywords": "",
"section": "documentation",
"tocPath": "",
"suppressions": []
}
-->
## See also
- [Creating a group with a team](/graph/teams-create-group-and-team)
- [Microsoft Graph service-specific throttling limits](/graph/throttling-limits#microsoft-teams-service-limits) |
<!DOCTYPE html>
<html lang="en" xmlns:th="http://www.thymeleaf.org">
<head>
    <!-- <meta content="upgrade-insecure-requests" http-equiv="Content-Security-Policy"> -->
    <!-- Shared head contents (meta, title, common CSS) come from the header fragment. -->
    <div th:insert="~{fragment/header :: header}"></div>
    <link href="https://fonts.googleapis.com/css?family=Lato:300,400,700&display=swap" rel="stylesheet">
    <link href="https://stackpath.bootstrapcdn.com/font-awesome/4.7.0/css/font-awesome.min.css" rel="stylesheet">
    <link href="https://cdn.jsdelivr.net/npm/semantic-ui@2.2.13/dist/semantic.min.css" rel="stylesheet">
</head>
<body>
<div class="container">
    <div th:insert="~{fragment/toolbar :: toolbar}"></div>
    <!-- Requested-service article list; each row navigates to the article's detail page. -->
    <section class="blog-list px-3 pr-0 p-md-5">
        <div class="container single-col-max-width">
            <div class="text-end">
                <button class="btn btn-primary" th:onclick="|location.href='@{/requested-service/registration}'|">서비스
                    요청
                </button>
            </div>
            <div class="table-responsive mt-5">
                <table class="primary table table-striped">
                    <thead>
                    <tr class="resume-wrapper mx-auto theme-bg-light p-5 mb-5 my-5 shadow-lg">
                        <th>#</th>
                        <th>제목</th>
                        <th>작성일</th>
                        <th>상태</th>
                        <th>작성자</th>
                    </tr>
                    </thead>
                    <tbody>
                    <!-- One row per article from the ${articles} model attribute. -->
                    <tr class="resume-wrapper mx-auto theme-bg-light p-5 mb-5 my-5 shadow-lg"
                        th:each="article,index : ${articles}"
                        th:onclick="|location.href='@{/requested-service/{articleId} (articleId=${article.id})}'|">
                        <td th:text="${index.count}"></td>
                        <td class="text-truncate" th:text="${article.articleTitle}">Lorem ipsuelis,
                            ultricies...
                        </td>
                        <td class="text-truncate" th:text="${article.createdAt}">Lorem ipsuelis,
                            ultricies...
                        </td>
                        <td class="text-truncate" th:text="${article.requestStatus.name()}">요청</td>
                        <td class="text-truncate" th:text="${article.nickname}">Lorem ipsuelis, ultricies...
                        </td>
                    </tr>
                    </tbody>
                </table>
            </div>
        </div>
    </section>
    <!-- Search filter: nickname text and request status, bound to ${condition}. -->
    <section class="cta-section py-5">
        <div class="container text-center single-col-max-width">
            <!--사용자 이름 , merge 여부 -->
            <form class="row g-3 justify-content-end" method="get" th:action th:object="${condition}">
                <div class="col-auto">
                    <input class="form-control" id="nickname" placeholder="닉네임 검색"
                           th:field="*{nickname}"
                           type="text">
                </div>
                <div class="col-auto">
                    <select class="form-select" th:field="*{requestStatus}" id="merge"
                            aria-label="Floating label select example">
                        <option value="">상태</option>
                        <option value="COMPLETE">요청 완료</option>
                        <option value="BEFORE">요청 대기</option>
                        <option value="DEFER">요청 보류</option>
                        <option value="FAIL">요청 거절</option>
                    </select>
                </div>
                <div class="col-auto">
                    <button class="btn btn-primary" type="submit">검색</button>
                </div>
            </form>
        </div>
    </section>
    <div th:insert="~{fragment/footer :: footer}"></div>
</div><!--//main-wrapper-->
<!-- Page scripts: jQuery/Bootstrap stack plus shared library fragment. -->
<script src="/js/jquery.min.js"></script>
<script src="/js/popper.js"></script>
<script src="/js/bootstrap.min.js"></script>
<script src="https://cdn.jsdelivr.net/npm/semantic-ui@2.2.13/dist/semantic.min.js"></script>
<script src="/js/main.js"></script>
<div th:insert="~{fragment/library :: libraries}"></div>
</body>
</html>
package com.kaj.myapp.board;
import com.kaj.myapp.auth.Auth;
import com.kaj.myapp.auth.AuthUser;
import com.kaj.myapp.board.entity.Board;
import com.kaj.myapp.board.repository.BoardRepository;
import com.kaj.myapp.board.request.BoardModifyRequest;
import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.security.SecurityRequirement;
import io.swagger.v3.oas.annotations.tags.Tag;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageRequest;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*;
import java.util.Date;
import java.util.Optional;
@Tag(name="게시판 관리 처리 API")
@RestController
@RequestMapping(value = "/api/boards")
public class BoardController {
@Autowired
BoardRepository boRepo;
/**
 * Returns the board with the given number: 404 when it does not exist,
 * 200 for the owner, 403 (with the board still in the body) for anyone else.
 *
 * NOTE(review): the 403 branch intentionally(?) includes the board in the
 * response body — presumably the client renders it read-only; confirm this
 * is wanted, since it exposes the content to non-owners.
 */
@Operation(summary = "게시글 상세 조회", security = { @SecurityRequirement(name = "bearer-key") })
@Auth
@GetMapping(value = "/{boardNo}")
public ResponseEntity getBoard(@PathVariable long boardNo, @RequestAttribute AuthUser authUser) {
    Optional<Board> found = boRepo.findById(boardNo);
    if (!found.isPresent()) {
        return ResponseEntity.status(HttpStatus.NOT_FOUND).build();
    }
    Board board = found.get();
    HttpStatus status = board.getNickname().equals(authUser.getNickname())
            ? HttpStatus.OK
            : HttpStatus.FORBIDDEN;
    return ResponseEntity.status(status).body(board);
}
/**
 * Pages through the boards written by the given nickname (my-page view),
 * ordered by board number ascending.
 *
 * NOTE(review): any authenticated user can query any nickname here —
 * confirm whether this endpoint should be restricted to the caller's own
 * nickname (authUser.getNickname()).
 */
@Operation(summary = "마이페이지 유저 본인의 게시글 페이징 조회", security = { @SecurityRequirement(name = "bearer-key") })
@Auth
@GetMapping(value = "/nickname/{nickname}")
public ResponseEntity<Page<Board>> getBoardsPagingNickname(@PathVariable String nickname, @RequestParam int page, @RequestParam int size, @RequestAttribute AuthUser authUser){
    // Fix: removed the leftover System.out.println debug statement.
    return ResponseEntity.status(HttpStatus.OK)
            .body(boRepo.findByNicknameOrderByNoAsc(nickname, PageRequest.of(page, size)));
}
/** Pages through all boards, ordered by board number descending (newest first). */
@Operation(summary = "게시글 페이징 조회", security = { @SecurityRequirement(name = "bearer-key") })
@Auth
@GetMapping(value = "/paging")
public Page<Board> getBoardsPaging(@RequestParam int page, @RequestParam int size, @RequestAttribute AuthUser authUser){
    // Fix: removed leftover debug prints of the paging parameters.
    return boRepo.findByOrderByNoDesc(PageRequest.of(page, size));
}
/**
 * Pages through boards whose request field contains the given text
 * (used for the inquiry/recommendation filter).
 */
@Operation(summary = "게시글 문의/추천 페이징 조회", security = { @SecurityRequirement(name = "bearer-key") })
@Auth
@GetMapping(value = "paging/request")
public Page<Board> getBoardsPagingRequest(
        @RequestParam int page,
        @RequestParam int size,
        @RequestParam String request, @RequestAttribute AuthUser authUser) {
    // Fix: removed the leftover System.out.println debug statement.
    return boRepo.findByRequestContains(request, PageRequest.of(page, size));
}
/**
 * Pages through boards filtered by exactly one optional criterion, checked
 * in priority order: nickname, then title, then content, then species.
 * With no criterion supplied, all boards are returned ordered by number.
 */
@Operation(summary = "게시글 검색 페이징 조회", security = { @SecurityRequirement(name = "bearer-key") })
@Auth
@GetMapping(value = "paging/search")
public Page<Board> getBoardsPagingSearch(
        @RequestParam int page,
        @RequestParam int size,
        @RequestParam(required = false) String nickname,
        @RequestParam(required = false) String title,
        @RequestParam(required = false) String content,
        @RequestParam(required = false) String species, @RequestAttribute AuthUser authUser) {
    // Fix: removed the leftover System.out.println debug statement.
    PageRequest pageRequest = PageRequest.of(page, size);
    if (nickname != null) {
        return boRepo.findByNicknameContains(nickname, pageRequest);
    } else if (title != null) {
        return boRepo.findByTitleContains(title, pageRequest);
    } else if (content != null) {
        return boRepo.findByContentContains(content, pageRequest);
    } else if (species != null) {
        return boRepo.findBySpeciesContains(species, pageRequest);
    } else {
        return boRepo.findByOrderByNoAsc(pageRequest);
    }
}
/**
 * Creates a new board post owned by the authenticated user.
 * Responds 400 when any required text field (request, title, content,
 * species) is missing or empty; otherwise stamps the author nickname and
 * creation time server-side, saves, and responds 201.
 */
@Operation(summary = "게시글 추가", security = { @SecurityRequirement(name = "bearer-key") })
@Auth
@PostMapping
public ResponseEntity addBoard (@RequestBody Board board, @RequestAttribute AuthUser authUser){
    // All required text fields must be present and non-empty.
    if (isBlank(board.getRequest()) || isBlank(board.getTitle())
            || isBlank(board.getContent()) || isBlank(board.getSpecies())) {
        return ResponseEntity.status(HttpStatus.BAD_REQUEST).build();
    }
    // Author and timestamp are set here, never trusted from the request body.
    board.setNickname(authUser.getNickname());
    board.setCreatedTime(new Date().getTime());
    // save() never returns null (Spring Data contract), so success is always 201;
    // the previous unreachable "200 OK" fallback has been removed.
    boRepo.save(board);
    return ResponseEntity.status(HttpStatus.CREATED).build();
}

/** True when the value is missing or empty. */
private static boolean isBlank(String value) {
    return value == null || value.isEmpty();
}
/**
 * Delete a board post (DELETE /{no}).
 * Only the post's author may delete it.
 *
 * @param no       board number (path variable)
 * @param authUser authenticated user performing the deletion
 * @return 200 on success, 404 when the post does not exist,
 *         403 when the caller is not the author
 */
@Operation(summary = "게시글 삭제", security = { @SecurityRequirement(name = "bearer-key") })
@Auth
@DeleteMapping(value = "/{no}")
public ResponseEntity removeBoard(@PathVariable long no, @RequestAttribute AuthUser authUser){
    System.out.println(no + "7");
    Optional<Board> target = boRepo.findById(no);
    if (!target.isPresent()) {
        return ResponseEntity.status(HttpStatus.NOT_FOUND).build();
    }
    // Ownership check: only the author may remove their own post.
    boolean callerIsAuthor = target.get().getNickname().equals(authUser.getNickname());
    if (!callerIsAuthor) {
        return ResponseEntity.status(HttpStatus.FORBIDDEN).build();
    }
    boRepo.deleteById(no);
    return ResponseEntity.status(HttpStatus.OK).build();
}
/**
 * Partially update a board post (PUT /{no}).
 * Text fields (request, title, content, petname) are applied only when the
 * request carries a non-empty value; species and image are overwritten
 * unconditionally — NOTE(review): that asymmetry may be intentional (to allow
 * clearing them) but could also wipe data on partial updates; confirm against
 * the client contract for BoardModifyRequest.
 *
 * @param no       board number (path variable)
 * @param board    partial-update payload
 * @param authUser authenticated user; must be the post's author
 * @return 200 on success, 404 when the post does not exist,
 *         403 when the caller is not the author
 */
@Operation(summary = "게시글 수정", security = { @SecurityRequirement(name = "bearer-key") })
@Auth
@PutMapping(value = "/{no}")
public ResponseEntity modifyBoard(@PathVariable long no, @RequestBody BoardModifyRequest board, @RequestAttribute AuthUser authUser){
System.out.println(no + "8"); // debug trace left in place
Optional<Board> findedBoard = boRepo.findByNo(no);
if(!findedBoard.isPresent()){
return ResponseEntity.status(HttpStatus.NOT_FOUND).build(); // no such post
}
Board toModifyBoard = findedBoard.get();
// try{
// Long convertValue = Long.valueOf(board.getRequest());
// toModifyBoard.setRequest(convertValue);
// }catch (NumberFormatException e){
// return ResponseEntity.status(HttpStatus.BAD_REQUEST).build();
// }
// Only the author may edit their own post.
if (!toModifyBoard.getNickname().equals(authUser.getNickname())){
return ResponseEntity.status(HttpStatus.FORBIDDEN).build();
}
// Apply text fields only when a non-empty replacement was supplied.
if(board.getRequest() != null && !board.getRequest().isEmpty()){
toModifyBoard.setRequest(board.getRequest());
}
if(board.getTitle() != null && !board.getTitle().isEmpty()){
toModifyBoard.setTitle(board.getTitle());
}
if(board.getContent() != null && !board.getContent().isEmpty()){
toModifyBoard.setContent(board.getContent());
}
if(board.getPetname() != null && !board.getPetname().isEmpty()){
toModifyBoard.setPetname(board.getPetname());
}
// Species and image are overwritten unconditionally (see note above).
toModifyBoard.setSpecies(board.getSpecies());
toModifyBoard.setImage(board.getImage());
boRepo.save(toModifyBoard);
return ResponseEntity.ok().build();
}
} |
<!DOCTYPE html>
<html>
<head>
<title>My page</title>
<style>
header {
background-color: whitesmoke;
padding: 20px;
text-align: center;
}
.center-container {
display: flex;
flex-direction: column;
align-items: center;
min-height: calc(100vh - 80px); /* Subtract header height */
}
.block {
width: 800px;
height: 50px;
background-color: gray;
margin-top: 10px;
transition: all 0.3s ease-in-out;
display: flex;
align-items: center;
justify-content: space-between;
font-size: 16px;
color: white;
padding: 0 10px;
}
.block.selected {
transform: scale(1.2);
background-color: blue;
}
#addButton {
width: 25px;
height: 25px;
transition: all 0.3s ease-in-out;
}
#addButton:hover {
transform: scale(1.2);
}
.doneButton {
background-color: red;
border: none;
color: white;
padding: 5px 10px;
text-align: center;
text-decoration: none;
display: inline-block;
font-size: 16px;
margin: 0 2px;
cursor: pointer;
}
</style>
</head>
<body>
<header>
<h1>My To Do List</h1>
</header>
<input type="text" id="textInput" placeholder="Add a task!" />
<button id="addButton">+</button>
<div class="center-container">
<div id="container"></div>
</div>
<script>
// To-do list behaviour: add task blocks, remove them via their "Done"
// button, and move a highlight up/down with the arrow keys.
const addButton = document.getElementById('addButton');
const textInput = document.getElementById('textInput');
const container = document.getElementById('container');
const blocks = []; // references to all task blocks, in display order
let selectedBlockIndex = -1; // -1 means no block is selected

addButton.addEventListener('click', function (event) {
  const blockContent = textInput.value;
  if (!blockContent) return; // do not create empty blocks
  const block = document.createElement('div');
  block.className = 'block';
  const content = document.createElement('span');
  content.textContent = blockContent;
  block.appendChild(content);
  const doneButton = document.createElement('button');
  doneButton.textContent = 'Done';
  doneButton.className = 'doneButton';
  doneButton.addEventListener('click', function () {
    container.removeChild(block);
    const index = blocks.indexOf(block);
    if (index > -1) {
      blocks.splice(index, 1);
      // FIX: the old code called selectBlock(-1) here, which crashed when
      // the last block was removed ((-1 + 0) % 0 is NaN, so
      // blocks[NaN].classList threw) and left a stale index when a block
      // before the selection was removed. Adjust the index directly instead.
      if (selectedBlockIndex === index) {
        selectedBlockIndex = -1; // the selected block is gone
      } else if (selectedBlockIndex > index) {
        selectedBlockIndex -= 1; // selection shifted left by the splice
      }
    }
  });
  block.appendChild(doneButton);
  container.appendChild(block);
  blocks.push(block);
  textInput.value = ''; // clear the input field
});

// Mouse hover over add button: enlargement transition.
addButton.addEventListener('mouseover', function (event) {
  addButton.style.transform = 'scale(1.2)';
});
addButton.addEventListener('mouseout', function (event) {
  addButton.style.transform = 'scale(1)';
});

// Arrow-key navigation over the blocks.
document.addEventListener('keydown', function (event) {
  // only if blocks exist
  if (blocks.length === 0) {
    return;
  }
  if (event.keyCode === 38) { // up arrow
    selectBlock(selectedBlockIndex - 1);
  } else if (event.keyCode === 40) { // down arrow
    selectBlock(selectedBlockIndex + 1);
  }
});

// Highlight the block at `index`, wrapping around both ends, and clear the
// previous highlight. Safe to call with any integer and with no blocks.
function selectBlock(index) {
  if (blocks.length === 0) { // guard: modulo by zero would yield NaN
    selectedBlockIndex = -1;
    return;
  }
  if (selectedBlockIndex >= 0 && selectedBlockIndex < blocks.length) {
    blocks[selectedBlockIndex].classList.remove('selected');
  }
  // Double modulo handles arbitrarily negative indices, not just -1.
  selectedBlockIndex = ((index % blocks.length) + blocks.length) % blocks.length;
  blocks[selectedBlockIndex].classList.add('selected');
}
</script>
</body>
</html> |
const express = require("express");
const router = express.Router();
const cartModel = require("../models/cart.model.js");
const productRepository = require("../repositories/product.repository.js");
const ProductRepository = new productRepository();
const passport = require("passport");
// GET /products — JWT-protected, paginated product listing rendered with
// the "products" view. Paging comes from ?page and ?limit query params.
router.get("/products", passport.authenticate('jwt', { session: false }), async (req, res) => {
    try {
        const { limit = 2, page = 1 } = req.query;
        const paginated = await ProductRepository.getAll({
            page: parseInt(page),
            limit: parseInt(limit)
        });
        // Expose each Mongoose doc as a plain object with `id` instead of `_id`,
        // which is the shape the template iterates over.
        const productos = paginated.docs.map((doc) => {
            const { _id, ...fields } = doc.toObject();
            return { id: _id, ...fields };
        });
        const cartId = req.user.cart.toString();
        res.render("products", {
            productos,
            hasPrevPage: paginated.hasPrevPage,
            hasNextPage: paginated.hasNextPage,
            prevPage: paginated.prevPage,
            nextPage: paginated.nextPage,
            currentPage: paginated.page,
            totalPages: paginated.totalPages,
            cartId
        });
    } catch (error) {
        console.error("Error al obtener productos", error);
        res.status(500).json({
            status: 'error',
            error: "Error interno del servidor"
        });
    }
});
// GET /carts/:cid — render a cart with per-line totals and the overall total.
router.get("/carts/:cid", async (req, res) => {
    const cartId = req.params.cid;
    try {
        // FIX: population had been commented out, but the code below calls
        // item.product.toObject() and reads product.price — both only exist on
        // a populated Product document, so every request with items failed.
        // "products.product" populates the ref nested inside each line item.
        const carrito = await cartModel.findById(cartId).populate("products.product");
        if (!carrito) {
            console.log("No existe ese carrito con el id");
            return res.status(404).json({ error: "Carrito no encontrado" });
        }
        // Accumulate the cart total while shaping each line for the template.
        let totalCompra = 0;
        const productosEnCarrito = carrito.products.map(item => {
            const product = item.product.toObject();
            const quantity = item.quantity;
            const totalPrice = product.price * quantity;
            totalCompra += totalPrice;
            return {
                product: { ...product, totalPrice },
                quantity,
                cartId
            };
        });
        res.render("carts", { productos: productosEnCarrito, totalCompra, cartId });
    } catch (error) {
        console.error("Error al obtener el carrito", error);
        res.status(500).json({ error: "Error interno del servidor" });
    }
});
// GET / — landing page: render the chat view.
router.get("/", async (req, res) => res.render("chat"));
module.exports = router; |
; Read language codes from lang.txt
FileRead, lang, lang.txt ; Read the whole of "lang.txt" into the variable "lang"
StringSplit, lang, lang, `n, `r ; Split on newlines into the pseudo-array lang1, lang2, ... (lang0 holds the count); lang1/lang2 are used by the hotkey below
; Define translation function
; Translate `text` from sourceLang to targetLang via the MyMemory REST API
; and return the translated string, crudely de-escaped.
; NOTE(review): `text` is interpolated into the URL without URL-encoding, so
; spaces, &, #, etc. in the selection will corrupt the request — confirm and
; encode before sending.
; NOTE(review): in AutoHotkey v1 the 4th parameter of StrReplace is
; OutputVarCount, not a mode flag — passing the literal "All" looks wrong
; (replacement already applies to all occurrences by default); verify.
TranslateText(text, sourceLang, targetLang) {
UrlEncode := ComObjCreate("WinHttp.WinHttpRequest.5.1") ; WinHTTP COM object for the GET request (despite its name it performs no URL-encoding)
UrlEncode.Open("GET", "https://api.mymemory.translated.net/get?q=" . text . "&langpair=" . sourceLang . "|" . targetLang, false) ; Synchronous GET with text and language pair in the query string
UrlEncode.Send() ; Fire the request and block until it completes
encodedText := UrlEncode.ResponseText ; Raw JSON response body
encodedText := RegExReplace(encodedText, ".*""translatedText"":""([^""]+).*", "$1") ; Extract the translatedText field by regex (AHK v1 has no JSON parser)
encodedText := StrReplace(encodedText, "\\\", "\", "All") ; Collapse escaped backslashes
encodedText := StrReplace(encodedText, "\\", "", "All") ; Drop remaining backslash escapes
encodedText := StrReplace(encodedText, "\n", "", "All") ; Strip literal \n sequences
encodedText := StrReplace(encodedText, "\r", "", "All") ; Strip literal \r sequences
return encodedText ; Return the cleaned translated text
}
; Define hotkey to translate clipboard text
^+z:: ; CTRL+SHIFT+Z — translate the currently selected text
clipboard := "" ; Clear the clipboard so ClipWait can detect the fresh copy
SendInput ^c ; Copy the current selection
ClipWait, 1 ; Wait up to 1 second for clipboard data; continues regardless on timeout
text := clipboard ; Grab the copied text
translatedText := TranslateText(text, lang1, lang2) ; lang1/lang2 were created by StringSplit from lang.txt at startup
clipboard := translatedText ; Replace the clipboard contents with the translation
MsgBox, 0, Translation Complete!, The translated text has been copied to the clipboard. ; Notify the user
return ; End of hotkey definition
// Cached DOM references shared by the OCR / registration flow below.
const ocrResultsContainer = document.getElementById('ocr-results'); // panel showing OCR output
const ocrTextarea = document.getElementById('ocr-text'); // textarea holding recognized text
const img = document.querySelector('img'); // image preview target (first <img> on the page)
const progress = document.querySelector('.progress'); // OCR progress label
const enrollmentTableBody = document.querySelector('#enrollment-table tbody'); // NOTE(review): appears unused in this file — confirm before removing
// Handle the "upload enrollment" action: preview the chosen image, then OCR it.
function uploadEnrollment() {
    const fileSelector = document.getElementById('enrollment-upload');
    const [file] = fileSelector.files;
    if (!file) {
        alert('Please select an image file.');
        return;
    }
    displayImage(file);
    performOCR(file);
}
// Show the selected file in the preview <img>.
function displayImage(file) {
    // FIX: a File is already a Blob — re-wrapping it in a new Blob with a
    // hard-coded 'image/jpg' type was unnecessary and mislabeled PNGs etc.
    // createObjectURL accepts the File directly and keeps its real MIME type.
    img.src = window.URL.createObjectURL(file);
}
// Run Tesseract OCR on `file`, report progress, then parse year / session /
// campus-delivery / subject-code values and the major out of the recognized
// text and load them into the editable "extracted values" table.
function performOCR(file) {
ocrTextarea.innerHTML = '';
const rec = new Tesseract.TesseractWorker();
rec.recognize(file)
.progress(function (response) {
// Show "recognizing text <fraction>" while OCR runs, raw status otherwise.
if (response.status === 'recognizing text') {
progress.innerHTML = response.status + ' ' + response.progress;
} else {
progress.innerHTML = response.status;
}
})
.then(function (data) {
ocrTextarea.value = data.text;
ocrResultsContainer.style.display = 'block';
// Patterns for the fields expected on an enrollment record.
const yearRegex = /\b(20\d{2})\b/;
const sessionRegex = /DXB UG (Winter|Autumn|Spring) @ (\w+\/ On Campus)\b/;
const subjectCodeRegex = /\b([A-Z]{4}\d{3})\b/;
const regex = /Major:\s*([^\n]+)/;
const lines = data.text.split('\n');
console.log("Lines: ",lines);
// NOTE(review): these consts are local to this callback, while
// updateArraysFromTable() assigns same-named implicit globals — the two
// sets of arrays are distinct objects. Confirm this is intended.
const yearArray = [];
const sessionArray = [];
const campusDeliveryArray = [];
const subjectCodeArray = [];
// The major appears once, so match against the whole text.
const match = regex.exec(lines.join('\n'));
const major = match ? match[1].trim() : null;
// Collect the first match of each pattern per line.
lines.forEach(line => {
const yearMatch = line.match(yearRegex);
const sessionMatch = line.match(sessionRegex);
const subjectCodeMatch = line.match(subjectCodeRegex);
if (yearMatch) {
yearArray.push(yearMatch[1]);
}
if (sessionMatch) {
sessionArray.push(sessionMatch[1]);
campusDeliveryArray.push(sessionMatch[2]);
}
if (subjectCodeMatch) {
subjectCodeArray.push(subjectCodeMatch[1]);
}
});
console.log('Year Array:', yearArray);
console.log('Session Array:', sessionArray);
console.log('Campus/Delivery Array:', campusDeliveryArray);
console.log('Subject Code Array:', subjectCodeArray);
console.log("Major:", major);
// Rebuild the editable extracted-values table, one row per index, padding
// shorter arrays with empty strings.
const table = document.getElementById('extracted-values-table');
const tbody = table.getElementsByTagName('tbody')[0];
tbody.innerHTML = '';
const maxRows = Math.max(yearArray.length, sessionArray.length, campusDeliveryArray.length, subjectCodeArray.length);
for (let i = 0; i < maxRows; i++) {
const newRow = tbody.insertRow();
const yearCell = newRow.insertCell(0);
const sessionCell = newRow.insertCell(1);
const campusDeliveryCell = newRow.insertCell(2);
const subjectCodeCell = newRow.insertCell(3);
// Cells are editable so the user can correct OCR mistakes in place.
yearCell.contentEditable = true;
sessionCell.contentEditable = true;
campusDeliveryCell.contentEditable = true;
subjectCodeCell.contentEditable = true;
yearCell.textContent = yearArray[i] || '';
sessionCell.textContent = sessionArray[i] || '';
campusDeliveryCell.textContent = campusDeliveryArray[i] || '';
subjectCodeCell.textContent = subjectCodeArray[i] || '';
}
table.style.display = 'block';
updateArraysFromTable();
});
}
// Re-read every row of the extracted-values table (after user edits),
// rebuild the four value arrays, then refresh the project table and push the
// updated data to the server.
// NOTE(review): yearArray/sessionArray/campusDeliveryArray/subjectCodeArray
// are assigned without any declaration, creating implicit globals in sloppy
// mode — this would throw a ReferenceError under strict mode or as an ES
// module, and these globals are distinct from the local consts of the same
// names inside performOCR(). Confirm and declare them explicitly.
function updateArraysFromTable() {
const table = document.getElementById('extracted-values-table');
const tbody = table.getElementsByTagName('tbody')[0];
const updatedYearArray = [];
const updatedSessionArray = [];
const updatedCampusDeliveryArray = [];
const updatedSubjectCodeArray = [];
// One table row per record; column order: year, session, campus, subject code.
for (let i = 0; i < tbody.rows.length; i++) {
const row = tbody.rows[i];
const updatedYear = row.cells[0].textContent.trim();
const updatedSession = row.cells[1].textContent.trim();
const updatedCampusDelivery = row.cells[2].textContent.trim();
const updatedSubjectCode = row.cells[3].textContent.trim();
updatedYearArray.push(updatedYear);
updatedSessionArray.push(updatedSession);
updatedCampusDeliveryArray.push(updatedCampusDelivery);
updatedSubjectCodeArray.push(updatedSubjectCode);
}
yearArray = updatedYearArray;
sessionArray = updatedSessionArray;
campusDeliveryArray = updatedCampusDeliveryArray;
subjectCodeArray = updatedSubjectCodeArray;
console.log('Updated Year Array:', yearArray);
console.log('Updated Session Array:', sessionArray);
console.log('Updated Campus/Delivery Array:', campusDeliveryArray);
console.log('Updated Subject Code Array:', subjectCodeArray);
populateTable();
submitOCRResults();
}
// Re-sync the arrays (and downstream table/server state) whenever any cell
// of the extracted-values table is edited in place.
document.getElementById('extracted-values-table').addEventListener('input', function () {
updateArraysFromTable();
});
// Validate every field of the user-info form; when all pass, POST the data
// to /submitUserInfo and advance to the enrollment-upload modal. On any
// validation failure the function returns without submitting.
function validateAndNext() {
const firstName = document.getElementById('first-name').value;
const lastName = document.getElementById('last-name').value;
const universityName = document.getElementById('university-name').value;
const phone = document.getElementById('phone').value;
const email = document.getElementById('email').value;
const studentId = document.getElementById('student-id').value;
const password = document.getElementById('password').value;
// Validation rules: letters-only names, 10-digit phone, 7-digit student ID,
// password with lower/upper/digit/special and length >= 8.
const nameRegex = /^[A-Za-z]+$/;
const universityNameRegex = /^[A-Za-z\s]+$/;
const phoneRegex = /^\d{10}$/;
const emailRegex = /^[^\s@]+@[^\s@]+\.[^\s@]+$/;
const studentIdRegex = /^\d{7}$/;
const passwordRegex = /^(?=.*[a-z])(?=.*[A-Z])(?=.*\d)(?=.*[@$!%*?&])[A-Za-z\d@$!%*?&]{8,}$/;
// validateInput returns true when the value is INVALID (error shown).
const firstNameError = validateInput(firstName, nameRegex, 'first-name', 'first-name-error', 'Invalid first name');
const lastNameError = validateInput(lastName, nameRegex, 'last-name', 'last-name-error', 'Invalid last name');
const universityNameError = validateInput(universityName, universityNameRegex, 'university-name', 'university-name-error', 'Invalid university name');
const phoneError = validateInput(phone, phoneRegex, 'phone', 'phone-error', 'Invalid phone number');
const emailError = validateInput(email, emailRegex, 'email', 'email-error', 'Invalid email address');
const studentIdError = validateInput(studentId, studentIdRegex, 'student-id', 'student-id-error', 'Invalid student ID');
const passwordError = validateInput(password, passwordRegex, 'password', 'password-error', 'Invalid password');
if (firstNameError || lastNameError || universityNameError || phoneError || emailError || studentIdError || passwordError) {
return;
}
const userData = {
firstName: firstName,
lastName: lastName,
universityName: universityName,
studentId: studentId,
phone: phone,
email: email,
password: password
};
console.log('Stored user data:', userData);
fetch('/submitUserInfo', {
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify(userData),
})
.then(response => response.json())
.then(data => {
if (data.success) {
console.log('User data saved successfully');
// Advance the wizard: hide user-info, show enrollment upload.
document.getElementById('user-info-modal').style.display = 'none';
document.getElementById('enrollment-upload-modal').style.display = 'block';
} else {
// Surface the server-side rejection message inline.
console.error('Error saving user data:', data.message);
const errorMessage = document.getElementById('error-message');
errorMessage.textContent = data.message;
errorMessage.style.display = 'block';
}
})
.catch(error => console.error('Error:', error));
}
// Validate one form value against a regex, toggling the inline error message
// and the input's 'error' class. Returns true when the value is INVALID
// (i.e. an error is being shown), false when it passes.
function validateInput(value, regex, inputId, errorId, errorMessage) {
    const errorElement = document.getElementById(errorId);
    const field = document.getElementById(inputId);
    const invalid = !regex.test(value);
    errorElement.textContent = invalid ? errorMessage : '';
    field.classList.toggle('error', invalid);
    return invalid;
}
// Rebuild the project table with one row per subject code: a read-only code
// field, a Yes/No "has project" select, and a file-upload input.
// NOTE(review): reads the implicit global subjectCodeArray assigned by
// updateArraysFromTable(); calling this before that runs would throw.
function populateTable() {
const table = document.getElementById('project-table');
const tbody = table.getElementsByTagName('tbody')[0];
tbody.innerHTML = '';
for (let i = 0; i < subjectCodeArray.length; i++) {
const newRow = tbody.insertRow();
const subjectCodeCell = newRow.insertCell(0);
const hasProjectCell = newRow.insertCell(1);
const projectUploadCell = newRow.insertCell(2);
subjectCodeCell.innerHTML = `<input type="text" class="subject-code" value="${subjectCodeArray[i]}" data-row-id="${i}" readonly>`;
hasProjectCell.innerHTML = `<select data-row-id="${i}"><option value="Yes">Yes</option><option value="No">No</option></select>`;
projectUploadCell.innerHTML = `<input type="file" class="project-upload" data-row-id="${i}">`;
console.log('Table is being populated'); // logs once per row (debug)
}
table.style.display = 'block';
}
// Advance from project upload to certifications and persist the subject data.
// NOTE(review): a second `next()` is declared later in this file; function
// declarations are hoisted and the later one wins, so this body is shadowed
// and never runs — rename one of them or remove the duplicate.
function next() {
document.getElementById('project-upload').style.display = 'none';
document.getElementById('certifications-modal').style.display = 'block';
fetch('/saveProjectData', {
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify({subjectCodeArray, }),
})
.then(response => response.json())
.then(data => {
if (data.success) {
console.log('Subject data saved successfully');
} else {
console.error('Error saving subject data:', data.message);
}
})
.catch(error => console.error('Error:', error));
}
// ---- Wizard modal helpers: each toggles one step of the sign-up flow. ----

// Show the OCR text in the results panel.
// NOTE(review): an identical showOCRResults is declared again near the end of
// this file; the later declaration wins (harmless here, but remove one).
function showOCRResults(text) {
document.getElementById('ocr-text').value = text;
document.getElementById('ocr-results').style.display = 'block';
}
// Close the terms modal and open the user-info modal.
// NOTE(review): the name suggests it shows the enrollment upload — confirm.
function showEnrollmentUpload() {
document.getElementById('terms-modal').style.display = 'none';
document.getElementById('user-info-modal').style.display = 'block';
}
// Dismiss the terms modal without advancing.
function closeTermsModal() {
document.getElementById('terms-modal').style.display = 'none';
}
// Leave the OCR/extraction step, open the project modal, and persist the
// extracted arrays (implicit globals set by updateArraysFromTable) to the server.
function showUserInfo() {
document.getElementById('ocr-results').style.display = 'none';
document.getElementById('enrollment-upload-modal').style.display = 'none';
document.getElementById('extracted-values-table').style.display = 'none';
document.getElementById('project-modal').style.display = 'block';
fetch('/saveSubjectData', {
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify({ yearArray, sessionArray, campusDeliveryArray, subjectCodeArray }),
})
.then(response => response.json())
.then(data => {
if (data.success) {
console.log('Subject data saved successfully');
} else {
console.error('Error saving subject data:', data.message);
}
})
.catch(error => console.error('Error:', error));
}
// Switch from user-info to the project-upload step.
function showProjectUpload() {
document.getElementById('user-info-modal').style.display = 'none';
document.getElementById('project-upload').style.display = 'block';
}
// Reveal the extracted-values table.
function extract() {
document.getElementById('extracted-values-table').style.display = 'block';
}
// Return to the user-info modal after reviewing extracted values.
function confirmExtractedInformation() {
document.getElementById('user-info-modal').style.display = 'block';
}
// NOTE(review): duplicate declaration — this overrides the earlier next()
// that saved project data, so that save never happens. Rename or merge.
function next() {
document.getElementById('user-info-modal').style.display = 'none';
}
// Open the certifications modal only when the user said they have some;
// otherwise skip straight past it.
function handleCertifications() {
const hasCertifications = document.getElementById('has-certifications').value;
if (hasCertifications === 'yes') {
document.getElementById('certifications-modal').style.display = 'block';
} else {
handleNextAfterCertifications();
}
}
// Close the certifications modal.
function handleNextAfterCertifications() {
document.getElementById('certifications-modal').style.display = 'none';
}
// Allow pasting an image from the clipboard: preview it and run OCR, same as
// choosing a file.
document.addEventListener('paste', function (event) {
const items = (event.clipboardData || event.originalEvent.clipboardData).items;
if (items.length > 0 && items[0].type.indexOf('image') !== -1) {
const file = items[0].getAsFile();
displayImage(file);
performOCR(file);
}
});
// Second set of DOM references for the manual upload flow.
// NOTE(review): img1 and progress1 are never used below (the handlers use the
// earlier `img` and `progress` globals instead) — confirm and remove.
const fileSelector = document.querySelector('#enrollment-upload');
const start = document.querySelector('#upload-btn');
const img1 = document.querySelector('#uploaded-img');
const progress1 = document.querySelector('.progress');
const textarea = document.querySelector('#ocr-text');
// Preview the chosen file as soon as it is selected.
fileSelector.onchange = () => {
var file = fileSelector.files[0];
var imgUrl = window.URL.createObjectURL(new Blob([file], { type: 'image/jpg' }));
img.src = imgUrl;
}
// Run OCR on the selected file when the upload button is clicked.
start.onclick = () => {
textarea.innerHTML = '';
const rec = new Tesseract.TesseractWorker();
rec.recognize(fileSelector.files[0])
.progress(function (response) {
if (response.status == 'recognizing text') {
progress.innerHTML = response.status + ' ' + response.progress;
} else {
progress.innerHTML = response.status;
}
})
.then(function (data) {
textarea.innerHTML = data.text;
progress.innerHTML = 'Done';
showOCRResults(data.text);
})
}
// NOTE(review): duplicate of the showOCRResults declared earlier in this
// file; identical body, so behavior is unchanged, but one should be removed.
function showOCRResults(text) {
document.getElementById('ocr-text').value = text;
document.getElementById('ocr-results').style.display = 'block';
}
// POST the raw OCR text plus the four (implicitly global) extracted arrays
// to the server. Called after every table edit via updateArraysFromTable().
function submitOCRResults() {
const ocrResults = document.getElementById('ocr-text').value;
const updatedArrays = {
yearArray: yearArray,
sessionArray: sessionArray,
campusDeliveryArray: campusDeliveryArray,
subjectCodeArray: subjectCodeArray
};
const ocrData = {
ocrResults: ocrResults,
updatedArrays: updatedArrays
};
fetch('/submitOCRResults', {
method: 'POST',
headers: {
'Content-Type': 'application/json'
},
body: JSON.stringify(ocrData)
})
.then(response => response.json())
.then(data => {
if (data.success) {
console.log('OCR results and arrays submitted successfully');
} else {
console.error('Failed to submit OCR results and arrays');
}
})
.catch(error => {
console.error('Error submitting OCR results and arrays:', error);
});
}
import 'package:flutter/material.dart';
import 'package:flutter_gen/gen_l10n/app_localizations.dart';
import 'package:flutter_screenutil/flutter_screenutil.dart';
/// Value object pairing a help-page title with the widget that renders
/// that page's content.
class HelpDTO {
final String title; // page title shown in the help list / app bar
final Widget widget; // pre-built page body
HelpDTO({required this.title, required this.widget});
}
/// Builds the six help pages as [HelpDTO]s, all sharing the same [title].
/// Each page body is a [Wrap] of localized text blocks (helpDtoOne1 ...
/// helpDtoSix6), with numbered steps rendered as Row('N. ' + expanded text).
/// The first page also shows an illustrative image asset.
List<HelpDTO> getHelpDtoItems(BuildContext context, String title) {
return [
// Page 1: intro text, image, and a three-step numbered list.
HelpDTO(
title: title,
widget: Wrap(
runSpacing: 20.sp,
children: [
Image.asset(
'assets/images/help1-image.png',
),
Text(
AppLocalizations.of(context)!.helpDtoOne1,
),
Container(), // spacer between paragraphs (Wrap runSpacing applies)
Text(AppLocalizations.of(context)!.helpDtoOne2),
Row(
crossAxisAlignment: CrossAxisAlignment.start,
children: [
Text('1. '),
Expanded(child: Text(AppLocalizations.of(context)!.helpDtoOne3)),
],
),
Row(
crossAxisAlignment: CrossAxisAlignment.start,
children: [
Text('2. '),
Expanded(child: Text(AppLocalizations.of(context)!.helpDtoOne4)),
],
),
Row(
crossAxisAlignment: CrossAxisAlignment.start,
children: [
Text('3. '),
Expanded(child: Text(AppLocalizations.of(context)!.helpDtoOne5)),
],
),
],
),
),
// Page 2: intro plus a two-step numbered list.
HelpDTO(
title: title,
widget: Wrap(
runSpacing: 20.sp,
children: [
Text(
AppLocalizations.of(context)!.helpDtoTwo1,
),
Row(
crossAxisAlignment: CrossAxisAlignment.start,
children: [
Text('1. '),
Expanded(child: Text(AppLocalizations.of(context)!.helpDtoTwo2)),
],
),
Row(
crossAxisAlignment: CrossAxisAlignment.start,
children: [
Text('2. '),
Expanded(child: Text(AppLocalizations.of(context)!.helpDtoTwo3)),
],
),
],
),
),
// Page 3: two plain paragraphs.
HelpDTO(
title: title,
widget: Wrap(
runSpacing: 20.sp,
children: [
Text(
AppLocalizations.of(context)!.helpDtoThree1,
),
Text(
AppLocalizations.of(context)!.helpDtoThree2,
),
],
),
),
// Page 4: two plain paragraphs.
HelpDTO(
title: title,
widget: Wrap(
runSpacing: 20.sp,
children: [
Text(
AppLocalizations.of(context)!.helpDtoFour1,
),
Text(
AppLocalizations.of(context)!.helpDtoFour2,
),
],
),
),
// Page 5: two plain paragraphs.
HelpDTO(
title: title,
widget: Wrap(
runSpacing: 20.sp,
children: [
Text(
AppLocalizations.of(context)!.helpDtoFive1,
),
Text(
AppLocalizations.of(context)!.helpDtoFive2,
),
],
),
),
// Page 6: two intro paragraphs, a two-step list, and two closing paragraphs.
HelpDTO(
title: title,
widget: Wrap(
runSpacing: 20.sp,
children: [
Text(
AppLocalizations.of(context)!.helpDtoSix1,
),
Text(
AppLocalizations.of(context)!.helpDtoSix2,
),
Row(
crossAxisAlignment: CrossAxisAlignment.start,
children: [
Text('1. '),
Expanded(child: Text(AppLocalizations.of(context)!.helpDtoSix3)),
],
),
Row(
crossAxisAlignment: CrossAxisAlignment.start,
children: [
Text('2. '),
Expanded(child: Text(AppLocalizations.of(context)!.helpDtoSix4)),
],
),
Text(
AppLocalizations.of(context)!.helpDtoSix5,
),
Text(
AppLocalizations.of(context)!.helpDtoSix6,
),
],
),
),
];
}
<html xmlns:th="http://www.thymeleaf.org"
xmlns:layout="http://www.ultraq.net.nz/thymeleaf/layout"
layout:decorate="~{layout/common-base}">
<head>
<link rel="stylesheet" th:href="@{/css/page/letsparty/home.css}" >
</head>
<div layout:fragment="content-top" class="container">
<div th:if="${errorMessage}" class="alert alert-danger">
<span th:text="${errorMessage}"></span>
</div>
<div class="row mb-3">
<div class="col-12">
<div class="card card-line">
<div class="card-header">
<ul id="letsparty-categorys" class="nav nav-tabs card-header-tabs" >
<li class="nav-item">
<a href="#" data-value="0" class="nav-link" >전체</a>
</li>
<li class="nav-item">
<a href="#" data-value="10" class="nav-link" >취미/동호회</a>
</li>
<li class="nav-item">
<a href="#" data-value="20" class="nav-link" >가족</a>
</li>
<li class="nav-item">
<a href="#" data-value="30" class="nav-link" >스터디</a>
</li>
<li class="nav-item">
<a href="#" data-value="40" class="nav-link">학교/동아리</a>
</li>
<li class="nav-item">
<a href="#" data-value="50" class="nav-link">운동</a>
</li>
<li class="nav-item">
<a href="#" data-value="60" class="nav-link">지역</a>
</li>
<li class="nav-item">
<a href="#" data-value="70" class="nav-link">재테크</a>
</li>
<li class="nav-item">
<a href="#" data-value="100" class="nav-link">자유주제</a>
</li>
</ul>
</div>
<div class="card-body">
<div class="d-flex justify-content-start mb-3">
<select class="form-select me-3" style="width: 150px;" name="sort">
<option value="latest" th:selected="${#request.getParameter('sort') == 'latest'}">최신순</option>
<option value="oldest" th:selected="${#request.getParameter('sort') == 'oldest'}">오래된 순</option>
<option value="popular" th:selected="${#request.getParameter('sort') == 'popular'}">인기순</option>
</select>
<select class="form-select me-3" style="width: 150px;" name="rows">
<option value="10" th:selected="${#request.getParameter('rows') == '10'}">10개씩</option>
<option value="20" th:selected="${#request.getParameter('rows') == '20'}">20개씩</option>
<option value="50" th:selected="${#request.getParameter('rows') == '50'}">50개씩</option>
</select>
</div>
<table class="table">
<thead>
<tr>
<th>게시글번호</th>
<th>제목</th>
<th>파티명</th>
<th>날짜</th>
<th>조회수/댓글</th>
</tr>
</thead>
<tbody>
<!-- 검색 결과가 있을 경우 -->
<tr th:each="post : ${result.posts}" th:if="${not #lists.isEmpty(result.posts)}">
<td class="postNo-td" th:text="${post.no}"></td>
<td class="ellipsis-post-title"><a th:href="@{/letsparty/read/{postNo}(postNo=${post.no})}" th:text="${post.title}"></a></td>
<td class="ellipsis-party-name"><a th:href="@{/party/{partyNo}(partyNo=${post.party.no})}" th:text="${post.party.name}"></a></td>
<td th:text="${#temporals.format(post.createdAt, 'yyyy년 M월 d일 HH:mm:ss')}"></td>
<td th:text="${post.readCnt + '/' + post.commentCnt}"></td>
</tr>
<!-- 검색 결과가 없을 경우 -->
<tr th:if="${#lists.isEmpty(result.posts)}">
<td colspan="5" class="text-center">검색결과가 존재하지 않습니다.</td>
</tr>
</tbody>
</table>
<div class="card-footer" style="background-color: white;">
<div th:if="${result.pagination.totalRows > 0}">
<div th:with="currentPage=${result.pagination.page},
first=${result.pagination.first},
last=${result.pagination.last},
prePage=${result.pagination.prePage},
nextPage=${result.pagination.nextPage},
beginPage=${result.pagination.beginPage},
endPage=${result.pagination.totalPages},
blockEndPage=${result.pagination.endPage}">
<ul class="pagination justify-content-center">
<li class="page-item" th:classappend="${first} ? 'disabled' : ''">
<a href="#" class="page-link" th:onclick="'changePage(event, 1)'"><i class="fa-solid fa-angles-left"></i></a>
</li>
<li class="page-item" th:classappend="${first} ? 'disabled' : ''">
<a href="#" class="page-link" th:onclick="'changePage(event, ' + ${prePage} + ')'"><i class="fa-solid fa-chevron-left"></i></a>
</li>
<li class="page-item" th:each="num: ${#numbers.sequence(beginPage, blockEndPage)}"
th:classappend="${currentPage == num} ? 'active' : ''">
<a href="#" class="page-link" th:onclick="'changePage(event, ' + ${num} + ')'" th:text="${num}"></a>
</li>
<li class="page-item" th:classappend="${last} ? 'disabled' : ''">
<a href="#" class="page-link" th:onclick="'changePage(event, ' + ${nextPage} + ')'"><i class="fa-solid fa-chevron-right"></i></a>
</li>
<li class="page-item" th:classappend="${last} ? 'disabled' : ''">
<a href="#" class="page-link" th:onclick="'changePage(event, ' + ${endPage} + ')'"><i class="fa-solid fa-angles-right"></i></a>
</li>
</ul>
</div>
</div>
<!-- 검색 구간 -->
<div class="d-flex justify-content-center">
<form id="form-letsparty-search" class="row row-cols-md-auto g-3 align-items-center" method="get" action="/letsparty/search">
<input type="hidden" name="categoryNo" th:value="${param.categoryNo}"/>
<input type="hidden" name="sort" th:value="${param.sort}"/>
<input type="hidden" name="rows" th:value="${param.rows}"/>
<input type="hidden" name="page" th:value="${param.page}"/>
<div class="col-12">
<select class="form-select" name="opt">
<option value="title" th:selected="${#request.getParameter('opt') == 'title'}">제목</option>
<option value="content" th:selected="${#request.getParameter('opt') == 'content'}">내용</option>
<option value="party" th:selected="${#request.getParameter('opt') == 'party'}">파티명</option>
</select>
</div>
<div class="col-12">
<input type="text" class="form-control" name="keyword" th:value="${param.keyword}"/>
</div>
<div class="col-12">
<button id="outline-btn" type="button" class="btn btn-outline-primary btn-sm" onclick="searchLetsParty()">검색</button>
</div>
</form>
</div>
<div class="text-end" th:if="${isLeader}">
<a id="btn" class="btn btn-primary btn-sm" th:href="@{/letsparty/post}">게시글 등록</a>
</div>
</div>
</div>
</div>
</div>
</div>
</div>
<div layout:fragment="content-bottom">
<div class="modal">
<h1>modal영역</h1>
</div>
</div>
<script layout:fragment="script" th:src="@{/js/page/letsparty/home.js}"></script>
</html> |
---
title: "Cuentas de emisiones a la atmósfera"
output: html_notebook
---
```{r message=FALSE, warning=FALSE}
if (!"gghighlight" %in% installed.packages()) {install.packages("gghighlight")} # Para resaltar líneas
if (!"viridis" %in% installed.packages()) {install.packages('viridis')} # Paleta colores
if (!"ggthemes" %in% installed.packages()) {install.packages('ggthemes')} # Temas
if (!"mapSpain" %in% installed.packages()) {install.packages('mapSpain')} # Mapas de España
if (!"tmap" %in% installed.packages()) {install.packages('tmap') } # Dibujar mapas
if (!"cartography" %in% installed.packages()) {install.packages('cartography')} # Dibujar mapas
```
```{r message=FALSE, warning=FALSE}
# Librerías que podéis necesitar
library(tidyverse)
library(lubridate) # Manejo de fechas
library(scales) # Fomatar fechas
library(gghighlight) # Para resaltar líneas
library(viridis) # Paleta colores
library(ggthemes) # Temas
library(sf) # Manejo objetos sf
library(mapSpain) # Mapas de España
library(tmap) # Dibujar mapas
library(cartography) # Dibujar mapas
library(treemap)
```
```{r message=FALSE, warning=FALSE}
## Required libraries: install on first use, then load.
if (!"tidyverse" %in% installed.packages()) {install.packages("tidyverse")}
library(tidyverse) # full data-wrangling suite
if (!"scales" %in% installed.packages()) {install.packages("scales")}
library(scales) # scale/label formatting
if (!"ggrepel" %in% installed.packages()) {install.packages("ggrepel")}
library(ggrepel) # non-overlapping text labels
if (!"gghighlight" %in% installed.packages()) {install.packages("gghighlight")}
library(gghighlight) # highlight lines
if (!"gganimate" %in% installed.packages()) {install.packages("gganimate")}
library(gganimate)
if (!"RColorBrewer" %in% installed.packages()) {install.packages("RColorBrewer")}
library(RColorBrewer) # colour palettes
if (!"viridis" %in% installed.packages()) {install.packages("viridis")}
library(viridis)
if (!"gifski" %in% installed.packages()) {install.packages("gifski")}
library(gifski)
```
```{r}
# Base plotting theme for the whole notebook: theme_bw plus a consistent font
# family and text sizes derived from `base_size` (titles, axes, facet strips,
# caption). Returns a ggplot2 theme object to be added to plots.
theme_a <- function(base_size = 9,
                    base_family = "sans"
)
{
  theme_bw(base_size = base_size) +
    theme(
      legend.position = "right",
      legend.text     = element_text(size = base_size + 2, family = base_family),
      plot.title      = element_text(size = base_size + 4, vjust = 1.25,
                                     family = base_family, hjust = 0.5),
      plot.subtitle   = element_text(size = base_size + 2, family = base_family, hjust = 0.5),
      text            = element_text(size = base_size + 2, family = base_family),
      axis.text.x     = element_text(size = base_size + 2, family = base_family),
      axis.text.y     = element_text(size = base_size + 2, family = base_family),
      axis.title.x    = element_text(size = base_size + 2, vjust = 0, family = base_family),
      axis.title.y    = element_text(size = base_size + 2, vjust = 1.25, family = base_family),
      plot.caption    = element_text(size = base_size - 1, family = base_family),
      strip.text      = element_text(size = base_size + 1, family = base_family),
      strip.text.x    = element_text(size = base_size + 1, family = base_family),
      strip.text.y    = element_text(size = base_size + 1, family = base_family)
    )
}
```
```{r}
# Download INE table 29252 ("Cuentas de emisiones a la atmósfera") as a
# semicolon-separated CSV, with Spanish locale (comma decimal, dot grouping).
df <- read_csv2("https://www.ine.es/jaxi/files/tpx/es/csv_bdsc/29252.csv?nocab=",
col_types = cols('Sustancias contaminantes' = col_character(),
'Ramas de actividad (CNAE 2009)' = col_character(),
'periodo' = col_character(),
'Total' = col_number()),
locale = locale(decimal_mark = ",", grouping_mark = ".")
)
```
```{r}
# Strip parenthesised footnote suffixes (e.g. " (p)") from the year strings.
df$periodo = gsub("\\s*\\([^\\)]+\\)","",as.character(df$periodo))
# NOTE(review): as.Date with format "%Y" fills in the current day/month, so
# the dates are only meaningful at year granularity — confirm this is intended
# (later chunks filter on exact dates such as "2008-03-18").
df$periodo = as.Date(as.character(df$periodo), format = "%Y")
# Drop rows with missing totals.
df = df[!(is.na(df$Total) | df$Total==""), ]
```
```{r}
# Rename columns to short, code-friendly names used in the rest of the notebook.
df = df %>%
rename(
sustancia = 'Sustancias contaminantes',
actividad = 'Ramas de actividad (CNAE 2009)' ,
total = Total,
año = periodo
)
```
Dataframe que describe la cantidad de sustancias contaminantes que se producen en distintas actividades a lo largo de los años.
```{r}
head(df)
```
Dado que varias sustancias están en distintas unidades creo otro dataframe donde todas las sustancias estarán en la misma unidad.
```{r}
# Homogenise units: the series reported in "miles de toneladas" (thousands of
# tonnes) are rescaled by x1000 so every substance shares the same unit.
# Fix: the original repeated the CO2 rescale twice (the second assignment
# re-read the untouched `df`, so the result was the same by accident); the
# rescale is now applied exactly once per series.
df_units <- df
en_miles <- c('CO2 - Dióxido de carbono (miles de toneladas)',
              'PFC - Perfluorocarbonos o compuestos polifluorcarbonados (miles de toneladas de CO2 equivalente)',
              'HFC - Hidrofluorocarbonos o compuestos hidrogenofluorcarbonados (miles de toneladas de CO2 equivalente)',
              'SF6 - Hexafluoruro de azufre (miles de toneladas de CO2 equivalente)')
df_units[df_units$sustancia %in% en_miles,]$total = df_units[df_units$sustancia %in% en_miles,]$total * 1000
# Drop the "miles de " prefix from labels now that values are in tonnes.
df_units$sustancia = gsub("miles de ","",as.character(df_units$sustancia))
```
# ¿Qué sustancias contaminantes hay y cómo se distribuyen?
Dado que hay actividades que engloban otras elimino estas últimas para hacer un análisis correcto.
```{r fig.width=9, message=FALSE, warning=FALSE, paged.print=FALSE}
# Exclude the aggregate rows so totals are not double-counted, then draw one
# histogram per substance (free scales: magnitudes differ by orders).
df2 <- df[!df$actividad == 'TOTAL SUSTANCIA CONTAMINANTE' & !df$actividad == 'Total ramas de actividad',]
# Keep only the substance acronym (text before the first "-").
df2$sustancia <- sub("\\-.*", "", df2$sustancia)
ggplot(df2, aes(x = total)) +
geom_histogram(bins=30) +
facet_wrap(~ sustancia, scales = "free") +
labs(title = "Histogramas de las sustancias contaminantes",
caption = "Fuente: INE") +
theme_a() +
theme(axis.title.y=element_blank(),
axis.title.x=element_blank(),
panel.grid.major.y = element_blank(),
panel.grid.major.x = element_line(colour = "gray"))
```
El Dióxido de Carbono (CO2) suele contaminar menos de 2 miles de toneladas y es la sustancia que más contamina.
Le siguen los Hidrofluorocarbonos (HFC) que suelen contaminar unas 100 miles de toneladas, los Óxidos de nitrógeno (NOx) suelen contaminar menos de 4000 toneladas, el Monóxido de carbono (CO) suele contaminar menos de 3000 toneladas y el Metano (CH4) suele contaminar menos de 1000 toneladas.
Entre las sustancias que contaminan menos de 600 toneladas son el Óxidos de azufre (SOx) y las partículas PM2.5 y PM10.
Entre las sustancias menos contaminantes se encuentran el Óxido nitroso (N2O), el Hexafluoruro de azufre (SF6), los Perfluorocarbonos (PFC) o el Amoniaco (NH3) con menos de 50 toneladas.
# ¿Cuáles son las actividades que más contaminan de media?
```{r fig.width=9}
# Aggregate per activity and draw a treemap sized by that aggregate.
# NOTE(review): the column is named `media` (mean) but is computed with
# median() — confirm which statistic was intended.
df3 <- df_units %>% group_by(actividad) %>% summarise(media = median(total), .groups = 'drop')
# Exclude aggregate rows to avoid double counting.
df3 <- df3[!df3$actividad == 'TOTAL SUSTANCIA CONTAMINANTE' & !df3$actividad == 'Total ramas de actividad',]
# Keep the descriptive part of the activity label (text after ": ").
df3$actividad <- gsub(".*: ","", df3$actividad)
treemap(df3,
index= "actividad",
vSize="media",
type="index",
fontsize.labels = c(10),
fontface.labels = c(1),
inflate.labels=F,
title = "Contaminación por actividad"
)
```
La Agricultura, ganadería y caza es la actividad que más contamina, casi un 40% del total de emisiones.
Le siguen los hogares que contaminan casi el 30% del total de las emisiones.
En tercer lugar están las actividades de la metalurgia, fabricación de productos de hierro y acero.
En cuarto lugar el suministro de energía eléctrica, gas, vapor y aire acondicionado.
# Evolución del total de las sustancias contaminantes en miles cada año
```{r fig.width=9, fig.height=6}
# Heat map of yearly totals (in thousands) per substance, using only the
# "TOTAL SUSTANCIA CONTAMINANTE" aggregate rows; substances ordered by total.
ggplot(data = df_units[df_units$actividad == 'TOTAL SUSTANCIA CONTAMINANTE',], aes(x = año, y = reorder(sub("\\-.*", "", sustancia),(total)), fill=total/1000)) +
geom_tile()+
# Print the value (in thousands) inside each tile.
geom_text(aes(label = round(total/1000,1)), color="white",
size =3, hjust=.5, vjust=.5 ) +
scale_fill_gradient(low = "steelblue", high = "red4")+
labs(x = "Año", fill = "Cantidad de sustancias \ncontaminantes en miles",
title = "Evolución de las sustancias contaminantes en miles",
caption = "Fuente: INE") +
scale_x_date(date_breaks = "1 year",
date_labels = "%y",
expand = c(0,0),
sec.axis = dup_axis()) +
theme_a() +
theme (axis.title.y=element_blank(),
panel.border = element_blank(),
panel.background = element_blank())
```
CO2 es, con diferencia, la sustancia más contaminante. Sin embargo, tiene una tendencia decreciente a lo largo de los años.
HCF, pese a ser la segunda sustancia más contaminante, ha conseguido decrecer a una tercera parte.
Mientras CO, COVNM, NH3, SF6, N20 y CH4 se han mantenido a lo largo de los años.
No se observa ningún incremento relevante en ninguna sustancia contaminante a lo largo de los años.
# ¿Cómo se comporta CO2 en cada actividad?
```{r fig.width=9, fig.height=6, message=FALSE, warning=FALSE, paged.print=FALSE}
# CO2 (in thousands of tonnes) per activity over time, one facet per activity
# (facet labels keep only the CNAE code before the ":").
ggplot() +
geom_step(data = df_units[!df_units$actividad == 'TOTAL SUSTANCIA CONTAMINANTE' & !df_units$actividad == 'Total ramas de actividad' & df_units$sustancia == 'CO2 - Dióxido de carbono (toneladas)',] , aes(x = año, y = total/1000, color = actividad)) +
labs(x = "Años", y = "Toneladas de C02",
title = "Evolución del CO2 en miles de Toneladas por industria",
caption = "Fuente: INE") +
guides(color = FALSE) +
scale_color_viridis (option = 'plasma', discrete =TRUE,
direction =-1,
begin=0.2, end=0.8) + # Continuous palette used discretely
scale_x_date (limits=c (min(df$año, na.rm = TRUE)-365, max(df$año, na.rm = TRUE)+365),
breaks = "5 years",
labels = date_format("%Y")) + # Extend the x axis ~1 year each side
facet_wrap(~ sub("\\:.*", "", actividad)) +
theme_a()
```
Se observa como los hogares (h) y el suministro de energía eléctrica, gas, vapor y aire acondicionado (35) son las actividades que más toneladas de CO2 emiten.
Les siguen el transporte terrestre (49) y por tubería y la fabricación de otros productos minerales no metálicos (23).
# ¿Cómo se comportan el resto de sustancias contaminantes en el suministro de energía eléctrica, gas, vapor y aire acondicionado?
```{r message=FALSE, warning=FALSE, paged.print=FALSE}
# Energy-supply activity (CNAE 35): all substances except CO2 (whose scale
# would dwarf the rest); one labelled line per substance.
sumi = df[df$actividad == "35: Suministro de energía eléctrica, gas, vapor y aire acondicionado" & !df$año == as.Date("2020-03-18") & !df$sustancia == 'CO2 - Dióxido de carbono (miles de toneladas)' ,]
ggplot() +
geom_line(data = sumi, aes(x = año, y = total, color = sustancia)) +
labs(x = "Año", y = "Cantidad de sustancias contaminantes en miles de Toneladas",
title = "Evolución de las sustancias contaminantes en miles de Toneladas \nen el suministro de energía eléctrica, gas, vapor y aire acondicionado",
caption = "Fuente: INE") +
# Label each line at its most recent year, nudged clear of the lines.
geom_text_repel(data = sumi %>%
top_n(1, año),
aes(x = año, y = total, color = sustancia,
label = paste0(sub("\\-.*", "", sustancia))),
size =3,
nudge_x = 700, # x-axis nudge
nudge_y = 500,
direction="y",
max.overlaps=20,
segment.size = 0.1,
segment.linetype = 1
) + # y-axis nudge
guides(color = FALSE) +
scale_color_brewer (palette ='Set1') + # Divergent palette
scale_x_date (limits=c (min(df$año, na.rm = TRUE)-365, max(df$año, na.rm = TRUE)+365),
labels = date_format("%Y")) + # Extend the x axis for the labels
theme_a()
```
Los Óxidos de nitrógeno (NOx) son los más contaminantes aun habiendo descendido a lo largo de los años.
Son seguidos, ya con mucha diferencia, por el Monóxido de carbono (CO) que es la sustancia con mayor crecimiento a lo largo de los años.
Los Perfluorocarbonos (PFC) y los Hidrofluorocarbonos (HFC) son las sustancias que menos contaminan en el suministro de energía eléctrica, gas, vapor y aire acondicionado.
# ¿Cómo ha variado la cantidad de sustancias contaminantes en 2008 respecto a 2020 en el suministro de energía eléctrica, gas, vapor y aire acondicionado?
```{r}
# Energy-supply rows reshaped to wide format (one column per year), plus
# label_above/label_below columns that decide on which side of each point the
# value label is drawn in the dumbbell plot below.
# Fix: the row filter previously mixed `df` with a mask built from
# `df_units$año`; that only worked because both frames happen to be
# row-aligned — the mask is now built from `df` itself.
sumi_var <- df[df$actividad == "35: Suministro de energía eléctrica, gas, vapor y aire acondicionado" & !df$año == as.Date("2020-03-18"),] %>%
pivot_wider (names_from = año,values_from = total)
sumi_var$sustancia <- sub("\\-.*", "", sumi_var$sustancia)
sumi_var$sustancia[sumi_var$sustancia == "CO2 "] <- "CO2 en miles"
sumi_var$sustancia[sumi_var$sustancia == "PFC "] <- "PFC en miles"
sumi_var$sustancia[sumi_var$sustancia == "HFC "] <- "HFC en miles"
# NOTE(review): these hand-picked values mark points whose labels must go on
# the opposite side — they are tied to this exact data snapshot.
below <- c(243400.0, 16519.6)
sumi_var <- sumi_var %>% dplyr::mutate(label_above = ifelse(`2008-03-18` %in% below, "", `2008-03-18`),
label_below = ifelse(`2008-03-18` %in% below, `2008-03-18`, ""))
sumi_var$label_below <- as.numeric(as.character(sumi_var$label_below))
sumi_var$label_above <- as.numeric(as.character(sumi_var$label_above))
below2 <- c(8584.6, 224900.0)
sumi_var <- sumi_var %>% dplyr::mutate(label_above2 = ifelse(`2019-03-18` %in% below2, "", `2019-03-18`),
label_below2 = ifelse(`2019-03-18` %in% below2, `2019-03-18`, ""))
sumi_var$label_below2 <- as.numeric(as.character(sumi_var$label_below2))
sumi_var$label_above2 <- as.numeric(as.character(sumi_var$label_above2))
```
```{r fig.width=9, fig.height=8, message=FALSE, warning=FALSE, paged.print=FALSE}
# Dumbbell plot: 2008 vs 2019 value per substance, connected by a grey
# segment; four geom_text layers place labels above/below each end point
# using the label_* columns prepared in the previous chunk.
mi_paleta <- brewer.pal(8,"Blues") # Sequential palette
cols <- c("2008"= mi_paleta [8],"2019"= mi_paleta [5])
ggplot(sumi_var, aes(y=reorder(sustancia,`2019-03-18`))) +
geom_segment(aes(x=`2008-03-18`,
xend=`2019-03-18`,
y=reorder(sustancia,`2019-03-18`),
yend=sustancia),
color="#b2b2b2", size=2)+
geom_point(aes(x=`2008-03-18`,
color = '2008'),
size=5)+
geom_point(aes(x=`2019-03-18`,
color = "2019"),
size=5)+
geom_text(aes(x = `label_above`,
label = paste0(round(`2008-03-18`/1000,1))),
size = 4,
hjust= 1.5,
vjust=0, show.legend = FALSE ) +
geom_text(aes(x = `label_below`,
label = paste0(round(`2008-03-18`/1000,1))),
size = 4,
hjust= -0.5,
vjust=0, show.legend = FALSE ) +
geom_text(aes(x = `label_above2`,
label = paste0(round(`2019-03-18`/1000,1))),
size = 4,
hjust= -0.75,
vjust=0, show.legend = FALSE ) +
geom_text(aes(x = `label_below2`,
label = paste0(round(`2019-03-18`/1000,1))),
size = 4,
hjust= 1.5,
vjust=0, show.legend = FALSE ) +
labs(title = "Variación de las sustancias contaminantes en el \nsuministro de energía eléctrica, gas, vapor y aire acondicionado \nen 2008 y 2019",
caption = "Fuente: INE")+
scale_x_continuous (sec.axis = dup_axis()) +
scale_color_manual(name = "", values = cols )+
theme_a() +
theme(legend.position="top",
axis.title.y=element_blank(),
axis.title.x=element_blank(),
panel.grid.major.y = element_blank(),
panel.grid.major.x = element_line(colour = "gray"))
```
Pese a seguir habiendo una diferencia muy notable entre las cantidades de CO2 y el resto de sustancias contaminantes es llamativo el descenso de más de la mitad de CO2 en 2019, así como el descenso de Óxidos de nitrógeno (NOx) y Óxidos de azufre (SOx).
# ¿Será la diferencia de CO2, entre 2008 y 2019, tan grande en el resto de actividades?
```{r}
# CO2 change per activity between 2008 and 2019, in wide format.
# Fixes: removed a no-op assignment (`co2_act$total <- co2_act$total`) and a
# dead mutate pipeline whose derived columns were immediately discarded by the
# following select(); the surviving pipeline recomputed them identically.
co2_act<-df[df$año %in% as.Date(c('2019-03-18','2008-03-18')) & !df$actividad == 'TOTAL SUSTANCIA CONTAMINANTE' & !df$actividad == 'Total ramas de actividad' & df$sustancia == 'CO2 - Dióxido de carbono (miles de toneladas)',]
# Keep only the CNAE code before the ":".
co2_act$actividad <- sub("\\:.*", "", co2_act$actividad)
co2_act <- co2_act %>%
pivot_wider (names_from = año,values_from = total)
# diff > 0 means emissions grew; the derived columns drive colour, label text
# and label placement in the plot that follows.
co2_act <- select(co2_act, 'actividad', '2008-03-18', '2019-03-18') %>%
mutate (diff = `2019-03-18` - `2008-03-18`) %>%
mutate (ajuste_color = ifelse(diff >0, "Incremento", "Descenso"),
text_valor = ifelse(diff >0, paste(round(`2019-03-18`,1),"M :",actividad),
paste(actividad,": ",round(`2019-03-18`,1),"M")),
ajuste_text = ifelse(diff >0, -0.2,1.2),
ajuste_num = ifelse(diff > 0, 1.2,-0.2))
```
```{r fig.width=9, fig.height=14, message=FALSE, warning=FALSE, paged.print=FALSE}
# Arrow plot: one arrow per activity from its 2008 CO2 value to its 2019
# value; colour encodes increase vs decrease, labels show the delta and the
# final value with the activity code.
ggplot(data = co2_act) +
geom_segment(aes(x = `2008-03-18`,
xend = `2019-03-18`,
y = reorder(actividad,`2019-03-18`),
yend = actividad,
color = ajuste_color),
size=1,
arrow = arrow(length = unit(0.20,"cm"),
ends = "last",
type = "closed")) +
geom_text(aes(x=`2008-03-18`,
y = reorder(actividad,`2019-03-18`),
color = ajuste_color,
label = paste(round(diff,1),"M"),
hjust = 0),
size = 4, vjust = -1, show.legend = FALSE) +
geom_text(aes(x=`2019-03-18`,
y= reorder(actividad,`2019-03-18`),
color = ajuste_color,
label = text_valor,
hjust = ajuste_text),
size=4, vjust = 0, show.legend = FALSE) +
labs(title = "Variación de C02 en miles por actividad 2008-2019",
caption = "Fuente: INE")+
scale_colour_manual('', values = c('Descenso'='steelblue4', 'Incremento'='red4')) +
scale_x_continuous (expand = c(0.09,0),
sec.axis = dup_axis()) +
scale_y_discrete (expand = c(0.09,0) ) +
theme_a() +
theme (legend.position = "top",
axis.title.y = element_blank(),
axis.text.y = element_blank(),
axis.title.x = element_blank(),
axis.ticks.y = element_blank(),
panel.grid.major.y = element_blank(),
panel.grid.major.x = element_line(colour = "gray"),
plot.margin = margin(1, 1, 1,1, "cm"))
```
El suministro de energía eléctrica, gas, vapor y aire acondicionado (35) es, con diferencia, la actividad que más ha variado su contaminación de CO2 comparando el año 2008 y 2019, reduciéndose en casi 5 Millones. Le siguen la fabricación de otros productos minerales no metálicos (23) y los hogares (h) con descensos en CO2 de más de 800.000.
El transporte terrestre y por tubería (49) y las industrias de papel (17), alimentación, fabricación de bebidas, tabaco (10-12), extractivas (05-09), metalurgia, transporte aéreo (51), administración pública (84) y agricultura, ganadería y caza (01) tienen una diferencia de contaminación de CO2 de más de 100000 en términos absolutos.
# En 2019, ¿Cuánto contaminaron el resto de sustancias en las actividades que tuvieron más incremento de CO2 respecto de 2008?
Estas actividades son las Industrias de la alimentación, fabricación de bebidas e industria del tabaco (10-12) y el Transporte aéreo (51).
```{r}
# Percentage change of each substance (2008 vs 2019) for the two activities
# with the largest CO2 increase; food-industry rows are negated so the two
# activities stack on opposite sides of the bar chart below.
co2_sustancias <- df_units[df_units$año %in% as.Date(c('2019-03-18','2008-03-18')) & df_units$actividad %in% c("10-12: Industrias de la alimentación, fabricación de bebidas e industria del tabaco","51: Transporte aéreo" ),]
co2_sustancias <- co2_sustancias %>% pivot_wider (names_from = año,values_from = total)
# Avoid division by zero in the percentage below.
co2_sustancias[co2_sustancias == 0] <- 0.00001
co2_sustancias <- co2_sustancias %>% mutate (per = round((abs(`2019-03-18` - `2008-03-18`)/ `2008-03-18` * 100 ),1))
co2_sustancias <-co2_sustancias %>% mutate(total = ifelse(actividad == "10-12: Industrias de la alimentación, fabricación de bebidas e industria del tabaco", per*(-1), per))
# NOTE(review): the sentinel replacements below and the hand-typed `values`
# label vector are tied to this exact data snapshot — they will silently
# mislabel the chart if the INE data is ever refreshed.
co2_sustancias[co2_sustancias == 999999900.0] <- 100
co2_sustancias[co2_sustancias == -700.0] <- -100
co2_sustancias[co2_sustancias == -109.6] <- -100
co2_sustancias$values <- c("+100","14","2.5","5.8","9","12","+100","100","73.1","22.7","0","0","30.3","12","34.2","12","51.3","19.2","11.6","12.7","29.2","61.2","38.2","25.9","15.7","12.3")
```
```{r fig.width=9, fig.height=5}
# Diverging horizontal bars: % change per substance, dark blue = food
# industry (left, negated), light blue = air transport (right).
color_act <- c(`10-12: Industrias de la alimentación, fabricación de bebidas e industria del tabaco` = mi_paleta [8],`51: Transporte aéreo` = mi_paleta [5])
ggplot(co2_sustancias, aes(x = reorder(sub("\\-.*", "", sustancia),desc(values)) , y = total, fill=actividad )) +
geom_col(position = "stack", width = 0.6, alpha = 0.8, show.legend = FALSE) +
geom_text(aes(label=paste0(values)),
position=position_stack(vjust = 0.5),
size=3,
color="white")+
labs(title = paste("Porcentaje de variación de sustancias en 2008 y 2019 \nen las actividades de alimentación y transporte aéreo respectivamente"),
caption = "Fuente: INE")+
# Axis labels are shown as absolute percentages on both sides.
scale_y_continuous (breaks=c(-100, -50, -25,0,25,50,100),
labels=c("100%","50%","25%","0","25%","50%","100%"))+
scale_fill_manual(values = color_act)+
coord_flip() +
theme_a() +
theme (axis.title.y=element_blank(),
axis.title.x=element_blank())
```
En las actividades de alimentación (celdas en azul oscuro), además del CO2, PFC supera con los mayores porcentajes de incremento de 2019 respecto a 2008.
Además, PFC también supera con el mayor porcentaje de incremento de 2019 respecto a 2008 en el transporte aéreo (celdas en azul claro).
Al haber cierta similitud en la variación de contaminación en estas actividades en CO2 y PFC,
# ¿Será similar la evolución de estas dos sustancias en las respectivas actividades?
```{r}
# Dual-axis line chart: CO2 (left axis) vs PFC (right axis) in the food
# industry; PFC is scaled x2500 so both series share the plotting range.
ggplot( ) +
geom_line( data= df_units[df_units$actividad == "10-12: Industrias de la alimentación, fabricación de bebidas e industria del tabaco" & df_units$sustancia == 'CO2 - Dióxido de carbono (toneladas)',],
aes(x = año, y=total , color = "CO2")) +
geom_line( data = df_units[df_units$actividad == "10-12: Industrias de la alimentación, fabricación de bebidas e industria del tabaco" & df_units$sustancia == "PFC - Perfluorocarbonos o compuestos polifluorcarbonados (toneladas de CO2 equivalente)" ,],
aes(x = año, y=total*2500, color = "PFC")) +
labs(title = "Evolución del C02 VS PFC en las industrias de la alimentación y tabaco",
x = "Año",
caption = "Fuente: INE") +
scale_y_continuous(name = "C02",
sec.axis = sec_axis(~./2500, name="PFC")) + # back-transform secondary axis (/2500)
scale_colour_manual('', values = c('PFC'='steelblue4', 'CO2'='red4')) +
theme_a() +
theme( legend.position = "top",
axis.title.y = element_text(color = "red4", size = 14),
axis.title.y.right = element_text(color = "steelblue4", size = 14),
axis.text.y = element_text(color = "red4"),
axis.text.y.right = element_text(color = "steelblue4")
)
```
Ambas sustancias, los Perfluorocarbonos (PFC) y el Dióxido de carbono (CO2), tienen una tendencia al alza a lo largo de los años. Sin embargo, sus caídas no suelen coincidir.
```{r}
# Same dual-axis comparison for air transport; PFC scaled x25000 here.
ggplot( ) +
geom_line( data= df_units[df_units$actividad == "51: Transporte aéreo" & df_units$sustancia == 'CO2 - Dióxido de carbono (toneladas)',],
aes(x = año, y=total , color = "CO2")) +
geom_line( data = df_units[df_units$actividad == "51: Transporte aéreo" & df_units$sustancia == "PFC - Perfluorocarbonos o compuestos polifluorcarbonados (toneladas de CO2 equivalente)" ,],
aes(x = año, y=total*25000, color = "PFC")) +
labs(title = "Evolución del C02 VS PFC en el transporte aéreo",
x = "Año",
caption = "Fuente: INE")+
scale_y_continuous(name = "C02",
sec.axis = sec_axis(~./25000, name="PFC")) + # back-transform secondary axis (/25000)
scale_colour_manual('', values = c('PFC'='steelblue4', 'CO2'='red4')) +
theme_a() +
theme( legend.position = "top",
axis.title.y = element_text(color = "red4", size = 14),
axis.title.y.right = element_text(color = "steelblue4", size = 14),
axis.text.y = element_text(color = "red4"),
axis.text.y.right = element_text(color = "steelblue4")
)
```
Ambas sustancias, los Perfluorocarbonos (PFC) y el Dióxido de carbono (CO2), tienen un pico en 2019. Sin embargo, las tendencias no coinciden ya que los PFC son más estables.
# Comparación de la contaminación de sustancias en los últimos 5 años VS años anteriores
```{r fig.width=9, fig.height=6,message=FALSE, warning=FALSE, paged.print=FALSE}
# Mean emissions (in thousands) per substance, split into the last 5 years
# ("new", from 2015) vs earlier years ("old"); dodged bar comparison.
color_act <- c(new= mi_paleta [8],old = mi_paleta [5])
new_old <- df[!df$actividad == 'TOTAL SUSTANCIA CONTAMINANTE' & !df$actividad == 'Total ramas de actividad',]
new_old$periodo <- ifelse(new_old$año < as.Date("2015-03-17"), "old", "new")
new_old <- new_old %>% group_by(sustancia, periodo) %>% summarise(media = mean(total)/ 1000, .groups = 'drop')
new_old$sustancia <- sub("\\-.*", "", new_old$sustancia)
new_old$sustancia[new_old$sustancia == "CO2 "] <- "CO2 en miles"
new_old$sustancia[new_old$sustancia == "PFC "] <- "PFC en miles"
new_old$sustancia[new_old$sustancia == "HFC "] <- "HFC en miles"
ggplot(data = new_old, aes(x=reorder(sustancia,media), y=media,fill= periodo)) +
geom_col ( position = position_dodge()) +
geom_text(aes(label = paste0(round(media,1),"mil"),
y= media),
position = position_dodge(width = 1), # dodge width must match the number of sub-bars
size = 3,
hjust = -0.4) +
labs(title = "Comparación de la contaminación de sustancias en los últimos 5 años VS años anteriores",
caption = "Fuente: INE")+
scale_fill_manual('',values = color_act)+
coord_flip() + # flips the display; the underlying x/y mappings do not change
theme_a() +
theme (legend.position="top",
axis.title.y=element_blank(),
axis.title.x=element_blank(),
panel.grid.major=element_blank(),
panel.grid.minor=element_blank())
```
Analizando los periodos de los últimos 5 años y los años anteriores se observa como no hay gran diferencia de emisiones en ninguna sustancia. La sustancia que tiene mayor diferencia de cantidades son los Óxidos de nitrógeno (NOx) con 2900 emisiones más en los años anteriores que en los últimos cinco años, seguido de los Óxidos de azufre (SOx) con 3100 emisiones más en los años anteriores.
Los años anteriores tienen más cantidad de sustancias contaminantes que en los últimos cinco años excepto para el Amoniaco (NH3) que supera las emisiones en los últimos cinco años. |
import { Show, show, WEEK_DAY } from "../model/Show";
import { BaseDatabase } from "./BaseDatabase";
/**
 * Data-access layer for the "Shows" table, built on the knex connection
 * exposed by BaseDatabase. Every method normalises SQL errors into plain
 * Error instances carrying the driver's sqlMessage when available.
 */
export class ShowDatabase extends BaseDatabase{
private static TABLE_NAME = "Shows";
/** Inserts a new show row, mapping camelCase model fields to snake_case columns. */
public async createShow(show:show):Promise<void> {
try {
await this.getConnection()
.insert({
id:show.id,
week_day:show.weekDay,
start_time:show.startTime,
end_time:show.endTime,
band_id:show.bandId
}).into(ShowDatabase.TABLE_NAME)
} catch (error:any) {
throw new Error(error.sqlMessage || error.message);
}
}
/**
 * Returns the first show matching the given week day and start time.
 * NOTE(review): despite the Promise<show> type, this resolves to undefined
 * when no row matches — callers must handle that case.
 */
public async getShowByHour(show:show):Promise<show> {
try {
const foundShow=await this.getConnection().
select('*')
.from(ShowDatabase.TABLE_NAME)
.where({"week_day":show.weekDay,"start_time":show.startTime})
return foundShow[0]
} catch (error:any) {
throw new Error(error.sqlMessage || error.message);
}
}
/**
 * Lists the bands (name and music genre only) playing on the given week day,
 * ordered by show start time.
 */
async getShowByDate(date:WEEK_DAY):Promise<any>{
try {
const show=await this.getConnection()
.join("Bandas", "Shows.band_id", "Bandas.id")
.select("Bandas.name", "Bandas.music_genre")
.from(ShowDatabase.TABLE_NAME)
.where("Shows.week_day", date)
.orderBy("Shows.start_time", "asc");
return show
} catch (error:any) {
throw new Error(error.sqlMessage || error.message);
}
}
/**
 * Fetches one show by id and maps the raw row through Show.toShowModel.
 * NOTE(review): an unknown id passes undefined to toShowModel — confirm that
 * helper tolerates it or guard here.
 */
async getShowById(id:string):Promise<Show>{
try {
const show=await this.getConnection()
.select("*")
.from(ShowDatabase.TABLE_NAME)
.where({id})
return Show.toShowModel(show[0])
} catch (error:any) {
throw new Error(error.sqlMessage || error.message);
}
}
}
import { apiSlice } from "../api/apiSlice";
import {
IOrderAmounts,
ISalesReport,
IMostSellingCategory,
IDashboardRecentOrders,
IGetAllOrdersRes,
IUpdateStatusOrderRes,
Order,
} from "@/types/order-amount-type";
// Order/dashboard endpoints injected into the shared RTK Query apiSlice.
// NOTE(review): exported as `authApi` although every endpoint here is
// order- or dashboard-related — confirm whether renaming is safe for callers.
export const authApi = apiSlice.injectEndpoints({
overrideExisting: true,
endpoints: (builder) => ({
// dashboard order-amount summary
getDashboardAmount: builder.query<IOrderAmounts, void>({
query: () => `/api/user-order/dashboard-amount`,
providesTags: ["DashboardAmount"],
keepUnusedDataFor: 600,
}),
// get sales report
getSalesReport: builder.query<ISalesReport, void>({
query: () => `/api/user-order/sales-report`,
providesTags: ["DashboardSalesReport"],
keepUnusedDataFor: 600,
}),
// get most-selling category
getMostSellingCategory: builder.query<IMostSellingCategory, void>({
query: () => `/api/user-order/most-selling-category`,
providesTags: ["DashboardMostSellingCategory"],
keepUnusedDataFor: 600,
}),
// get recent orders for the dashboard
getRecentOrders: builder.query<IDashboardRecentOrders, void>({
query: () => `/api/user-order/dashboard-recent-order`,
providesTags: ["DashboardRecentOrders"],
keepUnusedDataFor: 600,
}),
// get all orders
getAllOrders: builder.query<IGetAllOrdersRes, void>({
query: () => `/api/order/orders`,
providesTags: ["AllOrders"],
keepUnusedDataFor: 600,
}),
// get a single order by id
getSingleOrder: builder.query<Order, string>({
query: (id) => `/api/order/${id}`,
keepUnusedDataFor: 600,
}),
// update an order's status; invalidation refetches the order lists
updateStatus: builder.mutation<IUpdateStatusOrderRes, { id: string, status: { status: string } }>({
query({ id, status }) {
return {
url: `/api/order/update-status/${id}`,
method: "PATCH",
body: status,
};
},
invalidatesTags: ["AllOrders","DashboardRecentOrders"],
}),
}),
});
// React hooks auto-generated by RTK Query for the endpoints injected above.
export const {
useGetDashboardAmountQuery,
useGetSalesReportQuery,
useGetMostSellingCategoryQuery,
useGetRecentOrdersQuery,
useGetAllOrdersQuery,
useUpdateStatusMutation,
useGetSingleOrderQuery,
} = authApi;
from random import randint
from pygame.sprite import Sprite
import pygame
class Dice(Sprite):
    """A single die sprite that can be rolled, held, and drawn on screen."""

    def __init__(self, main_game, id):
        """Initialise the die.

        Args:
            main_game: Game object exposing the pygame ``screen`` surface.
            id: Zero-based index of this die; also fixes its horizontal slot
                when drawn.
        """
        # Face value -> sprite path; extra keys hold animation frames.
        self.mysprites = {
            0: 'sprites/dice0.bmp',
            1: 'sprites/dice1.bmp',
            2: 'sprites/dice2.bmp',
            3: 'sprites/dice3.bmp',
            4: 'sprites/dice4.bmp',
            5: 'sprites/dice5.bmp',
            6: 'sprites/dice6.bmp',
            'mixing': 'sprites/rolling-dice-cup.bmp',
            'throwing': 'sprites/cubes.bmp',
        }
        super().__init__()
        self.sides = 6
        self.id = id
        self.is_held = False
        self.value = 0  # last rolled face; 0 means "not rolled yet"
        self.screen = main_game.screen
        self.screen_rect = main_game.screen.get_rect()
        # Load the blank die image and position its rect at the screen's left edge.
        self.image = pygame.image.load(self.mysprites[0])
        self.rect = self.image.get_rect(center=self.screen.get_rect().center)
        self.rect.midleft = self.screen_rect.midleft
        self.x = self.rect.x
        self.y = self.rect.y
        self.width = self.rect.width
        self.height = self.rect.height

    def roll(self):
        """Roll the die and return its face value (None when the die is held).

        Bug fix: the docstring promised a return value but the method returned
        None unconditionally; it now returns the rolled face and records it in
        ``self.value`` so callers can score without re-reading the sprite.
        """
        if self.is_held:
            return None
        self.value = randint(1, self.sides)
        self.image = pygame.image.load(self.mysprites[self.value])
        return self.value

    def hold(self):
        """Toggle the held state; a held die keeps its face across rolls."""
        self.is_held = not self.is_held

    def draw_die(self):
        """Draw the die at its fixed slot, with a highlight border if held."""
        self.x = self.id * 300 + 50  # horizontal slot derived from the die id
        self.y = 125
        amt = 20  # highlight border padding in pixels
        # Highlight dice that are held (FEATURE)
        if self.is_held:
            pygame.draw.rect(self.screen, (151, 74, 181),
                             (self.x - amt/2, self.y - amt/2,
                              self.width + amt, self.height + amt), 10)
        self.screen.blit(self.image, (self.x, self.y))
import Box from "@mui/material/Box";
import Grid from "@mui/material/Grid";
import bg from "./bg/forgetPassword.svg";
import bgimg from "./bg/backimg.jpg";
import Button from "@mui/material/Button";
import TextField from "@mui/material/TextField";
import Typography from "@mui/material/Typography";
import Container from "@mui/material/Container";
import Avatar from "@mui/material/Avatar";
import LockOutlinedIcon from "@mui/icons-material/LockOutlined";
import { ThemeProvider, createTheme } from "@mui/material/styles";
import Checkbox from "@mui/material/Checkbox";
import FormControlLabel from "@mui/material/FormControlLabel";
import { useState, forwardRef, useEffect } from "react";
import Snackbar from "@mui/material/Snackbar";
import Stack from "@mui/material/Stack";
// import MuiAlert from "@mui/material/Alert";
import Slide from "@mui/material/Slide";
import { useNavigate, useParams } from "react-router-dom";
import { useForm } from "react-hook-form";
import {toast} from 'react-toastify';
import 'react-toastify/dist/ReactToastify.css';
// const Alert = forwardRef(function Alert(props, ref) {
// return <MuiAlert elevation={6} ref={ref} variant="filled" {...props} />;
// });
// MUI theme forcing the dark palette for the form side of the page.
const darkTheme = createTheme({
palette: {
mode: "dark",
},
});
// Card container style: absolutely centered in the viewport at 75% x 70%.
const boxstyle = {
position: "absolute",
top: "50%",
left: "50%",
transform: "translate(-50%, -50%)",
width: "75%",
height: "70%",
bgcolor: "background.paper",
boxShadow: 24,
};
// Rough positioning for the avatar/title cluster in the right-hand column.
const center = {
position: "relative",
top: "50%",
left: "37%",
};
export default function ForgetPassword() {
const {email, otp} = useParams();
const [open, setOpen] = useState(false);
const [remember, setRemember] = useState(false);
const vertical = "top";
const horizontal = "right";
const navigate = useNavigate();
const {
register,
handleSubmit,
// watch,
formState: { errors },
} = useForm();
const onSubmit = async (e) => {
console.log(e);
const {password, cPassword} = e
const res = await fetch(`/api/resetPassword/${email}/${otp}`, {
method : "post",
headers: {
"content-type": "application/json",
},
body: JSON.stringify({
password, cPassword
})
});
const data = await res.json();
console.log(data);
if(data.message)
{
toast.success(data.message);
navigate("/login");
}
else
{
toast.error(data.error);
navigate(`/verifyOTP/${email}`);
}
};
const handleClose = (event, reason) => {
if (reason === "clickaway") {
return;
}
setOpen(false);
};
function TransitionLeft(props) {
return <Slide {...props} direction="left" />;
}
return (
<>
<div
style={{
backgroundImage: `url(${bgimg})`,
backgroundSize: "cover",
height: "100vh",
color: "#f5f5f5",
}}
>
<Box sx={boxstyle}>
<Grid container>
<Grid item xs={12} sm={12} lg={6}>
<Box
style={{
backgroundImage: `url(${bg})`,
backgroundSize: "cover",
marginTop: "40px",
marginLeft: "15px",
marginRight: "15px",
height: "63vh",
color: "#f5f5f5",
}}
></Box>
</Grid>
<Grid item xs={12} sm={12} lg={6}>
<Box
style={{
backgroundSize: "cover",
height: "70vh",
minHeight: "500px",
backgroundColor: "#3b33d5",
}}
>
<ThemeProvider theme={darkTheme}>
<Container>
<Box height={35} />
<Box sx={center}>
<Avatar
sx={{ ml: "35px", mb: "4px", bgcolor: "#ffffff" }}
>
<LockOutlinedIcon />
</Avatar>
<Typography sx={{ml:"-35px", mt:"5px", mb:'10px'}} component="h1" variant="h5">
New Password
</Typography>
</Box>
<Box sx={{ mt: 2 }} />
<form onSubmit={handleSubmit(onSubmit)}>
<Grid container spacing={1}>
<Grid item xs={12} sx={{ ml: "3em", mr: "3em" }}>
<TextField
fullWidth
{...register("password", { required: true })}
name="password"
label="Password"
type="password"
id="password"
autoComplete="new-password"
/>
{errors.password && (
<span
style={{ color: "#f7d643", fontSize: "12px" }}
>
This field is required
</span>
)}
</Grid>
<Grid item xs={12} sx={{ ml: "3em", mr: "3em" }}>
<TextField
fullWidth
{...register("cPassword", { required: true })}
name="cPassword"
label="Confirm Password"
type="password"
id="password"
autoComplete="new-password"
/>
{errors.cPassword && (
<span
style={{ color: "#f7d643", fontSize: "12px" }}
>
This field is required
</span>
)}
</Grid>
<Grid item xs={12} sx={{ ml: "5em", mr: "5em" }}>
<Button
type="submit"
variant="contained"
fullWidth={true}
size="large"
sx={{
mt: "10px",
mr: "20px",
borderRadius: 28,
color: "#ffffff",
minWidth: "170px",
backgroundColor: "#FF9A01",
}}
>
Submit
</Button>
</Grid>
</Grid>
</form>
</Container>
</ThemeProvider>
</Box>
</Grid>
</Grid>
</Box>
</div>
</>
);
} |
<?php
namespace App\Http\Controllers;
use App\Models\User;
use Illuminate\Http\Request;
use Illuminate\Support\Facades\DB;
use Spatie\Permission\Models\Role;
use Illuminate\Support\Facades\Auth;
use Illuminate\Support\Facades\Hash;
class UserController extends Controller
{
    /**
     * Every action in this controller requires an authenticated user.
     */
    public function __construct()
    {
        $this->middleware('auth');
    }

    /**
     * Show the account/settings page matching the current user's permissions.
     */
    public function profile(){
        if (Auth::user()->hasPermissionTo('access admin page')) {
            return view('Pages.Account.settings');
        } else {
            return view('Pages.Mobile.MobileProfile');
        }
    }

    /**
     * Update basic profile data (name, phone, optional email/avatar).
     *
     * NOTE(review): the target user comes from route-model binding; confirm the
     * route/policy restricts this to the authenticated user's own profile.
     */
    public function updateProfile(Request $request, User $user){
        $rules = [
            'name' => 'required|alpha|max:25',
            'last_name' => 'required|alpha|max:25',
            'phone' => 'required|numeric|max_digits:25',
        ];
        // Only validate (and later update) the email when it actually changed,
        // otherwise `unique:users` would reject the user's own address.
        if($request->email != $user->email)
        {
            $rules['email'] = 'required|string|email|unique:users';
        }
        // Avatar is optional; validate it only when a file was uploaded.
        if($request->file('avatar'))
        {
            $rules['avatar'] = 'image|file';
        }
        $validated = $request->validate($rules);
        DB::transaction(function() use ($validated, $user){
            $user->name = $validated['name'];
            $user->last_name = $validated['last_name'];
            $user->phone = $validated['phone'];
            if(isset($validated['email'])){
                $user->email = $validated['email'];
            }
            if(isset($validated['avatar'])){
                // NOTE(review): prefer config('app.url') over env() outside of
                // config files — env() returns null once config is cached.
                $user->avatar_path = env('APP_URL')."/storage/".$validated['avatar']->store('avatars');
            }
            $user->save();
        });
        $redirect = redirect()->route("account.settings");
        return $redirect->with([
            'message' => "Profile has been Updated",
            'success' => true,
        ]);
    }

    /**
     * Change the user's password after verifying the current one.
     */
    public function updatePassword(Request $request, User $user){
        $validated = $request->validate([
            // Bug fix: 'required' was missing, so an absent current password
            // bypassed the current-password verification entirely.
            'current_password' => ['required', 'current_password'],
            'password' => ['required', 'string', 'min:8', 'confirmed'],
        ]);
        DB::transaction(function() use ($validated, $user){
            $user->password = Hash::make($validated['password']);
            $user->save();
        });
        $redirect = redirect()->route("account.settings");
        return $redirect->with([
            'message' => "Password has been Updated",
            'success' => true,
        ]);
    }

    // Customers CRUD-----------------------------------------------------------------------------

    /**
     * List customers ordered by order count, with per-customer revenue totals.
     */
    public function customers()
    {
        $customers = User::role(['user'])->withCount('order as total_order')->withSum('order as revenue','grand_total')->orderBy('total_order','DESC')->get();
        return view('Pages.Users.customers', compact('customers'));
    }

    /**
     * Show the edit form for a single customer.
     */
    public function editCustomer(User $user)
    {
        $edit = true;
        return view('Pages.Users.customer-details', compact('user', 'edit'));
    }

    // Employee CRUD------------------------------------------------------------------------------

    /**
     * List all staff users (admins, drivers, employees).
     */
    public function employee(){
        $employees = User::role(['admin','driver','employee'])->get();
        return view('Pages.Users.employees', compact('employees'));
    }

    /**
     * Show the employee creation form with every non-customer role.
     */
    public function createEmployee(){
        $roles = Role::whereNotIn('name', ['user'])->pluck('name');
        return view('Pages.Users.employees-new', compact('roles'));
    }

    /**
     * Validate and persist a new employee, assigning the chosen role.
     */
    public function storeEmployee(Request $request){
        $validated = $request->validate([
            'name' => 'required|alpha|max:25',
            'last_name' => 'required|alpha|max:25',
            'role' => 'required',
            'phone' => 'required|numeric|max_digits:25',
            'email' => 'required|string|email|unique:users',
            'password' => ['required', 'string', 'min:8', 'confirmed'],
            'avatar' => 'image|file'
        ]);
        DB::transaction(function() use ($validated){
            // Fall back to the bundled placeholder avatar when none was uploaded.
            if(isset($validated['avatar'])){
                $validated['avatar_path'] = $validated['avatar']->store('avatars');
            }
            else{
                $validated['avatar_path'] = 'assets/img/avatar.png';
            }
            $user = User::create([
                'name' => $validated['name'],
                'last_name' => $validated['last_name'],
                'phone' => $validated['phone'],
                'email' => $validated['email'],
                'password' => Hash::make($validated['password']),
                'avatar_path' => env('APP_URL')."/storage/".$validated['avatar_path'],
            ]);
            $user->assignRole($validated['role']);
        });
        $redirect = redirect()->route("users.employees");
        return $redirect->with([
            'message' => "Employee has been added",
            'success' => true,
        ]);
    }

    /**
     * Show the employee edit form (reuses the creation view).
     */
    public function editEmployee(User $user)
    {
        $roles = Role::whereNotIn('name', ['user'])->pluck('name');
        $edit = true;
        return view('Pages.Users.employees-new', compact('user', 'edit', 'roles'));
    }

    /**
     * Validate and apply updates to an existing employee.
     */
    public function updateEmployee(Request $request, User $user)
    {
        $rules = [
            'name' => 'required|alpha|max:25',
            'last_name' => 'required|alpha|max:25',
            'phone' => 'required|numeric|max_digits:25',
        ];
        // Only validate fields that are actually being changed/supplied.
        if($request->email != $user->email)
        {
            $rules['email'] = 'required|string|email|unique:users';
        }
        if (isset($request->password)) {
            $rules['password'] = ['required', 'string', 'min:8', 'confirmed'];
        }
        if($request->file('avatar'))
        {
            $rules['avatar'] = 'image|file';
        }
        $validated = $request->validate($rules);
        // Bug fix: roles used to be synced *before* validation, so a request
        // that failed validation still changed the user's roles.
        if (isset($request->role)) {
            $user->syncRoles($request->role);
        }
        DB::transaction(function() use ($validated, $user){
            $user->name = $validated['name'];
            $user->last_name = $validated['last_name'];
            $user->phone = $validated['phone'];
            if(isset($validated['email'])){
                $user->email = $validated['email'];
            }
            if(isset($validated['password'])){
                $user->password = Hash::make($validated['password']);
            }
            if(isset($validated['avatar'])){
                $user->avatar_path = env('APP_URL')."/storage/".$validated['avatar']->store('avatars');
            }
            $user->save();
        });
        $redirect = redirect()->route("users.employees");
        return $redirect->with([
            'message' => "Employee has been Updated",
            'success' => true,
        ]);
    }
}
import { CosmWasmClient } from '@cosmjs/cosmwasm-stargate'
import { StargateClient } from '@cosmjs/stargate'
type ChainClientRoutes<T> = {
[rpcEndpoint: string]: T
}
type HandleConnect<T> = (rpcEndpoint: string) => Promise<T>
/*
* This is a workaround for `@cosmjs` clients to avoid connecting to the chain more than once.
*
* @example
* export const stargateClientRouter = new ChainClientRouter({
* handleConnect: (rpcEndpoint: string) => StargateClient.connect(rpcEndpoint),
* })
*
* const client = await stargateClientRouter.connect(RPC_ENDPOINT);
*
* const queryResponse = await client.queryContractSmart(...);
* */
class ChainClientRouter<T> {
  private readonly handleConnect: HandleConnect<T>
  // Cache the *pending promise* rather than the resolved client so that
  // concurrent `connect()` calls for the same endpoint share one in-flight
  // connection instead of each opening their own (the original awaited before
  // caching, leaving a race window).
  private instances: ChainClientRoutes<Promise<T>> = {}

  constructor({ handleConnect }: { handleConnect: HandleConnect<T> }) {
    this.handleConnect = handleConnect
  }

  /*
   * Connect to the chain and return the client
   * or return an existing instance of the client.
   * */
  async connect(rpcEndpoint: string): Promise<T> {
    if (!this.getClientInstance(rpcEndpoint)) {
      // Store the promise synchronously (before any await) to close the race.
      const pending = this.handleConnect(rpcEndpoint).catch((err) => {
        // Do not cache failed connections; let the next call retry.
        delete this.instances[rpcEndpoint]
        throw err
      })
      this.setClientInstance(rpcEndpoint, pending)
    }
    return this.getClientInstance(rpcEndpoint)
  }

  private getClientInstance(rpcEndpoint: string) {
    return this.instances[rpcEndpoint]
  }

  private setClientInstance(rpcEndpoint: string, client: Promise<T>) {
    this.instances[rpcEndpoint] = client
  }
}
/*
 * Router for connecting to `CosmWasmClient`.
 * Module-level singleton: every importer shares the same client cache.
 * */
export const cosmWasmClientRouter = new ChainClientRouter({
  handleConnect: (rpcEndpoint: string) => CosmWasmClient.connect(rpcEndpoint),
})
/*
 * Router for connecting to `StargateClient`.
 * Module-level singleton: every importer shares the same client cache.
 * */
export const stargateClientRouter = new ChainClientRouter({
  handleConnect: (rpcEndpoint: string) => StargateClient.connect(rpcEndpoint),
})
# ⚙️ Mindsync Backend Application
## ⭐ Introduction
The MindSync Backend Application is a powerful component of the MindSync platform. It is written in Kotlin 1.9 and
runs on the Java 17 SDK. This application is responsible for managing all the server-side operations associated
with MindSync.
This application takes advantage of Spring WebFlux, a reactive-stack web framework that is part of Spring
Framework 5+, to handle asynchronous and non-blocking operations. It is especially useful in environments that
support high volumes of concurrent users or in microservice architectures, making MindSync a robust and scalable application.
The backend implements all the business rules and logic of the application. Every action initiated from the user
interface of MindSync triggers a chain of operations on the server side, processed and managed by the MindSync Backend
Application.
The application also handles data storage and retrieval operations, managing user data, notes, resources, and other
relevant information, using a secure and efficient methodology. The design ensures a quick response time and a seamless
user experience.
Finally, the MindSync Backend Application integrates smoothly with various services and APIs, facilitating an enriched
feature set like real-time updates, external resources access, and many more to enhance knowledge management for users.
This powerhouse application sits in the back, diligently managing and regulating every minor to major operation,
ensuring the efficiency and effectiveness of MindSync.
### 🚀 Getting Started
To get started with the MindSync Backend Application, you need to have the following prerequisites installed on your
system:
- Java 17 SDK or higher. You can download the latest version of Java
from [here](https://www.oracle.com/java/technologies/downloads/) or use [SDKMAN](https://sdkman.io/) to install it.
- Gradle 8.2.1 or higher (optional). You can download the latest version of Gradle
from [here](https://gradle.org/releases/) or use [SDKMAN](https://sdkman.io/) to install it.
- Docker 20.10.8 or higher (optional). You can download the latest version of Docker
from [here](https://www.docker.com/products/docker-desktop).
### 🛫 Running the Application
To run the MindSync Backend Application, follow these steps:
1. Clone the repository to your local machine. You can use the following command to clone the repository:
```shell
git clone https://github.com/yacosta738/mindsync.git
```
2. Run the following command to run the application:
- You could use Gradle to run the application:
```shell
docker-compose -f infra/docker-compose.yml up -d
./gradlew bootRun
```
- Or you could use make to run the application:
```shell
make up start-backend-app
```
3. The application will be available at [http://localhost:8080](http://localhost:8080).
4. You can access the Swagger UI at [http://localhost:8080/swagger-ui.html](http://localhost:8080/swagger-ui.html).
5. You can access the Neo4j Browser at [http://localhost:7474/browser/](http://localhost:7474/browser/).
### 🏗️ Building the Application
To build the MindSync Backend Application, follow these steps:
1. Clone the repository to your local machine. You can use the following command to clone the repository:
```shell
git clone https://github.com/yacosta738/mindsync.git
```
2. Run the following command to build the application:
- You could use Gradle to build the application:
```shell
./gradlew build
```
- Or you could use make to build the application:
```shell
make all
```
3. The application will be built in the `build/libs` directory.
4. You can run the application using the following command:
```shell
java -jar build/libs/mindsync-0.0.1-SNAPSHOT.jar
```
5. You can also run the application using Docker. To do so, run the following command:
```shell
docker-compose up
```
6. The application will be available at [http://localhost:8080](http://localhost:8080).
7. You can access the Swagger UI at [http://localhost:8080/api-docs](http://localhost:8080/api-docs).
### 🧪 Testing the Application
To test the MindSync Backend Application, follow these steps:
1. Run the following command to test the application:
- You could use Gradle to test the application:
```shell
./gradlew test
```
- Or you could use make to test the application:
```shell
make test
```
**Note**: You need to have the docker environment running to test the application.
2. Test reports will be generated in the `build/reports/tests/test` directory.
#### 🧪 Testing the Application with Coverage
You can execute the following command to test the application with coverage:
```shell
./gradlew clean check sonar jacocoTestReport aggregateReports
```
### 📄 Documentation
The MindSync Backend Application is documented using Swagger. You can access the Swagger UI at [http://localhost:8080/api-docs](http://localhost:8080/api-docs) and the Swagger JSON at [http://localhost:8080/v3/api-docs](http://localhost:8080/v3/api-docs).
### 🐉 Why Spring Webflux?
There are several reasons why you might choose to use WebFlux (Project Reactor) instead of the traditional Spring MVC in
your application:
1. Non-blocking: WebFlux is based on a non-blocking programming model, meaning it can handle a large number of
concurrent
requests without the need for thread blocking. This can result in better performance and scalability for
high-concurrency applications.
2. Reactive programming: WebFlux is built on top of Project Reactor, a reactive programming library. This allows you to
write code that responds to changes in data streams in a more efficient and predictable way.
3. Full asynchronous support: WebFlux supports both asynchronous and synchronous request handling. It allows you to
fully
leverage the benefits of asynchronous programming, such as better resource utilization and lower latency.
4. Better error handling: WebFlux has built-in support for handling errors and exceptions in a more consistent and
efficient way.
5. Improved testability: WebFlux's non-blocking and reactive programming model makes it easier to test, especially when
it comes to testing asynchronous code.
That being said, Spring MVC is still a good choice for many types of applications, especially those with simpler
requirements or where performance is not as critical.
### 🚀 Why Kotlin?
Kotlin is a robust and modern language that offers multiple advantages over languages like Java. Here are some key
reasons to consider Kotlin:
- **Interoperability with Java**: The primary benefit is that it is entirely interoperable with Java, which means you
can leverage all existing Java libraries, frameworks, and even use your existing Java code.
- **Flexibility**: Kotlin is a statically typed language like Java, but it’s also flexible like a dynamically typed
language. This gives developers room for more creative solutions.
- **Null safety**: Kotlin provides inherent null safety. This is a big productivity boost since NullPointerException is
a common runtime error in several languages, including Java.
- **Concise syntax**: The syntax is more concise than that of Java. This makes it easier to read and write Kotlin code.
- **Scripting**: Apart from being a statically-typed programming language, Kotlin can also be used as a scripting
language.
### 💠 Why Neo4j (Graph Database)?
Neo4j is a popular Graph Database management system known for its efficiency and versatile application across various
domains. Here are some reasons why Neo4j is highly recommended:
- **Relationship-Focused Data Model**: The property graph model in Neo4j enables the representation of rich
relationships between data, allowing complex queries to be carried out efficiently. It is excellent for managing
interconnected data.
- **High Performance**: Neo4j offers high performance for both read and write operations. It is known for fast querying
times due to the efficient storage and processing of data in graph structures.
- **Scalability**: Neo4j supports high levels of scalability without compromising the integrity and performance of a
graph database system. It allows sharding and distributed processing for large-scale deployments.
- **Cypher Query Language**: Neo4j uses Cypher, a declarative, SQL-inspired language for describing patterns in graphs
visually. It makes querying and handling of graph data easy and intuitive.
- **Mature Ecosystem**: Neo4j has a large community and a mature ecosystem. There are a lot of resources, tools, and
drivers available across various programming languages for working with Neo4j.
- **Integrated Visualization Tools**: Neo4j provides powerful integrated visualization tools that allow real-time
querying of data and the ability to see the shape of the data in a readily understandable graphical format.
## 💻 Technologies
- 🌱 SpringBoot
- 🐉 WebFlux (Reactive)
- 🔑 Spring Security
- 💠 SDN (Spring Data Neo4j)
- ✔︎ Actuator
- 🧑🏻💻 Spring Boot DevTools
- ⚙️ Spring Boot Configuration Processor
- 🚀 Kotlin
- 💠 Neo4j (Graph Database)
- 🛠️ Gradle (Build Tool)
- 🐳 Docker (Containerization)
- 🧪 JUnit (Testing)
- ◈ TestContainers (Testing)
- 🕳️ Mockito (Testing)
- 📄 Swagger (API Documentation)
- 📋 Logback (Logging) |
import React from 'react'
import { faker } from '@faker-js/faker'
import { render } from '@testing-library/react'
import {
Heading,
Layout,
PageContent,
Paragraph,
Sidebar,
SideBarList,
SideBarListItem,
SideBarListItemBottom,
SpacedContainer,
SpacedSidebarContainer,
} from '../Layout'
// <Layout>: outer wrapper; renders children in a `.layout` element whose tag
// defaults to <div> and can be overridden via the `element` prop.
describe('Layout', () => {
  it('displays its children', () => {
    const text = faker.lorem.paragraph()
    const { baseElement } = render(
      <Layout>
        <div>{text}</div>
      </Layout>,
    )
    expect(baseElement).toContainHTML(`<div>${text}</div>`)
  })
  it('renders as a div by default', () => {
    const text = faker.lorem.paragraph()
    const { baseElement } = render(
      <Layout>
        <span>{text}</span>
      </Layout>,
    )
    const layout = baseElement.querySelector('.layout')
    expect(layout).not.toBeNull()
    expect(layout?.tagName).toEqual('DIV')
  })
  it('renders as a main tag when specified', () => {
    const text = faker.lorem.paragraph()
    const { baseElement } = render(
      <Layout element="main">
        <span>{text}</span>
      </Layout>,
    )
    const layout = baseElement.querySelector('.layout')
    expect(layout).not.toBeNull()
    expect(layout?.tagName).toEqual('MAIN')
  })
})
// <PageContent>: `.page-content` container with the same element-override and
// className pass-through behavior as <Layout>.
describe('PageContent', () => {
  it('displays its children', () => {
    const text = faker.lorem.paragraph()
    const { baseElement } = render(
      <PageContent>
        <div>{text}</div>
      </PageContent>,
    )
    expect(baseElement).toContainHTML(`<div>${text}</div>`)
  })
  it('renders as a div by default', () => {
    const text = faker.lorem.paragraph()
    const { baseElement } = render(
      <PageContent>
        <span>{text}</span>
      </PageContent>,
    )
    const pageContent = baseElement.querySelector('.page-content')
    expect(pageContent).not.toBeNull()
    expect(pageContent?.tagName).toEqual('DIV')
  })
  it('renders as a main tag when specified', () => {
    const text = faker.lorem.paragraph()
    const { baseElement } = render(
      <PageContent element="main">
        <span>{text}</span>
      </PageContent>,
    )
    const pageContent = baseElement.querySelector('.page-content')
    expect(pageContent).not.toBeNull()
    expect(pageContent?.tagName).toEqual('MAIN')
  })
  it('accepts a class name', () => {
    const text = faker.lorem.paragraph()
    const { baseElement } = render(
      <PageContent className="my-class" element="main">
        <div>{text}</div>
      </PageContent>,
    )
    const pageContent = baseElement.querySelector('.my-class')
    expect(pageContent).not.toBeNull()
    expect(pageContent?.tagName).toEqual('MAIN')
  })
})
// <Sidebar>: renders children under a fixed "Email Builder (Beta)" title.
describe('Sidebar', () => {
  it('displays its children', () => {
    const text = faker.lorem.paragraph()
    const { baseElement } = render(
      <Sidebar>
        <div>{text}</div>
      </Sidebar>,
    )
    expect(baseElement).toContainHTML(`<div>${text}</div>`)
  })
  it('displays a title', () => {
    const text = faker.lorem.paragraph()
    const { baseElement } = render(
      <Sidebar>
        <div>{text}</div>
      </Sidebar>,
    )
    const sidebarTitle = baseElement.querySelector('.sidebar-title')
    expect(sidebarTitle).not.toBeNull()
    expect(sidebarTitle).toHaveTextContent('Email Builder (Beta)')
  })
  it('accepts a class name', () => {
    const text = faker.lorem.paragraph()
    const { baseElement } = render(
      <Sidebar className="my-class">
        <div>{text}</div>
      </Sidebar>,
    )
    const sidebar = baseElement.querySelector('.my-class')
    expect(sidebar).toContainHTML(`<div>${text}</div>`)
  })
})
// <SideBarList>: simple list wrapper for sidebar entries.
describe('SideBarList', () => {
  it('displays its children', () => {
    const text = faker.lorem.paragraph()
    const { baseElement } = render(
      <SideBarList>
        <div>{text}</div>
      </SideBarList>,
    )
    expect(baseElement).toContainHTML(`<div>${text}</div>`)
  })
})
// <SideBarListItem>: <li> with the `sidebar-list-item` class plus any extra
// className passed in.
describe('SideBarListItem', () => {
  it('displays its children', () => {
    const text = faker.lorem.paragraph()
    const { baseElement } = render(
      <SideBarListItem>
        <div>{text}</div>
      </SideBarListItem>,
    )
    expect(baseElement).toContainHTML(`<div>${text}</div>`)
  })
  it('accepts a className', () => {
    const { baseElement } = render(
      <SideBarListItem className="my-class">
        <div />
      </SideBarListItem>,
    )
    expect(baseElement).toContainHTML('<li class="sidebar-list-item my-class"><div></div></li>')
  })
})
// <SideBarListItemBottom>: bottom-aligned slot inside a sidebar list item.
describe('SideBarListItemBottom', () => {
  it('displays its children', () => {
    const text = faker.lorem.paragraph()
    const { baseElement } = render(
      <SideBarListItemBottom>
        <div>{text}</div>
      </SideBarListItemBottom>,
    )
    expect(baseElement).toContainHTML(`<div>${text}</div>`)
  })
})
describe('Heading', () => {
it('displays its children', () => {
const text = faker.lorem.paragraph()
const { baseElement } = render(
<Heading element="h1">
<div>{text}</div>
</Heading>,
)
expect(baseElement).toContainHTML(`<div>${text}</div>`)
})
it('renders as the given element (h1)', () => {
const text = faker.lorem.paragraph()
const { baseElement } = render(
<Heading element="h1">
<div>{text}</div>
</Heading>,
)
expect(baseElement.querySelector('h1')).not.toBeNull()
expect(baseElement.querySelector('h2')).toBeNull()
})
it('renders as the given element (h2)', () => {
const text = faker.lorem.paragraph()
const { baseElement } = render(
<Heading element="h2">
<div>{text}</div>
</Heading>,
)
expect(baseElement.querySelector('h1')).toBeNull()
expect(baseElement.querySelector('h2')).not.toBeNull()
})
it('accepts a subheading prop', () => {
const text = faker.lorem.paragraph()
const { baseElement } = render(
<Heading element="h1" subheading>
<div>{text}</div>
</Heading>,
)
const heading = baseElement.querySelector('h1.subheading')
})
})
// <Paragraph>: <p> with the `paragraph` class plus any extra className.
describe('Paragraph', () => {
  it('displays its children', () => {
    const text = faker.lorem.paragraph()
    const { baseElement } = render(
      <Paragraph>
        <span>{text}</span>
      </Paragraph>,
    )
    const paragraph = baseElement.querySelector('p')
    expect(paragraph).not.toBeNull()
    expect(paragraph).toContainHTML(`<span>${text}</span>`)
  })
  it('accepts a className', () => {
    const text = faker.lorem.paragraph()
    const { baseElement } = render(
      <Paragraph className="my-class">
        <span>{text}</span>
      </Paragraph>,
    )
    const paragraph = baseElement.querySelector('p')
    expect(paragraph).not.toBeNull()
    expect(paragraph?.className).toEqual('paragraph my-class')
  })
})
// <SpacedContainer>: `.spaced-container` wrapper.
describe('SpacedContainer', () => {
  it('displays its children', () => {
    const text = faker.lorem.paragraph()
    const { baseElement } = render(
      <SpacedContainer>
        <span>{text}</span>
      </SpacedContainer>,
    )
    const container = baseElement.querySelector('.spaced-container')
    expect(container).not.toBeNull()
    expect(container).toContainHTML(`<span>${text}</span>`)
  })
})
// <SpacedSidebarContainer>: `.spaced-sidebar-container` wrapper that also
// forwards className.
describe('SpacedSidebarContainer', () => {
  it('displays its children', () => {
    const text = faker.lorem.paragraph()
    const { baseElement } = render(
      <SpacedSidebarContainer>
        <span>{text}</span>
      </SpacedSidebarContainer>,
    )
    const container = baseElement.querySelector('.spaced-sidebar-container')
    expect(container).not.toBeNull()
    expect(container).toContainHTML(`<span>${text}</span>`)
  })
  it('accepts a className', () => {
    const { baseElement } = render(
      <SpacedSidebarContainer className="my-class">
        <span />
      </SpacedSidebarContainer>,
    )
    const container = baseElement.querySelector('.spaced-sidebar-container.my-class')
    expect(container).not.toBeNull()
  })
})
use clippy_config::msrvs::{self, Msrv};
use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::source::snippet_with_context;
use clippy_utils::sugg::Sugg;
use clippy_utils::ty;
use rustc_errors::Applicability;
use rustc_hir::{BinOpKind, Expr, ExprKind};
use rustc_lint::{LateContext, LateLintPass};
use rustc_session::impl_lint_pass;
use rustc_span::source_map::Spanned;
use rustc_span::sym;
// Lint declaration: flags `Instant::now() - earlier` in favor of `.elapsed()`.
declare_clippy_lint! {
    /// ### What it does
    /// Lints subtraction between `Instant::now()` and another `Instant`.
    ///
    /// ### Why is this bad?
    /// It is easy to accidentally write `prev_instant - Instant::now()`, which will always be 0ns
    /// as `Instant` subtraction saturates.
    ///
    /// `prev_instant.elapsed()` also more clearly signals intention.
    ///
    /// ### Example
    /// ```no_run
    /// use std::time::Instant;
    /// let prev_instant = Instant::now();
    /// let duration = Instant::now() - prev_instant;
    /// ```
    /// Use instead:
    /// ```no_run
    /// use std::time::Instant;
    /// let prev_instant = Instant::now();
    /// let duration = prev_instant.elapsed();
    /// ```
    #[clippy::version = "1.65.0"]
    pub MANUAL_INSTANT_ELAPSED,
    pedantic,
    "subtraction between `Instant::now()` and previous `Instant`"
}
// Lint declaration: flags `instant - duration` in favor of `checked_sub`.
declare_clippy_lint! {
    /// ### What it does
    /// Lints subtraction between an `Instant` and a `Duration`.
    ///
    /// ### Why is this bad?
    /// Unchecked subtraction could cause underflow on certain platforms, leading to
    /// unintentional panics.
    ///
    /// ### Example
    /// ```no_run
    /// # use std::time::{Instant, Duration};
    /// let time_passed = Instant::now() - Duration::from_secs(5);
    /// ```
    ///
    /// Use instead:
    /// ```no_run
    /// # use std::time::{Instant, Duration};
    /// let time_passed = Instant::now().checked_sub(Duration::from_secs(5));
    /// ```
    #[clippy::version = "1.67.0"]
    pub UNCHECKED_DURATION_SUBTRACTION,
    pedantic,
    "finds unchecked subtraction of a 'Duration' from an 'Instant'"
}
/// Lint pass state: carries the configured MSRV used to gate suggestions.
pub struct InstantSubtraction {
    msrv: Msrv,
}

impl InstantSubtraction {
    /// Creates the pass with the crate's configured MSRV.
    #[must_use]
    pub fn new(msrv: Msrv) -> Self {
        InstantSubtraction { msrv }
    }
}

impl_lint_pass!(InstantSubtraction => [MANUAL_INSTANT_ELAPSED, UNCHECKED_DURATION_SUBTRACTION]);
// Entry point for both lints: inspect every binary subtraction expression and
// dispatch on the operand types.
impl LateLintPass<'_> for InstantSubtraction {
    fn check_expr(&mut self, cx: &LateContext<'_>, expr: &'_ Expr<'_>) {
        if let ExprKind::Binary(
            Spanned {
                node: BinOpKind::Sub, ..
            },
            lhs,
            rhs,
        ) = expr.kind
        {
            if is_instant_now_call(cx, lhs)
                && is_an_instant(cx, rhs)
                && let Some(sugg) = Sugg::hir_opt(cx, rhs)
            {
                // `Instant::now() - earlier` => suggest `earlier.elapsed()`.
                print_manual_instant_elapsed_sugg(cx, expr, sugg);
            } else if !expr.span.from_expansion()
                && self.msrv.meets(msrvs::TRY_FROM)
                && is_an_instant(cx, lhs)
                && is_a_duration(cx, rhs)
            {
                // `instant - duration`: only linted outside macro expansions
                // and when the configured MSRV allows the replacement.
                print_unchecked_duration_subtraction_sugg(cx, lhs, rhs, expr);
            }
        }
    }

    extract_msrv_attr!(LateContext);
}
/// Returns `true` iff the expression is a direct, zero-argument call to
/// `std::time::Instant::now`.
fn is_instant_now_call(cx: &LateContext<'_>, expr_block: &'_ Expr<'_>) -> bool {
    // Idiom fix: the original `if … { true } else { false }` is a
    // needless-bool pattern; return the final check directly.
    if let ExprKind::Call(fn_expr, []) = expr_block.kind
        && let Some(fn_id) = clippy_utils::path_def_id(cx, fn_expr)
    {
        clippy_utils::match_def_path(cx, fn_id, &clippy_utils::paths::INSTANT_NOW)
    } else {
        false
    }
}
/// Returns `true` when the expression's type is `std::time::Instant`.
fn is_an_instant(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool {
    ty::is_type_diagnostic_item(cx, cx.typeck_results().expr_ty(expr), sym::Instant)
}
/// Returns `true` when the expression's type is `std::time::Duration`.
fn is_a_duration(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool {
    ty::is_type_diagnostic_item(cx, cx.typeck_results().expr_ty(expr), sym::Duration)
}
/// Emits the `MANUAL_INSTANT_ELAPSED` diagnostic, replacing the whole
/// subtraction with `<instant>.elapsed()`.
fn print_manual_instant_elapsed_sugg(cx: &LateContext<'_>, expr: &Expr<'_>, sugg: Sugg<'_>) {
    let replacement = format!("{}.elapsed()", sugg.maybe_par());
    span_lint_and_sugg(
        cx,
        MANUAL_INSTANT_ELAPSED,
        expr.span,
        "manual implementation of `Instant::elapsed`",
        "try",
        replacement,
        Applicability::MachineApplicable,
    );
}
/// Emits the `UNCHECKED_DURATION_SUBTRACTION` diagnostic, suggesting
/// `<instant>.checked_sub(<duration>).unwrap()` in place of the raw `-`.
fn print_unchecked_duration_subtraction_sugg(
    cx: &LateContext<'_>,
    left_expr: &Expr<'_>,
    right_expr: &Expr<'_>,
    expr: &Expr<'_>,
) {
    let mut applicability = Applicability::MachineApplicable;
    // Take snippets in the subtraction's syntax context; placeholder text is
    // used when no snippet is available, and `applicability` is downgraded
    // accordingly by `snippet_with_context`.
    let ctxt = expr.span.ctxt();
    let left_expr = snippet_with_context(cx, left_expr.span, ctxt, "<instant>", &mut applicability).0;
    let right_expr = snippet_with_context(cx, right_expr.span, ctxt, "<duration>", &mut applicability).0;
    span_lint_and_sugg(
        cx,
        UNCHECKED_DURATION_SUBTRACTION,
        expr.span,
        "unchecked subtraction of a 'Duration' from an 'Instant'",
        "try",
        format!("{left_expr}.checked_sub({right_expr}).unwrap()"),
        applicability,
    );
}
<?php
namespace App\Models;
use Illuminate\Database\Eloquent\Factories\HasFactory;
use Illuminate\Database\Eloquent\Model;
use Illuminate\Database\Eloquent\Relations\HasMany;
use Illuminate\Database\Eloquent\Relations\BelongsTo;
use Illuminate\Database\Eloquent\SoftDeletes;
class Contact extends Model
{
    use SoftDeletes, HasFactory;

    /**
     * Mass-assignable attributes for a contact record.
     *
     * @var array<int, string>
     */
    protected $fillable = [
        'name',
        'last_name',
        'photo',
        'company_id',
    ];

    /**
     * The company this contact belongs to.
     */
    public function company(): BelongsTo
    {
        return $this->belongsTo(Company::class);
    }

    /**
     * Phone numbers attached to this contact.
     */
    public function phones(): HasMany
    {
        return $this->hasMany(PhoneNumber::class);
    }

    /**
     * Physical locations attached to this contact.
     */
    public function locations(): HasMany
    {
        return $this->hasMany(Location::class);
    }

    /**
     * E-mail addresses attached to this contact.
     */
    public function emails(): HasMany
    {
        return $this->hasMany(MailAddress::class);
    }
}
import { ApplicationError } from "@app/utils/common";
import middy from "@middy/core";
import cors from "@middy/http-cors";
import httpErrorHandler from "@middy/http-error-handler";
import httpEventNormalizer from "@middy/http-event-normalizer";
import httpHeaderNormalizer from "@middy/http-header-normalizer";
import middyJsonBodyParser from "@middy/http-json-body-parser";
import { HttpError, createError } from "@middy/util";
import validator from "@middy/validator";
import { transpileSchema } from "@middy/validator/transpile";
import { APIGatewayProxyEvent, APIGatewayProxyResult } from "aws-lambda";
import { checkAuthHeader } from "./auth";
import { applicationLogger } from "./logging";
// Options accepted by `middyfy`: an optional JSON schema used to validate the
// incoming event, and a flag that enables the auth-header check middleware.
type MiddlewareOptions = { eventSchema?: any; requiresAuth?: boolean };
/**
 * Just used to ease testing
 */
type NamedMiddleware = middy.MiddlewareObj & { name: string };
// Module-wide logger instance.
const logger = applicationLogger;
// Attaches a `name` to a middleware object so tests can identify entries in
// the assembled stack.
const buildNamedMiddleware = (
  name: string,
  middleware: middy.MiddlewareObj
): NamedMiddleware => {
  return { ...middleware, name };
};
// Maps application error class names to the HTTP status code they should
// produce; anything not listed falls back to 500 in `customErrorHandler`.
const errorNameToStatusCodeMapping = {
  InsufficientBalanceError: 409,
  InvalidDataFormat: 403,
  UserNotAuthenticatedError: 401,
};
/**
 * Middleware that converts errors thrown by handlers into HTTP errors.
 *
 * Known `ApplicationError`s are mapped to specific status codes via
 * `errorNameToStatusCodeMapping`; everything else becomes a 500. Errors that
 * already carry a `statusCode` (i.e. are already HTTP errors) pass through
 * untouched.
 */
const customErrorHandler = (): middy.MiddlewareObj<
  APIGatewayProxyEvent,
  APIGatewayProxyResult
> => {
  const onError: middy.MiddlewareFn<
    APIGatewayProxyEvent,
    APIGatewayProxyResult
  > = (request) => {
    logger.info("Enter customErrorHandler");
    const error = request.error;
    const httpError = request.error as HttpError;
    if (httpError.statusCode) {
      // The error already has a status code
      logger.info("Exit customErrorHandler");
      return;
    }
    let statusCode = 500;
    // Bug fix: `errorMsg` was previously left undefined for errors that are
    // not `ApplicationError`s, so `createError` received no message at all.
    let errorMsg = error?.message ?? "Internal Server Error";
    if (error instanceof ApplicationError) {
      // Map the domain error onto its configured HTTP status code.
      statusCode = errorNameToStatusCodeMapping[error.errorName] ?? 500;
      errorMsg = error.message;
    }
    request.error = createError(statusCode, errorMsg, {
      cause: error,
      expose: true,
    });
    logger.info("Exit customErrorHandler");
  };
  return {
    onError,
  };
};
/**
 * Wrap a Lambda handler with the standard middleware stack.
 *
 * Final order: http-error-handler, custom-error-handler, header/event
 * normalizers, JSON body parser, then (optionally) the auth check and the
 * schema validator, and cors last. Returns the wrapped handler together
 * with the middleware list and options for test inspection.
 */
const middyfy = (handler: any, options?: MiddlewareOptions) => {
  logger.info("Enter middyfy");
  // Optional segments, materialized as (possibly empty) arrays.
  const authSegment = options?.requiresAuth
    ? [buildNamedMiddleware("check-authorizer", checkAuthHeader())]
    : [];
  const validatorSegment = options?.eventSchema
    ? [
        buildNamedMiddleware(
          "validator",
          validator({ eventSchema: transpileSchema(options.eventSchema) })
        ),
      ]
    : [];
  const middlewares = [
    buildNamedMiddleware(
      "http-error-handler",
      httpErrorHandler({ logger: (err) => logger.error(err) })
    ),
    buildNamedMiddleware("custom-error-handler", customErrorHandler()),
    buildNamedMiddleware("http-header-normalizer", httpHeaderNormalizer()),
    buildNamedMiddleware("http-event-normalizer", httpEventNormalizer()),
    buildNamedMiddleware("middy-json-body-parser", middyJsonBodyParser()),
    ...authSegment,
    ...validatorSegment,
    buildNamedMiddleware("cors", cors()),
  ];
  const result = {
    handler: middy(handler).use(middlewares),
    middlewares,
    options,
  };
  logger.info("Exit middyfy");
  return result;
};
export { customErrorHandler, middyfy }; |
package com.example.noterapp
import android.content.Intent
import androidx.appcompat.app.AppCompatActivity
import android.os.Bundle
import android.view.View
import android.widget.Button
import android.widget.Toast
import kotlinx.coroutines.GlobalScope
import kotlinx.coroutines.launch
import com.example.noterapp.databinding.ActivityQuoteBinding
import java.lang.Exception
/**
 * Activity that fetches a random quote from the remote quote API
 * (via RetrofitInstance.quoteApi) and displays its text and author.
 * The "next" button triggers another fetch.
 */
class QuoteActivity : AppCompatActivity() {
    // View binding for this activity's layout; initialised in onCreate().
    lateinit var binding : ActivityQuoteBinding
    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        binding = ActivityQuoteBinding.inflate(layoutInflater)
        // Tried to build an intent to MainActivity but it failed
        // (original author's note, translated from Indonesian; dead code kept for reference)
        // val bnoter = findViewById<Button>(R.id.noter)
        // bnoter.setOnClickListener {
        // val Intent = Intent(this,MainActivity::class.java)
        // startActivity(Intent)
        // }
        setContentView(binding.root)
        // Load the first quote immediately, and a new one on each tap.
        getQuote()
        binding.nextBtn.setOnClickListener {
            getQuote()
        }
    }
    /**
     * Fetch one random quote off the main thread and render it.
     * Network errors are reported with a toast; the progress spinner is
     * hidden again on both success and failure.
     *
     * NOTE(review): GlobalScope outlives this Activity, so a response that
     * arrives after the Activity is destroyed may still call runOnUiThread —
     * consider lifecycleScope instead; confirm before changing.
     */
    private fun getQuote(){
        setInProgress(true)
        GlobalScope.launch {
            try{
                val response = RetrofitInstance.quoteApi.getRandomQuote()
                runOnUiThread {
                    setInProgress(false)
                    // The API returns a list; only the first quote is shown.
                    response.body()?.first()?.let {
                        setUI(it)
                    }
                }
            }catch (e : Exception){
                runOnUiThread {
                    setInProgress(false)
                    Toast.makeText(applicationContext,"Something went wrong",Toast.LENGTH_SHORT).show()
                }
            }
        }
    }
    // Bind quote text (q) and author (a) into the two TextViews.
    private fun setUI(quote : QuoteModel){
        binding.quoteTv.text = quote.q
        binding.authorTv.text = quote.a
    }
    // Toggle between the progress spinner and the "next" button.
    private fun setInProgress(inProgress : Boolean){
        if(inProgress){
            binding.progressBar.visibility = View.VISIBLE
            binding.nextBtn.visibility = View.GONE
        }else{
            binding.progressBar.visibility = View.GONE
            binding.nextBtn.visibility = View.VISIBLE
        }
    }
}
import 'package:flutter/material.dart';
import 'package:flutter_screenutil/flutter_screenutil.dart';
import 'package:get/get.dart';
import 'package:test_task/bussiness/controllers/auth_controller.dart';
import 'package:test_task/core/router/get_routes.dart';
import 'package:test_task/presentation/Widgets/accept_button.dart';
import 'package:test_task/presentation/resources/app_assets.dart';
import 'package:test_task/presentation/resources/app_colors.dart';
import 'package:test_task/presentation/resources/app_styles.dart';
import 'package:test_task/presentation/widgets/email_text_field.dart';
import 'package:test_task/presentation/widgets/name_text_field.dart';
import 'package:test_task/presentation/widgets/password_text_field.dart';
import 'package:test_task/presentation/widgets/phone_text_field.dart';
import 'package:test_task/presentation/widgets/spacer.dart';
/// Registration screen.
///
/// Collects name, phone, e-mail and password, validates them through a
/// [Form], saves the values onto the shared [AuthController] and calls
/// `register()` on submit. A footer link navigates to the login screen.
class RegisterScreen extends StatelessWidget {
  RegisterScreen({super.key});
  // One text controller per input field.
  final TextEditingController nameController = TextEditingController();
  final TextEditingController phoneNumberController = TextEditingController();
  final TextEditingController emailController = TextEditingController();
  final TextEditingController passwordController = TextEditingController();
  final TextEditingController confirmPasswordController =
      TextEditingController();
  // Key used to validate and save the whole form at once.
  final GlobalKey<FormState> _formKey = GlobalKey();
  // Shared auth state/actions; must be registered with Get before this
  // screen is built.
  final authController = Get.find<AuthController>();
  @override
  Widget build(BuildContext context) {
    return Scaffold(
      body: SafeArea(
        child: SingleChildScrollView(
          padding: EdgeInsets.symmetric(horizontal: 60.w, vertical: 40.h),
          child: Column(
            mainAxisAlignment: MainAxisAlignment.spaceBetween,
            children: [
              // Header: app logo + screen title.
              Column(
                children: [
                  SizedBox(
                    width: 150.w,
                    height: 150.h,
                    child: Image.asset(AppAssets.appLogo),
                  ),
                  SpacerHeight(height: 20.h),
                  const Text(
                    'Register',
                    style: headerTextStyle,
                  ),
                ],
              ),
              // Input form: each onSaved pushes the value into the controller.
              Form(
                key: _formKey,
                child: Column(
                  children: [
                    NameTextField(
                      controller: nameController,
                      onSaved: (value) {
                        authController.name = value!;
                      },
                    ),
                    SpacerHeight(height: 20.h),
                    PhoneTextField(
                      controller: phoneNumberController,
                      onSaved: (value) {
                        // Phone widget yields number and country code separately.
                        authController.phone = value!.number;
                        authController.countryCode = value.countryCode;
                      },
                    ),
                    SpacerHeight(height: 20.h),
                    EmailTextField(
                      controller: emailController,
                      onSaved: (value) {
                        authController.email = value!;
                      },
                    ),
                    SpacerHeight(height: 20.h),
                    PasswordTextField(controller: passwordController),
                    SpacerHeight(height: 20.h),
                    // NOTE(review): confirm-password is built inline instead of
                    // reusing PasswordTextField, it is not wired to
                    // confirmPasswordController, and its visibility icon is a
                    // static Icon that never toggles obscureText — confirm
                    // whether this is intended.
                    TextFormField(
                      keyboardType: TextInputType.visiblePassword,
                      autovalidateMode: AutovalidateMode.onUserInteraction,
                      textAlign: TextAlign.center,
                      obscureText: true,
                      decoration: authInputsStyle.copyWith(
                        hintText: 'Confirm Password',
                        prefix: Container(width: 60),
                        suffixIcon: const Icon(Icons.visibility),
                      ),
                      validator: (value) {
                        if (value!.isEmpty) {
                          return 'Confirm Password required';
                        } else if (value != passwordController.text) {
                          return 'Password does not match';
                        }
                        return null;
                      },
                      onSaved: (value) {
                        authController.confirmPassword = value!;
                      },
                    ),
                    SpacerHeight(height: 30.h),
                    AcceptButton(
                      buttonText: 'Register',
                      buttonStyle: acceptButtonStyle,
                      onPressed: () async {
                        // Validate every field, then save and register.
                        if (_formKey.currentState!.validate()) {
                          _formKey.currentState!.save();
                          await authController.register();
                        }
                      },
                    ),
                  ],
                ),
              ),
              // Footer: link to the login screen for existing users.
              Row(
                mainAxisAlignment: MainAxisAlignment.center,
                children: [
                  const Text(
                    'Already have an account? ',
                    style: normalTextStyle,
                  ),
                  InkWell(
                    onTap: () => Get.toNamed(AppRoutes.loginScreen),
                    child: Text(
                      'Login',
                      style: normalTextStyle.copyWith(
                        color: AppColors.primary,
                        fontWeight: FontWeight.bold,
                      ),
                    ),
                  ),
                ],
              ),
            ],
          ),
        ),
      ),
    );
  }
}
#==== produce country map function ==========#
# Draw the Uganda country outline and return its map-polygon dataframe.
#
# BUG FIX: ggplot objects are only auto-printed at the top level; inside a
# function the plot must be print()ed explicitly, otherwise it is silently
# discarded. The original built the plot but never displayed it.
Plot_nationalmap_Uganda_func <- function() {
  w2hr <- map_data("world2Hires")
  UGA <- w2hr[w2hr$region == "Uganda", ]  # obtain Uganda country outline
  print(
    ggplot() +
      geom_polygon(data = UGA, aes(x = long, y = lat, group = group),
                   color = "black", size = 0.1, fill = "lightgrey") +
      coord_equal()
  )  # plot Uganda country outline
  return(UGA)
}
# Plotting district boundaries (2003-2009) - create Dataframe object (from shape file) #
# Plot district layer 2003-2009 (2006 admin data) #
# source: https://earthworks.stanford.edu/catalog/stanford-vg894mz3698
# Plot Uganda district boundaries over the national outline and build a tidy
# dataframe for later choropleth mapping.
#
# Args:
#   shape_file:         a spatial-polygons object (read from the 2006 admin
#                       shapefile) with district attributes in its @data slot
#   national_map_input: country-outline dataframe (output of
#                       Plot_nationalmap_Uganda_func)
# Returns: list(district_plot, UGA_districts_tidy) — the ggplot object and the
#          tidy dataframe with shapefile attributes joined on polygon id.
UGA_district_boundaries_function <- function(shape_file, national_map_input){
glimpse(shape_file) # check shape file
districts_tidy <- tidy(shape_file) # turn into a dataframe with tidy func
district_plot <-
ggplot() +
geom_polygon(data = national_map_input, aes(x=long, y = lat, group = group), color = "black", size = 0.1, fill = "lightgrey") +
geom_polygon(data = districts_tidy, aes(x = long, y = lat, group = group), colour = "black", alpha = 1, fill = NA)+
coord_equal(ratio = 1) # plot district boundaries
# make dataframe object with variables (district name) for mapping #
shape_file$id <- row.names(shape_file) # include row ids in spatial polygon object
UGA_districts_tidy <- left_join(districts_tidy, shape_file@data) # join variables from spatial polygon into dataframe
return(list(district_plot, UGA_districts_tidy))
}
# TO DO: make function for 2010 onwards or roll into this function?
# Build a one-column dataframe of district names, sorted alphabetically,
# from the shapefile attribute table (DISTRICT column of the @data slot).
district_name_func <- function(shape_file) {
  sorted_names <- sort(shape_file@data$DISTRICT)
  return(data.frame(Dist_name = sorted_names))
}
# TO DO: make function for 2010 onwards or roll into this function?
# function to get dataframe to plot different coverages (based on different methods) for each year #
# 1) mapping original district coverages function #
originaldistrict_MDA_coverage_mapping0319_dataframe_func <- function(data1, data2, data3,
district_names, district_map, year_input, age_target){
temp_data1 <- cbind(district_names_0319, data1)
temp_data1$Age <- as.factor(temp_data1$Age)
temp_data2 <- cbind(district_names_0319, data2)
temp_data2$Age <- as.factor(temp_data2$Age)
temp_data3 <- cbind(district_names_0319, data3)
temp_data3$Age <- as.factor(temp_data3$Age)
# subset data based on age target #
if(age_target == "ALL") {
temp_data1 <- subset(temp_data1, Age == "ALL")
temp_data2 <- subset(temp_data2, Age == "ALL")
temp_data3 <- subset(temp_data3, Age == "ALL")
}
if(age_target == "SAC") {
temp_data2 <- subset(temp_data1, Age == "SAC")
temp_data2 <- subset(temp_data2, Age == "SAC")
temp_data3 <- subset(temp_data3, Age == "SAC")
}
if(age_target == "non-SAC") {
temp_data2 <- subset(temp_data1, Age == "non-SAC")
temp_data2 <- subset(temp_data2, Age == "non-SAC")
temp_data3 <- subset(temp_data3, Age == "non-SAC")
}
# Mapping Presence of MDA: 2003-2009 treatment year #
# Return the vector of Ugandan districts that received MDA in the given
# treatment year for the given target age group ("ALL", "SAC" or "non-SAC").
#
# BUG FIX: the original conditions relied on `a || b && c`, but in R `&&`
# binds tighter than `||`, so e.g.
#   year_input == 2008 && age_target == "ALL" || age_target == "non-SAC"
# matched EVERY year for "non-SAC", and bare `|| age_target == "SAC"` terms
# matched every year for "SAC", with later branches silently overwriting
# earlier ones (2013/"SAC" ended up returning the 2019 list). Conditions are
# now explicitly grouped with %in% / parentheses. Also fixed the unreachable
# `age_target == "non-ALL"` typo (2018) to "non-SAC", and added a
# character(0) default so unmatched inputs (e.g. 2011, no data) return empty
# instead of erroring on an undefined object.
selecting_MDA_districts_func <- function(year_input, age_target) {
  MDA_districts <- character(0)  # default: no MDA data for this combination
  # 2003 - 2004 MDA years #
  if (year_input %in% c(2003, 2004) && age_target == "ALL") {
    MDA_districts <-
      c("APAC", "MOYO", "ADJUMANI", "ARUA", "NEBBI", "LIRA", "NAKASONGOLA", "MASINDI", "HOIMA", "BUGIRI",
        "BUSIA", "KAYUNGA", "JINJA", "MUKONO", "WAKISO", "MAYUGE", "BUNDIBUGYO", "KIBAALE")
  }
  if (year_input %in% c(2003, 2004) && age_target == "SAC") {
    MDA_districts <-
      c("APAC", "MOYO", "ADJUMANI", "ARUA", "NEBBI", "LIRA", "NAKASONGOLA", "MASINDI", "HOIMA", "BUGIRI",
        "BUSIA", "KAYUNGA", "JINJA", "MUKONO", "WAKISO", "MAYUGE", "BUNDIBUGYO", "KIBAALE")
  }
  if (year_input == 2003 && age_target == "non-SAC") {
    MDA_districts <-
      c("APAC", "ARUA", "NEBBI", "LIRA", "NAKASONGOLA", "MASINDI", "BUGIRI", "BUSIA",
        "KAYUNGA", "JINJA", "MUKONO", "WAKISO", "MAYUGE", "KIBAALE")
  }
  if (year_input == 2004 && age_target == "non-SAC") {
    MDA_districts <-
      c("APAC", "ARUA", "NEBBI", "LIRA", "NAKASONGOLA", "MASINDI", "BUGIRI", "BUSIA",
        "KAYUNGA", "JINJA", "MUKONO", "MAYUGE", "KIBAALE", "HOIMA", "MOYO")
  }
  # 2005 - 2006 MDA years #
  if (year_input %in% c(2005, 2006) && age_target == "ALL") {
    MDA_districts <-
      c("APAC","MOYO","ADJUMANI","YUMBE","ARUA","NEBBI","LIRA","KABERAMAIDO","SOROTI","NAKASONGOLA",
        "MASINDI","HOIMA","KAMULI","BUGIRI","BUSIA","KAYUNGA","JINJA","MUKONO","WAKISO","MAYUGE",
        "KALANGALA","KABALE","KISORO","KANUNGU","RUKUNGIRI","BUNDIBUGYO","KIBAALE")
  }
  if (year_input == 2005 && age_target == "SAC") {
    MDA_districts <-
      c("APAC", "MOYO", "ADJUMANI","YUMBE","ARUA","NEBBI","LIRA","KABERAMAIDO","SOROTI",
        "NAKASONGOLA","MASINDI","HOIMA","KAMULI","BUGIRI","KAYUNGA","JINJA","MUKONO",
        "WAKISO","MAYUGE","KALANGALA","BUNDIBUGYO","KIBAALE")
  }
  if (year_input == 2006 && age_target == "SAC") {
    MDA_districts <-
      c("APAC","MOYO","ADJUMANI","ARUA","NEBBI","LIRA","KABERAMAIDO","SOROTI","NAKASONGOLA",
        "MASINDI","HOIMA","KAMULI","BUGIRI","BUSIA","KAYUNGA","JINJA","MUKONO","MAYUGE",
        "KALANGALA","BUNDIBUGYO","KIBAALE")
  }
  if (year_input == 2005 && age_target == "non-SAC") {
    MDA_districts <-
      c("APAC","MOYO","ADJUMANI","YUMBE","ARUA","NEBBI","LIRA","KABERAMAIDO","SOROTI","NAKASONGOLA",
        "MASINDI","HOIMA","KAMULI","BUGIRI","BUSIA","KAYUNGA","JINJA","MUKONO","MAYUGE","KALANGALA",
        "BUNDIBUGYO","KIBAALE")
  }
  if (year_input == 2006 && age_target == "non-SAC") {
    MDA_districts <-
      c("MOYO","LIRA","KABERAMAIDO","SOROTI","HOIMA","KAMULI","BUGIRI","BUSIA",
        "KAYUNGA","JINJA","MUKONO","WAKISO","MAYUGE","KALANGALA","BUNDIBUGYO")
  }
  # 2007 MDA year #
  if (year_input == 2007 && age_target == "ALL") {
    MDA_districts <-
      c("APAC","MOYO","ADJUMANI","ARUA","NEBBI","LIRA","KABERAMAIDO","SOROTI","KAMULI",
        "NAKASONGOLA","HOIMA","BUGIRI","BUSIA","KAYUNGA","JINJA","MUKONO","WAKISO",
        "MAYUGE","KALANGALA","MPIGI","MASAKA","KABAROLE","BUNDIBUGYO","KIBAALE","MASINDI")
  }
  if (year_input == 2007 && age_target == "SAC") {
    MDA_districts <-
      c("ADJUMANI","NEBBI","LIRA","KABERAMAIDO","SOROTI","NAKASONGOLA","HOIMA","BUGIRI","BUSIA",
        "KAYUNGA","JINJA","MUKONO","MAYUGE","KABAROLE","BUNDIBUGYO","KIBAALE","MASINDI", "KAMULI")
  }
  if (year_input == 2007 && age_target == "non-SAC") {
    MDA_districts <-
      c("ADJUMANI","NEBBI","LIRA","KABERAMAIDO","SOROTI","KAMULI",
        "NAKASONGOLA","HOIMA","BUGIRI","BUSIA","KAYUNGA","JINJA","MUKONO",
        "MAYUGE","KABAROLE","BUNDIBUGYO","KIBAALE","MASINDI")
  }
  # 2008 MDA year #
  if (year_input == 2008 && age_target %in% c("ALL", "non-SAC")) {
    MDA_districts <-
      c("APAC","YUMBE","ARUA","NEBBI","LIRA","NAKASONGOLA","HOIMA","KAYUNGA","JINJA",
        "MUKONO","WAKISO","KALANGALA","MPIGI","MASAKA","KIBAALE","MASINDI","BUSIA")
  }
  if (year_input == 2008 && age_target == "SAC") {
    MDA_districts <-
      c("YUMBE","ARUA","NEBBI","NAKASONGOLA","KAYUNGA","JINJA",
        "MUKONO","KALANGALA","MASAKA","MASINDI","BUSIA")
  }
  # 2009 MDA year (same districts for all three age targets) #
  if (year_input == 2009 && age_target %in% c("ALL", "SAC", "non-SAC")) {
    MDA_districts <-
      c("PADER","APAC","KITGUM","MOYO","YUMBE","ARUA","NEBBI","GULU","LIRA","KABERAMAIDO",
        "NAKASONGOLA","HOIMA","BUGIRI","BUSIA","KAYUNGA","JINJA","MUKONO","WAKISO",
        "MAYUGE","MPIGI","KABAROLE","MUBENDE","BUNDIBUGYO","KIBAALE","RAKAI","SOROTI","PALLISA",
        "KAMULI","MASINDI")
  }
  # 2009 rnd 2 MDA year #
  if (year_input == "2009rnd2" && age_target %in% c("ALL", "SAC", "non-SAC")) {
    MDA_districts <-
      c("APAC","YUMBE","ARUA","GULU","LIRA","KAYUNGA","MUKONO","KAMULI","KIBAALE")
  }
  # 2010 MDA year #
  if (year_input == 2010 && age_target %in% c("ALL", "SAC", "non-SAC")) {
    MDA_districts <-
      c("PADER","APAC","KITGUM","MOYO","ARUA","GULU","LIRA","KABERAMAIDO","NAKASONGOLA","HOIMA",
        "BUGIRI","MAYUGE","MUBENDE","BUNDIBUGYO","SOROTI","MASINDI","KALANGALA","MASAKA",
        "ADJUMANI")
  }
  # 2011 MDA year: currently no available data (returns character(0)) #
  # 2012 MDA year #
  if (year_input == 2012 && age_target %in% c("ALL", "SAC")) {
    MDA_districts <-
      c("PADER","APAC","KITGUM","MOYO","YUMBE","ARUA","NEBBI","GULU","LIRA","KABERAMAIDO",
        "NAKASONGOLA","HOIMA","BUGIRI","BUSIA","KAYUNGA","JINJA","MUKONO","WAKISO",
        "MAYUGE","MPIGI","KABAROLE","MUBENDE","BUNDIBUGYO","KIBAALE","SOROTI","PALLISA",
        "KAMULI","MASINDI","ADJUMANI","MASAKA","MBARARA","RUKUNGIRI","BUSHENYI",
        "KAMWENGE","KASESE","KALANGALA")
  }
  if (year_input == 2012 && age_target == "non-SAC") {
    MDA_districts <-
      c("PADER","APAC","KITGUM","MOYO","YUMBE","ARUA","NEBBI","GULU","LIRA",
        "NAKASONGOLA","HOIMA","BUGIRI","BUSIA","KAYUNGA","MUKONO","WAKISO",
        "MAYUGE","KABAROLE","MUBENDE","BUNDIBUGYO","KIBAALE","SOROTI","PALLISA",
        "KAMULI","MASINDI","ADJUMANI","MASAKA","MBARARA","RUKUNGIRI","BUSHENYI",
        "KAMWENGE","KASESE","KALANGALA")
  }
  # 2013 MDA year #
  if (year_input == 2013 && age_target %in% c("ALL", "SAC")) {
    MDA_districts <-
      c("KITGUM","ARUA","LIRA","NAKASONGOLA","MUBENDE","SOROTI","PALLISA",
        "KAMULI","MASAKA","RUKUNGIRI","RAKAI",
        "TORORO","KUMI","KATAKWI","IGANGA","KAPCHORWA","MBALE","MBARARA","SIRONKO")
  }
  if (year_input == 2013 && age_target == "non-SAC") {
    MDA_districts <-
      c("")
  }
  # 2014 MDA year #
  if (year_input == 2014 && age_target %in% c("ALL", "SAC", "non-SAC")) {
    MDA_districts <-
      c("PADER","APAC","KITGUM","MOYO","YUMBE","ARUA","NEBBI","GULU","LIRA",
        "KABERAMAIDO","HOIMA","BUGIRI","BUSIA","KAYUNGA","JINJA","MUKONO","WAKISO",
        "MPIGI","KABAROLE","MUBENDE","BUNDIBUGYO","KIBAALE","SOROTI",
        "KAMULI","MASINDI","ADJUMANI","MASAKA","RUKUNGIRI","BUSHENYI",
        "KAMWENGE","KASESE","KALANGALA","IGANGA","MBALE","SIRONKO")
  }
  # 2015 MDA year #
  if (year_input == 2015 && age_target %in% c("ALL", "SAC")) {
    MDA_districts <-
      c("PADER","APAC","KITGUM","MOYO","YUMBE","ARUA","NEBBI","GULU","LIRA",
        "KABERAMAIDO","HOIMA","BUGIRI","BUSIA","KAYUNGA","JINJA","MUKONO","WAKISO",
        "MPIGI","KABAROLE","MUBENDE","BUNDIBUGYO","KIBAALE","SOROTI","MAYUGE",
        "KAMULI","MASINDI","ADJUMANI","MASAKA","RUKUNGIRI","BUSHENYI",
        "KAMWENGE","KASESE","KALANGALA","IGANGA","MBALE","SIRONKO",
        "KAPCHORWA","KATAKWI","KUMI","MBARARA","NAKAPIRIPIRIT","NAKASONGOLA","PALLISA",
        "RAKAI","TORORO")
  }
  if (year_input == 2015 && age_target == "non-SAC") {
    MDA_districts <-
      c("PADER","APAC","KITGUM","MOYO","YUMBE","ARUA","NEBBI","GULU","LIRA",
        "KABERAMAIDO","HOIMA","BUGIRI","BUSIA","KAYUNGA","JINJA","MUKONO","WAKISO",
        "MPIGI","KABAROLE","MUBENDE","BUNDIBUGYO","KIBAALE","SOROTI","MAYUGE",
        "KAMULI","MASINDI","ADJUMANI","MASAKA","BUSHENYI",
        "KAMWENGE","KASESE","KALANGALA")
  }
  # 2016 MDA year #
  if (year_input == 2016 && age_target %in% c("ALL", "SAC", "non-SAC")) {
    MDA_districts <-
      c("MUBENDE","BUNDIBUGYO","BUSIA","KABAROLE","KALANGALA","KAYUNGA",
        "MASINDI","MPIGI","MUKONO","WAKISO")
  }
  # 2017 MDA year #
  if (year_input == 2017 && age_target %in% c("ALL", "SAC")) {
    MDA_districts <-
      c("PADER","APAC","KITGUM","MOYO","YUMBE","ARUA","NEBBI","GULU","LIRA",
        "KABERAMAIDO","HOIMA","BUGIRI","BUSIA","KAYUNGA","JINJA","MUKONO","WAKISO",
        "MPIGI","KABAROLE","MUBENDE","BUNDIBUGYO","SOROTI","MAYUGE",
        "KAMULI","MASINDI","ADJUMANI","MASAKA","RUKUNGIRI","BUSHENYI",
        "KAMWENGE","KASESE","KALANGALA","IGANGA","MBALE","SIRONKO",
        "KAPCHORWA","KATAKWI","KUMI","MBARARA","NAKAPIRIPIRIT","NAKASONGOLA","PALLISA",
        "RAKAI","TORORO")
  }
  if (year_input == 2017 && age_target == "non-SAC") {
    MDA_districts <-
      c("PADER","APAC","KITGUM","MOYO","YUMBE","ARUA","NEBBI","GULU","LIRA",
        "KABERAMAIDO","HOIMA","BUGIRI","BUSIA","KAYUNGA","JINJA","MUKONO","WAKISO",
        "MPIGI","KABAROLE","MUBENDE","BUNDIBUGYO","SOROTI","MAYUGE",
        "KAMULI","MASINDI","ADJUMANI","MASAKA","BUSHENYI",
        "KAMWENGE","KASESE","KALANGALA")
  }
  # 2018 MDA year #
  if (year_input == 2018 && age_target %in% c("ALL", "SAC")) {
    MDA_districts <-
      c("PADER","APAC","KITGUM","MOYO","YUMBE","ARUA","NEBBI","GULU","LIRA",
        "KABERAMAIDO","HOIMA","BUGIRI","BUSIA","KAYUNGA","JINJA","MUKONO","WAKISO",
        "MPIGI","KABAROLE","MUBENDE","BUNDIBUGYO","SOROTI","MAYUGE",
        "KAMULI","MASINDI","ADJUMANI","MASAKA","BUSHENYI","KAMWENGE","KASESE",
        "KALANGALA")
  }
  if (year_input == 2018 && age_target == "non-SAC") {  # was "non-ALL" (typo, unreachable)
    MDA_districts <-
      c("PADER","APAC","YUMBE","ARUA","NEBBI","LIRA",
        "KABERAMAIDO","HOIMA","BUGIRI","BUSIA","KAYUNGA","JINJA","MUKONO","WAKISO",
        "MPIGI","KABAROLE","MUBENDE","BUNDIBUGYO","SOROTI","MAYUGE",
        "KAMULI","MASINDI","ADJUMANI","MASAKA","BUSHENYI","KAMWENGE","KASESE",
        "KALANGALA")
  }
  # 2019 MDA year #
  if (year_input == 2019 && age_target %in% c("ALL", "SAC")) {
    MDA_districts <-
      c("PADER","APAC","KITGUM","MOYO","YUMBE","ARUA","NEBBI","GULU","LIRA",
        "KABERAMAIDO","HOIMA","BUGIRI","BUSIA","KAYUNGA","JINJA","MUKONO","WAKISO",
        "MPIGI","KABAROLE","MUBENDE","BUNDIBUGYO","SOROTI","MAYUGE",
        "KAMULI","MASINDI","ADJUMANI","MASAKA","RUKUNGIRI","BUSHENYI",
        "KAMWENGE","KASESE","KALANGALA","IGANGA","MBALE","SIRONKO","KABALE",
        "KAPCHORWA","KATAKWI","KUMI","MBARARA","NAKAPIRIPIRIT","NAKASONGOLA","PALLISA",
        "RAKAI","TORORO")
  }
  if (year_input == 2019 && age_target == "non-SAC") {
    MDA_districts <-
      c("")
  }
  return(MDA_districts)
}
MDA_districts <- selecting_MDA_districts_func(year_input = year_input, age_target = age_target) # call function
length(MDA_districts)
UGA_dist_MDA_names <- district_names # copy variable (dist names)
UGA_dist_MDA_names$MDA <- ifelse(district_names$Dist_name %in% MDA_districts, "MDA","none") # code whether MDA or not
UGA_dist_MDA_names <- UGA_dist_MDA_names %>% rename(DISTRICT = Dist_name) # rename column
UGA_districts_tidy <- left_join(district_map, UGA_dist_MDA_names) # join boundary data to MDA presence data
UGA_districts_tidy$MDA <- as.factor(UGA_districts_tidy$MDA) # make MDA presence a factor
MDA.col <- c("purple2","lightgrey") # to colour MDA districts
MDA.vec <- MDA.col[UGA_districts_tidy$MDA] # specify colour for each polygon
UGA_districts_tidy$MDA_fill <- MDA.vec # new column for fill in ggplot depending on MDA
alpha.MDA.col <- c(0.6, 0.01) # alpha for gpplot depending on MDA fill
alpha.MDA.vec <- alpha.MDA.col[UGA_districts_tidy$MDA] # vector depending on MDA
UGA_districts_tidy$alpha.MDA.vec <- alpha.MDA.vec # new column based for alpha in gpplot of each polygon
# to plot: #
# Map_03 <-
# ggplot() +
# geom_polygon(data = UGA, aes(x=long, y = lat, group = group), color = "black", size = 0.1, fill = "lightgrey") +
# geom_polygon(data= UGA_districts_tidy_03, aes(x = long, y = lat, group = group), colour="black", size = 0.1, fill=MDA.vec, alpha=alpha.MDA.vec)+
# coord_equal()+
# #geom_point(data = UGA_TS_studies, aes(x=long, y=lat, size=Informed.prev, fill=sample.size, shape=Production.setting), colour="black", stroke=1.2, inherit.aes = FALSE)+
# #scale_fill_brewer("Sample size", palette = "YlOrRd",aesthetics = "fill")+
# #scale_size_discrete("Informed prevalence (%)")+
# #scale_shape_manual(values=c(24,25,22))+
# scale_colour_manual(values=c("black","purple2"), guide=FALSE)+
# labs(title="2003")+
# theme_void()+
# theme(
# plot.title = element_text(color="black", size=16, face="bold.italic"))+
# guides(fill=guide_legend(override.aes=list(shape=21, size=3, colour="black", stroke=1.2))) # need this to get colour in the fill (sample.size) legend
# Coverage of MDA: 2003-2009 treatment year #
District_name_vec <- c("ADJUMANI","APAC","ARUA","BUGIRI","BUNDIBUGYO","BUSHENYI","BUSIA","GULU",
"HOIMA","IGANGA","JINJA","KABALE","KABAROLE","KABERAMAIDO","KALANGALA","KAMPALA",
"KAMULI","KAMWENGE","KANUNGU","KAPCHORWA","KASESE","KATAKWI","KAYUNGA","KIBAALE",
"KIBOGA","KISORO","KITGUM","KOTIDO","KUMI","KYENJOJO","LIRA","LUWEERO",
"MASAKA","MASINDI","MAYUGE","MBALE","MBARARA","MOROTO","MOYO","MPIGI",
"MUBENDE","MUKONO","NAKAPIRIPIRIT","NAKASONGOLA","NEBBI","NTUNGAMO","PADER","PALLISA",
"RAKAI","RUKUNGIRI","SEMBABULE","SIRONKO","SOROTI","TORORO","WAKISO","YUMBE")
#dummy_dataset1 <- data1
dummy_dataset1 <- temp_data1
#dummy_dataset1 <- filter(dummy_dataset1, District_factor %in% District_name_vec) # filter so any renamed districts incldued
dummy_dataset1 <- filter(dummy_dataset1, Dist_name %in% District_name_vec) # filter so any renamed districts incldued
UGA_dist_MDAcov1_names <- UGA_dist_MDA_names %>% distinct() # remove districts not available in 2003
#UGA_dist_MDAcov1_names$Dist_name_chr <- as.character(UGA_dist_MDAcov1_names$Dist_name)
UGA_dist_MDAcov1_names$Dist_name_chr <- as.character(UGA_dist_MDAcov1_names$DISTRICT)
UGA_dist_MDAcov1_names <- UGA_dist_MDAcov1_names[order(UGA_dist_MDAcov1_names$Dist_name_chr),] # TO DO (this is a quick fix): to get this dataframe districts to align with dummy dataset district order
# Attach the MDA coverage column for the requested year to the district-name
# dataframe (coverage approach 1: denominator = total targeted).
#
# The original 18-branch if-chain simply selected column Cov_<year> and set a
# matching MDA_year factor; this does the same with one lookup. The only
# labelling quirk is preserved: year "2009rnd2" reads Cov_2009rnd2 but is
# labelled "2009_rnd2". Unknown years (e.g. 2011, no data) leave the input
# unchanged, as before.
make_cov_byyear_func1 <-
  function (year_input,
            UGA_dist_MDAcov1_names,
            dummy_dataset1) {
    cov_col <- paste0("Cov_", year_input)  # e.g. 2003 -> "Cov_2003"
    if (cov_col %in% names(dummy_dataset1)) {
      UGA_dist_MDAcov1_names$MDA_cov <- dummy_dataset1[[cov_col]]  # add coverage values to dataframe for mapping
      year_label <- if (identical(as.character(year_input), "2009rnd2")) {
        "2009_rnd2"  # historical label differs from the column suffix
      } else {
        as.character(year_input)
      }
      UGA_dist_MDAcov1_names$MDA_year <- as.factor(year_label)
    }
    return(UGA_dist_MDAcov1_names)
  }
UGA_dist_MDAcov1_names <-
make_cov_byyear_func1(
year_input = year_input,
UGA_dist_MDAcov1_names = UGA_dist_MDAcov1_names,
dummy_dataset1 = dummy_dataset1
) # call func
UGA_districts_MDAcov1_tidy <- left_join(district_map, UGA_dist_MDAcov1_names) # join boundary data to MDA presence data
UGA_districts_MDAcov1_tidy$MDA_cov <- as.numeric(UGA_districts_MDAcov1_tidy$MDA_cov) # make cov value a numeric variable
UGA_districts_MDAcov1_tidy$MDA <- as.factor(UGA_districts_MDAcov1_tidy$MDA) # make MDA presence a factor
UGA_districts_MDAcov1_tidy$Coverage_approach <- as.factor("denominator: total targeted")
alpha.MDA.col <- c(0.9, 0.01) # alpha for gpplot depending on MDA fill
alpha.MDA.vec <- alpha.MDA.col[UGA_districts_MDAcov1_tidy$MDA] # vector depending on MDA
UGA_districts_MDAcov1_tidy$alpha.MDA.vec <- alpha.MDA.vec # new column based for alpha in gpplot of each polygon
UGA_districts_MDAcov1_tidy$alpha.MDA.vec <-
ifelse(
is.na(UGA_districts_MDAcov1_tidy$MDA_cov) &
as.character(UGA_districts_MDAcov1_tidy$MDA) == "MDA",
0.01,
UGA_districts_MDAcov1_tidy$alpha.MDA.vec
) # some districts coded as MDA (from original analysis) but now no MDA coverage calculated after further data review, so change these to alpha = 0.01
# to plot
# Map_03_MDAcov1 <-
# ggplot() +
# geom_polygon(data = UGA, aes(x=long, y = lat, group = group), color = "black", size = 0.1, fill = "lightgrey") +
# geom_polygon(data= UGA_districts_MDAcov1_tidy_03, aes(x = long, y = lat, group = group, fill=MDA_cov), colour="black", size = 0.1, alpha=alpha.MDA.vec)+
# coord_equal()+
# labs(title="2003")+
# theme_void()+
# scale_fill_continuous(name = "MDA Coverage (%)", type = "viridis")+
# theme(
# plot.title = element_text(color="black", size=16, face="bold.italic"))
#guides(fill=guide_legend(override.aes=list(shape=21, size=3, colour="black", stroke=1.2))) # need this to get colour in the fill (sample.size) legend
dummy_dataset2 <- temp_data2
dummy_dataset2 <- filter(dummy_dataset2, Dist_name %in% District_name_vec) # filter so any renamed districts incldued
UGA_dist_MDAcov2_names <- UGA_dist_MDA_names %>% distinct() # remove districts not available in 2003
UGA_dist_MDAcov2_names$Dist_name_chr <- as.character(UGA_dist_MDAcov2_names$DISTRICT)
UGA_dist_MDAcov2_names <- UGA_dist_MDAcov2_names[order(UGA_dist_MDAcov2_names$Dist_name_chr),] # TO DO (this is a quick fix): to get this dataframe districts to align with dummy dataset district order
# Attach the MDA coverage column for the requested year to the district-name
# dataframe (coverage approach 2: denominator = district population).
#
# Same simplification as make_cov_byyear_func1: the per-year if-chain is
# replaced by a single Cov_<year> column lookup, preserving the "2009rnd2"
# -> "2009_rnd2" label quirk. Unknown years leave the input unchanged, as
# before.
make_cov_byyear_func2 <-
  function (year_input,
            UGA_dist_MDAcov2_names,
            dummy_dataset2) {
    cov_col <- paste0("Cov_", year_input)  # e.g. 2003 -> "Cov_2003"
    if (cov_col %in% names(dummy_dataset2)) {
      UGA_dist_MDAcov2_names$MDA_cov <- dummy_dataset2[[cov_col]]  # add coverage values to dataframe for mapping
      year_label <- if (identical(as.character(year_input), "2009rnd2")) {
        "2009_rnd2"  # historical label differs from the column suffix
      } else {
        as.character(year_input)
      }
      UGA_dist_MDAcov2_names$MDA_year <- as.factor(year_label)
    }
    return(UGA_dist_MDAcov2_names)
  }
# attach the year-specific coverage values (denominator 2: district population)
UGA_dist_MDAcov2_names <-
make_cov_byyear_func2(
year_input = year_input,
UGA_dist_MDAcov2_names = UGA_dist_MDAcov2_names,
dummy_dataset2 = dummy_dataset2
)
# UGA_dist_MDAcov2_names$MDA_cov <- dummy_dataset2$Cov_2003 # add coverage values to dataframe for mapping
UGA_districts_MDAcov2_tidy <- left_join(district_map, UGA_dist_MDAcov2_names) # join boundary data to MDA presence data
UGA_districts_MDAcov2_tidy$MDA_cov <- as.numeric(UGA_districts_MDAcov2_tidy$MDA_cov) # make cov value a numeric variable
UGA_districts_MDAcov2_tidy$MDA <- as.factor(UGA_districts_MDAcov2_tidy$MDA) # make MDA presence a factor
UGA_districts_MDAcov2_tidy$Coverage_approach <- as.factor("denominator: district population") # label used later as the facet/panel label
alpha.MDA.vec <- alpha.MDA.col[UGA_districts_MDAcov2_tidy$MDA] # per-polygon alpha selected by MDA factor level
UGA_districts_MDAcov2_tidy$alpha.MDA.vec <- alpha.MDA.vec # new column for the alpha aesthetic of each polygon in ggplot
UGA_districts_MDAcov2_tidy$alpha.MDA.vec <-
ifelse(
is.na(UGA_districts_MDAcov2_tidy$MDA_cov) &
as.character(UGA_districts_MDAcov2_tidy$MDA) == "MDA",
0.01,
UGA_districts_MDAcov2_tidy$alpha.MDA.vec
) # some districts coded as MDA (from original analysis) but no MDA coverage was calculated after further data review, so make those polygons near-transparent (alpha = 0.01)
# Map_03_MDAcov2 <-
# ggplot() +
# geom_polygon(data = UGA, aes(x=long, y = lat, group = group), color = "black", size = 0.1, fill = "lightgrey") +
# geom_polygon(data= UGA_districts_MDAcov2_tidy_03, aes(x = long, y = lat, group = group, fill=MDA_cov), colour="black", size = 0.1, alpha=alpha.MDA.vec)+
# coord_equal()+
# labs(title="2003")+
# theme_void()+
# scale_fill_continuous(name = "MDA Coverage (%)", type = "viridis")+
# theme(
# plot.title = element_text(color="black", size=16, face="bold.italic"))
#guides(fill=guide_legend(override.aes=list(shape=21, size=3, colour="black", stroke=1.2))) # need this to get colour in the fill (sample.size) legend
# prepare denominator-3 coverage data ("largest targeted number (03-19)" — see Coverage_approach label below)
dummy_dataset3 <- temp_data3
dummy_dataset3 <- filter(dummy_dataset3, Dist_name %in% District_name_vec) # filter so any renamed districts are included
UGA_dist_MDAcov3_names <- UGA_dist_MDA_names %>% distinct() # remove districts not available in 2003
UGA_dist_MDAcov3_names$Dist_name_chr <- as.character(UGA_dist_MDAcov3_names$DISTRICT)
UGA_dist_MDAcov3_names <- UGA_dist_MDAcov3_names[order(UGA_dist_MDAcov3_names$Dist_name_chr),] # TO DO (this is a quick fix): sort so this dataframe's districts align with the dummy dataset's district order
# Attach the MDA coverage column for the requested treatment year (denominator 3:
# largest targeted number across 2003-2019) to the district-name dataframe.
#
# year_input            : a treatment year (2003-2010, 2012-2019) or the string
#                         "2009rnd2" for the second 2009 round
# UGA_dist_MDAcov3_names: dataframe of district names (one row per district,
#                         row order assumed to match dummy_dataset3)
# dummy_dataset3        : coverage dataset holding one Cov_<year> column per year
#
# Returns the input dataframe with MDA_cov and MDA_year columns added. If no
# matching Cov_<year> column exists (e.g. 2011), the dataframe is returned
# unchanged — same behaviour as the original 17-branch if-chain this replaces.
make_cov_byyear_func3 <-
  function (year_input,
            UGA_dist_MDAcov3_names,
            dummy_dataset3) {
    cov_col <- paste0("Cov_", year_input) # e.g. "Cov_2003", "Cov_2009rnd2"
    if (cov_col %in% names(dummy_dataset3)) {
      UGA_dist_MDAcov3_names$MDA_cov <- dummy_dataset3[[cov_col]] # add coverage values to dataframe for mapping
      # factor label: the second 2009 round is labelled "2009_rnd2" (with underscore),
      # all other years are labelled with the year itself
      year_label <- if (year_input == "2009rnd2") "2009_rnd2" else as.character(year_input)
      UGA_dist_MDAcov3_names$MDA_year <- as.factor(year_label)
    }
    return(UGA_dist_MDAcov3_names)
  }
# attach the year-specific coverage values (denominator 3: largest targeted number 03-19)
UGA_dist_MDAcov3_names <-
make_cov_byyear_func3(
year_input = year_input,
UGA_dist_MDAcov3_names = UGA_dist_MDAcov3_names,
dummy_dataset3 = dummy_dataset3
)
#UGA_dist_MDAcov3_names_03$MDA_cov <- dummy_dataset_2003c$Cov_2003 # add coverage values to dataframe for mapping
UGA_districts_MDAcov3_tidy <- left_join(district_map, UGA_dist_MDAcov3_names) # join boundary data to MDA presence data
UGA_districts_MDAcov3_tidy$MDA_cov <- as.numeric(UGA_districts_MDAcov3_tidy$MDA_cov) # make cov value a numeric variable
UGA_districts_MDAcov3_tidy$MDA <- as.factor(UGA_districts_MDAcov3_tidy$MDA) # make MDA presence a factor
UGA_districts_MDAcov3_tidy$Coverage_approach <- as.factor("denominator: largest targeted number (03-19)") # label used later as the facet/panel label
alpha.MDA.vec <- alpha.MDA.col[UGA_districts_MDAcov3_tidy$MDA] # per-polygon alpha selected by MDA factor level
UGA_districts_MDAcov3_tidy$alpha.MDA.vec <- alpha.MDA.vec # new column for the alpha aesthetic of each polygon in ggplot
UGA_districts_MDAcov3_tidy$alpha.MDA.vec <-
ifelse(
is.na(UGA_districts_MDAcov3_tidy$MDA_cov) &
as.character(UGA_districts_MDAcov3_tidy$MDA) == "MDA",
0.01,
UGA_districts_MDAcov3_tidy$alpha.MDA.vec
) # some districts coded as MDA (from original analysis) but no MDA coverage was calculated after further data review, so make those polygons near-transparent (alpha = 0.01)
# Map_03_MDAcov3 <-
# ggplot() +
# geom_polygon(data = UGA, aes(x=long, y = lat, group = group), color = "black", size = 0.1, fill = "lightgrey") +
# geom_polygon(data= UGA_districts_MDAcov3_tidy_03, aes(x = long, y = lat, group = group, fill=MDA_cov), colour="black", size = 0.1, alpha=alpha.MDA.vec)+
# coord_equal()+
# labs(title="2003")+
# theme_void()+
# scale_fill_continuous(name = "MDA Coverage (%)", type = "viridis")+
# theme(
# plot.title = element_text(color="black", size=16, face="bold.italic"))
#guides(fill=guide_legend(override.aes=list(shape=21, size=3, colour="black", stroke=1.2))) # need this to get colour in the fill (sample.size) legend
# Combine the three coverage-denominator layers into one dataframe for faceted plotting #
# 2003-2019 MDA #
UGA_districts_MDAcov_tidy <- rbind(UGA_districts_MDAcov1_tidy, UGA_districts_MDAcov2_tidy,
UGA_districts_MDAcov3_tidy)
return(UGA_districts_MDAcov_tidy)
}
# 1) mapping 03-09 district coverages function - old function #
# district_MDA_coverage_mapping0309_dataframe_func <- function(data1, data2, data3, data4,
# district_names_0309, district_map_0309,
# year_input){
#
#
# # rename district names within coverage dataframes (to match shp dataframe object) #
#
# as.character(unique(unlist(data1$District))) # view names of districts in cov dataframe
# as.character(unique(unlist(data2$District))) # view names of districts in cov dataframe
# as.character(unique(unlist(data3$District))) # view names of districts in cov dataframe
# as.character(unique(unlist(data4$District))) # view names of districts in cov dataframe
#
# data1$District_factor <- as.factor(data1$District) # create new factor variable
# data2$District_factor <- as.factor(data2$District) # create new factor variable
# data3$District_factor <- as.factor(data3$District) # create new factor variable
# data4$District_factor <- as.factor(data4$District) # create new factor variable
#
# District_factor_col <- as.data.frame(data1[ , 10]) # select this column
#
# names(District_factor_col)[1] <- "District_factor" # rename col
#
# District_factor_col <- District_factor_col %>%
# mutate(
# District_factor = recode(District_factor, 'Abim' = 'ABIM', 'Adjumani'= 'ADJUMANI', 'Agogo' = 'AGAGO',
# 'Amolatar' = 'AMOLATAR', 'Amudat' = 'AMUDAT', 'Amuria' = 'AMURIA', 'Amuru' = 'AMURU',
# 'Apaca' = 'APAC', 'Aruaa' = 'ARUA', 'Budaka' = 'BUDAKA', 'Bududa' = 'BUDUDA', 'Bugiri' = 'BUGIRI',
# 'Bukeda' = 'BUKEDEA', 'Bukwo' = 'BUKWO', 'Bulilsa' = 'BULIISA', 'Bundibugyoa' = 'BUNDIBUGYO',
# 'Bushenyi' = 'BUSHENYI', 'Busia' = 'BUSIA', 'Butaleja' = 'BUTALEJA', 'Dokolo' = 'DOKOLO',
# 'Gulu' = 'GULU', 'Hoima' = 'HOIMA', 'Ibanda' = 'IBANDA', 'Iganga' = 'IGANGA', 'Isingiro' = 'ISINGIRO',
# 'Jinja' = 'JINJA', 'Kaabong' = 'KAABONG', 'Kabale' = 'KABALE', 'Kabarole' = 'KABAROLE',
# 'Kaberamaido' = 'KABERAMAIDO', 'Kalangala' = 'KALANGALA', 'Kaliro' = 'KALIRO', 'Kampala' = 'KAMPALA',
# 'Kamuli' = 'KAMULI', 'Kamwenge' = 'KAMWENGE', 'Kanungu' = 'KANUNGU', 'Kapchorwa' = 'KAPCHORWA',
# 'Kasese' = 'KASESE', 'Katakwi' = 'KATAKWI', 'Kayunga' = 'KAYUNGA', 'Kibaale' = 'KIBAALE',
# 'Kiboga' = 'KIBOGA', 'Kiruhura' = 'KIRUHURA', 'Kisoro' = 'KISORO', 'Kitgum' = 'KITGUM',
# 'Koboko' = 'KOBOKO', 'Kotido' = 'KOTIDO', 'Kumi' = 'KUMI', 'Kyenjojo' = 'KYENJOJO', 'Lamwo' = 'LAMWO',
# 'Lira' = 'LIRA', 'Luwero' = 'LUWERO', 'Lyantonde' = 'LYANTONDE', 'Manafwa' = 'MANAFWA',
# 'Maracha-Terego (or Nyadri)' = 'MARACHA (NYADRI)', 'Masaka' = 'MASAKA', 'Masindi' = 'MASINDI',
# 'Mayuge' = 'MAYUGE', 'Mbale' = 'MBALE', 'Mbarara' = 'MBARARA', 'Mityana' = 'MITYANA', 'Moroto' = 'MOROTO',
# 'Moyo' = 'MOYO', 'Mpigi' = 'MPIGI', 'Mubende' = 'MUBENDE', 'Mukono' = 'MUKONO',
# 'Nakapiripirit' = 'NAKAPIRIPIRIT', 'Nakaseke' = 'NAKASEKE', 'Nakasongola' = 'NAKASONGOLA',
# 'Namutumba' = 'NAMUTUMBA', 'Napak' = 'NAPAK', 'Nebbi' = 'NEBBI', 'Ntungamo' = 'NTUNGAMO',
# 'Nwoya (from Amuru)' = 'NWOYA', 'Oyam' = 'OYAM', 'Pader' = 'PADER', 'Pallisa ' = 'PALLISA',
# 'Rakai' = 'RAKAI', 'Rukungiri' = 'RUKUNGIRI', 'Sironko' = 'SIRONKO', 'Soroti' = 'SOROTI',
# 'SSembabule'='SSEMBABULE','Tororo' = 'TORORO',
# 'Wakiso' = 'WAKISO','Yumbe' = 'YUMBE'))
#
# # levels(District_factor_col$District_factor) # check
#
# data1$District_factor <- District_factor_col$District_factor
#
# data2$District_factor <- District_factor_col$District_factor
#
# data3$District_factor <- District_factor_col$District_factor
#
# data4$District_factor <- District_factor_col$District_factor
#
#
# # Mapping Presence of MDA: 2003-2009 treatment year #
#
# selecting_MDA_districts_func <- function(year_input) {
# if (year_input == 2003 || year_input == 2004) {
# MDA_districts <-
# c(
# "APAC",
# "MOYO",
# "ADJUMANI",
# "ARUA",
# "NEBBI",
# "LIRA",
# "NAKASONGOLA",
# "MASINDI",
# "HOIMA",
# "BUGIRI",
# "BUSIA",
# "KAYUNGA",
# "JINJA",
# "MUKONO",
# "WAKISO",
# "MAYUGE",
# "BUNDIBUGYO",
# "KIBAALE"
# ) # vector of districts with MDA in 2003
# }
# if (year_input == 2005 || year_input == 2006) {
# MDA_districts <-
# c(
# "APAC",
# "MOYO",
# "ADJUMANI",
# "YUMBE",
# "ARUA",
# "NEBBI",
# "LIRA",
# "KABERAMAIDO",
# "SOROTI",
# "NAKASONGOLA",
# "MASINDI",
# "HOIMA",
# "KAMULI",
# "BUGIRI",
# "BUSIA",
# "KAYUNGA",
# "JINJA",
# "MUKONO",
# "WAKISO",
# "MAYUGE",
# "KALANGALA",
# "KABALE",
# "KISORO",
# "KANUNGU",
# "RUKUNGIRI",
# "BUNDIBUGYO",
# "KIBAALE"
# )
# }
# if (year_input == 2005 || year_input == 2006) {
# MDA_districts <-
# c(
# "APAC",
# "MOYO",
# "ADJUMANI",
# "YUMBE",
# "ARUA",
# "NEBBI",
# "LIRA",
# "KABERAMAIDO",
# "SOROTI",
# "NAKASONGOLA",
# "MASINDI",
# "HOIMA",
# "KAMULI",
# "BUGIRI",
# "BUSIA",
# "KAYUNGA",
# "JINJA",
# "MUKONO",
# "WAKISO",
# "MAYUGE",
# "KALANGALA",
# "KABALE",
# "KISORO",
# "KANUNGU",
# "RUKUNGIRI",
# "BUNDIBUGYO",
# "KIBAALE"
# )
# }
# if (year_input == 2007) {
# MDA_districts <-
# c(
# "APAC",
# "MOYO",
# "ADJUMANI",
# "MARACHA (NYADRI)",
# "ARUA",
# "NEBBI",
# "LIRA",
# "KABERAMAIDO",
# "SOROTI",
# "AMOLATAR",
# "NAKASONGOLA",
# "DOKOLO",
# "BULISA",
# "HOIMA",
# "BUGIRI",
# "BUSIA",
# "KAYUNGA",
# "JINJA",
# "MUKONO",
# "WAKISO",
# "MAYUGE",
# "KALANGALA",
# "MPIGI",
# "MASAKA",
# "KABAROLE",
# "BUNDIBUGYO",
# "KIBAALE"
# )
# }
# if (year_input == 2008) {
# MDA_districts <-
# c(
# "APAC",
# "KOLE",
# "YUMBE",
# "KOBOKO",
# "MARACHA",
# "ARUA",
# "NEBBI",
# "LIRA",
# "ALBETONG",
# "OTUKE",
# "OYAM",
# "KABERAMAIDO",
# "SOROTI",
# "AMOLATAR",
# "NAKASONGOLA",
# "DOKOLO",
# "BULIISA",
# "HOIMA",
# "KAMULI",
# "BUGIRI",
# "KALIRO",
# "KAYUNGA",
# "JINJA",
# "MUKONO",
# "WAKISO",
# "MITYANA",
# "MAYUGE",
# "KALANGALA",
# "MPIGI",
# "GOMBA",
# "MASAKA",
# "RAKAI",
# "SSEMBABULE",
# "MUBENDE",
# "KIBAALE"
# )
# }
# if (year_input == 2009) {
# MDA_districts <-
# c(
# "PADER",
# "AGAGO",
# "LAMWO",
# "APAC",
# "KOLE",
# "KITGUM",
# "MOYO",
# "ADJUMANI",
# "YUMBE",
# "KOBOKO",
# "MARACHA",
# "ARUA",
# "NEBBI",
# "GULU",
# "LIRA",
# "ALEBTONG",
# "OTUKE",
# "OYAM",
# "KABERAMAIDO",
# "SERERE",
# "NAKASONGOLA",
# "DOKOLO",
# "KIRYANDONGO",
# "BULIISA",
# "HOIMA",
# "BUYENDE",
# "BUGIRI",
# "BUSIA",
# "KAYUNGA",
# "JINJA",
# "MUKONO",
# "WAKISO",
# "MITYANA",
# "MAYUGE",
# "KALANGALA",
# "MPIGI",
# "GOMBA",
# "MASAKA",
# "KABALE",
# "KABAROLE",
# "MUBENDE",
# "BUNDIBUGYO",
# "NTOROKO",
# "KIBAALE"
# )
# }
#
# return(MDA_districts)
# }
#
# MDA_districts <- selecting_MDA_districts_func(year_input = year_input) # call function
#
# length(MDA_districts)
#
# UGA_dist_MDA_names <- district_names_0309 # copy variable (dist names)
#
# UGA_dist_MDA_names$MDA <- ifelse(district_names_0309$Dist_name %in% MDA_districts, "MDA","none") # code whether MDA or not
#
# UGA_dist_MDA_names <- UGA_dist_MDA_names %>% rename(dname_2006 = Dist_name) # rename column
#
# UGA_districts_tidy <- left_join(district_map_0309, UGA_dist_MDA_names) # join boundary data to MDA presence data
#
# UGA_districts_tidy$MDA <- as.factor(UGA_districts_tidy$MDA) # make MDA presence a factor
#
# MDA.col <- c("purple2","lightgrey") # to colour MDA districts
# MDA.vec <- MDA.col[UGA_districts_tidy$MDA] # specify colour for each polygon
#
# UGA_districts_tidy$MDA_fill <- MDA.vec # new column for fill in ggplot depending on MDA
#
# alpha.MDA.col <- c(0.6, 0.01) # alpha for gpplot depending on MDA fill
#
# alpha.MDA.vec <- alpha.MDA.col[UGA_districts_tidy$MDA] # vector depending on MDA
#
# UGA_districts_tidy$alpha.MDA.vec <- alpha.MDA.vec # new column based for alpha in gpplot of each polygon
#
# # to plot: #
#
# # Map_03 <-
# # ggplot() +
# # geom_polygon(data = UGA, aes(x=long, y = lat, group = group), color = "black", size = 0.1, fill = "lightgrey") +
# # geom_polygon(data= UGA_districts_tidy_03, aes(x = long, y = lat, group = group), colour="black", size = 0.1, fill=MDA.vec, alpha=alpha.MDA.vec)+
# # coord_equal()+
# # #geom_point(data = UGA_TS_studies, aes(x=long, y=lat, size=Informed.prev, fill=sample.size, shape=Production.setting), colour="black", stroke=1.2, inherit.aes = FALSE)+
# # #scale_fill_brewer("Sample size", palette = "YlOrRd",aesthetics = "fill")+
# # #scale_size_discrete("Informed prevalence (%)")+
# # #scale_shape_manual(values=c(24,25,22))+
# # scale_colour_manual(values=c("black","purple2"), guide=FALSE)+
# # labs(title="2003")+
# # theme_void()+
# # theme(
# # plot.title = element_text(color="black", size=16, face="bold.italic"))+
# # guides(fill=guide_legend(override.aes=list(shape=21, size=3, colour="black", stroke=1.2))) # need this to get colour in the fill (sample.size) legend
#
# # Coverage of MDA: 2003-2009 treatment year #
#
# District_name0309_vec <- c('ABIM', 'ADJUMANI', 'AGAGO', 'AMOLATAR', 'AMUDAT', 'AMURIA', 'AMURU', 'APAC', 'ARUA',
# 'BUDAKA', 'BUDUDA', 'BUGIRI', 'BUKEDEA', 'BUKWO', 'BULIISA', 'BUNDIBUGYO', 'BUSHENYI', 'BUSIA',
# 'BUTALEJA', 'DOKOLO', 'GULU', 'HOIMA', 'IBANDA', 'IGANGA', 'ISINGIRO', 'JINJA', 'KAABONG',
# 'KABALE', 'KABAROLE', 'KABERAMAIDO', 'KALANGALA', 'KALIRO', 'KAMPALA', 'KAMULI', 'KAMWENGE',
# 'KANUNGU', 'KAPCHORWA', 'KASESE', 'KATAKWI', 'KAYUNGA', 'KIBAALE', 'KIBOGA', 'KIRUHURA',
# 'KISORO', 'KITGUM', 'KOBOKO', 'KOTIDO', 'KUMI', 'KYENJOJO', 'LAMWO', 'LIRA', 'LUWERO',
# 'LYANTONDE', 'MANAFWA', 'MARACHA (NYADRI)', 'MASAKA', 'MASINDI','MAYUGE', 'MBALE',
# 'MBARARA', 'MITYANA', 'MOROTO', 'MOYO', 'MPIGI', 'MUBENDE', 'MUKONO', 'NAKAPIRIPIRIT',
# 'NAKASEKE', 'NAKASONGOLA', 'NAMUTUMBA', 'NAPAK', 'NEBBI', 'NTUNGAMO', 'NWOYA', 'OYAM', 'PADER',
# 'PALLISA', 'RAKAI', 'RUKUNGIRI', 'SIRONKO', 'SOROTI', 'SSEMBABULE', 'TORORO', 'WAKISO', 'YUMBE')
#
#
# dummy_dataset1 <- data1
#
# dummy_dataset1 <- filter(dummy_dataset1, District_factor %in% District_name0309_vec) # filter so any renamed districts included
#
# UGA_dist_MDAcov1_names <- UGA_dist_MDA_names %>% distinct() # remove districts not available in 2003
#
# UGA_dist_MDAcov1_names$dname_2006_chr <- as.character(UGA_dist_MDAcov1_names$dname_2006)
#
# UGA_dist_MDAcov1_names <- UGA_dist_MDAcov1_names[order(UGA_dist_MDAcov1_names$dname_2006_chr),] # TO DO (this is a quick fix): to get this dataframe districts to align with dummy dataset district order
#
# make_cov_byyear_func1 <-
# function (year_input,
# UGA_dist_MDAcov1_names,
# dummy_dataset1) {
# if (year_input == 2003) {
# UGA_dist_MDAcov1_names$MDA_cov <-
# dummy_dataset1$Cov_2003 # add coverage values to dataframe for mapping
# UGA_dist_MDAcov1_names$MDA_year <- as.factor("2003")
# }
# if (year_input == 2004) {
# UGA_dist_MDAcov1_names$MDA_cov <- dummy_dataset1$Cov_2004
# UGA_dist_MDAcov1_names$MDA_year <- as.factor("2004")
# }
# if (year_input == 2005) {
# UGA_dist_MDAcov1_names$MDA_cov <- dummy_dataset1$Cov_2005
# UGA_dist_MDAcov1_names$MDA_year <- as.factor("2005")
# }
# if (year_input == 2006) {
# UGA_dist_MDAcov1_names$MDA_cov <- dummy_dataset1$Cov_2006
# UGA_dist_MDAcov1_names$MDA_year <- as.factor("2006")
# }
# if (year_input == 2007) {
# UGA_dist_MDAcov1_names$MDA_cov <- dummy_dataset1$Cov_2007
# UGA_dist_MDAcov1_names$MDA_year <- as.factor("2007")
# }
# if (year_input == 2008) {
# UGA_dist_MDAcov1_names$MDA_cov <- dummy_dataset1$Cov_2008
# UGA_dist_MDAcov1_names$MDA_year <- as.factor("2008")
# }
# if (year_input == 2009) {
# UGA_dist_MDAcov1_names$MDA_cov <- dummy_dataset1$Cov_2009
# UGA_dist_MDAcov1_names$MDA_year <- as.factor("2009")
# }
# return(UGA_dist_MDAcov1_names)
# }
#
# UGA_dist_MDAcov1_names <-
# make_cov_byyear_func1(
# year_input = year_input,
# UGA_dist_MDAcov1_names = UGA_dist_MDAcov1_names,
# dummy_dataset1 = dummy_dataset1
# ) # call func
#
# UGA_districts_MDAcov1_tidy <- left_join(district_map_0309, UGA_dist_MDAcov1_names) # join boundary data to MDA presence data
#
# UGA_districts_MDAcov1_tidy$MDA_cov <- as.numeric(UGA_districts_MDAcov1_tidy$MDA_cov) # make cov value a numeric variable
#
# UGA_districts_MDAcov1_tidy$MDA <- as.factor(UGA_districts_MDAcov1_tidy$MDA) # make MDA presence a factor
#
# UGA_districts_MDAcov1_tidy$Coverage_approach <- as.factor("denominator: total targeted")
#
# alpha.MDA.col <- c(0.9, 0.01) # alpha for gpplot depending on MDA fill
#
# alpha.MDA.vec <- alpha.MDA.col[UGA_districts_MDAcov1_tidy$MDA] # vector depending on MDA
#
# UGA_districts_MDAcov1_tidy$alpha.MDA.vec <- alpha.MDA.vec # new column based for alpha in gpplot of each polygon
#
# UGA_districts_MDAcov1_tidy$alpha.MDA.vec <-
# ifelse(
# is.na(UGA_districts_MDAcov1_tidy$MDA_cov) &
# as.character(UGA_districts_MDAcov1_tidy$MDA) == "MDA",
# 0.01,
# UGA_districts_MDAcov1_tidy$alpha.MDA.vec
# ) # some districts coded as MDA (from original analysis) but now no MDA coverage calculated after further data review, so change these to alpha = 0.01
#
# # to plot
#
# # Map_03_MDAcov1 <-
# # ggplot() +
# # geom_polygon(data = UGA, aes(x=long, y = lat, group = group), color = "black", size = 0.1, fill = "lightgrey") +
# # geom_polygon(data= UGA_districts_MDAcov1_tidy_03, aes(x = long, y = lat, group = group, fill=MDA_cov), colour="black", size = 0.1, alpha=alpha.MDA.vec)+
# # coord_equal()+
# # labs(title="2003")+
# # theme_void()+
# # scale_fill_continuous(name = "MDA Coverage (%)", type = "viridis")+
# # theme(
# # plot.title = element_text(color="black", size=16, face="bold.italic"))
# #guides(fill=guide_legend(override.aes=list(shape=21, size=3, colour="black", stroke=1.2))) # need this to get colour in the fill (sample.size) legend
#
#
# dummy_dataset2 <- data2
#
# dummy_dataset2 <- filter(dummy_dataset2, District_factor %in% District_name0309_vec) # filter so any renamed districts incldued
#
# UGA_dist_MDAcov2_names <- UGA_dist_MDA_names %>% distinct() # remove districts not available in 2003
#
# UGA_dist_MDAcov2_names$dname_2006_chr <- as.character(UGA_dist_MDAcov2_names$dname_2006)
#
# UGA_dist_MDAcov2_names <- UGA_dist_MDAcov2_names[order(UGA_dist_MDAcov2_names$dname_2006_chr),] # TO DO (this is a quick fix): to get this dataframe districts to align with dummy dataset district order
#
# make_cov_byyear_func2 <-
# function (year_input,
# UGA_dist_MDAcov2_names,
# dummy_dataset2) {
# if (year_input == 2003) {
# UGA_dist_MDAcov2_names$MDA_cov <-
# dummy_dataset2$Cov_2003 # add coverage values to dataframe for mapping
# UGA_dist_MDAcov2_names$MDA_year <- as.factor("2003")
# }
# if (year_input == 2004) {
# UGA_dist_MDAcov2_names$MDA_cov <- dummy_dataset2$Cov_2004
# UGA_dist_MDAcov2_names$MDA_year <- as.factor("2004")
# }
# if (year_input == 2005) {
# UGA_dist_MDAcov2_names$MDA_cov <- dummy_dataset2$Cov_2005
# UGA_dist_MDAcov2_names$MDA_year <- as.factor("2005")
# }
# if (year_input == 2006) {
# UGA_dist_MDAcov2_names$MDA_cov <- dummy_dataset2$Cov_2006
# UGA_dist_MDAcov2_names$MDA_year <- as.factor("2006")
# }
# if (year_input == 2007) {
# UGA_dist_MDAcov2_names$MDA_cov <- dummy_dataset2$Cov_2007
# UGA_dist_MDAcov2_names$MDA_year <- as.factor("2007")
# }
# if (year_input == 2008) {
# UGA_dist_MDAcov2_names$MDA_cov <- dummy_dataset2$Cov_2008
# UGA_dist_MDAcov2_names$MDA_year <- as.factor("2008")
# }
# if (year_input == 2009) {
# UGA_dist_MDAcov2_names$MDA_cov <- dummy_dataset2$Cov_2009
# UGA_dist_MDAcov2_names$MDA_year <- as.factor("2009")
# }
# return(UGA_dist_MDAcov2_names)
# }
#
# UGA_dist_MDAcov2_names <-
# make_cov_byyear_func2(
# year_input = year_input,
# UGA_dist_MDAcov2_names = UGA_dist_MDAcov2_names,
# dummy_dataset2 = dummy_dataset2
# )
#
#
# # UGA_dist_MDAcov2_names$MDA_cov <- dummy_dataset2$Cov_2003 # add coverage values to dataframe for mapping
#
# UGA_districts_MDAcov2_tidy <- left_join(district_map_0309, UGA_dist_MDAcov2_names) # join boundary data to MDA presence data
#
# UGA_districts_MDAcov2_tidy$MDA_cov <- as.numeric(UGA_districts_MDAcov2_tidy$MDA_cov) # make cov value a numeric variable
#
# UGA_districts_MDAcov2_tidy$MDA <- as.factor(UGA_districts_MDAcov2_tidy$MDA) # make MDA presence a factor
#
# UGA_districts_MDAcov2_tidy$Coverage_approach <- as.factor("denominator: district population (constant growth)")
#
# alpha.MDA.vec <- alpha.MDA.col[UGA_districts_MDAcov2_tidy$MDA] # vector depending on MDA
#
# UGA_districts_MDAcov2_tidy$alpha.MDA.vec <- alpha.MDA.vec # new column based for alpha in gpplot of each polygon
#
# UGA_districts_MDAcov2_tidy$alpha.MDA.vec <-
# ifelse(
# is.na(UGA_districts_MDAcov2_tidy$MDA_cov) &
# as.character(UGA_districts_MDAcov2_tidy$MDA) == "MDA",
# 0.01,
# UGA_districts_MDAcov2_tidy$alpha.MDA.vec
# ) # some districts coded as MDA (from original analysis) but now no MDA coverage calculated after further data review, so change these to alpha = 0.01
#
#
# # Map_03_MDAcov2 <-
# # ggplot() +
# # geom_polygon(data = UGA, aes(x=long, y = lat, group = group), color = "black", size = 0.1, fill = "lightgrey") +
# # geom_polygon(data= UGA_districts_MDAcov2_tidy_03, aes(x = long, y = lat, group = group, fill=MDA_cov), colour="black", size = 0.1, alpha=alpha.MDA.vec)+
# # coord_equal()+
# # labs(title="2003")+
# # theme_void()+
# # scale_fill_continuous(name = "MDA Coverage (%)", type = "viridis")+
# # theme(
# # plot.title = element_text(color="black", size=16, face="bold.italic"))
# #guides(fill=guide_legend(override.aes=list(shape=21, size=3, colour="black", stroke=1.2))) # need this to get colour in the fill (sample.size) legend
#
#
# dummy_dataset3 <- data3
#
# dummy_dataset3 <- filter(dummy_dataset3, District_factor %in% District_name0309_vec) # filter so any renamed districts incldued
#
# UGA_dist_MDAcov3_names <- UGA_dist_MDA_names %>% distinct() # remove districts not available in 2003
#
# UGA_dist_MDAcov3_names$dname_2006_chr <- as.character(UGA_dist_MDAcov3_names$dname_2006)
#
# UGA_dist_MDAcov3_names <- UGA_dist_MDAcov3_names[order(UGA_dist_MDAcov3_names$dname_2006_chr),] # TO DO (this is a quick fix): to get this dataframe districts to align with dummy dataset district order
#
# make_cov_byyear_func3 <-
# function (year_input,
# UGA_dist_MDAcov3_names,
# dummy_dataset3) {
# if (year_input == 2003) {
# UGA_dist_MDAcov3_names$MDA_cov <-
# dummy_dataset3$Cov_2003 # add coverage values to dataframe for mapping
# UGA_dist_MDAcov3_names$MDA_year <- as.factor("2003")
# }
# if (year_input == 2004) {
# UGA_dist_MDAcov3_names$MDA_cov <- dummy_dataset3$Cov_2004
# UGA_dist_MDAcov3_names$MDA_year <- as.factor("2004")
# }
# if (year_input == 2005) {
# UGA_dist_MDAcov3_names$MDA_cov <- dummy_dataset3$Cov_2005
# UGA_dist_MDAcov3_names$MDA_year <- as.factor("2005")
# }
# if (year_input == 2006) {
# UGA_dist_MDAcov3_names$MDA_cov <- dummy_dataset3$Cov_2006
# UGA_dist_MDAcov3_names$MDA_year <- as.factor("2006")
# }
# if (year_input == 2007) {
# UGA_dist_MDAcov3_names$MDA_cov <- dummy_dataset3$Cov_2007
# UGA_dist_MDAcov3_names$MDA_year <- as.factor("2007")
# }
# if (year_input == 2008) {
# UGA_dist_MDAcov3_names$MDA_cov <- dummy_dataset3$Cov_2008
# UGA_dist_MDAcov3_names$MDA_year <- as.factor("2008")
# }
# if (year_input == 2009) {
# UGA_dist_MDAcov3_names$MDA_cov <- dummy_dataset3$Cov_2009
# UGA_dist_MDAcov3_names$MDA_year <- as.factor("2009")
# }
# return(UGA_dist_MDAcov3_names)
# }
#
# UGA_dist_MDAcov3_names <-
# make_cov_byyear_func3(
# year_input = year_input,
# UGA_dist_MDAcov3_names = UGA_dist_MDAcov3_names,
# dummy_dataset3 = dummy_dataset3
# )
#
# #UGA_dist_MDAcov3_names_03$MDA_cov <- dummy_dataset_2003c$Cov_2003 # add coverage values to dataframe for mapping
#
# UGA_districts_MDAcov3_tidy <- left_join(district_map_0309, UGA_dist_MDAcov3_names) # join boundary data to MDA presence data
#
# UGA_districts_MDAcov3_tidy$MDA_cov <- as.numeric(UGA_districts_MDAcov3_tidy$MDA_cov) # make cov value a numeric variable
#
# UGA_districts_MDAcov3_tidy$MDA <- as.factor(UGA_districts_MDAcov3_tidy$MDA) # make MDA presence a factor
#
# UGA_districts_MDAcov3_tidy$Coverage_approach <- as.factor("denominator: district population (SCIF numbers
# & constant growth)")
#
# alpha.MDA.vec <- alpha.MDA.col[UGA_districts_MDAcov3_tidy$MDA] # vector depending on MDA
#
# UGA_districts_MDAcov3_tidy$alpha.MDA.vec <- alpha.MDA.vec # new column based for alpha in gpplot of each polygon
#
# UGA_districts_MDAcov3_tidy$alpha.MDA.vec <-
# ifelse(
# is.na(UGA_districts_MDAcov3_tidy$MDA_cov) &
# as.character(UGA_districts_MDAcov3_tidy$MDA) == "MDA",
# 0.01,
# UGA_districts_MDAcov3_tidy$alpha.MDA.vec
# ) # some districts coded as MDA (from original analysis) but now no MDA coverage calculated after further data review, so change these to alpha = 0.01
#
# # Map_03_MDAcov3 <-
# # ggplot() +
# # geom_polygon(data = UGA, aes(x=long, y = lat, group = group), color = "black", size = 0.1, fill = "lightgrey") +
# # geom_polygon(data= UGA_districts_MDAcov3_tidy_03, aes(x = long, y = lat, group = group, fill=MDA_cov), colour="black", size = 0.1, alpha=alpha.MDA.vec)+
# # coord_equal()+
# # labs(title="2003")+
# # theme_void()+
# # scale_fill_continuous(name = "MDA Coverage (%)", type = "viridis")+
# # theme(
# # plot.title = element_text(color="black", size=16, face="bold.italic"))
# #guides(fill=guide_legend(override.aes=list(shape=21, size=3, colour="black", stroke=1.2))) # need this to get colour in the fill (sample.size) legend
#
#
# dummy_dataset4 <- data4
#
# dummy_dataset4 <- filter(dummy_dataset4, District_factor %in% District_name0309_vec) # filter so any renamed districts incldued
#
# UGA_dist_MDAcov4_names <- UGA_dist_MDA_names %>% distinct() # remove districts not available in 2003
#
# UGA_dist_MDAcov4_names$dname_2006_chr <- as.character(UGA_dist_MDAcov4_names$dname_2006)
#
# UGA_dist_MDAcov4_names <- UGA_dist_MDAcov4_names[order(UGA_dist_MDAcov4_names$dname_2006_chr),] # TO DO (this is a quick fix): to get this dataframe districts to align with dummy dataset district order
#
# make_cov_byyear_func4 <-
# function (year_input,
# UGA_dist_MDAcov4_names,
# dummy_dataset4) {
# if (year_input == 2003) {
# UGA_dist_MDAcov4_names$MDA_cov <-
# dummy_dataset4$Cov_2003 # add coverage values to dataframe for mapping
# UGA_dist_MDAcov4_names$MDA_year <- as.factor("2003")
# }
# if (year_input == 2004) {
# UGA_dist_MDAcov4_names$MDA_cov <- dummy_dataset4$Cov_2004
# UGA_dist_MDAcov4_names$MDA_year <- as.factor("2004")
# }
# if (year_input == 2005) {
# UGA_dist_MDAcov4_names$MDA_cov <- dummy_dataset4$Cov_2005
# UGA_dist_MDAcov4_names$MDA_year <- as.factor("2005")
# }
# if (year_input == 2006) {
# UGA_dist_MDAcov4_names$MDA_cov <- dummy_dataset4$Cov_2006
# UGA_dist_MDAcov4_names$MDA_year <- as.factor("2006")
# }
# if (year_input == 2007) {
# UGA_dist_MDAcov4_names$MDA_cov <- dummy_dataset4$Cov_2007
# UGA_dist_MDAcov4_names$MDA_year <- as.factor("2007")
# }
# if (year_input == 2008) {
# UGA_dist_MDAcov4_names$MDA_cov <- dummy_dataset4$Cov_2008
# UGA_dist_MDAcov4_names$MDA_year <- as.factor("2008")
# }
# if (year_input == 2009) {
# UGA_dist_MDAcov4_names$MDA_cov <- dummy_dataset4$Cov_2009
# UGA_dist_MDAcov4_names$MDA_year <- as.factor("2009")
# }
#
# return(UGA_dist_MDAcov4_names)
# }
#
# UGA_dist_MDAcov4_names <-
# make_cov_byyear_func4(
# year_input = year_input,
# UGA_dist_MDAcov4_names = UGA_dist_MDAcov4_names,
# dummy_dataset4 = dummy_dataset4
# )
#
# #UGA_dist_MDAcov4_names_03$MDA_cov <- dummy_dataset_2003d$Cov_2003 # add coverage values to dataframe for mapping
#
# UGA_districts_MDAcov4_tidy <- left_join(district_map_0309, UGA_dist_MDAcov4_names) # join boundary data to MDA presence data
#
# UGA_districts_MDAcov4_tidy$MDA_cov <- as.numeric(UGA_districts_MDAcov4_tidy$MDA_cov) # make cov value a numeric variable
#
# UGA_districts_MDAcov4_tidy$MDA <- as.factor(UGA_districts_MDAcov4_tidy$MDA) # make MDA presence a factor
#
# UGA_districts_MDAcov4_tidy$Coverage_approach <- as.factor("denominator: largest targeted pop (across years)")
#
# alpha.MDA.vec <- alpha.MDA.col[UGA_districts_MDAcov4_tidy$MDA] # vector depending on MDA
#
# UGA_districts_MDAcov4_tidy$alpha.MDA.vec <- alpha.MDA.vec # new column based for alpha in gpplot of each polygon
#
# UGA_districts_MDAcov4_tidy$alpha.MDA.vec <-
# ifelse(
# is.na(UGA_districts_MDAcov4_tidy$MDA_cov) &
# as.character(UGA_districts_MDAcov4_tidy$MDA) == "MDA",
# 0.01,
# UGA_districts_MDAcov4_tidy$alpha.MDA.vec
# ) # some districts coded as MDA (from original analysis) but now no MDA coverage calculated after further data review, so change these to alpha = 0.01
#
# # Map_03_MDAcov4 <-
# # ggplot() +
# # geom_polygon(data = UGA, aes(x=long, y = lat, group = group), color = "black", size = 0.1, fill = "lightgrey") +
# # geom_polygon(data= UGA_districts_MDAcov4_tidy_03, aes(x = long, y = lat, group = group, fill=MDA_cov), colour="black", size = 0.1, alpha=alpha.MDA.vec)+
# # coord_equal()+
# # labs(title="2003")+
# # theme_void()+
# # scale_fill_continuous(name = "MDA Coverage (%)", type = "viridis")+
# # theme(
# # plot.title = element_text(color="black", size=16, face="bold.italic"))
# #guides(fill=guide_legend(override.aes=list(shape=21, size=3, colour="black", stroke=1.2))) # need this to get colour in the fill (sample.size) legend
#
#
# # Combined & plot different coverages #
#
#
# # 2003-2009 MDA #
# UGA_districts_MDAcov_tidy <- rbind(UGA_districts_MDAcov1_tidy, UGA_districts_MDAcov2_tidy,
# UGA_districts_MDAcov3_tidy, UGA_districts_MDAcov4_tidy)
#
#
# return(UGA_districts_MDAcov_tidy)
#
# }
# TO DO: include years for 2010 onwards or make new function?
# plot maps with all denominators for coverage (3 plots across)
# Plot district-level MDA coverage for one treatment year, faceted by the
# three coverage-denominator approaches (one panel per Coverage_approach).
#
# national_map: fortified national boundary polygons (long/lat/group columns)
# MDA_data    : tidy district polygons with MDA_cov, alpha.MDA.vec, MDA_year
#               and Coverage_approach columns
#
# Returns the ggplot object (caller decides when/where to print it).
plot_UGA_MDA_func <- function(national_map, MDA_data){
  # every row carries the same MDA_year, so unique() yields the title text
  year_title <- as.character(unique(MDA_data$MDA_year))
  country_outline <- geom_polygon(
    data = national_map,
    aes(x = long, y = lat, group = group),
    color = "black", size = 0.1, fill = "lightgrey"
  )
  # NOTE(review): alpha is mapped inside aes() together with
  # scale_alpha_continuous(), which rescales alpha.MDA.vec rather than using
  # its values literally — confirm this matches the intended transparency
  # (cf. scale_alpha_identity).
  district_fill <- geom_polygon(
    data = MDA_data,
    aes(x = long, y = lat, group = group, fill = MDA_cov, alpha = alpha.MDA.vec),
    colour = "black", size = 0.1
  )
  coverage_map <- ggplot() +
    country_outline +
    district_fill +
    coord_equal() +
    facet_wrap(~Coverage_approach) +
    labs(title = year_title, caption = "Dark grey values > 100%") +
    theme_void() +
    scale_fill_continuous(name = "MDA Coverage (%)", type = "viridis", limits = c(0, 100)) +
    scale_alpha_continuous(guide = "none") +
    theme(
      plot.title = element_text(color = "black", size = 16, face = "bold.italic"),
      plot.caption = element_text(face = "italic", size = 9)
    )
  return(coverage_map)
}
# plot maps with just denominator 1 (total targeted)
# Plot district-level MDA coverage for one treatment year using denominator 1
# only ("denominator: total targeted") — a single, un-faceted panel.
#
# national_map: fortified national boundary polygons (long/lat/group columns)
# MDA_data    : combined tidy coverage data; subset here to denominator 1 rows
#
# Returns the ggplot object.
plot_UGA_denominator1_MDA_func <- function(national_map, MDA_data){
  denom1_rows <- subset(MDA_data, Coverage_approach == "denominator: total targeted")
  year_title <- as.character(unique(denom1_rows$MDA_year)) # year for plot title
  country_outline <- geom_polygon(
    data = national_map,
    aes(x = long, y = lat, group = group),
    color = "black", size = 0.1, fill = "lightgrey"
  )
  district_fill <- geom_polygon(
    data = denom1_rows,
    aes(x = long, y = lat, group = group, fill = MDA_cov, alpha = alpha.MDA.vec),
    colour = "black", size = 0.1
  )
  coverage_map <- ggplot() +
    country_outline +
    district_fill +
    coord_equal() +
    labs(title = year_title, caption = "Dark grey values > 100%") +
    theme_void() +
    scale_fill_continuous(name = "MDA Coverage (%)", type = "viridis", limits = c(0, 100)) +
    scale_alpha_continuous(guide = "none") +
    theme(
      plot.title = element_text(color = "black", size = 16, face = "bold.italic"),
      plot.caption = element_text(face = "italic", size = 9)
    )
  return(coverage_map)
}
# plot maps with just denominator 2 (total district pop)
# Map district-level MDA coverage for one year using only denominator 2
# (total district population). Smaller title/caption text than the
# denominator-1 variant.
# Args:
#   national_map: national boundary polygons (grey base layer).
#   MDA_data: tidy polygon dataframe with MDA_cov, alpha.MDA.vec,
#             Coverage_approach and MDA_year columns.
# Returns: a ggplot object (single panel, no faceting).
plot_UGA_denominator2_MDA_func <- function(national_map, MDA_data){
  # Restrict to the "district population" denominator rows only.
  cov_data <- subset(MDA_data, Coverage_approach == "denominator: district population")
  year_title <- as.character(unique(cov_data$MDA_year)) # year for plot title
  coverage_map <- ggplot()
  coverage_map <- coverage_map +
    geom_polygon(data = national_map, aes(x=long, y = lat, group = group), color = "black", size = 0.1, fill = "lightgrey")
  coverage_map <- coverage_map +
    geom_polygon(data= cov_data, aes(x = long, y = lat, group = group, fill=MDA_cov, alpha = alpha.MDA.vec), colour="black", size = 0.1)
  coverage_map <- coverage_map +
    coord_equal() +
    labs(title=year_title, caption ="Dark grey values > 100%") +
    theme_void() +
    scale_fill_continuous(name = "MDA Coverage (%)", type = "viridis", limits=c(0, 100)) +
    scale_alpha_continuous(guide = "none") +
    theme(
      plot.title = element_text(color="black", size=8, face="bold.italic"),
      plot.caption = element_text(face = "italic", size = 6))
  return(coverage_map)
}
# plot maps with just denominator 3 (largest total targeted across 03-19 for given district)
# Map district-level MDA coverage for one year using only denominator 3
# (largest total targeted number per district across 2003-2019).
# Args:
#   national_map: national boundary polygons (grey base layer).
#   MDA_data: tidy polygon dataframe with MDA_cov, alpha.MDA.vec,
#             Coverage_approach and MDA_year columns.
# Returns: a ggplot object (single panel, no faceting, empty caption).
plot_UGA_denominator3_MDA_func <- function(national_map, MDA_data){
  # Restrict to the "largest targeted number" denominator rows only.
  cov_data <- subset(MDA_data, Coverage_approach == "denominator: largest targeted number (03-19)")
  year_title <- as.character(unique(cov_data$MDA_year)) # year for plot title
  coverage_map <- ggplot()
  coverage_map <- coverage_map +
    geom_polygon(data = national_map, aes(x=long, y = lat, group = group), color = "black", size = 0.1, fill = "lightgrey")
  coverage_map <- coverage_map +
    geom_polygon(data= cov_data, aes(x = long, y = lat, group = group, fill=MDA_cov, alpha = alpha.MDA.vec), colour="black", size = 0.1)
  coverage_map <- coverage_map +
    coord_equal() +
    labs(title=year_title, caption ="") +
    theme_void() +
    scale_fill_continuous(name = "MDA Coverage (%)", type = "viridis", limits=c(0, 100)) +
    scale_alpha_continuous(guide = "none") +
    theme(
      plot.title = element_text(color="black", size=8, face="bold.italic")
    )
  return(coverage_map)
}
# TO DO: extend to map from 2010, create additional map and call as list, or make new function?
# Sub-counties #
# map sub-counties function #
# Re-project sub-county polygons to WGS84 lat/lon, plot them over the
# district and national boundaries, and build a tidy dataframe for mapping.
#
# Args:
#   subcounty_shape_file: SpatialPolygonsDataFrame of sub-county boundaries.
#   district_map: district boundary polygons to overlay (black outlines).
#   national_map_input: national boundary polygons (grey base layer).
# Returns: list(boundary ggplot, re-projected spatial object, tidy dataframe
#          with the shapefile's attribute columns joined on polygon id).
UGA_subcounties_boundaries_function <- function(subcounty_shape_file, district_map, national_map_input){
  # Transform UGA sub-county data to WGS84 lat/lon co-ordinates first.
  # NOTE(review): the target CRS is taken from the global `Uganda_dist`
  # object — confirm it is loaded before this function is called.
  subcounties_WGS84 <- spTransform(subcounty_shape_file,
                                   crs(Uganda_dist))
  # BUG FIX: the original body referenced the globals `national_map` and
  # `districts_2001` instead of the `national_map_input` / `district_map`
  # parameters, silently ignoring whatever the caller passed in.
  subcounties_plot <- ggplot() +
    geom_polygon(data = national_map_input, aes(x=long, y = lat, group = group), color = "black", size = 0.1, fill = "lightgrey") +
    geom_polygon(data = district_map, aes(x = long, y = lat, group = group), colour = "black", alpha = 1, fill = NA)+
    geom_polygon(data = subcounties_WGS84, aes(x = long, y = lat, group = group), colour = "blue", alpha = 0.75, fill = NA)+
    coord_equal(ratio = 1) # plot district boundaries
  # Turn the re-projected sub-county polygons into a dataframe to work with.
  subcounties_tidy <- tidy(subcounties_WGS84)
  # Attach polygon row ids so the attribute data can be joined back on.
  subcounties_WGS84$id <- row.names(subcounties_WGS84)
  UGA_subcounties_tidy <- left_join(subcounties_tidy, subcounties_WGS84@data) # join attributes into dataframe
  return(list(subcounties_plot, subcounties_WGS84, UGA_subcounties_tidy))
}
# sub-county names for MDA mapping function #
# Build district / sub-county name tables from the 2006 and 2010 shape-file
# attributes, disambiguating sub-county names duplicated across districts
# (downstream MDA matching needs unique sub-county names, e.g. two MALONGOs).
#
# Args:
#   shape_file: SpatialPolygonsDataFrame whose @data holds DNAME_2006,
#               SNAME_2006, DNAME_2010 and SNAME_2010 columns.
# Returns: list(2006 name table, 2010 name table), each sorted by district.
subcounty_name0412_func <- function(shape_file){
  # Replace Subcounty_name with a unique suffix-ed name where the
  # (district, subcounty) pair matches a known duplicate.
  rename_dup <- function(df, district, old_name, new_name){
    df$Subcounty_name <- ifelse(df$Dist_name == district & df$Subcounty_name == old_name,
                                new_name, df$Subcounty_name)
    df
  }
  # 2006 boundaries (used for the 2004 MDA round)
  names_2006 <- data.frame(Dist_name = shape_file@data$DNAME_2006,
                           Subcounty_name = shape_file@data$SNAME_2006)
  names_2006 <- names_2006[order(names_2006$Dist_name), ]
  names_2006 <- rename_dup(names_2006, "MAYUGE", "MALONGO", "MALONGO1")
  # 2010 boundaries (used for the 2012 MDA round)
  names_2010 <- data.frame(Dist_name = shape_file@data$DNAME_2010,
                           Subcounty_name = shape_file@data$SNAME_2010)
  names_2010 <- names_2010[order(names_2010$Dist_name), ]
  # Must also rename any duplicate sub-counties with MDA in this object.
  names_2010 <- rename_dup(names_2010, "MAYUGE", "MALONGO", "MALONGO1")
  names_2010 <- rename_dup(names_2010, "MASAKA", "BUWUNGA", "BUWUNGA1")
  names_2010 <- rename_dup(names_2010, "JINJA", "CENTRAL DIVISION", "CENTRAL DIVISION1")
  names_2010 <- rename_dup(names_2010, "KAYUNGA", "KAYONZA", "KAYONZA1")
  return(list(names_2006, names_2010))
}
# Build the 2019 district / sub-county name table from the shape-file
# attributes, disambiguating sub-county names duplicated across districts.
#
# Args:
#   shape_file: SpatialPolygonsDataFrame whose @data holds District and
#               Subcounty columns.
# Returns: list(2019 name table) sorted by district, matching the list
#          shape returned by subcounty_name0412_func.
subcounty_name19_func <- function(shape_file){
  # Replace Subcounty_name with a unique suffix-ed name where the
  # (district, subcounty) pair matches a known duplicate.
  rename_dup <- function(df, district, old_name, new_name){
    df$Subcounty_name <- ifelse(df$Dist_name == district & df$Subcounty_name == old_name,
                                new_name, df$Subcounty_name)
    df
  }
  names_2019 <- data.frame(Dist_name = shape_file@data$District,
                           Subcounty_name = shape_file@data$Subcounty)
  names_2019 <- names_2019[order(names_2019$Dist_name), ]
  # (district, duplicated name, unique replacement) triples. The original
  # applied the KAYUNGA/KAYONZA and BUSIA/MASABA renames twice; the second
  # application was a no-op and has been dropped.
  # NOTE(review): the "DIVISON" spellings below are kept as-is — presumably
  # they match the shape-file attribute data; confirm before "correcting".
  dups <- list(
    c("BUGWERI", "BUYANGA", "BUYANGA1"),
    c("KYOTERA", "KABIRA", "KABIRA1"),
    c("KAYUNGA", "KAYONZA", "KAYONZA1"),
    c("BUSIA", "MASABA", "MASABA1"),
    c("KABAROLE", "RUTEETE", "RUTEETE1"),
    c("NAMAYINGO", "BANDA", "BANDA1"),
    c("BUVUMA", "BUGAYA", "BUGAYA1"),
    c("MITYANA", "CENTRAL DIVISION", "CENTRAL DIVISION1"),
    c("JINJA", "CENTRAL DIVISION", "CENTRAL DIVISION2"),
    c("JINJA", "KAKIRA TOWN COUNCIL", "KAKIRA TOWN COUNCIL1"),
    c("NTOROKO", "KANARA", "KANARA1"),
    c("MAYUGE", "MALONGO", "MALONGO1"),
    c("KOBOKO", "NORTHERN DIVISON", "NORTHERN DIVISON1"),
    c("KOBOKO", "SOUTHERN DIVISON", "SOUTHERN DIVISON1"),
    c("KOBOKO", "WESTERN DIVISON", "WESTERN DIVISON1"))
  for (d in dups){
    names_2019 <- rename_dup(names_2019, d[1], d[2], d[3])
  }
  return(list(names_2019))
}
# get subcounties names variable (not unique) function - prepare for next stage (select by presence of MDA) #
# Build a sub-county / district name table for a given MDA year, pulling the
# correct attribute columns for that year's shape file and disambiguating
# sub-county names duplicated across districts (prepares for selecting
# sub-counties by presence of MDA in the next stage).
#
# Args:
#   shape_file: SpatialPolygonsDataFrame; @data must hold SNAME_2006/
#               DNAME_2006 (year 2004), SNAME_2010/DNAME_2010 (year 2012),
#               or Subcounty/District (years 2015, 2016, 2018).
#   year: one of 2004, 2012, 2015, 2016, 2018.
# Returns: dataframe with Subcounty_name and District_name columns.
# Raises: an error for any other year (the original failed here too, but
#         with an opaque "object not found" message).
subcounties_name_func2 <- function(shape_file, year){
  # Replace Subcounty_name with a unique suffix-ed name where the
  # (district, subcounty) pair matches a known duplicate.
  rename_dup <- function(df, district, old_name, new_name){
    df$Subcounty_name <- ifelse(df$District_name == district & df$Subcounty_name == old_name,
                                new_name, df$Subcounty_name)
    df
  }
  # Apply a list of c(district, old, new) rename triples in order.
  apply_renames <- function(df, dups){
    for (d in dups){
      df <- rename_dup(df, d[1], d[2], d[3])
    }
    df
  }
  if (year == 2004){
    UGA_SC_MDA_names <- data.frame(Subcounty_name = shape_file@data$SNAME_2006,
                                   District_name = shape_file@data$DNAME_2006)
    UGA_SC_MDA_names <- UGA_SC_MDA_names[order(UGA_SC_MDA_names$District_name), ]
    UGA_SC_MDA_names <- apply_renames(UGA_SC_MDA_names, list(
      c("MAYUGE", "MALONGO", "MALONGO1")))
  } else if (year == 2012){
    UGA_SC_MDA_names <- data.frame(Subcounty_name = shape_file@data$SNAME_2010,
                                   District_name = shape_file@data$DNAME_2010)
    UGA_SC_MDA_names <- UGA_SC_MDA_names[order(UGA_SC_MDA_names$District_name), ]
    UGA_SC_MDA_names <- apply_renames(UGA_SC_MDA_names, list(
      c("MASAKA", "BUWUNGA", "BUWUNGA1"),
      c("JINJA", "CENTRAL DIVISION", "CENTRAL DIVISION1"),
      c("KAYUNGA", "KAYONZA", "KAYONZA1"),
      c("MAYUGE", "MALONGO", "MALONGO1")))
  } else if (year == 2015){
    UGA_SC_MDA_names <- data.frame(Subcounty_name = shape_file@data$Subcounty,
                                   District_name = shape_file@data$District)
    UGA_SC_MDA_names <- UGA_SC_MDA_names[order(UGA_SC_MDA_names$District_name), ]
    UGA_SC_MDA_names <- apply_renames(UGA_SC_MDA_names, list(
      c("BUGWERI", "BUYANGA", "BUYANGA1"),
      c("KYOTERA", "KABIRA", "KABIRA1")))
  } else if (year == 2016){
    # NOTE: the 2016 and 2018 branches do not sort by district — preserved
    # from the original behavior.
    UGA_SC_MDA_names <- data.frame(Subcounty_name = shape_file@data$Subcounty,
                                   District_name = shape_file@data$District)
    UGA_SC_MDA_names <- apply_renames(UGA_SC_MDA_names, list(
      c("KAYUNGA", "KAYONZA", "KAYONZA1"),
      c("BUSIA", "MASABA", "MASABA1"),
      c("KABAROLE", "RUTEETE", "RUTEETE1")))
  } else if (year == 2018){
    UGA_SC_MDA_names <- data.frame(Subcounty_name = shape_file@data$Subcounty,
                                   District_name = shape_file@data$District)
    # NOTE(review): "DIVISON" spellings are kept as-is — presumably they
    # match the shape-file attribute data; confirm before "correcting".
    UGA_SC_MDA_names <- apply_renames(UGA_SC_MDA_names, list(
      c("NAMAYINGO", "BANDA", "BANDA1"),
      c("BUVUMA", "BUGAYA", "BUGAYA1"),
      c("MITYANA", "CENTRAL DIVISION", "CENTRAL DIVISION1"),
      c("JINJA", "CENTRAL DIVISION", "CENTRAL DIVISION2"),
      c("JINJA", "KAKIRA TOWN COUNCIL", "KAKIRA TOWN COUNCIL1"),
      c("NTOROKO", "KANARA", "KANARA1"),
      c("KAYUNGA", "KAYONZA", "KAYONZA1"),
      c("MAYUGE", "MALONGO", "MALONGO1"),
      c("BUSIA", "MASABA", "MASABA1"),
      c("KOBOKO", "NORTHERN DIVISON", "NORTHERN DIVISON1"),
      c("KABAROLE", "RUTEETE", "RUTEETE1"),
      c("KOBOKO", "SOUTHERN DIVISON", "SOUTHERN DIVISON1"),
      c("KOBOKO", "WESTERN DIVISON", "WESTERN DIVISON1")))
  } else {
    # Robustness: fail loudly instead of "object 'UGA_SC_MDA_names' not found".
    stop("subcounties_name_func2: unsupported year ", year)
  }
  return(UGA_SC_MDA_names)
}
# Plotting sub-county MDA (with district-level MDA overlay) function #
subcounty_MDA_processing_plotting_func <- function(sc_names, UGA_subcounties_tidy, district_2001, national_map_input, year){
# Extract sub-county MDAs #
if (year == 2004){
MDA_subcounties <-
c("DZAIPI", "AKOKORO", "RHINO CAMP", "BANDA", "LUNYO", "KYANGWALI", "BUSERUKA", "KABWOYA", "KIGOROBYA",
"MASESE/WALUKUBA", "GALIRAAYA", "MPEEFU", "MUNTU", "BIISO", "BULIISA", "MALONGO1", "DUFILE", "NGOGWE",
"LWAMPANGA", "PAKWACH", "DIVISION A", "KANARA") # vector of subcounties with MDA in 2003
# Notes on 2003 subcounties: BIISO & BULIISA sub-counties found in Buliisa, rather than in Masindi, and MUNTU found in Amolotar (not Lira
# MALONGO1 also renamed from MALONGO in MAYUGE as there are 2 MALONGO sub-counties (one if MASAKA), so need to make unique
sc_names # copy variable (dist names) : UGA_dist_MDA_names <- district_names
sc_names$MDA <- ifelse(sc_names$Subcounty_name %in% MDA_subcounties, "MDA","none") # code whether MDA or not
sc_names <- sc_names %>% rename(SNAME_2006 = Subcounty_name, DNAME_2006 = District_name) # rename column
UGA_subcounties_tidy$SNAME_2006 <- ifelse(UGA_subcounties_tidy$DNAME_2006 == "MAYUGE" & UGA_subcounties_tidy$SNAME_2006 == "MALONGO",
"MALONGO1", UGA_subcounties_tidy$SNAME_2006) # must also change any duplicate sub-counties with MDA in this object
# make MDA yes or no variable for sub counties with MDA
UGA_subcounties_tidy <- left_join(UGA_subcounties_tidy, sc_names) # join boundary data to MDA presence data
UGA_subcounties_tidy$MDA <- as.factor(UGA_subcounties_tidy$MDA) # make MDA presence a factor
MDA.SC.col <- c("purple2", NA) # to colour MDA districts
MDA.SC.vec <- MDA.SC.col[UGA_subcounties_tidy$MDA] # specify colour for each polygon
UGA_subcounties_tidy$MDA_colour <- MDA.SC.vec # new column for fill in ggplot depending on MDA
UGA_subcounties_tidy$label <- ifelse(UGA_subcounties_tidy$MDA == "MDA",
UGA_subcounties_tidy$SNAME_2006, NA)
}
if (year == 2012){
MDA_subcounties <-
c("ABER", "ACHOLIBUR","ADILANG", "ADJUMANI TC", "AGORO", "AGWATA", "AKALO", "AKOKORO",
"ALERO", "ALOI", "AMURU", "ANAKA", "APUTI", "ATANGA", "ATIAK", "AWACH", "AWELO",
"AWERE", "BAGEZZA", "BAITAMBOGWE", "BANDA", "BAR-DEGE", "BBAALE", "BELEAFE",
"BIISO", "BOBI", "BUDONDO", "BUFUMIRA", "BUHEHE", "BUJJUMBA", "BUKAKATA","BUKULULA",
"BULERA", "BULIISA", "BULULU", "BUNGATIRA", "BUSAMUZI", "BUSERUKA", "BUSIMBI",
"BUTOLOOGO", "BUWAMA", "BUWUNGA1", "BUYENDE", "BWAMBARA", "BWEEMA", "BWIKARA",
"CENTRAL DIVISION1", "CHAWENTE", "CIFORO", "DUFILE", "DZAIPI", "GALIRAAYA", "GIMARA",
"IBUJE", "IBULANKU", "IMMANYIRO", "INOMO", "ITULA", "IVUKULA", "KABWOYA", "KADUNGULU",
"KAGULU", "KAKOMONGOLE", "KALANGALA TC", "KALONGO", "KALUNGI", "KANARA", "KANGAI",
"KANGULUMIRA", "KARUGUTU", "KASANJE", "KATABI","KATUNGURU", "KATWE KABATORO TC", "KAYONZA1",
"KEI", "KICHWAMBA", "KICUZI", "KIDERA","KIGANDALO","KIGOROBYA","KIGOROBYA TC",
"KILAK","KITIGUM MATIDI","KITGUM TC", "KITYERERA","KKOME ISLANDS","KOBOKO TC","KORO",
"KULUBA", "KURU", "KYAMUSWA", "KYANAMUKAAKA", "KYANGWALI", "LABONGO AKWANG",
"LABONGO AMIDA", "LABONGO LAYAMO", "LAGORO", "LAGUTI", "LAKWANA", " LALOGI",
"LAMOGI", "LAPUL", "LAROO", "LAYIBI", "LEFORI", "LIRA PALWO", "LOBULE", "LOKUNG",
"LOREGAE", "LORO", "LUDARA", "LUMINO", "LUNYO", "LWABYATA", "LWAMPANGA", "LYAMA",
"MADI OPEI", "MALONGO1", "MASABA", "MASINDI PORT", "MAZINGA", "METU", "MIDIA",
"MIDIGO", "MOLO", "MOYO", "MPEEFU", "MUCWINI", "MUGOYE", "MUHOKYA", "MUNTU",
"MUTUMBA", "NABISWERA", "NAJJA", "NAKISUNGA", "NAMALU", "NAMANYONYI", "NAMASALE",
"NAMBIESO", " NAMOKORA", "NAMUGONGO", "NANKOMA", "NGOGWE", "NKONDO", "NTENJERU",
"NYENGA", "ODEK", "OFUA", "OGOKO", "OKOLLO", "OMIYA ANYIMA", "OMOT", "OMUGO", "ONGAKO",
"ONGINO", "OROM", "OTWAL", "PABBO", "PADER TC", "PADIBE EAST", "PADIBE WEST",
"PAICHO", "PAIMOL", "PAJULE", "PAKELLE", "PAKWACH", "PAKWACH TC", "PALABEK GEM",
"PALABEK KAL", "PALARO", "PALOGA", "PANYIMUR","PANYANGO","PATIKO","PATONGO", "PECE",
"PINGIRE","PURANGA","PURONGO","RHINO CAMP","RIGBO","ROMOGI","RUGASHARI"," RUTEETE",
"RWEBISENGO","SIBANGA","SIGULU ISLANDS","SSI-BUKUNJA","SSISA","WADELAI","WAKISI",
"WOL")
sc_names # copy variable (dist names) : UGA_dist_MDA_names <- district_names
sc_names$MDA <- ifelse(sc_names$Subcounty_name %in% MDA_subcounties, "MDA","none") # code whether MDA or not
sc_names <- sc_names %>% rename(SNAME_2010 = Subcounty_name, DNAME_2010 = District_name) # rename column
UGA_subcounties_tidy$SNAME_2010 <- ifelse(UGA_subcounties_tidy$DNAME_2010 == "MAYUGE" & UGA_subcounties_tidy$SNAME_2010 == "MALONGO",
"MALONGO1", UGA_subcounties_tidy$SNAME_2010) # must also change any duplicate sub-counties with MDA in this object
UGA_subcounties_tidy$SNAME_2010 <- ifelse(UGA_subcounties_tidy$DNAME_2010 == "MASAKA" & UGA_subcounties_tidy$SNAME_2010 == "BUWUNGA",
"BUWUNGA1", UGA_subcounties_tidy$SNAME_2010) # must also change any duplicate sub-counties with MDA in this object
UGA_subcounties_tidy$SNAME_2010 <- ifelse(UGA_subcounties_tidy$DNAME_2010 == "JINJA" & UGA_subcounties_tidy$SNAME_2010 == "CENTRAL DIVISION",
"CENTRAL DIVISION1", UGA_subcounties_tidy$SNAME_2010) # must also change any duplicate sub-counties with MDA in this object
UGA_subcounties_tidy$SNAME_2010 <- ifelse(UGA_subcounties_tidy$DNAME_2010 == "KAYUNGA" & UGA_subcounties_tidy$SNAME_2010 == "KAYONZA",
"KAYONZA1", UGA_subcounties_tidy$SNAME_2010) # must also change any duplicate sub-counties with MDA in this object
# make MDA yes or no variable for sub counties with MDA
UGA_subcounties_tidy <- left_join(UGA_subcounties_tidy, sc_names) # join boundary data to MDA presence data
UGA_subcounties_tidy$MDA <- as.factor(UGA_subcounties_tidy$MDA) # make MDA presence a factor
MDA.SC.col <- c("purple2", NA) # to colour MDA districts
MDA.SC.vec <- MDA.SC.col[UGA_subcounties_tidy$MDA] # specify colour for each polygon
UGA_subcounties_tidy$MDA_colour <- MDA.SC.vec # new column for fill in ggplot depending on MDA
UGA_subcounties_tidy$label <- ifelse(UGA_subcounties_tidy$MDA == "MDA",
UGA_subcounties_tidy$SNAME_2010, NA)
}
if (year == 2015){
MDA_subcounties <-
c("ABANGA", "ADEKNINO", "ADOK", "AGWATA", "APOPONG", "AROMO", "BALAWOLI", "BATTA",
"BUFUMBO", "BUGOBERO", "BUGULUMBYA", "BUKHALU", "BUKHOFU", "BUKOOMA", "BUKULULA",
"BUKUYA", "BULANGE", "BULONGO", "BUMANYA", "BUREMBA", "BURUNGA", "BUTEBO", "BUTIRU",
"BUYANGA1", "BWAMBARA", "CHELEKURA", "GADUMIRE", "GOGONYO", "IBULANKU", "IKUMBYA",
"IVUKULA", "IYOLWA", "JANGOKORO", "KABIRA1", "KABWERI", "KACHONGA", "KACHUMBALA",
"KADAMA", "KAGAMBA", "KAKOMONGOLE", "KAKOOGE", "KAKUUTO", "KALIRO TOWN COUNCIL",
"KALISIZO", "KALONGO", "KALUNGI", "KAMEKE", "KAMERUKA", "KAMONKOLI", "KAMUDA",
"KANGAI","KANYARYERU","KAPIR","KAPUJAN","KASASIRA", "KASAMBYA", "KASHUMBA",
"KASODO", "KASSANDA", "KATIIRA", "KIBAALE", "KIBUKU", "KICUZI", "KIDONGOLE",
"KIJOMORO", "KIJONGO", "KIRIKA", "KISOZI", "KOBWIN", "KOLIR", "KUMI", "KWERA",
"KYALULANGIRA", "KYEBE", "LOREGAE", "LUKAYA TOWN COUNCIL", "LWABIYATA",
"LWAMPANGA", "LYAMA", "MADI OPEI", "MAGADA", "MAGOLA", "MAGORO", "MALABA TOWN COUNCIL",
"MASHA", "MAZIMASA", "MBULAMUTI", "MELLA", "MERIKIT", "MOLO", "MUKONGORO",
"MUKUJU", "MUKURA", "MULANDA", "MYANZI", "NABISWEERA", "NABUYOGA", "NAKITOMA",
"NAMALU", "NAMASAGALI", "NAMALEMBA", "NAMUTUMBA", "NAMUTUMBA TOWN COUNCIL",
"NAMUGONGO", "NAMWIWA","NANSANGA", "NAWAIKOKE", "NAWANDALA", "NGARAMA", "NGENGE",
"NGORA", "NKUNGU", "NSINZE", "NYADRI", "NYAKASHASHARA", "NYAPEA", "OKWALONGWEN",
"OKWONGODUL", "OLEBA", "OLOK", "OLUFFE", "OLUVU", "OMODOI", "ONGINO", "OSUKURU",
"PAIDHA", "PALABEK GEM", "PALABEK KAL", "PALABEK OGILI", "PALOGA", "PAYA",
"PUTI-PUTI", "RAKAI TOWN COUNCIL", "RUBONGI", "SANGA", "SIRONKO TOWN COUNCIL",
"SISUNI", "TARA", "TIRINYI", "TOROMA", "WABINYONYI", "WANKOLE", "YIVU",
"ZOMBO TOWN COUNCIL")
sc_names # copy variable (dist names) : UGA_dist_MDA_names <- district_names
sc_names$MDA <- ifelse(sc_names$Subcounty_name %in% MDA_subcounties, "MDA","none") # code whether MDA or not
sc_names <- sc_names %>% rename(Subcounty = Subcounty_name, District = District_name) # rename column
UGA_subcounties_tidy$Subcounty <- ifelse(UGA_subcounties_tidy$District == "BUGWERI" & UGA_subcounties_tidy$Subcounty == "BUYANGA",
"BUYANGA1", UGA_subcounties_tidy$Subcounty) # must also change any duplicate sub-counties with MDA in this object
UGA_subcounties_tidy$Subcounty <- ifelse(UGA_subcounties_tidy$District == "KYOTERA" & UGA_subcounties_tidy$Subcounty == "KABIRA",
"KABIRA1", UGA_subcounties_tidy$Subcounty) # must also change any duplicate sub-counties with MDA in this object
}
if (year == 2016){
MDA_subcounties <-
c("BBAALE", "BUBEKE", "BUFUMIRA","BUHEHE","BUJUMBA", "BUKAKATA", "BURORA",
"BUSAANA", "BUSAMUZI", "BUSIME", "BUVUMA TOWN COUNCIL", "BUWAMA", "BUWOOYA",
"BWEEMA", "BWIKARA", "GALIRAYA", "KABULASOKE", "KALANGALA TOWN COUNCIL",
"KAMMENGO", "KANGULUMIRA", "KANONI TOWN COUNCIL", "KASANJE", "KASENDA",
"KATABI TOWN COUNCIL", "KAYONZA1", "KAYUNGA", "KIBIITO", "KOOME ISLAND",
"KYAKABADIIMA", "KYAMUSWA", "KYANAMUKAAKA", "KYATEREKERA", "KYEGONZA",
"KYESIIGA", "LUBYA", "LUMINO", "LUNYO", "LWAJJE", "LYABAANA", "MABAALE",
"MADDU", "MAJANJI", "MASABA1", "MAZINGA", "MPATTA", "MPEEFU", "MPUNGE",
"MUGOYE", "MUHORRO", "MUHORRO TOWN COUNCIL", "MUKUNGWE", "NAIRAMBI",
"NAJJA", "NAKISUNGA", "NAZIGO", "NDAIGA", "NGOGWE", "NJERU DIVISION",
"NKOZI", "NTENJERU", "NYENGA DIVISION", "RUGASHARI", "RUTEETE1",
"SSI-BUKUNJA", "WAKISI DIVISION")
sc_names # copy variable (dist names) : UGA_dist_MDA_names <- district_names
sc_names$MDA <- ifelse(sc_names$Subcounty_name %in% MDA_subcounties, "MDA","none") # code whether MDA or not
sc_names <- sc_names %>% rename(Subcounty = Subcounty_name, District = District_name) # rename column
UGA_subcounties_tidy$Subcounty <- ifelse(UGA_subcounties_tidy$District == "KAYUNGA" & UGA_subcounties_tidy$Subcounty == "KAYONZA",
"KAYONZA1", UGA_subcounties_tidy$Subcounty) # must also change any duplicate sub-counties with MDA in this object
UGA_subcounties_tidy$Subcounty <- ifelse(UGA_subcounties_tidy$District == "BUSIA" & UGA_subcounties_tidy$Subcounty == "MASABA",
"MASABA1", UGA_subcounties_tidy$Subcounty) # must also change any duplicate sub-counties with MDA in this object
UGA_subcounties_tidy$Subcounty <- ifelse(UGA_subcounties_tidy$District == "KABAROLE" & UGA_subcounties_tidy$Subcounty == "RUTEETE",
"RUTEETE1", UGA_subcounties_tidy$Subcounty) # must also change any duplicate sub-counties with MDA in this object
}
if (year == 2018){
MDA_subcounties <-
c("ABAKO","ABIA","ABUKU","ADUKU","ADILANG","AGAGO TOWN COUNCIL","AGALI",
"AGIKDAK","AGWENG","AGWINGIRI","AKOKORO","AKURA","AKWON","ALEBTONG TOWN COUNCIL",
"ALOI","AMOLATAR TOWN COUNCIL","AMUGU","APAC","APERKIRA","APALA","AROI",
"APUTI", "AROMO", "ARUM", "ARWOTCEK", "ATIIRA", "AWEI", "AWELO",
"BAITAMBOGWE", "BANDA1", "BARR", "BBAALE", "BBANDA", "BUBEKE", "BUDHAYA",
"BUDONDO", "BUFUMIRA", "BUGAYA1", "BUGEMBE TOWN COUNCIL", "BUGONDO",
"BUHEHE", "BUHEMBA", "BUJUMBA", "BUKAKATA", "BUKABOOLI", "BUKANA",
"BUKATUBE", "BULESA", "BULIDHA", "BULUGUYI", "BULULU", "BURORA", "BUSAANA",
"BUSAKIRA", "BUSAMUZI", "BUSERUKA", "BUSIMBI DIVISION", "BUSIME", "BUSORO",
"BUSSI", "BUSWALE", "BUTAGAYA", "BUTUNGAMA", "BUVUMA TOWN COUNCIL",
"BUWAMA", "BUWOOYA", "BUWUNGA", "BUYENDE", "BUYENDE TOWN COUNCIL",
"BUYENGO", "BUYINJA", "BWEEMA", "BWERAMULE", "BWIKARA", "CENTRAL DIVISION1",
"CHAWENTE", "CHEGERE", "DRANYA", "ETAM", "GALIRAYA", "IBUJE", "JAGUZI",
"CENTRAL DIVISION2", "KABERAMAIDO", "KABULASOKE", "KABWOYA", "KADUNGULU",
"KAGULU", "KAJJANSI TOWN COUNCIL", "KAKIRA TOWN COUNCIL1", "KAKURE",
"KALAKI", "KALONGO TOWN COUNCIL", "KAMMENGO", "KANARA1", "KANARA TOWN COUNCIL",
"KANGULUMIRA", "KASANJE", "KASENDA", "KATABI TOWN COUNCIL", "KATETA",
"KATUNGURU", "KAYUNGA", "KAYONZA1", "KERWA", "KIBIITO", "KICWAMBA","KIDERA",
"KIGOROBYA", "KIGUMBA", "KIRYANDONGO", "KITYERERA","KIYOMBYA","KOCHI",
"KOBULUBULU","KOOME ISLAND","KOTOMOL","KULUBA","KULULU","KURU",
"KYAKABADIIMA","KYAMUSWA","KYANAMUKAAKA","KYANGWALI","KYATEREKERA",
"KYEGONZA","LAMIYO","LAPONO","LIRA PALWO","LOBULE","LOLWE","LUBYA",
"LUDARA","LUMINO","LUNYO","LWAJJE","LYABAANA","MAANYI","MABAALE",
"MAFUBIRA","MAJANJI","MALONGO1","NAMAYINGO TOWN COUNCIL","MASABA1",
"MASAJJA DIVISION", "MASINDI PORT", "MAZINGA", "MIDIA", "MIDIGO","MPATTA",
"MPEEFU", "MPUNGE", "MPUMUDDE DIVISION", "MUGOYE", "MUHORRO",
"MUHORRO TOWN COUNCIL", "MUKUNGWE", "MUNTU", "MUTUMBA", "MUTUNDA",
"NAIRAMBI", "NAJJA", "NAKISUNGA","NAMBIESO","NAZIGO","NDAIGA","NDEJJE DIVISION",
"NKONDO","NJERU DIVISION","NORTHERN DIVISON1","NTENJERU","NYENGA DIVISION",
"OCHERO", "ODRAVU","OGUR","OMIYA PACWA","OMORO","OMOT","PAIMOL","PARABONGO",
"PATONGO","PATONGO TOWN COUNCIL","PINGIRE","ROMOGI","RUGASHARI","RUTEETE",
"RWEBISENGO","RWIMI","SIGULU ISLAND","SOUTHERN DIVISION1","SSI-BUKUNJA",
"WAIRASA", "WAKISI DIVISION", "WESTERN DIVISION1", "WOL")
sc_names # copy variable (dist names) : UGA_dist_MDA_names <- district_names
sc_names$MDA <- ifelse(sc_names$Subcounty_name %in% MDA_subcounties, "MDA","none") # code whether MDA or not
sc_names <- sc_names %>% rename(Subcounty = Subcounty_name, District = District_name) # rename column
UGA_subcounties_tidy$Subcounty <- ifelse(UGA_subcounties_tidy$District == "NAMAYINGO" & UGA_subcounties_tidy$Subcounty == "BANDA",
"BANDA1", UGA_subcounties_tidy$Subcounty) # must also change any duplicate sub-counties with MDA in this object
UGA_subcounties_tidy$Subcounty <- ifelse(UGA_subcounties_tidy$District == "BUVUMA" & UGA_subcounties_tidy$Subcounty == "BUGAYA",
"BUGAYA1", UGA_subcounties_tidy$Subcounty) # must also change any duplicate sub-counties with MDA in this object
UGA_subcounties_tidy$Subcounty <- ifelse(UGA_subcounties_tidy$District == "MITYANA" & UGA_subcounties_tidy$Subcounty == "CENTRAL DIVISION",
"CENTRAL DIVISION1", UGA_subcounties_tidy$Subcounty) # must also change any duplicate sub-counties with MDA in this object
UGA_subcounties_tidy$Subcounty <- ifelse(UGA_subcounties_tidy$District == "JINJA" & UGA_subcounties_tidy$Subcounty == "CENTRAL DIVISION",
"CENTRAL DIVISION2", UGA_subcounties_tidy$Subcounty) # must also change any duplicate sub-counties with MDA in this object
UGA_subcounties_tidy$Subcounty <- ifelse(UGA_subcounties_tidy$District == "JINJA" & UGA_subcounties_tidy$Subcounty == "KAKIRA TOWN COUNCIL",
"KAKIRA TOWN COUNCIL1", UGA_subcounties_tidy$Subcounty) # must also change any duplicate sub-counties with MDA in this object
UGA_subcounties_tidy$Subcounty <- ifelse(UGA_subcounties_tidy$District == "NTOROKO" & UGA_subcounties_tidy$Subcounty == "KANARA",
"KANARA1", UGA_subcounties_tidy$Subcounty) # must also change any duplicate sub-counties with MDA in this object
UGA_subcounties_tidy$Subcounty <- ifelse(UGA_subcounties_tidy$District == "KAYUNGA" & UGA_subcounties_tidy$Subcounty == "KAYONZA",
"KAYONZA1", UGA_subcounties_tidy$Subcounty) # must also change any duplicate sub-counties with MDA in this object
UGA_subcounties_tidy$Subcounty <- ifelse(UGA_subcounties_tidy$District == "MAYUGE" & UGA_subcounties_tidy$Subcounty == "MALONGO",
"MALONGO1", UGA_subcounties_tidy$Subcounty) # must also change any duplicate sub-counties with MDA in this object
UGA_subcounties_tidy$Subcounty <- ifelse(UGA_subcounties_tidy$District == "BUSIA" & UGA_subcounties_tidy$Subcounty == "MASABA",
"MASABA1", UGA_subcounties_tidy$Subcounty)
UGA_subcounties_tidy$Subcounty <- ifelse(UGA_subcounties_tidy$District == "KOBOKO" & UGA_subcounties_tidy$Subcounty == "NORTHERN DIVISON",
"NORTHERN DIVISON1", UGA_subcounties_tidy$Subcounty)
UGA_subcounties_tidy$Subcounty <- ifelse(UGA_subcounties_tidy$District == "KABAROLE" & UGA_subcounties_tidy$Subcounty == "RUTEETE",
"RUTEETE1", UGA_subcounties_tidy$Subcounty) # must also change any duplicate sub-counties with MDA in this object
UGA_subcounties_tidy$Subcounty <- ifelse(UGA_subcounties_tidy$District == "KOBOKO" & UGA_subcounties_tidy$Subcounty == "SOUTHERN DIVISON",
"SOUTHERN DIVISON1", UGA_subcounties_tidy$Subcounty)
UGA_subcounties_tidy$Subcounty <- ifelse(UGA_subcounties_tidy$District == "KOBOKO" & UGA_subcounties_tidy$Subcounty == "WESTERN DIVISON",
"WESTERN DIVISON1", UGA_subcounties_tidy$Subcounty)
}
if (year == 2015 || year == 2016 || year == 2018){
# make MDA yes or no variable for sub counties with MDA
UGA_subcounties_tidy <- left_join(UGA_subcounties_tidy, sc_names) # join boundary data to MDA presence data
UGA_subcounties_tidy$MDA <- as.factor(UGA_subcounties_tidy$MDA) # make MDA presence a factor
MDA.SC.col <- c("purple2", NA) # to colour MDA districts
MDA.SC.vec <- MDA.SC.col[UGA_subcounties_tidy$MDA] # specify colour for each polygon
UGA_subcounties_tidy$MDA_colour <- MDA.SC.vec # new column for fill in ggplot depending on MDA
UGA_subcounties_tidy$label <- ifelse(UGA_subcounties_tidy$MDA == "MDA",
UGA_subcounties_tidy$Subcounty, NA)
}
# Extract district MDAs (valid for all MDAs across 2033-2019 when analysing by original districts of 2003) #
district_map_0319 <- UGA_district_boundaries_function(shape_file = districts_2001, national_map_input = national_map)
district_names_0319 <- district_name_func(shape_file = districts_2001) # using original districts throughout 2003-2019
UGA_dist_MDA_names <- district_names_0319 # copy variable (dist names)
if (year == 2004){
# repeat for districts to highlight & check sub-counties (for each district) for 2004#
MDA_districts <-
c("APAC", "MOYO", "ADJUMANI", "ARUA", "NEBBI", "LIRA", "NAKASONGOLA", "MASINDI", "HOIMA", "BUGIRI",
"BUSIA", "KAYUNGA", "JINJA", "MUKONO", "WAKISO", "MAYUGE", "BUNDIBUGYO", "KIBAALE") # vector of districts with MDA in 2003
}
if (year == 2012){
# repeat for districts to highlight & check sub-counties (for each district) for 2004#
MDA_districts <-
c("PADER","APAC","KITGUM","MOYO","YUMBE","ARUA","NEBBI","GULU","LIRA","KABERAMAIDO",
"NAKASONGOLA","HOIMA","BUGIRI","BUSIA","KAYUNGA","JINJA","MUKONO","WAKISO",
"MAYUGE","MPIGI","KABAROLE","MUBENDE","BUNDIBUGYO","KIBAALE","SOROTI","PALLISA",
"KAMULI","MASINDI","ADJUMANI","MASAKA","MBARARA","RUKUNGIRI","BUSHENYI",
"KAMWENGE","KASESE","KALANGALA") # vector of districts with MDA in 2003
}
if (year == 2015){
# repeat for districts to highlight & check sub-counties (for each district) for 2004#
MDA_districts <-c("PADER","APAC","KITGUM","MOYO","YUMBE","ARUA","NEBBI","GULU","LIRA",
"KABERAMAIDO","HOIMA","BUGIRI","BUSIA","KAYUNGA","JINJA","MUKONO","WAKISO",
"MPIGI","KABAROLE","MUBENDE","BUNDIBUGYO","KIBAALE","SOROTI","MAYUGE",
"KAMULI","MASINDI","ADJUMANI","MASAKA","RUKUNGIRI","BUSHENYI",
"KAMWENGE","KASESE","KALANGALA","IGANGA","MBALE","SIRONKO",
"KAPCHORWA","KATAKWI","KUMI","MBARARA","NAKAPIRIPIRIT","NAKASONGOLA","PALLISA",
"RAKAI","TORORO")
}
if (year == 2016){
MDA_districts <-
c("MUBENDE","BUNDIBUGYO","BUSIA","KABAROLE","KALANGALA","KAYUNGA",
"MASINDI","MPIGI","MUKONO","WAKISO")
}
if (year == 2018){
MDA_districts <- c("PADER","APAC","KITGUM","MOYO","YUMBE","ARUA","NEBBI","GULU","LIRA",
"KABERAMAIDO","HOIMA","BUGIRI","BUSIA","KAYUNGA","JINJA","MUKONO","WAKISO",
"MPIGI","KABAROLE","MUBENDE","BUNDIBUGYO","SOROTI","MAYUGE",
"KAMULI","MASINDI","ADJUMANI","MASAKA","BUSHENYI","KAMWENGE","KASESE",
"KALANGALA")
}
UGA_dist_MDA_names$MDA <- ifelse(district_names_0319$Dist_name %in% MDA_districts, "MDA","none") # code whether MDA or not
UGA_dist_MDA_names <- UGA_dist_MDA_names %>% rename(DISTRICT = Dist_name) # rename column
UGA_districts_tidy <- left_join(district_map_0319[[2]], UGA_dist_MDA_names) # join boundary data to MDA presence data
UGA_districts_tidy$MDA <- as.factor(UGA_districts_tidy$MDA) # make MDA presence a factor
MDA.dist.col <- c("blue",NA) # to colour MDA district
MDA.dist.vec <- MDA.dist.col[UGA_districts_tidy$MDA] # specify colour for each polygon
UGA_districts_tidy$MDA_colour <- MDA.dist.vec # new column for fill in ggplot depending on MDA
# make labels for plot
if (year == 2004){
# make labels (sub-counties with MDA) for plotting
UGA_subcounties_tidy_subset <- subset(UGA_subcounties_tidy, MDA=="MDA") #subset just for NYS
scnames <- aggregate(cbind(long, lat) ~ SNAME_2006, data=UGA_subcounties_tidy_subset, FUN=mean)
scnames$label <- scnames$SNAME_2006
}
if (year == 2012){
# make labels (sub-counties with MDA) for plotting
UGA_subcounties_tidy_subset <- subset(UGA_subcounties_tidy, MDA=="MDA") #subset just for NYS
scnames <- aggregate(cbind(long, lat) ~ SNAME_2010, data=UGA_subcounties_tidy_subset, FUN=mean)
scnames$label <- scnames$SNAME_2010
}
if (year == 2015 || year == 2016 || year == 2018){
# make labels (sub-counties with MDA) for plotting
UGA_subcounties_tidy_subset <- subset(UGA_subcounties_tidy, MDA=="MDA") #subset just for NYS
scnames <- aggregate(cbind(long, lat) ~ Subcounty, data=UGA_subcounties_tidy_subset, FUN=mean)
scnames$label <- scnames$Subcounty
}
# PLOT
if (year == 2004){
UGA_districts_tidy_subset <- subset(UGA_districts_tidy, MDA=="MDA")
plot1 <- ggplot() +
geom_polygon(data = national_map, aes(x=long, y = lat, group = group), color = "black", size = 0.1, fill = "lightgrey") +
geom_polygon(data = districts_2001, aes(x = long, y = lat, group = group), colour = "black", alpha = 1, fill = NA)+
geom_polygon(data= UGA_districts_tidy_subset, aes(x = long, y = lat, group = group, colour= MDA_colour), fill= "purple", size = 1, alpha=0.25)+
geom_polygon(data= UGA_subcounties_tidy, aes(x = long, y = lat, group = group, colour= MDA_colour), size = 0.75, fill=NA, alpha=NA)+
coord_equal(ratio = 1)+
scale_colour_manual(values=c("blue","purple2",NA), guide=FALSE)+
ggrepel::geom_text_repel(data = scnames, aes(long, lat, label = label), box.padding = 1.15, max.overlaps = Inf, size = 4.5, family = 'Avenir', segment.color = "#333333", fontface = "bold")+
theme_void()+
theme(
plot.title = element_text(color="black", size=16, face="bold.italic"))+
guides(fill=guide_legend(override.aes=list(shape=21, size=3, colour="black", stroke=1.2))) # need this to get colour in the fill (sample.size) legend
}
if (year == 2012 || year == 2015 || year == 2016 || year == 2018){
UGA_districts_tidy_subset <- subset(UGA_districts_tidy, MDA=="MDA") # subset MDA districts to plot
plot1 <- ggplot() +
geom_polygon(data = national_map, aes(x=long, y = lat, group = group), color = "black", size = 0.1, fill = "lightgrey") +
geom_polygon(data = districts_2001, aes(x = long, y = lat, group = group), colour = "black", alpha = 1, fill = NA)+
geom_polygon(data= UGA_districts_tidy_subset, aes(x = long, y = lat, group = group, colour= MDA_colour), fill= "purple", size = 1, alpha=0.25)+
geom_polygon(data= UGA_subcounties_tidy, aes(x = long, y = lat, group = group, colour= MDA_colour), size = 0.75, fill=NA, alpha=NA)+
coord_equal(ratio = 1)+
scale_colour_manual(values=c("blue","purple2",NA), guide=FALSE)+
ggrepel::geom_text_repel(data = scnames, aes(long, lat, label = label), box.padding = 1.15, max.overlaps = Inf, size = 2.5, family = 'Avenir', segment.color = "#333333", fontface = "bold")+
theme_void()+
theme(
plot.title = element_text(color="black", size=16, face="bold.italic"))+
guides(fill=guide_legend(override.aes=list(shape=21, size=3, colour="black", stroke=1.2))) # need this to get colour in the fill (sample.size) legend
}
return(list(UGA_subcounties_tidy, UGA_districts_tidy, plot1, scnames, UGA_subcounties_tidy_subset, UGA_districts_tidy_subset))
}
# identifying sub-counties where PCC surveys have been conducted #
# Build a lookup of sub-county / district names, sorted by district, from a
# PCC-survey shape file. The attribute columns differ by shape-file vintage:
# 2011 files carry SNAME_2010 / DNAME_2010, while the 2013/2015/2019 files
# carry Subcounty / District.
subcounties_name_func3 <- function(shape_file, year){
  if (year == 2011) {
    names_df <- data.frame(Subcounty_name = shape_file@data$SNAME_2010,
                           District_name  = shape_file@data$DNAME_2010)
    UGA_SC_PCCstudies_names <- names_df[order(names_df$District_name), ]
  }
  if (year %in% c(2013, 2015, 2019)) {
    names_df <- data.frame(Subcounty_name = shape_file@data$Subcounty,
                           District_name  = shape_file@data$District)
    UGA_SC_PCCstudies_names <- names_df[order(names_df$District_name), ]
  }
  return(UGA_SC_PCCstudies_names)
}
# Join PCC-survey presence onto sub-county and district boundary data for one
# survey year (2011, 2013, 2015 or 2019) and draw the survey-location map.
#
# Args:
#   sc_names             : data frame of Subcounty_name / District_name
#                          (output of subcounties_name_func3)
#   UGA_subcounties_tidy : fortified sub-county polygons to annotate
#   district_2001, national_map_input :
#                          NOTE(review): accepted but unused -- the body reads
#                          the globals `districts_2001` and `national_map`
#                          instead; confirm this is intended
#   PCC_survey_year      : selects both the list of surveyed sub-counties and
#                          the column names of the underlying shape file
#
# Returns a list: annotated sub-county data, annotated district data, the
# ggplot, the label coordinates, and the two plotted subsets.
subcounty_PCCstudies_processing_plotting_func <- function(sc_names, UGA_subcounties_tidy, district_2001, national_map_input, PCC_survey_year){
# Sub-county locations where PCC studies occurred.
# 2011 shape files label their columns SNAME_2010 / DNAME_2010.
if (PCC_survey_year == 2011){
PCCstudy_subcounties <-
c("OCHERO", "GALIRAYA", "KIDERA", "NAWAIKOKE", "BULULU", "NAMBIESO",
"AWELO", "ADUMI", "LUMINO", "MUHORRO", "KALUNGU", "GADUMIRE")
sc_names # NOTE(review): bare reference is a no-op left from an earlier edit
sc_names$PCC_survey <- ifelse(sc_names$Subcounty_name %in% PCCstudy_subcounties, "PCC survey","none") # flag surveyed sub-counties
sc_names <- sc_names %>% rename(SNAME_2010 = Subcounty_name, DNAME_2010 = District_name) # match 2011 shape-file columns for the join
# attach survey status, colour and label columns to the boundary data
UGA_subcounties_tidy <- left_join(UGA_subcounties_tidy, sc_names) # join boundary data to survey-presence data
UGA_subcounties_tidy$PCC_survey <- as.factor(UGA_subcounties_tidy$PCC_survey) # factor with levels "PCC survey" / "none"
PCC_survey.SC.col <- c("purple2", NA) # colour surveyed sub-counties; "none" polygons get NA
PCC_survey.SC.vec <- PCC_survey.SC.col[UGA_subcounties_tidy$PCC_survey] # colour per polygon, indexed by factor level
UGA_subcounties_tidy$PCC_survey_colour <- PCC_survey.SC.vec # colour column used by ggplot
UGA_subcounties_tidy$label <- ifelse(UGA_subcounties_tidy$PCC_survey == "PCC survey",
UGA_subcounties_tidy$SNAME_2010, NA) # label only surveyed sub-counties
}
# PCC study locations from 2012 onward use a different shape file
# (columns Subcounty / District).
if (PCC_survey_year == 2013){
PCCstudy_subcounties <-
c("BUTANSI", "KITAYUNJWA", "NAMWENDWA", "BUGULUMBYA", "KABONERA", "KATWE-BUTEGO DIVISION",
"KIMAANYA-KYABAKUZA DIVISION", "KKINGO", "KYANAMUKAAKA", "NYENDO-SSENYANGE DIVISION",
"KYAMPISI", "NTENJERU", "ARAPAI") # there is no Mukono TC sub-county
sc_names # NOTE(review): bare reference is a no-op left from an earlier edit
sc_names$PCC_survey <- ifelse(sc_names$Subcounty_name %in% PCCstudy_subcounties, "PCC survey","none") # flag surveyed sub-counties
sc_names <- sc_names %>% rename(Subcounty = Subcounty_name, District = District_name) # match post-2012 shape-file columns
}
if (PCC_survey_year == 2015){
PCCstudy_subcounties <-
c("OJWINA DIVISION", "BARR", "LIRA", "ADYEL DIVISION", "ADEKOKWOK", "MOYO", "METU")
sc_names # NOTE(review): bare reference is a no-op left from an earlier edit
sc_names$PCC_survey <- ifelse(sc_names$Subcounty_name %in% PCCstudy_subcounties, "PCC survey","none") # flag surveyed sub-counties
sc_names <- sc_names %>% rename(Subcounty = Subcounty_name, District = District_name) # match post-2012 shape-file columns
}
if (PCC_survey_year == 2019){
PCCstudy_subcounties <-
c("PAJULU","LAMOGI","UNYAMA")
sc_names # NOTE(review): bare reference is a no-op left from an earlier edit
sc_names$PCC_survey <- ifelse(sc_names$Subcounty_name %in% PCCstudy_subcounties, "PCC survey","none") # flag surveyed sub-counties
sc_names <- sc_names %>% rename(Subcounty = Subcounty_name, District = District_name) # match post-2012 shape-file columns
}
if (PCC_survey_year == 2013 || PCC_survey_year == 2015 || PCC_survey_year == 2019){
# attach survey status, colour and label columns (post-2012 column names)
UGA_subcounties_tidy <- left_join(UGA_subcounties_tidy, sc_names) # join boundary data to survey-presence data
UGA_subcounties_tidy$PCC_survey <- as.factor(UGA_subcounties_tidy$PCC_survey) # factor with levels "PCC survey" / "none"
PCC_survey.SC.col <- c("purple2", NA) # colour surveyed sub-counties; "none" polygons get NA
PCC_survey.SC.vec <- PCC_survey.SC.col[UGA_subcounties_tidy$PCC_survey] # colour per polygon, indexed by factor level
UGA_subcounties_tidy$PCC_survey_colour <- PCC_survey.SC.vec # colour column used by ggplot
UGA_subcounties_tidy$label <- ifelse(UGA_subcounties_tidy$PCC_survey == "PCC survey",
UGA_subcounties_tidy$Subcounty, NA) # label only surveyed sub-counties
}
# District-level survey flags (2003-2019 analyses use the original 2001 district boundaries).
district_map_0319 <- UGA_district_boundaries_function(shape_file = districts_2001, national_map_input = national_map)
district_names_0319 <- district_name_func(shape_file = districts_2001) # original districts throughout 2003-2019
UGA_dist_PCCsurvey_names <- district_names_0319 # copy (district names)
if (PCC_survey_year == 2011){
PCC_survey_districts <-
c("KABERAMAIDO","KAYUNGA", "KAMULI", "APAC", "LIRA", "ARUA", "BUSIA", "KIBAALE",
"MASAKA") # districts containing 2011 PCC surveys
}
if (PCC_survey_year == 2013){
PCC_survey_districts <-
c("LWENGO", "MUKONO", "MASAKA", "KAMULI","SOROTI")
}
if (PCC_survey_year == 2015){
PCC_survey_districts <-
c("LIRA", "MOYO")
}
if (PCC_survey_year == 2019){
PCC_survey_districts <-
c("ARUA","GULU", "AMURU")
}
UGA_dist_PCCsurvey_names$PCC_survey <- ifelse(district_names_0319$Dist_name %in% PCC_survey_districts, "PCC survey","none") # flag surveyed districts
UGA_dist_PCCsurvey_names <- UGA_dist_PCCsurvey_names %>% rename(DISTRICT = Dist_name) # match district boundary column for the join
UGA_districts_tidy <- left_join(district_map_0319[[2]], UGA_dist_PCCsurvey_names) # join boundary data to survey-presence data
UGA_districts_tidy$PCC_survey <- as.factor(UGA_districts_tidy$PCC_survey) # factor with levels "PCC survey" / "none"
PCC_survey.dist.col <- c("blue",NA) # colour surveyed districts; "none" polygons get NA
PCC_survey.dist.vec <- PCC_survey.dist.col[UGA_districts_tidy$PCC_survey] # colour per polygon, indexed by factor level
UGA_districts_tidy$PCC_survey_colour <- PCC_survey.dist.vec # colour column used by ggplot
# Labels for the plot: centroid (mean long/lat) of each surveyed sub-county.
if (PCC_survey_year == 2011){
UGA_subcounties_tidy_subset <- subset(UGA_subcounties_tidy, PCC_survey=="PCC survey") # surveyed sub-counties only
scnames <- aggregate(cbind(long, lat) ~ SNAME_2010, data=UGA_subcounties_tidy_subset, FUN=mean)
scnames$label <- scnames$SNAME_2010
}
if (PCC_survey_year == 2013 || PCC_survey_year == 2015 || PCC_survey_year == 2019){
UGA_subcounties_tidy_subset <- subset(UGA_subcounties_tidy, PCC_survey=="PCC survey") # surveyed sub-counties only
scnames <- aggregate(cbind(long, lat) ~ Subcounty, data=UGA_subcounties_tidy_subset, FUN=mean)
scnames$label <- scnames$Subcounty
}
# PLOT: national outline, 2001 district boundaries, surveyed districts
# (orange fill), surveyed sub-county outlines, repelled labels.
if (PCC_survey_year == 2011 || PCC_survey_year == 2013 || PCC_survey_year == 2015 || PCC_survey_year == 2019){
UGA_districts_tidy_subset <- subset(UGA_districts_tidy, PCC_survey=="PCC survey") # surveyed districts only
plot1 <- ggplot() +
geom_polygon(data = national_map, aes(x=long, y = lat, group = group), color = "black", size = 0.1, fill = "lightgrey") +
geom_polygon(data = districts_2001, aes(x = long, y = lat, group = group), colour = "black", alpha = 1, fill = NA)+
geom_polygon(data= UGA_districts_tidy_subset, aes(x = long, y = lat, group = group, colour= PCC_survey_colour), fill= "orange", size = 1, alpha=0.25)+
geom_polygon(data= UGA_subcounties_tidy_subset, aes(x = long, y = lat, group = group, colour= PCC_survey), size = 0.75, fill=NA, alpha=NA)+ # NOTE(review): maps colour to the PCC_survey factor, unlike the district layer which uses PCC_survey_colour -- confirm intended
coord_equal(ratio = 1)+
scale_colour_manual(values=c("blue","purple2",NA), guide=FALSE)+
ggrepel::geom_text_repel(data = scnames, aes(long, lat, label = label), box.padding = 1.15, max.overlaps = Inf, size = 2.5, family = 'Avenir', segment.color = "#333333", fontface = "bold")+
theme_void()+
theme(
plot.title = element_text(color="black", size=16, face="bold.italic"))+
guides(fill=guide_legend(override.aes=list(shape=21, size=3, colour="black", stroke=1.2))) # needed to get colour in the fill legend
}
return(list(UGA_subcounties_tidy, UGA_districts_tidy, plot1, scnames, UGA_subcounties_tidy_subset, UGA_districts_tidy_subset))
}
# identifying districts where PCC surveys have been conducted (only district-level data available) #
# Return district names, ordered alphabetically, from a district shape file.
# The attribute column holding the name depends on the shape-file era.
# Note: ordering a one-column data frame with `[order(...), ]` drops it to a
# plain vector (default drop = TRUE); callers rely on that.
districts_name_func3 <- function(shape_file, year){
  if (year == "2002-2005") {
    dist_df <- data.frame(District_name = shape_file@data$DNAME_2006)
    UGA_SC_PCCstudies_names <- dist_df[order(dist_df$District_name), ]
  }
  if (year == "2006-2011") {
    dist_df <- data.frame(District_name = shape_file@data$DNAME_2010)
    UGA_SC_PCCstudies_names <- dist_df[order(dist_df$District_name), ]
  }
  if (year %in% c("2012-2015", "2016-2020")) {
    dist_df <- data.frame(District_name = shape_file@data$District)
    UGA_SC_PCCstudies_names <- dist_df[order(dist_df$District_name), ]
  }
  return(UGA_SC_PCCstudies_names)
}
# District-level version of the PCC-survey mapping: flags districts where PCC
# surveys were conducted (only district-level data available) and draws the
# map. Currently only implemented for PCC_survey_years == "2002-2005".
#
# Args:
#   dist_names, district_2001, national_map_input :
#       NOTE(review): accepted but unused -- the body reads the globals
#       `districts_2001` and `national_map` instead; confirm this is intended
#   PCC_survey_years : period label; for any value other than "2002-2005",
#       `distnames`, `plot1` and `UGA_districts_tidy_subset` are never
#       assigned and the final return() would error -- TODO handle other
#       periods or validate the argument
#
# Returns a list: annotated district data, the ggplot, the label coordinates,
# and the plotted district subset.
district_PCCstudies_processing_plotting_func <- function(dist_names, district_2001, national_map_input, PCC_survey_years){
# District flags (2003-2019 analyses use the original 2001 district boundaries).
district_map_0319 <- UGA_district_boundaries_function(shape_file = districts_2001, national_map_input = national_map)
district_names_0319 <- district_name_func(shape_file = districts_2001) # original districts throughout 2003-2019
UGA_dist_PCCsurvey_names <- district_names_0319 # copy (district names)
if (PCC_survey_years == "2002-2005"){
PCC_survey_districts <-
c("LIRA", "KAMULI") # districts with PCC surveys in 2002-2005
}
UGA_dist_PCCsurvey_names$PCC_survey <- ifelse(district_names_0319$Dist_name %in% PCC_survey_districts, "PCC survey","none") # flag surveyed districts
UGA_dist_PCCsurvey_names <- UGA_dist_PCCsurvey_names %>% rename(DISTRICT = Dist_name) # match district boundary column for the join
UGA_districts_tidy <- left_join(district_map_0319[[2]], UGA_dist_PCCsurvey_names) # join boundary data to survey-presence data
UGA_districts_tidy$PCC_survey <- as.factor(UGA_districts_tidy$PCC_survey) # factor with levels "PCC survey" / "none"
PCC_survey.dist.col <- c("blue",NA) # colour surveyed districts; "none" polygons get NA
PCC_survey.dist.vec <- PCC_survey.dist.col[UGA_districts_tidy$PCC_survey] # colour per polygon, indexed by factor level
UGA_districts_tidy$PCC_survey_colour <- PCC_survey.dist.vec # colour column used by ggplot
# Labels for the plot: centroid (mean long/lat) of each surveyed district.
if (PCC_survey_years == "2002-2005"){
UGA_districts_tidy_subset <- subset(UGA_districts_tidy, PCC_survey=="PCC survey") # surveyed districts only
distnames <- aggregate(cbind(long, lat) ~ DISTRICT, data=UGA_districts_tidy_subset, FUN=mean)
distnames$label <- distnames$DISTRICT
}
# PLOT: national outline, 2001 district boundaries, surveyed districts
# (orange fill), repelled labels.
if (PCC_survey_years == "2002-2005"){
plot1 <- ggplot() +
geom_polygon(data = national_map, aes(x=long, y = lat, group = group), color = "black", size = 0.1, fill = "lightgrey") +
geom_polygon(data = districts_2001, aes(x = long, y = lat, group = group), colour = "black", alpha = 1, fill = NA)+
geom_polygon(data= UGA_districts_tidy_subset, aes(x = long, y = lat, group = group, colour= PCC_survey_colour), fill= "orange", size = 1, alpha=0.25)+
coord_equal(ratio = 1)+
scale_colour_manual(values=c("blue","purple2",NA), guide=FALSE)+
ggrepel::geom_text_repel(data = distnames, aes(long, lat, label = label), box.padding = 1.15, max.overlaps = Inf, size = 2.5, family = 'Avenir', segment.color = "#333333", fontface = "bold")+
theme_void()+
theme(
plot.title = element_text(color="black", size=16, face="bold.italic"))+
guides(fill=guide_legend(override.aes=list(shape=21, size=3, colour="black", stroke=1.2))) # needed to get colour in the fill legend
}
return(list(UGA_districts_tidy, plot1, distnames, UGA_districts_tidy_subset))
}
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.PriorityQueue;
import java.util.StringTokenizer;
/**
 * Minimum-cost maximum-flow (MCMF) by successive shortest-path augmentation.
 * Reads a bipartite assignment instance from stdin: n and m, the n source-side
 * capacities, the m sink-side capacities, then an m x n capacity matrix and an
 * m x n cost matrix. Prints the maximum flow and its minimum cost.
 */
public class Main {
    /** Residual-graph edge; {@code reverse} is the paired back edge. */
    private static class Edge {
        int to, capacity, flow, cost;
        Edge reverse;

        Edge(int to, int capacity, int cost) {
            this.to = to;
            this.capacity = capacity;
            this.cost = cost;
            this.flow = 0;
        }
    }

    private static final int INF = Integer.MAX_VALUE;
    private static int[] prev, distance; // predecessor node / shortest distance per node
    private static Edge[] path;          // edge used to reach each node on the shortest path
    private static ArrayList<ArrayList<Edge>> adj;

    /** Adds a forward edge plus its zero-capacity, negated-cost reverse edge. */
    private static void addEdge(int from, int to, int capacity, int cost) {
        Edge forward = new Edge(to, capacity, cost);
        Edge backward = new Edge(from, 0, -cost);
        forward.reverse = backward;
        backward.reverse = forward;
        adj.get(from).add(forward);
        adj.get(to).add(backward);
    }

    /**
     * Shortest path from source by SPFA (queue-based Bellman-Ford).
     *
     * BUG FIX: the previous implementation ran plain Dijkstra, which is not
     * guaranteed correct once augmentation creates negative-cost reverse
     * edges in the residual graph (without Johnson potentials), and it also
     * returned as soon as the sink was polled. SPFA handles negative edge
     * costs; the residual graph has no negative cycles, so it terminates.
     *
     * @return true if the sink is still reachable in the residual graph
     */
    private static boolean spfa(int source, int sink) {
        Arrays.fill(distance, INF);
        boolean[] inQueue = new boolean[distance.length];
        ArrayDeque<Integer> queue = new ArrayDeque<>();
        distance[source] = 0;
        queue.add(source);
        inQueue[source] = true;
        while (!queue.isEmpty()) {
            int curr = queue.poll();
            inQueue[curr] = false;
            for (Edge edge : adj.get(curr)) {
                if (edge.capacity - edge.flow > 0
                        && distance[curr] + edge.cost < distance[edge.to]) {
                    distance[edge.to] = distance[curr] + edge.cost;
                    prev[edge.to] = curr;
                    path[edge.to] = edge;
                    if (!inQueue[edge.to]) {
                        queue.add(edge.to);
                        inQueue[edge.to] = true;
                    }
                }
            }
        }
        return distance[sink] != INF;
    }

    /**
     * Successive shortest paths: while an augmenting path exists, push the
     * bottleneck flow along it and accumulate its cost.
     *
     * @return {totalFlow, totalCost}
     */
    private static int[] mcmf(int source, int sink) {
        int totalFlow = 0, totalCost = 0, minFlow, i;
        while (spfa(source, sink)) {
            // Bottleneck residual capacity along the shortest path.
            minFlow = INF;
            for (i = sink; i != source; i = prev[i]) {
                minFlow = Math.min(minFlow, path[i].capacity - path[i].flow);
            }
            // Augment: add flow forward, cancel it on the reverse edges.
            for (i = sink; i != source; i = prev[i]) {
                path[i].flow += minFlow;
                path[i].reverse.flow -= minFlow;
            }
            totalFlow += minFlow;
            totalCost += minFlow * distance[sink];
        }
        return new int[] {totalFlow, totalCost};
    }

    public static void main(String[] args) throws IOException {
        BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
        BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(System.out));
        StringBuilder sb = new StringBuilder();
        StringTokenizer st;
        int n, m, source, sink, cost, i, j;
        int[] ans;
        int[][] capacity;
        // Node layout: 0 = source, 1..n = left side, n+1..n+m = right side, n+m+1 = sink.
        st = new StringTokenizer(br.readLine());
        n = Integer.parseInt(st.nextToken());
        m = Integer.parseInt(st.nextToken());
        source = 0;
        sink = n + m + 1;
        adj = new ArrayList<>();
        for (i = 0; i <= sink; i++) {
            adj.add(new ArrayList<>());
        }
        // Source -> left-side capacities (zero cost).
        st = new StringTokenizer(br.readLine());
        for (i = 1; i <= n; i++) {
            addEdge(0, i, Integer.parseInt(st.nextToken()), 0);
        }
        // Right side -> sink capacities (zero cost).
        st = new StringTokenizer(br.readLine());
        for (i = n + 1; i <= n + m; i++) {
            addEdge(i, sink, Integer.parseInt(st.nextToken()), 0);
        }
        // Capacity matrix (given row-per-right-node), then matching cost matrix.
        capacity = new int[n + 1][m + 1];
        for (i = 1; i <= m; i++) {
            st = new StringTokenizer(br.readLine());
            for (j = 1; j <= n; j++) {
                capacity[j][i] = Integer.parseInt(st.nextToken());
            }
        }
        for (i = 1; i <= m; i++) {
            st = new StringTokenizer(br.readLine());
            for (j = 1; j <= n; j++) {
                cost = Integer.parseInt(st.nextToken());
                if (capacity[j][i] > 0) {
                    addEdge(j, n + i, capacity[j][i], cost);
                }
            }
        }
        // Run MCMF and print "maxFlow\nminCost".
        prev = new int[sink + 1];
        path = new Edge[sink + 1];
        distance = new int[sink + 1];
        ans = mcmf(source, sink);
        sb.append(ans[0]).append("\n").append(ans[1]);
        bw.write(sb.toString());
        bw.flush();
        bw.close();
    }
}
package com.courseproject.tindar.controllers.matchlist;
import static org.junit.Assert.assertEquals;
import com.courseproject.tindar.usecases.matchlist.MatchListInputBoundary;
import com.courseproject.tindar.usecases.matchlist.MatchListResponseModel;
import org.junit.Test;
public class MatchListControllerUnitTest {
/**
* This class tests the implementation and return values of MatchListController
**/
private static final String USER_ID_1 = "user1";
public static class MockMatchListUserInput implements MatchListInputBoundary {
/**
* Mock implementation of MatchListInputBoundary for testing purposes
**/
@Override
public MatchListResponseModel getDisplayNamesForMatches(String userId) {
// Mock list of display names from match list
assertEquals(USER_ID_1, userId);
return new MatchListResponseModel(new String[]{}, new String[]{});
}
}
@Test
public void testGetDisplayNamesForMatches() {
// Test that GetDisplayNamesForMatches is called
MatchListInputBoundary testMatchListUserInput = new MatchListControllerUnitTest.MockMatchListUserInput();
MatchListController testMatchListController = new MatchListController(testMatchListUserInput);
testMatchListController.getDisplayNamesForMatches(USER_ID_1);
}
} |
import React, { useEffect, useState } from "react";
import { db } from "../../Apis/firebase";
import { getDocs, collection } from "firebase/firestore";
import { toast } from "react-toastify";
import Moment from "react-moment";
import Styles from "./_admin.module.css";
import { AiOutlineUserSwitch } from "react-icons/ai";
import { Link } from "react-router-dom";
const ListOfUsers = () => {
let [users, setUsers] = useState([]);
useEffect(() => {
try {
let fetchUsers = async () => {
let userCollectionRef = collection(db, "users");
let userData = await getDocs(userCollectionRef);
// console.log(userData);
let payload = userData.docs.map(user => {
return { ...user.data(), id: user.id };
});
setUsers(payload);
};
fetchUsers();
} catch (error) {
toast.error(error.code);
}
}, []);
return (
<div className={Styles.card}>
<h2>
<span>
<AiOutlineUserSwitch />
</span>
Users
</h2>
<p>
<span>total users </span>
<span>{users.length > 0 && users.length}</span>
</p>
<p className={Styles.time}>
<Moment format="D/MM/YYYY" withTitle>
{new Date()}
</Moment>
</p>
<p>
<Link to="/admin/users">view more</Link>
</p>
</div>
);
};
export default ListOfUsers; |
Series I
Write a program to generate the first 'n' terms of the following series 0.5, 1.5, 4.5, 13.5, ...
Input Format:
The input is an integer 'n' which denotes the number of terms to be printed in the series.
Output Format:
Print the series and refer to the sample output for formatting.
Sample Input:
5
Sample Output:
0.5 1.5 4.5 13.5 40.5
program:
#include<iostream>
using namespace std;
// Prints the first n terms of the series 0.5, 1.5, 4.5, 13.5, ...:
// a geometric progression with first term 0.5 and common ratio 3.
// Each term is followed by a single space, matching the original output.
int main()
{
    int count;
    float term = 0.5;
    cin >> count;
    for (int printed = 0; printed < count; printed++)
    {
        cout << term << " ";
        term = term * 3;
    }
    return 0;
}
import AppTable from "../../components/table";
import responseHandler from "../../hooks/response";
import CreateFileModal from "../../components/create-file-modal/create-file-modal";
import columns from "./column-definition";
import { deleteFile, listFile, patchFile } from "../../services/file";
import { useNavigate } from "react-router-dom";
import { useToast, Stack } from "@chakra-ui/react";
import { useQuery, useMutation, useQueryClient } from "react-query";
// Table of uploaded files with create / edit / delete actions, backed by
// react-query ("files" query) and the file REST service.
const FileTable = () => {
  const toast = useToast();
  const navigate = useNavigate();
  const queryClient = useQueryClient();

  // Fetches the file list; on error shows a toast and falls back to [].
  const fetchFiles = async () => {
    const [response, error] = await listFile();
    const toastData = responseHandler("", error, navigate);
    if (!error && response) {
      return response.results;
    } else {
      toast(toastData);
    }
    return [];
  };

  // Marks the "files" query stale so react-query refetches the table data.
  const invalidateQuery = () => {
    queryClient.invalidateQueries("files");
  };

  // Deletes a file by primary key, reports the result and refreshes the table.
  const removeFile = async (id: number) => {
    const [_, error] = await deleteFile(id);
    const toastData = responseHandler(
      "Archivo Borrado Exitosamente",
      error,
      navigate
    );
    toast(toastData);
    invalidateQuery();
  };

  // Patches a file with the edited fields.
  // BUG FIX: the previous version deleted keys from the caller's `data`
  // object in place while iterating it; the payload is now built without
  // mutating the input. Skips the server-managed "modified" column and any
  // falsy values, and unwraps FileList values to their first File.
  const modifyFunction = async (
    data: { [key: string]: string | FileList },
    pk: number
  ) => {
    const payload: { [key: string]: any } = {};
    Object.entries(data).forEach(([key, value]) => {
      if (key === "modified" || !value) {
        return; // skip read-only and empty fields
      }
      if (value instanceof FileList && value.length > 0) {
        payload[key] = value[0];
      } else {
        payload[key] = value;
      }
    });
    const [_, error] = await patchFile(payload, pk);
    const toastData = responseHandler(
      "Archivo Modificado Exitosamente",
      error,
      navigate
    );
    toast(toastData);
    invalidateQuery();
  };

  const { data } = useQuery({
    queryKey: ["files"],
    queryFn: fetchFiles,
  });

  return (
    <Stack>
      <CreateFileModal />
      <AppTable
        columns={columns}
        data={data}
        entityName="Archivo"
        modifyFunction={modifyFunction}
        deleteFunction={removeFile}
        caption="Lista de Archivos"
      />
    </Stack>
  );
};

export default FileTable;
import { Component, ElementRef, QueryList, ViewChildren } from '@angular/core';
import { OnInit } from '@angular/core';
import { Store } from '@ngrx/store';
import { ViewportScroller } from '@angular/common';
import { BaseComponent } from 'src/app/components/base/base.component';
import { UIState } from 'src/app/store/ui.states';
import { FormBuilder, FormGroup, Validators } from '@angular/forms';
import { slideAnimation } from 'src/app/animations/slide.animation';
import { ActivatedRoute, Router } from '@angular/router';
import { ScrollHelper } from 'src/app/services/helper/scroll-helper.services';
import { WrapperOrganisationGroupService } from 'src/app/services/wrapper/wrapper-org--group-service';
import {
OrganisationGroupNameInfo,
OrganisationGroupRequestInfo,
Role,
} from 'src/app/models/organisationGroup';
import { OperationEnum } from 'src/app/constants/enum';
import { Title } from '@angular/platform-browser';
import { FormBaseComponent } from 'src/app/components/form-base/form-base.component';
import { SharedDataService } from 'src/app/shared/shared-data.service';
@Component({
  selector: 'app-manage-group-edit-name',
  templateUrl: './manage-group-edit-name-component.html',
  styleUrls: ['./manage-group-edit-name-component.scss'],
})
// Create/rename screen for organisation groups. In "edit" mode it PATCHes an
// existing group's name; otherwise it creates a new group and moves on to the
// edit-users step.
export class ManageGroupEditNameComponent
  extends FormBaseComponent
  implements OnInit
{
  // Set when a submit attempt starts; cleared on a successful server reply.
  submitted!: boolean;
  // Organisation the group belongs to; read from localStorage in the ctor.
  organisationId: string;
  // Raw navigation payload parsed from the JSON `data` query parameter.
  routeData: any = {};
  // True = renaming an existing group, false = creating a new one.
  isEdit: boolean = false;
  // Id of the group being renamed; 0 when creating.
  editingGroupId: number = 0;
  // Current value of the group-name field.
  groupName: string = '';
  // Tested one character at a time in specialCharsVaidation: matches when the
  // character is a space or one of @().,;:'/#&+-
  private specialChars = /^[ @().,;:'/#&+-]*$/;
  @ViewChildren('input') inputs!: QueryList<ElementRef>;

  constructor(
    protected uiStore: Store<UIState>,
    private formBuilder: FormBuilder,
    private router: Router,
    private activatedRoute: ActivatedRoute,
    protected viewportScroller: ViewportScroller,
    protected scrollHelper: ScrollHelper,
    private orgGroupService: WrapperOrganisationGroupService,
    private titleService: Title,
    private SharedDataService: SharedDataService
  ) {
    // groupName: required, 3-256 chars, letters/digits/space/@().,;:'/#&+- only.
    super(
      viewportScroller,
      formBuilder.group({
        groupName: ['', Validators.compose([Validators.required,Validators.pattern(/^[ A-Za-z0-9@().,;:'/#&+-]*$/),Validators.maxLength(256), Validators.minLength(3)])],
      })
    );
    // Pre-populate edit state from the JSON-encoded `data` query parameter.
    let queryParams = this.activatedRoute.snapshot.queryParams;
    if (queryParams.data) {
      this.routeData = JSON.parse(queryParams.data);
      this.isEdit = this.routeData['isEdit'];
      this.editingGroupId = this.routeData['groupId'] || 0;
      this.groupName = this.routeData['groupName'] || '';
      this.formGroup.controls['groupName'].setValue(this.groupName);
    }
    this.organisationId = localStorage.getItem('cii_organisation_id') || '';
  }

  ngOnInit() {
    this.titleService.setTitle(
      `${this.isEdit ? 'Edit Name' : 'Create'} - Manage Groups - CCS`
    );
    // In edit mode the current name is read from sessionStorage key 'Gname'
    // (presumably written by a sibling screen — confirm), overriding the
    // value taken from the query parameter in the constructor.
    if(this.isEdit){
      this.groupName=sessionStorage.getItem('Gname') || ''
      this.formGroup.controls['groupName'].setValue(this.groupName);
    }
    this.onFormValueChange();
  }

  // True when EVERY character of the entered name matches specialChars, i.e.
  // the name consists solely of special characters / spaces.
  // NOTE(review): an empty string also yields true (0 === 0); the `required`
  // validator keeps that case from reaching onSubmit.
  public get specialCharsVaidation(){
    let indexOfGname=this.formGroup.value.groupName.length
    let indexOfspecialChars:number=0
    for (let i = 0; i < this.formGroup.value.groupName.length; i++) {
      if(this.specialChars.test(this.formGroup.value.groupName[i])){
        indexOfspecialChars = indexOfspecialChars + 1
      }
    }
    return indexOfGname === indexOfspecialChars ? true : false
  }

  // Executes any scroll queued via scrollHelper after each change-detection pass.
  ngAfterViewChecked() {
    this.scrollHelper.doScroll();
  }

  scrollToAnchor(elementId: string): void {
    this.viewportScroller.scrollToAnchor(elementId);
  }

  // Focuses the nth element captured by the #input template references.
  setFocus(inputIndex: number) {
    this.inputs.toArray()[inputIndex].nativeElement.focus();
  }

  // Validates the form, rejects names made only of special characters, then
  // either PATCHes the existing group's name (edit) or creates a new group
  // (create) and navigates to the corresponding follow-up page.
  onSubmit(form: FormGroup) {
    this.submitted = true;
    if (this.formValid(form)) {
      if (!this.specialCharsVaidation) {
        this.groupName = form.get('groupName')?.value;
        if (this.isEdit == true) {
          let groupPatchRequestInfo: OrganisationGroupRequestInfo = {
            groupName: this.groupName,
          };
          this.orgGroupService
            .patchUpdateOrganisationGroup(
              this.organisationId,
              this.editingGroupId,
              groupPatchRequestInfo
            )
            .subscribe(
              (result) => {
                this.submitted = false;
                let data = {
                  isEdit: this.isEdit,
                  groupId: this.editingGroupId,
                };
                this.router.navigateByUrl(
                  `manage-groups/operation-success/${OperationEnum.GroupNameUpdate}?data=` +
                    JSON.stringify(data)
                );
              },
              (error) => {
                // 409: a group with this name already exists.
                if (error.status == 409) {
                  form.controls['groupName'].setErrors({ alreadyExists: true });
                  this.scrollHelper.scrollToFirst('error-summary');
                }
                // 400: server rejected the name (invalid characters).
                if (error.status == 400) {
                  this.formGroup.controls['groupName'].setErrors({ 'specialCharsincluded': true})
                  this.scrollHelper.scrollToFirst('error-summary');
                }
                console.log(error);
                console.log(error.error);
              }
            );
        } else {
          let groupRequest: OrganisationGroupNameInfo = {
            groupName: this.groupName,
          };
          // Persist the chosen name so follow-up screens can read it back.
          this.SharedDataService.manageGroupStorage(this.groupName);
          this.orgGroupService
            .createOrganisationGroups(this.organisationId, groupRequest)
            .subscribe(
              (result) => {
                // result is the new group's id; 0 signals failure.
                if (result != 0) {
                  this.submitted = false;
                  let data = {
                    isEdit: false,
                    groupId: result,
                    userNames: [],
                  };
                  this.router.navigateByUrl(
                    'manage-groups/edit-users?data=' + JSON.stringify(data)
                  );
                }
              },
              (error) => {
                // Same 409/400 handling as the edit path above.
                if (error.status == 409) {
                  form.controls['groupName'].setErrors({ alreadyExists: true });
                  this.scrollHelper.scrollToFirst('error-summary');
                }
                if (error.status == 400) {
                  this.formGroup.controls['groupName'].setErrors({ 'specialCharsincluded': true})
                  this.scrollHelper.scrollToFirst('error-summary');
                }
                console.log(error);
                console.log(error.error);
              }
            );
        }
      }else{
        // Client-side rejection: name consists only of special characters.
        this.formGroup.controls['groupName'].setErrors({ 'specialCharsincluded': true})
      }
    } else {
      this.scrollHelper.scrollToFirst('error-summary');
    }
  }

  // Null-safe wrapper around Angular's form validity flag.
  formValid(form: FormGroup): Boolean {
    if (form == null) return false;
    if (form.controls == null) return false;
    return form.valid;
  }

  // "Cancel" in edit mode: back to the group's view page with original route data.
  onCancelAndGoToGroupClick() {
    if (this.isEdit == true) {
      this.router.navigateByUrl(
        'manage-groups/view?data=' + JSON.stringify(this.routeData)
      );
    }
  }

  // "Cancel" in create mode: back to the groups list.
  onCancelClick() {
    this.router.navigateByUrl('manage-groups');
  }
}
<?php
namespace Src\Features\BaseApp\Presentation\Controllers\DashBoardControllers;
use App\Http\Controllers\Controller;
use Src\Base\Response\DataSuccess;
use Src\Features\BaseApp\Core\Requests\WebRequests\Package\AddPackageWebRequest;
use Src\Features\BaseApp\Core\Requests\WebRequests\Package\DeletePackageWebRequest;
use Src\Features\BaseApp\Core\Requests\WebRequests\Package\EditePackageWebRequest;
use Src\Features\BaseApp\Core\Requests\WebRequests\Package\GoToEditePackageWebRequest;
use Src\Features\BaseApp\Domain\Services\DashBoard\DashPackageService;
use Src\Features\BaseApp\Domain\Services\DashBoard\DashServiceService;
class DashPackageController extends Controller
{
    private DashPackageService $dashPackageService;
    private DashServiceService $dashServiceService;

    /**
     * @param DashPackageService $dashPackageService
     * @param DashServiceService $dashServiceService
     */
    public function __construct(DashPackageService $dashPackageService, DashServiceService $dashServiceService)
    {
        $this->dashPackageService = $dashPackageService;
        $this->dashServiceService = $dashServiceService;
    }

    /** Renders the package index, with the package list when loading succeeded. */
    public function index()
    {
        $state = $this->dashPackageService->index();
        if (!($state instanceof DataSuccess)) {
            return view('dashboard.package.index');
        }
        return view('dashboard.package.index', ['packages' => $state->getData()]);
    }

    /** Creates a package and redirects to the packages route on success. */
    public function create(AddPackageWebRequest $request)
    {
        $state = $this->dashPackageService->create($request);
        return $state instanceof DataSuccess
            ? $state->response(route: 'packages')
            : $state->response();
    }

    /** Renders the edit form for one package together with the service list. */
    public function goToEditeBlade(GoToEditePackageWebRequest $request)
    {
        $servicesState = $this->dashServiceService->index();
        $packageState = $this->dashPackageService->find($request);
        if (!($packageState instanceof DataSuccess)) {
            // NOTE(review): dd(false) looks like a debug leftover — confirm the
            // intended failure response before replacing it.
            dd(false);
        }
        return view('dashboard.package.add', [
            'package' => $packageState->getData(),
            'services' => $servicesState->getData(),
        ]);
    }

    /** Renders the create form, preloading the available services. */
    public function goToAddBlade()
    {
        $servicesState = $this->dashServiceService->index();
        if (!($servicesState instanceof DataSuccess)) {
            // NOTE(review): dd(false) looks like a debug leftover — confirm the
            // intended failure response before replacing it.
            dd(false);
        }
        return view('dashboard.package.add', ['services' => $servicesState->getData()]);
    }

    /** Updates a package and redirects to the packages route on success. */
    public function update(EditePackageWebRequest $request)
    {
        $state = $this->dashPackageService->update($request);
        return $state instanceof DataSuccess
            ? $state->response(route: 'packages')
            : $state->response();
    }

    /** Deletes a package and redirects back to the packages route on success. */
    public function delete(DeletePackageWebRequest $request)
    {
        $state = $this->dashPackageService->delete($request);
        return $state instanceof DataSuccess
            ? $state->redirect('packages')
            : $state->response();
    }
}
<script setup>
import AppLayout from "@/Layouts/AppLayout.vue";
import Welcome from "@/Components/Welcome.vue";
import DataTable from "primevue/datatable";
import Column from "primevue/column";
import Button from "primevue/button";
import Dialog from "primevue/dialog";
import HeaderCard from "../SubComponents/HeaderCard.vue";
import Calendar from "primevue/calendar";
import InputText from "primevue/inputtext";
import FileUpload from "primevue/fileupload";
// BUG FIX: the template uses <Textarea> (task description field) but the
// component was never imported/registered, so Vue treated it as an unknown
// element. Import it from PrimeVue like the other form components.
import Textarea from "primevue/textarea";
import { ref } from "vue";

// v-model for the task search input in the header bar.
const value1 = ref(null);
</script>
<template>
    <AppLayout title="Dashboard">
        <div>
            <HeaderCard title="Docente" subtitle="Bienvenido, aqui puedes ver, listar y crear nuevas tareas" />
            <div class="bg-white min-h-full overflow-hidden shadow-xl sm:rounded-lg p-4 flex flex-col border gap-8">
                <h2 class="py-2 text-3xl text-center font-bold text-gray-800">
                    Lista de Tareas Creadas
                </h2>
                <div class="flex justify-between flex-row-reverse gap-4">
                    <!-- Task search box (model: value1) -->
                    <div class="card flex flex-wrap justify-content-center gap-4">
                        <span class="p-input-icon-left">
                            <i class="pi pi-search" />
                            <InputText v-model="value1" type="text" placeholder="Buscar Tarea" autofocus />
                        </span>
                    </div>
                    <!-- "New task" button and its creation dialog (visibility: formulario) -->
                    <div>
                        <Button label="Nueva Tarea" icon="pi pi-plus" @click="formulario = true" outline />
                        <Dialog v-model:visible="formulario" header="Crear Nueva Tarea" :style="{
                            width: '60vw',
                            background: 'white',
                        }">
                            <form class="flex gap-5">
                                <div class="flex flex-col w-1/2 gap-4">
                                    <div class="flex-1">
                                        <label class="font-bold block mb-2 text-sm">
                                            Nombre de Tarea:
                                        </label>
                                        <span class="p-input-icon-right w-full">
                                            <i class="pi pi-spin pi-pencil" />
                                            <InputText v-model="value2" class="w-full" />
                                        </span>
                                    </div>
                                    <div class="flex gap-2 w-full">
                                        <div class="w-1/2">
                                            <label class="font-bold block mb-2 text-sm">
                                                Calificación Maxima:
                                            </label>
                                            <span class="p-input-icon-right w-full">
                                                <i class="pi pi-user-edit" />
                                                <InputText v-model="value2" class="w-full" />
                                            </span>
                                        </div>
                                        <div class="w-1/2">
                                            <label class="font-bold block mb-2 text-sm">
                                                Numero de Intentos:
                                            </label>
                                            <span class="p-input-icon-right w-full">
                                                <i class="pi pi-sort-numeric-up-alt" />
                                                <InputText v-model="value2" class="w-full" />
                                            </span>
                                        </div>
                                    </div>
                                    <div class="flex gap-2 w-full">
                                        <div class="flex-1/2">
                                            <label class="font-bold block mb-2 text-sm">
                                                Fecha de Inicio:
                                            </label>
                                            <Calendar id="calendar-12h" v-model="datetime12h" showIcon showTime hourFormat="12" />
                                        </div>
                                        <div class="flex-1/2">
                                            <label class="font-bold block mb-2 text-sm">
                                                Fecha de Vencimiento:
                                            </label>
                                            <Calendar id="calendar-12h" v-model="datetime12h" showIcon showTime hourFormat="12" />
                                        </div>
                                    </div>
                                    <div class="flex-1">
                                        <label class="font-bold block mb-2 text-sm">
                                            Descripcion:
                                        </label>
                                        <!-- NOTE(review): make sure the Textarea component is imported/registered for this SFC -->
                                        <Textarea v-model="value" rows="5" cols="30"
                                            class="w-full rounded-md border border-black/20" />
                                    </div>
                                </div>
                                <div class="w-1/2 flex flex-col gap-4">
                                    <div>
                                        <label class="font-bold block mb-2 text-sm">
                                            Seleccionar Archivo:
                                        </label>
                                        <FileUpload name="" url="./upload.php" @upload="onAdvancedUpload($event)"
                                            :multiple="true" accept="application/pdf" :maxFileSize="10000000"
                                            chooseLabel="Subir" class="bg-red-500" :style="{
                                                background: '#0079FF',
                                            }">
                                            <template #empty>
                                                <p>
                                                    Carga un Documento PDF,
                                                    Word, excel o JPG; menor a
                                                    20MB
                                                </p>
                                            </template>
                                        </FileUpload>
                                    </div>
                                    <div class="flex justify-end gap-4">
                                        <Button size="small" severity="help" outlined label="Cancelar" />
                                        <Button size="small" label="Guardar" />
                                    </div>
                                </div>
                            </form>
                        </Dialog>
                    </div>
                    <!-- <div>
                        <Button icon="pi pi-file-pdf" severity="danger" outlined @click="visible = true" />
                        <Button icon="pi pi-file-excel" severity="success" outlined />
                        <Button icon="pi pi-code" outlined />
                        <Dialog v-model:visible="visible" modal header="Header" :style="{ width: '50vw' }">
                            <p>
                                Lorem ipsum dolor sit amet, consectetur
                                adipiscing elit, sed do eiusmod tempor
                                incididunt ut labore et dolore magna aliqua. Ut
                                enim ad minim veniam, quis nostrud exercitation
                                ullamco laboris nisi ut aliquip ex ea commodo
                                consequat. Duis aute irure dolor in
                                reprehenderit in voluptate velit esse cillum
                                dolore eu fugiat nulla pariatur. Excepteur sint
                                occaecat cupidatat non proident, sunt in culpa
                                qui officia deserunt mollit anim id est laborum.
                            </p>
                        </Dialog>
                    </div> -->
                </div>
                <!-- Paginated table of created tasks (rows come from `customers`) -->
                <div class="card">
                    <DataTable :value="customers" paginator :rows="7" :rowsPerPageOptions="[5, 10, 20, 50]"
                        tableStyle="min-width: 50rem">
                        <Column field="name_task" header="Nombre Tareas" style="width: 20%" sortable></Column>
                        <Column field="description" header="Descripcion" style="width: 20%" sortable></Column>
                        <Column field="date_v" header="Fecha de Vencimiento" style="width: 25%" sortable></Column>
                        <Column field="state" header="Estado" style="width: 15%" sortable></Column>
                        <Column field="buttons" header="Acciones" style="width: 25%" sortable>
                            <template #body="slotProps">
                                <div class="flex gap-3">
                                    <Button icon="pi pi-eye" outlined rounded value="primary"
                                        @click="confirmDeleteProduct(slotProps.data)" />
                                    <Button icon="pi pi-trash" outlined rounded severity="danger"
                                        @click="confirmDeleteProduct(slotProps.data)" />
                                    <Button icon="pi pi-download" outlined rounded severity="info"
                                        @click="confirmDeleteProduct(slotProps.data)" />
                                </div>
                            </template>
                        </Column>
                    </DataTable>
                </div>
                <!-- <Button label="Check" icon="pi pi-check" /> -->
            </div>
        </div>
    </AppLayout>
</template>
<script>
import { ref } from "vue";
import Calendars from './Calendars.vue';
const visible = ref(false);
const formulario = ref(false);
export default {
components: {
Button,
},
data() {
return {
count: 0,
customers: [
{
name_task: "Control de lectura SCRUM",
description: "Leer la guia de SCRUM",
date_v: "06/07/2023",
state: "Activo",
},
{
name_task: "Revision de Avance",
description: "Revision de repositorios-unap...",
date_v: "04/07/2023",
state: "Vencido",
},
{
name_task: "Desarrollo de pruebas Unitarias",
description: "Utilizar TDD en sus proyectos",
date_v: "18/05/2023",
state: "Vencido",
},
],
};
},
components: {
DataTable,
HeaderCard,
Calendars,
},
};
</script> |
// Station -> connects to the home router.
// NTP time -> over the Wi-Fi connection we obtain the real time of any
//             time zone in the world.
// RTC -> Real Time Clock; keeps the time after the initial NTP setup
//        (it also works when there is no Wi-Fi connection).
#include <WiFi.h>
#include "time.h"
#include <ESP32Time.h>
// Replace with your network credentials (STATION)
const char* ssid = "Enric";
const char* password = "5a303030324346433132393633";
// Address of the NTP server we will use
const char* ntpServer = "europe.pool.ntp.org";
// Time-zone offset: UTC+1.00 corresponds to 3600 seconds
// (the original comment said "ms"; configTime takes seconds)
const long gmtOffset_sec = 3600;
// Daylight-saving compensation (clock moved twice a year): 3600 if applied, 0 if not.
const int daylightOffset_sec = 0;
// RTC object: lets us set and query the kept time.
ESP32Time rtc;
// One-shot initialisation: connect to Wi-Fi, sync the clock from NTP,
// seed the ESP32Time RTC object, then power the Wi-Fi radio off.
void setup() {
  Serial.begin(115200);
  initWiFi();
  // RSSI: Received Signal Strength Indication.
  // The scale runs from 0 to -100: 0 is the strongest reception, -100 the weakest.
  // https://teamdynamix.umich.edu/TDClient/47/LSAPortal/KB/ArticleDet?ID=1644
  Serial.print("RSSI: ");
  Serial.println(WiFi.RSSI());
  // Init and get the time
  configTime(gmtOffset_sec, daylightOffset_sec, ntpServer); // update the internal RTC from NTP
  struct tm timeinfo;
  if (getLocalTime(&timeinfo)){ // copy the internal RTC time into the tm struct
    rtc.setTimeStruct(timeinfo); // seed the rtc object from that struct
  }
  // Wi-Fi is only needed for the one-shot NTP sync; shut it down afterwards.
  WiFi.disconnect(true);
  WiFi.mode(WIFI_OFF);
}
// Print the RTC-kept local time once per second (works with Wi-Fi off).
void loop() {
  delay(1000);
  printLocalTime();
}
// Connect to the configured access point in station mode, blocking until
// the connection succeeds, then report the assigned local IP.
void initWiFi() {
  WiFi.mode(WIFI_STA);
  WiFi.begin(ssid, password);
  Serial.print("Connecting to WiFi ..");
  while (WiFi.status() != WL_CONNECTED) {
    Serial.print('.');
    delay(1000);
  }
  Serial.println(" Connected");
  Serial.print("Local IP: ");
  Serial.println(WiFi.localIP());
}
// Print the current local time ("Weekday, Month DD YYYY HH:MM:SS") to Serial,
// or an error message if the time cannot be obtained.
void printLocalTime()
{
  struct tm timeinfo;
  if(!getLocalTime(&timeinfo)){
    Serial.println("Failed to obtain time");
    return;
  }
  Serial.println(&timeinfo, "%A, %B %d %Y %H:%M:%S");
}
// Disconnecting from and reconnecting to Wi-Fi — possible options to consider:
// https://randomnerdtutorials.com/esp32-useful-wi-fi-functions-arduino/#8
<!--
$Header: /home/cvs/lucas/doc-postgresql-es/diferencia/src/sgml/ref/alter_table.sgml,v 1.2 2001/10/08 17:33:26 rssantos Exp $
Postgres documentation
-->
<refentry id="SQL-ALTERTABLE">
<refmeta>
<refentrytitle id="sql-altertable-title">
ALTER TABLE
</refentrytitle>
<refmiscinfo>SQL - Language Statements</refmiscinfo>
</refmeta>
<refnamediv>
<refname>
ALTER TABLE
</refname>
<refpurpose>
Modifies table properties
</refpurpose>
</refnamediv>
<refsynopsisdiv>
<refsynopsisdivinfo>
<date>1999-07-20</date>
</refsynopsisdivinfo>
<synopsis>
ALTER TABLE [ ONLY ] <replaceable class="PARAMETER">table</replaceable> [ * ]
ADD [ COLUMN ] <replaceable class="PARAMETER">column</replaceable> <replaceable
class="PARAMETER">type</replaceable>
ALTER TABLE [ ONLY ] <replaceable class="PARAMETER">table</replaceable> [ * ]
ALTER [ COLUMN ] <replaceable class="PARAMETER">column</replaceable> { SET DEFAULT <replaceable
class="PARAMETER">value</replaceable> | DROP DEFAULT }
ALTER TABLE <replaceable class="PARAMETER">table</replaceable> [ * ]
RENAME [ COLUMN ] <replaceable class="PARAMETER">column</replaceable> TO <replaceable
class="PARAMETER">newcolumn</replaceable>
ALTER TABLE <replaceable class="PARAMETER">table</replaceable>
RENAME TO <replaceable class="PARAMETER">newtable</replaceable>
ALTER TABLE <replaceable class="PARAMETER">table</replaceable>
ADD <replaceable class="PARAMETER">table constraint definition</replaceable>
ALTER TABLE <replaceable class="PARAMETER">table</replaceable>
OWNER TO <replaceable class="PARAMETER">new owner</replaceable>
</synopsis>
<refsect2 id="R2-SQL-ALTERTABLE-1">
<refsect2info>
<date>1998-04-15</date>
</refsect2info>
<title>
Inputs
</title>
<para>
<variablelist>
<varlistentry>
<term><replaceable class="PARAMETER"> table </replaceable></term>
<listitem>
<para>
The name of an existing table to alter.
</para>
</listitem>
</varlistentry>
<varlistentry>
<term><replaceable class="PARAMETER"> column </replaceable></term>
<listitem>
<para>
Name of a new or existing column.
</para>
</listitem>
</varlistentry>
<varlistentry>
<term><replaceable class="PARAMETER"> type </replaceable></term>
<listitem>
<para>
Type of the new column.
</para>
</listitem>
</varlistentry>
<varlistentry>
<term><replaceable class="PARAMETER"> newcolumn </replaceable></term>
<listitem>
<para>
New name for an existing column.
</para>
</listitem>
</varlistentry>
<varlistentry>
<term><replaceable class="PARAMETER"> newtable </replaceable></term>
<listitem>
<para>
New name for the table.
</para>
</listitem>
</varlistentry>
<varlistentry>
<term><replaceable class="PARAMETER"> table constraint definition </replaceable></term>
<listitem>
<para>
New table constraint for the table
</para>
</listitem>
</varlistentry>
<varlistentry>
<term><replaceable class="PARAMETER">New user </replaceable></term>
<listitem>
<para>
The user name of the new owner of the table.
</para>
</listitem>
</varlistentry>
</variablelist>
</para>
</refsect2>
<refsect2 id="R2-SQL-ALTERTABLE-2">
<refsect2info>
<date>1998-04-15</date>
</refsect2info>
<title>
Outputs
</title>
<para>
<variablelist>
<varlistentry>
<term><computeroutput>ALTER</computeroutput></term>
<listitem>
<para>
Message returned from column or table renaming.
</para>
</listitem>
</varlistentry>
<varlistentry>
<term><computeroutput>ERROR</computeroutput></term>
<listitem>
<para>
Message returned if table or column is not available.
</para>
</listitem>
</varlistentry>
</variablelist>
</para>
</refsect2>
</refsynopsisdiv>
<refsect1 id="R1-SQL-ALTERTABLE-1">
<refsect1info>
<date>1998-04-15</date>
</refsect1info>
<title>
Description
</title>
<para>
<command>ALTER TABLE</command> changes the definition of an existing table.
The <literal>ADD COLUMN</literal> form adds a new column to the table
using the same syntax as <xref linkend="SQL-CREATETABLE"
endterm="SQL-CREATETABLE-title">. The <literal>ALTER COLUMN</literal> form
allows you to set or remove the default for the column. Note that defaults
only apply to newly inserted rows.
The <literal>RENAME</literal> clause causes the name of a table or column
to change without changing any of the data contained in
the affected table. Thus, the table or column will
remain of the same type and size after this command is
executed.
The ADD <replaceable class="PARAMETER">table constraint definition</replaceable> clause
adds a new constraint to the table using the same syntax as <xref
linkend="SQL-CREATETABLE" endterm="SQL-CREATETABLE-title">.
The OWNER clause changes the owner of the table to the user <replaceable class="PARAMETER">
new user</replaceable>.
</para>
<para>
You must own the table in order to change its schema.
</para>
<refsect2 id="R2-SQL-ALTERTABLE-3">
<refsect2info>
<date>1998-04-15</date>
</refsect2info>
<title>
Notes
</title>
<para>
The keyword <literal>COLUMN</literal> is noise and can be omitted.
</para>
<para>
In the current implementation, default and constraint clauses for the
new column will be ignored. You can use the <literal>SET DEFAULT</literal>
form of <command>ALTER TABLE</command> to set the default later.
(You will also have to update the already existing rows to the
new default value, using <xref linkend="sql-update"
endterm="sql-update-title">.)
</para>
<para>
In the current implementation, only FOREIGN KEY constraints can
be added to a table. To create or remove a unique constraint, create
a unique index (see <xref linkend="SQL-CREATEINDEX"
endterm="SQL-CREATEINDEX-title">). To add check constraints
you need to recreate and reload the table, using other
parameters to the <xref linkend="SQL-CREATETABLE"
endterm="SQL-CREATETABLE-title"> command.
</para>
<para>
You must own the table in order to change it.
Renaming any part of the schema of a system
catalog is not permitted.
The <citetitle>PostgreSQL User's Guide</citetitle> has further
information on inheritance.
</para>
<para>
Refer to <command>CREATE TABLE</command> for a further description
of valid arguments.
</para>
</refsect2>
</refsect1>
<refsect1 id="R1-SQL-ALTERTABLE-2">
<title>
Usage
</title>
<para>
To add a column of type VARCHAR to a table:
<programlisting>
ALTER TABLE distributors ADD COLUMN address VARCHAR(30);
</programlisting>
</para>
<para>
To rename an existing column:
<programlisting>
ALTER TABLE distributors RENAME COLUMN address TO city;
</programlisting>
</para>
<para>
To rename an existing table:
<programlisting>
ALTER TABLE distributors RENAME TO suppliers;
</programlisting>
</para>
<para>
To add a foreign key constraint to a table:
<programlisting>
ALTER TABLE distributors ADD CONSTRAINT distfk FOREIGN KEY (address) REFERENCES addresses(address) MATCH FULL
</programlisting>
</para>
</refsect1>
<refsect1 id="R1-SQL-ALTERTABLE-3">
<title>
Compatibility
</title>
<refsect2 id="R2-SQL-ALTERTABLE-4">
<refsect2info>
<date>1998-04-15</date>
</refsect2info>
<title>SQL92</title>
<para>
The <literal>ADD COLUMN</literal> form is compliant with the exception that
it does not support defaults and constraints, as explained above.
The <literal>ALTER COLUMN</literal> form is in full compliance.
</para>
<para>
SQL92 specifies some additional capabilities for <command>ALTER TABLE</command>
statement which are not yet directly supported by <productname>Postgres</productname>:
<variablelist>
<varlistentry>
<term>
<synopsis>
ALTER TABLE <replaceable class="PARAMETER">table</replaceable> DROP CONSTRAINT <replaceable class="PARAMETER">constraint</replaceable> { RESTRICT | CASCADE }
</synopsis>
</term>
<listitem>
<para>
Removes a table constraint (such as a check constraint,
unique constraint, or foreign key constraint). To
remove a unique constraint, drop a unique index.
To remove other kinds of constraints you need to recreate
and reload the table, using other parameters to the
<xref linkend="SQL-CREATETABLE" endterm="SQL-CREATETABLE-title">
command.
</para>
<para>
For example, to drop any constraints on a table <literal>distributors</literal>:
<programlisting>
CREATE TABLE temp AS SELECT * FROM distributors;
DROP TABLE distributors;
CREATE TABLE distributors AS SELECT * FROM temp;
DROP TABLE temp;
</programlisting>
</para>
</listitem>
</varlistentry>
<varlistentry>
<term>
<synopsis>
ALTER TABLE <replaceable class="PARAMETER">table</replaceable> DROP [ COLUMN ] <replaceable class="PARAMETER">column</replaceable> { RESTRICT | CASCADE }
</synopsis>
</term>
<listitem>
<para>
Removes a column from a table.
Currently, to remove an existing column the table must be
recreated and reloaded:
<programlisting>
CREATE TABLE temp AS SELECT did, city FROM distributors;
DROP TABLE distributors;
CREATE TABLE distributors (
did DECIMAL(3) DEFAULT 1,
name VARCHAR(40) NOT NULL
);
INSERT INTO distributors SELECT * FROM temp;
DROP TABLE temp;
</programlisting>
</para>
</listitem>
</varlistentry>
</variablelist>
</para>
<para>
The clauses to rename columns and tables are <productname>Postgres</productname>
extensions from SQL92.
</para>
</refsect2>
</refsect1>
</refentry>
<!-- Keep this comment at the end of the file
Local variables:
mode: sgml
sgml-omittag:nil
sgml-shorttag:t
sgml-minimize-attributes:nil
sgml-always-quote-attributes:t
sgml-indent-step:1
sgml-indent-data:t
sgml-parent-document:nil
sgml-default-dtd-file:"../reference.ced"
sgml-exposed-tags:nil
sgml-local-catalogs:"/usr/lib/sgml/catalog"
sgml-local-ecat-files:nil
End:
--> |
<!DOCTYPE html>
<html lang="es">
<head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <title>Document</title>
    <script src="https://cdn.jsdelivr.net/npm/vue@2.5.16/dist/vue.js"></script>
    <link href="https://cdn.jsdelivr.net/npm/bootstrap@5.3.1/dist/css/bootstrap.min.css" rel="stylesheet"
        integrity="sha384-4bw+/aepP/YC94hEpVNVgiZdgIC5+VKNBQNGCHeKRQN+PtmoHDEXuppvnDJzQIu9" crossorigin="anonymous">
    <script src="./js/app.js" defer></script>
</head>
<body>
    <main id="app">
        <!-- Barra de búsqueda -->
        <div class="Barra">
            <input type="text" v-model="inputBusqueda" placeholder="Buscar...">
            <button v-on:click="buscar">Buscar</button>
        </div>
        <hr>
        <!-- Resultados de la búsqueda -->
        <div v-if="resultadosBusqueda.length > 0" class="resultados" alt="Ingrese el nombre de la pelicula">
            <div v-for="(pelicula, index) in resultadosBusqueda" class="col-md-2 card p-4 m-1">
                <img v-bind:src="'https://image.tmdb.org/t/p/w500/' + pelicula.poster_path" alt="Movie Poster"
                    v-bind:alt="pelicula.id" class="img-fluid">
                <button v-on:click="getDetalle(pelicula.id)">Detalle</button>
                <h5>{{ pelicula.title }}</h5>
            </div>
        </div>
        <!-- Resultados Fetch Api -->
        <!-- <div class="row">
            <div v-for="(pelicula, index) in peliculas" class="col-md-2 card p-4 m-1">
                <img v-bind:src="'https://image.tmdb.org/t/p/w500/' + pelicula.poster_path" alt="Movie Poster"
                    v-bind:alt="pelicula.id" class="img-fluid">
                <button v-on:click="getDetalle(pelicula.id)">detalle</button>
                <p>{{ movie.title}}</p>
            </div>
        </div> -->
        <!-- Contenedor de resultados por defecto -->
        <div v-else class="row">
            <div v-for="(pelicula, index) in peliculas" class="col-md-2 card p-4 m-1">
                <div v-on:click="getDetalle(pelicula.id)">
                    <img v-bind:src="'https://image.tmdb.org/t/p/w500/' + pelicula.poster_path" alt="Movie Poster"
                        v-bind:alt="pelicula.id" class="img-fluid">
                </div>
                <button v-on:click="getDetalle(pelicula.id)"><h5><strong>{{ pelicula.title }}</strong></h5></button>
                <!-- BUG FIX: this binding referenced `movie`, which is not defined in
                     the component or the v-for; use the loop alias `pelicula` -->
                <h5>{{ pelicula.title }}</h5>
            </div>
        </div>
    </main>
</body>
</html>
using System.Data;
using HiloGuessing.Domain.Interfaces;
using HiLoGuessing.Application.Services.Interfaces;
using HiloGuessing.Domain.Entities;
using Serilog;
using static System.Net.Mime.MediaTypeNames;
namespace HiLoGuessing.Application.Services
{
/// <summary>
/// Application service for the Hi-Lo guessing game: creates sessions,
/// generates and stores mystery numbers, and resets/retrieves sessions
/// through the <see cref="IRepository{T}"/>. Every operation logs its
/// intent; failures are logged and re-thrown to the caller.
/// </summary>
public class HiLoGuessService : IHiLoGuessService
{
    private readonly IRepository<HiLoGuess> _hiloRepository;
    private readonly ILogger _logger;

    public HiLoGuessService(IRepository<HiLoGuess> hiloRepository, ILogger logger)
    {
        _hiloRepository = hiloRepository;
        _logger = logger;
    }

    /// <summary>Returns all stored Hi-Lo game sessions.</summary>
    public async Task<List<HiLoGuess>> GetAllHiLoGuessesAsync()
    {
        try
        {
            _logger.Information("Getting all hilo guesses");
            return await _hiloRepository.GetAllAsync();
        }
        catch (Exception e)
        {
            _logger.Error(e, "Error getting all hilo guesses");
            throw;
        }
    }

    /// <summary>
    /// Creates and persists a new game session for the given player with a
    /// fresh attempts counter and no mystery number yet.
    /// </summary>
    /// <param name="playerName">Display name of the player starting the game.</param>
    public async Task<HiLoGuess> CreateHiLoGuessAsync(string playerName)
    {
        try
        {
            _logger.Information("Creating new hilo guess");
            var hilo = new HiLoGuess
            {
                Attempts = new Attempts(),
                Player = new Player { Name = playerName },
            };
            return await _hiloRepository.AddAsync(hilo);
        }
        catch (Exception e)
        {
            _logger.Error(e, "Error creating new hilo guess");
            throw;
        }
    }

    /// <summary>
    /// Generates a random mystery number, stores it on the session and
    /// returns it. Note <see cref="Random.Next(int, int)"/> draws from
    /// [min, max) — the upper bound is exclusive.
    /// </summary>
    public async Task<int> CreateMysteryNumberAsync(Guid id, int max, int min)
    {
        try
        {
            _logger.Information("Creating new mystery number");
            var hilo = await _hiloRepository.GetByIdAsync(id);
            var next = new Random().Next(min, max);
            hilo.GeneratedMysteryNumber = next;
            await _hiloRepository.UpdateAsync(hilo);
            return next;
        }
        catch (Exception e)
        {
            _logger.Error(e, "Error creating new mystery number");
            throw;
        }
    }

    /// <summary>Overwrites the session's mystery number with a caller-supplied value.</summary>
    public async Task UpdateHiLoGuessMysteryNumberAsync(Guid id, int generatedMysteryNumber)
    {
        try
        {
            _logger.Information("Updating hilo guess mystery number");
            var hilo = await _hiloRepository.GetByIdAsync(id);
            hilo.GeneratedMysteryNumber = generatedMysteryNumber;
            await _hiloRepository.UpdateAsync(hilo);
        }
        catch (Exception e)
        {
            _logger.Error(e, "Error updating hilo guess mystery number");
            // BUG FIX: this catch previously swallowed the exception, silently
            // reporting success to callers. Re-throw, consistent with every
            // other method in this service.
            throw;
        }
    }

    /// <summary>Returns the session's mystery number, or 0 if the session does not exist.</summary>
    public async Task<int> GetMysteryNumberAsync(Guid id)
    {
        try
        {
            _logger.Information("Getting mystery number");
            var mysteryNumber = await _hiloRepository.GetByIdAsync(id);
            return mysteryNumber?.GeneratedMysteryNumber ?? 0;
        }
        catch (Exception e)
        {
            _logger.Error(e, "Error getting mystery number");
            throw;
        }
    }

    /// <summary>Resets the session's mystery number to 0 (no active round).</summary>
    public async Task ResetHiLoGuessAsync(Guid id)
    {
        try
        {
            _logger.Information("Resetting hilo guess");
            var hilo = await _hiloRepository.GetByIdAsync(id);
            hilo.GeneratedMysteryNumber = 0;
            await _hiloRepository.UpdateAsync(hilo);
        }
        catch (Exception e)
        {
            _logger.Error(e, "Error resetting hilo guess");
            throw;
        }
    }

    /// <summary>Returns the session with the given id (missing-id behaviour is the repository's).</summary>
    public async Task<HiLoGuess> GetHiLoGuessAsync(Guid id)
    {
        try
        {
            _logger.Information("Getting hilo guess");
            var hilo = await _hiloRepository.GetByIdAsync(id);
            return hilo;
        }
        catch (Exception e)
        {
            _logger.Error(e, "Error getting hilo guess");
            throw;
        }
    }
}
} |
/*
* Copyright (C) 2016 John Li.
*
* Contact: John Li <jatsmulator(at)gmail.com>
*
* PJRCS is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* PJRCS is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with PJRCS; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#ifndef PJMSRP_HEADER_CONTENT_TYPE_H_
#define PJMSRP_HEADER_CONTENT_TYPE_H_
#include "pjmsrp/pjmsrp_config.h"
#include "pjmsrp/pjmsrp_header.h"
#include "pjmsrp/pjmsrp_ragel_state.h"
PJMSRP_BEGIN_DECLS
/// @struct pjmsrp_header_content_type_s
///
/// @brief MSRP header 'Content-Type': a media type plus optional parameters.
///
/// @par ABNF= Content-Type = "Content-Type:" SP media-type
/// media-type = type "/" subtype *( ";" gen-param )
/// type = token
/// subtype = token
/// gen-param = pname [ "=" pval ]
/// pname = token
/// pval = token / quoted-string
///
typedef struct pjmsrp_header_content_type_s {
    pj_pool_t* pool;        /* Pool from which this header's memory is taken. */
    pj_str_t value;         /* The media-type value, e.g. "text/plain". */
    pj_param_list_t params; /* Optional ";pname[=pval]" parameters. */
}pjmsrp_header_content_type_t;

/* Create a header from an explicit value and parameter list (allocated from pool). */
PJ_DECL(pjmsrp_header_content_type_t*) pjmsrp_header_content_type_create(pj_pool_t *pool, pj_str_t* value, pj_param_list_t* params);
/* Create a header initialised with library defaults. */
PJ_DECL(pjmsrp_header_content_type_t*) pjmsrp_header_content_type_create_default(pj_pool_t *pool);
/* Parse `size` bytes of wire data per the ABNF above; returns NULL semantics per the .c file. */
PJ_DECL(pjmsrp_header_content_type_t*) pjmsrp_header_content_type_parse(pj_pool_t *pool, const char *data, pj_size_t size);
/* Serialize the header into `output`; exact formatting is defined in the implementation. */
PJ_DECL(int) pjmsrp_header_content_type_tostring(const pjmsrp_header_content_type_t *ContentType, pj_str_t* output);
PJMSRP_END_DECLS
#endif /* PJMSRP_HEADER_CONTENT_TYPE_H_ */
import unittest
from calculator import add, subtract, multiply, divide
class TestCalculator(unittest.TestCase):
    """Unit tests for the four arithmetic helpers in the calculator module."""

    def test_add(self):
        """add() handles positive, negative and zero operands."""
        for args, expected in [((5, 3), 8), ((-1, 1), 0), ((0, 0), 0)]:
            with self.subTest(args=args):
                self.assertEqual(add(*args), expected)

    def test_subtract(self):
        """subtract() handles positive, negative and zero operands."""
        for args, expected in [((5, 3), 2), ((-1, 1), -2), ((0, 0), 0)]:
            with self.subTest(args=args):
                self.assertEqual(subtract(*args), expected)

    def test_multiply(self):
        """multiply() covers positive, negative and zero factors."""
        for args, expected in [((5, 3), 15), ((-1, 1), -1), ((0, 5), 0)]:
            with self.subTest(args=args):
                self.assertEqual(multiply(*args), expected)

    def test_divide(self):
        """divide() returns quotients and raises ValueError on a zero divisor."""
        for args, expected in [((6, 2), 3), ((-6, 2), -3), ((0, 5), 0)]:
            with self.subTest(args=args):
                self.assertEqual(divide(*args), expected)
        with self.assertRaises(ValueError):
            divide(5, 0)
# Run the suite when this file is executed directly (e.g. `python test_calculator.py`).
if __name__ == "__main__":
    unittest.main()
"use client";
import Link from "next/link";
import React, { useState, forwardRef, useEffect } from "react";
import { signIn } from "next-auth/react";
import Snackbar from "@mui/material/Snackbar";
import MuiAlert from "@mui/material/Alert";
import { useRouter } from "next/navigation";
// forwardRef wrapper: MUI's Snackbar attaches a transition ref to its child,
// so the Alert must forward that ref down to MuiAlert. Incoming props are
// spread last so callers can override elevation/variant.
const Alert = forwardRef(function Alert(props, ref) {
  return <MuiAlert elevation={6} ref={ref} variant="filled" {...props} />;
});
const Page = () => {
const [email, setEmail] = useState("");
const [password, setPassword] = useState("");
const [message, setMessage] = useState("");
const [open, setOpen] = useState(false);
const router = useRouter();
const handleClose = (event, reason) => {
if (reason === "clickaway") {
return;
}
setOpen(false);
};
useEffect(() => {
if (open) {
const timer = setTimeout(() => {
setOpen(false);
}, 5000);
return () => clearTimeout(timer);
}
}, [open]);
const handleSubmit = async (e) => {
e.preventDefault();
try {
const data = await signIn("credentials", {
email,
password,
redirect: false,
});
console.log(data.error);
if (data.ok) {
router.push("/main");
} else {
setOpen(true);
setMessage(data.error);
}
} catch (error) {
throw new Error(error);
}
};
return (
<>
<section>
<div className="grid h-screen grid-cols-2 ">
<div className="flex items-center justify-center ">
<div className="w-full pl-[85px] pr-[75px]">
<h1 className="text-center text-[50px] pb-3 font-semibold">Welcome...</h1>
<h1 className="pb-[47px] text-[#000000] text-[32px] font-medium">Log In</h1>
<div>
<form onSubmit={handleSubmit}>
<div className="pb-[33px]">
<input name="email" type="email" placeholder="Email Address" className="w-full outline-none border bg-transparent p-4 rounded-sm placeholder:text-[#9096B2]" value={email} onChange={(e) => setEmail(e.target.value)} />
</div>
<div className="pb-[33px]">
<input type="password" placeholder="Password" className="w-full outline-none border bg-transparent p-4 rounded-sm placeholder:text-[#9096B2]" value={password} onChange={(e) => setPassword(e.target.value)} />
</div>
<div className="pb-[23px]">
<Link className="text-[17px] text-[#9096B2]" href={"#"}>
Forgot your password?
</Link>
</div>
<div className="pb-7">
<button className="w-full py-[14px] text-[17px] text-white bg-[#FB2E86] font-bold rounded-[3px] hover:bg-purple transition duration-300" type="submit">
Sign In
</button>
</div>
<div className="text-center ">
<p className="text-[#9096B2] text-[17px]">
Don't have an Account?
<Link className="text-[#101750] " href={"/register"}>
Create account
</Link>
</p>
</div>
</form>
</div>
</div>
</div>
<div className="bg-purple">
<div className="w-full h-full" style={{ backgroundImage: "url('/img/login-page.png')", backgroundRepeat: "no-repeat", backgroundPosition: "center" }}></div>
</div>
</div>
<Snackbar open={open} autoHideDuration={6000} onClose={handleClose}>
<Alert onClose={handleClose} severity="error" sx={{ width: "100%" }}>
{message} !!!
</Alert>
</Snackbar>
</section>
</>
);
};
export default Page; |
<template>
  <!-- Book viewer: pages are paired two at a time into "hojas" (sheets)
       that flip in 3D around their left edge. -->
  <div id="libro">
    <div id="contenedorHojas" v-if="libro.paginas">
      <!-- One .hoja per pair of pages.
           girada: the sheet has been turned past the current reading centre.
           tapada: the sheet is fully covered by another, so it ignores pointer events.
           zIndex keeps the sheet being moved (hojaMovida) on top. -->
      <div
        class="hoja"
        :class="{ girada: numeroH - 1 < centroHojas, tapada: Math.abs((numeroH-1)-centroHojas)>=1 }"
        v-for="numeroH of Math.ceil(libro.paginas.length / 2)"
        :key="numeroH"
        :style="[
          {
            zIndex:
              numeroH - 1 == hojaMovida
                ? '0'
                : -Math.abs(numeroH - 1 - hojaMovida),
          },
        ]"
      >
        <!-- Front (even slice index) and back (odd index -> .reverso) page of
             this sheet. Mouse drags go through swipePagina/endSwipe; touch
             gestures use iniciaTouch/endTouch instead. -->
        <pagina
          :estaPagina="pagina"
          v-for="(pagina, index) of libro.paginas.slice(
            (numeroH - 1) * 2,
            (numeroH - 1) * 2 + 2
          )"
          :key="pagina.id"
          :class="{ reverso: index % 2 > 0 }"
          :numPagina="index+((numeroH-1)*2)"
          :idLibro="idLibro"
          @mousedown.left.native="swipingPagina = true"
          @mouseup.left.native="endSwipe"
          @mousemove.native="
            swipePagina($event, index % 2 > 0 ? 'atras' : 'adelante', index)
          "
          @mouseleave.native="endSwipe"
          @touchstart.native.prevent="iniciaTouch"
          @touchend.native.prevent="endTouch"
        />
      </div>
    </div>
  </div>
</template>
<script>
import { gql } from "apollo-server-core";
import Pagina from "./Pagina.vue";
// Fragment for a text box on a story page: content, stacking order,
// position/size vectors, audio playback mode and font formatting.
// Exported so sibling components can compose it into their own queries.
export const fragmentoCuadroTexto = gql`
  fragment fragCuadroTexto on CuadroTextoCuento {
    id
    texto
    posicionZeta
    posicion {
      x
      y
    }
    size {
      x
      y
    }
    audio {
      tipoReproduccion
    }
    formato {
      fontSize
      colorLetra
      tipoLetra
      alineacion
    }
  }
`;
// Fragment for an image box on a page. NOTE(review): sinArchivo presumably
// flags a box whose image file is missing — confirm against the schema.
export const fragmentoCuadroImagen = gql`
  fragment fragCuadroImagen on CuadroImagenCuento {
    id
    sinArchivo
    tipoActivacionSecundario
    posicionZeta
    posicion {
      x
      y
    }
    size {
      x
      y
    }
    audio {
      tipoReproduccion
    }
  }
`;
// A complete page: number, background colour, and its text/image boxes.
const fragmentoPagina = gql`
  fragment fragPagina on PaginaCuento {
    id
    numPag
    color
    cuadrosTexto {
      ...fragCuadroTexto
    }
    cuadrosImagen {
      ...fragCuadroImagen
    }
  }
  ${fragmentoCuadroTexto}
  ${fragmentoCuadroImagen}
`;
// Query for one whole book (pages included) by id.
const QUERY_LIBRO = gql`
  query($idLibro: ID!) {
    libro(idLibro: $idLibro) {
      id
      idsEditores
      titulo
      paginas {
        ...fragPagina
      }
      idForo
    }
  }
  ${fragmentoPagina}
`;
// Libro: renders a book as 3D-flipping sheets and handles page navigation
// by mouse drag and by touch swipe.
export default {
  components: { Pagina },
  name: "Libro",
  apollo: {
    // Smart query: loads the book once idLibro is set; cache-and-network
    // serves cached data immediately while refreshing from the server.
    libro: {
      query: QUERY_LIBRO,
      variables() {
        return {
          idLibro: this.idLibro,
        };
      },
      // Do not run the query until a book id has been provided.
      skip() {
        return this.idLibro == null;
      },
      fetchPolicy: "cache-and-network",
    },
  },
  props: {
    idLibro: String,
  },
  data() {
    return {
      // Placeholder until the Apollo query fills it in.
      libro: {
        paginas: null,
      },
      // Index of the sheet currently being moved (gets zIndex 0 in the template).
      hojaMovida: 0,
      // "Reading centre" between sheets; starts before the first sheet.
      // Sheets with index below it render as turned (girada).
      centroHojas: -0.5,
      // True while the left mouse button is held down on a page.
      swipingPagina: false,
      // Net horizontal mouse movement accumulated during the current drag.
      swipeAcumulado: 0,
      // Screen coordinates where the current touch gesture started.
      inicioDeTouch:{
        x:null,
        y:null,
      }
    };
  },
  methods: {
    // Accumulate horizontal drag, counting only movement in the direction
    // that makes sense for the dragged side: leftwards on a front page
    // ('adelante'), rightwards on a back page ('atras').
    swipePagina(e, direccion) {
      if (!this.swipingPagina) return;
      if (
        (e.movementX < 0 && direccion == "adelante") ||
        (e.movementX > 0 && direccion == "atras")
      ) {
        this.swipeAcumulado += e.movementX;
      }
      // if(direccion=='adelante' && e)
    },
    // On mouse-up/leave: drags shorter than 5px count as a click (no page
    // turn); otherwise turn one sheet in the dragged direction, then reset
    // the drag state.
    endSwipe() {
      if (Math.abs(this.swipeAcumulado) < 5) {
        this.swipingPagina = false;
        this.swipeAcumulado = 0;
        return;
      }
      if (this.swipeAcumulado < 0) {
        console.log(`Avanzar una hoja`);
        this.navegarHojas(1);
      } else {
        console.log(`Retroceder una hoja`);
        this.navegarHojas(-1);
      }
      this.swipingPagina = false;
      this.swipeAcumulado = 0;
    },
    // Move the reading centre by `num` sheets (+1 forward, -1 back) and mark
    // the sheet that just moved so it stays on top during its transition.
    navegarHojas(num) {
      this.centroHojas += num;
      if(num<0){
        this.hojaMovida=this.centroHojas+0.5;
      }
      else{
        this.hojaMovida=this.centroHojas-0.5;
      }
    },
    // Record where the touch started ($set keeps the nested object reactive
    // under Vue 2's change detection).
    iniciaTouch(e){
      console.log("Inicio de touch")
      this.$set(this.inicioDeTouch, "x", e.changedTouches[0].screenX);
      this.$set(this.inicioDeTouch, "y", e.changedTouches[0].screenY);
    },
    // Compare where the touch ended with where it started; a horizontal
    // displacement beyond 20px turns one sheet.
    // NOTE(review): a leftward touch swipe (deltaX negative) navigates -1 here,
    // while a leftward mouse drag navigates +1 in endSwipe — the two input
    // methods turn pages in opposite directions; confirm which is intended.
    endTouch(e){
      console.log("End touch");
      if(!this.inicioDeTouch.x || !this.inicioDeTouch.y){
        return
      }
      const umbralSwipe=20;
      var currentX=e.changedTouches[0].screenX;
      var currentY=e.changedTouches[0].screenY;
      var deltaX=currentX-this.inicioDeTouch.x;
      var deltaY=currentY-this.inicioDeTouch.y;
      console.log(`Fin de touch con deltax ${deltaX} y deltaY: ${deltaY}`);
      if(deltaX<-umbralSwipe){
        console.log("Navegar hacia atrás");
        this.navegarHojas(-1);
      }
      else if(deltaX>umbralSwipe){
        this.navegarHojas(1);
        console.log("Navegar hacia adelante");
      }
      this.$set(this.inicioDeTouch, "x", null );
      this.$set(this.inicioDeTouch, "y", null );
    }
  },
};
</script>
<style scoped>
/* Clip the flipping sheets so they never draw outside the viewer. */
#libro{
  overflow: hidden;
}
/* Centred stage for the sheets; perspective gives the 3D flip depth. */
#contenedorHojas {
  width: 138vh;
  height: 92vh;
  position: absolute;
  top: 50%;
  left: 50%;
  transform: translate(-50%, -50%);
  perspective: 4000px;
}
/* One sheet (two pages). It rotates around its left edge (transform-origin
   0% 0%), animated over 0.5s; preserve-3d keeps front/back as real faces. */
.hoja {
  position: absolute;
  left: 50%;
  /* width: 450px;
  height: 600px; */
  height: 92vh;
  width: 69vh;
  perspective-origin: left bottom;
  transform-origin: 0% 0%;
  transition: transform 0.5s;
  transform-style: preserve-3d;
}
/* Sheets hidden behind others must not intercept clicks/touches. */
.tapada{
  pointer-events: none;
}
/* A sheet that has been turned past the reading centre. */
.girada {
  transform: rotateY(-180deg);
}
/* Only the visible face of each sheet receives pointer events. */
.hoja:not(.girada)>.reverso{
  pointer-events: none;
}
.girada>.pagina:not(.reverso){
  pointer-events: none;
}
/* The back page is pre-rotated so it faces outward once the sheet turns. */
.reverso {
  transform: rotateY(180deg);
}
/* Pages stack on the sheet; hidden backfaces complete the flip illusion. */
.pagina {
  position: absolute;
  top: 0px;
  left: 0px;
  backface-visibility: hidden;
  transform-style: preserve-3d;
}
</style>
package com.smarthome.uploadyiyanlogs.es;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.smarthome.uploadyiyanlogs.config.BaseConfig;
import com.smarthome.uploadyiyanlogs.util.CalendarUtils;
import com.smarthome.uploadyiyanlogs.util.EmptyUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpEntity;
import org.springframework.http.HttpHeaders;
import org.springframework.http.ResponseEntity;
import org.springframework.stereotype.Component;
import org.springframework.web.client.RestClientException;
import org.springframework.web.client.RestTemplate;
import java.util.Map;
/**
 * @Author:KUN
 * @Data:2021/7/5 09:27
 * @Description: Queries Elasticsearch for the previous day's login logs and
 *               view logs (daily batch job support).
 * @Version:1.0
 */
@Component
public class EsSearch {

    @Autowired
    private BaseConfig baseConfig;

    private static final Logger logger = LoggerFactory.getLogger(EsSearch.class);

    /**
     * Fetches yesterday's logs of the given type from Elasticsearch.
     * Assumes a single day never exceeds 5000 login/view entries: this is a
     * plain search with size=5000, not a scroll.
     *
     * @param logName index name to search
     * @param time    name of the timestamp field used for the range filter and the sort
     * @return a JsonView holding each hit's _source (with its _id folded in as
     *         "id") plus the total count, or null on a transport error
     */
    public JsonView getLastdayLogs(String logName, String time){
        try {
            JsonView jsonView = new JsonView();
            String url="http://"+baseConfig.getEsAddress()+":"+baseConfig.getEsPort();
            HttpHeaders headers = new HttpHeaders();
            JSONObject queryRoot = new JSONObject();
            url+="/"+logName+"/_search?size=5000";
            JSONObject boolString = new JSONObject();
            JSONArray mustString = new JSONArray();
            JSONArray sortString = new JSONArray();
            // Range filter: [yesterday 00:00, today 00:00) in epoch milliseconds.
            JSONObject todayRange = (JSONObject) newJSONObject("gte",CalendarUtils.getLastday0AmMillisecond());
            todayRange.put("lt",CalendarUtils.get0AmMillisecond());
            mustString.add(newJSONObject("range",newJSONObject(time, todayRange)));
            sortString.add(newJSONObject(time,newJSONObject("order", "desc")));
            boolString.put("must", mustString);
            queryRoot.put("sort",sortString);
            queryRoot.put("query",newJSONObject("bool",boolString));
            //logger.info("ES查询语句:{}",queryRoot.toJSONString());
            // Send the HTTP request.
            RestTemplate restTemplate = new RestTemplate();
            HttpEntity<JSONObject> entity = new HttpEntity<JSONObject>(queryRoot, headers);
            ResponseEntity<JSONObject> responseEntity = restTemplate.postForEntity(url, entity, JSONObject.class);
            // Unpack the result hits.
            JSONObject result = responseEntity.getBody();
            JSONArray jsonHits = result.getJSONObject("hits").getJSONArray("hits");
            for(int i=0;i<jsonHits.size();i++){
                JSONObject jsonobject = jsonHits.getJSONObject(i);
                JSONObject data= JSONObject.parseObject(jsonobject.getString("_source"));
                data.put("id",jsonobject.getString("_id"));
                jsonView.getList().add(data);
            }
            // NOTE(review): on Elasticsearch >= 7 "hits.total" is an object
            // ({value, relation}), not a long — confirm the target ES version.
            jsonView.setNumber(result.getJSONObject("hits").getLong("total"));
            return jsonView;
        } catch (RestClientException e) {
            e.printStackTrace();
            logger.error("=====ES读取异常=====" + e);
            return null;
        }
    }

    /**
     * Scroll search over the "labeldata" index.
     * With an empty scrollId it opens a new 10-minute scroll (batches of
     * 10000 documents that have a non-empty last_time, sorted by attached
     * MAC); with a scrollId it fetches the next batch of the existing scroll.
     *
     * @param scrollId scroll cursor from a previous call, or null/empty to start a new scroll
     * @return a JsonView with this batch's documents, the scroll id for the
     *         next call and the total count, or null on a transport error
     */
    public JsonView essearchScroll(String scrollId){
        try {
            JsonView jsonView = new JsonView();
            //BaseConfig baseConfig = SpringUtil.getBean(BaseConfig.class);
            String url="http://"+baseConfig.getEsAddress()+":"+baseConfig.getEsPort();
            HttpHeaders headers = new HttpHeaders();
            JSONObject queryRoot = new JSONObject();
            if(EmptyUtil.isEmpty(scrollId)){
                // First call: build the filtered, sorted scroll query.
                url+="/labeldata/_search?scroll=10m&size=10000";
                JSONObject boolString = new JSONObject();
                JSONArray mustString = new JSONArray();
                JSONArray mustNotString = new JSONArray();
                JSONArray sortString = new JSONArray();
                mustString.add(newJSONObject("exists",newJSONObject("field", "last_time")));
                mustNotString.add(newJSONObject("term",newJSONObject("last_time.keyword","")));
                sortString.add(newJSONObject("lan_attached_mac.keyword",newJSONObject("order", "asc")));//desc
                boolString.put("must", mustString);
                boolString.put("must_not", mustNotString);
                queryRoot.put("sort",sortString);
                queryRoot.put("query",newJSONObject("bool",boolString));
            }else{
                // Follow-up call: continue the existing scroll cursor.
                url+="/_search/scroll?scroll=10m&scroll_id="+scrollId;
            }
            // Send the HTTP request.
            RestTemplate restTemplate = new RestTemplate();
            HttpEntity<JSONObject> entity = new HttpEntity<JSONObject>(queryRoot, headers);
            ResponseEntity<JSONObject> responseEntity = restTemplate.postForEntity(url, entity, JSONObject.class);
            // Unpack the result hits and remember the scroll cursor.
            JSONObject result = responseEntity.getBody();
            scrollId = result.getString("_scroll_id");
            JSONArray jsonHits = result.getJSONObject("hits").getJSONArray("hits");
            for(int i=0;i<jsonHits.size();i++){
                JSONObject jsonobject = jsonHits.getJSONObject(i);
                JSONObject data= JSONObject.parseObject(jsonobject.getString("_source"));
                data.put("id",jsonobject.getString("_id"));
                jsonView.getList().add(data);
            }
            jsonView.setScrollId(scrollId);
            jsonView.setNumber(result.getJSONObject("hits").getLong("total"));
            return jsonView;
        } catch (RestClientException e) {
            e.printStackTrace();
            logger.error("=====ES读取异常=====" + e);
            return null;
        }
    }

    /**
     * Convenience factory: a JSONObject holding one key/value pair (an empty
     * object when key is null). Declared as raw Map for existing callers; the
     * runtime type is always JSONObject, which is why callers cast it.
     */
    public static <V> Map newJSONObject(String key, V value){
        JSONObject jsonObject =new JSONObject();
        try {
            if(key!=null){
                jsonObject.put(key,value);
            }
        }catch (Exception e){
            // NOTE(review): JSONObject.put does not throw checked exceptions;
            // this swallow likely guards nothing — consider removing the try.
        }
        return jsonObject;
    }
}
#!/usr/bin/env python3
""" module for task 1
"""
from flask import Flask, render_template
from flask_babel import Babel
class Config:
    """Flask-Babel configuration: supported locales and defaults."""

    # Locales the app can serve translations for.
    LANGUAGES = ["en", "fr"]
    # Fallbacks used when no better locale/timezone can be determined.
    BABEL_DEFAULT_LOCALE = "en"
    BABEL_DEFAULT_TIMEZONE = "UTC"
# Create the app, load the Babel settings above, and make every route match
# with or without a trailing slash.
app = Flask(__name__)
app.config.from_object(Config)
app.url_map.strict_slashes = False
# Initialise the Flask-Babel extension against this app.
babel = Babel(app)
@app.route('/')
def get_index():
    """Render the home page from the 1-index.html template."""
    template_name = '1-index.html'
    return render_template(template_name)
# Development entry point: listen on all interfaces, port 5000.
if __name__ == '__main__':
    app.run(host='0.0.0.0', port=5000)
import {
Box,
Button,
CircularProgress,
Paper,
Typography,
} from "@mui/material";
import { Container } from "@mui/system";
import { spawn } from "child_process";
import useContratacao from "data/hooks/pages/useContratacao.page";
import useIsMobile from "data/hooks/useIsMobile";
import { BrawserService } from "data/services/BrawserService";
import { TextFormatService } from "data/services/TextFormatService";
import React, { PropsWithChildren, useEffect } from "react";
import { FormProvider } from "react-hook-form";
import DataList from "UI/components/data-display/DataList/DataList";
import PageTitle from "UI/components/data-display/PageTitle/PageTitle";
import SideInformation from "UI/components/data-display/SideInformation/SideInformation";
import SafeEnvironment from "UI/components/feedback/SafeEnvironment/SafeEnvironment";
import {
UserFormContainer,
PageFormContainer,
} from "UI/components/inputs/UserForm/UserForm";
import BreadCrumb from "UI/components/navigation/BreadCrumb/BreadCrumb";
import Link from "UI/components/navigation/Link/Link";
import CadastroCliente, { LoginCliente } from "./_cadastro-cliente";
import DetalheServico from "./_detalhe-servico";
import InformacoesPagamento from "./_informacoes-pagamento";
// import { Component } from './_contratacao.styled';
/**
 * Contratacao: four-step hiring wizard for a cleaning service.
 *   1. service details  2. client signup/login  3. card payment  4. success.
 * All state and the submit handlers come from the useContratacao hook; this
 * component only lays out the forms for the current `step`.
 *
 * NOTE(review): `cleintForm` (sic) and `BrawserService` (sic) are misspelled
 * in the hook/service they come from — renaming must happen there first.
 */
const Contratacao: React.FC<PropsWithChildren> = () => {
  const {
    step,
    breadcrumbItems,
    serviceForm,
    onServiceFormSubmit,
    servicos,
    hasLogin,
    setHasLogin,
    cleintForm,
    onClientFormSubmit,
    setStep,
    loginForm,
    onLoginFormSubmit,
    loginError,
    paymentForm,
    onPaymentFormSubmit,
    tamanhoCasa,
    tipoLimpeza,
    totalPrice,
    podemosAtender,
  } = useContratacao();
  const isMobile = useIsMobile(),
    dataAtendimento = serviceForm.watch("faxina.data_atendimento");

  // Scroll back to the top whenever the wizard advances to a new step.
  useEffect(() => {
    BrawserService.scrollToTop();
  }, [step]);

  // Service catalogue still loading: show a centred spinner instead of the form.
  if (!servicos || servicos.length < 1) {
    return (
      <Container sx={{ textAlign: "center", my: 10 }}>
        <CircularProgress />
      </Container>
    );
  }
  return (
    <div>
      {!isMobile && <SafeEnvironment />}
      <BreadCrumb
        selected={breadcrumbItems[step - 1]}
        items={breadcrumbItems}
      />
      {/* Mobile order summary, shown during the signup and payment steps. */}
      {isMobile && [2, 3].includes(step) && (
        <DataList
          header={
            <Typography color={"primary"} sx={{ fontWeight: "thin" }}>
              O valor total do serviço é:{" "}
              {TextFormatService.currency(totalPrice)}
            </Typography>
          }
          body={
            <>
              {tipoLimpeza?.nome}
              <br />
              Tamanho: {tamanhoCasa.join(", ")}
              <br />
              Data: {dataAtendimento}
            </>
          }
        />
      )}
      {/* Step-specific page titles. */}
      {step === 1 && <PageTitle title="Nos conte um pouco sobre o serviço!" />}
      {step === 2 && (
        <PageTitle
          title="Precisamos conhecer um pouco sobre você!"
          subtitle={
            !hasLogin ? (
              <span>
                Caso já tenha cadastro,{" "}
                <Button onClick={() => setHasLogin(true)}>clique aqui</Button>
              </span>
            ) : (
              <span>
                Caso não tenha cadastro,{" "}
                <Button onClick={() => setHasLogin(false)}>clique aqui</Button>
              </span>
            )
          }
        />
      )}
      {step === 3 && (
        <PageTitle
          title="Informe os dados do cartão para pagamento"
          subtitle={
            "Será feita uma reserva, mas o valor só será descontado quando você confirmar a presença do/da diarista"
          }
        />
      )}
      <UserFormContainer>
        <PageFormContainer fullWidth={step === 4}>
          <Paper sx={{ p: 4 }}>
            {/* Step 1 form stays mounted (hidden) so its values survive
                navigating between steps. */}
            <FormProvider {...serviceForm}>
              <form
                onSubmit={serviceForm.handleSubmit(onServiceFormSubmit)}
                hidden={step !== 1}
              >
                <DetalheServico
                  servicos={servicos}
                  podemosAtender={podemosAtender}
                  comodos={tamanhoCasa.length}
                />
              </form>
            </FormProvider>
            {/* Step 2: signup form (hidden when the user chose to log in). */}
            <FormProvider {...cleintForm}>
              <form
                onSubmit={cleintForm.handleSubmit(onClientFormSubmit)}
                hidden={step !== 2 || hasLogin}
              >
                <CadastroCliente onBack={() => setStep(1)} />
              </form>
            </FormProvider>
            {/* Step 2 alternative: login form for returning clients. */}
            {step === 2 && hasLogin && (
              <FormProvider {...loginForm}>
                <form onSubmit={loginForm.handleSubmit(onLoginFormSubmit)}>
                  {loginError && (
                    <Typography color={"error"} align={"center"} sx={{ mb: 2 }}>
                      {loginError}
                    </Typography>
                  )}
                  <LoginCliente onBack={() => setStep(1)} />
                </form>
              </FormProvider>
            )}
            {/* Step 3: payment details. */}
            {step === 3 && (
              <FormProvider {...paymentForm}>
                <form onSubmit={paymentForm.handleSubmit(onPaymentFormSubmit)}>
                  <InformacoesPagamento />
                </form>
              </FormProvider>
            )}
            {/* Step 4: confirmation screen. */}
            {step === 4 && (
              <Box sx={{ textAlign: "center" }}>
                <Typography sx={{ fontSize: "82px" }} color={"secondary"}>
                  <i className="twf-check-circle" />
                </Typography>
                <Typography
                  sx={{ fontSize: "22px", pb: 3 }}
                  color={"secondary"}
                >
                  Pagamento realizado com sucesso!
                </Typography>
                <Typography
                  sx={{ mb: 3, maxWidth: "410px", mx: "auto" }}
                  color={"textSecondary"}
                >
                  Sua diária foi paga com sucesso! Já estamos procurando o(a)
                  melhor profissional para atender sua residência. Caso
                  nenhum(a) profissional seja encontrado(a), devolvemos seu
                  dinheiro automaticamente 24 horas antes da data agendada. Você
                  também pode cancelar a sua diária sem nenhuma multa até 24
                  horas antes da hora do agendamento.
                </Typography>
                <Link
                  href="/diarias"
                  Component={Button}
                  mui={{ color: "secondary", variant: "contained" }}
                >
                  Ir para minhas diárias
                </Link>
              </Box>
            )}
          </Paper>
          {/* Desktop sidebar summary (hidden on the confirmation step). */}
          {!isMobile && step !== 4 && (
            <SideInformation
              title="Detalhes"
              items={[
                {
                  title: "Tipo",
                  descricao: [tipoLimpeza?.nome],
                  icon: "twf-check-circle",
                },
                {
                  title: "Tamanho",
                  descricao: tamanhoCasa,
                  icon: "twf-check-circle",
                },
                {
                  title: "Data",
                  descricao: [dataAtendimento as string],
                  icon: "twf-check-circle",
                },
              ]}
              footer={{
                text: TextFormatService.currency(totalPrice),
                icon: "twf-credit-card",
              }}
            />
          )}
        </PageFormContainer>
      </UserFormContainer>
    </div>
  );
};
export default Contratacao;
import React from 'react';
import PropTypes from 'prop-types';
import s from './ContactItem.module.css';
const ContactItem = ({ name, number, onDeleteContact }) => (
<div className={s.item}>
<h3>{name}</h3>
<p>{number}</p>
<button onClick={() => onDeleteContact(name)}>Delete</button>
</div>
);
ContactItem.propTypes = {
name: PropTypes.string.isRequired,
number: PropTypes.string.isRequired,
onDeleteContact: PropTypes.func.isRequired,
};
export default ContactItem; |
package com.leetcode.random3;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Queue;
import java.util.Set;
public class WordLadder2 {

    /**
     * LeetCode 126 "Word Ladder II": find all shortest transformation
     * sequences from beginWord to endWord, changing one letter at a time,
     * where every intermediate word must appear in wordList.
     *
     * Strategy: a BFS first records each word's shortest distance from
     * beginWord and each word's neighbour set; a DFS then only follows edges
     * that increase the distance by exactly 1, which confines it to shortest
     * paths.
     */
    class Solution {
        // Length (in words) of the shortest ladder, set when BFS reaches endWord.
        int min = Integer.MAX_VALUE;
        // Candidate ladders collected by the DFS (filtered to size == min at the end).
        Set<List<String>> ans = new HashSet<>();
        // word -> set of dictionary words one letter away (filled lazily by BFS).
        HashMap<String, Set<String>> map = new HashMap<>();
        // word -> first (shortest) BFS distance from beginWord, 1-based.
        HashMap<String, Integer> dist = new HashMap<>();

        public List<List<String>> findLadders(String beginWord, String endWord, List<String> wordList) {
            Set<String> set = new HashSet<>(wordList);
            // No ladder can exist if the target word is not in the dictionary.
            if (!set.contains(endWord)) {
                return new ArrayList<>();
            }
            bfs(beginWord, endWord, set);
            List<String> hold = new ArrayList<>();
            hold.add(beginWord);
            HashSet<String> vis = new HashSet<>();
            dfs(beginWord, endWord, set, hold, vis);
            // Keep only the ladders of minimal length.
            List<List<String>> ret = new ArrayList<>();
            for (List<String> list : ans) {
                if (list.size() == min) {
                    ret.add(list);
                }
            }
            return ret;
        }

        // Level-order walk from beginWord. Queue entries are {word, distance
        // rendered as a String}; dist records only the first (shortest)
        // distance seen for each word. Stops as soon as endWord is dequeued.
        private void bfs(String beginWord, String endWord, Set<String> set) {
            Queue<String[]> queue = new LinkedList();
            HashSet<String> visited = new HashSet<>();
            queue.add(new String[]{beginWord, "1"});
            dist.put(beginWord, 1);
            while (!queue.isEmpty()) {
                String[] curr = queue.poll();
                if (curr[0].equals(endWord)) {
                    min = Integer.parseInt(curr[1]);
                    return;
                }
                if (!visited.contains(curr[0])) {
                    visited.add(curr[0]);
                    Set<String> next = getNext(curr[0], set);
                    map.put(curr[0], next);
                    for (String str : next) {
                        if (!dist.containsKey(str)) {
                            dist.put(str, Integer.parseInt(curr[1]) + 1);
                        }
                        queue.add(new String[]{str, String.valueOf(Integer.parseInt(curr[1]) + 1)});
                    }
                }
            }
        }

        // All dictionary words differing from s by exactly one letter: try
        // every letter at every position and test membership in the set.
        private Set<String> getNext(String s, Set<String> set) {
            Set<String> ret = new HashSet<>();
            for (int i = 0; i < s.length(); i++) {
                StringBuffer b = new StringBuffer(s);
                for (char j = 'a'; j <= 'z'; j++) {
                    b.setCharAt(i, j);
                    if (set.contains(b.toString())) {
                        ret.add(b.toString());
                    }
                    b.setCharAt(i, s.charAt(i));
                }
            }
            return ret;
        }

        // Backtracking walk along BFS-adjacency, following only edges where
        // the neighbour's distance is exactly one more than the current
        // word's; paths longer than `min` are pruned.
        private void dfs(String curr, String end, Set<String> words, List<String> hold, Set<String> vis) {
            if (curr.equals(end)) {
                ans.add(new ArrayList<>(hold));
                return;
            }
            if (map.containsKey(curr)) {
                for (String str : map.get(curr)) {
                    if (!vis.contains(str) && dist.get(str) == dist.get(curr) + 1) {
                        hold.add(str);
                        vis.add(str);
                        if (hold.size() <= min) {
                            dfs(str, end, words, hold, vis);
                        }
                        hold.remove(hold.size() - 1);
                        vis.remove(str);
                    }
                }
            }
        }
    }
    /*
    public static void main(String args[]) {
        WordLadder2 w = new WordLadder2();
        List<String> words = Lists.newArrayList("hot", "dot", "dog", "lot", "log", "cog");
        System.out.println(w.findLadders("hit", "cog", words));
    }
    */
}
---
title: "\"[Updated] 2024 Approved Deciphering YouTube’s Profit for A Mil of Viewers\""
date: 2024-06-05T16:08:03.639Z
updated: 2024-06-06T16:08:03.639Z
tags:
- ai video
- ai youtube
categories:
- ai
- youtube
description: "\"This Article Describes [Updated] 2024 Approved: Deciphering YouTube’s Profit for A Mil of Viewers\""
excerpt: "\"This Article Describes [Updated] 2024 Approved: Deciphering YouTube’s Profit for A Mil of Viewers\""
keywords: "YouTube Revenue per MViewers,Viewer to Profit Ratio YouTubE,Content Earning Per MillViews,Gain Analysis by MillViewerCount,Viewership Income Estimation,Average Earnings From Million Views,MillViewerContentProfitInsight"
thumbnail: https://thmb.techidaily.com/bcb4dab5fca23c5552c696b1f9621ccec9d5240a378ece9f04f489b258c09834.jpg
---
## Deciphering YouTube’s Profit for A Mil of Viewers
How much does YouTube pay for 1 million views? As a YouTuber, you become a business, and it helps to know the YouTube views to money earned.
If you are trying to earn a living on YouTube, one of the most excellent marks of a successful creator is often earning 1 million views on the platform ([click here for tips on how to do that](https://www.filmora.io/community-blog/24-smart-ways-that-actually-work---how-to-grow-309.html)). It usually serves as a benchmark for a time at which a channel is relatively sustainable. However, rather than meaning a YouTuber has made it big financially, reaching 1 million views is more likely to say they can expect to _start_ making real money.
When you hit 1 million views on any video on YouTube, you'll have a nice paycheck. You'll likely have to hit 1 million views on at least a few other videos before you could consider quitting your full-time job and doing YouTube as your primary source of income. This article will explore what 1 million views mean for your YouTube channel. We will look more into how revenue is calculated on YouTube and what you can expect to earn-out of a video with 1 million views.
#### In this article
01 [$2000 for 1 million views](#part1)
02 [How is the revenue calculated?](#part2)
03 [CPMs and CPCs](#part3)
04 [How monetization is changing](#part4)
## $2000 for 1 Million Views
In [a case study performed by Standupbits](https://www.fastcompany.com/3018123/a-million-youtube-views-wont-pay-your-rent-but-tubestart-could) and Josef Holm, a YouTube channel is created with over 3500 comedy clips that a comedian and stand up actor had put together over the years. The YouTube clips took extensive time to upload, and the library was prevalent. The YouTube ad revenue only equated to around $2000.
Although StandUpBits had uploaded thousands of clips and received over 1 million views on their channel, their library was only able to earn around $2000 from the ad revenue sharing. It's estimated the group had spent approximately $25,000 to finish off the clips, edit them, and upload them, which means they invested far more in the channel than they earned.
If you are thinking about a career on YouTube, reaching 1 million views might seem like an excellent target for making a successful page, and it is, but reaching 1 million views doesn’t magically guarantee financial success.
## How Revenue is Calculated
In order to understand how revenue is calculated over the YouTube marketplace, a YouTube user needs to first understand what the partnership program entails. Basically, a YouTube partner has the ability to monetize their videos and serve ads on their content.
In order to join this program you need to be able to commit to uploading ad-friendly (nothing controversial) content that is completely original and high quality and which also adheres to all of the community guidelines and YouTube’s Terms of Service (YouTube actually just introduced a couple of stricter rules - click here for [YouTube Monetization 2018](https://www.filmora.io/community-blog/youtube-monetization-2018---the-new-rules-everyone-hates-331.html)).
As of February 2018, to qualify for ad revenue, the YouTube channel must have:
1\. You will need to have 1,000 subscribers.
2\. You will need to have accumulated 4,000 hours of watch time over the last 12 months.
The AdSense revenue that you earn through YouTube will vary depending on a large number of factors related to the specific ads running and what type of content you produce.
## Understanding CPM and CPCs
### What is CPM?
CPM stands for ‘cost per mille’ or ‘cost per thousand.’
Your CPM is the amount you earn for 1000 ad impressions (1000 viewers clicking on an ad or watching a skippable ad). Your CPM is usually related to the demographics of your users, the content you regularly post, the length of time on the videos that you post, and the gender of your viewers. YouTube CPMs can vary depending on the advertising bid the company has submitted with Google. The lowest bids can be around .33 cents per thousand views, and other advertisers can spend as much as $10 for 1000 views.
For example, gaming is the most prominent genre on YouTube, and there are many gaming-related ads to go around, but most of them are very low-paying (i.e., ads for free online games). Only YouTube gamers with extensive subscriber bases get higher-paying ads.
### What is CPC?
CPC means ‘cost per click.’ A CPC ad interprets an ‘ad impression’ as a click on an ad rather than a viewer merely seeing it. Most YouTube ads are CPC ads, but skippable video ads are CPV (cost per view), and impressions are based on viewers watching the ad instead of skipping it.
## Changes on YouTube and How You Can Earn More
Changes that have affected the way that revenue is calculated are the ability to skip ads and the lower click rates on advertising through YouTube. A huge portion of viewers uses ad blockers, which eliminates them as potential sources of revenue.
Ultimately earning ad revenue is a big game of reaching targeted demographics and [achieving ongoing viewership](https://www.filmora.io/community-blog/how-to-convert-viewers-into-subscribers-%28the-easy-way%29-287.html) for your videos. It does matter where your viewers are going to be viewing from, and the audience that your viewers are in (viewers from areas with more disposable income to spend on the products advertised to them are worth more to advertisers, as are viewers who are interested in higher-cost items).
[Forming relationships with brands](https://www.filmora.io/community-blog/tips-for-youtube-brand-deals-from-marketing-pro-amanda-haines-171.html) and doing product placements or sponsored videos can be a great way to earn more revenue than you will through AdSense. Just make sure the brands you build relationships with are relevant to your audience and that you incorporate the advertising in ways that don’t annoy your viewers.
Use the right keywords in your titles, descriptions, and tags. Without this keyword information, YouTube may pair your video with advertisers that aren’t right for your audience. First, using the wrong keywords won’t put your content in front of the viewers who want to see it, and, second, the ads that run won’t be a good fit and thus are less likely to be clicked on. It's also imperative that you focus on the metadata of every video. It can take some extra time to add in all of this information for each video, but it is well worth it if you are trying to get paid from YouTube.
[Click here for 4 ways to start growing your channel faster.](https://www.filmora.io/community-blog/youtube-subscriber-boost%21-the-4-simplest-tricks-to-grow-your-317.html)
So, how much does YouTube pay for 1 million views? Not as much as you might think. But don't give up, because ad revenue is not the only way to make money through YouTube. Here are [4 alternative ways to make money as a YouTuber](https://tools.techidaily.com/wondershare/filmora/download/).
#### [Wondershare Filmora](https://tools.techidaily.com/wondershare/filmora/download/)
Get started easily with Filmora's powerful performance, intuitive interface, and countless effects!
[Try It Free](https://tools.techidaily.com/wondershare/filmora/download/) [Try It Free](https://tools.techidaily.com/wondershare/filmora/download/) [Try It Free](https://tools.techidaily.com/wondershare/filmora/download/)

02 [How is the revenue calculated?](#part2)
03 [CPMs and CPCs](#part3)
04 [How monetization is changing](#part4)
## $2000 for 1 Million Views
In [a case study performed by Standupbits](https://www.fastcompany.com/3018123/a-million-youtube-views-wont-pay-your-rent-but-tubestart-could) and Josef Holm, a YouTube channel is created with over 3500 comedy clips that a comedian and stand up actor had put together over the years. The YouTube clips took extensive time to upload, and the library was prevalent. The YouTube ad revenue only equated to around $2000.
Although StandUpBits had uploaded thousands of clips and received over 1 million views on their channel, their library was only able to earn around $2000 from the ad revenue sharing. It's estimated the group had spent approximately $25,000 to finish off the clips, edit them, and upload them, which means they invested far more in the channel than they earned.
If you are thinking about a career on YouTube, reaching 1 million views might seem like an excellent target for making a successful page, and it is, but reaching 1 million views doesn’t magically guarantee financial success.
## How Revenue is Calculated
In order to understand how revenue is calculated over the YouTube marketplace, a YouTube user needs to first understand what the partnership program entails. Basically, a YouTube partner has the ability to monetize their videos and serve ads on their content.
In order to join this program you need to be able to commit to uploading ad-friendly (nothing controversial) content that is completely original and high quality and which also adheres to all of the community guidelines and YouTube’s Terms of Service (YouTube actually just introduced a couple of stricter rules - click here for [YouTube Monetization 2018](https://www.filmora.io/community-blog/youtube-monetization-2018---the-new-rules-everyone-hates-331.html)).
As of February 2018, to qualify for ad revenue, the YouTube channel must have:
1\. You will need to have 1,000 subscribers.
2\. You will need to have accumulated 4,000 hours of watch time over the last 12 months.
The AdSense revenue that you earn through YouTube will vary depending on a large number of factors related to the specific ads running and what type of content you produce.
## Understanding CPM and CPCs
### What is CPM?
CPM stands for ‘cost per mille’ or ‘cost per thousand.’
Your CPM is the amount you earn for 1000 ad impressions (1000 viewers clicking on an ad or watching a skippable ad). Your CPM is usually related to the demographics of your users, the content you regularly post, the length of time on the videos that you post, and the gender of your viewers. YouTube CPMs can vary depending on the advertising bid the company has submitted with Google. The lowest bids can be around .33 cents per thousand views, and other advertisers can spend as much as $10 for 1000 views.
For example, gaming is the most prominent genre on YouTube, and there are many gaming-related ads to go around, but most of them are very low-paying (i.e., ads for free online games). Only YouTube gamers with extensive subscriber bases get higher-paying ads.
### What is CPC?
CPC means ‘cost per click.’ A CPC ad interprets an ‘ad impression’ as a click on an ad rather than a viewer merely seeing it. Most YouTube ads are CPC ads, but skippable video ads are CPV (cost per view), and impressions are based on viewers watching the ad instead of skipping it.
## Changes on YouTube and How You Can Earn More
Changes that have affected the way that revenue is calculated are the ability to skip ads and the lower click rates on advertising through YouTube. A huge portion of viewers uses ad blockers, which eliminates them as potential sources of revenue.
Ultimately earning ad revenue is a big game of reaching targeted demographics and [achieving ongoing viewership](https://www.filmora.io/community-blog/how-to-convert-viewers-into-subscribers-%28the-easy-way%29-287.html) for your videos. It does matter where your viewers are going to be viewing from, and the audience that your viewers are in (viewers from areas with more disposable income to spend on the products advertised to them are worth more to advertisers, as are viewers who are interested in higher-cost items).
[Forming relationships with brands](https://www.filmora.io/community-blog/tips-for-youtube-brand-deals-from-marketing-pro-amanda-haines-171.html) and doing product placements or sponsored videos can be a great way to earn more revenue than you will through AdSense. Just make sure the brands you build relationships with are relevant to your audience and that you incorporate the advertising in ways that don’t annoy your viewers.
Use the right keywords in your titles, descriptions, and tags. Without this keyword information, YouTube may pair your video with advertisers that aren’t right for your audience. First, using the wrong keywords won’t put your content in front of the viewers who want to see it, and, second, the ads that run won’t be a good fit and thus are less likely to be clicked on. It's also imperative that you focus on the metadata of every video. It can take some extra time to add in all of this information for each video, but it is well worth it if you are trying to get paid from YouTube.
[Click here for 4 ways to start growing your channel faster.](https://www.filmora.io/community-blog/youtube-subscriber-boost%21-the-4-simplest-tricks-to-grow-your-317.html)
So, how much does YouTube pay for 1 million views? Not as much as you might think. But don't give up, because ad revenue is not the only way to make money through YouTube. Here are [4 alternative ways to make money as a YouTuber](https://tools.techidaily.com/wondershare/filmora/download/).
#### [Wondershare Filmora](https://tools.techidaily.com/wondershare/filmora/download/)
Get started easily with Filmora's powerful performance, intuitive interface, and countless effects!
[Try It Free](https://tools.techidaily.com/wondershare/filmora/download/) [Try It Free](https://tools.techidaily.com/wondershare/filmora/download/) [Try It Free](https://tools.techidaily.com/wondershare/filmora/download/)

02 [How is the revenue calculated?](#part2)
03 [CPMs and CPCs](#part3)
04 [How monetization is changing](#part4)
## $2000 for 1 Million Views
In [a case study performed by Standupbits](https://www.fastcompany.com/3018123/a-million-youtube-views-wont-pay-your-rent-but-tubestart-could) and Josef Holm, a YouTube channel is created with over 3500 comedy clips that a comedian and stand up actor had put together over the years. The YouTube clips took extensive time to upload, and the library was prevalent. The YouTube ad revenue only equated to around $2000.
Although StandUpBits had uploaded thousands of clips and received over 1 million views on their channel, their library was only able to earn around $2000 from the ad revenue sharing. It's estimated the group had spent approximately $25,000 to finish off the clips, edit them, and upload them, which means they invested far more in the channel than they earned.
If you are thinking about a career on YouTube, reaching 1 million views might seem like an excellent target for making a successful page, and it is, but reaching 1 million views doesn’t magically guarantee financial success.
## How Revenue is Calculated
In order to understand how revenue is calculated over the YouTube marketplace, a YouTube user needs to first understand what the partnership program entails. Basically, a YouTube partner has the ability to monetize their videos and serve ads on their content.
In order to join this program you need to be able to commit to uploading ad-friendly (nothing controversial) content that is completely original and high quality and which also adheres to all of the community guidelines and YouTube’s Terms of Service (YouTube actually just introduced a couple of stricter rules - click here for [YouTube Monetization 2018](https://www.filmora.io/community-blog/youtube-monetization-2018---the-new-rules-everyone-hates-331.html)).
As of February 2018, to qualify for ad revenue, the YouTube channel must have:
1\. You will need to have 1,000 subscribers.
2\. You will need to have accumulated 4,000 hours of watch time over the last 12 months.
The AdSense revenue that you earn through YouTube will vary depending on a large number of factors related to the specific ads running and what type of content you produce.
## Understanding CPM and CPCs
### What is CPM?
CPM stands for ‘cost per mille’ or ‘cost per thousand.’
Your CPM is the amount you earn for 1000 ad impressions (1000 viewers clicking on an ad or watching a skippable ad). Your CPM is usually related to the demographics of your users, the content you regularly post, the length of time on the videos that you post, and the gender of your viewers. YouTube CPMs can vary depending on the advertising bid the company has submitted with Google. The lowest bids can be around .33 cents per thousand views, and other advertisers can spend as much as $10 for 1000 views.
For example, gaming is the most prominent genre on YouTube, and there are many gaming-related ads to go around, but most of them are very low-paying (i.e., ads for free online games). Only YouTube gamers with extensive subscriber bases get higher-paying ads.
### What is CPC?
CPC means ‘cost per click.’ A CPC ad interprets an ‘ad impression’ as a click on an ad rather than a viewer merely seeing it. Most YouTube ads are CPC ads, but skippable video ads are CPV (cost per view), and impressions are based on viewers watching the ad instead of skipping it.
## Changes on YouTube and How You Can Earn More
Changes that have affected the way that revenue is calculated are the ability to skip ads and the lower click rates on advertising through YouTube. A huge portion of viewers uses ad blockers, which eliminates them as potential sources of revenue.
Ultimately earning ad revenue is a big game of reaching targeted demographics and [achieving ongoing viewership](https://www.filmora.io/community-blog/how-to-convert-viewers-into-subscribers-%28the-easy-way%29-287.html) for your videos. It does matter where your viewers are going to be viewing from, and the audience that your viewers are in (viewers from areas with more disposable income to spend on the products advertised to them are worth more to advertisers, as are viewers who are interested in higher-cost items).
[Forming relationships with brands](https://www.filmora.io/community-blog/tips-for-youtube-brand-deals-from-marketing-pro-amanda-haines-171.html) and doing product placements or sponsored videos can be a great way to earn more revenue than you will through AdSense. Just make sure the brands you build relationships with are relevant to your audience and that you incorporate the advertising in ways that don’t annoy your viewers.
Use the right keywords in your titles, descriptions, and tags. Without this keyword information, YouTube may pair your video with advertisers that aren’t right for your audience. First, using the wrong keywords won’t put your content in front of the viewers who want to see it, and, second, the ads that run won’t be a good fit and thus are less likely to be clicked on. It's also imperative that you focus on the metadata of every video. It can take some extra time to add in all of this information for each video, but it is well worth it if you are trying to get paid from YouTube.
[Click here for 4 ways to start growing your channel faster.](https://www.filmora.io/community-blog/youtube-subscriber-boost%21-the-4-simplest-tricks-to-grow-your-317.html)
So, how much does YouTube pay for 1 million views? Not as much as you might think. But don't give up, because ad revenue is not the only way to make money through YouTube. Here are [4 alternative ways to make money as a YouTuber](https://tools.techidaily.com/wondershare/filmora/download/).
#### [Wondershare Filmora](https://tools.techidaily.com/wondershare/filmora/download/)
Get started easily with Filmora's powerful performance, intuitive interface, and countless effects!
[Try It Free](https://tools.techidaily.com/wondershare/filmora/download/) [Try It Free](https://tools.techidaily.com/wondershare/filmora/download/) [Try It Free](https://tools.techidaily.com/wondershare/filmora/download/)

02 [How is the revenue calculated?](#part2)
03 [CPMs and CPCs](#part3)
04 [How monetization is changing](#part4)
## $2000 for 1 Million Views
In [a case study performed by Standupbits](https://www.fastcompany.com/3018123/a-million-youtube-views-wont-pay-your-rent-but-tubestart-could) and Josef Holm, a YouTube channel is created with over 3500 comedy clips that a comedian and stand up actor had put together over the years. The YouTube clips took extensive time to upload, and the library was prevalent. The YouTube ad revenue only equated to around $2000.
Although StandUpBits had uploaded thousands of clips and received over 1 million views on their channel, their library was only able to earn around $2000 from the ad revenue sharing. It's estimated the group had spent approximately $25,000 to finish off the clips, edit them, and upload them, which means they invested far more in the channel than they earned.
If you are thinking about a career on YouTube, reaching 1 million views might seem like an excellent target for making a successful page, and it is, but reaching 1 million views doesn’t magically guarantee financial success.
## How Revenue is Calculated
In order to understand how revenue is calculated over the YouTube marketplace, a YouTube user needs to first understand what the partnership program entails. Basically, a YouTube partner has the ability to monetize their videos and serve ads on their content.
In order to join this program you need to be able to commit to uploading ad-friendly (nothing controversial) content that is completely original and high quality and which also adheres to all of the community guidelines and YouTube’s Terms of Service (YouTube actually just introduced a couple of stricter rules - click here for [YouTube Monetization 2018](https://www.filmora.io/community-blog/youtube-monetization-2018---the-new-rules-everyone-hates-331.html)).
As of February 2018, to qualify for ad revenue, the YouTube channel must have:
1\. You will need to have 1,000 subscribers.
2\. You will need to have accumulated 4,000 hours of watch time over the last 12 months.
The AdSense revenue that you earn through YouTube will vary depending on a large number of factors related to the specific ads running and what type of content you produce.
## Understanding CPM and CPCs
### What is CPM?
CPM stands for ‘cost per mille’ or ‘cost per thousand.’
Your CPM is the amount you earn for 1000 ad impressions (1000 viewers clicking on an ad or watching a skippable ad). Your CPM is usually related to the demographics of your users, the content you regularly post, the length of time on the videos that you post, and the gender of your viewers. YouTube CPMs can vary depending on the advertising bid the company has submitted with Google. The lowest bids can be around .33 cents per thousand views, and other advertisers can spend as much as $10 for 1000 views.
For example, gaming is the most prominent genre on YouTube, and there are many gaming-related ads to go around, but most of them are very low-paying (i.e., ads for free online games). Only YouTube gamers with extensive subscriber bases get higher-paying ads.
### What is CPC?
CPC means ‘cost per click.’ A CPC ad interprets an ‘ad impression’ as a click on an ad rather than a viewer merely seeing it. Most YouTube ads are CPC ads, but skippable video ads are CPV (cost per view), and impressions are based on viewers watching the ad instead of skipping it.
## Changes on YouTube and How You Can Earn More
Changes that have affected the way that revenue is calculated are the ability to skip ads and the lower click rates on advertising through YouTube. A huge portion of viewers uses ad blockers, which eliminates them as potential sources of revenue.
Ultimately earning ad revenue is a big game of reaching targeted demographics and [achieving ongoing viewership](https://www.filmora.io/community-blog/how-to-convert-viewers-into-subscribers-%28the-easy-way%29-287.html) for your videos. It does matter where your viewers are going to be viewing from, and the audience that your viewers are in (viewers from areas with more disposable income to spend on the products advertised to them are worth more to advertisers, as are viewers who are interested in higher-cost items).
[Forming relationships with brands](https://www.filmora.io/community-blog/tips-for-youtube-brand-deals-from-marketing-pro-amanda-haines-171.html) and doing product placements or sponsored videos can be a great way to earn more revenue than you will through AdSense. Just make sure the brands you build relationships with are relevant to your audience and that you incorporate the advertising in ways that don’t annoy your viewers.
Use the right keywords in your titles, descriptions, and tags. Without this keyword information, YouTube may pair your video with advertisers that aren’t right for your audience. First, using the wrong keywords won’t put your content in front of the viewers who want to see it, and, second, the ads that run won’t be a good fit and thus are less likely to be clicked on. It's also imperative that you focus on the metadata of every video. It can take some extra time to add in all of this information for each video, but it is well worth it if you are trying to get paid from YouTube.
[Click here for 4 ways to start growing your channel faster.](https://www.filmora.io/community-blog/youtube-subscriber-boost%21-the-4-simplest-tricks-to-grow-your-317.html)
So, how much does YouTube pay for 1 million views? Not as much as you might think. But don't give up, because ad revenue is not the only way to make money through YouTube. Here are [4 alternative ways to make money as a YouTuber](https://tools.techidaily.com/wondershare/filmora/download/).
#### [Wondershare Filmora](https://tools.techidaily.com/wondershare/filmora/download/)
Get started easily with Filmora's powerful performance, intuitive interface, and countless effects!
[Try It Free](https://tools.techidaily.com/wondershare/filmora/download/) [Try It Free](https://tools.techidaily.com/wondershare/filmora/download/) [Try It Free](https://tools.techidaily.com/wondershare/filmora/download/)

<ins class="adsbygoogle"
style="display:block"
data-ad-format="autorelaxed"
data-ad-client="ca-pub-7571918770474297"
data-ad-slot="1223367746"></ins>
<ins class="adsbygoogle"
style="display:block"
data-ad-format="autorelaxed"
data-ad-client="ca-pub-7571918770474297"
data-ad-slot="1223367746"></ins>
## The Power of Jump Cuts in Engaging Videos
# How To Use Jump Cuts in Your Vlog

##### Richard Bennett
Mar 27, 2024• Proven solutions
A common problem for new YouTubers and vloggers is that they cannot get through a recording without feeling like they have messed up and need to restart. It can be extremely discouraging at first. More experienced vloggers know that mistakes happen and they are no reason to stop recording. Instead of trying to record a perfect clip it is common for vloggers to remove errors from their videos in editing using jump cuts.
## How To Use Jump Cuts in Your Vlog
#### 1\. What is a Jump Cut?
A jump cut is when you jump from one part of your clip to a later part of that same clip, cutting out the section in-between.
In movies or tv shows jump cuts can be distracting. In vlogs it is the exact opposite; jump cuts are used to remove distractions. It is not strange for a YouTube video to contain a lot of jump cuts. If you go and re-watch a video by your favorite vlogger you will probably notice that every once in a while the vlogger is suddenly sitting differently or that something else has suddenly changed. That is a jump cut, and chances are you did not even think twice about it when you first watched the video.
#### 2\. When to use Jump Cuts in your Youtube Video
There are a lot of things you might want to take out of your clips using jump cuts. Here are some examples:
**Repetition**: You may have had to repeat yourself because you misspoke. Or, you might watch your clip back and realize that two sections you intended – perhaps even scripted – are very similar and you only need one.
**Off-Topic Tangents**: Once you get comfortable talking to the camera it is easy to find yourself getting off-topic while recording. These tangents might be funny or feel important, but you should still cut them out of your final video. Rather than scrapping them completely, though, consider giving your tangent its very own video. Successful vloggers post new videos frequently, after all.
**Silence**: Use jump cuts to take out any pauses in your video. Every period of silence is an opportunity for viewers to get distracted and decide to click on something else. Adding music to the background of your video can help with shorter pauses, but cutting out any silence is still a good way to go.
In the video tutorial below, we will share with you some practical and creative jump cuts tips that you can try in video editing. And most of the jump cuts are done with the cutting features in [Filmora](https://tools.techidaily.com/wondershare/filmora/download/). You can download the free trial version below by clicking the Free Download button based on your system.
[](https://tools.techidaily.com/wondershare/filmora/download/)[Download Mac Version](https://images.wondershare.com/filmora/guide/download-btn-mac.jpg)](https://tools.techidaily.com/wondershare/filmora/download/)
#### 3\. How to Make Jump Cuts in Vlog & YouTube Videos
You can make jump cuts in almost every editing program, from free software like Windows Movie Maker to professional programs like Adobe Premiere. Remember no matter which video editing software you are using to edit your videos with jump cut, you need to ensure that the software can scroll through your video clips and audio files frame-by-frame. And some video editors may only allow you to scroll video footage frame by frame. This tutorial is for Filmora Video Editor (Now upgraded to Filmora), but many of the same steps will apply to other software.
[](https://tools.techidaily.com/wondershare/filmora/download/)[Download Mac Version](https://images.wondershare.com/filmora/guide/download-btn-mac.jpg)](https://tools.techidaily.com/wondershare/filmora/download/)
You may also like: [How to trim and cut videos in Windows Movie Maker for Free](https://tools.techidaily.com/wondershare/filmora/download/)
Open up Filmora Video Editor and add a clip into the timeline. Watch it through, and then revisit sections where you think you might want to make cuts.
Find a place in your clip that has silence and use the scissors icon to cut your clip into two. Then start playing your clip and pause as soon as you hear sound.
Make sure your clip is selected, and then go back frame by frame until you hear nothing.
Use the scissors icon to make another cut. You will now have three clips; one before the silence, the silent pause, and the section where sound starts again. Delete the quiet middle clip.
#### 4\. Keep video shorter for more views
The videos that get the most views on YouTube are an average of three minutes long. People will use the length of a video as a reason not to click on it. Viewers are also more likely to get distracted and stop watching longer videos before they are over. That does not mean you should not take as long as you need to properly explain your ideas, just that you should be careful your video is not longer than it needs to be. It is common for youtubers to use only about a third of the clip they record. The rest is removed using jump cuts.
Filmora Video Editor will help you be more creative in video editing while also saving you time. Download it and leave a comment below about how you like it.
[](https://tools.techidaily.com/wondershare/filmora/download/)[](https://tools.techidaily.com/wondershare/filmora/download/)

Richard Bennett
Richard Bennett is a writer and a lover of all things video.
Follow @Richard Bennett
##### Richard Bennett
Mar 27, 2024• Proven solutions
A common problem for new YouTubers and vloggers is that they cannot get through a recording without feeling like they have messed up and need to restart. It can be extremely discouraging at first. More experienced vloggers know that mistakes happen and they are no reason to stop recording. Instead of trying to record a perfect clip it is common for vloggers to remove errors from their videos in editing using jump cuts.
## How To Use Jump Cuts in Your Vlog
#### 1\. What is a Jump Cut?
A jump cut is when you jump from one part of your clip to a later part of that same clip, cutting out the section in-between.
In movies or tv shows jump cuts can be distracting. In vlogs it is the exact opposite; jump cuts are used to remove distractions. It is not strange for a YouTube video to contain a lot of jump cuts. If you go and re-watch a video by your favorite vlogger you will probably notice that every once in a while the vlogger is suddenly sitting differently or that something else has suddenly changed. That is a jump cut, and chances are you did not even think twice about it when you first watched the video.
#### 2\. When to use Jump Cuts in your Youtube Video
There are a lot of things you might want to take out of your clips using jump cuts. Here are some examples:
**Repetition**: You may have had to repeat yourself because you misspoke. Or, you might watch your clip back and realize that two sections you intended – perhaps even scripted – are very similar and you only need one.
**Off-Topic Tangents**: Once you get comfortable talking to the camera it is easy to find yourself getting off-topic while recording. These tangents might be funny or feel important, but you should still cut them out of your final video. Rather than scrapping them completely, though, consider giving your tangent its very own video. Successful vloggers post new videos frequently, after all.
**Silence**: Use jump cuts to take out any pauses in your video. Every period of silence is an opportunity for viewers to get distracted and decide to click on something else. Adding music to the background of your video can help with shorter pauses, but cutting out any silence is still a good way to go.
In the video tutorial below, we will share with you some practical and creative jump cuts tips that you can try in video editing. And most of the jump cuts are done with the cutting features in [Filmora](https://tools.techidaily.com/wondershare/filmora/download/). You can download the free trial version below by clicking the Free Download button based on your system.
[](https://tools.techidaily.com/wondershare/filmora/download/)[Download Mac Version](https://images.wondershare.com/filmora/guide/download-btn-mac.jpg)](https://tools.techidaily.com/wondershare/filmora/download/)
#### 3\. How to Make Jump Cuts in Vlog & YouTube Videos
You can make jump cuts in almost every editing program, from free software like Windows Movie Maker to professional programs like Adobe Premiere. Remember no matter which video editing software you are using to edit your videos with jump cut, you need to ensure that the software can scroll through your video clips and audio files frame-by-frame. And some video editors may only allow you to scroll video footage frame by frame. This tutorial is for Filmora Video Editor (Now upgraded to Filmora), but many of the same steps will apply to other software.
[](https://tools.techidaily.com/wondershare/filmora/download/)[Download Mac Version](https://images.wondershare.com/filmora/guide/download-btn-mac.jpg)](https://tools.techidaily.com/wondershare/filmora/download/)
You may also like: [How to trim and cut videos in Windows Movie Maker for Free](https://tools.techidaily.com/wondershare/filmora/download/)
Open up Filmora Video Editor and add a clip into the timeline. Watch it through, and then revisit sections where you think you might want to make cuts.
Find a place in your clip that has silence and use the scissors icon to cut your clip into two. Then start playing your clip and pause as soon as you hear sound.
Make sure your clip is selected, and then go back frame by frame until you hear nothing.
Use the scissors icon to make another cut. You will now have three clips; one before the silence, the silent pause, and the section where sound starts again. Delete the quiet middle clip.
#### 4\. Keep video shorter for more views
The videos that get the most views on YouTube are an average of three minutes long. People will use the length of a video as a reason not to click on it. Viewers are also more likely to get distracted and stop watching longer videos before they are over. That does not mean you should not take as long as you need to properly explain your ideas, just that you should be careful your video is not longer than it needs to be. It is common for youtubers to use only about a third of the clip they record. The rest is removed using jump cuts.
Filmora video editor will help you to be more creative in video editing and saving your time at the same time. Download and leave a comment below about how do you like it.
[](https://tools.techidaily.com/wondershare/filmora/download/)[](https://tools.techidaily.com/wondershare/filmora/download/)

Richard Bennett
Richard Bennett is a writer and a lover of all things video.
Follow @Richard Bennett
##### Richard Bennett
Mar 27, 2024• Proven solutions
A common problem for new YouTubers and vloggers is that they cannot get through a recording without feeling like they have messed up and need to restart. It can be extremely discouraging at first. More experienced vloggers know that mistakes happen and they are no reason to stop recording. Instead of trying to record a perfect clip it is common for vloggers to remove errors from their videos in editing using jump cuts.
## How To Use Jump Cuts in Your Vlog
#### 1\. What is a Jump Cut?
A jump cut is when you jump from one part of your clip to a later part of that same clip, cutting out the section in-between.
In movies or tv shows jump cuts can be distracting. In vlogs it is the exact opposite; jump cuts are used to remove distractions. It is not strange for a YouTube video to contain a lot of jump cuts. If you go and re-watch a video by your favorite vlogger you will probably notice that every once in a while the vlogger is suddenly sitting differently or that something else has suddenly changed. That is a jump cut, and chances are you did not even think twice about it when you first watched the video.
#### 2\. When to use Jump Cuts in your Youtube Video
There are a lot of things you might want to take out of your clips using jump cuts. Here are some examples:
**Repetition**: You may have had to repeat yourself because you misspoke. Or, you might watch your clip back and realize that two sections you intended – perhaps even scripted – are very similar and you only need one.
**Off-Topic Tangents**: Once you get comfortable talking to the camera it is easy to find yourself getting off-topic while recording. These tangents might be funny or feel important, but you should still cut them out of your final video. Rather than scrapping them completely, though, consider giving your tangent its very own video. Successful vloggers post new videos frequently, after all.
**Silence**: Use jump cuts to take out any pauses in your video. Every period of silence is an opportunity for viewers to get distracted and decide to click on something else. Adding music to the background of your video can help with shorter pauses, but cutting out any silence is still a good way to go.
In the video tutorial below, we will share with you some practical and creative jump cuts tips that you can try in video editing. And most of the jump cuts are done with the cutting features in [Filmora](https://tools.techidaily.com/wondershare/filmora/download/). You can download the free trial version below by clicking the Free Download button based on your system.
[](https://tools.techidaily.com/wondershare/filmora/download/)[Download Mac Version](https://images.wondershare.com/filmora/guide/download-btn-mac.jpg)](https://tools.techidaily.com/wondershare/filmora/download/)
#### 3\. How to Make Jump Cuts in Vlog & YouTube Videos
You can make jump cuts in almost every editing program, from free software like Windows Movie Maker to professional programs like Adobe Premiere. Remember no matter which video editing software you are using to edit your videos with jump cut, you need to ensure that the software can scroll through your video clips and audio files frame-by-frame. And some video editors may only allow you to scroll video footage frame by frame. This tutorial is for Filmora Video Editor (Now upgraded to Filmora), but many of the same steps will apply to other software.
[](https://tools.techidaily.com/wondershare/filmora/download/)[Download Mac Version](https://images.wondershare.com/filmora/guide/download-btn-mac.jpg)](https://tools.techidaily.com/wondershare/filmora/download/)
You may also like: [How to trim and cut videos in Windows Movie Maker for Free](https://tools.techidaily.com/wondershare/filmora/download/)
Open up Filmora Video Editor and add a clip into the timeline. Watch it through, and then revisit sections where you think you might want to make cuts.
Find a place in your clip that has silence and use the scissors icon to cut your clip into two. Then start playing your clip and pause as soon as you hear sound.
Make sure your clip is selected, and then go back frame by frame until you hear nothing.
Use the scissors icon to make another cut. You will now have three clips; one before the silence, the silent pause, and the section where sound starts again. Delete the quiet middle clip.
#### 4\. Keep video shorter for more views
The videos that get the most views on YouTube are an average of three minutes long. People will use the length of a video as a reason not to click on it. Viewers are also more likely to get distracted and stop watching longer videos before they are over. That does not mean you should not take as long as you need to properly explain your ideas, just that you should be careful your video is not longer than it needs to be. It is common for youtubers to use only about a third of the clip they record. The rest is removed using jump cuts.
Filmora video editor will help you to be more creative in video editing and saving your time at the same time. Download and leave a comment below about how do you like it.
[](https://tools.techidaily.com/wondershare/filmora/download/)[](https://tools.techidaily.com/wondershare/filmora/download/)

Richard Bennett
Richard Bennett is a writer and a lover of all things video.
Follow @Richard Bennett
##### Richard Bennett
Mar 27, 2024• Proven solutions
A common problem for new YouTubers and Vlogers is that they cannot get through a recording without feeling like they have messed up and need to restart. It can be extremely discouraging at first. More experienced vloggers know that mistakes happen and they are no reason to stop recording. Instead of trying to record a perfect clip it is common for vloggers to remove errors from their videos in editing using jump cuts.
## How To Use Jump Cuts in Your Vlog
#### 1\. What is a Jump Cut?
A jump cut is when you jump from one part of your clip to a later part of that same clip, cutting out the section in-between.
In movies or tv shows jump cuts can be distracting. In vlogs it is the exact opposite; jump cuts are used to remove distractions. It is not strange for a YouTube video to contain a lot of jump cuts. If you go and re-watch a video by your favorite vlogger you will probably notice that every once in a while the vlogger is suddenly sitting differently or that something else has suddenly changed. That is a jump cut, and chances are you did not even think twice about it when you first watched the video.
#### 2\. When to use Jump Cuts in your Youtube Video
There are a lot of things you might want to take out of your clips using jump cuts. Here are some examples:
**Repetition**: You may have had to repeat yourself because you misspoke. Or, you might watch your clip back and realize that two sections you intended – perhaps even scripted – are very similar and you only need one.
**Off-Topic Tangents**: Once you get comfortable talking to the camera it is easy to find yourself getting off-topic while recording. These tangents might be funny or feel important, but you should still cut them out of your final video. Rather than scrapping them completely, though, consider giving your tangent its very own video. Successful vloggers post new videos frequently, after all.
**Silence**: Use jump cuts to take out any pauses in your video. Every period of silence is an opportunity for viewers to get distracted and decide to click on something else. Adding music to the background of your video can help with shorter pauses, but cutting out any silence is still a good way to go.
In the video tutorial below, we will share with you some practical and creative jump cuts tips that you can try in video editing. And most of the jump cuts are done with the cutting features in [Filmora](https://tools.techidaily.com/wondershare/filmora/download/). You can download the free trial version below by clicking the Free Download button based on your system.
[](https://tools.techidaily.com/wondershare/filmora/download/)[Download Mac Version](https://images.wondershare.com/filmora/guide/download-btn-mac.jpg)](https://tools.techidaily.com/wondershare/filmora/download/)
#### 3\. How to Make Jump Cuts in Vlog & YouTube Videos
You can make jump cuts in almost every editing program, from free software like Windows Movie Maker to professional programs like Adobe Premiere. Remember no matter which video editing software you are using to edit your videos with jump cut, you need to ensure that the software can scroll through your video clips and audio files frame-by-frame. And some video editors may only allow you to scroll video footage frame by frame. This tutorial is for Filmora Video Editor (Now upgraded to Filmora), but many of the same steps will apply to other software.
[](https://tools.techidaily.com/wondershare/filmora/download/)[Download Mac Version](https://images.wondershare.com/filmora/guide/download-btn-mac.jpg)](https://tools.techidaily.com/wondershare/filmora/download/)
You may also like: [How to trim and cut videos in Windows Movie Maker for Free](https://tools.techidaily.com/wondershare/filmora/download/)
Open up Filmora Video Editor and add a clip into the timeline. Watch it through, and then revisit sections where you think you might want to make cuts.
Find a place in your clip that has silence and use the scissors icon to cut your clip into two. Then start playing your clip and pause as soon as you hear sound.
Make sure your clip is selected, and then go back frame by frame until you hear nothing.
Use the scissors icon to make another cut. You will now have three clips; one before the silence, the silent pause, and the section where sound starts again. Delete the quiet middle clip.
#### 4\. Keep video shorter for more views
The videos that get the most views on YouTube are an average of three minutes long. People will use the length of a video as a reason not to click on it. Viewers are also more likely to get distracted and stop watching longer videos before they are over. That does not mean you should not take as long as you need to properly explain your ideas, just that you should be careful your video is not longer than it needs to be. It is common for youtubers to use only about a third of the clip they record. The rest is removed using jump cuts.
Filmora video editor will help you to be more creative in video editing and saving your time at the same time. Download and leave a comment below about how do you like it.
[](https://tools.techidaily.com/wondershare/filmora/download/)[](https://tools.techidaily.com/wondershare/filmora/download/)

Richard Bennett
Richard Bennett is a writer and a lover of all things video.
Follow @Richard Bennett
<ins class="adsbygoogle"
style="display:block"
data-ad-format="autorelaxed"
data-ad-client="ca-pub-7571918770474297"
data-ad-slot="1223367746"></ins>
<ins class="adsbygoogle"
style="display:block"
data-ad-client="ca-pub-7571918770474297"
data-ad-slot="8358498916"
data-ad-format="auto"
data-full-width-responsive="true"></ins>
<span class="atpl-alsoreadstyle">Also read:</span>
<div><ul>
<li><a href="https://facebook-video-share.techidaily.com/new-essential-tags-to-amplify-your-youtube-gaming-channel-for-2024/"><u>[New] Essential Tags to Amplify Your YouTube Gaming Channel for 2024</u></a></li>
<li><a href="https://facebook-video-share.techidaily.com/updated-2024-approved-clear-shots-for-youtube-filmmakers/"><u>[Updated] 2024 Approved Clear Shots for YouTube Filmmakers</u></a></li>
<li><a href="https://facebook-video-share.techidaily.com/2024-approved-youtube-stardom-made-easy-essential-editing-strategies-explored/"><u>2024 Approved YouTube Stardom Made Easy Essential Editing Strategies Explored</u></a></li>
<li><a href="https://facebook-video-share.techidaily.com/updated-why-choose-av1-for-youtube-unlock-potential/"><u>[Updated] Why Choose AV1 for YouTube? – Unlock Potential</u></a></li>
<li><a href="https://facebook-video-share.techidaily.com/what-every-youtuber-should-know-about-live-thumbnails-for-2024/"><u>What Every YouTuber Should Know About Live Thumbnails for 2024</u></a></li>
<li><a href="https://facebook-video-share.techidaily.com/new-in-2024-boost-your-channels-traffic-and-value-with-innovative-hashtags/"><u>[New] In 2024, Boost Your Channels' Traffic & Value with Innovative Hashtags</u></a></li>
<li><a href="https://facebook-video-share.techidaily.com/new-drive-more-viewers-to-your-videos-top-growth-hacks-for-2024/"><u>[New] Drive More Viewers to Your Videos Top Growth Hacks for 2024</u></a></li>
<li><a href="https://facebook-video-share.techidaily.com/updated-exploring-popularly-highlighted-video-remarks-for-2024/"><u>[Updated] Exploring Popularly Highlighted Video Remarks for 2024</u></a></li>
<li><a href="https://smart-video-editing.techidaily.com/new-minitool-movie-maker-a-detailed-review-user-manual-and-alternative-solutions-for-2024/"><u>New Minitool Movie Maker A Detailed Review, User Manual, and Alternative Solutions for 2024</u></a></li>
<li><a href="https://some-skills.techidaily.com/in-2024-the-best-of-both-worlds-2023s-device-agnostic-editors/"><u>In 2024, The Best of Both Worlds 2023’S Device-Agnostic Editors</u></a></li>
<li><a href="https://android-pokemon-go.techidaily.com/in-2024-what-is-the-best-pokemon-for-pokemon-pvp-ranking-on-motorola-g24-power-drfone-by-drfone-virtual-android/"><u>In 2024, What is the best Pokemon for pokemon pvp ranking On Motorola G24 Power? | Dr.fone</u></a></li>
<li><a href="https://extra-tips.techidaily.com/2024-approved-color-cutting-edge-the-basics-of-green-screen-filming-for-newbies/"><u>2024 Approved Color Cutting Edge The Basics of Green Screen Filming for Newbies</u></a></li>
<li><a href="https://digital-screen-recording.techidaily.com/new-in-2024-sneak-peeks-stealthy-video-capture-techniques/"><u>[New] In 2024, Sneak Peeks Stealthy Video Capture Techniques</u></a></li>
<li><a href="https://digital-screen-recording.techidaily.com/new-in-2024-change-saving-spot-for-macs-photos/"><u>[New] In 2024, Change Saving Spot for Mac's Photos</u></a></li>
<li><a href="https://screen-mirror.techidaily.com/8-best-apps-for-screen-mirroring-oppo-a78-5g-pc-drfone-by-drfone-android/"><u>8 Best Apps for Screen Mirroring Oppo A78 5G PC | Dr.fone</u></a></li>
<li><a href="https://remote-screen-capture.techidaily.com/updated-2024-approved-acclaimed-pc-emulators-for-vintage-ps1-titles/"><u>[Updated] 2024 Approved Acclaimed PC Emulators for Vintage PS1 Titles</u></a></li>
<li><a href="https://video-screen-grab.techidaily.com/new-in-2024-unlocking-potential-how-to-maximize-whiteboards-in-zoom-meets/"><u>[New] In 2024, Unlocking Potential How to Maximize Whiteboards in Zoom Meets</u></a></li>
</ul></div> |
<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8">
<title>19.相对定位</title>
<style type="text/css">
.box1{
width: 200px;
height: 200px;
background-color: red;
}
/*
 * Positioning:
 * - Positioning means placing a given element anywhere on the page;
 *   the position property enables it and the offset properties move the element.
 *
 * - Possible values:
 *   static:   the default; positioning is not enabled for the element
 *   relative: the element uses relative positioning
 *   absolute: the element uses absolute positioning
 *   fixed:    the element uses fixed positioning (a special form of absolute positioning)
 * */
.box2{
width: 200px;
height: 200px;
background-color: yellow;
/* Once the element's position is set to any non-static value,
 * the four properties left, right, top and bottom set its offsets:
 *   left:   offset from the left edge of the positioning reference
 *   right:  offset from the right edge of the positioning reference
 *   top:    offset from the top edge of the positioning reference
 *   bottom: offset from the bottom edge of the positioning reference
 *
 * Usually two offsets are enough to position an element —
 * one horizontal and one vertical.
 * */
/*
 * Setting position to relative enables relative positioning.
 * Its characteristics:
 * 1. With relative positioning enabled but no offsets set, the element does not move.
 * 2. The element is positioned relative to its original place in normal flow.
 * 3. A relatively positioned element does NOT leave normal flow.
 * 4. Relative positioning raises the element one stacking level
 *    — i.e. with left:100px; top:200px; the yellow box covers the yellowgreen one.
 * 5. It does not change the element's nature: block stays block, inline stays inline.
 * */
position: relative;
left: 100px;
top: 200px;
}
.box3{
width: 200px;
height: 200px;
background-color: yellowgreen;
}
.s1{
/* Width/height have no effect on inline elements (see point 5 above) */
position: relative;
width: 200px;
height: 200px;
background-color: cyan;
}
</style>
</head>
<body>
<!--
The Emmet abbreviation
div.box$*3+tab
expands to:
<div class="box1"></div>
<div class="box2"></div>
<div class="box3"></div>
-->
<div class="box1"></div>
<div class="box2"></div>
<div class="box3"></div>
<span class="s1">我是一个span标签</span>
</body>
</html> |
@{
// Anonymous object whose 'read'/'write' members hold the NAMES of the
// JavaScript functions below; the EJ2 grid resolves them by string at runtime,
// so the member values must match the function names exactly.
Object filterTemplate = new Object();
filterTemplate = (new { read = "read", write = "write" });
}
<ejs-grid id="Grid" dataSource="@ViewBag.DataSource" allowPaging="true" allowFiltering="true">
<e-grid-columns>
<e-grid-column field="OrderID" headerText="Order ID" isPrimaryKey="true" textAlign="Right" width="120"></e-grid-column>
<e-grid-column field="EmployeeID" headerText="Employee ID" filterBarTemplate="filterTemplate" width="150"></e-grid-column>
<e-grid-column field="Freight" headerText="Freight" format="C2" width="120"></e-grid-column>
<e-grid-column field="ShipCity" headerText="Ship City" width="170"></e-grid-column>
<e-grid-column field="ShipCountry" headerText="Ship Country" width="150"></e-grid-column>
</e-grid-columns>
</ejs-grid>
<script type="text/javascript">
// filterBarTemplate "write" hook: invoked when the EmployeeID filter bar
// cell is rendered. Replaces the default input with a DropDownList whose
// first item ("clear") acts as a sentinel for removing the filter.
function write(args) {
var data = [{ text: "clear", value: "clear" }, { text: "1", value: 1 }, { text: "2", value: 2 }, { text: "3", value: 3 }, { text: "4", value: 4 },
{ text: "5", value: 5 }, { text: "6", value: 6 }, { text: "7", value: 7 }, { text: "8", value: 8 }, { text: "9", value: 9 }
]
var listObj = new ej.dropdowns.DropDownList({
dataSource: data,
placeholder: 'Select EmployeeID',
change: function () { read(args) }
});
listObj.appendTo(args.element);
}
// filterBarTemplate "read" hook: invoked on dropdown change. Applies an
// "equal" filter for the selected value, or clears the column filter when
// the "clear" sentinel is chosen.
function read(args) {
var grid = document.getElementById("Grid").ej2_instances[0]
if (args.element.value == "clear") {
grid.clearFiltering(args.column.field);
args.element.value = ""
} else {
grid.filterByColumn(args.column.field, "equal", parseInt(args.element.value))
}
}
</script> |
<?php
namespace App\Models;
use Carbon\Carbon;
use DateTimeInterface;
use Illuminate\Database\Eloquent\Factories\HasFactory;
use Illuminate\Database\Eloquent\Model;
/**
 * Eloquent model for the `bookings` table: one customer's booking of a
 * travel package on a given date.
 */
class Booking extends Model
{
use HasFactory;
// Explicit table name (matches the default Laravel would infer).
public $table = 'bookings';
// Attributes mutated to Carbon date instances.
// NOTE(review): $dates is deprecated in Laravel 10+ in favour of $casts —
// confirm the framework version before migrating.
protected $dates = [
'from_date',
'created_at',
'updated_at',
'deleted_at',
];
// value => label map for the booking status select input.
public const STATUS_SELECT = [
'pending' => 'Pending',
'confirmed' => 'Confirmed',
'completed' => 'Completed',
];
// Attributes that may be mass-assigned (create()/update()/fill()).
protected $fillable = [
'package_id',
'customer_name',
'customer_phone',
'from_date',
'total_people',
'total_price',
'status',
'created_at',
'updated_at',
'deleted_at',
];
// Serialize all dates uniformly as 'Y-m-d H:i:s' in JSON/array output.
protected function serializeDate(DateTimeInterface $date)
{
return $date->format('Y-m-d H:i:s');
}
// Each booking belongs to exactly one Package via package_id.
public function package()
{
return $this->belongsTo(Package::class, 'package_id');
}
// Accessor: present from_date in the panel's configured display format.
public function getFromDateAttribute($value)
{
return $value ? Carbon::parse($value)->format(config('panel.date_format')) : null;
}
// Mutator: parse from_date from the panel's display format back to Y-m-d for storage.
public function setFromDateAttribute($value)
{
$this->attributes['from_date'] = $value ? Carbon::createFromFormat(config('panel.date_format'), $value)->format('Y-m-d') : null;
}
} |
<template>
<!-- Menu management page: "add" toolbar button plus a table of menus
     with per-row edit/delete actions. Handlers live in the script block. -->
<div class="menu">
<el-button
style="margin-bottom: 20px"
type="primary"
icon="el-icon-plus"
@click="toAdd"
>
添加菜单
</el-button>
<!-- v-loading shows a spinner while `menus` is being fetched -->
<el-table v-loading="loading" :data="menus" style="width: 100%">
<el-table-column align="center" label="编号" type="index" />
<el-table-column
align="center"
label="菜单名称"
prop="name"
min-width="180"
/>
<el-table-column
align="center"
label="菜单级数"
prop="level"
min-width="180"
/>
<el-table-column
align="center"
label="前端图标"
prop="icon"
min-width="180"
/>
<el-table-column
align="center"
label="排序"
prop="orderNum"
min-width="180"
/>
<!-- Action column: slot-scope exposes the row object to the handlers -->
<el-table-column align="center" label="操作" min-width="180">
<template slot-scope="scope">
<el-button size="mini" @click="handleEdit(scope.row)">编辑</el-button>
<el-button size="mini" type="danger" @click="handleDelete(scope.row)"
>删除</el-button
>
</template>
</el-table-column>
</el-table>
</div>
</template>
<script lang="ts">
import Vue from "vue";
import { getAll, delMenu } from "@/services/menu";
import { Menu } from "@/types";
export default Vue.extend({
name: "MenuIndex",
data() {
return {
loading: false,
menus: []
};
},
methods: {
toAdd() {
this.$router.push("menu/add");
},
handleEdit(item: Menu) {
this.$router.push({
name: "menuEdit",
params: {
id: item.id + "" || ""
}
});
},
handleDelete(item: Menu) {
this.$confirm("是否删除该菜单?").then(async () => {
try {
await delMenu(item.id || "");
this.loadMenus();
this.$message.success("删除成功");
} catch (e) {
this.$message.error(e);
}
});
},
async loadMenus() {
try {
this.loading = true;
this.menus = await getAll();
this.loading = false;
} catch (e) {
this.$message.error(e);
}
}
},
created() {
this.loadMenus();
}
});
</script> |
import 'package:flutter_riverpod/flutter_riverpod.dart';
import 'package:cinemapedia_app/infrastructure/datasources/moviedb_datasource.dart';
import 'package:cinemapedia_app/infrastructure/repositories/movie_repository_impl.dart';
// The repository is immutable, so a plain (non-autodispose) Provider suffices.
final movieRepositoryProvider = Provider((ref) {
// Return a MovieRepositoryImpl backed by a MoviedbDatasource,
// the datasource implementation for The Movie DB API.
return MovieRepositoryImpl(MoviedbDatasource());
});
// In short: this provider hands a single MovieRepositoryImpl instance to the
// widgets that consume it, which use it to fetch movie lists.
import subprocess
import json
# When True, raw subprocess/API responses are printed for troubleshooting.
DEBUG = True
# Base URL of the GitLab v4 REST API.
# NOTE(review): name is misspelled ("ENDOPOINT" -> "ENDPOINT"); kept as-is
# because the gitlab class below references it by this exact name.
GITLAB_API_ENDOPOINT = 'https://gitlab.example.com/api/v4'
class gitlab:
    """Wrapper around the GitLab REST API calls used to cherry-pick a commit.

    Data shared between the Issue/Branches/Commits/Merge Requests API calls is
    cached in ``api_information`` so each piece is fetched from the API only
    once. Expected keys::

        api_information = {
            "mr_title": str (merge request title),
            "issue_id": int (id of the created issue),
            "source_branch": str (development branch name),
            "assignee_id": int (user id),
            "mr_description": str (merge request description),
        }
    """

    # NOTE(review): class-level mutable dict — shared by every instance of
    # this class. If per-instance state is intended, initialise it in __init__.
    api_information = {}

    def __init__(self, url, personal_access_token, source_project_name, source_commit_sha, target_project_name, target_branch_name):
        """Store the connection and cherry-pick parameters for later API calls."""
        self.url = url
        self.personal_access_token = personal_access_token
        self.source_project_name = source_project_name
        self.source_commit_sha = source_commit_sha
        self.target_project_name = target_project_name
        self.target_branch_name = target_branch_name

    def _get_json(self, api_url):
        """Issue an authenticated GET via curl and return the parsed JSON body.

        Shared by the commit-diff and commit-detail lookups, which previously
        duplicated this logic verbatim.

        Args:
            api_url (str): full GitLab API URL to fetch.

        Returns:
            The JSON-decoded response body (list or dict).

        Raises:
            Exception: when the curl subprocess fails or the response
                contains '404' (commit not found).
        """
        header = f'PRIVATE-TOKEN: {self.personal_access_token}'
        try:
            result = subprocess.run(['curl', '--header', header, '--url', api_url], capture_output=True, text=True, check=True)
        except Exception as e:
            if DEBUG:
                print(f'subprocess command error: {e}')
            # BUGFIX: the original message lacked the f prefix, so the
            # {self.source_commit_sha} placeholder was emitted literally.
            raise Exception(f'subprocessコマンドの実行に失敗しました。\nコミットsha番号:{self.source_commit_sha}')
        output = result.stdout
        if DEBUG:
            print(f'Commits API response:\n{output}')
        # NOTE(review): substring match can false-positive on any body that
        # happens to contain "404"; checking the HTTP status code would be
        # more robust — kept to preserve current behavior.
        if '404' in output:
            raise Exception(f'指定のコミットを取得できませんでした。\nコミットsha番号:{self.source_commit_sha}')
        # Decode the JSON response into native Python objects.
        return json.loads(output)

    def get_file_changes_from_commit_sha(self):
        """Return the per-file change list (diff) for ``source_commit_sha``.

        Returns:
            list[dict]: one entry per changed file, e.g.::

                [
                    {
                        "diff": "@@ -71,6 +71,8 @@ ...",
                        "new_path": "doc/update/5.4-to-6.0.md",
                        "old_path": "doc/update/5.4-to-6.0.md",
                        "a_mode": null,
                        "b_mode": "100644",
                        "new_file": false,
                        "renamed_file": false,
                        "deleted_file": false
                    }, ...
                ]

        Raises:
            Exception: when the API call fails or the commit is not found.
        """
        api_url = f'{GITLAB_API_ENDOPOINT}/projects/{self.source_project_name}/repository/commits/{self.source_commit_sha}/diff'
        return self._get_json(api_url)

    def commit_cherrypick_information_for_mr(self):
        """Fetch the commit and fill ``api_information`` with MR title/description.

        Used when cherry-picking a single commit: the MR title is the commit
        title and the description is "<title>\\n<short sha>".

        Raises:
            Exception: when the API call fails or the commit is not found.
        """
        api_url = f'{GITLAB_API_ENDOPOINT}/projects/{self.source_project_name}/repository/commits/{self.source_commit_sha}'
        commit_information = self._get_json(api_url)
        self.api_information['mr_title'] = commit_information['title']
        # BUGFIX: the original nested single quotes inside a single-quoted
        # f-string, which is a SyntaxError on Python < 3.12.
        description = f"{commit_information['title']}\n{commit_information['short_id']}"
        # NOTE(review): the class docstring documents this key as
        # 'mr_description'; kept as 'description' to preserve current
        # behavior — confirm which key downstream consumers read.
        self.api_information['description'] = description
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.