branch_name stringclasses 149
values | text stringlengths 23 89.3M | directory_id stringlengths 40 40 | languages listlengths 1 19 | num_files int64 1 11.8k | repo_language stringclasses 38
values | repo_name stringlengths 6 114 | revision_id stringlengths 40 40 | snapshot_id stringlengths 40 40 |
|---|---|---|---|---|---|---|---|---|
refs/heads/master | <repo_name>ncphillips/py-gol<file_sep>/pong.py
import os
import requests
import json
def main():
    """Post a Slack message complaining that the py-gol tests are green."""
    # Webhook URL is injected via the environment so it never lands in git.
    webhook_url = os.environ["PINGPONGURL"]
    repo_url = "https://github.com/mittonface/py-gol"
    message = "All of the tests are passing on %s. This will not stand." % repo_url
    requests.post(webhook_url, data=json.dumps({'text': message}))


if __name__ == "__main__":
    main()
<file_sep>/README.md
# py-gol
<file_sep>/py_legacy_code_retreat/test_trivia.py
import unittest
class TrivaTest(unittest.TestCase):
pass<file_sep>/tests/test_world.py
import unittest
from unittest.mock import patch
from src.gol import *
class WorldTest(unittest.TestCase):
    """Unit tests for the sparse World container (cells stored as (x, y))."""

    def test_new_world_is_empty(self):
        world = World()
        self.assertTrue(world.empty())

    def test_a_world_with_a_cell_is_not_empty(self):
        world = World()
        world.add_cell(0, 0)
        self.assertFalse(world.empty())

    def test_world_size_corresponds_to_num_cells_added(self):
        world = World()
        world.add_cell(0, 0)
        self.assertEqual(1, world.size())
        world.add_cell(1, 0)
        self.assertEqual(2, world.size())
        world.add_cell(1, 1)
        self.assertEqual(3, world.size())
        # Repeated on purpose: size() must be a pure query with no side effects.
        self.assertEqual(3, world.size())

    def test_adding_cell_to_location_twice_doesnt_increase_size(self):
        # add_cell must be idempotent per location (set semantics).
        world = World()
        world.add_cell(3, 1)
        world.add_cell(3, 1)
        world.add_cell(3, 1)
        self.assertEqual(1, world.size())

    def test_getting_cell_from_empty_location_returns_dead_cell(self):
        # Unoccupied coordinates yield a dead Cell rather than None.
        world = World()
        cell = world.get_cell(5, 2)
        self.assertFalse(cell.is_alive())

    def test_getting_cell_returns_living_cell(self):
        world = World()
        world.add_cell(0, 1)
        cell = world.get_cell(0, 1)
        self.assertTrue(cell.is_alive())

    def test_count_living_neighbours_of_location_vertical_plane(self):
        # Three cells stacked vertically, plus a far-away outlier that
        # must not be counted as anyone's neighbour.
        world = World()
        world.add_cell(0, 0)
        world.add_cell(0, 1)
        world.add_cell(0, 2)
        world.add_cell(10, 10)
        self.assertEqual(1, world.count_neighbours_of(0, 0))
        self.assertEqual(2, world.count_neighbours_of(0, 1))
        self.assertEqual(1, world.count_neighbours_of(0, 2))
        self.assertEqual(0, world.count_neighbours_of(100, 10))

    def test_count_living_neighbours_of_location_horizontal_plane(self):
        # Same shape as the vertical test, rotated onto the x axis.
        world = World()
        world.add_cell(0, 0)
        world.add_cell(1, 0)
        world.add_cell(2, 0)
        world.add_cell(-10, 10)
        self.assertEqual(1, world.count_neighbours_of(0, 0))
        self.assertEqual(2, world.count_neighbours_of(1, 0))
        self.assertEqual(1, world.count_neighbours_of(2, 0))
        self.assertEqual(0, world.count_neighbours_of(-10, 10))
        self.assertEqual(0, world.count_neighbours_of(100, 10))

    def test_here___diagonally_up(self):
        # Diagonal line going up-right; diagonals count as neighbours.
        world = World()
        world.add_cell(0, 0)
        world.add_cell(1, 1)
        world.add_cell(2, 2)
        world.add_cell(-10, 10)
        self.assertEqual(1, world.count_neighbours_of(0, 0))
        self.assertEqual(2, world.count_neighbours_of(1, 1))
        self.assertEqual(1, world.count_neighbours_of(2, 2))
        self.assertEqual(0, world.count_neighbours_of(-10, 10))
        self.assertEqual(0, world.count_neighbours_of(100, 10))

    def test_here___diagonally_down(self):
        # Mirror case in negative coordinates: the grid is unbounded.
        world = World()
        world.add_cell(0, 0)
        world.add_cell(-1, -1)
        world.add_cell(-2, -2)
        world.add_cell(-10, -10)
        self.assertEqual(1, world.count_neighbours_of(0, 0))
        self.assertEqual(2, world.count_neighbours_of(-1, -1))
        self.assertEqual(1, world.count_neighbours_of(-2, -2))
        self.assertEqual(0, world.count_neighbours_of(-10, 10))
        self.assertEqual(0, world.count_neighbours_of(78, 13))
class CellTests(unittest.TestCase):
    """Tests for Cell factories and Conway's survival/birth rules."""

    def test_living_cell_is_alive(self):
        cell = Cell.create_living_cell()
        self.assertTrue(cell.is_alive())

    def test_dead_cell_is_dead(self):
        cell = Cell.create_dead_cell()
        self.assertFalse(cell.is_alive())

    def test_living_cell_with_less_than_2_neighbors_will_not_be_alive(self):
        # Underpopulation: 0 or 1 neighbours kills a living cell.
        cell = Cell.create_living_cell()
        for num_neighbours in range(0, 2):
            self.assertFalse(cell.will_be_alive(num_neighbours))

    def test_living_cell_with_2_or_3_neighbours_will_be_alive(self):
        # Survival: 2 or 3 neighbours keeps a living cell alive.
        cell = Cell.create_living_cell()
        for num_neighbours in range(2, 4):
            self.assertTrue(cell.will_be_alive(num_neighbours))

    def test_living_cell_with_more_than_3_neighbours_will_not_be_alive(self):
        # Overpopulation: 4 through 8 neighbours kills a living cell.
        cell = Cell.create_living_cell()
        for num_neighbours in range(4, 9):
            self.assertFalse(cell.will_be_alive(num_neighbours))

    def test_dead_cell_with_3_neighbours_will_be_alive(self):
        # Reproduction: exactly 3 neighbours brings a dead cell to life.
        cell = Cell.create_dead_cell()
        num_neighbours = 3
        self.assertTrue(cell.will_be_alive(num_neighbours))

    def test_dead_cell_with_less_than_3_neighbours_will_not_be_alive(self):
        cell = Cell.create_dead_cell()
        for num_neighbours in range(0, 3):
            self.assertFalse(cell.will_be_alive(num_neighbours))

    def test_dead_cell_with_more_than_3_neighbours_will_not_be_alive(self):
        cell = Cell.create_dead_cell()
        for num_neighbours in range(4, 9):
            self.assertFalse(cell.will_be_alive(num_neighbours))
class WorldTickTest(unittest.TestCase):
    """Tests for World.tick(), the generation-advance step."""

    def test_empty_world_is_empty_after_tick(self):
        world = World()
        world.tick()
        self.assertTrue(world.empty())

    def test_block_stays_the_same(self):
        # NOTE(review): despite its name this test never calls world.tick(),
        # so it only checks the initial state of a 2x2 block -- confirm
        # whether a tick() call was intended before the assertions.
        world = World()
        world.add_cell(0, 0)
        world.add_cell(0, 1)
        world.add_cell(1, 0)
        world.add_cell(1, 1)
        self.assertTrue(world.get_cell(0, 0).is_alive())
        self.assertTrue(world.get_cell(0, 1).is_alive())
        self.assertTrue(world.get_cell(1, 0).is_alive())
        self.assertTrue(world.get_cell(1, 1).is_alive())
        self.assertEqual(4, world.size())

    def test_blinker(self):
        # Period-2 oscillator: a horizontal 3-cell line becomes vertical
        # after one tick, then horizontal again after a second tick.
        world = World()
        world.add_cell(0, 0)
        world.add_cell(-1, 0)
        world.add_cell(1, 0)
        world.tick()
        self.assertTrue(world.get_cell(0, 0).is_alive())
        self.assertFalse(world.get_cell(-1, 0).is_alive())
        self.assertFalse(world.get_cell(1, 0).is_alive())
        self.assertTrue(world.get_cell(0, -1).is_alive())
        self.assertTrue(world.get_cell(0, 1).is_alive())
        world.tick()
        self.assertTrue(world.get_cell(0, 0).is_alive())
        self.assertTrue(world.get_cell(-1, 0).is_alive())
        self.assertTrue(world.get_cell(1, 0).is_alive())
        self.assertFalse(world.get_cell(0, -1).is_alive())
        self.assertFalse(world.get_cell(0, 1).is_alive())

    def test_world_with_lone_block_is_empty_after_tick(self):
        # A single cell has zero neighbours and dies of underpopulation.
        world = World()
        world.add_cell(0, 0)
        world.tick()
        self.assertTrue(world.empty())

    @patch.object(Cell, 'will_be_alive', return_value=False)
    def test_tick_should_call_will_be_alive(self, method):
        # tick() must delegate the life/death decision to Cell rather than
        # hard-coding the rules itself.
        world = World()
        world.add_cell(0, 0)
        world.tick()
        self.assertTrue(method.called)
<file_sep>/src/gol.py
class World:
    """A sparse, unbounded Game of Life board.

    Living cells are stored as (x, y) tuples in a set, so the world has no
    edges and memory scales with the live population only.
    """

    def __init__(self):
        # Set of (x, y) tuples currently occupied by living cells.
        self.cells = set()

    def size(self):
        """Return the number of living cells."""
        return len(self.cells)

    def empty(self):
        """Return True when no cell is alive."""
        return len(self.cells) == 0

    def add_cell(self, x, y):
        """Mark location (x, y) as alive (idempotent; set semantics)."""
        self.cells.add((x, y))

    def get_cell(self, x, y):
        """Return a living Cell if (x, y) is occupied, else a dead Cell."""
        if (x, y) in self.cells:
            return Cell.create_living_cell()
        return Cell.create_dead_cell()

    def count_neighbours_of(self, x, y):
        """Count living cells among the 8 locations adjacent to (x, y)."""
        neighbours = 0
        for i in range(x - 1, x + 2):
            for j in range(y - 1, y + 2):
                # A cell isn't a neighbour of itself.
                if (i, j) != (x, y) and (i, j) in self.cells:
                    neighbours += 1
        return neighbours

    def tick(self):
        """Advance the world one generation and return the new cell set.

        Fixes the previous stub, which simply cleared the board. Only
        living cells and their immediate neighbours can change state, so
        those locations are the only candidates evaluated; each candidate
        asks its Cell whether it survives (delegating the rules keeps the
        patched `will_be_alive` test meaningful).
        """
        candidates = set()
        for (x, y) in self.cells:
            for i in range(x - 1, x + 2):
                for j in range(y - 1, y + 2):
                    candidates.add((i, j))
        next_generation = set()
        for (x, y) in candidates:
            num_neighbours = self.count_neighbours_of(x, y)
            if self.get_cell(x, y).will_be_alive(num_neighbours):
                next_generation.add((x, y))
        self.cells = next_generation
        return self.cells


class Cell:
    """A cell that knows whether it is alive and how it evolves."""

    def __init__(self, alive):
        # True for a living cell, False for a dead one.
        self.living = alive

    @staticmethod
    def create_living_cell():
        """Factory for a living cell."""
        return Cell(True)

    @staticmethod
    def create_dead_cell():
        """Factory for a dead cell."""
        return Cell(False)

    def is_alive(self):
        """Return True if this cell is alive."""
        return self.living

    # Deprecated camelCase alias kept for backwards compatibility.
    def willBeAlive(self, num_neighbours):
        return self.will_be_alive(num_neighbours)

    def will_be_alive(self, num_neighbours):
        """Conway's rules: a live cell survives with 2 or 3 neighbours;
        a dead cell is born with exactly 3."""
        if self.living:
            return num_neighbours == 2 or num_neighbours == 3
        return num_neighbours == 3
<file_sep>/requirements.txt
nose==1.3.7
requests==2.8.1
<file_sep>/ping.py
import os
import requests
import json
def main():
    """Post a Slack message reporting that someone broke the build."""
    # Webhook URL comes from the environment rather than source control.
    webhook_url = os.environ["PINGPONGURL"]
    repo_url = "https://github.com/mittonface/py-gol"
    payload = {'text': "Some jerk broke the tests on %s. Who do they think they are?" % repo_url}
    requests.post(webhook_url, data=json.dumps(payload))


if __name__ == "__main__":
    main()
| 2b07877b0c7af19a8a32908bc87682033192f2a7 | [
"Markdown",
"Python",
"Text"
] | 7 | Python | ncphillips/py-gol | 8b836ab5bc8c46a237e1e0466252fd4b58307cb1 | f1a3bc0463f7cb99d2ecb62e3aa6a2f7b3cf5f45 |
refs/heads/master | <file_sep>import React, { Component } from "react";
import UserStore from "../Stores/User";
import { observer } from "mobx-react";
import Nav from "./Nav";
import { instance } from "../Stores/User";
import ArticleStore from "../Stores/Article";
class Login extends Component {
constructor(props) {
super(props);
this.state = {
body: "",
title: "",
file: null
};
this.handleChange = this.handleChange.bind(this);
this.handleSubmit = this.handleSubmit.bind(this);
this.handleChangeFile = this.handleChangeFile.bind(this);
}
handleChange(event) {
this.setState({ [event.target.name]: event.target.value });
}
handleChangeFile(event) {
this.setState({ file: event.target.files[0] });
}
handleSubmit(event) {
event.preventDefault();
const formData = new FormData();
formData.append("body", this.state.body);
formData.append("title", this.state.title);
formData.append("image", this.state.file);
const config = {
headers: {
"content-type": "multipart/form-data"
}
};
instance
.post("/articles/add", formData, config)
.then(response => {
ArticleStore.fetchAllArticles();
this.props.history.push("/");
})
.catch(error => {});
}
handleSubmitRegistration(event) {
event.preventDefault();
UserStore.register(this.state);
}
handleLogout(event) {
event.preventDefault();
UserStore.logout();
}
render() {
if (UserStore.user) {
}
return (
<div>
<Nav />
<header
className="masthead"
style={{
backgroundImage:
'url("/about-bg.jpg?h=ca94606afe09dc1190c07f9b89d907e7")'
}}
>
<div className="overlay" />
<div className="container">
<div className="row">
<div className="col-md-10 col-lg-8 mx-auto">
<div className="site-heading">
<h1>Login</h1>
<span className="subheading" />
</div>
</div>
</div>
</div>
</header>
{/* Start: Login Form Clean */}
{!UserStore.user ? (
<>
<div className="login-clean">
<div className="form-group">
<h1>You need to login first</h1>
</div>
</div>
</>
) : (
<div className="login-clean">
<form onSubmit={this.handleSubmit}>
<h2 className="sr-only">Create Article Form</h2>
<div className="form-group">
<input
className="form-control"
type="title"
name="title"
placeholder="title"
value={this.state.title}
onChange={this.handleChange}
/>
</div>
<div className="form-group">
<textarea
rows="4"
col="4"
type="body"
name="body"
placeholder="body"
value={this.state.body}
onChange={this.handleChange}
/>
</div>
<input
type="file"
name="image"
onChange={this.handleChangeFile}
/>
<div className="form-group">
<button className="btn btn-primary btn-block" type="submit">
Create
</button>
</div>
</form>
</div>
)}
{/* End: Login Form Clean */}
<hr />
<footer>
<div className="container">
<div className="row">
<div className="col-md-10 col-lg-8 mx-auto">
<ul className="list-inline text-center">
<li className="list-inline-item">
<span className="fa-stack fa-lg">
<i className="fa fa-circle fa-stack-2x" />
<i className="fa fa-twitter fa-stack-1x fa-inverse" />
</span>
</li>
<li className="list-inline-item">
<span className="fa-stack fa-lg">
<i className="fa fa-circle fa-stack-2x" />
<i className="fa fa-facebook fa-stack-1x fa-inverse" />
</span>
</li>
<li className="list-inline-item">
<span className="fa-stack fa-lg">
<i className="fa fa-circle fa-stack-2x" />
<i className="fa fa-github fa-stack-1x fa-inverse" />
</span>
</li>
</ul>
<p className="text-muted copyright">
Copyright © InfoSecKW 2018
</p>
</div>
</div>
</div>
</footer>
</div>
);
}
}
export default observer(Login);
<file_sep>import React, { Component } from "react";
import UserStore from "../Stores/User";
import { observer } from "mobx-react";
import Nav from "./Nav";
class Login extends Component {
constructor(props) {
super(props);
this.state = {
username: "",
password: ""
};
this.handleChange = this.handleChange.bind(this);
this.handleSubmit = this.handleSubmit.bind(this);
this.handleSubmitRegistration = this.handleSubmitRegistration.bind(this);
}
handleChange(event) {
this.setState({ [event.target.name]: event.target.value });
}
handleSubmit(event) {
event.preventDefault();
UserStore.login(this.state);
}
handleSubmitRegistration(event) {
event.preventDefault();
UserStore.register(this.state);
}
handleLogout(event) {
event.preventDefault();
UserStore.logout();
}
render() {
if (UserStore.user) {
}
return (
<div>
<Nav />
<header
className="masthead"
style={{
backgroundImage:
'url("/about-bg.jpg?h=ca94606afe09dc1190c07f9b89d907e7")'
}}
>
<div className="overlay" />
<div className="container">
<div className="row">
<div className="col-md-10 col-lg-8 mx-auto">
<div className="site-heading">
<h1>Login</h1>
<span className="subheading" />
</div>
</div>
</div>
</div>
</header>
{/* Start: Login Form Clean */}
{UserStore.user ? (
<>
<div className="login-clean">
<div className="form-group">
<h1>Welcome {UserStore.user.username}</h1>
<button className="btn btn-primary" onClick={this.handleLogout}>
Logout
</button>
</div>
</div>
</>
) : (
<div className="login-clean">
<form onSubmit={this.handleSubmit}>
<h2 className="sr-only">Login Form</h2>
<div className="illustration">
<i className="icon ion-ios-navigate" />
</div>
<div className="form-group">
<input
className="form-control"
type="username"
name="username"
placeholder="username"
value={this.state.username}
onChange={this.handleChange}
/>
</div>
<div className="form-group">
<input
className="form-control"
type="<PASSWORD>"
name="password"
placeholder="<PASSWORD>"
value={this.state.password}
onChange={this.handleChange}
/>
</div>
<div className="form-group">
<button className="btn btn-primary btn-block" type="submit">
Log In
</button>
</div>
<a className="forgot" href="#" />
<button
className="btn btn-primary btn-block"
onClick={this.handleSubmitRegistration}
>
Sign Up
</button>
</form>
</div>
)}
{/* End: Login Form Clean */}
<hr />
<footer>
<div className="container">
<div className="row">
<div className="col-md-10 col-lg-8 mx-auto">
<ul className="list-inline text-center">
<li className="list-inline-item">
<span className="fa-stack fa-lg">
<i className="fa fa-circle fa-stack-2x" />
<i className="fa fa-twitter fa-stack-1x fa-inverse" />
</span>
</li>
<li className="list-inline-item">
<span className="fa-stack fa-lg">
<i className="fa fa-circle fa-stack-2x" />
<i className="fa fa-facebook fa-stack-1x fa-inverse" />
</span>
</li>
<li className="list-inline-item">
<span className="fa-stack fa-lg">
<i className="fa fa-circle fa-stack-2x" />
<i className="fa fa-github fa-stack-1x fa-inverse" />
</span>
</li>
</ul>
<p className="text-muted copyright">
Copyright © InfoSecKW 2018
</p>
</div>
</div>
</div>
</footer>
</div>
);
}
}
export default observer(Login);
<file_sep>import React, { Component } from "react";
import { Link } from "react-router-dom";
import UserStore from "../Stores/User";
import { observer } from "mobx-react";
// Fixed top navigation bar shared by all pages; swaps its last item
// between a Login link and a Logout action based on the user store.
class Nav extends Component {
  // Log the user out via the store; the bar re-renders through observer().
  handleLogout(event) {
    event.preventDefault();
    UserStore.logout();
  }

  render() {
    return (
      <nav
        className="navbar navbar-light navbar-expand-lg fixed-top"
        id="mainNav"
      >
        <div className="container">
          <Link className="navbar-brand" to="/">
            InfoSecKW
          </Link>
          {/* Hamburger toggle shown on small screens (Bootstrap collapse). */}
          <button
            data-toggle="collapse"
            data-target="#navbarResponsive"
            className="navbar-toggler"
            aria-controls="navbarResponsive"
            aria-expanded="false"
            aria-label="Toggle navigation"
          >
            <i className="fa fa-bars" />
          </button>
          <div className="collapse navbar-collapse" id="navbarResponsive">
            <ul className="nav navbar-nav ml-auto">
              <li className="nav-item" role="presentation">
                <Link className="nav-link" to="/">
                  Home
                </Link>
              </li>
              <li className="nav-item" role="presentation" />
              <li className="nav-item" role="presentation">
                {/* Authenticated users get Logout; everyone else a Login link. */}
                {UserStore.user ? (
                  <a className="nav-link" href="/" onClick={this.handleLogout}>
                    Logout
                  </a>
                ) : (
                  <Link className="nav-link" to="/login">
                    Login
                  </Link>
                )}
              </li>
            </ul>
          </div>
        </div>
      </nav>
    );
  }
}
// observer() subscribes the component to MobX store changes.
export default observer(Nav);
<file_sep>import { decorate, observable } from "mobx";
import axios from "axios";
import jwt_decode from "jwt-decode";
// Base URL of the backend API (swap to the commented line for local dev).
export const url = "https://api.infoseckw.com/";
// export const url = "http://127.0.0.1:3001/";

// Shared axios instance used by all stores for API calls.
// NOTE(review): Access-Control-Allow-Origin is a *response* header; sending
// it from the client has no effect -- confirm and consider removing.
export const instance = axios.create({
  baseURL: url,
  timeout: 1000,
  headers: {
    "Access-Control-Allow-Origin": "*"
  }
});
// MobX store holding the authenticated user's JWT session.
class UserStore {
  // Decoded JWT payload for the logged-in user, or null when logged out.
  user = null;
  // Raw JWT most recently returned by the login endpoint.
  token = null;

  // POST credentials to /login and install the returned JWT.
  login = async userData => {
    try {
      const res = await instance.post("login", userData);
      const user = res.data;
      this.token = user.token;
      this.setUser(user.token);
    } catch (err) {
      console.error(err.response.data);
    }
  };

  // Persist (or clear) the token and keep the axios auth header in sync.
  // Called with no argument, it logs the user out.
  setUser = token => {
    if (token) {
      localStorage.setItem("myToken", token);
      instance.defaults.headers.common.Authorization = `Bearer ${token}`;
      const decodedUser = jwt_decode(token);
      this.user = decodedUser;
    } else {
      delete instance.defaults.headers.common.Authorization;
      localStorage.removeItem("myToken");
      this.user = null;
    }
  };

  // Create an account, then immediately log in with the same credentials.
  register = async userData => {
    try {
      const res = await instance.post("register", userData);
      this.login(userData);
    } catch (err) {
      console.error(err.response.data);
    }
  };

  // Restore a previous session from localStorage on app start-up.
  checkForToken = () => {
    const token = localStorage.getItem("myToken");
    if (token) {
      console.log("Logged in ");
      this.setUser(token);
    }
  };

  // Logging out is just clearing the stored token/user.
  logout = () => {
    this.setUser();
  };
}

// Make user/token observable so components re-render on auth changes.
decorate(UserStore, {
  user: observable,
  token: observable
});

// Module-level singleton; session restore runs once at import time.
const userStore = new UserStore();
//userStore();
userStore.checkForToken();
export default userStore;
<file_sep>import React, { Component } from "react";
import { observer } from "mobx-react";
import ArticleStore from "../Stores/Article";
import { url } from "../Stores/User";
import Nav from "./Nav";
class Details extends Component {
render() {
let id = this.props.match.params.articleID;
const index = ArticleStore.articles.findIndex(
article => article._id === id
);
console.log(index);
const item = ArticleStore.articles[index];
console.log(item);
if (!item) {
return <></>;
}
return (
<div>
<Nav />
<header
className="masthead"
style={{
backgroundImage:
'url("/post-bg.jpg?h=9b3eae5bf913af77d61c0390cba13bf5")'
}}
>
<div className="overlay" />
<div className="container">
<div className="row">
<div className="col-md-10 col-lg-8 mx-auto">
<div className="post-heading">
<h1>{item.title}</h1>
<h2 className="subheading" />
<span className="meta">
Posted by <a href="#">{item.author}</a>
</span>
</div>
</div>
</div>
</div>
</header>
<article>
<div className="container">
<div className="row">
<div className="col-md-10 col-lg-8 mx-auto">
<img width="400" src={url + "media/?file=" + item.image} />
<p>{item.body}</p>
<blockquote className="blockquote">
<p className="mb-0" />
</blockquote>
<a href="#" />
</div>
</div>
</div>
</article>
<footer>
<div className="container">
<div className="row">
<div className="col-md-10 col-lg-8 mx-auto">
<ul className="list-inline text-center">
<li className="list-inline-item">
<span className="fa-stack fa-lg">
<i className="fa fa-circle fa-stack-2x" />
<i className="fa fa-twitter fa-stack-1x fa-inverse" />
</span>
</li>
<li className="list-inline-item">
<span className="fa-stack fa-lg">
<i className="fa fa-circle fa-stack-2x" />
<i className="fa fa-facebook fa-stack-1x fa-inverse" />
</span>
</li>
<li className="list-inline-item">
<span className="fa-stack fa-lg">
<i className="fa fa-circle fa-stack-2x" />
<i className="fa fa-github fa-stack-1x fa-inverse" />
</span>
</li>
</ul>
<p className="text-muted copyright">
Copyright © InfoSecKW 2018
</p>
</div>
</div>
</div>
</footer>
</div>
);
}
}
export default observer(Details);
<file_sep>import { decorate, observable, action } from "mobx";
import { instance } from "./User";
// MobX store caching the article list fetched from the API.
class ArticleStore {
  // All articles last fetched from GET /articles.
  articles = [];
  article = null;
  // True until the first fetch completes successfully.
  loading = true;

  // GET /articles and replace the local cache.
  fetchAllArticles = async () => {
    try {
      const res = await instance.get("articles");
      this.articles = res.data.articles;
      this.loading = false;
    } catch (err) {
      console.error(err);
    }
  };

  // Return the cached articles whose _id matches (0 or 1 entries expected).
  filter(id) {
    return this.articles.filter(article => article._id === id);
  }
}

decorate(ArticleStore, {
  articles: observable,
  article: observable,
  loading: observable,
  filter: action
});

// Module-level singleton; the initial fetch kicks off at import time.
const articleStore = new ArticleStore();
articleStore.fetchAllArticles();
export default articleStore;
<file_sep>import React, { Component } from "react";
import { url } from "../Stores/User";
import { observer } from "mobx-react";
import ArticleStore from "../Stores/Article";
import { Link } from "react-router-dom";
import Nav from "./Nav";
import UserStore, { instance } from "../Stores/User";
class main extends Component {
constructor(props) {
super(props);
this.handleDeleteArticle = this.handleDeleteArticle.bind(this);
}
handleDeleteArticle(id) {
instance
.post("articles/delete", { id: id })
.then(res => ArticleStore.fetchAllArticles());
}
render() {
let posts = [];
if (!ArticleStore.loading) {
console.log(ArticleStore.articles);
posts = ArticleStore.articles.map(article => (
<>
<div className="post-preview">
<img width="200" src={url + "media/?file=" + article.image} />
<Link to={"/detail/" + article._id}>
<h2 className="post-title">{article.title}</h2>
<h3 className="post-subtitle">{article.body}</h3>
</Link>
<p className="post-meta">
Posted by
{article.author}
</p>
{UserStore.user ? (
UserStore.user.username === article.author ? (
<button
className="btn btn-danger"
onClick={() => this.handleDeleteArticle(article._id)}
>
Delete
</button>
) : null
) : null}
</div>
<hr />
</>
));
}
return (
<div>
<Nav />
<header
className="masthead"
style={{
backgroundImage:
'url("home-bg.jpg?h=ed6236475a1226b743bf65e6f1bebb34")'
}}
>
<div className="overlay" />
<div className="container">
<div className="row">
<div className="col-md-10 col-lg-8 mx-auto">
<div className="site-heading">
<h1>InfoSecKW</h1>
<span className="subheading">Example application</span>
</div>
</div>
</div>
</div>
</header>
<div className="row justify-content-center">
<div className="col align-self-start">
<Link to="/create" className="btn btn-primary">
Create Articles
</Link>
<div className="clearfix" />
</div>
</div>
<div className="container">
<div className="row">
<div className="col-md-10 col-lg-8">
{posts}
<div className="clearfix" />
</div>
</div>
</div>
<footer>
<div className="container">
<div className="row">
<div className="col-md-10 col-lg-8 mx-auto">
<ul className="list-inline text-center">
<li className="list-inline-item">
<span className="fa-stack fa-lg">
<i className="fa fa-circle fa-stack-2x" />
<i className="fa fa-twitter fa-stack-1x fa-inverse" />
</span>
</li>
<li className="list-inline-item">
<span className="fa-stack fa-lg">
<i className="fa fa-circle fa-stack-2x" />
<i className="fa fa-facebook fa-stack-1x fa-inverse" />
</span>
</li>
<li className="list-inline-item">
<span className="fa-stack fa-lg">
<i className="fa fa-circle fa-stack-2x" />
<i className="fa fa-github fa-stack-1x fa-inverse" />
</span>
</li>
</ul>
<p className="text-muted copyright">
Copyright © InfoSecKW 2018
</p>
</div>
</div>
</div>
</footer>
</div>
);
}
}
export default observer(main);
| 805e552714a7537933ea8e77022d58c1453e9dd8 | [
"JavaScript"
] | 7 | JavaScript | smokeme/h4ckme | 222d140ba693b7018a1344de21af336aea4fb647 | 197f3265a591cbf3ce218b1ca519db8784efcf2f |
refs/heads/main | <file_sep>// import { expect as expectCDK, haveResource } from '@aws-cdk/assert';
import * as cdk from '@aws-cdk/core';
test('SQS Queue Created', () => {
const app = new cdk.App();
new cdk.Stack(app, "TestStack");
});<file_sep>import * as cp from '@aws-cdk/aws-codepipeline';
import * as cpa from '@aws-cdk/aws-codepipeline-actions';
import * as cb from '@aws-cdk/aws-codebuild';
import * as cc from '@aws-cdk/aws-codecommit';
import * as s3 from '@aws-cdk/aws-s3';
import * as cdk from '@aws-cdk/core';
// Configuration for FanPipeline.
export interface PipelineProps {
  // Name used for the CodeCommit repository backing the pipeline.
  readonly projectName: string;
}

// Configuration for a Terraform apply stage added to the pipeline.
export interface AzureDeployProps extends cp.CommonAwsActionProps {
  // Logical name of the stage; also used as the CodeBuild project id.
  readonly stageName: string;
  // Artifact containing the Terraform configuration to apply.
  readonly input: cp.Artifact;
}
// CodeBuild-backed pipeline action that installs a pinned Terraform
// release and runs `terraform init` + `terraform apply` on the input artifact.
export class TerraformApplyAction extends cpa.CodeBuildAction {
  constructor(scope: cdk.Construct, props: AzureDeployProps) {
    super({
      ...props,
      // One CodeBuild project per stage, keyed by the stage name.
      project: new cb.PipelineProject(scope, props.stageName, {
        buildSpec: cb.BuildSpec.fromObject({
          version: '0.2',
          phases: {
            install: {
              // Download Terraform 0.11.14 onto the build image.
              commands: [
                'apt install unzip -y',
                'wget https://releases.hashicorp.com/terraform/0.11.14/terraform_0.11.14_linux_amd64.zip',
                'unzip terraform_0.11.14_linux_amd64.zip',
                'mv terraform /usr/local/bin/'
              ],
            },
            pre_build: {
              commands: ['terraform init'],
            },
            build: {
              // -auto-approve: non-interactive apply inside CI.
              commands: ['terraform apply -auto-approve'],
            },
          }
        })
      })
    });
  }
}
export class FanPipeline extends cdk.Construct {
public readonly pipeline: cp.Pipeline;
  // Builds a two-stage pipeline (CodeCommit source -> CodeBuild synth)
  // plus the repository and encrypted artifact bucket it needs.
  constructor(scope: cdk.Construct, id: string, props: PipelineProps) {
    super(scope, id);
    // Source repository is created alongside the pipeline.
    const repo = new cc.Repository(this, `repo_${props.projectName}`, {repositoryName: props.projectName})
    // CodeBuild project that installs deps and synthesizes the CDK app
    // into a CloudFormation template under dist/.
    const buildProject = new cb.PipelineProject(this, 'CdkBuild', {
      buildSpec: cb.BuildSpec.fromObject({
        version: '0.2',
        phases: {
          install: {
            commands: 'npm install',
          },
          build: {
            commands: [
              'npm run build',
              'npm run cdk synth -- -o dist'
            ],
          },
        },
        artifacts: {
          'base-directory': 'dist',
          files: [
            'LambdaStack.template.json',
          ],
        },
      }),
      environment: {
        buildImage: cb.LinuxBuildImage.STANDARD_4_0,
      },
    });
    const sourceOutput = new cp.Artifact();
    const lambdaBuildOutput = new cp.Artifact('LambdaBuildOutput');
    // KMS-encrypted bucket for artifacts passed between stages.
    const artifactBucket = new s3.Bucket(this, "ArtifactBucket", {encryption:s3.BucketEncryption.KMS_MANAGED})
    this.pipeline = new cp.Pipeline(this, 'Pipeline', {
      artifactBucket: artifactBucket,
      stages: [
        {
          stageName: 'Source',
          actions: [
            new cpa.CodeCommitSourceAction({
              actionName: 'CodeCommit_Source',
              repository: repo,
              output: sourceOutput,
            }),
          ],
        },
        {
          stageName: 'Build',
          actions: [
            new cpa.CodeBuildAction({
              actionName: 'Lambda_Build',
              project: buildProject,
              input: sourceOutput,
              outputs: [lambdaBuildOutput],
            })
          ]
        }
      ],
    });
  }
  // Append a deployment stage that runs the given Terraform action.
  addStage(name: string, action: TerraformApplyAction) {
    this.pipeline.addStage({stageName: name, actions: [action]})
  }
}<file_sep># Welcome to your CDK TypeScript Construct Library project!
You should explore the contents of this project. It demonstrates a CDK Construct Library that includes a construct (`Pipeline`)
which contains an Amazon SQS queue that is subscribed to an Amazon SNS topic.
The construct defines an interface (`PipelineProps`) to configure the visibility timeout of the queue.
## Useful commands
```bash
REGION=$(curl -s http://169.254.169.254/latest/dynamic/instance-identity/document | sed -n 's/.*"region" : "\([a-z0-9-]*\)",/\1/p')
docker container prune --force
docker image prune --force
docker volume prune --force
docker build -t fan \
--build-arg DOMAIN_NAME=testdomain \
--build-arg DOMAIN_OWNER=775000485103 `#Only needed if the domain is owned by an external account` \
--build-arg REPOSITORY_NAME=testrepo \
--build-arg REGION=$REGION \
.
docker create --name extract2 fan:latest
docker cp extract2:/app/dist ./dist
```<file_sep>FROM jsii/superchain AS build
ARG DOMAIN_NAME
ARG DOMAIN_OWNER
ARG REPOSITORY_NAME
ARG REGION
COPY . ./app
WORKDIR ./app
RUN curl -s "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip" && unzip -qq awscliv2.zip && ./aws/install
RUN npm install -g jsii-pacmak && npm install && npm run build
RUN jsii-pacmak -t python
RUN jsii-pacmak -t js
RUN jsii-pacmak -t java
# PYTHON
RUN aws codeartifact login --tool twine --domain $DOMAIN_NAME --repository $REPOSITORY_NAME ${DOMAIN_OWNER:+'--domain-owner' $DOMAIN_OWNER} --region $REGION\
&& twine upload --repository codeartifact $(find ./dist/python/*.tar.gz)
# NPM
RUN aws codeartifact login --tool npm --domain $DOMAIN_NAME --repository $REPOSITORY_NAME ${DOMAIN_OWNER:+'--domain-owner' $DOMAIN_OWNER} --region $REGION\
&& packageInfo="$(tar -zxOf $(find ./dist/js/*.tgz) package/package.json)" \
&& ver="$(node -e "console.log(${packageInfo}.version);")" \
&& mod="$(node -e "console.log(${packageInfo}.name);")" \
&& npm publish $(find ./dist/js/*.tgz)
# JAVA/Maven
RUN export pom=$(find ./dist/java -name '*.pom') && echo $pom
RUN MAVEN_PASSWORD=$(aws codeartifact get-authorization-token --domain $DOMAIN_NAME --query authorizationToken --output text ${DOMAIN_OWNER:+'--domain-owner' $DOMAIN_OWNER} --region $REGION) && touch ./mvn-settings.xml && echo "<?xml version=\"1.0\" encoding=\"UTF-8\" ?><settings xmlns=\"http://maven.apache.org/SETTINGS/1.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://maven.apache.org/SETTINGS/1.0.0http://maven.apache.org/xsd/settings-1.0.0.xsd\"><servers><server><id>${DOMAIN_NAME}--${REPOSITORY_NAME}</id><username>aws</username><password>${<PASSWORD>></server></servers></settings>" > ./mvn-settings.xml
RUN mvn --settings=./mvn-settings.xml -f $(find ./dist/java -name '*.pom') deploy \
-DrepositoryDirectory="dist/java" \
-DrepositoryId="${DOMAIN_NAME}--${REPOSITORY_NAME}" \
-DaltDeploymentRepository="${DOMAIN_NAME}--${REPOSITORY_NAME}::default::$(aws codeartifact get-repository-endpoint --format maven --output text --domain ${DOMAIN_NAME} --repository ${REPOSITORY_NAME} ${DOMAIN_OWNER:+'--domain-owner' $DOMAIN_OWNER} --region $REGION)" \
-DserverId="${DOMAIN_NAME}--${REPOSITORY_NAME}" | 75875b777a1c193d3d51744416547f53820c642e | [
"Markdown",
"TypeScript",
"Dockerfile"
] | 4 | TypeScript | richardhboyd/pipelinedemo | bbc56dde44e40a97e367209549f6811268465598 | ac2c6c7ee9dff1bc5d07c95d4ad6df6876a46772 |
refs/heads/master | <file_sep>(function() {
'use strict';
angular.module('RedditClone')
.directive('rcNav', navDirective);
// Directive definition object for the <rc-nav> top bar.
function navDirective() {
    return {
        templateUrl: '/app/nav/nav.directive.html',
        controller: controller,
        controllerAs: 'nav',
        bindToController: true
    };
}
// Explicit DI annotations survive minification.
controller.$inject = ['$http', '$scope'];
// Nav controller: holds the current sort key and toggles the posting form.
function controller($http, $scope) {
    var nav = this;
    // Default sort order applied to the postings list.
    nav.sort = "votes";
    nav.clickSubmitPost = function() {
        // Flips visibility of the sibling posting-form directive via $scope.
        $scope.postingForm.showPostingForm = !$scope.postingForm.showPostingForm;
    };
}
})();
<file_sep>exports.seed = function(knex, Promise) {
return Promise.join(
knex('users').del(),
knex('users').insert({
username: 'user1',
password_hash: '<PASSWORD>',
first_name: 'Jared',
last_name: 'Dunn'
}),
knex('users').insert({
username: 'user2',
password_hash: '<PASSWORD>',
first_name: 'Erlich',
last_name: 'Bachmann'
}),
knex('users').insert({
username: 'user3',
password_hash: '<PASSWORD>',
first_name: 'Richard',
last_name: 'Hendricks'
})
);
};
<file_sep>
// Migration: `comments` table -- one row per comment on a posting.
exports.up = function(knex, Promise) {
  return knex.schema.createTable('comments', function(table) {
    table.increments();
    // NOTE(review): posting_id/author_id are plain integers with no
    // foreign-key constraints -- confirm whether .references() was intended.
    table.integer('posting_id');
    table.integer('author_id');
    table.text('comment');
    table.timestamp('created_at').defaultTo(knex.fn.now());
    table.timestamp('updated_at').defaultTo(knex.fn.now());
  })
};

exports.down = function(knex, Promise) {
  return knex.schema.dropTable('comments');
};
<file_sep>const express = require('express');
const router = express.Router();
const knex = require('../db');
// REST endpoints for postings, comments, and votes.
//
// Fix: every route previously ignored query failures (the `next` parameter
// was unused), so a database error produced an unhandled promise rejection
// and a request that hung forever. Each chain now forwards errors to the
// Express error-handling middleware via .catch(next).

// GET /postings - all postings joined with their author's name.
router.get('/postings', function(req, res, next) {
  knex('postings')
    .join('users', 'postings.author_id', 'users.id')
    .select('postings.id',
      'postings.title',
      'postings.created_at',
      'postings.posting',
      'postings.votes',
      'postings.image_url',
      'users.first_name',
      'users.last_name'
    )
    .then(function(posting) {
      res.json(posting);
    })
    .catch(next);
});

// POST /newPosting - create a posting from the request body.
router.post('/newPosting', function(req, res, next) {
  knex('postings')
    .insert({
      author_id: req.body.author_id,
      title: req.body.title,
      image_url: req.body.image_url,
      posting: req.body.posting
    })
    .then(function(data) {
      res.end();
    })
    .catch(next);
});

// GET /comments - all comments joined with their author's name.
router.get('/comments', function(req, res, next) {
  knex('comments')
    .join('users', 'comments.author_id', 'users.id')
    .select('comments.posting_id',
      'users.first_name',
      'users.last_name',
      'comments.comment'
    )
    .then(function(comments) {
      res.json(comments);
    })
    .catch(next);
});

// POST /newComment - attach a comment to a posting.
router.post('/newComment', function(req, res, next) {
  knex('comments')
    .insert({
      posting_id: req.body.posting_id,
      author_id: req.body.author_id,
      comment: req.body.comment
    })
    .then(function() {
      res.end();
    })
    .catch(next);
});

// POST /postings/votes/ - overwrite the vote count for a posting.
router.post('/postings/votes/', function(req, res, next) {
  knex('postings')
    .where({ id: req.body.id })
    .update({ votes: req.body.votes })
    .then(function() {
      res.end();
    })
    .catch(next);
});

module.exports = router;
<file_sep>exports.seed = function(knex, Promise) {
return Promise.join(
knex('comments').del(),
knex('comments').insert({
posting_id: 3,
author_id: 1,
comment: 'Hello World'
}),
knex('comments').insert({
posting_id: 1,
author_id: 2,
comment: 'Goodbye World'
}),
knex('comments').insert({
posting_id: 2,
author_id: 3,
comment: 'Welcome World'
})
);
};
<file_sep>exports.seed = function(knex, Promise) {
return Promise.join(
knex('postings').del(),
knex('postings').insert({
author_id: 1,
title: 'iLive-80',
image_url: 'http://cdn3.volusion.com/b3o4z.gn3gt/v/vspfiles/photos/9999-05589-3.jpg?1459514594',
posting: 'The Surface is simply a controller for the MixRack. It also has an audio rack built into it to conveniently provide local inputs and outputs at the mix position. It features a large analogue style, one control per function processing block, TouchScreen, name and colour virtual "write-on" strip, banks of motorised faders with independent layers, SoftKeys, built-in headphones, local monitor and talkback. The modular iLive-80 allows a variety of audio I/O modules, network formats and redundant power supply options, and can be shipped in a touring flight case.',
votes: 1
}),
knex('postings').insert({
author_id: 2,
title: 'iDR 64',
image_url: 'http://www.allen-heath.com/media/iDR_64_Front_Main.jpg',
posting: 'The MixRack is the heart of the iLive system, housing the DSP, most of the I/O, networking and control interfaces. The iDR-64 is the biggest of the fixed format range with 64 Mic/Line inputs and 32 XLR line outputs. AES Digital Output Option available, offering 2 AES3 digital stereo outputs in place of 4 analogue line outputs. iDR-64 can be ordered with a single AES Digital Output Option pre-fitted. A retro-fit kit is also available, allowing single or multiple AES Digital Output Options to be fitted. iLive Firmware V1.9 is required for this option.',
votes: 2
}),
knex('postings').insert({
author_id: 3,
title: 'Zone 2D',
image_url: 'http://www.synthtopia.com/wp-content/uploads/2007/01/allen-heath-zone-2d.jpg',
posting: 'The Xone:2D is a combination of an 18-channel USB 2.0 soundcard and MIDI controller, which DJ’s can combine with their existing analogue mixer to create a complete digital DJ mixing system, perfect for integration with DJ software such as Ableton Live and Traktor. The 96kHz 24-bit USB 2.0 soundcard is provided with a large array of connections for easy interfacing into any DJ setup, while MIDI clocks are generated using the tap tempo button or by analysing the BPM of incoming audio, available from all input channels. One of the input channels can be switched between mic and line, while the other two are phono/line, and the unit is completed by a comprehensive onboard monitoring system.',
votes: 3
})
);
};
<file_sep>(function() {
'use strict';
angular.module('RedditClone')
.directive('rcPostings', postingsDirective);
// Directive factory for <rc-postings>.
// Builds the directive definition object: template, controller, and the
// "postings" alias used by the template (bindToController keeps bindings on it).
function postingsDirective() {
    var definition = {
        templateUrl: '/app/postings/postings.directive.html',
        controllerAs: 'postings',
        bindToController: true,
        controller: controller
    };
    return definition;
}
// Controller is looked up by reference; annotate for minification-safe DI.
controller.$inject = ['redditServices'];
// Postings list controller: exposes the fetched postings plus up/down-vote
// handlers on the "postings" controllerAs alias.
function controller(redditServices) {
    var vm = this;

    vm.allPostings = [];
    vm.voteUp = castUpVote;
    vm.voteDown = castDownVote;

    loadPostings();

    // Fetch every posting and publish the list on the controller.
    // Non-200 responses are only logged (best-effort UI refresh).
    function loadPostings() {
        redditServices.allPostings().then(function (response) {
            if (response.status === 200) {
                vm.allPostings = response.data;
            } else {
                console.log(response);
            }
        });
    }

    // Cast an up-vote for the given post; failures are only logged.
    function castUpVote(post) {
        redditServices.voteUp(post).then(function (response) {
            if (response.status !== 200) {
                console.log(response);
            }
        });
    }

    // Cast a down-vote for the given post; failures are only logged.
    function castDownVote(post) {
        redditServices.voteDown(post).then(function (response) {
            if (response.status !== 200) {
                console.log(response);
            }
        });
    }
}
})();
<file_sep>(function() {
'use strict';
// Register the <rc-postingform> directive (new-posting form) on the
// RedditClone module. The IIFE keeps the factory and controller private.
angular.module('RedditClone')
.directive('rcPostingform', postingFormDirective);
// Directive factory for <rc-postingform>.
// Returns the directive definition object; bindings live on the
// "postingForm" controllerAs alias used by the template.
function postingFormDirective() {
    var definition = {
        templateUrl: '/app/postingForm/postingForm.directive.html',
        controllerAs: 'postingForm',
        bindToController: true,
        controller: controller
    };
    return definition;
}
// Minification-safe DI annotation for the form controller.
controller.$inject = ['$scope', 'redditServices'];
// New-posting form controller: toggles form visibility and submits new
// postings. On a successful submit it re-fetches the list and pushes it
// onto the sibling postings controller via $scope.postings (shared scope).
function controller($scope, redditServices) {
    var vm = this;

    vm.showPostingForm = false;
    vm.addNewPosting = submitPosting;

    // Refresh after a successful submit: re-fetch postings, reset and hide
    // the form, and update the shared list. Errors are only logged.
    function refreshAfterSubmit() {
        redditServices.allPostings().then(function (response) {
            if (response.status === 200) {
                vm.newPosting = {};
                $scope.addPosting.$setUntouched();
                vm.showPostingForm = false;
                $scope.postings.allPostings = response.data;
            } else {
                console.log(response);
            }
        });
    }

    // Send the new posting to the backend; on success run the refresh,
    // otherwise log the failed response.
    function submitPosting(posting) {
        redditServices.newPosting(posting).then(function (response) {
            if (response.status === 200) {
                refreshAfterSubmit();
            } else {
                console.log(response);
            }
        });
    }
}
})();
| efa5e7713ce0ac655815707d7d62644d2a3bb54e | [
"JavaScript"
] | 8 | JavaScript | butters5789/reddit-clone-angular-express | 97ca582d1e7f132f44aeaef0d523e2143a23696c | 097290654e5b9eccb2195742bd7e8f49e7b1447c |
refs/heads/master | <repo_name>akm/rbc_mruby<file_sep>/cds.rb
#
# MIT LICENSE
#
# Copyright <NAME>, <NAME>
#
a0 = AnalogIO.new(A0, INPUT)
d0 = DigitalIO.new(D0, OUTPUT)
while true
if a0.read > 0
d0.write(1)
else
d0.write(0)
end
sleep(1)
end
d = DigitalIO.new(D0, OUTPUT)
while true
d.write(1)
sleep(1)
d.write(0)
sleep(1)
end
<file_sep>/led_display.rb
# -*- coding: utf-8 -*-
#
# MIT LICENSE
#
# Copyright <NAME>, <NAME>
#
# ややこしいけど、ON(1)にすると消えて、OFF(0)なら光る
#
# D1
# a1
# --------
# D6| |D2
# f1| D7 |b1
# | g1 |
# --------
# D5| |D3
# e1| D4 |c1
# | d1 |
# --------
#
DIOS = [D1, D2, D3, D4, D5, D6, D7]
dios = DIOS.map{|i| DigitalIO.new(i, OUTPUT)}
dios.each{|io| io.write(1) } # 全部消す
orders = [0,1,6,4,3,2,6,5]
p orders
prev = dios[orders.last]
curr = dios[orders.first]
while true
orders.each do |idx|
# p idx
prev = curr
curr = dios[idx]
prev.write(1)
curr.write(0)
delay(100)
end
end
<file_sep>/led_display_test.rb
#
# MIT LICENSE
#
# Copyright <NAME>, <NAME>
#
d1 = DigitalIO.new(D1, OUTPUT)
while true
p 1
d1.write(1)
sleep(3)
p 0
d1.write(0)
sleep(3)
end
| ce55154b64728c1cf9ff004fd4dc60a0d988c2ac | [
"Ruby"
] | 3 | Ruby | akm/rbc_mruby | dec8423d59eca3bb7db447e86f2a0270d23a90fb | edd116238234468477ea3df01e753358128ae3a8 |
refs/heads/main | <file_sep>package com.atguigu.springcloud.alibaba.myHandler;
import com.alibaba.csp.sentinel.slots.block.BlockException;
public class customerBlockHandler {
public static String test(BlockException e){
return "使用统一的处理方法";
}
}
| 50c9ff86a34bcbf7ac0e1648f85583a850b7d9c3 | [
"Java"
] | 1 | Java | LinJianpin/cloud_alibaba | 4e1e61880bd0fd482896a7c7a6e17fb1d001cf24 | 760610c442f2d6ddb6a28d04bb7b10eac71765e4 |
refs/heads/main | <repo_name>SomeoneSom/mindusimg<file_sep>/README.md
# mindusimg
this doesn’t even work, someone debug this. Writes code to make 3 bit color image from one processor.
<file_sep>/mindusimg.py
from os import getcwd
from PIL import Image
#write result cell1 0
dir = getcwd() + "/"
fname = input("Image name?\n")
output = open(dir + "out.txt", 'w')
with Image.open(dir + fname) as im:
im = im.resize((80, 80))
bstr = ''
count = 0
for x in range(0, 80):
for y in range(79, -1, -1):
r, g, b, alpha = im.getpixel((x, y))
bstr = ("1" if r > 127 else "0") + bstr
bstr = ("1" if g > 127 else "0") + bstr
bstr = ("1" if b > 127 else "0") + bstr
if len(bstr) == 63:
bstr = "0b" + bstr
output.write("write " + bstr + " bank1 " + str(count) + "\n")
count += 1
bstr = ''
while len(bstr) < 63:
bstr = "000" + bstr
bstr = "0b" + bstr
output.write("write " + bstr + " bank1 " + str(count))
output.write("""
set x 0
set y 79
set a -1
op add a a 1
read pixels bank1 a
set b -1
op add b 1 b
op mod r pixels 2
op mul r r 255
op shr pixels pixels 1
op mod g pixels 2
op mul g g 255
op shr pixels pixels 1
op mod bl pixels 2
op mul bl bl 255
op shr pixels pixels 1
draw color r g bl 255 0 0
draw rect x y 1 1 0 0
drawflush display1
op add x x 1
jump 328 lessThan x 80
op sub y y 1
set x 0
jump 311 lessThan b 20
jump 308 lessThan a 304""")
print('Done!')
output.close()
| 73f4237f9145d5216cc966f71ac60b9c21570941 | [
"Markdown",
"Python"
] | 2 | Markdown | SomeoneSom/mindusimg | f166eed1d2cd89c42fa5072859e90d25f8791bd6 | cd5590b9850427dda292dadb56d26f28c46e77c2 |
refs/heads/master | <file_sep>package app;
import java.io.File;
import beans.AssetMember;
import parser.ParseCSV;
public class MyApp {
public static void main(String[] args){
AssetMember one = new AssetMember();
ParseCSV two = new ParseCSV();
String folderPathName = "C:\\Users\\nyx\\Desktop\\asset\\";
String fileName = "HL78082_2017_02_23_14_11_13.csv";//"HM62855_2017_02_18_08_25_08.csv";
String pathName = folderPathName + fileName;
File file = new File(pathName);
// for (String a[]:two.opencsvTostringArray(file)){
// for (String b : a){
// System.out.println(b);
// }
for (AssetMember aaa:two.opencsvToBean(file)){
if ( 87955046==Integer.parseInt(aaa.get取引番号())){
System.out.println(aaa.get取引原資産());
aaa.get取引原資産();
aaa.getBet();
aaa.getEndDataTime();
aaa.get取引番号();
aaa.getBiginRate();
aaa.getDirection();
aaa.getOption();
}
}
}
}
| 5b0446858d575badb6ff51c95df02103c43c695c | [
"Java"
] | 1 | Java | rami2076/LerningOpenCSV | 42db2c795cb6081f181cbe1175f6ededf8c56f17 | 1326e35e5d79a9c51322bd61351971f718f31fc3 |
refs/heads/development | <repo_name>AscendingGames/AscendingEngine<file_sep>/core/src/com/ascending/games/engine/model/geometry/IRectangle2.kt
package com.ascending.games.engine.model.geometry
import com.badlogic.gdx.math.Rectangle
interface IRectangle2 : IPosition2, ISize2 {
val rectangle : Rectangle
get() = Rectangle(position.x, position.y, size.x, size.y)
}<file_sep>/core/src/com/ascending/games/engine/model/geometry/IHierarchical2.kt
package com.ascending.games.engine.model.geometry
import com.ascending.games.engine.model.game.IChild
import com.badlogic.gdx.math.Vector2
interface IHierarchical2 : IPosition2, IChild {
val relativePosition : Vector2
override val position: Vector2
get() = (parent?.let { if (it is IPosition2) it.position.cpy() else Vector2() } ?: Vector2()).add(relativePosition)
}<file_sep>/core/src/com/ascending/games/engine/edit/resource/ISaveable.kt
package com.ascending.games.engine.edit.resource
interface ISaveable {
fun save()
fun load()
}<file_sep>/core/src/com/ascending/games/engine/edit/action/ITimedAction.kt
package com.ascending.games.engine.edit.action
interface ITimedAction {
val canExecute : Boolean
fun execute(delta : Float) : Boolean
}<file_sep>/core/src/com/ascending/games/engine/model/pathfinding/IGraph.kt
package com.ascending.games.engine.model.pathfinding
interface IGraph<Node> {
fun getNodes() : List<Node>
fun getNeighbours(node : Node) : List<Node>
}<file_sep>/core/src/com/ascending/games/engine/edit/action/ComposedTimedAction.kt
package com.ascending.games.engine.edit.action
class ComposedTimedAction(private val actions : List<com.ascending.games.engine.edit.action.ITimedAction>) : com.ascending.games.engine.edit.action.ITimedAction {
private var currentActionIndex = 0
override val canExecute : Boolean
get() = if (currentActionIndex > actions.size - 1) false else actions.subList(currentActionIndex, actions.size - 1).none { !it.canExecute }
override fun execute(delta: Float): Boolean {
if (currentActionIndex >= actions.size) return true
if (actions[currentActionIndex].execute(delta)) {
currentActionIndex++
}
return currentActionIndex == actions.size
}
}<file_sep>/core/src/com/ascending/games/engine/model/geometry/Rectangle2.kt
package com.ascending.games.engine.model.geometry
import com.badlogic.gdx.math.Vector2
data class Rectangle2(override val position : Vector2, override val size : Vector2) : IRectangle2 {
}<file_sep>/core/src/com/ascending/games/engine/model/geometry/Direction4.kt
package com.ascending.games.engine.model.geometry
enum class Direction4 {
UP, RIGHT, DOWN, LEFT;
fun toOffset() : Coord2 {
return when (this) {
UP -> Coord2(0, 1)
RIGHT -> Coord2(1, 0)
DOWN -> Coord2(0, -1)
LEFT -> Coord2(-1, 0)
}
}
fun rotateLeft() : Direction4 {
return when (this) {
UP -> Direction4.LEFT
RIGHT -> Direction4.UP
DOWN -> Direction4.RIGHT
LEFT -> Direction4.DOWN
}
}
fun rotateRight() : Direction4 {
return when (this) {
UP -> Direction4.RIGHT
RIGHT -> Direction4.DOWN
DOWN -> Direction4.LEFT
LEFT -> Direction4.UP
}
}
fun opposite() : Direction4 {
return when (this) {
UP -> Direction4.DOWN
RIGHT -> Direction4.LEFT
DOWN -> Direction4.UP
LEFT -> Direction4.RIGHT
}
}
}<file_sep>/core/src/com/ascending/games/engine/model/game/IChild.kt
package com.ascending.games.engine.model.game
interface IChild {
val parent : Any?
val isRoot : Boolean
get() = parent == null
}<file_sep>/core/test/com/ascending/games/engine/edit/resource/JSONResourceTest.kt
package com.ascending.games.engine.edit.resource
import com.ascending.games.engine.edit.resource.JSONResource
import org.hamcrest.CoreMatchers.hasItem
import org.junit.Assert.assertEquals
import org.junit.Assert.assertThat
import org.junit.Test
class JSONResourceTest {
companion object {
data class MockObject(var x : Int = 0)
}
val jsonResource = JSONResource("test.json")
@Test
fun serialize() {
jsonResource.contents += MockObject(1)
jsonResource.contents += MockObject(2)
assertEquals("[{class:com.ascending.games.engine.edit.resource.JSONResourceTest\$Companion\$MockObject,x:1},{class:com.ascending.games.engine.edit.resource.JSONResourceTest\$Companion\$MockObject,x:2}]", jsonResource.serialize())
}
@Test
fun deserialize() {
val serialized = "[{class:com.ascending.games.engine.edit.resource.JSONResourceTest\$Companion\$MockObject,x:1},{class:com.ascending.games.engine.edit.resource.JSONResourceTest\$Companion\$MockObject,x:2}]"
jsonResource.deserialize(serialized)
assertThat(jsonResource.contents, hasItem(MockObject(1)))
assertThat(jsonResource.contents, hasItem(MockObject(2)))
}
}<file_sep>/core/src/com/ascending/games/engine/model/game/IStats.kt
package com.ascending.games.engine.model.game
interface IStats {
val stats : MutableMap<String, Float>
val statModifiers : MutableList<IStatModifier>
fun change(stat : String, value : Float, defaultValue : Float = 0f) {
val newValue = (stats[stat] ?: defaultValue) + value
stats[stat] = newValue
}
}<file_sep>/README.md
# AscendingEngine
# About this repository
[](https://travis-ci.com/AscendingGames/AscendingEngine)
[](https://codebeat.co/projects/github-com-ascendinggames-ascendingengine-development)
[](https://codecov.io/gh/AscendingGames/AscendingEngine)
<file_sep>/core/test/com/ascending/games/engine/model/pathfinding/PathfinderTest.kt
package com.ascending.games.engine.model.pathfinding
import com.ascending.games.engine.model.pathfinding.IDistanceEstimator
import com.ascending.games.engine.model.pathfinding.IGraph
import com.ascending.games.engine.model.pathfinding.Pathfinder
import org.junit.Test
import org.junit.Assert.*
class PathfinderTest {
private val graph = object : IGraph<Int> {
override fun getNodes(): List<Int> {
return listOf(0, 1,2,3)
}
override fun getNeighbours(node: Int): List<Int> {
when (node) {
0 -> return listOf(1)
1 -> return listOf(0, 2)
2 -> return listOf(3)
else -> return listOf()
}
}
}
private val distanceEstimator = object : IDistanceEstimator<Int> {
override fun estimateDistance(node1: Int, node2: Int): Float {
return Math.abs(node1 - node2).toFloat()
}
}
@Test
fun getPath() {
val pathfinder = Pathfinder<Int>(graph, distanceEstimator)
val path = pathfinder.getPath(1, 3)
assertEquals(listOf(2, 3), path)
}
}<file_sep>/core/src/com/ascending/games/engine/model/geometry/Coord2.kt
package com.ascending.games.engine.model.geometry
data class Coord2(var x : Int, var y : Int) {
companion object {
val ZERO = Coord2(0, 0)
}
fun rotate() : Coord2 {
val oldX = this.x
val oldY = this.y
this.y = -oldX
this.x = oldY
return this
}
fun add(other : Coord2) : Coord2 {
return Coord2(x + other.x, y + other.y)
}
fun distance(other : Coord2) : Float {
return Math.sqrt(((x - other.x) * (x - other.x) + (y - other.y) * (y - other.y)).toDouble()).toFloat()
}
}<file_sep>/core/src/com/ascending/games/engine/edit/action/EmptyTimedAction.kt
package com.ascending.games.engine.edit.action
class EmptyTimedAction : com.ascending.games.engine.edit.action.ITimedAction {
override val canExecute = true
override fun execute(delta: Float) = true
}<file_sep>/core/test/com/ascending/games/engine/edit/resource/ResourceFactoryTest.kt
package com.ascending.games.engine.edit.resource
import com.ascending.games.engine.edit.resource.JSONResource
import com.ascending.games.engine.edit.resource.ResourceFactory
import org.junit.Assert.assertTrue
import org.junit.Test
class ResourceFactoryTest {
val resourceFactory = ResourceFactory()
@Test
fun createResource() {
assertTrue(resourceFactory.createResource("test.json") is JSONResource)
assertTrue(resourceFactory.createResource("test.txt") is com.ascending.games.engine.edit.resource.DefaultResource)
}
}<file_sep>/core/src/com/ascending/games/engine/edit/resource/DefaultResource.kt
package com.ascending.games.engine.edit.resource
class DefaultResource(override var uri: String) : com.ascending.games.engine.edit.resource.IResource {
override val contents = mutableListOf<Any>()
override var isLoaded = false
override fun serialize(): String {
return contents.joinToString("\n")
}
override fun deserialize(serialized: String) {
contents.addAll(serialized.split("\n"))
}
}<file_sep>/core/src/com/ascending/games/engine/view/IVisible.kt
package com.ascending.games.engine.view
interface IVisible {
fun show()
fun hide()
}<file_sep>/core/src/com/ascending/games/engine/model/data/ObservableMap.kt
package com.ascending.games.engine.model.data
class ObservableMap<E, F>(private var mutableMap: MutableMap<E, F>) : IObservableMap<E, F> {
constructor() : this(mutableMapOf())
override val onPut = HashSet<(E, F) -> Unit>()
override val onRemove = HashSet<(E, F) -> Unit>()
override val onChange = HashSet<(E, F) -> Unit>()
override val size: Int
get() = mutableMap.size
override val entries: MutableSet<MutableMap.MutableEntry<E, F>>
get() = mutableMap.entries
override val keys: MutableSet<E>
get() = mutableMap.keys
override val values: MutableCollection<F>
get() = mutableMap.values
override fun containsKey(key: E): Boolean {
return mutableMap.containsKey(key)
}
override fun containsValue(value: F): Boolean {
return mutableMap.containsValue(value)
}
override fun get(key: E): F? {
return mutableMap.get(key)
}
override fun isEmpty(): Boolean {
return mutableMap.isEmpty()
}
override fun clear() {
val oldMap = mutableMap.toMap()
mutableMap.clear()
oldMap.forEach { key, value ->
onRemove.forEach { it.invoke(key, value) }
onChange.forEach { it.invoke(key, value) }
}
}
override fun put(key: E, value: F): F? {
val oldValue = mutableMap.put(key, value)
onPut.forEach { it.invoke(key, value) }
onChange.forEach { it.invoke(key, value) }
return oldValue
}
override fun remove(key: E): F? {
val value = mutableMap.remove(key)
value ?: return null
onRemove.forEach { it.invoke(key, value) }
onChange.forEach { it.invoke(key, value) }
return value
}
}<file_sep>/core/src/com/ascending/games/engine/view/IView2.kt
package com.ascending.games.engine.view
import com.badlogic.gdx.graphics.Camera
import com.badlogic.gdx.graphics.g2d.SpriteBatch
import com.badlogic.gdx.utils.Disposable
interface IView2 : Disposable {
val batchID : Int
fun render(batch: SpriteBatch, camera : Camera)
}<file_sep>/core/test/com/ascending/games/engine/edit/action/ComposedTimedActionTest.kt
package com.ascending.games.engine.edit.action
import org.junit.Test
import org.junit.Assert.*
class ComposedTimedActionTest {
val MockAction = object : com.ascending.games.engine.edit.action.ITimedAction {
override val canExecute = true
override fun execute(delta: Float): Boolean {
return true
}
}
@Test
fun canExecute() {
assertFalse(com.ascending.games.engine.edit.action.ComposedTimedAction(listOf()).canExecute)
val composedTimedAction = com.ascending.games.engine.edit.action.ComposedTimedAction(listOf(MockAction))
assertTrue(composedTimedAction.canExecute)
composedTimedAction.execute(0f)
assertFalse(composedTimedAction.canExecute)
}
@Test
fun execute() {
val composedTimedAction = com.ascending.games.engine.edit.action.ComposedTimedAction(listOf(MockAction, MockAction))
assertFalse(composedTimedAction.execute(1f))
assertTrue(composedTimedAction.execute(1f))
assertTrue(composedTimedAction.execute(1f))
}
}<file_sep>/core/src/com/ascending/games/engine/edit/resource/IResource.kt
package com.ascending.games.engine.edit.resource
import com.badlogic.gdx.Gdx
import com.badlogic.gdx.files.FileHandle
interface IResource : ISaveable {
var uri : String
val contents : MutableList<Any>
var isLoaded : Boolean
fun serialize() : String
fun deserialize(serialized : String)
fun getFile() : FileHandle {
return Gdx.files.local(uri)
}
override fun save() {
val serialized = serialize()
getFile().writeString(serialized, false)
isLoaded = true
}
override fun load() {
if (!isLoaded) {
val serialized = getFile().readString()
deserialize(serialized)
isLoaded = true
}
}
fun unload() {
if (isLoaded) {
contents.clear()
isLoaded = false
}
}
fun reload() {
unload()
load()
}
}<file_sep>/core/src/com/ascending/games/engine/view/SceneManager2.kt
package com.ascending.games.engine.view
import com.badlogic.gdx.Gdx
import com.badlogic.gdx.graphics.Color
import com.badlogic.gdx.graphics.GL20
import com.badlogic.gdx.graphics.OrthographicCamera
import com.badlogic.gdx.graphics.g2d.SpriteBatch
import com.badlogic.gdx.math.Vector2
import com.badlogic.gdx.utils.Disposable
class SceneManager2(viewportSize : Vector2) : Disposable {
val views : MutableList<IView2> = mutableListOf()
private var batchMap : Map<Int, SpriteBatch> = mutableMapOf()
private val camera = OrthographicCamera(viewportSize.x, viewportSize.y)
init {
camera.setToOrtho(false)
}
private fun getSpriteBatch(id : Int) : SpriteBatch {
var batch = batchMap[id]
if (batch == null) {
batch = SpriteBatch()
batchMap = batchMap.plus(Pair(id, batch))
}
return batch
}
fun render() {
Gdx.gl.glClearColor(Color.BLACK.r, Color.BLACK.g, Color.BLACK.b, Color.BLACK.a)
Gdx.gl.glClear(GL20.GL_COLOR_BUFFER_BIT)
camera.update()
val viewMap = views.groupBy { it.batchID }
for (viewBatch in viewMap) {
val batch = getSpriteBatch(viewBatch.key)
batch.begin()
for (view in viewBatch.value) {
view.render(batch, camera)
}
batch.end()
}
}
override fun dispose() {
for (batch in batchMap.values) {
batch.dispose()
}
batchMap = emptyMap()
}
}<file_sep>/core/src/com/ascending/games/engine/edit/action/ITimedActionProvider.kt
package com.ascending.games.engine.edit.action
interface ITimedActionProvider {
fun getNextAction() : com.ascending.games.engine.edit.action.ITimedAction?
}<file_sep>/core/src/com/ascending/games/engine/model/data/ObservableList.kt
package com.ascending.games.engine.model.data
class ObservableList<E>(private val mutableList: MutableList<E>) : IObservableList<E> {
override val onAdd = HashSet<(Int, E) -> Unit>()
override val onRemove = HashSet<(E) -> Unit>()
override val size: Int get() = mutableList.size
override fun contains(element: E): Boolean {
return mutableList.contains(element)
}
override fun containsAll(elements: Collection<E>): Boolean {
return mutableList.containsAll(elements)
}
override fun get(index: Int): E {
return mutableList[index]
}
override fun indexOf(element: E): Int {
return mutableList.indexOf(element)
}
override fun isEmpty(): Boolean {
return mutableList.isEmpty()
}
override fun iterator(): MutableIterator<E> {
return mutableList.iterator()
}
override fun lastIndexOf(element: E): Int {
return mutableList.lastIndexOf(element)
}
override fun add(index: Int, element: E) {
mutableList.add(element)
onAdd.forEach { it.invoke(index, element) }
}
override fun clear() {
val oldList = mutableList.toList()
mutableList.clear()
oldList.forEach { element -> onRemove.forEach { it.invoke(element) } }
}
override fun listIterator(): MutableListIterator<E> {
return mutableList.listIterator()
}
override fun listIterator(index: Int): MutableListIterator<E> {
return mutableList.listIterator(index)
}
override fun removeAt(index: Int): E {
val element = this[index]
onRemove.forEach { it.invoke(element) }
mutableList.removeAt(index)
return element
}
override fun set(index: Int, element: E): E {
val oldElement = this[index]
onRemove.forEach { it.invoke(oldElement) }
mutableList[index] = element
onAdd.forEach { it.invoke(index, element) }
return oldElement
}
override fun subList(fromIndex: Int, toIndex: Int): MutableList<E> {
return ObservableList(mutableList.subList(fromIndex, toIndex))
}
}<file_sep>/core/test/com/ascending/games/engine/model/geometry/Direction4Test.kt
package com.ascending.games.engine.model.geometry
import com.ascending.games.engine.model.geometry.Direction4
import org.junit.Test
import org.junit.Assert.*
class Direction4Test {
@Test
fun rotateLeft() {
assertEquals(Direction4.UP, Direction4.UP.rotateLeft().rotateLeft().rotateLeft().rotateLeft())
}
@Test
fun rotateRight() {
assertEquals(Direction4.UP, Direction4.UP.rotateRight().rotateRight().rotateRight().rotateRight())
}
@Test
fun opposite() {
assertEquals(Direction4.UP, Direction4.UP.opposite().opposite())
assertEquals(Direction4.LEFT, Direction4.LEFT.opposite().opposite())
}
}<file_sep>/core/test/com/ascending/games/engine/model/geometry/Coord2Test.kt
package com.ascending.games.engine.model.geometry
import com.ascending.games.engine.model.geometry.Coord2
import org.junit.Test
import org.junit.Assert.*
class Coord2Test {
@Test
fun rotate() {
assertEquals(Coord2.ZERO, Coord2(0, 0).rotate())
assertEquals(Coord2(0, -1), Coord2(1, 0).rotate())
assertEquals(Coord2(-1, 0), Coord2(0, -1).rotate())
assertEquals(Coord2(0, 1), Coord2(-1, 0).rotate())
}
}<file_sep>/core/test/com/ascending/games/engine/model/data/ObservableListTest.kt
package com.ascending.games.engine.model.data
import com.ascending.games.engine.model.data.ObservableList
import org.junit.Assert.assertEquals
import org.junit.Assert.assertTrue
import org.junit.Test
class ObservableListTest {
private val data = mutableListOf(1,2,3)
private val list = ObservableList(data)
@Test
fun add() {
list.onAdd += { index, element ->
assertEquals(3, index)
assertEquals(4, element)
}
list.add(4)
}
@Test
fun addAll() {
val callbacks = mutableListOf<Pair<Int, Int>>()
list.onAdd += { index, element ->
callbacks.add(Pair(index, element))
}
list.addAll(listOf(3, 4))
assertEquals(listOf(Pair(3, 3), Pair(4, 4)), callbacks)
}
@Test
fun clear() {
val callbacks = mutableListOf<Int>()
list.onRemove += { element ->
callbacks.add(element)
}
list.clear()
assertEquals(listOf(1,2,3), callbacks)
}
@Test
fun remove() {
list.onRemove += { element ->
assertEquals(2, element)
}
list.remove(2)
}
@Test
fun testRemoveAll() {
val callbacks = mutableListOf<Int>()
list.onRemove += { element ->
callbacks.add(element)
}
list.removeAll(listOf(3, 4))
assertEquals(listOf(3), callbacks)
}
@Test
fun removeAt() {
val callbacks = mutableListOf<Int>()
list.onRemove += { element ->
callbacks.add(element)
}
list.removeAt(2)
assertEquals(listOf(3), callbacks)
}
@Test
fun retainAll() {
val callbacks = mutableListOf<Int>()
list.onRemove += { element ->
callbacks.add(element)
}
list.retainAll(listOf(1,3))
assertEquals(listOf(2), callbacks)
}
@Test
fun set() {
list.onAdd += { index, element ->
assertEquals(1, index)
assertEquals(3, element)
}
list.onRemove += { element ->
assertEquals(2, element)
}
list[1] = 3
}
@Test
fun size() {
assertEquals(data.size, list.size)
}
@Test
fun containsAll() {
assertTrue(list.containsAll(data))
}
@Test
fun lastIndexOf() {
assertEquals(data.lastIndexOf(1), list.lastIndexOf(1))
}
@Test
fun subList() {
assertEquals(data.subList(0, 2), list.subList(0, 2))
}
}<file_sep>/core/src/com/ascending/games/engine/model/game/AbsoluteStatChange.kt
package com.ascending.games.engine.model.game
open class AbsoluteStatChange(private val statChanges : MutableMap<String, Float>) : IStatModifier {
override var appliedTo: IStats? = null
override fun apply(stats: IStats) {
appliedTo = stats
statChanges.forEach { stat, value -> stats.stats.put(stat, (stats.stats.get(stat) ?: 0f) + value) }
}
override fun unapply() {
appliedTo?.let { appliedTo -> statChanges.forEach { stat, value -> appliedTo.stats.put(stat, (appliedTo.stats.get(stat) ?: 0f) - value) } }
}
}<file_sep>/core/src/com/ascending/games/engine/model/pathfinding/Pathfinder.kt
package com.ascending.games.engine.model.pathfinding
class Pathfinder<Node>(val graph : IGraph<Node>, val distanceEstimator : IDistanceEstimator<Node>) {
fun getPath(startNode : Node, targetNode : Node) : List<Node> {
val openList = mutableListOf(startNode to distanceEstimator.estimateDistance(startNode, targetNode))
val closedList = mutableSetOf<Node>()
val mapNodeToPredecessor = mutableMapOf<Node, Node>()
while (!closedList.contains(targetNode) && !openList.isEmpty()) {
val rankedNode = openList.removeAt(0)
closedList.add(rankedNode.first)
val neighbours = getUnexploredNeighbours(closedList, rankedNode, targetNode)
expandNeighbours(neighbours, openList)
neighbours.forEach { mapNodeToPredecessor[it.first] = rankedNode.first }
}
return extractPath(startNode, targetNode, mapNodeToPredecessor)
}
fun getUnexploredNeighbours(closedList: MutableSet<Node>, rankedNode: Pair<Node, Float>, targetNode: Node) : List<Pair<Node, Float>> {
return graph.getNeighbours(rankedNode.first)
.filter { !closedList.contains(it) }
.map { it to distanceEstimator.estimateDistance(rankedNode.first, it) + distanceEstimator.estimateDistance(it, targetNode) }
}
private fun expandNeighbours(neighbours : List<Pair<Node, Float>>, openList : MutableList<Pair<Node, Float>>) {
for (neighbour in neighbours) {
val nodeRanking = getOpenListRanking(neighbour, openList)
openList.add(nodeRanking, neighbour)
}
}
private fun getOpenListRanking(unrankedNode : Pair<Node, Float>, openList : MutableList<Pair<Node, Float>>) : Int {
for (i in 0 until openList.size) {
val rankedNode = openList[i]
if (unrankedNode.second < rankedNode.second) {
return i
}
}
return openList.size
}
private fun extractPath(startNode : Node, targetNode : Node, mapNodeToPredecessor : MutableMap<Node, Node>) : List<Node> {
val path = mutableListOf(targetNode)
var currentNode = mapNodeToPredecessor[targetNode]
while (currentNode != null && currentNode != startNode) {
path.add(currentNode)
currentNode = mapNodeToPredecessor[currentNode]
}
return path.reversed()
}
}<file_sep>/core/src/com/ascending/games/engine/model/game/AGameObject.kt
package com.ascending.games.engine.model.game
import com.ascending.games.engine.model.data.ObservableList
import com.ascending.games.engine.model.data.ObservableMap
import com.ascending.games.engine.model.geometry.IHierarchical2
import com.ascending.games.engine.model.geometry.IRectangle2
import com.badlogic.gdx.math.Vector2
abstract class AGameObject : IRectangle2, IHierarchical2, IStats {
override var relativePosition = Vector2()
override var size = Vector2()
override val stats = ObservableMap(HashMap<String, Float>())
override val statModifiers by lazy {
val statModifiers = ObservableList(mutableListOf<IStatModifier>())
statModifiers.onAdd += { _, modifier -> modifier.apply(this) }
statModifiers.onRemove += { it.unapply() }
statModifiers
}
}<file_sep>/core/src/com/ascending/games/engine/model/game/IStatModifier.kt
package com.ascending.games.engine.model.game
interface IStatModifier {
var appliedTo : IStats?
fun apply(stats : IStats)
fun unapply()
}<file_sep>/core/src/com/ascending/games/engine/model/pathfinding/IDistanceEstimator.kt
package com.ascending.games.engine.model.pathfinding
interface IDistanceEstimator<Node> {
fun estimateDistance(node1 : Node, node2 : Node) : Float
}<file_sep>/core/src/com/ascending/games/engine/view/SpriteView.kt
package com.ascending.games.engine.view
import com.ascending.games.engine.model.geometry.IRectangle2
import com.badlogic.gdx.graphics.Camera
import com.badlogic.gdx.graphics.Texture
import com.badlogic.gdx.graphics.g2d.SpriteBatch
class SpriteView(val rectangleProvider : IRectangle2, var texture : Texture, batchID : Int = 0) : AView2(batchID) {
override fun render(batch: SpriteBatch, camera: Camera) {
val rect = rectangleProvider.rectangle
batch.draw (texture, rect.x, rect.y, rect.width, rect.height)
}
}<file_sep>/core/src/com/ascending/games/engine/view/AView2.kt
package com.ascending.games.engine.view
abstract class AView2(override val batchID: Int = 0) : IView2 {
override fun dispose() = Unit
}<file_sep>/core/src/com/ascending/games/engine/model/geometry/ISize2.kt
package com.ascending.games.engine.model.geometry
import com.badlogic.gdx.math.Vector2
interface ISize2 {
val size : Vector2
}<file_sep>/core/src/com/ascending/games/engine/view/texture/TextureManager.kt
package com.ascending.games.engine.view.texture
import com.badlogic.gdx.graphics.Texture
class TextureManager {
private val textures = mutableMapOf<String, Texture>()
fun getTexture(name : String) : Texture {
var loadedTexture = textures[name]
if (loadedTexture == null) {
loadedTexture = Texture(name)
textures[name] = loadedTexture
}
return loadedTexture
}
fun dispose() {
textures.forEach { _, texture -> texture.dispose() }
}
}<file_sep>/core/src/com/ascending/games/engine/edit/resource/JSONResource.kt
package com.ascending.games.engine.edit.resource
import com.badlogic.gdx.utils.Json
class JSONResource(override var uri : String) : com.ascending.games.engine.edit.resource.IResource {
companion object {
const val FILE_TYPE = ".json"
}
override val contents = mutableListOf<Any>()
override var isLoaded = false
fun createJson() : Json {
val json = Json()
return json
}
override fun serialize(): String {
val json = createJson()
return json.toJson(contents)
}
override fun deserialize(serialized: String) {
val json = createJson()
@Suppress("UNCHECKED_CAST")
contents.addAll(json.fromJson(List::class.java, serialized) as List<Any>)
}
}<file_sep>/core/src/com/ascending/games/engine/edit/resource/ResourceFactory.kt
package com.ascending.games.engine.edit.resource
class ResourceFactory {
fun createResource(uri : String) : com.ascending.games.engine.edit.resource.IResource {
if (uri.endsWith(JSONResource.FILE_TYPE)) {
return JSONResource(uri)
} else {
return com.ascending.games.engine.edit.resource.DefaultResource(uri)
}
}
}<file_sep>/core/test/com/ascending/games/engine/edit/resource/DefaultResourceTest.kt
package com.ascending.games.engine.edit.resource
import com.badlogic.gdx.Gdx
import com.badlogic.gdx.backends.lwjgl.LwjglFiles
import org.junit.After
import org.junit.Assert.*
import org.junit.Before
import org.junit.Test
// Unit tests for DefaultResource covering the save/load/unload/reload round-trip
// against the real filesystem (via libGDX's desktop LwjglFiles backend).
class DefaultResourceTest {
    val defaultResource = com.ascending.games.engine.edit.resource.DefaultResource("testUri")
    @Before
    fun setup() {
        // DefaultResource resolves its file through Gdx.files, so a desktop
        // backend must be installed before each test.
        Gdx.files = LwjglFiles()
        defaultResource.contents.add("TEST")
    }
    @After
    fun cleanUp() {
        // Delete the file written by save() so the tests stay independent.
        if (defaultResource.getFile().exists()) {
            defaultResource.getFile().delete()
        }
    }
    @Test
    fun getFile() {
        assertNotNull(defaultResource.getFile())
    }
    @Test
    fun save() {
        // Saving marks the resource loaded and writes the serialized contents to disk.
        defaultResource.save()
        assertTrue(defaultResource.isLoaded)
        assertTrue(defaultResource.getFile().exists())
        assertEquals("TEST", defaultResource.getFile().readString())
    }
    @Test
    fun unload() {
        // Unloading clears the in-memory contents and resets the loaded flag.
        defaultResource.save()
        defaultResource.unload()
        assertFalse(defaultResource.isLoaded)
        assertTrue(defaultResource.contents.isEmpty())
    }
    @Test
    fun load() {
        // Loading restores previously saved contents from disk.
        defaultResource.save()
        defaultResource.unload()
        defaultResource.load()
        assertTrue(defaultResource.isLoaded)
        assertTrue(defaultResource.contents.contains("TEST"))
    }
    @Test
    fun reload() {
        // Reload discards unsaved in-memory changes ("TEST2") and re-reads the saved state.
        defaultResource.save()
        defaultResource.load()
        defaultResource.contents.add("TEST2")
        defaultResource.reload()
        assertTrue(defaultResource.contents.contains("TEST"))
        assertFalse(defaultResource.contents.contains("TEST2"))
    }
}<file_sep>/core/src/com/ascending/games/engine/edit/action/IAction.kt
package com.ascending.games.engine.edit.action
/**
 * A single executable operation (command-style interface).
 */
interface IAction {
    /** Whether [execute] may currently be invoked. */
    val canExecute : Boolean
    /** Perform the action; returns true on success. */
    fun execute() : Boolean
}<file_sep>/core/src/com/ascending/games/engine/model/geometry/IPosition2.kt
package com.ascending.games.engine.model.geometry
import com.badlogic.gdx.math.Vector2
/** Anything that exposes a 2D position. */
interface IPosition2 {
    /** Current position of this object. */
    val position : Vector2
}<file_sep>/core/test/com/ascending/games/engine/model/data/ObservableMapTest.kt
package com.ascending.games.engine.model.data
import com.ascending.games.engine.model.data.ObservableMap
import org.junit.Assert.assertEquals
import org.junit.Test
// Unit tests for ObservableMap: verifies that mutations fire the onPut/onRemove/onChange
// callbacks with the affected entry, and that read-only accessors delegate to the wrapped map.
class ObservableMapTest {
    private val data = mutableMapOf(Pair(1,1), Pair(2,2), Pair(3,3))
    private val map = ObservableMap(data)
    @Test
    fun put() {
        // Record every callback invocation and assert on the recorded list afterwards.
        // (Asserting inside the lambdas alone would pass vacuously if no callback fired.)
        val callbacks = mutableListOf<Pair<Int, Int>>()
        map.onPut += { key, value ->
            callbacks.add(Pair(key, value))
        }
        map.onChange += { key, value ->
            callbacks.add(Pair(key, value))
        }
        map.put(4, 4)
        // One onPut followed by one onChange, each carrying the inserted entry
        // (same per-mutation ordering asserted by putAll below).
        assertEquals(listOf(Pair(4, 4), Pair(4, 4)), callbacks)
    }
    @Test
    fun putAll() {
        val callbacks = mutableListOf<Pair<Int, Int>>()
        map.onPut += { key, value ->
            callbacks.add(Pair(key, value))
        }
        map.onChange += { key, value ->
            callbacks.add(Pair(key, value))
        }
        map.putAll(mutableMapOf(Pair(3, 4), Pair(4, 4)))
        // Each inserted entry triggers onPut then onChange, in insertion order.
        assertEquals(listOf(Pair(3, 4), Pair(3, 4), Pair(4, 4), Pair(4, 4)), callbacks)
    }
    @Test
    fun clear() {
        val callbacks = mutableListOf<Pair<Int, Int>>()
        map.onRemove += { key, element ->
            callbacks.add(Pair(key, element))
        }
        map.onChange += { key, element ->
            callbacks.add(Pair(key, element))
        }
        map.clear()
        // Every removed entry triggers onRemove then onChange.
        assertEquals(listOf(Pair(1, 1), Pair(1, 1), Pair(2, 2), Pair(2, 2), Pair(3, 3), Pair(3, 3)), callbacks)
    }
    @Test
    fun remove() {
        // Same recording technique as put(): assert the callbacks actually fired.
        val callbacks = mutableListOf<Pair<Int, Int>>()
        map.onRemove += { key, value ->
            callbacks.add(Pair(key, value))
        }
        map.onChange += { key, value ->
            callbacks.add(Pair(key, value))
        }
        map.remove(2)
        // One onRemove followed by one onChange, each carrying the removed entry.
        assertEquals(listOf(Pair(2, 2), Pair(2, 2)), callbacks)
    }
    @Test
    fun size() {
        assertEquals(data.size, map.size)
    }
    @Test
    fun keys() {
        assertEquals(data.keys, map.keys)
    }
    @Test
    fun values() {
        assertEquals(data.values, map.values)
    }
    @Test
    fun isEmpty() {
        assertEquals(data.isEmpty(), map.isEmpty())
    }
}<file_sep>/core/src/com/ascending/games/engine/model/data/IObservableList.kt
package com.ascending.games.engine.model.data
/**
 * A MutableList that exposes callbacks for element insertion and removal.
 *
 * Bulk operations are expressed in terms of the indexed primitives
 * (add(index, element) / removeAt(index)) so concrete implementations only
 * need to instrument those primitives to notify listeners.
 */
interface IObservableList<E> : MutableList<E> {
    /** Callbacks registered for element additions (index, element). */
    val onAdd : MutableCollection<(Int, E) -> Unit>
    /** Callbacks registered for element removals. */
    val onRemove : MutableCollection<(E) -> Unit>
    override fun add(element: E): Boolean {
        add(size, element)
        return true
    }
    override fun addAll(index: Int, elements: Collection<E>): Boolean {
        var currentIndex = index
        elements.forEach { add(currentIndex++, it) }
        // Per the MutableList contract, return whether the list changed;
        // an empty collection changes nothing.
        return elements.isNotEmpty()
    }
    override fun addAll(elements: Collection<E>): Boolean {
        elements.forEach { add(it) }
        return elements.isNotEmpty()
    }
    override fun remove(element: E): Boolean {
        // Removes only the first occurrence, matching MutableCollection.remove.
        val indexOf = indexOf(element)
        if (indexOf == -1) return false
        removeAt(indexOf)
        return true
    }
    override fun removeAll(elements: Collection<E>): Boolean {
        // Iterate backwards so removals do not shift the indices still to visit,
        // and so ALL occurrences of every element are removed.
        // (The previous `elements.any { remove(it) }` short-circuited after the
        // first successful removal, leaving later elements in place, and removed
        // at most one occurrence per element.)
        var changed = false
        for (i in size - 1 downTo 0) {
            if (elements.contains(this[i])) {
                removeAt(i)
                changed = true
            }
        }
        return changed
    }
    override fun retainAll(elements: Collection<E>): Boolean {
        // Mirror of removeAll: walk backwards, drop everything NOT in [elements].
        var changed = false
        for (i in size - 1 downTo 0) {
            val element = this[i]
            if (!elements.contains(element)) {
                removeAt(i)
                changed = true
            }
        }
        return changed
    }
}<file_sep>/core/src/com/ascending/games/engine/view/ui/ChangeListenerService.kt
package com.ascending.games.engine.view.ui
import com.badlogic.gdx.scenes.scene2d.Actor
import com.badlogic.gdx.scenes.scene2d.utils.ChangeListener
// Helper for building scene2d ChangeListeners from plain Kotlin callbacks.
class ChangeListenerService {
    // Wrap [callback] in a ChangeListener that invokes it and then marks the event handled.
    fun createChangeListener(callback : () -> Unit) : ChangeListener {
        return object : ChangeListener() {
            override fun changed(event: ChangeEvent?, actor: Actor?) {
                // Guard clause: nothing to do when no event was delivered.
                val changeEvent = event ?: return
                callback()
                changeEvent.handle()
            }
        }
    }
}<file_sep>/core/src/com/ascending/games/engine/view/Toolkit.kt
package com.ascending.games.engine.view
import com.ascending.games.engine.view.texture.TextureManager
import com.badlogic.gdx.math.Vector2
// Bundles the view-layer managers and disposes them together.
class Toolkit(viewportSize : Vector2) {
    val sceneManager: SceneManager2 = SceneManager2(viewportSize)
    val textureManager: TextureManager = TextureManager()

    // Release the resources held by the managed components
    // (scene manager first, then texture manager — same order as above).
    fun dispose() {
        sceneManager.dispose()
        textureManager.dispose()
    }
}<file_sep>/core/src/com/ascending/games/engine/model/data/IObservableMap.kt
package com.ascending.games.engine.model.data
/**
 * A MutableMap that exposes callbacks for entry insertion, removal, and any change.
 */
interface IObservableMap<E, F> : MutableMap<E, F> {
    /** Callbacks registered for entry insertions (key, value). */
    val onPut : MutableCollection<(E, F) -> Unit>
    /** Callbacks registered for entry removals (key, value). */
    val onRemove : MutableCollection<(E, F) -> Unit>
    /** Callbacks registered for any mutation (key, value). */
    val onChange : MutableCollection<(E, F) -> Unit>

    // Bulk insert delegates to put() entry-by-entry so each insertion
    // goes through the notifying path.
    override fun putAll(from: Map<out E, F>) {
        for ((key, value) in from) {
            put(key, value)
        }
    }
}
"Markdown",
"Kotlin"
] | 47 | Kotlin | AscendingGames/AscendingEngine | f75f3d38849d0c4b64c0f7e508cfb955c6d1a48d | b5ad7523b1c2d20f393e268515d8812d318c6ad9 |
refs/heads/main | <file_sep>#importamos la libreria sys para el metodo salir de la clase Menu
import sys
#definimos la clase Articulo con los
#metodos imprimirDatosArticulo, getId, y su respectivo constructor
class Articulo:
#metodo constructor
#cada vez que se instancie un objeto de la clase Articulo se le tendran que pasar como parametros 4 variables, codigo, nombre, cantidad, precio, para despues vaciarlas en
#otras variable de tipo privado que seran los atributos de la clase u objeto
def __init__(self,codigo,nombre,cantidad,precio):
self.__codigo=codigo
self.__nombre=nombre
self.__cantidad=cantidad
self.__precio=precio
#el metodo imprimirDatosArticulo sirve para
#imprimir cada atributo de la clase
#se define el metodo
def imprimirDatosArticulo(self):
#las siguientes 4 lineas imprimen los atributos del objeto
print(f"Codigo: {self.__codigo}")
print(f"Nombre: {self.__nombre}")
print(f"Cantidad: {self.__cantidad}")
print(f"Precio: {self.__precio}")
#el metodo getId se define con el proposito de
#poder acceder al codigo o id del articulo
#se define el metodo
def getId(self):
#retorna el codigo id
return self.__codigo
# se define la clase Stock que servira como clase de almacenamiento/registro, impresora y eliminadora de los articulos gracias a sus respectivos metodos
class Stock:
#definimos el metodo constructor con un espacio en memoria de tipo lista
def __init__(self):
self.__stock=list()
#definimos otro metodo llamaso añadirArticulo que tendra la funcion de añadir un articulo al atributo stock de la clase Stock
def añadirArticulo(self,articulo):
#se toma el parametro que se pasa y se añade con la funcion de la lista 'append'
self.__stock.append(articulo)
#se define el metodo eliminarArticulo que sirve para eliminar los articulos del stock, se pasa como parametro el codigo del producto
def eliminarArticulo(self,codigo):
#se declara una lista para almacenar los codigos
self.indices=list()
#se recorre la lista stock y se toma con ayuda de la funcion getId de la clase Articulo el id
for i in self.__stock:
self.indices.append(i.getId())
#recorremos la lista indices para despuea comparar si es igual al codigo que pasamos colo parametro
for i in self.indices:
#si es igual lo almacenamos en una variable llamada indixe
if i==codigo:
indice=self.indices.index(i)
#luego esa variable la pasamos como parametro al metodo pop de la lista stock
self.__stock.pop(indice)
#listo se ha eliminado
#se define la clase mostrar articulos, la cual
#ayuda a imprimir los articulos almacenados hasta el momento
def mostrarArticulos(self):
#se reforre cada valor de la lista stock
for i in self.__stock:
print("------------------------------------------------")
#llamamos al metodo imprimirDatosArticulo de cada elemento de la lista
i.imprimirDatosArticulo()
#se define el metodo checarIds para verificar que los ids no se repitan
def checarIds(self):
#definimos una variable donde iran los ids
self.__ids=list()
for i in self.__stock:
#recorremos cada elemento y obtenemos su id con el metodo getId
self.__ids.append(i.getId())
#retornamos la lista
return self.__ids
#metodo que sirve para devolver la lista de articulos
def articulos(self):
#retornamos la lista
return self.__stock
# se define una clase llamada Menu para el menu
class Menu:
#se define wl metodo constructor que actuara como lanzador del menu
def __init__(self):
#llamamos al metodo mostrarMenu para lanzar el menu
self.mostrarMenu()
#se define el metodo que contiene el menu
def mostrarMenu(self):
#imprimimos laa opciones
print('Sistema De Alta y Baja de Articulos')
print("1)Añadir Articulo\n2)Eliminar Articulo\n3)Imprimir Articulos\n4)Salir")
#definimos varios ciclo while seguidos de un bloque try para controlar excepciones y solo se pueda introducir un valor valido
while True:
try:
respuesta = int(input())
except:
print("Valor no admintido.(Solo enteros)")
else:
break
if respuesta==1:
avanzar=True
#definimos varios ciclo while seguidos de un bloque try para controlar excepciones y solo se pueda introducir un valor valido
while avanzar:
print("Codigo del producto")
try:
id=int(input())
except:
print("Solo enteros.")
else:
if len(stock.articulos())==0:
break
elif len(stock.articulos())>0:
for i in stock.checarIds():
if i==id:
print("Ya hay un producto con este codigo. Intente con otro")
continue
else:
avanzar=False
name=input("Nombre del producto\n")
while True:
print("Cantidad de producto")
try:
quantity=int(input())
except:
print("Solo enteros.")
else:
break
while True:
print("Precio del producto")
try:
price = int(input())
except:
print("Solo enteros.")
else:
break
#definimos varios ciclo while seguidos de un bloque try para controlar excepciones y solo se pueda introducir un valor valido
while True:
opcion=input("¿Desea guardar los datos? si/no")
if opcion.lower()=="si":
articulo=Articulo(id,name,quantity,price)
stock.añadirArticulo(articulo)
print("Articulo guardado con exito.")
break
else:
print("Intenta de nuevo")
#opcion que muestra en pantalla los articulos almacenados hasta el momento para despues elegir el que queremos eliminar
elif respuesta==2:
#imprimimos los articulos
stock.mostrarArticulos()
#definimos varios ciclo while seguidos de un bloque try para controlar excepciones y solo se pueda introducir un valor valido
while True:
try:
eleccion = int(input("introduce el codido del articulo que desea borrar\n 0 para regresar \n"))
except:
print("Valor no valido")
else:
if eleccion==0:
break
else:
try:
#si pasamos los filtros se elimina el articulo con el metodo eliminae articulo
stock.eliminarArticulo(eleccion)
except:
#si no existe el articulo o es un valor no valido nos arroja un error
print("Codigo no existe, o valor no es admitido, intente de nuevo")
continue
else:
#si si se cumple nos arroja el mensaje de exito
print("Articulo eliminado con exito.")
break
#opcion que nos imprime los articulos en dado caso que tengamos
#gracias al metdo mostrarArticulos del objeto stock
elif respuesta==3:
stock.mostrarArticulos()
elif respuesta==4:
#metodo que finaliza el programa con el metodo exit de la libreria sys
sys.exit()
else:
print("Estimado usuario, la opcion no existe")
#instanciamos el stock
stock=Stock()
#definimos un bucle while para la continua ejecucion del programa y dentro de el instanciamos un objeto de la clase Menu
while True:
menu=Menu()
| b52a854d5283a1ad75fb523427c0696de465d1cb | [
"Python"
] | 1 | Python | Yahaira22/PIA_Programacion_Avanzada | fdcfbfe93b03da70965cdf2097a0f7ec771e4a09 | a01c16553c98b405b39e1eb9de2b18faf2120ffb |
refs/heads/master | <file_sep>#include <stdio.h>
#include <time.h>
#include <fcntl.h>
#include <errno.h>
#include <sys/select.h>
#include <stdlib.h>
#include <sys/epoll.h>
#include <string.h>
#include <errno.h>
#include <pthread.h>
#include <sys/types.h>
#include <netinet/tcp.h>
#include <signal.h>
#include "tools.h"
//返回老的文件状态标志
s32 tools_set_fd_nonblock(s32 fd)
{
s32 ofl;
s32 iRet;
ofl = fcntl(fd, F_GETFL, 0);
iRet = ofl | O_NONBLOCK;
iRet = fcntl(fd, F_SETFL, iRet);
if(iRet != TOOLS_SUCCESS)
return TOOLS_FAIL;
return ofl;
}
s32 tools_connect_tmout
(s32 inet_fd, struct sockaddr *ser_addr, u32 addr_len, u32 tmout)
{
s32 iRet;
//老的inet_fd文件状态标志
s32 old_fd_fl = 0;
fd_set wr_set, rd_set;
struct timeval tm;
//把inet_fd的文件状态标志设为非阻塞模式
old_fd_fl = tools_set_fd_nonblock(inet_fd);
if(old_fd_fl == TOOLS_FAIL)
return(TOOLS_FAIL);
//如果errno不是EINPROGRESS,
//那么不是因为三次握手没有完成而导致的连接失败
iRet = connect(inet_fd, ser_addr, addr_len);
if(iRet == TOOLS_SUCCESS || errno != EINPROGRESS)
goto out;
tm.tv_sec = tmout;
tm.tv_usec = 0;
FD_ZERO(&wr_set);
FD_SET(inet_fd, &wr_set);
FD_ZERO(&rd_set);
FD_SET(inet_fd, &rd_set);
iRet = select(inet_fd+1, &rd_set, &wr_set, NULL, &tm);
switch(iRet)
{
//当连接遇到错误时,inet_fd变为即可读又可写,这样select会返回2
//前提是select设置了rd_set
//网上通过getsockopt(inet_fd, SOL_SOCKET, SO_ERROR)来获取错误
case -1:
iRet = EINTR;
break;
case 0:
iRet = TOOLS_ERR_TMOUT;
break;
case 1:
iRet = TOOLS_SUCCESS;
break;
case 2:
iRet = TOOLS_ERR_CONN_ERR;
break;
default:
iRet = TOOLS_ERR_UNKNOWN;
break;
}
out:
//还原成之前的状态标志
fcntl(inet_fd, F_SETFL, old_fd_fl);
return iRet;
}
//============================================================================================
// common(all type) queue(multi-thread)
//============================================================================================
typedef struct __q
{
u32 _capacity;
u32 _sz;
u32 _r_index;
u32 _w_index;
u32 _tmout; /* 获取_lock的等待时间 */
pthread_mutex_t _lock;
void *_eleptr[];
} queue, *p_que;
s32 tools_init_queue(u32 cap, u32 wtm, queue **ackpptr)
{
s32 iRet;
u32 ptr_sz = sizeof(void *);
queue *pAck = malloc(sizeof(queue) + ptr_sz*cap);
if(pAck == NULL)
return TOOLS_ERR_MALLOC;
pAck->_capacity = cap;
pAck->_sz = 0;
pAck->_r_index = 0;
pAck->_w_index = 0;
pAck->_tmout = wtm;
iRet = pthread_mutex_init(&pAck->_lock, NULL);
if(iRet != TOOLS_SUCCESS)
{
free(pAck);
return iRet;
}
memset(&pAck->_eleptr, 0, sizeof(cap * ptr_sz));
*ackpptr = pAck;
return TOOLS_SUCCESS;
}
s32 tools_queue_add(void *eleptr, u32 len, queue *qptr)
{
s32 iRet;
TOOLS_MUTEX_TIMED_LOCK(&qptr->_lock, qptr->_tmout, &iRet);
if(iRet != TOOLS_SUCCESS)
return TOOLS_ERR_TMOUT;
if(qptr->_sz >= qptr->_capacity)
{
pthread_mutex_unlock(&qptr->_lock);
return TOOLS_FAIL;
}
++qptr->_sz;
qptr->_eleptr[qptr->_w_index++] = eleptr;
if(qptr->_w_index >= qptr->_capacity)
qptr->_w_index = 0;
printf("addqueuesz:%u\n", qptr->_sz);
pthread_mutex_unlock(&qptr->_lock);
return TOOLS_SUCCESS;
}
s32 tools_queue_get(void **elepptr, queue *qptr)
{
s32 iRet;
void *rtptr = NULL;
TOOLS_MUTEX_TIMED_LOCK(&qptr->_lock, qptr->_tmout, &iRet);
if(iRet != TOOLS_SUCCESS)
return TOOLS_ERR_TMOUT;
if(qptr->_sz <= 0)
{
pthread_mutex_unlock(&qptr->_lock);
return TOOLS_ERR_NOT_EXISTED;
}
--qptr->_sz;
rtptr = qptr->_eleptr[qptr->_r_index++];
//要记得断掉这个link
qptr->_eleptr[qptr->_r_index-1] = NULL;
if(qptr->_r_index >= qptr->_capacity)
qptr->_r_index = 0;
printf("getqueuesz:%u\n", qptr->_sz);
pthread_mutex_unlock(&qptr->_lock);
*elepptr = rtptr;
return TOOLS_SUCCESS;
}
//=====================================================================================
// common list
//=====================================================================================
//超时5s获取锁
/*
s32 tools_task_list_get_task(ptask_list list, task **pptask)
{
s32 iRet;
task *pAck = NULL;
struct timespec tmout;
TOOLS_MUTEX_TIMED_LOCK(&list->lock, 5, &iRet);
if(iRet != TOOLS_SUCCESS)
return iRet;
if(list->sz <= 0)
{
pthread_mutex_unlock(&list->lock);
return TOOLS_ERR_NOT_EXISTED;
}
pAck = list->p_head;
list->p_head = pAck->p_next;
pAck->p_next = NULL;
if(list->sz == 1)
list->p_tail = NULL;
--list->sz;
pthread_mutex_unlock(&list->lock);
*pptask = pAck;
return TOOLS_SUCCESS;
}
s32 tools_task_list_add_task(ptask_list list, task *ptask)
{
s32 iRet;
TOOLS_MUTEX_TIMED_LOCK(&list->lock, 5, &iRet);
if(iRet != TOOLS_SUCCESS)
return iRet;
if(list->sz >= list->capacity)
{
pthread_mutex_unlock(&list->lock);
return TOOLS_ERR_NOT_EXISTED;
}
if(list->sz <= 0)
list->p_head = list->p_tail = ptask;
else
{
list->p_tail->p_next = ptask;
list->p_tail = ptask;
}
ptask->p_next = NULL;
++list->sz;
pthread_mutex_unlock(&list->lock);
return TOOLS_SUCCESS;
}
*/
//================================================================================
// thread pool
//================================================================================
typedef s32 (*tools_cb_exec_task)(void *prmptr, u32 sz, void **ackpptr, u32 *lenptr);
typedef struct __job
{
s32 prm_len; /* param length */
s32 result; /* returned by cb */
u32 acklen;
void *ackptr;
tools_cb_exec_task cb;
u8 param[];
} job;
typedef struct __thr_pool
{
u32 _base_num; /* thread num when init */
u32 _now_num; /* current thread num */
queue *_job_que;
pthread_cond_t _cond;
pthread_mutex_t _cond_mutex;
// pthread_mutex_t _mutex;
u8 remain[8]; /* remain byte */
} thr_pool, *p_thr_pool;
s32 tools_thread_pool_new_job(void *prm, u32 len, tools_cb_exec_task cb, job **jbpptr)
{
job *jbptr = malloc(sizeof(job)+len);
if(jbptr == NULL)
return TOOLS_ERR_MALLOC;
jbptr->acklen = 0;
jbptr->ackptr = NULL;
jbptr->cb = cb;
jbptr->result = 0;
jbptr->prm_len = len;
memcpy(&jbptr->param, prm, len);
*jbpptr = jbptr;
return TOOLS_SUCCESS;
}
static s32 tools_thread_pool_init(u32 tnum, u32 wtm, u32 jbnum, thr_pool **tplpptr)
{
s32 iRet;
thr_pool *ackptr = malloc(sizeof(thr_pool));
if(ackptr == NULL)
return TOOLS_ERR_MALLOC;
ackptr->_base_num = tnum;
ackptr->_now_num = 0;
iRet = tools_init_queue(jbnum, wtm, &ackptr->_job_que);
if(iRet != TOOLS_SUCCESS)
goto error_2;
iRet = pthread_cond_init(&ackptr->_cond, NULL);
if(iRet != TOOLS_SUCCESS)
goto error_2;
iRet = pthread_mutex_init(&ackptr->_cond_mutex, NULL);
if(iRet != TOOLS_SUCCESS)
goto error_1;
memset(&ackptr->remain, 0, sizeof(ackptr->remain));
*tplpptr = ackptr;
return TOOLS_SUCCESS;
error_1:
pthread_mutex_destroy(&ackptr->_cond_mutex);
error_2:
free(ackptr);
return TOOLS_FAIL;
}
//返回创建成功的数量
static s32 tools_thread_pool_create_thr(u32 tnum, void *(*cb)(void *), void *argptr)
{
s32 iRet;
s32 i = 0;
u32 cnt = 0;
pthread_t tid;
for(i=0; i<tnum; ++i)
{
iRet = pthread_create(&tid, NULL, cb, argptr);
if(iRet == TOOLS_SUCCESS)
{
++cnt;
pthread_detach(tid);
}
}
return cnt;
}
static void * tools_cb_thread_pool(void *argptr)
{
s32 iRet;
job *jbptr = NULL;
thr_pool *tplptr = argptr;
for( ; ; )
{
/* 之所以把这部分移到下面是有很重要的原因的。
想象一下,有一部分线程在处理队列中的job,只有几个线程
处于休眠状态,此时新加进来一批任务,唤醒睡眠的线程,
这些线程处理的job数量少于新加进来的job,这样当所有线程处理完后?
仍有任务在队列中而得不到处理,只有等到下一次有任务加进来才能有一部分
得到处理,这样停留在队列中的任务可能会越来越多,因此只有让队列中没有了
任务的时候线程才能去休眠。
iRet = pthread_mutex_lock(&tplptr->_cond_mutex);
if(iRet != TOOLS_SUCCESS)
continue;
//这里不需要循环,即使可能多个线程被唤醒,也不会进行抢占。
//因为接下来仍需获取队列锁。
iRet = pthread_cond_wait(&tplptr->_cond, &tplptr->_cond_mutex, );
//错误返回,应该没有加锁返回,这样就不用释放锁
if(iRet != TOOLS_SUCCESS)
continue;
*/
iRet = tools_queue_get(&jbptr, tplptr->_job_que);
//job队列中没有job需要处理,那么让线程休眠
//等待下一次触发(被唤醒再来处理)
if(iRet != TOOLS_SUCCESS)
{
iRet = pthread_mutex_lock(&tplptr->_cond_mutex);
if(iRet != TOOLS_SUCCESS)
continue;
//这里不需要循环,即使可能多个线程被唤醒,也不会进行抢占。
//因为接下来仍需获取队列锁。
pthread_cond_wait(&tplptr->_cond, &tplptr->_cond_mutex);
//错误返回,应该没有加锁返回,这样就不用释放锁
//pthread_cond_wait成功返回后会锁住_cond_mutex
(void)pthread_mutex_unlock(&tplptr->_cond_mutex);
continue;
}
if(jbptr->cb != NULL)
{
jbptr->result = (*jbptr->cb)((void *)jbptr->param, jbptr->prm_len,
&jbptr->ackptr, &jbptr->acklen);
}
free(jbptr);
jbptr = NULL;
}
}
//若创建的线程少于tnum,那么后面动态增加
s32 tools_thread_pool_create(u32 tnum, u32 wtm, u32 jbnum, thr_pool **tplpptr)
{
s32 iRet;
pthread_t tid;
iRet = tools_thread_pool_init(tnum, wtm, jbnum, tplpptr);
if(iRet != TOOLS_SUCCESS)
return iRet;
iRet = tools_thread_pool_create_thr(tnum, tools_cb_thread_pool, *tplpptr);
(*tplpptr)->_now_num += iRet;
return TOOLS_SUCCESS;
}
s32 tools_thread_pool_add_job(job *jbptr, thr_pool *tplptr)
{
s32 iRet;
queue *qptr = tplptr->_job_que;
iRet = tools_queue_add(jbptr, sizeof(job)+jbptr->prm_len, qptr);
if(iRet != TOOLS_SUCCESS)
return iRet;
(void)pthread_cond_broadcast(&tplptr->_cond);
return TOOLS_SUCCESS;
}
//================================================================================
// server high-concurrence
//================================================================================
#define TOOLS_HIGH_CONCURRENCE_NUM 1024
#define TOOLS_HOGH_CONCURRENCE_MAX_MSG_LEN 512
#define TOOLS_HIGH_CONCURRENCE_RPC_MAGIC 0x12345678
#define TOOLS_HIGH_CONCURRENCE_TRANS_TIMEO 5
//如果不想让客户端的链接是长连接就使用undef掉
//长连接的套接字只会在recv方法中检测到客户端关闭之后才会关闭
#define TOOLS_HIGH_CONCURRENCE_LONG_CONN 1
#ifdef TOOLS_HIGH_CONCURRENCE_LONG_CONN
#define TOOLS_HIGH_CONCURRENCE_KEEP_IDLE 10
#define TOOLS_HIGH_CONCURRENCE_KEEP_INTR 5
#define TOOLS_HIGH_CONCURRENCE_KEEP_CNT 2
#endif
typedef s32 (*tools_high_concurrence_cb_rpc)
(void * prmptr,u32 len,void **ackpptr,u32 *lenptr);
typedef enum __rpc_msg_type
{
_RPC_MSG_BUTT
} _rpc_type;
typedef struct __rpc_msg
{
u32 _magic;
u32 _prm_len;
u32 _head_len;
u32 _result;
s32 _clnt;
_rpc_type _type;
u8 _param[];
} rpc_msg;
typedef struct __rpc_type_cfg
{
_rpc_type _type;
tools_high_concurrence_cb_rpc _cb;
} rpc_type_cfg;
const rpc_type_cfg g_tools_high_concurrence_rpc_cfg[] =
{
};
static s32 g_tools_high_concurrence_serv_fd;
static s32 g_tools_high_concurrence_poll_fd;
static thr_pool *gp_tools_high_concurrence_recv_thr_pool;
static thr_pool *gp_tools_high_concurrence_send_thr_pool;
static struct epoll_event events[TOOLS_HIGH_CONCURRENCE_NUM];
static s32 tools_high_concurrence_new_rpc_msg
(s32 result, u32 type, void *prmptr, u32 prmlen, s32 _clnt, rpc_msg **msgpptr)
{
rpc_msg *msgptr = malloc(sizeof(rpc_msg) + prmlen);
if(msgptr == NULL)
return TOOLS_FAIL;
msgptr->_clnt = _clnt;
msgptr->_head_len = sizeof(rpc_msg);
msgptr->_magic = TOOLS_HIGH_CONCURRENCE_RPC_MAGIC;
msgptr->_result = result;
msgptr->_type = type;
msgptr->_prm_len = prmlen;
memcpy(&msgptr->_param, prmptr, prmlen);
*msgpptr = msgptr;
return TOOLS_SUCCESS;
}
static void tools_high_concurrence_set_clnt_sockopt(s32 clnt)
{
//接收和发送的超时值
u32 sndtmout = TOOLS_HIGH_CONCURRENCE_TRANS_TIMEO;
u32 rcvtmout = TOOLS_HIGH_CONCURRENCE_TRANS_TIMEO;
#ifdef TOOLS_HIGH_CONCURRENCE_LONG_CONN
u32 keepalive = 1;
u32 keepidle = TOOLS_HIGH_CONCURRENCE_KEEP_IDLE;
u32 keepinterval = TOOLS_HIGH_CONCURRENCE_KEEP_INTR;
u32 keepcnt = TOOLS_HIGH_CONCURRENCE_KEEP_CNT;
//打开自动探测
setsockopt(clnt, SOL_SOCKET, SO_KEEPALIVE, &keepalive, sizeof(u32));
//设置多久没有数据交换开始检测
setsockopt(clnt, IPPROTO_TCP, TCP_KEEPIDLE, &keepidle, sizeof(u32));
//设置发包间隔
setsockopt(clnt, IPPROTO_TCP, TCP_KEEPINTVL, &keepinterval, sizeof(u32));
//设置发探测包的个数
setsockopt(clnt, IPPROTO_TCP, TCP_KEEPCNT, &keepcnt, sizeof(u32));
//当发出的探测包接收不到一个响应时,就会把clnt的套接字变为可读写,并且
//当recv时会返回-1,并置errno为ETIMEOUT
#endif
setsockopt(clnt, SOL_SOCKET, SO_SNDTIMEO, &sndtmout, sizeof(u32));
return ;
}
static s32 tools_high_concurrence_deal_send_clnt
(void * prmptr,u32 len,void **ackpptr,u32 * lenptr)
{
s32 iRet;
void *ackptr = NULL;
u32 acklen = 0;
rpc_type_cfg *pCfg = NULL;
rpc_msg *_msgptr = prmptr;
if(_msgptr->_type >= _RPC_MSG_BUTT)
return TOOLS_ERR_NOT_SUPPORTED;
pCfg = &g_tools_high_concurrence_rpc_cfg[_msgptr->_type];
iRet = pCfg->_cb(_msgptr->_param, _msgptr->_prm_len, &ackptr, &acklen);
//不成功会舍弃返回的数据,并把错误的结果发送过去
if(iRet != TOOLS_SUCCESS)
{
if(ackptr != NULL)
free(ackptr);
ackptr = NULL;
acklen = 0;
}
iRet = tools_high_concurrence_new_rpc_msg
(iRet, _msgptr->_type, ackptr, acklen, _msgptr->_clnt, &_msgptr);
if(iRet == TOOLS_SUCCESS)
{
send(_msgptr->_clnt, _msgptr, _msgptr->_head_len+_msgptr->_prm_len, 0);
free(_msgptr);
}
#ifndef TOOLS_HIGH_CONCURRENCE_LONG_CONN
close(_msgptr->_clnt);
#endif
}
static s32 tools_high_concurrence_recv_from_clnt
(void *prmptr, u32 len, void **ackpptr, u32 *lenptr)
{
s32 iRet;
rpc_msg *msgptr = NULL;
job *jbptr = NULL;
u32 recv_len= 0;
u8 recvbuf[TOOLS_HOGH_CONCURRENCE_MAX_MSG_LEN] = {0};
//套接字状态是非阻塞的
s32 clnt = *(s32 *)prmptr;
printf("coming into recv\n");
tools_high_concurrence_set_clnt_sockopt(clnt);
while((iRet = recv(clnt, recvbuf+recv_len, sizeof(recvbuf)-recv_len, 0)) > 0)
recv_len+=iRet;
//非阻塞socket当数据读取完了errno设置为EAGAIN
//若clnt的epoll的监听事件设置了EPOLLONTSHOT,
//那么应该赶快加入到监听树。
printf("iRet:%d, no:%d, %s\n", iRet, errno, strerror(errno));
if(iRet == -1 && errno == EAGAIN)
{
//接收到的数据错误
if(recv_len < sizeof(rpc_msg) ||
((rpc_msg *)recvbuf)->_magic != TOOLS_HIGH_CONCURRENCE_RPC_MAGIC ||
((rpc_msg *)recvbuf)->_type >= _RPC_MSG_BUTT)
goto error;
((rpc_msg *)recvbuf)->_clnt = clnt;
iRet = tools_thread_pool_new_job(recvbuf, recv_len,
tools_high_concurrence_deal_send_clnt, &jbptr);
if(iRet != TOOLS_SUCCESS)
goto error;
iRet = tools_thread_pool_add_job(jbptr, gp_tools_high_concurrence_send_thr_pool);
if(iRet != TOOLS_SUCCESS)
{
free(jbptr);
goto error;
}
return TOOLS_SUCCESS;
}
//若accept的套接字客户端那一端断开,会触发EPOLLIN和EPOLLOUT事件,
//此时读写非阻塞clnt套接字会产生SIGPIPE信号并会recv返回0
//这时候需要关闭clnt,防止服务端的这个clnt套接字是TIME_WAIT2状态.
//所以服务端要处理SIGPIPE信号,防止因为这个信号而导致进程终止
else if(iRet == 0)
{
printf("===============\n");
close(clnt);
return TOOLS_SUCCESS;
}
error:
//只要不是特别的原因,那么客户端都应该收到消息,
//所以统一发送服务器异常消息给客户端
iRet = tools_high_concurrence_new_rpc_msg(clnt, 0, NULL, 0, clnt, &msgptr);
if(iRet == TOOLS_SUCCESS)
{
send(clnt, msgptr, msgptr->_head_len+msgptr->_prm_len, 0);
free(msgptr);
}
#ifndef TOOLS_HIGH_CONCURRENCE_LONG_CONN
close(clnt);
#endif
return TOOLS_FAIL;
}
//单线程版
void * tools_server_high_concurrence(void *p_arg)
{
s32 iRet;
s32 clnt_fd;
s32 poll_fd = 0;
u32 clnt_addr_len = 0;
s32 serv_fd = *(s32 *)p_arg;
struct sockaddr_in clnt_addr;
struct epoll_event serv_event, clnt_event;
//设为非阻塞
iRet = tools_set_fd_nonblock(serv_fd);
if(iRet == TOOLS_FAIL)
return NULL;
//产生一个poll_fd
serv_event.data.fd = serv_fd;
serv_event.events = EPOLLIN | EPOLLET;
poll_fd = epoll_create(TOOLS_HIGH_CONCURRENCE_NUM);
g_tools_high_concurrence_poll_fd = poll_fd;
//把服务器套接字添加到监听红黑树
iRet = epoll_ctl(poll_fd, EPOLL_CTL_ADD, serv_fd, &serv_event);
if(iRet != TOOLS_SUCCESS)
{
close(serv_fd);
return NULL;
}
printf("server_fd: %d\n", serv_fd);
u32 test = 0;
for( ; ; )
{
u32 i=0;
iRet = epoll_wait(poll_fd, events, TOOLS_HIGH_CONCURRENCE_NUM, -1);
for(i=0; i<iRet; i++)
{
if(events[i].data.fd == serv_fd)
{
printf("xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\n");
clnt_fd = accept(serv_fd, &clnt_addr, &clnt_addr_len);
printf("clnt_fd: %d\n", clnt_fd);
iRet = tools_set_fd_nonblock(clnt_fd);
if(iRet == TOOLS_FAIL)
{
//关闭套接字,此时服务端的套接字变为FIN_WAIT2状态。
//服务端会向客户端发送FIN报文,客户端回应ACK报文,完成前两次挥手
//之后客户端再向服务器第一次可以写,此时服务端回应一个RST报文,之后
//再向服务端写数据时,客户端将产生一个SIGPIPE信号,默认动作是终止进程,
//从而强制完成后两次挥手
close(clnt_fd);
continue;
}
clnt_event.data.fd = clnt_fd;
clnt_event.events = EPOLLIN | EPOLLET;
iRet = epoll_ctl(poll_fd, EPOLL_CTL_ADD, clnt_fd, &clnt_event);
if(iRet != TOOLS_SUCCESS)
close(clnt_fd);
}
else if(events[i].events & EPOLLIN)
{
job *jbptr = NULL;
clnt_fd = events[i].data.fd;
printf("link clnt:%d\n", clnt_fd);
s32 iRet = tools_thread_pool_new_job(&clnt_fd, sizeof(s32),
tools_high_concurrence_recv_from_clnt, &jbptr);
if(iRet != TOOLS_SUCCESS)
goto error;
iRet = tools_thread_pool_add_job(jbptr, gp_tools_high_concurrence_recv_thr_pool);
if(iRet != TOOLS_SUCCESS)
{
free(jbptr);
goto error;
}
error:
#ifndef TOOLS_HIGH_CONCURRENCE_LONG_CONN
epoll_ctl(poll_fd, EPOLL_CTL_DEL, clnt_fd, &events[i]);
close(clnt_fd);
#endif
continue;
}
//ET模式下触发EPOLLOUT事件:
//1.缓冲区满->非满
//2.触发EPOLLIN事件会附带触发EPOLLOUT
//3.accept成功之后会触发一次EPOLLOUT
}
}
}
int main_tools_connect_tmout(int argc, char **argv)
{
s32 iRet;
s32 inetfd;
struct sockaddr_in addr =
{
.sin_family = AF_INET,
.sin_port = 8800
};
inet_pton(AF_INET, "192.168.149.130", &addr.sin_addr.s_addr);
inetfd = socket(AF_INET, SOCK_STREAM, 0);
iRet = tools_connect_tmout(inetfd, &addr, sizeof(struct sockaddr_in), 10);
printf("connect iRet: %d\n", iRet);
close(inetfd);
return 0;
}
void sig_handler_of_pipe(int signo)
{
static s32 i=0;
printf("i:%d\n", i++);
}
int main_server(int argc, char **argv)
{
u32 baddr = 0;
s32 reuseaddr = 1;
pthread_t tid;
struct sockaddr_in addr;
g_tools_high_concurrence_serv_fd = socket(AF_INET, SOCK_STREAM, 0);
setsockopt(g_tools_high_concurrence_serv_fd,
SOL_SOCKET, SO_REUSEADDR, &reuseaddr, sizeof(reuseaddr));
inet_pton(AF_INET, argv[1], &baddr);
addr.sin_family = AF_INET;
addr.sin_port = htons(8800);
addr.sin_addr.s_addr = baddr;
bind(g_tools_high_concurrence_serv_fd, &addr, sizeof(addr));
listen(g_tools_high_concurrence_serv_fd, 200);
tools_thread_pool_create(3, 5, 200, &gp_tools_high_concurrence_recv_thr_pool);
tools_thread_pool_create(3, 5, 200, &gp_tools_high_concurrence_send_thr_pool);
signal(SIGPIPE, sig_handler_of_pipe);
pthread_create(&tid, NULL, tools_server_high_concurrence, &g_tools_high_concurrence_serv_fd);
pthread_detach(tid);
while(1)
{
sleep(100);
}
return 0;
}
int main_client(int argc, char **argv)
{
s32 iRet;
u8 buf[32] = {0};
u32 baddr = 0;
s32 clnt_fd = 0;
s32 reuseaddr = 1;
struct sockaddr_in addr;
clnt_fd = socket(AF_INET, SOCK_STREAM, 0);
setsockopt(clnt_fd, SOL_SOCKET, SO_REUSEADDR, &reuseaddr, sizeof(reuseaddr));
inet_pton(AF_INET, "127.0.0.1", &baddr);
addr.sin_family = AF_INET;
addr.sin_port = htons(8800);
addr.sin_addr.s_addr = baddr;
iRet = tools_connect_tmout(clnt_fd, &addr, sizeof(addr), 5);
printf("conn: %d\n", iRet);
while(1)
{
printf("clnt:%d send_len: %d\n", clnt_fd, send(clnt_fd, "hello world", sizeof("hello world"), 0));
iRet = recv(clnt_fd, buf, 32, 0);
printf("clnt:%d iRet:%d\n", clnt_fd, ((rpc_msg *)buf)->_result);
sleep(10);
}
}
#define TEST_CLIENT
//#undef TEST_CLIENT
int main(int argc, char **argv)
{
#ifdef TEST_CLIENT
return main_client(argc, argv);
#else
return main_server(argc, argv);
#endif
}
<file_sep>GCC=/usr/bin/gcc
tools:tools.o
$(GCC) tools.o -o tools
tools.o:tools.c tools.h
$(GCC) -c tools.c
clean:
rm -f *.o
rm -f tools
rm -f test
<file_sep>#ifndef TOOLS_H
#define TOOLS_H
#include <netinet/in.h>
#include <sys/socket.h>
#define TOOLS_FAIL -1
#define TOOLS_SUCCESS 0
#define TOOLS_ERR_TMOUT 1
#define TOOLS_ERR_CONN_ERR 2
#define TOOLS_ERR_NOT_EXISTED 3
#define TOOLS_ERR_MALLOC 4
#define TOOLS_ERR_NOT_SUPPORTED 5
#define TOOLS_ERR_SERVER 6
#define TOOLS_ERR_PARAM 7
#define TOOLS_ERR_UNKNOWN 8
#define TOOLS_MUTEX_TIMED_LOCK(plock, tmout, pret) \
{ \
struct timespec abs_wait_tm; \
time_t tmm; \
time(&tmm); \
abs_wait_tm.tv_sec = tmm + (tmout); \
*(pret) = pthread_mutex_timedlock((plock), &abs_wait_tm); \
}
typedef unsigned char u8;
typedef signed char s8;
typedef unsigned short u16;
typedef signed short s16;
typedef unsigned int u32;
typedef signed int s32;
extern s32 tools_connect_tmout
(s32 inet_fd, struct sockaddr *ser_addr, u32 addr_len, u32 tmout);
extern void * tools_server_high_concurrence(void *p_arg);
#endif<file_sep>#include <sys/socket.h>
#include <sys/types.h>
#include <stdio.h>
#include <netinet/in.h>
#include <unistd.h>
#include <sys/fcntl.h>
#include <sys/ioctl.h>
#include <sys/uio.h>
#include <sys/utsname.h>
int main0(int argc, char **argv)
{
int iRet;
int fd;
char buf_1[128] = "hello world";
char buf_2[128] = {"hhhhhh"};
struct iovec vec[2] =
{
{buf_1, 128},
{buf_2, 128}
};
fd = open(argv[1], O_WRONLY, 0);
iRet = write(fd, buf_2, sizeof(buf_2));
//会和前面的write共享当前的文件偏移量
iRet = writev(fd, vec, sizeof(vec)/sizeof(struct iovec));
close(fd);
return 0;
}
int main(int argc, char **argv)
{
struct utsname uts;
uname(&uts);
printf("%s,%s", uts.machine, uts.nodename);
return 0;
}
| 29bb1be6c62118f74f2322ea40cc136f2221d0c6 | [
"C",
"Makefile"
] | 4 | C | xiaobiqiang/tools | 8ae71b189595be71f03efa45ae4e1da6a6c6d8f5 | cf2da18206e1f660ef9c575c7d8ad8a9582f370b |
const express = require('express');
const app = express();
const mongoose = require('mongoose');
const bodyParser = require('body-parser'); // parses JSON and urlencoded request bodies
const cors = require('cors'); // enables cross-origin requests
const bcryptjs = require('bcryptjs'); // hashes and compares passwords
const config = require('./config.json'); // holds the MongoDB credentials
const User = require('./models/user.js'); // Mongoose model for user documents
const Conference = require('./models/conference.js'); // Mongoose model for conference documents
const port = 3000; // server port

// Connect to MongoDB Atlas (the database name is the path segment after mongodb.net/).
const mongodbURI = `mongodb+srv://${config.MONGO_USER}:${config.MONGO_PASSWORD}@${config.MONGO_CLUSTER_NAME}.mongodb.net/formative3-2db?retryWrites=true&w=majority`;
mongoose.connect(mongodbURI, {useNewUrlParser: true, useUnifiedTopology: true})
  .then(()=> console.log('DB connected!')) // success message
  .catch(err =>{ // error catch
    console.log(`DBConnectionError: ${err.message}`); // error message
  });

// Verify connectivity on the underlying connection object.
const db = mongoose.connection;
db.on('error', console.error.bind(console, 'connection error:')); // error message
db.once('open', function() { // fires once when the connection opens
  console.log('We are connected to mongo db'); // success message
});

// Request-logging middleware: logs every method + URL.
app.use((req,res,next)=>{
  console.log(`${req.method} request for ${req.url}`);
  next(); // hand off to the next middleware
});

// Body parsing and CORS.
app.use(bodyParser.json());
app.use(bodyParser.urlencoded({extended:false}));
app.use(cors());

app.get('/', (req, res) => res.send('Hello World!')); // simple health-check route

app.listen(port, () => console.log(`Mongodb app listening on port ${port}!`));

// List all users (connectivity test; no users may exist yet).
// Registering a route after app.listen() is fine in Express.
app.get('/displayUsers', (req,res)=>{
  User.find()
    .then(result =>{
      res.send(result); // return all user documents
    })
    // The original had no error handler: a query failure left the request
    // hanging with no response. Report the failure instead.
    .catch(err => {
      res.status(500).send({ error: err.message });
    });
});
"JavaScript"
] | 1 | JavaScript | valeriaosoriou/Summative3 | f946056a819ffc3eb1a8e7de4cd4b5c335ce4008 | 44169b65b9f28cbec5215ebdafbba5bd8c4d56da |
refs/heads/master | <repo_name>cberrios7289/civilianWorks<file_sep>/js/civilian-home.js
$(document).ready(function () {
  // Opens the given URL in a new browser tab.
  var openInNewTab = function (url) {
    window.open(url, '_blank');
  };

  // Slide-out navigation: open on hamburger click, close on the close button.
  $('#menu').on('click', function () {
    $('#navbar-wrapper').addClass('open');
  });

  $('#close').on('click', function () {
    $('#navbar-wrapper').removeClass('open');
  });

  // Social media links, each opening in a new tab.
  $('#youtube').on('click', function () {
    openInNewTab('https://www.youtube.com/');
  });

  $('#facebook').on('click', function () {
    openInNewTab('https://www.facebook.com/newyorkpinoy');
  });

  $('#instagram').on('click', function () {
    openInNewTab('https://www.instagram.com/civilianworks/');
  });

  // Log the viewport width whenever the window is resized.
  $(window).on('resize', function () {
    console.log($(window).width());
  });
});
"JavaScript"
] | 1 | JavaScript | cberrios7289/civilianWorks | 72c897bd7f15f41065e09140a773cf06411fe968 | 2aa6e61db4bbcd26cef8b1edddab491dc573b39b |
refs/heads/master | <file_sep>package com.cammy.flickerimageviewer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.json.JSONObject;
import android.content.res.Configuration;
import android.os.Bundle;
import android.support.v7.app.ActionBarActivity;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup.LayoutParams;
import android.view.ViewTreeObserver;
import android.view.ViewTreeObserver.OnGlobalLayoutListener;
import android.widget.AdapterView;
import android.widget.AdapterView.OnItemSelectedListener;
import android.widget.Gallery;
import android.widget.ProgressBar;
import android.widget.RelativeLayout;
import android.widget.Toast;
import com.android.volley.Request;
import com.android.volley.RequestQueue;
import com.android.volley.Response;
import com.android.volley.VolleyError;
import com.android.volley.toolbox.JsonObjectRequest;
import com.cammy.flickerimageviewer.adapter.LargeImageAdapter;
import com.cammy.flickerimageviewer.adapter.SmallImageAdapter;
import com.cammy.flickerimageviewer.model.ImageResult;
import com.cammy.flickerimageviewer.model.SearchClass;
import com.google.gson.Gson;
@SuppressWarnings("deprecation")
public class MainActivity extends ActionBarActivity {

    private static final String TAG = "FlickerImageView";

    /**
     * Fractions of the usable screen height given to the large image
     * gallery (index 0) and the thumbnail strip (index 1).
     */
    private static final float[] height_percentage = { 0.8390804597701149f,
            0.1609195402298851f };

    /**
     * Cached dimensions of this activity's main components.
     */
    private int screenWidth;
    private int screenHeight;
    private int actionBarHeight = 0;

    private Gallery gl_largeimgs;
    private Gallery gl_thumbimgs;
    private ProgressBar progressBar;
    private LargeImageAdapter imageAdapter;
    private SmallImageAdapter smallImageAdapter;
    private List<ImageResult> imageResults;

    /**
     * Singleton request queue provided by the application.
     */
    private RequestQueue queue;

    /**
     * GSON instance for parsing the JSON payload.
     */
    private Gson gson;

    /**
     * In-flight JSON request.
     */
    private JsonObjectRequest fetchRequest;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        queue = ((FlickrImageViewerApplication) this.getApplicationContext())
                .getQueue();
        findViews();
        updateLayout();
        refresh();
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        MenuInflater inflater = getMenuInflater();
        inflater.inflate(R.menu.main, menu);
        return super.onCreateOptionsMenu(menu);
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        switch (item.getItemId()) {
        case R.id.action_refresh:
            refresh();
            return true;
        default:
            return super.onOptionsItemSelected(item);
        }
    }

    /**
     * Wires up all view components declared in the layout XML and installs
     * adapters and selection listeners for both galleries.
     */
    private void findViews() {
        imageResults = new ArrayList<ImageResult>();
        gl_largeimgs = (Gallery) findViewById(R.id.gl_largeimgs);
        gl_thumbimgs = (Gallery) findViewById(R.id.gl_thumbimgs);
        progressBar = (ProgressBar) findViewById(R.id.progressBar);
        imageAdapter = new LargeImageAdapter(this, imageResults);
        smallImageAdapter = new SmallImageAdapter(this, imageResults);
        // Set adapters for the large image viewer and the thumbnail strip.
        gl_largeimgs.setAdapter(imageAdapter);
        gl_thumbimgs.setAdapter(smallImageAdapter);
        gl_largeimgs.setOnItemSelectedListener(largeImageOnSelectedListener);
        gl_thumbimgs.setOnItemSelectedListener(smallImageOnSelectedListener);
        // Observe the global layout so the action bar's real height is picked
        // up as soon as it is known; layout params are then recomputed.
        ViewTreeObserver vto = gl_largeimgs.getViewTreeObserver();
        vto.addOnGlobalLayoutListener(mGlobalLayoutListener);
    }

    /**
     * Keeps the thumbnail strip in sync when a large image is selected.
     */
    private OnItemSelectedListener largeImageOnSelectedListener = new OnItemSelectedListener() {
        @Override
        public void onItemSelected(AdapterView<?> arg0, View arg1, int arg2,
                long arg3) {
            gl_thumbimgs.setSelection(arg2);
        }

        @Override
        public void onNothingSelected(AdapterView<?> arg0) {
        }
    };

    /**
     * Keeps the large image gallery in sync when a thumbnail is selected.
     */
    private OnItemSelectedListener smallImageOnSelectedListener = new OnItemSelectedListener() {
        @Override
        public void onItemSelected(AdapterView<?> arg0, View arg1,
                int arg2, long arg3) {
            gl_largeimgs.setSelection(arg2);
        }

        @Override
        public void onNothingSelected(AdapterView<?> arg0) {
        }
    };

    /**
     * Fires on every global layout pass; updateLayout() is re-invoked until
     * the action bar height has been resolved (actionBarHeight != 0).
     */
    private OnGlobalLayoutListener mGlobalLayoutListener = new OnGlobalLayoutListener() {
        @Override
        public void onGlobalLayout() {
            if (actionBarHeight == 0) {
                updateLayout();
            }
        }
    };

    /**
     * Sizes the large image gallery and the thumbnail strip according to
     * height_percentage and the current screen dimensions.
     */
    private void updateLayout() {
        // The original also queried the deprecated
        // getWindowManager().getDefaultDisplay() here, but those values were
        // immediately overwritten; DisplayMetrics is the single source now.
        screenWidth = getResources().getDisplayMetrics().widthPixels;
        screenHeight = getResources().getDisplayMetrics().heightPixels
                - getActionBar().getHeight();
        actionBarHeight = getActionBar().getHeight();

        LayoutParams params = gl_largeimgs.getLayoutParams();
        params.width = screenWidth;
        params.height = (int) ((float) screenHeight * height_percentage[0]);
        gl_largeimgs.setLayoutParams(params);
        imageAdapter.setSize(params.width, params.height);
        imageAdapter.notifyDataSetChanged();

        params = gl_thumbimgs.getLayoutParams();
        params.height = (int) ((float) screenHeight * height_percentage[1]);
        int itemWidth = (int) (SmallImageAdapter.itemRatio
                * (float) screenHeight * height_percentage[1]);
        // Widen the strip and shift it left so the selected thumbnail can sit
        // at the left edge while neighbours remain scrollable.
        params.width = (screenWidth - itemWidth) * 2 + itemWidth;
        ((RelativeLayout.LayoutParams) params).leftMargin = -(screenWidth - itemWidth);
        gl_thumbimgs.setLayoutParams(params);
        smallImageAdapter.setSize(params.width, params.height);
        smallImageAdapter.notifyDataSetChanged();
    }

    /**
     * Refreshes the images by re-issuing the JSON fetch request.
     */
    private void refresh() {
        progressBar.setVisibility(View.VISIBLE);
        fetchRequest = new JsonObjectRequest(Request.Method.GET, getResources()
                .getString(R.string.image_fetch_url), null,
                new ResponseListener(), new ErrorListener());
        queue.add(fetchRequest);
    }

    /**
     * Volley callback invoked when the JSON result was fetched successfully.
     */
    private class ResponseListener implements Response.Listener<JSONObject> {
        @Override
        public void onResponse(JSONObject response) {
            progressBar.setVisibility(View.GONE);
            setUpResults(response);
        }
    }

    /**
     * Volley callback invoked when fetching the JSON result failed.
     */
    private class ErrorListener implements Response.ErrorListener {
        @Override
        public void onErrorResponse(VolleyError error) {
            progressBar.setVisibility(View.GONE);
            Toast.makeText(getApplicationContext(), "Error, Please Try Again",
                    Toast.LENGTH_LONG).show();
        }
    }

    /**
     * Fills both galleries with the fetched data.
     *
     * @param response raw JSON payload from the image feed
     */
    private void setUpResults(JSONObject response) {
        imageResults.clear();
        imageAdapter.notifyDataSetChanged();
        smallImageAdapter.notifyDataSetChanged();
        gson = new Gson();
        SearchClass searchClass = gson.fromJson(response.toString(),
                SearchClass.class);
        // Make sure there is data in the response.
        if (searchClass.getItems() != null) {
            ImageResult[] results = searchClass.getItems();
            List<ImageResult> tempList = Arrays.asList(results);
            imageResults.addAll(tempList);
            imageAdapter.notifyDataSetChanged();
            smallImageAdapter.notifyDataSetChanged();
        }
    }

    /**
     * Re-applies the layout parameters when the screen orientation changes.
     */
    @Override
    public void onConfigurationChanged(Configuration newConfig) {
        super.onConfigurationChanged(newConfig);
        updateLayout();
    }
}
<file_sep>package com.cammy.flickerimageviewer.adapter;
import java.util.List;
import android.content.Context;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.view.ViewGroup.LayoutParams;
import android.widget.BaseAdapter;
import android.widget.Gallery;
import com.android.volley.toolbox.ImageLoader;
import com.android.volley.toolbox.NetworkImageView;
import com.cammy.flickerimageviewer.FlickrImageViewerApplication;
import com.cammy.flickerimageviewer.R;
import com.cammy.flickerimageviewer.model.ImageResult;
/**
 * Adapter backing the thumbnail strip (small image gallery).
 *
 * Each item is a fixed-aspect NetworkImageView loaded lazily through the
 * application's shared Volley ImageLoader.
 *
 * @author Xiao
 */
public class SmallImageAdapter extends BaseAdapter {

    /**
     * The width/height ratio for an image viewer item.
     */
    public static float itemRatio = 1.314285714285714f;

    private Context context;
    // Backing list of results; shared with the activity, which mutates it
    // and calls notifyDataSetChanged().
    private List<ImageResult> results;
    // Shared Volley image loader (application-wide singleton).
    private ImageLoader loader;
    // Dimensions of the containing gallery; 0 until setSize() is called.
    private int parentWidth;
    private int parentHeight;

    public SmallImageAdapter(Context context, List<ImageResult> results) {
        this.context = context;
        this.results = results;
        loader = ((FlickrImageViewerApplication) context
                .getApplicationContext()).getImageLoader();
        this.parentHeight = 0;
        this.parentWidth = 0;
    }

    /**
     * Update ui layout params.
     *
     * @param parentWidth the width of the small image gallery
     * @param parentHeight the height of the small image gallery
     */
    public void setSize(int parentWidth, int parentHeight) {
        this.parentHeight = parentHeight;
        this.parentWidth = parentWidth;
    }

    // Replaces the backing list reference (does not notify observers).
    public void setResult(List<ImageResult> results) {
        this.results = results;
    }

    @Override
    public int getCount() {
        if (results == null)
            return 0;
        return results.size();
    }

    @Override
    public Object getItem(int position) {
        if (results == null)
            return null;
        return results.get(position);
    }

    @Override
    public long getItemId(int position) {
        return position;
    }

    /**
     * Returns a recycled or freshly inflated small image view (img.xml).
     *
     * NOTE(review): layout params are only applied when a view is first
     * inflated; recycled views keep the size computed from the
     * parentHeight current at inflation time — confirm setSize() is always
     * followed by notifyDataSetChanged() so stale sizes get replaced.
     */
    @Override
    public View getView(int position, View convertView, ViewGroup parent) {
        View view = convertView;
        if (view == null) {
            view = LayoutInflater.from(context).inflate(R.layout.img, null,
                    false);
            ViewHolder holder = new ViewHolder();
            holder.resultImage = (NetworkImageView) view
                    .findViewById(R.id.iv_img);
            // Item width derives from the strip height via the fixed ratio.
            LayoutParams params = new Gallery.LayoutParams(
                    (int) ((float) parentHeight * itemRatio), parentHeight);
            holder.resultImage.setLayoutParams(params);
            holder.resultImage.setBackgroundResource(R.drawable.small_bg);
            view.setTag(holder);
        }
        ImageResult selectedResult = (ImageResult) getItem(position);
        ViewHolder holder = (ViewHolder) view.getTag();
        // Placeholder shown until the network image arrives.
        holder.resultImage.setImageResource(R.drawable.default_large);
        holder.resultImage.setImageUrl(
                selectedResult.getMedia().getMedia_url(), loader);
        return view;
    }

    // View-holder pattern: caches the child lookup per recycled row.
    static class ViewHolder {
        NetworkImageView resultImage;
    }
}
<file_sep>package com.cammy.flickerimageviewer;
import android.app.Application;
import com.android.volley.RequestQueue;
import com.android.volley.toolbox.ImageLoader;
import com.android.volley.toolbox.Volley;
import com.cammy.flickerimageviewer.common.ImageLruCache;
/**
* Application class for Flickr Image Viewer
* @author Xiao
*
*/
public class FlickrImageViewerApplication extends Application {
private ImageLruCache imageCache;
private RequestQueue queue;
private ImageLoader imageLoader;
public void onCreate() {
imageCache = new ImageLruCache(ImageLruCache.getDefaultLruCacheSize());
queue = Volley.newRequestQueue(this);
imageLoader = new ImageLoader(queue, imageCache);
}
/**
* Used to return the singleton Image cache We do this so that if the same
* image is loaded twice on two different activities, the cache still
* remains
*
* @return ImageLruCach
*/
public ImageLruCache getCache() {
return imageCache;
}
/**
* Used to return the singleton RequestQueue
*
* @return RequestQueue
*/
public RequestQueue getQueue() {
return queue;
}
/**
* Used to return the singleton imageloader that utilizes the image lru
* cache.
*
* @return ImageLoader
*/
public ImageLoader getImageLoader() {
return imageLoader;
}
}
| 58bb3068a21738737073b2c6cbb30c66fffdb526 | [
"Java"
] | 3 | Java | xiaogegexiao/FlickrImageViewer | 99eaec2cdbfbe4a822a1a1c7a443bde8afd13277 | b2c0a544ff2cdc48ff9f1205e631b63cc88f34b5 |
refs/heads/master | <file_sep><?php
namespace App\Http\Controllers;
use Illuminate\Http\Request;
use Orion\Http\Controllers\Controller;
use Orion\Concerns\DisableAuthorization;
use App\Models\Review;
class ReviewController extends Controller
{
    use DisableAuthorization;

    /** Model served by this Orion resource controller. */
    protected $model = Review::class;

    /**
     * Relations that clients may request to eager-load with a review.
     *
     * @return array
     */
    public function includes(): array
    {
        return ['recruiter', 'user'];
    }
}
<file_sep><?php
namespace App\Http\Controllers;
use App\Models\User;
use Illuminate\Http\Request;
use Orion\Http\Controllers\Controller;
use Orion\Concerns\DisableAuthorization;
class UserController extends Controller
{
    use DisableAuthorization;

    /** Model served by this Orion resource controller. */
    protected $model = User::class;

    /**
     * Relations that clients may request to eager-load with a user.
     *
     * @return array
     */
    public function includes(): array
    {
        return ['skills', 'roles', 'userReviews', 'userJobs', 'active_jobs', 'finished_jobs', 'applications'];
    }
}
<file_sep><?php
namespace App\Models;
use Illuminate\Database\Eloquent\Factories\HasFactory;
use Illuminate\Database\Eloquent\Model;
/**
 * A skill that can be attached to both users (who have it) and jobs
 * (which require it).
 */
class Skill extends Model
{
    use HasFactory;

    /**
     * The attributes that are mass assignable.
     *
     * @var array
     */
    protected $fillable = [
        'name',
    ];

    // Users holding this skill (conventional pivot table "skill_user").
    public function users()
    {
        return $this->belongsToMany(User::class);
    }

    // Jobs requiring this skill. Explicit pivot "skill_job": foreign pivot
    // key skill_id (this model), related pivot key job_id.
    public function jobs()
    {
        return $this->belongsToMany(Job::class, 'skill_job', 'skill_id', 'job_id');
    }
}
use Illuminate\Database\Migrations\Migration;
use Illuminate\Database\Schema\Blueprint;
use Illuminate\Support\Facades\Schema;
/**
 * Adds all foreign key constraints after the base tables exist.
 *
 * Every constraint cascades on update; deletes cascade as well except for
 * any table using SET NULL explicitly elsewhere in the schema.
 *
 * NOTE(review): users.recruiter_id and recruiters.user_id reference each
 * other, so the two tables are circularly linked — confirm which direction
 * is authoritative before relying on either relation.
 */
class CreateForeignKeys extends Migration
{
    /**
     * Run the migrations.
     *
     * @return void
     */
    public function up()
    {
        Schema::table('users', function (Blueprint $table) {
            $table->foreign('recruiter_id')->references('id')->on('recruiters')->onUpdate('cascade')->onDelete('cascade');
        });
        Schema::table('recruiters', function (Blueprint $table) {
            $table->foreign('user_id')->references('id')->on('users')->onUpdate('cascade')->onDelete('cascade');
        });
        // Jobs link to the posting recruiter and the assigned employee.
        Schema::table('jobs', function (Blueprint $table) {
            $table->foreign('recruiter_id')->references('id')->on('recruiters')->onUpdate('cascade')->onDelete('cascade');
            $table->foreign('employee_id')->references('id')->on('users')->onUpdate('cascade')->onDelete('cascade');
        });
        // Reviews: written by a recruiter (reviewer_id) about a user.
        Schema::table('reviews', function (Blueprint $table) {
            $table->foreign('user_id')->references('id')->on('users')->onUpdate('cascade')->onDelete('cascade');
            $table->foreign('reviewer_id')->references('id')->on('recruiters')->onUpdate('cascade')->onDelete('cascade');
        });
        // Many-to-many pivot tables.
        Schema::table('role_user', function (Blueprint $table) {
            $table->foreign('role_id')->references('id')->on('roles')->onUpdate('cascade')->onDelete('cascade');
            $table->foreign('user_id')->references('id')->on('users')->onUpdate('cascade')->onDelete('cascade');
        });
        Schema::table('skill_user', function (Blueprint $table) {
            $table->foreign('skill_id')->references('id')->on('skills')->onUpdate('cascade')->onDelete('cascade');
            $table->foreign('user_id')->references('id')->on('users')->onUpdate('cascade')->onDelete('cascade');
        });
        Schema::table('skill_job', function (Blueprint $table) {
            $table->foreign('skill_id')->references('id')->on('skills')->onUpdate('cascade')->onDelete('cascade');
            $table->foreign('job_id')->references('id')->on('jobs')->onUpdate('cascade')->onDelete('cascade');
        });
    }

    /**
     * Reverse the migrations.
     *
     * @return void
     */
    public function down()
    {
        Schema::dropIfExists('foreign_keys');
    }
}
<file_sep><?php
namespace App\Http\Controllers;
use Illuminate\Http\Request;
use Orion\Http\Controllers\Controller;
use Orion\Concerns\DisableAuthorization;
use App\Models\Job;
class JobController extends Controller
{
    use DisableAuthorization;

    protected $model = Job::class;

    /**
     * The relations that are allowed to be included together with a resource.
     *
     * @return array
     */
    public function includes(): array
    {
        return ['skills', 'employee', 'recruiter', 'applications'];
    }

    /**
     * The attributes that job resources can be filtered by.
     *
     * @return array
     */
    public function filterableBy(): array
    {
        return ['id', 'skills', 'recruiter.id', 'employee.id', 'created_at'];
    }

    /**
     * Imports jobs from the bundled jobs.xml file.
     *
     * Fix: the original loaded a hard-coded, machine-specific absolute path
     * ("D:\projects\..."); the file is now resolved relative to this
     * controller, and a load failure is reported instead of fataling on a
     * foreach over `false`.
     *
     * @return array|\Illuminate\Http\JsonResponse created Job models
     */
    public function importFromXml()
    {
        $xmlPath = __DIR__ . DIRECTORY_SEPARATOR . 'jobs.xml';
        $xml_object = simplexml_load_file($xmlPath);
        if ($xml_object === false) {
            return response()->json(['error' => 'Unable to read XML file: ' . $xmlPath], 500);
        }

        $objects = [];
        foreach ($xml_object as $LDAPzapis) {
            // Flatten one <record> into name => value (or name => [values]).
            $xml_array = [];
            foreach ($LDAPzapis as $attribute) {
                $attrs = (array)$attribute;
                if (array_key_exists($attrs['@attributes']['name'], $xml_array)) {
                    // Repeated attribute: promote the entry to an array of values.
                    $old = $xml_array[$attrs['@attributes']['name']];
                    if (is_array($old)) {
                        $xml_array[$attrs['@attributes']['name']][] = $attrs[0];
                    } else {
                        unset($xml_array[$attrs['@attributes']['name']]);
                        $xml_array += [$attrs['@attributes']['name'] => array($old, $attrs[0])];
                    }
                }
                // Array-union: only inserts when the key is not present yet,
                // so the merge above is never overwritten.
                $xml_array += [$attrs['@attributes']['name'] => $attrs[0]];
            }
            $objects[] = $this->createModelFromArray(Job::class, $xml_array, Job::$relationships, Job::$keys);
        }
        return $objects;
    }

    /**
     * Builds and persists one model from a flat attribute array.
     *
     * @param string $class     model class to instantiate
     * @param array  $array     attribute name => value(s)
     * @param array  $relations relation name => [related class, lookup column]
     * @param array  $keys      optional map from XML attribute names to model
     *                          property names; empty when they already match
     * @return mixed the saved model, or the string 'error' on relation failure
     */
    public function createModelFromArray($class, $array, $relations = [], $keys = [])
    {
        $obj = new $class();
        // belongsToMany links are collected here and attached after save().
        $attachments = [];
        if (count($keys) == 0) {
            // XML attribute names equal the model property names.
            foreach ($array as $key => $value) {
                if (isset($relations[$key])) {
                    $result = $this->manageRelations($key, $value, $relations);
                    if ($result[0] != 'success')
                        return 'error';
                    if (is_array($result[1])) {
                        $attachments[] = [$key => $result[1]];
                    } else {
                        $obj[$key] = $result[1];
                    }
                } else {
                    // Not a relation: plain attribute assignment.
                    $obj[$key] = $value;
                }
            }
        } else {
            // XML attribute names must be translated through $keys first.
            foreach ($array as $key => $value) {
                if (isset($relations[$keys[$key]])) {
                    $result = $this->manageRelations($keys[$key], $value, $relations);
                    if ($result[0] != 'success')
                        return 'error';
                    if (is_array($result[1])) {
                        $attachments[] = [$keys[$key] => $result[1]];
                    } else {
                        $obj[$keys[$key]] = $result[1];
                    }
                } else {
                    $obj[$keys[$key]] = $value;
                }
            }
        }
        $obj->save();
        foreach ($attachments as $attachment) {
            foreach ($attachment as $key => $value) {
                $obj->$key()->attach($value);
            }
        }
        return $obj;
    }

    /**
     * Resolves (creating if necessary) the related model(s) for one relation.
     *
     * Keys ending in "_id" are treated as to-one relations and yield a single
     * id; anything else is treated as many-to-many and yields an id list.
     *
     * @return array ['success', int|int[]]
     */
    public function manageRelations($key, $value, $relations): array
    {
        $relation = $relations[$key][0];
        $relationCol = $relations[$key][1];
        $values = (array)$value;
        $ids = [];
        foreach ($values as $value) {
            $relationObj = $relation::firstOrCreate(
                [$relationCol => $value]
            );
            $ids[] = $relationObj->id;
        }
        if (preg_match('/_id/i', $key)) {
            // one to many: single foreign key value
            return ['success', $ids[0]];
        } else {
            // many to many: list of ids to attach
            return ['success', $ids];
        }
    }
}
<file_sep><?php
namespace App\Http\Controllers;
use Illuminate\Http\Request;
use Orion\Http\Controllers\RelationController;
use Orion\Concerns\DisableAuthorization;
use App\Models\User;
class RecruiterReviewsController extends RelationController
{
    use DisableAuthorization;

    /** Parent model whose relation this controller exposes. */
    protected $model = User::class;

    /** Relation name on the parent model. */
    protected $relation = 'recruiterReviews';

    /**
     * Relations that clients may request to eager-load with each review.
     *
     * @return array
     */
    public function includes(): array
    {
        return ['recruiter', 'user'];
    }
}
<file_sep><?php
namespace App\Models;
use Illuminate\Database\Eloquent\Factories\HasFactory;
use Illuminate\Database\Eloquent\Model;
/**
 * A recruiter profile linked to a user account.
 */
class Recruiter extends Model
{
    use HasFactory;

    /**
     * The attributes that are mass assignable.
     *
     * @var array
     */
    protected $fillable = [
        'user_id',
        'title',
        'location'
    ];

    // NOTE(review): the migration defines both recruiters.user_id and
    // users.recruiter_id. hasOne() resolves through users.recruiter_id; if
    // recruiters.user_id is the authoritative link, this should be
    // belongsTo(User::class) instead — confirm against the User model.
    public function user()
    {
        return $this->hasOne(User::class);
    }

    // Jobs posted by this recruiter (jobs.recruiter_id).
    public function jobs()
    {
        return $this->hasMany(Job::class);
    }

    // NOTE(review): unlike the other relation methods, this executes the
    // query (->get()) and returns a Collection, so it cannot be used for
    // eager loading — callers must treat it as a plain accessor.
    public function finishedJobs()
    {
        return $this->jobs()->where('finished', 1)->get();
    }

    // Reviews written by this recruiter (reviews.reviewer_id per migration).
    public function reviews()
    {
        return $this->hasMany(Review::class);
    }
}
<file_sep><?php
namespace App\Http\Controllers;
use Illuminate\Http\Request;
use Orion\Http\Controllers\RelationController;
use App\Models\Job;
use Orion\Concerns\DisableAuthorization;
class JobApplicationsController extends RelationController
{
    use DisableAuthorization;

    /** Parent model whose relation this controller exposes. */
    protected $model = Job::class;

    /** Relation name on the parent model. */
    protected $relation = 'applications';

    /**
     * Relations that clients may request to eager-load with each applicant.
     *
     * @return array
     */
    public function includes(): array
    {
        return ['skills', 'roles', 'userReviews', 'userJobs', 'active_jobs', 'finished_jobs', 'applications'];
    }
}
<file_sep><?php
namespace App\Http\Controllers;
use Illuminate\Http\Request;
use Orion\Http\Controllers\RelationController;
use Orion\Concerns\DisableAuthorization;
use App\Models\Review;
class ReviewRecruiterController extends RelationController
{
    use DisableAuthorization;

    /** Parent model whose relation this controller exposes. */
    protected $model = Review::class;

    /** Relation name on the parent model. */
    protected $relation = 'recruiter';

    /**
     * Relations that clients may request to eager-load with the recruiter.
     *
     * @return array
     */
    public function includes(): array
    {
        return ['recruiterJobs', 'finished_jobs', 'recruiterReviews'];
    }
}
<file_sep><?php
namespace App\Http\Controllers;
use Illuminate\Http\Request;
use App\Models\Skill;
use Orion\Http\Controllers\Controller;
use Orion\Concerns\DisableAuthorization;
class SkillController extends Controller
{
    use DisableAuthorization;

    /** Model served by this Orion resource controller. */
    protected $model = Skill::class;

    /**
     * Relations that clients may request to eager-load with a skill.
     *
     * @return array
     */
    public function includes(): array
    {
        return ['users', 'jobs'];
    }
}
<file_sep><?php
namespace App\Http\Controllers;
use Illuminate\Http\Request;
use Orion\Http\Controllers\Controller;
use Orion\Concerns\DisableAuthorization;
use App\Models\User;
/**
 * Resource controller for the /recruiters endpoint.
 *
 * NOTE(review): $model is User::class although this controller serves the
 * 'recruiters' route and a Recruiter model exists — the includes()
 * relations (recruiterJobs, recruiterReviews) suggest they live on User,
 * but the User model is not visible here; confirm the intended model.
 */
class RecruiterController extends Controller
{
    use DisableAuthorization;

    /**
     * The relations that are allowed to be included together with a resource.
     *
     * @return array
     */
    public function includes() : array
    {
        return ['recruiterJobs', 'finished_jobs', 'recruiterReviews'];
    }

    protected $model = User::class;
}
<file_sep><?php
namespace App\Models;
use Illuminate\Database\Eloquent\Factories\HasFactory;
use Illuminate\Database\Eloquent\Model;
/**
 * A job posting, owned by a recruiter and optionally assigned to an
 * employee, with required skills and user applications.
 */
class Job extends Model
{
    use HasFactory;

    /**
     * The attributes that are mass assignable.
     *
     * @var array
     */
    protected $fillable = [
        'title',
        'description',
        'employee_id',
        'recruiter_id',
        'in_progress',
        'finished'
    ];

    /**
     * XML-import relation map: relation/FK name => [related class, lookup column].
     * Used by JobController::createModelFromArray().
     */
    public static $relationships = [
        'employee_id' => [User::class, 'email'],
        'recruiter_id' => [User::class, 'email'],
        'skills' => [Skill::class, 'name']
    ];

    /**
     * XML-import key map: XML attribute name => model property name.
     */
    public static $keys = [
        'title'=> 'title',
        'description'=> 'description',
        'employee email'=>'employee_id',
        'recruiter email'=>'recruiter_id',
        'in progress'=>'in_progress',
        'skill'=>'skills',
        'finished'=>'finished'
    ];

    // Skills required by this job. Foreign pivot key job_id (this model),
    // related pivot key skill_id.
    public function skills()
    {
        return $this->belongsToMany(Skill::class, 'skill_job', 'job_id', 'skill_id');
    }

    public function employee()
    {
        return $this->belongsTo(User::class, 'employee_id');
    }

    public function recruiter()
    {
        return $this->belongsTo(User::class, 'recruiter_id');
    }

    // Users who applied to this job.
    // Fix: for a belongsToMany defined on Job, the foreign pivot key must
    // reference this model (job_id) and the related pivot key the User
    // (user_id); the original had the two arguments swapped, unlike the
    // correctly-ordered skills() relation above.
    public function applications()
    {
        return $this->belongsToMany(User::class, 'applications', 'job_id', 'user_id');
    }
}
<file_sep><?php
use Illuminate\Http\Request;
use Illuminate\Support\Facades\Route;
use Illuminate\Support\Facades\URL;
use Orion\Facades\Orion;

/*
|--------------------------------------------------------------------------
| API Routes
|--------------------------------------------------------------------------
|
| Here is where you can register API routes for your application. These
| routes are loaded by the RouteServiceProvider within a group which
| is assigned the "api" middleware group. Enjoy building your API!
|
*/

// Route::middleware('auth:api')->get('/user', function (Request $request) {
//     return $request->user();
// });

// Authentication and bulk-import endpoints (string controller syntax,
// resolved against the RouteServiceProvider namespace).
Route::post('register', 'Auth\RegisterController@register');
Route::post('login', 'Auth\LoginController@login');
Route::post('logout', 'Auth\LoginController@logout');
Route::post('import_xml', 'JobController@importFromXml');

// Orion REST resources: URI segment => controller.
$resources = [
    'skills' => 'SkillController',
    'jobs' => 'JobController',
    'reviews' => 'ReviewController',
    'recruiters' => 'RecruiterController',
    'roles' => 'RoleController',
    'users' => 'UserController',
];

Route::group(['as' => 'api.'], function () use ($resources) {
    foreach ($resources as $resource => $controller) {
        Orion::resource($resource, $controller);
    }

    // NOTE(review): the relation controllers below use ::class without `use`
    // imports; in this (non-namespaced) routes file that yields the bare
    // class name string, which must then be resolved by the router's
    // controller namespace — confirm this matches the app's Laravel version.

    // one to one
    Orion::belongsToResource('jobs', 'recruiter', JobRecruiterController::class);
    Orion::belongsToResource('jobs', 'employee', JobEmployeeController::class);
    Orion::belongsToResource('reviews', 'user', ReviewUserController::class);
    Orion::belongsToResource('reviews', 'recruiter', ReviewRecruiterController::class);
    // one to many
    Orion::hasManyResource('recruiters', 'recruiterJobs', RecruiterJobsController::class);
    Orion::hasManyResource('recruiters', 'recruiterReviews', RecruiterReviewsController::class);
    Orion::hasManyResource('users', 'userReviews', UserReviewsController::class);
    Orion::hasManyResource('users', 'userJobs', UserJobsController::class);
    // many to many
    Orion::belongsToManyResource('jobs', 'skills', JobSkillsController::class);
    Orion::belongsToManyResource('skills', 'jobs', SkillJobsController::class);
    Orion::belongsToManyResource('users', 'skills', UserSkillsController::class);
    Orion::belongsToManyResource('users', 'roles', UserRolesController::class);
    Orion::belongsToManyResource('users', 'applications', UserApplicationsController::class);
    Orion::belongsToManyResource('jobs', 'applications', JobApplicationsController::class);
});
<file_sep><?php
namespace App\Http\Controllers;
use App\Models\Role;
use Illuminate\Http\Request;
use Orion\Http\Controllers\Controller;
use Orion\Concerns\DisableAuthorization;
class RoleController extends Controller
{
    use DisableAuthorization;

    /** Model served by this Orion resource controller. */
    protected $model = Role::class;

    /**
     * Relations that clients may request to eager-load with a role.
     *
     * @return array
     */
    public function includes(): array
    {
        return ['users'];
    }
}
<file_sep><?php
namespace App\Http\Controllers;
use Illuminate\Http\Request;
use Orion\Http\Controllers\RelationController;
use App\Models\User;
use Orion\Concerns\DisableAuthorization;
class UserReviewsController extends RelationController
{
    use DisableAuthorization;

    /** Parent model whose relation this controller exposes. */
    protected $model = User::class;

    /** Relation name on the parent model. */
    protected $relation = 'userReviews';

    /**
     * Relations that clients may request to eager-load with each review.
     *
     * @return array
     */
    public function includes(): array
    {
        return ['recruiter', 'user'];
    }
}
| d023ea5e3769529063aa2116359290411b29f2a4 | [
"PHP"
] | 15 | PHP | robo58/job-service-api | de65e725d172567b653d9015583867208a0dd1d6 | 8899d70b322d373f1a03c52c73a135b876085909 |
refs/heads/master | <file_sep>package com.baizhi.controller;
import org.apache.commons.io.FilenameUtils;
import org.apache.commons.io.IOUtils;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.multipart.MultipartFile;
import sun.nio.ch.IOUtil;
import javax.servlet.ServletOutputStream;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.*;
import java.net.URLEncoder;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.UUID;
import static sun.nio.ch.IOStatus.EOF;
/**
 * Handles file download and upload requests under the /files path.
 *
 * created by houchangchang on 2020/5/20
 */
@Controller
@RequestMapping("files")
public class uploadController {

    /**
     * Streams a stored file from /files/download back to the client.
     *
     * @param filename requested file name; reduced to its base name so that
     *                 path-traversal values ("../../etc/passwd") cannot
     *                 escape the download folder
     */
    @RequestMapping(value = "download")
    public void download(String filename, HttpServletRequest request, HttpServletResponse response) throws IOException {
        String basePath = request.getSession().getServletContext().getRealPath("/files/download");
        // filename is untrusted request input: keep only the last path segment.
        String safeName = FilenameUtils.getName(filename);
        // Read the file contents.
        FileInputStream is = new FileInputStream(new File(basePath, safeName));
        // "inline" lets the browser render the file; use "attachment" to
        // force a save dialog instead.
        response.setHeader("content-disposition", "inline;fileName=" + URLEncoder.encode(safeName, "UTF-8"));
        ServletOutputStream os = response.getOutputStream();
        System.out.println("duwnload文件------------");
        IOUtils.copy(is, os);
        IOUtils.closeQuietly(is);
        IOUtils.closeQuietly(os);
    }

    /**
     * Saves an uploaded file under /files/&lt;yyyy-MM-dd&gt;/ with a generated,
     * collision-free name, then redirects back to the upload page.
     */
    @RequestMapping(value = "upload", method = RequestMethod.POST)
    public String upload(MultipartFile multipartFile, HttpServletRequest request) throws IOException {
        System.out.println("路径 = " + request.getSession().getServletContext().getRealPath("/"));
        System.out.println("文件名 = " + multipartFile.getOriginalFilename());
        System.out.println("文件类型 = " + multipartFile.getContentType());
        System.out.println("文件大小 = " + multipartFile.getSize());
        String realPath = request.getSession().getServletContext().getRealPath("/files");
        File file = new File(realPath, new SimpleDateFormat("yyyy-MM-dd").format(new Date()));
        if (!file.exists()) file.mkdirs();
        String originalFilename = multipartFile.getOriginalFilename();
        String extension = FilenameUtils.getExtension(originalFilename);
        // Fix: the original pattern used "YYYY" (ISO week-year), which
        // reports the wrong year around New Year; "yyyy" is the calendar year.
        String newName = UUID.randomUUID().toString().replaceAll("-", "") + new SimpleDateFormat("yyyyMMddHHmmss").format(new Date()) + "." + extension;
        multipartFile.transferTo(new File(file, newName));
        return "redirect:/upload.jsp";
    }
}
<file_sep>CREATE TABLE `book` (
`id` INT(11) NOT NULL AUTO_INCREMENT,
`cover` VARCHAR(255) DEFAULT '',
`title` VARCHAR(255) NOT NULL DEFAULT '',
`author` VARCHAR(255) DEFAULT '',
`date` VARCHAR(255) DEFAULT '',
`press` VARCHAR(255) DEFAULT '',
`abs` VARCHAR(255) DEFAULT NULL,
`cid` INT(11) DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `fk_book_category_on_cid` (`cid`),
CONSTRAINT `fk_book_category_on_cid` FOREIGN KEY (`cid`) REFERENCES `category` (`id`) ON DELETE SET NULL ON UPDATE CASCADE
)ENGINE=INNODB DEFAULT CHARSET=utf8; | 5f295b6b600f8555f526c9a8c0c8e5a2aec4a122 | [
"Java",
"SQL"
] | 2 | Java | houchangchang/springboot_day2 | b4217ce044e8c3ec270a038c42e0d01e38f0db7b | a988df1a6f05a58feee3c55c487d8809f6efdae0 |
refs/heads/master | <repo_name>Grey360/JavaClass0<file_sep>/README.md
# JavaClass0
OSS Java Class Courses
<file_sep>/src/Main.java
import java.util.Scanner;
public class Main {
/**
*
* @param args
*/
public static void main(String[] args) {
System.out.println("Hello World!");
Scanner sc = new Scanner(System.in);
String res = sc.nextLine();
System.out.println(res);
printName();
Imprimante imprimante = new Imprimante();
System.out.println(imprimante.getNom());
Scanner name = new Scanner(System.in);
imprimante.setNom(name.nextLine());
System.out.println(imprimante.getNom());
}
/**
* @deprecated use {@link printName()} instead.
*/
public static void printNom(){
System.out.println("Nom");
}
public static void printName(){
System.out.println("Nom");
}
}
| 53eee9b95416852229bdc86065b2ed05b9499455 | [
"Markdown",
"Java"
] | 2 | Markdown | Grey360/JavaClass0 | c409c1de764d1111625aeb8d57de048aeb8fe172 | 0aeebf4fe3ae4a36d4ec76791add52503b39533a |
refs/heads/master | <repo_name>qvil/health-checker<file_sep>/src/components/ModalCircularProgress.jsx
import React from "react";
import styled from "styled-components";
import CircularProgress from "@material-ui/core/CircularProgress";
const Container = styled.div`
position: fixed;
width: 100vw;
height: 100vh;
top: 0;
left: 0;
background: gray;
display: flex;
justify-content: center;
align-items: center;
`;
const ModalCircularProgress = () => {
return (
<Container>
<CircularProgress />
</Container>
);
};
export default ModalCircularProgress;
<file_sep>/README.md
# Health Checker
Health Checker with Code Travelers!
## Demo
https://health-checker.qvil.now.sh

## Prerequisites
- Node.js >= 8.x (Recommended the latest version)
- .env : Refer to [.env.sample](.env.sample)
## Install
```
npm install
```
## Start
```
npm start
```
## 🌏 Supported Browser
| <img src="https://user-images.githubusercontent.com/1215767/34348590-250b3ca2-ea4f-11e7-9efb-da953359321f.png" alt="IE / Edge" /> IE | <img src="https://user-images.githubusercontent.com/1215767/34348380-93e77ae8-ea4d-11e7-8696-9a989ddbbbf5.png" alt="IE / Edge" /> Edge | <img src="https://user-images.githubusercontent.com/1215767/34348383-9e7ed492-ea4d-11e7-910c-03b39d52f496.png" alt="Firefox" /> Firefox | <img src="https://user-images.githubusercontent.com/1215767/34348387-a2e64588-ea4d-11e7-8267-a43365103afe.png" alt="Chrome" /> Chrome | <img src="https://user-images.githubusercontent.com/1215767/34348394-a981f892-ea4d-11e7-9156-d128d58386b9.png" alt="Safari" /> Safari |
| ------------------------------------------------------------------------------------------------------------------------------------ | -------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------- |
| >= 11 | >= 14 | >= 45 | >= 49 | >= 10 |
## [License](https://github.com/qvil/health-checker/blob/master/LICENSE)
MIT
<file_sep>/src/components/ServerList.jsx
import React, { useState } from "react";
import styled from "styled-components";
import axios from "axios";
import { makeStyles } from "@material-ui/core/styles";
import List from "@material-ui/core/List";
import ListItem from "@material-ui/core/ListItem";
import ListItemIcon from "@material-ui/core/ListItemIcon";
import ListItemSecondaryAction from "@material-ui/core/ListItemSecondaryAction";
import ListItemText from "@material-ui/core/ListItemText";
import Checkbox from "@material-ui/core/Checkbox";
import IconButton from "@material-ui/core/IconButton";
import CachedIcon from "@material-ui/icons/Cached";
import WifiIcon from "@material-ui/icons/Wifi";
import teal from "@material-ui/core/colors/teal";
import pink from "@material-ui/core/colors/pink";
import grey from "@material-ui/core/colors/grey";
import { animation } from "../styles/mixin";
// import DeleteIcon from "@material-ui/icons/Delete";
// import useAxios from "@react-qooks/use-axios";
// import useAxios from "@react-daily-hooks/use-axios";
const StyledWifiIcon = styled(WifiIcon)`
color: ${({ styledcolor = grey[500] }) => styledcolor};
`;
const useStyles = makeStyles(theme => ({
root: {
width: "100%",
backgroundColor: theme.palette.background.paper
},
ok: {
backgroundColor: "grren"
},
status: {
ok: {
background: "green"
}
}
}));
const StyledIconButton = styled(IconButton)`
${animation.spinReverse};
animation: spinReverse
${({ loading }) => (loading === "true" ? "infinite" : 0)} 2.5s linear;
`;
const ServerList = ({ value: { url, seq } }) => {
const classes = useStyles();
// const [fetchTrigger, setFetchTrigger] = useState(false);
// const { loading, error, data } = useAxios(
// { url: `${process.env.REACT_APP_API_URL}/<EMAIL>` },
// fetchTrigger
// );
const [state, setState] = useState({
loading: false,
error: null,
data: null,
live: false
});
const checkHealth = () => {
setState({ ...state, loading: true });
axios
.post(`${process.env.REACT_APP_API_URL}/${seq}`)
.then(res => {
if (res.status === 200) {
// if (res.data.result.indexOf("Success") !== -1) {
// const data = res.data;
setState({ ...state, live: true });
} else {
setState({ ...state, loading: false, error: new Error() });
}
})
.catch(error => {
setState({ ...state, error });
});
};
// const deleteServer = () => {
// }
const { loading, error, live } = state;
return (
<List className={classes.root}>
<ListItem
role={undefined}
dense
// button
// onClick={() => setFetchTrigger(true)}
>
<ListItemIcon>
<Checkbox
edge="start"
// checked={checked.indexOf(value) !== -1}
tabIndex={-1}
disableRipple
inputProps={{ "aria-labelledby": url }}
/>
</ListItemIcon>
<ListItemText id={`checkbox-list-label-${url}`} primary={url} />
<ListItemSecondaryAction>
<StyledIconButton
loading={loading ? "true" : "false"}
aria-label="Status"
onClick={checkHealth}
>
<CachedIcon />
</StyledIconButton>
{/* {error && <div>{error.toString()}</div>} */}
{/* {!loading && !error && data && <div>{JSON.stringify(data)}</div>} */}
{/* setTimeout 걸어서 시간 지날 때 마다 와이파이 칸 떨어지게 */}
<IconButton aria-label="HealthCheck" onClick={checkHealth}>
<StyledWifiIcon
styledcolor={live ? teal[500] : error ? pink[500] : grey[500]}
/>
</IconButton>
{/* <IconButton ria-label="Delete" onClick={deleteServer}>
<DeleteIcon />
</IconButton> */}
</ListItemSecondaryAction>
</ListItem>
</List>
);
};
export default ServerList;
<file_sep>/src/components/Card.jsx
import styled from "styled-components";
import Card from "@material-ui/core/Card";
const MyCard = styled(Card)`
padding: 16px;
box-sizing: border-box;
width: 100%;
max-width: 400px;
`;
// const Card = () => {
// return (
// <div>
// </div>
// );
// };
export default MyCard;
<file_sep>/src/pages/Home.jsx
import React, { useState, useContext, useEffect, useRef } from "react";
import { withRouter } from "react-router-dom";
import useAxios from "@react-daily-hooks/use-axios";
import Button from "@material-ui/core/Button";
import styled from "styled-components";
import TextField from "@material-ui/core/TextField";
import CardHeader from "@material-ui/core/CardHeader";
import CardContent from "@material-ui/core/CardContent";
import CardActions from "@material-ui/core/CardActions";
import withCardLayout from "../components/withCardLayout";
import ModalCircularProgress from "../components/ModalCircularProgress";
import Store from "../store";
import { setServerList, setLoginStatus } from "../reducer";
const Form = styled.form`
display: flex;
flex-direction: column;
`;
export const StyledCardActions = styled(CardActions)`
display: flex;
justify-content: flex-end;
`;
const Home = ({ history }) => {
const [input, setInput] = useState("");
const [fetchTrigger, setFetchTrigger] = useState(false);
const { loading, error, data } = useAxios(
{
url: `${process.env.REACT_APP_API_URL}/${input}`
},
fetchTrigger
);
const { dispatch } = useContext(Store);
const inputEl = useRef(null);
const handleSubmit = event => {
event.preventDefault();
if (input === "") {
return;
}
localStorage.email = input;
setFetchTrigger(true);
};
useEffect(() => {
inputEl.current.focus();
if (!loading && !error && data) {
dispatch(setLoginStatus(true));
dispatch(setServerList(data.data));
alert("환영합니다!");
history.push("/list");
}
});
return (
<>
{loading && <ModalCircularProgress />}
<>
<CardHeader title="Home" />
<CardContent>
<Form onSubmit={handleSubmit}>
<TextField
inputRef={inputEl}
type="text"
placeholder="이메일을 입력하세요."
value={input}
onChange={e => setInput(e.target.value)}
/>
</Form>
</CardContent>
<StyledCardActions>
<Button
type="submit"
onClick={handleSubmit}
variant="contained"
color="primary"
>
확인
</Button>
</StyledCardActions>
</>
</>
);
};
export default withRouter(withCardLayout(Home));
<file_sep>/src/pages/List.jsx
import React, { useState, useContext, useEffect, useRef } from "react";
import styled from "styled-components";
import axios from "axios";
import ServerList from "../components/ServerList";
import withCardLayout from "../components/withCardLayout";
import TextField from "@material-ui/core/TextField";
import Button from "@material-ui/core/Button";
import Store from "../store";
import { addServerList } from "../reducer";
import { CardContent, CardHeader } from "@material-ui/core";
import { StyledCardActions } from "./Home";
const StyledTextField = styled(TextField)`
flex: 1;
margin-bottom: 8px;
`;
const StyledButton = styled(Button)`
align-self: flex-end;
margin-top: 8px;
`;
const Form = styled.form`
display: flex;
align-items: center;
align-self: stretch;
`;
const List = () => {
const [input, setInput] = useState("");
const {
store: { serverList },
dispatch
} = useContext(Store);
const inputEl = useRef(null);
const handleSubmit = e => {
e.preventDefault();
axios
.post(`${process.env.REACT_APP_API_URL}`, {
email: localStorage.email,
url: input
})
.then(res => {
dispatch(addServerList(input, res.data.seq));
})
.catch(error => {
console.log(error);
});
setInput("");
};
useEffect(() => {
inputEl.current.focus();
}, []);
console.log(Array.from(serverList));
return (
<>
<CardHeader title="Server List" />
<CardContent>
<Form onSubmit={handleSubmit}>
<StyledTextField
inputRef={inputEl}
placeholder="https://www.google.com"
label="서버를 추가하세요."
value={input}
onChange={e => setInput(e.target.value)}
/>
</Form>
</CardContent>
<StyledCardActions>
<StyledButton
variant="contained"
color="primary"
onClick={handleSubmit}
>
확인
</StyledButton>
</StyledCardActions>
{serverList.map((value, index) => (
<ServerList key={index} value={value} />
))}
</>
);
};
export default withCardLayout(List);
<file_sep>/src/reducer.js
export const ADD_SERVER_LIST = "ADD_SERVER_LIST";
export const SET_SERVER_LIST = "SET_SERVER_LIST";
export const SET_LOGIN_STATUS = "SET_LOGIN_STATUS";
export const addServerList = (serverList, seq) => ({
type: ADD_SERVER_LIST,
serverList,
seq
});
export const setServerList = serverList => ({
type: SET_SERVER_LIST,
serverList
});
export const setLoginStatus = isLogged => ({
type: SET_LOGIN_STATUS,
isLogged
});
export const initialState = {
isLogged: localStorage.email ? true : false,
// serverList: [
// { url: "https://www.naver.com", live: false },
// { url: "https://www.op.gg", live: false },
// { url: "https://www.daum.net", live: false }
// ]
serverList: []
};
export default (state = initialState, action) => {
const { type, serverList, seq, isLogged } = action;
switch (type) {
case ADD_SERVER_LIST:
return {
...state,
serverList: [...state.serverList, { url: serverList, seq }]
};
case SET_SERVER_LIST:
return {
...state,
serverList
};
case SET_LOGIN_STATUS:
return {
...state,
isLogged
};
default:
return state;
}
};
<file_sep>/src/App.js
import React, { useReducer, useEffect } from "react";
import { BrowserRouter as Router, Route, Redirect } from "react-router-dom";
import { ThemeProvider } from "@material-ui/styles";
import Index from "./pages/Home";
import List from "./pages/List";
import Header from "./components/Header";
import GlobalStyle from "./styles/GlobalStyle";
import theme from "./styles/theme";
import reducer, { initialState, setServerList } from "./reducer";
import Store from "./store";
import useAxios from "@react-daily-hooks/use-axios";
function App() {
const { loading, error, data } = useAxios(
{
url: `${process.env.REACT_APP_API_URL}/${localStorage.email}`
},
true
);
const [store, dispatch] = useReducer(reducer, initialState);
useEffect(() => {
if (data) {
dispatch(setServerList(data.data));
}
}, [data]);
return (
<Store.Provider value={{ store, dispatch }}>
<ThemeProvider theme={theme}>
<>
<GlobalStyle />
<Router>
<Header store={store} dispatch={dispatch} />
<Route
exact
path="/"
render={() =>
store.isLogged ? <Redirect to="/list" /> : <Index />
}
/>
<Route
path="/list"
render={() =>
!store.isLogged ? (
<Redirect to="/" />
) : (
<List loading={loading} error={error} />
)
}
/>
</Router>
</>
</ThemeProvider>
</Store.Provider>
);
}
export default App;
<file_sep>/src/components/withCardLayout.js
import React from "react";
import Card from "../components/Card";
import { Container } from ".";
const withCardLayout = WrappedComponent => {
return class extends React.Component {
render() {
return (
<Container>
<Card>
<WrappedComponent {...this.props} />
</Card>
</Container>
);
}
};
};
export default withCardLayout;
| 759b6d81cd736fd748b331609da78ee9a938c446 | [
"JavaScript",
"Markdown"
] | 9 | JavaScript | qvil/health-checker | d61a429c2526a720dc5acd6425c270394f995c95 | 4a9aa2cc100dd2828088bf5bae2ee8b74bdf743d |
refs/heads/master | <file_sep>import Cookies from 'js-cookie';
import ApiClient from '@app/src/lib/api/ApiClient';
const getCookieExpiration = () =>
new Date(
Date.now() + 365 * 24 * 60 * 60 * 1000, // expired after one year
);
const isDev = process.env.NODE_ENV === 'development';
const sameSiteAttrs: any = isDev ? {} : { sameSite: 'none', secure: true };
export const setCookie = (token: string) => {
Cookies.set('token', token, {
expires: getCookieExpiration(),
...sameSiteAttrs,
});
};
export const setAuthToken = (token: string, apiClient: ApiClient) => {
setCookie(token);
apiClient.token = token;
};
export const resetCookie = () => {
Cookies.set('token', '', {
expires: getCookieExpiration(),
...sameSiteAttrs,
});
};
<file_sep>import { State } from '@app/src/lib/store';
export const selectArticlesItem = (state: State) => state.articles.item.item;
<file_sep>export { ActionBlock } from './ActionBlock';
<file_sep>export { SelectToThisDay } from './SelectToThisDay';
<file_sep>import * as yup from 'yup';
const confirmCb = (value: string, values: { password: string }) => {
if (value !== values.password) {
throw new Error('Пароли должны совпадать');
}
};
const schema = {
login: yup.string().email('Введите email').required('Обязательное поле'),
password: yup
.string()
.min(3, 'Пароль должен быть длиннее 2 символов')
.required('Пароль должен быть длиннее 2 символов'),
confirm: yup
.string()
.min(3, 'Пароль должен быть длиннее 2 символов')
.required('Подтвердите пароль'),
};
export { confirmCb, schema };
<file_sep>export interface Ref {
_type: string;
_ref: string;
}
<file_sep>export const getLinkStyle = (withoutUnderline: boolean) =>
withoutUnderline ? 'nav' : 'link';
<file_sep>import cx from 'classnames';
import * as yup from 'yup';
import { FormComponentType } from '@app/src/features/common/form/FormConstructor';
import { RadioButtonStyles } from '@app/src/ui/RadioGroup';
const OTHER_SPECIALITY = 'Другое';
const professionOptions = [
{
value: 'Врач-инфекционист',
},
{
value: 'Врач-терапевт',
},
{
value: 'Ординатор-инфекционист',
},
{
value: 'Ординатор-терапевт',
},
{
value: OTHER_SPECIALITY,
},
];
export const formConfig = (styles) => ({
steps: [
{
type: FormComponentType.Input,
required: true,
label: {
text: 'Эл. почта',
},
props: {
name: 'email',
validate: yup.string(),
},
},
{
type: FormComponentType.Input,
required: true,
label: {
text: 'Как к вам обращаться?',
},
props: {
name: 'name',
validate: yup.string(),
},
},
{
type: FormComponentType.PhoneInput,
required: true,
label: {
text: 'Ваш телефон',
},
props: {
name: 'phone',
placeholder: '+7',
},
},
{
type: FormComponentType.Input,
required: true,
label: {
text: 'Ваш город',
},
props: {
name: 'city',
validate: yup.string(),
},
},
{
type: FormComponentType.Input,
label: {
text: 'Ссылка на соцсети (VK/FB)',
},
props: {
name: 'social',
},
},
{
type: FormComponentType.RadioGroup,
required: true,
className: styles.professeionWrapper,
label: {
text: 'Кто вы по профессии?',
},
props: {
name: 'profession',
buttons: professionOptions,
className: styles.professionRadioGroup,
radioStyle: RadioButtonStyles.Radio,
},
},
{
type: FormComponentType.Input,
required: true,
label: {
text: 'Уточните вашу профессию',
},
condition: (values) => values.profession === OTHER_SPECIALITY,
props: {
name: 'otherProfession',
},
},
{
type: FormComponentType.TextArea,
required: true,
label: {
text: 'Чем вы готовы помочь?',
},
props: {
name: 'aid',
validate: yup.string(),
},
},
{
type: FormComponentType.Input,
required: true,
label: {
text: 'Сколько времени в неделю вы готовы уделять проекту?',
},
props: {
name: 'time',
validate: yup.string(),
},
},
],
});
<file_sep>import {
HelpPartnersType,
PartneTypeLabels,
} from '@app/src/domain/models/common/PartnerTypes';
export const mapThemeValueToSelectLabel = {
[HelpPartnersType.BecomePartner]: PartneTypeLabels.BecomePartner,
[HelpPartnersType.BecomeInfopartner]: PartneTypeLabels.BecomeInfopartner,
[HelpPartnersType.VolunteerFundraising]:
PartneTypeLabels.VolunteerFundraising,
[HelpPartnersType.LectureRequest]: PartneTypeLabels.LectureRequest,
};
<file_sep>import * as yup from 'yup';
import { validator } from '../validator';
describe('schema validation works', () => {
const schema = yup
.string()
.email('введите email')
.required('обязательное поле');
const validateCb = () => undefined;
test('should return required error message if value is empty', () => {
const values = { email: '' };
const value = '';
expect(validator({ value, schema, values, validateCb })).toBe(
'обязательное поле',
);
});
test('should return email error message if value isnt email and not empty', () => {
const values = { email: 'wrong' };
const value = 'wrong';
expect(validator({ value, schema, values, validateCb })).toBe(
'введите email',
);
});
test('should return undefined if value is email', () => {
const values = { email: '<EMAIL>' };
const value = '<EMAIL>';
expect(validator({ value, schema, values, validateCb })).toBe(undefined);
});
});
describe('callback validation with schema works', () => {
const schema = yup
.string()
.email('введите email')
.required('обязательное поле');
const validateCb = (value: any, values: any) => {
if (value !== values.email) {
throw new Error('пароли должны совпадать');
}
};
test('should return undefined if values of email and confirm are the same', () => {
const values = { email: '<EMAIL>', confirm: '<EMAIL>' };
const value = '<EMAIL>';
expect(validator({ value, schema, values, validateCb })).toBe(undefined);
});
test('should return error if values of email and confirm are different', () => {
const values = { email: '<EMAIL>', confirm: '<EMAIL>' };
const value = '<EMAIL>';
expect(validator({ value, schema, values, validateCb })).toBe(
'пароли должны совпадать',
);
});
});
<file_sep>import { isMobile } from 'is-mobile';
import { canUseDOM } from '../helpers/canUseDOM';
let ScrollTop = 0;
export const useScrollBodyLock = () => {
const lock = () => {
if (!canUseDOM) return;
document.body.style.overflow = 'hidden';
document.body.style['-webkit-overflow-scrolling'] = 'touch';
if (isMobile()) {
const container = document.getElementById('__next');
if (container) {
ScrollTop = container.scrollTop;
}
}
};
const unlock = () => {
console.log('unlock');
if (!canUseDOM) return;
document.body.style.overflow = 'auto';
document.body.style['-webkit-overflow-scrolling'] = 'auto';
if (isMobile()) {
const container = document.getElementById('__next');
if (container) {
container.scrollTop = ScrollTop;
}
}
};
return { lock, unlock };
};
export function setDocumentPropertyVh() {
const vh = window.innerHeight * 0.01;
document.documentElement.style.setProperty('--vh', `${vh}px`);
}
<file_sep>export const mainLinks = [
{
href: '/for-doctors',
text: 'Врачам',
narrowText: 'Врачам',
mobileMenuText: 'Врачам',
},
{
href: '/for-hospitals',
text: 'Больницам',
narrowText: 'Больницам',
mobileMenuText: 'Больницам',
},
{
href: '/ask',
text: 'Всем',
narrowText: 'Всем',
mobileMenuText: 'Справочная служба',
},
];
export const contentLinks = [
{
href: '/supervisory',
text: 'Наблюдательный совет',
narrowText: 'Наблюдательный совет',
},
{
href: '/partners',
text: 'Партнёры',
narrowText: 'Партнёры',
},
{
href: '/news',
text: 'Новости',
narrowText: 'Новости',
},
{
href: '/contacts',
text: 'Контакты',
narrowText: 'Контакты',
},
];
<file_sep>export const SourceEnum = {
Chat: 'Chat',
};
<file_sep>import { RemoveSection } from './RemoveSection';
export interface FormContext {
removeSectionFromState: RemoveSection;
values: any;
submitting?: boolean;
submitSucceeded?: boolean;
hasValidationErrors?: boolean;
submitError?: any;
submitFailed?: boolean;
changeField: (name: string, value?: any) => void;
}
<file_sep>export { BecomePartnerPage as default } from '@app/src/features/landing';
<file_sep>export const APP_ID = 'pxkfd7bu';
<file_sep>import { Dispatch } from 'redux';
import { actions } from './reducer';
export const setPaymetWidgetData = (data: any) => async (
dispatch: Dispatch<any>,
// getState: () => State,
// { getApi }: ExtraArgs,
) => {
try {
return dispatch(actions.success(data));
} catch (error) {
return dispatch(actions.error(error.message));
}
};
<file_sep>import { State } from '@app/src/lib/store';
import { getPageKeyFromQuery, getPageKeyFromParams } from './query';
import { ArticlesFetchParams } from './config';
export const selectArticles = (query: any) => (state: State) => {
const key = getPageKeyFromQuery(query);
return state?.articles?.list?.list?.pages[key] || [];
};
export const selectArticlesWithParams = (params: ArticlesFetchParams) => (
state: State,
) => {
const key = getPageKeyFromParams(params);
return state?.articles?.list?.list?.pages[key] || [];
};
<file_sep>import { SanityBase } from '@front/domain/models/sanity/SanityBase';
export interface HospitalsHelpWidgetData extends SanityBase {
helpedCount: number;
helpWanted: number;
moneyGathered: number;
}
<file_sep>const { createProxyMiddleware } = require('http-proxy-middleware');
const proxy = createProxyMiddleware({
target: process.env.PROD_IMGPROXY_URL || 'localhost:3001',
changeOrigin: true,
});
module.exports = proxy;
<file_sep>import { NON_BREAKING_HYPHEN, NON_BREAKING_SPACE } from '@app/src/lib/config';
export const preparePhoneForTypography = (phone: string) =>
phone.replace('-', NON_BREAKING_HYPHEN).replace(' ', NON_BREAKING_SPACE);
<file_sep>export const stubNewsItemContent = [
{
_key: '4ad78df92064',
_type: 'block',
children: [
{
_key: '4ad78df920640',
_type: 'span',
marks: [],
text: 'Заголовок h1',
},
],
markDefs: [],
style: 'h1',
},
{
_key: 'a1a56ad3e4d8',
_type: 'block',
children: [
{
_key: 'a1a56ad3e4d80',
_type: 'span',
marks: [],
text: 'Заголовок h2',
},
],
markDefs: [],
style: 'h2',
},
{
_key: '78a75fe311ad',
_type: 'block',
children: [
{
_key: '78a75fe311ad0',
_type: 'span',
marks: [],
text: 'Заголовок h3',
},
],
markDefs: [],
style: 'h3',
},
{
_key: '<KEY>',
_type: 'block',
children: [
{
_key: '<KEY>',
_type: 'span',
marks: [],
text: 'Заголовок h4',
},
],
markDefs: [],
style: 'h4',
},
{
_key: '<KEY>',
_type: 'block',
children: [
{
_key: '<KEY>',
_type: 'span',
marks: [],
text: 'крупный текст',
},
],
markDefs: [],
style: 'blockquote',
},
{
_key: '1acaf478b30b',
_type: 'block',
children: [
{
_key: '1acaf478b30b0',
_type: 'span',
marks: ['strong'],
text: 'Жирный текст',
},
],
markDefs: [],
style: 'normal',
},
{
_key: 'acc40ea38187',
_type: 'block',
children: [
{
_key: 'acc40ea381870',
_type: 'span',
marks: [],
text: 'список',
},
],
level: 1,
listItem: 'bullet',
markDefs: [],
style: 'normal',
},
{
_key: '<KEY>',
_type: 'block',
children: [
{
_key: '<KEY>',
_type: 'span',
marks: [],
text: 'с точками',
},
],
level: 1,
listItem: 'bullet',
markDefs: [],
style: 'normal',
},
{
_key: '<KEY>',
_type: 'block',
children: [
{
_key: '<KEY>',
_type: 'span',
marks: [],
text: 'список ',
},
],
level: 1,
listItem: 'bullet',
markDefs: [],
style: 'normal',
},
{
_key: '354abef1a0db',
_type: 'block',
children: [
{
_key: '354abef1a0db0',
_type: 'span',
marks: [],
text: 'с точками',
},
],
level: 1,
listItem: 'bullet',
markDefs: [],
style: 'normal',
},
{
_key: '6ac17193a51e',
_type: 'block',
children: [
{
_key: '6ac17193a51e0',
_type: 'span',
marks: [],
text: 'список',
},
],
level: 1,
listItem: 'number',
markDefs: [],
style: 'normal',
},
{
_key: 'e01f7e4f421c',
_type: 'block',
children: [
{
_key: 'e01f7e4f421c0',
_type: 'span',
marks: [],
text: 'с номерами',
},
],
level: 1,
listItem: 'number',
markDefs: [],
style: 'normal',
},
{
_key: '9608777edaf4',
_type: 'block',
children: [
{
_key: '9608777edaf40',
_type: 'span',
marks: [],
text: 'список',
},
],
level: 1,
listItem: 'number',
markDefs: [],
style: 'normal',
},
{
_key: '<KEY>',
_type: 'block',
children: [
{
_key: '<KEY>',
_type: 'span',
marks: [],
text: 'с номерами',
},
],
level: 1,
listItem: 'number',
markDefs: [],
style: 'normal',
},
{
_key: '01c3ebcc63c7',
_type: 'block',
children: [
{
_key: '01c3ebcc63c70',
_type: 'span',
marks: [],
text: '',
},
{
_key: '01c3ebcc63c71',
_type: 'span',
marks: ['59a9c17959a3'],
text: 'Ссылка стандартная',
},
{
_key: '01c3ebcc63c72',
_type: 'span',
marks: [],
text: '',
},
],
markDefs: [
{
_key: '59a9c17959a3',
_type: 'link',
href: '/',
options: 'default',
},
],
style: 'normal',
},
{
_key: 'bf2edbed1a48',
_type: 'block',
children: [
{
_key: 'bf2edbed1a480',
_type: 'span',
marks: [],
text: '',
},
{
_key: 'bf2edbed1a481',
_type: 'span',
marks: ['80472f93632d'],
text: 'Ссылка в новой вкладке',
},
{
_key: 'bf2edbed1a482',
_type: 'span',
marks: [],
text: '',
},
],
markDefs: [
{
_key: '<KEY>',
_type: 'link',
href: 'google.com',
options: 'blank',
},
],
style: 'normal',
},
{
_key: '<KEY>',
_type: 'block',
children: [
{
_key: '<KEY>',
_type: 'span',
marks: ['22370f6b7e3e'],
text: '',
},
{
_key: '<KEY>',
_type: 'span',
marks: [],
text: '',
},
{
_key: '<KEY>',
_type: 'span',
marks: ['7ec8c6f38554'],
text: 'Ссылкуа в попапе',
},
{
_key: '<KEY>',
_type: 'span',
marks: [],
text: '',
},
],
markDefs: [
{
_key: '22370f6b7e3e',
_type: 'link',
},
{
_key: '<KEY>',
_type: 'link',
href: 'google.com',
options: 'modal',
},
],
style: 'normal',
},
{
_key: '<KEY>',
_type: 'block',
children: [
{
_key: '<KEY>',
_type: 'span',
marks: [],
text: '',
},
{
_key: '<KEY>',
_type: 'span',
marks: ['be1430d6704d'],
text: 'Телефон',
},
{
_key: '<KEY>',
_type: 'span',
marks: [],
text: '',
},
],
markDefs: [
{
_key: 'be1430d6704d',
_type: 'link',
href: '+790000000000',
options: 'phone',
},
],
style: 'normal',
},
{
_key: '7c0e9feb3c56',
_type: 'block',
children: [
{
_key: '7c0e9feb3c560',
_type: 'span',
marks: [],
text: '',
},
{
_key: '7c0e9feb3c561',
_type: 'span',
marks: ['f7960e5c3dd6'],
text: '',
},
{
_key: '7c0e9feb3c562',
_type: 'span',
marks: [],
text: '',
},
],
markDefs: [
{
_key: 'f7960e5c3dd6',
_type: 'link',
href: '<EMAIL>',
options: 'email',
},
],
style: 'normal',
},
{
_key: 'bd34f88810fa',
_type: 'customImage',
image: {
_type: 'image',
asset: {
_ref: 'image-b7d9b1f87b1d961fa568eb6943886a4e0f8e1fa4-299x168-jpg',
_type: 'reference',
},
},
isFullsreen: true,
text: [
{
_key: '241f3fd5a395',
_type: 'block',
children: [
{
_key: '241f3fd5a3950',
_type: 'span',
marks: ['strong'],
text: 'Изображение',
},
{
_key: '241f3fd5a3951',
_type: 'span',
marks: [],
text: ' ',
},
{
_key: '241f3fd5a3952',
_type: 'span',
marks: ['21e8f5dcb1dc'],
text: 'коронавируса',
},
{
_key: '241f3fd5a3953',
_type: 'span',
marks: [],
text: '',
},
],
markDefs: [
{
_key: '<KEY>',
_type: 'link',
href:
'https://www.who.int/emergencies/diseases/novel-coronavirus-2019/events-as-they-happen',
options: 'blank',
},
],
style: 'normal',
},
],
},
{
_key: '<KEY>',
_type: 'block',
children: [
{
_key: '<KEY>',
_type: 'span',
marks: [],
text: '',
},
{
_key: '<KEY>',
_type: 'span',
marks: ['f7960e5c3dd6'],
text: '',
},
{
_key: '<KEY>',
_type: 'span',
marks: [],
text: '',
},
],
markDefs: [
{
_key: 'f7960e5c3dd6',
_type: 'link',
href: '<EMAIL>',
options: 'email',
},
],
style: 'normal',
},
{
_key: 'c699a534b232',
_type: 'youtube',
text: [
{
_key: '610bdd1de520',
_type: 'block',
children: [
{
_key: '610bdd1de5200',
_type: 'span',
marks: ['strong'],
text: 'Жирная подпись ',
},
{
_key: '610bdd1de5201',
_type: 'span',
marks: [],
text: 'с текстом и ',
},
{
_key: '610bdd1de5202',
_type: 'span',
marks: ['4bdcc3427db1'],
text: 'ссылкой',
},
{
_key: '610bdd1de5203',
_type: 'span',
marks: [],
text: '',
},
],
markDefs: [
{
_key: '<KEY>',
_type: 'link',
href: 'google.com',
options: 'default',
},
],
style: 'normal',
},
],
url:
'https://storage.googleapis.com/coverr-main/mp4/coverr-hiking-group-traveling-a-mountain-edge-1585318491204.mp4',
},
{
_key: 'c76e66964baf',
_type: 'actionBlock',
button: {
_type: 'actionButton',
link: 'google.com',
options: 'blank',
text: 'Кнопка',
type: 'primary',
},
text: [
{
_key: '<KEY>',
_type: 'block',
children: [
{
_key: '<KEY>',
_type: 'span',
marks: [],
text: 'Обычный текст',
},
],
markDefs: [],
style: 'normal',
},
{
_key: '<KEY>',
_type: 'block',
children: [
{
_key: '<KEY>',
_type: 'span',
marks: ['strong'],
text: 'Жирный текст',
},
],
markDefs: [],
style: 'normal',
},
{
_key: '<KEY>',
_type: 'block',
children: [
{
_key: '<KEY>',
_type: 'span',
marks: [],
text: '',
},
{
_key: '<KEY>',
_type: 'span',
marks: ['d7712f49fc2b'],
text: 'Ссылка',
},
{
_key: '<KEY>',
_type: 'span',
marks: [],
text: '',
},
],
markDefs: [
{
_key: '<KEY>',
_type: 'link',
href: 'google.com',
options: 'blank',
},
],
style: 'normal',
},
],
title: 'Выделенный блок',
},
{
_key: '<KEY>',
_type: 'smallText',
text: 'мелкий текст',
},
];
<file_sep>import { State } from '@app/src/lib/store';
import { HospitalsHelpWidgetData } from '@front/domain/models/common/HospitalsHelpWidgetData';
export const selectHospitalsHelpWidgetData = (
state: State,
): HospitalsHelpWidgetData | undefined => state.hospitalsHelpWidget.value;
<file_sep>export { NewsItemPage } from './NewsItemPage';
<file_sep>import { Dispatch } from 'redux';
import { ExtraArgs, State } from '@app/src/lib/store';
import { needToFetch } from '@app/src/helpers/needToFetch';
import { featuredNewsQueryBuilder } from '../helpers/featuredNewsQueryBuilder';
import { actions } from './reducer';
import { selectFeaturedNews } from './selectFeaturedNews';
export const getFeaturedNewsFromSanity = () => async (
dispatch: Dispatch<any>,
getState: () => State,
{ getApi }: ExtraArgs,
) => {
if (needToFetch(selectFeaturedNews(getState()))) {
const api = getApi(getState);
try {
dispatch(actions.request());
const query = featuredNewsQueryBuilder();
const news = await api.getNews(query);
return dispatch(actions.success(news));
} catch (error) {
return dispatch(actions.error(error.message));
}
}
};
<file_sep>const clientSteps = ['Заполните заявку', 'Опишите ситуацию', 'Задайте вопросы'];
const managerSteps = [
'Короткая заявка',
'Медицинские данные',
'Вопросы к эксперту',
];
export { clientSteps, managerSteps };
<file_sep>export const aboutText = `
<p>
«Что делать:» — это общественная инициатива <a target="_blank" href="https://nenaprasno.ru/" rel="noopener noreferrer">Фонда медицинских решений «Не напрасно»</a>, которая объединяет некоммерческие организации, бизнес, СМИ и простых людей в единую линию обороны против коронавирусной инфекции.
</p>
<p>
Вместе с крупнейшими специалистами России и других стран мы разрабатываем и быстро реализуем проекты направленные против COVID-19 по принципу «максимальный эффект при минимальных вложениях»:
</p>`;
<file_sep>export { ForDoctors as default } from '@app/src/features/system/for-doctors';
<file_sep>export const SIGN_UP_MODAL = 'sign-up';
<file_sep>import { InputType } from '@app/src/features/common/form';
import { FormComponentType } from '@app/src/features/common/form/FormConstructor';
import { PartneTypeLabels } from '@app/src/domain/models/common/PartnerTypes';
// Options for the «Тема» (topic) select of the partner contact form.
// Labels come from PartnerTypes (note: the imported name `PartneTypeLabels`
// carries a typo, but it is declared elsewhere and cannot be renamed here).
export const themeOptions = [
  {
    label: PartneTypeLabels.BecomePartner,
  },
  {
    label: PartneTypeLabels.BecomeInfopartner,
  },
  {
    label: PartneTypeLabels.VolunteerFundraising,
  },
  {
    label: PartneTypeLabels.LectureRequest,
  },
];
// Declarative field list consumed by FormConstructor to render the partner
// contact form. `props.name` is the submitted field key.
export const formConfig = {
  steps: [
    {
      type: FormComponentType.Input,
      required: true,
      label: {
        text: 'Имя',
      },
      props: {
        name: 'name',
      },
    },
    {
      // Optional: organisation name.
      type: FormComponentType.Input,
      label: {
        text: 'Название организации',
      },
      props: {
        name: 'organisation_name',
      },
    },
    {
      type: FormComponentType.Input,
      required: true,
      label: {
        text: 'Почта',
      },
      props: {
        name: 'email',
        type: InputType.Email,
      },
    },
    {
      type: FormComponentType.PhoneInput,
      required: true,
      label: {
        text: 'Телефон',
      },
      props: {
        name: 'phone',
      },
    },
    {
      type: FormComponentType.Select,
      required: true,
      label: {
        text: 'Тема',
      },
      props: {
        name: 'theme',
        options: themeOptions,
      },
    },
    {
      type: FormComponentType.TextArea,
      required: true,
      label: {
        text: 'Сообщение',
      },
      props: {
        name: 'message',
      },
    },
  ],
};
<file_sep>import { PrimaryFooterLink } from '@app/src/ui/organisms/Footer/components/Menu';
// Footer navigation for the desktop layout.
// NOTE(review): the empty `{ title: '', href: '' }` entries are presumably
// grid spacers keeping the column layout aligned — confirm with the Footer
// component before removing them.
export const linksDesktop = [
  { title: 'Врачам', href: '/for-doctors' },
  { title: 'О проекте', href: '/ask/#about' },
  { title: 'Помощь больницам', href: '/for-hospitals' },
  { title: 'Наблюдательный совет', href: '/supervisory' },
  { title: 'Справочная служба', href: '/ask' },
  { title: 'Партнёры', href: '/partners' },
  { title: '', href: '' },
  { title: 'Новости', href: '/news' },
  { title: '', href: '' },
  { title: 'Контакты', href: '/contacts' },
] as PrimaryFooterLink[];
// Same links reordered (and without spacers) for the mobile layout.
export const linksMobile = [
  { title: 'Врачам', href: '/for-doctors' },
  { title: 'Помощь больницам', href: '/for-hospitals' },
  { title: 'Справочная служба', href: '/ask' },
  { title: 'О проекте', href: '/ask/#about' },
  { title: 'Наблюдательный совет', href: '/supervisory' },
  { title: 'Партнёры', href: '/partners' },
  { title: 'Новости', href: '/news' },
  { title: 'Контакты', href: '/contacts' },
] as PrimaryFooterLink[];
<file_sep>export { NavCards } from './NavCards';
<file_sep>import { Action } from 'redux';
import {
createFetchingSymbiote,
createInitialState,
FetchingActions,
FetchingState,
} from '@app/src/lib/symbioteFactory';
// Per-field flags marking which inputs caused a failed sign-in.
export interface SignInErrorFields {
  password?: boolean;
  confirm?: boolean;
  login?: boolean;
}
// Structured sign-in failure as reported by the API.
export interface SignInError {
  fields: SignInErrorFields;
  message: string;
  code?: number;
}
// Sign-in slice: auth token plus the various error channels.
interface State extends FetchingState {
  token: string;
  authViolateStatus?: boolean;
  signInError?: SignInError;
}
interface Actions extends FetchingActions {
  success(token: string): Action;
  authViolateStatus(value: boolean): Action;
  signInError(value?: SignInError): Action;
}
const initialState = createInitialState({
  token: '',
  authViolateStatus: undefined,
  signInError: undefined,
});
const { actions, reducer } = createFetchingSymbiote<State, Actions>(
  initialState,
  // success: store the freshly issued token.
  (state, token) => ({
    ...state,
    token,
  }),
  'signIn',
  {
    // Both extra symbiotes also terminate the fetching/error lifecycle so the
    // UI leaves the loading state when they fire.
    authViolateStatus: (state, authViolateStatus) => ({
      ...state,
      error: false,
      fetching: false,
      authViolateStatus,
    }),
    signInError: (state, signInError) => ({
      ...state,
      error: false,
      fetching: false,
      signInError,
    }),
  },
);
export { reducer, actions };
export type { State, Actions };
<file_sep>export { Experts as default } from '@app/src/features/system';
<file_sep>export { VolunteerPage } from './page/VolunteerPage';
<file_sep>import { useCallback } from 'react';
// Toggle handler for a checkbox-like control that supports both uncontrolled
// (internal state only) and controlled (externalValue supplied) usage.
export const useHandleChange = (
  internalValue: boolean,
  setInternalValue: (v: boolean) => void,
  externalValue?: boolean,
  setExternalValue?: (v: boolean) => void,
  disabled = false,
) => {
  return useCallback(() => {
    if (!disabled) {
      const newValue = !internalValue;
      if (setExternalValue) {
        setExternalValue(newValue);
      }
      if (externalValue !== undefined) {
        // Controlled mode: mirror the *current* external value rather than
        // the toggled one. NOTE(review): this means the visible state only
        // flips after the parent feeds the new value back in — confirm this
        // is the intended controlled behaviour and not a stale-value bug.
        setInternalValue(externalValue);
      } else {
        // Uncontrolled mode: flip local state directly.
        setInternalValue(newValue);
      }
    }
  }, [
    internalValue,
    setInternalValue,
    externalValue,
    setExternalValue,
    disabled,
  ]);
};
<file_sep>export { Docs as default } from '@app/src/features/landing';
<file_sep>enum FooterType {
Primary = 'Primary',
Secondary = 'Secondary',
}
export default FooterType;
<file_sep>import { Action } from 'redux';
import {
createFetchingSymbiote,
createInitialState,
FetchingActions,
FetchingState,
} from '@app/src/lib/symbioteFactory';
import { HospitalsHelpWidgetData } from '@front/domain/models/common/HospitalsHelpWidgetData';
// Fetching slice that holds the "help hospitals" widget payload.
interface State extends FetchingState {
  value?: HospitalsHelpWidgetData;
}
interface Actions extends FetchingActions {
  // Fix: payload typed as HospitalsHelpWidgetData instead of `any`, matching
  // the success symbiote below so dispatch sites are actually type-checked.
  success(hospitalsHelpWidgetData: HospitalsHelpWidgetData): Action;
}
// Fix: initial value is `undefined` (was `null`), matching the optional
// `value` field above and the `| undefined` contract advertised by
// selectHospitalsHelpWidgetData.
const initialState = createInitialState({
  value: undefined,
});
const { actions, reducer } = createFetchingSymbiote<State, Actions>(
  initialState,
  // success: store the fetched widget payload.
  (state: State, data: HospitalsHelpWidgetData) => {
    return {
      ...state,
      value: data,
    };
  },
  'hospitalsHelpWidget',
);
export { reducer, actions };
export type { State, Actions };
<file_sep>import { Dispatch } from 'redux';
import { ExtraArgs, State } from '@app/src/lib/store';
import { actions as modalActions } from '@app/src/features/common/modal/reducer';
import { updateRequestFormData } from '@app/src/domain/reducers/requestConsultationReducer/actions';
import {
actions as userActions,
currentUser,
} from '@app/src/domain/reducers/userReducer';
import { setCookie } from '../../../features/login/features/signIn/helpers/setAuthToken';
import { actions } from './reducer';
import { showIntercom } from '../../../features/landing/features/request-chat/showIntercom';
import { setUserEmailLocalStorage } from './userEmailLocalStorage';
// Thunk: authenticate the user, persist credentials, hydrate dependent state
// and close the login modal. Rethrows on failure so the calling form can
// react (the structured error is also pushed into the signIn slice).
export const loginAction = (username: string, password: string) => async (
  dispatch: Dispatch<any>,
  getState: () => State,
  { getApi }: ExtraArgs,
) => {
  const api = getApi(getState);
  try {
    dispatch(actions.request());
    const { token } = await api.login(username, password);
    // Persist credentials before hitting the store so the follow-up requests
    // (currentUser, updateRequestFormData) go out authenticated.
    setUserEmailLocalStorage(username);
    setCookie(token);
    dispatch(userActions.setToken(token));
    dispatch(modalActions.close());
    await dispatch(currentUser());
    await dispatch(updateRequestFormData());
    showIntercom();
    return dispatch(actions.success(token));
  } catch (error) {
    // NOTE(review): assumes an axios-style error carrying `response.data`;
    // a network failure without a response would throw here instead of being
    // reported — confirm upstream handling.
    const { message, fields, code } = error.response.data;
    dispatch(actions.error(error.message));
    dispatch(actions.signInError({ message, fields, code }));
    throw error;
  }
};
<file_sep>export { BecomePartnerPage } from './page/BecomePartnerPage';
<file_sep>import {
flow,
isArray,
isPlainObject,
isString,
isUndefined,
mapValues,
} from 'lodash';
// Matches ISO-8601-like timestamps such as "2020-01-02T10:20:30.000Z".
const DATE_REGEX = /(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2})(.*)/;
// Revives a serialized timestamp string into a Date; any other string is
// returned untouched.
export const tapDate = (value: string): Date | string => {
  if (DATE_REGEX.test(value)) {
    return new Date(value);
  }
  return value;
};
// Every string transformation applied while rehydrating the store.
const taps = [tapDate];
// Recursively walks rehydrated store data, applying the taps to each string
// while preserving the overall array/object structure. Scalars and exotic
// objects pass through unchanged.
const actualizeStore = (data: any): any => {
  if (isString(data)) {
    return flow(taps)(data);
  }
  if (isArray(data)) {
    return data.map(actualizeStore);
  }
  if (isUndefined(data) || !isPlainObject(data)) {
    return data;
  }
  return mapValues(data, actualizeStore);
};
export default actualizeStore;
<file_sep>import { State } from '@app/src/lib/store';
export const selectResources = () => (state: State) => state.resources.list;
<file_sep>import { SanityBase } from '@app/src/domain/models/sanity/SanityBase';
import { Photo } from '@app/src/domain/models/sanity/Photo';
// Partner document shape as delivered by Sanity CMS.
export interface Partner extends SanityBase {
  status: boolean;
  name: string;
  type: string;
  // NOTE(review): presumably flags charitable foundations — confirm against
  // the Sanity schema.
  isATrust?: boolean;
  url: string;
  subtitle: string;
  logo: Photo;
  // Manual ordering weight used when listing partners.
  sortIndex: number;
  // Identifiers of the pages on which this partner should be displayed.
  pageToShow: string[];
}
<file_sep>export { ResetForm } from './ResetForm';
<file_sep>export { CloudPayments } from './CloudPayments';
<file_sep>import { Dispatch } from 'redux';
import { ExtraArgs, State } from '@app/src/lib/store';
import { actions } from './reducer';
// Thunk: loads the resources list from Sanity and records the request
// lifecycle (request/success/error) in the resources slice.
export const getResourcesFromSanity = () => async (
  dispatch: Dispatch<any>,
  getState: () => State,
  { getApi }: ExtraArgs,
) => {
  const api = getApi(getState);
  try {
    dispatch(actions.request());
    const list = await api.getResources();
    return dispatch(actions.success(list));
  } catch (err) {
    return dispatch(actions.error(err.message));
  }
};
<file_sep>import { Action } from 'redux';
import {
createFetchingSymbiote,
createInitialState,
FetchingActions,
FetchingState,
} from '@app/src/lib/symbioteFactory';
// Generic fetching slice holding an opaque widget payload.
interface State extends FetchingState {
  data: any;
}
interface Actions extends FetchingActions {
  success(data: any): Action;
}
const initialState = createInitialState({
  data: null,
});
const { actions, reducer } = createFetchingSymbiote<State, Actions>(
  initialState,
  // success: store the fetched payload as-is.
  (state: State, data: any) => {
    return {
      ...state,
      data,
    };
  },
  'widgetReducer',
);
export { reducer, actions };
export type { State, Actions };
<file_sep>export { SystemHero } from './SystemHero';
<file_sep>import { Dispatch } from 'redux';
import { head } from 'lodash';
import { ExtraArgs, State } from '@app/src/lib/store';
import { needToFetch } from '@app/src/helpers/needToFetch';
import { articlesItemRequestBuilder } from '../helpers/articlesItemRequestBuilder';
import { actions } from './reducer';
import { selectArticlesItem } from './selectArticlesItem';
// Thunk: loads a single article by its slug. Refetches when nothing is
// cached or when a *different* article is cached. An empty query result is
// surfaced as a 'not found' error through the slice.
export const getArticlesItemFromSanity = (code: string) => async (
  dispatch: Dispatch<any>,
  getState: () => State,
  { getApi }: ExtraArgs,
) => {
  const item = selectArticlesItem(getState());
  if (needToFetch(item) || item?.code.current !== code) {
    const api = getApi(getState);
    try {
      dispatch(actions.request());
      const query = articlesItemRequestBuilder(code);
      const articles = await api.getArticlesItem(query);
      // The query may match several documents; only the first is used.
      const articlesItem = head(articles);
      if (articlesItem) {
        return dispatch(actions.success(articlesItem));
      }
      // Caught immediately below and converted into an error dispatch.
      throw new Error('not found');
    } catch (error) {
      return dispatch(actions.error(error.message));
    }
  }
};
<file_sep>export default class UnknownModalException extends Error {
public readonly key: string;
public constructor(key: string, message?: string) {
super(message);
this.key = key;
}
}
<file_sep>import { combineReducers } from 'redux';
import * as articlesItem from './item';
import * as articlesList from './list';
import * as featuredArticlesList from './featured';
// Combined articles slice: paginated list, currently open item, and the
// featured subset.
export interface State {
  list: articlesList.State;
  item: articlesItem.State;
  featuredArticles: featuredArticlesList.State;
}
export const reducer = combineReducers<State, any>({
  item: articlesItem.reducer,
  list: articlesList.reducer,
  featuredArticles: featuredArticlesList.reducer,
} as any);
<file_sep>import { Action } from 'redux';
import { createSymbiote } from 'redux-symbiote';
import { Role } from '../../models/common/User';
// Authenticated-user slice: token, login name, granted roles and the
// quota-availability flag.
interface State {
  token: string;
  login: string;
  roles: Role[];
  quotasAvailable: boolean;
}
interface Actions {
  setToken(token: string): Action;
  setLogin(login: string): Action;
  setUser(roles: Role[]): Action;
  setQuotasAvailable(available: boolean): Action;
}
const initialState: State = {
  token: '',
  login: '',
  roles: [],
  quotasAvailable: false,
};
// Each symbiote is a plain field setter over the slice.
const { actions, reducer } = createSymbiote<State, Actions>(
  initialState,
  {
    setToken: (state, token: string) => ({
      ...state,
      token,
    }),
    setLogin: (state, login: string) => ({
      ...state,
      login,
    }),
    setUser: (state, roles: Role[]) => ({
      ...state,
      roles,
    }),
    setQuotasAvailable: (state, quotasAvailable: boolean) => ({
      ...state,
      quotasAvailable,
    }),
  },
  'user',
);
export { reducer, actions };
export type { State, Actions };
<file_sep>export { VolunteerPage as default } from '@app/src/features/landing';
<file_sep>export { WebinarHeader } from './WebinarHeader';
<file_sep>export { Request as default } from '@app/src/features/landing';
<file_sep>import { ArticlesItem } from '../domain/models/common/ArticlesItem';
import { CategoryType } from '../domain/models/common/ArticlesCategoryType';
// True when the (loosely typed) article belongs to the Webinar category;
// undefined when `categories` is absent.
export const getIsWebinar = (data: any) => {
  const article = data as ArticlesItem;
  return article.categories?.includes(CategoryType.Webinar as any);
};
<file_sep>// RegExp from https://www.regextester.com/93652
// DON'T READ IT
// Just use, it's correct
export const URL_REGEXP = /(?:(?:https?|ftp):\/\/|\b(?:[a-z\d]+\.))(?:(?:[^\s()<>]+|\((?:[^\s()<>]+|(?:\([^\s()<>]+\)))?\))+(?:\((?:[^\s()<>]+|(?:\(?:[^\s()<>]+\)))?\)|[^\s`!()\[\]{};:'".,<>?«»“”‘’]))?/g;
export const PREVIEW_IMAGE_REGEXP = /\/preview-image.*/;
export const URL_FILE_REGEX = /.+\.(zip|rar|7z|tar|zipx|pdf|DMG)$/;
<file_sep>import { Dispatch } from 'redux';
import { head } from 'lodash';
import { ExtraArgs, State } from '@app/src/lib/store';
import { needToFetch } from '@app/src/helpers/needToFetch';
import { newsItemRequestBuilder } from '../helpers/newsItemRequestBuilder';
import { actions } from './reducer';
import { selectNewsItem } from './selectNewsItem';
// Thunk: loads a single news item by its slug. Refetches when nothing is
// cached or when a *different* item is cached.
export const getNewsItemFromSanity = (code: string) => async (
  dispatch: Dispatch<any>,
  getState: () => State,
  { getApi }: ExtraArgs,
) => {
  const item = selectNewsItem(getState());
  if (needToFetch(item) || item?.code.current !== code) {
    const api = getApi(getState);
    try {
      dispatch(actions.request());
      const query = newsItemRequestBuilder(code);
      const news = await api.getNewsItem(query);
      // The query may match several documents; only the first is used.
      const newsItem = head(news);
      if (newsItem) {
        return dispatch(actions.success(newsItem));
      }
      // Bug fix: previously a missing item dispatched neither success nor
      // error, leaving the slice stuck in `fetching`. Mirror the sibling
      // getArticlesItemFromSanity and surface a 'not found' error instead.
      throw new Error('not found');
    } catch (error) {
      return dispatch(actions.error(error.message));
    }
  }
};
<file_sep>import * as yup from 'yup';
// Shared "required field" message (Russian UI copy).
const REQUIRED_MESSAGE = 'Обязательное поле';
// Yup validators for the consultation request fields.
export const schema = {
  target: yup.string().required(REQUIRED_MESSAGE),
  // Age must be a strictly positive number.
  age: yup
    .number()
    .positive('Введите положительное число')
    .required(REQUIRED_MESSAGE),
  gender: yup.string().required(REQUIRED_MESSAGE),
  region: yup.string().required(REQUIRED_MESSAGE),
};
// Reusable "required string" validator for ad-hoc fields.
export const requiredSchema = yup.string().required(REQUIRED_MESSAGE);
<file_sep>export function showIntercom() {
if (typeof window !== 'undefined') {
const { pathname } = window.location;
if (pathname.includes('/request/chat')) {
(window as any).Intercom('show');
}
}
}
<file_sep>import { StepPointerModel, StepPointerType } from '../molecule/StepPointer';
import { clientSteps } from '../steps';
// Visual state of a step pointer relative to the currently active step:
// done -> Success, active -> Full, upcoming -> Empty.
const defineType = (index: number, current: number) => {
  if (index === current) {
    return StepPointerType.Full;
  }
  return index < current ? StepPointerType.Success : StepPointerType.Empty;
};
// Claim wizard pages, in step order, for a given claim id.
const stepRoutes = (id: string) => [
  `/client/new-claim/${id}/`,
  `/client/claim/${id}/situation/`,
  `/client/claim/${id}/questions/`,
];
// No id means the claim does not exist yet, so steps are not navigable.
const defineHref = (index: number, id?: string) =>
  id ? stepRoutes(id)[index] : undefined;
// Steps after the active one cannot be jumped to.
const defineDisabled = (index: number, current: number) => index > current;
// Builds the pointer models for the client claim wizard progress bar.
export function getClientSteps(id: string, current: number) {
  return clientSteps.map(
    (name, index): StepPointerModel => ({
      title: name,
      type: defineType(index, current),
      disabled: defineDisabled(index, current),
      href: defineHref(index, id),
    }),
  );
}
<file_sep>export { ForDoctorsPage as ForDoctors } from './ForDoctorsPage';
export { ForDoctorsArticlePage as ForDoctorsArticle } from './ForDoctorsArticlePage';
<file_sep>export { HospitalAidPage as default } from '@app/src/features/landing';
<file_sep>export { ClickableText } from './ClickableText';
<file_sep>export { SystemFooter } from './SystemFooter';
<file_sep>import dayjs from 'dayjs';
import { getDateString } from './getDateString';
import { DateInterface } from './types';
// Converts a structured date into an epoch timestamp via dayjs.
// NOTE(review): dayjs().valueOf() returns epoch *milliseconds*, while the
// name suggests seconds — confirm what callers actually expect.
const getDateInSeconds = (date: DateInterface) => {
  const isoString = getDateString(date);
  return dayjs(isoString).valueOf();
};
export { getDateInSeconds };
<file_sep>export { SystemMobileMenu } from './SystemMobileMenu';
<file_sep>import {
DateInterface,
shouldValidateDates,
validateDates,
} from '@app/src/lib/helpers/validateDates';
import { errorMessagesMap } from './errorMessages';
import { getDates } from './getDates';
import { pickErrorMessage } from './pickErrorMessage';
// Cross-field date-order validator for final-form style callbacks: extracts
// the dates found at `paths` from the form values and, once all of them are
// present, throws a human-readable error if their order is invalid.
const validationCb = (paths: DateInterface[]) => (_: any, values: any) => {
  const dates = getDates(paths, values);
  // Only validate when every referenced date has been filled in.
  if (shouldValidateDates(dates)) {
    const errorCode = validateDates(dates);
    const errorMessage = pickErrorMessage(errorMessagesMap, errorCode);
    if (errorMessage) {
      // Thrown (not returned) — the form library treats it as a field error.
      throw new Error(errorMessage);
    }
  }
};
export { validationCb };
<file_sep>const nextRoutes = require('next-routes');
module.exports = nextRoutes()
// landing
.add('system', '/', 'landing')
.add('request', '/request', 'landing/request')
.add('help-volunteers', '/help-volunteers', 'landing/help-volunteers')
.add('help-partners', '/help-partners', 'landing/help-partners')
.add('hospital-aid', '/hospital-aid', 'landing/hospital-aid')
.add('checklist', '/checklist', 'landing/checklist')
.add('request-chat', '/request/chat', 'landing/request-chat')
.add('landing-partners', '/partners', 'landing/partners')
.add('landing-contacts', '/contacts', 'landing/contacts')
.add('experts', '/supervisory', '/landing/experts')
.add('expert', '/supervisory/:id', '/landing/expert')
.add('landing-for-hospitals', '/for-hospitals', 'landing/for-hospitals')
.add('landing-for-doctors', '/for-doctors', 'landing/for-doctors')
.add('for-doctors-article', '/for-doctors/:id', 'landing/for-doctors-article')
.add('landing-news', '/news', 'landing/news')
.add('landing-news-item', '/news/:id', 'landing/news-item')
// ask
.add('ask-experts', '/ask/experts/', 'ask/experts')
.add('ask-expert', '/ask/experts/:id', 'ask/expert')
.add('ask', '/ask', 'ask')
.add('docs', '/docs', 'landing/docs')
.add('icons-demo', '/icons-demo', 'icons-demo');
<file_sep>import { PrimaryFooterLink } from './components/Menu';
// Compact footer menu variant.
export const linksShort = [
  { title: 'О проекте', href: '/ask#about' },
  { title: 'Помочь проекту', href: '/#help' },
  { title: 'Обратная связь', href: '/contacts' },
] as PrimaryFooterLink[];
// Extended footer menu variant.
export const linksLong = [
  { title: 'О проекте', href: '/ask#about' },
  { title: 'Контакты', href: '/contacts' },
  // NOTE(review): '#help' (vs '/#help' in linksShort) only works while the
  // user is already on the home page — confirm intent.
  { title: 'Помочь проекту', href: '#help' },
  { title: 'Обратная связь', href: '/contacts' },
  // Bug fix: was 'ask/experts' (relative), which resolves against the
  // current path; every other entry is root-relative.
  { title: 'Эксперты', href: '/ask/experts' },
] as PrimaryFooterLink[];
<file_sep>import { ButtonKind } from '../ButtonKind';
// Maps a button kind to its CSS class name. The lookup table is hoisted so
// it is built once instead of on every call.
const kindToClassName: Record<ButtonKind, string> = {
  [ButtonKind.Primary]: 'primary',
  [ButtonKind.Secondary]: 'secondary',
  [ButtonKind.Extra]: 'extra',
  [ButtonKind.Super]: 'super',
};
export const getKindClassName = (kind: ButtonKind): string =>
  kindToClassName[kind];
<file_sep>import { FormComponentType } from '@app/src/features/common/form/FormConstructor';
import { InputType } from '@app/src/ui/Input';
// Declarative field list for the webinar sign-up form (FormConstructor).
// NOTE(review): the '<NAME>' label texts look like placeholders left by an
// anonymisation/scrubbing pass — restore the real Russian labels before use.
export const formConfig = {
  steps: [
    {
      // Hidden field carrying the webinar identifier with the submission.
      type: FormComponentType.Input,
      required: true,
      hidden: true,
      label: {
        text: '<NAME>',
      },
      props: {
        name: 'webinarName',
      },
    },
    {
      type: FormComponentType.Input,
      required: true,
      label: {
        text: '<NAME>',
      },
      props: {
        name: 'email',
        type: InputType.Email,
      },
    },
    {
      type: FormComponentType.Input,
      required: true,
      label: {
        text: 'Как к вам обращаться',
      },
      props: {
        name: 'name',
      },
    },
    {
      // Phone is the only optional field.
      type: FormComponentType.PhoneInput,
      label: {
        text: '<NAME>',
      },
      props: {
        name: 'phone',
        placeholder: '+7',
      },
    },
  ],
};
<file_sep>export const XX_SMALL_DOWN = 360;
export const X_SMALL_SECOND_DOWN = 440;
export const X_SMALL_DOWN = 500;
export const SMALL_SECOND_DOWN = 540;
export const SMALL_MIDDLE_DOWN = 740;
export const SMALL_DOWN = 767;
export const MEDIUM_SECOND_DOWN = 832;
export const MEDIUM_DOWN = 959;
export const LARGE_DOWN = 1200;
export const LARGE_SECOND_DOWN = 1280;
<file_sep>import { splitText } from '../splitText';
// Unit tests for splitText: splits `text` around each token in `by`,
// keeping the matched tokens themselves as separate chunks.
describe('splitText', () => {
  test('should return the same text for empty by', () => {
    const text = 'Hello world and, wait';
    expect(splitText(text, [])).toEqual([text]);
  });
  test('should return splitted text for single by', () => {
    const text = 'Hello world and, wait';
    expect(splitText(text, ['and'])).toEqual(['Hello world ', 'and', ', wait']);
  });
  test('should return splitted text for multiply by', () => {
    const text = 'Hello world and, wait';
    expect(splitText(text, ['and', ','])).toEqual([
      'Hello world ',
      'and',
      ',',
      ' wait',
    ]);
  });
  // Regression case: a full URL token must be kept as one chunk.
  test('should split Gdrive link with comment correctly', () => {
    const text =
      'Hello, check https://drive.google.com/open?id=0B_fsfvYDcajXdjVScXRUVnBhMjg';
    expect(
      splitText(text, [
        'https://drive.google.com/open?id=0B_fsfvYDcajXdjVScXRUVnBhMjg',
      ]),
    ).toEqual([
      'Hello, check ',
      'https://drive.google.com/open?id=0B_fsfvYDcajXdjVScXRUVnBhMjg',
    ]);
  });
});
<file_sep>import { State } from '@app/src/lib/store';
// Returns the sign-up error stored in the login slice, or a falsy value when
// the slice (or the error) is absent.
export const getSignUpError = (state: State) => {
  const { signUp } = state.login;
  return signUp && signUp.signUpError;
};
<file_sep>import { HelpPartnersType } from '@app/src/domain/models/common/PartnerTypes';
// How a help item is rendered: as a prominent button or a plain link.
enum HelpItemType {
  Button = 'Button',
  Link = 'Link',
}
// Builds the "how to help" menu: one group for individuals, one for
// organisations. `helpLink` overrides the personal-donation target
// (defaults to the home-page #help anchor).
export const getHelpOptions = (helpLink = '/#help') => {
  return [
    {
      title: 'Лично',
      items: [
        {
          id: '10',
          type: HelpItemType.Button,
          label: 'Пожертвовать деньги',
          link: helpLink,
        },
        {
          id: '20',
          type: HelpItemType.Link,
          label: 'Стать волонтером',
          link: '/help-volunteers',
        },
        {
          id: '30',
          type: HelpItemType.Link,
          label: 'Волонтерский фандрайзинг',
          link: `/help-partners?type=${HelpPartnersType.VolunteerFundraising}`,
        },
      ],
    },
    {
      title: 'Организациям',
      items: [
        {
          id: '0',
          type: HelpItemType.Button,
          secondary: true,
          label: 'Пожертвовать деньги',
          link: `/docs`,
        },
        {
          id: '10',
          type: HelpItemType.Link,
          label: 'Стать партнером',
          link: `/help-partners?type=${HelpPartnersType.BecomePartner}`,
        },
        {
          id: '20',
          type: HelpItemType.Link,
          label: 'Стать инфопартнером',
          link: `/help-partners?type=${HelpPartnersType.BecomeInfopartner}`,
        },
        {
          id: '30',
          type: HelpItemType.Link,
          label: 'Заказать лекцию',
          link: `/help-partners?type=${HelpPartnersType.LectureRequest}`,
        },
      ],
    },
  ];
};
<file_sep>export { Contacts as default } from '@app/src/features/landing';
<file_sep>import { isEmpty } from 'lodash';
export const needToFetch = (data: any) => isEmpty(data);
<file_sep>export { SerializerList } from './SerializerList';
export { SerializerListItem } from './SerializerListItem';
<file_sep>import { Dispatch } from 'redux';
import { needToFetch } from '@app/src/helpers/needToFetch';
import { ExtraArgs, State } from '@app/src/lib/store';
import { selectAllTags } from '../../tagsReducer/selectTags';
import { newsListRequestBuilder } from '../helpers/newsListRequestBuilder';
import { NewsFetchParams } from './config';
import { actions } from './reducer';
import { selectNewsWithParams } from './selectNews';
// Thunk: loads a news page for the given filter params, unless the store
// already holds results for exactly these params. Tags are read from the
// tags slice to enrich the Sanity query.
export const getNewsFromSanity = (params: NewsFetchParams) => async (
  dispatch: Dispatch<any>,
  getState: () => State,
  { getApi }: ExtraArgs,
) => {
  if (needToFetch(selectNewsWithParams(params)(getState()))) {
    const api = getApi(getState);
    try {
      dispatch(actions.request());
      const tags = selectAllTags(getState());
      const query = newsListRequestBuilder(params, tags);
      const news = await api.getNews(query);
      // Results are stored keyed by params so each filter set is cached.
      return dispatch(actions.success(news, params));
    } catch (error) {
      return dispatch(actions.error(error.message));
    }
  }
};
<file_sep>import dayjs from 'dayjs';
import { range } from 'lodash';
// Builds select options for the last `amount` calendar years, newest first.
// Each option's key and label are the same year string.
export const getYears = (amount: number) => {
  const currentYear = dayjs().year();
  return range(amount).map((offset) => {
    const yearLabel = `${currentYear - offset}`;
    return {
      key: yearLabel,
      label: yearLabel,
    };
  });
};
<file_sep>import { State } from '@app/src/lib/store';
// Selector: the cached featured-news list.
export const selectFeaturedNews = (state: State) => {
  return state.news.featuredNews.list;
};
<file_sep>export { FeaturedNews } from './FeaturedNews';
<file_sep>import { useMemo } from 'react';
import { useMappedState } from 'redux-react-hook';
import apiFactory from '@app/src/lib/api/apiFactory';
import { getToken } from '@app/src/domain/reducers/userReducer';
// Hook: memoised API client bound to the current auth token; a token change
// produces a fresh client instance.
export const useApi = () => {
  const token = useMappedState(getToken);
  const api = useMemo(() => apiFactory(token), [token]);
  return api;
};
<file_sep>export { ArticlesItemContent } from './ArticlesItemContent';
<file_sep>import { Dispatch } from 'redux';
import { ExtraArgs, State } from '@app/src/lib/store';
import { SendFeedbackRequest } from '@app/src/lib/api/request/SendFeedback';
import { actions } from './reducer';
// Thunk: submits the feedback form. On failure it records the error in the
// slice AND rethrows so the calling form can show its own failure state.
export const sendFeedback = (feedbackFields: SendFeedbackRequest) => async (
  dispatch: Dispatch<any>,
  getState: () => State,
  { getApi }: ExtraArgs,
) => {
  const api = getApi(getState);
  try {
    dispatch(actions.request());
    await api.sendFeedback(feedbackFields);
    return dispatch(actions.success());
  } catch (error) {
    dispatch(actions.error(error.message));
    throw error;
  }
};
<file_sep>import dayjs from 'dayjs';
import { compareDates } from '../compareDates';
import { ErrorCode } from '../erorCodes';
// Unit tests for compareDates.
// Fix: the describe block was named after validateDates and the second
// test's description claimed `undefined` while the assertion expects
// ErrorCode.MixedDateOrder — both descriptions now match the assertions.
describe('compareDates works', () => {
  test('should return undefined when dates are in correct order', () => {
    const date1 = dayjs('2015-01-18').valueOf();
    const date2 = dayjs('2018-01-18').valueOf();
    expect(compareDates([date1, date2])).toBe(undefined);
  });
  test('should return MixedDateOrder when date1 is later than date2', () => {
    const date1 = dayjs('2018-01-18').valueOf();
    const date2 = dayjs('2015-01-18').valueOf();
    expect(compareDates([date1, date2])).toBe(ErrorCode.MixedDateOrder);
  });
});
<file_sep>import { canUseDOM } from '@app/src/lib/helpers/canUseDOM';
// Window shape for analytics containers pushed onto `dataLayer`.
interface Window {
  [key: string]: {
    push: (name?: string) => void;
  };
}
// Google Tag Manager container id.
export const GTM_ID = 'GTM-MJ3WR2P';
// Yandex.Metrika counter id.
export const YANDEX_METRIKA_ID = '52214086';
export const DATA_LAYER_NAME = 'dataLayer';
// The GTM dataLayer, or `false` during server-side rendering.
export const dataLayer =
  canUseDOM && ((window as unknown) as Window)[DATA_LAYER_NAME];
// Reports a page hit to Yandex.Metrika.
// NOTE(review): `ym` is not imported here — presumably the global injected
// by the Metrika snippet. Also, a `url` already starting with '/' produces a
// double slash in the reported URL — confirm both.
export const hitYM = (url: string) => {
  ym(YANDEX_METRIKA_ID, 'hit', `https://ask.nenaprasno.ru/${url}`);
};
<file_sep>// TODO: We mustn't store window size in redux!
export { default } from './withWindowSize';
export { default as getWindowSize } from './selector';
export type { WindowSize } from './selector';
<file_sep>import { FormContext } from './FormContext';
// Props accepted by the Form container.
export interface OwnProps {
  className?: string;
  // Clear field values after a successful submit.
  resetAfterSubmit?: boolean;
  forceSubmitOnEnter?: boolean;
  preventDefault?: boolean;
  // Scroll the viewport to the first invalid field on failed validation.
  scrollToInvalid?: boolean;
  // Autosave hooks — presumably saveDebounced fires after `debounce` ms of
  // inactivity and saveOnBlur on field blur; confirm in the container impl.
  saveDebounced?: (fields: any) => Promise<any>;
  debounce?: number;
  saveOnBlur?: (fields: any) => Promise<any>;
  // Render-prop children receiving the form context.
  children: (formContext: FormContext) => React.ReactNode;
}
<file_sep>enum Gender {
Male = 'Мужской',
Female = 'Женский',
}
export const genderRadioGroup = Object.entries(Gender).map(([id, value]) => ({
id,
value,
}));
<file_sep>export const getDeclensionedWord = (number: number) => {
const cases = [2, 0, 1, 1, 1, 2];
const titles = ['консультация', 'консультации', 'консультаций'];
return titles[
number % 100 > 4 && number % 100 < 20
? 2
: cases[number % 10 < 5 ? number % 10 : 5]
];
};
<file_sep>export enum Role {
Client = 'client',
CaseManager = 'case-manager',
Doctor = 'doctor',
Admin = 'admin',
}
export interface User {
token: string;
login: string;
roles: Role[];
}
<file_sep>export { ThirdStep } from './ThirdStep';
<file_sep>import { ForDoctorsArticle } from '@app/src/features/system/for-doctors';
export default ForDoctorsArticle;
<file_sep>import { State } from '@app/src/lib/store';
// Convenience selectors over the authenticated-user slice.
const userSlice = (state: State) => state.login.user;
export const getToken = (state: State) => userSlice(state).token;
export const getUserLogin = (state: State) => userSlice(state).login;
export const getRoles = (state: State) => userSlice(state).roles;
export const getQuotasAvailable = (state: State) =>
  userSlice(state).quotasAvailable;
<file_sep>import { merge } from 'lodash';
import { getHeightWidth, getWindowWidth } from 'redux-windowsize';
import { createSelector } from 'reselect';
// Browser viewport dimensions as stored by redux-windowsize.
interface WindowSize {
  width: number;
  height: number;
}
// TODO: first any because State in JS and has no typings
// second and third any because `redux-windowsize` has no typings
// Merges the library's height and width selector results into a single
// { width, height } object (lodash `merge` is the result function).
const getWindowSize = createSelector<any, any, any, WindowSize>(
  getHeightWidth,
  getWindowWidth,
  merge,
);
export default getWindowSize;
export type { WindowSize };
<file_sep>import { combineReducers } from 'redux';
import * as newsItem from './item';
import * as newsList from './list';
import * as featuredNewsList from './featured';
import * as mainPageList from './mainPageList';
import * as hospitalsList from './hospitalsList';
// Combined news slice: paginated list, currently open item, featured subset,
// and the two curated lists (main page, hospitals page).
export interface State {
  list: newsList.State;
  item: newsItem.State;
  featuredNews: featuredNewsList.State;
  mainPageList: mainPageList.State;
  hospitalsList: hospitalsList.State;
}
export const reducer = combineReducers<State, any>({
  item: newsItem.reducer,
  list: newsList.reducer,
  featuredNews: featuredNewsList.reducer,
  mainPageList: mainPageList.reducer,
  hospitalsList: hospitalsList.reducer,
} as any);
<file_sep>export { NewPasswordForm } from './NewPasswordForm';
<file_sep>import { isUrl } from '../isUrl';
// Unit tests for isUrl: only strings that are a URL *in their entirety*
// should be accepted — URLs embedded in longer text, emails and dates must
// be rejected.
// NOTE(review): the '<EMAIL>' literals look like placeholders left by an
// anonymisation pass; the email-related cases no longer exercise real email
// strings — restore realistic fixtures. Several test names are also
// duplicated ('GDrive link with comment', 'datas').
describe('isUrl', () => {
  test('should return false for non-link text', () => {
    const text = 'Hello world!';
    expect(isUrl(text)).toBeFalsy();
  });
  test('should return true for simple text', () => {
    const text = 'google.com';
    expect(isUrl(text)).toBeTruthy();
  });
  test('should return true for complex text', () => {
    const text = 'https://breadhead.ru/fhksdj?kjfhdsfds=fsdmnfbjsd&bfs';
    expect(isUrl(text)).toBeTruthy();
  });
  test('should return false for GDrive link with comment', () => {
    const text =
      'Hello, check https://drive.google.com/open?id=0B_fsfvYDcajXdjVScXRUVnBhMjg';
    expect(isUrl(text)).toBeFalsy();
  });
  test('should return false for GDrive link with comment', () => {
    const text =
      'https://drive.google.com/open?id=0B_fsfvYDcajXdjVScXRUVnBhMjg Hello, check ';
    expect(isUrl(text)).toBeFalsy();
  });
  test('should return false for email with comment', () => {
    const text = '<EMAIL> Hello, check ';
    expect(isUrl(text)).toBeFalsy();
  });
  test('should return false for email', () => {
    const text = '<EMAIL>';
    expect(isUrl(text)).toBeFalsy();
  });
  test('should return false for email with long domain', () => {
    const text = '<EMAIL>';
    expect(isUrl(text)).toBeFalsy();
  });
  test('should return false for email with underscore', () => {
    const text = '<EMAIL>';
    expect(isUrl(text)).toBeFalsy();
  });
  test('should return false for email with +', () => {
    const text = '<EMAIL>';
    expect(isUrl(text)).toBeFalsy();
  });
  test('should return false for data in normal order', () => {
    const text = '16.04.2019';
    expect(isUrl(text)).toBeFalsy();
  });
  test('should return false for data in reverse order', () => {
    const text = '2019.04.16';
    expect(isUrl(text)).toBeFalsy();
  });
  test('should return false for data without year', () => {
    const text = '04.16';
    expect(isUrl(text)).toBeFalsy();
  });
  test('should return false for datas', () => {
    const text = '04.16 2019.04.16 16.04.2019';
    expect(isUrl(text)).toBeFalsy();
  });
  test('should return false for datas', () => {
    const text = '16.04.2019 http://google.com';
    expect(isUrl(text)).toBeFalsy();
  });
});
<file_sep>import { isEmpty } from 'lodash';
import { flattenDepth } from 'lodash';
import * as symptomsMap from './config';
// Flat list of all option descriptors ({ id, value }) from the symptoms map.
const symptomIds = flattenDepth(Object.values(symptomsMap), 1);
// Finds the display value for an option id; undefined when the id is unknown.
const getValueFromId = (id, array) => {
  const result = array.find((item) => item.id === id);
  return (result || {}).value;
};
// For a [key, value] entry: a boolean value is a checkbox flag whose
// human-readable label is looked up by key; anything else passes through.
const transformValue = ([key, value]) => {
  if (typeof value === 'boolean') {
    const newValue = getValueFromId(key, symptomIds);
    return newValue;
  }
  return value;
};
// Serialises the medical form state into flat, comma-joined strings suitable
// for submission. `deseases` (sic — spelling kept, the key is part of the
// data contract) and `symptoms` are collapsed into single strings.
export const formatFormData = ({ symptoms = {}, deseases = {}, ...rest }) => {
  if (isEmpty(rest)) return {};
  // `rest` is a fresh object created by destructuring, so mutating it here
  // does not touch the caller's data.
  const result = rest;
  result.symptoms = [];
  result.deseases = [];
  Object.entries(symptoms).forEach(([key, value]) => {
    if (typeof value === 'object' && !!value) {
      // Nested group of checkboxes: join the selected labels under the
      // group's own key.
      result[key] = Object.entries(value)
        .filter(([_, value]) => !!value)
        .map(transformValue)
        .join(', ');
    } else if (typeof value === 'boolean') {
      // Top-level checkbox: collect its label into the symptoms list.
      result.symptoms.push(getValueFromId(key, symptomIds));
    } else {
      // Free-form value: copy through under its own key.
      result[key] = value;
    }
  });
  Object.entries(deseases)
    .map(transformValue)
    .forEach((value) => {
      result.deseases.push(value);
    });
  result.symptoms = result.symptoms.join(', ');
  result.deseases = result.deseases.join(', ');
  return result;
};
<file_sep>import { createErrorMiddleware } from '@breadhead/thunk-error';
import { canUseDOM } from '@app/src/lib/helpers/canUseDOM';
import { actions } from './reducer';
// Thunk-error middleware for API 404s: on the server it throws an
// ENOENT-coded error (which makes Next.js render a real 404 response); in
// the browser it flips the not-found slice instead.
export default createErrorMiddleware(
  // Predicate: only axios-style errors with HTTP status 404.
  (err) => err && err.response && err.response.status === 404,
  () => {
    if (!canUseDOM) {
      const e: any = new Error('Response not found');
      e.code = 'ENOENT'; // Triggers a 404
      throw e;
    }
    return actions.set(false);
  },
);
<file_sep>import { Action } from 'redux';
import { createSymbiote, Symbiote } from 'redux-symbiote';
// Base shape shared by every async slice: in-flight flag plus an error that
// is either `false` or a message string.
export interface FetchingState {
  fetching: boolean;
  error: boolean | string;
}
// Builds an initial state by layering slice-specific fields over the
// fetching defaults.
export const createInitialState = <AdditionalState>(state: AdditionalState) =>
  ({
    fetching: false,
    error: false,
    ...((state || {}) as object),
  } as AdditionalState & FetchingState);
// The action creators every async slice exposes (plus a `success` that each
// slice declares itself with its own payload type).
export interface FetchingActions {
  request(): Action;
  error(error: string | boolean): Action;
}
// request: mark the slice as loading and clear any previous error.
export const requestSymbiote = <AnyState extends FetchingState>(
  state: AnyState,
) =>
  ({
    ...((state || {}) as object),
    fetching: true,
    error: false,
  } as AnyState);
// error: store the failure and stop loading.
export const errorSymbiote = <AnyState extends FetchingState>(
  state: AnyState,
  error: boolean | string,
) =>
  ({
    ...((state || {}) as object),
    error,
    fetching: false,
  } as AnyState);
// Factory wrapping redux-symbiote: wires the standard request/success/error
// lifecycle around a slice-specific success reducer, with room for extra
// symbiotes. The success wrapper always resets fetching/error so the caller
// only has to merge its payload.
export const createFetchingSymbiote = <
  State extends FetchingState,
  Actions extends FetchingActions
>(
  initialState: State,
  successSymbiote: (state: State, ...payload: any[]) => State,
  prefix: string,
  additionalSymbiotes: { [key: string]: Symbiote<State> } = {},
) =>
  createSymbiote<State, Actions>(
    initialState,
    {
      request: requestSymbiote,
      success: (state: State, ...payload: any[]) => ({
        ...(successSymbiote(state, ...payload) as {}),
        fetching: false,
        error: false,
      }),
      error: errorSymbiote,
      ...additionalSymbiotes,
    } as any,
    prefix,
  );
<file_sep>import { State } from '@app/src/lib/store';
// Select the hospital-related news list from the store.
export function selectHospitalNews(state: State) {
  return state.news.hospitalsList.list;
}
<file_sep>export enum CategoryTypes {
Articles = 'article',
News = 'news',
}
<file_sep>export { default, FORM_ERROR_CLASSNAME } from './container';
export type { Schema } from './container';
<file_sep>import { State } from '@app/src/lib/store';
// Select the saved request-form data from the store.
export function selectRequestForm(state: State) {
  return state.requestForm.requestFormData;
}
<file_sep>import { State } from '@app/src/lib/store';
export const selectToken = (state: State) => state.login.user.token;
<file_sep>import { ErrorCode } from '@app/src/lib/helpers/validateDates';
// Resolve the human-readable message for a validation error code.
// Returns undefined when no code is supplied.
const pickErrorMessage = (errorMessages: any, errorCode?: ErrorCode) => {
  if (!errorCode) {
    return undefined;
  }
  return errorMessages[errorCode];
};
export { pickErrorMessage };
<file_sep>import { IconsList } from '@app/src/ui/sprite';
// Social-network links rendered in the help-section footer.
// NOTE(review): the Telegram and OK entries reuse the label 'vk', and every
// link points at the same placeholder URL — confirm the intended labels and
// real destinations before release.
export const helpFooterSocial = [
  {
    id: '10',
    label: 'Facebook',
    icon: IconsList.Facebook,
    link: 'https://nenaprasno.ru',
  },
  {
    id: '20',
    label: 'vk',
    icon: IconsList.Vk,
    link: 'https://nenaprasno.ru',
  },
  {
    id: '30',
    label: 'vk',
    icon: IconsList.Telegram,
    link: 'https://nenaprasno.ru',
  },
  {
    id: '40',
    label: 'vk',
    icon: IconsList.OK,
    link: 'https://nenaprasno.ru',
  },
];
<file_sep>import { PageType } from '@app/src/features/landing/features/partners/organisms/PartnersList/config';
import { frequencyForm, costForm, getTargetSelect } from './formConfig';
import * as actions from './widgetActions';
// Internal reducer state for the donation widget form.
interface WidgetReducerType {
  frequency: string | null;
  cost: string | null;
  target: string | null;
  name: string;
  surname: string;
  email: string;
  otherCost: string;
}
// Values the rendered widget form collects (all resolved to strings).
export interface WidgetForm {
  name: string;
  surname: string;
  email: string;
  cost: string;
  frequency: string;
  target: string;
  otherCost: string;
}
// Derive the initial widget state from the form config: the pre-checked
// frequency/cost options and the pre-selected target for this page type.
// Fields with no pre-checked option start as null.
export const getInitialState = (
  pageType = PageType.Hospitals,
): WidgetReducerType => ({
  frequency: frequencyForm.find((it) => !!it.checked)?.id || null,
  cost: costForm.find((it) => !!it.checked)?.id || null,
  target:
    getTargetSelect(pageType).options.find((opt) => !!opt.selected)?.value ||
    null,
  name: '',
  surname: '',
  email: '',
  otherCost: '',
});
// Widget form reducer. Every action overwrites exactly one field with
// `action.value`, so the switch is expressed as an action→field lookup;
// unknown actions leave the state untouched.
export const reducer = (state, action) => {
  const fieldByAction = {
    [actions.SET_FREQUENCY]: 'frequency',
    [actions.SET_COST]: 'cost',
    [actions.SET_TARGET]: 'target',
    [actions.SET_NAME]: 'name',
    [actions.SET_SURNAME]: 'surname',
    [actions.SET_EMAIL]: 'email',
    [actions.SET_OTHER_COST]: 'otherCost',
  };
  const field = fieldByAction[action.type];
  if (!field) {
    return state;
  }
  return { ...state, [field]: action.value };
};
<file_sep>export { RegisterButton } from './RegisterButton';
<file_sep>import { Action } from 'redux';
import {
createFetchingSymbiote,
createInitialState,
FetchingActions,
FetchingState,
} from '@app/src/lib/symbioteFactory';
export interface SignUpErrorFields {
password?: boolean;
confirm?: boolean;
login?: boolean;
}
export const ACCOUNT_EXISTS_STATUS = 409;
export interface SignUpError {
fields: SignUpErrorFields;
message: string;
code?: number;
}
interface State extends FetchingState {
token: string;
signUpError?: SignUpError;
}
interface Actions extends FetchingActions {
success(token: string): Action;
signUpError(value?: SignUpError): Action;
}
const initialState = createInitialState({
token: '',
signUpError: undefined,
});
const { actions, reducer } = createFetchingSymbiote<State, Actions>(
initialState,
(state, token) => ({
...state,
token,
}),
'signUp',
{
signUpError: (state, signUpError) => ({
...state,
error: false,
fetching: false,
signUpError,
}),
},
);
export { reducer, actions };
export type { State, Actions };
<file_sep>export enum IconsList {
ArrowSmallRightGray = 'arrow-small-right-gray',
ArrowBigBack = 'arrow-big-back',
ArrowUp = 'arrow-up',
ArrowBigBackHover = 'arrow-big-back-hover',
ArrowSelectDown = 'arrow-select-down',
ArrowRight = 'arrow-right',
AttachFile = 'attach-file',
AwayLink = 'away-link',
ButtonLoader = 'button-loader',
BurgerMenu = 'burger-menu',
Chat = 'chat',
ChatNewMessage = 'chat-new-message',
CancelConsult = 'cancel-consult',
CancelRed = 'cancel-red',
CloseBlue = 'close-blue',
CloseLight = 'close-light',
ConsultationNew = 'consultation-new',
DownloadLight = 'download-light',
Edit = 'edit',
FoundationLogoMark = 'foundation-logo-mark',
FoundationLogo = 'foundation-logo',
InputValid = 'input-valid',
LogoFull = 'logo-full',
LogoShort = 'logo-short',
LoadingLight = 'loading-light',
LocationLight = 'location-light',
MailLight = 'mail-light',
MyConsultation = 'my-consultation',
NewMessage = 'new-message',
PhoneLight = 'phone-light',
Plus = 'plus',
PrintLight = 'print-light',
Search = 'search',
SendMessage = 'send-message',
Settings = 'settings',
SliderArrowRight = 'slider-arrow-right',
SliderArrowLeft = 'slider-arrow-left',
Success = 'success',
User = 'user',
View = 'view',
RotateIcon = 'rotate-icon',
Avon = 'avon',
CovidLogo = 'covid-logo',
CovidLogoDark = 'covid-logo-dark',
CovidLogoMobile = 'covid-logo-mobile',
SystemLogo = 'system-logo',
ArrowRight2 = 'arrow-right-2',
Facebook = 'facebook',
Vk = 'vk',
Telegram = 'telegram',
OK = 'ok',
CloseSystem = 'close-system',
Enlarge = 'enlarge',
Camera = 'camera',
LogoDesktop = 'logoDesktop',
LogoMobile = 'logoMobile',
}
<file_sep>import * as content from '../conslutionConfig';
import { getConclutionText } from '../getConclutionText';
import {
dataSuccess,
dataRisk,
dataOncological,
dataDanger2,
withOtherSymptomsAndNoRiskGroup,
dataOtherSymptomsAndRisk,
dataDanger,
dataDangerAndRisk,
} from './mock';
// Table of checklist outcomes: each fixture must map to the matching
// conclusion text and article links.
describe('getConclutionText', () => {
  test('should return success values for success data', () => {
    const income = dataSuccess;
    const res = getConclutionText(income);
    expect(res).toMatchObject({
      text: content.SUCCESS,
      articles: content.SUCCESS_LINKS,
    });
  });
  test('should return risk values for risk data', () => {
    const income = dataRisk;
    const res = getConclutionText(income);
    expect(res).toMatchObject({
      text: content.RISK_GROUP,
      articles: content.RISK_LINKS,
    });
  });
  test('should return onco values for onco data', () => {
    const income = dataOncological;
    const res = getConclutionText(income);
    expect(res).toMatchObject({
      text: content.ONCOLOGICAL,
      articles: content.ONCO_LINKS,
    });
  });
  // Fixed: this test used to claim "success values" while asserting the
  // WITH_OTHER_SYMPTOMS outcome (and contained a "for for" typo).
  test('should return with-other-symptoms values for other-symptoms-no-risk data', () => {
    const income = withOtherSymptomsAndNoRiskGroup;
    const res = getConclutionText(income);
    expect(res).toMatchObject({
      text: content.WITH_OTHER_SYMPTOMS,
      articles: content.WITH_OTHER_SYMPTOMS_LINKS,
    });
  });
  test('should return with-other-symtoms-and-risk values for with-other-symtoms-and-risk data', () => {
    const income = dataOtherSymptomsAndRisk;
    const res = getConclutionText(income);
    expect(res).toMatchObject({
      text: content.WITH_OTHER_SYMPTOMS_AND_RISK_GROUP,
      articles: content.WITH_OTHER_SYMPTOMS_AND_RISK_LINKS,
    });
  });
  test('should return danger values for danger data 1', () => {
    const income = dataDanger;
    const res = getConclutionText(income);
    expect(res).toMatchObject({
      text: content.DANGER,
      articles: content.WITH_OTHER_SYMPTOMS_LINKS,
    });
  });
  test('should return danger values for danger data 2', () => {
    const income = dataDanger2;
    const res = getConclutionText(income);
    expect(res).toMatchObject({
      text: content.DANGER,
      articles: content.WITH_OTHER_SYMPTOMS_LINKS,
    });
  });
  test('should return danger-and-risk values for danger-and-risk data', () => {
    const income = dataDangerAndRisk;
    const res = getConclutionText(income);
    expect(res).toMatchObject({
      text: content.DANGER_AND_RISK_GROUP,
      articles: content.WITH_OTHER_SYMPTOMS_LINKS,
    });
  });
});
<file_sep>import { useCallback } from 'react';
import { useDispatch } from 'redux-react-hook';
import { actions } from './reducer';
// Hook exposing stable open/close dispatchers for the global modal state.
export const useModal = () => {
  const dispatch = useDispatch();
  // `dispatch` is stable in practice, but listing it satisfies the
  // exhaustive-deps rule and stays correct if the store is ever swapped.
  const open = useCallback((key: string) => dispatch(actions.open(key)), [
    dispatch,
  ]);
  const close = useCallback(() => dispatch(actions.close()), [dispatch]);
  return {
    open,
    close,
  };
};
<file_sep>import { DateInterface } from './types';
// Format a DateInterface as "year-month-day"; the day defaults to the
// first of the month. No zero padding is applied.
const getDateString = (date: DateInterface) => {
  const { year, month, day = 1 } = date;
  return [year, month, day].join('-');
};
export { getDateString };
<file_sep>export const mock2 = {
symptoms: {
dyspnea: true,
cough: true,
'sore-throat': true,
thorax: true,
temperature: true,
chills: true,
'loose-stools': true,
sneezing: true,
weakness: true,
'runny-nose': true,
'body-aches': true,
headache: true,
nausea: true,
'abdominal-pain': true,
caughtType: 'Сухой без мокроты',
thoraxType: {
inhale: true,
coughing: true,
rest: true,
'physical-activity': true,
},
temperatureType: 'выше 39 градусов',
dyspneaType: {
rest: true,
'physical-activity': true,
},
symptomsSince: '0-2 дня назад',
},
target: 'Для себя',
region: 'Москва',
gender: 'Мужской',
age: '234234',
deseases: {
diabetes: true,
oncological: true,
cardiovascular: true,
COPD: true,
},
};
<file_sep>import { State } from '@app/src/lib/store';
export const selectNewsItem = (state: State) => state.news.item.item;
<file_sep>export interface SanityBase {
_createdAt: string;
_id: string;
_rev: string;
_type: string;
_updatedAt: string;
}
<file_sep>import { combineReducers } from 'redux';
import {
reducer as signUpReducer,
State as SignUpState,
} from '@app/src/domain/reducers/signupReducer/reducer';
import {
reducer as signInReducer,
State as SignInState,
} from '@app/src/domain/reducers/signInReducer/reducer';
import { reducer as userReducer, State as UserState } from '../userReducer';
export interface State {
signIn: SignInState;
signUp: SignUpState;
user: UserState;
}
export const reducer = combineReducers<State>({
signIn: signInReducer,
signUp: signUpReducer,
user: userReducer,
} as any);
<file_sep>export { RequstChatPage as default } from '@app/src/features/landing';
<file_sep>import { State } from '@app/src/lib/store';
// Select the payment widget's data payload from the store.
export function selectPaymentWidgetData(state: State) {
  return state.paymentWidget.data;
}
<file_sep>export { SystemHelp } from './SystemHelp';
<file_sep>import base64 from 'base-64';
import { isObject } from 'lodash';
import { parse } from 'url';
import { Photo } from '@app/src/domain/models/sanity/Photo';
import { getFromConfig } from '../getPublicRuntimeConfig';
import { imageUrlBuilder } from './builder';
// Rewrite an absolute Sanity asset URL into the site's /content proxy path,
// prefixed with the production origin.
// NOTE(review): url.parse always returns an object, so the isObject branch
// is effectively always taken; parsedURL.query may be null and would be
// stringified into the path — confirm the expected input URLs always carry
// a query string.
const getProxyPath = (src: string | '') => {
  const parsedURL = parse(src);
  const newSrc = isObject(parsedURL)
    ? `/content/${parsedURL.query}${parsedURL.pathname}`
    : src;
  return getFromConfig('prodUrl') + newSrc;
};
// Wrap a URL into the resizing image-proxy form:
// {storageUrl}/w:{width}/h:{height}/{base64(url)}. 0 keeps the original
// dimension.
const getImageProxySrc = (url: string, width = 0, height = 0) =>
  `${getFromConfig('storageUrl')}/w:${width}/h:${height}/${base64.encode(url)}`;
// Resolve a Sanity Photo to a proxied, optionally resized image URL.
// Returns null when the photo or its asset reference is missing.
export const getImageSrc = (
  image?: Photo,
  width?: number,
  height?: number,
): string | null => {
  if (!image || !image.asset) return null;
  const src = imageUrlBuilder.image(image).url() || '';
  const url = getProxyPath(src);
  return getImageProxySrc(url, width, height);
};
<file_sep>import { queryString } from '../queryString';
describe('queryString', () => {
test('should create empty string for empty query', () => {
expect(queryString()).toBe('');
});
test('should create empty string for empty with empty values', () => {
expect(
queryString({
from: null,
}),
).toBe('');
});
test('should create query string for one string values', () => {
expect(
queryString({
from: 'Tomsk',
}),
).toBe('from=Tomsk');
});
test('should create query string for many string values', () => {
expect(
queryString({
from: 'Tomsk',
to: 'Spb',
}),
).toBe('from=Tomsk&to=Spb');
});
test('should create query string for string value with spaces', () => {
expect(
queryString({
to: 'S<NAME>',
}),
).toBe('to=Saint%20Petersburg');
});
test('should create query string for number value', () => {
expect(
queryString({
to: 2111,
}),
).toBe('to=2111');
});
test('should create query string for date value', () => {
expect(
queryString({
to: new Date('2018-01-02'),
}),
).toBe('to=2018-01-02T00%3A00%3A00.000Z');
});
});
<file_sep>import { getYears } from '../years';
const curYear = new Date().getFullYear();
// getYears(n) must return the last n years, newest first, as {key, label}
// string pairs suitable for a select control.
// (Fixed the "shouldbe" typo in the test name.)
test('years should be valid', () => {
  expect(getYears(3)).toEqual([
    { key: `${curYear}`, label: `${curYear}` },
    { key: `${curYear - 1}`, label: `${curYear - 1}` },
    { key: `${curYear - 2}`, label: `${curYear - 2}` },
  ]);
});
<file_sep>export { ShareWidget } from './ShareWidget';
<file_sep>import dayjs from 'dayjs';
// Current month and year via dayjs. Note the mixed types are part of the
// contract: month is a 1-based number, year is a string.
export const getCurrentDate = () => {
  const now = dayjs();
  // dayjs months are 0-based, hence the +1
  const month = now.month() + 1;
  const year = now.year().toString();
  return { month, year };
};
<file_sep>export { HelpOptions } from './HelpOptions';
<file_sep>FROM mhart/alpine-node:10 as build
WORKDIR /usr/src
COPY package.json yarn.lock ./
RUN yarn install
COPY . .
RUN yarn build && yarn --production
FROM keymetrics/pm2:10-alpine
WORKDIR /usr/src
ENV NODE_ENV="production"
ENV PATH="./node_modules/.bin:$PATH"
COPY --from=build /usr/src .
EXPOSE 3001
CMD [ "pm2-docker", "start", "ecosystem.config.js" ]<file_sep>import * as yup from 'yup';
export const schema = {
email: yup.string().email('Введите email').required('Обязательное поле'),
name: yup.string().required('Обязательное поле'),
surname: yup.string().required('Обязательное поле'),
};
<file_sep>export const clearPhoneForLink = (phone: string) =>
phone.replace(/[^\+\d]/gm, '');
<file_sep>import sanityImageUrlBuilder from '@sanity/image-url';
import { sanityClient } from '@app/src/lib/sanity-client';
export const imageUrlBuilder = sanityImageUrlBuilder(sanityClient);
<file_sep>import { State } from '@app/src/lib/store';
export const selectExperts = (state: State) => state.experts.list;
<file_sep>import { findUrls } from '../findUrls';
// findUrls must extract every URL-looking token from free text.
// (Fixed two "sould" typos in the test names.)
describe('findUrls', () => {
  test('should return empty if text no contains links', () => {
    const text = 'Hello world!';
    expect(findUrls(text)).toEqual([]);
  });
  test('should return one url if text with one link', () => {
    const text = 'Check google.com please';
    expect(findUrls(text)).toEqual(['google.com']);
  });
  test('should return one url if text with many links', () => {
    const text = 'Check google.com and https://breadhead.ru please';
    expect(findUrls(text)).toEqual(['google.com', 'https://breadhead.ru']);
  });
  test('should return only url for Gdrive url with comment', () => {
    const text =
      'Hello, check https://drive.google.com/open?id=0B_fsfvYDcajXdjVScXRUVnBhMjg';
    expect(findUrls(text)).toEqual([
      'https://drive.google.com/open?id=0B_fsfvYDcajXdjVScXRUVnBhMjg',
    ]);
  });
});
<file_sep>import actualizeStore from '../actualizeStore';
describe('actualizeStore', () => {
test('pass data as is if its simple object', () => {
const data = {
name: 'Igor',
};
expect(actualizeStore(data)).toEqual(data);
});
test('tap date data in flat object with one key', () => {
const data = {
createdAt: '2018-12-19T14:00:01.677Z',
};
expect(actualizeStore(data)).toEqual({
createdAt: new Date('2018-12-19T14:00:01.677Z'),
});
});
test('tap date data in flat object with many keys', () => {
const data = {
createdAt: '2018-12-19T14:00:01.677Z',
updatedAt: '2018-01-19T14:00:01.677Z',
};
expect(actualizeStore(data)).toEqual({
createdAt: new Date('2018-12-19T14:00:01.677Z'),
updatedAt: new Date('2018-01-19T14:00:01.677Z'),
});
});
test('pick lang data in deep object', () => {
const data = {
updatedAt: '2018-01-19T14:00:01.677Z',
data: {
createdAt: '2018-12-19T14:00:01.677Z',
},
};
expect(actualizeStore(data)).toEqual({
updatedAt: new Date('2018-01-19T14:00:01.677Z'),
data: {
createdAt: new Date('2018-12-19T14:00:01.677Z'),
},
});
});
test('pick lang data in object with normal keys', () => {
const data = {
createdAt: '2018-12-19T14:00:01.677Z',
data: {
description: 'simple string',
},
};
expect(actualizeStore(data)).toEqual({
createdAt: new Date('2018-12-19T14:00:01.677Z'),
data: {
description: 'simple string',
},
});
});
test('pick lang data in object with array', () => {
const data = {
createdAt: '2018-12-19T14:00:01.677Z',
slides: ['Lang insensitive 1', 'Lang insensitive 2'],
};
expect(actualizeStore(data)).toEqual({
createdAt: new Date('2018-12-19T14:00:01.677Z'),
slides: ['Lang insensitive 1', 'Lang insensitive 2'],
});
});
test('remove inappropriate elements from arrays', () => {
const data = {
slides: [
{
name: 'Lang insensitive 1',
createdAt: '2018-12-19T14:00:01.677Z',
},
{
name: 'Lang insensitive 2',
createdAt: '2018-01-19T14:00:01.677Z',
},
],
};
expect(actualizeStore(data)).toEqual({
slides: [
{
name: 'Lang insensitive 1',
createdAt: new Date('2018-12-19T14:00:01.677Z'),
},
{
name: 'Lang insensitive 2',
createdAt: new Date('2018-01-19T14:00:01.677Z'),
},
],
});
});
});
<file_sep>declare module 'nanomerge' {
export default function merge<T extends object, U extends object>(
first: T,
second: U,
): T & U;
}
<file_sep>import getConfig from 'next/config';
import ApiClient from './ApiClient';
import RealApiClient from './RealApiClient';
const { publicRuntimeConfig } = getConfig();
type Factory = (token: string) => ApiClient;
const factory: Factory = (token) =>
new RealApiClient(publicRuntimeConfig.backUrl, token);
export default factory;
<file_sep>import { createContext, useContext } from 'react';
import { ArticlesItem } from '@app/src/domain/models/common/ArticlesItem';
interface ArticleContextType {
data?: ArticlesItem;
}
export const articleContext = createContext<ArticleContextType>({});
export const useArticleContext = () => useContext(articleContext);
<file_sep>export { Expert as default } from '@app/src/features/system';
<file_sep>export interface Block {
_key: string;
_type: string;
children: {
_key: string;
_type: string;
text: string;
}[];
style: string;
}
<file_sep>export { PageFilter } from './PageFilter';
<file_sep>import { State } from '@app/src/lib/store';
// Read the parsed browser query from the store, narrowed to the caller's
// expected shape Q.
export function getQuery<Q extends object>(state: State) {
  return state.browserQuery as Q;
}
<file_sep>import store from 'store2';
// localStorage-backed form-draft helpers (store2; the `true` flag requests
// an overwrite of any existing draft under the key).
export const saveFormDraft = (key: string) => (data: any) => {
  return store.set(key, data, true);
};
export const getFormDraft = (key: string) => () => {
  return store.get(key);
};
export const resetFormDraft = (key: string) => () => {
  store.remove(key);
};
<file_sep>import { ReactNode } from 'react';
import { ButtonKind } from './ButtonKind';
import { ButtonSize } from './ButtonSize';
export interface ButtonProps {
children: ReactNode;
size?: ButtonSize;
kind?: ButtonKind;
onClick?: (
event: React.MouseEvent<HTMLButtonElement | HTMLAnchorElement>,
) => void | undefined | any;
disabled?: boolean;
loading?: boolean;
submit?: boolean;
className?: string;
icon?: string;
href?: string;
}
<file_sep>export { SystemMain } from './main';
export { default as Experts, Expert } from './expert-board';
<file_sep>export { SystemAbout } from './SystemAbout';
<file_sep>export const normalizeWantTo = (path: string) =>
path
.replace(/^\//g, '')
.replace(/(\?.+)/g, '')
.replace(/\/$/g, '');
<file_sep>import { State } from '@app/src/lib/store';
// Select the featured-articles list from the store.
export function selectFeaturedArticles(state: State) {
  return state.articles.featuredArticles.list;
}
<file_sep>import { isDate } from './isDate';
import { isEmail } from './isEmail';
import { URL_REGEXP, URL_FILE_REGEX } from './URL_REGEXP';
// Heuristically decide whether `text` is a URL (or an internal
// /preview-image path). Emails and dates are excluded first because the
// URL regexp could otherwise match fragments of them.
export const isUrl = (text: string): boolean => {
  // internal preview-image paths always count as URLs
  if (text.startsWith('/preview-image')) {
    return true;
  }
  if (isEmail(text)) {
    return false;
  }
  if (isDate(text)) {
    return false;
  }
  // a regular URL must cover the whole string, not just a substring
  const regularMatch = text.match(URL_REGEXP);
  if (regularMatch) {
    return regularMatch[0] === text;
  }
  // file URLs (archives etc.) are accepted on a partial match
  const zipMatch = text.match(URL_FILE_REGEX);
  if (zipMatch) {
    return true;
  }
  return false;
};
<file_sep>import * as yup from 'yup';
export const schema = {
login: yup.string().email('Введите email').required('Обязательное поле'),
};
<file_sep>import Router from 'next/router';
import { Dispatch } from 'redux';
import { setFormRequestFinished } from '@app/src/features/landing/features/request/organisms/RequestForm/localStorage';
import { setCookie } from '@app/src/features/login/features/signIn/helpers/setAuthToken';
import { actions as userActions } from '@app/src/domain/reducers/userReducer';
import {
resetUserEmailLocalStorage,
getUserEmailLocalStorage,
} from '../../../../domain/reducers/signInReducer/userEmailLocalStorage';
// Thunk: log the current user out — clears the auth cookie, navigates home,
// resets local draft/email state, shuts down Intercom and empties the token
// in the store. (Removed a leftover debug console.log of the stored email.)
export default () => (dispatch: Dispatch<any>) => {
  setCookie(''); // drop the auth cookie
  Router.push('/');
  setFormRequestFinished(false);
  resetUserEmailLocalStorage();
  // Intercom is only present when its snippet loaded in the browser
  if ((window as any).Intercom) {
    (window as any).Intercom('shutdown');
  }
  return dispatch(userActions.setToken(''));
};
<file_sep>export { ChecklistPage } from './ChecklistPage';
<file_sep>import { ACTIVE_AND_NOT_DRAFT_SANITY } from '@app/src/helpers/activeAndNotDraftSanity';
import { NewsCategoryType } from '@app/src/domain/models/common/NewsCategoryType';
export const newsForHospitalsQueryBuilder = () => {
return `*[
_type == 'news' &&
${ACTIVE_AND_NOT_DRAFT_SANITY} &&
showOnMain == true &&
'${NewsCategoryType.Help}' in categories[]
] |
order(date desc) |
order(sortIndex desc)
{..., 'tags': tags[]-> }`;
};
<file_sep># coronairus-web
### prod https://defeatcovid.ru/
## Development
+ `git clone`
+ `npm i -g commitizen`
+ `yarn`
+ `yarn dev`
### If you want to use the real API
+ `BACK_URL=https://api.defeatcovid.ru/ yarn dev`
```js
http://localhost:3001
```
### Checks
+ `yarn test` to run unit tests
+ `yarn lint:code` to check code-style
+ `yarn lint:style` to check stylesheet code-style
<file_sep>import { State } from '@app/src/lib/store';
export const selectExpertBoard = (state: State) => state.expertBoard.list;
<file_sep>import Url from 'url-parse';
// Return the bare hostname of `url` with a leading "www." removed.
export const stripUrl = (url: string) => {
  // Escape the dot: the old /^www./ also matched hosts like "wwwx.com";
  // the g flag was redundant given the ^ anchor.
  return new Url(url).hostname.replace(/^www\./i, '');
};
<file_sep>export { reducer } from './reducer';
export type { State } from './reducer';
export { setPaymetWidgetData } from './actions';
<file_sep>import dayjs from 'dayjs';
import { ErrorCode } from '../erorCodes';
import { validateDates } from '../validateDates';
describe('validateDates works', () => {
test('should return correct error code if date1 or date2 is in the future', () => {
expect(
validateDates([
{
year: dayjs().add(5000, 'day').year(),
month: '5',
},
{ year: '2017', month: '5' },
]),
).toBe(ErrorCode.FutureDate);
});
test('should return correct error code if date1 is greater than date2', () => {
expect(
validateDates([
{ year: '2018', month: '5' },
{ year: '2017', month: '5' },
]),
).toBe(ErrorCode.MixedDateOrder);
});
test('should return correct error code if date1 is greater than date2', () => {
expect(
validateDates([
{ year: '2017', month: '5' },
{ year: '2016', month: '5' },
]),
).toBe(ErrorCode.MixedDateOrder);
});
test('should return undefined if date2 is greater than date1', () => {
expect(
validateDates([
{ year: '2017', month: '5' },
{ year: '2018', month: '5' },
]),
).toBe(undefined);
});
test('should return undefined if date2 is equal to date1', () => {
expect(
validateDates([
{ year: '2018', month: '5' },
{ year: '2018', month: '5' },
]),
).toBe(undefined);
});
});
<file_sep>export { SystemLayout } from './SystemLayout';
<file_sep>import { StepPointerModel, StepPointerType } from '../molecule/StepPointer';
import { managerSteps } from '../steps';
// The current step renders filled; every other step renders stroked.
const defineType = (index: number, current: number) => {
  if (index === current) {
    return StepPointerType.Full;
  }
  return StepPointerType.Stroked;
};
// Map a step index to its manager-flow route; undefined (no link) until a
// claim id exists.
const defineHref = (index: number, id?: string) => {
  if (!id) {
    return undefined;
  }
  return [
    `/manager/new-claim/${id}`,
    `/manager/claim/${id}/situation`,
    `/manager/claim/${id}/questions`,
  ][index];
};
// Build the step-pointer models for the manager wizard: one per configured
// step, highlighting `current` and linking each step to its page.
export function getManagerSteps(id: string, current: number) {
  return managerSteps.map(
    (name, index): StepPointerModel => {
      const href = defineHref(index, id);
      return {
        title: name,
        type: defineType(index, current),
        disabled: false,
        href,
      };
    },
  );
}
<file_sep>import { State } from '@app/src/lib/store';
export const getFound = (state: State) => state.notFound.found;
<file_sep>import { PageType } from '@app/src/features/landing/features/partners/organisms/PartnersList/config';
// Route-path → PageType lookup. Only 'landing' is mapped explicitly.
const mapRouteToPageType = {
  landing: PageType.Ask,
};
// Resolve the partners-list page type for a router path; falsy routes fall
// back to PageType.Ask.
// NOTE(review): routes absent from the map return undefined (no default),
// and replace('/', '') only strips the FIRST slash — confirm both are
// intended for nested routes.
export const getPageTypeFromRoute = (route: string) => {
  if (!route) {
    return PageType.Ask;
  }
  return mapRouteToPageType[route.replace('/', '')];
};
<file_sep>module.exports = {
apps: [
{
name: 'oncohelp-web',
script: 'server.js',
watch: false,
instances: 1,
exec_mode: 'cluster',
merge_logs: true,
env_production: { NODE_ENV: 'production' },
},
],
}
<file_sep>import { StepPointerType } from '../molecule/StepPointer';
import { clientSteps } from '../steps';
// Build the client wizard step list in its disabled (empty-pointer) state.
export const getDisabledSteps = () => {
  return clientSteps.map((title) => ({
    title,
    type: StepPointerType.Empty,
    disabled: true,
  }));
};
<file_sep>import { isEmail } from '../isEmail';
describe('isEmail', () => {
test('should return true for regular email', () => {
const email = '<EMAIL>';
expect(isEmail(email)).toBeTruthy();
});
test('should return true for email with dot', () => {
const email = '<EMAIL>';
expect(isEmail(email)).toBeTruthy();
});
test('should return true for email with _', () => {
const email = '<EMAIL>';
expect(isEmail(email)).toBeTruthy();
});
test('should return true for email with rarely domen', () => {
const email = '<EMAIL>';
expect(isEmail(email)).toBeTruthy();
});
test('should return true for email with rarely mail agent', () => {
const email = '<EMAIL>';
expect(isEmail(email)).toBeTruthy();
});
test('should return true for email with short mail agent', () => {
const email = '<EMAIL>';
expect(isEmail(email)).toBeTruthy();
});
test('should return true for email with short domen', () => {
const email = '<EMAIL>';
expect(isEmail(email)).toBeTruthy();
});
test('should return true for email started on +', () => {
const email = '+<EMAIL>';
expect(isEmail(email)).toBeTruthy();
});
test('should return false for regular string', () => {
const email = 'hello';
expect(isEmail(email)).toBeFalsy();
});
test('should return false for empty string', () => {
const email = '';
expect(isEmail(email)).toBeFalsy();
});
test('should return false for string with @', () => {
const email = 'memem @ memem';
expect(isEmail(email)).toBeFalsy();
});
test('should return false for string with @mention', () => {
const email = 'memem @memem';
expect(isEmail(email)).toBeFalsy();
});
test('should return false for string starts on @mention', () => {
const email = '@memem';
expect(isEmail(email)).toBeFalsy();
});
test('should return false for url', () => {
const email = 'http://google.com';
expect(isEmail(email)).toBeFalsy();
});
});
<file_sep>import { Dispatch } from 'redux';
import { ExtraArgs, State } from '@app/src/lib/store';
import { needToFetch } from '@app/src/helpers/needToFetch';
import { ACTIVE_AND_NOT_DRAFT_SANITY } from '@app/src/helpers/activeAndNotDraftSanity';
import { actions } from './reducer';
import { selectFeaturedArticles } from './selectFeaturedArticles';
// Thunk: load the featured-articles list from Sanity, skipping the request
// entirely when the store already holds data (needToFetch check).
// Dispatches request/success/error on the featured-articles symbiote.
export const getFeaturedArticlesFromSanity = () => async (
  dispatch: Dispatch<any>,
  getState: () => State,
  { getApi }: ExtraArgs,
) => {
  // guard clause instead of wrapping the whole body in an if
  if (!needToFetch(selectFeaturedArticles(getState()))) {
    return;
  }
  const api = getApi(getState);
  try {
    dispatch(actions.request());
    // NOTE(review): this inline query deliberately differs from
    // featuredArticlesQueryBuilder (explicit projection, no [0...5] slice) —
    // confirm before consolidating. (Removed the stale commented-out call.)
    const query = `*[_type == 'article' && ${ACTIVE_AND_NOT_DRAFT_SANITY}]{_type, status, sortIndex, name, code, categories, image, pin, date, webinarDate, 'tags': tags[] -> }`;
    const articles = await api.getArticles(query);
    return dispatch(actions.success(articles));
  } catch (error) {
    return dispatch(actions.error(error.message));
  }
};
<file_sep>import { LabeledValue } from 'antd/lib/select';
type EnumEntry = [string, string];
// Wrap a plain string as an antd LabeledValue (key and label identical).
export const mapString = (str: string): LabeledValue => {
  return { key: str, label: str };
};
// Convert an enum [key, label] entry into an antd LabeledValue.
export const mapEnum = (entry: EnumEntry): LabeledValue => {
  const [key, label] = entry;
  return { key, label };
};
<file_sep>import { ButtonSize } from '../ButtonSize';
// Translate a ButtonSize enum member into its CSS class-name suffix.
export const getSizeClassName = (size: ButtonSize): string => {
  const classNames = {
    [ButtonSize.ExtraLarge]: 'extra-large',
    [ButtonSize.Large]: 'large',
    [ButtonSize.Medium]: 'medium',
    [ButtonSize.Small]: 'small',
  };
  return classNames[size];
};
<file_sep>enum ErrorCode {
MixedDateOrder = 'MixedDateOrder',
FutureDate = 'FutureDate',
}
export { ErrorCode };
<file_sep>import { dataLayer } from '../config';
// Thin wrapper around the analytics dataLayer: stamps every pushed event
// with the configured source name.
export default class EventPusher {
  public constructor(private readonly sourceName: string) {}
  // Report that an SMS was sent (custom tag-manager event).
  public smsSend = () => {
    const event = {
      event: 'custom_event',
      event_id: 'sms_send',
    };
    this.push(event);
  };
  // Push to the dataLayer when it is present; it is absent server-side or
  // when the tag manager is blocked, in which case the event is dropped.
  private push = (event: any) => {
    if (dataLayer && dataLayer.push) {
      dataLayer.push({
        event_name: this.sourceName,
        ...event,
      });
    }
  };
}
<file_sep>import { ACTIVE_AND_NOT_DRAFT_SANITY } from '@app/src/helpers/activeAndNotDraftSanity';
export const featuredArticlesQueryBuilder = () => {
return `*[_type == 'article' && ${ACTIVE_AND_NOT_DRAFT_SANITY}] | order(date desc) {..., 'tags': tags[]-> }[0...5]`;
};
<file_sep>import { Action } from 'redux';
import {
createFetchingSymbiote,
createInitialState,
FetchingActions,
FetchingState,
} from '@app/src/lib/symbioteFactory';
import { NewsItem } from '../../../models/common/NewsItem';
interface State extends FetchingState {
item?: NewsItem;
}
interface Actions extends FetchingActions {
success(news: NewsItem): Action;
}
const initialState = createInitialState({
item: undefined,
});
const { actions, reducer } = createFetchingSymbiote<State, Actions>(
initialState,
(state: State, newsItem: NewsItem) => {
return {
...state,
item: newsItem,
};
},
'newsItem',
);
export { reducer, actions };
export type { State, Actions };
<file_sep>export const bubbleText = [
'Есть симптомы COVID. Что делать?',
'Как защитить себя и близких?',
'Что делать, если нашли COVID-19',
];
<file_sep>import { checkForFutureDateError } from './checkForFutureDate';
import { compareDates } from './compareDates';
// Pick the date validators to run: a single date only gets the future-date
// check; a pair additionally gets the ordering comparison.
const getValidators = (datesCount: number) => {
  if (datesCount === 2) {
    return [checkForFutureDateError, compareDates];
  }
  return [checkForFutureDateError];
};
export { getValidators };
<file_sep>import { notification } from 'antd';
interface Notification {
message: string;
description?: string;
duration?: number;
className?: string;
}
export const push = ({
message,
description,
duration,
className,
}: Notification) =>
notification.open({
message,
description,
duration,
className,
});
<file_sep>import { State } from '@app/src/lib/store';
import { getPageKeyFromQuery, getPageKeyFromParams } from './query';
import { NewsFetchParams } from './config';
// Select the cached news page for the given router query; empty list when
// that page has not been fetched yet.
export const selectNews = (query: any) => (state: State) => {
  const pages = state.news.list.list.pages;
  const pageKey = getPageKeyFromQuery(query);
  return pages[pageKey] || [];
};
// Same as selectNews, but keyed off explicit fetch params instead of the
// router query.
export const selectNewsWithParams = (params: NewsFetchParams) => (
  state: State,
) => {
  const pages = state.news.list.list.pages;
  const pageKey = getPageKeyFromParams(params);
  return pages[pageKey] || [];
};
<file_sep>export const OPTIONS_MONTHS = [
{
key: '1',
label: 'Январь',
},
{
key: '2',
label: 'Февраль',
},
{
key: '3',
label: 'Март',
},
{
key: '4',
label: 'Апрель',
},
{
key: '5',
label: 'Май',
},
{
key: '6',
label: 'Июнь',
},
{
key: '7',
label: 'Июль',
},
{
key: '8',
label: 'Август',
},
{
key: '9',
label: 'Сентябрь',
},
{
key: '10',
label: 'Октябрь',
},
{
key: '11',
label: 'Ноябрь',
},
{
key: '12',
label: 'Декабрь',
},
];
<file_sep>import { getDeclensionedWord } from '../helpers';
// Russian declension of "консультация" by count.
// Fixed: three tests previously shared the identical name
// "should save right end for a few characters word", and the 1/21 cases
// were mislabelled — names now state the grammatical form and the count.
describe('getDeclensionedWord', () => {
  test('should use the singular form for 1', () => {
    const num = 1;
    expect(getDeclensionedWord(num)).toBe('консультация');
  });
  test('should use the few-items form for 2', () => {
    const num = 2;
    expect(getDeclensionedWord(num)).toBe('консультации');
  });
  test('should use the few-items form for 3', () => {
    const num = 3;
    expect(getDeclensionedWord(num)).toBe('консультации');
  });
  test('should use the singular form for 21', () => {
    const num = 21;
    expect(getDeclensionedWord(num)).toBe('консультация');
  });
  test('should use the plural form for 11', () => {
    const num = 11;
    expect(getDeclensionedWord(num)).toBe('консультаций');
  });
});
<file_sep>// CSS class for a control's disabled state ('' when enabled).
export const getDisabledClassName = (disabled: boolean): string => {
  if (disabled) {
    return 'disabled';
  }
  return '';
};
<file_sep>import { State } from '@app/src/lib/store';
// Selector: partners flagged as trusts (undefined until the list is loaded).
export const selectTrustPartners = (state: State) => {
  const allPartners = state.partners.list;
  return allPartners?.filter((partner) => partner.isATrust);
};
<file_sep>export const RESTORE_PASSWORD_MODAL_KEY = '<PASSWORD>';
<file_sep>export { CategoriesTags } from './CategoriesTags';
<file_sep>// Typographic Unicode characters used when composing user-facing text.
export const SPACE = '\u0020';
export const NON_BREAKING_SPACE = '\u00A0';
export const NON_BREAKING_HYPHEN = '\u2011';
export const ASTERISK = '\u002A';
// Viewport breakpoints in px: max width treated as mobile, and the minimum
// width at which the chat widget opens by default.
export const MOBILE_WIDTH = 767;
export const CHAT_DEFAULT_OPEN_WIDTH = 959;
// Shape of a CSS-modules class-name map.
export interface StylesType {
  [key: string]: string;
}
<file_sep>// Ensures a URL has a scheme: absolute http(s) URLs and internal
// /preview-image paths pass through; anything else gets an http:// prefix.
export const normalizeUrl = (url: string) => {
  const passThroughPrefixes = ['http://', 'https://', '/preview-image'];
  const isPassThrough = passThroughPrefixes.some((prefix) =>
    url.startsWith(prefix),
  );
  return isPassThrough ? url : `http://${url}`;
};
<file_sep>export const ACTIVE_AND_NOT_DRAFT_SANITY = `status == true && !(_id in path("drafts.**"))`;
<file_sep>import { Action } from 'redux';
import {
createFetchingSymbiote,
createInitialState,
FetchingActions,
FetchingState,
} from '@app/src/lib/symbioteFactory';
import { ArticlesItem } from '../../../models/common/ArticlesItem';
interface State extends FetchingState {
item?: ArticlesItem;
}
interface Actions extends FetchingActions {
success(articles: ArticlesItem): Action;
}
const initialState = createInitialState({
item: undefined,
});
const { actions, reducer } = createFetchingSymbiote<State, Actions>(
initialState,
(state: State, articlesItem: ArticlesItem) => {
return {
...state,
item: articlesItem,
};
},
'articlesItem',
);
export { reducer, actions };
export type { State, Actions };
<file_sep>import { PageType } from '@app/src/features/landing/features/partners/organisms/PartnersList/config';
export enum FrequencyEnum {
Monthly = 'monthly',
Once = 'once',
}
export const frequencyForm = [
{
id: FrequencyEnum.Monthly,
value: FrequencyEnum.Monthly,
name: 'frequency',
label: 'Помогать ежемесячно',
checked: true,
size: 1,
},
{
id: FrequencyEnum.Once,
value: FrequencyEnum.Once,
name: 'frequency',
label: 'Разово',
checked: false,
size: 1,
},
];
// Donation-amount radio options as [value, label] pairs; labels use a thin
// grouping space and are shown verbatim (runtime strings — do not alter).
const COST_OPTIONS: Array<[string, string]> = [
  ['100', '100 ₽'],
  ['300', '300 ₽'],
  ['500', '500 ₽'],
  ['1000', '1 000 ₽'],
  ['2000', '2 000 ₽'],
  ['5000', '5 000 ₽'],
  ['10000', '10 000 ₽'],
  ['other', 'Другая сумма'],
];

// Radio-group config: 500 ₽ is preselected, the free-input "other" option
// spans two grid cells (size 2), everything else one.
export const costForm = COST_OPTIONS.map(([value, label]) => ({
  id: value,
  value,
  name: 'cost',
  label,
  size: value === 'other' ? 2 : 1,
  checked: value === '500',
}));
// Options for the donation "target" select (currently only the help desk).
const targetSelectOptions = [
  { value: PageType.Ask, label: 'Справочная служба' },
];

// Builds the target-select config, pre-selecting the option matching the
// page the widget is rendered on.
export const getTargetSelect = (pageType: PageType) => ({
  label: 'Назначение:',
  name: 'target',
  options: targetSelectOptions.map((option) => ({
    ...option,
    selected: option.value === pageType,
  })),
});
<file_sep>export const aboutFeatures = [
{
id: '10',
content:
'Создали и поддерживаем справочную службу для людей, которая снижает нагрузку на систему здравоохранения.',
},
{
id: '20',
content:
'Обучаем врачей по всей России, повышая эффективность лечения COVID-19.',
},
{
id: '30',
content:
'Выявляем потребности и обеспечиваем больницы, принимающие пациентов с ОРВИ, гриппом, пневмонией и COVID-19.',
},
];
<file_sep>import { Dispatch } from 'redux';
import { actions } from '@app/src/domain/reducers/signInReducer/reducer';
import { ExtraArgs, State } from '@app/src/lib/store';
import { actions as userActions } from './index';
// Thunk: fetch the authenticated user and store their roles and login in the
// user slice. Wraps the API call in the sign-in slice's request/error
// actions and re-throws on failure so callers can react to auth errors.
export const currentUser = () => async (
  dispatch: Dispatch<any>,
  getState: () => State,
  { getApi }: ExtraArgs,
) => {
  const api = getApi(getState);
  try {
    dispatch(actions.request());
    const user = await api.currentUser();
    dispatch(userActions.setUser(user.roles));
    dispatch(userActions.setLogin(user.login));
    return user;
  } catch (error) {
    dispatch(actions.error(error.message));
    throw error;
  }
};
<file_sep>import Container from './container';
import { ModalDispatcher } from './helpers/ModalDispatcher';
export default Container;
export { reducer, actions } from './reducer';
export type { State } from './reducer';
export { default as withModal } from './withModal';
export type { WithModalProps } from './withModal';
export const { isModal } = ModalDispatcher.getInstance();
export { useModal } from './useModal';
export { useSpecificModal } from './useSpecificModal';
<file_sep>declare module '*.css';
declare module '*.svg?sprite';
<file_sep>export const PER_PAGE_ARTICLES = 10;
export interface ArticlesQuery {
page?: string;
category?: string;
tags?: string;
}
export interface ArticlesFetchParams {
page: number;
category: string;
tags: string[];
}
<file_sep>import { Dispatch } from 'redux';
import { ExtraArgs, State } from '@app/src/lib/store';
import { needToFetch } from '@app/src/helpers/needToFetch';
import { actions } from './reducer';
import { selectHospitals } from './selectHospitals';
export const getHospitalsFromSanity = () => async (
dispatch: Dispatch<any>,
getState: () => State,
{ getApi }: ExtraArgs,
) => {
if (needToFetch(selectHospitals(getState()))) {
const api = getApi(getState);
try {
dispatch(actions.request());
const hospitals = await api.getHospitals();
return dispatch(actions.success(hospitals));
} catch (error) {
return dispatch(actions.error(error.message));
}
}
};
<file_sep>import { AnyAction } from 'redux';
import { useDispatch } from 'redux-react-hook';
import { ThunkAction } from 'redux-thunk';
import { ExtraArgs, State } from '@app/src/lib/store';
// Hook returning a dispatcher typed for this app's thunks (State/ExtraArgs).
// The `as any` cast bridges redux-react-hook's plain-action dispatch type,
// which does not know about thunk middleware.
export const useThunk = () => {
  const dispatch = useDispatch();
  return <Result = Promise<void>>(
    action: ThunkAction<Result, State, ExtraArgs, AnyAction>,
  ) => dispatch(action as any);
};
<file_sep>import getConfig from 'next/config';
import { WidgetForm } from '../widgetReducer';
import { FrequencyEnum } from '../formConfig';
const { publicRuntimeConfig } = getConfig();
/**
 * Maps the donation widget form values to a CloudPayments charge payload.
 * Monthly donations become a recurring subscription; a custom amount
 * ("other" option) takes precedence over the preset one.
 */
export const getWidgetData = (formData: WidgetForm) => {
  const { name, surname, email, frequency, target, cost, otherCost } = formData;

  // Payer details forwarded to CloudPayments (typed as an open record so the
  // optional cloudPayments extension below needs no `as any` cast).
  const data: Record<string, any> = {
    firstname: name,
    lastname: surname,
    email,
  };

  if (frequency === FrequencyEnum.Monthly) {
    data.cloudPayments = {
      recurrent: { interval: 'Month', period: 1 },
    };
  }

  // `!!otherCost ? otherCost : cost` simplified — identical semantics.
  const amount = otherCost || cost;

  return {
    publicId: publicRuntimeConfig.cloudPaymentId,
    description: target,
    amount: Number(amount),
    currency: 'RUB',
    accountId: email,
    data,
  };
};
<file_sep>export { HelpFooter } from './HelpFooter';
<file_sep>export { SystemDonation } from './SystemDonation';
<file_sep>export { SystemMain } from './Main';
<file_sep>import { IMAGE_REGEXP } from '@app/src/helpers/regexs';
// Image file extensions that should be routed through the preview endpoint.
const PREVIEW_EXTENSIONS = ['.jpg', '.jpeg', '.png'];

/**
 * Routes image links through the internal /preview-image endpoint; all other
 * links pass through unchanged. Generalized: the extension check is now
 * case-insensitive (the old explicit chain covered only .jpg/.JPG etc. and
 * missed mixed-case names like .Jpg).
 */
export const getPreviewLink = (link: string) => {
  if (IMAGE_REGEXP.test(link)) {
    return `/preview-image/${encodeURIComponent(link)}`;
  }
  const lowerCased = link.toLowerCase();
  if (PREVIEW_EXTENSIONS.some((ext) => lowerCased.endsWith(ext))) {
    return `/preview-image/${encodeURIComponent(link)}`;
  }
  return link;
};
<file_sep>import { get } from 'lodash';
import * as content from './conslutionConfig';
import { temperatureList } from '../request/organisms/RequestForm/config';
// True when any primary COVID indicator (fever, dyspnea, chest pain) is set.
export const getCovidSymptoms = (data: any) =>
  ['symptoms.temperature', 'symptoms.dyspnea', 'symptoms.thorax'].some(
    (path) => !!get(data, path),
  );
export const getDangerSymptoms = (data: any) => {
return (
!!get(data, 'symptoms.thorax') ||
!!get(data, 'symptoms.dyspnea') ||
(!!data.symptoms.temperatureType &&
(data.symptoms.temperatureType === temperatureList[1].value ||
data.symptoms.temperatureType === temperatureList[2].value))
);
};
// Maps questionnaire answers to a conclusion text and a list of related
// articles. Branch ORDER matters: danger symptoms win over everything, then
// oncology, then the age-based (60+) risk-group branches, with a generic
// success fallback. (Name "Conclution" is a historical typo kept for callers.)
export const getConclutionText = (data: any) => {
  if (!data)
    return {
      text: 'no data',
      articles: content.ONCO_LINKS,
    };
  const age = Number(data.age);
  const withoutSymptoms =
    !data.symptoms || Object.keys(data.symptoms).length === 0;
  const withoutDeseases =
    !data.deseases || Object.keys(data.deseases).length === 0;
  const withSymptoms = !!data.symptoms && Object.keys(data.symptoms).length > 0;
  const withDeseases = !!data.deseases && Object.keys(data.deseases).length > 0;
  // any group, with the main COVID symptoms
  if (!!data.symptoms && getDangerSymptoms(data)) {
    return {
      text: content.DANGER,
      articles: content.WITH_OTHER_SYMPTOMS_LINKS,
    };
  }
  // any group, no symptoms, with an oncological disease
  if (withoutSymptoms && withDeseases && !!data.deseases.oncological) {
    return {
      text: content.ONCOLOGICAL,
      articles: content.ONCO_LINKS,
    };
  }
  // not in the risk group, no symptoms, no diseases
  if (age < 60 && withoutSymptoms && withoutDeseases) {
    return {
      text: content.SUCCESS,
      articles: content.SUCCESS_LINKS,
    };
  }
  // not in the risk group, with non-COVID symptoms
  if (age < 60 && withSymptoms && !getCovidSymptoms(data)) {
    return {
      text: content.WITH_OTHER_SYMPTOMS,
      articles: content.WITH_OTHER_SYMPTOMS_LINKS,
    };
  }
  // in the risk group, no symptoms, with diseases, no oncology
  if (
    age >= 60 &&
    withoutSymptoms &&
    withDeseases &&
    !data.deseases.oncological
  ) {
    return {
      text: content.RISK_GROUP,
      articles: content.RISK_LINKS,
    };
  }
  // in the risk group, with non-COVID symptoms
  if (age >= 60 && withSymptoms && !getCovidSymptoms(data)) {
    return {
      text: content.WITH_OTHER_SYMPTOMS_AND_RISK_GROUP,
      articles: content.WITH_OTHER_SYMPTOMS_AND_RISK_LINKS,
    };
  }
  // in the risk group with COVID symptoms
  if (age >= 60 && getCovidSymptoms(data)) {
    return {
      text: content.DANGER_AND_RISK_GROUP,
      articles: content.WITH_OTHER_SYMPTOMS_LINKS,
    };
  }
  return {
    text: content.SUCCESS,
    articles: content.SUCCESS_LINKS,
  };
};
<file_sep>import { Action } from 'redux';
import { Partner } from '@app/src/domain/models/common/Partner';
import { PageType } from '@app/src/features/landing/features/partners/organisms/PartnersList/config';
import {
createFetchingSymbiote,
createInitialState,
FetchingActions,
FetchingState,
} from '@app/src/lib/symbioteFactory';
import { TagType } from '../../models/common/Tag';
interface State extends FetchingState {
list: TagType[];
}
interface Actions extends FetchingActions {
success(tags: any): Action;
}
const initialState = createInitialState({
list: [],
});
const { actions, reducer } = createFetchingSymbiote<State, Actions>(
initialState,
(state: State, tags: TagType[]) => {
return {
...state,
list: tags,
};
},
'tags',
);
export { reducer, actions };
export type { State, Actions };
<file_sep>import { URL_REGEXP, PREVIEW_IMAGE_REGEXP } from './URL_REGEXP';
/**
 * Extracts all URLs from *text*; always returns an array.
 *
 * Fixed: the /preview-image branch returned `String.match` directly, which
 * is null on no match, while the other branch normalized to [] — callers
 * iterating the result could crash. Both branches now return [] on no match.
 */
export const findUrls = (text: string): string[] => {
  if (text.startsWith('/preview-image')) {
    return text.match(PREVIEW_IMAGE_REGEXP) || [];
  }
  return text.match(URL_REGEXP) || [];
};
<file_sep>export { NavLink } from './NavLink';
export type { NavLinkProps } from './NavLinkProps';
<file_sep>import { Dispatch } from 'redux';
import { needToFetch } from '@app/src/helpers/needToFetch';
import { ExtraArgs, State } from '@app/src/lib/store';
import { newsForHospitalsQueryBuilder } from '../helpers/newsForHospitals';
import { actions } from './reducer';
import { selectHospitalNews } from './selectHospitalNews';
export const getNewsForHospitals = () => async (
dispatch: Dispatch<any>,
getState: () => State,
{ getApi }: ExtraArgs,
) => {
if (needToFetch(selectHospitalNews(getState()))) {
const api = getApi(getState);
try {
dispatch(actions.request());
const query = newsForHospitalsQueryBuilder();
const news = await api.getNews(query);
return dispatch(actions.success(news));
} catch (error) {
return dispatch(actions.error(error.message));
}
}
};
<file_sep>export { default as DateValidationTooltip } from './components/DateValidationTooltip';
export { default as QuestionsValidationTooltip } from './components/QuestionsValidationTooltip';
export { default as ValidationTooltip } from './components/withFinalForm/ValidationTooltip';
export { default as ComboBox } from './components/withFinalForm/ComboBox';
export {
default as EmergingFormElement,
ControlTypes as EmergingControlTypes,
} from './components/withFinalForm/EmergingFormElement';
export { default as Form } from './components/Form';
export type { RemoveSection } from './components/Form';
export { default as Input, InputType } from './components/withFinalForm/Input';
export { default as RadioGroup } from './components/withFinalForm/RadioGroup';
export { default as Select } from './components/withFinalForm/Select';
export { default as Toggle } from './components/withFinalForm/Toggle';
export { default as TextArea } from './components/withFinalForm/TextArea';
export { default as SelectMonths } from './components/withFinalForm/SelectMonths';
export { default as SelectYears } from './components/withFinalForm/SelectYears';
export { default as Checkbox } from './components/withFinalForm/Checkbox';
export { default as PhoneInput } from './components/withFinalForm/PhoneInput';
export {
default as ButtonWithTooltip,
ButtonSize,
ButtonKind,
} from './components/withFinalForm/ButtonWithTooltip';
export { default as ComboSearch } from './components/withFinalForm/ComboSearch';
<file_sep>// Public API of the browser-query slice: reducer, State type, selector,
// and the `set` action re-exported under a stable name.
// Fixed: the import sat below the export statements; ES imports are hoisted
// so behavior was unaffected, but convention (and import/order lint) puts
// imports at the top of the module.
import { actions } from './reducer';

export { reducer } from './reducer';
export type { State } from './reducer';
export { getQuery } from './selectors';

const { set } = actions;
export { set };
<file_sep>import { isEmpty } from 'lodash';
import { ALL_CATEGORIES } from '@app/src/domain/models/common/NewsCategoryType';
import { TagType } from '@app/src/domain/models/common/Tag';
import { ACTIVE_AND_NOT_DRAFT_SANITY } from '@app/src/helpers/activeAndNotDraftSanity';
import { NewsFetchParams, PER_PAGE_NEWS } from '../list/config';
// Builds the GROQ query for the news list: published news only, optionally
// filtered by tag slugs and category, ordered newest-first then by
// sortIndex, with referenced tag documents inlined via `tags[]->`.
export const newsListRequestBuilder = (
  params: NewsFetchParams,
  tagValues: TagType[],
) => {
  return `*[_type == 'news' && ${ACTIVE_AND_NOT_DRAFT_SANITY} ${renderTags(
    params.tags,
    tagValues,
  )} ${renderCategories(
    params.category,
  )}] | order(date desc) | order(sortIndex desc) ${renderAmount(
    params.page,
  )} {..., 'tags': tags[]-> }`;
};
// GROQ filter fragment restricting news to the given tag slugs; slugs are
// resolved to tag document ids, unknown slugs are dropped, and an empty
// slug list produces no fragment.
const renderTags = (tagSlugs: string[], tagValues: TagType[]) => {
  if (isEmpty(tagSlugs)) return '';
  const quotedIds: string[] = [];
  for (const slug of tagSlugs) {
    const matched = tagValues.find((tag) => tag.code.current === slug);
    if (matched) {
      quotedIds.push(`'${matched._id}'`);
    }
  }
  return ` && tags[]._ref in [${quotedIds.join(', ')}]`;
};
// GROQ filter for a single category; the "all" sentinel means no filtering.
const renderCategories = (category: string) =>
  category === ALL_CATEGORIES ? '' : ` && '${category}' in categories[]`;
// GROQ pagination slice — intentionally disabled: an empty fragment makes
// the query fetch every item. Re-enable by returning a `[start..end]` range
// computed from PER_PAGE_NEWS and `page`.
// Fixed: removed the unreachable statements that followed `return ''`.
const renderAmount = (_page: number) => '';
<file_sep>export { SerializerActionButton } from './SerializerActionButton';
<file_sep>import { State } from '@app/src/lib/store';
export const selectHospitals = (state: State) => state.hospitals.list;
<file_sep>export enum ButtonKind {
Primary,
Secondary,
Extra,
Super,
}
<file_sep>export { Home as default } from '@app/src/features/landing';
<file_sep>import dayjs from 'dayjs';
import { checkForFutureDateError } from '../checkForFutureDate';
import { ErrorCode } from '../erorCodes';
describe('checkForFutureDates works', () => {
test('should return correct error code if date1 is in the future', () => {
const date1 = dayjs().add(5000, 'day').valueOf();
const date2 = dayjs('2018-01-18').valueOf();
expect(checkForFutureDateError([date1, date2])).toBe(ErrorCode.FutureDate);
});
test('should return correct error code if date2 is in the future', () => {
const date1 = dayjs('2018-01-18').valueOf();
const date2 = dayjs().add(1, 'day').valueOf();
expect(checkForFutureDateError([date1, date2])).toBe(ErrorCode.FutureDate);
});
test('should return undefined if both dates are in the past', () => {
const date1 = dayjs().subtract(100, 'day').valueOf();
const date2 = dayjs().subtract(500, 'day').valueOf();
expect(checkForFutureDateError([date1, date2])).toBe(undefined);
});
});
<file_sep>import * as _ from 'lodash';
import { Partner } from '@app/src/domain/models/common/Partner';
enum PartnersType {
All = 'all',
Donor = 'donor',
InfoPartner = 'info',
InfrastructurePartner = 'infrastructure',
}
enum PartnersLabel {
All = 'Все',
Donor = 'Доноры',
InfoPartner = 'Информационные партнёры',
InfrastructurePartner = 'Инфраструктурные партнёры',
}
enum PageType {
Main = 'main',
Hospitals = 'hospitals',
Info = 'info',
Doctors = 'doctors',
Ask = 'ask',
}
const customOptions = [
{ type: PartnersType.Donor, label: PartnersLabel.Donor },
{
type: PartnersType.InfrastructurePartner,
label: PartnersLabel.InfrastructurePartner,
},
{ type: PartnersType.InfoPartner, label: PartnersLabel.InfoPartner },
];
const defaultOptions = [{ type: PartnersType.All, label: PartnersLabel.All }];
// Filter options for the partner types actually present among
// *currentPartners*, always prefixed with the "All" option.
const getCurrentPartnersOptions = (currentPartners: Partner[]) => {
  const presentTypes = new Set(currentPartners.map((partner) => partner.type));
  const available = customOptions.filter((option) =>
    presentTypes.has(option.type),
  );
  return [...defaultOptions, ...available];
};
export { PartnersType, PartnersLabel, PageType, getCurrentPartnersOptions };
<file_sep>export { SerializerImage } from './SerializerImage';
<file_sep>import EventPusher from './EventPusher';
// Hook returning an EventPusher bound to *sourceName* for analytics pushes.
// NOTE(review): a fresh EventPusher is constructed on every render — if a
// stable instance is expected, this should be wrapped in useMemo; confirm.
export const useGoogleAnalyticsPush = (sourceName: string) => {
  const gtmPush = new EventPusher(sourceName);
  return gtmPush;
};
<file_sep>import { SanityBase } from '@app/src/domain/models/sanity/SanityBase';
import { Photo } from '@app/src/domain/models/sanity/Photo';
import { Block } from '@app/src/domain/models/sanity/Block';
export interface Expert extends SanityBase {
status: boolean;
name: string;
subtitle: string;
logo: Photo;
code: { current: string };
sortIndex: number;
description: Block[];
}
<file_sep>import * as yup from 'yup';
export type Schema = yup.Schema<any>;
export type ValidateCb = (value: any, values: object) => void;
interface ValidatorParams {
value: any;
values: any;
schema?: Schema;
validateCb?: ValidateCb;
}
export type Validator = (params: ValidatorParams) => undefined | string;
// Runs the optional yup schema and/or callback against a field value and
// returns the first validation message, or undefined when the value is valid
// (hence the consistent-return suppression).
// eslint-disable-next-line consistent-return
const validator: Validator = ({ value, values, schema, validateCb }) => {
  try {
    if (schema) {
      schema.validateSync(value);
    }
    if (validateCb) {
      validateCb(value, values);
    }
  } catch (error) {
    // Both yup and callbacks signal failure by throwing; surface the message.
    return error.message;
  }
};
export { validator };
<file_sep>export const countries = [
'Беларусь',
'Украина',
'Казахстан',
'Армения',
'Грузия',
'Узбекистан',
'Таджикистан',
'Андорра',
'Объединенные Арабские Эмираты',
'Афганистан',
'Антигуа и Барбуда',
'Ангилья',
'Албания',
'Ангола',
'Антарктика',
'Аргентина',
'Восточное Самоа',
'Австрия',
'Австралия',
'Аруба',
'Азербайджан',
'Босния и Герцеговина',
'Барбадос',
'Бангладеш',
'Бельгия',
'Буркина-Фасо',
'Болгария',
'Бахрейн',
'Бурунди',
'Бенин',
'Сен-Бартелеми',
'Бермудские Острова',
'Бруней',
'Боливия',
'Бразилия',
'Багамские Острова',
'Бутан',
'Остров Буве',
'Ботсвана',
'Белиз',
'Канада',
'Кокосовые острова',
'Конго, Демократическая Республика',
'Центральноафриканская Республика',
'Конго, Республика',
'Швейцария',
'Кот-д’Ивуар',
'Острова Кука',
'Чили',
'Камерун',
'Китай',
'Колумбия',
'Коста-Рика',
'Куба',
'Кабо-Верде',
'Кюрасао',
'Остров Рождества',
'Кипр',
'Чешская Республика',
'Германия',
'Джибути',
'Дания',
'Доминика',
'Доминиканская Республика',
'Алжир',
'Эквадор',
'Эстония',
'Египет',
'Западная Сахара',
'Эритрея',
'Испания',
'Эфиопия',
'Финляндия',
'Фиджи',
'Фолклендские острова (Мальвинские острова)',
'Федеративные Штаты Микронезии',
'Фарерские острова',
'Франция',
'Метрополия Франции',
'Габон',
'Великобритания',
'Гренада',
'Французская Гвиана',
'Гернси',
'Гана',
'Гибралтар',
'Гренландия',
'Гамбия',
'Гвинея',
'Гваделупа',
'Экваториальная Гвинея',
'Греция',
'Южная Георгия и Южные Сандвичевы острова',
'Гватемала',
'Гуам',
'Гвинея-Бисау',
'Гайана',
'Гонконг',
'Остров Херд и острова Макдональд',
'Гондурас',
'Хорватия',
'Гаити',
'Венгрия',
'Индонезия',
'Ирландия',
'Израиль',
'Остров Мэн',
'Индия',
'Британская территория в Индийском океане',
'Ирак',
'Иран',
'Исландия',
'Италия',
'Джерси',
'Ямайка',
'Иордания',
'Япония',
'Кения',
'Кыргызстан',
'Камбоджа',
'Кирибати',
'Коморские Острова',
'Сент-Китс и Невис',
'Корейская Народно-Демократическая Республика',
'Республика Корея',
'Кувейт',
'Каймановы острова',
'Лаос',
'Ливан',
'Сент-Люсия',
'Лихтенштейн',
'Шри-Ланка',
'Либерия',
'Лесото',
'Литва',
'Люксембург',
'Латвия',
'Ливия',
'Марокко',
'Монако',
'Молдова',
'Черногория',
'Сен-Мартен',
'Мадагаскар',
'Маршалловы острова',
'Македония',
'Мали',
'Мьянма',
'Монголия',
'Макао',
'Северные Марианские острова',
'Мартиника',
'Мавритания',
'Монтсеррат',
'Мальта',
'Маврикий',
'Мальдивы',
'Малави',
'Мексика',
'Малайзия',
'Мозамбик',
'Намибия',
'Новая Каледония',
'Нигер',
'<NAME>',
'Нигерия',
'Никарагуа',
'Нидерланды',
'Норвегия',
'Непал',
'Науру',
'Ниуэ',
'<NAME>',
'Оман',
'Панама',
'Перу',
'Французская Полинезия',
'Папуа – Н<NAME>винея',
'Филиппины',
'Пакистан',
'Польша',
'Сен-Пьер и Микелон',
'<NAME>',
'Пуэрто-Рико',
'<NAME>',
'Западный берег реки Иордан',
'Португалия',
'Палау',
'Парагвай',
'Катар',
'Реюньон',
'Румыния',
'Сербия',
'Россия',
'Руанда',
'Саудовская Аравия',
'Соломоновы Острова',
'Сейшельские острова',
'Судан',
'Швеция',
'Сингапур',
'Острова Святой Елены, Вознесения и Тристан-да-Кунья',
'Словения',
'Шпицберген',
'Словакия',
'Сьерра-Леоне',
'Сан-Марино',
'Сенегал',
'Сомали',
'Суринам',
'Южный Судан',
'Сан-Томе и Принсипи',
'Сальвадор',
'Синт-Мартен',
'Сирия',
'Свазиленд',
'Теркс и Кайкос',
'Чад',
'Французские Южные и Антарктические территории',
'Того',
'Таиланд',
'Токелау',
'Восточный Тимор',
'Туркменистан',
'Тунис',
'Тонга',
'Турция',
'Тринидад и Тобаго',
'Тувалу',
'Тайвань',
'Танзания',
'Уганда',
'Внешние малые острова США',
'Соединенные Штаты Америки',
'Уругвай',
'Святой Престол (город-государство Ватикан)',
'Сент-Винсент и Гренадины',
'Венесуэла',
'Британские Виргинские острова',
'Виргинские острова',
'Вьетнам',
'Вануату',
'Уоллис и Футуна',
'Самоа',
'Косово',
'Йемен',
'Майотта',
'Южно-Африканская Республика',
'Замбия',
'Зимбабве',
];
<file_sep>import { tryOr } from '@front/helpers/tryOr';
const clientUrls = ['client%2Fconsultation', 'client/consultation'];
// True when the current browser URL points at the client consultation page
// (plain or URL-encoded form). tryOr guards against `window` being
// unavailable (e.g. during SSR) and falls back to false.
export const isClientConsultationUrl = () =>
  tryOr(
    () => clientUrls.some((el) => window.location.href.includes(el)),
    false,
  );
<file_sep>export const infoText = `<div>
Мы создали сервис, помогающий найти ответы на вопросы о COVID-19. Это
справочник с проверенной информацией, консультации экспертов и
образовательные онлайн-мероприятия для врачей всей страны
</div>
<br />
<div>
Мы собираем средства на развитие проекта. Поддерживая справочную службу,
вы помогаете снизить нагрузку на систему здравоохранения, чтобы быстрее
победить эпидемию и сохранить здоровье людей.
</div>`;
<file_sep>import { createErrorMiddleware } from '@breadhead/thunk-error';
import { checkForAuthError } from '../../../features/login/features/signIn/helpers/checkForAuthError';
import { actions } from './reducer';
export const authViolateStatus = (value: boolean) =>
actions.authViolateStatus(value);
export const unauthorizedMiddleware = createErrorMiddleware(
checkForAuthError,
() => actions.authViolateStatus(true),
);
<file_sep>import { Ref } from '@app/src/domain/models/sanity/Ref';
export interface Photo {
_key: string;
_type: string;
alt: string;
asset: Ref;
}
<file_sep>import { SanityBase } from '@app/src/domain/models/sanity/SanityBase';
import { Photo } from '@app/src/domain/models/sanity/Photo';
import { Block } from '@app/src/domain/models/sanity/Block';
import { NewsCategoryType } from './NewsCategoryType';
import { TagType } from './Tag';
export interface NewsItem extends SanityBase {
status: boolean;
name: string;
sortIndex: number;
code: { current: string };
image?: Photo;
content?: Block[];
showOnMain?: boolean;
categories?: NewsCategoryType[];
tags?: TagType[];
date?: string;
}
<file_sep>import { isEmpty } from 'lodash';
import { ALL_CATEGORIES } from '@app/src/domain/models/common/ArticlesCategoryType';
import { TagType } from '@app/src/domain/models/common/Tag';
import { ACTIVE_AND_NOT_DRAFT_SANITY } from '@app/src/helpers/activeAndNotDraftSanity';
import { ArticlesFetchParams, PER_PAGE_ARTICLES } from '../list/config';
export const articlesListRequestBuilder = (
params: ArticlesFetchParams,
tagValues: TagType[],
) => {
return `*[_type == 'article' && ${ACTIVE_AND_NOT_DRAFT_SANITY} ${renderTags(
params.tags,
tagValues,
)} ${renderCategories(params.category)}] | order(date desc) ${renderAmount(
params.page,
)} { _type, status, sortIndex, name, code, categories, image, pin, date, webinarDate, 'tags': tags[]-> }`;
};
// GROQ filter fragment restricting articles to the given tag slugs; slugs
// are resolved to tag ids, unknown slugs are skipped, empty input yields ''.
const renderTags = (tagSlugs: string[], tagValues: TagType[]) => {
  if (isEmpty(tagSlugs)) return '';
  const ids = tagSlugs
    .flatMap((slug) => {
      const tag = tagValues.find((candidate) => candidate.code.current === slug);
      return tag ? [`'${tag._id}'`] : [];
    })
    .join(', ');
  return ` && tags[]._ref in [${ids}]`;
};
// GROQ filter for a single article category; "all" disables the filter.
const renderCategories = (category: string) =>
  category === ALL_CATEGORIES ? '' : ` && '${category}' in categories[]`;
// GROQ pagination slice — intentionally disabled: returning an empty
// fragment makes the query fetch every article. Re-enable by computing a
// `[start..end]` range from PER_PAGE_ARTICLES and `page`.
// Fixed: removed the unreachable statements that followed `return ''`.
const renderAmount = (_page: number) => '';
<file_sep>import { State } from '@app/src/lib/store';
export const getModal = (state: State) => state.modal;
<file_sep>export { SystemWidget } from './SystemWidget';
<file_sep>import { Role } from '@app/src/domain/models/common/User';
// Auth flow the permission check runs under.
export enum Condition {
  signIn = 'signIn',
  signUp = 'signUp',
}
// Permission-check stub: intended to decide whether *roles* may perform
// *wantTo* under *condition*. Currently it only logs its inputs and returns
// nothing. NOTE(review): the console.log calls look like debug leftovers —
// confirm before relying on this for access control.
export default (
  roles: Role[],
  wantTo?: string | string[],
  condition?: Condition,
) => {
  console.log('roles', roles);
  console.log('wantTo', wantTo);
  console.log('condition', condition);
};
<file_sep>// Matches ISO-8601 date-time strings like 2021-03-04T05:06:07(.000Z).
const DATE_REGEX = /(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2})(.*)/;

/**
 * Returns a shallow copy of *entity* with every ISO-8601 date-time string
 * converted to a Date; all other values are copied unchanged.
 *
 * Fixed: only real strings are tested now — RegExp.test coerces its argument
 * to a string, so non-string values (e.g. an array containing a date string)
 * could previously be converted by mistake.
 */
export const tapDate = <E>(entity: E): E => {
  const converted = Object.entries(entity).map(([key, value]) => [
    key,
    typeof value === 'string' && DATE_REGEX.test(value)
      ? new Date(value)
      : value,
  ]);
  return Object.fromEntries(converted) as E;
};
<file_sep>import Container from './container';
import Menu from './organisms/Menu';
export const ClientMenu = Container(Menu) as any;
<file_sep>import bugsnag from '@bugsnag/js';
import bugsnagReact from '@bugsnag/plugin-react';
import getConfig from 'next/config';
import React from 'react';
const { serverRuntimeConfig, publicRuntimeConfig } = getConfig();
const bugsnagClient = bugsnag({
apiKey:
serverRuntimeConfig.BUGSNAG_API_KEY || publicRuntimeConfig.BUGSNAG_API_KEY,
});
bugsnagClient.use(bugsnagReact, React);
export default bugsnagClient;
<file_sep>import { ClearAction, createClearRedux } from 'redux-clear';
type State = object;
interface Actions {
set: ClearAction<[object]>;
}
const { actions, reducer } = createClearRedux<State, Actions>(
{
set: () => (query) => query,
},
{},
'browser-query',
);
export { reducer, actions };
export type { State, Actions };
<file_sep>import { Dispatch } from 'redux';
import routes from '@app/routes';
import { ExtraArgs, State } from '@app/src/lib/store';
import { getUserEmailLocalStorage } from '@app/src/domain/reducers/signInReducer/userEmailLocalStorage';
import { actions } from './reducer';
import {
getFormId,
resetFormId,
setFormId,
setFormRequestFinished,
} from '../../../features/landing/features/request/organisms/RequestForm/localStorage';
import { FormRequestType } from '../../models/common/FormRequestType';
const { Router } = routes;
// Thunk: submit the corona request form. Attaches the locally stored user
// email, persists the returned form id and the "finished" flag to
// localStorage, then navigates to the request chat. The error is stored in
// the slice and re-thrown so callers can react to the failure.
export const saveCoronaRequestForm = (requestFormData: any) => async (
  dispatch: Dispatch<any>,
  getState: () => State,
  { getApi }: ExtraArgs,
) => {
  const api = getApi(getState);
  try {
    dispatch(actions.request());
    const email = getUserEmailLocalStorage();
    const { id } = await api.saveCoronaRequestForm(
      {
        ...requestFormData,
        email,
      },
      FormRequestType.Corona,
    );
    setFormId(id);
    setFormRequestFinished();
    Router.pushRoute('/request/chat');
    return dispatch(actions.success(requestFormData));
  } catch (error) {
    dispatch(actions.error(error.message));
    throw error;
  }
};
/**
 * Factory for the simple "save form" thunks: dispatches request/success/error
 * around api.saveCoronaRequestForm for a fixed form type, and re-throws the
 * error after storing it so callers can react to the failure.
 *
 * Fixed: the five thunks below were byte-identical copies differing only in
 * the FormRequestType constant; deduplicated without changing any export.
 */
const makeSaveFormThunk = (formType: FormRequestType) => (
  requestFormData: any,
) => async (
  dispatch: Dispatch<any>,
  getState: () => State,
  { getApi }: ExtraArgs,
) => {
  const api = getApi(getState);
  try {
    dispatch(actions.request());
    await api.saveCoronaRequestForm(requestFormData, formType);
    return dispatch(actions.success(requestFormData));
  } catch (error) {
    dispatch(actions.error(error.message));
    throw error;
  }
};

export const saveForHospitalsForm = makeSaveFormThunk(
  FormRequestType.ForHospitals,
);
export const savePartnerForm = makeSaveFormThunk(FormRequestType.Partner);
export const saveVolunteerForm = makeSaveFormThunk(FormRequestType.Volunteer);
export const saveChecklistForm = makeSaveFormThunk(FormRequestType.Checklist);
export const saveWebinarRegistrationForm = makeSaveFormThunk(
  FormRequestType.WebinarRegistration,
);
// Thunk: attach the locally stored email to a previously saved request form
// (identified by the form id kept in localStorage), then clear the stored id.
// Best-effort by design: failures are only logged, never surfaced.
export const updateRequestFormData = () => async (
  _dispatch: Dispatch<any>,
  getState: () => State,
  { getApi }: ExtraArgs,
) => {
  const api = getApi(getState);
  try {
    const formId = getFormId();
    const email = getUserEmailLocalStorage();
    if (formId) {
      await api.updateCoronaRequestForm({ id: formId, email: email });
      resetFormId();
    }
  } catch (error) {
    console.log('error', error);
  }
};
<file_sep>import { ACTIVE_AND_NOT_DRAFT_SANITY } from '@app/src/helpers/activeAndNotDraftSanity';
export const featuredNewsQueryBuilder = () => {
return `*[_type == 'news' && ${ACTIVE_AND_NOT_DRAFT_SANITY}] | order(date desc) | order(sortIndex desc) {..., 'tags': tags[]-> }[0...5]`;
};
<file_sep>export { ChecklistPage as default } from '@app/src/features/landing';
// SEO keywords attached to the landing pages (Russian).
export const keywords = [
  'Что делать',
  'Просто спросить',
  'не напрасно',
  'COVID-19',
  'коронавирус',
  'благотворительность',
  'Фонд медицинских решений',
];
// Default meta description shown in search results and link previews.
export const description = 'Отвечаем на вопросы, помогаем врачам и больницам';
// Social-share images: Facebook/OpenGraph and VK variants, each with a
// "support" (donation-page) alternative.
export const SHARE_IMAGE = '/static/images/share/dc_facebook-share.png';
export const SHARE_IMAGE_SUPPORT =
  '/static/images/share/dc_facebook-share-support.png';
export const SHARE_IMAGE_VK = '/static/images/share/dc_vk-share.png';
export const SHARE_IMAGE_SUPPORT_VK =
  '/static/images/share/dc_vk-share-support.png';
<file_sep>import { shouldValidateDates } from '../shouldValidateDates';
import { DateInterface } from '../types';
// shouldValidateDates: validation is attempted only when every date
// entry carries both a year and a month.
describe('shouldValidateDates works', () => {
  test('should return true if dates are correct', () => {
    expect(
      shouldValidateDates([
        { year: 2018, month: 1 },
        { year: 2019, month: 1 },
      ]),
    ).toBe(true);
  });
  test('should return false if dates arent correct', () => {
    expect(
      shouldValidateDates([
        // Month is deliberately missing; the cast silences the compiler.
        { year: 2018 } as DateInterface,
        {
          year: 2019,
          month: 1,
        },
      ]),
    ).toBe(false);
  });
});
<file_sep>export { SmallText } from './SmallText';
<file_sep>import { NewsPage } from '@app/src/features/landing/features/news/newsPage';
export default NewsPage;
<file_sep>import { get } from 'lodash';
import { DateInterface } from '@app/src/lib/helpers/validateDates';
// Resolve each {year, month} path pair against the form values; any
// missing or falsy entry is normalized to null.
export const getDates = (paths: DateInterface[], values: any) =>
  paths.map((path) => {
    const year = get(values, path.year) || null;
    const month = get(values, path.month) || null;
    return { year, month };
  });
<file_sep>import { IncomingMessage, OutgoingMessage } from 'http';
import { Store } from './store';
// Per-request context handed to Next.js page lifecycle hooks.
export interface AppContext<Query = {}> {
  reduxStore: Store; // app-wide redux store instance
  req: IncomingMessage & { cookies?: any[] }; // incoming request (with parsed cookies, if present)
  res: OutgoingMessage; // outgoing server response
  ctx: any; // TODO: fix it
  query: Query; // parsed query-string parameters
}
interface ServerResponse {
  status: number; // HTTP status code of the failed response
}
// Error shape propagated from API calls; `response` is present only
// when the server actually answered.
export interface ServerError {
  message: string;
  response?: ServerResponse;
}
<file_sep>import { Dispatch } from 'redux';
import { ExtraArgs, State } from '@app/src/lib/store';
import { needToFetch } from '@app/src/helpers/needToFetch';
import { actions } from './reducer';
import { selectExpertBoard } from './selectExperts';
export const getExpertBoardFromSanity = () => async (
  dispatch: Dispatch<any>,
  getState: () => State,
  { getApi }: ExtraArgs,
) => {
  // Thunk: fetch the expert board from Sanity once and cache it in the
  // store (skipped when the slice is already populated/fresh).
  if (needToFetch(selectExpertBoard(getState()))) {
    const api = getApi(getState);
    try {
      const experts = await api
        .getExpertBoard()
        // FIX: the previous comparator returned 1 for equal sortIndex
        // values, which violates the Array#sort contract (must return 0
        // on equality) and could reorder ties unpredictably.
        // Assumes sortIndex is numeric on every expert — TODO confirm.
        .then((res) => res.sort((a, b) => a.sortIndex - b.sortIndex));
      return dispatch(actions.success(experts));
    } catch (error) {
      return dispatch(actions.error(error.message));
    }
  }
};
<file_sep>import { getPreviewLink } from '../getPreviewLink';
// getPreviewLink should rewrite any JPEG-ish upload URL to its
// preview-image variant, regardless of extension case or underscores
// in the file name.
describe('getPreviewLink', () => {
  test('should include preview-image with .jpg', () => {
    const link =
      'https://sun9-44.userapi.com/c854216/v854216679/bc669/pns7i1dGFto.jpg';
    const res = getPreviewLink(link).includes('preview-image');
    expect(res).toEqual(true);
  });
  test('should include preview-image with .JPG', () => {
    const link =
      'https://sun9-44.userapi.com/c854216/v854216679/bc669/pns7i1dGFto.JPG';
    const res = getPreviewLink(link).includes('preview-image');
    expect(res).toEqual(true);
  });
  test('should include preview-image with .jpeg', () => {
    const link =
      'https://sun9-44.userapi.com/c854216/v854216679/bc669/pns7i1dGFto.jpeg';
    const res = getPreviewLink(link).includes('preview-image');
    expect(res).toEqual(true);
  });
  test('should include preview-image with .JPEG', () => {
    const link =
      'https://sun9-44.userapi.com/c854216/v854216679/bc669/pns7i1dGFto.JPEG';
    const res = getPreviewLink(link).includes('preview-image');
    expect(res).toEqual(true);
  });
  test('url with _', () => {
    const link =
      '1a33ecc23b4e76f172f4db366228b1ed_viber_image_2019-10-03_,_17.56.35.jpg';
    const res = getPreviewLink(link).includes('preview-image');
    expect(res).toEqual(true);
  });
  test('viber url', () => {
    const link =
      'https://store.ask.nenaprasno.ru/oncobucket/0655f1aecaba945d99db98c998436d29_viber_image_2019-10-03_,_17.56.36.jpg';
    const res = getPreviewLink(link).includes('preview-image');
    expect(res).toEqual(true);
  });
  // FIX: this test had the same title as the one above ('viber url'),
  // which makes failures ambiguous in the test report.
  test('viber url (second sample)', () => {
    const link =
      'https://store.ask.nenaprasno.ru/oncobucket/1a33ecc23b4e76f172f4db366228b1ed_viber_image_2019-10-03_,_17.56.35.jpg';
    const res = getPreviewLink(link).includes('preview-image');
    expect(res).toEqual(true);
  });
});
<file_sep>export { SytemRadioButton } from './SytemRadioButton';
<file_sep>const CHECKED_CLASS_NAME = 'checked';
const UNCHECKED_CLASS_NAME = 'unchecked';
export const getCheckedClassName = (checked = false) =>
checked ? CHECKED_CLASS_NAME : UNCHECKED_CLASS_NAME;
<file_sep>export { AskButton } from './AskButton';
<file_sep>export { HospitalAidPage } from './page/HospitalAidPage';
<file_sep>import { EMAIL_REGEXP } from './EMAIL_REGEXP';
// True when `email` matches the shared EMAIL_REGEXP.
// EMAIL_REGEXP is shared module state; when declared with the global
// flag, RegExp#test is stateful (a successful match advances lastIndex —
// https://stackoverflow.com/questions/15276873/is-javascript-test-saving-state-in-the-regex).
// FIX: reset lastIndex symmetrically before AND after the test — the old
// code only reset it on the success branch, leaving an asymmetric,
// redundant conditional.
export const isEmail = (email: string) => {
  EMAIL_REGEXP.lastIndex = 0;
  const matches = EMAIL_REGEXP.test(email);
  EMAIL_REGEXP.lastIndex = 0;
  return matches;
};
<file_sep>import Container from './container';
import FeedbackForm from './FeedbackForm';
export default Container(FeedbackForm as any);
export { reducer } from '../../../../../../domain/reducers/feedbackReducer/reducer';
export type { State } from '../../../../../../domain/reducers/feedbackReducer/reducer';
export { sendFeedback } from '../../../../../../domain/reducers/feedbackReducer/actions';
<file_sep>import { Action } from 'redux';
import { Expert } from '@app/src/domain/models/common/Expert';
import {
createFetchingSymbiote,
createInitialState,
FetchingActions,
FetchingState,
} from '@app/src/lib/symbioteFactory';
// Redux slice for the expert board: standard fetching flags plus the
// list of experts loaded from Sanity.
interface State extends FetchingState {
  list: Expert[];
}
interface Actions extends FetchingActions {
  success(experts: any): Action;
}
const initialState = createInitialState({
  list: [],
});
// The factory wires up request/success/error actions; the callback
// below only handles the success payload.
const { actions, reducer } = createFetchingSymbiote<State, Actions>(
  initialState,
  (state: State, expertsFromSanity: Expert[]) => {
    return {
      ...state,
      list: expertsFromSanity,
    };
  },
  'getExpertBoardFromSanity',
);
export { reducer, actions };
export type { State, Actions };
<file_sep>import { DateInterface } from './types';
// A set of dates participates in validation only when every entry
// carries both a year and a month; partially filled dates are skipped.
const shouldValidateDates = (dates: DateInterface[]) =>
  dates.every((date) => Boolean(date && date.year && date.month));
export { shouldValidateDates };
<file_sep>import { CategoryTypes } from '@app/src/domain/models/common/CategoryTypes';
import { getNewsCategoryText } from '@app/src/domain/models/common/NewsCategoryType';
import { getArticleCategoryText } from '@app/src/domain/models/common/ArticlesCategoryType';
// Resolve the human-readable label for a category code, dispatching on
// the content type it belongs to. Unknown types yield null.
export const getCategoryText = (category: string, type: CategoryTypes) => {
  if (type === CategoryTypes.News) {
    return getNewsCategoryText(category);
  }
  if (type === CategoryTypes.Articles) {
    return getArticleCategoryText(category);
  }
  return null;
};
<file_sep>export { getHospitalsHelpWidgetDataFromSanity } from './actions';
<file_sep>import { ServerError } from '@app/src/lib/server-types';
// TODO: check for 403
// True when the server rejected the request as unauthenticated (401).
// A missing response (network failure) is not an auth error.
export const checkForAuthError = (error: ServerError) => {
  const { response } = error;
  return response ? response.status === 401 : false;
};
// Kinds of partner-help requests; the values double as form identifiers.
export enum HelpPartnersType {
  BecomePartner = 'become-partner',
  BecomeInfopartner = 'become-infopartner',
  LectureRequest = 'lecture-request',
  VolunteerFundraising = 'volunteer-fundraising',
}
// Russian labels shown in the UI for each partner-help kind.
// NOTE(review): "PartneTypeLabels" is missing an "r" (PartnerTypeLabels),
// but renaming the exported enum would break importers — left as-is.
export enum PartneTypeLabels {
  BecomePartner = 'Стать партнёром',
  BecomeInfopartner = 'Стать инфопартнером',
  LectureRequest = 'Заказать лекцию',
  VolunteerFundraising = 'Волонтёрский фандрайзинг',
}
<file_sep>import { temperatureList } from '../../request/organisms/RequestForm/config';
// Test fixtures for the corona request-form triage logic: each constant
// models one respondent profile (symptoms + pre-existing conditions).
// NOTE(review): "deseases" is a misspelling of "diseases", but the key
// must match what the form/triage code reads — left unchanged.
export const dataSuccess = {
  target: 'Для себя',
  region: 'Ивановская область',
  age: '40',
  gender: 'Мужской',
  symptoms: {},
  deseases: {},
};
// At-risk: advanced age plus chronic conditions, no symptoms.
export const dataRisk = {
  target: 'Для себя',
  region: 'Ивановская область',
  age: '65',
  gender: 'Мужской',
  symptoms: {},
  deseases: { diabetes: true, cardiovascular: true, COPD: true },
};
// At-risk profile that additionally includes an oncological condition.
export const dataOncological = {
  target: 'Для себя',
  region: 'Ивановская область',
  age: '78',
  gender: 'Мужской',
  symptoms: {},
  deseases: {
    diabetes: true,
    cardiovascular: true,
    oncological: true,
    COPD: true,
  },
};
// Mild, non-dangerous symptom without any risk-group condition.
export const withOtherSymptomsAndNoRiskGroup = {
  target: 'Для себя',
  region: 'Ивановская область',
  age: '56',
  gender: 'Мужской',
  symptoms: {
    sneezing: true,
  },
};
// Mild symptom combined with risk-group age.
export const dataOtherSymptomsAndRisk = {
  target: 'Для себя',
  region: 'Ивановская область',
  age: '60',
  gender: 'Мужской',
  symptoms: {
    sneezing: true,
  },
};
// Dangerous symptom: elevated temperature (second option in the list).
export const dataDanger = {
  target: 'Для себя',
  region: 'Ивановская область',
  age: '57',
  gender: 'Мужской',
  symptoms: {
    temperature: true,
    temperatureType: temperatureList[1].value,
  },
};
// Dangerous symptom: dyspnea.
export const dataDanger2 = {
  target: 'Для себя',
  region: 'Ивановская область',
  age: '60',
  gender: 'Мужской',
  symptoms: {
    dyspnea: true,
    temperatureType: temperatureList[0].value,
  },
};
// Several dangerous symptoms at once.
export const dataDangerMultiple = {
  target: 'Для себя',
  region: 'Ивановская область',
  age: '60',
  gender: 'Мужской',
  symptoms: {
    temperature: true,
    thorax: true,
    dyspnea: true,
    temperatureType: temperatureList[1].value,
  },
};
// Dangerous symptom combined with risk-group age.
export const dataDangerAndRisk = {
  target: 'Для себя',
  region: 'Ивановская область',
  age: '66',
  gender: 'Мужской',
  symptoms: {
    temperature: true,
    temperatureType: temperatureList[0].value,
  },
};
// export const dataOncological = {
//   target: 'Для себя',
//   region: 'Ивановская область',
//   age: '76678',
//   gender: 'Мужской',
//   symptoms: {
//     temperature: true,
//     ['sore-throat']: true,
//     thorax: true,
//     cough: true,
//     chills: true,
//     dyspnea: true,
//     ['loose-stools']: true,
//     sneezing: true,
//     ['runny-nose']: true,
//     weakness: true,
//     ['body-aches']: true,
//     headache: true,
//     nausea: true,
//     ['abdominal-pain']: true,
//     caughtType: 'Влажный с мокротой',
//     thoraxType: { coughing: true, inhale: true, ['physical-activity']: true, rest: true },
//     temperatureType: temperatureList[1].value,
//     dyspneaType: { rest: true, ['physical-activity']: true },
//     symptomsSince: 'Несколько недель назад'
//   },
//   deseases: { diabetes: true, cardiovascular: true, oncological: true, COPD: true },
// }
<file_sep>export { SystemLogo } from './SystemLogo';
<file_sep>export { default as OpenRestorePasswordModalButton } from './OpenRestorePasswordModalButton';
<file_sep>export { SerializerVideo } from './SerializerVideo';
// How a link should be rendered/opened; used as a discriminator by
// serializers and link components.
export const linkOptions = {
  default: 'default',
  blank: 'blank', // open in a new tab
  phone: 'phone', // tel: link
  email: 'email', // mailto: link
} as const;
export type LinkOptionsType = typeof linkOptions[keyof typeof linkOptions];
<file_sep>import { getDateString } from '../getDateString';
// getDateString formats {year, month, day?} as "YYYY-M-D"; a missing
// day defaults to the first of the month.
describe('makeDateString works', () => {
  test('returns correct date string', () => {
    expect(getDateString({ year: 2017, month: 1, day: 29 })).toBe('2017-1-29');
  });
  test('returns correct date string if day isnt passed', () => {
    expect(getDateString({ year: 2017, month: 1 })).toBe('2017-1-1');
  });
});
<file_sep>import { getShouldValidate } from '../getShouldValidate';
// getShouldValidate: validation runs after the field loses focus or a
// submit has failed; eagerValidation forces it unconditionally.
describe('getShouldValidate', () => {
  test('should return false if is active, isnt submitFailed', () => {
    expect(
      getShouldValidate({
        active: true,
        submitFailed: false,
        touched: true,
        pristine: true,
      }),
    ).toBe(false);
  });
  test('should return true if field isnt active and submitFailed', () => {
    expect(
      getShouldValidate({
        active: false,
        submitFailed: true,
        touched: true,
        pristine: true,
      }),
    ).toBe(true);
  });
  // FIX: the title claimed "should return true" while the assertion
  // expects false — corrected to describe the actual expectation.
  test('should return false if field isnt active, isnt submitFailed, touched and pristine', () => {
    expect(
      getShouldValidate({
        active: false,
        submitFailed: false,
        touched: true,
        pristine: true,
      }),
    ).toBe(false);
  });
  test('should return false if field isnt active, isnt submitFailed, isnt touched and pristine', () => {
    expect(
      getShouldValidate({
        active: false,
        submitFailed: false,
        touched: false,
        pristine: true,
      }),
    ).toBe(false);
  });
  // FIX: the title was truncated ("should return if ..."); it now states
  // the expected result.
  test('should return true if eager validation is true', () => {
    expect(
      getShouldValidate({
        eagerValidation: true,
        active: false,
        submitFailed: false,
        touched: false,
        pristine: true,
      }),
    ).toBe(true);
  });
});
// News rubric codes as stored in Sanity documents.
export enum NewsCategoryType {
  News = 'news',
  Help = 'help',
  Report = 'report',
}
// Pseudo-category meaning "no category filter".
export const ALL_CATEGORIES = 'all_categories';
// Russian display label for a category code (undefined for unknown codes).
export const getNewsCategoryText = (category: string) =>
  ({
    [NewsCategoryType.Help]: 'Помощь больницам',
    [NewsCategoryType.News]: 'Новости',
    [NewsCategoryType.Report]: 'Отчёты',
    [ALL_CATEGORIES]: 'Все категории',
  }[category]);
<file_sep>import { ALL_CATEGORIES } from '@app/src/domain/models/common/NewsCategoryType';
import { NewsQuery, NewsFetchParams } from './config';
// Normalize the router query into typed fetch parameters; empty tag
// fragments produced by stray commas are discarded.
export const getParamsFromQuery = ({
  page = '1',
  category = ALL_CATEGORIES,
  tags = '',
}: NewsQuery): NewsFetchParams => ({
  page: Number(page),
  category,
  tags: tags.split(',').filter(Boolean),
});
// Cache key for a category + tags combination (page-independent).
export const getQueryKey = (params: NewsFetchParams) =>
  params.category + (params.tags || []).join('-');
// Cache key for one concrete page of results, derived from the raw query.
export const getPageKeyFromQuery = (query: any) =>
  getPageKeyFromParams(getParamsFromQuery(query));
export const getPageKeyFromParams = (params: NewsFetchParams) =>
  `${getQueryKey(params)}-page-${params.page}`;
// Page size for news listings.
export const PER_PAGE_NEWS = 10;
// Raw query-string shape coming from the router (everything is a string).
export interface NewsQuery {
  page?: string;
  category?: string;
  tags?: string; // comma-separated tag codes
}
// Normalized fetch parameters after parsing the query.
export interface NewsFetchParams {
  page: number;
  category: string;
  tags: string[];
}
<file_sep>export { default as RestorePasswordModal } from './RestorePasswordModal';
export { RESTORE_PASSWORD_MODAL_KEY } from './modal-key';
// Onboarding steps rendered on the landing page (icon + caption).
export const steps = [
  {
    index: 1,
    img: '/static/images/1-step.png',
    text: 'Пройдите короткий тест\nи получите рекомендации',
  },
  {
    index: 2,
    img: '/static/images/landing/computer2x.png',
    text: 'Если останутся вопросы,\nзадайте их в чате',
  },
  {
    index: 3,
    img: '/static/images/2-step.png',
    text: 'Подождите пока эксперт\nответит вам',
  },
];
<file_sep>export { SystemNavigation } from './SystemNavigation';
<file_sep>export { Toggle } from './Toggle';
export type { Props } from './Toggle';
// next.config.js — Next.js configuration composed from several plugins:
// module transpilation, image handling, Less (with antd theme vars) and
// CSS modules.
const withCSS = require('next-css-unpluggable');
const withPlugins = require('next-compose-plugins');
const withLess = require('@zeit/next-less');
const withOptimizedImages = require('next-optimized-images');
const withTM = require('next-transpile-modules');
const lessToJS = require('less-vars-to-js');
const fs = require('fs');
const path = require('path');
const IgnoreNotFoundExportPlugin = require('ignore-not-found-export-plugin');
// Where your antd-custom.less file lives
const themeVariables = lessToJS(
  fs.readFileSync(path.resolve(__dirname, './src/ui/antd-custom.less'), 'utf8'),
);
// fix: prevents error when .less files are required by node
if (typeof require !== 'undefined') {
  require.extensions['.less'] = () => {
    // pass
  };
}
module.exports = withPlugins(
  [
    [
      withTM,
      {
        transpileModules: ['query-string'],
      },
    ],
    [
      withOptimizedImages,
      {
        optimizeImages: false,
      },
    ],
    [
      withLess,
      {
        lessLoaderOptions: {
          javascriptEnabled: true,
          modifyVars: themeVariables, // make your antd custom effective
        },
      },
    ],
    [
      withCSS,
      {
        cssModules: true,
      },
    ],
    {
      serverRuntimeConfig: {
        // NOTE(review): 'mememe' is a placeholder used when the env var
        // is absent (e.g. local development) — confirm this is intended.
        BUGSNAG_API_KEY: process.env.BUGSNAG_SERVER_API_KEY || 'mememe',
      },
      publicRuntimeConfig: {
        backUrl: process.env.BACK_URL || 'https://api.defeatcovid.ru/',
        siteUrl: process.env.SITE_URL || 'http://localhost:3001',
        BUGSNAG_API_KEY: process.env.BUGSNAG_BROWSER_API_KEY || 'mememe',
        prodUrl: process.env.PROD_IMGPROXY_URL || 'https://defeatcovid.ru',
        storageUrl:
          process.env.STORAGE_URL || 'https://image.covid.breadhead.ru',
        cloudPaymentId:
          process.env.CLOUD_PAYMENT_ID || 'pk_ae5b79dd2b9e73b5a2640f0738fcf',
      },
    },
  ],
  {
    webpack: (config) => {
      // Suppress "export not found" warnings for type-only re-exports
      // (TypeScript types are erased at compile time).
      config.plugins.unshift(
        new IgnoreNotFoundExportPlugin([
          'Actions',
          'Props',
          'State',
          'WithLoginModal',
          'WithModalProps',
          'WithQuotaTypeModal',
          'WithBindQuotaModal',
          'WithSignUpModal',
          'Validator',
          'FormFinish',
          'WithCloseClaimModal',
          'FetchingState',
          'WithChooseDoctorModal',
          'RemoveSection',
        ]),
      );
      return config;
    },
  },
);
// Matches names/paths ending in a common raster-image extension.
// NOTE(review): the '|' characters inside the character class are
// literal pipes, not alternation — likely unintended but harmless here.
export const IMAGE_REGEXP = /([/|.|\w|\s|-])*\.(jpe?g|gif|png|bmp)$/im;
<file_sep>import { ACTIVE_AND_NOT_DRAFT_SANITY } from '@app/src/helpers/activeAndNotDraftSanity';
// Build a GROQ query selecting a single active article by its slug.
// NOTE(review): `code` is interpolated directly into the query string —
// if it can contain quotes this permits GROQ injection; consider
// escaping it or using Sanity's bound query parameters.
export const articlesItemRequestBuilder = (code: string) => {
  return `*[_type == 'article' && ${ACTIVE_AND_NOT_DRAFT_SANITY} && code.current == '${code}']
{
...,
'tags': tags[]->
}`;
};
<file_sep>import { RestorePasswordModal } from '@app/src/features/login/features/restorePassword';
import SignUpModal from '../features/login/features/signUp';
import SignInModal from '../features/login/features/signIn';
// Registry of globally available modal components; the modal root uses
// this to know which modals can be opened from anywhere in the app.
const modals = [SignUpModal, SignInModal, RestorePasswordModal];
export default () => modals;
<file_sep>export { ForHospitalsPage as ForHospitals } from './components/ForHospitals/ForHospitals';
// Form types accepted by the backend's saveCoronaRequestForm endpoint;
// the values are the identifiers the API expects.
export enum FormRequestType {
  Corona = 'corona',
  ForHospitals = 'for-hospitals',
  Volunteer = 'volunteer',
  WebinarRegistration = 'webinar-registration',
  Partner = 'partner',
  Checklist = 'checklist',
}
// Default validation message for required form fields (Russian).
export const REQUIRED_MESSAGE = 'Обязательное поле';
<file_sep>export { ErrorCode } from './erorCodes';
export { shouldValidateDates } from './shouldValidateDates';
export { validateDates } from './validateDates';
export type { DateInterface } from './types';
<file_sep>import { useEffect } from 'react';
import { useModal } from '@app/src/features/common/modal';
import { SIGN_IN_MODAL } from '@app/src/domain/reducers/signInReducer/const';
import { SIGN_UP_MODAL } from '@app/src/domain/reducers/signupReducer/const';
// URL markers that trigger the corresponding auth modal on page load.
const SIGN_IN_URL = 'signIn';
const SIGN_UP_URL = 'signUp';
// Hook: on mount (and whenever `open` changes), open the sign-in or
// sign-up modal when the page URL contains the matching marker.
export const useAuthModalByUrl = () => {
  const { open } = useModal();
  useEffect(() => {
    // NOTE(review): despite the name, this is the full page URL,
    // not a popup identifier.
    const currentPopup = window.location.href;
    if (currentPopup.includes(SIGN_IN_URL)) {
      open(SIGN_IN_MODAL);
      return;
    }
    if (currentPopup.includes(SIGN_UP_URL)) {
      open(SIGN_UP_MODAL);
    }
  }, [open]);
};
<file_sep>const CHECKED_TEXT = 'Да';
const UNCHECKED_TEXT = 'Нет';
export const getCheckedText = (checked = false) =>
checked ? CHECKED_TEXT : UNCHECKED_TEXT;
<file_sep>import Container from './container';
import ContactsPage from './page';
export default Container(ContactsPage);
<file_sep>export { ResourcesMobile } from './ResourcesMobile';
<file_sep>export { OtherPartners } from './OtherPartners';
<file_sep>import { SanityBase } from '@app/src/domain/models/sanity/SanityBase';
import { Photo } from '../sanity/Photo';
export interface Hospital extends SanityBase {
logo?: Photo;
city?: string;
name?: string;
sortIndex?: number;
url?: string;
}
<file_sep>import { Partner } from '@app/src/domain/models/common/Partner';
import { Expert } from '@app/src/domain/models/common/Expert';
import { FormRequestType } from '@app/src/domain/models/common/FormRequestType';
import { User } from '@app/src/domain/models/common/User';
import { TagType } from '@app/src/domain/models/common/Tag';
import { NewsItem } from '@app/src/domain/models/common/NewsItem';
import { ArticlesItem } from '@app/src/domain/models/common/ArticlesItem';
import { ResourcesItem } from '@app/src/domain/models/common/ResourcesItem';
import { Hospital } from '@app/src/domain/models/common/Hospital';
import { HospitalsHelpWidgetData } from '@front/domain/models/common/HospitalsHelpWidgetData';
// Result of a successful file upload.
export interface UploadedFile {
  path: string; // storage path/URL of the uploaded file
}
// Contract for the backend API client used throughout the app.
export default interface ApiClient {
  token: string; // current auth token, if signed in
  // --- authentication ---
  login(login: string, password: string): Promise<User>;
  signUp(login: string, password: string, confirm: string): Promise<User>;
  currentUser(): Promise<User>;
  // --- forms & uploads ---
  sendFeedback(feedbackFields: any): Promise<any>;
  // onProgress receives the upload percentage (param name is a typo for
  // "percent"; renaming a type-level param is safe but kept for diff hygiene).
  uploadFile(
    file: File,
    onProgress?: (precent: number) => void,
  ): Promise<UploadedFile>;
  // --- autocomplete/search helpers ---
  searchDoctor(query: string): Promise<string[]>;
  searchClinic(query: string): Promise<string[]>;
  searchClinicByRegion(region: string, name: string): Promise<string[]>;
  saveCoronaRequestForm(data: any, type: FormRequestType): Promise<any>;
  updateCoronaRequestForm(data: any): Promise<any>;
  // --- content fetched from Sanity ---
  getPartners(): Promise<Partner[]>;
  getHospitalsHelpWidget(): Promise<HospitalsHelpWidgetData>;
  getExperts(): Promise<Expert[]>;
  getExpertBoard(): Promise<Expert[]>;
  getTags(): Promise<TagType[]>;
  getNews(query: string): Promise<NewsItem[]>;
  getNewsItem(query: string): Promise<NewsItem[]>;
  getArticles(query: string): Promise<ArticlesItem[]>;
  getArticlesItem(query: string): Promise<ArticlesItem[]>;
  getResources(): Promise<ResourcesItem[]>;
  getHospitals(): Promise<Hospital[]>;
}
<file_sep>import dayjs from 'dayjs';
import { getDateInSeconds } from '../getDateInSeconds';
// getDateInSeconds converts {year, month} to the timestamp of the first
// day of that month.
// NOTE(review): despite the name, dayjs().valueOf() is milliseconds.
describe('getDateInSeconds works', () => {
  test('should return a correct date', () => {
    expect(getDateInSeconds({ year: 2018, month: 1 })).toBe(
      dayjs('2018-1-1').valueOf(),
    );
  });
});
<file_sep>export { reducer as requestFormReducer } from './reducer';
export type { State as RequestFormReducerState } from './reducer';
export { saveCoronaRequestForm as saveRequestFormData } from './actions';
<file_sep>import store from 'store2';
import { formatFormData } from './formatFormData';
// Local-storage persistence for the request-form draft and its metadata.
const getKey = () => 'request_form';
export const REQUEST_FORM_FINISHED = 'REQUEST_FORM_FINISHED';
export const FORM_ID_KEY = 'FORM_ID_KEY';
// Curried to match the draft-saving callback signature expected by the
// form; the third store2 argument presumably enables overwrite — verify.
export const saveRequestFormDraft = () => (data: any) =>
  Promise.resolve(store.set(getKey(), data, true));
export const getRequestFormDraft = () => store.get(getKey()) || {};
// Backend id of the saved form, kept so it can be updated later.
export const setFormId = (id: string) => store.set(FORM_ID_KEY, id);
export const getFormId = () => store.get(FORM_ID_KEY);
export const resetFormId = () => store.remove(FORM_ID_KEY);
// Flag marking that the user completed the request form.
export const setFormRequestFinished = (arg = true) =>
  store.set(REQUEST_FORM_FINISHED, arg);
export const isFormRequestFinished = () => store.get(REQUEST_FORM_FINISHED);
export const resetRequestFormDraft = () => {
  store.remove(getKey());
};
// The stored draft normalized for submission to the backend.
export const getFormattedForm = () => {
  const result = formatFormData(getRequestFormDraft());
  return result || {};
};
<file_sep>import escapeStringRegexp from 'escape-string-regexp';
import { flatten } from 'lodash';
// Global regex with a capture group so String#split keeps the delimiter
// itself in the output.
const createDelimiterRegExp = (delimiter: string): RegExp =>
  new RegExp(`(${escapeStringRegexp(delimiter)})`, 'g');
// Split `text` on every delimiter in `by`, preserving the delimiters as
// separate fragments and dropping empty strings.
export const splitText = (text: string, by: string[]) => {
  const pieces = by.reduce(
    (acc, delimiter) => {
      const pattern = createDelimiterRegExp(delimiter);
      return flatten(acc.map((piece) => piece.split(pattern)));
    },
    [text],
  );
  return pieces.filter(Boolean);
};
<file_sep>import { FormComponentType } from '@app/src/features/common/form/FormConstructor';
import { InputType } from '@app/src/ui/Input';
// Declarative configuration for the "for hospitals" contact form; each
// step describes one field for the FormConstructor component.
export const formConfig = {
  steps: [
    {
      type: FormComponentType.Input,
      required: true,
      label: {
        text: 'Город',
      },
      props: {
        name: 'city',
      },
    },
    {
      type: FormComponentType.Input,
      required: true,
      label: {
        text: 'Больница',
      },
      props: {
        name: 'hospital',
      },
    },
    {
      type: FormComponentType.Input,
      required: true,
      label: {
        text: 'ФИО контактного лица',
      },
      props: {
        name: 'name',
      },
    },
    {
      type: FormComponentType.Input,
      required: true,
      label: {
        text: 'Должность',
      },
      props: {
        name: 'position',
      },
    },
    {
      type: FormComponentType.PhoneInput,
      required: true,
      label: {
        text: 'Телефон',
      },
      props: {
        name: 'phone',
        placeholder: '+7',
      },
    },
    {
      type: FormComponentType.Input,
      required: true,
      label: {
        text: 'Почта',
      },
      props: {
        name: 'email',
        type: InputType.Email,
      },
    },
    {
      type: FormComponentType.TextArea,
      required: true,
      label: {
        text: 'Чего не хватает?',
      },
      props: {
        name: 'what_is_needed',
      },
    },
  ],
};
<file_sep>export { SystemMain as default } from '@app/src/features/system';
<file_sep>interface Values {
defaultQuestions: { string: boolean };
}
export const MAX_QUESTIONS_COUNT = 5;
const ERROR_TEXT = `Пожалуйста, выберите не более ${MAX_QUESTIONS_COUNT} вопросов`;
export const validateCb = (_name: any, values: Values) => {
if (Object.keys(values).length > 0) {
const currentValues = Object.values(values.defaultQuestions);
const truhyValues = currentValues.filter((val) => !!val);
if (truhyValues.length > MAX_QUESTIONS_COUNT) {
throw new Error(ERROR_TEXT);
}
}
};
<file_sep>export const infoText = `
<div>
Беда которая пришла настоящий вызов для всего мира. Это удивительное и страшноватое время, в которое нам с вами довелось жить. Не каждому поколению выпадает такое.
<div>
<br/>
<div>
Единственное что мы действительно можем противопоставить этой беде — это объединение всех слоев общества вне зависимости от политических взглядов, веры или национальности. Только объединившись можно использовать общие ресурсы максимально рационально и эффективно. Присоединяйтесь.
</div>
`;
<file_sep>export { PaymentWidgetInput } from './PaymentWidgetInput';
<file_sep>import { NON_BREAKING_SPACE } from '@app/src/lib/config';
// Sister projects of the foundation, rendered in the site footer.
export const footerProjects = [
  {
    id: '10',
    text: `Системные проекты, которые меняют медицину России к${NON_BREAKING_SPACE}лучшему`,
    logo: '/static/images/footer/foundation_logo.png',
    href: 'https://nenaprasno.ru',
  },
  {
    id: '20',
    text: `Бесплатная справочная служба для онкологических пациентов`,
    logo: '/static/images/footer/covid-logo.png',
    href: 'https://ask.nenaprasno.ru',
  },
  {
    // NOTE(review): this template literal intentionally(?) embeds a raw
    // newline inside the text — confirm the footer renders it correctly.
    id: '30',
    text: `Просветительский портал о${NON_BREAKING_SPACE}профилактике и лечении онкологических и${NON_BREAKING_SPACE}
    других заболеваний`,
    logo: '/static/images/footer/media_logo.png',
    href: 'https://media.nenaprasno.ru',
  },
];
<file_sep>import { State } from '@app/src/lib/store';
// Select the sign-in error (if any) from the login slice; yields a falsy
// value when the slice or the error is absent.
export const getSignInError = (state: State) => {
  const { signIn } = state.login;
  return signIn && signIn.signInError;
};
<file_sep>import Container from './container';
import RegionSelect from './organisms/RegionSelect';
export default Container(RegionSelect);
<file_sep>export { Expert as default } from '@app/src/features/landing';
<file_sep>export type { FormContext } from './types/FormContext';
export { default } from './Form';
export type { RemoveSection } from './types/RemoveSection';
<file_sep>import { ErrorCode } from './erorCodes';
// A date-validation rule: receives timestamps and returns an error code,
// or undefined when the dates are acceptable.
export type Validator = (dates: number[]) => ErrorCode | undefined;
type DateType = number | string;
// A calendar date as collected by the form; day is optional.
export interface DateInterface {
  year: DateType;
  month: DateType;
  day?: DateType;
}
<file_sep>import { ReactNode } from 'react';
// Props for the NavLink component (Next.js Link wrapper).
export interface NavLinkProps {
  href: string | object; // route or Next.js URL object
  children: ReactNode;
  withoutUnderline?: boolean; // suppress the default underline style
  as?: string; // Next.js "as" path for dynamic routes
  blank?: boolean; // open in a new tab
  className?: string;
}
<file_sep>export { SystemFooterMenu } from './SystemFooterMenu';
<file_sep>import { SPACE } from '@app/src/lib/config';
// Step-by-step description of the hospital-aid pipeline, rendered as a
// numbered list; `text` may contain inline markup (NavLink).
export const helpHospitalsList = [
  {
    id: '1',
    text: `Мы получаем запрос от медучреждения на инструменты, расходные
    материалы.`,
  },
  {
    id: '2',
    text: `Вместе с сотрудниками медучреждения сопоставляем список с нашим
    чек-листом для реанимации, составленным вместе с членами
    ${SPACE}<NavLink href="/supervisory">наблюдательного совета</NavLink>.`,
  },
  {
    id: '3',
    text: `Корректируем и дополняем список нужд в соответствии с этим
    чек-листом.`,
  },
  {
    // FIX: the sentence ended with a stray double period ("..").
    id: '4',
    text: `Составляем смету и показываем её компании-донору.`,
  },
  {
    id: '5',
    text: `Получаем пожертвование от компании или собираем средства.`,
  },
  {
    id: '6',
    text: `Закупаем всё необходимое.`,
  },
  {
    id: '7',
    text: `Организуем доставку в клинику.`,
  },
];
<file_sep>const DEFAULT_TYPE = 'button';
const SUBMIT_TYPE = 'submit';
export const getButtonType = (submit: boolean) =>
submit ? SUBMIT_TYPE : DEFAULT_TYPE;
<file_sep>export { SubmitTooltip } from './SubmitTooltip';
<file_sep>import store from 'store2';
// localStorage key under which the visitor's e-mail is remembered.
const USER_KEY = 'covid_user';
export const setUserEmailLocalStorage = (email: string) => {
  store.set(USER_KEY, email);
};
export const resetUserEmailLocalStorage = () => {
  // FIX: remove the key outright instead of store.set(key, undefined);
  // this matches how resetFormId clears its key elsewhere in the codebase
  // and avoids relying on store2's handling of undefined values.
  store.remove(USER_KEY);
};
export const getUserEmailLocalStorage = () => store.get(USER_KEY);
<file_sep>import dayjs from 'dayjs';
import { ErrorCode } from './erorCodes';
import { Validator } from './types';
// Dates must not lie in the future: returns ErrorCode.FutureDate when any
// timestamp is past "now", undefined otherwise.
const checkForFutureDateError: Validator = (dates: number[]) => {
  const now = dayjs().valueOf();
  const hasFutureDate = dates.some((date) => date > now);
  return hasFutureDate ? ErrorCode.FutureDate : undefined;
};
export { checkForFutureDateError };
<file_sep>import { ACTIVE_AND_NOT_DRAFT_SANITY } from '@app/src/helpers/activeAndNotDraftSanity';
import { NewsCategoryType } from '../../models/common/NewsCategoryType';
import { CategoryType as ArticlesCategoryType } from '../../models/common/ArticlesCategoryType';
// For every news/article category, count the documents of that category
// that reference the tag (drives per-category tag counters in the UI).
const categoriesQuery = () => {
  return [
    ...Object.values(NewsCategoryType),
    ...Object.values(ArticlesCategoryType),
  ]
    .map(
      (category) =>
        `'${category}': count(*['${category}' in categories && references(^._id)]),`,
    )
    .join('');
};
// GROQ query: all active tags together with per-category, news and
// article usage counts.
export const tagsQuery = `*[_type == "tag" && ${ACTIVE_AND_NOT_DRAFT_SANITY}] {...,
'count': {${categoriesQuery()}},
'newsCount': count(*[_type == 'news' && references(^._id) && ^.status == true ]),
'articlesCount': count(*[_type == 'article' && references(^._id) && ^.status == true ])
}`;
<file_sep>import { SanityBase } from '@app/src/domain/models/sanity/SanityBase';
// Tag document from Sanity plus the usage counters computed by tagsQuery.
export interface TagType extends SanityBase {
  name: string;
  code: {
    current: string; // slug
  };
  sortIndex: number; // manual ordering weight
  status: boolean; // active flag
  newsCount: number; // number of news items referencing the tag
  articlesCount: number; // number of articles referencing the tag
  count: { [key: string]: number | undefined }; // per-category usage counts
}
<file_sep>import { Action } from 'redux';
import {
createFetchingSymbiote,
createInitialState,
FetchingActions,
FetchingState,
} from '@app/src/lib/symbioteFactory';
// Feedback-form slice: plain fetching state, no extra payload.
type State = FetchingState;
interface Actions extends FetchingActions {
  success(): Action;
}
const initialState = createInitialState({});
// Success simply clears the error/fetching flags.
const { actions, reducer } = createFetchingSymbiote<State, Actions>(
  initialState,
  (state: State) => ({
    ...state,
    error: false,
    fetching: false,
  }),
  'sendFeedback',
);
export { reducer, actions };
export type { State, Actions };
<file_sep>import { State } from '@app/src/lib/store';
import { ALL_CATEGORIES } from '../../models/common/NewsCategoryType';
// Which usage counter to inspect when filtering tags by content type.
export enum TagsType {
  News = 'newsCount',
  Articles = 'articlesCount',
}
// Select tags that have at least one entry of the given content type;
// when a concrete category is chosen, additionally require entries in
// that category.
export const selectTags = (tagsType: TagsType, category: string) => (
  state: State,
) => {
  const withContent = state.tags.list.filter((tag) => tag[tagsType] > 0);
  if (category === ALL_CATEGORIES) {
    return withContent;
  }
  return withContent.filter((tag) => (tag.count[category] || 0) > 0);
};
export const selectAllTags = (state: State) => state.tags.list;
// Canonical application routes used for navigation and links.
// NOTE(review): several members intentionally(?) share a path
// (expertBoard/expertBoardExpert, landingVolunteer/helpVolunteers) —
// confirm whether the duplicates are deliberate aliases.
export enum RouteType {
  landing = '/landing',
  landingRequest = '/landing/request',
  landingVolunteer = '/landing/help-volunteers',
  landingHelp = '/landing/help',
  landingHospitalAid = '/landing/hospital-aid',
  landingRequestChat = '/landing/request-chat',
  landingPartners = '/landing/partners',
  landingContacts = '/landing/contacts',
  expertBoard = '/supervisory',
  expertBoardExpert = '/supervisory',
  landingExperts = '/landing/experts',
  landingExpert = '/landing/expert',
  landingForHospitals = '/landing/for-hospitals',
  helpVolunteers = '/landing/help-volunteers',
  helpPartners = '/landing/help-partners',
  ask = '/ask',
  askAbout = '/ask#about',
  landingNews = '/landing/news',
}
<file_sep>const MAX_LENGTH = 19;
const BACKLASH = 5;
export const displayFileName = (fileName: string) => {
const underscorePosition = fileName.indexOf('_');
if (underscorePosition === -1) {
return fileName;
}
const position =
underscorePosition === -1 ? fileName.length - 16 : underscorePosition;
const realFileName = fileName.slice(position + 1);
if (realFileName.length < MAX_LENGTH) {
return realFileName;
}
const start = realFileName.slice(0, MAX_LENGTH - BACKLASH);
const end = realFileName.slice(realFileName.length - BACKLASH);
return `${start}...${end}`;
};
<file_sep>import { State } from '@app/src/lib/store';
// Select the auth-violation status flag from the sign-in slice.
export const getViolateState = (state: State) => {
  const { signIn } = state.login;
  return signIn.authViolateStatus;
};
<file_sep>import { ErrorCode } from '@app/src/lib/helpers/validateDates';
// Russian user-facing messages keyed by date-validation error code.
const errorMessagesMap = {
  [ErrorCode.MixedDateOrder]:
    'Дата окончания не может быть раньше, чем дата начала лечения',
  [ErrorCode.FutureDate]: 'Нельзя выбирать даты в будущем',
};
export { errorMessagesMap };
<file_sep>import * as yup from 'yup';
import cx from 'classnames';
import { FormComponentType } from '@front/features/common/form/FormConstructor';
import { InputType } from '@front/ui/Input';
// Which leading control a supply-group row gets in the hospital form.
enum standardInputType {
  daysRemaining = 'daysRemaining', // "enough for N days" number input
  count = 'count', // plain quantity input
  supplied = 'supplied', // yes/no toggle
  bedsCount = 'bedsCount', // number of provisioned beds
}
// Build one supply-group section of the hospital needs form: a primary
// input chosen by `type` (quantity / days remaining / beds / toggle)
// paired with a "supply problems expected?" toggle. `name` prefixes all
// field names; `level` is the heading level of the group title.
const standardInputGroup = (
  name,
  title,
  styles,
  type = standardInputType.daysRemaining,
  level = 2,
) => {
  let firstInput;
  // The switch is exhaustive over standardInputType, so firstInput is
  // always assigned for valid inputs.
  switch (type) {
    case standardInputType.count:
      firstInput = {
        type: FormComponentType.Number,
        className: cx(styles.small),
        label: {
          text: 'Количество',
        },
        props: {
          name: `${name}Count`,
          validate: yup.number(),
        },
      };
      break;
    case standardInputType.daysRemaining:
      firstInput = {
        type: FormComponentType.Number,
        className: styles.small,
        label: {
          text: 'На сколько суток хватит?',
        },
        props: {
          name: `${name}DaysRemaining`,
          validate: yup.number(),
        },
      };
      break;
    case standardInputType.bedsCount:
      firstInput = {
        type: FormComponentType.Number,
        className: cx(styles.small),
        label: {
          text: 'Количество обеспеченных коек',
        },
        props: {
          name: `${name}BedsCount`,
          validate: yup.number(),
        },
      };
      break;
    case standardInputType.supplied:
      firstInput = {
        type: FormComponentType.Toggle,
        className: cx(styles.small),
        label: {
          text: 'Обеспечены?',
        },
        props: {
          // The toggle variant uses the bare name with no suffix.
          name: `${name}`,
        },
      };
      break;
  }
  return {
    type: FormComponentType.FieldGroup,
    title,
    level,
    className: styles.rowInputGroup,
    children: [
      firstInput,
      {
        type: FormComponentType.Toggle,
        className: cx(styles.small, styles.thumblr),
        label: {
          text: 'Прогнозируете проблемы с поставкой?',
        },
        props: {
          name: `${name}ShortageExpected`,
        },
      },
    ],
  };
};
/**
 * Hospital supplies survey: declarative config for the FormConstructor.
 * Each top-level step is a titled field group; rows follow the standard
 * "primary input + shortage toggle" shape via standardInputGroup, and are
 * written out inline only where the field names diverge from that shape.
 */
export const formConfig = (styles) => ({
  steps: [
    {
      type: FormComponentType.FieldGroup,
      title: 'Средства индивидуальной защиты',
      className: styles.step,
      level: 1,
      children: [
        standardInputGroup('disposableSuits', 'Одноразовые костюмы', styles),
        standardInputGroup('disposableRobes', 'Одноразовые халаты', styles),
        standardInputGroup('glasses', 'Очки', styles),
        standardInputGroup('regularMasks', 'Маски обычные', styles),
        standardInputGroup('respiratorsFFP2', 'Респираторы FFP2', styles),
        standardInputGroup('respiratorsFFP3', 'Респираторы FFP3', styles),
        standardInputGroup(
          'shoeCovers',
          'Бахилы (высокие, хирургические)',
          styles,
        ),
        standardInputGroup('disposableGloves', 'Одноразовые перчатки', styles),
        standardInputGroup('antiseptics', 'Антисептики', styles),
      ],
    },
    {
      type: FormComponentType.FieldGroup,
      title: 'Оксигенотерапия и респираторная терапия',
      className: styles.step,
      level: 1,
      children: [
        standardInputGroup(
          'videoLaryngoscopes',
          'Видеоларингоскопы',
          styles,
          standardInputType.count,
        ),
        standardInputGroup(
          'oxygenConcentrators',
          'Кислородные концентраторы',
          styles,
          standardInputType.supplied,
        ),
        standardInputGroup(
          'regularNasalCannula',
          'Обычная носовая канюля',
          styles,
        ),
        standardInputGroup(
          'oxygenMask',
          'Кислородная лицевая маска с мешком (non-rebreather)',
          styles,
        ),
        standardInputGroup(
          'oxygenMoisturizers',
          'Увлажнители кислорода',
          styles,
        ),
        standardInputGroup(
          'nebulizersCompressors',
          'Небулайзеры-компрессоры',
          styles,
          standardInputType.count,
        ),
        standardInputGroup(
          'nebulizerAttachments',
          'Небулайзерные насадки',
          styles,
        ),
      ],
    },
    {
      type: FormComponentType.FieldGroup,
      title: 'Вентиляция легких',
      className: styles.step,
      level: 1,
      children: [
        {
          type: FormComponentType.FieldGroup,
          title: 'Рабочих аппаратов НИВЛ CPAP/BIPAP',
          level: 2,
          className: styles.rowInputGroup,
          children: [
            {
              type: FormComponentType.Number,
              className: cx(styles.small),
              label: {
                text: 'Количество аппаратов',
              },
              props: {
                name: `workingCPAP/BIPAPCount`,
                validate: yup.number(),
              },
            },
            {
              type: FormComponentType.Toggle,
              className: cx(styles.small, styles.thumblr),
              label: {
                text: 'Прогнозируете проблемы с поставкой?',
              },
              props: {
                name: `workingCPAP/BIPAPShortageExpected`,
              },
            },
          ],
        },
        standardInputGroup(
          'masksForCPAP/BIPAP',
          // Fixed typo: was 'аппараторв'.
          'Маски для аппаратов НИВЛ CPAP/BIPAP (однопатрубочных)',
          styles,
        ),
        {
          type: FormComponentType.FieldGroup,
          title: 'Рабочих аппаратов ИВЛ',
          level: 2,
          className: styles.rowInputGroup,
          children: [
            {
              type: FormComponentType.Number,
              className: styles.small,
              label: {
                text: 'Количество аппаратов',
              },
              props: {
                name: `workingVentilators`,
                validate: yup.number(),
              },
            },
            {
              type: FormComponentType.Toggle,
              className: cx(styles.small, styles.thumblr),
              label: {
                text: 'Прогнозируете проблемы с поставкой?',
              },
              props: {
                name: `workingVentilatorsShortageExpected`,
              },
            },
          ],
        },
        {
          type: FormComponentType.FieldGroup,
          // TODO(review): '<NAME>' looks like an unresolved placeholder —
          // restore the real group title (field names suggest "Мешки Амбу").
          title: '<NAME>',
          level: 2,
          className: styles.rowInputGroup,
          children: [
            {
              type: FormComponentType.Toggle,
              className: cx(styles.small),
              label: {
                text: 'Обеспечены?',
              },
              props: {
                name: `ambuBagsSwitcher`,
              },
            },
            {
              type: FormComponentType.Toggle,
              className: cx(styles.small, styles.thumblr),
              label: {
                text: 'Прогнозируете проблемы с поставкой?',
              },
              props: {
                name: `ambuBagsShortageExpected`,
              },
            },
          ],
        },
        standardInputGroup(
          'airCompressor',
          'Компрессоры сжатого воздуха',
          styles,
          standardInputType.bedsCount,
        ),
        {
          type: FormComponentType.FieldGroup,
          title: 'Концентраторы кислорода высокого давления',
          level: 2,
          className: styles.rowInputGroup,
          children: [
            {
              type: FormComponentType.Toggle,
              className: cx(styles.small),
              label: {
                text: 'Есть в наличии?',
              },
              props: {
                name: `highPressureOxygenConcentratorsSwitcher`,
              },
            },
            {
              type: FormComponentType.Toggle,
              className: cx(styles.small, styles.thumblr),
              label: {
                text: 'Прогнозируете проблемы с поставкой?',
              },
              props: {
                name: `highPressureOxygenConcentratorsShortageExpected`,
              },
            },
          ],
        },
        standardInputGroup(
          'punctureTracheostomy',
          'Пункционнные трахеостомы',
          styles,
        ),
        standardInputGroup(
          'tracheostomyTube',
          'Трахеостомические трубки',
          styles,
        ),
        standardInputGroup(
          'closedTBDsystems',
          'Закрытые системы для санации ТБД',
          styles,
        ),
        standardInputGroup(
          'TBDcatheter',
          'Обычные катетеры для санации ТБД',
          styles,
        ),
        standardInputGroup(
          'disposableBreathingCircuits',
          'Одноразовые дыхательные контуры',
          styles,
        ),
        standardInputGroup(
          'virusBacterialFilters',
          'Вирусно-бактериальные фильтры',
          styles,
        ),
        standardInputGroup(
          'heatMoistureExchangers',
          'Тепловлагообменники',
          styles,
        ),
        standardInputGroup(
          'breathingCircuitConnectors',
          'Уголковые коннекторы между контуром и эндотрахеальной трубкой',
          styles,
        ),
        standardInputGroup(
          'endotrachealTubes',
          'Эндотрахеальные трубки разных диаметров',
          styles,
        ),
        standardInputGroup(
          'intubationStylets',
          'Проводники для интубации',
          styles,
        ),
        standardInputGroup(
          'heliumPillows',
          'Гелевые подушки подлобные подгрудные для вентиляции в прон-позиции',
          styles,
          standardInputType.bedsCount,
        ),
      ],
    },
    {
      type: FormComponentType.FieldGroup,
      title: 'Инфузионная терапия',
      className: styles.step,
      level: 1,
      children: [
        standardInputGroup('IVBags', 'Капельницы', styles),
        standardInputGroup(
          'peripheralVenousCatheters',
          'Периферические венозные катетеры',
          styles,
        ),
        standardInputGroup(
          'centralVenousCatheters',
          'Центральные венозные катетеры',
          styles,
        ),
        standardInputGroup('threeWayStopcocks', 'Трехходовые краники', styles),
        standardInputGroup(
          'syringeInfusionPumps',
          'Инфузоматы шприцевые',
          styles,
          standardInputType.bedsCount,
        ),
        standardInputGroup(
          'peristalticInfusionPumps',
          'Инфузоматы перистальтические',
          styles,
          standardInputType.count,
        ),
        standardInputGroup(
          'infusionSyringes',
          'Шприцы для инфузоматов (50 мл)',
          styles,
        ),
        standardInputGroup('infusionLines', 'Инфузионные линии', styles),
        standardInputGroup(
          'infusionBags',
          'Капельницы для инфузоматов',
          styles,
        ),
      ],
    },
    {
      type: FormComponentType.FieldGroup,
      title: 'Мониторинг',
      className: styles.step,
      level: 1,
      children: [
        {
          type: FormComponentType.FieldGroup,
          title: 'Пульсоксиметры (портативные)',
          level: 2,
          className: styles.rowInputGroup,
          children: [
            {
              type: FormComponentType.Number,
              className: cx(styles.small),
              label: {
                text: 'Приблизительное количество',
              },
              props: {
                name: `portablePulseOximetersCount`,
                validate: yup.number(),
              },
            },
            {
              type: FormComponentType.Toggle,
              className: cx(styles.small, styles.thumblr),
              label: {
                text: 'Прогнозируете проблемы с поставкой?',
              },
              props: {
                name: `portablePulseOximetersShortageExpected`,
              },
            },
          ],
        },
        {
          type: FormComponentType.FieldGroup,
          title: 'Пульсоксиметры-мониторы',
          level: 2,
          className: styles.rowInputGroup,
          children: [
            {
              type: FormComponentType.Number,
              className: cx(styles.small),
              label: {
                text: 'Приблизительное количество',
              },
              props: {
                name: `pulseOximetersCount`,
                validate: yup.number(),
              },
            },
            {
              type: FormComponentType.Toggle,
              className: cx(styles.small, styles.thumblr),
              label: {
                text: 'Прогнозируете проблемы с поставкой?',
              },
              props: {
                name: `pulseOximetersShortageExpected`,
              },
            },
          ],
        },
        standardInputGroup(
          'capnographs',
          'Капнографы',
          styles,
          standardInputType.count,
        ),
        standardInputGroup(
          'simpleMonitors',
          'Простые мониторы (ниАД, ЭКГ, SpO2)',
          styles,
          standardInputType.count,
        ),
        standardInputGroup(
          'bloodGasAnalyzer',
          'Анализаторы газов крови',
          styles,
          standardInputType.count,
        ),
      ],
    },
    {
      type: FormComponentType.FieldGroup,
      title: 'Питание',
      className: styles.step,
      level: 1,
      children: [
        standardInputGroup(
          'enteralInfusionSystems',
          'Системы для инфузии энтерального питания',
          styles,
        ),
        standardInputGroup(
          'nasogastricTubes',
          'Назогастральные/дуоденальные зонды для питания',
          styles,
        ),
        standardInputGroup(
          'peristalticEnteralInfusionPumps',
          'Инфузоматы перистальтические для энтерального питания',
          styles,
          standardInputType.count,
        ),
      ],
    },
    {
      type: FormComponentType.FieldGroup,
      title: 'Другое',
      className: styles.step,
      level: 1,
      children: [
        standardInputGroup('bandages', 'Бинты', styles),
        standardInputGroup('bandAids', 'Пластыри', styles),
        standardInputGroup('stickers', 'Наклейки', styles),
        standardInputGroup('foleyCatheters', 'Катетеры Фолея', styles),
        standardInputGroup('urineBags', 'Мочеприемники', styles),
      ],
    },
    {
      type: FormComponentType.FieldGroup,
      title:
        'Оставьте контактные данные, чтобы мы могли с вами связаться для консультации и возможного оказания помощи',
      className: cx(styles.step, styles.contacts),
      level: 1,
      children: [
        {
          type: FormComponentType.Input,
          required: true,
          label: {
            text: 'Город',
          },
          props: {
            name: 'city',
          },
        },
        {
          type: FormComponentType.Input,
          required: true,
          label: {
            text: 'Больница',
          },
          props: {
            name: 'hospital',
          },
        },
        {
          type: FormComponentType.Input,
          required: true,
          label: {
            text: 'ФИО контактного лица',
          },
          props: {
            name: 'name',
          },
        },
        {
          type: FormComponentType.Input,
          required: true,
          label: {
            text: 'Должность',
          },
          props: {
            name: 'position',
          },
        },
        {
          type: FormComponentType.PhoneInput,
          required: true,
          label: {
            text: 'Мобильный телефон',
          },
          props: {
            name: 'phone',
            placeholder: '+7',
          },
        },
        {
          type: FormComponentType.Input,
          required: true,
          label: {
            text: 'Почта',
          },
          props: {
            name: 'email',
            type: InputType.Email,
          },
        },
        {
          type: FormComponentType.TextArea,
          required: true,
          label: {
            text: 'Комментарий',
          },
          props: {
            name: 'comment',
          },
        },
      ],
    },
  ],
});
<file_sep>const fs = require('fs');
const path = require('path');
const fetch = require('node-fetch');
const util = require('util');
const stream = require('stream');
const pipeline = util.promisify(stream.pipeline);
const exists = util.promisify(fs.exists);
const mkdir = util.promisify(fs.mkdir);
const root = path.resolve(__dirname, 'public', 'content');
module.exports = async (req, res) => {
try {
const filePath = path.resolve(root, req.params[0]);
if (await exists(filePath)) {
return res.sendFile(filePath);
}
const [urlQuery, ...urlPath] = req.params[0].split('/');
const apiUrl = `https://cdn.sanity.io/${urlPath.join('/')}?${urlQuery}`;
await mkdir(path.dirname(filePath), { recursive: true });
const fileStream = fs.createWriteStream(filePath);
const image = await fetch(apiUrl);
await pipeline(image.body, fileStream);
res.sendFile(filePath);
} catch (e) {
res.status(500);
res.end(e);
}
};
<file_sep>import { getClientSteps } from './getClientSteps';
import { getDisabledSteps } from './getDisabledSteps';
import { getManagerSteps } from './getManagerSteps';
// Progress-bar variants; each kind maps to its own step-building function
// (see getStepsFactory below).
export enum ProgressBarKind {
  Client = 'Client',
  Disabled = 'Disabled',
  Manager = 'Manager',
}
// Maps each progress-bar kind to its step-building function.
const stepBuilders = {
  [ProgressBarKind.Client]: getClientSteps,
  [ProgressBarKind.Disabled]: getDisabledSteps,
  [ProgressBarKind.Manager]: getManagerSteps,
};

// Returns the step builder for the given kind.
const getStepsFactory = (kind: ProgressBarKind) => stepBuilders[kind];

export { getStepsFactory };
<file_sep>declare module 'plural-ru' {
export default function pluralise(
count: number,
first: string,
second: string,
third: string = second,
): string;
}
<file_sep>import { ClearAction, createClearRedux } from 'redux-clear';
// Sentinel meaning "no modal is currently open".
export const EMPTY_MODAL = 'empty';
// The whole state is just the key of the currently open modal.
type State = string;
interface Actions {
  open: ClearAction<[string]>;
  close: ClearAction;
}
// `open` stores the requested modal key; `close` resets to the sentinel.
const { actions, reducer } = createClearRedux<State, Actions>(
  {
    open: () => (key) => key,
    close: () => () => EMPTY_MODAL,
  },
  EMPTY_MODAL,
  'modals',
);
export { reducer, actions };
export type { State, Actions };
<file_sep>import {
applyMiddleware,
combineReducers,
createStore,
Store as ReduxStore,
} from 'redux';
import { composeWithDevTools } from 'redux-devtools-extension';
import thunk from 'redux-thunk';
import windowSize, { REDUCER_KEY } from 'redux-windowsize';
export type Store = ReduxStore<State>;
import {
notFoundMiddleware,
reducer as notFoundReducer,
State as NotFoundState,
} from '@app/src/features/common/notFound';
import {
reducer as modalReducer,
State as ModalState,
} from '@app/src/features/common/modal';
import {
reducer as loginReducer,
State as LoginState,
} from '@app/src/domain/reducers/loginReducer/reducer';
import {
reducer as sendFeedbackReducer,
State as SendFeedbackState,
} from '@app/src/features/landing/features/contacts/organisms/FeedbackForm';
import {
requestFormReducer,
RequestFormReducerState,
} from '@app/src/domain/reducers/requestConsultationReducer';
import {
reducer as browserQueryReducer,
State as BrowserQueryState,
} from '@app/src/features/common/browserQuery';
import {
reducer as partnerReducer,
State as PartnerState,
} from '@app/src/domain/reducers/partnerReducer/reducer';
import {
reducer as hospitalsHelpWidgetReducer,
State as HospitalsHelpWidgetState,
} from '@app/src/domain/reducers/hospitalsHelpWidgetReducer/reducer';
import {
reducer as expertReducer,
State as ExpertState,
} from '@app/src/domain/reducers/expertReducer/reducer';
import {
reducer as expertBoardReducer,
State as ExpertBoardState,
} from '@app/src/domain/reducers/expertBoardReducer/reducer';
import {
reducer as paymentWidgetReducer,
State as PaymentWidgetState,
} from '@app/src/domain/reducers/paymentWidgetReducer/';
import * as tags from '@app/src/domain/reducers/tagsReducer';
import * as news from '@app/src/domain/reducers/newsReducer';
import * as articles from '@app/src/domain/reducers/articlesReducer';
import * as resources from '@app/src/domain/reducers/resourcesReducer';
import * as hospitals from '@app/src/domain/reducers/hospitalsReducer';
import ApiClient from './api/ApiClient';
import factory from './api/apiFactory';
import { getToken } from '../domain/reducers/userReducer';
import { unauthorizedMiddleware } from '../domain/reducers/signInReducer/middleware';
/** Root Redux state shape for the application. */
export interface State {
  browserQuery: BrowserQueryState;
  modal: ModalState;
  feedback: SendFeedbackState;
  notFound: NotFoundState;
  login: LoginState;
  requestForm: RequestFormReducerState;
  partners: PartnerState;
  experts: ExpertState;
  expertBoard: ExpertBoardState;
  paymentWidget: PaymentWidgetState;
  tags: tags.State;
  news: news.State;
  articles: articles.State;
  resources: resources.State;
  hospitals: hospitals.State;
  hospitalsHelpWidget: HospitalsHelpWidgetState;
}
// NOTE(review): cast to `any` because the literal also registers
// redux-windowsize under REDUCER_KEY, which is absent from the `State`
// interface above — consider adding it there instead of casting.
const reducer = combineReducers({
  browserQuery: browserQueryReducer,
  requestForm: requestFormReducer,
  login: loginReducer,
  modal: modalReducer,
  [REDUCER_KEY]: windowSize,
  feedback: sendFeedbackReducer,
  notFound: notFoundReducer,
  partners: partnerReducer,
  experts: expertReducer,
  expertBoard: expertBoardReducer,
  paymentWidget: paymentWidgetReducer,
  hospitalsHelpWidget: hospitalsHelpWidgetReducer,
  tags: tags.reducer,
  news: news.reducer,
  articles: articles.reducer,
  resources: resources.reducer,
  hospitals: hospitals.reducer,
} as any);
/** Extra arguments injected into thunks. */
export interface ExtraArgs {
  // NOTE(review): `api` is declared here but initializeStore below injects
  // only `getApi` — confirm whether `api` is provided elsewhere or is dead.
  api: ApiClient;
  getApi: (getState: () => State) => ApiClient;
}
// Builds the store with devtools, app middleware and thunk; each thunk
// obtains a token-aware API client via getApi(getState).
export const initializeStore = (initialState?: State) =>
  createStore(
    reducer,
    initialState,
    composeWithDevTools(
      applyMiddleware(
        unauthorizedMiddleware,
        notFoundMiddleware,
        thunk.withExtraArgument({
          getApi: (getState) => factory(getToken(getState())),
        } as ExtraArgs),
      ),
    ),
  );
<file_sep>// Redux action-type constants for donation/payment form fields
// (presumably consumed by the payment widget reducer — verify).
export const SET_FREQUENCY = 'SET_FREQUENCY';
export const SET_COST = 'SET_COST';
export const SET_TARGET = 'SET_TARGET';
export const SET_NAME = 'SET_NAME';
export const SET_SURNAME = 'SET_SURNAME';
export const SET_EMAIL = 'SET_EMAIL';
export const SET_OTHER_COST = 'SET_OTHER_COST';
<file_sep>import { NewsItemPage } from '@app/src/features/landing/features/news/newsItemPage';
export default NewsItemPage;
<file_sep>import { Dispatch } from 'redux';
import { needToFetch } from '@app/src/helpers/needToFetch';
import { ExtraArgs, State } from '@app/src/lib/store';
import { selectAllTags } from '../../tagsReducer/selectTags';
import { articlesListRequestBuilder } from '../helpers/articlesListRequestBuilder';
import { ArticlesFetchParams } from './config';
import { actions } from './reducer';
import { selectArticlesWithParams } from './selectArticles';
/**
 * Thunk: loads an articles page from Sanity, unless the store already
 * holds a fresh copy for these params. Dispatches request/success/error.
 */
export const getArticlesFromSanity = (params: ArticlesFetchParams) => async (
  dispatch: Dispatch<any>,
  getState: () => State,
  { getApi }: ExtraArgs,
) => {
  const cached = selectArticlesWithParams(params)(getState());
  if (!needToFetch(cached)) {
    return;
  }
  const api = getApi(getState);
  try {
    dispatch(actions.request());
    const tags = selectAllTags(getState());
    const articles = await api.getArticles(
      articlesListRequestBuilder(params, tags),
    );
    return dispatch(actions.success(articles, params));
  } catch (error) {
    return dispatch(actions.error(error.message));
  }
};
<file_sep>import { actions, reducer } from './reducer';
export { reducer, actions };
export type { State, Actions } from './reducer';
export { getToken, getRoles } from './selectors';
export { currentUser } from './actions';
// Convenience re-export of the token-setting action creator.
export const { setToken } = actions;
<file_sep>import { ACTIVE_AND_NOT_DRAFT_SANITY } from '@app/src/helpers/activeAndNotDraftSanity';
// Builds the GROQ query fetching one active, non-draft news item by slug,
// with its tag references resolved.
// NOTE(review): `code` is interpolated into the query unescaped — a quote
// in it would break or alter the query; confirm callers only pass
// route-derived slugs, never free user input.
export const newsItemRequestBuilder = (code: string) => {
  return `*[_type == 'news' && ${ACTIVE_AND_NOT_DRAFT_SANITY} && code.current == '${code}']
  {
    ...,
    'tags': tags[]->
  }`;
};
<file_sep>import { Action } from 'redux';
import { createSymbiote } from 'redux-symbiote';
// Tracks whether the current page's data was found; drives 404 rendering.
export interface State {
  found: boolean;
}
// Optimistic default: assume found until something reports otherwise.
const initial = {
  found: true,
};
interface Actions {
  set: (found: boolean) => Action;
}
export const { actions, reducer } = createSymbiote<State, Actions>(
  initial,
  {
    set: (state: State, found: boolean) => ({
      ...state,
      found,
    }),
  },
  'not-found-state',
);
<file_sep>import axios, { AxiosInstance } from 'axios';
import { FormRequestType } from '@app/src/domain/models/common/FormRequestType';
import { User } from '@app/src/domain/models/common/User';
import { getFromConfig } from '@app/src/helpers/getPublicRuntimeConfig';
import { ACTIVE_AND_NOT_DRAFT_SANITY } from '@app/src/helpers/activeAndNotDraftSanity';
import { tagsQuery } from '@app/src/domain/reducers/tagsReducer/config';
import ApiClient, { UploadedFile } from './ApiClient';
import { queryString } from './helper/queryString';
import { SendFeedbackRequest } from './request/SendFeedback';
import { sanityClient } from '../sanity-client';
/**
 * HTTP API client backed by axios. Talks to two backends:
 *  - the application API (`axiosInstance`), authenticated with an optional
 *    Bearer token supplied to the constructor;
 *  - a proxy endpoint (`apiProxyInstance`) that forwards raw GROQ query
 *    strings (passed as the request "URL") to Sanity.
 */
export default class RealApiClient implements ApiClient {
  private readonly axiosInstance: AxiosInstance;
  private readonly apiProxyInstance: AxiosInstance;
  // NOTE(review): starts empty and is only changed via the `token` setter —
  // the constructor's `token` argument feeds the auth header but is never
  // stored here; confirm that is intended.
  private _token = '';
  public constructor(baseUrl: string, token?: string) {
    // NOTE(review): tokens of length exactly 1 are ignored (`> 1`) —
    // presumably a guard against junk values; verify.
    const bearer = !!token && token.length > 1 ? `Bearer ${token}` : null;
    this.axiosInstance = axios.create({
      baseURL: baseUrl,
      headers: {
        Authorization: bearer,
      },
    });
    this.apiProxyInstance = axios.create({
      baseURL: getFromConfig('prodUrl') + '/api/query/',
    });
  }
  public get token() {
    return this._token;
  }
  public set token(newToken: string) {
    this._token = newToken;
  }
  // --- Auth & user -------------------------------------------------------
  public login = (login: string, password: string) =>
    this.axiosInstance
      .post('/auth/login', { login, password })
      .then((response) => response.data as User);
  public signUp = (login: string, password: string, confirm: string) =>
    this.axiosInstance
      .post('/auth/register', {
        email: login,
        password,
        confirm,
      })
      .then((response) => response.data as User);
  public currentUser = () =>
    this.axiosInstance
      .get('/users/current')
      .then((response) => response.data as User);
  // --- Feedback & files --------------------------------------------------
  public sendFeedback = (feedback: SendFeedbackRequest) =>
    this.axiosInstance
      .post('/feedback/send', feedback)
      .then((response) => response.data as SendFeedbackRequest);
  // Multipart upload; reports progress as a 0–100 percentage.
  public uploadFile = async (
    file: File,
    onProgress?: (precent: number) => void,
  ) => {
    const form = new FormData();
    form.append('file', file);
    const response = await this.axiosInstance.post('/file/upload', form, {
      headers: { 'Content-Type': 'multipart/form-data' },
      onUploadProgress: ({ loaded, total }) =>
        onProgress && onProgress((loaded / total) * 100),
    });
    return response.data as UploadedFile;
  };
  // --- Reference lookups -------------------------------------------------
  public searchDoctor = (query: string) =>
    this.axiosInstance
      .get(`/base/doctors?${queryString({ query })}`)
      .then((res) => res.data as string[]);
  public searchClinic = (query: string) =>
    this.axiosInstance
      .get(`/base/hospitals?${queryString({ query })}`)
      .then((res) => res.data as string[]);
  public searchClinicByRegion = (region: string, name: string) => {
    return this.axiosInstance
      .get(
        `/base/hospitals-by-region?${queryString({
          region,
          name,
        })}`,
      )
      .then((res) => res.data as any[]);
  };
  // --- Request forms -----------------------------------------------------
  public saveCoronaRequestForm = (data: any, type: FormRequestType) =>
    this.axiosInstance
      .post('/form/save', {
        type: type,
        fields: data,
      })
      .then((res) => res.data as any);
  public updateCoronaRequestForm = (data: any) =>
    this.axiosInstance
      .post('/form/update', data)
      .then((res) => res.data as any);
  // --- Sanity content (GROQ queries through the proxy) --------------------
  public getPartners = () => {
    return this.apiProxyInstance
      .get(`*[_type == "partner" && ${ACTIVE_AND_NOT_DRAFT_SANITY}]`)
      .then((res) => res.data);
  };
  public getHospitalsHelpWidget = () => {
    return this.apiProxyInstance
      .get('*[_type == "hospitalsHelpWidget"][0]')
      .then((res) => res.data);
  };
  public getExperts = () => {
    return this.apiProxyInstance
      .get(`*[_type == "expert" && ${ACTIVE_AND_NOT_DRAFT_SANITY}]`)
      .then((res) => res.data);
  };
  public getExpertBoard = () => {
    return this.apiProxyInstance
      .get(`*[_type == "expertBoard" && ${ACTIVE_AND_NOT_DRAFT_SANITY}]`)
      .then((res) => res.data);
  };
  public getTags = () => {
    return this.apiProxyInstance.get(tagsQuery).then((res) => res.data);
  };
  // The following take a fully built GROQ query from the caller.
  public getNews = (query: string) => {
    return this.apiProxyInstance.get(query).then((res) => res.data);
  };
  public getNewsItem = (query: string) => {
    return this.apiProxyInstance.get(query).then((res) => res.data);
  };
  public getArticles = (query: string) => {
    return this.apiProxyInstance.get(query).then((res) => res.data);
  };
  public getArticlesItem = (query: string) => {
    return this.apiProxyInstance.get(query).then((res) => res.data);
  };
  public getResources = () => {
    return this.apiProxyInstance
      .get(`*[_type == "resource" && ${ACTIVE_AND_NOT_DRAFT_SANITY}]`)
      .then((res) => res.data);
  };
  public getHospitals = () => {
    return this.apiProxyInstance
      .get(
        `*[_type == "hospital" && ${ACTIVE_AND_NOT_DRAFT_SANITY}] | order(sortIndex desc)`,
      )
      .then((res) => res.data);
  };
}
<file_sep>// Ambient declaration for a global `cp` object — presumably injected by an
// external script at runtime (left untyped; tighten if its shape is known).
declare const cp: any;
<file_sep>export { Button } from './Button';
export { ButtonSize } from './ButtonSize';
export type { ButtonProps } from './ButtonProps';
export { ButtonKind } from './ButtonKind';
<file_sep>export { ForHospitals as default } from '@app/src/features/system/for-hospitals';
<file_sep>// Option descriptor for a button/button-group control.
export default interface Button {
  value: string; // underlying value of the option
  text?: string; // plain-text label
  component?: React.ReactNode; // custom label content (presumably instead of text — verify)
  count?: number; // optional counter shown with the option
}
<file_sep>// Article material categories as stored in Sanity.
export enum CategoryType {
  ClinicalRecommends = 'clinicalRecommends',
  Webinar = 'webinar',
  Article = 'article',
}
// Pseudo-category meaning "no category filter".
export const ALL_CATEGORIES = 'all_categories';
// Russian display label for an article category filter value
// (undefined for unknown categories, as before).
export const getArticleCategoryText = (category: string) => {
  const labels = {
    [CategoryType.Article]: 'Статьи и переводы',
    [CategoryType.Webinar]: 'Вебинары',
    [CategoryType.ClinicalRecommends]: 'Клинические рекомендации',
    [ALL_CATEGORIES]: 'Все материалы',
  };
  return labels[category];
};
<file_sep>import { EMPTY_MODAL } from '../reducer';
import { ModalDispatcher } from './ModalDispatcher';
import UnknownModalException from './UnknownModalException';
/**
 * Returns true when `modal` names a modal that should be shown (i.e. any
 * registered key other than the EMPTY_MODAL sentinel). Throws
 * UnknownModalException for keys not registered in the dispatcher.
 */
export const shouldOpenModal = (modal: string) => {
  const registered = ModalDispatcher.getInstance().keys;
  const isKnown = modal === EMPTY_MODAL || registered.includes(modal);
  if (!isKnown) {
    throw new UnknownModalException(modal);
  }
  return modal !== EMPTY_MODAL;
};
<file_sep>import { SanityBase } from '@app/src/domain/models/sanity/SanityBase';
import { Photo } from '../sanity/Photo';
// A "useful resource" entry from Sanity: an external link with a logo.
export interface ResourcesItem extends SanityBase {
  status: boolean; // active/published flag (presumably — verify against schema)
  sortIndex: number; // manual ordering weight
  name: string;
  url: string;
  logo: Photo;
}
<file_sep>export { SerializerLink } from './SerializerLink';
<file_sep>export { LinksList } from './LinksList';
<file_sep>export { RequestChat } from './RequestChat';
<file_sep>export const getShouldValidate = ({
active,
submitFailed,
touched,
pristine,
eagerValidation,
}: any) => {
if (submitFailed || eagerValidation) {
return true;
}
if (active) {
return false;
}
return touched && !pristine;
};
<file_sep>import { stringify } from 'query-string';
import { Option } from 'tsoption';
import { AppContext } from '@app/src/lib/server-types';
import NextRoutes from '../../../routes';
// Server-side navigation: responds with a 302 to the route plus the
// stringified query.
const redirectOnServer = (route: string, context: any, query: any) => {
  const sq = stringify(query);
  context.res.writeHead(302, { Location: `${route}?${sq}` });
  context.res.end();
};
/**
 * Navigates to `route`: an HTTP 302 redirect when a server-side context is
 * present, otherwise a client-side Next.js route push.
 */
export const pushRoute = async (
  route: string,
  context: Option<AppContext<any>> = Option.of(null),
  options: any = {},
): Promise<void> => {
  if (context.nonEmpty()) {
    return redirectOnServer(route, context.get(), options.query);
  }
  return NextRoutes.Router.pushRoute(route, options);
};
<file_sep>import { ErrorCode } from './erorCodes';
import { Validator } from './types';
// Validates chronological order: the end date (dates[1]) must not precede
// the start date (dates[0]); returns an error code otherwise.
const compareDates: Validator = (dates) => {
  const [start, end] = dates;
  if (end >= start) {
    return undefined;
  }
  return ErrorCode.MixedDateOrder;
};
<file_sep>import { compose } from 'recompose';
import withFinalForm from '@app/src/features/common/formHOCs/withFinalForm';
import withTooltip from '@app/src/features/common/formHOCs/withTooltip';
// HOC factory: wires a field component into final-form and adds tooltip
// error display, preserving the caller-supplied prop types.
export default <Before, After>() =>
  compose<Before, After>(withFinalForm, withTooltip);
<file_sep>import { useRouter } from 'next/router';
import { useEffect } from 'react';
import { useScrollBodyLock } from './scroll-lock/useScrollBodyLock';
// Releases any body scroll lock whenever the route path changes (e.g. a
// modal locked scrolling and navigation happened before it unlocked).
export const useResetScrollLock = () => {
  const { asPath } = useRouter();
  const { unlock } = useScrollBodyLock();
  useEffect(() => {
    unlock();
    // NOTE(review): `unlock` is omitted from the deps array; fine if its
    // identity is stable across renders — verify in useScrollBodyLock.
  }, [asPath]);
};
<file_sep>export { push } from './push';
<file_sep>// Modal-registry key for the sign-in dialog.
export const SIGN_IN_MODAL = 'signIn';
<file_sep>export { Conclution } from './Conclution';
<file_sep>import { State } from '@app/src/lib/store';
// Selector: the loaded list of partners.
export const selectPartners = (state: State) => state.partners.list;
<file_sep>type QueryValue = Date | number | string | null | undefined;
interface Query {
[key: string]: QueryValue;
}
type UrlValue = Date | number | string;
interface UrlParameter {
[key: string]: UrlValue;
}
const urlValueToString = (value: UrlValue): string => {
if (value instanceof Date) {
return value.toISOString();
}
if (typeof value === 'number') {
return value.toString();
}
return value;
};
export const queryString = (query: Query = {}) =>
Object.entries(query)
.filter(([_, value]) => value !== undefined && value !== null)
.map(([key, value]) => ({ key, value }))
.map((parameter) => parameter as UrlParameter)
.map(({ key, value }) => ({ key, value: urlValueToString(value) }))
.map(({ key, value }) => ({ key, value: encodeURIComponent(value) }))
.map(({ key, value }) => `${key}=${value}`)
.join('&');
<file_sep>import { isDate } from '../isDate';
// Unit tests for isDate: dotted/slashed numeric dates (and anything the
// runtime Date parser accepts) are dates; phone numbers, times, URLs and
// arbitrary strings are not.
describe('isDate', () => {
  test('should return true for regular date', () => {
    const date = '06.16.2019';
    expect(isDate(date)).toBeTruthy();
  });
  test('should return true for date with /', () => {
    const date = '06/16/2019';
    expect(isDate(date)).toBeTruthy();
  });
  test('should return true for with short year', () => {
    const date = '06.16.19';
    expect(isDate(date)).toBeTruthy();
  });
  test('should return true for date without year', () => {
    const date = '06.16';
    expect(isDate(date)).toBeTruthy();
  });
  test('should return true for reverse date', () => {
    const date = '2019.04.17';
    expect(isDate(date)).toBeTruthy();
  });
  test('should return true for reverse date with short year', () => {
    const date = '19.04.17';
    expect(isDate(date)).toBeTruthy();
  });
  test('should return false for regular string', () => {
    const date = 'hello';
    expect(isDate(date)).toBeFalsy();
  });
  test('should return false for empty string', () => {
    const date = '';
    expect(isDate(date)).toBeFalsy();
  });
  test('should return false for telephone number', () => {
    const date = '89211234567';
    expect(isDate(date)).toBeFalsy();
  });
  test('should return false for telephone number starts on +', () => {
    const date = '+79211234567';
    expect(isDate(date)).toBeFalsy();
  });
  test('should return false for telephone number with dashes', () => {
    const date = '+7-921-123-45-67';
    expect(isDate(date)).toBeFalsy();
  });
  test('should return false for time', () => {
    const date = '14:56';
    expect(isDate(date)).toBeFalsy();
  });
  test('should return false for url', () => {
    const date = 'http://google.com';
    expect(isDate(date)).toBeFalsy();
  });
});
<file_sep>import { Action } from 'redux';
import {
createFetchingSymbiote,
createInitialState,
FetchingActions,
FetchingState,
} from '@app/src/lib/symbioteFactory';
import { ArticlesItem } from '../../../models/common/ArticlesItem';
import { ArticlesFetchParams } from './config';
import { getQueryKey, getPageKeyFromParams } from './query';
// Paged articles cache: `list.key` identifies the current query (filter
// params); `list.pages` maps a page key to that page's items.
interface State extends FetchingState {
  list: {
    key?: string;
    pages: { [pageKey: string]: ArticlesItem[] | undefined };
  };
}
interface Actions extends FetchingActions {
  success(articles: ArticlesItem[], queryParams: ArticlesFetchParams): Action;
}
const initialState = createInitialState({
  list: { key: undefined, pages: {} },
});
// On success: a changed query key replaces the whole cache with just this
// page; the same key accumulates pages alongside the existing ones.
const { actions, reducer } = createFetchingSymbiote<State, Actions>(
  initialState,
  (state: State, articles: ArticlesItem[], params: ArticlesFetchParams) => {
    const key = getQueryKey(params);
    const shouldReplace = state.list.key !== key;
    const pageKey = getPageKeyFromParams(params);
    if (shouldReplace) {
      return { ...state, list: { key, pages: { [pageKey]: articles } } };
    }
    return {
      ...state,
      list: {
        key,
        pages: { ...(state.list.pages || {}), [pageKey]: articles },
      },
    };
  },
  'articles',
);
export { reducer, actions };
export type { State, Actions };
<file_sep>export { NewsItemContent } from './NewsItemContent';
<file_sep>export { default } from './Uploader';
export { default as FormFileInput } from './FormFileInput';
<file_sep>// Footer color themes.
enum FooterTheme {
  Default = 'default',
  White = 'white',
}
export default FooterTheme;
<file_sep>import { size } from 'lodash';
import { getDateInSeconds } from './getDateInSeconds';
import { getValidators } from './getValidators';
import { DateInterface } from './types';
const validateDates = (dates: DateInterface[]) => {
let errorMessage;
const datesInSeconds = dates.map(getDateInSeconds);
getValidators(size(dates)).some((validator) => {
const errorCode = validator(datesInSeconds);
if (errorCode) {
errorMessage = errorCode;
return true;
}
return false;
});
return errorMessage;
};
export { validateDates };
<file_sep>export { Symptoms } from './Symptoms';
<file_sep>import { IconsSprite } from './IconsSprite';
export { IconsList } from './IconsList';
export { IconsSprite as Sprite };
<file_sep>// Ambient declaration for the global `ym` function — presumably the
// Yandex.Metrika counter injected by its embed script (verify).
declare const ym: any;
<file_sep>export const isDate = (date: string) => {
if (/\d{2,4}\.\d{2}\.\d{2,4}/.test(date)) {
return true;
}
if (!isNaN(new Date(date).valueOf())) {
return true;
}
return false;
};
<file_sep>// Button size presets, largest to smallest.
export enum ButtonSize {
  ExtraLarge,
  Large,
  Medium,
  Small,
}
<file_sep>export { default as Home } from './features/home';
export { default as Partners } from './features/partners';
export { default as Contacts } from './features/contacts';
export { default as Experts, Expert } from './features/experts';
export { default as Request } from './features/request';
export { VolunteerPage } from '../system/volunteer';
export { ChecklistPage } from '../system/checklist';
export { BecomePartnerPage } from './features/become-partner';
export { HospitalAidPage } from './features/hospital-aid';
export { default as RequstChatPage } from './features/request-chat/RequestChatPage';
export { Docs } from '../system/docs';
<file_sep>export { HospitalsHelpWidget } from './HospitalsHelpWidget';
<file_sep>export type RemoveSection = (key: number, name: string) => () => void;
<file_sep>export { NewsCard } from './NewsCard';
<file_sep>import { Action } from 'redux';
import {
createFetchingSymbiote,
createInitialState,
FetchingActions,
FetchingState,
} from '@app/src/lib/symbioteFactory';
// State slice for the "saveRequestForm" fetching symbiote: generic
// fetching flags plus the last successfully saved payload.
interface State extends FetchingState {
// NOTE(review): typed `any`, but the success action receives a string —
// confirm and tighten if callers only ever pass strings.
requestFormData?: any;
}
interface Actions extends FetchingActions {
success(requestFormData: string): Action;
}
const initialState = createInitialState({
requestFormData: undefined,
});
// The success reducer stores the fetched payload alongside the generic
// start/success/fail flags provided by the factory.
const { actions, reducer } = createFetchingSymbiote<State, Actions>(
initialState,
(state: State, requestFormData: string) => {
return {
...state,
requestFormData,
};
},
'saveRequestForm',
);
export { reducer, actions };
export type { State, Actions };
| d1b69ad5cb863514f24a5c4adb42dc27e2b76252 | [
"JavaScript",
"TypeScript",
"Dockerfile",
"Markdown"
] | 365 | TypeScript | breadhead/covid-web | 0b0f4854b5c483515a9a555df4868daa1412edff | 2065ba8f8094c6f0cc46f25acff5d9c4ebea328f |
refs/heads/master | <file_sep>import { toast } from './toast'
import * as firebase from 'firebase'
const config = {
apiKey: "<KEY>",
authDomain: "thermalcomfortdataapp.firebaseapp.com",
databaseURL: "https://thermalcomfortdataapp.firebaseio.com",
projectId: "thermalcomfortdataapp",
storageBucket: "thermalcomfortdataapp.appspot.com",
messagingSenderId: "683582280356",
appId: "1:683582280356:web:39f0e8e22efe1d57d18c31"
}
firebase.initializeApp(config);
// Persist one survey response under users/{user}/surveys.
// The Firestore write is awaited (matching pushNotifs/pushProfile) so
// callers can observe completion or failure; previously the `add`
// promise was silently dropped.
export async function pushData(data: any, user: string) {
  await firebase.firestore().collection('users').doc(user).collection('surveys').add({
    userId: user,
    buildingNumber: data.buildingNumber,
    roomNumber: data.roomNumber,
    thermalSensation: data.thermalSensation,
    thermalPreference: data.thermalPreference,
    breezy: data.breezy,
    humiditySensation: data.humiditySensation,
    clothing: data.clothing,
    recentAction: data.recentAction,
  });
}
// Overwrite the user's notification schedule document
// (users/{user}/notifications/notifications) with hour -> enabled flags.
// `set` without merge replaces the entire document.
export async function pushNotifs(data: any, user: string) {
const ref = firebase.firestore().collection('users').doc(user).collection('notifications').doc('notifications');
await ref.set({
"8:00 AM": data.eightAm,
"9:00 AM": data.nineAm,
"10:00 AM": data.tenAm,
"11:00 AM": data.elevenAm,
"12:00 PM": data.twelvePm,
"1:00 PM": data.onePm,
"2:00 PM": data.twoPm,
"3:00 PM": data.threePm,
"4:00 PM": data.fourPm,
"5:00 PM": data.fivePm,
"6:00 PM": data.sixPm,
"7:00 PM": data.sevenPm,
"8:00 PM": data.eightPm,
});
}
// Overwrite the user's profile document (users/{user}/profile/profile).
// `set` without merge replaces the whole document with these fields.
export async function pushProfile(data: any, user: string) {
const ref = firebase.firestore().collection('users').doc(user).collection('profile').doc('profile');
await ref.set({
"Age": data.age,
"Gender": data.gender,
"Zip Code": data.zipCode,
"Native Conditions": data.nativeConditions,
"Preferred Conditions": data.preferredConditions
});
}
// Sign the current user out of Firebase Auth; returns the sign-out promise.
export function logoutUser() {
return firebase.auth().signOut()
}
// Authenticate against Firebase Auth.
// Resolves with the Firebase credential result on success; shows a toast
// and resolves with `false` on failure — callers must handle both shapes.
export async function loginUser(email: string, password: string) {
// authenticate with firebase
try {
const res = await firebase.auth().signInWithEmailAndPassword(email, password);
return res;
} catch(error) {
toast(error.message, 4000)
return false;
}
}
// Create a Firebase Auth account for `email`.
// Resolves true on success; shows a toast and resolves false on failure.
export async function registerUser(email: string, password: string) {
  // authenticate with firebase
  try {
    // The credential result was previously bound to an unused local; only
    // success/failure matters here, so don't bind it.
    await firebase.auth().createUserWithEmailAndPassword(email, password);
    return true;
  } catch (error) {
    toast(error.message, 4000);
    return false;
  }
}
| 357facbf30a6c8dd0acc44325893dfb1dda232a0 | [
"TypeScript"
] | 1 | TypeScript | ThomKaar/thermalComfortDataApp | d40236ca412fac4795cd1a6ca4c5235992223427 | 38d1c54f8fc24fbd6c2054de24edd9789f830a13 |
refs/heads/main | <repo_name>Castillo14/moral<file_sep>/das/signin.php
<?php include('header.php'); ?>
<!DOCTYPE html>
<html lang="en">
<head>
<link rel="icon" type="image/png" sizes="32x32" href="img/favicon-32x32.png"> <!--favicon-->
<title>Moral</title>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
<link href="css/style.css" rel="stylesheet" type="text/css"> <!--style.css document-->
<link href="css/font-awesome.min.css" rel="stylesheet"> <!--font-awesome-->
<link rel="stylesheet" href="https://stackpath.bootstrapcdn.com/bootstrap/4.1.0/css/bootstrap.min.css"> <!--bootstrap-->
<script src="https://ajax.googleapis.com/ajax/libs/jquery/3.3.1/jquery.min.js"></script> <!--googleapis jquery-->
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/4.7.0/css/font-awesome.min.css"> <!--font-awesome-->
<script src="https://code.jquery.com/jquery-3.3.1.slim.min.js"></script> <!--bootstrap-->
<script src="https://cdnjs.cloudflare.com/ajax/libs/popper.js/1.14.0/umd/popper.min.js"></script> <!--bootstrap-->
<script src="https://stackpath.bootstrapcdn.com/bootstrap/4.1.0/js/bootstrap.min.js"></script> <!--bootstrap-->
</head>
<style>
.flex-column {
max-width : 260px;
}
.container {
background: #f9f9f9;
}
.img {
margin: 5px;
}
.logo img{
width:150px;
height:250px;
margin-top:90px;
margin-bottom:40px;
}
</style>
<body>
<div class="container-fluid">
<!-- <div class="header_top"> -->
<!-- <a href="index.php"><img src="img/dash2.jpg" alt="logo.png img"></a> -->
<!-- <span style="font-size:50px;color:#2c2f84;font-weight:bolder;margin-left:15px;">Doctor Appoinment System</span>
</div> -->
<!-- this is for menu -->
<nav class="navbar navbar-expand-md navbar-light fixed-top">
<div class="container">
<a class="navbar-brand" href="index.php">
<strong><em>Moral</em></strong>
</a>
<button class="navbar-toggler" type="button" data-toggle="collapse" data-target="#navi">
<span class="navbar-toggler-icon"></span>
</button>
<div class="collapse navbar-collapse" id="navi">
<ul class="navbar-nav mr-auto">
<?php
//set navigation bar when logged in
if(isset($_SESSION['user_id'])){ echo'
<li class="nav-item">
<a class="nav-link" href="reservation.php" >New Reservation</a>
</li>
<li class="nav-item">
<a class="nav-link" href="view_reservations.php" >View Reservations</a>
</li>';
//set navigation bar when logged in and role of admin
if($_SESSION['role']==2) {
echo'
<li class="nav-item">
<a class="nav-link" href="schedule.php" >Edit Schedule</a>
</li>
<li class="nav-item">
<a class="nav-link" href="tables.php" >Edit Tables</a>
</li>
<li class="nav-item">
<a class="nav-link" href="view_tables.php" >View Tables</a>
</li>';
}
}
//main page not logged in navigation bar
else { echo'
<li class="nav-item">
<a class="nav-link" href="#aboutus">About Us</a>
</li>
<li class="nav-item">
<a class="nav-link" href="#reservation">Reservation</a>
</li>
<li class="nav-item">
<a class="nav-link" href="#footer">Find Us</a>
</li>
'; }
?>
</ul>
<?php
//log out button when user is logged in
if(isset($_SESSION['user_id'])){
echo '
<form class="navbar-form navbar-right" action="includes/logout.inc.php" method="post">
<button type="submit" name="logout-submit" class="btn btn-outline-dark">Logout</button>
</form>';
}
else{
echo '
<div>
<ul class="navbar-nav ml-auto">
</ul>
</div>
';}
?>
</div>
</div>
</nav>
<div class="container">
<h3 class="text-center"><br><br>User Login</h3>
<div class="row">
<!--carousel-->
<div class="col-md-12">
<div class="col-md-4"></div>
<div class="col-md-4" style="float: right;padding:20px 115px;border: 1px solid lightgrey;margin-right:415px; margin-bottom:30px;background-color:#f3f3f8;color:#141313;">
<h4 class="text-center;"><a href="patient_login.php">Patient login</a></h4>
<h4 class="text-center;"><a href="doctors/doctorlogin.php">Doctor login</a></h4>
</div>
<div class="col-md-4"></div>
</div>
</div>
</div><br>
</div>
</section>
<!-- this is for donor registraton -->
<!-- <div class="main_content" style="background-color:#fff;">
<h3 class="text-center" style="background-color:#272327;color: #fff;">User Login</h3>
<div class="formstyles" style="background-color: #101011;color: #0d0623;"> -->
</div><br><br><br><br>
</div><!-- containerFluid Ends -->
<script src="js/bootstrap.min.js"></script>
</body>
</html> | 55e1fdb1c60237e84019a91adf458e4da5e98d71 | [
"PHP"
] | 1 | PHP | Castillo14/moral | 025f639ea7f6a9d9e0e08373928d147ecf10a16c | 929be5f1db75413d9c79bf1e78d15f9da33859ab |
refs/heads/master | <repo_name>orlandosaraivajr/django_api_rest<file_sep>/pollsapi/polls/admin.py
from django.contrib import admin
from .models import Poll
from .models import Choice
from .models import Vote
# Expose the polls models in the Django admin with default ModelAdmin options.
admin.site.register(Poll)
admin.site.register(Choice)
admin.site.register(Vote)<file_sep>/pollsapi/polls/views.py
from rest_framework import viewsets
from .models import Poll, Choice
from .serializers import PollSerializer, ChoiceSerializer, VoteSerializer
class PollViewSet(viewsets.ModelViewSet):
    """CRUD API endpoints for Poll objects (list/retrieve/create/update/delete)."""
    queryset = Poll.objects.all()
    serializer_class = PollSerializer | e5047004e06c7fa801d399a9506934c49d87d5e3 | [
"Python"
] | 2 | Python | orlandosaraivajr/django_api_rest | 6367dcf718fa97e3e089a503e1ece599b8ac381c | 76ccb05a0b4bf6f074a1df8d653f688b4f5082e1 |
refs/heads/master | <repo_name>xinxingyu/GB<file_sep>/src/router/index.js
import Vue from 'vue'
import Router from 'vue-router'
Vue.use(Router)
// Lazily loaded route components via require.ensure code-splitting.
// NOTE(review): all three chunks are named 'Record', so Index and My get
// bundled into the same chunk as Record — confirm whether intentional.
const Index = r => require.ensure(
[],
() => r(require('@/page/Index')),
'Record'
)
const Record = r => require.ensure(
[],
() => r(require('@/page/Record')),
'Record'
)
const My = r => require.ensure(
[],
() => r(require('@/page/My')),
'Record'
)
// NOTE(review): every route declares alias '/'; only the first match can
// win, so the later aliases look dead — verify before relying on them.
export default new Router({
routes: [
{
path: '/index',
alias: '/',
name: '一元购',
component: Index
},
{
path: '/record',
alias: '/',
name: '参与记录',
component: Record
},
{
path: '/my',
alias: '/',
name: '我的',
component: My
}
]
})
| f6993425aa8343fe4dd19568599d754777648769 | [
"JavaScript"
] | 1 | JavaScript | xinxingyu/GB | 393209cf7a80d7185880c41337f40df9369d5f7c | 6f745818399a7172f750dbe65daf5b408b8814fa |
refs/heads/main | <file_sep>import {
BrowserRouter as Router,
Switch,
Route,
Link,
Redirect
} from 'react-router-dom';
import ID from './component/ID';
import Signature from './component/Signature';
import Register from './component/Register';
import Login from './component/Login';
import Edit from './component/Edit';
function App() {
return (
<Router>
<div className='header'>
<Link to='/'>
<h1 className='outline' style={{ fontWeight: 700 }}>
<img src='/logo192.png' />
repul.ink
</h1>
</Link>
<Link to='/id/me'>
<h1 style={{ color: 'var(--foreground)' }}>
<i className='fas fa-address-card'></i>
</h1>
</Link>
</div>
<div className='center'>
<Switch>
<Route path='/register'>
<Register />
</Route>
<Route path='/login'>
<Login />
</Route>
<Route path='/id/:username'>
<ID />
</Route>
<Route path='/sig/:uuid'>
<Signature />
</Route>
<Route path='/edit'>
<Edit />
</Route>
<Route path='/' exact>
<div style={{ zIndex: -1, position: 'absolute', top: 0, left: 0, width: '100%', height: '100%', overflow: 'hidden' }}>
<div className='blob'>
<svg fill='var(--highlight)' viewBox='0 0 310 350'>
<path d='M156.4,339.5c31.8-2.5,59.4-26.8,80.2-48.5c28.3-29.5,40.5-47,56.1-85.1c14-34.3,20.7-75.6,2.3-111 c-18.1-34.8-55.7-58-90.4-72.3c-11.7-4.8-24.1-8.8-36.8-11.5l-0.9-0.9l-0.6,0.6c-27.7-5.8-56.6-6-82.4,3c-38.8,13.6-64,48.8-66.8,90.3c-3,43.9,17.8,88.3,33.7,128.8c5.3,13.5,10.4,27.1,14.9,40.9C77.5,309.9,111,343,156.4,339.5z' />
</svg>
</div>
</div>
<div className='center home'>
<div>
</div>
<div className='box' style={{ backgroundColor: 'transparent' }}><div>
<Link to='/login'>
<button style={{ marginBottom: 10 }}>Login</button>
</Link>
<br />
<Link to='/register'>
<button>Register</button>
</Link>
<br /><br />
<p style={{ paddingLeft: 10, borderLeftStyle: 'solid', marginLeft: 10, borderWidth: 1 }}>
As misinformation increases in popular media and machine learning pushes the limits of imitating human faces and voices, it's necessary to return normalcy to our trust of online sources.
</p>
<h2>Identity <i>v.</i> Anonymity <i className='fas fa-theater-masks'></i></h2>
<br />
<p style={{ paddingLeft: 10, borderLeftStyle: 'solid', marginLeft: 10, borderWidth: 1 }}>
Your private keys will only ever be available to you, they are encrypted locally in the browser before they are stored in secure Google Cloud databases.
</p>
<h2>Local Encryption <i className='fas fa-key'></i></h2>
<br />
<p style={{ paddingLeft: 10, borderLeftStyle: 'solid', marginLeft: 10, borderWidth: 1 }}>
Safe and easy to use cryptography should be a right of all users on the internet. As the online world expands into our lives, no person should ever feel unsafe in its growth.
</p>
<h2>Ease of Use <i className='fas fa-seedling'></i></h2>
</div></div>
</div>
</Route>
<Route>
<div>
<h1>Page not found</h1>
<p>Seems you've taken a wrong turn!</p>
</div>
</Route>
</Switch>
</div>
</Router>
);
}
export default App;
<file_sep>import { useState } from 'react';
import { Link } from 'react-router-dom';
import forge from 'node-forge';
import axios from 'axios';
import { encrypt, decrypt } from '../crypto';
// Check-mark bullet shown next to a satisfied password rule.
function Yes() {
return (
<i style={{ textAlign: 'center', width: 16 }} className='fas fa-check'></i>
);
}
// Cross bullet shown next to an unsatisfied password rule.
function No() {
return (
<i style={{ textAlign: 'center', width: 16 }} className='fas fa-times'></i>
);
}
// Login form: fetches the user's password-encrypted private key from the
// server, decrypts it locally in the browser, signs the username (SHA-1)
// as proof of key possession, and posts the signature to open a session.
function Login() {
const [ username, setUsername ] = useState('');
const [ password, setPassword ] = useState('');
const handleUsernameChange = (e) => { setUsername(e.target.value); };
const handlePasswordChange = (e) => { setPassword(e.target.value); };
// Same password policy as registration, rendered as the checklist below.
const reqLength = password.length >= 14;
const reqCase = /(?=.*[a-z])(?=.*[A-Z])/.test(password);
const reqSpecial = /(?=.*[-+_!@#$%^&*., ?])/.test(password);
const handleLogin = (e) => {
if (!username) return alert('Username is missing!');
if (!reqLength) return alert('Password contains at least 14 characters!');
if (!reqCase) return alert('Password contains both capital and lowercase letters');
if (!reqSpecial) return alert('Password contains at least 1 symbol');
// Step 1: retrieve the encrypted private key for this username.
axios.post('/api/prepareLogin', { username })
.then(res => {
switch (res.data.code) {
case 0:
try {
// Decrypt the PEM private key with the password; a wrong password
// makes decryption/parsing throw, caught below as 'Invalid password!'.
const key_encryption = forge.pki.privateKeyFromPem(decrypt(
password, res.data.key_encryption));
const md = forge.md.sha1.create();
md.update(username, 'utf8');
const signature = key_encryption.sign(md);
// Step 2: authenticate with the signed username.
axios.post('/api/login', { username, signature })
.then(res => {
window.location.href = '/id/me';
})
.catch(err => {
alert('Unknown error!');
});
}
catch (err) {
alert('Invalid password!');
}
break;
default:
alert(res.data.message);
break;
}
});
};
return (
<div className='box'>
<div>
<h2>Login to your repul.ink account</h2>
<div style={{ padding: 5 }}>
<p>{reqLength ? <Yes/> : <No/>} Password contains at least 14 characters</p>
<p>{reqCase ? <Yes/> : <No/>} Password contains both capital and lowercase letters</p>
<p>{reqSpecial ? <Yes/> : <No/>} Password contains at least 1 symbol</p>
<input
type='text' placeholder='Username'
value={ username } onChange={ handleUsernameChange } />
{/* NOTE(review): '<PASSWORD>' below looks like a secret-scrub artifact;
the original likely read placeholder='Password' and value={ password } —
confirm before shipping. */}
<input
type='password' placeholder='<PASSWORD>'
value={ <PASSWORD> } onChange={ handlePasswordChange } />
<button style={{ marginTop: 10 }} onClick={ handleLogin }>Login</button>
<Link to='/register'>
<small className='soft'>Don't have an account?</small>
</Link>
</div>
</div>
</div>
);
}
export default Login;
<file_sep>const path = require('path');
const Buffer = require('buffer');
require('dotenv').config();
const axios = require('axios');
const forge = require('node-forge');
// Initializing
const short = require('short-uuid');
const UUID = short(short.constants.flickrBase58);
// Initializing express
const express = require('express');
const PORT = process.env.PORT || 6754;
const app = express();
const bodyParser = require('body-parser');
const cookieParser = require('cookie-parser');
const session = require('express-session');
app.use(bodyParser.json());
app.use(cookieParser());
app.use(session({ secret: process.env.SESSION_SECRET }));
// Initializing MySQL
const mysql = require('mysql');
const sql = mysql.createConnection({
host: process.env.SQL_HOST,
user: process.env.SQL_USER,
password: <PASSWORD>,
database: process.env.SQL_DATABASE
});
// Initializing passport
const passport = require('passport');
const LocalStrategy = require('passport-local').Strategy;
const FacebookStrategy = require('passport-facebook').Strategy;
const TwitterStrategy = require('passport-twitter').Strategy;
const GitHubStrategy = require('passport-github2').Strategy;
app.use(passport.initialize());
app.use(passport.session());
passport.serializeUser((token, done) => {
done(null, token);
});
passport.deserializeUser((token, done) => {
done(null, token);
});
// Local auth
passport.use(
new LocalStrategy(
{
usernameField: 'username',
passwordField: '<PASSWORD>'
},
(username, signature, done) => {
sql.query(
'SELECT `id`,`key_decrypt` FROM `users` WHERE `username` = ?',
username, (err, rows, fields) => {
if (err) throw err;
// Check if user is found
if (rows.length == 0)
return done(null, false);
const md = forge.md.sha1.create();
md.update(username, 'utf8');
const key_decrypt = forge.pki.publicKeyFromPem(
rows[0].key_decrypt);
const verified = key_decrypt.verify(
md.digest().bytes(), signature);
if (verified) {
return done(null, {
id: rows[0].id,
username
});
}
else {
return done(null, false);
}
}
);
}
)
);
app.post('/api/login',
passport.authenticate('local', {
successRedirect: '/id/me',
failureRedirect: '/login',
failureFlash: false
})
);
app.get('/logout', (req, res) => {
req.logout();
res.redirect('/');
});
// Express middleware: let the request through only when a session user
// exists; otherwise bounce the client to the login page.
function isAuth(req, res, next) {
  if (!req.user) {
    res.redirect('/login');
    return;
  }
  next();
}
// Facebook OAuth
passport.use(
new FacebookStrategy({
clientID: process.env.FACEBOOK_APP_ID,
clientSecret: process.env.FACEBOOK_APP_SECRET,
callbackURL: 'https://repul.ink/auth/facebook/callback'
},
(accessToken, refreshToken, profile, done) => {
axios.get(`https://graph.facebook.com/v10.0/${profile.id}?access_token=${accessToken}&fields=link`)
.then(res => {
done(null, {
id: profile.id,
name: profile.displayName,
link: res.data.link
});
})
.catch(err => {
console.log('error');
});
}
));
app.get('/auth/facebook',
passport.authorize('facebook', { scope: 'user_link' }));
app.get('/auth/facebook/callback',
passport.authorize('facebook', {
failureRedirect: '/id/me'
}),
(req, res) => {
sql.query(
'UPDATE `users` SET `facebook_id` = ?, `facebook_name` = ?, `facebook_link` = ? WHERE (`id` = ?)',
[ req.account.id, req.account.name, req.account.link, req.user.id ],
(err, rows, fields) => {
if (err) throw err;
}
);
res.end('<html><body><script>window.location.href=\'/id/me\';</script></body></html>');
}
);
// Twitter OAuth
passport.use(
new TwitterStrategy({
consumerKey: process.env.TWITTER_APP_KEY,
consumerSecret: process.env.TWITTER_APP_SECRET,
callbackURL: 'https://repul.ink/auth/twitter/callback'
},
(accessToken, refreshToken, profile, done) => {
done(null, {
id: profile.id,
name: `${profile.displayName} (@${profile.username})`,
link: `https://twitter.com/${profile.username}`
});
}
));
app.get('/auth/twitter',
passport.authorize('twitter'));
app.get('/auth/twitter/callback',
passport.authorize('twitter', {
failureRedirect: '/id/me'
}),
(req, res) => {
sql.query(
'UPDATE `users` SET `twitter_id` = ?, `twitter_name` = ?, `twitter_link` = ? WHERE (`id` = ?)',
[ req.account.id, req.account.name, req.account.link, req.user.id ],
(err, rows, fields) => {
if (err) throw err;
}
);
res.end('<html><body><script>window.location.href=\'/id/me\';</script></body></html>');
}
);
// GitHub OAuth
passport.use(
new GitHubStrategy({
clientID: process.env.GITHUB_APP_ID,
clientSecret: process.env.GITHUB_APP_SECRET,
callbackURL: 'https://repul.ink/auth/github/callback'
},
(accessToken, refreshToken, profile, done) => {
done(null, {
id: profile.id,
name: `${profile.displayName} (@${profile.username})`,
link: `https://github.com/${profile.username}`
});
}
));
app.get('/auth/github',
passport.authorize('github'));
app.get('/auth/github/callback',
passport.authorize('github', {
failureRedirect: '/id/me'
}),
(req, res) => {
sql.query(
'UPDATE `users` SET `github_id` = ?, `github_name` = ?, `github_link` = ? WHERE (`id` = ?)',
[ req.account.id, req.account.name, req.account.link, req.user.id ],
(err, rows, fields) => {
if (err) throw err;
}
);
res.end('<html><body><script>window.location.href=\'/id/me\';</script></body></html>');
}
);
// Handle static files
app.use(express.static(path.join(__dirname, 'build')));
// Public APIs
app.get('/api/id/:username', (req, res) => {
var username = req.params.username
if (username == 'me') {
if (req.user)
username = req.user.username;
else
return res.json({ code: 3, message: 'Not authenticated!' });
}
if (!username) return res.json({ code: 1, message: 'Missing user!' });
sql.query(
'SELECT `username`,`name`,`blurb`,`key_decrypt`,`key_encrypt`,`facebook_name`,`facebook_link`,`twitter_name`,`twitter_link`,`github_name`,`github_link` FROM `users` WHERE `username` = ?',
username, (err, rows, fields) => {
if (err) throw err;
// Check if user is found
if (rows.length == 0)
return res.json({ code: 2, message: 'User not found!' });
res.json({
code: 0,
message: 'Success',
username: rows[0].username,
name: rows[0].name,
blurb: rows[0].blurb,
key_decrypt: rows[0].key_decrypt.toString(),
facebook_name: rows[0].facebook_name,
facebook_link: rows[0].facebook_link,
twitter_name: rows[0].twitter_name,
twitter_link: rows[0].twitter_link,
github_name: rows[0].github_name,
github_link: rows[0].github_link
});
}
);
});
app.get('/api/sig/:uuid', (req, res) => {
const uuid = req.params.uuid;
sql.query(
'SELECT * FROM `signatures` WHERE `uuid` = ?',
uuid, (err, rows, fields) => {
if (err) throw err;
// Check if user is found
if (rows.length == 0)
return res.json({ code: 1, message: 'Signature not found!' });
res.json({
code: 0,
message: 'Success',
uuid: rows[0].uuid,
username: rows[0].username,
signature: rows[0].signature.toString(),
hash: rows[0].hash,
text: rows[0].text
});
}
);
});
app.post('/api/register', (req, res) => {
const { username, key_encrypt, key_decrypt } = req.body;
if (!username) return res.json({ code: 1, message: 'Missing username!' });
if (!key_encrypt) return res.json({ code: 2, message: 'Missing key_encrypt!' });
if (!key_decrypt) return res.json({ code: 3, message: 'Missing key_decrypt!' });
sql.query(
'SELECT EXISTS(SELECT * FROM `users` WHERE `username` = ?)',
username, (err, rows, fields) => {
if (err) throw err;
if (Object.values(rows[0])[0])
return res.json({ code: 4, message: 'Username is already in use!' });
sql.query(
'INSERT INTO `users` (`username`,`name`,`key_encrypt`,`key_decrypt`) VALUES (?,?,?,?)',
[ username, username, key_encrypt, key_decrypt ],
(err, rows, fields) => {
if (err) throw err;
return res.json({ code: 0, message: 'Success' });
}
);
}
);
});
app.post('/api/prepareLogin', (req, res) => {
const { username } = req.body;
if (!username) return res.json({ code: 1, message: 'Missing username!' });
sql.query(
'SELECT EXISTS(SELECT * FROM `users` WHERE `username` = ?)',
username, (err, rows, fields) => {
if (err) throw err;
if (!Object.values(rows[0])[0])
return res.json({ code: 4, message: 'Invalid username!' });
sql.query(
'SELECT `key_encrypt` FROM `users` WHERE `username` = ?',
username,
(err, rows, fields) => {
if (err) throw err;
return res.json({
code: 0,
message: 'Success',
key_encryption: rows[0].key_encrypt.toString()
});
}
);
}
);
});
app.post('/api/prepareSign', isAuth, (req, res) => {
sql.query(
'SELECT EXISTS(SELECT * FROM `users` WHERE `username` = ?)',
req.user.username, (err, rows, fields) => {
if (err) throw err;
if (!Object.values(rows[0])[0])
return res.json({ code: 4, message: 'Invalid username!' });
sql.query(
'SELECT `key_encrypt` FROM `users` WHERE `username` = ?',
req.user.username,
(err, rows, fields) => {
if (err) throw err;
return res.json({
code: 0,
message: 'Success',
key_encryption: rows[0].key_encrypt.toString()
});
}
);
}
);
});
app.post('/api/sign', isAuth, (req, res) => {
let { hash, payload, signature } = req.body;
sql.query(
'SELECT `id`,`key_decrypt` FROM `users` WHERE `username` = ?',
req.user.username, (err, rows, fields) => {
if (err) throw err;
// Check if user is found
if (rows.length == 0)
return done(null, false);
const md = forge.md.sha1.create().update(payload, 'utf8');
const key_decrypt = forge.pki.publicKeyFromPem(
rows[0].key_decrypt);
hash = hash || md.digest().toHex();
const verified = key_decrypt.verify(
forge.util.hexToBytes(hash), signature);
if (verified) {
const uuid = UUID.new();
sql.query(
'INSERT INTO `signatures` (`uuid`,`username`,`signature`,`hash`,`text`) VALUES (?,?,?,?,?)',
[ uuid, req.user.username, signature, hash, payload ],
(err, rows, fields) => {
if (err) throw err;
return res.json({
code: 0,
message: 'Success',
uuid
});
}
);
}
else {
return res.json({
code: 1,
message: 'Invalid signature!'
});
}
}
);
});
app.post('/api/edit', isAuth, (req, res) => {
const { name, blurb } = req.body;
if (!name) return res.json({ code: 1, message: 'Missing name!' });
sql.query(
'UPDATE `users` SET `name` = ?, `blurb` = ? WHERE `id` = ?',
[ name, blurb, req.user.id ], (err, rows, fields) => {
if (err) throw err;
return res.json({
code: 0,
message: 'Success'
});
}
);
});
// Wildcard router for React app
// Catch-all handler: serve the React SPA's index.html and let the
// client-side router resolve the requested path.
function wildcard(req, res) {
res.sendFile(path.join(__dirname, 'build/index.html'));
}
app.get('/id/me', isAuth, wildcard);
app.get('/edit', isAuth, wildcard);
app.get('*', wildcard);
// Start server
sql.connect((err) => {
if (err)
console.log(`sql error while connecting: ${err.stack}`);
else {
console.log(`connected to sql server`);
app.listen(PORT, () => console.log(`server running on port ${PORT}`));
}
});
<file_sep>import { useState, useEffect } from 'react';
import { useParams, Link } from 'react-router-dom';
import { QRCode } from 'react-qrcode-logo';
import axios from 'axios';
import forge from 'node-forge';
import Social from './Social';
import ID from './ID';
function Signature() {
const [ uuid, setUuid ] = useState(useParams().uuid);
const [ username, setUsername ] = useState();
const [ signature, setSignature ] = useState();
const [ hash, setHash ] = useState();
const [ text, setText ] = useState();
const [ key, setKey ] = useState();
const [ name, setName ] = useState();
const [ blurb, setBlurb ] = useState();
const [ facebookName, setFacebookName ] = useState();
const [ facebookLink, setFacebookLink ] = useState();
const [ twitterName, setTwitterName ] = useState();
const [ twitterLink, setTwitterLink ] = useState();
const [ githubName, setGithubName ] = useState();
const [ githubLink, setGithubLink ] = useState();
const [ verified, setVerified ] = useState('waiting');
const [ checksum, setChecksum ] = useState('waiting');
const fileMode = !text;
const [ file, setFile ] = useState();
const handleFileChange = (e) => { setFile(e.target.files[0]); };
useEffect(() => {
fetch(`/api/sig/${uuid}`)
.then(res => res.json())
.then(
(res) => {
if (res.code)
window.location.href = '/';
setUsername(res.username);
setSignature(res.signature);
setHash(res.hash);
setText(res.text);
},
(err) => {
window.location.href = '/';
}
);
}, []);
useEffect(() => {
if (username) {
fetch(`/api/id/${username}`)
.then(res => res.json())
.then(
(res) => {
if (res.code)
window.location.href = '/';
setName(res.name);
setBlurb(res.blurb);
setKey(res.key_decrypt);
setFacebookName(res.facebook_name);
setFacebookLink(res.facebook_link);
setTwitterName(res.twitter_name);
setTwitterLink(res.twitter_link);
setGithubName(res.github_name);
setGithubLink(res.github_link);
},
(err) => {
window.location.href = '/';
}
);
}
}, [ username ]);
useEffect(() => {
if (key) {
const key_decrypt = forge.pki.publicKeyFromPem(key);
setVerified(
key_decrypt.verify(forge.util.hexToBytes(hash), signature) ?
'verified' : 'invalid'
);
if (fileMode && file) {
const fileReader = new FileReader();
fileReader.onloadend = (e) => {
const content = fileReader.result;
const md = forge.md.sha1.create();
md.update(content, 'binary');
const body = md.digest().toHex();
setChecksum(
hash == body ?
'match' : 'mismatch'
);
};
fileReader.readAsBinaryString(file);
}
}
}, [ key, file ]);
return (
<div style={{
opacity: key ? 1 : 0, display: key ? 'flex' : 'none', flexDirection: 'column'
}} className='box'>
<QRCode
size={ 225 }
value={ `${window.location.origin}/sig/${uuid}` }
qrStyle='dots' bgColor='#1c1f28' fgColor='#f8faf9'
logoImage='/logo.png' logoWidth='75' logoOpacity='0.8' />
<br />
{ fileMode
? <div style={{ marginBottom: 8 }}>
<label style={{ marginRight: 4 }} for='file'>
Browse
</label>
<small style={{ color: 'var(--midground)' }}>
{ file?.name || 'No file selected' }
</small>
<input
id='file'
style={{ display: 'none' }}
type='file' onChange={ handleFileChange } />
</div>
: <p>{ text }</p>
}
{ checksum == 'mismatch' && fileMode &&
<small style={{ color: 'red' }}>
Files Mismatch
</small> }
{ checksum == 'match' && fileMode &&
<small style={{ color: 'deepskyblue' }}>
Files Match
</small> }
{ checksum == 'waiting' && fileMode &&
<small style={{ color: 'var(--foreground)' }}>Waiting for file</small> }
<small className='soft'>{ hash }</small>
{ verified == 'invalid' &&
<h2 style={{ color: 'red' }}>
Invalid <i className='fas fa-times-circle'></i>
</h2> }
{ verified == 'verified' &&
<h2 style={{ color: 'deepskyblue' }}>
Verified <i className='fas fa-check-circle'></i>
</h2> }
{ verified == 'waiting' &&
<h2 style={{ color: 'var(--foreground)' }}>Verifying...</h2> }
<div style={{ width: '100%' }}>
<hr />
</div>
<Link to={ `/id/${username}` }>
<h1 style={{ fontWeight: 400 }}>{ name }</h1>
</Link>
<i className='soft'>@{ username }</i>
<p>{ blurb }</p>
<div>
<div style={{ display: 'inline-block' }}>
{ twitterName &&
<Social
text={ twitterName }
link={ twitterLink }
color='#1DA1F2' type='twitter' /> }
{ githubName &&
<Social
text={ githubName }
link={ githubLink }
color='#0366D6' type='github' /> }
{ facebookName &&
<Social
text={ facebookName }
link={ facebookLink }
color='#4267B2' type='facebook' /> }
</div>
</div>
</div>
);
}
export default Signature;
<file_sep>import forge from 'node-forge';
// 3DES-CBC sizes in bytes: 24-byte key (three DES keys) + 8-byte IV.
const keySize = 24;
const ivSize = 8;
// Password-based encryption in the OpenSSL "Salted__" envelope:
// output = "Salted__" + 8-byte salt + 3DES-CBC ciphertext, with key and
// IV derived from (password, salt) via OpenSSL's EVP_BytesToKey scheme.
// The order getBytes(keySize) then getBytes(ivSize) is part of the wire
// format — decrypt() must consume the derived bytes in the same order.
function encrypt(pass, message) {
const salt = forge.random.getBytesSync(8);
const derivedBytes = forge.pbe.opensslDeriveBytes(
pass, salt, keySize + ivSize
);
const buffer = forge.util.createBuffer(derivedBytes);
const key = buffer.getBytes(keySize);
const iv = buffer.getBytes(ivSize);
const cipher = forge.cipher.createCipher('3DES-CBC', key);
cipher.start({ iv });
cipher.update(forge.util.createBuffer(message));
cipher.finish();
const output = forge.util.createBuffer();
// salt is always non-null here; guard kept for parity with forge examples.
if (salt !== null) {
output.putBytes('Salted__');
output.putBytes(salt);
}
output.putBuffer(cipher.output);
return output.getBytes();
}
// Inverse of encrypt(): strip the "Salted__" header, re-derive the 3DES
// key and IV from (pass, salt), then decrypt the remaining bytes.
function decrypt(pass, message) {
  const envelope = forge.util.createBuffer(message);
  envelope.getBytes('Salted__'.length); // discard the magic prefix
  const salt = envelope.getBytes(8);
  const keyMaterial = forge.util.createBuffer(
    forge.pbe.opensslDeriveBytes(pass, salt, keySize + ivSize)
  );
  const key = keyMaterial.getBytes(keySize);
  const iv = keyMaterial.getBytes(ivSize);
  const decipher = forge.cipher.createDecipher('3DES-CBC', key);
  decipher.start({ iv });
  decipher.update(envelope);
  decipher.finish();
  return decipher.output.toString();
}
export {
encrypt, decrypt
};
<file_sep>import { useState } from 'react';
import { Link } from 'react-router-dom';
import forge from 'node-forge';
import axios from 'axios';
import { encrypt, decrypt } from '../crypto';
// Check-mark bullet shown next to a satisfied password rule.
function Yes() {
return (
<i style={{ textAlign: 'center', width: 16 }} className='fas fa-check'></i>
);
}
// Cross bullet shown next to an unsatisfied password rule.
function No() {
return (
<i style={{ textAlign: 'center', width: 16 }} className='fas fa-times'></i>
);
}
function Register() {
const [ username, setUsername ] = useState('');
const [ password, setPassword ] = useState('');
const handleUsernameChange = (e) => { setUsername(e.target.value); };
const handlePasswordChange = (e) => { setPassword(e.target.value); };
const reqLength = password.length >= 14;
const reqCase = /(?=.*[a-z])(?=.*[A-Z])/.test(password);
const reqSpecial = /(?=.*[-+_!@#$%^&*., ?])/.test(password);
const handleRegister = (e) => {
if (!username) return alert('Username is required!');
if (!reqLength) return alert('Must contain at least 14 characters!');
if (!reqCase) return alert('Must have both capital and lowercase letters');
if (!reqSpecial) return alert('Must contain at least 1 symbol');
forge.pki.rsa.generateKeyPair(
{ bits: 2048, workers: 2 },
(err, keypair) => {
if (err) console.error(err);
const key_decrypt = forge.pki.publicKeyToPem(keypair.publicKey);
const key_encrypt = forge.pki.privateKeyToPem(keypair.privateKey);
console.log(key_decrypt);
console.log(key_encrypt);
axios.post('/api/register', {
username,
key_decrypt,
key_encrypt: encrypt(password, key_encrypt)
})
.then(res => {
switch (res.data.code) {
case 0:
window.location.href = '/login';
break;
default:
alert(res.data.message);
break;
}
});
}
);
};
return (
<div className='box'>
<div>
<h2>Create your repul.ink account</h2>
<div style={{ padding: 5 }}>
<p>{reqLength ? <Yes/> : <No/>} Must contain at least 14 characters</p>
<p>{reqCase ? <Yes/> : <No/>} Must have both capital and lowercase letters</p>
<p>{reqSpecial ? <Yes/> : <No/>} Must contain at least 1 symbol</p>
<input
type='text' placeholder='Username'
value={ username } onChange={ handleUsernameChange } />
<input
type='<PASSWORD>' placeholder='<PASSWORD>'
value={ <PASSWORD> } onChange={ handlePasswordChange } />
<button style={{ marginTop: 10 }} onClick={ handleRegister }>Register</button>
<Link to='/login'>
<small className='soft'>Already have an account?</small>
</Link>
</div>
</div>
</div>
);
}
export default Register;
| 1796d7cdbb4f736d7376169ea9d205312f20ef82 | [
"JavaScript"
] | 6 | JavaScript | lanpai/repul.ink | df647074fb3bdc580b89efb7a60a5056e8069e79 | 36b7c73022e771d8c563d0a924e87dc844f36756 |
refs/heads/master | <repo_name>kurtyazdizadeh/react_to_dos<file_sep>/src/components/to_do_list.jsx
import React from 'react';
import ListItem from './list_item';
const toDos = [
{
id: '01',
title: 'Wash car'
},
{
id: '02',
title: 'Code more'
},
{
id: '03',
title: 'Cook dinner'
},
{
id: '04',
title: 'Make money'
},
{
id: '05',
title: 'Get paid'
}
]
class ToDoList extends React.Component {
constructor(props){
super(props);
this.state = {
list: []
};
}
render() {
const toDoElements = toDos.map((item) => {
return <ListItem key={item.id} title={item.title} />
});
return (
<ol>
{toDoElements}
</ol>
)
}
}
export default ToDoList;
| d55489ab117add321ff1e33478a79a8113bbb797 | [
"JavaScript"
] | 1 | JavaScript | kurtyazdizadeh/react_to_dos | 6aa1b32499fdce0201b39ce987aa796e6297f83a | d0a96445d49a74ff00a4fd39989bd198595c8164 |
refs/heads/master | <file_sep>import { assert } from 'chai';
import 'mocha';
import { processor } from '../src/processor';
describe("processor", () => {
describe("exec", () => {
it("should work", async () => {
const text = await processor("C:\\Users\\matt.miller\\Pictures\\Capture.PNG");
assert.isNotNull(text);
});
});
});
<file_sep>import { Client } from 'elasticsearch';
import * as fs from 'fs';
import * as path from 'path';
import { Config } from '../../keepers-server/src/config';
import { ErrorMessage, getTypedMessage, IndexingFinishedMessage, QueueForIndexingMessage } from '../../keepers-server/src/core/messages';
import { Queuer } from '../../keepers-server/src/queuer';
import { processor } from './processor';
const config = new Config();
const queuer = new Queuer(config);
console.log("Worker listening for documents to index...");
queuer.startWorking<QueueForIndexingMessage>(config.readyToIndexQueueName, async (item, done, error) => {
const workingPath = path.join(config.workerWorkingDirectory, `${item.id}.jpg`);
const bytes = Buffer.from(item.document.image_enc, "base64");
fs.writeFile(workingPath, bytes, async (err) => {
if (err) {
queuer.broadcastMessage(new ErrorMessage(err), config.documentIndexedFailedExchangeName);
error(err.message);
return;
}
const text = await processor(workingPath);
item.document.text = text.replace(/\n/g, " ");
const client = new Client({ host: config.elasticSearchUrl, log: "trace" });
const result = await client.index(
{
index: 'documents',
type: 'document',
id: item.id,
body: {
text: item.document.text,
image: item.document.image_enc,
tags: item.document.tags,
created: new Date().toISOString(),
},
});
item.document.image_enc = "";
queuer.broadcastMessage(new IndexingFinishedMessage(item), config.documentIndexedExchangeName);
done();
});
});
<file_sep>// tslint:disable-next-line:no-var-requires
const tesseract = require('node-tesseract');
import * as fs from 'fs';
import * as path from 'path';
import { Config } from '../../keepers-server/src/config';
import { ErrorMessage, getTypedMessage, IndexingFinishedMessage, QueueForIndexingMessage } from '../../keepers-server/src/core/messages';
export function processor(workingPath: string): Promise<string> {
const options = {
binary: "\"C:\\Program Files (x86)\\Tesseract-OCR\\tesseract.exe\"",
};
return new Promise<string>((resolve, reject) => {
tesseract.process(workingPath, options, (failed: any, text: string) => {
if (failed) {
console.error(failed);
reject(JSON.stringify(failed));
} else {
resolve(text);
}
});
});
}
| 9f9285923bae06c0b2a6cc5998ad3fbc2f31aaac | [
"TypeScript"
] | 3 | TypeScript | mattmiller85/keepers-worker | 01e5ba76d732acbe90d33c29509f1f0c21123d70 | 7e01d3fa7694f91daa49c0e69d6f1fef9160b58c |
refs/heads/master | <repo_name>kwynne94/Day17<file_sep>/homework/config/routes.rb
Rails.application.routes.draw do
root to: "pages#home"
get "/hello" => "pages#hello"
end
<file_sep>/homework/app/controllers/home_controller.rb
class HomeController < ApplicationController
def welcome
end
<file_sep>/homework/app/controllers/pages_controller.rb
class PagesController < ApplicationController
def hello
end
def home
end
end
| ab9ffa634ce2e08682d2d40c4ac9c019bb4d7469 | [
"Ruby"
] | 3 | Ruby | kwynne94/Day17 | 3f717a0278371b97a1583b0b4cf58ba24731cb3e | b09985ceaa30a0656fc19f4ffba52c8240f6d3f5 |
refs/heads/master | <repo_name>DimaKachenya/LP_LAB_3-5<file_sep>/myItCompany/src/main/java/com/company/workmen/IWrokMen.java
package com.company.workmen;
interface IWorkMen
{
void doWork();
void whoIam();
}<file_sep>/myItCompany/src/main/java/com/company/ItCompanyCollection.java
package com.company;
import com.company.workmen.WorkMen;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
public class ItCompanyCollection implements Serializable {
public List<WorkMen> myList;
public ItCompanyCollection() {
this.myList = new ArrayList();
}
public void Add(WorkMen workMen) {
myList.add(workMen);
}
public void Clear() {
myList.clear();
}
public void PrintAllInformationAboutWorkMen() {
for (WorkMen item :
myList) {
System.out.println(item.toString());
}
}
}
<file_sep>/myItCompany/src/main/java/com/company/workmen/sysadmin/SysAdmin.java
package com.company.workmen.sysadmin;
import com.company.workmen.WorkMen;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
public class SysAdmin extends WorkMen {
@JsonCreator
public SysAdmin(@JsonProperty("lvlSkills")int lvlSkills,@JsonProperty("salary") double salary,@JsonProperty("name") String name, @JsonProperty("surName")String surName,@JsonProperty("age") int age,@JsonProperty("workExperience") int workExperience) {
super(lvlSkills, salary, name, surName, age, workExperience);
}
@Override
public void doWork() {
System.out.println("Я настраиваю сеть");
}
@Override
public void whoIam() {
System.out.printf("Я СИСАдмин в IT компании KACHENYA-corporation");
}
}
| c4395b2558d933e17163ba8b8d2819cf8f96bb20 | [
"Java"
] | 3 | Java | DimaKachenya/LP_LAB_3-5 | 9f26ab4e0944873517bb15af11b71016ed73b42b | 3c3bd046ed48d30faf36462991b5809c6b10d591 |
refs/heads/master | <repo_name>saranyaUST/MavenJenkins<file_sep>/src/main/java/service/CitizenService.java
package service;
import java.time.LocalDate;
import java.time.Period;
public class CitizenService {
public int computeAge(LocalDate dob){
int age=0;
LocalDate today = LocalDate.now();
age = Period.between(dob, today).getYears();
return age;
}
public boolean isEligibleToVote(LocalDate dob ) {
boolean isEligible=false;
int age = computeAge(dob);
if(age>=18)
isEligible=true;
return isEligible;
}
}
<file_sep>/src/test/java/test/TestCitizenService.java
package test;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.time.LocalDate;
import java.time.Month;
import org.junit.Test;
import service.CitizenService;
public class TestCitizenService {
CitizenService cService = new CitizenService();
@Test
public void testComputeAgeWithPastDate() {
LocalDate input = LocalDate.of(1985, Month.JUNE, 11);
int expected = 34;
int actual = cService.computeAge(input);
assertTrue("expected value matched", expected == actual);
}
@Test
public void testComputeAgeWithToday() {
LocalDate input = LocalDate.now();
int expected = 0;
int actual = cService.computeAge(input);
assertTrue("expected value matched", expected == actual);
}
}
| a9edfa3363d4dc50ee0bcc30c28599c63028d555 | [
"Java"
] | 2 | Java | saranyaUST/MavenJenkins | 235230cf68bc58b2b0057e7ef3e2aae7d890261e | 041bcbb9ad2a4b980d94f118b81ac29f9a6469f4 |
refs/heads/master | <repo_name>klaudynaz/CharityCrm<file_sep>/CharityCrm/Models/DataBase/User.cs
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.Linq;
using System.Web;
namespace CharityCrm.Models.DataBase
{
public class User
{
[KeyAttribute]
public int IdUser { get; set; }
[RequiredAttribute]
public string Nickname { get; set; }
[RequiredAttribute]
public string Name { get; set; }
public string Surname { get; set; }
[EmailAddressAttribute]
public string Address { get; set; }
[RequiredAttribute]
public string Email { get; set; }
[PhoneAttribute]
public string PhoneNumber { get; set; }
public virtual User Superior { get; set; } //Tutaj będzie odnośnik do przełożonego
public virtual Role Role { get; set; }//Tutaj dodam jeszcze rolę
}
}<file_sep>/CharityCrm/Models/DataBase/Sale.cs
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.Linq;
using System.Web;
namespace CharityCrm.Models.DataBase
{
public class Sale
{
[KeyAttribute]
public int IdSale { get; set; }
[RequiredAttribute]
public string SaleTitle { get; set; }
[RequiredAttribute]
public string SaleContent { get; set; }
[RequiredAttribute]
public double SalePrice { get; set; }
[RequiredAttribute]
public string Contact { get; set; }
public virtual List<File> Files { get; set; } //Plik
public virtual ProductCategory ProductCategory { get; set; }//Kategoria
public virtual Product Product { get; set; } //Produkt
}
}<file_sep>/CharityCrm/Startup.cs
using Microsoft.Owin;
using Owin;
[assembly: OwinStartupAttribute(typeof(CharityCrm.Startup))]
namespace CharityCrm
{
public partial class Startup
{
public void Configuration(IAppBuilder app)
{
ConfigureAuth(app);
}
}
}
<file_sep>/CharityCrm/Models/DataBase/Registration.cs
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.Linq;
using System.Web;
namespace CharityCrm.Models.DataBase
{
public class Registration
{
[KeyAttribute]
public int IdRegistr { get; set; }
[RequiredAttribute]
public DateTime Created { get; set; }
[RequiredAttribute]
public DateTime Saved { get; set; }
[RequiredAttribute]
public DateTime SendExport { get; set; }
public virtual DocumentTemplate DocumentTemplate { get; set; }//DokumentTemplate
public virtual User User { get; set; }//User
}
}<file_sep>/CharityCrm/Models/DataBase/Message.cs
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.Linq;
using System.Web;
namespace CharityCrm.Models.DataBase
{
public class Message
{
[KeyAttribute]
public int IdMessage { get; set; }
[RequiredAttribute]
public string TitleMessage { get; set; }
[RequiredAttribute]
public string ContentMessage { get; set; }
[RequiredAttribute]
public DateTime SendTime { get; set; }
[RequiredAttribute]
public DateTime ReadTime { get; set; }
public virtual User Recipient { get; set; }//Odbiorca Id
public virtual User Sender { get; set; }//Nadawca Id
//Nadawca Nazwa
}
}<file_sep>/CharityCrm/Models/DataBase/Field.cs
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.Linq;
using System.Web;
namespace CharityCrm.Models.DataBase
{
public class Field
{
[KeyAttribute]
public int IdField { get; set; }
[RequiredAttribute]
public string FieldName { get; set; }
[RequiredAttribute]
public string FieldContent { get; set; }
}
}<file_sep>/CharityCrm/Models/DataBase/RodoProposal.cs
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.Linq;
using System.Web;
namespace CharityCrm.Models.DataBase
{
public class RodoProposal
{
[KeyAttribute]
public int IdProposal { get; set; }
[RequiredAttribute]
public string ProposalName { get; set; }
[RequiredAttribute]
public string Content { get; set; }
[RequiredAttribute]
public string KindOfLaw { get; set; }
[RequiredAttribute]
public string Reply { get; set; }
[RequiredAttribute]
public bool CopyOfData { get; set; }
[RequiredAttribute]
public DateTime CreatedDate { get; set; }
public virtual User ResponsiblePerson { get; set; }//Osoba odpowiedzialna
public virtual User Declarant { get; set; }//User
}
}<file_sep>/CharityCrm/Models/DataBase/DocumentTemplate.cs
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.Linq;
using System.Web;
namespace CharityCrm.Models.DataBase
{
public class DocumentTemplate
{
[KeyAttribute]
public int IdDocument { get; set; }
[RequiredAttribute]
public string DocTitle { get; set; }
[RequiredAttribute]
public string DocContent { get; set; }
public virtual List<Field> Fields { get; set; } //Pola (FK)
}
}<file_sep>/CharityCrm/Models/DataBase/Product.cs
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.Linq;
using System.Web;
namespace CharityCrm.Models.DataBase
{
public class Product
{
[KeyAttribute]
public int IdProduct { get; set; }
[RequiredAttribute]
public string ProductName { get; set; }
[RequiredAttribute]
public string ProductDesc { get; set; }
[RequiredAttribute]
public double Price { get; set; }
}
}<file_sep>/CharityCrm/Models/DataBase/Quest.cs
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.Linq;
using System.Web;
namespace CharityCrm.Models.DataBase
{
public class Quest
{
[KeyAttribute]
public int IdQuest { get; set; }
[RequiredAttribute]
public string QuestName { get; set; }
[RequiredAttribute]
public string DescQuest { get; set; }
[RequiredAttribute]
public string Status { get; set; }
[RequiredAttribute]
public DateTime CreatDate { get; set; }
[RequiredAttribute]
public DateTime FinishDate { get; set; }
public virtual User Owner { get; set; }//PracownikProwadzacy
public virtual List<User> People { get; set; } //Osoby powiązane
public virtual User Author { get; set; }//TwórcaId
}
}<file_sep>/CharityCrm/Models/DataBase/Action.cs
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.Linq;
using System.Web;
namespace CharityCrm.Models.DataBase
{
public class Action
{
[KeyAttribute]
public int IdAction { get; set; }
[RequiredAttribute]
public string ActionName { get; set; }
[RequiredAttribute]
public DateTime StartDate { get; set; }
[RequiredAttribute]
public DateTime FinishDate { get; set; }
public string ActionDescr { get; set; }
[RequiredAttribute]
public string KindOfAction { get; set; }
public virtual User Leader { get; set; } //Relacja Prowadzacy Akcje
public virtual List<User> Users { get; set; } // Lista zaangażowanych w akcje
public virtual Product Product { get; set; } //Produkt
public virtual List<File> Files { get; set; } //Lista plików
}
}<file_sep>/CharityCrm/Models/DataBase/Role.cs
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.Linq;
using System.Web;
namespace CharityCrm.Models.DataBase
{
public class Role
{
[KeyAttribute]
public int IdRole { get; set; }
[RequiredAttribute]
public String RoleName { get; set; }
}
}<file_sep>/CharityCrm/Models/DataBase/File.cs
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.Linq;
using System.Web;
namespace CharityCrm.Models.DataBase
{
public class File
{
[KeyAttribute]
public int IdFile { get; set; }
[RequiredAttribute]
public string OriginalName { get; set; }
[RequiredAttribute]
public string NewName { get; set; }
[RequiredAttribute]
public string Path { get; set; }
[RequiredAttribute]
public string ObjectId { get; set; }
[RequiredAttribute]
public string ObjectType { get; set; }
public virtual Action Action { get; set; } // IdAction
public virtual Product Product { get; set; }//IdProduct
public virtual User User { get; set; }//UserID
}
}<file_sep>/CharityCrm/Models/IdentityModels.cs
using System.Data.Entity;
using System.Security.Claims;
using System.Threading.Tasks;
using Microsoft.AspNet.Identity;
using Microsoft.AspNet.Identity.EntityFramework;
namespace CharityCrm.Models
{
// You can add profile data for the user by adding more properties to your ApplicationUser class, please visit https://go.microsoft.com/fwlink/?LinkID=317594 to learn more.
public class ApplicationUser : IdentityUser
{
public async Task<ClaimsIdentity> GenerateUserIdentityAsync(UserManager<ApplicationUser> manager)
{
// Note the authenticationType must match the one defined in CookieAuthenticationOptions.AuthenticationType
var userIdentity = await manager.CreateIdentityAsync(this, DefaultAuthenticationTypes.ApplicationCookie);
// Add custom user claims here
return userIdentity;
}
}
public class ApplicationDbContext : IdentityDbContext<ApplicationUser>
{
public ApplicationDbContext()
: base("DefaultConnection", throwIfV1Schema: false)
{
}
//public DbSet<Action> Action { get; set; }
//public DbSet<DocumentTemplate> DocumentTemplate { get; set; }
//public DbSet<Field> Field { get; set; }
//public DbSet<File> File { get; set; }
//public DbSet<Message> Message { get; set; }
//public DbSet<Product> Product { get; set; }
//public DbSet<ProductCategory> ProductCategory { get; set; }
//public DbSet<Quest> Quest { get; set; }
//public DbSet<Registration> Registratrion { get; set; }
//public DbSet<RodoProposal> RodoProposal { get; set; }
//public DbSet<Role> Role { get; set; }
//public DbSet<Sale> Sale { get; set; }
//public DbSet<User> User { get; set; }
}
//public static ApplicationDbContext Create()
//{
// return new ApplicationDbContext();
//}
} | 93f89a4faa1f7cac7c918394c434499ab4d40bc3 | [
"C#"
] | 14 | C# | klaudynaz/CharityCrm | b3ca5bfe9c254073ef2b2fae6756e4adbaca3f6e | 85b008471dcb29bf17199ab9697e91a378c89cb1 |
refs/heads/master | <repo_name>joshfedo/projecteuler<file_sep>/P1-25/p_15.php
<?php
/**
* Created by PhpStorm.
* User: fedo
* Date: 6/2/2017
* Time: 3:21 PM
* Starting in the top left corner of a 2×2 grid, and only being able to move to the right and down, there are exactly 6 routes to the bottom right corner.
* <image>https://projecteuler.net/problem=15</image>
* How many such routes are there through a 20×20 grid?
*I was able to solve this using Central the formula Binomial Coefficient. This method is very efficient and fast.
*/
/**
* @param $num , the number to factor
* @return int, The factorial of the input
*/
function factorial($num)
{
    // Multiply downward from $num; an input below 1 leaves the product at 1,
    // so factorial(0) correctly returns 1.
    $result = 1;
    $i = $num;
    while ($i >= 1) {
        $result *= $i;
        $i--;
    }
    return $result;
}
/**
* @param $grid_size , How large the grid is
* @return int, how many paths there are
*/
function solve_paths($grid_size)
{
    // Lattice paths through an n x n grid = central binomial coefficient C(2n, n).
    // Build it multiplicatively: each partial product is itself a binomial
    // coefficient C(n + k, k), so every division is exact and we stay in integer
    // arithmetic. (The old factorial ratio pushed ~1e47 floats through a division
    // and relied on rounding to land on the right integer.)
    $paths = 1;
    for ($k = 1; $k <= $grid_size; $k++) {
        $paths = $paths * ($grid_size + $k) / $k;
    }
    return $paths;
}
echo solve_paths(20);
/*
 * This method worked but was very slow and took up a lot of memory. After looking at the output I realised this
 * could probably be solved with simple math. I'm leaving the rest here just to show my recursion attempt.
*/
//$grid = array(
//array(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
//array(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
//array(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
//array(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
//array(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
//array(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
//array(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
//array(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
//array(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
//array(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
//array(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
//array(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
//array(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
//array(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
//array(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
//array(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
//array(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
//array(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
//array(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
//array(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
//array(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
//
//
//
//);
//function rout_counter($grid,$i,$j,$count){
// $this_gird = $grid;
// $this_gird[$i][$j] = 1;
// if($i ==5 && $j ==5){
// $count++;
//// echo "<pre>";
//// echo print_r($this_gird);
//// echo "</pre>";
// return $count;
// }
// if(($j +1 < count($this_gird) && $this_gird[$i][$j+1] != 1 )){
// $count = rout_counter($this_gird,$i,$j+1,$count);
// }
// if( ($i+ 1 < count($this_gird)) && $this_gird[$i+1][$j]!= 1){
// $count = rout_counter($this_gird,$i+1,$j,$count);
// }
// unset($this_gird);
// return $count;
//}
////echo "total " . rout_counter(make_arr(6),0,0,0);
//
//
<file_sep>/P1-25/p_9.php
<?php
/**
* Created by PhpStorm.
* User: fedo
* Date: 5/24/2017
* Time: 4:17 PM
* A Pythagorean triplet is a set of three natural numbers, a < b < c, for which,
* a2 + b2 = c2
* For example, 32 + 42 = 9 + 16 = 25 = 52.
* There exists exactly one Pythagorean triplet for which a + b + c = 1000.
* Find the product abc.
*/
//brute force does work but this is definitely not efficient
// For each pair (a, b) with a < b the third side is forced: c = 1000 - a - b,
// which drops the search from O(n^3) to O(n^2). The original `break` only left
// the innermost loop, so the (unique) triplet was found and printed twice.
for ($a = 1; $a < 1000; $a++) {
    for ($b = $a + 1; $b < 1000 - $a; $b++) {
        $c = 1000 - $a - $b;
        if ($a ** 2 + $b ** 2 == $c ** 2) {
            echo "a = $a b = $b c=$c";
            // The problem asks for the product abc.
            echo " product = " . ($a * $b * $c);
            break 2;
        }
    }
}
<file_sep>/P1-25/p_17.php
<?php
/**
* Created by PhpStorm.
* User: fedo
* Date: 6/8/2017
* Time: 10:37 AM
* If the numbers 1 to 5 are written out in words: one, two, three, four, five, then there are 3 + 3 + 5 + 4 + 4 = 19 letters used in total.
* If all the numbers from 1 to 1000 (one thousand) inclusive were written out in words, how many letters would be used?
* NOTE: Do not count spaces or hyphens. For example, 342 (three hundred and forty-two) contains 23 letters and 115 (one hundred and fifteen) contains 20 letters. The use of "and" when writing out numbers is in compliance with British usage.
*/
//using phps built in number format class made this very simple
// Let the intl extension spell each number out, then count the letters with
// spaces and hyphens stripped, per the problem statement.
$f = new NumberFormatter('en_GB', NumberFormatter::SPELLOUT);
$sum = 0;
foreach (range(1, 1000) as $i) {
    $letters = strlen(str_replace(array('-', ' '), '', $f->format($i)));
    // British usage inserts "and" (3 letters) for values like "one hundred and
    // fifteen" — i.e. anything over 100 that is not an exact multiple of 100.
    if ($i >= 101 && $i % 100 != 0) {
        $letters += 3;
    }
    $sum += $letters;
}
echo $sum;<file_sep>/P1-25/p_1.php
<?php
/**
* Created by PhpStorm.
* User: fedo
* Date: 5/12/2017
* Time: 2:57 PM
* If we list all the natural numbers below 10 that are multiples of 3 or 5, we get 3, 5, 6 and 9. The sum of these multiples is 23.
* Find the sum of all the multiples of 3 or 5 below 1000.
*/
$total = 0;
// Sum multiples of 3 or 5 strictly below 1000. The original loop ran to
// `<= 1000`, wrongly adding 1000 itself (a multiple of 5) to the total.
for ($i = 0; $i < 1000; $i++) {
    if ($i % 3 === 0 || $i % 5 === 0) {
        $total += $i;
    }
}
echo "Total is $total";<file_sep>/P1-25/p_5.php
<?php
/**
* Created by PhpStorm.
* User: fedo
* Date: 5/12/2017
* Time: 4:33 PM
* 2520 is the smallest number that can be divided by each of the numbers from 1 to 10 without any remainder.
* What is the smallest positive number that is evenly divisible by all of the numbers from 1 to 20?
*/
// The answer is lcm(1..20). Fold each n into the running lcm with
// lcm(a, n) = a / gcd(a, n) * n instead of scanning every even number,
// which turns millions of trial divisions into 19 gcd computations.
$small = 1;
for ($n = 2; $n <= 20; $n++) {
    // Euclid's algorithm: gcd($small, $n).
    $a = $small;
    $b = $n;
    while ($b != 0) {
        $t = $a % $b;
        $a = $b;
        $b = $t;
    }
    // Divide before multiplying so the intermediate stays an exact integer.
    $small = ($small / $a) * $n;
}
echo $small;<file_sep>/P1-25/p_12.php
<?php
/**
* Created by PhpStorm.
* User: fedo
* Date: 6/1/2017
* Time: 11:31 AM
* The sequence of triangle numbers is generated by adding the natural numbers. So the 7th triangle number would be 1 + 2 + 3 + 4 + 5 + 6 + 7 = 28. The first ten terms would be:
* 1, 3, 6, 10, 15, 21, 28, 36, 45, 55, ...
* Let us list the factors of the first seven triangle numbers:
* 1: 1
* 3: 1,3
* 6: 1,2,3,6
* 10: 1,2,5,10
* 15: 1,3,5,15
* 21: 1,3,7,21
* 28: 1,2,4,7,14,28
* We can see that 28 is the first triangle number to have over five divisors.
* What is the value of the first triangle number to have over five hundred divisors?
*/
class Triangle_Divisors
{
    /**
     * Find the first triangle number with MORE THAN $max_divisors divisors.
     *
     * Fixes over the previous version:
     *  - the stop condition was `< $max_divisors`, which stops at >= N rather
     *    than "over N" as the problem asks (off-by-one);
     *  - prime factors were taken from a fixed sieve up to 100000, so a triangle
     *    number with a larger prime factor made the factor loop spin forever;
     *  - find_primes() took a $start parameter it immediately overwrote.
     * Trial division up to sqrt(n) needs no precomputed prime list at all.
     *
     * @param int $max_divisors threshold the divisor count must exceed
     * @return int the first triangle number with more than $max_divisors divisors
     */
    public function find_triangle($max_divisors)
    {
        $tri_num = 0;
        $n = 0;
        while (true) {
            // The n-th triangle number is the previous one plus n.
            $n++;
            $tri_num += $n;
            if ($this->count_divisors($tri_num) > $max_divisors) {
                return $tri_num;
            }
        }
    }

    /**
     * Count the divisors of $num via its prime factorization:
     * if num = p1^e1 * p2^e2 * ..., the divisor count is (e1+1)(e2+1)...
     *
     * @param int $num positive integer
     * @return int number of divisors of $num
     */
    private function count_divisors($num)
    {
        $divisors = 1;
        $factor = 2;
        while ($factor * $factor <= $num) {
            if ($num % $factor == 0) {
                $exponent = 0;
                while ($num % $factor == 0) {
                    $num /= $factor;
                    $exponent++;
                }
                $divisors *= $exponent + 1;
            }
            $factor++;
        }
        // Whatever survives past sqrt is a single remaining prime factor.
        if ($num > 1) {
            $divisors *= 2;
        }
        return $divisors;
    }
}
$finder = new Triangle_Divisors();
echo $finder->find_triangle(500);
<file_sep>/P1-25/p_16.php
<?php
/**
* Created by PhpStorm.
* User: fedo
* Date: 6/7/2017
* Time: 8:18 AM
* 2^15 = 32768 and the sum of its digits is 3 + 2 + 7 + 6 + 8 = 26.
* What is the sum of the digits of the number 2^1000?
*/
//These kinds of problems are too easy in php
// PHP's `2 ** 1000` is a double with only ~16 correct significant digits, so
// the old str_split/number_format digit sum was computed on garbage. Keep the
// number as an array of decimal digits and double it 1000 times instead —
// exact integer arithmetic, no extensions required.
$digits = array(1); // least-significant digit first
for ($p = 0; $p < 1000; $p++) {
    $carry = 0;
    $len = count($digits);
    for ($d = 0; $d < $len; $d++) {
        $v = $digits[$d] * 2 + $carry;
        $digits[$d] = $v % 10;
        $carry = $v >= 10 ? 1 : 0; // doubling one digit carries at most 1
    }
    if ($carry) {
        $digits[] = 1;
    }
}
echo implode('', array_reverse($digits)); // the full value of 2^1000
$num = array_sum($digits);
echo $num;
<file_sep>/P1-25/p_10.php
<?php
/**
* Created by PhpStorm.
* User: fedo
* Date: 5/24/2017
* Time: 4:41 PM
* The sum of the primes below 10 is 2 + 3 + 5 + 7 = 17.
* Find the sum of all the primes below two million.
*/
/**
 * Sum all primes up to $max using the Sieve of Eratosthenes.
 *
 * Improvements: sieving only needs base values up to sqrt($max) (the old loop
 * ran every base up to $max, including composites), and crossing off can start
 * at $p * $p since smaller multiples were removed by smaller primes.
 *
 * @param int $max upper bound of the sieve (inclusive)
 * @return int sum of all primes <= $max
 */
function find_prime_sum($max = 2000000)
{
    // $is_prime[$n] tracks primality for every $n in 2..$max.
    $is_prime = array_fill(2, $max - 1, true);
    for ($p = 2; $p * $p <= $max; $p++) {
        if ($is_prime[$p]) {
            for ($multiple = $p * $p; $multiple <= $max; $multiple += $p) {
                $is_prime[$multiple] = false;
            }
        }
    }
    $sum = 0;
    foreach ($is_prime as $n => $prime) {
        if ($prime) {
            $sum += $n;
        }
    }
    return $sum;
}
echo find_prime_sum();
<file_sep>/P1-25/p_6.php
<?php
/**
* Created by PhpStorm.
* User: Fed
* Date: 5/13/2017
* Time: 11:48 AM
* The sum of the squares of the first ten natural numbers is,
* 12 + 22 + ... + 102 = 385
* The square of the sum of the first ten natural numbers is,
* (1 + 2 + ... + 10)2 = 552 = 3025
* Hence the difference between the sum of the squares of the first ten natural numbers and the square of the sum is 3025 − 385 = 2640.
* Find the difference between the sum of the squares of the first one hundred natural numbers and the square of the sum.
*/
// Accumulate both the sum of squares and the plain sum in one pass;
// the final line of the script prints (sum)^2 - (sum of squares).
// (Removed a leftover debug line that echoed `2 ** 2 . "<hr>"`.)
$sum_of_sqr = 0;
$sqr_of_sum = 0;
for ($i = 1; $i <= 100; $i++) {
    $sum_of_sqr += $i ** 2;
    $sqr_of_sum += $i;
}
echo ($sqr_of_sum ** 2) - $sum_of_sqr;<file_sep>/P1-25/p_3.php
<?php
/**
* Created by PhpStorm.
* User: fedo
* Date: 5/12/2017
* Time: 2:57 PM
* The prime factors of 13195 are 5, 7, 13 and 29.
* What is the largest prime factor of the number 600851475143 ?
* --------------------------------------------------------------
* Thoughts and notes:
*/
$number = 600851475143;
// Strip each factor out of $number as it is found; once no factor below
// sqrt of the remaining value divides it, what is left is itself prime
// and therefore the largest prime factor. (The input is odd, so trial
// division can start at 3.)
$factor = 3;
while ($factor < ceil(sqrt($number))) {
    if ($number % $factor != 0) {
        $factor++;
    } else {
        $number /= $factor;
    }
}
echo "largest is: $number";<file_sep>/P1-25/p_14.php
<?php
/**
* Created by PhpStorm.
* User: fedo
* Date: 6/2/2017
* Time: 2:47 PM
*/
// $solved_numbers[$n] memoizes the full Collatz chain length of $n.
// Two fixes: the seed `array(1)` stored key 0 (never hit) instead of the base
// case 1 => 1, and the memo-hit path added an extra +1, so every cached length
// was inflated — and the inflation compounded through later memo hits.
$solved_numbers = array(1 => 1);
for ($i = 2; $i < 1000000; $i++) {
    $steps = 0;
    $number = $i;
    // Walk the sequence until we land on a number whose length is known.
    while (!array_key_exists($number, $solved_numbers)) {
        if ($number % 2 == 0) {
            $number = $number / 2;
        } else {
            $number = $number * 3 + 1;
        }
        $steps++;
    }
    // Total length = steps taken here + the memoized tail length.
    $solved_numbers[$i] = $steps + $solved_numbers[$number];
}
echo array_search(max($solved_numbers), $solved_numbers);
<file_sep>/P1-25/p_4.php
<?php
/**
* Created by PhpStorm.
* User: fedo
* Date: 5/12/2017
* Time: 4:18 PM
* A palindromic number reads the same both ways. The largest palindrome made from the product of two 2-digit numbers is 9009 = 91 × 99.
* Find the largest palindrome made from the product of two 3-digit numbers.
*/
// Scan every product of two 3-digit numbers ($y starts at $x to skip the
// symmetric half) and keep the largest one that reads the same reversed.
$largest = 0;
for ($x = 999; $x > 99; $x--) {
    for ($y = $x; $y > 99; $y--) {
        $candidate = (string)($x * $y);
        if ($candidate === strrev($candidate) && $candidate > $largest) {
            $largest = $candidate;
        }
    }
}
echo "Largest is $largest";<file_sep>/P1-25/p_2.php
<?php
/**
* Created by PhpStorm.
* User: fedo
* Date: 5/12/2017
* Time: 2:57 PM
* Each new term in the Fibonacci sequence is generated by adding the previous two terms. By starting with 1 and 2, the first 10 terms will be:
*/
// Sum the even-valued Fibonacci terms not exceeding four million.
// The old code assigned `$buff2 = $buff1;` and then immediately overwrote it
// with `$buff2 = $total;`, so $buff1 was stuck at 1 and the "sequence" just
// counted up by one — it was never Fibonacci at all.
$buff1 = 1; // previous term
$buff2 = 2; // current term (the sequence here starts 1, 2, 3, 5, ...)
$sum = 0;
while ($buff2 <= 4000000) {
    if ($buff2 % 2 === 0) {
        $sum += $buff2;
    }
    $total = $buff1 + $buff2;
    $buff1 = $buff2;
    $buff2 = $total;
}
echo "Total is $sum";<file_sep>/P1-25/p_18.php
<?php
/**
* Created by PhpStorm.
* User: fedo
* Date: 6/8/2017
* Time: 11:07 AM
* By starting at the top of the triangle below and moving to adjacent numbers on the row below, the maximum total from top to bottom is 23.
* 3
* 7 4
* 2 4 6
* 8 5 9 3
* That is, 3 + 7 + 4 + 9 = 23.
* Find the maximum total from top to bottom of the triangle below:
* 75
* 95 64
* 17 47 82
* 18 35 87 10
* 20 04 82 47 65
* 19 01 23 75 03 34
* 88 02 77 73 07 63 67
* 99 65 04 28 06 16 70 92
* 41 41 26 56 83 40 80 70 33
* 41 48 72 33 47 32 37 16 94 29
* 53 71 44 65 25 43 91 52 97 51 14
* 70 11 33 28 77 73 17 78 39 68 17 57
* 91 71 52 38 17 14 91 43 58 50 27 29 48
* 63 66 04 68 89 53 67 30 73 16 69 87 40 31
* 04 62 98 27 23 09 70 98 73 93 38 53 60 04 23
*/
$tri = array(
array(75),
array(95, 64),
array(17, 47, 82),
array(18, 35, 87, 10),
array(20, 4, 82, 47, 65),
array(19, 1, 23, 75, 3, 34),
array(88, 2, 77, 73, 7, 63, 67),
array(99, 65, 4, 28, 6, 16, 70, 92),
array(41, 41, 26, 56, 83, 40, 80, 70, 33),
array(41, 48, 72, 33, 47, 32, 37, 16, 94, 29),
array(53, 71, 44, 65, 25, 43, 91, 52, 97, 51, 14),
array(70, 11, 33, 28, 77, 73, 17, 78, 39, 68, 17, 57),
array(91, 71, 52, 38, 17, 14, 91, 43, 58, 50, 27, 29, 48),
array(63, 66, 4, 68, 89, 53, 67, 30, 73, 16, 69, 87, 40, 31),
array(4, 62, 98, 27, 23, 9, 70, 98, 73, 93, 38, 53, 60, 4, 23),
);
//to solve this i used "dynamic programming"
//instead of brute force this method solved the
//problem one layer at a time
// Dynamic programming from the base upward: fold the better of each pair of
// children into the parent one row at a time; the apex ends up holding the
// best path total. (On the last row the inner loop has zero iterations, so
// the $row + 1 write never goes out of bounds.)
$tri = array_reverse($tri);
$rows = count($tri);
for ($row = 0; $row < $rows; $row++) {
    $pairs = count($tri[$row]) - 1;
    for ($col = 0; $col < $pairs; $col++) {
        $tri[$row + 1][$col] += max($tri[$row][$col], $tri[$row][$col + 1]);
    }
}
$awn = end($tri);
echo $awn[0];
<file_sep>/P1-25/p_19.php
<?php
/**
* Created by PhpStorm.
* User: fedo
* Date: 6/9/2017
* Time: 3:07 PM
* You are given the following information, but you may prefer to do some research for yourself.
* 1 Jan 1900 was a Monday.
* Thirty days has September,
* April, June and November.
* All the rest have thirty-one,
* Saving February alone,
* Which has twenty-eight, rain or shine.
* And on leap years, twenty-nine.
* A leap year occurs on any year evenly divisible by 4, but not on a century unless it is divisible by 400.
* How many Sundays fell on the first of the month during the twentieth century (1 Jan 1901 to 31 Dec 2000)?
*/
//php i <3 u
//too easy
$sum_sundays = 0;
// Step through the first day of every month of the 20th century as Unix
// timestamps and count how many fall on a Sunday.
$stamp = strtotime('1901-01-01');
$limit = strtotime('2001-01-01');
while ($stamp < $limit) {
    // date('N'): ISO-8601 weekday number, 7 = Sunday.
    if (date('N', $stamp) == 7) {
        $sum_sundays++;
    }
    $stamp = strtotime('+1 month', $stamp);
}
echo $sum_sundays;<file_sep>/P1-25/p_7.php
<?php
/**
* Created by PhpStorm.
* User: Fed
* Date: 5/13/2017
* Time: 11:57 AM
* By listing the first six prime numbers: 2, 3, 5, 7, 11, and 13, we can see that the 6th prime is 13.
* What is the 10 001st prime number?
* not the fastest solution, but it is correct
*/
//i didn't want to remove this solution, but a much more efficient way to solve this would be using the
//Sieve of Eratosthenes. I did this in problem 10 and it was much quicker
/**
 * Primality test by trial division.
 *
 * Only divisors up to sqrt($int) need checking: any factorization pairs a
 * small divisor with a large one. The old version scanned every value below
 * $int (very slow) and wrongly reported 0, 1 and even composites such as 4
 * as prime because it started dividing at 3.
 */
function is_prime($int)
{
    if ($int < 2) {
        return false;
    }
    for ($i = 2; $i * $i <= $int; $i++) {
        if ($int % $i == 0) {
            return false;
        }
    }
    return true;
}
// BUG FIX: the loop previously ran while ($num != 1000000), but $num only
// ever takes odd values, so the condition never became false (infinite loop)
// and the 10 001 target was never consulted at all.
// Count primes instead: 2 is counted up front, then only odd candidates are
// tested; when the counter reaches 10 001, $num is the 10 001st prime.
$prime_count = 1;
$num = 1;
while ($prime_count < 10001) {
    $num += 2;
    if (is_prime($num)) {
        $prime_count++;
    }
}
echo $num;
<file_sep>/P1-25/p_21.php
<?php
/**
* Created by PhpStorm.
* User: fedo
* Date: 6/30/2017
* Time: 10:09 AM
* Let d(n) be defined as the sum of proper divisors of n (numbers less than n which divide evenly into n).
* If d(a) = b and d(b) = a, where a ≠ b, then a and b are an amicable pair and each of a and b are called amicable numbers.
* For example, the proper divisors of 220 are 1, 2, 4, 5, 10, 11, 20, 22, 44, 55 and 110; therefore d(220) = 284. The proper divisors of 284 are 1, 2, 4, 71 and 142; so d(284) = 220.
* Evaluate the sum of all the amicable numbers under 10000.
*/
/**
 * Sum of proper divisors d($n), computed in O(sqrt n) by pairing each
 * divisor $i with its cofactor $n / $i.
 */
function sum_proper_divisors($n)
{
    if ($n < 2) {
        return 0;
    }
    $sum = 1; // 1 divides everything
    for ($i = 2; $i * $i <= $n; $i++) {
        if ($n % $i == 0) {
            $sum += $i;
            if ($i != $n / $i) {
                $sum += $n / $i;
            }
        }
    }
    return $sum;
}
// An amicable pair satisfies d(a) = b and d(b) = a with a != b.
// BUG FIX: the previous version only scanned to 1000 (problem asks 10000),
// tested the meaningless condition ($item % $i == 0) instead of comparing
// divisor sums, and never accumulated a result.
$amicable_sum = 0;
for ($a = 2; $a < 10000; $a++) {
    $b = sum_proper_divisors($a);
    if ($b != $a && sum_proper_divisors($b) == $a) {
        $amicable_sum += $a;
    }
}
echo "Sum of amicable numbers below 10000: $amicable_sum \n";
echo "end"; | bc3a61751526ade468a5f458d0c5e3a16b5b4433 | [
"PHP"
] | 17 | PHP | joshfedo/projecteuler | 73799403a44f58944652556a1bb055587c29b981 | 279d482e7fb963a07616f33c01a13d59f3a23ff8 |
refs/heads/final | <file_sep>import Vue from 'vue'
import Router from 'vue-router'
import { normalizeURL, decode } from 'ufo'
import { interopDefault } from './utils'
import scrollBehavior from './router.scrollBehavior.js'
const _5abb9f0a = () => interopDefault(import('..\\pages\\post\\index.vue' /* webpackChunkName: "pages/post/index" */))
const _27876672 = () => interopDefault(import('..\\pages\\profile.vue' /* webpackChunkName: "pages/profile" */))
const _7f5496df = () => interopDefault(import('..\\pages\\signup.vue' /* webpackChunkName: "pages/signup" */))
const _1d42516d = () => interopDefault(import('..\\pages\\hashtag\\_id\\index.vue' /* webpackChunkName: "pages/hashtag/_id/index" */))
const _1c6815a3 = () => interopDefault(import('..\\pages\\post\\_id.vue' /* webpackChunkName: "pages/post/_id" */))
const _cd0662ac = () => interopDefault(import('..\\pages\\user\\_id\\index.vue' /* webpackChunkName: "pages/user/_id/index" */))
const _a8cb7d4a = () => interopDefault(import('..\\pages\\index.vue' /* webpackChunkName: "pages/index" */))
const emptyFn = () => {}
Vue.use(Router)
export const routerOptions = {
mode: 'history',
base: '/',
linkActiveClass: 'nuxt-link-active',
linkExactActiveClass: 'nuxt-link-exact-active',
scrollBehavior,
routes: [{
path: "/post",
component: _5abb9f0a,
name: "post"
}, {
path: "/profile",
component: _27876672,
name: "profile"
}, {
path: "/signup",
component: _7f5496df,
name: "signup"
}, {
path: "/hashtag/:id",
component: _1d42516d,
name: "hashtag-id"
}, {
path: "/post/:id",
component: _1c6815a3,
name: "post-id"
}, {
path: "/user/:id",
component: _cd0662ac,
name: "user-id"
}, {
path: "/",
component: _a8cb7d4a,
name: "index"
}],
fallback: false
}
// NOTE: this file is auto-generated by Nuxt from the pages/ directory —
// edit the pages, not this file, or changes will be lost on the next build.
export function createRouter (ssrContext, config) {
  // Runtime config may override the router base (app served under a prefix).
  const base = (config.app && config.app.basePath) || routerOptions.base
  const router = new Router({ ...routerOptions, base })
  // TODO: remove in Nuxt 3
  // Compatibility shim: always pass an onComplete callback to router.push —
  // presumably to keep aborted navigations from rejecting the returned
  // promise (confirm against the Nuxt source).
  const originalPush = router.push
  router.push = function push (location, onComplete = emptyFn, onAbort) {
    return originalPush.call(this, location, onComplete, onAbort)
  }
  // Normalize string locations (encoding/slashes via ufo) before resolving.
  const resolve = router.resolve.bind(router)
  router.resolve = (to, current, append) => {
    if (typeof to === 'string') {
      to = normalizeURL(to)
    }
    return resolve(to, current, append)
  }
  return router
}
<file_sep>export const state=()=>({
me:null,
other:null, //남의정보
// followerList:[{
// id:1,
// nickname:'roen'
// },{
// id:2,
// nickname:'네로'
// },{
// id:3,
// nickname:'히어로'
// }],
// followingList:[{
// id:1,
// nickname:'roen'
// },{
// id:2,
// nickname:'네로'
// },{
// id:3,
// nickname:'히어로'
// }],
followerList:[],
followingList:[],
hasMoreFollower:true,
hasMoreFollowing:true,
})
// NOTE(review): these two counts look like leftovers from the mocked lists
// above; nothing in this module reads them. Kept so no caller can break.
const totalFollowers = 8;
const totalFollowings = 6;
// Page size used by the follower/following "load more" endpoints.
const limit = 3;
export const mutations = {
  // Store my own profile (null when logged out).
  setMe(state, payload) {
    state.me = payload;
  },
  // Store another user's profile.
  setOther(state, payload) {
    state.other = payload;
  },
  changeNickname(state, payload) {
    state.me.nickname = payload.nickname;
  },
  // Remove a user both from the id list on `me` and the detailed list.
  removeFollower(state, payload) {
    let index = state.me.Followers.findIndex(v => v.id === payload.userId);
    state.me.Followers.splice(index, 1);
    // BUG FIX: this previously searched for `payload.id`, which is never set
    // (actions commit `{ userId }`), so findIndex returned -1 and
    // splice(-1, 1) silently removed the *last* follower instead.
    index = state.followerList.findIndex(v => v.id === payload.userId);
    state.followerList.splice(index, 1);
  },
  removeFollowing(state, payload) {
    let index = state.me.Followings.findIndex(v => v.id === payload.userId);
    state.me.Followings.splice(index, 1);
    // BUG FIX: the index was previously looked up in `followerList` but used
    // to splice `followingList`, removing an unrelated entry.
    index = state.followingList.findIndex(v => v.id === payload.userId);
    state.followingList.splice(index, 1);
  },
  // Replace (offset 0) or append one page of followings; a short page means
  // there is nothing more to load.
  loadFollowings(state, payload) {
    if (payload.offset === 0) {
      state.followingList = payload.data;
    } else {
      state.followingList = state.followingList.concat(payload.data);
    }
    state.hasMoreFollowing = payload.data.length === limit;
  },
  loadFollowers(state, payload) {
    if (payload.offset === 0) {
      state.followerList = payload.data;
    } else {
      state.followerList = state.followerList.concat(payload.data);
    }
    state.hasMoreFollower = payload.data.length === limit;
  },
  // Record that I now follow `userId`.
  following(state, payload) {
    state.me.Followings.push({ id: payload.userId });
  },
}
// 비동기처리는 actions에서한다
// Asynchronous work (API calls) lives in actions; mutations stay synchronous.
// The first argument is the Vuex context ({ commit, dispatch, state, ... }).
export const actions = {
  // Fetch my own profile using the session cookie.
  async loadUser({ commit }) {
    try {
      const res = await this.$axios.get('/user', {
        withCredentials: true,
      });
      commit('setMe', res.data);
    } catch (err) {
      console.error(err);
    }
  },
  // Fetch another user's profile.
  async loadOther({ commit }, payload) {
    try {
      const res = await this.$axios.get(`/user/${payload.userId}`, {
        withCredentials: true,
      });
      commit('setOther', res.data);
    } catch (err) {
      console.error(err);
    }
  },
  async signUp({ commit }, payload) {
    try {
      const res = await this.$axios.post('/user', {
        userId: payload.userId,
        nickname: payload.nickname,
        password: payload.password,
      }, {
        withCredentials: true,
      });
      commit('setMe', res.data);
    } catch (err) {
      console.error(err);
    }
  },
  async login({ commit }, payload) {
    try {
      const res = await this.$axios.post('/user/login', {
        userId: payload.userId,
        password: payload.password,
      }, {
        withCredentials: true,
      });
      commit('setMe', res.data);
    } catch (err) {
      console.error(err);
    }
  },
  async logout({ commit }) {
    try {
      await this.$axios.post('/user/logout', {}, {
        withCredentials: true,
      });
      commit('setMe', null);
    } catch (err) {
      console.error(err);
    }
  },
  async changeNickname({ commit }, payload) {
    try {
      await this.$axios.patch('/user/nickname', {
        nickname: payload.nickname,
      }, {
        withCredentials: true,
      });
      commit('changeNickname', payload);
    } catch (err) {
      console.error(err);
    }
  },
  // BUG FIX: these two previously destructured only `commit` from the context
  // and then referenced the undefined local `state`, which threw at runtime.
  // NOTE(review): the action keeps the original 'addFollwing' (sic) name
  // because components may already dispatch it under that spelling.
  addFollwing({ state }, payload) {
    state.followingList.push(payload);
  },
  addFollower({ state }, payload) {
    state.followerList.push(payload);
  },
  // Load one page of my followers; `payload.offset === 0` restarts paging.
  // Returns the request promise so callers can await completion.
  async loadFollowers({ commit, state }, payload) {
    if (!(payload && payload.offset === 0) && !state.hasMoreFollower) {
      return;
    }
    const offset = (payload && payload.offset === 0) ? 0 : state.followerList.length;
    try {
      const res = await this.$axios.get(`/user/${state.me.id}/followers?limit=3&offset=${offset}`, {
        withCredentials: true,
      });
      commit('loadFollowers', {
        data: res.data,
        offset,
      });
    } catch (err) {
      console.error(err);
    }
  },
  async loadFollowings({ commit, state }, payload) {
    if (!(payload && payload.offset === 0) && !state.hasMoreFollowing) {
      return;
    }
    const offset = (payload && payload.offset === 0) ? 0 : state.followingList.length;
    try {
      const res = await this.$axios.get(`/user/${state.me.id}/followings?limit=3&offset=${offset}`, {
        withCredentials: true,
      });
      commit('loadFollowings', {
        data: res.data,
        offset,
      });
    } catch (err) {
      console.error(err);
    }
  },
  async follow({ commit }, payload) {
    try {
      await this.$axios.post(`/user/${payload.userId}/follow`, {}, {
        withCredentials: true,
      });
      commit('following', {
        userId: payload.userId,
      });
    } catch (err) {
      console.error(err);
    }
  },
  async unfollow({ commit }, payload) {
    try {
      await this.$axios.delete(`/user/${payload.userId}/follow`, {
        withCredentials: true,
      });
      commit('removeFollowing', {
        userId: payload.userId,
      });
    } catch (err) {
      console.error(err);
    }
  },
  async removeFollower({ commit }, payload) {
    try {
      await this.$axios.delete(`/user/${payload.userId}/follower`, {
        withCredentials: true,
      });
      commit('removeFollower', {
        userId: payload.userId,
      });
    } catch (err) {
      console.error(err);
    }
  },
}
// context안에 commit,dispatch,state,rootState,getters,rootGetters<file_sep>const express = require('express');
const cors = require('cors');
const bcrypt = require('bcrypt');
const passport = require('passport');
const session = require('express-session');
const cookie = require('cookie-parser');
const morgan = require('morgan');
const db = require('./models');
const passportConfig = require('./passport');
const usersRouter=require('./routes/user')
const postRouter=require('./routes/post')
const postsRouter=require('./routes/posts')
const hashtagRouter=require('./routes/hastag')
const app = express();
db.sequelize.sync();
passportConfig();

app.use(morgan('dev'));
// Allow the front-end dev server to call this API with session cookies.
// A single whitelisted origin — wildcard CORS would be unsafe here.
app.use(cors({
  origin: 'http://localhost:3080',
  credentials: true,
}));
// Serve uploaded images statically so the front end can reference them.
app.use('/', express.static('uploads'));
app.use(express.json());
app.use(express.urlencoded({ extended: false }));
app.use(cookie('cookiesecret'));
app.use(session({
  resave: false,
  saveUninitialized: false,
  secret: 'cookiesecret',
  cookie: {
    httpOnly: true,
    secure: false, // no HTTPS in development
  },
}));
app.use(passport.initialize());
app.use(passport.session());

// Smoke-test route.
app.get('/', (req, res) => {
  res.status(200).send('안녕 dd');
});

app.use('/user', usersRouter);
app.use('/post', postRouter);
app.use('/posts', postsRouter);
app.use('/hashtag', hashtagRouter);
app.listen(4000,()=>{
console.log(`백엔드 서버 ${3090}번 포트에서 작동중`)
})<file_sep>const passport= require("passport");
const db = require("../models");
const local=require('./local')
module.exports = () => {
  // Keep the session cookie small: store only the user id.
  passport.serializeUser((user, done) => {
    return done(null, user.id)
  });
  // Runs on every request once logged in: rebuild the user from the stored
  // id and attach it as req.user (making req.isAuthenticated() true).
  // Worth caching eventually instead of hitting the DB on each request.
  passport.deserializeUser(async (id, done) => {
    try {
      const user = await db.User.findOne({
        where: { id },
        attributes: ['id', 'nickname'],
        include: [{
          model: db.Post,
          attributes: ['id'],
        }, {
          model: db.User,
          as: 'Followings',
          attributes: ['id'],
        }, {
          model: db.User,
          as: 'Followers',
          attributes: ['id'],
        }],
      })
      return done(null, user)
    } catch (error) {
      console.error(error)
      return done(error)
    }
  });
  local();
}<file_sep>const passport = require("passport");
const bcrypt=require('bcrypt')
const db=require("../models")
const {Strategy:LocalStrategy} =require('passport-local');
module.exports = () => {
  passport.use(new LocalStrategy({
    usernameField: 'userId',   // read from req.body.userId
    passwordField: 'password', // read from req.body.password
  }, async (userId, password, done) => {
    // done(error, user, info): error for server failures, `false` plus a
    // reason object for authentication failures.
    try {
      const exUser = await db.User.findOne({ where: { userId } });
      if (!exUser) {
        return done(null, false, { reason: '존재하지 않는 사용자입니다' });
      }
      const result = await bcrypt.compare(password, exUser.password);
      if (result) {
        return done(null, exUser);
      }
      return done(null, false, { reason: '비밀번호가 틀립니다' });
    } catch (error) {
      console.error(error);
      return done(error);
    }
  }))
}<file_sep>export default function({store,redirect}){
if(store.state.users.me){
redirect('/');
}
}<file_sep>module.exports={
head:{
title:'NodeBird'
},
modules:[
'@nuxtjs/axios'
],
buildModules: [
'@nuxtjs/vuetify',
'@nuxtjs/moment'
],
moment:{
locales:['ko']
},
vuetify: {
/* module options */
},
axios: {
browserBaseURL: 'http://localhost:4000',
baseURL: 'http://localhost:4000',
https: false,
},
// server:{
// port:3081,
// }
}<file_sep>
module.exports=(sequelize,DataTypes)=>{
const Post=sequelize.define('Post',{ //대문자에단수형으로만히씀 테이블명은 posts
content:{
type:DataTypes.TEXT,//매우긴글 제한이없다
allowNull:false,
//cereatAt updatedAt 자동생성
},
},{
charset:'utf8mb4', //mp4는 이모티콘때메
collate:'utf8mb4_general_ci'
});
Post.associate=(db)=>{
//여기서는관계?쓰는듯
db.Post.belongsTo(db.User); //UserId도 추가된다 addUser addusers
db.Post.hasMany(db.Comment); // addComments addComment 둘다존재 헤즈메니랑 빌롱스투 두개만
db.Post.hasMany(db.Image);
db.Post.belongsToMany(db.User,{through:"Like", as:'Likers'})
db.Post.belongsToMany(db.Hashtag,{through:"PostHashtag"})
db.Post.belongsTo(db.Post, {as:'Retweet'});//PostId 생긴다 하지만 리트윗아이디가 생기길원한다
};
return Post;
} | 763a0f68ec8127c7e0f7390afd924cc997959c85 | [
"JavaScript"
] | 8 | JavaScript | Roen77/vbird | 0915ffbc4c247614091f71cdd95bfb88b902bb45 | e5ca45449957039d192fdc878442cf9825692218 |
refs/heads/main | <repo_name>ghimici-laura-30125/ISP-lab11-12<file_sep>/c3-samples/src/main/java/aut/utcluj/isp/ex4/Employee.java
package aut.utcluj.isp.ex4;
/**
* @author stefan
*/
public class Employee {
private String firstName;
private String lastName;
private String cnp;
private SalaryInfo employeeSalaryInfo;
public Employee(String firstName, String lastName, String cnp, Double monthlyRevenue) {
throw new UnsupportedOperationException("Not supported yet.");
}
public String getFirstName() {
return firstName;
}
public String getLastName() {
return lastName;
}
public String getCnp() {
return cnp;
}
/**
* Add salary to the employee
*/
public void addSalary(double newSalary) {
throw new UnsupportedOperationException("Not supported yet.");
}
/**
* Add money as bonus to the employee
* Value added should be positive
*
* @param money - money to be added
*/
public void addMoney(final Double money) {
throw new UnsupportedOperationException("Not supported yet.");
}
/**
* Pay tax from salary
*
* @param tax - tax to be paid
*/
public void payTax(final Double tax) {
throw new UnsupportedOperationException("Not supported yet.");
}
/**
* Get salary info
*
* @return salary info
*/
public SalaryInfo getSalaryInfo() {
throw new UnsupportedOperationException("Not supported yet.");
}
}
<file_sep>/c3-samples/src/main/java/aut/utcluj/isp/ex1/Person.java
package aut.utcluj.isp.ex1;
public class Person {
private final String firstName;
private final String lastName;
public Person(String firstName) {
this.firstName = firstName;
<<<<<<< HEAD
this.lastName="";
=======
this.lastName = "";
>>>>>>> ba66fb5c13c97dfb649007abd54f88bd9e581356
}
public Person(String firstName, String lastName) {
this.firstName = firstName;
<<<<<<< HEAD
this.lastName= lastName;
=======
this.lastName = lastName;
>>>>>>> ba66fb5c13c97dfb649007abd54f88bd9e581356
}
public String getFirstName() {
return firstName;
}
public String getLastName() {
return lastName;
}
}
<file_sep>/c3-samples/src/test/java/aut/utcluj/isp/ex3/EmployeeControllerTest.java
package aut.utcluj.isp.ex3;
import org.junit.Test;
import java.util.List;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
/**
* @author stefan
*/
public class EmployeeControllerTest {
@Test
public void testAddEmployee() {
final Employee employee = new Employee("Harvey", "Specter", 2000d, "1861216244480");
final EmployeeController employeeController = new EmployeeController();
employeeController.addEmployee(employee);
assertEquals("Number of created employees to be one", 1, employeeController.getNumberOfEmployees());
}
@Test
public void testGetEmployeeByCnp() {
final Employee employee = new Employee("Harvey", "Specter", 2000d, "1861216244480");
final EmployeeController employeeController = new EmployeeController();
employeeController.addEmployee(employee);
employeeController.addEmployee(new Employee("John", "Wick", 2000d, "1851216244480"));
final Employee notFoundEmployee = employeeController.getEmployeeByCnp("1861216244481");
assertNull("No employee should be found", notFoundEmployee);
final Employee foundEmployee = employeeController.getEmployeeByCnp("1861216244480");
assertNotNull("Employee should be found", foundEmployee);
assertEquals("Employee information should be the same", employee, foundEmployee);
}
@Test
public void testUpdateEmployeeSalaryByCnp() {
final EmployeeController employeeController = new EmployeeController();
employeeController.addEmployee(new Employee("Harvey", "Specter", 2000d, "1861216244480"));
employeeController.addEmployee(new Employee("John", "Wick", 3000d, "1851216244480"));
final Employee notFoundEmployee = employeeController.updateEmployeeSalaryByCnp("1861216244481", 100d);
assertNull("No employee should be updated", notFoundEmployee);
final Employee updatedEmployee = employeeController.updateEmployeeSalaryByCnp("1861216244480", 300d);
assertNotNull("Updated employee should not be null", updatedEmployee);
assertEquals("CNP should be the same", "1861216244480", updatedEmployee.getCnp());
assertEquals("Salary should be the new one", Double.valueOf(300d), updatedEmployee.getSalary());
}
@Test
public void testDeleteEmployeeByCnp() {
final Employee employee = new Employee("Harvey", "Specter", 2000d, "1861216244480");
final EmployeeController employeeController = new EmployeeController();
employeeController.addEmployee(new Employee("John", "Wick", 2000d, "1851216244480"));
employeeController.addEmployee(employee);
assertEquals("Two employees should be found", 2, employeeController.getNumberOfEmployees());
final Employee notFoundEmployee = employeeController.deleteEmployeeByCnp("2851216244480");
assertNull("Deleted employee should be null", notFoundEmployee);
final String cnpToBeDeleted = "1851216244480";
final Employee deletedEmployee = employeeController.deleteEmployeeByCnp(cnpToBeDeleted);
assertNotNull("Deleted employee should not be null", deletedEmployee);
assertEquals("One employee remained", 1, employeeController.getNumberOfEmployees());
assertNull("Employee should not be found anymore", employeeController.getEmployeeByCnp(cnpToBeDeleted));
}
@Test
public void testGetEmployees() {
final EmployeeController employeeController = new EmployeeController();
employeeController.addEmployee(new Employee("Harvey", "Specter", 2000d, "1861216244480"));
employeeController.addEmployee(new Employee("John", "Wick", 3000d, "1851216244480"));
final List<Employee> employees = employeeController.getEmployees();
assertEquals("Two employees should be found", 2, employees.size());
assertEquals("First employee name should be 'Harvey'", "Harvey", employees.get(0).getFirstName());
assertEquals("First employee last name should be 'Specter'", "Specter", employees.get(0).getLastName());
assertEquals("First employee salary should be 2000", Double.valueOf(2000), employees.get(0).getSalary());
assertEquals("Second employee name should be 'Harvey'", "John", employees.get(1).getFirstName());
assertEquals("Second employee last name should be 'Specter'", "Wick", employees.get(1).getLastName());
assertEquals("Second employee salary should be 2000", Double.valueOf(3000), employees.get(1).getSalary());
}
}<file_sep>/c3-samples/src/test/java/aut/utcluj/isp/ex4/SalaryInfoTest.java
package aut.utcluj.isp.ex4;
import org.junit.Test;
import static org.junit.Assert.*;
public class SalaryInfoTest {
@Test
public void testCreate() {
final SalaryInfo salaryInfo = new SalaryInfo(300d);
assertNotNull("Salary object cannot be null", salaryInfo);
assertEquals("Revenue is 0", Double.valueOf(0d), salaryInfo.getTotalRevenue());
assertEquals("Monthly revenue is 300", Double.valueOf(300d), salaryInfo.getMonthlyRevenue());
}
@Test(expected = NegativeAmountException.class)
public void testCreateShouldThrowExceptionWhenMonthlyRevenueIsNegative() {
new SalaryInfo(-300d);
}
@Test
public void testAddSalary() {
final SalaryInfo salaryInfo = new SalaryInfo(300d);
assertNotNull("Salary object cannot be null", salaryInfo);
assertEquals("Revenue is 0", Double.valueOf(0d), salaryInfo.getTotalRevenue());
assertEquals("Monthly revenue is 300", Double.valueOf(300d), salaryInfo.getMonthlyRevenue());
salaryInfo.addSalary();
assertEquals("Revenue is 300", Double.valueOf(300d), salaryInfo.getTotalRevenue());
salaryInfo.addSalary();
assertEquals("Revenue is 600", Double.valueOf(600d), salaryInfo.getTotalRevenue());
}
@Test
public void testAddMoney() {
final SalaryInfo salaryInfo = new SalaryInfo(300d);
assertNotNull("Salary object cannot be null", salaryInfo);
assertEquals("Revenue is 0", Double.valueOf(0d), salaryInfo.getTotalRevenue());
assertEquals("Monthly revenue is 300", Double.valueOf(300d), salaryInfo.getMonthlyRevenue());
salaryInfo.addMoney(100d);
assertEquals("Revenue is 100", Double.valueOf(100d), salaryInfo.getTotalRevenue());
}
@Test(expected = NegativeAmountException.class)
public void testAddMoneyShouldThrowExceptionWhenValueIsNegative() {
final SalaryInfo salaryInfo = new SalaryInfo(300d);
assertNotNull("Salary object cannot be null", salaryInfo);
assertEquals("Revenue is 0", Double.valueOf(0d), salaryInfo.getTotalRevenue());
assertEquals("Monthly revenue is 300", Double.valueOf(300d), salaryInfo.getMonthlyRevenue());
salaryInfo.addMoney(-100d);
}
@Test
public void testPayTax() {
final SalaryInfo salaryInfo = new SalaryInfo(300d);
salaryInfo.addSalary();
assertEquals("Revenue is 300", Double.valueOf(300d), salaryInfo.getTotalRevenue());
salaryInfo.payTax(100d);
assertEquals("Revenue is 200", Double.valueOf(200d), salaryInfo.getTotalRevenue());
}
@Test(expected = NegativeAmountException.class)
public void testPayTaxShouldThrowExceptionWhenTaxIsNegative() {
final SalaryInfo salaryInfo = new SalaryInfo(300d);
salaryInfo.payTax(-100d);
}
@Test(expected = NegativeBalanceException.class)
public void testPayTaxShouldThrowExceptionWhenNoEnoughMoneyFound() {
final SalaryInfo salaryInfo = new SalaryInfo(300d);
salaryInfo.payTax(100d);
}
}<file_sep>/c3-samples/src/main/java/aut/utcluj/isp/ex3/EmployeeController.java
package aut.utcluj.isp.ex3;
import java.util.ArrayList;
import java.util.List;
/**
* @author stefan
*/
public class EmployeeController {
/**
* Add new employee to the list of employees
*
* @param employee - employee information
*/
List <Employee> employees = new ArrayList<>();
public void addEmployee(final Employee employee) {
employees.add(new Employee());
}
/**
* Get employee by cnp
*
* @param cnp - unique cnp
* @return found employee or null if not found
*/
public Employee getEmployeeByCnp(final String cnp) {
for(Employee empl:employees)
if(empl.getCnp().equals(cnp))
return empl;
return null;
}
/**
* Update employee salary by cnp
*
* @param cnp - unique cnp
* @param salary - salary
* @return updated employee
*/
public Employee updateEmployeeSalaryByCnp(final String cnp, final Double salary) {
for(Employee empl:employees)
if(empl.getCnp().equals(cnp))
{ empl.setSalary(salary);
return empl;}
return null;
}
/**
* Delete employee by cnp
*
* @param cnp - unique cnp
* @return deleted employee or null if not found
*/
public Employee deleteEmployeeByCnp(final String cnp) {
for(Employee empl:employees)
if(empl.getCnp().equals(cnp))
{employees.remove(empl);
return empl;
}
return null;
}
/**
* Return current list of employees
*
* @return current list of employees
*/
public List<Employee> getEmployees() {
return employees;
}
/**
* Get number of employees
*
* @return - number of registered employees
*/
public int getNumberOfEmployees() {
return employees.size();
}
}
<file_sep>/c3-samples/src/main/java/aut/utcluj/isp/ex3/Employee.java
package aut.utcluj.isp.ex3;
/**
* @author stefan
*/
public class Employee {
private String firstName;
private String lastName;
private Double salary;
private String cnp;
public Employee(String firstName, String lastName, Double salary, String cnp) {
}
Employee() {
throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
}
public String getFirstName() {
return firstName;
}
public String getLastName() {
return lastName;
}
public Double getSalary() {
return salary;
}
public String getCnp() {
return cnp;
}
public void setSalary(double salary){
this.salary=salary;
}
}
<file_sep>/README.md
This is the repository for ISP Group Project<file_sep>/c3-samples/src/main/java/aut/utcluj/isp/ex4/NegativeAmountException.java
package aut.utcluj.isp.ex4;
/**
* @author stefan
*/
public class NegativeAmountException extends Exception {
}
<file_sep>/c3-samples/src/main/java/aut/utcluj/isp/ex4/IdentityManager.java
package aut.utcluj.isp.ex4;
/**
* @author stefan
*/
public interface IdentityManager {
/**
* Return identity details
* @return identity details (e.g: for employee: firstname_lastname_cnp)
*/
String getIdentity();
}
| b038fe8a6ad4df8bb26d5ff28f6bf6e1c9b8440c | [
"Markdown",
"Java"
] | 9 | Java | ghimici-laura-30125/ISP-lab11-12 | cc014ca99b1499c23b3925335509c1dc24ab9576 | 3d2db69e531bfd488892b31a87aae77de8b758e4 |
refs/heads/master | <file_sep>from torch.nn import Conv2d
from torch.nn.functional import conv2d, conv_transpose2d
from backpack.core.derivatives.basederivatives import BaseParameterDerivatives
from backpack.utils import conv as convUtils
from backpack.utils.ein import eingroup, einsum
class Conv2DDerivatives(BaseParameterDerivatives):
    """Partial derivatives of ``torch.nn.Conv2d``.
    Provides Jacobian-(transposed-)matrix products of a 2d convolution with
    respect to its input, weight, and bias. The ``mat`` arguments carry a
    leading vectorization axis V in front of the usual [N, C, H, W] layout.
    """
    def get_module(self):
        # Layer type this derivative class is responsible for.
        return Conv2d
    def hessian_is_zero(self):
        # Convolution is linear in its input, so the module Hessian vanishes.
        return True
    def get_unfolded_input(self, module):
        # im2col view of the stored forward input: one column per patch.
        return convUtils.unfold_func(module)(module.input0)
    # TODO: Require tests
    def ea_jac_t_mat_jac_prod(self, module, g_inp, g_out, mat):
        """Expectation-approximation product J^T mat J over flat features.
        NOTE(review): ``self.__apply_jacobian_t_of`` is not defined in this
        class, and because of Python name mangling it cannot be supplied by
        the base class either — calling this method should raise
        ``AttributeError``. Confirm before relying on it (see TODO above).
        """
        _, in_c, in_x, in_y = module.input0.size()
        in_features = in_c * in_x * in_y
        _, out_c, out_x, out_y = module.output.size()
        out_features = out_c * out_x * out_y
        # 1) apply conv_transpose to multiply with W^T
        result = mat.view(out_c, out_x, out_y, out_features)
        result = einsum("cxyf->fcxy", (result,))
        # result: W^T mat
        result = self.__apply_jacobian_t_of(module, result).view(
            out_features, in_features
        )
        # 2) transpose: mat^T W
        result = result.t()
        # 3) apply conv_transpose
        result = result.view(in_features, out_c, out_x, out_y)
        result = self.__apply_jacobian_t_of(module, result)
        # 4) transpose to obtain W^T mat W
        return result.view(in_features, in_features).t()
    def _jac_mat_prod(self, module, g_inp, g_out, mat):
        # Input Jacobian product: fold V into the batch axis and push the
        # stacked matrices through the layer's own convolution.
        mat_as_conv = eingroup("v,n,c,h,w->vn,c,h,w", mat)
        jmp_as_conv = conv2d(
            mat_as_conv,
            module.weight.data,
            stride=module.stride,
            padding=module.padding,
            dilation=module.dilation,
            groups=module.groups,
        )
        return self.view_like_output(jmp_as_conv, module)
    def _jac_t_mat_prod(self, module, g_inp, g_out, mat):
        # Transposed input Jacobian product: the adjoint of conv2d is a
        # transposed convolution with identical hyperparameters.
        mat_as_conv = eingroup("v,n,c,h,w->vn,c,h,w", mat)
        jmp_as_conv = conv_transpose2d(
            mat_as_conv,
            module.weight.data,
            stride=module.stride,
            padding=module.padding,
            dilation=module.dilation,
            groups=module.groups,
        )
        return self.view_like_input(jmp_as_conv, module)
    def _bias_jac_mat_prod(self, module, g_inp, g_out, mat):
        """mat has shape [V, C_out]"""
        # expand for each batch and for each channel
        N_axis, H_axis, W_axis = 1, 3, 4
        jac_mat = mat.unsqueeze(N_axis).unsqueeze(H_axis).unsqueeze(W_axis)
        N, _, H_out, W_out = module.output_shape
        # The bias Jacobian simply broadcasts over batch and spatial axes.
        return jac_mat.expand(-1, N, -1, H_out, W_out)
    def _bias_jac_t_mat_prod(self, module, g_inp, g_out, mat, sum_batch=True):
        # Adjoint of the broadcast above: sum over spatial (and, unless
        # individual gradients are wanted, batch) axes.
        N_axis, H_axis, W_axis = 1, 3, 4
        axes = [H_axis, W_axis]
        if sum_batch:
            axes = [N_axis] + axes
        return mat.sum(axes)
    # TODO: Improve performance by using conv instead of unfold
    def _weight_jac_mat_prod(self, module, g_inp, g_out, mat):
        # Weight Jacobian product via the unfolded (im2col) input.
        jac_mat = eingroup("v,o,i,h,w->v,o,ihw", mat)
        X = self.get_unfolded_input(module)
        jac_mat = einsum("nij,vki->vnkj", (X, jac_mat))
        return self.view_like_output(jac_mat, module)
    def _weight_jac_t_mat_prod(self, module, g_inp, g_out, mat, sum_batch=True):
        """Unintuitive, but faster due to convolution."""
        # Realizes J_W^T mat as one grouped convolution of the stored input
        # with the backpropagated matrix; note stride and dilation swap roles
        # below compared to the forward pass.
        V = mat.shape[0]
        N, C_out, _, _ = module.output_shape
        _, C_in, _, _ = module.input0_shape
        mat = eingroup("v,n,c,w,h->vn,c,w,h", mat).repeat(1, C_in, 1, 1)
        C_in_axis = 1
        # a,b represent the combined/repeated dimensions
        mat = eingroup("a,b,w,h->ab,w,h", mat).unsqueeze(C_in_axis)
        N_axis = 0
        input = eingroup("n,c,h,w->nc,h,w", module.input0).unsqueeze(N_axis)
        input = input.repeat(1, V, 1, 1)
        grad_weight = conv2d(
            input,
            mat,
            bias=None,
            stride=module.dilation,
            padding=module.padding,
            dilation=module.stride,
            groups=C_in * N * V,
        ).squeeze(0)
        K_H_axis, K_W_axis = 1, 2
        _, _, K_H, K_W = module.weight.shape
        # The conv result can be spatially larger than the kernel; keep only
        # the leading K_H x K_W block.
        grad_weight = grad_weight.narrow(K_H_axis, 0, K_H).narrow(K_W_axis, 0, K_W)
        eingroup_eq = "vnio,x,y->v,{}o,i,x,y".format("" if sum_batch else "n,")
        return eingroup(
            eingroup_eq, grad_weight, dim={"v": V, "n": N, "i": C_in, "o": C_out}
        )
<file_sep>cd splash
# Build the Jekyll splash page straight into the top-level docs/ folder
# (this runs from splash/, entered by the preceding cd).
bundle exec jekyll build -d "../../docs"
cd ..
# Build the Sphinx (Read-the-Docs style) documentation into docs/rtd.
sphinx-build "rtd" "../docs/rtd"
# .nojekyll markers stop GitHub Pages from re-processing the generated HTML.
touch ../docs/.nojekyll
touch ../docs/rtd/.nojekyll
touch ../docs/rtd/_static/.nojekyll
<file_sep>"""
Curvature-matrix product backPACK extensions.
Those extension propagate additional information through the computation graph.
They are more expensive to run than a standard gradient backpropagation.
This extension does not compute information directly, but gives access to
functions to compute Matrix-Matrix products with Block-Diagonal approximations
of the curvature, such as the Block-diagonal Generalized Gauss-Newton
"""
from torch.nn import (
AvgPool2d,
BatchNorm1d,
Conv2d,
CrossEntropyLoss,
Dropout,
Flatten,
Linear,
MaxPool2d,
MSELoss,
ReLU,
Sigmoid,
Tanh,
ZeroPad2d,
)
from backpack.extensions.backprop_extension import BackpropExtension
from . import (
activations,
batchnorm1d,
conv2d,
dropout,
flatten,
linear,
losses,
padding,
pooling,
)
class CMP(BackpropExtension):
def __init__(self, curv_type, savefield="cmp"):
self.curv_type = curv_type
super().__init__(
savefield=savefield,
fail_mode="ERROR",
module_exts={
MSELoss: losses.CMPMSELoss(),
CrossEntropyLoss: losses.CMPCrossEntropyLoss(),
Linear: linear.CMPLinear(),
MaxPool2d: pooling.CMPMaxpool2d(),
AvgPool2d: pooling.CMPAvgPool2d(),
ZeroPad2d: padding.CMPZeroPad2d(),
Conv2d: conv2d.CMPConv2d(),
Dropout: dropout.CMPDropout(),
Flatten: flatten.CMPFlatten(),
ReLU: activations.CMPReLU(),
Sigmoid: activations.CMPSigmoid(),
Tanh: activations.CMPTanh(),
BatchNorm1d: batchnorm1d.CMPBatchNorm1d(),
},
)
def get_curv_type(self):
return self.curv_type
<file_sep>"""
First order backPACK extensions.
Those extension do not backpropagate additional information, and their
computational overhead is small.
They make it easier to extract more information from the gradient being already
backpropagated through the computational graph, such as
- `BatchGrad`: The individual gradients, rather than the sum over the samples
- `SumGradSquared`: The second moment of the individual gradient
- `Variance`: The variance of the individual gradients
- `BatchL2Grad`: The L2 norm of the individual gradients
"""
from .batch_grad import BatchGrad
from .batch_l2_grad import BatchL2Grad
from .sum_grad_squared import SumGradSquared
from .variance import Variance
__all__ = ["BatchL2Grad", "BatchGrad", "SumGradSquared", "Variance"]
<file_sep>"""
Run all example files.
Example files are identified by the pattern 'example_*.py'.
"""
import glob
import os
import subprocess
import sys

# Examples live next to this script and match 'example_*.py'.
HERE = os.path.dirname(os.path.realpath(__file__))
PATTERN = os.path.join(HERE, r"example_*.py")
FILES = glob.glob(PATTERN)

for example in FILES:
    print("\nRunning {}".format(example))
    # Use the interpreter executing this script rather than whatever
    # "python" resolves to on PATH, so examples run in the same environment.
    exit_code = subprocess.call([sys.executable, example])
    crash = exit_code != 0
    if crash:
        raise RuntimeError("Error running {}".format(example))
<file_sep>"""Compute the gradient with PyTorch."""
from torch.nn import CrossEntropyLoss, Flatten, Linear, Sequential

from backpack.utils.examples import load_mnist_data

# Mini-batch size
B = 4
X, y = load_mnist_data(B)

print("# Gradient with PyTorch | B =", B)

# Logistic regression on flattened 28x28 MNIST images (784 -> 10 classes)
model = Sequential(Flatten(), Linear(784, 10),)
lossfunc = CrossEntropyLoss()

loss = lossfunc(model(X), y)
loss.backward()

# After backward(), each parameter holds the summed minibatch gradient in .grad
for name, param in model.named_parameters():
    print(name)
    print(".grad.shape: ", param.grad.shape)
<file_sep>.PHONY: help
.PHONY: black black-check flake8
.PHONY: install install-dev install-devtools install-test install-lint
.PHONY: test
.PHONY: conda-env
.PHONY: black isort format
.PHONY: black-check isort-check format-check
.PHONY: flake8
.DEFAULT: help
help:
@echo "test"
@echo " Run pytest on the project and report coverage"
@echo "black"
@echo " Run black on the project"
@echo "black-check"
@echo " Check if black would change files"
@echo "flake8"
@echo " Run flake8 on the project"
@echo "install"
@echo " Install backpack and dependencies"
@echo "install-dev"
@echo " Install all development tools"
@echo "install-lint"
@echo " Install only the linter tools (included in install-dev)"
@echo "install-test"
@echo " Install only the testing tools (included in install-dev)"
@echo "conda-env"
@echo " Create conda environment 'backpack' with dev setup"
###
# Test coverage
test:
@pytest -vx --cov=backpack .
###
# Linter and autoformatter
# Uses black.toml config instead of pyproject.toml to avoid pip issues. See
# - https://github.com/psf/black/issues/683
# - https://github.com/pypa/pip/pull/6370
# - https://pip.pypa.io/en/stable/reference/pip/#pep-517-and-518-support
black:
@black . --config=black.toml
black-check:
@black . --config=black.toml --check
flake8:
@flake8 .
isort:
@isort --apply
isort-check:
@isort --check
format:
@make black
@make isort
@make black-check
format-check: black-check isort-check
###
# Installation
install:
@pip install -r requirements.txt
@pip install .
install-lint:
@pip install -r requirements/lint.txt
install-test:
@pip install -r requirements/test.txt
install-devtools:
@echo "Install dev tools..."
@pip install -r requirements-dev.txt
install-dev: install-devtools
@echo "Install dependencies..."
@pip install -r requirements.txt
@echo "Uninstall existing version of backpack..."
@pip uninstall backpack-for-pytorch
@echo "Install backpack in editable mode..."
@pip install -e .
@echo "Install pre-commit hooks..."
@pre-commit install
###
# Conda environment
conda-env:
@conda env create --file .conda_env.yml
<file_sep>"""Utility functions for examples."""
import torch
import torchvision
def download_mnist():
    """Download the MNIST training split and return it with normalization.

    The dataset is cached under ./data; downloads only happen once.
    """
    # Standard MNIST mean/std normalization after tensor conversion.
    transform = torchvision.transforms.Compose(
        [
            torchvision.transforms.ToTensor(),
            torchvision.transforms.Normalize((0.1307,), (0.3081,)),
        ]
    )
    return torchvision.datasets.MNIST(
        root="./data",
        train=True,
        transform=transform,
        download=True,
    )
def load_mnist_data(batch_size=64, shuffle=True):
    """Return one MNIST mini-batch as a tuple ``(inputs, labels)``.

    Parameters:
    -----------
    batch_size: int
        Number of samples in the returned batch (default 64).
    shuffle: bool
        Whether the loader draws samples in random order (default True).
    """
    mnist_dataset = download_mnist()
    # Use the public DataLoader name instead of the private
    # torch.utils.data.dataloader module path.
    mnist_loader = torch.utils.data.DataLoader(
        mnist_dataset, batch_size=batch_size, shuffle=shuffle,
    )
    X, y = next(iter(mnist_loader))
    return X, y
<file_sep>from torch.nn import BatchNorm1d, Conv2d, Linear
from backpack.extensions.backprop_extension import BackpropExtension
from . import batchnorm1d, conv2d, linear
class BatchGrad(BackpropExtension):
    """
    The individual gradients for each sample in a minibatch.

    Is only meaningful if the individual functions are independent (no batchnorm).

    Stores the output in :code:`grad_batch` as a :code:`[N x ...]` tensor,
    where :code:`N` is the size of the minibatch and :code:`...`
    is the shape of the gradient.
    """

    def __init__(self):
        super().__init__(
            savefield="grad_batch",
            # warn (instead of failing) on modules without an implementation
            fail_mode="WARNING",
            module_exts={
                Linear: linear.BatchGradLinear(),
                Conv2d: conv2d.BatchGradConv2d(),
                BatchNorm1d: batchnorm1d.BatchGradBatchNorm1d(),
            },
        )
<file_sep>from torch.nn import Conv2d, Linear
from backpack.extensions.backprop_extension import BackpropExtension
from . import conv2d, linear
class SumGradSquared(BackpropExtension):
    """
    The sum of individual-gradients-squared, or second moment of the gradient.

    Is only meaningful if the individual functions are independent (no batchnorm).

    Stores the output in :code:`sum_grad_squared`. Same dimension as the gradient.
    """

    def __init__(self):
        super().__init__(
            savefield="sum_grad_squared",
            # warn (instead of failing) on modules without an implementation
            fail_mode="WARNING",
            module_exts={Linear: linear.SGSLinear(), Conv2d: conv2d.SGSConv2d(),},
        )
<file_sep>from backpack.core.derivatives.dropout import DropoutDerivatives
from .cmpbase import CMPBase
class CMPDropout(CMPBase):
    """Curvature-matrix product for ``nn.Dropout`` (parameter-free layer)."""

    def __init__(self):
        super().__init__(derivatives=DropoutDerivatives())
<file_sep>Supported models
====================================
BackPACK expects models to be
`sequences <https://pytorch.org/docs/stable/nn.html#sequential>`_
of `PyTorch NN modules <https://pytorch.org/docs/stable/nn.html>`_.
For example,
.. code-block:: python
model = torch.nn.Sequential(
torch.nn.Linear(784, 64),
torch.nn.ReLU(),
torch.nn.Linear(64, 10)
)
This page lists the layers currently supported by BackPACK.
**Do not rewrite the** :code:`forward()` **function of the** :code:`Sequential` **or the inner modules!**
If the forward is not standard, the additional backward pass to compute second-order quantities will not match the actual function.
First-order extensions that extract information might work outside of this framework, but it is not tested.
.. raw:: html
<hr/>
For first-order extensions
--------------------------------------
You can use any layers, as long as they do not have parameters.
BackPACK can extract more information about the gradient w.r.t. the parameters of those layers:
* `Conv2d <https://pytorch.org/docs/stable/nn.html#conv2d>`_
* `Linear <https://pytorch.org/docs/stable/nn.html#linear>`_
**Some layers make the concept of "individual gradient for a sample in a minibatch" ill-defined.**
This is the case for Batch Normalization layers, for example.
.. raw:: html
<hr/>
For second-order extensions
--------------------------------------
BackPACK needs to know how to compute an additional backward pass.
In addition to the parametrized layers above, this is implemented for the following layers:
**Loss functions**
* `MSELoss <https://pytorch.org/docs/stable/nn.html#mseloss>`_
* `CrossEntropyLoss <https://pytorch.org/docs/stable/nn.html#crossentropyloss>`_
**Layers without parameters**
* `MaxPool2d <https://pytorch.org/docs/stable/nn.html#maxpool2d>`_
* `AvgPool2d <https://pytorch.org/docs/stable/nn.html#avgpool2d>`_
* `Dropout <https://pytorch.org/docs/stable/nn.html#dropout>`_
* `ReLU <https://pytorch.org/docs/stable/nn.html#relu>`_
* `Sigmoid <https://pytorch.org/docs/stable/nn.html#sigmoid>`_
* `Tanh <https://pytorch.org/docs/stable/nn.html#tanh>`_
Custom layers
--------------------------------------
:code:`torch.nn.functional.flatten` can not be used in this setup because it is a function, not a module.
Use :code:`backpack.core.layers.Flatten` instead.
<file_sep>opt-einsum >= 2.3.2, < 3.0.0
torch >= 1.3.0, < 2.0.0
torchvision >= 0.3.0, < 1.0.0
<file_sep>"""The code relies on the insight that average pooling can be understood as
convolution over single channels with a constant kernel."""
import torch.nn
from torch.nn import AvgPool2d, Conv2d, ConvTranspose2d
from backpack.core.derivatives.basederivatives import BaseDerivatives
from backpack.utils.ein import eingroup, einsum
class AvgPool2DDerivatives(BaseDerivatives):
    """Partial derivatives of 2d average pooling.

    Average pooling is linear, so its Hessian vanishes. Jacobian products
    are realized by applying a (transposed) convolution with a constant
    averaging kernel to fake single-channel images.
    """

    def get_module(self):
        return AvgPool2d

    def hessian_is_zero(self):
        # Average pooling is linear in its input.
        return True

    # TODO: Require tests
    def ea_jac_t_mat_jac_prod(self, module, g_inp, g_out, mat):
        """Use fact that average pooling can be implemented as conv.

        # NOTE(review): mat appears to be a square [out_features, out_features]
        # matrix; the result is [in_features, in_features] — confirm against callers.
        """
        _, channels, in_x, in_y = module.input0.size()
        in_features = channels * in_x * in_y
        _, _, out_x, out_y = module.output.size()
        out_features = channels * out_x * out_y

        # 1) apply conv_transpose to multiply with W^T
        result = mat.view(channels, out_x, out_y, out_features)
        result = einsum("cxyf->fcxy", (result,)).contiguous()
        result = result.view(out_features * channels, 1, out_x, out_y)
        # result: W^T mat
        result = self.__apply_jacobian_t_of(module, result)
        result = result.view(out_features, in_features)

        # 2) transpose: mat^T W
        result = result.t().contiguous()

        # 3) apply conv_transpose
        result = result.view(in_features * channels, 1, out_x, out_y)
        result = self.__apply_jacobian_t_of(module, result)

        # 4) transpose to obtain W^T mat W
        return result.view(in_features, in_features).t()

    def check_exotic_parameters(self, module):
        # count_include_pad=False changes the averaging weights near borders,
        # which the constant-kernel trick below does not reproduce.
        assert module.count_include_pad, (
            "Might not work for exotic hyperparameters of AvgPool2d, "
            + "like count_include_pad=False"
        )

    def _jac_mat_prod(self, module, g_inp, g_out, mat):
        self.check_exotic_parameters(module)

        mat_as_pool = self.__make_single_channel(mat, module)
        jmp_as_pool = self.__apply_jacobian_of(module, mat_as_pool)
        self.__check_jmp_out_as_pool(mat, jmp_as_pool, module)

        return self.view_like_output(jmp_as_pool, module)

    def __make_single_channel(self, mat, module):
        """Create fake single-channel images, grouping batch,
        class and channel dimension."""
        result = eingroup("v,n,c,w,h->vnc,w,h", mat)
        C_axis = 1
        return result.unsqueeze(C_axis)

    def __apply_jacobian_of(self, module, mat):
        # Single-channel convolution with a constant kernel of value
        # 1 / kernel_size reproduces average pooling.
        conv2d = Conv2d(
            in_channels=1,
            out_channels=1,
            kernel_size=module.kernel_size,
            stride=module.stride,
            padding=module.padding,
            bias=False,
        ).to(module.input0.device)

        conv2d.weight.requires_grad = False
        avg_kernel = torch.ones_like(conv2d.weight) / conv2d.weight.numel()
        conv2d.weight.data = avg_kernel

        return conv2d(mat)

    def __check_jmp_out_as_pool(self, mat, jmp_as_pool, module):
        V = mat.size(0)
        N, C_out, H_out, W_out = module.output_shape
        assert jmp_as_pool.shape == (V * N * C_out, 1, H_out, W_out)

    def _jac_t_mat_prod(self, module, g_inp, g_out, mat):
        self.check_exotic_parameters(module)

        mat_as_pool = self.__make_single_channel(mat, module)
        jmp_as_pool = self.__apply_jacobian_t_of(module, mat_as_pool)
        self.__check_jmp_in_as_pool(mat, jmp_as_pool, module)

        return self.view_like_input(jmp_as_pool, module)

    def __apply_jacobian_t_of(self, module, mat):
        # Transposed convolution with the same averaging kernel applies J^T.
        C_for_conv_t = 1

        conv2d_t = ConvTranspose2d(
            in_channels=C_for_conv_t,
            out_channels=C_for_conv_t,
            kernel_size=module.kernel_size,
            stride=module.stride,
            padding=module.padding,
            bias=False,
        ).to(module.input0.device)

        conv2d_t.weight.requires_grad = False
        avg_kernel = torch.ones_like(conv2d_t.weight) / conv2d_t.weight.numel()
        conv2d_t.weight.data = avg_kernel

        V_N_C_in = mat.size(0)
        _, _, H_in, W_in = module.input0.size()
        # output_size disambiguates the spatial size of the transposed conv.
        output_size = (V_N_C_in, C_for_conv_t, H_in, W_in)

        return conv2d_t(mat, output_size=output_size)

    def __check_jmp_in_as_pool(self, mat, jmp_as_pool, module):
        V = mat.size(0)
        N, C_in, H_in, W_in = module.input0_shape
        assert jmp_as_pool.shape == (V * N * C_in, 1, H_in, W_in)
<file_sep>""" TODO: Implement all features for BN, then add to automated tests. """
import pytest
import torch
from .automated_test import check_sizes, check_values
from .implementation.implementation_autograd import AutogradImpl
from .implementation.implementation_bpext import BpextImpl
from .test_problems_bn import TEST_PROBLEMS as BN_TEST_PROBLEMS
# Run on CPU always; additionally on the first GPU when CUDA is available.
if torch.cuda.is_available():
    DEVICES = {"cpu": "cpu", "gpu": "cuda:0"}
else:
    DEVICES = {"cpu": "cpu"}

TEST_PROBLEMS = dict(BN_TEST_PROBLEMS)

# Cartesian product of devices (outer) and problems (inner); the two lists
# stay index-aligned: ALL_CONFIGURATIONS[i] is described by CONFIGURATION_IDS[i].
ALL_CONFIGURATIONS = [
    (prob, dev) for dev in DEVICES.values() for prob in TEST_PROBLEMS.values()
]
CONFIGURATION_IDS = [
    "{}-{}".format(probname, dev_name)
    for dev_name in DEVICES
    for probname in TEST_PROBLEMS
]
###
# Tests
###
@pytest.mark.parametrize("problem,device", ALL_CONFIGURATIONS, ids=CONFIGURATION_IDS)
def test_batch_gradients_sum_to_grad(problem, device):
    """BackPACK's individual gradients must sum to the autograd gradient."""
    problem.to(device)

    backpack_batch_res = BpextImpl(problem).batch_gradients()
    # Summing over the batch dimension must recover the full gradient.
    backpack_res = [g.sum(0) for g in backpack_batch_res]
    autograd_res = AutogradImpl(problem).gradient()

    check_sizes(autograd_res, backpack_res, list(problem.model.parameters()))
    check_values(autograd_res, backpack_res)
@pytest.mark.parametrize("problem,device", ALL_CONFIGURATIONS, ids=CONFIGURATION_IDS)
def test_ggn_mp(problem, device):
    """GGN-matrix products from BackPACK must match autograd."""
    problem.to(device)

    NUM_COLS = 10
    # One random matrix per parameter, with a leading free dimension.
    matrices = [
        torch.randn(NUM_COLS, *p.shape, device=device)
        for p in problem.model.parameters()
    ]
    autograd_res = AutogradImpl(problem).ggn_mp(matrices)
    backpack_res = BpextImpl(problem).ggn_mp(matrices)

    check_sizes(autograd_res, backpack_res)
    check_values(autograd_res, backpack_res)
@pytest.mark.parametrize("problem,device", ALL_CONFIGURATIONS, ids=CONFIGURATION_IDS)
def test_ggn_vp(problem, device):
    """GGN-vector products from BackPACK must match autograd."""
    problem.to(device)

    vecs = [torch.randn(*p.shape, device=device) for p in problem.model.parameters()]
    backpack_res = BpextImpl(problem).ggn_vp(vecs)
    autograd_res = AutogradImpl(problem).ggn_vp(vecs)

    check_sizes(autograd_res, backpack_res)
    check_values(autograd_res, backpack_res)
@pytest.mark.parametrize("problem,device", ALL_CONFIGURATIONS, ids=CONFIGURATION_IDS)
def test_hvp_is_not_implemented(problem, device):
    """Hessian-vector products either match autograd or are unimplemented.

    NOTE(review): while BatchNorm's R_mat_prod is missing, the early return
    makes this test pass vacuously; it becomes a real check once implemented.
    """
    # TODO: Rename after implementing BatchNorm R_mat_prod
    problem.to(device)

    vecs = [torch.randn(*p.shape, device=device) for p in problem.model.parameters()]
    # TODO: Implement BatchNorm R_mat_prod in backpack/core/derivatives/batchnorm1d.py
    try:
        backpack_res = BpextImpl(problem).hvp(vecs)
    except NotImplementedError:
        return
    autograd_res = AutogradImpl(problem).hvp(vecs)

    check_sizes(autograd_res, backpack_res)
    check_values(autograd_res, backpack_res)
@pytest.mark.parametrize("problem,device", ALL_CONFIGURATIONS, ids=CONFIGURATION_IDS)
def test_hmp_is_not_implemented(problem, device):
    """Hessian-matrix products either match autograd or are unimplemented.

    NOTE(review): while BatchNorm's R_mat_prod is missing, the early return
    makes this test pass vacuously; it becomes a real check once implemented.
    """
    # TODO: Rename after implementing BatchNorm R_mat_prod
    problem.to(device)

    NUM_COLS = 10
    matrices = [
        torch.randn(NUM_COLS, *p.shape, device=device)
        for p in problem.model.parameters()
    ]
    # TODO: Implement BatchNorm R_mat_prod in backpack/core/derivatives/batchnorm1d.py
    try:
        backpack_res = BpextImpl(problem).hmp(matrices)
    except NotImplementedError:
        return
    autograd_res = AutogradImpl(problem).hmp(matrices)

    check_sizes(autograd_res, backpack_res)
    check_values(autograd_res, backpack_res)
<file_sep>from torch.nn import Conv2d, Linear
from backpack.extensions.backprop_extension import BackpropExtension
from . import conv2d, linear
class Variance(BackpropExtension):
    """
    Estimates the variance of the gradient using the samples in the minibatch.

    Is only meaningful if the individual functions are independent (no batchnorm).

    Stores the output in :code:`variance`, has the same dimension as the gradient.
    """

    def __init__(self):
        super().__init__(
            savefield="variance",
            # warn (instead of failing) on modules without an implementation
            fail_mode="WARNING",
            module_exts={
                Linear: linear.VarianceLinear(),
                Conv2d: conv2d.VarianceConv2d(),
            },
        )
<file_sep># Changelog
All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
## [Unreleased]
## [1.1.0] - 2020-02-11
### Added
- Support MC sampling
[[Issue](https://github.com/f-dangel/backpack/issues/21),
[PR](https://github.com/f-dangel/backpack/pull/36)]
- Utilities to handle Kronecker factors
[[PR](https://github.com/f-dangel/backpack/pull/17)]
- Examples
[[PR](https://github.com/f-dangel/backpack/pull/34)]
### Fixed
- Fixed documentation issue in `Batch l2`
[[PR](https://github.com/f-dangel/backpack/pull/33)]
- Added support for stride parameter in Conv2d
[[Issue](https://github.com/f-dangel/backpack/issues/30),
[PR](https://github.com/f-dangel/backpack/pull/31)]
- Pytorch `1.3.0` compatibility
[[PR](https://github.com/f-dangel/backpack/pull/8),
[PR](https://github.com/f-dangel/backpack/pull/9)]
### Internal
- Added
continuous integration [[PR](https://github.com/f-dangel/backpack/pull/19)],
test coverage [[PR](https://github.com/f-dangel/backpack/pull/25)],
style guide enforcement [[PR](https://github.com/f-dangel/backpack/pull/27)]
- Changed internal shape conventions of backpropagated quantities for performance improvements
[[PR](https://github.com/f-dangel/backpack/pull/37)]
## [1.0.1] - 2019-09-05
### Fixed
- Fixed PyPI installation
## [1.0.0] - 2019-10-03
Initial release
[Unreleased]: https://github.com/f-dangel/backpack/compare/v1.1.0...HEAD
[1.1.0]: https://github.com/f-dangel/backpack/compare/1.0.1...1.1.0
[1.0.1]: https://github.com/f-dangel/backpack/compare/1.0.0...1.0.1
[1.0.0]: https://github.com/f-dangel/backpack/releases/tag/1.0.0
<file_sep>from backpack.core.derivatives.linear import LinearDerivatives
from backpack.extensions.curvmatprod.cmpbase import CMPBase
class CMPLinear(CMPBase):
    """Curvature-matrix product for the parameters of ``nn.Linear``.

    For each parameter, returns a closure mapping mat to Jᵀ C J mat, where
    J is the parameter Jacobian and C the backpropagated curvature product.
    """

    def __init__(self):
        super().__init__(derivatives=LinearDerivatives(), params=["weight", "bias"])

    def weight(self, ext, module, g_inp, g_out, backproped):
        return self._sandwich(
            module,
            g_inp,
            g_out,
            backproped,
            self.derivatives.weight_jac_mat_prod,
            self.derivatives.weight_jac_t_mat_prod,
        )

    def bias(self, ext, module, g_inp, g_out, backproped):
        return self._sandwich(
            module,
            g_inp,
            g_out,
            backproped,
            self.derivatives.bias_jac_mat_prod,
            self.derivatives.bias_jac_t_mat_prod,
        )

    def _sandwich(self, module, g_inp, g_out, CMP_out, jac, jac_t):
        # Build the closure mat -> Jᵀ (CMP_out (J mat)).
        def param_cmp(mat):
            Jmat = jac(module, g_inp, g_out, mat)
            CJmat = CMP_out(Jmat)
            return jac_t(module, g_inp, g_out, CJmat)

        return param_cmp
<file_sep>Basics about the development setup
| Topic | Setup |
|-|-|
| Python version | The Python 3 versions supported by PyTorch (`3.5, 3.6, 3.7`); `3.7` is used for development |
| Tooling management | [`make`](https://www.gnu.org/software/make/) as an interface to the dev tools ([makefile](makefile)) |
| Testing | [`pytest`](https://docs.pytest.org) ([testing readme](test/readme.md))
| Style | [`black`](https://black.readthedocs.io) ([rules](black.toml)) for formatting and [`flake8`](http://flake8.pycqa.org/) ([rules](.flake8)) for linting |
| CI/QA | [`Travis`](https://travis-ci.org/f-dangel/backpack) ([config](.travis.yaml)) to run tests and [`Github workflows`](https://github.com/f-dangel/backpack/actions) ([config](.github/workflows)) to check formatting and linting |
<file_sep>BackPACK
====================================
.. toctree::
:maxdepth: 2
main-api
supported-layers
extensions
<file_sep>from backpack.core.derivatives import shape_check
from backpack.utils.ein import try_view
class BaseDerivatives:
    """First- and second-order partial derivatives of a module.

    Shape conventions:
    ------------------
    * Batch size: N
    * Free dimension for vectorization: V

    For vector-processing layers (2d input):
    * input [N, C_in], output [N, C_out]

    For image-processing layers (4d input)
    * Input/output channels: C_in/C_out
    * Input/output height: H_in/H_out
    * Input/output width: W_in/W_out
    * input [N, C_in, H_in, W_in], output [N, C_out, H_in, W_in]

    Definitions:
    ------------
    * The Jacobian J is defined as
        J[n, c, w, ..., ̃n, ̃c, ̃w, ...]
        = 𝜕output[n, c, w, ...] / 𝜕input[̃n, ̃c, ̃w, ...]
    * The transposed Jacobian Jᵀ is defined as
        Jᵀ[̃n, ̃c, ̃w, ..., n, c, w, ...]
        = 𝜕output[n, c, w, ...] / 𝜕input[̃n, ̃c, ̃w, ...]
    """

    @shape_check.jac_mat_prod_accept_vectors
    @shape_check.jac_mat_prod_check_shapes
    def jac_mat_prod(self, module, g_inp, g_out, mat):
        """Apply Jacobian of the output w.r.t. input to a matrix.

        Implicit application of J:
            result[v, n, c, w, ...]
            = ∑_{̃n, ̃c, ̃w} J[n, c, w, ..., ̃n, ̃c, ̃w, ...] mat[̃n, ̃c, ̃w, ...].

        Parameters:
        -----------
        mat: torch.Tensor
            Matrix the Jacobian will be applied to.
            Must have shape [V, N, C_in, H_in, ...].

        Returns:
        --------
        result: torch.Tensor
            Jacobian-matrix product.
            Has shape [V, N, C_out, H_out, ...].
        """
        return self._jac_mat_prod(module, g_inp, g_out, mat)

    def _jac_mat_prod(self, module, g_inp, g_out, mat):
        """Internal implementation of the Jacobian."""
        raise NotImplementedError

    @shape_check.jac_t_mat_prod_accept_vectors
    @shape_check.jac_t_mat_prod_check_shapes
    def jac_t_mat_prod(self, module, g_inp, g_out, mat):
        """Apply transposed Jacobian of module output w.r.t. input to a matrix.

        Implicit application of Jᵀ:
            result[v, ̃n, ̃c, ̃w, ...]
            = ∑_{n, c, w} Jᵀ[̃n, ̃c, ̃w, ..., n, c, w, ...] mat[v, n, c, w, ...].

        Parameters:
        -----------
        mat: torch.Tensor
            Matrix the transposed Jacobian will be applied to.
            Must have shape [V, N, C_out, H_out, ...].

        Returns:
        --------
        result: torch.Tensor
            Transposed Jacobian-matrix product.
            Has shape [V, N, C_in, H_in, ...].
        """
        return self._jac_t_mat_prod(module, g_inp, g_out, mat)

    def _jac_t_mat_prod(self, module, g_inp, g_out, mat):
        """Internal implementation of transposed Jacobian."""
        raise NotImplementedError

    # TODO Add shape check
    # TODO Use new convention
    def ea_jac_t_mat_jac_prod(self, module, g_inp, g_out, mat):
        """Batch-averaged Jᵀ mat J product (expectation approximation)."""
        raise NotImplementedError

    def hessian_is_zero(self):
        """Whether the module Hessian w.r.t. its input vanishes (linear op)."""
        raise NotImplementedError

    def hessian_is_diagonal(self):
        """Whether the module Hessian w.r.t. its input is diagonal."""
        raise NotImplementedError

    def hessian_diagonal(self):
        """Diagonal of the module Hessian (only if hessian_is_diagonal)."""
        raise NotImplementedError

    def hessian_is_psd(self):
        """Whether the module Hessian is positive semi-definite."""
        raise NotImplementedError

    # TODO make accept vectors
    # TODO add shape check
    def make_residual_mat_prod(self, module, g_inp, g_out):
        """Return multiplication routine with the residual term.

        The function performs the mapping: mat → [∑_{k} Hz_k(x) 𝛿z_k] mat.
        (required for extension `curvmatprod`)

        Note:
        -----
        This function only has to be implemented if the residual is not
        zero and not diagonal (for instance, `BatchNorm`).
        """
        raise NotImplementedError

    # TODO Refactor and remove
    def batch_flat(self, tensor):
        """Return batch size and tensor flattened to [N, -1]."""
        batch = tensor.size(0)
        # TODO Removing the clone().detach() will destroy the computation graph
        # Tests will fail
        return batch, tensor.clone().detach().view(batch, -1)

    # TODO Refactor and remove
    def get_batch(self, module):
        """Return the batch size N of the module's stored input."""
        return module.input0.size(0)

    # TODO Refactor and remove
    def get_output(self, module):
        """Return the module's stored output tensor."""
        return module.output

    @staticmethod
    def _view_like(mat, like):
        """View as like with trailing and additional 0th dimension.

        If like is [N, C, H, ...], returns shape [-1, N, C, H, ...]
        """
        V = -1
        shape = (V, *like.shape)
        return try_view(mat, shape)

    @classmethod
    def view_like_input(cls, mat, module):
        """Reshape mat to [V, *input_shape] of the module."""
        return cls._view_like(mat, module.input0)

    @classmethod
    def view_like_output(cls, mat, module):
        """Reshape mat to [V, *output_shape] of the module."""
        return cls._view_like(mat, module.output)
class BaseParameterDerivatives(BaseDerivatives):
    """First- and second order partial derivatives of a module with parameters.

    Assumptions (true for `nn.Linear`, `nn.Conv(Transpose)Nd`, `nn.BatchNormNd`):
    - Parameters are saved as `.weight` and `.bias` fields in a module
    - The output is linear in the model parameters

    Shape conventions:
    ------------------
    Weight [C_w, H_w, W_w, ...] (usually 1d, 2d, 4d)
    Bias [C_b, ...] (usually 1d)

    For most layers, these shapes correspond to shapes of the module input or output.
    """

    @shape_check.bias_jac_mat_prod_accept_vectors
    @shape_check.bias_jac_mat_prod_check_shapes
    def bias_jac_mat_prod(self, module, g_inp, g_out, mat):
        """Apply Jacobian of the output w.r.t. bias to a matrix.

        Parameters:
        -----------
        mat: torch.Tensor
            Matrix the Jacobian will be applied to.
            Must have shape [V, C_b, ...].

        Returns:
        --------
        result: torch.Tensor
            Jacobian-matrix product.
            Has shape [V, N, C_out, H_out, ...].
        """
        return self._bias_jac_mat_prod(module, g_inp, g_out, mat)

    def _bias_jac_mat_prod(self, module, g_inp, g_out, mat):
        """Internal implementation of the bias Jacobian."""
        raise NotImplementedError

    @shape_check.bias_jac_t_mat_prod_accept_vectors
    @shape_check.bias_jac_t_mat_prod_check_shapes
    def bias_jac_t_mat_prod(self, module, g_inp, g_out, mat, sum_batch=True):
        """Apply transposed Jacobian of the output w.r.t. bias to a matrix.

        Parameters:
        -----------
        mat: torch.Tensor
            Matrix the transposed Jacobian will be applied to.
            Must have shape [V, N, C_out, H_out, ...].
        sum_batch: bool
            Whether to sum over the batch dimension on the fly.

        Returns:
        --------
        result: torch.Tensor
            Jacobian-matrix product.
            Has shape [V, N, C_b, ...] if `sum_batch == False`.
            Has shape [V, C_b, ...] if `sum_batch == True`.
        """
        return self._bias_jac_t_mat_prod(module, g_inp, g_out, mat, sum_batch=sum_batch)

    def _bias_jac_t_mat_prod(self, module, g_inp, g_out, mat, sum_batch=True):
        """Internal implementation of the transposed bias Jacobian."""
        raise NotImplementedError

    @shape_check.weight_jac_mat_prod_accept_vectors
    @shape_check.weight_jac_mat_prod_check_shapes
    def weight_jac_mat_prod(self, module, g_inp, g_out, mat):
        """Apply Jacobian of the output w.r.t. weight to a matrix.

        Parameters:
        -----------
        mat: torch.Tensor
            Matrix the Jacobian will be applied to.
            Must have shape [V, C_w, H_w, ...].

        Returns:
        --------
        result: torch.Tensor
            Jacobian-matrix product.
            Has shape [V, N, C_out, H_out, ...].
        """
        return self._weight_jac_mat_prod(module, g_inp, g_out, mat)

    def _weight_jac_mat_prod(self, module, g_inp, g_out, mat):
        """Internal implementation of weight Jacobian."""
        raise NotImplementedError

    @shape_check.weight_jac_t_mat_prod_accept_vectors
    @shape_check.weight_jac_t_mat_prod_check_shapes
    def weight_jac_t_mat_prod(self, module, g_inp, g_out, mat, sum_batch=True):
        """Apply transposed Jacobian of the output w.r.t. weight to a matrix.

        Parameters:
        -----------
        mat: torch.Tensor
            Matrix the transposed Jacobian will be applied to.
            Must have shape [V, N, C_out, H_out, ...].
        sum_batch: bool
            Whether to sum over the batch dimension on the fly.

        Returns:
        --------
        result: torch.Tensor
            Jacobian-matrix product.
            Has shape [V, N, C_w, H_w, ...] if `sum_batch == False`.
            Has shape [V, C_w, H_w, ...] if `sum_batch == True`.
        """
        return self._weight_jac_t_mat_prod(
            module, g_inp, g_out, mat, sum_batch=sum_batch
        )

    def _weight_jac_t_mat_prod(self, module, g_inp, g_out, mat, sum_batch=True):
        """Internal implementation of transposed weight Jacobian."""
        raise NotImplementedError
class BaseLossDerivatives(BaseDerivatives):
    """Second-order partial derivatives of loss functions.

    Subclasses implement the internal `_`-prefixed hooks; the public methods
    only add (future) shape checks and delegate.
    """

    # TODO Add shape check
    def sqrt_hessian(self, module, g_inp, g_out):
        """Symmetric factorization ('sqrt') of the loss Hessian."""
        return self._sqrt_hessian(module, g_inp, g_out)

    def _sqrt_hessian(self, module, g_inp, g_out):
        """Internal implementation of the exact Hessian factorization."""
        raise NotImplementedError

    # TODO Add shape check
    def sqrt_hessian_sampled(self, module, g_inp, g_out, mc_samples=1):
        """Monte-Carlo sampled symmetric factorization of the loss Hessian."""
        return self._sqrt_hessian_sampled(module, g_inp, g_out, mc_samples=mc_samples)

    def _sqrt_hessian_sampled(self, module, g_inp, g_out, mc_samples=1):
        """Internal implementation of the sampled Hessian factorization."""
        raise NotImplementedError

    @shape_check.make_hessian_mat_prod_accept_vectors
    @shape_check.make_hessian_mat_prod_check_shapes
    def make_hessian_mat_prod(self, module, g_inp, g_out):
        """Multiplication of the input Hessian with a matrix.

        Return a function that maps mat to H * mat.
        """
        return self._make_hessian_mat_prod(module, g_inp, g_out)

    def _make_hessian_mat_prod(self, module, g_inp, g_out):
        """Internal implementation of the Hessian-matrix product factory."""
        raise NotImplementedError

    # TODO Add shape check
    def sum_hessian(self, module, g_inp, g_out):
        """Loss Hessians, summed over the batch dimension."""
        return self._sum_hessian(module, g_inp, g_out)

    def _sum_hessian(self, module, g_inp, g_out):
        """Internal implementation of the summed Hessian."""
        raise NotImplementedError
<file_sep>"""
Compute the gradient and Hessian-vector products with PyTorch.
"""
import torch
from torch.nn import CrossEntropyLoss, Flatten, Linear, Sequential
from torch.nn.utils import parameters_to_vector

from backpack.hessianfree.ggnvp import ggn_vector_product
from backpack.utils.convert_parameters import vector_to_parameter_list
from backpack.utils.examples import load_mnist_data

# Mini-batch size
B = 4
X, y = load_mnist_data(B)

print("# GGN-vector product and gradients with PyTorch | B =", B)

# Logistic regression on flattened MNIST images (784 -> 10 classes)
model = Sequential(Flatten(), Linear(784, 10),)
lossfunc = CrossEntropyLoss()

print("# 1) Vector with shapes like parameters | B =", B)

output = model(X)
loss = lossfunc(output, y)

# One random direction per parameter tensor
v = [torch.randn_like(p) for p in model.parameters()]

GGNv = ggn_vector_product(loss, output, model, v)

# has to be called afterwards, or with create_graph=True
loss.backward()

for (name, param), vec, GGNvec in zip(model.named_parameters(), v, GGNv):
    print(name)
    print(".grad.shape: ", param.grad.shape)
    # vector
    print("vector shape: ", vec.shape)
    # Hessian-vector product
    print("GGN-vector product shape: ", GGNvec.shape)

print("# 2) Flattened vector | B =", B)

output = model(X)
loss = lossfunc(output, y)

num_params = sum(p.numel() for p in model.parameters())
# Draw one flat random direction and split it into parameter shapes
v_flat = torch.randn(num_params)
v = vector_to_parameter_list(v_flat, model.parameters())

GGNv = ggn_vector_product(loss, output, model, v)
GGNv_flat = parameters_to_vector(GGNv)

# has to be called afterwards, or with create_graph=True
loss.backward()

print("Model parameters: ", num_params)
# vector
print("flat vector shape: ", v_flat.shape)
# individual gradient L2 norm
print("flat GGN-vector product shape: ", GGNv_flat.shape)
<file_sep>from backpack.core.derivatives.basederivatives import BaseDerivatives
from backpack.utils.ein import einsum
class ElementwiseDerivatives(BaseDerivatives):
    """Derivatives of elementwise (activation) functions.

    The Jacobian of an elementwise map is diagonal, hence symmetric:
    applying J and Jᵀ is the same multiplication with the pointwise
    derivative df. Subclasses implement `df` and `d2f`.
    """

    def _jac_t_mat_prod(self, module, g_inp, g_out, mat):
        # Broadcast-multiply each of the V slices of mat with df(input).
        df_elementwise = self.df(module, g_inp, g_out)
        return einsum("...,v...->v...", (df_elementwise, mat))

    def _jac_mat_prod(self, module, g_inp, g_out, mat):
        # Diagonal Jacobian: J == Jᵀ, so reuse the transposed product.
        return self.jac_t_mat_prod(module, g_inp, g_out, mat)

    def ea_jac_t_mat_jac_prod(self, module, g_inp, g_out, mat):
        # Batch-averaged Jᵀ mat J built from the flattened derivatives.
        batch, df_flat = self.batch_flat(self.df(module, g_inp, g_out))
        return einsum("ni,nj,ij->ij", (df_flat, df_flat, mat)) / batch

    def hessian_diagonal(self, module, g_inp, g_out):
        # Residual term: second derivative times the incoming output gradient.
        return self.d2f(module, g_inp, g_out) * g_out[0]

    def df(self, module, g_inp, g_out):
        """Elementwise first derivative, evaluated at the module input."""
        raise NotImplementedError("First derivatives not implemented")

    def d2f(self, module, g_inp, g_out):
        """Elementwise second derivative, evaluated at the module input."""
        raise NotImplementedError("Second derivatives not implemented")
<file_sep>"""
Compute the full Hessian matrix with automatic differentiation.
Use Hessian-vector products for row-wise construction.
"""
import time

import torch
from torch.nn import CrossEntropyLoss, Flatten, Linear, Sequential
from torch.nn.utils.convert_parameters import parameters_to_vector

from backpack.hessianfree.hvp import hessian_vector_product
from backpack.utils.convert_parameters import vector_to_parameter_list
from backpack.utils.examples import load_mnist_data

# Mini-batch size
B = 4
X, y = load_mnist_data(B)

# Logistic regression on flattened MNIST images (784 -> 10 classes)
model = Sequential(Flatten(), Linear(784, 10),)
lossfunc = CrossEntropyLoss()


def compute_hessian(loss, params, grad_params=None):
    """Build the full Hessian row by row via Hessian-vector products.

    Parameters:
        loss: Scalar loss tensor the Hessian is taken of.
        params: List of model parameters.
        grad_params: Optional pre-computed gradients (from a backward pass
            with create_graph=True); passing them saves one backpropagation
            per HVP.
    """
    num_params = sum(p.numel() for p in params)
    hessian = torch.zeros(num_params, num_params)
    for i in range(num_params):
        # HVP with the i.th unit vector yields the i.th row
        e_i = torch.zeros(num_params)
        e_i[i] = 1.0
        # convert to model parameter shapes
        e_i_list = vector_to_parameter_list(e_i, params)
        if grad_params is None:
            hessian_i_list = hessian_vector_product(loss, params, e_i_list)
        else:
            hessian_i_list = hessian_vector_product(
                loss, params, e_i_list, grad_params=grad_params
            )
        hessian[i, :] = parameters_to_vector(hessian_i_list)
    return hessian


print("# 1) Hessian matrix with automatic differentiation | B =", B)

loss = lossfunc(model(X), y)
num_params = sum(p.numel() for p in model.parameters())

start = time.time()
hessian = compute_hessian(loss, list(model.parameters()))
end = time.time()

print("Model parameters: ", num_params)
print("Hessian shape: ", hessian.shape)
print("Hessian: ", hessian)
print("Time [s]: ", end - start)

print("# 2) Hessian matrix with automatic differentiation (faster) | B =", B)
print("# Save one backpropagation for each HVP by recycling gradients")

loss = lossfunc(model(X), y)
# create_graph=True keeps the graph alive so gradients can be differentiated again
loss.backward(create_graph=True)
grad_params = [p.grad for p in model.parameters()]
num_params = sum(p.numel() for p in model.parameters())

start = time.time()
hessian = compute_hessian(loss, list(model.parameters()), grad_params=grad_params)
end = time.time()

print("Model parameters: ", num_params)
print("Hessian shape: ", hessian.shape)
print("Hessian: ", hessian)
print("Time [s]: ", end - start)
<file_sep>"""
Compute the gradient and Hessian-vector products with PyTorch.
"""
import torch
from torch.nn import CrossEntropyLoss, Flatten, Linear, Sequential
from torch.nn.utils import parameters_to_vector
from backpack.hessianfree.hvp import hessian_vector_product
from backpack.utils.convert_parameters import vector_to_parameter_list
from backpack.utils.examples import load_mnist_data
# Batch size for the MNIST example.
B = 4
X, y = load_mnist_data(B)

print("# Hessian-vector product and gradients with PyTorch | B =", B)

# Linear classifier on flattened MNIST images.
model = Sequential(Flatten(), Linear(784, 10),)
lossfunc = CrossEntropyLoss()

print("# 1) Vector with shapes like parameters | B =", B)

loss = lossfunc(model(X), y)
v = [torch.randn_like(p) for p in model.parameters()]
Hv = hessian_vector_product(loss, list(model.parameters()), v)

# has to be called afterwards, or with create_graph=True
loss.backward()

for (name, param), vec, Hvec in zip(model.named_parameters(), v, Hv):
    print(name)
    print(".grad.shape: ", param.grad.shape)
    # vector
    print("vector shape: ", vec.shape)
    # Hessian-vector product
    print("Hessian-vector product shape: ", Hvec.shape)

print("# 2) Flattened vector | B =", B)

loss = lossfunc(model(X), y)
num_params = sum(p.numel() for p in model.parameters())
# Draw a flat random vector, then chunk it into parameter-shaped pieces.
v_flat = torch.randn(num_params)
v = vector_to_parameter_list(v_flat, model.parameters())
Hv = hessian_vector_product(loss, list(model.parameters()), v)
Hv_flat = parameters_to_vector(Hv)

# has to be called afterwards, or with create_graph=True
loss.backward()

print("Model parameters: ", num_params)
# vector
print("flat vector shape: ", v_flat.shape)
# Hessian-vector product
print("flat Hessian-vector product shape: ", Hv_flat.shape)

print("# 3) Using gradients to save one backward pass | B =", B)

loss = lossfunc(model(X), y)
# has to be called with create_graph=True
loss.backward(create_graph=True)

v = [torch.randn_like(p) for p in model.parameters()]
params = list(model.parameters())
# Reuse the gradients stored on the parameters instead of recomputing them.
grad_params = [p.grad for p in params]
Hv = hessian_vector_product(loss, params, v, grad_params=grad_params)

for (name, param), vec, Hvec in zip(model.named_parameters(), v, Hv):
    print(name)
    print(".grad.shape: ", param.grad.shape)
    # vector
    print("vector shape: ", vec.shape)
    # Hessian-vector product
    print("Hessian-vector product shape: ", Hvec.shape)
<file_sep>-r requirements/test.txt
-r requirements/lint.txt
pre-commit
<file_sep>"""Compute the gradient with PyTorch and the variance with BackPACK."""
import torch
from torch.nn import Flatten, Linear, Sequential
from backpack import backpack, extend, extensions
# 50 random MNIST-sized inputs; requires_grad so a graph is built.
X = torch.randn(size=(50, 784), requires_grad=True)
model = Sequential(Flatten(), extend(Linear(784, 10)),)

loss = torch.mean(torch.sqrt(torch.abs(model(X))))

# BatchGrad makes BackPACK store one gradient per sample in `grad_batch`.
with backpack(extensions.BatchGrad()):
    loss.backward()

for name, param in model.named_parameters():
    print(name, param.grad_batch.shape)
<file_sep>from torch.nn import Sigmoid
from backpack.core.derivatives.elementwise import ElementwiseDerivatives
class SigmoidDerivatives(ElementwiseDerivatives):
    """Element-wise derivatives of the sigmoid activation σ(x) = 1 / (1 + exp(-x))."""

    def get_module(self):
        """Return the `torch.nn` module this class provides derivatives for."""
        return Sigmoid

    def hessian_is_zero(self):
        """Sigmoid is nonlinear, so its Hessian does not vanish."""
        return False

    def hessian_is_diagonal(self):
        """Element-wise map: the Hessian w.r.t. the input is diagonal."""
        return True

    def df(self, module, g_inp, g_out):
        """First derivative σ' = σ (1 - σ), evaluated from the stored output."""
        sigma = module.output
        return sigma * (1.0 - sigma)

    def d2f(self, module, g_inp, g_out):
        """Second derivative σ'' = σ (1 - σ) (1 - 2 σ)."""
        sigma = module.output
        return sigma * (1 - sigma) * (1 - 2 * sigma)
<file_sep># BACKpropagation PACKage - a backpack for `PyTorch`
| branch | tests & examples | coverage |
|--------|---------------------------|----------|
|`master` | [](https://travis-ci.org/f-dangel/backpack) | [](https://coveralls.io/github/f-dangel/backpack) |
| `development` | [](https://travis-ci.org/f-dangel/backpack) | [](https://coveralls.io/github/f-dangel/backpack) |
A backpack for PyTorch that extends the backward pass of feedforward networks to compute quantities beyond the gradient.
- Check out the [cheatsheet](examples/cheatsheet.pdf) for an overview of quantities.
- Check out the [examples](https://f-dangel.github.io/backpack/) on how to use the code.
## Installation
```bash
pip install backpack-for-pytorch
```
Alternative: Clone the repository and run `setup.py`
```bash
git clone https://github.com/f-dangel/backpack.git ~/backpack
cd ~/backpack
python setup.py install
```
## How to cite
If you are using `backpack` for your research, consider citing the [paper](https://openreview.net/forum?id=BJlrF24twB)
```
@inproceedings{dangel2020backpack,
title = {Back{PACK}: Packing more into Backprop},
author = {<NAME> and <NAME> and <NAME>},
booktitle = {International Conference on Learning Representations},
year = {2020},
url = {https://openreview.net/forum?id=BJlrF24twB}
}
```
<file_sep>from backpack.core.derivatives.zeropad2d import ZeroPad2dDerivatives
from .cmpbase import CMPBase
class CMPZeroPad2d(CMPBase):
    """Curvature-matrix product (CMP) support for `torch.nn.ZeroPad2d`."""

    def __init__(self):
        super().__init__(derivatives=ZeroPad2dDerivatives())
<file_sep>from torch import zeros
from torch.nn import MaxPool2d
from torch.nn.functional import max_pool2d
from backpack.core.derivatives.basederivatives import BaseDerivatives
from backpack.utils.ein import eingroup
class MaxPool2DDerivatives(BaseDerivatives):
    """Derivatives of `torch.nn.MaxPool2d` w.r.t. its input.

    Max pooling is piecewise linear, so its Jacobian only routes values
    through the argmax locations of the forward pass.
    """

    def get_module(self):
        return MaxPool2d

    # TODO: Do not recompute but get from forward pass of module
    def get_pooling_idx(self, module):
        """Re-run the pooling forward pass to recover the argmax indices."""
        _, pool_idx = max_pool2d(
            module.input0,
            kernel_size=module.kernel_size,
            stride=module.stride,
            padding=module.padding,
            dilation=module.dilation,
            return_indices=True,
            ceil_mode=module.ceil_mode,
        )
        return pool_idx

    def ea_jac_t_mat_jac_prod(self, module, g_inp, g_out, mat):
        """
        Note: It is highly questionable whether this makes sense both
        in terms of the approximation and memory costs.
        """
        # Batch-averaged Jᵀ M J: scatter `mat` back through each sample's
        # pooling indices, then average over the batch.
        device = mat.device
        N, channels, H_in, W_in = module.input0.size()
        in_features = channels * H_in * W_in
        _, _, H_out, W_out = module.output.size()
        out_features = channels * H_out * W_out

        pool_idx = self.get_pooling_idx(module).view(N, channels, H_out * W_out)

        result = zeros(in_features, in_features, device=device)
        for b in range(N):
            idx = pool_idx[b, :, :]
            temp = zeros(in_features, out_features, device=device)
            temp.scatter_add_(1, idx, mat)
            result.scatter_add_(0, idx.t(), temp)
        return result / N

    def hessian_is_zero(self):
        # Piecewise linear map: second derivative vanishes almost everywhere.
        return True

    def _jac_mat_prod(self, module, g_inp, g_out, mat):
        # Flatten the spatial axes, gather at the pooling indices, restore shape.
        mat_as_pool = eingroup("v,n,c,h,w->v,n,c,hw", mat)
        jmp_as_pool = self.__apply_jacobian_of(module, mat_as_pool)
        return self.view_like_output(jmp_as_pool, module)

    def __apply_jacobian_of(self, module, mat):
        V, HW_axis = mat.shape[0], 3
        pool_idx = self.__pool_idx_for_jac(module, V)
        return mat.gather(HW_axis, pool_idx)

    def __pool_idx_for_jac(self, module, V):
        """Manipulated pooling indices ready-to-use in jac(t)."""
        pool_idx = self.get_pooling_idx(module)
        V_axis = 0
        # Broadcast the same indices along the leading "vector" axis.
        return (
            eingroup("n,c,h,w->n,c,hw", pool_idx)
            .unsqueeze(V_axis)
            .expand(V, -1, -1, -1)
        )

    def _jac_t_mat_prod(self, module, g_inp, g_out, mat):
        # Transposed Jacobian: scatter-add output entries back to their
        # argmax input positions.
        mat_as_pool = eingroup("v,n,c,h,w->v,n,c,hw", mat)
        jmp_as_pool = self.__apply_jacobian_t_of(module, mat_as_pool)
        return self.view_like_input(jmp_as_pool, module)

    def __apply_jacobian_t_of(self, module, mat):
        V = mat.shape[0]
        result = self.__zero_for_jac_t(module, V, mat.device)
        pool_idx = self.__pool_idx_for_jac(module, V)

        HW_axis = 3
        result.scatter_add_(HW_axis, pool_idx, mat)
        return result

    def __zero_for_jac_t(self, module, V, device):
        """Zero buffer shaped like the flattened-spatial input, per vector slot."""
        # NOTE(review): reads `module.output_shape` while other methods use
        # `module.output.size()` — confirm both attributes are available.
        N, C_out, _, _ = module.output_shape
        _, _, H_in, W_in = module.input0.size()

        shape = (V, N, C_out, H_in * W_in)
        return zeros(shape, device=device)
<file_sep>from backpack.core.derivatives.relu import ReLUDerivatives
from backpack.core.derivatives.sigmoid import SigmoidDerivatives
from backpack.core.derivatives.tanh import TanhDerivatives
from .cmpbase import CMPBase
class CMPReLU(CMPBase):
    """Curvature-matrix product (CMP) support for `torch.nn.ReLU`."""

    def __init__(self):
        super().__init__(derivatives=ReLUDerivatives())
class CMPSigmoid(CMPBase):
    """Curvature-matrix product (CMP) support for `torch.nn.Sigmoid`."""

    def __init__(self):
        super().__init__(derivatives=SigmoidDerivatives())
class CMPTanh(CMPBase):
    """Curvature-matrix product (CMP) support for `torch.nn.Tanh`."""

    def __init__(self):
        super().__init__(derivatives=TanhDerivatives())
<file_sep>from backpack.core.derivatives.crossentropyloss import CrossEntropyLossDerivatives
from backpack.core.derivatives.mseloss import MSELossDerivatives
from backpack.extensions.curvature import Curvature
from backpack.extensions.curvmatprod.cmpbase import CMPBase
class CMPLoss(CMPBase):
    """Curvature-matrix product (CMP) base class for loss functions.

    At the loss, CMP backpropagation starts from the Hessian of the loss
    w.r.t. its input.
    """

    def backpropagate(self, ext, module, g_inp, g_out, backproped):
        # Verify the loss Hessian meets the requirements (PSD or not) of the
        # requested curvature type before handing out the product function.
        Curvature.check_loss_hessian(
            self.derivatives.hessian_is_psd(), curv_type=ext.get_curv_type()
        )

        CMP = self.derivatives.make_hessian_mat_prod(module, g_inp, g_out)
        return CMP
class CMPMSELoss(CMPLoss):
    """Curvature-matrix product (CMP) support for `torch.nn.MSELoss`."""

    def __init__(self):
        super().__init__(derivatives=MSELossDerivatives())
class CMPCrossEntropyLoss(CMPLoss):
    """Curvature-matrix product (CMP) support for `torch.nn.CrossEntropyLoss`."""

    def __init__(self):
        super().__init__(derivatives=CrossEntropyLossDerivatives())
<file_sep>"""
Second order backPACK extensions.
Those extension propagate additional information through the computation graph.
They are more expensive to run than a standard gradient backpropagation.
Those extension make it easier to extract structural or local approximations
to second-order information, such as
- `DiagHessian`: The diagonal of the Hessian.
- `DiagGGN`: The diagonal of the Generalized Gauss-Newton
(or Fisher information matrix), supports exact computation or sampling.
- `DiagGGNExact`: Exact diagonal of the GGN
- `DiagGGNMC`: MC-sampled diagonal of the GGN/Fisher
- `KFAC`, `KFRA`, `KFLR`: Kronecker Block-Diagonal approximations of the
Generalized Gauss-Newton (or Fisher information matrix).
- `HBP`: A general framework that encompasses KFAC, KFRA and KFLR.
"""
from .diag_ggn import DiagGGN, DiagGGNExact, DiagGGNMC
from .diag_hessian import DiagHessian
from .hbp import HBP, KFAC, KFLR, KFRA
__all__ = [
"DiagGGN",
"DiagGGNExact",
"DiagGGNMC",
"DiagHessian",
"HBP",
"KFAC",
"KFLR",
"KFRA",
]
<file_sep>from backpack.core.derivatives.avgpool2d import AvgPool2DDerivatives
from backpack.core.derivatives.maxpool2d import MaxPool2DDerivatives
from .cmpbase import CMPBase
class CMPAvgPool2d(CMPBase):
    """Curvature-matrix product (CMP) support for `torch.nn.AvgPool2d`.

    Consistency fix: inherits only from `CMPBase` and receives the
    derivatives as an instance, like every other CMP module (activations,
    losses, padding). The previous extra inheritance from
    `AvgPool2DDerivatives` was redundant — the derivatives are already
    supplied via `derivatives=` — and inconsistent with the sibling classes.
    """

    def __init__(self):
        super().__init__(derivatives=AvgPool2DDerivatives())
class CMPMaxpool2d(CMPBase):
    """Curvature-matrix product (CMP) support for `torch.nn.MaxPool2d`.

    Consistency fix: inherits only from `CMPBase` — the derivatives are
    supplied via `derivatives=`, matching the other CMP modules. The extra
    inheritance from `MaxPool2DDerivatives` was redundant.
    """

    def __init__(self):
        super().__init__(derivatives=MaxPool2DDerivatives())
<file_sep>"""
Compute the full GGN matrix with automatic differentiation.
Use GGN-vector products for row-wise construction.
"""
import torch
from torch.nn import CrossEntropyLoss, Flatten, Linear, Sequential
from torch.nn.utils.convert_parameters import parameters_to_vector
from backpack.hessianfree.ggnvp import ggn_vector_product
from backpack.utils.convert_parameters import vector_to_parameter_list
from backpack.utils.examples import load_mnist_data
# Batch size for the MNIST example.
B = 4
X, y = load_mnist_data(B)

print("# GGN matrix with automatic differentiation | B =", B)

# Linear classifier on flattened MNIST images.
model = Sequential(Flatten(), Linear(784, 10),)
lossfunc = CrossEntropyLoss()

output = model(X)
loss = lossfunc(output, y)

num_params = sum(p.numel() for p in model.parameters())
ggn = torch.zeros(num_params, num_params)

# One GGN-vector product per parameter: O(num_params) backward passes.
for i in range(num_params):
    # GGN-vector product with i.th unit vector yields the i.th row
    e_i = torch.zeros(num_params)
    e_i[i] = 1.0

    # convert to model parameter shapes
    e_i_list = vector_to_parameter_list(e_i, model.parameters())
    ggn_i_list = ggn_vector_product(loss, output, model, e_i_list)

    ggn_i = parameters_to_vector(ggn_i_list)
    ggn[i, :] = ggn_i

print("Model parameters: ", num_params)
print("GGN shape: ", ggn.shape)
print("GGN: ", ggn)
<file_sep>from math import sqrt
from warnings import warn
from torch import diag, diag_embed, ones, ones_like
from torch.nn import MSELoss
from backpack.core.derivatives.basederivatives import BaseLossDerivatives
class MSELossDerivatives(BaseLossDerivatives):
    """Derivatives of `torch.nn.MSELoss` w.r.t. its input (the predictions)."""

    def get_module(self):
        return MSELoss

    def _sqrt_hessian(self, module, g_inp, g_out):
        """Symmetric factorization S of the input Hessian, H = S Sᵀ.

        The per-sample MSE Hessian is 2·I, so the factor is √2·I
        (additionally scaled by 1/√N for mean reduction).
        """
        self.check_input_dims(module)

        V_dim, C_dim = 0, 2
        # Local `diag` shadows the `torch.diag` import inside this method only.
        diag = sqrt(2) * ones_like(module.input0)
        sqrt_H = diag_embed(diag, dim1=V_dim, dim2=C_dim)

        if module.reduction == "mean":
            N = module.input0.shape[0]
            sqrt_H /= sqrt(N)

        return sqrt_H

    def _sqrt_hessian_sampled(self, module, g_inp, g_out, mc_samples=None):
        """
        Note:
        -----
        The parameter `mc_samples` is ignored.
        The method always returns the full square root.

        The computational cost between the sampled and full version is the same,
        so the method always return the more accurate version.

        The cost is the same because the hessian of the loss w.r.t. its inputs
        for a single sample is one-dimensional.
        """
        warn(
            "[MC Sampling Hessian of MSE loss] "
            + "Returning the symmetric factorization of the full Hessian "
            + "(same computation cost)",
            UserWarning,
        )
        return self.sqrt_hessian(module, g_inp, g_out)

    def _sum_hessian(self, module, g_inp, g_out):
        """Sum of per-sample input Hessians: 2·N·I (divided by N for mean)."""
        self.check_input_dims(module)

        # NOTE(review): reads `module.input0_shape` while the rest of the class
        # uses `module.input0.shape` — confirm both attributes are available.
        N = module.input0_shape[0]
        num_features = module.input0.numel() // N
        sum_H = 2 * N * diag(ones(num_features, device=module.input0.device))

        if module.reduction == "mean":
            sum_H /= N

        return sum_H

    def _make_hessian_mat_prod(self, module, g_inp, g_out):
        """Multiplication of the input Hessian with a matrix."""

        def hessian_mat_prod(mat):
            # H = 2·I (per sample), so the product is just a scaling.
            Hmat = 2 * mat
            if module.reduction == "mean":
                N = module.input0.shape[0]
                Hmat /= N
            return Hmat

        return hessian_mat_prod

    def check_input_dims(self, module):
        """Raise if the loss input is not a 2D (batch × features) tensor."""
        if not len(module.input0.shape) == 2:
            raise ValueError("Only 2D inputs are currently supported for MSELoss.")

    def hessian_is_psd(self):
        """The MSE input Hessian 2·I is positive semi-definite."""
        return True
<file_sep>---
layout: default
---
BackPACK is a library built on top of [PyTorch](https://pytorch.org/)
to make it easy to extract more information from a backward pass.
Some of the things you can compute:
{% include code-samples.html %}
---
**Install with**
```
pip install backpack-for-pytorch
```
<file_sep>"""
Compute the gradient with PyTorch and the KFLR approximation with BackPACK.
"""
from torch.nn import CrossEntropyLoss, Flatten, Linear, Sequential
from backpack import backpack, extend, extensions
from backpack.utils.examples import load_mnist_data
# Batch size for the MNIST example.
B = 4
X, y = load_mnist_data(B)

print("# Gradient with PyTorch, KFLR approximation with BackPACK | B =", B)

# Linear classifier on flattened MNIST images.
model = Sequential(Flatten(), Linear(784, 10),)
lossfunc = CrossEntropyLoss()

# Both model and loss must be extended so BackPACK can hook the backward pass.
model = extend(model)
lossfunc = extend(lossfunc)

loss = lossfunc(model(X), y)

with backpack(extensions.KFLR()):
    loss.backward()

for name, param in model.named_parameters():
    print(name)
    print(".grad.shape: ", param.grad.shape)
    # KFLR stores a list of Kronecker factors on each parameter.
    print(".kflr (shapes): ", [kflr.shape for kflr in param.kflr])
<file_sep>from torch.nn import Conv2d, Linear
from backpack.extensions.backprop_extension import BackpropExtension
from . import conv2d, linear
class BatchL2Grad(BackpropExtension):
    """
    The squared L2 norm of individual gradients in the minibatch.

    Is only meaningful if the individual functions are independent (no batchnorm).

    Stores the output in :code:`batch_l2`
    as a vector of the size as the minibatch.
    """

    def __init__(self):
        super().__init__(
            savefield="batch_l2",
            fail_mode="WARNING",
            module_exts={
                Linear: linear.BatchL2Linear(),
                Conv2d: conv2d.BatchL2Conv2d(),
            },
        )
<file_sep>"""
Quick example: A small second-order optimizer with BackPACK
on the classic MNIST example from PyTorch,
https://github.com/pytorch/examples/blob/master/mnist/main.py
The optimizer we implement uses a constant damping parameter
and uses the diagonal of the GGN/Fisher matrix as a preconditioner;
```
x_{t+1} = x_t - (G_t + bI)^{-1} g_t
```
- `x_t` are the parameters of the model
- `G_t` is the diagonal of the Gauss-Newton/Fisher matrix at `x_t`
- `b` is a damping parameter
- `g_t` is the gradient
"""
import torch
from backpack import backpack, extend
from backpack.extensions import DiagGGNMC
from backpack.utils.examples import download_mnist
# Hyperparameters
BATCH_SIZE = 64
STEP_SIZE = 0.01
DAMPING = 1.0
MAX_ITER = 100
torch.manual_seed(0)
"""
Step 1: Load data and create the model.
We're going to load the MNIST dataset,
and fit a 3-layer MLP with ReLU activations.
"""
mnist_dataset = download_mnist()
mnist_loader = torch.utils.data.dataloader.DataLoader(
mnist_dataset, batch_size=BATCH_SIZE, shuffle=True
)
model = torch.nn.Sequential(
torch.nn.Conv2d(1, 20, 5, 1),
torch.nn.ReLU(),
torch.nn.MaxPool2d(2, 2),
torch.nn.Conv2d(20, 50, 5, 1),
torch.nn.ReLU(),
torch.nn.MaxPool2d(2, 2),
torch.nn.Flatten(),
torch.nn.Linear(4 * 4 * 50, 500),
torch.nn.ReLU(),
torch.nn.Linear(500, 10),
)
loss_function = torch.nn.CrossEntropyLoss()
def get_accuracy(output, targets):
    """Return the fraction of rows in `output` whose argmax matches `targets`."""
    predicted_classes = output.argmax(dim=1, keepdim=True).view_as(targets)
    correct = (predicted_classes == targets).float()
    return correct.mean().item()
"""
Step 2: Create the optimizer.
After we call the backward pass with backpack,
every parameter will have a `diag_ggn_mc` field
in addition to a `grad` field.
We can use it to compute the search direction for that parameter,
```
step_direction = p.grad / (p.diag_ggn_mc + group["damping"])
```
and update the weights
"""
class DiagGGNOptimizer(torch.optim.Optimizer):
    """Preconditioned-gradient optimizer using the GGN/Fisher diagonal.

    Update rule: x_{t+1} = x_t - step_size * g_t / (G_t + damping),
    where G_t is the `diag_ggn_mc` field written by BackPACK during the
    backward pass.
    """

    def __init__(self, parameters, step_size, damping):
        super().__init__(parameters, dict(step_size=step_size, damping=damping))

    def step(self):
        """Perform one update step.

        Requires that `loss.backward()` ran inside
        `with backpack(DiagGGNMC())`, so each parameter has `diag_ggn_mc`.
        """
        for group in self.param_groups:
            for p in group["params"]:
                step_direction = p.grad / (p.diag_ggn_mc + group["damping"])
                # `alpha=` keyword replaces the deprecated positional-scalar
                # form `add_(-step_size, step_direction)`.
                p.data.add_(step_direction, alpha=-group["step_size"])
        # Bug fix: the original `return loss` referenced a global variable
        # defined later in the enclosing script. `step()` now only performs
        # the parameter update (returns None, like torch optimizers without
        # a closure); the visible caller ignores the return value.
"""
Step 3: Tell BackPACK about the model and loss function,
create the optimizer, and we will be ready to go
"""
extend(model)
extend(loss_function)
optimizer = DiagGGNOptimizer(model.parameters(), step_size=STEP_SIZE, damping=DAMPING)
"""
Final step: The training loop!
The only difference with a traditional training loop:
Before calling the backward pass, we will call
```
with backpack(DiagGGNMC()):
```
BackPACK will then add the diagonal of the GGN in the
`diag_ggn_mc` field during the backward pass.
"""
for batch_idx, (x, y) in enumerate(mnist_loader):
    output = model(x)
    accuracy = get_accuracy(output, y)

    # The extension context makes BackPACK write `diag_ggn_mc` onto each
    # parameter during backward(), which the optimizer's step() consumes.
    with backpack(DiagGGNMC()):
        loss = loss_function(output, y)
        loss.backward()
        optimizer.step()

    print(
        "Iteration %3.d/%d " % (batch_idx, MAX_ITER)
        + "Minibatch Loss %.3f " % (loss)
        + "Accuracy %.0f" % (accuracy * 100)
        + "%"
    )

    if batch_idx >= MAX_ITER:
        break
<file_sep>from math import sqrt
from torch import diag, diag_embed, multinomial, ones_like, softmax
from torch import sqrt as torchsqrt
from torch.nn import CrossEntropyLoss
from torch.nn.functional import one_hot
from backpack.core.derivatives.basederivatives import BaseLossDerivatives
from backpack.utils.ein import einsum
class CrossEntropyLossDerivatives(BaseLossDerivatives):
    """Derivatives of `torch.nn.CrossEntropyLoss` w.r.t. its input (the logits).

    Works with the softmax probabilities p = softmax(logits); the per-sample
    input Hessian of the loss is diag(p) - p pᵀ.
    """

    def get_module(self):
        return CrossEntropyLoss

    def _sqrt_hessian(self, module, g_inp, g_out):
        """Exact symmetric factorization S of the input Hessian, H = S Sᵀ."""
        probs = self.get_probs(module)
        tau = torchsqrt(probs)
        V_dim, C_dim = 0, 2
        Id = diag_embed(ones_like(probs), dim1=V_dim, dim2=C_dim)
        Id_tautau = Id - einsum("nv,nc->vnc", tau, tau)
        sqrt_H = einsum("nc,vnc->vnc", tau, Id_tautau)

        if module.reduction == "mean":
            N = module.input0.shape[0]
            sqrt_H /= sqrt(N)

        return sqrt_H

    def _sqrt_hessian_sampled(self, module, g_inp, g_out, mc_samples=1):
        """Monte-Carlo factorization built from `mc_samples` sampled labels."""
        M = mc_samples
        C = module.input0.shape[1]

        probs = self.get_probs(module)
        V_dim = 0
        probs_unsqueezed = probs.unsqueeze(V_dim).repeat(M, 1, 1)

        # Draw M class labels per sample from the predictive distribution.
        multi = multinomial(probs, M, replacement=True)
        classes = one_hot(multi, num_classes=C)
        classes = einsum("nvc->vnc", classes).float()

        sqrt_mc_h = (probs_unsqueezed - classes) / sqrt(M)

        if module.reduction == "mean":
            N = module.input0.shape[0]
            sqrt_mc_h /= sqrt(N)

        return sqrt_mc_h

    def _sum_hessian(self, module, g_inp, g_out):
        """Sum of per-sample input Hessians: Σₙ [diag(pₙ) - pₙ pₙᵀ]."""
        probs = self.get_probs(module)
        sum_H = diag(probs.sum(0)) - einsum("bi,bj->ij", (probs, probs))

        if module.reduction == "mean":
            N = module.input0.shape[0]
            sum_H /= N

        return sum_H

    def _make_hessian_mat_prod(self, module, g_inp, g_out):
        """Multiplication of the input Hessian with a matrix."""
        probs = self.get_probs(module)

        def hessian_mat_prod(mat):
            # H m = p ⊙ m - p (pᵀ m), applied per sample and matrix column.
            Hmat = einsum("bi,cbi->cbi", (probs, mat)) - einsum(
                "bi,bj,cbj->cbi", (probs, probs, mat)
            )

            if module.reduction == "mean":
                N = module.input0.shape[0]
                Hmat /= N

            return Hmat

        return hessian_mat_prod

    def hessian_is_psd(self):
        """diag(p) - p pᵀ is positive semi-definite."""
        return True

    def get_probs(self, module):
        """Softmax probabilities, recomputed from the stored logits."""
        return softmax(module.input0, dim=1)
<file_sep>How to use BackPACK
====================================
To use BackPACK with your setup, you first need to :py:meth:`backpack.extend` the model and the loss function,
and register the extension you want to use with :py:meth:`backpack.backpack`
before calling the :code:`backward()` function.
Extending the model and loss function
--------------------------------------------
.. code-block:: python
import torch
model = torch.nn.Sequential(
torch.nn.Linear(784, 64),
torch.nn.ReLU(),
torch.nn.Linear(64, 10)
)
lossfunc = torch.nn.CrossEntropyLoss()
model = extend(model)
lossfunc = extend(lossfunc)
See :ref:`Supported models` for the list of supported layers.
.. autofunction:: backpack.extend
Calling the extension
---------------------------------
.. code-block:: python
from backpack import backpack
from backpack.extensions import KFAC
from utils import load_data
X, y = load_data()
loss = lossfunc(model(X), y)
with backpack(KFAC()):
loss.backward()
for param in model.parameters():
print(param.grad)
print(param.kfac)
See :ref:`Extensions` for the list of available extensions and how to access the quantities.
.. autofunction:: backpack.backpack
<file_sep>"""
Einsum utility functions.
Makes it easy to switch to opt_einsum rather than torch's einsum for tests.
"""
import numpy as np
import opt_einsum as oe
import torch
TORCH = "torch"
OPT_EINSUM = "opt_einsum"
BPEXTS_EINSUM = "torch"
def _oe_einsum(equation, *operands):
    """`opt_einsum`-backed einsum with a torch-compatible calling convention."""
    # handle old interface, passing operands as one list
    # see https://pytorch.org/docs/stable/_modules/torch/functional.html#einsum
    if len(operands) == 1 and isinstance(operands[0], (list, tuple)):
        operands = operands[0]
    return oe.contract(equation, *operands, backend="torch")
# Dispatch table mapping backend name to its einsum implementation.
EINSUMS = {
    TORCH: torch.einsum,
    OPT_EINSUM: _oe_einsum,
}
def einsum(equation, *operands):
    """`einsum` implementations used by `backpack`.

    Modify by setting `backpack.utils.ein.BPEXTS_EINSUM`.
    See `backpack.utils.ein.EINSUMS` for supported implementations.
    """
    return EINSUMS[BPEXTS_EINSUM](equation, *operands)
def eingroup(equation, operand, dim=None):
    """Use einsum notation for (un-)grouping dimensions.

    Dimensions that cannot be inferred can be handed in via the
    dictionary `dim`.

    Many operations in `backpack` require that certain axes of a tensor
    be treated identically, and will therefore be grouped into a single
    dimesion of the tensor. One way to do that is using `view`s or
    `reshape`s. `eingroup` helps facilitate this process. It can be
    used in the same way as `einsum`, but acts only on a single tensor at
    a time (although this could be fixed with an improved syntax and
    equation analysis).

    Idea:
    -----
    * "a,b,c->ab,c": group dimension a and b into a single one
    * "a,b,c->ba,c" to transpose, then group b and a dimension

    Raises:
    -------
    `KeyError`: If information about a dimension in `dim` is missing
    or can be removed.
    `RuntimeError`: If the groups inferred from `equation` do not match
    the number of axes of `operand`

    Example usage:
    ```
    import torch
    from backpack.utils.ein import einsum, eingroup

    dim_a, dim_b, dim_c, dim_d = torch.randint(low=1, high=10, size=(4,))
    tensor = torch.randn((dim_a, dim_b, dim_c, dim_d))

    # 1) Transposition: Note the slightly different syntax for `eingroup`
    tensor_trans = einsum("abcd->cbad", tensor)
    tensor_trans_eingroup = eingroup("a,b,c,d->c,b,a,d", tensor)
    assert torch.allclose(tensor_trans, tensor_trans_eingroup)

    # 2) Grouping axes (a,c) and (b,d) together
    tensor_group = einsum("abcd->acbd", tensor).reshape((dim_a * dim_c, dim_b * dim_d))
    tensor_group_eingroup = eingroup("a,b,c,d->ac,bd", tensor)
    assert torch.allclose(tensor_group, tensor_group_eingroup)

    # 3) Ungrouping a tensor whose axes where merged
    tensor_merge = tensor.reshape(dim_a * dim_b, dim_c, dim_d)
    tensor_unmerge = tensor_merge.reshape(dim_a, dim_b, dim_c, dim_d)
    assert torch.allclose(tensor_unmerge, tensor)
    # eingroup needs to know the dimensions of the ungrouped dimension
    tensor_unmerge_eingroup = eingroup(
        "ab,c,d->a,b,c,d", tensor_merge, dim={"a": dim_a, "b": dim_b}
    )
    assert torch.allclose(tensor_unmerge, tensor_unmerge_eingroup)

    # 4) `einsum` functionality to sum out dimensions
    # sum over dim_c, group dim_a and dim_d
    tensor_sum = einsum("abcd->adb", tensor).reshape(dim_a * dim_d, dim_b)
    tensor_sum_eingroup = eingroup("a,b,c,d->ad,b", tensor)
    assert torch.allclose(tensor_sum, tensor_sum_eingroup)
    ```
    """
    dim = {} if dim is None else dim
    # Flatten-reshape, run the comma-free einsum, then reshape to the target.
    in_shape, out_shape, einsum_eq = _eingroup_preprocess(equation, operand, dim=dim)

    operand_in = try_view(operand, in_shape)
    result = einsum(einsum_eq, operand_in)
    return try_view(result, out_shape)
def _eingroup_preprocess(equation, operand, dim):
    """Process `eingroup` equation.

    Return the `reshape`s and `einsum` equations that have to
    be performed.
    """
    split, sep = "->", ","

    def groups(string):
        return string.split(sep)

    lhs, rhs = equation.split(split)
    in_groups, out_groups = groups(lhs), groups(rhs)

    dim = __eingroup_infer(in_groups, operand, dim)
    in_shape_flat, out_shape = __eingroup_shapes(in_groups, out_groups, dim)

    # Dropping the commas turns the eingroup equation into a valid einsum one.
    return in_shape_flat, out_shape, equation.replace(sep, "")
def __eingroup_shapes(in_groups, out_groups, dim):
    """Return shape the input needs to be reshaped, and the output shape"""

    def shape(groups, dim):
        return [group_dim(group, dim) for group in groups]

    def group_dim(group, dim):
        # A merged group's size is the product of its member axes' sizes.
        try:
            return np.prod([dim[g] for g in group])
        except KeyError as e:
            raise KeyError("Unknown dimension for an axis {}".format(e))

    out_shape = shape(out_groups, dim)

    # Fully flattened list of input axes (one letter per axis).
    in_groups_flat = []
    for group in in_groups:
        for letter in group:
            in_groups_flat.append(letter)
    in_shape_flat = shape(in_groups_flat, dim)

    return in_shape_flat, out_shape
def __eingroup_infer(in_groups, operand, dim):
    """Infer the size of each axis."""
    if not len(in_groups) == len(operand.shape):
        raise RuntimeError(
            "Got {} input groups {}, but tensor has {} axes.".format(
                len(in_groups), in_groups, len(operand.shape)
            )
        )

    for group, size in zip(in_groups, operand.shape):
        if len(group) == 1:
            # Single-letter group: its size can be read off the operand, so
            # the caller must not also supply it in `dim`.
            axis = group[0]
            if axis in dim.keys():
                raise KeyError(
                    "Can infer dimension of axis {}.".format(axis),
                    "Remove from dim = {}.".format(dim),
                )
            dim[axis] = size

    return dim
def try_view(tensor, shape):
    """Return `tensor` with `shape`, copying only when a view is impossible.

    `view` requires compatible strides (e.g. contiguity); when it raises,
    fall back to the more expensive `reshape`, which may copy.
    """
    try:
        result = tensor.view(shape)
    except RuntimeError:
        result = tensor.reshape(shape)
    return result
| aabb5c2d86595152845932402bd00456da8e1e1f | [
"reStructuredText",
"Markdown",
"Makefile",
"Python",
"Text",
"Shell"
] | 44 | Python | rioyokotalab/backpack | 3122de062d5bbcdcba8f8e02d24adb1bd2cdada6 | 4b9fbc4c71412b5b19e7d0210c5aa516e18b3f5f |
refs/heads/master | <file_sep>import React from 'react';
import DrumPad from './DrumPad';
import './DrumPads.sass';
const keyCodes = [81, 87, 69, 65, 83, 68, 90, 88, 67];
class DrumPads extends React.Component {
componentWillMount() {
window.addEventListener('keydown', this.handleKeyDown);
}
componentWillUnmount() {
window.removeEventListener('keydown', this.handleKeyDown);
}
handleKeyDown = (event) => {
const { powerOn } = this.props;
const { keyCode } = event;
if (keyCodes.includes(keyCode) && powerOn) {
const { pads, lightUpDrumPad } = this.props;
const id = String.fromCharCode(keyCode);
const { name } = pads.find(pad => pad.id === id);
this.playAudio(id);
lightUpDrumPad(id, name);
}
}
playAudio = (id) => {
const { volume, powerOn } = this.props;
if (powerOn) {
const { current } = this[id];
current.currentTime = 0;
current.volume = volume / 100;
current.play();
}
}
handlePadClick = (id, padName) => {
const { powerOn, lightUpDrumPad } = this.props;
if (powerOn) {
this.playAudio(id);
lightUpDrumPad(id, padName);
}
}
createPadRef = (id) => {
this[id] = React.createRef();
return this[id];
}
render() {
const { pads } = this.props;
return (
<div className="pad-box">
{pads.map(p => (
<DrumPad
key={p.id}
padData={p}
handlePadClick={this.handlePadClick}
forwardedRef={this.createPadRef(p.id)}
/>
))}
</div>
);
}
}
export default DrumPads;
<file_sep>import React, { Component } from 'react';
import DrumPads from './DrumPads';
import Switch from './Switch';
import Display from './Display';
import './App.sass';
import Dial from './Dial';
import Slider from './Slider';
import PADS from '../globals/pads';
class App extends Component {
state = {
powerOn: false,
volume: 94,
display: 'power: off',
pads: [],
}
timer = null;
componentDidMount() {
this.setState({ pads: PADS });
}
componentWillUnmount() {
if (this.timer) clearTimeout(this.timer);
}
handlePowerClick = () => {
const { powerOn } = this.state;
this.setState({
powerOn: !powerOn,
display: `power: ${powerOn ? 'off' : 'on'}`,
});
}
handleVolumeChange = (e) => {
const volume = parseInt(e.target.value, 10);
this.setState({
volume,
display: `volume: ${volume}`,
});
}
lightUpDrumPad = (id, padName) => {
const { pads } = this.state;
const display = padName.replace('-', ' ');
const newPads = pads.map(pad => (
pad.id === id ? { ...pad, active: true } : pad
));
this.setState({ pads: newPads, display });
this.timer = setTimeout(() => {
this.setState({ pads: PADS, display: '' });
}, 300);
}
render() {
const {
powerOn,
volume,
display,
pads,
} = this.state;
return (
<div className="drum-machine-wrapper">
<DrumPads handlePadClick={this.handlePadClick} pads={pads} lightUpDrumPad={this.lightUpDrumPad} volume={volume} powerOn={powerOn} />
<div>
<Display message={display} />
<div className="dial-container">
<Dial label="bass" />
<Dial label="treble" />
<Dial label="tone" />
</div>
<Slider volume={volume} handleVolumeChange={this.handleVolumeChange} />
<Switch handlePowerClick={this.handlePowerClick} powerOn={powerOn} />
</div>
</div>
);
}
}
export default App;
<file_sep>import soundSD from '../sounds/SD.mp3';
import soundCP from '../sounds/CP.mp3';
import soundOH from '../sounds/OH.mp3';
import soundKD from '../sounds/BD.mp3';
import soundFT from '../sounds/LT.mp3';
import soundRS from '../sounds/RS.mp3';
import soundHC from '../sounds/HC.mp3';
import soundCB from '../sounds/CB.mp3';
import soundMT from '../sounds/MT.mp3';
// Pad definitions for the 3×3 grid: `id` is the trigger key and element id,
// `active` drives the highlight, `name` is shown on the display,
// `sound` is the imported sample file.
export default [
  {
    id: 'Q',
    active: false,
    name: 'high-hat',
    sound: soundOH,
  },
  {
    id: 'W',
    active: false,
    name: 'kick-drum',
    sound: soundKD,
  },
  {
    id: 'E',
    active: false,
    name: 'floor-tom',
    sound: soundFT,
  },
  {
    id: 'A',
    active: false,
    name: 'rim-shot',
    sound: soundRS,
  },
  {
    id: 'S',
    active: false,
    name: 'conga',
    sound: soundHC,
  },
  {
    id: 'D',
    active: false,
    name: 'cow-bell',
    sound: soundCB,
  },
  {
    id: 'Z',
    active: false,
    name: 'mid-tom',
    sound: soundMT,
  },
  {
    id: 'X',
    active: false,
    name: 'clap',
    sound: soundCP,
  },
  {
    id: 'C',
    active: false,
    name: 'snare',
    sound: soundSD,
  },
];
<file_sep>import React from 'react';
import './Slider.sass';
// Controlled volume slider (0–100) flanked by low/high speaker icons.
export default ({ volume, handleVolumeChange }) => (
  <div className="slider-wrapper">
    <svg style={{ width: '24px', height: '24px' }} viewBox="0 0 24 24">
      <path fill="#000000" d="M7,9V15H11L16,20V4L11,9H7Z" />
    </svg>
    <input type="range" className="slider" min={0} max={100} value={volume} onChange={handleVolumeChange} />
    <svg style={{ width: '24px', height: '24px' }} viewBox="0 0 24 24">
      <path fill="#000000" d="M14,3.23V5.29C16.89,6.15 19,8.83 19,12C19,15.17 16.89,17.84 14,18.7V20.77C18,19.86 21,16.28 21,12C21,7.72 18,4.14 14,3.23M16.5,12C16.5,10.23 15.5,8.71 14,7.97V16C15.5,15.29 16.5,13.76 16.5,12M3,9V15H7L12,20V4L7,9H3Z" />
    </svg>
  </div>
);
<file_sep>import React, { Fragment } from 'react';
import './Switch.sass';
// Two-position power switch; the inner knob sits left when on, right when off.
export default ({ powerOn, handlePowerClick }) => (
  <Fragment>
    <div className="switch-wrapper">
      <p className="switch_tag">ON</p>
      <div className="switch" onClick={handlePowerClick} role="button" tabIndex="0">
        <div className="switch_inner" style={{ left: powerOn ? 0 : '25px' }} />
      </div>
      <p className="switch_tag">OFF</p>
    </div>
  </Fragment>
);
<file_sep># drum-machine
Javascript Drum Machine<file_sep>import React from 'react';
import './DrumPad.sass';
// A single clickable pad hosting its own <audio> element.
// `forwardedRef` lets the parent (DrumPads) trigger playback directly.
const Pad = ({ padData, forwardedRef, handlePadClick }) => (
  <div
    className="drum-pad"
    onClick={() => handlePadClick(padData.id, padData.name)}
    role="button"
    tabIndex="0"
    id={padData.name}
    style={{
      backgroundColor: padData.active ? '#51EBF5' : '#46C0C8',
    }}
  >
    <audio
      ref={forwardedRef}
      src={padData.sound}
      className="clip"
      id={padData.id}
      autoPlay={false}
    />
    {padData.id}
  </div>
);

export default Pad;
<file_sep>import React from 'react';
import './Dial.sass';
// Decorative (non-interactive) rotary dial with an uppercased label above it.
const Dial = ({ label }) => (
  <div className="dial-column">
    <p className="dial-label">{label.toUpperCase()}</p>
    <div className="dial">
      <div className="dial_marker" />
    </div>
  </div>
);
// Fallback label when the parent supplies none.
Dial.defaultProps = {
  label: 'name',
};
export default Dial;
<file_sep>import React from 'react';
import './Display.sass';
// Read-only display panel that echoes the last action (e.g. pad name played).
export default ({ message }) => (
  <div className="display" id="display">{message}</div>
);
| de0b9f1b6d1444df554ae207dda0658869abf66e | [
"JavaScript",
"Markdown"
] | 9 | JavaScript | chazmcgrill/drum-machine | cfdf213c8bd064097ff6a0b9233b775dd23f22f2 | f0a0f53274680f064c46be5e355c5de2e5d66372 |
refs/heads/master | <file_sep>class Solution:
def allPathsSourceTarget(self, graph: List[List[int]]) -> List[List[int]]:
result = []
n = len(graph)
def traverse(start, path):
path.append(start)
if start == n - 1:
result.append(path[::])
path.pop()
return
for x in graph[start]:
traverse(x, path)
path.pop()
traverse(0, [])
return result
<file_sep>export function number(busStops: [number, number][]): number {
  // People remaining on the bus: sum of (got on - got off) over all stops.
  return busStops.reduce((s, [on, off]) => s + on - off, 0);
}<file_sep>import Database from "./单例模式";
// Singleton invariant: repeated getInstance() calls yield the very same object.
test("测试数据库连接类单例", () => {
  expect(Database.getInstance()).toBe(Database.getInstance());
});
<file_sep>/**
* Definition for a binary tree node.
* class TreeNode {
* val: number
* left: TreeNode | null
* right: TreeNode | null
* constructor(val?: number, left?: TreeNode | null, right?: TreeNode | null) {
* this.val = (val===undefined ? 0 : val)
* this.left = (left===undefined ? null : left)
* this.right = (right===undefined ? null : right)
* }
* }
*/
/**
 * LC 654: build the "maximum binary tree" — the root holds the largest value
 * of the range, and the left/right subtrees are built recursively from the
 * values before/after it.
 */
function constructMaximumBinaryTree(nums: number[]): TreeNode | null {
    if (!nums.length) return null;
    // Locate the (first) maximum; it becomes the root of this range.
    let rootIdx = 0;
    for (let i = 1; i < nums.length; i++) {
        if (nums[i] > nums[rootIdx]) rootIdx = i;
    }
    return new TreeNode(
        nums[rootIdx],
        constructMaximumBinaryTree(nums.slice(0, rootIdx)),
        constructMaximumBinaryTree(nums.slice(rootIdx + 1)),
    );
};
<file_sep>/**
* @param {number[][]} grid
* @return {number}
*/
var surfaceArea = function (grid) {
    // LC 892: total surface area of the 3D shape formed by towers of cubes.
    // Neighbour offsets (down, right, up, left).
    let di = [1, 0, -1, 0];
    let dj = [0, 1, 0, -1];
    let N = grid.length;
    let surfaceArea = 0;
    for (let i = 0; i < N; i++) {
        for (let j = 0; j < N; j++) {
            let x = grid[i][j];
            if (x > 0) {
                // Top + bottom faces of a non-empty tower.
                surfaceArea += 2;
                for (let k = 0; k < 4; k++) {
                    let r = i + di[k];
                    let c = j + dj[k];
                    // Off-grid neighbours count as height 0 (fully exposed wall).
                    let y = 0;
                    if (r < N && c < N && r >= 0 && c >= 0) {
                        y = grid[r][c];
                    }
                    // Only the part of this wall taller than the neighbour shows.
                    surfaceArea += Math.max(x - y, 0);
                }
            }
        }
    }
    return surfaceArea;
};<file_sep>/**
* Definition for singly-linked list.
* function ListNode(val) {
* this.val = val;
* this.next = null;
* }
*/
/**
* @param {ListNode} head
* @return {ListNode}
*/
/**
 * Iteratively reverse a singly linked list (LC 206).
 * @param {ListNode} head
 * @return {ListNode} the new head (old tail); null for an empty list
 */
var reverseList = function (head) {
    let reversed = null; // head of the already-reversed prefix
    let cur = head;
    while (cur) {
        const rest = cur.next; // detach before re-pointing
        cur.next = reversed;
        reversed = cur;
        cur = rest;
    }
    return reversed;
};
<file_sep>var longestPalindrome = function (s: string): string {
  // Manacher's algorithm (LC 5). '#' separators make every palindrome in the
  // expanded string odd-length, so one radius array covers both parities.
  if (s.length < 2) return s;
  let ss = addSplit(s, "#");
  // begin/maxLen describe the answer in the ORIGINAL string.
  let begin = 0,
    maxLen = 1;
  // center/maxRight: the palindrome whose right edge extends furthest so far.
  let center = 0,
    maxRight = 0;
  // dp[i] = palindrome radius around i in the expanded string.
  let dp = Array(ss.length).fill(0);
  for (let i = 0; i < ss.length; i++) {
    if (i < maxRight) {
      // Seed from the mirror position, clipped to the known palindrome.
      let mirror = 2 * center - i;
      dp[i] = Math.min(dp[mirror], maxRight - i);
    }
    // Try to grow the radius beyond the seeded value.
    let l = i - dp[i] - 1,
      r = i + dp[i] + 1;
    while (l >= 0 && r < ss.length && ss[l] === ss[r]) l--, r++, dp[i]++;
    if (i + dp[i] > maxRight) (maxRight = i + dp[i]), (center = i);
    // Radius in the expanded string equals length in the original;
    // (i - maxLen) / 2 maps the expanded start index back.
    if (dp[i] > maxLen) (maxLen = dp[i]), (begin = (i - maxLen) / 2);
  }
  return s.substr(begin, maxLen);
};
// Interleave `sp` around every character: addSplit("ab", "#") === "#a#b#".
// (`.` deliberately keeps the original regex semantics.)
const addSplit = (s: string, sp: string): string =>
  s.replace(/./g, (ch) => sp + ch) + sp;
<file_sep>function displayTable(orders: string[][]): string[][] {
  // LC 1418: orders are [customer, tableNumber, foodItem] triples.
  // rowsMap: tableNumber -> (foodItem -> count).
  let rowsMap = new Map<string,Map<string,number>>();
  // All distinct food items; every table row gets a column for each.
  let foodSet = new Set<string>(orders.map(order => order[2]));
  for (let [,tableNumber, foodItem] of orders) {
    // First order for a table seeds all food counts at 0.
    const foodCountMap = rowsMap.get(tableNumber)
      ?? new Map<string,number>([...foodSet].map(food => [food, 0]));
    foodCountMap.set(foodItem, (foodCountMap.get(foodItem) ?? 0) + 1);
    rowsMap.set(tableNumber, foodCountMap);
  }
  // Header: "Table" + foods sorted alphabetically; rows sorted numerically by
  // table number, with each row's counts sorted by the same food order.
  return [['Table', ...[...foodSet].sort((name1,name2) => name1 < name2 ? -1 : 1)]].concat(
    [...rowsMap]
      .sort(([table1Number], [table2Number]) => Number(table1Number) - Number(table2Number))
      .map(([tableNumber, foodCounts]) =>
        [
          tableNumber,
          ...[...foodCounts].sort(([name1],[name2]) => name1 < name2 ? -1 : 1).map(([,count]) => String(count))
        ]
      )
  )
};
<file_sep>/**
* @param {number[]} nums
* @return {number[]}
*/
var smallerNumbersThanCurrent = function (nums) {
    // LC 1365. Sort a copy; the FIRST sorted index of a value equals the count
    // of strictly smaller elements.
    let copy = [...nums];
    let m = new Map();
    // m.has(x) guard keeps the first (smallest) index for duplicate values.
    copy.sort((a, b) => a - b).forEach((x, i) => m.set(x, m.has(x) ? m.get(x) : i));
    return nums.map(x => m.get(x));
};<file_sep>use std::collections::HashSet;
impl Solution {
pub fn is_happy(n: i32) -> bool {
let mut computed=HashSet::new();
let mut t=n;
while t!=1{
if computed.contains(&t){
return false;
}
computed.insert(t);
let mut x=0;
while t!=0{
let c=t%10;
x+=c*c;
t/=10;
}
t=x;
}
return true;
}
<file_sep>function allPathsSourceTarget(graph: number[][]): number[][] {
  // LC 797: enumerate all paths 0 -> n-1 in a DAG by plain DFS.
  const result = [];
  const n = graph.length;
  function dfs(curPath: number[], curEle: number) {
    if (curEle === n - 1) {
      return result.push(curPath);
    }
    // graph[curEle] has at most n-1 successors, so indexing up to n and
    // skipping `undefined` covers the whole adjacency list.
    for (let i = 0; i < n; i++) {
      const target = graph[curEle][i];
      if (target === undefined) continue;
      // Copy-on-extend: each recursive call owns its own path array.
      dfs([...curPath,target],target);
    }
  }
  dfs([0],0);
  return result;
};
<file_sep>class Solution
{
public:
    // LC 1371: longest substring where each vowel appears an even number of
    // times. A 5-bit parity mask tracks a/e/i/o/u; two prefixes with equal
    // masks bound a valid substring.
    int findTheLongestSubstring(string s)
    {
        // statusPos[mask] = 1-based index of the FIRST prefix with that mask;
        // -1 means the mask has not been seen yet.
        vector<int> statusPos(1 << 5, -1);
        int ans = 0, status = 0;
        statusPos[0] = 0; // empty prefix has all-even parity
        for (int i = 0; i < s.length(); i++)
        {
            if (s[i] == 'a')
                status ^= 1 << 0;
            else if (s[i] == 'e')
                status ^= 1 << 1;
            else if (s[i] == 'i')
                status ^= 1 << 2;
            else if (s[i] == 'o')
                status ^= 1 << 3;
            else if (s[i] == 'u')
                status ^= 1 << 4;
            // ~x is nonzero for every x except -1, i.e. "mask seen before".
            if (~statusPos[status])
                ans = max(ans, i + 1 - statusPos[status]);
            else
                statusPos[status] = i + 1;
        }
        return ans;
    }
};<file_sep>class DLinkedNode
{
public:
int k;
int v;
DLinkedNode *prev;
DLinkedNode *next;
DLinkedNode() : k(0), v(0), prev(nullptr), next(nullptr) {}
DLinkedNode(int _k, int _v) : k(_k), v(_v), prev(nullptr), next(nullptr) {}
};
// LC 146: O(1) get/put LRU cache — a hash map for lookup plus a doubly linked
// list (with sentinel head/tail) ordered most- to least-recently used.
class LRUCache
{
private:
    int capacity;
    int size;
    DLinkedNode *head; // sentinel: most recently used side
    DLinkedNode *tail; // sentinel: least recently used side
    unordered_map<int, DLinkedNode *> m; // key -> list node
public:
    LRUCache(int capacity)
    {
        this->capacity = capacity;
        size = 0;
        // Two sentinels remove all null checks from the splice operations.
        head = new DLinkedNode();
        tail = new DLinkedNode();
        head->next = tail;
        tail->prev = head;
    }
    // Return the value for key (refreshing its recency), or -1 if absent.
    int get(int key)
    {
        if (!m.count(key))
            return -1;
        DLinkedNode *ret = m[key];
        moveToHead(ret);
        return ret->v;
    }
    // Insert or update key; evicts the least recently used entry on overflow.
    void put(int key, int value)
    {
        if (!m.count(key))
        {
            DLinkedNode *node = new DLinkedNode(key, value);
            m[key] = node;
            addToHead(node);
            ++size;
            if (size > capacity)
            {
                // NOTE(review): the evicted node is unlinked but never
                // delete'd — this leaks memory; confirm intent.
                int removedKey = removeTail();
                m.erase(removedKey);
                --size;
            }
        }
        else
        {
            DLinkedNode *node = m[key];
            node->v = value;
            moveToHead(node);
        }
    }
    // Re-splice an existing node to the MRU position.
    void moveToHead(DLinkedNode *node)
    {
        removeNode(node);
        addToHead(node);
    }
    // Unlink node from the list (node's own pointers are left dangling).
    void removeNode(DLinkedNode *node)
    {
        node->prev->next = node->next;
        node->next->prev = node->prev;
    }
    // Link node directly after the head sentinel (MRU position).
    void addToHead(DLinkedNode *node)
    {
        node->prev = head;
        node->next = head->next;
        head->next = node;
        node->next->prev = node;
    }
    // Unlink the LRU node (just before the tail sentinel); return its key.
    int removeTail()
    {
        DLinkedNode *retNode = tail->prev;
        removeNode(retNode);
        return retNode->k;
    }
};
/**
* Your LRUCache object will be instantiated and called as such:
* LRUCache* obj = new LRUCache(capacity);
* int param_1 = obj->get(key);
* obj->put(key,value);
*/<file_sep>/**
* @param {number} numCourses
* @param {number[][]} prerequisites
* @return {number[]}
*/
var findOrder = function (numCourses, prerequisites) {
    // LC 210 — Kahn's algorithm (BFS topological sort).
    // Fix: the original decremented the indegree of the node being dequeued
    // (already 0), driving it to -1; that bookkeeping was dead and is removed.
    const adj = Array.from({ length: numCourses }, () => []);
    const indegree = new Array(numCourses).fill(0);
    for (const [course, pre] of prerequisites) {
        adj[pre].push(course);
        indegree[course]++;
    }
    // Seed with every course that has no prerequisites.
    const queue = [];
    indegree.forEach((d, i) => d === 0 && queue.push(i));
    const order = [];
    while (queue.length) {
        const cur = queue.shift();
        order.push(cur);
        for (const nxt of adj[cur]) {
            // A neighbour becomes ready once all its prerequisites are placed.
            if (--indegree[nxt] === 0) queue.push(nxt);
        }
    }
    // Fewer than numCourses placed nodes => the graph has a cycle.
    return order.length === numCourses ? order : [];
};
<file_sep>impl Solution {
pub fn max_sub_array(nums: Vec<i32>) -> i32 {
let mut max=nums[0];
let mut sum=0;
let len=nums.len();
for i in 0..len{
sum+=nums[i];
max=if max>sum{max}else{sum};
if sum<0{
sum=0;
}
}
max
}
}<file_sep>/**
* Definition for singly-linked list.
* function ListNode(val) {
* this.val = val;
* this.next = null;
* }
*/
/**
* @param {ListNode} head
* @return {ListNode}
*/
var middleNode = function (head) {
if (!head) return null;
let l = 0;
let p = head;
while (p && ++l) p = p.next;
console.log(l)
let mid = (l / 2 | 0) + 1;
let x = 0;
p = head;
while (x++ < mid - 1) p = p.next;
return p;
};<file_sep>function searchInsert(nums: number[], target: number): number {
let l = 0,
r = nums.length - 1;
let result = 0;
while (l <= r) {
let mid = (l + (r - l) / 2) | 0;
if (nums[mid] === target) return mid;
if (nums[mid] > target) r = mid - 1;
else (l = mid + 1), (result = l);
}
return result;
}
<file_sep>use std::collections::HashMap;
impl Solution {
    /// LC 96: number of structurally distinct BSTs on values 1..=n
    /// (Catalan numbers), computed by recursion memoized on the (l, h) range.
    pub fn num_trees(n: i32) -> i32 {
        // Memo keyed by the "l,h" range string; 0 doubles as "not computed"
        // (safe because every real count is >= 1).
        let mut memo = HashMap::new();
        fn count(l: i32, h: i32, mut memo: &mut HashMap<String,i32>) -> i32 {
            // Empty range: exactly one (empty) tree.
            if l > h {
                return 1;
            }
            let entry = memo.entry(format!("{},{}", l, h)).or_insert(0);
            if *entry != 0 {
                return *entry;
            }
            // Each i in l..=h serves as root; subtrees multiply independently.
            let mut result = 0;
            for i in l..=h {
                let left = count(l, i - 1, &mut memo);
                let right = count(i + 1, h, &mut memo);
                result += left * right;
            }
            // Re-fetch the entry: the recursive calls above needed the map.
            let entry = memo.entry(format!("{},{}", l, h)).or_insert(0);
            *entry = result;
            return result;
        }
        return count(1, n, &mut memo);
    }
}
<file_sep>fn accum(s:&str)->String {
    // "abcd" -> "A-Bb-Ccc-Dddd": char i is uppercased once, then repeated
    // i more times in lowercase; groups are joined with '-'.
    s.chars().enumerate().map(|(i,c)| c.to_string().to_uppercase()+c.to_string().to_lowercase().repeat(i).as_str()).collect::<Vec<String>>().join("-")
}<file_sep>/**
* @param {number[]} height
* @return {number}
*/
var trap = function (height) {
    // LC 42 (trapping rain water), two-pass DP:
    // dpL[i]/dpR[i] = tallest bar strictly left/right of i.
    let dpL = Array(height.length), dpR = Array(height.length);
    dpL[0] = 0, dpR[dpR.length - 1] = 0;
    for (let i = 1; i < dpL.length; i++) {
        dpL[i] = Math.max(dpL[i - 1], height[i - 1]);
    }
    for (let i = dpR.length - 2; i >= 0; i--) {
        dpR[i] = Math.max(dpR[i + 1], height[i + 1]);
    };
    // Water above column i = min(wall left, wall right) - height, if positive.
    let sum = 0;
    height.forEach((x, i) => {
        let cur = Math.min(dpL[i], dpR[i]);
        sum += cur > x ? (cur - x) : 0;
    });
    return sum;
};<file_sep>impl Solution {
pub fn can_finish(num_courses: i32, prerequisites: Vec<Vec<i32>>) -> bool {
let num_courses = num_courses as usize;
let mut has_circle = Box::new(false);
let mut visited = vec![false; num_courses];
let mut on_path = vec![false; num_courses];
let mut graph = vec![Vec::new(); num_courses];
for pair in &prerequisites[..] {
if let &[cur, pre] = &pair[..] {
graph[pre as usize].push(cur);
}
}
fn traverse(mut visited: &mut Vec<bool>, mut on_path: &mut Vec<bool>, graph: &Vec<Vec<i32>>, mut has_circle: &mut Box<bool>, start:i32) {
let idx = start as usize;
if on_path[idx] {
**has_circle = true;
}
if **has_circle || visited[idx] {
return;
}
on_path[idx] = true;
visited[idx] = true;
for &x in &graph[idx] {
traverse(&mut visited, &mut on_path, &graph, &mut has_circle, x);
}
on_path[idx] = false;
}
for x in 0..num_courses {
traverse(&mut visited, &mut on_path, &graph, &mut has_circle, x as i32);
}
return !*has_circle;
}
}
<file_sep>/*
* @lc app=leetcode.cn id=695 lang=javascript
*
* [695] 岛屿的最大面积
*
* https://leetcode-cn.com/problems/max-area-of-island/description/
*
* algorithms
* Medium (59.73%)
* Likes: 192
* Dislikes: 0
* Total Accepted: 23.1K
* Total Submissions: 37.8K
* Testcase Example: '[[1,1,0,0,0],[1,1,0,0,0],[0,0,0,1,1],[0,0,0,1,1]]'
*
* 给定一个包含了一些 0 和 1的非空二维数组 grid , 一个 岛屿 是由四个方向 (水平或垂直) 的 1 (代表土地)
* 构成的组合。你可以假设二维矩阵的四个边缘都被水包围着。
*
* 找到给定的二维数组中最大的岛屿面积。(如果没有岛屿,则返回面积为0。)
*
* 示例 1:
*
*
* [[0,0,1,0,0,0,0,1,0,0,0,0,0],
* [0,0,0,0,0,0,0,1,1,1,0,0,0],
* [0,1,1,0,1,0,0,0,0,0,0,0,0],
* [0,1,0,0,1,1,0,0,1,0,1,0,0],
* [0,1,0,0,1,1,0,0,1,1,1,0,0],
* [0,0,0,0,0,0,0,0,0,0,1,0,0],
* [0,0,0,0,0,0,0,1,1,1,0,0,0],
* [0,0,0,0,0,0,0,1,1,0,0,0,0]]
*
*
* 对于上面这个给定矩阵应返回 6。注意答案不应该是11,因为岛屿只能包含水平或垂直的四个方向的‘1’。
*
* 示例 2:
*
*
* [[0,0,0,0,0,0,0,0]]
*
* 对于上面这个给定的矩阵, 返回 0。
*
* 注意: 给定的矩阵grid 的长度和宽度都不超过 50。
*
*/
// @lc code=start
/**
* @param {number[][]} grid
* @return {number}
*/
/**
 * LC 695: area of the largest 4-connected island of 1s.
 * NOTE: visited land cells are sunk to 0 in-place (same as the original).
 * @param {number[][]} grid
 * @return {number}
 */
var maxAreaOfIsland = function(grid) {
    const rows = grid.length, cols = grid[0].length;
    // Flood-fill from (i, j), zeroing cells and returning the region size.
    const sink = (i, j) => {
        if (i < 0 || i >= rows || j < 0 || j >= cols || grid[i][j] === 0) return 0;
        grid[i][j] = 0;
        return 1 + sink(i - 1, j) + sink(i + 1, j) + sink(i, j - 1) + sink(i, j + 1);
    };
    let best = 0;
    for (let i = 0; i < rows; i++) {
        for (let j = 0; j < cols; j++) {
            if (grid[i][j] === 1) best = Math.max(best, sink(i, j));
        }
    }
    return best;
};
// @lc code=end
<file_sep>function findOrder(numCourses: number, prerequisites: number[][]): number[] {
  // LC 210 via DFS: reversed post-order of an acyclic graph is a valid
  // topological order; an on-path marker detects cycles.
  const visited = Array(numCourses).fill(false);
  const onPath = Array(numCourses).fill(false);
  let hasCircle = false;
  const result = [];
  // graph[pre] lists the courses that require `pre` first.
  const graph = Array.from({length: numCourses}, () => []);
  for (let [cur, pre] of prerequisites) {
    graph[pre].push(cur);
  }
  const traverse = (start: number) => {
    // Revisiting a node on the current path => back edge => cycle.
    if (onPath[start]) {
      hasCircle = true;
    }
    if (hasCircle || visited[start]) return;
    onPath[start] = true;
    visited[start] = true;
    for (let x of graph[start]) {
      traverse(x);
    }
    // Post-order append: all dependents are already placed after us.
    result.push(start);
    onPath[start] = false;
  };
  for(let i = 0; i < numCourses; i++) {
    traverse(i);
  }
  if (hasCircle) return [];
  return result.reverse();
};
<file_sep>impl Solution {
    pub fn single_number(nums: Vec<i32>) -> i32 {
        // LC 136: XOR of everything — paired values cancel, the unique remains.
        nums.iter().fold(0,|r,x| r^x)
    }
}<file_sep>impl Solution {
pub fn find_duplicate(nums: Vec<i32>) -> i32 {
let (mut slow,mut fast)=(0,0);
loop{
slow=nums[slow as usize];
fast=nums[nums[fast as usize] as usize];
if slow==fast{
break;
}
}
slow=0;
while slow!=fast{
slow=nums[slow as usize];
fast=nums[fast as usize];
}
slow
}
}<file_sep>// Definition for a binary tree node.
// #[derive(Debug, PartialEq, Eq)]
// pub struct TreeNode {
// pub val: i32,
// pub left: Option<Rc<RefCell<TreeNode>>>,
// pub right: Option<Rc<RefCell<TreeNode>>>,
// }
//
// impl TreeNode {
// #[inline]
// pub fn new(val: i32) -> Self {
// TreeNode {
// val,
// left: None,
// right: None
// }
// }
// }
use std::rc::Rc;
use std::cell::RefCell;
impl Solution {
    /// LC 98: validate a BST by threading (min, max) bounds down the tree.
    pub fn is_valid_bst(root: Option<Rc<RefCell<TreeNode>>>) -> bool {
        // None bound = unbounded on that side (root has no constraints).
        return Solution::validate(root.clone(), None, None);
    }
    fn validate(node: Option<Rc<RefCell<TreeNode>>>, min: Option<i32>, max: Option<i32>) -> bool {
        match node {
            None => true,
            Some(node) => {
                let n = node.borrow();
                // map_or(true, ..) treats an absent bound as always satisfied;
                // strict < / > rejects duplicate values, per LC's definition.
                return min.map_or(true, |x| x < n.val)
                    && max.map_or(true, |x| x > n.val)
                    && Solution::validate(n.left.clone(), min, Some(n.val))
                    && Solution::validate(n.right.clone(), Some(n.val), max);
            }
        }
    }
}
<file_sep>use std::collections::HashMap;
impl Solution {
pub fn subarrays_div_by_k(a: Vec<i32>, k: i32) -> i32 {
let mut m=HashMap::new();
m.insert(0,1);
let (mut ans,mut pre)=(0,0);
for x in a.iter(){
pre=(pre+x)%k;
if pre<0{pre+=k;}
if m.contains_key(&pre) {ans+=m.get(&pre).unwrap();m.insert(pre,m.get(&pre).unwrap()+1);}
else {m.insert(pre,1);}
}
ans
}
}<file_sep>export interface Iterator<K, V> {
current(): V;
next(): V;
key(): K;
valid(): boolean;
rewind(): void;
}
// Concrete iterator over a WordsCollection, forward or reverse.
// NOTE(review): despite the name, it walks INSERTION order, not alphabetical
// order — nothing here sorts; confirm whether the name or behavior is wrong.
export class AlphabeticalOrderIterator implements Iterator<number, string> {
  // Index of the element `current()`/`next()` will read.
  private position: number = 0;
  constructor(
    private collection: WordsCollection,
    private reverse: boolean = false
  ) {
    // Reverse traversal starts at the last element.
    if (reverse) this.position = this.collection.getCount() - 1;
  }
  key(): number {
    return this.position;
  }
  current(): string {
    return this.collection.getItems()[this.position];
  }
  // Return the current item, then advance (or retreat when reversed).
  next(): string {
    let item = this.collection.getItems()[this.position];
    this.position += this.reverse ? -1 : 1;
    return item;
  }
  valid(): boolean {
    return this.reverse
      ? this.position >= 0
      : this.position < this.collection.getCount();
  }
  // Intentionally a no-op: this iterator cannot be reset.
  rewind(): void {}
}
// Anything that can hand out an iterator over its elements.
export interface Iterable<K, V> {
  getIterator(): Iterator<K, V>;
}
// The aggregate: a simple ordered string collection with two iterators.
export class WordsCollection implements Iterable<number, string> {
  private items: string[] = [];
  public getItems(): string[] {
    return this.items;
  }
  public getCount(): number {
    return this.items.length;
  }
  public addItem(item: string): void {
    this.items.push(item);
  }
  getIterator(): Iterator<number, string> {
    return new AlphabeticalOrderIterator(this);
  }
  getIteratorReverse(): Iterator<number, string> {
    return new AlphabeticalOrderIterator(this, true);
  }
}
<file_sep>struct CQueue {
    s1: Vec<i32>, // inbox stack: receives every append_tail
    s2: Vec<i32>, // outbox stack: reversed inbox, popped by delete_head
}
/**
* `&self` means the method takes an immutable reference.
* If you need a mutable reference, change it to `&mut self` instead.
*/
impl CQueue {
    /// 剑指 Offer 09: queue built from two stacks.
    fn new() -> Self {
        CQueue {
            s1: Vec::new(),
            s2: Vec::new(),
        }
    }
    /// Enqueue: O(1) push onto the inbox stack.
    fn append_tail(&mut self, value: i32) {
        self.s1.push(value);
    }
    /// Dequeue the oldest element, or -1 when the queue is empty.
    /// Amortized O(1): each element is moved inbox -> outbox at most once.
    fn delete_head(&mut self) -> i32 {
        if self.s2.is_empty() {
            // Refill the outbox by reversing the inbox.
            while let Some(v) = self.s1.pop() {
                self.s2.push(v);
            }
        }
        // FIX: the original matched `NONE => -1`, where `NONE` is a fresh
        // catch-all binding, not `Option::None` — correct only by accident
        // and a compiler-warning magnet. `unwrap_or` states the intent.
        self.s2.pop().unwrap_or(-1)
    }
}
/**
* Your CQueue object will be instantiated and called as such:
* let obj = CQueue::new();
* obj.append_tail(value);
* let ret_2: i32 = obj.delete_head();
*/
<file_sep>// Definition for a binary tree node.
// #[derive(Debug, PartialEq, Eq)]
// pub struct TreeNode {
// pub val: i32,
// pub left: Option<Rc<RefCell<TreeNode>>>,
// pub right: Option<Rc<RefCell<TreeNode>>>,
// }
//
// impl TreeNode {
// #[inline]
// pub fn new(val: i32) -> Self {
// TreeNode {
// val,
// left: None,
// right: None
// }
// }
// }
use std::rc::Rc;
use std::cell::RefCell;
use std::collections::VecDeque;
struct Codec {
}
const SEP: &str = ",";
const NULL: &str = "#";
/**
* `&self` means the method takes an immutable reference.
* If you need a mutable reference, change it to `&mut self` instead.
*/
impl Codec {
    fn new() -> Self {
        Self {}
    }
    /// LC 297: level-order (BFS) serialization. Each node emits its value;
    /// absent children emit NULL ("#"); tokens are joined with SEP (",").
    fn serialize(&self, root: Option<Rc<RefCell<TreeNode>>>) -> String {
        match root {
            None => NULL.to_string(),
            Some(node) => {
                let mut result: Vec<String> = vec![];
                let mut traversal: VecDeque<Option<Rc<RefCell<TreeNode>>>> = VecDeque::new();
                traversal.push_back(Some(node));
                while !traversal.is_empty() {
                    let cur = traversal.pop_front().unwrap();
                    match cur {
                        None => {
                            result.push(NULL.to_string());
                        },
                        Some(cur) => {
                            let cur = cur.borrow();
                            result.push(cur.val.to_string());
                            // Children (possibly None) are queued so their
                            // slots appear in the output.
                            traversal.push_back(cur.left.clone());
                            traversal.push_back(cur.right.clone());
                        }
                    }
                    result.push(SEP.to_string());
                }
                // Drop the trailing separator.
                result.pop();
                result.join("")
            }
        }
    }
    /// Inverse of serialize: rebuild the tree level by level, pulling two
    /// child tokens from the stream for every node in the work queue.
    fn deserialize(&self, data: String) -> Option<Rc<RefCell<TreeNode>>> {
        let mut nodes: VecDeque<&str> = data.split(SEP).collect();
        // Queue of constructed nodes still awaiting their children.
        let mut result: VecDeque<Rc<RefCell<TreeNode>>> = VecDeque::new();
        let root = nodes.pop_front();
        match root {
            None => None,
            Some(s) => {
                if s == NULL { None }
                else {
                    let root = TreeNode::new(s.parse().unwrap());
                    let root = Rc::new(RefCell::new(root));
                    result.push_back(root.clone());
                    while !result.is_empty() {
                        let mut cur = result.pop_front().unwrap();
                        let mut cur = cur.borrow_mut();
                        let leftVal = nodes.pop_front().unwrap();
                        if (leftVal != NULL) {
                            let left = Rc::new(RefCell::new(TreeNode::new(leftVal.to_string().parse().unwrap())));
                            result.push_back(left.clone());
                            cur.left = Some(left);
                        } else {
                            cur.left = None;
                        }
                        let rightVal = nodes.pop_front().unwrap();
                        if rightVal != NULL {
                            let right = Rc::new(RefCell::new(TreeNode::new(rightVal.to_string().parse().unwrap())));
                            result.push_back(right.clone());
                            cur.right = Some(right);
                        } else {
                            cur.right = None;
                        }
                    }
                    Some(root)
                }
            }
        }
    }
}
/**
* Your Codec object will be instantiated and called as such:
* let obj = Codec::new();
* let data: String = obj.serialize(strs);
* let ans: Option<Rc<RefCell<TreeNode>>> = obj.deserialize(data);
*/
<file_sep>/**
* @param {number[]} nums
* @return {number}
*/
var maxSubArray = function (nums) {
    // LC 53 — Kadane's algorithm over a sliding right index.
    let r = 0;
    let sum = 0, max = Number.MIN_SAFE_INTEGER;
    while (r < nums.length) {
        sum += nums[r];
        max = max > sum ? max : sum;
        // A negative running sum can only hurt later elements — restart it.
        if (sum < 0) sum = 0;
        r++;
    }
    return max;
};<file_sep>interface Product {
  operation(): string;
}
// Concrete products: interchangeable behind the Product interface.
class ProductA implements Product {
  operation() {
    return "product A";
  }
}
class ProductB implements Product {
  operation() {
    return "product B";
  }
}
// Factory Method pattern: the template method `someOperation` is written
// against Product; subclasses choose the concrete product to instantiate.
abstract class Creator {
  public abstract factoryMethod(): Product;
  public someOperation(): string {
    const prod = this.factoryMethod();
    return `created a ${prod.operation()}`;
  }
}
export class CreatorA extends Creator {
  public factoryMethod(): Product {
    return new ProductA();
  }
}
export class CreatorB extends Creator {
  public factoryMethod(): Product {
    return new ProductB();
  }
}
<file_sep>import {
ProductA,
ProductB,
ProductA1,
ProductB1,
ProductA2,
ProductB2,
AbstractFactory,
Factory1,
Factory2,
} from "./抽象工厂";
// Abstract Factory invariant: each concrete factory produces the matching
// product family (Factory1 -> *1 products, Factory2 -> *2 products).
test("测试抽象工厂", () => {
  let factory1: AbstractFactory = new Factory1();
  let factory2: AbstractFactory = new Factory2();
  expect(factory1.createProductA()).toBeInstanceOf(ProductA1);
  expect(factory1.createProductB()).toBeInstanceOf(ProductB1);
  expect(factory2.createProductA()).toBeInstanceOf(ProductA2);
  expect(factory2.createProductB()).toBeInstanceOf(ProductB2);
});
<file_sep>/**
* @param {string} s
* @return {number}
*/
var lengthOfLongestSubstring = function (s) {
    // LC 3 — sliding window [l, r) whose characters are kept unique in `st`.
    let l = 0, r = 0;
    let ans = 0, st = new Set();
    while (r < s.length) {
        if (st.has(s[r])) {
            // Duplicate: shrink from the left until s[r] can be admitted.
            st.delete(s[l++]);
            continue;
        }
        st.add(s[r++]);
        // The set size IS the current window length.
        ans = Math.max(st.size, ans);
    }
    return ans;
};<file_sep>// Definition for singly-linked list.
// #[derive(PartialEq, Eq, Clone, Debug)]
// pub struct ListNode {
// pub val: i32,
// pub next: Option<Box<ListNode>>
// }
//
// impl ListNode {
// #[inline]
// fn new(val: i32) -> Self {
// ListNode {
// next: None,
// val
// }
// }
// }
impl Solution {
    /// LC 21: merge two sorted lists by repeatedly moving (via `take`) the
    /// smaller head node onto the tail of the result list.
    pub fn merge_two_lists(list1: Option<Box<ListNode>>, list2: Option<Box<ListNode>>) -> Option<Box<ListNode>> {
        // Dummy head avoids special-casing the first appended node.
        let mut virtual_head = ListNode::new(-1);
        let mut p = &mut virtual_head;
        let mut p1 = list1;
        let mut p2 = list2;
        while p1.is_some() && p2.is_some() {
            let (mut p1_data, mut p2_data) = (p1.as_deref_mut().unwrap(), p2.as_deref_mut().unwrap());
            if p1_data.val > p2_data.val {
                // Detach the rest before moving the node, then splice it in.
                let next = p2_data.next.take();
                p.next = p2.take();
                p2 = next;
            } else {
                let next = p1_data.next.take();
                p.next = p1.take();
                p1 = next;
            }
            p = p.next.as_deref_mut().unwrap();
        }
        // At most one list is non-empty; append whatever remains.
        p.next = p1.or(p2);
        return virtual_head.next;
    }
}
<file_sep># Definition for a binary tree node.
# class TreeNode:
# def __init__(self, val=0, left=None, right=None):
# self.val = val
# self.left = left
# self.right = right
import math
class Solution:
    def maxSumBST(self, root: Optional[TreeNode]) -> int:
        """LC 1373: largest key-sum over all subtrees that are valid BSTs.

        ``traverse`` returns a 4-tuple per subtree:
        [0] is this subtree a valid BST, [1] its key sum,
        [2] its minimum value, [3] its maximum value.
        An empty subtree is a valid BST with sum 0 and inverted inf bounds
        so the comparisons below are vacuously true.
        """
        # 0 also covers "no valid BST found": the empty subtree contributes 0.
        maxSum = 0
        def traverse(node: Optional[TreeNode]) -> (bool, int, int, int):
            nonlocal maxSum
            if not node:
                return (True, 0, math.inf, -math.inf)
            left = traverse(node.left)
            right = traverse(node.right)
            # Valid BST here iff both children are, and node.val sits strictly
            # between the left max and the right min.
            if left[0] and right[0] and node.val > left[3] and node.val < right[2]:
                curSum = node.val + left[1] + right[1]
                maxSum = max(maxSum, curSum)
                return (True, curSum, min(node.val, left[2]), max(node.val, right[3]))
            # Not a BST: bounds/sum are irrelevant upstream.
            return (False, 0, 0, 0)
        traverse(root)
        return maxSum
<file_sep>/**
* @param {number} n
* @return {number}
*/
var waysToChange = function (n) {
    // 面试题 08.11: count ways to represent n cents with US coins.
    // Classic unbounded-knapsack DP: iterating coins in the OUTER loop counts
    // combinations (order-insensitive), not permutations.
    const coins = [25, 10, 5, 1];
    // dp[i] = number of ways to make amount i; one way to make 0.
    let dp = Array(n + 1).fill(0);
    dp[0] = 1;
    for (let x of coins) {
        for (let i = x; i <= n; i++) {
            dp[i] = (dp[i] + dp[i - x]) % 1000000007;
        }
    }
    return dp[n];
};<file_sep>// Definition for a binary tree node.
// #[derive(Debug, PartialEq, Eq)]
// pub struct TreeNode {
// pub val: i32,
// pub left: Option<Rc<RefCell<TreeNode>>>,
// pub right: Option<Rc<RefCell<TreeNode>>>,
// }
//
// impl TreeNode {
// #[inline]
// pub fn new(val: i32) -> Self {
// TreeNode {
// val,
// left: None,
// right: None
// }
// }
// }
use std::rc::Rc;
use std::cell::RefCell;
impl Solution {
    /// Recursive bound check: every value must lie strictly in (lower, upper).
    /// Bounds are i64 so i32::MIN / i32::MAX node values cannot collide with
    /// the sentinels.
    pub fn helper(root:&Option<Rc<RefCell<TreeNode>>>,lower:i64,upper:i64)->bool{
        match root{
            None=>true,
            Some(node)=>{
                if node.borrow().val as i64<=lower||node.borrow().val as i64>=upper{
                    false
                }else{
                    // Left keeps the lower bound, right keeps the upper bound;
                    // the node's value tightens the other side.
                    Solution::helper(&node.borrow().left,lower,node.borrow().val as i64)&&Solution::helper(&node.borrow().right,node.borrow().val as i64,upper)
                }
            }
        }
    }
    /// LC 98: validate a binary search tree.
    pub fn is_valid_bst(root: Option<Rc<RefCell<TreeNode>>>) -> bool {
        Solution::helper(&root,i64::min_value(),i64::max_value())
    }
}<file_sep>/**
* @param {string} digits
* @return {string[]}
*/
var letterCombinations = function (digits) {
    // LC 17: phone-keypad letter combinations, built by backtracking.
    let result = [];
    if (digits.length === 0) return result;
    // Digit -> letters, mirroring a phone keypad.
    const m = new Map([['2', 'abc'], ['3', 'def'], ['4', 'ghi'], ['5', 'jkl'], ['6', 'mno'], ['7', 'pqrs'], ['8', 'tuv'], ['9', 'wxyz']]);
    // cur = combination built so far; rm = digits still to expand.
    function helper(cur, rm) {
        if (cur.length === digits.length) {
            result.push(cur);
            return;
        }
        let x = rm[0], chrs = m.get(x);
        let last = rm.slice(1);
        for (let i = 0; i < chrs.length; i++) {
            // String concatenation copies, so no explicit undo is needed.
            helper(cur + chrs[i], last);
        }
    }
    helper('', digits);
    return result;
};<file_sep># 剑指offer 题解
## 数组中重复的数字
> 在一个长度为n的数组里的所有数字都在0到n-1的范围内。 数组中某些数字是重复的,但不知道有几个数字是重复的。也不知道每个数字重复几次。请找出数组中任意一个重复的数字。 例如,如果输入长度为7的数组{2,3,1,0,2,5,3},那么对应的输出是第一个重复的数字2。
>
> [牛客网](https://www.nowcoder.com/practice/623a5ac0ea5b4e5f95552655361ae0a8?tpId=13&tqId=11203&tPage=1&rp=1&ru=/ta/coding-interviews&qru=/ta/coding-interviews/question-ranking&from=scarboroughcoral)
### 思路
因为数组长度为n,而且元素值刚好是0~n-1的范围,对应数组下标,因此可以利用这么一个特点,将当前元素与以当前元素值为下标的元素进行交换,如果存在重复的值时交换的位置必定存在重复的元素。
- 时间复杂度 O(n),空间复杂度 O(1)
### 代码
```javascript
function duplicate(numbers, duplication)
{
// write code here
//这里要特别注意~找到任意重复的一个值并赋值到duplication[0]
//函数返回True/False
for(let i=0;i<numbers.length;i++){
while(numbers[i]!==i){
if(numbers[numbers[i]]===numbers[i]){
duplication[0]=numbers[i];
return true;
}
let tmp=numbers[i];
numbers[i]=numbers[numbers[i]];
numbers[tmp]=tmp;
}
}
return false;
}
```
## 二维数组中的查找
> 在一个二维数组中(每个一维数组的长度相同),每一行都按照从左到右递增的顺序排序,每一列都按照从上到下递增的顺序排序。请完成一个函数,输入这样的一个二维数组和一个整数,判断数组中是否含有该整数。
>
> [牛客网](https://www.nowcoder.com/practice/abc3fe2ce8e146608e868a70efebf62e?tpId=13&tqId=11154&tPage=1&rp=1&ru=/ta/coding-interviews&qru=/ta/coding-interviews/question-ranking&from=scarboroughcoral)
### 思路
因为每行从左到右递增,每列从上向下递增,那么如果从右上角开始查找,如果目标值比当前元素值大就一定在下面,目标值比当前值小一定在左边。也就是说:如果目标值比当前元素值大那就向下找,目标值比当前元素之小那就向左查找。
- 时间复杂度O(M+N),空间O1
### 代码
```javascript
function Find(target, array)
{
// write code here
if(array.length===0||array.length===0||array[0].length===0) return false;
let rl=array.length;
let cl=array[0].length;
let i=0,j=cl-1;
while(array[i][j]!==target){
if(array[i][j]<target){
i++;
}else{
j--;
}
if(i>=rl||j<0) return false;
}
return true;
}
```
## 替换空格
> 请实现一个函数,将一个字符串中的每个空格替换成“%20”。例如,当字符串为We Are Happy.则经过替换之后的字符串为We%20Are%20Happy。
>
> [牛客网](https://www.nowcoder.com/practice/4060ac7e3e404ad1a894ef3e17650423?tpId=13&tqId=11155&tPage=1&rp=1&ru=/ta/coding-interviews&qru=/ta/coding-interviews/question-ranking&from=scarboroughcoral)
### 思路
首先想到可以使用`String.prototype.replace`方法。也可以利用双指针直接操作的方式。
### 代码
- String.prototype.replace
```javascript
function replaceSpace(str)
{
// write code here
return str.replace(/\s/g,'%20');
}
```
- 直接操作
```javascript
function replaceSpace(str)
{
let a=str.split('');
let spaceCount=a.filter(x=>x==' ').length;
if(spaceCount===0) return str;
let l=a.length-1;
a.push(...(Array(spaceCount*2).fill('')));
let r=a.length-1;
while(l>=0){
if(a[l]!==' '){
a[r--]=a[l--];
continue;
}
a[r--]='0',a[r--]='2',a[r--]='%';
l--;
}
return a.join('')
}
```
## 树的子结构
> 输入两棵二叉树A,B,判断B是不是A的子结构。(ps:我们约定空树不是任意一个树的子结构)
>
> [牛客网](https://www.nowcoder.com/practice/6e196c44c7004d15b1610b9afca8bd88?tpId=13&tqId=11170&tPage=1&rp=1&ru=/ta/coding-interviews&qru=/ta/coding-interviews/question-ranking&from=scarboroughcoral)
### 思路
递归处理。
- 时间复杂度 O(NM),空间复杂度 O(N),其中 M 和 N 分别是两棵树的节点个数。
### 代码
```javascript
/* function TreeNode(x) {
this.val = x;
this.left = null;
this.right = null;
} */
function HasSubtree(pRoot1, pRoot2)
{
// write code here
function helper(a,b){
if(!b) return true;
if(!a) return false;
if(a.val!==b.val) return false;
return helper(a.left,b.left)&&helper(a.right,b.right);
}
if(!pRoot2||!pRoot1) return false;
return helper(pRoot1,pRoot2)||HasSubtree(pRoot1.left,pRoot2)||HasSubtree(pRoot1.right,pRoot2)
}
```
## 从尾到头打印链表
> 输入一个链表,按链表从尾到头的顺序返回一个数组。
>
> [牛客网](https://www.nowcoder.com/practice/d0267f7f55b3412ba93bd35cfa8e8035?tpId=13&tqId=11156&tPage=1&rp=1&ru=/ta/coding-interviews&qru=/ta/coding-interviews/question-ranking&from=scarboroughcoral)
### 思路
使用递归。递归基本条件是遍历到最后节点直接返回当前节点的数组(或者遍历到null返回空数组),递推关系是当前结果是将剩余节点先放入然后放入当前节点的数组。
- 时间复杂度On,空间On
### 代码
```javascript
/*function ListNode(x){
this.val = x;
this.next = null;
}*/
function printListFromTailToHead(head)
{
// write code here
if(!head) return [];
return [...printListFromTailToHead(head.next),head.val]
}
```
## 面试题07. 重建二叉树
> 输入某二叉树的前序遍历和中序遍历的结果,请重建该二叉树。假设输入的前序遍历和中序遍历的结果中都不含重复的数字。
>
> 例如,给出
>
> 前序遍历 preorder = [3,9,20,15,7]
> 中序遍历 inorder = [9,3,15,20,7]
> 返回如下的二叉树:
>
> 3
> / \
> 9 20
> / \
> 15 7
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/zhong-jian-er-cha-shu-lcof
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
### 思路
递归构建。前序遍历数组第一个元素是根节点,然后这个元素在中序遍历中的左侧是左子树,右侧是右子树,当然需要考虑当前子树的左右边界。这样递归的进行就可以了。
- 时间复杂度On,空间On
### 代码
```js
/**
* Definition for a binary tree node.
* function TreeNode(val) {
* this.val = val;
* this.left = this.right = null;
* }
*/
/**
* @param {number[]} preorder
* @param {number[]} inorder
* @return {TreeNode}
*/
var buildTree = function(preorder, inorder) {
let m=new Map();
inorder.forEach((x,i)=>m.set(x,i));
function helper(preL,preR,inL){
if(preL>preR) return null;
let x=new TreeNode(preorder[preL]);
let inIdx=m.get(x.val);
let leftSize=inIdx-inL;
x.left=helper(preL+1,preL+leftSize,inL);
x.right=helper(preL+leftSize+1,preR,inL+leftSize+1);
return x;
}
return helper(0,preorder.length-1,0)
};
```
## 面试题11. 旋转数组的最小数字
> 把一个数组最开始的若干个元素搬到数组的末尾,我们称之为数组的旋转。输入一个递增排序的数组的一个旋转,输出旋转数组的最小元素。例如,数组 [3,4,5,1,2] 为 [1,2,3,4,5] 的一个旋转,该数组的最小值为1。
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/xuan-zhuan-shu-zu-de-zui-xiao-shu-zi-lcof
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
### 思路
二分查找。查找规律。
首先排除旋转数组刚好旋转回未旋转的样子,那么第一个元素一定是最小元素。
l,r分别设为当前查找子数组的左右边界,mid为中间下标,即`mid=l+(r-l)/2`向下取整。如果mid对应数值比r数值要大,那最小值一定存在于mid右侧;如果mid对应数值比r数值要小,那么mid一定在mid以及mid的左侧。如果mid对应数值和r相等,那么没法判断,调整边界r--。
- 时间复杂度Ologn,空间O1
### 代码
```js
/**
* @param {number[]} numbers
* @return {number}
*/
var minArray = function(numbers) {
//is not rotated
if(numbers[0]<numbers[numbers.length-1]) return numbers[0]
//is rotated
let l=0,r=numbers.length-1;
while(l<r){
let mid=l+((r-l)/2|0);
if(numbers[mid]>numbers[r]) l=mid+1;
else if(numbers[mid]<numbers[r]) r=mid;
else r--;
}
return numbers[l]
};
```
## 面试题57 - II. 和为s的连续正数序列
> 输入一个正整数 target ,输出所有和为 target 的连续正整数序列(至少含有两个数)。
>
> 序列内的数字由小到大排列,不同序列按照首个数字从小到大排列。
>
>
>
> 示例 1:
>
> 输入:target = 9
> 输出:[[2,3,4],[4,5]]
> 示例 2:
>
> 输入:target = 15
> 输出:[[1,2,3,4,5],[4,5,6],[7,8]]
>
>
> 限制:
>
> 1 <= target <= 10^5
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/he-wei-sde-lian-xu-zheng-shu-xu-lie-lcof
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
### 思路
双指针。
阈值是右指针小于$\lceil target\rceil+1$。
每次检测当前序列s和target:
1. 如果`s===target`则加入序列并更新双指针和sum值(左右指针都向右移动,sum值减去左边的旧值加上右边新值)
2. 如果`s>target`则更新双指针和sum值(左指针右移,sum减去左边的旧值)
3. 如果`s<target`则更新双指针和sum值(右指针右移,sum加上右边的新值)
### 代码
```javascript
/**
* @param {number} target
* @return {number[][]}
*/
var findContinuousSequence = function(target) {
let result=[];
let th=Math.ceil(target/2)+1;
let i=1,j=2;
let s=i+j;
while(j<th){
if(s===target){
result.push(Array.from({length:j-i+1},(x,idx)=>idx+i));
j++;
s+=j-i;
i++;
}else if(s>target){
s-=i;
i++;
}else{
j++;
s+=j;
}
}
return result;
};
```
## 面试题59 - II. 队列的最大值
> 请定义一个队列并实现函数 max_value 得到队列里的最大值,要求函数max_value、push_back 和 pop_front 的时间复杂度都是O(1)。
>
> 若队列为空,pop_front 和 max_value 需要返回 -1
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/dui-lie-de-zui-da-zhi-lcof
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
### 思路
在 push 和 pop 时更新 max 值。注意:pop_front 在弹出最大值时需要 O(n) 重新扫描队列求最大值,并不满足题目对三个操作均为 O(1) 的要求。
- max_value、push_back时间复杂度O1,pop_front时间复杂度On,空间复杂度O1
### 代码
```javascript
var MaxQueue = function() {
this.q=[];
this.max=-1;
};
/**
* @return {number}
*/
MaxQueue.prototype.max_value = function() {
return this.q.length?this.max:-1;
};
/**
* @param {number} value
* @return {void}
*/
MaxQueue.prototype.push_back = function(value) {
if(this.max<value) this.max=value;
this.q.push(value)
};
/**
* @return {number}
*/
MaxQueue.prototype.pop_front = function() {
if(this.q.length===0) return -1;
let val=this.q.shift();
if(val===this.max) this.max=Math.max.apply(null,this.q);
return val;
};
/**
* Your MaxQueue object will be instantiated and called as such:
* var obj = new MaxQueue()
* var param_1 = obj.max_value()
* obj.push_back(value)
* var param_3 = obj.pop_front()
*/
```
<file_sep>/**
* @param {number} n
* @return {number[]}
*/
var printNumbers = function (n) {
return Array.from({ length: 10 ** n - 1 }, (x, i) => i + 1);
};<file_sep>/**
* @param {number[]} nums
* @return {number}
*/
var singleNumber = function (nums) {
return nums.reduce((r, x) => r ^ x);
};
<file_sep>/**
Do not return anything, modify s in-place instead.
*/
function reverseString(s: string[]): void {
  // Reverse in place: swap the symmetric pair at (lo, hi) and walk both
  // cursors inward until they meet.
  for (let lo = 0, hi = s.length - 1; lo < hi; lo++, hi--) {
    const held = s[lo];
    s[lo] = s[hi];
    s[hi] = held;
  }
};
<file_sep>/**
* @param {string[]} words
* @param {string} chars
* @return {number}
*/
var countCharacters = function (words, chars) {
let m = new Map();
let result = 0;
chars.split('').forEach(x => m.set(x, m.has(x) ? m.get(x) + 1 : 1));
for (let w of words) {
if (test(w, new Map(m))) result += w.length;
}
return result;
function test(s, m) {
for (let c of s) {
if (m.has(c) && m.get(c) > 0) m.set(c, m.get(c) - 1);
else return false;
}
return true;
}
};
<file_sep>class Solution {
private:
bool valid;
vector<int> colors;
public:
    // Returns true iff the graph (adjacency lists) is bipartite.  Every
    // still-uncolored component is DFS-colored; `valid` is cleared as soon
    // as two adjacent nodes are forced onto the same side.
    bool isBipartite(vector<vector<int>>& graph) {
        valid=true; // reset shared members so the object can be reused
        int n=graph.size();
        colors.assign(n,0); // 0 = uncolored, +1/-1 = the two partitions
        for(int i=0;i<n&&valid;i++){
            if(colors[i]==0){
                dfs(i,-1,graph);
            }
        }
        return valid;
    }
    // Assigns `color` to `index`, then pushes the opposite color onto all
    // of its neighbors; sets valid=false on the first neighbor already
    // holding the same color as `index` (i.e. an odd cycle was found).
    void dfs(int index,int color,vector<vector<int>>&graph){
        colors[index]=color;
        int nextColor=-color;
        for(int neighbor:graph[index]){
            if(colors[neighbor]==0){
                dfs(neighbor,nextColor,graph);
            }else if(colors[neighbor]!=nextColor){
                valid=false;
                return;
            }
        }
    }
};
<file_sep>function openLock(deadends: string[], target: string): number {
let q1: Set<string> = new Set();
let q2: Set<string> = new Set();
const deadendsSet = new Set(deadends);
const visited: Set<string> = new Set();
let step = 0;
q1.add('0000');
q2.add(target);
const plusOne = (s: string, idx: number) => s.slice(0, idx) + (s[idx] === '9' ? '0' : String.fromCharCode(s.charCodeAt(idx) + 1)) + s.slice(idx + 1);
const minusOne = (s: string, idx: number) => s.slice(0, idx) + (s[idx] === '0' ? '9' : String.fromCharCode(s.charCodeAt(idx) - 1)) + s.slice(idx + 1);
while (q1.size !== 0 && q2.size !== 0) {
const tmp: Set<string> = new Set();
for(let x of q1) {
if (deadendsSet.has(x)) {
continue;
}
if (q2.has(x)) {
return step;
}
visited.add(x);
for (let i = 0; i < 4; i++) {
const plus = plusOne(x, i);
if (!visited.has(plus)) {
tmp.add(plus);
}
const minus = minusOne(x, i);
if (!visited.has(minus)) {
tmp.add(minus);
}
}
}
step++;
q1 = q2;
q2 = tmp;
}
return -1;
};
<file_sep>/**
* @param {character[][]} matrix
* @return {number}
*/
var maximalSquare = function (matrix) {
if (matrix.length === 0) return 0;
let m = matrix.length,
n = matrix[0].length;
let dp = Array(m)
.fill(null)
.map((x) => Array(n).fill(0));
function isInGrid(i, j) {
return i >= 0 && j >= 0 && i < m && j < n;
}
function updateDP() {
for (let i = 0; i < m; i++) {
for (let j = 0; j < n; j++) {
if (matrix[i][j] === "0") continue;
dp[i][j] = 1;
if (!isInGrid(i - 1, j) || !isInGrid(i, j - 1)) continue;
dp[i][j] += Math.min(dp[i - 1][j - 1], dp[i - 1][j], dp[i][j - 1]);
}
}
}
function maxDP() {
let max = 0;
for (let sub of dp) {
for (let x of sub) {
max = Math.max(max, x);
}
}
return max;
}
updateDP();
return maxDP() ** 2;
};
<file_sep>var subarraysDivByK = function (A: number[], K: number): number {
const m = new Map();
let ans = 0,
pre = 0;
m.set(0, 1);
for (let x of A) {
pre = (pre + x) % K;
if (pre < 0) pre += K;
if (m.has(pre)) (ans += m.get(pre)), m.set(pre, m.get(pre) + 1);
else m.set(pre, 1);
}
return ans;
};
<file_sep>/**
* @param {character[][]} board
* @return {number}
*/
var numRookCaptures = function (board) {
let i = 0, j = 0, r = board.length, c = board[0].length;
let res = 0;
let flag = true;
for (let x = 0; x < r; x++) {
for (let y = 0; y < c; y++) {
if (board[x][y] === 'R') {
i = x;
j = y;
flag = false;
break;
}
}
if (!flag) break;
}
for (let x = i - 1; x >= 0; x--) {
if (board[x][j] === '.') continue;
if (board[x][j] === 'B') break;
res++;
break;
}
for (let x = i + 1; x < r; x++) {
if (board[x][j] === '.') continue;
if (board[x][j] === 'B') break;
res++;
break;
}
for (let x = j - 1; x >= 0; x--) {
if (board[i][x] === '.') continue;
if (board[i][x] === 'B') break;
res++;
break;
}
for (let x = j + 1; x < c; x++) {
if (board[i][x] === '.') continue;
if (board[i][x] === 'B') break;
res++;
break;
}
return res;
};<file_sep>/**
* Definition for a binary tree node.
* class TreeNode {
* val: number
* left: TreeNode | null
* right: TreeNode | null
* constructor(val?: number, left?: TreeNode | null, right?: TreeNode | null) {
* this.val = (val===undefined ? 0 : val)
* this.left = (left===undefined ? null : left)
* this.right = (right===undefined ? null : right)
* }
* }
*/
type TreeNodeOptional = TreeNode | null;
const isSame = (a: TreeNodeOptional, b: TreeNodeOptional): boolean => {
if (!a && !b) return true;
if (!a || !b) return false;
return a.val === b.val && isSame(a.left, b.right) && isSame(a.right, b.left);
};
var isSymmetric = function (root: TreeNode | null): boolean {
if (!root) return true;
return isSame(root.left, root.right);
};
<file_sep>/**
* Definition for a binary tree node.
* function TreeNode(val) {
* this.val = val;
* this.left = this.right = null;
* }
*/
/**
* @param {TreeNode} A
* @param {TreeNode} B
* @return {boolean}
*/
var isSubStructure = function (A, B) {
function isSame(a, b) {
if (!b) return true;
if (!a) return false;
return a.val === b.val && isSame(a.left, b.left) && isSame(a.right, b.right);
}
function helper(a, b) {
if (!b || !a) return false;
if (isSame(a, b)) return true;
return helper(a.left, b) || helper(a.right, b);
}
return helper(A, B);
};<file_sep>/**
* Definition for singly-linked list.
* function ListNode(val) {
* this.val = val;
* this.next = null;
* }
*/
/**
* @param {ListNode} head
* @return {ListNode}
*/
var reverseList = function (head) {
if (!head) return null;
if (head.next) {
let nx = head.next;
let r = reverseList(head.next);
nx.next = head;
head.next = null;
return r;
}
return head;
};<file_sep>impl Solution {
pub fn merge(a: &mut Vec<i32>, m: i32, b: &mut Vec<i32>, n: i32) {
let mut len = (m + n - 1) as usize;
let mut i = m as usize;
let mut j = n as usize;
while i > 0 && j > 0{
if a[i-1] >= b[j-1]{
a[len] = a[i-1];
len -= 1;
i -= 1;
}else{
a[len] = b[j-1];
len -= 1;
j -= 1;
}
}
while j > 0{
a[len] = b[j-1];
len -= 1;
j -= 1;
}
}
}
<file_sep>/**
* @param {number[][]} grid
* @return {number}
*/
var countNegatives = function (grid) {
return grid.reduce((s, x) => {
let l = 0, r = x.length - 1;
if (x[0] < 0) return s + x.length;
let pos = -1;
while (l <= r) {
let mid = l + (r - l) / 2 | 0;
if (x[mid] < 0) pos = mid, r = mid - 1;
else l = mid + 1;
}
return pos >= 0 ? s + x.length - pos : s;
}, 0);
};<file_sep>class Solution
{
public:
    // Minimum window substring: returns the smallest window of s that
    // contains every character of t (with multiplicity), or "" if none.
    // Sliding window: grow r until the window covers t, then shrink l
    // while coverage holds, recording the best span seen.
    string minWindow(string s, string t)
    {
        if (s.length() == 0 || t.length() == 0 || s.length() < t.length())
            return "";
        // tFreq[c] = how many more copies of c the current window still
        // needs.  Only characters of t are ever inserted (non-t characters
        // are skipped before the first access below), so counts can go
        // negative only for over-covered characters of t.
        unordered_map<char, int> tFreq;
        for (char c : t)
            tFreq[c]++;
        const int sLen = s.length(), tLen = t.length();
        // d = number of still-missing characters (with multiplicity);
        // the half-open window [l, r) covers t exactly when d == 0.
        // minLen = sLen + 1 is the "nothing found yet" sentinel.
        int begin = 0, l = 0, r = 0, minLen = sLen + 1, d = tLen;
        while (r < sLen)
        {
            const char rc = s[r];
            if (!tFreq.count(rc))
            {
                // rc is not in t: extend the window without bookkeeping.
                r++;
                continue;
            }
            if (tFreq[rc] > 0)
            {
                d--; // this occurrence satisfies an outstanding need
            }
            tFreq[rc]--;
            r++;
            while (d == 0)
            {
                // Window covers t: record it, then shrink from the left.
                const char lc = s[l];
                if (r - l < minLen)
                {
                    minLen = r - l;
                    begin = l;
                }
                if (!tFreq.count(lc))
                {
                    l++;
                    continue;
                }
                if (tFreq[lc] == 0)
                {
                    d++; // dropping lc would break coverage
                }
                tFreq[lc]++;
                l++;
            }
        }
        return minLen == sLen + 1 ? "" : s.substr(begin, minLen);
    }
};<file_sep>/**
* Definition for singly-linked list.
* struct ListNode {
* int val;
* ListNode *next;
* ListNode(int x) : val(x), next(NULL) {}
* };
*/
class Solution
{
public:
ListNode *reverseKGroup(ListNode *head, int k)
{
ListNode *hair = new ListNode(0);
hair->next = head;
ListNode *pre = hair;
while (head)
{
ListNode *tail = pre;
for (int i = 0; i < k; i++)
{
tail = tail->next;
if (!tail)
return hair->next;
}
ListNode *nxt = tail->next;
tie(head, tail) = reverseLink(head, tail);
pre->next = head;
tail->next = nxt;
pre = tail;
head = nxt;
}
return hair->next;
}
private:
pair<ListNode *, ListNode *> reverseLink(ListNode *head, ListNode *tail)
{
ListNode *pre = tail->next;
ListNode *p = head;
while (pre != tail)
{
ListNode *nxt = p->next;
p->next = pre;
pre = p;
p = nxt;
}
return {tail, head};
}
};<file_sep>function subsetXORSum(nums: number[]): number {
let result = 0;
function dfs(cur: number, remain: number) {
if (remain === 0) {
result += cur;
return;
}
dfs(cur, remain - 1);
dfs(cur ^ nums[nums.length - remain], remain - 1)
}
dfs(0, nums.length);
return result;
};
<file_sep># Definition for a binary tree node.
# class TreeNode:
# def __init__(self, val=0, left=None, right=None):
# self.val = val
# self.left = left
# self.right = right
class Solution:
    def generateTrees(self, n: int) -> "List[TreeNode]":
        """Return every structurally unique BST storing the values 1..n.

        Fix: the annotations are now quoted (PEP 484 forward references),
        so defining this class no longer raises ``NameError`` when
        ``List``/``TreeNode`` are not in scope at import time, as in this
        standalone file where ``TreeNode`` exists only in a comment.
        """
        def build(l: int, h: int) -> "List[TreeNode]":
            # All BSTs whose values are exactly l..h.  [None] represents
            # the single empty tree so the cross product below still
            # yields one combination.
            if l > h:
                return [None]
            result = []
            for i in range(l, h + 1):
                # i is the root: smaller values go left, larger go right.
                lefts = build(l, i - 1)
                rights = build(i + 1, h)
                for left in lefts:
                    for right in rights:
                        result.append(TreeNode(i, left, right))
            return result

        return build(1, n)
<file_sep>impl Solution {
pub fn sort_array_by_parity_ii(a: Vec<i32>) -> Vec<i32> {
let mut a = a.clone();
let (mut even, mut odd) = (0, 1);
while even < a.len() && odd < a.len() {
while even < a.len() && a[even] % 2 == 0 {
even += 2;
}
while odd < a.len() && a[odd] % 2 == 1 {
odd += 2;
}
if odd >= a.len() || even >= a.len() {
return a;
}
let tmp = a[even];
a[even] = a[odd];
a[odd] = tmp;
}
a
}
}
<file_sep>// Definition for a binary tree node.
// #[derive(Debug, PartialEq, Eq)]
// pub struct TreeNode {
// pub val: i32,
// pub left: Option<Rc<RefCell<TreeNode>>>,
// pub right: Option<Rc<RefCell<TreeNode>>>,
// }
//
// impl TreeNode {
// #[inline]
// pub fn new(val: i32) -> Self {
// TreeNode {
// val,
// left: None,
// right: None
// }
// }
// }
static mut result: i32 = -1;
static mut i: i32 = 0;
use std::rc::Rc;
use std::cell::RefCell;
impl Solution {
pub fn kth_smallest(root: Option<Rc<RefCell<TreeNode>>>, k: i32) -> i32 {
unsafe {
result = -1;
i = 0;
}
Solution::traverse(root.clone(), k);
unsafe { return result; }
}
fn traverse(node: Option<Rc<RefCell<TreeNode>>>, k: i32) {
if let Some(node) = node {
unsafe {
if i >= k {
return;
}
}
Solution::traverse(node.borrow().left.clone(), k);
unsafe {
i += 1;
if i == k {
result = node.borrow().val;
return;
}
}
Solution::traverse(node.borrow().right.clone(), k);
}
}
}
<file_sep>use std::collections::HashMap;
fn count_duplicates(text: &str) -> u32 {
let mut count: HashMap<char, u32> = HashMap::new();
for c in text.to_lowercase().chars() {
let mut e = count.entry(c).or_default();
*e += 1;
}
count.values().filter(|&&v| v > 1).count() as u32
}
<file_sep>import Singleton from "./单例模式";
test("测试单例模式", () => {
expect(Singleton.getInstance()).toBe(Singleton.getInstance());
});
<file_sep>/**
* @param {number[]} nums
* @return {number[]}
*/
var singleNumbers = function (nums) {
let r = nums.reduce((s, x) => s ^ x, 0);
let d = 1;
while ((d & r) === 0) d <<= 1;
let a = 0, b = 0;
for (let x of nums) {
if (x & d) a ^= x;
else b ^= x;
}
return [a, b];
};<file_sep>// Definition for a binary tree node.
// #[derive(Debug, PartialEq, Eq)]
// pub struct TreeNode {
// pub val: i32,
// pub left: Option<Rc<RefCell<TreeNode>>>,
// pub right: Option<Rc<RefCell<TreeNode>>>,
// }
//
// impl TreeNode {
// #[inline]
// pub fn new(val: i32) -> Self {
// TreeNode {
// val,
// left: None,
// right: None
// }
// }
// }
use std::rc::Rc;
use std::cell::RefCell;
use std::collections::HashMap;
impl Solution {
pub fn find_duplicate_subtrees(root: Option<Rc<RefCell<TreeNode>>>) -> Vec<Option<Rc<RefCell<TreeNode>>>> {
let mut result = vec![];
let mut subtrees = HashMap::new();
Solution::subtree(root, &mut result, &mut subtrees);
result
}
fn subtree(node: Option<Rc<RefCell<TreeNode>>>, mut result: &mut Vec<Option<Rc<RefCell<TreeNode>>>>, mut subtrees: &mut HashMap<String, i32>) -> String {
match node {
None => '#'.to_string(),
Some(node) => {
let val = node.borrow().val;
let left = Solution::subtree(node.borrow().left.clone(), &mut result, &mut subtrees);
let right = Solution::subtree(node.borrow().right.clone(), &mut result, &mut subtrees);
let subtree = format!("{},{},{}", left, right, val);
if let Some(count) = subtrees.get_mut(&subtree) {
if *count == 1 {
result.push(Some(node.clone()));
}
*count += 1;
} else {
subtrees.insert(subtree.clone(), 1);
}
subtree
}
}
}
}
<file_sep>/**
* Definition for a binary tree node.
* function TreeNode(val) {
* this.val = val;
* this.left = this.right = null;
* }
*/
/**
* @param {TreeNode} root
* @return {number[]}
*/
var rightSideView = function (root) {
let q = [];
let result = [];
if (!root) return result;
q.push(root);
while (q.length) {
let size = q.length;
let cur = null;
for (let i = 0; i < size; i++) {
cur = q.shift();
if (cur.left) q.push(cur.left);
if (cur.right) q.push(cur.right);
}
if (cur) result.push(cur.val);
}
return result;
};<file_sep>/**
* @param {number[]} nums
* @return {number}
*/
var majorityElement = function (nums) {
let votes = 0, x;
for (let i of nums) {
if (votes === 0) x = i;
votes += (x === i ? 1 : -1);
}
return x;
};<file_sep>export class SingletonCounter {
// TODO:
private count = 0;
private static instance: SingletonCounter | null = null;
private constructor() { }
public static getInstance() {
return this.instance ? this.instance : this.instance = new SingletonCounter();
}
public inc(): number {
return ++(SingletonCounter.instance as SingletonCounter).count;
}
}
<file_sep>/**
* Definition for a binary tree node.
* class TreeNode {
* val: number
* left: TreeNode | null
* right: TreeNode | null
* constructor(val?: number, left?: TreeNode | null, right?: TreeNode | null) {
* this.val = (val===undefined ? 0 : val)
* this.left = (left===undefined ? null : left)
* this.right = (right===undefined ? null : right)
* }
* }
*/
function postorderTraversal(root: TreeNode | null): number[] {
const visited=new Set<TreeNode>();
const s:Array<TreeNode>=[];
const result:Array<number>=[];
if(!root) return result;
s.push(root);
while(s.length){
const cur=s.pop() as TreeNode;
if(!visited.has(cur)){
visited.add(cur);
s.push(cur);
if(cur.right) s.push(cur.right);
if(cur.left) s.push(cur.left);
}else{
result.push(cur.val);
}
}
return result;
};
<file_sep>class Solution
{
private:
    // Interleaves '#' around every character: "ab" -> "#a#b#", so that all
    // palindromes become odd-length in the transformed string.
    // NOTE(review): the `sp` parameter is unused — the literal '#' is
    // inserted regardless of what the caller passes.
    string addSplit(string &s, string sp)
    {
        string ss = "#";
        for (char c : s)
        {
            ss += c;
            ss += "#";
        }
        return ss;
    }
public:
    // Longest palindromic substring via Manacher's algorithm on the
    // '#'-interleaved string; O(n) time overall.
    string longestPalindrome(string s)
    {
        if (s.length() < 2)
            return s;
        // ss = "#a#b#..." — every palindrome of s (odd or even length)
        // becomes an odd-length palindrome in ss.
        string ss = addSplit(s, "#");
        int maxLen = 1, begin = 0;
        // center/maxRight: the known palindrome reaching furthest right.
        int center = 0, maxRight = 0;
        // dp[i] = palindrome radius around i in ss; this radius equals the
        // length of the corresponding palindrome in the original s.
        vector<int> dp(ss.length(), 0);
        for (int i = 0; i < ss.length(); i++)
        {
            if (i < maxRight)
            {
                // Inherit the mirror position's radius, clipped so it
                // never claims anything beyond maxRight.
                int mirror = 2 * center - i;
                dp[i] = min(dp[mirror], maxRight - i);
            }
            // Try to expand around i past the inherited radius.
            int l = i - dp[i] - 1, r = i + dp[i] + 1;
            while (l >= 0 && r < ss.length() && ss[l] == ss[r])
                l--, r++, dp[i]++;
            if (i + dp[i] > maxRight)
            {
                maxRight = i + dp[i];
                center = i;
            }
            if (dp[i] > maxLen)
            {
                maxLen = dp[i];
                // Map the transformed center back to an index into s.
                begin = (i - maxLen) / 2;
            }
        }
        return s.substr(begin, maxLen);
    }
};<file_sep># Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Codec:
    """Serializes/deserializes a binary tree via pre-order traversal.

    Wire format: comma-separated pre-order tokens where ``#`` marks an
    absent child; e.g. the single-node tree ``1`` encodes as ``"1,#,#,"``.
    """

    SEP = ','   # token separator
    NULL = '#'  # marker for an absent (None) child

    def serialize(self, root):
        """Encodes a tree to a single string.

        :type root: TreeNode
        :rtype: str
        """
        parts = []

        def traverse(node):
            # Pre-order: value first, then both subtrees.
            if not node:
                parts.append(self.NULL)
                parts.append(self.SEP)
                return
            parts.append(str(node.val))
            parts.append(self.SEP)
            traverse(node.left)
            traverse(node.right)

        traverse(root)
        return ''.join(parts)

    def deserialize(self, data):
        """Decodes your encoded data to tree.

        :type data: str
        :rtype: TreeNode
        """
        # Iterator instead of the original list.pop(0), which made
        # deserialization O(n^2).
        tokens = iter(data.split(self.SEP))

        def build():
            val = next(tokens, None)
            # Exhausted stream, NULL marker, or the empty trailing token
            # (from the trailing separator) all mean "no node here".
            if not val or val == self.NULL:
                return None
            # Bug fix: the original called TreeNode(val, left, right)
            # against a single-argument constructor and kept the value as
            # a string; build from the int value and attach children.
            node = TreeNode(int(val))
            node.left = build()
            node.right = build()
            return node

        return build()
# Your Codec object will be instantiated and called as such:
# ser = Codec()
# deser = Codec()
# ans = deser.deserialize(ser.serialize(root))
<file_sep>function countPairs(deliciousness: number[]): number {
let count = 0;
const feasts = new Set(Array.from({length: 22}).map((v,idx) => 2**idx));
const watched = new Map<number, number>();
deliciousness.forEach((del) => {
feasts.forEach((feast) => {
const targetPair = feast - del;
if (watched.has(targetPair)) {
count += watched.get(targetPair);
}
})
watched.set(del, (watched.get(del) ?? 0) + 1)
})
return count % (10 ** 9 + 7);
};
<file_sep>interface Token {
idx: number;
val: string;
}
function longestValidParentheses(s: string): number {
let utilContainer: Token[] = [];
for (let i = 0; i < s.length; i++) {
if (utilContainer.length === 0) {
utilContainer.push({
idx: i,
val: s[i],
});
continue;
}
if (utilContainer[utilContainer.length - 1].val === "(" && s[i] === ")") {
utilContainer.pop();
} else {
utilContainer.push({
idx: i,
val: s[i],
});
}
}
if (utilContainer.length === 0) return s.length;
let maxLength = 0;
for (let i = 0; i < utilContainer.length; i++) {
if (i === 0) {
maxLength = Math.max(maxLength, utilContainer[i].idx);
}
if (i === utilContainer.length - 1) {
maxLength = Math.max(
maxLength,
s.length - utilContainer[utilContainer.length - 1].idx - 1
);
}
if (i !== 0) {
maxLength = Math.max(
maxLength,
utilContainer[i].idx - utilContainer[i - 1].idx - 1
);
}
}
return maxLength;
}
<file_sep>/**
* Definition for a binary tree node.
* class TreeNode {
* val: number
* left: TreeNode | null
* right: TreeNode | null
* constructor(val?: number, left?: TreeNode | null, right?: TreeNode | null) {
* this.val = (val===undefined ? 0 : val)
* this.left = (left===undefined ? null : left)
* this.right = (right===undefined ? null : right)
* }
* }
*/
declare class TreeNode {
val: number;
left: TreeNode | null;
right: TreeNode | null;
constructor(val?: number, left?: TreeNode | null, right?: TreeNode | null);
}
type TreeNodeOptional = TreeNode | null;
function flatten(root: TreeNodeOptional): void {
if (!root) return;
let visited = new Set();
let s = [];
s.push(root);
let prev = null;
while (s.length !== 0) {
let cur = s.pop() as TreeNode;
if (!visited.has(cur)) {
visited.add(cur);
s.push(cur);
if (cur.left) s.push(cur.left);
if (cur.right) s.push(cur.right);
} else {
cur.left = null;
cur.right = prev;
prev = cur;
}
}
}
<file_sep>/**
* @param {number[]} nums
* @return {number}
*/
var majorityElement = function (nums) {
let votes = 0, x;
for (let i of nums) {
if (votes === 0) x = i;
votes += (x === i ? 1 : -1);
}
let count = 0, n = nums.length / 2 | 0 + 1;
for (let i of nums) {
if (i === x) count++;
if (count === n) return x;
}
return -1;
};<file_sep>/**
* Initialize your data structure here.
*/
var Trie = function () {
this.end = false;
this.next = Array(26).fill(null);
};
/**
* Inserts a word into the trie.
* @param {string} word
* @return {void}
*/
Trie.prototype.insert = function (word) {
let node = this;
for (let c of word) {
let idx = c.charCodeAt(0) - 'a'.charCodeAt(0);
if (node.next[idx] === null) {
node.next[idx] = new Trie();
}
node = node.next[idx];
}
node.end = true;
};
/**
* Returns if the word is in the trie.
* @param {string} word
* @return {boolean}
*/
Trie.prototype.search = function (word) {
let node = this;
for (let c of word) {
let idx = c.charCodeAt(0) - 'a'.charCodeAt(0);
if (node.next[idx] === null) return false;
node = node.next[idx];
}
return node.end;
};
/**
* Returns if there is any word in the trie that starts with the given prefix.
* @param {string} prefix
* @return {boolean}
*/
Trie.prototype.startsWith = function (prefix) {
let node = this;
for (let c of prefix) {
let idx = c.charCodeAt(0) - 'a'.charCodeAt(0);
if (node.next[idx] === null) return false;
node = node.next[idx];
}
return true;
};
/**
* Your Trie object will be instantiated and called as such:
* var obj = new Trie()
* obj.insert(word)
* var param_2 = obj.search(word)
* var param_3 = obj.startsWith(prefix)
*/<file_sep>/**
* @param {number} n
* @return {number[][]}
*/
var generateMatrix = function (n) {
let result = Array(n).fill(0).map(x => Array(n).fill(0));
let up = 0, bottom = n - 1, left = 0, right = n - 1;
let cnt = 1;
while (true) {
for (let i = left; i <= right; i++) result[up][i] = cnt++;
if (++up > bottom) break;
for (let i = up; i <= bottom; i++) result[i][right] = cnt++;
if (--right < left) break;
for (let i = right; i >= left; i--) result[bottom][i] = cnt++;
if (--bottom < up) break;
for (let i = bottom; i >= up; i--) result[i][left] = cnt++;
if (++left > right) break;
}
return result;
};<file_sep>let count = 0n;
function Item(val) {
this.val = val;
this.freq = 1;
this.time = count++;
}
/**
* @param {number} capacity
*/
var LFUCache = function (capacity) {
this.capacity = capacity;
this.cache = new Map();
};
/**
* @param {number} key
* @return {number}
*/
LFUCache.prototype.get = function (key) {
if (!this.cache.has(key)) return -1;
let item = this.cache.get(key);
item.freq++;
item.time = count++;
return item.val;
};
/**
* @param {number} key
* @param {number} value
* @return {void}
*/
LFUCache.prototype.put = function (key, value) {
if (this.capacity <= 0) return;
if (this.cache.has(key)) {
let item = this.cache.get(key);
item.freq++;
item.val = value;
item.time = count++;
return;
}
if (this.cache.size === this.capacity) {
let minFreq = Number.MAX_SAFE_INTEGER, minK = -1, minT;
for (let [k, { freq, time }] of this.cache) {
if (minFreq > freq) {
minFreq = freq;
minK = k;
minT = time;
} else if (minFreq === freq && time < minT) {
minK = k;
minT = time;
}
}
this.cache.delete(minK);
}
let x = new Item(value);
this.cache.set(key, x);
};
/**
* Your LFUCache object will be instantiated and called as such:
* var obj = new LFUCache(capacity)
* var param_1 = obj.get(key)
* obj.put(key,value)
*/<file_sep>/**
* Definition for a binary tree node.
* class TreeNode {
* val: number
* left: TreeNode | null
* right: TreeNode | null
* constructor(val?: number, left?: TreeNode | null, right?: TreeNode | null) {
* this.val = (val===undefined ? 0 : val)
* this.left = (left===undefined ? null : left)
* this.right = (right===undefined ? null : right)
* }
* }
*/
const SEP = ',';
const NULL = '#';
/*
* Encodes a tree to a single string.
*/
function serialize(root: TreeNode | null): string {
let result = [];
const traverse = (node: TreeNode | null) => {
if (node === null) {
result.push(NULL,SEP);
return;
}
result.push(node.val, SEP);
traverse(node.left);
traverse(node.right);
}
traverse(root);
return result.join('');
};
/*
* Decodes your encoded data to tree.
*/
function deserialize(data: string): TreeNode | null {
const nodes = data.split(',');
const traverse = (nodes: string[]): TreeNode | null => {
if (nodes.length === 0) return null;
const rootVal = nodes.shift();
if (rootVal === NULL) return null;
const root = new TreeNode(Number(rootVal));
const left = traverse(nodes);
const right = traverse(nodes);
root.left = left;
root.right = right;
return root;
}
return traverse(nodes);
};
/**
* Your functions will be called as such:
* deserialize(serialize(root));
*/
<file_sep>function combinationSum(candidates: number[], target: number): number[][] {
const reuslt = [];
function dfs(cur: number[], remain: number, startIndex = 0) {
if (remain === 0) {
reuslt.push(cur);
return;
}
for (let i = startIndex; i < candidates.length; i++) {
const curEle = candidates[i];
if (remain >= curEle) dfs([...cur,curEle], remain - curEle, i);
}
}
dfs([],target);
return reuslt;
};
<file_sep>class Solution {
private:
vector<int> indegree;
vector<int> result;
vector<vector<int>> m;
public:
vector<int> findOrder(int numCourses, vector<vector<int>>& prerequisites) {
indegree.resize(numCourses);
m.resize(numCourses);
for(auto& sub:prerequisites){
indegree[sub[0]]++;
m[sub[1]].push_back(sub[0]);
}
queue<int> q;
for(int i=0;i<indegree.size();i++){
if(indegree[i]==0) q.push(i);
}
while(!q.empty()){
int cur=q.front();
result.push_back(cur);
q.pop();
for(int neighbor:m[cur]){
indegree[neighbor]--;
if(indegree[neighbor]==0){
q.push(neighbor);
}
}
}
if(result.size()!=numCourses) return {};
return result;
}
};<file_sep>/**
* @param {number} num
* @return {string[]}
*/
var readBinaryWatch = function (num) {
let result = new Set();
function helper(h, m, c) {
// console.log(h,"-",m,"-",c)
if (c < 0 || 10 - h.length - m.length < c) return;
if (h.length > 4 || m.length > 6) return;
if (h.length === 4 && m.length === 6) {
if (c !== 0) return;
let hours = parseInt(h, 2);
let minutes = parseInt(m, 2);
if (hours > 11 || minutes > 59) return;
result.add(`${hours}:${minutes < 10 ? '0' + minutes : minutes}`);
return;
}
if (h.length < 4) {
helper('1' + h, m, c - 1);
helper('0' + h, m, c);
}
if (m.length < 6) {
helper(h, '1' + m, c - 1);
helper(h, '0' + m, c);
}
}
helper('', '', num);
return [...result];
};<file_sep>/**
* Definition for a binary tree node.
* class TreeNode {
* val: number
* left: TreeNode | null
* right: TreeNode | null
* constructor(val?: number, left?: TreeNode | null, right?: TreeNode | null) {
* this.val = (val===undefined ? 0 : val)
* this.left = (left===undefined ? null : left)
* this.right = (right===undefined ? null : right)
* }
* }
*/
/**
 * Returns one representative node for every subtree structure that occurs
 * more than once in the tree.
 *
 * Each subtree is identified by its post-order serialization
 * "left,right,val" ('#' for an absent child); a node is reported exactly
 * once, on the second sighting of its serialization.
 *
 * Fix: removed a leftover `console.log` that printed every subtree's
 * serialization on each recursive call (debug noise, O(n) extra work per
 * node).
 */
function findDuplicateSubtrees(root: TreeNode | null): Array<TreeNode | null> {
  const result: Array<TreeNode | null> = [];
  const subtreeCount = new Map<string, number>();
  const serialize = (node: TreeNode | null): string => {
    if (node === null) return '#';
    const key = `${serialize(node.left)},${serialize(node.right)},${node.val}`;
    // Report on the second sighting only, so duplicates appear once each.
    if (subtreeCount.get(key) === 1) {
      result.push(node);
    }
    subtreeCount.set(key, (subtreeCount.get(key) ?? 0) + 1);
    return key;
  };
  serialize(root);
  return result;
};
<file_sep>function generateParenthesis(n: number): string[] {
let result=[];
function dfs(cur: string, lCount: number, rCount: number) {
if (lCount>rCount) return;
if (cur.length === 2 * n) {
result.push(cur);
return;
}
if (lCount > 0) dfs(cur+'(',lCount - 1,rCount);
if (rCount > 0) dfs(cur+')',lCount,rCount - 1);
}
dfs('',n,n);
return result;
};
<file_sep>/**
* // Definition for a Node.
* function Node(val, left, right, next) {
* this.val = val === undefined ? null : val;
* this.left = left === undefined ? null : left;
* this.right = right === undefined ? null : right;
* this.next = next === undefined ? null : next;
* };
*/
/**
* @param {Node} root
* @return {Node}
*/
var connect = function(root) {
const q=[];
if(!root) return root;
q.push(root);
while(q.length){
let size=q.length;
let prev=null;
for(let i=0;i<size;i++){
const cur=q.shift();
if(cur.left) q.push(cur.left);
if(cur.right) q.push(cur.right);
if(prev===null){
prev=cur;
continue;
}
prev.next=cur;
prev=cur;
}
if(prev) prev.next=null;//此时prev是每行最后一个
}
return root;
};
<file_sep>/**
* @param {string} S
* @return {string[]}
*/
var permutation = function (S) {
let res = [];
function dfs(cur, c) {
if (cur.length === S.length) {
res.push(cur);
return;
}
for (let i = 0; i < S.length; i++) {
if (c.has(i)) continue;
dfs(cur + S[i], new Set(c).add(i));
}
}
dfs('', new Set());
return [...new Set(res)];
};<file_sep>class Solution {
private:
bool isPalindrome(string &s,int l,int r){
for(;l<r&&s[l]==s[r];l++,r--);
return l>=r;
}
public:
bool validPalindrome(string s) {
int l=0,r=s.size()-1;
for(;l<r&&s[l]==s[r];l++,r--);
return isPalindrome(s,l+1,r)||isPalindrome(s,l,r-1);
}
};<file_sep>impl Solution {
pub fn find_order(num_courses: i32, prerequisites: Vec<Vec<i32>>) -> Vec<i32> {
let mut result = vec![];
let mut graph = vec![Vec::new(); num_courses as usize];
let mut on_path = vec![false; num_courses as usize];
let mut visited = vec![false; num_courses as usize];
let mut has_circle = false;
for pair in &prerequisites[..] {
if let &[cur, pre] = &pair[..] {
graph[pre as usize].push(cur);
}
}
fn traverse(graph: &Vec<Vec<i32>>, mut on_path: &mut Vec<bool>, mut visited: &mut Vec<bool>, mut has_circle: &mut bool, mut result: &mut Vec<i32>, start: i32) {
let idx = start as usize;
if on_path[idx] {
*has_circle = true;
}
if *has_circle || visited[idx] {
return;
}
on_path[idx] = true;
visited[idx] = true;
for &x in &graph[idx] {
traverse(&graph, &mut on_path, &mut visited, &mut has_circle, &mut result, x);
}
result.push(start);
on_path[idx] = false;
}
for x in 0..num_courses {
traverse(&graph, &mut on_path, &mut visited, &mut has_circle, &mut result, x);
}
if has_circle {
return vec![];
}
return result.into_iter().rev().collect();
}
}
<file_sep>// Definition for singly-linked list.
// #[derive(PartialEq, Eq, Clone, Debug)]
// pub struct ListNode {
// pub val: i32,
// pub next: Option<Box<ListNode>>
// }
//
// impl ListNode {
// #[inline]
// fn new(val: i32) -> Self {
// ListNode {
// next: None,
// val
// }
// }
// }
impl Solution {
    // Iteratively reverses a singly linked list by repeatedly unhooking
    // the current head node and pushing it onto the already-reversed
    // prefix `pre`.
    pub fn reverse_list(mut head: Option<Box<ListNode>>) -> Option<Box<ListNode>> {
        let mut pre=None;
        while head.is_some(){
            let mut n=head.unwrap();
            head=n.next; // move the rest of the list out of the node
            n.next=pre;  // hook the node in front of the reversed prefix
            pre=Some(n);
        }
        return pre;
    }
    // Same algorithm written with `while let` + `Option::take`; kept as an
    // alternative formulation.  (The camelCase locals will trigger rustc's
    // non_snake_case lint.)
    pub fn reverse_list1(head: Option<Box<ListNode>>) -> Option<Box<ListNode>> {
        let mut oldHead = head;
        let mut newHead = None;
        while let Some(mut node) = oldHead {
            oldHead = node.next.take();
            node.next = newHead;
            newHead = Some(node);
        }
        newHead
    }
}
<file_sep>/**
* @param {number} num
* @return {number}
*/
var numberOfSteps = function (num) {
return num.toString(2).length + (num.toString(2).match(/1/g) || []).length - 1;
};<file_sep>/**
* @param {number[]} nums
* @return {number}
*/
var findMin = function (nums) {
if (nums[0] < nums[nums.length - 1]) return nums[0];//not rotate
let l = 0, r = nums.length - 1;
while (l < r) {
let mid = l + (r - l) / 2 | 0;
if (nums[mid] > nums[r]) l = mid + 1;
else if (nums[mid] < nums[r]) r = mid;
else r--;
}
return nums[l];
};<file_sep>/**
* @param {string} seq
* @return {number[]}
*/
var maxDepthAfterSplit = function (seq) {
let dep = 0;
return seq.split('').map(x => x === '(' ? (++dep % 2) : (dep-- % 2));
};<file_sep>/**
* @param {string[]} words
* @return {number}
*/
var minimumLengthEncoding = function (words) {
let s = new Set(words);
for (let c of words) {
for (let i = 1; i < c.length; i++) {
let x = c.slice(i);
s.has(x) && s.delete(x);
}
}
return [...s].reduce((s, x) => s + x.length, 0) + s.size;
};<file_sep>fn sum_cubes(n: u32) -> u32 {
(1..=n).map(|x| x.pow(3)).sum()
}
<file_sep>use std::collections::HashSet;
use std::convert::TryInto;
impl Solution {
pub fn length_of_longest_substring(s: String) -> i32 {
let s=s.as_bytes();
let mut visited=HashSet::new();
let mut l=0;
let mut r=0;
let mut max=0;
let len=s.len();
while r<len{
if visited.contains(&s[r]){
max=if max>visited.len(){max}else{visited.len()};
visited.remove(&s[l]);
l+=1;
continue;
}
visited.insert(&s[r]);
r+=1;
}
max=if max>visited.len(){max}else{visited.len()};
max.try_into().unwrap()
}
}<file_sep>import { Iterable, Iterator } from "./迭代器模式";
export interface BinaryTreeIterable<K, V> {
getPreOrderIterator(): Iterator<K, V>;
getInOrderIterator(): Iterator<K, V>;
getPostOrderIterator(): Iterator<K, V>;
}
export class BinaryTreeIterator<K, V> implements Iterator<K, V> {
current(): V {
throw new Error("Method not implemented.");
}
next(): V {
throw new Error("Method not implemented.");
}
key(): K {
throw new Error("Method not implemented.");
}
valid(): boolean {
throw new Error("Method not implemented.");
}
rewind(): void {
throw new Error("Method not implemented.");
}
}
export class Node<T> {
constructor(
private val: T,
private left: Node<T> | null = null,
private right: Node<T> | null = null
) {}
}
export class BinaryTree<T> implements Iterable<T, T>, BinaryTreeIterable<T, T> {
getPreOrderIterator(): Iterator<T, T> {
throw new Error("Method not implemented.");
}
getInOrderIterator(): Iterator<T, T> {
throw new Error("Method not implemented.");
}
getPostOrderIterator(): Iterator<T, T> {
throw new Error("Method not implemented.");
}
getIterator(): Iterator<T, T> {
throw new Error("Method not implemented.");
}
}
<file_sep>function isBipartite(graph: number[][]): boolean {
  // Two-colour each component by DFS (colours -1/1, 0 = unvisited); the
  // graph is bipartite iff no edge joins two vertices of the same colour.
  let valid=true;
  let n = graph.length;
  let colors=Array(n).fill(0);
  function dfs(i:number,color:number){
    colors[i]=color;
    let nextColor=-color;
    for(let neighbor of graph[i]){
      if(colors[neighbor]===0){
        dfs(neighbor,nextColor);
      }else if(colors[neighbor]!==nextColor){
        // neighbour already coloured the same as us: odd cycle
        valid=false;
        return;
      }
    }
  }
  for(let i=0;i<n&&valid;i++){
    if(colors[i]===0){
      dfs(i,-1);
    }
  }
  return valid;
};
<file_sep>/**
* Definition for Node.
* class Node {
* val: number
* left: Node | null
* right: Node | null
* next: Node | null
* constructor(val?: number, left?: Node, right?: Node, next?: Node) {
* this.val = (val===undefined ? 0 : val)
* this.left = (left===undefined ? null : left)
* this.right = (right===undefined ? null : right)
* this.next = (next===undefined ? null : next)
* }
* }
*/
function connect(root: Node | null): Node | null {
  // Populate each node's `next` pointer with its right neighbour on the same
  // level. For a perfect binary tree it suffices to wire, for every sibling
  // pair, the three cross pairs one level down.
  if (root === null) return null;
  const link = (left: Node | null, right: Node | null): void => {
    if (left === null || right === null) return;
    left.next = right;
    link(left.left, left.right);
    link(left.right, right.left);
    link(right.left, right.right);
  };
  link(root.left, root.right);
  return root;
};
<file_sep>class Solution:
    def findOrder(self, numCourses: int, prerequisites: List[List[int]]) -> List[int]:
        """Course Schedule II: return one valid course order via post-order
        DFS topological sort, or [] when the prerequisite graph has a cycle."""
        result = []
        visited = [False for i in range(numCourses)]
        # onPath marks nodes on the current DFS stack; revisiting one => cycle
        onPath = [False for i in range(numCourses)]
        hasCircle = False
        graph = [[] for i in range(numCourses)]
        for [cur, pre] in prerequisites:
            graph[pre].append(cur)
        def traverse(start: int):
            nonlocal onPath, visited, result, hasCircle, graph
            if onPath[start]:
                hasCircle = True
            if hasCircle or visited[start]:
                return
            onPath[start] = True
            visited[start] = True
            for x in graph[start]:
                traverse(x)
            # post-order append: reversing at the end yields a topological order
            result.append(start)
            onPath[start] = False
        for i in range(numCourses):
            traverse(i)
        return [] if hasCircle else result[::-1]
<file_sep>let time=0;
// `time` above is a global logical clock; a larger stamp = touched more recently.
// Cache entry: stored value plus its last-touched timestamp.
function Item(val){
    this.val=val;
    this.time=time++;
}
/**
* @param {number} capacity
*/
var LRUCache = function(capacity) {
    // key -> Item; recency is tracked via each Item's timestamp.
    this.capacity=capacity;
    this.cache=new Map();
};
/**
* @param {number} key
* @return {number}
*/
LRUCache.prototype.get = function(key) {
    // A hit refreshes the entry's timestamp (marks it most recently used).
    if(!this.cache.has(key)) return -1;
    let item=this.cache.get(key);
    item.time=time++;
    return item.val;
};
/**
* @param {number} key
* @param {number} value
* @return {void}
*/
LRUCache.prototype.put = function(key, value) {
    // Existing key: refresh value and timestamp.
    if(this.cache.has(key)){
        let item=this.cache.get(key);
        item.time=time++;
        item.val=value;
        return;
    }
    // Cache full: evict the entry with the oldest timestamp — an O(n) scan,
    // acceptable for small capacities.
    if(this.cache.size>=this.capacity){
        let minT=Number.MAX_VALUE,minK;
        for(let [k,{time}] of this.cache){
            if(minT>time){
                minT=time;
                minK=k;
            }
        }
        this.cache.delete(minK);
    }
    let item=new Item(value);
    this.cache.set(key,item);
};
/**
* Your LRUCache object will be instantiated and called as such:
* var obj = new LRUCache(capacity)
* var param_1 = obj.get(key)
* obj.put(key,value)
*/<file_sep># Definition for a binary tree node.
# class TreeNode:
# def __init__(self, val=0, left=None, right=None):
# self.val = val
# self.left = left
# self.right = right
class Solution:
    def isValidBST(self, root: TreeNode) -> bool:
        """Return True iff `root` is a valid binary search tree.

        Recursively narrows the open interval (lo, hi) each node's value
        must fall in: descending left caps the upper bound at the parent,
        descending right raises the lower bound. `lo`/`hi` are boundary
        nodes, or None for "unbounded".

        Fix: the inner helper's parameters were named `min`/`max`,
        shadowing the builtins; renamed to `lo`/`hi`.
        """
        def validate(node: TreeNode, lo: TreeNode, hi: TreeNode) -> bool:
            if not node:
                return True
            # node.val must be strictly inside (lo.val, hi.val)
            if lo and lo.val >= node.val:
                return False
            if hi and hi.val <= node.val:
                return False
            return validate(node.left, lo, node) and validate(node.right, node, hi)
        return validate(root, None, None)
<file_sep>/**
* Definition for a binary tree node.
* function TreeNode(val) {
* this.val = val;
* this.left = this.right = null;
* }
*/
/**
* @param {TreeNode} root
* @param {number} k
* @return {number}
*/
var kthLargest = function (root, k) {
  // Iterative REVERSE in-order traversal (right, node, left) of a BST using a
  // re-push + visited marker, so nodes pop in descending value order; the
  // k-th popped node is the k-th largest.
  let count = 0;
  let s = [];
  let visited = new Set();
  s.push(root);
  while (s.length !== 0) {
    let cur = s.pop();
    if (!visited.has(cur)) {
      visited.add(cur);
      cur.left && s.push(cur.left);
      s.push(cur);
      cur.right && s.push(cur.right);
    } else {
      count++;
      if (count === k) return cur.val;
    }
  }
};<file_sep>/**
* Definition for a binary tree node.
* class TreeNode {
* val: number
* left: TreeNode | null
* right: TreeNode | null
* constructor(val?: number, left?: TreeNode | null, right?: TreeNode | null) {
* this.val = (val===undefined ? 0 : val)
* this.left = (left===undefined ? null : left)
* this.right = (right===undefined ? null : right)
* }
* }
*/
function maxSumBST(root: TreeNode | null): number {
  // For each subtree report [isBST, sum, min, max]; a node roots a BST when
  // both children do and its value lies strictly between left.max and right.min.
  let maxSum = 0;
  const traverse = (node: TreeNode | null): [boolean,number, number, number] => {
    // Empty subtree: a valid BST with neutral min/max sentinels.
    if (node === null) return [true, 0, Number.MAX_SAFE_INTEGER, Number.MIN_SAFE_INTEGER];
    const left = traverse(node.left);
    const right = traverse(node.right);
    if (left[0] && right[0] && node.val > left[3] && node.val < right[2]) {
      maxSum = Math.max(left[1] + right[1] + node.val, maxSum)
      return [true, left[1] + right[1] + node.val, Math.min(node.val, left[2]), Math.max(node.val, right[3])];
    }
    // Not a BST: poison min/max so no ancestor can qualify either.
    return [false, 0, Number.MIN_SAFE_INTEGER, Number.MAX_SAFE_INTEGER];
  }
  traverse(root)
  return maxSum
};
<file_sep><img width="150px" src="_media/favicon.png">
# 刷题笔记
- LeetCode
- 面试编程题记录
- Codewars
- CSS常见问题
- JavaScript特色
[Get Started](README.md)
<file_sep>/**
* Definition for a binary tree node.
* function TreeNode(val) {
* this.val = val;
* this.left = this.right = null;
* }
*/
/**
* @param {TreeNode} root
* @param {TreeNode} p
* @param {TreeNode} q
* @return {TreeNode}
*/
var lowestCommonAncestor = function (root, p, q) {
  // Pass 1: iterative in-order walk; once the first of p/q is reached
  // (idx === 1) every finished node up to the second becomes a candidate.
  // Pass 2: iterative post-order walk; the first candidate completed in
  // post-order is returned as the LCA.
  // NOTE(review): correctness hinges on the in-order/post-order interplay —
  // verify against the original problem's tree constraints.
  let s = [];
  let visited = new Set();
  let candidates = new Set();
  s.push(root);
  let idx = 0;
  while (s.length) {
    let cur = s.pop();
    if (!visited.has(cur)) {
      // first touch: re-push in in-order arrangement
      visited.add(cur);
      if (cur.right) s.push(cur.right);
      s.push(cur);
      if (cur.left) s.push(cur.left);
    } else {
      if (cur === p || cur === q) {
        idx++;
      }
      if (idx === 1) {
        candidates.add(cur);
      }
      if (idx === 2) {
        candidates.add(cur);
        break;
      }
    }
  }
  s = [];
  visited = new Set();
  s.push(root);
  while (s.length) {
    let cur = s.pop();
    if (!visited.has(cur)) {
      // first touch: re-push in post-order arrangement
      visited.add(cur);
      if (cur.right) s.push(cur.right);
      if (cur.left) s.push(cur.left);
      s.push(cur);
    } else {
      if (candidates.has(cur)) return cur;
    }
  }
  return null;
};
<file_sep>use std::collections::VecDeque;
// #[derive(Debug, PartialEq, Eq)]
// pub enum NestedInteger {
// Int(i32),
// List(Vec<NestedInteger>)
// }
struct NestedIterator {
    // Flattening worklist; the front is the next element to expose.
    list: VecDeque<NestedInteger>
}
/**
* `&self` means the method takes an immutable reference.
* If you need a mutable reference, change it to `&mut self` instead.
*/
impl NestedIterator {
    fn new(nestedList: Vec<NestedInteger>) -> Self {
        Self{
            list: VecDeque::from(nestedList)
        }
    }
    // Caller must check has_next() first; afterwards the front is
    // guaranteed to be an Int.
    fn next(&mut self) -> i32 {
        match self.list.pop_front().unwrap() {
            NestedInteger::Int(n) => n,
            _ => panic!("never!")
        }
    }
    // Lazily flattens: pops leading List items and pushes their elements back
    // to the front (back-to-front, so order is preserved) until an Int
    // surfaces or the deque empties.
    fn has_next(&mut self) -> bool {
        while !self.list.is_empty() {
            if let NestedInteger::Int(num) = self.list[0] {
                return true;
            }
            if let NestedInteger::List(mut first) = self.list.pop_front().unwrap() {
                while !first.is_empty() {
                    self.list.push_front(first.pop().unwrap());
                }
            }
        }
        !self.list.is_empty()
    }
}
/**
* Your NestedIterator object will be instantiated and called as such:
* let obj = NestedIterator::new(nestedList);
* let ret_1: i32 = obj.next();
* let ret_2: bool = obj.has_next();
*/
<file_sep>import { CreatorB, CreatorA } from './工厂方法';
// Smoke test for the factory-method pattern: each concrete creator
// builds its own product variant.
test('测试工厂方法', () => {
  let creatorA = new CreatorA();
  let creatorB = new CreatorB();
  expect(creatorA.someOperation()).toBe('created a product A');
  expect(creatorB.someOperation()).toBe('created a product B')
})<file_sep>use std::cmp::*;
impl Solution {
    /// Maximum-product contiguous subarray: track both the max AND min
    /// product ending at i, since a negative factor swaps their roles.
    pub fn max_product(nums: Vec<i32>) -> i32 {
        let mut max0=nums[0];
        let mut min0=nums[0];
        let mut result=nums[0];
        for i in 1..nums.len(){
            let mut tMax=max0*nums[i];
            let mut tMin=min0*nums[i];
            max0=max(nums[i],max(tMax,tMin));
            min0=min(nums[i],min(tMin,tMax));
            result=max(max0,result);
        }
        return result;
    }
}<file_sep>// Definition for a binary tree node.
// #[derive(Debug, PartialEq, Eq)]
// pub struct TreeNode {
// pub val: i32,
// pub left: Option<Rc<RefCell<TreeNode>>>,
// pub right: Option<Rc<RefCell<TreeNode>>>,
// }
//
// impl TreeNode {
// #[inline]
// pub fn new(val: i32) -> Self {
// TreeNode {
// val,
// left: None,
// right: None
// }
// }
// }
use std::rc::Rc;
use std::cell::RefCell;
use std::cmp::{ max, min };
impl Solution {
    /// Maximum key-sum of any subtree that is a valid BST.
    /// Post-order: each call returns (is_bst, sum, min, max) for the subtree
    /// and folds the best valid-BST sum into max_sum.
    pub fn max_sum_bst(root: Option<Rc<RefCell<TreeNode>>>) -> i32 {
        let mut max_sum = 0;
        fn traverse(node: Option<Rc<RefCell<TreeNode>>>, mut max_sum: &mut i32) -> (bool, i32, i32, i32) {
            match node {
                // Empty subtree: trivially a BST with neutral sentinels.
                None => (true, 0, i32::MAX, i32::MIN),
                Some(node) => {
                    let node_ref = node.borrow();
                    let left = traverse(node_ref.left.clone(), max_sum);
                    let right = traverse(node_ref.right.clone(), max_sum);
                    if left.0 && right.0 && node_ref.val > left.3 && node_ref.val < right.2 {
                        let cur_sum = left.1 + right.1 + node_ref.val;
                        *max_sum = max(cur_sum, *max_sum);
                        return (true, cur_sum, min(node_ref.val, left.2), max(node_ref.val, right.3));
                    }
                    return (false, 0, 0, 0);
                }
            }
        }
        traverse(root, &mut max_sum);
        return max_sum;
    }
}
<file_sep>impl Solution {
    /// Count subarrays summing to k via prefix sums: m[p] = number of
    /// prefixes with sum p, so each position adds m[pre - k] matches.
    pub fn subarray_sum(nums: Vec<i32>, k: i32) -> i32 {
        let mut m=std::collections::HashMap::new();
        m.insert(0,1);
        let mut c=0;
        let mut pre=0;
        for x in nums{
            pre+=x;
            c+=m.get(&(pre-k)).unwrap_or(&0);
            *m.entry(pre).or_insert(0)+=1;
        }
        return c;
    }
}<file_sep>function permutation(S: string): string[] {
  const result:Array<string> = [];
  // Index-tracking DFS: `used` holds the positions already consumed.
  function dfs(cur: string, used: Set<number>) {
    if (cur.length===S.length) {
      result.push(cur);
      return;
    }
    for(let i = 0; i < S.length; i ++) {
      if (used.has(i)) continue;
      used.add(i);
      dfs(cur+S[i],used);
      used.delete(i);
    }
  }
  dfs('',new Set())
  return result;
};
function permutation1(S: string): string[] {
  // Inductive construction: the permutations of S are obtained by inserting
  // the last character into every gap of each permutation of S without it.
  if (S.length > 1) {
    const curChar = S[S.length - 1];
    // length of the prefix whose permutations we build on
    const lastItemCount = S.length - 1;
    // BUGFIX: recurse into permutation1 itself instead of the unrelated
    // DFS-based `permutation`, so the inductive algorithm is self-contained.
    const last = permutation1(S.slice(0, lastItemCount));
    const cur: string[] = [];
    for (const s of last) {
      for (let i = 0; i < lastItemCount + 1; i++) {
        cur.push([...s.slice(0, i), curChar, ...s.slice(i)].join(''));
      }
    }
    return cur;
  }
  return [S[0]];
};
<file_sep>/**
* @param {number[]} arr
* @param {number} k
* @return {number[]}
*/
var getLeastNumbers = function (arr, k) {
  if (k === 0) return [];
  // Sift-down for a MIN-heap rooted at `parent` over arr[0..len).
  function ajust(parent, len) {
    let child = 2 * parent + 1;
    let tmp = arr[parent];
    while (child < len) {
      if (child + 1 < len && arr[child + 1] < arr[child]) child += 1;
      if (tmp <= arr[child]) break;
      arr[parent] = arr[child];
      parent = child;
      child = 2 * parent + 1;
    }
    arr[parent] = tmp;
  }
  // Build the min-heap, then run k heap-sort rounds: each round swaps the
  // current minimum to the end, so the k smallest collect in arr's tail.
  for (let i = (arr.length - 1) / 2 | 0; i >= 0; i--) ajust(i, arr.length);
  for (let i = arr.length - 1; i > arr.length - 1 - k; i--) {
    let tmp = arr[i];
    arr[i] = arr[0];
    arr[0] = tmp;
    ajust(0, i);
  }
  return arr.slice(-k);
};<file_sep>function subsets(nums: number[]): number[][] {
  const result = [];
  // Binary-choice DFS: for each remaining element, include it or skip it.
  function dfs(cur: number[], remainCount: number) {
    if(remainCount === 0) {
      result.push(cur);
      return;
    }
    dfs([...cur, nums[nums.length-remainCount]], remainCount - 1)
    dfs([...cur], remainCount - 1)
  }
  dfs([],nums.length);
  return result;
};
<file_sep>/**
* @param {number[]} nums
* @return {number}
*/
var arrayPairSum = function (nums) {
  // Sort descending and add every second element (indices 1,3,...): each
  // pair's smaller member, which maximises the sum of pairwise minima.
  // Note: sort() mutates nums in place.
  return nums.sort((a, b) => b - a).reduce((s, x, i) => i % 2 ? s + x : s, 0);
};<file_sep>function generateParenthesis(n: number): string[] {
  const result = [];
  // lCnt / rCnt are the REMAINING '(' and ')' still available to place.
  function dfs(cur: string, lCnt: number, rCnt: number) {
    // lCnt > rCnt means more ')' than '(' have been emitted: invalid prefix
    if (lCnt>rCnt) return;
    if (cur.length === 2 * n) {
      result.push(cur);
      return;
    }
    if (lCnt > 0) dfs(cur + '(', lCnt - 1, rCnt);
    if (rCnt > 0) dfs(cur + ')', lCnt, rCnt - 1);
  }
  dfs('', n, n);
  return result;
};
<file_sep>impl Solution {
    /// Recursive fast exponentiation (x^n). The -1 base case plus the
    /// trailing 1/x factor for odd negative n keep i32::MIN safe, since
    /// n/2 never overflows.
    pub fn my_pow(x: f64, n: i32) -> f64 {
        if n==0{
            return 1.0;
        }
        if n==1{
            return x;
        }
        if n==-1{
            return 1.0/x;
        }
        let e=n/2;
        let t=Solution::my_pow(x,e);
        if n%2==0{t*t}else{t*t*if n>0{x}else{1.0/x}}
    }
}<file_sep>// Definition for a binary tree node.
// #[derive(Debug, PartialEq, Eq)]
// pub struct TreeNode {
// pub val: i32,
// pub left: Option<Rc<RefCell<TreeNode>>>,
// pub right: Option<Rc<RefCell<TreeNode>>>,
// }
//
// impl TreeNode {
// #[inline]
// pub fn new(val: i32) -> Self {
// TreeNode {
// val,
// left: None,
// right: None
// }
// }
// }
use std::rc::Rc;
use std::cell::RefCell;
impl Solution {
    /// Level-order (BFS) traversal: `q` holds the current level, `next`
    /// collects the level below.
    pub fn level_order(root: Option<Rc<RefCell<TreeNode>>>) -> Vec<Vec<i32>> {
        let mut q=match root{
            None=>vec![],
            Some(r)=>vec![r]
        };
        let mut result=vec![];
        while !q.is_empty(){
            let (mut sub,mut next)=(vec![],vec![]);
            for x in q{
                sub.push(x.borrow().val.clone());
                if let Some(l)=x.borrow().left.clone(){
                    next.push(l);
                }
                if let Some(r)=x.borrow().right.clone(){
                    next.push(r);
                }
            }
            result.push(sub);
            q=next;
        }
        return result;
    }
}<file_sep># Definition for a binary tree node.
# class TreeNode:
# def __init__(self, val=0, left=None, right=None):
# self.val = val
# self.left = left
# self.right = right
class Solution:
    def diameterOfBinaryTree(self, root: TreeNode) -> int:
        """Length (in edges) of the longest path between any two nodes.

        Single post-order pass: the depth computation also folds the best
        left+right span at each node into `diameter`, replacing the original
        traverse-plus-depth combination (which recomputed depths at every
        node, O(n^2)) with an O(n) algorithm. Same results for all inputs.
        """
        diameter = 0

        def depth(node: TreeNode) -> int:
            nonlocal diameter
            if not node:
                return 0
            left = depth(node.left)
            right = depth(node.right)
            # the longest path through `node` uses both subtree depths
            diameter = max(diameter, left + right)
            return max(left, right) + 1

        depth(root)
        return diameter
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, val=0, left=None, right=None):
# self.val = val
# self.left = left
# self.right = right
class Solution:
    def diameterOfBinaryTreeInfer(self, root: TreeNode) -> int:
        """Diameter of the tree in edges, computed in one post-order pass."""
        best = 0

        def height(node: TreeNode) -> int:
            # Returns the height of `node` while folding the best
            # left+right span seen so far into `best`.
            nonlocal best
            if node is None:
                return 0
            left_h = height(node.left)
            right_h = height(node.right)
            best = max(best, left_h + right_h)
            return 1 + max(left_h, right_h)

        height(root)
        return best
<file_sep>use std::cmp::max;
impl Solution {
    /// Longest substring containing every vowel an even number of times.
    /// Bit v of `status` is the parity of vowel v's count so far; equal
    /// parity masks at two positions bound such a substring, so remember
    /// each mask's first position and stretch from there.
    pub fn find_the_longest_substring(s: String) -> i32 {
        let mut status_pos:Vec<i32>=vec![-1;1<<5];
        let (mut status,mut ans)=(0,0);
        status_pos[0]=0;
        for (i,c) in s.char_indices(){
            match c{
                'a'=>status^=1<<0,
                'e'=>status^=1<<1,
                'i'=>status^=1<<2,
                'o'=>status^=1<<3,
                'u'=>status^=1<<4,
                _=>()
            }
            // first time this mask appears: record position; otherwise
            // extend the answer from the mask's first occurrence
            match status_pos[status]{
                -1=>status_pos[status]=(i as i32)+1,
                _=>ans=max(ans,(i as i32)+1-status_pos[status])
            }
        }
        ans
    }
}<file_sep>/**
* @param {number[][]} matrix
* @return {number[]}
*/
var spiralOrder = function (matrix) {
  // Walk the rectangle boundary clockwise, shrinking the l/r/u/b limits
  // after each side; the post-shrink checks handle non-square leftovers.
  let result = [];
  if (matrix.length === 0) return result;
  let l = 0, r = matrix[0].length - 1, u = 0, b = matrix.length - 1;
  while (true) {
    for (let i = l; i <= r; i++) result.push(matrix[u][i]);
    u++;
    if (u > b) return result;
    for (let i = u; i <= b; i++) result.push(matrix[i][r]);
    r--;
    if (r < l) return result;
    for (let i = r; i >= l; i--) result.push(matrix[b][i]);
    b--;
    if (b < u) return result;
    for (let i = b; i >= u; i--) result.push(matrix[i][l]);
    l++;
    if (l > r) return result;
  }
};<file_sep>/*
* @lc app=leetcode.cn id=79 lang=javascript
*
* [79] 单词搜索
*
* https://leetcode-cn.com/problems/word-search/description/
*
* algorithms
* Medium (40.89%)
* Likes: 338
* Dislikes: 0
* Total Accepted: 43.1K
* Total Submissions: 105.6K
* Testcase Example: '[["A","B","C","E"],["S","F","C","S"],["A","D","E","E"]]\n"ABCCED"'
*
* 给定一个二维网格和一个单词,找出该单词是否存在于网格中。
*
* 单词必须按照字母顺序,通过相邻的单元格内的字母构成,其中“相邻”单元格是那些水平相邻或垂直相邻的单元格。同一个单元格内的字母不允许被重复使用。
*
* 示例:
*
* board =
* [
* ['A','B','C','E'],
* ['S','F','C','S'],
* ['A','D','E','E']
* ]
*
* 给定 word = "ABCCED", 返回 true.
* 给定 word = "SEE", 返回 true.
* 给定 word = "ABCB", 返回 false.
*
*/
// @lc code=start
/**
* @param {character[][]} board
* @param {string} word
* @return {boolean}
*/
var exist = function(board, word) {
  // Word search: DFS from every cell; matched cells are temporarily
  // overwritten with '-' so a path cannot reuse them, then restored.
  let l=word.length;
  let r=board.length,c=board[0].length;
  function dfs(i,j,k){
    if(i<0||j<0||i>=r||j>=c) return false;
    if(board[i][j]!==word[k]) return false;
    if(k===l-1) return true;
    let t=board[i][j];
    board[i][j]='-'
    let res=dfs(i-1,j,k+1)||dfs(i,j-1,k+1)||dfs(i+1,j,k+1)||dfs(i,j+1,k+1);
    board[i][j]=t;
    return res;
  }
  for(let i=0;i<r;i++){
    for(let j=0;j<c;j++) if(dfs(i,j,0)) return true;
  }
  return false;
};
// @lc code=end
<file_sep>/**
* @param {string} s
* @return {string[]}
*/
var permutation = function (s) {
  // DFS over character positions; the Set deduplicates permutations when
  // `s` contains repeated characters.
  let len = s.length;
  let result = new Set();
  function helper(cur, st) {
    if (cur.length === len) {
      result.add(cur);
      return;
    }
    for (let i = 0; i < len; i++) {
      if (st.has(i)) continue;
      st.add(i);
      helper(cur + s[i], st);
      st.delete(i);
    }
  }
  helper('', new Set());
  return [...result];
};<file_sep>class Solution
{
public:
    // Maximum-product contiguous subarray: keep the running max AND min
    // products, because a negative factor swaps their roles.
    int maxProduct(vector<int> &nums)
    {
        int result = nums[0];
        int min0 = nums[0], max0 = nums[0];
        for (int i = 1; i < nums.size(); i++)
        {
            int tMin = min0 * nums[i], tMax = max0 * nums[i];
            min0 = min(tMax, min(tMin, nums[i]));
            max0 = max(tMin, max(tMax, nums[i]));
            result = max(max0, result);
        }
        return result;
    }
};<file_sep>use num::integer::sqrt;
fn divisors(integer: u32) -> Result<Vec<u32>, String> {
    // Proper divisors in (1, n/2]: Ok(list) when any exist, otherwise
    // Err("n is prime"). (The `use num::integer::sqrt` import above is unused.)
    let divisors:Vec<u32>=(2..integer/2+1).filter(|x| integer%x==0).collect();
    match !divisors.is_empty(){
        true => Ok(divisors),
        _ => Err(format!("{} is prime",integer).to_string())
    }
}<file_sep>impl Solution {
pub fn single_numbers(nums: Vec<i32>) -> Vec<i32> {
let mut r=0;
for x in &nums{
r^=x;
}
let mut d=1;
while (d&r)==0{
d<<=1;
}
let mut a=0;
let mut b=0;
for x in &nums{
if x&d==0{
a^=x;
}
else{
b^=x;
}
}
return vec![a,b];
}
}<file_sep>class Solution
{
public:
    // Median of two sorted arrays in O(log(min(m,n))): binary-search the
    // partition i of the shorter array so the combined left halves hold
    // (m+n+1)/2 elements and every left element <= every right element.
    double findMedianSortedArrays(vector<int> &nums1, vector<int> &nums2)
    {
        if (nums1.size() > nums2.size())
        {
            return findMedianSortedArrays(nums2, nums1);
        }
        int m = nums1.size(), n = nums2.size();
        int leftCount = (m + n + 1) / 2;
        int l = 0, r = m;
        while (l < r)
        {
            int i = (l + r) / 2;
            int j = leftCount - i;
            // nums2's left half still exceeds nums1's pivot: move i right
            if (nums2[j - 1] > nums1[i])
            {
                l = i + 1;
            }
            else
            {
                r = i;
            }
        }
        int i = l;
        int j = leftCount - i;
        // sentinels stand in for empty halves
        int leftMax1 = i == 0 ? INT_MIN : nums1[i - 1];
        int leftMax2 = j == 0 ? INT_MIN : nums2[j - 1];
        int rightMin1 = i == m ? INT_MAX : nums1[i];
        int rightMin2 = j == n ? INT_MAX : nums2[j];
        return (m + n) % 2 ? max(leftMax1, leftMax2) : (max(leftMax2, leftMax1) + min(rightMin1, rightMin2)) / 2.0;
    }
};<file_sep>/**
* Definition for singly-linked list.
* class ListNode {
* val: number
* next: ListNode | null
* constructor(val?: number, next?: ListNode | null) {
* this.val = (val===undefined ? 0 : val)
* this.next = (next===undefined ? null : next)
* }
* }
*/
function mergeTwoLists(list1: ListNode | null, list2: ListNode | null): ListNode | null {
  // Merge two sorted lists with the classic dummy-head two-pointer sweep.
  // Ties take the node from list1, matching stable-merge behaviour.
  const dummy = new ListNode(-1);
  let tail = dummy;
  let a = list1;
  let b = list2;
  while (a !== null && b !== null) {
    if (a.val <= b.val) {
      tail.next = a;
      a = a.next;
    } else {
      tail.next = b;
      b = b.next;
    }
    tail = tail.next;
  }
  // At most one list has nodes left; splice it on wholesale.
  tail.next = a !== null ? a : b;
  return dummy.next;
};
<file_sep>/**
* @param {string} astr
* @return {boolean}
*/
var isUnique = function (astr) {
    // A Set collapses duplicates, so equal sizes <=> all characters distinct.
    return new Set(astr).size === astr.length;
};<file_sep># Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
from collections import deque
class Codec:
    """Level-order (BFS) serializer/deserializer for a binary tree."""

    SEP = ','   # value separator in the encoded string
    NULL = '#'  # marker for an absent child

    def serialize(self, root):
        """Encode the tree rooted at `root` as a comma-separated BFS string."""
        if not root:
            return self.NULL
        q = deque([])
        result = []
        q.append(root)
        while len(q) != 0:
            cur = q.popleft()
            if cur:
                result.append(str(cur.val))
                result.append(self.SEP)
                # enqueue both children (possibly None) so gaps are recorded
                q.append(cur.left)
                q.append(cur.right)
            else:
                result.append(self.NULL)
                result.append(self.SEP)
        result.pop()  # drop the trailing separator
        return ''.join(result)

    def deserialize(self, data):
        """Rebuild the tree from a string produced by serialize()."""
        nodes = deque(data.split(self.SEP))
        if len(nodes) == 0:
            return None
        rootVal = nodes.popleft()
        if rootVal == self.NULL:
            return None
        # BUGFIX: convert serialized text back to int; the original rebuilt
        # nodes with string values, breaking any numeric use of .val.
        root = TreeNode(int(rootVal))
        q = deque([])
        q.append(root)
        while len(q) != 0:
            cur = q.popleft()
            if len(nodes) != 0:
                leftVal = nodes.popleft()
                if leftVal != self.NULL:
                    cur.left = TreeNode(int(leftVal))
                    q.append(cur.left)
                else:
                    cur.left = None
            if len(nodes) != 0:
                rightVal = nodes.popleft()
                if rightVal != self.NULL:
                    cur.right = TreeNode(int(rightVal))
                    q.append(cur.right)
                else:
                    cur.right = None
        return root
# Your Codec object will be instantiated and called as such:
# ser = Codec()
# deser = Codec()
# ans = deser.deserialize(ser.serialize(root))
<file_sep>function frequencySort(s: string): string {
  const freqs = new Map<string,number>();
  // count each character's occurrences
  for (let char of s) {
    freqs.set(char, (freqs.get(char)??0) + 1);
  }
  // expand each character into its full run, most frequent runs first
  return [...freqs.entries()].map(([char,count]) => char.repeat(count)).sort((a,b)=>b.length - a.length).join('')
};
<file_sep>impl Solution {
    /// N-queens via row-by-row backtracking, pruning with is_valid.
    pub fn solve_n_queens(n: i32) -> Vec<Vec<String>> {
        let mut result = vec![];
        let size = n as usize;
        fn backtrace(mut track: &mut Vec<Vec<char>>, row: usize, mut result: &mut Vec<Vec<String>>, size: usize) {
            // all rows placed: snapshot the board as strings
            if row >= size {
                result.push(track.into_iter().map(|chars| chars.iter().collect()).collect());
                return;
            }
            for i in 0..size {
                if !Solution::is_valid(&track, row, i, size) {
                    continue;
                }
                track[row][i] = 'Q';
                backtrace(&mut track, row + 1, &mut result, size);
                track[row][i] = '.';
            }
        }
        let mut track = vec![vec!['.'; size]; size];
        backtrace(&mut track, 0, &mut result, size);
        return result;
    }
fn is_valid(borad: &Vec<Vec<char>>, row: usize, col: usize, size: usize) -> bool {
for i in 0..row {
if borad[i][col] == 'Q' {
return false;
}
}
for j in 0..col {
if borad[row][j] == 'Q' {
return false;
}
}
let (mut i, mut j) = (row - 1, col - 1);
while i as isize >= 0 && j as isize >= 0 {
if borad[i][j] == 'Q' {
return false;
}
i -= 1;
j -= 1;
}
let (mut i, mut j) = (row - 1, col + 1);
while i as isize >= 0 && j < size {
if borad[i][j] == 'Q' {
return false;
}
i -= 1;
j += 1;
}
return true;
}
}
<file_sep>use std::collections::BinaryHeap;
impl Solution {
    /// k points closest to the origin, via a bounded max-heap keyed on
    /// squared distance: once it holds k entries, replace the current
    /// farthest whenever a closer point appears.
    pub fn k_closest(points: Vec<Vec<i32>>, k: i32) -> Vec<Vec<i32>> {
        let mut heap = BinaryHeap::new();
        for v in points {
            let distance = v[0].pow(2) + v[1].pow(2);
            if heap.len() < k as usize {
                heap.push((distance,v));
            } else if heap.peek().unwrap().0 > distance {
                heap.pop();
                heap.push((distance,v));
            }
        }
        heap.into_vec().into_iter().map(|kv| kv.1).collect()
    }
}
<file_sep>/**
* 代码中的类名、方法名、参数名已经指定,请勿修改,直接返回方法规定的值即可
*
* @param target int整型
* @param array int整型二维数组
* @return bool布尔型
*/
export function Find(target: number, array: number[][]): boolean {
  // Staircase search from the top-right corner of a matrix whose rows and
  // columns are sorted ascending: each comparison discards a row or column.
  const h = array.length;
  // BUGFIX: optional-chain the length access too, so an empty matrix
  // returns false instead of throwing on array[0].length.
  const w = array?.[0]?.length ?? -1;
  let col = w - 1;
  let row = 0;
  while (col >= 0 && row < h) {
    const cell = array[row][col];
    if (cell === target) {
      return true;
    }
    if (cell > target) {
      col--; // everything below in this column is even bigger
      continue;
    }
    row++;   // everything left in this row is even smaller
  }
  return false;
}
<file_sep>/**
* Definition for singly-linked list.
* function ListNode(val) {
* this.val = val;
* this.next = null;
* }
*/
/**
* @param {ListNode} l1
* @param {ListNode} l2
* @return {ListNode}
*/
var addTwoNumbers = function (l1, l2) {
  // Add Two Numbers II: digits are most-significant first, so push each list
  // onto a stack, add from the tails, and prepend result nodes.
  let p1 = l1, p2 = l2;
  let s1 = [], s2 = [];
  while (p1) {
    s1.push(p1.val);
    p1 = p1.next;
  }
  while (p2) {
    s2.push(p2.val);
    p2 = p2.next;
  }
  let ans = null, carry = 0;
  while (s1.length || s2.length || carry !== 0) {
    let a = s1.length ? s1.pop() : 0;
    let b = s2.length ? s2.pop() : 0;
    let x = a + b + carry;
    carry = x / 10 | 0;
    x = x % 10;
    let cur = new ListNode(x);
    cur.next = ans;
    ans = cur;
  }
  return ans;
};<file_sep>impl Solution {
pub fn valid_palindrome(s: String) -> bool {
let s=s.as_bytes();
let (mut l,mut r)=(0,s.len()-1);
let is_palindrome=|mut l,mut r|{
while l<r&&s[l]==s[r]{
l+=1;
r-=1;
}
return l>=r;
};
while l<r&&s[l]==s[r]{
l+=1;
r-=1;
}
return is_palindrome(l+1,r)||is_palindrome(l,r-1);
}
}<file_sep>/**
* @param {string} s
* @return {string}
*/
var longestPalindrome = function (s) {
  // Manacher's algorithm over a '#'-interleaved copy; dp[i] is the palindrome
  // radius centred at i, seeded from the mirror centre inside [.., maxRight].
  if (s.length < 2) return s;
  let origin = s;
  s = addSplit(s, "#");
  let begin = 0,
    maxLen = 1;
  let maxRight = 0,
    center = 0;
  let dp = Array(s.length).fill(0);
  for (let i = 0; i < s.length; i++) {
    if (i < maxRight) {
      let mirror = 2 * center - i;
      dp[i] = Math.min(dp[mirror], maxRight - i);
    }
    // expand around i beyond the inherited radius
    let l = i - dp[i] - 1,
      r = i + dp[i] + 1;
    while (l >= 0 && r < s.length && s[l] === s[r]) dp[i]++, l--, r++;
    if (i + dp[i] > maxRight) {
      center = i;
      maxRight = i + dp[i];
    }
    if (dp[i] > maxLen) {
      maxLen = dp[i];
      // map the padded index back into the original string
      begin = (i - maxLen) / 2;
    }
  }
  return origin.substr(begin, maxLen);
};
// Interleaves '#' around every character ("ab" -> "#a#b#").
// NOTE(review): the `sp` parameter is ignored — '#' is hard-coded.
const addSplit = (s, sp) => s.replace(/./g, "#$&") + "#";
<file_sep># CodeWars系列
## 8kyu
### String repeat
> Write a function called `repeatString` which repeats the given String `src` exactly `count` times.
>
> ```javascript
> repeatStr(6, "I") // "IIIIII"
> repeatStr(5, "Hello") // "HelloHelloHelloHelloHello"
> ```
#### 思路
首先想到的是`Array`、`Array.prototype.fill`和`Array.prototype.join`方法。但是看别人用的最多的是`String.prototype.repeat(n)`方法。
#### 代码
- Array.prototype.fill
```javascript
function repeatStr (n, s) {
return Array(n).fill(s).join('');
}
```
- String.prototype.repeat
```javascript
function repeatStr (n, s) {
return s.repeat(n);
}
```
### Reversed Strings
> Complete the solution so that it reverses the string value passed into it.
>
> ```javascript
> solution('world'); // returns 'dlrow'
> ```
#### 思路
利用`Array.prototype.reverse`,需要先将字符串转换为数组。
#### 代码
```javascript
function solution(str){
return str.split('').reverse().join('');
}
```
### Keep Hydrated!
> Nathan loves cycling.
>
> Because Nathan knows it is important to stay hydrated, he drinks 0.5 litres of water per hour of cycling.
>
> You get given the time in hours and you need to return the number of litres Nathan will drink, rounded to the smallest value.
>
> For example:
>
> time = 3 ----> litres = 1
>
> time = 6.7---> litres = 3
>
> time = 11.8--> litres = 5
#### 思路
可以使用`parseInt`或者`Math.floor`静态方法进行向下取整,也可以利用逻辑运算隐式转换为整数。
#### 代码
- 逻辑隐式转换
```javascript
function litres(time) {
return time/2|0;
}
```
- parseInt
```javascript
function litres(time) {
return parseInt(time*0.5);
}
```
### Square(n) Sum
> Complete the square sum function so that it squares each number passed into it and then sums the results together.
>
> For example, for `[1, 2, 2]` it should return `9` because `1^2 + 2^2 + 2^2 = 9`.
#### 思路
map&reduce
#### 代码
```javascript
function squareSum(numbers){
return numbers.map(x=>x**2).reduce((s,x)=>s+x,0);
}
```
### Convert a String to a Number!
> Note: This kata is inspired by [Convert a Number to a String!](http://www.codewars.com/kata/convert-a-number-to-a-string/). Try that one too.
>
> ## Description
>
> We need a function that can transform a string into a number. What ways of achieving this do you know?
>
> Note: Don't worry, all inputs will be strings, and every string is a perfectly valid representation of an integral number.
>
> ## Examples
>
> ```javascript
> stringToNumber("1234") == 1234
> stringToNumber("605" ) == 605
> stringToNumber("1405") == 1405
> stringToNumber("-7" ) == -7
> ```
#### 思路
利用定义的转换函数,或者隐式的ToNumber转换。`parseInt`、`Number`、`+str`、`str|0`等操作。
#### 代码
```javascript
var stringToNumber = function(str){
// put your code here
return +str;
}
```
### Count the Monkeys!
> You take your son to the forest to see the monkeys. You know that there are a certain number there (n), but your son is too young to just appreciate the full number, he has to start counting them from 1.
>
> As a good parent, you will sit and count with him. Given the number (n), populate an array with all numbers up to and including that number, but excluding zero.
>
> For example:
>
> ```javascript
> monkeyCount(10) // --> [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
> monkeyCount(1) // --> [1]
> ```
#### 思路
利用`Array`,`Array.prototype.fill`,`Array.prototype.map`方式生成数组。不过看别人最好的方法应该是使用`Array.from`静态方法因为其中包含了mapFn。
#### 代码
- Array、fill、map
```javascript
function monkeyCount(n) {
// your code here
return Array(n).fill(0).map((x,i)=>i+1);
}
```
- Array.from
```javascript
function monkeyCount(n) {
// your code here
return Array.from({length:n},(x,i)=>i+1);
}
```
### Reversed Words
> Complete the solution so that it reverses all of the words within the string passed in.
>
> Example:
>
> ```javascript
> reverseWords("The greatest victory is that which requires no battle")
> // should return "battle no requires which that is victory greatest The"
> ```
#### 思路
按照空格分隔并倒转。
- 时间复杂度On,空间On
#### 代码
```javascript
function reverseWords(str){
return str.split(' ').reverse().join(' '); // reverse those words
}
```
### If you can't sleep, just count sheep!!
> If you can't sleep, just count sheep!!
>
> ## Task:
>
> Given a non-negative integer, `3` for example, return a string with a murmur: `"1 sheep...2 sheep...3 sheep..."`. Input will always be valid, i.e. no negative integers.
#### 思路
使用`Array.from`静态方法创建数组,提供映射函数,其中使用`Template Literals`,然后使用`Array.prototype.join`方法拼接。
- 时间On,空间On
#### 代码
```javascript
var countSheep = function (num){
//your code here
return Array.from({length:num},(x,i)=>`${i+1} sheep...`).join('');
}
```
### Reversed sequence
> Get the number n `(n>0)` to return the reversed sequence from n to 1.
>
> Example : n=5 >> [5,4,3,2,1]
#### 思路
生成数组,提供映射函数。
- 时间On,空间On
```javascript
const reverseSeq = n => {
return Array.from({length:n},(x,i)=>n-i);
};
```
### To square(root) or not to square(root)
> \#To square(root) or not to square(root)
>
> Write a method, that will get an integer array as parameter and will process every number from this array.
> Return a new array with processing every number of the input-array like this:
>
> ```
> If the number has an integer square root, take this, otherwise square the number.
>
> [4,3,9,7,2,1] -> [2,9,3,49,4,1]
> ```
>
> The input array will always contain only positive numbers and will never be empty or null.
>
> The input array should not be modified!
>
> Have fun coding it and please don't forget to vote and rank this kata! :-)
>
> I have also created other katas. Take a look if you enjoyed this kata!
#### 思路
映射即可。
- 时间On,空间On
#### 代码
```javascript
function squareOrSquareRoot(array) {
return array.map(x=>{
let tmp=Math.sqrt(x)|0;
return tmp**2===x?tmp:x**2;
});
}
```
## 7kyu
### Get the Middle Character
> You are going to be given a word. Your job is to return the middle character of the word. If the word's length is odd, return the middle character. If the word's length is even, return the middle 2 characters.
>
> \#Examples:
>
> ```
> Kata.getMiddle("test") should return "es"
>
> Kata.getMiddle("testing") should return "t"
>
> Kata.getMiddle("middle") should return "dd"
>
> Kata.getMiddle("A") should return "A"
> ```
>
> \#Input
>
> A word (string) of length `0 < str < 1000` (In javascript you may get slightly more than 1000 in some test cases due to an error in the test cases). You do not need to test for this. This is only here to tell you that you do not need to worry about your solution timing out.
>
> \#Output
>
> The middle character(s) of the word represented as a string.
#### 思路
利用`String.prototype.substr`方法。
- 时间O1,空间O1
#### 代码
```javascript
function getMiddle(s)
{
//Code goes here!
return s.substr((s.length-1)/2|0,s.length%2?1:2);
}
```
### Isograms
> An isogram is a word that has no repeating letters, consecutive or non-consecutive. Implement a function that determines whether a string that contains only letters is an isogram. Assume the empty string is an isogram. Ignore letter case.
>
> ```javascript
> isIsogram("Dermatoglyphics") == true
> isIsogram("aba") == false
> isIsogram("moOse") == false // -- ignore letter case
> ```
#### 思路
使用集合,如果产生的集合大小比原字符串小,则存在重复的。
- 时间On,空间On
#### 代码
```javascript
function isIsogram(str){
return new Set(str.toLowerCase()).size===str.length;
}
```
### Singleton Pattern
> In software engineering, the singleton pattern is a design pattern that restricts the instantiation of a class to one object. This is useful when exactly one object is needed to coordinate actions across the system.
>
> Create an Singleton pattern, so there is one object in system.
>
> Example:
>
> ```javascript
> var obj1 = new Singleton();
> var obj2 = new Singleton();
> obj1 === obj2; // => true
> obj1.test = 1;
> obj2.test; // => 1
> ```
#### 思路
可以使用闭包来保存单例。
#### 代码
```javascript
var Singleton = (function(){
// implement singleton Class
let instance;
return function(){
return instance||(instance=this);
}
})();
```
### Sum of Odd Cubed Numbers
> Find the sum of the odd numbers within an array, after cubing the initial integers. The function should return `undefined`/`None`/`nil`/`NULL` if any of the values aren't numbers.
#### 思路
筛选计算即可。不过这题了解到了逗号表达式的妙用。`return`只能当做语句而不能当做表达式,那样就能在三元运算符中使用了。
#### 代码
- 解1
```javascript
function cubeOdd(arr) {
// insert code here >.<
let flag=false;
let result = arr.filter(n=>Number.isInteger(n)?(Math.abs(n)%2===1):(flag=true,n)).map(x=>x**3).reduce((a,b)=>a+b,0);
return flag?(void 0):result;
}
```
- 解2
```javascript
function cubeOdd(arr) {
return arr.every(Number.isInteger)?arr.filter(x=>Math.abs(x)%2===1).reduce((a,b)=>a+b**3,0):void 0;
}
```
### Currying functions: multiply all elements in an array
> To complete this Kata you need to make a function `multiplyAll`/`multiply_all` which takes an array of integers as an argument. This function must return another function, which takes a single integer as an argument and returns a new array.
>
> The returned array should consist of each of the elements from the first array multiplied by the integer.
>
> Example:
>
> ```javascript
> multiplyAll([1, 2, 3])(2) = [2, 4, 6];
> ```
>
> Here's a [nice Youtube video about currying](https://www.youtube.com/watch?v=iZLP4qOwY8I), which might help you if this is new to you.
#### 思路
使用`Array.prototype.map`函数进行映射。
- 时间On,空间O1
#### 代码
```javascript
/* left blank for unlimited creativity :) */
const multiplyAll=arr=>times=>arr.map(x=>x*times);
```
### Sum of a Beach
> Beaches are filled with sand, water, fish, and sun. Given a string, calculate how many times the words `"Sand"`, `"Water"`, `"Fish"`, and `"Sun"` appear without overlapping (regardless of the case).
>
> ## Examples
>
> ```javascript
> sumOfABeach("WAtErSlIde") ==> 1
> sumOfABeach("GolDeNSanDyWateRyBeaChSuNN") ==> 3
> sumOfABeach("gOfIshsunesunFiSh") ==> 4
> sumOfABeach("cItYTowNcARShoW") ==> 0
> ```
#### 思路
global & ignore case。正则匹配。
#### 代码
```javascript
function sumOfABeach(beach) {
// your code here
return (beach.match(/(water)|(sand)|(fish)|(sun)/ig)||"").length;
}
```
### Regexp Basics - is it a letter?
> Complete the code which should return `true` if the given object is a single ASCII letter (lower or upper case), `false` otherwise.
#### 思路
在原型链上添加方法,使用this指向当前调用者,正则匹配时调用ToPrimitive转化为字符串。
#### 代码
```javascript
String.prototype.isLetter = function() {
return /^[a-z]$/i.test(this)
}
```
### Regexp Basics - is it a vowel?
> Implement the function which should return `true` if given object is a vowel (meaning `a, e, i, o, u`), and `false` otherwise.
#### 思路
正则匹配。
#### 代码
```javascript
String.prototype.vowel = function() {
return /^[aeiou]$/i.test(this);
};
```
### Hide password from jdbc url
> We have to create a function that receives a connection string with password included and you have to mask the password i.e. change password by asterisks.
>
> Preconditions:
>
> - non empty valid url
> - password always next to string section `password=`
> - assume password will not contain ampersand sign for sake of simplicity
> - to make it more real it has non ASCII characters
> - "password=" and "user" will occur only once
>
> > empty passwords are not validated but best solutions take empty passwords into account
>
> Example:
>
> ------
>
> ## input
>
> > ```
> > jdbc:mysql://sdasdasdasd:szdasdasd:dfsdfsdfsdf/sdfsdfsdf?user=root&password=<PASSWORD>
> > ```
>
> ## output
>
> > ```
> > jdbc:mysql://sdasdasdasd:szdasdasd:dfsdfsdfsdf/sdfsdfsdf?user=root&password=*****
> > ```
>
> Extra readings:
>
> <https://alvinalexander.com/java/jdbc-connection-string-mysql-postgresql-sqlserver>
#### 思路
正则匹配querystring的password部分,以`password`开头截止到`&`或者字符串尾结束。
#### 代码
```javascript
function hidePasswordFromConnection(urlString){
return urlString.replace(/password=[^&]*/g,x=>`password=${'*'.repeat(x.length-9)}`);
}
```
### Coding Meetup #1 - Higher-Order Functions Series - Count the number of JavaScript developers coming from Europe
> You will be given an array of objects (hashes in ruby) representing data about developers who have signed up to attend the coding meetup that you are organising for the first time.
>
> Your task is to **return the number of JavaScript developers coming from Europe**.
>
> For example, given the following list:
>
> ```javascript
> var list1 = [
> { firstName: 'Noah', lastName: 'M.', country: 'Switzerland', continent: 'Europe', age: 19, language: 'JavaScript' },
> { firstName: 'Maia', lastName: 'S.', country: 'Tahiti', continent: 'Oceania', age: 28, language: 'JavaScript' },
> { firstName: 'Shufen', lastName: 'L.', country: 'Taiwan', continent: 'Asia', age: 35, language: 'HTML' },
> { firstName: 'Sumayah', lastName: 'M.', country: 'Tajikistan', continent: 'Asia', age: 30, language: 'CSS' }
> ];
> ```
>
> your function should return number `1`.
>
> If, there are no JavaScript developers from Europe then your function should return `0`.
>
> Notes:
>
> - The format of the strings will always be `Europe` and `JavaScript`.
> - All data will always be valid and uniform as in the example above.
#### 思路
reduce统计。
#### 代码
```javascript
function countDevelopers(list) {
// your awesome code here :)
return list.reduce((count,x)=>x.continent==='Europe'&&x.language==='JavaScript'?count+1:count,0)
}
```
## 6kyu
### Find the odd int
> Given an array, find the integer that appears an odd number of times.
>
> There will always be only one integer that appears an odd number of times.
#### 思路
利用异或的交换律,两个相同数异或为0,0与任何数异或等于任何数。
- 时间复杂度On,空间O1
#### 代码
```javascript
function findOdd(A) {
//happy coding!
return A.reduce((last,cur)=>last^cur);
}
```
### Convert string to camel case
> Complete the method/function so that it converts dash/underscore delimited words into camel casing. The first word within the output should be capitalized **only** if the original word was capitalized (known as Upper Camel Case, also often referred to as Pascal case).
>
> ## Examples
>
> ```javascript
> toCamelCase("the-stealth-warrior") // returns "theStealthWarrior"
>
> toCamelCase("The_Stealth_Warrior") // returns "TheStealthWarrior"
> ```
#### 思路
使用正则表达式和`String.prototype.replace`。正则表达式匹配下划线和后续字母。`String.prototype.replace`参数如下:
```javascript
var newStr = str.replace(regexp|substr, newSubstr|function)
```
- 第一个参数是正则表达式或者子字符串,子字符串只匹配第一次出现。
- 第二个参数是新的替换子字符串或者替换函数。如果使用正则表达式全局模式则每次匹配都会调用一次替换函数,返回值是要替换的新函数。替换函数第一个参数是匹配到的子串,剩下的参数是捕获组捕获的内容,最后两个参数是匹配字符串首字符下标`offset`和原字符串`string`

#### 代码
```javascript
function toCamelCase(str){
return str.replace(/(_|-)[a-z]/ig,x=>x[1].toUpperCase())
}
```
### Count the smiley faces!
> Description:
> Given an array (arr) as an argument complete the function `countSmileys` that should return the total number of smiling faces.
> Rules for a smiling face:
> -Each smiley face must contain a valid pair of eyes. Eyes can be marked as `:` or `;`
> -A smiley face can have a nose but it does not have to. Valid characters for a nose are `-` or `~`
> -Every smiling face must have a smiling mouth that should be marked with either `)` or `D`.
> No additional characters are allowed except for those mentioned.
> **Valid smiley face examples:**
> `:) :D ;-D :~)`
> **Invalid smiley faces:**
> `;( :> :} :]`
>
> **Example cases:**
>
> ```javascript
> countSmileys([':)', ';(', ';}', ':-D']); // should return 2;
> countSmileys([';D', ':-(', ':-)', ';~)']); // should return 3;
> countSmileys([';]', ':[', ';*', ':$', ';-D']); // should return 1;
> ```
>
> Note:
>
> In case of an empty array return 0. You will not be tested with invalid input (input will always be an array). Order of the face (eyes, nose, mouth) elements will always be the same
>
> ### Happy coding!
#### 思路
使用正则表达式。眼睛必须有,鼻子不必须有,嘴必须有。。

#### 代码
```javascript
//return the total number of smiling faces in the array
function countSmileys(arr) {
return arr.reduce((count,x)=>/[:;][-~]?[\)D]/g.test(x)+count,0)
}
```
### Dashatize it
> Given a number, return a string with dash`'-'`marks before and after each odd integer, but do not begin or end the string with a dash mark.
>
> Ex:
>
> ```javascript
> dashatize(274) -> '2-7-4'
> dashatize(6815) -> '68-1-5'
> ```
#### 思路
正则表达式,将数组字符串化,然后匹配奇数,将其替换。替换时可能会产生重复(例如相邻的两个奇数)。最后将首尾多余的dash删除。
#### 代码
- v1
```javascript
function dashatize(num) {
if(num<10&&num>-10) return Math.abs(num)+'';
return num.toString().replace(/[13579]/g,x=>`-${x}-`).replace(/^-|--|-$/g,x=>x.length===2?'-':'');
}
```
- v2,优化后
```javascript
function dashatize(num) {
return num.toString().replace(/[13579]/g,x=>`-${x}-`).replace(/--/g,'-').replace(/^-|-$/g,'');
}
```
### Valid Phone Number
> Write a function that accepts a string, and returns true if it is in the form of a phone number.
> Assume that any integer from 0-9 in any of the spots will produce a valid phone number.
>
> Only worry about the following format:
> (123) 456-7890 (don't forget the space after the close parentheses)
>
> Examples:
>
> ```j
> validPhoneNumber("(123) 456-7890") => returns true
> validPhoneNumber("(1111)555 2345") => returns false
> validPhoneNumber("(098) 123 4567") => returns false
> ```
#### 思路
正则表达式。
#### 代码
```javascript
function validPhoneNumber(phoneNumber){
return /^\(\d{3}\) \d{3}-\d{4}$/g.test(phoneNumber);
}
```
## 5kyu
### Simple Events
> Your goal is to write an **Event** constructor function, which can be used to make **event** objects.
>
> An **event** object should work like this:
>
> - it has a **.subscribe()** method, which takes a function and stores it as its handler
> - it has an **.unsubscribe()** method, which takes a function and removes it from its handlers
> - it has an **.emit()** method, which takes an arbitrary number of arguments and calls all the stored functions with these arguments
>
> As this is an elementary example of events, there are some simplifications:
>
> - all functions are called with correct arguments (*e.g.* only functions will be passed to unsubscribe)
> - you should not worry about the order of handlers' execution
> - the handlers will not attempt to modify an event object (*e.g.* add or remove handlers)
> - the context of handlers' execution is not important
> - each handler will be subscribed at most once at any given moment of time. It can still be unsubscribed and then subscribed again
>
> Also see an example test fixture for suggested usage
#### 思路
观察者模式。使用`Set`来保存和删除添加的Handler。emit时调用所有注册的Handler。
#### 代码
```javascript
function Event() {
this.handlers=new Set();
}
Event.prototype.subscribe=function(handler){
this.handlers.add(handler);
}
Event.prototype.unsubscribe=function(handler){
this.handlers.delete(handler);
}
Event.prototype.emit=function(...args){
for(let fn of this.handlers.values()){
fn(...args);
}
}
```
### A Chain adding function
> We want to create a function that will add numbers together when called in succession.
>
> ```javascript
> add(1)(2);
> // returns 3
> ```
>
> We also want to be able to continue to add numbers to our chain.
>
> ```javascript
> add(1)(2)(3); // 6
> add(1)(2)(3)(4); // 10
> add(1)(2)(3)(4)(5); // 15
> ```
>
> and so on.
>
> A single call should return the number passed in.
>
> ```javascript
> add(1); // 1
> ```
>
> We should be able to store the returned values and reuse them.
>
> ```javascript
> var addTwo = add(2);
> addTwo; // 2
> addTwo + 5; // 7
> addTwo(3); // 5
> addTwo(3)(5); // 10
> ```
>
> We can assume any number being passed in will be valid whole number.
#### 思路
通过观察上述`addTwo`函数既可以当数值也可以调用,因此可以通过`valueOf`方法返回所求数值。
#### 代码
```javascript
function add(n){
const helper=x=>add(n+x);
helper.valueOf=()=>n;
return helper;
}
```
## 4kyu
### Advanced Events
> This excercise is a more sophisticated version of [Simple Events](http://www.codewars.com/dojo/katas/52d3b68215be7c2d5300022f/) kata.
>
> Your task is to implement an **Event** constructor function for creating event objects
>
> ```javascript
> var event = new Event();
> ```
>
> which comply to the following:
>
> - an **event** object should have **.subscribe()** and **.unsubscribe()** methods to add and remove handlers
> - **.subscribe()** and **.unsubscribe()** should be able take an arbitrary number of arguments and tolerate invalid arguments (not functions, or for **unsubscribe**, functions which are not subscribed) by simply skipping them
> - multiple subscription of the same handler is allowed, and in this case unsubscription removes the last subscription of the same handler
> - an **event** object should have an **.emit()** method which must invoke all the handlers with the arguments provided
> - **.emit()** should use its own invocation context as handers' invocation context
> - the order of handlers invocation must match the order of subscription
>
> - handler functions can subscribe and unsubscribe handlers, but the changes should only apply to the next **emit** call - the handlers for an ongoing **emit** call should not be affected
> - **subscribe**, **unsubscribe** and **emit** are the only public properties that are allowed on **event** objects (apart from **Object.prototype** methods)
>
> Check the test fixture for usage example
#### 思路
这题值32分!指定用function形式来写,不过想尝试一下class,和别人代码相比自己好捞。下面是要点和难点,本题有时间会额外写一篇博客记录。
- 整体还是观察者模式
- 要求只公开`subscribe`,`unsubscribe`和`emit`方法,因此可以利用JS的闭包,使用模块模式,构造函数中返回对象。
- handler里可能会继续触发subscribe和unsubscribe,但对于本次emit不起作用,应用到下次emit。
#### 代码
```javascript
class Event{
constructor(){
let fns=[];
let copy=[];
let emitting=false;
return {
subscribe(...handlers){
for(let fn of handlers){
if(!fn instanceof Function) continue;
copy.push(fn);
if(emitting) return;
fns.push(fn);
}
},
unsubscribe(...handlers){
for(let fn of handlers){
if(!fn instanceof Function) continue;
let idx=copy.lastIndexOf(fn);
if(idx<0) continue;
copy.splice(idx,1);
if(emitting) return;
fns.splice(idx,1);
}
},
emit(...args){
emitting=true;
for(let fn of fns){
if(fn instanceof Function){
fn.call(this,...args);
}
}
fns=[...copy];
emitting=false;
}
}
}
}
```
<file_sep>function search(nums: number[], target: number): number {
  // Classic binary search over a sorted array; returns the index of
  // `target`, or -1 when it is absent.
  let lo = 0;
  let hi = nums.length - 1;
  while (lo < hi) {
    const probe = lo + ((hi - lo) >> 1);
    if (nums[probe] === target) return probe;
    if (nums[probe] > target) {
      hi = probe; // answer is at probe or to its left
    } else {
      lo = probe + 1; // answer is strictly right of probe
    }
  }
  return nums[lo] === target ? lo : -1;
};
<file_sep>var kidsWithCandies = function (
  candies: number[],
  extraCandies: number
): boolean[] {
  // A kid can have the most candies iff own pile + extras reaches the
  // current maximum pile.
  const threshold = Math.max(...candies);
  const verdicts: boolean[] = [];
  for (const candy of candies) {
    verdicts.push(candy + extraCandies >= threshold);
  }
  return verdicts;
};
<file_sep>use std::collections::VecDeque;
impl Solution {
pub fn is_bipartite(graph: Vec<Vec<i32>>) -> bool {
let mut ok = true;
let n = graph.len();
let mut visited = vec![false; n];
let mut colors = vec![false; n];
for i in 0..n {
Solution::bfs(&graph, &mut ok, i, &mut visited, &mut colors);
}
return ok;
}
pub fn bfs(graph: &Vec<Vec<i32>>, ok: &mut bool, start: usize, mut visited: &mut Vec<bool>, mut colors: &mut Vec<bool>) {
if !*ok {
return;
}
let mut q = VecDeque::new();
q.push_back(start);
visited[start] = true;
while q.len() != 0 {
let cur = q.pop_front().unwrap();
for &next in &graph[cur] {
let next_idx = next as usize;
if visited[next_idx] {
if colors[next_idx] == colors[cur] {
*ok = false;
return;
}
} else {
colors[next_idx] = !colors[cur];
visited[next_idx] = true;
q.push_back(next_idx);
}
}
}
}
}
<file_sep>export function basicOp(operation: string, value1: number, value2: number): number {
  // Dispatch explicitly instead of eval(): evaluating a caller-supplied
  // string is a code-injection hazard and defeats type checking.
  switch (operation) {
    case '+': return value1 + value2;
    case '-': return value1 - value2;
    case '*': return value1 * value2;
    case '/': return value1 / value2;
    default: throw new Error(`unsupported operation: ${operation}`);
  }
}<file_sep>function findErrorNums(nums: number[]): number[] {
  // Returns [duplicated value, missing value] for an array meant to hold
  // 1..n with one number repeated and one number lost.
  let repeated: number, losted: number;
  const countsMap = new Map<number, number>();
  nums.forEach((v) => {
    countsMap.set(v, (countsMap.get(v) ?? 0) + 1);
  });
  for (let i = 1; i <= nums.length; i++) {
    if (countsMap.get(i) === 2) {
      repeated = i;
      continue;
    }
    if (!countsMap.has(i)) {
      losted = i;
    }
  }
  return [repeated, losted];
};
<file_sep>export default class Database {
  // Lazy singleton: the sole instance lives in a private static field and
  // the constructor is private so callers must go through getInstance().
  private static instance: Database | null = null;
  private constructor() {}
  public static getInstance() {
    if (this.instance === null) {
      this.instance = new Database();
    }
    return this.instance;
  }
}
<file_sep>// Definition for a binary tree node.
// #[derive(Debug, PartialEq, Eq)]
// pub struct TreeNode {
// pub val: i32,
// pub left: Option<Rc<RefCell<TreeNode>>>,
// pub right: Option<Rc<RefCell<TreeNode>>>,
// }
//
// impl TreeNode {
// #[inline]
// pub fn new(val: i32) -> Self {
// TreeNode {
// val,
// left: None,
// right: None
// }
// }
// }
use std::rc::Rc;
use std::cell::RefCell;
impl Solution {
    /// Rebuilds a binary tree from its preorder and postorder traversals.
    /// When a node has only one child the split is ambiguous; this code
    /// treats preorder[1] as the root of the LEFT subtree.
    pub fn construct_from_pre_post(preorder: Vec<i32>, postorder: Vec<i32>) -> Option<Rc<RefCell<TreeNode>>> {
        match preorder.len() {
            0 => None,
            1 => Some(Rc::new(RefCell::new(TreeNode::new(preorder[0])))),
            l => {
                let val = preorder[0];
                let mut node = TreeNode::new(val);
                // preorder[1] is the left subtree's root; its position in
                // postorder determines how many nodes the left subtree has.
                let val_of_left_child = preorder[1];
                let postorder_index = postorder.iter().position(|&x| x == val_of_left_child).unwrap();
                let left_count = postorder_index + 1;
                // Left: next `left_count` preorder values / leading postorder values.
                let left = Solution::construct_from_pre_post((&preorder[1..left_count + 1]).to_vec(), (&postorder[..left_count]).to_vec());
                // Right: the remainder, minus postorder's trailing root value.
                let right = Solution::construct_from_pre_post((&preorder[left_count + 1..]).to_vec(), (&postorder[left_count..l - 1]).to_vec());
                node.left = left;
                node.right = right;
                Some(Rc::new(RefCell::new(node)))
            }
        }
    }
}
<file_sep>export function hexToDec(hexString: string): number {
  // Delegate to the runtime's radix-16 parser (Number.parseInt === parseInt).
  return Number.parseInt(hexString, 16);
}<file_sep>// Definition for a binary tree node.
// #[derive(Debug, PartialEq, Eq)]
// pub struct TreeNode {
// pub val: i32,
// pub left: Option<Rc<RefCell<TreeNode>>>,
// pub right: Option<Rc<RefCell<TreeNode>>>,
// }
//
// impl TreeNode {
// #[inline]
// pub fn new(val: i32) -> Self {
// TreeNode {
// val,
// left: None,
// right: None
// }
// }
// }
use std::rc::Rc;
use std::cell::RefCell;
impl Solution {
    /// Counts the nodes of a COMPLETE binary tree in O(log^2 n): when the
    /// all-left and all-right descents from a node have equal length, that
    /// subtree is perfect and holds 2^h - 1 nodes; otherwise recurse.
    pub fn count_nodes(root: Option<Rc<RefCell<TreeNode>>>) -> i32 {
        match root {
            None => 0,
            Some(node) => {
                let lh = Self::count_height(Some(node.clone()), true);
                let rh = Self::count_height(Some(node.clone()), false);
                if (lh == rh) {
                    // Perfect subtree: closed-form node count.
                    return 2_i32.pow(lh) - 1;
                }
                // Otherwise count both children; at most one of them is
                // non-perfect at each level, bounding the recursion depth.
                return 1 + Self::count_nodes(node.borrow().left.clone()) + Self::count_nodes(node.borrow().right.clone());
            }
        }
    }
    /// Length of the path obtained by always stepping left (is_left = true)
    /// or always stepping right (is_left = false).
    pub fn count_height(node: Option<Rc<RefCell<TreeNode>>>, is_left: bool) -> u32 {
        match node {
            None => 0,
            Some(node) => {
                1 + if is_left {Self::count_height(node.borrow().left.clone(), true)} else {Self::count_height(node.borrow().right.clone(), false)}
            }
        }
    }
}
<file_sep>/**
* The knows API is defined in the parent class Relation.
* isBadVersion(version: number): boolean {
* ...
* };
*/
var solution = function(isBadVersion: any) {
    // Binary search for the first version at which isBadVersion flips to
    // true; returns -1 if the search range is exhausted.
    return function(n: number): number {
        let lo = 1;
        let hi = n;
        while (lo <= hi) {
            const mid = lo + Math.floor((hi - lo) / 2);
            const bad = isBadVersion(mid);
            // Version 1 being bad means there is no earlier good version.
            if (mid === 1 && bad === true) return mid;
            const prevBad = isBadVersion(mid - 1);
            // The first bad version is bad while its predecessor is not.
            if (bad === true && prevBad === false) return mid;
            if (bad === true) hi = mid;
            else lo = mid + 1;
        }
        return -1;
    };
};
<file_sep>function numTrees(n: number): number {
  // Number of structurally unique BSTs on n nodes (the Catalan numbers).
  // The count for a value interval depends only on its LENGTH, so memoize
  // by length: O(n) cache entries instead of the original O(n^2) (l,h) keys.
  const memo = new Map<number, number>();
  const count = (len: number): number => {
    if (len <= 1) return 1; // empty or single-node subtree: one shape
    const cached = memo.get(len);
    if (cached !== undefined) return cached;
    let total = 0;
    // Pick each element as root; the remaining len-1 nodes split into a
    // left subtree of leftLen nodes and a right subtree of the rest.
    for (let leftLen = 0; leftLen < len; leftLen++) {
      total += count(leftLen) * count(len - 1 - leftLen);
    }
    memo.set(len, total);
    return total;
  };
  return count(n);
};
<file_sep>/**
* @param {number[]} nums
* @return {number[]}
*/
var productExceptSelf = function (nums) {
let left = Array(nums.length).fill(1);
let right = Array(nums.length).fill(1);
for (let i = 1; i < nums.length; i++) {
left[i] = left[i - 1] * nums[i - 1];
}
for (let j = nums.length - 2; j >= 0; j--) {
right[j] = right[j + 1] * nums[j + 1];
}
return left.map((x, i) => x * right[i]);
};<file_sep>/**
* Definition for a binary tree node.
* class TreeNode {
* val: number
* left: TreeNode | null
* right: TreeNode | null
* constructor(val?: number, left?: TreeNode | null, right?: TreeNode | null) {
* this.val = (val===undefined ? 0 : val)
* this.left = (left===undefined ? null : left)
* this.right = (right===undefined ? null : right)
* }
* }
*/
function deleteNode(root: TreeNode | null, key: number): TreeNode | null {
    // Deletes `key` from a BST and returns the (possibly new) subtree root.
    if (root === null) return null;
    if (root.val === key) {
        // Zero or one child: promote the other side.
        if (root.left === null) return root.right;
        if (root.right === null) return root.left;
        // Two children: the in-order successor (leftmost node of the right
        // subtree) replaces the deleted node.
        let rightMin = root.right;
        while(rightMin.left) {
            rightMin = rightMin.left;
        }
        // Remove the successor from the right subtree first, then graft the
        // old children onto it (order matters: root.right may have changed).
        root.right = deleteNode(root.right, rightMin.val);
        rightMin.left = root.left;
        rightMin.right = root.right;
        return rightMin;
    }
    // Recurse into the side that can contain `key`.
    if (root.val > key) {
        root.left = deleteNode(root.left, key);
    }
    if (root.val < key) {
        root.right = deleteNode(root.right, key);
    }
    return root;
};
<file_sep>function isValidSudoku(board: string[][]): boolean {
  // Single pass over the grid; a composite string key records every
  // (digit, unit) pair already seen for rows, columns and 3x3 boxes.
  const seen = new Set<string>();
  for (let r = 0; r < 9; r++) {
    for (let c = 0; c < 9; c++) {
      const digit = board[r][c];
      if (digit === '.') continue; // empty cell constrains nothing
      const box = Math.floor(r / 3) * 3 + Math.floor(c / 3);
      const marks = [`r${r}:${digit}`, `c${c}:${digit}`, `b${box}:${digit}`];
      if (marks.some((m) => seen.has(m))) return false;
      marks.forEach((m) => seen.add(m));
    }
  }
  return true;
};
<file_sep>function canConstruct(ransomNote: string, magazine: string): boolean {
  // Count each magazine letter, then spend the counts while scanning the
  // note; any letter we cannot pay for makes the note unbuildable.
  const available = new Map<string, number>();
  for (const ch of magazine) {
    available.set(ch, (available.get(ch) ?? 0) + 1);
  }
  for (const ch of ransomNote) {
    const remaining = available.get(ch) ?? 0;
    if (remaining <= 0) return false;
    available.set(ch, remaining - 1);
  }
  return true;
};
<file_sep>function findMin(numbers: number[]): number {
  // Minimum of a rotated sorted array that may contain duplicates
  // (LeetCode 154 variant): compare against the right edge and shrink.
  const n = numbers.length;
  if (numbers[n - 1] > numbers[0]) return numbers[0]; // not rotated
  let lo = 0;
  let hi = n - 1;
  while (lo < hi) {
    const mid = Math.floor(lo + (hi - lo) / 2);
    if (numbers[mid] > numbers[hi]) {
      lo = mid + 1; // minimum lies strictly right of mid
    } else if (numbers[mid] < numbers[hi]) {
      hi = mid; // minimum is at mid or to its left
    } else {
      hi--; // duplicate of the right edge: safe to discard it
    }
  }
  return numbers[lo];
}
<file_sep># Definition for a binary tree node.
# class TreeNode:
# def __init__(self, val=0, left=None, right=None):
# self.val = val
# self.left = left
# self.right = right
class Solution:
    def preorderTraversal(self, root: Optional[TreeNode]) -> List[int]:
        """Iterative preorder (root, left, right) traversal.

        Values are recorded at push time while sliding down left spines;
        ``visited`` remembers the most recently popped (fully processed)
        node, which tells us whether a right subtree still needs expanding.
        """
        stk = []            # explicit traversal stack of nodes
        visited = None      # last node popped, or None before the first pop
        result = []
        def pushLeft(node: Optional[TreeNode]):
            # Walk the left spine from `node`, recording each value the
            # moment the node is first reached (preorder) and stacking it.
            p = node
            while p:
                result.append(p.val)
                stk.append(p)
                p = p.left
        pushLeft(root)
        while len(stk) != 0:
            cur = stk[len(stk) - 1]
            # Expand the right subtree once the left side is done (absent
            # or just finished) and the right side was not processed yet.
            if (not cur.left or cur.left == visited) and cur.right != visited:
                pushLeft(cur.right)
            # Pop `cur` when its right subtree is absent or fully processed.
            if not cur.right or cur.right == visited:
                visited = stk.pop()
        return result
<file_sep>// Definition for a binary tree node.
// #[derive(Debug, PartialEq, Eq)]
// pub struct TreeNode {
// pub val: i32,
// pub left: Option<Rc<RefCell<TreeNode>>>,
// pub right: Option<Rc<RefCell<TreeNode>>>,
// }
//
// impl TreeNode {
// #[inline]
// pub fn new(val: i32) -> Self {
// TreeNode {
// val,
// left: None,
// right: None
// }
// }
// }
use std::rc::Rc;
use std::cell::RefCell;
use std::collections::HashMap;
impl Solution {
    /// Rebuilds a binary tree from its preorder and inorder traversals.
    ///
    /// Indices are carried as `i32` rather than `usize`: the original
    /// `usize` version computed `p_idx - 1` for the left call, which
    /// underflows whenever a subtree's root is the leftmost inorder
    /// element (e.g. preorder [1,2], inorder [1,2]) — a panic under
    /// debug overflow checks.
    pub fn build_tree(preorder: Vec<i32>, inorder: Vec<i32>) -> Option<Rc<RefCell<TreeNode>>> {
        if preorder.len() == 0 || inorder.len() != preorder.len() {
            return None;
        }
        // Value -> inorder position, for O(1) root lookups (values unique).
        let mut in_map = HashMap::new();
        for i in 0..inorder.len() {
            in_map.insert(inorder[i], i as i32);
        }
        let last = preorder.len() as i32 - 1;
        return Solution::build_tree_dfs(&preorder, 0, last, &in_map, 0, last);
    }
    /// Recursive worker over preorder[pre_left..=pre_right] and
    /// inorder[in_left..=in_right]; empty ranges are signalled by
    /// left bound > right bound (possibly -1).
    fn build_tree_dfs(preorder: &Vec<i32>, pre_left: i32, pre_right: i32, in_map: &HashMap<i32, i32>, in_left: i32, in_right: i32) -> Option<Rc<RefCell<TreeNode>>> {
        if pre_left > pre_right || in_left > in_right {
            return None;
        }
        let val = preorder[pre_left as usize]; // preorder head = subtree root
        let p_idx = *in_map.get(&val).unwrap(); // root position splits inorder
        let mut node = TreeNode::new(val);
        // Left subtree holds p_idx - in_left nodes, taken right after the root.
        node.left = Solution::build_tree_dfs(preorder, pre_left + 1, p_idx - in_left + pre_left, in_map, in_left, p_idx - 1);
        node.right = Solution::build_tree_dfs(preorder, p_idx - in_left + pre_left + 1, pre_right, in_map, p_idx + 1, in_right);
        Some(Rc::new(RefCell::new(node)))
    }
}<file_sep>function maxIceCream(costs: number[], coins: number): number {
const costsCopied = [...costs].sort((a,b) => a-b);
let count = 0, consumed = 0;
for(let cost of costsCopied) {
if (coins < consumed + cost) {
return count;
}
count ++;
consumed += cost;
}
return count;
};
<file_sep>//实体类
// Product interfaces and their per-platform concrete widgets.
export interface Checkbox {}
export interface Button {}
export class WinCheckbox implements Checkbox {}
export class MacCheckbox implements Checkbox {}
export class WinButton implements Button {}
export class MacButton implements Button {}

// Abstract factory: one creator per widget family (Windows, macOS), so
// client code never names a concrete widget class directly.
export interface GUIFactory {
  createCheckbox(): Checkbox;
  createButton(): Button;
}

export class WinGUIFactory implements GUIFactory {
  createButton(): Button {
    return new WinButton();
  }
  createCheckbox(): Checkbox {
    return new WinCheckbox();
  }
}

export class MacGUIFactory implements GUIFactory {
  createButton(): Button {
    return new MacButton();
  }
  createCheckbox(): Checkbox {
    return new MacCheckbox();
  }
}
<file_sep>var findMedianSortedArrays = function (
  nums1: number[],
  nums2: number[]
): number {
  // Median of two sorted arrays in O(log(min(m, n))): binary-search a
  // partition i of the shorter array so that the combined left halves hold
  // exactly ceil((m + n) / 2) elements and every left element <= every
  // right element.
  if (nums1.length > nums2.length) {
    // Make nums1 the shorter array; the search runs over its indices.
    let t = nums1;
    nums1 = nums2;
    nums2 = t;
  }
  let m = nums1.length,
    n = nums2.length;
  let leftCount = ((m + n + 1) / 2) | 0; // size of the combined left half
  let left = 0,
    right = m;
  while (left < right) {
    let i = ((right + left) / 2) | 0; // elements taken from nums1's front
    let j = leftCount - i; // elements taken from nums2's front
    if (nums2[j - 1] > nums1[i]) {
      left = i + 1; // nums1 contributes too few: move partition right
    } else {
      right = i;
    }
  }
  let i = left;
  let j = leftCount - i;
  // Sentinels cover partitions sitting at either end of an array.
  let leftMax1 = i === 0 ? Number.MIN_SAFE_INTEGER : nums1[i - 1];
  let leftMax2 = j === 0 ? Number.MIN_SAFE_INTEGER : nums2[j - 1];
  let rightMin1 = i === m ? Number.MAX_SAFE_INTEGER : nums1[i];
  let rightMin2 = j === n ? Number.MAX_SAFE_INTEGER : nums2[j];
  // Even total: average the two middle values; odd: the left half's max.
  return (m + n) % 2 === 0
    ? (Math.max(leftMax1, leftMax2) + Math.min(rightMin1, rightMin2)) / 2
    : Math.max(leftMax2, leftMax1);
};
<file_sep>// Definition for a binary tree node.
// #[derive(Debug, PartialEq, Eq)]
// pub struct TreeNode {
// pub val: i32,
// pub left: Option<Rc<RefCell<TreeNode>>>,
// pub right: Option<Rc<RefCell<TreeNode>>>,
// }
//
// impl TreeNode {
// #[inline]
// pub fn new(val: i32) -> Self {
// TreeNode {
// val,
// left: None,
// right: None
// }
// }
// }
use std::rc::Rc;
use std::cell::RefCell;
impl Solution {
    /// Returns every structurally unique BST holding the values 1..=n.
    pub fn generate_trees(n: i32) -> Vec<Option<Rc<RefCell<TreeNode>>>> {
        // All BSTs over the inclusive value range l..=h; an empty range
        // yields [None] so the cross-product below still iterates once.
        fn build(l: i32, h: i32) -> Vec<Option<Rc<RefCell<TreeNode>>>> {
            if l > h {
                return vec![None];
            }
            let mut result = vec![];
            for i in l..=h {
                // Root i: combine every left variant with every right variant.
                let mut lefts = build(l, i - 1);
                let mut rights = build(i + 1, h);
                for left in &lefts {
                    for right in &rights {
                        let mut node = TreeNode::new(i);
                        // NOTE(review): subtrees are shared between result
                        // trees via Rc clones, not deep-copied — fine for
                        // read-only consumers; mutating one tree through the
                        // RefCell would affect others.
                        node.left = left.clone();
                        node.right = right.clone();
                        result.push(Some(Rc::new(RefCell::new(node))));
                    }
                }
            }
            return result;
        }
        return build(1, n)
    }
}
<file_sep>impl Solution {
    /// Enumerates every path from node 0 to node n-1 in a DAG given as an
    /// adjacency list, via backtracking DFS.
    pub fn all_paths_source_target(graph: Vec<Vec<i32>>) -> Vec<Vec<i32>> {
        fn dfs(graph: &Vec<Vec<i32>>, node: i32, path: &mut Vec<i32>, out: &mut Vec<Vec<i32>>) {
            path.push(node);
            if node as usize + 1 == graph.len() {
                // Reached the target: snapshot the current path.
                out.push(path.clone());
            } else {
                for &next in &graph[node as usize] {
                    dfs(graph, next, path, out);
                }
            }
            path.pop(); // backtrack before returning to the caller
        }
        let mut paths = Vec::new();
        dfs(&graph, 0, &mut Vec::new(), &mut paths);
        paths
    }
}
<file_sep>/**
* @param {number[]} nums
* @return {number}
*/
var maxProduct = function (nums) {
  // Track both the maximum and minimum product of a subarray ending at each
  // index: a negative factor can flip the smallest product into the largest.
  let best = nums[0];
  let lo = nums[0];
  let hi = nums[0];
  for (let i = 1; i < nums.length; i++) {
    const x = nums[i];
    const candidates = [x, hi * x, lo * x];
    hi = Math.max(...candidates);
    lo = Math.min(...candidates);
    best = Math.max(best, hi);
  }
  return best;
};
<file_sep>#[derive(Debug)]
struct Boolean;

// Boolean compares equal to EVERY bool, so `omnibool == true` and
// `omnibool == false` both hold (the point of the kata).
impl PartialEq<bool> for Boolean {
    fn eq(&self, _: &bool) -> bool {
        true
    }
}

// The kata requires this exact lowercase name, so silence the
// non_upper_case_globals lint instead of renaming.
#[allow(non_upper_case_globals)]
const omnibool: Boolean = Boolean; // perform your magic
<file_sep>/**
* Definition for a binary tree node.
* class TreeNode {
* val: number
* left: TreeNode | null
* right: TreeNode | null
* constructor(val?: number, left?: TreeNode | null, right?: TreeNode | null) {
* this.val = (val===undefined ? 0 : val)
* this.left = (left===undefined ? null : left)
* this.right = (right===undefined ? null : right)
* }
* }
*/
function insertIntoBST(root: TreeNode | null, val: number): TreeNode | null {
    // BST insertion: descend by comparison and hang a fresh leaf at the
    // first empty slot. Values >= the current node go right, matching the
    // iterative original; the (possibly new) root is returned.
    if (root === null) return new TreeNode(val);
    if (val < root.val) {
        root.left = insertIntoBST(root.left, val);
    } else {
        root.right = insertIntoBST(root.right, val);
    }
    return root;
};
<file_sep>/**
* Definition for singly-linked list.
* type ListNode struct {
* Val int
* Next *ListNode
* }
*/
func reverseKGroup(head *ListNode, k int) *ListNode {
    // Reverses the list k nodes at a time; a trailing group shorter than k
    // is left as-is. `hair` is a dummy head so the first group needs no
    // special casing.
    hair:=&ListNode{Next:head}
    pre:=hair
    for head!=nil{
        // Advance tail k nodes ahead of pre; if the group is short, done.
        tail:=pre
        for i:=0;i<k;i++{
            tail=tail.Next;
            if tail==nil{
                return hair.Next
            }
        }
        nxt:=tail.Next
        // Reverse [head, tail] and splice it back between pre and nxt.
        head,tail=reverseLink(head,tail)
        pre.Next=head
        tail.Next=nxt
        pre=tail
        head=nxt
    }
    return hair.Next
}
func reverseLink(head *ListNode,tail *ListNode) (*ListNode,*ListNode){
    // Reverses the sublist [head, tail] in place and returns
    // (new head, new tail) = (tail, head). tail.Next serves both as the
    // initial `pre` (so the reversed segment stays linked to the rest of
    // the list) and, via the pre!=tail test, as the loop's stop condition.
    pre:=tail.Next
    p:=head
    for pre!=tail{
        nxt:=p.Next
        p.Next=pre
        pre=p
        p=nxt
    }
    return tail,head
}<file_sep>class Solution
{
public:
    // Merge sorted B (length n) into sorted A in place; A's first m slots
    // hold data and its tail has room for n more elements.
    void merge(vector<int> &A, int m, vector<int> &B, int n)
    {
        // Fill from the back so no element of A is overwritten before it
        // has been read.
        int a = m - 1, b = n - 1, r = m + n - 1;
        while (a >= 0 && b >= 0)
        {
            if (A[a] > B[b])
                A[r--] = A[a--];
            else
                A[r--] = B[b--];
        }
        // Copy any leftover B elements; leftover A elements are already
        // in their final positions.
        while (b >= 0)
            A[r--] = B[b--];
    }
};<file_sep>/**
* Definition for a binary tree node.
* struct TreeNode {
* int val;
* TreeNode *left;
* TreeNode *right;
* TreeNode(int x) : val(x), left(NULL), right(NULL) {}
* };
*/
class Solution
{
public:
    // Rebuild a binary tree from its preorder and inorder traversals
    // (assumes node values are unique).
    TreeNode *buildTree(vector<int> &preorder, vector<int> &inorder)
    {
        // Map each value to its inorder index for O(1) root lookups.
        unordered_map<int, int> inMap;
        for (int i = 0; i < inorder.size(); i++)
            inMap[inorder[i]] = i;
        return buildTree(preorder, 0, preorder.size() - 1, inMap, 0, inorder.size() - 1);
    }
    // Recursive worker over preorder[preL..preR] / inorder[inL..inR].
    TreeNode *buildTree(vector<int> &preorder, int preL, int preR, unordered_map<int, int> &inMap, int inL, int inR)
    {
        if (preL > preR || inL > inR)
            return NULL; // empty range: no node here
        int val = preorder[preL]; // preorder head is this subtree's root
        int pIdx = inMap[val]; // root's inorder position splits the range
        TreeNode *node = new TreeNode(val);
        // Left subtree holds pIdx - inL nodes, taken right after the root.
        node->left = buildTree(preorder, preL + 1, pIdx - inL + preL, inMap, inL, pIdx - 1);
        node->right = buildTree(preorder, pIdx - inL + preL + 1, preR, inMap, pIdx + 1, inR);
        return node;
    }
};<file_sep># LeetCode 题解
- [LeetCode 题解](#leetcode-%e9%a2%98%e8%a7%a3)
- [算法题](#%e7%ae%97%e6%b3%95%e9%a2%98)
- [简单](#%e7%ae%80%e5%8d%95)
- [1. 两数之和](#1-%e4%b8%a4%e6%95%b0%e4%b9%8b%e5%92%8c)
- [13. 罗马数字转整数](#13-%e7%bd%97%e9%a9%ac%e6%95%b0%e5%ad%97%e8%bd%ac%e6%95%b4%e6%95%b0)
- [14. 最长公共前缀](#14-%e6%9c%80%e9%95%bf%e5%85%ac%e5%85%b1%e5%89%8d%e7%bc%80)
- [15. 三数之和](#15-%e4%b8%89%e6%95%b0%e4%b9%8b%e5%92%8c)
- [20. 有效的括号](#20-%e6%9c%89%e6%95%88%e7%9a%84%e6%8b%ac%e5%8f%b7)
- [21. 合并两个有序链表](#21-%e5%90%88%e5%b9%b6%e4%b8%a4%e4%b8%aa%e6%9c%89%e5%ba%8f%e9%93%be%e8%a1%a8)
- [26. 删除排序数组中的重复项](#26-%e5%88%a0%e9%99%a4%e6%8e%92%e5%ba%8f%e6%95%b0%e7%bb%84%e4%b8%ad%e7%9a%84%e9%87%8d%e5%a4%8d%e9%a1%b9)
- [27. 移除元素](#27-%e7%a7%bb%e9%99%a4%e5%85%83%e7%b4%a0)
- [28. 实现 strStr()](#28-%e5%ae%9e%e7%8e%b0-strstr)
- [66. 加一](#66-%e5%8a%a0%e4%b8%80)
- [67. 二进制求和](#67-%e4%ba%8c%e8%bf%9b%e5%88%b6%e6%b1%82%e5%92%8c)
- [69. x 的平方根](#69-x-%e7%9a%84%e5%b9%b3%e6%96%b9%e6%a0%b9)
- [70. 爬楼梯](#70-%e7%88%ac%e6%a5%bc%e6%a2%af)
- [88. 合并两个有序数组](#88-%e5%90%88%e5%b9%b6%e4%b8%a4%e4%b8%aa%e6%9c%89%e5%ba%8f%e6%95%b0%e7%bb%84)
- [101. 对称二叉树](#101-%e5%af%b9%e7%a7%b0%e4%ba%8c%e5%8f%89%e6%a0%91)
- [104. 二叉树的最大深度](#104-%e4%ba%8c%e5%8f%89%e6%a0%91%e7%9a%84%e6%9c%80%e5%a4%a7%e6%b7%b1%e5%ba%a6)
- [107. 二叉树的层次遍历 II](#107-%e4%ba%8c%e5%8f%89%e6%a0%91%e7%9a%84%e5%b1%82%e6%ac%a1%e9%81%8d%e5%8e%86-ii)
- [108. 将有序数组转换为二叉搜索树](#108-%e5%b0%86%e6%9c%89%e5%ba%8f%e6%95%b0%e7%bb%84%e8%bd%ac%e6%8d%a2%e4%b8%ba%e4%ba%8c%e5%8f%89%e6%90%9c%e7%b4%a2%e6%a0%91)
- [112. 路径总和](#112-%e8%b7%af%e5%be%84%e6%80%bb%e5%92%8c)
- [118. 杨辉三角](#118-%e6%9d%a8%e8%be%89%e4%b8%89%e8%a7%92)
- [119. 杨辉三角 II](#119-%e6%9d%a8%e8%be%89%e4%b8%89%e8%a7%92-ii)
- [121. 买卖股票的最佳时机](#121-%e4%b9%b0%e5%8d%96%e8%82%a1%e7%a5%a8%e7%9a%84%e6%9c%80%e4%bd%b3%e6%97%b6%e6%9c%ba)
- [125. 验证回文串](#125-%e9%aa%8c%e8%af%81%e5%9b%9e%e6%96%87%e4%b8%b2)
- [189. 旋转数组](#189-%e6%97%8b%e8%bd%ac%e6%95%b0%e7%bb%84)
- [169. 多数元素](#169-%e5%a4%9a%e6%95%b0%e5%85%83%e7%b4%a0)
- [190. 颠倒二进制位](#190-%e9%a2%a0%e5%80%92%e4%ba%8c%e8%bf%9b%e5%88%b6%e4%bd%8d)
- [198. 打家劫舍](#198-%e6%89%93%e5%ae%b6%e5%8a%ab%e8%88%8d)
- [202. 快乐数](#202-%e5%bf%ab%e4%b9%90%e6%95%b0)
- [204. 计数质数](#204-%e8%ae%a1%e6%95%b0%e8%b4%a8%e6%95%b0)
- [205. 同构字符串](#205-%e5%90%8c%e6%9e%84%e5%ad%97%e7%ac%a6%e4%b8%b2)
- [226. 翻转二叉树](#226-%e7%bf%bb%e8%bd%ac%e4%ba%8c%e5%8f%89%e6%a0%91)
- [235. 二叉搜索树的最近公共祖先](#235-%e4%ba%8c%e5%8f%89%e6%90%9c%e7%b4%a2%e6%a0%91%e7%9a%84%e6%9c%80%e8%bf%91%e5%85%ac%e5%85%b1%e7%a5%96%e5%85%88)
- [242. 有效的字母异位词](#242-%e6%9c%89%e6%95%88%e7%9a%84%e5%ad%97%e6%af%8d%e5%bc%82%e4%bd%8d%e8%af%8d)
- [258. 各位相加](#258-%e5%90%84%e4%bd%8d%e7%9b%b8%e5%8a%a0)
- [268. 缺失数字](#268-%e7%bc%ba%e5%a4%b1%e6%95%b0%e5%ad%97)
- [283. 移动零](#283-%e7%a7%bb%e5%8a%a8%e9%9b%b6)
- [326. 3的幂](#326-3%e7%9a%84%e5%b9%82)
- [349. 两个数组的交集](#349-%e4%b8%a4%e4%b8%aa%e6%95%b0%e7%bb%84%e7%9a%84%e4%ba%a4%e9%9b%86)
- [389. 找不同](#389-%e6%89%be%e4%b8%8d%e5%90%8c)
- [412. Fizz Buzz](#412-fizz-buzz)
- [461. 汉明距离](#461-%e6%b1%89%e6%98%8e%e8%b7%9d%e7%a6%bb)
- [500. 键盘行](#500-%e9%94%ae%e7%9b%98%e8%a1%8c)
- [504. 七进制数](#504-%e4%b8%83%e8%bf%9b%e5%88%b6%e6%95%b0)
- [509. 斐波那契数](#509-%e6%96%90%e6%b3%a2%e9%82%a3%e5%a5%91%e6%95%b0)
- [520. 检测大写字母](#520-%e6%a3%80%e6%b5%8b%e5%a4%a7%e5%86%99%e5%ad%97%e6%af%8d)
- [543. 二叉树的直径](#543-%e4%ba%8c%e5%8f%89%e6%a0%91%e7%9a%84%e7%9b%b4%e5%be%84)
- [557. 反转字符串中的单词 III](#557-%e5%8f%8d%e8%bd%ac%e5%ad%97%e7%ac%a6%e4%b8%b2%e4%b8%ad%e7%9a%84%e5%8d%95%e8%af%8d-iii)
- [559. N叉树的最大深度](#559-n%e5%8f%89%e6%a0%91%e7%9a%84%e6%9c%80%e5%a4%a7%e6%b7%b1%e5%ba%a6)
- [572. 另一个树的子树](#572-%e5%8f%a6%e4%b8%80%e4%b8%aa%e6%a0%91%e7%9a%84%e5%ad%90%e6%a0%91)
- [589. N叉树的前序遍历](#589-n%e5%8f%89%e6%a0%91%e7%9a%84%e5%89%8d%e5%ba%8f%e9%81%8d%e5%8e%86)
- [590. N叉树的后序遍历](#590-n%e5%8f%89%e6%a0%91%e7%9a%84%e5%90%8e%e5%ba%8f%e9%81%8d%e5%8e%86)
- [637. 二叉树的层平均值](#637-%e4%ba%8c%e5%8f%89%e6%a0%91%e7%9a%84%e5%b1%82%e5%b9%b3%e5%9d%87%e5%80%bc)
- [657. 机器人能否返回原点](#657-%e6%9c%ba%e5%99%a8%e4%ba%ba%e8%83%bd%e5%90%a6%e8%bf%94%e5%9b%9e%e5%8e%9f%e7%82%b9)
- [674. 最长连续递增序列](#674-%e6%9c%80%e9%95%bf%e8%bf%9e%e7%bb%ad%e9%80%92%e5%a2%9e%e5%ba%8f%e5%88%97)
- [703. 数据流中的第K大元素](#703-%e6%95%b0%e6%8d%ae%e6%b5%81%e4%b8%ad%e7%9a%84%e7%ac%ack%e5%a4%a7%e5%85%83%e7%b4%a0)
- [709. 转换成小写字母](#709-%e8%bd%ac%e6%8d%a2%e6%88%90%e5%b0%8f%e5%86%99%e5%ad%97%e6%af%8d)
- [724. 寻找数组的中心索引](#724-%e5%af%bb%e6%89%be%e6%95%b0%e7%bb%84%e7%9a%84%e4%b8%ad%e5%bf%83%e7%b4%a2%e5%bc%95)
- [747. 至少是其他数字两倍的最大数](#747-%e8%87%b3%e5%b0%91%e6%98%af%e5%85%b6%e4%bb%96%e6%95%b0%e5%ad%97%e4%b8%a4%e5%80%8d%e7%9a%84%e6%9c%80%e5%a4%a7%e6%95%b0)
- [832. 翻转图像](#832-%e7%bf%bb%e8%bd%ac%e5%9b%be%e5%83%8f)
- [897. 递增顺序查找树](#897-%e9%80%92%e5%a2%9e%e9%a1%ba%e5%ba%8f%e6%9f%a5%e6%89%be%e6%a0%91)
- [917. 仅仅反转字母](#917-%e4%bb%85%e4%bb%85%e5%8f%8d%e8%bd%ac%e5%ad%97%e6%af%8d)
- [922. 按奇偶排序数组 II](#922-%e6%8c%89%e5%a5%87%e5%81%b6%e6%8e%92%e5%ba%8f%e6%95%b0%e7%bb%84-ii)
- [938. 二叉搜索树的范围和](#938-%e4%ba%8c%e5%8f%89%e6%90%9c%e7%b4%a2%e6%a0%91%e7%9a%84%e8%8c%83%e5%9b%b4%e5%92%8c)
- [961. 重复 N 次的元素](#961-%e9%87%8d%e5%a4%8d-n-%e6%ac%a1%e7%9a%84%e5%85%83%e7%b4%a0)
- [965. 单值二叉树](#965-%e5%8d%95%e5%80%bc%e4%ba%8c%e5%8f%89%e6%a0%91)
- [976. 三角形的最大周长](#976-%e4%b8%89%e8%a7%92%e5%bd%a2%e7%9a%84%e6%9c%80%e5%a4%a7%e5%91%a8%e9%95%bf)
- [977. 有序数组的平方](#977-%e6%9c%89%e5%ba%8f%e6%95%b0%e7%bb%84%e7%9a%84%e5%b9%b3%e6%96%b9)
- [994. 腐烂的橘子](#994-%e8%85%90%e7%83%82%e7%9a%84%e6%a9%98%e5%ad%90)
- [1009. 十进制整数的反码](#1009-%e5%8d%81%e8%bf%9b%e5%88%b6%e6%95%b4%e6%95%b0%e7%9a%84%e5%8f%8d%e7%a0%81)
- [1013. 将数组分成和相等的三个部分](#1013-%e5%b0%86%e6%95%b0%e7%bb%84%e5%88%86%e6%88%90%e5%92%8c%e7%9b%b8%e7%ad%89%e7%9a%84%e4%b8%89%e4%b8%aa%e9%83%a8%e5%88%86)
- [1071. 字符串的最大公因子](#1071-%e5%ad%97%e7%ac%a6%e4%b8%b2%e7%9a%84%e6%9c%80%e5%a4%a7%e5%85%ac%e5%9b%a0%e5%ad%90)
- [1103. 分糖果 II](#1103-%e5%88%86%e7%b3%96%e6%9e%9c-ii)
- [1108. IP 地址无效化](#1108-ip-%e5%9c%b0%e5%9d%80%e6%97%a0%e6%95%88%e5%8c%96)
- [1137. 第 N 个泰波那契数](#1137-%e7%ac%ac-n-%e4%b8%aa%e6%b3%b0%e6%b3%a2%e9%82%a3%e5%a5%91%e6%95%b0)
- [1189. “气球” 的最大数量](#1189-%e6%b0%94%e7%90%83-%e7%9a%84%e6%9c%80%e5%a4%a7%e6%95%b0%e9%87%8f)
- [1200. 最小绝对差](#1200-%e6%9c%80%e5%b0%8f%e7%bb%9d%e5%af%b9%e5%b7%ae)
- [1207. 独一无二的出现次数](#1207-%e7%8b%ac%e4%b8%80%e6%97%a0%e4%ba%8c%e7%9a%84%e5%87%ba%e7%8e%b0%e6%ac%a1%e6%95%b0)
- [1232. 缀点成线](#1232-%e7%bc%80%e7%82%b9%e6%88%90%e7%ba%bf)
- [1249. 移除无效的括号](#1249-%e7%a7%bb%e9%99%a4%e6%97%a0%e6%95%88%e7%9a%84%e6%8b%ac%e5%8f%b7)
- [1281. 整数的各位积和之差](#1281-%e6%95%b4%e6%95%b0%e7%9a%84%e5%90%84%e4%bd%8d%e7%a7%af%e5%92%8c%e4%b9%8b%e5%b7%ae)
- [1287. 有序数组中出现次数超过25%的元素](#1287-%e6%9c%89%e5%ba%8f%e6%95%b0%e7%bb%84%e4%b8%ad%e5%87%ba%e7%8e%b0%e6%ac%a1%e6%95%b0%e8%b6%85%e8%bf%8725%e7%9a%84%e5%85%83%e7%b4%a0)
- [1290. 二进制链表转整数](#1290-%e4%ba%8c%e8%bf%9b%e5%88%b6%e9%93%be%e8%a1%a8%e8%bd%ac%e6%95%b4%e6%95%b0)
- [1299. 将每个元素替换为右侧最大元素](#1299-%e5%b0%86%e6%af%8f%e4%b8%aa%e5%85%83%e7%b4%a0%e6%9b%bf%e6%8d%a2%e4%b8%ba%e5%8f%b3%e4%be%a7%e6%9c%80%e5%a4%a7%e5%85%83%e7%b4%a0)
- [1304. 和为零的N个唯一整数](#1304-%e5%92%8c%e4%b8%ba%e9%9b%b6%e7%9a%84n%e4%b8%aa%e5%94%af%e4%b8%80%e6%95%b4%e6%95%b0)
- [1309. 解码字母到整数映射](#1309-%e8%a7%a3%e7%a0%81%e5%ad%97%e6%af%8d%e5%88%b0%e6%95%b4%e6%95%b0%e6%98%a0%e5%b0%84)
- [5143. 解压缩编码列表](#5143-%e8%a7%a3%e5%8e%8b%e7%bc%a9%e7%bc%96%e7%a0%81%e5%88%97%e8%a1%a8)
- [5291. 统计位数为偶数的数字](#5291-%e7%bb%9f%e8%ae%a1%e4%bd%8d%e6%95%b0%e4%b8%ba%e5%81%b6%e6%95%b0%e7%9a%84%e6%95%b0%e5%ad%97)
- [中等](#%e4%b8%ad%e7%ad%89)
- [2. 两数相加](#2-%e4%b8%a4%e6%95%b0%e7%9b%b8%e5%8a%a0)
- [11. 盛最多水的容器](#11-%e7%9b%9b%e6%9c%80%e5%a4%9a%e6%b0%b4%e7%9a%84%e5%ae%b9%e5%99%a8)
- [24. 两两交换链表中的节点](#24-%e4%b8%a4%e4%b8%a4%e4%ba%a4%e6%8d%a2%e9%93%be%e8%a1%a8%e4%b8%ad%e7%9a%84%e8%8a%82%e7%82%b9)
- [46. 全排列](#46-%e5%85%a8%e6%8e%92%e5%88%97)
- [48. 旋转图像](#48-%e6%97%8b%e8%bd%ac%e5%9b%be%e5%83%8f)
- [49. 字母异位词分组](#49-%e5%ad%97%e6%af%8d%e5%bc%82%e4%bd%8d%e8%af%8d%e5%88%86%e7%bb%84)
- [54. 螺旋矩阵](#54-%e8%9e%ba%e6%97%8b%e7%9f%a9%e9%98%b5)
- [62. 不同路径](#62-%e4%b8%8d%e5%90%8c%e8%b7%af%e5%be%84)
- [63. 不同路径 II](#63-%e4%b8%8d%e5%90%8c%e8%b7%af%e5%be%84-ii)
- [78. 子集](#78-%e5%ad%90%e9%9b%86)
- [94. 二叉树的中序遍历](#94-%e4%ba%8c%e5%8f%89%e6%a0%91%e7%9a%84%e4%b8%ad%e5%ba%8f%e9%81%8d%e5%8e%86)
- [98. 验证二叉搜索树](#98-%e9%aa%8c%e8%af%81%e4%ba%8c%e5%8f%89%e6%90%9c%e7%b4%a2%e6%a0%91)
- [102. 二叉树的层次遍历](#102-%e4%ba%8c%e5%8f%89%e6%a0%91%e7%9a%84%e5%b1%82%e6%ac%a1%e9%81%8d%e5%8e%86)
- [114. 二叉树展开为链表](#114-%e4%ba%8c%e5%8f%89%e6%a0%91%e5%b1%95%e5%bc%80%e4%b8%ba%e9%93%be%e8%a1%a8)
- [133. 克隆图](#133-%e5%85%8b%e9%9a%86%e5%9b%be)
- [148. 排序链表](#148-%e6%8e%92%e5%ba%8f%e9%93%be%e8%a1%a8)
- [151. 翻转字符串里的单词](#151-%e7%bf%bb%e8%bd%ac%e5%ad%97%e7%ac%a6%e4%b8%b2%e9%87%8c%e7%9a%84%e5%8d%95%e8%af%8d)
- [230. 二叉搜索树中第K小的元素](#230-%e4%ba%8c%e5%8f%89%e6%90%9c%e7%b4%a2%e6%a0%91%e4%b8%ad%e7%ac%ack%e5%b0%8f%e7%9a%84%e5%85%83%e7%b4%a0)
- [240. 搜索二维矩阵 II](#240-%e6%90%9c%e7%b4%a2%e4%ba%8c%e7%bb%b4%e7%9f%a9%e9%98%b5-ii)
- [300. 最长上升子序列](#300-%e6%9c%80%e9%95%bf%e4%b8%8a%e5%8d%87%e5%ad%90%e5%ba%8f%e5%88%97)
- [322. 零钱兑换](#322-%e9%9b%b6%e9%92%b1%e5%85%91%e6%8d%a2)
- [429. N叉树的层序遍历](#429-n%e5%8f%89%e6%a0%91%e7%9a%84%e5%b1%82%e5%ba%8f%e9%81%8d%e5%8e%86)
- [450. 删除二叉搜索树中的节点](#450-%e5%88%a0%e9%99%a4%e4%ba%8c%e5%8f%89%e6%90%9c%e7%b4%a2%e6%a0%91%e4%b8%ad%e7%9a%84%e8%8a%82%e7%82%b9)
- [494. 目标和](#494-%e7%9b%ae%e6%a0%87%e5%92%8c)
- [498. 对角线遍历](#498-%e5%af%b9%e8%a7%92%e7%ba%bf%e9%81%8d%e5%8e%86)
- [513. 找树左下角的值](#513-%e6%89%be%e6%a0%91%e5%b7%a6%e4%b8%8b%e8%a7%92%e7%9a%84%e5%80%bc)
- [713. 乘积小于K的子数组](#713-%e4%b9%98%e7%a7%af%e5%b0%8f%e4%ba%8ek%e7%9a%84%e5%ad%90%e6%95%b0%e7%bb%84)
- [814. 二叉树剪枝](#814-%e4%ba%8c%e5%8f%89%e6%a0%91%e5%89%aa%e6%9e%9d)
- [912. 排序数组](#912-%e6%8e%92%e5%ba%8f%e6%95%b0%e7%bb%84)
- [5296. 两棵二叉搜索树中的所有元素](#5296-%e4%b8%a4%e6%a3%b5%e4%ba%8c%e5%8f%89%e6%90%9c%e7%b4%a2%e6%a0%91%e4%b8%ad%e7%9a%84%e6%89%80%e6%9c%89%e5%85%83%e7%b4%a0)
- [困难](#%e5%9b%b0%e9%9a%be)
- [10. 正则表达式匹配](#10-%e6%ad%a3%e5%88%99%e8%a1%a8%e8%be%be%e5%bc%8f%e5%8c%b9%e9%85%8d)
- [32. 最长有效括号](#32-%e6%9c%80%e9%95%bf%e6%9c%89%e6%95%88%e6%8b%ac%e5%8f%b7)
- [41. 缺失的第一个正数](#41-%e7%bc%ba%e5%a4%b1%e7%9a%84%e7%ac%ac%e4%b8%80%e4%b8%aa%e6%ad%a3%e6%95%b0)
- [1250. 检查「好数组」](#1250-%e6%a3%80%e6%9f%a5%e5%a5%bd%e6%95%b0%e7%bb%84)
## 算法题
### 简单
#### 1. 两数之和
> 给定一个整数数组 nums 和一个目标值 target,请你在该数组中找出和为目标值的那 两个 整数,并返回他们的数组下标。
>
> 你可以假设每种输入只会对应一个答案。但是,你不能重复利用这个数组中同样的元素。
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/two-sum
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
一遍哈希,键值对为(数组值,索引)。遍历数组,如果target-nums[i]存在于哈希中,则返回这个值的索引和i;不存在哈希中则插入。
- 时间复杂度On,空间On
##### 代码
```c++
class Solution {
public:
vector<int> twoSum(vector<int>& nums, int target) {
unordered_map<int,int> map;
int size=nums.size();
vector<int> result;
for(int i=0;i<size;i++){
int c=target-nums[i];
if(map.count(c)>0){
result.push_back(map[c]);
result.push_back(i);
return result;
}
map[nums[i]]=i;
}
return result;
}
};
```
#### 13. 罗马数字转整数
> 通常情况下,罗马数字中小的数字在大的数字的右边。但也存在特例,例如 4 不写做 IIII,而是 IV。数字 1 在数字 5 的左边,所表示的数等于大数 5 减小数 1 得到的数值 4 。同样地,数字 9 表示为 IX。这个特殊的规则只适用于以下**六种情况:**
>
> - I 可以放在 V (5) 和 X (10) 的左边,来表示 4 和 9。
>
> - X 可以放在 L (50) 和 C (100) 的左边,来表示 40 和 90。
> - C 可以放在 D (500) 和 M (1000) 的左边,来表示 400 和 900。
>
> 给定一个罗马数字,将其转换成整数。输入确保在 1 到 3999 的范围内。
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/roman-to-integer
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
将负数部分先减去,只剩下正数部分相加即可。
- 负数部分是比后续元素值小的部分,需要减去
- 利用栈,依次压栈,将比当前元素值小的部分弹栈并减去并将当前元素压栈。最后将栈内剩余元素加起来减去负值部分就是答案。
- 复杂度,时间On,空间On
##### 代码
```c++
class Solution {
public:
int romanToInt(string s) {
map<char,int> m;
m['I']=1;
m['V']=5;
m['X']=10;
m['L']=50;
m['C']=100;
m['D']=500;
m['M']=1000;
int r=0;
stack<int> st;
for(int i=0;i<s.size();i++){
if(i==0){
st.push(m[s[i]]);
continue;
}
while(!st.empty()&&st.top()<m[s[i]]){
r-=st.top();
st.pop();
}
st.push(m[s[i]]);
}
while(!st.empty()){
r+=st.top();
st.pop();
}
return r;
}
};
```
#### 14. 最长公共前缀
> 编写一个函数来查找字符串数组中的最长公共前缀。
>
> 如果不存在公共前缀,返回空字符串 `""`。
>
> **示例 1:**
>
> ```bash
> 输入: ["flower","flow","flight"]
> 输出: "fl"
> ```
##### 思路
简单暴力,效率并不低。取第一个字符串当做最大前缀,然后遍历剩余的字符串,比较,取相同的公共前缀当做最大前缀,如果出现了并无相同前缀则直接返回空串,否则继续遍历,剩余的即最大公共前缀。
- 时间O(n*k),空间O(k),k是第一个字符串的长度。
##### 代码
```javascript
/**
* @param {string[]} strs
* @return {string}
*/
var longestCommonPrefix = function(strs) {
if(strs.length===0) return "";
let max=strs[0];
for(let i=1;i<strs.length;i++){
let cur=strs[i];
let j=0;
while(j<cur.length&&j<max.length&&max[j]===cur[j]) j++;
if(j==0) return "";
if(j===max.length) continue;
max=max.slice(0,j);
}
return max;
};
```
#### 15. 三数之和
> 给定一个包含 n 个整数的数组 nums,判断 nums 中是否存在三个元素 a,b,c ,使得 a + b + c = 0 ?找出所有满足条件且不重复的三元组。
>
> **注意:答案中不可以包含重复的三元组。**
>
>
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/3sum
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
和2sum几乎完全不同,首先想到的是暴力解(n^3),显然不行。比较好的思路是排序+双指针。
1. 排除数组长度小于3的情况。
2. 排序数组(nlogn)
3. 遍历数组,当前元素下标设为i,值设为x,已经重复的跳过
1. x>0,后续并无结果,直接返回
2. 双指针设Left=i+1,L为对应值,Right=nums.length-1,R为对应值
3. 如果L+R+x===0则添加到结果集合,并检查接下来是否有重复元素如果有则直接跳过
4. 如果L+R+x<0,则Left++
5. 如果L+R+x>0,则Right--
- 时间复杂度Onlogn,空间O1
##### 代码
```javascript
/**
* @param {number[]} nums
* @return {number[][]}
*/
var threeSum = function(nums) {
let result=[];
if(nums.length<3) return result;
nums.sort((a,b)=>a-b);
for(let i=0;i<nums.length;i++){
if(nums[i]>0) return result;
if(i>0&&nums[i]===nums[i-1]) continue;
let L=i+1,R=nums.length-1;
while(L<R){
if(nums[i]+nums[L]+nums[R]===0){
result.push([nums[i],nums[L],nums[R]]);
while(L<R&&nums[L]===nums[L+1]) L++;
while(L<R&&nums[R]===nums[R-1]) R--;
L+=1;
R-=1;
}else if(nums[i]+nums[L]+nums[R]>0){
R--;
}else{
L++;
}
}
}
return result;
};
```
#### 20. 有效的括号
##### 思路
使用栈,拿到当前元素,如果当前元素和栈顶元素匹配则弹出,否则把当前元素压栈,直到遍历完。如果此时栈不为空则无效,为空则有效。
- 时间复杂度On,空间On
##### 代码
```c++
class Solution {
public:
bool isValid(string s) {
if(!s.size()) return true;
map<char,char> m;
m['(']=')';
m['[']=']';
m['{']='}';
stack<char> st;
for(int i=0;i<s.size();i++){
if(st.empty()){
st.push(s[i]);
continue;
}
char top=st.top();
if(m[top]==s[i]) st.pop();
else st.push(s[i]);
}
return st.empty();
}
};
```
#### 21. 合并两个有序链表
> 将两个有序链表合并为一个新的有序链表并返回。新链表是通过拼接给定的两个链表的所有节点组成的。
##### 思路
数组**升序。**类似于合并有序数组,使用2+1指针,两个指针分别指向两个链表未被合并部分的最小元素,一个指针指向当前已经合并的最大元素。
- 时间复杂度O(m+n),空间复杂度O1
##### 代码
```c++
/**
* Definition for singly-linked list.
* struct ListNode {
* int val;
* ListNode *next;
* ListNode(int x) : val(x), next(NULL) {}
* };
*/
class Solution {
public:
ListNode* mergeTwoLists(ListNode* l1, ListNode* l2) {
ListNode* cur1=l1,*cur2=l2,*head=NULL,*cur=NULL,*tmp;
if(cur1&&cur2){
if(cur1->val>cur2->val){
head=cur=l2;
cur2=cur2->next;
}else{
head=cur=l1;
cur1=cur1->next;
}
//more than 0 is NULL
}else if(cur1){
return cur1;
}else{
return cur2;
}
while(cur1&&cur2){
if(cur1->val<cur2->val){
cur->next=cur1;
cur1=cur1->next;
}else{
cur->next=cur2;
cur2=cur2->next;
}
cur=cur->next;
}
if(cur1) cur->next=cur1;
if(cur2) cur->next=cur2;
return head;
}
};
```
#### 26. 删除排序数组中的重复项
> 给定一个排序数组,你需要在原地删除重复出现的元素,使得每个元素只出现一次,返回移除后数组的新长度。
>
> 不要使用额外的数组空间,你必须在原地修改输入数组并在使用 O(1) 额外空间的条件下完成。
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/remove-duplicates-from-sorted-array
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
更为有效的方式(代码2)是使用双指针法,如下代码1虽使用了双指针,但进行了删除操作,时间上会增加。
- 时间复杂度On,空间O1
##### 代码1
```c++
class Solution {
public:
int removeDuplicates(vector<int>& nums) {
auto i=nums.begin();
while(i!=nums.end()){
auto j=i+1;
int count=0;
while(j!=nums.end()&&*j==*i){
j++;
count++;
}
if(count>0){
nums.erase(i+1,j);
}
i++;
}
return nums.size();
}
};
```
##### 代码2
```c++
class Solution {
public:
int removeDuplicates(vector<int>& nums) {
if(nums.size()==0) return 0;
int i=0;
for(int j=1;j<nums.size();j++){
if(nums[i]!=nums[j]){
i++;
nums[i]=nums[j];
}
}
return i+1;
}
};
```
#### 27. 移除元素
> 给定一个数组 *nums* 和一个值 *val*,你需要**原地**移除所有数值等于 *val* 的元素,返回移除后数组的新长度。
##### 思路
使用双指针,一个指向当前元素,一个指向被删除后前缀数组的最后元素。
- 时间复杂度On,空间O1
##### 代码
```c++
class Solution {
public:
int removeElement(vector<int>& nums, int val) {
int l=0;
for(int i=0;i<nums.size();i++){
if(nums[i]!=val){
nums[l]=nums[i];
l++;
}
}
return l;
}
};
```
#### 28. 实现 strStr()
> 实现 strStr() 函数。
>
> 给定一个 haystack 字符串和一个 needle 字符串,在 haystack 字符串中找出 needle 字符串出现的第一个位置 (从0开始)。如果不存在,则返回 -1。
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/implement-strstr
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
方法一是直接indexOf,方法二是KMP。
##### 代码
```javascript
/**
* @param {string} haystack
* @param {string} needle
* @return {number}
*/
var strStr = function(haystack, needle) {
return haystack.indexOf(needle);
};
```
#### 66. 加一
> 给定一个由整数组成的非空数组所表示的非负整数,在该数的基础上加一。
>
> 最高位数字存放在数组的首位, 数组中每个元素只存储单个数字。
>
> 你可以假设除了整数 0 之外,这个整数不会以零开头。
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/plus-one
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
仅需要处理进一的情况,还有溢出的情况。
- 复杂度:时间On,空间O1
##### 代码
```c++
class Solution {
public:
vector<int> plusOne(vector<int>& digits) {
digits[digits.size()-1]++;
int i=digits.size()-1;
while(digits[i]==10){
digits[i]=0;
i--;
if(i<0) break;
digits[i]++;
}
if(i<0) digits.insert(digits.begin(),1);
return digits;
}
};
```
#### 67. 二进制求和
> 给定两个二进制字符串,返回他们的和(用二进制表示)。
>
> 输入为**非空**字符串且只包含数字 `1` 和 `0`。
##### 思路
从低位向高位加,注意进位
- 时间复杂度O(max(m,n)),空间复杂度O(max(m,n))
##### 代码
```c++
class Solution {
public:
string addBinary(string a, string b) {
int i=a.size()-1,j=b.size()-1;
string result="";
int carry=0;
while(i>=0&&j>=0){
if(a[i]=='1'&&b[j]=='1'){
result.insert(0,carry?"1":"0");
carry=1;
}else if(a[i]=='1'||b[j]=='1'){
if(carry==1){
result.insert(0,"0");
}else{
result.insert(0,"1");
}
}else{
result.insert(0,carry?"1":"0");
carry=0;
}
i--;
j--;
}
while(i>=0){
if(a[i]=='1'){
if(carry==1){
result.insert(0,"0");
}else{
result.insert(0,"1");
}
}else{
result.insert(0,carry?"1":"0");
carry=0;
}
i--;
}
while(j>=0){
if(b[j]=='1'){
if(carry==1){
result.insert(0,"0");
}else{
result.insert(0,"1");
}
}else{
result.insert(0,carry?"1":"0");
carry=0;
}
j--;
}
if(carry==1) result.insert(0,"1");
return result;
}
};
```
#### 69. x 的平方根
> 实现 int sqrt(int x) 函数。
>
> 计算并返回 x 的平方根,其中 x 是非负整数。
>
> 由于返回类型是整数,结果只保留整数的部分,小数部分将被舍去。
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/sqrtx
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
利用二分查找(还有牛顿法,没整,感觉logn挺好了),除了开始的几个数外,总有$\sqrt{x} \le \frac{x}{2}$。右边界从这开始找就行了。
- 时间复杂度Ologn,空间O1
##### 代码
```c++
class Solution {
public:
int mySqrt(int x) {
int left=0,right=x;
if(x>4) right=x/2;
while(left<=right){
long mid=left+(right-left)/2;
long s=mid*mid;
long s1=(mid+1)*(mid+1);
if(s>x) right=mid-1;
else if(s<=x&&s1>x) return mid;
else left=mid+1;
}
return -1;
}
};
```
#### 70. 爬楼梯
> 假设你正在爬楼梯。需要 *n* 阶你才能到达楼顶。
>
> 每次你可以爬 1 或 2 个台阶。你有多少种不同的方法可以爬到楼顶呢?
>
> **注意:**给定 *n* 是一个正整数。
##### 思路
斐波那契数列,利用动态规划`dp[i]=dp[i-1]+dp[i-2]`。
- 时间复杂度On,空间O1
##### 代码
- 动态规划
```c++
class Solution {
public:
int climbStairs(int n) {
if(n<3) return n;
long f1=1,f2=2,r=0;
while(n>2){
f2= f1 +f2;
f1 = f2-f1;
n--;
}
return f2;
}
};
```
- 公式法
```javascript
/**
* @param {number} n
* @return {number}
*/
var climbStairs = function(n) {
let sqrtFive=Math.sqrt(5);
phi=(1+sqrtFive)/2;
return ((phi**(n+1))-((1-phi)**(n+1)))/sqrtFive;
};
```
#### 88. 合并两个有序数组
> 给定两个有序整数数组 nums1 和 nums2,将 nums2 合并到 nums1 中,使得 nums1 成为一个有序数组。
>
> **说明:**
>
> - 初始化 nums1 和 nums2 的元素数量分别为 m 和 n。
>
> - 你可以假设 nums1 有足够的空间(空间大小大于或等于 m + n)来保存 nums2 中的元素。
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/merge-sorted-array
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
和合并两个有序链表类似,使用2+1指针,2个指针分别指向两个数组的最小待合并元素,1个指针指向已合并元素的最后位置。
- 时间复杂度O(m+n),空间复杂度O(m+n)
##### 代码
```c++
class Solution {
public:
void merge(vector<int>& nums1, int m, vector<int>& nums2, int n) {
if(!nums2.size()) return;
if(!nums1.size()){
nums1=nums2;
return;
}
vector<int> r;
int i=0,j=0;
while(i<m&&j<n){
if(nums1[i]<nums2[j]){
r.push_back(nums1[i]);
i++;
}else{
r.push_back(nums2[j]);
j++;
}
}
while(i<m){
r.push_back(nums1[i]);
i++;
}
while(j<n){
r.push_back(nums2[j]);
j++;
}
nums1=r;
}
};
```
#### 101. 对称二叉树
> 给定一个二叉树,检查它是否是镜像对称的。
##### 思路
利用递归,自顶向下的递归。先判断当前节点对的情况,然后判断子节点对的情况,子节点对的情况依赖于其父节点对的参数。
- 时间复杂度On,空间On
##### 代码
```c++
/**
* Definition for a binary tree node.
* struct TreeNode {
* int val;
* TreeNode *left;
* TreeNode *right;
* TreeNode(int x) : val(x), left(NULL), right(NULL) {}
* };
*/
class Solution {
public:
bool isSymmetric(TreeNode* root) {
if(!root) return true;
return top_down(root->left,root->right);
}
private:
bool top_down(TreeNode* x,TreeNode*y){
if(!x&&!y) return true;
if(!x||!y) return false;
if(x->val!=y->val) return false;
return top_down(x->left,y->right)&&top_down(x->right,y->left);
}
};
```
#### 104. 二叉树的最大深度
>给定一个二叉树,找出其最大深度。
>
>二叉树的深度为根节点到最远叶子节点的最长路径上的节点数。
>
>**说明**: 叶子节点是指没有子节点的节点。
>
>
>
>来源:力扣(LeetCode)
>链接:https://leetcode-cn.com/problems/maximum-depth-of-binary-tree
>著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
使用递归实现,二叉树的最大深度就是1+左右子树的最大深度。
- 时间复杂度On,空间复杂度On
##### 代码
```c++
/**
* Definition for a binary tree node.
* struct TreeNode {
* int val;
* TreeNode *left;
* TreeNode *right;
* TreeNode(int x) : val(x), left(NULL), right(NULL) {}
* };
*/
class Solution {
public:
int maxDepth(TreeNode* root) {
if(!root) return 0;
return 1+max(maxDepth(root->left),maxDepth(root->right));
}
int max(int left,int right){
return left>right?left:right;
}
};
```
#### 107. 二叉树的层次遍历 II
> 给定一个二叉树,返回其节点值自底向上的层次遍历。 (即按从叶子节点所在层到根节点所在的层,逐层从左向右遍历)
##### 思路
简单粗暴,层次遍历然后reverse。
- 时间复杂度On,空间On
##### 代码
```c++
/**
* Definition for a binary tree node.
* struct TreeNode {
* int val;
* TreeNode *left;
* TreeNode *right;
* TreeNode(int x) : val(x), left(NULL), right(NULL) {}
* };
*/
class Solution {
public:
vector<vector<int>> levelOrderBottom(TreeNode* root) {
queue<TreeNode*> q;
q.push(root);
vector<vector<int>> result;
if(!root) return result;
while(!q.empty()){
int size=q.size();
vector<int> v;
for(int i=0;i<size;i++){
TreeNode*cur=q.front();
v.push_back(cur->val);
q.pop();
if(cur->left) q.push(cur->left);
if(cur->right) q.push(cur->right);
}
result.push_back(v);
}
int i=0,j=result.size()-1;
while(i<j){
auto tmp=result[i];
result[i]=result[j];
result[j]=tmp;
i++;
j--;
}
return result;
}
};
```
#### 108. 将有序数组转换为二叉搜索树
> 将一个按照升序排列的有序数组,转换为一棵高度平衡二叉搜索树。
>
> 本题中,一个高度平衡二叉树是指一个二叉树*每个节点* 的左右两个子树的高度差的绝对值不超过 1。
>
>
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/convert-sorted-array-to-binary-search-tree
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
利用二分查找构造的二叉树一定是高度平衡的二叉树。
- 时间复杂度On,空间复杂度Ologn
##### 代码
```c++
/**
* Definition for a binary tree node.
* struct TreeNode {
* int val;
* TreeNode *left;
* TreeNode *right;
* TreeNode(int x) : val(x), left(NULL), right(NULL) {}
* };
*/
class Solution {
public:
TreeNode* sortedArrayToBST(vector<int>& nums) {
int left=0,right=nums.size()-1;
return build(nums,left,right);
}
TreeNode* build(vector<int>& nums,int left,int right){
if(left>right) return NULL;
int mid=left+(right-left)/2;
TreeNode * cur= new TreeNode(nums[mid]);
cur->left=build(nums,left,mid-1);
cur->right=build(nums,mid+1,right);
return cur;
}
};
```
#### 112. 路径总和
> 给定一个二叉树和一个目标和,判断该树中是否存在根节点到叶子节点的路径,这条路径上所有节点值相加等于目标和。
>
> **说明:** 叶子节点是指没有子节点的节点。
##### 思路
采用递归,自顶向下的递归。先访问当前节点,然后子节点的状态需要父节点传入参数。
- 时间复杂度On。空间复杂度一般Ologn,最差On
##### 代码
```c++
/**
* Definition for a binary tree node.
* struct TreeNode {
* int val;
* TreeNode *left;
* TreeNode *right;
* TreeNode(int x) : val(x), left(NULL), right(NULL) {}
* };
*/
class Solution {
public:
bool hasPathSum(TreeNode* root, int sum) {
if(!root&&sum==0) return false;
return top_down(root,sum);
}
bool top_down(TreeNode* root, int sum){
if(!root&&sum!=0) return false;
if(!root) return true;
int nextSum=sum-root->val;
if(root->left&&root->right) return top_down(root->left,nextSum)||top_down(root->right,nextSum);
if(root->left) return top_down(root->left,nextSum);
return top_down(root->right,nextSum);
}
};
```
#### 118. 杨辉三角
> 给定一个非负整数 *numRows,*生成杨辉三角的前 *numRows* 行。
>
> 
>
> 在杨辉三角中,每个数是它左上方和右上方的数的和。
>
> **示例:**
>
> ```text
> 输入: 5
> 输出:
> [
> [1],
> [1,1],
> [1,2,1],
> [1,3,3,1],
> [1,4,6,4,1]
> ]
>
> ```
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/pascals-triangle
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
递归或者动态规划,找到基本情况(**每行第一个和最后一个是1**)和递推关系(**`a[i][j]=a[i-1][j-1]+a[i-1][j]`**)。
- 时间复杂度O(n^2),空间O1
##### 代码
```javascript
/**
* @param {number} numRows
* @return {number[][]}
*/
var generate = function(numRows) {
if(numRows<=0) return [];
let result=[];
for(let i=0;i<numRows;i++){
let tmp=[];
for(let j=0;j<i+1;j++){
if(j==0||j==i) tmp.push(1);
else tmp.push(result[i-1][j-1]+result[i-1][j]);
}
result.push(tmp);
}
return result;
};
```
#### 119. 杨辉三角 II
> 给定一个非负索引 *k*,其中 *k* ≤ 33,返回杨辉三角的第 *k* 行。
>
> 
>
> 在杨辉三角中,每个数是它左上方和右上方的数的和。
>
> **示例:**
>
> ```
> 输入: 3
> 输出: [1,3,3,1]
> ```
>
> **进阶:**
>
> 你可以优化你的算法到 *O*(*k*) 空间复杂度吗?
>
>
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/pascals-triangle-ii/
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
只返回第K行的元素,为了避免创建二维数组,可以使用一维进行优化,思考每次在数组末尾新添加一个元素从后向前计算,这样不会覆盖原来的结果。
- 时间复杂度O(k^2),空间O(k)
##### 代码
```javascript
/**
* @param {number} rowIndex
* @return {number[]}
*/
var getRow = function(rowIndex) {
let result=[];
for(let i=0;i<rowIndex;i++){
result.push(1);
for(let j=i;j>0;j--){
result[j]=result[j]+result[j-1];
}
}
result.push(1);
return result;
};
```
#### 121. 买卖股票的最佳时机
> 给定一个数组,它的第 i 个元素是一支给定股票第 i 天的价格。
>
> 如果你最多只允许完成一笔交易(即买入和卖出一支股票),设计一个算法来计算你所能获取的最大利润。
>
> 注意你不能在买入股票前卖出股票。
>
> **示例 1:**
>
> 输入: [7,1,5,3,6,4]
> 输出: 5
> 解释: 在第 2 天(股票价格 = 1)的时候买入,在第 5 天(股票价格 = 6)的时候卖出,最大利润 = 6-1 = 5 。
> 注意利润不能是 7-1 = 6, 因为卖出价格需要大于买入价格。
>
>
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/best-time-to-buy-and-sell-stock
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
动态规划,当前的利润等于当前股票价格减去前面股票价格最小值,即`cur=prices[i]-minBefore`,最大利润是这些利润的最大值。
- 时间On,空间O1
##### 代码
```c++
class Solution {
public:
int maxProfit(vector<int>& prices) {
int max=0;
if(prices.size()==0) return max;
int minBefore=prices[0];
for(int i=1;i<prices.size();i++){
if(minBefore>prices[i-1]) minBefore=prices[i-1];
if(max<prices[i]-minBefore) max=prices[i]-minBefore;
}
return max;
}
};
```
#### 125. 验证回文串
> 给定一个字符串,验证它是否是回文串,只考虑字母和数字字符,可以忽略字母的大小写。
>
> 说明:本题中,我们将空字符串定义为有效的回文串。
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/valid-palindrome
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
利用双指针,分别从前往后找和从后往前找,如果当前都是字母则比较,不同则false,如果都相同则true
- 时间复杂度On,空间复杂度O1
##### 代码
```c++
class Solution {
public:
bool isPalindrome(string s) {
if(s.size()<=1) return true;
int left=0,right=s.size()-1;
transform(s.begin(),s.end(),s.begin(),::tolower);
while(left<right){
while(left<s.size()&&!isAlphaDigit(s[left])) left++;
while(right>=0&&!isAlphaDigit(s[right])) right--;
if(left>=s.size()||right<0) break;
if(s[left]!=s[right]) return false;
left++;
right--;
}
return true;
}
bool isAlphaDigit(char c){
return (c>='0'&&c<='9')||(c>='a'&&c<='z');
}
};
```
#### 189. 旋转数组
> 给定一个数组,将数组中的元素向右移动 *k* 个位置,其中 *k* 是非负数。
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/rotate-array
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
利用额外数组,将前面部分移动到后面,再将前面的清除。空间复杂度是On,对应**代码1**
- 时间复杂度On,空间复杂度On
还有更高效的方法就是利用翻转,不使用额外空间。对应**代码2**
```
原始数组 : 1 2 3 4 5 6 7
反转所有数字后 : 7 6 5 4 3 2 1
反转前 k 个数字后 : 5 6 7 4 3 2 1
反转后 n-k 个数字后 : 5 6 7 1 2 3 4 --> 结果
```
- 时间复杂度On,空间复杂度O1
##### 代码1
```c++
class Solution {
public:
void rotate(vector<int>& nums, int k) {
k=k%nums.size();
if(k==0) return;
int size=nums.size();
for(int i=0;i<size-k;i++){
nums.push_back(nums[i]);
}
nums.erase(nums.begin(),nums.begin()+size-k);
}
};
```
##### 代码2
```c++
class Solution {
public:
void rotate(vector<int>& nums, int k) {
k=k%nums.size();
if(k==0) return;
reverse(nums.begin(),nums.end());
reverse(nums.begin(),nums.begin()+k);
reverse(nums.begin()+k,nums.end());
}
};
```
#### 169. 多数元素
> 给定一个大小为 n 的数组,找到其中的多数元素。多数元素是指在数组中出现次数大于 ⌊ n/2 ⌋ 的元素。
>
> 你可以假设数组是非空的,并且给定的数组总是存在多数元素。
>
> **示例 1:**
>
> ```
> 输入: [3,2,3]
> 输出: 3
> ```
>
> **示例 2:**
>
> ```
> 输入: [2,2,1,1,1,2,2]
> 输出: 2
> ```
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/majority-element
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
使用哈希映射计数,计数过程中计算。
- 时间复杂度On,空间On
##### 代码
```javascript
/**
* @param {number[]} nums
* @return {number}
*/
var majorityElement = function(nums) {
let threshold = Math.floor(nums.length/2);
let m=new Map();
for(let i=0;i<nums.length;i++){
if(!m.has(nums[i])){
m.set(nums[i],1);
}else{
m.set(nums[i],m.get(nums[i])+1)
}
if(m.get(nums[i])>threshold) return nums[i];
}
return -1;
};
```
#### 190. 颠倒二进制位
> 颠倒给定的 32 位无符号整数的二进制位。
>
>
>
> **示例 1**:
>
> ```
> 输入: 00000010100101000001111010011100
> 输出: 00111001011110000010100101000000
> 解释: 输入的二进制串 00000010100101000001111010011100 表示无符号整数 43261596,
> 因此返回 964176192,其二进制表示形式为 00111001011110000010100101000000。
> ```
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/reverse-bits
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
利用tostring和parseInt函数,另外前面补〇用substr函数。
- 时间复杂度O1,空间O1
##### 代码
```javascript
/**
* @param {number} n - a positive integer
* @return {number} - a positive integer
*/
var reverseBits = function(n) {
return parseInt(('00000000000000000000000000000000'+n.toString(2)).substr(-32).split('').reverse().join(''),2);
};
```
#### 198. 打家劫舍
>你是一个专业的小偷,计划偷窃沿街的房屋。每间房内都藏有一定的现金,影响你偷窃的唯一制约因素就是相邻的房屋装有相互连通的防盗系统,如果两间相邻的房屋在同一晚上被小偷闯入,系统会自动报警。
>
>给定一个代表每个房屋存放金额的非负整数数组,计算你在不触动警报装置的情况下,能够偷窃到的最高金额。
>
>**示例 1:**
>
>```text
>输入: [1,2,3,1]
>输出: 4
>解释: 偷窃 1 号房屋 (金额 = 1) ,然后偷窃 3 号房屋 (金额 = 3)。
> 偷窃到的最高金额 = 1 + 3 = 4 。
>```
>
>来源:力扣(LeetCode)
>链接:https://leetcode-cn.com/problems/house-robber
>著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
动态规划,关键是找到状态转移方程。
考虑如下情况:
1. `i=0`时,`f(i)=nums[i]`
2. `i=1`时,`f(i)=max{nums[1],nums[0]}`
3. `i=2`时,`f(i)=max{nums[2]+f(0),f(1)}`
总结如下,
**`f(i)=max{nums[i]+f(i-2),nums[i-1]}`**
考虑令`f(-2)=f(-1)=0`比较容易实现。
- 时间复杂度On,空间O1
##### 代码
```c++
class Solution {
public:
int rob(vector<int>& nums) {
int p=0,q=0,r=0;
for(int i=0;i<nums.size();i++){
r=max(nums[i]+p,q);
p=q;
q=r;
}
return r;
}
};
```
#### 202. 快乐数
> 编写一个算法来判断一个数是不是“快乐数”。
>
> 一个“快乐数”定义为:对于一个正整数,每一次将该数替换为它每个位置上的数字的平方和,然后重复这个过程直到这个数变为 1,也可能是无限循环但始终变不到 1。如果可以变为 1,那么这个数就是快乐数。
>
>
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/happy-number
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
利用哈希集合,计算当前值sum,如果当前值为1则返回`true`,如果当前值已经出现过(`set`中存在)则返回false,否则下一次使用这个sum值并把sum值加入到集合内。
- 时间复杂度未知。。空间复杂度未知。。
##### 代码
```javascript
/**
* @param {number} n
* @return {boolean}
*/
var isHappy = function(n) {
if(n==1) return true;
const s = new Set();
s.add(n);
let cur=n;
while(true){
let sum=0;
while(cur!=0){
let mod=cur%10;
sum+=mod**2;
cur=parseInt(cur/10);
}
if(sum==1) return true;
if(s.has(sum)) return false;
s.add(sum);
cur=sum;
}
return false;
};
```
#### 204. 计数质数
> 统计所有小于非负整数 n 的质数的数量。
>
> 示例:
>
> ```
> 输入: 10
> 输出: 4
> 解释: 小于 10 的质数一共有 4 个, 它们是 2, 3, 5, 7 。
> ```
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/count-primes
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
使用[Sieve of Eratosthenes](https://en.wikipedia.org/wiki/Sieve_of_Eratosthenes)方法,额外使用n大小的数组来标记是否是质数。
x的所有倍数都不是质数。x=2,3,4...
所有标记完成后,剩下的都是质数,除了0和1。
- 时间复杂度On,空间 On
```javascript
/**
* @param {number} n
* @return {number}
*/
var countPrimes = function(n) {
let marks=Array(n).fill(true);
let sqrtn=Math.sqrt(n);
for(let i=2;i<sqrtn;i++){
if(!marks[i]) continue;
for(let j=i**2;j<n;j+=i){
marks[j]=false;
}
}
let r=marks.filter(x=>x).length;
return r<2?0:r-2;
};
```
#### 205. 同构字符串
> 给定两个字符串 s 和 t,判断它们是否是同构的。
>
> 如果 s 中的字符可以被替换得到 t ,那么这两个字符串是同构的。
>
> 所有出现的字符都必须用另一个字符替换,同时保留字符的顺序。两个字符不能映射到同一个字符上,但字符可以映射自己本身。
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/isomorphic-strings
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
不考虑技巧,直观思路,同构根据题意可知一个字符能够映射到唯一的字符(x对应唯一的y),并且两个字符不能映射到同一个字符(y对应唯一的x),即两个字符一一对应,即双射。
- 时间复杂度On,空间On
##### 代码
```c++
class Solution {
public:
bool isIsomorphic(string s, string t) {
map<char,char> m1,m2;
for(int i=0;i<s.size();i++){
if(m1.count(s[i])){
if(m1[s[i]]!=t[i]) return false;
}else{
m1[s[i]]=t[i];
}
if(m2.count(t[i])){
if(m2[t[i]]!=s[i]) return false;
}else{
m2[t[i]]=s[i];
}
}
return true;
}
};
```
#### 226. 翻转二叉树
> 翻转一棵二叉树。
##### 思路
递归。交换当前左右孩子,递归左孩子和右孩子。
- 时间复杂度On,空间Ologn(假设树是相对平衡的)
##### 代码
```javascript
/**
* Definition for a binary tree node.
* function TreeNode(val) {
* this.val = val;
* this.left = this.right = null;
* }
*/
/**
* @param {TreeNode} root
* @return {TreeNode}
*/
var invertTree = function(root) {
if(root===null) return root;
let tmp=root.left;
root.left=root.right;
root.right=tmp;
invertTree(root.left);
invertTree(root.right);
return root;
};
```
#### 235. 二叉搜索树的最近公共祖先
> 给定一个二叉搜索树, 找到该树中两个指定节点的最近公共祖先。
>
> 百度百科中最近公共祖先的定义为:“对于有根树 T 的两个结点 p、q,最近公共祖先表示为一个结点 x,满足 x 是 p、q 的祖先且 x 的深度尽可能大(一个节点也可以是它自己的祖先)。”
>
> 例如,给定如下二叉搜索树: root = [6,2,8,0,4,7,9,null,null,3,5]
>
> 
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/lowest-common-ancestor-of-a-binary-search-tree
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
最近的祖先元素一定在中序遍历的p和q之间的节点。共同祖先便是这些结点中前序遍历最先出现的。
- 时间On,空间On
##### 代码
```javascript
/**
* Definition for a binary tree node.
* function TreeNode(val) {
* this.val = val;
* this.left = this.right = null;
* }
*/
/**
* @param {TreeNode} root
* @param {TreeNode} p
* @param {TreeNode} q
* @return {TreeNode}
*/
// Two-pass iterative traversal (note: does NOT exploit the BST ordering):
// pass 1 (inorder) collects every node emitted between the first and the
// second of {p, q} into `tmp` — the LCA must lie in that span; pass 2
// (preorder) returns the first node of `tmp` reached, i.e. the shallowest
// one, which is the lowest common ancestor.
var lowestCommonAncestor = function(root, p, q) {
    //the first node in preorder of the sequence for inorder between p&q
    let s=[];                       // explicit stack shared by both passes
    let visited=new Set();          // nodes already expanded (pushed children)
    if(root===null) return null;
    let tmp=new Set();              // inorder nodes between p and q, inclusive
    s.push(root);
    let flag=0;                     // how many of {p, q} emitted so far
    while(s.length!==0){
        let cur=s.pop();
        if(!visited.has(cur)){
            // First touch: re-push as right, self, left so the node is
            // emitted in inorder position on its second pop.
            visited.add(cur);
            if(cur.right) s.push(cur.right);
            s.push(cur);
            if(cur.left) s.push(cur.left);
        }else{
            // Second pop: `cur` is emitted in inorder order here.
            if(cur===p||cur===q){
                flag++;
            }
            if(flag>0){
                tmp.add(cur);       // inside the p..q span (endpoints included)
            }
            if(flag===2) break;     // both targets seen; span complete
        }
    }
    // Pass 2: preorder walk; the first span member visited is the LCA.
    s=[];
    visited.clear();
    s.push(root);
    while(s.length!==0){
        let cur=s.pop();
        if(!visited.has(cur)){
            visited.add(cur);
            // Self pushed last => popped (and emitted) before its children.
            if(cur.right) s.push(cur.right);
            if(cur.left) s.push(cur.left);
            s.push(cur);
        }else{
            if(tmp.has(cur)) return cur;
        }
    }
    return null;                    // unreachable when p and q are in the tree
};
```
#### 242. 有效的字母异位词
> 给定两个字符串 *s* 和 *t* ,编写一个函数来判断 *t* 是否是 *s* 的字母异位词。
##### 思路
排序后字符串相同,但是时间复杂度较高。
- 时间复杂度Onlogn,空间On
更有效的方法是字母计数,然后比较次数。在此就不写了。
- 时间复杂度On,空间O1
##### 代码
```javascript
/**
 * Two strings are anagrams iff they sort to the same character sequence.
 * @param {string} s
 * @param {string} t
 * @return {boolean}
 */
var isAnagram = function(s, t) {
    const canon = str => [...str].sort().join('');
    return canon(s) === canon(t);
};
```
#### 258. 各位相加
> 给定一个非负整数 num,反复将各个位上的数字相加,直到结果为一位数。
>
> **示例:**
>
> ```
> 输入: 38
> 输出: 2
> 解释: 各位相加的过程为:3 + 8 = 11, 1 + 1 = 2。 由于 2 是一位数,所以返回 2。
> ```
>
> **进阶:**
> 你可以不使用循环或者递归,且在 O(1) 时间复杂度内解决这个问题吗?
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/add-digits
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
使用字符串分割循环计算。
- 时间复杂度不确定,空间O1
##### 代码
```javascript
/**
 * Repeatedly sum the digits of num until a single digit remains.
 * Uses the digital-root identity instead of the string-splitting loop,
 * answering the problem's O(1) follow-up: for num > 0 the result cycles
 * through 1..9 with period 9, and only num === 0 maps to 0.
 * @param {number} num non-negative integer
 * @return {number} single digit in 0..9
 */
var addDigits = function(num) {
    return num === 0 ? 0 : 1 + (num - 1) % 9;
};
```
#### 268. 缺失数字
> 给定一个包含 0, 1, 2, ..., n 中 n 个数的序列,找出 0 .. n 中没有出现在序列中的那个数。
>
> **示例 1:**
>
> ```
> 输入: [3,0,1]
> 输出: 2
> ```
>
>
> **示例 2:**
>
> ```
> 输入: [9,6,4,2,3,5,7,0,1]
> 输出: 8
> ```
>
> 说明:
> 你的算法应具有线性时间复杂度。你能否仅使用额外常数空间来实现?
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/missing-number
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
因为是连续的n+1个数缺少了其中1个,因此可以将连续的数求和减去缺少数字的序列之和,差值就是结果。
- 时间复杂度On,空间O1
##### 代码
```javascript
/**
 * Find the one value of 0..n absent from nums (length n).
 * Expected sum n*(n+1)/2 minus the actual sum is the missing number.
 * Fixes: reduce now has an initial value (the original threw a TypeError
 * on an empty array) and the stray parseInt-on-a-number is gone.
 * @param {number[]} nums distinct values drawn from 0..n
 * @return {number}
 */
var missingNumber = function(nums) {
    const n = nums.length;
    return n * (n + 1) / 2 - nums.reduce((sum, x) => sum + x, 0);
};
```
#### 283. 移动零
> 给定一个数组 nums,编写一个函数将所有 0 移动到数组的末尾,同时保持非零元素的相对顺序。
>
>
> 说明:
>
> 必须在原数组上操作,不能拷贝额外的数组。
> 尽量减少操作次数。
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/move-zeroes
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
利用双指针,一个指向“新”数组的当前元素,一个指向“旧”数组的非零元素。最后将新数组末尾全部置0即可。
- 时间复杂度On,空间复杂度O1
##### 代码
```c++
class Solution {
public:
void moveZeroes(vector<int>& nums) {
int size=nums.size();
int i=0,j=0;
for(;j<size;j++){
if(nums[j]!=0){
nums[i++]=nums[j];
}
}
while(i<size){
nums[i]=0;
i++;
}
}
};
```
#### 326. 3的幂
> 给定一个整数,写一个函数来判断它是否是 3 的幂次方。
>
>
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/power-of-three
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
使用循环和递归,循环除以3判断最后是否包含小数。下面这个例子使用的是换底公式。判断$\log_3n=\frac{\ln n}{\ln3}$是否是整数。但是有坑,比如有个测试用例27,使用`Math.log(27)/Math.log(3)`计算结果是`3.0000000000000004`,参考You dont konw js:number了解可以使用`Number.EPSILON`来粗略表示相等。
- 时间复杂度O1,空间O1
##### 代码
```javascript
/**
 * Power-of-three test without floating point.
 * 3^19 = 1162261467 is the largest power of 3 inside 32-bit signed range,
 * and 3 is prime, so n is a power of three iff n > 0 and n divides 3^19.
 * Replaces the Math.log ratio + Number.EPSILON tolerance hack, which the
 * original notes was already fragile (e.g. log(27)/log(3) = 3.0000000000000004).
 * @param {number} n 32-bit signed integer
 * @return {boolean}
 */
var isPowerOfThree = function(n) {
    return n > 0 && 1162261467 % n === 0;
};
```
#### 349. 两个数组的交集
> 给定两个数组,编写一个函数来计算它们的交集。
##### 思路
利用集合和filter。
- 时间复杂度O(n+m),空间O(n+m)
##### 代码
```javascript
/**
 * Intersection of two arrays, each value appearing at most once.
 * Deduplicate nums1 via a Set (order = first occurrence in nums1) and
 * keep the values also present in nums2. The original's unused local
 * `result` has been removed.
 * @param {number[]} nums1
 * @param {number[]} nums2
 * @return {number[]}
 */
var intersection = function(nums1, nums2) {
    const s2 = new Set(nums2);
    return [...new Set(nums1)].filter(x => s2.has(x));
};
```
#### 389. 找不同
> 给定两个字符串 s 和 t,它们只包含小写字母。
>
> 字符串 t 由字符串 s 随机重排,然后在随机位置添加一个字母。
>
> 请找出在 t 中被添加的字母。
>
>
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/find-the-difference
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
首先想到的是哈希计数,把第一个字符串每个字符统计个数,第二个字符串减,最后判断那个字符已经为0则当前字符就是要找的字符。**更简单的方法是使用异或,除了目标字符其他字符都是出现两次**
- 时间复杂度On,空间On;(异或时间On,空间O1)
##### 代码
```c++
class Solution {
public:
    // Find the extra character added to t (a shuffle of s plus one char).
    // XOR every character of both strings: each char of s cancels its
    // counterpart in t, leaving only the added one. The notes above
    // already point out this is the better approach — O(n) time, O(1)
    // space instead of the O(n)-space counting map.
    char findTheDifference(string s, string t) {
        char r = 0;
        for (char c : s) r ^= c;
        for (char c : t) r ^= c;
        return r;
    }
};
```
#### 412. Fizz Buzz
> 写一个程序,输出从 1 到 n 数字的字符串表示。
>
> 1. 如果 n 是3的倍数,输出“Fizz”;
> 2. 如果 n 是5的倍数,输出“Buzz”;
> 3. 如果 n 同时是3和5的倍数,输出 “FizzBuzz”;
> 4. 否则输出n的字符串表示。
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/fizz-buzz
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
遍历映射即可。
- 时间On,空间On
##### 代码
```javascript
/**
* @param {number} n
* @return {string[]}
*/
var fizzBuzz = function(n) {
return Array.from({length:n},(x,i)=>i+1).map(x=>{
if(x%3===0&&x%5===0) return "FizzBuzz";
if(x%3===0) return "Fizz";
if(x%5===0) return "Buzz";
return String(x);
})
};
```
#### 461. 汉明距离
> 两个整数之间的汉明距离指的是这两个数字对应二进制位不同的位置的数目。
>
> 给出两个整数 x 和 y,计算它们之间的汉明距离。
>
> **注意:**
> 0 ≤ x, y < 2^31.
>
> **示例:**
>
> ```
> 输入: x = 1, y = 4
>
> 输出: 2
>
> 解释:
> 1 (0 0 0 1)
> 4 (0 1 0 0)
> ↑ ↑
>
> 上面的箭头指出了对应二进制位不同的位置。
> ```
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/hamming-distance
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
按位异或后统计1的个数。
- 时间复杂度O1,空间O1
##### 代码
```javascript
/**
 * Hamming distance: number of differing bit positions between x and y.
 * XOR isolates the differing bits, then Kernighan's trick clears the
 * lowest set bit per iteration to count them.
 * @param {number} x 0 <= x < 2^31
 * @param {number} y 0 <= y < 2^31
 * @return {number}
 */
var hammingDistance = function(x, y) {
    let diff = x ^ y;
    let count = 0;
    while (diff !== 0) {
        diff &= diff - 1;
        count++;
    }
    return count;
};
```
#### 500. 键盘行
> 给定一个单词列表,只返回可以使用在键盘同一行的字母打印出来的单词。
##### 思路
创建三个集合(键盘三行字母的集合),然后取当前字符串取交集,如果交集结果和当前字符串相同,那么满足条件。
- 时间复杂度O(n*k),空间复杂度O(1)
##### 代码
```javascript
/**
 * Keyboard-row words: keep the words whose letters all lie on the same
 * keyboard row. The row is fixed by the word's first letter; the three
 * copy-pasted branches of the original are collapsed into one check
 * against a table of row sets.
 * @param {string[]} words words made of ASCII letters
 * @return {string[]} subset of words, original order preserved
 */
var findWords = function(words) {
    const rows = [
        new Set('qwertyuiopQWERTYUIOP'),
        new Set('asdfghjklASDFGHJKL'),
        new Set('zxcvbnmZXCVBNM')
    ];
    return words.filter(word => {
        const row = rows.find(r => r.has(word[0]));
        return [...word].every(c => row.has(c));
    });
};
```
#### 504. 七进制数
> 给定一个整数,将其转化为7进制,并以字符串形式输出。
>
> **示例 1**:
>
> ```
> 输入: 100
> 输出: "202"
> ```
>
> **示例 2**:
>
> ```
> 输入: -7
> 输出: "-10"
> ```
>
> **注意**: 输入范围是 [-1e7, 1e7] 。
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/base-7
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
可以取余取商法做。直接用toString了。
- 时间复杂度O1,空间O1
##### 代码
```javascript
/**
* @param {number} num
* @return {string}
*/
var convertToBase7 = function(num) {
return num.toString(7);
};
```
#### 509. 斐波那契数
> 斐波那契数,通常用 F(n) 表示,形成的序列称为斐波那契数列。该数列由 0 和 1 开始,后面的每一项数字都是前面两项数字的和。也就是:
>
> ```
> F(0) = 0, F(1) = 1
> F(N) = F(N - 1) + F(N - 2), 其中 N > 1.
> 给定 N,计算 F(N)。
> ```
>
>
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/fibonacci-number
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
目前知道四种思路。1是递归求解有重复计算;2是动态规划;3是递归求解加记忆化技术(`memorization`);
4是最快的使用公式法(`通过差分方程可以推出公式`)。以下代码使用记忆化技术。
- 时间复杂度On,空间On
##### 代码
```javascript
/**
 * N-th Fibonacci number, computed bottom-up with two rolling values
 * instead of memoized recursion. Same results, O(N) time, O(1) space.
 * @param {number} N
 * @return {number}
 */
var fib = function(N) {
    if (N < 2) return N;
    let a = 0, b = 1;
    for (let i = 2; i <= N; i++) {
        [a, b] = [b, a + b];
    }
    return b;
};
```
#### 520. 检测大写字母
> 给定一个单词,你需要判断单词的大写使用是否正确。
>
>
>
> 我们定义,在以下情况时,单词的大写用法是正确的:
>
> - 全部字母都是大写,比如"USA"。
>
> - 单词中所有字母都不是大写,比如"leetcode"。
> - 如果单词不只含有一个字母,只有首字母大写, 比如 "Google"。
>
> 否则,我们定义这个单词没有正确使用大写字母。
>
>
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/detect-capital
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
条件判断就行。
- 时间On,空间O1
##### 代码
```c++
class Solution {
public:
    // A word's capitalization is valid iff it is all caps ("USA"),
    // all lower ("leetcode"), or capitalized ("Google").
    // Equivalent test: the uppercase count is size, 0, or exactly 1 with
    // that one uppercase letter in front. Replaces the original's
    // special-cased first/second flag logic (assumes ASCII letters only,
    // as the original did).
    bool detectCapitalUse(string word) {
        int upper = 0;
        for (char c : word) {
            if (c >= 'A' && c <= 'Z') upper++;
        }
        return upper == (int)word.size() || upper == 0 ||
               (upper == 1 && word[0] >= 'A' && word[0] <= 'Z');
    }
};
```
#### 543. 二叉树的直径
> 给定一棵二叉树,你需要计算它的直径长度。一棵二叉树的直径长度是任意两个结点路径长度中的最大值。这条路径可能穿过根结点。
>
> **示例** :
> 给定二叉树
>
> 1
> / \
> 2 3
> / \
> 4 5
> 返回 3, 它的长度是路径 [4,2,1,3] 或者 [5,2,1,3]。
>
> **注意**:两结点之间的路径长度是以它们之间边的数目表示。
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/diameter-of-binary-tree
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
任意两个结点路径长度的最大值,这两个结点肯定不是父子关系或者祖先孩子的关系,这两个结点一定是某个节点的左右子树上的孩子。更具体的说,这两个结点一定分别在某个节点的左子树最大深度分支和右子树最大深度分支的末端。
即两个节点路径长度的最大值就是所有节点中左右子树深度之和中的最大值。
- 时间复杂度On,空间复杂度Oh
##### 代码
```js
/**
* Definition for a binary tree node.
* function TreeNode(val) {
* this.val = val;
* this.left = this.right = null;
* }
*/
/**
 * Diameter of a binary tree = max over all nodes of
 * (depth of left subtree + depth of right subtree), counted in edges.
 * Single post-order pass: depth() returns the node count of the longest
 * downward path under a node and updates the best left+right sum as a
 * side effect. Replaces the original Map-memoized depth() plus a second
 * DFS — same answers, one traversal, no auxiliary map.
 * @param {TreeNode} root
 * @return {number}
 */
var diameterOfBinaryTree = function(root) {
    let max = 0;
    // Returns the depth (in nodes) of the subtree rooted at `node`.
    function depth(node) {
        if (!node) return 0;
        const l = depth(node.left);
        const r = depth(node.right);
        if (l + r > max) max = l + r;   // path through `node`, in edges
        return Math.max(l, r) + 1;
    }
    depth(root);
    return max;
};
```
#### 557. 反转字符串中的单词 III
> 给定一个字符串,你需要反转字符串中每个单词的字符顺序,同时仍保留空格和单词的初始顺序。
>
>
> 注意:在字符串中,每个单词由单个空格分隔,并且字符串中不会有任何额外的空格。
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/reverse-words-in-a-string-iii
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
按照单词来翻转,不复杂,直接上。
- 时间复杂度On,空间O1
##### 代码
```c++
class Solution {
public:
string reverseWords(string s) {
int i=0,j=0;
while(i<s.size()){
while(j<s.size()&&s[j]!=' ') j++;
int r=j-1;
while(i<r){
char tmp=s[i];
s[i]=s[r];
s[r]=tmp;
i++;
r--;
}
i=j+1;
j=i;
}
return s;
}
};
```
#### 559. N叉树的最大深度
> 给定一个 N 叉树,找到其最大深度。
>
> 最大深度是指从根节点到最远叶子节点的最长路径上的节点总数。
##### 思路
采用递归,自底向上的递归,先求子节点的深度,父节点的深度由子节点的深度决定。
- 时间复杂度On,空间复杂度Oh
##### 代码
```c++
/*
// Definition for a Node.
class Node {
public:
int val;
vector<Node*> children;
Node() {}
Node(int _val, vector<Node*> _children) {
val = _val;
children = _children;
}
};
*/
class Solution {
public:
int maxDepth(Node* root) {
if(!root) return 0;
int max=0;
for(int i=0;i<root->children.size();i++){
int cur=maxDepth(root->children[i]);
if(cur>max) max=cur;
}
return max+1;
}
};
```
#### 572. 另一个树的子树
> 给定两个非空二叉树 s 和 t,检验 s 中是否包含和 t 具有相同结构和节点值的子树。s 的一个子树包括 s 的一个节点和这个节点的所有子孙。s 也可以看做它自身的一棵子树。
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/subtree-of-another-tree
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
递归求解。
- 时间复杂度OMN,空间OM,MN分别为两棵树的节点个数。
##### 代码
```javascript
/**
* Definition for a binary tree node.
* function TreeNode(val) {
* this.val = val;
* this.left = this.right = null;
* }
*/
/**
* @param {TreeNode} s
* @param {TreeNode} t
* @return {boolean}
*/
var isSubtree = function(s, t) {
function isSame(a,b){
if(!a&&!b) return true;
if(!a||!b) return false;
if(a.val!==b.val) return false;
return isSame(a.left,b.left)&&isSame(a.right,b.right);
}
if(!t) return true;
if(!s) return false;
if(s.val===t.val) return isSame(s,t)||isSubtree(s.left,t)||isSubtree(s.right,t);
return isSubtree(s.left,t)||isSubtree(s.right,t);
};
```
#### 589. N叉树的前序遍历
> 给定一个 N 叉树,返回其节点值的*前序遍历*。
##### 思路
和二叉树类似,直接写就行。
##### 代码
```c++
/*
// Definition for a Node.
class Node {
public:
int val;
vector<Node*> children;
Node() {}
Node(int _val, vector<Node*> _children) {
val = _val;
children = _children;
}
};
*/
class Solution {
public:
vector<int> preorder(Node* root) {
set<Node*> st;
stack<Node*> s;
vector<int> r;
if(!root) return r;
s.push(root);
while(!s.empty()){
Node * cur=s.top();
s.pop();
if(!st.count(cur)){
for(int i=cur->children.size()-1;i>=0;i--){
if(cur->children[i]) s.push(cur->children[i]);
}
s.push(cur);
st.insert(cur);
}else{
r.push_back(cur->val);
}
}
return r;
}
};
```
#### 590. N叉树的后序遍历
> 给定一个 N 叉树,返回其节点值的*后序遍历*。
##### 思路
和二叉树类似,直接写就行。
##### 代码
```c++
/*
// Definition for a Node.
class Node {
public:
int val;
vector<Node*> children;
Node() {}
Node(int _val, vector<Node*> _children) {
val = _val;
children = _children;
}
};
*/
class Solution {
public:
vector<int> postorder(Node* root) {
set<Node*> st;
stack<Node*> s;
vector<int> r;
if(!root) return r;
s.push(root);
while(!s.empty()){
Node * cur=s.top();
s.pop();
if(!st.count(cur)){
s.push(cur);
st.insert(cur);
for(int i=cur->children.size()-1;i>=0;i--){
if(cur->children[i]) s.push(cur->children[i]);
}
}else{
r.push_back(cur->val);
}
}
return r;
}
};
```
#### 637. 二叉树的层平均值
> 给定一个非空二叉树, 返回一个由每层节点平均值组成的数组.
>
> **示例 1:**
>
> ```
> 输入:
> 3
> / \
> 9 20
> / \
> 15 7
> 输出: [3, 14.5, 11]
> 解释:
> 第0层的平均值是 3, 第1层是 14.5, 第2层是 11. 因此返回 [3, 14.5, 11].
> ```
>
> **注意:**
>
> 节点值的范围在32位有符号整数范围内。
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/average-of-levels-in-binary-tree
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
使用队列进行层次遍历,每层计算平均值。
- 时间复杂度On,空间On
##### 代码
```javascript
/**
* Definition for a binary tree node.
* function TreeNode(val) {
* this.val = val;
* this.left = this.right = null;
* }
*/
/**
 * Per-level averages via BFS. Each iteration processes one whole level
 * into a `next` array instead of calling Array.shift — shift is O(n) per
 * dequeue, which made the original traversal quadratic in the worst case.
 * @param {TreeNode} root
 * @return {number[]} average node value of each level, top to bottom
 */
var averageOfLevels = function(root) {
    const result = [];
    if (!root) return result;
    let level = [root];
    while (level.length > 0) {
        let sum = 0;
        const next = [];
        for (const node of level) {
            sum += node.val;
            if (node.left) next.push(node.left);
            if (node.right) next.push(node.right);
        }
        result.push(sum / level.length);
        level = next;
    }
    return result;
};
```
#### 657. 机器人能否返回原点
> 在二维平面上,有一个机器人从原点 (0, 0) 开始。给出它的移动顺序,判断这个机器人在完成移动后是否在 (0, 0) 处结束。
>
> 移动顺序由字符串表示。字符 move[i] 表示其第 i 次移动。机器人的有效动作有 R(右),L(左),U(上)和 D(下)。如果机器人在完成所有动作后返回原点,则返回 true。否则,返回 false。
>
> 注意:机器人“面朝”的方向无关紧要。 “R” 将始终使机器人向右移动一次,“L” 将始终向左移动等。此外,假设每次移动机器人的移动幅度相同。
>
>
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/robot-return-to-origin
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
左右移动次数相同,前后移动次数相同。
- 时间On,空间O1
##### 代码
```javascript
/**
 * Robot-return-to-origin: track the net displacement; the robot is back
 * at (0, 0) iff both coordinates end at zero (equivalent to the original
 * "count of U equals D and L equals R").
 * @param {string} moves characters from {U, D, L, R}
 * @return {boolean}
 */
var judgeCircle = function(moves) {
    let x = 0, y = 0;
    for (const m of moves) {
        if (m === 'U') y++;
        else if (m === 'D') y--;
        else if (m === 'L') x--;
        else if (m === 'R') x++;
    }
    return x === 0 && y === 0;
};
```
#### 674. 最长连续递增序列
> 给定一个未经排序的整数数组,找到最长且连续的的递增序列。
>
> **示例 1:**
>
> ```
> 输入: [1,3,5,4,7]
> 输出: 3
> 解释: 最长连续递增序列是 [1,3,5], 长度为3。
> 尽管 [1,3,5,7] 也是升序的子序列, 但它不是连续的,因为5和7在原数组里被4隔开。
> ```
>
>
> **示例 2:**
>
> ```
> 输入: [2,2,2,2,2]
> 输出: 1
> 解释: 最长连续递增序列是 [2], 长度为1。
> ```
>
> **注意:**数组长度不会超过10000。
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/longest-continuous-increasing-subsequence
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
遍历统计即可。
- 时间复杂度On,空间O1
##### 代码
```javascript
/**
* @param {number[]} nums
* @return {number}
*/
var findLengthOfLCIS = function(nums) {
return nums.length===0?0:nums.reduce((last,cur,i)=>{
if(i===0){
return {
v:cur,
count:1,
max:1
}
}
if(cur>last.v){
last.count++;
}else{
last.count=1;
}
last.v=cur;
last.max=last.max>last.count?last.max:last.count;
return last;
},null).max;
};
```
#### 703. 数据流中的第K大元素
> 设计一个找到数据流中第K大元素的类(class)。注意是排序后的第K大元素,不是第K个不同的元素。
>
> 你的 KthLargest 类需要一个同时接收整数 k 和整数数组nums 的构造器,它包含数据流中的初始元素。每次调用 KthLargest.add,返回当前数据流中第K大的元素。
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/kth-largest-element-in-a-stream
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
用`C++`实现前先看一下C++的优先级队列(也就是堆)是怎么用的,第一次用,有点懵。
定义,下面是从[CPP Reference](http://www.cplusplus.com/reference/queue/priority_queue/)里摘出来的。模板需要接受至少一个参数,即队列元素的类型。另外还要选择性的传入存储容器类型和比较函数类。默认的容器是`vector`,比较函数是`less`即大根堆。
```c++
template <class T, class Container = vector<T>,
class Compare = less<typename Container::value_type> > class priority_queue;
```
题目要求找数据流中第K大的数,我们仅需要维护一个小根堆,这个小根堆大小最大为K,小根堆里存的是当前前K大的数。当有新的数`val`加入的时候,如果小根堆的元素数量小于K那么直接加入,否则比较`val`和堆顶的数,如果比堆顶的值要大那就替换掉,否则忽略当前值。
- 插入时间O(logK),空间复杂度O(K)
##### 代码
```c++
// Stream k-th largest: keep a min-heap holding the k largest values seen
// so far; its top is always the current k-th largest.
class KthLargest {
private:
    // Min-heap (greater<int> comparator) of at most `limit` elements.
    priority_queue<int,vector<int>,greater<int>> p;
    // k — how many of the largest values to retain.
    int limit;
public:
    // Seed the heap by add()-ing every initial value; nums may hold fewer
    // than k elements.
    KthLargest(int k, vector<int>& nums) {
        this->limit=k;
        for(int i=0;i<nums.size();i++){
            add(nums[i]);
        }
    }
    
    // Insert val and return the current k-th largest (heap top).
    // O(log k) per call.
    int add(int val) {
        // NOTE(review): p.size() is unsigned vs signed limit — fine for
        // the problem's k >= 1, but worth an explicit cast if k could be 0.
        if(p.size()<limit){
            p.push(val);
        }else if(p.top()<val){
            // val displaces the smallest of the current top-k.
            p.pop();
            p.push(val);
        }
        return p.top();
    }
};
/**
* Your KthLargest object will be instantiated and called as such:
* KthLargest* obj = new KthLargest(k, nums);
* int param_1 = obj->add(val);
*/
```
#### 709. 转换成小写字母
> 实现函数 ToLowerCase(),该函数接收一个字符串参数 str,并将该字符串中的大写字母转换成小写字母,之后返回新的字符串。
>
>
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/to-lower-case
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
直接使用内置`String.prototype.toLowerCase`函数。
- 时间On,空间On
##### 代码
```javascript
/**
* @param {string} str
* @return {string}
*/
var toLowerCase = function(str) {
return str.toLowerCase();
};
```
#### 724. 寻找数组的中心索引
> 给定一个整数类型的数组 nums,请编写一个能够返回数组“中心索引”的方法。
>
> 我们是这样定义数组中心索引的:数组中心索引的左侧所有元素相加的和等于右侧所有元素相加的和。
>
> 如果数组不存在中心索引,那么我们应该返回 -1。如果数组有多个中心索引,那么我们应该返回最靠近左边的那一个。
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/find-pivot-index
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
先排除数组为空的情况。求和减去第一个元素,表示第一个元素右边的和,左边的和初始化为0,依次右移修改sum值直到两者相等或者中心索引超出数组的最大索引
- 时间复杂度On,空间O1
##### 代码
```c++
class Solution {
public:
int pivotIndex(vector<int>& nums) {
if(nums.size()==0) return -1;
int sum_right=0;
for_each(nums.begin(),nums.end(),[&](int n){
sum_right+=n;
});
sum_right-=nums[0];
int sum_left=0;
int i=0;
while(i+1<nums.size()&&sum_left!=sum_right){
sum_left+=nums[i];
sum_right-=nums[i+1];
i++;
}
return sum_left==sum_right?i:-1;
}
};
```
#### 747. 至少是其他数字两倍的最大数
> 在一个给定的数组nums中,总是存在一个最大元素 。
>
> 查找数组中的最大元素是否至少是数组中每个其他数字的两倍。
>
> 如果是,则返回最大元素的索引,否则返回-1。
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/largest-number-at-least-twice-of-others
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
首先如果这个最大数存在的话就肯定是整个数组的最大值。那么思路就是找到这个最大值,并且比较其他元素是否满足条件即可。
- 时间复杂度On,空间O1
##### 代码
```c++
class Solution {
public:
int dominantIndex(vector<int>& nums) {
if(nums.size()==0) return -1;
int index=0,max=nums[0];
for(int i=1;i<nums.size();i++){
if(nums[index]<nums[i]){
max=nums[i];
index=i;
}
}
for(int i=0;i<nums.size();i++){
if(index==i) continue;
if(nums[i]*2>max) return -1;
}
return index;
}
};
```
#### 832. 翻转图像
> 给定一个二进制矩阵 A,我们想先水平翻转图像,然后反转图像并返回结果。
>
> 水平翻转图片就是将图片的每一行都进行翻转,即逆序。例如,水平翻转 [1, 1, 0] 的结果是 [0, 1, 1]。
>
> 反转图片的意思是图片中的 0 全部被 1 替换, 1 全部被 0 替换。例如,反转 [0, 1, 1] 的结果是 [1, 0, 0]。
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/flipping-an-image
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
按行遍历,每行双指针左右利用异或取反并交换。
- 时间On,空间O1
##### 代码
```javascript
/**
* @param {number[][]} A
* @return {number[][]}
*/
var flipAndInvertImage = function(A) {
let r=A.length,c=A[0].length;
for(let i=0;i<r;i++){
let p=0,q=c-1;
while(p<q){
let tmp=A[i][p]^1;
A[i][p]=A[i][q]^1;
A[i][q]=tmp;
p++;
q--;
}
if(p===q) A[i][p]=1^A[i][p];
}
return A;
};
```
#### 897. 递增顺序查找树
> 给定一个树,按中序遍历重新排列树,使树中最左边的结点现在是树的根,并且每个结点没有左子结点,只有一个右子结点。
>
> 示例 :
>
> 输入:[5,3,6,2,4,null,8,1,null,null,null,7,9]
> 5
> / \
> 3 6
> / \ \
> 2 4 8
> / / \
> 1 7 9
> 输出:[1,null,2,null,3,null,4,null,5,null,6,null,7,null,8,null,9]
> 1
> \
> 2
> \
> 3
> \
> 4
> \
> 5
> \
> 6
> \
> 7
> \
> 8
> \
> 9
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/increasing-order-search-tree
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
中序遍历。遍历当前节点时上一个节点可以修改了。因为其指向的节点已经入栈。
- 时间On,空间On
##### 代码
```javascript
/**
* Definition for a binary tree node.
* function TreeNode(val) {
* this.val = val;
* this.left = this.right = null;
* }
*/
/**
* @param {TreeNode} root
* @return {TreeNode}
*/
var increasingBST = function(root) {
let visited=new Set();
let s=[];
if(!root) return root;
s.push(root);
let result=null,prev=null;
let isFirst=true;
while(s.length!==0){
let cur=s.pop();
if(!visited.has(cur)){
visited.add(cur);
if(cur.right) s.push(cur.right);
s.push(cur);
if(cur.left) s.push(cur.left);
}else{
if(isFirst){
result=cur;
isFirst=false;
}else{
prev.right=cur;
prev.left=null;
}
prev=cur;
}
}
prev.right=null;
prev.left=null;
return result;
};
```
#### 917. 仅仅反转字母
> 给定一个字符串 S,返回 “反转后的” 字符串,其中不是字母的字符都保留在原地,而所有字母的位置发生反转。
>
> **示例 1:**
>
> ```
> 输入:"ab-cd"
> 输出:"dc-ba"
> ```
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/reverse-only-letters
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
双指针,首尾交换。
- 时间On,空间On
##### 代码
```javascript
/**
 * Reverse only the letters of S; every non-letter stays at its index.
 * Classic two-pointer sweep: skip non-letters on each side, swap letters.
 * @param {string} S
 * @return {string}
 */
var reverseOnlyLetters = function(S) {
    const isLetter = ch => /[a-zA-Z]/.test(ch);
    const chars = S.split('');
    let i = 0, j = chars.length - 1;
    while (i < j) {
        if (!isLetter(chars[i])) { i++; continue; }
        if (!isLetter(chars[j])) { j--; continue; }
        [chars[i], chars[j]] = [chars[j], chars[i]];
        i++;
        j--;
    }
    return chars.join('');
};
```
#### 922. 按奇偶排序数组 II
> 给定一个非负整数数组 A, A 中一半整数是奇数,一半整数是偶数。
>
> 对数组进行排序,以便当 A[i] 为奇数时,i 也是奇数;当 A[i] 为偶数时, i 也是偶数。
>
> 你可以返回任何满足上述条件的数组作为答案。
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/sort-array-by-parity-ii
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
双指针。
- 时间复杂度On,空间复杂度O1
##### 代码
```javascript
/**
* @param {number[]} A
* @return {number[]}
*/
var sortArrayByParityII = function(A) {
let slow=0,fast=1;
while(slow<A.length&&fast<A.length){
while(slow<A.length&&A[slow]%2===0) slow+=2;
while(fast<A.length&&A[fast]%2===1) fast+=2;
if(slow>=A.length) break;
if(fast>=A.length) break;
A[slow]=A[slow]+A[fast];
A[fast]=A[slow]-A[fast];
A[slow]=A[slow]-A[fast];
}
return A;
};
```
#### 938. 二叉搜索树的范围和
> 给定二叉搜索树的根结点 root,返回 L 和 R(含)之间的所有结点的值的和。
>
> 二叉搜索树保证具有唯一的值。
>
>
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/range-sum-of-bst
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
中序遍历即可。
- 时间On,空间On
##### 代码
```javascript
/**
* Definition for a binary tree node.
* function TreeNode(val) {
* this.val = val;
* this.left = this.right = null;
* }
*/
/**
* @param {TreeNode} root
* @param {number} L
* @param {number} R
* @return {number}
*/
var rangeSumBST = function(root, L, R) {
let visited=new Set();
let s=[];
let result=0;
if(!root) return result;
s.push(root);
while(s.length!==0){
let cur=s.pop();
if(!visited.has(cur)){
visited.add(cur);
if(cur.right) s.push(cur.right);
s.push(cur);
if(cur.left) s.push(cur.left);
}else{
let val=cur.val;
if(val>R) return result;
if(val<L) continue;
result+=val;
}
}
return result;
};
```
#### 961. 重复 N 次的元素
> 在大小为 2N 的数组 A 中有 N+1 个不同的元素,其中有一个元素重复了 N 次。
>
> 返回重复了 N 次的那个元素。
>
> **提示:**
>
> - 4 <= A.length <= 10000
> - 0 <= A[i] < 10000
> - A.length 为偶数
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/n-repeated-element-in-size-2n-array
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
利用集合。2N个数中N+1个不同的数,其中1个数出现了N次。那么其余所有数都出现了一次。当出现第二次的时候就是这个数。
- 时间复杂度On,空间On
##### 代码
```javascript
/**
* @param {number[]} A
* @return {number}
*/
var repeatedNTimes = function(A) {
let s=new Set();
for(let i=0;i<A.length;i++){
if(s.has(A[i])) return A[i];
s.add(A[i]);
}
return -1;
};
```
#### 965. 单值二叉树
> 如果二叉树每个节点都具有相同的值,那么该二叉树就是*单值*二叉树。
>
> 只有给定的树是单值二叉树时,才返回 `true`;否则返回 `false`。
>
> **提示:**
>
> 1. 给定树的节点数范围是 `[1, 100]`。
> 2. 每个节点的值都是整数,范围为 `[0, 99]` 。
##### 思路
遍历或者递归,遍历可以深度遍历和层次遍历。以下代码是递归。
- 时间复杂度On,空间Ologn
##### 代码
```javascript
/**
* Definition for a binary tree node.
* function TreeNode(val) {
* this.val = val;
* this.left = this.right = null;
* }
*/
/**
 * Unival check: true iff every node in the tree has the same value.
 * Equivalent, simpler formulation of the original's three chained
 * conditions: every existing child must match its parent, recursively.
 * @param {TreeNode} root
 * @return {boolean}
 */
var isUnivalTree = function(root) {
    if (!root) return true;
    if (root.left && root.left.val !== root.val) return false;
    if (root.right && root.right.val !== root.val) return false;
    return isUnivalTree(root.left) && isUnivalTree(root.right);
};
```
#### 976. 三角形的最大周长
> 给定由一些正数(代表长度)组成的数组 A,返回由其中三个长度组成的、面积不为零的三角形的最大周长。
>
> 如果不能形成任何面积不为零的三角形,返回 0。
>
>
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/largest-perimeter-triangle
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
三角形的最大周长一定来自于相对较大的几个数,所以说需要排序。假如说是升序排序则从后向前找只要满足三角形的构造规则即可。JS中比较坑的一点是`Array.prototype.sort`如果不指定比较函数则对其`UTF-16`编码进行排序。
- 时间复杂度Onlogn,空间O1
##### 代码
```javascript
/**
 * Largest triangle perimeter from three of the given lengths.
 * Sort ascending (note: in place, like the original), then scan from the
 * largest end: the first adjacent triple satisfying the triangle
 * inequality a + b > c yields the maximum perimeter.
 * @param {number[]} A positive lengths
 * @return {number} largest perimeter, or 0 if no triangle exists
 */
var largestPerimeter = function(A) {
    A.sort((a, b) => a - b);
    for (let i = A.length - 1; i >= 2; i--) {
        if (A[i - 2] + A[i - 1] > A[i]) {
            return A[i - 2] + A[i - 1] + A[i];
        }
    }
    return 0;
};
```
#### 977. 有序数组的平方
> 给定一个按非递减顺序排序的整数数组 A,返回每个数字的平方组成的新数组,要求也按非递减顺序排序。
>
> **示例 1:**
>
> ```
> 输入:[-4,-1,0,3,10]
> 输出:[0,1,9,16,100]
> ```
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/squares-of-a-sorted-array
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
映射后排序。
- 时间Onlogn,空间On
更简单的方法是使用双指针,从绝对值最小的位置向两侧遍历。
- 时间On,空间On
##### 代码
- 映射后排序
```javascript
/**
* @param {number[]} A
* @return {number[]}
*/
var sortedSquares = function(A) {
return A.map(x=>x*x).sort((a,b)=>a-b);
};
```
- 双指针
```javascript
/**
* @param {number[]} A
* @return {number[]}
*/
var sortedSquares = function(A) {
let r=0;
while(r<A.length&&A[r]<0) r++;
let l=r-1;
let result=Array(A.length);
let idx=0;
while(l>=0&&r<A.length){
let lv=A[l]**2,rv=A[r]**2;
if(lv<rv){
result[idx++]=lv;
l--;
}
else{
result[idx++]=rv;
r++;
}
}
while(l>=0){
result[idx++]=A[l]**2;
l--;
}
while(r<A.length){
result[idx++]=A[r]**2;
r++;
}
return result;
};
```
#### 994. 腐烂的橘子
> 在给定的网格中,每个单元格可以有以下三个值之一:
>
> 值 0 代表空单元格;
> 值 1 代表新鲜橘子;
> 值 2 代表腐烂的橘子。
> 每分钟,任何与腐烂的橘子(在 4 个正方向上)相邻的新鲜橘子都会腐烂。
>
> 返回直到单元格中没有新鲜橘子为止所必须经过的最小分钟数。如果不可能,返回 -1。
>
>
>
> 示例 1:
>
> 
>
> ```
> 输入:[[2,1,1],[1,1,0],[0,1,1]]
> 输出:4
> ```
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/rotting-oranges
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
广度优先搜索。
1. 初始化c1为新鲜橘子数量,rot为腐烂橘子的坐标集合
2. 如果c1为0,则直接返回0
3. 如果rot为空,则返回-1
4. 初始化timer为0,将初始所有的腐烂橘子坐标加入队列
5. 进入循环BFS,直到队列为空或者c1为0
1. 取出队列所有坐标,将坐标四个方向进行查询,将新鲜橘子变为腐烂橘子,并将此橘子坐标加入队列,更新c1的数量
2. timer++
3. c1为0返回timer
6. 返回-1
##### 代码
```js
/**
* @param {number[][]} grid
* @return {number}
*/
var orangesRotting = function(grid) {
let rot=[];
let c1=grid.reduce((s,x)=>s+x.filter(x=>x===1).length,0)
for(let i=0;i<grid.length;i++){
let tmp=grid[i];
for(let j=0;j<tmp.length;j++){
if(tmp[j]===2) rot.push({i,j})
}
}
if(c1===0) return 0;
let q=[...rot];
let timer=0;
let r=grid.length,c=grid[0].length;
while(q.length!==0){
let size=q.length;
for(let i=0;i<size;i++){
let {i:x,j:y}=q.shift();
if(x!==0&&grid[x-1][y]===1) grid[x-1][y]=2,q.push({i:x-1,j:y}),c1--;
if(y!==0&&grid[x][y-1]===1) grid[x][y-1]=2,q.push({i:x,j:y-1}),c1--;
if(x!==r-1&&grid[x+1][y]===1) grid[x+1][y]=2,q.push({i:x+1,j:y}),c1--;
if(y!==c-1&&grid[x][y+1]===1) grid[x][y+1]=2,q.push({i:x,j:y+1}),c1--;
}
timer++;
if(c1===0) return timer;
}
return -1;
};
```
#### 1009. 十进制整数的反码
> 每个非负整数 N 都有其二进制表示。例如, 5 可以被表示为二进制 "101",11 可以用二进制 "1011" 表示,依此类推。注意,除 N = 0 外,任何二进制表示中都不含前导零。
>
> 二进制的反码表示是将每个 1 改为 0 且每个 0 变为 1。例如,二进制数 "101" 的二进制反码为 "010"。
>
> 给定十进制数 N,返回其二进制表示的反码所对应的十进制整数。
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/complement-of-base-10-integer
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
使用toString和map方法转换反码,用parseInt函数转换为数值。
- 时间复杂度O1,空间O1
##### 代码
```javascript
/**
 * Complement of N's binary representation (no leading zeros flipped).
 * Build an all-ones mask at least as wide as N, then XOR to flip exactly
 * N's bits — same results as the original string round-trip.
 * @param {number} N non-negative integer
 * @return {number}
 */
var bitwiseComplement = function(N) {
    let mask = 1;
    while (mask < N) mask = mask * 2 + 1;
    return mask ^ N;
};
```
#### 1013. 将数组分成和相等的三个部分
> 给你一个整数数组 A,只有可以将其划分为三个和相等的非空部分时才返回 true,否则返回 false。
>
> 形式上,如果可以找出索引 i+1 < j 且满足 (A[0] + A[1] + ... + A[i] == A[i+1] + A[i+2] + ... + A[j-1] == A[j] + A[j-1] + ... + A[A.length - 1]) 就可以将数组三等分。
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/partition-array-into-three-parts-with-equal-sum
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
将数组分成和相等的三个部分,即每一部分和一定是数组总和的1/3。如果这个和不是3的倍数那一定不等分。
考虑有多个分界点,第一部分和第二部分有多个分界点,0-i和0-j都满足部分和,假设i<j那么i+1~j的和一定是0,这并没有关系。
1. 求出总和sum,计算其1/3记为t
2. 计算三次和为t的值cur
##### 代码
```js
/**
 * True iff A can be split into three consecutive non-empty parts with
 * equal sums. Greedy scan: each part must sum to total/3.
 * @param {number[]} A
 * @return {boolean}
 */
var canThreePartsEqualSum = function(A) {
    let sum=A.reduce((s,x)=>s+x);
    let t=sum/3;                    // target sum for each of the three parts
    if(!Number.isInteger(t)) return false;
    let cur=0;                      // running sum of the current part
    let count=0,last=false;         // count: completed parts (max 2);
                                    // last: third part hit t at some point
    for(let i=0;i<A.length;i++){
        cur+=A[i];
        if(cur===t){
            if(count===2){
                // Already cut two parts: don't reset; just record that the
                // tail reached t. cur keeps accumulating so the final
                // check below can verify the tail ends exactly at t.
                last=true;
                continue;
            };
            cur=0;                  // close off part 1 or 2
            count++;
        }
    }
    // Valid iff the tail both reached t and finished at t (the two can
    // differ when negative values pull the running sum back off target).
    return last&&cur===t;
};
```
#### 1071. 字符串的最大公因子
> 对于字符串 S 和 T,只有在 S = T + ... + T(T 与自身连接 1 次或多次)时,我们才认定 “T 能除尽 S”。
>
> 返回最长字符串 X,要求满足 X 能除尽 str1 且 X 能除尽 str2。
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/greatest-common-divisor-of-strings
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
数学题。考虑两个字符串的最大公因子,一定是两个字符串拼凑的公因子,`str1+str2`或者`str2+str1`。
假设存在X是他们的最大公因子,则`str1=n*X,str2=m*X`,`str1+str2=str2+str1=(m+n)*X`。根据逆否命题等价条件,如果`str1+str2!=str2+str1`,那么必定不存在X。
如果满足了条件,那么怎么求X呢?
我们只需要知道X的长度就可以了,设其长度为L,则str1长度为`n*L`,str2长度为`m*L`。一定有m和n互质,则L即为str1长度和str2的最大公因子。
- 时间复杂度 O(m+n)(拼接比较占主导;gcd 部分仅 O(log min(m,n))),空间 O(m+n)
##### 代码
```js
/**
 * Greatest common divisor string of str1 and str2.
 * A divisor string exists iff str1 + str2 === str2 + str1; when it does,
 * its length is gcd(len1, len2) (Euclid's algorithm, written iteratively
 * here instead of recursively).
 * @param {string} str1
 * @param {string} str2
 * @return {string} longest X dividing both, or '' if none
 */
var gcdOfStrings = function(str1, str2) {
    if (str1 + str2 !== str2 + str1) return '';
    let a = str1.length, b = str2.length;
    while (b !== 0) {
        [a, b] = [b, a % b];
    }
    return str1.substring(0, a);
};
```
#### 1103. 分糖果 II
> 排排坐,分糖果。
>
> 我们买了一些糖果 candies,打算把它们分给排好队的 n = num_people 个小朋友。
>
> 给第一个小朋友 1 颗糖果,第二个小朋友 2 颗,依此类推,直到给最后一个小朋友 n 颗糖果。
>
> 然后,我们再回到队伍的起点,给第一个小朋友 n + 1 颗糖果,第二个小朋友 n + 2 颗,依此类推,直到给最后一个小朋友 2 * n 颗糖果。
>
> 重复上述过程(每次都比上一次多给出一颗糖果,当到达队伍终点后再次从队伍起点开始),直到我们分完所有的糖果。注意,就算我们手中的剩下糖果数不够(不比前一次发出的糖果多),这些糖果也会全部发给当前的小朋友。
>
> 返回一个长度为 num_people、元素之和为 candies 的数组,以表示糖果的最终分发情况(即 ans[i] 表示第 i 个小朋友分到的糖果数)。
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/distribute-candies-to-people
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
模拟一个小朋友一个小朋友的发糖果,直到结束。
- 时间复杂度$\max(\sqrt{candies},n)$,空间复杂度O1
##### 代码
```javascript
/**
 * Distribute `candies` to `num_people` children in rounds: child i of
 * round r receives one more candy than the previous hand-out; the last
 * hand-out gets whatever remains. Fixes the original's wasted iterations:
 * its inner for-loop kept cycling through the remaining children adding 0
 * after the candies ran out; here the loop stops exactly when they do.
 * @param {number} candies total candies to distribute
 * @param {number} num_people number of children (>= 1)
 * @return {number[]} candies received per child
 */
var distributeCandies = function(candies, num_people) {
    const result = Array(num_people).fill(0);
    let give = 1;   // size of the next hand-out
    let i = 0;      // total hand-outs made so far
    while (candies > 0) {
        const amount = Math.min(give, candies);
        result[i % num_people] += amount;
        candies -= amount;
        give++;
        i++;
    }
    return result;
};
```
#### 1108. IP 地址无效化
> 给你一个有效的 IPv4 地址 address,返回这个 IP 地址的无效化版本。
>
> 所谓无效化 IP 地址,其实就是用 "[.]" 代替了每个 "."。
>
>
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/defanging-an-ip-address
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
使用JavaScript的replace函数。第一个参数是正则对象或者字符串(仅仅匹配第一次出现),第二个参数是用于替换的字符串或者匹配转换函数。
```javascript
var newStr = str.replace(regexp|substr, newSubstr|function)
```
##### 代码
```javascript
/**
* @param {string} address
* @return {string}
*/
var defangIPaddr = function(address) {
return address.replace(/\./g,'[.]');
};
```
#### 1137. 第 N 个泰波那契数
> 泰波那契序列 Tn 定义如下:
>
> T0 = 0, T1 = 1, T2 = 1, 且在 n >= 0 的条件下 Tn+3 = Tn + Tn+1 + Tn+2
>
> 给你整数 n,请返回第 n 个泰波那契数 Tn 的值。
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/n-th-tribonacci-number
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
递归加记忆化。
- 时间复杂度On,空间On
##### 代码
```javascript
/**
* @param {number} n
* @return {number}
*/
/**
 * N-th Tribonacci number: T0=0, T1=1, T2=1, Tn = Tn-1 + Tn-2 + Tn-3.
 * Iterative rolling-window variant of the memoized recursion.
 * @param {number} n index into the sequence
 * @return {number} Tn
 */
var tribonacci = function(n) {
    if (n < 2) return n;       // T0 = 0, T1 = 1
    if (n === 2) return 1;     // T2 = 1
    let a = 0, b = 1, c = 1;   // sliding window T(i-3), T(i-2), T(i-1)
    for (let i = 3; i <= n; i++) {
        const next = a + b + c;
        a = b;
        b = c;
        c = next;
    }
    return c;
};
```
#### 1189. “气球” 的最大数量
> 给你一个字符串 text,你需要使用 text 中的字母来拼凑尽可能多的单词 "balloon"(气球)。
>
> 字符串 text 中的每个字母最多只能被使用一次。请你返回最多可以拼凑出多少个单词 "balloon"。
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/maximum-number-of-balloons
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
统计单词中包含字母的个数,找到最小满足单词的字母数量就是结果。
- 时间On,空间On
##### 代码
```javascript
/**
* @param {string} text
* @return {number}
*/
/**
 * How many copies of the word "balloon" can be assembled from `text`?
 * The answer is bounded by the scarcest letter; 'l' and 'o' are needed twice.
 * @param {string} text source letters, each usable once
 * @return {number} maximum number of "balloon" words
 */
var maxNumberOfBalloons = function(text) {
    const counts = { b: 0, a: 0, l: 0, o: 0, n: 0 };
    for (const ch of text) {
        if (ch in counts) counts[ch] += 1;
    }
    // 'l' and 'o' appear twice in "balloon", so halve (and floor) their counts
    return Math.min(
        counts.b,
        counts.a,
        Math.floor(counts.l / 2),
        Math.floor(counts.o / 2),
        counts.n
    );
};
```
#### 1200. 最小绝对差
> 给你一个整数数组 arr,其中每个元素都 不相同。
>
> 请你找到所有具有最小绝对差的元素对,并且按升序的顺序返回。
>
> **示例 1:**
>
> ```
> 输入:arr = [4,2,1,3]
> 输出:[[1,2],[2,3],[3,4]]
> ```
>
> **示例 2:**
>
> ```
> 输入:arr = [1,3,6,10,15]
> 输出:[[1,3]]
> ```
>
> **示例 3:**
>
> ```
> 输入:arr = [3,8,-10,23,19,-4,-14,27]
> 输出:[[-14,-10],[19,23],[23,27]]
> ```
>
> **提示:**
>
> - 2 <= arr.length <= 10^5
> - -10^6 <= arr[i] <= 10^6
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/minimum-absolute-difference
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
先排序后遍历。每次比较差值大小,最小的记录下来。
- 时间复杂度Onlogn,空间On
##### 代码
```javascript
/**
* @param {number[]} arr
* @return {number[][]}
*/
/**
 * Return all pairs of adjacent (in sorted order) values whose difference
 * equals the minimum absolute difference over the whole array, ascending.
 * Two passes over the sorted array: find the minimum gap, then collect pairs.
 * Note: sorts `arr` in place, like the original.
 * @param {number[]} arr distinct integers
 * @return {number[][]} pairs [smaller, larger] with the minimal difference
 */
var minimumAbsDifference = function(arr) {
    arr.sort((x, y) => x - y);
    let best = Infinity;
    for (let i = 1; i < arr.length; i++) {
        const gap = arr[i] - arr[i - 1];
        if (gap < best) best = gap;
    }
    const pairs = [];
    for (let i = 1; i < arr.length; i++) {
        if (arr[i] - arr[i - 1] === best) pairs.push([arr[i - 1], arr[i]]);
    }
    return pairs;
};
```
#### 1207. 独一无二的出现次数
> 给你一个整数数组 arr,请你帮忙统计数组中每个数的出现次数。
>
> 如果每个数的出现次数都是独一无二的,就返回 true;否则返回 false。
>
> **提示:**
>
> - 1 <= arr.length <= 1000
> - -1000 <= arr[i] <= 1000
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/unique-number-of-occurrences
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
用map计数,用set检测重复。
- 时间复杂度On,空间On
##### 代码
```javascript
/**
* @param {number[]} arr
* @return {boolean}
*/
/**
 * True iff every value in `arr` occurs a distinct number of times.
 * Count with a Map, then check the counts for duplicates via a Set.
 * @param {number[]} arr
 * @return {boolean}
 */
var uniqueOccurrences = function(arr) {
    const counts = new Map();
    for (const value of arr) {
        counts.set(value, (counts.get(value) || 0) + 1);
    }
    // if any two values share a count, the Set collapses them
    const distinctCounts = new Set(counts.values());
    return distinctCounts.size === counts.size;
};
```
#### 1232. 缀点成线
> 在一个 XY 坐标系中有一些点,我们用数组 coordinates 来分别记录它们的坐标,其中 coordinates[i] = [x, y] 表示横坐标为 x、纵坐标为 y 的点。
>
> 请你来判断,这些点是否在该坐标系中属于同一条直线上,是则返回 true,否则请返回 false。
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/check-if-it-is-a-straight-line
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
遍历求本次斜率和上次斜率,不同为false。所有相同为true
- 时间On,空间O1
##### 代码
```javascript
/**
* @param {number[][]} coordinates
* @return {boolean}
*/
/**
 * Check whether all given points lie on one straight line.
 *
 * Uses the integer cross product instead of comparing slopes: slope division
 * breaks on vertical lines (the sign of ±Infinity depends on direction, so a
 * vertical line traversed up-then-down was wrongly rejected) and yields NaN
 * for duplicate points. The cross-product test has neither problem and stays
 * in exact integer arithmetic.
 *
 * @param {number[][]} coordinates list of [x, y] points
 * @return {boolean} true if all points are collinear
 */
var checkStraightLine = function(coordinates) {
    if(coordinates.length<=2) return true;
    const [x0, y0] = coordinates[0];
    const dx = coordinates[1][0] - x0;
    const dy = coordinates[1][1] - y0;
    for(let i=2;i<coordinates.length;i++){
        const [x, y] = coordinates[i];
        // collinear iff the cross product of (p1-p0) and (pi-p0) is zero
        if (dy * (x - x0) !== dx * (y - y0)) return false;
    }
    return true;
};
```
#### 1249. 移除无效的括号
> 给你一个由 '('、')' 和小写字母组成的字符串 s。
>
> 你需要从字符串中删除最少数目的 '(' 或者 ')' (可以删除任意位置的括号),使得剩下的「括号字符串」有效。
>
> 请返回任意一个合法字符串。
>
> 有效「括号字符串」应当符合以下 **任意一条** 要求:
>
> - 空字符串或只包含小写字母的字符串
> - 可以被写作 AB(A 连接 B)的字符串,其中 A 和 B 都是有效「括号字符串」
> - 可以被写作 (A) 的字符串,其中 A 是一个有效的「括号字符串」
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/minimum-remove-to-make-valid-parentheses
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
和检测括号有效性相同,需要记录每个括号的位置。最后在残留在栈内的就是无效的括号,去除即可。
- 时间复杂度On,空间复杂度On
##### 代码
```c++
// A parenthesis character paired with its index in the original string,
// so unmatched parentheses can be located and erased afterwards.
struct Node{
    char c;
    int i;
};
class Solution {
public:
    // Remove the minimum number of '(' / ')' so the remaining string is a
    // valid parenthesis string. Standard stack scan: a ')' cancels a pending
    // '('; anything still on the stack afterwards is unmatched. Erasing from
    // the stack top works because the top always holds the LARGEST remaining
    // index, so earlier indices stay valid across each erase.
    string minRemoveToMakeValid(string str) {
        stack<Node> s;
        for(int i=0;i<str.size();i++){
            if(str[i]!='('&&str[i]!=')') continue;   // letters are never removed
            if(str[i]=='('){
                s.push({str[i],i});
                continue;
            }
            // str[i] == ')': unmatched if the stack is empty or tops a ')'
            if(s.empty()){
                s.push({str[i],i});
                continue;
            }
            if(s.top().c=='('){
                s.pop();   // matched pair — both survive
                continue;
            }
            s.push({str[i],i});
        }
        // everything left on the stack is invalid; erase highest index first
        while(!s.empty()){
            str.erase(s.top().i,1);
            s.pop();
        }
        return str;
    }
};
```
#### 1281. 整数的各位积和之差
> 给你一个整数 `n`,请你帮忙计算并返回该整数「各位数字之积」与「各位数字之和」的差。
##### 思路
使用toString、split、map、ToNumber转换,遍历计算。
- 时间复杂度On,空间O1
##### 代码
```javascript
/**
* @param {number} n
* @return {number}
*/
/**
 * Return (product of the digits of n) minus (sum of the digits of n).
 *
 * The previous version threaded both accumulators through one reducer with
 * special cases for the first and last index; it produced the same values
 * (for a single digit d, d - d = 0) but was hard to verify. Computing the
 * two aggregates directly is equivalent and obvious.
 *
 * @param {number} n a non-negative integer
 * @return {number} digit product minus digit sum
 */
var subtractProductAndSum = function(n) {
    const digits = String(n).split('').map(Number);
    const product = digits.reduce((p, d) => p * d, 1);
    const sum = digits.reduce((s, d) => s + d, 0);
    return product - sum;
};
```
#### 1287. 有序数组中出现次数超过25%的元素
> 给你一个非递减的 有序 整数数组,已知这个数组中恰好有一个整数,它的出现次数超过数组元素总数的 25%。
>
> 请你找到并返回这个整数
>
>
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/element-appearing-more-than-25-in-sorted-array
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
数组有序,单调递增。如果存在数组下标差值为1/4长度的元素相等,那么这个元素就是目标值。
- 时间复杂度On,空间O1
##### 代码
```javascript
/**
* @param {number[]} arr
* @return {number}
*/
/**
 * In a sorted array where exactly one value occupies more than 25% of the
 * slots, that value must span any window of length floor(n/4): compare each
 * element with the one a quarter-length ahead.
 * @param {number[]} arr non-decreasing integers
 * @return {number} the element occurring more than 25% of the time
 */
var findSpecialInteger = function(arr) {
    const span = Math.floor(arr.length / 4);
    for (let i = 0; i + span < arr.length || i < arr.length; i++) {
        // a run longer than n/4 必 covers indices i and i+span for some i
        if (arr[i] === arr[i + span]) return arr[i];
    }
    return 0;
};
```
#### 1290. 二进制链表转整数
> 给你一个单链表的引用结点 head。链表中每个结点的值不是 0 就是 1。已知此链表是一个整数数字的二进制表示形式。
>
> 请你返回该链表所表示数字的 十进制值 。
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/convert-binary-number-in-a-linked-list-to-integer
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
迭代,每次乘2加当前值作为当前结果值。
- 时间复杂度On,空间O1
##### 代码
```javascript
/**
* Definition for singly-linked list.
* function ListNode(val) {
* this.val = val;
* this.next = null;
* }
*/
/**
* @param {ListNode} head
* @return {number}
*/
/**
 * Interpret a linked list of 0/1 digits (most significant first) as a
 * binary number and return its decimal value.
 * @param {ListNode} head head of the binary-digit list
 * @return {number} the decimal value
 */
var getDecimalValue = function(head) {
    let value = 0;
    // Horner's rule: shift the accumulator left one bit per node
    for (let node = head; node !== null; node = node.next) {
        value = value * 2 + node.val;
    }
    return value;
};
```
#### 1299. 将每个元素替换为右侧最大元素
> 给你一个数组 arr ,请你将每个元素用它右边最大的元素替换,如果是最后一个元素,用 -1 替换。
>
> 完成所有替换操作后,请你返回这个数组。
>
>
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/replace-elements-with-greatest-element-on-right-side
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
从右向左替换。
- 时间On,空间O1
##### 代码
```javascript
/**
* @param {number[]} arr
* @return {number[]}
*/
/**
 * Replace each element with the maximum of the elements to its right;
 * the last element becomes -1. Modifies `arr` in place and returns it.
 * @param {number[]} arr
 * @return {number[]} the same array, rewritten
 */
var replaceElements = function(arr) {
    // sweep right-to-left carrying the running maximum of the suffix
    let suffixMax = -1;
    for (let i = arr.length - 1; i >= 0; i--) {
        const original = arr[i];
        arr[i] = suffixMax;
        suffixMax = Math.max(suffixMax, original);
    }
    return arr;
};
```
#### 1304. 和为零的N个唯一整数
> 给你一个整数 n,请你返回 任意 一个由 n 个 各不相同 的整数组成的数组,并且这 n 个数相加和为 0 。
>
>
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/find-n-unique-integers-sum-up-to-zero
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
前n-1使用下标,最后一个使用前面下标和的相反数。
- 时间On,空间On
##### 代码
```javascript
/**
* @param {number} n
* @return {number[]}
*/
/**
 * Return n distinct integers whose sum is 0.
 *
 * The first n-1 entries are 1..n-1 and the last entry is the negation of
 * their sum. (The previous version used 0..n-2, which returned [0, 0] for
 * n = 2 — the elements must be pairwise distinct.)
 *
 * @param {number} n how many integers to produce (n >= 1)
 * @return {number[]} n distinct integers summing to 0
 */
var sumZero = function(n) {
    // sum of 1..n-1 is (n-1)*n/2, so the closer balances the rest exactly
    return Array(n).fill(0).map((_, i) => i < n - 1 ? i + 1 : -(n - 1) * n / 2);
};
```
#### 1309. 解码字母到整数映射
> 给你一个字符串 s,它由数字('0' - '9')和 '#' 组成。我们希望按下述规则将 s 映射为一些小写英文字符:
>
> 字符('a' - 'i')分别用('1' - '9')表示。
> 字符('j' - 'z')分别用('10#' - '26#')表示。
> 返回映射之后形成的新字符串。
>
> 题目数据保证映射始终唯一。
>
> **示例 1**:
>
> ```
> 输入:s = "10#11#12"
> 输出:"jkab"
> 解释:"j" -> "10#" , "k" -> "11#" , "a" -> "1" , "b" -> "2".
> ```
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/decrypt-string-from-alphabet-to-integer-mapping
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
正则表达式匹配。贪婪模式,优先匹配#类别,再匹配其他。

- 时间复杂度On,空间On
##### 代码
```javascript
/**
* @param {string} s
* @return {string}
*/
/**
 * Decode a digit string where '1'-'9' map to 'a'-'i' and '10#'-'26#' map to
 * 'j'-'z'. Scans left to right, peeking two characters ahead for a '#'.
 * @param {string} s encoded string (guaranteed to be a valid encoding)
 * @return {string} the decoded lowercase string
 */
var freqAlphabets = function(s) {
    let decoded = '';
    let i = 0;
    while (i < s.length) {
        if (i + 2 < s.length && s[i + 2] === '#') {
            // two-digit code followed by '#': 10..26 -> 'j'..'z'
            const code = Number(s.slice(i, i + 2));
            decoded += String.fromCharCode(96 + code); // 'a' is 97
            i += 3;
        } else {
            // single digit: 1..9 -> 'a'..'i'
            decoded += String.fromCharCode(96 + Number(s[i]));
            i += 1;
        }
    }
    return decoded;
};
```
#### 5143. 解压缩编码列表
> 给你一个以行程长度编码压缩的整数列表 nums 。
>
> 考虑每相邻两个元素 [a, b] = [nums[2*i], nums[2*i+1]] (其中 i >= 0 ),每一对都表示解压后有 a 个值为 b 的元素。
>
> 请你返回解压后的列表。
>
> **示例:**
>
> ```
> 输入:nums = [1,2,3,4]
> 输出:[2,4,4,4]
> ```
>
> **提示:**
>
> - 2 <= nums.length <= 100
> - nums.length % 2 == 0
> - 1 <= nums[i] <= 100
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/decompress-run-length-encoded-list
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
每两个进行遍历,将结果加入result数组
- 时间On,空间O1
##### 代码
```javascript
/**
* @param {number[]} nums
* @return {number[]}
*/
/**
 * Decompress a run-length-encoded list: each pair [freq, value] expands to
 * `freq` copies of `value`.
 * @param {number[]} nums even-length list of [freq, value] pairs
 * @return {number[]} the decompressed list
 */
var decompressRLElist = function(nums) {
    const output = [];
    for (let i = 0; i + 1 < nums.length; i += 2) {
        const freq = nums[i];
        const value = nums[i + 1];
        for (let j = 0; j < freq; j++) {
            output.push(value);
        }
    }
    return output;
};
```
#### 5291. 统计位数为偶数的数字
> 给你一个整数数组 nums,请你返回其中位数为 偶数 的数字的个数。
>
>
>
>
> 提示:
>
> - 1 <= nums.length <= 500
> - 1 <= nums[i] <= 10^5
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/find-numbers-with-even-number-of-digits
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
遍历查找即可。使用Functional Programming,reduce函数。
- 时间复杂度On,空间O1
##### 代码
```javascript
/**
* @param {number[]} nums
* @return {number}
*/
/**
 * Count how many numbers in `nums` have an even number of decimal digits.
 * @param {number[]} nums positive integers
 * @return {number} count of even-digit numbers
 */
var findNumbers = function(nums) {
    let evenDigitCount = 0;
    for (let value of nums) {
        // strip digits one at a time to measure the length
        let digits = 0;
        while (value !== 0) {
            value = Math.trunc(value / 10);
            digits += 1;
        }
        if (digits % 2 === 0) evenDigitCount += 1;
    }
    return evenDigitCount;
};
```
### 中等
#### 2. 两数相加
> 给出两个 非空 的链表用来表示两个非负的整数。其中,它们各自的位数是按照 逆序 的方式存储的,并且它们的每个节点只能存储 一位 数字。
>
> 如果,我们将这两个数相加起来,则会返回一个新的链表来表示它们的和。
>
> 您可以假设除了数字 0 之外,这两个数都不会以 0 开头。
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/add-two-numbers
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
目的很明确,加的过程中注意进位,还要注意999的情况,可能还要添加最后新节点。
- 时间O(max{M,N}),空间O(max{M,N})
```javascript
/**
* Definition for singly-linked list.
* function ListNode(val) {
* this.val = val;
* this.next = null;
* }
*/
/**
* @param {ListNode} l1
* @param {ListNode} l2
* @return {ListNode}
*/
// Add two numbers stored as reversed-digit linked lists, writing the sum
// in place into l1 (l2's tail is spliced on when l2 is longer).
// Three phases, each propagating `carry` manually:
//   1. both lists in lockstep, 2. l1's leftover tail, 3. l2's leftover tail
//   (attached to l1 via pre1). A final carry appends one extra node.
var addTwoNumbers = function(l1, l2) {
    if(!l1||!l2) return l1||l2;
    let cur1=l1,cur2=l2;
    let carry=0;
    let pre1=null,pre2=null;
    // Phase 1: walk both lists together; the digit sum lands in l1's node.
    while(cur1&&cur2){
        let tmp=cur1.val+cur2.val+carry;
        if(tmp>9){
            carry=1;
            cur1.val=tmp-10;
        }else{
            carry=0;
            cur1.val=tmp;
        }
        pre1=cur1;
        pre2=cur2;
        cur1=cur1.next;
        cur2=cur2.next;
    }
    // Phase 2: l1 is longer — keep folding the carry into l1's tail.
    if(cur1){
        while(cur1){
            let tmp=cur1.val+carry;
            if(tmp>9){
                carry=1;
                cur1.val=tmp-10;
            }else{
                carry=0;
                cur1.val=tmp;
            }
            pre1=cur1;
            cur1=cur1.next;
        }
        if(carry) pre1.next=new ListNode(carry);   // e.g. 999 + 1 case
        return l1;
    };
    // Equal lengths: only a possible final carry node remains.
    if(!cur2){
        if(carry) pre1.next=new ListNode(carry);
        return l1;
    }
    // Phase 3: l2 is longer — splice its tail onto l1 and fold the carry in.
    pre1.next=cur2;
    while(cur2){
        let tmp=cur2.val+carry;
        if(tmp>9){
            carry=1;
            cur2.val=tmp-10;
        }else{
            carry=0;
            cur2.val=tmp;
        }
        pre2=cur2;
        cur2=cur2.next;
    }
    if(carry) pre2.next=new ListNode(carry);
    return l1;
};
```
#### 11. 盛最多水的容器
> 给定 n 个非负整数 a1,a2,...,an,每个数代表坐标中的一个点 (i, ai) 。在坐标内画 n 条垂直线,垂直线 i 的两个端点分别为 (i, ai) 和 (i, 0)。找出其中的两条线,使得它们与 x 轴共同构成的容器可以容纳最多的水。
>
> 说明:你不能倾斜容器,且 n 的值至少为 2。
>
> 
>
> *图中垂直线代表输入数组 [1,8,6,2,5,4,8,3,7]。在此情况下,容器能够容纳水(表示为蓝色部分)的最大值为 49。*
>
> **示例:**
>
> ```
> 输入: [1,8,6,2,5,4,8,3,7]
> 输出: 49
> ```
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/container-with-most-water
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
双指针,分别从头和尾部进行扫描,当前双指针状态计算面积,更新max。然后双指针中指向高度较低的向较高方向移动。
- 时间复杂度On,空间O1
##### 代码
```javascript
/**
* @param {number[]} height
* @return {number}
*/
/**
 * Container-with-most-water: find the largest rectangle bounded by two
 * vertical lines and the x-axis. Two pointers close in from both ends;
 * always advance the shorter side, since the shorter line caps the area.
 * @param {number[]} height line heights
 * @return {number} maximum water area
 */
var maxArea = function(height) {
    let left = 0;
    let right = height.length - 1;
    let best = 0;
    while (left < right) {
        const width = right - left;
        const area = Math.min(height[left], height[right]) * width;
        if (area > best) best = area;
        // moving the taller side can never help — move the shorter one
        if (height[left] < height[right]) left++;
        else right--;
    }
    return best;
};
```
#### 24. 两两交换链表中的节点
> 给定一个链表,两两交换其中相邻的节点,并返回交换后的链表。
>
> 你不能只是单纯的改变节点内部的值,而是需要实际的进行节点交换。
>
>
>
> **示例:**
>
> ```
> 给定 1->2->3->4, 你应该返回 2->1->4->3.
> ```
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/swap-nodes-in-pairs
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
递归的交换,并返回交换后第一个节点的值,用于递归完成后上一层递归的第二个节点的指向修改。
- 时间复杂度On,空间On
##### 代码
```javascript
/**
* Definition for singly-linked list.
* function ListNode(val) {
* this.val = val;
* this.next = null;
* }
*/
/**
* @param {ListNode} head
* @return {ListNode}
*/
/**
 * Swap every two adjacent nodes of a singly linked list by relinking the
 * nodes themselves (values are never copied). Iterative variant.
 * @param {ListNode} head
 * @return {ListNode} new head of the relinked list
 */
var swapPairs = function(head) {
    if (head === null || head.next === null) return head;
    const newHead = head.next;   // second node becomes the new head
    let prev = null;             // last node of the previous swapped pair
    let cur = head;
    while (cur !== null && cur.next !== null) {
        const second = cur.next;
        // relink the pair: cur -> second becomes second -> cur
        cur.next = second.next;
        second.next = cur;
        // hook the previous pair onto the new pair leader
        if (prev !== null) prev.next = second;
        prev = cur;
        cur = cur.next;
    }
    return newHead;
};
```
#### 46. 全排列
> 给定一个**没有重复**数字的序列,返回其所有可能的全排列。
>
>
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/permutations
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
利用递归回溯,每次保留当前状态,数组需要拷贝。**回溯=深度优先+状态重置+剪枝**。
- 时间复杂度O(n!),空间O(n!)
##### 代码
```javascript
/**
* @param {number[]} nums
* @return {number[][]}
*/
/**
 * All permutations of a duplicate-free array, via in-place swap backtracking:
 * fix position `start` with each candidate in turn, recurse, then swap back.
 * @param {number[]} nums distinct numbers
 * @return {number[][]} every permutation
 */
var permute = function(nums) {
    const results = [];
    const backtrack = (start) => {
        if (start === nums.length) {
            results.push([...nums]);   // snapshot the current arrangement
        }
        for (let i = start; i < nums.length; i++) {
            [nums[start], nums[i]] = [nums[i], nums[start]];   // choose
            backtrack(start + 1);
            [nums[start], nums[i]] = [nums[i], nums[start]];   // undo
        }
    };
    backtrack(0);
    return results;
};
```
#### 48. 旋转图像
> 给定一个 n × n 的二维矩阵表示一个图像。
>
> 将图像顺时针旋转 90 度。
>
> 说明:
>
> 你必须在原地旋转图像,这意味着你需要直接修改输入的二维矩阵。请不要使用另一个矩阵来旋转图像。
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/rotate-image
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
先沿着主对角线做对称,后沿着垂直中线做对称。
- 时间复杂度On,空间O1
##### 代码
```javascript
/**
* @param {number[][]} matrix
* @return {void} Do not return anything, modify matrix in-place instead.
*/
/**
 * Rotate an n x n matrix 90 degrees clockwise in place:
 * transpose across the main diagonal, then reverse each row.
 * @param {number[][]} matrix square matrix, modified in place
 * @return {number[][]} the same matrix (for convenience)
 */
var rotate = function(matrix) {
    const size = matrix.length;
    // step 1: transpose (swap the lower triangle with the upper one)
    for (let r = 1; r < size; r++) {
        for (let c = 0; c < r; c++) {
            [matrix[r][c], matrix[c][r]] = [matrix[c][r], matrix[r][c]];
        }
    }
    // step 2: mirror every row around the vertical center line
    for (const row of matrix) {
        row.reverse();
    }
    return matrix;
};
```
#### 49. 字母异位词分组
> 给定一个字符串数组,将字母异位词组合在一起。字母异位词指字母相同,但排列不同的字符串。
>
> **示例:**
>
> 输入: ["eat", "tea", "tan", "ate", "nat", "bat"],
> 输出:
> [
> ["ate","eat","tea"],
> ["nat","tan"],
> ["bat"]
> ]
> **说明:**
>
> 所有输入均为小写字母。
> 不考虑答案输出的顺序。
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/group-anagrams
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
首先想到哈希。因为这是一种映射,将字母异位词映射到同一个组,哈希函数就是字符串排序结果。
- 时间复杂度On,空间复杂度On(也可以认为空间是结果的一部分可以看做和不同字母异位词的个数相关)
##### 代码
```c++
class Solution {
public:
    // Group words that are anagrams of each other.
    // Two words are anagrams iff their sorted characters are equal, so the
    // sorted string is the canonical hash-map key for each group.
    vector<vector<string>> groupAnagrams(vector<string>& strs) {
        vector<vector<string>> result;
        unordered_map<string,vector<string>> map;
        for(int i=0;i<strs.size();i++){
            string tmp=strs[i];
            sort(tmp.begin(),tmp.end());   // canonical form of the word
            map[tmp].push_back(strs[i]);
        }
        // emit one group per distinct canonical form (group order unspecified)
        for(auto it=map.begin();it!=map.end();it++){
            result.push_back(it->second);
        }
        return result;
    }
};
```
#### 54. 螺旋矩阵
> 给定一个包含 *m* x *n* 个元素的矩阵(*m* 行, *n* 列),请按照顺时针螺旋顺序,返回矩阵中的所有元素。
##### 思路
设置边界值,每遍历一行或者一列,缩小边界,如果边界重合则说明无剩余元素,退出。
- 时间复杂度On,空间O1
##### 代码
```c++
class Solution {
public:
    // Walk the matrix clockwise, shrinking the four boundaries (up/down/
    // left/right) after each edge is consumed; stop when they cross.
    vector<int> spiralOrder(vector<vector<int>>& matrix) {
        vector<int> result;
        if(matrix.size()==0) return result;
        int up=0,down=matrix.size()-1,left=0,right=matrix[0].size()-1;
        while(true){
            // top edge, left -> right
            for(int i=left;i<=right;i++) result.push_back(matrix[up][i]);
            if(++up>down) break;
            // right edge, top -> bottom
            for(int i=up;i<=down;i++) result.push_back(matrix[i][right]);
            if(--right<left) break;
            // bottom edge, right -> left
            for(int i=right;i>=left;i--) result.push_back(matrix[down][i]);
            if(--down<up) break;
            // left edge, bottom -> top
            for(int i=down;i>=up;i--) result.push_back(matrix[i][left]);
            if(++left>right) break;
        }
        return result;
    }
};
```
#### 62. 不同路径
> 一个机器人位于一个 m x n 网格的左上角 (起始点在下图中标记为“Start” )。
>
> 机器人每次只能向下或者向右移动一步。机器人试图达到网格的右下角(在下图中标记为“Finish”)。
>
> 问总共有多少条不同的路径?
>
> 
>
> 例如,上图是一个7 x 3 的网格。有多少可能的路径?
>
> 说明:m 和 n 的值均不超过 100。
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/unique-paths
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
动态规划,除了第一行和第一列,设置dp数组,从`(0,0)`到`(i,j)`共有`dp[i][j]=dp[i-1][j]+dp[i][j-1]`种情况。
- 时间复杂度O(mn),空间O(mn)
##### 代码
```javascript
/**
* @param {number} m
* @param {number} n
* @return {number}
*/
/**
 * Count the monotone (right/down only) lattice paths from the top-left to
 * the bottom-right corner of an m x n grid.
 *
 * Fix: the previous init `Array(m).fill(Array(n).fill(0))` made every row
 * alias ONE shared array. The answer still came out right only because the
 * aliased updates degenerate into the 1-D rolling-array recurrence — a
 * fragile accident. Rows are now genuinely independent.
 *
 * @param {number} m number of rows
 * @param {number} n number of columns
 * @return {number} number of distinct paths
 */
var uniquePaths = function(m, n) {
    const dp = Array.from({length: m}, () => Array(n).fill(0));
    for (let i = 0; i < m; i++) {
        for (let j = 0; j < n; j++) {
            if (i === 0 || j === 0) {
                dp[i][j] = 1;   // single straight path along the border
            } else {
                // arrive from above or from the left
                dp[i][j] = dp[i - 1][j] + dp[i][j - 1];
            }
        }
    }
    return dp[m - 1][n - 1];
};
```
#### 63. 不同路径 II
> 一个机器人位于一个 m x n 网格的左上角 (起始点在下图中标记为“Start” )。
>
> 机器人每次只能向下或者向右移动一步。机器人试图达到网格的右下角(在下图中标记为“Finish”)。
>
> 现在考虑网格中有障碍物。那么从左上角到右下角将会有多少条不同的路径?
>
> 
>
> 网格中的障碍物和空位置分别用 1 和 0 来表示。
>
> 说明:m 和 n 的值均不超过 100。
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/unique-paths-ii
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
动态规划。如果当前位置是障碍物则`dp[i][j]=0`,否则考虑:第一行`dp[i][j]=dp[i][j-1]`,第一列`dp[i][j]=dp[i-1][j]`,其他`dp[i][j]=dp[i-1][j]+dp[i][j-1]。`
- 时间Omn,空间Omn
##### 代码
```javascript
/**
* @param {number[][]} obstacleGrid
* @return {number}
*/
/**
 * Count monotone (right/down) paths through a grid where cells marked 1 are
 * obstacles that block passage.
 *
 * Fix: the previous init `Array(m).fill(Array(n).fill(0))` aliased every dp
 * row to one shared array (it still happened to compute correctly through
 * the rolling-array effect, but was fragile). The rows are now independent,
 * and the nested if/else pyramid is flattened into guard-style cases.
 *
 * @param {number[][]} obstacleGrid 0 = free cell, 1 = obstacle
 * @return {number} number of distinct obstacle-avoiding paths
 */
var uniquePathsWithObstacles = function(obstacleGrid) {
    const m = obstacleGrid.length, n = obstacleGrid[0].length;
    const dp = Array.from({length: m}, () => Array(n).fill(0));
    for (let i = 0; i < m; i++) {
        for (let j = 0; j < n; j++) {
            if (obstacleGrid[i][j] === 1) {
                dp[i][j] = 0;            // obstacles admit no path through them
            } else if (i === 0 && j === 0) {
                dp[i][j] = 1;            // reachable start cell
            } else if (i === 0) {
                dp[i][j] = dp[i][j - 1]; // first row: only from the left
            } else if (j === 0) {
                dp[i][j] = dp[i - 1][j]; // first column: only from above
            } else {
                dp[i][j] = dp[i - 1][j] + dp[i][j - 1];
            }
        }
    }
    return dp[m - 1][n - 1];
};
```
#### 78. 子集
> 给定一组**不含重复元素**的整数数组 nums,返回该数组所有可能的子集(幂集)。
>
> 说明:解集不能包含重复的子集。
>
>
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/subsets
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
暴力回溯。利用每次选取当前元素后下次只能选取后面的元素以防重复。
- 时间复杂度O(n!),空间On
##### 代码
```javascript
/**
* @param {number[]} nums
* @return {number[][]}
*/
/**
 * Power set of a duplicate-free array. Depth-first enumeration: every node
 * of the recursion tree IS a subset; children extend it with elements that
 * come strictly after `start`, which prevents duplicates.
 * @param {number[]} nums distinct integers
 * @return {number[][]} all subsets
 */
var subsets = function(nums) {
    if (nums.length === 0) return [];
    const result = [];
    const dfs = (start, current) => {
        result.push(current);
        for (let i = start; i < nums.length; i++) {
            dfs(i + 1, [...current, nums[i]]);
        }
    };
    dfs(0, []);
    return result;
};
```
#### 94. 二叉树的中序遍历
> 给定一个二叉树,返回它的*中序* 遍历。
##### 思路
二叉树的前中后序遍历都是深度优先搜索,前序是第一次访问时便记录,中序是第二次访问,后序是第三次访问。我们可以使用自定义栈来模拟函数栈。
- 时间复杂度On,空间复杂度On
##### 代码
```c++
/**
* Definition for a binary tree node.
* struct TreeNode {
* int val;
* TreeNode *left;
* TreeNode *right;
* TreeNode(int x) : val(x), left(NULL), right(NULL) {}
* };
*/
class Solution {
public:
    // Iterative inorder traversal using an explicit stack plus a `visited`
    // set: a node is pushed as right-self-left so it pops as left-self-right;
    // the first pop expands it, the second pop (already visited) records it.
    vector<int> inorderTraversal(TreeNode* root) {
        set<TreeNode*> visited;
        stack<TreeNode*> s;
        vector<int> result;
        if(!root) return result;
        s.push(root);
        while(!s.empty()){
            TreeNode* cur=s.top();
            s.pop();
            if(!cur) continue;   // null children may have been pushed
            if(visited.find(cur)==visited.end()){
                visited.insert(cur);
                // push order right, self, left => pop order left, self, right
                s.push(cur->right);
                s.push(cur);
                s.push(cur->left);
            }
            else{
                result.push_back(cur->val);   // second visit: emit the value
            }
        }
        return result;
    }
};
```
#### 98. 验证二叉搜索树
> 给定一个二叉树,判断其是否是一个有效的二叉搜索树。
>
> 假设一个二叉搜索树具有**如下特征**:
>
> - 节点的左子树只包含小于当前节点的数。
>
> - 节点的右子树只包含大于当前节点的数。
> - 所有左子树和右子树自身必须也是二叉搜索树。
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/validate-binary-search-tree
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
利用中序遍历有序,如果中序遍历存在当前元素比上一个元素小(假设升序)则不是二叉搜索树,否则遍历完成则是二叉搜索树。
- 时间复杂度On,空间复杂度On
##### 代码
```c++
/**
* Definition for a binary tree node.
* struct TreeNode {
* int val;
* TreeNode *left;
* TreeNode *right;
* TreeNode(int x) : val(x), left(NULL), right(NULL) {}
* };
*/
class Solution {
public:
    // Validate a BST via iterative inorder traversal: the sequence must be
    // strictly increasing, so compare every visited value with the previous
    // one (`last`); `first` flags that no previous value exists yet.
    bool isValidBST(TreeNode* root) {
        stack<TreeNode*> s;
        set<TreeNode*> visited;
        s.push(root);
        int last,first=1;
        while(!s.empty()){
            TreeNode* cur=s.top();
            s.pop();
            if(!cur) continue;
            if(!visited.count(cur)){
                visited.insert(cur);
                // push right, self, left => popped in inorder sequence
                s.push(cur->right);
                s.push(cur);
                s.push(cur->left);
            }else{
                if(first==1){
                    first=0;
                    last=cur->val;   // first inorder value: nothing to compare
                    continue;
                }
                // >= rejects equal values too (duplicates invalidate a BST)
                if(last>=cur->val) return false;
                last=cur->val;
            }
        }
        return true;
    }
};
```
#### 102. 二叉树的层次遍历
> 给定一个二叉树,返回其按层次遍历的节点值。 (即逐层地,从左到右访问所有节点)。
>
>
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/binary-tree-level-order-traversal
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
树的层次遍历就是一种广度优先搜索,使用队列来完成操作。
- 时间复杂度On,空间复杂度On
##### 代码
```c++
/**
* Definition for a binary tree node.
* struct TreeNode {
* int val;
* TreeNode *left;
* TreeNode *right;
* TreeNode(int x) : val(x), left(NULL), right(NULL) {}
* };
*/
class Solution {
public:
    // BFS level-order traversal: the queue holds exactly one level at the
    // top of each outer iteration; `size` freezes that level's node count
    // while the children of the next level are enqueued.
    vector<vector<int>> levelOrder(TreeNode* root) {
        vector<vector<int>> result;
        queue<TreeNode*> q;
        if(!root) return result;
        q.push(root);
        while(!q.empty()){
            int size=q.size();   // nodes belonging to the current level
            vector<int> curlevel;
            for(int i=0;i<size;i++){
                TreeNode* cur=q.front();
                curlevel.push_back(cur->val);
                if(cur->left)
                    q.push(cur->left);
                if(cur->right)
                    q.push(cur->right);
                q.pop();
            }
            result.push_back(curlevel);
        }
        return result;
    }
};
```
#### 114. 二叉树展开为链表
> 给定一个二叉树,[原地](https://baike.baidu.com/item/%E5%8E%9F%E5%9C%B0%E7%AE%97%E6%B3%95/8010757)将它展开为链表。
##### 思路
遍历的同时构造,所以需要记录上一个节点,但是根元素必须晚于孩子元素的遍历,也就是说自底向上。所以说遍历只能“左右根”或者“右左根”。根据题意,通过right连接链表节点,而且顺序有规定,所以需要通过“右左根”顺序遍历。
- 时间复杂度On,空间On
##### 代码
```javascript
/**
* Definition for a binary tree node.
* function TreeNode(val) {
* this.val = val;
* this.left = this.right = null;
* }
*/
/**
* @param {TreeNode} root
* @return {void} Do not return anything, modify root in-place instead.
*/
/**
 * Flatten a binary tree into a right-pointer "linked list" in place.
 * The traversal visits nodes in reverse preorder (right subtree, then left,
 * then the node itself), so the list can be threaded back to front: each
 * visited node points its `right` at the previously finished node (`pre`).
 * @param {TreeNode} root modified in place; nothing is returned
 */
var flatten = function(root) {
    if(root===null) return;
    let st=new Set();   // nodes already expanded once
    let s=[];
    s.push(root);
    let pre;            // head of the already-flattened tail
    let first=true;
    while(s.length!==0){
        let cur=s.pop();
        if(!st.has(cur)){
            st.add(cur);
            // push self, left, right => pop order right, left, self
            s.push(cur);
            if(cur.left!==null) s.push(cur.left);
            if(cur.right!==null) s.push(cur.right);
        }else{
            if(first){
                // the very last preorder node terminates the list as-is
                pre=cur;
                first=false;
                continue;
            }
            cur.left=null;
            cur.right=pre;   // thread the list backwards
            pre=cur;
        }
    }
};
```
#### 133. 克隆图
> 给定无向[**连通**](https://baike.baidu.com/item/%E8%BF%9E%E9%80%9A%E5%9B%BE/6460995?fr=aladdin)图中一个节点的引用,返回该图的[**深拷贝**](https://baike.baidu.com/item/%E6%B7%B1%E6%8B%B7%E8%B4%9D/22785317?fr=aladdin)(克隆)。图中的每个节点都包含它的值 `val`(`Int`) 和其邻居的列表(`list[Node]`)。
>
> **提示:**
>
> 1. 节点数介于 1 到 100 之间。
>
> 2. 无向图是一个简单图,这意味着图中没有重复的边,也没有自环。
> 3. 由于图是无向的,如果节点 p 是节点 q 的邻居,那么节点 q 也必须是节点 p 的邻居。
> 4. 必须将给定节点的拷贝作为对克隆图的引用返回。
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/clone-graph
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
深度优先搜索迭代实现。
1. 定义遍历原图的栈`olds`和集合`oldst`
2. 定义已拷贝节点和原图节点的映射`created<old,new>`
3. 对原图进行遍历:
1. 访问当前节点`oldCur`
2. 如果没有创建拷贝节点`cur`则创建拷贝节点,否则通过`created`集合获取,定义拷贝节点邻居集合`neighbors`
3. 访问`oldCur`的所有邻居`oldCur->neighbors`
1. 如果当前邻居`oldCur->neighbors[i]`已经被创建过,则通过`created[oldCur->neighbors[i]]`获取加入到`neighbors`
2. 如果当前邻居`oldCur->neighbors[i]`未被创建过,那么创建并加入到`created`映射中和`neighbors`中
4. 令当前拷贝节点`cur->neighbors=neighbors`
4. 返回原图起始节点的拷贝
- 时间复杂度On(**好像是吧。。考虑邻居怎么说?**),空间On
##### 代码
```c++
/*
// Definition for a Node.
class Node {
public:
int val;
vector<Node*> neighbors;
Node() {}
Node(int _val, vector<Node*> _neighbors) {
val = _val;
neighbors = _neighbors;
}
};
*/
class Solution {
public:
    // Deep-copy an undirected graph with an iterative DFS.
    // `created` maps each original node to its clone so every node is cloned
    // exactly once even when reached from several neighbors; neighbor lists
    // are (re)assigned when the original node itself is visited.
    Node* cloneGraph(Node* node) {
        if(!node) return NULL;
        set<Node*> oldst;          // originals already expanded
        stack<Node*> olds;
        map<Node*,Node*> created;  // original -> clone
        olds.push(node);
        Node * root=NULL;
        while(!olds.empty()){
            Node *oldCur=olds.top();
            olds.pop();
            if(!oldst.count(oldCur)){
                oldst.insert(oldCur);
                for(int i=0;i<oldCur->neighbors.size();i++){
                    if(oldCur->neighbors[i]) olds.push(oldCur->neighbors[i]);
                }
                olds.push(oldCur);
            }else{// second pop: visit the current node and wire its clone
                vector<Node*> neighbors;
                for(int i=0;i<oldCur->neighbors.size();i++){
                    if(!created.count(oldCur->neighbors[i])){
                        // neighbor not cloned yet: create a placeholder clone
                        Node * neighbor = new Node(oldCur->neighbors[i]->val,{});
                        neighbors.push_back(neighbor);
                        created[oldCur->neighbors[i]]=neighbor;
                    }else{
                        neighbors.push_back(created[oldCur->neighbors[i]]);
                    }
                }
                Node * cur;
                if(!created.count(oldCur)){
                    cur=new Node(oldCur->val,neighbors);
                }else{
                    // placeholder created earlier: fill in its neighbor list
                    cur=created[oldCur];
                    cur->neighbors=neighbors;
                }
                if(oldCur==node){
                    root=cur;   // remember the clone of the entry node
                }
            }
        }
        return root;
    }
};
```
#### 148. 排序链表
> 在 *O*(*n* log *n*) 时间复杂度和常数级空间复杂度下,对链表进行排序。
##### 思路
使用归并排序。因为是链表所以可以原地归并,不需要使用额外空间(非常数空间)。归并使用迭代方式,因此需要自底向上直接分cut然后merge。
- 时间复杂度Onlogn,空间O1
##### 代码
```javascript
/**
* Definition for singly-linked list.
* function ListNode(val) {
* this.val = val;
* this.next = null;
* }
*/
/**
* @param {ListNode} head
* @return {ListNode}
*/
/**
 * Sort a linked list in O(n log n) time and O(1) extra space using
 * bottom-up (iterative) merge sort: merge runs of size 1, 2, 4, ... until
 * one run covers the whole list.
 * @param {ListNode} head
 * @return {ListNode} head of the sorted list
 */
var sortList = function(head) {
    // Bottom-up driver: for each run size, repeatedly cut two runs off the
    // front of the remaining list, merge them, and append to `tail`.
    function mergeSort(head){
        let dummyHead=new ListNode(0);
        dummyHead.next=head;
        let p=head;
        let length=0;
        // measure the list so we know when the run size covers it
        while(p){
            p=p.next;
            length++;
        }
        for(let size=1;size<length;size*=2){
            let cur=dummyHead.next;
            let tail=dummyHead;   // end of the already-merged prefix
            while(cur!==null){
                let left=cur;
                let right=cut(left,size);   // detach first run
                cur=cut(right,size);        // detach second run; rest stays in cur
                tail.next=merge(left,right);
                while(tail.next!==null) tail=tail.next;
            }
        }
        return dummyHead.next;
    }
    // Detach the first `len` nodes starting at `node`; return what follows.
    function cut(node,len){
        for(let i=1;i<len;i++){
            if(node===null) return null;
            node=node.next;
        }
        if(node===null) return null;
        let tmp=node.next;
        node.next=null;   // sever the run from the remainder
        return tmp;
    }
    // Standard merge of two sorted lists through a dummy head.
    function merge(l1,l2){
        let dummyHead=new ListNode(0);
        let p=dummyHead;
        while(l1!==null&&l2!==null){
            if(l1.val<l2.val){
                p.next=l1;
                p=l1;
                l1=l1.next;
            }else{
                p.next=l2;
                p=l2;
                l2=l2.next;
            }
        }
        p.next=l1===null?l2:l1;   // append the leftover run
        return dummyHead.next;
    }
    return mergeSort(head);
};
```
#### 151. 翻转字符串里的单词
> 给定一个字符串,逐个翻转字符串中的每个单词。
>
> 说明:
>
> 无空格字符构成一个单词。
> 输入字符串可以在前面或者后面包含多余的空格,但是反转后的字符不能包括。
> 如果两个单词间有多余的空格,将反转后单词间的空格减少到只含一个。
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/reverse-words-in-a-string
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
整体思路是首先翻转所有字符,然后再按单词翻转一次,得到答案。注意去除空格。
- 时间复杂度On(不考虑string erase造成的浪费),空间复杂度O1
##### 代码
```c++
class Solution {
public:
    // Reverse the word order of a sentence in place: reverse the whole
    // string once, then re-reverse each individual word, squeezing out
    // leading/trailing/duplicate spaces along the way.
    // NOTE: s[j] with j == s.size() relies on std::string returning the
    // null terminator for the past-the-end index (well-defined since C++11).
    string reverseWords(string s) {
        //reverse all chars
        reverse(s.begin(),s.end());
        //remove space at start&end
        while(s[0]==' ') s.erase(0,1);
        while(s[s.size()-1]==' ') s.erase(s.size()-1,1);
        //reverse all word in turn
        int i=0,j=0;
        while(i<s.size()){
            while(j<s.size()&&s[j]!=' ') j++;   // j stops on a space or at end
            //remove extra space (collapse runs of spaces to one)
            if(s[j]==' '){
                int k=j+1;
                while(k<s.size()&&s[k]==' ') s.erase(k,1);
            }
            int r=j-1;
            // reverse the word occupying [i, r]
            while(i<r){
                char tmp=s[i];
                s[i]=s[r];
                s[r]=tmp;
                i++;
                r--;
            }
            i=j+1;   // advance past the single separating space
            j=i;
        }
        return s;
    }
};
```
#### 230. 二叉搜索树中第K小的元素
> 给定一个二叉搜索树,编写一个函数 `kthSmallest` 来查找其中第 **k** 个最小的元素。
##### 思路
BST中序遍历有序,加个计数器即可。
- 时间复杂度On,空间On
进阶:二叉搜索树可以动态插入,即有数据流流入,可以使用大根堆,始终保证大根堆里的K个元素始终是最小的K个元素即可,堆顶就是第K小的。
##### 代码
```javascript
/**
* Definition for a binary tree node.
* function TreeNode(val) {
* this.val = val;
* this.left = this.right = null;
* }
*/
/**
* @param {TreeNode} root
* @param {number} k
* @return {number}
*/
/**
 * Return the k-th smallest value in a BST using the classic iterative
 * inorder traversal (descend-left loop with an explicit stack), stopping
 * as soon as the k-th node is popped.
 * @param {TreeNode} root BST root
 * @param {number} k 1-based rank
 * @return {number} the k-th smallest value, or -1 if the tree is empty
 */
var kthSmallest = function(root, k) {
    if (root === null) return -1;
    const stack = [];
    let node = root;
    let seen = 0;
    while (stack.length > 0 || node !== null) {
        // descend as far left as possible
        while (node !== null) {
            stack.push(node);
            node = node.left;
        }
        node = stack.pop();   // next node in ascending order
        seen += 1;
        if (seen === k) return node.val;
        node = node.right;
    }
    return -1;
};
```
#### 240. 搜索二维矩阵 II
> 编写一个高效的算法来搜索 m x n 矩阵 matrix 中的一个目标值 target。该矩阵具有以下特性:
>
> 每行的元素从左到右升序排列。
> 每列的元素从上到下升序排列。
> **示例**:
>
> 现有矩阵 matrix 如下:
>
> ```
> [
> [1, 4, 7, 11, 15],
> [2, 5, 8, 12, 19],
> [3, 6, 9, 16, 22],
> [10, 13, 14, 17, 24],
> [18, 21, 23, 26, 30]
> ]
> ```
>
>
> 给定 target = 5,返回 true。
>
> 给定 target = 20,返回 false。
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/search-a-2d-matrix-ii
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
从右上向左下搜,如果当前值比目标值大那一定在下面的行中,如果比目标值小那么一定在左边的列中。
- 时间复杂度O(m+n),空间O1
##### 代码
```javascript
/**
* @param {number[][]} matrix
* @param {number} target
* @return {boolean}
*/
/**
 * Search a matrix whose rows and columns are both sorted ascending.
 * Start at the top-right corner: a larger value eliminates its column,
 * a smaller value eliminates its row — O(m + n) steps total.
 * @param {number[][]} matrix row- and column-sorted matrix
 * @param {number} target value to locate
 * @return {boolean} true if target exists in the matrix
 */
var searchMatrix = function(matrix, target) {
    if (!matrix || matrix.length === 0) return false;
    let row = 0;
    let col = matrix[0].length - 1;
    while (row < matrix.length && col >= 0) {
        const value = matrix[row][col];
        if (value === target) return true;
        if (value > target) col--;   // everything below in this column is larger
        else row++;                  // everything left in this row is smaller
    }
    return false;
};
```
#### 300. 最长上升子序列
> 给定一个无序的整数数组,找到其中最长上升子序列的长度。
>
> 示例:
>
> ```
> 输入: [10,9,2,5,3,7,101,18]
> 输出: 4
> 解释: 最长的上升子序列是 [2,3,7,101],它的长度是 4。
> ```
>
>
> 说明:
>
> 可能会有多种最长上升子序列的组合,你只需要输出对应的长度即可。
> 你算法的时间复杂度应该为 O(n2) 。
> 进阶: 你能将算法的时间复杂度降低到 O(n log n) 吗?
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/longest-increasing-subsequence
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 动态规划
###### 思路
1. 状态定义dp[i]是以下标i为结束的最长子序列的长度
2. 状态转移方程是dp[i]=max{dp[j]+1|j<i&&nums[j]<nums[i]}
3. 最长上升子序列的长度就是max{dp[i]|0<=i<nums.length}
- 时间复杂度On^2,空间On
###### 代码
```js
/**
* @param {number[]} nums
* @return {number}
*/
/**
 * Length of the longest strictly increasing subsequence, O(n^2) DP.
 * dp[i] = length of the best subsequence ending exactly at index i.
 * @param {number[]} nums
 * @return {number} length of the longest increasing subsequence
 */
var lengthOfLIS = function(nums) {
    if (nums.length === 0) return 0;
    const dp = nums.map(() => 1);   // every element alone is length 1
    let best = 1;
    for (let i = 1; i < nums.length; i++) {
        for (let j = 0; j < i; j++) {
            // extend any shorter subsequence that ends below nums[i]
            if (nums[j] < nums[i] && dp[j] + 1 > dp[i]) dp[i] = dp[j] + 1;
        }
        if (dp[i] > best) best = dp[i];
    }
    return best;
};
```
#### 322. 零钱兑换
> 给定不同面额的硬币 coins 和一个总金额 amount。编写一个函数来计算可以凑成总金额所需的最少的硬币个数。如果没有任何一种硬币组合能组成总金额,返回 -1。
>
> **示例 1:**
>
> ```
> 输入: coins = [1, 2, 5], amount = 11
> 输出: 3
> 解释: 11 = 5 + 5 + 1
> ```
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/coin-change
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
动态规划,代码是自顶向下的Memorize方法。
1. 基本条件总金额amount为0时需要硬币0个,小于0时无解返回-1
2. 状态转移方程:$\min\{dp[n-coin]+1|coin\in coins\}$
- 时间复杂度O(amount*coins.length),空间复杂度O(amount)
##### 代码
```javascript
/**
* @param {number[]} coins
* @param {number} amount
* @return {number}
*/
/**
 * Fewest coins needed to make `amount` from unlimited coins of the given
 * denominations; -1 when impossible. Top-down memoized recursion.
 * @param {number[]} coins denominations
 * @param {number} amount target total
 * @return {number} minimal coin count, or -1
 */
var coinChange = function(coins, amount) {
    const memo = new Map();   // remaining amount -> minimal coins (or -1)
    const solve = (remaining) => {
        if (remaining === 0) return 0;
        if (remaining < 0) return -1;   // overshoot: dead branch
        if (memo.has(remaining)) return memo.get(remaining);
        let best = Infinity;
        for (const coin of coins) {
            const sub = solve(remaining - coin);
            if (sub >= 0 && sub + 1 < best) best = sub + 1;
        }
        const answer = best === Infinity ? -1 : answerOrBest(best);
        memo.set(remaining, answer);
        return answer;
    };
    // trivial helper keeps the ternary above readable
    function answerOrBest(v) { return v; }
    return solve(amount);
};
```
#### 429. N叉树的层序遍历
> 给定一个 N 叉树,返回其节点值的*层序遍历*。 (即从左到右,逐层遍历)。
##### 思路
和二叉树层次遍历类似,BFS,使用队列。
- 时间复杂度On,空间On
##### 代码
```c++
/*
// Definition for a Node.
class Node {
public:
int val;
vector<Node*> children;
Node() {}
Node(int _val, vector<Node*> _children) {
val = _val;
children = _children;
}
};
*/
class Solution {
public:
    // Level-order (BFS) traversal of an N-ary tree; identical to the binary
    // version except each node contributes all of its `children`.
    vector<vector<int>> levelOrder(Node* root) {
        vector<vector<int>> result;
        if(!root) return result;
        queue<Node*> q;
        q.push(root);
        while(!q.empty()){
            int size=q.size();   // node count of the current level
            vector<int> row;
            for(int i=0;i<size;i++){
                Node* cur=q.front();
                row.push_back(cur->val);
                q.pop();
                for(int j=0;j<cur->children.size();j++){
                    if(cur->children[j]){
                        q.push(cur->children[j]);
                    }
                }
            }
            result.push_back(row);
        }
        return result;
    }
};
```
#### 450. 删除二叉搜索树中的节点
> 给定一个二叉搜索树的根节点 root 和一个值 key,删除二叉搜索树中的 key 对应的节点,并保证二叉搜索树的性质不变。返回二叉搜索树(有可能被更新)的根节点的引用。
>
> 一般来说,删除节点可分为两个步骤:
>
> 首先找到需要删除的节点;
> 如果找到了,删除它。
> **说明**: 要求算法时间复杂度为 O(h),h 为树的高度。
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/delete-node-in-a-bst
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
1. 如果空树则直接返回根
2. 查找待删除节点,如果没有则直接返回根
3. 如果找到了,(查找时记录父亲节点`pre`和当前节点是父亲的左孩子还是右孩子(`left`),找到时`key==cur->val`)
1. 如果没有孩子,直接删除:`pre->child=NULL `(删除如果是根节点单独处理)
2. 如果有一个孩子,则令`pre->child=cur->child`(删除如果是根节点单独处理)
3. 如果两个孩子,则替换`cur->val`和`successor->val`,然后使用步骤1和步骤2来删除`successor`节点(该节点不可能有两个孩子,因为`cur`的中序遍历的后续节点是在右子树上的最左下角)
- 时间复杂度Oh,空间O1
##### 代码
```c++
/**
* Definition for a binary tree node.
* struct TreeNode {
* int val;
* TreeNode *left;
* TreeNode *right;
* TreeNode(int x) : val(x), left(NULL), right(NULL) {}
* };
*/
class Solution {
public:
    // Delete `key` from a BST in O(h): locate the node while tracking its
    // parent (`pre`) and which side it hangs on (`left`); the two-child case
    // is reduced to a zero/one-child delete by swapping the value with the
    // inorder successor (leftmost node of the right subtree).
    TreeNode* deleteNode(TreeNode* root, int key) {
        if(!root) return root;
        TreeNode*cur=root;
        TreeNode*pre=NULL;
        bool left=false;   // is `cur` the left child of `pre`?
        while(cur&&cur->val!=key){
            pre=cur;
            if(cur->val>key){
                cur=cur->left;
                left=true;
            }
            else{
                cur=cur->right;
                left=false;
            }
        }
        if(!cur) return root;//not found
        if(cur->left&&cur->right){//case 3:two child
            //if two child,exchange from the successor and delete
            pre=cur;
            TreeNode * tmp=cur->right;
            left=false;
            while(tmp->left){
                left=true;
                pre=tmp;
                tmp=tmp->left;
            }
            cur->val=tmp->val;
            cur=tmp;   // successor has at most one (right) child
        }
        if(cur->left||cur->right){//case 2:one child
            if(pre==NULL) return cur->left?cur->left:cur->right;//delete root
            //if one child,delete and move child to the node
            (left?pre->left:pre->right)=(cur->left?cur->left:cur->right);
            return root;
        }else{//case 1:no child
            if(pre==NULL) return NULL;//delete root;
            //if no child,delete directly
            (left?pre->left:pre->right)=NULL;
            return root;
        }
    }
};
```
#### 494. 目标和
> 给定一个非负整数数组,a1, a2, ..., an, 和一个目标数,S。现在你有两个符号 + 和 -。对于数组中的任意一个整数,你都可以从 + 或 -中选择一个符号添加在前面。
>
> 返回可以使最终数组和为目标数 S 的所有添加符号的方法数。
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/target-sum
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
深度优先搜索递归实现。之前用迭代实现参数的保存太麻烦了,剪不断理还乱。递归实现思路清晰。当深度到达最深处时判断当前是否满足条件,满足条件总数加一。
- 时间复杂度O($2^n$),空间On(函数栈到最深层会回溯销毁栈帧)
##### 代码
```c++
class Solution {
public:
    // Count the sign assignments (+/- before each element) whose total
    // equals S, by exhaustive DFS over both choices per element (O(2^n)).
    int findTargetSumWays(vector<int>& nums, int S) {
        return ways(nums,S,0);
    }
    // Remaining target `s` after deciding elements [0, i); when all elements
    // are assigned, the branch counts iff the residue is exactly zero.
    // `s` is long to avoid overflow while intermediate sums swing.
    int ways(vector<int>& nums,long s,int i){
        if(i>=nums.size()){
            return s==0;
        }
        // nums[i] taken with '+' reduces the residue; '-' increases it
        return ways(nums,s-nums[i],i+1)+ways(nums,s+nums[i],i+1);
    }
};
```
#### 498. 对角线遍历
> 二维数组对角线遍历。
>
> 
##### 思路
> x+y=b(b=0,1,2,3)
##### 代码
```c++
class Solution {
public:
    // Zig-zag diagonal traversal. Cells on one diagonal satisfy x + y = b;
    // walk b upward, alternating the direction along each diagonal, and
    // skip coordinates that fall outside the matrix.
    vector<int> findDiagonalOrder(vector<vector<int>>& matrix) {
        //x+y=b(b=0,1,2)
        vector<int> result;
        if(matrix.size()==0) return result;
        int M=matrix.size();
        int N=matrix[0].size();
        int max_len=M>N?M:N;
        int flag=0;//0->up,1->down
        for(int b=0;b<=2*max_len-2;b++){
            if(!flag){
                // going up: row index y decreases along the diagonal
                int y=b;
                while(y>=M) y--;   // clamp the start inside the row range
                for(;y>=0;y--){
                    if(b-y>=N) continue;   // column out of range
                    result.push_back(matrix[y][b-y]);
                }
            }else{
                // going down: column index x decreases along the diagonal
                int x=b;
                while(x>=N) x--;   // clamp the start inside the column range
                for(;x>=0;x--){
                    if(b-x>=M) continue;   // row out of range
                    result.push_back(matrix[b-x][x]);
                }
            }
            flag=!flag;   // flip direction for the next diagonal
        }
        return result;
    }
};
```
#### 513. 找树左下角的值
> 给定一个二叉树,在树的最后一行找到最左边的值。
##### 思路
二叉树的层次遍历,即BFS,使用队列,每次遍历更新结果值为最新一行的第一个元素。
- 时间复杂度On,空间On
##### 代码
```javascript
/**
* Definition for a binary tree node.
* function TreeNode(val) {
* this.val = val;
* this.left = this.right = null;
* }
*/
/**
* @param {TreeNode} root
* @return {number}
*/
var findBottomLeftValue = function(root) {
    // BFS by whole levels; the answer is overwritten with the first node of
    // each level, so after the walk it holds the leftmost value of the
    // deepest level. O(n) time, O(n) space.
    let queue = [root];
    let leftmost = root.val;
    while (queue.length > 0) {
        const next = [];
        queue.forEach((node, idx) => {
            if (idx === 0) leftmost = node.val;  // first node of this level
            if (node.left) next.push(node.left);
            if (node.right) next.push(node.right);
        });
        queue = next;
    }
    return leftmost;
};
```
#### 713. 乘积小于K的子数组
> 给定一个正整数数组 nums。
>
> 找出该数组内乘积小于 k 的连续的子数组的个数。
>
>
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/subarray-product-less-than-k
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
使用双指针,每次增加right,乘积乘上right,调整left使得当前prod满足条件,则当前的count应该增加`right-left+1`,因为left到right范围内不包含right的子数组已经在前面的right中考虑过了。
- 时间On,空间O1
##### 代码
```c++
class Solution {
public:
    // 713. Count contiguous subarrays whose product is strictly less than k.
    // Sliding window: for each right end, shrink from the left until the
    // window product is < k; every subarray ending at `right` and starting
    // inside the window is then new, contributing right-left+1.
    // O(n) time, O(1) space.
    int numSubarrayProductLessThanK(vector<int>& nums, int k) {
        if (k <= 1) return 0;  // elements are positive: no product can be < 1
        int total = 0;
        int product = 1;
        int left = 0;
        int right = 0;
        while (right < (int)nums.size()) {
            product *= nums[right];
            while (product >= k) {
                product /= nums[left];
                ++left;
            }
            total += right - left + 1;
            ++right;
        }
        return total;
    }
};
```
##### 695. 岛屿的最大面积
> 给定一个包含了一些 0 和 1的非空二维数组 grid , 一个 岛屿 是由四个方向 (水平或垂直) 的 1 (代表土地) 构成的组合。你可以假设二维矩阵的四个边缘都被水包围着。
>
> 找到给定的二维数组中最大的岛屿面积。(如果没有岛屿,则返回面积为0。)
>
> **示例 1**:
>
> ```
> [[0,0,1,0,0,0,0,1,0,0,0,0,0],
> [0,0,0,0,0,0,0,1,1,1,0,0,0],
> [0,1,1,0,1,0,0,0,0,0,0,0,0],
> [0,1,0,0,1,1,0,0,1,0,1,0,0],
> [0,1,0,0,1,1,0,0,1,1,1,0,0],
> [0,0,0,0,0,0,0,0,0,0,1,0,0],
> [0,0,0,0,0,0,0,1,1,1,0,0,0],
> [0,0,0,0,0,0,0,1,1,0,0,0,0]]
> ```
>
>
> 对于上面这个给定矩阵应返回 6。注意答案不应该是11,因为岛屿只能包含水平或垂直的四个方向的‘1’。
>
> **示例 2**:
>
> ```
> [[0,0,0,0,0,0,0,0]]
> ```
>
>
> 对于上面这个给定的矩阵, 返回 0。
>
> **注意:** 给定的矩阵grid 的长度和宽度都不超过 50。
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/max-area-of-island
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
深度优先搜索。每次搜索到一个1前先把count置为0,然后统计,更新max。
- 时间复杂度Omn,空间Omn
##### 代码
```javascript
/**
* @param {number[][]} grid
* @return {number}
*/
var maxAreaOfIsland = function(grid) {
    // DFS flood fill: each visited land cell is zeroed so it is counted
    // exactly once while `area` accumulates the current island's size.
    // O(m*n) time; recursion depth O(m*n) worst case.
    const rows = grid.length, cols = grid[0].length;
    let best = 0;
    let area = 0;
    const flood = (r, c) => {
        if (r < 0 || c < 0 || r >= rows || c >= cols) return;
        if (grid[r][c] !== 1) return;  // water or already-sunk land
        area += 1;
        grid[r][c] = 0;  // sink the cell so it is never recounted
        flood(r + 1, c);
        flood(r - 1, c);
        flood(r, c + 1);
        flood(r, c - 1);
    };
    for (let r = 0; r < rows; r++) {
        for (let c = 0; c < cols; c++) {
            if (grid[r][c] === 1) {
                area = 0;
                flood(r, c);
                best = Math.max(best, area);
            }
        }
    }
    return best;
};
```
#### 814. 二叉树剪枝
> 给定二叉树根结点 root ,此外树的每个结点的值要么是 0,要么是 1。
>
> 返回移除了所有不包含 1 的子树的原二叉树。
>
> ( 节点 X 的子树为 X 本身,以及所有 X 的后代。)
>
> 示例1:
>
> ```
> 输入: [1,null,0,0,1]
> 输出: [1,null,0,null,1]
> 解释:
> 只有红色节点满足条件“所有不包含 1 的子树”。
> 右图为返回的答案。
> ```
>
> 
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/binary-tree-pruning
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
递归。基本条件是如果当前节点是null则不包含1,否则当前节点包含1的条件是左右子树中包含1或者当前节点包含1。递归过程中如果左右子树不包含1顺便将子树置为null。
- 时间复杂度On,空间Oh
##### 代码
```javascript
/**
 * 814. Binary Tree Pruning: remove every subtree that contains no 1.
 * containsOne() reports whether a subtree holds a 1 and, as a side effect,
 * detaches child subtrees that do not. O(n) time, O(h) stack.
 */
var pruneTree = function(root) {
    const containsOne = (node) => {
        if (node === null) return false;
        const leftHasOne = containsOne(node.left);
        const rightHasOne = containsOne(node.right);
        if (!leftHasOne) node.left = null;    // prune all-zero left subtree
        if (!rightHasOne) node.right = null;  // prune all-zero right subtree
        return node.val === 1 || leftHasOne || rightHasOne;
    };
    return containsOne(root) ? root : null;
};
```
#### 912. 排序数组
> 给定一个整数数组 `nums`,将该数组升序排列。
>
>
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/sort-an-array
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
多种排序一一尝试。
##### 插入排序
> 类似于摸扑克牌后的排序。每次摸到扑克牌,手中的扑克是有序的,需要将新扑克插入到适当的位置。
###### 特点
- 对于部分有序数组更快
- 对于数组元素较少时排序较快
- 空间复杂度O(1),时间复杂度O(n^2)
- 稳定排序
###### 代码
```c++
class Solution {
public:
    // Insertion sort: grow a sorted prefix, inserting each new element by
    // shifting larger prefix elements one slot to the right.
    // O(n^2) time, O(1) extra space, stable.
    vector<int> sortArray(vector<int>& nums) {
        for (size_t i = 1; i < nums.size(); ++i) {
            int key = nums[i];
            size_t j = i;
            while (j > 0 && nums[j - 1] > key) {
                nums[j] = nums[j - 1];
                --j;
            }
            nums[j] = key;  // key lands in its sorted position
        }
        return nums;
    }
};
```
##### 朴素希尔排序
> 插入排序的升级版。源于对部分有序的数组进行插入排序会比较快。
###### 特点
- 空间复杂度O(1),最差时间复杂度O(n^2)(甚至比插入还慢,分组都是有序的,一组时进行插入排序,不如不分组)
- **非朴素希尔排序(增量选择使用其他策略,不是简单地折半)时间复杂度为O(n^(3/2)) 或 O(n^(4/3))**
###### 算法
思路:将原始数组进行分组,每个组内进行插入排序。
分组思路:选取跨度,首先跨度是`length/2`,分成`length/2`个组,各组排好序后,跨度减半`length/4`,这时共有`length/4`个组,继续迭代,直到最后剩余一个组**排序后**结束。
###### 代码
```c++
class Solution {
public:
    // Shell sort with halving gaps: perform a gapped insertion sort for each
    // gap n/2, n/4, ..., 1. Worst case O(n^2) with this naive gap sequence;
    // O(1) extra space.
    vector<int> sortArray(vector<int>& nums) {
        int gap = nums.size();
        while (gap > 1) {
            gap /= 2;
            // Insertion-sort every gap-strided subsequence.
            for (int start = 0; start < gap; ++start) {
                for (int i = start + gap; i < (int)nums.size(); i += gap) {
                    int key = nums[i];
                    int j = i - gap;
                    while (j >= 0 && nums[j] > key) {
                        nums[j + gap] = nums[j];
                        j -= gap;
                    }
                    nums[j + gap] = key;
                }
            }
        }
        return nums;
    }
};
```
##### 选择排序
> 选择未排序元素中最小(或最大)的元素,放到已排好序的末尾
###### 特点
- 空间复杂度O(1),时间复杂度O(n^2)
- 不稳定排序
###### 代码
```c++
class Solution {
public:
    // Selection sort: repeatedly pick the minimum of the unsorted suffix and
    // swap it into place. O(n^2) time, O(1) space, not stable.
    vector<int> sortArray(vector<int>& nums) {
        // Fix: the original loop condition `i < nums.size() - 1` promotes
        // `i` to size_t, so for an empty vector size()-1 wraps to SIZE_MAX
        // and nums[0] was read out of bounds. Signed arithmetic avoids that.
        int n = nums.size();
        for (int i = 0; i + 1 < n; ++i) {
            int min_index = i;
            for (int j = i + 1; j < n; ++j) {
                if (nums[j] < nums[min_index]) {
                    min_index = j;
                }
            }
            int tmp = nums[i];
            nums[i] = nums[min_index];
            nums[min_index] = tmp;
        }
        return nums;
    }
};
```
##### 简单冒泡排序
> 像冒泡一样排序。
###### 特点
- 空间复杂度O(1),时间复杂度O(n^2)
- 稳定排序
###### 代码
```c++
class Solution {
public:
    // Bubble sort: each pass floats the largest remaining element to the end
    // of the unsorted prefix. O(n^2) time, O(1) space, stable.
    vector<int> sortArray(vector<int>& nums) {
        int n = nums.size();
        for (int pass = 0; pass < n; ++pass) {
            int limit = n - pass - 1;  // everything past limit is sorted
            for (int j = 0; j < limit; ++j) {
                if (nums[j] > nums[j + 1]) {
                    int t = nums[j];
                    nums[j] = nums[j + 1];
                    nums[j + 1] = t;
                }
            }
        }
        return nums;
    }
};
```
##### 优化冒泡排序
###### 第一步优化
> 若冒泡过程中数组已经有序,则无需执行后续操作了
####### 思路
若一轮冒泡中没有交换,那说明已经有序了。则添加标志位即可判断此情况。
####### 代码
```c++
class Solution {
public:
    // Bubble sort with an early-exit flag: if a full pass performs no swap
    // the array is already sorted and the remaining passes are skipped.
    vector<int> sortArray(vector<int>& nums) {
        for (size_t i = 0; i < nums.size(); i++) {
            bool swapped = false;
            // j + i + 1 < size() is the underflow-safe form of j < size()-i-1.
            for (size_t j = 0; j + i + 1 < nums.size(); j++) {
                if (nums[j] > nums[j + 1]) {
                    int t = nums[j];
                    nums[j] = nums[j + 1];
                    nums[j + 1] = t;
                    swapped = true;
                }
            }
            if (!swapped) {
                break;  // pass was swap-free: already sorted
            }
        }
        return nums;
    }
};
```
###### 第二步优化
> 已经冒泡结束的有序部分长度,可能小于有序的部分,此时做了无用比较,如下图,4-8已经有序了,8是已经冒泡的部分,下一轮冒泡4-7还得比较,浪费了资源

####### 思路
记录一轮冒泡中最后交换的位置,这个位置就是有序的边界。
####### 代码
```c++
class Solution {
public:
    // Bubble sort, twice optimized: an early-exit flag plus a shrinking
    // `border` set to the index of the last swap — everything past the last
    // swap is already sorted, so later passes stop there.
    vector<int> sortArray(vector<int>& nums) {
        bool isSorted;
        // Fix: lastSwapIndex was declared uninitialized and read
        // (`border = lastSwapIndex`) even when the first pass made no swap,
        // which is undefined behavior. Start it at 0.
        int lastSwapIndex = 0;
        int border = (int)nums.size() - 1;
        for (size_t i = 0; i < nums.size(); i++)
        {
            isSorted = true;
            for (int j = 0; j < border; j++)
            {
                if (nums[j] > nums[j + 1])
                {
                    int tmp = nums[j];
                    nums[j] = nums[j + 1];
                    nums[j + 1] = tmp;
                    isSorted = false;
                    lastSwapIndex = j;
                }
            }
            border = lastSwapIndex;  // no swaps happen beyond this point
            if (isSorted)
            {
                break;
            }
        }
        return nums;
    }
};
```
##### 小范围或双射——计数排序
> 针对数值范围较小的数组,或者数组数值可以一一对应映射到较小范围的数组,可以采用计数排序
###### 特点
- 局限性较强,只适用于数值小范围或双射小范围的数组
- 时间复杂度和空间复杂度都为O(n)
###### 思路
利用有限范围数组进行计数,初始化为0。每遇见一个元素X,对应映射下标Y数组元素计数加一;最后下标Y映射为原来的数值X输出。
###### 代码
> 映射函数y=x,即数值x和数组下标y的关系是y=x的关系,其他情况类似,例如y=x-90等
```c++
class Solution {
public:
    // Counting sort with a fixed 10-slot table and identity mapping y = x.
    // NOTE(review): a[10] with a[nums[i]]++ assumes every value lies in
    // [0, 9]; anything outside that range indexes out of bounds — confirm
    // the input contract before reusing this snippet.
    vector<int> sortArray(vector<int>& nums) {
        int a[10];
        // Zero the counting table.
        for (size_t i = 0; i < 10; i++)
        {
            a[i]=0;
        }
        // Tally each value's frequency.
        for (size_t i = 0; i < nums.size(); i++)
        {
            a[nums[i]]++;
        }
        int k=0;
        // Rewrite the array in ascending order from the tallies.
        for (size_t i = 0; i < 10; i++)
        {
            for (size_t j = 0; j < a[i]; j++)
            {
                nums[k]=i;
                k++;
            }
        }
        return nums;
    }
};
// Demo driver; relies on <iostream>/<vector> being included elsewhere.
int main(int argc, const char** argv) {
    vector<int> a={9,2,5,7,3,4,8,7,0};
    Solution s;
    s.sortArray(a);
    for (size_t i = 0; i < a.size(); i++)
    {
        std::cout << a[i] << " ";
    }
    return 0;
}
```
##### 桶排序
> 类似计数排序。取不同的桶,每个桶放入同范围的数,桶内的元素自动排序。
###### 特点
- 时间复杂度O(n),空间复杂度O(n+m),m是桶的个数
###### 代码
```c++
class Solution {
public:
    // Counting ("bucket") sort over the value range [min, max]: tally each
    // value's frequency, then rewrite the array in ascending order.
    // O(n + range) time, O(range) extra space; only sensible when the range
    // is narrow.
    vector<int> sortArray(vector<int>& nums) {
        int lo = *min_element(nums.begin(), nums.end());
        int hi = *max_element(nums.begin(), nums.end());
        vector<int> counts(hi - lo + 1, 0);
        for (int v : nums) {
            counts[v - lo]++;  // shift values into [0, range)
        }
        int out = 0;
        for (int b = 0; b < (int)counts.size(); b++) {
            for (int c = 0; c < counts[b]; c++) {
                nums[out++] = b + lo;
            }
        }
        return nums;
    }
};
```
##### 归并排序
> 顾名思义,先递归,后合并。通过二分递归至每个子数组只有一个元素,然后每两个进行合并成一个较大数组,然后回溯合并较大数组成更大数组,循环往复,直到结束
###### 特点
- 稳定排序
- 空间复杂度O(n),时间复杂度O(nlogn)
- 类似于二叉树的层次遍历(从下向上)
###### 算法
- `递归`
1. `给定数组边界:left、right`
2. `中间值mid=left+(right-left)/2`
3. `递归左边[left,mid]`
4. `递归右边[mid+1,right]`
5. `合并[left,right]`
- `合并`
> 即合并[left,mid]和[mid+1,right]两个有序数组
###### 代码
```c++
class Solution {
public:
    // 912. Merge sort: recursively split, then merge the sorted halves.
    // Stable; O(n log n) time, O(n) auxiliary space.
    vector<int> sortArray(vector<int>& nums) {
        mergeSort(nums, 0, (int)nums.size() - 1);
        return nums;
    }
    // Sort nums[left..right] inclusive.
    void mergeSort(vector<int>& nums, int left, int right) {
        if (left >= right) return;  // 0 or 1 element: already sorted
        int mid = left + (right - left) / 2;
        mergeSort(nums, left, mid);
        mergeSort(nums, mid + 1, right);
        merge(nums, left, right, mid);
    }
    // Merge the adjacent sorted runs nums[left..mid] and nums[mid+1..right].
    void merge(vector<int>& nums, int left, int right, int mid) {
        vector<int> buffer;
        int a = left, b = mid + 1;
        while (a <= mid || b <= right) {
            // Take from the left run while it is non-empty and its head is
            // <= the right head (<= keeps the sort stable).
            bool takeLeft = (b > right) || (a <= mid && nums[a] <= nums[b]);
            buffer.push_back(takeLeft ? nums[a++] : nums[b++]);
        }
        for (int k = left; k <= right; k++) {
            nums[k] = buffer[k - left];
        }
    }
};
```
##### 快速排序
> 分治策略。冒泡的升级版,一堆数据一起冒泡,基准值当水面。
###### 特征
- 时间复杂度O(nlogn),最差时间复杂度O(n^2)
- 不稳定排序
###### 算法
1. `选基准值pivot`
2. `大于pivot的放右边,小于等于的放左边`
3. `左边和右边分治迭代`
###### 代码
####### 递归版本
```c++
class Solution {
public:
    // 912. Recursive quicksort with nums[start] as the pivot.
    // Average O(n log n), worst case O(n^2) (e.g. already-sorted input).
    // NOTE(review): for an empty vector nums.size()-1 wraps before the
    // int conversion; on typical platforms it becomes -1 and start>=end
    // returns immediately, but confirm inputs are non-empty.
    vector<int> sortArray(vector<int>& nums) {
        quickSort(nums,0,nums.size()-1);
        return nums;
    }
    // Sort nums[start..end] inclusive by partition-and-recurse.
    void quickSort(vector<int>& nums,int start,int end){
        if(start>=end) return;
        int pivotIndex=partition(nums,start,end);
        quickSort(nums,start,pivotIndex-1);
        quickSort(nums,pivotIndex+1,end);
    }
    // Scan-and-swap partition: `right` moves left over elements > pivot,
    // `left` moves right over elements <= pivot, the pair is swapped, and
    // when the scans meet the pivot settles at the crossing point.
    int partition(vector<int>& nums,int start,int end){
        int pivot=nums[start];
        int left=start;
        int right=end;
        while(left!=right){
            while(right>left&&nums[right]>pivot){
                right--;
            }
            while(left<right&&nums[left]<=pivot){
                left++;
            }
            int tmp=nums[left];
            nums[left]=nums[right];
            nums[right]=tmp;
        }
        // left == right: put the pivot into its final slot.
        nums[start]=nums[left];
        nums[left]=pivot;
        return left;
    }
};
```
####### 迭代版本
> 用栈模拟函数栈,保存所需参数即可
```c++
// [start, end] range still awaiting partitioning.
struct Border{
    int start;
    int end;
};
class Solution {
public:
    // 912. Iterative quicksort: an explicit stack of ranges replaces the
    // recursion. Average O(n log n) time, O(log n) stack space.
    vector<int> sortArray(vector<int>& nums) {
        if (nums.empty()) return nums;  // guard: size()-1 would wrap below
        quickSort(nums, 0, nums.size() - 1);
        return nums;
    }
    void quickSort(vector<int>& nums, int start, int end) {
        // Fix: the original pushed `new Border` and never deleted the popped
        // nodes, leaking one allocation per processed range. Storing Border
        // by value removes the heap traffic entirely.
        stack<Border> s;
        s.push(Border{start, end});
        while (!s.empty()) {
            Border cur = s.top();
            s.pop();
            int pivotIndex = partition(nums, cur.start, cur.end);
            // Only sub-ranges with at least two elements need more work.
            if (cur.start < pivotIndex - 1) {
                s.push(Border{cur.start, pivotIndex - 1});
            }
            if (cur.end > pivotIndex + 1) {
                s.push(Border{pivotIndex + 1, cur.end});
            }
        }
    }
    // Scan-and-swap partition around pivot nums[start]; returns the pivot's
    // final index.
    int partition(vector<int>& nums, int start, int end) {
        int pivot = nums[start];
        int left = start;
        int right = end;
        while (left != right) {
            while (right > left && nums[right] > pivot) {
                right--;
            }
            while (left < right && nums[left] <= pivot) {
                left++;
            }
            int tmp = nums[left];
            nums[left] = nums[right];
            nums[right] = tmp;
        }
        nums[start] = nums[left];
        nums[left] = pivot;
        return left;
    }
};
```
##### 堆排序
> 堆是完全二叉树,可直接用数组来存。堆排序基于堆。
###### 特点
- 不稳定
- 空间复杂度O(1),时间复杂度O(nlogn)
###### 算法
1. `构建堆`
2. `替换堆顶和末尾元素,末尾部分已有序`
3. `长度减一`
4. `调整,继续执行2,直到全部有序`
###### 代码
```javascript
/**
* @param {number[]} nums
* @return {number[]}
*/
var sortArray = function(nums) {
    // Heap sort: build a max-heap in place, then repeatedly swap the root
    // (current maximum) into the last unsorted slot and sift the new root
    // down. O(n log n) time, O(1) extra space, not stable.
    const siftDown = (arr, parent, len) => {
        const value = arr[parent];
        let child = 2 * parent + 1;
        while (child < len) {
            // Choose the larger of the two children.
            if (child + 1 < len && arr[child + 1] > arr[child]) child += 1;
            if (value >= arr[child]) break;  // heap property restored
            arr[parent] = arr[child];
            parent = child;
            child = 2 * parent + 1;
        }
        arr[parent] = value;
    };
    // Heapify: sift down every internal node, last parent first.
    for (let i = Math.trunc((nums.length - 2) / 2); i >= 0; i--) {
        siftDown(nums, i, nums.length);
    }
    // Extract the maximum one element at a time.
    for (let end = nums.length - 1; end > 0; end--) {
        [nums[0], nums[end]] = [nums[end], nums[0]];
        siftDown(nums, 0, end);
    }
    return nums;
};
```
#### 5296. 两棵二叉搜索树中的所有元素
> 给你 root1 和 root2 这两棵二叉搜索树。
>
> 请你返回一个列表,其中包含 两棵树 中的所有整数并按 升序 排序。
>
>
>
> **示例 1:**
>
> 
>
> ```
> 输入:root1 = [2,1,4], root2 = [1,0,3]
> 输出:[0,1,1,2,3,4]
> ```
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/all-elements-in-two-binary-search-trees
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
二叉搜索树中序遍历有序,假设得到有序数组后就类似于合并两个有序数组,这儿使用一个数组暂存第一棵树的遍历结果,第二棵树遍历同时进行构建结果。
- 时间复杂度O(M+N),空间O(M+N)
##### 代码
```javascript
/**
* Definition for a binary tree node.
* function TreeNode(val) {
* this.val = val;
* this.left = this.right = null;
* }
*/
/**
* @param {TreeNode} root1
* @param {TreeNode} root2
* @return {number[]}
*/
var getAllElements = function(root1, root2) {
let tmp=[];
let s=[];
let st=new Set();
if(root1) s.push(root1);
while(s.length!==0){
let cur=s.pop();
if(!st.has(cur)){
st.add(cur);
if(cur.right) s.push(cur.right);
s.push(cur);
if(cur.left) s.push(cur.left);
}else{
tmp.push(cur.val);
}
}
let result=[];
if(root2) s.push(root2);
while(s.length!==0){
let cur=s.pop();
if(!st.has(cur)){
st.add(cur);
if(cur.right) s.push(cur.right);
s.push(cur);
if(cur.left) s.push(cur.left);
}else{
while(tmp.length>0&&tmp[0]<=cur.val) result.push(tmp.shift());
result.push(cur.val);
}
}
for(let i=0;i<tmp.length;i++) result.push(tmp[i]);
return result;
};
```
### 困难
#### 10. 正则表达式匹配
> 给你一个字符串 s 和一个字符规律 p,请你来实现一个支持 '.' 和 '*' 的正则表达式匹配。
>
> ```
> '.' 匹配任意单个字符
> '*' 匹配零个或多个前面的那一个元素
> ```
>
>
> 所谓匹配,是要涵盖 整个 字符串 s的,而不是部分字符串。
>
> **说明:**
>
> - s 可能为空,且只包含从 a-z 的小写字母。
>
> - p 可能为空,且只包含从 a-z 的小写字母,以及字符 . 和 *。
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/regular-expression-matching
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
tricky,利用JavaScript正则表达式。
##### 代码
```javascript
/**
* @param {string} s
* @param {string} p
* @return {boolean}
*/
var isMatch = function(s, p) {
    // Delegates to the JS regex engine: the problem's pattern grammar
    // ('.', '*', lowercase literals) is a subset of JS regex syntax, so
    // anchoring with ^...$ gives whole-string matching for free.
    const anchored = `^${p}$`;
    return new RegExp(anchored).test(s);
};
```
#### 32. 最长有效括号
> 给定一个只包含 '(' 和 ')' 的字符串,找出最长的包含有效括号的子串的长度。
>
> **示例 1:**
>
> ```
> 输入: "(()"
> 输出: 2
> 解释: 最长有效括号子串为 "()"
> ```
>
> **示例 2:**
>
> ```
> 输入: ")()())"
> 输出: 4
> 解释: 最长有效括号子串为 "()()"
> ```
>
> 来源:力扣(LeetCode)
> 链接:https://leetcode-cn.com/problems/longest-valid-parentheses
> 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
##### 思路
利用栈,就像检测括号有效性那样,如果当前右括号匹配栈顶左括号,就将左括号弹栈。只不过这次栈内的元素不仅仅是`()`,另外还添加了当前括号的下标。最终将检测栈内剩余的元素两两相邻的下标之差就是合法括号的长度(注意栈内第一个元素前和最后一个元素后)。结果就是这些差值中的最大值。
- 时间复杂度On,空间On
##### 代码
```javascript
/**
* @param {string} s
* @return {number}
*/
var longestValidParentheses = function(str) {
const m={
')':'('
};
let s=[];
for(let i=0;i<str.length;i++){
if(s.length===0){
s.push({
idx:i,
v:str[i]
});
continue;
}
if(s[s.length-1].v===m[str[i]]){
s.pop();
}else{
s.push({
idx:i,
v:str[i]
});
}
}
if(s.length===0) return str.length;
let max=s[0].idx;
for(let i=1;i<s.length;i++){
let cur=s[i].idx-s[i-1].idx-1;
if(cur>max) max=cur;
}
return str.length-s[s.length-1].idx-1>max?str.length-s[s.length-1].idx-1:max;
};
```
#### 41. 缺失的第一个正数
> 给定一个未排序的整数数组,找出其中没有出现的最小的正整数。
##### 思路
n个数,没有出现最小的正整数的范围是[1,n+1]。创建一个大小为n的数组用于标记,出现过的标记为1。全部标记完后从小向大找,第一个未被标记的对应值为目标值,如果都已经标记则结果就是n+1
- 时间复杂度On,空间On
##### 代码
```javascript
/**
* @param {number[]} nums
* @return {number}
*/
var firstMissingPositive = function(nums) {
    // For n numbers the answer always lies in [1, n+1]: mark which of 1..n
    // occur, then return the first unmarked value (or n+1 if all occur).
    // O(n) time, O(n) space.
    const n = nums.length;
    const seen = new Array(n).fill(false);
    for (const v of nums) {
        if (v >= 1 && v <= n) seen[v - 1] = true;  // ignore out-of-range values
    }
    const missing = seen.indexOf(false);
    return missing === -1 ? n + 1 : missing + 1;
};
```
#### 1250. 检查「好数组」
> 给你一个正整数数组 `nums`,你需要从中任选一些子集,然后将子集中每一个数乘以一个 **任意整数**,并求出他们的和。
>
> 假如该和结果为 `1`,那么原数组就是一个「**好数组**」,则返回 `True`;否则请返回 `False`。
>
> **示例 1:**
>
> ```
> 输入:nums = [12,5,7,23]
> 输出:true
> 解释:挑选数字 5 和 7。
> 5*3 + 7*(-2) = 1
>
>
> ```
##### 思路
规律是如果这些数的最大公约数是1,那么就是好数组。规律以后再谈
- 时间复杂度On,空间O1
##### 代码
```c++
class Solution {
public:
    // 1250. An array is "good" iff some integer combination of its elements
    // sums to 1, which by Bezout's identity happens exactly when the GCD of
    // all elements is 1. O(n log max) time, O(1) space.
    bool isGoodArray(vector<int>& nums) {
        int g = nums[0];
        for (size_t i = 1; i < nums.size(); ++i) {
            g = gcd(g, nums[i]);
            if (g == 1) break;  // GCD can only shrink; 1 is final
        }
        return g == 1;
    }
    // Euclid's algorithm on positive integers.
    int gcd(int a, int b) {
        while (b != 0) {
            int r = a % b;
            a = b;
            b = r;
        }
        return a;
    }
};
```
<file_sep>function permute(nums: number[]): number[][] {
const result = [];
function dfs(cur: number[], used: Set<number>) {
if (cur.length === nums.length) {
result.push(cur);
return;
}
for (let i = 0;i < nums.length; i++) {
if (used.has(i)) continue;
used.add(i);
dfs([...cur,nums[i]], used);
used.delete(i);
}
}
dfs([],new Set());
return result;
};
<file_sep>/**
* @param {string} s
* @return {character}
*/
var firstUniqChar = function (s) {
let m = new Map();
for (let c of s) m.set(c, m.has(c) ? m.get(c) + 1 : 1);
for (let c of s) if (m.get(c) === 1) return c;
return ' ';
};<file_sep>/**
* @param {number[]} cost
* @return {number}
*/
var minCostClimbingStairs = function (cost) {
//dp[i]=Math.min(dp[i-1],dp[i-2])+cost[i]
let dp = Array(cost.length);
[dp[0], dp[1]] = cost;
for (let i = 2; i < dp.length; i++) {
dp[i] = Math.min(dp[i - 1], dp[i - 2]) + cost[i];
}
return Math.min(dp[dp.length - 1], dp[dp.length - 2])
};<file_sep>/**
* @param {string} s
* @return {boolean}
*/
var validPalindrome = function (s) {
    // 680. Valid Palindrome II: at most one character may be deleted.
    // Two pointers close in until the first mismatch; the string is valid
    // iff skipping either mismatched end yields a palindrome. O(n) time.
    const isPalindromeRange = (lo, hi) => {
        while (lo < hi && s[lo] === s[hi]) {
            lo += 1;
            hi -= 1;
        }
        return lo >= hi;
    };
    let lo = 0;
    let hi = s.length - 1;
    while (lo < hi && s[lo] === s[hi]) {
        lo += 1;
        hi -= 1;
    }
    // Try deleting s[hi] or s[lo] at the mismatch point.
    return isPalindromeRange(lo, hi - 1) || isPalindromeRange(lo + 1, hi);
};
<file_sep>// Definition for a binary tree node.
// #[derive(Debug, PartialEq, Eq)]
// pub struct TreeNode {
// pub val: i32,
// pub left: Option<Rc<RefCell<TreeNode>>>,
// pub right: Option<Rc<RefCell<TreeNode>>>,
// }
//
// impl TreeNode {
// #[inline]
// pub fn new(val: i32) -> Self {
// TreeNode {
// val,
// left: None,
// right: None
// }
// }
// }
use std::rc::Rc;
use std::cell::RefCell;
impl Solution {
pub fn construct_maximum_binary_tree(nums: Vec<i32>) -> Option<Rc<RefCell<TreeNode>>> {
match nums.len() {
0 => None,
_ => {
let max_number = *nums.iter().max().unwrap();
let max_index = nums.iter().position(|&n| n == max_number).unwrap();
let left = Solution::construct_maximum_binary_tree((&nums[..max_index]).to_vec());
let right = Solution::construct_maximum_binary_tree((&nums[max_index+1..]).to_vec());
let mut node = TreeNode::new(max_number);
node.left = left;
node.right = right;
Some(Rc::new(RefCell::new(node)))
}
}
}
}
<file_sep>/**
* Definition for a binary tree node.
* class TreeNode {
* val: number
* left: TreeNode | null
* right: TreeNode | null
* constructor(val?: number, left?: TreeNode | null, right?: TreeNode | null) {
* this.val = (val===undefined ? 0 : val)
* this.left = (left===undefined ? null : left)
* this.right = (right===undefined ? null : right)
* }
* }
*/
function isValidBST(root: TreeNode | null): boolean {
    // Recursive range check: every node must lie strictly between the
    // closest ancestor bounds; a null bound means unbounded on that side.
    const inRange = (node: TreeNode | null, lower: TreeNode | null, upper: TreeNode | null): boolean => {
        if (node === null) return true;
        if (upper !== null && node.val >= upper.val) return false;
        if (lower !== null && node.val <= lower.val) return false;
        // Left subtree inherits `node` as upper bound, right as lower bound.
        return inRange(node.left, lower, node) && inRange(node.right, node, upper);
    };
    return inRange(root, null, null);
};
<file_sep>use std::collections::HashSet;
impl Solution {
pub fn permute(nums: Vec<i32>) -> Vec<Vec<i32>> {
fn backtrace(mut track: &mut Vec<i32>, nums: &Vec<i32>, mut visited: &mut HashSet<i32>, mut result: &mut Vec<Vec<i32>>) {
if track.len() == nums.len() {
result.push(track.clone());
return;
}
for &x in &nums[..] {
if visited.contains(&x) {
continue;
}
track.push(x);
visited.insert(x);
backtrace(&mut track, &nums, &mut visited, &mut result);
visited.remove(&x);
track.pop();
}
}
let mut result = vec![];
let mut track = vec![];
let mut visited = HashSet::new();
backtrace(&mut track, &nums, &mut visited, &mut result);
return result;
}
}
<file_sep>function sortedSquares(nums: number[]): number[] {
let l = 0, r = nums.length - 1;
const result: Array<number> = [];
while(l <= r) {
const lVal = nums[l] ** 2;
const rVal = nums[r] ** 2;
if (lVal > rVal) {
result.push(lVal);
l++;
} else {
result.push(rVal);
r--;
}
}
return result.reverse();
};
<file_sep>/**
* Definition for a binary tree node.
* function TreeNode(val) {
* this.val = val;
* this.left = this.right = null;
* }
*/
/**
* @param {TreeNode} root
* @return {number[][]}
*/
// NOTE(review): despite the name, this produces ZIGZAG level order: two
// stacks are swapped each level and the children are pushed left-right /
// right-left alternately, so popping reverses direction every level
// (剑指 Offer 32-III style). Confirm the name against callers' expectations.
var levelOrder = function (root) {
    let result = [];
    if (!root) return result;
    // s1/s2 hold the current and the next level; `flag` selects which is which.
    let s1 = [], s2 = [], flag = true;
    s1.push(root);
    let curS, anotherS, size;
    // Comma expression: bind anotherS/curS for this level, then loop while
    // the current-level stack is non-empty.
    while (size = (anotherS = !flag ? s1 : s2, curS = flag ? s1 : s2).length) {
        let tmp = [];
        while (size--) {
            let x = curS.pop();
            // Push order flips every level so the stack pop reverses direction.
            flag ? (x.left && anotherS.push(x.left), x.right && anotherS.push(x.right))
                : (x.right && anotherS.push(x.right), x.left && anotherS.push(x.left))
            tmp.push(x.val);
        }
        result.push(tmp);
        flag = !flag;
    }
    return result;
}<file_sep>// Definition for a binary tree node.
// #[derive(Debug, PartialEq, Eq)]
// pub struct TreeNode {
// pub val: i32,
// pub left: Option<Rc<RefCell<TreeNode>>>,
// pub right: Option<Rc<RefCell<TreeNode>>>,
// }
//
// impl TreeNode {
// #[inline]
// pub fn new(val: i32) -> Self {
// TreeNode {
// val,
// left: None,
// right: None
// }
// }
// }
use std::rc::Rc;
use std::cell::RefCell;
impl Solution {
pub fn lowest_common_ancestor(root: Option<Rc<RefCell<TreeNode>>>, p: Option<Rc<RefCell<TreeNode>>>, q: Option<Rc<RefCell<TreeNode>>>) -> Option<Rc<RefCell<TreeNode>>> {
if root == None {
return None;
}
if root == p {
return root.clone();
}
if root == q {
return root.clone();
}
let root = root.unwrap();
let left = Self::lowest_common_ancestor(root.borrow().left.clone(), p.clone(), q.clone());
let right = Self::lowest_common_ancestor(root.borrow().right.clone(), p.clone(), q.clone());
match (left.clone(), right.clone()) {
(Some(_), Some(_)) => Some(root.clone()),
_ => if left.is_some() { left } else { right }
}
}
}
<file_sep># Coral的代码笔记
## 主要内容
**之前一直在用markdown来写题解,使用docsify渲染页面,写的太过冗余**
之后只会上传代码,有疑问的或者复杂的题会将重点写在项目ISSUE中。
- LeetCode刷题记录
- 剑指offer
- 开发小笔记
- 面试编程题记录
- CSS常见问题实现
- Codewars刷题记录(主要用于熟悉语言)
[网页阅读【之前的题解】](https://scarboroughcoral.github.io/Notes/#/)
<file_sep>impl Solution {
pub fn is_bipartite(graph: Vec<Vec<i32>>) -> bool {
let mut ok = true;
let n = graph.len();
let mut visited = vec![false; n];
let mut colors = vec![false; n];
fn traverse(graph: &Vec<Vec<i32>>, mut visited: &mut Vec<bool>, mut ok: &mut bool, mut colors: &mut Vec<bool>, start: i32) {
if !*ok {
return;
}
let idx = start as usize;
for &neighbor in &graph[idx] {
let neighbor_idx = neighbor as usize;
if !visited[neighbor_idx] {
visited[neighbor_idx] = true;
colors[neighbor_idx] = !colors[idx];
traverse(&graph, &mut visited, &mut ok, &mut colors, neighbor);
} else {
if colors[neighbor_idx] == colors[idx] {
*ok = false;
}
}
}
}
for i in 0..n {
traverse(&graph, &mut visited, &mut ok, &mut colors, i as i32);
}
return ok;
}
}
<file_sep>/**
* Definition for singly-linked list.
* struct ListNode {
* int val;
* ListNode *next;
* ListNode(int x) : val(x), next(NULL) {}
* };
*/
#include <queue>
#include <vector>
using namespace std;
struct ListNode
{
int val;
ListNode *next;
ListNode(int x) : val(x), next(NULL) {}
};
class Solution
{
struct Node
{
int val;
ListNode *p;
bool operator<(const Node &rhs) const
{
return val > rhs.val;
}
};
priority_queue<Node> q;
public:
ListNode *mergeKLists(vector<ListNode *> &lists)
{
for (auto l : lists)
{
if (l)
q.push({l->val, l});
}
ListNode result;
ListNode *tail = &result;
while (!q.empty())
{
auto p = q.top();
q.pop();
tail->next = p.p;
tail = tail->next;
if (p.p->next)
{
q.push({p.p->next->val, p.p->next});
if (q.size() == 1)
break;
}
}
return result.next;
}
};<file_sep># Definition for a binary tree node.
# class TreeNode:
# def __init__(self, val=0, left=None, right=None):
# self.val = val
# self.left = left
# self.right = right
class Solution:
    def findDuplicateSubtrees(self, root: Optional[TreeNode]) -> List[Optional[TreeNode]]:
        """Return one representative node per duplicated subtree (LeetCode 652).

        Each subtree is serialized post-order into a string key; a key seen
        for the second time marks a duplicate, reported exactly once.
        """
        duplicates = []
        seen = {}  # serialization -> number of times that subtree occurred

        def encode(node):
            # '#' marks an absent child so shape and values are unambiguous.
            if node is None:
                return '#'
            key = ','.join((encode(node.left), encode(node.right), str(node.val)))
            occurrences = seen.get(key, 0)
            if occurrences == 1:
                duplicates.append(node)  # second sighting: record once
            seen[key] = occurrences + 1
            return key

        encode(root)
        return duplicates
<file_sep>/**
* @param {number} n
* @return {string[]}
*/
var generateParenthesis = function (n) {
let res = [];
function dfs(cur, cntL, cntR) {
if (cntR < cntL) return;
if (cur.length === n << 1) {
res.push(cur);
return;
}
if (cntL > 0) dfs(cur + '(', cntL - 1, cntR);
if (cntR > 0) dfs(cur + ')', cntL, cntR - 1);
}
dfs('', n, n);
return res;
};<file_sep>export const summation = (num: number) => {
return num * (1 + num) / 2;
}<file_sep>function canFinish(numCourses: number, prerequisites: number[][]): boolean {
const visited: boolean[] = new Array(numCourses).fill(false);
const onPath: boolean[] = new Array(numCourses).fill(false);
let hasCircle = false;
const buildGraph = (prerequisites: number[][]) => {
const graph: number[][] = new Array(numCourses);
for (let i = 0; i < graph.length; i++) {
graph[i] = []
}
for(let [cur, pre] of prerequisites) {
graph[pre].push(cur);
}
return graph;
}
const graph = buildGraph(prerequisites);
const traverse = (start: number) => {
if (onPath[start]) {
hasCircle = true;
}
if (visited[start] || hasCircle) {
return;
}
onPath[start] = true;
visited[start] = true;
for (let x of graph[start]) {
traverse(x);
}
onPath[start] = false;
}
for (let i = 0; i< numCourses; i++){
traverse(i);
}
return !hasCircle;
};
<file_sep>use std::collections::HashMap;
impl Solution {
pub fn min_window(s: String, t: String) -> String {
let (t_len,s_len)=(t.len(),s.len());
if t_len==0||s_len==0||t_len>s_len {
return "".to_string();
}
let mut t_freq=HashMap::new();
for c in t.as_bytes(){
t_freq.entry(c).or_insert(0);
*t_freq.get_mut(&c).unwrap()+=1;
}
let (mut l,mut r,mut begin,mut min_len,mut d)=(0,0,0,s_len+1,t_len);
let sc=s.as_bytes();
while r<s_len{
let rc=sc[r];
if !t_freq.contains_key(&rc) {
r+=1;
continue;
}
if t_freq.get(&rc)>Some(&0) {
d-=1;
}
*t_freq.get_mut(&rc).unwrap()-=1;
r+=1;
while d==0{
let lc=sc[l];
if r-l<min_len{
min_len=r-l;
begin=l;
}
if !t_freq.contains_key(&lc) {
l+=1;
continue;
}
if t_freq.get(&lc)==Some(&0){
d+=1;
}
*t_freq.get_mut(&lc).unwrap()+=1;
l+=1;
}
}
if min_len==s_len+1{
"".to_string()
}else{
s.chars().skip(begin).take(min_len).collect()
}
}
}<file_sep>//实例类
export interface ProductA {
doA(): string;
}
export interface ProductB {
doB(): string;
}
export class ProductA1 implements ProductA {
doA(): string {
return "A1";
}
}
export class ProductA2 implements ProductA {
doA(): string {
return "A2";
}
}
export class ProductB1 implements ProductB {
doB(): string {
return "B1";
}
}
export class ProductB2 implements ProductB {
doB(): string {
return "B2";
}
}
//工厂类
export interface AbstractFactory {
createProductA(): ProductA;
createProductB(): ProductB;
}
export class Factory1 implements AbstractFactory {
createProductA(): ProductA {
return new ProductA1();
}
createProductB(): ProductB {
return new ProductB1();
}
}
export class Factory2 implements AbstractFactory {
createProductA(): ProductA {
return new ProductA2();
}
createProductB(): ProductB {
return new ProductB2();
}
}
<file_sep>import { Director, Builder1, Builder2, Builder, Product } from "./建造者";
test("测试建造者模式【导演模式】", () => {
let director = new Director(new Builder1());
expect(director.make("simple").listParts()).toBe("3");
expect(director.make("normal").listParts()).toBe("1-2-3");
director.changeBuilder(new Builder2());
expect(director.make("simple").listParts()).toBe("3");
expect(director.make("normal").listParts()).toBe("1+2+3");
});
test("测试建造者模式【自定义模式】", () => {
let builder: Builder = new Builder1();
let prod1: Product = builder
.buildStep1()
.buildStep3()
.buildStep2()
.buildStep1()
.getResult();
expect(prod1.listParts()).toBe("1-3-2-1");
builder = new Builder2();
let prod2: Product = builder.buildStep1().buildStep2().getResult();
expect(prod2.listParts()).toBe("1+2");
});
<file_sep>var minWindow = function(s: string, t: string): string {
let tFreq=new Map();
for(let c of t){
tFreq.set(c,tFreq.has(c)?tFreq.get(c)+1:1);
}
let begin=0,minLen=s.length+1;
let l=0,r=0;
let d=t.length;
while(r<s.length){
let rc=s[r];
if(!tFreq.has(rc)){
r++;
continue;
}
if(tFreq.get(rc)>0){
d--;
}
tFreq.set(rc,tFreq.get(rc)-1);
r++;
while(d===0){
let lc=s[l];
if(!tFreq.has(lc)){
l++;
continue;
}
if(r-l<minLen){
minLen=r-l;
begin=l;
}
if(tFreq.get(lc)===0){
d++;
}
tFreq.set(lc,tFreq.get(lc)+1);
l++;
}
}
return (minLen===s.length+1)?"":s.substr(begin,minLen);
};<file_sep>/**
* @param {number} k
* @param {number} n
* @return {number[][]}
*/
var combinationSum3 = function (k, n) {
let result = [];
function helper(cur, s, idx) {
if (s < 0) return;
if (cur.length === k) {
if (s === 0) result.push(cur);
return;
}
if (idx <= 9) {
helper([...cur, idx], s - idx, idx + 1);
helper([...cur], s, idx + 1);
}
}
helper([], n, 1);
return result;
};<file_sep>## LeetCode
- [🐹LeetCode 题解](lc.md)
- [🐮周赛]()
- [🐶剑指offer 题解](offer.md)
## JavaScript
- [🐍死磕lodash]()
- [🐷JavaScript特色]()
## CSS
- [⛄️30s CSS实现打卡记录]()
## 面试编程题记录
- [🌀2020前端面试编程题记录]()
## 其他
- [🐨codewars 题解](codewars.md)
<file_sep>/**
* Definition for a binary tree node.
* class TreeNode {
* val: number
* left: TreeNode | null
* right: TreeNode | null
* constructor(val?: number, left?: TreeNode | null, right?: TreeNode | null) {
* this.val = (val===undefined ? 0 : val)
* this.left = (left===undefined ? null : left)
* this.right = (right===undefined ? null : right)
* }
* }
*/
function maxDepthWithTraverse(root: TreeNode | null): number {
    // Traversal variant: carry the current depth down the recursion and
    // record the deepest node seen anywhere in the tree.
    let best = 0;
    const walk = (node: TreeNode | null, depth: number): void => {
        if (node === null) return;
        best = Math.max(best, depth);
        walk(node.left, depth + 1);
        walk(node.right, depth + 1);
    };
    walk(root, 1);
    return best;
};
function maxDepthWithInfer(root: TreeNode | null): number {
    // Bottom-up: a node's depth is 1 + the deeper of its two subtrees.
    // Fix: the recursive calls referenced an undefined `maxDepth`, which
    // throws a ReferenceError at runtime; recurse on this function itself.
    if (root === null) return 0;
    const left = maxDepthWithInfer(root.left);
    const right = maxDepthWithInfer(root.right);
    return Math.max(left, right) + 1;
};
<file_sep>var findDuplicate = function (nums: number[]): number {
let slow = 0,
fast = 0;
do {
slow = nums[slow];
fast = nums[nums[fast]];
} while (slow !== fast);
slow = 0;
while (slow != fast) {
slow = nums[slow];
fast = nums[fast];
}
return slow;
};
<file_sep>/**
* Definition for a binary tree node.
* function TreeNode(val) {
* this.val = val;
* this.left = this.right = null;
* }
*/
/**
* @param {TreeNode} root
* @return {boolean}
*/
var isValidBST = function (root) {
    // Iterative in-order traversal via the left-spine stack: a BST is valid
    // iff the in-order sequence is strictly increasing, so compare each
    // visited value with its predecessor. O(n) time, O(h) space.
    const stack = [];
    let node = root;
    let prev = Number.NEGATIVE_INFINITY;
    while (node !== null || stack.length > 0) {
        while (node !== null) {  // descend the left spine
            stack.push(node);
            node = node.left;
        }
        node = stack.pop();
        if (node.val <= prev) return false;  // not strictly increasing
        prev = node.val;
        node = node.right;
    }
    return true;
};
<file_sep>// Definition for a binary tree node.
// #[derive(Debug, PartialEq, Eq)]
// pub struct TreeNode {
// pub val: i32,
// pub left: Option<Rc<RefCell<TreeNode>>>,
// pub right: Option<Rc<RefCell<TreeNode>>>,
// }
//
// impl TreeNode {
// #[inline]
// pub fn new(val: i32) -> Self {
// TreeNode {
// val,
// left: None,
// right: None
// }
// }
// }
use std::rc::Rc;
use std::cell::RefCell;
impl Solution {
fn is_same(a: Option<Rc<RefCell<TreeNode>>>,b: Option<Rc<RefCell<TreeNode>>>)->bool{
if a==None&&b==None{
return true;
}
if a==None||b==None{
return false;
}
return if let (Some(a),Some(b))=(a,b){
a.borrow().val==b.borrow().val&&Solution::is_same(a.borrow().left.clone(),b.borrow().right.clone())&&Solution::is_same(a.borrow().right.clone(),b.borrow().left.clone())
}else{
false
};
}
pub fn is_symmetric(root: Option<Rc<RefCell<TreeNode>>>) -> bool {
match root{
None=>true,
Some(r)=>{
let r=r.borrow();
Solution::is_same(r.left.clone(),r.right.clone())
}
}
}
}<file_sep>/**
* @param {number[]} nums
* @return {number}
*/
var massage = function (nums) {
//dp[i]是以i作为最后元素的最长时间,dp[i]=max(dp[i-1],dp[i-2]+n[i],dp[i-3]+n[i]);
if (nums.length === 0) return 0;
if (nums.length === 1) return nums[0];
if (nums.length === 2) return Math.max(nums[0], nums[1]);
let dp = Array(nums.length);
dp[0] = nums[0];
dp[1] = nums[1];
dp[2] = nums[2] + nums[0];
for (let i = 3; i < nums.length; i++) {
dp[i] = Math.max(dp[i - 1], dp[i - 2] + nums[i], dp[i - 3] + nums[i]);
}
return Math.max(dp[dp.length - 1], dp[dp.length - 2]);
};<file_sep>function removeDuplicates(nums: number[]): number {
let i = 0, j = 1;
while(j < nums.length) {
while (j < nums.length && nums[i] === nums[j]) j++;
if (j >= nums.length) break;
i++;
nums[i] = nums[j];
}
return i + 1;
};
<file_sep>var decodeString = function (s: string): string {
let stack = [];
let i = 0;
while (i < s.length) {
if (isDigit(s[i])) {
let n = "";
while (isDigit(s[i])) n += s[i++];
stack.push(+n);
} else if (s[i] === "[" || isAlpha(s[i]))
while (i < s.length && (s[i] === "[" || isAlpha(s[i])))
stack.push(s[i++]);
else {
i++;
let c,
str = "";
while ((c = stack.pop()) !== "[") str = c + str;
let num = stack.pop() as number;
stack.push(str.repeat(num));
}
}
return stack.join("");
};
/** True when `n` contains a decimal digit (callers pass single characters). */
function isDigit(n: string): boolean {
  return /[0-9]/.test(n);
}
/** True when `c` contains an ASCII letter (callers pass single characters). */
function isAlpha(c: string): boolean {
  return /[a-z]/i.test(c);
}
<file_sep># 计算机系统
## 内存管理
### 内存分配
**内存分配和释放的最佳实践是分离链表加平衡二叉搜索树,实际上可以看做是一个HashMap**
> 提高处理内存申请和释放操作的吞吐量(单位时间满足请求),提高内存利用率(减少内部碎片和外部碎片)。两者不可能同时很好满足,需要适当衡量。
#### 需要解决的问题
##### 0x01. free内存时,如何通过指针获取该内存块的大小?
需要额外的空间。通过在申请块的头部(header field)字添加标记,标记此区域的大小

##### 0x02. 如何记录未被分配的块?
一共有4种方法。
- 隐式链表(申请需要O(n)复杂度,实际不会使用,可能用于特殊程序)
> 连接所有的块。需要标记当前块是已分配的还是空闲的。


**优化后:**

- 显示链表
> 只连接空闲块。需要额外的块来存储指针。


- 单独的链表记录(Segregated free list)
> 不同大小量级的块记录到不同的链表中,申请时酌情分配。
- 平衡二叉树(eg.红黑树)记录
> 平衡的二分查找树(BBST)存储。key是大小,value是地址
##### 0x03. 如何找到一个合适的空闲块来满足申请的请求(隐式链表示例)
常见三种方式
- 首次匹配
> 每次都从链表开始搜索满足申请的空闲块,遇到第一个合适的块就分配。实际应用中链表开始会碎片化。
- 二次匹配
> 每次查找都从上次完成申请时的块开始搜索,遇到第一个合适的就分配。比首次分配更快,因为不用重复搜索无用块。实际可能碎片化更严重。
- 最佳匹配
> 遍历链表,找到满足的最小块,然后分配。这样提高了内存利用率。大多数情况比首次分配要慢。
##### 0x04. 释放内存块后的合并问题
**合并策略:**
- 立即合并:free时合并
- 推迟合并:外部碎片太多时合并;malloc需要块时合并
**一些合并情况:**
- 对于情况1,修改为空闲,直接释放。
- 对于情况2,修改为空闲,合并后续块,并修改大小
- 对于情况3,需要知道前驱块的大小才能合并
- 对于情况4,也需要知道前驱块大小才能合并

对于情况3和4,需要修改块的结构。

> 类似双向链表,这样可以通过上一个块的尾部来反向访问隐式链表了。
**进一步优化,已申请的块不需要尾部**
> 利用块大小是4字的整数倍的块,低2位空闲。将这两位当做标志位。

### 垃圾回收
#### 主要问题
- 内存管理器如何知道哪些内存是可以清理的
> 如果没有指针指向他,那么这块内存就不能被使用。
#### 可以把内存看做一个有向图
- 每个内存块是一个结点
- 每个指针是一条边
- 不在堆里的结点称为根节点。(可能是全局变量、寄存器变量、栈中的变量)
- 如果可以从根节点到达堆里的某个节点,那么称这个堆里的节点是可达的
- 如果堆中的节点无法通过任意一个根节点到达,那么是不可达的

#### 典型垃圾回收算法
##### 0x01. 标记清除(Mark & Sweep)
> 用一个标记位来标记内存块
1. 标记:对于所有根节点,从根节点开始标记所有可达的内存块
2. 清除:清除所有未被标记的内存块
Mark&Sweep代码
- Mark
``` c
//对于每个根节点,都执行一遍
ptr mark(ptr p){
if(!is_ptr(p)) return;//如果不是指针类型,直接返回
if(markBitSet(p)) return;//如果已经标记,则返回
setMarkBit(p);//标记
for(i=0;i<length(p);i++)//该节点内部的所有指针,DFS标记
mark(p[i]);
return;
}
```
- Sweep
```c
//遍历堆
ptr sweep(ptr p,ptr end){
while(p<end){//遍历堆中所有对象
if(markBitSet(p))//如果已经标记则清除标记
clearMarkBit();
else if(allocateBitSet(p))//否则,如果该区域是allocated则free
free(p);
p+=length(p);//下一个内存块
}
}
```
##### 0x02. 引用计数 (Reference counting)
##### 0x03. 复制算法(Copying collection)
##### 0x04. 分代收集器(Generational Collectors)
> 基于生命周期的
## IO
- 所有外设都看做文件
- 描述符表(进程独有)->打开文件表(所有进程共享)->v-node表(所有进程共享)
- 描述符表表项用文件描述符索引;每个描述符表表项指向打开文件表的一个表项,这个表项记录了当前打开的文件的状态;每个打开文件表表项指向v-node表表项。
- 硬盘上相同的文件可能对应多个打开文件表表项,比如open两次相同文件并读取,两个读取位置不同,读取位置记录在打开文件表表项中。
- 两个描述符表项最终引用相同的文件可以用来实现文件共享。
- v-node表表项存了文件的状态
- 啊
### IO多路复用
> 笔记源于事件驱动的服务器,区别于进程和线程的服务器
- 服务器维护活动连接的集合(array of connfd's)
- 循环:
- 判断哪个描述符connfd或listenfd有pending的数据输入
- eg,利用select函数
- pending就是一个事件
- 如果listenfd有输入,那就accept
- 将新的connfd加入到连接集合中
- 给所有pending的提供服务

## 并发
### 线程
> 可类比进程
- 每个线程 都有自己的逻辑控制流
- 每个线程共享同样的代码、数据和内核上下文
- 每个线程都有自己的栈和局部变量
- 不会隔离其他线程,其他线程有可能会访问
- 每个线程都有一个tid

<file_sep>/**
* Definition for a binary tree node.
* class TreeNode {
* val: number
* left: TreeNode | null
* right: TreeNode | null
* constructor(val?: number, left?: TreeNode | null, right?: TreeNode | null) {
* this.val = (val===undefined ? 0 : val)
* this.left = (left===undefined ? null : left)
* this.right = (right===undefined ? null : right)
* }
* }
*/
// Iterative pre-order traversal with an explicit stack.
// Values are emitted while descending the left spine (root before children);
// `visited` remembers the last fully-processed node so that each node's
// right subtree is expanded exactly once (post-order style stack discipline).
function preorderTraversal(root: TreeNode | null): number[] {
  let visited: TreeNode | null = null;
  const stk: Array<TreeNode | null> = [];
  const result = [];
  // Walk down the left spine from `node`, recording each value on the way.
  const pushLeft = (node: TreeNode | null) => {
    let p = node;
    while(p) {
      result.push(p.val);
      stk.push(p);
      p = p.left;
    }
  };
  pushLeft(root);
  while(stk.length !== 0) {
    const cur = stk[stk.length - 1];
    // Left subtree finished and right subtree untouched: descend right.
    if ((cur.left === null || cur.left === visited) && cur.right !== visited) {
      pushLeft(cur.right);
    }
    // Right subtree finished (or absent): this node is done, pop it.
    if (cur.right === null || cur.right === visited) {
      visited = stk.pop();
    }
  }
  return result;
};
<file_sep>class Solution {
public:
int subarraysDivByK(vector<int>& A, int K) {
unordered_map<int,int> m;
int ans=0,pre=0;
m[0]=1;
for(auto x:A){
pre=(pre+x)%K;
if(pre<0) pre+=K;
if(m.count(pre)) ans+=m[pre],m[pre]++;
else m[pre]=1;
}
return ans;
}
};<file_sep>function spiralOrder(matrix: number[][]): number[] {
  // Shrink the [top,bottom] x [left,right] window inward, emitting one edge
  // of the spiral per pass; each bounds check detects window exhaustion.
  let ans: number[] = [];
  if (matrix.length === 0) return ans;
  let top = 0,
    left = 0,
    bottom = matrix.length - 1,
    right = matrix[0].length - 1;
  while (true) {
    for (let i = left; i <= right; i++) ans.push(matrix[top][i]);
    top++;
    if (top > bottom) break;
    for (let i = top; i <= bottom; i++) ans.push(matrix[i][right]);
    right--;
    if (right < left) break;
    for (let i = right; i >= left; i--) ans.push(matrix[bottom][i]);
    bottom--;
    if (bottom < top) break;
    for (let i = bottom; i >= top; i--) ans.push(matrix[i][left]);
    left++;
    if (left > right) break;
  }
  return ans;
}
<file_sep>
class Solution
{
public:
int singleNumber(vector<int> &nums)
{
int r = 0;
for (auto x : nums)
r ^= x;
return r;
}
};<file_sep>use std::collections::HashSet;
use std::collections::VecDeque;
impl Solution {
    /// BFS over the 10^4 lock states; returns the minimum number of
    /// single-wheel turns from "0000" to `target`, or -1 if unreachable.
    pub fn open_lock(deadends: Vec<String>, target: String) -> i32 {
        let deadends: HashSet<String> = deadends.into_iter().collect();
        let mut q: VecDeque<String> = VecDeque::new();
        let mut visited: HashSet<String> = HashSet::new();
        q.push_back("0000".into());
        // Mark the start as visited so neighbors cannot re-enqueue it
        // (the original left it unmarked and could reprocess it).
        visited.insert("0000".into());
        let mut step = 0;
        while !q.is_empty() {
            // Drain exactly one BFS layer per `step`.
            for _ in 0..q.len() {
                let cur = q.pop_front().unwrap();
                if deadends.contains(&cur) {
                    continue; // dead ends are never expanded
                }
                if cur == target {
                    return step;
                }
                for wheel in 0..4 {
                    // `insert` returns true only for states not seen before.
                    let minus = Solution::minus_one(&cur[..], wheel);
                    if visited.insert(minus.clone()) {
                        q.push_back(minus);
                    }
                    let plus = Solution::plus_one(&cur[..], wheel);
                    if visited.insert(plus.clone()) {
                        q.push_back(plus);
                    }
                }
            }
            step += 1;
        }
        -1
    }
    /// Copy of `s` with wheel `idx` rotated down one notch ('0' wraps to '9').
    fn minus_one(s: &str, idx: usize) -> String {
        s.chars().enumerate().map(|(i, c)| if i == idx {
            if c == '0' {
                '9'
            } else {
                ((c as u8) - 1) as char
            }
        } else {
            c
        }).collect()
    }
    /// Copy of `s` with wheel `idx` rotated up one notch ('9' wraps to '0').
    fn plus_one(s: &str, idx: usize) -> String {
        s.chars().enumerate().map(|(i, c)| if i == idx {
            if c == '9' {
                '0'
            } else {
                ((c as u8) + 1) as char
            }
        } else {
            c
        }).collect()
    }
}
<file_sep>// Definition for a binary tree node.
// #[derive(Debug, PartialEq, Eq)]
// pub struct TreeNode {
// pub val: i32,
// pub left: Option<Rc<RefCell<TreeNode>>>,
// pub right: Option<Rc<RefCell<TreeNode>>>,
// }
//
// impl TreeNode {
// #[inline]
// pub fn new(val: i32) -> Self {
// TreeNode {
// val,
// left: None,
// right: None
// }
// }
// }
use std::rc::Rc;
use std::cell::RefCell;
use std::cmp;
impl Solution {
    /// Longest path (in edges) between any two nodes of the tree.
    pub fn diameter_of_binary_tree(root: Option<Rc<RefCell<TreeNode>>>) -> i32 {
        // A local accumulator replaces the original `static mut` global,
        // which required `unsafe`, was non-reentrant, and (being declared
        // twice in this file) did not even compile.
        let mut best = 0;
        Solution::walk(root, &mut best);
        best
    }
    // Post-order walk: returns the height of `node` (in nodes) and folds the
    // best through-node path (left height + right height) into `best`.
    // One pass, O(n), instead of the original traverse+depth O(n^2).
    fn walk(node: Option<Rc<RefCell<TreeNode>>>, best: &mut i32) -> i32 {
        match node {
            None => 0,
            Some(n) => {
                let left = Solution::walk(n.borrow().left.clone(), best);
                let right = Solution::walk(n.borrow().right.clone(), best);
                *best = cmp::max(*best, left + right);
                cmp::max(left, right) + 1
            }
        }
    }
    /// Height of `node` in nodes; kept because other code in this file
    /// (SolutionInfer) calls Solution::depth.
    fn depth(node: Option<Rc<RefCell<TreeNode>>>) -> i32 {
        match node {
            None => 0,
            Some(node) => {
                let left = Solution::depth(node.borrow().left.clone());
                let right = Solution::depth(node.borrow().right.clone());
                cmp::max(left, right) + 1
            }
        }
    }
}
// Definition for a binary tree node.
// #[derive(Debug, PartialEq, Eq)]
// pub struct TreeNode {
// pub val: i32,
// pub left: Option<Rc<RefCell<TreeNode>>>,
// pub right: Option<Rc<RefCell<TreeNode>>>,
// }
//
// impl TreeNode {
// #[inline]
// pub fn new(val: i32) -> Self {
// TreeNode {
// val,
// left: None,
// right: None
// }
// }
// }
static mut diameter: i32 = 0;
use std::rc::Rc;
use std::cell::RefCell;
use std::cmp;
impl SolutionInfer {
pub fn diameter_of_binary_tree(root: Option<Rc<RefCell<TreeNode>>>) -> i32 {
unsafe {
diameter = 0;
}
Solution::depth(root.clone());
unsafe {
return diameter;
}
}
fn depth(node: Option<Rc<RefCell<TreeNode>>>) -> i32 {
match node {
None => 0,
Some(node) => {
let left = Solution::depth(node.borrow().left.clone());
let right = Solution::depth(node.borrow().right.clone());
unsafe {
diameter = cmp::max(left + right, diameter);
}
cmp::max(left, right) + 1
}
}
}
}
<file_sep>// Definition for a binary tree node.
// #[derive(Debug, PartialEq, Eq)]
// pub struct TreeNode {
// pub val: i32,
// pub left: Option<Rc<RefCell<TreeNode>>>,
// pub right: Option<Rc<RefCell<TreeNode>>>,
// }
//
// impl TreeNode {
// #[inline]
// pub fn new(val: i32) -> Self {
// TreeNode {
// val,
// left: None,
// right: None
// }
// }
// }
use std::rc::Rc;
use std::cell::RefCell;
use std::collections::VecDeque;
struct Codec {
}
const SEP: &str = ",";
const NULL: &str = "#";
/**
* `&self` means the method takes an immutable reference.
* If you need a mutable reference, change it to `&mut self` instead.
*/
impl Codec {
    fn new() -> Self {
        Self {}
    }
    /// Pre-order serialization: node values separated by SEP (","),
    /// with NULL ("#") marking an absent child.
    fn serialize(&self, root: Option<Rc<RefCell<TreeNode>>>) -> String {
        let mut result = vec![];
        fn traverse(node: Option<Rc<RefCell<TreeNode>>>, result: &mut Vec<String>) {
            match node {
                None => {
                    result.push(NULL.to_string());
                    result.push(SEP.to_string());
                }
                Some(node) => {
                    let node_ref = node.borrow();
                    result.push(node_ref.val.to_string());
                    result.push(SEP.to_string());
                    traverse(node_ref.left.clone(), result);
                    traverse(node_ref.right.clone(), result);
                }
            }
        }
        traverse(root, &mut result);
        result.join("")
    }
    /// Rebuilds the tree by consuming tokens in the same pre-order.
    fn deserialize(&self, data: String) -> Option<Rc<RefCell<TreeNode>>> {
        let mut nodes: VecDeque<&str> = data.split(SEP).collect();
        // Renamed bindings to snake_case and dropped the spurious
        // `mut nodes` rebindable parameter (lint fixes; behavior unchanged).
        fn traverse(nodes: &mut VecDeque<&str>) -> Option<Rc<RefCell<TreeNode>>> {
            if nodes.is_empty() {
                return None;
            }
            let node_val = nodes.pop_front().unwrap();
            if node_val == NULL {
                return None;
            }
            let mut node = TreeNode::new(node_val.parse().unwrap());
            node.left = traverse(nodes);
            node.right = traverse(nodes);
            Some(Rc::new(RefCell::new(node)))
        }
        traverse(&mut nodes)
    }
}
/**
* Your Codec object will be instantiated and called as such:
* let obj = Codec::new();
* let data: String = obj.serialize(strs);
* let ans: Option<Rc<RefCell<TreeNode>>> = obj.deserialize(data);
*/
<file_sep>export function minSum(arr: number[]) {
// your code here
arr.sort((a, b) => a - b);
let i = 0, j = arr.length - 1;
let result = 0;
while (i < j) {
result += arr[i] * arr[j];
i++, j--;
}
return result;
}<file_sep>// Definition for a binary tree node.
// #[derive(Debug, PartialEq, Eq)]
// pub struct TreeNode {
// pub val: i32,
// pub left: Option<Rc<RefCell<TreeNode>>>,
// pub right: Option<Rc<RefCell<TreeNode>>>,
// }
//
// impl TreeNode {
// #[inline]
// pub fn new(val: i32) -> Self {
// TreeNode {
// val,
// left: None,
// right: None
// }
// }
// }
use std::rc::Rc;
use std::cell::RefCell;
impl Solution {
    /// Rebuilds a binary tree from its inorder and postorder traversals.
    /// The last postorder element is the root; its position in `inorder`
    /// splits both sequences into the left and right subtrees.
    /// NOTE(review): the linear root search plus slice copies make this
    /// O(n^2) overall — acceptable for LeetCode input sizes.
    pub fn build_tree(inorder: Vec<i32>, postorder: Vec<i32>) -> Option<Rc<RefCell<TreeNode>>> {
        match inorder.len() {
            0 => None,
            l => {
                let val = postorder[l - 1];
                let inorder_index = inorder.iter().position(|&x| x == val).unwrap();
                let left_count = inorder_index;
                let left = Solution::build_tree((&inorder[..inorder_index]).to_vec(), (&postorder[..left_count]).to_vec());
                let right = Solution::build_tree((&inorder[inorder_index + 1..]).to_vec(), (&postorder[left_count..l-1]).to_vec());
                let mut node = TreeNode::new(val);
                node.left = left;
                node.right = right;
                Some(Rc::new(RefCell::new(node)))
            }
        }
    }
}
<file_sep>/**
* Definition for a binary tree node.
* class TreeNode {
* val: number
* left: TreeNode | null
* right: TreeNode | null
* constructor(val?: number, left?: TreeNode | null, right?: TreeNode | null) {
* this.val = (val===undefined ? 0 : val)
* this.left = (left===undefined ? null : left)
* this.right = (right===undefined ? null : right)
* }
* }
*/
// Diameter = longest path (in edges) between any two nodes.
// `traverse` evaluates left-height + right-height at every node; the memo
// map keeps the repeated depth() calls amortized O(1), so the whole
// computation is O(n).
function diameterOfBinaryTree(root: TreeNode | null): number {
  let result = 0;
  const depthMap = new Map<TreeNode, number>();
  // Height of `node` in nodes, memoized per node.
  const depth = (node: TreeNode | null): number => {
    if (node === null) return 0;
    if (depthMap.has(node)) return depthMap.get(node);
    const left = depth(node.left);
    const right = depth(node.right);
    const d = Math.max(left, right) + 1;
    depthMap.set(node, d);
    return d;
  };
  const traverse = (node: TreeNode | null) => {
    if (node === null) return;
    const left = depth(node.left);
    const right = depth(node.right);
    const cur = left + right;   // best path bending at this node
    result = Math.max(cur, result);
    traverse(node.left);
    traverse(node.right);
  };
  traverse(root);
  return result;
};
/**
* Definition for a binary tree node.
* class TreeNode {
* val: number
* left: TreeNode | null
* right: TreeNode | null
* constructor(val?: number, left?: TreeNode | null, right?: TreeNode | null) {
* this.val = (val===undefined ? 0 : val)
* this.left = (left===undefined ? null : left)
* this.right = (right===undefined ? null : right)
* }
* }
*/
// Single-pass diameter: compute each subtree's height and fold the best
// left-height + right-height seen at any node into the running answer.
function diameterOfBinaryTreeInfer(root: TreeNode | null): number {
  let best = 0;
  const height = (node: TreeNode | null): number => {
    if (node === null) return 0;
    const lh = height(node.left);
    const rh = height(node.right);
    if (lh + rh > best) best = lh + rh;
    return 1 + (lh > rh ? lh : rh);
  };
  height(root);
  return best;
};
<file_sep>impl Solution {
pub fn spiral_order(matrix: Vec<Vec<i32>>) -> Vec<i32> {
let mut ans=Vec::new();
if matrix.len()==0{
return ans;
}
let (mut left,mut right,mut top,mut bottom)=(0,matrix[0].len()-1,0,matrix.len()-1);
loop{
for i in left..right+1{
ans.push(matrix[top][i]);
}
top+=1;
if (top as i32)>(bottom as i32){
break;
}
for i in top..bottom+1{
ans.push(matrix[i][right]);
}
right-=1;
if (right as i32)<(left as i32){
break;
}
for i in (left..right+1).rev(){
ans.push(matrix[bottom][i]);
}
bottom-=1;
if (bottom as i32)<(top as i32){
break;
}
for i in (top..bottom+1).rev(){
ans.push(matrix[i][left]);
}
left+=1;
if (left as i32)>(right as i32){
break;
}
}
ans
}
}<file_sep>// Definition for a binary tree node.
// #[derive(Debug, PartialEq, Eq)]
// pub struct TreeNode {
// pub val: i32,
// pub left: Option<Rc<RefCell<TreeNode>>>,
// pub right: Option<Rc<RefCell<TreeNode>>>,
// }
//
// impl TreeNode {
// #[inline]
// pub fn new(val: i32) -> Self {
// TreeNode {
// val,
// left: None,
// right: None
// }
// }
// }
use std::rc::Rc;
use std::cell::RefCell;
use std::cmp;
impl Solution {
    /// Height of the tree (nodes on the longest root-to-leaf path); 0 when empty.
    ///
    /// Rewritten without the original `static mut Meta` accumulator, which
    /// required `unsafe`, was unsound under concurrency, and made the
    /// function non-reentrant.
    pub fn max_depth(root: Option<Rc<RefCell<TreeNode>>>) -> i32 {
        Solution::walk(root, 0)
    }
    // `depth` = number of ancestors of `node`; returns the deepest level reached.
    fn walk(node: Option<Rc<RefCell<TreeNode>>>, depth: i32) -> i32 {
        match node {
            None => depth,
            Some(n) => {
                let left = Solution::walk(n.borrow().left.clone(), depth + 1);
                let right = Solution::walk(n.borrow().right.clone(), depth + 1);
                cmp::max(left, right)
            }
        }
    }
}
impl SolutionWithInfer {
    /// Height of the tree via plain structural recursion; 0 for an empty tree.
    pub fn max_depth(root: Option<Rc<RefCell<TreeNode>>>) -> i32 {
        match root {
            None => 0,
            Some(node) => {
                // Height = 1 + height of the taller child subtree.
                let left = SolutionWithInfer::max_depth(node.borrow().left.clone());
                let right = SolutionWithInfer::max_depth(node.borrow().right.clone());
                return cmp::max(left, right) + 1;
            }
        }
    }
}
<file_sep>class Solution:
def canFinish(self, numCourses: int, prerequisites: List[List[int]]) -> bool:
hasCircle = False
visited = [False for i in range(numCourses)]
onPath = [False for i in range(numCourses)]
graph = [[] for i in range(numCourses)]
for (cur, pre) in prerequisites:
graph[pre].append(cur)
def traverse(start):
nonlocal hasCircle,visited,onPath,graph
if onPath[start]:
hasCircle = True
if hasCircle or visited[start]:
return
onPath[start] = True
visited[start] = True
for x in graph[start]:
traverse(x)
onPath[start] = False
for i in range(numCourses):
traverse(i)
return not hasCircle
<file_sep>/**==============================堆排
* @param {number[]} nums
* @return {number[]}
*/
var sortArray = function (nums) {
  // In-place heap sort: build a max-heap, then repeatedly swap the root
  // with the last unsorted element and sift the new root down.
  // Sift nums[parent] down within the first `len` elements.
  function ajust(parent, len) {
    let tmp = nums[parent];
    let child = 2 * parent + 1;
    while (child < len) {
      // Pick the larger of the two children.
      if (child + 1 < len && nums[child + 1] > nums[child]) child++;
      if (nums[child] <= tmp) break;
      nums[parent] = nums[child];
      parent = child;
      child = 2 * parent + 1;
    }
    nums[parent] = tmp;
  }
  // Heapify from the last internal node down to the root.
  for (let i = (nums.length - 2) / 2 | 0; i >= 0; i--) ajust(i, nums.length);
  // Shrink the heap one element at a time, moving the max to the tail.
  for (let i = nums.length - 1; i >= 0; i--) {
    let tmp = nums[0];
    nums[0] = nums[i];
    nums[i] = tmp;
    ajust(0, i);
  }
  return nums;
};<file_sep>class Solution:
def numTrees(self, n: int) -> int:
memo = {}
def count(l: int, h: int) -> int:
if l > h:
return 1
k = "%s,%s" %(l, h)
if k in memo:
return memo[k]
result = 0
for i in range(l, h + 1):
left = count(l, i - 1)
right = count(i + 1, h)
result += left * right
memo[k] = result
return result
return count(1, n)
<file_sep>/**
* @param {number[]} nums
* @param {number} target
* @return {number[]}
*/
var twoSum = function (nums, target) {
  // Single pass keeping a set of values seen so far; stop at the first
  // value whose complement was already seen.
  // NOTE(review): returns the pair of VALUES [nums[i], target - nums[i]],
  // not indices, and returns undefined when no pair exists — confirm this
  // matches the caller's expectation.
  let s = new Set();
  for (let i = 0; i < nums.length; i++) {
    if (!s.has(target - nums[i])) {
      s.add(nums[i]);
    } else {
      return [nums[i], target - nums[i]];
    }
  }
};<file_sep>use std::cmp::*;
impl Solution {
    /// Median of two sorted arrays in O(log(min(m, n))) by binary-searching
    /// a partition index into the shorter array.
    pub fn find_median_sorted_arrays(nums1: Vec<i32>, nums2: Vec<i32>) -> f64 {
        // Always cut the shorter array so j = left_count - i stays in range.
        if nums1.len()>nums2.len(){
            return Solution::find_median_sorted_arrays(nums2,nums1);
        }
        let m=nums1.len();
        let n=nums2.len();
        let (mut l,mut r)=(0,m);
        // Number of elements that belong to the combined left half.
        let left_count=(m+n+1)/2;
        while l<r{
            let i=(l+r)/2;
            let j=left_count-i;
            // Too few elements taken from nums1: move the cut right.
            if nums2[j-1]>nums1[i]{
                l=i+1;
            }else{
                r=i;
            }
        }
        let i=l;
        let j=left_count-i;
        // Sentinels cover cuts that fall on an array boundary.
        let left_max_1=if i==0{i32::min_value()}else{nums1[i-1]};
        let left_max_2=if j==0{i32::min_value()}else{nums2[j-1]};
        let right_min_1=if i==m{i32::max_value()}else{nums1[i]};
        let right_min_2=if j==n{i32::max_value()}else{nums2[j]};
        match (m+n)%2==0{
            // Even total: average the two middle elements.
            true=>f64::from(max(left_max_1,left_max_2)+min(right_min_1,right_min_2))/2.0,
            // Odd total: the left half holds the extra (median) element.
            _=>f64::from(max(left_max_2,left_max_1))
        }
    }
}<file_sep>/**
* Definition for a binary tree node.
* class TreeNode {
* val: number
* left: TreeNode | null
* right: TreeNode | null
* constructor(val?: number, left?: TreeNode | null, right?: TreeNode | null) {
* this.val = (val===undefined ? 0 : val)
* this.left = (left===undefined ? null : left)
* this.right = (right===undefined ? null : right)
* }
* }
*/
const SEP = ',';
const NULL = '#';
/*
* Encodes a tree to a single string.
*/
// Level-order (BFS) serialization: SEP-joined values with NULL ("#")
// for absent children; the trailing separator is trimmed at the end.
function serialize(root: TreeNode | null): string {
  if (root === null) return NULL;
  let result = [];
  let q: Array<TreeNode | null> = [];
  q.push(root);
  while (q.length !== 0) {
    let cur = q.shift();
    if (cur === null) {
      result.push(NULL, SEP);
    } else {
      result.push(cur.val, SEP);
      q.push(cur.left);
      q.push(cur.right);
    }
  }
  result.pop();   // drop the final separator
  return result.join('');
};
/*
* Decodes your encoded data to tree.
*/
// Inverse of serialize(): rebuild the tree level by level, consuming two
// child tokens for every node dequeued.
function deserialize(data: string): TreeNode | null {
  const nodes = data.split(SEP);
  if (nodes.length === 0) return null;
  const rootVal = nodes.shift();
  const q: Array<TreeNode | null> = [];
  if (rootVal === NULL) return null;
  const root = new TreeNode(Number(rootVal));
  q.push(root);
  while(q.length !== 0) {
    const cur = q.shift();
    // Next token (if any) is cur's left child.
    if (nodes.length !== 0) {
      const leftVal = nodes.shift();
      if (leftVal !== NULL) {
        cur.left = new TreeNode(Number(leftVal));
        q.push(cur.left);
      } else {
        cur.left = null;
      }
    }
    // The token after that is cur's right child.
    if (nodes.length !== 0) {
      const rightVal = nodes.shift();
      if (rightVal !== NULL) {
        cur.right = new TreeNode(Number(rightVal));
        q.push(cur.right);
      } else {
        cur.right = null;
      }
    }
  }
  return root;
};
/**
* Your functions will be called as such:
* deserialize(serialize(root));
*/
<file_sep>/**
* Definition for a binary tree node.
* class TreeNode {
* val: number
* left: TreeNode | null
* right: TreeNode | null
* constructor(val?: number, left?: TreeNode | null, right?: TreeNode | null) {
* this.val = (val===undefined ? 0 : val)
* this.left = (left===undefined ? null : left)
* this.right = (right===undefined ? null : right)
* }
* }
*/
// Maximum path sum over all paths in the tree (a path may bend at one node).
function maxPathSum(root: TreeNode | null): number {
  let best = Number.MIN_SAFE_INTEGER;
  // Largest sum of a downward path starting at `node`; negative branch
  // contributions are clipped to 0 (the branch is simply skipped).
  const downward = (node: TreeNode | null): number => {
    if (node === null) return 0;
    const leftGain = Math.max(0, downward(node.left));
    const rightGain = Math.max(0, downward(node.right));
    const throughNode = leftGain + rightGain + node.val;
    if (throughNode > best) best = throughNode;
    return node.val + Math.max(leftGain, rightGain);
  };
  downward(root);
  return best;
};
<file_sep>//组件类
// Abstract product: a renderable, clickable button.
interface Button {
  render(): string;
  onClick(): void;
}
// Concrete product for the web platform.
class HTMLButton implements Button {
  render(): string {
    return "<button></button>";
  }
  onClick(): void {
    console.log("Web Button clicked");
  }
}
// Concrete product for the MFC (Windows) platform.
class MFCButton implements Button {
  render(): string {
    return "<mfc-button></mfc-button>";
  }
  onClick(): void {
    console.log("MFC Button clicked");
  }
}
// Factory-method classes: Dialog defers button creation to its subclasses.
abstract class Dialog {
  // Factory method: each platform dialog supplies its own button flavor.
  abstract createButton(): Button;
  render() {
    let button = this.createButton();
    return button.render();
  }
}
export class MFCDialog extends Dialog {
  createButton(): Button {
    return new MFCButton();
  }
}
export class HTMLDialog extends Dialog {
  createButton(): Button {
    return new HTMLButton();
  }
}
<file_sep>export function isUpperCase(str: string) {
  // Note: strings with no cased characters ("123", "") equal their own
  // toUpperCase() and are therefore reported as uppercase.
  return str === str.toUpperCase();
}<file_sep>// Definition for a binary tree node.
// #[derive(Debug, PartialEq, Eq)]
// pub struct TreeNode {
// pub val: i32,
// pub left: Option<Rc<RefCell<TreeNode>>>,
// pub right: Option<Rc<RefCell<TreeNode>>>,
// }
//
// impl TreeNode {
// #[inline]
// pub fn new(val: i32) -> Self {
// TreeNode {
// val,
// left: None,
// right: None
// }
// }
// }
use std::rc::Rc;
use std::cell::RefCell;
use std::collections::VecDeque;
impl Solution {
    /// Level-order (BFS) search for the shallowest leaf; 0 for an empty tree.
    pub fn min_depth(root: Option<Rc<RefCell<TreeNode>>>) -> i32 {
        if root.is_none() {
            return 0;
        }
        let mut result = 1;
        let mut q: VecDeque<Rc<RefCell<TreeNode>>> = VecDeque::new();
        q.push_back(root.unwrap());
        while !q.is_empty() {
            // Drain exactly one level per outer iteration
            // (the original's unused loop variable is now `_`).
            for _ in 0..q.len() {
                let cur = q.pop_front().unwrap();
                let cur_borrow = cur.borrow();
                if cur_borrow.left.is_none() && cur_borrow.right.is_none() {
                    return result; // first leaf reached is the shallowest
                }
                if let Some(left) = cur_borrow.left.clone() {
                    q.push_back(left);
                }
                if let Some(right) = cur_borrow.right.clone() {
                    q.push_back(right);
                }
            }
            result += 1;
        }
        result
    }
}
<file_sep>/**
* @param {number[]} candidates
* @param {number} target
* @return {number[][]}
*/
var combinationSum2 = function (candidates, target) {
  // Sorting makes identical combinations serialize to the same "a+b+c"
  // string, so the Set removes duplicate combinations.
  candidates.sort((a, b) => a - b);
  let result = new Set();
  // cur: "+"-joined choices so far; idx: next candidate; n: remaining sum.
  function helper(cur, idx, n) {
    if (n < 0) return;
    if (n === 0) {
      result.add(cur);
      return;
    }
    if (idx < candidates.length) {
      // Branch 1: take candidates[idx]; branch 2: skip it.
      helper(`${cur + (cur ? '+' : '')}${candidates[idx]}`, idx + 1, n - candidates[idx]);
      helper(cur, idx + 1, n);
    }
  }
  helper('', 0, target)
  return [...result].map(x => x.split('+').map(t => +t));
};<file_sep>/**
* @param {number} n
* @return {number}
*/
var waysToStep = function (n) {
  // Tribonacci: f(k) = f(k-1) + f(k-2) + f(k-3), kept mod 1e9+7.
  let a = 1, b = 2, c = 4;
  switch (n) {
    case 1: return a;
    case 2: return b;
    case 3: return c;
  }
  // Roll the window: after each turn (a, b, c) = (f(k-2), f(k-1), f(k)).
  while (n-- > 3) {
    let t = c;
    c = (a + b + c) % 1000000007;
    a = b;
    b = t;
  }
  return c;
};<file_sep>impl Solution {
pub fn find_order(num_courses: i32, prerequisites: Vec<Vec<i32>>) -> Vec<i32> {
let size:usize=num_courses as usize;
let mut indegree=vec![0;size];
let mut neighbors=vec![vec![];size];
for item in prerequisites.iter(){
indegree[item[0] as usize]+=1;
neighbors[item[1] as usize].push(item[0] as usize);
}
let mut q:Vec<usize> = (0..num_courses as usize).filter(|&i| indegree[i]==0).collect();
let mut result=q.clone();
while !q.is_empty(){
let mut p=vec![];
for &i in q.iter(){
for &neighbor in neighbors[i].iter(){
indegree[neighbor]-=1;
if indegree[neighbor]==0{
p.push(neighbor);
}
}
}
result.extend(p.iter());
q=p;
}
match result.len()==size{
true=>result.iter().map(|&x| x as i32).collect(),
_=>vec![]
}
}
}<file_sep># Write your MySQL query statement below
select Email from Person group by Email having count(Email)>=2<file_sep># Definition for a binary tree node.
# class TreeNode:
# def __init__(self, val=0, left=None, right=None):
# self.val = val
# self.left = left
# self.right = right
class Solution:
    def kthSmallest(self, root: "Optional[TreeNode]", k: int) -> int:
        """Return the k-th smallest value (1-indexed) in a BST.

        In-order traversal visits BST values in ascending order, so we stop
        as soon as the k-th node has been visited. Returns -1 when the tree
        holds fewer than k nodes.

        The annotations are quoted (lazy) so the module imports even when
        ``Optional``/``TreeNode`` are not in scope at import time — the
        original relied on LeetCode injecting ``from typing import *``.
        """
        r = -1   # answer; stays -1 if fewer than k nodes exist
        i = 0    # number of nodes visited so far
        def traverse(node: "Optional[TreeNode]"):
            nonlocal r, i
            # Prune the walk once the answer has been found.
            if not node or i >= k:
                return
            traverse(node.left)
            i += 1
            if i == k:
                r = node.val
                return
            traverse(node.right)
        traverse(root)
        return r
<file_sep>/**
* Definition for a binary tree node.
* function TreeNode(val) {
* this.val = val;
* this.left = this.right = null;
* }
*/
/**
* @param {TreeNode} root
* @return {number}
*/
var sumRootToLeaf = function (root) {
  // DFS building the root-to-leaf path as a binary digit string,
  // converted with parseInt(..., 2) at each leaf.
  let result = 0;
  if (!root) return result;
  !function dfs(x, s) {
    if (!x.left && !x.right) {
      result += parseInt(s + x.val, 2);
    }
    if (x.left) dfs(x.left, s + x.val);
    if (x.right) dfs(x.right, s + x.val);
  }(root, '');
  // NOTE(review): the modulo looks like a leftover from a variant of this
  // problem; it only has an effect if the sum can reach 1e9+7.
  return result % (10 ** 9 + 7);
};<file_sep>import {
GUIFactory,
MacGUIFactory,
MacButton,
MacCheckbox,
WinGUIFactory,
WinButton,
WinCheckbox,
} from "./跨平台UI";
// Each concrete GUI factory must produce widgets of its own platform family.
test("测试跨平台UI抽象工厂", () => {
  let win: GUIFactory = new WinGUIFactory();
  let mac: GUIFactory = new MacGUIFactory();
  expect(win.createButton()).toBeInstanceOf(WinButton);
  expect(win.createCheckbox()).toBeInstanceOf(WinCheckbox);
  expect(mac.createButton()).toBeInstanceOf(MacButton);
  expect(mac.createCheckbox()).toBeInstanceOf(MacCheckbox);
});
<file_sep>fn is_divisible(n: i32, x: i32, y: i32) -> bool {
    // n is divisible by both x and y
    n%x==0&&n%y==0
}<file_sep>/**
* Definition for singly-linked list.
* function ListNode(val) {
* this.val = val;
* this.next = null;
* }
*/
/**
* @param {ListNode} head
* @param {number} k
* @return {ListNode}
*/
// Reverse the list in groups of k; a trailing group shorter than k is
// left untouched. `hair` is a dummy head so the real head can move.
var reverseKGroup = function (head, k) {
  let hair = new ListNode(0);
  hair.next = head;
  let pre = hair;   // node just before the current group
  while (head) {
    // Advance `tail` k nodes; if the list runs out, we are done.
    let tail = pre;
    for (let i = 0; i < k; i++) {
      tail = tail.next;
      if (!tail) return hair.next;
    }
    let nex = tail.next;   // first node after the group
    [head, tail] = myReverse(head, tail);
    // Splice the reversed group back between pre and nex.
    pre.next = head;
    tail.next = nex;
    pre = tail;
    head = tail.next;
  }
  return hair.next;
};
// Reverse the sublist head..tail in place; returns [newHead, newTail].
// The node after `tail` (possibly null) is used as the reversal sentinel,
// so the reversed group ends up linked to the rest of the list.
function myReverse(head, tail) {
  let prev = tail.next;
  let node = head;
  while (prev !== tail) {
    const following = node.next;
    node.next = prev;
    prev = node;
    node = following;
  }
  return [tail, head];
}
<file_sep># 简单数据结构和应用
## 队列和栈
### 队列和广度优先搜索
#### 算法
1. `根节点入队并标记已访问`
2. `当前队列所有节点(每个节点记为A)出队,依次访问节点A`
3. `如果A为目标节点,结束返回;否则4`
4. `当前节点A邻居节点入队并标记已访问`
5. `继续执行2,直到所有出队的节点(新入队的不算)处理完`
6. `继续执行2,直到队列为空`
#### 例子:[《岛屿数量》](https://leetcode-cn.com/problems/number-of-islands/)
### 广度优先搜索和Dijkstra
> - 简单广度优先搜索是图的所有边权值都为1的情况下的搜索。
>
> - Dijkstra处理的图(更一般图)更多的是权值为不等的。
>
> - 简单BFS每次入队的是一个层次(距离根节点距离相等的且相对剩余节点最近的)所有节点
> - 更一般图每次入队的也是距离根节点相对剩余节点最近的(可以看做每层节点数只有一个,省去了队列)
#### 算法
1. `定义访问集visited、最短距离向量distance`
2. `根节点(源节点)加入visited,更新distance`
3. `获取未加入visited节点的distance最小的节点A`
4. `将A加入visited,更新distance(更新规则:distance中未加入visited的节点D=min{D的当前值,distance[A]+A到D的距离})`
#### 代码
```c++
#include <iostream>
#include <string>
#include <set>
using namespace std;
unsigned MAX_D = (-1U); // "infinity": no edge / not reachable yet
// Forward declaration: the original defined min() after dijkstra(),
// which does not compile in C++.
unsigned min(unsigned a, unsigned b);
// Single-source shortest paths on an adjacency matrix a[count][count],
// printing the distance from src to every node.
void dijkstra(unsigned a[][5], int count, int src)
{
    set<int> visited;
    unsigned *distance = new unsigned[count];
    for (size_t i = 0; i < count; i++)
    {
        distance[i] = MAX_D;
    }
    distance[src] = 0;
    visited.insert(src);
    // Each outer iteration fixes the final distance of one more node.
    for (size_t i = 0; i < count; i++)
    {
        if (i == src)
        {
            continue;
        }
        // Pick the unvisited node currently closest to the source.
        // NOTE(review): in a disconnected graph every remaining distance can
        // be MAX_D, leaving min_i at 0 — fine for this connected demo only.
        unsigned min_d = MAX_D, min_i = 0;
        for (size_t j = 0; j < count; j++)
        {
            if (visited.count(j) > 0)
            {
                continue;
            }
            if (distance[j] < min_d)
            {
                min_d = distance[j];
                min_i = j;
            }
        }
        visited.insert(min_i);
        // Relax the remaining nodes' distances through min_i.
        for (size_t k = 0; k < count; k++)
        {
            if (visited.count(k) > 0 || a[min_i][k] == MAX_D)
            {
                continue;
            }
            distance[k] = min(distance[k], distance[min_i] + a[min_i][k]);
        }
    }
    for (size_t i = 0; i < count; i++)
    {
        std::cout << distance[i] << std::endl;
    }
    delete[] distance; // the original leaked this buffer
}
unsigned min(unsigned a, unsigned b)
{
    return a > b ? b : a;
}
int main()
{
    unsigned a[5][5] = {
        {0, 1, 2, MAX_D, MAX_D},
        {1, 0, MAX_D, 5, MAX_D},
        {2, MAX_D, 0, 2, 6},
        {MAX_D, 5, 2, 0, MAX_D},
        {MAX_D, MAX_D, 6, MAX_D, 0} // original row had only 4 of 5 entries
    };
    dijkstra(a, 5, 0);
    return 0;
}
```
### 栈和表达式求值
#### 中缀表达式转后缀表达式
`中缀表达式字符串从左向右扫描每个对象A:`
1. `对象A为数字:直接输出;`
2. `对象A为左括号:入栈;`
3. `对象A为右括号:弹栈并输出直到左括号(括号不必输出)`
4. `对象A为运算符,则和栈顶运算符B比较优先级:`
1. `若B的优先级小,则将A压栈`
2. `若B的优先级大于等于A,弹栈输出直到B的优先级小于A,然后将A压栈`
5. `栈内剩余运算符弹出并输出`
#### 后缀表达式计算
`后缀表达式字符串从左向后扫描每个对象A:`
1. `对象A为数字:压栈`
2. `对象A为符号x:依次弹栈得到a和b,计算bxa结果压栈`
3. `后缀表达式读取完毕,弹栈输出`
### 栈和深度优先搜索
#### 算法
1. `根节点压栈`
2. `弹栈得到节点A`
3. `如果节点为A目标,则结束返回;否则4`
4. `将A的每个邻居节点标记为已访问并按次序压栈`
5. `执行2,直到栈空`
#### 例子:[《二叉树的中序遍历》](https://leetcode-cn.com/problems/binary-tree-inorder-traversal/)
```c++
/**
* Definition for a binary tree node.
* struct TreeNode {
* int val;
* TreeNode *left;
* TreeNode *right;
* TreeNode(int x) : val(x), left(NULL), right(NULL) {}
* };
*/
class Solution {
public:
vector<int> inorderTraversal(TreeNode* root) {
set<TreeNode*> visited;
stack<TreeNode*> s;
vector<int> result;
if(!root) return result;
s.push(root);
while(!s.empty()){
TreeNode* cur=s.top();
s.pop();
if(!cur) continue;
if(visited.find(cur)==visited.end()){
visited.insert(cur);
s.push(cur->right);
s.push(cur);
s.push(cur->left);
}
else{
result.push_back(cur->val);
}
}
return result;
}
};
```
### 队列实现栈
#### 算法
用两个队列S[2]
- 压栈操作
1. 入队S[0]
- 弹栈操作
1. S[0]出队直到剩一个元素A,出队的元素依次入队S[1]
2. S[0]中最后一个元素A出队并返回
3. 切换S[0]和S[1]的作用
#### 代码
```c++
class MyStack {
queue<int> q[2];
int index=0;
public:
void changeIndex(){
index=(index+1)%2;
}
/** Initialize your data structure here. */
MyStack() {
}
/** Push element x onto stack. */
void push(int x) {
q[index].push(x);
}
/** Removes the element on top of the stack and returns that element. */
int pop() {
while(q[index].size()!=1){
q[(index+1)%2].push(q[index].front());
q[index].pop();
}
int result = q[index].front();
q[index].pop();
changeIndex();
return result;
}
/** Get the top element. */
int top() {
while(q[index].size()!=1){
q[(index+1)%2].push(q[index].front());
q[index].pop();
}
int result = q[index].front();
q[(index+1)%2].push(q[index].front());
q[index].pop();
changeIndex();
return result;
}
/** Returns whether the stack is empty. */
bool empty() {
return q[0].empty()&&q[1].empty();
}
};
/**
* Your MyStack object will be instantiated and called as such:
* MyStack* obj = new MyStack();
* obj->push(x);
* int param_2 = obj->pop();
* int param_3 = obj->top();
* bool param_4 = obj->empty();
*/
```
### 栈实现队列
#### 算法
两个栈S[2]
- 入队操作
1. 压栈到S[0]
- 出队操作
1. 如果S[1]为空,则S[0]依次弹栈,弹出的元素依次压栈至S[1]
2. S[1]弹出元素并返回
#### 代码
```c++
class MyQueue {
private:
stack<int> s;
stack<int> q;
public:
/** Initialize your data structure here. */
MyQueue() {
}
/** Push element x to the back of queue. */
void push(int x) {
s.push(x);
}
/** Removes the element from in front of queue and returns that element. */
int pop() {
int result = peek();
q.pop();
return result;
}
/** Get the front element. */
int peek() {
if(q.empty()){
while(!s.empty()){
q.push(s.top());
s.pop();
}
}
return q.top();
}
/** Returns whether the queue is empty. */
bool empty() {
return q.empty()&&s.empty();
}
};
/**
* Your MyQueue object will be instantiated and called as such:
* MyQueue* obj = new MyQueue();
* obj->push(x);
* int param_2 = obj->pop();
* int param_3 = obj->peek();
* bool param_4 = obj->empty();
*/
```
## 数组和字符串
### 双指针技巧
- 情景一——加速迭代(`从两端向中间迭代数组(比如翻转数组)`)
- 情景二——快指针和慢指针的不同步来解决问题(`给定一个数组和一个值,原地删除改值的所有实例并返回新的长度;判断链表中是否有环(快慢指针肯定相遇第二次)`)
## 链表
### 反转链表
#### 算法
> 按原始顺序迭代,并将已翻转列表的后续列表依次放到已翻转列表头部

#### 代码
```c++
/**
* Definition for singly-linked list.
* struct ListNode {
* int val;
* ListNode *next;
* ListNode(int x) : val(x), next(NULL) {}
* };
*/
class Solution {
public:
ListNode* reverseList(ListNode* head) {
ListNode* cur=head,*tmp=head;
if(!head) return head;
while(cur->next){
cur=cur->next;
tmp->next=cur->next;
cur->next=head;
head=cur;
cur=tmp;
}
return head;
}
};
```
## 哈希表
### 原理
> 利用哈希函数将数据转换为索引,以实现快速插入和搜索
### 哈希集
> 哈希集的存储数据只有一个数value
#### 应用情景
1. 查重
#### 代码
```c++
struct Node{
int val;
Node * next;
Node(int x):val(x),next(NULL){}
};
class MyHashSet {
private:
Node* set[1000];
public:
/** Initialize your data structure here. */
MyHashSet() {
for(int i=0;i<1000;i++){
set[i]=NULL;
}
}
void add(int key) {
int box=key%1000;
if(!set[box]){
Node * node = new Node(key);
set[box]=node;
return;
}
Node * cur=set[box];
Node * pre=NULL;
while(cur){
if(cur->val==key) return;
pre=cur;
cur=cur->next;
}
Node * node = new Node(key);
pre->next=node;
}
void remove(int key) {
int box=key%1000;
if(!set[box]){
return;
}
Node * cur=set[box];
if(cur->val==key){
set[box]=set[box]->next;
delete(cur);
return;
}
Node * pre=NULL;
while(cur){
if(cur->val==key){
pre->next=cur->next;
delete(cur);
return;
}
pre=cur;
cur=cur->next;
}
}
/** Returns true if this set contains the specified element */
bool contains(int key) {
int box=key%1000;
if(!set[box]){
return false;
}
Node * cur=set[box];
while(cur){
if(cur->val==key) return true;
cur=cur->next;
}
return false;
}
};
/**
* Your MyHashSet object will be instantiated and called as such:
* MyHashSet* obj = new MyHashSet();
* obj->add(key);
* obj->remove(key);
* bool param_3 = obj->contains(key);
*/
```
### 哈希映射
> 哈希映射的存储数据是{key,value}键值对
#### 应用情景
1. 提供更多信息
2. 按键聚合数据:
- 属于同一组的所有值都将映射到同一组中。
- 需要分成不同组的值不会映射到同一组。
#### 代码
```c++
struct Node{
int key;
int val;
Node* next;
Node(int k,int v):key(k),val(v),next(NULL){}
};
class MyHashMap {
private:
Node* map[1000];
public:
/** Initialize your data structure here. */
MyHashMap() {
for(int i=0;i<1000;i++){
map[i]=NULL;
}
}
/** value will always be non-negative. */
void put(int key, int value) {
int box=key%1000;
if(!map[box]){
Node *node = new Node(key,value);
map[box]=node;
return;
}
Node*cur=map[box],*pre=NULL;
while(cur){
if(cur->key==key){
cur->val=value;
return;
}
pre=cur;
cur=cur->next;
}
Node *node = new Node(key,value);
pre->next=node;
}
/** Returns the value to which the specified key is mapped, or -1 if this map contains no mapping for the key */
int get(int key) {
int box=key%1000;
if(!map[box]) return -1;
Node*cur=map[box];
while(cur){
if(cur->key==key){
return cur->val;
}
cur=cur->next;
}
return -1;
}
/** Removes the mapping of the specified value key if this map contains a mapping for the key */
void remove(int key) {
int box=key%1000;
if(!map[box]) return;
Node*cur=map[box],*pre=NULL;
if(map[box]->key==key){
map[box]=map[box]->next;
delete(cur);
return;
}
while(cur){
if(cur->key==key){
pre->next=cur->next;
delete cur;
return;
}
pre=cur;
cur=cur->next;
}
}
};
/**
* Your MyHashMap object will be instantiated and called as such:
* MyHashMap* obj = new MyHashMap();
* obj->put(key,value);
* int param_2 = obj->get(key);
* obj->remove(key);
*/
```
#### 例子:[《字母异位词分组》](<https://leetcode-cn.com/problems/group-anagrams/>)
- 算法
> 利用哈希映射,按键聚合数据,关键是“键”的设计。
- 代码
```c++
class Solution {
public:
vector<vector<string>> groupAnagrams(vector<string>& strs) {
vector<vector<string>> result;
unordered_map<string,vector<string>> map;
for(int i=0;i<strs.size();i++){
string tmp=strs[i];
sort(tmp.begin(),tmp.end());
map[tmp].push_back(strs[i]);
}
for(auto it=map.begin();it!=map.end();it++){
result.push_back(it->second);
}
return result;
}
};
```
## 排序
### 插入排序
> 类似于摸扑克牌后的排序。每次摸到扑克牌,手中的扑克是有序的,需要将新扑克插入到适当的位置。
#### 特点
- 对于部分有序数组更快
- 对于数组元素较少时排序较快
- 空间复杂度O(1),时间复杂度O(n^2)
- 稳定排序
#### 代码
```c++
class Solution {
public:
vector<int> sortArray(vector<int>& nums) {
for(int i=1;i<nums.size();i++){
int tmp=nums[i];
int j=i-1;
for(;j>=0&&nums[j]>tmp;j--){
nums[j+1]=nums[j];
}
nums[j+1]=tmp;
}
return nums;
}
};
```
### 朴素希尔排序
> 插入排序的升级版。源于对部分有序的数组进行插入排序会比较快。
#### 特点
- 空间复杂度O(1),最差时间复杂度O(n^2)(甚至比插入排序还慢:若各分组都已有序,最后仍要对整体做一次插入排序,不如不分组)
- **非朴素希尔排序(增量选择使用其他策略,不是简单地折半)时间复杂度为O(n^(3/2)) 或 O(n^(4/3))**
#### 算法
思路:将原始数组进行分组,每个组内进行插入排序。
分组思路:选取跨度,首先跨度是`length/2`,分成`length/2`个组,各组排好序后,跨度减半`length/4`,这时共有`length/4`个组,继续迭代,直到最后剩余一个组**排序后**结束。
#### 代码
```c++
class Solution {
public:
vector<int> sortArray(vector<int>& nums) {
int d=nums.size();
while(d>1){
d/=2;
for(int i=0;i<d;i++){
for(int j=i+d;j<nums.size();j+=d){
int tmp=nums[j];
int k=j-d;
for(;k>=0&&nums[k]>tmp;k-=d){
nums[k+d]=nums[k];
}
nums[k+d]=tmp;
}
}
}
return nums;
}
};
```
### 选择排序
> 选择未排序元素中最小(或最大)的元素,放到已排好序的末尾
#### 特点
- 空间复杂度O(1),时间复杂度O(n^2)
- 不稳定排序
#### 代码
```c++
class Solution {
public:
    // Selection sort: repeatedly find the smallest remaining element and
    // swap it to the front of the unsorted suffix. O(n^2) time, O(1) space,
    // not stable.
    vector<int> sortArray(vector<int>& nums) {
        // `i + 1 < nums.size()` instead of `i < nums.size() - 1`:
        // with an empty vector, size() - 1 underflows to SIZE_MAX and the
        // old loop indexed out of bounds (undefined behavior).
        for(size_t i = 0; i + 1 < nums.size(); i++){
            size_t min_index = i;
            for(size_t j = i + 1; j < nums.size(); j++){
                if(nums[j] < nums[min_index]){
                    min_index = j;
                }
            }
            int tmp = nums[i];
            nums[i] = nums[min_index];
            nums[min_index] = tmp;
        }
        return nums;
    }
};
```
### 简单冒泡排序
> 像冒泡一样排序。
#### 特点
- 空间复杂度O(1),时间复杂度O(n^2)
- 稳定排序
#### 代码
```c++
class Solution {
public:
vector<int> sortArray(vector<int>& nums) {
for(int i=0;i<nums.size();i++){
for(int j=0;j<nums.size()-i-1;j++){
if(nums[j]>nums[j+1]){
int tmp = nums[j];
nums[j]=nums[j+1];
nums[j+1]=tmp;
}
}
}
return nums;
}
};
```
### 优化冒泡排序
#### 第一步优化
> 若冒泡过程中数组已经有序,则无需执行后续操作了
##### 思路
若一轮冒泡中没有交换,那说明已经有序了。则添加标志位即可判断此情况。
##### 代码
```c++
class Solution {
public:
vector<int> sortArray(vector<int>& nums) {
bool isSorted;
for (size_t i = 0; i < nums.size(); i++)
{
isSorted=true;
for (size_t j = 0; j < nums.size()-i-1; j++)
{
if (nums[j]>nums[j+1])
{
int tmp=nums[j];
nums[j]=nums[j+1];
nums[j+1]=tmp;
isSorted=false;
}
}
if (isSorted)
{
break;
}
}
return nums;
}
};
```
#### 第二步优化
> 已经冒泡结束的有序部分长度,可能小于有序的部分,此时做了无用比较,如下图,4-8已经有序了,8是已经冒泡的部分,下一轮冒泡4-7还得比较,浪费了资源

##### 思路
记录一轮冒泡中最后交换的位置,这个位置就是有序的边界。
##### 代码
```c++
class Solution {
public:
    // Bubble sort with two optimizations:
    //  - `isSorted` exits early when a full pass performs no swap;
    //  - `border` shrinks the scanned range to the position of the last
    //    swap, since everything past it is already in order.
    vector<int> sortArray(vector<int>& nums) {
        if (nums.empty())
            return nums;
        bool isSorted;
        // Initialized to 0 so `border = lastSwapIndex` is well-defined even
        // when the very first pass makes no swap (it was read uninitialized
        // before, which is undefined behavior on an already-sorted input).
        size_t lastSwapIndex = 0;
        size_t border = nums.size() - 1;
        for (size_t i = 0; i < nums.size(); i++)
        {
            isSorted = true;
            for (size_t j = 0; j < border; j++)
            {
                if (nums[j] > nums[j + 1])
                {
                    int tmp = nums[j];
                    nums[j] = nums[j + 1];
                    nums[j + 1] = tmp;
                    isSorted = false;
                    lastSwapIndex = j;
                }
            }
            border = lastSwapIndex;
            if (isSorted)
            {
                break;
            }
        }
        return nums;
    }
};
```
### 小范围或双射——计数排序
> 针对数值范围较小的数组,或者数组数值可以一一对应映射到较小范围的数组,可以采用计数排序
#### 特点
- 局限性较强,只适用于数值小范围或双射小范围的数组
- 时间复杂度和空间复杂度都为O(n)
#### 思路
利用有限范围数组进行计数,初始化为0。每遇见一个元素X,对应映射下标Y数组元素计数加一;最后下标Y映射为原来的数值X输出。
#### 代码
> 映射函数y=x,即数值x和数组下标y的关系是y=x的关系,其他情况类似,例如y=x-90等
```c++
class Solution {
public:
vector<int> sortArray(vector<int>& nums) {
int a[10];
for (size_t i = 0; i < 10; i++)
{
a[i]=0;
}
for (size_t i = 0; i < nums.size(); i++)
{
a[nums[i]]++;
}
int k=0;
for (size_t i = 0; i < 10; i++)
{
for (size_t j = 0; j < a[i]; j++)
{
nums[k]=i;
k++;
}
}
return nums;
}
};
int main(int argc, const char** argv) {
vector<int> a={9,2,5,7,3,4,8,7,0};
Solution s;
s.sortArray(a);
for (size_t i = 0; i < a.size(); i++)
{
std::cout << a[i] << " ";
}
return 0;
}
```
### 桶排序
> 类似计数排序。取不同的桶,每个桶放入同范围的数,桶内的元素自动排序。
#### 特点
- 时间复杂度O(n),空间复杂度O(n+m),m是桶的个数
#### 代码
```c++
class Solution {
public:
vector<int> sortArray(vector<int>& nums) {
int maxValue = *max_element(nums.begin(),nums.end());
int minValue = *min_element(nums.begin(),nums.end());
int n=maxValue-minValue+1;
vector<int> bucket(n);
for(int i=0;i<nums.size();i++){
bucket[nums[i]-minValue]++;
}
int k=0;
for(int i=0;i<n;i++){
for(int j=0;j<bucket[i];j++){
nums[k++]=i+minValue;
}
}
return nums;
}
};
```
### 归并排序
> 顾名思义,先递归,后合并。通过二分递归至每个子数组只有一个元素,然后每两个进行合并成一个较大数组,然后回溯合并较大数组成更大数组,循环往复,直到结束
#### 特点
- 稳定排序
- 空间复杂度O(n),时间复杂度O(nlogn)
- 类似于二叉树的层次遍历(从下向上)
#### 算法
- `递归`
1. `给定数组边界:left、right`
2. `中间值mid=left+(right-left)/2`
3. `递归左边[left,mid]`
4. `递归右边[mid+1,right]`
5. `合并[left,right]`
- `合并`
> 即合并[left,mid]和[mid+1,right]两个有序数组
#### 代码
```c++
class Solution {
public:
vector<int> sortArray(vector<int>& nums) {
mergeSort(nums,0,nums.size()-1);
return nums;
}
//归并排序
void mergeSort(vector<int>&nums,int left,int right){
if(left>=right) return;
int mid=left+(right-left)/2;
mergeSort(nums,left,mid);
mergeSort(nums,mid+1,right);
merge(nums,left,right,mid);
}
//合并已有序的两个相邻子数组
void merge(vector<int>&nums,int left,int right,int mid){
vector<int> tmp;
int i=left,j=mid+1;
while(i<=mid&&j<=right){
if(nums[i]<=nums[j]){
tmp.push_back(nums[i]);
i++;
}else{
tmp.push_back(nums[j]);
j++;
}
}
while(i<=mid){
tmp.push_back(nums[i]);
i++;
}
while(j<=right){
tmp.push_back(nums[j]);
j++;
}
for(int k=left,p=0;k<=right;k++,p++){
nums[k]=tmp[p];
}
}
};
```
### 快速排序
> 分治策略。冒泡的升级版,一堆数据一起冒泡,基准值当水面。
#### 特征
- 时间复杂度O(nlogn),最差时间复杂度O(n^2)
- 不稳定排序
#### 算法
1. `选基准值pivot`
2. `大于pivot的放右边,小于等于的放左边`
3. `左边和右边分治迭代`
#### 代码
##### 递归版本
```c++
class Solution {
public:
vector<int> sortArray(vector<int>& nums) {
quickSort(nums,0,nums.size()-1);
return nums;
}
void quickSort(vector<int>& nums,int start,int end){
if(start>=end) return;
int pivotIndex=partition(nums,start,end);
quickSort(nums,start,pivotIndex-1);
quickSort(nums,pivotIndex+1,end);
}
int partition(vector<int>& nums,int start,int end){
int pivot=nums[start];
int left=start;
int right=end;
while(left!=right){
while(right>left&&nums[right]>pivot){
right--;
}
while(left<right&&nums[left]<=pivot){
left++;
}
int tmp=nums[left];
nums[left]=nums[right];
nums[right]=tmp;
}
nums[start]=nums[left];
nums[left]=pivot;
return left;
}
};
```
##### 迭代版本
> 用栈模拟函数栈,保存所需参数即可
```c++
struct Border{
int start;
int end;
};
class Solution {
public:
vector<int> sortArray(vector<int>& nums) {
quickSort(nums,0,nums.size()-1);
return nums;
}
void quickSort(vector<int>& nums,int start,int end){
stack<Border*> s;
s.push(new Border{start,end});
while(!s.empty()){
Border* cur=s.top();
s.pop();
int pivotIndex=partition(nums,cur->start,cur->end);
if(cur->start<pivotIndex-1){
s.push(new Border{cur->start,pivotIndex-1});
}
if(cur->end>pivotIndex+1){
s.push(new Border{pivotIndex+1,cur->end});
}
}
}
int partition(vector<int>& nums,int start,int end){
int pivot=nums[start];
int left=start;
int right=end;
while(left!=right){
while(right>left&&nums[right]>pivot){
right--;
}
while(left<right&&nums[left]<=pivot){
left++;
}
int tmp=nums[left];
nums[left]=nums[right];
nums[right]=tmp;
}
nums[start]=nums[left];
nums[left]=pivot;
return left;
}
};
```
### 堆排序
> 堆是完全二叉树,可直接用数组来存。堆排序基于堆。
#### 特点
- 不稳定
- 空间复杂度O(1),时间复杂度O(nlogn)
#### 算法
1. `构建堆`
2. `替换堆顶和末尾元素,末尾部分已有序`
3. `长度减一`
4. `调整,继续执行2,直到全部有序`
#### 代码
```c++
class Solution {
public:
vector<int> sortArray(vector<int>& nums) {
for(int i=(nums.size()-2)/2;i>=0;i--){
ajust(nums,i,nums.size());
}
for(int i=nums.size()-1;i>0;i--){
int tmp=nums[i];
nums[i]=nums[0];
nums[0]=tmp;
ajust(nums,0,i);
}
return nums;
}
void ajust(vector<int>& nums,int parent,int len){
int tmp=nums[parent];
int child=2*parent+1;
while(child<len){
if(child+1<len&&nums[child+1]>nums[child]){
child++;
}
if(tmp>=nums[child]){
break;
}
nums[parent]=nums[child];
parent=child;
child=2*parent+1;
}
nums[parent]=tmp;
}
};
```
## 二叉树
### 二叉树遍历
> 用栈数据结构模拟系统栈,代码统一化
#### 算法
1. `初始化栈s、访问集visited`
2. `根节点压栈s`
3. `当s不为空执行下列操作:`
1. `弹栈得到元素A`
2. `如果A未被访问`
1. `则A标记为已访问,`
2. `然后A、A->left、A->right依次入栈**(不同遍历顺序恰恰相反)**`
3. `如果A已被访问`
1. `输出A`
4. `结束`
#### 代码
- 前序遍历
```c++
/**
* Definition for a binary tree node.
* struct TreeNode {
* int val;
* TreeNode *left;
* TreeNode *right;
* TreeNode(int x) : val(x), left(NULL), right(NULL) {}
* };
*/
class Solution {
public:
vector<int> preorderTraversal(TreeNode* root) {
vector<int> result;
set<TreeNode*> visited;
stack<TreeNode*> s;
s.push(root);
while(!s.empty()){
TreeNode* cur=s.top();
s.pop();
if(!cur) continue;
if(visited.count(cur)==0){
s.push(cur->right);
s.push(cur->left);
s.push(cur);
visited.insert(cur);
}
else{
result.push_back(cur->val);
}
}
return result;
}
};
```
- 中序遍历
```c++
/**
* Definition for a binary tree node.
* struct TreeNode {
* int val;
* TreeNode *left;
* TreeNode *right;
* TreeNode(int x) : val(x), left(NULL), right(NULL) {}
* };
*/
class Solution {
public:
vector<int> inorderTraversal(TreeNode* root) {
set<TreeNode*> visited;
stack<TreeNode*> s;
vector<int> result;
if(!root) return result;
s.push(root);
while(!s.empty()){
TreeNode* cur=s.top();
s.pop();
if(!cur) continue;
if(visited.find(cur)==visited.end()){
visited.insert(cur);
s.push(cur->right);
s.push(cur);
s.push(cur->left);
}
else{
result.push_back(cur->val);
}
}
return result;
}
};
```
- 后序遍历
```c++
/**
* Definition for a binary tree node.
* struct TreeNode {
* int val;
* TreeNode *left;
* TreeNode *right;
* TreeNode(int x) : val(x), left(NULL), right(NULL) {}
* };
*/
class Solution {
public:
vector<int> postorderTraversal(TreeNode* root) {
vector<int> result;
set<TreeNode*> visited;
stack<TreeNode*> s;
s.push(root);
while(!s.empty()){
TreeNode* cur=s.top();
s.pop();
if(!cur) continue;
if(visited.count(cur)==0){
s.push(cur);
visited.insert(cur);
s.push(cur->right);
s.push(cur->left);
}
else{
result.push_back(cur->val);
}
}
return result;
}
};
```
<file_sep>//实体类
// Product contract shared by every builder in this demo.
export interface Product {
  listParts(): string;
  addParts(part: number): void;
}
// Concrete product rendering its parts joined with "-".
export class Product1 implements Product {
  private parts: number[] = [];
  listParts(): string {
    return this.parts.join("-");
  }
  addParts(part: number): void {
    this.parts.push(part);
  }
}
// Concrete product rendering its parts joined with "+".
export class Product2 implements Product {
  private parts: number[] = [];
  listParts(): string {
    return this.parts.join("+");
  }
  addParts(part: number): void {
    this.parts.push(part);
  }
}
// Builder contract: each buildStepN returns the builder itself so the steps
// can be chained; getResult() hands over the product and resets the builder.
export interface Builder {
  reset(): void;
  buildStep1(): Builder;
  buildStep2(): Builder;
  buildStep3(): Builder;
  getResult(): Product;
}
// Builder assembling Product1 instances.
export class Builder1 implements Builder {
  private result: Product = new Product1();
  reset(): void {
    this.result = new Product1();
  }
  buildStep1(): Builder {
    this.result.addParts(1);
    return this;
  }
  buildStep2(): Builder {
    this.result.addParts(2);
    return this;
  }
  buildStep3(): Builder {
    this.result.addParts(3);
    return this;
  }
  // Hands over the finished product and immediately starts a fresh one.
  getResult(): Product {
    let res = this.result;
    this.reset();
    return res;
  }
}
// Builder assembling Product2 instances (same steps, different product).
export class Builder2 implements Builder {
  private result: Product = new Product2();
  reset(): void {
    this.result = new Product2();
  }
  buildStep1(): Builder {
    this.result.addParts(1);
    return this;
  }
  buildStep2(): Builder {
    this.result.addParts(2);
    return this;
  }
  buildStep3(): Builder {
    this.result.addParts(3);
    return this;
  }
  // Hands over the finished product and immediately starts a fresh one.
  getResult(): Product {
    let res = this.result;
    this.reset();
    return res;
  }
}
type BuilderType = "simple" | "normal";
// Director: knows the step recipes and drives whichever Builder it holds.
export class Director {
  // `private builder` is a TypeScript parameter property — it declares AND
  // assigns the field automatically, so no body statement is needed (the
  // previous explicit `this.builder = builder` was redundant).
  constructor(private builder: Builder) {}

  /** Swap the builder used by subsequent make() calls. */
  changeBuilder(builder: Builder) {
    this.builder = builder;
  }

  /**
   * Run a predefined recipe and return the finished product:
   * "normal" executes steps 1, 2 and 3; "simple" executes only step 3.
   */
  make(type: BuilderType): Product {
    switch (type) {
      case "normal":
        this.builder.buildStep1().buildStep2();
      // fallthrough intended: "normal" also performs step 3 below
      case "simple":
        this.builder.buildStep3();
        break;
      default:
      // exhaustive over BuilderType — nothing to do
    }
    return this.builder.getResult();
  }
}
<file_sep>class DLinkedNode{
constructor(public k=0,public v=0,public prev:any=null,public next:any=null){}
}
// Least-recently-used cache: a hash map gives O(1) key lookup, a doubly
// linked list keeps entries ordered from most recently used (just after
// `head`) to least recently used (just before `tail`).
class LRUCache {
  constructor(
    public capacity: number,
    public size = 0,
    public m = new Map<number, DLinkedNode>(),
    // head/tail are permanent sentinels; real entries live between them.
    public head = new DLinkedNode(),
    public tail = new DLinkedNode()
  ) {
    this.head.next = this.tail;
    this.tail.prev = this.head;
  }
  // Return the value for `key` (or -1) and mark the entry most recently used.
  get(key: number): number {
    if (!this.m.has(key)) return -1;
    let node = this.m.get(key) as DLinkedNode;
    this.moveToHead(node);
    return node.v;
  }
  // Insert or update `key`; evicts the least recently used entry when the
  // capacity is exceeded.
  put(key: number, value: number): void {
    if (!this.m.has(key)) {
      let node = new DLinkedNode(key, value);
      this.m.set(key, node);
      this.addToHead(node);
      ++this.size;
      if (this.size > this.capacity) {
        let removed = this.removeTail();
        this.m.delete(removed);
        --this.size;
      }
    } else {
      let node = this.m.get(key) as DLinkedNode;
      node.v = value;
      this.moveToHead(node);
    }
  }
  // Splice `node` in right after the head sentinel.
  addToHead(node: DLinkedNode) {
    node.next = this.head.next;
    node.next.prev = node;
    node.prev = this.head;
    this.head.next = node;
  }
  // Unlink `node` from wherever it currently sits.
  removeNode(node: DLinkedNode) {
    node.prev.next = node.next;
    node.next.prev = node.prev;
  }
  moveToHead(node: DLinkedNode) {
    this.removeNode(node);
    this.addToHead(node);
  }
  // Drop the least recently used entry and return its key for map cleanup.
  removeTail(): number {
    let ret = this.tail.prev;
    this.removeNode(ret);
    return ret.k;
  }
}
/**
* Your LRUCache object will be instantiated and called as such:
* var obj = new LRUCache(capacity)
* var param_1 = obj.get(key)
* obj.put(key,value)
*/
<file_sep>/**
* Definition for a binary tree node.
* function TreeNode(val) {
* this.val = val;
* this.left = this.right = null;
* }
*/
/**
* @param {TreeNode} root
* @param {TreeNode} p
* @param {TreeNode} q
* @return {TreeNode}
*/
var lowestCommonAncestor = function (root, p, q) {
    // Idea: collect every node whose inorder position lies between p and q
    // (inclusive); the LCA is the first of those nodes met in preorder.
    let s = [];
    let visited = new Set();
    if (root === null) return null;
    let tmp = new Set();    // nodes inside the inorder window [p..q]
    s.push(root);
    let flag = 0;           // how many of {p, q} have been emitted so far
    // Phase 1: iterative inorder traversal — a node is pushed back once and
    // emitted on its second visit (push order: right, self, left).
    while (s.length !== 0) {
        let cur = s.pop();
        if (!visited.has(cur)) {
            visited.add(cur);
            if (cur.right) s.push(cur.right);
            s.push(cur);
            if (cur.left) s.push(cur.left);
        } else {
            if (cur === p || cur === q) {
                flag++;
            }
            if (flag > 0) {
                tmp.add(cur);   // inside the window once the first target appears
            }
            if (flag === 2) break;  // window closes after the second target
        }
    }
    // Phase 2: iterative preorder traversal; the first node that belongs to
    // the inorder window is the lowest common ancestor.
    s = [];
    visited.clear();
    s.push(root);
    while (s.length !== 0) {
        let cur = s.pop();
        if (!visited.has(cur)) {
            visited.add(cur);
            if (cur.right) s.push(cur.right);
            if (cur.left) s.push(cur.left);
            s.push(cur);
        } else {
            if (tmp.has(cur)) return cur;
        }
    }
    return null;
};<file_sep>use std::cmp::min;
/// One stack slot: the pushed value plus the minimum of the whole stack
/// up to and including this slot.
struct Data {
    val: i32,
    min: i32,
}

/// Stack supporting push / pop / top / get_min, all in O(1).
struct MinStack {
    data: Vec<Data>,
}

/**
 * `&self` means the method takes an immutable reference.
 * If you need a mutable reference, change it to `&mut self` instead.
 */
impl MinStack {
    /** initialize your data structure here. */
    fn new() -> Self {
        MinStack { data: Vec::new() }
    }

    fn push(&mut self, x: i32) {
        // Running minimum: x itself when the stack is empty, otherwise the
        // smaller of x and the minimum recorded at the current top.
        let running_min = self.data.last().map_or(x, |top| min(x, top.min));
        self.data.push(Data { val: x, min: running_min });
    }

    fn pop(&mut self) {
        self.data.pop();
    }

    fn top(&self) -> i32 {
        self.data.last().unwrap().val
    }

    fn get_min(&self) -> i32 {
        self.data.last().unwrap().min
    }
}
/**
* Your MinStack object will be instantiated and called as such:
* let obj = MinStack::new();
* obj.push(x);
* obj.pop();
* let ret_3: i32 = obj.top();
* let ret_4: i32 = obj.get_min();
<file_sep>class Solution
{
public:
    // Walks the matrix in spiral order by shrinking four boundaries
    // (top, bottom, left, right) after each completed edge; stops as soon
    // as any pair of opposite boundaries crosses.
    vector<int> spiralOrder(vector<vector<int>> &matrix)
    {
        vector<int> ans;
        if (matrix.size() == 0)
            return ans;
        int left = 0, top = 0, bottom = matrix.size() - 1, right = matrix[0].size() - 1;
        while (true)
        {
            // Top edge: left -> right, then move the top boundary down.
            for (int i = left; i <= right; i++)
                ans.push_back(matrix[top][i]);
            top++;
            if (top > bottom)
                break;
            // Right edge: top -> bottom, then move the right boundary in.
            for (int i = top; i <= bottom; i++)
                ans.push_back(matrix[i][right]);
            right--;
            if (right < left)
                break;
            // Bottom edge: right -> left, then move the bottom boundary up.
            for (int i = right; i >= left; i--)
                ans.push_back(matrix[bottom][i]);
            bottom--;
            if (bottom < top)
                break;
            // Left edge: bottom -> top, then move the left boundary in.
            for (int i = bottom; i >= top; i--)
                ans.push_back(matrix[i][left]);
            left++;
            if (left > right)
                break;
        }
        return ans;
    }
};<file_sep>import { WordsCollection } from "./迭代器模式";
// The forward iterator must yield the words in insertion order; the reverse
// iterator must yield the same words in the opposite order.
test("测试迭代器模式", () => {
  let words = new WordsCollection();
  words.addItem("what");
  words.addItem("is");
  words.addItem("your");
  words.addItem("name");
  let it = words.getIterator();
  let idx = 0;
  while (it.valid()) {
    expect(it.next()).toBe(words.getItems()[idx++]);
  }
  let itReverse = words.getIteratorReverse();
  let idxReverse = words.getCount() - 1;
  while (itReverse.valid()) {
    expect(itReverse.next()).toBe(words.getItems()[idxReverse--]);
  }
});
<file_sep>/**
* @param {string} s1
* @param {string} s2
* @return {boolean}
*/
// True iff s2 is a permutation of s1: same characters with the same counts.
// The previous set-based comparison ignored multiplicity, so inputs such as
// ("aab", "abb") were wrongly accepted; compare occurrence counts instead.
var CheckPermutation = function (s1, s2) {
    if (s1.length !== s2.length) return false;
    const counts = new Map();
    for (const c of s1) counts.set(c, (counts.get(c) || 0) + 1);
    for (const c of s2) {
        const remaining = counts.get(c);
        if (!remaining) return false; // c absent from s1 or already used up
        counts.set(c, remaining - 1);
    }
    return true;
};
# class TreeNode:
# def __init__(self, val=0, left=None, right=None):
# self.val = val
# self.left = left
# self.right = right
class Solution:
    def countNodes(self, root: TreeNode) -> int:
        """Count the nodes of a complete binary tree in O(log^2 n).

        Exploits completeness: when the leftmost and rightmost spine depths
        match, the subtree is perfect and holds 2**depth - 1 nodes; otherwise
        recurse into both children (at each level at least one side resolves
        via the closed form).
        """
        if not root:
            return 0
        # Depth measured down the leftmost spine.
        depth_left, node = 1, root
        while node.left:
            node = node.left
            depth_left += 1
        # Depth measured down the rightmost spine.
        depth_right, node = 1, root
        while node.right:
            node = node.right
            depth_right += 1
        if depth_left == depth_right:
            # Perfect subtree: use the closed-form count.
            return 2 ** depth_left - 1
        return 1 + self.countNodes(root.left) + self.countNodes(root.right)
<file_sep>import { HTMLDialog, MFCDialog } from "./跨平台UI组件";
// Each dialog factory must create a button whose render() emits its own
// platform-specific markup.
test("测试跨平台UI组件工厂方法", () => {
  let htmlDialog = new HTMLDialog();
  let mfcDialog = new MFCDialog();
  expect(htmlDialog.createButton().render()).toBe("<button></button>");
  expect(mfcDialog.createButton().render()).toBe("<mfc-button></mfc-button>");
});
<file_sep>/**
* @param {number[]} nums
* @return {number[][]}
*/
// Subsets II: all unique subsets of an array that may contain duplicates.
// Sorting groups equal values together, so each run of duplicates is either
// skipped entirely or consumed one element at a time — which is exactly what
// prevents repeated subsets.
var subsetsWithDup = function (nums) {
    const res = [];
    nums.sort((a, b) => a - b);
    const dfs = (path, start) => {
        if (start === nums.length) {
            res.push(path);
            return;
        }
        // First index past the run of values equal to nums[start].
        let next = start + 1;
        while (next < nums.length && nums[next] === nums[start]) next += 1;
        // Branch 1: take none of this value — jump over the whole run.
        dfs([...path], next);
        // Branch 2: take one copy and stay inside the run.
        dfs([...path, nums[start]], start + 1);
    };
    dfs([], 0);
    return res;
};
# class TreeNode:
# def __init__(self, val=0, left=None, right=None):
# self.val = val
# self.left = left
# self.right = right
class Solution:
    def buildTree(self, inorder: List[int], postorder: List[int]) -> TreeNode:
        """Rebuild a binary tree from its inorder and postorder traversals.

        The last postorder element is the root; its position in ``inorder``
        splits both traversals into the left/right subtree slices.
        Assumes distinct node values (``list.index`` relies on that).
        NOTE(review): O(n^2) worst case due to index() + slicing; a
        value-to-index map would make it O(n).
        """
        if len(inorder) == 0:
            return None
        l = len(inorder)
        # The root is always the final element of the postorder traversal.
        val = postorder[l - 1]
        inorderIndex = inorder.index(val)
        # Everything left of the root in inorder is the left subtree.
        leftCount = inorderIndex
        # postorder layout: [left subtree][right subtree][root]
        left = self.buildTree(inorder[:inorderIndex], postorder[:leftCount])
        right = self.buildTree(inorder[inorderIndex + 1:], postorder[leftCount:l-1])
        return TreeNode(val, left, right)
<file_sep># Definition for a binary tree node.
# class TreeNode:
# def __init__(self, val=0, left=None, right=None):
# self.val = val
# self.left = left
# self.right = right
class Solution:
    def constructFromPrePost(self, preorder: List[int], postorder: List[int]) -> TreeNode:
        """Rebuild a binary tree from preorder + postorder traversals.

        Pre/post order does not uniquely determine a tree: when a node has a
        single child this code attaches it as the LEFT child (``preorder[1]``
        is taken as the left subtree's root). Assumes distinct values.
        """
        if len(preorder) == 0:
            return None
        l = len(preorder)
        # The root is the first preorder element.
        val = preorder[0]
        if l == 1:
            return TreeNode(val)
        # preorder[1] starts the left subtree; its position in postorder marks
        # the end of that subtree (a subtree's root closes its postorder span).
        valOfLeftChild = preorder[1]
        postorderIndex = postorder.index(valOfLeftChild)
        leftCount = postorderIndex + 1
        left = self.constructFromPrePost(preorder[1:1 + leftCount], postorder[:leftCount])
        right = self.constructFromPrePost(preorder[1 + leftCount:], postorder[leftCount:l - 1])
        return TreeNode(val, left, right)
<file_sep>/**
* Definition for a binary tree node.
* struct TreeNode {
* int val;
* TreeNode *left;
* TreeNode *right;
* TreeNode(int x) : val(x), left(NULL), right(NULL) {}
* };
*/
class Solution
{
public:
    // Lowest common ancestor via two iterative traversals:
    //  Phase 1 (inorder): collect every node whose inorder position lies
    //      between p and q inclusive into `candidates`.
    //  Phase 2 (preorder): the first preorder node that falls inside that
    //      inorder window is the LCA.
    TreeNode *lowestCommonAncestor(TreeNode *root, TreeNode *p, TreeNode *q)
    {
        stack<TreeNode *> s;
        unordered_set<TreeNode *> visited;
        unordered_set<TreeNode *> candidates; // nodes between p and q in inorder
        int tag = 0;                          // how many of {p, q} emitted so far
        s.push(root);
        // Phase 1: iterative inorder — a node is re-pushed once and emitted
        // on its second visit (push order: right, self, left).
        while (!s.empty())
        {
            TreeNode *cur = s.top();
            s.pop();
            if (!visited.count(cur))
            {
                visited.insert(cur);
                if (cur->right)
                    s.push(cur->right);
                s.push(cur);
                if (cur->left)
                    s.push(cur->left);
            }
            else
            {
                if (cur == p || cur == q)
                {
                    tag++;
                }
                if (tag > 0)
                {
                    candidates.insert(cur);
                }
                if (tag == 2)
                    break; // window closes after the second target
            }
        }
        // Reset traversal state; swapping with a temporary frees the
        // stack's storage as well.
        stack<TreeNode *>().swap(s);
        visited.clear();
        s.push(root);
        // Phase 2: iterative preorder; first hit in `candidates` is the LCA.
        while (!s.empty())
        {
            TreeNode *cur = s.top();
            s.pop();
            if (!visited.count(cur))
            {
                visited.insert(cur);
                if (cur->right)
                    s.push(cur->right);
                if (cur->left)
                    s.push(cur->left);
                s.push(cur);
            }
            else
            {
                if (candidates.count(cur))
                    return cur;
            }
        }
        return NULL;
    }
};<file_sep>/**
* @param {number} num
* @return {number[]}
*/
var countBits = function (num) {
    // dp[i] holds the number of set bits (popcount) of i.
    // Recurrence: popcount(i) = popcount(i >> 1) + (lowest bit of i).
    let dp = Array(num + 1);
    dp[0] = 0;
    for (let i = 1; i <= num; i++) {
        if (i % 2 === 1) {
            // Odd: low bit is 1, so one more bit than floor(i / 2).
            dp[i] = 1 + dp[i / 2 | 0];
            continue;
        }
        // Even: same bit count as floor(i / 2).
        dp[i] = dp[i / 2 | 0];
    }
    return dp;
};<file_sep>use std::cmp::max;
impl Solution {
pub fn rob(nums: Vec<i32>) -> i32 {
if nums.len()<=2{
return nums.into_iter().max().unwrap_or(0);
}
let mut dp=vec![0;nums.len()];
dp[0]=nums[0];
dp[1]=max(nums[0],nums[1]);
for i in 2..dp.len(){
dp[i]=max(dp[i-1],dp[i-2]+nums[i]);
}
dp[dp.len()-1]
}
}<file_sep>/**
* Definition for a binary tree node.
* class TreeNode {
* val: number
* left: TreeNode | null
* right: TreeNode | null
* constructor(val?: number, left?: TreeNode | null, right?: TreeNode | null) {
* this.val = (val===undefined ? 0 : val)
* this.left = (left===undefined ? null : left)
* this.right = (right===undefined ? null : right)
* }
* }
*/
// Rebuild a binary tree from its preorder + inorder traversals.
// Precomputes a value -> inorder-index map so each recursive split in
// buildTreeDFS locates the root in O(1), giving O(n) overall.
var buildTree = function (
  preorder: number[],
  inorder: number[]
): TreeNode | null {
  const inMap = new Map();
  inorder.forEach((v, i) => inMap.set(v, i));
  return buildTreeDFS(
    preorder,
    0,
    preorder.length - 1,
    inMap,
    0,
    inorder.length - 1
  );
};
// Recursive worker: rebuilds the subtree covering preorder[preL..preR] and
// inorder[inL..inR]. `inMap` maps a value to its inorder index.
// Assumes distinct node values.
function buildTreeDFS(
  preorder: number[],
  preL: number,
  preR: number,
  inMap: Map<number, number>,
  inL: number,
  inR: number
): TreeNode | null {
  if (preL > preR || inL > inR) return null;
  // The first preorder element of the range is the subtree root.
  let val = preorder[preL];
  let node = new TreeNode(val);
  let pIndex = inMap.get(val);
  if (typeof pIndex === "undefined") {
    throw Error("inMap error!");
  }
  // The left subtree holds (pIndex - inL) nodes; carve both ranges to match.
  node.left = buildTreeDFS(
    preorder,
    preL + 1,
    pIndex - inL + preL,
    inMap,
    inL,
    pIndex - 1
  );
  node.right = buildTreeDFS(
    preorder,
    pIndex - inL + preL + 1,
    preR,
    inMap,
    pIndex + 1,
    inR
  );
  return node;
}
<file_sep>/**
* @param {string} s
* @return {boolean}
*/
// A string can be permuted into a palindrome iff at most one character
// occurs an odd number of times. Toggling set membership leaves exactly
// the odd-count characters in the set.
var canPermutePalindrome = function (s) {
    const oddChars = new Set();
    for (const c of s) {
        if (oddChars.has(c)) oddChars.delete(c);
        else oddChars.add(c);
    }
    return oddChars.size < 2;
};
"SQL",
"JavaScript",
"Markdown",
"Rust",
"Python",
"TypeScript",
"Go",
"C++"
] | 246 | Python | ScarboroughCoral/Notes | a6005899c143197895f530e06f6259a3a05d20a4 | 30d9492bc2a92172795cfd9674e0d798a0757131 |
refs/heads/master | <repo_name>matmazur/spring-data-test<file_sep>/src/main/java/com/matmazur/SpringDataApplication.java
package com.matmazur;
import com.matmazur.model.Person;
import com.matmazur.repository.PersonRepository;
import org.springframework.context.annotation.AnnotationConfigApplicationContext;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
import java.util.Arrays;
import java.util.List;
@Configuration
@ComponentScan
public class SpringDataApplication {
public static void main(String[] args) {
AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(SpringDataApplication.class);
Person p1 = Person.builder().withName("Jack").withSurname("Strong").withAge(36).build();
Person p2 = Person.builder().withName("John").withSurname("Doe").withAge(44).build();
Person p3 = Person.builder().withName("Amy").withSurname("Ashley").withAge(22).build();
List<Person> list = Arrays.asList(p1, p2, p3);
PersonRepository repo = context.getBean(PersonRepository.class);
repo.saveAll(list);
repo.findAll().forEach(System.out::println);
}
} | 98b874d879e536a0a0dbb229956ceaf500d1a948 | [
"Java"
] | 1 | Java | matmazur/spring-data-test | debb4e2a162fc721d8f639882cb2747e365e088d | 2858683051ed69d9c4845b386256cef438ac74b1 |
refs/heads/master | <file_sep><?php
/* @var $this \yii\web\View */
/* @var $content string */
use yii\helpers\Html;
use yii\bootstrap\Nav;
use yii\bootstrap\NavBar;
use yii\widgets\Breadcrumbs;
use app\assets\AppAsset;
use yii\helpers\Url;
AppAsset::register($this);
?>
<?php $this->beginPage() ?>
<!DOCTYPE html>
<html lang="<?= Yii::$app->language ?>">
<head>
<meta charset="<?= Yii::$app->charset ?>">
<meta name="viewport" content="width=device-width, initial-scale=1">
<?= Html::csrfMetaTags() ?>
<title><?= Html::encode($this->title) ?></title>
<?php $this->head() ?>
</head>
<body>
<?php $this->beginBody() ?>
<div id="wrapper">
<!-- Navigation -->
<nav class="navbar navbar-default navbar-static-top" role="navigation" style="margin-bottom: 0">
<div class="navbar-header">
<button type="button" class="navbar-toggle" data-toggle="collapse" data-target=".navbar-collapse">
<span class="sr-only">Toggle navigation</span>
<span class="icon-bar"></span>
<span class="icon-bar"></span>
<span class="icon-bar"></span>
</button>
<a class="navbar-brand" href="<?php echo Url::toRoute('/site/index'); ?>">Sumka</a>
<a class="navbar-brand" href="<?php echo Url::toRoute('/site/about'); ?>">About</a>
<a class="navbar-brand" href="<?php echo Url::toRoute('/site/contact'); ?>">Contact</a>
<a class="navbar-brand" href="<?php echo Url::toRoute('/site/login'); ?>">Login</a>
</div>
<!-- /.navbar-header -->
<ul class="nav navbar-top-links navbar-right">
<li class="dropdown">
<a class="dropdown-toggle" data-toggle="dropdown" href="#">
<i class="fa fa-envelope fa-fw"></i> <i class="fa fa-caret-down"></i>
</a>
<ul class="dropdown-menu dropdown-messages">
<li>
<a href="#">
<div>
<strong><NAME></strong>
<span class="pull-right text-muted">
<em>Yesterday</em>
</span>
</div>
<div>Lorem ipsum dolor sit amet, consectetur adipiscing elit. Pellentesque eleifend...</div>
</a>
</li>
<li class="divider"></li>
<li>
<a href="#">
<div>
<strong><NAME></strong>
<span class="pull-right text-muted">
<em>Yesterday</em>
</span>
</div>
<div>Lorem ipsum dolor sit amet, consectetur adipiscing elit. Pellentesque eleifend...</div>
</a>
</li>
<li class="divider"></li>
<li>
<a href="#">
<div>
<strong><NAME></strong>
<span class="pull-right text-muted">
<em>Yesterday</em>
</span>
</div>
<div>Lorem ipsum dolor sit amet, consectetur adipiscing elit. Pellentesque eleifend...</div>
</a>
</li>
<li class="divider"></li>
<li>
<a class="text-center" href="#">
<strong>Read All Messages</strong>
<i class="fa fa-angle-right"></i>
</a>
</li>
</ul>
<!-- /.dropdown-messages -->
</li>
<!-- /.dropdown -->
<li class="dropdown">
<a class="dropdown-toggle" data-toggle="dropdown" href="#">
<i class="fa fa-tasks fa-fw"></i> <i class="fa fa-caret-down"></i>
</a>
<ul class="dropdown-menu dropdown-tasks">
<li>
<a href="#">
<div>
<p>
<strong>Task 1</strong>
<span class="pull-right text-muted">40% Complete</span>
</p>
<div class="progress progress-striped active">
<div class="progress-bar progress-bar-success" role="progressbar" aria-valuenow="40" aria-valuemin="0" aria-valuemax="100" style="width: 40%">
<span class="sr-only">40% Complete (success)</span>
</div>
</div>
</div>
</a>
</li>
<li class="divider"></li>
<li>
<a href="#">
<div>
<p>
<strong>Task 2</strong>
<span class="pull-right text-muted">20% Complete</span>
</p>
<div class="progress progress-striped active">
<div class="progress-bar progress-bar-info" role="progressbar" aria-valuenow="20" aria-valuemin="0" aria-valuemax="100" style="width: 20%">
<span class="sr-only">20% Complete</span>
</div>
</div>
</div>
</a>
</li>
<li class="divider"></li>
<li>
<a href="#">
<div>
<p>
<strong>Task 3</strong>
<span class="pull-right text-muted">60% Complete</span>
</p>
<div class="progress progress-striped active">
<div class="progress-bar progress-bar-warning" role="progressbar" aria-valuenow="60" aria-valuemin="0" aria-valuemax="100" style="width: 60%">
<span class="sr-only">60% Complete (warning)</span>
</div>
</div>
</div>
</a>
</li>
<li class="divider"></li>
<li>
<a href="#">
<div>
<p>
<strong>Task 4</strong>
<span class="pull-right text-muted">80% Complete</span>
</p>
<div class="progress progress-striped active">
<div class="progress-bar progress-bar-danger" role="progressbar" aria-valuenow="80" aria-valuemin="0" aria-valuemax="100" style="width: 80%">
<span class="sr-only">80% Complete (danger)</span>
</div>
</div>
</div>
</a>
</li>
<li class="divider"></li>
<li>
<a class="text-center" href="#">
<strong>See All Tasks</strong>
<i class="fa fa-angle-right"></i>
</a>
</li>
</ul>
<!-- /.dropdown-tasks -->
</li>
<!-- /.dropdown -->
<li class="dropdown">
<a class="dropdown-toggle" data-toggle="dropdown" href="#">
<i class="fa fa-bell fa-fw"></i> <i class="fa fa-caret-down"></i>
</a>
<ul class="dropdown-menu dropdown-alerts">
<li>
<a href="#">
<div>
<i class="fa fa-comment fa-fw"></i> New Comment
<span class="pull-right text-muted small">4 minutes ago</span>
</div>
</a>
</li>
<li class="divider"></li>
<li>
<a href="#">
<div>
<i class="fa fa-twitter fa-fw"></i> 3 New Followers
<span class="pull-right text-muted small">12 minutes ago</span>
</div>
</a>
</li>
<li class="divider"></li>
<li>
<a href="#">
<div>
<i class="fa fa-envelope fa-fw"></i> Message Sent
<span class="pull-right text-muted small">4 minutes ago</span>
</div>
</a>
</li>
<li class="divider"></li>
<li>
<a href="#">
<div>
<i class="fa fa-tasks fa-fw"></i> New Task
<span class="pull-right text-muted small">4 minutes ago</span>
</div>
</a>
</li>
<li class="divider"></li>
<li>
<a href="#">
<div>
<i class="fa fa-upload fa-fw"></i> Server Rebooted
<span class="pull-right text-muted small">4 minutes ago</span>
</div>
</a>
</li>
<li class="divider"></li>
<li>
<a class="text-center" href="#">
<strong>See All Alerts</strong>
<i class="fa fa-angle-right"></i>
</a>
</li>
</ul>
<!-- /.dropdown-alerts -->
</li>
<!-- /.dropdown -->
<li class="dropdown">
<a class="dropdown-toggle" data-toggle="dropdown" href="#">
<i class="fa fa-user fa-fw"></i> <i class="fa fa-caret-down"></i>
</a>
<ul class="dropdown-menu dropdown-user">
<li><a href="#"><i class="fa fa-user fa-fw"></i> User Profile</a>
</li>
<li><a href="#"><i class="fa fa-gear fa-fw"></i> Settings</a>
</li>
<li class="divider"></li>
<li><a href="login.html"><i class="fa fa-sign-out fa-fw"></i> Logout</a>
</li>
</ul>
<!-- /.dropdown-user -->
</li>
<!-- /.dropdown -->
</ul>
<!-- /.navbar-top-links -->
<div class="navbar-default sidebar" role="navigation">
<div class="sidebar-nav navbar-collapse">
<ul class="nav" id="side-menu">
<li class="sidebar-search">
<div class="input-group custom-search-form">
<input type="text" class="form-control" placeholder="Search...">
<span class="input-group-btn">
<button class="btn btn-default" type="button">
<i class="fa fa-search"></i>
</button>
</span>
</div>
<!-- /input-group -->
</li>
<li>
<a href="<?php echo Url::toRoute('/site/dashboard'); ?>"><i class="fa fa-dashboard fa-fw"></i> Dashboard</a>
</li>
<li>
<a href="#"><i class="fa fa-bar-chart-o fa-fw"></i> Link 1<span class="fa arrow"></span></a>
<ul class="nav nav-second-level">
<li>
<a href="flot.html">Link 2</a>
</li>
<li>
<a href="morris.html">Link 3</a>
</li>
</ul>
<!-- /.nav-second-level -->
</li>
<li>
<a href="tables.html"><i class="fa fa-table fa-fw"></i> Link 4</a>
</li>
<li>
<a href="forms.html"><i class="fa fa-edit fa-fw"></i> Link 5</a>
</li>
<li>
<a href="#"><i class="fa fa-wrench fa-fw"></i> Link 6<span class="fa arrow"></span></a>
<ul class="nav nav-second-level">
<li>
<a href="panels-wells.html">Link 7</a>
</li>
<li>
<a href="buttons.html">Link 8</a>
</li>
<li>
<a href="notifications.html">Link 9</a>
</li>
<li>
<a href="typography.html">Link 10</a>
</li>
<li>
<a href="icons.html"> Link 11</a>
</li>
<li>
<a href="grid.html">Link 12</a>
</li>
</ul>
<!-- /.nav-second-level -->
</li>
<li>
<a href="#"><i class="fa fa-sitemap fa-fw"></i> Link 13<span class="fa arrow"></span></a>
<ul class="nav nav-second-level">
<li>
<a href="#">Link 14</a>
</li>
<li>
<a href="#">Link 15</a>
</li>
<li>
<a href="#">Third Level <span class="fa arrow"></span></a>
<ul class="nav nav-third-level">
<li>
<a href="#">Third Level Item</a>
</li>
<li>
<a href="#">Third Level Item</a>
</li>
<li>
<a href="#">Third Level Item</a>
</li>
<li>
<a href="#">Third Level Item</a>
</li>
</ul>
<!-- /.nav-third-level -->
</li>
</ul>
<!-- /.nav-second-level -->
</li>
<li>
<a href="#"><i class="fa fa-files-o fa-fw"></i> Link 16<span class="fa arrow"></span></a>
<ul class="nav nav-second-level">
<li>
<a href="blank.html">Link 17</a>
</li>
<li>
<a href="login.html">Link 18</a>
</li>
</ul>
<!-- /.nav-second-level -->
</li>
</ul>
</div>
<!-- /.sidebar-collapse -->
</div>
<!-- /.navbar-static-side -->
</nav>
<div id="page-wrapper">
<?= Breadcrumbs::widget([
'links' => isset($this->params['breadcrumbs']) ? $this->params['breadcrumbs'] : [],
]) ?>
<?= $content ?>
</div>
<!-- /#page-wrapper -->
</div>
<!-- /#wrapper -->
<div class="wrap">
</div>
<footer class="footer">
<div class="container">
<p class="pull-left">© My Company <?= date('Y') ?></p>
<p class="pull-right"><?= Yii::powered() ?></p>
</div>
</footer>
<?php $this->endBody() ?>
</body>
</html>
<?php $this->endPage() ?>
<file_sep><?php
namespace app\assets;
use yii\web\AssetBundle;
/**
* Sbadmin2Asset bundle
*/
class Sbadmin2Asset extends AssetBundle
{
    // Root of the published asset files (Bower package for SB Admin 2).
    public $sourcePath = '@bower/startbootstrap-sb-admin-2/dist';
    // Stylesheets registered relative to $sourcePath.
    public $css = [
        'css/sb-admin-2.css',
        'css/timeline.css',
    ];
    // Scripts registered relative to $sourcePath.
    public $js = [
        'js/sb-admin-2.js'
    ];
}
| ce66f654899710e7fe52f4f21c8f06da806f1aae | [
"PHP"
] | 2 | PHP | d-bo/yii2-sb | ff82d2f0f48e4fad804cfc798cce5cd1e38738e0 | caf2d8b87fe12058d5e62b4d504f1538367f76a7 |
refs/heads/master | <file_sep>package cn.onloc.service.impl;
import cn.onloc.bean.User;
import cn.onloc.dao.UserDao;
import cn.onloc.service.UserService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
@Service
public class UserServiceImpl implements UserService {

    // Restored @Autowired: it was commented out, so userDao was never
    // injected and every call below failed with a NullPointerException.
    @Autowired
    private UserDao userDao;

    /**
     * Look up a user by primary key.
     *
     * @param id database id of the user
     * @return the matching User, or null if the DAO finds none
     */
    @Override
    public User getUserById(Integer id) {
        return userDao.getUserById(id);
    }

    /**
     * NOTE(review): despite its name this method only reads the user and
     * performs no update — presumably a placeholder; confirm intent.
     *
     * @param id database id of the user
     * @return the matching User, or null if the DAO finds none
     */
    @Override
    public User updateUserById(Integer id) {
        return userDao.getUserById(id);
    }
}
<file_sep>package cn.onloc.test;
import cn.onloc.bean.User;
import cn.onloc.service.UserService;
import cn.onloc.service.impl.UserServiceImpl;
import org.springframework.context.ApplicationContext;
import org.springframework.context.support.ClassPathXmlApplicationContext;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.transaction.annotation.Transactional;
import static org.springframework.transaction.TransactionDefinition.ISOLATION_DEFAULT;
/**
 * Demo that runs two JDBC updates with a deliberate runtime failure between
 * them, used to observe transaction/rollback behaviour.
 */
public class Transaction_1 {
    public static void main(String[] srgs) {
        ApplicationContext acx = new ClassPathXmlApplicationContext("mybatis.xml");
        JdbcTemplate jdbcTemplate = (JdbcTemplate) acx.getBean("jdbcTemplate");

        String sql = "update user_t set age = 50 where id = ?";
        int count = jdbcTemplate.update(sql, 1);

        // Deliberate ArithmeticException: everything below is unreachable
        // unless this line is removed — it exists to trigger a rollback.
        int a = 1 / 0;

        String sql_2 = "update user_t set age = 40 where id = ?";
        // Fixed: the second update previously re-ran `sql` instead of `sql_2`.
        int count_2 = jdbcTemplate.update(sql_2, 1);
        System.out.println("count{}:" + count_2);
    }
}
<file_sep>jdbc.drivername=org.postgresql.Driver
jdbc.url=jdbc:postgresql://localhost:5432/test_1
jdbc.username=postgres
jdbc.password=<PASSWORD><file_sep>package cn.onloc.dao;
import cn.onloc.bean.User;
public interface UserDao {
    // Fetch a single user row by primary key; null when no row matches.
    User getUserById(Integer id);
}
<file_sep>package cn.onloc.dao;
// Empty placeholder DAO — no behaviour implemented yet.
public class JdbcTemplate_dao {
}
<file_sep># vue_test
My Vue.js test project.
| 39093f65a274186e8179a13ae2bbe92543c8d180 | [
"Markdown",
"Java",
"INI"
] | 6 | Java | notliulk/vue_test | a666e9d3b4791445f45c7d1f63aab64151c15e63 | 0cbf452310f23019eefba11e4a0f8dc6d0fec8e3 |
refs/heads/master | <file_sep>cask 'harmony' do
version '0.4.5'
sha256 '4baefe6ff06a977afcb4bf8cf6ba0449b13a62876fb47539f2778671d49e27e8'
# github.com/vincelwt/harmony was verified as official when first introduced to the cask
url "https://github.com/vincelwt/harmony/releases/download/v#{version}/harmony-#{version}.dmg"
appcast 'https://github.com/vincelwt/harmony/releases.atom',
checkpoint: 'a02fdc8573b9852046d096542dd2f2f7f0c2b1c7d37ab88e52de0353434fcd26'
name 'Harmony'
homepage 'http://getharmony.xyz/'
app 'Harmony.app'
end
<file_sep>cask 'teampaper' do
version '2.0.8610'
sha256 'bb0911addcc22462ebf826137c049411a9b6215cf80917939e592d9e5065435c'
url 'http://teampaper.me/snap/download/teampapersnap.dmg'
appcast 'https://www.teampaper.me/sparkle/sparkle.xml',
checkpoint: '2f477deb9b9b1eb2911395024627d6cb5e5588cd2a9be54d980cfe4c6db7c95e'
name 'TeamPaper'
homepage 'https://teampaper.me/snap/'
app 'TeamPaper.app'
end
<file_sep>cask 'glyphs' do
version '2.4.1-971'
sha256 '02842b3dd44c73ccfcdacb0714c0825e6cc459c31b7d9f2a5f5fe172e9207a54'
url "https://updates.glyphsapp.com/Glyphs#{version}.zip"
appcast "https://updates.glyphsapp.com/appcast#{version.major}.xml",
checkpoint: 'f54781e6536cd869faa6151a51c827937b2d3790532a51fc257c255fdf60780b'
name 'Glyphs'
homepage 'https://www.glyphsapp.com/'
auto_updates true
app 'Glyphs.app'
end
| 6092986879df9e9ce74dbdb9720705b8152f39db | [
"Ruby"
] | 3 | Ruby | yellinben/homebrew-cask | d63f4b5598566f926a718de41704389d3e906a68 | ce336d25cf4fd64a671876ca4bfc5b4eab998e45 |
refs/heads/master | <file_sep>import React, { useState } from "react";
import Layout from "../Layout/Layout";
import About from "../About/About";
import Main from "../Main/Main";
// Home page: renders the fixed <Main /> header plus a switchable section
// supplied by the Layout's click callback.
const Home = () => {
  // NOTE(review): `About` is a function, so useState treats it as a LAZY
  // INITIALIZER and stores its return value (an element), not the component
  // itself. Likewise setSection(fn) would invoke fn as an updater function —
  // confirm that Layout's callers always pass elements, not component
  // functions.
  const [section, setSection] = useState(About);

  // Stores the next section (element) to render below <Main />.
  const sectionHandler = (section) => {
    setSection(section);
  };

  return (
    <Layout click={sectionHandler}>
      <Main />
      {section}
    </Layout>
  );
};

export default Home;
<file_sep>import React from "react";
import { FaFile, FaGithub } from "react-icons/fa";
import { IoIosGlobe } from "react-icons/io";
import "./Button.css";
import Pdf from "../../assets/cv.pdf";
export const ButtonCV = () => {
return (
<a href={Pdf} className="Button ButtonPaper" target="_blank" rel="noopener noreferrer">
<FaFile />
<span>CV</span>
</a>
);
};
export const ButtonCode = (props) => (
<a href={props.page} className="Button ButtonCode" target="_blank" rel="noopener noreferrer">
<FaGithub />
</a>
);
export const ButtonWeb = (props) => {
return (
<a href={props.page} className="Button ButtonPaper" target="_blank" rel="noopener noreferrer">
<IoIosGlobe />
</a>
);
};
export const ButtonMedia = (props) => {
return (
<a href={props.page} className="ButtonMedia" target="_blank" rel="noopener noreferrer">
{props.children}
</a>
);
};
<file_sep>import recipesGif from "./recipes-app.gif";
import guessNumberGif from "./guess-number-app.gif";
import cvImage from "./cv-page.PNG";
import playgroundGif from "./playground-page.gif";
// Static portfolio data consumed by the Projects page. Shape per entry:
// { title, description, image, codeUrl, projectUrl, technologies: string[] }.
export default [
  {
    title: "Curriculum Vitae",
    description: "Create your own CV with ready template! ",
    image: cvImage,
    codeUrl: "https://github.com/goskan93/curriculum-vitae",
    projectUrl: "https://goskan93.github.io/curriculum-vitae/",
    technologies: ["react", "redux", "materialUI"],
  },
  {
    title: "Playground",
    description: "Play tic tac toe, memory or treasure hunt games for free.",
    image: playgroundGif,
    codeUrl: "https://github.com/goskan93/playground",
    projectUrl: "https://goskan93.github.io/playground/",
    technologies: ["react", "react-router"],
  },
  {
    title: "Recipes",
    description: "Enjoy the best recipes from all over the world.",
    image: recipesGif,
    codeUrl: "https://github.com/goskan93/recipes-app",
    projectUrl: "https://expo.io/@goskan93/meal-app?release-channel=recipes-app",
    technologies: ["expo", "react-native", "redux", "react-navigation"],
  },
  {
    title: "Guess the number app",
    description: "Let your opponent guess the number.",
    image: guessNumberGif,
    codeUrl: "https://github.com/goskan93/guess-number-app",
    projectUrl: "https://expo.io/@goskan93/guess-number-app?release-channel=guess-number-app",
    technologies: ["expo", "react-native"],
  },
];
<file_sep>import React from "react";
import Info from "../../components/Info/Info";
import SkillTable from "../../components/SkillTable/SkillTable";
const About = () => {
return (
<article>
<Info title="About me">
<p>
I consider myself a 'forever student' - never stop improving my skills, knowledge and overcome my weaknesses. I am highly
motivated to get new experiences, grow in my professional and personal life. I am ready to join a team and share my experiences,
good attitude and challenges with new colleagues.
</p>
<SkillTable />
</Info>
<Info title="Work">
<p>
The most interesting experience I acquired is my current job in Brazil as System Developer. In a short amount of time I had to
learn portuguese and all technologies needed for web development. After some time I was assigned to create the mobile application
for our clients, that was the moment I got to know React Native technology and got interested in React.js and frontend
development. Before that I worked few months as Junior SAP Consultant, where I had opportunity to work in an international team.
</p>
</Info>
<Info title="Education">
<p>
I graduated in Applied Mathematics at Wroclaw University of Science and Techonology (Poland) which I finished with Master of
Science degree. The studies helped me to develop analytical and logical skills and I also learned how to acquire the knowledge in
a fast way. During that time I have been an active student, taking part in extra activities, such as being a member of the
Scientific Circle of Financial Engineering, where after some time I became a chairman of this organization and led the biggest
student project in the Department of Mathemtics - Polish Nationwide Mathematical Conference "OMatKo!!!". I also spent one semester
abroad in Portugal, where I improved my english and communication skills.
</p>
</Info>
</article>
);
};
export default About;
<file_sep>import React from "react";
import Card from "../../components/CardProject/CardProject";
import projects from "../../assets/projects";
const Projects = () => {
return (
<article>
{projects.map((project) => (
<Card
title={project.title}
description={project.description}
codeUrl={project.codeUrl}
technologies={project.technologies}
projectUrl={project.projectUrl}
image={project.image}
/>
))}
</article>
);
};
export default Projects;
<file_sep>import React from "react";
import "./Chip.css";
const Chip = (props) => {
return (
<div className="Chip">
<h5>{props.text}</h5>
</div>
);
};
export default Chip;
<file_sep>import React from "react";
import { ButtonMedia } from "../../components/Button/Button";
import { FaGithub, FaLinkedin } from "react-icons/fa";
import meImage from "../../assets/me.jpg";
import "./Main.css";
const Main = () => {
return (
<div className="Main-Container">
<div className="Image-Container">
<img alt="profile" src={meImage} />
</div>
<div className="Info-Container">
<div>
<h2><NAME></h2>
<h3>
email: <span><EMAIL></span>
</h3>
<p>Originally from Poland, currently in Brazil.</p>
<p>Looking for new opportunieties as Frontend Develper in React.js.</p>
</div>
<div className="Media-Buttons">
<ButtonMedia page="https://github.com/goskan93">
<FaGithub />
</ButtonMedia>
<ButtonMedia page="https://www.linkedin.com/in/natalia-goska/">
<FaLinkedin />
</ButtonMedia>
</div>
</div>
</div>
);
};
export default Main;
| 3c838a69a44062be8a61be827e272f16d8f947e0 | [
"JavaScript"
] | 7 | JavaScript | goskan93/portfolio_v1 | 325f5704219e1263e971daa02a0b0732abd797a5 | 1d6692d542167bf456babfb4e26d63a13a845dfc |
refs/heads/master | <repo_name>juliano-nunes/alexa-clickbus-skill<file_sep>/lambda/index.js
// This sample demonstrates handling intents from an Alexa skill using the Alexa Skills Kit SDK (v2).
// Please visit https://alexa.design/cookbook for additional examples on implementing slots, dialog management,
// session persistence, api calls, and more.
const Alexa = require('ask-sdk-core');
const https = require('https');
// Day periods used to bucket departures; hours are on a 24h clock, each
// period covering [startTime, endTime).
// NOTE(review): 'evening' and 'dawn' touch midnight (endTime/startTime of 0),
// so consumers must handle wrap-around — a naive `start < h && h < end` test
// never matches 'evening'.
const times = [
    {
        id: 'morning',
        startTime: 06,
        endTime: 12
    },
    {
        id: 'afternoon',
        startTime: 12,
        endTime: 18
    },
    {
        id: 'evening',
        startTime: 18,
        endTime: 00
    },
    {
        id: 'dawn',
        startTime: 00,
        endTime: 06
    }
];
// Entry point when the skill is launched without a specific intent.
const LaunchRequestHandler = {
    canHandle(handlerInput) {
        return Alexa.getRequestType(handlerInput.requestEnvelope) === 'LaunchRequest';
    },
    handle(handlerInput) {
        // Greeting + prompt; the reprompt keeps the session open for the answer.
        const speakOutput = 'Hoje é um ótimo dia para comprar sua passagem de ônibus, me diga para onde quer viajar.';
        return handlerInput.responseBuilder
            .speak(speakOutput)
            .reprompt(speakOutput)
            .getResponse();
    }
};
// GET the list of trips between two place ids on a given date.
// Resolves with the parsed JSON response body.
// NOTE(review): the host is a beeceptor mock endpoint — presumably a
// stand-in for the real API; confirm before shipping.
const httpGet = function(origem, destino, departureDate) {
    return new Promise(((resolve, reject) => {
        var options = {
            host: 'alexa-testing.free.beeceptor.com',
            port: 443,
            path: `/api/v3/trips?from=${origem}&to=${destino}&departureDate=${departureDate}&clientId=2&returnDate=`,
            method: 'GET',
        };
        const request = https.request(options, (response) => {
            response.setEncoding('utf8');
            let returnData = '';
            response.on('data', (chunk) => {
                returnData += chunk;
            });
            response.on('end', () => {
                // Fixed: JSON.parse on a malformed body used to throw out of
                // the callback; reject the promise instead.
                try {
                    resolve(JSON.parse(returnData));
                } catch (error) {
                    reject(error);
                }
            });
            response.on('error', (error) => {
                reject(error);
            });
        });
        // Fixed: connection-level failures (DNS, TLS, socket errors) are
        // emitted on the REQUEST object; without this handler they surface
        // as unhandled 'error' events and crash the Lambda.
        request.on('error', (error) => {
            reject(error);
        });
        request.end();
    }));
};
// First step of the purchase dialog: resolve origin/destination slot ids and
// the departure date, stash them in session attributes, then ask for the
// preferred period of the day (handled by DepartureTimeIntentHandler).
const BuyTicketIntentHandler = {
    canHandle(handlerInput) {
        return Alexa.getRequestType(handlerInput.requestEnvelope) === 'IntentRequest'
            && Alexa.getIntentName(handlerInput.requestEnvelope) === 'BuyTicketIntent';
    },
    handle(handlerInput) {
        // NOTE(review): if a slot has no ER_SUCCESS_MATCH resolution the
        // filter yields an empty array and `[0].values` throws — confirm the
        // interaction model guarantees a match, or add a fallback prompt.
        const origin = Alexa.getSlot(handlerInput.requestEnvelope, 'origin');
        const originId = origin.resolutions.resolutionsPerAuthority.filter(res => res.status.code === 'ER_SUCCESS_MATCH')[0].values[0]["value"].id;
        const destination = Alexa.getSlot(handlerInput.requestEnvelope, 'destination');
        const destinationId = destination.resolutions.resolutionsPerAuthority.filter(res => res.status.code === 'ER_SUCCESS_MATCH')[0].values[0]["value"].id;
        const departureDate = Alexa.getSlotValue(handlerInput.requestEnvelope, 'departureDate');
        // Persist the trip parameters for the follow-up intent.
        const attributes = handlerInput.attributesManager.getSessionAttributes();
        attributes.originId = originId;
        attributes.destinationId = destinationId;
        attributes.departureDate = departureDate;
        handlerInput.attributesManager.setSessionAttributes(attributes);
        return handlerInput.responseBuilder
            .speak(`Você prefere viajar pela manhã, a tarde, a noite ou de madrugada?`)
            .getResponse();
    }
};
// Second step of the purchase dialog: resolve the chosen day period, fetch
// the trips stored by BuyTicketIntentHandler and answer with the CHEAPEST
// departure inside that period.
const DepartureTimeIntentHandler = {
    canHandle(handlerInput) {
        return Alexa.getRequestType(handlerInput.requestEnvelope) === 'IntentRequest'
            && Alexa.getIntentName(handlerInput.requestEnvelope) === 'DepartureTimeIntent';
    },
    async handle(handlerInput) {
        const departureTime = Alexa.getSlot(handlerInput.requestEnvelope, 'departureTime');
        const departureTimeValue = Alexa.getSlotValue(handlerInput.requestEnvelope, 'departureTime');
        const departureTimeId = departureTime.resolutions.resolutionsPerAuthority.filter(res => res.status.code === 'ER_SUCCESS_MATCH')[0].values[0]["value"].id;

        const attributes = handlerInput.attributesManager.getSessionAttributes();
        attributes.departureTime = times.filter(time => time.id === departureTimeId)[0];
        handlerInput.attributesManager.setSessionAttributes(attributes);

        const response = await httpGet(attributes.originId, attributes.destinationId, attributes.departureDate);

        // Keep only departures inside the chosen period, treated as the
        // half-open range [startTime, endTime).
        // Fixed: periods that cross midnight ('evening' 18-00, 'dawn' 00-06)
        // never matched the old `start < h && end > h` test; support
        // wrap-around explicitly.
        const { startTime, endTime } = attributes.departureTime;
        const departures = response.departures.filter(departure => {
            const hour = parseInt(departure.parts[0].departure.schedule.time.split(':')[0]);
            return startTime < endTime
                ? (hour >= startTime && hour < endTime)
                : (hour >= startTime || hour < endTime);
        })
        // Fixed: the answer announces the LOWEST price, but the old
        // comparator sorted descending and so picked the most expensive trip.
        .sort((a, b) => a.price - b.price);

        const cheapestTrip = departures[0].price;
        const serviceClass = departures[0].parts[0].bus.name;
        const terminal = departures[0].parts[0].departure.place.terminal;
        const timeHour = departures[0].parts[0].departure.schedule.time.split(':')[0];
        const timeMinutes = departures[0].parts[0].departure.schedule.time.split(':')[1];

        return handlerInput.responseBuilder
            .speak(`O menor preço encontrado no período da ${departureTimeValue} foi R$ ${cheapestTrip} na categoria ${serviceClass} saindo do ${terminal} às ${timeHour}:${timeMinutes}.`)
            .getResponse();
    }
};
// Built-in AMAZON.HelpIntent: explain what the skill can do.
const HelpIntentHandler = {
    canHandle(handlerInput) {
        return Alexa.getRequestType(handlerInput.requestEnvelope) === 'IntentRequest'
            && Alexa.getIntentName(handlerInput.requestEnvelope) === 'AMAZON.HelpIntent';
    },
    handle(handlerInput) {
        // Fixed: the message was the English "You can say hello to me!"
        // template leftover; this skill speaks Brazilian Portuguese and the
        // help text should describe the actual feature.
        const speakOutput = 'Você pode me dizer para onde quer viajar e quando, que eu procuro as passagens de ônibus para você. Como posso ajudar?';
        return handlerInput.responseBuilder
            .speak(speakOutput)
            .reprompt(speakOutput)
            .getResponse();
    }
};
// Built-in AMAZON.CancelIntent / AMAZON.StopIntent: end the session politely.
const CancelAndStopIntentHandler = {
    canHandle(handlerInput) {
        return Alexa.getRequestType(handlerInput.requestEnvelope) === 'IntentRequest'
            && (Alexa.getIntentName(handlerInput.requestEnvelope) === 'AMAZON.CancelIntent'
                || Alexa.getIntentName(handlerInput.requestEnvelope) === 'AMAZON.StopIntent');
    },
    handle(handlerInput) {
        // Fixed: "Goodbye!" was the English template leftover; the skill
        // speaks Brazilian Portuguese.
        const speakOutput = 'Até logo!';
        return handlerInput.responseBuilder
            .speak(speakOutput)
            .getResponse();
    }
};
// Fired when the session ends (user exit, timeout, or error). No speech may
// be returned here — only cleanup.
const SessionEndedRequestHandler = {
    canHandle(handlerInput) {
        return Alexa.getRequestType(handlerInput.requestEnvelope) === 'SessionEndedRequest';
    },
    handle(handlerInput) {
        // Any cleanup logic goes here.
        return handlerInput.responseBuilder.getResponse();
    }
};
// The intent reflector is used for interaction model testing and debugging.
// It will simply repeat the intent the user said. You can create custom handlers
// for your intents by defining them above, then also adding them to the request
// handler chain below.
// Debug helper: catches any IntentRequest no earlier handler claimed and
// simply speaks the intent's name back. Must stay LAST in the handler list.
const IntentReflectorHandler = {
    canHandle(handlerInput) {
        return Alexa.getRequestType(handlerInput.requestEnvelope) === 'IntentRequest';
    },
    handle(handlerInput) {
        const intentName = Alexa.getIntentName(handlerInput.requestEnvelope);
        const speakOutput = `You just triggered ${intentName}`;
        return handlerInput.responseBuilder
            .speak(speakOutput)
            //.reprompt('add a reprompt if you want to keep the session open for the user to respond')
            .getResponse();
    }
};
// Generic error handling to capture any syntax or routing errors. If you receive an error
// stating the request handler chain is not found, you have not implemented a handler for
// the intent being invoked or included it in the skill builder below.
// Catch-all error handler: logs the stack and asks the user to try again.
const ErrorHandler = {
    canHandle() {
        // Always claims the error so the skill never dies silently.
        return true;
    },
    handle(handlerInput, error) {
        console.log(`~~~~ Error handled: ${error.stack}`);
        // NOTE(review): this message is English while the rest of the skill
        // speaks Portuguese — consider translating for consistency.
        const speakOutput = `Sorry, I had trouble doing what you asked. Please try again.`;
        return handlerInput.responseBuilder
            .speak(speakOutput)
            .reprompt(speakOutput)
            .getResponse();
    }
};
// The SkillBuilder acts as the entry point for your skill, routing all request and response
// payloads to the handlers above. Make sure any new handlers or interceptors you've
// defined are included below. The order matters - they're processed top to bottom.
// Lambda entry point. Handlers are tried top to bottom; the first whose
// canHandle() returns true wins.
exports.handler = Alexa.SkillBuilders.custom()
    .addRequestHandlers(
        LaunchRequestHandler,
        BuyTicketIntentHandler,
        DepartureTimeIntentHandler,
        HelpIntentHandler,
        CancelAndStopIntentHandler,
        SessionEndedRequestHandler,
        IntentReflectorHandler, // make sure IntentReflectorHandler is last so it doesn't override your custom intent handlers
    )
    .addErrorHandlers(
        ErrorHandler,
    )
    .lambda();
<file_sep>/README.md
# Alexa Skill to Get Bus Travel Tickets Info
This is an experiment with Amazon Alexa. The main purpose was to understand how the Alexa API works and to build a simple app with it.
| 7c473acf218d642c27ea9c9898574a2da1c73f8b | [
"JavaScript",
"Markdown"
] | 2 | JavaScript | juliano-nunes/alexa-clickbus-skill | 28e73f70ccbe3e590fca1635f31c0d30b22a56c2 | 6957435c905aae851c6029456d2847c51c80224d |
refs/heads/master | <file_sep>//
// XDCSessionManager.swift
// XDIM
//
// Created by 白大卫 on 17/3/10.
// Copyright © 2017年 bdw. All rights reserved.
//
//会话管理
import Foundation
//
// Session (conversation) manager — placeholder, no state yet.
class XDCSessionManager{
    // TODO: receiver name / id, group vs. one-to-one session
    // (translated from the original Chinese note "接收方名称 id 群/个人").
}
<file_sep>//
// XDCStatusCode.swift
// XDIM
//
// Created by 白大卫 on 17/3/13.
// Copyright © 2017年 bdw. All rights reserved.
//
import Foundation
<file_sep>//
// XDCUtilCode.swift
// XDIM
//
// Created by 白大卫 on 17/3/13.
// Copyright © 2017年 bdw. All rights reserved.
//
import Foundation
// Top-level protocol command codes, one per service area. Case-less enums
// are used as namespaces for the constants.
enum UtilCode{
    static let signup: UInt16 = 0x0101
    static let setup: UInt16 = 0x0102
    static let userUtil: UInt16 = 0x0103
    static let groupUtil: UInt16 = 0x0104
    static let messageUtil: UInt16 = 0x0105
}

// Sub-commands used together with UtilCode.signup (account lifecycle).
enum SubUtilCodeForSignUp{
    static let verify: UInt16 = 0x0100
    static let signup: UInt16 = 0x0101
    static let login: UInt16 = 0x0200
    static let loginFeedback: UInt16 = 0x0201
    static let pwdForgotten: UInt16 = 0x0300
    static let pwdReset: UInt16 = 0x0301
    static let logout: UInt16 = 0x0400
    static let errExit: UInt16 = 0x0500
}
<file_sep>//
// XDCMessage.swift
// XDIM
//
// Created by 白大卫 on 17/3/10.
// Copyright © 2017年 bdw. All rights reserved.
//
import Foundation
//会话消息
// A single chat message.
class XDCMessage{
    // Message body text (original note: "内容" = content).
    var strContent: String?
    // Length of the body (original note: "长度" = length) — unit (bytes vs.
    // characters) is not fixed here; TODO confirm.
    var intLength: Int?
}
<file_sep>//
// XDCDataManager.swift
// XDIM
//
// Created by 白大卫 on 17/3/9.
// Copyright © 2017年 bdw. All rights reserved.
//
//接收数据并并生成类实例
//持久化?
//
import Foundation
import SwiftyJSON
// Builds model objects from raw server payloads.
class XDCDataManager: NSObject{
    var objData: NSObject?

    // Shared singleton instance.
    static let sharedInstance: XDCDataManager = {
        let instance = XDCDataManager()
        return instance
    }()

    /// Decode a user record from raw JSON bytes.
    /// Fixed: `JSON(objData)` wrapped the Data object itself instead of
    /// parsing it, so every field lookup returned nil and the defaults were
    /// always used; `JSON(data:)` actually parses the bytes. Missing or
    /// invalid fields still fall back to XDCUser's defaults.
    func getUserEntity(objData: Data)->XDCUser{
        let json = (try? JSON(data: objData)) ?? JSON()
        let strUserName: String? = json["name"].string
        let strSex: String? = json["sex"].string
        let strAge: String? = json["age"].string
        let strEmail: String? = json["email"].string
        let strTel: String? = json["tel"].string
        let user: XDCUser = XDCUser(strUserName: strUserName, strSex: strSex, strAge: strAge, strEmail: strEmail, strTel: strTel)
        return user
    }

    // TODO: parse full server packets into models
    // (translated from the original note "接收服务器数据包获取数据").
}
<file_sep>//
// XDCPackStruct.swift
// XDIM
//
// Created by 白大卫 on 17/3/13.
// Copyright © 2017年 bdw. All rights reserved.
//
import Foundation
//12 bytes packageheader
// 12-byte wire header that prefixes every package.
struct PackageHeader {
    // Protocol version number (1 byte).
    let versionCode: UInt8
    // Encryption method / protocol status (1 byte).
    let cryptType: UInt8
    // Business/package type (2 bytes).
    let packageID: UInt16
    // Length of the data segment that follows (4 bytes).
    let dataSegementLEN: UInt32
    // Sequence number of this package (4 bytes).
    let packageNUM: UInt32
}

// Header that prefixes the data segment inside a package (2+2+4 bytes).
struct DataHeader {
    let utilCode: UInt16
    let subUtilCode: UInt16
    let packageID: UInt32
}
<file_sep>//
// XDCPackageManager.swift
// XDIM
//
// Created by 白大卫 on 17/3/14.
// Copyright © 2017年 bdw. All rights reserved.
//
import Foundation
// Builds outgoing packages and strips headers from incoming ones.
class XDCPackageManager{
    // Shared singleton instance.
    static let sharedInstance:XDCPackageManager = {
        let instance = XDCPackageManager()
        return instance
    }()

    // Prefix `contentData` with a DataHeader built from the given codes.
    // NOTE(review): `dataLength` is accepted but never used — confirm whether
    // it should be written into the package. Copying the struct's raw memory
    // assumes host byte order and exact field layout — verify against the
    // server's wire format.
    func packData(utilCode: UInt16, subUtilCode: UInt16, packageID: UInt32, contentData: Data, dataLength: UInt32) -> Data{
        var dataHeader: DataHeader = DataHeader(utilCode: utilCode, subUtilCode: subUtilCode, packageID: packageID)
        var packageData: Data = Data(bytes: &dataHeader, count: MemoryLayout<DataHeader>.size)
        packageData.append(contentData)
        return packageData
    }

    // Return the bytes after the DataHeader, or nil when the package is not
    // longer than a bare header.
    func unpackData(package: Data) -> Data? {
        var pacakgeData: Data?
        if (package.count > MemoryLayout<DataHeader>.size){
            pacakgeData = package.subdata(in: MemoryLayout<DataHeader>.size..<package.count)
        }
        return pacakgeData
    }
}
<file_sep>//
// XDCRegex.swift
// XDIM
//
// Created by 白大卫 on 17/3/9.
// Copyright © 2017年 bdw. All rights reserved.
//
import Foundation
// Thin wrapper around NSRegularExpression with case-insensitive matching.
class XDCRegex{
    // Compiled pattern; nil when the pattern failed to compile.
    var regex: NSRegularExpression?

    init(pattern: String) {
        do {
            regex = try NSRegularExpression(pattern: pattern, options: .caseInsensitive)
        } catch {
            XDCLog("NSRegularExpression init ERR")
        }
    }

    // True when `input` contains at least one occurrence of the pattern;
    // false when it does not, or when the pattern never compiled.
    func match(input: String) -> Bool {
        guard let compiled = regex else { return false }
        let wholeRange = NSMakeRange(0, (input as NSString).length)
        return compiled.firstMatch(in: input, options: [], range: wholeRange) != nil
    }
}
<file_sep>//
// XDCSession.swift
// XDIM
//
// Created by 白大卫 on 17/3/10.
// Copyright © 2017年 bdw. All rights reserved.
//
import Foundation
//
// Whether a session is one-to-one or a group chat.
enum SessionType{
    case single
    case group
}

// One conversation entry — mostly a placeholder, fields still TODO.
class XDCSession{
    // Receiver identity (original note: "名称(接收方)").
    var strSessionID: String?
    var strSessionName: String?
    // Session kind: group or one-to-one (original note: "会话类型 群/个人").
    var enmSessionType: SessionType?
    // TODO (from the original Chinese notes): message content, timestamp
    // (date + time-of-day).
}
<file_sep>//
// XDCMessageManager.swift
// XDIM
//
// Created by 白大卫 on 17/3/10.
// Copyright © 2017年 bdw. All rights reserved.
//
import Foundation
<file_sep>//
// XDCGlobal.swift
// XDIM
//
// Created by 白大卫 on 17/3/6.
// Copyright © 2017年 bdw. All rights reserved.
//
//import Foundation
import UIKit
// Server endpoint. hostAdress/portNUM are consumed by
// XDCConnectionManager.sharedInstance; ipAddress appears unused here —
// TODO confirm whether it is still needed.
let ipAddress:String = ""
let hostAdress:String = "baidu.com"
let portNUM:UInt16 = 80
// Fixed sizes within the wire protocol, in bytes.
let headerLEN: UInt = 12
let verificationLEN: UInt = 64
<file_sep>//
// XDCLog.swift
// XDIM
//
// Created by 白大卫 on 17/3/6.
// Copyright © 2017年 bdw. All rights reserved.
//
import Foundation
// Debug-only logger: prints "file:line function | message". Generic over T so
// any value can be logged. Compiled out unless DEBUG is defined.
func XDCLog<T>(_ message:T, file:String = #file, function:String = #function, line:Int = #line) {
    #if DEBUG
        // Strip the path, keep only the file name.
        let fileName = (file as NSString).lastPathComponent
        print("\(fileName):\(line) \(function) | \(message)")
    #endif
}
<file_sep>//
// XDCConnectionManager.swift
// XDIM
//
// Created by 白大卫 on 17/3/3.
// Copyright © 2017年 bdw. All rights reserved.
//
import Foundation
import CocoaAsyncSocket
// Connection lifecycle state of the socket.
enum ConnectionStatus {
    case connected
    case disconnected
}

// GCDAsyncSocket read tags: which part of a package a read belongs to.
enum PackageSegementTag: Int{
    case header = 100
    case payload = 200
}
// TCP connection manager built on GCDAsyncSocket: owns the socket, sends
// packages, incrementally reads/parses incoming packages and reconnects
// after a drop.
class XDCConnectionManager: NSObject, GCDAsyncSocketDelegate
{
    // Remote endpoint; overwritten from the XDCGlobal constants in
    // sharedInstance's initializer below.
    var port: UInt16 = 80
    var host: String = "localhost"
    var socket: GCDAsyncSocket?
    var connectionStatus: ConnectionStatus?
    // Number of reconnect attempts so far; also scales the retry delay.
    var reconnectCount: UInt16 = 0
    var reconnectTimer: Timer?
    // Accumulates the bytes of the package currently being received.
    var packageBuffer: NSMutableData?
    var headerBuffer: Data?
    // Data-segment length announced by the last parsed package header.
    var dataSegementLEN: UInt = 0

    // Singleton (original note: "单例"); delegate callbacks arrive on the
    // main queue.
    static let sharedInstance: XDCConnectionManager = {
        let instance = XDCConnectionManager()
        instance.host = hostAdress
        instance.port = portNUM
        instance.socket = GCDAsyncSocket(delegate: instance, delegateQueue: DispatchQueue.main)
        instance.connectionStatus = .disconnected
        return instance
    }()

    private override init(){}

    // Open the connection unless it is already open.
    func connect(){
        if let socket = self.socket{
            if (connectionStatus == .disconnected){
                do{
                    try socket.connect(toHost: host, onPort: port)
                    connectionStatus = .connected
                }catch{
                    XDCLog("connectERR")
                }
            }else{
                XDCLog("alreadyConnected")
            }
        }
    }

    // Close the connection and mark the state accordingly.
    func disconnect(){
        if let socket = self.socket{
            socket.disconnect()
            connectionStatus = .disconnected
            XDCLog("disconnected")
        }
    }

    // Send a raw UTF-8 string (no package framing), tag 0.
    func sendData(strMessage:String) {
        if let data : Data = strMessage.data(using: String.Encoding.utf8){
            if let aSocket = self.socket{
                aSocket.write(data, withTimeout: -1, tag: 0)
            }
        }else{
            XDCLog("invalid msg")
        }
    }

    // Send an already-framed package, tag 100.
    func sendPackageData(packageData: Data){
        if let aSocket = self.socket{
            aSocket.write(packageData, withTimeout: -1, tag: 100)
        }
    }

    // MARK: - managePackageData

    // Ask the socket for exactly one package header; completion lands in
    // socket(_:didRead:withTag:) with the .header tag.
    // (Original note: "获取包头 转入didread".)
    func getPackageHeader(socket: GCDAsyncSocket){
        socket.readData(toLength: headerLEN, withTimeout: -1, buffer: packageBuffer, bufferOffset: 0, tag: PackageSegementTag.header.rawValue)
    }

    // Parse a 12-byte package header.
    // NOTE(review): each read relies on Swift inferring the pointer's element
    // type from the annotation, so `+1`/`+2` advance by that field's own
    // width — the byte offsets land at 0, 1, 2, 4 and 8. This is fragile and
    // assumes the wire uses host (little-endian) byte order — confirm against
    // the server protocol.
    func resolvePackageHeader(packageHeaderData: Data) -> PackageHeader? {
        var packageHeader: PackageHeader?
        if (packageHeaderData.count == Int(headerLEN)){
            let versionCode: UInt8 = packageHeaderData.withUnsafeBytes{$0.pointee}
            let cryptType: UInt8 = packageHeaderData.withUnsafeBytes{($0+1).pointee}
            let packageID: UInt16 = packageHeaderData.withUnsafeBytes{($0+1).pointee}
            let dataSegementLEN: UInt32 = packageHeaderData.withUnsafeBytes{($0+1).pointee}
            let packageNUM: UInt32 = packageHeaderData.withUnsafeBytes{($0+2).pointee}
            packageHeader = PackageHeader(versionCode: versionCode, cryptType: cryptType, packageID: packageID, dataSegementLEN: dataSegementLEN, packageNUM: packageNUM)
        }else{
            XDCLog("packageHeaderData ERR")
        }
        return packageHeader
    }

    // Request the rest of the package: the data segment announced by the
    // header plus the fixed-size verification trailer.
    // (Original note: "根据包头中数据长度接收完整包".)
    func getPackagePayload(socket: GCDAsyncSocket){
        let packageFullLength = dataSegementLEN + verificationLEN
        if let buffer = self.packageBuffer{
            let bufferOffset = UInt(buffer.length)
            socket.readData(toLength: UInt(packageFullLength), withTimeout: -1, buffer: buffer, bufferOffset: bufferOffset, tag: PackageSegementTag.payload.rawValue)
        }
    }

    // Parse the 8-byte DataHeader at the start of a data segment; same
    // typed-pointer trick as resolvePackageHeader (offsets 0, 2, 4).
    func resolveDataHeader(payloadData: Data) -> DataHeader? {
        var dataHeader: DataHeader?
        if(payloadData.count > Int(headerLEN))
        {
            let utilCode: UInt16 = payloadData.withUnsafeBytes{$0.pointee}
            let subUtilCode: UInt16 = payloadData.withUnsafeBytes{($0+1).pointee}
            let packageID: UInt32 = payloadData.withUnsafeBytes{($0+1).pointee}
            dataHeader = DataHeader(utilCode: utilCode, subUtilCode: subUtilCode, packageID: packageID)
        }else{
            XDCLog("payloadData ERR")
        }
        return dataHeader
    }

    // Split a payload into its DataHeader and content, returned through the
    // inout parameters.
    // NOTE(review): the content skips the first 12 bytes although DataHeader
    // occupies 8 (2+2+4) — confirm whether the protocol pads the header.
    func resolvePackagePayload(packagePayload: Data, dataHeader: inout DataHeader?, dataContent: inout Data?){
        let rangeFrom = Int(headerLEN)
        let rangeTo = Int(headerLEN + self.dataSegementLEN)
        // dataHeader + dataContent
        let data = packagePayload.subdata(in: rangeFrom..<rangeTo)
        dataHeader = resolveDataHeader(payloadData: data)
        dataContent = data.subdata(in: 12..<data.count)
    }

    // MARK: - GCDAsyncSocketDelegate

    func socket(_ sock: GCDAsyncSocket, didConnectToHost host: String, port: UInt16) {
        XDCLog("connect succeed")
        //sock.readData(withTimeout: -1, tag: 0)
    }

    func socket(_ sock: GCDAsyncSocket, didWriteDataWithTag tag: Int) {
        XDCLog("send succeed")
    }

    // Dispatch on the read tag: header reads trigger the payload read;
    // payload reads are parsed; anything else is treated as a raw string.
    func socket(_ sock: GCDAsyncSocket, didRead data: Data, withTag tag: Int) {
        switch tag {
        case PackageSegementTag.header.rawValue:
            // Received a package header: remember the announced data length
            // and continue with the payload read.
            if let packageHeader = resolvePackageHeader(packageHeaderData: data){
                print("get header")
                self.dataSegementLEN = UInt(packageHeader.dataSegementLEN)
                getPackagePayload(socket: sock)
            }else{
                XDCLog("resolvePackageHeader ERR")
            }
        case PackageSegementTag.payload.rawValue:
            // Received the package body: parse header + content.
            // NOTE(review): the parsed dataHeader/dataContent are local and
            // discarded when this case ends — presumably they should be
            // forwarded to a consumer (e.g. XDCDataManager); confirm.
            var dataHeader: DataHeader?
            var dataContent: Data?
            resolvePackagePayload(packagePayload: data, dataHeader: &dataHeader, dataContent: &dataContent)
        default:
            if let strReceived = String(data: data, encoding: .utf8){
                XDCLog("readDataDefault")
                print(strReceived)
            }else{
                XDCLog("readData ERR")
            }
        }
        //sock.readData(withTimeout: -1, tag: 0)
    }

    // On disconnect, schedule reconnect attempts with a delay that grows with
    // reconnectCount.
    // NOTE(review): the timer repeats and is never invalidated, and the first
    // delay is 0 while reconnectCount is 0 — confirm the intended back-off
    // behaviour.
    func socketDidDisconnect(_ sock: GCDAsyncSocket, withError err: Error?) {
        XDCLog("disconnected")
        XDCLog(err.debugDescription)
        if #available(iOS 10.0, *)
        {
            reconnectTimer = Timer.scheduledTimer(withTimeInterval: Double(reconnectCount) * 2.0, repeats: true, block: {(time) in
                self.reconnect()
            })
        }else{
            reconnectTimer = Timer.scheduledTimer(timeInterval: Double(reconnectCount) * 2.0, target: self, selector: #selector(self.reconnect), userInfo: nil, repeats: true);
        }
    }

    // One reconnect attempt (original note: "重链接" = reconnect).
    func reconnect(){
        self.reconnectCount += 1
        XDCLog("reconnect")
        self.connect()
    }

    func socketDidSecure(_ sock: GCDAsyncSocket) {
    }
}
<file_sep>//
// XDCUser.swift
// XDIM
//
// Created by 白大卫 on 17/3/9.
// Copyright © 2017年 bdw. All rights reserved.
//
import Foundation
// Sex of a user profile.
enum Sex{
    case male
    case female
}

// User profile model. Every stored property has a placeholder default that
// is kept whenever the corresponding init argument is missing or invalid.
class XDCUser : NSObject{
    var strUserID: String? = "xx_xxxx"
    var strUsername: String? = "whoImI"
    var enmSex: Sex? = .male
    var intAge: Int? = 99
    var strEmail: String? = "<EMAIL>"
    var strTel: String? = "13786868686"

    /// Build a user from raw (server-supplied) strings; each field is
    /// validated and silently falls back to the default above when missing
    /// or invalid.
    init(strUserName: String?, strSex: String?, strAge: String?, strEmail: String?, strTel: String?){
        super.init()
        let mailPattern: String = "^([a-z0-9_\\.-]+)@([\\da-z\\.-]+)\\.([a-z\\.]{2,6})$"
        let telPattern: String = "^1(3[0-9]|4[57]|5[0-35-9]|7[0135678]|8[0-9])\\d{8}$"
        // Fixed: this previously unwrapped the PROPERTY `strUsername` (always
        // the non-nil default), so the `strUserName` argument was ignored.
        if let userName = strUserName{
            self.strUsername = userName
        }
        if let sex = strSex{
            if (sex == "male"){
                self.enmSex = .male
            }else if(sex == "female"){
                self.enmSex = .female
            }
        }
        if let age = strAge{
            if let intAge = Int(age){
                // Ages outside 1...99 are rejected and the default kept.
                if 1 ... 99 ~= intAge{
                    self.intAge = intAge
                }
            }
        }
        if let email = strEmail{
            let matcher = XDCRegex(pattern: mailPattern)
            if matcher.match(input: email){
                self.strEmail = email
            }
        }
        if let tel = strTel{
            let matcher = XDCRegex(pattern: telPattern)
            if matcher.match(input: tel) {
                self.strTel = tel
            }
        }
    }
}
| 2b4856a356cdd01a0523d43b889741d155e42a73 | [
"Swift"
] | 14 | Swift | laowu/xdcim | e0f7b6b521670a45e81598d7be5e6bc31ad1694d | 4faa0be4a807dbfec74efd4c2707ea7ddf9ca756 |
refs/heads/master | <repo_name>kharysharpe/appstrapjs<file_sep>/config/routes.js
// jQuery-Mobile router table for the #grade page.
// The same URL pattern is registered twice on purpose: once for the
// before-page-show events ('bs') and once for page-hide ('h').
var router = new $.mobile.Router(
    [
        {"#grade(?:[?/](.*))?" : {events: 'bs', handler: gradeController.beforePageShow}},
        {"#grade(?:[?/](.*))?" : {events: 'h', handler: gradeController.beforePageHide}},
    ]
);
<file_sep>/README.md
appstrapjs
==========
JavaScript application framework — a hybrid fusion of Twitter Bootstrap, jQuery Mobile core, and KnockoutJS.
"JavaScript",
"Markdown"
] | 2 | JavaScript | kharysharpe/appstrapjs | ee140b44060d5001e1afa4c63504aa5da718db19 | cf84a8439234415aaa0ad4f75b8f7048dd116c8e |
refs/heads/master | <file_sep><?php
// Log the current employee out: flash a notice, clear the login marker,
// destroy the session and bounce back to the login page.
// NOTE(review): the $_SESSION values written here are discarded by
// session_destroy() below — confirm the login page re-creates the notice.
session_start();
$_SESSION['notify'] = "Bạn vừa đăng xuất !!!";
$_SESSION['out'] = "out"; // marks that the user just logged out (translated from the original Vietnamese note)
unset($_SESSION['manv']);
session_destroy();
header('Location: ../login');
?><file_sep><?php // kết nối cơ sở dữ liệu
// Open the shared MySQL connection ($conn) and start the PHP session.
// NOTE(review): credentials are hard-coded (root, empty password) — move to
// configuration before deploying.
$conn = mysqli_connect('localhost','root','','giang');
if (!$conn){
    die("Connect failed: " . mysqli_connect_error());
}
mysqli_set_charset($conn,'utf8');
session_start();
?><file_sep><?php
require_once('../database_conn.php');
// Handle the login form: validate the fields, check the employee id /
// password pair against the `nv` table and start a session on success.
if (isset($_POST['button-submit'])){
    $manv = $_POST['manv'];
    $pass = $_POST['password'];
    if ($manv == "" || $pass == "") {
        $_SESSION['notify'] = 'Bạn vui lòng nhập đầy đủ thông tin' ;
        header('location: index.php ') ;
    } else {
        // Fixed: user input was interpolated straight into the SQL string
        // (SQL injection); use a prepared statement instead.
        $stmt = mysqli_prepare($conn, "select * from nv where MatKhau=? and MaNV=?");
        mysqli_stmt_bind_param($stmt, "ss", $pass, $manv);
        mysqli_stmt_execute($stmt);
        $query = mysqli_stmt_get_result($stmt);
        $num_rows = mysqli_num_rows($query);
        if ($num_rows == 1) {
            $query = mysqli_fetch_assoc($query);
            $_SESSION['manv']=$manv;
            $_SESSION['ten_nv']=$query['TenNV'];
            header('location: ../index.php');
        } else{
            $_SESSION['notify']="Thông tin đăng nhập không đúng";
            header('location: index.php');
        }
        // NOTE(review): passwords are compared in plain text — MatKhau should
        // store a hash (password_hash / password_verify).
    }
}
?><file_sep>$manv=$_SESSION['manv'];
// Deposit handler fragment: credit a customer account with a foreign-currency
// amount converted at the cash buying rate, and record the transaction.
// NOTE(review): $sotk, $sotien, $select and $loaitien are assigned earlier in
// this file (outside this fragment) — confirm against the full source.
// NOTE(review): every query interpolates request values into SQL and is
// injectable; rewrite with mysqli prepared statements.
$date=date("Y/m/d H:i:s");
$query=mysqli_query($conn,"select * from khachhang where SoTK='$sotk'");
if(mysqli_num_rows($query)==0) {
$_SESSION['notif']="Khong co tai khoan nao nhu yeu cau";
header('location: index.php');
} else
if($sotien<0){
$_SESSION['notif']="Số tiền không hợp lệ";
header('location: index.php');
} else
// NOTE(review): this emptiness check runs after the account lookup above, so
// an empty SoTK already fell into the "no such account" branch.
if ($sotk =="" || $sotien=="" ){
$_SESSION['notif']="Yêu cầu nhập đầy đủ thông tin";
header('location: index.php');
} else
if ($select=="first"){
$string="gửi tiền";
// insert into the giaodich (transaction) table
$query=mysqli_query($conn,"SELECT * from khachhang where SoTK='$sotk' ");
while ($row=mysqli_fetch_assoc($query)){
$makh = $row['MaKH'];
$add=mysqli_query($conn,"INSERT INTO giaodich(MaKH,MaNV) VALUES ('$makh','$manv')");
}
// update the customer balance (*** original marked this as needing fixes)
$query=mysqli_query($conn,"SELECT * FROM tygia WHERE TenNgoaiTe='$loaitien'");
while ($row=mysqli_fetch_assoc($query)){
$tygia=$row['TyGiaMuaTienMat'];$matygia=$row['MaTyGia'];
}
$query=mysqli_query($conn,"SELECT SoduTK FROM khachhang WHERE MaKH='$makh'");
while ($row=mysqli_fetch_assoc($query)){
$sodu=$row['SoduTK'];
}
$tientang=$tygia*$sotien;        // amount credited, in VND
$tien_gui=$tygia*$sotien+$sodu;  // new balance = converted amount + old balance
$update=mysqli_query($conn,"UPDATE khachhang Set SoduTK='$tien_gui' WHERE MaKH='$makh'");
$_SESSION['notif']="Giao dịch thành công . Tài khoản hiện tại là ". $tien_gui." đồng vào lúc ".$date;
// $_SESSION['notif']=$tientang ." ". $tien_gui ." ".$tygia;
// (print a notification with the customer's details and amount)
//$query=mysqli_query($conn,"UPDATE chitietgiaodich SET ThoiGianGiaoDich='$date' WHERE MaGiaoDich=1");
// NOTE(review): the rate used above is TyGiaMuaTienMat but the labels below say
// "Mua - Chuyển khoản"/"TyGiaMuaCK" — confirm which rate is intended.
$lgd="Mua - Chuyển khoản";
$ltg="TyGiaMuaCK";
$query=mysqli_query($conn,"INSERT INTO chitietgiaodich(ThoiGianGiaoDich,SoTienLucDau,LoaiTienLucDau,LoaiGiaoDich,MaKH,MaNV,LoaiTyGia,SoTienLucSau) VAlUES ('$date','$sotien','$loaitien','$lgd','$makh','$manv','$ltg','$tientang')");
<file_sep><?php // kết nối cơ sở dữ liệu
require_once('database_conn.php');
if(empty($_SESSION['manv'])) header('location: login/index.php');
require_once('header.php');
?>
<!DOCTYPE html>
<html>
<a href="index.php">Quay lại</a>
<head>
<style>
ul{
list-style-type :none;
margin: 0;/*lề*/
padding: 0;/*đệm*/
overflow: hidden;/*tràn*/
background-color: #333;/*màu nền*/
position: fixed;/*cố định*/
top:0;
width :100%;/*nvbar = màn hình*/
}
li{
float: left;/* nổi trái*/
}
li a{
display: block;/*khung bao quanh chữ*/
color:white;/*chữ*/
text-align: center;
padding:14px 16px;/*đệm trên+trái*/
text-decoration: none;
}
li a:hover:not(.active){
background-color: #111;
}
.active {
background-color: #4CAF50;
}
table {
font-family: arial, sans-serif;
border-collapse: collapse;
width: 100%;
}
td, th {
border: 1px solid #dddddd;
text-align: left;
padding: 8px;
}
tr:nth-child(even) {
background-color: #dddddd;
}
</style>
</head>
<body>
<ul>
<li style="float:right"><a href="logout.php/"><span class="glyphicon glyphicon-log-in"></span> Đăng xuất</a></li>
<li style="float:right"><a href="#">Xin chào <?php echo $_SESSION['ten_nv'] ?></a></li>
</ul>
<br><br><br>
<h2 class="text-center">Tính toán</h2>
<?php
if(isset($_SESSION['notif']))
{echo $_SESSION['notif'];
unset($_SESSION['notif']);
}
?>
<style>.bootstrap-iso .formden_header h2, .bootstrap-iso .formden_header p, .bootstrap-iso form{font-family: Arial, Helvetica, sans-serif; color: black}.bootstrap-iso form button, .bootstrap-iso form button:hover{color: white !important;} .asteriskField{color: red;}</style>
<div class="container-fluid">
<div class="row">
<div class="col-sm-4">
<h4 class="text-center">Mua chuyển khoản</h4>
<div class="form-group ">
<label class="control-label " for="name1">
Số tiền lúc đầu <span class="asteriskField">*</span>
</label>
<input class="form-control" id="st1" name="st1" type="text"/>
</div>
<div class="form-group ">
<label class="control-label " for="select">
Loại tiền lúc đầu <span class="asteriskField">*</span>
</label>
<!-- 1 -->
<select class="select form-control" id="select1" name="select1">
<?php
$laytygia=mysqli_query($conn,"SELECT TenNgoaiTe,TyGiaMuaCK FROM tygia");
while ($row=mysqli_fetch_assoc($laytygia)){
$x=$row['TenNgoaiTe'];
$y=$row['TyGiaMuaCK'];
echo "<option value='{$y}'>$x $y</option>";
}
?>
</select>
</div>
<!-- . -->
<div class="form-group">
<label class="control-label " for="select"> Giá trị quy đổi</label>
<div id="cost1" name="cost1">...</div>
</div>
</div>
<!-- 2 -->
<div class="col-sm-4">
<h4 class="text-center">Mua tiền mặt</h4>
<div class="form-group ">
<label class="control-label " for="name2">
Số tiền lúc đầu<span class="asteriskField">*</span>
</label>
<input class="form-control" id="st2" name="st2" type="text"/>
</div>
<div class="form-group ">
<label class="control-label " for="select">
Loại tiền lúc đầu <span class="asteriskField">*</span>
</label>
<!-- 1 -->
<select class="select form-control" id="select2" name="select2">
<?php
$laytygia=mysqli_query($conn,"SELECT TenNgoaiTe,TyGiaMuaTienMat FROM tygia");
while ($row=mysqli_fetch_assoc($laytygia)){
$x=$row['TenNgoaiTe'];
$y=$row['TyGiaMuaTienMat'];
echo "<option value='{$y}'>$x $y</option>";
}
?>
</select>
</div>
<div class="form-group">
<label class="control-label " for="select"> Giá trị quy đổi</label>
<div id="cost2" name="cost2">...</div>
</div>
</div>
<!-- 3 -->
<div class="col-sm-4">
<h4 class="text-center">Chuyển khoản</h4>
<div class="form-group ">
<label class="control-label " for="name1">
Số tiền lúc đầu<span class="asteriskField">*</span>
</label>
<input class="form-control" id="st3" name="st3" type="text"/>
</div>
<div class="form-group ">
<label class="control-label " for="select">
Loại tiền lúc đầu <span class="asteriskField">*</span>
</label>
<!-- 1 -->
<select class="select form-control" id="select3" name="select3">
<?php
$laytygia=mysqli_query($conn,"SELECT TenNgoaiTe,TyGiaBan FROM tygia");
while ($row=mysqli_fetch_assoc($laytygia)){
$x=$row['TenNgoaiTe'];
$y=$row['TyGiaBan'];
echo "<option value='{$y}'>$x $y</option>";
}
?>
</select>
</div>
<div class="form-group">
<label class="control-label " for="select"> Giá trị quy đổi</label>
<div id="cost3" name="cost3">...</div>
</div>
</div>
</div>
</div>
<script type="text/javascript">
var string ="VND"; // currency suffix appended to every converted amount
// Three identical widget pairs (1 = buy by bank transfer, 2 = buy cash,
// 3 = sell): whenever the rate <select> or the amount <input> changes,
// recompute amount * rate and show it in the matching #costN element.
// NOTE(review): JS coerces the string operands of * to numbers; non-numeric
// input renders as "NaNVND".
// JS1
$("#select1").change(function(){
var tigia1 = $(this).val();
var sotien1= $("input[name='st1']").val();
$("#cost1").html(sotien1*tigia1+string);
});
$("#st1").change(function(){
var sotien1_ = $(this).val();
var tigia1_= $('#select1').val();
$("#cost1").html(sotien1_*tigia1_ + string);
// alert(tigia__) ;
});
// Pair 2: cash purchase (TyGiaMuaTienMat rates).
$("#select2").change(function(){
var tigia2 = $(this).val();
var sotien2= $("input[name='st2']").val();
$("#cost2").html(sotien2*tigia2+string);
});
$("#st2").change(function(){
var sotien2_ = $(this).val();
var tigia2_= $('#select2').val();
$("#cost2").html(sotien2_*tigia2_ + string);
// alert(tigia__) ;
})
// Pair 3: selling rate (TyGiaBan).
$("#select3").change(function(){
var tigia3 = $(this).val();
var sotien3= $("input[name='st3']").val();
$("#cost3").html(sotien3*tigia3+string);
});
$("#st3").change(function(){
var sotien3_ = $(this).val();
var tigia3_= $('#select3').val();
$("#cost3").html(sotien3_*tigia3_ + string);
// alert(tigia__) ;
})
</script>
</body>
</html>
<!-- mã khách hàng || số tiền || mã nv (sesion)|| mã gd (auto) || thời gian--><file_sep><?php // kết nối cơ sở dữ liệu
require_once('database_conn.php');
if(empty($_SESSION['manv'])) header('location: login/index.php');
require_once('header.php');
?>
<!DOCTYPE html>
<html>
<head>
<style>
ul{
list-style-type :none;
margin: 0;/*lề*/
padding: 0;/*đệm*/
overflow: hidden;/*tràn*/
background-color: #333;/*màu nền*/
position: fixed;/*cố định*/
top:0;
width :100%;/*nvbar = màn hình*/
}
li{
float: left;/* nổi trái*/
}
li a{
display: block;/*khung bao quanh chữ*/
color:white;/*chữ*/
text-align: center;
padding:14px 16px;/*đệm trên+trái*/
text-decoration: none;
}
li a:hover:not(.active){
background-color: #111;
}
.active {
background-color: #4CAF50;
}
table {
font-family: arial, sans-serif;
border-collapse: collapse;
width: 100%;
}
td, th {
border: 1px solid #dddddd;
text-align: left;
padding: 8px;
}
tr:nth-child(even) {
background-color: #dddddd;
}
</style>
</head>
<body>
<ul>
<li style="float:right"><a href="logout.php/"><span class="glyphicon glyphicon-log-in"></span> Đăng xuất</a></li>
<li style="float:right"><a href="#">Xin chào <?php echo $_SESSION['ten_nv'] ?></a></li>
</ul>
<br><br><br>
<h2>Chuyển khoản</h2>
<?php
if(isset($_SESSION['notif']))
{echo $_SESSION['notif'];
unset($_SESSION['notif']);
}
?>
<!-- Special version of Bootstrap that only affects content wrapped in .bootstrap-iso -->
<!-- <link rel="stylesheet" href="https://formden.com/static/cdn/bootstrap-iso.css" /> -->
<!-- Inline CSS based on choices in "Settings" tab -->
<style>.bootstrap-iso .formden_header h2, .bootstrap-iso .formden_header p, .bootstrap-iso form{font-family: Arial, Helvetica, sans-serif; color: black}.bootstrap-iso form button, .bootstrap-iso form button:hover{color: white !important;} .asteriskField{color: red;}</style>
<!-- HTML Form (wrapped in a .bootstrap-iso div) -->
<div class="bootstrap-iso">
<div class="container-fluid">
<div class="row">
<div class="col-md-6 col-sm-6 col-xs-12">
<form action="chuyenkhoan_action.php" method="post">
<div class="form-group ">
<label class="control-label " for="name">
Tài khoản gửi <span class="asteriskField">*</span>
</label>
<input class="form-control" id="makh" name="sotk" type="text"/>
</div>
<div class="form-group ">
<label class="control-label " for="name1">
Tài khoản nhận <span class="asteriskField">*</span>
</label>
<input class="form-control" id="makh1" name="sotk1" type="text"/>
</div>
<div class="form-group ">
<label class="control-label requiredField" for="email">
Số tiền lúc đầu
<span class="asteriskField">*</span>
</label>
<input class="form-control" id="sotien" name="sotien" type="text"/>
</div>
<div class="form-group ">
<label class="control-label " for="select">
Loại tiền lúc đầu <span class="asteriskField">*</span>
</label>
<select class="select form-control" id="select1" name="select1">
<?php
$laytygia=mysqli_query($conn,"SELECT TenNgoaiTe,TyGiaBan FROM tygia");
while ($row=mysqli_fetch_assoc($laytygia)){
$x=$row['TenNgoaiTe'];
$y=$row['TyGiaBan'];
echo "<option value='{$y}'>$x</option>";
}
?>
</select>
</div>
<div class="form-group">
<label class="control-label " for="select"> Giá trị quy đổi</label>
<div id="cost" name="cost">...</div>
</div>
<div class="form-group ">
<label class="control-label " for="select">
Loại giao dịch
</label>
<select class="select form-control" id="select" name="select">
<option value="first" selected>
Chuyển tiền vào tài khoản
</option>
<option value="second" selected>
Chuyển khoản
</option>
<option value="third">
Thống kê
</option>
<option value="forth">
Tính toán
</option>
</select>
</div>
<div class="form-group">
<div>
<button class="btn btn-primary " name="submit" type="submit">
Thực hiện giao dịch
</button>
</div>
</div>
</form>
</div>
</div>
</div>
</div>
<script type="text/javascript">
var string ="VND"; // currency suffix appended to the converted amount
// Navigation dropdown: jump to the page matching the chosen transaction type.
$("#select").change(function() {
// alert($(this).find("option:selected").text());
var value = $(this).val();
if (value=='first'){
window.location.assign("index.php");
} else if (value=='second'){
window.location.assign("chuyenkhoan.php");
} else if (value=='third'){
window.location.assign("danhsach.php");
} else if(value=='forth'){
window.location.assign("tinhtoan.php");
}
});
// Live conversion preview: amount * selected rate, shown in #cost.
$("#select1").change(function(){
var tigia_ = $(this).val();
var sotien_= $("input[name='sotien']").val();
$("#cost").html(sotien_*tigia_+string);
});
$("#sotien").change(function(){
var sotien__ = $(this).val();
var tigia__= $('#select1').val();
$("#cost").html(sotien__*tigia__ + string);
// alert(tigia__) ;
});
</script>
</body>
</html>
<!-- mã khách hàng || số tiền || mã nv (sesion)|| mã gd (auto) || thời gian--><file_sep><?php
// Transfer money between two customer accounts (form posted from chuyenkhoan.php).
// NOTE(review): every query below interpolates request values straight into SQL
// and is therefore injectable — rewrite with mysqli prepared statements.
require_once('database_conn.php');
if(isset($_POST['submit'])){
$sotk_gui=$_POST['sotk'];   // sender account number
$sotk_nhan=$_POST['sotk1']; // receiver account number
$sotien=$_POST['sotien'];   // amount, in the chosen foreign currency
// $select=$_POST['select'];
$tygiaz=$_POST['select1'];  // posted option value = the TyGiaBan rate
// Map the posted rate value back to its currency name.
$query=mysqli_query($conn,"SELECT * FROM tygia WHERE TyGiaBan='$tygiaz' ");
while ($row=mysqli_fetch_assoc($query)){
$loaitien=$row['TenNgoaiTe'];
}
$manv=$_SESSION['manv'];
$date=date("Y/m/d H:i:s");
// header('location: ');
if ($sotk_gui =="" || $sotien=="" || $sotk_nhan == "" ){
$_SESSION['notif']="Yêu cầu nhập đầy đủ thông tin";
header('location: chuyenkhoan.php');
}
else if ($sotien<0){
$_SESSION['notif'] ="Số tiền không hợp lệ";
header('location: chuyenkhoan.php');
}
else if ($sotk_nhan==$sotk_gui){
$_SESSION['notif'] ="Không thể chuyển vào chính tài khoản của mình";
header('location: chuyenkhoan.php');
} else
{
// Look up both accounts; bail out if either does not exist.
$query=mysqli_query($conn,"SELECT MaKH,SoduTK from khachhang where SoTK='$sotk_gui'");
$query1=mysqli_query($conn,"SELECT MaKH,SoduTK from khachhang where SoTK='$sotk_nhan'");
if (mysqli_num_rows($query)==0 || mysqli_num_rows($query1)==0){
$_SESSION['notif']="Tài khoản gửi hoặc nhận không đúng";
header('location: chuyenkhoan.php');
} else{
$row=mysqli_fetch_row($query);
$makh_gui=$row[0]; $sodu_gui=$row[1];
$row=mysqli_fetch_row($query1);
$makh_nhan=$row[0]; $sodu_nhan=$row[1];
// NOTE(review): the conversion below uses TyGiaMuaTienMat (cash buying rate)
// although the form's option value was TyGiaBan — confirm which rate applies.
$query=mysqli_query($conn,"SELECT TyGiaMuaTienMat,MaTyGia,TenNgoaiTe FROM tygia WHERE TenNgoaiTe='$loaitien'");
while ($row=mysqli_fetch_assoc($query)){
$tygia=$row['TyGiaMuaTienMat'];$matygia=$row['MaTyGia'];
$tenngoaite=$row['TenNgoaiTe'];
}
$sodu_gui=$sodu_gui-$tygia*$sotien;
if ($sodu_gui<0){
$_SESSION['notif']="Số dư không đủ để chuyển ";
header('location: chuyenkhoan.php');
} else{
// insert into the giaodich (transaction) table
$add_giaodich=mysqli_query($conn,"INSERT INTO giaodich(MaKH,MaNV) VALUES ('$makh_gui','$manv')");
// update both account balances
// NOTE(review): the two UPDATEs are not wrapped in a transaction, so a failure
// between them can debit the sender without crediting the receiver.
$update_gui=mysqli_query($conn,"UPDATE khachhang Set SoduTK='$sodu_gui' WHERE MaKH='$makh_gui'");
$sodu_nhan+=$tygia*$sotien;
$update_nhan=mysqli_query($conn,"UPDATE khachhang Set SoduTK='$sodu_nhan' WHERE MaKH='$makh_nhan'");
// insert into the transaction-detail table
$loaigiaodich="Chuyen khoan";
$query=mysqli_query($conn,"INSERT INTO chitietgiaodich(ThoiGianGiaoDich,SoTienLucDau,LoaiTienLucDau,MaTyGia,LoaiGiaoDich,MaKh,MaNV,MaKHNhan) VALUES('$date','$sotien','$tenngoaite','$matygia','$loaigiaodich','$makh_nhan','$manv','$makh_gui')");
// fetch the sender's and receiver's names for the confirmation message
$query=mysqli_query($conn,"SELECT TenKH FROM khachhang WHERE MaKH='$makh_gui'");
$query=mysqli_fetch_row($query);
$query1=mysqli_query($conn,"SELECT TenKH FROM khachhang WHERE MaKH='$makh_nhan'");
$query1=mysqli_fetch_row($query1);
$_SESSION['notif']="Giao dịch thành công "."Người gửi " .$query[0]." tới người nhận ".$query1[0] ." số tiền " .$tygia*$sotien ." vào lúc ".$date;
header('location: chuyenkhoan.php');
}
}
}
}
?>
<!-- giao dich chitietgiaodich khachhang -->
<!-- thêm MaKHNhan vào bảng chi tiết giao dịch--><file_sep><?php // kết nối cơ sở dữ liệu
// CRUD endpoint for the customer list (danhsach.php): add, update or delete a
// customer depending on which submit button was pressed, then redirect back.
// All values come from the form, so every query now uses a prepared statement
// (the previous string-built SQL was injectable).
require_once('database_conn.php');
mysqli_set_charset($conn,'utf8');

if (isset($_POST["Them"])){ // "Them" = add
    $ten = $_POST["ten"];
    $sdt = $_POST["sdt"];
    $diachi = $_POST["diachi"];
    $sotk = $_POST["sotk"];
    // Only insert when the account number is not taken yet.
    $check = mysqli_prepare($conn, "SELECT MaKH FROM khachhang WHERE SoTK = ?");
    mysqli_stmt_bind_param($check, 's', $sotk);
    mysqli_stmt_execute($check);
    mysqli_stmt_store_result($check);
    if (mysqli_stmt_num_rows($check) == 0){
        $x = 0; // new customers start with a zero balance
        $add = mysqli_prepare($conn, "INSERT INTO khachhang (TenKH,SoduTK,SDT,DiaChi,SoTK) VALUES (?,?,?,?,?)");
        mysqli_stmt_bind_param($add, 'sisss', $ten, $x, $sdt, $diachi, $sotk);
        mysqli_stmt_execute($add);
    }
}
if (isset($_POST["Sua"])){ // "Sua" = edit
    $ten = $_POST["ten"];
    $ma = $_POST["ma"];
    $sdt = $_POST["sdt"];
    $diachi = $_POST["diachi"];
    $sotk = $_POST["sotk"];
    // An UPDATE keyed on the primary key simply affects 0 rows when the id does
    // not exist, so the previous SELECT pre-check is unnecessary.
    $update = mysqli_prepare($conn, "UPDATE khachhang SET TenKH = ?, SDT = ?, DiaChi = ?, SoTK = ? WHERE MaKH = ?");
    mysqli_stmt_bind_param($update, 'sssss', $ten, $sdt, $diachi, $sotk, $ma);
    mysqli_stmt_execute($update);
}
if (isset($_POST["Xoa"])){ // "Xoa" = delete
    $ma = $_POST["ma"];
    $del = mysqli_prepare($conn, "DELETE FROM khachhang WHERE MaKH = ?");
    mysqli_stmt_bind_param($del, 's', $ma);
    mysqli_stmt_execute($del);
}
header('location: danhsach.php');
?><file_sep><?php
require_once('database_conn.php');
?>
<html>
<body>
<a href="index.php">Quay lại</a>
<h2>Danh sách tất cả khách hàng</h2>
<table>
<tr>
<th>Tên khách hàng</th>
<th>Mã khách hàng</th>
<th>Số điện thoại</th>
<th>Địa chỉ </th>
<th>Số tài khoản</th>
</tr>
<?php
// Render one table row per customer.
// NOTE(review): fields are echoed without htmlspecialchars(); escape them to
// prevent stored XSS if customer data ever contains markup.
$sql="SELECT * from khachhang ";
$query = mysqli_query($conn,$sql);
// $num_rows=mysqli_num_rows($query);
while($row=mysqli_fetch_assoc($query)){?>
<tr><td><?php echo $row['TenKH'] ;?></td>
<td><?php echo $row['MaKH'] ;?></td>
<td><?php echo $row['SDT'] ;?></td>
<td><?php echo $row['DiaChi'];?></td>
<td><?php echo $row['SoTK'];?></td>
</tr><?php
}?>
</table><br><br>
<form action="danhsach_action.php" method="POST">
Thêm người<br>
<input type="text" name="ten" placeholder="Tên">
<br>
<!-- <input type="text" name="ma" placeholder="Mã">
<br> -->
<input type="text" name="sdt" placeholder="SĐT">
<br>
<input type="text" name="diachi" placeholder="Địa chỉ"><br>
<input type="text" name="sotk" placeholder="Số tài khoản"><br>
<input type="submit" name="Them" value="Thêm">
</form>
<form action="s_action.php" method="POST">
Thay đổi thông tin<br>
<input type="text" name="ma" placeholder="Mã muốn sửa">
<br>
<input type="text" name="ten" placeholder="Tên">
<br>
<input type="text" name="sdt" placeholder="SĐT">
<br>
<input type="text" name="diachi" placeholder="Địa chỉ"><br>
<input type="text" name="sotk" placeholder="Số tài khoản"><br>
<input type="submit" name="Sua" value="Sửa">
</form>
<form action="g_action.php" method="POST" >
<input type="text" name="ma" placeholder="Mã muốn xóa"><br>
<input type="submit" name="Xoa" value="Xoa">
</form>
</body>
</html>
<file_sep><?php
session_start();
// if(isset($_SESSION['manv'])) header('location: ../index.php');
?>
<html>
<head>
<?php include ("../header.php") ?>
<style>
ul{
list-style-type :none;
margin: 0;/*lề*/
padding: 0;/*đệm*/
overflow: hidden;/*tràn*/
background-color: #333;/*màu nền*/
position: fixed;/*cố định*/
top:0;
width :100%;/*nvbar = màn hình*/
}
li{
float: left;/* nổi trái*/
}
li a{
display: block;/*khung bao quanh chữ*/
color:white;/*chữ*/
text-align: center;
padding:14px 16px;/*đệm trên+trái*/
text-decoration: none;
}
li a:hover:not(.active){
background-color: #111;
}
.active {
background-color: #4CAF50;
}
/* chân trang*/
footer {
background-color: pink;
padding : 8px;
}
</style>
</head>
<body>
<?php if (isset($_SESSION['manv']) && !isset($_SESSION['out'])) header('location: ../index.php') ?>
<ul>
<li style="float:right"><a href="../login/"><span class="glyphicon glyphicon-log-in"></span> Đăng nhập</a></li>
</ul><br><br><br>
<div class="container-fluid">
<div class="row">
<div class="col-md-12"> <!-- độ dài thanh nhập -->
<div class="row">
<div class="col-md-2"> <!-- mép lùi vào -->
</div>
<div class="col-md-5">
<h3 class="text-center">
Đăng nhập
</h3>
<p class="notification" style="color:red;">
<?php
if(isset($_SESSION['notify'])){
echo $_SESSION['notify'];
}
unset($_SESSION['notify']);
?>
</p>
<form class="form-horizontal" role="form" method="POST" action="action.php">
<div class="form-group">
<label for="inputEmail3" class="col-sm-2 control-label">
<NAME>
</label> <!--đặt chữ email bên trái khung nhập-->
<div class="col-sm-10">
<input type="text" class="form-control" id="manv" name="manv" /> <!-- *** -->
</div>
</div>
<div class="form-group">
<label for="inputPassword3" class="col-sm-2 control-label">
Mật khẩu
</label>
<div class="col-sm-10">
<input type="<PASSWORD>" class="form-control" id="inputPassword3" name="password" /> <!-- *** -->
</div>
</div>
<div class="form-group">
<div class="col-sm-offset-2 col-sm-10">
<?php $bn="button-submit"?>
<button type="submit" class="btn btn-success" id="loginbutton" name='button-submit'>
Đăng nhập </button><!-- tạo 1 buton tên là button submit trong 1 form có POST -->
</div>
</div>
</form>
</div>
</div>
</div>
</div>
</div>
</body>
</html> | 16235db7982db47ad05ee993c36730ceef171e62 | [
"PHP"
] | 10 | PHP | conlacda/giang | 75d70dfb3b9e2deecdb2abd7dd865026e6755bfa | 305e79d67e7a5a7d58ca9aacc1d9877a3a08b4ad |
refs/heads/master | <file_sep>def bringData():
from selenium import webdriver
import pandas as pd
import time
options = webdriver.ChromeOptions()
options.add_argument('headless') //arkaplana atma işlemi
url = "https://covid19.saglik.gov.tr/"
driver = webdriver.Chrome(options = options)
driver.get(url)
time.sleep(1)
item = driver.find_element_by_xpath('//*[@id="post-carosel-6"]/div/div[4]/div/table/tbody').text
data = item.replace(",",".")
# Veriler : İllere Göre Haftalık Vaka Sayısı (100 binde)
ss = pd.Series(data.splitlines()).str.split(expand = True).rename(columns = {0: "Sehir Adi", 1:"Vaka Sayisi"})
#splitlines, split metodu bir dizedeki kelimeleri boşluklardan ayırır ve bir dizeler listesi verir.
#string.SplitYöntemi, giriş dizesini bir veya daha fazla sınırlayıcı temelinde bölerek bir alt dizeler dizisi oluşturur.
#data'yi newline'lardan ayırarak 81 elemanlı bir listeye dönüştürüyoruz,
# pd.Series'e paslıyoruz.
# Sonra oradaki str.split'ten yararlanarak her veriyi,
# "il-vaka" gibi bölüyoruz; expand=True bölüm sayısı kadar yeni sütuna koymayı sağlıyor.
# Sonrasında sütunları yeniden adlandırıyoruz.
ss.to_csv('verilerim.csv') # csv dosyasına yazdırıyoruz.
<file_sep># CoronaVirusMap
### Python 'Folium' library was used. Coronavirus world map and detailed Turkey map were made.
## -Project Usage-
The data is in the Data folder.
Turkey data is scraped dynamically from https://covid19.saglik.gov.tr/ using Selenium.
World data is read from an Excel file; I'll make it dynamic in the future.
Marker colors vary according to the number of cases.
The threshold variables will be refined later.
## Launching CoronaVirusMap.py may take a while. It generates the world_map.html file; open it with the browser you are using.
Don't forget to give a star if you like it :)
## Some images from the project








<file_sep>
def vaka_Sayisi_renk(vaka):
    """Map a country's total case count to a marker colour.

    Bands: <200k green, <450k white, <1.75M orange, otherwise red.
    """
    bands = ((200000, "green"), (450000, "white"), (1750000, "orange"))
    for limit, colour in bands:
        if vaka < limit:
            return colour
    return "red"
def vaka_sayisi_yaricap(vaka):
    """Map a country's total case count to a circle radius (metres).

    Same bands as vaka_Sayisi_renk: <200k, <450k, <1.75M, rest.
    """
    if vaka < 200000:
        return 40000
    if vaka < 450000:
        return 100000
    if vaka < 1750000:
        return 200000
    return 400000
def olum_orani_yaricap(vaka, vefat):
    """Map a country's case-fatality rate (percent) to a circle radius.

    Assumes vaka > 0; a zero case count divides by zero, as before.
    """
    rate = (vefat / vaka) * 100
    if rate < 2.5:
        return 40000
    if rate < 5:
        return 100000
    if rate < 7.5:
        return 200000
    return 400000
def olum_orani_renk(vaka, vefat):
    """Map a country's case-fatality rate (percent) to a marker colour.

    Assumes vaka > 0; a zero case count divides by zero, as before.
    """
    rate = (vefat / vaka) * 100
    for limit, colour in ((2.5, "green"), (5, "white"), (7.5, "orange")):
        if rate < limit:
            return colour
    return "red"
def aktif_vaka_renk(aktif):
    """Map a country's active case count to a marker colour.

    Bands: <100k green, <250k white, <750k orange, otherwise red.
    NOTE(review): the white band ends at 250k here but the matching radius
    function (aktif_vaka_yaricap) switches at 300k — confirm which is intended.
    """
    if aktif < 100000:
        return "green"
    if aktif < 250000:
        return "white"
    if aktif < 750000:
        return "orange"
    return "red"
def aktif_vaka_yaricap(aktif):
    """Map a country's active case count to a circle radius (metres).

    NOTE(review): the 100k band ends at 300k here but the matching colour
    function (aktif_vaka_renk) switches at 250k — confirm which is intended.
    """
    for limit, radius in ((100000, 40000), (300000, 100000), (750000, 200000)):
        if aktif < limit:
            return radius
    return 400000
def test_orani_yaricap(nufus,test):
if (test / nufus) * 100 < 2.5:
return 400000
elif (test / nufus) * 100 < 5:
return 200000
elif (test / nufus) * 100 < 7.5:
return 100000
else:
return 40000
def test_orani_renk(nufus,test):
if (test / nufus) * 100 < 2.5:
return "red"
elif (test / nufus) * 100 < 5:
return "orange"
elif (test / nufus) * 100 < 7.5:
return "white"
else:
return "green"
#TR
def tr_renklendir(sayi):
    """Map a Turkish province's weekly cases per 100k to a marker colour.

    Bands: <30 green, <70 blue, <110 orange, otherwise red.
    NOTE(review): the original thresholds were written Turkish-style as
    ``30.000`` etc., which Python parses as the floats 30.0/70.0/110.0 —
    exactly the values used here, so behaviour is unchanged.  Confirm that
    30/70/110 (not 30 000 …) is the intended scale; it matches the 50–300
    range of the map's colour bar.
    """
    if sayi < 30.0:
        return "green"
    if sayi < 70.0:
        return "blue"
    if sayi < 110.0:
        return "orange"
    return "red"
def risk(sayi):
    """Return the Turkish risk label for a weekly cases-per-100k value.

    Same bands as tr_renklendir: <30, <70, <110, rest.
    NOTE(review): as in tr_renklendir, the original ``30.000``-style
    literals are the floats 30.0/70.0/110.0 in Python; values preserved.
    """
    if sayi < 30.0:
        return "Düşük Risk"
    if sayi < 70.0:
        return "Orta Risk"
    if sayi < 110.0:
        return "Yüksek Risk"
    return "Çok Yüksek Risk"
#TR circle ile kullanmak için açılması gereken metot.
"""def tr_vaka_sayisi_yaricap(vaka):
if vaka < 50000:
return 40000
elif 50000 <= vaka < 150000:
return 100000
elif 150000 <= vaka < 300000:
return 200000
else:
return 300000"""<file_sep>import folium
from folium import plugins
from folium.plugins import Draw
from branca import colormap as cm #Bu kütüphane, haritaya özgü olmayan özellikleri barındıracak olan folium'dan bir yan ürün. Gelecekte bir HTML+JS nesil kütüphanesi olabilir.
import pandas as pd
import time
import bringDataTR,Methods
# --- Load all input data -----------------------------------------------------
data = pd.read_excel("Data/world_coronavirus_cases.xlsx")  # world-level figures
dataTr = pd.read_csv("Data/tr_enlem_boylam.csv")           # Turkish province coordinates
bringDataTR.bringData()  # scrape fresh Turkish case counts into verilerim.csv
time.sleep(1)            # give the freshly written CSV a moment before reading it
veriTrVaka = pd.read_csv("verilerim.csv")#,encoding="utf-8-sig")
#Sehir nüfüs ve bolge adlarını çektik. (city population and region names)
ilBilgi = pd.read_excel("Data/SehirlerBolgeler.xlsx")
tr_nufus = list(ilBilgi["Nufus"])       # NOTE(review): currently unused
tr_bolge_ad = list(ilBilgi["BolgeAd"])  # NOTE(review): currently unused
# World columns -> parallel lists consumed by the circle loops below.
enlemler = list(data["Enlem"])            # latitudes
boylamlar = list(data["Boylam"])          # longitudes
toplam_vaka = list(data["Toplam Vaka"])   # total cases
vefatlar = list(data["Vefat Edenler"])    # deaths
aktifVakalar = list(data["Aktif Vakalar"])
nufus = list(data["Nüfus"])               # population
toplam_test = list(data["Toplam Test"])   # total tests
# Turkish per-province lists.
t_enlemler = list(dataTr["tEnlem"])
t_boylamlar = list(dataTr["tBoylam"])
t_isimler = list(veriTrVaka["Sehir Adi"])
t_vakalar = list(veriTrVaka["Vaka Sayisi"])
linear = cm.LinearColormap(["green","blue","orange","red"],vmin=50,vmax=300) #Tr color info (legend for the Turkish layer)
# --- Map scaffolding: one toggleable FeatureGroup per overlay ----------------
vaka_sayisi_haritasi = folium.FeatureGroup(name='Toplam Vaka Sayısı',show=False)
olum_orani_haritasi = folium.FeatureGroup(name="Ölüm Oranı",show=False)
aktif_vaka_haritasi = folium.FeatureGroup(name ="Aktif Vaka Sayısı",show=False)
test_orani_haritasi = folium.FeatureGroup(name="Test Oranı",show=False)
nufus_dagilim_haritasi = folium.FeatureGroup(name="Nüfus Dağılım Haritası",show=False)
tr_dagilim_haritasi = folium.FeatureGroup(name="Türkiye Vaka Haritası",show=False)
tr_sinir_haritasi = folium.FeatureGroup("Türkiye Sınır Dağılım Haritası",show=False)
draw = Draw(export=True)  # lets the user draw shapes and export them as GeoJSON
world_map = folium.Map()
# Alternative base tile sets, selectable from the layer control.
folium.TileLayer("Cartodb Positron").add_to(world_map)
folium.TileLayer("openstreetmap").add_to(world_map)
folium.TileLayer("Cartodb dark_matter").add_to(world_map)
folium.TileLayer("Stamen Terrain").add_to(world_map)
# One folium.Circle per country, sized/coloured by total case count.
for enlem,boylam,vaka in zip(enlemler,boylamlar,toplam_vaka):
    vaka_sayisi_haritasi.add_child(folium.Circle(location=(enlem,boylam),
                                radius=Methods.vaka_sayisi_yaricap(vaka),
                                color=Methods.vaka_Sayisi_renk(vaka),
                                fill_color=Methods.vaka_Sayisi_renk(vaka),
                                fill_opacity=0.4))
# One circle per country, scaled by case-fatality rate.
for enlem,boylam,vaka,vefat in zip(enlemler,boylamlar,toplam_vaka,vefatlar):
    olum_orani_haritasi.add_child(folium.Circle(location=(enlem,boylam),
                                radius=Methods.olum_orani_yaricap(vaka, vefat),
                                color=Methods.olum_orani_renk(vaka, vefat),
                                fill_color=Methods.olum_orani_renk(vaka, vefat),
                                fill_opacity=0.4))
# One circle per country, scaled by active case count.
for enlem,boylam,aktif in zip(enlemler,boylamlar,aktifVakalar):
    aktif_vaka_haritasi.add_child(folium.Circle(location=(enlem,boylam),
                                radius=Methods.aktif_vaka_yaricap(aktif),
                                color=Methods.aktif_vaka_renk(aktif),
                                fill_color=Methods.aktif_vaka_renk(aktif),
                                fill_opacity=0.4))
# One circle per country, scaled (inversely) by tests per capita.
for enlem,boylam,ulke_nufus ,test in zip(enlemler,boylamlar,nufus,toplam_test):
    test_orani_haritasi.add_child(folium.Circle(location=(enlem,boylam),
                                radius=Methods.test_orani_yaricap(ulke_nufus, test),
                                color=Methods.test_orani_renk(ulke_nufus, test),
                                fill_color=Methods.test_orani_renk(ulke_nufus, test),
                                fill_opacity=0.4))
# One marker per Turkish province; the icon colour encodes the risk band and
# the popup shows the textual risk label.
for t_enlem,t_boylam,il,vaka in zip(t_enlemler,t_boylamlar,t_isimler,t_vakalar):
    tr_dagilim_haritasi.add_child(folium.Marker(location=[t_enlem,t_boylam], color = "red",
                            icon=folium.Icon(color=Methods.tr_renklendir(sayi=vaka),
                                    icon='bar-chart',
                                    prefix='fa'),
                            popup=Methods.risk(sayi=vaka),
                            tooltip=["Sehir Adi:",il,
                                    "\nVaka Sayisi:",vaka]))
sehirIndex = veriTrVaka.set_index("Sehir Adi")  # province name -> case count lookup for vakaKontrol
def vakaKontrol(vak):
    """Return the choropleth fill colour for province *vak* (GeoJSON "Name")."""
    return Methods.tr_renklendir(sehirIndex.loc[vak, 'Vaka Sayisi'])
# Alternative rendering of the provinces as circles instead of markers:
"""for t_enlem,t_boylam,il,vaka in zip(t_enlemler,t_boylamlar,t_isimler,t_vakalar):
    tr_dagilim_haritasi.add_child(folium.Circle(location=(t_enlem,t_boylam),
                                  radius=tr_vaka_sayisi_yaricap(vaka),
                                  color=Tr_renklendir(vaka),
                                  fill_color=Tr_renklendir(vaka),
                                  fill_opacity=0.4))"""
# FIX: a second loop identical to the test-ratio loop above (same data, same
# circles added to 'test_orani_haritasi') used to live here.  It only added
# every test-ratio circle twice, doubling the apparent fill opacity, so the
# duplicate has been removed.
# World population choropleth: colour each country by its 2005 population.
nufus_dagilim_haritasi.add_child(folium.GeoJson(data=(open("Data/world.json", "r",
                                 encoding="utf-8-sig").read()),
                                 style_function=lambda x: {"fillColor":"green"
                                 if x["properties"]["POP2005"] < 20000000
                                 else
                                 "white"
                                 if 20000000 <= x["properties"]["POP2005"] <= 50000000
                                 else
                                 "orange"
                                 if 50000000 <= x["properties"]["POP2005"] <= 100000000
                                 else
                                 "red"}))
# Turkey province borders, filled according to each province's case count.
tr_sinir_haritasi.add_child(folium.GeoJson(open("Data/turkey-il-sinirlar.json", "r",
                            encoding="utf-8-sig").read(),
                            style_function=lambda il:
                            {'fillColor':vakaKontrol(il["properties"]["Name"])}))
miniMap = plugins.MiniMap(toggle_display=True)
# Register every overlay, the plugins and the layer selector on the map.
world_map.add_child(vaka_sayisi_haritasi)
world_map.add_child(olum_orani_haritasi)
world_map.add_child(aktif_vaka_haritasi)
world_map.add_child(test_orani_haritasi)
world_map.add_child(nufus_dagilim_haritasi)
world_map.add_child(tr_dagilim_haritasi)
world_map.add_child(tr_sinir_haritasi)
world_map.add_child(miniMap)
world_map.add_child(linear)
world_map.add_child(draw)
world_map.add_child(folium.LayerControl(position="topright"))
world_map.save("world_map.html") | 88f7618efd2ad0db27f2a3e9658bea1fc9457bf8 | [
"Markdown",
"Python"
] | 4 | Python | yusufenesaras/CoronaVirusMap | 7ff6491a45937f2b2d024772143bc24db39a5c9d | 8c4f7ec4c9fabd02c4233d2916b6f8011a39ec0f |
refs/heads/master | <repo_name>dedenistiawan/scmamp<file_sep>/vignettes/Data_loading_and_manipulation.R
## Code extracted (purl-ed) from the scmamp vignette on data loading and
## manipulation.  Each "## ----" marker delimits one knitr chunk; the script
## assumes the 'scmamp' package and its bundled example files are installed.
## ---- prompt=TRUE-------------------------------------------------------------
library(scmamp)
data(data_gh_2008)
head(data.gh.2008)
data(data_gh_2010)
head(data.gh.2010)
data(data_blum_2015)
head(data.blum.2015)
## ---- eval=FALSE, prompt=TRUE-------------------------------------------------
#  data.raw <- readComparisonFile(file="results.dat", alg.cols=c('Alg_1', 'Alg_2', 'Alg_3'),
#                                 skip=5, sep=";")
## ---- prompt=TRUE-------------------------------------------------------------
# Load a single comparison-format file shipped with the package.
data.dir <- system.file("loading_tests",package="scmamp")
file.path <- paste(data.dir, "rgg_complete_comparison.out", sep="/")
data.raw <- readComparisonFile(file=file.path, alg.cols=3:10, col.names=NULL)
head(data.raw)
## ---- prompt=TRUE-------------------------------------------------------------
dir <- paste(system.file("loading_tests",package="scmamp"),
             "comparison_files", sep="/")
list.files(dir)
## ---- prompt=TRUE-------------------------------------------------------------
# Regular expression used to pull Size and Radius out of each file name.
fname.pattern <- "rgg_size_([0-9]*)_r_([0-9]*.[0-9]*)\\.out"
## ---- prompt=TRUE-------------------------------------------------------------
var.names <- c("Size", "Radius")
## ---- prompt=TRUE-------------------------------------------------------------
alg.names <- c("FruitFly", "Shukla", "Ikeda", "Turau", "Rand1", "Rand2", "FrogCOL", "FrogMIS")
## ---- prompt=TRUE-------------------------------------------------------------
rm("data.raw")
data.raw <- readComparisonDir (directory=dir, alg.cols=alg.names, col.names=NULL,
                               names=var.names, fname.pattern=fname.pattern)
head(data.raw)
## ---- prompt=TRUE-------------------------------------------------------------
dir <- system.file("loading_tests", package="scmamp")
file <- paste(dir, "rgg_complete_experiment.out", sep="/")
content <- read.csv(file)
content[c(1,901,181),]
## ---- prompt=TRUE, cache=TRUE-------------------------------------------------
# Experiment format: one row per run, reshaped to one column per algorithm.
rm("data.raw")
data.raw <- readExperimentFile (file=file, alg.col="Algorithm", value.col="Evaluation")
head(data.raw)
## ---- prompt=TRUE, cache=TRUE-------------------------------------------------
rm("data.raw")
dir <- paste(system.file("loading_tests", package="scmamp"),
             "experiment_files", sep="/")
list.files(dir)[1:10]
pattern <- "rgg_size_([0-9]*)_r_(0.[0-9]*)_([a-z, A-Z, 1, 2]*).out"
var.names <- c("Size", "Radius", "Algorithm")
data.raw <- readExperimentDir (directory=dir, names=var.names, fname.pattern=pattern,
                               alg.var.name='Algorithm', value.col=1, col.names="Evaluation")
head(data.raw)
## ---- echo=-1-----------------------------------------------------------------
summarizeData(data=data.raw, fun=median, group.by=c("Size"), ignore=c("Radius"))
## -----------------------------------------------------------------------------
data.filtered <- filterData(data=data.raw,
                            condition="Size == 100 & Rand1 <= Rand2",
                            remove.cols="Size")
dim(data.filtered)
dim(data.raw)
## -----------------------------------------------------------------------------
summarizeData(data.filtered, group.by=c("Radius"))
## ---- warning=FALSE , cache=TRUE , echo=-1------------------------------------
# Post-hoc comparison against the best ("max") algorithm per group.
test <- "wilcoxon"
group.by <- c("Size","Radius")
alg.cols <- 3:10
result <- postHocTest(data=data.raw, algorithms=alg.cols, group.by=group.by,
                      test=test, control="max", correct="holland")
## -----------------------------------------------------------------------------
# Format the summary as a LaTeX-style table: bold = control, '+' = no
# significant difference at alpha = 0.05.
summ <- result$summary
pval <- result$corrected.pval
bold <- is.na(pval)
mark <- pval > 0.05
mark[, (1:2)] <- FALSE
mark[is.na(mark)] <- FALSE
digits <- c(0, 3, rep(2, 8))
writeTabular(table=summ, format="f", bold=bold, mark=mark, mark.char="+",
             hrule=c(0, 10, 20, 30), vrule = c(2, 4), digits=digits,
             print.row.names=FALSE)
<file_sep>/vignettes/Bayesian_analysis_of_algorithm_rankings.R
## Code extracted (purl-ed) from the scmamp vignette on Bayesian analysis of
## algorithm rankings (Plackett-Luce model).  "## ----" markers are knitr
## chunk delimiters.
## -----------------------------------------------------------------------------
library(scmamp)
data("data_blum_2015")
head(data.blum.2015)
## ---- message=FALSE-----------------------------------------------------------
# Fit the Bayesian Plackett-Luce model on the Size==100 instances; the first
# two columns (problem descriptors) are dropped from the result matrix.
data.100 <- subset(data.blum.2015, subset=data.blum.2015$Size==100)
results.100 <- bPlackettLuceModel(x.matrix=data.100[,-c(1,2)], min=FALSE,
                                  nsim=2000, nchains=10,parallel=TRUE)
## -----------------------------------------------------------------------------
results.100$expected.win.prob
results.100$expected.mode.rank
## ---- , fig.width=10, fig.height=6, out.width="100%"--------------------------
hist(results.100$posterior.weights[,"FrogCOL"],
     main="", xlab="Prob. FrogCOL being the best")
## ----message=FALSE------------------------------------------------------------
# Same analysis on the Size==1000 instances.
data.1000 <- subset(data.blum.2015, subset=data.blum.2015$Size==1000)
results.1000 <- bPlackettLuceModel(x.matrix=data.1000[,-c(1,2)],
                                   min=FALSE, nsim=2000, nchains=10,parallel=TRUE)
## ---- fig.width=10, fig.height=6, out.width="100%"----------------------------
boxplot(results.1000$posterior.weights)
## ---- fig.width=8, fig.height=8, out.width="100%"-----------------------------
# Restrict to three algorithms and renormalise so weights sum to one per
# sample, then plot the posterior on the 2-simplex.
weights <- results.1000$posterior.weights[,c(1, 7, 8)]
weights <- weights / rowSums(weights)
plotBarycentric(weights)
## ---- fig.width=8, fig.height=8, out.width="100%"-----------------------------
# Posterior probability of FrogCOL beating FrogMIS / FruitFly.
fc.better.fm <- weights[, 2] / (weights[, 2] + weights[, 3])
fc.better.ff <- weights[, 2] / (weights[, 2] + weights[, 1])
exp.fc.vs.fm <- mean(fc.better.fm)
exp.fc.vs.ff <- mean(fc.better.ff)
hist(fc.better.fm, main=paste("Expected probability =",
                              round(exp.fc.vs.fm,3)),
     xlab="Probability of FrogCOL better than FrogMIS")
hist(fc.better.ff, main=paste("Expected probability =",
                              round(exp.fc.vs.ff,3)),
     xlab="Probability of FrogCOL better than FruitFly")
## -----------------------------------------------------------------------------
mean(data.1000[,"FrogCOL"]>data.1000[,"FruitFly"])
## -----------------------------------------------------------------------------
mean(data.1000[,"FrogCOL"]>data.1000[,"Ikeda"])
mean(data.1000[,"FrogMIS"]>data.1000[,"Ikeda"])
mean(data.1000[,"FruitFly"]>data.1000[,"Ikeda"])
## ----message=FALSE------------------------------------------------------------
# Refit the model using only the three selected algorithms.
data.1000.sub <- subset(data.blum.2015[, c(3, 9,10)], subset=data.blum.2015$Size==1000)
results.1000.sub <- bPlackettLuceModel(x.matrix=data.1000.sub,
                                       min=FALSE, nsim=2000, nchains=10,parallel=TRUE)
## ---- fig.width=8, fig.height=8, out.width="100%"-----------------------------
weights.sub <- results.1000.sub$posterior.weights
plotBarycentric(weights.sub)
fc.better.fm <- weights.sub[, 2] / (weights.sub[, 2] + weights.sub[, 3])
fc.better.ff <- weights.sub[, 2] / (weights.sub[, 2] + weights.sub[, 1])
exp.fc.vs.fm <- mean(fc.better.fm)
exp.fc.vs.ff <- mean(fc.better.ff)
hist(fc.better.fm, main=paste("Expected probability =",
                              round(exp.fc.vs.fm,3)),
     xlab="Probability of FrogCOL better than FrogMIS")
hist(fc.better.ff, main=paste("Expected probability =",
                              round(exp.fc.vs.ff,3)),
     xlab="Probability of FrogCOL better than FruitFly")
<file_sep>/README.md
# scmamp: Statistical Comparison of Multiple Algorithms in Multiple Problems
This is a simple R package aimed at simplifying the statistical analysis of the results in the comparison of algorithms in different problems.
It is mainly focused on non parametric methods and implements Shaffer static and Bergmann and Hommel dynamic corrections for pairwise tests.
The package also includes some plotting tools, such as the critical difference plots shown in _Demšar, J., 2006_. Indeed, the package is mainly based on the papers:
<NAME>. (2006) Statistical Comparisons of Classifiers over Multiple Data Sets. _Journal of Machine Learning Research_, 7, 1-30.
<NAME>. and <NAME>. (2008) An Extension on "Statistical Comparisons of Classifiers over Multiple Data Sets" for all Pairwise Comparisons. _Journal of Machine Learning Research_, 9, 2677-2694.
<NAME>. and <NAME>. (2010) Advanced Nonparametric Tests for Multiple Comparison in the Design of Experiments in Computational Intelligence and Data Mining: Experimental Analysis of Power. _Information Sciences_, 180, 2044-2064.
## Package installation
The last version of the package can be installed running the following commands:
```r
if (!require("devtools")) {
install.packages("devtools")
}
devtools::install_github("b0rxa/scmamp")
```
The package can also be installed using the tar.gz files in the root directory. First, download the `scmamp_*.tar.gz` file and install it running:
```r
install.packages(path.tar.gz.file, repos=NULL)
```
where `path.tar.gz.file` refers to the path of the downloaded file. Note that these files may not be up to date.
## Documentation
The package includes two vignettes, one for the [basic use](http://htmlpreview.github.io/?https://github.com/b0rxa/scmamp/blob/master/inst/doc/Statistical_assessment_of_the_differences.html) and another with information about the [data manipulation](http://htmlpreview.github.io/?https://raw.githubusercontent.com/b0rxa/scmamp/master/inst/doc/Data_loading_and_manipulation.html). To access to the local versions of these vignettes (once the package is installed):
```r
library("scmamp")
browseVignettes("scmamp")
```
<file_sep>/vignettes/Statistical_assessment_of_the_differences.R
## Code extracted (purl-ed) from the scmamp vignette on the statistical
## assessment of differences between algorithms.  "## ----" markers are knitr
## chunk delimiters.
## ---- prompt=TRUE-------------------------------------------------------------
library("scmamp")
library("ggplot2")
library("Rgraphviz")
data(data_blum_2015)
data(data_gh_2008)
head(data.blum.2015)
head(data.gh.2008)
## ----prompt=TRUE , fig.width=7, fig.height=5, warning=FALSE-------------------
plotDensities (data=data.gh.2008, size=1.1)
## ----prompt=TRUE , fig.width=7, fig.height=5----------------------------------
qqplot <- qqplotGaussian (data.gh.2008[,"k-NN(k=1)"], size=5 , col="orchid")
qqplot + theme_classic()
## ----prompt=TRUE--------------------------------------------------------------
# Omnibus tests for differences among all algorithms.
friedmanTest(data.gh.2008)
imanDavenportTest(data.gh.2008)
friedmanAlignedRanksTest(data.gh.2008)
quadeTest(data.gh.2008)
## ----prompt=TRUE--------------------------------------------------------------
# Nemenyi post-hoc: pairs whose rank difference exceeds the critical value.
test <- nemenyiTest (data.gh.2008, alpha=0.05)
test
test$diff.matrix
abs(test$diff.matrix) > test$statistic
## ----prompt=TRUE,fig.width=7 , fig.height=3-----------------------------------
plotCD (data.gh.2008, alpha=0.05, cex=1.25)
plotCD (data.gh.2008, alpha=0.01, cex=1.25)
## ----prompt=TRUE--------------------------------------------------------------
# Raw (uncorrected) pairwise p-values from different post-hoc procedures.
friedmanPost(data=data.gh.2008, control=NULL)
quadePost(data=data.gh.2008, control=NULL)
pv.matrix <- friedmanAlignedRanksPost(data=data.gh.2008, control=NULL)
## ----prompt=TRUE , warning=FALSE----------------------------------------------
pv.matrix
adjustShaffer(pv.matrix)
pv.adj <- adjustBergmannHommel(pv.matrix)
pv.adj
## ----prompt=TRUE,eval=FALSE---------------------------------------------------
#  install.packages("BiocManager")
#  BiocManager::install("Rgraphviz")
## ----prompt=TRUE,fig.width=7 , fig.height=5-----------------------------------
r.means <- colMeans(rankMatrix(data.gh.2008))
drawAlgorithmGraph(pvalue.matrix=pv.adj, mean.value=r.means, alpha=0.05,
                   font.size=10, node.width=3, node.height=1)
## ----prompt=TRUE,fig.width=7 , fig.height=5-----------------------------------
r.means <- colMeans (rankMatrix(data.gh.2008))
drawAlgorithmGraph (pvalue.matrix=pv.adj, mean.value=r.means, alpha=0.05, 'fdp',
                    highlight.color="red", node.color="white", font.color="black",
                    font.size=10, node.width=2, node.height=1)
## ----prompt=TRUE, fig.width=7 , fig.height=7, warning=FALSE-------------------
plt <- plotPvalues(pvalue.matrix=pv.adj,
                   alg.order=order(r.means, decreasing=FALSE))
plt +
  labs(title="Corrected p-values using Bergmann and Hommel procedure") +
  scale_fill_gradientn("Corrected p-values" , colours = c("skyblue4" , "orange"))
## ----prompt=TRUE--------------------------------------------------------------
# Comparisons against a control algorithm (by name or column index).
friedmanAlignedRanksPost(data.gh.2008, control = "NaiveBayes")
pv <- quadePost(data.gh.2008, control = 2)
## ----prompt=TRUE--------------------------------------------------------------
adjustHolland(pvalues=pv)
adjustFinner(pvalues=pv)
adjustRom(pvalues=pv, alpha=0.05)
adjustLi(pvalues=pv)
## ----full_process_1, prompt=TRUE----------------------------------------------
# Full workflow example 1: omnibus test on the 2008 data.
alpha <- 0.05
data <- data.gh.2008
friedmanTest(data)
## ----full_process_2, prompt=TRUE----------------------------------------------
multipleComparisonTest(data=data, test="iman")
## ----full_process_3, prompt=TRUE , fig.width=7 , fig.height=5-----------------
post.results <- postHocTest(data=data, test="aligned ranks", correct="bergmann",
                            use.rank=TRUE)
post.results
alg.order <- order(post.results$summary)
plt <- plotPvalues(post.results$corrected.pval, alg.order=alg.order)
plt + labs(title=paste("Corrected p-values using Bergmann and Hommel procedure",sep=""))
drawAlgorithmGraph(post.results$corrected.pval, mean.value=post.results$summary,
                   alpha=alpha, font.size=10)
## ----full_process_4, prompt=TRUE----------------------------------------------
# Full workflow example 2: grouped analysis of the 2015 benchmark.
data <- data.blum.2015
group.by <- c("Size","Radius")
multipleComparisonTest(data=data, group.by=group.by,
                       test="quade", correct="finner")
control <- "FrogCOL"
post.results <- postHocTest(data=data, group.by=group.by, control=control,
                            test="aligned ranks", correct="rom", use.rank=FALSE)
## ----full_process_5, prompt=TRUE----------------------------------------------
# Build a formatted table: best value per row in bold, non-significant
# differences in italics.
avg.val <- post.results$summary
best <- apply(avg.val, MARGIN=1,
              FUN=function(x){
                m <- max(x[-(1:2)])
                return(c(FALSE, FALSE, x[-(1:2)]==m))
              })
best <- t(best)
no.diff <- post.results$corrected.pval > alpha
# The size and radius columns set as false
no.diff[,1:2] <- FALSE
no.diff[is.na(no.diff)] <- FALSE
writeTabular(table=avg.val, format='f', bold=best, italic=no.diff,
             hrule=c(0, 10, 20, 30), vrule=2, digits=c(0, 3, rep(2, 8)),
             print.row.names = FALSE)
## ----full_process_6, prompt=TRUE, fig.width=7 , fig.height=5------------------
control <- NULL
group.by <- "Size"
post.results <- postHocTest(data=data, algorithms=3:10, group.by=group.by,
                            control=control, test="aligned ranks", correct="holland",
                            use.rank=TRUE)
# Plot the matrix for the first group
i <- 1
alg.order <- order(post.results$summary[i,-1])
plotPvalues(post.results$corrected.pval[, , i], alg.order=alg.order)
# Plot the matrix for the second group
i <- 2
alg.order <- order(post.results$summary[i,-1])
plotPvalues(post.results$corrected.pval[, , i], alg.order=alg.order)
# Plot the matrix for the third group
i <- 3
alg.order <- order(post.results$summary[i,-1])
plotPvalues(post.results$corrected.pval[, , i], alg.order=alg.order)
<file_sep>/R/loading_functions.R
# NON-EXPORTED, AUXILIARY FUNCTIONS -------------------------------------------
processExperimentMatrix <- function (data, alg.col, value.col) {
  # Auxiliary function to process an experiment matrix: reshapes a long-format
  # table (one row per run, algorithm in 'alg.col', result in 'value.col')
  # into a wide matrix with one column per algorithm, keeping the remaining
  # columns as problem descriptors.
  # Args:
  #   data: Data to be processed
  #   alg.col: Name or id of the column identifying the algorithm
  #   value.col: Name or id of the column containing the values
  # Returns:
  #   Processed matrix
  #
  # Verify the column names/id's; names are converted into column indices.
  if (is.character(alg.col)) {
    if (!alg.col %in% colnames(data)) {
      # NOTE(review): the message lacks a space between the column name and
      # "column" ("Xcolumn not found"); left untouched (runtime string).
      stop (alg.col, "column not found. Column names in the file are (",
            paste(colnames(data), collapse=","), ")", sep="")
    } else {
      alg.col <- which(colnames(data) %in% alg.col)
    }
  } else if(is.numeric(alg.col)) {
    if (alg.col > dim(data)[2] | alg.col <= 0) {
      stop ("The alg.col parameter has to be a valid value (between 1 and ",
            dim(data)[2], ")", sep="")
    }
  } else {
    stop ("The alg.col parameter has to be either a number between 1 and ",
          dim(data)[2], " or the name of a column (",
          paste(colnames(data), collapse=","), ")", sep="")
  }
  # Same validation/conversion for the value column.
  if (is.character(value.col)) {
    if (!value.col %in% colnames(data)) {
      stop (paste(value.col, " column not found. Column names in the file are (",
                  paste(colnames(data), collapse=","), ")",sep=""))
    } else {
      value.col <- which(colnames(data) %in% value.col)
    }
  } else if (is.numeric(value.col)) {
    if (value.col > dim(data)[2] | value.col <= 0) {
      stop ("The value.col parameter has to be a valid value (between 1 and " ,
            dim(data)[2], ")", sep="")
    }
  } else {
    stop ("The value.col parameter has to be either a number between 1 and ",
          dim(data)[2], " or the name of a column (",
          paste(colnames(data), collapse = ","), ")",sep="")
  }
  # Process the file to build the final matrix. 'grouping' holds the indices
  # of the descriptor columns; each unique combination of descriptors yields
  # one block of rows in the output.
  grouping <- which(!1:ncol(data) %in% c(alg.col, value.col))
  groups <- unique(data[, grouping])
  algorithms <- as.character(unique(data[, alg.col]))
  processCombination <- function (i) {
    # Subset of the whole dataset
    # Using the apply function here is not efficient. A loop in the 'grouping'
    # variables is far more computationally efficient.
    rows <- rep(TRUE, nrow(data))
    for (j in seq(along.with=grouping)) {
      g <- grouping[j]
      rows <- rows & data[, g]==groups[i, j]
    }
    sub <- subset (data, rows)
    # Process all the algorithms: extract the value vector for each one.
    processAlg <- function(alg.name) {
      sb <- subset(sub, sub[, alg.col] == alg.name)[, value.col]
      return(sb)
    }
    aux <- lapply(algorithms, processAlg)
    # Check that all the vectors have the same length; otherwise cbind below
    # would silently recycle values.
    l.aux <- sapply(aux, length)
    if (length(unique(l.aux)) > 1) {
      comb <- paste("(", paste(colnames(groups), collapse=","),
                    ") = (", paste(groups[i, ], collapse=","),
                    ")",sep="")
      lengths <- paste("(", paste(algorithms, collapse=","),
                       ") = (", paste(l.aux , collapse=","), ")", sep="")
      # NOTE(review): the second string below runs across two source lines, so
      # the error message contains an embedded newline and indentation, and
      # "algoithm" is misspelt; both left untouched (runtime strings).
      mssg <- paste("Problems while parsing the file. For every combination ",
                    "of the parameters the algorithms should have the same ","
                     number of values. In the combination ", comb, " the lengths ",
                    "associated to each algoithm are ", lengths, sep="")
      stop(mssg)
    }
    res <- do.call(cbind, aux)
    colnames(res) <- algorithms
    # suppressWarnings: cbind-ing a factor row with a numeric matrix warns.
    suppressWarnings (expr={
      cbind(groups[i,], res)
    })
  }
  aux <- lapply(1:nrow(groups), FUN=processCombination)
  return(do.call(rbind, aux))
}
processExpFile <- function(file, fname.pattern, names, alg.var.name , value.col, col.names=NULL, ...){
  # Auxiliary function to process individual experiment files in a directory
  # Args:
  #   file: Path of the file to process
  #   fname.pattern: Pattern to extract information from the file name
  #   names: Vector of names for the values extracted from the file name
  #   alg.var.name: Name of the variable (either a column or an extracted value)
  #                 containing the information about the algorithm used
  #   value.col: Column containing the results
  #   col.names: Names for the columns in the file. If NULL, the first row
  #              in the file is used as name
  #
  # Returns:
  #   Matrix with the information read from the file
  #
  # NOTE(review): 'alg.var.name' and 'value.col' are accepted for interface
  # compatibility with the caller but are never used inside this function.
  #
  # Process the name of the file: pick a separator symbol that does not occur
  # in the file name (the empty 'split' breaks the name into single characters).
  fname <- basename(file)
  symbs <- c('_', ':', ',', ';', '-', '+', '*', '&', '%', '#')
  chars.in.name <- unlist(strsplit(fname, split=vector()))
  splt.id <- which(!(symbs %in% chars.in.name))[[1]]
  splt <- symbs[splt.id]
  # Transform the name into something easy to split and the separate it into the elements
  replacement <- paste(paste("\\", 1:length(names), sep=""), collapse=splt)
  params <- strsplit(gsub(fname.pattern, replacement, fname), splt)[[1]]
  names(params)<-names
  # With explicit column names the file is assumed to be headerless.
  if (!is.null(col.names)) {
    data <- read.csv(file, header=FALSE, ...)
    if (length(col.names) != ncol(data)) {
      stop("The number of columns (", ncol(data), ") in the file does not match ",
           "the length of 'col.names' (", length(col.names), ")")
    }
    colnames(data) <- col.names
  } else {
    data <- read.csv(file, header=TRUE, ...)
  }
  # Merge the info get from the file name with that inside it: the extracted
  # parameters are replicated for every row of the file.
  output <- cbind(matrix(rep(params, nrow(data)), ncol=length(params), byrow=TRUE),
                  data)
  colnames(output) <- c(names, colnames(data))
  return(output)
}
processCompFile <- function(file, fname.pattern, names, alg.cols, col.names, ...){
  # Auxiliary function to process individual comparison files in a directory
  # Args:
  #   file: Path of the file to process
  #   fname.pattern: Pattern to extract information from the file name
  #   names: Vector of names for the values extracted from the file name
  #   alg.cols: Columns containing the results of the algorithms
  #   col.names: Names for the columns in the file. If NULL, the first row
  #              in the file is used as name
  #
  # Returns:
  #   Matrix with the information read from the file
  #
  # The 'header' read.csv argument is derived from 'col.names', so it must
  # not be supplied by the caller.
  rcsv.args <- list(...)
  if (!is.null(rcsv.args$header)) {
    stop("The argument header cannot be set by hand. It depends on whether a ",
         "col.names argument is passed or not")
  }
  # Pick a separator symbol that does not occur in the file name (the empty
  # 'split' breaks the name into single characters).
  fname <- basename(file)
  symbs <- c('_',':',',',';','-','+','*','&','%','#')
  chars.in.name <- unlist(strsplit(fname, split=vector()))
  splt.id <- which(!(symbs %in% chars.in.name))[[1]]
  splt <- symbs[splt.id]
  # Transform the name into something easy to split and the separate it into the elements
  replacement <- paste(paste("\\", 1:length(names), sep=""), collapse=splt)
  params <- strsplit(gsub(fname.pattern, replacement, fname), splt)[[1]]
  names(params) <- names
  # With explicit column names the file is assumed to be headerless.
  if(is.null(col.names)){
    header <- TRUE
  } else {
    header <- FALSE
  }
  data <- read.csv(file, header=header, ...)
  if (!is.null(col.names)) {
    if (ncol(data) != length(col.names)) {
      stop ("The size of the table and the number of column names do not match")
    }
    names(data) <- col.names
  }
  # Translate algorithm column names into indices, if names were given.
  if(is.character(alg.cols)) {
    aux <- which(names(data) %in% alg.cols)
  }else{
    aux <- alg.cols
  }
  # Keep only in-range indices; a shortfall means some names were not found.
  id.alg <- subset(aux, subset=((aux > 0) & (aux <= ncol(data))))
  if (length(id.alg) != length(alg.cols)) {
    stop ("Not all the algorithm names provided have been found in the ",
          "file header")
  }
  # Prepend the parameters extracted from the file name to every row and
  # reorder so that algorithm columns come last.
  aux.matrix <- matrix(rep(params, nrow(data)), ncol=length(params), byrow=T)
  res <- cbind(aux.matrix, data[, -id.alg], data[, id.alg])
  names(res) <- c(names(params), names(data)[-id.alg], names(data)[id.alg])
  return(res)
}
# EXPORTED FUNCTIONS -----------------------------------------------------------
#' @title Read data from an experiment-like file
#'
#' @description This function reads the data from a file where each row is an experiment characterized by some variables (one of which should be the algorithm used) and with one and only one numeric result. For files where there is more than one result per line see \code{\link{readComparisonFile}}.
#' @param file Path to the file to read.
#' @param alg.col Name or index of the column corresponding to the algorithm used in the experiment.
#' @param value.col Name or index of the column corresponding to the numerical result of the experiment.
#' @param col.names Vector of names for the columns. If not provided (or \code{NULL}) the names will be read from the first line of the file.
#' @param ... Additional parameters for the read.csv function used to load the data. It can be used, for example, to set the separator (e.g., \code{sep="\t"}). Note that the \code{header} argument is automatically set according to the \code{col.names} argument.
#' @return A data.frame where each column represents either a feature of the experiment or the result of running an algorithm. Algorithm columns are placed always at the end of the table.
#' @seealso \code{\link{readExperimentDir}}, \code{\link{readComparisonFile}}, \code{\link{readComparisonDir}} and the vignette \code{vignette(topic="Data_loading_and_manipulation", package="scmamp")}
#' @examples
#' dir <- system.file("loading_tests",package="scmamp")
#' file <- paste(dir , "rgg_complete_experiment.out" , sep="/")
#' data <- readExperimentFile (file=file, alg.col="Algorithm", value.col="Evaluation")
#' dim(data)
#' head(data)
readExperimentFile <- function (file, alg.col, value.col, col.names=NULL, ...) {
  # Read an experiment-format file (one row per run) and reshape it into a
  # wide matrix with one column per algorithm.
  # Args:
  #   file:      Path of the file to read
  #   alg.col:   Name or index of the column identifying the algorithm
  #   value.col: Name or index of the column containing the numerical result
  #   col.names: Optional column names; when given, the file is read headerless
  #   ...:       Extra arguments forwarded to read.csv (except 'header')
  # Returns:
  #   The matrix produced by processExperimentMatrix
  #
  # 'header' is derived from 'col.names', so it must not be supplied by hand.
  rcsv.args <- list(...)
  if (!is.null(rcsv.args$header)) {
    stop("The argument header cannot be set by hand. It depends on whether the ",
         "col.names argument is passed or not")
  }
  if (!is.null(col.names)) {
    # FIX: the file must be read *before* its column count can be validated.
    # The original code evaluated ncol(data) prior to the read.csv call, so
    # the function failed whenever 'col.names' was supplied (same ordering as
    # in processExpFile, which does this correctly).
    data <- read.csv(file, header=FALSE, ...)
    if (length(col.names) != ncol(data)) {
      stop("The number of columns (", ncol(data),
           ") in the file does not match the length of 'col.names' (",
           length(col.names), ")")
    }
    colnames(data) <- col.names
  } else {
    data <- read.csv(file, header=TRUE, ...)
  }
  # Reshape the long-format table into one column per algorithm.
  data <- processExperimentMatrix(data, alg.col, value.col)
  return(data)
}
#' @title Read data from an experiment-like files in a directory
#'
#' @description This function reads the data from all the files in a directory. Only one column of results is expected in each file. If the files contain the results of two or more algorithms, see function \code{\link{readComparisonFile}}. The function can extract information from the file name.
#' @param directory Directory with the files to load. It should only contain files to load, no other kind of file.
#' @param names List of names for the variables to be extracted from the file name
#' @param alg.var.name Name of the variable that defines the algorithm used in the experiment. It can be either one of the variables extracted from the file name or the name of one of the columns in the file.
#' @param value.col Name or index (referred to the column in the file) of the column containing the results.
#' @param fname.pattern Regular expression to extract information from the file names. It has to be a regular expression that matches the name of the files and where the information to be extrcted has to be between brakets. As an example, to store the whole file name the expression \code{'([.]*)'} can be used. For more example see the examples below or the vignette covering the data loading.
#' @param col.names Vector of names for the columns. If not provided (or \code{NULL}) the names will be read from the first line of the file.
#' @param ... Additional parameters for the read.csv function used to load the data. It can be used, for example, to set the separator (e.g., \code{sep="\t"}). Note that the \code{header} argument is automatically set according to the \code{col.names} argument.
#' @return A data.frame where each column represents either a feature of the experiment or the result of running an algorithm. Algorithm columns are placed always at the end of the table.
#' @details Note that all the files should have the same format (same number of columns and, in case they have, same header)
#' @seealso \code{\link{readExperimentFile}}, \code{\link{readComparisonFile}}, \code{\link{readComparisonDir}} and the vignette \code{vignette(topic="Data_loading_and_manipulation", package="scmamp")}
#' @examples
#' dir <- paste(system.file("loading_tests",package="scmamp"), "experiment_files", sep="/")
#' # The format of the files is rgg_size_SIZE_r_RADIUS_ALGORITHM.out, where variables
#' # to extract are in capital letters.
#' list.files(dir)[1:5]
#' # The regular expresion can be as simple as substituting each variable name in the expression
#' # above by ([XXX]*), where XXX is the list of symbols that appear in the name.
#' pattern <- "rgg_size_([0-9]*)_r_(0.[0-9]*)_([a-z,A-Z,1,2]*).out"
#' var.names <- c("Size", "Radius", "Algorithm")
#' data <- readExperimentDir (directory=dir, names=var.names, fname.pattern=pattern,
#' alg.var.name="Algorithm", value.col="Evaluation",
#' col.names="Evaluation")
#' dim(data)
#' head(data)
readExperimentDir <- function(directory, names, fname.pattern, alg.var.name,
                              value.col, col.names=NULL, ...){
  # Read every experiment-format file in 'directory', extract the variables
  # encoded in each file name via 'fname.pattern', and reshape the combined
  # table into one column per algorithm.
  #
  # 'header' is derived from 'col.names', so it must not be supplied by hand.
  rcsv.args <- list(...)
  if (!is.null(rcsv.args$header)) {
    stop("The argument header cannot be set by hand. It depends on whether a ",
         "col.names argument is passed or not")
  }
  if(!is.character(alg.var.name)) {
    stop("This function only accepts a name as the column indicating the ",
         "algorithm ('alg.var.name' argument)")
  }
  if (length(alg.var.name) != 1 | length(value.col) != 1) {
    stop ("The 'alg.var.name' and 'value.col' have to be of dimension 1")
  }
  # NOTE(review): 'first' is computed here but never used afterwards.
  if (substring(fname.pattern, 1, 1) == "(") {
    first <- 1
  } else {
    first <- 2
  }
  # Load the first file to check the header name
  f <- list.files(directory)[1]
  if (is.null(col.names)) {
    data <- read.csv(paste(directory, f, sep="/"), header=TRUE, ...)
  } else {
    data <- read.csv(paste(directory, f, sep="/"), header=FALSE, ...)
    if (length(col.names) != ncol(data)) {
      stop("The number of columns (", ncol(data), ") in the file does not ",
           "match the length of 'col.names' (", length(col.names), ")")
    }
    colnames(data) <- col.names
  }
  # The algorithm variable has to come either from the file name pattern or
  # from a column of the files themselves.
  if ((!alg.var.name %in% names) & (!alg.var.name %in% colnames(data))) {
    # NOTE(review): the trailing quote on the next line opens a string literal
    # that continues onto the following source line, so the error message
    # contains an embedded newline and indentation; left untouched because it
    # is a runtime string.
    stop("The name ", alg.var.name, " not found neither in the file name ","
         nor in the header.", sep="")
  }
  # Validate the value column and normalise it to a column name.
  if (is.character(value.col)) {
    if (!value.col %in% colnames(data)) {
      stop("Column named ", value.col, " not found in the files")
    }
  } else {
    if (value.col < 1 | value.col > ncol(data)) {
      stop("The column index ", value.col, " is out of the range of the file")
    }else{
      value.col <- colnames(data)[value.col]
    }
  }
  # Accumulate all the files into a single long-format table.
  data <- data.frame()
  for (file in list.files(directory)) {
    data.new <- processExpFile(file=paste(directory, file, sep="/"),
                               fname.pattern=fname.pattern, names=names,
                               alg.var.name=alg.var.name, value.col=value.col,
                               col.names=col.names, ...)
    data <- rbind(data, data.new)
  }
  # Reshape into one column per algorithm.
  d <- processExperimentMatrix(data=data, alg.col=alg.var.name, value.col=value.col)
  return(d)
}
#' @title Read data from a comparison file
#'
#' @description This function reads the data from a files where two or more algorithms are compared in different problems. The file can have some columns that characterize the problem and one column per algorithm. If each row contain only the result obtained by one algorithm, use the \code{\link{readExperimentFile}} function.
#' @param file Path of the file to load
#' @param alg.cols A vector column names or indices inicating which columns contain the results. The rest are assumed as descriptors of the problems
#' @param col.names Vector of names of the columns. If not NULL, the files are assumed not to have a header and the columns are named using this vector
#' @param ... Additional parameters for the read.csv function used to load the data. It can be used, for example, to set the separator (e.g., \code{sep="\t"}). Note that the \code{header} argument is automatically set according to the \code{col.names} argument.
#' @return A data.frame where each column represents either a feature of the experiment or the result of running an algorithm. Algorithm columns are placed always at the end of the table.
#' @seealso \code{\link{readExperimentFile}}, \code{\link{readExperimentDir}}, \code{\link{readComparisonDir}} and the vignette \code{vignette(topic="Data_loading_and_manipulation", package="scmamp")}
#' @examples
#' dir <- system.file("loading_tests",package="scmamp")
#' file <- paste(dir , "rgg_complete_comparison.out" , sep="/")
#' data <- readComparisonFile(file=file, alg.cols=3:10)
#' dim(data)
#' head(data)
#'
readComparisonFile <- function(file, alg.cols, col.names=NULL, ...) {
  # Read a comparison-formatted file: some columns describe the problem and
  # the columns referenced by 'alg.cols' contain the results of each
  # algorithm. Returns a data frame with the descriptor columns first and the
  # algorithm columns at the end.
  rcsv.args <- list(...)
  if (!is.null(rcsv.args$header)) {
    # The header flag is derived from 'col.names' and cannot be overridden.
    stop("The argument header cannot be set by hand. It depends on whether a ",
         "col.names argument is passed or not")
  }
  # The file is assumed to have a header line only when no explicit column
  # names are provided.
  header <- is.null(col.names)
  data <- read.csv(file, header=header, ...)
  if (!is.null(col.names)) {
    # FIX: the original compared ncol(data) against the col.names vector
    # itself instead of its length, so any call with col.names failed.
    if (ncol(data) != length(col.names)) {
      stop ("The size of the table and the number of column names do not match")
    }
    names(data) <- col.names
  }
  # Translate algorithm column names into indices when needed.
  if (is.character(alg.cols)) {
    aux <- which(names(data) %in% alg.cols)
  } else {
    aux <- alg.cols
  }
  # Keep only the indices that fall inside the valid column range.
  id.alg <- subset(aux, subset=((aux > 0) & (aux <= ncol(data))))
  if (length(id.alg) != length(alg.cols)) {
    stop ("Not all the algorithm names provided have been found in the file header")
  }
  # Descriptor columns first, algorithm columns at the end.
  res <- cbind(data[, -id.alg], data[, id.alg])
  return(res)
}
#' @title Read data from a directory of comparison-like files
#'
#' @description This function reads the data from all files in a directory. Each file is expected to be formatted as a comparison file, i.e., the file can have some columns that characterize the problem and one column per algorithm. If each row contains only the result obtained by one algorithm, use the \code{\link{readExperimentDir}} function.
#' @param directory Directory where the files are located.
#' @param alg.cols A vector of column names or indices indicating which columns contain the results. The rest are assumed to be descriptors of the problems
#' @param col.names Vector of names of the columns. If not NULL, the files are assumed not to have a header and the columns are named using this vector.
#' @param names List of names for the variables to be extracted from the file name.
#' @param fname.pattern Regular expression to extract information from the file names. It has to be a regular expression that matches the name of the files and where the information to be extracted has to be between brackets. As an example, if the whole file name is to be used, the expression \code{'([.]*)'} can be used. For more examples see the examples below or the vignette covering the data loading.
#' @param ... Additional parameters for the read.csv function used to load the data. It can be used, for example, to set the separator (e.g., \code{sep="\t"}). Note that the \code{header} argument is automatically set according to the \code{col.names} argument.
#' @return A data.frame where each column represents either a feature of the experiment or the result of running an algorithm. Algorithm columns are placed always at the end of the table.
#' @seealso \code{\link{readExperimentFile}}, \code{\link{readExperimentDir}}, \code{\link{readComparisonDir}} and the vignette \code{vignette(topic="Data_loading_and_manipulation", package="scmamp")}
#' @examples
#' dir <- paste(system.file("loading_tests",package="scmamp") , "comparison_files" , sep="/")
#' # The format of the files is rgg_size_SIZE_r_RADIUS.out, where variables to extract are in
#' # capital letters.
#' list.files(dir)[1]
#' # The regular expresion can be as simple as substituting each variable name in the expression
#' # above by ([XXX]*), where XXX is the list of symbols that appear in the name.
#' pattern <- "rgg_size_([0-9]*)_r_(0.[0-9]*).out"
#' var.names <- c("Size", "Radius")
#' data <- readComparisonDir (directory=dir, alg.cols=1:8, names=var.names,
#' fname.pattern=pattern)
#' dim(data)
#' head(data)
readComparisonDir <- function (directory, alg.cols, names, fname.pattern,
                               col.names=NULL, ...){
  # Load every comparison-formatted file in 'directory' and row-bind the
  # results. The variables listed in 'names' are extracted from each file
  # name using the regular expression 'fname.pattern'.
  rcsv.args <- list(...)
  if (!is.null(rcsv.args$header)) {
    # The header flag is derived from 'col.names' and cannot be overridden.
    stop("The argument header cannot be set by hand. It depends on whether a col.names argument is passed or not")
  }
  # FIX: removed the unused locals 'first' and 'f' — the original assigned
  # them ("load the first file to check the header name") but never used them.
  data <- data.frame()
  for (file in list.files(directory)){
    data <- rbind(data,
                  processCompFile(file=paste(directory,file,sep="/"),
                                  fname.pattern=fname.pattern, names=names,
                                  alg.cols=alg.cols, col.names=col.names, ...))
  }
  data
}
<file_sep>/vignettes/Bayesian_analysis_of_the_differences.Rmd
---
title: "Bayesian Analysis of the Differences between Classifiers"
author: "<NAME> and <NAME>"
date: "`r Sys.Date()`"
output: rmarkdown::html_vignette
bibliography: refs.bib
vignette: >
%\VignetteIndexEntry{Bayesian Analysis of the Differences}
%\VignetteEngine{knitr::rmarkdown}
\usepackage[utf8]{inputenc}
---
# Time for a change ...
Since the very beginning, the use of statistical tests has been surrounded by controversy. For decades statisticians have highlighted the weak points of these statistical tools and, in the last years, a concern about their misuse in scientific works has been added. Indeed, in different domains researchers have started to move from classical statistical tests to other methods that provide further insights about the data analysed.
As its title suggests, with @benavoli2017 this recommendation has also arrived to the empirical comparison of algorithms (in the particular topic of classification algorithms). This paper proposes some Bayesian approaches to compare pairs of classifiers in individual datasets as well as in groups of datasets.
We have included the methods proposed in @benavoli2017 in `scmamp`, and this tutorial will show you how you can make use of them. The code included is an adaptation of that provided by the authors of the paper, available at [github](https://github.com/BayesianTestsML/tutorial).
As an example, we have also included in the package a set of results that can be used to test the methods, so first we will load the required packages and the data.
```{r , prompt=TRUE}
library("scmamp")
library("ggplot2")
data(data_kcv_example)
```
The example dataset contains the AUC value obtained by four algorithms (AlgA to AlgD) in all the folds of a 10 times 10 fold crossvalidation in 10 different datasets. The first three columns identify the dataset, repetition and fold respectively.
Before we start with the analysis, we will explore the data to see how the samples are distributed. For that, we can use the `plotDensities` function (remember that this function makes use of the [`ggplot2`](http://ggplot2.org/) package, @wickham2009, and thus you can further manipulate the output to change the look of the plot).
```{r,prompt=TRUE , fig.width=7, fig.height=5, warning=FALSE}
algorithms <- names(data.kcv.example)[4:7]
db <- 5
plotDensities (data=data.kcv.example[data.kcv.example$DB==db, algorithms], size=1.1)
```
The code above plots the densities of the four algorithms in the fifth dataset. The last three algorithms have a density that could be roughly considered as normal, but we can see that the density of the first algorithm, AlgA, is quite skewed. You can check the rest of the densities modifying the `db` variable.
## Bayesian approach
Before going into the details, in this section we will briefly review the basic idea behind the Bayesian approach followed here. For further information you can check @benavoli2017.
Although in Bayesian inference it is possible to test hypotheses, in this package we have included the estimation approach presented in @benavoli2017, where the methods used will model (or approximate) the posterior probability density function of the parameters. Then, the posterior probability is used to compute/estimate different probabilities of interest.
In particular, we will be interested in providing an answer to the question is Algorithm A better/equal/worse than Algorithm B?. Of special interest is the concept of "equal", which is closely related with the concept of null hypothesis in hypothesis testing. However, from a Bayesian point of view, such a definition of equality is useless (for any continuous parameter, the probability of that parameter being equal in two populations will be, in practical terms, zero).
To solve this problem, and in order to provide a sensible definition of equality, in @benavoli2017 the authors recommend using the concept of _rope_, which is simply a segment of the possible values of the parameter where we regard both algorithms as equal. In all the cases covered in this vignette, the parameter of interest is the performance difference between two algorithms, which typically ranges between -1 and 1. In such a context, a possible rope would be [-0.01, 0.01], meaning that any difference smaller than 0.01 in magnitude is regarded as irrelevant. Of course, this rope can be adapted to the particularities of any application.
## Comparison of two classifiers in a single dataset
The most basic situation is the comparison of two given algorithms in a single dataset. In that case, we face two problems when the samples come from a cross validation. The first problem is that the samples we have are not independent, due to the cross validation scheme. The second one is the assumption of a parametric distribution for the data. The problem here is that the only way we have to include the correlation in the analysis is assuming that the data follows a Gaussian distribution. In such case, we can make use of the Bayesian equivalent to the correlated t-test proposed in @nadeau2003.
If we cannot assume normality for the data, then we need to move to a non-parametric alternative, after getting rid of the correlation by averaging the results of the cross validation. In the following subsections we will explore these alternatives.
### Bayesian version of the correlated t-test
Let us assume that we regard the densities plotted above as "reasonably" normal. We can perform the comparison of any two algorithms (say AlgA and AlgB) using the Bayesian alternative to the correlated t-test implemented in the function `bCorrelatedTtest` as follows:
```{r , prompt=TRUE}
db <- 5
sample.a <- data.kcv.example[data.kcv.example$DB==db, "AlgA"]
sample.b <- data.kcv.example[data.kcv.example$DB==db, "AlgB"]
results <- bCorrelatedTtest(x=sample.a, y=sample.b, rho=0.1, rope=c(-0.01, 0.01))
results$posterior.probabilities
```
The function returns a list with a number of elements. The single most important one is `posterior.probabilities`, which contains the probability of the difference between the two algorithms (x-y) being below the lower bound of the rope (column labelled as `Left`), inside the rope (column labelled as `Rope`) and the probability of being above the upper bound of the rope (column labelled as `Right`). In this particular case we can see that most of the probability mass (`r round(results$posterior.probabilities["Right"], 3)`) is in the right, meaning that we can be quite certain that Algorithm A is better (in terms of performance) than Algorithm B.
The result of this function includes also the posterior density function of the average difference (`results$posterior`), so we can use it to visually assess our uncertainty about the conclusion. For that, you can use the function `plotPosterior` as follows:
```{r,prompt=TRUE , fig.width=7, fig.height=5}
plotPosterior(results, plot.rope=TRUE)
```
For this particular case, the relevant paremeters of the function are `plot.rope`, to indicate whether the rope should be plotted or not and `num.points`, to indicate the number of points used to plot the function (1000 by default).
In the plot we can see that there is quite uncertainty about the difference (most of the density spreads between 0.5 and 2.5), although it is quite clear that the difference is above 0.01. In some other cases the decision may not be clear, as in the following example.
```{r , prompt=TRUE}
db <- 5
sample.a <- data.kcv.example[data.kcv.example$DB==db, "AlgC"]
sample.b <- data.kcv.example[data.kcv.example$DB==db, "AlgD"]
results <- bCorrelatedTtest(x=sample.a, y=sample.b, rho=0.1, rope=c(-0.01, 0.01))
results$posterior.probabilities
```
Now there is not a clear answer, the uncertainty is too high to conclude anything. We can, again, see this graphically:
```{r,prompt=TRUE , fig.width=7, fig.height=5}
plotPosterior(results, plot.rope=TRUE)
```
This is one of the difference with the classical frequentist analysis. While with statistical test we can either say that there are differences or that there is not enough evidence to say so, with the Bayesian analysis of the results we can have high certainty about a certain answer or, too much uncertainty to conclude anything. Moreover, we can have high certainty about a number of possible situations::
* Algorithm A is better than B (when the right probability is above, say 0.9)
* Both algorithms are practically equivalent (when the rope probability is above, say 0.9)
* Algorithm B is better than A (when the left probability is above, say 0.9)
* Algorithm A is not worse than B (when the right probability + the rope probability is above, say 0.9)
* Algorithm B is not worse than A (when the left probability + the rope probability is above, say 0.9)
* Algorithms A and B are not equivalent (when the right probability + the left probability is above, say 0.9)
### Bayesian version of the signed-rank test
In the previous subsection we have assumed that the difference in performance follows a Gaussian distribution. In some cases, this is hardly true (e.g., plot the samples for dataset number 9 or 10) and thus, we should move to a non-parametric approach. The downside is that we cannot account for the correlation when no particular parametric distribution is considered. Instead, we have to get rid of it by averaging the values obtained in the k folds. Unfortunately, this way we also reduce the sample size and, thus, the reduction of the uncertainty will be smaller.
As an example, we will analyse the results obtained with the ninth dataset. As a first step, we have to average the results of the 10 folds.
```{r , prompt=TRUE}
db <- 9
summarized.data <- aggregate(data.kcv.example[, algorithms],
by=data.kcv.example[, 1:2], FUN=mean)
sample.a <- summarized.data[summarized.data$DB==db, "AlgC"]
sample.b <- summarized.data[summarized.data$DB==db, "AlgD"]
```
Now we can proceed with the analysis. In this case, as we have mentioned, we will apply the alternative to the signed-rank test presented in @benavoli2017. This analysis is accessible through the function `bSignedRankTest`:
```{r , prompt=TRUE, message=FALSE}
results <- bSignedRankTest(x=sample.a, y=sample.b,rope=c(-0.01, 0.01))
results$posterior.probabilities
```
As can be seen from the results, we can be quite confident that Algorithms C and D, in the ninth dataset, have an almost equal behaviour (i.e., the differences are almost surely inside the rope). Actually, the interpretation in this case is somewhat different to the previous example, as now the estimations are based on sampling (see @benavoli2017 for more details). In particular, the method samples the probability of being below, inside or above the rope, as in the previous example. These probabilities are also reported by the function, inside the `posterior` element of the list (now we do not have a parametric function to represent the posterior distribution).
```{r , prompt=TRUE, message=FALSE}
head(results$posterior)
```
From these results we can get the expected probability of each region just averaging the columns.
```{r , prompt=TRUE, message=FALSE}
colMeans(results$posterior)
```
As we have the distribution of the triplet of probabilities, we can in this case compute a different probability, namely, the probability of each region being the one with the highest probability. That is, for each sample we can identify which region is the most probable one and, then, estimate the probability of the right, rope and left segments being the most probable; these are the probabilities collected in `results$posterior.probabilities`.
In order to clarify these probabilities we can show them graphically using a Simplex plot. You can produce it using the `plotSimplex` function.
```{r,prompt=TRUE , fig.width=7, fig.height=7}
plotSimplex(results, A="Algorithm C", B="Algorithm D")
```
This plot represents all the samples obtained from the posterior distribution of the probabilities of the three segments (i.e., the triplets in `results$posterior`). The plot is in barycentric coordinates, being each vertex one of the probabilities in the `results$posterior` matrix. As can be seen, the triangle is divided into three equal areas, each corresponding to a vertex. All the points inside those areas have the particularity of being closest to the corresponding vertex, which in turn means that the highest probability for that point is the corresponding to that vertex.
The plot, therefore, represents the empirical distribution of the triplets, while the probabilities in `results$posterior.probabilities` correspond to the probability of the triplet falling into each area.
In this particular case the expected probability of falling in the rope is the highest, but the expected probability of falling in the right part is not negligible. However, there is very low uncertainty about the probability of the rope being the highest (results$posterior.probabilities["Rope"]) and, thus, we can conclude that both algorithms are practically equal in this dataset.
If we repeat the analysis with the results of the eight dataset the conclusions are different:
```{r,prompt=TRUE , fig.width=7, fig.height=7}
db <- 8
summarized.data <- aggregate(data.kcv.example[, algorithms],
by=data.kcv.example[, 1:2], FUN=mean)
sample.a <- summarized.data[summarized.data$DB==db, "AlgC"]
sample.b <- summarized.data[summarized.data$DB==db, "AlgD"]
results <- bSignedRankTest(x=sample.a, y=sample.b,rope=c(-0.01, 0.01))
results$posterior.probabilities
colMeans(results$posterior)
plotSimplex(results, plot.density=FALSE, A="Algorithm C", B="Algorithm D",
posterior.label=TRUE)
```
In this case the expected probability for the rope (`r round(colMeans(results$posterior)["Rope"], 3)`) and the right (`r round(colMeans(results$posterior)["Right"], 3)`) region are quite similar and, as a consequence, there is more uncertainty about which is the region with the highest probability (in this example you can see how you can include this information in the plot). Therefore, in this case there is more uncertainty about C being better or equal to D, but we can conclude that Algorithm C is not worse than Algorithm D.
## Comparison of two classifiers in several datasets
In the previous section we have seen that the conclusions we can draw in different datasets may differ. In some cases it may be of interest knowing the behaviour of the algorithms in each dataset, but quite often we are looking for a global answer to the question of which algorithm (if any) is better. We can answer this question by comparing the results obtained in all the datasets at the same time.
The particularity of this analysis is that, even if we can assume normality for the cross validated results in each dataset, merging all of them together makes no sense. For that reason, in @benavoli2017 the authors propose two methodologies, one non-parametric (the method presented at the end of the previous section) and a hierarchical model that allows us to take into account, separately, the differences inside each dataset and among datasets, as well as the correlation due to the cross validation. In the following subsections we will show you how you can run the proposed analyses.
### Bayesian signed-rank test to compare two algorithms in multiple datasets
Using and interpreting the Bayesian equivalent to the signed-rank test is very similar to what we did in the previous section, with the exception that now we will average the results for each dataset and will use these results as the samples for the analysis.
First, we produce the summaries of the data:
```{r , prompt=TRUE}
summarized.data <- aggregate(data.kcv.example[, algorithms],
by=data.frame(DB=data.kcv.example[, 1]), FUN=mean)
sample.a <- summarized.data[, "AlgC"]
sample.b <- summarized.data[, "AlgD"]
```
Now we can proceed with the analysis using the function `bSignedRankTest`:
```{r , prompt=TRUE, message=FALSE}
results <- bSignedRankTest(x=sample.a, y=sample.b,rope=c(-0.01, 0.01))
results$posterior.probabilities
```
From the results we can see that, according to the 10 datasets in our data, either algorithms C and D are equivalent or C is better than D, but certainly it is very unlikely that algorithm D outperforms algorithm C. However, there is too much uncertainty to be sure that C outperforms D. We can see this graphically:
```{r,prompt=TRUE , fig.width=7, fig.height=7}
plotSimplex(results, A="Algorithm C", B="Algorithm D", plot.density=FALSE, alpha=0.5)
```
Conversely, if we compare algorithms A and B, the conclusion is quite clear.
```{r , prompt=TRUE}
summarized.data <- aggregate(data.kcv.example[, algorithms],
by=data.frame(DB=data.kcv.example[, 1]), FUN=mean)
sample.a <- summarized.data[, "AlgA"]
sample.b <- summarized.data[, "AlgB"]
results <- bSignedRankTest(x=sample.a, y=sample.b,rope=c(-0.01, 0.01))
results$posterior.probabilities
```
We can visually assess the uncertainty using the Simplex plot:
```{r,prompt=TRUE , fig.width=7, fig.height=7}
plotSimplex(results, A="Algorithm A", B="Algorithm B", plot.density=FALSE, alpha=0.5)
```
In the plot we can clearly see that algorithm A is better than algorithm B. Moreover, all the points are in the rope-A line, meaning that the probability of algorithm B being better than A is very small. We can assess this with the expected probabilities as calculated in the previous section:
```{r , prompt=TRUE, message=FALSE}
colMeans(results$posterior)
```
### Hierarchical Bayesian model for the comparison of algorithms in multiple datasets
In the analysis above we used the averaged values for each dataset. We do so to get rid of the correlation between samples. In this section we will use a hierarchical model that will allow us to consider each observation individually, modelling the correlation in each dataset.
Briefly, the model will assume that the samples of each dataset follow a Gaussian distribution with a certain mean and variance. The priors for these parameters are a Students t distribution for the means (i.e., the model assumes that all the individual means are independent sample from a Student's t distribution) and a uniform distribution for the variances. The model further goes up in the hierarchy modeling the prior for the parameters of the Student's t distribution. For further information, please see @benavoli2017.
We can use this model through the function `bHierarchicalTest`. In this case, as we are not averaging the results and each dataset is modelled independently, we need to build two sample matrices where each row is a dataset. Let us prepare the data to compare algorithms C and D. Note that, in our data, the results are ordered by the dataset. If the arrangement of the data is different you will need to build the matrices in a different way.
```{r , prompt=TRUE, message=FALSE}
sample.a <- matrix(data.kcv.example$AlgC, byrow=TRUE, nrow=10)
sample.b <- matrix(data.kcv.example$AlgD, byrow=TRUE, nrow=10)
```
Now we proceed with the analysis. As in the non-parametric approach, we cannot analytically compute the posterior of the parameters and, thus, we need to simulate (sample) it. In this case, due to the complexity of the model, we need to use MCMC methods to obtain the samples. The function that we will use is based on a [Stan](http://mc-stan.org/) program, so you need to have the **rstan** package installed. The sampling in this case can be quite slow, depending on the problem, so be patient ...
```{r , prompt=TRUE, message=FALSE, warning=FALSE}
results <- bHierarchicalTest(sample.a, sample.b, rho=0.1, rope=c(-0.01, 0.01), nsim=2000, nchains=5)
```
As we can see, in this function we again have the `rho` parameter, which is the correlation factor used in the model of each dataset. Additionally we have two parameters, `nsim` and `nchains`. These paremeters have to do with the simulation through the MCMC algorithm. The first parameter is the number of simulations obtained for each chain and the second is the number of chains used in the simulation. By default, in each chain half the of the samples are used as burn-in (Warmup in the output of Stan program). Therefore, the number of samples we will obtain is `nchains*nsim/2`, 5000 in our example.
To assess the results, we can directly plot the samples in the Simplex, together with the probabilities associated to each region.
```{r,prompt=TRUE , fig.width=7, fig.height=7}
plotSimplex(results, A="Alg. C", B="Alg. D", posterior.label=TRUE, alpha=0.5)
```
In the plot above we can see that we cannot be sure about whether Algorithm C is equal or better to D, but quite certainly it is not worse.
In addition to this global information, we can analyse the results per dataset. This information is contained in the `additional` element of the results list.
```{r , prompt=TRUE, message=FALSE}
results$additional$per.dataset
```
The results shown in the table above are similar to those obtained with the Bayesian version of the correlated t-test (plus the expected mean difference under the posterior distribution) but with one exception. However, there is a subtle but important difference: the ten mean differences share a common, original distribution. The consequence is a shrinkage in the estimation of the averaged distributions (see @benavoli2017 for more details).
## References
<file_sep>/R/data_manipulation.R
#' @title Expression based row filtering
#'
#' @description This is a simple function to filter data based on an expression defined using the column names
#' @param data A NAMED matrix or data frame to be filtered (column names are required).
#' @param condition A string indicating the condition that the rows have to fulfill to be retained. The column names are used as variables in the condition (see examples below).
#' @param remove.cols Either a vector of column names or a vector of column indices to be removed from the result
#' @return The original data where the rows for which the condition is \code{FALSE} and the columns in the vector \code{remove.cols} have been removed
#' @seealso \code{\link{summarizeData}}, \code{\link{writeTabular}} and the vignette \code{vignette(topic="Data_loading_and_manipulation",
#' package="scmamp")}
#' @examples
#' data(data_gh_2008)
#' names(data.gh.2008)
#' filterData(data.gh.2008, condition="CN2 > 0.7 & Kernel < 0.7", remove.cols=1:2)
#'
filterData <- function (data, condition="TRUE", remove.cols=NULL) {
  # Keep the rows of 'data' for which 'condition' (an R expression written in
  # terms of the column names) evaluates to TRUE, then drop the columns
  # listed in 'remove.cols'.
  checkRow <- function (row) {
    # Extract columns as variables: each named element of the row is bound
    # to a local variable so the condition string can reference columns by
    # their names.
    for (i in seq(along.with=row)) {
      assign(names(row)[i], row[i])
    }
    # Evaluate the condition string in this local environment.
    # NOTE(review): 'apply' below coerces the data frame to a matrix, so
    # with mixed column types the row values may arrive here as characters
    # — confirm the condition is written with that in mind.
    cond <- eval(parse(text=condition))
    return(cond)
  }
  # Logical vector marking the rows to retain.
  sub <- apply(data, MARGIN=1, FUN=checkRow)
  ## Translate 'remove.cols' (names or indices) into the indices to keep.
  if (is.character(remove.cols)) {
    id.retain <- which(!(colnames(data) %in% remove.cols))
  } else {
    id.retain <- which(!(1:ncol(data) %in% remove.cols))
  }
  # In case there are indices out of range, remove them.
  id.retain <- subset(id.retain,
                      subset=id.retain > 0 & id.retain <= ncol(data))
  # Apply both the row filter and the column selection.
  sbst <- subset(data, subset=sub, select=id.retain)
  return(sbst)
}
#' @title Summarization of data
#'
#' @description This is a simple function to apply a summarization function to a matrix or data frame.
#' @param data A matrix or data frame to be summarized.
#' @param fun Function to be used in the summarization. It can be any function that, taking as first argument a numeric vector, outputs a numeric value. Typical examples are \code{\link{mean}}, \code{\link{median}}, \code{\link{min}}, \code{\link{max}} or \code{\link{sd}}.
#' @param group.by A vector of either column names or column indices according to which the data will be grouped to be summarized.
#' @param ignore A vector of either column names or column indices of the columns that have to be removed from the output.
#' @param ... Additional parameters to the summarization function (\code{fun}). For example, \code{na.rm=TRUE} to indicate that the missing values should be ignored.
#' @return A data frame where, for each combination of the values in the columns indicated by \code{group.by}, each column (except those in \code{ignore}) contains the summarization of the values in the original matrix that have that combination of values.
#' @seealso \code{\link{filterData}}, \code{\link{writeTabular}} and the vignette \code{vignette(topic="Data_loading_and_manipulation",
#' package="scmamp")}
#' @examples
#' data(data_blum_2015)
#' # Group by size and radius. Get the mean and variance of only the last two
#' # columns.
#' summarizeData (data.blum.2015, group.by=c("Radius","Size"), ignore=3:8,
#' fun=mean, na.rm=TRUE)
#' summarizeData (data.blum.2015, group.by=c("Radius","Size"), ignore=3:8,
#' fun=sd, na.rm=TRUE)
#'
summarizeData <- function (data, fun=mean, group.by=NULL, ignore=NULL, ... ) {
  # Summarize the numeric columns of 'data' with 'fun', optionally grouping
  # the rows by the value combinations in the 'group.by' columns and
  # dropping the columns listed in 'ignore'.
  if (!is.data.frame(data)) {
    data <- data.frame(data)
  }
  # Convert character column definitions into column indices.
  if (is.character(group.by)) {
    group.by <- which(colnames(data) %in% group.by)
  }
  if (is.character(ignore)) {
    ignore <- which(colnames(data) %in% ignore)
  }
  ## Only numeric columns can be summarized; any non-numeric column that is
  ## neither a grouping column nor already ignored is moved to 'ignore'.
  non.numeric <- which(!unlist(lapply(data, is.numeric)))
  if (!all(non.numeric %in% c(group.by, ignore))) {
    warning ("Only numeric columns can be summarized. Character and factor ",
             "columns should be either in the 'group.by' or the 'ignore' list. ",
             "Non numeric columns will be ignored")
    ignore <- unique(c(ignore, non.numeric[!(non.numeric %in% group.by)]))
  }
  # Remove any index out of bounds.
  group.by <- subset(group.by, subset=group.by > 0 & group.by <= ncol(data))
  ignore <- subset(ignore, subset=ignore > 0 & ignore <= ncol(data))
  if (length(intersect(group.by,ignore)) > 0) {
    stop("The same column cannot be simultaneously in the 'group.by' and the ",
         "'ignore' list")
  }
  if (is.null(group.by)) {
    # No grouping: summarize each remaining column over all rows.
    if (!is.null(ignore)) {
      data <- data[, -ignore]
    }
    summ <- apply(data, MARGIN=2,
                  FUN=function(x) {
                    fun(x, ...)
                  })
  }else{
    # One row per unique combination of the grouping columns.
    groups <- unique(data[, group.by])
    # NOTE(review): this wraps 'groups' in a data frame whenever 'group.by'
    # is non-empty; presumably the intent is the single-column case, where
    # unique() returns a plain vector. The wrap is harmless for multi-column
    # groups — confirm.
    if(length(group.by)) groups <- data.frame(groups)
    to.summarize <- (1:ncol(data))[-c(ignore, group.by)]
    # Summarize the rows that match the i-th combination of group values.
    summGroup <- function (i) {
      sub <- rep(TRUE, nrow(data))
      for (j in seq(along.with=group.by)) {
        sub <- sub & data[, group.by[j]] == groups[i,j]
      }
      m <- subset(data, subset=sub)
      m <- m[, to.summarize]
      # A single selected column is a vector; restore matrix shape so the
      # column-wise apply below keeps working.
      if (length(to.summarize) == 1) {
        m <- matrix(m, ncol=1)
      }
      apply(m, MARGIN=2,
            FUN=function(x) {
              fun(x , ...)
            })
    }
    aux <- lapply(1:nrow(groups), FUN=summGroup)
    # Prepend the group identifiers to the per-group summaries.
    summ <- cbind(groups, do.call(rbind, aux))
  }
  return(summ)
}
#' @title Creation of boolean matrices for highlighting results
#'
#' @description A simple function to create boolean matrices to be used when constructing LaTeX tables.
#' @param data It can be a data frame, a matrix or a vector.
#' @param find A string indicating what has to be detected. Possible values are:
#' \itemize{
#' \item{\code{'eq'}}{ All values equal to the value passed in \code{th}}
#' \item{\code{'le'}}{ All values lower or equal to the value passed in \code{th}}
#' \item{\code{'ge'}}{ All values greater or equal to the value passed in \code{th}}
#' \item{\code{'lw'}}{ All values lower than the value passed in \code{th}}
#' \item{\code{'gt'}}{ All values greater than the value passed in \code{th}}
#' \item{\code{'min'}}{ Minimum value in each row / column / matrix}
#' \item{\code{'max'}}{ Maximum value in each row / column / matrix}
#' }
#' @param th Threshold used when \code{find} is set to \code{'eq'}, \code{'ge'}, \code{'le'}, \code{'gt'} or \code{'lw'}.
#' @param by A string or string vector indicating where the min/max values have to be found. It can be \code{'row'}, \code{'col'} or \code{'mat'} for the row, column and matrix min/max respectively.
#' @return A boolean matrix that matches in dimension the output data and where the identified elements are marked as TRUE.
#' @examples
#' data('data_gh_2008')
#' booleanMatrix(data.gh.2008, find='min', by='row')
#' booleanMatrix(data.gh.2008, find='ge', th=0.5)
#'
booleanMatrix <- function (data, find='max', th=0, by='row') {
  # Builds a boolean matrix (or vector) matching 'data' in shape, where the
  # cells selected by 'find' are TRUE. Used to highlight cells in LaTeX tables.
  #
  # Args:
  #   data: Data frame, matrix or vector with the values to inspect
  #   find: What to detect: a threshold comparison ('eq', 'ge', 'le', 'gt',
  #         'lw') or an extreme ('min', 'max')
  #   th:   Threshold used by the comparison modes
  #   by:   Scope of the min/max search: 'row', 'col' or 'mat'
  #
  # Returns:
  #   A boolean structure matching 'data' in dimension (NULL for an
  #   unrecognized 'find' value, as in the original switch semantics)
  #
  # Check whether all the values are numeric or not
  if (is.data.frame(data)) {
    numeric.data <- all(apply(data, FUN="is.numeric", MARGIN=c(1,2)))
  } else if (is.matrix(data) | is.vector(data)) {
    numeric.data <- is.numeric(data)
  } else {
    stop("The 'data' argument has to be either a data frame, a matrix or a vector")
  }
  if (!numeric.data && find!='eq') {
    stop("For non-numeric matrices the only possible comparison is find='eq'")
  }
  # Note: 'margin' is only defined for 'row'/'col'; the extremes helper below
  # must not touch it when by == 'mat'
  if (by=='col') {
    margin <- 2
  } else if (by == 'row') {
    margin <- 1
  } else if (by != 'mat') {
    stop("The 'by' argument can only take values 'col', 'row' and 'mat'")
  }
  # Shared logic for 'min' and 'max': compare against the extreme of the
  # whole structure or of each row/column, keeping the original orientation.
  findExtreme <- function(extreme.fun) {
    if (is.vector(data) || by == 'mat') {
      # BUG FIX: the old 'max' branch tested length(margin) > 1, but 'margin'
      # is undefined when by == 'mat', which raised an error
      return(data == extreme.fun(data))
    }
    res <- apply(data, MARGIN=margin,
                 FUN=function(x) {
                   x == extreme.fun(x)
                 })
    # apply() returns results column-wise, so row-wise results need transposing
    if (margin == 1) {
      res <- t(res)
    }
    res
  }
  result <- switch(find,
                   'eq'={
                     data == th
                   },
                   'ge'={
                     data >= th
                   },
                   'le'={
                     data <= th
                   },
                   'gt'={
                     data > th
                   },
                   'lw'={
                     data < th
                   },
                   'min'={
                     findExtreme(min)
                   },
                   'max'={
                     findExtreme(max)
                   })
  return(result)
}
---
title: "Bayesian Analysis of the Algorithm Rankings"
author: "<NAME>"
date: "`r Sys.Date()`"
output: rmarkdown::html_vignette
bibliography: refs.bib
vignette: >
%\VignetteIndexEntry{Bayesian Analysis of Rankings}
%\VignetteEngine{knitr::rmarkdown}
\usepackage[utf8]{inputenc}
---
One of the latest additions to the package is a Bayesian model to analyse rankings of algorithms (@calvo2018; @calvo2019). In this vignette we will give a brief introduction to this model and some basic ideas about what kind of inference can we do using it.
## Plackett-Luce model for rankings
The Bayesian model implemented in the package is based on the Plackett-Luce (PL) model for rankings (@Plackett1975TheAO). In the Plackett-Luce model we have a parameter, the weight, for each element in the ranking. We will denote as $w_i$ the weight of the $i$-th element. This model assumes that the probability of an item being before another item in a ranking is independent of the rest of the items.
For a given ranking (or permutation) $\sigma=(\sigma_1,\ldots,\sigma_n)$ and a given set of weights $\mathbf{w}=(w_1, \ldots,w_n)$, the probability of $\sigma$ is:
$$
P(\sigma|\mathbf{w})=\prod_{i=1}^n \frac{w_{\sigma_i}}{\sum_{j=i}^n w_{\sigma_j}}
$$
As can be inferred from the equation above, any common factor in the weights does not affect to the probabilities. For that reason we can assume, without loss of generality, that the sum of all the weights is equal to 1. This provides us with a direct interpretation of the weights: $w_i=P(\sigma_1=i)$, that is, the probability that the $i$-th element is the top ranked one.
### Bayesian Plackett-Luce model
Although there are other Bayesian models based on the PL model, in this package we have implemented a Bayesian model where the prior distribution of parameters is modelled as a Dirichlet distribution. Thus, the hyperparameters of the model will be the $(\alpha_1,\ldots,\alpha_n)$ parameters of the prior Dirichlet distributions.
The model cannot be analytically solved, but samples of the posterior distribution of the weights can be obtained using MCMC methods.
## Using the Bayesian PL model
This model is based on rankings and, thus, our input data have to be rankings. The standard PL model does not consider the possibility of ties and, thus, in the current version of the package no ties are allowed. To overcome this limitation we can solve ties at random.
Usually our starting point will be a matrix containing the result of a number of algorithms in a number of instances of a problem. Note that, in order to transform this data into rankings, the results have to be paired. If they are not (e.g., when we have independent repetitions) the data should be processed in order to have one measure per instance.
As a running example we will use the `data.blum.2015` dataset. In this dataset we have the performance of 8 algorithms in some randomly generated instances. These instances have been generated in groups of 30 instances generated using each of 30 random generators. Thus, globally we have 900 different random instances and, thus, we can take this data as paired. Moreover, we can analyse subsets of these instances attending, for example, to their size. More details about the dataset can be found in @blum2015.
First, we have to load the data:
```{r}
library(scmamp)
data("data_blum_2015")
head(data.blum.2015)
```
The function that implements the model is `bPlackettLuceModel`. We just need to provide the data, the criterion for ranking the data (either minimising or maximising) and, optionally, the details of the MCMC simulation (number of chains simulated and number of samples from each chain). In our example the goal is maximising the objective function and, thus, the algorithm with the highest value should have rank 1. To get so we have to set `min` to `FALSE`. In this example we will analyse the results when the instance size is 100.
```{r, message=FALSE}
data.100 <- subset(data.blum.2015, subset=data.blum.2015$Size==100)
results.100 <- bPlackettLuceModel(x.matrix=data.100[,-c(1,2)], min=FALSE,
nsim=2000, nchains=10,parallel=TRUE)
```
The method will return a list with different elements. The single most important is `posterior.weights`, as this table contains the weights sampled from the posterior distribution and we can get any useful distribution or estimation from this sample. The result includes two particular estimations, the expected probability of an algorithm being the best and the expected mode ranking (i.e., the expected rank of each algorithm in the ranking with highest probability).
```{r}
results.100$expected.win.prob
results.100$expected.mode.rank
```
What we can see in the code above is that, with an expected probability of 0.94, FrogCOL is the best algorithm for problems of size 100. Moreover, in the most probable ranking (the ranking mode), we expect to find FrogCOL in first position, FrogMIS in second position and FruitFly in third position.
These are estimations, but the true advantage of using Bayesian methods is that they provide a natural way to assess the uncertainty about these estimations. For example, we can see the posterior distribution of the probability of FrogCOL being the overall best algorithm just plotting its sampled weight.
```{r, , fig.width=10, fig.height=6, out.width="100%"}
hist(results.100$posterior.weights[,"FrogCOL"],
main="", xlab="Prob. FrogCOL being the best")
```
From this analysis we can say that we are almost sure that the probability of FrogCOL being the best algorithm in problems of size 100 is above 0.9.
There is more information we can extract from the analysis. To illustrate it, let us move to a different scenario: problems of size 1000.
```{r,message=FALSE}
data.1000 <- subset(data.blum.2015, subset=data.blum.2015$Size==1000)
results.1000 <- bPlackettLuceModel(x.matrix=data.1000[,-c(1,2)],
min=FALSE, nsim=2000, nchains=10,parallel=TRUE)
```
For this new comparison we will visualize the distribution of the probability of being the best for all the algorithms. We can do this with a simple boxplot:
```{r, fig.width=10, fig.height=6, out.width="100%"}
boxplot(results.1000$posterior.weights)
```
Again, as in the previous analysis, the three best algorithms are FrogCOL, FrogMIS and FruitFly. Now, in order to simplify the analysis we can focus our attention on these three algorithms. Note that, due to the properties of the Plackett-Luce model, we can just take the weights of these algorithms in order to compute probabilities and that, given that any common factor does not affect those computations, we can normalize the weights so they sum 1. We need to do this to show the following plot:
```{r, fig.width=8, fig.height=8, out.width="100%"}
weights <- results.1000$posterior.weights[,c(1, 7, 8)]
weights <- weights / rowSums(weights)
plotBarycentric(weights)
```
We can see that there is not very much uncertainty about FrogCOL being the best algorithm. We can put this in numbers analysing the probability of FrogCOL being better than FrogMIS and FrogCOL being better than FruitFly. This can be estimated directly using the odds-ratio of the weights:
```{r, fig.width=8, fig.height=8, out.width="100%"}
fc.better.fm <- weights[, 2] / (weights[, 2] + weights[, 3])
fc.better.ff <- weights[, 2] / (weights[, 2] + weights[, 1])
exp.fc.vs.fm <- mean(fc.better.fm)
exp.fc.vs.ff <- mean(fc.better.ff)
hist(fc.better.fm, main=paste("Expected probability =",
round(exp.fc.vs.fm,3)),
xlab="Probability of FrogCOL better than FrogMIS")
hist(fc.better.ff, main=paste("Expected probability =",
round(exp.fc.vs.ff,3)),
xlab="Probability of FrogCOL better than FruitFly")
```
This probability could be directly estimated from the data:
```{r}
mean(data.1000[,"FrogCOL"]>data.1000[,"FruitFly"])
```
As we can see, both estimations disagree quite a lot. The explanation for this big difference has to do with the rest of the algorithms. It is important to remember that the model is fitted to account for all the preferences, and thus all the algorithms have a potential effect on the comparison of two algorithms. In the estimation above we are only considering the cases in which FrogCOL is better than FruitFly, but if we see what happens with, say, Ikeda we can see that FrogCOL is always better than Ikeda, but FruitFly only outperforms Ikeda in 90% of the experiments.
```{r}
mean(data.1000[,"FrogCOL"]>data.1000[,"Ikeda"])
mean(data.1000[,"FrogMIS"]>data.1000[,"Ikeda"])
mean(data.1000[,"FruitFly"]>data.1000[,"Ikeda"])
```
This information is incorporated in the model, resulting in a worse valuation of FruitFly with respect to FrogCOL. As a result, if we limit our experiment to a subset of the algorithms the resulting model is not (necessarily) exactly the same and, thus, the estimations may change.
```{r,message=FALSE}
data.1000.sub <- subset(data.blum.2015[, c(3, 9,10)], subset=data.blum.2015$Size==1000)
results.1000.sub <- bPlackettLuceModel(x.matrix=data.1000.sub,
min=FALSE, nsim=2000, nchains=10,parallel=TRUE)
```
Now we analyse the results.
```{r, fig.width=8, fig.height=8, out.width="100%"}
weights.sub <- results.1000.sub$posterior.weights
plotBarycentric(weights.sub)
fc.better.fm <- weights.sub[, 2] / (weights.sub[, 2] + weights.sub[, 3])
fc.better.ff <- weights.sub[, 2] / (weights.sub[, 2] + weights.sub[, 1])
exp.fc.vs.fm <- mean(fc.better.fm)
exp.fc.vs.ff <- mean(fc.better.ff)
hist(fc.better.fm, main=paste("Expected probability =",
round(exp.fc.vs.fm,3)),
xlab="Probability of FrogCOL better than FrogMIS")
hist(fc.better.ff, main=paste("Expected probability =",
round(exp.fc.vs.ff,3)),
xlab="Probability of FrogCOL better than FruitFly")
```
One may think that it makes no sense having (so) different estimations depending on the set of algorithms analysed, but we have to remember that these analyses focus on the ranking of the algorithms, not on the magnitude of the differences. In that regard, additional algorithm can correct for this situation when some of the algorithms have very bad results in certain cases (as happens with FruitFly).
To sum up, regarding the experimental design, it is important to have in mind that this analysis involves a model and, thus, a simplification of the reality. Also, that using a model for the ranking of a number of algorithms the conclusions are linked to that set of algorithms (for comparing two algorithms we can use other type of methods).
## References<file_sep>/vignettes/Bayesian_analysis_of_the_differences.R
## Code extracted (purled) from the vignette
## "Bayesian analysis of the differences". The '## ----' lines are knitr
## chunk headers and must be kept as-is; do not edit this file by hand —
## regenerate it from the .Rmd source instead.
## ---- prompt=TRUE-------------------------------------------------------------
library("scmamp")
library("ggplot2")
data(data_kcv_example)
## ----prompt=TRUE , fig.width=7, fig.height=5, warning=FALSE-------------------
## Visualize the per-fold score distributions of the four algorithms on one DB
algorithms <- names(data.kcv.example)[4:7]
db <- 5
plotDensities (data=data.kcv.example[data.kcv.example$DB==db, algorithms], size=1.1)
## ---- prompt=TRUE-------------------------------------------------------------
## Bayesian correlated t-test on paired k-CV samples (rho accounts for the
## correlation induced by overlapping training sets)
db <- 5
sample.a <- data.kcv.example[data.kcv.example$DB==db, "AlgA"]
sample.b <- data.kcv.example[data.kcv.example$DB==db, "AlgB"]
results <- bCorrelatedTtest(x=sample.a, y=sample.b, rho=0.1, rope=c(-0.01, 0.01))
results$posterior.probabilities
## ----prompt=TRUE , fig.width=7, fig.height=5----------------------------------
plotPosterior(results, plot.rope=TRUE)
## ---- prompt=TRUE-------------------------------------------------------------
db <- 5
sample.a <- data.kcv.example[data.kcv.example$DB==db, "AlgC"]
sample.b <- data.kcv.example[data.kcv.example$DB==db, "AlgD"]
results <- bCorrelatedTtest(x=sample.a, y=sample.b, rho=0.1, rope=c(-0.01, 0.01))
results$posterior.probabilities
## ----prompt=TRUE , fig.width=7, fig.height=5----------------------------------
plotPosterior(results, plot.rope=TRUE)
## ---- prompt=TRUE-------------------------------------------------------------
## Average over folds/repetitions first, then run the Bayesian signed-rank test
db <- 9
summarized.data <- aggregate(data.kcv.example[, algorithms],
by=data.kcv.example[, 1:2], FUN=mean)
sample.a <- summarized.data[summarized.data$DB==db, "AlgC"]
sample.b <- summarized.data[summarized.data$DB==db, "AlgD"]
## ---- prompt=TRUE, message=FALSE----------------------------------------------
results <- bSignedRankTest(x=sample.a, y=sample.b,rope=c(-0.01, 0.01))
results$posterior.probabilities
## ---- prompt=TRUE, message=FALSE----------------------------------------------
head(results$posterior)
## ---- prompt=TRUE, message=FALSE----------------------------------------------
colMeans(results$posterior)
## ----prompt=TRUE , fig.width=7, fig.height=7----------------------------------
plotSimplex(results, A="Algorithm C", B="Algorithm D")
## ----prompt=TRUE , fig.width=7, fig.height=7----------------------------------
db <- 8
summarized.data <- aggregate(data.kcv.example[, algorithms],
by=data.kcv.example[, 1:2], FUN=mean)
sample.a <- summarized.data[summarized.data$DB==db, "AlgC"]
sample.b <- summarized.data[summarized.data$DB==db, "AlgD"]
results <- bSignedRankTest(x=sample.a, y=sample.b,rope=c(-0.01, 0.01))
results$posterior.probabilities
colMeans(results$posterior)
plotSimplex(results, plot.density=FALSE, A="Algorithm C", B="Algorithm D",
posterior.label=TRUE)
## ---- prompt=TRUE-------------------------------------------------------------
## Same test, now summarizing to one value per dataset (aggregate over DB only)
summarized.data <- aggregate(data.kcv.example[, algorithms],
by=data.frame(DB=data.kcv.example[, 1]), FUN=mean)
sample.a <- summarized.data[, "AlgC"]
sample.b <- summarized.data[, "AlgD"]
## ---- prompt=TRUE, message=FALSE----------------------------------------------
results <- bSignedRankTest(x=sample.a, y=sample.b,rope=c(-0.01, 0.01))
results$posterior.probabilities
## ----prompt=TRUE , fig.width=7, fig.height=7----------------------------------
plotSimplex(results, A="Algorithm C", B="Algorithm D", plot.density=FALSE, alpha=0.5)
## ---- prompt=TRUE-------------------------------------------------------------
summarized.data <- aggregate(data.kcv.example[, algorithms],
by=data.frame(DB=data.kcv.example[, 1]), FUN=mean)
sample.a <- summarized.data[, "AlgA"]
sample.b <- summarized.data[, "AlgB"]
results <- bSignedRankTest(x=sample.a, y=sample.b,rope=c(-0.01, 0.01))
results$posterior.probabilities
## ----prompt=TRUE , fig.width=7, fig.height=7----------------------------------
plotSimplex(results, A="Algorithm A", B="Algorithm B", plot.density=FALSE, alpha=0.5)
## ---- prompt=TRUE, message=FALSE----------------------------------------------
colMeans(results$posterior)
## ---- prompt=TRUE, message=FALSE----------------------------------------------
## Hierarchical model over all datasets at once: one row per dataset,
## one column per fold (byrow=TRUE reshapes the stacked per-fold column)
sample.a <- matrix(data.kcv.example$AlgC, byrow=TRUE, nrow=10)
sample.b <- matrix(data.kcv.example$AlgD, byrow=TRUE, nrow=10)
## ---- prompt=TRUE, message=FALSE, warning=FALSE-------------------------------
results <- bHierarchicalTest(sample.a, sample.b, rho=0.1, rope=c(-0.01, 0.01), nsim=2000, nchains=5)
## ----prompt=TRUE , fig.width=7, fig.height=7----------------------------------
plotSimplex(results, A="Alg. C", B="Alg. D", posterior.label=TRUE, alpha=0.5)
## ---- prompt=TRUE, message=FALSE----------------------------------------------
results$additional$per.dataset
<file_sep>/R/latex.R
# NON-EXPORTED, AUXILIAR FUNCTIONS --------------------------------------------
printCell <- function (value, bold, italic, format, digits, mark, mark.char, na.as, ...){
  # Auxiliar function to render a single table cell as a LaTeX string
  # Args:
  #   value:     Value to be printed
  #   bold:      TRUE if the cell has to be printed in bold font
  #   italic:    TRUE if the cell has to be printed in italic font
  #   format:    Format string for numeric values (see formatC)
  #   digits:    Number of digits for numeric values
  #   mark:      TRUE if the cell gets a superscript mark
  #   mark.char: Mark to be used (any LaTeX Math valid symbol)
  #   na.as:     String used to print NA values
  #
  # Returns:
  #   The LaTeX text corresponding to the cell
  #
  value <- unlist(value)
  # Factors are rendered through their labels
  if (is.factor(value)) {
    value <- as.character(value)
  }
  # Try a numeric interpretation; non-numeric strings yield NA (warning muted)
  suppressWarnings(expr={
    as.number <- as.numeric(value)
  })
  if (is.na(as.number)) {
    # Not a number: print NAs with the replacement string, anything else as-is
    cell <- if (is.na(value)) na.as else value
  } else {
    cell <- formatC(as.number, digits=digits, format=format)
  }
  # Superscript mark, inside a math environment
  if (mark) {
    cell <- paste(cell, "$^", mark.char, "$", sep="")
  }
  # Font switches are wrapped in a group so they only affect this cell
  if (bold || italic) {
    open <- "{"
    if (bold) {
      open <- paste(open, "\\bf ", sep="")
    }
    if (italic) {
      open <- paste(open, "\\it ", sep="")
    }
    cell <- paste(open, cell, "}", sep="")
  }
  return(cell)
}
## Function to process a line
processTableRow <- function (row, bold, italic, format, digits,
                             mark, mark.char, na.as) {
  # Auxiliar function to render a full table row as LaTeX
  # Args:
  #   row:       Vector with the cells of the row
  #   bold:      Logical vector; TRUE where the cell is printed in bold
  #   italic:    Logical vector; TRUE where the cell is printed in italic
  #   format:    Format string for numeric values (see formatC)
  #   digits:    Vector with the number of digits for each column
  #   mark:      Logical vector; TRUE where the cell gets a superscript mark
  #   mark.char: Mark to be used (any LaTeX Math valid symbol)
  #   na.as:     String used to print NA values
  #
  # Returns:
  #   A string with the LaTeX code of the row, terminated with '\\'
  #
  cells <- character(length(row))
  for (i in seq_along(row)) {
    cells[i] <- printCell(value=row[i], bold=bold[i], italic=italic[i],
                          format=format, digits=digits[i], mark=mark[i],
                          mark.char=mark.char, na.as=na.as)
  }
  # Cells are joined with the LaTeX column separator and the row terminator
  return(paste(paste(cells, collapse=" & "), "\\\\"))
}
# EXPORTED FUNCTIONS -----------------------------------------------------------
#' @title Write a table in LaTeX format
#'
#' @description This is a simple function to create tabular environment in LaTeX
#' @param table A data frame with the information to write
#' @param file Path of a file. If provided, the tabular is wirten in the given file. Otherwise, it is writen to the standard output
#' @param format Format for the numeric values. The accepted formats are those in the function \code{\link{formatC}}. The typical values are \code{'g'} to automatically set the format, \code{'f'} for a fixed sized floating point format and \code{'e'} or \code{'E'} for scientific notation
#' @param bold A matrix that matches \code{'table'} in size indicating with \code{TRUE} those cells that have to be printed in bold font
#' @param italic A matrix that matches \code{'table'} in size indicating with \code{TRUE} those cells that have to be printed in italic
#' @param mark A matrix that matches \code{'table'} in size indicating with \code{TRUE} those cells that have to be marked with a superscipt symbol
#' @param mark.char Character to be used to mark cells. Note that the superscript is included in a math environment, so this has to be either a character or a valid math command in LaTeX
#' @param na.as Character to be used to write NA values in the table
#' @param align Character indicating the alignment of the colums (\code{'l'},\code{'r'} or \code{'c'})
#' @param hrule A vector of positions for the horizontal lines in the tabular. All the lines are drawn after the indicated line. When the column names are included, 0 means drawing a line after the column names. The maximum value is the number of rows - 1 (for a line after the last line see parametr \code{bty})
#' @param vrule Similar to \code{'hrule'} but for vertical lines. . The maximum value is the number of columns - 1 (for a line after the last columns see parametr \code{bty})
#' @param bty Vector indicating which borders should be printed. The vector can contain any of subset of \code{c('l','r','t','b')}, which represent, respectively, left, right, top and bottom border. If the parameter is set to \code{NULL} no border is printed.
#' @param print.col.names Logical value indicating whether the column names have to be printed or not
#' @param print.row.names Logical value indicating whether the row names have to be printed or not
#' @param digits A single number or a numeric vector with the number of digits in each column. Its size has to match the number of the final table, i.e., the colums in \code{'table'} if the row names are not included or the number of columns + 1 if the row names are printed in the final table
#' @param wrap.as.table Logical value indicating whether the latex object has to be wrapped into a table enviroment
#' @param table.position Character indicating the position of the table (\code{'h'}: here, \code{'t'}: top, or \code{'b'}: botton)
#' @param caption Character string containing the caption of the table. If NULL, no caption is printed
#' @param caption.position Character indicating the possition of the caption (\code{t}: top, the caption is printed over the table; \code{b}: botton, the caption is printed under the table)
#' @param centering Logical value indicating whether the table should be centered in the page
#' @param label Character string containing the label of the table for further references. If NULL, no label is used
#' @return LaTeX code to print the table
#' @seealso \code{\link{summarizeData}}, \code{\link{filterData}} and the vignette \code{vignette(topic="Data_loading_and_manipulation",
#' package="scmamp")}
#' @examples
#' data(data_blum_2015)
#' args <- list()
#' # Write the summarization of the data
#' args$table <- summarizeData(data.blum.2015, group.by=1:2)
#'
#' # Set in bold the maximum values per row
#' bold <- apply(args$table[, -(1:2)], MARGIN=1,
#' FUN=function(x) {
#' return(x==max(x))
#' })
#' args$bold <- cbind(FALSE, FALSE, t(bold))
#' # Fixed width, 2 decimals for the values, 0 for the size and 3 for the radius
#' args$format <- "f"
#' args$digits <- c(0,3,rep(2, 8))
#'
#' # Print the colum names but not the row names
#' args$print.row.names <- FALSE
#'
#' # Only top and bottom borders
#' args$bty <- c("t","b")
#'
#' # Add additional horizontal rules to separate the sizes
#' args$hrule <- c(0,10,20,30)
#'
#' # An additional vertical rule to separate size and radius from the results
#' args$vrule <- 2
#'
#' # Print the table
#' do.call(writeTabular, args)
writeTabular <- function (table, file=NULL, format="g", bold=NULL, italic=NULL,
                          mark=NULL, mark.char="*", na.as="n/a", align="l",
                          hrule=NULL, vrule=NULL, bty=c("t","b","l","r"),
                          print.col.names=TRUE, print.row.names=TRUE, digits=3,
                          wrap.as.table=FALSE, table.position="h", caption=NULL,
                          caption.position="b", centering=FALSE, label=NULL) {
  rows <- nrow(table)
  cols <- ncol(table)
  # Only print names that actually exist in 'table'
  print.col.names <- print.col.names & !is.null(colnames(table))
  print.row.names <- print.row.names & !is.null(rownames(table))
  # A single 'digits' value is recycled for every column (plus the row-name
  # column when it is printed; the logical is coerced to 0/1 in the sum)
  if (length(digits) == 1) {
    digits <- rep(digits, ncol(table) + print.row.names)
  }
  # Control of the digits
  if (length(digits) - print.row.names != cols) {
    stop("The number of elements in the digits vector is incorrect. The vector ",
         "should have length equal to the number of columns in 'table' if ",
         "'print.row.names' is false and the number of columns + 1 if ",
         "'print.row.names' is true. Alternatively, it can have a single value ",
         "to be used in all the numeric columns")
  }
  # Valid rule positions depend on whether names add an extra row/column;
  # position 0 means a rule right after the column (resp. row) names
  if (print.col.names) {
    hmin <- 0
  } else {
    hmin <- 1
  }
  if (print.row.names) {
    vmin <- 0
  } else {
    vmin <- 1
  }
  # Remove any vrule and hrule beyond the limits
  if (!is.null(hrule)) {
    hrule <- subset(hrule,
                    subset=((hrule >= hmin) & (hrule < rows)))
  }
  if (!is.null(vrule)){
    vrule <- subset(vrule,
                    subset=((vrule >= vmin) & (vrule < cols)))
  }
  # In case being NULL, build the bold, italic and mark matrices
  if (is.null(bold)) {
    bold <- matrix(rep(FALSE, rows * cols), ncol=cols)
  }
  if (is.null(italic)) {
    italic <- matrix(rep(FALSE, rows * cols), ncol=cols)
  }
  if (is.null(mark)) {
    mark <- matrix(rep(FALSE, rows * cols), ncol=cols)
  }
  # Pass all to character to avoid problems with factors
  nm <- rownames(table)
  table <- apply(table, MARGIN=2, FUN=as.character)
  ## if table was a matrix with only one row, apply will transform it in a vector and it may
  ## cause problems later. Therefore, it is converted back to a matrix
  if(is.null(dim(table))){
    table <- t(as.matrix(table))
  }
  rownames(table) <- nm
  # Include row and/or col names as additional info in the table
  if (print.row.names) {
    suppressWarnings(expr={
      table <- cbind(rownames(table), table)
    })
    bold <- cbind(rep(FALSE, rows), bold)
    italic <- cbind(rep(FALSE, rows), italic)
    mark <- cbind(rep(FALSE, rows), mark)
    # Shift vertical rules one position to account for the new first column
    if (!is.null(vrule)) {
      vrule <- vrule + 1
    }
    cols <- cols + 1
  }
  if (print.col.names) {
    suppressWarnings(expr={
      table <- rbind(colnames(table) , table)
    })
    bold <- rbind(rep(FALSE, cols), bold)
    italic <- rbind(rep(FALSE, cols), italic)
    mark <- rbind(rep(FALSE, cols), mark)
    # Shift horizontal rules one position to account for the new header row
    if (!is.null(hrule)) {
      hrule <- hrule + 1
    }
    rows <- rows + 1
  }
  # Open the file or the stdout
  if(is.null(file)) {
    out.file <- stdout()
  } else {
    out.file <- file(file, "w")
  }
  # begin table enviroment if wrap.as.table
  if(wrap.as.table){
    l <- paste0("\\begin{table}[", table.position, "]")
    cat(l, file=out.file, sep="\n")
    # caption on top
    if(caption.position=="t" & !is.null(caption)){
      l <- paste0("\\caption{", caption, "}")
      cat(l, file=out.file, sep="\n")
    }
    # centering
    if(centering){
      cat("\\centering", file=out.file, sep="\n")
    }
  }
  # Begin the tabular. The column-alignment string is built from 'align',
  # the requested vertical rules and the left/right borders in 'bty'
  if ("l" %in% bty) {
    algn <- "|"
  } else {
    algn <- ""
  }
  if (is.null(vrule) | length(vrule) == 0) {
    algn <- paste(algn, paste(rep(align, cols), collapse=""), sep="")
  } else {
    # 'aux' holds the number of columns between consecutive vertical rules
    aux <- c(vrule[1], diff(c(vrule, cols)))
    aux2 <- sapply (aux[-1],
                    FUN=function(x) {
                      c("|", rep(align, x))
                    })
    algn <- paste(algn, paste(c(rep(align, aux[1]), unlist(aux2)), collapse=""),
                  sep="")
  }
  if ("r" %in% bty) {
    algn.end <- "|"
  } else {
    algn.end <- ""
  }
  algn <- paste(algn, algn.end, sep = "")
  l <- paste("\\begin{tabular}{", algn, "}", sep="")
  cat(l, file=out.file, sep="\n")
  if ('t' %in% bty) {
    cat("\\hline", file=out.file, sep="\n")
  }
  # Rows of the table; an \hline is emitted after every row listed in 'hrule'
  current.row <- 1
  for (r in 1:rows) {
    l <- processTableRow(row=table[r, ], bold=bold[r, ], italic=italic[r, ],
                         mark=mark[r,], mark.char=mark.char, format=format,
                         digits=digits, na.as=na.as)
    cat(l, file=out.file, sep="\n")
    if (current.row %in% hrule) {
      cat("\\hline", file=out.file, sep="\n")
    }
    current.row <- current.row+1
  }
  if ('b' %in% bty) {
    cat("\\hline", file=out.file, sep="\n")
  }
  # End of tabular
  cat("\\end{tabular}", file=out.file, sep="\n")
  # end table enviroment if wrap.as.table
  if(wrap.as.table){
    # caption on botton
    if(caption.position=="b" & !is.null(caption)){
      l <- paste0("\\caption{", caption, "}")
      cat(l, file=out.file, sep="\n")
    }
    # label
    if(!is.null(label)){
      l <- paste0("\\label{", label, "}")
      cat(l, file=out.file, sep="\n")
    }
    # end of table
    cat("\\end{table}", file=out.file, sep="\n")
  }
  # Bye bye; only close connections we opened ourselves (never stdout)
  if(!is.null(file)) {
    close(out.file)
  }
}
<file_sep>/vignettes/Statistical_assessment_of_the_differences.Rmd
---
title: "Statistical Assessment of the Differences"
author: "<NAME> and <NAME>"
date: "`r Sys.Date()`"
output: rmarkdown::html_vignette
bibliography: refs.bib
vignette: >
%\VignetteIndexEntry{Statistical Assessment of the Differences}
%\VignetteEngine{knitr::rmarkdown}
\usepackage[utf8]{inputenc}
---
# Statistical Assessment of the Differences
This vignette shows the use of the package `scmamp` to assess the statistical differences between the results obtained by a number of algorithms in different problems. This is a typical task in areas such as Machine Learning or Optimization, where algorithms are typically compared measuring their performance in different instances of problems, datasets, etc. However, a similar procedure may be used in other contexts.
The package and this vignette are based mainly on the papers @garcia2010 and @garcia2008, which is an extension of Demšar's paper (@demsar2006).
If you are familiar with these papers and want a quick guide, jump to the last section of this document (_Summary_). Then, you can review the rest of the vignette for more details.
This vignette is divided into three different parts. The first reviews the global analysis for any algorithm behaving differently, and the other two are concerned with the post-hoc tests run in case not all the algorithms have the same performance. The second part shows how all pairwise tests can be conducted and the third how comparisons with respect to a control algorithm can be done. The election of the comparison will depend on the type of experimentation and the conclusions we want to draw.
As a guiding examples, we will use the results included in @garcia2008 (Table 2) and the part of the results in @blum2015. These data is available in the package and can be loaded typing:
```{r , prompt=TRUE}
library("scmamp")
library("ggplot2")
library("Rgraphviz")
data(data_blum_2015)
data(data_gh_2008)
head(data.blum.2015)
head(data.gh.2008)
```
These data represents the accuracy obtained by some algorithms in different datasets. Any other data can be used, provided that it is a **named** `matrix` or `data.frame`. For more details about how to load experimental results, please see the vignette concerning the loading and manipulation of data.
## Parametric vs. non-parametric
One of the very first things we need to decide is whether we can safely use parametric tests to assess the differences between algorithms. This is quite a tricky question, as using parametric tests when the assumptions hold yields a more powerful test, but the opposite may be true if they do not hold.
The classical parametric tests assume that the data is distributed according to a Gaussian distribution and, in most cases, that the variance is the same for all the samples. When this is true we can use these tests to have an increased power (compared with non parametric tests). Although there are statistical tests to check both the normality and the homocedasticity ---many of them can be found in R, e.g., `shapiro.test` and `bartlett.test`---, they are not very powerful and this, together with the typically small samples, render them as non-effective tools.
For this reason, in this package we have included a couple of functions to visually valorate the assumptions of normality and homocedasticity. Note that, in some cases, there is no need to test this, as the data may be evidently non unimodal. An example of such situation is the `data.blum.2015` data, where we have different types of problems, each with values in a different scale---You can check this by using the following functions to visualize the data.
The first plot we can crate is a density plot, using the function `plotDensities`. This function uses a kernel density estimation (KDE) of the distribution of the samples to visualize it.
```{r,prompt=TRUE , fig.width=7, fig.height=5, warning=FALSE}
plotDensities (data=data.gh.2008, size=1.1)
```
The first and only mandatory argument is the matrix that includes the results for each algorithm. The plots are created using [`ggplot2`](http://ggplot2.org/), which is a powerful tool to create plots in R. Moreover, the result of this function is an object that can be further modified, as we will see in other plots. The function also accepts additional parameters that are directly passed to `ggplot2`'s function `geom_line`, which is the one that actually creates the lines; the `size = 1.1` argument is an example of this.
In this plot we can see that most of the samples can hardly be regarded as normal, mainly due to their lack of symmetry and unimodality. Not only does the assumption of normality not hold, but the assumption of equal variances also seems to be false.
An additional kind of plot we can use to visually check the goodness of fit of the samples is the classical quantile-quantile plot, which represents the empirical and theoretical quantiles---assuming a Gaussian distribution. When all the points lay in the diagonal of the plot, both theoretical and empirical quantiles are equal and, thus, we can assume that the data can be approached with a Gaussian distribution. We can create these plots for each column using the `qqplotGaussian` function.
```{r,prompt=TRUE , fig.width=7, fig.height=5}
qqplot <- qqplotGaussian (data.gh.2008[,"k-NN(k=1)"], size=5 , col="orchid")
qqplot + theme_classic()
```
As can be seen in the plot, there are regions where the sample points are away from the diagonal. This is particularly evident in the left part of the plot, due to the relatively long left tail of the empirical distribution. Additionally, the example shows one of the possible ways in which the result of the function can be modified to change its appearance. For the interested reader, there is an excellent book covering the use and the philosophy behind `ggplot2` (@wickham2009).
As a conclusion, in @demsar2006 the author argues against the use of parametric tests in the context of Machine Learning experiment analysis (see @demsar2006, page 10); similar arguments can be applied to the evaluation of optimization algorithms.
## Testing for differences
Once the question parametric/non parametric is clear, the next step should be the use of a statistical test to check whether there are differences among the algorithms or not. In other words, determine if there is one or more algorithms whose performance can be regarded as significantly different.
In case the required assumptions are reasonably true, _F-test for K population means (ANOVA)_ test can be used to assess the differences between the algorithms (the package include the function `anovaTest` to do this). However, as we have seen, in our running example it is clear that we cannot assume normality in the data. Therefore, in this example we will restrict the use of the package to non parametric methods.
This package includes two non parametric methods to compare multiple algorithms, the classical Friedman test (@friedman1937) and a modification by <NAME> Davenport (@iman1980). Although R's base installation includes the former, we have reimplemented it in this package as explained in @demsar2006, page 11. These tests are available through functions `friedmanTest` and `imanDavenportTest`. In addition, the Friedman's Aligned Rank Test and Quade Test presented in @garcia2010 have been implemented---Note that this paper includes some errors in the computations due to some bugs in their code; for this reason, the results obtained with this package may not match those in the paper.
```{r,prompt=TRUE}
friedmanTest(data.gh.2008)
imanDavenportTest(data.gh.2008)
friedmanAlignedRanksTest(data.gh.2008)
quadeTest(data.gh.2008)
```
The obtained p-values indicate that we can safely reject the null hypothesis that all the algorithms perform the same. Therefore, we can proceed with the post-hoc test.
We have two options, comparing all the algorithms among them or comparing all with a control. The latter is the typical situation where we are comparing a new proposal with the state of the art algorithms while the former fits better in a review of existing methods.
## Pairwise differences
Once we have verified that not all the performances of the algorithms are the same, the next step is analyzing which are different. For that, we have different possibilities.
### Nemenyi _post hoc_ test
In @demsar2006 the author proposes the use of the Nemenyi test that compares all the algorithms pairwise. It is the non parametric equivalent to the Tukey _post hoc_ test for ANOVA (which is also available through the `tukeyPost` function), and is based on the absolute difference of the average rankings of the classifiers. For a significance level $\alpha$ the test determines the critical difference (CD); if the difference between the average ranking of two algorithms is greater than CD, then the null hypothesis that the algorithms have the same performance is rejected. The function `nemenyiTest` computes the critical difference and all the pairwise differences.
```{r,prompt=TRUE}
test <- nemenyiTest (data.gh.2008, alpha=0.05)
test
test$diff.matrix
abs(test$diff.matrix) > test$statistic
```
As the code above shows, with a significance of $\alpha = 0.05$ any two algorithms with a difference in the mean rank above `r round(test$statistic,3)` will be regarded as non equal. The test also returns a matrix with all the pair differences, so it can be used to see for which pairs the null hypothesis is rejected. As an example, the performance of C4.5 and 1NN are different, but we cannot state that C4.5 and Naive Bayes have a different behaviour.
In @demsar2006 the author proposes a plot to visually check the differences, the _critical difference plot_. This kind of plot can be created using the `plotCD` function, which has two parameters, the data matrix and the significance level. In the plot, those algorithms that are not joined by a line can be regarded as different.
```{r,prompt=TRUE,fig.width=7 , fig.height=3}
plotCD (data.gh.2008, alpha=0.05, cex=1.25)
plotCD (data.gh.2008, alpha=0.01, cex=1.25)
```
Note that the text in the plot is defined in absolute size, while the rest is relative to the size of the plot. The default size (0.75) is tuned for a plot width of, roughly, 7 inches. In case the dimensions of the plot need to be bigger, the default size can be changed with the `cex` option, as in the example above (the dimension of these plots is 12x4 inches).
This procedure is, among those implemented in the package, the most conservative one---i.e., the one with the least statistical power. However, it provides an intuitive way to visualize the results.
### Corrected pairwise tests
The second approach consists in using a classical test to assess all the pairwise differences between algorithms and then correct the p-values for multiple testing. In a parametric context the typical choice would be a paired t-test but, given that we cannot assume normality, we should use a non parametric test, such as Wilcoxon signed-rank test or the corresponding _post hoc_ tests for Friedman, Friedman's Aligned Ranks and Quade tests (see @garcia2008, Section 2.1 and @garcia2010, Section 5).
The package includes the implementations of the _post hoc_ tests mentioned in @garcia2010 through functions `friedmanPost`, `friedmanAlignedRanksPost` and `quadePost`.
```{r,prompt=TRUE}
friedmanPost(data=data.gh.2008, control=NULL)
quadePost(data=data.gh.2008, control=NULL)
pv.matrix <- friedmanAlignedRanksPost(data=data.gh.2008, control=NULL)
```
For the sake of flexibility, there is a special wrapper function, `customPost`, that allows applying any test. This function has a special argument, `test`, that has to be a function with, at least, two arguments, `x` and `y`, that performs the desired test. For more information, type `?customPost`.
The chosen test is applied to the $\frac{k(k-1)}{2}$ pairwise comparisons, where $k$ is the number of algorithms. Due to the multiple application of the test, some p-value correction method has to be used in order to control the _familywise error rate_.
There are many general methods to correct these p-values, such as the well known Bonferroni procedure or Holm's step-down method (@holm1979). However, these methods do not take into account the particular situation of pair-wise comparisons, where not every combination of null hypotheses can be true at the same time. As an example, suppose that we know that algorithms A and B are equal and, simultaneously, A and C are also equal. Then, we cannot reject the hypothesis that B and C are equal.
This problem was tackled by <NAME> (@shaffer1986). There are two procedures to correct the p-values, according to this paper. In the first one (sometimes called Shaffer static) the particular ordering of the null hypotheses is not taken into account and only the maximum number of simultaneous hypotheses is considered. The second one further limits the number of possible hypotheses by considering which particular hypotheses have been rejected. This increases the power of the method, but it is computationally very expensive. Instead of this procedure, in @garcia2008, the authors propose to use Bergmann and Hommel's method (@bergmann1988).
These procedures can be applied to a matrix of raw p-values using functions `adjustShaffer` and `adjustBergmannHommel`.
```{r,prompt=TRUE , warning=FALSE}
pv.matrix
adjustShaffer(pv.matrix)
pv.adj <- adjustBergmannHommel(pv.matrix)
pv.adj
```
The package also includes other correction methods, as we will see in the comparisons with a control algorithm. However, as these do not take into account the particular interactions between hypothesis, they are more restrictive approaches.
Bergmann and Hommel's correction is an extremely expensive method---in computational terms. However, the structures required to perform the correction are stored in the disk and, thus, it is computationally feasible up to 9 algorithms.
### Graphical representations
Contrary to what happens with the Nemenyi test, it makes no sense to draw a critical difference plot, since the critical differences are not constant throughout the comparisons. In absence of this intuitive plot, the package includes two types of plots to graphically display the results.
The first function is `drawAlgorithmGraph`, which plots a graph where the algorithms are the nodes and two nodes are linked if the null hypothesis of being equal cannot be rejected. This function makes use of the `Rgraphviz` package, so it has to be installed in order to use this function. The package is currently in [Bioconductor](www.bioconductor.org), so it can be installed as follows.
```{r,prompt=TRUE,eval=FALSE}
install.packages("BiocManager")
BiocManager::install("Rgraphviz")
```
The plot can incorporate information about each algorithm. In this case we will print the average ranking, in a similar way as in the critical difference plot.
```{r,prompt=TRUE,fig.width=7 , fig.height=5}
r.means <- colMeans(rankMatrix(data.gh.2008))
drawAlgorithmGraph(pvalue.matrix=pv.adj, mean.value=r.means, alpha=0.05,
font.size=10, node.width=3, node.height=1)
```
In the code above we can see that there is a parameter called `font.size`, that can be used to change the font size to adapt it to the size of the plot (in a similar way as it happens in the critical difference plot). In addition to this, there is a number of parameters that can allow the user to customize the plot. The options are:
* `...` Special argument used to pass additional parameters. Its main use is changing the layout (see example bellow)
* `highlight` It can be either `'min'`, `'max'` or `'none'`, to highlight the node with the minimum value, the maximum value or none, respectively.
* `highlight.color` A valid R color to fill the highlighted node
* `node.color` A valid R color to fill the rest of the nodes
* `font.color` A valid R color for the font
* `digits` Number of digits to round the value included in the node
* `node.width` Width of the node. By default it is set at 5
* `node.height` Height of the node. By default it is set at 2
The `Rgraphviz` package has a number of layouts that can be used to plot graphs (called `'dot'`, the one used by default, `'twopi'`, `'neato'`, `'circo'` and `'fdp'`). These layouts can be used including them right after the two first parameters.
```{r,prompt=TRUE,fig.width=7 , fig.height=5}
r.means <- colMeans (rankMatrix(data.gh.2008))
drawAlgorithmGraph (pvalue.matrix=pv.adj, mean.value=r.means, alpha=0.05, 'fdp',
highlight.color="red", node.color="white", font.color="black",
font.size=10, node.width=2, node.height=1)
```
This graph is the one corresponding to Bergmann and Hommel dynamic procedure. From its comparison with the previous one, we can check its increased power, as with the same $\alpha$ it rejects two more hypotheses, namely, that CN2 is equal to Naive Bayes and C4.5.
The second plot can be used to directly visualize the p-value matrix generated when doing all the pairwise comparisons. The function that creates such a plot is `plotPvalues`.
```{r,prompt=TRUE, fig.width=7 , fig.height=7, warning=FALSE}
plt <- plotPvalues(pvalue.matrix=pv.adj,
alg.order=order(r.means, decreasing=FALSE))
plt +
labs(title="Corrected p-values using Bergmann and Hommel procedure") +
scale_fill_gradientn("Corrected p-values" , colours = c("skyblue4" , "orange"))
```
The code above also shows how to modify some aesthetic aspects using `ggplot2`.
## Comparison with a control
In some experimentations we will be interested in comparing a set of algorithms with a control one---our proposal, typically. All the tests presented in the previous section can be also used in this case, fixing the `control` parameter to one of the algorithms in the data. When this parameter is not fixed ---or set as `NULL`---, all the pairwise comparisons are performed, but when it takes a (valid) value, all the algorithms are compared with a reference.
```{r,prompt=TRUE}
friedmanAlignedRanksPost(data.gh.2008, control = "NaiveBayes")
pv <- quadePost(data.gh.2008, control = 2)
```
As can be seen in the code above, the reference can be set either using the column name or its index. The values computed in this way can be corrected to cope with the problem of multiple testing. However, in this case, using Shaffer and Bergmann and Hommel procedures makes no sense, as we do not have all the comparisons. Instead, we can use any of the methods listed in @garcia2010. Some of these are implemented in the package and other are available through R's `p.adjust` function. In particular, the methods implemented are:
```{r,prompt=TRUE}
adjustHolland(pvalues=pv)
adjustFinner(pvalues=pv)
adjustRom(pvalues=pv, alpha=0.05)
adjustLi(pvalues=pv)
```
## Comparisons by groups of problems
In some empirical evaluations we may be interested in analyzing the results obtained in different groups of instances. For example, in the case of the data from @blum2015 we may be interested in evaluating the algorithms in each problem size (100, 1000 and 5000). Computing the p-value in such a scenario is as simple as with a single group, but the correction of the p-values is by no means trivial, as all the comparisons should be considered. This is particularly complex in Shaffer and Bergmann and Hommel corrections, as the information about multiple pairwise comparisons has to be introduced.
## Global functions
With the aim of further simplifying the use of the package we have defined two wrapper functions, `multipleComparisonTest`, to perform the multiple comparison tests and `postHocTest` to run the individual comparisons. Both methods can be used either grouping the problems or using the whole dataset. Note that, in case all the problems are grouped the number of tests performed increases and, thus, the global number of tests should be considered when the p-values are adjusted.
In the next section there are some examples of the use of these functions, so here we will briefly describe the main common arguments. Further information about the functions can be obtained from their help pages.
- `data` A matrix or data.frame containing the algorithm results. The matrix can contain additional information (such as that used for grouping problems).
- `algorithms` A vector (of, at least, size 2) with either the name or the indices of the columns in `data` that contain the observations that have to be compared. If this parameter is not passed (or if it is `NULL`), then all the columns except those used for grouping problems are regarded as algorithm columns.
- `group.by` A vector with either the name or the indices of the columns in `data` that have to be used to group the problems. For each combination of values in these columns a test (or set of tests) is performed.
- `test` Either a function or a string indicating the test to be used. The options and the type of function required depend on the function considered. For a complete list of options and the definition of the function please check the help page of each function.
- `correct` Either a function or a string indicating the type of correction that has to be performed. The complete list of options and the type of functions required can be consulted in the help pages. Note that, for pairwise comparisons, Shaffer's and Bergmann and Hommel's corrections can be used, but only if the problems are not grouped. In case they are grouped, there are additional test repetitions that have to be accounted for and, thus, these methods have to be adapted. So far the package does not include this option and, thus, any of the other, general methods have to be used.
- `alpha` Alpha value used only for Rom's correction.
- `...` Additional parameters to be passed to either the `test` or the `correct` functions. In the case of `postHocTest`, these arguments are also passed to the `rankMatrix` function that computes the ranks of the data. Therefore, this can be used to change the default behaviour that ranks as 1 the highest value. To rank first the lowest value you can add the `decreasing=FALSE` option to the call.
In the case of the `postHocTest` function, there are four additional parameters:
- `control` This argument can be the index or the name of the column that has to be used as reference, `min` or `max`. In the last two options, for each comparison, the algorithm with the minimum/maximum summarized value is used as reference. If `NULL` (or not provided), all the pairwise comparisons are performed.
- `use.rank` If `TRUE`, the rank of the algorithms is returned and used, in case `control` is set at `min` or `max`, to determine the control algorithm. If `FALSE`, the value in `data` is used instead.
- `sum.fun` Function to be used to summarize the data. By default, the average value is used. This function also receives the additional arguments passed to the function, so additional parameters can be passed to this function.
Regarding the output of the functions, it depends on whether the problems are grouped or not. In the case of `multipleComparisonsTest` function, if the data is not grouped the result is an `htest` object, as any of the functions that performs this type of test. If the data is grouped, then the output is a matrix with the p-values (raw and adjusted) obtained for each group. In the case of `postHocTest`, in both cases the function outputs the summarized data (grouped or not), the raw p-values and the corrected p-values. In case the data is grouped and all the pairwise comparisons are performed, then the p-values are in a three dimensional array, being the last dimension the group to which the p-values correspond.
## Summary
This section shows a couple of examples of typical comparisons done in the context of algorithm comparisons. In the first one all the data is included in a single comparison while in the second the data will be grouped according to the problem features.
The typical sequence of analysis includes, first, testing the presence of any algorithm that behaves differently, using a test that compares simultaneously all the algorithms. Then, provided that the null hypothesis is rejected, a _post hoc_ can be conducted. In case we can designate a control method, then the rest are tested against the control; in any other case, all the pairwise comparisons are performed.
For the first example we will use the dataset from @garcia2008.
```{r,full_process_1, prompt=TRUE}
alpha <- 0.05
data <- data.gh.2008
friedmanTest(data)
```
Alternatively, we can use any of the other methods implemented (e.g., `imanDavenportTest` or `quadeTest`), or the wrapper function `multipleComparisonTest`:
```{r,full_process_2, prompt=TRUE}
multipleComparisonTest(data=data, test="iman")
```
Provided that the p-value obtained is below $\alpha$, if we have no control method then we can proceed with all the pairwise comparisons using the `postHocTest` wrapper function ---alternatively you can use directly the functions that implement all the tests and corrections. In this case we can select any test for the comparisons. For the p-value correction, any method can be used, but in this particular case it is advisable to use Bergmann and Hommel's procedure if the number of algorithms to compare is 9 or less and Shaffer's method in case they are 10 or more. The reason is that these methods include the particularities of the pairwise comparisons in order to perform a less conservative correction, leading to statistically more powerful methods.
```{r,full_process_3, prompt=TRUE , fig.width=7 , fig.height=5}
post.results <- postHocTest(data=data, test="aligned ranks", correct="bergmann",
use.rank=TRUE)
post.results
alg.order <- order(post.results$summary)
plt <- plotPvalues(post.results$corrected.pval, alg.order=alg.order)
plt + labs(title=paste("Corrected p-values using Bergmann and Hommel procedure",sep=""))
drawAlgorithmGraph(post.results$corrected.pval, mean.value=post.results$summary,
alpha=alpha, font.size=10)
```
For the second example we will use the dataset `data.blum.2015`, which contains two columns, `Size` and `Radius`, that allow us grouping the problems. First, we will search for differences in each combination of size and radius. Then, given that in all the cases the null hypothesis can be safely rejected, we will proceed with the comparison of all the methods with a control, the `FrogCOL` algorithm.
```{r,full_process_4, prompt=TRUE}
data <- data.blum.2015
group.by <- c("Size","Radius")
multipleComparisonTest(data=data, group.by=group.by,
test="quade", correct="finner")
control <- "FrogCOL"
post.results <- postHocTest(data=data, group.by=group.by, control=control,
test="aligned ranks", correct="rom", use.rank=FALSE)
```
The results can be used to create a LaTeX table where the results without significant differences with respect to our control are highlighted in italic and the best results in bold font.
```{r,full_process_5, prompt=TRUE}
avg.val <- post.results$summary
best <- apply(avg.val, MARGIN=1,
FUN=function(x){
m <- max(x[-(1:2)])
return(c(FALSE, FALSE, x[-(1:2)]==m))
})
best <- t(best)
no.diff <- post.results$corrected.pval > alpha
# The size and radius columns set as false
no.diff[,1:2] <- FALSE
no.diff[is.na(no.diff)] <- FALSE
writeTabular(table=avg.val, format='f', bold=best, italic=no.diff,
hrule=c(0, 10, 20, 30), vrule=2, digits=c(0, 3, rep(2, 8)),
print.row.names = FALSE)
```
As an alternative analysis, we will compare, for each graph size, all the algorithms. Note that, in this case, as the data contains the `Radius` column that should not be included in the comparison, we have to specify the columns that contain the algorithms---or, alternatively, remove the column from the data.
```{r,full_process_6, prompt=TRUE, fig.width=7 , fig.height=5}
control <- NULL
group.by <- "Size"
post.results <- postHocTest(data=data, algorithms=3:10, group.by=group.by,
control=control, test="aligned ranks", correct="holland",
use.rank=TRUE)
# Plot the matrix for the first group
i <- 1
alg.order <- order(post.results$summary[i,-1])
plotPvalues(post.results$corrected.pval[, , i], alg.order=alg.order)
# Plot the matrix for the second group
i <- 2
alg.order <- order(post.results$summary[i,-1])
plotPvalues(post.results$corrected.pval[, , i], alg.order=alg.order)
# Plot the matrix for the third group
i <- 3
alg.order <- order(post.results$summary[i,-1])
plotPvalues(post.results$corrected.pval[, , i], alg.order=alg.order)
```
## References
<file_sep>/R/scmamp.R
# PACKAGE AND DATASET INFORMATION ----------------------------------------------
#' Statistical comparison of multiple algorithms
#'
#' This package has been developed to simplify the statistical assessment of algorithms when tested in different problems. It includes statistical tests, as well as some plotting functions.
#' @name scmamp
#' @author
#' <NAME> \email{<EMAIL>},
#' <NAME> \email{guzman.santafe@@unavarra.es}
#'
#' Maintainer: <NAME> \email{borja.calvo@@ehu.es}
#' @docType package
#' @aliases scmamp-package
#' @seealso For an overview of the use see #' \code{vignette(topic=
#' "Statistical_comparison_of_multiple_algorithms_in_multiple_problems",
#' package="scmamp")} and \code{vignette(topic="Data_loading_and_manipulation",
#' package="scmamp")}
NULL
#' Example in Garcia and Herrera (2008)
#'
#' Dataset corresponding to the accuracy of 5 classifiers in 30 datasets.
#' Each algorithm is in a column. This is the dataset used as example in
#' Garcia and Herrera (2008).
#'
#' @format A data frame with 5 columns and 30 rows
#' @source <NAME> and <NAME> (2008) An Extension on "Statistical
#' Comparisons of Classifiers over Multiple Data Sets" for all Pairwise
#' Comparisons. \emph{Journal of Machine Learning Research}. 9, 2677-2694.
#' @name data.gh.2008
NULL
#' Example in Garcia and Herrera (2010)
#'
#' Dataset corresponding to the accuracy of 4 classifiers in 24 datasets.
#' Each algorithm is in a column. This is the dataset used as example in
#' Garcia and Herrera (2010).
#'
#' @format A data frame with 4 columns and 24 rows
#' @source <NAME> and <NAME> (2010) Advanced Nonparametric Tests for
#' Multiple Comparison in the Design of Experiments in Computational Intelligence
#' and Data Mining: Experimental Analysis of Power. \emph{Information Sciences},
#' 180, 2044-2064.
#' @name data.gh.2010
NULL
#' Comparison of optimization algorithms in Blum \emph{et al.} (2015)
#'
#' This dataset contains part of the results obtained in the comparison of decentralized
#' optimization algorithms presented in Blum \emph{et al.} (2015). The dataset contains
#' 900 rows and 10 columns. Each row represents an instance of the maximum independent
#' set problem (a graph). The first two are descriptors of the problem in each
#' row (size and radius used to create random geometric graphs) and the other 8
#' contain the results obtained by 8 algorithms for the MIS problem instance.
#'
#'
#' @format A data frame with 10 columns and 900 rows
#' @source <NAME>, <NAME> and <NAME> (2015) FrogCOL and FrogMIS: New Decentralized Algorithms for Finding Large Independent Sets in Graphs. \emph{Swarm Intelligence}. In press.
#' @name data.blum.2015
NULL
# AUXILIAR FUNCTIONS -----------------------------------------------------------
correctPValues <- function(pvalues, correct) {
  # Auxiliar function to apply a p-value correction to either a vector
  # (or a one-row matrix) of p-values or to a symmetric matrix where each
  # p-value appears in both the upper and the lower half of the matrix.
  #
  # Args:
  #   pvalues: Vector or matrix of the p-values to correct
  #   correct: Function that takes a vector of p-values and returns the
  #            corrected values in the same order
  #
  # Returns:
  #   The corrected p-values, with the same shape (and dimnames) as the input
  #
  if (is.vector(pvalues) || nrow(pvalues) == 1) {
    # Simple case: correct the values directly.
    return(correct(pvalues))
  }
  # Symmetric matrix: correct each pairwise p-value only once, then write
  # the corrected value back into both halves of the matrix.
  num.algorithms <- nrow(pvalues)
  pair.ids <- generatePairs(num.algorithms, NULL)
  adjusted <- correct(pvalues[pair.ids])
  result <- pvalues
  result[pair.ids] <- adjusted
  result[pair.ids[, 2:1]] <- adjusted
  dimnames(result) <- dimnames(pvalues)
  return(result)
}
runPostHoc <- function (data, test, control, ...) {
  # Auxiliar function to conduct the post hoc test.
  #
  # Args:
  #   data:    Dataset where the test is conducted. It should contain only
  #            the values to compare.
  #   test:    Test to be performed; either a string naming one of the
  #            implemented tests or a function with (at least) arguments
  #            'x' and 'y' that performs the desired paired test.
  #   control: Algorithm used as control; if NULL, all the pairwise
  #            comparisons are performed.
  #   ...:     Additional arguments, passed on to 'customPost' when 'test'
  #            is a function.
  #
  # Returns:
  #   The obtained (raw, uncorrected) p-values.
  #
  if (is.function(test)) {
    matrix.raw <- customPost(data=data, control=control, test=test, ...)
  }else{
    # The switch lazily evaluates only the matched branch; unknown strings
    # fall through to the stop() call.
    matrix.raw <- switch(test,
                         "t-test"= {
                           customPost(data=data, control=control,
                                      test=function(x, y) {
                                        return(t.test(x, y, paired=TRUE))
                                      })
                         },
                         "wilcoxon"= {
                           customPost(data=data, control=control,
                                      test=function(x,y) {
                                        return(wilcoxonSignedTest(x,y))
                                      })
                         },
                         "friedman"= {
                           friedmanPost(data=data,
                                        control=control)
                         },
                         "aligned ranks"= {
                           friedmanAlignedRanksPost(data=data,
                                                    control=control)
                         },
                         "quade"= {
                           quadePost(data=data,
                                     control=control)
                         },
                         "tukey"= {
                           tukeyPost(data=data,
                                     control=control)
                         },
                         # Fixed error message: list all handled options
                         # ('t-test' and 'tukey' were missing) and correct
                         # the typos "texts"/"associted".
                         stop("Unknown test. Valid options in the current version ",
                              "are 't-test', 'wilcoxon', 'friedman', ",
                              "'aligned ranks', 'quade' and 'tukey'. ",
                              "Alternatively, you can pass a function ",
                              "that performs a paired statistical test which ",
                              "should have, at least, two parameters, 'x' and 'y' ",
                              "and returns the p-value associated to the comparison"))
  }
  return(matrix.raw)
}
# EXPORTED FUNCTIONS -----------------------------------------------------------
#' @title Post hoc tests for multiple comparison analyses
#'
#' @description This function is a wrapper to run the post hoc tests. It can run both all vs. control and all vs. all post hoc tests.
#' @param data A matrix or data frame containing the results obtained by the algorithms (columns) in each problem (rows). It can contain additional columns, but if any of the columns has to be discarded (not used neither to group the problems nor to be part of the comparison), then it is mandatory to indicate, in the \code{algorithms} parameter, which columns contain the algorithm information.
#' @param algorithms Vector with either the names or the indices of the columns that contain the values to be tested. If not provided, the function assumes that all the columns except those indicated in \code{group.by} represent the results obtained by an algorithm.
#' @param group.by Vector with either the names or the indices of the columns to be used to group the data. Each group is tested independently. If \code{NULL}, all the data is used for a single comparison.
#' @param test Parameter that indicates the statistical test to be used. It can be either a string indicating one of the available test or a function. As a string, it can take the following values:
#' \itemize{
#' \item {\code{'wilcoxon'} - Wilcoxon Signed Rank test, as in Demsar (2006)}
#' \item {\code{'t-test'} - t-test (R's t.test function with paired option set at \code{TRUE})}
#' \item {\code{'friedman'} - Friedman post hoc test, as in Demsar (2006)}
#' \item {\code{'aligned ranks'} Friedman's Aligned Ranks post hoc test, as in Garcia and Herrera (2010)}
#' \item {\code{'quade'} - Quade post hoc test, as in Garcia and Herrera (2010)}
#' \item {\code{'tukey'} - Tukey's ANOVA post hoc test, as in Test 28 in Kanji (2006).}
#' }
#'
#' If a function is provided, then it has to have as first argument a matrix containing the columns to be compared. The function has to return a list with, at least, an element named \code{p.value} (as the \code{htest} objects that are usually returned by R's test implementations).
#' @param control Either the name or the index of a column in the dataset (one of those in the \code{algorithms} vector), to be used as control. Alternatively, this argument can be \code{'min'}, to select the algorithm with the minimum value, \code{'max'}, to select the algorithm with the maximum value as control. If the argument is not provided (or is \code{NULL}), all the pairwise comparisons are performed instead of all vs. control comparisons.
#' @param use.rank If \code{TRUE}, then the summarization of the data is based on the ranks, rather than on the actual values. The selection of the algorithm with the maximum or minimum value is also done in terms of the summarized ranking.
#' @param sum.fun Function to be used to summarize the data. By default, average is used.
#' @param correct Either string indicating the type of correction that has to be applied or a function to correct the p-values for multiple testing; This parameter is only need in case the data is grouped. As a string, the valid values are:
#' \itemize{
#' \item{\code{shaffer} - Shaffer's (static) procedure, as in Garcia and Herrera (2008)}
#' \item{\code{bergmann} - Bergman and Hommel's procedure (similar to Shaffer dynamic), as in Garcia and Herrera (2008)}
#' \item{\code{holland} - Holland's procedure, as in Garcia and Herrera (2010)}
#' \item{\code{finner} - Finner's procedure, as in Garcia and Herrera (2010)}
#' \item{\code{rom} - Rom's procedure, as in Garcia and Herrera (2010)}
#' \item{\code{li} - Li's procedure, as in Garcia and Herrera (2010)}
#' \item{Any of the methods implemented in the \code{p.adjust} function. For a list of options, type \code{p.adjust.methods}}
#' }.
#' If a function is provided, then it has to receive, as its first argument, a vector of p-values to be corrected and has to return a vector with the corrected p-values \emph{in the same order} as the input vector.
#' @param alpha Alpha value used in Rom's correction. By default, it is set at 0.05.
#' @param ... Special argument used to pass additional parameters to the statistical test and the correction method.
#' @return In all cases the function returns a list with three elements, the summarization of the data (a row per group), the raw p-values and the corrected p-values. When the data is grouped and all the pairwise comparisons are performed (no control is provided), the p-values are in three dimensional arrays where the last dimension corresponds to the group. In any other case the result is a matrix with one or more rows.
#'
#' Note that Shaffer and Bergmann and Hommel's correction can only be applied when all the pairwise tests are conducted, due to their assumptions. Moreover, its use when the data is grouped (multiple pairwise comparisons) is not trivial and, thus, it is not possible to use it when the data is grouped.
#'
#' @seealso \code{\link{friedmanPost}}, \code{\link{friedmanAlignedRanksPost}}, \code{\link{quadePost}}, \code{\link{tukeyPost}}, \code{\link{adjustShaffer}}, \code{\link{adjustBergmannHommel}}, \code{\link{adjustHolland}}, \code{\link{adjustFinner}}, \code{\link{adjustRom}}, \code{\link{adjustLi}}
#'
#' @references <NAME> and <NAME> (2010) Advanced nonparametric tests for multiple comparisons in the design of experiments in computational intelligence and data mining: Experimental analysis of power. \emph{Information Sciences}, 180, 2044-2064.
#' @references <NAME>. and <NAME>. (2008) An Extension on "Statistical Comparisons of Classifiers over Multiple Data Sets" for All Pairwise Comparisons. \emph{Journal of Machine Learning Research}, 9, 2677-2694.
#' @references <NAME>. (2006) \emph{100 Statistical Tests}. SAGE Publications Ltd, 3rd edition.
#' @references <NAME>. (2006) Statistical Comparisons of Classifiers over Multiple Data Sets. \emph{Journal of Machine Learning Research}, 7, 1-30.
#'
#' @examples
#' # Grouped data, all pairwise
#' data(data_blum_2015)
#' res <- postHocTest (data=data.blum.2015, algorithms=c("FrogCOL", "FrogMIS", "FruitFly"),
#' use.rank=TRUE, group.by=c("Size"), test="quade", correct="finner")
#'
#' # Data summarization
#' res$summary
#'
#' # Corrected pvalues for the first group
#' res$corrected.pval[, , 1]
#'
#' # Grouped data, all vs. control
#' res <- postHocTest (data=data.blum.2015, control="max", use.rank=FALSE,
#' group.by=c("Size","Radius"), test="wilcoxon", correct="finner")
#'
#' # Data summarization
#' res$summary
#'
#' # Corrected pvalues
#' res$corrected.pval
#'
#' # Not grouped data
#' data(data_gh_2008)
#' postHocTest (data=data.gh.2008, test="aligned ranks", correct="bergmann")
#'
postHocTest <- function (data, algorithms=NULL, group.by=NULL, test="friedman",
                         control=NULL, use.rank=FALSE, sum.fun=mean,
                         correct="finner", alpha=0.05, ... ) {
  # If data is a matrix, convert it to a data.frame to avoid problems with the
  # use of further methods.
  if(is.matrix(data)){
    data <- data.frame(data)
  }
  # If there are only two algorithms the all vs. all approach is actually equal
  # to all vs. control. To avoid problems trying to generate a matrix with only
  # one row we take the second algorithm as the control.
  if (length(algorithms) == 2 & is.null(control)) {
    control <- algorithms[2]
  }
  # Convert string columns to their corresponding ID.
  if (!is.null(group.by) & is.character(group.by)) {
    if (!all(group.by %in% colnames(data))) {
      warning("Not all the columns indicated in the 'group.by' argument are in ",
              "the dataset. None existing columns will be ignored")
    }
    group.by <- which(colnames(data) %in% group.by)
  }
  # Remove any index out of bounds.
  sbt <- group.by > 0 & group.by <= ncol(data)
  if (!all(sbt)) {
    warning("Not all the columns indicated in the 'group.by' argument are in ",
            "the dataset. Out of range values will be ignored.")
  }
  group.by <- subset(group.by, subset=sbt)
  # In case there is not a list of algorithms, all the columns except those
  # used to group the data are regarded as algorithm results.
  if (is.null(algorithms)) {
    algorithms <- which(!(1:ncol(data) %in% group.by))
  } else {
    if (is.character(algorithms)) {
      if (!all(algorithms %in% colnames(data))) {
        warning("Not all the columns indicated in the 'algorithms' argument are in ",
                "the dataset. None existing columns will be ignored")
      }
      algorithms <- which(colnames(data) %in% algorithms)
    }
    sbt <- algorithms > 0 & algorithms <= ncol(data)
    if (!all(sbt)) {
      # FIX: the message wrongly referred to 'group.by' while validating the
      # 'algorithms' argument.
      warning("Not all the columns indicated in the 'algorithms' argument are in ",
              "the dataset. Out of range values will be ignored.")
    }
    algorithms <- subset(algorithms, subset=sbt)
  }
  # Just in case ...
  if (length(algorithms) < 2) {
    stop("At least two algorithms are required to run the function")
  }
  # Use the name for the control to avoid problems when filtering.
  if (!is.null(control)) {
    if (is.character(correct) & (correct == "shaffer" | correct == "bergmann")) {
      stop("Shaffer's and Bergman and Hommel's correction can only be used ",
           "when all the pairs are compared. For comparisons with a control ",
           "use any of the other corrections that do not take into account ",
           "the particular nature of all pairwise comparisons.")
    }
    if (is.numeric(control)) {
      control <- names(data)[control]
    }
  }
  # FIX: guard with is.character(correct) — comparing a function passed in
  # 'correct' with a string using == raises an error in R, so custom
  # correction functions could never be used with grouped data.
  if (!is.null(group.by) && is.character(correct) &&
      (correct == "shaffer" | correct == "bergmann")) {
    stop("Shaffer's and Bergmann and Hommel's corrections cannot be used with grouped data.",
         " Please select another correction method")
  }
  # Prepare the correction. When given as a string, map it onto the matching
  # adjustment function; otherwise use the provided function directly.
  if(is.character(correct)) {
    correct.name <- correct
    correct <- switch (correct,
                       "shaffer"=adjustShaffer,
                       "bergmann"=adjustBergmannHommel,
                       "holland"={
                         function(pvalues) {
                           fun <- adjustHolland
                           return(correctPValues(pvalues=pvalues,
                                                 correct=fun))
                         }
                       },
                       "finner"={
                         function(pvalues) {
                           fun <- adjustFinner
                           return(correctPValues(pvalues=pvalues,
                                                 correct=fun))
                         }
                       },
                       "rom"={
                         function(pvalues) {
                           # Rom's procedure also needs the alpha level.
                           fun <- function(pvalues) {
                             adjustRom(pvalues=pvalues, alpha=alpha)
                           }
                           return(correctPValues(pvalues=pvalues,
                                                 correct=fun))
                         }
                       },
                       "li"={
                         function(pvalues) {
                           fun <- adjustLi
                           return(correctPValues(pvalues=pvalues,
                                                 correct=fun))
                         }
                       },
                       {
                         # Default branch: fall back to stats::p.adjust.
                         if (!(correct %in% p.adjust.methods)){
                           stop("Non valid method for p.adjust function. ",
                                "Valid options are ",
                                paste(p.adjust.methods, collapse="; "),
                                ". Additionally, 'holland', 'finner', 'rom' and ",
                                "'li' are also valid options")
                         }
                         function(pvalues) {
                           fun <- function(pvalues) {
                             p.adjust(p=pvalues, method=correct.name)
                           }
                           return(correctPValues(pvalues=pvalues,
                                                 correct=fun))
                         }
                       })
  } else {
    # FIX: the name has to be captured from the 'correct' argument; the
    # original captured 'test'.
    correct.name <- deparse(substitute(correct))
  }
  # Build the summary matrix.
  if (is.null(group.by)){
    if (use.rank){
      aux <- rankMatrix(data=data[, algorithms], ...)
    }else{
      aux <- data[, algorithms]
    }
    # Note that in aux the group.by columns are at the beginning.
    # Some operations may change the name of the algorithms (special characters).
    sum.matrix <- summarizeData(data=aux, fun=sum.fun, group.by=NULL, ...)
    # When the data is not summarized by groups summarizeData returns a vector
    # of numeric values. As this may produce problems in other methods, we
    # force sum.matrix to be a matrix.
    if(is.numeric(sum.matrix)){
      sum.matrix <- t(as.matrix(sum.matrix))
    }
    colnames(sum.matrix) <- colnames(data)[algorithms]
  } else {
    if (use.rank) {
      aux <- cbind(data[, group.by], data.frame(rankMatrix(data=data[, algorithms], ...)))
    } else {
      aux <- cbind(data[, group.by], data[, algorithms])
    }
    # Note that in aux the group.by columns are at the beginning.
    sum.matrix <- summarizeData(data=aux, fun=sum.fun, group.by=1:length(group.by), ...)
    # Some operations may change the name of the algorithms (special characters).
    colnames(sum.matrix) <- names(data)[c(group.by, algorithms)]
  }
  if (!is.null(group.by)){
    # Generate all the groups as a data.frame.
    groups <- unique(data[, group.by])
    # In case the result is a vector, convert it into a data.frame.
    if(length(group.by)==1) {
      groups <- data.frame(groups)
    }
    names(groups) <- names(data)[group.by]
    # Run the post hoc test on the subset of rows belonging to group i and
    # return its raw p-values.
    getRawPvalues <- function (i) {
      # Filter the data.
      rows <- rep(TRUE, nrow(data))
      for (j in seq(along.with=group.by)) {
        g <- group.by[j]
        rows <- rows & data[, g]==groups[i, j]
      }
      data.sub <- subset (data, rows)[, algorithms]
      # Check the control algorithm; "max"/"min" are resolved per group.
      if (use.rank){
        aux <- rankMatrix(data.sub)
      } else {
        aux <- data.sub
      }
      ref <- apply(aux, MARGIN=2, FUN=sum.fun)
      if (!is.null(control) & is.character(control)) {
        if (control=="max") {
          control <- which.max(ref)
        } else if (control=="min") {
          control <- which.min(ref)
        }
      }
      group.result <- runPostHoc (data.sub, test=test, control=control, ...)
      return(group.result)
    }
    group.raw.pval <- unlist(lapply (1:nrow(groups), FUN=getRawPvalues))
    # Now we create either the arrays or the matrix, depending on whether we
    # have a control or not.
    k <- length(algorithms)
    p <- nrow(groups)
    if (is.null(control)){
      dim(group.raw.pval) <- c(k, k, p)
      # Now correct the p-values. Note that we have to take only the upper part
      # of each matrix, so we create the index triplets we need for selecting
      # the correct p-values.
      pairs <- generatePairs(k, NULL)
      triplets <- cbind(pairs[rep(1:nrow(pairs), p), ],
                        unlist(lapply(1:p,
                                      FUN=function(i) {
                                        return (rep(i, nrow(pairs)))
                                      })))
      corrected.pvalues <- correct(group.raw.pval[triplets])
      group.corrected.pval <- group.raw.pval
      group.corrected.pval[triplets] <- corrected.pvalues
      group.corrected.pval[triplets[,c(2,1,3)]] <- corrected.pvalues
      # Name the dimensions.
      group.names <- paste(names(groups)[1], groups[, 1], sep=": ")
      if(ncol(groups) > 1) {
        for (j in 2:ncol(groups)) {
          group.names <- cbind(group.names,
                               paste(names(groups)[j], groups[, j], sep=": "))
        }
        group.names <- apply(group.names, MARGIN=1,
                             FUN=function(x) {
                               return(paste(x, collapse="; "))
                             })
      }
      dimnames(group.raw.pval) <- list(names(data)[algorithms],
                                       names(data)[algorithms],
                                       group.names)
      dimnames(group.corrected.pval) <- list(names(data)[algorithms],
                                             names(data)[algorithms],
                                             group.names)
    } else {
      # Correct with all the values.
      group.corrected.pval <- correct(group.raw.pval)
      dim(group.raw.pval) <- c(k,p)
      dim(group.corrected.pval) <- c(k,p)
      group.raw.pval <- t(group.raw.pval)
      group.corrected.pval <- t(group.corrected.pval)
      colnames(group.raw.pval) <- names(data)[algorithms]
      colnames(group.corrected.pval) <- names(data)[algorithms]
      group.raw.pval <- cbind(groups, group.raw.pval)
      group.corrected.pval <- cbind(groups, group.corrected.pval)
    }
    raw.pval <- group.raw.pval
    corrected.pval <- group.corrected.pval
  } else {
    # FIX: the original tested is.null(control) here, which made the whole
    # "max"/"min" resolution dead code for ungrouped data and passed the
    # literal string straight to runPostHoc.
    if (!is.null(control)){
      # Check the control algorithm.
      aux <- data[, algorithms]
      if (use.rank){
        aux <- rankMatrix(aux, ...)
      }
      ref <- apply(aux, MARGIN=2, FUN=sum.fun)
      if (is.character(control)) {
        if (control=="max") {
          control <- which.max(ref)
        } else if (control=="min") {
          control <- which.min(ref)
        }
      }
    }
    raw.pval <- runPostHoc(data[, algorithms], test=test, control=control, ...)
    corrected.pval <- correct(raw.pval)
  }
  results <- list(summary=sum.matrix, raw.pval=raw.pval,
                  corrected.pval=corrected.pval)
  return(results)
}
#' @title Tests for multiple comparisons
#'
#' @description This function is a wrapper to multiple comparison tests.
#' @param data A matrix or data frame containing the results obtained by the algorithms (columns) in each problem (rows). It can contain additional columns, but if any of the column has to be discarderd (not used neither to group the problems nor to be part of the comparison), then it is mandatory to indicate, in the \code{algorithms} parameter, which columns contain the algorithm information.
#' @param algorithms Vector with either the names or the indices of the columns that contain the values to be tested. If not provided, the function assumes that all the columns except those indicated in \code{group.by} represent the results obtained by an algorithm.
#' @param group.by Vector with either the names or the indices of the columns to be used to group the data. Each group is tested independently. If \code{NULL}, all the data is used for a single comparison.
#' @param test Parameter that indicates the statistical test to be used. It can be either a string indicating one of the available test or a function. As a string, it can take the following values:
#' \itemize{
#' \item {\code{'aligned ranks'} Friedman's Aligned Ranks test, as in Garcia and Herrera (2010)}
#' \item {\code{'anova'} - ANOVA test, as in Test 22 in Kanji (2006).}
#' \item {\code{'friedman'} - Friedman test, as in Garcia and Herrera (2010)}
#' \item {\code{'iman'} - <NAME>’s modification of Friedman’s test, as in Demsar (2006)}
#' \item {\code{'quade'} - Quade test, as in Garcia and Herrera (2010)}
#' }
#'
#' If a function is provided, then it has to have as first argument a matrix containing the columns to be compared. The function has to return a list with, at least, an element named \code{p.value} (as the \code{htest} objects that are usually returned by R's test implementations).
#' @param correct Either string indicating the type of correction that has to be applied or a function to correct the p-values for multiple testing; This parameter is only need in case the data is grouped. As a string, the valid values are:
#' \itemize{
#' \item{\code{holland} - Holland's procedure, as in Garcia and Herrera (2010)}
#' \item{\code{finner} - Finner's procedure, as in Garcia and Herrera (2010)}
#' \item{\code{rom} - Rom's procedure, as in Garcia and Herrera (2010)}
#' \item{\code{li} - Li's procedure, as in Garcia and Herrera (2010)}
#' \item{Any of the methods implemented in the \code{p.adjust} function. For a list of options, type \code{p.adjust.methods}}
#' }.
#' If a function is provided, then it has to receive, as first argument, a vector of p-values to be corrected and has to return a vector with the corrected p-values \emph{in the same order} as the input vector.
#' @param alpha Alpha value used in Rom's correction. By default, set at 0.05.
#' @param ... Special argument used to pass additional parameters to the statistical test and the correction method.
#' @return In case the \code{group.by} argument is not provided (or it is \code{NULL}), the function return an object of class \code{htest}. If columns for grouping are provided, then the function returns a matrix that includes, for each group, the values of the \code{group.by} columns, the raw p-value and the corrected p-value.
#'
#' @seealso \code{\link{friedmanTest}}, \code{\link{friedmanAlignedRanksTest}}, \code{\link{quadeTest}}, \code{\link{anovaTest}}, \code{\link{adjustShaffer}}, \code{\link{adjustBergmannHommel}}, \code{\link{adjustHolland}}, \code{\link{adjustFinner}}, \code{\link{adjustRom}}, \code{\link{adjustLi}}
#'
#' @references <NAME> and <NAME> (2010) Advanced nonparametric tests for multiple comparisons in the design of experiments in computational intelligence and data mining: Experimental analysis of power. \emph{Information Sciences}, 180, 2044-2064.
#' @references Kanji, <NAME>. (2006) \emph{100 Statistical Tests}. SAGE Publications Ltd, 3rd edition.
#'
#' @examples
#' # Grouped data
#' data(data_blum_2015)
#' multipleComparisonTest (data=data.blum.2015,
#' algorithms=c("FrogCOL", "FrogMIS", "FruitFly"),
#' group.by=c("Size", "Radius"),
#' test="quade", correct="finner")
#' # Not grouped data
#' data(data_gh_2008)
#' multipleComparisonTest (data=data.gh.2008, test="aligned ranks",
#' correct="hochberg")
#'
multipleComparisonTest <- function (data, algorithms=NULL, group.by=NULL,
                                    test="aligned ranks", correct="finner",
                                    alpha=0.05, ...){
  # Convert string columns to their corresponding ID.
  if (!is.null(group.by) & is.character(group.by)) {
    if (!all(group.by %in% colnames(data))) {
      warning("Not all the columns indicated in the 'group.by' argument are in ",
              "the dataset. None existing columns will be ignored")
    }
    group.by <- which(colnames(data) %in% group.by)
  }
  # Remove any index out of bounds.
  sbt <- group.by > 0 & group.by <= ncol(data)
  if (!all(sbt)) {
    warning("Not all the columns indicated in the 'group.by' argument are in ",
            "the dataset. Out of range values will be ignored.")
  }
  group.by <- subset(group.by, subset=sbt)
  # In case there is not a list of algorithms, all the columns except those
  # used to group the data are regarded as algorithm results.
  if (is.null(algorithms)) {
    algorithms <- which(!(1:ncol(data) %in% group.by))
  } else {
    # Same processing as with 'group.by'.
    if (is.character(algorithms)) {
      if (!all(algorithms %in% colnames(data))) {
        warning("Not all the columns indicated in the 'algorithms' argument are in ",
                "the dataset. None existing columns will be ignored")
      }
      algorithms <- which(colnames(data) %in% algorithms)
    }
    sbt <- algorithms > 0 & algorithms <= ncol(data)
    if (!all(sbt)) {
      # FIX: the message wrongly referred to 'group.by' while validating the
      # 'algorithms' argument.
      warning("Not all the columns indicated in the 'algorithms' argument are in ",
              "the dataset. Out of range values will be ignored.")
    }
    algorithms <- subset(algorithms, subset=sbt)
  }
  # Just in case ...
  if (length(algorithms) < 3) {
    stop("At least three algorithms are required to run the function")
  }
  # Prepare the test function; a string is mapped onto the corresponding
  # implementation, a function is used as-is.
  if(is.character(test)) {
    test.name <- test
    test <- switch (test,
                    "friedman"=friedmanTest,
                    "iman"=imanDavenportTest,
                    "aligned ranks"=friedmanAlignedRanksTest,
                    "quade"=quadeTest,
                    "anova"=anovaTest,
                    {
                      stop("Unknown test. Valid options are 'friedman', ",
                           "'aligned ranks', 'quade', 'anova', 'iman', or a function that ",
                           "gets as input a data.frame or matrix where each ",
                           "algorithm is in a column")
                    })
  } else {
    test.name <- deparse(substitute(test))
  }
  # Prepare the correction function.
  if(is.character(correct)) {
    correct.name <- correct
    correct <- switch (correct,
                       "holland"=adjustHolland,
                       "finner"=adjustFinner,
                       "rom"={
                         # Rom's procedure also needs the alpha level.
                         function(pvalues) {
                           return(adjustRom(pvalues=pvalues, alpha=alpha))
                         }
                       },
                       "li"=adjustLi,
                       {
                         # Default branch: fall back to stats::p.adjust.
                         if (!(correct %in% p.adjust.methods)){
                           stop("Non valid method for p.adjust function. ",
                                "Valid options are ",
                                paste(p.adjust.methods, collapse="; "),
                                ". Additionally, 'holland', 'finner', 'rom' and ",
                                "'li' are also valid options")
                         }
                         function(pvalues, ...) {
                           p.adjust(p=pvalues, method=correct.name)
                         }
                       })
  } else {
    # FIX: the name has to be captured from the 'correct' argument; the
    # original captured 'test', mislabelling the corrected p-value column.
    correct.name <- deparse(substitute(correct))
  }
  if (!is.null(group.by)){
    # Generate all the groups as a data.frame.
    groups <- unique(data[, group.by])
    # In case the result is a vector, convert it into a data.frame.
    if(length(group.by)==1) {
      groups <- data.frame(groups)
    }
    names(groups) <- names(data)[group.by]
    # Run the test on the subset of rows belonging to group i and return the
    # raw p-value.
    getRawPvalues <- function (i) {
      # Filter the data.
      rows <- rep(TRUE, nrow(data))
      for (j in seq(along.with=group.by)) {
        g <- group.by[j]
        rows <- rows & data[, g]==groups[i, j]
      }
      data.sub <- subset (data, subset=rows, select=algorithms)
      group.result <- test(data.sub, ...)$p.value
      return(group.result)
    }
    group.raw.pval <- sapply (1:nrow(groups), FUN=getRawPvalues)
    # Correct the p-values and generate the final matrix.
    group.corrected.pval <- correct(group.raw.pval, ...)
    res.matrix <- cbind(groups, group.raw.pval, group.corrected.pval)
    colnames(res.matrix) <- c(colnames(groups),
                              paste("Raw_p-val_", test.name, sep=""),
                              paste("Corrected_p-val_", correct.name, sep=""))
    result <- res.matrix
  } else {
    data.multipleComparisonTest <- data[, algorithms]
    result <- test(data.multipleComparisonTest, ...)
  }
  return(result)
}
#' @title Contrast estimation based on medians
#'
#' @description This function estimates the contrast between algorithms through the medians of the paired differences
#' @param data Matrix or data frame with the data to compare
#' @return A matrix where the estimation of all the pairs of differences are output.
#' The differences correspond to row-column.
#' @details The test has been implemented according to Garcia \emph{et al.} (2010), Section 3.3.
#' @references Kanji, <NAME>. (2006) \emph{100 Statistical Tests}. SAGE Publications Ltd, 3rd edition.
#' @examples
#' data(data_gh_2008)
#' contrastEstimationMatrix(data.gh.2008)
contrastEstimationMatrix <- function (data) {
  # Number of algorithms (columns) to compare.
  k <- ncol(data)
  # All pairs (i, j), i < j, of algorithm column indices.
  pairs <- generatePairs(k=k, control=NULL)
  # Median of the paired differences for every pair of algorithms.
  medians <- apply(pairs, MARGIN=1,
                   FUN=function(x) {
                     median(data[, x[1]] - data[, x[2]])
                   })
  # Skew-symmetric matrix of median differences (row - column).
  median.matrix <- matrix(0, nrow=k, ncol=k)
  median.matrix[pairs] <- medians
  median.matrix[pairs[, c(2, 1)]] <- -medians
  # Quantity m_i for each algorithm: mean of its row of median differences.
  adjusted.m <- rowMeans(median.matrix)
  # Final contrast estimation for each pair is the difference of the m_i
  # quantities (the original wrapped this scalar in a no-op median() call).
  adjusted.differences <- apply(pairs, MARGIN=1,
                                FUN=function(x) {
                                  adjusted.m[x[1]] - adjusted.m[x[2]]
                                })
  adjusted.matrix <- matrix(0, nrow=k, ncol=k)
  adjusted.matrix[pairs] <- adjusted.differences
  adjusted.matrix[pairs[, c(2, 1)]] <- -adjusted.differences
  colnames(adjusted.matrix) <- colnames(data)
  rownames(adjusted.matrix) <- colnames(data)
  return(adjusted.matrix)
}
"Markdown",
"R",
"RMarkdown"
] | 12 | R | dedenistiawan/scmamp | e435f9d48078f93ab49b23a19fdb6ef6e12ea5f9 | 3cdd03ccb6b1b2cd843e7f99fc4ddc00a04b8f85 |
refs/heads/master | <file_sep># WikiBrainDocker
# Overview
WikiBrainDocker is a dockerfile for the purpose of extracting
Cartograph-compatible data files from a Wikipedia of a given language. It is
based on the default Ubuntu Docker image, and automatically downloads and
installs all prerequisites for using WikiBrain and CartoExtractor. It
automatically installs and runs CartoExtractor and WikiBrain. It then runs
WikiBrain's loader on a Wikipedia of any language (specified by user), and runs
the CartoExtractor pipeline. In order to run this or any other docker image,
docker must be installed.
# Installation
First, download and install Docker if it is not already installed. Then, go to
Docker preferences, and under the "Advanced" tab, change the memory allocation
to at least 10.0 GB.
Download WikiBrainDocker as a zip file from its GitHub page and unzip it.
Alternatively, you can clone it with Git. Open a terminal window, and change
your working directory to the uncompressed WikiBrainDocker folder.
# Running
If you are on a unix system and are looking for ease of use, simply run the
following command:
./run.sh <WIKILANG>
where \<WIKILANG\> is the (usually two-letter) language code of the Wikipedia
you'd like to download from, e.g. "en" for English or "zh" for Chinese (without
the quotes). For example:
./run.sh en
will download and extract the whole English Wikipedia, then output
Cartograph-compatible data (in the form of .tsv files) to ./output (where . is
the current working directory).
Alternatively, one can use "docker build -t IMAGENAME ." where IMAGENAME is the
name you want to give your image (e.g. "WikiBrain") as a way of building
the docker image. Once the image has been successfully built, run it by typing
"docker run --sysctl kernel.shmmax=64205988352 --sysctl kernel.shmall=15675290
-v HOMEPATHFOROUTPUTFILES:/output --name outputFiles -it IMAGENAME" to then
begin to run the shell.
The dockerfile does the following when it builds the image:
1. Install Maven
2. Install Java 8
3. Check out and configure the wikibrain and cartoextractor repositories
4. Install postgreSQL and postGIS
When a user runs the built image, it first does the following:
5. Start up a PostgreSQL server daemon
6. Add a "wikibrain" user and a "wikibrain\_en" database in Postgresql
7. Compile and generate data from the Wikibrain loader file for the simple
English Wikipedia database
8. Use that data in order to generate tsv files for use in visualization using
the CartoExtractor extractor functionality.
9. Save the generated files in a volume accessible to
other containers and the local host
10. Run a bash terminal for user input in the event that they'd like to modify
the parameters.
# Runtime defaults
The "docker run" command in buildrun.sh automatically does the following:
1. Set the kernel shmall value to 15.6 megabytes.
2. Set the kernel shmmax value to 64.2 gigabytes.
3. Custom setting changes to postgreSQL can be found in the postgres.conf file.
These settings overwrite the default choices.
4. The JVM memory allocation is set to 9 gigabytes (at runtime).
Allocating less than this amount may lead to errors at runtime.
When running the loader and extractor files on your own, the following commands
are done as default:
- ./wb-java.sh -Xmx3500m org.wikibrain.Loader -l LANGUAGE
- exec:java -Dexec.mainClass="info.cartograph.Extractor" -Dexec.args="-o /output --base-dir ../wikibrain -r 1"
# Software Versions
The versions for software installed are:
- Java 8
- PostgreSQL version 9.5
- PostGIS 2.3
- Ubuntu is based on the most current docker image available at runtime
# Changing output
By default, the /output directory is mapped to ./output. This directory can be
changed in the ./run.sh script before running. If you wish to use your own
modified WikiBrain or CartoExtractor library, the devMode branch includes a
separate dockerfile and run.sh script which will install all the necessary
software but instead of cloning wikibrains and cartoextractor from the git
master, this branch instead relies upon the user providing their own copy of
WikiBrain and CartoExtractor. DevMode also doesn't run the loader and
extractor, and instead provides a direct access to bash on runtime.
To use editions of Wikipedia other than Simple English, change the string after
the -l flag when running the WikiBrain loader (the line that starts with
'./wb-java.sh...'). To change the output directory in the container, change
the -o flag when you run the cartograph Extractor. Note that in run.sh, the
/output directory is a volume that's also mounted to a directory on the host
computer. Changing this directory may lead to being unable to find your files
on the host machine after running.
<file_sep>#!/bin/bash
# Clean the mount points from any previous run so results don't mix.
rm -rf output
rm -rf host
# Build the image defined by the master Dockerfile.
docker build -f DockerfileMaster -t wikibraindocker .
# Run it: raise the kernel shared-memory limits for PostgreSQL, pass the
# target Wikipedia language ($1) and JVM heap size, and bind-mount host/output
# so the generated files survive the container.
docker run --sysctl kernel.shmmax=64205988352 --sysctl kernel.shmall=15675290 -e WIKILANG=$1 -e MEM=9g -v $PWD/host:/host -v $PWD/output:/output wikibraindocker
<file_sep>su postgres -c "createdb wikibrain_$WIKILANG"
# Create the application login role for the language-specific database.
# NOTE(review): the password value is a redacted placeholder (<PASSWORD>) —
# substitute the real secret before use.
su postgres -c "psql wikibrain_$WIKILANG -c CREATE\ USER\ wikibrain\ WITH\ PASSWORD\ \'<PASSWORD>\'\ LOGIN;"
| df83c84b74197661c3bd0b469a93f2426caa92a3 | [
"Markdown",
"Shell"
] | 3 | Markdown | cascadianblue/WikiBrainDocker | f46f5cf1dff83cc95fcc45ed024efc22fbc7c2b9 | 435db56226477d26638852f0d268aa3c711fb64e |
refs/heads/master | <file_sep># scw
wap
<file_sep>$("form").on("submit",function(){
window.localStorage.setItem("value",$("#J_search").val());
window.location.href = "cart.html";
return false;
})
$("form").on("keypress",function(e){
var keycode = e.keyCode;
if(keycode==13){
window.localStorage.setItem("value",$("#J_search").val());
window.location.href = "cart.html";
return false;
}
}) | 6d4880a2aa04e9d47ea2cb8a164e0f0d0c14231f | [
"Markdown",
"JavaScript"
] | 2 | Markdown | BDFQ/scw | f4e998258fbd78ba346ea6ff076230ae3d25fa6f | e1b3cb0b9f42e34789a5dc990ed6eccb8c9c5304 |
refs/heads/master | <repo_name>bybzmt/blog.react<file_sep>/pages/about.jsx
import Layout from '~/components/layout'
import Head from 'next/head'
export default (props)=>(
<Layout>
<Head>
<title>About Me</title>
</Head>
<div class="widewrapper main">
<div class="container about">
<h1>Hello, My name is <span class="about-bold"> <NAME></span></h1>
<p>Lorem ipsum dolor sit amet, consectetur adipisicing elit. Repellendus inventore magni ratione perferendis ex molestiae cum reiciendis perspiciatis consequuntur, nihil ducimus corrupti! Ipsum nesciunt ipsa nobis obcaecati labore, rem recusandae?</p>
<p>Lorem ipsum dolor sit amet, consectetur adipisicing elit. Qui quo sint asperiores, ut doloremque eum commodi, odit nisi sed repellendus earum recusandae pariatur consectetur minus rerum, magni dolores officiis magnam.</p>
<div class="about-button">
<a class="btn btn-xlarge btn-clean-one" href="/contact">Contact Me</a>
</div>
<hr />
</div>
</div>
</Layout>
)
<file_sep>/components/aside/tags.jsx
export default (props)=>(props.tags.length > 0 && (
<div className="aside-widget">
<header>
<h3>Tags</h3>
</header>
<div className="body clearfix">
<ul className="tags">
{props.tags.map(
tag => <li key={tag.id}><a href={'/article'+tag.id}>{tag.name}</a></li>
)}
</ul>
</div>
</div>
)
)
<file_sep>/pages/index.jsx
import React, { Component } from 'react';
import { render } from 'react-dom';
import fetch from 'isomorphic-fetch';
import Link from 'next/link'
import Layout from '~/components/layout'
import AsideFeaturedPost from '~/components/aside/featured_post'
import AsideTags from '~/components/aside/tags'
import Pagination from '~/components/pagination'
class LikeButton extends Component {
static async getInitialProps({ req }) {
let params = `query{
articleList(tagId:0){
items{
id,
title,
intro,
addTime,
author{
nickname
}
tags{
id,
name
}
},
count,
length,
}
}`.replace(/\s+/g, '')
console.log(params);
let resp = await fetch("http://blogapi/", {
method: 'POST',
headers: new Headers({
"Content-Type": 'application/graphql'
}),
body: params
});
// console.log("error:", await resp.text())
let data = await resp.json()
return data.data
}
tags = new Map();
constructor(props) {
super(props);
this.goPage = this.goPage.bind(this)
}
tagAdd(tag) {
if (!this.tags.has(tag.id)) {
this.tags.set(tag.id, tag)
}
}
goPage(page) {
console.log("page:"+page);
}
render() {
return (
<Layout>
<div className="widewrapper main">
<div className="container">
<div className="row">
<div className="col-md-8 blog-main">
{this.props.articleList.items.map(article=>
<div key={article.id} className="col-md-12 col-sm-12">
<article className="blog-list">
<header>
<h3><a href={"/article/"+article.id}>{article.title}</a></h3>
<div className="meta">
<i className="fa fa-user"></i> {article.author.nickname}
<i className="fa fa-calendar"></i> {article.addtime}
<i className="fa fa-comments"></i>
<span className="data">
<a href={"/article/"+article.id+"#comments"}>{article.commentsNum}Comments</a>
</span>
</div>
</header>
<div className="body">
{article.intro}
</div>
<div className="clearfix">
{article.tags.map(tag=>{
this.tagAdd(tag);
return <a key={tag.id} href={"/tag/"+tag.id}>{tag.name}</a>
})}
<a href={"/article/"+article.id} className="btn btn-clean-one">Read more</a>
</div>
<hr />
</article>
</div>
)}
<Pagination
count={this.props.articleList.count}
size={this.props.articleList.length}
current={1}
fn={this.goPage}
/>
</div>
<aside className="col-md-4 blog-aside">
<AsideFeaturedPost />
<AsideTags tags={[...this.tags.values()]} />
</aside>
</div>
</div>
</div>
</Layout>
)
}
}
export default LikeButton
<file_sep>/pages/article.jsx
import React, { Component } from 'react';
import { render } from 'react-dom';
import Link from 'next/link'
import fetch from 'isomorphic-fetch';
import Layout from '~/components/layout'
import Head from 'next/head'
import AsideFeaturedPost from '~/components/aside/featured_post'
import AsideRelatedPost from '~/components/aside/related_post'
import AsideTags from '~/components/aside/tags'
import Pagination from '~/components/pagination'
let commentsNum = 10;
export default class Article extends Component {
static async getInitialProps({ req, query:{id}}) {
let params = `query{
article(id:${id}){
id,
title,
html,
addTime,
author{
nickname
},
tags{
id,
name
},
comments{
id,
user{
nickname
},
content,
addTime,
replys{
id,
user{
nickname
},
content,
addtime
}
},
commentsNum
}
}`.replace(/\s+/g, '')
// console.log(params);
let resp = await fetch("http://blogapi/", {
method: 'POST',
headers: new Headers({
"Content-Type": 'application/graphql'
}),
body: params
});
// console.log("error:", await resp.text())
let data = await resp.json()
return data.data
}
constructor(props) {
super(props);
this.state = {
resultModal: false,
resultModalStatus: 0,
resultModalMessage: "",
content:"",
reply:0,
};
this.dosubmit = this.dosubmit.bind(this);
this.contentChange = this.contentChange.bind(this);
this.commentGoPage = this.commentGoPage.bind(this);
}
commentGoPage(page) {
console.log(page)
}
contentChange(event) {
let content = event.target.value;
let reply = this.state.reply
if (content.lenght < 1 || content[0] != "@") {
reply = 0
}
this.setState({
content: content,
reply: reply,
})
}
reply(reply_id, nickname) {
this.setState({
reply:0,
content: '@'+nickname+" ",
});
}
dosubmit() {
this.setState({
resultModal: true,
resultModalStatus: 0,
})
let data = new FormData();
data.append("id", this.props.article.id);
data.append("reply", this.props.reply);
fetch("/comment", {
method: 'POST',
body: data
})
.then(resp=>resp.json())
.then(json=>{
this.setState({
resultModalStatus: (json.ret > 0) ? 2 : 1,
resultModalMessage: json.data,
})
})
return false;
}
render() {
return (
<Layout>
<Head>
<title>{this.props.article.title}</title>
</Head>
<div className="widewrapper main">
<div className="container">
<div className="row">
<div className="col-md-8 blog-main">
<article className="blog-post">
<div className="body">
<h1>{this.props.article.title}</h1>
<div className="meta">
<i className="fa fa-user"></i> {this.props.article.author.nickname}
<i className="fa fa-calendar"></i> {this.props.article.addtime}
<i className="fa fa-comments"></i>
<span className="data"><a href="#comments"> {this.props.article.commentsNum} Comments</a></span>
</div>
<div className="markdown-body" dangerouslySetInnerHTML={{__html:this.props.article.html}} />
</div>
</article>
<aside className="comments" id="comments">
<hr />
<h2><i className="fa fa-comments"></i> {commentsNum} Comments</h2>
{this.props.article.comments.map(comment=>(
<div key={comment.id}>
<article className="comment" id={"comment-"+comment.id}>
<header className="clearfix">
<div className="meta">
<h3><a href="#">{comment.user.nickname}</a></h3>
<span className="date">
{comment.addtime}
</span>
<span className="separator">
-
</span>
<a onClick={((comment)=>()=>this.reply(comment.id, comment.user.nickname)).bind(this)(comment)} href="#create-comment" className="reply-link">Reply</a>
</div>
</header>
<div className="body">
{comment.content}
</div>
</article>
{(comment.replys.length > 0) && (
<aside className="comments" id={"replys-"+comment.id}>
{comment.replys.map((reply,i)=> (i<=10) ? (
<article key={i} className="comment reply" id={"reply-"+reply.id}>
<header className="clearfix">
<div className="meta">
<h3><a href="#">{reply.user.nickname}</a></h3>
<span className="date">
{reply.addtime}
</span>
<span className="separator">
-
</span>
<a onClick={((reply)=>()=>this.reply(reply.id, reply.user.nickname)).bind(this)(reply)} href="#create-comment" className="reply-link">Reply</a>
</div>
</header>
<div className="body">
{reply.content}
</div>
</article>
) : (
<div className="replysMore">
<a href="javascript:void()" onClick="replyPage({{comment.id}}, 2)">下一页</a>
</div>
))}
</aside>
)}
</div>
))}
</aside>
<Pagination
count={this.props.article.commentsNum}
size={10}
current={1}
fn={this.commentGoPage}
/>
<aside className="create-comment" id="create-comment">
<hr />
<h2><i className="fa fa-pencil"></i> Add Comment</h2>
<form id="hid_form" onSubmit={this.dosubmit}>
<textarea id="hid_content" onChange={this.contentChange} rows="10" name="content" id="comment-body" placeholder="<PASSWORD> Message" className="form-control input-lg" value={this.state.content}></textarea>
<div className="buttons clearfix">
<button type="submit" className="btn btn-xlarge btn-clean-one">Submit</button>
</div>
</form>
</aside>
</div>
<aside className="col-md-4 blog-aside">
<AsideFeaturedPost />
<AsideRelatedPost />
<AsideTags tags={this.props.article.tags} />
</aside>
</div>
</div>
</div>
{this.state.resultModal && (
<div id="ResultModal" className="modal fade" tabIndex="-1" role="dialog">
<div className="modal-dialog" role="document">
<div className="modal-content">
<div className="modal-header">
<button type="button" className="close" data-dismiss="modal" aria-label="Close"><span aria-hidden="true">×</span></button>
<h4 className="modal-title">操作结果</h4>
</div>
<div className="modal-body">
{(this.state.resultModalStatus==0) ? "处理中..." : (
this.state.resultModalStatus==1
) ?
<p class='alert alert-success'> {this.state.resultModalMessage} </p>
:
<p class='alert alert-warning'>{this.state.resultModalMessage}</p>
}
</div>
<div className="modal-footer">
<button type="button" className="btn btn-primary" data-dismiss="modal">确定</button>
</div>
</div>
</div>
</div>
)}
</Layout>
)
}
}
<file_sep>/components/pagination.jsx
import React, { Component } from 'react';
import { render } from 'react-dom';
import PropTypes from 'prop-types';
class Pagination extends Component {
  // NOTE(review): `tags` is never referenced anywhere in this class --
  // presumably dead state from an earlier revision; confirm before removing.
  tags = new Map();
  constructor(props) {
    super(props);
    // Width of the page-link window (defaults to 10) and the page that is
    // currently selected (defaults to 1).
    let pageNum = props.num > 0 ? props.num : 10;
    let current = props.current > 0 ? props.current : 1;
    let count = props.count;
    let size = props.size;
    let fn = props.fn;
    // First page of the visible window: roughly center `current` in it.
    let i = current - parseInt(pageNum / 2);
    if (i < 1) {
      i = 1;
    }
    // Total number of pages: ceil(count / size), at least 1.
    let max = parseInt(count/size) + (count%size>0 ? 1 : 0);
    if (max < 1) {
      max = 1;
    }
    let end = i+(pageNum-1);
    let pages = [];
    // Always expose page 1 when the window does not start at it.
    if (i != 1) {
      pages.push({
        page: 1,
        fn: ()=>fn(1),
        active: 1==current,
        disabled: false,
      })
    }
    // Pages inside the window; pages past `max` render as disabled.
    for (; i <= end; i++) {
      pages.push({
        page: i,
        // Immediately-invoked arrow captures the loop value of `i`.
        fn: (n=>()=>fn(n))(i),
        active: i==current,
        disabled: i>max,
      })
    }
    // Always expose the last page when the window stops short of it.
    if (end < max) {
      pages.push({
        page: max,
        fn: ()=>fn(max),
        active: max==current,
        disabled: false,
      })
    }
    // NOTE(review): state is derived from props only here in the
    // constructor, so later prop changes will not refresh the pagination --
    // confirm callers remount the component when the page changes.
    this.state = {
      previous: current > 1 ? ()=>fn(current-1) : null,
      pages: pages,
      next: current < max ? ()=>fn(current+1) : null,
    }
  }
  // Renders « previous / numbered page links / next » as a Bootstrap
  // pagination list; active and out-of-range pages are not clickable.
  render() {
    return (
      <nav>
        <ul className="pagination">
          { this.state.previous ? (
            <li>
              <a onClick={this.state.previous} aria-label="Previous">
                <span aria-hidden="true">«</span>
              </a>
            </li>
          ) : (
            <li className="disabled">
              <a aria-label="Previous"><span aria-hidden="true">«</span></a>
            </li>
          )}
          {this.state.pages.map((page,i)=>{
            if (page.active) {
              return <li key={i} className="active"><a>{page.page}</a></li>
            } else if(page.disabled) {
              return <li key={i} className="disabled"><a>{page.page}</a></li>
            }else{
              return <li key={i}> <a onClick={page.fn}>{page.page}</a></li>
            }
          })}
          {(()=>{
            if (this.state.next) {
              return <li>
                <a onClick={this.state.next} aria-label="Next">
                  <span aria-hidden="true">»</span>
                </a>
              </li>
            } else {
              return <li className="disabled">
                <a aria-label="Next"><span aria-hidden="true">»</span></a>
              </li>
            }
          })()}
        </ul>
      </nav>
    )}
}
// Prop contract: `count` = total item count, `size` = items per page,
// `current` = selected page, `fn(page)` = page-change callback,
// `num` = optional width of the page-link window (defaults to 10).
Pagination.propTypes = {
  count: PropTypes.number.isRequired,
  size: PropTypes.number.isRequired,
  current: PropTypes.number.isRequired,
  fn: PropTypes.func.isRequired,
  num: PropTypes.number,
}
export default Pagination
<file_sep>/README.md
Blog
========
个人博客react练手用
<file_sep>/next.config.js
// Next.js build configuration: wraps the config with @zeit/next-less so
// `.less` imports are compiled, and maps the `~` alias to the project root.
const withLess = require('@zeit/next-less')
module.exports = withLess({
  webpack: (config, { buildId, dev, isServer, defaultLoaders }) => {
    // Allow `import x from '~/components/...'` from anywhere in the tree.
    config.resolve.alias["~"] = __dirname
    return config
  },
  // Scope CSS class names per module; keep the readable local name + hash.
  cssModules: true,
  cssLoaderOptions: {
    importLoaders: 1,
    localIdentName: "[local]___[hash:base64:5]",
  }
})
<file_sep>/pages/contact.jsx
import Layout from '~/components/layout'
import Head from 'next/head'
export default (props)=>(
<Layout>
<Head>
<title>Contact Me</title>
</Head>
<div class="widewrapper main">
<div class="container">
<div class="row">
<div class="col-md-6 col-md-offset-3 clean-superblock" id="contact">
<h2>Contact</h2>
<form action="#" method="get" accept-charset="utf-8" class="contact-form">
<input type="text" name="name" id="contact-name" placeholder="Name" class="form-control input-lg" />
<input type="email" name="email" id="contact-email" placeholder="Email" class="form-control input-lg" />
<textarea rows="10" name="message" id="contact-body" placeholder="Your Message" class="form-control input-lg"></textarea>
<div class="buttons clearfix">
<button type="submit" class="btn btn-xlarge btn-clean-one">Submit</button>
</div>
</form>
</div>
</div>
</div>
</div>
</Layout>
)
| c9ade859e8b2762079a15f300dac1147335851b3 | [
"JavaScript",
"Markdown"
] | 8 | JavaScript | bybzmt/blog.react | a967e71610f2ddd42c846c6fbce8bb5b8c0c3379 | 9b93ef68720bafc19d1d2ad300581e7e66c28f04 |
refs/heads/master | <repo_name>jrooneo/mydu<file_sep>/makefile
# Build configuration for mydu (a simplified du clone).
CC = g++
CFLAGS =
OBJS = main.o
PROG = mydu

.SUFFIXES: .c .o

# Link step.
$(PROG): $(OBJS)
	$(CC) $(CFLAGS) -o $@ $(OBJS)

# Compile step.  BUGFIX: pass $(CFLAGS) so user-supplied flags
# (e.g. CFLAGS=-g or -Wall) actually take effect.
.c.o:
	$(CC) $(CFLAGS) -c -o $@ $<

# `clean` is not a file target; mark it phony so a file named "clean"
# in the working directory cannot mask it.
.PHONY: clean
clean:
	rm *.o $(PROG)
<file_sep>/main.c
/*********************************************************
* $Author: o-rooneo $
* $Date: 2015/02/10 07:36:49 $
* $Log: main.c,v $
* Revision 1.7 2015/02/10 07:36:49 o-rooneo
* Finished project with bugs. View readme for full listing
*
* Revision 1.6 2015/02/09 21:53:05 o-rooneo
* it compiles! Added queue functionality
*
* Revision 1.5 2015/02/09 01:31:51 o-rooneo
* Merged functions.c into the tail end main.c
*
* Revision 1.4 2015/02/05 16:06:39 o-rooneo
* cleaned up debug messages. added case '?'
*
* Revision 1.3 2015/02/03 00:51:42 o-rooneo
* Added keywords for RCS. Added flags and getopt
*
********************************************************/
#include <stdio.h> //perror, printf
#include <stdlib.h> //
#include <unistd.h> //
#include <sys/stat.h> //
#include <sys/queue.h> //TAIL_QUEUE
#include <limits.h> //PATH_MAX
#include <sys/types.h>
#include <dirent.h>
#include <string.h>
/* Head of the FIFO work queue driving the breadth-first directory walk. */
TAILQ_HEAD(tailhead, entry) head;
struct tailhead *headPtr;
struct entry{ //struct to define the nodes used for the tail queue
    TAILQ_ENTRY(entry) entries;
    char path[PATH_MAX];
};
/* Size of the file at `path` (in KB units via st_blocks), or -1 on stat failure. */
int sizePathFun(char *path);
/* Breadth-first walk of `path`, applying pathFun to every regular file found. */
int breadthFirstApply(char *path, int pathFun(char *path1));
char executable[15];    /* argv[0], used as the perror() message prefix */
char error[PATH_MAX+30];    /* scratch buffer for perror() messages */
int flags[7]; //To keep includes lower using int instead of flag. Global to allow access from all funcs
int main(int argc, char **argv)
{
strncpy(executable, argv[0], 15);
char opt;
int i;
float flagSize = 0;
int size = 0;
char path[PATH_MAX];
for(i = 0; i<7; i++) flags[i]=0; //initialize flags to 0
while((opt = getopt(argc,argv, "ashHkLx")) != -1){ //assign flag values
switch(opt){
case 'a':
if(flags[1]){
printf("Cannot set -a with -s\n");
break;
}
flags[0]=1;
break;
case 's':
if(flags[0]){
printf("Cannot set -s with -a\n");
break;
}
flags[1]=1;
break;
case 'h':
if(flags[3]){
printf("Cannot set -h with -H\n");
break;
}
//flags[2]=1;
break;
case 'H':
if(flags[2]){
printf("Cannot set -h with -H\n");
break;
}
//flags[3]=1;
break;
case 'k':
flags[4]=1;
break;
case 'L':
flags[5]=1;
break;
case 'x':
flags[6]=1;
break;
case '?':
default:
sprintf(error, "%s %s", executable,opt);
perror(error);
break;
}
}
/* Code to support only a single file parameter
if(optind >= argc){
strcpy(path,".");
size = breadthFirstApply(path,sizePathFun);
printf("%i\t%s\n",size,path);
}else{
strcpy(path,argv[optind]);
}
*/
//Probably unnecessary but du supports multiple files so I added the functionality
if(optind < argc){
while(optind < argc){
strcpy(path,argv[optind]);
breadthFirstApply(path,sizePathFun);
optind++;
}
return 0;
}
strcpy(path,".");
breadthFirstApply(path,sizePathFun);
return 0;
}
int sizePathFun(char *path)
{
struct stat statBuffer;
if(stat(path, &statBuffer) == -1){
sprintf(error, "%s %s", executable,path);
perror(error);
return -1;
}
return statBuffer.st_blocks/2;
}
int breadthFirstApply(char *path, int pathFun(char *path1))
{
struct dirent *direntPtr;
struct stat statBuffer;
struct entry *holder, *tailPtr, *traversalPtr;
DIR *currentDir;
char fullPath[PATH_MAX];
char tempPath[PATH_MAX];
int size = 0;
int sum = 0;
int directorySize = 0;
int classSize = 0;
char sizeClass = 'B';
float tempSize = 0;
float flagValue = 0.0;
TAILQ_INIT(&head);
headPtr = &head;
holder = (struct entry *) malloc(sizeof(struct entry));
strcpy(holder->path, path);
TAILQ_INSERT_TAIL(headPtr,holder,entries);
while(!TAILQ_EMPTY(headPtr)){
strcpy(tempPath, headPtr->tqh_first->path);
tailPtr = headPtr->tqh_first; //Get the next item in the queue
TAILQ_REMOVE(headPtr,headPtr->tqh_first,entries);
free(tailPtr);
directorySize = 0;
if ((currentDir = opendir(tempPath)) == NULL) {
if(headPtr->tqh_first){
TAILQ_REMOVE(headPtr,headPtr->tqh_first,entries);
}
sprintf(error, "%s %s", executable,tempPath);
perror(error);
continue;
}
while(currentDir != NULL && (direntPtr = readdir(currentDir))){
snprintf(fullPath, PATH_MAX, "%s/%s", tempPath, direntPtr->d_name);
if(stat(fullPath, &statBuffer) == -1){
printf("File not found");
continue;
}
switch(statBuffer.st_mode & S_IFMT){
case S_IFREG:
size = pathFun(fullPath)/2;
if(size > -1){
if(flags[2]){
while((tempSize / 1024.0) > 1){
tempSize/=1024.0;
classSize++;
}
}
if(flags[3]){
while((tempSize / 1000.0) > 1){
tempSize/=1000.0;
classSize++;
}
}
if(!flags[2] || !flags[3]) directorySize += size;
if(flags[0]){
if(flags[2] || flags[3]){
switch(classSize){
case 1: sizeClass = 'K'; break;
case 2: sizeClass = 'M'; break;
}
if(classSize > 2) sizeClass = 'G';
printf("%i %c\t%s\n",tempSize,fullPath);
}else{
printf("%i\t%s\n",size,fullPath);
}
}
}
break;
case S_IFDIR:
if(strcmp(direntPtr->d_name, ".") && strcmp(direntPtr->d_name, "..")) {
holder = (struct entry *)malloc(sizeof(struct entry));
strcpy(holder->path, fullPath);
TAILQ_INSERT_TAIL(headPtr, holder, entries);
}
break;
case S_IFLNK:
if(!flags[5]){
if(lstat(fullPath, &statBuffer) == -1){
sprintf(error, "%s %s", executable,fullPath);
perror(error);
continue;
}
}
size = pathFun(fullPath);
//Following will be treated as special files. No size printed.
case S_IFBLK:
printf("Block Device: %s\n", fullPath);
break;
case S_IFCHR:
printf("Character Device: %s\n",path);
break;
case S_IFIFO:
printf("FIFO Pipe: %s\n",path);
break;
}
}
if(!flags[1] || !flags[2] || !flags[3]){
printf("%i\t %s \n", directorySize, tempPath);
}
if(flags[2] || flags[3]){
printf("%i %c\t%s\n",tempSize,fullPath);
}
if(!flags[2] || !flags[3]){
sum += directorySize;
}
closedir(currentDir);
}
return sum;
}
| 24437718f3d6a22bde17b2be263094cc4fe48752 | [
"C",
"Makefile"
] | 2 | Makefile | jrooneo/mydu | 5b388089a08b8e1e66a8bf586918af93433d66f9 | d29c0e31cfe67ea5e569f8c94f67cf20a89a35cb |
refs/heads/master | <file_sep># -*- coding: utf-8 -*-
"""
Created on Tue Oct 21 02:25:04 2014
+------------------------------------------------------+
|(c) 2014 The University of Texas at Austin |
| Mechanical Enigneering Department |
| NERDLab - Neuro-Engineering, Research & |
| Development Laboratory |
| @author: benito |
+------------------------------------------------------+
"""
""" A Python Class Enum Constants
A simple Python BondGraph class, demonstrating the essential
facts and functionalities of BondGraph.
"""
from Extras.enum import Enum
class BondType(Enum):
    # Bonds either carry a pure signal or a power (effort/flow) pair.
    Signal = (0, 'SignalBond')
    Power = (1, 'PowerBond')
class CausalityType(Enum):
    # Which end of the bond carries the causal stroke; 0 = not yet assigned.
    Tail = (-1, 'Tail')
    Acausal = ( 0, 'Acausal') # Undefined
    Head = ( 1, 'Head')
class ArrowType(Enum):
    # Direction of the power half-arrow on a bond.
    Tail = (-1, 'Tail')
    Undefined = ( 0, 'Undefined')
    Head = ( 1, 'Head')
class ElementType(Enum):
    # Broad bond-graph element families; the numeric code seeds the
    # per-family *Type enums below (variant + 2 * family_code).
    Junction = (0,'Junction')
    Source = (1,'Source')
    Storage = (2,'Storage')
    # NOTE(review): Transduction shares code 2 with Storage, so the derived
    # TransducerType values collide with StorageType (both become 4/5).
    # Confirm whether a distinct code (e.g. 3 or 4) was intended.
    Transduction = (2,'Transduction')
    Dissipation = (5,'Dissipation')
    Undefined = (-1,'Undefined')
class JunctionType(Enum):
    # 0-junction (common effort) and 1-junction (common flow).
    Zero = 0 + 2*ElementType.Junction[0]
    One = 1 + 2*ElementType.Junction[0]
class SourceType(Enum):
    # Ideal flow and effort sources.
    Flow = 0 + 2*ElementType.Source[0]
    Effort = 1 + 2*ElementType.Source[0]
class StorageType(Enum):
    # Energy-storing elements: C (capacitor) and I (inertia).
    Capacitor = 0 + 2*ElementType.Storage[0]
    Inertia = 1 + 2*ElementType.Storage[0]
class TransducerType(Enum):
    # Two-port transducers.  NOTE(review): because ElementType.Transduction
    # reuses code 2, these values equal StorageType's -- see ElementType.
    Transformer = 0 + 2*ElementType.Transduction[0]
    Gyrator = 1 + 2*ElementType.Transduction[0]
class DissipationType(Enum):
    # Energy-dissipating elements (resistance and its inverse form).
    Resistance = 0 + 2*ElementType.Dissipation[0]
    Admittance = 1 + 2*ElementType.Dissipation[0]
# Lookup table mapping each element-type code to its one-letter bond-graph
# symbol and a human-readable name.  NOTE(review): 'inertia' is lower-case
# while every other name is capitalized -- confirm intentional.
BGelementSymbols = [(JunctionType.Zero, '0', 'ZeroJunction'), \
                    (JunctionType.One, '1', 'OneJunction'), \
                    (SourceType.Flow, 'F', 'FlowSource'), \
                    (SourceType.Effort, 'E', 'EffortSource'), \
                    (StorageType.Capacitor, 'C', 'Capacitor'), \
                    (StorageType.Inertia, 'I', 'inertia'), \
                    (TransducerType.Transformer, 'T', 'Transformer'), \
                    (TransducerType.Gyrator, 'G', 'Gyrator'), \
                    (DissipationType.Resistance, 'R', 'Resistance'), \
                    (DissipationType.Admittance, 'Y', 'Admittance')]
#<<<----------------------------------------------------->>>#
<file_sep>' to be implemented'
<file_sep># -*- coding: utf-8 -*-
"""
Created on Wed Oct 15 08:39:08 2014
+------------------------------------------------------+
|(c) 2014 The University of Texas at Austin |
| Mechanical Enigneering Department |
| NERDLab - Neuro-Engineering, Research & |
| Development Laboratory |
| @author: benito |
+------------------------------------------------------+
"""
try:
import matplotlib.pyplot as plt
except:
raise
from BondGraphs import graphClass as grph
# ---- sample adjacency-list graphs used as demo fixtures -------------------
graph = { "a" : ["c"],
      "b" : ["c", "e"],
      "c" : ["a", "b", "d", "e"],
      "d" : ["c"],
      "e" : ["c", "b"],
      "f" : []
    }
g = { "a" : ["d"],
      "b" : ["c"],
      "c" : ["b", "c", "d", "e"],
      "d" : ["a", "c"],
      "e" : ["c"],
      "f" : []
    }
h = { "a" : ["d","f"],
      "b" : ["c","b"],
      "c" : ["b", "c", "d", "e"],
      "d" : ["a", "c"],
      "e" : ["c"],
      "f" : ["a"]
    }
complete_graph = {
    "a" : ["b","c"],
    "b" : ["a","c"],
    "c" : ["a","b"]
}
isolated_graph = {
    "a" : [],
    "b" : [],
    "c" : []
}
# CONSISTENCY FIX: the bare Python-2 `print 'x'` statements that this script
# mixed with call-form prints are now parenthesised single-argument calls,
# which behave identically under Python 2 and Python 3.
graph0 = grph.Graph(graph)
# NOTE(review): the label says "complete_graph" but this is the density of
# `graph` -- the output strings are left untouched, only noted here.
print("graph density of complete_graph:")
print(graph0.density())
print('Edges of graph')
print(graph0.edges())
#->|[('a', 'c'), ('c', 'a'), ('c', 'b'), ('c', 'd'), ('c', 'e'), ('b', 'c'), ('b', 'e'), ('e', 'c'), ('e', 'b'), ('d', 'c')]
print('Isolated nodes on graph')
print(graph0.find_isolated_vertices())
#->|
graph1 = grph.Graph(complete_graph)
print("graph density of complete_graph:")
print(graph1.density())
graph2 = grph.Graph(isolated_graph)
print("graph density of isolated_graph:")
print(graph2.density())
graph3 = grph.Graph(g)
print("graph density of g:")
print(graph3.density())
print("Vertices of graph:")
print(graph3.vertices())
print("Edges of graph:")
print(graph3.edges())
print("Add vertex:")
graph3.add_vertex("z")
print("Vertices of graph:")
print(graph3.vertices())
print("Add an edge:")
graph3.add_edge({"a","z"})
print("Vertices of graph:")
print(graph3.vertices())
print("Edges of graph:")
print(graph3.edges())
print('Adding an edge {"x","y"} with new vertices:')
graph3.add_edge({"x","y"})
print("Vertices of graph:")
print(graph3.vertices())
print("Edges of graph:")
print(graph3.edges())
print(' ... done!')
<file_sep>PyBondGraph
===========
BondGraph using Python
<file_sep># -*- coding: utf-8 -*-
"""
Created on Wed Oct 15 08:42:25 2014
+------------------------------------------------------+
|(c) 2014 The University of Texas at Austin |
| Mechanical Enigneering Department |
| NERDLab - Neuro-Engineering, Research & |
| Development Laboratory |
| @author: benito |
+------------------------------------------------------+
"""
""" A Python Class
A simple Python BondGraph class, demonstrating the essential
facts and functionalities of BondGraph.
"""
import itertools
from graphClass import Graph
from BGconstants import *
#<<<----------------------------------------------------->>>#
class BGcomponent( object ):
    """Base class for every bond-graph entity.

    Keeps two monotonically increasing counters: the public ``id``
    (drawn from ``id_generator``) and a private, 1-based ``__id`` that
    ``is_defined`` inspects.
    """
    id_generator = itertools.count(0) # first generated is 0
    __ID = 0
    def __init__(self, Name = None, Type = None, Position = None):
        """Initialize a BGcomponent.

        Args:
            Name: display name of the component (e.g. 'C', '0').
            Type: descriptive type string (e.g. 'Capacitor').
            Position: [x, y] placement; defaults to a fresh empty list.
                BUGFIX: the old literal ``[]`` default was shared by every
                instance created without a position (mutable-default bug).
        """
        if Position is None:
            Position = []
        self.id = next(self.id_generator)
        self.__id = BGcomponent.__ID
        BGcomponent.__ID += 1
        self.__id += 1  # private id is 1-based
        self.__name = Name
        self.__type = Type
        self.__value = 0
        self.__effort = 0
        self.__flow = 0
        self.__energyDomain = None
        self.__parameter = 0
        self.__input = 0
        self.__output = 0
        self.__position = Position
    def __str__(self):
        """Multi-line dump of the private id, name and type."""
        display = 'BGcomponent::_____:___________________\n'
        display += ' : id: %d\n' % self.__id
        display += ' : Name: %s\n' % self.__name
        display += ' : Type: %s' % self.__type
        return display
    @staticmethod
    def is_defined(self):
        """Return True once the instance has a non-zero private id.

        NOTE(review): declared ``@staticmethod`` yet takes the instance
        as an explicit argument (called as ``obj.is_defined(obj)``
        elsewhere in this file); kept as-is for interface compatibility.
        """
        return not (self.__id == 0)
#<<<----------------------------------------------------->>>#
class BGbond( BGcomponent, Graph ):
    """A bond (edge) connecting two ports of a bond graph.

    Inherits id bookkeeping from BGcomponent and edge storage from Graph,
    and keeps its own class-level id counter in addition to the inherited
    one.  The dual-counter initialization is order-sensitive, so the code
    below is kept byte-identical.
    """
    id_generator = itertools.count(0) # first generated is 0
    __ID = 0
    def __init__(self, fromPort = 0, toPort = 0, Type = 'PowerBond'):
        """ initializes a BGbond object (fromPort/toPort are port ids;
        Type is 'PowerBond' or 'SignalBond') """
        BGcomponent.__init__(self)
        Graph.__init__(self)
        self.id = next(self.id_generator)
        self.__id = BGbond.__ID
        BGbond.__ID += 1
        self.__id += 1  # private id is 1-based
        self.__fromPort = fromPort
        self.__toPort = toPort
        self.__causalityStroke = 1  # which end carries the causal stroke
        self.__directionArrow = 1   # power half-arrow direction
        self.__type = Type
    def __str__(self):
        """Multi-line dump of id, endpoints and type."""
        display = 'BGbond::_____:___________________\n'
        display += ' : id: %d\n' % self.__id
        display += ' : from: %s\n' % self.__fromPort
        display += ' : to: %s\n' % self.__toPort
        display += ' : Type: %s' % self.__type
        return display
    # Accessors for the bond's private attributes.
    def setType(self, Variable = None):
        self.__type = Variable
    def getType(self):
        return self.__type
    def setFromPort(self, Variable = None):
        self.__fromPort = Variable
    def getFromPort(self):
        return self.__fromPort
    def setToPort(self, Variable = None):
        self.__toPort = Variable
    def getToPort(self):
        return self.__toPort
    def getId(self):
        return self.__id
    def addBond(self, fromPort = 0, toPort = 0, Type = 'PowerBond'):
        # Record the bond as an edge in the underlying Graph.
        # NOTE(review): the Type argument is ignored here -- confirm intent.
        self.add_edge(fromPort, toPort)
    def setCausalityStroke(self, Variable = None):
        self.__causalityStroke = Variable
    def getCausalityStroke(self):
        return self.__causalityStroke
    def setDirectionArrow(self, Variable = None):
        self.__directionArrow = Variable
    def getDirectionArrow(self):
        return self.__directionArrow
#<<<----------------------------------------------------->>>#
class BGelement( BGcomponent, Graph ):
    """A node (element) of a bond graph: source, storage, junction, etc."""
    id_generator = itertools.count(0) # first generated is 0
    __ID = 0
    def __init__(self, Name = None, Type = None, Position = None):
        """Initialize a BGelement.

        Args:
            Name: one-letter element symbol (e.g. 'C', '0', 'T').
            Type: descriptive type string (e.g. 'Capacitor').
            Position: [x, y] placement; defaults to a fresh [0, 0].
                BUGFIX: the old literal ``[0,0]`` default was shared by
                all instances created without a position, so mutating one
                element's position silently moved the others.
        """
        if Position is None:
            Position = [0, 0]
        BGcomponent.__init__(self)
        Graph.__init__(self)
        self.id = next(self.id_generator)
        self.__id = BGelement.__ID
        BGelement.__ID += 1
        self.__id += 1  # private id is 1-based
        self.__type = Type
        self.__name = Name
        self.__position = Position
        # Symbolic model attributes, filled in later by the user.
        self.variable = None
        self.stateEquation = None
        self.outputEquation = None
        self.modulus = None
        self.common = None
    def __str__(self):
        """Multi-line dump of id, name, type and position."""
        display = 'BGelement::_____:___________________\n'
        display += ' : id: %d\n' % self.__id
        display += ' : name: %s\n' % self.__name
        display += ' : Type: %s\n' % self.__type
        display += ' : pos: %s\n' % self.__position
        return display
    # Accessors for the element's private attributes.
    def setType(self, Variable = None):
        self.__type = Variable
    def getType(self):
        return self.__type
    def setName(self, Variable = None):
        self.__name = Variable
    def getName(self):
        return self.__name
    def setPosition(self, Variable = None):
        self.__position = Variable
    def getPosition(self):
        return self.__position
    def getId(self):
        return self.__id
    def addElement(self, Name = None, Type = None, Position = [0,0]):
        # Record the element as a node in the underlying Graph.
        self.add_node(Name, position = Position)
    def setStateEquation(self, Equation = None):
        self.stateEquation = Equation
    def getStateEquation(self):
        return self.stateEquation
    def setOutputEquation(self, Equation = None):
        self.outputEquation = Equation
    def getOutputEquation(self):
        return self.outputEquation
    def setVariable(self, Variable = None):
        self.variable = Variable
    def getVariable(self):
        return self.variable
#<<<----------------------------------------------------->>>#
class BondGraph( BGbond, BGelement, Graph ):
    """A complete bond graph: a collection of elements joined by bonds."""
    id_generator = itertools.count(0) # first generated is 0
    __ID = 0
    def __init__(self, BondsList = None, ElementsList = None,
                 graph = None, Name = None):
        """Initialize a BondGraph.

        Args:
            BondsList: list of BGbond objects (defaults to a fresh []).
            ElementsList: list of BGelement objects (defaults to a fresh []).
            graph: underlying Graph (defaults to a new, empty Graph).
            Name: display name of the graph.

        BUGFIX: the old signature used ``BondsList=[]``/``ElementsList=[]``/
        ``graph=Graph()`` defaults, all evaluated once at import time and
        therefore shared by every instance (mutable-default bug).
        """
        if BondsList is None:
            BondsList = []
        if ElementsList is None:
            ElementsList = []
        if graph is None:
            graph = Graph()
        BGcomponent.__init__(self)
        Graph.__init__(self)
        self.id = next(self.id_generator)
        self.__id = BondGraph.__ID
        BondGraph.__ID += 1
        self.__id += 1  # private id is 1-based
        self.__bondsList = BondsList
        self.__elementsList = ElementsList
        self.__name = Name
        # NOTE(review): `graph` is accepted but never stored -- confirm
        # whether it was meant to back this instance's topology.
    def __str__(self):
        """Multi-line dump of the graph name, its elements and its bonds."""
        display = 'BondGraph::_____:___________________\n'
        display += ' : Name: %s\n' % self.__name
        display += 'BG:BGelement::__:___________________\n'
        for element in self.__elementsList:
            display += ' : id: %d\n' % element.getId()
            display += ' : name: %s\n' % element.getName()
            display += ' : Type: %s\n' % element.getType()
            display += ' : pos: %s\n' % element.getPosition()
            display += ' :-------------------\n'
        display += 'BG:BGbond::_____:___________________\n'
        for element in self.__bondsList:
            display += ' : id: %d\n' % element.getId()
            display += ' : from: %s\n' % element.getFromPort()
            display += ' : to: %s\n' % element.getToPort()
            display += ' : Type: %s\n' % element.getType()
            display += ' :-------------------\n'
        return display
    def addBond(self, fromPort = 0, toPort = 0, Type = 'PowerBond'):
        # Record the bond as an edge in the underlying Graph.
        self.add_edge(fromPort, toPort)
    def addElement(self, fromPort = 0, toPort = 0, Type = 'PowerBond'):
        # NOTE(review): identical to addBond (likely copy-paste) -- an
        # element-add would be expected to call add_node; kept as-is to
        # preserve behavior.
        self.add_edge(fromPort, toPort)
#<<<----------------------------------------------------->>>#
#<<<----------------------------------------------------->>>#
#<<<----------------------------------------------------->>>#
#<<<----------------------------------------------------->>>#
#--------------------------------------------------------
# END-OF-Graph class
#
if __name__ == "__main__":
print('BGcomponent::\n' \
'Tried to execute BonGraph.py module\n' \
'(with PyBondGraph classes definitions)')
print('\n\n')
print('------------------------------------')
print('--------> BGcomponents <------------')
print('------------------------------------')
print('\n\n')
bgc0 = BGcomponent()
bgc1 = BGcomponent('F','FlowSource')
bgc2 = BGcomponent('C','Capacitor')
bgc3 = BGcomponent('0','ZeroJunction')
bgc4 = BGcomponent('T','Transformer')
bgc5 = BGcomponent('R','Resistance')
bgcs = [bgc0, bgc1, bgc2, bgc3, bgc4, bgc5]
for bgc in bgcs:
print('------------------------------------')
print(bgc)
print('------------------------------------')
print('\n\n')
print('------------------------------------')
print('----------> BGbonds <---------------')
print('------------------------------------')
print('\n\n')
bgb0 = BGbond()
bgb1 = BGbond(0,1)
bgb2 = BGbond(1,4,'SignalBond')
bgbs = [bgb0, bgb1, bgb2]
for bgb in bgbs:
print('------------------------------------')
print(bgb)
print('------------------------------------')
print('\n\n')
print('------------------------------------')
print('---------> BGelements <-------------')
print('------------------------------------')
print('\n\n')
bge0 = BGelement()
bge1 = BGelement('E','EffortSource',[1,3])
bge2 = BGelement('1','OneJunction',[2,4])
bge3 = BGelement('C','Capacitor',[2,7])
bges = [bge0, bge1, bge2, bge3]
for bge in bges:
print('------------------------------------')
print(bge)
print('------------------------------------')
bge3.setVariable('x')
bge3.setStateEquation('dx/dt = -2*cos(x)')
bge3.setOutputEquation('y = x**2')
print(' Element bge1 variable: %s' % bge3.getVariable())
print(' Element bge1 state equation: %s' % bge3.getStateEquation())
print(' Element bge1 output equation: %s' % bge3.getOutputEquation())
print('------------------------------------')
print('bgc1.is_defined(bgc1)',bgc1.is_defined(bgc1))
print('bgb1.is_defined(bgb1)',bgb1.is_defined(bgb1))
print('bge1.is_defined(bge1)',bge1.is_defined(bge1))
print('------------------------------------')
for (key,val,name) in BGelementSymbols:
print('BGelementSymbols[%2s] = %s <- %s' % (key, val, name))
print('------------------------------------')
print('BondType.Signal[%s] = %s' % (BondType.Signal[0], BondType.Signal[1]))
print('BondType.Power[%s] = %s' % (BondType.Power[0], BondType.Power[1]))
print('\n\n')
print('------------------------------------')
print('---------> BondGraphs <-------------')
print('------------------------------------')
print('\n\n')
Bgraph = Graph()
bg0 = BondGraph()
bg1 = BondGraph(BondsList = bgbs, ElementsList = bges, \
graph = Bgraph, Name = 'Test BG1')
bgb5 = BGbond(0,1)
bgb6 = BGbond(1,2)
bgb7 = BGbond(1,3)
bgb8 = BGbond(2,4)
bgb9 = BGbond(4,5)
bge5 = BGelement('E','EffortSource',[2,2])
bge6 = BGelement('1','OneJunction',[2,3])
bge7 = BGelement('T','Transformer',[3,1])
bge8 = BGelement('0','ZeroJunction',[4,5])
bge9 = BGelement('C','Capacitor',[3,5])
bg2 = BondGraph(BondsList = [bgb5, bgb6, bgb7, bgb8, bgb9],
ElementsList = [bge5, bge6, bge7, bge8, bge9],
graph = Bgraph, Name = '<NAME>')
bgs = [bg0, bg1, bg2]
for bg in bgs:
print('<<<<<<<<--------====-------->>>>>>>>\n')
print(bg)
# print('------------------------------------'
#
# bge1.setVariable('x')
# bge1.setStateEquation('dx/dt = -2*cos(x)')
# bge1.setOutputEquation('y = x**2')
#
# print(' BG:Element bge1 variable: %s' % bge1.getVariable()
# print(' BG:Element bge1 state equation: %s' % bge1.getStateEquation()
# print(' BG:Element bge1 output equation: %s' % bge1.getOutputEquation()
#
print('*********************************')
print('*-------------------------------*')
print('*-----*****---*---*---****------*')
print('*-----*-------**--*---*---*-----*')
print('*-----***-----*-*-*---*---*-----*')
print('*-----*-------*--**---*---*-----*')
print('*-----*****---*---*---****------*')
print('*-------------------------------*')
print('*********************************')
print('\n\n ... done!')
#
# NEXT: draw graph
# pos=nx.spring_layout(G) # positions for all nodes
#
# # nodes
# nx.draw_networkx_nodes(G,pos,node_size=700)
| fac58f5c8a8970a055e5253e70dbf5e9d09d902d | [
"Markdown",
"Python"
] | 5 | Python | hieroPhant/PyBondGraph | b2424bb9a476a51336d51715bcb7f1b1e7b5b327 | 9c372dae4d4c250c365190243dd3c80bfc7f031b |
refs/heads/master | <repo_name>shethchintan7/spring-state-machine-thread-leak<file_sep>/src/main/java/com/example/distributedlocktest/StateMachineConfig.java
package com.example.distributedlocktest;
import org.springframework.context.annotation.Configuration;
/**
 * Holds the state and event enums for the demo state machine.
 *
 * NOTE(review): annotated {@code @Configuration} although it declares no
 * beans in this file -- confirm whether the annotation is still needed.
 */
@Configuration
class StateMachineConfig {
    /** States the machine can occupy. */
    public enum States {
        START,
        INTERMEDIATE,
        STOP
    }
    /** Events that drive transitions between the states. */
    public enum Events {
        GO1,
        GO2
    }
}
<file_sep>/src/main/java/com/example/distributedlocktest/DistributedlocktestApplication.java
package com.example.distributedlocktest;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
@SpringBootApplication
public class DistributedlocktestApplication {
public static void main(String[] args) {
SpringApplication.run(DistributedlocktestApplication.class, args);
}
}
<file_sep>/src/main/java/com/example/distributedlocktest/StopAction.java
package com.example.distributedlocktest;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.statemachine.StateContext;
import org.springframework.statemachine.action.Action;
import org.springframework.stereotype.Component;
import org.springframework.transaction.annotation.Transactional;
/**
 * Action executed when the state machine reaches its stop transition.
 */
@Component
public class StopAction implements Action<StateMachineConfig.States, StateMachineConfig.Events> {
    static final Logger log = LoggerFactory.getLogger(StopAction.class);

    /**
     * Logs the stop event together with the id of the machine that fired it.
     *
     * @param context transition context supplied by Spring StateMachine
     */
    @Override
    @Transactional
    public void execute(StateContext<StateMachineConfig.States, StateMachineConfig.Events> context) {
        // BUGFIX: the message had no "{}" placeholder, so SLF4J silently
        // dropped the state-machine id argument.
        log.info("In stop event for state machine {}", context.getStateMachine().getId());
    }
}
<file_sep>/src/main/resources/application.properties
logging.level.org.springframework.statemachine=TRACE | 9e5abf90a95797857ede4bb8ccfcf7d79ed558fa | [
"Java",
"INI"
] | 4 | Java | shethchintan7/spring-state-machine-thread-leak | b564fbd0495cbd995856d9b473da500298b7b822 | ddf058baeae6d741eb963ee62a031ecd9ca977c6 |
refs/heads/master | <repo_name>EliasMartins007/API_mysql<file_sep>/README.md
API simples de exemplo do LogRocket
[x] GET/programming-languages ->getMultiple()
[x] POST/programming-languages ->create()
[] PUT/programming-languages/:id -> update()
[] DELETE/programming-languages/:id -> remove()
<file_sep>/services/programmingLanguages.js
const db = require('./db');
const helper = require('../helper');
const config = require('../config');
//querys sql ficam nesse arquivo!
//
//
//buscar todos os registros limite listPerPage
// Fetch one page of programming languages (page size = config.listPerPage).
async function getMultiple(page = 1) {
  const pageSize = config.listPerPage;
  const offset = helper.getOffset(page, pageSize);
  const rows = await db.query(
    `SELECT id, name, released_year, githut_rank, pypl_rank, tiobe_rank
    FROM programming_languages LIMIT ?,?`,
    [offset, pageSize]
  );
  return {
    data: helper.emptyOrRows(rows),
    meta: { page },
  };
}
//criar novo registro http://localhost:3000/programming-languages/
// Insert a new programming-language row; resolves to a status message.
async function create(programmingLanguage) {
  const { name, released_year, githut_rank, pypl_rank, tiobe_rank } =
    programmingLanguage;
  const result = await db.query(
    `INSERT INTO programming_languages
    (name, released_year, githut_rank, pypl_rank, tiobe_rank)
    VALUES
    (?, ?, ?, ?, ?)`,
    [name, released_year, githut_rank, pypl_rank, tiobe_rank]
  );
  const message = result.affectedRows
    ? 'Programing language created successfuly'
    : 'Error in creating programming language';
  return { message };
}
//update de um registro http://localhost:3000/programming-languages/:id
// Update the programming-language row with the given id.
// Resolves to a status message describing the outcome.
async function update(id, programmingLanguage) {
  const result = await db.query(
    `UPDATE programming_languages
    SET name=?, released_year=?, githut_rank=?,
    pypl_rank=?, tiobe_rank=?
    WHERE id=?`,
    [
      programmingLanguage.name,
      programmingLanguage.released_year,
      programmingLanguage.githut_rank,
      programmingLanguage.pypl_rank,
      programmingLanguage.tiobe_rank,
      id,
    ]
  );
  let message = 'Error in updating programing language';
  // BUGFIX: `result.affectedRouws` (typo) was always undefined, so the
  // success message was never returned even when the UPDATE succeeded.
  if (result.affectedRows) {
    message = 'Programming language update successfully';
  }
  return { message };
}
//delete de um registro http://localhost:3000/programming-languages/:id
// Delete the programming-language row with the given id.
async function remove(id) {
  const result = await db.query(
    `DELETE FROM programming_languages WHERE id=?`,
    [id]
  );
  const outcome = result.affectedRows
    ? 'Programming language deleted successfuly'
    : 'Erro in deleting programming language';
  return { message: outcome };
}
// Public API of the programming-languages service.
module.exports = {
  getMultiple,
  create,
  update,
  remove,
};
| 8040bf3fa0a35b93ff693700e399a3349883548c | [
"Markdown",
"JavaScript"
] | 2 | Markdown | EliasMartins007/API_mysql | b07422568a7e9e3b22a6f21be2be6781df55a893 | 9ab04489e3f948166823790d64de22e1938a0fe7 |
refs/heads/master | <file_sep>library(ape)
library(bGMYC)
library(parallel)
suppressMessages(library(R.utils))
source('bgmyc.gibbs.mpi.R')
source('bgmyc.multiphylo.mpi.R')
# Parse named command-line arguments (R.utils::commandArgs with asValues).
# Vector defaults such as scale=c(20, 10, 5) surface as scale1/scale2/scale3,
# matching the args$scale1 ... accesses further down.
args <- commandArgs(
    trailingOnly=TRUE, asValues=TRUE,
    defaults=c(
        py1=0, py2=2, pc1=0, pc2=2, t1=2, t2=51,
        scale=c(20, 10, 5), start=c(1, 0.5, 50),
        noproc=1
    ),
    adhoc=TRUE
)
# Render `result`'s default plot into "<output>.svg".
outputSVG <- function(result, output) {
    svg(paste0(output, '.svg'))
    # BUGFIX/robustness: close the device even if plot() errors, so a
    # failed plot cannot leave the SVG graphics device open and swallow
    # all subsequent plotting output.
    on.exit(dev.off())
    plot(result)
}
# Read the posterior tree sample from "<treesFile>.trees" (NEXUS format,
# via ape::read.nexus) and return it.
readNexus <- function(treesFile) {
    # IDIOM FIX: the old body reassigned the parameter and relied on the
    # (invisible) value of the final assignment as the return value;
    # return the expression explicitly instead.
    read.nexus(file=paste0(treesFile, '.trees'))
}
# Write the bGMYC species-delimitation table to "<output>.txt".
specTableOutput <- function(result, output) {
    bgmyc.spec(result, paste0(output, '.txt'))
}
# Compute the pairwise conspecificity probability matrix for `result`.
# NOTE(review): despite the name, this does not draw a heatmap -- the
# caller currently plots result.multi rather than this matrix; confirm.
specHeatmap <- function(result) {
    # IDIOM FIX: the old assignment-as-last-expression form returned the
    # matrix only invisibly; return it explicitly.
    spec.probmat(result)
}
# Driver: read the trees, run the multi-tree bGMYC MCMC, then write the
# diagnostics (MCMC trace plot, delimitation table, probability plot).
trees <- readNexus(args$id)
result.multi <- bgmyc.multiphylo.mpi(
    trees, mcmc=args$mcmc, burnin=args$burnin, thinning=args$thinning,
    noproc=args$noproc, py1=args$py1, py2=args$py2, pc1=args$pc1,
    pc2=args$pc2, t1=args$t1, t2=args$t2, start=c(args$start1, args$start2,
    args$start3), scale=c(args$scale1, args$scale2, args$scale3)
)
outputSVG(result.multi, paste0(args$id, '_MCMC'))
specTableOutput(result.multi, args$id)
result.probmat <- specHeatmap(result.multi)
# NOTE(review): this plots result.multi again, not result.probmat -- the
# '_prob' figure is presumably meant to show the probability matrix; confirm.
outputSVG(result.multi, paste0(args$id, '_prob'))
<file_sep>bgmyc.gibbs.mpi <- function (
data, m, burnin=1, thinning=1, py1, py2, pc1, pc2,
t1, t2, scale=c(20, 10, 5.00),
start=c(1.0, 0.5, 50.0), likelihood, prior
)
{
NNodes<-data$tree$Nnode
p = length(start)
vth = array(0, dim = c(m, p+1))
f0 = likelihood(start, data)+prior(start, py1, py2, pc1, pc2, t1, t2) #####################* PRIOR
arate = array(0, dim = c(1, p))
th0 = start
th1 = th0
mover<-function(index, initial){
if(index == 1){return(rgamma(1, shape=scale[1], rate=(scale[1]/initial[1])))}
if(index == 2){return(rgamma(1, shape=scale[2], rate=(scale[2]/initial[2])))}
if(index == 3){return(round(initial[3] + rnorm(1) * scale[3]))}
}
for (i in 1:m) {
th1<-th0
for (j in 1:p) {
th1[j] = mover(j,th0)
if(j<3){
f1 = likelihood(th1, data)+prior(th1, py1, py2, pc1, pc2, t1, t2) ##############* PRIOR
u = runif(1) < exp(f1 - f0)*(dgamma(th0[j], shape=scale[j], rate=(scale[j]/th1[j])) / dgamma(th1[j], shape=scale[j], rate=(scale[j]/th0[j])))
}else{
if(th1[3]<NNodes && th1[3]>=2){
f1 = likelihood(th1, data)+prior(th1, py1, py2, pc1, pc2, t1, t2) ###############*PRIOR
}else{
f1=log(0)
}
u = runif(1) < exp(f1 - f0)
}
if (u){
th0[j] = th1[j]
f0 = f1
}
else {
th0[j] = th0[j]
f0 =f0
}
vth[i, j] = th0[j]
vth[i,p+1] = f0
arate[j] = arate[j] + u
}
# No longer necessary; user won't see comprehensible output
#if((i==m*0.1)|(i==m*0.2)|(i==m*0.3)|(i==m*0.4)|(i==m*0.5)|(i==m*0.6)|(i==m*0.7)|(i==m*0.8)|(i==m*0.9)|(i==m)){
#cat((i/m)*100, "%","\n")
#}
}
arate = arate/m
stuff = list(par = vth[((burnin+1)/thinning):(m/thinning)*thinning,], accept = arate, tree = data$tree, mrca = data$mrca.nodes)
parnames<-c("py", "pc", "th")
# No longer necessary; user won't see comprehensible output
#cat("acceptance rates", "\n", parnames, "\n", stuff$accept, "\n")
class(stuff)<-"singlebgmyc"
return(stuff)
}
<file_sep>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# {{{ Header
# Code written by: <NAME>
# email: <EMAIL>
# phone: +1 (513) 426-4187
# github: https://github.com/EdRice4
#
# Initiation date|time: 08/12/2015|11:17:38
# }}}
# {{{ Imports
from subprocess import Popen, PIPE
from lxml import etree as ET
from random import randrange
from numpy import genfromtxt
from acor import acor
from re import sub
from shutil import move
from StringIO import StringIO
from glob import glob
import argparse
import os
# }}}
# {{{ jModelTest
class jModelTest(object):
    """ {{{ Docstrings
    Drive a jModelTest run and expose the selected model's parameters.

    Subclasses are expected to have set self._identifier and
    self._nexus_file before calling __init__.
    }}} """
    # {{{ add_args
    @staticmethod
    def add_args():
        """ {{{ Docstrings
        Register the "jMT" argument group (and its arguments) on the
        module-level arg_parser.
        }}} """
        group = arg_parser.add_argument_group(
            'jMT', 'Arguments for running jModelTest module.'
        )
        group.add_argument(
            'jMT', type=str, help='Path to jModelTest.jar.'
        )
    # }}}
    # {{{ __init__
    def __init__(self):
        """ {{{ Docstrings
        Run jModelTest for this nexus file and store the parameters of
        the selected model in self._jMT_parameters.
        }}} """
        self._jMT_out = 'jModelTest_{0}.out'.format(self._identifier)
        self.run_jmodeltest()
        self._jMT_parameters = self.r_jModelTest_parameters(self._jMT_out)
    # }}}
    # {{{ run_jmodeltest
    def run_jmodeltest(self):
        """ {{{ Docstrings
        Spawn jModelTest as a child process, wait for it to finish and
        save its standard output to self._jMT_out.
        }}} """
        # Build the command line; see the jModelTest documentation for the
        # meaning of each flag.
        # ::MODIFIABLE::
        # NOTE: Reformat this string (and, if necessary, the way the jar is
        # invoked) to suit your system.
        command = (
            'java -jar {0} -d {1} -t fixed -s 11 -i -g 4 -f '
            '-v -a -BIC -AIC -AICc -DT -tr {2}'
        ).format(args.jMT, self._nexus_file, args.no_proc)
        process = Popen(
            command.split(), stderr=PIPE, stdout=PIPE,
            universal_newlines=True
        )
        # Block until the run completes; stderr is captured but unused.
        # TODO: Exception handling.
        stdout_text, _ = process.communicate()
        with open(self._jMT_out, 'w') as report:
            report.write(stdout_text)
    # }}}
    # {{{ r_jModelTest_output
    def r_jModelTest_output(self, jModelTest_file):
        """ {{{ Docstrings
        Read the jModelTest output file and return the header line (the
        variable names) and the record line (their values) of the first
        model listed in the "::Best Models::" block.
        NOTE: No comparison between selection criteria is attempted; the
        model printed first is simply taken.
        }}} """
        with open(jModelTest_file, 'r') as handle:
            report = handle.readlines()
        # Locate the block of selected models.
        marker = report.index('::Best Models::\n')
        # Two lines below the marker sit the variable names; two lines
        # further down, the corresponding values.
        header_line = report[marker + 2]
        value_line = report[marker + 4]
        return header_line, value_line
    # }}}
    # {{{ r_jModelTest_variables
    def r_jModelTest_variables(self, variables):
        """ {{{ Docstrings
        Parse the tab-delimited header line into a clean list of variable
        names, dropping empty fields and surrounding whitespace.
        }}} """
        return [field.strip() for field in variables.split('\t') if field]
    # }}}
    # {{{ r_jModelTest_values
    def r_jModelTest_values(self, values):
        """ {{{ Docstrings
        Parse the record line into a clean list of values. The line is
        split on spaces (after converting tabs) because its tab layout
        does not mirror the header line's; the leading selection-criterion
        token is discarded so the result aligns with the variable names.
        }}} """
        fields = [f for f in values.replace('\t', ' ').split(' ') if f]
        return [f.strip() for f in fields[1:]]
    # }}}
    # {{{ r_jModelTest_parameters
    def r_jModelTest_parameters(self, jModelTest_file):
        """ {{{ Docstrings
        Combine the r_jModelTest_* helpers into a single dictionary
        mapping each variable name to its value.
        }}} """
        header_line, value_line = self.r_jModelTest_output(jModelTest_file)
        names = self.r_jModelTest_variables(header_line)
        entries = self.r_jModelTest_values(value_line)
        return dict(zip(names, entries))
    # }}}
# }}}
# {{{ Garli
class Garli(jModelTest):
    """ {{{ Docstrings
    Build a garli configuration from the jModelTest-selected model and
    run garli.
    }}} """
    # {{{ models
    # Dictionary mapping each substitution model to its pertinent garli
    # parameters: [ratematrix, statefrequencies].
    # NOTE: TM1ef and TM1 are missing the 'I' string because all occurrences
    # of 'I' and 'G' are later removed from the selected model to make the
    # number of models more tractable; otherwise four distinct entries
    # would be needed per model (plain, +I, +G, +I+G).
    models = {
            'JC': ['1rate', 'equal'],
            'F81': ['1rate', 'estimate'],
            'K80': ['2rate', 'equal'],
            'HKY': ['2rate', 'estimate'],
            'TrNef': ['(0 1 0 0 2 0)', 'equal'],
            'TrN': ['(0 1 0 0 2 0)', 'estimate'],
            'TPM1': ['(0 1 2 2 1 0)', 'equal'],
            'TPM1uf': ['(0 1 2 2 1 0)', 'estimate'],
            'TPM2': ['(0 1 0 2 1 2)', 'equal'],
            'TPM2uf': ['(0 1 0 2 1 2)', 'estimate'],
            'TPM3': ['(0 1 2 0 1 2)', 'equal'],
            'TPM3uf': ['(0 1 2 0 1 2)', 'estimate'],
            'K3P': ['(0 1 2 2 1 0)', 'equal'],
            'K3Puf': ['(0 1 2 2 1 0)', 'estimate'],
            'TM1ef': ['(0 1 2 2 3 0)', 'equal'],  # Remove 'I' for translate
            'TM1': ['(0 1 2 2 3 0)', 'estimate'],  # Remove 'I' for translate
            'TM2ef': ['(0 1 0 2 3 2)', 'equal'],
            'TM2': ['(0 1 0 2 3 2)', 'estimate'],
            'TM3ef': ['(0 1 2 0 3 2)', 'equal'],
            'TM3': ['(0 1 2 0 3 2)', 'estimate'],
            'TVMef': ['(0 1 2 3 1 4)', 'equal'],
            'TVM': ['(0 1 2 3 1 4)', 'estimate'],
            'SYM': ['6rate', 'equal'],
            'TR': ['6rate', 'estimate']  # Remove 'G' for translate
            }
    # }}}
    # {{{ add_args
    @staticmethod
    def add_args():
        """ {{{ Docstrings
        Add argument group "garli" to namespace and subsequent pertinent
        arguments to aforementioned group.
        }}} """
        args_garli = arg_parser.add_argument_group(
                'garli', 'Arguments for running garli module.'
                )
        args_garli.add_argument(
                '-g', '--garli', help='Run garli analysis.',
                action='store_true'
                )
        args_garli.add_argument(
                '--bstr', type=int, help=(
                        '# of bootstrap replications for garli analysis, if '
                        'applicable.'
                        ),
                default=0
                )
    # }}}
    # {{{ __init__
    def __init__(self):
        """ {{{ Docstrings
        Write a garli configuration reflecting the selected model and run
        garli. Requires self._identifier, self._nexus_file and
        self._jMT_parameters (set by jModelTest.__init__) to exist.
        }}} """
        self._garli_out = 'garli_{0}'.format(self._identifier)
        garli_conf = self.r_garli_conf()
        self.w_garli_conf(garli_conf)
        self.run_garli()
    # }}}
    # {{{ r_garli_conf
    def r_garli_conf(self):
        """ {{{ Docstrings
        Read the garli.conf template file ('Garli_standard.conf') into a
        list of whitespace-stripped lines.
        }}} """
        with open('Garli_standard.conf', 'r') as garli_conf:
            garli_conf = garli_conf.readlines()
        # Strip leading and trailing whitespace characters on every line
        garli_conf = [line.strip() for line in garli_conf]
        return garli_conf
    # }}}
    # {{{ edit_garli_conf
    def edit_garli_conf(self, garli_conf, lines_to_edit, values_to_insert):
        """ {{{ Docstrings
        Return a modified garli configuration, given the original file (as
        a list of lines), the exact text of the lines to be edited, and
        the values to append to them.
        'lines_to_edit' and 'values_to_insert' must be in corresponding
        order: the i-th value is appended to the i-th named line.
        }}} """
        for i, j in zip(lines_to_edit, values_to_insert):
            # Locate the line to edit; raises ValueError if the template
            # does not contain it verbatim.
            line = garli_conf.index(i)
            # Append value
            garli_conf[line] = '{0} {1}'.format(garli_conf[line], j)
        return garli_conf
    # }}}
    # {{{ w_garli_conf
    def w_garli_conf(self, garli_conf):
        """ {{{ Docstrings
        Modify the garli.conf template (read in as a list of lines) to
        reflect the parameters of the model selected by jModelTest,
        utilizing edit_garli_conf, and write the result to 'garli.conf'.
        }}} """
        # Get the model selected
        model_selected = self._jMT_parameters['Model']
        # Check if model includes gamma distribution
        het = '+G' in model_selected
        # Check if model includes proportion of invariant sites
        inv = '+I' in model_selected
        # Remove these markers; they conflict with the Garli models
        # dictionary. NOTE: Python-2-only str.translate signature.
        model_selected = model_selected.translate(None, '+IG')
        # Variables in garli.conf to edit; see
        # https://molevol.mbl.edu/index.php/GARLI_Configuration_Settings
        # for explanation of pertinent variables
        # ::MODIFIABLE::
        # NOTE: To modify variables not currently specified here, delete
        # the value in the template file so the line appears as the others
        # and add the corresponding value, in the corresponding position,
        # to garli_values.
        garli_variables = [
                'datafname =', 'ofprefix =', 'ratematrix =',
                'statefrequencies =', 'ratehetmodel =', 'numratecats =',
                'invariantsites ='
                ]
        # Values of variables to insert
        garli_values = [
                self._nexus_file, self._identifier,
                Garli.models[model_selected][0],
                Garli.models[model_selected][1]
                ]
        # Mirror the gamma-distribution setting of the selected model
        if het:
            garli_values.extend(['gamma', '4'])
        else:
            garli_values.extend(['none', '1'])
        # Mirror the proportion-invariant setting of the selected model
        if inv:
            garli_values.append('estimate')
        else:
            garli_values.append('none')
        # Append values to respective variables
        garli_params = self.edit_garli_conf(
                garli_conf, garli_variables, garli_values
                )
        # Write modified garli.conf
        # NOTE: The configuration is initially written as "garli.conf" as
        # that is how the MPI version of Garli expects it to be named. It
        # is later renamed uniquely in NexusFile/clean_up_dir.
        with open('garli.conf', 'w') as garli_input:
            # Restore the newline stripped by r_garli_conf on every line
            garli_input.writelines(line + '\n' for line in garli_params)
    # }}}
    # {{{ run_garli
    def run_garli(self):
        """ {{{ Docstrings
        Run garli by spawning a child process, utilizing the "Popen"
        function provided by the "subprocess" python module, and wait for
        it to complete.
        }}} """
        # Specify child process, including any pertinent arguments
        # ::MODIFIABLE::
        # NOTE: You may have to change the manner in which garli is called,
        # depending on your system.
        # Do not run a heinous number of garli threads; cap at five.
        garli = 'Garli -{0}'.format(min(args.no_proc, 5))
        # Spawn child process
        garli_run = Popen(
                garli.split(), stderr=PIPE, stdout=PIPE,
                universal_newlines=True
                )
        # NOTE: Standard output need not be written to a file; garli
        # handles this automatically.
        # Wait until the process has completed
        garli_run.communicate()
    # }}}
# }}}
# {{{ BEAST
class BEAST(Garli):
    """ {{{ Docstrings
    Run BEAST and store parameters associated with output.
    NOTE: Like the rest of this file, written for Python 2 (relies on
    str.translate(None, ...) and list-returning zip/map/filter).
    }}} """
    # {{{ add_args
    @staticmethod
    def add_args():
        """ {{{ Docstrings
        Add argument group "BEAST" to namespace and subsequent pertinent
        arguments to aforementioned group.
        }}} """
        args_BEAST = arg_parser.add_argument_group(
                'BEAST', 'Arguments for running BEAST module.'
                )
        args_BEAST.add_argument(
                'BEAST', type=str, help='Path to beast.jar.'
                )
        args_BEAST.add_argument(
                '--MCMC_BEAST', type=int, help=(
                        'Length of MCMC chain for BEAST analysis.'
                        ),
                default=50000000)
        args_BEAST.add_argument(
                '--burnin_BEAST', type=int, help=(
                        'Burnin for BEAST analysis.'
                        ),
                default=10000000)
        args_BEAST.add_argument(
                '--log_every', type=int, help=(
                        'Sample interval for BEAST analysis. This value will '
                        'be utilized to determine the frequency with which '
                        'the ".state" and ".trees" files are written to.'
                        ),
                default=1000)
        args_BEAST.add_argument(
                '-t', '--threshold', type=int, help=(
                        'Run script in threshold mode (i.e. BEAST will '
                        'continue to run, creating separate output directory '
                        'for each respective run, if after previous run, the '
                        'effective sample size did not meet this threshold).'
                        ),
                default=0)
    # }}}
    # {{{ __init__
    def __init__(self):
        """ {{{ Docstrings
        Build the BEAST XML input file from the template and the
        jModelTest-selected model, then run BEAST. Requires
        self._identifier, self._sequence_name, self._nexus_file and
        self._jMT_parameters to already exist.
        }}} """
        self._BEAST_XML = 'BEAST_{0}.xml'.format(self._identifier)
        self._BEAST_out = 'BEAST_{0}.out'.format(self._identifier)
        BEAST_XML, BEAST_XML_ele_dict = self.parse_beast_xml()
        self.w_beast_submodel(BEAST_XML_ele_dict)
        self.w_beast_rates(BEAST_XML_ele_dict)
        self.w_beast_sequences(BEAST_XML_ele_dict)
        self.w_beast_parameters(BEAST_XML_ele_dict, BEAST_XML)
        self.run_beast()
    # }}}
    # {{{ JC_F81
    def JC_F81(self, xml_elements):
        """ {{{ Docstrings
        Function to handle setting of transition rates in BEAST XML input file
        for JC and F81 models given list of XML elements to edit.
        }}} """
        # Every transition rate is equal to "1.0"
        for i in xml_elements:
            i.text = '1.0'
    # }}}
    # {{{ K80_HKY
    def K80_HKY(self, xml_elements):
        """ {{{ Docstrings
        Function to handle setting of transition rates in BEAST XML input file
        for K80 and HKY models, given list of XML elements to edit.
        }}} """
        # Two distinct transition rates, one for transitions (AG, CT), the
        # other for transversions (everything else)
        for i in xml_elements:
            if 'rateAG.s:' in i.get('id') or 'rateCT.s:' in i.get('id'):
                i.text = self._jMT_parameters['titv']
            else:
                i.text = '1.0'
    # }}}
    # {{{ parse_beast_xml
    def parse_beast_xml(self):
        """ {{{ Docstrings
        Parses BEAST XML input file template ('BEAST_standard.xml')
        utilizing "lxml" python module; returns the ElementTree and a
        dictionary of the pertinent elements keyed by role.
        NOTE(review): the 'in element.get("id")' tests below assume every
        iterated element carries an 'id' attribute; an element without one
        would make get() return None and raise TypeError -- confirm the
        template guarantees this.
        }}} """
        # Initialize empty dictionary to store XML elements
        BEAST_XML_ele_dict = {}
        # Set parser to automatically remove any impertinent whitespace as
        # well as any comments, respectively
        XML_parser = ET.XMLParser(remove_blank_text=True, remove_comments=True)
        # Parse BEAST XML input file template
        BEAST_XML = ET.parse('BEAST_standard.xml', XML_parser)
        # Get root of tree ('beast') element
        BEAST_XML_ele_dict['root'] = BEAST_XML.getroot()
        # Get 'data' element where sequence information is stored
        BEAST_XML_ele_dict['data'] = BEAST_XML.xpath('data')[0]
        # Get 'run' element where information pertaining to BEAST parameters
        # is stored
        BEAST_XML_ele_dict['run'] = BEAST_XML.xpath('run')[0]
        # Get all pertinent subelements of run element
        for element in BEAST_XML_ele_dict['run'].iter():
            if element.tag == 'state':
                BEAST_XML_ele_dict['state'] = element
            if element.tag == 'substModel':
                BEAST_XML_ele_dict['substmodel'] = element
            if element.tag == 'siteModel':
                BEAST_XML_ele_dict['sitemodel'] = element
        for element in BEAST_XML_ele_dict['sitemodel'].iter():
            if 'gammaShape.s:' in element.get('id'):
                BEAST_XML_ele_dict['gamma'] = element
            if 'proportionInvariant.s:' in element.get('id'):
                BEAST_XML_ele_dict['inv'] = element
        for element in BEAST_XML_ele_dict['run'].iterfind('logger'):
            if 'tracelog' in element.get('id'):
                BEAST_XML_ele_dict['trace_log'] = element
            if 'treelog.t:' in element.get('id'):
                BEAST_XML_ele_dict['tree_log'] = element
        return(BEAST_XML, BEAST_XML_ele_dict)
    # }}}
    # {{{ calculate_ess
    def calculate_ess(self):
        """ {{{ Docstrings
        Calculates the effective sample size of the BEAST trace data,
        utilizing "genfromtxt" provided by the "numpy" python module and
        "acor" provided by the "acor" python module.
        NOTE(review): self._BEAST_ID is never assigned anywhere in this
        class (only _BEAST_XML and _BEAST_out are); presumably this should
        reference the BEAST trace-log file -- confirm before relying on
        threshold mode.
        }}} """
        # Read in data, ignoring comments, sample column, and header,
        # respectively; columns 1-16 are assumed to hold the logged
        # parameters -- TODO confirm against the tracelog layout.
        data = genfromtxt(
                self._BEAST_ID, comments='#', usecols=range(1, 17)
                )[1:]
        # Concatenate data by columns
        data = zip(*data)
        # Calculate autocorrelation times (and other statistics) for each
        # column
        stats = map(lambda x: acor(x), data)
        # Extract autocorrelation times from statistics
        auto_cor_times = zip(*stats)[0]
        # Calculate MCMC chain length
        # NOTE(review): args.burnin_BEAST defaults to 10000000, an absolute
        # iteration count, but is used here as if it were a fraction; with
        # the defaults (1 - burnin) is a large negative number -- confirm
        # the intended units of burnin_BEAST.
        chain_length = int(args.MCMC_BEAST * (1 - args.burnin_BEAST))
        # Calculate effective sample size
        eff_sample_size = map(lambda x: chain_length / x, auto_cor_times)
        return eff_sample_size
    # }}}
    # {{{ w_beast_submodel
    def w_beast_submodel(self, BEAST_XML_ele_dict):
        """ {{{ Docstrings
        Writes parameters (i.e. gamma and proportion invariant) of model
        selected by jModelTest to BEAST XML.
        }}} """
        # Get the model selected
        model_selected = self._jMT_parameters['Model']
        # Check if model includes gamma distribution
        het = '+G' in model_selected
        # Check if model includes proportion invariant sites
        inv = '+I' in model_selected
        # Remove these values; conflicts with Garli models dictionary
        # NOTE: Python-2-only str.translate signature.
        model_selected = model_selected.translate(None, '+IG')
        # If frequencies are estimated, do:
        # {{{ if estimate
        if Garli.models[model_selected][1] == 'estimate':
            ET.SubElement(
                    BEAST_XML_ele_dict['state'], 'parameter',
                    attrib={
                            'dimension': '4',
                            'id': 'freqParameter.s:{0}'.format(
                                    self._sequence_name
                                    ),
                            'lower': '0.0', 'name': 'stateNode',
                            'upper': '1.0'
                            }
                    ).text = '0.25'
            ET.SubElement(
                    BEAST_XML_ele_dict['substmodel'], 'frequencies',
                    attrib={
                            'id': 'estimatedFreqs.s:{0}'.format(
                                    self._sequence_name
                                    ),
                            'spec': 'Frequencies',
                            'frequencies': '@freqParameter.s:{0}'.format(
                                    self._sequence_name
                                    )
                            }
                    )
            freq_operator = ET.SubElement(
                    BEAST_XML_ele_dict['run'], 'operator',
                    attrib={
                            'id': 'FrequenciesExchanger.s:{0}'.format(
                                    self._sequence_name
                                    ),
                            'spec': 'DeltaExchangeOperator',
                            'delta': '0.01',
                            'weight': '0.01'
                            }
                    )
            ET.SubElement(
                    freq_operator, 'parameter',
                    attrib={
                            'idref': 'freqParameter.s:{0}'.format(
                                    self._sequence_name
                                    )
                            }
                    )
            ET.SubElement(
                    BEAST_XML_ele_dict['trace_log'], 'log',
                    attrib={
                            'idref': 'freqParameter.s:{0}'.format(
                                    self._sequence_name
                                    )
                            }
                    )
        # }}}
        # Else, if frequencies are equal, do:
        # {{{ elif equal
        elif Garli.models[str(model_selected)][1] == 'equal':
            ET.SubElement(
                    BEAST_XML_ele_dict['substmodel'], 'frequencies',
                    attrib={
                            'id': 'equalFreqs.s:{0}'.format(
                                    self._sequence_name
                                    ),
                            'spec': 'Frequencies',
                            'data': '@{0}'.format(
                                    self._sequence_name
                                    ),
                            'estimate': 'false'
                            }
                    )
        # }}}
        # If model includes gamma distribution, do:
        if het:
            BEAST_XML_ele_dict['sitemodel'].set('gammaCategoryCount', '4')
            BEAST_XML_ele_dict['gamma'].text = self._jMT_parameters['gamma']
        # If model includes proportion of invariant sites, do:
        if inv:
            BEAST_XML_ele_dict['inv'].text = self._jMT_parameters['pInv']
    # }}}
    # {{{ w_beast_rates
    def w_beast_rates(self, BEAST_XML_ele_dict):
        """ {{{ Docstrings
        Writes transition rates to BEAST XML.
        NOTE: The BEAST XML input file template, "Standard.xml" is configured
        to begin with a GTR model of DNA sequence evolution. This model is
        then "paired down" to reflect the model selected by jModelTest.
        }}} """
        # Initiate empty list to store pertinent XML elements
        xml_elements = []
        # Get the model selected by jModelTest, removing conflicting strings
        model_selected = self._jMT_parameters['Model'].translate(None, '+IG')
        # Iterate over subelements of the substmodel element, define each
        # respectively, and append to list
        for element in BEAST_XML_ele_dict['substmodel'].iter():
            if 'rateAC.s:' in element.get('id'):
                rateAC = element
                xml_elements.append(element)
            if 'rateAG.s:' in element.get('id'):
                rateAG = element
                xml_elements.append(element)
            if 'rateAT.s:' in element.get('id'):
                rateAT = element
                xml_elements.append(element)
            if 'rateCG.s:' in element.get('id'):
                rateCG = element
                xml_elements.append(element)
            if 'rateCT.s:' in element.get('id'):
                rateCT = element
                xml_elements.append(element)
            if 'rateGT.s:' in element.get('id'):
                rateGT = element
                xml_elements.append(element)
        # If model selected is JC/F81 or K80/HKY, pass xml_elements list to
        # respective function
        if model_selected == 'JC' or model_selected == 'F81':
            BEAST.JC_F81(self, xml_elements)
        elif model_selected == 'K80' or model_selected == 'HKY':
            BEAST.K80_HKY(self, xml_elements)
        # Else, set each rate individually
        else:
            rateAC.text = self._jMT_parameters['Ra']
            rateAG.text = self._jMT_parameters['Rb']
            rateAT.text = self._jMT_parameters['Rc']
            rateCG.text = self._jMT_parameters['Rd']
            rateCT.text = self._jMT_parameters['Re']
            rateGT.text = self._jMT_parameters['Rf']
    # }}}
    # {{{ get_sequence_range
    def get_sequence_range(self, nexus_file, start, end):
        """ {{{ Docstrings
        Returns the index of a user-specified start and end sequence,
        given these and the nexus file as a list of lines.
        The "start" and "end" arguments must match corresponding lines in
        the file exactly, including any whitespace characters.
        For instance, in parsing the nexus file, the line immediately before
        the data block (the section containing the sequences and their
        respective IDs) should be "matrix\n" and the line immediately below
        should be ";\n" to ensure that every sequence is parsed, nothing more,
        nothing less.
        }}} """
        # Get index of start
        range_start = nexus_file.index(start)
        # Get index of end
        range_end = nexus_file.index(end)
        return range_start, range_end
    # }}}
    # {{{ w_beast_sequences
    def w_beast_sequences(self, BEAST_XML_ele_dict):
        """ {{{ Docstrings
        Creates a subelement in the BEAST XML input file for each respective
        sequence, given a nexus file as a list and the 'data' XML element as an
        XML element.
        }}} """
        # Open nexus file in read mode
        with open(self._nexus_file, 'r') as nexus_file:
            # Read into list
            nexus_file = nexus_file.readlines()
        # Get start and end position of sequence block in nexus file
        sequence_start, sequence_end = self.get_sequence_range(
                nexus_file, 'matrix\n', ';\n'
                )
        # However, do not want to include these lines, just the lines between
        # them
        sequence_start += 1
        sequence_end -= 1
        # Iterate over lines in nexus file, defining each respective XML
        # element
        # NOTE(review): assumes each sequence line is '<id>\t<sequence>' --
        # confirm against the nexus files actually in use.
        for line in nexus_file[sequence_start:sequence_end]:
            # Define variables for readability
            # Split line by occurrence of tab "\t" character
            line = line.split('\t')
            # Get sequence ID
            sequence_id = line[0].strip()
            # Get sequence
            sequence = line[1].strip()
            # Create subelement
            ET.SubElement(
                    BEAST_XML_ele_dict['data'], 'sequence',
                    attrib={
                            'id': 'seq_{0}'.format(sequence_id),
                            'taxon': '{0}'.format(sequence_id),
                            'totalcount': '4',
                            'value': '{0}'.format(sequence)
                            }
                    )
    # }}}
    # {{{ w_beast_parameters
    def w_beast_parameters(self, BEAST_XML_ele_dict, BEAST_XML):
        """ {{{ Docstrings
        Finalizes parsing of the BEAST XML input file and writes the modified
        ElementTree to a separate file given the BEAST XML input file as an
        ElementTree.
        }}} """
        # Set BEAST run parameters
        # Frequency with which to save to state file
        BEAST_XML_ele_dict['state'].set('storeEvery', str(args.log_every))
        # MCMC chain length
        BEAST_XML_ele_dict['run'].set('chainLength', str(args.MCMC_BEAST))
        # MCMC burnin
        BEAST_XML_ele_dict['run'].set('preBurnin', str(args.burnin_BEAST))
        # Frequency with which to save to tree file
        BEAST_XML_ele_dict['tree_log'].set('logEvery', str(args.log_every))
        # Convert ElementTree to string in order to perform substitution
        beast_string = ET.tostring(BEAST_XML)
        # Substitute every occurrence of "replace_taxon" and "replace_ID" with
        # self._sequence_name and self._identifier, respectively
        beast_string = sub('replace_taxon', self._sequence_name, beast_string)
        beast_string = sub('replace_ID', self._identifier, beast_string)
        # Convert beast_string to file like object in order to re-parse it
        beast_file_obj = StringIO(beast_string)
        # Set parser to automatically remove any impertinent whitespace as
        # well as any comments, respectively
        XML_parser = ET.XMLParser(remove_blank_text=True, remove_comments=True)
        # Re-parse beast_file_obj into an ElementTree
        beast_xml = ET.parse(beast_file_obj, XML_parser)
        # Write beast_xml ElementTree to file, ensuring that it prints in a
        # human readable format and declaring pertinent XML parameters
        beast_xml.write(
                self._BEAST_XML, pretty_print=True, xml_declaration=True,
                encoding='UTF-8', standalone=False
                )
    # }}}
    # {{{ run_beast
    def run_beast(self):
        """ {{{ Docstrings
        Run BEAST by spawning child process, utilizing "Popen" function
        provided by "subprocess" python module, and wait for it to
        complete.
        }}} """
        # Specify child process, including any pertinent arguments; see BEAST
        # documentation for explanation of additional arguments
        # ::MODIFIABLE::
        # NOTE: If you would like to modify arguments passed to BEAST,
        # simply format the following string in a matter of your choosing.
        # You may also have to change the manner in which BEAST is called,
        # depending on your system.
        BEAST = (
                'java -jar {0} -working -seed {1} -threads {2} '
                '-beagle {3}'
                ).format(
                        args.BEAST, randrange(0, 999999999999), args.no_proc,
                        self._BEAST_XML
                        )
        # Spawn child process
        BEAST_run = Popen(
                BEAST.split(), stderr=PIPE, stdout=PIPE,
                )
        # Wait until process has completed to continue
        BEAST_run.communicate()
        # NOTE: Do not need to write standard output to file as BEAST
        # automatically handles this
        # If user specified threshold in command line arguments, run
        # resume_beast
        if args.threshold:
            self.resume_beast()
    # }}}
    # {{{ resume_beast
    def resume_beast(self):
        """ {{{ Docstrings
        Continue to run BEAST in resume mode as long as any parameter in the
        BEAST output file does not meet the effective sample size threshold,
        utilizing "Popen" function provided by "subprocess" python module.
        }}} """
        # Get current effective sample size
        effective_sample_size = self.calculate_ess()
        # Filter effective_sample_size to only include values less than the
        # threshold
        effective_sample_size = filter(
                lambda x: x < args.threshold, effective_sample_size
                )
        # While effective_sample_size is non-empty (i.e. some parameter is
        # still below the threshold) BEAST will continue to run
        while effective_sample_size:
            # Specify child process, including any pertinent arguments
            # ::MODIFIABLE::
            # NOTE: See run_beast above.
            # NOTE(review): the statefile is named '{_BEAST_out}.xml.state',
            # i.e. 'BEAST_<id>.out.xml.state', whereas BEAST's default
            # statefile is '<xml>.state' -- confirm this matches the file
            # the initial run actually produced.
            BEAST = (
                    'java -jar {0} -working -seed {1} '
                    '-threads {2} -beagle -resume -statefile {3}.xml.state {4}'
                    ).format(
                            args.BEAST, str(randrange(0, 999999999999)),
                            args.no_proc, self._BEAST_out, self._BEAST_XML
                            )
            # Spawn child process
            BEAST_run = Popen(
                    BEAST.split(), stderr=PIPE, stdout=PIPE,
                    )
            # Wait until process has completed to continue
            BEAST_run.communicate()
            # NOTE: Do not need to write standard output to file as BEAST
            # automatically handles this
            # Get effective sample size of run
            effective_sample_size = self.calculate_ess()
            # Filter effective_sample_size to only include values below
            # the threshold; if none are, an empty list is returned and
            # the loop terminates
            effective_sample_size = [
                    x for x in effective_sample_size if x < args.threshold
                    ]
    # }}}
# }}}
# {{{ bGMYC
class bGMYC(BEAST):
    """ {{{ Docstrings
    Drive a bGMYC analysis via Rscript.
    }}} """
    # {{{ add_args
    @staticmethod
    def add_args():
        """ {{{ Docstrings
        Register the "bGMYC" argument group (and its arguments) on the
        module-level arg_parser.
        }}} """
        group = arg_parser.add_argument_group(
                'bGMYC', 'Arguments for running bGMYC module.'
                )
        group.add_argument(
                '--MCMC_bGMYC', type=int, help=(
                        'Length of MCMC chain for bGMYC '
                        'analysis.'
                        ),
                default=50000000)
        group.add_argument(
                '--burnin_bGMYC', type=int, help=(
                        'Burnin for bGMYC analysis.'
                        ),
                default=10000000)
        group.add_argument(
                '--thinning', type=int, help=(
                        'Sample interval for bGMYC analysis.'
                        ),
                default=10000)
        group.add_argument(
                '--bGMYC_params', help=(
                        'If you wish to specify additional arguments for '
                        'the bGMYC. These parameters should be specified in a '
                        'tab delimited file named \'bGMYC_parameters.txt\' '
                        'along with the taxon name, where the taxon name '
                        'corresponds to the respecitve nexus file sans the '
                        '\'.nex\' extension. For instance, if I wanted to run '
                        'a bGMYC analysis on Periplaneta americana, the '
                        'American cockroach, and wanted to modify the \'t1\' '
                        'and \'start\' variables (see documentation provided '
                        'by Noah for explanation of parameters), then my file '
                        'would look like: P_americana.nex, P_americana.txt '
                        'and the .txt file would contain: '
                        'P_americana\\t-t1=32\\tstart1=0'
                        '\\tstart2=0\\tstart3=0.5 Notice how each value of '
                        'start vector must be specified seperately.'
                        ),
                action='store_true'
                )
    # }}}
    # {{{ r_bgmyc_parameters
    @staticmethod
    def r_bgmyc_parameters():
        """ {{{ Docstrings
        Parse the tab-delimited 'bGMYC_parameters.txt' file into a
        dictionary of the form:
        dictionary = {
                'Taxon' : [
                        '-bGMYC_parameter1',
                        '-bGMYC_parameter2'
                        ]
                'Taxon2' : [ ... ]
                }
        See the bGMYC documentation for an explanation of the parameters
        and their defaults. Static because it only needs to run once, not
        on every instantiation of the class.
        }}} """
        table = {}
        with open('bGMYC_parameters.txt', 'r') as handle:
            for raw_line in handle:
                # First tab-separated field is the taxon; the remainder
                # are its extra command-line parameters.
                fields = raw_line.strip().split('\t')
                table[fields[0]] = fields[1:]
        return table
    # }}}
    # {{{ __init__
    def __init__(self, bgmyc_parameters):
        """ {{{ Docstrings
        Kick off the bGMYC run for this taxon, passing along any extra
        per-taxon parameters.
        }}} """
        self.run_bgmyc(bgmyc_parameters)
    # }}}
    # {{{ run_bgmyc
    def run_bgmyc(self, bgmyc_parameters):
        """ {{{ Docstrings
        Spawn Rscript running bGMYC.R for this taxon and wait for it to
        finish. Extra per-taxon arguments, if any, are appended to the
        command line.
        }}} """
        # Per-taxon extras; an absent taxon simply contributes nothing.
        extra = bgmyc_parameters.get(self._sequence_name, [])
        # Build the command line; see the bGMYC documentation for the
        # meaning of each argument.
        # ::MODIFIABLE::
        # NOTE: Reformat this string (and, if necessary, the way Rscript is
        # invoked) to suit your system.
        command = (
                'Rscript --save bGMYC.R --args -id={0} -mcmc={1} -burnin={2} '
                '-thinning={3} -noproc={4} {5}'
                ).format(
                        self._identifier, args.MCMC_bGMYC, args.burnin_bGMYC,
                        args.thinning, args.no_proc, ' '.join(extra)
                        )
        process = Popen(
                command.split(), stderr=PIPE, stdout=PIPE
                )
        # Block until the analysis completes.
        process.communicate()
    # }}}
# }}}
# {{{ NexusFile
class NexusFile(bGMYC):
    """ {{{ Docstrings
    A class in which we will store the parameters associated with each
    given nexus file.  Instantiating a NexusFile runs the whole pipeline
    for that file via the chained parent-class initializers.
    }}} """

    # {{{ add_args
    @staticmethod
    def add_args():
        # Register nexus-related command-line arguments on the module-level
        # parser; called once, before parse_args().
        args_nex = arg_parser.add_argument_group(
            'Nexus', 'Arguments for parsing nexus files.'
        )
        args_nex.add_argument(
            '-b', '--batch', help=(
                'Run script in batch mode for multiple nexus files. '
                'Note: All nexus files should have the extension '
                '\'.nex\', NOT \'.nexus\' and the line immediately '
                'above the data block (section containing the '
                'sequences and their respective IDs) should read '
                '\'matrix\\n\' while the line immediately below '
                'should read \';\\n\'. Furthermore, if running in '
                'batch mode, ensure that the nexus files are the only '
                'files present in the directory containing the string '
                '\'nex\' in their name, including the extension.'
            ),
            action='store_true')
        args_nex.add_argument(
            '-np', '--no_proc', type=int, help=(
                'When running script in HPC environment, specify '
                'total number of processors requested so that the '
                'script has knowledge of the environment and can '
                'take full advantage of it.'
            ),
            default=1
        )
    # }}}

    # {{{ __init__
    def __init__(self, nexus_file):
        """ {{{ Docstrings
        Upon instantiating instance of class, run functions and store
        parameters.

        nexus_file -- path to the input nexus file; also used to derive
        the sequence name and a pseudo-unique run identifier.
        }}} """
        self._nexus_file = nexus_file
        # Sequence name is the file name minus its '.nex' extension.
        self._sequence_name = self._nexus_file.replace('.nex', '')
        # Random suffix keeps repeated runs on the same file from colliding.
        self._identifier = '{0}_{1}'.format(
            self._sequence_name, randrange(0, 999999999)
        )
        # Chain the parent-class initializers; each runs its pipeline stage.
        # NOTE(review): indentation reconstructed -- only the Garli stage is
        # assumed conditional on args.garli; confirm against upstream source.
        jModelTest.__init__(self)
        if args.garli:
            Garli.__init__(self)
        BEAST.__init__(self)
        bGMYC.__init__(self, bGMYC_parameters)
        self.clean_up_dir()
    # }}}

    # {{{ clean_up_dir
    def clean_up_dir(self):
        """ {{{ Docstrings
        After each respective run, clean up the directory, placing all
        pertinent files, including in/output files, into unique
        directory.
        }}} """
        if args.garli:
            # Rename garli.conf iff garli was ran
            move('garli.conf', self._garli_out)
        # Get current working directory
        cwd = os.getcwd()
        # Get all files in current working directory
        files_in_dir = os.listdir(cwd)
        # Filter files_in_dir to only include output files (those whose
        # names contain this run's identifier)
        output_files = filter(lambda x: self._identifier in x, files_in_dir)
        # Make unique directory to store output
        os.mkdir(self._identifier)
        # Move all output files in cwd to unique directory
        for dir_file in output_files:
            move(dir_file, self._identifier)
        # Move nexus file to unique directory
        move(self._nexus_file, self._identifier)
        # Delete all 'lock' files generated by Garli
        for fl in glob('.*lock*'):
            os.remove(fl)
    # }}}
# }}}
# {{{ ArgParser
# Main argument parser; each pipeline class registers its own argument
# group on this parser via its add_args() staticmethod.
arg_parser = argparse.ArgumentParser(
    prog='Pipeline',
    formatter_class=argparse.ArgumentDefaultsHelpFormatter,
    description=(
        'A modular python script providing the backbone for '
        'customizable, ad hoc pipeline analyses. For instance, '
        'originally developed to function between jModelTest, Garli, '
        'BEAST and bGMYC. Also provides batch functionality for '
        'relatively large datasets and supports HPC environments.'
    )
)
# Run add_args for each class when passing '-h' flag and prior to instantiating
# instances of any class
if __name__ == '__main__':
    jModelTest.add_args()
    Garli.add_args()
    BEAST.add_args()
    bGMYC.add_args()
    NexusFile.add_args()
    # Parse args into namespace
    # NOTE(review): module-level code below reads `args` unconditionally, so
    # this script is only usable when run directly, not when imported.
    args = arg_parser.parse_args()
# }}}
# {{{ Run non "add_args" staticmethods
# If user specified additional bGMYC parameters, read the per-taxon
# parameter file; otherwise fall back to an empty mapping so lookups by
# taxon name simply yield no extra arguments.
if args.bGMYC_params:
    bGMYC_parameters = bGMYC.r_bgmyc_parameters()
else:
    bGMYC_parameters = {}
# }}}
# {{{ Instantiate instances of NexusFile class
# If batch "-b or --batch" specified, run every nexus file found in the
# current working directory; otherwise prompt the user for each file.
if args.batch:
    # Get current working directory
    cwd = os.getcwd()
    # Get all files in current working directory
    files_in_dir = os.listdir(cwd)
    # Filter files_in_dir to only include nexus files
    nexus_files = filter(lambda x: '.nex' in x, files_in_dir)
    # Instantiating a NexusFile runs the entire pipeline for that file
    for nexus_file in nexus_files:
        NexusFile(nexus_file)
else:
    # Interactive mode: collect one path per requested run
    nexus_files = []
    print('The program will prompt you for the path to each sequence file.')
    # Prompt user for number of runs (raw_input: this script is Python 2)
    no_runs = raw_input('How many runs would you like to perform? ')
    # Prompt user for path of nexus file for each run
    for i in range(int(no_runs)):
        nexus_files.append(raw_input('Path to sequence file: '))
    # Instantiate instances of NexusFile class
    for nexus_file in nexus_files:
        NexusFile(nexus_file)
# }}}
<file_sep>#!/usr/bin/env Rscript
# Run bGMYC over a list of trees in parallel, splitting the trees across
# `noproc` worker processes in a SOCK cluster and flattening the results.
#
# Arguments:
#   multiphylo              list/multiPhylo of input trees (same tip set)
#   mcmc, burnin, thinning  MCMC chain settings
#   noproc                  number of worker processes to spawn
#   py1/py2, pc1/pc2, t1/t2 uniform prior bounds (Yule, coalescent, threshold)
#   scale, start            proposal scales and chain starting values
#   sampler/likelihood/prior functions driving the MCMC
#
# Returns: a flat list of per-tree sampler results (class "multibgmyc" on
# each worker's chunk before flattening).
bgmyc.multiphylo.mpi <- function(
    multiphylo, mcmc, burnin, thinning, noproc,
    py1=0, py2=2, pc1=0, pc2=2, t1=2, t2=51,
    scale=c(20, 10, 5), start=c(1, 0.5, 50),
    sampler=bgmyc.gibbs.mpi, likelihood=bgmyc.lik,
    prior=bgmyc.prior
    ) {
    # Get length of data for informative output
    ntre <- length(multiphylo)
    # Generate SOCK cluster
    cl <- makeCluster(noproc)
    # Partition data amongst processors
    trees.split <- clusterSplit(cl, multiphylo)
    # Print informative output for user
    cat("You are running a multi tree analysis on", ntre, "trees.\n")
    # BUGFIX: index the first tree, then take its tip labels. The previous
    # form, multiphylo$tip.label[[1]], fails because a list of trees has no
    # element named "tip.label" (NULL[[1]] is a subscript error).
    cat("These trees each contain", length(multiphylo[[1]]$tip.label), "tips.\n")
    cat("The Yule process rate change parameter has a uniform prior ranging from", py1, "to", py2, ".\n")
    cat("The coalescent process rate change parameter has a uniform prior ranging from", pc1, "to", pc2, ".\n")
    cat("The threshold parameter, which is equal to the number of species, has a uniform prior ranging from", t1, "to", t2, ". The upper bound of this prior should not be more than the number of tips in your trees.\n")
    cat("The MCMC will start with the Yule parameter set to", start[1], ".\n")
    cat("The MCMC will start with the coalescent parameter set to", start[2], ".\n")
    cat("The MCMC will start with the threshold parameter set to", start[3], ". If this number is greater than the number of tips in your tree, an error will result.\n")
    cat("Given your settings for MCMC, burnin and thinning, your analysis will result in", ((mcmc-burnin)/thinning)*ntre, "samples being retained.\n")
    cat("Given your settings for MPI, your analysis will result in:\n")
    for(i in seq_along(trees.split)) {
        cat(length(trees.split[[i]]), "samples being sent to slave", i, "\n ")
    }
    # Worker function: runs the sampler sequentially over its chunk of trees.
    bgmyc.mpi <- function(trees, ...) {
        # Initialize empty list for output
        outputlist <- list()
        for (i in 1:length(trees)) {
            data <- bgmyc.dataprep(trees[[i]])
            # (removed unused local NNodes <- data$tree$Nnode)
            outputlist[[i]] <- sampler(
                data, m=mcmc, burnin, thinning, py1,
                py2, pc1, pc2, t1, t2, scale, start,
                likelihood, prior
            )
        }
        class(outputlist) <- "multibgmyc"
        return(outputlist)
    }
    # Export the functions each worker needs into its environment
    funcs2send <- c(
        'bgmyc.gibbs.mpi', 'bgmyc.lik', 'bgmyc.prior',
        'bgmyc.dataprep', 'is.ultrametric', 'is.binary.tree',
        'branching.times'
    )
    clusterExport(cl, funcs2send)
    # Run the worker function over each chunk in parallel
    output <- parLapply(
        cl, trees.split, bgmyc.mpi, mcmc, burnin,
        thinning, py1, py2, pc1, pc2, t1, t2, scale, start,
        likelihood, prior
    )
    # Shutdown SOCK cluster
    stopCluster(cl)
    # Collapse output by first level only (one list entry per tree)
    output <- unlist(output, recursive=FALSE)
    # Return output
    return(output)
}
| b7e83c43c1226c734132e8fc6b73663bb9d90594 | [
"Python",
"R"
] | 4 | R | EdRice4/Pipeline_Analysis | df313974ff9daefab26d1524d2d6b14364cf67cb | 4f10fe03a8bd9cab0f84a633e77a85595a0a0061 |
refs/heads/master | <file_sep>package test;
import java.awt.Container;
import java.awt.FlowLayout;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import javax.swing.JButton;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JTextField;
/**
 * Small Swing demo window with one numeric input. "Tính gt" computes N!
 * (factorial) and "Tính Căn" computes the sum of square roots 1..N; the
 * result is shown in the read-out text field.
 */
public class baitapGUI extends JFrame
{
    JLabel lblSoN;       // caption for the N input
    JTextField tfSoN;    // input: the number N
    JLabel lblKetqua;    // caption for the result field
    JTextField tfKetqua; // output: computed result
    JButton btnTinhgt;   // compute factorial
    JButton btnTinhcan;  // compute sum of square roots

    public baitapGUI()
    {
        lblSoN = new JLabel();
        lblSoN.setText("Số N");
        tfSoN = new JTextField(10);
        lblKetqua = new JLabel();
        lblKetqua.setText("Kết Quả");
        tfKetqua = new JTextField(10);
        btnTinhgt = new JButton();
        btnTinhgt.setText("Tính gt");
        btnTinhcan = new JButton();
        btnTinhcan.setText("Tính Căn");

        // Factorial of N. NOTE: float stores exact factorials only up to
        // about 13!; larger N silently lose precision.
        btnTinhgt.addActionListener(new ActionListener() {
            @Override
            public void actionPerformed(ActionEvent e) {
                int n = Integer.parseInt(tfSoN.getText());
                float tich = 1;
                for (int i = 1; i <= n; i++)
                {
                    tich = tich * i;
                }
                tfKetqua.setText(String.valueOf(tich));
            }
        });
        // Sum of sqrt(i) for i = 1..N.
        btnTinhcan.addActionListener(new ActionListener()
        {
            @Override
            public void actionPerformed(ActionEvent e) {
                int m = Integer.parseInt(tfSoN.getText());
                float tongcan = 0;
                for (int i = 1; i <= m; i++)
                {
                    tongcan += (float) Math.sqrt(i);
                }
                tfKetqua.setText(String.valueOf(tongcan));
            }
        });

        Container cont = getContentPane();
        setSize(300, 400);
        setLayout(new FlowLayout());
        cont.add(lblSoN);
        cont.add(tfSoN);
        cont.add(lblKetqua);
        cont.add(tfKetqua);
        cont.add(btnTinhgt);
        cont.add(btnTinhcan);
        // BUGFIX: without this, the JVM keeps running after the window is
        // closed (the default close operation only hides the frame).
        setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
        setVisible(true);
    }

    public static void main(String[] args) {
        new baitapGUI();
    }
}
<file_sep>package test;
import java.awt.Container;
import java.awt.Desktop.Action;
import java.awt.FlowLayout;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.util.stream.Stream;
import javax.swing.JButton;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JOptionPane;
import javax.swing.JTextField;
/**
 * Swing demo: reads N and shows the harmonic sum 1/1 + 1/2 + ... + 1/N.
 */
public class Tong1chiaN extends JFrame implements ActionListener {
    JLabel lblsoN;        // caption for the N input
    JTextField tfsoN;     // input: the number N
    JLabel lblketqua;     // caption for the result field
    JTextField tfketqua;  // output: computed harmonic sum
    JButton btnTongchiaN; // trigger button

    public Tong1chiaN()
    {
        lblsoN = new JLabel();
        lblsoN.setText("Số N");
        tfsoN = new JTextField(20);
        lblketqua = new JLabel();
        lblketqua.setText("Kết quả ");
        tfketqua = new JTextField(20);
        btnTongchiaN = new JButton();
        btnTongchiaN.setText("Tổng 1/N");
        btnTongchiaN.addActionListener(this);

        Container cont = this.getContentPane();
        setSize(400, 400);
        cont.setLayout(new FlowLayout());
        cont.add(lblsoN);
        cont.add(tfsoN);
        cont.add(lblketqua);
        cont.add(tfketqua);
        cont.add(btnTongchiaN);
        // Exit the application when the window is closed.
        setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
        setVisible(true);
    }

    /**
     * Harmonic sum 1/1 + 1/2 + ... + 1/n (0 for n <= 0).
     *
     * BUGFIX: the original accumulated `1/i` with INTEGER division (0 for
     * every i > 1) and initialized the sum to 1 instead of 0, so it reported
     * 2.0 for every N >= 1. Using 1.0f/i and a zero start fixes both.
     */
    static float harmonic(int n)
    {
        float sum = 0;
        for (int i = 1; i <= n; i++)
        {
            sum += 1.0f / i;
        }
        return sum;
    }

    public void actionPerformed(ActionEvent e)
    {
        int n = Integer.parseInt(tfsoN.getText());
        tfketqua.setText(String.valueOf(harmonic(n)));
    }

    public static void main(String[] args) {
        Tong1chiaN obj = new Tong1chiaN();
    }
}
<file_sep>package test;
import java.awt.Container;
import java.awt.FlowLayout;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import javax.swing.ButtonGroup;
import javax.swing.JButton;
import javax.swing.JComboBox;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JOptionPane;
import javax.swing.JPasswordField;
import javax.swing.JRadioButton;
import javax.swing.JTextField;
/**
 * Registration-form demo: username/password fields, gender radio buttons,
 * a home-town combo box, and a Register button that echoes the entered
 * values back in a message dialog.
 *
 * NOTE(review): the strings "<PASSWORD>Word" and "<NAME>ng" look like
 * dataset-anonymization placeholders, not the original labels — confirm.
 */
public class StudentGUI extends JFrame
{
    JLabel lbluser;
    JTextField tfuser;
    JLabel lblpass;
    JPasswordField tfpass;
    JLabel lblgender;
    JRadioButton rdomale;
    JRadioButton rdofemale;
    ButtonGroup bg; // groups the two gender radios so only one can be selected
    JComboBox cbohome;
    JButton btnregist;

    public StudentGUI ()
    {
        lbluser = new JLabel("UserName");
        tfuser = new JTextField(10);
        lblpass = new JLabel("<PASSWORD>Word");
        tfpass = new JPasswordField(10);
        lblgender = new JLabel("Gender");
        rdomale = new JRadioButton("Male");
        rdofemale = new JRadioButton("Female");
        bg = new ButtonGroup();
        bg.add(rdomale);
        bg.add(rdofemale);
        cbohome = new JComboBox();
        cbohome.addItem("Đà Nẵng");
        cbohome.addItem("Huế");
        cbohome.addItem("Hà Nội");
        cbohome.addItem("<NAME>ng");
        btnregist = new JButton();
        btnregist.setText("Register");
        btnregist.addActionListener(new ActionListener() {
            @Override
            public void actionPerformed(ActionEvent e) {
                // Determine the selected gender
                String gt="";
                if (rdomale.isSelected()==true) gt=rdomale.getText();
                else gt= rdofemale.getText();
                // Read the value of the currently selected combobox item
                String home = (String) cbohome.getSelectedItem();
                JOptionPane.showMessageDialog(null,"User is "+tfuser.getText()
                +"\nAnd Pass: "+tfpass.getText()
                +"\nGender: "+gt
                +"\nHome: "+home);
            }
        });
        // Create the container and lay out all components left-to-right
        Container cont = getContentPane();
        setSize(400, 300);
        setLayout(new FlowLayout());
        cont.add(lbluser);
        cont.add(tfuser);
        cont.add(lblpass);
        cont.add(tfpass);
        cont.add(lblgender);
        cont.add(rdomale);
        cont.add(rdofemale);
        cont.add(cbohome);
        cont.add(btnregist);
        setVisible(true);
    }

    public static void main(String[] args) {
        new StudentGUI();
    }
}
<file_sep>package test;
import java.awt.Container;
import java.awt.FlowLayout;
import java.awt.Label;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import javax.swing.JButton;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JOptionPane;
import javax.swing.JTextField;
/**
 * Minimal registration window: one field for a tax code ("Mã Thuế") and a
 * register button that echoes the entered value in a dialog.
 */
public class ThueGUI extends JFrame implements ActionListener
{
    JLabel lblMaThue;    // caption for the tax-code field
    JTextField tfMaThue; // tax-code input
    JButton btnDK;       // "register" button

    public ThueGUI() {
        lblMaThue = new JLabel("<NAME>");
        tfMaThue = new JTextField(40);
        btnDK = new JButton("Đăng Ký");
        // This frame handles the button's clicks itself.
        btnDK.addActionListener(this);

        Container cont = this.getContentPane();
        setSize(500, 500);
        cont.setLayout(new FlowLayout());
        cont.add(lblMaThue);
        cont.add(tfMaThue);
        cont.add(btnDK);
        setVisible(true);
    }

    // Fired when the register button is pressed.
    // NOTE(review): showConfirmDialog shows Yes/No/Cancel buttons; a plain
    // showMessageDialog may have been intended here — confirm.
    public void actionPerformed(ActionEvent e)
    {
        JOptionPane.showConfirmDialog(null, "Hello" + tfMaThue.getText());
    }

    public static void main(String[] args) {
        ThueGUI obj = new ThueGUI();
    }
}
<file_sep>package test;
import java.awt.Container;
import java.awt.FlowLayout;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import javax.swing.JButton;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JTextField;
/**
 * Swing demo: reads N and shows sqrt(1) + sqrt(2) + ... + sqrt(N).
 */
public class Tongcan extends JFrame implements ActionListener
{
    JLabel lblsoN;       // caption for the N input
    JTextField tfsoN;    // input: the number N
    JLabel lblketqua;    // caption for the result field
    JTextField tfketqua; // output: computed sum
    JButton btnTongcan;  // trigger button

    public Tongcan()
    {
        lblsoN = new JLabel("Số N");
        tfsoN = new JTextField(20);
        lblketqua = new JLabel("Kết Quả ");
        tfketqua = new JTextField(20);
        btnTongcan = new JButton("Tổng căn");
        btnTongcan.addActionListener(this);

        Container cont = this.getContentPane();
        setSize(400, 400);
        cont.setLayout(new FlowLayout());
        cont.add(lblsoN);
        cont.add(tfsoN);
        cont.add(lblketqua);
        cont.add(tfketqua);
        cont.add(btnTongcan);
        setVisible(true);
    }

    // Compute and display the sum of square roots up to N.
    public void actionPerformed(ActionEvent ev)
    {
        int bound = Integer.parseInt(tfsoN.getText());
        float sum = 0;
        for (int i = 1; i <= bound; i++)
        {
            sum += (float) Math.sqrt(i);
        }
        tfketqua.setText(String.valueOf(sum));
    }

    public static void main(String[] args) {
        Tongcan obj = new Tongcan();
    }
}
| 763e25f201189482b51194b9d34f5f4e8a16155c | [
"Java"
] | 5 | Java | btlam18it2/backup_TheGUI | 8ecf519fcd28bf98c31e4bd65deabf9fa8462407 | ebb7bcb7472406450e72adf0512784f3c4a0ea68 |
refs/heads/master | <repo_name>Collins1738/Palindrome<file_sep>/README.md
# Palindrome
This is a Python program that tells the user whether an input string is a palindrome.
<file_sep>/palindrome.py
x=""
while x != "exit":
x=input("Type in a word and I will tell you if this word is a palindrome: \n(type exit to end)\n")
y=[]
y.append(x)
z= x[::-1]
if x==z:
print ('This is a palindrome')
else:
print("This is not a palindrome")
print("\n\n\n\n\nKeep up!")
| aae3efd0734c69b1fb422fb070ec31efebd4b92b | [
"Markdown",
"Python"
] | 2 | Markdown | Collins1738/Palindrome | 425385fb8afb58f0e41f789aafefcc98baceb416 | e71407a4bce196fdcdddff1c59e92121d3d88af8 |
refs/heads/main | <repo_name>amarp86/enterprise-wc<file_sep>/src/ids-radio/ids-radio-group.d.ts
// Ids is a JavaScript project, but we define TypeScript declarations so we can
// confirm our code is type safe, and to support TypeScript users.
export default class IdsRadioGroup extends HTMLElement {
  /** Sets the dirty tracking feature on to indicate a changed field */
  dirtyTracker: boolean;

  /** Sets the radio group to disabled (doc fix: these comments previously said "checkbox") */
  disabled: boolean;

  /** Flips the radio group orientation to horizontal */
  horizontal: boolean;

  /** Sets the radio group label text */
  label: string;

  /** Sets the label font size (rarely used). BUGFIX: the 'sm ' literal had a stray trailing space */
  labelFontSize: 10 | 12 | 14 | 16 | 20 | 24 | 28 | 32 | 40 | 48 | 60 | 72 |
    'xs' | 'sm' | 'lg' | 'xl' | string | number;

  /** Sets the radio group label to required */
  labelRequired: boolean;

  /** Sets the validation check to use */
  validate: 'required' | string;

  /** Sets which events to fire validation on */
  validationEvents: 'change' | string;

  /** Sets the radio group `value` attribute */
  value: string;
}
<file_sep>/app/ids-icon/index.js
import IdsIcon from '../../src/ids-icon/ids-icon';
<file_sep>/app/ids-toolbar/example.js
// Supporting components
import IdsButton from '../../src/ids-button/ids-button';
import IdsInput from '../../src/ids-input/ids-input';
import IdsMenuButton from '../../src/ids-menu-button/ids-menu-button';
import IdsPopupMenu, {
IdsMenuGroup,
IdsMenuItem,
IdsMenuHeader,
IdsSeparator
} from '../../src/ids-popup-menu/ids-popup-menu';
import IdsText from '../../src/ids-text/ids-text';
<file_sep>/app/ids-input/example.js
// Supporting components
import IdsButton from '../../src/ids-button/ids-button';
// Wire the demo buttons so they toggle the state of the example input.
document.addEventListener('DOMContentLoaded', () => {
  const enableBtn = document.querySelector('#btn-input-enable');
  const disableBtn = document.querySelector('#btn-input-disable');
  const readonlyBtn = document.querySelector('#btn-input-readonly');
  // Fall back to a plain object so the handlers are safe if the input is absent.
  const field = document.querySelector('#input-toggle-state') || {};

  // Enable: clear both the disabled and readonly states.
  enableBtn?.addEventListener('click', () => {
    field.disabled = false;
    field.readonly = false;
  });

  // Disable the input.
  disableBtn?.addEventListener('click', () => {
    field.disabled = true;
  });

  // Make the input readonly.
  readonlyBtn?.addEventListener('click', () => {
    field.readonly = true;
  });
});
<file_sep>/src/ids-button/TODO.md
# Base Button
- [x] Default/Primary/Secondary/Tertiary styles
- [x] Pass "prototype" CSS classes (ids-button/ids-icon-button) from the corresponding JS classes.
- [x] Extra CSS classes (user defined)
- [x] Focusable prop
- [x] Disabled prop (figure out why it broke?)
- [x] Ripple effect (take coords instead of mouse directly)
- [ ] Dispatch events?
- [x] Why doesn't the linter like `get protoClasses()` if it works?
- [x] See if we can remove the button-level "disabled" and "focusable" in favor of one that's component-level
- [x] Lots of tests
- [ ] API method/prop for toggling the `.audible` class on the text span
- [ ] API method/prop for re-arranging the order of named text/icon slots?
## Icon Button
[x] get/set icon def
[x] get/set audible text/label def
## Toggle Button
[x] get/set pressed() state
[x] "unpressed" icon def
[x] "pressed" icon def
[x] "unpressed" text def
[x] "pressed" text def
## Menu Button
[ ] get/set value() from menu? (store/report menu's current value)
[ ] "is-open" state?
[ ] make it open a popup (maybe call this `IdsPopupButton`?)
<file_sep>/app/ids-switch/index.js
import IdsSwitch from '../../src/ids-switch/ids-switch';
<file_sep>/app/ids-popup/test-sandbox.js
import IdsPopup from '../../src/ids-popup/ids-popup';
import IdsInput from '../../src/ids-input/ids-input';
import './test-sandbox.scss';
// Cached element references, populated once on DOMContentLoaded (see below).
let popupEl;
let xyControlFieldsetLabelEl;
let alignmentDisplayEl;
/**
 * Points the popup at the chosen align target and relabels the X/Y
 * fieldset: "Coordinates" when unanchored, "Offsets" when anchored.
 * @param {Event} e the change event object
 */
function targetChangeHandler(e) {
  const selector = e.target.value;
  popupEl.alignTarget = selector;
  xyControlFieldsetLabelEl.textContent = selector ? 'Offsets' : 'Coordinates';
}
/**
 * Applies the selected horizontal alignment to the popup.
 * @param {Event} e the change event object
 */
function xAlignChangeHandler(e) {
  const { value } = e.target;
  popupEl.alignX = value;
}
/**
 * Applies the selected vertical alignment to the popup.
 * @param {Event} e the change event object
 */
function yAlignChangeHandler(e) {
  const { value } = e.target;
  popupEl.alignY = value;
}
/**
 * Applies the entered X coordinate/offset to the popup.
 * @param {Event} e the change event object
 */
function xPosChangeHandler(e) {
  const { value } = e.target;
  popupEl.x = value;
}
/**
 * Applies the entered Y coordinate/offset to the popup.
 * @param {Event} e the change event object
 */
function yPosChangeHandler(e) {
  const { value } = e.target;
  popupEl.y = value;
}
/**
 * Toggles the popup's animation setting from the checkbox state.
 * @param {Event} e the change event object
 */
function animatedChangeHandler(e) {
  const { checked } = e.target;
  popupEl.animated = checked;
}
/**
 * Toggles the popup's visibility from the checkbox state.
 * @param {Event} e the change event object
 */
function visibleChangeHandler(e) {
  const { checked } = e.target;
  popupEl.visible = checked;
}
// When the Popup page loads, we need to use the CSSOM to append some styles
// that can be modified by changing the attribute (tests the MutationObserver/ResizeObserver)
document.addEventListener('DOMContentLoaded', () => {
  // Cache the elements the change handlers above operate on.
  popupEl = document.querySelector('ids-popup');
  xyControlFieldsetLabelEl = document.querySelector('#xy-controls legend');
  alignmentDisplayEl = document.querySelector('#alignment-display');

  const centerTargetEl = document.querySelector('#center-point');
  const secondTargetEl = document.querySelector('#second-target');
  const thirdTargetEl = document.querySelector('#third-target');

  // This one is centered on the page, but needs a 100px top margin to shift it around
  centerTargetEl.style.marginTop = '100px';

  // This one is aligned 150px from the top and right viewport edges
  secondTargetEl.style.top = '150px';
  secondTargetEl.style.right = '150px';

  // This one is aligned 300px from the top and left viewport edges,
  // as well as allows the size to be controlled (tests some other math)
  thirdTargetEl.style.top = '300px';
  thirdTargetEl.style.left = '300px';
  thirdTargetEl.style.height = '50px';
  thirdTargetEl.style.width = '50px';

  // Setup align-target controls (radio group -> targetChangeHandler)
  const alignTargetGroupEl = document.querySelector('#align-targets');
  const targetRadioEls = alignTargetGroupEl.querySelectorAll('input[type="radio"]');
  targetRadioEls.forEach((radioEl) => {
    radioEl.addEventListener('change', targetChangeHandler);
  });

  // Setup x-alignment controls
  const xAlignGroupEl = document.querySelector('#x-alignments');
  const xAlignRadioEls = xAlignGroupEl.querySelectorAll('input[type="radio"]');
  xAlignRadioEls.forEach((radioEl) => {
    radioEl.addEventListener('click', xAlignChangeHandler);
  });

  // Setup y-alignment controls
  const yAlignGroupEl = document.querySelector('#y-alignments');
  const yAlignRadioEls = yAlignGroupEl.querySelectorAll('input[type="radio"]');
  yAlignRadioEls.forEach((radioEl) => {
    radioEl.addEventListener('click', yAlignChangeHandler);
  });

  // Setup X/Y coordinates/offsets controls
  const xControlEl = document.querySelector('#x-control');
  xControlEl.addEventListener('change', xPosChangeHandler);
  const yControlEl = document.querySelector('#y-control');
  yControlEl.addEventListener('change', yPosChangeHandler);

  // Setup toggles
  const animatedControlEl = document.querySelector('#animated-option');
  animatedControlEl.addEventListener('change', animatedChangeHandler);
  const visibleControlEl = document.querySelector('#visible-option');
  visibleControlEl.addEventListener('change', visibleChangeHandler);

  // Have a MutationObserver watch the popup for attribute changes,
  // causing an update to some control displays.
  // Only the first `align` attribute mutation per batch updates the display.
  const testMo = new MutationObserver((mutations) => {
    let changedOnce = false;
    mutations.forEach((mutation) => {
      if (changedOnce || mutation.type !== 'attributes') {
        return;
      }
      alignmentDisplayEl.textContent = `Edge order: "${popupEl.align}"`;
      changedOnce = true;
    });
  });
  testMo.observe(popupEl, {
    attributes: true,
    attributeFilter: ['align'],
    attributeOldValue: true,
    subtree: true
  });
});
<file_sep>/src/ids-loader/ids-loader.js
import {
IdsElement,
customElement,
scss,
mix,
props
} from '../ids-base/ids-element';
import { IdsEventsMixin } from '../ids-base/ids-events-mixin';
import { IdsThemeMixin } from '../ids-base/ids-theme-mixin';
import styles from './ids-loader.scss';
/**
 * IDS Loader Component — renders an indeterminate loading indicator.
 * @type {IdsLoader}
 * @inherits IdsElement
 * @mixes IdsEventsMixin
 * @mixes IdsThemeMixin
 * @part loader - the loader container element
 */
@customElement('ids-loader')
@scss(styles)
class IdsLoader extends mix(IdsElement).with(IdsEventsMixin, IdsThemeMixin) {
  /**
   * Call the constructor and then initialize
   */
  constructor() {
    super();
  }

  /**
   * Invoked each time the custom element is appended into a document-connected element.
   */
  connectedCallback() {
    super.connectedCallback();
  }

  /**
   * Return the properties we handle as getters/setters
   * (MODE/VERSION are theming properties handled via IdsThemeMixin)
   * @returns {Array} The properties in an array
   */
  static get properties() {
    return [props.MODE, props.VERSION];
  }

  /**
   * Create the Template for the contents
   * @returns {string} The template
   */
  template() {
    return `<div class="ids-loader" part="loader">
      <div class="ids-loader-indeterminate"></div>
    </div>`;
  }
}
export default IdsLoader;
<file_sep>/src/ids-base/ids-resize-mixin.d.ts
// Ids is a JavaScript project, but we define TypeScript declarations so we can
// confirm our code is type safe, and to support TypeScript users.
export class IdsResizeMixin {
  /** Elements currently watched for size changes by this component */
  observed: HTMLElement[];

  /** Sets up connection to the global ResizeObserver instance */
  setupResize(): void;

  /** Disconnects from the global ResizeObserver instance */
  disconnectResize(): void;

  /** Describes which base element contained by this component is being watched for Resize changes */
  resizeDetectionTarget(): HTMLElement;

  /** Adds an element to the global ResizeObserver instance for tracking, watched locally by this component */
  addObservedElement(el: HTMLElement): void;

  /** Removes an element watched locally by this component from the global ResizeObserver instance */
  removeObservedElement(el: HTMLElement): void;

  /** Removes all elements from the global ResizeObserver watched locally by this component */
  removeAllObservedElements(): void;

  /** Sets up connection to a shared MutationObserver instance */
  setupDetectMutations(): void;

  /** Disconnects from a shared MutationObserver instance */
  disconnectDetectMutations(): void;

  /** Describes whether or not this component should be allowed to detect mutation changes */
  shouldDetectMutations(): boolean;
}
<file_sep>/src/ids-upload-advanced/TODO.md
## TODO
- [ ] Add an option to prevent uploads from starting automatically after files are dropped or browsed.
- [x] Documentation and type files `xx.d.ts`
- [x] Tests (functional and e2e)
<file_sep>/src/ids-popup-menu/ids-popup-menu.js
import {
customElement,
mix,
scss,
props
} from '../ids-base/ids-element';
import { IdsRenderLoopItem, IdsRenderLoopMixin } from '../ids-render-loop/ids-render-loop-mixin';
import IdsMenu from '../ids-menu/ids-menu';
import IdsPopup from '../ids-popup/ids-popup';
import styles from './ids-popup-menu.scss';
import { IdsEventsMixin } from '../ids-base/ids-events-mixin';
// Attributes/properties exposed by this component (see `static get properties`).
const POPUPMENU_PROPERTIES = [
  props.TARGET,
  props.TRIGGER
];

// Valid values for the `trigger` setting; the first entry is the default.
const POPUPMENU_TRIGGER_TYPES = [
  'contextmenu',
  'click',
  'immediate'
];
/**
* IDS Popup Menu Component
* @type {IdsPopupMenu}
* @inherits IdsElement
* @mixes IdsRenderLoopMixin
*/
@customElement('ids-popup-menu')
@scss(styles)
class IdsPopupMenu extends mix(IdsMenu).with(IdsRenderLoopMixin, IdsEventsMixin) {
constructor() {
super();
this.state.trigger = POPUPMENU_TRIGGER_TYPES[0];
}
  /**
   * Return the properties we handle as getters/setters
   * (TARGET and TRIGGER; see POPUPMENU_PROPERTIES)
   * @returns {Array} The properties in an array
   */
  static get properties() {
    return POPUPMENU_PROPERTIES;
  }
/**
* Inner template contents
* @returns {string} The template
*/
template() {
const menuTemplate = IdsMenu.prototype.template.apply(this);
return `<ids-popup class="ids-popup-menu" type="menu">${menuTemplate}</ids-popup>`;
}
  /**
   * Custom-element lifecycle hook; runs when attached to the DOM.
   * @returns {void}
   */
  connectedCallback() {
    // Popupmenus start out closed.
    if (!this.hasAttribute('hidden')) {
      this.setAttribute('hidden', '');
    }
    this.shouldUpdate = true;

    // If this Popupmenu is a submenu, and no target is pre-defined,
    // align the menu against the parent menu item.
    // @TODO change this logic if Popup accepts HTMLElement
    if (this.parentMenuItem && !this.target) {
      this.target = this.parentMenuItem;
      this.popup.align = 'right, top';
    }

    IdsMenu.prototype.connectedCallback.apply(this);
  }
  /**
   * Sets up event handlers used in this menu.
   * @returns {void}
   */
  handleEvents() {
    // Inherit the base menu's event wiring first.
    IdsMenu.prototype.handleEvents.apply(this);

    // This handler runs whenever an item contained by the Popupmenu needs to
    // become focused (deferred one renderloop tick).
    const doFocusHandler = () => {
      this.rl.register(new IdsRenderLoopItem({
        duration: 1,
        timeoutCallback: () => {
          this.focusTarget?.focus();
        }
      }));
    };

    // In some situations, hide the menu when an item is selected
    // (groups flagged `keepOpen` keep the menu visible).
    this.onEvent('selected', this, (/** @type {any} */ e) => {
      const item = e.detail.elem;
      if (!item?.group?.keepOpen) {
        this.hide();
      }
    });

    // When the underlying Popup triggers it's "show" event, focus on the derived focusTarget.
    this.onEvent('show', this.container, doFocusHandler);

    // Set up all the events specifically-related to the "trigger" type
    this.refreshTriggerEvents();
  }
  /**
   * Sets up the connection to the global keyboard handler.
   * Key behavior differs for top-level menus vs. submenus (see below).
   * @returns {void}
   */
  handleKeys() {
    IdsMenu.prototype.handleKeys.apply(this);

    // Arrow Right on an item containing a submenu causes that submenu to open
    this.listen(['ArrowRight'], this, (/** @type {any} */ e) => {
      e.preventDefault();
      const thisItem = e.target.closest('ids-menu-item');
      if (thisItem.hasSubmenu) {
        thisItem.showSubmenu();
      }
    });

    // Arrow Left on a submenu item causes the submenu to close, as well as focus
    // on a parent menu item to occur.
    // NOTE: This will never occur on a top-level Popupmenu.
    if (this.parentMenu) {
      this.listen(['ArrowLeft'], this, (/** @type {any} */ e) => {
        e.preventDefault();
        this.hide();
        this.parentMenuItem.focus();
      });
    }

    // Escape closes the menu
    // (NOTE: This only applies to top-level Popupmenus)
    if (!this.parentMenu) {
      this.listen(['Escape'], this, (/** @type {any} */ e) => {
        if (this.hidden) {
          return;
        }
        e.preventDefault();
        e.stopPropagation();
        this.hide();

        // Since Escape cancels without selection, re-focus the button
        /* istanbul ignore next */
        if (this.target) {
          this.target.focus();
        }
      });
    }
  }
  /**
   * The popup is the outermost element in this component's shadow tree.
   * @readonly
   * @returns {IdsPopup} reference to the inner Popup component
   */
  get popup() {
    return this.shadowRoot.querySelector('ids-popup');
  }
  /**
   * Delegates to the inner IdsPopup's alignTarget.
   * @returns {any} [HTMLElement|undefined] reference to a target element, if applicable
   */
  get target() {
    return this.popup.alignTarget;
  }
  /**
   * Delegates to the inner IdsPopup's alignTarget.
   * @param {any} val [HTMLElement|string] reference to an element, or a string that will be used
   * as a CSS Selector referencing an element, that the Popupmenu will align against.
   */
  set target(val) {
    this.popup.alignTarget = val;
  }
  /**
   * @returns {string} the type of action that will trigger this Popupmenu
   * (one of POPUPMENU_TRIGGER_TYPES)
   */
  get trigger() {
    return this.state.trigger;
  }
/**
* @param {string} val a valid trigger type
*/
set trigger(val) {
let trueTriggerType = val;
if (!POPUPMENU_TRIGGER_TYPES.includes(val)) {
trueTriggerType = POPUPMENU_TRIGGER_TYPES[0];
}
this.state.trigger = trueTriggerType;
this.refreshTriggerEvents();
}
  /**
   * Causes events related to the Popupmenu's "trigger" style to be unbound/rebound
   * @private
   */
  /* istanbul ignore next */
  refreshTriggerEvents() {
    // Bail while updates are suspended (e.g. during construction).
    if (!this.shouldUpdate) {
      return;
    }

    // Remove any pre-existing trigger events
    const removeEventTargets = ['contextmenu.trigger', 'click.trigger'];
    removeEventTargets.forEach((eventName) => {
      const evt = this.handledEvents.get(eventName);
      if (evt) {
        this.detachEventsByName(eventName);
      }
    });

    // Based on the trigger type, bind new events
    const targetElem = this.target || window;
    switch (this.trigger) {
    case 'immediate':
      // @TODO
      break;
    case 'click':
      // Configure some settings for opening
      this.popup.align = 'bottom, left';
      this.popup.arrow = 'bottom';
      this.popup.y = 8;

      // Open/Close the menu when the trigger element is clicked
      // NOTE(review): detachAllEvents() removes every handler on this
      // component, not just trigger handlers — confirm this is intended.
      this.detachAllEvents();
      this.onEvent('click.trigger', targetElem, (/** @type {any} */e) => {
        if (e.currentTarget !== window) {
          e.preventDefault();
        }
        if (this.hidden) {
          this.show();
        } else {
          this.hide();
        }
      });
      break;
    default:
      // Standard `contextmenu` event behavior.
      // `contextmenu` events should only apply to top-level popupmenus.
      // (submenus open/close events are handled by their parent items)
      if (this.parentMenu) {
        break;
      }

      // Attach a contextmenu handler to the target element for opening the popup
      this.onEvent('contextmenu.trigger', targetElem, (/** @type {any} */e) => {
        e.preventDefault();
        e.stopPropagation();
        this.popup.x = e.pageX;
        this.popup.y = e.pageY;
        this.show();
      });
      break;
    }
  }
/**
* Attaches some events when the Popupmenu is opened.
* @private
* @returns {void}
*/
addOpenEvents() {
// Attach all these events on a Renderloop-staggered timeout
this.rl.register(new IdsRenderLoopItem({
duration: 1,
timeoutCallback: () => {
// Attach a click handler to the window for detecting clicks outside the popup.
// If these aren't captured by a popup, the menu will close.
this.onEvent('click.toplevel', window, () => {
this.hide();
});
this.hasOpenEvents = true;
}
}));
}
/**
* Detaches some events when the Popupmenu is closed.
* @private
* @returns {void}
*/
removeOpenEvents() {
if (!this.hasOpenEvents) {
return;
}
this.offEvent('click.toplevel', window);
this.hasOpenEvents = false;
}
/**
* @readonly
* @returns {boolean} true if the Popup Menu is currently being displayed
*/
get visible() {
return this.popup.visible;
}
/**
* Hides this menu and any of its submenus.
* @returns {void}
*/
hide() {
this.hidden = true;
this.popup.querySelector('nav')?.removeAttribute('role');
this.lastHovered = undefined;
// Hide the Ids Popup and all Submenus
this.popup.visible = false;
this.hideSubmenus();
this.removeOpenEvents();
}
/**
* @returns {void}
*/
show() {
// Trigger a veto-able `beforeshow` event.
let canShow = true;
const beforeShowResponse = (/** @type {any} */ veto) => {
canShow = !!veto;
};
this.triggerEvent('beforeshow', this, {
detail: {
elem: this,
response: beforeShowResponse
}
});
if (!canShow) {
return;
}
this.hidden = false;
this.popup.querySelector('nav')?.setAttribute('role', 'menu');
// Hide any "open" submenus (in the event the menu is already open and being positioned)
this.hideSubmenus();
// Show this popup
this.popup.visible = true;
this.addOpenEvents();
}
/**
 * Hides any "open" submenus within this menu structure, optionally ignoring a single
 * menu to "keep open".
 * @param {any} [focusedMenuItem] [IdsMenuItem] if provided, its submenu is skipped and
 * considered the "currently open" menu.
 * @returns {void}
 */
hideSubmenus(focusedMenuItem = undefined) {
  const keptOpen = focusedMenuItem?.hasSubmenu ? focusedMenuItem.submenu : undefined;
  this.submenus.forEach((submenu) => {
    const isKeptOpen = keptOpen && keptOpen.isEqualNode(submenu);
    if (!submenu.hidden && !isKeptOpen) {
      submenu.hide();
    }
  });
}
}
export default IdsPopupMenu;
export {
IdsMenuGroup,
IdsMenuHeader,
IdsMenuItem,
IdsSeparator
} from '../ids-menu/ids-menu';
<file_sep>/app/ids-fieldset/index.js
import IdsFieldset from '../../src/ids-fieldset/ids-fieldset';<file_sep>/src/ids-render-loop/README.md
# Ids RenderLoop
The IDS RenderLoop is a utility component that provides a single [`requestAnimationFrame`](https://developer.mozilla.org/en-US/docs/Web/API/window/requestAnimationFrame) instance to other components that may need to run asynchronous operations. The loop instance is made available to these components by way of a mixin.
The IDS RenderLoop exists to provide an alternative, performance-friendly way to run code that would otherwise require the use of `setTimeout/setInterval` or multiple `requestAnimationFrame` loops. When creating/using IDS components, using the RenderLoop for these purposes is preferable.
## Use Cases
- Running code after a specified duration. For example, if we need to run a `render()` method on a component in 100ms.
- Running code every (x) durations. For example, updating a counter by 1 every 5 seconds.
- Running multiple async operations all at once (for example, several Toast messages updating/dismissing at different timeout intervals).
## Terminology
*RenderLoop:* The main queuing system that wraps `requestAnimationFrame`.
*RenderLoopItem:* Describes an object that lives inside the RenderLoop's `items` array that is configured to trigger callback methods at certain intervals, at the end of its lifespan, or both.
*Tick:* identifies the timing on which the RenderLoop cycles through its `items` array and runs Updates or Timeouts on each, if necessary.
*Timeout:* The end of a RenderLoop Item's lifecycle. In some cases, a `timeoutCallback` method may occur.
*Update:* A specified time in which a callback representing an update is fired (by default, this occurs on every tick if an `updateCallback` is defined).
## Features (With Code Samples)
Many IDS Components, such as the [Popup](../ids-popup/README.md), use the global RenderLoop instance internally and allow access using the `rl` property:
```js
const popup = new IdsPopup();
console.log(popup.rl);
```
If creating a custom component that needs RenderLoop access, you can use the `IdsRenderLoopMixin`. No other setup is necessary -- a single instance of the RenderLoop will be setup and activated upon first access of the `rl` property:
```js
import { IdsRenderLoopMixin, IdsRenderLoopItem } from '[my-path-to-ids]/src/ids-render-loop/ids-render-loop-mixin';
@customElement('my-component')
@scss(styles)
@mixin(IdsRenderLoopMixin)
class MyComponent {
constructor() {
// ...
}
}
```
In some cases, you may not want to roll the RenderLoop into a component, and simply want access to add non-DOM-related asynchronous operations. For this, it's possible to simply access the `rl` property directly on the mixin definition:
```js
console.log(IdsRenderLoopMixin.rl);
```
### RenderLoop Items with a Timeout
To run an asynchronous operation using the RenderLoop, you must build and register an `IdsRenderLoopItem` object.
Creating an item that will timeout after 100ms and switch the contents of a text span could look like this:
```js
const textSpan = document.querySelector('span.my-span');
textSpan.textContent = 'Hello!';
const item = new IdsRenderLoopItem({
id: 'test-loop-item',
duration: 100,
timeoutCallback: () => {
textSpan.textContent = 'Goodbye!';
}
});
```
The RenderLoop Item won't execute until it's registered with the loop. We can access the RenderLoop instance from the component we built earlier, using the `register()` method to pass it on:
```js
const myComponent = document.querySelector('my-component');
myComponent.rl.register(item);
```
After 100ms passes, the text content of our `span` tag will change!
### RenderLoop Items with an Update Duration
We might also want to build a RenderLoop Item that doesn't timeout on its own, and updates the text content of our
text span every 2 seconds. To perform this, we can utilize the `updateCallback` and `updateDuration` properties:
```js
const myComponent = document.querySelector('my-component');
const textSpan = document.querySelector('span.my-span');
textSpan.textContent = '0';
let counter = 0;
const item = new IdsRenderLoopItem({
id: 'test-loop-item-2',
updateDuration: 2000, // ms
updateCallback: () => {
counter++;
textSpan.textContent = `${counter}`;
}
});
myComponent.rl.register(item);
```
The above example doesn't specify a duration. The default duration (`-1`) causes the RenderLoop item to remain in the queue indefinitely until it's cleared. This can be done by removing the RenderLoop item from the `items` array in various ways:
```js
// Remove using the Loop API:
myComponent.rl.remove(item);
// ... OR destroy the item directly
item.destroy();
```
### RenderLoop Items that do everything
RenderLoop Items can be configured to both Update AND Timeout. The modified version of our updating item also has a `timeoutCallback`:
```js
const myComponent = document.querySelector('my-component');
const textSpan = document.querySelector('span.my-span');
textSpan.textContent = '0';
let counter = 0;
const item = new IdsRenderLoopItem({
id: 'test-loop-item-3',
timeoutCallback: () => {
textSpan.textContent = `DONE! Final Count was ${counter}`;
},
updateDuration: 2000, // ms
updateCallback: () => {
counter++;
textSpan.textContent = `${counter}`;
}
});
myComponent.rl.register(item);
```
When removing the item from the queue as shown above, the `updateCallback` will cease firing, and the `timeoutCallback` will fire.
In some cases, it may be desirable to destroy the RenderLoop item without firing the `timeoutCallback`. For this case it's possible to veto the `timeoutCallback`:
```js
// Destroy the item directly and prevent a timeout callback
item.destroy(true);
```
## States and Variations
The RenderLoop itself has two opposite states:
- Stopped
- Started
Each individual RenderLoop Item has two opposite states:
- Paused
- Resumed
## Converting from Previous Versions
- 4.x: The RenderLoop API wasn't exposed publicly in the previous release. The API has been modified in this version, but it's largely been simplified.
<file_sep>/app/ids-input/index.js
import IdsInput from '../../src/ids-input/ids-input';
<file_sep>/src/ids-toggle-button/README.md
# Ids Toggle Button
## Description
The IDS Toggle Button component is an extension of the standard IDS Button component, which provides some API around "toggling" a feature and having a "pressed" state, in addition to the IDS Button's standard features.
## Use Cases
- Create buttons with "pressed" and "unpressed" states, which represent an action being toggled "on" and "off".
## Terminology
- Unpressed: an unpressed state is the visual equivalent to turning a feature "off".
- Pressed: a pressed state is the visual equivalent to turning a feature "on".
- Toggle: the action of toggling switches between unpressed and pressed, swapping to the alternate of what is currently set.
## Features (With Code Examples)
While it's possible to manually set the `icon` and `text` attributes of a Toggle Button the same way that a standard IDS Button would be used, the Toggle Button also contains similarly-named attributes that represent their "on" (pressed) and "off" (unpressed) visual states. Consider the following example:
```html
<ids-toggle-button id="my-toggle" icon-on="star-filled" icon-off="star-outlined" text-off="Toggle Button (Off)" text-on="Toggle Button (On)">
<ids-icon slot="icon" icon="settings"></ids-icon>
<span slot="text"></span>
</ids-toggle-button>
```
In this example, the "on" (pressed) state would be:
- `icon-on="star-filled"`
- `text-on="Toggle Button (On)"`
The "off" (unpressed) state would be:
- `icon-off="star-outlined"`
- `text-off="Toggle Button (Off)"`
In the above example, the default state of the Toggle button will be "off", and the content of the `icon-off`/`text-off` properties will populate the standard `icon`/`text` button properties. To default the button on, simply use the `pressed` attribute:
```html
<ids-toggle-button id="my-toggle" pressed="true" icon-on="star-filled" icon-off="star-outlined" text-off="Toggle Button (Off)" text-on="Toggle Button (On)">
<!-- ... -->
</ids-toggle-button>
```
## States and Variations
In addition to having the same states as Buttons, Toggle Buttons can also have:
- Pressed (on)
- Unpressed (off)
### "Default" Type
Toggle buttons can only be displayed in the "default" button type
## Keyboard Guidelines
## Responsive Guidelines
## Converting from Previous Versions
## Designs
## Alternate Designs
## Proposed Changes
## Test Plan
1. Accessibility - Axe
1. Visual Regression Test
1. Repeat Tests in All Supported Browsers
1. Some of these as test cases from the [WC gold standard](https://github.com/webcomponents/gold-standard/wiki#api)
1. Can be consumed in NG/Vue/React (pull it in standalone/built see it works standalone)
## Accessibility Guidelines
## Regional Considerations
Be conscious of the layout of content within your buttons when they are present in RTL situations.
<file_sep>/app/ids-wizard/example.js
import IdsLayoutGrid from '../../src/ids-layout-grid/ids-layout-grid';
import IdsLayoutGridCell from '../../src/ids-layout-grid/ids-layout-grid-cell';
import IdsText from '../../src/ids-text/ids-text';
<file_sep>/src/ids-hyperlink/README.MD
# Ids Hyperlink Component
## Description
This component styles hyperlinks to the design guidelines. Also called a link. Typically a hyperlink will open a file or document, or be used to navigate to a new location.
## Use Cases
- When you need a navigational link to another page or an external destination
- When you need a link that appears disabled because its action is currently unavailable
## Terminology
- **Link/Hyperlink**: An interactive link to another page within Infor software to external destinations.
- **Disabled**: A link can be disabled if it's not actionable at the moment.
## Features (With Code Examples)
A normal hyperlink element used as a web component that opens a url in a new window.
```html
<ids-hyperlink href="http://www.example.com" target="_blank">Normal Link</ids-hyperlink>
```
A Disabled appearing hyperlink element.
```html
<ids-hyperlink href="http://www.example.com" disabled="true" target="_blank">Disabled Link</ids-hyperlink>
```
## Settings and Attributes
- `disabled` {boolean} Set the link to disabled
- `href` {string} Set the links href to a url or file
- `target` {string} Set the links target attribute. Valid values are '_blank' | '_self' | '_parent' | '_top' | frame name.
- `mode` {string} Set the theme mode
- `version` {string} Set the theme version
## Themeable Parts
- `link` allows you to further style the link element
## States and Variations (With Code Examples)
- Disabled
- Visited
- Hover
- Active
## Keyboard Guidelines
- <kbd>Tab/Shift+Tab</kbd>: If the link is enabled this will focus or unfocus the link.
- <kbd>Enter</kbd>: Follows the link URL or triggers its action.
## Responsive Guidelines
- Flows with padding and margin within the width and height of the parent container. Possibly scrolling as needed based on parent dimensions.
## Converting from Previous Versions
- 3.x: Used a css class on `<a>` tags
- 4.x: Used a css class on `<a>` tags
## Designs
[Design Specs](https://www.figma.com/file/ok0LLOT9PP1J0kBkPMaZ5c/IDS_Component_File_v4.6-(Draft))
## Accessibility Guidelines
- 1.4.1 Use of Color - Color is not used as the only visual means of conveying information, indicating an action, prompting a response, or distinguishing a visual element. Ensure the color tags that indicate state like OK, cancel, ect have other ways to indicate that information. This is failing.
- 1.4.3 Contrast (Minimum) - The visual presentation of text and images of text has a contrast ratio of at least 4.5:1. Ensure the color tags pass contrast.
## Regional Considerations
Link text should be localized in the current language. And should flip to the alternate side in Right To Left mode. Consider that in some languages text may be a lot longer (German). And in some cases it cant be wrapped (Thai).
<file_sep>/app/ids-theme-switcher/index.js
import IdsContainer from '../../src/ids-container/ids-container';
import IdsThemeSwitcher from '../../src/ids-theme-switcher/ids-theme-switcher';
import IdsTag from '../../src/ids-tag/ids-tag';
<file_sep>/src/ids-theme-switcher/README.md
# Ids Theme Switcher Component
## Description
We include a theme switcher component that can be visual or non visual. If visual it comes with a menu button to allow you to pick theme. If non visible you can set its properties and all other components in the page will change theme to the set theme.
## Use Cases
- When you want to use a common look across applications with a theme
- When you prefer a dark vs light mode or need a high contrast mode for low light situations or Accessibility.
## Terminology
- **Theme**: A broad term to describe changing the look of the application. We essentially have 6 themes from a historical sense but have tried change Terminology over time and currently refer to a theme as one of the UI versions (New vs Classic)
- **Modes**: Formerly called Variant and lets you switch between dark, light and high contrast with in the version essentially constituting a new theme.
- **Versions**: Formerly called Theme and lets you switch between the New look and the Classic look and maybe in the future more themes.
- **New**: The new look; formerly called Uplift and Vibrant
- **Classic**: Formerly called Soho and Subtle
## Features (With Code Examples)
Add an ids-theme-switcher to the page near the top and set the version and mode properties.
```html
<ids-theme-switcher version="new" mode="dark"></ids-theme-switcher>
```
## Settings and Attributes
- `version` {string} Sets the theme version to apply ('new' or 'classic')
- `mode` {string} Sets the theme mode to apply ('light', 'dark', or 'contrast')
## Converting from Previous Versions
- 3.x: This version did not have any themes
- 4.x: You no longer change the style sheet out like in previous versions
- 5.x: You no longer need to change the style sheet out like in previous versions because the css in encapsulated within each component now
## Proposed Changes
- Fix Accessibility issue (1.4.1 Use of Color) by adding an icon to the color tags.
- Fix Accessibility issue (1.4.3 Contrast (Minimum)) by changing or not using some tags
## Accessibility Guidelines
- 1.4.1 Use of Color - Color is not used as the only visual means of conveying information, indicating an action, prompting a response, or distinguishing a visual element. The New Version of the theme has better color usage than the classic theme.
- 1.4.3 Contrast (Minimum) - The visual presentation of text and images of text has a contrast ratio of at least 4.5:1. The New Version of the theme has better contrast than the classic theme. Light and Dark mode pass WCAG AA and High Contrast passes WCAG AAA
## Regional Considerations
As a point of interest colors do have certain meaning associated to them for countries and cultures. We found this [article on color meaning](https://www.shutterstock.com/blog/color-symbolism-and-meanings-around-the-world) interesting.
<file_sep>/app/ids-wizard/index.js
import IdsWizard from '../../src/ids-wizard/ids-wizard';
import IdsWizardStep from '../../src/ids-wizard/ids-wizard-step';
<file_sep>/src/ids-switch/ids-switch.d.ts
// Ids is a JavaScript project, but we define TypeScript declarations so we can
// confirm our code is type safe, and to support TypeScript users.
export default class IdsSwitch extends HTMLElement {
  /** Sets the checked state to true or false */
  checked: boolean;

  /** Sets the switch to disabled */
  disabled: boolean;

  /** Sets the switch label text */
  label: string;

  /**
   * Sets the switch label font size (rarely used).
   * Fixed: the `'sm '` literal previously contained a trailing space, so the
   * intended `'sm'` value only type-checked via the `string` fallback.
   */
  labelFontSize: 10 | 12 | 14 | 16 | 20 | 24 | 28 | 32 | 40 | 48 | 60 | 72 |
  'xs' | 'sm' | 'lg' | 'xl' | string | number;

  /** Sets the switch `value` attribute */
  value: string;

  /** Set the theme mode */
  mode: 'light' | 'dark' | 'contrast' | string;

  /** Set the theme version */
  version: 'new' | 'classic' | string;
}
<file_sep>/app/ids-data-grid/virtual-scroll.js
import { IdsDataGrid } from '../../src/ids-data-grid/ids-data-grid';
// Example for populating the DataGrid
const dataGrid = document.querySelector('#data-grid-1');

// Do an ajax request for the sample product data
const url = '/api/products';
const xmlhttp = new XMLHttpRequest();

// Set up columns, declared as a single literal. Each column uses the grid's
// built-in text formatter and is sortable; only `id` has a fixed width.
const columns = [
  {
    id: 'id',
    name: 'ID',
    field: 'id',
    formatter: dataGrid.formatters.text,
    width: 80,
    sortable: true
  },
  {
    id: 'color',
    name: 'Color',
    field: 'color',
    formatter: dataGrid.formatters.text,
    sortable: true
  },
  {
    id: 'inStock',
    name: 'In Stock',
    field: 'inStock',
    formatter: dataGrid.formatters.text,
    sortable: true
  },
  {
    id: 'productId',
    name: 'Product Id',
    field: 'productId',
    formatter: dataGrid.formatters.text,
    sortable: true
  },
  {
    id: 'productName',
    name: 'Product Name',
    field: 'productName',
    formatter: dataGrid.formatters.text,
    sortable: true
  },
  {
    id: 'unitPrice',
    name: 'Unit Price',
    field: 'unitPrice',
    formatter: dataGrid.formatters.text,
    sortable: true
  },
  {
    id: 'units',
    name: 'Units',
    field: 'units',
    formatter: dataGrid.formatters.text,
    sortable: true
  }
];

// When the response arrives successfully, apply columns and parsed data to the grid
xmlhttp.onreadystatechange = function onreadystatechange() {
  if (this.readyState === 4 && this.status === 200) {
    dataGrid.columns = columns;
    dataGrid.data = JSON.parse(this.responseText);
  }
};

// Execute the request
xmlhttp.open('GET', url, true);
xmlhttp.send();
<file_sep>/server.js
// Setup a simple express server used only for running tests
const express = require('express');
const fs = require('fs');
const log = require('loglevel');
const app = express();
const port = process.env.PORT || 4444;

// Treat extension-less request paths as HTML
app.use((req, res, next) => {
  if (!req.path.includes('.')) {
    res.setHeader('Content-Type', 'text/html');
  }
  next();
});

// Serve the built assets
app.use('/', express.static(`${__dirname}/dist`));

// Serve the static JSON fixtures from the app data folder
app.get('/api/:fileName', (req, res) => {
  const { fileName } = req.params;
  const json = fs.readFileSync(`./app/data/${fileName}.json`, 'utf8');
  res.json(JSON.parse(json));
});

// Listen on the configured port (default 4444)
app.listen(port, () => {
  log.warn(`Dev server listening on port ${port}`);
});
<file_sep>/src/ids-container/ids-container.js
import {
IdsElement,
customElement,
scss,
mix,
props
} from '../ids-base/ids-element';
// Import Mixins
import { IdsEventsMixin } from '../ids-base/ids-events-mixin';
import { IdsThemeMixin } from '../ids-base/ids-theme-mixin';
import { IdsStringUtils as stringUtils } from '../ids-base/ids-string-utils';
import styles from './ids-container.scss';
/**
* IDS Container Component
* @type {IdsContainer}
* @inherits IdsElement
* @mixes IdsThemeMixin
* @mixes IdsEventsMixin
* @part container - the entire container element
*/
@customElement('ids-container')
@scss(styles)
class IdsContainer extends mix(IdsElement).with(IdsEventsMixin, IdsThemeMixin) {
constructor() {
super();
}
/**
* Invoked each time the custom element is appended into a document-connected element.
*/
connectedCallback() {
super.connectedCallback();
}
/**
* Return the properties we handle as getters/setters
* @returns {Array} The properties in an array
*/
static get properties() {
return [props.SCROLLABLE, props.MODE, props.VERSION];
}
/**
* Create the Template for the contents
* @returns {string} The template
*/
template() {
return `<div class="ids-container" part="container"${this.scrollable === 'true' ? ' tabindex="0"' : ''}><slot></slot></div>`;
}
/**
* If set to true the container is scollable
* @param {boolean|string} value true of false depending if the tag is scrollable
*/
set scrollable(value) {
if (stringUtils.stringToBool(value)) {
this.setAttribute('scrollable', 'true');
this.container.setAttribute('scrollable', 'true');
this.container.setAttribute('tabindex', '0');
return;
}
this.setAttribute('scrollable', 'false');
this.container.setAttribute('scrollable', 'false');
this.container.removeAttribute('tabindex');
}
get scrollable() { return this.getAttribute('scrollable') || 'true'; }
}
export default IdsContainer;
<file_sep>/app/ids-trigger-field/index.js
import IdsTriggerField from '../../src/ids-trigger-field/ids-trigger-field';
<file_sep>/app/ids-base/ids-mixin.js
import mix from '../../src/ids-base/ids-mixin'; //eslint-disable-line
<file_sep>/app/ids-virtual-scroll/index.js
import IdsVirtualScroll from '../../src/ids-virtual-scroll/ids-virtual-scroll';
<file_sep>/src/ids-textarea/ids-textarea.d.ts
// Ids is a JavaScript project, but we define TypeScript declarations so we can
// confirm our code is type safe, and to support TypeScript users.
/** Shape of the event object passed to IdsTextarea event listeners */
interface IdsTextareaEventDetail extends Event {
  detail: {
    elem: IdsTextarea
  }
}

export class IdsTextarea extends HTMLElement {
  /** Will automatically expand the textarea to fit the contents when typing */
  autogrow: boolean;

  /** The Max Height of the textarea when autogrow is enabled */
  autogrowMaxHeight: number | string;

  /** When set will select all text on focus */
  autoselect: boolean;

  /** Text that will be used in place of the `max` text */
  charMaxText: string;

  /** Text that will be used in place of the `remaining` */
  charRemainingText: string;

  /** Displays a counter that counts down from the maximum */
  characterCounter: boolean;

  /** Sets the dirty tracking feature on to indicate a changed field */
  dirtyTracker: boolean;

  /** Sets textarea to disabled */
  disabled: boolean;

  /** Sets the label text */
  label: string;

  /** Sets the validation required indicator on label text, it's default to `true` */
  labelRequired: boolean;

  /** Maximum characters allowed in textarea */
  maxlength: number | string;

  /** Sets the placeholder text */
  placeholder: string;

  /** Determines whether or not the textarea can be displayed on a printed page */
  printable: boolean;

  /**
   * Sets the size (width).
   * Fixed: the `'sm '` literal previously contained a trailing space, so the
   * intended `'sm'` value only type-checked via the `string` fallback.
   */
  size: 'sm' | 'md' | 'lg' | 'full' | string;

  /** Sets to readonly state */
  readonly: boolean;

  /** Can resize the height of the textarea */
  resizable: boolean;

  /** Sets to visible height of a text area in lines */
  rows: number | string;

  /**
   * Sets the text alignment.
   * Fixed: `'center '` previously contained a trailing space (see `size`).
   */
  textAlign: 'left' | 'center' | 'right' | string;

  /** Sets the validation check to use */
  validate: 'required' | string;

  /** Sets the validation events to use */
  validationEvents: 'blur' | string;

  /** Sets the `value` attribute */
  value: string | number;

  /** Set the theme mode */
  mode: 'light' | 'dark' | 'contrast' | string;

  /** Set the theme version */
  version: 'new' | 'classic' | string;

  /** Fires when value change. */
  on(event: 'change', listener: (detail: IdsTextareaEventDetail) => void): this;

  /** Fires when user type. */
  on(event: 'input', listener: (detail: IdsTextareaEventDetail) => void): this;

  /** Fires when copy paste and value change. */
  on(event: 'propertychange', listener: (detail: IdsTextareaEventDetail) => void): this;

  /** Fires when textarea get focus. */
  on(event: 'focus', listener: (detail: IdsTextareaEventDetail) => void): this;

  /** Fires when textarea text get selected. */
  on(event: 'select', listener: (detail: IdsTextareaEventDetail) => void): this;
}
<file_sep>/src/ids-base/ids-events-mixin.d.ts
// Ids is a JavaScript project, but we define TypeScript declarations so we can
// confirm our code is type safe, and to support TypeScript users.
/**
 * Type stub for the IDS events mixin. Its members are implemented in the JS
 * source and are not declared here — presumably the event-binding helpers
 * (`onEvent`/`offEvent`) used throughout the components; TODO: declare them.
 */
export class IdsEventsMixin {
}
<file_sep>/src/ids-upload/ids-upload.d.ts
// Ids is a JavaScript project, but we define TypeScript declarations so we can
// confirm our code is type safe, and to support TypeScript users.
// Shape of the event object passed to IdsUpload event listeners
interface IdsUploadEventDetail extends Event {
detail: {
elem: IdsUpload
}
}
export default class IdsUpload extends HTMLElement {
/** Sets limit the file types to be uploaded */
accept: string;
/** Sets the dirty tracking feature on to indicate a changed field */
dirtyTracker: boolean|string;
/** Sets to disabled state * */
disabled: boolean|string;
/** Sets the label text for text input * */
label: string;
/** Sets the label text for file input * */
labelFiletype: string;
/** Sets to allows multiple files to be uploaded */
multiple: boolean|string;
/** Sets ellipsis to be not shown on text input */
noTextEllipsis: boolean|string;
/** Sets the input placeholder text * */
placeholder: string;
/** Sets the size (width) of input * */
size: string;
/** Sets to readonly state * */
readonly: boolean|string;
/** Sets the label text for trigger button * */
triggerLabel: string;
/** Sets the validation check to use * */
validate: string;
/** Sets the validation events to use * */
validationEvents: string;
/** Sets the `value` attribute * */
value: string;
/** Set the theme mode */
mode: 'light' | 'dark' | 'contrast' | string;
/** Set the theme version */
version: 'new' | 'classic' | string;
/** Fires when file input files change. */
on(event: 'change', listener: (detail: IdsUploadEventDetail) => void): this;
}
<file_sep>/test/ids-expandable-area/ids-expandable-area-func-test.js
/**
* @jest-environment jsdom
*/
import IdsExpandableArea from '../../src/ids-expandable-area/ids-expandable-area';
import IdsToggleButton from '../../src/ids-toggle-button/ids-toggle-button';
describe('IdsExpandableArea Component', () => {
let el;
beforeEach(async () => {
jest.spyOn(window, 'requestAnimationFrame').mockImplementation((cb) => cb());
const elem = new IdsExpandableArea();
document.body.appendChild(elem);
el = document.querySelector('ids-expandable-area');
});
afterEach(async () => {
document.body.innerHTML = '';
el = null;
window.requestAnimationFrame.mockRestore();
});
it('renders correctly', () => {
expect(el.outerHTML).toMatchSnapshot();
el.type = 'toggle-btn';
expect(el.outerHTML).toMatchSnapshot();
});
it('renders with no errors', () => {
const errors = jest.spyOn(global.console, 'error');
el.remove();
el = new IdsExpandableArea();
document.body.appendChild(el);
expect(document.querySelectorAll('ids-expandable-area').length).toEqual(1);
expect(errors).not.toHaveBeenCalled();
});
it('can change its type property', () => {
const rootEl = el.shadowRoot.querySelector('.ids-expandable-area');
expect(rootEl.getAttribute('type')).toBe(null);
expect(el.getAttribute('type')).toBe(null);
el.type = 'toggle-btn';
rootEl.setAttribute('type', 'toggle-btn');
expect(rootEl.getAttribute('type')).toBe('toggle-btn');
expect(el.getAttribute('type')).toBe('toggle-btn');
el.type = 'bad-name';
rootEl.setAttribute('type', 'bad-name');
expect(rootEl.getAttribute('type')).toBe('bad-name');
expect(el.getAttribute('type')).toBe('');
});
it('can change its expanded property', () => {
const rootEl = el.shadowRoot.querySelector('.ids-expandable-area');
rootEl.setAttribute('expanded', true);
el.expanded = true;
expect(rootEl.getAttribute('expanded')).toBe('true');
expect(el.getAttribute('expanded')).toBe('true');
rootEl.setAttribute('expanded', false);
el.expanded = false;
expect(rootEl.getAttribute('expanded')).toBe('false');
expect(el.getAttribute('expanded')).toBe('false');
});
it('renders with IdsToggleButton as expander', () => {
let expander;
el.type = 'toggle-btn';
expander = new IdsToggleButton();
expect(expander.classList).not.toContain('ids-expandable-area-expander');
el.type = null;
expander = el.expander;
expect(expander.classList).toContain('ids-expandable-area-expander');
});
it('can change set its aria-expanded attribute', () => {
el.state.expanded = true;
el.expander.setAttribute('aria-expanded', el.state.expanded);
expect(el.expander.getAttribute('aria-expanded')).toBe('true');
});
it('can be expanded/collapsed when clicked (mouse)', () => {
el.type = null;
const event = new MouseEvent('click', {
target: el.expander,
bubbles: true,
cancelable: true,
view: window
});
// Expand
el.expander.dispatchEvent(event);
expect(el.state.expanded).toBe(true);
expect(el.expanded).toBe('true');
// Collapse
el.expander.dispatchEvent(event);
expect(el.state.expanded).toBe(false);
expect(el.expanded).toBe('false');
// Change type to 'toggle-btn'
el.type = 'toggle-btn';
el.state.expanded = false;
el.expanded = false;
const event2 = new MouseEvent('click', {
target: el.expander,
bubbles: true,
cancelable: true,
view: window
});
// Expand
el.expander.dispatchEvent(event2);
expect(el.state.expanded).toBe(true);
expect(el.expanded).toBe('true');
// Collapse
el.expander.dispatchEvent(event2);
expect(el.state.expanded).toBe(false);
expect(el.expanded).toBe('false');
});
it('can be expanded/collapsed when touched', () => {
const event = new TouchEvent('touchstart', {
touches: [{
identifier: '123',
pageX: 0,
pageY: 0,
target: el.expander
}],
bubbles: true,
cancelable: true,
view: window
});
// Expand
el.expander.dispatchEvent(event);
expect(el.state.expanded).toBe(true);
expect(el.expanded).toBe('true');
// Collapse
el.expander.dispatchEvent(event);
expect(el.state.expanded).toBe(false);
expect(el.expanded).toBe('false');
});
it('can change the height of the pane', () => {
el.pane.style.height = `100px`;
requestAnimationFrame(() => {
el.pane.style.height = `100px`;
requestAnimationFrame(() => {
el.pane.style.height = `0px`;
});
});
expect(el.pane.style.height).toEqual('0px');
});
// The component's template varies by `type`: the default template has no
// data-expander hook on the header, while the 'toggle-btn' variant keeps it.
it('can render different templates', () => {
  const rootEl = el.shadowRoot.querySelector('.ids-expandable-area');
  const header = rootEl.querySelector('.ids-expandable-area-header');
  // Default type: header carries no data-expander attribute
  el.type = null;
  header.removeAttribute('data-expander');
  el.template();
  expect(header.getAttribute('data-expander')).toBe(null);
  // Toggle-button type: header retains its data-expander attribute
  el.type = 'toggle-btn';
  header.setAttribute('data-expander', 'header');
  el.template();
  expect(header.getAttribute('data-expander')).toBe('header');
});
// Typos fixed in the test description ("wont"/"caling"; the property under
// test is `pane`, not "panel"). Toggling `expanded` with no pane present
// must not throw, and the attribute must still reflect the last value set.
it('does not error calling api with no pane', () => {
  el.pane = null;
  el.expanded = true;
  el.expanded = false;
  el.expanded = true;
  expect(el.expanded).toEqual('true');
});
});
<file_sep>/app/ids-tooltip/index.js
import IdsTooltip from '../../src/ids-tooltip/ids-tooltip';
<file_sep>/app/ids-checkbox/index.js
import IdsCheckbox from '../../src/ids-checkbox/ids-checkbox';
<file_sep>/app/ids-base/ids-decorators.js
/* eslint-disable */
import {
customElement,
version,
scss
} from '../../src/ids-base/ids-decorators';
/* eslint-enable */
<file_sep>/src/ids-block-grid/README.md
# Ids Block Grid Component
## Description
The IDS Block Grid Component displays data as selectable blocks within a simple grid. It gives a way of evenly split contents of a list within a grid.
## Use Cases
Use when you want to create rows of images with paragraphs/links that need to stay evenly spaced.
## Terminology
- **Align**: The position of the block grid. You can set it to be center, left, or right.
## Feature (With the Code Examples)
A block grid is created by using the `ids-block-grid` as the main container, and `ids-block-grid-item` for the item inside of it.
```html
<ids-block-grid>
<ids-block-grid-item>
Content Here...
</ids-block-grid-item>
</ids-block-grid>
```
You can set the alignment of the block grid by setting the `align` property with these three options `left`, `centered` or `right`. By default, the position is set to `centered` align.
```html
<ids-block-grid align="centered">
<ids-block-grid-item>
Content Here...
</ids-block-grid-item>
</ids-block-grid>
```
## Settings and Attributes
- `align` {string} Sets the position of the block grid and its contents.
## States and Variations
- Align
## Responsive Guidelines
- The block grid uses a flex container so it can fill available free space and shrink its contents to prevent overflow, while each block grid item has a width of 200px. The overall height is set automatically based on the number of block grid items.
## Accessibility
The use of this component is not recommended for Accessibility since the lack of elements in the page may pose issues for screen reader and other assistive technology. Consider a way to disable this functionality.
<file_sep>/app/ids-expandable-area/index.js
import IdsExpandableArea from '../../src/ids-expandable-area/ids-expandable-area';
<file_sep>/src/ids-trigger-field/ids-trigger-field.d.ts
// Ids is a JavaScript project, but we define TypeScript declarations so we can
// confirm our code is type safe, and to support TypeScript users.
// Shape of the event object passed to IdsTriggerField listeners;
// `detail.elem` is the component instance that dispatched the event.
interface IdsTriggerFieldEventDetail extends Event {
  detail: {
    elem: IdsTriggerField
  }
}
/** Type declarations for the IdsTriggerField web component (settings, theming and trigger events). */
export default class IdsTriggerField extends HTMLElement {
  /** Set if the trigger field is tabbable */
  tabbable: boolean;

  /** Set the appearance of the trigger field */
  appearance: 'normal' | 'compact' | string;

  /** Set the theme mode */
  mode: 'light' | 'dark' | 'contrast' | string;

  /** Set the theme version */
  version: 'new' | 'classic' | string;

  /** Fire the trigger event and action */
  trigger(): void;

  /** Fires before the trigger button is clicked, you can return false in the response to veto. */
  on(event: 'beforetriggerclicked', listener: (detail: IdsTriggerFieldEventDetail) => void): this;

  /** Fires as the trigger button is clicked. */
  on(event: 'triggerclicked', listener: (detail: IdsTriggerFieldEventDetail) => void): this;
}
<file_sep>/test/ids-toolbar/ids-toolbar-func-test.js
/**
* @jest-environment jsdom
*/
import IdsToolbar from '../../src/ids-toolbar/ids-toolbar';
// Markup used to populate the toolbar under test: an app-menu trigger
// section, a title section, a buttonset section (including a menu button
// with a nested submenu, a disabled button and a plain link), and a
// "more actions" overflow menu.
// NOTE(review): the nested items "Item Five"/"Item Six" and "Option" menus
// reuse value="4" — presumably a copy/paste slip in this fixture; harmless
// for these tests but worth confirming. The template literal itself is kept
// byte-for-byte since it is runtime data.
const exampleHTML = `
<ids-toolbar-section id="appmenu-section">
<ids-button icon="menu" role="button" id="button-appmenu">
<span slot="text" class="audible">Application Menu Trigger</span>
</ids-button>
</ids-toolbar-section>
<ids-toolbar-section id="title-section" type="title">
<ids-text type="h3">My Toolbar</ids-text>
</ids-toolbar-section>
<ids-toolbar-section id="buttonset-section" type="buttonset" align="end">
<ids-button id="button-1" role="button">
<span slot="text">Text</span>
</ids-button>
<ids-menu-button role="button" id="button-2" menu="button-2-menu" dropdown-icon>
<span slot="text">Menu</span>
</ids-menu-button>
<ids-popup-menu id="button-2-menu" target="#button-2">
<ids-menu-group>
<ids-menu-item value="1">Item One</ids-menu-item>
<ids-menu-item value="2">Item Two</ids-menu-item>
<ids-menu-item value="3">Item Three</ids-menu-item>
<ids-menu-item>More Items
<ids-popup-menu>
<ids-menu-group>
<ids-menu-item value="4">Item Four</ids-menu-item>
<ids-menu-item value="4">Item Five</ids-menu-item>
<ids-menu-item value="4">Item Six</ids-menu-item>
</ids-menu-group>
</ids-popup-menu>
</ids-menu-item>
</ids-menu-group>
</ids-popup-menu>
<ids-button id="button-3" disabled>
<span slot="text" class="audible">Settings</span>
<ids-icon slot="icon" icon="settings"></ids-icon>
</ids-button>
<ids-button id="button-4">
<span slot="text" class="audible">Trash</span>
<ids-icon slot="icon" icon="delete"></ids-icon>
</ids-button>
<a href="#">Outgoing Link</a>
</ids-toolbar-section>
<ids-toolbar-more-actions id="section-more">
<ids-menu-group>
<ids-menu-item value="1">Option One</ids-menu-item>
<ids-menu-item value="2">Option Two</ids-menu-item>
<ids-menu-item value="3">Option Three</ids-menu-item>
<ids-menu-item>More Options
<ids-popup-menu>
<ids-menu-group>
<ids-menu-item value="4">Option Four</ids-menu-item>
<ids-menu-item value="5">Option Five</ids-menu-item>
<ids-menu-item value="6">Option Six</ids-menu-item>
</ids-menu-group>
</ids-popup-menu>
</ids-menu-item>
</ids-menu-group>
</ids-toolbar-more-actions>
`;
// Functional tests for IdsToolbar: section/item queries, disabled state,
// "tabbable" behavior, and keyboard/programmatic navigation semantics.
describe('IdsToolbar Component', () => {
  let toolbar;
  let sectionMore;
  let buttonAppMenu;
  let button1;
  let button2;
  let button3;
  let button4;

  // Build a fully-populated toolbar from the example markup and capture
  // references to the sections/buttons the tests exercise.
  beforeEach(async () => {
    const elem = new IdsToolbar();
    document.body.appendChild(elem);
    toolbar = document.querySelector('ids-toolbar');
    toolbar.insertAdjacentHTML('afterbegin', exampleHTML);
    // Reference sections/items
    sectionMore = document.querySelector('ids-toolbar-more-actions');
    buttonAppMenu = document.querySelector('#button-appmenu');
    button1 = document.querySelector('#button-1');
    button2 = document.querySelector('#button-2');
    button3 = document.querySelector('#button-3');
    button4 = document.querySelector('#button-4');
  });

  // Reset the DOM so tests cannot leak state into each other
  afterEach(async () => {
    document.body.innerHTML = '';
  });

  it('renders with no errors', () => {
    const errors = jest.spyOn(global.console, 'error');
    const elem = new IdsToolbar();
    document.body.appendChild(elem);
    elem.remove();
    // Only the toolbar created in beforeEach remains after the extra one is removed
    expect(document.querySelectorAll('ids-toolbar').length).toEqual(1);
    expect(errors).not.toHaveBeenCalled();
  });

  it('can get a list of its sections', () => {
    const sections = toolbar.sections;
    expect(sections).toBeDefined();
    // appmenu, title, buttonset, more-actions
    expect(sections.length).toBe(4);
  });

  it('can get a list of its items', () => {
    const items = toolbar.items;
    expect(items).toBeDefined();
    expect(items.length).toBe(6);
  });

  it('can be disabled and enabled', () => {
    // Via the property...
    toolbar.disabled = true;
    expect(toolbar.disabled).toBeTruthy();
    expect(toolbar.container.classList.contains('disabled')).toBeTruthy();
    toolbar.disabled = false;
    expect(toolbar.disabled).toBeFalsy();
    expect(toolbar.container.classList.contains('disabled')).toBeFalsy();
    // ...and via the attribute
    toolbar.setAttribute('disabled', true);
    expect(toolbar.disabled).toBeTruthy();
    expect(toolbar.container.classList.contains('disabled')).toBeTruthy();
    toolbar.removeAttribute('disabled');
    expect(toolbar.disabled).toBeFalsy();
    expect(toolbar.container.classList.contains('disabled')).toBeFalsy();
  });

  it('can be configured to be navigated with Tab/Shift+Tab, or not, with the "tabbable" feature', () => {
    toolbar.tabbable = true;
    expect(toolbar.tabbable).toBeTruthy();
    expect(button1.tabIndex).toBe(0);
    expect(button2.tabIndex).toBe(0);
    // Focus a button that isn't the first one, then disable "tabbable".
    // The previously focused item should retain its zero tabIndex,
    // while the others are all set to -1.
    button2.focus();
    toolbar.tabbable = false;
    expect(toolbar.tabbable).toBeFalsy();
    expect(button1.tabIndex).toBe(-1);
    expect(button2.tabIndex).toBe(0);
    expect(button3.tabIndex).toBe(-1);
    const currentTabbableElem = toolbar.detectTabbable();
    expect(currentTabbableElem).toBe(button2);
  });

  it('can announce what is focused and navigate among its items', () => {
    const items = toolbar.items;
    // Navigate forward (down) 2 items
    toolbar.navigate(2, true);
    // The component should be able to explain which of its items is focused
    expect(toolbar.focused).toEqual(items[2]);
    // Navigate backward (up) 1 item
    toolbar.navigate(-1, true);
    expect(toolbar.focused).toEqual(items[1]);
    // Won't navigate anywhere if a junk/NaN value is provided
    toolbar.navigate('forward', true);
    expect(toolbar.focused).toEqual(items[1]);
  });

  it('navigates nowhere if no number of steps is provided', () => {
    button1.focus();
    toolbar.navigate();
    expect(toolbar.focused).toEqual(button1);
  });

  it('loops around if `navigate()` tries to go too far', () => {
    // Forward past the last item wraps to the first, and vice versa
    sectionMore.focus();
    toolbar.navigate(1, true);
    expect(toolbar.focused).toEqual(buttonAppMenu);
    toolbar.navigate(-1, true);
    expect(toolbar.focused).toEqual(sectionMore.button);
  });

  it('skips disabled items while navigating', () => {
    button2.focus();
    toolbar.navigate(1, true);
    // Button 3 is disabled, Button 4 should become focused
    expect(toolbar.focused).toEqual(button4);
  });

  it('navigates menu items using the keyboard', () => {
    const navigateLeftEvent = new KeyboardEvent('keydown', { key: 'ArrowLeft' });
    const navigateRightEvent = new KeyboardEvent('keydown', { key: 'ArrowRight' });
    // Focus the first one
    button1.focus();
    expect(toolbar.focused).toEqual(button1);
    // Navigate right one item
    toolbar.dispatchEvent(navigateRightEvent);
    expect(toolbar.focused).toEqual(button2);
    // Navigate left two items (navigation will wrap to the bottom item)
    toolbar.dispatchEvent(navigateLeftEvent);
    toolbar.dispatchEvent(navigateLeftEvent);
    expect(toolbar.focused).toEqual(buttonAppMenu);
  });

  it('cannot navigate away from an open menu button', (done) => {
    const navigateLeftEvent = new KeyboardEvent('keydown', { key: 'ArrowLeft', bubbles: true });
    const navigateRightEvent = new KeyboardEvent('keydown', { key: 'ArrowRight', bubbles: true });
    // Button 2 is the Menu Button
    button2.focus();
    button2.menuEl.show();
    // Wait for the menu to be open.
    // NOTE(review): the 30ms delay presumably outlasts the menu's open
    // sequence in jsdom — confirm if this test ever turns flaky.
    setTimeout(() => {
      const topMenuItem = button2.menuEl.items[0];
      topMenuItem.dispatchEvent(navigateRightEvent);
      expect(button2.menuEl.visible).toBeTruthy();
      topMenuItem.dispatchEvent(navigateLeftEvent);
      expect(button2.menuEl.visible).toBeTruthy();
      done();
    }, 30);
  });
});
<file_sep>/app/ids-menu/example.js
document.addEventListener('DOMContentLoaded', () => {
  // Demonstrate vetoing: the "#no-select" item refuses selection by
  // answering `false` through the `beforeselected` response callback.
  const vetoItem = document.querySelector('#no-select');
  vetoItem.addEventListener('beforeselected', function onBeforeSelected(e) {
    // eslint-disable-next-line
    console.info('%c You cannot select this item', 'color: #ff0000;', e.detail.elem);
    e.detail.response(false);
  });

  // Demonstrate the `selected` event by logging each selection made
  // on the complex menu example.
  const complexMenu = document.querySelector('#complex-menu');
  complexMenu.addEventListener('selected', function onSelected(e) {
    // eslint-disable-next-line
    console.info(`Item "${e.detail.elem.text}" was selected`);
  });
});
<file_sep>/app/ids-base/ids-dirty-tracker-mixin.js
import IdsDirtyTracker from '../../src/ids-base/ids-dirty-tracker-mixin';
<file_sep>/src/ids-layout-grid/ids-layout-grid-cell.js
import { customElement, IdsElement, props } from '../ids-base/ids-element';
/**
* IDS Layout Grid Cell Component
* @type {IdsLayoutGridCell}
* @inherits IdsElement
*/
@customElement('ids-layout-grid-cell')
class IdsLayoutGridCell extends IdsElement {
constructor() {
super();
}
/**
* Return the properties we handle as getters/setters
* @returns {Array} The properties in an array
*/
static get properties() {
return [
props.FILL,
props.COL_SPAN,
props.COL_START,
props.COL_END,
props.ROW_SPAN,
props.ROW_START,
props.ROW_END
];
}
/**
* Handle The Fill Setting
* @returns {string | null} The fill to true for theme default color
*/
get fill() { return this.getAttribute(props.FILL); }
/**
* Set the background fill
* @param {string | null} value The fill color or true for theme default color
*/
set fill(value) {
if (value) {
this.setAttribute(props.FILL, value);
this.classList.add('ids-background-fill');
return;
}
this.removeAttribute(props.FILL);
this.classList.remove('ids-background-fill');
}
/**
* Set the amount of columns to span
* @param {string | null} value The number value for the columns to span in the grid
*/
set colSpan(value) {
if (value) {
this.setAttribute(props.COL_SPAN, value);
this.style.setProperty('--grid-col-span', value);
this.classList.add(`ids-layout-grid-col-span`);
return;
}
this.style.removeProperty('--grid-col-span');
this.classList.remove(`ids-layout-grid-col-span`);
this.removeAttribute(props.COL_SPAN);
}
get colSpan() { return this.getAttribute(props.COL_SPAN); }
/**
* Set the starting column
* @param {string | null} value The number value for the column starting point
*/
set colStart(value) {
if (value) {
this.setAttribute(props.COL_START, value);
this.style.setProperty('--grid-col-start', value);
this.classList.add(`ids-layout-grid-col-start`);
return;
}
this.style.removeProperty('--grid-col-start');
this.classList.remove(`ids-layout-grid-col-start`);
this.removeAttribute(props.COL_START);
}
get colStart() { return this.getAttribute(props.COL_START); }
/**
* Set the ending column
* @param {string | null} value The number value for the column starting point
*/
set colEnd(value) {
if (value) {
this.setAttribute(props.COL_END, value);
this.style.setProperty('--grid-col-end', value);
this.classList.add(`ids-layout-grid-col-end`);
return;
}
this.style.removeProperty('--grid-col-end');
this.classList.remove(`ids-layout-grid-col-end`);
this.removeAttribute(props.COL_END);
}
get colEnd() { return this.getAttribute(props.COL_END); }
/**
* Set the amount of rows to span
* @param {string | null} value The number value for the rows to span in the grid
*/
set rowSpan(value) {
if (value) {
this.setAttribute(props.ROW_SPAN, value);
this.style.setProperty('--grid-row-span', value);
this.classList.add(`ids-layout-grid-row-span`);
return;
}
this.style.removeProperty('--grid-row-span');
this.classList.remove(`ids-layout-grid-row-span`);
this.removeAttribute(props.ROW_SPAN);
}
get rowSpan() { return this.getAttribute(props.ROW_SPAN); }
/**
* Set the starting row
* @param {string | null} value The number value for the row starting point
*/
set rowStart(value) {
if (value) {
this.setAttribute(props.ROW_START, value);
this.style.setProperty('--grid-row-start', value);
this.classList.add(`ids-layout-grid-row-start`);
return;
}
this.style.removeProperty('--grid-row-start');
this.classList.remove(`ids-layout-grid-row-start`);
this.removeAttribute(props.ROW_START);
}
get rowStart() { return this.getAttribute(props.ROW_START); }
/**
* Set the ending row
* @param {string | null} value The number value for the row ending point
*/
set rowEnd(value) {
if (value) {
this.setAttribute(props.ROW_END, value);
this.style.setProperty('--grid-row-end', value);
this.classList.add(`ids-layout-grid-row-end`);
return;
}
this.style.removeProperty('--grid-row-end');
this.classList.remove(`ids-layout-grid-row-end`);
this.removeAttribute(props.ROW_END);
}
get rowEnd() { return this.getAttribute(props.ROW_END); }
}
export default IdsLayoutGridCell;
<file_sep>/doc/CHANGELOG.md
# v5.0.0
## 5.0.0 Breaking Changes
- `[Button]` The button component has been changed to a web component. ([#4390](https://github.com/infor-design/enterprise/issues/4390))
- Markup has changed to a custom element `<ids-button id="my-button" type="primary"></ids-button>`
- Can now be imported as a single JS file and used with encapsulated styles.
- Some button properties are now attributes - "type", "text", "icon", "disabled", "focusable", etc.
- `[Card]` The Card/Widget component has been changed to a web component and renamed to ids-card.
- Markup has changed to a custom element `<ids-card></ids-card>` and has slots for the header and body content
- Can now be imported as a single JS file and used with encapsulated styles
- `[Checkbox]` The Checkbox component has been changed to a web component and renamed to ids-checkbox.
- Markup has changed to a custom element `<ids-checkbox></ids-checkbox>`
- If using events, events are now plain JS events.
- Can now be imported as a single JS file and used with encapsulated styles
- `[Datagrid]` The Datagrid component has been changed to a web component `ids-data-grid`.
- If using events, they are now plain JS events (for example: sorted, rendered)
- If using properties/settings these are now attributes or as plain properties for example: data, virtual-scroll
- Markup has changed to a custom element `<ids-data-grid></ids-data-grid>`
- Can now be imported as a single JS file and used with encapsulated styles
- `[Icons]` The icons are now a web component called ids-icon. You no longer need the huge block of svg in the page
- The icon and size are set via properties/attributes
- Can now be imported as a single JS file and used with encapsulated styles
- `[Input]` The Input component has been changed to a web component and renamed to ids-input.
- Markup has changed to a custom element `<ids-input></ids-input>`
- If using events, events are now plain JS events.
- Can now be imported as a single JS file and used with encapsulated styles
- If using a clearable X on the input the x is now tabbable by default for accessibility
- `[ListView]` The List View component has been changed to a web component and renamed to ids-list-view.
- If using properties/settings these are now attributes.
- Markup has changed to a custom element `<ids-list-view></ids-list-view>`
- If using events events are now plain JS events for example
- The template is now a template element that uses simple string substitution
- Can now be imported as a single JS file and used with encapsulated styles
- `[Loader]` The Busy Indicator component has been changed to a web component and renamed to ids-loader.
- If using properties/settings these are now attributes: dismissible, color
- Markup has changed to a custom element `<ids-loader></ids-loader>`
- Can now be imported as a single JS file and used with encapsulated styles
- `[Menu Button]` Added the Menu Button WebComponent
- Now exists as its own component
- Markup is `<ids-menu-button></ids-menu-button>`
- Implicitly linked to a menu by using `menu="my-menu"` CSS selector attribute, or direct element reference in JS
- `[Popup]` Added the Popup WebComponent. ([#4259](https://github.com/infor-design/enterprise/issues/4259))
- This is the new base component to be used for building Popupmenu, Tooltip, Modal, and any other component that requires placement of a fixed-position element.
- If using properties/settings, these are now attributes: x, y, align, alignX, alignY, alignEdge, alignTarget, animated, visible.
- Popup is now driven by its own HTMLElement rather than a being a behavior applied to any element: `<ids-popup x="0" y="0" align="top, left">My Popup</ids-popup>`
- Can now be imported as a single JS file and used with encapsulated styles
- `[Popup Menu]` The Popup Menu is now a web component called ids-popup-menu.
- Markup has been changed to a custom element `<ids-popup-menu></ids-popup-menu>`.
- Events triggered by the Popup Menu are now plain JS (for example `selected`/`deselected`)
- Selection of items is now divided up by Menu Groups `<ids-menu-group></ids-menu-group>`. Multiple menu groups in each Popup Menu are supported, and selection in one group will not affect selection in other groups.
- Many previous settings are now attribute-friendly (for example `closeOnSelect` is now a `keep-open` attribute on menu groups).
- Made dynamic menus possible by using IdsDataSource
- `[Progress]` The progress component has been changed to a web component and renamed to ids-progress.
- Markup has changed to a custom element `<ids-progress></ids-progress>`
- If using events, events are now plain JS events.
- Can now be imported as a single JS file and used with encapsulated styles
- `[Radio]` The Radio component has been changed to a web component and use with ids-radio and ids-radio-group.
- Markup has changed to a custom element `<ids-radio-group><ids-radio></ids-radio></ids-radio-group>`
- If using events, events are now plain JS events.
- Can now be imported as a single JS file and used with encapsulated styles
- `[RenderLoop]` The RenderLoop component has been implemented as a mixin.
- Added `IdsRenderLoopMixin` for access to a global renderLoop instance from within WebComponents.
- API has been simplified and made more user-friendly.
- `[Switch]` The Switch component has been changed to a web component and renamed to ids-switch.
- Markup has changed to a custom element `<ids-switch></ids-switch>`
- If using events, events are now plain JS events.
- Can now be imported as a single JS file and used with encapsulated styles
- `[Tags]` The Tag component has been changed to a web component.
- If using events events are now plain JS events. beforetagremoved, tagremoved, aftertagremoved
- If using properties/settings these are now attributes: dismissible, color
- Markup has changed to a custom element `<ids-tag color="error">Text</ids-tag>`
- Can now be imported as a single JS file and used with encapsulated styles
- `[Textarea]` The Textarea component has been changed to a web component and renamed to ids-textarea.
- Markup has changed to a custom element `<ids-textarea></ids-textarea>`
- If using events, events are now plain JS events.
- Can now be imported as a single JS file and used with encapsulated styles
- If using a clearable X on the textarea the x is now tabbable by default for accessibility
- `[Toolbar]` The Toolbar component has been changed to a web component.
- Markup is now a custom element `<ids-toolbar></ids-toolbar>`
- There is no longer a "Toolbar Item" component, instead use standard components directly.
- Toolbar Sections are now codified as components `<ids-toolbar-section>`
- The More Actions Button is now a codified component `<ids-toolbar-more-actions>`
- Toolbar can be tabbable or not
- `[Tooltip]` The Tooltip component has been changed to a web component.
- Markup is now a custom element `<ids-tooltip></ids-tooltip>`
- There is no longer a "KeepOpen" setting, instead add a component to the page and use `visible=true`
- Tooltip can be used as a mixin or standalone component
- Tooltip is no longer shared with popups so all options related to that are removed
- Error Tooltips are removed/not needed at this time
- attachToBody option is removed/not needed at this time
- Content can now be set with `tooltipElem.textContent`
- Instead of open and close use `tooltipElem.visible=true`
- After show event was not needed as its the same as show so this is removed
- `[Wizard]` The Wizard component has been changed to a web component. ([#5026](https://github.com/infor-design/enterprise/issues/5026))
- Wizard is now a custom element `<ids-wizard></ids-wizard>`
- Steps are now codified as custom elements `<ids-wizard-step>MY-LABEL</ids-wizard-step>`
- Setting the step number is done via `ids-wizard`'s `step-number` attribute.
- `clickable` property is available on both any `ids-wizard-step` and `ids-wizard` overall.
- `[Upload]` The file upload component has been changed to a web component and renamed to ids-upload.
- Markup has changed to a custom element `<ids-upload></ids-upload>`
- If using events, events are now plain JS events.
- Can now be imported as a single JS file and used with encapsulated styles
- `[Upload Advanced]` The file upload advanced component has been changed to a web component and renamed to ids-upload-advanced.
- Markup has changed to a custom element `<ids-upload-advanced></ids-upload-advanced>`
- Each file ui sections now use as components `<ids-upload-advanced-file></ids-upload-advanced-file>`
- Can get each file status, can get by simple component properties
- If using events, events are now plain JS events.
- Can now be imported as a single JS file and used with encapsulated styles
<file_sep>/README.md
# Infor Design System's Enterprise Components
[](https://opensource.org/licenses/Apache-2.0)
[](https://badge.fury.io/js/ids-enterprise)
[](https://github.com/infor-design/enterprise-wc/actions/workflows/ci.yml)
[](https://coveralls.io/github/infor-design/enterprise-wc?branch=main)
Infor Design System's Enterprise component library is a framework-independent UI library consisting of CSS and JS that provides Infor product development teams, partners, and customers the tools to create user experiences that are approachable, focused, relevant and perceptive.
For guidelines on when and where to use the components see the [design.infor.com](http://design.infor.com).
## Key Features
- Multiple themes, including a WCAG 2.0 AAA compatible high-contrast theme
- Responsive components, patterns and layouts
- Touch-friendly interactions
- SVG-based iconography compatible with high DPI screens
- Built-in, extendible localization system
- Built-in mitigation of XSS exploits
- 140+ Components
## Browser Support
We support the latest release and the release previous to the latest (R-1) for browsers and OS versions:
<!-- markdownlint-disable MD013 MD033 -->
| [<img src="https://raw.githubusercontent.com/alrra/browser-logos/master/src/edge/edge_48x48.png" alt="IE / Edge" width="24px" height="24px" />](http://godban.github.io/browsers-support-badges/)</br>IE Edge | [<img src="https://raw.githubusercontent.com/alrra/browser-logos/master/src/firefox/firefox_48x48.png" alt="Firefox" width="24px" height="24px" />](http://godban.github.io/browsers-support-badges/)</br>Firefox | [<img src="https://raw.githubusercontent.com/alrra/browser-logos/master/src/chrome/chrome_48x48.png" alt="Chrome" width="24px" height="24px" />](http://godban.github.io/browsers-support-badges/)</br>Chrome | [<img src="https://raw.githubusercontent.com/alrra/browser-logos/master/src/safari/safari_48x48.png" alt="Safari" width="24px" height="24px" />](http://godban.github.io/browsers-support-badges/)</br>Safari | [<img src="https://raw.githubusercontent.com/alrra/browser-logos/master/src/safari-ios/safari-ios_48x48.png" alt="iOS Safari" width="24px" height="24px" />](http://godban.github.io/browsers-support-badges/)</br>iOS Safari |
| --------- | --------- | --------- | --------- | --------- |
| R-1 | R-1 | R-1| R-1| R-1
<!-- markdownlint-enable MD013 MD033 -->
## Installation
```sh
npm install --save ids-enterprise@latest
```
For additional usage methods, see [Installing IDS](docs/DEVELOPER.md#installing-ids-into-your-project)
## Documentation
- [Latest Release Docs](https://design.infor.com/code/ids-enterprise/latest)
- [Change Log](docs/CHANGELOG.md) (Includes latest release changes)
- [How to build the documentation from source](docs/DEVELOPER.md#basic-commands)
In Ids Web Components we chose to use JavaScript and not TypeScript; however, we want to support developers who use TypeScript. For each component we also include TypeScript declaration files for all methods, settings and events, which also keeps our own code type-safe when included directly. We also created a small [example TypeScript project](https://github.com/infor-design/enterprise-wc-examples/tree/master/typescript-ids-wc) to show one way to use Ids Web Components in a plain TypeScript project.
## Contributing
- [Contribution Guidelines](docs/CONTRIBUTING.md)
- [Developer Information](docs/DEVELOPER.md)
- Use [Github Issues](https://github.com/infor-design/enterprise/issues) to report all requests, bugs, questions, and feature requests
- [Review source code changes](https://github.com/infor-design/enterprise/pulls)
- [Releases, previous and upcoming](https://github.com/infor-design/enterprise/releases)
- [Microsoft Teams Group](https://teams.microsoft.com/l/team/19%3a2b0c9ce520b0481a9ce115f0ca4a326f%40thread.skype/conversations?groupId=4f50ef7d-e88d-4ccb-98ca-65f26e57fe35&tenantId=457d5685-0467-4d05-b23b-8f817adda47c) (Infor Employees only)
- [Roadmap and Sprint Board](https://github.com/orgs/infor-design/projects)
- Test first - 100% Test Coverage
- Passes all code security scans and is fully CSP compatible
- Well documented in `.md` format
- Contains an extensive [Change log](./doc/CHANGELOG.md) which lists any and all breaking changes
- [Fully linted code](./doc/LINTING.md)
- Follows [WAI-ARIA Authoring Practices](https://www.w3.org/TR/wai-aria-practices-1.1/#keyboard-interaction-12) with a huge focus on accessibility
- Fully Namespaced with an `ids-` namespace
- We Follow the [Gold Standard For Making Web Components](https://github.com/webcomponents/gold-standard/wiki)
- Type safe for TypeScript users
- Every component has the Css and Dom Encapsulated (in supported browsers)
<file_sep>/app/ids-mask/number-leading-zeros.js
import IdsInput from '../../src/ids-input/ids-input';
document.addEventListener('DOMContentLoaded', () => {
  // Number Input - use `number` string to pre-configure the internal Number Mask
  const numberInput = document.querySelector('#mask-number');
  numberInput.mask = 'number';
  // Mask configuration: up to 7 integer digits and 2 decimal digits,
  // allowing leading zeros, negative values and thousands separators.
  numberInput.maskOptions = {
    allowDecimal: true,
    allowLeadingZeros: true,
    allowNegative: true,
    allowThousandsSeparator: true,
    decimalLimit: 2,
    integerLimit: 7
  };
});
<file_sep>/test/ids-loader/ids-loader-func-test.js
/**
* @jest-environment jsdom
*/
import IdsLoader from '../../src/ids-loader/ids-loader';
// Functional tests for IdsLoader: rendering, snapshot and theme settings.
describe('IdsLoader Component', () => {
  let loader;

  // Create a fresh loader instance in the DOM before each test
  beforeEach(async () => {
    const elem = new IdsLoader();
    document.body.appendChild(elem);
    loader = document.querySelector('ids-loader');
  });

  // Reset the DOM so tests cannot leak state into each other
  afterEach(async () => {
    document.body.innerHTML = '';
  });

  it('renders with no errors', () => {
    const errors = jest.spyOn(global.console, 'error');
    document.body.innerHTML = '';
    loader = new IdsLoader();
    document.body.appendChild(loader);
    expect(document.querySelectorAll('ids-loader').length).toEqual(1);
    expect(errors).not.toHaveBeenCalled();
  });

  it('renders correctly', () => {
    // Snapshot of the shadow DOM guards against accidental template changes
    expect(loader.shadowRoot.innerHTML).toMatchSnapshot();
  });

  it('supports setting mode', () => {
    // Theme mode is reflected onto the inner container element
    loader.mode = 'dark';
    expect(loader.container.getAttribute('mode')).toEqual('dark');
  });

  it('supports setting version', () => {
    loader.version = 'classic';
    expect(loader.container.getAttribute('version')).toEqual('classic');
  });
});
<file_sep>/doc/CHECKLIST.md
# Task List for this POC
## Component Checklist
**About** (ids-about)
- [ ] Docs
- [ ] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [ ] Upgrade Docs in Changelog
- [ ] Typings
- [ ] NG / Vue / React Example
- [ ] Standalone Css
- [ ] Works in Page with 4.x
**Accordion** (ids-accordion)
- [ ] Docs
- [ ] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [ ] Upgrade Docs in Changelog
- [ ] Typings
- [ ] NG / Vue / React Example
- [ ] Standalone Css
- [ ] Works in Page with 4.x
**Alerts** (ids-icon)
- [x] Docs
- [x] 100% Test Coverage
- [x] Feature Parity with 4.x
- [ ] Upgrade Docs in Changelog
- [x] Typings
- [x] NG / Vue / React Example
- [x] Works in Page with 4.x
Standalone Css is not applicable; use SVG or PNG files. Could also still use an explicit ids-alert component.
**Applicationmenu** (ids-app-menu)
- [ ] Docs
- [ ] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [ ] Upgrade Docs in Changelog
- [ ] Typings
- [ ] NG / Vue / React Example
- [ ] Standalone Css
- [ ] Works in Page with 4.x
**Area** (ids-area-chart)
- [ ] Docs
- [ ] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [ ] Upgrade Docs in Changelog
- [ ] Typings
- [ ] NG / Vue / React Example
- [ ] Standalone Css
- [ ] Works in Page with 4.x
**Autocomplete** (ids-autocomplete)
- [ ] Docs
- [ ] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [ ] Upgrade Docs in Changelog
- [ ] Typings
- [ ] NG / Vue / React Example
- [ ] Standalone Css
- [ ] Works in Page with 4.x
**Badges** (ids-badge)
- [ ] Docs
- [ ] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [ ] Upgrade Docs in Changelog
- [ ] Typings
- [ ] NG / Vue / React Example
- [ ] Standalone Css
- [ ] Works in Page with 4.x
**Bar** (ids-bar-chart)
- [ ] Docs
- [ ] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [ ] Upgrade Docs in Changelog
- [ ] Typings
- [ ] NG / Vue / React Example
- [ ] Standalone Css
- [ ] Works in Page with 4.x
**Blockgrid** (ids-block-grid)
- [ ] Docs
- [ ] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [ ] Upgrade Docs in Changelog
- [ ] Typings
- [ ] NG / Vue / React Example
- [ ] Standalone Css
- [ ] Works in Page with 4.x
**Breadcrumb** (ids-bread-crumb)
- [ ] Docs
- [ ] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [ ] Upgrade Docs in Changelog
- [ ] Typings
- [ ] NG / Vue / React Example
- [ ] Standalone Css
- [ ] Works in Page with 4.x
**Bubble** (ids-bubble-chart)
- [ ] Docs
- [ ] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [ ] Upgrade Docs in Changelog
- [ ] Typings
- [ ] NG / Vue / React Example
- [ ] Standalone Css
- [ ] Works in Page with 4.x
**Bullet** (ids-bullet-chart)
- [ ] Docs
- [ ] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [ ] Upgrade Docs in Changelog
- [ ] Typings
- [ ] NG / Vue / React Example
- [ ] Standalone Css
- [ ] Works in Page with 4.x
**Loader** (ids-loader) aka Busy Indicator / Loading Indicator
- [x] Docs
- [x] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [ ] Upgrade Docs in Changelog
- [x] Typings
- [ ] NG / Vue / React Example
- [ ] Standalone Css
- [ ] Works in Page with 4.x
**Button** (ids-button)
- [x] Docs
- [x] 100% Test Coverage
- [x] Feature Parity with 4.x
- [x] Upgrade Docs in Changelog
- [x] Typings
- [ ] NG / Vue / React Example
- [x] Standalone Css
- [ ] Works in Page with 4.x
**Calendar** (ids-calendar)
- [ ] Docs
- [ ] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [ ] Upgrade Docs in Changelog
- [ ] Typings
- [ ] NG / Vue / React Example
- [ ] Standalone Css
- [ ] Works in Page with 4.x
**Cards** (ids-card)
- [x] Docs
- [x] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [x] Upgrade Docs in Changelog
- [x] Typings
- [ ] NG / Vue / React Example
- [x] Standalone Css
- [ ] Works in Page with 4.x
**Checkboxes** (ids-checkbox)
- [x] Docs
- [ ] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [ ] Upgrade Docs in Changelog
- [ ] Typings
- [ ] NG / Vue / React Example
- [x] Standalone Css
- [ ] Works in Page with 4.x
**Circlepager** (ids-pager)
- [ ] Docs
- [ ] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [ ] Upgrade Docs in Changelog
- [ ] Typings
- [ ] NG / Vue / React Example
- [ ] Standalone Css
- [ ] Works in Page with 4.x
Will make a pager type for this.
**Colorpicker** (ids-color-picker)
- [ ] Docs
- [ ] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [ ] Upgrade Docs in Changelog
- [ ] Typings
- [ ] NG / Vue / React Example
- [ ] Standalone Css
- [ ] Works in Page with 4.x
**Column** (ids-column-chart or ids-bar with a orientation setting)
- [ ] Docs
- [ ] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [ ] Upgrade Docs in Changelog
- [ ] Typings
- [ ] NG / Vue / React Example
- [ ] Standalone Css
- [ ] Works in Page with 4.x
**Completion Chart** (ids-completion-chart)
- [ ] Docs
- [ ] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [ ] Upgrade Docs in Changelog
- [ ] Typings
- [ ] NG / Vue / React Example
- [ ] Standalone Css
- [ ] Works in Page with 4.x
**Contextualactionpanel** (ids-contextual-action-panel or ids-modal)
- [ ] Docs
- [ ] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [ ] Upgrade Docs in Changelog
- [ ] Typings
- [ ] NG / Vue / React Example
- [ ] Standalone Css
- [ ] Works in Page with 4.x
**Datagrid** (ids-data-grid)
- [x] Docs
- [x] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [x] Upgrade Docs in Changelog
- [x] Typings
- [ ] NG / Vue / React Example
- [x] Standalone Css
- [ ] Works in Page with 4.x
**Datepicker** (ids-date-picker)
- [ ] Docs
- [ ] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [ ] Upgrade Docs in Changelog
- [ ] Typings
- [ ] NG / Vue / React Example
- [ ] Standalone Css
- [ ] Works in Page with 4.x
**Donut** (ids-donut or combined with ids-pie)
- [ ] Docs
- [ ] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [ ] Upgrade Docs in Changelog
- [ ] Typings
- [ ] NG / Vue / React Example
- [ ] Standalone Css
- [ ] Works in Page with 4.x
**Dropdown** (ids-drop-down)
- [ ] Docs
- [ ] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [ ] Upgrade Docs in Changelog
- [ ] Typings
- [ ] NG / Vue / React Example
- [ ] Standalone Css
- [ ] Works in Page with 4.x
**Editor** (ids-editor)
- [ ] Docs
- [ ] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [ ] Upgrade Docs in Changelog
- [ ] Typings
- [ ] NG / Vue / React Example
- [ ] Standalone Css
- [ ] Works in Page with 4.x
**Emptymessage** (ids-empty-message)
- [ ] Docs
- [ ] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [ ] Upgrade Docs in Changelog
- [ ] Typings
- [ ] NG / Vue / React Example
- [ ] Standalone Css
- [ ] Works in Page with 4.x
**Error Page** (ids-page-error)
- [ ] Docs
- [ ] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [ ] Upgrade Docs in Changelog
- [ ] Typings
- [ ] NG / Vue / React Example
- [ ] Standalone Css
- [ ] Works in Page with 4.x
**Expandablearea** (ids-expandable-area)
- [ ] Docs
- [ ] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [ ] Upgrade Docs in Changelog
- [ ] Typings
- [ ] NG / Vue / React Example
- [ ] Standalone Css
- [ ] Works in Page with 4.x
**Fieldset** (ids-field-set)
- [ ] Docs
- [ ] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [ ] Upgrade Docs in Changelog
- [ ] Typings
- [ ] NG / Vue / React Example
- [ ] Standalone Css
- [ ] Works in Page with 4.x
**Fileupload** (ids-file-upload)
- [ ] Docs
- [ ] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [ ] Upgrade Docs in Changelog
- [ ] Typings
- [ ] NG / Vue / React Example
- [ ] Standalone Css
- [ ] Works in Page with 4.x
**Fileupload Advanced** (ids-file-upload with options)
- [ ] Docs
- [ ] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [ ] Upgrade Docs in Changelog
- [ ] Typings
- [ ] NG / Vue / React Example
- [ ] Standalone Css
- [ ] Works in Page with 4.x
**Fontpicker** (ids-font-picker)
- [ ] Docs
- [ ] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [ ] Upgrade Docs in Changelog
- [ ] Typings
- [ ] NG / Vue / React Example
- [ ] Standalone Css
- [ ] Works in Page with 4.x
**Form** (ids-form if needed)
- [ ] Docs
- [ ] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [ ] Upgrade Docs in Changelog
- [ ] Typings
- [ ] NG / Vue / React Example
- [ ] Standalone Css
- [ ] Works in Page with 4.x
**Grid** (ids-layout-grid)
- [x] Docs
- [ ] 100% Test Coverage
- [x] Feature Parity with 4.x
- [ ] Upgrade Docs in Changelog
- [x] Typings
- [ ] NG / Vue / React Example
- [ ] Standalone Css
- [ ] Works in Page with 4.x
**Header** (ids-header)
- [ ] Docs
- [ ] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [ ] Upgrade Docs in Changelog
- [ ] Typings
- [ ] NG / Vue / React Example
- [ ] Standalone Css
- [ ] Works in Page with 4.x
**Hierarchy** (ids-hierarchy)
- [ ] Docs
- [ ] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [ ] Upgrade Docs in Changelog
- [ ] Typings
- [ ] NG / Vue / React Example
- [ ] Standalone Css
- [ ] Works in Page with 4.x
**Homepage** (ids-homepage)
- [ ] Docs
- [ ] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [ ] Upgrade Docs in Changelog
- [ ] Typings
- [ ] NG / Vue / React Example
- [ ] Standalone Css
- [ ] Works in Page with 4.x
**Hyperlinks** (ids-hyperlink)
- [ ] Docs
- [ ] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [ ] Upgrade Docs in Changelog
- [ ] Typings
- [ ] NG / Vue / React Example
- [ ] Standalone Css
- [ ] Works in Page with 4.x
**Icons** (ids-icon)
- [x] Docs
- [x] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [x] Upgrade Docs in Changelog
- [x] Typings
- [x] NG / Vue / React Example
- [x] Standalone Css (Not Possible use design system icons and png)
- [ ] Works in Page with 4.x
**Images** (ids-image)
- [ ] Docs
- [ ] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [ ] Upgrade Docs in Changelog
- [ ] Typings
- [ ] NG / Vue / React Example
- [ ] Standalone Css
- [ ] Works in Page with 4.x
**Input** (ids-input)
- [x] Docs
- [ ] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [ ] Upgrade Docs in Changelog
- [x] Typings
- [ ] NG / Vue / React Example
- [x] Standalone Css
- [ ] Works in Page with 4.x
**Line** (ids-line-chart)
- [ ] Docs
- [ ] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [ ] Upgrade Docs in Changelog
- [ ] Typings
- [ ] NG / Vue / React Example
- [ ] Standalone Css
- [ ] Works in Page with 4.x
**Listbuilder** (ids-list-builder)
- [ ] Docs
- [ ] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [ ] Upgrade Docs in Changelog
- [ ] Typings
- [ ] NG / Vue / React Example
- [ ] Standalone Css
- [ ] Works in Page with 4.x
**Listview** (ids-list-view)
- [x] Docs
- [x] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [x] Upgrade Docs in Changelog
- [x] Typings
- [ ] NG / Vue / React Example
- [x] Standalone Css
- [ ] Works in Page with 4.x
**Locale** (ids-locale)
- [ ] Docs
- [ ] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [ ] Upgrade Docs in Changelog
- [ ] Typings
- [ ] NG / Vue / React Example
- [ ] Standalone Css
- [ ] Works in Page with 4.x
**Lookup** (ids-lookup)
- [ ] Docs
- [ ] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [ ] Upgrade Docs in Changelog
- [ ] Typings
- [ ] NG / Vue / React Example
- [ ] Standalone Css
- [ ] Works in Page with 4.x
**Mask** (ids-mask - done as a mixin on components that support it)
- [ ] Docs
- [ ] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [ ] Upgrade Docs in Changelog
- [ ] Typings
- [ ] NG / Vue / React Example
- [ ] Standalone Css
- [ ] Works in Page with 4.x
**Masthead** (ids-mast-head)
- [ ] Docs
- [ ] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [ ] Upgrade Docs in Changelog
- [ ] Typings
- [ ] NG / Vue / React Example
- [ ] Standalone Css
- [ ] Works in Page with 4.x
**MenuButton** (ids-menu-button)
- [x] Docs
- [x] 100% Test Coverage
- [x] Feature Parity with 4.x
- [x] Upgrade Docs in Changelog
- [x] Typings
- [x] NG / Vue / React Example
- [x] Standalone Css
- [ ] Works in Page with 4.x
**Message** (ids-message)
- [ ] Docs
- [ ] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [ ] Upgrade Docs in Changelog
- [ ] Typings
- [ ] NG / Vue / React Example
- [ ] Standalone Css
- [ ] Works in Page with 4.x
**Modal** (ids-modal)
- [ ] Docs
- [ ] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [ ] Upgrade Docs in Changelog
- [ ] Typings
- [ ] NG / Vue / React Example
- [ ] Standalone Css
- [ ] Works in Page with 4.x
**Monthview** (ids-month-view)
- [ ] Docs
- [ ] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [ ] Upgrade Docs in Changelog
- [ ] Typings
- [ ] NG / Vue / React Example
- [ ] Standalone Css
- [ ] Works in Page with 4.x
**Multiselect** (ids-multi-select)
- [ ] Docs
- [ ] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [ ] Upgrade Docs in Changelog
- [ ] Typings
- [ ] NG / Vue / React Example
- [ ] Standalone Css
- [ ] Works in Page with 4.x
**Notification** (ids-notification)
- [ ] Docs
- [ ] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [ ] Upgrade Docs in Changelog
- [ ] Typings
- [ ] NG / Vue / React Example
- [ ] Standalone Css
- [ ] Works in Page with 4.x
**Page Layouts** (ids-page-layout or as individual separate examples)
- [ ] Docs
- [ ] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [ ] Upgrade Docs in Changelog
- [ ] Typings
- [ ] NG / Vue / React Example
- [ ] Standalone Css
- [ ] Works in Page with 4.x
**Page Patterns** (ids-page-pattern or as individual separate examples)
- [ ] Docs
- [ ] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [ ] Upgrade Docs in Changelog
- [ ] Typings
- [ ] NG / Vue / React Example
- [ ] Standalone Css
- [ ] Works in Page with 4.x
**Pager** (ids-pager)
- [ ] Docs
- [ ] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [ ] Upgrade Docs in Changelog
- [ ] Typings
- [ ] NG / Vue / React Example
- [ ] Standalone Css
- [ ] Works in Page with 4.x
**Personalize** (ids-personalize or as a mixin on components thats support)
- [ ] Docs
- [ ] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [ ] Upgrade Docs in Changelog
- [ ] Typings
- [ ] NG / Vue / React Example
- [ ] Standalone Css
- [ ] Works in Page with 4.x
**Pie** (ids-pie-chart)
- [ ] Docs
- [ ] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [ ] Upgrade Docs in Changelog
- [ ] Typings
- [ ] NG / Vue / React Example
- [ ] Standalone Css
- [ ] Works in Page with 4.x
**Popover** (ids-popup)
- [x] Docs
- [ ] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [x] Upgrade Docs in Changelog
- [x] Typings
- [ ] NG / Vue / React Example
- [x] Standalone Css
- [ ] Works in Page with 4.x
**Popupmenu** (ids-menu)
- [x] Docs
- [x] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [ ] Upgrade Docs in Changelog
- [x] Typings
- [ ] NG / Vue / React Example
- [x] Standalone Css
- [ ] Works in Page with 4.x
**Positive Negative** (ids-positive-negative-chart)
- [ ] Docs
- [ ] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [ ] Upgrade Docs in Changelog
- [ ] Typings
- [ ] NG / Vue / React Example
- [ ] Standalone Css
- [ ] Works in Page with 4.x
**Radar** (ids-radar-chart)
- [ ] Docs
- [ ] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [ ] Upgrade Docs in Changelog
- [ ] Typings
- [ ] NG / Vue / React Example
- [ ] Standalone Css
- [ ] Works in Page with 4.x
**Radios** (ids-radio)
- [x] Docs
- [ ] 100% Test Coverage
- [x] Feature Parity with 4.x
- [ ] Upgrade Docs in Changelog
- [ ] Typings
- [ ] NG / Vue / React Example
- [x] Standalone Css
- [ ] Works in Page with 4.x
**Rating** (ids-rating)
- [ ] Docs
- [ ] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [ ] Upgrade Docs in Changelog
- [ ] Typings
- [ ] NG / Vue / React Example
- [ ] Standalone Css
- [ ] Works in Page with 4.x
**Renderloop** (ids-render-loop)
- [x] Docs
- [x] 100% Test Coverage
- [x] Feature Parity with 4.x
- [ ] Upgrade Docs in Changelog
- [ ] Typings
- [ ] NG / Vue / React Example
- [x] Standalone Css
- [ ] Works in Page with 4.x
**Scatterplot** (ids-scatter-plot-chart)
- [ ] Docs
- [ ] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [ ] Upgrade Docs in Changelog
- [ ] Typings
- [ ] NG / Vue / React Example
- [ ] Standalone Css
- [ ] Works in Page with 4.x
**Searchfield** (ids-search-field)
- [ ] Docs
- [ ] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [ ] Upgrade Docs in Changelog
- [ ] Typings
- [ ] NG / Vue / React Example
- [ ] Standalone Css
- [ ] Works in Page with 4.x
**Signin** (ids-sign-in)
- [ ] Docs
- [ ] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [ ] Upgrade Docs in Changelog
- [ ] Typings
- [ ] NG / Vue / React Example
- [ ] Standalone Css
- [ ] Works in Page with 4.x
**Skiplink** (ids-skip-link)
- [ ] Docs
- [ ] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [ ] Upgrade Docs in Changelog
- [ ] Typings
- [ ] NG / Vue / React Example
- [ ] Standalone Css
- [ ] Works in Page with 4.x
**Slider** (ids-slider)
- [ ] Docs
- [ ] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [ ] Upgrade Docs in Changelog
- [ ] Typings
- [ ] NG / Vue / React Example
- [ ] Standalone Css
- [ ] Works in Page with 4.x
**Sparkline** (ids-spark-line-chart)
- [ ] Docs
- [ ] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [ ] Upgrade Docs in Changelog
- [ ] Typings
- [ ] NG / Vue / React Example
- [ ] Standalone Css
- [ ] Works in Page with 4.x
**Spinbox** (ids-spin-box)
- [ ] Docs
- [ ] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [ ] Upgrade Docs in Changelog
- [ ] Typings
- [ ] NG / Vue / React Example
- [ ] Standalone Css
- [ ] Works in Page with 4.x
**Splitter** (ids-splitter)
- [ ] Docs
- [ ] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [ ] Upgrade Docs in Changelog
- [ ] Typings
- [ ] NG / Vue / React Example
- [ ] Standalone Css
- [ ] Works in Page with 4.x
**Stepchart** (ids-step-chart)
- [ ] Docs
- [ ] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [ ] Upgrade Docs in Changelog
- [ ] Typings
- [ ] NG / Vue / React Example
- [ ] Standalone Css
- [ ] Works in Page with 4.x
**Swaplist** (ids-swap-list)
- [ ] Docs
- [ ] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [ ] Upgrade Docs in Changelog
- [ ] Typings
- [ ] NG / Vue / React Example
- [ ] Standalone Css
- [ ] Works in Page with 4.x
**Switch** (ids-switch)
- [ ] Docs
- [ ] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [ ] Upgrade Docs in Changelog
- [ ] Typings
- [ ] NG / Vue / React Example
- [ ] Standalone Css
- [ ] Works in Page with 4.x
**Tabs** (ids-tabs)
- [ ] Docs
- [ ] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [ ] Upgrade Docs in Changelog
- [ ] Typings
- [ ] NG / Vue / React Example
- [ ] Standalone Css
- [ ] Works in Page with 4.x
**Tabs Header** (ids-tabs with option)
- [ ] Docs
- [ ] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [ ] Upgrade Docs in Changelog
- [ ] Typings
- [ ] NG / Vue / React Example
- [ ] Standalone Css
- [ ] Works in Page with 4.x
**Tabs Module** (ids-tabs with option)
- [ ] Docs
- [ ] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [ ] Upgrade Docs in Changelog
- [ ] Typings
- [ ] NG / Vue / React Example
- [ ] Standalone Css
- [ ] Works in Page with 4.x
**Tabs Multi** (ids-tabs with option)
- [ ] Docs
- [ ] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [ ] Upgrade Docs in Changelog
- [ ] Typings
- [ ] NG / Vue / React Example
- [ ] Standalone Css
- [ ] Works in Page with 4.x
**Tabs Vertical** (ids-tabs with option)
- [ ] Docs
- [ ] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [ ] Upgrade Docs in Changelog
- [ ] Typings
- [ ] NG / Vue / React Example
- [ ] Standalone Css
- [ ] Works in Page with 4.x
**Tag** (ids-tag)
- [x] Docs
- [x] 100% Test Coverage
- [x] Feature Parity with 4.x
- [x] Upgrade Docs in Changelog
- [x] Typings
- [x] NG / Vue / React Example
- [x] Standalone Css
- [x] Works in Page with 4.x
**Targeted Achievement** (ids-target-chart)
- [ ] Docs
- [ ] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [ ] Upgrade Docs in Changelog
- [ ] Typings
- [ ] NG / Vue / React Example
- [ ] Standalone Css
- [ ] Works in Page with 4.x
**Textarea** (ids-text-area)
- [ ] Docs
- [ ] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [ ] Upgrade Docs in Changelog
- [ ] Typings
- [ ] NG / Vue / React Example
- [ ] Standalone Css
- [ ] Works in Page with 4.x
**Timeline** (ids-time-line)
- [ ] Docs
- [ ] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [ ] Upgrade Docs in Changelog
- [ ] Typings
- [ ] NG / Vue / React Example
- [ ] Standalone Css
- [ ] Works in Page with 4.x
**Timepicker** (ids-time-picker)
- [ ] Docs
- [ ] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [ ] Upgrade Docs in Changelog
- [ ] Typings
- [ ] NG / Vue / React Example
- [ ] Standalone Css
- [ ] Works in Page with 4.x
**Toast** (ids-toast)
- [ ] Docs
- [ ] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [ ] Upgrade Docs in Changelog
- [ ] Typings
- [ ] NG / Vue / React Example
- [ ] Standalone Css
- [ ] Works in Page with 4.x
**Toolbar** (ids-toolbar)
- [x] Docs
- [x] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [x] Upgrade Docs in Changelog
- [x] Typings
- [ ] NG / Vue / React Example
- [x] Standalone Css
- [ ] Works in Page with 4.x
**Tooltip** (ids-tooltip)
- [ ] Docs
- [ ] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [ ] Upgrade Docs in Changelog
- [ ] Typings
- [ ] NG / Vue / React Example
- [ ] Standalone Css
- [ ] Works in Page with 4.x
**Trackdirty** (ids-dirty-tracker)
- [ ] Docs
- [ ] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [ ] Upgrade Docs in Changelog
- [x] Typings
- [ ] NG / Vue / React Example
- [ ] Standalone Css
- [ ] Works in Page with 4.x
**Tree** (ids-tree)
- [ ] Docs
- [ ] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [ ] Upgrade Docs in Changelog
- [ ] Typings
- [ ] NG / Vue / React Example
- [ ] Standalone Css
- [ ] Works in Page with 4.x
**Treemap** (ids-tree-map)
- [ ] Docs
- [ ] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [ ] Upgrade Docs in Changelog
- [ ] Typings
- [ ] NG / Vue / React Example
- [ ] Standalone Css
- [ ] Works in Page with 4.x
**Typography** (ids-text)
- [ ] Docs
- [x] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [ ] Upgrade Docs in Changelog
- [ ] Typings
- [x] NG / Vue / React Example
- [ ] Standalone Css
- [ ] Works in Page with 4.x
**Validation** (ids-validation (mixin))
- [ ] Docs
- [ ] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [ ] Upgrade Docs in Changelog
- [ ] Typings
- [ ] NG / Vue / React Example
- [ ] Standalone Css
- [ ] Works in Page with 4.x
**Week View** (ids-week-view)
- [ ] Docs
- [ ] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [ ] Upgrade Docs in Changelog
- [ ] Typings
- [ ] NG / Vue / React Example
- [ ] Standalone Css
- [ ] Works in Page with 4.x
**Wizard** (ids-wizard)
- [ ] Docs
- [x] 100% Test Coverage
- [ ] Feature Parity with 4.x
- [ ] Upgrade Docs in Changelog
- [x] Typings
- [ ] NG / Vue / React Example
- [ ] Standalone Css
- [ ] Works in Page with 4.x
<file_sep>/src/ids-menu-button/ids-menu-button.d.ts
import { IdsButton } from '../ids-button/ids-button';
import IdsIcon from '../ids-icon/ids-icon';
import IdsPopupMenu from '../ids-popup-menu/ids-popup-menu';
export default class IdsMenuButton extends IdsButton {
/** Defines the type of icon used as the secondary "dropdown icon". If null, no icon is used */
dropdownIcon: string | boolean;
/** Reference to the rendered dropdown-icon element, or null when no dropdown icon is displayed */
readonly dropdownIconEl: IdsIcon | null;
/** Identifier (presumably a selector/id string) of the popup menu tied to this button; null when unset — verify exact semantics in the implementation */
menu: string | null;
/** The resolved IdsPopupMenu instance associated with this button */
readonly menuEl: IdsPopupMenu;
/** Associates/configures the popup menu for this button (see implementation for details) */
configureMenu(): void;
/** Re-runs menu configuration, e.g. after the `menu` reference changes (see implementation) */
refreshMenu(): void;
}
<file_sep>/test/helpers/snapshot-resolver.js
/**
* Changes the folder mapping of jest snapshots to flatten them.
*/
module.exports = {
resolveSnapshotPath: (testPath, snapshotExtension) =>
testPath + snapshotExtension,
resolveTestPath: (snapshotFilePath, snapshotExtension) =>
snapshotFilePath.slice(0, -snapshotExtension.length),
testPathForConsistencyCheck: 'test/ids-component/ids-component-func-test.js'
};
<file_sep>/src/ids-icon/README.md
# Ids Icon Component
## Description
The design system provides a list of standard icons which can be used in your applications. The list of icons is a result of studying common iconography and consolidating that with standardized icons (for example, a floppy disk to represent a "save" function). The icons in the list should be familiar to users.
## Use Cases
- Icons are most effective when they improve visual interest and grab the user's attention. They help guide users while they're navigating a page.
- Use too many icons and they'll become nothing more than decoration. Their use for navigation on a webpage can often cause dilution.
## Terminology
- **Icon**: An icon is a symbol. In this design case this is literal, for example the little trashcan to indicate delete.
- **Svg**: Stands for Scalable Vector Graphics, an eXtensible Markup Language (XML)-based vector image format for the Web and other environments
## Features (With Code Examples)
For a normal sized icon just specify the icon name and the web component will display the appropriate icon.
```html
<ids-icon icon="notes"></ids-icon>
```
Icons come in 3 sizes depending where it is used.
```html
<ids-icon icon="notes" size="large"></ids-icon>
<ids-icon icon="notes" size="medium"></ids-icon>
<ids-icon icon="notes" size="small"></ids-icon>
```
## States and Variations
- Color
- Size
- Alert
## Keyboard Guidelines
An icon is not on its own keyboard focusable and has no keyboard interaction.
## Responsive Guidelines
- Flows within its parent/placement and is usually centered vertically.
## Converting from Previous Versions
- 3.x: Icons have all new markup and classes.
- 4.x: Icons have all new markup and classes again.
- 4.x: Icons have all new markup and classes and a custom element for web components.
## Designs
[Figma Design Specs](https://www.figma.com/files/team/715586812838044954/Hook%26Loop)
## Alternate Designs
Icons differ in the two provided theme/icon versions.
## Proposed Changes
- Fix Accessibility issue (1.4.1 Use of Color) by changing some alert colors
- Fix Accessibility issue (1.4.3 Contrast (Minimum)) by changing some icon colors when its used with text
## Accessibility Guidelines
- 1.4.1 Use of Color - Color is not used as the only visual means of conveying information, indicating an action, prompting a response, or distinguishing a visual element. Ensure the color tags that indicate state like OK, Cancel, etc. have other ways to indicate that information. This is failing.
- 1.4.3 Contrast (Minimum) - The visual presentation of text and images of text has a contrast ratio of at least 4.5:1. Ensure the icons tags pass contrast when combined with text.
## Regional Considerations
Some icons that indicate direction will be flipped when in Right-To-Left languages. This is a TODO still.
<file_sep>/test/ids-base/ids-theme-mixin-func-test.js
/**
* @jest-environment jsdom
*/
import IdsTag from '../../src/ids-tag/ids-tag';
import IdsThemeSwitcher from '../../src/ids-theme-switcher/ids-theme-switcher';
// Functional tests for the shared theme mixin, exercised through IdsTag and
// IdsThemeSwitcher (runs under jsdom).
describe('IdsThemeMixin Tests', () => {
  let elem;
  let switcher;

  // Attach a switcher and a themed element, and wire the element's theme handlers.
  beforeEach(async () => {
    elem = new IdsTag();
    switcher = new IdsThemeSwitcher();
    document.body.appendChild(switcher);
    document.body.appendChild(elem);
    elem.initThemeHandlers();
  });

  // Clear the DOM so state cannot leak between tests.
  afterEach(async () => {
    document.body.innerHTML = '';
    elem = null;
  });

  it('sets up mode and version setters', () => {
    elem.mode = 'dark';
    elem.version = 'classic';
    expect(elem.container.getAttribute('mode')).toEqual('dark');
    // Fixed: this previously duplicated the `mode` assertion, so the
    // `version` setter was never actually verified.
    expect(elem.container.getAttribute('version')).toEqual('classic');
  });

  it('fires themechanged event', () => {
    // The switcher must broadcast the new mode/version in the event detail.
    const mockCallback = jest.fn((x) => {
      expect(x.detail.mode).toEqual('dark');
      expect(x.detail.version).toEqual('new');
    });
    switcher.addEventListener('themechanged', mockCallback);
    switcher.mode = 'dark';
    expect(mockCallback.mock.calls.length).toBe(2);
    // The themed element must pick up the broadcast mode.
    expect(elem.mode).toEqual('dark');
  });
});
<file_sep>/app/ids-popup/index.js
import IdsPopup from '../../src/ids-popup/ids-popup';
<file_sep>/postcss.config.js
const autoprefixer = require('autoprefixer');
const cssnano = require('cssnano');

// Autoprefixer always runs; cssnano (minification) is added only for
// production builds.
const plugins = [autoprefixer];
if (process.env.NODE_ENV === 'production') {
  plugins.push(cssnano);
}

module.exports = { plugins };
<file_sep>/src/ids-popup/ids-popup.js
import {
IdsElement,
customElement,
props,
scss,
mix,
stringUtils
} from '../ids-base/ids-element';
import IdsDOMUtils from '../ids-base/ids-dom-utils';
import { IdsEventsMixin } from '../ids-base/ids-events-mixin';
import { IdsRenderLoopMixin, IdsRenderLoopItem } from '../ids-render-loop/ids-render-loop-mixin';
import { IdsResizeMixin } from '../ids-base/ids-resize-mixin';
import styles from './ids-popup.scss';
import { IdsThemeMixin } from '../ids-base/ids-theme-mixin';
// The default alignment value; also represents "no explicit alignment".
const CENTER = 'center';
// Locations in which a parent-positioned Popup can be located
const ALIGNMENT_EDGES = [CENTER, 'bottom', 'top', 'left', 'right'];
// Methods for X/Y-coordinate alignment against a parent
const ALIGNMENTS_X = [CENTER, 'left', 'right'];
const ALIGNMENTS_Y = [CENTER, 'top', 'bottom'];
// Edge-only (non-center) variants of the alignment lists above.
const ALIGNMENTS_EDGES_X = ALIGNMENTS_X.filter((x) => x !== CENTER);
const ALIGNMENTS_EDGES_Y = ALIGNMENTS_Y.filter((y) => y !== CENTER);
// Arrow Directions (defaults to 'none')
const ARROW_TYPES = ['none', 'bottom', 'top', 'left', 'right'];
// Types of Popups
const TYPES = ['none', 'menu', 'menu-alt', 'tooltip', 'tooltip-alt'];
// Properties exposed with getters/setters
// safeSet/RemoveAttribute also use these so we pull them out
const POPUP_PROPERTIES = [
'align',
'align-x',
'align-y',
'align-edge',
'align-target',
'arrow',
'arrow-target',
props.ANIMATED,
props.TYPE,
props.VISIBLE,
'x',
'y'
];
/**
 * Formats the text value of the `align` attribute from its X/Y components
 * and leading edge.
 * @private
 * @param {string} alignX matches a value from the ALIGNMENTS_X array
 * @param {string} alignY matches a value from the ALIGNMENTS_Y array
 * @param {string} edge matches a value from the ALIGNMENT_EDGES array
 * @returns {string} containing the properly formatted align value
 */
function formatAlignAttribute(alignX, alignY, edge) {
  // An empty or 'center' value contributes nothing to the attribute string.
  const isCenterish = (val) => !val || !val.length || val === CENTER;

  // Y-edge alignment: the edge leads, optionally followed by the X component.
  if (ALIGNMENTS_EDGES_Y.includes(edge)) {
    return isCenterish(alignX) ? `${edge}` : `${edge}, ${alignX}`;
  }

  // Otherwise the alignment is on the X axis.
  if (isCenterish(alignY)) {
    return `${alignX}`;
  }
  return edge === CENTER ? `${alignY}` : `${edge}, ${alignY}`;
}
/**
* IDS Popup Component
* @type {IdsPopup}
* @inherits IdsElement
* @mixes IdsRenderLoopMixin
* @mixes IdsEventsMixin
* @mixes IdsThemeMixin
* @part popup - the popup outer element
* @part arrow - the arrow element
*/
@customElement('ids-popup')
@scss(styles)
class IdsPopup extends mix(IdsElement).with(
IdsRenderLoopMixin,
IdsResizeMixin,
IdsEventsMixin,
IdsThemeMixin
) {
constructor() {
super();
// Current alignment configuration: leading edge, target element reference,
// and the X/Y alignment methods. All default to 'center' / no target.
this.alignment = {
edge: ALIGNMENT_EDGES[0],
target: undefined,
x: ALIGNMENTS_X[0],
y: ALIGNMENTS_Y[0]
};
// Raw x/y coordinates — presumably used for placement when no align target
// is set (confirm in the refresh/placement logic).
this.coords = {
x: 0,
y: 0
};
// Arrow state: direction (defaults to 'none') and optional arrow target.
this.state = {
arrow: ARROW_TYPES[0],
arrowTarget: null,
};
// Internal flags mirroring the `visible`/`animated`/`type` attributes
// (synced from the DOM in `connectedCallback`).
this.isVisible = false;
this.isAnimated = false;
this.trueType = 'none';
// Gate checked by `attributeChangedCallback`; while false, attribute
// changes are ignored.
this.shouldUpdate = true;
}
/**
 * `IdsElement.prototype.connectedCallback` implementation.
 * Syncs internal flags from the element's initial attributes, wires up
 * mutation/resize observers and event handlers, then performs the first
 * refresh.
 * @returns {void}
 */
connectedCallback() {
// Mirror the initial attribute values into internal state.
this.animated = this.hasAttribute('animated');
this.trueType = this.getAttribute('type') || this.trueType;
this.isVisible = this.hasAttribute('visible');
// Observer/handler setup — provided by the resize/mutation mixins and this
// component's own event wiring.
this.setupDetectMutations();
this.setupResize();
this.handleEvents();
super.connectedCallback();
// Re-enable attribute processing and compute initial placement.
this.shouldUpdate = true;
this.refresh();
}
/**
 * Custom Element `disconnectedCallback` implementation.
 * Tears down the resize and mutation observers that were attached in
 * `connectedCallback`.
 * @returns {void}
 */
disconnectedCallback() {
// Invoke the base implementation directly on this instance.
IdsElement.prototype.disconnectedCallback.apply(this);
// Only disconnect observers that were actually attached.
if (this.shouldResize()) {
this.disconnectResize();
}
if (this.shouldDetectMutations()) {
this.disconnectDetectMutations();
}
}
/**
* Override `attributeChangedCallback` from IdsElement to wrap its normal operation in a
* check for a true `shouldUpdate` property.
* @param {string} name The property name
* @param {string} oldValue The property old value
* @param {string} newValue The property new value
*/
attributeChangedCallback(name, oldValue, newValue) {
if (this.shouldUpdate) {
IdsElement.prototype.attributeChangedCallback.apply(this, [name, oldValue, newValue]);
}
}
/**
 * Return the properties we handle as getters/setters
 * (presumably consumed by the IdsElement base class to register observed
 * attributes — confirm against ids-element).
 * @returns {Array} The properties in an array
 */
static get properties() {
return POPUP_PROPERTIES;
}
/**
* @readonly
* @returns {HTMLElement} reference to the `content-wrapper` element
*/
get wrapper() {
return this.shadowRoot.querySelector('.content-wrapper');
}
/**
* Sets the element to align with via a css selector
* @param {string | HTMLElement | undefined} val ['string|HTMLElement'] a CSS selector string
*/
set alignTarget(val) {
const isString = typeof val === 'string' && val.length;
const isElem = val instanceof HTMLElement;
if (!isString && !isElem) {
this.alignment.target = undefined;
this.removeAttribute('align-target');
this.refresh();
return;
}
let elem;
if (isString) {
// @TODO Harden for security (XSS)
const rootNode = IdsDOMUtils.getClosestRootNode(this);
elem = rootNode.querySelector(val);
if (!(elem instanceof HTMLElement)) {
return;
}
this.setAttribute('align-target', val);
} else {
elem = val;
}
this.alignment.target = elem;
this.refresh();
}
/**
* @returns {HTMLElement| undefined} the element in the page that the Popup will take
* coordinates from for relative placement
*/
get alignTarget() {
return this.alignment.target;
}
/**
* Sets the alignment direction between left, right, top, bottom, center and can be a comma
* delimited set of multiple alignment types for example `left, top`
* @param {string} val a comma-delimited set of alignment types `direction1, direction2`
*/
set align(val) {
this.shouldUpdate = false;
let trueVal = val;
if (typeof trueVal !== 'string' || !trueVal.length) {
trueVal = CENTER;
}
// Normalize values and store the first entry as the "edge" to align against
let vals = trueVal.split(',');
vals = vals.map((thisVal) => thisVal.trim().toLowerCase());
// Adust the first value and set it as the "edge"
const edge = vals[0];
if (ALIGNMENT_EDGES.includes(edge)) {
this.alignEdge = edge;
vals[0] = this.alignEdge;
}
// If there's no second value, assumxae it's 'center'
if (!vals[1]) {
vals.push('center');
}
// If the values are defined in reverse (y, x), switch them
if (ALIGNMENTS_EDGES_Y.includes(vals[0]) || ALIGNMENTS_EDGES_X.includes(vals[1])) {
const val1 = vals[1];
vals[1] = vals[0];
vals[0] = val1;
}
// Update each alignment value if it's valid, and render the attribute
let attrX;
let attrY;
if (ALIGNMENTS_X.includes(vals[0])) {
attrX = vals[0];
this.alignX = vals[0];
} else {
attrX = this.alignX;
}
if (ALIGNMENTS_Y.includes(vals[1])) {
attrY = vals[1];
this.alignY = vals[1];
} else {
attrY = this.alignY;
}
this.setAttribute('align', formatAlignAttribute(attrX, attrY, this.alignment.edge));
this.shouldUpdate = true;
this.refresh();
}
/**
* @returns {string} a DOM-friendly string reprentation of alignment types
*/
get align() {
const { alignX, alignY } = this;
const edge = this.alignEdge;
return formatAlignAttribute(alignX, alignY, edge);
}
/**
* Strategy for the parent X alignment (see the ALIGNMENTS_X array)
* @param {string} val the strategy to use
*/
set alignX(val) {
if (typeof val !== 'string' || !val.length) {
return;
}
let alignX = val;
if (!ALIGNMENTS_X.includes(val)) {
alignX = ALIGNMENTS_X[0];
}
this.alignment.x = alignX;
const alignY = this.alignment.y;
// If `align-x` was used directy, standardize against the `align` attribute
if (this.hasAttribute('align-x')) {
this.safeRemoveAttribute('align-x');
this.align = formatAlignAttribute(alignX, alignY, alignX);
} else if (this.shouldUpdate) {
this.align = formatAlignAttribute(alignX, alignY, alignX);
}
this.refresh();
}
/**
* Strategy for the parent X alignment ((see the ALIGNMENTS_Y array)
* @returns {string} the strategy to use
*/
get alignX() {
return this.alignment.x;
}
set alignY(val) {
if (typeof val !== 'string' || !val.length) {
return;
}
let alignY = ALIGNMENTS_Y[0];
if (ALIGNMENTS_Y.includes(val)) {
alignY = val;
}
this.alignment.y = alignY;
const alignX = this.alignment.x;
// If `align-y` was used directy, standardize against the `align` attribute
if (this.hasAttribute('align-y')) {
this.safeRemoveAttribute('align-y');
this.align = formatAlignAttribute(alignX, alignY, alignY);
} else if (this.shouldUpdate) {
this.align = formatAlignAttribute(alignX, alignY, alignY);
}
this.refresh();
}
/**
* @returns {string} alignment strategy for the current parent Y alignment
*/
get alignY() {
return this.alignment.y;
}
/**
* Specifies the edge of the parent element to be placed adjacent,
* in configurations where a relative placement occurs
* @param {string} val The edge to align to
*/
set alignEdge(val) {
if (typeof val !== 'string' || !val.length) {
return;
}
// Sanitize the alignment edge
let edge;
let alignX = this.alignment.x;
let alignY = this.alignment.y;
if (ALIGNMENT_EDGES.includes(val)) {
edge = val;
if (val === CENTER) {
alignX = val;
alignY = val;
}
} else {
edge = ALIGNMENT_EDGES[0];
}
this.alignment.edge = edge;
if (this.hasAttribute('align-edge')) {
this.shouldUpdate = false;
this.removeAttribute('align-edge');
this.align = formatAlignAttribute(alignX, alignY, edge);
this.shouldUpdate = true;
} else if (this.shouldUpdate) {
this.align = formatAlignAttribute(alignX, alignY, edge);
}
this.refresh();
}
/**
* @returns {string} representing the current adjacent edge of the parent element
*/
get alignEdge() {
return this.alignment.edge;
}
/**
* Whether or not the component should animate its movement
* @param {boolean} val The alignment setting
*/
set animated(val) {
this.isAnimated = stringUtils.stringToBool(val);
if (this.isAnimated) {
this.safeSetAttribute('animated', true);
} else {
this.safeRemoveAttribute('animated');
}
this.refresh();
}
get animated() {
return this.isAnimated;
}
/**
* Specifies whether to show the Popup Arrow, and in which direction.
* The direction is in relation to the alignment setting. So for example of you align: top
* you want arrow: top as well.
* @param {string|null} val the arrow direction. Defaults to `none`
*/
set arrow(val) {
let trueVal = ARROW_TYPES[0];
if (val && ARROW_TYPES.includes(val)) {
trueVal = val;
}
if (trueVal !== ARROW_TYPES[0]) {
this.safeSetAttribute('arrow', `${trueVal}`);
} else {
this.safeRemoveAttribute('arrow');
}
this.refresh();
}
/**
* @returns {string|null} the arrow setting, or null
*/
get arrow() {
const attr = this.getAttribute('arrow');
if (!attr) {
return ARROW_TYPES[0];
}
return attr;
}
/**
* @readonly
* @returns {HTMLElement} referencing the internal arrow element
*/
get arrowEl() {
return this.container.querySelector('.arrow');
}
/**
* Sets the element to align with via a css selector
* @param {any} val ['string|HTMLElement'] a CSS selector string
*/
set arrowTarget(val) {
const isString = typeof val === 'string' && val.length;
const isElem = val instanceof HTMLElement;
if (!isString && !isElem) {
this.state.arrowTarget = undefined;
this.removeAttribute('arrow-target');
this.refresh();
return;
}
let elem;
if (isString) {
// @TODO Harden for security (XSS)
const rootNode = IdsDOMUtils.getClosestRootNode(this);
elem = rootNode.querySelector(val);
if (!(elem instanceof HTMLElement)) {
return;
}
this.setAttribute('arrow-target', val);
} else {
elem = val;
}
this.state.arrowTarget = elem;
this.refresh();
}
/**
* @returns {HTMLElement} the element in the page that the Popup will take
* coordinates from for relative placement
*/
get arrowTarget() {
return this.state.arrowTarget || this.alignTarget;
}
/**
* The style of popup to use between 'none', 'menu', 'menu-alt', 'tooltip', 'tooltip-alt'
* @param {string} val The popup type
*/
set type(val) {
if (val && TYPES.includes(val)) {
this.trueType = val;
}
this.safeSetAttribute('type', this.trueType);
this.refresh();
}
get type() {
return this.trueType;
}
/**
* Whether or not the component should be displayed
* @param {boolean} val a boolean for displaying or hiding the popup
*/
set visible(val) {
this.isVisible = stringUtils.stringToBool(val);
if (this.isVisible) {
this.safeSetAttribute('visible', true);
} else {
this.safeRemoveAttribute('visible');
}
this.refresh();
}
get visible() {
return this.isVisible;
}
/**
* Sets the X (left) coordinate of the Popup
* @param {number} val the coordinate's value
*/
set x(val) {
let trueVal = parseInt(val?.toString(), 10);
if (Number.isNaN(trueVal)) {
trueVal = 0;
}
this.coords.x = trueVal;
this.setAttribute('x', trueVal.toString());
this.refresh();
}
get x() {
return this.coords.x;
}
/**
* Sets the Y (top) coordinate of the Popup
* @param {number} val the coordinate's value
*/
set y(val) {
let trueVal = parseInt(val?.toString(), 10);
if (Number.isNaN(trueVal)) {
trueVal = 0;
}
this.coords.y = trueVal;
this.setAttribute('y', trueVal.toString());
this.refresh();
}
get y() {
return this.coords.y;
}
/**
* Calculates the current placement of the Popup
*/
refresh() {
if (!this.shouldUpdate) {
return;
}
// Attach to the global ResizeObserver
// (this doesn't need updating)
// @TODO possibly replace `this.resizeDetectionTarget()`
// with IdsPopupBoundary (specifically to contain)
if (this.shouldResize()) {
this.addObservedElement(this.resizeDetectionTarget());
}
// Set the Popup type
const thisType = this.trueType;
const thisCl = this.container.classList;
TYPES.forEach((type) => {
if (type !== thisType && thisCl.contains(type)) {
thisCl.remove(type);
} else if (type === thisType && !thisCl.contains(type)) {
thisCl.add(type);
}
});
// Make the popup actually render before doing placement calcs
if (this.isVisible) {
thisCl.add('visible');
} else {
thisCl.remove('open');
}
// Show/Hide Arrow class, if applicable
const arrowClass = this.arrow;
const arrowElCl = this.arrowEl.classList;
ARROW_TYPES.forEach((type) => {
if (type !== 'none' && type !== arrowClass) {
arrowElCl.remove(type);
this.arrowEl.hidden = true;
}
});
if (this.arrow !== 'none' && !arrowElCl.contains(this.arrow)) {
arrowElCl.add(this.arrow);
this.arrowEl.hidden = false;
}
// If no alignment target is present, do a simple x/y coordinate placement.
const { alignTarget } = this;
if (!alignTarget) {
// Remove an established MutationObserver if one exists.
if (this.hasMutations) {
this.mo.disconnect();
this.disconnectDetectMutations();
delete this.hasMutations;
}
if (this.visible) {
this.placeAtCoords();
}
} else {
// connect the alignTarget to the global MutationObserver, if applicable.
if (this.shouldDetectMutations() && !this.hasMutations) {
this.mo.observe(this.alignTarget, {
attributes: true,
attributeFilter: ['style', 'height', 'width'],
attributeOldValue: true,
subtree: true
});
this.hasMutations = true;
}
this.placeAgainstTarget();
}
// Adds a RenderLoop-staggered check for whether to show the Popup.
if (this.openCheck) {
this.openCheck.destroy(true);
}
this.openCheck = this.rl.register(new IdsRenderLoopItem({
duration: 70,
timeoutCallback: () => {
if (this.isVisible) {
// If an arrow is displayed, place it correctly.
this.placeArrow();
// Always fire the 'show' event
this.triggerEvent('show', this, {
bubbles: true,
detail: {
elem: this
}
});
this.container.classList.add('open');
}
if (!this.isAnimated && this.container.classList.contains('animated')) {
this.container.classList.remove('animated');
}
}
}));
// Adds another RenderLoop-staggered check for whether to hide the Popup.
if (this.animatedCheck) {
this.animatedCheck.destroy(true);
}
this.animatedCheck = this.rl.register(new IdsRenderLoopItem({
duration: 200,
timeoutCallback: () => {
if (!this.isVisible) {
// Always fire the 'hide' event
this.triggerEvent('hide', this, {
bubbles: true,
detail: {
elem: this
}
});
// Remove the `visible` class if its there
if (this.container.classList.contains('visible')) {
this.container.classList.remove('visible');
}
}
if (this.isAnimated && !this.container.classList.contains('animated')) {
this.container.classList.add('animated');
}
}
}));
}
/**
* Places the Popup using numeric x/y coordinates as a starting point.
* @private
* @returns {void}
*/
placeAtCoords() {
const popupRect = this.container.getBoundingClientRect();
let x = this.x;
let y = this.y;
switch (this.alignX) {
case 'right':
x -= popupRect.width;
break;
case 'center':
x -= popupRect.width / 2;
break;
default: // left
break;
}
switch (this.alignY) {
case 'bottom':
y -= popupRect.height;
break;
case 'center':
y -= popupRect.height / 2;
break;
default: // top
break;
}
this.container.style.left = `${x}px`;
this.container.style.top = `${y}px`;
}
/**
* Places the Popup using an external element as a starting point.
* @private
* @returns {void}
*/
placeAgainstTarget() {
let x = this.x;
let y = this.y;
// Detect sizes/locations of the popup and the alignment target Element
const popupRect = this.container.getBoundingClientRect();
const targetRect = this.alignTarget.getBoundingClientRect();
const { alignEdge } = this;
let alignXCentered = false;
let alignYCentered = false;
/*
* NOTE: All calculatations are based on the top/left corner of the element rectangles.
*/
// If alignment edge is top or bottom, the defined Y coordinate is used as an offset,
// and the X position will be set using the provided X alignment rule (or centered by default)
// and use the defined X coordinate as a X offset.
if (ALIGNMENTS_Y.includes(alignEdge)) {
switch (alignEdge) {
case 'top':
y = targetRect.top - popupRect.height - y;
break;
case 'bottom':
y = targetRect.bottom + y;
break;
default: // center
y = (targetRect.top + targetRect.height / 2) - (popupRect.height / 2) + y;
alignYCentered = true;
}
switch (this.alignX) {
case 'left':
x = targetRect.left + x;
break;
case 'right':
x = targetRect.right - popupRect.width - x;
break;
default: // center
x = (targetRect.left + targetRect.width / 2) - popupRect.width / 2 + x;
alignXCentered = true;
}
}
// If alignment edge is left or right, the defined X coordinate is used as an offset,
// and the Y position will be set using the provided Y alignment rule (or centered by default)
// and use the defined Y coordinate as a Y offset.
if (ALIGNMENTS_X.includes(alignEdge)) {
switch (alignEdge) {
case 'left':
x = targetRect.left - popupRect.width - x;
break;
case 'right':
x = targetRect.right + x;
break;
default: // center
if (alignXCentered) {
break;
}
x = (targetRect.left + targetRect.width / 2) - popupRect.width / 2 + x;
}
switch (this.alignY) {
case 'top':
y = targetRect.top + y;
break;
case 'bottom':
y = targetRect.bottom - popupRect.height + y;
break;
default: // center
if (alignYCentered) {
break;
}
y = (targetRect.top + targetRect.height / 2) - (popupRect.height / 2) + y;
}
}
this.container.style.left = `${x}px`;
this.container.style.top = `${y}px`;
}
/**
* Handles alignment of an optional arrow element. If an arrow target is specified,
* the arrow is placed to align correctly against the target.
* @returns {void}
*/
placeArrow() {
const arrow = this.arrow;
const arrowEl = this.arrowEl;
const element = this.alignTarget;
const target = this.arrowTarget;
if (arrow === 'none' || !element || !target) {
arrowEl.hidden = true;
return;
}
// Clear previous styles
arrowEl.removeAttribute('hidden');
arrowEl.style.marginLeft = '';
arrowEl.style.marginTop = '';
const arrowRect = arrowEl.getBoundingClientRect();
const elementRect = element.getBoundingClientRect();
const targetRect = target.getBoundingClientRect();
const newArrowRect = {};
const targetMargin = (arrow === 'right' || arrow === 'left') ? 'marginTop' : 'marginLeft';
let arrowHidden = false;
let targetCenter = 0;
let currentArrowCenter = 0;
let d;
// Figure out the distance needed to move the arrow to match the position of the `target`
if (arrow === 'left' || arrow === 'right') {
targetCenter = targetRect.top + (targetRect.height / 2);
currentArrowCenter = arrowRect.top + (arrowRect.height / 2);
d = targetCenter - currentArrowCenter;
newArrowRect.top = arrowRect.top + d;
newArrowRect.bottom = arrowRect.bottom + d;
/* istanbul ignore next */
if (newArrowRect.top <= elementRect.top || newArrowRect.bottom >= elementRect.bottom) {
arrowHidden = true;
}
}
if (arrow === 'top' || arrow === 'bottom') {
targetCenter = targetRect.left + (targetRect.width / 2);
currentArrowCenter = arrowRect.left + (arrowRect.width / 2);
d = targetCenter - currentArrowCenter;
newArrowRect.left = arrowRect.left + d;
newArrowRect.right = arrowRect.right + d;
/* istanbul ignore next */
if (newArrowRect.left <= elementRect.left || newArrowRect.right >= elementRect.right) {
arrowHidden = true;
}
}
// Round the number up
d = Math.ceil(d);
// Hide the arrow if it goes beyond the element boundaries
/* istanbul ignore next */
if (arrowHidden) {
arrowEl.hidden = true;
}
arrowEl.style[targetMargin] = `${d}px`;
}
/**
* Turns off the ability of the popup to respond to attribute changes, in order to
* set an attribute that may incorrectly change the popup's display/state otherwise.
* @param {string} attr the attribute of the popup that will change, passed to `setAttribute`
* @param {any} value the value to pass to `setAttribute`
*/
safeSetAttribute(attr, value) {
if (!POPUP_PROPERTIES.includes(attr)) {
return;
}
const prev = this.shouldUpdate;
this.shouldUpdate = false;
this.setAttribute(attr, value);
this.shouldUpdate = prev;
}
/**
* Turns off the ability of the popup to respond to attribute changes, in order to
* remove an attribute that may incorrectly change the popup's display/state otherwise.
* @param {string} attr the attribute of the popup that will be removed
*/
safeRemoveAttribute(attr) {
if (!POPUP_PROPERTIES.includes(attr)) {
return;
}
const prev = this.shouldUpdate;
this.shouldUpdate = false;
this.removeAttribute(attr);
this.shouldUpdate = prev;
}
/**
* Inner template contents
* @returns {string} The template
*/
template() {
return `<div class="ids-popup" part="popup">
<div class="arrow" part="arrow"></div>
<div class="content-wrapper">
<slot name="content"></slot>
</div>
</div>`;
}
/**
* @private
* @returns {void}
*/
handleEvents() {
const slot = this.shadowRoot.querySelector('slot');
this.onEvent('slotchange', slot, () => {
this.refresh();
});
}
}
export default IdsPopup;
<file_sep>/app/ids-render-loop/index.js
import { IdsRenderLoop, IdsRenderLoopItem } from '../../src/ids-render-loop/ids-render-loop';
<file_sep>/app/ids-button/index.js
import IdsButton from '../../src/ids-button/ids-button';
<file_sep>/test/ids-mask/ids-mask-date-api-func-test.js
import MaskAPI from '../../src/ids-mask/ids-mask-api';
import { dateMask, autoCorrectedDatePipe } from '../../src/ids-mask/ids-masks';
// Shared MaskAPI instance, rebuilt before each test case.
let api;
// Functional tests for date-format masking via MaskAPI.process + the dateMask pattern.
describe('IdsMaskAPI (Date)', () => {
  beforeEach(() => {
    api = new MaskAPI();
  });
  afterEach(() => {
    api = null;
  });
  it('should process short dates', () => {
    const textValue = '1111111111';
    const opts = {
      selection: {
        start: 0
      },
      pattern: dateMask,
      patternOptions: {
        format: 'M/d/yyyy',
        symbols: {
          separator: '/'
        }
      }
    };
    const result = api.process(textValue, opts);
    expect(result.maskResult).toBeTruthy();
    expect(result.conformedValue).toEqual('11/11/1111');
  });
  it('should process short dates with default patternOptions', () => {
    const textValue = '1111111111';
    const opts = {
      selection: {
        start: 0
      },
      pattern: dateMask,
      pipe: autoCorrectedDatePipe
    };
    const result = api.process(textValue, opts);
    expect(result.maskResult).toBeTruthy();
    expect(result.conformedValue).toEqual('11/11/1111');
  });
  it('should process short dates with no separators or other literals present', () => {
    const textValue = '12122012';
    // First format: day/month/year with fixed two-digit day and month.
    let opts = {
      selection: {
        start: 0
      },
      pattern: dateMask,
      patternOptions: {
        format: 'ddMMyyyy'
      }
    };
    let result = api.process(textValue, opts);
    expect(result.conformedValue).toEqual('12122012');
    // Second format: single-digit-capable month/day, still no separators.
    opts = {
      selection: {
        start: 0
      },
      pattern: dateMask,
      patternOptions: {
        format: 'Mdyyyy'
      }
    };
    result = api.process(textValue, opts);
    expect(result.conformedValue).toEqual('12122012');
  });
  it('should process partial short dates', () => {
    const textValue = '1111111111';
    const opts = {
      selection: {
        start: 0
      },
      pattern: dateMask,
      patternOptions: {
        format: 'M/d/yyyy',
        symbols: {
          separator: '/'
        }
      }
    };
    const result = api.process(textValue, opts);
    expect(result.conformedValue).toEqual('11/11/1111');
  });
  it('should process short dates when the format allows for single digit months and days', () => {
    const textValue = '1/1/2020';
    const opts = {
      selection: {
        start: 0
      },
      pattern: dateMask,
      patternOptions: {
        format: 'M/d/yyyy',
        symbols: {
          separator: '/'
        }
      },
      pipe: autoCorrectedDatePipe
    };
    const result = api.process(textValue, opts);
    expect(result.maskResult).toBeTruthy();
    expect(result.conformedValue).toEqual('1/1/2020');
  });
  // @TODO: fix partial autocorrect
  it.skip('can partially autocorrect incorrect dates', () => {
    const textValue = '15/32/2020';
    const opts = {
      selection: {
        start: 0
      },
      pattern: dateMask,
      patternOptions: {
        format: 'M/d/yyyy',
        symbols: {
          separator: '/'
        }
      },
      pipe: autoCorrectedDatePipe
    };
    const result = api.process(textValue, opts);
    expect(result.maskResult).toBeTruthy();
    expect(result.conformedValue).toEqual('12/31/2020');
  });
});
// Unit tests calling the `dateMask` pattern-generator function directly
// (asserting only the generated mask's length, not its contents).
describe('Date Mask function', () => {
  it('can mask with defaults', () => {
    const result = dateMask(undefined, undefined);
    // Resulting mask will match default 'en-us' date format:
    // [/\d/, /\d/, '[]', '/', '[]', /\d/, /\d/, '[]', '/', '[]', /\d/, /\d/, /\d/, /\d/]
    expect(result.mask.length).toBe(14);
  });
  it('should always provide masking space for at least one number', () => {
    const result = dateMask(null, {});
    // Resulting mask will match default 'en-us' date format:
    // [/\d/, /\d/, '[]', '/', '[]', /\d/, /\d/, '[]', '/', '[]', /\d/, /\d/, /\d/, /\d/]
    expect(result.mask.length).toBe(14);
  });
  it('can handle time periods', () => {
    const result = dateMask('1212am', {
      format: 'HH:mm a'
    });
    // Resulting mask will be:
    // [/\d/, /\d/, '[]', ':', '[]', /\d/, /\d/, '[]', ' ', '[]', /[aApP]/, /[mM]/]
    expect(result.mask.length).toBe(12);
  });
  // @TODO: Re-enable after Locale exists
  it.skip('can handle `ah`', () => {
    const result = dateMask('202006', {
      format: 'ah:mm'
    });
    // Resulting mask will be:
    // [/[aApP]/, /[Mm]/, '[]', '[]', /\d/, /\d/, '[]', ':', '[]', /\d/, /\d/]
    expect(result.mask.length).toBe(11);
  });
});
<file_sep>/src/ids-menu/TODO.md
# TODO on IDS Menu
## Features
- [x] Disabled state (single item)
- [ ] Disabled state (entire menu)
- [x] Selected state (single)
- [x] Selected state (multi/group)
- [x] Trigger `selected` event (optionally pass a trigger element)
- [x] Expose "selected" list values on the component
- [x] Submenu state
- [x] Submenu functionality
- [x] Keyboard Nav
- [x] Keyboard Select
- [x] Make keyboard navigation not occur twice on nested menu items (prevent bubbling after it occurs once?)
- [ ] Ensure current ARIA labels work properly to describe groups (VoiceOver doesn't currently read out group headers)
- [x] `focus()` method that figures out which item in the menu to focus, and does so
- [x] Tests
- [x] Docs
- [ ] Get aXe tests passing
## Ids Popup
- [ ] Add "flip" and "nudge" logic
- [ ] Might also need to create Ids Popup Boundary?
## Ids Popup Menu
- [x] Combine Popup and Menu
- [x] Create hidden / nested fly-out behavior for submenus
- [ ] Shift + F10 to open (need to have different targets in Keyboard mixin)
## ContextMenu Mixin?
- [ ] Apply an IdsPopupMenu to an element, with an API for open/close and menu functions
<file_sep>/app/ids-textarea/example.js
// Supporting components
import IdsButton from '../../src/ids-button/ids-button';
// Demo wiring for the ids-textarea examples: a value update/reset pair of
// buttons, and a set of buttons toggling enabled/disabled/readonly state.
document.addEventListener('DOMContentLoaded', () => {
  // --- Value update / reset demo ---
  const updateBtn = document.querySelector('#btn-textarea-update-value');
  const resetBtn = document.querySelector('#btn-textarea-reset-value');
  // Fall back to an empty object so handlers are safe when the demo markup is absent.
  const valueTarget = document.querySelector('#textarea-update-value') || {};
  const originalValue = valueTarget?.value || '';
  const updatedValue = 'New value updated';
  updateBtn?.addEventListener('click', () => {
    valueTarget.value = updatedValue;
  });
  resetBtn?.addEventListener('click', () => {
    valueTarget.value = originalValue;
  });
  // --- Enable / disable / readonly demo ---
  const enableBtn = document.querySelector('#btn-textarea-enable');
  const disableBtn = document.querySelector('#btn-textarea-disable');
  const readonlyBtn = document.querySelector('#btn-textarea-readonly');
  const stateTarget = document.querySelector('#textarea-toggle-state') || {};
  enableBtn?.addEventListener('click', () => {
    stateTarget.disabled = false;
    stateTarget.readonly = false;
  });
  disableBtn?.addEventListener('click', () => {
    stateTarget.disabled = true;
  });
  readonlyBtn?.addEventListener('click', () => {
    stateTarget.readonly = true;
  });
});
<file_sep>/src/ids-base/ids-string-utils.js
/**
* Ids String parsing/processing utilities
*/
/**
 * Ids String parsing/processing utilities
 */
const IdsStringUtils = {
  /**
   * Convert a string in presumed kebab case to camel case.
   * @param {string} str the kebab/underscore/space-delimited string
   * @returns {string} the camelCased result
   */
  camelCase(str) {
    const lowerFirst = str.slice(0, 1).toLowerCase() + str.slice(1);
    // Collapse runs of separators first so the split yields no interior empties.
    const words = lowerFirst.replace(/([-_ ]){1,}/g, ' ').split(/[-_ ]/);
    return words.reduce((joined, word) => joined + word[0].toUpperCase() + word.substring(1));
  },
  /**
   * Removes all duplicate characters from a string and returns another string
   * containing ALL unique characters (first occurrence wins). Useful for
   * construction of REGEX objects with characters from an input field, etc.
   * @param {string} str The string to process
   * @returns {string} The processed string
   */
  removeDuplicates(str) {
    // A Set built from UTF-16 code units keeps only each unit's first occurrence,
    // in insertion order — equivalent to a filter-by-first-index pass.
    return Array.from(new Set(str.split(''))).join('');
  },
  /**
   * Convert a string value into a boolean
   * @param {string|boolean|any} val string value from the component property
   * @returns {boolean} The return boolean
   */
  stringToBool(val) {
    if (typeof val === 'string') {
      // Any string other than a case-insensitive 'false' is truthy here,
      // including the empty string (boolean-attribute semantics).
      return val.toLowerCase() !== 'false';
    }
    // Non-strings are only true for the literal boolean `true`.
    return val === true;
  },
  /**
   * Convert a string value into a number
   * @param {string|number|any} val string value from the component property
   * @returns {number} the parsed number, or 0 when not numeric
   */
  stringToNumber(val) {
    const num = Number(val?.toString());
    return Number.isNaN(num) ? 0 : num;
  },
  /**
   * Inject template variables in a string
   * @param {string} str The template string containing `${key}` placeholders
   * @param {object} obj The dataset row / item supplying replacement values
   * @returns {string} the string with placeholders substituted
   */
  injectTemplate(str, obj) {
    return str.replace(/\${(.*?)}/g, (_match, key) => obj[key]);
  }
};
export { IdsStringUtils };
<file_sep>/test/ids-badge/ids-badge-func-test.js
/**
* @jest-environment jsdom
*/
import IdsBadge from '../../src/ids-badge/ids-badge';
import IdsIcon from '../../src/ids-icon/ids-icon';
// Functional tests for the IdsBadge web component: color/shape attribute
// reflection, snapshot rendering, and theme mode/version support.
describe('IdsBadge Component', () => {
  // Badge instance under test, re-created before each case.
  let badge;
  beforeEach(async () => {
    const elem = new IdsBadge();
    document.body.appendChild(elem);
    badge = document.querySelector('ids-badge');
  });
  afterEach(async () => {
    document.body.innerHTML = '';
  });
  it('renders with no errors', () => {
    const errors = jest.spyOn(global.console, 'error');
    const elem = new IdsBadge();
    document.body.appendChild(elem);
    elem.remove();
    // The beforeEach badge remains after the second instance is removed.
    expect(document.querySelectorAll('ids-badge').length).toEqual(1);
    expect(errors).not.toHaveBeenCalled();
  });
  it('renders correctly', () => {
    expect(badge.outerHTML).toMatchSnapshot();
    badge.color = 'error';
    expect(badge.outerHTML).toMatchSnapshot();
    badge.color = 'alert';
    badge.shape = 'round';
    expect(badge.outerHTML).toMatchSnapshot();
  });
  it('renders alert color and round shape of badge', () => {
    badge.color = 'alert';
    expect(badge.getAttribute('color')).toEqual('alert');
    expect(badge.color).toEqual('alert');
    badge.shape = 'round';
    expect(badge.getAttribute('shape')).toEqual('round');
    expect(badge.shape).toEqual('round');
  });
  it('renders error color of badge', () => {
    badge.color = 'error';
    expect(badge.getAttribute('color')).toEqual('error');
    expect(badge.color).toEqual('error');
  });
  it('renders info color of badge', () => {
    badge.color = 'info';
    expect(badge.getAttribute('color')).toEqual('info');
    expect(badge.color).toEqual('info');
  });
  it('renders warning color and round shape of badge', () => {
    badge.color = 'warning';
    expect(badge.getAttribute('color')).toEqual('warning');
    expect(badge.color).toEqual('warning');
    badge.shape = 'round';
    expect(badge.getAttribute('shape')).toEqual('round');
    expect(badge.shape).toEqual('round');
  });
  it('renders success color of badge', () => {
    badge.color = 'success';
    expect(badge.getAttribute('color')).toEqual('success');
    expect(badge.color).toEqual('success');
  });
  it('renders badge with normal icon', () => {
    badge.color = 'alert';
    expect(badge.getAttribute('color')).toBe('alert');
    expect(badge.color).toBe('alert');
  });
  it('removes the color and attribute', () => {
    badge.color = 'alert';
    expect(badge.getAttribute('color')).toEqual('alert');
    expect(badge.color).toEqual('alert');
    badge.removeAttribute('color');
    expect(badge.getAttribute('color')).toEqual(null);
    expect(badge.color).toEqual(null);
  });
  it('removes the shape attribute', () => {
    badge.shape = 'round';
    expect(badge.getAttribute('shape')).toEqual('round');
    expect(badge.shape).toEqual('round');
    badge.removeAttribute('shape');
    expect(badge.getAttribute('shape')).toEqual(null);
    // Shape falls back to the 'normal' default when the attribute is removed.
    expect(badge.shape).toEqual('normal');
  });
  it('supports setting mode', () => {
    badge.mode = 'dark';
    expect(badge.container.getAttribute('mode')).toEqual('dark');
  });
  it('supports setting version', () => {
    badge.version = 'classic';
    expect(badge.container.getAttribute('version')).toEqual('classic');
  });
});
<file_sep>/app/ids-tooltip/test-sandbox.js
import './test-sandbox.scss';
import IdsTooltip from '../../src/ids-tooltip/ids-tooltip';
import IdsInput from '../../src/ids-input/ids-input';
// Use the synchronous `beforeshow` event to log a message
const tooltipTop = document.querySelector('[target="#tooltip-top"]');
tooltipTop.addEventListener('beforeshow', (e) => {
  console.info('beforeshow', e, e.detail);
});
// Use the asynchronous `beforeshow` event to load contents.
// Resolves with a manufacturer name fetched from the demo `/api/bikes` endpoint.
const getContents = () => new Promise((resolve) => {
  const xhr = new XMLHttpRequest();
  xhr.open('get', '/api/bikes', true);
  xhr.onload = () => {
    const status = xhr.status;
    if (status === 200) {
      // NOTE(review): non-200 responses never resolve this Promise — confirm the
      // demo intentionally leaves the tooltip without async content in that case.
      resolve(JSON.parse(xhr.responseText)[1].manufacturerName);
    }
  };
  xhr.send();
});
const tooltipAsync = document.querySelector('[target="#tooltip-async"]');
tooltipAsync.beforeShow = async function beforeShow() {
  return getContents();
};
<file_sep>/app/ids-mask/prefix-suffix.js
import IdsInput from '../../src/ids-input/ids-input';
import { DIGITS_REGEX, EMPTY_STRING } from '../../src/ids-mask/ids-mask-common';
// Demo wiring for masked inputs that carry a literal prefix ('$') or suffix ('%').
document.addEventListener('DOMContentLoaded', () => {
  // Masked field that renders a '$' before the typed digits.
  const prefixField = document.querySelector('#mask-prefix');
  prefixField.maskOptions = { prefix: '$' };
  prefixField.mask = (rawValue, opts) => {
    // One digit pattern per typed character, preceded by the literal prefix.
    const digitPatterns = rawValue.split(EMPTY_STRING).map(() => DIGITS_REGEX);
    return [opts.prefix].concat(digitPatterns);
  };
  // Masked field that renders a '%' after the typed digits.
  const suffixField = document.querySelector('#mask-suffix');
  suffixField.maskOptions = { suffix: '%' };
  suffixField.mask = (rawValue, opts) => {
    // One digit pattern per typed character, followed by the literal suffix.
    const digitPatterns = rawValue.split(EMPTY_STRING).map(() => DIGITS_REGEX);
    return digitPatterns.concat([opts.suffix]);
  };
});
<file_sep>/test/ids-alert/ids-alert-func-test.js
/**
* @jest-environment jsdom
*/
import IdsAlert from '../../src/ids-alert/ids-alert';
// Functional tests for the IdsAlert web component: rendering, the `icon`
// attribute (set and clear), the disabled state, and theming (mode/version).
describe('IdsAlert Component', () => {
  let el;      // the <ids-alert> element under test
  let rootEl;  // inner <ids-icon> from the shadow root (used by icon tests)

  beforeEach(async () => {
    // Start every test with a freshly appended alert showing a known icon.
    const alert = new IdsAlert();
    alert.icon = 'success';
    document.body.appendChild(alert);
    el = document.querySelector('ids-alert');
  });

  afterEach(async () => {
    // Clear the document so each test is independent.
    document.body.innerHTML = '';
  });

  it('renders with no errors', () => {
    const errors = jest.spyOn(global.console, 'error');
    el.remove();
    el = new IdsAlert();
    document.body.appendChild(el);
    expect(document.querySelectorAll('ids-alert').length).toEqual(1);
    expect(errors).not.toHaveBeenCalled();
  });

  it('renders correctly', () => {
    // Snapshot the outer markup for a few icon variants.
    expect(el.outerHTML).toMatchSnapshot();
    el.icon = 'info';
    expect(el.outerHTML).toMatchSnapshot();
    el.icon = 'new';
    expect(el.outerHTML).toMatchSnapshot();
  });

  it('renders icon setting', () => {
    // The `icon` property should reflect to the attribute and back.
    el.icon = 'success';
    expect(el.icon).toEqual('success');
    expect(el.getAttribute('icon')).toEqual('success');
    el.icon = 'info';
    expect(el.icon).toEqual('info');
    expect(el.getAttribute('icon')).toEqual('info');
  });

  it('renders icon info then removes it', () => {
    el = new IdsAlert();
    document.body.appendChild(el);
    el.icon = 'info';
    expect(el.icon).toEqual('info');
    // The icon name should propagate to the inner <ids-icon>.
    rootEl = el.shadowRoot.querySelector('ids-icon');
    expect(rootEl.icon).toBe('info');
    el.icon = null;
    expect(el.icon).toEqual(null);
  });

  it('renders icon success then removes it', () => {
    el = new IdsAlert();
    document.body.appendChild(el);
    el.icon = 'success';
    expect(el.icon).toEqual('success');
    rootEl = el.shadowRoot.querySelector('ids-icon');
    expect(rootEl.icon).toBe('success');
    el.icon = null;
    expect(el.icon).toEqual(null);
  });

  it('renders icon info-field then removes it', () => {
    el = new IdsAlert();
    document.body.appendChild(el);
    el.icon = 'info-field';
    expect(el.icon).toEqual('info-field');
    rootEl = el.shadowRoot.querySelector('ids-icon');
    expect(rootEl.icon).toBe('info-field');
    el.icon = null;
    expect(el.icon).toEqual(null);
  });

  it('should disable and enable', () => {
    // Setting `disabled` must reflect to the host attribute and toggle the
    // `disabled` class on the inner icon, in both directions.
    el.disabled = 'true';
    el.template();
    document.body.innerHTML = '';
    const alert = new IdsAlert();
    alert.icon = 'success';
    document.body.appendChild(alert);
    el = document.querySelector('ids-alert');
    let icon = el.shadowRoot.querySelector('ids-icon');
    expect(el.getAttribute('disabled')).toEqual(null);
    expect(icon.classList).not.toContain('disabled');
    el.disabled = 'true';
    icon = el.shadowRoot.querySelector('ids-icon');
    expect(el.getAttribute('disabled')).toEqual('true');
    expect(icon.classList).toContain('disabled');
    el.disabled = 'false';
    icon = el.shadowRoot.querySelector('ids-icon');
    expect(el.getAttribute('disabled')).toEqual(null);
    expect(icon.classList).not.toContain('disabled');
  });

  it('supports setting mode', () => {
    el = new IdsAlert();
    document.body.appendChild(el);
    el.mode = 'dark';
    expect(el.container.getAttribute('mode')).toEqual('dark');
  });

  it('supports setting version', () => {
    el = new IdsAlert();
    document.body.appendChild(el);
    el.version = 'classic';
    expect(el.container.getAttribute('version')).toEqual('classic');
  });
});
<file_sep>/app/ids-base/ids-events-mixin.js
import IdsEventsMixin from '../../src/ids-base/ids-events-mixin';
<file_sep>/src/ids-toolbar/ids-toolbar-more-actions.ts
import IdsToolbarSection from './ids-toolbar-section';
// Subcomponents
import IdsPopupMenu from '../ids-popup-menu/ids-popup-menu';
import IdsMenuButton from '../ids-menu-button/ids-menu-button';
/**
 * Type definition for the IDS Toolbar "More Actions" section.
 * Extends the plain toolbar section with references to the internal
 * menu-button / popup-menu pair that renders the overflow menu.
 */
export default class IdsToolbarMoreActions extends IdsToolbarSection {
  /** The internal Menu Button element */
  readonly buttonEl?: IdsMenuButton;

  /** The internal Popup Menu element */
  readonly menuEl?: IdsPopupMenu;
}

// Re-export the subcomponents so consumers can import them from this module.
export {
  IdsMenuButton,
  IdsPopupMenu
};
<file_sep>/test/ids-switch/ids-switch-func-test.js
/**
* @jest-environment jsdom
*/
import IdsSwitch from '../../src/ids-switch/ids-switch';
// Functional tests for the IdsSwitch component: checked/disabled states,
// label text, value, native-event re-dispatching, and the template.
describe('IdsSwitch Component', () => {
  let el; // the <ids-switch> element under test

  beforeEach(async () => {
    const elem = new IdsSwitch();
    document.body.appendChild(elem);
    el = document.querySelector('ids-switch');
  });

  afterEach(async () => {
    // Clear the document so each test is independent.
    document.body.innerHTML = '';
  });

  it('renders with no errors', () => {
    const errors = jest.spyOn(global.console, 'error');
    const elem = new IdsSwitch();
    document.body.appendChild(elem);
    elem.remove();
    expect(document.querySelectorAll('ids-switch').length).toEqual(1);
    expect(errors).not.toHaveBeenCalled();
  });

  it('should renders checked', () => {
    el.checked = 'true';
    expect(el.getAttribute('checked')).toEqual('true');
    expect(el.checked).toEqual('true');
  });

  it('should renders as disabled', () => {
    // `disabled` should reflect on the host attribute, the inner input,
    // and the root element's class list — and be fully reversible.
    expect(el.getAttribute('disabled')).toEqual(null);
    expect(el.input.hasAttribute('disabled')).toBe(false);
    let rootEl = el.shadowRoot.querySelector('.ids-switch');
    expect(rootEl.classList).not.toContain('disabled');
    el.disabled = true;
    expect(el.getAttribute('disabled')).toEqual('true');
    expect(el.input.hasAttribute('disabled')).toBe(true);
    rootEl = el.shadowRoot.querySelector('.ids-switch');
    expect(rootEl.classList).toContain('disabled');
    el.disabled = false;
    expect(el.getAttribute('disabled')).toEqual(null);
    expect(el.input.hasAttribute('disabled')).toBe(false);
    rootEl = el.shadowRoot.querySelector('.ids-switch');
    expect(rootEl.classList).not.toContain('disabled');
  });

  it('should set label text', () => {
    // Setting the label with the label-text node removed must not throw.
    let label = el.labelEl.querySelector('.label-text');
    label.remove();
    el.label = 'test';
    document.body.innerHTML = '';
    const elem = new IdsSwitch();
    document.body.appendChild(elem);
    el = document.querySelector('ids-switch');
    label = el.labelEl.querySelector('.label-text');
    expect(label.textContent.trim()).toBe('');
    el.label = 'test';
    label = el.labelEl.querySelector('.label-text');
    expect(label.textContent.trim()).toBe('test');
    el.label = null;
    label = el.labelEl.querySelector('.label-text');
    expect(label.textContent.trim()).toBe('');
  });

  it('should renders value', () => {
    const value = 'test';
    expect(el.getAttribute('value')).toEqual(null);
    el.value = value;
    expect(el.getAttribute('value')).toEqual(value);
    expect(el.input.value).toEqual(value);
    el.value = null;
    expect(el.getAttribute('value')).toEqual(null);
  });

  it('should dispatch native events', () => {
    // NOTE(review): 'dbclick' looks like a typo for the native 'dblclick'
    // event — the component relays the same misspelled name, so this test
    // still passes; confirm intent before renaming on either side.
    const events = ['change', 'focus', 'keydown', 'keypress', 'keyup', 'click', 'dbclick'];
    events.forEach((evt) => {
      let response = null;
      el.addEventListener(evt, () => {
        response = 'triggered';
      });
      const event = new Event(evt);
      el.input.dispatchEvent(event);
      expect(response).toEqual('triggered');
    });
  });

  it('should remove events', () => {
    // After handleNativeEvents('remove'), input events must no longer be
    // re-dispatched on the host element.
    el.input = null;
    document.body.innerHTML = '';
    const elem = new IdsSwitch();
    document.body.appendChild(elem);
    el = document.querySelector('ids-switch');
    el.handleNativeEvents('remove');
    const events = ['change', 'focus', 'keydown', 'keypress', 'keyup', 'click', 'dbclick'];
    events.forEach((evt) => {
      let response = null;
      el.addEventListener(evt, () => {
        response = 'triggered';
      });
      const event = new Event(evt);
      el.input.dispatchEvent(event);
      expect(response).not.toEqual('triggered');
    });
  });

  it('should renders template', () => {
    // Attributes present before upgrade should be honored by template().
    document.body.innerHTML = '';
    el = document.createElement('ids-switch');
    el.setAttribute('disabled', true);
    el.setAttribute('checked', true);
    el.template();
    expect(el.getAttribute('disabled')).toEqual('true');
    expect(el.input.hasAttribute('disabled')).toBe(true);
    const rootEl = el.shadowRoot.querySelector('.ids-switch');
    expect(rootEl.classList).toContain('disabled');
    expect(el.getAttribute('checked')).toEqual('true');
    expect(el.checked).toEqual('true');
  });
});
<file_sep>/src/ids-menu/ids-menu-header.js
import {
IdsElement,
customElement,
scss,
mix,
props
} from '../ids-base/ids-element';
import styles from './ids-menu-header.scss';
import { IdsEventsMixin } from '../ids-base/ids-events-mixin';
import { IdsThemeMixin } from '../ids-base/ids-theme-mixin';
/**
* IDS Menu Header Component
* @type {IdsMenuHeader}
* @inherits IdsElement
* @mixes IdsEventsMixin
* @mixes IdsThemeMixin
* @part header - the menu header element
*/
@customElement('ids-menu-header')
@scss(styles)
class IdsMenuHeader extends mix(IdsElement).with(IdsEventsMixin, IdsThemeMixin) {
constructor() {
super();
}
connectedCallback() {
super.connectedCallback();
}
static get properties() {
return [props.MODE, props.VERSION];
}
template() {
return `<div class="ids-menu-header" part="header"><slot></slot></div>`;
}
}
export default IdsMenuHeader;
<file_sep>/src/ids-input/ids-input.js
import {
IdsElement,
customElement,
mix,
scss,
props,
stringUtils
} from '../ids-base/ids-element';
import styles from './ids-input.scss';
// Supporting components
import IdsIcon from '../ids-icon/ids-icon';
import IdsText from '../ids-text/ids-text';
import IdsTriggerButton from '../ids-trigger-field/ids-trigger-button';
// Mixins
import { IdsEventsMixin } from '../ids-base/ids-events-mixin';
import { IdsKeyboardMixin } from '../ids-base/ids-keyboard-mixin';
import { IdsClearableMixin } from '../ids-base/ids-clearable-mixin';
import { IdsDirtyTrackerMixin } from '../ids-base/ids-dirty-tracker-mixin';
import IdsMaskMixin from '../ids-mask/ids-mask-mixin';
import { IdsValidationMixin } from '../ids-base/ids-validation-mixin';
import { IdsThemeMixin } from '../ids-base/ids-theme-mixin';
import { IdsTooltipMixin } from '../ids-base/ids-tooltip-mixin';
// Properties observed by the Input
const INPUT_PROPS = [
  props.AUTOSELECT,
  props.BG_TRANSPARENT,
  props.CLEARABLE,
  props.CLEARABLE_FORCED,
  props.DIRTY_TRACKER,
  props.DISABLED,
  props.LABEL,
  props.LABEL_REQUIRED,
  props.MODE,
  props.PLACEHOLDER,
  props.SIZE,
  props.READONLY,
  props.TEXT_ALIGN,
  props.TEXT_ELLIPSIS,
  props.TRIGGERFIELD,
  props.TYPE,
  props.VALIDATE,
  props.VALIDATION_EVENTS,
  props.VALUE,
  props.VERSION
];

// Maps the component's `type` attribute to valid native <input> types.
const TYPES = {
  default: 'text',
  text: 'text',
  // Fixed: this entry held a redacted placeholder ('<PASSWORD>'); the
  // native input type must literally be 'password' for the browser to
  // mask the field's characters.
  password: 'password',
  number: 'number',
  email: 'email'
};

// Setting defaults sizes
const SIZES = {
  default: 'md',
  xs: 'xs',
  sm: 'sm',
  mm: 'mm',
  md: 'md',
  lg: 'lg',
  full: 'full'
};

// Setting defaults text-align
const TEXT_ALIGN = {
  default: 'left',
  left: 'left',
  center: 'center',
  right: 'right'
};

// Mixins composed into IdsInput (see the class JSDoc below).
const appliedMixins = [
  IdsEventsMixin,
  IdsClearableMixin,
  IdsKeyboardMixin,
  IdsDirtyTrackerMixin,
  IdsMaskMixin,
  IdsThemeMixin,
  IdsValidationMixin,
  IdsTooltipMixin
];
/**
 * IDS Input Component
 * @type {IdsInput}
 * @inherits IdsElement
 * @mixes IdsClearableMixin
 * @mixes IdsKeyboardMixin
 * @mixes IdsDirtyTrackerMixin
 * @mixes IdsEventsMixin
 * @mixes IdsMaskMixin
 * @mixes IdsValidationMixin
 * @mixes IdsThemeMixin
 * @mixes IdsTooltipMixin
 * @part input - the input element
 * @part label - the label element
 */
@customElement('ids-input')
@scss(styles)
class IdsInput extends mix(IdsElement).with(...appliedMixins) {
  /**
   * Call the constructor and then initialize
   */
  constructor() {
    super();
  }

  /**
   * @returns {Array<string>} IdsInput component observable properties
   */
  static get properties() {
    return [...super.properties, ...INPUT_PROPS];
  }

  /**
   * Custom Element `connectedCallback` implementation.
   * Wires events plus the optional autoselect / clearable / dirty-tracker /
   * validation behaviors once the element is attached to the DOM.
   * @returns {void}
   */
  connectedCallback() {
    super.connectedCallback?.();
    this.handleEvents();
    this.handleAutoselect();
    this.handleClearable();
    this.handleDirtyTracker();
    // @ts-ignore
    this.handleValidation();
  }

  /**
   * Create the Template for the contents
   * @returns {string} The template
   */
  template() {
    // Fall back to a fixed id so the label's `for` always has a target.
    if (!this.state || !this.state?.id) {
      this.state = { id: 'ids-input-id' };
    }

    // Input
    const placeholder = this.placeholder ? ` placeholder="${this.placeholder}"` : '';
    const type = ` type="${this.type || TYPES.default}"`;
    let inputClass = `ids-input-field ${this.size} ${this.textAlign}`;
    inputClass += stringUtils.stringToBool(this.triggerfield) ? ' has-triggerfield' : '';
    inputClass += stringUtils.stringToBool(this.bgTransparent) ? ' bg-transparent' : '';
    inputClass += stringUtils.stringToBool(this.textEllipsis) ? ' text-ellipsis' : '';
    inputClass = ` class="${inputClass}"`;
    // `disabled` takes precedence over `readonly` when both are set.
    let inputState = stringUtils.stringToBool(this.readonly) ? ' readonly' : '';
    inputState = stringUtils.stringToBool(this.disabled) ? ' disabled' : inputState;

    return `
      <div class="ids-input${inputState}">
        <label for="${this.state.id}" class="label-text">
          <ids-text part="label" label="true">${this.label}</ids-text>
        </label>
        <div class="field-container">
          <input part="input" id="${this.state.id}"${type}${inputClass}${placeholder}${inputState} />
        </div>
      </div>
    `;
  }

  /**
   * @readonly
   * @returns {HTMLInputElement} the inner `input` element
   */
  get input() {
    return this.shadowRoot?.querySelector(`#${this.state.id}`);
  }

  /**
   * @readonly
   * @returns {HTMLLabelElement} the inner `label` element
   */
  get labelEl() {
    return this.shadowRoot?.querySelector(`[for="${this.state.id}"]`);
  }

  /**
   * Set input state for disabled or readonly.
   * The two states are mutually exclusive: setting one first clears the
   * other (prop1 is the state being applied, prop2 the opposing one).
   * @private
   * @param {string} prop The property.
   * @returns {void}
   */
  setInputState(prop) {
    if (prop === props.READONLY || prop === props.DISABLED) {
      const msgNodes = [].slice.call(this.shadowRoot.querySelectorAll('.validation-message'));
      const options = {
        prop1: prop,
        prop2: prop !== props.READONLY ? props.READONLY : props.DISABLED,
        val: stringUtils.stringToBool(this[prop])
      };
      const rootEl = this.shadowRoot.querySelector('.ids-input');
      if (options.val) {
        // Applying a state: remove the opposing one from the input, the
        // root wrapper, and any validation messages, then add this one.
        this.input?.removeAttribute(options.prop2);
        rootEl?.classList.remove(options.prop2);
        msgNodes.forEach((x) => x.classList.remove(options.prop2));
        this.input?.setAttribute(options.prop1, 'true');
        rootEl?.classList.add(options.prop1);
        msgNodes.forEach((x) => x.classList.add(options.prop1));
      } else {
        // Clearing the state: only the targeted prop is removed.
        this.input?.removeAttribute(options.prop1);
        rootEl?.classList.remove(options.prop1);
        msgNodes.forEach((x) => x.classList.remove(options.prop1));
      }
    }
  }

  /**
   * Set the label text
   * @private
   * @param {string} value of label
   * @returns {void}
   */
  setLabelText(value) {
    const labelText = this.shadowRoot.querySelector(`[for="${this.state.id}"] ids-text`);
    if (labelText) {
      labelText.innerHTML = value || '';
    }
  }

  /**
   * Handle autoselect: attach or detach the select-all-on-focus behavior
   * depending on the current `autoselect` attribute.
   * @private
   * @returns {void}
   */
  handleAutoselect() {
    if (this.autoselect) {
      this.handleInputFocusEvent();
    } else {
      this.handleInputFocusEvent('remove');
    }
  }

  /**
   * Handle input focus event
   * @private
   * @param {string} option If 'remove', will remove attached events
   * @returns {void}
   */
  handleInputFocusEvent(option = '') {
    const eventName = 'focus';
    if (option === 'remove') {
      // `handledEvents` is maintained by the events mixin; only detach if
      // the registered handler targets this component's input.
      const handler = this?.handledEvents?.get(eventName);
      if (handler && handler.target === this.input) {
        this.offEvent(eventName, this.input);
      }
    } else {
      this.onEvent(eventName, this.input, () => {
        setTimeout(() => { // safari has delay
          this.input?.select();
        }, 1);
      });
    }
  }

  /**
   * Handle input change event: keep the component's `value` in sync with
   * what the user typed into the native input.
   * @private
   * @returns {void}
   */
  handleInputChangeEvent() {
    const eventName = 'change.input';
    this.onEvent(eventName, this.input, () => {
      this.value = this.input.value;
    });
  }

  /**
   * Establish Internal Event Handlers.
   * Re-dispatches native input events on the host element with extra detail.
   * @private
   * @returns {object} The object for chaining.
   */
  handleNativeEvents() {
    // NOTE(review): 'dbclick' looks like a typo for the native 'dblclick'
    // event name — confirm before changing, since sibling components and
    // their tests use the same spelling.
    const events = ['change.input', 'focus', 'select', 'keydown', 'keypress', 'keyup', 'click', 'dbclick'];
    events.forEach((evt) => {
      this.onEvent(evt, this.input, (/** @type {any} */ e) => {
        /**
         * Trigger event on parent and compose the args
         * will fire nativeEvents.
         * @private
         * @param {object} elem Actual event
         * @param {string} value The updated input element value
         */
        this.triggerEvent(e.type, this, {
          detail: {
            elem: this,
            nativeEvent: e,
            value: this.value
          }
        });
      });
    });
    return this;
  }

  /**
   * Handle events
   * @private
   * @returns {void}
   */
  handleEvents() {
    this.handleNativeEvents();
    this.handleInputChangeEvent();
  }

  /**
   * When set the input will select all text on focus
   * @param {boolean|string} value If true will set `autoselect` attribute
   */
  set autoselect(value) {
    const val = stringUtils.stringToBool(value);
    if (val) {
      this.setAttribute(props.AUTOSELECT, val.toString());
    } else {
      this.removeAttribute(props.AUTOSELECT);
    }
    this.handleAutoselect();
  }

  get autoselect() { return this.getAttribute(props.AUTOSELECT); }

  /**
   * When set the input will add css class `bg-transparent`
   * @param {boolean|string} value If true will set `bg-transparent` attribute
   */
  set bgTransparent(value) {
    const val = stringUtils.stringToBool(value);
    const className = 'bg-transparent';
    if (val) {
      this.setAttribute(props.BG_TRANSPARENT, val.toString());
      this.input?.classList.add(className);
    } else {
      this.removeAttribute(props.BG_TRANSPARENT);
      this.input?.classList.remove(className);
    }
  }

  get bgTransparent() { return this.getAttribute(props.BG_TRANSPARENT); }

  /**
   * When set the input will add css class `text-ellipsis`
   * @param {boolean|string} value If true will set `text-ellipsis` attribute
   */
  set textEllipsis(value) {
    const val = stringUtils.stringToBool(value);
    const className = 'text-ellipsis';
    if (val) {
      this.setAttribute(props.TEXT_ELLIPSIS, val.toString());
      this.input?.classList.add(className);
    } else {
      this.removeAttribute(props.TEXT_ELLIPSIS);
      this.input?.classList.remove(className);
    }
  }

  get textEllipsis() { return this.getAttribute(props.TEXT_ELLIPSIS); }

  /**
   * When set the input will add a clearable x button
   * @param {boolean|string} value If true will set `clearable` attribute
   */
  set clearable(value) {
    const val = stringUtils.stringToBool(value);
    if (val) {
      this.setAttribute(props.CLEARABLE, val.toString());
    } else {
      this.removeAttribute(props.CLEARABLE);
    }
    this.handleClearable();
  }

  get clearable() { return this.getAttribute(props.CLEARABLE); }

  /**
   * When set the input will force to add a clearable x button on readonly and disabled
   * @param {boolean|string} value If true will set `clearable-forced` attribute
   */
  set clearableForced(value) {
    const val = stringUtils.stringToBool(value);
    if (val) {
      this.setAttribute(props.CLEARABLE_FORCED, val.toString());
    } else {
      this.removeAttribute(props.CLEARABLE_FORCED);
    }
    this.handleClearable();
  }

  get clearableForced() { return this.getAttribute(props.CLEARABLE_FORCED); }

  /**
   * Set the dirty tracking feature on to indicate a changed field
   * @param {boolean|string} value If true will set `dirty-tracker` attribute
   */
  set dirtyTracker(value) {
    const val = stringUtils.stringToBool(value);
    if (val) {
      this.setAttribute(props.DIRTY_TRACKER, val.toString());
    } else {
      this.removeAttribute(props.DIRTY_TRACKER);
    }
    this.handleDirtyTracker();
  }

  get dirtyTracker() { return this.getAttribute(props.DIRTY_TRACKER); }

  /**
   * Sets input to disabled
   * @param {boolean|string} value If true will set `disabled` attribute
   */
  set disabled(value) {
    const val = stringUtils.stringToBool(value);
    if (val) {
      // Also disable the label text so it picks up the disabled styling.
      this.setAttribute(props.DISABLED, 'true');
      this.container.querySelector('ids-text').setAttribute(props.DISABLED, 'true');
    } else {
      this.removeAttribute(props.DISABLED);
      this.container.querySelector('ids-text').removeAttribute(props.DISABLED);
    }
    this.setInputState(props.DISABLED);
  }

  get disabled() { return this.getAttribute(props.DISABLED); }

  /**
   * Set the `label` text of input label
   * @param {string} value of the `label` text property
   */
  set label(value) {
    if (value) {
      this.setAttribute(props.LABEL, value.toString());
    } else {
      this.removeAttribute(props.LABEL);
    }
    this.setLabelText(value);
  }

  get label() { return this.getAttribute(props.LABEL) || ''; }

  /**
   * Set `label-required` attribute
   * @param {string} value The `label-required` attribute
   */
  set labelRequired(value) {
    const val = stringUtils.stringToBool(value);
    if (val) {
      this.setAttribute(props.LABEL_REQUIRED, val.toString());
    } else {
      this.removeAttribute(props.LABEL_REQUIRED);
    }
    // When not required, hide the indicator via the no-required class.
    this.labelEl?.classList[!val ? 'add' : 'remove']('no-required-indicator');
  }

  get labelRequired() { return this.getAttribute(props.LABEL_REQUIRED); }

  /**
   * Set the `placeholder` of input
   * @param {string} value of the `placeholder` property
   */
  set placeholder(value) {
    if (value) {
      this.setAttribute(props.PLACEHOLDER, value);
      this.input?.setAttribute(props.PLACEHOLDER, value);
      return;
    }
    this.removeAttribute(props.PLACEHOLDER);
    this.input?.removeAttribute(props.PLACEHOLDER);
  }

  get placeholder() { return this.getAttribute(props.PLACEHOLDER); }

  /**
   * Set the input to readonly state
   * @param {boolean|string} value If true will set `readonly` attribute
   */
  set readonly(value) {
    const val = stringUtils.stringToBool(value);
    if (val) {
      this.setAttribute(props.READONLY, val.toString());
    } else {
      this.removeAttribute(props.READONLY);
    }
    this.setInputState(props.READONLY);
  }

  get readonly() { return this.getAttribute(props.READONLY); }

  /**
   * Set the size (width) of input
   * @param {string} value [xs, sm, mm, md, lg, full]
   */
  set size(value) {
    const size = SIZES[value];
    // Unknown values fall back to the default size.
    this.setAttribute(props.SIZE, size || SIZES.default);
    this.input?.classList.remove(...Object.values(SIZES));
    this.input?.classList.add(size || SIZES.default);
  }

  get size() { return this.getAttribute(props.SIZE) || SIZES.default; }

  /**
   * Sets the text alignment
   * @param {string} value [left, center, right]
   */
  set textAlign(value) {
    const textAlign = TEXT_ALIGN[value] || TEXT_ALIGN.default;
    this.setAttribute(props.TEXT_ALIGN, textAlign);
    this.input?.classList.remove(...Object.values(TEXT_ALIGN));
    this.input?.classList.add(textAlign);
  }

  get textAlign() { return this.getAttribute(props.TEXT_ALIGN) || TEXT_ALIGN.default; }

  /**
   * Set to true if the input is a triggr field
   * @param {boolean|string} value If true will set `triggerfield` attribute
   */
  set triggerfield(value) {
    const val = stringUtils.stringToBool(value);
    if (val) {
      this.setAttribute(props.TRIGGERFIELD, val.toString());
    } else {
      this.removeAttribute(props.TRIGGERFIELD);
    }
    // Reads the getter (attribute) rather than `val` — same truthiness.
    this.input?.classList[this.triggerfield ? 'add' : 'remove']('has-triggerfield');
  }

  get triggerfield() { return this.getAttribute(props.TRIGGERFIELD); }

  /**
   * Sets the input type
   * @param {string} value [text, password, number, email]
   */
  set type(value) {
    const type = TYPES[value];
    if (type) {
      this.setAttribute(props.TYPE, type);
      this.input.setAttribute(props.TYPE, type);
      return;
    }
    // Unknown values fall back to the default type.
    this.setAttribute(props.TYPE, TYPES.default);
    this.input.setAttribute(props.TYPE, TYPES.default);
  }

  get type() { return this.getAttribute(props.TYPE); }

  /**
   * Sets the validation check to use
   * @param {string} value The `validate` attribute
   */
  set validate(value) {
    if (value) {
      this.setAttribute(props.VALIDATE, value);
    } else {
      this.removeAttribute(props.VALIDATE);
    }
    this.handleValidation();
  }

  get validate() { return this.getAttribute(props.VALIDATE); }

  /**
   * Set `validation-events` attribute
   * @param {string} value The `validation-events` attribute
   */
  set validationEvents(value) {
    if (value) {
      this.setAttribute(props.VALIDATION_EVENTS, value);
    } else {
      this.removeAttribute(props.VALIDATION_EVENTS);
    }
    this.handleValidation();
  }

  get validationEvents() { return this.getAttribute(props.VALIDATION_EVENTS); }

  /**
   * Set the `value` attribute of input
   * @param {string} val the value property
   */
  set value(val) {
    let v = val || '';

    // If a mask is enabled, use the conformed value.
    // If no masking occurs, simply use the provided value.
    if (this.mask) {
      v = this.processMaskFromProperty(val) || v;
    }

    this.setAttribute(props.VALUE, v);
    if (this.input?.value !== v) {
      this.input.value = v;
      // Cycles focus/blur/focus then fires `change` — presumably to make
      // dependent states (dirty tracker / validation) refresh; confirm.
      ['focus', 'blur', 'focus'].forEach((m) => this.input[m]());
      this.input.dispatchEvent(new Event('change', { bubbles: true }));
    }
  }

  get value() {
    return this.input?.value || '';
  }
}

export default IdsInput;
<file_sep>/src/ids-trigger-field/ids-trigger-button.js
import {
customElement,
scss,
props
} from '../ids-base/ids-element';
import { IdsButton } from '../ids-button/ids-button';
import { IdsStringUtils as stringUtils } from '../ids-base/ids-string-utils';
import styles from './ids-trigger-button.scss';
/**
* IDS Trigger Button Component
* @type {IdsTriggerButton}
* @inherits IdsElement
*/
@customElement('ids-trigger-button')
@scss(styles)
class IdsTriggerButton extends IdsButton {
/**
* Call the constructor and then initialize
*/
constructor() {
super();
}
/**
* Return the properties we handle as getters/setters
* @returns {Array} The properties in an array
*/
static get properties() {
return [props.CSS_CLASS,
props.DISABLED,
props.ICON,
props.ICON_ALIGN,
props.ID,
props.TEXT,
props.TYPE,
props.TABBABLE,
props.MODE,
props.THEME];
}
/**
* Set if the trigger field is tabbable
* @param {boolean|string} value True of false depending if the trigger field is tabbable
*/
set tabbable(value) {
const isTabbable = stringUtils.stringToBool(value);
/** @type {any} */
const button = this.shadowRoot?.querySelector('button');
this.setAttribute(props.TABBABLE, value.toString());
button.tabIndex = !isTabbable ? '-1' : '0';
}
get tabbable() { return this.getAttribute(props.TABBABLE) || true; }
}
export default IdsTriggerButton;
<file_sep>/app/ids-block-grid/index.js
import IdsBlockgrid from '../../src/ids-block-grid/ids-block-grid';
import IdsBlockgridItem from '../../src/ids-block-grid/ids-block-grid-item';
<file_sep>/app/ids-popup/test-target-in-grid.js
import IdsPopup from '../../src/ids-popup/ids-popup';
import './test-target-in-grid.scss';
<file_sep>/src/ids-text/ids-text.js
import {
IdsElement,
customElement,
scss,
props,
mix
} from '../ids-base/ids-element';
// Import Mixins
import { IdsThemeMixin } from '../ids-base/ids-theme-mixin';
import { IdsEventsMixin } from '../ids-base/ids-events-mixin';
import { IdsTooltipMixin } from '../ids-base/ids-tooltip-mixin';
import { IdsStringUtils as stringUtils } from '../ids-base/ids-string-utils';
import styles from './ids-text.scss';
const fontSizes = ['xs', 'sm', 'base', 'lg', 'xl', 10, 12, 14, 16, 20, 24, 28, 32, 40, 48, 60, 72];
const fontWeightClasses = ['bold', 'bolder'];
/**
* IDS Text Component
* @type {IdsText}
* @inherits IdsElement
* @mixes IdsThemeMixin
* @mixes IdsEventsMixin
* @mixes IdsTooltipMixin
* @part text - the text element
*/
@customElement('ids-text')
@scss(styles)
class IdsText extends mix(IdsElement).with(IdsEventsMixin, IdsThemeMixin, IdsTooltipMixin) {
constructor() {
super();
}
connectedCallback() {
super.connectedCallback();
}
/**
* Return the properties we handle as getters/setters
* @returns {Array} The properties in an array
*/
static get properties() {
return [
props.TYPE,
props.FONT_SIZE,
props.AUDIBLE,
props.DISABLED,
props.ERROR,
props.MODE,
props.VERSION,
props.LABEL,
props.FONT_WEIGHT,
props.AUDIBLE,
props.OVERFLOW,
props.COLOR
];
}
/**
* Inner template contents
* @returns {string} The template
*/
template() {
const tag = this.type || 'span';
let classList = 'ids-text';
classList += this.color === 'unset' ? ' ids-text-color-unset' : '';
classList += (this.overflow === 'ellipsis') ? ' ellipsis' : '';
classList += ((this.audible)) ? ' audible' : '';
classList += this.fontSize ? ` ids-text-${this.fontSize}` : '';
classList += (this.fontWeight === 'bold' || this.fontWeight === 'bolder')
? ` ${this.fontWeight}` : '';
return `<${tag} class="${classList}" mode="${this.mode}" version="${this.version}"><slot></slot></${tag}>`;
}
/**
* Set the font size/style of the text with a class.
* @param {string | null} value The font size in the font scheme
* i.e. 10, 12, 16 or xs, sm, base, lg, xl
*/
set fontSize(value) {
fontSizes.forEach((size) => this.container?.classList.remove(`ids-text-${size}`));
if (value) {
this.setAttribute(props.FONT_SIZE, value);
this.container?.classList.add(`ids-text-${value}`);
return;
}
this.removeAttribute(props.FONT_SIZE);
}
get fontSize() { return this.getAttribute(props.FONT_SIZE); }
/**
* Adjust font weight; can be either "bold" or "bolder"
* @param {string | null} value (if bold)
*/
set fontWeight(value) {
let hasValue = false;
switch (value) {
case 'bold':
case 'bolder':
hasValue = true;
break;
default:
break;
}
this.container?.classList.remove(...fontWeightClasses);
if (hasValue) {
this.setAttribute(props.FONT_WEIGHT, value);
this.container?.classList.add(value);
return;
}
this.removeAttribute(props.FONT_WEIGHT);
}
get fontWeight() {
return this.getAttribute(props.FONT_WEIGHT);
}
/**
* Set the type of element it is (h1-h6, span (default))
* @param {string | null} value The type of element
*/
set type(value) {
if (value) {
this.setAttribute(props.TYPE, value);
} else {
this.removeAttribute(props.TYPE);
}
this.render();
}
get type() { return this.getAttribute(props.TYPE); }
/**
* If set to "unset", color can be controlled by parent container
* @param {string | null} value "unset" or undefined/null
*/
set color(value) {
if (value === 'unset') {
this.setAttribute(props.COLOR, value);
this.container.classList.add('ids-text-color-unset');
} else {
this.removeAttribute(props.COLOR);
this.container.classList.remove('ids-text-color-unset');
}
this.render();
}
get color() {
return this.getAttribute(props.COLOR);
}
/**
* Set `audible` string (screen reader only text)
* @param {string | null} value The `audible` attribute
*/
set audible(value) {
const isValueTruthy = stringUtils.stringToBool(value);
if (isValueTruthy && this.container && !this.container?.classList.contains('audible')) {
this.container.classList.add('audible');
this.setAttribute(props.AUDIBLE, value);
}
if (!isValueTruthy && this.container?.classList.contains('audible')) {
this.container.classList.remove('audible');
this.removeAttribute(props.AUDIBLE);
}
}
get audible() { return this.getAttribute(props.AUDIBLE); }
/**
* Set the text to disabled color.
* @param {boolean} value True if disabled
*/
set disabled(value) {
const val = stringUtils.stringToBool(value);
if (val) {
this.setAttribute(props.DISABLED, value);
return;
}
this.removeAttribute(props.DISABLED);
}
get disabled() { return this.getAttribute(props.DISABLED); }
/**
* Set the text to error color.
* @param {boolean} value True if error text
*/
set error(value) {
const val = stringUtils.stringToBool(value);
if (val) {
this.container.classList.add('error');
this.setAttribute(props.ERROR, value);
return;
}
this.removeAttribute(props.ERROR);
this.container.classList.remove('error');
}
get error() { return this.getAttribute(props.ERROR); }
/**
* Set the text to label color.
* @param {boolean} value True if error text
*/
set label(value) {
const val = stringUtils.stringToBool(value);
if (val) {
this.container.classList.add('label');
this.setAttribute(props.LABEL, value);
return;
}
this.removeAttribute(props.LABEL);
this.container.classList.remove('label');
}
get label() { return this.getAttribute(props.LABEL); }
/**
* Set how content overflows; can specify 'ellipsis', or undefined or 'none'
* @param {string | null} [value=null] how content is overflow
*/
set overflow(value) {
const isEllipsis = value === 'ellipsis';
if (isEllipsis) {
this.container?.classList.add('ellipsis');
this.setAttribute('overflow', 'ellipsis');
} else {
this.container?.classList.remove('ellipsis');
this.removeAttribute('overflow');
}
}
get overflow() {
return this.getAttribute('overflow');
}
}
export default IdsText;
<file_sep>/app/ids-alert/example.js
// Render a list of sample <ids-alert> tags, one per icon style (some icons
// are intentionally repeated to match the demo layout).
const alertIcons = [
  'alert', 'success', 'empty-circle', 'half-empty-circle', 'error', 'info',
  'alert', 'new', 'in-progress', 'error', 'success-solid', 'alert-solid',
  'error-solid', 'info-solid', 'alert-solid', 'new-solid',
  'in-progress-solid', 'info-field-solid'
];

const iconAlertHtml = alertIcons
  .map((name) => `<ids-alert icon="${name}"></ids-alert>`)
  .join('\n');

document.getElementById('ids-alert-list').innerHTML = iconAlertHtml;
<file_sep>/app/ids-mask/example.js
// Configure the three mask demo fields once the DOM is ready.
document.addEventListener('DOMContentLoaded', () => {
  // Phone Number Input - standard pattern mask
  const phoneField = document.querySelector('#mask-phone-number');
  phoneField.mask = [
    '(', /[1-9]/, /\d/, /\d/, ')', ' ',
    /\d/, /\d/, /\d/, '-',
    /\d/, /\d/, /\d/, /\d/
  ];

  // Date Input - the `date` keyword selects the built-in Date mask
  const dateField = document.querySelector('#mask-date');
  dateField.mask = 'date';
  dateField.maskOptions = { format: 'M/d/yyyy HH:mm a' };

  // Number Input - the `number` keyword selects the built-in Number mask
  const numberField = document.querySelector('#mask-number');
  numberField.mask = 'number';
  numberField.maskOptions = {
    allowDecimal: true,
    allowNegative: true,
    allowThousandsSeparator: true,
    decimalLimit: 2,
    integerLimit: 7
  };
});
<file_sep>/app/ids-trigger-button/index.js
import IdsTriggerButton from '../../src/ids-trigger-field/ids-trigger-button';
<file_sep>/test/ids-menu/ids-menu-group-func-test.js
/**
* @jest-environment jsdom
*/
import IdsMenu, {
IdsMenuGroup,
IdsMenuHeader,
IdsMenuItem,
IdsSeparator
} from '../../src/ids-menu/ids-menu';
// Fixture markup: two groups — "primary" is single-select, "secondary" is
// multi-select with `keep-open` — each labelled by an ids-menu-header and
// separated by an ids-separator.
const exampleHTML = `
<ids-menu-header id="primary-header">My Items</ids-menu-header>
<ids-menu-group id="primary" select="single" aria-labelledby="primary-header">
<ids-menu-item id="item1" value="1">Item 1</ids-menu-item>
<ids-menu-item id="item2" value="2">Item 2</ids-menu-item>
<ids-menu-item id="item3" value="3">Item 3</ids-menu-item>
</ids-menu-group>
<ids-separator id="sep1"></ids-separator>
<ids-menu-header id="secondary-header">Other Items</ids-menu-header>
<ids-menu-group id="secondary" select="multiple" keep-open aria-labelledby="secondary-header">
<ids-menu-item id="item4" value="4">Item 4</ids-menu-item>
<ids-menu-item icon="settings" id="item5" value="5">Item 5</ids-menu-item>
<ids-separator id="sep2"></ids-separator>
<ids-menu-item id="item6" value="6">Item 6</ids-menu-item>
</ids-menu-group>
`;
describe('IdsMenuGroup Component', () => {
let menu;
let group1;
let header1;
let group2;
let header2;
let item1;
let item4;
let item5;
let item6;
// Build a fresh menu with the fixture markup before every test so tests
// cannot leak state into each other.
beforeEach(async () => {
menu = new IdsMenu();
menu.id = 'test-menu';
document.body.appendChild(menu);
menu.insertAdjacentHTML('afterbegin', exampleHTML);
group1 = document.querySelector('#primary');
group2 = document.querySelector('#secondary');
header1 = document.querySelector('#primary-header');
header2 = document.querySelector('#secondary-header');
// get reference to an item in each group
item1 = document.querySelector('#item1');
item4 = document.querySelector('#item4');
item5 = document.querySelector('#item5');
item6 = document.querySelector('#item6');
});
// Tear down the DOM and drop references so jsdom state is fully reset.
afterEach(async () => {
document.body.innerHTML = '';
menu = null;
group1 = null;
group2 = null;
header1 = null;
header2 = null;
item1 = null;
item4 = null;
item5 = null;
item6 = null;
});
it('should render', () => {
const errors = jest.spyOn(global.console, 'error');
expect(document.querySelectorAll('ids-menu-group').length).toEqual(2);
expect(errors).not.toHaveBeenCalled();
});
it('has access to its parent menu', () => {
expect(group1.menu.isEqualNode(menu)).toBeTruthy();
});
it('can be associated with a header', () => {
expect(group1.header).toBeDefined();
expect(group1.header.isEqualNode(header1)).toBeTruthy();
expect(group2.header).toBeDefined();
expect(group2.header.isEqualNode(header2)).toBeTruthy();
});
it('should have a correct `aria-labelledBy` attribute', () => {
expect(group1.getAttribute('aria-labelledby')).toEqual(header1.id);
expect(group2.getAttribute('aria-labelledby')).toEqual(header2.id);
});
it('can programmatically change selection type', () => {
group1.select = 'multiple';
expect(group1.getAttribute('select')).toBe('multiple');
expect(item1.container.classList.contains('has-multi-checkmark')).toBeTruthy();
// Setting `select` to 'none' removes the attribute entirely and strips
// both checkmark styles from the items.
group1.select = 'none';
expect(group1.getAttribute('select')).toBe(null);
expect(item1.container.classList.contains('has-checkmark')).toBeFalsy();
expect(item1.container.classList.contains('has-multi-checkmark')).toBeFalsy();
});
it('can programmatically change whether or not the menu stays open after selection', () => {
group1.keepOpen = true;
expect(group1.getAttribute('keep-open')).toBeTruthy();
group1.keepOpen = false;
expect(group1.getAttribute('keep-open')).toBeFalsy();
});
it('can deselect all but a single item from a multi-select group', () => {
item4.select();
item5.select();
item6.select();
group2.deselectAllExcept(item5);
const selected = menu.getSelectedItems(group2);
expect(selected.length).toEqual(1);
expect(selected[0].isEqualNode(item5)).toBeTruthy();
});
});
<file_sep>/app/ids-base/ids-data-source.js
import IdsDataSource from '../../src/ids-base/ids-data-source';
<file_sep>/app/ids-base/ids-element.js
import IdsElement from '../../src/ids-base/ids-element';
<file_sep>/app/ids-menu/index.js
import IdsMenu from '../../src/ids-menu/ids-menu';
import IdsMenuHeader from '../../src/ids-menu/ids-menu-header';
import IdsMenuItem from '../../src/ids-menu/ids-menu-item';
import IdsMenuGroup from '../../src/ids-menu/ids-menu-group';
import IdsSeparator from '../../src/ids-menu/ids-separator';
<file_sep>/src/ids-base/README.md
# Ids Base
This folder contains source code for the common base code for IDS. This includes shared functions, Core Css and The webcomponent base class `IdsElement`.
## Ids Base Css
The Ids base css class contains imports for core modules like typography (including labels), and imports for the Ids Identity token css variables and the functional classes mixins (similar to tailwind css).
## Ids Decorators
The ids-decorators are imported directly into ids-element and may not need to be called directly. Currently there are three:
1. One to add a version to the web component and one to make a custom element for the web components. This is added in IdsElement if used
1. One to make the component a customElement
1. One to add a mixin to the components
## Ids Element
Ids Element is the general base class for most web components in IDS. It's used to have a base layer with common functions that all components will have. If only some components will have the functionality, use a mixin instead. Ids Element currently adds the following:
1. A version number from the package json
1. A name property from the element name
1. Handles setting changes
1. Removes attached event handlers (if the mixin is used)
1. Prevents flash of unstyled content
1. Holds the property (settings) list
1. Renders a template from the template property
1. Exports all mixins
## Ids Keyboard Mixin
- Handles detaching if a key is pressed down currently
- Adds a hot key mapper
- Can list the supported keys for a component
The Keyboard mixin is attached with the `mix ... with` decorator. To use it to respond to keys in a component you can use it like this:
```js
handleKeys() {
this.listen('Enter', this, (e) => {
// Do something on Enter
});
}
```
Also at any time you can check `this.pressedKeys` to see what keys are current down. `this.hotkeys` contains all the currently watched keys.
## Ids Event Handler
Adds a small wrapper around component events. This can be used to see what event handlers are attached on a component as well as the fact that the Ids Element Base will call removeAll to remove all used event handlers.
The Events mixin is attached with the `mix ... with` decorator. To use it to respond to events in a component you can use it like this:
```js
// Handle Clicking the x for dismissible
const closeIcon = this.querySelector('ids-icon[icon="close"]');
this.addEventListener('click', closeIcon, () => this.dismiss());
```
- Handles consistency on the data sent (element, event data, id, idx, custom ect.)
- "Before" events can be vetoed
- All events should use the past tense, for example `activated`, `beforeactivated`, `afteractivated` and not `activate`, `beforeactivate`, `afteractivate`
It's also possible to use Namespaces with the Ids Event Handler's methods, similar to the 4.x version's support for jQuery Event Namespaces. When assigning an event name, usage of a period (.) will cause any text after the period to be considered the "namespace". When removing assigned event listeners using the namespace, only handlers that match the event type AND namespace will be removed:
```js
this.onEvent('click', closeIcon, () => this.dismiss());
this.onEvent('click.doop', closeIcon, () => this.otherDismissCheck());
console.log(this.handledEvents());
// both `click` and `click.doop` exist.
this.offEvent('click.doop', closeIcon);
console.log(this.handledEvents());
// `click.doop` is not there, but `click` remains.
```
## Ids Mixins
Mixins are simply functions with shared functionality that can be injected into a component, for example the IdsEventOmitter. They get around the issue that in JS you cannot inherit from more than one object. They also prevent the Base Element from getting bloated with functionality that not every component uses. Ids uses a simple object as a mixin that is "injected" into the component in the constructor and then used according to its documentation. If the mixin has UI elements it should probably be a web component instead.
## Ids Resize Mixin
This mixin contains lifecycle methods for making a component detect page and element resizing. The mixin allows a component to be registered against a global instance of ResizeObserver, which can trigger size changes throughout the UI, and fire a `refresh()` method on the component if one is defined. The mixin also has lifecycle methods for setting up and tearing down a MutationObserver that will fire a `refresh()` method on the component if one is defined.
## Ids Deep Clone Mixin
This mixin makes a deep copy of an array or object even if it's nested, or contains functions. It's optimized to be very fast. In addition it can handle circular references. It's used in the data source mixin.
## Ids Dirty Tracker Mixin
This mixin tracks the input element text/value changes and show a dirty indicator icon (yellow triangle) to indicate the field has been modified from the starting value.
## Ids Clearable Mixin
This mixin adds a clear button (x icon) to an input element ands bind click and key events to clear the text in the input when clicked. It will trigger the `cleared` when the contents of the input element are cleared and a `contents-checked` event when the contents of input are being checked for empty.
## Ids Validation Mixin
This mixin add functionality for validation to the component. This includes a add/remove message function api. Also triggers the `validated` event when evaluated and passes an `isValid` argument for the current state.
## Ids Theme Mixin
This mixin adds functionality to change the theme on a component. To use it you need to:
1. Include the IdsThemeMixin in the `mix` list.
1. Add two properties to the properties array. For example:
```js
static get properties() {
return [... props.MODE, props.VERSION];
}
```
1. Add types for MODE and VERSION to the `d.ts` file for the new properties.
1. Add the theme mixin name to the @mixins tag for future docs.
1. Add the color changes for each theme scss file. For example:
```css
.ids-container[mode='light'] {
@include bg-white();
}
.ids-container[mode='dark'] {
@include bg-slate-90();
}
.ids-container[mode='contrast'] {
@include bg-slate-10();
}
.ids-container[version='classic'][mode='light'] {
@include bg-graphite-10();
}
.ids-container[version='classic'][mode='dark'] {
@include bg-classic-slate-80();
}
.ids-container[version='classic'][mode='contrast'] {
@include bg-graphite-20();
}
```
1. In addition you should expose some of the component elements as `parts` do this in the comments and in the template. This gives a way to customize the styles outside of the web components, for flexibility and possible style customizations.
```js
...
/**
* @part tag - the tag element
* @part icon - the icon element
*/
...
template() {
return '<span class="ids-tag" part="tag"><slot></slot></span>';
}
```
1. Add a themeable parts section to the .MD file
## Ids Tooltip Mixin
This mixin adds functionality to display a tooltip on an item.
1. Include the import and then IdsTooltipMixin in the `mix` list.
1. Add types for MODE and VERSION to the `d.ts` file for the new properties.
1. Add IdsTooltipMixin to the @mixes list
When using it...
1. Test it by adding for example `tooltip="Additional Information"` on the component.
1. Consider adding a test to tooltip tests.
1. If `tooltip="true"` is set then
<file_sep>/src/ids-popup-menu/ids-popup-menu.d.ts
// Ids is a JavaScript project, but we define TypeScript declarations so we can
// confirm our code is type safe, and to support TypeScript users.
import IdsMenu, {
IdsMenuGroup,
IdsMenuHeader,
IdsMenuItem,
IdsSeparator
} from '../ids-menu/ids-menu';
import IdsPopup from '../ids-popup/ids-popup';
// Subcomponent Export
export {
IdsMenuGroup,
IdsMenuHeader,
IdsMenuItem,
IdsSeparator
};
/* IDS Popup Menu: an IdsMenu displayed inside an IdsPopup against a target element */
export default class IdsPopupMenu extends IdsMenu {
/* references this menu's parent menu component, if this menu is a submenu */
readonly parentMenu: IdsPopupMenu | undefined;
/* references the internal IdsPopup component */
readonly popup: IdsPopup;
/* defines the "target" HTMLElement in which to apply the PopupMenu */
target?: HTMLElement | undefined;
/* defines the action that will cause the menu to appear */
trigger: 'contextmenu' | 'click' | 'immediate';
/* hides the menu */
hide(): void;
/* shows the menu */
show(): void;
/* hides all submenus, ignoring submenus attached to the specified menu item */
hideSubmenus(focusedMenuItem?: IdsMenuItem): void;
}
<file_sep>/src/ids-accordion/ids-accordion.js
import {
IdsElement,
customElement,
scss,
mix,
props
} from '../ids-base/ids-element';
import styles from './ids-accordion.scss';
import IdsAccordionHeader from './ids-accordion-header';
import IdsAccordionPanel from './ids-accordion-panel';
import { IdsThemeMixin } from '../ids-base/ids-theme-mixin';
import { IdsEventsMixin } from '../ids-base/ids-events-mixin';
/**
* IDS Accordion Component
* @type {IdsAccordion}
* @inherits IdsElement
* @mixes IdsEventsMixin
* @mixes IdsThemeMixin
* @part accordion - the accordion root element
*/
@customElement('ids-accordion')
@scss(styles)
class IdsAccordion extends mix(IdsElement).with(IdsEventsMixin, IdsThemeMixin) {
constructor() {
super();
}
connectedCallback() {
super.connectedCallback();
}
/**
* Return the properties we handle as getters/setters
* @returns {Array} The properties in an array
*/
static get properties() {
return [props.MODE, props.VERSION];
}
/**
* Inner template contents
* @returns {string} The template
*/
template() {
return `
<div class="ids-accordion" part="accordion">
<slot></slot>
</div>
`;
}
}
export default IdsAccordion;
<file_sep>/test/ids-menu-button/ids-menu-button-func-test.js
/**
* @jest-environment jsdom
*/
import IdsIcon from '../../src/ids-icon/ids-icon';
import IdsMenuButton from '../../src/ids-menu-button/ids-menu-button';
import IdsPopupMenu, {
IdsMenuGroup,
IdsMenuHeader,
IdsMenuItem,
IdsSeparator
} from '../../src/ids-popup-menu/ids-popup-menu';
describe('IdsMenuButton Component', () => {
let buttonEl;
let menuEl;
// Create a fresh button + popup-menu pair before every test and connect
// them by id/target so the menu opens against the button.
beforeEach(() => {
buttonEl = new IdsMenuButton();
buttonEl.id = 'test-button';
buttonEl.type = 'secondary';
buttonEl.dropdownIcon = '';
document.body.appendChild(buttonEl);
menuEl = new IdsPopupMenu();
menuEl.id = 'test-menu';
document.body.appendChild(menuEl);
// Connect the components
buttonEl.menu = 'test-menu';
menuEl.target = buttonEl;
});
afterEach(() => {
document.body.innerHTML = '';
buttonEl = null;
menuEl = null;
});
it('renders with no errors', () => {
const errors = jest.spyOn(global.console, 'error');
// Re-create the button after removal to exercise the full lifecycle.
buttonEl.remove();
buttonEl = new IdsMenuButton();
buttonEl.id = 'test-button';
buttonEl.type = 'secondary';
buttonEl.dropdownIcon = '';
document.body.appendChild(buttonEl);
expect(document.querySelectorAll('ids-menu-button').length).toEqual(1);
expect(errors).not.toHaveBeenCalled();
expect(buttonEl.shouldUpdate).toBeTruthy();
});
it('renders correctly', () => {
const newButtonElem = new IdsMenuButton();
newButtonElem.id = 'new-button';
newButtonElem.dropdownIcon = 'more';
document.body.appendChild(newButtonElem);
const newMenuElem = new IdsPopupMenu();
newMenuElem.id = 'new-menu';
document.body.appendChild(newMenuElem);
// Connect the components
newButtonElem.menu = 'new-menu';
newMenuElem.target = newButtonElem;
newButtonElem.template();
expect(newButtonElem.outerHTML).toMatchSnapshot();
});
it('can change/remove its dropdown icon', () => {
buttonEl.dropdownIcon = 'launch';
let iconEl = buttonEl.button.querySelector('ids-icon');
expect(buttonEl.dropdownIcon).toBe('launch');
expect(iconEl.icon).toBe('launch');
// Remove it
buttonEl.dropdownIcon = null;
iconEl = buttonEl.button.querySelector('ids-icon');
expect(buttonEl.dropdownIcon).toBe(undefined);
expect(iconEl).toBe(null);
// Try removing it again (runs the else clause in `set dropdownIcon`)
buttonEl.dropdownIcon = undefined;
iconEl = buttonEl.button.querySelector('ids-icon');
expect(buttonEl.dropdownIcon).toBe(undefined);
expect(iconEl).toBe(null);
});
// NOTE(review): this test has no assertions — it only exercises the setter
// path; consider asserting the popup arrow/align target here.
it('points the menu\'s arrow at the button if there is no icon', () => {
buttonEl.dropdownIcon = null;
});
it('shows/hides the menu when the button is clicked', (done) => {
const clickEvent = new MouseEvent('click', { bubbles: true });
buttonEl.dispatchEvent(clickEvent);
// Popup visibility is applied asynchronously, so check on a short delay.
setTimeout(() => {
expect(menuEl.popup.visible).toBeTruthy();
done();
}, 20);
});
it('should render an icon button', () => {
document.body.innerHTML = '';
buttonEl = new IdsMenuButton();
buttonEl.id = 'icon-button';
const iconEl = new IdsIcon();
iconEl.slot = 'icon';
iconEl.icon = 'more';
buttonEl.appendChild(iconEl);
const spanEl = document.createElement('span');
spanEl.classList.add('audible');
spanEl.text = 'Icon Only Button';
buttonEl.appendChild(spanEl);
document.body.appendChild(buttonEl);
buttonEl.render();
expect(buttonEl.shadowRoot.querySelector('.ids-icon-button')).toBeTruthy();
});
// NOTE(review): the nested setTimeout callbacks are not awaited and the
// test takes no `done` argument, so these assertions may never run.
it('focuses the button when the menu is closed with the `Escape` key', () => {
const closeEvent = new KeyboardEvent('keydown', { key: 'Escape', bubbles: true });
menuEl.show();
setTimeout(() => {
expect(menuEl.popup.visible).toBeTruthy();
menuEl.dispatchEvent(closeEvent);
setTimeout(() => {
expect(document.activeElement.isEqualNode(buttonEl)).toBeTruthy();
}, 20);
}, 20);
});
});
<file_sep>/app/ids-tag/example.js
// Demonstrates the clickable tag: log each click event to the console.
// Optional chaining keeps this safe on pages without the demo tag.
const clickableTag = document.querySelector('#ids-clickable-tag');
clickableTag?.addEventListener('click', (evt) => {
  console.info('Click Fired', evt);
});
<file_sep>/src/ids-loader/README.md
# Ids Loader Component
## Description
The ids-loader component is used to notify the user that the system is processing a request, and that they must wait for that request to be processed before continuing with the current task. There are different indicators and UI styles for different scenarios. In previous versions this has been called Loading Indicator or Busy Indicator.
## Use Cases
- Loading indicators tell users about wait times for current processes. Examples can include, searching through a page and submitting a form or a page loading. These indicators communicate the status to the user.
## Terminology
- *Determinate*: Used when there is a defined loading time. These indicators display percentages that help approximate the wait time.
- *Indeterminate*: Used when there is an undefined loading time. These indicators are for unspecified wait times, and do not include a percentage.
## Features (With Code Examples)
A page loader can be added to a page by adding an ids-loader to the page and running the scripts. It will not appear until the web component is in a `connected` state.
```html
<ids-loader></ids-loader>
```
## Settings and Attributes
TBD
## Themeable Parts
- `loader` allows you to further style the loader element
## States and Variations (With Code Examples)
- Loading
- Determinate
- Indeterminate
## Keyboard Guidelines
No keyboard shortcuts available.
## Responsive Guidelines
- The Page Loader will fill 100% of the top of the page
## Converting from Previous Versions
- 3.x: have all new markup and classes.
- 4.x: Busy Indicator has been changed to ids-loader. It has all new markup and classes for web components.
## Proposed Changes
- Fix Accessibility issue (1.4.1 Use of Color) by adding an icon to the color tags.
- Add a label or off screen text for accessibility
## Accessibility Guidelines
- 1.4.1 Use of Color - Color is not used as the only visual means of conveying information, indicating an action, prompting a response, or distinguishing a visual element. Ensure the color tags that indicate state like OK, cancel, ect have other ways to indicate that information. This is failing.
- 1.1 Text Alternatives - Provide text alternatives for any non-text content so that it can be changed into other forms people need, such as large print, braille, speech, symbols or simpler language.
## Regional Considerations
Any Labels should be localized in the current language. The animation should flip in RTL mode (TBD)
<file_sep>/src/ids-alert/ids-alert.js
import {
IdsElement,
customElement,
scss,
props,
stringUtils,
mix
} from '../ids-base/ids-element';
import { IdsEventsMixin } from '../ids-base/ids-events-mixin';
import { IdsThemeMixin } from '../ids-base/ids-theme-mixin';
import IdsIcon from '../ids-icon/ids-icon';
import styles from './ids-alert.scss';
/**
 * IDS Alert Component
 * @type {IdsAlert}
 * @inherits IdsElement
 * @mixes IdsEventsMixin
 * @mixes IdsThemeMixin
 * @part icon - the icon element
 */
@customElement('ids-alert')
@scss(styles)
class IdsAlert extends mix(IdsElement).with(IdsEventsMixin, IdsThemeMixin) {
constructor() {
super();
}
connectedCallback() {
super.connectedCallback();
}
/**
 * Return the properties we handle as getters/setters
 * @returns {Array} The properties in an array
 */
static get properties() {
return [props.ICON, props.DISABLED, props.MODE, props.VERSION];
}
/**
 * Create the Template for the contents
 *
 * @returns {string} The template
 */
template() {
// Mirror the host's disabled state onto the inner ids-icon via a CSS class
const cssClass = stringUtils.stringToBool(this.disabled) ? ' class="disabled"' : '';
return `<ids-icon size="normal"${cssClass} part="icon"></ids-icon>`;
}
/**
 * Sets to disabled
 * @param {boolean|string?} value If true will set `disabled` attribute
 */
set disabled(value) {
const icon = this.shadowRoot?.querySelector('ids-icon');
const val = stringUtils.stringToBool(value);
if (val) {
this.setAttribute(props.DISABLED, val.toString());
icon?.classList.add(props.DISABLED);
} else {
this.removeAttribute(props.DISABLED);
icon?.classList.remove(props.DISABLED);
}
}
// NOTE(review): returns the raw attribute string (e.g. "true") or null,
// not a boolean — confirm callers expect the string form.
get disabled() { return this.getAttribute(props.DISABLED); }
/**
 * Return the icon of the alert.
 * @returns {string | null} the path data
 */
get icon() { return this.getAttribute(props.ICON); }
/**
 * Set the icon
 * @param {string | null} value The Icon Type [success, info, error, alert]
 */
set icon(value) {
if (value) {
this.setAttribute(props.ICON, value);
// Keep the shadow-root ids-icon in sync with the host attribute
this.shadowRoot?.querySelector('ids-icon')?.setAttribute(props.ICON, value);
} else {
this.removeAttribute(props.ICON);
}
}
}
export default IdsAlert;
<file_sep>/app/ids-tag/index.js
import IdsTag from '../../src/ids-tag/ids-tag';
<file_sep>/app/ids-virtual-scroll/example.js
import IdsCard from '../../src/ids-card/ids-card';
import './index.scss';
// Example for populating the Virtual Scroller
const virtualScrollUl = document.querySelector('#virtual-scroll-1');
const virtualScrollTable = document.querySelector('#virtual-scroll-2');
// 1. Do an ajax request
const xmlhttp = new XMLHttpRequest();
const url = '/api/products';
// 2. When the request succeeds, bind item templates and data to both scrollers
xmlhttp.onreadystatechange = function onreadystatechange() {
if (this.readyState === 4 && this.status === 200) {
// Setup the list view
virtualScrollUl.itemTemplate = (item) => `<li class="ids-virtual-scroll-item">${item.productName}</li>`;
virtualScrollUl.data = JSON.parse(this.responseText);
// Set up the table
virtualScrollTable.scrollTarget = document.querySelector('.ids-data-grid');
virtualScrollTable.itemTemplate = (item) => `<div role="row" class="ids-data-grid-row">
<span role="cell" class="ids-data-grid-cell"><span class="text-ellipsis">${item.productId}</span></span>
<span role="cell" class="ids-data-grid-cell"><span class="text-ellipsis">${item.productName}</span></span>
</div>`;
virtualScrollTable.data = JSON.parse(this.responseText);
}
};
// 3. Execute the request
xmlhttp.open('GET', url, true);
xmlhttp.send();
<file_sep>/doc/PUBLISH.md
# Publishing Notes
## Making a package in npm
- Bump version in package.json
- Bump version in src/ids-base/ids-decorators.js
- Run command `npm run publish:dry-run` to test first
- Run command `npm run publish:npm`
## Working with local changes
- Go to the root folder of this project folder and type `npm link` to set up the symbolic link
- Go to the root of the destination project `npm link ids-enterprise-wc`
<file_sep>/app/ids-base/ids-validation-mixin.js
import IdsValidationMixin from '../../src/ids-base/ids-validation-mixin';
<file_sep>/test/helpers/style-mock.js
module.exports = `:host { background-color: transparent; }`;
<file_sep>/src/ids-accordion/README.md
# Ids Accordion Component
## Description
The IDS Accordion component is a UI pattern that is comprised of a stacked list of elements. A basic accordion will consist of a `ids-accordion-header` which shows a title or summary of the `ids-accordion-panel` and acts as a control for expanding and collapsing.
## Use Cases
Can be used to conserve space, by hiding information until needed. Accordions can be commonly seen on mobile sites and applications. It can help tell the user what the page is about and allows the user to select and see what is needed.
## Terminology
- **ids-accordion** Parent container for all accordions
- **ids-accordion-panel** First child of an accordion. Contains the header and content area. Contains 2 slots, the `header` and the `pane`.
- **ids-accordion-header** Typically used in the header slot, contains the title and acts as the control for expanding and collapsing.
## Features (With Code Examples)
Standard accordion, most commonly used:
```html
<ids-accordion>
<ids-accordion-panel>
<ids-accordion-header slot="header">
<ids-text font-size="16">Warehouse Location</ids-text>
</ids-accordion-header>
<ids-text slot="content">
Remix, optimize, "B2B, iterate?" Best-of-breed efficient beta-test; social cutting-edge: rich magnetic tagclouds
front-end infomediaries viral authentic incentivize sexy extensible functionalities incentivize. Generate killer
authentic grow vertical blogospheres, functionalities ecologies harness, "tag solutions synergies exploit data-driven B2C open-source e-markets optimize create, enhance convergence create." Out-of-the-box strategize best-of-breed back-end, deploy design markets metrics. Content web services enhance leading-edge Cluetrain, deliverables dot-com scalable. User-centric morph, back-end, synthesize mesh, frictionless, exploit next-generation tag portals, e-commerce channels; integrate; recontextualize distributed revolutionize innovative eyeballs.
</ids-text>
</ids-accordion-panel>
<ids-accordion-panel>
<ids-accordion-header slot="header">
<ids-text font-size="16">Sort By</ids-text>
</ids-accordion-header>
<ids-text slot="content">
Remix, optimize, "B2B, iterate?" Best-of-breed efficient beta-test; social cutting-edge: rich magnetic tagclouds
front-end infomediaries viral authentic incentivize sexy extensible functionalities incentivize. Generate killer
authentic grow vertical blogospheres, functionalities ecologies harness, "tag solutions synergies exploit
data-driven B2C open-source e-markets optimize create, enhance convergence create." Out-of-the-box strategize
best-of-breed back-end, deploy design markets metrics. Content web services enhance leading-edge Cluetrain,
deliverables dot-com scalable. User-centric morph, back-end, synthesize mesh, frictionless, exploit next-generation
tag portals, e-commerce channels; integrate; recontextualize distributed revolutionize innovative eyeballs.
</ids-text>
</ids-accordion-panel>
<ids-accordion-panel>
<ids-accordion-header slot="header">
<ids-text font-size="16">Brand Name</ids-text>
</ids-accordion-header>
<ids-text slot="content">
Remix, optimize, "B2B, iterate?" Best-of-breed efficient beta-test; social cutting-edge: rich magnetic tagclouds
front-end infomediaries viral authentic incentivize sexy extensible functionalities incentivize. Generate killer
authentic grow vertical blogospheres, functionalities ecologies harness, "tag solutions synergies exploit
data-driven B2C open-source e-markets optimize create, enhance convergence create." Out-of-the-box strategize
best-of-breed back-end, deploy design markets metrics. Content web services enhance leading-edge Cluetrain,
deliverables dot-com scalable. User-centric morph, back-end, synthesize mesh, frictionless, exploit next-generation
tag portals, e-commerce channels; integrate; recontextualize distributed revolutionize innovative eyeballs.
</ids-text>
</ids-accordion-panel>
<ids-accordion-panel>
<ids-accordion-header slot="header">
<ids-text font-size="16">Material</ids-text>
</ids-accordion-header>
<ids-text slot="content">
Remix, optimize, "B2B, iterate?" Best-of-breed efficient beta-test; social cutting-edge: rich magnetic tagclouds
front-end infomediaries viral authentic incentivize sexy extensible functionalities incentivize. Generate killer
authentic grow vertical blogospheres, functionalities ecologies harness, "tag solutions synergies exploit
data-driven B2C open-source e-markets optimize create, enhance convergence create." Out-of-the-box strategize
best-of-breed back-end, deploy design markets metrics. Content web services enhance leading-edge Cluetrain,
deliverables dot-com scalable. User-centric morph, back-end, synthesize mesh, frictionless, exploit next-generation
tag portals, e-commerce channels; integrate; recontextualize distributed revolutionize innovative eyeballs.
</ids-text>
</ids-accordion-panel>
</ids-accordion>
```
## States and Variations
The Accordion's headers support the following states:
- Normal/Default: This is the default of an accordion.
- Hover: Roll over an interactive element inside the accordion
- Disabled: Disabled elements can be inside an accordion. These elements cannot be clicked, hovered or focused.
- Focus: For accessibility. To give a user guidance when using a screen reader.
- Active/Selected: After the pressed/clicked state, users are taken to the active state. This includes expanding or closing an accordion.
## Keyboard Guidelines
- **Shift+Tab**: Works the same as Tab, but in the opposite direction. When focus is on the tab or accordion header, a press of down/right will move focus to the next logical accordion Header or Tab page. When focus reaches the last header/tab page, further key presses will optionally wrap to the first header
- **Up Arrow or Left Arrow**: When focus is on the tab or accordion header, a press of up/left will move focus to the previous logical accordion header or tab page. When focus reaches the first header/tab page, further key presses will optionally wrap to the first header.
- **Enter or Space**: When focus is on an accordion header, this keystroke toggles the expansion of the corresponding panel. If collapsed, the panel is expanded, and its aria-expanded state is set to true. If expanded, the panel is collapsed and its aria-expanded state is set to false.
## Accessibility
The IDS Accordion component has a `role="region"` and a unique `title` is generated for each instance.
<file_sep>/app/ids-accordion/index.js
import IdsAccordion from '../../src/ids-accordion/ids-accordion';
<file_sep>/app/ids-button/performance.js
import IdsButton from '../../src/ids-button/ids-button';
// Performance smoke test: append 1000 secondary buttons to the first grid
// cell, then log how long the page took (ms since navigation start).
const appendTestItems = () => {
const section = document.querySelector('ids-layout-grid-cell');
for (let index = 0; index < 1000; index++) {
let html = '';
html += `<ids-button id="button-${index}" type="secondary">
Button ${index}
</ids-button>`;
section.insertAdjacentHTML('beforeend', html);
}
};
document.addEventListener('DOMContentLoaded', () => {
appendTestItems();
// Elapsed time includes parsing/upgrading all 1000 ids-button components
console.info(window.performance.now());
});
<file_sep>/app/ids-render-loop/example.js
import { IdsRenderLoop, IdsRenderLoopItem } from '../../src/ids-render-loop/ids-render-loop';
// Supporting Components
import IdsText from '../../src/ids-text/ids-text';
import IdsLayoutGridCell from '../../src/ids-layout-grid/ids-layout-grid-cell';
import IdsLayoutGrid from '../../src/ids-layout-grid/ids-layout-grid';
import IdsButton from '../../src/ids-button/ids-button';
// Access the global RenderLoop instance.
// When using the mixin on a real component, the `rl` property exists on the component instance.
import renderLoop from '../../src/ids-render-loop/ids-render-loop-global';
// When the DOM Loads, register a loop that counts durations
// of both the loop and an individual item, as well as sets up play/stop.
document.addEventListener('DOMContentLoaded', () => {
const duration = 1000;
const countdownBtn = document.querySelector('#countdown-trigger-btn');
const countdownSpan = document.querySelector('#renderloop-countdown');
let timer;
countdownBtn.addEventListener('click', () => {
// Clear any previously-set RenderLoop items,
// making them "destroy" without calling the Timeout function
if (timer) {
timer.destroy(true);
}
countdownSpan.classList.remove('done');
// Create a RenderLoop item that updates a countdown number on its callback
timer = new IdsRenderLoopItem({
duration,
// Runs on every loop tick until `duration` elapses
updateCallback() {
const timeLeft = (duration - this.elapsedTime).toFixed(0);
countdownSpan.textContent = `${timeLeft}`;
},
// Runs once when `duration` elapses; the item then removes itself
timeoutCallback() {
countdownSpan.textContent = 'DONE!';
countdownSpan.classList.add('done');
this.destroy();
}
});
renderLoop.register(timer);
});
});
<file_sep>/test/ids-popup-menu/ids-popup-menu-e2e-test.js
// End-to-end checks for the Popup Menu demo page: page loads without
// errors and (tentatively) passes Axe accessibility scanning.
describe('Ids Popup Menu e2e Tests', () => {
  const url = 'http://localhost:4444/ids-popup-menu';
  const loadOptions = { waitUntil: ['networkidle2', 'load'] };

  beforeAll(async () => {
    await page.goto(url, loadOptions);
  });

  it('should not have errors', async () => {
    await expect(page.title()).resolves.toMatch('IDS Popup Menu Component');
  });

  // @TODO: Revisit and figure out accessibility issues
  it('should pass Axe accessibility tests', async () => {
    await page.setBypassCSP(true);
    await page.goto(url, loadOptions);
    await expect(page).toPassAxeTests();
  });
});
<file_sep>/test/ids-tooltip/ids-tooltip-percy-test.js
import percySnapshot from '@percy/puppeteer';
// Visual-regression coverage for the standalone-CSS tooltip page.
describe('Ids Tooltip Percy Tests', () => {
  const url = 'http://localhost:4444/ids-tooltip/standalone-css';

  it('should not have visual regressions in new light theme (percy)', async () => {
    const loadOptions = { waitUntil: ['networkidle2', 'load'] };
    await page.goto(url, loadOptions);
    await percySnapshot(page, 'ids-tooltip-new-light');
  });
});
<file_sep>/src/ids-alert/README.md
# Ids Alert Component
## Description
The IDS Alert component used to communicate as part of a display message that gives users attention, affects an application, feature or a page. This component consists of different types such as `error`, `warning`, `success`, and `info` that represents its color.
## Use Cases
Typically, these alerts are mostly effective to gain attention of the status of your application.
## Terminology
- **Type**: Type is basically the status of an alert.
- **Icon**: Icon is the symbol of the alert.
## Feature (With the Code Examples)
An alert is created by using the `ids-alert` tag. It has an `icon` attribute to set the desired alert icon.
```html
<ids-alert icon="info"></ids-alert>
<ids-alert icon="success"></ids-alert>
<ids-alert icon="alert"></ids-alert>
```
An alert can be used in a disabled situation, so it comes with a disabled style
```html
<ids-alert icon="info" disabled="true"></ids-alert>
```
## Themeable Parts
- `icon` allows you to further style the icon element
## States and Variations
- Color
- Size
- Alert
## Keyboard Guidelines
Alert icons do not have tab stops or keyboard interaction on their own. However, they may be placed in a grid cell or other object that has tab focus.
## Responsive Guidelines
- Flows within its parent/placement and is usually centered vertically.
## Alternate Designs
Icons differ in the two provided theme/icon versions.
## Accessibility
The traffic light colors are accessibility violations for contrast, however, the high contrast theme provides an alternative that passes. In addition, in context text should be used as color alone cannot provide the meaning.
## Regional Considerations
Some icons that indicate direction will be flipped when in Right-To-Left languages. This is a TODO still.
<file_sep>/app/ids-card/index.js
import IdsCard from '../../src/ids-card/ids-card';
<file_sep>/app/ids-alert/index.js
import IdsAlert from '../../src/ids-alert/ids-alert';
<file_sep>/src/ids-mask/ids-masks.d.ts
// Default option values accepted by `numberMask()`.
export const DEFAULT_NUMBER_MASK_OPTIONS: {
  prefix?: string,
  suffix?: string,
  allowThousandsSeparator?: boolean,
  symbols?: {
    currency?: string,
    decimal?: string,
    negative?: string,
    thousands?: string
  },
  allowDecimal?: boolean,
  decimalLimit?: number,
  locale?: string,
  requireDecimal?: boolean,
  allowNegative?: boolean,
  allowLeadingZeros: boolean,
  integerLimit?: number | null,
};
// A mask definition: literal characters mixed with per-character RegExp matchers.
declare type MaskArray = Array<RegExp|string>;
// Result shape returned by `numberMask()`.
declare type MaskReturnObject = {
  mask: MaskArray
};
// Builds a number mask array from raw input text and number-mask options.
export function numberMask(rawValue?: string, options?: Record<string, unknown>): MaskReturnObject;
// Default option values accepted by `dateMask()`.
export const DEFAULT_DATETIME_MASK_OPTIONS: {
  format: string,
  symbols?: {
    timeSeparator?: string,
    dayPeriodSeparator?: string,
    dateSeparator?: string
  }
};
// Result shape returned by `dateMask()`: the mask plus the literal
// (separator) characters and a regex matching those literals.
declare type DateMaskReturnObject = {
  mask: MaskArray,
  literals: Array<string>,
  literalRegex: RegExp
};
// Builds a date/time mask array from raw input text and date-mask options.
export function dateMask(
  rawValue?: string, options?: Record<string, unknown>
): DateMaskReturnObject;
// Result shape returned by `autocorrectedDatePipe()`.
declare type DatePipeReturnObject = {
  characterIndexes: number[],
  value: string
};
// Post-processing "pipe" that auto-corrects date segments in a mask
// process result.
export function autocorrectedDatePipe(
  processResult: Record<string, unknown>,
  options: Record<string, unknown>
): DatePipeReturnObject;
<file_sep>/app/index.js
// Import All Components Used in the Page
import IdsLoader from '../src/ids-loader/ids-loader';
import IdsButton from '../src/ids-button/ids-button';
import IdsCheckbox from '../src/ids-checkbox/ids-checkbox';
import IdsSwitch from '../src/ids-switch/ids-switch';
import IdsToggleButton from '../src/ids-toggle-button/ids-toggle-button';
import IdsMenuButton from '../src/ids-menu-button/ids-menu-button';
import IdsText from '../src/ids-text/ids-text';
import IdsHyperlink from '../src/ids-hyperlink/ids-hyperlink';
import IdsIcon from '../src/ids-icon/ids-icon';
import IdsPopup from '../src/ids-popup/ids-popup';
import IdsToolbar from '../src/ids-toolbar/ids-toolbar';
import IdsProgress from '../src/ids-progress/ids-progress';
import IdsUploadAdvanced from '../src/ids-upload-advanced/ids-upload-advanced';
import IdsTag from '../src/ids-tag/ids-tag';
import IdsLayoutGrid from '../src/ids-layout-grid/ids-layout-grid';
import IdsLayoutGridCell from '../src/ids-layout-grid/ids-layout-grid-cell';
import IdsInput from '../src/ids-input/ids-input';
import IdsTriggerField from '../src/ids-trigger-field/ids-trigger-field';
import IdsTriggerButton from '../src/ids-trigger-field/ids-trigger-button';
import IdsExpandableArea from '../src/ids-expandable-area/ids-expandable-area';
import IdsAccordion from '../src/ids-accordion/ids-accordion';
import IdsRadio from '../src/ids-radio/ids-radio';
import IdsRenderLoopMixin from '../src/ids-render-loop/ids-render-loop-mixin';
import IdsTextarea from '../src/ids-textarea/ids-textarea';
import IdsUpload from '../src/ids-upload/ids-upload';
import IdsAlert from '../src/ids-alert/ids-alert';
import IdsBadge from '../src/ids-badge/ids-badge';
import IdsBlockGrid from '../src/ids-block-grid/ids-block-grid';
import IdsBlockGridItem from '../src/ids-block-grid/ids-block-grid-item';
import IdsContainer from '../src/ids-container/ids-container';
import IdsThemeSwitcher from '../src/ids-theme-switcher/ids-theme-switcher';
import IdsWizard, { IdsWizardStep } from '../src/ids-wizard';
// Import Example Code
import './ids-menu-button/example';
import './ids-icon/example';
import './ids-popup/example';
import './ids-render-loop/example';
import './ids-toggle-button/example';
import './ids-alert/example';
import './ids-button/index';
import './ids-menu-button/index';
import './ids-checkbox/example';
import './ids-icon/index';
import './ids-input/example';
import './ids-input/test-validation-message';
import './ids-mask/example';
import './ids-progress/example';
import './ids-upload-advanced/example';
import './ids-popup/index';
import './ids-render-loop/index';
import './ids-toggle-button/index';
import './ids-trigger-field/index';
import './ids-radio/example';
import './ids-alert/index';
import './ids-badge/index';
import './ids-textarea/example';
import './ids-block-grid/index';
import './ids-wizard/index';
<file_sep>/src/ids-list-view/TODO.md
# ListView TODO's
## Tier One
- [] Selection
- [] Paging
- [] Organize Embellishment / template classes
- [] Keyboard Navigation see (standards)[https://www.w3.org/TR/wai-aria-practices/#keyboard-interaction-for-layout-grids] (actionable mode, cell navigation, row navigation)
- [] Check all current events
- [] Disabled
- [] Automation Id's
- [] All Variations
- [] Accessibility see (standards)[https://design.infor.com/code/ids-enterprise/latest/listview#accessibility]
<file_sep>/src/ids-menu/README.md
# Ids Menu Component
TBD
<file_sep>/src/ids-render-loop/ids-render-loop-item.d.ts
// Ids is a JavaScript project, but we define TypeScript declarations so we can
// confirm our code is type safe, and to support TypeScript users.
export default class IdsRenderLoopItem {
  /** lifespan of the renderloop item in milliseconds (defaults to -1, which means it has no duration) */
  duration?: number;

  /** a unique identifier for the loop item */
  id?: string;

  /** specifies the interval in which the `updateCallback` will fire in milliseconds (defaults to 1) */
  updateDuration?: number;

  /**
   * specifies a function to fire on each update tick.
   * (Previously missing from this declaration even though `updateDuration`,
   * `update()` and the JS examples all reference it.)
   */
  updateCallback?: () => void;

  /** specifies a function to fire when the timer ends */
  timeoutCallback?: () => void;

  /** determines whether or not the item is counting down its lifespan */
  readonly paused: boolean;

  /** timestamp representing the time when the loop item was registered */
  readonly startTime: number | undefined;

  /** represents the total elapsed time this item has been registered */
  readonly elapsedTime: number;

  /** describes the total time this component has been "stopped" */
  readonly totalStoppedTime?: number;

  /** destroys this RenderLoop Item and removes it from the queue */
  destroy(doTimeout?: boolean): void;

  /** pauses the loop item */
  pause(): void;

  /** resumes the loop item */
  resume(): void;

  /** programmatically causes a `timeoutCallback` to occur, if conditions allow */
  timeout(): void;

  /** programmatically causes an `updateCallback` to occur, if conditions allow */
  update(): void;
}
<file_sep>/app/ids-base/ids-resize-mixin.js
import IdsResizeMixin from '../../src/ids-base/ids-resize-mixin';
<file_sep>/src/ids-icon/ids-icon.js
import pathData from 'ids-identity/dist/theme-new/icons/standard/path-data.json';
import {
IdsElement,
scss,
customElement,
props
} from '../ids-base/ids-element';
import styles from './ids-icon.scss';
// Setting Defaults
// Maps the `size` attribute value to the pixel height/width of the svg.
const sizes = {
  large: 24,
  normal: 18,
  medium: 18, // same rendered size as "normal"
  small: 10
};
/**
* IDS Icon Component
* @type {IdsIcon}
* @inherits IdsElement
*/
@customElement('ids-icon')
@scss(styles)
class IdsIcon extends IdsElement {
constructor() {
super();
}
/**
* Return the properties we handle as getters/setters
* @returns {Array} The properties in an array
*/
static get properties() {
return [props.ICON, props.SIZE];
}
/**
* Create the Template for the contents
*
* @returns {string} The template
*/
template() {
const size = sizes[this.size];
return `<svg xmlns="http://www.w3.org/2000/svg" stroke="currentColor" fill="none" height="${size}" width="${size}" viewBox="0 0 18 18" focusable="false" aria-hidden="true" role="presentation">
${this.iconData()}
</svg>`;
}
/**
* Return the icon data for the svg based on the icon name
* @returns {string} the path data
*/
iconData() {
return pathData[this.icon];
}
/**
* Return the icon name
* @returns {string} the path data
*/
get icon() { return this.getAttribute(props.ICON) || ''; }
set icon(value) {
const svgElem = this.shadowRoot?.querySelector('svg');
if (value && svgElem) {
this.setAttribute(props.ICON, value);
svgElem.innerHTML = this.iconData();
} else {
this.removeAttribute(props.ICON);
svgElem?.remove();
}
}
/**
* Return the size. May be large, normal/medium or small
* @returns {string} the path data
*/
get size() { return this.getAttribute(props.SIZE) || 'normal'; }
set size(value) {
if (value) {
const size = sizes[this.size];
this.setAttribute(props.SIZE, value);
this.shadowRoot?.querySelector('svg')?.setAttribute('height', size);
this.shadowRoot?.querySelector('svg')?.setAttribute('width', size);
} else {
this.removeAttribute(props.SIZE);
}
}
}
export default IdsIcon;
<file_sep>/app/ids-loader/index.js
import IdsLoader from '../../src/ids-loader/ids-loader';
<file_sep>/src/ids-expandable-area/README.md
# Ids Expandable Area Component
## Description
The IDS Expandable Area component is a UI pattern that is comprised of a title, content and a toggle button or link for expanding and collapsing.
## Use Cases
Expandable areas are crucial to displaying only important information to the user. This can be especially useful in managing application pages with many sections, forms and other content. Users may benefit from having to sort through less information.
## Terminology
- **ids-expandable-area**: Container for expandable areas. The default version consists of 4 slots. `header`, `pane`, `expander-default` and `expander-expanded`.
- **header**: Usually contains an `ids-text` component and displays the title of the expandable area. When type is set to `toggle-btn` the header will container an `ids-toggle-button` and acts as the control for expanding and collapsing.
- **pane**: This slot contains the content for the expandable area. Usually contains an `ids-text` component but can be filled with anything within reason, such as, forms.
- **expander-default**: Acts as the control for expanding. Contains the text for the collapsed state.
- **expander-expanded**: Acts as the control for collapsing. Contains the text for the expanded state.
## Themeable Parts
- `container` allows you to further style the main container element
- `header` allows you to further style the header element
- `pane` allows you to further style the pane element
- `footer` allows you to further style the footer element
## Features (With Code Examples)
Ids Expandable Area - Default
```html
<ids-expandable-area>
<ids-text slot="header" font-size="16">Procurement</ids-text>
<ids-text slot="pane" font-size="14">
Ubiquitous out-of-the-box, scalable; communities disintermediate beta-test, enable utilize markets dynamic
infomediaries virtual data-driven synergistic aggregate infrastructures, "cross-platform, feeds
bleeding-edge tagclouds." Platforms extend interactive B2C benchmark proactive, embrace e-markets,
transition generate peer-to-peer.
</ids-text>
<ids-text slot="expander-default" font-size="14">Show More</ids-text>
<ids-text slot="expander-expanded" font-size="14">Show Less</ids-text>
</ids-expandable-area>
```
Ids Expandable Area - Toggle Button
```html
<ids-expandable-area type="toggle-btn">
<ids-toggle-button
slot="header"
id="test-toggle-button"
icon-on="caret-up"
icon-off="caret-down"
text-off="Employee"
text-on="Employee"
>
<span slot="text"></span>
<ids-icon slot="icon" icon="settings"></ids-icon>
</ids-toggle-button>
<ids-text slot="pane" font-size="14">
Ubiquitous out-of-the-box, scalable; communities disintermediate beta-test, enable utilize markets dynamic
infomediaries virtual data-driven synergistic aggregate infrastructures, "cross-platform, feeds
bleeding-edge tagclouds." Platforms extend interactive B2C benchmark proactive, embrace e-markets,
transition generate peer-to-peer.
</ids-text>
</ids-expandable-area>
```
## States and Variations
- **Collapsed**: The state where the expandable area’s content is not visible. Sometimes it serves as the default state, this state can also be remembered by the application.
- **Expanded**: The state where the expandable area’s content is visible. Sometimes it serves as the default state, this state can also be remembered by the application.
## Keyboard Guidelines
- **Enter or Space**: When focus is on an `expander`, this keystroke toggles the expansion of the corresponding expandable area panel. If collapsed, the panel is expanded, and its aria-expanded state is set to true. If expanded, the panel is collapsed and its aria-expanded state is set to false.
## Accessibility
<file_sep>/test/ids-mask/ids-mask-common-func-test.js
import { convertPatternFromString } from '../../src/ids-mask/ids-mask-common';
// Unit tests for the shared string -> mask-array conversion helper used by
// all IDS mask implementations.
describe('Ids Mask (common API)', () => {
  it('can convert a string-based pattern to a Javascript array', () => {
    // Double-quoted literals inside the pattern string
    let arr = convertPatternFromString('["(", /[1-9]/, /\\d/, /\\d/, ")", " ", /\\d/, /\\d/, /\\d/, "-", /\\d/, /\\d/, /\\d/, /\\d/]');
    expect(Array.isArray(arr)).toBeTruthy();
    expect(arr.length).toBe(14);
    expect(arr[0]).toBe('(');
    arr = undefined;
    // NOTE: Need to detect both types of quotes
    // eslint-disable-next-line
    arr = convertPatternFromString("['(', /[1-9]/, /\\d/, /\\d/, ')', ' ', /\\d/, /\\d/, /\\d/, '-', /\\d/, /\\d/, /\\d/, /\\d/]");
    expect(Array.isArray(arr)).toBeTruthy();
    expect(arr.length).toBe(14);
    expect(arr[0]).toBe('(');
  });
  it('cannot convert invalid patterns', () => {
    // Non-string, empty string, and non-array-looking input all yield undefined
    let arr = convertPatternFromString(5);
    expect(arr).toBeUndefined();
    arr = convertPatternFromString('');
    expect(arr).toBeUndefined();
    // Add more types of input eventually, but make this pass all code paths
    arr = convertPatternFromString('A');
    expect(arr).toBeUndefined();
  });
});
<file_sep>/src/ids-trigger-field/ids-trigger-field.js
import {
IdsElement,
customElement,
mix,
scss,
props,
stringUtils
} from '../ids-base/ids-element';
import { IdsEventsMixin } from '../ids-base/ids-events-mixin';
import { IdsThemeMixin } from '../ids-base/ids-theme-mixin';
import styles from './ids-trigger-field.scss';
// Supporting components
import { IdsButton } from '../ids-button/ids-button';
import IdsInput from '../ids-input/ids-input';
import IdsTriggerButton from './ids-trigger-button';
/**
* IDS Trigger Field Component
* @type {IdsTriggerField}
* @inherits IdsElement
* @mixes IdsEventsMixin
* @mixes IdsThemeMixin
* @part field - the field container
*/
@customElement('ids-trigger-field')
@scss(styles)
class IdsTriggerField extends mix(IdsElement).with(IdsEventsMixin, IdsThemeMixin) {
/**
* Call the constructor and then initialize
*/
constructor() {
super();
}
/**
* Custom Element `connectedCallback` implementation
* @returns {void}
*/
connectedCallback() {
this.handleEvents();
super.connectedCallback();
}
/**
* Return the properties we handle as getters/setters
* @returns {Array} The properties in an array
*/
static get properties() {
return [props.TABBABLE, props.APPEARANCE, props.DISABLE_EVENTS];
}
/**
* Create the Template for the contents
* @returns {string} The template
*/
template() {
return `<div class="ids-trigger-field" part="field"><slot></slot></div>`;
}
/**
* Set if the trigger field is tabbable
* @param {boolean|string} value True of false depending if the trigger field is tabbable
*/
set tabbable(value) {
const isTabbable = stringUtils.stringToBool(value);
/** @type {any} */
const button = this.querySelector('ids-trigger-button');
this.setAttribute(props.TABBABLE, value.toString());
button.tabbable = isTabbable;
}
get tabbable() { return this.getAttribute(props.TABBABLE); }
/**
* Set the appearance of the trigger field
* @param {string} value Provide different options for appearance 'normal' | 'compact'
*/
set appearance(value) {
if (value) {
this.setAttribute(props.APPEARANCE, value);
return;
}
this.setAttribute(props.APPEARANCE, 'normal');
}
get appearance() { return this.getAttribute(props.APPEARANCE); }
/**
* Set if the button handles events
* @param {boolean|string} value True of false depending if the button handles events
*/
set disableNativeEvents(value) {
const isDisabled = stringUtils.stringToBool(value);
if (isDisabled) {
this.setAttribute(props.DISABLE_EVENTS, value.toString());
this.handleEvents();
return;
}
this.removeAttribute(props.DISABLE_EVENTS);
}
get disableNativeEvents() { return this.getAttribute(props.DISABLE_EVENTS); }
/**
* Establish Internal Event Handlers
* @private
* @returns {object} The object for chaining.
*/
handleEvents() {
if (this.disableNativeEvents) {
return false;
}
/** @type {any} */
const button = this.querySelector('ids-trigger-button');
if (button) {
this.onEvent('click', button, () => this.trigger());
}
return this;
}
/**
* Fire the trigger event and action.
*/
trigger() {
let canTrigger = true;
const response = (/** @type {any} */ veto) => {
canTrigger = !!veto;
};
this.triggerEvent('beforetriggerclicked', this, { detail: { elem: this, response } });
if (!canTrigger) {
return;
}
this.triggerEvent('triggerclicked', this, { detail: { elem: this } });
}
}
export default IdsTriggerField;
<file_sep>/test/ids-virtual-scroll/ids-virtual-scroll-func-test.js
/**
* @jest-environment jsdom
*/
import IdsVirtualScroll from '../../src/ids-virtual-scroll/ids-virtual-scroll';
import dataset from '../../app/data/products.json';
// Functional tests for IdsVirtualScroll: row windowing, scroll handling,
// attribute round-trips, and data resets.
describe('IdsVirtualScroll Component', () => {
  let virtualScroll;
  // Builds a virtual scroll with a 308px viewport, 20px rows and the
  // bundled products dataset; returns the attached element.
  const appendVirtualScroll = () => {
    const elem = new IdsVirtualScroll();
    elem.innerHTML = `<div class="ids-list-view"><ul slot="contents"></ul></div>`;
    document.body.appendChild(elem);
    elem.height = 308;
    elem.itemHeight = 20;
    elem.itemTemplate = (item) => `<li class="ids-virtual-scroll-item">${item.productName}</li>`;
    elem.data = dataset;
    return elem;
  };
  beforeEach(async () => {
    virtualScroll = appendVirtualScroll();
  });
  afterEach(async () => {
    document.body.innerHTML = '';
  });
  it('renders with no errors', () => {
    document.body.innerHTML = '';
    const errors = jest.spyOn(global.console, 'error');
    appendVirtualScroll();
    expect(document.querySelectorAll('ids-virtual-scroll').length).toEqual(1);
    expect(errors).not.toHaveBeenCalled();
  });
  it('renders correctly', () => {
    expect(virtualScroll.innerHTML).toMatchSnapshot();
  });
  // A native scroll event with no scrollTop change should not re-render rows
  it('renders rows on native scroll events', async () => {
    const startingHtml = virtualScroll.innerHTML;
    virtualScroll.container.dispatchEvent(new Event('scroll'));
    expect(virtualScroll.innerHTML).toEqual(startingHtml);
  });
  it('renders rows on scroll', async () => {
    const startingHtml = virtualScroll.innerHTML;
    virtualScroll.scrollTop = 30000;
    virtualScroll.handleScroll({ target: virtualScroll });
    // handleScroll renders asynchronously; give it a tick
    await new Promise((r) => setTimeout(r, 50));
    expect(virtualScroll.innerHTML).not.toEqual(startingHtml);
  });
  // Rapid repeated calls should coalesce rather than render three times
  it('renders cancels multiple handleScroll', async () => {
    const startingHtml = virtualScroll.innerHTML;
    virtualScroll.scrollTop = 500;
    virtualScroll.handleScroll({ target: virtualScroll });
    virtualScroll.scrollTop = 500;
    virtualScroll.handleScroll({ target: virtualScroll });
    virtualScroll.scrollTop = 501;
    virtualScroll.handleScroll({ target: virtualScroll });
    await new Promise((r) => setTimeout(r, 50));
    expect(virtualScroll.innerHTML).not.toEqual(startingHtml);
  });
  // Larger buffer -> more rows rendered outside the viewport
  it('can set the bufferSize attribute', async () => {
    expect((virtualScroll.innerHTML.match(/<li/g) || []).length).toEqual(56);
    virtualScroll.bufferSize = 100;
    expect(virtualScroll.getAttribute('buffer-size')).toEqual('100');
    virtualScroll.renderItems();
    expect((virtualScroll.innerHTML.match(/<li/g) || []).length).toEqual(216);
  });
  it('removes the height attribute when reset', () => {
    virtualScroll.height = null;
    expect(virtualScroll.getAttribute('height')).toEqual(null);
  });
  it('removes the bufferSize attribute when reset', () => {
    virtualScroll.bufferSize = null;
    expect(virtualScroll.getAttribute('buffer-size')).toEqual(null);
  });
  it('removes the itemHeight attribute when reset', () => {
    virtualScroll.itemHeight = null;
    expect(virtualScroll.getAttribute('item-height')).toEqual(null);
  });
  it('removes the itemCount attribute when reset', () => {
    virtualScroll.itemCount = null;
    expect(virtualScroll.getAttribute('item-count')).toEqual(null);
  });
  it('removes the data value when reset', () => {
    virtualScroll.data = null;
    expect(virtualScroll.datasource.data).toEqual(null);
  });
  // stringTemplate interpolates ${...} placeholders against the item object
  it('has a simple default template', () => {
    const elem = new IdsVirtualScroll();
    elem.stringTemplate = '<div class="ids-virtual-scroll-item">${productName}</div>'; //eslint-disable-line
    const template = elem.itemTemplate({ productName: 'test' });
    expect(template).toEqual('<div class="ids-virtual-scroll-item">test</div>');
  });
  it('handles setting scrollTarget', () => {
    const errors = jest.spyOn(global.console, 'error');
    virtualScroll.scrollTarget = virtualScroll.shadowRoot.querySelector('.ids-virtual-scroll');
    expect(virtualScroll.scrollTarget).not.toBe(null);
    virtualScroll.scrollTarget = null;
    expect(errors).not.toHaveBeenCalled();
  });
  // scrollToIndex positions at index * itemHeight (900 * 20 = 18000)
  it('can scroll to an item', () => {
    expect(virtualScroll.scrollTop).toEqual('0');
    virtualScroll.scrollToIndex(900);
    expect(virtualScroll.scrollTop).toEqual('18000');
  });
  it('can reset the scrollTop', () => {
    expect(virtualScroll.scrollTop).toEqual('0');
    virtualScroll.scrollTop = null;
    virtualScroll.scrollTop = 100;
    virtualScroll.scrollTop = 0;
    virtualScroll.scrollTop = null;
    expect(virtualScroll.scrollTop).toEqual(0);
    expect(virtualScroll.getAttribute('scroll-top')).toEqual(null);
  });
  it('can reset the data', () => {
    expect(virtualScroll.querySelectorAll('li').length).toEqual(56);
    virtualScroll.data = virtualScroll.data.slice(1, 10);
    expect(virtualScroll.querySelectorAll('li').length).toEqual(9);
  });
  it('can reset the data to zero', () => {
    expect(virtualScroll.querySelectorAll('li').length).toEqual(56);
    virtualScroll.data = [];
    expect(virtualScroll.querySelectorAll('li').length).toEqual(0);
  });
});
<file_sep>/app/ids-radio/example.js
// Supporting components
import IdsButton from '../../src/ids-button/ids-button';
import IdsLayoutGrid from '../../src/ids-layout-grid/ids-layout-grid';
// Demo-page wiring: the validate/clear buttons drive validation on the
// radio group with id `ids-radio-validation`.
document.addEventListener('DOMContentLoaded', () => {
  const radioGroup = document.querySelector('#ids-radio-validation');
  const validateBtn = document.querySelector('#btn-radio-validate');
  const clearBtn = document.querySelector('#btn-radio-clear');

  // Trigger validation on demand
  validateBtn?.addEventListener('click', () => {
    radioGroup?.checkValidation();
  });

  // Clear any validation state
  clearBtn?.addEventListener('click', () => {
    radioGroup?.clear();
  });
});
<file_sep>/src/ids-base/ids-constants.js
// Please keep constants alphabetized
export const props = {
ACCEPT: 'accept',
ALTERNATE_ROW_SHADING: 'alternate-row-shading',
ANIMATED: 'animated',
ALIGN: 'align',
APPEARANCE: 'appearance',
AUDIBLE: 'audible',
AUTO: 'auto',
AUTOGROW: 'autogrow',
AUTOGROW_MAX_HEIGHT: 'autogrow-max-height',
AUTO_HEIGHT: 'auto-height',
AUTOSELECT: 'autoselect',
BG_TRANSPARENT: 'bg-transparent',
CHAR_MAX_TEXT: 'char-max-text',
CHAR_REMAINING_TEXT: 'char-remaining-text',
CHARACTER_COUNTER: 'character-counter',
CHECKED: 'checked',
CLEARABLE: 'clearable',
CLEARABLE_FORCED: 'clearable-forced',
COL_END: 'col-end',
COL_SPAN: 'col-span',
COL_START: 'col-start',
COLOR: 'color',
COLS: 'cols',
CLICKABLE: 'clickable',
CSS_CLASS: 'css-class',
DATA: 'data',
DELAY: 'delay',
DIRTY_TRACKER: 'dirty-tracker',
DISABLE_EVENTS: 'disable-native-events',
DISABLED: 'disabled',
DISMISSIBLE: 'dismissible',
ERROR: 'error',
EXPANDED: 'expanded',
FILE: 'file',
FILE_NAME: 'file-name',
FILL: 'fill',
FIXED: 'fixed',
FOCUSABLE: 'focusable',
FONT_SIZE: 'font-size',
FONT_WEIGHT: 'font-weight',
GAP: 'gap',
GROUP: 'group',
GROUP_DISABLED: 'group-disabled',
HORIZONTAL: 'horizontal',
HREF: 'href',
ICON: 'icon',
ICON_ALIGN: 'icon-align',
ID: 'id',
INDETERMINATE: 'indeterminate',
KEEP_OPEN: 'keep-open',
LABEL: 'label',
LABEL_AUDIBLE: 'label-audible',
LABEL_REQUIRED: 'label-required',
LABEL_FILETYPE: 'label-filetype',
MAX: 'max',
MAX_FILE_SIZE: 'max-file-size',
MAX_FILES: 'max-files',
MAX_FILES_IN_PROCESS: 'max-files-in-process',
MASK: 'mask',
MASK_GUIDE: 'mask-guide',
MASK_OPTIONS: 'mask-options',
MASK_RETAIN_POSITIONS: 'mask-retain-positions',
MAXLENGTH: 'maxlength',
MENU: 'menu',
METHOD: 'method',
MODE: 'mode',
MULTIPLE: 'multiple',
NO_MARGINS: 'no-margins',
NO_TEXT_ELLIPSIS: 'no-text-ellipsis',
OVERFLOW: 'overflow',
PLACEHOLDER: 'placeholder',
PLACEMENT: 'placement',
PARAM_NAME: 'param-name',
PRINTABLE: 'printable',
READONLY: 'readonly',
RESIZABLE: 'resizable',
REQUIRED: 'required',
ROW_END: 'row-end',
ROW_HEIGHT: 'row-height',
ROW_SPAN: 'row-span',
ROW_START: 'row-start',
ROWS: 'rows',
SCROLLABLE: 'scrollable',
SELECT: 'select',
SELECTED: 'selected',
SHAPE: 'shape',
SHOW_BROWSE_LINK: 'show-browse-link',
SIZE: 'size',
STEP_NUMBER: 'step-number',
SUBMENU: 'submenu',
TABBABLE: 'tabbable',
TABINDEX: 'tabindex',
TARGET: 'target',
TOOLTIP: 'tooltip',
TRIGGER: 'trigger',
TRIGGERFIELD: 'triggerfield',
TRIGGER_LABEL: 'trigger-label',
TEXT: 'text',
TEXT_ALIGN: 'text-align',
TEXT_ELLIPSIS: 'text-ellipsis',
TYPE: 'type',
URL: 'url',
VALIDATE: 'validate',
VALIDATION_EVENTS: 'validation-events',
VALIDATION_HAS_ERROR: 'validation-has-error',
VALUE: 'value',
VERSION: 'version',
VIRTUAL_SCROLL: 'virtual-scroll',
VISIBLE: 'visible',
};
// Please keep constants alphabetized
// Namespace prefix shared by all IDS component tag names/attributes.
export const prefix = {
  PREFIX: 'ids'
};
<file_sep>/test/ids-card/ids-card-func-test.js
/**
* @jest-environment jsdom
*/
import IdsCard from '../../src/ids-card/ids-card';
// Functional tests for IdsCard: rendering, the `auto-height` attribute
// round-trip, and theme mode/version pass-through.
describe('IdsCard Component', () => {
  let card;
  // Build a card with header/content slots before each test
  beforeEach(async () => {
    const elem = new IdsCard();
    elem.innerHTML = `<div slot="card-header">
            <ids-text font-size="20" type="h2">Card Title Two</ids-text>
          </div>
          <div slot="card-content">
          </div>`;
    document.body.appendChild(elem);
    card = document.querySelector('ids-card');
  });
  afterEach(async () => {
    document.body.innerHTML = '';
  });
  it('renders with no errors', () => {
    const errors = jest.spyOn(global.console, 'error');
    const elem = new IdsCard();
    document.body.appendChild(elem);
    elem.remove();
    expect(document.querySelectorAll('ids-card').length).toEqual(1);
    expect(errors).not.toHaveBeenCalled();
  });
  it('renders correctly', () => {
    expect(card.outerHTML).toMatchSnapshot();
  });
  it('renders auto-height from an attribute', () => {
    card.setAttribute('auto-height', 'true');
    expect(card.getAttribute('auto-height')).toEqual('true');
    expect(card.container.classList.contains('ids-card-auto-height')).toEqual(true);
  });
  // NOTE(review): test name mentions "success color" but the body exercises
  // autoHeight — looks copy-pasted from another suite; confirm intent.
  it('renders success color from the api', () => {
    card.autoHeight = true;
    expect(card.getAttribute('auto-height')).toEqual('true');
    expect(card.autoHeight).toEqual('true');
  });
  it('removes the clickable attribute when reset', () => {
    card.autoHeight = true;
    expect(card.getAttribute('auto-height')).toEqual('true');
    card.autoHeight = false;
    expect(card.getAttribute('auto-height')).toEqual(null);
    expect(card.autoHeight).toEqual(null);
  });
  it('supports setting mode', () => {
    card.mode = 'dark';
    expect(card.container.getAttribute('mode')).toEqual('dark');
  });
  it('supports setting version', () => {
    card.version = 'classic';
    expect(card.container.getAttribute('version')).toEqual('classic');
  });
});
<file_sep>/src/ids-tooltip/README.MD
# Ids Tooltip Component
## Description
Tooltips are a component that is used in conjunction with other components to show additional, non-critical information related to an element such as a button or field. A tooltip displays information on hover, tab, and/or focus. In this version we have separated the tooltip from popups.
## Use Cases
- Displays additional, non-critical information that is not discernible from the interface/design.
## Terminology
- **Popup**: Anything that pops up and positions using the ids-popup component
- **Tooltip**: A message which appears when a cursor is positioned over an icon, image, hyperlink, or other element in a graphical user interface.
## Features (With Code Examples)
A normal tooltip used as a web component attached to a button.
```html
<ids-button id="button-1">Example Button</ids-button>
<ids-tooltip target="button-1" placement="top">Additional Information</ids-tooltip>
```
A tooltip can also be used more conveniently for text only tooltips on inputs and buttons as a mixin.
```html
<ids-button id="button-1" tooltip="Additional Information">Example Button</ids-button>
```
A tooltip's content can be set in a synchronous way with the `textContent` attribute.
```js
document.querySelector('#button-1').textContent = 'New Content';
```
A tooltip's content can be set in an asynchronous way with the `beforeShow` property.
```js
const getContents = () => new Promise((resolve) => {
const xhr = new XMLHttpRequest();
xhr.open('get', '/api/bikes', true);
xhr.onload = () => {
const status = xhr.status;
if (status === 200) {
resolve(JSON.parse(xhr.responseText)[1].manufacturerName);
}
};
xhr.send();
});
const tooltipAsync = document.querySelector('[target="#tooltip-async"]');
tooltipAsync.beforeShow = async function beforeShow() {
return getContents();
};
```
A tooltip can be manually set to visible, but it still needs an alignment target to do so.
```js
document.querySelector('#button-1').visible = true;
```
## Usage Guidance
- Do not show critical information in a tooltip or use it for functionality, it should be strictly for supplemental information
- Write concise tooltip text. Imagine someone on a small screen or with high zoom needing to pan around just to read the tooltip.
- Avoid rich content. Formatting such as bold text, italics, headings, icons, etc. will not be conveyed through `aria-describedby` or `aria-labelledby`.
- No interactive content. Any interactive content such as links or buttons should not be placed within a tooltip.
## Settings and Attributes
- `delay` {string | number} Set how long after hover you should delay before showing
- `placement` {string} Sets the tooltip placement between left, right, top, bottom
- `target` {string | HTMLElement} Set the target element via a css selector or HTMLElement
- `trigger` {string} Set the trigger against the target; one of hover, click or focus
- `visible` {string | boolean} Set tooltip immediately to visible/invisible
## Themeable Parts
- `popup` allows you to further style the popup container element
- `tooltip` allows you to further style the tooltip element
## States and Variations (With Code Examples)
- Color
- Open
- Closed
## Keyboard Guidelines
- <kbd>Tab/Shift+Tab</kbd>: If the element tooltip is showing on focus then this will cause the tooltip to appear.
## Responsive Guidelines
- Tooltips do not work well in mobile or in responsive situations and should be avoided. But the tooltip will attempt to adjust to fit the page as the popup menu allows.
## Converting from Previous Versions
- 3.x: Tags have all new markup and classes.
- 4.x: Tags have all new markup and classes for web components.
## Accessibility Guidelines
For a good article on making the tooltip accessible see [Tooltips WCAG 2.1](https://sarahmhigley.com/writing/tooltips-in-wcag-21/)
To make the tooltip accessible we do the following:
We use `aria-describedby` or `aria-labelledby` to associate the UI control with the tooltip. Do not put essential information in tooltips.
The following general rules apply:
- Do not add interactive content (buttons/links etc.) in a tooltip
- Only interactive elements should trigger tooltips. While disabled tooltips do work on hover they do not work on focus so are not accessible and should be avoided.
- Tooltip text is purely supplemental, and should not override the existing accessible name for a control.
## Regional Considerations
Tooltips should be localized in the current language. Consider that in some languages text may be a lot longer (German), and in some cases it can't be wrapped (Thai).
<file_sep>/src/ids-skip-link/README.MD
# Ids Skip Link Component
## Description
A skip link is a single element added first thing in your page. When the skip link is focused from the browser URL using <kbd>Tab</kbd>, it will activate allowing you to jump to a main section. The main content is not usually the first thing on a web page. Keyboard and screen reader users generally must navigate a long list of navigation links, sub-lists of links, corporate icons, site searches, and other elements before ever arriving at the main content. This is particularly difficult for users with some forms of motor disabilities.
For more details info on skip links see <a href="https://webaim.org/techniques/skipnav/" target="_blank">this article</a>.
## Use Cases
- When you want to add accessibility features to your application
- When you have a lot of content at the top of every page such as links/navigation/headers that may need to be jumped over when familiar with the page
## Terminology
- **Link/Hyperlink**: An interactive link to another page within Infor software to external destinations.
- **Skip Link**: An interactive link to jump past some page content
## Features (With Code Examples)
A normal skip link element as a custom element. That will link to a section with `id="main-content"` somewhere else in the page
```html
<ids-skip-link href="#main-content">Skip to Main Content</ids-skip-link>
```
## Settings and Attributes
- `href` {string} Set the links href to a url or file
- `mode` {string} Set the theme mode
- `version` {string} Set the theme version
## Themeable Parts
- `skiplink` allows you to further style the skip link element
## States and Variations (With Code Examples)
- Focused / Active
## Keyboard Guidelines
- When you <kbd>Tab</kbd> from the browser bar into the page, the skip link will appear and be focused
- <kbd>Enter</kbd> will follow the link to the `id` of the content area you specify
## Responsive Guidelines
- Skip links are fixed position
## Converting from Previous Versions
- 3.x: Used a skip link css class on `<a>` tags but is now a custom element
- 4.x: Used a skip link css class on `<a>` tags but is now a custom element
## Designs
[Design Specs](https://www.figma.com/file/ok0LLOT9PP1J0kBkPMaZ5c/IDS_Component_File_v4.6-(Draft))
## Accessibility Guidelines
- 1.4.1 Use of Color - Color is not used as the only visual means of conveying information, indicating an action, prompting a response, or distinguishing a visual element. Ensure the color tags that indicate state like OK, cancel, etc. have other ways to indicate that information. This is failing.
- 1.4.3 Contrast (Minimum) - The visual presentation of text and images of text has a contrast ratio of at least 4.5:1. Ensure the color tags pass contrast.
## Regional Considerations
Link text should be localized in the current language. And should flip to the alternate side in Right To Left mode. Consider that in some languages text may be a lot longer (German), and in some cases it can't be wrapped (Thai).
<file_sep>/src/ids-toggle-button/ids-toggle-button.d.ts
// Ids is a JavaScript project, but we define TypeScript declarations so we can
// confirm our code is type safe, and to support TypeScript users.
import { IdsButton } from '../ids-button/ids-button';
export default class IdsToggleButton extends IdsButton {
  /** Contains space-delimited CSS classes (or an array of CSS classes) that will be passed to the Shadow Root button */
  cssClass?: Array<string> | string | null;

  /** A string representing an icon to display inside the button. This icon will become the content of the Shadow Root button's `icon` slot when set. */
  icon?: string | null;

  /** API-level method of setting a button's text content. This will become the content of the Shadow Root button's `text` slot when set. */
  text?: string;

  /** The type/purpose of the button to display */
  type: 'default' | 'primary' | 'secondary' | 'tertiary' | 'destructive'

  /** Set the theme mode */
  mode: 'light' | 'dark' | 'contrast' | string;

  /** Set the theme version */
  version: 'new' | 'classic' | string;

  /** Provides a direct reference to the Shadow Root's HTMLButtonElement. */
  readonly button: HTMLElement;

  /** Toggles the "pressed" state of the button */
  toggle(): void;
}
<file_sep>/test/ids-popup-menu/ids-popup-menu-data-driven-func-test.js
/**
* @jest-environment jsdom
*/
import IdsPopupMenu, {
IdsMenuGroup,
IdsMenuHeader,
IdsMenuItem,
IdsSeparator
} from '../../src/ids-popup-menu/ids-popup-menu';
import IdsPopup from '../../src/ids-popup/ids-popup';
// Pull in menu contents
import dataset from '../../app/data/menu-contents.json';
describe('IdsPopupMenu Component', () => {
  // Component under test; rebuilt from the shared JSON dataset before each spec
  let menu;

  beforeEach(() => {
    // Invoke/Append the main menu
    menu = new IdsPopupMenu();
    menu.id = 'test-menu';
    menu.data = dataset;
    document.body.appendChild(menu);
  });

  afterEach(() => {
    document.body.innerHTML = '';
    menu = null;
  });

  it('should render', () => {
    const errors = jest.spyOn(global.console, 'error');

    // Three popupmenus (top level and 2 submenus)
    expect(document.querySelectorAll('ids-popup-menu').length).toEqual(3);
    expect(errors).not.toHaveBeenCalled();
  });

  it('reverts to markup-driven if handed an empty dataset', () => {
    menu.data = null;

    // both old data and markup should stay in-tact
    let menus = document.querySelectorAll('ids-popup-menu');

    expect(menu.data).toEqual([]);
    expect(menus.length).toEqual(3);

    // Removing one via markup should work fine
    menus[2].remove();
    menus = document.querySelectorAll('ids-popup-menu');

    // Data shouldn't change, but markup will
    expect(menu.data).toEqual([]);
    expect(menus.length).toEqual(2);
  });

  it('accepts an array as a `contents` property', () => {
    const errors = jest.spyOn(global.console, 'error');
    document.body.innerHTML = '';
    menu = new IdsPopupMenu();
    menu.id = 'test-menu';
    // A bare array (no wrapping data object) should still render groups/items
    menu.data = [
      {
        type: 'group',
        items: [
          {
            id: 'item-1',
            text: 'Item One',
            value: 1
          },
          {
            id: 'item-2',
            text: 'Item Two',
            value: 2
          }
        ]
      }
    ];
    document.body.appendChild(menu);

    expect(errors).not.toHaveBeenCalled();
    expect(menu.groups.length).toEqual(1);
    expect(menu.items.length).toEqual(2);
  });

  it('renders with no errors when given an empty dataset', () => {
    const errors = jest.spyOn(global.console, 'error');
    document.body.innerHTML = '';
    menu = new IdsPopupMenu();
    menu.id = 'test-menu';
    menu.data = {};
    document.body.appendChild(menu);

    expect(errors).not.toHaveBeenCalled();
  });

  it('won\'t render contents if the data object has no items in its `contents` array', () => {
    const errors = jest.spyOn(global.console, 'error');
    document.body.innerHTML = '';
    menu = new IdsPopupMenu();
    menu.data = {
      contents: []
    };
    document.body.appendChild(menu);

    expect(errors).not.toHaveBeenCalled();
    expect(menu.groups.length).toEqual(0);
  });

  it('propagates an `id` property on a data object as the Popupmenu\'s `id` attribute', () => {
    const errors = jest.spyOn(global.console, 'error');
    document.body.innerHTML = '';
    menu = new IdsPopupMenu();
    menu.data = {
      id: 'test-menu',
      contents: []
    };
    document.body.appendChild(menu);

    expect(errors).not.toHaveBeenCalled();
    expect(menu.id).toBe('test-menu');
  });

  it('won\'t render contents if the contents property is not valid', () => {
    const errors = jest.spyOn(global.console, 'error');
    document.body.innerHTML = '';
    menu = new IdsPopupMenu();
    // `contents` should be an array; an arbitrary string must be ignored
    menu.data = {
      contents: 'cake'
    };
    document.body.appendChild(menu);

    expect(errors).not.toHaveBeenCalled();
    expect(menu.groups.length).toEqual(0);
  });

  it('won\'t render a group if it has no `items` property', () => {
    const errors = jest.spyOn(global.console, 'error');
    document.body.innerHTML = '';
    menu = new IdsPopupMenu();
    menu.data = {
      id: 'test-menu',
      contents: [
        {
          type: 'group',
          id: 'empty-group'
        }
      ]
    };
    document.body.appendChild(menu);

    expect(errors).not.toHaveBeenCalled();
    expect(menu.groups.length).toEqual(0);
  });

  it('won\'t render a group if its `items` property has no items present', () => {
    const errors = jest.spyOn(global.console, 'error');
    document.body.innerHTML = '';
    menu = new IdsPopupMenu();
    menu.data = {
      id: 'test-menu',
      contents: [
        {
          type: 'group',
          id: 'empty-group',
          items: []
        }
      ]
    };
    document.body.appendChild(menu);

    expect(errors).not.toHaveBeenCalled();
    expect(menu.groups.length).toEqual(0);
  });

  it('won\'t render an item\'s submenu if the submenu has no `contents` property', () => {
    const errors = jest.spyOn(global.console, 'error');
    document.body.innerHTML = '';
    menu = new IdsPopupMenu();
    menu.data = {
      id: 'test-menu',
      contents: [
        {
          type: 'group',
          id: 'empty-group',
          items: [
            {
              id: 'my-item',
              text: 'My Menu Item',
              submenu: {
                id: 'my-submenu'
              }
            }
          ]
        }
      ]
    };
    document.body.appendChild(menu);
    const item = document.querySelector('#my-item');

    // Item renders, but without a submenu attached
    expect(errors).not.toHaveBeenCalled();
    expect(item).toBeDefined();
    expect(item.hasSubmenu).toBeFalsy();
  });

  it('won\'t render an item\'s submenu if the submenu\'s `contents` property is invalid', () => {
    const errors = jest.spyOn(global.console, 'error');
    document.body.innerHTML = '';
    menu = new IdsPopupMenu();
    menu.data = {
      id: 'test-menu',
      contents: [
        {
          type: 'group',
          id: 'empty-group',
          items: [
            {
              id: 'my-item',
              text: 'My Menu Item',
              submenu: {
                id: 'my-submenu',
                contents: 'fish'
              }
            }
          ]
        }
      ]
    };
    document.body.appendChild(menu);
    const item = document.querySelector('#my-item');

    // Item renders, but the invalid submenu definition is discarded
    expect(errors).not.toHaveBeenCalled();
    expect(item).toBeDefined();
    expect(item.hasSubmenu).toBeFalsy();
  });
});
<file_sep>/test/ids-trigger-button/ids-trigger-button-func-test.js
/**
* @jest-environment jsdom
*/
import IdsTriggerButton from '../../src/ids-trigger-field/ids-trigger-button';
describe('IdsTriggerButton Component', () => {
  // Component under test, re-created for every spec
  let btn;

  // Helper: the native <button> rendered inside the component's shadow root
  const nativeButton = () => btn.shadowRoot.querySelector('button');

  beforeEach(async () => {
    document.body.appendChild(new IdsTriggerButton());
    btn = document.querySelector('ids-trigger-button');
  });

  afterEach(async () => {
    document.body.innerHTML = '';
  });

  it('renders with no errors', () => {
    const errors = jest.spyOn(global.console, 'error');

    // Append a second instance, then remove it; only the beforeEach one remains
    const extra = new IdsTriggerButton();
    document.body.appendChild(extra);
    extra.remove();

    expect(document.querySelectorAll('ids-trigger-button').length).toEqual(1);
    expect(errors).not.toHaveBeenCalled();
  });

  it('renders correctly', () => {
    expect(btn.outerHTML).toMatchSnapshot();
  });

  it('defaults tabbable to true', () => {
    expect(btn.tabbable).toEqual(true);

    // Removing the attribute entirely still reports the default
    btn.removeAttribute('tabbable');
    expect(btn.tabbable).toEqual(true);
  });

  it('supports tabbable', () => {
    // Setting tabbable reflects onto the shadow button's tabindex
    btn.tabbable = true;
    expect(nativeButton().getAttribute('tabindex')).toEqual('0');
    expect(btn.tabbable).toEqual('true');

    btn.tabbable = false;
    expect(nativeButton().getAttribute('tabindex')).toEqual('-1');
    expect(btn.tabbable).toEqual('false');
  });
});
<file_sep>/src/ids-wizard/index.js
// Re-export the wizard component (default export) and its step sub-component
export { default } from './ids-wizard';
export { default as IdsWizardStep } from './ids-wizard-step';
<file_sep>/test/ids-checkbox/ids-checkbox-func-test.js
/**
* @jest-environment jsdom
*/
import IdsCheckbox from '../../src/ids-checkbox/ids-checkbox';
describe('IdsCheckbox Component', () => {
  // Component under test; re-created before each spec
  let cb;

  beforeEach(async () => {
    const elem = new IdsCheckbox();
    document.body.appendChild(elem);
    cb = document.querySelector('ids-checkbox');
  });

  afterEach(async () => {
    document.body.innerHTML = '';
  });

  it('renders with no errors', () => {
    const errors = jest.spyOn(global.console, 'error');
    const elem = new IdsCheckbox();
    document.body.appendChild(elem);
    elem.remove();

    expect(document.querySelectorAll('ids-checkbox').length).toEqual(1);
    expect(errors).not.toHaveBeenCalled();
  });

  it('should render checked', () => {
    cb.checked = 'true';

    expect(cb.getAttribute('checked')).toEqual('true');
    expect(cb.checked).toEqual('true');
  });

  it('should handle dirty tracking', () => {
    // Off by default: no dirty icon or message rendered
    expect(cb.getAttribute('dirty-tracker')).toEqual(null);
    expect(cb.shadowRoot.querySelector('.icon-dirty')).toBeFalsy();
    expect(cb.labelEl.querySelector('.msg-dirty')).toBeFalsy();

    // Enabling the tracker alone doesn't mark anything dirty yet
    cb.dirtyTracker = true;

    expect(cb.getAttribute('dirty-tracker')).toEqual('true');
    expect(cb.shadowRoot.querySelector('.icon-dirty')).toBeFalsy();
    expect(cb.labelEl.querySelector('.msg-dirty')).toBeFalsy();

    // Changing the value away from the original shows the dirty indicators
    cb.checked = true;
    let val = cb.valMethod(cb.input);
    cb.setDirtyTracker(val);

    expect(cb.getAttribute('dirty-tracker')).toEqual('true');
    expect(cb.shadowRoot.querySelector('.icon-dirty')).toBeTruthy();
    expect(cb.labelEl.querySelector('.msg-dirty')).toBeTruthy();

    // Restoring the original value clears the dirty indicators
    cb.checked = false;
    val = cb.valMethod(cb.input);
    cb.setDirtyTracker(val);

    expect(cb.getAttribute('dirty-tracker')).toEqual('true');
    expect(cb.shadowRoot.querySelector('.icon-dirty')).toBeFalsy();
    expect(cb.labelEl.querySelector('.msg-dirty')).toBeFalsy();

    // Disabling the tracker removes the attribute entirely
    cb.dirtyTracker = false;

    expect(cb.getAttribute('dirty-tracker')).toEqual(null);
    expect(cb.shadowRoot.querySelector('.icon-dirty')).toBeFalsy();
    expect(cb.labelEl.querySelector('.msg-dirty')).toBeFalsy();
  });

  it('should render as disabled', () => {
    expect(cb.getAttribute('disabled')).toEqual(null);
    expect(cb.input.hasAttribute('disabled')).toBe(false);
    let rootEl = cb.shadowRoot.querySelector('.ids-checkbox');

    expect(rootEl.classList).not.toContain('disabled');

    // Disabled state reflects to the attribute, input and root class
    cb.disabled = true;

    expect(cb.getAttribute('disabled')).toEqual('true');
    expect(cb.input.hasAttribute('disabled')).toBe(true);
    rootEl = cb.shadowRoot.querySelector('.ids-checkbox');

    expect(rootEl.classList).toContain('disabled');

    cb.disabled = false;

    expect(cb.getAttribute('disabled')).toEqual(null);
    expect(cb.input.hasAttribute('disabled')).toBe(false);
    rootEl = cb.shadowRoot.querySelector('.ids-checkbox');

    expect(rootEl.classList).not.toContain('disabled');
  });

  it('should add/remove required error', () => {
    cb.validate = 'required';

    expect(cb.getAttribute('validate')).toEqual('required');
    expect(cb.validate).toEqual('required');
    expect(cb.labelEl.classList).toContain('required');
    expect(cb.shadowRoot.querySelector('.validation-message')).toBeFalsy();

    // Unchecked + required produces a validation message
    cb.checkValidation();
    const msgEl = cb.shadowRoot.querySelector('.validation-message');

    expect(msgEl).toBeTruthy();
    expect(msgEl.getAttribute('validation-id')).toEqual('required');

    // Checking the box satisfies "required" and removes the message
    cb.checked = true;
    cb.checkValidation();

    expect(cb.shadowRoot.querySelector('.validation-message')).toBeFalsy();
  });

  it('should set validation events', () => {
    expect(cb.getAttribute('validate')).toEqual(null);
    expect(cb.getAttribute('validation-events')).toEqual(null);

    cb.validate = 'required';
    cb.validationEvents = 'blur';

    expect(cb.getAttribute('validate')).toEqual('required');
    expect(cb.getAttribute('validation-events')).toEqual('blur');

    cb.validationEvents = null;

    expect(cb.getAttribute('validate')).toEqual('required');
    expect(cb.getAttribute('validation-events')).toEqual(null);

    cb.validate = null;

    expect(cb.getAttribute('validate')).toEqual(null);
    expect(cb.getAttribute('validation-events')).toEqual(null);
  });

  it('should set label required indicator', () => {
    const className = 'no-required-indicator';

    expect(cb.getAttribute('validate')).toEqual(null);
    expect(cb.getAttribute('label-required')).toEqual(null);
    expect(cb.labelEl.classList).not.toContain(className);

    cb.validate = 'required';

    expect(cb.getAttribute('validate')).toEqual('required');
    expect(cb.getAttribute('label-required')).toEqual(null);
    expect(cb.labelEl.classList).not.toContain(className);

    // labelRequired=false hides the indicator via the class
    cb.labelRequired = false;

    expect(cb.getAttribute('validate')).toEqual('required');
    expect(cb.getAttribute('label-required')).toEqual(null);
    expect(cb.labelEl.classList).toContain(className);
    expect(cb.labelRequired).toEqual(null);

    cb.labelRequired = true;

    expect(cb.getAttribute('validate')).toEqual('required');
    expect(cb.getAttribute('label-required')).toEqual('true');
    expect(cb.labelEl.classList).not.toContain(className);
    expect(cb.labelRequired).toEqual('true');
  });

  it('should set label text', () => {
    // Setting the label with no .label-text element present shouldn't throw
    let label = cb.labelEl.querySelector('.label-text');
    label.remove();
    cb.label = 'test';
    cb.radioCheckbox = false;

    // Fresh instance: label text round-trips through the property
    document.body.innerHTML = '';
    const elem = new IdsCheckbox();
    document.body.appendChild(elem);
    cb = document.querySelector('ids-checkbox');
    label = cb.labelEl.querySelector('.label-text');

    expect(label.textContent.trim()).toBe('');

    cb.label = 'test';
    label = cb.labelEl.querySelector('.label-text');

    expect(label.textContent.trim()).toBe('test');

    cb.label = null;
    label = cb.labelEl.querySelector('.label-text');

    expect(label.textContent.trim()).toBe('');
  });

  it('should render colored', () => {
    const color = 'emerald07';
    let rootEl = cb.shadowRoot.querySelector('.ids-checkbox');

    expect(rootEl.getAttribute('color')).toEqual(null);
    expect(cb.getAttribute('color')).toEqual(null);

    cb.color = color;
    rootEl = cb.shadowRoot.querySelector('.ids-checkbox');

    expect(rootEl.getAttribute('color')).toEqual(color);
    expect(cb.getAttribute('color')).toEqual(color);

    cb.color = false;
    rootEl = cb.shadowRoot.querySelector('.ids-checkbox');

    expect(rootEl.getAttribute('color')).toEqual(null);
    expect(cb.getAttribute('color')).toEqual(null);
    expect(cb.color).toEqual(null);
  });

  it('should render value', () => {
    const value = 'test';

    expect(cb.getAttribute('value')).toEqual(null);

    cb.value = value;

    expect(cb.getAttribute('value')).toEqual(value);
    expect(cb.input.value).toEqual(value);

    cb.value = null;

    expect(cb.getAttribute('value')).toEqual(null);
  });

  it('should set indeterminate', () => {
    expect(cb.getAttribute('indeterminate')).toEqual(null);
    expect(cb.input.classList).not.toContain('indeterminate');

    cb.indeterminate = true;

    expect(cb.getAttribute('indeterminate')).toEqual('true');
    expect(cb.input.classList).toContain('indeterminate');

    cb.indeterminate = false;

    expect(cb.getAttribute('indeterminate')).toEqual(null);
    expect(cb.input.classList).not.toContain('indeterminate');

    cb.indeterminate = true;

    expect(cb.getAttribute('indeterminate')).toEqual('true');
    expect(cb.input.classList).toContain('indeterminate');

    // Clicking the input clears the indeterminate state
    cb.input.click();

    expect(cb.getAttribute('indeterminate')).toEqual(null);
    expect(cb.input.classList).not.toContain('indeterminate');
  });

  it('should render display horizontal', () => {
    let rootEl = cb.shadowRoot.querySelector('.ids-checkbox');

    expect(rootEl.classList).not.toContain('horizontal');
    expect(cb.getAttribute('horizontal')).toEqual(null);

    cb.horizontal = true;
    rootEl = cb.shadowRoot.querySelector('.ids-checkbox');

    expect(rootEl.classList).toContain('horizontal');
    expect(cb.getAttribute('horizontal')).toEqual('true');

    cb.horizontal = false;
    rootEl = cb.shadowRoot.querySelector('.ids-checkbox');

    expect(rootEl.classList).not.toContain('horizontal');
    expect(cb.getAttribute('horizontal')).toEqual(null);
    expect(cb.horizontal).toEqual(null);
  });

  it('should dispatch native events', () => {
    // NOTE: was 'dbclick' — the actual DOM event name is 'dblclick'
    const events = ['change', 'focus', 'keydown', 'keypress', 'keyup', 'click', 'dblclick'];
    events.forEach((evt) => {
      let response = null;
      cb.addEventListener(evt, () => {
        response = 'triggered';
      });
      const event = new Event(evt);
      cb.input.dispatchEvent(event);

      expect(response).toEqual('triggered');
    });
  });

  it('should remove events', () => {
    cb.input = null;
    document.body.innerHTML = '';
    const elem = new IdsCheckbox();
    document.body.appendChild(elem);
    cb = document.querySelector('ids-checkbox');

    // Detach the component's event handlers, then verify nothing re-triggers
    cb.handleCheckboxChangeEvent('remove');
    cb.handleNativeEvents('remove');
    // NOTE: was 'dbclick' — the actual DOM event name is 'dblclick'
    const events = ['change', 'focus', 'keydown', 'keypress', 'keyup', 'click', 'dblclick'];
    events.forEach((evt) => {
      let response = null;
      cb.addEventListener(`trigger${evt}`, () => {
        response = 'triggered';
      });
      const event = new Event(evt);
      cb.input.dispatchEvent(event);

      expect(response).not.toEqual('triggered');
    });
  });

  it('should render template', () => {
    document.body.innerHTML = '';
    cb = document.createElement('ids-checkbox');
    cb.setAttribute('color', 'ruby07');
    cb.setAttribute('disabled', 'true');
    cb.setAttribute('horizontal', 'true');
    cb.setAttribute('checked', 'true');
    cb.setAttribute('label-required', 'false');
    cb.setAttribute('indeterminate', 'true');
    cb.template();

    expect(cb.getAttribute('disabled')).toEqual('true');
    const rootEl = cb.shadowRoot.querySelector('.ids-checkbox');

    expect(rootEl.classList).toContain('disabled');
    expect(rootEl.classList).toContain('horizontal');
    expect(cb.getAttribute('horizontal')).toEqual('true');
    expect(cb.getAttribute('checked')).toEqual('true');
    expect(cb.checked).toEqual('true');
    expect(cb.getAttribute('indeterminate')).toEqual('true');
  });
});
<file_sep>/app/ids-skip-link/index.js
import IdsSkipLink from '../../src/ids-skip-link/ids-skip-link';
<file_sep>/src/ids-list-view/README.md
# Ids List View Component
## Description
Displays a set of related data objects and their attributes in list format.
## Use Cases
- Best for limited attribute data that may or may not include clear differentiators like status.
- Used to display relevant objects. The list view container can feature checkboxes, search bar, hyperlinks, and other elements.
- Allows users to assign/remove objects. Displays when one or more rows are selected.
- Can alert users of updates on objects.
- Lists may be single or multiple selected
- You can have a fixed list toolbar on top, which may contain a title and filtering/search options
- You can have a contextual action toolbar for selected items
- Paging is supported
## Terminology
- Card: UI design pattern that groups related information that resembles a card
- Group Action: A special toolbar inside the card content area that can be used to act on the content.
## Features (With Code Examples)
This example shows using a list view with an html template element bound to a dataset. This example is showing the list in a card but the card is optional.
The template shows the use of a string substitution to access the data element. Note that `dataset` is required to loop over the dataset option passed into the control.
```html
<ids-card>
<div slot="card-header">
<ids-text font-size="20" type="h2">Card Title One</ids-text>
</div>
<div slot="card-content">
<ids-list-view id="list-view-1" virtual-scroll="true">
<template>
<ids-text font-size="16" type="h2">${productName}</ids-text>
<ids-text font-size="12" type="span">Count: ${units}</ids-text>
<ids-text font-size="12" type="span">Price: $ ${unitPrice}</ids-text>
</template>
</ids-list-view>
</div>
</ids-card>
```
```js
const listView = document.querySelector('#list-view-1');
listView.data = products;
```
## Themeable Parts
- `container` allows you to further style the root container element
- `list` allows you to further style the `<ul>` elements text element
- `listitem` allows you to further style the `<li>` elements text element
## States and Variations (With Code Examples)
- Hover
- Selected
- Focus
- Disabled
## Keyboard Guidelines
- <kbd>Tab</kbd> When a list is tabbed to, select the first item if nothing else is already selected. A second tab will take the user out of the widget to the next tab stop on the page.
- <kbd>Up/down arrow</kbd> navigate up and down the list.
- <kbd>Shift+F10</kbd> If the current item has an associated context menu, then this key combination will launch that menu.
- <kbd>Space</kbd> toggles <a href="http://access.aol.com/dhtml-style-guide-working-group/#checkbox" target="_blank">checkboxes</a> in the case of multi select or a list item in case of normal select
## Responsive Guidelines
- The list is 100% of the parent container in height and width so can be used in a widget object or responsive grid object.
- The list body will expand vertically and horizontally to fill the size of its parent container.
- When used in homepages, special rules apply with sizes.
## Converting from Previous Versions
** From 3.x to 4.x**
- Single select roughly replaces the inforListBox component.
- Multi select is a new feature, however it replaces the listbox with checkboxes construct.
** From 4.x to 5.x**
- If using properties/settings these are now attributes.
- Markup has changed to a custom element `<ids-list-view></ids-list-view>`
- If using events events are now plain JS events for example
- The template is now a template element that uses simple string substitution
- Can now be imported as a single JS file and used with encapsulated styles (in some browsers)
## Accessibility Guidelines
- 1.1.1 Non-text Content - All images, links and icons have text labels for screen readers when the formatters are used.
- 1.4.1 Use of Color - Color is not used as the only visual means of conveying information, indicating an action, prompting a response, or distinguishing a visual element. Ensure the color tags that indicate state like OK, cancel, etc. have other ways to indicate that information. This is failing.
- 1.4.3 Contrast (Minimum) - The visual presentation of text and images of text has a contrast ratio of at least 4.5:1. All statuses and objects must pass.
- 2.1.1 Keyboard - Make all functionality available from a keyboard. The grid has keyboard shortcuts and is usable with a screen reader due to the addition of aria tags.
## Regional Considerations
Titles should be localized in the current language. All elements will flip to the alternate side in Right To Left mode. Consider that in some languages text may be a lot longer (German), and in some cases it can't be wrapped (Thai). For some of these cases text-ellipsis is supported.
<file_sep>/src/ids-popup/TODO.md
# TODO on the IDS Popup
- [x] Fix centering - `align="center"` should position the element directly over top
- [x] See if we can fix the attribute display in the DOM to match what comes from `get align()`;
- [x] Fix the bad placement on initial load
- [x] Make switching the alignment target work
- [x] Influence the "direction" the popup is placed in when using an alignEdge with coordinate-based placement
- [x] Figure out how to fix `top, top` and `left, left` when setting opposite dimensions, like `align-x="top"` or `align-y="left"`
- [x] Add MutationObserver detection for the Popup content wrapper, do a `refresh()` when it changes
- [x] Tests for basic placement
- [x] Fix bad input tests (see functional tests)
- [x] Move current Popup `alignTarget` samples to another page, use a button on the index to place Popup by target.
- [ ] Create a "Click in the boundary" page that places a Popup by coordinates.
- [x] Add Popup "open" (visible) and "closed" (hidden) states.
- [x] Figure out how to get default placement/visibility working again after adding `shouldUpdate: true` setting to the `connectedCallback`
- [ ] Add tests for "type"
- [x] Improve test page for placement within the page
- [ ] Add option to close on clicking out in the page
- [ ] Improve "alignTarget" to accept an element reference directly (IDS Popup Menu).
<file_sep>/src/ids-virtual-scroll/ids-virtual-scroll.d.ts
// Ids is a JavaScript project, but we define TypeScript declarations so we can
// confirm our code is type safe, and to support TypeScript users.
export default class IdsVirtualScroll extends HTMLElement {
  /**
   * Markup template for a single rendered item: either a function that receives
   * the item and returns its markup, or a static markup string.
   * (Previously typed `(item: unknown) => string | string`, which parses as a
   * function returning `string | string` and rejects plain string templates.)
   */
  itemTemplate: ((item: unknown) => string) | string;

  /** Attach a dataset that matches the list template and render */
  data: Array<unknown>;

  /** Set internal element that will be the scrollable area */
  scrollTarget?: HTMLElement;

  /** Set the scroll top position and scroll down to that location */
  scrollTop: number;

  /** The height in pixels we want the scroll area to be */
  height: number;

  /** The height of each item in the list, must be fixed size */
  itemHeight: number;

  /** The number of elements in the dataset. This is also set internally when attaching data */
  itemCount: number;

  /** The number of extra elements to render to improve or tweak the scroll experience */
  bufferSize: number;
}
<file_sep>/src/ids-badge/README.md
# Ids Badge Component
## Description
The IDS Badge Component consists of a small circle containing an alphanumeric value or icon that is attached to another object. Badges are used to display alphanumeric values so a user can easily skim object-related values.
## Use Cases
- Badges can be used to give a non-critical status update on a piece of information or action in your application.
- Badges can be used to provide users with information about an object that is worthy of attention.
## Terminology
**Badge**: used to inform users of the status of an object or of an action.
**Color**: This is the color of the badge.
**Shape**: This is the shape of the badge. It can be round or rounded rectangle.
## Themeable Parts
- `badge` allows you to further style the badge element
## Features (With Code Examples)
A normal/ no properties badge used as a web component.
```html
<ids-badge>5</ids-badge>
```
A colored badge is done by adding a `color` attribute and one of the following:
alert, error, info, warning, and success.
```html
<ids-badge color="alert">10</ids-badge>
<ids-badge color="error">1500</ids-badge>
<ids-badge color="info">25k+</ids-badge>
<ids-badge color="warning">16</ids-badge>
<ids-badge color="success">5</ids-badge>
```
A shape badge is done by adding a `shape` attribute and one of the following: normal, and round. When you don't set the shape, normal will be the default value.
```html
<ids-badge color="alert" shape="round">10</ids-badge>
```
A badge with an icon can be configured by adding the `<ids-icon></ids-icon>` component inside of the badge.
```html
<ids-badge color="alert" shape="round"><ids-icon icon="pending" size="normal"></ids-icon></ids-badge>
```
An audible span can be configured by adding `<ids-text audible="true"></ids-text>` inside of the badge.
```html
<ids-badge color="error">404 <ids-text audible="true">In Error Condition</ids-text></ids-badge>
```
## Settings and Attributes
- `color` {string} Sets the color of the badge e.g. `alert`, `error`, or `info`.
- `shape` {string} `normal` is the default value of the shape, you can also change it to `round`.
## States and Variations
- Color
- Shape
## Keyboard Guidelines
Badges do not have tab stops and have no keyboard interaction on their own, but they may be placed in a grid cell or object that has tab focus.
## Accessibility Guidelines
- 1.4.1 Use of Color - Color is not used as the only visual means of conveying information, indicating an action, prompting a response, or distinguishing a visual element. Ensure the color tags that indicate state like OK, cancel, etc. have other ways to indicate that information. This is failing.
- 1.4.3 Contrast (Minimum) - The visual presentation of text and images of text has a contrast ratio of at least 4.5:1 Ensure the color badges pass contrast.
<file_sep>/app/ids-upload-advanced/test-sandbox.js
// Supporting components
import IdsUploadAdvanced from '../../src/ids-upload-advanced/ids-upload-advanced';
/*
* ========================================================
* Use one to test, from below methods
* (1) `uploadFile()` - Use demo app server to upload, will remove after one minute
* (2) `uploadFileDummy()` - Use dummy logic to loop interval, no files sent anywhere
 * Else use component's `sendByXHR()` method, which requires the url attribute (url="http://localhost:4300/upload"), will remove after one minute
* ========================================================
*/
const useSend = 1;
/**
 * Get a random integer within an inclusive range.
 * @param {number} min The minimum value (inclusive)
 * @param {number} max The maximum value (inclusive)
 * @returns {number} Calculated random integer between min and max
 */
function randomInteger(min, max) {
  return Math.floor(Math.random() * (max - min + 1)) + min;
}
/**
 * Dummy send method, for demo purposes only.
 * Simulates an upload by ticking a 50ms interval; the `formData` argument
 * is never transmitted and no files are sent anywhere.
 * @param {object} formData Contains the file data (unused by the simulation)
 * @param {object} uiElem The ui element receiving the lifecycle callbacks
 * @returns {void}
 */
// eslint-disable-next-line
function uploadFileDummy(formData, uiElem) {
  const total = 100;
  let loaded = 0;

  // Factories producing the fake event payloads handed to the UI element.
  const fakeEvents = {
    completeHandler: () => ({ loaded, total, target: { readyState: 4, status: 200 } }),
    progressHandler: () => ({ loaded, total }),
    abortHandler: () => null,
    errorHandler: () => ({
      loaded,
      total,
      target: {
        readyState: 4,
        status: 401,
        statusText: '<em>Error</em>: Some server issue!'
      }
    }),
  };

  const timer = setInterval(() => {
    const roll = randomInteger(1, 100);
    loaded++;
    if (loaded >= 100) {
      // Complete
      clearInterval(timer);
      uiElem.completeHandler(fakeEvents.completeHandler());
      return;
    }
    if (roll === loaded) {
      // Randomly simulated error
      clearInterval(timer);
      uiElem.errorHandler(fakeEvents.errorHandler());
      return;
    }
    // Still in progress
    uiElem.progressHandler(fakeEvents.progressHandler());
  }, 50);

  // File abort: stop the simulation and notify the UI element.
  uiElem?.addEventListener('abort', () => {
    clearInterval(timer);
    uiElem.abortHandler(fakeEvents.abortHandler());
  });
}
/**
 * Send files to the Demo App server; files will be removed after one minute.
 * In the Chrome network tab, simulate a slow internet connection for testing.
 * @param {object} formData Contains the file data.
 * @param {object} uiElem The ui element receiving the XHR lifecycle callbacks
 * @returns {void}
 */
// eslint-disable-next-line
function uploadFile(formData, uiElem) {
  const request = new XMLHttpRequest();

  // Wire the XHR lifecycle into the UI element's handlers.
  request.upload.addEventListener('progress', uiElem.progressHandler.bind(uiElem), false);
  request.addEventListener('load', uiElem.completeHandler.bind(uiElem), false);
  request.addEventListener('error', uiElem.errorHandler.bind(uiElem), false);
  request.addEventListener('abort', uiElem.abortHandler.bind(uiElem), false);

  request.open('POST', 'http://localhost:4300/upload');
  request.send(formData);

  // File abort: cancel the in-flight request when the UI element aborts.
  uiElem?.addEventListener('abort', () => {
    request.abort();
  });
}
/*
* ========================================================
* After DOM Loaded
* ========================================================
*/
document.addEventListener('DOMContentLoaded', () => {
  // Point every upload component in the "url" demo section at the demo server.
  const urlAttrTargets = document.querySelectorAll('#upload-advanced-url ids-upload-advanced');
  urlAttrTargets.forEach((el) => {
    el.url = 'http://localhost:4300/upload';
  });

  /**
   * Send method
   * (1) Use demo app server to upload, will remove after one minute
   * (2) Or else, use dummy logic to loop interval, no files sent anywhere
   * Else use component's `sendByXHR` method, which requires the url attribute, will remove after one minute
   */
  const customSendTarget = document.querySelector('#upload-advanced-send');
  if (customSendTarget) {
    if (useSend === 1) {
      customSendTarget.send = uploadFile;
    } else if (useSend === 2) {
      customSendTarget.send = uploadFileDummy;
    }
  }

  /*
   * ========================================================
   * Events that may be triggered
   * ========================================================
   */
  /* eslint-disable */
  // Element to target
  const targetElem = document.querySelector('#elem-upload-advanced-events');

  // Files enter in drag area
  targetElem?.addEventListener('filesdragenter', (e) => {
    // console.log('Files enter in drag area', e);
  });
  // Files drop in to drag area
  targetElem?.addEventListener('filesdrop', (e) => {
    // console.log('Files drop in to drag area', e);
  });
  // File begin upload
  targetElem?.addEventListener('beginupload', (e) => {
    // console.log('File begin upload', e);
  });
  // File abort
  targetElem?.addEventListener('abort', (e) => {
    // console.log('File abort', e);
  });
  // File error
  targetElem?.addEventListener('error', (e) => {
    // console.log('File error', e);
  });
  // File complete
  targetElem?.addEventListener('complete', (e) => {
    // console.log('File complete', e);
  });
  // Click close button
  targetElem?.addEventListener('closebuttonclick', (e) => {
    // console.log('Clicked on close button', e);
  });
});
<file_sep>/src/ids-popup/README.md
# Ids Popup Component
The IDS Popup Component allows for the placement of any HTML content in a fixed/absolute-positioned container anywhere on a page. The component provides API for setting coordinates, or aligning the container in relation to a "parent" element on the page. Additional API exists for modifying alignment, visibility, and controlling animation.
This component is a building-block component for many "placeable" IDS Components, such as the [Popup Menu](../ids-popup-menu/README.md) and [Modal](../ids-modal/README.md)
## Use Cases
- Placing HTML content using X/Y coordinates
- Placing HTML content against a target element and using X/Y offsets
## Terminology
### Alignment
**Align Target** Defines an external element that serves as a "triggering" element for the Popup.
**Align Edge** Also called the "primary alignment", given two alignment values, the first one is the edge of an Align Target that the Popup will be placed against immediately. For example "bottom" will cause the Popup to be aligned at the bottom of the Align Target.
**Primary Alignment** See "Align Edge"
**Secondary Alignment** Given two alignment values, the secondary alignment is optional, and will cause the Popup to line up the given edge with the Align Target's given edge. For example, given `top, left`, the secondary alignment will cause the Popup to align primarily to the top of the Align Target, but will also line up both the Popup and the Align Targets' left edges.
### Other
**Arrow** Popups can optionally display arrows that can point to content when used with an Align Target.
**Type** Defines the style of Popup that can be used. There are several different display types, the most common one being `menu`.
## Themeable Parts
- `popup` allows you to further style or adjust the outer popup element
- `arrow` allows you to adjust the arrow element
## Features (with code samples)
Creating a Context-menu style that would open on click might look like the following:
```html
<ids-popup id="my-popup" x="0" y="10" align="top, left">
<div slot="content">My Popup</div>
</ids-popup>
```
To create a Popup that appears to align itself against a button, you could do the following:
```html
<ids-popup id="my-popup" x="10" y="10" align="top, left" alignTarget="#my-button">
<div slot="content">My Popup</div>
</ids-popup>
...
<ids-button id="my-button">
<span slot="text">My Button</span>
</ids-button>
```
## Usage Tips
- When making a Popup that is placed in reference to an adjacent element, it must be placed AFTER it in the DOM. Placing it BEFORE the adjacent element can cause its placement to be incorrect on its first render.
- When using an `alignTarget`, also using the `arrow` setting and pointing it in the direction of the `alignTarget` can help contextualize the relationship between the two elements.
<file_sep>/app/ids-radio/index.js
import IdsRadio from '../../src/ids-radio/ids-radio';
<file_sep>/test/ids-text/ids-text-e2e-test.js
describe('Ids Text e2e Tests', () => {
const url = 'http://localhost:4444/ids-text';
beforeAll(async () => {
await page.goto(url, { waitUntil: 'load' });
});
it('should not have errors', async () => {
await expect(page.title()).resolves.toMatch('IDS Text Component');
});
it('should pass Axe accessibility tests', async () => {
await page.setBypassCSP(true);
await page.goto(url, { waitUntil: 'load' });
await expect(page).toPassAxeTests();
});
});
<file_sep>/src/ids-mask/ids-mask-common.d.ts
/** Empty string constant shared by the mask routines. */
export const EMPTY_STRING: string;
/** Placeholder character used by masks. */
export const PLACEHOLDER_CHAR: string;
/** Marker used to trap the caret at a fixed mask position. */
export const CARET_TRAP: string;
/** Matches non-digit characters. */
export const NON_DIGITS_REGEX: RegExp;
/** Matches digit characters. */
export const DIGITS_REGEX: RegExp;
/** Matches alphabetic characters. */
export const ALPHAS_REGEX: RegExp;
/** Matches any character. */
export const ANY_REGEX: RegExp;
/** Default option set accepted by the mask conform routine. */
export const DEFAULT_CONFORM_OPTIONS: {
  caretTrapIndexes: Array<number>,
  guide?: boolean,
  previousMaskResult?: string,
  placeholderChar?: string,
  placeholder?: string,
  selection?: {
    start?: number
    end?: number
  }
  keepCharacterPositions?: boolean
};
/** Converts a string-form mask pattern into an array of literal strings and RegExps. */
export function convertPatternFromString(pattern?: string): Array<string|RegExp> | undefined;
<file_sep>/src/ids-tag/README.MD
# Ids Tag Component
## Description
Tags are UI embellishments that are used to label, categorize and organize keywords surrounding
some contextual element such as a form.
## Use Cases
- Use when the content on your page is mapped into multiple categories and does not only fit into one hierarchical category.
- Use when you want users to contribute data to your website and let them organize their content themselves.
## Terminology
- **Tag**: A UI embellishments for classification
- **Clickable/Dismissible**: Tag can be closed and removed with an X button
- **Classification**: How tags are labelled with colors and text
- **Disabled**: Tag can be disabled so it cannot be followed or clicked.
## Features (With Code Examples)
A normal tag used as a web component.
```html
<ids-tag>Normal Tag</ids-tag>
```
A normal tag used using just the css. This is limited to normal tags only.
```html
<span class="ids-tag">Normal Tag</span>
```
A colored tag is done by adding the `color` attribute and one of the following: secondary, error, success, caution or a hex color beginning with a # character.
```html
<ids-tag color="secondary">Secondary Tags</ids-tag>
<ids-tag color="error">Error Tag</ids-tag>
<ids-tag color="success">Success Tag</ids-tag>
<ids-tag color="caution">Warning Tag</ids-tag>
<ids-tag color="#EDE3FC">Custom Tag Color</ids-tag>
```
## Settings and Attributes
- `clickable` {boolean} Turns on the functionality to make the tag clickable like a link
- `dismissible` {boolean} Turns on the functionality to add an (x) button to clear remove the tag
- `color` {string} Sets the color to a internal color such as `azure` or may be a hex starting with a `#`
## Themeable Parts
- `checkbox` allows you to further style the checkbox input element
- `slider` allows you to further style the sliding part of the switch
- `label` allows you to further style the label text
## States and Variations (With Code Examples)
- Color
- Linkable
- Badge
- Disabled
- Closable
## Keyboard Guidelines
- <kbd>Tab/Shift+Tab</kbd>: If the tab is focusable this will focus or unfocus the tag.
- <kbd>Backspace / Alt+Del</kbd>: If the tag is dismissible then this will remove the tag.
- <kbd>Enter</kbd>: If the tag is clickable then this will follow the tag link.
## Responsive Guidelines
- Flows with padding and margin within the width and height of the parent container. Possibly scrolling as needed based on parent dimensions.
## Converting from Previous Versions
- 3.x: Tags have all new markup and classes.
- 4.x: Tags have all new markup and classes for web components.
## Designs
[Design Specs](https://www.figma.com/file/ok0LLOT9PP1J0kBkPMaZ5c/IDS_Component_File_v4.6-(Draft))
## Accessibility Guidelines
- 1.4.1 Use of Color - Color is not used as the only visual means of conveying information, indicating an action, prompting a response, or distinguishing a visual element. Ensure the color tags that indicate states like OK, cancel, etc. have other ways to indicate that information. This is failing.
- 1.4.3 Contrast (Minimum) - The visual presentation of text and images of text has a contrast ratio of at least 4.5:1. Ensure the color tags pass contrast.
## Regional Considerations
Labels should be localized in the current language. The close and link icons will flip to the alternate side in Right To Left mode. Consider that in some languages text may be a lot longer (German), and in some cases it can't be wrapped (Thai).
<file_sep>/test/ids-button/ids-toggle-button-func-test.js
/**
* @jest-environment jsdom
*/
import IdsToggleButton from '../../src/ids-toggle-button/ids-toggle-button';
describe('IdsToggleButton Component', () => {
  let btn;

  beforeEach(async () => {
    const elem = new IdsToggleButton();
    elem.id = 'test-button';
    elem.textOn = 'Test Button (On)';
    // BUG FIX: this line originally assigned `iconOff`, leaving `iconOn`
    // unset (and `iconOff` assigned twice). The tests below expect the
    // "on" icon to be 'star-filled'.
    elem.iconOn = 'star-filled';
    elem.textOff = 'Test Button (Off)';
    elem.iconOff = 'star-outlined';
    elem.pressed = false;
    document.body.appendChild(elem);
    btn = document.querySelector('ids-toggle-button');
  });

  afterEach(async () => {
    document.body.innerHTML = '';
    btn = null;
  });

  it('renders with no errors', () => {
    const errors = jest.spyOn(global.console, 'error');
    btn.remove();
    btn = new IdsToggleButton();
    document.body.appendChild(btn);
    expect(document.querySelectorAll('ids-toggle-button').length).toEqual(1);
    expect(errors).not.toHaveBeenCalled();
    expect(btn.shouldUpdate).toBeTruthy();
  });

  it('can be pressed/unpressed', () => {
    // Both the property and the attribute should drive the pressed state.
    btn.pressed = true;
    expect(btn.icon).toBe('star-filled');
    expect(btn.text).toBe('Test Button (On)');
    btn.pressed = false;
    expect(btn.icon).toBe('star-outlined');
    expect(btn.text).toBe('Test Button (Off)');
    btn.setAttribute('pressed', true);
    expect(btn.icon).toBe('star-filled');
    expect(btn.text).toBe('Test Button (On)');
    btn.setAttribute('pressed', false);
    expect(btn.icon).toBe('star-outlined');
    expect(btn.text).toBe('Test Button (Off)');
  });

  it('can be toggled', () => {
    btn.toggle();
    expect(btn.icon).toBe('star-filled');
    expect(btn.text).toBe('Test Button (On)');
    btn.toggle();
    expect(btn.icon).toBe('star-outlined');
    expect(btn.text).toBe('Test Button (Off)');
  });

  it('cannot be any other type but "default"', () => {
    btn.type = 'primary';
    expect(btn.getAttribute('type')).toBe(null);
    expect(btn.type).not.toBeDefined();
    expect(btn.state.type).toBe('default');
    expect(btn.button.classList.contains('primary')).toBeFalsy();
    btn.setAttribute('type', 'secondary');
    expect(btn.getAttribute('type')).toBe(null);
    expect(btn.type).not.toBeDefined();
    expect(btn.state.type).toBe('default');
    expect(btn.button.classList.contains('secondary')).toBeFalsy();
  });

  it('can set the "on" icon', () => {
    btn.iconOn = 'settings';
    expect(btn.getAttribute('icon-on')).toBe('settings');
    expect(btn.iconOn).toBe('settings');
    expect(btn.icons.on).toBe('settings');
    btn.setAttribute('icon-on', 'mail');
    expect(btn.getAttribute('icon-on')).toBe('mail');
    expect(btn.iconOn).toBe('mail');
    expect(btn.icons.on).toBe('mail');
    // Disabling the "on" icon sets it to the default "star-filled"
    btn.iconOn = '';
    expect(btn.hasAttribute('icon-on')).toBeFalsy();
    expect(btn.iconOn).toBe('star-filled');
    expect(btn.icons.on).toBe('star-filled');
  });

  it('can set the "off" icon', () => {
    btn.iconOff = 'settings';
    expect(btn.getAttribute('icon-off')).toBe('settings');
    expect(btn.iconOff).toBe('settings');
    expect(btn.icons.off).toBe('settings');
    btn.setAttribute('icon-off', 'mail');
    expect(btn.getAttribute('icon-off')).toBe('mail');
    expect(btn.iconOff).toBe('mail');
    expect(btn.icons.off).toBe('mail');
    // Disabling the "off" icon sets it to the default "star-outlined"
    btn.iconOff = '';
    expect(btn.hasAttribute('icon-off')).toBeFalsy();
    expect(btn.iconOff).toBe('star-outlined');
    expect(btn.icons.off).toBe('star-outlined');
  });

  it('can set the "on" text', () => {
    btn.textOn = 'Button is on';
    expect(btn.getAttribute('text-on')).toBe('Button is on');
    expect(btn.textOn).toBe('Button is on');
    expect(btn.texts.on).toBe('Button is on');
    btn.setAttribute('text-on', 'Definitely on');
    expect(btn.getAttribute('text-on')).toBe('Definitely on');
    expect(btn.textOn).toBe('Definitely on');
    expect(btn.texts.on).toBe('Definitely on');
    btn.textOn = '';
    expect(btn.hasAttribute('text-on')).toBeFalsy();
    expect(btn.textOn).toBe('');
    expect(btn.texts.on).toBe('');
  });

  it('can set the "off" text', () => {
    btn.textOff = 'Button is off';
    expect(btn.getAttribute('text-off')).toBe('Button is off');
    expect(btn.textOff).toBe('Button is off');
    expect(btn.texts.off).toBe('Button is off');
    btn.setAttribute('text-off', 'Definitely off');
    expect(btn.getAttribute('text-off')).toBe('Definitely off');
    expect(btn.textOff).toBe('Definitely off');
    expect(btn.texts.off).toBe('Definitely off');
    btn.textOff = '';
    expect(btn.hasAttribute('text-off')).toBeFalsy();
    expect(btn.textOff).toBe('');
    expect(btn.texts.off).toBe('');
  });
});
<file_sep>/src/ids-trigger-field/README.md
# Ids Trigger Field Component
## Description
We include a trigger field component that helps lay out an input next to a button (the trigger button). This component can be used on its own, but it's generally used to build other internal components (datepicker, timepicker, etc.).
## Use Cases
- When you want an input with a trigger button icon that opens some kind of selection dialog that works with the field.
## Terminology
- **Trigger**: The button next to the input element
- **Input**: The input element
## Features (With Code Examples)
Add an ids-trigger-field to the page and inside its slot add an ids-input and ids-trigger-button.
```html
<ids-trigger-field tabbable="false">
<ids-input label="Date Field" size="sm"></ids-input>
<ids-trigger-button>
<ids-text audible="true">Date Field trigger</ids-text>
<ids-icon slot="icon" icon="schedule"></ids-icon>
</ids-trigger-button>
</ids-trigger-field>
```
## Settings and Attributes
- `tabbable` {boolean} Turns on the functionality allow the trigger to be tabbable. For accessibility reasons this should be on in most cases and this is the default.
- `appearance` {string} Turns on the functionality to have more compact field size (TODO)
## Converting from Previous Versions
- 3.x: There was no separate trigger field component.
- 4.x: There was no separate trigger field component.
## Regional Considerations
In Right To Left Languages the trigger field and alignment will flow to the other side.
<file_sep>/src/ids-base/ids-dom-utils.d.ts
/** DOM traversal helpers for working with Shadow DOM boundaries. */
declare const IdsDOMUtils: {
  /** Returns the closest Shadow Root to the provided element, if applicable */
  getClosestShadow(node: HTMLElement): Node | undefined;
  /** Used specifically to detect the closest Shadow Root container OR `document`. */
  getClosestContainerNode(node: HTMLElement): Node;
  /**
   * Returns the closest Root Node parent of a provided element. If the provided element is inside
   * a Shadow Root, that Shadow Root's host's parentNode is provided. `document` is used as a
   * fallback. This method allows for `querySelector()` in some nested Shadow Roots to work properly
   */
  getClosestRootNode(node: HTMLElement): Node;
};
export default IdsDOMUtils;
<file_sep>/app/ids-checkbox/example.js
// Supporting components
import IdsButton from '../../src/ids-button/ids-button';
import IdsLayoutGrid from '../../src/ids-layout-grid/ids-layout-grid';
document.addEventListener('DOMContentLoaded', () => {
  const setBtn = document.querySelector('#btn-set-indeterminate');
  const clearBtn = document.querySelector('#btn-remove-indeterminate');
  const checkbox = document.querySelector('#cb-indeterminate');

  // Toggle the demo checkbox's indeterminate state from the two buttons.
  setBtn?.addEventListener('click', () => {
    checkbox.indeterminate = true;
  });
  clearBtn?.addEventListener('click', () => {
    checkbox.indeterminate = false;
  });
});
<file_sep>/app/ids-base/ids-constants.js
import { props } from '../../src/ids-base/ids-constants'; //eslint-disable-line
<file_sep>/src/ids-render-loop/ids-render-loop-global.js
import { IdsRenderLoop } from './ids-render-loop';
// Stores the global RenderLoop instance.
// If access to the RenderLoop directly is needed, app developers should use this
// single instance and NOT construct another RenderLoop.
/** @type {any} */
const renderLoop = new IdsRenderLoop();
// Default export is the shared instance; the class is re-exported for typing.
export default renderLoop;
export { IdsRenderLoop };
| 374ce4cea55ff9f127abff0aed1d5475943ad0dd | [
"JavaScript",
"TypeScript",
"Markdown"
] | 139 | TypeScript | amarp86/enterprise-wc | 90cf9925f3760041afc6f79f54b7037588b61df2 | e1645b5b5008d4fb115d0e217af4d3c608253bab |
refs/heads/master | <repo_name>wangweihong/test<file_sep>/vendor/libnet/libnetif.go
package libnet
import (
"log"
dockerClient "github.com/fsouza/go-dockerclient"
)
var (
providers = make(map[string]Libnet)
defaultProvider = "fsouza"
)
// Libnet is the abstraction implemented by low-level Docker network client
// providers (registered via Register). nid is a network ID, cid a
// container ID.
type Libnet interface {
	// Setup configures the provider with the given endpoint
	// (presumably the Docker daemon address — confirm with implementations).
	Setup(endpoint string) error
	GetNetworks() ([]dockerClient.Network, error)
	CreateNetwork(dockerClient.CreateNetworkOptions) (*dockerClient.Network, error)
	RemoveNetwork(nid string) error
	InspectNetwork(nid string) (*dockerClient.Network, error)
	ListNetworkContainers(nid string) (map[string]dockerClient.Endpoint, error)
	DisconnectContainer(nid, cid string, force bool) error
	ConnectContainer(nid, cid string, conf dockerClient.EndpointConfig) error
}
// Register is used by low-level client providers to register themselves
// under a unique name. Application code should NOT call it directly.
// A nil provider or a duplicate name aborts the process.
func Register(name string, provider Libnet) error {
	if provider == nil {
		log.Fatal("register:libnet provider is nil")
	}
	if _, dup := providers[name]; dup {
		// log.Fatal does not interpret format verbs; Fatalf is required.
		log.Fatalf("register [%s] called twice for one provider", name)
	}
	providers[name] = provider
	return nil
}
// NewNetworkManager returns the default registered network provider.
// It aborts the process if the default provider was never registered.
func NewNetworkManager() Libnet {
	net, exists := providers[defaultProvider]
	if !exists {
		// log.Fatal does not interpret format verbs; Fatalf is required.
		log.Fatalf("%s not support", defaultProvider)
	}
	return net
}
<file_sep>/kv.go
package main
import (
"encoding/json"
"fmt"
"log"
"time"
"github.com/docker/libkv"
"github.com/docker/libkv/store"
"github.com/docker/libkv/store/etcd"
)
var (
	// defaultEndpoint is the etcd endpoint dialed by init(); hard-coded for the demo.
	defaultEndpoint = "192.168.14.130:2379"
	// kv is the shared libkv store handle, initialised in init().
	kv store.Store
	// mainKey is the root key under which all appnet data is stored.
	mainKey = "/appnet"
	// kvChannel carries backup events consumed by kvLoop.
	kvChannel = make(chan kvEvent)
)

// Event codes carried in kvEvent.event.
const (
	EventCreateNetwork   = 1
	EventRemoveNetwork   = 2
	EventAddContainer    = 3
	EventRemoveContainer = 4
	EventCreatePool      = 5
	EventRemovePool      = 6
)

// kvEvent describes one backup request sent to kvLoop over kvChannel.
type kvEvent struct {
	pool  *VirNetworkPool // pool the event relates to; its kvBackupChan receives the result
	nid   string          // network ID (network events)
	cid   string          // container ID (container events; unused by current handlers)
	event int             // one of the Event* codes above
}
// kv store layout:
//   /appnet/env1/net1 --> net1 info
//               /net2 --> net2 info
//               /net3 --> net3 info

// kvCreateNetwork persists the network v.nid of v.pool under
// /appnet/<environment>/<nid>. It fails if the key already exists.
func kvCreateNetwork(v kvEvent) error {
	path := fmt.Sprintf("%s/%s/%s", mainKey, v.pool.Environment, v.nid)
	fmt.Printf(" key %s\n", path)
	exist, err := kv.Exists(path)
	if err != nil {
		return err
	}
	if exist {
		// Report the full key (the original printed only the environment,
		// which does not identify the conflicting entry).
		return fmt.Errorf("key %s has already exist", path)
	}
	byteContent, err := json.Marshal(v.pool.Networks[v.nid])
	// Check the marshal error before using its result (the original
	// printed byteContent first).
	if err != nil {
		return err
	}
	fmt.Printf("key:%v,value:%v\n", v.nid, string(byteContent))
	err = kv.Put(path, byteContent, nil)
	if err != nil {
		fmt.Println(err)
		return err
	}
	return nil
}
// kvRemoveNetwork deletes the persisted network v.nid of v.pool from
// /appnet/<environment>/<nid>.
func kvRemoveNetwork(v kvEvent) error {
	path := fmt.Sprintf("%s/%s/%s", mainKey, v.pool.Environment, v.nid)
	fmt.Printf(" key %s\n", path)
	exist, err := kv.Exists(path)
	if err != nil {
		return err
	}
	if !exist {
		return fmt.Errorf("environment %s doesn't exist", v.pool.Environment)
	}
	// Propagate the delete result (the original discarded the error and
	// always returned nil, hiding delete failures).
	return kv.Delete(path)
}
// kvCreatePool creates the directory key /appnet/<environment> for a new pool.
func kvCreatePool(v kvEvent) error {
	path := fmt.Sprintf("%s/%s", mainKey, v.pool.Environment)
	fmt.Printf(" key %s\n", path)

	exist, err := kv.Exists(path)
	if err != nil {
		return err
	}
	if exist {
		return fmt.Errorf("pool %s has already exist", v.pool.Environment)
	}

	opts := &store.WriteOptions{IsDir: true}
	return kv.Put(path, []byte(""), opts)
}
// kvRemovePool removes the whole subtree /appnet/<environment>.
func kvRemovePool(v kvEvent) error {
	path := fmt.Sprintf("%s/%s", mainKey, v.pool.Environment)
	fmt.Printf(" key %s\n", path)

	exist, err := kv.Exists(path)
	if err != nil {
		return err
	}
	if !exist {
		return fmt.Errorf("pool %s doesn't exist", v.pool.Environment)
	}

	return kv.DeleteTree(path)
}
// kvLoop is the single consumer of kvChannel: after every pool update it
// applies the corresponding backup action to the kv store and reports the
// outcome on the originating pool's kvBackupChan.
func kvLoop() {
	// tick := time.Tick(5 * time.Second)
	for ev := range kvChannel {
		var err error
		switch ev.event {
		case EventCreateNetwork:
			err = kvCreateNetwork(ev)
		case EventRemoveNetwork:
			err = kvRemoveNetwork(ev)
		case EventCreatePool:
			err = kvCreatePool(ev)
		case EventRemovePool:
			err = kvRemovePool(ev)
		default:
			// Container events have no handler yet; ignore them.
			continue
		}
		ev.pool.kvBackupChan <- err
	}
}
// init registers the etcd backend with libkv and connects to the
// hard-coded defaultEndpoint, storing the handle in the package-level kv.
// The process aborts if the store cannot be created.
func init() {
	etcd.Register()
	st, err := libkv.NewStore(
		store.ETCD,
		[]string{defaultEndpoint},
		&store.Config{
			ConnectionTimeout: 10 * time.Second,
		},
	)
	if err != nil {
		log.Fatal(err)
	}
	kv = st
}
<file_sep>/antman.go
package main
import (
"flag"
"fmt"
"log"
"net/http"
"github.com/ant0ine/go-json-rest/rest"
)
var (
	// kvStore / kvType would hold the key-value store address and backend
	// type; the flags that set them are currently commented out in main().
	kvStore string
	kvType  string
)

// Response is the common JSON envelope returned by every HTTP handler.
type Response struct {
	// Resource json.RawMessage `json:"resource"`
	Content interface{} `json:"content"` // payload; shape depends on the endpoint
	Message string      `json:"message"` // human-readable status message
	Code    string      `json:"code"`    // "0" on success
}
// GoodResponse marks r as a successful result of the given HTTP method
// and returns the same Response for chaining.
func GoodResponse(method string, r *Response) *Response {
	r.Code = "0"
	r.Message = method + " successful"
	return r
}
// Pair is a minimal name/ID projection of a network, used by list endpoints.
type Pair struct {
	Name string `json:"name"`
	ID   string `json:"id"`
}
//更改为发现未存在的环境时,自动创建
/*
func CreateNetworkPool(w rest.ResponseWriter, r *rest.Request) {
environment := r.PathParam("env")
fmt.Printf("request to create network pool:%s\n", environment)
err := PoolManager.AddPool(environment)
if err != nil {
rest.Error(w, err.Error(), http.StatusInternalServerError)
return
}
resp := Response{}
resp.Code = "0"
resp.Message = "create network pool success"
w.WriteJson(&resp)
}
*/
// RemoveNetworkPool handles DELETE /api/appsoar-network/pool/:env and
// removes the whole network pool of that environment.
func RemoveNetworkPool(w rest.ResponseWriter, r *rest.Request) {
	env := r.PathParam("env")
	if err := PoolManager.RemovePool(env); err != nil {
		rest.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	w.WriteJson(&Response{
		Code:    "0",
		Message: "Remove network pool success",
	})
}
// GetNetworkPool handles GET /api/appsoar-network/:env/network and lists
// the name/ID pairs of all networks in the environment's pool. An unknown
// environment yields an empty list (map lookup returns a zero-value pool).
func GetNetworkPool(w rest.ResponseWriter, r *rest.Request) {
	w.Header().Set("Access-Control-Allow-Origin", "*")
	env := r.PathParam("env")

	networks, err := PoolManager.Pools[env].GetNetworks()
	if err != nil {
		rest.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}

	pairs := make([]Pair, 0)
	for _, net := range networks {
		pairs = append(pairs, Pair{Name: net.Name, ID: net.ID})
	}

	w.WriteJson(Response{
		Content: pairs,
		Message: "",
		Code:    "0",
	})
}
// GetNetwork returns the detailed information for network :id in
// environment :env.
func GetNetwork(w rest.ResponseWriter, r *rest.Request) {
	w.Header().Set("Access-Control-Allow-Origin", "*")
	environment := r.PathParam("env")
	nid := r.PathParam("id")

	// Guard against unknown environments: the zero-value pool has a nil
	// *sync.RWMutex, so calling InfoNetwork on it would panic.
	pool, exists := PoolManager.Pools[environment]
	if !exists {
		rest.Error(w, fmt.Sprintf("environment %s doesn't exist", environment), http.StatusNotFound)
		return
	}

	network, err := pool.InfoNetwork(nid)
	if err != nil {
		rest.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	w.WriteJson(Response{
		Content: network,
		Message: "",
		Code:    "0",
	})
}
// PostNetwork handles POST /api/appsoar-network/:env/network: it decodes a
// create option, lazily creates the environment's pool when missing, and
// creates the network in it.
func PostNetwork(w rest.ResponseWriter, r *rest.Request) {
	w.Header().Set("Access-Control-Allow-Origin", "*")
	env := r.PathParam("env")

	var opt VirNetworkCreateOption
	if err := r.DecodeJsonPayload(&opt); err != nil {
		rest.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}

	// Auto-create the pool when the environment is seen for the first time.
	if _, exists := PoolManager.Pools[env]; !exists {
		if err := PoolManager.AddPool(env); err != nil {
			rest.Error(w, err.Error(), http.StatusInternalServerError)
			return
		}
	}

	net, err := PoolManager.Pools[env].CreateNetwork(env, opt)
	if err != nil {
		rest.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}

	w.WriteJson(Response{
		Content: *net,
		Message: "",
		Code:    "0",
	})
}
// InfoNetwork handles GET /api/appsoar-network/:env/network/:id and
// returns detailed information for one network.
func InfoNetwork(w rest.ResponseWriter, r *rest.Request) {
	nid := r.PathParam("id")
	environment := r.PathParam("env")

	// Guard against unknown environments: the zero-value pool has a nil
	// *sync.RWMutex, so calling InfoNetwork on it would panic.
	pool, exists := PoolManager.Pools[environment]
	if !exists {
		rest.Error(w, fmt.Sprintf("environment %s doesn't exist", environment), http.StatusNotFound)
		return
	}

	net, err := pool.InfoNetwork(nid)
	if err != nil {
		rest.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	resp := Response{}
	resp.Content = net
	resp.Code = "0"
	resp.Message = ""
	w.WriteJson(resp)
}
// DeleteNetwork handles DELETE /api/appsoar-network/:env/network/:id and
// removes the network from the environment's pool.
func DeleteNetwork(w rest.ResponseWriter, r *rest.Request) {
	nid := r.PathParam("id")
	environment := r.PathParam("env")

	// Guard against unknown environments: the zero-value pool has a nil
	// *sync.RWMutex, so calling RemoveNetwork on it would panic.
	pool, exists := PoolManager.Pools[environment]
	if !exists {
		rest.Error(w, fmt.Sprintf("environment %s doesn't exist", environment), http.StatusNotFound)
		return
	}

	if err := pool.RemoveNetwork(nid); err != nil {
		rest.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	resp := Response{}
	w.WriteJson(GoodResponse("DELETE", &resp))
}
//container
// Containers is the JSON envelope for container listings; each Content
// entry is an id/name/macaddr/ipaddr map (see the disabled GetContainers body).
type Containers struct {
	Content []map[string]string `json:"content"`
	Message string              `json:"message"`
	Code    string              `json:"code"`
}
// GetContainers is the handler for GET .../containers/:name. The real
// implementation is currently disabled (commented out below), so the
// handler responds with an empty 200 body.
// TODO(review): re-enable once the libnet container listing is wired up.
func GetContainers(w rest.ResponseWriter, r *rest.Request) {
	/*
		nid := r.PathParam("id")
		endpointMap, err := libnet.GetContainers(nid)
		if err != nil {
			rest.Error(w, err.Error(), http.StatusInternalServerError)
			return
		}
		containers := new(Containers)
		containers.Content = make([]map[string]string, 0)
		for k, v := range endpointMap {
			content := make(map[string]string)
			content["id"] = k
			content["name"] = v.Name
			content["macaddr"] = v.MacAddress
			content["ipaddr"] = v.IPv4Address
			containers.Content = append(containers.Content, content)
		}
		containers.Message = "Containers"
		containers.Code = "0"
		w.WriteJson(&containers)
	*/
}
// main parses flags, starts the kv backup loop, and serves the REST API.
func main() {
	// --port sets the HTTP listen port (default 12345).
	var portInt int
	flag.IntVar(&portInt, "port", 12345, "--port=12345")
	/*
		flag.StringVar(&kvType, "kvType", "etcd", "--kvtype=etcd")
		flag.StringVar(&kvStore, "kvstore", "192.168.14.130:2379", "--kvstore=192.168.14.130:2379")
		if len(kvType) == 0 || len(kvStore) == 0 {
			log.Fatal("invalid configuration of key-value store")
		}
	*/
	flag.Parse()
	port := fmt.Sprintf(":%d", portInt)

	api := rest.NewApi()
	api.Use(rest.DefaultDevStack...)

	// Run the kv-store backup consumer alongside the HTTP server.
	go kvLoop()

	router, err := rest.MakeRouter(
		//rest.Get("/api/appsoar-network/config", GetConfig),
		rest.Get("/api/appsoar-network/:env/containers/:name", GetContainers),
		rest.Get("/api/appsoar-network/:env/network", GetNetworkPool),
		rest.Post("/api/appsoar-network/:env/network", PostNetwork),
		rest.Get("/api/appsoar-network/:env/network/:id", InfoNetwork),
		rest.Delete("/api/appsoar-network/:env/network/:id", DeleteNetwork),
		//rest.Post("/api/appsoar-network/pool/:env", CreateNetworkPool),
		rest.Delete("/api/appsoar-network/pool/:env", RemoveNetworkPool),
	)
	if err != nil {
		log.Fatal(err)
	}
	api.SetApp(router)

	// The original logged "listening:12345" with no separator.
	log.Print("listening " + port)
	log.Fatal(http.ListenAndServe(port, api.MakeHandler()))
}
<file_sep>/network.go
package main
import (
"errors"
"fmt"
"log"
"sync"
"libdockernet"
"libdockernet/dockerclient"
"libdockernet/dockerclient/fsouzapackage"
)
var (
	// defaultSocket is the local Docker daemon endpoint.
	// Every environment maps to one network pool (see PoolManager).
	defaultSocket = "unix://var/run/docker.sock"
	// netbackend is the low-level Docker network client used by pool methods.
	netbackend dockerclient.NetClient
	// PoolManager holds one VirNetworkPool per environment.
	PoolManager VirNetworkPools
	// supportNetworkType lists the driver types a pool may create.
	supportNetworkType = [...]string{
		"macvlan",
		"overlay",
	}
	// filterType lists built-in driver types that are filtered out.
	filterType = [...]string{
		"null",
		"bridge",
		"host",
	}
)
// A network pool belongs to exactly one environment.
type VirNetworkPools struct {
	lock  *sync.RWMutex
	Pools map[string]VirNetworkPool // key: environment name, value: pool
}

// VirNetworkPool is the set of virtual networks of one environment.
type VirNetworkPool struct {
	Environment string `json:"environment"`
	// Networks is keyed by network ID.
	Networks map[string]VirNetwork `json:"networks"`
	// Hosts belonging to this network pool.
	Hosts VirHosts `json:"hosts"`
	lock  *sync.RWMutex
	// kvBackupChan receives the status (nil or error) of kv-store backup actions.
	kvBackupChan chan error
}
// VirHosts wraps the list of hosts in a pool.
type VirHosts struct {
	// NOTE(review): the original declared `Hosts []VirHosts`, making the
	// type recursively contain itself while VirHost went unused — almost
	// certainly a typo for []VirHost.
	Hosts []VirHost `json:"hosts"`
}

// VirHost identifies a single host node.
type VirHost struct {
	Node string `json:"node"`
}
// VirNetwork is the pool's view of one Docker network.
type VirNetwork struct {
	Name        string            `json:"name"`
	ID          string            `json:"id"`
	Scope       string            `json:"scope"`
	Driver      string            `json:"driver"`
	Subnet      string            `json:"subnet"`
	Gateway     string            `json:"gateway"`
	Options     map[string]string `json:"options"`
	NContainers int               `json:"numcontainers"`
	Containers  []VirContainer    `json:"containers"`
	hasGetDetail bool // whether InfoNetwork has already fetched full details
}

// VirContainer aliases the backend container type.
type VirContainer dockerclient.Container

// VirNetworkCreateOption aliases the backend network-create option type.
type VirNetworkCreateOption dockerclient.NetworkCreateOption
// checkEnvValid reports whether the environment name is acceptable.
// Currently a stub that accepts everything.
// TODO(review): implement real validation or remove.
func checkEnvValid(env string) bool {
	return true
}
// isFilterType reports whether kind is one of the built-in driver types
// (null/bridge/host) that should be filtered out.
func isFilterType(kind string) bool {
	for _, filtered := range filterType {
		if kind == filtered {
			return true
		}
	}
	return false
}
// checkNetTypeSupport reports whether kind is a supported network driver.
func checkNetTypeSupport(kind string) bool {
	for _, supported := range supportNetworkType {
		if supported == kind {
			return true
		}
	}
	return false
}
// GetNetworks returns all networks of this pool's environment, keyed by
// network ID. The error is always nil in the current implementation.
func (pool VirNetworkPool) GetNetworks() (map[string]VirNetwork, error) {
	return pool.Networks, nil
}
// InfoNetwork returns the detailed information for network nid, lazily
// fetching full details from the Docker backend on first access and
// caching them in the pool.
func (pool VirNetworkPool) InfoNetwork(nid string) (VirNetwork, error) {
	pool.lock.Lock()
	defer pool.lock.Unlock()

	network, exists := pool.Networks[nid]
	if !exists {
		return VirNetwork{}, fmt.Errorf("network %s doesn't exits\n", nid)
	}

	// Details not fetched yet: ask the backend and cache the result.
	if !network.hasGetDetail {
		detailNetwork, err := netbackend.InfoNetwork(nid)
		if err != nil {
			return VirNetwork{}, fmt.Errorf("network %s get details fail", nid)
		}
		network.Scope = detailNetwork.Scope
		network.Subnet = detailNetwork.Subnet
		// BUG FIX: the original copied detailNetwork.Subnet into Gateway.
		network.Gateway = detailNetwork.Gateway
		network.hasGetDetail = true

		network.Containers = make([]VirContainer, 0)
		for _, c := range detailNetwork.Containers {
			network.Containers = append(network.Containers, VirContainer(c))
		}

		network.Options = make(map[string]string)
		for k, v := range detailNetwork.Options {
			network.Options[k] = v
		}
		pool.Networks[nid] = network
	}
	return network, nil
}
// RemoveNetwork drops network nid from the pool and asks the docker
// backend to delete it.
// NOTE(review): the network is removed from the pool before the backend
// call; if the backend delete fails the network stays in docker but is
// hidden by the virtual pool (the original comments say this masking is
// deliberate — confirm).
func (pool VirNetworkPool) RemoveNetwork(nid string) error {
	pool.lock.Lock()
	defer pool.lock.Unlock()
	_, exists := pool.Networks[nid]
	if !exists {
		// Debug output left in place: dumps the pool and its network ids.
		fmt.Println(pool)
		for k, _ := range pool.Networks {
			fmt.Println(k)
		}
		return errors.New("invalid network")
	}
	delete(pool.Networks, nid)
	// TODO (original notes): also check the container count here,
	// update the backing KV store,
	// then ask the docker client to remove the network.
	/*
	if false {
	return fmt.Errorf("kv store remove entry fail")
	}
	*/
	err := netbackend.DeleteNetwork(nid)
	if err != nil {
		// On failure only warn and return; the virtual pool masks the network.
		return fmt.Errorf("backend remove network fail")
	}
	return nil
}
// CreateNetwork validates opt, creates the network through the docker
// backend, records it in the pool and backs it up to the KV store.
// NOTE(review): the method has a value receiver, so the
// `pool.Networks = make(...)` below only mutates a local copy of the
// struct; when Networks was nil, the freshly created map (and the entry
// added to it) is lost to the caller. Writes into an existing map do
// persist because maps are reference types. Consider a pointer receiver
// — TODO confirm call sites allow it.
func (pool VirNetworkPool) CreateNetwork(env string, opt VirNetworkCreateOption) (*VirNetwork, error) {
	pool.lock.Lock()
	defer pool.lock.Unlock()
	// Validate the creation options.
	if len(opt.Name) == 0 || len(opt.Driver) == 0 || len(opt.Subnet) == 0 {
		return nil, fmt.Errorf("invalid network create option")
	}
	if !checkNetTypeSupport(opt.Driver) {
		return nil, fmt.Errorf("network type is not support")
	}
	// Create the network through the backend.
	fmt.Println(opt)
	opta := dockerclient.NetworkCreateOption(opt)
	// Note: the returned object is incomplete (no subnet etc.).
	new, err := netbackend.CreateNetwork(&opta)
	if err != nil {
		return nil, err
	}
	fmt.Printf("new net ===> %v\n", *new)
	var net VirNetwork
	net.Name = new.Name
	net.ID = new.ID
	net.Scope = new.Scope
	net.Driver = new.Driver
	net.Subnet = new.Subnet
	net.Gateway = new.Gateway
	net.Containers = make([]VirContainer, 0)
	net.hasGetDetail = false
	for _, v := range new.Containers {
		tmp := VirContainer(v)
		net.Containers = append(net.Containers, tmp)
	}
	net.Options = make(map[string]string)
	for i, j := range new.Options {
		net.Options[i] = j
	}
	if pool.Networks == nil {
		pool.Networks = make(map[string]VirNetwork)
	}
	pool.Networks[net.ID] = net
	// Hand the new network to the KV-store goroutine and wait for the result.
	var kve kvEvent
	kve.pool = &pool
	kve.nid = net.ID
	kve.event = EventCreateNetwork
	kvChannel <- kve
	err = <-pool.kvBackupChan
	// KV store failed: roll back the pool entry and the backend network.
	if err != nil {
		// NOTE(review): fmt.Errorf only builds an error value; its result
		// is discarded here, so nothing is logged — probably meant fmt.Printf.
		fmt.Errorf("kv store fail: %v\n", err)
		delete(pool.Networks, net.ID)
		err = netbackend.DeleteNetwork(net.ID)
		if err != nil {
			// On failure only warn; the virtual pool masks the network.
			return nil, fmt.Errorf("backend remove network fail")
		}
		// NOTE(review): when the backend delete succeeds, control falls
		// through, re-adds the network and reports success despite the
		// failed KV backup — verify this is intended.
	}
	pool.Networks[net.ID] = net
	fmt.Println("kv store success")
	return &net, nil
}
// AddPool registers an empty network pool for environment env and backs
// it up to the KV store; on backup failure the pool is removed again.
func (pools VirNetworkPools) AddPool(env string) error {
	pools.lock.Lock()
	defer pools.lock.Unlock()
	if _, exists := pools.Pools[env]; exists {
		return errors.New("pool already exists")
	}
	pool := VirNetworkPool{
		Networks: make(map[string]VirNetwork),
		lock: new(sync.RWMutex),
		kvBackupChan: make(chan error),
		Environment: env,
	}
	pools.Pools[env] = pool
	// Hand the new pool to the KV-store goroutine and wait for the result.
	var kve kvEvent
	kve.pool = &pool
	kve.event = EventCreatePool
	fmt.Println("ready to backup in kvstore")
	kvChannel <- kve
	err := <-pool.kvBackupChan
	if err != nil {
		// Roll back: the pool is only kept when the backup succeeded.
		delete(pools.Pools, env)
		return fmt.Errorf("Create NetworkPool fail for %v\n", err)
	}
	return nil
}
// RemovePool deletes the network pool of environment env, after first
// recording the removal in the KV store; the pool is only dropped from
// the in-memory registry when the KV backup succeeded.
func (pools VirNetworkPools) RemovePool(env string) error {
	pools.lock.Lock()
	defer pools.lock.Unlock()
	pool, exists := pools.Pools[env]
	if !exists {
		return errors.New("invalid pool")
	}
	var kve kvEvent
	kve.pool = &pool
	kve.event = EventRemovePool
	fmt.Println("ready to backup in kvstore")
	kvChannel <- kve
	err := <-pool.kvBackupChan
	if err != nil {
		return fmt.Errorf("backup NetworkPool fail for %v\n", err)
	}
	delete(pools.Pools, env)
	return nil
}
// init wires up the global pool manager and the docker network backend
// (fsouza implementation over the default unix socket). The process
// aborts if the backend client cannot be constructed.
func init() {
	PoolManager.Pools = make(map[string]VirNetworkPool)
	PoolManager.lock = new(sync.RWMutex)
	fsouzapackage.Register()
	client, err := libdockernet.NewNetClient(
		dockerclient.Fsouza,
		defaultSocket,
	)
	if err != nil {
		log.Fatal(err)
	}
	netbackend = client
}
<file_sep>/vendor/libnet/fsouza-goclient.go
package libnet
import (
"errors"
dockerClient "github.com/fsouza/go-dockerclient"
)
// FsouzaClient is the package-level NetClient implementation backed by
// the fsouza go-dockerclient library.
var FsouzaClient = new(fsouzaClient)

// fsouzaClient wraps a fsouza docker client; Setup must be called before use.
type fsouzaClient struct {
	Client *dockerClient.Client
}
// checkSetup reports an error when the underlying docker client has not
// been initialised via Setup yet.
func (c *fsouzaClient) checkSetup() error {
	if c.Client == nil {
		// Fixed typo in the error message ("fisrt" -> "first").
		return errors.New("libnet must setup first")
	}
	return nil
}
// Setup initialises the wrapped docker client against the given endpoint.
func (c *fsouzaClient) Setup(endpoint string) error {
	cl, err := dockerClient.NewClient(endpoint)
	if err != nil {
		return err
	}
	c.Client = cl
	return nil
}
// GetNetworks lists every network known to the docker daemon.
func (c *fsouzaClient) GetNetworks() ([]dockerClient.Network, error) {
	if err := c.checkSetup(); err != nil {
		return []dockerClient.Network{}, err
	}
	nets, err := c.Client.ListNetworks()
	if err != nil {
		return []dockerClient.Network{}, err
	}
	return nets, nil
}
// CreateNetwork creates a docker network described by opt and returns it.
func (c *fsouzaClient) CreateNetwork(opt dockerClient.CreateNetworkOptions) (*dockerClient.Network, error) {
	if err := c.checkSetup(); err != nil {
		// Consistency fix: every error path now returns nil (the old code
		// returned a dummy &Network{} here but nil on the create-error path).
		return nil, err
	}
	return c.Client.CreateNetwork(opt)
}
// RemoveNetwork deletes the network identified by nid.
func (c *fsouzaClient) RemoveNetwork(nid string) error {
	if err := c.checkSetup(); err != nil {
		return err
	}
	return c.Client.RemoveNetwork(nid)
}
// InspectNetwork fetches the full description of network nid.
func (c *fsouzaClient) InspectNetwork(nid string) (*dockerClient.Network, error) {
	if err := c.checkSetup(); err != nil {
		return &dockerClient.Network{}, err
	}
	net, err := c.Client.NetworkInfo(nid)
	if err != nil {
		return &dockerClient.Network{}, err
	}
	return net, nil
}
// ListNetworkContainers returns the endpoints of all containers attached
// to network nid, keyed by container id.
func (c *fsouzaClient) ListNetworkContainers(nid string) (map[string]dockerClient.Endpoint, error) {
	if err := c.checkSetup(); err != nil {
		return nil, err
	}
	net, err := c.InspectNetwork(nid)
	if err != nil {
		return nil, err
	}
	return net.Containers, nil
}
// DisconnectContainer detaches container cid from network nid; force
// removes the endpoint even when the container is still running.
func (c *fsouzaClient) DisconnectContainer(nid, cid string, force bool) error {
	if err := c.checkSetup(); err != nil {
		return err
	}
	opts := dockerClient.NetworkConnectionOptions{
		Container: cid,
		Force:     force,
	}
	// BUG FIX: the error from DisconnectNetwork was previously discarded
	// and nil returned unconditionally.
	return c.Client.DisconnectNetwork(nid, opts)
}
// ConnectContainer attaches container cid to network nid using the given
// endpoint configuration.
func (c *fsouzaClient) ConnectContainer(nid, cid string, conf dockerClient.EndpointConfig) error {
	if err := c.checkSetup(); err != nil {
		return err
	}
	opts := dockerClient.NetworkConnectionOptions{
		Container:      cid,
		EndpointConfig: &conf,
	}
	// BUG FIX: the error from ConnectNetwork was previously discarded
	// and nil returned unconditionally.
	return c.Client.ConnectNetwork(nid, opts)
}
// init registers this implementation under the "fsouza" backend name.
func init() {
	Register("fsouza", FsouzaClient)
}
| 620c7fe38b4435d9ab013941a9d75a5d4331c222 | [
"Go"
] | 5 | Go | wangweihong/test | fa4c3f91bd43021511055e45ffd76271ac9d3c16 | 884bb092ea233a443b00317a01b13e0898c2b6c6 |
refs/heads/master | <file_sep>var titulo="";
var formElement=null;      // the <form id="myform"> element, set on load
var numeroSecreto=null;    // unused here
var respuestaSelect=null;  // correct index for select question 1 (from XML)
var respuestaSelect2=null; // correct index for select question 2 (from XML)
// NOTE(review): gestionarXml declares *local* variables with these same
// names, so the four answer arrays below are never filled — the
// checkbox/radio graders always see them empty. Confirm and remove the
// shadowing locals in gestionarXml.
var respuestasCheckbox = [];
var respuestasCheck =[];
var respuestasRadio =[];
var respuestasRadio2 =[];
var nota = 0;              // accumulated score (out of 10)
window.onload=function(){
formElement=document.getElementById('myform');
//Corregir al apretar
formElement.onsubmit=function(){
inicializar();
if (comprobar()){
corregirSelect();
corregirSelect2();
corregirCheckbox();
corregirCheckbox2();
corregirText();
corregirText2();
corregirRadio();
corregirRadio2();
presentarNota();
}
return false;
}
var xhttp = new XMLHttpRequest();
xhttp.onreadystatechange = function() {
if (this.readyState == 4 && this.status == 200) {
gestionarXml(this);
}
}
xhttp.open("GET", "https://rawgit.com/oscaradriancasi/PreguntasXML/master/xml/preguntasexamen.xml", true);
xhttp.send();
//----------------------------------------------------------------------------------------------------------
//Text1
function gestionarXml(dadesXml){
var xmlDoc = dadesXml.responseXML;
titulo= xmlDoc.getElementById('Oscar005').getElementsByTagName('title')[0].innerHTML;
ponerTitulo(titulo);
respuesta=xmlDoc.getElementById("Oscar005").getElementsByTagName('answer')[0].innerHTML;
//Text2
var xmlDoc = dadesXml.responseXML;
titulo= xmlDoc.getElementById('Oscar006').getElementsByTagName('title')[0].innerHTML;
ponerTitulo1(titulo);
respuesta2=xmlDoc.getElementById("Oscar006").getElementsByTagName('answer')[0].innerHTML;
//--------------------------------------------------------------------------------------------------------------
//check1
var tituloCheckbox = xmlDoc.getElementById('Oscar003').getElementsByTagName("title")[0].innerHTML;
var opcionesCheckbox = [];
var respuestasCheckbox = [];
var nopt = xmlDoc.getElementById("Oscar003").getElementsByTagName('option').length;
for (i = 0; i < nopt; i++) {
opcionesCheckbox[i]=xmlDoc.getElementById("Oscar003").getElementsByTagName('option')[i].innerHTML;
}
ponerDatosCheckboxHtml(tituloCheckbox,opcionesCheckbox);
var nres = xmlDoc.getElementById("Oscar003").getElementsByTagName('answer').length;
for (i = 0; i < nres; i++) {
respuestasCheckbox[i]=xmlDoc.getElementById("Oscar003").getElementsByTagName("answer")[i].innerHTML;
}
//check2
var tituloCheck = xmlDoc.getElementById('Oscar004').getElementsByTagName("title")[0].innerHTML;
var opcionesCheck = [];
var respuestasCheck = [];
var nop = xmlDoc.getElementById("Oscar004").getElementsByTagName('option').length;
for (i = 0; i < nop; i++) {
opcionesCheck[i]=xmlDoc.getElementById("Oscar004").getElementsByTagName('option')[i].innerHTML;
}
ponerDatosCheckHtml(tituloCheck,opcionesCheck);
var nres1 = xmlDoc.getElementById("Oscar004").getElementsByTagName('answer').length;
for (i = 0; i < nres1; i++) {
respuestasCheck[i]=xmlDoc.getElementById("Oscar004").getElementsByTagName("answer")[i].innerHTML;
}
//-----------------------------------------------------------------------------------------------------------
//SELECT 1
var tituloSelect=xmlDoc.getElementById('Oscar001').getElementsByTagName("title")[0].innerHTML;
var opcionesSelect = [];
var nopt = xmlDoc.getElementById("Oscar001").getElementsByTagName('option').length;
for (i = 0; i < nopt; i++) {
opcionesSelect[i] = xmlDoc.getElementById("Oscar001").getElementsByTagName('option')[i].innerHTML;
}
ponerDatosSelectHtml(tituloSelect,opcionesSelect);
respuestaSelect=parseInt(xmlDoc.getElementsByTagName("answer")[0].innerHTML);
//SELECT2
var tituloSelectx=xmlDoc.getElementById('Oscar002').getElementsByTagName("title")[0].innerHTML;
var opcionesSelectx = [];
var nopt = xmlDoc.getElementById("Oscar002").getElementsByTagName('option').length;
for (i = 0; i < nopt; i++) {
opcionesSelectx[i] = xmlDoc.getElementById("Oscar002").getElementsByTagName('option')[i].innerHTML;
}
ponerDatosSelectHtmlx(tituloSelectx,opcionesSelectx);
respuestaSelect2=parseInt(xmlDoc.getElementsByTagName("answer")[1].innerHTML);
//---------------------------------------------------------------------------------------------------------
//radio1
var tituloRadio = xmlDoc.getElementById('Oscar009').getElementsByTagName("title")[0].innerHTML;
var opcionesRadio = [];
var respuestasRadio = [];
var noR = xmlDoc.getElementById("Oscar009").getElementsByTagName('option').length;
for (i = 0; i < noR; i++) {
opcionesRadio[i]=xmlDoc.getElementById("Oscar009").getElementsByTagName('option')[i].innerHTML;
}
ponerDatosRadioHtml(tituloRadio,opcionesRadio);
var nresrad = xmlDoc.getElementById("Oscar009").getElementsByTagName('answer').length;
for (i = 0; i < nresrad; i++) {
respuestasRadio[i]=xmlDoc.getElementById("Oscar009").getElementsByTagName("answer")[i].innerHTML;
}
//radio2
var tituloRadio1 = xmlDoc.getElementById('Oscar010').getElementsByTagName("title")[0].innerHTML;
var opcionesRadio1 = [];
var respuestasRadio = [];
var noF = xmlDoc.getElementById("Oscar010").getElementsByTagName('option').length;
for (i = 0; i < noF; i++) {
opcionesRadio1[i]=xmlDoc.getElementById("Oscar010").getElementsByTagName('option')[i].innerHTML;
}
ponerDatosRadio1Html(tituloRadio1,opcionesRadio1);
var nresrad = xmlDoc.getElementById("Oscar010").getElementsByTagName('answer').length;
for (i = 0; i < nresrad; i++) {
respuestasRadio[i]=xmlDoc.getElementById("Oscar010").getElementsByTagName("answer")[i].innerHTML;
}
//----------------------------------------------------------------------------------------------------------------
//multiple1
var tituloMultiple=xmlDoc.getElementById('Oscar007').getElementsByTagName("title")[0].innerHTML;
var opcionesMultiple = [];
var nopt = xmlDoc.getElementById("Oscar007").getElementsByTagName('option').length;
for (i = 0; i < nopt; i++) {
opcionesMultiple[i] = xmlDoc.getElementById("Oscar007").getElementsByTagName('option')[i].innerHTML;
}
ponerDatosMultipleHtml(tituloMultiple,opcionesMultiple);
respuestaMultiple=parseInt(xmlDoc.getElementsByTagName("answer")[1].innerHTML);
//multiple2
var tituloMultiplex=xmlDoc.getElementById('Oscar008').getElementsByTagName("title")[0].innerHTML;
var opcionesMultiplex = [];
var nopt = xmlDoc.getElementById("Oscar008").getElementsByTagName('option').length;
for (i = 0; i < nopt; i++) {
opcionesMultiplex[i] = xmlDoc.getElementById("Oscar008").getElementsByTagName('option')[i].innerHTML;
}
ponerDatosMultipleHtmlx(tituloMultiplex,opcionesMultiplex);
respuestaMultiple=parseInt(xmlDoc.getElementsByTagName("answer")[1].innerHTML);
//------------------------------------------------------------------------------------------------------------------
// Write the title of text question 5 into the 5th <h3>.
function ponerTitulo(t){
    document.getElementsByTagName('h3')[4].innerHTML=t;
}
// Write the title of text question 6 into the 6th <h3>.
function ponerTitulo1(t){
    document.getElementsByTagName('h3')[5].innerHTML=t;
}
// Build the checkbox group for question 3 ("color1") inside #div1 and
// set its heading text.
function ponerDatosCheckboxHtml(t,opt){
    var container = document.getElementById('div1');
    document.getElementById('Oscar003').innerHTML = t;
    for (i = 0; i < opt.length; i++) {
        var box = document.createElement("input");
        box.type = "checkbox";
        box.name = "color1";
        box.id = "color1_" + i;
        var caption = document.createElement("label");
        caption.setAttribute("for", "color1_" + i);
        caption.innerHTML = opt[i];
        container.appendChild(box);
        container.appendChild(caption);
    }
}
// Build the checkbox group for question 4 ("color") inside #div2.
function ponerDatosCheckHtml(ti,opti){
    var checkContainer=document.getElementById('div2');
    document.getElementById('Oscar004').innerHTML = ti;
    for (i = 0; i < opti.length; i++) {
        var input = document.createElement("input");
        var label = document.createElement("label");
        label.innerHTML=opti[i];
        label.setAttribute("for", "color_"+i);
        input.type="checkbox";
        input.name="color";
        input.id="color_"+i;;
        checkContainer.appendChild(input);
        checkContainer.appendChild(label);
    }
}
// Fill the multi-select of question 7 (3rd <select>) with its options
// (values are 1-based).
function ponerDatosMultipleHtml(tu,optu){
    document.getElementById("Oscar007").innerHTML=tu;
    var multiple = document.getElementsByTagName("select")[2];
    for (i = 0; i < optu.length; i++) {
        var option = document.createElement("option");
        option.text = optu[i];
        option.value=i+1;
        multiple.options.add(option);
    }
}
// Fill the multi-select of question 8 (4th <select>).
function ponerDatosMultipleHtmlx(tx,optx){
    document.getElementById("Oscar008").innerHTML=tx;
    var multiple = document.getElementsByTagName("select")[3];
    for (i = 0; i < optx.length; i++) {
        var option = document.createElement("option");
        option.text = optx[i];
        option.value=i+1;
        multiple.options.add(option);
    }
}
// Fill the select of question 1 (1st <select>).
function ponerDatosSelectHtml(tu,optu){
    document.getElementById("Oscar001").innerHTML=tu;
    var select = document.getElementsByTagName("select")[0];
    for (i = 0; i < optu.length; i++) {
        var option = document.createElement("option");
        option.text = optu[i];
        option.value=i+1;
        select.options.add(option);
    }
}
// Fill the select of question 2 (2nd <select>).
function ponerDatosSelectHtmlx(tx,optx){
    document.getElementById("Oscar002").innerHTML=tx;
    var select = document.getElementsByTagName("select")[1];
    for (i = 0; i < optx.length; i++) {
        var option = document.createElement("option");
        option.text = optx[i];
        option.value=i+1;
        select.options.add(option);
    }
}
// Build the radio group for question 9 ("color2") inside #div3.
function ponerDatosRadioHtml(t,opt){
    var checkboxContainer=document.getElementById('div3');
    document.getElementById('Oscar009').innerHTML = t;
    for (i = 0; i < opt.length; i++) {
        var input = document.createElement("input");
        var label = document.createElement("label");
        label.innerHTML=opt[i];
        label.setAttribute("for", "color2_"+i);
        input.type="radio";
        input.name="color2";
        input.id="color2_"+i;;
        checkboxContainer.appendChild(input);
        checkboxContainer.appendChild(label);
    }
}
// Build the radio group for question 10 ("color3") inside #div4.
function ponerDatosRadio1Html(t,opt){
    var checkboxContainer=document.getElementById('div4');
    document.getElementById('Oscar010').innerHTML = t;
    for (i = 0; i < opt.length; i++) {
        var input = document.createElement("input");
        var label = document.createElement("label");
        label.innerHTML=opt[i];
        label.setAttribute("for", "color3_"+i);
        input.type="radio";
        input.name="color3";
        input.id="color3_"+i;;
        checkboxContainer.appendChild(input);
        checkboxContainer.appendChild(label);
    }
}
}
//Gestionar la presentación de las respuestas
// Append one result line (as plain text, in a <p>) to the results panel.
function darRespuestaHtml(r){
    var line = document.createElement("p");
    line.appendChild(document.createTextNode(r));
    document.getElementById('resultadosDiv').appendChild(line);
}
// Show the final mark out of 10 in the results panel.
function presentarNota(){
    darRespuestaHtml("Nota: " +nota+ " puntos sobre 10");
}
// Reset the results panel and the score before re-grading.
function inicializar(){
    document.getElementById('resultadosDiv').innerHTML = "";
    nota=0.0;
}
//implementación de la corrección
// Grade select question 1: compare the chosen option index against the
// answer loaded from the XML, update the score and report the result.
function corregirSelect(){
    var chosen = formElement.elements[0].selectedIndex;
    if (chosen == respuestaSelect) {
        nota += 1;
        darRespuestaHtml("P1: Correcto");
    } else {
        darRespuestaHtml("P1: Incorrecto");
    }
}
// Grade select question 2: same scheme as corregirSelect, second <select>.
function corregirSelect2(){
    var chosen = formElement.elements[1].selectedIndex;
    if (chosen == respuestaSelect2) {
        nota += 1;
        darRespuestaHtml("P2: Correcto");
    } else {
        darRespuestaHtml("P2: Incorrecto");
    }
}
//--------------------------------------------------------------------------------------------------------------------------------------------
// Grade text question 5: exact (string-equality) match of the typed
// value against the answer loaded from the XML.
function corregirText(){
    var typed = formElement.elements[10].value;
    if (typed == respuesta) {
        nota += 1;
        darRespuestaHtml("P5: Exacto!");
    } else {
        darRespuestaHtml("P5: Te has quedado corto");
    }
}
// Grade text question 6: exact match against the second XML text answer.
function corregirText2(){
    var typed = formElement.elements[12].value;
    if (typed == respuesta2) {
        nota += 1;
        darRespuestaHtml("P6: Exacto!");
    } else {
        darRespuestaHtml("P6: Te has quedado corto");
    }
}
//----------------------------------------------------------------------------------------------------------------------------------------------------------
// Grade checkbox question 3: for every checked box, test whether its
// index appears in respuestasCheckbox and score/report per option.
// NOTE(review): the global respuestasCheckbox is never populated
// (gestionarXml fills a shadowing local), so escorrecta stays false here
// — confirm and fix the shadowing.
function corregirCheckbox(){
    var f=formElement;
    var escorrecta = [];
    for (i = 0; i < f.color1.length; i++) { // "color1" names all P3 checkboxes
        if (f.color1[i].checked) {
            escorrecta[i]=false;
            for (j = 0; j < respuestasCheckbox.length; j++) {
                if (i==respuestasCheckbox[j]) escorrecta[i]=true;
            }
            // Score and report this option.
            if (escorrecta[i]) {
                nota +=1
                darRespuestaHtml("P3: Exacto!");
            } else {
                darRespuestaHtml("P3: Incorrecta");
            }
        }
    }
}
// Grade checkbox question 4 (group "color"), same scheme as corregirCheckbox.
// NOTE(review): respuestasCheck suffers the same never-populated global
// issue — see corregirCheckbox.
function corregirCheckbox2(){
    var f=formElement;
    var escorrecta = [];
    for (i = 0; i < f.color.length; i++) { // "color" names all P4 checkboxes
        if (f.color[i].checked) {
            escorrecta[i]=false;
            for (j = 0; j < respuestasCheck.length; j++) {
                if (i==respuestasCheck[j]) escorrecta[i]=true;
            }
            // Score and report this option.
            if (escorrecta[i]) {
                nota +=1
                darRespuestaHtml("P4: Exacto!");
            } else {
                darRespuestaHtml("P4: Incorrecta");
            }
        }
    }
}
//------------------------------------------------------------------------------------------------------
// Grade radio question 9 (group "color2"): check whether the selected
// index is listed in respuestasRadio and score/report accordingly.
function corregirRadio(){
    var f=formElement;
    var escorrecta = [];
    for (i = 0; i < f.color2.length; i++) { // "color2" names all P9 radios
        if (f.color2[i].checked) {
            escorrecta[i]=false;
            for (j = 0; j < respuestasRadio.length; j++) {
                if (i==respuestasRadio[j]) escorrecta[i]=true;
            }
            // Score and report the chosen option.
            if (escorrecta[i]) {
                nota +=1
                darRespuestaHtml("P9: Exacto!");
            } else {
                darRespuestaHtml("P9: Incorrecta");
            }
        }
    }
}
// Grade radio question 10 (group "color3").
// NOTE(review): respuestasRadio2 is never filled anywhere (gestionarXml
// stores P10 answers into a local respuestasRadio), so this grader
// always reports "Incorrecta" — confirm and fix.
function corregirRadio2(){
    var f=formElement;
    var escorrecta = [];
    for (i = 0; i < f.color3.length; i++) { // "color3" names all P10 radios
        if (f.color3[i].checked) {
            escorrecta[i]=false;
            for (j = 0; j < respuestasRadio2.length; j++) {
                if (i==respuestasRadio2[j]) escorrecta[i]=true;
            }
            // Score and report the chosen option.
            if (escorrecta[i]) {
                nota +=1
                darRespuestaHtml("P10: Exacto!");
            } else {
                darRespuestaHtml("P10: Incorrecta");
            }
        }
    }
}
// Pre-submit validation: require a value in the first field, a selection
// in the second, and at least one checked "color" checkbox before grading.
// NOTE(review): elements[0] is a <select> in the current form, so the
// value=="" test and the "Escribe un número" prompt look inherited from
// an older layout — verify the element indexes against the HTML.
function comprobar(){
    var f=formElement;
    var checked=false;
    for (i = 0; i < f.color.length; i++) { // "color" names all P4 checkboxes
        if (f.color[i].checked) checked=true;
    }
    if (f.elements[0].value=="") {
        f.elements[0].focus();
        alert("Escribe un número");
        return false;
    } else if (f.elements[1].selectedIndex==0) {
        f.elements[1].focus();
        alert("Selecciona una opción");
        return false;
    } if (!checked) {
        document.getElementsByTagName("h3")[2].focus();
        alert("Selecciona una opción del checkbox");
        return false;
    } else return true;
}
}//fin windowload<file_sep># PreguntasExamenDia10
Este proyecto está formado por un conjunto de 2 archivos html ya que contiene los id de las preguntas y los diferentes div que he tenido que usar para darle los estilos necesarios a la página principal, y otra del uso del proyecto (sus instrucciones); después cuenta con dos estilos css, tanto móvil como escritorio, con sus respectivos tamaños de letras, colores, etc. También cuenta con el archivo xml que contiene las preguntas con sus identificadores y las distintas respuestas, entre ellas la verdadera o verdaderas, y por último cuenta con un archivo javascript cuyo código ha sido facilitado por Miguel, que entiende sobre este tipo de campos.
Este proyecto sigue en versión de pruebas.
| 4c4d87c999154a15d4f2ec7643890e0f73c1a765 | [
"JavaScript",
"Markdown"
] | 2 | JavaScript | oscaradriancasi/PreguntasExamenDia10 | 7f70666716cab0e72a2a08d4b93d966190fb31cd | a2917f598cbf6af679c475d171253d4c531118b7 |
refs/heads/main | <repo_name>dbuts/chessSquareMemorization<file_sep>/README.md
# chessSquareMemorization
Simple application to help memorize the color of squares in the game of Chess.
<file_sep>/squareGuesser.py
import random

# One entry per square, split by colour: 32 dark ("black") and 32 light
# ("white") squares in algebraic notation (file a-h, rank 1-8). Both
# colours are kept for future linking to a highlighted-square image.
black = ["a1","a3","a5","a7","b2","b4","b6","b8","c1","c3","c5","c7","d2","d4","d6","d8","e1","e3","e5","e7","f2","f4","f6","f8","g1","g3","g5","g7","h2","h4","h6","h8"]
white = ["a2","a4","a6","a8","b1","b3","b5","b7","c2","c4","c6","c8","d1","d3","d5","d7","e2","e4","e6","e8","f1","f3","f5","f7","g2","g4","g6","g8","h1","h3","h5","h7"]
# Single-letter colour codes the user answers with.
colors = ['b','w']
def squareQuestionnaire():
    """Repeatedly quiz the user on the colour of a random chess square.

    Picks a random colour, prints a random square of that colour, reads
    the user's guess ('b' or 'w') and reports whether it was right; runs
    until the process is interrupted.

    Replaces the previous self-recursive replay (which grew the call
    stack without bound) with an explicit loop, and tolerates stray
    whitespace/uppercase in the answer.
    """
    while True:
        color = random.choice(colors)
        # 32 dark and 32 light squares on a chess board.
        square = random.choice(black if color == 'b' else white)
        print(square)  # display the square to quiz on
        response = input("Black or White? (b/w)\n").strip().lower()
        if response == color:
            print(u'\u2705' + "Correct\n\n")
        else:
            print(u'\u274C' + "Wrong\n\n")
def main():
    """Crudely clear the terminal (blank lines) and start the quiz loop."""
    print("\n\n\n\n\n\n\n\n\n\n")  # Clear Terminal output
    squareQuestionnaire()

if __name__ == "__main__":
    main()
"Markdown",
"Python"
] | 2 | Markdown | dbuts/chessSquareMemorization | a31d3fefee563e8a36c8dbeb4515aff03b2ad28c | 5a9ff9e28a622af5890696ba4086287c17070a85 |
refs/heads/master | <file_sep># PRISM-POMDP-Analyser
Tool for generating graphs of POMDP models from PRISM.
1. Export adversaries from PRISM:
prism model.prism props.props -exportadv strat.adv
2. Generate graph
java main strat.adv -g
<file_sep>package pomdp_analyser;
import java.text.DecimalFormat;
import java.util.ArrayList;
public class Strategy {
private ArrayList<Execution> executions = new ArrayList<>();
public ArrayList<Execution> getExecutions() {
return executions;
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
for (Execution e : executions) {
sb.append("Start: " + e.getStartState() + " End: " + e.getEndState() + "\n");
sb.append("Variable Initial Values: ");
for (Integer f : e.getVariableInitialValue()) {
sb.append(f + ",");
}
sb.deleteCharAt(sb.length() - 1);
sb.append("\nVariable End Values: ");
for (Integer f : e.getVariableEndValue()) {
sb.append(f + ",");
}
sb.deleteCharAt(sb.length() - 1);
sb.append("\nInitial Belief Probabilities: ");
for (Double f : e.getInitialBeliefProb()) {
sb.append(f + ",");
}
sb.deleteCharAt(sb.length() - 1);
sb.append("\nFinal Belief Probabilities: ");
for (Double f : e.getFinalBeliefProb()) {
sb.append(f + ",");
}
sb.deleteCharAt(sb.length() - 1);
sb.append("\nProbability: " + e.getNonDetNumber() + "/" + e.getProbability());
sb.append("\n\n");
}
return sb.toString();
}
public void setExecutions(ArrayList<Execution> executions) {
this.executions = executions;
}
public Strategy() {
}
public String getGraph() {
DecimalFormat df = new DecimalFormat("#.00");
StringBuilder sb = new StringBuilder();
StringBuilder labels = new StringBuilder();
sb.append("digraph G {");
for (Execution e : executions) {
StringBuilder initialVars = new StringBuilder();
StringBuilder initialBeliefs = new StringBuilder();
// build inital vars string for label
for (int a : e.getVariableInitialValue()) {
initialVars.append(a + ",");
}
initialVars.deleteCharAt(initialVars.length() - 1);
// build belifs string for label
for (double a : e.getInitialBeliefProb()) {
initialBeliefs.append(Math.round(a * 100.0) / 100.0 + ",");
}
initialBeliefs.deleteCharAt(initialBeliefs.length() - 1);
sb.append(e.getStartState() + " -> " + e.getEndState() + "[label=\"" + e.getProbability() + "\"];\n");
labels.append(e.getStartState() + " [label=\"" + e.getStartState() + "\\n(" + initialVars +")\n[" + initialBeliefs + "]\"];\n");
}
sb.append(labels);
sb.append("}");
return sb.toString();
}
public String toEnglish() {
for (Execution e : executions) {
System.out.println(e.getStartState());
}
return "yer da";
}
} | ae1c41410d45afd361f2ba32e0fc056ad274a962 | [
"Markdown",
"Java"
] | 2 | Markdown | munroa/PRISM-POMDP-Analyser | ad906266946cae6b523b3975cf79656896ad72de | 129c3509f24cca2ddd38fab4ad8d0cd6e9942f6c |
refs/heads/master | <file_sep>int RFIDResetPin = 13;
//Register your RFID tags here
char tag1[13] = "1E009A4067A3";
char tag2[13] = "010230F28243";
char tag3[13] = "01023C013A04";
char tag4[13] = "01023101093A";
char tag5[13] = "01023C0A4376";
char tag6[13] = "01023C000E31";
char tag7[13] = "01023C0A3207";
char tag8[13] = "1A004116317C";
char tag9[13] = "1E009A81F9FC";
char tag10[13] = "1A004162261F";
void setup(){
Serial.begin(9600);
pinMode(RFIDResetPin, OUTPUT);
digitalWrite(RFIDResetPin, HIGH);
//ONLY NEEDED IF CONTROLING THESE PINS - EG. LEDs
pinMode(2, OUTPUT);
pinMode(3, OUTPUT);
pinMode(4, OUTPUT);
pinMode(5, OUTPUT);
pinMode(6, OUTPUT);
pinMode(7, OUTPUT);
pinMode(8, OUTPUT);
pinMode(9, OUTPUT);
pinMode(10, OUTPUT);
pinMode(11, OUTPUT);
}
void loop(){
char tagString[13];
int index = 0;
boolean reading = false;
while(Serial.available()){
int readByte = Serial.read(); //read next available byte
if(readByte == 2) reading = true; //begining of tag
if(readByte == 3) reading = false; //end of tag
if(reading && readByte != 2 && readByte != 10 && readByte != 13){
//store the tag
tagString[index] = readByte;
index ++;
}
}
checkTag(tagString); //Check if it is a match
clearTag(tagString); //Clear the char of all value
resetReader(); //eset the RFID reader
}
void checkTag(char tag[]){
///////////////////////////////////
//Check the read tag against known tags
///////////////////////////////////
if(strlen(tag) == 0) return; //empty, no need to contunue
if(compareTag(tag, tag1)){ // if matched tag1, do this
lightLED(2);
}else if(compareTag(tag, tag2)){ //if matched tag2, do this
lightLED(3);
}else if(compareTag(tag, tag3)){
lightLED(4);
}else if(compareTag(tag, tag4)){
lightLED(5);
}else if(compareTag(tag, tag5)){
lightLED(6);
}else if(compareTag(tag, tag6)){
lightLED(7);
}else if(compareTag(tag, tag7)){
lightLED(8);
}else if(compareTag(tag, tag8)){
lightLED(9);
}else if(compareTag(tag, tag9)){
lightLED(10);
}else if(compareTag(tag, tag10)){
lightLED(11);
}else{
Serial.println(tag); //read out any unknown tag
}
}
void lightLED(int pin){
///////////////////////////////////
//Turn on LED on pin "pin" for 250ms
///////////////////////////////////
Serial.println(pin);
digitalWrite(pin, HIGH);
delay(250);
digitalWrite(pin, LOW);
}
void resetReader(){
///////////////////////////////////
//Reset the RFID reader to read again.
///////////////////////////////////
digitalWrite(RFIDResetPin, LOW);
digitalWrite(RFIDResetPin, HIGH);
delay(150);
}
void clearTag(char one[]){
///////////////////////////////////
//clear the char array by filling with null - ASCII 0
//Will think same tag has been read otherwise
///////////////////////////////////
for(int i = 0; i < strlen(one); i++){
one[i] = 0;
}
}
boolean compareTag(char one[], char two[]){
///////////////////////////////////
//compare two value to see if same,
//strcmp not working 100% so we do this
///////////////////////////////////
if(strlen(one) == 0) return false; //empty
for(int i = 0; i < 12; i++){
if(one[i] != two[i]) return false;
}
return true; //no mismatches
}
<file_sep>package edu.rit.csci759.rspi.exercise;
public interface RpiIndicatorInterface {
/*
* Constant thresholds for ambient light intensity
*/
static final int AMBIENT_DARK=35;
static final int AMBIENT_BRIGHT=80;
/*
* Constant thresholds for temperature in F
*/
static final int TEMPERATURE_COLD=60;
static final int TEMPERATURE_HOT=76;
/*
* function to turn off all LEDs
*/
void led_all_off();
/*
* function to turn on all LEDs
*/
void led_all_on();
/*
* function to indicate error; normally blnking red LED
*/
void led_error(int blink_count) throws InterruptedException;
/*
* Turn on a LED to indicate the value is low
*/
void led_when_low();
/*
* Turn on a LED to indicate the value is mid
*/
void led_when_mid();
/*
* Turn on a LED to indicate the value is high
*/
void led_when_high();
/*
* read light intensity value from the photocell
*/
int read_ambient_light_intensity();
/*
* read temperature value from the TMP36 sensor
*/
int read_temperature();
}
<file_sep>package edu.rit.csci759.rspi;
import edu.rit.csci759.rspi.utils.AdafruitMCP4725;
public class RpiDACExample {
	/**
	 * Drives the MCP4725 DAC through a 9-bit sine lookup table five
	 * times, producing a sine wave on the chip's VOUT terminal
	 * (~10 ms per step).
	 *
	 * @param args unused
	 */
	public static void main(String[] args)
	{
		System.out.println("The output happens on the VOUT terminal of the MCP4725.");
		AdafruitMCP4725 dac = new AdafruitMCP4725();
		for (int i=0; i<5; i++)
		{
			for (int volt : AdafruitMCP4725.DACLookupFullSine9Bit)
			{
				dac.setVoltage(volt);
				try {
					Thread.sleep(10L);
				} catch (InterruptedException ie) {
					// BUG FIX: the interrupt was silently swallowed before.
					// Restore the thread's interrupt flag and stop the sweep.
					Thread.currentThread().interrupt();
					return;
				}
			}
		}
	}
}<file_sep>package test.rit.harsh.myapplication;
import android.os.Parcel;
import android.os.Parcelable;
/**
 * Created by patil on 10/9/2015.
 *
 * Parcelable wrapper around a single rule name, used to pass a rule
 * between Android components via an Intent/Bundle.
 */
public class RuleGetter implements Parcelable {
    /** Required Parcelable factory: rebuilds instances from a Parcel. */
    public static final Creator<RuleGetter> CREATOR = new Creator<RuleGetter>() {
        @Override
        public RuleGetter createFromParcel(Parcel in) {
            return new RuleGetter(in);
        }
        @Override
        public RuleGetter[] newArray(int size) {
            return new RuleGetter[size];
        }
    };
    // The wrapped rule name — the only state that is parcelled.
    private String name;
    public RuleGetter(String name) {
        this.name = name;
    }
    // Parcel constructor: must read fields in the same order
    // writeToParcel writes them.
    protected RuleGetter(Parcel in) {
        name = in.readString();
    }
    public String getName() {
        return name;
    }
    // No special contents (e.g. file descriptors) in this Parcelable.
    @Override
    public int describeContents() {
        return 0;
    }
    @Override
    public void writeToParcel(Parcel dest, int flags) {
        dest.writeString(name);
    }
}<file_sep>#!/bin/sh
curl -s get.pi4j.com | sudo bash<file_sep>'''
Created on Nov 20, 2014
@author: ph
'''
# example of using MCP3008 ADC to read value from photocell and tmp36
try:
import RPi.GPIO as GPIO, time
except RuntimeError:
print("Error importing RPi.GPIO! This is probably because you need superuser privileges. You can achieve this by using 'sudo' to run your script")
import plotly.plotly as py
import json
import datetime
import readadc
# Use BCM (GPIO-number) pin addressing throughout this script.
GPIO.setmode(GPIO.BCM)
# BCM pin numbers of the three indicator LEDs.
LED_RED=21
LED_YELLOW=20
LED_GREEN=16
chan_list=[LED_RED, LED_YELLOW, LED_GREEN]
# Ambient-light thresholds on the 0-100 scale produced by get_intensity().
AMBIENT_DARK=35;
AMBIENT_BRIGHT=80;
def setup():
    # All three LED pins as outputs, initially off; then initialise the
    # SPI pins used by the MCP3008 ADC helper module.
    GPIO.setup(chan_list, GPIO.OUT,initial=GPIO.LOW)
    readadc.initialize()
def cleanup():
    # Release every GPIO channel this program configured.
    GPIO.cleanup()
def led_all_off():
    # Drive all three LED pins low in one call.
    GPIO.output(chan_list, GPIO.LOW)
def led_all_on():
    # Drive all three LED pins high in one call.
    GPIO.output(chan_list, GPIO.HIGH)
def led_when_low():
    """Signal a 'low' reading: red on, yellow and green off."""
    GPIO.output([LED_YELLOW, LED_GREEN], GPIO.LOW)
    GPIO.output(LED_RED, GPIO.HIGH)
def led_when_mid():
    """Signal a 'mid' reading: yellow on, red and green off."""
    GPIO.output([LED_RED, LED_GREEN], GPIO.LOW)
    GPIO.output(LED_YELLOW, GPIO.HIGH)
def led_when_high():
    """Signal a 'high' reading: green on, red and yellow off."""
    GPIO.output([LED_RED, LED_YELLOW], GPIO.LOW)
    GPIO.output(LED_GREEN, GPIO.HIGH)
def led_error(blink_count):
    """Flash the red LED blink_count times (1 s on, 1 s off) to flag an error."""
    for _ in range(blink_count):
        GPIO.output(LED_RED, GPIO.HIGH)
        time.sleep(1)
        GPIO.output(LED_RED, GPIO.LOW)
        time.sleep(1)
def get_temp(adc_pin):
    """Read the TMP36 on ADC channel `adc_pin` and return the temperature
    in Fahrenheit, formatted as a string with one decimal place
    (the plotly stream writes this string directly).

    Clean-up: the original also produced string versions of the raw
    millivolts and of temp_C that were never used; those dead
    assignments are removed.
    """
    tmp36_data = readadc.readadc(adc_pin,
                                 readadc.PINS.SPICLK,
                                 readadc.PINS.SPIMOSI,
                                 readadc.PINS.SPIMISO,
                                 readadc.PINS.SPICS)
    # 10-bit ADC count -> millivolts on the 3.3 V reference.
    tmp36_millivolts = tmp36_data * (3300.0/1024.0)
    # TMP36: 10 mV per degree C; ((mv-100)/10)-40 == (mv-500)/10.
    temp_C = ((tmp36_millivolts - 100.0) / 10.0) - 40.0
    # convert celsius to fahrenheit
    temp_F = (temp_C * 9.0 / 5.0) + 32
    # show only one decimal place
    return "%.1f" % temp_F
def get_intensity(adc_pin):
    """Read the photocell on ADC channel `adc_pin` and return the light
    level rescaled to a 0-100 value."""
    raw = readadc.readadc(adc_pin,
                          readadc.PINS.SPICLK,
                          readadc.PINS.SPIMOSI,
                          readadc.PINS.SPIMISO,
                          readadc.PINS.SPICS)
    # ADC count -> millivolts -> percentage of the 3.3 V full scale.
    millivolts = raw * (3300.0/1024.0)
    return millivolts * (100.0/3300.0)
def stream_to_plotly():
    """Continuously sample the sensors, drive the status LEDs, and push
    light-intensity and temperature readings to a live plotly graph.

    Credentials and streaming tokens come from ./config.json. Runs
    forever; never returns.
    """
    with open('./config.json') as config_file:
        plotly_user_config = json.load(config_file)
    py.sign_in(plotly_user_config["plotly_username"], plotly_user_config["plotly_api_key"])
    # One scatter trace per streaming token: light, temperature, and a
    # third trace whose write is currently commented out below.
    url = py.plot([
        {
            'x': [], 'y': [], 'type': 'scatter',
            'stream': {
                'token': plotly_user_config['plotly_streaming_tokens'][0],
                'maxpoints': 200
            }
        },
        {
            'x': [], 'y': [], 'type': 'scatter',
            'stream': {
                'token': plotly_user_config['plotly_streaming_tokens'][1],
                'maxpoints': 200
            }
        },
        {
            'x': [], 'y': [], 'type': 'scatter',
            'stream': {
                'token': plotly_user_config['plotly_streaming_tokens'][2],
                'maxpoints': 200
            }
        }], filename='Raspberry Pi Light Intensity Streaming')
    print "View your streaming graph here: ", url
    stream = py.Stream(plotly_user_config['plotly_streaming_tokens'][0])
    stream.open()
    stream1 = py.Stream(plotly_user_config['plotly_streaming_tokens'][1])
    stream1.open()
    stream2 = py.Stream(plotly_user_config['plotly_streaming_tokens'][2])
    stream2.open()
    while True:
        # Photocell is on ADC channel 1; LEDs reflect the 0-100 reading.
        light_value = get_intensity(1)
        print light_value
        if(light_value<=AMBIENT_DARK and light_value>=0):
            led_when_low();
        elif (light_value>AMBIENT_DARK and light_value<AMBIENT_BRIGHT):
            led_when_mid();
        elif(light_value>=AMBIENT_BRIGHT and light_value<=100):
            led_when_high();
        else:
            # Reading outside 0-100: flag a sensor/wiring problem.
            led_error(3);
        stream.write({'x': datetime.datetime.now(), 'y': ("%.1f" % light_value)})
        # TMP36 is on ADC channel 0; get_temp already returns a string.
        stream1.write({'x': datetime.datetime.now(), 'y': get_temp(0)})
        #stream2.write({'x': datetime.datetime.now(), 'y': read_temp(sensor2_file)[0]})
        time.sleep(1)
if __name__ == '__main__':
    setup()
    # stream_to_plotly() loops forever, so cleanup() below is only a
    # safeguard and is not reached in normal operation.
    stream_to_plotly()
    cleanup()
<file_sep>'''
Created on Nov 20, 2014
@author: ph
'''
try:
import RPi.GPIO as GPIO
except RuntimeError:
print("Error importing RPi.GPIO! This is probably because you need superuser privileges. You can achieve this by using 'sudo' to run your script")
# BCM pin addressing (no pins are actually configured in this demo).
GPIO.setmode(GPIO.BCM)
def setup():
    # Placeholder: this demo configures no channels.
    pass
def cleanup():
    # NOTE(review): despite its name, this function is a demo of
    # try/except/finally around a busy-count loop; GPIO.cleanup() in the
    # finally clause is what guarantees the clean GPIO exit.
    # initial values of variables etc...
    counter = 0
    try:
        # here you put your main loop or block of code
        while counter < 9000000:
            # count up to 9000000 - takes ~20s
            counter += 1
        print "Target reached: %d" % counter
    except KeyboardInterrupt:
        # here you put any code you want to run before the program
        # exits when you press CTRL+C
        print "\n", counter # print value of counter
    except:
        # this catches ALL other exceptions including errors.
        # You won't get any error messages for debugging
        # so only use it once your code is working
        print "Other error or exception occurred!"
    finally:
        GPIO.cleanup() # this ensures a clean exit
if __name__ == '__main__':
    setup()
    # cleanup() runs the counting demo and releases the GPIO pins.
    cleanup()<file_sep>The project contains two files,
1. MyApplication
2. PervasiveCourse_student
The first is the Android client, an Android Studio project. It can be imported into Android Studio and installed on any Android phone; we have also included the APK file, which can be installed directly.
The second is the server that runs on the Raspberry Pi. To compile it, open a console in its directory: 'ant build' compiles the sources, and 'sudo ant JSONRPCServer' starts the server.
The Android application UI has one activity. The first part of the activity shows the current temperature and ambient light of the room. The history of the room temperature is also shown at the bottom of the same page.
The second page of this activity is meant to set and delete rules. <file_sep>package test.rit.harsh.myapplication;
import android.app.Activity;
import android.app.AlertDialog;
import android.app.Dialog;
import android.content.DialogInterface;
import android.os.AsyncTask;
import android.support.v7.widget.RecyclerView;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageView;
import android.widget.TextView;
import android.widget.Toast;
import java.util.List;
/**
 * RecyclerView adapter that lists the fuzzy rules currently active on the
 * Raspberry Pi server. Tapping a row asks for confirmation, then sends a
 * "DeleteRule" JSON-RPC request in the background and removes the row.
 *
 * Clean-up: removed the unused private setDataToView() helper and the
 * unused ViewHolder.job field (dead code).
 */
public class RecyclerAdapter extends RecyclerView.Adapter<RecyclerAdapter.ViewHolder> {
    // Payload of the next background request: "DeleteRule,<ruleName>".
    public String datatoSend;
    private List<RuleGetter> rule;
    private Activity activity;

    public RecyclerAdapter(Activity activity, List<RuleGetter> rule) {
        this.rule = rule;
        this.activity = activity;
    }

    @Override
    public ViewHolder onCreateViewHolder(ViewGroup viewGroup, int viewType) {
        //inflate your layout and pass it to view holder
        LayoutInflater inflater = activity.getLayoutInflater();
        View view = inflater.inflate(R.layout.recycler, viewGroup, false);
        ViewHolder viewHolder = new ViewHolder(view);
        return viewHolder;
    }

    @Override
    public void onBindViewHolder(ViewHolder viewHolder, int position) {
        //setting data to view holder elements
        viewHolder.name.setText(rule.get(position).getName());
        //set on click listener for each element
        viewHolder.container.setOnClickListener(onClickListener(position));
    }

    /** Removes the rule at {@code position} and refreshes the list. */
    public void removeAt(int position) {
        this.rule.remove(position);
        notifyItemRemoved(position);
        // Rebind remaining rows so the positions captured by the click
        // listeners stay in sync with the shortened list.
        notifyDataSetChanged();
    }

    @Override
    public int getItemCount() {
        return (null != rule ? rule.size() : 0);
    }

    /** Click handler: confirm, then delete the tapped rule locally and remotely. */
    private View.OnClickListener onClickListener(final int position) {
        return new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                final ViewHolder newView = new ViewHolder(v);
                android.support.v7.app.AlertDialog.Builder alertDialogBuilder = new android.support.v7.app.AlertDialog.Builder(activity);
                alertDialogBuilder.setMessage("Are you sure,You wanted to delete this rule");
                alertDialogBuilder.setPositiveButton("yes", new DialogInterface.OnClickListener() {
                    @Override
                    public void onClick(DialogInterface arg0, int arg1) {
                        datatoSend = "DeleteRule" + "," + newView.name.getText();
                        //((RuleFragment)newView.getAdapter()).removeItem(position);
                        System.out.println("inside delete view" + datatoSend);
                        removeAt(position);
                        new SendJSODeleteRequest().execute();
                    }
                });
                alertDialogBuilder.setNegativeButton("No", new DialogInterface.OnClickListener() {
                    @Override
                    public void onClick(DialogInterface dialog, int which) {
                    }
                });
                android.support.v7.app.AlertDialog alertDialog = alertDialogBuilder.create();
                alertDialog.show();
            }
        };
    }

    /**
     * View holder to display each RecylerView item
     */
    protected class ViewHolder extends RecyclerView.ViewHolder {
        private ImageView imageView;
        private TextView name;
        private View container;

        public ViewHolder(View view) {
            super(view);
            imageView = (ImageView) view.findViewById(R.id.image);
            name = (TextView) view.findViewById(R.id.name);
            container = view.findViewById(R.id.card_view);
        }
    }

    /** Background task that posts the pending "DeleteRule" request. */
    class SendJSODeleteRequest extends AsyncTask<Void, String, String> {
        String response_txt;

        @Override
        protected void onPreExecute() {
        }

        @Override
        protected String doInBackground(Void... params) {
            String serverURL_text = "10.10.10.110:8080";
            //look into
            String request_method = datatoSend;
            datatoSend = "";
            Log.d("debug", "request is:" + request_method);
            response_txt = JSONHandler.testJSONRequest(serverURL_text, request_method);
            return response_txt;
        }

        protected void onProgressUpdate(Integer... progress) {
            //setProgressPercent(progress[0]);
        }

        protected void onPostExecute(String result) {
            Log.d("debug", result);
            Log.d("debug", response_txt);
        }
    }
}
<file_sep>apply plugin: 'com.android.application'
android {
    // SDK / build-tools levels this client is compiled against.
    compileSdkVersion 23
    buildToolsVersion "23.0.1"
    defaultConfig {
        applicationId "test.rit.harsh.myapplication"
        minSdkVersion 21
        targetSdkVersion 23
        versionCode 1
        versionName "1.0"
    }
    buildTypes {
        release {
            // No code shrinking; default proguard config kept for reference.
            minifyEnabled false
            proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
        }
    }
}
dependencies {
    compile fileTree(include: ['*.jar'], dir: 'libs')
    compile 'com.android.support:appcompat-v7:23.0.1'
    compile 'com.android.support:support-v4:23.0.+'
    compile 'com.android.support:recyclerview-v7:23.+'
    compile 'com.android.support:cardview-v7:23.+'
    compile 'com.android.support:design:23.0.1'
    // Bundled JSON-RPC 2.0 client/server jars used by JSONHandler and
    // JsonRPCServer (checked into the source tree).
    compile files('src/main/java/test/rit/harsh/myapplication/android-json-rpc-0.3.4.jar')
    compile files('src/main/java/test/rit/harsh/myapplication/jsonrpc2-base-1.35.jar')
    compile files('src/main/java/test/rit/harsh/myapplication/jsonrpc2-client-1.14.4.jar')
    compile files('src/main/java/test/rit/harsh/myapplication/jsonrpc2-server-1.10.1.jar')
    compile files('src/main/java/test/rit/harsh/myapplication/json-smart-1.2.jar')
}
<file_sep>'''
Created on Nov 18, 2014
@author: ph
'''
from time import sleep
try:
import RPi.GPIO as GPIO
except RuntimeError:
print("Error importing RPi.GPIO! This is probably because you need superuser privileges. You can achieve this by using 'sudo' to run your script")
# BCM (GPIO-number) pin addressing.
GPIO.setmode(GPIO.BCM)
# BCM pin numbers of the three LEDs.
LED_PIN1=16
LED_PIN2=20
LED_PIN3=21
# NOTE(review): runs at import time; on Python 2, input() eval()s the
# typed text — an integer count is expected here.
blink_time = input('Enter blink time: ')
def setup():
    # All three LED pins as outputs, initially off.
    chan_list=[LED_PIN1, LED_PIN2, LED_PIN3]
    GPIO.setup(chan_list, GPIO.OUT,initial=GPIO.LOW)
def rainbow_blink():
    """Blink the three LEDs in sequence, one pass per requested count.

    Bug fix: the original ``range(1, blink_time, 1)`` performed only
    ``blink_time - 1`` passes; ``range(blink_time)`` honours the count
    the user entered.
    """
    for _ in range(blink_time):
        for p in (LED_PIN1, LED_PIN2, LED_PIN3):
            # 0.5 s on, then off, for each LED in turn.
            GPIO.output(p, GPIO.HIGH)
            sleep(0.5)
            GPIO.output(p, GPIO.LOW)
        # Pause between passes.
        sleep(1)
def cleanup():
    # Release every GPIO channel this program configured.
    GPIO.cleanup()
if __name__ == '__main__':
    setup()
    # Blink sequence runs to completion, then pins are released.
    rainbow_blink()
    cleanup()
//The JSON-RPC 2.0 Base classes that define the
//JSON-RPC 2.0 protocol messages
import com.thetransactioncompany.jsonrpc2.JSONRPC2ParseException;
import com.thetransactioncompany.jsonrpc2.JSONRPC2Request;
import com.thetransactioncompany.jsonrpc2.JSONRPC2Response;
import com.thetransactioncompany.jsonrpc2.server.Dispatcher;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.PrintWriter;
import java.net.Socket;
//The JSON-RPC 2.0 server framework package
public class JsonRPCServer {
    /**
     * The port that the server listens on.
     */
    private static final int PORT = 2344;

    /**
     * Handler thread spawned for each accepted connection. It reads one
     * HTTP request from the socket, dispatches the JSON-RPC 2.0 call
     * carried in the POST body, writes an HTTP 200 response with the
     * JSON-RPC result, and closes the socket.
     */
    public static class Handler extends Thread {
        private Socket socket;
        private BufferedReader in;
        private PrintWriter out;
        private Dispatcher dispatcher;
        // Service callback target passed through to the request handler.
        private BGNotiService obj;
        //private int local_count;

        /**
         * Constructs a handler thread, squirreling away the socket.
         * All the interesting work is done in the run method.
         */
        public Handler(Socket socket, BGNotiService obj) {
            this.socket = socket;
            this.obj = obj;
            // Create a new JSON-RPC 2.0 request dispatcher
            this.dispatcher = new Dispatcher();
            // Register the update handler (serves the "C_change" method).
            dispatcher.register(new JSONHandler.UpdateHandler(obj));
        }

        /**
         * Reads the HTTP request line and headers, remembering the
         * Content-Length of a POST; reads exactly that many characters of
         * body; parses the body as a JSON-RPC 2.0 request; dispatches it;
         * and writes the JSON response back over HTTP.
         */
        public void run() {
            try {
                System.out.println("in jsonRPCSERVER");
                // Create character streams for the socket.
                in = new BufferedReader(new InputStreamReader(socket.getInputStream()));
                out = new PrintWriter(socket.getOutputStream(), true);
                // read request line
                String line;
                line = in.readLine();
                //System.out.println(line);
                StringBuilder raw = new StringBuilder();
                raw.append("" + line);
                boolean isPost = line.startsWith("POST");
                int contentLength = 0;
                // Consume headers until the blank separator line.
                while (!(line = in.readLine()).equals("")) {
                    //System.out.println(line);
                    raw.append('\n' + line);
                    if (isPost) {
                        final String contentHeader = "Content-Length: ";
                        if (line.startsWith(contentHeader)) {
                            contentLength = Integer.parseInt(line.substring(contentHeader.length()));
                        }
                    }
                }
                // Read exactly contentLength characters of POST body.
                StringBuilder body = new StringBuilder();
                if (isPost) {
                    int c = 0;
                    for (int i = 0; i < contentLength; i++) {
                        c = in.read();
                        body.append((char) c);
                    }
                }
                System.out.println(body.toString());
                JSONRPC2Request request = JSONRPC2Request.parse(body.toString());
                JSONRPC2Response resp = dispatcher.process(request, null);
                // send response
                out.write("HTTP/1.1 200 OK\r\n");
                out.write("Content-Type: application/json\r\n");
                out.write("\r\n");
                out.write(resp.toJSONString());
                // do not in.close();
                out.flush();
                out.close();
                socket.close();
            } catch (IOException e) {
                System.out.println(e);
            } catch (JSONRPC2ParseException e) {
                e.printStackTrace();
            } finally {
                // Closing twice is harmless; this covers the error paths.
                try {
                    socket.close();
                } catch (IOException e) {
                }
            }
        }
    }
}
Created on Nov 20, 2014
@author: ph
'''
try:
import RPi.GPIO as GPIO, time
except RuntimeError:
print("Error importing RPIO! This is probably because you need superuser privileges. You can achieve this by using 'sudo' to run your script")
# BCM (GPIO-number) pin addressing.
GPIO.setmode(GPIO.BCM)
# BCM pin numbers: PIR motion-sensor input, three LEDs, and a motor
# (motor wiring is currently commented out below).
PIR_PIN = 26
LED_PIN1=16
LED_PIN2=20
LED_PIN3=21
MOTOR_PIN=12
def setup():
    # PIR as input; the three LEDs as outputs, initially off.
    GPIO.setup(PIR_PIN, GPIO.IN)
    chan_list=[LED_PIN1, LED_PIN2, LED_PIN3]
    GPIO.setup(chan_list, GPIO.OUT, initial=GPIO.LOW)
    #GPIO.setup(MOTOR_PIN, GPIO.OUT, initial=GPIO.LOW)
def cleanup():
    # Release every GPIO channel this program configured.
    GPIO.cleanup()
def check_alarm():
    # Poll the PIR sensor forever (every 0.5 s), mirroring its state on
    # two LEDs. Never returns.
    while True:
        if GPIO.input(PIR_PIN):
            # PIR output high: idle indication on LED_PIN2.
            GPIO.output(LED_PIN3, GPIO.LOW)
            GPIO.output(LED_PIN2, GPIO.HIGH)
            #GPIO.output(MOTOR_PIN, GPIO.HIGH)
            #time.sleep(0.5)
            #GPIO.output(MOTOR_PIN, GPIO.LOW)
        else:
            # NOTE(review): the alarm fires when the PIR output is LOW;
            # many PIR modules drive the output HIGH on motion — confirm
            # this sensor's polarity.
            print "PIR alarm!"
            GPIO.output(LED_PIN2, GPIO.LOW)
            GPIO.output(LED_PIN3, GPIO.HIGH)
        time.sleep(0.5)
if __name__ == '__main__':
    setup()
    # check_alarm() loops forever, so cleanup() below is not reached in
    # normal operation.
    check_alarm()
    cleanup()
import java.io.BufferedReader;
import java.io.InputStreamReader;
import com.pi4j.io.gpio.GpioController;
import com.pi4j.io.gpio.GpioFactory;
import com.pi4j.io.gpio.GpioPinDigitalOutput;
import com.pi4j.io.gpio.PinState;
import com.pi4j.io.gpio.RaspiPin;
/* Color scheme if using tri-color LED
* yellow = R+G
* cyan = G+B
* magenta = R+B
* white = R+G+B
*/
/**
 * Interactive console demo for three LEDs on pi4j pins 27/28/29. Blinks
 * all three on startup, then toggles individual LEDs from single-letter
 * commands until the user quits.
 */
public class Rpi3ColorLED
{
    private static final BufferedReader stdin = new BufferedReader(new InputStreamReader(System.in));
    // Single shared gpio controller for the process.
    final static GpioController gpio = GpioFactory.getInstance();
    final static GpioPinDigitalOutput greenPin = gpio.provisionDigitalOutputPin(RaspiPin.GPIO_27, "green", PinState.LOW);
    final static GpioPinDigitalOutput yellowPin = gpio.provisionDigitalOutputPin(RaspiPin.GPIO_28, "yellow", PinState.LOW);
    final static GpioPinDigitalOutput redPin = gpio.provisionDigitalOutputPin(RaspiPin.GPIO_29, "red", PinState.LOW);

    /**
     * Prints a prompt and reads one line from stdin.
     *
     * Bug fix: on a read error the original retried via
     * userInput("&lt;Oooch/&gt;") but discarded the retried value and
     * returned "", making the retry useless; the retried input is now
     * returned.
     *
     * @param prompt text printed before reading
     * @return the line read, or "" if every attempt failed
     */
    public static String userInput(String prompt)
    {
        String retString = "";
        System.out.print(prompt);
        try{
            retString = stdin.readLine();
        }catch(Exception e){
            System.out.println(e);
            try{
                retString = userInput("<Oooch/>");
            }catch(Exception exception){
                exception.printStackTrace();
            }
        }
        // NOTE(review): readLine() yields null at EOF and main() calls
        // toUpperCase() on the result — confirm EOF cannot occur here.
        return retString;
    }

    /**
     * Blinks the three LEDs in sequence bc times, then switches them off.
     */
    private static void blink3LED(int bc) throws InterruptedException{
        System.out.println("Blinking all LED for "+bc+" times...");
        int count=0;
        while (count<bc){
            greenPin.toggle();
            Thread.sleep(300);
            yellowPin.toggle();
            Thread.sleep(300);
            redPin.toggle();
            Thread.sleep(300);
            count++;
        }
        LEDsOff();
    }

    /** Drives all three LEDs low. */
    private static void LEDsOff(){
        redPin.low();
        greenPin.low();
        yellowPin.low();
    }

    /**
     * Command loop: R/G/Y toggle a colour, L replays the blink sequence,
     * Q or QUIT exits; anything else is reported as unknown.
     */
    public static void main(String[] args) throws InterruptedException {
        System.out.println("GPIO Control - pin 27, 28 & 29 ... started.");
        blink3LED(5);
        boolean continue_run = true;
        while (continue_run){
            String s = userInput("R(ed), G(reen), Y(ellow), L(oop), or Q(uit) > ");
            if ("R".equals(s.toUpperCase()))
                redPin.toggle();
            else if ("G".equals(s.toUpperCase()))
                greenPin.toggle();
            else if ("Y".equals(s.toUpperCase()))
                yellowPin.toggle();
            else if ("L".equals(s.toUpperCase()))
                blink3LED(5);
            else if ("QUIT".equals(s.toUpperCase()) || "Q".equals(s.toUpperCase()))
                continue_run = false;
            else
                System.out.println("Unknown command [" + s + "]");
        }
        // Switch them off
        LEDsOff();
        // stop all GPIO activity/threads by shutting down the GPIO controller
        // (this method will forcefully shutdown all GPIO monitoring threads and scheduled tasks)
        gpio.shutdown();
    }
}
<file_sep>'''
Created on Nov 20, 2014
@author: ph
'''
try:
import RPi.GPIO as GPIO, time, os
except RuntimeError:
print("Error importing RPi.GPIO! This is probably because you need superuser privileges. You can achieve this by using 'sudo' to run your script")
# BCM (GPIO-number) pin addressing.
GPIO.setmode(GPIO.BCM)
# BCM pin numbers: three LEDs plus the photocell RC-charge pin.
LED_PIN1=16
LED_PIN2=20
LED_PIN3=21
Photocell_PIN=13
def setup():
    # LEDs as outputs (off); photocell pin starts as a plain input.
    chan_list=[LED_PIN1, LED_PIN2, LED_PIN3]
    GPIO.setup(chan_list, GPIO.OUT,initial=GPIO.LOW)
    GPIO.setup(Photocell_PIN, GPIO.IN)
def cleanup():
    # Release every GPIO channel this program configured.
    GPIO.cleanup()
# code for getting a range of value by measuring the time it takes
# to charge a capacitor
def RCtime():
    # Charge-time measurement: drive the pin low to discharge the RC
    # capacitor, then count loop iterations until the pin reads high
    # again. Larger counts correspond to higher photocell resistance.
    reading=0;
    GPIO.setup(Photocell_PIN, GPIO.OUT)
    GPIO.output(Photocell_PIN, GPIO.LOW)
    time.sleep(0.1)
    GPIO.setup(Photocell_PIN, GPIO.IN)
    # NOTE(review): loops forever if the pin never reads high (e.g. a
    # wiring fault) — consider an iteration cap.
    while (GPIO.input(Photocell_PIN) == GPIO.LOW):
        reading+=1
    return reading
# a digital read function; it output 0 or 1 depending on whether the
# resistance is above or below the threshold
def digital_read():
    # Poll the photocell pin forever (every 0.5 s), mirroring its
    # digital state on two LEDs. Never returns.
    while True:
        if GPIO.input(Photocell_PIN):
            GPIO.output(LED_PIN2, GPIO.LOW)
            GPIO.output(LED_PIN1, GPIO.HIGH)
        else:
            GPIO.output(LED_PIN2, GPIO.HIGH)
            GPIO.output(LED_PIN1, GPIO.LOW)
        time.sleep(0.5)
if __name__ == '__main__':
    setup()
    #digital_read()
    # NOTE(review): the loop below never exits, so cleanup() is
    # unreachable; consider a try/except KeyboardInterrupt wrapper.
    while True:
        print RCtime()
    cleanup()<file_sep>#!/bin/python
'''
Created on Nov 18, 2014
@author: ph
'''
try:
import RPi.GPIO as GPIO
except RuntimeError:
print("Error importing RPi.GPIO! This is probably because you need superuser privileges. You can achieve this by using 'sudo' to run your script")
# NOTE(review): all of this runs at import time; the __main__ guard at
# the bottom is an empty placeholder. On Python 2, input() eval()s the
# typed text, so an integer pin number is expected.
pin = input('Enter pin: ')
GPIO.setmode(GPIO.BCM)
# Query which function (IN/OUT/SPI/I2C/...) the pin is currently set to.
func = GPIO.gpio_function(pin)
print ("Pin %d in mode: " % pin)
if func == GPIO.IN:
    print "INPUT"
elif func == GPIO.OUT:
    print "OUTPUT"
elif func == GPIO.SPI:
    print "SPI"
elif func == GPIO.I2C:
    print "I2C"
elif func == GPIO.HARD_PWM:
    print "Hard_PWN"
elif func == GPIO.SERIAL:
    print "Serial"
elif func == GPIO.UNKNOWN:
    print "Unknown"
if __name__ == '__main__':
    pass<file_sep>package test.rit.harsh.myapplication;
import android.util.Log;
import com.thetransactioncompany.jsonrpc2.JSONRPC2Error;
import com.thetransactioncompany.jsonrpc2.JSONRPC2Request;
import com.thetransactioncompany.jsonrpc2.JSONRPC2Response;
import com.thetransactioncompany.jsonrpc2.client.JSONRPC2Session;
import com.thetransactioncompany.jsonrpc2.client.JSONRPC2SessionException;
import com.thetransactioncompany.jsonrpc2.server.MessageContext;
import com.thetransactioncompany.jsonrpc2.server.RequestHandler;
import net.minidev.json.JSONArray;
import net.minidev.json.JSONObject;
import java.net.InetAddress;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.UnknownHostException;
import java.text.DateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
public class JSONHandler {
public static JSONRPC2Request request;
public static String testJSONRequest(String server_URL_text, String method) {
// Creating a new session to a JSON-RPC 2.0 web service at a specified URL
Log.d("Debug serverURL", server_URL_text);
// The JSON-RPC 2.0 server URL
URL serverURL = null;
try {
serverURL = new URL("http://" + server_URL_text);
} catch (MalformedURLException e) {
// handle exception...
}
// Create new JSON-RPC 2.0 client session
JSONRPC2Session mySession = new JSONRPC2Session(serverURL);
// Construct new request
int requestID = 0;
if (method.contains(",")) {
// rules list for adding parameters
List<Object> rules = new ArrayList<Object>();
String newmethod[] = method.split(",");
rules.add(newmethod[1]);
// JSONRPC@Request for handling parametes
request = new JSONRPC2Request(newmethod[0], rules, requestID);
} else {
request = new JSONRPC2Request(method, requestID);
}
// Send request
JSONRPC2Response response = null;
try {
response = mySession.send(request);
} catch (JSONRPC2SessionException e) {
Log.e("error", e.getMessage().toString());
// handle exception...
}
// Print response result / error
if (response.indicatesSuccess())
Log.d("debug", response.getResult().toString());
else
Log.e("error", response.getError().getMessage().toString());
return response.getResult().toString();
}
public static class UpdateHandler implements RequestHandler {
private static BGNotiService obj;
UpdateHandler(BGNotiService obj) {
this.obj = obj;
}
// Reports the method names of the handled requests
public String[] handledRequests() {
return new String[]{"C_change"};
}
// Processes the requests
public JSONRPC2Response process(JSONRPC2Request req, MessageContext ctx) {
JSONArray incoming = (JSONArray) req.getParams();
String hostname = "unknown";
try {
hostname = InetAddress.getLocalHost().getHostName();
} catch (UnknownHostException e) {
e.printStackTrace();
}
if (req.getMethod().equals("C_change")) {
Log.d("msg from process", "change reveived" + incoming.toString());
String temp = incoming.toJSONString().replace("[", "").replace("]", "");
obj.onUpdate(temp);
return new JSONRPC2Response(hostname + "success", req.getID());
} else {
// Method name not supported
return new JSONRPC2Response(JSONRPC2Error.METHOD_NOT_FOUND, req.getID());
}
}
}
}
<file_sep>package test.rit.harsh.myapplication;
import android.app.NotificationManager;
import android.app.PendingIntent;
import android.app.Service;
import android.app.TaskStackBuilder;
import android.content.Context;
import android.content.Intent;
import android.os.IBinder;
import android.support.annotation.Nullable;
import android.support.v4.app.NotificationCompat;
import android.support.v4.content.LocalBroadcastManager;
import android.widget.Toast;
import java.io.IOException;
import java.net.ServerSocket;
/**
* Created by <NAME> on 10/7/2015.
*/
/**
 * Background service that hosts the embedded JSON-RPC server and raises a
 * notification / local broadcast whenever the Pi pushes a temperature
 * change of more than 2 degrees.
 */
public class BGNotiService extends Service {
    // Last temperature received; used to suppress notifications for
    // changes of 2 degrees or less.
    private int current_temp = 0;

    @Override
    public void onCreate() {
        super.onCreate();
    }

    @Override
    public void onDestroy() {
        super.onDestroy();
    }

    /** Starts the listener thread; START_STICKY keeps the service alive. */
    public int onStartCommand(Intent intent, int flags, int startId) {
        new Thread(new serverStarter(this)).start();
        Toast.makeText(this, "Service Started", Toast.LENGTH_SHORT).show();
        return START_STICKY;
    }

    /*
     * Displays notification in the notification bar
     *
     */
    private void displayNotification(String s) {
        // s is a "temp,ambient" pair as forwarded by onUpdate().
        String[] display = s.split(",");
        NotificationCompat.Builder mBuilder =
                new NotificationCompat.Builder(this)
                        .setSmallIcon(R.mipmap.ic_launcher)
                        .setContentTitle("Smart Blind Update")
                        .setContentText("Temp:" + display[0] + "F" + " Ambient:" + display[1]);
        // Creates an explicit intent for an Activity in your app
        Intent resultIntent = new Intent(this, MainActivity.class);
        TaskStackBuilder stackBuilder = TaskStackBuilder.create(this);
        // Adds the back stack for the Intent (but not the Intent itself)
        stackBuilder.addParentStack(MainActivity.class);
        // Adds the Intent that starts the Activity to the top of the stack
        stackBuilder.addNextIntent(resultIntent);
        PendingIntent resultPendingIntent =
                stackBuilder.getPendingIntent(
                        0,
                        PendingIntent.FLAG_UPDATE_CURRENT
                );
        mBuilder.setContentIntent(resultPendingIntent);
        NotificationManager mNotificationManager =
                (NotificationManager) getSystemService(Context.NOTIFICATION_SERVICE);
        // mId allows you to update the notification later on.
        mNotificationManager.notify(0, mBuilder.build());
    }

    @Nullable
    @Override
    public IBinder onBind(Intent intent) {
        // Not a bound service.
        return null;
    }

    /**
     * Called by the JSON-RPC handler with a "temp,ambient" payload.
     * Broadcasts the value and raises a notification when the temperature
     * moved by more than 2 degrees since the last update.
     */
    public void onUpdate(String s) {
        Intent i = new Intent("test.rit.harsh.myapplication.BGNotiService.RECEIVE_JSON");
        i.putExtra("value", s);
        String[] change = s.split(",");
        int previous_temp = current_temp;
        System.out.println(previous_temp);
        // NOTE(review): parseInt throws NumberFormatException if the
        // server ever sends a non-numeric temperature — confirm payload.
        current_temp = Integer.parseInt(change[0].replace("\"", ""));
        if (current_temp < ((previous_temp) - 2) || (current_temp > (previous_temp + 2))) {
            LocalBroadcastManager.getInstance(this).sendBroadcast(i);
            displayNotification(s);
        }
    }

    /*
     * Class which creates a new server instance in the background service
     *
     */
    public class serverStarter implements Runnable {
        BGNotiService obj;

        public serverStarter(BGNotiService obj) {
            this.obj = obj;
        }

        private static final int PORT = 2344;

        @Override
        public void run() {
            try {
                // Accept loop: one Handler thread per incoming connection.
                ServerSocket listener = new ServerSocket(PORT);
                System.out.println("Server is listening on " + listener.getLocalPort());
                while (true) {
                    new JsonRPCServer.Handler(listener.accept(), obj).start();
                }
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }
}
<file_sep>/**
* @author <NAME>
* @author <NAME>
* @author <NAME>
*/
package edu.rit.csci759.fuzzylogic;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import net.minidev.json.JSONArray;
import net.sourceforge.jFuzzyLogic.FIS;
import net.sourceforge.jFuzzyLogic.FunctionBlock;
import net.sourceforge.jFuzzyLogic.rule.Rule;
import net.sourceforge.jFuzzyLogic.rule.RuleBlock;
import net.sourceforge.jFuzzyLogic.rule.RuleExpression;
import net.sourceforge.jFuzzyLogic.rule.RuleTerm;
import net.sourceforge.jFuzzyLogic.ruleConnectionMethod.RuleConnectionMethodAndMin;
import net.sourceforge.jFuzzyLogic.ruleConnectionMethod.RuleConnectionMethodOrBoundedSum;
public class BlindMonitor {
// Fuzzy inference system loaded from FuzzyLogic/blinder.fcl.
FIS fis;
static FunctionBlock function_block;
// The "No1" rule block cached by the constructor.
static RuleBlock rule_block;
Double currentTemp;
Double currentAmbient;
// Rule blocks of the function block, keyed by name.
private static HashMap<String, RuleBlock> ruleBlockMap;
private static List<Rule> rulesList;
private String filename = "";
/*
* public static void main(String[] args) throws Exception { //String
* filename = "FuzzyLogic/tipper.fcl"; FIS fis = FIS.load(filename, true);
*
* JSONArray array = null;
*
* MyTipperClass tipperobj= new MyTipperClass();
*
* tipperobj.setRule(array); tipperobj.deleteRule(array);
* tipperobj.setRule(array); }
*/
/**
* Constructor It initiates the function block and the rule block with the
* file blinder.fcl
*/
/**
 * Loads the fuzzy control file and caches the default function block,
 * its "No1" rule block, and the rule-block map.
 */
public BlindMonitor() {
    // loading the fuzzy file
    filename = "FuzzyLogic/blinder.fcl";
    fis = FIS.load(filename, true);
    // call the function block and ruleblock
    function_block = fis.getFunctionBlock(null);
    rule_block = function_block.getFuzzyRuleBlock("No1");
    ruleBlockMap=function_block.getRuleBlocks();
}
/**
* Sets the temperature
*
* @param temp
* double value assigned from the GPIO pin readings
*/
public void setTemp(double temp) {
    // Feed the reading into the fuzzy system's "temp" input variable.
    currentTemp = temp;
    System.out.println("Temp: " + temp);
    function_block.setVariable("temp", currentTemp);
}
/**
* Sets the ambient
*
* @param ambient
* double value assigned from the GPIO pin readings
*/
public void setAmbient(double ambient) {
    // Feed the reading into the fuzzy system's "ambient" input variable.
    currentAmbient = ambient;
    System.out.println("Ambient: " + ambient);
    function_block.setVariable("ambient", currentAmbient);
}
/**
 * Evaluates the fuzzy system and classifies the current ambient light.
 *
 * NOTE(review): despite the name, this method inspects the "ambient"
 * variable's membership degrees, not temperature.
 *
 * @return "DIM", "BRIGHT", or "DARK" — whichever membership degree is
 *         strongest; ties fall through to "DARK"
 */
public String TempPos() {
    function_block.evaluate();
    double dim = function_block.getVariable("ambient").getMembership("dim");
    System.out.println(dim);
    double bright = function_block.getVariable("ambient").getMembership(
            "bright");
    System.out.println(bright);
    double dark = function_block.getVariable("ambient").getMembership(
            "dark");
    System.out.println(dark);
    if (dim > bright) {
        if (dim > dark) {
            System.out.println("DIM");
            return "DIM";
        } else {
            System.out.println("DARK");
            return "DARK";
        }
    } else if (bright > dark) {
        System.out.println("BRIGHT");
        return "BRIGHT";
    } else {
        System.out.println("DARK");
        return "DARK";
    }
}
/**
* sets the rule block with the rules sent by the client
*
* @param array
* the array of rules sent by the user
* @return returns a true if the rules were successfully added
*/
/**
 * sets the rule block with the rules sent by the client
 *
 * @param array the rule sent by the user, serialized inside a JSON array
 *              as "term1:connector:term2:consequent"
 * @return true if the rule was added; false if it was rejected as a
 *         conflict or duplicate
 */
public boolean setRule(JSONArray array) {
    // System.out.println("inside setrule");
    String temp = array.toJSONString();
    rulesList = rule_block.getRules();
    // Strip the JSON array decoration down to the raw colon-separated rule.
    temp = temp.replace("[", "").replace("]", "").replace("\"", "");
    temp = temp.replace("\\", "");
    if (fis == null) {
        System.err.println("Can't load file: '" + filename + "'");
        System.exit(1);
    }
    String[] ruleList = temp.split(":");
    // check if it is a conflicting rule
    // NOTE(review): this compares RuleTerm objects to Strings via
    // equals() (always false) and reads getTerm1() for both operands,
    // so the conflict check as written can never fire — confirm intent.
    for (Rule r : fis.getFunctionBlock("tipper").getFuzzyRuleBlock("No1")
            .getRules()) {
        if (r.getAntecedents().getTerm1().equals(ruleList[0])
                && r.getAntecedents().getTerm1().equals(ruleList[2])) {
            System.out.println("Conflicting rule added");
            return false;
        }
    }
    // Build the new rule: antecedent over "temp" and "ambient",
    // consequent on "blind".
    Rule rule = new Rule(
            Integer.toString((rule_block.getRules().size()) + 1),
            rule_block);
    RuleTerm term1 = new RuleTerm(function_block.getVariable("temp"),
            ruleList[0], false);
    RuleTerm term2 = new RuleTerm(function_block.getVariable("ambient"),
            ruleList[2], false);
    RuleExpression antecedent;
    // "and" -> AND(min); anything else -> OR(bounded sum).
    if (ruleList[1].equalsIgnoreCase("and")) {
        antecedent = new RuleExpression(term1, term2,
                RuleConnectionMethodAndMin.get());
    } else {
        antecedent = new RuleExpression(term1, term2,
                RuleConnectionMethodOrBoundedSum.get());
    }
    rule.setAntecedents(antecedent);
    rule.addConsequent(function_block.getVariable("blind"), ruleList[3],
            false);
    // Duplicate detection by comparing whitespace-stripped FCL text.
    // NOTE(review): `formatter` (the new rule's text) is overwritten with
    // each existing rule's own text before the contains() check, so each
    // rule is compared against itself — confirm the duplicate check works.
    String formatter = rule.toStringFcl().replace(" ", "");
    for (int i = 0; i < rulesList.size(); i++) {
        if (rulesList.get(i) != null) {
            formatter = "";
            formatter = rulesList.get(i).toStringFcl();
            formatter = formatter.replace('(', ' ').replace(')', ' ')
                    .replace(" ", "");
            if (rulesList.get(i).toStringFcl().contains(formatter)) {
                System.out.println("Rule to be added already exists!!");
                return false;
            }
        }
    }
    rule_block.add(rule);
    // Push the updated rule block back into the FIS under "tipper".
    ruleBlockMap = function_block.getRuleBlocks();
    ruleBlockMap.put("No1", rule_block);
    function_block.setRuleBlocks(ruleBlockMap);
    fis.addFunctionBlock("tipper", function_block);
    for (Rule r : fis.getFunctionBlock("tipper").getFuzzyRuleBlock("No1")
            .getRules()) {
        System.out.println(r);
    }
    return true;
}
/**
* This function deletes the rules that user wishes to remove
*
* @param array
*/
/**
 * This function deletes the rules that user wishes to remove
 *
 * @param array the rule to delete, serialized inside a JSON array in the
 *              same "term1:connector:term2:consequent" form used by setRule
 */
public synchronized void deleteRule(JSONArray array) {
    String temp = array.toJSONString();
    //System.out.println(temp);
    // Strip the JSON array decoration down to the raw colon-separated rule.
    temp = temp.replace("[", "").replace("]", "").replace("\"", "");
    temp = temp.replace("\\", "");
    String[] deleteRuleList = temp.split(":");
    //rule_block = function_block.getFuzzyRuleBlock("No1");
    // Rebuild the rule being deleted so its FCL text can be matched.
    Rule rule = new Rule(Integer.toString(ruleBlockMap.size()), rule_block);
    RuleTerm term1 = new RuleTerm(function_block.getVariable("temp"),
            deleteRuleList[0], false);
    RuleTerm term2 = new RuleTerm(function_block.getVariable("ambient"),
            deleteRuleList[2], false);
    RuleExpression antecedent;
    if (deleteRuleList[1].equalsIgnoreCase("and")) {
        antecedent = new RuleExpression(term1, term2,
                RuleConnectionMethodAndMin.get());
    } else {
        antecedent = new RuleExpression(term1, term2,
                RuleConnectionMethodOrBoundedSum.get());
    }
    rule.setAntecedents(antecedent);
    rule.addConsequent(function_block.getVariable("blind"),
            deleteRuleList[3], false);
    String delRule = rule.toStringFcl();
    delRule = delRule.replace(" ", "");
    rulesList = rule_block.getRules();
    String formatter;
    // Remove every stored rule whose stripped FCL text contains the
    // target rule's text.
    // NOTE(review): removing by index while iterating forward skips the
    // element after each removal — confirm multiple matches cannot occur.
    for (int i = 0; i < rulesList.size(); i++) {
        if (rulesList.get(i) != null) {
            formatter = "";
            formatter = rulesList.get(i).toStringFcl();
            formatter = formatter.replace('(', ' ').replace(')', ' ')
                    .replace(" ", "");
            if (formatter.contains(delRule)) {
                rulesList.remove(i);
            }
        }
    }
    // update the FIS object with the new rules list after deletion of a
    // single rule from it.
    rule_block.setRules(rulesList);
    rule_block.reset();
    ruleBlockMap = function_block.getRuleBlocks();
    ruleBlockMap.put("No1", rule_block);
    function_block.setRuleBlocks(ruleBlockMap);
    fis.addFunctionBlock("tipper", function_block);
}
/**
* This function is used to send the rules to the user when the android
* activity is open after destroying
*
* @return returns a list of all the rules that is set by the user
*/
public List<String> sendRules() {
List<String> send = new ArrayList<>();
List<Rule> rules = fis.getFunctionBlock("tipper")
.getFuzzyRuleBlock("No1").getRules();
for (Rule r : fis.getFunctionBlock("tipper").getFuzzyRuleBlock("No1")
.getRules()) {
String s = r.getAntecedents().getTerm1().toString().split("\\ ")[2]
+ ":"
+ r.getAntecedents().getRuleConnectionMethod().toString().replace(" : MIN;", "")
+ ":"
+ r.getAntecedents().getTerm2().toString().split("\\ ")[2]
+ ":"
+ r.getConsequents().toString().replace("[", "")
.replace("]", "").split("\\ ")[2];
send.add(s);
System.out.println(s);
}
return send;
}
/**
* This evaluates the the rules according to the time and ambient value and
* finds the highest degree of support to find out the blind position
*
* @return returns a string of the blind position
*/
public String blindPos() {
function_block.evaluate();
String cons = null;
double degree = 0;
// check for the degree
for (Rule r : fis.getFunctionBlock("tipper").getFuzzyRuleBlock("No1")
.getRules()) {
// System.out.println("Inside for");
if (r.getDegreeOfSupport() > degree) {
degree = r.getDegreeOfSupport();
// System.out.println(degree);
cons = r.getConsequents().toString();
}
}
double pos = function_block.getVariable("blind").defuzzify();
// System.out.println("Defuzzify: " + pos);
if (cons == null) {
return "no rule";
}
return cons;
}
}
<file_sep>'''
Created on Nov 20, 2014
@author: ph
'''
try:
import RPi.GPIO as GPIO, time
except RuntimeError:
print("Error importing RPi.GPIO! This is probably because you need superuser privileges. You can achieve this by using 'sudo' to run your script")
import plotly.plotly as py
import json
import datetime
import os
import glob
os.system('modprobe w1-gpio')
os.system('modprobe w1-therm')
base_dir = '/sys/bus/w1/devices/'
sensor1_folder = glob.glob(base_dir + '28*')[0]
sensor1_file = sensor1_folder + '/w1_slave'
sensor2_folder = glob.glob(base_dir + '28*')[1]
sensor2_file = sensor2_folder + '/w1_slave'
GPIO.setmode(GPIO.BCM)
LED_PIN1=16
LED_PIN2=20
LED_PIN3=21
Photocell_PIN=13
Temperature_PIN=4
def setup():
chan_list=[LED_PIN1, LED_PIN2, LED_PIN3]
GPIO.setup(chan_list, GPIO.OUT,initial=GPIO.LOW)
GPIO.setup(Photocell_PIN, GPIO.IN)
GPIO.setup(Temperature_PIN, GPIO.IN)
def cleanup():
GPIO.cleanup()
# code for getting a range of value by measuring the time it takes
# to charge a capacitor
def RCtime():
reading=0;
GPIO.setup(Photocell_PIN, GPIO.OUT)
GPIO.output(Photocell_PIN, GPIO.LOW)
time.sleep(0.1)
GPIO.setup(Photocell_PIN, GPIO.IN)
while (GPIO.input(Photocell_PIN) == GPIO.LOW):
reading+=1
return reading
def translate(value, leftMin, leftMax, rightMin, rightMax):
print value
# Figure out how 'wide' each range is
leftSpan = leftMax - leftMin
rightSpan = rightMax - rightMin
# Convert the left range into a 0-1 range (float)
valueScaled = float(value - leftMin) / float(leftSpan)
# Convert the 0-1 range into a value in the right range.
return rightMin + (valueScaled * rightSpan)
def read_temp_raw(sensor_file):
f = open(sensor_file, 'r')
lines = f.readlines()
f.close()
return lines
def read_temp(sensor_file):
lines = read_temp_raw(sensor_file)
while lines[0].strip()[-3:] != 'YES':
time.sleep(0.2)
lines = read_temp_raw()
equals_pos = lines[1].find('t=')
if equals_pos != -1:
temp_string = lines[1][equals_pos+2:]
temp_c = float(temp_string) / 1000.0
temp_f = temp_c * 9.0 / 5.0 + 32.0
return temp_c, temp_f
def stream_to_plotly():
with open('./config.json') as config_file:
plotly_user_config = json.load(config_file)
py.sign_in(plotly_user_config["plotly_username"], plotly_user_config["plotly_api_key"])
url = py.plot([
{
'x': [], 'y': [], 'type': 'scatter',
'stream': {
'token': plotly_user_config['plotly_streaming_tokens'][0],
'maxpoints': 200
}
},
{
'x': [], 'y': [], 'type': 'scatter',
'stream': {
'token': plotly_user_config['plotly_streaming_tokens'][1],
'maxpoints': 200
}
},
{
'x': [], 'y': [], 'type': 'scatter',
'stream': {
'token': plotly_user_config['plotly_streaming_tokens'][2],
'maxpoints': 200
}
}], filename='Raspberry Pi Light Intensity Streaming')
print "View your streaming graph here: ", url
stream = py.Stream(plotly_user_config['plotly_streaming_tokens'][0])
stream.open()
stream1 = py.Stream(plotly_user_config['plotly_streaming_tokens'][1])
stream1.open()
stream2 = py.Stream(plotly_user_config['plotly_streaming_tokens'][2])
stream2.open()
# sensor_data = readadc.readadc(Photocell_PIN,
# readadc.PINS.SPICLK,
# readadc.PINS.SPIMOSI,
# readadc.PINS.SPIMISO,
# readadc.PINS.SPICS)
LED_ON=False
while True:
if LED_ON:
GPIO.output(LED_PIN1, GPIO.HIGH)
LED_ON=True
else:
GPIO.output(LED_PIN1, GPIO.LOW)
LED_ON=False
stream.write({'x': datetime.datetime.now(), 'y': translate(RCtime(), 0, 700000, 0, 200)})
stream1.write({'x': datetime.datetime.now(), 'y': read_temp(sensor1_file)[0]})
stream2.write({'x': datetime.datetime.now(), 'y': read_temp(sensor2_file)[0]})
time.sleep(5)
if __name__ == '__main__':
setup()
stream_to_plotly()
cleanup()
<file_sep>#include <Servo.h>
#define MAXLEVEL 255
#define MINLEVEL 0
#define LOWLEVEL 85
#define HIGHLEVEL 170
// Some servo can go from 0 to 180 degree, so we need to limit the angle
#define SERVO_LOWER_BOUND 0
#define SERVO_UPPER_BOUND 160
#define button1 13
#define button2 14
#define button3 15
#define scroller 29
static int d0 = 0;
static int d1 = 1;
static int d2 = 2;
static int d3 = 3;
static int d4 = 4;
static int d5 = 5;
int motorPin1 = 1; // One motor wire connected to digital pin 4
int motorPin2 = 2; // One motor wire connected to digital pin 5
Servo myservo;
int previousLightIntensity=0;
void setup()
{
// initialize serial communication
Serial.begin(57600);
Serial.setTimeout(25);
// Digital pins
pinMode(d0, INPUT_PULLUP);
//pinMode(d1, INPUT_PULLUP);
//pinMode(d2, INPUT_PULLUP);
pinMode(motorPin1, OUTPUT);
pinMode(motorPin2, OUTPUT);
//pinMode(d3, OUTPUT);
pinMode(d4, INPUT_PULLUP);
pinMode(d5, INPUT_PULLUP);
// initialize the digital pins as an output:
// attaches the servo on pin 3 to the servo object
myservo.attach(3);
//myservo.write(0); //set servo to init point
}
void all_on()
{
Bean.setLed(255, 255, 255);
}
void all_off()
{
Bean.setLed(0, 0, 0);
}
void blink_red()
{
Bean.setLed(255, 0, 0);
Bean.sleep(500);
Bean.setLed(0, 0, 0);
}
void blink_green()
{
Bean.setLed(0, 255, 0);
Bean.sleep(500);
Bean.setLed(0, 0, 0);
}
void blink_blue()
{
Bean.setLed(0, 0, 255);
Bean.sleep(500);
Bean.setLed(0, 0, 0);
}
void blink_3_color()
{
Bean.setLed(255, 0, 0);
Bean.sleep(500);
Bean.setLed(0, 255, 0);
Bean.sleep(500);
Bean.setLed(0, 0, 255);
Bean.sleep(500);
}
void fade_3_color()
{
int r,g,b;
for(int i=0;i<=30;i++){
r=random(0,255);
g=random(0,255);
b=random(0,255);
Bean.setLed(r, g, b);
Bean.sleep(10);
}
}
void blink_two_loop()
{
Bean.setLed(255, 0, 0);
Bean.sleep(500);
Bean.setLed(0, 255, 0);
Bean.sleep(500);
Bean.setLed(0, 0, 255);
Bean.sleep(500);
Bean.setLed(0, 255, 0);
Bean.sleep(500);
Bean.setLed(255, 0, 0);
Bean.sleep(500);
}
//void rotateLeft(int speedOfRotate, int length){
// analogWrite(motorPin1, speedOfRotate); //rotates motor
// digitalWrite(motorPin2, LOW); // set the Pin motorPin2 LOW
// delay(length); //waits
// digitalWrite(motorPin1, LOW); // set the Pin motorPin1 LOW
//}
//
//void rotateRight(int speedOfRotate, int length){
// analogWrite(motorPin2, speedOfRotate); //rotates motor
// digitalWrite(motorPin1, LOW); // set the Pin motorPin1 LOW
// delay(length); //waits
// digitalWrite(motorPin2, LOW); // set the Pin motorPin2 LOW
//}
//
//void rotateLeftFull(int length){
// digitalWrite(motorPin1, HIGH); //rotates motor
// digitalWrite(motorPin2, LOW); // set the Pin motorPin2 LOW
// delay(length); //waits
// digitalWrite(motorPin1, LOW); // set the Pin motorPin1 LOW
//}
//
//void rotateRightFull(int length){
// digitalWrite(motorPin2, HIGH); //rotates motor
// digitalWrite(motorPin1, LOW); // set the Pin motorPin1 LOW
// delay(length); //waits
// digitalWrite(motorPin2, LOW); // set the Pin motorPin2 LOW
//}
void turn_servo(int degree, int speedOfTurn)
{
myservo.write(degree); // sets the servo position according to the scaled value
delay(speedOfTurn);
}
void servo_sweep(){
for(int pos = SERVO_LOWER_BOUND; pos < SERVO_UPPER_BOUND; pos += 1) // goes from 0 degrees to 180 degrees
{ // in steps of 1 degree
myservo.write(pos); // tell servo to go to position in variable 'pos'
delay(5); // waits 15ms for the servo to reach the position
}
for(int pos = SERVO_UPPER_BOUND; pos>=SERVO_LOWER_BOUND; pos-=1) // goes from 180 degrees to 0 degrees
{
myservo.write(pos); // tell servo to go to position in variable 'pos'
delay(5); // waits 15ms for the servo to reach the position
}
}
// the loop routine runs over and over again forever:
void loop()
{
char buffer[64];
size_t readLength = 64;
uint8_t length = 0;
int analog0 = analogRead(A0);
int lightIntensity = map(analog0, 0, 1023, 0, 254);
int analog1 = analogRead(A1);
// intensity indicator
if (lightIntensity > MAXLEVEL) {
blink_red();
} else if(lightIntensity < MINLEVEL) {
blink_green();
} else if (lightIntensity >= HIGHLEVEL) {
Bean.setLed(0, 255, 0);
} else if (lightIntensity <= LOWLEVEL) {
Bean.setLed(255, 0, 0);
} else if (lightIntensity > LOWLEVEL && lightIntensity< HIGHLEVEL) {
Bean.setLed(250, 129, 0);
} else {
blink_blue();
}
// servo_sweep();
if(lightIntensity < MAXLEVEL && lightIntensity > MINLEVEL){
if(abs(lightIntensity-previousLightIntensity)>10){
int servoPos=map(lightIntensity, 0, 254, SERVO_LOWER_BOUND, SERVO_UPPER_BOUND);
turn_servo(servoPos, 1);
}
previousLightIntensity=lightIntensity;
}
length = Serial.readBytes(buffer, readLength);
// Return all the serial pins
// All of these features other than the digital/analog pin inputs are performed
// behind-the-scenes using our Apple Bean SDK. The digital/analog pin inputs are
// read from a sketch that’s uploaded to the Bean. These pins use a simple command/response
// structure. The client (iPhone) sends the decimal value ‘2’ to request the pin states.
// After receiving this command, the Arduino probes the pins and sends the following info back to the client:
// byte 0: 0x82 – Response to 0x02 (decimal value ‘2’ in hexadecimal)
// byte 1: the digital pin values as bits b0 to b5 in the order d0 to d5
// byte 2: the low-byte of analog pin 1
// byte 3: the high-byte of analog pin 1
// byte 4: the low-byte of analog pin 2
// byte 5: the high-byte of analog pin 2
if ( 0x02 == buffer[0] && 1 == length)
{
//pinMode(d0, INPUT_PULLUP); //PD6
//pinMode(d1, INPUT_PULLUP); //PB1
//pinMode(d2, INPUT_PULLUP); //PB2
//pinMode(d3, INPUT_PULLUP);
pinMode(d4, INPUT_PULLUP);
pinMode(d5, INPUT_PULLUP);
int digital0 = 0;//digitalRead(d0);
int digital1 = 0; //digitalRead(d1);
int digital2 = 0; // digitalRead(d2);
int digital3 = 0; //digitalRead(d3);
int digital4 = digitalRead(d4);
int digital5 = digitalRead(d5);
uint8_t digitalAll = digital0 | ( digital1 << 1 ) | ( digital2 << 2 ) | ( digital3 << 3);
digitalAll |= ( digital4 << 4 ) | ( digital5 << 5 );
buffer[0] = 0x82;
buffer[1] = digitalAll;
buffer[2] = analog0 & 0xFF;
buffer[3] = analog0 >> 8;
buffer[4] = analog1 & 0xFF;
buffer[5] = analog1 >> 8;
Serial.write((uint8_t*)buffer, 6);
}
boolean led_displayed=false;
// Controlling from iphone app
if ( length > 0 ) {
for (int i = 0; i < length - 1; i += 2 ) {
// Check if button1 has been pressed or released...
if ( buffer[i] == button1 ) {
// If the button is held down, buffer[i+1] will be 0
// If it's released, buffer[i+1] is 1
// Set pin to 1 when the button is held down
// and to 0 when released
//analogWrite(3,255);
if(!led_displayed && 1-buffer[i+1] == 1) {
blink_3_color();
}
} else if ( buffer[i] == button2 ){
digitalWrite(4,1-buffer[i+1]);
if(!led_displayed && 1-buffer[i+1] == 1) {
fade_3_color();
}
} else if ( buffer[i] == button3 ) {
digitalWrite(5,1-buffer[i+1]);
if(!led_displayed && 1-buffer[i+1] == 1) {
blink_two_loop();
}
} else if (buffer[i] == scroller) {
//controlling a servo
//turn_servo(buffer[i+1]);
//rotating two-way motor
// if(buffer[i+1]<=255/2)
// rotateLeft(buffer[i+1], 1000);
// else
// rotateRight(buffer[i+1]-255/2, 1000);
}
led_displayed=true;
}
}
Bean.sleep(300); // sleep for a bit
}
<file_sep>'''
Created on Nov 20, 2014
@author: ph
'''
try:
import RPi.GPIO as GPIO, time
except RuntimeError:
print("Error importing RPi.GPIO! This is probably because you need superuser privileges. You can achieve this by using 'sudo' to run your script")
import plotly.plotly as py
import json
import datetime
GPIO.setmode(GPIO.BCM)
LED_PIN1=16
LED_PIN2=20
LED_PIN3=21
Photocell_PIN=13
def setup():
chan_list=[LED_PIN1, LED_PIN2, LED_PIN3]
GPIO.setup(chan_list, GPIO.OUT,initial=GPIO.LOW)
GPIO.setup(Photocell_PIN, GPIO.IN)
def cleanup():
GPIO.cleanup()
# code for getting a range of value by measuring the time it takes
# to charge a capacitor
def RCtime():
reading=0;
GPIO.setup(Photocell_PIN, GPIO.OUT)
GPIO.output(Photocell_PIN, GPIO.LOW)
time.sleep(0.1)
GPIO.setup(Photocell_PIN, GPIO.IN)
while (GPIO.input(Photocell_PIN) == GPIO.LOW):
reading+=1
return reading
def stream_to_plotly():
with open('./config.json') as config_file:
plotly_user_config = json.load(config_file)
py.sign_in(plotly_user_config["plotly_username"], plotly_user_config["plotly_api_key"])
url = py.plot([
{
'x': [], 'y': [], 'type': 'scatter',
'stream': {
'token': plotly_user_config['plotly_streaming_tokens'][0],
'maxpoints': 200
}
},
{
'x': [], 'y': [], 'type': 'scatter',
'stream': {
'token': plotly_user_config['plotly_streaming_tokens'][1],
'maxpoints': 200
}
}], filename='Raspberry Pi Light Intensity Streaming')
print "View your streaming graph here: ", url
stream = py.Stream(plotly_user_config['plotly_streaming_tokens'][0])
stream.open()
stream1 = py.Stream(plotly_user_config['plotly_streaming_tokens'][1])
stream1.open()
# sensor_data = readadc.readadc(Photocell_PIN,
# readadc.PINS.SPICLK,
# readadc.PINS.SPIMOSI,
# readadc.PINS.SPIMISO,
# readadc.PINS.SPICS)
while True:
stream.write({'x': datetime.datetime.now(), 'y': RCtime()})
stream1.write({'x': datetime.datetime.now(), 'y': RCtime()+1})
time.sleep(0.25)
# a digital read function; it output 0 or 1 depending on whether the
# resistance is above or below the threshold
def digital_read():
while True:
if GPIO.input(Photocell_PIN):
GPIO.output(LED_PIN2, GPIO.LOW)
GPIO.output(LED_PIN1, GPIO.HIGH)
else:
GPIO.output(LED_PIN2, GPIO.HIGH)
GPIO.output(LED_PIN1, GPIO.LOW)
time.sleep(0.5)
if __name__ == '__main__':
setup()
#digital_read()
stream_to_plotly()
#while True:
# print RCtime()
cleanup()<file_sep>package edu.rit.csci759.jsonrpc.server;
/**
* Demonstration of the JSON-RPC 2.0 Server framework usage. The request
* handlers are implemented as static nested classes for convenience, but in
* real life applications may be defined as regular classes within their old
* source files.
*
* @author <NAME>
* @version 2011-03-05
*/
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.text.DateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import net.minidev.json.JSONArray;
import net.sourceforge.jFuzzyLogic.demo.tipper.TipperAnimation;
import com.thetransactioncompany.jsonrpc2.JSONRPC2Error;
import com.thetransactioncompany.jsonrpc2.JSONRPC2Request;
import com.thetransactioncompany.jsonrpc2.JSONRPC2Response;
import com.thetransactioncompany.jsonrpc2.server.MessageContext;
import com.thetransactioncompany.jsonrpc2.server.RequestHandler;
import edu.rit.csci759.fuzzylogic.BlindMonitor;
import edu.rit.csci759.rspi.RpiIndicatorImplementation;
public class JsonHandler {
static BlindMonitor blinder = new BlindMonitor();
static RpiIndicatorImplementation rpi = new RpiIndicatorImplementation();
// Implements a handler for an "echo" JSON-RPC method
public static class EchoHandler implements RequestHandler {
// Reports the method names of the handled requests
public String[] handledRequests() {
return new String[] { "echo" };
}
// Processes the requests
public JSONRPC2Response process(JSONRPC2Request req, MessageContext ctx) {
if (req.getMethod().equals("echo")) {
// Echo first parameter
List params = (List) req.getParams();
Object input = params.get(0);
return new JSONRPC2Response(input, req.getID());
} else {
// Method name not supported
return new JSONRPC2Response(JSONRPC2Error.METHOD_NOT_FOUND,
req.getID());
}
}
}
/**
* @author <NAME> This class handles the temperature and ambient of the
* system. Using the RPI object it send the use the temp and ambient
*
*/
public static class TempAmbientHandler implements RequestHandler {
// Reports the method names of the handled requests
public String[] handledRequests() {
return new String[] { "getTemp", "getAmbient" };
}
// Processes the requests
public JSONRPC2Response process(JSONRPC2Request req, MessageContext ctx) {
//System.out.println("in temp handler: " + req);
String hostname = "unknown";
try {
hostname = InetAddress.getLocalHost().getHostName();
} catch (UnknownHostException e) {
e.printStackTrace();
}
if (req.getMethod().equals("getTemp")) {
RpiIndicatorImplementation Object = new RpiIndicatorImplementation();
String temp = String.valueOf(Object.read_temperature());
return new JSONRPC2Response(hostname + " " + temp, req.getID());
} else if (req.getMethod().equals("getAmbient")) {
RpiIndicatorImplementation Object = new RpiIndicatorImplementation();
String ambient = String.valueOf(Object
.read_ambient_light_intensity());
return new JSONRPC2Response(hostname + " " + ambient,
req.getID());
} else {
// Method name not supported
return new JSONRPC2Response(JSONRPC2Error.METHOD_NOT_FOUND,
req.getID());
}
}
}
public static class DateTimeHandler implements RequestHandler {
// Reports the method names of the handled requests
public String[] handledRequests() {
return new String[] { "getDate", "getTime" };
}
// Processes the requests
public JSONRPC2Response process(JSONRPC2Request req, MessageContext ctx) {
String hostname = "unknown";
try {
hostname = InetAddress.getLocalHost().getHostName();
} catch (UnknownHostException e) {
e.printStackTrace();
}
if (req.getMethod().equals("getDate")) {
DateFormat df = DateFormat.getDateInstance();
String date = df.format(new Date());
return new JSONRPC2Response(hostname + " " + date, req.getID());
} else if (req.getMethod().equals("getTime")) {
DateFormat df = DateFormat.getTimeInstance();
String time = df.format(new Date());
return new JSONRPC2Response(hostname + " " + time, req.getID());
} else {
// Method name not supported
return new JSONRPC2Response(JSONRPC2Error.METHOD_NOT_FOUND,
req.getID());
}
}
}
/**
* @author srs6573 <NAME> This class handles the sending and receiving
* of the rules of a given client
*
*/
public static class RuleHandler implements RequestHandler {
@Override
public String[] handledRequests() {
// TODO Auto-generated method stub
return new String[] { "RegisterRule", "DeleteRule", "getRules" };
}
@Override
public JSONRPC2Response process(JSONRPC2Request req, MessageContext ctx) {
//System.out.println(req.toJSONString());
JSONArray ruleMethod = (JSONArray) req.getParams();
String hostname = "unknown";
try {
hostname = InetAddress.getLocalHost().getHostName();
} catch (UnknownHostException e) {
e.printStackTrace();
}
// System.out.println(req.getMethod());
if (req.getMethod().equals("RegisterRule")) {
boolean returnstate = blinder.setRule(ruleMethod);
if (!returnstate) {
return new JSONRPC2Response(hostname + "Failure",
req.getID());
} else {
return new JSONRPC2Response(hostname + "Success",
req.getID());
}
} else if (req.getMethod().equals("DeleteRule")) {
blinder.deleteRule(ruleMethod);
return new JSONRPC2Response(hostname + "Success", req.getID());
} else if (req.getMethod().equals("getRules")) {
return new JSONRPC2Response(hostname + blinder.sendRules(),
req.getID());
} else {
return new JSONRPC2Response(JSONRPC2Error.METHOD_NOT_FOUND,
req.getID());
}
}
}
/**
* @author <NAME> This class send the temp changes to the user as well
* as runs a blind monitoring in a background thread
*
*/
public static class ChangeHandler extends Thread {
int temp, ambient;
public List<Object> send() {
// System.out.println("Inside change");
temp = rpi.read_temperature();
blinder.setTemp(temp);
ambient = rpi.read_ambient_light_intensity();
blinder.setAmbient(ambient);
List<Object> readings = new ArrayList<>();
readings.add(Integer.toString(temp));
readings.add(blinder.TempPos());
// System.out.println("Returning with: " + temp + ", " + ambient);
return readings;
}
public void run() {
// System.out.println("Inside monitor");
int temp, ambient;
while (true) {
try {
Thread.sleep(10000);
temp = rpi.read_temperature();
blinder.setTemp(temp);
ambient = rpi.read_ambient_light_intensity();
// System.out.println("In run temp: " + temp + " : " +
// ambient);
blinder.setAmbient(ambient);
List<Object> readings = new ArrayList<>();
readings.add(temp);
readings.add(ambient);
String blind = blinder.blindPos();
// System.out.println("Blind: " + blind);
if (blind.equals("[blind IS open]")) {
rpi.led_when_high();
Thread.sleep(5000);
rpi.led_all_off();
} else if (blind.equals("[blind IS half]")) {
rpi.led_when_mid();
Thread.sleep(5000);
rpi.led_all_off();
} else if (blind.equals("[blind IS close]")) {
rpi.led_when_low();
Thread.sleep(5000);
rpi.led_all_off();
} else {
System.out.println(blind);
}
Thread.sleep(8000);
} catch (InterruptedException e1) {
// TODO Auto-generated catch block
e1.printStackTrace();
}
// System.out.println("Thread "+ this.isAlive());
}
}
}
}
<file_sep>#!/bin/sh
rsync -avzh * <EMAIL>:~/ritcoursematerials/PervasiveCourse/
<file_sep>'''
Created on Nov 20, 2014
@author: ph
'''
import readadc
import time
try:
import RPi.GPIO as GPIO
except RuntimeError:
print("Error importing RPi.GPIO! This is probably because you need superuser privileges. You can achieve this by using 'sudo' to run your script")
GPIO.setmode(GPIO.BCM)
DEBUG=True
LOGGER=True
# temperature sensor connected channel 0 of mcp3008
tmp36_adc = 0
photo_adc = 1
def setup():
readadc.initialize()
def loop():
while True:
tmp36_data = readadc.readadc(tmp36_adc,
readadc.PINS.SPICLK,
readadc.PINS.SPIMOSI,
readadc.PINS.SPIMISO,
readadc.PINS.SPICS)
tmp36_millivolts = tmp36_data * (3300.0/1024.0)
# 10 mv per degree
temp_C = ((tmp36_millivolts - 100.0) / 10.0) - 40.0
# convert celsius to fahrenheit
temp_F = (temp_C * 9.0 / 5.0) + 32
# remove decimal point from millivolts
tmp36_millivolts = "%d" % tmp36_millivolts
# show only one decimal place for temprature and voltage readings
temp_C = "%.1f" % temp_C
temp_F = "%.1f" % temp_F
photo_data = readadc.readadc(photo_adc,
readadc.PINS.SPICLK,
readadc.PINS.SPIMOSI,
readadc.PINS.SPIMISO,
readadc.PINS.SPICS)
photo_millivolts = photo_data * (100.0/1024.0)
print "Current temperature: %sC, %sF; light: %f" % (temp_C, temp_F, photo_millivolts)
time.sleep(0.5)
def cleanup():
# initial values of variables etc...
counter = 0
try:
# here you put your main loop or block of code
while counter < 9000000:
# count up to 9000000 - takes ~20s
counter += 1
print "Target reached: %d" % counter
except KeyboardInterrupt:
# here you put any code you want to run before the program
# exits when you press CTRL+C
print "\n", counter # print value of counter
except:
# this catches ALL other exceptions including errors.
# You won't get any error messages for debugging
# so only use it once your code is working
print "Other error or exception occurred!"
finally:
GPIO.cleanup() # this ensures a clean exit
if __name__ == '__main__':
setup()
loop()
cleanup() | d68e6126357e133da79cf701df600c682f22d949 | [
"Gradle",
"Java",
"Python",
"Text",
"C++",
"Shell"
] | 25 | C++ | grnair89/Smart-Blind | ba197578672f42885e6f31d8fa0aea73a7e0cf81 | 0816a4dc7de67e4b2c0d973d946ead19b9af06ea |
refs/heads/master | <repo_name>tensor-programming/go-tutorial-7<file_sep>/data.go
package main
type User struct {
Uuid string
Fname string
Lname string
Username string
Email string
Password string
}<file_sep>/README.md
# Go Tutorial Part 7
# starting a User Module
### We start adding a login/logout/signup module to our webapp.
## Run `go run *.go` or `go run main.go data.go cookies.go` to run the app, run `go build main.go data.go main.go` to build an executable file.
### Check out the Youtube Tutorial for this [Go Program](https://youtu.be/cOdKxFMwhg8). Here is our [Youtube Channel](https://www.youtube.com/channel/UCYqCZOwHbnPwyjawKfE21wg) Subscribe for more content.
### Check out our blog at [tensor-programming.com](http://tensor-programming.com/).
### Our [Twitter](https://twitter.com/TensorProgram), our [facebook](https://www.facebook.com/Tensor-Programming-1197847143611799/) and our [Steemit](https://steemit.com/@tensor).
### Donate if you like the content:
### Ada: DdzFFzCqrhsqPcLbpt3C9nkSW2HvMJJCER5c9ijxKwXDet3GT5KchnUp<KEY>
### Eth: 0xD210Ea51F1615794A16080A108d2BC5471F60166
### Ltc: LXsKxF5JhmMtKgqfcUFdvcXVwiaSqxN9cP
<file_sep>/cookie.go
package main
import "net/http"
func setSession(u *User, w http.ResponseWriter){
value := map[string]string{
"name": u.Username,
"pass": u.Password,
}
if encoded, err := cookieHandler.Encode("session", value); err == nil {
cookie := &http.Cookie{
Name: "session",
Value: encoded,
Path: "/",
}
http.SetCookie(w, cookie)
}
}
func getUserName(r *http.Request) (username string){
if cookie, err := r.Cookie("session"); err == nil {
cookieValue := make(map[string]string)
if err = cookieHandler.Decode("session", cookie.Value, &cookieValue); err == nil {
username = cookieValue["name"]
}
}
return username
}
func clearSession(w http.ResponseWriter){
cookie := &http.Cookie{
Name: "session",
Value: "",
Path: "/",
MaxAge: -1,
}
http.SetCookie(w, cookie)
}
| 38fc75a91e82a9d1dfb834829972d7e7111748a0 | [
"Markdown",
"Go"
] | 3 | Go | tensor-programming/go-tutorial-7 | 4eb73f8509563a09b277acd2f19b67a657fe47f1 | 2e9b90db37880c44b80cb3e077085c774abc9a1c |
refs/heads/master | <file_sep>package dao;
import java.util.ArrayList;
import dto.Product;
public class ProductRepository {
private ArrayList<Product> listOfProducts = new ArrayList<Product>();
public ProductRepository() {
Product phone = new Product("P1234", "iPhone 6s", 800000);
phone.setDescription("4.7-inch, 1334X750 Renina HD display,8-megapixel iSight Camera");
} | a79e5df819ffb1194cf6e40f47bc5732e5d66815 | [
"Java"
] | 1 | Java | tjdwhdduf/ui | d5f13fd22098dd7cd01e60171ceb753a995d91fa | 266b767f2e66cfda6750a4585b9b00a5692afd0a |
refs/heads/master | <repo_name>94ostry/deplex<file_sep>/src/main/java/com/pioost/Main.java
package com.pioost;
import com.pioost.dependency.CalculationRule;
import com.pioost.dependency.Dependency;
import com.pioost.dependency.TextFiledBindable;
import javafx.application.Application;
import javafx.fxml.FXMLLoader;
import javafx.scene.Node;
import javafx.scene.Scene;
import javafx.scene.layout.GridPane;
import javafx.stage.Stage;
import java.util.Optional;
public class Main extends Application {
@Override
public void start(Stage primaryStage) throws Exception {
GridPane root = FXMLLoader.load(getClass().getResource("sample.fxml"));
primaryStage.setTitle("Hello World");
primaryStage.setScene(new Scene(root, 300, 275));
setDependency(root);
primaryStage.show();
}
private void setDependency(GridPane root) {
Optional<Node> field1 =
root.getChildrenUnmodifiable().stream().filter(f -> f.getId().equals("field1"))
.findFirst();
TextFiledBindable textFiledBindable1 = new TextFiledBindable(
(javafx.scene.control.TextField) field1.get());
Optional<Node> field2 =
root.getChildrenUnmodifiable().stream().filter(f -> f.getId().equals("field2"))
.findFirst();
TextFiledBindable textFiledBindable2 = new TextFiledBindable(
(javafx.scene.control.TextField) field2.get());
Dependency dependency =
new DependencyBuilder().from(textFiledBindable1).rule(new CalculationRule())
.to(textFiledBindable2).build();
}
public static void main(String[] args) {
launch(args);
}
}
<file_sep>/src/main/java/com/pioost/DependencyBuilder.java
package com.pioost;
import com.pioost.dependency.Bindable;
import com.pioost.dependency.CalculationRule;
import com.pioost.dependency.Dependency;
import com.pioost.dependency.TextFiledBindable;
import com.sun.tracing.dtrace.Attributes;
/**
* Created by pioost on 10/22/14.
*/
public class DependencyBuilder {
private Bindable from;
private CalculationRule calculationRule;
private Bindable to;
public DependencyBuilder from(Bindable textFiledBindable1) {
from = textFiledBindable1;
return this;
}
public DependencyBuilder rule(CalculationRule rule) {
calculationRule = rule;
return this;
}
public DependencyBuilder to(Bindable textFiledBindable2) {
to = textFiledBindable2;
return this;
}
public Dependency build()
{
return new Dependency( from, to, calculationRule );
}
}
<file_sep>/settings.gradle
rootProject.name = 'deplex'
<file_sep>/README.md
deplex
======
Dependency builder
| ea77f2c21b2ac5a4809635f5d11e7edd269ec50d | [
"Markdown",
"Java",
"Gradle"
] | 4 | Java | 94ostry/deplex | 3f1f9d7fc36165a285f5cb3a2d794d756ae6e4d3 | cd56f1e3d3dd2db1e737aa1f556e89c53268fbd9 |
refs/heads/master | <file_sep># API-Rest-Clientes-ServicosPrestados
Projeto de API Rest criado com Spring Boot para gerenciamento de cliente e cadastro de serviços prestados.
## Front-end
*[Link](https://github.com/JeeanLiima97/clientes-frontend)
## 📦 Desenvolvimento
Utilizado Spring Boot seguindo os padrões Rest de Clean code e S.O.L.I.D.. Utilizado Spring Security com JWT para autenticação e autorização e Swagger para Documentação.
## 🛠️ Construído com
Ferramentas que utilizadas para criar o projeto
* [Intellij](https://www.jetbrains.com/pt-br/idea/) - IDE utilizada
* [SpringBoot](https://spring.io/projects/spring-boot) - Framework Java
* [H2 Database](https://www.h2database.com/html/main.html) - Banco em memória
* [Lombok](https://projectlombok.org/)
<file_sep>campo.nome.obrigatorio=O campo nome é obritatório.
campo.cpf.obrigatorio=O campo cpf é obritatório.
campo.cpf.invalido=O campo cpf está invalido.
campo.descricao.obrigatorio=O campo Descrição é obritatório.
campo.preco.obrigatorio=O campo Preço é obritatório.
campo.data.obrigatorio=O campo Data é obritatório.
campo.cliente.obrigatorio=O campo Cliente é obritatório.
<file_sep>package JeeanLiima97.com.github.clientes.model.repository;
import JeeanLiima97.com.github.clientes.model.entity.ServicoPrestado;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
import java.util.List;
// Spring Data repository for ServicoPrestado entities.
// NOTE(review): the interface name contains a typo ("Prestaado"); renaming it
// would touch every injection site, so it is only flagged here.
public interface ServicoPrestaadoRepository extends JpaRepository<ServicoPrestado, Integer> {

    // Returns the services whose client name matches the given pattern
    // (case-insensitive) and whose service date falls in the given month.
    @Query("select s from ServicoPrestado s join s.cliente c where upper(c.nome) like upper (:nome) and MONTH(s.data)=:mes")
    List<ServicoPrestado> findByNomeClienteAndMes(
            @Param("nome") String nome, @Param("mes") Integer mes);
}
| ceaed1abb33ccac01315883cf24adb5a01efbf2c | [
"Markdown",
"Java",
"INI"
] | 3 | Markdown | JeeanLiima97/clientes-api | 75a27113a871f9ac2d55d12dcb7e3269e1444e74 | 99a673a3bab11310ced8e5dd85983955daf04947 |
refs/heads/master | <file_sep>###############################################
# Install helpers for development environment #
###############################################
DIR_HELPERS=/opt/sh-helpers
DIR_BIN=/usr/local/bin
DIR_SH_PHP=./php
DIR_SH_NODE=./node
DIR_SH_MONGO=./mongo
DIR_SH_FLUTTER=./flutter
EXT_FILE=sh
VERSION=1
PHP=php
PHP5=php5
PHPCS=phpcs
PHPCBF=phpcbf
PHPCSFIXER=php-cs-fixer
COMPOSER=composer
NG=ng
NPM=npm
NODE=node
GULP=gulp
BOWER=bower
FLUTTER=flutter
DART=dart
MONGO=mongo
info:
-@echo "make install all -> Install all helpers"
uninstall:
rm -f $(DIR_BIN)/$(PHP)
rm -f $(DIR_BIN)/$(PHP5)
rm -f $(DIR_BIN)/$(PHPCS)
rm -f $(DIR_BIN)/$(PHPCBF)
rm -f $(DIR_BIN)/$(PHPCSFIXER)
rm -f $(DIR_BIN)/$(COMPOSER)
rm -f $(DIR_BIN)/$(NG)
rm -f $(DIR_BIN)/$(NPM)
rm -f $(DIR_BIN)/$(NODE)
rm -f $(DIR_BIN)/$(GULP)
rm -f $(DIR_BIN)/$(MONGO)
rm -f $(DIR_BIN)/$(BOWER)
rm -f $(DIR_BIN)/$(FLUTTER)
rm -f $(DIR_BIN)/$(DART)
rm -rf $(DIR_HELPERS)
update:
make uninstall
make install
install:
@echo "Configuring helpers..."
rm -rf $(DIR_HELPERS) && mkdir -p $(DIR_HELPERS)
cp -r $(DIR_SH_PHP)/$(PHP).$(EXT_FILE) $(DIR_HELPERS)/$(PHP).$(EXT_FILE).$(VERSION)
cp -r $(DIR_SH_PHP)/$(PHP5).$(EXT_FILE) $(DIR_HELPERS)/$(PHP5).$(EXT_FILE).$(VERSION)
cp -r $(DIR_SH_PHP)/$(PHPCS).$(EXT_FILE) $(DIR_HELPERS)/$(PHPCS).$(EXT_FILE).$(VERSION)
cp -r $(DIR_SH_PHP)/$(PHPCBF).$(EXT_FILE) $(DIR_HELPERS)/$(PHPCBF).$(EXT_FILE).$(VERSION)
cp -r $(DIR_SH_PHP)/$(PHPCSFIXER).$(EXT_FILE) $(DIR_HELPERS)/$(PHPCSFIXER).$(EXT_FILE).$(VERSION)
cp -r $(DIR_SH_PHP)/$(COMPOSER).$(EXT_FILE) $(DIR_HELPERS)/$(COMPOSER).$(EXT_FILE).$(VERSION)
cp -r $(DIR_SH_NODE)/$(NG).$(EXT_FILE) $(DIR_HELPERS)/$(NG).$(EXT_FILE).$(VERSION)
cp -r $(DIR_SH_NODE)/$(NPM).$(EXT_FILE) $(DIR_HELPERS)/$(NPM).$(EXT_FILE).$(VERSION)
cp -r $(DIR_SH_NODE)/$(NODE).$(EXT_FILE) $(DIR_HELPERS)/$(NODE).$(EXT_FILE).$(VERSION)
cp -r $(DIR_SH_NODE)/$(GULP).$(EXT_FILE) $(DIR_HELPERS)/$(GULP).$(EXT_FILE).$(VERSION)
cp -r $(DIR_SH_NODE)/$(BOWER).$(EXT_FILE) $(DIR_HELPERS)/$(BOWER).$(EXT_FILE).$(VERSION)
cp -r $(DIR_SH_MONGO)/$(MONGO).$(EXT_FILE) $(DIR_HELPERS)/$(MONGO).$(EXT_FILE).$(VERSION)
cp -r $(DIR_SH_FLUTTER)/$(FLUTTER).$(EXT_FILE) $(DIR_HELPERS)/$(FLUTTER).$(EXT_FILE).$(VERSION)
cp -r $(DIR_SH_FLUTTER)/$(DART).$(EXT_FILE) $(DIR_HELPERS)/$(DART).$(EXT_FILE).$(VERSION)
rm -f $(DIR_BIN)/$(PHP)
rm -f $(DIR_BIN)/$(PHP5)
rm -f $(DIR_BIN)/$(PHPCS)
rm -f $(DIR_BIN)/$(PHPCBF)
rm -f $(DIR_BIN)/$(PHPCSFIXER)
rm -f $(DIR_BIN)/$(COMPOSER)
rm -f $(DIR_BIN)/$(NG)
rm -f $(DIR_BIN)/$(NPM)
rm -f $(DIR_BIN)/$(NODE)
rm -f $(DIR_BIN)/$(GULP)
rm -f $(DIR_BIN)/$(BOWER)
rm -f $(DIR_BIN)/$(MONGO)
rm -f $(DIR_BIN)/$(FLUTTER)
rm -f $(DIR_BIN)/$(DART)
ln -sF $(DIR_HELPERS)/$(PHP).$(EXT_FILE).$(VERSION) $(DIR_BIN)/$(PHP)
ln -sF $(DIR_HELPERS)/$(PHP5).$(EXT_FILE).$(VERSION) $(DIR_BIN)/$(PHP5)
ln -sF $(DIR_HELPERS)/$(PHPCS).$(EXT_FILE).$(VERSION) $(DIR_BIN)/$(PHPCS)
ln -sF $(DIR_HELPERS)/$(PHPCBF).$(EXT_FILE).$(VERSION) $(DIR_BIN)/$(PHPCBF)
ln -sF $(DIR_HELPERS)/$(PHPCSFIXER).$(EXT_FILE).$(VERSION) $(DIR_BIN)/$(PHPCSFIXER)
ln -sF $(DIR_HELPERS)/$(COMPOSER).$(EXT_FILE).$(VERSION) $(DIR_BIN)/$(COMPOSER)
ln -sF $(DIR_HELPERS)/$(NG).$(EXT_FILE).$(VERSION) $(DIR_BIN)/$(NG)
ln -sF $(DIR_HELPERS)/$(NPM).$(EXT_FILE).$(VERSION) $(DIR_BIN)/$(NPM)
ln -sF $(DIR_HELPERS)/$(NODE).$(EXT_FILE).$(VERSION) $(DIR_BIN)/$(NODE)
ln -sF $(DIR_HELPERS)/$(GULP).$(EXT_FILE).$(VERSION) $(DIR_BIN)/$(GULP)
ln -sF $(DIR_HELPERS)/$(BOWER).$(EXT_FILE).$(VERSION) $(DIR_BIN)/$(BOWER)
ln -sF $(DIR_HELPERS)/$(MONGO).$(EXT_FILE).$(VERSION) $(DIR_BIN)/$(MONGO)
ln -sF $(DIR_HELPERS)/$(FLUTTER).$(EXT_FILE).$(VERSION) $(DIR_BIN)/$(FLUTTER)
ln -sF $(DIR_HELPERS)/$(DART).$(EXT_FILE).$(VERSION) $(DIR_BIN)/$(DART)
chmod a+x $(DIR_BIN)/$(PHP)
chmod a+x $(DIR_BIN)/$(PHP5)
chmod a+x $(DIR_BIN)/$(PHPCS)
chmod a+x $(DIR_BIN)/$(PHPCBF)
chmod a+x $(DIR_BIN)/$(PHPCSFIXER)
chmod a+x $(DIR_BIN)/$(COMPOSER)
chmod a+x $(DIR_BIN)/$(NG)
chmod a+x $(DIR_BIN)/$(NPM)
chmod a+x $(DIR_BIN)/$(NODE)
chmod a+x $(DIR_BIN)/$(GULP)
chmod a+x $(DIR_BIN)/$(BOWER)
chmod a+x $(DIR_BIN)/$(MONGO)
chmod a+x $(DIR_BIN)/$(FLUTTER)
chmod a+x $(DIR_BIN)/$(DART)
@echo "Ok!"<file_sep>#!/bin/bash
###########################################################
# Helper for use container mongo in development environment #
###########################################################
CONTAINERNAME="helper_mongo"
DIR=${PWD}
ROOTDIR=$(echo "$DIR" | cut -d "/" -f3)
CONTAINERNAME=$CONTAINERNAME"_"$ROOTDIR
FILELOADVOLUMENOW="/tmp/.dkr_load_volume_now_"$CONTAINERNAME
COMMANDEXEC="$@"
if [[ -z "$COMMANDEXEC" ]]; then
COMMANDEXEC="-v"
fi
if [ ! -f "$FILELOADVOLUMENOW" ]; then
echo $DIR > $FILELOADVOLUMENOW
VOLUMENOW=$DIR
else
VOLUMENOW=$(cat "$FILELOADVOLUMENOW")
fi
if [ ! $(docker ps -q -f status=running -f name="$CONTAINERNAME") ]; then
if [ ! $(docker ps -aq -f name="$CONTAINERNAME" -f status=running) ]; then
docker rm -f "$CONTAINERNAME" 2> /dev/null
fi
docker run -itd --name "$CONTAINERNAME" -v "$VOLUMENOW":/usr/src/app -v ~/.ssh:/root/.ssh -w /usr/src/app mongo /bin/bash > /dev/null
fi
docker exec "$CONTAINERNAME" mongo $COMMANDEXEC<file_sep>#!/bin/bash
###########################################################
# Helper for use container php in development environment #
#
# Runs `composer` inside a per-project Docker container (cbsan/php:5.6-tools)
# with the current working directory mounted at /var/www.
###########################################################

CONTAINERNAME="helper_php_composer"
DIR=${PWD}

# Third path component of $PWD; makes the container name unique per
# top-level directory.
ROOTDIR=$(echo "$DIR" | cut -d "/" -f3)
CONTAINERNAME=$CONTAINERNAME"_"$ROOTDIR

# Marker file remembering which directory is mounted in the container.
FILELOADVOLUMENOW="/tmp/.dkr_load_volume_now_"$CONTAINERNAME

# Forward all CLI arguments to composer; with none, just print the version.
COMMANDEXEC="$@"
if [[ -z "$COMMANDEXEC" ]]; then
    COMMANDEXEC="-v"
fi

# Remember (or recall) the mounted volume directory.
if [ ! -f "$FILELOADVOLUMENOW" ]; then
    echo $DIR > $FILELOADVOLUMENOW
    VOLUMENOW=$DIR
else
    VOLUMENOW=$(cat "$FILELOADVOLUMENOW")
fi

# If the remembered directory vanished or the caller moved to a new project,
# record the new directory and stop the stale container so it gets recreated
# with the right volume below.
if [ ! -d "$VOLUMENOW" ] || [ ! "$VOLUMENOW" == "$DIR" ]; then
    echo $DIR > $FILELOADVOLUMENOW
    VOLUMENOW=$DIR
    docker stop $(docker ps -aq -f name="$CONTAINERNAME") >> /dev/null 2>&1
fi

# Start the container if it is not already running.
if [ ! $(docker ps -q -f status=running -f name="$CONTAINERNAME") ]; then
    if [ ! $(docker ps -aq -f name="$CONTAINERNAME" -f status=running) ]; then
        docker rm -f "$CONTAINERNAME" 2> /dev/null
    fi
    docker run -tid --name "$CONTAINERNAME" -v ~/.ssh:/root/.ssh -v $VOLUMENOW:/var/www cbsan/php:5.6-tools /bin/bash >> /dev/null 2>&1
fi
docker exec "$CONTAINERNAME" composer $COMMANDEXEC | 5293f9b002862840859ee871f4640d7067c04e58 | [
"Makefile",
"Shell"
] | 3 | Makefile | cbsan/sh-helper | b6e59d11ae590b4513fc5de8631816b5f85bfaf5 | e00f7a8b0d83c476f9000ad95d7889e986b13183 |
refs/heads/master | <file_sep># Snake.py
import os
import random
import time
import msvcrt
def clear():
    """Clear the console (Windows ``cls``)."""
    os.system("cls")


# 20x30 play field: a solid '#' border row on top and bottom, and 18 interior
# rows of 28 spaces flanked by '#' walls.
Grid = [['#' for i in range(30)]] + [(['#'] + [' ' for i in range(28)] + ['#']) for i in range(18)] + [['#' for i in range(30)]]

# Snake body as (row, col) tuples, head first; filled by Initialize().
Snake = []
def PrepareScreen(grid=None):
    """Render a grid of single-character cells as one printable string.

    Args:
        grid: Optional list of rows (each a list of 1-char strings). Defaults
            to the module-level ``Grid`` so existing callers are unaffected.

    Returns:
        The rows joined into lines and the lines joined with newlines.
    """
    rows = Grid if grid is None else grid
    return '\n'.join(''.join(row) for row in rows)
def Initialize():
    """Seed the game state: a 3-cell snake facing right, plus one apple.

    Mutates the module-level ``Grid`` (painting 'O' cells) and ``Snake``
    (head first at (10, 10)), then spawns the first apple via PlaceApple().
    """
    Grid[10][10] = 'O'
    Grid[10][9] = 'O'
    Grid[10][8] = 'O'
    Snake.append((10, 10))
    Snake.append((10, 9))
    Snake.append((10, 8))
    PlaceApple()
def PlaceApple(grid=None):
    """Place an apple '@' on a uniformly random empty (' ') cell.

    Args:
        grid: Optional grid (list of rows) to place into. Defaults to the
            module-level ``Grid`` so existing callers are unaffected.

    Note: loops forever if the grid has no empty cell — acceptable here
    because the board always has free space while the game is running.
    """
    cells = Grid if grid is None else grid
    while True:
        x = random.randint(0, len(cells) - 1)
        y = random.randint(0, len(cells[0]) - 1)
        if cells[x][y] == ' ':
            cells[x][y] = '@'
            return
def Move(dir):
    """Advance the snake one cell in direction ``dir`` ('up'/'down'/'left'/'right').

    Publishes the content of the cell the head moved into via the module-level
    ``next`` variable (shadowing the builtin ``next`` — kept for compatibility
    with the main loop, which reads it to detect '#'/'O' collisions).

    Fix: previously the tail cell was erased from Grid even when an apple was
    eaten, although the tail stays in Snake when the snake grows — the board
    drifted out of sync with the body list. The tail is now only erased on a
    normal (non-apple) move.
    """
    head = Snake[0]
    tail = Snake[-1]
    newHead = (0, 0)
    if (dir == 'up'):
        newHead = (head[0] - 1, head[1])
    elif (dir == 'down'):
        newHead = (head[0] + 1, head[1])
    elif (dir == 'left'):
        newHead = (head[0], head[1] - 1)
    elif (dir == 'right'):
        newHead = (head[0], head[1] + 1)
    global next
    next = Grid[newHead[0]][newHead[1]]
    if next == ' ' or next == '@':
        Grid[newHead[0]][newHead[1]] = 'O'
        Snake.insert(0, newHead)
        if next == ' ':
            # Normal move: drop the tail so length stays constant.
            Grid[tail[0]][tail[1]] = ' '
            Snake.pop()
        if next == '@':
            # Apple eaten: tail kept (snake grows) and a new apple spawns.
            PlaceApple()
# Main game loop: redraw, advance, then poll the keyboard (Windows-only).
CurrDir = 'right'
Initialize()
while True:
    clear()
    print(PrepareScreen())
    time.sleep(0.25)
    Move(CurrDir)
    # `next` is set by Move() to the cell the head entered.
    if next == '#' or next == 'O':
        print('Game Over')
        break
    if msvcrt.kbhit():
        # Fix: msvcrt.getch() returns *bytes* on Python 3, so comparing the
        # raw result against 'w'/'a'/'s'/'d' never matched and the controls
        # were dead. Decode before comparing.
        key = msvcrt.getch().decode(errors='ignore')
        if (key == 'w'):
            CurrDir = 'up'
        if (key == 'a'):
            CurrDir = 'left'
        if (key == 's'):
            CurrDir = 'down'
        if (key == 'd'):
            CurrDir = 'right'
<file_sep># Game
import random
# Rock-Paper-Scissors against a random AI, looping forever.
rps = {1: "Rock",
       2: "Paper",
       3: "Scissors"}


def _player_wins(choice, ai):
    """Return True when the player's pick beats the AI's.

    Rock(1) -> Paper(2) -> Scissors(3) -> Rock(1): a pick wins exactly when
    it is one step ahead of the opponent modulo 3. The previous comparison
    ``choice > ai and not (ai == 1 and choice == 3)`` wrongly reported a loss
    for Rock(1) vs Scissors(3); this closes that gap.
    """
    return (choice - ai) % 3 == 1


keepPlaying = True
while keepPlaying:
    print("""
    1. Rock
    2. Paper
    3. Scissors
    """)
    ai = random.randint(1, 3)
    choice = int(input())

    print("You picked", rps[choice])
    print("The AI picked", rps[ai])

    if ai == choice:
        print("Draw!")
    elif _player_wins(choice, ai):
        print("You win!")
    else:
        print("You lose!")
"Python"
] | 2 | Python | yangliming/BroSoft | fcc1eddd2c631e18da7fa8d407e02fa50f692c71 | 16092c52ca3196536c206450646883ed63831d44 |
refs/heads/develop | <repo_name>factly/dega-server<file_sep>/service/core/action/page/route.go
package page
import (
"time"
"github.com/factly/dega-server/config"
"github.com/factly/dega-server/service/core/model"
"github.com/factly/dega-server/util"
"github.com/go-chi/chi"
"github.com/jinzhu/gorm/dialects/postgres"
)
// page request body
type page struct {
Title string `json:"title" validate:"required,min=3,max=150"`
Subtitle string `json:"subtitle"`
Slug string `json:"slug"`
Excerpt string `json:"excerpt"`
Description postgres.Jsonb `json:"description" swaggertype:"primitive,string"`
Status string `json:"status"`
IsFeatured bool `json:"is_featured"`
IsSticky bool `json:"is_sticky"`
IsHighlighted bool `json:"is_highlighted"`
FeaturedMediumID uint `json:"featured_medium_id"`
PublishedDate *time.Time `json:"published_date"`
FormatID uint `json:"format_id" validate:"required"`
SpaceID uint `json:"space_id"`
CategoryIDs []uint `json:"category_ids"`
TagIDs []uint `json:"tag_ids"`
AuthorIDs []uint `json:"author_ids"`
}
// pageData couples a page (stored as a Post) with its resolved author objects
// for responses.
type pageData struct {
    model.Post
    Authors []model.Author `json:"authors"`
}

// userContext is the context key under which the requesting user is stored.
var userContext config.ContextKey = "post_user"
// Router registers the CRUD endpoints for pages. Every route is guarded by a
// keto policy check on the "pages" entity with the matching action.
func Router() chi.Router {
    r := chi.NewRouter()

    entity := "pages"

    r.With(util.CheckKetoPolicy(entity, "get")).Get("/", list)
    r.With(util.CheckKetoPolicy(entity, "create")).Post("/", create)

    // Per-page routes keyed by the {page_id} URL parameter.
    r.Route("/{page_id}", func(r chi.Router) {
        r.With(util.CheckKetoPolicy(entity, "get")).Get("/", details)
        r.With(util.CheckKetoPolicy(entity, "update")).Put("/", update)
        r.With(util.CheckKetoPolicy(entity, "delete")).Delete("/", delete)
    })

    return r
}
<file_sep>/test/service/fact-check/rating/create_test.go
package rating
import (
"net/http"
"net/http/httptest"
"testing"
"github.com/factly/dega-server/service"
"github.com/factly/dega-server/test"
"github.com/factly/dega-server/test/service/core/permissions/space"
"github.com/gavv/httpexpect/v2"
"github.com/jinzhu/gorm/dialects/postgres"
"gopkg.in/h2non/gock.v1"
)
// TestRatingCreate exercises POST /fact-check/ratings against a mocked DB,
// kavach/keto and meilisearch. The sqlmock expectations are order-sensitive:
// each subtest queues exactly the queries the handler is expected to run.
func TestRatingCreate(t *testing.T) {
    mock := test.SetupMockDB()

    test.MockServer()
    defer gock.DisableNetworking()
    testServer := httptest.NewServer(service.RegisterRoutes())
    gock.New(testServer.URL).EnableNetworking().Persist()
    defer gock.DisableNetworking()
    defer testServer.Close()

    // create httpexpect instance
    e := httpexpect.New(t, testServer.URL)

    t.Run("Unprocessable rating", func(t *testing.T) {
        test.CheckSpaceMock(mock)
        space.SelectQuery(mock, 1)
        e.POST(basePath).
            WithJSON(invalidData).
            WithHeaders(headers).
            Expect().
            Status(http.StatusUnprocessableEntity)
    })

    t.Run("Unable to decode rating", func(t *testing.T) {
        test.CheckSpaceMock(mock)
        space.SelectQuery(mock, 1)
        e.POST(basePath).
            WithHeaders(headers).
            Expect().
            Status(http.StatusUnprocessableEntity)
    })

    t.Run("create rating", func(t *testing.T) {
        test.CheckSpaceMock(mock)
        space.SelectQuery(mock, 1)
        sameNameCount(mock, 0, Data["name"])
        ratingCountQuery(mock, 0)
        slugCheckMock(mock, Data)
        ratingInsertMock(mock)
        SelectWithOutSpace(mock, Data)
        mock.ExpectCommit()

        e.POST(basePath).
            WithHeaders(headers).
            WithJSON(Data).
            Expect().
            Status(http.StatusCreated).JSON().Object().ContainsMap(resData)
        test.ExpectationsMet(t, mock)
    })

    t.Run("create rating with slug is empty", func(t *testing.T) {
        test.CheckSpaceMock(mock)
        space.SelectQuery(mock, 1)
        sameNameCount(mock, 0, Data["name"])
        ratingCountQuery(mock, 0)
        slugCheckMock(mock, Data)
        ratingInsertMock(mock)
        SelectWithOutSpace(mock, Data)
        mock.ExpectCommit()

        // Empty slug in the request: the server should derive it from name.
        Data["slug"] = ""
        res := e.POST(basePath).
            WithHeaders(headers).
            WithJSON(Data).
            Expect().
            Status(http.StatusCreated).JSON().Object()
        Data["slug"] = "true"
        res.ContainsMap(resData)
        test.ExpectationsMet(t, mock)
    })

    t.Run("medium does not belong same space", func(t *testing.T) {
        test.CheckSpaceMock(mock)
        space.SelectQuery(mock, 1)
        sameNameCount(mock, 0, Data["name"])
        ratingCountQuery(mock, 0)
        slugCheckMock(mock, Data)
        ratingInsertError(mock)

        e.POST(basePath).
            WithHeaders(headers).
            WithJSON(Data).
            Expect().
            Status(http.StatusInternalServerError)
        test.ExpectationsMet(t, mock)
    })

    t.Run("rating with same name exist", func(t *testing.T) {
        test.CheckSpaceMock(mock)
        space.SelectQuery(mock, 1)
        sameNameCount(mock, 1, Data["name"])

        e.POST(basePath).
            WithHeaders(headers).
            WithJSON(Data).
            Expect().
            Status(http.StatusUnprocessableEntity)
        test.ExpectationsMet(t, mock)
    })

    t.Run("rating with same numeric value exist", func(t *testing.T) {
        test.CheckSpaceMock(mock)
        space.SelectQuery(mock, 1)
        sameNameCount(mock, 0, Data["name"])
        ratingCountQuery(mock, 1)

        e.POST(basePath).
            WithHeaders(headers).
            WithJSON(Data).
            Expect().
            Status(http.StatusUnprocessableEntity)
        test.ExpectationsMet(t, mock)
    })

    t.Run("cannot parse rating description", func(t *testing.T) {
        test.CheckSpaceMock(mock)
        space.SelectQuery(mock, 1)
        sameNameCount(mock, 0, Data["name"])
        ratingCountQuery(mock, 0)

        // Invalid editor document; restored to a valid one after the call.
        Data["description"] = postgres.Jsonb{
            RawMessage: []byte(`{"block": "new"}`),
        }

        e.POST(basePath).
            WithHeaders(headers).
            WithJSON(Data).
            Expect().
            Status(http.StatusUnprocessableEntity)
        test.ExpectationsMet(t, mock)
        Data["description"] = postgres.Jsonb{
            RawMessage: []byte(`{"time":1617039625490,"blocks":[{"type":"paragraph","data":{"text":"Test Description"}}],"version":"2.19.0"}`),
        }
    })

    t.Run("create rating when meili is down", func(t *testing.T) {
        test.DisableMeiliGock(testServer.URL)
        test.CheckSpaceMock(mock)
        space.SelectQuery(mock, 1)
        sameNameCount(mock, 0, Data["name"])
        ratingCountQuery(mock, 0)
        slugCheckMock(mock, Data)
        ratingInsertMock(mock)
        SelectWithOutSpace(mock, Data)
        // Meili failure should roll the DB transaction back.
        mock.ExpectRollback()

        e.POST(basePath).
            WithHeaders(headers).
            WithJSON(Data).
            Expect().
            Status(http.StatusInternalServerError)
        test.ExpectationsMet(t, mock)
    })
}
<file_sep>/config/model.go
package config
import (
"time"
"gorm.io/gorm"
)
// ContextKey keys for contexts (dedicated type avoids collisions with other
// packages' context keys).
type ContextKey string

// Base with id, created_at, updated_at & deleted_at.
// Embedded by all persisted models; DeletedAt enables gorm soft deletes.
type Base struct {
    ID          uint            `gorm:"primary_key" json:"id"`
    CreatedAt   time.Time       `json:"created_at"`
    UpdatedAt   time.Time       `json:"updated_at"`
    DeletedAt   *gorm.DeletedAt `sql:"index" json:"deleted_at" swaggertype:"primitive,string"`
    CreatedByID uint            `gorm:"column:created_by_id" json:"created_by_id"`
    UpdatedByID uint            `gorm:"column:updated_by_id" json:"updated_by_id"`
}
<file_sep>/test/service/podcast/list_test.go
package podcast
import (
"net/http"
"net/http/httptest"
"testing"
"time"
"github.com/DATA-DOG/go-sqlmock"
"github.com/factly/dega-server/service"
"github.com/factly/dega-server/test"
"github.com/factly/dega-server/test/service/core/category"
"github.com/factly/dega-server/test/service/core/medium"
"github.com/factly/dega-server/test/service/core/permissions/space"
"github.com/gavv/httpexpect"
"gopkg.in/h2non/gock.v1"
)
// TestPodcastList exercises GET /podcasts against a mocked DB and external
// services. sqlmock expectations are order-sensitive per subtest.
func TestPodcastList(t *testing.T) {
    mock := test.SetupMockDB()
    test.MockServer()
    testServer := httptest.NewServer(service.RegisterRoutes())
    gock.New(testServer.URL).EnableNetworking().Persist()
    defer gock.DisableNetworking()
    defer testServer.Close()

    // create httpexpect instance
    e := httpexpect.New(t, testServer.URL)

    t.Run("get empty list of podcasts", func(t *testing.T) {
        test.CheckSpaceMock(mock)
        space.SelectQuery(mock, 1)
        mock.ExpectQuery(countQuery).
            WillReturnRows(sqlmock.NewRows([]string{"count"}).
                AddRow(0))

        mock.ExpectQuery(selectQuery).
            WillReturnRows(sqlmock.NewRows(Columns))

        e.GET(basePath).
            WithHeaders(headers).
            Expect().
            Status(http.StatusOK).
            JSON().
            Object().
            ContainsMap(map[string]interface{}{"total": 0})
        test.ExpectationsMet(t, mock)
    })

    t.Run("get list of podcasts", func(t *testing.T) {
        test.CheckSpaceMock(mock)
        space.SelectQuery(mock, 1)
        mock.ExpectQuery(countQuery).
            WillReturnRows(sqlmock.NewRows([]string{"count"}).
                AddRow(len(podcastList)))

        mock.ExpectQuery(selectQuery).
            WillReturnRows(sqlmock.NewRows(Columns).
                AddRow(1, time.Now(), time.Now(), nil, 1, 1, podcastList[0]["title"], podcastList[0]["slug"], podcastList[0]["description"], podcastList[0]["html_description"], podcastList[0]["language"], podcastList[0]["primary_category_id"], podcastList[0]["medium_id"], 1).
                AddRow(1, time.Now(), time.Now(), nil, 1, 1, podcastList[1]["title"], podcastList[1]["slug"], podcastList[1]["description"], podcastList[1]["html_description"], podcastList[1]["language"], podcastList[1]["primary_category_id"], podcastList[1]["medium_id"], 1))
        // Associated categories & media are preloaded after the main select.
        PodcastCategorySelect(mock)
        medium.SelectWithOutSpace(mock)
        category.SelectWithOutSpace(mock)

        e.GET(basePath).
            WithHeaders(headers).
            Expect().
            Status(http.StatusOK).
            JSON().
            Object().
            ContainsMap(map[string]interface{}{"total": len(podcastList)}).
            Value("nodes").
            Array().
            Element(0).
            Object().
            ContainsMap(podcastList[0])
    })

    t.Run("get list of podcasts with search query", func(t *testing.T) {
        test.CheckSpaceMock(mock)
        space.SelectQuery(mock, 1)
        mock.ExpectQuery(countQuery).
            WillReturnRows(sqlmock.NewRows([]string{"count"}).
                AddRow(len(podcastList)))

        mock.ExpectQuery(selectQuery).
            WillReturnRows(sqlmock.NewRows(Columns).
                AddRow(1, time.Now(), time.Now(), nil, 1, 1, podcastList[0]["title"], podcastList[0]["slug"], podcastList[0]["description"], podcastList[0]["html_description"], podcastList[0]["language"], podcastList[0]["primary_category_id"], podcastList[0]["medium_id"], 1).
                AddRow(1, time.Now(), time.Now(), nil, 1, 1, podcastList[1]["title"], podcastList[1]["slug"], podcastList[1]["description"], podcastList[1]["html_description"], podcastList[1]["language"], podcastList[1]["primary_category_id"], podcastList[1]["medium_id"], 1))
        PodcastCategorySelect(mock)
        medium.SelectWithOutSpace(mock)
        category.SelectWithOutSpace(mock)

        e.GET(basePath).
            WithHeaders(headers).
            WithQuery("q", "test").
            Expect().
            Status(http.StatusOK).
            JSON().
            Object().
            ContainsMap(map[string]interface{}{"total": len(podcastList)}).
            Value("nodes").
            Array().
            Element(0).
            Object().
            ContainsMap(podcastList[0])
    })
}
<file_sep>/util/names.go
package util
import (
"strings"
"github.com/factly/dega-server/config"
)
// CheckName reports whether `table` already contains a live (not soft-deleted)
// row in the given space whose name equals `name`, compared case-insensitively
// after trimming surrounding whitespace.
func CheckName(space uint, name, table string) bool {
    normalized := strings.ToLower(strings.TrimSpace(name))

    var total int64
    config.DB.Table(table).
        Where("deleted_at IS NULL AND (space_id = ? AND name ILIKE ?)", space, normalized).
        Count(&total)

    return total > 0
}
<file_sep>/test/service/core/policy/list_test.go
package policy
import (
"net/http"
"net/http/httptest"
"strings"
"testing"
"github.com/factly/dega-server/service"
"github.com/factly/dega-server/test"
"github.com/gavv/httpexpect/v2"
"github.com/spf13/viper"
"gopkg.in/h2non/gock.v1"
)
// TestList exercises GET /core/policies, backed by mocked keto responses.
func TestList(t *testing.T) {
    mock := test.SetupMockDB()

    test.MockServer()
    testServer := httptest.NewServer(service.RegisterRoutes())
    gock.New(testServer.URL).EnableNetworking().Persist()
    defer gock.DisableNetworking()
    defer testServer.Close()

    // create httpexpect instance
    e := httpexpect.New(t, testServer.URL)

    t.Run("List success", func(t *testing.T) {
        test.CheckSpaceMock(mock)
        // Splits string of ID to retrieve the name of the policy. The name is in the last index, hence the split
        var text = strings.Split(test.Dummy_KetoPolicy[1]["id"].(string), ":")

        e.GET(basePath).
            WithHeaders(headers).
            Expect().
            Status(http.StatusOK).
            JSON().Object().Value("nodes").Array().Element(0).Object().Value("name").Equal(text[len(text)-1])
    })

    t.Run("when keto cannot fetch policies", func(t *testing.T) {
        // Disable keto mocks except the "allowed" check so the policy listing
        // itself fails and the handler must report service unavailability.
        test.DisableKetoGock(testServer.URL)
        gock.New(viper.GetString("keto_url")).
            Post("/engines/acp/ory/regex/allowed").
            Persist().
            Reply(http.StatusOK)
        test.CheckSpaceMock(mock)

        e.GET(basePath).
            WithHeaders(headers).
            Expect().
            Status(http.StatusServiceUnavailable)
    })
}
<file_sep>/service/core/action/request/space/my.go
package space
import (
"net/http"
"github.com/factly/dega-server/config"
"github.com/factly/dega-server/service/core/model"
"github.com/factly/x/errorx"
"github.com/factly/x/loggerx"
"github.com/factly/x/middlewarex"
"github.com/factly/x/paginationx"
"github.com/factly/x/renderx"
)
// my - Get all my space permissions requests
// @Summary Show all my space permissions requests
// @Description Get all my space permissions requests
// @Tags Space_Permissions_Request
// @ID get-all-my-space-permissions-requests
// @Produce json
// @Param X-User header string true "User ID"
// @Param X-Space header string true "Space ID"
// @Success 200 {array} paging
// @Router /core/requests/spaces/my [get]
func my(w http.ResponseWriter, r *http.Request) {
    // The space ID is injected from the X-Space header by middleware; without
    // it the request cannot be scoped, so answer unauthorized.
    sID, err := middlewarex.GetSpace(r.Context())
    if err != nil {
        loggerx.Error(err)
        errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
        return
    }

    // Translate pagination query params into SQL offset & limit.
    offset, limit := paginationx.Parse(r.URL.Query())

    result := paging{}
    result.Nodes = make([]model.SpacePermissionRequest, 0)

    // Count all matching requests, then fetch one page for this space.
    config.DB.Model(&model.SpacePermissionRequest{}).Where(&model.SpacePermissionRequest{
        SpaceID: uint(sID),
    }).Count(&result.Total).Offset(offset).Limit(limit).Find(&result.Nodes)

    renderx.JSON(w, http.StatusOK, result)
}
package space
import (
"errors"
"net/http"
"net/http/httptest"
"testing"
"github.com/DATA-DOG/go-sqlmock"
"github.com/factly/dega-server/service"
"github.com/factly/dega-server/test"
"github.com/gavv/httpexpect"
"gopkg.in/h2non/gock.v1"
)
// TestSpacePermissionCreate exercises POST /core/permissions/spaces against a
// mocked DB. sqlmock expectations are order-sensitive per subtest.
func TestSpacePermissionCreate(t *testing.T) {
    mock := test.SetupMockDB()
    test.MockServer()
    defer gock.DisableNetworking()
    testServer := httptest.NewServer(service.RegisterRoutes())
    gock.New(testServer.URL).EnableNetworking().Persist()
    defer gock.DisableNetworking()
    defer testServer.Close()

    // create httpexpect instance
    e := httpexpect.New(t, testServer.URL)

    t.Run("Unprocessable permission", func(t *testing.T) {
        test.CheckSpaceMock(mock)

        e.POST(basePath).
            WithJSON(invalidData).
            WithHeaders(headers).
            Expect().
            Status(http.StatusUnprocessableEntity)
    })

    t.Run("Undecodable permission", func(t *testing.T) {
        test.CheckSpaceMock(mock)

        e.POST(basePath).
            WithHeaders(headers).
            Expect().
            Status(http.StatusUnprocessableEntity)
    })

    t.Run("space's permission already exist", func(t *testing.T) {
        test.CheckSpaceMock(mock)
        // Existing-permission count of 1 makes the handler reject the create.
        mock.ExpectQuery(countQuery).
            WillReturnRows(sqlmock.NewRows([]string{"count"}).AddRow(1))

        e.POST(basePath).
            WithJSON(Data).
            WithHeaders(headers).
            Expect().
            Status(http.StatusUnprocessableEntity)
        test.ExpectationsMet(t, mock)
    })

    t.Run("create space permission", func(t *testing.T) {
        test.CheckSpaceMock(mock)
        mock.ExpectQuery(countQuery).
            WillReturnRows(sqlmock.NewRows([]string{"count"}).AddRow(0))

        mock.ExpectBegin()
        mock.ExpectQuery(`INSERT INTO "space_permissions"`).
            WithArgs(test.AnyTime{}, test.AnyTime{}, nil, 1, 1, Data["fact_check"], Data["space_id"], Data["media"], Data["posts"], Data["podcast"], Data["episodes"]).
            WillReturnRows(sqlmock.
                NewRows([]string{"id"}).
                AddRow(1))
        mock.ExpectCommit()

        e.POST(basePath).
            WithJSON(Data).
            WithHeaders(headers).
            Expect().
            Status(http.StatusCreated).
            JSON().
            Object().
            ContainsMap(Data)
        test.ExpectationsMet(t, mock)
    })

    t.Run("creating space permission fails", func(t *testing.T) {
        test.CheckSpaceMock(mock)
        mock.ExpectQuery(countQuery).
            WillReturnRows(sqlmock.NewRows([]string{"count"}).AddRow(0))

        // DB insert error should surface as a 500 and roll back.
        mock.ExpectBegin()
        mock.ExpectQuery(`INSERT INTO "space_permissions"`).
            WithArgs(test.AnyTime{}, test.AnyTime{}, nil, 1, 1, Data["fact_check"], Data["space_id"], Data["media"], Data["posts"], Data["podcast"], Data["episodes"]).
            WillReturnError(errors.New("cannot create space permission"))
        mock.ExpectRollback()

        e.POST(basePath).
            WithJSON(Data).
            WithHeaders(headers).
            Expect().
            Status(http.StatusInternalServerError)
        test.ExpectationsMet(t, mock)
    })
}
<file_sep>/test/service/core/format/update_test.go
package format
import (
"fmt"
"net/http"
"net/http/httptest"
"testing"
"time"
"github.com/DATA-DOG/go-sqlmock"
"github.com/factly/dega-server/service"
"github.com/factly/dega-server/test"
"github.com/gavv/httpexpect/v2"
"gopkg.in/h2non/gock.v1"
)
// TestFormatUpdate exercises PUT /core/formats/{format_id} against a mocked
// DB and external services. sqlmock expectations are order-sensitive.
func TestFormatUpdate(t *testing.T) {
    mock := test.SetupMockDB()
    test.MockServer()
    defer gock.DisableNetworking()
    testServer := httptest.NewServer(service.RegisterRoutes())
    gock.New(testServer.URL).EnableNetworking().Persist()
    defer gock.DisableNetworking()
    defer testServer.Close()

    // create httpexpect instance
    e := httpexpect.New(t, testServer.URL)

    t.Run("invalid format id", func(t *testing.T) {
        test.CheckSpaceMock(mock)

        e.PUT(path).
            WithPath("format_id", "invalid_id").
            WithHeaders(headers).
            Expect().
            Status(http.StatusBadRequest)
    })

    t.Run("format record not found", func(t *testing.T) {
        test.CheckSpaceMock(mock)
        recordNotFoundMock(mock)

        e.PUT(path).
            WithPath("format_id", "100").
            WithHeaders(headers).
            Expect().
            Status(http.StatusNotFound)
    })

    t.Run("Unable to decode format data", func(t *testing.T) {
        test.CheckSpaceMock(mock)
        SelectMock(mock, 1, 1)

        e.PUT(path).
            WithPath("format_id", 1).
            WithHeaders(headers).
            Expect().
            Status(http.StatusUnprocessableEntity)
    })

    t.Run("Unprocessable format", func(t *testing.T) {
        test.CheckSpaceMock(mock)
        SelectMock(mock, 1, 1)

        e.PUT(path).
            WithPath("format_id", 1).
            WithHeaders(headers).
            WithJSON(invalidData).
            Expect().
            Status(http.StatusUnprocessableEntity)
    })

    t.Run("update format", func(t *testing.T) {
        test.CheckSpaceMock(mock)
        updatedFormat := map[string]interface{}{
            "name": "Fact Check",
            "slug": "fact-check",
        }
        SelectMock(mock, 1, 1)
        formatUpdateMock(mock, updatedFormat)
        selectAfterUpdate(mock, updatedFormat)
        mock.ExpectCommit()

        e.PUT(path).
            WithPath("format_id", 1).
            WithHeaders(headers).
            WithJSON(updatedFormat).
            Expect().
            Status(http.StatusOK).JSON().Object().ContainsMap(updatedFormat)
    })

    t.Run("update format by id with empty slug", func(t *testing.T) {
        test.CheckSpaceMock(mock)
        updatedFormat := map[string]interface{}{
            "name": "Fact Check",
            "slug": "fact-check",
        }
        SelectMock(mock, 1, 1)
        // Slug uniqueness lookup the handler runs when deriving a new slug.
        mock.ExpectQuery(`SELECT slug, space_id FROM "formats"`).
            WithArgs("fact-check%", 1).
            WillReturnRows(sqlmock.NewRows(columns).
                AddRow(1, time.Now(), time.Now(), nil, 1, 1, updatedFormat["name"], "factcheck"))
        formatUpdateMock(mock, updatedFormat)
        selectAfterUpdate(mock, updatedFormat)
        mock.ExpectCommit()

        // Empty slug in the request: the server should derive it from name.
        Data["slug"] = ""
        e.PUT(path).
            WithPath("format_id", 1).
            WithHeaders(headers).
            WithJSON(Data).
            Expect().
            Status(http.StatusOK).JSON().Object().ContainsMap(updatedFormat)
        Data["slug"] = "fact-check"
        test.ExpectationsMet(t, mock)
    })

    t.Run("update format with different slug", func(t *testing.T) {
        test.CheckSpaceMock(mock)
        updatedFormat := map[string]interface{}{
            "name": "Fact Check",
            "slug": "testing-slug",
        }
        SelectMock(mock, 1, 1)

        mock.ExpectQuery(`SELECT slug, space_id FROM "formats"`).
            WithArgs(fmt.Sprint(updatedFormat["slug"], "%"), 1).
            WillReturnRows(sqlmock.NewRows([]string{"slug", "space_id"}))

        formatUpdateMock(mock, updatedFormat)
        selectAfterUpdate(mock, updatedFormat)
        mock.ExpectCommit()

        e.PUT(path).
            WithPath("format_id", 1).
            WithHeaders(headers).
            WithJSON(updatedFormat).
            Expect().
            Status(http.StatusOK).JSON().Object().ContainsMap(updatedFormat)
    })

    t.Run("format with same name exist", func(t *testing.T) {
        test.CheckSpaceMock(mock)
        updatedFormat := map[string]interface{}{
            "name": "Fact Chk",
            "slug": "fact-check",
        }
        SelectMock(mock, 1, 1)
        // A same-name count of 1 makes the handler reject the update.
        sameNameCount(mock, 1, updatedFormat["name"])

        e.PUT(path).
            WithPath("format_id", 1).
            WithHeaders(headers).
            WithJSON(updatedFormat).
            Expect().
            Status(http.StatusUnprocessableEntity)
    })

    t.Run("update format when meili is down", func(t *testing.T) {
        test.DisableMeiliGock(testServer.URL)
        test.CheckSpaceMock(mock)
        updatedFormat := map[string]interface{}{
            "name": "Fact Check",
            "slug": "article",
        }
        SelectMock(mock, 1, 1)

        mock.ExpectQuery(`SELECT slug, space_id FROM "formats"`).
            WithArgs(fmt.Sprint(updatedFormat["slug"], "%"), 1).
            WillReturnRows(sqlmock.NewRows([]string{"slug", "space_id"}))

        formatUpdateMock(mock, updatedFormat)
        selectAfterUpdate(mock, updatedFormat)
        // Meili failure should roll the DB transaction back.
        mock.ExpectRollback()

        e.PUT(path).
            WithPath("format_id", 1).
            WithHeaders(headers).
            WithJSON(updatedFormat).
            Expect().
            Status(http.StatusInternalServerError)
    })
}
<file_sep>/test/service/fact-check/google/list_test.go
package google
import (
"net/http"
"net/http/httptest"
"testing"
"github.com/factly/dega-server/service/fact-check/action/google"
"github.com/factly/dega-server/test/service/core/permissions/space"
"github.com/factly/dega-server/service"
"github.com/factly/dega-server/test"
"github.com/gavv/httpexpect"
"gopkg.in/h2non/gock.v1"
)
// TestGoogleList exercises GET /fact-check/google, which proxies the Google
// Fact Check Tools API (mocked through gock).
func TestGoogleList(t *testing.T) {
    mock := test.SetupMockDB()

    test.GoogleFactCheckGock()
    test.KavachGock()
    testServer := httptest.NewServer(service.RegisterRoutes())
    gock.New(testServer.URL).EnableNetworking().Persist()
    defer gock.DisableNetworking()
    defer testServer.Close()

    // create httpexpect instance
    e := httpexpect.New(t, testServer.URL)

    headers := map[string]string{
        "X-User":  "1",
        "X-Space": "1",
    }

    path := "/fact-check/google"

    t.Run("get list of google factchecks", func(t *testing.T) {
        test.CheckSpaceMock(mock)
        space.SelectQuery(mock, 1)
        e.GET(path).
            WithHeaders(headers).
            WithQueryObject(map[string]interface{}{
                "query":    "modi",
                "language": "en",
            }).
            Expect().
            Status(http.StatusOK)
    })

    t.Run("get google factcheck without query", func(t *testing.T) {
        // The "query" parameter is mandatory.
        test.CheckSpaceMock(mock)
        space.SelectQuery(mock, 1)
        e.GET(path).
            WithHeaders(headers).
            WithQueryObject(map[string]interface{}{
                "language": "en",
            }).
            Expect().
            Status(http.StatusUnprocessableEntity)
    })

    t.Run("get list of google factchecks with pageToken query param", func(t *testing.T) {
        test.CheckSpaceMock(mock)
        space.SelectQuery(mock, 1)
        e.GET(path).
            WithHeaders(headers).
            WithQueryObject(map[string]interface{}{
                "query":     "modi",
                "language":  "en",
                "pageToken": "abc",
            }).
            Expect().
            Status(http.StatusOK)
    })

    t.Run("when google server is down", func(t *testing.T) {
        // Drop all gock mocks (including Google's) but keep kavach mocked.
        gock.Off()
        test.KavachGock()
        gock.New(testServer.URL).EnableNetworking().Persist()
        defer gock.DisableNetworking()
        test.CheckSpaceMock(mock)
        space.SelectQuery(mock, 1)
        e.GET(path).
            WithHeaders(headers).
            WithQueryObject(map[string]interface{}{
                "query":    "modi",
                "language": "en",
            }).
            Expect().
            Status(http.StatusServiceUnavailable)
    })
    gock.New(testServer.URL).EnableNetworking().Persist()
    defer gock.DisableNetworking()

    t.Run("when google returns empty result", func(t *testing.T) {
        test.CheckSpaceMock(mock)
        space.SelectQuery(mock, 1)
        gock.New(google.GoogleURL).
            Reply(http.StatusOK).
            JSON(map[string]interface{}{})

        e.GET(path).
            WithHeaders(headers).
            WithQueryObject(map[string]interface{}{
                "query":    "modi",
                "language": "en",
            }).
            Expect().
            Status(http.StatusOK).
            JSON().
            Object().
            ContainsMap(map[string]interface{}{"total": 0})
    })
}
<file_sep>/test/mock.go
package test
import (
"database/sql/driver"
"log"
"regexp"
"testing"
"time"
"github.com/factly/dega-server/service/fact-check/action/google"
"github.com/factly/x/meilisearchx"
"github.com/meilisearch/meilisearch-go"
"github.com/nats-io/gnatsd/server"
gnatsd "github.com/nats-io/gnatsd/test"
"github.com/nats-io/nats.go"
"github.com/spf13/viper"
"github.com/DATA-DOG/go-sqlmock"
"github.com/factly/dega-server/config"
"gorm.io/driver/postgres"
"gorm.io/gorm"
"gorm.io/gorm/logger"
)
// AnyTime is a sqlmock argument matcher that accepts any time.Time value —
// used for created_at/updated_at columns whose exact value is not asserted.
type AnyTime struct{}
// Match satisfies the sqlmock.Argument interface: it reports whether the
// supplied driver value is a time.Time (the concrete instant is irrelevant).
func (a AnyTime) Match(v driver.Value) bool {
    switch v.(type) {
    case time.Time:
        return true
    default:
        return false
    }
}
// SetupMockDB setups the mock sql db
// It seeds the viper test configuration, points the google fact-check and
// meilisearch clients at test endpoints, and wires config.DB to a sqlmock
// connection whose handle is returned for setting expectations.
func SetupMockDB() sqlmock.Sqlmock {
	// Test configuration consumed across the services; Set order between
	// distinct keys does not matter.
	settings := map[string]interface{}{
		"kavach_url":                "http://kavach:6620",
		"keto_url":                  "http://keto:6644",
		"meili_url":                 "http://meili:7700",
		"meili_key":                 "password",
		"imageproxy_url":            "http://imageproxy",
		"create_super_organisation": true,
		"nats_url":                  "nats://127.0.0.1:4222",
		"enable_hukz":               false,
		"templates_path":            "../../../../web/templates/*",
	}
	for key, value := range settings {
		viper.Set(key, value)
	}

	google.GoogleURL = "http://googlefactchecktest.com"

	meilisearchx.Client = meilisearch.NewClient(meilisearch.Config{
		Host:   viper.GetString("meili_url"),
		APIKey: viper.GetString("meili_key"),
	})

	db, mock, err := sqlmock.New()
	if err != nil {
		log.Println(err)
	}

	dialector := postgres.New(postgres.Config{
		DSN:                  "sqlmock_db_0",
		DriverName:           "postgres",
		Conn:                 db,
		PreferSimpleProtocol: true,
	})

	config.DB, err = gorm.Open(dialector, &gorm.Config{
		Logger: logger.Default.LogMode(logger.Info),
	})
	if err != nil {
		log.Println(err)
	}

	return mock
}
// RunDefaultNATSServer will run a nats server on the default port.
// Convenience wrapper around RunServerOnPort using nats.DefaultPort.
func RunDefaultNATSServer() *server.Server {
	return RunServerOnPort(nats.DefaultPort)
}
// RunServerOnPort will run a server on the given port.
// Uses gnatsd's default test options with only the port overridden.
func RunServerOnPort(port int) *server.Server {
	opts := gnatsd.DefaultTestOptions
	opts.Port = port
	return RunServerWithOptions(opts)
}
// RunServerWithOptions will run a server with the given options.
// Thin wrapper over gnatsd's test helper.
func RunServerWithOptions(opts server.Options) *server.Server {
	return gnatsd.RunServer(&opts)
}
// RunServerWithConfig will run a server with the given configuration file.
// Returns both the running server and the options parsed from the file.
func RunServerWithConfig(configFile string) (*server.Server, *server.Options) {
	return gnatsd.RunServerWithConfig(configFile)
}
//ExpectationsMet checks if all the expectations are fulfilled
// Fails the test (without stopping it) when any sqlmock expectation is unmet.
func ExpectationsMet(t *testing.T, mock sqlmock.Sqlmock) {
	err := mock.ExpectationsWereMet()
	if err == nil {
		return
	}
	t.Errorf("there were unfulfilled expectations: %s", err)
}
// CheckSpaceMock sets the expectation for a `SELECT * FROM "spaces"` query
// with argument 1, returning a single row (org 1, slug "test-space").
func CheckSpaceMock(mock sqlmock.Sqlmock) {
	mock.ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "spaces"`)).
		WithArgs(1).
		WillReturnRows(sqlmock.NewRows([]string{"organisation_id", "slug", "space_id"}).AddRow(1, "test-space", "1"))
}
<file_sep>/service/podcast/action/route.go
package podcast
import (
"github.com/factly/dega-server/config"
"github.com/factly/dega-server/util"
"github.com/go-chi/chi"
"github.com/jinzhu/gorm/dialects/postgres"
)
// podcast model
// Request payload for podcast create/update; the struct tags drive JSON
// binding, validation, and swagger documentation.
type podcast struct {
	Title             string         `json:"title" validate:"required,min=3,max=50"`
	Slug              string         `json:"slug"`
	Language          string         `json:"language" validate:"required"`
	Description       postgres.Jsonb `json:"description" swaggertype:"primitive,string"`
	MediumID          uint           `json:"medium_id"`
	SpaceID           uint           `json:"space_id"`
	PrimaryCategoryID uint           `json:"primary_category_id"`
	CategoryIDs       []uint         `json:"category_ids"`
	HeaderCode        string         `json:"header_code"`
	FooterCode        string         `json:"footer_code"`
	MetaFields        postgres.Jsonb `json:"meta_fields" swaggertype:"primitive,string"`
}

// podcastUser is this package's request-context key; presumably the handlers
// store the acting user's ID under it — verify against the create/update handlers.
var podcastUser config.ContextKey = "podcast_user"
// Router - Group of podcast router
// Every route is guarded by the keto policy middleware for the "podcasts"
// entity and the matching action.
func Router() chi.Router {
	router := chi.NewRouter()
	const resource = "podcasts"

	router.With(util.CheckKetoPolicy(resource, "get")).Get("/", list)
	router.With(util.CheckKetoPolicy(resource, "create")).Post("/", create)

	router.Route("/{podcast_id}", func(sub chi.Router) {
		sub.With(util.CheckKetoPolicy(resource, "get")).Get("/", details)
		sub.With(util.CheckKetoPolicy(resource, "update")).Put("/", update)
		sub.With(util.CheckKetoPolicy(resource, "delete")).Delete("/", delete)
	})

	return router
}
<file_sep>/service/fact-check/model/claimant.go
package model
import (
"errors"
"gorm.io/gorm"
"github.com/factly/dega-server/config"
"github.com/factly/dega-server/service/core/model"
"github.com/jinzhu/gorm/dialects/postgres"
)
// Claimant model
type Claimant struct {
	config.Base
	Name            string         `gorm:"column:name" json:"name"`
	Slug            string         `gorm:"column:slug" json:"slug"`
	Description     postgres.Jsonb `gorm:"column:description" json:"description" swaggertype:"primitive,string"`
	HTMLDescription string         `gorm:"column:html_description" json:"html_description,omitempty"`
	TagLine         string         `gorm:"column:tag_line" json:"tag_line"`
	// MediumID is a nullable FK; BeforeSave verifies the medium belongs to the
	// claimant's space when it is set.
	MediumID   *uint          `gorm:"column:medium_id;default:NULL" json:"medium_id"`
	Medium     *model.Medium  `json:"medium"`
	MetaFields postgres.Jsonb `gorm:"column:meta_fields" json:"meta_fields" swaggertype:"primitive,string"`
	SpaceID    uint           `gorm:"column:space_id" json:"space_id"`
	Space      *model.Space   `json:"space,omitempty"`
}
// BeforeSave - validation for medium
// When a medium is referenced, verify it exists within the claimant's space;
// otherwise reject the save.
func (claimant *Claimant) BeforeSave(tx *gorm.DB) (e error) {
	if claimant.MediumID == nil || *claimant.MediumID == 0 {
		return nil
	}

	medium := model.Medium{}
	medium.ID = *claimant.MediumID

	if err := tx.Model(&model.Medium{}).Where(model.Medium{
		SpaceID: claimant.SpaceID,
	}).First(&medium).Error; err != nil {
		return errors.New("medium do not belong to same space")
	}
	return nil
}
var claimantUser config.ContextKey = "claimant_user"
// BeforeCreate hook
// Stamps created-by/updated-by from the user ID carried on the statement
// context; a missing value leaves both fields untouched.
func (claimant *Claimant) BeforeCreate(tx *gorm.DB) error {
	userID := tx.Statement.Context.Value(claimantUser)
	if userID == nil {
		return nil
	}

	id := uint(userID.(int))
	claimant.CreatedByID = id
	claimant.UpdatedByID = id
	return nil
}
<file_sep>/service/core/model/migration.go
package model
import "github.com/factly/dega-server/config"
//Migration - core models
func Migration() {
_ = config.DB.AutoMigrate(
&Medium{},
&Category{},
&Tag{},
&Space{},
&Format{},
&Post{},
&PostAuthor{},
&OrganisationPermission{},
&SpacePermission{},
&OrganisationPermissionRequest{},
&SpacePermissionRequest{},
&Menu{},
)
}
<file_sep>/service/fact-check/model/migration.go
package model
import "github.com/factly/dega-server/config"
//Migration fact-check models
func Migration() {
_ = config.DB.AutoMigrate(
&Claimant{},
&Rating{},
&Claim{},
&PostClaim{},
)
}
<file_sep>/util/arrays/difference.go
package arrays
// Difference compares the previous and the new ID lists and returns
// (additions, deletions): the IDs present only in new, and the IDs present
// only in prev. The deletions loop previously duplicated subtraction's body
// verbatim; both directions now reuse the helper. O(len(prev)*len(new)),
// which is fine for the small association ID lists this serves.
func Difference(prev []uint, new []uint) ([]uint, []uint) {
	// Nothing kept: everything previously present is deleted.
	if len(new) == 0 {
		return new, prev
	}

	// Nothing existed before: everything in new is an addition.
	if len(prev) == 0 {
		return new, make([]uint, 0)
	}

	// additions = new \ prev, deletions = prev \ new.
	return subtraction(new, prev), subtraction(prev, new)
}

// subtraction returns the elements of arr1 that do not appear in arr2
// (set difference arr1 \ arr2, preserving arr1's order).
func subtraction(arr1 []uint, arr2 []uint) []uint {
	sub := make([]uint, 0)
	for _, candidate := range arr1 {
		found := false
		for _, other := range arr2 {
			if candidate == other {
				found = true
				break
			}
		}
		if !found {
			sub = append(sub, candidate)
		}
	}
	return sub
}
<file_sep>/cmd/root.go
package cmd
import (
"github.com/factly/dega-server/config"
"github.com/spf13/cobra"
)
// rootCmd represents the base command when called without any subcommands
// NOTE(review): subcommands are expected to attach themselves to rootCmd from
// other files in this package — not visible here, verify.
var rootCmd = &cobra.Command{
	Use:   "dega-server",
	Short: "A lightweight CMS written in Go.",
	Long:  `A lightweight, scalable & high performant CMS written in Go. Developed for modern web features with all the best practices built-in.`,
}
// Execute adds all child commands to the root command and sets flags appropriately.
// This is called by main.main(). It only needs to happen once to the rootCmd.
func Execute() {
	// CheckErr prints any returned error and exits with a non-zero status.
	cobra.CheckErr(rootCmd.Execute())
}
// init registers config.SetupVars to run before any command executes.
func init() {
	cobra.OnInitialize(config.SetupVars)
}
<file_sep>/service/core/action/category/create.go
package category
import (
"context"
"encoding/json"
"errors"
"net/http"
"reflect"
"github.com/factly/x/loggerx"
"gorm.io/gorm"
"github.com/factly/dega-server/config"
"github.com/factly/dega-server/service/core/model"
"github.com/factly/dega-server/test"
"github.com/factly/dega-server/util"
"github.com/factly/x/errorx"
"github.com/factly/x/meilisearchx"
"github.com/factly/x/middlewarex"
"github.com/factly/x/renderx"
"github.com/factly/x/slugx"
"github.com/factly/x/validationx"
)
// create - Create category
// @Summary Create category
// @Description Create category
// @Tags Category
// @ID add-category
// @Consume json
// @Produce json
// @Param X-User header string true "User ID"
// @Param X-Space header string true "Space ID"
// @Param Category body category true "Category Object"
// @Success 201 {object} model.Category
// @Failure 400 {array} string
// @Router /core/categories [post]
func create(w http.ResponseWriter, r *http.Request) {
	// Space and user IDs are extracted from request headers by middleware.
	sID, err := middlewarex.GetSpace(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}

	uID, err := middlewarex.GetUser(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}

	// NOTE: the local variable shadows the package-level `category` request type.
	category := &category{}

	err = json.NewDecoder(r.Body).Decode(&category)
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.DecodeError()))
		return
	}

	validationError := validationx.Check(category)
	if validationError != nil {
		loggerx.Error(errors.New("validation error"))
		errorx.Render(w, validationError)
		return
	}

	// Check if parent category exist or not (must live in the same space).
	if category.ParentID != 0 {
		var parentCat model.Category
		parentCat.ID = category.ParentID
		err = config.DB.Where(&model.Category{SpaceID: uint(sID)}).First(&parentCat).Error
		if err != nil {
			loggerx.Error(err)
			errorx.Render(w, errorx.Parser(errorx.GetMessage("Parent category does not exist", http.StatusUnprocessableEntity)))
			return
		}
	}

	// Use the client-supplied slug only when it is a valid slug; otherwise
	// derive one from the category name.
	var categorySlug string
	if category.Slug != "" && slugx.Check(category.Slug) {
		categorySlug = category.Slug
	} else {
		categorySlug = slugx.Make(category.Name)
	}

	// Get table name
	stmt := &gorm.Statement{DB: config.DB}
	_ = stmt.Parse(&model.Category{})
	tableName := stmt.Schema.Table

	// Check if category with same name exist
	if util.CheckName(uint(sID), category.Name, tableName) {
		loggerx.Error(errors.New(`category with same name exist`))
		errorx.Render(w, errorx.Parser(errorx.SameNameExist()))
		return
	}

	// Zero IDs are stored as NULL foreign keys.
	mediumID := &category.MediumID
	if category.MediumID == 0 {
		mediumID = nil
	}
	parentID := &category.ParentID
	if category.ParentID == 0 {
		parentID = nil
	}

	// Store HTML description (only when a non-nil JSON description was sent).
	var description string
	if len(category.Description.RawMessage) > 0 && !reflect.DeepEqual(category.Description, test.NilJsonb()) {
		description, err = util.HTMLDescription(category.Description)
		if err != nil {
			loggerx.Error(err)
			errorx.Render(w, errorx.Parser(errorx.GetMessage("cannot parse category description", http.StatusUnprocessableEntity)))
			return
		}
	}

	result := &model.Category{
		Name:            category.Name,
		Description:     category.Description,
		HTMLDescription: description,
		Slug:            slugx.Approve(&config.DB, categorySlug, sID, tableName),
		ParentID:        parentID,
		MediumID:        mediumID,
		SpaceID:         uint(sID),
		IsFeatured:      category.IsFeatured,
		MetaFields:      category.MetaFields,
	}

	// The creating user's ID rides on the context so the model's create hook
	// can stamp created/updated-by columns.
	tx := config.DB.WithContext(context.WithValue(r.Context(), userContext, uID)).Begin()
	err = tx.Model(&model.Category{}).Create(&result).Error
	if err != nil {
		tx.Rollback()
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.DBError()))
		return
	}

	tx.Model(&model.Category{}).Preload("Medium").First(&result)

	// Insert into meili index
	meiliObj := map[string]interface{}{
		"id":          result.ID,
		"kind":        "category",
		"name":        result.Name,
		"slug":        result.Slug,
		"description": result.Description,
		"space_id":    result.SpaceID,
		"meta_fields": result.MetaFields,
	}

	// Indexing failure rolls the DB insert back so DB and search stay in sync.
	err = meilisearchx.AddDocument("dega", meiliObj)
	if err != nil {
		tx.Rollback()
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
		return
	}

	tx.Commit()

	// Publish-after-commit: a NATS failure here yields a 500 even though the
	// category was created.
	if util.CheckNats() {
		if err = util.NC.Publish("category.created", result); err != nil {
			loggerx.Error(err)
			errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
			return
		}
	}

	renderx.JSON(w, http.StatusCreated, result)
}
<file_sep>/util/policy.go
package util
import (
"bytes"
"encoding/json"
"errors"
"fmt"
"net/http"
"github.com/factly/x/middlewarex"
"github.com/spf13/viper"
)
// KetoAllowed is request object to check permissions of user
type KetoAllowed struct {
	Subject  string `json:"subject"`  // user ID rendered as a string
	Action   string `json:"action"`   // fully-qualified keto action string
	Resource string `json:"resource"` // fully-qualified keto resource string
}
// CheckKetoPolicy returns middleware that checks the permissions of user from keto server
// Any failure to resolve the space/user/organisation, or a non-200 answer
// from keto, results in a 401 without invoking the wrapped handler.
func CheckKetoPolicy(entity, action string) func(h http.Handler) http.Handler {
	return func(next http.Handler) http.Handler {
		return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
			ctx := r.Context()

			spaceID, err := middlewarex.GetSpace(ctx)
			if err != nil {
				w.WriteHeader(http.StatusUnauthorized)
				return
			}

			userID, err := middlewarex.GetUser(ctx)
			if err != nil {
				w.WriteHeader(http.StatusUnauthorized)
				return
			}

			orgID, err := GetOrganisation(ctx)
			if err != nil {
				w.WriteHeader(http.StatusUnauthorized)
				return
			}

			// Shared middle segment of keto action/resource identifiers.
			prefix := fmt.Sprint(":org:", orgID, ":app:dega:space:", spaceID, ":")

			check := KetoAllowed{
				Subject:  fmt.Sprint(userID),
				Action:   fmt.Sprint("actions", prefix, entity, ":", action),
				Resource: fmt.Sprint("resources", prefix, entity),
			}

			status, err := IsAllowed(check)
			if err != nil || status != http.StatusOK {
				w.WriteHeader(http.StatusUnauthorized)
				return
			}

			next.ServeHTTP(w, r)
		})
	}
}
// CheckSpaceKetoPermission checks keto policy for operations on space
// Returns nil when keto allows the action, the transport error when the
// check itself failed, and a permission error otherwise.
func CheckSpaceKetoPermission(action string, oID, uID uint) error {
	base := fmt.Sprint(":org:", oID, ":app:dega:spaces")

	check := KetoAllowed{
		Subject:  fmt.Sprint(uID),
		Action:   fmt.Sprint("actions", base, ":", action),
		Resource: fmt.Sprint("resources", base),
	}

	status, err := IsAllowed(check)
	switch {
	case err != nil:
		return err
	case status != http.StatusOK:
		return errors.New("Permission not granted")
	default:
		return nil
	}
}
// IsAllowed checks if keto policy allows user to action on resource
// It POSTs the request to keto's regex ACP engine and returns the HTTP
// status code of the answer (200 means allowed).
func IsAllowed(result KetoAllowed) (int, error) {
	buf := new(bytes.Buffer)
	err := json.NewEncoder(buf).Encode(&result)
	if err != nil {
		return 0, err
	}

	req, err := http.NewRequest("POST", viper.GetString("keto_url")+"/engines/acp/ory/regex/allowed", buf)
	if err != nil {
		return 0, err
	}
	req.Header.Set("Content-Type", "application/json")

	client := &http.Client{}
	resp, err := client.Do(req)
	if err != nil {
		return 0, err
	}
	// Fix: the body was never closed, leaking the underlying connection.
	// Only the status code is consumed here.
	defer resp.Body.Close()

	return resp.StatusCode, nil
}
<file_sep>/service/core/action/tag/route.go
package tag
import (
"github.com/factly/dega-server/config"
"github.com/factly/dega-server/util"
"github.com/go-chi/chi"
"github.com/jinzhu/gorm/dialects/postgres"
)
// tag model
// Request payload for tag create/update; the struct tags drive JSON binding,
// validation, and swagger documentation.
type tag struct {
	Name        string         `json:"name" validate:"required,min=3,max=50"`
	Slug        string         `json:"slug"`
	IsFeatured  bool           `json:"is_featured"`
	Description postgres.Jsonb `json:"description" swaggertype:"primitive,string"`
	MetaFields  postgres.Jsonb `json:"meta_fields" swaggertype:"primitive,string"`
}

// userContext is this package's request-context key; presumably the handlers
// store the acting user's ID under it for gorm hooks — verify in create/update.
var userContext config.ContextKey = "tag_user"
// Router - Group of tag router
// Every route is guarded by the keto policy middleware for the "tags" entity
// and the matching action.
func Router() chi.Router {
	router := chi.NewRouter()
	const resource = "tags"

	router.With(util.CheckKetoPolicy(resource, "get")).Get("/", list)
	router.With(util.CheckKetoPolicy(resource, "create")).Post("/", create)

	router.Route("/{tag_id}", func(sub chi.Router) {
		sub.With(util.CheckKetoPolicy(resource, "get")).Get("/", details)
		sub.With(util.CheckKetoPolicy(resource, "update")).Put("/", update)
		sub.With(util.CheckKetoPolicy(resource, "delete")).Delete("/", delete)
	})

	return router
}
<file_sep>/service/core/action/post/delete.go
package post
import (
"net/http"
"strconv"
"github.com/factly/dega-server/config"
"github.com/factly/dega-server/service/core/model"
factcheckModel "github.com/factly/dega-server/service/fact-check/model"
"github.com/factly/dega-server/util"
"github.com/factly/x/errorx"
"github.com/factly/x/loggerx"
"github.com/factly/x/meilisearchx"
"github.com/factly/x/middlewarex"
"github.com/factly/x/renderx"
"github.com/go-chi/chi"
)
// delete - Delete post by id
// @Summary Delete a post
// @Description Delete post by ID
// @Tags Post
// @ID delete-post-by-id
// @Param X-User header string true "User ID"
// @Param X-Space header string true "Space ID"
// @Param post_id path string true "Post ID"
// @Success 200
// @Router /core/posts/{post_id} [delete]
func delete(w http.ResponseWriter, r *http.Request) {
	sID, err := middlewarex.GetSpace(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}

	postID := chi.URLParam(r, "post_id")
	id, err := strconv.Atoi(postID)
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InvalidID()))
		return
	}

	result := &model.Post{}
	result.ID = uint(id)

	// check record exists or not; `is_page` false restricts this endpoint to
	// posts. Tags/Categories are preloaded so their association rows can be
	// removed below.
	err = config.DB.Where(&model.Post{
		SpaceID: uint(sID),
	}).Where("is_page", false).Preload("Tags").Preload("Categories").First(&result).Error

	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.RecordNotFound()))
		return
	}

	tx := config.DB.Begin()

	// delete all associations
	if len(result.Tags) > 0 {
		_ = tx.Model(&result).Association("Tags").Delete(result.Tags)
	}
	if len(result.Categories) > 0 {
		_ = tx.Model(&result).Association("Categories").Delete(result.Categories)
	}

	// Remove join-table rows owned by the post: authors and fact-check claims.
	tx.Model(&model.PostAuthor{}).Where(&model.PostAuthor{
		PostID: uint(id),
	}).Delete(&model.PostAuthor{})

	tx.Model(&factcheckModel.PostClaim{}).Where(&factcheckModel.PostClaim{
		PostID: uint(id),
	}).Delete(&factcheckModel.PostClaim{})

	tx.Model(&model.Post{}).Delete(&result)

	// A failure to drop the search document rolls back the whole delete so
	// the index never references a missing post.
	err = meilisearchx.DeleteDocument("dega", result.ID, "post")
	if err != nil {
		tx.Rollback()
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
		return
	}

	tx.Commit()

	if util.CheckNats() {
		if err = util.NC.Publish("post.deleted", result); err != nil {
			loggerx.Error(err)
			errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
			return
		}
	}

	renderx.JSON(w, http.StatusOK, nil)
}
<file_sep>/service/fact-check/action/rating/update.go
package rating
import (
"encoding/json"
"errors"
"net/http"
"reflect"
"strconv"
"github.com/factly/dega-server/config"
"github.com/factly/dega-server/service/fact-check/model"
"github.com/factly/dega-server/test"
"github.com/factly/dega-server/util"
"github.com/factly/x/errorx"
"github.com/factly/x/loggerx"
"github.com/factly/x/meilisearchx"
"github.com/factly/x/middlewarex"
"github.com/factly/x/renderx"
"github.com/factly/x/slugx"
"github.com/factly/x/validationx"
"github.com/go-chi/chi"
"gorm.io/gorm"
)
// update - Update rating by id
// @Summary Update a rating by id
// @Description Update rating by ID
// @Tags Rating
// @ID update-rating-by-id
// @Produce json
// @Consume json
// @Param X-User header string true "User ID"
// @Param X-Space header string true "Space ID"
// @Param rating_id path string true "Rating ID"
// @Param Rating body rating false "Rating"
// @Success 200 {object} model.Rating
// @Router /fact-check/ratings/{rating_id} [put]
func update(w http.ResponseWriter, r *http.Request) {
	sID, err := middlewarex.GetSpace(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}

	uID, err := middlewarex.GetUser(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}

	ratingID := chi.URLParam(r, "rating_id")
	id, err := strconv.Atoi(ratingID)
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InvalidID()))
		return
	}

	// NOTE: the local variable shadows the package-level `rating` request type.
	rating := &rating{}
	err = json.NewDecoder(r.Body).Decode(&rating)
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.DecodeError()))
		return
	}

	validationError := validationx.Check(rating)
	if validationError != nil {
		loggerx.Error(errors.New("validation error"))
		errorx.Render(w, validationError)
		return
	}

	result := model.Rating{}
	result.ID = uint(id)

	// check record exists or not
	err = config.DB.Where(&model.Rating{
		SpaceID: uint(sID),
	}).First(&result).Error
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.RecordNotFound()))
		return
	}

	var ratingSlug string

	// Get table name
	stmt := &gorm.Statement{DB: config.DB}
	_ = stmt.Parse(&model.Rating{})
	tableName := stmt.Schema.Table

	// Keep the slug when unchanged; otherwise approve the requested (or
	// name-derived) slug for uniqueness within the space.
	if result.Slug == rating.Slug {
		ratingSlug = result.Slug
	} else if rating.Slug != "" && slugx.Check(rating.Slug) {
		ratingSlug = slugx.Approve(&config.DB, rating.Slug, sID, tableName)
	} else {
		ratingSlug = slugx.Approve(&config.DB, slugx.Make(rating.Name), sID, tableName)
	}

	// Check if rating with same name exist
	if rating.Name != result.Name && util.CheckName(uint(sID), rating.Name, tableName) {
		loggerx.Error(errors.New(`rating with same name exist`))
		errorx.Render(w, errorx.Parser(errorx.SameNameExist()))
		return
	}

	if rating.NumericValue != result.NumericValue {
		// Check if rating with same numeric value exist
		var sameValueRatings int64
		config.DB.Model(&model.Rating{}).Where(&model.Rating{
			SpaceID:      uint(sID),
			NumericValue: rating.NumericValue,
		}).Count(&sameValueRatings)
		if sameValueRatings > 0 {
			loggerx.Error(errors.New(`rating with same numeric value exist`))
			errorx.Render(w, errorx.Parser(errorx.GetMessage(`rating with same numeric value exist`, http.StatusUnprocessableEntity)))
			return
		}
	}

	// Store HTML description (only when a non-nil JSON description was sent).
	var description string
	if len(rating.Description.RawMessage) > 0 && !reflect.DeepEqual(rating.Description, test.NilJsonb()) {
		description, err = util.HTMLDescription(rating.Description)
		if err != nil {
			loggerx.Error(err)
			errorx.Render(w, errorx.Parser(errorx.GetMessage("cannot parse rating description", http.StatusUnprocessableEntity)))
			return
		}
	}

	tx := config.DB.Begin()

	mediumID := &rating.MediumID
	result.MediumID = &rating.MediumID
	// A zero medium ID means "detach the medium": the column is nulled via an
	// explicit map update, since the struct update below skips nil fields.
	if rating.MediumID == 0 {
		err = tx.Model(&result).Updates(map[string]interface{}{"medium_id": nil}).Error
		mediumID = nil
		if err != nil {
			tx.Rollback()
			loggerx.Error(err)
			errorx.Render(w, errorx.Parser(errorx.DBError()))
			return
		}
	}

	err = tx.Model(&result).Updates(model.Rating{
		Base:             config.Base{UpdatedByID: uint(uID)},
		Name:             rating.Name,
		Slug:             ratingSlug,
		BackgroundColour: rating.BackgroundColour,
		TextColour:       rating.TextColour,
		MediumID:         mediumID,
		Description:      rating.Description,
		HTMLDescription:  description,
		NumericValue:     rating.NumericValue,
		MetaFields:       rating.MetaFields,
	}).Preload("Medium").First(&result).Error
	if err != nil {
		tx.Rollback()
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.DBError()))
		return
	}

	// Update into meili index
	meiliObj := map[string]interface{}{
		"id":                result.ID,
		"kind":              "rating",
		"name":              result.Name,
		"slug":              result.Slug,
		"background_colour": rating.BackgroundColour,
		"text_colour":       rating.TextColour,
		"description":       result.Description,
		"numeric_value":     result.NumericValue,
		"space_id":          result.SpaceID,
	}

	// Index failure rolls the DB update back so DB and search stay in sync.
	err = meilisearchx.UpdateDocument("dega", meiliObj)
	if err != nil {
		tx.Rollback()
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
		return
	}

	tx.Commit()

	if util.CheckNats() {
		if err = util.NC.Publish("rating.updated", result); err != nil {
			loggerx.Error(err)
			errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
			return
		}
	}

	renderx.JSON(w, http.StatusOK, result)
}
<file_sep>/service/core/action/author/all.go
package author
import (
"context"
"github.com/factly/dega-server/service/core/model"
"github.com/factly/dega-server/util"
"github.com/factly/x/middlewarex"
)
// All - to return all authors
// Resolves the organisation and user from ctx and returns the author map for
// them; on any lookup failure an empty map is returned with the error.
func All(ctx context.Context) (map[string]model.Author, error) {
	none := make(map[string]model.Author)

	orgID, err := util.GetOrganisation(ctx)
	if err != nil {
		return none, err
	}

	userID, err := middlewarex.GetUser(ctx)
	if err != nil {
		return none, err
	}

	return Mapper(orgID, userID), nil
}
<file_sep>/test/service/core/policy/create_test.go
package policy
import (
"net/http"
"net/http/httptest"
"testing"
"github.com/factly/dega-server/service"
"github.com/factly/dega-server/test"
"github.com/gavv/httpexpect/v2"
"gopkg.in/h2non/gock.v1"
)
func TestCreatePolicy(t *testing.T) {
	// Mock the database and the kavach/keto/meili HTTP dependencies.
	mock := test.SetupMockDB()
	test.MockServer()
	testServer := httptest.NewServer(service.RegisterRoutes())
	// Let requests to the local test server bypass gock's interception.
	gock.New(testServer.URL).EnableNetworking().Persist()
	defer gock.DisableNetworking()
	defer testServer.Close()

	// create httpexpect instance
	e := httpexpect.New(t, testServer.URL)

	// Create a policy
	t.Run("Successful create policy", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		e.POST(basePath).
			WithJSON(policy_test).
			WithHeaders(headers).
			Expect().
			Status(http.StatusOK).JSON().Object().Value("name").Equal(policy_test["name"])
	})

	t.Run("undecodable policy body", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		e.POST(basePath).
			WithJSON(undecodable_policy).
			WithHeaders(headers).
			Expect().
			Status(http.StatusUnprocessableEntity)
	})

	t.Run("when meili is down", func(t *testing.T) {
		// With the meili mock disabled, indexing fails and the handler should
		// surface a 500.
		test.DisableMeiliGock(testServer.URL)
		test.CheckSpaceMock(mock)
		e.POST(basePath).
			WithJSON(policy_test).
			WithHeaders(headers).
			Expect().
			Status(http.StatusInternalServerError)
	})
}
<file_sep>/service/core/action/request/space/route.go
package space
import (
"net/http"
"github.com/factly/dega-server/config"
"github.com/factly/dega-server/util"
"github.com/factly/x/middlewarex"
"github.com/go-chi/chi"
"github.com/jinzhu/gorm/dialects/postgres"
)
var permissionContext config.ContextKey = "space_perm_user"
// spacePermissionRequest is the request payload for asking for permissions on
// a space: count limits (media/posts/episodes) plus feature flags.
type spacePermissionRequest struct {
	Title       string         `json:"title"`
	Description postgres.Jsonb `json:"description" swaggertype:"primitive,string"`
	FactCheck   bool           `json:"fact_check"`
	Media       int64          `json:"media"`
	Posts       int64          `json:"posts"`
	Episodes    int64          `json:"episodes"`
	Podcast     bool           `json:"podcast"`
	SpaceID     int64          `json:"space_id" validate:"required"`
}
// Router - CRUD services
// Listing all requests and operating on a specific one require super
// organisation membership; "/my" returns only the caller's own requests.
func Router() http.Handler {
	router := chi.NewRouter()

	router.With(middlewarex.CheckSuperOrganisation("dega", util.GetOrganisation)).Get("/", list)
	router.Get("/my", my)

	router.With(middlewarex.CheckSuperOrganisation("dega", util.GetOrganisation)).Route("/{request_id}", func(sub chi.Router) {
		sub.Get("/", details)
		sub.Delete("/", delete)
		sub.Post("/approve", approve)
		sub.Post("/reject", reject)
	})

	return router
}
<file_sep>/service/core/model/policy.go
package model
// KetoPolicy model
// Policy document shape exchanged with the keto ACP engine.
type KetoPolicy struct {
	ID          string   `json:"id"`
	Subjects    []string `json:"subjects"`
	Actions     []string `json:"actions"`
	Resources   []string `json:"resources"`
	Effect      string   `json:"effect"`
	Description string   `json:"description"`
}

// KetoRole model
// A keto role: an identifier plus its member subjects.
type KetoRole struct {
	ID      string   `json:"id"`
	Members []string `json:"members"`
}

// Permission model
// One resource and the set of actions allowed on it.
type Permission struct {
	Resource string   `json:"resource"`
	Actions  []string `json:"actions"`
}

// Policy model
// API-facing view of a policy: named permissions plus the users it applies to.
type Policy struct {
	ID          string       `json:"id"`
	Name        string       `json:"name"`
	Description string       `json:"description"`
	Permissions []Permission `json:"permissions"`
	Users       []Author     `json:"users"`
}
<file_sep>/test/service/core/user/list_test.go
package user
import (
"net/http"
"net/http/httptest"
"testing"
"github.com/factly/dega-server/service"
"github.com/factly/dega-server/test"
"github.com/gavv/httpexpect"
"github.com/spf13/viper"
"gopkg.in/h2non/gock.v1"
)
func TestListUsers(t *testing.T) {
	// Mock the database and the kavach/keto/meili HTTP dependencies.
	mock := test.SetupMockDB()
	test.MockServer()
	testServer := httptest.NewServer(service.RegisterRoutes())
	// Let requests to the local test server bypass gock's interception.
	// Fix: a duplicate `defer gock.DisableNetworking()` was previously
	// registered before networking was even enabled; sibling tests defer it
	// exactly once, after gock.New(...).EnableNetworking().
	gock.New(testServer.URL).EnableNetworking().Persist()
	defer gock.DisableNetworking()
	defer testServer.Close()

	// create httpexpect instance
	e := httpexpect.New(t, testServer.URL)

	t.Run("get users in space", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		e.GET(path).
			WithHeaders(headers).
			Expect().
			Status(http.StatusOK).
			JSON().
			Object().
			Value("total").
			Number().
			Equal(2)
	})

	t.Run("get users when keto is down", func(t *testing.T) {
		test.DisableKetoGock(testServer.URL)
		test.CheckSpaceMock(mock)
		e.GET(path).
			WithHeaders(headers).
			Expect().
			Status(http.StatusServiceUnavailable)
	})

	t.Run("get users when keto is unable to fetch policies", func(t *testing.T) {
		// The roles endpoint answers, but policy fetches still fail, so the
		// handler should surface an internal server error.
		test.DisableKetoGock(testServer.URL)
		gock.New(viper.GetString("keto_url") + "/engines/acp/ory/regex/roles/(.+)").
			Persist().
			Reply(http.StatusOK).
			JSON(test.Dummy_Role)
		test.CheckSpaceMock(mock)
		e.GET(path).
			WithHeaders(headers).
			Expect().
			Status(http.StatusInternalServerError)
	})
}
<file_sep>/util/organisation.go
package util
import (
"encoding/json"
"errors"
"fmt"
"net/http"
"github.com/factly/dega-server/service/core/model"
"github.com/spf13/viper"
)
// paging mirrors the paginated organisation-list response returned by the
// kavach server (decoded in GetAllOrganisationsMap).
type paging struct {
	Nodes []model.Organisation `json:"nodes"`
	Total int64                `json:"total"`
}
// GetAllOrganisationsMap return slice of all organisations
// Fetches organisations from the kavach server (optionally filtered by query
// string q) and returns them keyed by organisation ID.
func GetAllOrganisationsMap(q string) (map[uint]model.Organisation, error) {
	path := "/organisations"
	if q != "" {
		path = fmt.Sprint(path, "?q=", q)
	}

	req, err := http.NewRequest("GET", viper.GetString("kavach_url")+path, nil)
	if err != nil {
		return nil, err
	}
	req.Header.Set("Content-Type", "application/json")

	client := &http.Client{}
	resp, err := client.Do(req)
	if err != nil {
		return nil, err
	}
	// Fix: the response body was never closed, leaking the connection.
	defer resp.Body.Close()

	if resp.StatusCode != http.StatusOK {
		return nil, errors.New("could not fetch organisations")
	}

	var pagingList paging
	err = json.NewDecoder(resp.Body).Decode(&pagingList)
	if err != nil {
		return nil, err
	}

	orgMap := make(map[uint]model.Organisation, len(pagingList.Nodes))
	for _, organisation := range pagingList.Nodes {
		orgMap[organisation.ID] = organisation
	}
	return orgMap, nil
}
// orgWithPermission is one entry of kavach's /organisations/my response:
// the organisation plus the caller's role within it.
type orgWithPermission struct {
	model.Organisation
	Permission permission `json:"permission"`
}

// permission carries the caller's role within an organisation.
type permission struct {
	Role string `json:"role"`
}
// CheckOwnerFromKavach checks if user is owner of organisation
// It fetches the user's organisations from kavach and reports whether oID is
// among them with the "owner" role.
func CheckOwnerFromKavach(uID, oID int) (bool, error) {
	req, err := http.NewRequest("GET", viper.GetString("kavach_url")+"/organisations/my", nil)
	if err != nil {
		return false, err
	}
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("X-User", fmt.Sprint(uID))

	client := &http.Client{}
	resp, err := client.Do(req)
	if err != nil {
		return false, err
	}
	// Fix: the response body was never closed, leaking the connection.
	defer resp.Body.Close()

	if resp.StatusCode != http.StatusOK {
		return false, errors.New("error from kavach server")
	}

	var permArr []orgWithPermission
	err = json.NewDecoder(resp.Body).Decode(&permArr)
	if err != nil {
		return false, err
	}

	for _, each := range permArr {
		if each.Permission.Role == "owner" && each.ID == uint(oID) {
			return true, nil
		}
	}
	return false, nil
}
<file_sep>/test/service/core/medium/list_test.go
package medium
import (
"net/http"
"net/http/httptest"
"testing"
"time"
"github.com/DATA-DOG/go-sqlmock"
"github.com/factly/dega-server/service"
"github.com/factly/dega-server/test"
"github.com/gavv/httpexpect/v2"
"github.com/spf13/viper"
"gopkg.in/h2non/gock.v1"
)
// TestMediumList exercises GET /core/media: empty list, full list,
// pagination, search (q) with sort, a search that matches nothing, and the
// behaviour when the meilisearch service is unreachable.
func TestMediumList(t *testing.T) {
mock := test.SetupMockDB()
test.MockServer()
testServer := httptest.NewServer(service.RegisterRoutes())
gock.New(testServer.URL).EnableNetworking().Persist()
defer gock.DisableNetworking()
defer testServer.Close()
// create httpexpect instance
e := httpexpect.New(t, testServer.URL)
// Fixture media. Keys not present here ("type", "title", etc.) resolve to
// nil when referenced in AddRow below, i.e. NULL columns in the mock rows.
mediumlist := []map[string]interface{}{
{"name": "Sample Medium 1", "slug": "test-medium-1"},
{"name": "Sample Medium 2", "slug": "test-medium-2"},
}
t.Run("get empty list of media", func(t *testing.T) {
test.CheckSpaceMock(mock)
countQuery(mock, 0)
mock.ExpectQuery(selectQuery).
WillReturnRows(sqlmock.NewRows(columns))
e.GET(basePath).
WithHeaders(headers).
Expect().
Status(http.StatusOK).
JSON().
Object().
ContainsMap(map[string]interface{}{"total": 0})
test.ExpectationsMet(t, mock)
})
t.Run("get non-empty list of media", func(t *testing.T) {
test.CheckSpaceMock(mock)
countQuery(mock, len(mediumlist))
mock.ExpectQuery(selectQuery).
WillReturnRows(sqlmock.NewRows(columns).
AddRow(1, time.Now(), time.Now(), nil, 1, 1, mediumlist[0]["name"], mediumlist[0]["slug"], mediumlist[0]["type"], mediumlist[0]["title"], mediumlist[0]["description"], mediumlist[0]["caption"], mediumlist[0]["alt_text"], mediumlist[0]["file_size"], mediumlist[0]["url"], mediumlist[0]["dimensions"], 1).
AddRow(2, time.Now(), time.Now(), nil, 1, 1, mediumlist[1]["name"], mediumlist[1]["slug"], mediumlist[1]["type"], mediumlist[1]["title"], mediumlist[1]["description"], mediumlist[1]["caption"], mediumlist[1]["alt_text"], mediumlist[1]["file_size"], mediumlist[1]["url"], mediumlist[1]["dimensions"], 1))
e.GET(basePath).
WithHeaders(headers).
Expect().
Status(http.StatusOK).
JSON().
Object().
ContainsMap(map[string]interface{}{"total": len(mediumlist)}).
Value("nodes").
Array().
Element(0).
Object().
ContainsMap(mediumlist[0])
test.ExpectationsMet(t, mock)
})
t.Run("get media with pagination", func(t *testing.T) {
test.CheckSpaceMock(mock)
countQuery(mock, len(mediumlist))
// Only the second medium is returned: limit 1, page 2.
mock.ExpectQuery(paginationQuery).
WillReturnRows(sqlmock.NewRows(columns).
AddRow(2, time.Now(), time.Now(), nil, 1, 1, mediumlist[1]["name"], mediumlist[1]["slug"], mediumlist[1]["type"], mediumlist[1]["title"], mediumlist[1]["description"], mediumlist[1]["caption"], mediumlist[1]["alt_text"], mediumlist[1]["file_size"], mediumlist[1]["url"], mediumlist[1]["dimensions"], 1))
e.GET(basePath).
WithQueryObject(map[string]interface{}{
"limit": "1",
"page": "2",
}).
WithHeaders(headers).
Expect().
Status(http.StatusOK).
JSON().
Object().
ContainsMap(map[string]interface{}{"total": len(mediumlist)}).
Value("nodes").
Array().
Element(0).
Object().
ContainsMap(mediumlist[1])
test.ExpectationsMet(t, mock)
})
t.Run("get list of media filtered by q", func(t *testing.T) {
test.CheckSpaceMock(mock)
countQuery(mock, len(mediumlist))
mock.ExpectQuery(selectQuery).
WithArgs(1, sqlmock.AnyArg(), sqlmock.AnyArg()).
WillReturnRows(sqlmock.NewRows(columns).
AddRow(1, time.Now(), time.Now(), nil, 1, 1, mediumlist[0]["name"], mediumlist[0]["slug"], mediumlist[0]["type"], mediumlist[0]["title"], mediumlist[0]["description"], mediumlist[0]["caption"], mediumlist[0]["alt_text"], mediumlist[0]["file_size"], mediumlist[0]["url"], mediumlist[0]["dimensions"], 1).
AddRow(2, time.Now(), time.Now(), nil, 1, 1, mediumlist[1]["name"], mediumlist[1]["slug"], mediumlist[1]["type"], mediumlist[1]["title"], mediumlist[1]["description"], mediumlist[1]["caption"], mediumlist[1]["alt_text"], mediumlist[1]["file_size"], mediumlist[1]["url"], mediumlist[1]["dimensions"], 1))
e.GET(basePath).
WithHeaders(headers).
WithQueryObject(map[string]interface{}{
"q": "test",
"sort": "asc",
}).
Expect().
Status(http.StatusOK).
JSON().
Object().
ContainsMap(map[string]interface{}{"total": len(mediumlist)}).
Value("nodes").
Array().
Element(0).
Object().
ContainsMap(mediumlist[0])
test.ExpectationsMet(t, mock)
})
t.Run("when query does not match any post", func(t *testing.T) {
test.CheckSpaceMock(mock)
// Replace the default meili mock with one that returns an empty result.
test.DisableMeiliGock(testServer.URL)
gock.New(viper.GetString("meili_url") + "/indexes/dega/search").
HeaderPresent("X-Meili-API-Key").
Persist().
Reply(http.StatusOK).
JSON(test.EmptyMeili)
e.GET(basePath).
WithHeaders(headers).
WithQuery("q", "test").
Expect().
Status(http.StatusOK).
JSON().
Object().
ContainsMap(map[string]interface{}{"total": 0})
test.ExpectationsMet(t, mock)
})
t.Run("when meili is down", func(t *testing.T) {
test.CheckSpaceMock(mock)
// No meili mock at all: the endpoint should still answer with an empty list.
test.DisableMeiliGock(testServer.URL)
e.GET(basePath).
WithHeaders(headers).
WithQuery("q", "test").
Expect().
Status(http.StatusOK).
JSON().
Object().
ContainsMap(map[string]interface{}{"total": 0})
test.ExpectationsMet(t, mock)
})
}
<file_sep>/test/service/core/post/testvars.go
package post
import (
"database/sql/driver"
"fmt"
"regexp"
"time"
"github.com/DATA-DOG/go-sqlmock"
"github.com/factly/dega-server/test"
"github.com/factly/dega-server/test/service/core/category"
"github.com/factly/dega-server/test/service/core/format"
"github.com/factly/dega-server/test/service/core/medium"
"github.com/factly/dega-server/test/service/core/space"
"github.com/factly/dega-server/test/service/core/tag"
"github.com/factly/dega-server/test/service/fact-check/claim"
"github.com/factly/dega-server/test/service/fact-check/rating"
"github.com/jinzhu/gorm/dialects/postgres"
)
// headers supplies the space and user context expected by the post handlers.
var headers = map[string]string{
"X-Space": "1",
"X-User": "1",
}
// Data is the full request payload for creating a post, including the
// association id lists (categories, tags, claims, authors).
var Data = map[string]interface{}{
"title": "Post",
"subtitle": "post subtitle",
"slug": "post",
"status": "draft",
"is_page": false,
"excerpt": "post excerpt",
"description": postgres.Jsonb{
RawMessage: []byte(`{"time":1617039625490,"blocks":[{"type":"paragraph","data":{"text":"Test Description"}}],"version":"2.19.0"}`),
},
"html_description": "<p>Test Description</p>",
"is_featured": false,
"is_sticky": true,
"is_highlighted": true,
"featured_medium_id": uint(1),
"published_date": time.Now(),
"format_id": uint(1),
"category_ids": []uint{1},
"tag_ids": []uint{1},
"claim_ids": []uint{1},
"author_ids": []uint{1},
}
// postData mirrors Data without the association id lists, published date or
// featured medium — the fields stored directly on the post row.
var postData = map[string]interface{}{
"title": "Post",
"subtitle": "post subtitle",
"slug": "post",
"status": "draft",
"is_page": false,
"excerpt": "post excerpt",
"description": postgres.Jsonb{
RawMessage: []byte(`{"time":1617039625490,"blocks":[{"type":"paragraph","data":{"text":"Test Description"}}],"version":"2.19.0"}`),
},
"html_description": "<p>Test Description</p>",
"is_featured": false,
"is_sticky": true,
"is_highlighted": true,
"format_id": uint(1),
}
// postList provides two stored posts for list-endpoint expectations.
var postList = []map[string]interface{}{
{
"title": "Post 1",
"subtitle": "post subtitle 1",
"slug": "post-1",
"status": "draft",
"is_page": false,
"excerpt": "post excerpt",
"description": postgres.Jsonb{
RawMessage: []byte(`{"time":1617039625490,"blocks":[{"type":"paragraph","data":{"text":"Test Description 1"}}],"version":"2.19.0"}`),
},
"html_description": "<p>Test Description 1</p>",
"is_featured": false,
"is_sticky": true,
"is_highlighted": true,
"featured_medium_id": uint(1),
"format_id": uint(1),
},
{
"title": "Post 2",
"subtitle": "post subtitle",
"slug": "post-2",
"status": "draft",
"is_page": false,
"excerpt": "post excerpt",
"description": postgres.Jsonb{
RawMessage: []byte(`{"time":1617039625490,"blocks":[{"type":"paragraph","data":{"text":"Test Description 2"}}],"version":"2.19.0"}`),
},
"html_description": "<p>Test Description 2</p>",
"is_featured": false,
"is_sticky": true,
"is_highlighted": true,
"featured_medium_id": uint(1),
"format_id": uint(1),
},
}
// invalidData is used as an invalid create payload (presumably fails
// validation on title length — confirm against the post validation rules).
var invalidData = map[string]interface{}{
"title": "a",
}
// templateData is a valid "create template from post" payload.
var templateData = map[string]interface{}{
"post_id": 1,
}
// invalidTemplateData misspells the key, so post_id is absent after decode.
var invalidTemplateData = map[string]interface{}{
"posd": 1,
}
// undecodableTemplateData carries a non-numeric post_id to force a decode error.
var undecodableTemplateData = map[string]interface{}{
"post_id": "dfdsf",
}
// columns lists the posts table columns in the order mock rows are built.
var columns = []string{"id", "created_at", "updated_at", "deleted_at", "created_by_id", "updated_by_id", "title", "subtitle", "slug", "status", "is_page", "excerpt", "description", "html_description", "is_featured", "is_sticky", "is_highlighted", "featured_medium_id", "format_id", "published_date", "space_id"}
// Shared SQL patterns and route paths for the post tests.
var selectQuery = regexp.QuoteMeta(`SELECT * FROM "posts"`)
var paginationQuery = `SELECT \* FROM "posts" (.+) LIMIT 1 OFFSET 1`
var basePath = "/core/posts"
var path = "/core/posts/{post_id}"
var templatePath = "/core/posts/templates"
// slugCheckMock expects the slug-uniqueness lookup performed before a post
// is created or renamed, returning no conflicting rows.
func slugCheckMock(mock sqlmock.Sqlmock, post map[string]interface{}) {
	slugQuery := regexp.QuoteMeta(`SELECT slug, space_id FROM "posts"`)
	slugPrefix := fmt.Sprint(post["slug"], "%")
	emptyRows := sqlmock.NewRows(columns)
	mock.ExpectQuery(slugQuery).WithArgs(slugPrefix, 1).WillReturnRows(emptyRows)
}
// postInsertMock sets the expectations for creating a post inside a
// transaction: validate medium & format, insert the post (with or without a
// published date), then create and link its tag and category.
func postInsertMock(mock sqlmock.Sqlmock, post map[string]interface{}, isPublished bool) {
mock.ExpectBegin()
// Medium & format lookups precede the insert.
medium.SelectWithSpace(mock)
format.SelectMock(mock, 1, 1)
if isPublished {
// Published posts carry a published_date argument (test.AnyTime{}).
mock.ExpectQuery(`INSERT INTO "posts"`).
WithArgs(test.AnyTime{}, test.AnyTime{}, nil, 1, 1, post["title"], post["subtitle"], post["slug"], post["status"], post["is_page"], post["excerpt"], post["description"], post["html_description"], post["is_featured"], post["is_sticky"], post["is_highlighted"], post["format_id"], test.AnyTime{}, 1, nil, post["featured_medium_id"]).
WillReturnRows(sqlmock.
NewRows([]string{"featured_medium_id", "id"}).
AddRow(1, 1))
} else {
// Draft posts have a nil published_date.
mock.ExpectQuery(`INSERT INTO "posts"`).
WithArgs(test.AnyTime{}, test.AnyTime{}, nil, 1, 1, post["title"], post["subtitle"], post["slug"], post["status"], post["is_page"], post["excerpt"], post["description"], post["html_description"], post["is_featured"], post["is_sticky"], post["is_highlighted"], post["format_id"], nil, 1, nil, post["featured_medium_id"]).
WillReturnRows(sqlmock.
NewRows([]string{"featured_medium_id", "id"}).
AddRow(1, 1))
}
// Tag is created and linked through the post_tags join table.
mock.ExpectQuery(`INSERT INTO "tags"`).
WithArgs(test.AnyTime{}, test.AnyTime{}, nil, 1, 1, tag.Data["name"], tag.Data["slug"], tag.Data["description"], tag.Data["html_description"], tag.Data["is_featured"], 1, 1).
WillReturnRows(sqlmock.
NewRows([]string{"id"}).
AddRow(1))
mock.ExpectExec(`INSERT INTO "post_tags"`).
WithArgs(1, 1).
WillReturnResult(sqlmock.NewResult(0, 1))
// Category (its medium is looked up first) is created and linked through
// the post_categories join table.
medium.SelectWithSpace(mock)
mock.ExpectQuery(`INSERT INTO "categories"`).
WithArgs(test.AnyTime{}, test.AnyTime{}, nil, 1, 1, category.Data["name"], category.Data["slug"], category.Data["description"], category.Data["html_description"], category.Data["is_featured"], sqlmock.AnyArg(), sqlmock.AnyArg(), sqlmock.AnyArg(), sqlmock.AnyArg(), sqlmock.AnyArg()).
WillReturnRows(sqlmock.
NewRows([]string{"id", "parent_id", "medium_id"}).
AddRow(1, 1, 1))
mock.ExpectExec(`INSERT INTO "post_categories"`).
WithArgs(1, 1).
WillReturnResult(sqlmock.NewResult(0, 1))
}
// postListMock sets the expectations for listing posts: space check, count,
// the post rows themselves, the association preloads, then claims and
// authors for the returned posts.
func postListMock(mock sqlmock.Sqlmock) {
test.CheckSpaceMock(mock)
postCountQuery(mock, len(postList))
mock.ExpectQuery(selectQuery).
WillReturnRows(sqlmock.NewRows(columns).
AddRow(1, time.Now(), time.Now(), nil, 1, 1, postList[0]["title"], postList[0]["subtitle"], postList[0]["slug"], postList[0]["status"], postList[0]["is_page"], postList[0]["excerpt"],
postList[0]["description"], postList[0]["html_description"], postList[0]["is_featured"], postList[0]["is_sticky"], postList[0]["is_highlighted"], postList[0]["featured_medium_id"], postList[0]["format_id"], postList[0]["published_date"], 1).
AddRow(2, time.Now(), time.Now(), nil, 1, 1, postList[1]["title"], postList[1]["subtitle"], postList[1]["slug"], postList[1]["status"], postList[1]["is_page"], postList[1]["excerpt"],
postList[1]["description"], postList[1]["html_description"], postList[1]["is_featured"], postList[1]["is_sticky"], postList[1]["is_highlighted"], postList[1]["featured_medium_id"], postList[1]["format_id"], postList[1]["published_date"], 1))
preloadMock(mock, sqlmock.AnyArg(), sqlmock.AnyArg())
// Claims linked to the posts, then the claim records themselves.
mock.ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "post_claims"`)).
WithArgs(sqlmock.AnyArg(), sqlmock.AnyArg()).
WillReturnRows(sqlmock.NewRows([]string{"id", "created_at", "updated_at", "deleted_at", "claim_id", "post_id", "position"}).
AddRow(1, time.Now(), time.Now(), nil, 1, 1, 1))
claim.SelectWithOutSpace(mock, claim.Data)
// Authors linked to the posts.
mock.ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "post_authors"`)).
WithArgs(sqlmock.AnyArg(), sqlmock.AnyArg()).
WillReturnRows(sqlmock.NewRows([]string{"id", "created_at", "updated_at", "deleted_at", "author_id", "post_id"}).
AddRow(1, time.Now(), time.Now(), nil, 1, 1))
}
// postListWithFiltersMock mirrors postListMock but spells out the preload
// expectations inline (categories, format, space, tags) instead of calling
// preloadMock, matching the query order of the filtered list endpoint.
func postListWithFiltersMock(mock sqlmock.Sqlmock) {
test.CheckSpaceMock(mock)
postCountQuery(mock, len(postList))
mock.ExpectQuery(selectQuery).
WillReturnRows(sqlmock.NewRows(columns).
AddRow(1, time.Now(), time.Now(), nil, 1, 1, postList[0]["title"], postList[0]["subtitle"], postList[0]["slug"], postList[0]["status"], postList[0]["is_page"], postList[0]["excerpt"],
postList[0]["description"], postList[0]["html_description"], postList[0]["is_featured"], postList[0]["is_sticky"], postList[0]["is_highlighted"], postList[0]["featured_medium_id"], postList[0]["format_id"], postList[0]["published_date"], 1).
AddRow(2, time.Now(), time.Now(), nil, 1, 1, postList[1]["title"], postList[1]["subtitle"], postList[1]["slug"], postList[1]["status"], postList[1]["is_page"], postList[1]["excerpt"],
postList[1]["description"], postList[1]["html_description"], postList[1]["is_featured"], postList[1]["is_sticky"], postList[1]["is_highlighted"], postList[1]["featured_medium_id"], postList[1]["format_id"], postList[1]["published_date"], 1))
// Category preload and its related lookups.
mock.ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "post_categories"`)).
WithArgs(sqlmock.AnyArg(), sqlmock.AnyArg()).
WillReturnRows(sqlmock.NewRows([]string{"post_id", "category_id"}).
AddRow(1, 1))
category.SelectWithOutSpace(mock)
format.SelectMock(mock, 1)
space.SelectQuery(mock)
// Tag preload.
mock.ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "post_tags"`)).
WithArgs(sqlmock.AnyArg(), sqlmock.AnyArg()).
WillReturnRows(sqlmock.NewRows([]string{"post_id", "tag_id"}).
AddRow(1, 1))
tag.SelectMock(mock, tag.Data, 1)
// Claims and authors of the returned posts.
mock.ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "post_claims"`)).
WithArgs(sqlmock.AnyArg(), sqlmock.AnyArg()).
WillReturnRows(sqlmock.NewRows([]string{"id", "created_at", "updated_at", "deleted_at", "claim_id", "post_id", "position"}).
AddRow(1, time.Now(), time.Now(), nil, 1, 1, 1))
claim.SelectWithOutSpace(mock, claim.Data)
mock.ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "post_authors"`)).
WithArgs(sqlmock.AnyArg(), sqlmock.AnyArg()).
WillReturnRows(sqlmock.NewRows([]string{"id", "created_at", "updated_at", "deleted_at", "created_by_id", "updated_by_id", "author_id", "post_id"}).
AddRow(1, time.Now(), time.Now(), nil, 1, 1, 1, 1))
}
// preloadMock expects the association preloads that follow a post select:
// the post_categories join plus the category's related lookups (category,
// format, medium, space), then the post_tags join and the tag lookup.
// args are the arguments of the join-table queries (typically post ids).
func preloadMock(mock sqlmock.Sqlmock, args ...driver.Value) {
mock.ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "post_categories"`)).
WithArgs(args...).
WillReturnRows(sqlmock.NewRows([]string{"post_id", "category_id"}).
AddRow(1, 1))
category.SelectWithOutSpace(mock)
format.SelectMock(mock, 1)
medium.SelectWithOutSpace(mock)
space.SelectQuery(mock)
mock.ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "post_tags"`)).
WithArgs(args...).
WillReturnRows(sqlmock.NewRows([]string{"post_id", "tag_id"}).
AddRow(1, 1))
tag.SelectMock(mock, tag.Data, 1)
}
// postSelectWithOutSpace expects a lookup of post 1 by id only (no space
// filter) returning a row built from the given post map, followed by the
// association preloads.
func postSelectWithOutSpace(mock sqlmock.Sqlmock, post map[string]interface{}) {
mock.ExpectQuery(selectQuery).
WithArgs(1).
WillReturnRows(sqlmock.NewRows(columns).
AddRow(1, time.Now(), time.Now(), nil, 1, 1, post["title"], post["subtitle"], post["slug"], post["status"], post["is_page"], post["excerpt"], post["description"], post["html_description"], post["is_featured"], post["is_sticky"], post["is_highlighted"], post["featured_medium_id"], post["format_id"], post["published_date"], 1))
// Preload Claimant & Rating
preloadMock(mock, 1)
}
// postSelectWithSpace expects a lookup of post 1 scoped to space 1
// (args: id, is_page=false, space) and returns one row built from the
// package-level Data fixture.
func postSelectWithSpace(mock sqlmock.Sqlmock) {
	row := sqlmock.NewRows(columns).AddRow(
		1, time.Now(), time.Now(), nil, 1, 1,
		Data["title"], Data["subtitle"], Data["slug"], Data["status"],
		Data["is_page"], Data["excerpt"], Data["description"],
		Data["html_description"], Data["is_featured"], Data["is_sticky"],
		Data["is_highlighted"], Data["featured_medium_id"], Data["format_id"],
		Data["published_date"], 1)
	mock.ExpectQuery(selectQuery).WithArgs(1, false, 1).WillReturnRows(row)
}
// postSelectPublishedWithSpace is postSelectWithSpace with the status column
// forced to "publish" in the returned row.
func postSelectPublishedWithSpace(mock sqlmock.Sqlmock) {
	row := sqlmock.NewRows(columns).AddRow(
		1, time.Now(), time.Now(), nil, 1, 1,
		Data["title"], Data["subtitle"], Data["slug"], "publish",
		Data["is_page"], Data["excerpt"], Data["description"],
		Data["html_description"], Data["is_featured"], Data["is_sticky"],
		Data["is_highlighted"], Data["featured_medium_id"], Data["format_id"],
		Data["published_date"], 1)
	mock.ExpectQuery(selectQuery).WithArgs(1, false, 1).WillReturnRows(row)
}
// recordNotFoundMock expects a lookup for post id 100 (scoped to space 1)
// and returns no rows, simulating a post that does not exist.
func recordNotFoundMock(mock sqlmock.Sqlmock) {
mock.ExpectQuery(selectQuery).
WithArgs(1, false, 100).
WillReturnRows(sqlmock.NewRows(columns))
}
// postCountQuery expects the posts count query and answers with count.
func postCountQuery(mock sqlmock.Sqlmock, count int) {
	countRows := sqlmock.NewRows([]string{"count"}).AddRow(count)
	query := regexp.QuoteMeta(`SELECT count(*) FROM "posts"`)
	mock.ExpectQuery(query).WillReturnRows(countRows)
}
// postClaimInsertMock expects a post_claims insert and returns id 1.
func postClaimInsertMock(mock sqlmock.Sqlmock) {
	idRows := sqlmock.NewRows([]string{"id"}).AddRow(1)
	mock.ExpectQuery(`INSERT INTO "post_claims"`).
		WithArgs(test.AnyTime{}, test.AnyTime{}, nil, 1, 1, 1, 1, 1).
		WillReturnRows(idRows)
}
// postClaimSelectMock expects the post_claims lookup for post 1 followed by
// the claim record fetch.
func postClaimSelectMock(mock sqlmock.Sqlmock) {
mock.ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "post_claims"`)).
WithArgs(1).
WillReturnRows(sqlmock.NewRows([]string{"id", "created_at", "updated_at", "deleted_at", "claim_id", "post_id", "position"}).
AddRow(1, time.Now(), time.Now(), nil, 1, 1, 1))
claim.SelectWithOutSpace(mock, claim.Data)
}
// postAuthorSelectMock expects the post_authors lookup for post 1 and
// returns a single link row.
func postAuthorSelectMock(mock sqlmock.Sqlmock) {
	authorColumns := []string{"id", "created_at", "updated_at", "deleted_at", "author_id", "post_id"}
	authorRows := sqlmock.NewRows(authorColumns).AddRow(1, time.Now(), time.Now(), nil, 1, 1)
	mock.ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "post_authors"`)).
		WithArgs(1).
		WillReturnRows(authorRows)
}
// postAuthorInsertMock expects a post_authors insert and returns id 1.
func postAuthorInsertMock(mock sqlmock.Sqlmock) {
	idRows := sqlmock.NewRows([]string{"id"}).AddRow(1)
	mock.ExpectQuery(`INSERT INTO "post_authors"`).
		WithArgs(test.AnyTime{}, test.AnyTime{}, nil, 1, 1, 1, 1).
		WillReturnRows(idRows)
}
// postSchemaUpdateQuery expects the queries issued while regenerating a
// post's schema: fetch the ratings, validate medium & format, then update
// the post row (the schema column argument is matched loosely).
func postSchemaUpdateQuery(mock sqlmock.Sqlmock) {
mock.ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "ratings"`)).
WillReturnRows(sqlmock.NewRows([]string{"id", "created_at", "updated_at", "deleted_at", "created_by_id", "updated_by_id", "name", "slug", "background_colour", "text_colour", "medium_id", "description", "html_description", "numeric_value", "space_id"}).
AddRow(1, time.Now(), time.Now(), nil, 1, 1, rating.Data["name"], rating.Data["slug"], rating.Data["background_colour"], rating.Data["text_colour"], rating.Data["medium_id"], rating.Data["description"], rating.Data["html_description"], rating.Data["numeric_value"], 1))
medium.SelectWithSpace(mock)
format.SelectMock(mock, 1, 1)
mock.ExpectExec(`UPDATE \"posts\"`).
WithArgs(test.AnyTime{}, sqlmock.AnyArg(), 1).
WillReturnResult(driver.ResultNoRows)
}
// preUpdateMock sets the expectations shared by all post-update paths:
// optional slug-uniqueness check, transaction begin, then replacing the
// post's tag and category associations (create new link, delete old link).
func preUpdateMock(mock sqlmock.Sqlmock, post map[string]interface{}, slugCheckRequired bool) {
// slug check is required
if slugCheckRequired {
slugCheckMock(mock, post)
}
mock.ExpectBegin()
// get new tags & categories to update
tag.SelectMock(mock, tag.Data, 1)
medium.SelectWithSpace(mock)
format.SelectMock(mock, 1, 1)
mock.ExpectExec(`UPDATE "posts" SET`).
WithArgs(test.AnyTime{}, 1).
WillReturnResult(driver.ResultNoRows)
// Re-create the tag, link it, and remove the stale post_tags row.
mock.ExpectQuery(`INSERT INTO "tags"`).
WithArgs(test.AnyTime{}, test.AnyTime{}, nil, 1, 1, tag.Data["name"], tag.Data["slug"], tag.Data["description"], tag.Data["html_description"], tag.Data["is_featured"], 1, 1).
WillReturnRows(sqlmock.
NewRows([]string{"id"}).
AddRow(1))
mock.ExpectExec(`INSERT INTO "post_tags"`).
WithArgs(1, 1).
WillReturnResult(sqlmock.NewResult(0, 1))
mock.ExpectExec(regexp.QuoteMeta(`DELETE FROM "post_tags"`)).
WithArgs(1, 1).
WillReturnResult(sqlmock.NewResult(0, 1))
// Same dance for the category association.
category.SelectWithOutSpace(mock)
medium.SelectWithSpace(mock)
format.SelectMock(mock, 1, 1)
mock.ExpectExec(`UPDATE "posts" SET`).
WithArgs(test.AnyTime{}, 1).
WillReturnResult(driver.ResultNoRows)
medium.SelectWithSpace(mock)
mock.ExpectQuery(`INSERT INTO "categories"`).
WithArgs(test.AnyTime{}, test.AnyTime{}, nil, 1, 1, category.Data["name"], category.Data["slug"], category.Data["description"], category.Data["html_description"], category.Data["is_featured"], sqlmock.AnyArg(), sqlmock.AnyArg(), sqlmock.AnyArg(), sqlmock.AnyArg(), sqlmock.AnyArg()).
WillReturnRows(sqlmock.
NewRows([]string{"id", "parent_id", "medium_id"}).
AddRow(1, 1, 1))
mock.ExpectExec(`INSERT INTO "post_categories"`).
WithArgs(1, 1).
WillReturnResult(sqlmock.NewResult(0, 1))
mock.ExpectExec(regexp.QuoteMeta(`DELETE FROM "post_categories"`)).
WithArgs(1, 1).
WillReturnResult(sqlmock.NewResult(0, 1))
}
// preUpdateDraftMock combines the select of an existing draft post with the
// shared pre-update expectations plus a final medium & format validation.
func preUpdateDraftMock(mock sqlmock.Sqlmock, post map[string]interface{}, slugCheckRequired bool) {
postSelectWithSpace(mock)
preUpdateMock(mock, post, slugCheckRequired)
medium.SelectWithSpace(mock)
format.SelectMock(mock, 1, 1)
}
// preUpdatePublishedMock is preUpdateDraftMock for a post whose stored
// status is "publish".
func preUpdatePublishedMock(mock sqlmock.Sqlmock, post map[string]interface{}, slugCheckRequired bool) {
postSelectPublishedWithSpace(mock)
preUpdateMock(mock, post, slugCheckRequired)
// Check medium & format belong to same space or not
medium.SelectWithSpace(mock)
format.SelectMock(mock, 1, 1)
}
// updateQueryMock expects the two UPDATE statements of a draft-post update
// (flag columns first, then the content columns) and delegates the
// association refresh to postUpdateQueryMock.
// NOTE(review): slugCheckRequired is currently unused here.
func updateQueryMock(mock sqlmock.Sqlmock, post map[string]interface{}, slugCheckRequired bool) {
mock.ExpectExec(`UPDATE \"posts\"`).
WithArgs(test.AnyTime{}, post["is_page"], post["is_featured"], post["is_sticky"], post["is_highlighted"], 1).
WillReturnResult(sqlmock.NewResult(1, 1))
medium.SelectWithSpace(mock)
format.SelectMock(mock, 1, 1)
mock.ExpectExec(`UPDATE \"posts\"`).
WithArgs(test.AnyTime{}, 1, post["title"], post["subtitle"], post["slug"], post["excerpt"],
post["description"], post["html_description"], post["is_sticky"], post["is_highlighted"], post["featured_medium_id"], post["format_id"], 1).
WillReturnResult(sqlmock.NewResult(1, 1))
postUpdateQueryMock(mock, post)
}
// updatePublishedQueryMock expects the UPDATE statements of a published-post
// update: clear the published date, update flags, then the content columns
// (including status), and finally the association refresh.
// NOTE(review): slugCheckRequired is currently unused here.
func updatePublishedQueryMock(mock sqlmock.Sqlmock, post map[string]interface{}, slugCheckRequired bool) {
mock.ExpectExec(`UPDATE \"posts\"`).
WithArgs(test.AnyTime{}, nil, 1).
WillReturnResult(sqlmock.NewResult(1, 1))
medium.SelectWithSpace(mock)
format.SelectMock(mock, 1, 1)
mock.ExpectExec(`UPDATE \"posts\"`).
WithArgs(test.AnyTime{}, post["is_page"], post["is_featured"], post["is_sticky"], post["is_highlighted"], 1).
WillReturnResult(sqlmock.NewResult(1, 1))
medium.SelectWithSpace(mock)
format.SelectMock(mock, 1, 1)
mock.ExpectExec(`UPDATE \"posts\"`).
WithArgs(test.AnyTime{}, 1, post["title"], post["subtitle"], post["slug"], post["status"], post["excerpt"],
post["description"], post["html_description"], post["is_sticky"], post["is_highlighted"], post["featured_medium_id"], post["format_id"], 1).
WillReturnResult(sqlmock.NewResult(1, 1))
postUpdateQueryMock(mock, post)
}
// postUpdateQueryMock sets the expectations for the association refresh that
// follows a post update: re-create the tag and category, link them through
// the join tables, then re-fetch the post and preload its associations.
func postUpdateQueryMock(mock sqlmock.Sqlmock, post map[string]interface{}) {
	mock.ExpectQuery(`INSERT INTO "tags"`).
		WithArgs(test.AnyTime{}, test.AnyTime{}, nil, 1, 1, tag.Data["name"], tag.Data["slug"], tag.Data["description"], tag.Data["html_description"], tag.Data["is_featured"], 1, 1).
		WillReturnRows(sqlmock.
			NewRows([]string{"id"}).
			AddRow(1))
	mock.ExpectExec(`INSERT INTO "post_tags"`).
		WithArgs(1, 1).
		WillReturnResult(sqlmock.NewResult(0, 1))
	medium.SelectWithSpace(mock)
	mock.ExpectQuery(`INSERT INTO "categories"`).
		WithArgs(test.AnyTime{}, test.AnyTime{}, nil, 1, 1, category.Data["name"], category.Data["slug"], category.Data["description"], category.Data["html_description"], category.Data["is_featured"], sqlmock.AnyArg(), sqlmock.AnyArg(), sqlmock.AnyArg(), sqlmock.AnyArg(), sqlmock.AnyArg()).
		WillReturnRows(sqlmock.
			NewRows([]string{"id", "parent_id", "medium_id"}).
			AddRow(1, 1, 1))
	mock.ExpectExec(`INSERT INTO "post_categories"`).
		WithArgs(1, 1).
		WillReturnResult(sqlmock.NewResult(0, 1))
	mock.ExpectQuery(selectQuery).
		WithArgs(1, 1).
		WillReturnRows(sqlmock.NewRows(columns).
			// BUG FIX: was post["page"] — no fixture defines that key (they
			// all use "is_page", matching the columns slice), so the column
			// was always nil.
			AddRow(1, time.Now(), time.Now(), nil, 1, 1, post["title"], post["subtitle"], post["slug"], post["status"], post["is_page"], post["excerpt"], post["description"], post["html_description"], post["is_featured"], post["is_sticky"], post["is_highlighted"], post["featured_medium_id"], post["format_id"], post["published_date"], 1))
	preloadMock(mock)
}
// updatePostClaimsMock expects the claim-association refresh of a post
// update: find the existing link (to claim 2), soft-delete it, insert the
// new link (to claim 1), then re-read the links and the claim record.
func updatePostClaimsMock(mock sqlmock.Sqlmock) {
mock.ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "post_claims"`)).
WithArgs(1).
WillReturnRows(sqlmock.NewRows([]string{"id", "created_at", "updated_at", "deleted_at", "claim_id", "post_id", "position"}).
AddRow(1, time.Now(), time.Now(), nil, 2, 1, 1))
mock.ExpectExec(regexp.QuoteMeta(`UPDATE "post_claims" SET "deleted_at"=`)).
WithArgs(test.AnyTime{}, 1).
WillReturnResult(sqlmock.NewResult(1, 1))
mock.ExpectQuery(`INSERT INTO "post_claims"`).
WithArgs(test.AnyTime{}, test.AnyTime{}, nil, 1, 1, 1, 1, 1).
WillReturnRows(sqlmock.
NewRows([]string{"id"}).
AddRow(1))
mock.ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "post_claims"`)).
WithArgs(1).
WillReturnRows(sqlmock.NewRows([]string{"id", "created_at", "updated_at", "deleted_at", "claim_id", "post_id", "position"}).
AddRow(1, time.Now(), time.Now(), nil, 1, 1, 1))
claim.SelectWithOutSpace(mock, claim.Data)
}
// updatePostAuthorMock expects the author-association refresh of a post
// update: find the existing link (to author 2), soft-delete it, insert the
// new link (to author 1), then re-read the links.
func updatePostAuthorMock(mock sqlmock.Sqlmock) {
mock.ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "post_authors"`)).
WithArgs(1).
WillReturnRows(sqlmock.NewRows([]string{"id", "created_at", "updated_at", "deleted_at", "created_by_id", "updated_by_id", "author_id", "post_id"}).
AddRow(1, time.Now(), time.Now(), nil, 1, 1, 2, 1))
mock.ExpectExec(regexp.QuoteMeta(`UPDATE "post_authors" SET "deleted_at"=`)).
WithArgs(test.AnyTime{}, 1).
WillReturnResult(sqlmock.NewResult(1, 1))
mock.ExpectQuery(`INSERT INTO "post_authors"`).
WithArgs(test.AnyTime{}, test.AnyTime{}, nil, 1, 1, 1, 1).
WillReturnRows(sqlmock.
NewRows([]string{"id"}).
AddRow(1))
postAuthorSelectMock(mock)
}
// updateMock wires together the full expectation sequence for updating a
// draft post: pre-update checks, the UPDATE statements, claim and author
// association refreshes, and the final schema regeneration.
func updateMock(mock sqlmock.Sqlmock, post map[string]interface{}, slugCheckRequired bool) {
preUpdateDraftMock(mock, post, slugCheckRequired)
updateQueryMock(mock, post, slugCheckRequired)
updatePostClaimsMock(mock)
updatePostAuthorMock(mock)
postSchemaUpdateQuery(mock)
}
// deleteMock expects the statements of a post delete: hard-delete the tag
// and category links, soft-delete the author and claim links, then
// soft-delete the post row itself.
func deleteMock(mock sqlmock.Sqlmock) {
mock.ExpectBegin()
mock.ExpectExec(regexp.QuoteMeta(`DELETE FROM "post_tags"`)).
WillReturnResult(sqlmock.NewResult(0, 1))
mock.ExpectExec(regexp.QuoteMeta(`DELETE FROM "post_categories"`)).
WillReturnResult(sqlmock.NewResult(0, 1))
mock.ExpectExec(regexp.QuoteMeta(`UPDATE "post_authors" SET "deleted_at"=`)).
WithArgs(test.AnyTime{}, 1).
WillReturnResult(sqlmock.NewResult(1, 1))
mock.ExpectExec(regexp.QuoteMeta(`UPDATE "post_claims" SET "deleted_at"=`)).
WithArgs(test.AnyTime{}, 1).
WillReturnResult(sqlmock.NewResult(1, 1))
mock.ExpectExec(regexp.QuoteMeta(`UPDATE "posts" SET "deleted_at"=`)).
WithArgs(test.AnyTime{}, 1).
WillReturnResult(sqlmock.NewResult(1, 1))
}
<file_sep>/sonar-project.properties
sonar.organization=factly
sonar.projectKey=factly_dega-server
sonar.projectName=dega-server
sonar.sources=service/
sonar.tests=test/
sonar.test.inclusions=**/*_test.go
sonar.go.coverage.reportPaths=cov.out<file_sep>/test/service/core/post/main_test.go
package post
import (
"os"
"testing"
"github.com/factly/dega-server/test"
"gopkg.in/h2non/gock.v1"
)
func TestMain(m *testing.M) {
// Mock kavach server and allowing persisted external traffic
defer gock.Disable()
test.MockServer()
defer gock.DisableNetworking()
exitValue := m.Run()
os.Exit(exitValue)
}
<file_sep>/service/core/action/author/feed.go
package author
import (
"encoding/json"
"fmt"
"net/http"
"strconv"
"strings"
"time"
"github.com/factly/dega-server/config"
"github.com/factly/dega-server/service/core/model"
"github.com/factly/x/errorx"
"github.com/factly/x/loggerx"
"github.com/factly/x/paginationx"
"github.com/go-chi/chi"
"github.com/gorilla/feeds"
)
// Feeds writes an RSS feed of published posts for the space in the
// "space_id" path param, restricted to the authors whose comma-separated
// slugs appear in the "slugs" path param.
func Feeds(w http.ResponseWriter, r *http.Request) {
	spaceID := chi.URLParam(r, "space_id")
	sID, err := strconv.Atoi(spaceID)
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InvalidID()))
		return
	}

	offset, limit := paginationx.Parse(r.URL.Query())
	sort := r.URL.Query().Get("sort")
	if sort != "asc" {
		sort = "desc"
	}

	// create slug map from author's slugs provided in path param
	slugs := chi.URLParam(r, "slugs")
	authorSlugs := strings.Split(slugs, ",")
	slugMap := make(map[string]bool)
	for _, each := range authorSlugs {
		slugMap[each] = true
	}

	space := model.Space{}
	space.ID = uint(sID)
	if err := config.DB.Preload("Logo").First(&space).Error; err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.RecordNotFound()))
		return
	}

	now := time.Now()
	feed := &feeds.Feed{
		Id:          fmt.Sprint(space.ID),
		Title:       space.Name,
		Subtitle:    space.TagLine,
		Link:        &feeds.Link{Href: space.SiteAddress},
		Description: space.Description,
		Created:     now,
	}

	if space.Logo != nil {
		spaceLogo := map[string]interface{}{}
		_ = json.Unmarshal(space.Logo.URL.RawMessage, &spaceLogo)
		if rawURL, found := spaceLogo["raw"]; found {
			feed.Image = &feeds.Image{
				Title: space.Logo.Name,
				Url:   rawURL.(string),
				Link:  rawURL.(string),
			}
		}
	}

	// get post authors from published posts in given space
	// BUG FIX: the column is "is_page" (the second query below and the posts
	// schema both use is_page); the filter previously referenced "page".
	postAuthors := make([]model.PostAuthor, 0)
	config.DB.Model(&model.PostAuthor{}).Joins("JOIN posts ON posts.id = post_authors.post_id").Where("status = ? AND space_id = ? AND is_page = ?", "publish", spaceID, false).Find(&postAuthors)

	var userID int
	if len(postAuthors) > 0 {
		userID = int(postAuthors[0].AuthorID)
	}

	// create list of author ids whose posts are to be included
	authorIDs := make([]uint, 0)
	authorMap := Mapper(space.OrganisationID, userID)
	for _, author := range authorMap {
		if _, found := slugMap[author.Slug]; found {
			authorIDs = append(authorIDs, author.ID)
		}
	}

	postList := make([]model.Post, 0)
	config.DB.Model(&model.Post{}).Joins("JOIN post_authors ON posts.id = post_authors.post_id").Where(&model.Post{
		Status:  "publish",
		SpaceID: uint(sID),
	}).Where("is_page = ?", false).Where("author_id IN (?)", authorIDs).Where("post_authors.deleted_at IS NULL").Order("created_at " + sort).Offset(offset).Limit(limit).Find(&postList)

	// generate post author map (append handles the first insertion per key)
	postAuthorMap := make(map[uint][]uint)
	for _, po := range postAuthors {
		postAuthorMap[po.PostID] = append(postAuthorMap[po.PostID], po.AuthorID)
	}

	for _, post := range postList {
		item := feeds.Item{
			Id:          fmt.Sprint(post.ID),
			Title:       post.Title,
			Link:        &feeds.Link{Href: fmt.Sprint("https://factly.org/", post.Slug)},
			Updated:     post.UpdatedAt,
			Description: post.Excerpt,
			Content:     post.HTMLDescription,
		}
		// Published posts should carry a date, but guard the pointer instead
		// of risking a nil dereference.
		if post.PublishedDate != nil {
			item.Created = *post.PublishedDate
		}
		// Guard against a post with no author entry: indexing [0] on an
		// absent map entry would panic.
		if ids := postAuthorMap[post.ID]; len(ids) > 0 {
			author := authorMap[fmt.Sprint(ids[0])]
			authorName := fmt.Sprint(author.FirstName, " ", author.LastName)
			if authorName != " " {
				item.Author = &feeds.Author{Name: authorName, Email: author.Email}
			}
		}
		feed.Items = append(feed.Items, &item)
	}

	if err := feed.WriteRss(w); err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
		return
	}
}
<file_sep>/service/core/action/request/space/create.go
package space
import (
"context"
"encoding/json"
"errors"
"net/http"
"github.com/factly/dega-server/config"
"github.com/factly/dega-server/service/core/model"
"github.com/factly/dega-server/util"
"github.com/factly/x/errorx"
"github.com/factly/x/loggerx"
"github.com/factly/x/middlewarex"
"github.com/factly/x/renderx"
"github.com/factly/x/validationx"
)
// request - Create space permission request
// @Summary Create space permission request
// @Description Create space permission request
// @Tags Space_Permissions_Request
// @ID add-space-permission-request
// @Consume json
// @Produce json
// @Param X-User header string true "User ID"
// @Param X-Space header string true "Space ID"
// @Param Request body spacePermissionRequest true "Request Object"
// @Success 201 {object} model.SpacePermissionRequest
// @Failure 400 {array} string
// @Router /core/requests/spaces [post]
func Create(w http.ResponseWriter, r *http.Request) {
	uID, err := middlewarex.GetUser(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}

	request := spacePermissionRequest{}
	err = json.NewDecoder(r.Body).Decode(&request)
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.DecodeError()))
		return
	}

	validationError := validationx.Check(request)
	if validationError != nil {
		loggerx.Error(errors.New("validation error"))
		errorx.Render(w, validationError)
		return
	}

	// Fetch space for which request is made
	space := model.Space{}
	space.ID = uint(request.SpaceID)
	err = config.DB.First(&space).Error
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.GetMessage("space not found", http.StatusNotFound)))
		return
	}

	// Only organisation owners may raise a space permission request.
	isAdmin, err := util.CheckOwnerFromKavach(uID, int(space.OrganisationID))
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.GetMessage(err.Error(), http.StatusInternalServerError)))
		return
	}
	if !isAdmin {
		// BUG FIX: err is guaranteed nil here; log a meaningful error
		// instead of logging nil.
		loggerx.Error(errors.New("user is not owner of the organisation"))
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}

	result := model.SpacePermissionRequest{
		Request: model.Request{
			Title:       request.Title,
			Description: request.Description,
			Status:      "pending",
		},
		SpaceID:   uint(request.SpaceID),
		Posts:     request.Posts,
		Podcast:   request.Podcast,
		Episodes:  request.Episodes,
		Media:     request.Media,
		FactCheck: request.FactCheck,
	}

	err = config.DB.WithContext(context.WithValue(r.Context(), permissionContext, uID)).Model(&model.SpacePermissionRequest{}).Create(&result).Error
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.DBError()))
		return
	}

	renderx.JSON(w, http.StatusCreated, result)
}
<file_sep>/service/core/action/format/update.go
package format
import (
"encoding/json"
"errors"
"net/http"
"strconv"
"github.com/factly/dega-server/config"
"github.com/factly/dega-server/service/core/model"
"github.com/factly/dega-server/util"
"github.com/factly/x/errorx"
"github.com/factly/x/loggerx"
"github.com/factly/x/meilisearchx"
"github.com/factly/x/middlewarex"
"github.com/factly/x/renderx"
"github.com/factly/x/slugx"
"github.com/factly/x/validationx"
"github.com/go-chi/chi"
"gorm.io/gorm"
)
// update - Update format by id
// @Summary Update a format by id
// @Description Update format by ID
// @Tags Format
// @ID update-format-by-id
// @Produce json
// @Consume json
// @Param X-User header string true "User ID"
// @Param format_id path string true "Format ID"
// @Param X-Space header string true "Space ID"
// @Param Format body format false "Format"
// @Success 200 {object} model.Format
// @Router /core/formats/{format_id} [put]
func update(w http.ResponseWriter, r *http.Request) {
	sID, err := middlewarex.GetSpace(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}
	uID, err := middlewarex.GetUser(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}

	formatID := chi.URLParam(r, "format_id")
	id, err := strconv.Atoi(formatID)
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InvalidID()))
		return
	}

	result := &model.Format{}
	result.ID = uint(id)

	// check record exists or not
	err = config.DB.Where(&model.Format{
		SpaceID: uint(sID),
	}).First(&result).Error
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.RecordNotFound()))
		return
	}

	// Local renamed from "format" to avoid shadowing the package-level type.
	formatReq := &format{}
	err = json.NewDecoder(r.Body).Decode(&formatReq)
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.DecodeError()))
		return
	}

	validationError := validationx.Check(formatReq)
	if validationError != nil {
		loggerx.Error(errors.New("validation error"))
		errorx.Render(w, validationError)
		return
	}

	var formatSlug string
	// Get table name (needed for slug uniqueness and name checks)
	stmt := &gorm.Statement{DB: config.DB}
	_ = stmt.Parse(&model.Format{})
	tableName := stmt.Schema.Table

	// Keep the slug when unchanged; otherwise derive a unique one from the
	// requested slug (if valid) or from the format name.
	if result.Slug == formatReq.Slug {
		formatSlug = result.Slug
	} else if formatReq.Slug != "" && slugx.Check(formatReq.Slug) {
		formatSlug = slugx.Approve(&config.DB, formatReq.Slug, sID, tableName)
	} else {
		formatSlug = slugx.Approve(&config.DB, slugx.Make(formatReq.Name), sID, tableName)
	}

	// Check if format with same name exist
	if formatReq.Name != result.Name && util.CheckName(uint(sID), formatReq.Name, tableName) {
		loggerx.Error(errors.New(`format with same name exist`))
		errorx.Render(w, errorx.Parser(errorx.SameNameExist()))
		return
	}

	tx := config.DB.Begin()
	// BUG FIX: the update's error was previously ignored; a failed UPDATE now
	// rolls back and reports a DB error instead of proceeding with stale data.
	// NOTE: gorm's struct-based Updates skips zero-value fields, so clearing a
	// field to its zero value (e.g. empty name) will not persist.
	err = tx.Model(&result).Updates(model.Format{
		Base:        config.Base{UpdatedByID: uint(uID)},
		Name:        formatReq.Name,
		Slug:        formatSlug,
		Description: formatReq.Description,
		MetaFields:  formatReq.MetaFields,
	}).First(&result).Error
	if err != nil {
		tx.Rollback()
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.DBError()))
		return
	}

	// Update into meili index
	meiliObj := map[string]interface{}{
		"id":          result.ID,
		"kind":        "format",
		"name":        result.Name,
		"slug":        result.Slug,
		"description": result.Description,
		"space_id":    result.SpaceID,
	}

	err = meilisearchx.UpdateDocument("dega", meiliObj)
	if err != nil {
		tx.Rollback()
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
		return
	}
	tx.Commit()

	if util.CheckNats() {
		if err = util.NC.Publish("format.updated", result); err != nil {
			loggerx.Error(err)
			errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
			return
		}
	}

	renderx.JSON(w, http.StatusOK, result)
}
<file_sep>/service/fact-check/route.go
package factcheck
import (
"net/http"
"github.com/go-chi/chi"
"github.com/factly/dega-server/service/fact-check/action/claim"
"github.com/factly/dega-server/service/fact-check/action/claimant"
"github.com/factly/dega-server/service/fact-check/action/google"
"github.com/factly/dega-server/service/fact-check/action/rating"
)
// Router mounts the CRUD sub-routers of the fact-check service.
func Router() http.Handler {
	router := chi.NewRouter()

	router.Mount("/claimants", claimant.Router())
	router.Mount("/claims", claim.Router())
	router.Mount("/ratings", rating.Router())
	router.Mount("/google", google.Router())

	return router
}
<file_sep>/util/organisationID.go
package util
import (
"context"
"errors"
"net/http"
"strings"
"github.com/factly/dega-server/config"
"github.com/factly/dega-server/service/core/model"
"github.com/factly/x/middlewarex"
)
// ctxKeyOrganisationID is an unexported key type so this package's context
// key cannot collide with keys defined elsewhere.
type ctxKeyOrganisationID int

// OrganisationIDKey is the key that holds the organisation ID in a request context.
const OrganisationIDKey ctxKeyOrganisationID = 0
// GenerateOrganisation is middleware that resolves the organisation ID of the
// request's space (from the X-Space header) and stores it in the request
// context. Requests routed under "/spaces" are passed through untouched.
func GenerateOrganisation(h http.Handler) http.Handler {
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		pathTokens := strings.Split(strings.Trim(r.URL.Path, "/"), "/")

		// Space routes manage spaces themselves; no organisation lookup there.
		if len(pathTokens) > 1 && pathTokens[1] == "spaces" {
			h.ServeHTTP(w, r)
			return
		}

		ctx := r.Context()
		spaceID, err := middlewarex.GetSpace(ctx)
		if err != nil {
			w.WriteHeader(http.StatusUnauthorized)
			return
		}

		space := model.Space{}
		space.ID = uint(spaceID)
		if err = config.DB.First(&space).Error; err != nil {
			w.WriteHeader(http.StatusUnauthorized)
			return
		}

		ctx = context.WithValue(ctx, OrganisationIDKey, space.OrganisationID)
		h.ServeHTTP(w, r.WithContext(ctx))
	})
}
// GetOrganisation returns the organisation ID previously stored in ctx by
// GenerateOrganisation, or an error when the context or the value is absent.
func GetOrganisation(ctx context.Context) (int, error) {
	if ctx == nil {
		return 0, errors.New("context not found")
	}
	val := ctx.Value(OrganisationIDKey)
	if val == nil {
		return 0, errors.New("something went wrong")
	}
	// Value is stored as int by GenerateOrganisation; a mismatched type panics,
	// matching the original behavior.
	return val.(int), nil
}
<file_sep>/service/core/action/author/list.go
package author
import (
"encoding/json"
"fmt"
"net/http"
"strconv"
"github.com/factly/x/loggerx"
"github.com/spf13/viper"
"github.com/factly/dega-server/service/core/model"
"github.com/factly/dega-server/util"
"github.com/factly/x/errorx"
"github.com/factly/x/middlewarex"
"github.com/factly/x/paginationx"
"github.com/factly/x/renderx"
)
// paging is the response envelope for the author-list endpoint.
type paging struct {
	Total int            `json:"total"` // total number of users in the organisation
	Nodes []model.Author `json:"nodes"` // authors for the requested page
}
// list - Get all authors
// @Summary Show all authors
// @Description Get all authors
// @Tags Authors
// @ID get-all-authors
// @Produce json
// @Param X-User header string true "User ID"
// @Param X-Space header string true "Space ID"
// @Param limit query string false "limit per page"
// @Param page query string false "page number"
// @Success 200 {object} paging
// @Router /core/authors [get]
func list(w http.ResponseWriter, r *http.Request) {
	uID, err := middlewarex.GetUser(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}
	oID, err := util.GetOrganisation(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}

	result := paging{}
	result.Nodes = make([]model.Author, 0)

	// Authors are the organisation's users, fetched from the kavach service.
	url := fmt.Sprint(viper.GetString("kavach_url"), "/organisations/", oID, "/users")
	req, err := http.NewRequest("GET", url, nil)
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
		return
	}
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("X-User", strconv.Itoa(uID))

	client := &http.Client{}
	resp, err := client.Do(req)
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.NetworkError()))
		return
	}
	defer resp.Body.Close()

	// BUG FIX: the response status was never checked; a non-200 reply
	// previously surfaced only as an opaque JSON decode error.
	if resp.StatusCode != http.StatusOK {
		loggerx.Error(fmt.Errorf("kavach returned status %d while fetching users", resp.StatusCode))
		errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
		return
	}

	users := make([]model.Author, 0)
	err = json.NewDecoder(resp.Body).Decode(&users)
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
		return
	}

	// Paginate the user list in memory, clamping the window to valid bounds.
	offset, limit := paginationx.Parse(r.URL.Query())
	total := len(users)
	lowerLimit := offset
	upperLimit := offset + limit
	if offset > total {
		lowerLimit = 0
		upperLimit = 0
	} else if offset+limit > total {
		lowerLimit = offset
		upperLimit = total
	}

	result.Nodes = users[lowerLimit:upperLimit]
	result.Total = total

	renderx.JSON(w, http.StatusOK, result)
}
<file_sep>/service/core/action/webhook/route.go
package webhook
import (
"github.com/factly/dega-server/util"
"github.com/go-chi/chi"
"github.com/jinzhu/gorm/dialects/postgres"
)
// webhook is the request payload for creating or updating a webhook.
type webhook struct {
	Name     string         `json:"name"`
	URL      string         `json:"url" validate:"required"`      // target URL to call
	Enabled  bool           `json:"enabled"`                      // whether the hook fires
	EventIDs []uint         `json:"event_ids" validate:"required"` // events that trigger it
	Tags     postgres.Jsonb `json:"tags" swaggertype:"primitive,string"`
}
// Router registers the webhook endpoints; every route is guarded by the
// matching keto policy check for the "webhooks" entity.
func Router() chi.Router {
	router := chi.NewRouter()
	entity := "webhooks"

	router.With(util.CheckKetoPolicy(entity, "get")).Get("/", list)
	router.With(util.CheckKetoPolicy(entity, "get")).Get("/logs", logs)
	router.With(util.CheckKetoPolicy(entity, "create")).Post("/", create)

	router.Route("/{webhook_id}", func(sub chi.Router) {
		sub.With(util.CheckKetoPolicy(entity, "get")).Get("/", details)
		sub.With(util.CheckKetoPolicy(entity, "update")).Put("/", update)
		sub.With(util.CheckKetoPolicy(entity, "delete")).Delete("/", delete)
	})

	return router
}
<file_sep>/service/core/action/post/template.go
package post
import (
"context"
"encoding/json"
"errors"
"net/http"
"github.com/factly/dega-server/config"
"github.com/factly/dega-server/service/core/model"
"github.com/factly/dega-server/util"
"github.com/factly/x/errorx"
"github.com/factly/x/loggerx"
"github.com/factly/x/meilisearchx"
"github.com/factly/x/middlewarex"
"github.com/factly/x/renderx"
"github.com/factly/x/validationx"
)
// templateData is the request body for creating a template from an existing post.
type templateData struct {
	PostID uint `json:"post_id" validate:"required"` // post to copy into a template
}
// create - create template
// @Summary create template
// @Description Create template
// @Tags Post
// @ID create-template
// @Produce json
// @Consume json
// @Param X-User header string true "User ID"
// @Param X-Space header string true "Space ID"
// @Param TemplateData body templateData false "TemplateData"
// @Success 200 {object} model.Post
// @Router /core/posts/templates [post]
func createTemplate(w http.ResponseWriter, r *http.Request) {
	sID, err := middlewarex.GetSpace(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}
	uID, err := middlewarex.GetUser(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}

	templateReq := &templateData{}
	err = json.NewDecoder(r.Body).Decode(&templateReq)
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.DecodeError()))
		return
	}

	validationError := validationx.Check(templateReq)
	if validationError != nil {
		loggerx.Error(errors.New("validation error"))
		errorx.Render(w, validationError)
		return
	}

	// The template is based on an existing non-page post in this space.
	result := model.Post{}
	result.ID = uint(templateReq.PostID)
	err = config.DB.Where(&model.Post{
		SpaceID: uint(sID),
	}).Where("is_page = ?", false).Preload("Tags").Preload("Categories").First(&result).Error
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.RecordNotFound()))
		return
	}

	// Copy the post and reset fields that must differ on a template.
	template := result
	template.Status = "template"
	template.PublishedDate = nil
	template.Base = config.Base{}

	tx := config.DB.WithContext(context.WithValue(r.Context(), userContext, uID)).Begin()
	err = tx.Model(&model.Post{}).Create(&template).Error
	if err != nil {
		tx.Rollback()
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.DBError()))
		return
	}

	tx.Preload("Medium").Preload("Format").Preload("Tags").Preload("Categories").Preload("Space").First(&template)

	tagIDs := make([]uint, 0)
	categoryIDs := make([]uint, 0)
	for _, tag := range template.Tags {
		tagIDs = append(tagIDs, tag.ID)
	}
	for _, category := range template.Categories {
		categoryIDs = append(categoryIDs, category.ID)
	}

	// BUG FIX: PublishedDate is nil for templates (reset above), so calling
	// Unix() on it unconditionally caused a nil-pointer panic. Index 0 when
	// no published date is set.
	var publishedDateUnix int64
	if template.PublishedDate != nil {
		publishedDateUnix = template.PublishedDate.Unix()
	}

	// Insert into meili index
	meiliObj := map[string]interface{}{
		"id":             template.ID,
		"kind":           "post",
		"title":          template.Title,
		"subtitle":       template.Subtitle,
		"slug":           template.Slug,
		"status":         template.Status,
		"excerpt":        template.Excerpt,
		"description":    template.Description,
		"is_featured":    template.IsFeatured,
		"is_sticky":      template.IsSticky,
		"is_highlighted": template.IsHighlighted,
		"format_id":      template.FormatID,
		"published_date": publishedDateUnix,
		"space_id":       template.SpaceID,
		"tag_ids":        tagIDs,
		"category_ids":   categoryIDs,
	}

	err = meilisearchx.AddDocument("dega", meiliObj)
	if err != nil {
		tx.Rollback()
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
		return
	}
	tx.Commit()

	if util.CheckNats() {
		// NOTE(review): the source post (result) is published here rather than
		// the newly created template — confirm this is intentional.
		if err = util.NC.Publish("post.template.created", result); err != nil {
			loggerx.Error(err)
			errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
			return
		}
	}

	renderx.JSON(w, http.StatusOK, template)
}
<file_sep>/service/core/action/event/create.go
package event
import (
"encoding/json"
"errors"
"fmt"
"net/http"
"reflect"
"github.com/factly/dega-server/service/core/model"
"github.com/factly/dega-server/test"
"github.com/factly/x/errorx"
"github.com/factly/x/loggerx"
"github.com/factly/x/middlewarex"
"github.com/factly/x/renderx"
"github.com/factly/x/requestx"
"github.com/factly/x/validationx"
"github.com/spf13/viper"
)
// create - Create Event
// @Summary Create Event
// @Description Create Event
// @Tags Events
// @ID add-event
// @Consume json
// @Produce json
// @Param X-User header string true "User ID"
// @Param X-Space header string true "Space ID"
// @Param Event body event true "Event Object"
// @Success 201 {object} model.Event
// @Failure 400 {array} string
// @Router /core/events [post]
func create(w http.ResponseWriter, r *http.Request) {
	uID, err := middlewarex.GetUser(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}
	sID, err := middlewarex.GetSpace(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}

	// Local renamed to avoid shadowing the package-level "event" type.
	eventReq := &event{}
	if err = json.NewDecoder(r.Body).Decode(eventReq); err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.DecodeError()))
		return
	}

	if validationError := validationx.Check(eventReq); validationError != nil {
		loggerx.Error(errors.New("validation error"))
		errorx.Render(w, validationError)
		return
	}

	// append app and space tag even if not provided
	if err = AddTags(eventReq, sID); err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
		return
	}

	// Forward the event to the hukz service, which owns event storage.
	hukzURL := viper.GetString("hukz_url") + "/events"
	headers := map[string]string{
		"X-User": fmt.Sprint(uID),
	}
	resp, err := requestx.Request("POST", hukzURL, eventReq, headers)
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
		return
	}

	switch {
	case resp.StatusCode == http.StatusUnprocessableEntity:
		errorx.Render(w, errorx.Parser(errorx.CannotSaveChanges()))
		return
	case resp.StatusCode != http.StatusCreated:
		errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
		return
	}

	var eventRes model.Event
	if err = json.NewDecoder(resp.Body).Decode(&eventRes); err != nil {
		errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
		return
	}

	renderx.JSON(w, http.StatusCreated, eventRes)
}
// AddTags ensures the "app" and "space" tags are always present on the
// event, merging them into any tags the caller supplied.
func AddTags(event *event, sID int) error {
	tagMap := map[string]string{}

	// Only unmarshal when the event carries non-empty, non-nil tags.
	if len(event.Tags.RawMessage) > 0 && !reflect.DeepEqual(event.Tags, test.NilJsonb()) {
		if err := json.Unmarshal(event.Tags.RawMessage, &tagMap); err != nil {
			return err
		}
	}

	tagMap["app"] = "dega"
	tagMap["space"] = fmt.Sprint(sID)

	raw, err := json.Marshal(tagMap)
	if err != nil {
		return err
	}
	event.Tags.RawMessage = raw
	return nil
}
<file_sep>/service/fact-check/model/claim.go
package model
import (
"errors"
"time"
"github.com/factly/dega-server/service/core/model"
"gorm.io/gorm"
"github.com/factly/dega-server/config"
"github.com/jinzhu/gorm/dialects/postgres"
)
// Claim is a fact-checked claim, linked to the claimant who made it and the
// rating it received.
type Claim struct {
	config.Base
	Claim string `gorm:"column:claim" json:"claim"`
	Slug  string `gorm:"column:slug" json:"slug"`
	// ClaimDate is when the claim was made; CheckedDate when it was fact-checked.
	ClaimDate       *time.Time     `gorm:"column:claim_date" json:"claim_date" sql:"DEFAULT:NULL"`
	CheckedDate     *time.Time     `gorm:"column:checked_date" json:"checked_date" sql:"DEFAULT:NULL"`
	ClaimSources    postgres.Jsonb `gorm:"column:claim_sources" json:"claim_sources" swaggertype:"primitive,string"`
	Description     postgres.Jsonb `gorm:"column:description" json:"description" swaggertype:"primitive,string"`
	HTMLDescription string         `gorm:"column:html_description" json:"html_description,omitempty"`
	ClaimantID      uint           `gorm:"column:claimant_id" json:"claimant_id"`
	Claimant        Claimant       `json:"claimant"`
	RatingID        uint           `gorm:"column:rating_id" json:"rating_id"`
	Rating          Rating         `json:"rating"`
	Fact            string         `gorm:"column:fact" json:"fact"`
	ReviewSources   postgres.Jsonb `gorm:"column:review_sources" json:"review_sources" swaggertype:"primitive,string"`
	MetaFields      postgres.Jsonb `gorm:"column:meta_fields" json:"meta_fields" swaggertype:"primitive,string"`
	SpaceID         uint           `gorm:"column:space_id" json:"space_id"`
	Space           *model.Space   `json:"space,omitempty"`
}
// PostClaim associates a claim with a post, with an ordering position.
type PostClaim struct {
	config.Base
	ClaimID  uint  `gorm:"column:claim_id" json:"claim_id"`
	Claim    Claim `json:"claim"`
	PostID   uint  `gorm:"column:post_id" json:"post_id"`
	Position uint  `gorm:"column:position" json:"position"` // order of the claim within the post
}
// BeforeSave validates that the claim's claimant and rating (when set)
// belong to the same space as the claim itself.
func (claim *Claim) BeforeSave(tx *gorm.DB) (e error) {
	if claim.ClaimantID > 0 {
		c := Claimant{}
		c.ID = claim.ClaimantID
		if err := tx.Model(&Claimant{}).Where(Claimant{SpaceID: claim.SpaceID}).First(&c).Error; err != nil {
			return errors.New("claimant do not belong to same space")
		}
	}

	if claim.RatingID > 0 {
		rat := Rating{}
		rat.ID = claim.RatingID
		if err := tx.Model(&Rating{}).Where(Rating{SpaceID: claim.SpaceID}).First(&rat).Error; err != nil {
			return errors.New("rating do not belong to same space")
		}
	}

	return nil
}
// claimUser is the context key under which HTTP handlers store the acting
// user's ID for this hook to read.
var claimUser config.ContextKey = "claim_user"

// BeforeCreate stamps the creating user on the claim. The user ID is read
// from the transaction's context; when absent the hook is a no-op.
func (claim *Claim) BeforeCreate(tx *gorm.DB) error {
	ctx := tx.Statement.Context
	userID := ctx.Value(claimUser)

	if userID == nil {
		return nil
	}
	// Handlers store the ID as int; a different type would panic here.
	uID := userID.(int)

	claim.CreatedByID = uint(uID)
	claim.UpdatedByID = uint(uID)
	return nil
}
// postUser is the context key under which HTTP handlers store the acting
// user's ID for this hook to read.
var postUser config.ContextKey = "post_user"

// BeforeCreate stamps the creating user on the post-claim association. The
// user ID is read from the transaction's context; when absent the hook is a
// no-op.
func (pc *PostClaim) BeforeCreate(tx *gorm.DB) error {
	ctx := tx.Statement.Context
	userID := ctx.Value(postUser)

	if userID == nil {
		return nil
	}
	// Handlers store the ID as int; a different type would panic here.
	uID := userID.(int)

	pc.CreatedByID = uint(uID)
	pc.UpdatedByID = uint(uID)
	return nil
}
<file_sep>/test/service/core/meta/details_test.go
package meta
import (
"net/http"
"net/http/httptest"
"testing"
"github.com/factly/dega-server/service"
"github.com/factly/dega-server/test"
"github.com/gavv/httpexpect"
"github.com/spf13/viper"
"gopkg.in/h2non/gock.v1"
)
// TestMetaDetails exercises the meta details endpoint for the iframely,
// oembed and link metadata types against a mocked iFramely service, plus the
// failure paths (missing query params, service down, service timing out).
// path, siteUrl and linkmeta are package-level fixtures defined elsewhere in
// this test package.
func TestMetaDetails(t *testing.T) {
	mock := test.SetupMockDB()

	// Mock the external iFramely service before starting the server.
	test.IFramelyGock()
	testServer := httptest.NewServer(service.RegisterRoutes())
	// Let real traffic reach the test server while gock intercepts the rest.
	gock.New(testServer.URL).EnableNetworking().Persist()
	defer gock.DisableNetworking()
	defer testServer.Close()

	// create httpexpect instance
	e := httpexpect.New(t, testServer.URL)

	t.Run("get iframely metadata for github.com", func(t *testing.T) {
		e.GET(path).
			WithQueryObject(map[string]interface{}{
				"url":  siteUrl,
				"type": "iframely",
			}).
			Expect().
			Status(http.StatusOK).
			JSON().
			Object()
	})

	t.Run("get oembed metadata for github.com", func(t *testing.T) {
		e.GET(path).
			WithQueryObject(map[string]interface{}{
				"url":  siteUrl,
				"type": "oembed",
			}).
			Expect().
			Status(http.StatusOK).
			JSON().
			Object()
	})

	t.Run("get link metadata for github.com", func(t *testing.T) {
		res := e.GET(path).
			WithQueryObject(map[string]interface{}{
				"url":  siteUrl,
				"type": "link",
			}).
			Expect().
			Status(http.StatusOK).
			JSON().
			Object()
		// The link type returns a structured payload; compare against fixture.
		res.Value("success").Number().Equal(1)
		res.Value("meta").Object().ContainsMap(linkmeta["meta"])
	})

	t.Run("request metadata without url", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		e.GET(path).
			WithQueryObject(map[string]interface{}{
				"type": "iframely",
			}).
			Expect().
			Status(http.StatusBadRequest)
	})

	t.Run("request metadata without type", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		e.GET(path).
			WithQueryObject(map[string]interface{}{
				"url": siteUrl,
			}).
			Expect().
			Status(http.StatusBadRequest)
	})

	t.Run("iframely is down", func(t *testing.T) {
		// Dropping all gock mocks simulates an unreachable iFramely service.
		gock.Off()
		test.CheckSpaceMock(mock)
		e.GET(path).
			WithQueryObject(map[string]interface{}{
				"url":  siteUrl,
				"type": "iframely",
			}).
			Expect().
			Status(http.StatusInternalServerError)
	})

	t.Run("iframely is timed out", func(t *testing.T) {
		gock.Off()
		gock.New(testServer.URL).EnableNetworking().Persist()
		defer gock.DisableNetworking()

		// Make the mocked iFramely endpoint reply with 408 for any url param.
		gock.New(viper.GetString("iframely_url")).
			Get("/iframely").
			ParamPresent("url").
			Persist().
			Reply(http.StatusRequestTimeout)

		e.GET(path).
			WithQueryObject(map[string]interface{}{
				"url":  siteUrl,
				"type": "iframely",
			}).
			Expect().
			Status(http.StatusInternalServerError)
	})
}
<file_sep>/service/core/action/policy/mapper.go
package policy
import (
"encoding/json"
"strings"
"github.com/factly/dega-server/service/core/model"
"github.com/factly/dega-server/util"
)
// Mapper converts a raw keto policy into the application's Policy model,
// resolving subject IDs to authors via userMap.
func Mapper(ketoPolicy model.KetoPolicy, userMap map[string]model.Author) model.Policy {
	// The permission-extraction loop previously duplicated here is identical
	// to GetPermissions (whose userID argument is unused), so delegate to it.
	permissions := GetPermissions(ketoPolicy, 0)

	// Keep only subjects that resolve to known authors.
	authors := make([]model.Author, 0)
	for _, subject := range ketoPolicy.Subjects {
		if author, exists := userMap[subject]; exists {
			authors = append(authors, author)
		}
	}

	var result model.Policy
	// Policy IDs look like "<prefix>:...:<name>"; the last segment is the name.
	nameAll := strings.Split(ketoPolicy.ID, ":")
	result.ID = nameAll[len(nameAll)-1]
	result.Name = nameAll[len(nameAll)-1]
	result.Description = ketoPolicy.Description
	result.Permissions = permissions
	result.Users = authors
	return result
}
// GetPermissions gives permissions from policy for given userID.
//
// NOTE(review): the userID parameter is currently unused — permissions are
// derived from the policy's resources and actions alone; confirm whether
// per-user filtering was intended.
func GetPermissions(ketoPolicy model.KetoPolicy, userID uint) []model.Permission {
	permissions := make([]model.Permission, 0)
	for _, resource := range ketoPolicy.Resources {
		var eachRule model.Permission
		// Resource IDs look like "<prefix>:...:<name>"; the last segment is the
		// resource name and the joined tail matches actions to this resource.
		resourcesPrefixAll := strings.Split(resource, ":")
		resourcesPrefix := strings.Join(resourcesPrefixAll[1:], ":")
		eachRule.Resource = resourcesPrefixAll[len(resourcesPrefixAll)-1]
		eachRule.Actions = make([]string, 0)
		for _, action := range ketoPolicy.Actions {
			// An action belongs to this resource when it shares the resource's
			// prefix under the "actions:" namespace.
			if strings.HasPrefix(action, "actions:"+resourcesPrefix) {
				actionSplitAll := strings.Split(action, ":")
				eachRule.Actions = append(eachRule.Actions, actionSplitAll[len(actionSplitAll)-1])
			}
		}
		permissions = append(permissions, eachRule)
	}
	return permissions
}
// GetAllPolicies fetches every keto policy registered under the regex flavor.
func GetAllPolicies() ([]model.KetoPolicy, error) {
	resp, err := util.KetoGetRequest("/engines/acp/ory/regex/policies")
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()

	var policyList []model.KetoPolicy
	if err = json.NewDecoder(resp.Body).Decode(&policyList); err != nil {
		return nil, err
	}
	return policyList, nil
}
<file_sep>/service/core/model/medium.go
package model
import (
"encoding/json"
"net/url"
"github.com/factly/dega-server/config"
"github.com/jinzhu/gorm/dialects/postgres"
"github.com/spf13/viper"
"gorm.io/gorm"
)
// Medium represents a media asset belonging to a space.
type Medium struct {
	config.Base
	Name        string `gorm:"column:name" json:"name"`
	Slug        string `gorm:"column:slug" json:"slug"`
	Type        string `gorm:"column:type" json:"type"`
	Title       string `gorm:"column:title" json:"title"`
	Description string `gorm:"column:description" json:"description"`
	Caption     string `gorm:"column:caption" json:"caption"`
	AltText     string `gorm:"column:alt_text" json:"alt_text"`
	FileSize    int64  `gorm:"column:file_size" json:"file_size"`
	// URL holds a JSON object; when it contains a "raw" key, the AfterCreate
	// and AfterFind hooks add a derived "proxy" URL.
	URL        postgres.Jsonb `gorm:"column:url" json:"url" swaggertype:"primitive,string"`
	Dimensions string         `gorm:"column:dimensions" json:"dimensions"`
	MetaFields postgres.Jsonb `gorm:"column:meta_fields" json:"meta_fields" swaggertype:"primitive,string"`
	SpaceID    uint           `gorm:"column:space_id" json:"space_id"`
}
// TableName overrides gorm's derived table name for Medium with "media".
func (Medium) TableName() string {
	return "media"
}
// mediumUser is the context key under which HTTP handlers store the acting
// user's ID for this hook to read.
var mediumUser config.ContextKey = "medium_user"

// BeforeCreate stamps the creating user on the medium. The user ID is read
// from the transaction's context; when absent the hook is a no-op.
func (media *Medium) BeforeCreate(tx *gorm.DB) error {
	ctx := tx.Statement.Context
	userID := ctx.Value(mediumUser)

	if userID == nil {
		return nil
	}
	// Handlers store the ID as int; a different type would panic here.
	uID := userID.(int)

	media.CreatedByID = uint(uID)
	media.UpdatedByID = uint(uID)
	return nil
}
// addProxyURL adds a derived "proxy" entry to the medium's URL JSON, built
// from the configured imageproxy_url and the path of the "raw" URL. It is a
// no-op when imageproxy is not configured, the medium has no URL, or the URL
// JSON has no "raw" key. Extracted because AfterCreate and AfterFind shared
// an identical, duplicated body.
func (media *Medium) addProxyURL() {
	if !viper.IsSet("imageproxy_url") || media.URL.RawMessage == nil {
		return
	}

	resurl := map[string]interface{}{}
	_ = json.Unmarshal(media.URL.RawMessage, &resurl)

	rawURL, found := resurl["raw"]
	if !found {
		return
	}

	urlObj, _ := url.Parse(rawURL.(string))
	resurl["proxy"] = viper.GetString("imageproxy_url") + urlObj.Path

	rawBArr, _ := json.Marshal(resurl)
	media.URL = postgres.Jsonb{
		RawMessage: rawBArr,
	}
}

// AfterCreate hook: attach the image-proxy URL after the record is created.
func (media *Medium) AfterCreate(tx *gorm.DB) (err error) {
	media.addProxyURL()
	return nil
}

// AfterFind hook: attach the image-proxy URL whenever a record is loaded.
func (media *Medium) AfterFind(tx *gorm.DB) (err error) {
	media.addProxyURL()
	return nil
}
<file_sep>/service/fact-check/action/claim/route.go
package claim
import (
"time"
"github.com/factly/dega-server/config"
"github.com/factly/dega-server/util"
"github.com/go-chi/chi"
"github.com/jinzhu/gorm/dialects/postgres"
)
// claim is the request payload for creating or updating a claim.
type claim struct {
	Claim         string         `json:"claim" validate:"required,min=3,max=5000"`
	Slug          string         `json:"slug"`
	// ClaimDate is when the claim was made; CheckedDate when it was fact-checked.
	ClaimDate     *time.Time     `json:"claim_date" `
	CheckedDate   *time.Time     `json:"checked_date"`
	ClaimSources  postgres.Jsonb `json:"claim_sources" swaggertype:"primitive,string"`
	Description   postgres.Jsonb `json:"description" swaggertype:"primitive,string"`
	ClaimantID    uint           `json:"claimant_id" validate:"required"`
	RatingID      uint           `json:"rating_id" validate:"required"`
	Fact          string         `json:"fact"`
	ReviewSources postgres.Jsonb `json:"review_sources" swaggertype:"primitive,string"`
	MetaFields    postgres.Jsonb `json:"meta_fields" swaggertype:"primitive,string"`
}
// userContext is the context key handlers use to pass the acting user's ID
// to the claim model hooks.
var userContext config.ContextKey = "claim_user"

// Router groups the CRUD routes for claims; every route is guarded by the
// matching keto policy check for the "claims" entity.
func Router() chi.Router {
	router := chi.NewRouter()
	entity := "claims"

	router.With(util.CheckKetoPolicy(entity, "get")).Get("/", list)
	router.With(util.CheckKetoPolicy(entity, "create")).Post("/", create)

	router.Route("/{claim_id}", func(sub chi.Router) {
		sub.With(util.CheckKetoPolicy(entity, "get")).Get("/", details)
		sub.With(util.CheckKetoPolicy(entity, "update")).Put("/", update)
		sub.With(util.CheckKetoPolicy(entity, "delete")).Delete("/", delete)
	})

	return router
}
<file_sep>/service/core/action/user/permission.go
package user
import (
"fmt"
"net/http"
"strconv"
"strings"
"github.com/factly/dega-server/util/arrays"
"github.com/factly/x/renderx"
"github.com/go-chi/chi"
"github.com/factly/dega-server/service/core/action/policy"
"github.com/factly/dega-server/util"
"github.com/factly/dega-server/service/core/model"
"github.com/factly/x/errorx"
"github.com/factly/x/loggerx"
"github.com/factly/x/middlewarex"
)
// userpermissions - Get user's permission
// @Summary Get user's permission
// @Description Get user's permission
// @Tags Users
// @ID get-users-permission
// @Consume json
// @Produce json
// @Param X-User header string true "User ID"
// @Param X-Space header string true "Space ID"
// @Param user_id path string true "User ID"
// @Success 200 {object} []model.Permission
// @Router /core/users/{user_id}/permissions [get]
func userpermissions(w http.ResponseWriter, r *http.Request) {
	uID, err := middlewarex.GetUser(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}
	sID, err := middlewarex.GetSpace(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}
	oID, err := util.GetOrganisation(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}
	userID := chi.URLParam(r, "user_id")
	id, err := strconv.Atoi(userID)
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InvalidID()))
		return
	}
	var result []model.Permission
	// check if the user is admin of organisation
	// NOTE(review): the code below treats a nil return as "is admin" — confirm
	// CheckSpaceKetoPermission returns nil on success.
	isAdmin := util.CheckSpaceKetoPermission("all", uint(oID), uint(uID))
	// fetch all the keto policies
	policyList, err := policy.GetAllPolicies()
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
		return
	}
	if isAdmin == nil {
		// logged user is admin and user_id is also admin's
		if id == uID {
			// Admins requesting their own permissions get a blanket grant.
			allPermission := []model.Permission{
				model.Permission{
					Resource: "admin",
					Actions:  []string{"admin"},
				},
			}
			renderx.JSON(w, http.StatusOK, allPermission)
			return
		}
		// Otherwise compute the target user's permissions from keto policies.
		result = GetPermissions(int(oID), int(sID), id, policyList)
	} else {
		// logged user not admin
		errorx.Render(w, errorx.Parser(errorx.GetMessage("Not allowed", http.StatusUnauthorized)))
		return
	}
	renderx.JSON(w, http.StatusOK, result)
}
// GetPermissions computes the effective permissions of user uID within space
// sID of organisation oID by merging all keto policies of that space in
// which the user appears as a subject. Actions for the same resource across
// multiple policies are unioned.
func GetPermissions(oID, sID, uID int, policyList []model.KetoPolicy) []model.Permission {
	permissionsMap := make(map[string][]string)
	spacePrefix := fmt.Sprint("id:org:", oID, ":app:dega:space:", sID, ":")
	// Hoisted out of the loop: the subject comparison string is invariant.
	userID := fmt.Sprint(uID)

	for _, pol := range policyList {
		// Only consider policies scoped to this organisation/space.
		if !strings.HasPrefix(pol.ID, spacePrefix) {
			continue
		}

		// Skip policies that do not list this user as a subject.
		isPresent := false
		for _, user := range pol.Subjects {
			if user == userID {
				isPresent = true
				break
			}
		}
		if !isPresent {
			continue
		}

		// Merge this policy's per-resource actions into the accumulated map.
		for _, per := range policy.GetPermissions(pol, uint(uID)) {
			if _, found := permissionsMap[per.Resource]; !found {
				permissionsMap[per.Resource] = make([]string, 0)
			}
			permissionsMap[per.Resource] = arrays.Union(permissionsMap[per.Resource], per.Actions)
		}
	}

	permissions := make([]model.Permission, 0)
	for res, act := range permissionsMap {
		permissions = append(permissions, model.Permission{
			Resource: res,
			Actions:  act,
		})
	}
	return permissions
}
<file_sep>/service/core/action/permissions/space/my.go
package space
import (
"net/http"
"github.com/factly/dega-server/config"
"github.com/factly/dega-server/service/core/model"
"github.com/factly/x/errorx"
"github.com/factly/x/loggerx"
"github.com/factly/x/middlewarex"
"github.com/factly/x/renderx"
)
// my - Get my Space permissions
// @Summary Show a my Space permissions
// @Description Get my Space permissions
// @Tags Space_Permissions
// @ID get-my-space-permissions
// @Produce json
// @Param X-User header string true "User ID"
// @Param X-Space header string true "Space ID"
// @Success 200 {object} model.SpacePermission
// @Router /core/permissions/spaces/my [get]
func my(w http.ResponseWriter, r *http.Request) {
	spaceID, err := middlewarex.GetSpace(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}

	// Look up the single permission record of the current space.
	permission := model.SpacePermission{}
	err = config.DB.Model(&model.SpacePermission{}).Where(&model.SpacePermission{
		SpaceID: uint(spaceID),
	}).Preload("Space").First(&permission).Error
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.RecordNotFound()))
		return
	}

	renderx.JSON(w, http.StatusOK, permission)
}
<file_sep>/service/podcast/action/episode/details.go
package episode
import (
"fmt"
"net/http"
"strconv"
"github.com/factly/dega-server/config"
"github.com/factly/dega-server/service/core/action/author"
"github.com/factly/dega-server/service/podcast/model"
"github.com/factly/x/errorx"
"github.com/factly/x/loggerx"
"github.com/factly/x/middlewarex"
"github.com/factly/x/renderx"
"github.com/go-chi/chi"
)
// details - Get episode by id
// @Summary Show a episode by id
// @Description Get episode by ID
// @Tags Episode
// @ID get-episode-by-id
// @Produce json
// @Param X-User header string true "User ID"
// @Param X-Space header string true "Space ID"
// @Param episode_id path string true "Episode ID"
// @Success 200 {object} episodeData
// @Router /podcast/episodes/{episode_id} [get]
func details(w http.ResponseWriter, r *http.Request) {
	spaceID, err := middlewarex.GetSpace(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}

	// Parse and validate the episode ID from the URL.
	id, err := strconv.Atoi(chi.URLParam(r, "episode_id"))
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InvalidID()))
		return
	}

	// Look the episode up by primary key, scoped to the caller's space,
	// preloading its podcast/medium associations for the response.
	result := episodeData{}
	result.Episode.ID = uint(id)
	err = config.DB.Model(&model.Episode{}).
		Preload("Podcast").
		Preload("Medium").
		Preload("Podcast.Medium").
		Preload("Podcast.PrimaryCategory").
		Preload("Podcast.Categories").
		Where(&model.Episode{SpaceID: uint(spaceID)}).
		First(&result.Episode).Error
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.RecordNotFound()))
		return
	}

	// Adding authors in response
	authorMap, err := author.All(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.DBError()))
		return
	}

	links := make([]model.EpisodeAuthor, 0)
	config.DB.Model(&model.EpisodeAuthor{}).Where(&model.EpisodeAuthor{
		EpisodeID: uint(id),
	}).Find(&links)

	for _, link := range links {
		result.Authors = append(result.Authors, authorMap[fmt.Sprint(link.AuthorID)])
	}

	renderx.JSON(w, http.StatusOK, result)
}
<file_sep>/service/core/action/medium/route.go
package medium
import (
"github.com/factly/dega-server/config"
"github.com/factly/dega-server/util"
"github.com/go-chi/chi"
"github.com/jinzhu/gorm/dialects/postgres"
)
// medium model - request payload accepted by the medium create/update handlers.
type medium struct {
	Name        string         `json:"name" validate:"required"` // display name, required
	Slug        string         `json:"slug"`                     // optional; generated from Name when absent
	Type        string         `json:"type" validate:"required"` // media type, required
	Title       string         `json:"title"`
	Description string         `json:"description"`
	Caption     string         `json:"caption"`
	AltText     string         `json:"alt_text"`
	FileSize    int64          `json:"file_size" validate:"required"`
	URL         postgres.Jsonb `json:"url" swaggertype:"primitive,string"` // arbitrary JSON describing file location(s)
	Dimensions  string         `json:"dimensions" validate:"required"`
	MetaFields  postgres.Jsonb `json:"meta_fields" swaggertype:"primitive,string"` // free-form metadata
}

// userContext is the context key under which the acting user ID is stored.
var userContext config.ContextKey = "medium_user"
// Router - Group of medium router
func Router() chi.Router {
	router := chi.NewRouter()
	const entity = "media"

	// Collection endpoints.
	router.With(util.CheckKetoPolicy(entity, "get")).Get("/", list)
	router.With(util.CheckKetoPolicy(entity, "create")).Post("/", create)

	// Endpoints for a single medium, each gated by its keto action.
	router.Route("/{medium_id}", func(sub chi.Router) {
		sub.With(util.CheckKetoPolicy(entity, "get")).Get("/", details)
		sub.With(util.CheckKetoPolicy(entity, "update")).Put("/", update)
		sub.With(util.CheckKetoPolicy(entity, "delete")).Delete("/", delete)
	})

	return router
}
<file_sep>/service/core/action/policy/create.go
package policy
import (
"encoding/json"
"net/http"
"github.com/factly/dega-server/service/core/action/author"
"github.com/factly/dega-server/service/core/model"
"github.com/factly/dega-server/util"
"github.com/factly/x/errorx"
"github.com/factly/x/loggerx"
"github.com/factly/x/meilisearchx"
"github.com/factly/x/middlewarex"
"github.com/factly/x/renderx"
)
// create - Create policy
// @Summary Create policy
// @Description Create policy
// @Tags Policy
// @ID add-policy
// @Consume json
// @Produce json
// @Param X-User header string true "User ID"
// @Param X-Space header string true "Space ID"
// @Param Policy body policyReq true "Policy Object"
// @Success 201 {object} model.Policy
// @Router /core/policies [post]
func create(w http.ResponseWriter, r *http.Request) {
	spaceID, err := middlewarex.GetSpace(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}

	userID, err := middlewarex.GetUser(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}

	organisationID, err := util.GetOrganisation(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}

	policyReq := policyReq{}
	err = json.NewDecoder(r.Body).Decode(&policyReq)
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.DecodeError()))
		return
	}

	// Compose the keto policy for this org/space and map it (with author
	// details) into the response model.
	result := Mapper(Composer(organisationID, spaceID, policyReq), author.Mapper(organisationID, userID))

	err = insertIntoMeili(result)
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
		return
	}

	if util.CheckNats() {
		if err = util.NC.Publish("policy.created", result); err != nil {
			loggerx.Error(err)
			errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
			return
		}
	}

	// Fix: respond 201 Created to match the documented @Success code and the
	// other create handlers in this service (tag, episode); previously 200.
	renderx.JSON(w, http.StatusCreated, result)
}
// insertIntoMeili indexes the policy document in the shared "dega" meili index.
func insertIntoMeili(result model.Policy) error {
	document := map[string]interface{}{
		"kind":        "policy",
		"id":          result.ID,
		"name":        result.Name,
		"description": result.Description,
	}
	return meilisearchx.AddDocument("dega", document)
}
<file_sep>/service/core/action/meta/details.go
package meta
import (
"encoding/json"
"fmt"
"net/http"
"github.com/factly/x/errorx"
"github.com/factly/x/loggerx"
"github.com/factly/x/renderx"
"github.com/spf13/viper"
)
// details - Get meta info
// @Summary Get meta info
// @Description Get meta info
// @Tags Meta
// @ID get-meta-info
// @Produce json
// @Param url query string true "URL"
// @Param type query string true "Type"
// @Success 200 {object} metadata
// @Router /meta [get]
func details(w http.ResponseWriter, r *http.Request) {
	url := r.URL.Query().Get("url")
	if url == "" {
		errorx.Render(w, errorx.Parser(errorx.GetMessage("please pass url query parameter", http.StatusBadRequest)))
		return
	}

	// Map the requested type onto an iframely endpoint; "link" reuses oembed.
	// NOTE(review): url is interpolated unescaped into the query string;
	// consider escaping it — confirm what the iframely server expects.
	metaType := r.URL.Query().Get("type")
	var path string
	if metaType == "oembed" || metaType == "link" {
		path = fmt.Sprintf("/oembed?url=%s&omit_script=1", url)
	} else if metaType == "iframely" {
		path = fmt.Sprintf("/iframely?url=%s&omit_script=1", url)
	} else {
		errorx.Render(w, errorx.Parser(errorx.GetMessage("please pass valid type query parameter", http.StatusBadRequest)))
		return
	}

	res, err := http.Get(viper.GetString("iframely_url") + path)
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
		return
	}
	defer res.Body.Close()

	if res.StatusCode != http.StatusOK {
		// Fix: err is nil on this path (checked above), so the original
		// loggerx.Error(err) logged nothing useful; log the real failure.
		loggerx.Error(fmt.Errorf("iframely returned status %d", res.StatusCode))
		errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
		return
	}

	if metaType == "iframely" || metaType == "oembed" {
		// Pass the upstream JSON through untouched.
		var result map[string]interface{}
		err = json.NewDecoder(res.Body).Decode(&result)
		if err != nil {
			loggerx.Error(err)
			errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
			return
		}
		renderx.JSON(w, http.StatusOK, result)
	} else if metaType == "link" {
		// Reshape the oembed response into the editor's link-metadata format;
		// a decode failure is reported as success=0 rather than an error.
		result := metadata{}
		result.Success = 0
		var iframelyres iFramelyRes
		err = json.NewDecoder(res.Body).Decode(&iframelyres)
		if err != nil {
			renderx.JSON(w, http.StatusOK, result)
			return
		}
		result.Meta.Title = iframelyres.Title
		result.Meta.SiteName = iframelyres.ProviderName
		result.Meta.Image = map[string]interface{}{
			"url": iframelyres.ThumbnailURL,
		}
		result.Meta.Description = iframelyres.Description
		result.Success = 1
		renderx.JSON(w, http.StatusOK, result)
	}
}
<file_sep>/go.mod
module github.com/factly/dega-server
go 1.13
require (
github.com/DATA-DOG/go-sqlmock v1.5.0
github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751
github.com/dlmiddlecote/sqlstats v1.0.2
github.com/eduncan911/podcast v1.4.2
github.com/factly/x v0.0.63
github.com/gavv/httpexpect v2.0.0+incompatible
github.com/gavv/httpexpect/v2 v2.1.0
github.com/go-chi/chi v4.1.2+incompatible
github.com/gorilla/feeds v1.1.1
github.com/jinzhu/gorm v1.9.16
github.com/meilisearch/meilisearch-go v0.12.0
github.com/nats-io/gnatsd v1.4.1
github.com/nats-io/nats.go v1.10.0
github.com/prometheus/client_golang v1.9.0
github.com/spf13/cobra v1.1.3
github.com/spf13/viper v1.8.1
github.com/swaggo/http-swagger v1.0.0
github.com/swaggo/swag v1.7.0
gopkg.in/h2non/gock.v1 v1.0.15
gorm.io/driver/postgres v1.0.8
gorm.io/gorm v1.21.11
)
<file_sep>/service/route.go
package service
import (
"fmt"
"net/http"
"github.com/factly/dega-server/config"
"github.com/factly/x/healthx"
_ "github.com/factly/dega-server/docs" // docs is generated by Swag CLI, you have to import it.
"github.com/factly/dega-server/service/core"
"github.com/factly/dega-server/service/core/action/author"
"github.com/factly/dega-server/service/core/action/category"
"github.com/factly/dega-server/service/core/action/format"
"github.com/factly/dega-server/service/core/action/meta"
"github.com/factly/dega-server/service/core/action/post"
"github.com/factly/dega-server/service/core/action/request/organisation"
"github.com/factly/dega-server/service/core/action/request/space"
"github.com/factly/dega-server/service/core/action/tag"
factCheck "github.com/factly/dega-server/service/fact-check"
"github.com/factly/dega-server/service/podcast"
podcastAction "github.com/factly/dega-server/service/podcast/action"
"github.com/factly/dega-server/util"
"github.com/factly/x/loggerx"
"github.com/factly/x/middlewarex"
"github.com/go-chi/chi"
"github.com/go-chi/chi/middleware"
"github.com/spf13/viper"
httpSwagger "github.com/swaggo/http-swagger"
)
// RegisterRoutes builds the main API router: global middleware, optional
// swagger and /meta mounts, health checks, and the authenticated service
// routes. Middleware registration order is significant.
func RegisterRoutes() http.Handler {
	r := chi.NewRouter()

	// Global middleware chain (request ID first so later layers can log it).
	r.Use(middleware.RequestID)
	r.Use(loggerx.Init())
	r.Use(middleware.RealIP)
	r.Use(middleware.Recoverer)
	r.Use(middleware.Heartbeat("/ping"))
	// r.Use(middlewarex.GormRequestID(&config.DB))

	// Swagger UI is served only when mode is "development".
	if viper.IsSet("mode") && viper.GetString("mode") == "development" {
		r.Get("/swagger/*", httpSwagger.WrapHandler)
		fmt.Println("Swagger @ http://localhost:7789/swagger/index.html")
	}

	// /meta requires a configured iframely server.
	if viper.IsSet("iframely_url") {
		r.Mount("/meta", meta.Router())
	}

	// Readiness checks for the database and the dependent services.
	sqlDB, _ := config.DB.DB()
	healthx.RegisterRoutes(r, healthx.ReadyCheckers{
		"database":    sqlDB.Ping,
		"keto":        util.KetoChecker,
		"kavach":      util.KavachChecker,
		"kratos":      util.KratosChecker,
		"meilisearch": util.MeiliChecker,
	})

	// Routes requiring an authenticated user, a space, and "dega" app access;
	// fact-check and podcast additionally require per-space feature permission.
	r.With(middlewarex.CheckUser, middlewarex.CheckSpace(1), util.GenerateOrganisation, middlewarex.CheckAccess("dega", 1, util.GetOrganisation)).Group(func(r chi.Router) {
		r.Mount("/core", core.Router())
		r.With(util.FactCheckPermission).Mount("/fact-check", factCheck.Router())
		r.With(util.PodcastPermission).Mount("/podcast", podcast.Router())
	})

	// Permission-request creation needs only an authenticated user (and a
	// space for space requests).
	r.With(middlewarex.CheckUser).Group(func(r chi.Router) {
		r.Post("/core/requests/organisations", organisation.Create)
		r.With(middlewarex.CheckSpace(1)).Post("/core/requests/spaces", space.Create)
	})

	return r
}
// RegisterFeedsRoutes builds the public RSS-feed router, scoped per space.
func RegisterFeedsRoutes() http.Handler {
	router := chi.NewRouter()

	router.Use(middleware.RequestID)
	router.Use(loggerx.Init())
	router.Use(middleware.RealIP)
	router.Use(middleware.Recoverer)
	router.Use(middleware.Heartbeat("/ping"))

	router.Route("/spaces/{space_id}", func(sub chi.Router) {
		// Space-wide post feed.
		sub.Get("/posts/feed", post.Feeds)
		sub.Get("/posts/feeds/rss2", post.Feeds)

		// Feeds filtered by one or more slugs of a core entity; each entity
		// exposes both the /feed and /feeds/rss2 forms.
		slugFeeds := map[string]http.HandlerFunc{
			"tags":       tag.Feeds,
			"categories": category.Feeds,
			"formats":    format.Feeds,
			"authors":    author.Feeds,
		}
		for entity, handler := range slugFeeds {
			sub.Get("/"+entity+"/{slugs}/feed", handler)
			sub.Get("/"+entity+"/{slugs}/feeds/rss2", handler)
		}

		// Per-podcast feed, addressed by podcast slug.
		sub.Get("/podcasts/{podcast_slug}/feed", podcastAction.Feeds)
		sub.Get("/podcasts/{podcast_slug}/feeds/rss2", podcastAction.Feeds)
	})

	return router
}
<file_sep>/service/podcast/action/episode/create.go
package episode
import (
"context"
"encoding/json"
"errors"
"fmt"
"net/http"
"reflect"
"github.com/factly/dega-server/config"
"github.com/factly/dega-server/service/core/action/author"
"github.com/factly/dega-server/service/podcast/model"
"github.com/factly/dega-server/test"
"github.com/factly/dega-server/util"
"github.com/factly/x/errorx"
"github.com/factly/x/loggerx"
"github.com/factly/x/meilisearchx"
"github.com/factly/x/middlewarex"
"github.com/factly/x/renderx"
"github.com/factly/x/slugx"
"github.com/factly/x/validationx"
"gorm.io/gorm"
)
// create - Create episode
// @Summary Create episode
// @Description Create episode
// @Tags Episode
// @ID add-episode
// @Consume json
// @Produce json
// @Param X-User header string true "User ID"
// @Param X-Space header string true "Space ID"
// @Param Episode body episode true "Episode Object"
// @Success 201 {object} episodeData
// @Failure 400 {array} string
// @Router /podcast/episodes [post]
func create(w http.ResponseWriter, r *http.Request) {
	sID, err := middlewarex.GetSpace(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}

	uID, err := middlewarex.GetUser(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}

	episode := &episode{}
	err = json.NewDecoder(r.Body).Decode(&episode)
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.DecodeError()))
		return
	}

	validationError := validationx.Check(episode)
	if validationError != nil {
		loggerx.Error(errors.New("validation error"))
		errorx.Render(w, validationError)
		return
	}

	// Use the caller-supplied slug if valid, else derive one from the title.
	var episodeSlug string
	if episode.Slug != "" && slugx.Check(episode.Slug) {
		episodeSlug = episode.Slug
	} else {
		episodeSlug = slugx.Make(episode.Title)
	}

	// Get table name (needed for slug-uniqueness approval).
	stmt := &gorm.Statement{DB: config.DB}
	_ = stmt.Parse(&model.Episode{})
	tableName := stmt.Schema.Table

	// Zero-valued foreign keys are stored as NULL.
	mediumID := &episode.MediumID
	if episode.MediumID == 0 {
		mediumID = nil
	}
	podcastID := &episode.PodcastID
	if episode.PodcastID == 0 {
		podcastID = nil
	}

	// Store HTML description rendered from the editor JSON, when present.
	var description string
	if len(episode.Description.RawMessage) > 0 && !reflect.DeepEqual(episode.Description, test.NilJsonb()) {
		description, err = util.HTMLDescription(episode.Description)
		if err != nil {
			loggerx.Error(err)
			errorx.Render(w, errorx.Parser(errorx.GetMessage("cannot parse episode description", http.StatusUnprocessableEntity)))
			return
		}
	}

	result := &episodeData{}
	result.Episode = model.Episode{
		Title:           episode.Title,
		Description:     episode.Description,
		HTMLDescription: description,
		Slug:            slugx.Approve(&config.DB, episodeSlug, sID, tableName),
		Season:          episode.Season,
		Episode:         episode.Episode,
		AudioURL:        episode.AudioURL,
		PodcastID:       podcastID,
		PublishedDate:   episode.PublishedDate,
		MediumID:        mediumID,
		MetaFields:      episode.MetaFields,
		SpaceID:         uint(sID),
	}

	// Create the episode and its author links in one transaction.
	tx := config.DB.WithContext(context.WithValue(r.Context(), episodeUser, uID)).Begin()
	err = tx.Model(&model.Episode{}).Create(&result.Episode).Error
	if err != nil {
		tx.Rollback()
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.DBError()))
		return
	}

	if len(episode.AuthorIDs) > 0 {
		authorMap, err := author.All(r.Context())
		if err != nil {
			tx.Rollback()
			loggerx.Error(err)
			errorx.Render(w, errorx.Parser(errorx.DBError()))
			return
		}
		// Only link author IDs that actually exist in the organisation.
		episodeAuthors := make([]model.EpisodeAuthor, 0)
		for _, each := range episode.AuthorIDs {
			if _, found := authorMap[fmt.Sprint(each)]; found {
				ea := model.EpisodeAuthor{
					EpisodeID: result.ID,
					AuthorID:  each,
				}
				episodeAuthors = append(episodeAuthors, ea)
				result.Authors = append(result.Authors, authorMap[fmt.Sprint(each)])
			}
		}
		if err = tx.Model(&model.EpisodeAuthor{}).Create(&episodeAuthors).Error; err != nil {
			tx.Rollback()
			loggerx.Error(err)
			errorx.Render(w, errorx.Parser(errorx.DBError()))
			return
		}
	}

	tx.Model(&model.Episode{}).Preload("Podcast").Preload("Medium").First(&result.Episode)

	// Insert into meili index
	var publishedDate int64
	if result.PublishedDate == nil {
		publishedDate = 0
	} else {
		publishedDate = result.PublishedDate.Unix()
	}
	meiliObj := map[string]interface{}{
		"id":     result.Episode.ID,
		"kind":   "episode",
		"title":  result.Title,
		"slug":   result.Slug,
		"season": result.Season,
		// Fix: index the episode number. result.Episode resolves to the
		// embedded model.Episode struct (the depth-0 field shadows the
		// promoted numeric Episode field), so the original stored the whole
		// struct here instead of the number.
		"episode":        result.Episode.Episode,
		"audio_url":      result.AudioURL,
		"podcast_id":     result.PodcastID,
		"description":    result.Description,
		"published_date": publishedDate,
		"space_id":       result.SpaceID,
		"medium_id":      result.MediumID,
	}
	err = meilisearchx.AddDocument("dega", meiliObj)
	if err != nil {
		tx.Rollback()
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
		return
	}

	tx.Commit()

	if util.CheckNats() {
		if err = util.NC.Publish("episode.created", result); err != nil {
			loggerx.Error(err)
			errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
			return
		}
	}

	renderx.JSON(w, http.StatusCreated, result)
}
<file_sep>/cmd/migrate.go
package cmd
import (
"github.com/factly/dega-server/config"
coreModel "github.com/factly/dega-server/service/core/model"
factCheckModel "github.com/factly/dega-server/service/fact-check/model"
podcastModel "github.com/factly/dega-server/service/podcast/model"
"github.com/spf13/cobra"
)
// Register the migrate sub-command with the root command at package init.
func init() {
	rootCmd.AddCommand(migrateCmd)
}

// migrateCmd runs the schema migrations for every service module.
var migrateCmd = &cobra.Command{
	Use:   "migrate",
	Short: "Applies DB migrations for dega-server.",
	Run: func(cmd *cobra.Command, args []string) {
		// db setup
		config.SetupDB()
		// Run each module's migrations in sequence.
		factCheckModel.Migration()
		coreModel.Migration()
		podcastModel.Migration()
	},
}
<file_sep>/service/core/action/format/list.go
package format
import (
"net/http"
"github.com/factly/dega-server/config"
"github.com/factly/dega-server/service/core/model"
"github.com/factly/x/errorx"
"github.com/factly/x/loggerx"
"github.com/factly/x/middlewarex"
"github.com/factly/x/paginationx"
"github.com/factly/x/renderx"
)
// list response - paginated page of formats plus the unpaginated total count.
type paging struct {
	Total int64          `json:"total"` // total matching rows, ignoring limit/offset
	Nodes []model.Format `json:"nodes"` // the requested page of formats
}
// list - Get all formats
// @Summary Show all formats
// @Description Get all formats
// @Tags Format
// @ID get-all-formats
// @Produce json
// @Param X-User header string true "User ID"
// @Param X-Space header string true "Space ID"
// @Param limit query string false "limit per page"
// @Param page query string false "page number"
// @Success 200 {object} paging
// @Router /core/formats [get]
func list(w http.ResponseWriter, r *http.Request) {
	spaceID, err := middlewarex.GetSpace(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}

	offset, limit := paginationx.Parse(r.URL.Query())

	// Count the full result set, then fetch the requested page, newest first.
	response := paging{}
	response.Nodes = make([]model.Format, 0)
	err = config.DB.Model(&model.Format{}).
		Where(&model.Format{SpaceID: uint(spaceID)}).
		Count(&response.Total).
		Order("id desc").
		Offset(offset).
		Limit(limit).
		Find(&response.Nodes).Error
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.DBError()))
		return
	}

	renderx.JSON(w, http.StatusOK, response)
}
<file_sep>/test/service/core/user/permission_test.go
package user
import (
"net/http"
"net/http/httptest"
"testing"
"github.com/factly/dega-server/service"
"github.com/factly/dega-server/test"
"github.com/gavv/httpexpect"
"github.com/spf13/viper"
"gopkg.in/h2non/gock.v1"
)
// TestListUsersPermission exercises the user-permission endpoint against a
// mocked DB and mocked keto/kavach services. Sub-tests re-register gock mocks,
// so their order and the Persist() calls are significant.
func TestListUsersPermission(t *testing.T) {
	mock := test.SetupMockDB()

	test.MockServer()
	defer gock.DisableNetworking()
	testServer := httptest.NewServer(service.RegisterRoutes())
	gock.New(testServer.URL).EnableNetworking().Persist()
	defer gock.DisableNetworking()
	defer testServer.Close()

	// create httpexpect instance
	e := httpexpect.New(t, testServer.URL)

	t.Run("member requests permissions", func(t *testing.T) {
		// Replace the default keto mocks: role lookup 404s, so the caller is
		// treated as a non-admin and the request is rejected.
		test.DisableKetoGock(testServer.URL)
		test.CheckSpaceMock(mock)
		gock.New(viper.GetString("keto_url") + "/engines/acp/ory/regex/roles/(.+)").
			Persist().
			Reply(http.StatusNotFound)

		gock.New(viper.GetString("keto_url") + "/engines/acp/ory/regex/policies").
			Persist().
			Reply(http.StatusOK).
			JSON(test.Dummy_KetoPolicy)

		e.GET(permissionPath).
			WithHeaders(headers).
			WithPath("user_id", "1").
			Expect().
			Status(http.StatusUnauthorized)
	})

	t.Run("get logged admin's permissions", func(t *testing.T) {
		// Standard keto mocks: user 1 is admin and sees admin permissions.
		test.KetoGock()
		test.CheckSpaceMock(mock)

		e.GET(permissionPath).
			WithHeaders(headers).
			WithPath("user_id", "1").
			Expect().
			Status(http.StatusOK).
			JSON().
			Array().
			Contains(adminPermissionsResponse)
	})

	t.Run("admin requests member's permissions", func(t *testing.T) {
		// Admin asks for user 2's permissions; expects the member's policies.
		test.KetoGock()
		test.CheckSpaceMock(mock)

		e.GET(permissionPath).
			WithHeaders(headers).
			WithPath("user_id", "2").
			Expect().
			Status(http.StatusOK).
			JSON().
			Array().
			Contains(permissionsResponse[0], permissionsResponse[1])
	})

	t.Run("logged in user is admin and cannot get policies", func(t *testing.T) {
		// Admin check succeeds but the policy listing 404s -> 500 from server.
		test.DisableKetoGock(testServer.URL)
		test.CheckSpaceMock(mock)
		gock.New(viper.GetString("keto_url")).
			Post("/engines/acp/ory/regex/allowed").
			Persist().
			Reply(http.StatusOK)

		gock.New(viper.GetString("keto_url") + "/engines/acp/ory/regex/policies").
			Persist().
			Reply(http.StatusNotFound)

		e.GET(permissionPath).
			WithHeaders(headers).
			WithPath("user_id", "2").
			Expect().
			Status(http.StatusInternalServerError)
	})

	t.Run("when keto is down", func(t *testing.T) {
		// No keto mocks registered at all -> server cannot reach keto.
		test.DisableKetoGock(testServer.URL)
		test.CheckSpaceMock(mock)
		e.GET(permissionPath).
			WithHeaders(headers).
			WithPath("user_id", "1").
			Expect().
			Status(http.StatusInternalServerError)
	})

	t.Run("invalid user_id", func(t *testing.T) {
		// Non-numeric path param is rejected before any keto interaction.
		test.CheckSpaceMock(mock)
		e.GET(permissionPath).
			WithHeaders(headers).
			WithPath("user_id", "abc").
			Expect().
			Status(http.StatusBadRequest)
	})
}
<file_sep>/service/core/action/tag/create.go
package tag
import (
"context"
"encoding/json"
"errors"
"net/http"
"reflect"
"github.com/factly/dega-server/config"
"github.com/factly/dega-server/service/core/model"
"github.com/factly/dega-server/test"
"github.com/factly/dega-server/util"
"github.com/factly/x/errorx"
"github.com/factly/x/loggerx"
"github.com/factly/x/meilisearchx"
"github.com/factly/x/middlewarex"
"github.com/factly/x/renderx"
"github.com/factly/x/slugx"
"github.com/factly/x/validationx"
"gorm.io/gorm"
)
// create - Create tag
// @Summary Create tag
// @Description Create tag
// @Tags Tag
// @ID add-tag
// @Consume json
// @Produce json
// @Param X-User header string true "User ID"
// @Param X-Space header string true "Space ID"
// @Param Tag body tag true "Tag Object"
// @Success 201 {object} model.Tag
// @Failure 400 {array} string
// @Router /core/tags [post]
func create(w http.ResponseWriter, r *http.Request) {
	sID, err := middlewarex.GetSpace(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}

	uID, err := middlewarex.GetUser(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}

	tag := &tag{}
	err = json.NewDecoder(r.Body).Decode(&tag)
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.DecodeError()))
		return
	}

	validationError := validationx.Check(tag)
	if validationError != nil {
		loggerx.Error(errors.New("validation error"))
		errorx.Render(w, validationError)
		return
	}

	// Get table name (used for slug approval and the same-name check).
	stmt := &gorm.Statement{DB: config.DB}
	_ = stmt.Parse(&model.Tag{})
	tableName := stmt.Schema.Table

	// Use the caller's slug when valid, otherwise derive one from the name.
	var tagSlug string
	if tag.Slug != "" && slugx.Check(tag.Slug) {
		tagSlug = tag.Slug
	} else {
		tagSlug = slugx.Make(tag.Name)
	}

	// Check if tag with same name exist
	if util.CheckName(uint(sID), tag.Name, tableName) {
		loggerx.Error(errors.New(`tag with same name exist`))
		errorx.Render(w, errorx.Parser(errorx.SameNameExist()))
		return
	}

	// Store HTML description rendered from the editor JSON, when present.
	var description string
	if len(tag.Description.RawMessage) > 0 && !reflect.DeepEqual(tag.Description, test.NilJsonb()) {
		description, err = util.HTMLDescription(tag.Description)
		if err != nil {
			loggerx.Error(err)
			errorx.Render(w, errorx.Parser(errorx.GetMessage("cannot parse tag description", http.StatusUnprocessableEntity)))
			return
		}
	}

	result := &model.Tag{
		Name:            tag.Name,
		Slug:            slugx.Approve(&config.DB, tagSlug, sID, tableName),
		Description:     tag.Description,
		HTMLDescription: description,
		SpaceID:         uint(sID),
		IsFeatured:      tag.IsFeatured,
		MetaFields:      tag.MetaFields,
	}

	// Create the tag and index it inside one transaction; any failure after
	// the insert rolls the insert back.
	tx := config.DB.WithContext(context.WithValue(r.Context(), userContext, uID)).Begin()
	err = tx.Model(&model.Tag{}).Create(&result).Error

	if err != nil {
		tx.Rollback()
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.DBError()))
		return
	}

	// Insert into meili index
	meiliObj := map[string]interface{}{
		"id":          result.ID,
		"kind":        "tag",
		"name":        result.Name,
		"slug":        result.Slug,
		"description": result.Description,
		"space_id":    result.SpaceID,
	}

	err = meilisearchx.AddDocument("dega", meiliObj)
	if err != nil {
		tx.Rollback()
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
		return
	}

	tx.Commit()

	if util.CheckNats() {
		if err = util.NC.Publish("tag.created", result); err != nil {
			loggerx.Error(err)
			errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
			return
		}
	}

	renderx.JSON(w, http.StatusCreated, result)
}
<file_sep>/service/core/action/request/organisation/route.go
package organisation
import (
"net/http"
"github.com/factly/dega-server/config"
"github.com/factly/dega-server/util"
"github.com/factly/x/middlewarex"
"github.com/go-chi/chi"
"github.com/jinzhu/gorm/dialects/postgres"
)
// permissionContext is the context key under which the acting user ID is stored.
var permissionContext config.ContextKey = "org_perm_user"

// organisationPermissionRequest is the payload for requesting organisation
// permissions (e.g. a higher space quota).
type organisationPermissionRequest struct {
	OrganisationID uint           `json:"organisation_id" validate:"required"` // target organisation, required
	Title          string         `json:"title"`
	Description    postgres.Jsonb `json:"description" swaggertype:"primitive,string"` // free-form JSON description
	Spaces         int64          `json:"spaces"`                                     // requested number of spaces
}
// Router - CRUD services for organisation permission requests. Listing and
// moderation require super-organisation membership; /my does not.
func Router() http.Handler {
	router := chi.NewRouter()
	const app = "dega"

	superOrgOnly := middlewarex.CheckSuperOrganisation(app, util.GetOrganisation)

	router.With(superOrgOnly).Get("/", list)
	router.Get("/my", my)

	router.With(superOrgOnly).Route("/{request_id}", func(sub chi.Router) {
		sub.Get("/", details)
		sub.Delete("/", delete)
		sub.Post("/approve", approve)
		sub.Post("/reject", reject)
	})

	return router
}
<file_sep>/util/podcast_permission.go
package util
import (
"net/http"
"github.com/factly/dega-server/config"
"github.com/factly/dega-server/service/core/model"
"github.com/factly/x/middlewarex"
"github.com/spf13/viper"
)
// PodcastPermission is middleware that checks whether the caller's space has
// the podcast feature enabled; the check only applies when the
// create_super_organisation flag is set.
func PodcastPermission(h http.Handler) http.Handler {
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		// Without the super-organisation flag there is nothing to enforce.
		if !viper.GetBool("create_super_organisation") {
			h.ServeHTTP(w, r)
			return
		}

		spaceID, err := middlewarex.GetSpace(r.Context())
		if err != nil {
			w.WriteHeader(http.StatusUnauthorized)
			return
		}

		// Reject when the space has no permission row or podcast is disabled.
		var permission model.SpacePermission
		err = config.DB.Model(&model.SpacePermission{}).Where(&model.SpacePermission{
			SpaceID: uint(spaceID),
		}).First(&permission).Error
		if err != nil || !permission.Podcast {
			w.WriteHeader(http.StatusUnauthorized)
			return
		}

		h.ServeHTTP(w, r)
	})
}
<file_sep>/service/core/action/menu/update.go
package menu
import (
"encoding/json"
"errors"
"net/http"
"strconv"
"github.com/factly/dega-server/config"
"github.com/factly/dega-server/service/core/model"
"github.com/factly/dega-server/util"
"github.com/factly/x/errorx"
"github.com/factly/x/loggerx"
"github.com/factly/x/meilisearchx"
"github.com/factly/x/middlewarex"
"github.com/factly/x/renderx"
"github.com/factly/x/slugx"
"github.com/factly/x/validationx"
"github.com/go-chi/chi"
"gorm.io/gorm"
)
// update - Update menu by id
// @Summary Update a menu by id
// @Description Update menu by ID
// @Tags Menu
// @ID update-menu-by-id
// @Produce json
// @Consume json
// @Param X-User header string true "User ID"
// @Param menu_id path string true "Menu ID"
// @Param X-Space header string true "Space ID"
// @Param Menu body menu false "Menu"
// @Success 200 {object} model.Menu
// @Router /core/menus/{menu_id} [put]
func update(w http.ResponseWriter, r *http.Request) {
	sID, err := middlewarex.GetSpace(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}

	uID, err := middlewarex.GetUser(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}

	menuID := chi.URLParam(r, "menu_id")
	id, err := strconv.Atoi(menuID)

	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InvalidID()))
		return
	}

	menu := &menu{}
	err = json.NewDecoder(r.Body).Decode(&menu)

	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.DecodeError()))
		return
	}

	validationError := validationx.Check(menu)

	if validationError != nil {
		loggerx.Error(errors.New("validation error"))
		errorx.Render(w, validationError)
		return
	}

	result := model.Menu{}
	result.ID = uint(id)

	// check record exists or not (by primary key, scoped to the space)
	err = config.DB.Where(&model.Menu{
		SpaceID: uint(sID),
	}).First(&result).Error

	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.RecordNotFound()))
		return
	}

	var menuSlug string

	// Get table name (needed for slug-uniqueness approval)
	stmt := &gorm.Statement{DB: config.DB}
	_ = stmt.Parse(&model.Menu{})
	tableName := stmt.Schema.Table

	// Keep the slug when unchanged; otherwise approve the caller's slug if
	// valid, or derive a fresh one from the new name.
	if result.Slug == menu.Slug {
		menuSlug = result.Slug
	} else if menu.Slug != "" && slugx.Check(menu.Slug) {
		menuSlug = slugx.Approve(&config.DB, menu.Slug, sID, tableName)
	} else {
		menuSlug = slugx.Approve(&config.DB, slugx.Make(menu.Name), sID, tableName)
	}

	// Check if menu with same name exist (only when the name is changing)
	if menu.Name != result.Name && util.CheckName(uint(sID), menu.Name, tableName) {
		loggerx.Error(errors.New(`menu with same name exist`))
		errorx.Render(w, errorx.Parser(errorx.SameNameExist()))
		return
	}

	// Update the row and the meili document in one transaction.
	tx := config.DB.Begin()
	err = tx.Model(&result).Updates(model.Menu{
		Base:       config.Base{UpdatedByID: uint(uID)},
		Name:       menu.Name,
		Slug:       menuSlug,
		Menu:       menu.Menu,
		MetaFields: menu.MetaFields,
	}).First(&result).Error

	if err != nil {
		tx.Rollback()
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.DBError()))
		return
	}

	// Update into meili index
	meiliObj := map[string]interface{}{
		"id":       result.ID,
		"kind":     "menu",
		"name":     result.Name,
		"slug":     result.Slug,
		"menu":     result.Menu,
		"space_id": result.SpaceID,
	}

	err = meilisearchx.UpdateDocument("dega", meiliObj)
	if err != nil {
		tx.Rollback()
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
		return
	}

	tx.Commit()

	if util.CheckNats() {
		if err = util.NC.Publish("menu.updated", result); err != nil {
			loggerx.Error(err)
			errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
			return
		}
	}

	renderx.JSON(w, http.StatusOK, result)
}
<file_sep>/service/core/action/request/space/details.go
package space
import (
"net/http"
"strconv"
"github.com/factly/dega-server/config"
"github.com/factly/dega-server/service/core/model"
"github.com/factly/x/errorx"
"github.com/factly/x/loggerx"
"github.com/factly/x/renderx"
"github.com/go-chi/chi"
)
// details - Get space permissions requests detail
// @Summary Show a space permissions requests detail
// @Description Get space permissions requests detail
// @Tags Space_Permissions_Request
// @ID get-space-permission-request-by-id
// @Produce json
// @Param X-User header string true "User ID"
// @Param X-Space header string true "Space ID"
// @Param request_id path string true "Request ID"
// @Success 200 {object} model.SpacePermissionRequest
// @Router /core/requests/spaces/{request_id} [get]
func details(w http.ResponseWriter, r *http.Request) {
	// Parse and validate the request ID from the URL.
	id, err := strconv.Atoi(chi.URLParam(r, "request_id"))
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InvalidID()))
		return
	}

	// Fetch the permission request by primary key.
	request := model.SpacePermissionRequest{}
	request.ID = uint(id)
	if err = config.DB.First(&request).Error; err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.RecordNotFound()))
		return
	}

	renderx.JSON(w, http.StatusOK, request)
}
<file_sep>/service/core/action/event/details.go
package event
import (
"encoding/json"
"fmt"
"net/http"
"strconv"
"github.com/factly/dega-server/service/core/model"
"github.com/factly/x/errorx"
"github.com/factly/x/loggerx"
"github.com/factly/x/middlewarex"
"github.com/factly/x/renderx"
"github.com/factly/x/requestx"
"github.com/go-chi/chi"
"github.com/spf13/viper"
)
// details - Get event by id
// @Summary Show a event by id
// @Description Get event by ID
// @Tags Events
// @ID get-event-by-id
// @Produce json
// @Param X-User header string true "User ID"
// @Param X-Space header string true "Space ID"
// @Param event_id path string true "Event ID"
// @Success 200 {object} model.Event
// @Router /core/events/{event_id} [get]
// details proxies GET /core/events/{event_id} to the hukz service and renders
// the event it returns.
//
// Fixes over the previous version:
//   - the upstream response body is now closed (it was leaked, pinning the
//     underlying connection),
//   - non-200/non-404 upstream statuses are rejected before attempting to
//     decode the body, instead of decoding first and checking after.
func details(w http.ResponseWriter, r *http.Request) {
	eventID := chi.URLParam(r, "event_id")
	id, err := strconv.Atoi(eventID)
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InvalidID()))
		return
	}

	uID, err := middlewarex.GetUser(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}

	hukzURL := viper.GetString("hukz_url") + "/events/" + fmt.Sprint(id)

	resp, err := requestx.Request("GET", hukzURL, nil, map[string]string{
		"X-User": fmt.Sprint(uID),
	})
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
		return
	}
	// Must close the body or the upstream connection is leaked.
	defer resp.Body.Close()

	if resp.StatusCode == http.StatusNotFound {
		errorx.Render(w, errorx.Parser(errorx.RecordNotFound()))
		return
	}

	// Any other non-OK status is an upstream failure; do not try to decode it.
	if resp.StatusCode != http.StatusOK {
		errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
		return
	}

	var eventRes model.Event
	if err = json.NewDecoder(resp.Body).Decode(&eventRes); err != nil {
		errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
		return
	}

	renderx.JSON(w, http.StatusOK, eventRes)
}
<file_sep>/test/service/core/author/main_test.go
package author
import (
"os"
"testing"
"github.com/factly/dega-server/test"
"gopkg.in/h2non/gock.v1"
)
// headers carries the full auth context (user + space) expected by the
// author routes.
var headers = map[string]string{
	"X-Space": "1",
	"X-User":  "1",
}

// missingSpace omits X-Space to exercise space-authorization failures.
var missingSpace = map[string]string{
	"X-User": "1",
}

// missingUser omits X-User to exercise user-authorization failures.
var missingUser = map[string]string{
	"X-Space": "1",
}

// basePath is the author endpoint under test.
var basePath = "/core/authors"
// TestMain delegates to runAuthorTests so that the deferred gock cleanup
// actually executes: os.Exit terminates the process immediately and skips any
// defers registered in the same function, so the previous in-line defers
// (gock.Disable / gock.DisableNetworking) never ran.
func TestMain(m *testing.M) {
	os.Exit(runAuthorTests(m))
}

// runAuthorTests mocks the kavach server, runs the package's tests, and
// restores gock's global state before returning the exit code.
func runAuthorTests(m *testing.M) int {
	// Mock kavach server and allowing persisted external traffic
	defer gock.Disable()
	test.MockServer()
	defer gock.DisableNetworking()

	return m.Run()
}
<file_sep>/service/podcast/action/update.go
package podcast
import (
"encoding/json"
"errors"
"net/http"
"reflect"
"strconv"
"github.com/factly/dega-server/config"
coreModel "github.com/factly/dega-server/service/core/model"
"github.com/factly/dega-server/service/podcast/model"
"github.com/factly/dega-server/test"
"github.com/factly/dega-server/util"
"github.com/factly/x/errorx"
"github.com/factly/x/loggerx"
"github.com/factly/x/meilisearchx"
"github.com/factly/x/middlewarex"
"github.com/factly/x/renderx"
"github.com/factly/x/slugx"
"github.com/factly/x/validationx"
"github.com/go-chi/chi"
"gorm.io/gorm"
)
// update - Update podcast by id
// @Summary Update a podcast by id
// @Description Update podcast by ID
// @Tags Podcast
// @ID update-podcast-by-id
// @Produce json
// @Consume json
// @Param X-User header string true "User ID"
// @Param podcast_id path string true "Podcast ID"
// @Param X-Space header string true "Space ID"
// @Param Podcast body podcast false "Podcast"
// @Success 200 {object} model.Podcast
// @Router /podcast/{podcast_id} [put]
// update handles PUT /podcast/{podcast_id}: it validates the request body,
// refreshes the podcast's slug, categories, medium and primary category
// inside a DB transaction, re-indexes the record in Meilisearch, and
// optionally publishes a "podcast.updated" NATS event.
//
// Fix: the Updates call below now uses podcastSlug directly. podcastSlug is
// already either the record's own unchanged slug or the output of
// slugx.Approve; approving it a second time was at best a redundant DB
// round-trip and could mutate an unchanged slug (the record itself already
// owns it, so re-approval may append a numeric suffix).
func update(w http.ResponseWriter, r *http.Request) {
	sID, err := middlewarex.GetSpace(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}

	uID, err := middlewarex.GetUser(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}

	podcastID := chi.URLParam(r, "podcast_id")
	id, err := strconv.Atoi(podcastID)
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InvalidID()))
		return
	}

	podcast := &podcast{}
	err = json.NewDecoder(r.Body).Decode(&podcast)
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.DecodeError()))
		return
	}

	validationError := validationx.Check(podcast)
	if validationError != nil {
		loggerx.Error(errors.New("validation error"))
		errorx.Render(w, validationError)
		return
	}

	result := &model.Podcast{}
	result.ID = uint(id)

	// check record exists or not (scoped to the caller's space)
	err = config.DB.Where(&model.Podcast{
		SpaceID: uint(sID),
	}).First(&result).Error
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.RecordNotFound()))
		return
	}

	var podcastSlug string

	// Resolve the table name for slug-uniqueness and same-title checks.
	stmt := &gorm.Statement{DB: config.DB}
	_ = stmt.Parse(&model.Podcast{})
	tableName := stmt.Schema.Table

	// Keep an unchanged slug; otherwise approve the requested slug if valid,
	// else derive a fresh slug from the title.
	if result.Slug == podcast.Slug {
		podcastSlug = result.Slug
	} else if podcast.Slug != "" && slugx.Check(podcast.Slug) {
		podcastSlug = slugx.Approve(&config.DB, podcast.Slug, sID, tableName)
	} else {
		podcastSlug = slugx.Approve(&config.DB, slugx.Make(podcast.Title), sID, tableName)
	}

	// Check if podcast with same title exist
	if podcast.Title != result.Title && util.CheckName(uint(sID), podcast.Title, tableName) {
		loggerx.Error(errors.New(`podcast with same title exist`))
		errorx.Render(w, errorx.Parser(errorx.SameNameExist()))
		return
	}

	// Render the editor-JS description blocks into stored HTML.
	// NOTE(review): test.NilJsonb() is imported from the test package in
	// production code — consider moving that helper into util.
	var description string
	if len(podcast.Description.RawMessage) > 0 && !reflect.DeepEqual(podcast.Description, test.NilJsonb()) {
		description, err = util.HTMLDescription(podcast.Description)
		if err != nil {
			loggerx.Error(err)
			errorx.Render(w, errorx.Parser(errorx.GetMessage("cannot parse podcast description", http.StatusUnprocessableEntity)))
			return
		}
	}

	tx := config.DB.Begin()

	// Replace the category associations wholesale, or clear them when the
	// request carries none.
	newCategories := make([]coreModel.Category, 0)
	if len(podcast.CategoryIDs) > 0 {
		config.DB.Model(&coreModel.Category{}).Where(podcast.CategoryIDs).Find(&newCategories)
		if err = tx.Model(&result).Association("Categories").Replace(&newCategories); err != nil {
			tx.Rollback()
			loggerx.Error(err)
			errorx.Render(w, errorx.Parser(errorx.DBError()))
			return
		}
	} else {
		_ = config.DB.Model(&result).Association("Categories").Clear()
	}

	// A zero medium ID means "unset": null the column explicitly, since the
	// struct-based Updates below skips zero values.
	mediumID := &podcast.MediumID
	result.MediumID = &podcast.MediumID
	if podcast.MediumID == 0 {
		err = tx.Model(&result).Omit("Categories").Updates(map[string]interface{}{"medium_id": nil}).Error
		mediumID = nil
		if err != nil {
			tx.Rollback()
			loggerx.Error(err)
			errorx.Render(w, errorx.Parser(errorx.DBError()))
			return
		}
	}

	// Same unset-handling for the primary category.
	primaryCategoryID := &podcast.PrimaryCategoryID
	result.PrimaryCategoryID = &podcast.PrimaryCategoryID
	if podcast.PrimaryCategoryID == 0 {
		err = tx.Model(&result).Omit("Categories").Updates(map[string]interface{}{"primary_category_id": nil}).Error
		primaryCategoryID = nil
		if err != nil {
			tx.Rollback()
			loggerx.Error(err)
			errorx.Render(w, errorx.Parser(errorx.DBError()))
			return
		}
	}

	tx.Model(&result).Omit("Categories").Updates(model.Podcast{
		Base:              config.Base{UpdatedByID: uint(uID)},
		HTMLDescription:   description,
		Description:       podcast.Description,
		Slug:              podcastSlug, // already approved above; do not approve twice
		Language:          podcast.Language,
		MediumID:          mediumID,
		PrimaryCategoryID: primaryCategoryID,
		HeaderCode:        podcast.HeaderCode,
		FooterCode:        podcast.FooterCode,
		MetaFields:        podcast.MetaFields,
	}).Preload("Categories").Preload("Medium").First(&result)

	// Update into meili index
	meiliObj := map[string]interface{}{
		"id":           result.ID,
		"kind":         "podcast",
		"title":        result.Title,
		"slug":         result.Slug,
		"description":  result.Description,
		"language":     result.Language,
		"category_ids": podcast.CategoryIDs,
		"space_id":     result.SpaceID,
		"medium_id":    result.MediumID,
	}

	err = meilisearchx.UpdateDocument("dega", meiliObj)
	if err != nil {
		tx.Rollback()
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
		return
	}

	tx.Commit()

	if util.CheckNats() {
		if err = util.NC.Publish("podcast.updated", result); err != nil {
			loggerx.Error(err)
			errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
			return
		}
	}

	renderx.JSON(w, http.StatusOK, result)
}
<file_sep>/test/service/core/post/list_test.go
package post
import (
"net/http"
"net/http/httptest"
"regexp"
"testing"
"time"
"github.com/DATA-DOG/go-sqlmock"
"github.com/factly/dega-server/service"
"github.com/factly/dega-server/test"
"github.com/factly/dega-server/test/service/fact-check/claim"
"github.com/gavv/httpexpect/v2"
"github.com/spf13/viper"
"gopkg.in/h2non/gock.v1"
)
// TestPostList covers GET /core/posts: the empty list, the full list,
// pagination, tag/category/author/format filters (with and without a search
// query), and the meilisearch failure modes. The sqlmock expectations are
// order-sensitive — they must mirror the exact query sequence the handler
// issues, so do not reorder them.
func TestPostList(t *testing.T) {
	mock := test.SetupMockDB()

	test.MockServer()
	testServer := httptest.NewServer(service.RegisterRoutes())
	gock.New(testServer.URL).EnableNetworking().Persist()
	defer gock.DisableNetworking()
	defer testServer.Close()

	// create httpexpect instance
	e := httpexpect.New(t, testServer.URL)

	t.Run("get empty list of posts", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		postCountQuery(mock, 0)

		mock.ExpectQuery(selectQuery).
			WillReturnRows(sqlmock.NewRows(columns))

		mock.ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "post_claims"`)).
			WillReturnRows(sqlmock.NewRows([]string{"id", "created_at", "updated_at", "deleted_at", "name", "slug", "space_id"}).
				AddRow(1, time.Now(), time.Now(), nil, "Tag test 1", "tag-test-1", 1))

		mock.ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "post_authors"`)).
			WillReturnRows(sqlmock.NewRows([]string{"id", "created_at", "updated_at", "deleted_at", "name", "slug", "space_id"}).
				AddRow(1, time.Now(), time.Now(), nil, "Tag test 1", "tag-test-1", 1))

		e.GET(basePath).
			WithHeaders(headers).
			Expect().
			Status(http.StatusOK).
			JSON().
			Object().
			ContainsMap(map[string]interface{}{"total": 0})

		test.ExpectationsMet(t, mock)
	})

	t.Run("get non-empty list of posts", func(t *testing.T) {
		postListMock(mock)

		// featured_medium_id is dropped because the response serializes it
		// differently from the fixture — TODO confirm against the handler.
		delete(postList[0], "featured_medium_id")
		e.GET(basePath).
			WithHeaders(headers).
			Expect().
			Status(http.StatusOK).
			JSON().
			Object().
			ContainsMap(map[string]interface{}{"total": len(postList)}).
			Value("nodes").
			Array().
			Element(0).
			Object().
			ContainsMap(postList[0])

		test.ExpectationsMet(t, mock)
	})

	t.Run("get posts with pagination", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		postCountQuery(mock, len(postList))

		// Page 2 with limit 1 should return the second fixture row only.
		mock.ExpectQuery(paginationQuery).
			WillReturnRows(sqlmock.NewRows(columns).
				AddRow(2, time.Now(), time.Now(), nil, 1, 1, postList[1]["title"], postList[1]["subtitle"], postList[1]["slug"], postList[1]["status"], postList[1]["page"], postList[1]["excerpt"],
					postList[1]["description"], postList[1]["html_description"], postList[1]["is_featured"], postList[1]["is_sticky"], postList[1]["is_highlighted"], postList[1]["featured_medium_id"], postList[1]["format_id"], postList[1]["published_date"], 1))

		preloadMock(mock, sqlmock.AnyArg())

		mock.ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "post_claims"`)).
			WithArgs(sqlmock.AnyArg()).
			WillReturnRows(sqlmock.NewRows([]string{"id", "created_at", "updated_at", "deleted_at", "claim_id", "post_id", "position"}).
				AddRow(1, time.Now(), time.Now(), nil, 1, 1, 1))

		claim.SelectWithOutSpace(mock, claim.Data)

		mock.ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "post_authors"`)).
			WithArgs(sqlmock.AnyArg()).
			WillReturnRows(sqlmock.NewRows([]string{"id", "created_at", "updated_at", "deleted_at", "author_id", "post_id"}).
				AddRow(1, time.Now(), time.Now(), nil, 1, 1))

		delete(postList[1], "featured_medium_id")
		e.GET(basePath).
			WithQueryObject(map[string]interface{}{
				"limit": "1",
				"page":  "2",
			}).
			WithHeaders(headers).
			Expect().
			Status(http.StatusOK).
			JSON().
			Object().
			ContainsMap(map[string]interface{}{"total": len(postList)}).
			Value("nodes").
			Array().
			Element(0).
			Object().
			ContainsMap(postList[1])

		test.ExpectationsMet(t, mock)
	})

	t.Run("get list of posts based on filters", func(t *testing.T) {
		postListWithFiltersMock(mock)

		e.GET(basePath).
			WithHeaders(headers).
			WithQueryObject(map[string]interface{}{
				"tag":      "2",
				"category": "2",
				"author":   "2",
			}).
			Expect().
			Status(http.StatusOK).
			JSON().
			Object().
			ContainsMap(map[string]interface{}{"total": len(postList)}).
			Value("nodes").
			Array().
			Element(0).
			Object().
			ContainsMap(postList[0])

		test.ExpectationsMet(t, mock)
	})

	t.Run("get list of posts based on filters and query", func(t *testing.T) {
		// A "q" parameter routes the request through meilisearch in addition
		// to the DB filters.
		postListWithFiltersMock(mock)

		e.GET(basePath).
			WithHeaders(headers).
			WithQueryObject(map[string]interface{}{
				"tag":      "2",
				"category": "2",
				"q":        "test",
				"author":   "1",
				"format":   "2",
				"status":   "publish",
			}).
			Expect().
			Status(http.StatusOK).
			JSON().
			Object().
			ContainsMap(map[string]interface{}{"total": len(postList)}).
			Value("nodes").
			Array().
			Element(0).
			Object().
			ContainsMap(postList[0])

		test.ExpectationsMet(t, mock)
	})

	t.Run("when query does not match any post", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		// Re-stub meili with an empty search result for this case.
		test.DisableMeiliGock(testServer.URL)
		gock.New(viper.GetString("meili_url") + "/indexes/dega/search").
			HeaderPresent("X-Meili-API-Key").
			Persist().
			Reply(http.StatusOK).
			JSON(test.EmptyMeili)

		e.GET(basePath).
			WithHeaders(headers).
			WithQueryObject(map[string]interface{}{
				"tag":      "2",
				"category": "2",
				"q":        "test",
			}).
			Expect().
			Status(http.StatusOK).
			JSON().
			Object().
			Value("total").
			Equal(0)

		test.ExpectationsMet(t, mock)
	})

	t.Run("when meili is down", func(t *testing.T) {
		// With meili unreachable a search query degrades to zero results
		// rather than an error.
		test.CheckSpaceMock(mock)
		test.DisableMeiliGock(testServer.URL)

		e.GET(basePath).
			WithHeaders(headers).
			WithQueryObject(map[string]interface{}{
				"tag":    "2",
				"q":      "test",
				"author": "1",
			}).
			Expect().
			Status(http.StatusOK).
			JSON().
			Object().
			Value("total").
			Equal(0)

		test.ExpectationsMet(t, mock)
	})
}
<file_sep>/test/service/core/search/testvars.go
package search
// headers supplies the user/space auth context for search requests.
var headers = map[string]string{
	"X-User":  "1",
	"X-Space": "1",
}

// Data is a well-formed search request body.
var Data = map[string]interface{}{
	"q":       "test",
	"limit":   10,
	"filters": "kind=category",
}

// undecodableData carries wrong JSON types ("q" numeric, "limit" string),
// so decoding into the search request struct fails.
var undecodableData = map[string]interface{}{
	"q":     10,
	"limit": "10",
}

// invalidData decodes but fails validation — presumably the query is too
// short and/or the limit exceeds the allowed maximum; confirm against the
// search handler's validator.
var invalidData = map[string]interface{}{
	"q":       "te",
	"limit":   100,
	"filters": "kind=category",
}

// path is the search endpoint under test.
var path string = "/core/search"
<file_sep>/test/service/core/space/update_test.go
package space
import (
"net/http"
"net/http/httptest"
"testing"
"time"
"github.com/DATA-DOG/go-sqlmock"
"github.com/factly/dega-server/service"
"github.com/factly/dega-server/test"
"github.com/factly/dega-server/test/service/core/medium"
"github.com/gavv/httpexpect"
"gopkg.in/h2non/gock.v1"
)
// TestSpaceUpdate covers PUT /core/spaces/{space_id}: the happy path, input
// validation failures, missing records, each media foreign key missing, the
// "media id = 0 means unset" paths, and keto/meili outages. The sqlmock
// expectations are order-sensitive; do not reorder them.
func TestSpaceUpdate(t *testing.T) {
	mock := test.SetupMockDB()

	test.MockServer()
	// NOTE(review): this defer is duplicated three lines below; harmless but
	// redundant.
	defer gock.DisableNetworking()
	testServer := httptest.NewServer(service.RegisterRoutes())
	gock.New(testServer.URL).EnableNetworking().Persist()
	defer gock.DisableNetworking()
	defer testServer.Close()

	// create httpexpect instance
	e := httpexpect.New(t, testServer.URL)

	t.Run("update a space", func(t *testing.T) {
		updateMock(mock)
		mock.ExpectCommit()

		e.PUT(path).
			WithPath("space_id", "1").
			WithHeader("X-User", "1").
			WithJSON(Data).
			Expect().
			Status(http.StatusOK).
			JSON().
			Object().
			ContainsMap(resData)
		test.ExpectationsMet(t, mock)
	})

	t.Run("invalid space id", func(t *testing.T) {
		e.PUT(path).
			WithPath("space_id", "invalid").
			WithHeader("X-User", "1").
			WithJSON(Data).
			Expect().
			Status(http.StatusBadRequest)
	})

	t.Run("unprocessable space body", func(t *testing.T) {
		e.PUT(path).
			WithPath("space_id", "1").
			WithHeader("X-User", "1").
			WithJSON(invalidData).
			Expect().
			Status(http.StatusUnprocessableEntity)
	})

	t.Run("undecodable space body", func(t *testing.T) {
		// No body at all also yields a 422 from the decoder.
		e.PUT(path).
			WithPath("space_id", "1").
			WithHeader("X-User", "1").
			Expect().
			Status(http.StatusUnprocessableEntity)
	})

	t.Run("space record does not exist", func(t *testing.T) {
		mock.ExpectQuery(selectQuery).
			WithArgs(1).
			WillReturnRows(sqlmock.NewRows(Columns))

		e.PUT(path).
			WithPath("space_id", "1").
			WithHeader("X-User", "1").
			WithJSON(Data).
			Expect().
			Status(http.StatusNotFound)
		test.ExpectationsMet(t, mock)
	})

	// The next four subtests make the media lookup fail at successive
	// positions (logo, mobile logo, fav icon, mobile icon); the handler
	// resolves them in that order and rolls back on the first miss.
	t.Run("logo does not exist", func(t *testing.T) {
		mock.ExpectQuery(selectQuery).
			WithArgs(1).
			WillReturnRows(sqlmock.NewRows(Columns).
				AddRow(1, time.Now(), time.Now(), nil, 1, 1, "name", "slug", "site_title", "tag_line", nilJsonb(), "site_address", 1, 1, 1, 1, nilJsonb(), nilJsonb(), nilJsonb(), nilJsonb(), 1))

		mock.ExpectBegin()
		slugCheckMock(mock)
		mediumNotFound(mock)
		mock.ExpectRollback()

		e.PUT(path).
			WithPath("space_id", "1").
			WithHeader("X-User", "1").
			WithJSON(Data).
			Expect().
			Status(http.StatusInternalServerError)
		test.ExpectationsMet(t, mock)
	})

	t.Run("logo mobile does not exist", func(t *testing.T) {
		mock.ExpectQuery(selectQuery).
			WithArgs(1).
			WillReturnRows(sqlmock.NewRows(Columns).
				AddRow(1, time.Now(), time.Now(), nil, 1, 1, "name", "slug", "site_title", "tag_line", nilJsonb(), "site_address", 1, 1, 1, 1, nilJsonb(), nilJsonb(), nilJsonb(), nilJsonb(), 1))

		mock.ExpectBegin()
		slugCheckMock(mock)
		medium.SelectWithSpace(mock)
		mediumNotFound(mock)
		mock.ExpectRollback()

		e.PUT(path).
			WithPath("space_id", "1").
			WithHeader("X-User", "1").
			WithJSON(Data).
			Expect().
			Status(http.StatusInternalServerError)
		test.ExpectationsMet(t, mock)
	})

	t.Run("fav icon does not exist", func(t *testing.T) {
		mock.ExpectQuery(selectQuery).
			WithArgs(1).
			WillReturnRows(sqlmock.NewRows(Columns).
				AddRow(1, time.Now(), time.Now(), nil, 1, 1, "name", "slug", "site_title", "tag_line", nilJsonb(), "site_address", 1, 1, 1, 1, nilJsonb(), nilJsonb(), nilJsonb(), nilJsonb(), 1))

		mock.ExpectBegin()
		slugCheckMock(mock)
		medium.SelectWithSpace(mock)
		medium.SelectWithSpace(mock)
		mediumNotFound(mock)
		mock.ExpectRollback()

		e.PUT(path).
			WithPath("space_id", "1").
			WithHeader("X-User", "1").
			WithJSON(Data).
			Expect().
			Status(http.StatusInternalServerError)
		test.ExpectationsMet(t, mock)
	})

	t.Run("mobile icon does not exist", func(t *testing.T) {
		mock.ExpectQuery(selectQuery).
			WithArgs(1).
			WillReturnRows(sqlmock.NewRows(Columns).
				AddRow(1, time.Now(), time.Now(), nil, 1, 1, "name", "slug", "site_title", "tag_line", nilJsonb(), "site_address", 1, 1, 1, 1, nilJsonb(), nilJsonb(), nilJsonb(), nilJsonb(), 1))

		mock.ExpectBegin()
		slugCheckMock(mock)
		medium.SelectWithSpace(mock)
		medium.SelectWithSpace(mock)
		medium.SelectWithSpace(mock)
		mediumNotFound(mock)
		mock.ExpectRollback()

		e.PUT(path).
			WithPath("space_id", "1").
			WithHeader("X-User", "1").
			WithJSON(Data).
			Expect().
			Status(http.StatusInternalServerError)
		test.ExpectationsMet(t, mock)
	})

	// The next four subtests set one media ID to 0 ("unset") at a time; the
	// shared Data fixture is mutated before the request and restored after.
	t.Run("update space when logo_id = 0", func(t *testing.T) {
		oneMediaIDZeroMock(mock, test.AnyTime{}, 1, Data["name"], Data["slug"], Data["site_title"], Data["tag_line"], Data["description"], Data["site_address"], Data["logo_mobile_id"], Data["fav_icon_id"], Data["mobile_icon_id"], Data["verification_codes"], Data["social_media_urls"], Data["contact_info"], Data["analytics"], 1)

		Data["logo_id"] = 0
		e.PUT(path).
			WithPath("space_id", "1").
			WithHeader("X-User", "1").
			WithJSON(Data).
			Expect().
			Status(http.StatusOK)

		Data["logo_id"] = 1
		test.ExpectationsMet(t, mock)
	})

	t.Run("update space when logo_mobile_id = 0", func(t *testing.T) {
		oneMediaIDZeroMock(mock, test.AnyTime{}, 1, Data["name"], Data["slug"], Data["site_title"], Data["tag_line"], Data["description"], Data["site_address"], Data["logo_id"], Data["fav_icon_id"], Data["mobile_icon_id"], Data["verification_codes"], Data["social_media_urls"], Data["contact_info"], Data["analytics"], 1)

		Data["logo_mobile_id"] = 0
		e.PUT(path).
			WithPath("space_id", "1").
			WithHeader("X-User", "1").
			WithJSON(Data).
			Expect().
			Status(http.StatusOK)

		Data["logo_mobile_id"] = 1
		test.ExpectationsMet(t, mock)
	})

	t.Run("update space when fav_icon_id = 0", func(t *testing.T) {
		oneMediaIDZeroMock(mock, test.AnyTime{}, 1, Data["name"], Data["slug"], Data["site_title"], Data["tag_line"], Data["description"], Data["site_address"], Data["logo_id"], Data["logo_mobile_id"], Data["mobile_icon_id"], Data["verification_codes"], Data["social_media_urls"], Data["contact_info"], Data["analytics"], 1)

		Data["fav_icon_id"] = 0
		e.PUT(path).
			WithPath("space_id", "1").
			WithHeader("X-User", "1").
			WithJSON(Data).
			Expect().
			Status(http.StatusOK)

		Data["fav_icon_id"] = 1
		test.ExpectationsMet(t, mock)
	})

	t.Run("update space when mobile_icon_id = 0", func(t *testing.T) {
		oneMediaIDZeroMock(mock, test.AnyTime{}, 1, Data["name"], Data["slug"], Data["site_title"], Data["tag_line"], Data["description"], Data["site_address"], Data["logo_id"], Data["logo_mobile_id"], Data["fav_icon_id"], Data["verification_codes"], Data["social_media_urls"], Data["contact_info"], Data["analytics"], 1)

		Data["mobile_icon_id"] = 0
		e.PUT(path).
			WithPath("space_id", "1").
			WithHeader("X-User", "1").
			WithJSON(Data).
			Expect().
			Status(http.StatusOK)

		Data["mobile_icon_id"] = 1
		test.ExpectationsMet(t, mock)
	})

	t.Run("update a space when keto is down", func(t *testing.T) {
		test.DisableKetoGock(testServer.URL)

		e.PUT(path).
			WithPath("space_id", "1").
			WithHeader("X-User", "1").
			WithJSON(Data).
			Expect().
			Status(http.StatusUnauthorized)
		test.ExpectationsMet(t, mock)
	})

	t.Run("update a space when meili is down", func(t *testing.T) {
		test.DisableMeiliGock(testServer.URL)
		updateMock(mock)
		mock.ExpectRollback()

		e.PUT(path).
			WithPath("space_id", "1").
			WithHeader("X-User", "1").
			WithJSON(Data).
			Expect().
			Status(http.StatusInternalServerError)
		test.ExpectationsMet(t, mock)
	})
}
<file_sep>/config/vars.go
package config
import (
"log"
"github.com/spf13/viper"
)
// SetupVars setups all the config variables to run application
// SetupVars loads the application configuration (config file in the working
// directory plus DEGA_*-prefixed environment variables) and aborts the
// process if any mandatory parameter is missing. The long chain of identical
// IsSet checks is collapsed into a table-driven loop; the fatal messages and
// the order in which missing params are reported are unchanged.
func SetupVars() {
	viper.AddConfigPath(".")
	viper.SetConfigName("config")
	viper.SetEnvPrefix("dega")
	viper.AutomaticEnv()

	if err := viper.ReadInConfig(); err != nil {
		// Not fatal: every param may still be supplied via environment vars.
		log.Println("config file not found...")
	}

	// Unconditionally required parameters, in the original check order.
	requiredParams := []string{
		"database_host",
		"database_user",
		"database_name",
		"database_password",
		"database_port",
		"database_ssl_mode",
		"kavach_url",
		"keto_url",
		"meili_url",
		"meili_key",
		"google_key",
	}
	for _, param := range requiredParams {
		if !viper.IsSet(param) {
			log.Fatal("please provide " + param + " config param")
		}
	}

	// Kept separate: its message differs from the generic template.
	if !viper.IsSet("create_super_organisation") {
		log.Fatal("please provide create_super_organisation (bool) config param")
	}

	if viper.GetBool("create_super_organisation") {
		// Creating the super organisation requires access to kratos.
		if !viper.IsSet("kratos_public_url") {
			log.Fatal("please provide kratos_public_url config param")
		}
	}
}
<file_sep>/service/core/model/menu.go
package model
import (
"github.com/factly/dega-server/config"
"github.com/jinzhu/gorm/dialects/postgres"
"gorm.io/gorm"
)
// Menu model represents a navigation menu belonging to a space. The Menu
// column stores the menu tree itself as raw JSON; MetaFields holds arbitrary
// space-defined metadata.
type Menu struct {
	config.Base
	Name       string         `gorm:"column:name" json:"name" validate:"required"`
	Slug       string         `gorm:"column:slug" json:"slug" validate:"required"`
	Menu       postgres.Jsonb `gorm:"column:menu" json:"menu" swaggertype:"primitive,string"`
	SpaceID    uint           `gorm:"column:space_id" json:"space_id"`
	MetaFields postgres.Jsonb `gorm:"column:meta_fields" json:"meta_fields" swaggertype:"primitive,string"`
	// Space is the owning space, preloaded on demand and omitted otherwise.
	Space *Space `json:"space,omitempty"`
}
// menuUser is the context key under which the acting user's ID is placed for
// the menu hooks.
var menuUser config.ContextKey = "menu_user"

// BeforeCreate is a GORM hook that stamps the menu's created-by/updated-by
// fields from the user ID carried in the statement context, if present.
// Uses a checked type assertion so an unexpected value type cannot panic
// inside the hook — the stamp is simply skipped (previously a bare
// userID.(int) would panic on any non-int value).
func (menu *Menu) BeforeCreate(tx *gorm.DB) error {
	ctx := tx.Statement.Context
	userID := ctx.Value(menuUser)

	if userID == nil {
		return nil
	}

	uID, ok := userID.(int)
	if !ok {
		return nil
	}

	menu.CreatedByID = uint(uID)
	menu.UpdatedByID = uint(uID)
	return nil
}
<file_sep>/service/core/action/page/details.go
package page
import (
"fmt"
"net/http"
"strconv"
"github.com/factly/dega-server/config"
"github.com/factly/dega-server/service/core/action/author"
"github.com/factly/dega-server/service/core/model"
"github.com/factly/x/errorx"
"github.com/factly/x/loggerx"
"github.com/factly/x/middlewarex"
"github.com/factly/x/renderx"
"github.com/go-chi/chi"
)
// details - Get page by id
// @Summary Show a page by id
// @Description Get page by ID
// @Tags Page
// @ID get-page-by-id
// @Produce json
// @Param X-User header string true "User ID"
// @Param X-Space header string true "Space ID"
// @Param page_id path string true "Page ID"
// @Success 200 {object} pageData
// @Router /core/pages/{page_id} [get]
// details fetches a single page (a Post flagged IsPage) scoped to the
// caller's space, then resolves and attaches its authors from the external
// author service.
func details(w http.ResponseWriter, r *http.Request) {
	sID, err := middlewarex.GetSpace(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}

	pageID := chi.URLParam(r, "page_id")
	id, err := strconv.Atoi(pageID)
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InvalidID()))
		return
	}

	result := &pageData{}
	result.Authors = make([]model.Author, 0)
	postAuthors := []model.PostAuthor{}
	// Assumes pageData embeds model.Post, so setting result.ID also seeds the
	// primary key used by the First(&result.Post) lookup below — TODO confirm.
	result.ID = uint(id)

	err = config.DB.Model(&model.Post{}).Preload("Medium").Preload("Format").Preload("Tags").Preload("Categories").Where(&model.Post{
		SpaceID: uint(sID),
		IsPage:  true, // only pages, never regular posts
	}).First(&result.Post).Error

	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.RecordNotFound()))
		return
	}

	// fetch all authors
	config.DB.Model(&model.PostAuthor{}).Where(&model.PostAuthor{
		PostID: uint(id),
	}).Find(&postAuthors)

	// Adding author: author.All returns the space's author map keyed by the
	// author ID rendered as a string.
	authors, err := author.All(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
		return
	}

	// Keep only authors the service still knows about; unknown IDs are
	// silently dropped.
	for _, postAuthor := range postAuthors {
		aID := fmt.Sprint(postAuthor.AuthorID)
		if author, found := authors[aID]; found {
			result.Authors = append(result.Authors, author)
		}
	}

	renderx.JSON(w, http.StatusOK, result)
}
<file_sep>/test/service/fact-check/claim/create_test.go
package claim
import (
"net/http"
"net/http/httptest"
"testing"
"github.com/factly/dega-server/service"
"github.com/factly/dega-server/test"
"github.com/factly/dega-server/test/service/core/permissions/space"
"github.com/gavv/httpexpect/v2"
"github.com/jinzhu/gorm/dialects/postgres"
"gopkg.in/h2non/gock.v1"
)
// TestClaimCreate covers POST /fact-check/claims: validation and decoding
// failures, successful creation (with and without a caller-supplied slug),
// description parse errors, claimant/rating foreign-key violations, and a
// meilisearch outage. The sqlmock expectations are order-sensitive.
func TestClaimCreate(t *testing.T) {
	mock := test.SetupMockDB()

	test.MockServer()
	defer gock.DisableNetworking()
	testServer := httptest.NewServer(service.RegisterRoutes())
	gock.New(testServer.URL).EnableNetworking().Persist()
	defer gock.DisableNetworking()
	defer testServer.Close()

	// create httpexpect instance
	e := httpexpect.New(t, testServer.URL)

	t.Run("Unprocessable claim", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)

		e.POST(basePath).
			WithJSON(invalidData).
			WithHeaders(headers).
			Expect().
			Status(http.StatusUnprocessableEntity)
	})

	t.Run("Unable to decode claim", func(t *testing.T) {
		// Missing request body triggers the decoder error path.
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)

		e.POST(basePath).
			WithHeaders(headers).
			Expect().
			Status(http.StatusUnprocessableEntity)
	})

	t.Run("create claim", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)
		slugCheckMock(mock, Data)
		claimInsertMock(mock)
		SelectWithOutSpace(mock, Data)
		mock.ExpectCommit()

		result := e.POST(basePath).
			WithHeaders(headers).
			WithJSON(Data).
			Expect().
			Status(http.StatusCreated).JSON().Object().ContainsMap(Data)
		validateAssociations(result)
		test.ExpectationsMet(t, mock)
	})

	t.Run("cannot parse claim description", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)

		// Swap in a description that is not valid editor-JS, then restore the
		// shared fixture afterwards.
		Data["description"] = postgres.Jsonb{
			RawMessage: []byte(`{"block": "new"}`),
		}

		e.POST(basePath).
			WithHeaders(headers).
			WithJSON(Data).
			Expect().
			Status(http.StatusUnprocessableEntity)

		Data["description"] = postgres.Jsonb{
			RawMessage: []byte(`{"time":1617039625490,"blocks":[{"type":"paragraph","data":{"text":"Test Description"}}],"version":"2.19.0"}`),
		}
		test.ExpectationsMet(t, mock)
	})

	t.Run("create claim with slug is empty", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)
		slugCheckMock(mock, Data)
		claimInsertMock(mock)
		SelectWithOutSpace(mock, Data)
		mock.ExpectCommit()

		// With an empty slug the handler derives one from the title.
		Data["slug"] = ""
		result := e.POST(basePath).
			WithHeaders(headers).
			WithJSON(Data).
			Expect().
			Status(http.StatusCreated).JSON().Object()
		Data["slug"] = "claim"
		result.ContainsMap(Data)

		validateAssociations(result)
		test.ExpectationsMet(t, mock)
	})

	t.Run("claimant does not belong same space", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)
		slugCheckMock(mock, Data)
		claimantFKError(mock)

		e.POST(basePath).
			WithHeaders(headers).
			WithJSON(Data).
			Expect().
			Status(http.StatusInternalServerError)
		test.ExpectationsMet(t, mock)
	})

	t.Run("rating does not belong same space", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)
		slugCheckMock(mock, Data)
		ratingFKError(mock)

		e.POST(basePath).
			WithHeaders(headers).
			WithJSON(Data).
			Expect().
			Status(http.StatusInternalServerError)
		test.ExpectationsMet(t, mock)
	})

	t.Run("create claim when meili is down", func(t *testing.T) {
		// The insert succeeds but the meili index update fails, so the
		// transaction is rolled back.
		test.DisableMeiliGock(testServer.URL)
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)
		slugCheckMock(mock, Data)
		claimInsertMock(mock)
		SelectWithOutSpace(mock, Data)
		mock.ExpectRollback()

		e.POST(basePath).
			WithHeaders(headers).
			WithJSON(Data).
			Expect().
			Status(http.StatusInternalServerError)
		test.ExpectationsMet(t, mock)
	})
}
<file_sep>/service/core/action/permissions/space/list.go
package space
import (
"net/http"
"github.com/factly/dega-server/config"
"github.com/factly/dega-server/service/core/model"
"github.com/factly/x/renderx"
)
// spaceWithPermissions pairs a space with its permission record; Permission
// is nil when the space has no permission row.
type spaceWithPermissions struct {
	model.Space
	Permission *model.SpacePermission `json:"permission"`
}

// paging is the list-response envelope: the page of nodes plus the total
// number of spaces.
type paging struct {
	Nodes []spaceWithPermissions `json:"nodes"`
	Total int64                  `json:"total"`
}
// list - Get all Space permissions
// @Summary Show all Space permissions
// @Description Get all Space permissions
// @Tags Space_Permissions
// @ID get-all-space-permissions
// @Produce json
// @Param X-User header string true "User ID"
// @Param X-Space header string true "Space ID"
// @Param q query string false "Query"
// @Success 200 {array} paging
// @Router /core/permissions/spaces [get]
// list returns every space together with its permission record (if one
// exists). Permissions are loaded in one query and joined in memory by
// space ID rather than queried per space.
func list(w http.ResponseWriter, r *http.Request) {
	response := paging{}
	response.Nodes = make([]spaceWithPermissions, 0)

	// Load all spaces and the overall count in a single pass.
	spaces := make([]model.Space, 0)
	config.DB.Model(&model.Space{}).Count(&response.Total).Find(&spaces)

	if len(spaces) == 0 {
		renderx.JSON(w, http.StatusOK, response)
		return
	}

	// Load every permission row and index it by owning space.
	permissions := make([]model.SpacePermission, 0)
	config.DB.Model(&model.SpacePermission{}).Find(&permissions)

	permBySpace := make(map[uint]model.SpacePermission)
	for _, permission := range permissions {
		permBySpace[permission.SpaceID] = permission
	}

	// Stitch each space to its permission (nil when absent).
	for _, sp := range spaces {
		node := spaceWithPermissions{Space: sp}
		if permission, ok := permBySpace[sp.ID]; ok {
			node.Permission = &permission
		}
		response.Nodes = append(response.Nodes, node)
	}

	renderx.JSON(w, http.StatusOK, response)
}
<file_sep>/util/nats.go
package util
import (
"log"
"github.com/nats-io/nats.go"
"github.com/spf13/viper"
)
// NC nats connection object
var NC *nats.EncodedConn
// ConnectNats connect to nats server
func ConnectNats() {
var err error
nc, err := nats.Connect(viper.GetString("nats_url"), nats.UserInfo(viper.GetString("nats_user_name"), viper.GetString("nats_user_password")))
if err != nil {
log.Fatal(err)
}
NC, _ = nats.NewEncodedConn(nc, nats.JSON_ENCODER)
}
// CheckNats reports whether the NATS/webhook integration is switched on:
// the enable_hukz flag must be both present and true.
func CheckNats() bool {
	if !viper.IsSet("enable_hukz") {
		return false
	}
	return viper.GetBool("enable_hukz")
}
<file_sep>/service/core/action/category/route.go
package category
import (
"github.com/factly/dega-server/config"
"github.com/factly/dega-server/util"
"github.com/go-chi/chi"
"github.com/jinzhu/gorm/dialects/postgres"
)
// category is the request body for create/update category endpoints. Slug is
// optional (derived from Name when empty); Description holds raw editor JSON.
type category struct {
	Name        string         `json:"name" validate:"required,min=3,max=50"`
	Slug        string         `json:"slug"`
	Description postgres.Jsonb `json:"description" swaggertype:"primitive,string"`
	ParentID    uint           `json:"parent_id"`
	MediumID    uint           `json:"medium_id"`
	IsFeatured  bool           `json:"is_featured"`
	MetaFields  postgres.Jsonb `json:"meta_fields" swaggertype:"primitive,string"`
}

// userContext is the context key carrying the acting user's ID — presumably
// consumed by the category model hooks; confirm against the model package.
var userContext config.ContextKey = "category_user"
// Router - Group of category router. Every route is wrapped in a keto policy
// check for the "categories" entity with the matching action.
func Router() chi.Router {
	const entity = "categories"

	router := chi.NewRouter()

	// Collection-level routes.
	router.With(util.CheckKetoPolicy(entity, "get")).Get("/", list)
	router.With(util.CheckKetoPolicy(entity, "create")).Post("/", create)

	// Item-level routes keyed by category_id.
	router.Route("/{category_id}", func(sub chi.Router) {
		sub.With(util.CheckKetoPolicy(entity, "get")).Get("/", details)
		sub.With(util.CheckKetoPolicy(entity, "update")).Put("/", update)
		sub.With(util.CheckKetoPolicy(entity, "delete")).Delete("/", delete)
	})

	return router
}
<file_sep>/service/core/model/webhook.go
package model
import (
"time"
"github.com/factly/dega-server/config"
"github.com/jinzhu/gorm/dialects/postgres"
)
// Webhook webhook model: a registered callback URL together with the events
// that should trigger it.
type Webhook struct {
	config.Base
	// Name is a human-readable label for the webhook.
	Name string `json:"name"`
	// URL is the destination endpoint — presumably where payloads are POSTed;
	// delivery code is not in view, confirm there.
	URL string `json:"url"`
	// Enabled toggles the webhook; assumed to gate delivery — verify in the
	// dispatch code.
	Enabled bool `json:"enabled"`
	// Events lists the event types this webhook subscribes to.
	Events []Event `json:"events"`
	// Tags holds arbitrary key/value labels.
	Tags map[string]string `json:"tags"`
}
// Event event model: a named event type that webhooks can subscribe to.
type Event struct {
	config.Base
	// Name is a human-readable label for the event.
	Name string `json:"name"`
	// Event is the event identifier string.
	Event string `json:"event"`
	// Tags holds arbitrary key/value labels.
	Tags map[string]string `json:"tags"`
}
// WebhookLog model: one recorded webhook delivery, pairing the payload sent
// with the response received. Unlike the other models it does not embed
// config.Base and defines its own ID/audit columns.
type WebhookLog struct {
	ID          uint      `gorm:"primary_key" json:"id"`
	CreatedAt   time.Time `json:"created_at"`
	CreatedByID uint      `gorm:"column:created_by_id" json:"created_by_id"`
	// Event is the event identifier that triggered this delivery.
	Event string `gorm:"column:event" json:"event"`
	// URL is the endpoint the delivery was addressed to.
	URL string `gorm:"column:url" json:"url"`
	// ResponseStatusCode is the HTTP status returned by the receiver.
	ResponseStatusCode int `gorm:"column:response_status_code" json:"response_status_code"`
	// Data is the delivered payload as raw JSON.
	Data postgres.Jsonb `gorm:"column:data" json:"data" swaggertype:"primitive,string"`
	// ResponseBody is the receiver's response body as raw JSON.
	ResponseBody postgres.Jsonb `gorm:"column:response_body" json:"response_body" swaggertype:"primitive,string"`
	// Tags holds arbitrary labels as raw JSON.
	Tags postgres.Jsonb `gorm:"column:tags" json:"tags" swaggertype:"primitive,string"`
}
<file_sep>/test/service/podcast/create_test.go
package podcast
import (
"net/http"
"net/http/httptest"
"regexp"
"strings"
"testing"
"time"
"github.com/DATA-DOG/go-sqlmock"
"github.com/factly/dega-server/service"
"github.com/factly/dega-server/test"
"github.com/factly/dega-server/test/service/core/category"
"github.com/factly/dega-server/test/service/core/medium"
"github.com/factly/dega-server/test/service/core/permissions/space"
"github.com/gavv/httpexpect"
"github.com/jinzhu/gorm/dialects/postgres"
"github.com/spf13/viper"
"gopkg.in/h2non/gock.v1"
)
// TestPodcastCreate exercises POST on the podcast collection endpoint against
// a fully mocked stack: sqlmock for the database and gock for outbound HTTP.
// Mock expectations are registered in the exact order the handler issues them.
func TestPodcastCreate(t *testing.T) {
	mock := test.SetupMockDB()

	viper.Set("templates_path", "../../../web/templates/*")

	test.MockServer()
	// NOTE(review): this defer duplicates the one three lines below;
	// DisableNetworking is a plain gock call, so the repeat is harmless but
	// one of the two is redundant.
	defer gock.DisableNetworking()

	testServer := httptest.NewServer(service.RegisterRoutes())
	gock.New(testServer.URL).EnableNetworking().Persist()
	defer gock.DisableNetworking()
	defer testServer.Close()

	// create httpexpect instance
	e := httpexpect.New(t, testServer.URL)

	// Validation failure: body present but fails field validation.
	t.Run("Unprocessable podcast", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)
		e.POST(basePath).
			WithJSON(invalidData).
			WithHeaders(headers).
			Expect().
			Status(http.StatusUnprocessableEntity)
		test.ExpectationsMet(t, mock)
	})

	// Decode failure: no JSON body at all.
	t.Run("Undecodable podcast", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)
		e.POST(basePath).
			WithHeaders(headers).
			Expect().
			Status(http.StatusUnprocessableEntity)
		test.ExpectationsMet(t, mock)
	})

	// Duplicate title: the count query returning 1 rejects the create.
	t.Run("Podcast with same name exist", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)
		mock.ExpectQuery(countQuery).
			WithArgs(1, strings.ToLower(Data["title"].(string))).
			WillReturnRows(sqlmock.NewRows([]string{"count"}).
				AddRow(1))
		e.POST(basePath).
			WithHeaders(headers).
			WithJSON(Data).
			Expect().
			Status(http.StatusUnprocessableEntity)
		test.ExpectationsMet(t, mock)
	})

	// Happy path: uniqueness check, slug check, then an INSERT inside a
	// committed transaction, followed by the post-create reloads.
	t.Run("Create podcast", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)
		mock.ExpectQuery(countQuery).
			WithArgs(1, strings.ToLower(Data["title"].(string))).
			WillReturnRows(sqlmock.NewRows([]string{"count"}).
				AddRow(0))
		slugCheckMock(mock, Data)
		category.SelectWithOutSpace(mock)
		mock.ExpectBegin()
		medium.SelectWithSpace(mock)
		mock.ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "categories"`)).
			WithArgs(sqlmock.AnyArg(), sqlmock.AnyArg()).
			WillReturnRows(sqlmock.NewRows(category.Columns).
				AddRow(1, time.Now(), time.Now(), nil, 1, 1, category.Data["name"], category.Data["slug"], category.Data["description"], category.Data["html_description"], category.Data["parent_id"], category.Data["meta_fields"], category.Data["medium_id"], category.Data["is_featured"], 1))
		mock.ExpectQuery(`INSERT INTO "podcasts"`).
			WithArgs(test.AnyTime{}, test.AnyTime{}, nil, 1, 1, Data["title"], Data["slug"], Data["description"], Data["html_description"], Data["language"], Data["primary_category_id"], Data["medium_id"], 1).
			WillReturnRows(sqlmock.
				NewRows([]string{"medium_id", "id", "primary_category_id"}).
				AddRow(1, 1, 1))
		podcastCategoriesInsert(mock)
		SelectQuery(mock)
		PodcastCategorySelect(mock)
		medium.SelectWithOutSpace(mock)
		category.SelectWithOutSpace(mock)
		mock.ExpectCommit()
		e.POST(basePath).
			WithHeaders(headers).
			WithJSON(Data).
			Expect().
			Status(http.StatusCreated)
		test.ExpectationsMet(t, mock)
	})

	// Description that is not a valid editor-js document is rejected before
	// any INSERT happens. The shared fixture is mutated and restored in-place.
	t.Run("cannot parse podcast description", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)
		mock.ExpectQuery(countQuery).
			WithArgs(1, strings.ToLower(Data["title"].(string))).
			WillReturnRows(sqlmock.NewRows([]string{"count"}).
				AddRow(0))
		Data["description"] = postgres.Jsonb{
			RawMessage: []byte(`{"block": "new"}`),
		}
		e.POST(basePath).
			WithHeaders(headers).
			WithJSON(Data).
			Expect().
			Status(http.StatusUnprocessableEntity)
		Data["description"] = postgres.Jsonb{
			RawMessage: []byte(`{"time":1617039625490,"blocks":[{"type":"paragraph","data":{"text":"Test Description"}}],"version":"2.19.0"}`),
		}
		test.ExpectationsMet(t, mock)
	})

	// Meilisearch outage: same flow as the happy path, but the transaction
	// rolls back and the handler reports a 500.
	t.Run("Create podcast when meili is down", func(t *testing.T) {
		test.DisableMeiliGock(testServer.URL)
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)
		mock.ExpectQuery(countQuery).
			WithArgs(1, strings.ToLower(Data["title"].(string))).
			WillReturnRows(sqlmock.NewRows([]string{"count"}).
				AddRow(0))
		slugCheckMock(mock, Data)
		category.SelectWithOutSpace(mock)
		mock.ExpectBegin()
		medium.SelectWithSpace(mock)
		mock.ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "categories"`)).
			WithArgs(sqlmock.AnyArg(), sqlmock.AnyArg()).
			WillReturnRows(sqlmock.NewRows(category.Columns).
				AddRow(1, time.Now(), time.Now(), nil, 1, 1, category.Data["name"], category.Data["slug"], category.Data["description"], category.Data["html_description"], category.Data["parent_id"], category.Data["meta_fields"], category.Data["medium_id"], category.Data["is_featured"], 1))
		mock.ExpectQuery(`INSERT INTO "podcasts"`).
			WithArgs(test.AnyTime{}, test.AnyTime{}, nil, 1, 1, Data["title"], Data["slug"], Data["description"], Data["html_description"], Data["language"], Data["primary_category_id"], Data["medium_id"], 1).
			WillReturnRows(sqlmock.
				NewRows([]string{"medium_id", "id", "primary_category_id"}).
				AddRow(1, 1, 1))
		podcastCategoriesInsert(mock)
		SelectQuery(mock)
		PodcastCategorySelect(mock)
		medium.SelectWithOutSpace(mock)
		category.SelectWithOutSpace(mock)
		mock.ExpectRollback()
		e.POST(basePath).
			WithHeaders(headers).
			WithJSON(Data).
			Expect().
			Status(http.StatusInternalServerError)
		test.ExpectationsMet(t, mock)
	})
}
<file_sep>/test/service/core/page/testvars.go
package page
import (
"database/sql/driver"
"fmt"
"regexp"
"time"
"github.com/DATA-DOG/go-sqlmock"
"github.com/factly/dega-server/test"
"github.com/factly/dega-server/test/service/core/category"
"github.com/factly/dega-server/test/service/core/format"
"github.com/factly/dega-server/test/service/core/medium"
"github.com/factly/dega-server/test/service/core/tag"
"github.com/jinzhu/gorm/dialects/postgres"
)
// headers carries the auth-context headers the server expects on every
// request: the acting space and user.
var headers = map[string]string{
	"X-Space": "1",
	"X-User":  "1",
}

// Data is the canonical valid request payload for creating/updating a page
// (pages are stored in the "posts" table with is_page = true).
var Data = map[string]interface{}{
	"title":    "Post",
	"subtitle": "post subtitle",
	"slug":     "post",
	"status":   "draft",
	"is_page":  true,
	"excerpt":  "post excerpt",
	"description": postgres.Jsonb{
		RawMessage: []byte(`{"time":1617039625490,"blocks":[{"type":"paragraph","data":{"text":"Test Description"}}],"version":"2.19.0"}`),
	},
	"html_description":   "<p>Test Description</p>",
	"is_featured":        false,
	"is_sticky":          true,
	"is_highlighted":     true,
	"featured_medium_id": uint(1),
	"published_date":     time.Now(),
	"format_id":          uint(1),
	"category_ids":       []uint{1},
	"tag_ids":            []uint{1},
	"author_ids":         []uint{1},
}

// pageData is a minimal valid payload: like Data but without medium,
// published date, or the category/tag/author associations.
var pageData = map[string]interface{}{
	"title":    "Post",
	"subtitle": "post subtitle",
	"slug":     "post",
	"status":   "draft",
	"is_page":  true,
	"excerpt":  "post excerpt",
	"description": postgres.Jsonb{
		RawMessage: []byte(`{"time":1617039625490,"blocks":[{"type":"paragraph","data":{"text":"Test Description"}}],"version":"2.19.0"}`),
	},
	"html_description": "<p>Test Description</p>",
	"is_featured":      false,
	"is_sticky":        true,
	"is_highlighted":   true,
	"format_id":        uint(1),
}

// pageList provides two fixture rows for list-endpoint tests.
var pageList = []map[string]interface{}{
	{
		"title":    "Post 1",
		"subtitle": "post subtitle 1",
		"slug":     "post-1",
		"status":   "draft",
		"is_page":  true,
		"excerpt":  "post excerpt",
		"description": postgres.Jsonb{
			RawMessage: []byte(`{"time":1617039625490,"blocks":[{"type":"paragraph","data":{"text":"Test Description 1"}}],"version":"2.19.0"}`),
		},
		"html_description":   "<p>Test Description 1</p>",
		"is_featured":        false,
		"is_sticky":          true,
		"is_highlighted":     true,
		"featured_medium_id": uint(1),
		"format_id":          uint(1),
	},
	{
		"title":    "Post 2",
		"subtitle": "post subtitle",
		"slug":     "post-2",
		"status":   "draft",
		"is_page":  true,
		"excerpt":  "post excerpt",
		"description": postgres.Jsonb{
			RawMessage: []byte(`{"time":1617039625490,"blocks":[{"type":"paragraph","data":{"text":"Test Description 2"}}],"version":"2.19.0"}`),
		},
		"html_description":   "<p>Test Description 2</p>",
		"is_featured":        false,
		"is_sticky":          true,
		"is_highlighted":     true,
		"featured_medium_id": uint(1),
		"format_id":          uint(1),
	},
}

// invalidData fails validation (title below the minimum length).
var invalidData = map[string]interface{}{
	"title": "a",
}

// columns mirrors the column order of the "posts" table rows returned by
// the mocks below; AddRow arguments must follow this exact order.
var columns = []string{"id", "created_at", "updated_at", "deleted_at", "created_by_id", "updated_by_id", "title", "subtitle", "slug", "status", "is_page", "excerpt", "description", "html_description", "is_featured", "is_sticky", "is_highlighted", "featured_medium_id", "format_id", "published_date", "space_id"}

// selectQuery matches any SELECT on "posts".
var selectQuery = `SELECT (.+) FROM "posts"`

// basePath and path are the page endpoints under test.
var basePath = "/core/pages"
var path = "/core/pages/{page_id}"
// slugCheckMock registers the query the server issues to find existing slugs
// with the same prefix in space 1; it returns no rows, so the slug is free.
func slugCheckMock(mock sqlmock.Sqlmock, post map[string]interface{}) {
	slugPrefix := fmt.Sprint(post["slug"], "%")
	emptyRows := sqlmock.NewRows(columns)
	mock.ExpectQuery(regexp.QuoteMeta(`SELECT slug, space_id FROM "posts"`)).
		WithArgs(slugPrefix, 1).
		WillReturnRows(emptyRows)
}
// SelectMock registers a SELECT on "posts" (matched with the given args)
// that returns a single row built from the shared Data fixture.
func SelectMock(mock sqlmock.Sqlmock, args ...driver.Value) {
	row := sqlmock.NewRows(columns).
		AddRow(1, time.Now(), time.Now(), nil, 1, 1, Data["title"], Data["subtitle"], Data["slug"], Data["status"], Data["is_page"], Data["excerpt"], Data["description"], Data["html_description"], Data["is_featured"], Data["is_sticky"], Data["is_highlighted"], Data["featured_medium_id"], Data["format_id"], Data["published_date"], 1)
	mock.ExpectQuery(selectQuery).WithArgs(args...).WillReturnRows(row)
}
// pageCountQuery registers the count(*) query on "posts" and makes it
// report the given total.
func pageCountQuery(mock sqlmock.Sqlmock, count int) {
	countRow := sqlmock.NewRows([]string{"count"}).AddRow(count)
	mock.ExpectQuery(regexp.QuoteMeta(`SELECT count(*) FROM "posts"`)).WillReturnRows(countRow)
}
// preloadMock registers, in the exact order the server issues them, the
// association queries fired when loading a page: the category join table and
// category rows, the format, the featured medium, then the tag join table
// and tag rows. Callers must not reorder these expectations.
func preloadMock(mock sqlmock.Sqlmock, args ...driver.Value) {
	mock.ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "post_categories"`)).
		WithArgs(args...).
		WillReturnRows(sqlmock.NewRows([]string{"post_id", "category_id"}).
			AddRow(1, 1))
	category.SelectWithOutSpace(mock)
	format.SelectMock(mock, 1)
	medium.SelectWithOutSpace(mock)
	mock.ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "post_tags"`)).
		WithArgs(args...).
		WillReturnRows(sqlmock.NewRows([]string{"post_id", "tag_id"}).
			AddRow(1, 1))
	tag.SelectMock(mock, tag.Data, 1)
}
// pageAuthorInsertMock registers the INSERT into "post_authors" that links
// the created page to author 1, returning the new row id.
func pageAuthorInsertMock(mock sqlmock.Sqlmock) {
	idRow := sqlmock.NewRows([]string{"id"}).AddRow(1)
	mock.ExpectQuery(`INSERT INTO "post_authors"`).
		WithArgs(test.AnyTime{}, test.AnyTime{}, nil, 1, 1, 1, 1).
		WillReturnRows(idRow)
}
<file_sep>/util/keto.go
package util
import (
"net/http"
"github.com/spf13/viper"
)
// KetoGetRequest performs a GET request with an empty body against the keto
// service at the given path (relative to the configured "keto_url").
// The caller owns the returned response and must close its Body.
func KetoGetRequest(path string) (*http.Response, error) {
	req, err := http.NewRequest(http.MethodGet, viper.GetString("keto_url")+path, nil)
	if err != nil {
		return nil, err
	}
	req.Header.Set("Content-Type", "application/json")
	// Use the shared default client rather than allocating a new http.Client
	// per call: a fresh client defeats keep-alive connection pooling.
	return http.DefaultClient.Do(req)
}
<file_sep>/test/service/podcast/update_test.go
package podcast
import (
"database/sql/driver"
"errors"
"net/http"
"net/http/httptest"
"regexp"
"strings"
"testing"
"time"
"github.com/DATA-DOG/go-sqlmock"
"github.com/factly/dega-server/service"
"github.com/factly/dega-server/test"
"github.com/factly/dega-server/test/service/core/category"
"github.com/factly/dega-server/test/service/core/medium"
"github.com/factly/dega-server/test/service/core/permissions/space"
"github.com/gavv/httpexpect"
"github.com/jinzhu/gorm/dialects/postgres"
"github.com/spf13/viper"
"gopkg.in/h2non/gock.v1"
)
// TestPodcastUpdate exercises PUT on the single-podcast endpoint against a
// mocked database (sqlmock) and mocked external services (gock). Mock
// expectations are registered in the exact order the handler issues them.
func TestPodcastUpdate(t *testing.T) {
	mock := test.SetupMockDB()

	viper.Set("templates_path", "../../../web/templates/*")

	test.MockServer()
	// NOTE(review): unlike the create test this call is not deferred — it
	// disables networking up-front, before the test server URL is
	// re-enabled below; confirm this asymmetry is intentional.
	gock.DisableNetworking()

	testServer := httptest.NewServer(service.RegisterRoutes())
	gock.New(testServer.URL).EnableNetworking().Persist()
	defer gock.DisableNetworking()
	defer testServer.Close()

	// create httpexpect instance
	e := httpexpect.New(t, testServer.URL)

	// Non-numeric path parameter is rejected before touching the DB.
	t.Run("invalid podcast id", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)
		e.PUT(path).
			WithPath("podcast_id", "invalid_id").
			WithHeaders(headers).
			WithJSON(Data).
			Expect().
			Status(http.StatusBadRequest)
		test.ExpectationsMet(t, mock)
	})

	// SELECT returning no rows yields a 404.
	t.Run("podcast record not found", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)
		mock.ExpectQuery(selectQuery).
			WillReturnRows(sqlmock.NewRows(Columns))
		e.PUT(path).
			WithPath("podcast_id", "1").
			WithJSON(Data).
			WithHeaders(headers).
			Expect().
			Status(http.StatusNotFound)
		test.ExpectationsMet(t, mock)
	})

	// Body fails field validation.
	t.Run("invalid podcast body", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)
		e.PUT(path).
			WithPath("podcast_id", "1").
			WithHeaders(headers).
			WithJSON(invalidData).
			Expect().
			Status(http.StatusUnprocessableEntity)
		test.ExpectationsMet(t, mock)
	})

	// Missing body cannot be decoded.
	t.Run("undecodable podcast body", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)
		e.PUT(path).
			WithPath("podcast_id", "1").
			WithHeaders(headers).
			Expect().
			Status(http.StatusUnprocessableEntity)
		test.ExpectationsMet(t, mock)
	})

	// Renaming to an already-taken title (count query returns 1) is rejected.
	// The shared fixture is mutated and restored in-place.
	t.Run("podcast with same name already exist", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)
		SelectQuery(mock)
		Data["title"] = "New Title"
		mock.ExpectQuery(countQuery).
			WithArgs(1, strings.ToLower(Data["title"].(string))).
			WillReturnRows(sqlmock.NewRows([]string{"count"}).
				AddRow(1))
		e.PUT(path).
			WithPath("podcast_id", "1").
			WithJSON(Data).
			WithHeaders(headers).
			Expect().
			Status(http.StatusUnprocessableEntity)
		test.ExpectationsMet(t, mock)
		Data["title"] = "Test Podcast"
	})

	// A description that is not a valid editor-js document is rejected.
	t.Run("cannot parse podcast description", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)
		SelectQuery(mock)
		Data["description"] = postgres.Jsonb{
			RawMessage: []byte(`{"block": "new"}`),
		}
		e.PUT(path).
			WithPath("podcast_id", "1").
			WithJSON(Data).
			WithHeaders(headers).
			Expect().
			Status(http.StatusUnprocessableEntity)
		Data["description"] = postgres.Jsonb{
			RawMessage: []byte(`{"time":1617039625490,"blocks":[{"type":"paragraph","data":{"text":"Test Description"}}],"version":"2.19.0"}`),
		}
		test.ExpectationsMet(t, mock)
	})

	// The category-association INSERT errors, so the transaction rolls back
	// and the handler reports a 500.
	t.Run("updating categories fails", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)
		SelectQuery(mock)
		mock.ExpectBegin()
		category.SelectWithOutSpace(mock)
		medium.SelectWithSpace(mock)
		mock.ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "categories"`)).
			WithArgs(sqlmock.AnyArg(), sqlmock.AnyArg()).
			WillReturnRows(sqlmock.NewRows(category.Columns).
				AddRow(1, time.Now(), time.Now(), nil, 1, 1, category.Data["name"], category.Data["slug"], category.Data["description"], category.Data["html_description"], category.Data["parent_id"], category.Data["meta_fields"], category.Data["medium_id"], category.Data["is_featured"], 1))
		mock.ExpectExec(`UPDATE "podcasts" SET`).
			WithArgs(test.AnyTime{}, 1).WillReturnResult(driver.ResultNoRows)
		medium.SelectWithSpace(mock)
		mock.ExpectQuery(`INSERT INTO "categories"`).
			WithArgs(test.AnyTime{}, test.AnyTime{}, nil, 1, 1, category.Data["name"], category.Data["slug"], category.Data["description"], category.Data["html_description"], category.Data["is_featured"], 1, category.Data["meta_fields"], sqlmock.AnyArg(), sqlmock.AnyArg(), sqlmock.AnyArg()).
			WillReturnError(errors.New("cannot update categories"))
		mock.ExpectRollback()
		e.PUT(path).
			WithPath("podcast_id", "1").
			WithJSON(Data).
			WithHeaders(headers).
			Expect().
			Status(http.StatusInternalServerError)
		test.ExpectationsMet(t, mock)
	})

	// Happy path: association rewrite, slug check, final UPDATE, reloads,
	// then commit.
	t.Run("updating podcast", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)
		SelectQuery(mock)
		mock.ExpectBegin()
		category.SelectWithOutSpace(mock)
		medium.SelectWithSpace(mock)
		mock.ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "categories"`)).
			WithArgs(sqlmock.AnyArg(), sqlmock.AnyArg()).
			WillReturnRows(sqlmock.NewRows(category.Columns).
				AddRow(1, time.Now(), time.Now(), nil, 1, 1, category.Data["name"], category.Data["slug"], category.Data["description"], category.Data["html_description"], category.Data["parent_id"], category.Data["meta_fields"], category.Data["medium_id"], category.Data["is_featured"], 1))
		mock.ExpectExec(`UPDATE "podcasts" SET`).
			WithArgs(test.AnyTime{}, 1).WillReturnResult(driver.ResultNoRows)
		podcastCategoriesInsert(mock)
		mock.ExpectExec(`DELETE FROM "podcast_categories"`).
			WithArgs(1, 1).
			WillReturnResult(driver.ResultNoRows)
		slugCheckMock(mock, Data)
		medium.SelectWithSpace(mock)
		mock.ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "categories"`)).
			WithArgs(sqlmock.AnyArg(), sqlmock.AnyArg()).
			WillReturnRows(sqlmock.NewRows(category.Columns).
				AddRow(1, time.Now(), time.Now(), nil, 1, 1, category.Data["name"], category.Data["slug"], category.Data["description"], category.Data["html_description"], category.Data["parent_id"], category.Data["meta_fields"], category.Data["medium_id"], category.Data["is_featured"], 1))
		mock.ExpectExec(`UPDATE \"podcasts\"`).
			WithArgs(test.AnyTime{}, 1, Data["slug"], Data["description"], Data["html_description"], Data["language"], Data["primary_category_id"], Data["medium_id"], 1, 1).
			WillReturnResult(sqlmock.NewResult(1, 1))
		SelectQuery(mock)
		PodcastCategorySelect(mock)
		medium.SelectWithOutSpace(mock)
		mock.ExpectCommit()
		e.PUT(path).
			WithPath("podcast_id", "1").
			WithJSON(Data).
			WithHeaders(headers).
			Expect().
			Status(http.StatusOK)
		test.ExpectationsMet(t, mock)
	})

	// medium_id = 0 clears the medium association (the extra UPDATE with a
	// nil argument) before the regular update runs.
	t.Run("updating podcast when medium_id = 0", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)
		SelectQuery(mock)
		mock.ExpectBegin()
		category.SelectWithOutSpace(mock)
		medium.SelectWithSpace(mock)
		mock.ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "categories"`)).
			WithArgs(sqlmock.AnyArg(), sqlmock.AnyArg()).
			WillReturnRows(sqlmock.NewRows(category.Columns).
				AddRow(1, time.Now(), time.Now(), nil, 1, 1, category.Data["name"], category.Data["slug"], category.Data["description"], category.Data["html_description"], category.Data["parent_id"], category.Data["meta_fields"], category.Data["medium_id"], category.Data["is_featured"], 1))
		mock.ExpectExec(`UPDATE "podcasts" SET`).
			WithArgs(test.AnyTime{}, 1).WillReturnResult(driver.ResultNoRows)
		podcastCategoriesInsert(mock)
		mock.ExpectExec(`DELETE FROM "podcast_categories"`).
			WithArgs(1, 1).
			WillReturnResult(driver.ResultNoRows)
		mock.ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "categories"`)).
			WithArgs(sqlmock.AnyArg(), sqlmock.AnyArg()).
			WillReturnRows(sqlmock.NewRows(category.Columns).
				AddRow(1, time.Now(), time.Now(), nil, 1, 1, category.Data["name"], category.Data["slug"], category.Data["description"], category.Data["html_description"], category.Data["parent_id"], category.Data["meta_fields"], category.Data["medium_id"], category.Data["is_featured"], 1))
		mock.ExpectExec(`UPDATE \"podcasts\"`).
			WithArgs(nil, test.AnyTime{}, 1).
			WillReturnResult(sqlmock.NewResult(1, 1))
		slugCheckMock(mock, Data)
		mock.ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "categories"`)).
			WithArgs(sqlmock.AnyArg(), sqlmock.AnyArg()).
			WillReturnRows(sqlmock.NewRows(category.Columns).
				AddRow(1, time.Now(), time.Now(), nil, 1, 1, category.Data["name"], category.Data["slug"], category.Data["description"], category.Data["html_description"], category.Data["parent_id"], category.Data["meta_fields"], category.Data["medium_id"], category.Data["is_featured"], 1))
		mock.ExpectExec(`UPDATE \"podcasts\"`).
			WithArgs(test.AnyTime{}, 1, Data["slug"], Data["description"], Data["html_description"], Data["language"], Data["primary_category_id"], 1, 1).
			WillReturnResult(sqlmock.NewResult(1, 1))
		SelectQuery(mock)
		PodcastCategorySelect(mock)
		medium.SelectWithOutSpace(mock)
		mock.ExpectCommit()
		Data["medium_id"] = 0
		e.PUT(path).
			WithPath("podcast_id", "1").
			WithJSON(Data).
			WithHeaders(headers).
			Expect().
			Status(http.StatusOK)
		test.ExpectationsMet(t, mock)
		Data["medium_id"] = 1
	})

	// Meilisearch outage: same flow as the happy path, but the transaction
	// rolls back and the handler reports a 500.
	t.Run("updating podcast when meili is down", func(t *testing.T) {
		test.DisableMeiliGock(testServer.URL)
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)
		SelectQuery(mock)
		mock.ExpectBegin()
		category.SelectWithOutSpace(mock)
		medium.SelectWithSpace(mock)
		mock.ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "categories"`)).
			WithArgs(sqlmock.AnyArg(), sqlmock.AnyArg()).
			WillReturnRows(sqlmock.NewRows(category.Columns).
				AddRow(1, time.Now(), time.Now(), nil, 1, 1, category.Data["name"], category.Data["slug"], category.Data["description"], category.Data["html_description"], category.Data["parent_id"], category.Data["meta_fields"], category.Data["medium_id"], category.Data["is_featured"], 1))
		mock.ExpectExec(`UPDATE "podcasts" SET`).
			WithArgs(test.AnyTime{}, 1).WillReturnResult(driver.ResultNoRows)
		podcastCategoriesInsert(mock)
		mock.ExpectExec(`DELETE FROM "podcast_categories"`).
			WithArgs(1, 1).
			WillReturnResult(driver.ResultNoRows)
		slugCheckMock(mock, Data)
		medium.SelectWithSpace(mock)
		mock.ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "categories"`)).
			WithArgs(sqlmock.AnyArg(), sqlmock.AnyArg()).
			WillReturnRows(sqlmock.NewRows(category.Columns).
				AddRow(1, time.Now(), time.Now(), nil, 1, 1, category.Data["name"], category.Data["slug"], category.Data["description"], category.Data["html_description"], category.Data["parent_id"], category.Data["meta_fields"], category.Data["medium_id"], category.Data["is_featured"], 1))
		mock.ExpectExec(`UPDATE \"podcasts\"`).
			WithArgs(test.AnyTime{}, 1, Data["slug"], Data["description"], Data["html_description"], Data["language"], Data["primary_category_id"], Data["medium_id"], 1, 1).
			WillReturnResult(sqlmock.NewResult(1, 1))
		SelectQuery(mock)
		PodcastCategorySelect(mock)
		medium.SelectWithOutSpace(mock)
		mock.ExpectRollback()
		e.PUT(path).
			WithPath("podcast_id", "1").
			WithJSON(Data).
			WithHeaders(headers).
			Expect().
			Status(http.StatusInternalServerError)
		test.ExpectationsMet(t, mock)
	})
}
<file_sep>/test/service/core/request/space/list_test.go
package space
import (
"net/http"
"net/http/httptest"
"testing"
"time"
"github.com/DATA-DOG/go-sqlmock"
"github.com/factly/dega-server/service"
"github.com/factly/dega-server/test"
"github.com/gavv/httpexpect"
"gopkg.in/h2non/gock.v1"
)
// TestSpaceRequestList exercises GET on the space-request list endpoint:
// empty result, full result, status filter, and pagination.
func TestSpaceRequestList(t *testing.T) {
	mock := test.SetupMockDB()
	test.MockServer()
	defer gock.DisableNetworking()
	testServer := httptest.NewServer(service.RegisterRoutes())
	gock.New(testServer.URL).EnableNetworking().Persist()
	defer gock.DisableNetworking()
	defer testServer.Close()

	// create httpexpect instance
	e := httpexpect.New(t, testServer.URL)

	t.Run("get empty list of requests", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		mock.ExpectQuery(countQuery).
			WillReturnRows(sqlmock.NewRows([]string{"count"}).AddRow(0))
		mock.ExpectQuery(selectQuery).
			WillReturnRows(sqlmock.NewRows(Columns))
		e.GET(basePath).
			WithHeaders(headers).
			Expect().
			Status(http.StatusOK).
			JSON().
			Object().
			ContainsMap(map[string]interface{}{"total": 0})
		test.ExpectationsMet(t, mock)
	})

	t.Run("get list of requests", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		mock.ExpectQuery(countQuery).
			WillReturnRows(sqlmock.NewRows([]string{"count"}).AddRow(len(requestList)))
		mock.ExpectQuery(selectQuery).
			WillReturnRows(sqlmock.NewRows(Columns).
				AddRow(1, time.Now(), time.Now(), nil, 1, 1, requestList[0]["title"], requestList[0]["description"], requestList[0]["status"], requestList[0]["media"], requestList[0]["posts"], requestList[0]["episodes"], requestList[0]["podcast"], requestList[0]["fact_check"], requestList[0]["space_id"]).
				AddRow(2, time.Now(), time.Now(), nil, 1, 1, requestList[1]["title"], requestList[1]["description"], requestList[1]["status"], requestList[1]["media"], requestList[1]["posts"], requestList[1]["episodes"], requestList[1]["podcast"], requestList[1]["fact_check"], requestList[1]["space_id"]))
		e.GET(basePath).
			WithHeaders(headers).
			Expect().
			Status(http.StatusOK).
			JSON().
			Object().
			ContainsMap(map[string]interface{}{"total": len(requestList)}).
			Value("nodes").
			Array().Element(0).Object().ContainsMap(requestList[0])
		test.ExpectationsMet(t, mock)
	})

	// The "status" query parameter is forwarded as a WHERE argument.
	t.Run("get list of approved requests", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		mock.ExpectQuery(countQuery).
			WillReturnRows(sqlmock.NewRows([]string{"count"}).AddRow(len(requestList)))
		mock.ExpectQuery(selectQuery).
			WithArgs("approved").
			WillReturnRows(sqlmock.NewRows(Columns).
				AddRow(1, time.Now(), time.Now(), nil, 1, 1, requestList[0]["title"], requestList[0]["description"], requestList[0]["status"], requestList[0]["media"], requestList[0]["posts"], requestList[0]["episodes"], requestList[0]["podcast"], requestList[0]["fact_check"], requestList[0]["space_id"]).
				AddRow(2, time.Now(), time.Now(), nil, 1, 1, requestList[1]["title"], requestList[1]["description"], requestList[1]["status"], requestList[1]["media"], requestList[1]["posts"], requestList[1]["episodes"], requestList[1]["podcast"], requestList[1]["fact_check"], requestList[1]["space_id"]))
		e.GET(basePath).
			WithHeaders(headers).
			WithQuery("status", "approved").
			Expect().
			Status(http.StatusOK).
			JSON().
			Object().
			ContainsMap(map[string]interface{}{"total": len(requestList)}).
			Value("nodes").
			Array().Element(0).Object().ContainsMap(requestList[0])
		test.ExpectationsMet(t, mock)
	})

	// Fix: subtest name previously misspelled as "paiganation".
	t.Run("get list of requests with pagination", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		mock.ExpectQuery(countQuery).
			WillReturnRows(sqlmock.NewRows([]string{"count"}).AddRow(len(requestList)))
		mock.ExpectQuery(selectQuery).
			WithArgs("pending").
			WillReturnRows(sqlmock.NewRows(Columns).
				AddRow(2, time.Now(), time.Now(), nil, 1, 1, requestList[1]["title"], requestList[1]["description"], requestList[1]["status"], requestList[1]["media"], requestList[1]["posts"], requestList[1]["episodes"], requestList[1]["podcast"], requestList[1]["fact_check"], requestList[1]["space_id"]))
		e.GET(basePath).
			WithHeaders(headers).
			WithQueryObject(map[string]interface{}{
				"page":  2,
				"limit": 1,
			}).
			Expect().
			Status(http.StatusOK).
			JSON().
			Object().
			ContainsMap(map[string]interface{}{"total": len(requestList)}).
			Value("nodes").
			Array().Element(0).Object().ContainsMap(requestList[1])
		test.ExpectationsMet(t, mock)
	})
}
<file_sep>/test/service/core/page/delete_test.go
package page
import (
"net/http"
"net/http/httptest"
"regexp"
"testing"
"github.com/DATA-DOG/go-sqlmock"
"github.com/factly/dega-server/service"
"github.com/factly/dega-server/test"
"github.com/factly/dega-server/test/service/core/category"
"github.com/factly/dega-server/test/service/core/tag"
"github.com/gavv/httpexpect"
"gopkg.in/h2non/gock.v1"
)
// TestPageDelete exercises DELETE on the single-page endpoint: bad id,
// missing record, successful delete, and rollback when meilisearch is
// unreachable. Mock expectations follow the handler's exact query order.
func TestPageDelete(t *testing.T) {
	mock := test.SetupMockDB()

	test.MockServer()
	defer gock.DisableNetworking()

	testServer := httptest.NewServer(service.RegisterRoutes())
	gock.New(testServer.URL).EnableNetworking().Persist()
	defer gock.DisableNetworking()
	defer testServer.Close()

	// create httpexpect instance
	e := httpexpect.New(t, testServer.URL)

	t.Run("invalid page id", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		e.DELETE(path).
			WithPath("page_id", "invalid_id").
			WithHeaders(headers).
			Expect().
			Status(http.StatusBadRequest)
		test.ExpectationsMet(t, mock)
	})

	t.Run("page record not found", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		mock.ExpectQuery(selectQuery).
			WithArgs(true, 1, 100).
			WillReturnRows(sqlmock.NewRows(columns))
		e.DELETE(path).
			WithPath("page_id", "100").
			WithHeaders(headers).
			Expect().
			Status(http.StatusNotFound)
		test.ExpectationsMet(t, mock)
	})

	t.Run("delete page", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		SelectMock(mock, true, 1, 1)
		// Association loads before the delete: categories then tags.
		mock.ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "post_categories"`)).
			WithArgs(1).
			WillReturnRows(sqlmock.NewRows([]string{"post_id", "category_id"}).
				AddRow(1, 1))
		category.SelectWithOutSpace(mock)
		mock.ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "post_tags"`)).
			WithArgs(1).
			WillReturnRows(sqlmock.NewRows([]string{"post_id", "tag_id"}).
				AddRow(1, 1))
		tag.SelectMock(mock, tag.Data, 1)
		// Join rows are hard-deleted; authors and the post itself are
		// soft-deleted (deleted_at is set), all inside one transaction.
		mock.ExpectBegin()
		mock.ExpectExec(regexp.QuoteMeta(`DELETE FROM "post_tags"`)).
			WillReturnResult(sqlmock.NewResult(0, 1))
		mock.ExpectExec(regexp.QuoteMeta(`DELETE FROM "post_categories"`)).
			WillReturnResult(sqlmock.NewResult(0, 1))
		mock.ExpectExec(regexp.QuoteMeta(`UPDATE "post_authors" SET "deleted_at"=`)).
			WithArgs(test.AnyTime{}, 1).
			WillReturnResult(sqlmock.NewResult(1, 1))
		mock.ExpectExec(regexp.QuoteMeta(`UPDATE "posts" SET "deleted_at"=`)).
			WithArgs(test.AnyTime{}, 1).
			WillReturnResult(sqlmock.NewResult(1, 1))
		mock.ExpectCommit()
		e.DELETE(path).
			WithPath("page_id", 1).
			WithHeaders(headers).
			Expect().
			Status(http.StatusOK)
		test.ExpectationsMet(t, mock)
	})

	// Same flow, but the meilisearch call fails, so the transaction rolls
	// back and the handler reports a 500.
	t.Run("delete page when meili is down", func(t *testing.T) {
		test.DisableMeiliGock(testServer.URL)
		test.CheckSpaceMock(mock)
		SelectMock(mock, true, 1, 1)
		mock.ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "post_categories"`)).
			WithArgs(1).
			WillReturnRows(sqlmock.NewRows([]string{"post_id", "category_id"}).
				AddRow(1, 1))
		category.SelectWithOutSpace(mock)
		mock.ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "post_tags"`)).
			WithArgs(1).
			WillReturnRows(sqlmock.NewRows([]string{"post_id", "tag_id"}).
				AddRow(1, 1))
		tag.SelectMock(mock, tag.Data, 1)
		mock.ExpectBegin()
		mock.ExpectExec(regexp.QuoteMeta(`DELETE FROM "post_tags"`)).
			WillReturnResult(sqlmock.NewResult(0, 1))
		mock.ExpectExec(regexp.QuoteMeta(`DELETE FROM "post_categories"`)).
			WillReturnResult(sqlmock.NewResult(0, 1))
		mock.ExpectExec(regexp.QuoteMeta(`UPDATE "post_authors" SET "deleted_at"=`)).
			WithArgs(test.AnyTime{}, 1).
			WillReturnResult(sqlmock.NewResult(1, 1))
		mock.ExpectExec(regexp.QuoteMeta(`UPDATE "posts" SET "deleted_at"=`)).
			WithArgs(test.AnyTime{}, 1).
			WillReturnResult(sqlmock.NewResult(1, 1))
		mock.ExpectRollback()
		e.DELETE(path).
			WithPath("page_id", 1).
			WithHeaders(headers).
			Expect().
			Status(http.StatusInternalServerError)
		test.ExpectationsMet(t, mock)
	})
}
<file_sep>/service/core/action/permissions/organisation/list.go
package organisation
import (
"net/http"
"github.com/factly/x/errorx"
"github.com/factly/x/loggerx"
"github.com/factly/dega-server/config"
"github.com/factly/dega-server/service/core/model"
"github.com/factly/dega-server/util"
"github.com/factly/x/renderx"
)
// orgWithPermissions pairs an organisation with its permission record;
// Permission stays nil when no record exists for that organisation.
type orgWithPermissions struct {
	model.Organisation
	Permission *model.OrganisationPermission `json:"permission"`
}

// paging is the list-response envelope: the joined nodes plus a total count.
type paging struct {
	Nodes []orgWithPermissions `json:"nodes"`
	Total int64                `json:"total"`
}
// list - Get all organisation permissions
// @Summary Show all organisation permissions
// @Description Get all organisation permissions
// @Tags Organisation_Permissions
// @ID get-all-org-permissions
// @Produce json
// @Param X-User header string true "User ID"
// @Param X-Space header string true "Space ID"
// @Param q query string false "Query"
// @Success 200 {array} paging
// @Router /core/permissions/organisations [get]
func list(w http.ResponseWriter, r *http.Request) {
searchQuery := r.URL.Query().Get("q")
result := paging{}
result.Nodes = make([]orgWithPermissions, 0)
permissionList := make([]model.OrganisationPermission, 0)
config.DB.Model(&model.OrganisationPermission{}).Find(&permissionList)
permissionMap := make(map[uint]model.OrganisationPermission)
for _, permission := range permissionList {
permissionMap[permission.OrganisationID] = permission
}
allOrgMap, err := util.GetAllOrganisationsMap(searchQuery)
if err != nil {
loggerx.Error(err)
errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
return
}
for oid, organisation := range allOrgMap {
owp := orgWithPermissions{}
owp.Organisation = organisation
if per, found := permissionMap[oid]; found {
owp.Permission = &per
}
result.Nodes = append(result.Nodes, owp)
}
result.Total = int64(len(result.Nodes))
renderx.JSON(w, http.StatusOK, result)
}
<file_sep>/util/arrays/union.go
package arrays
// Union returns the union of two string slices, preserving order of first
// appearance: every element of a (in order), followed by the elements of b
// that are not already present.
//
// Fixes over the previous implementation:
//   - it no longer appends to the parameter a, which could silently overwrite
//     the caller's backing array when a had spare capacity;
//   - elements taken from b are now recorded in the seen-set, so duplicates
//     within b no longer appear twice in the result.
func Union(a, b []string) []string {
	seen := make(map[string]bool, len(a)+len(b))
	out := make([]string, 0, len(a)+len(b))
	for _, item := range a {
		seen[item] = true
		out = append(out, item)
	}
	for _, item := range b {
		if !seen[item] {
			seen[item] = true
			out = append(out, item)
		}
	}
	return out
}
<file_sep>/service/podcast/action/create.go
package podcast
import (
"context"
"encoding/json"
"errors"
"net/http"
"reflect"
"github.com/factly/dega-server/config"
coreModel "github.com/factly/dega-server/service/core/model"
"github.com/factly/dega-server/service/podcast/model"
"github.com/factly/dega-server/test"
"github.com/factly/dega-server/util"
"github.com/factly/x/errorx"
"github.com/factly/x/loggerx"
"github.com/factly/x/meilisearchx"
"github.com/factly/x/middlewarex"
"github.com/factly/x/renderx"
"github.com/factly/x/slugx"
"github.com/factly/x/validationx"
"gorm.io/gorm"
)
// create - Create podcast
// @Summary Create podcast
// @Description Create podcast
// @Tags Podcast
// @ID add-podcast
// @Consume json
// @Produce json
// @Param X-User header string true "User ID"
// @Param X-Space header string true "Space ID"
// @Param Podcast body podcast true "Podcast Object"
// @Success 201 {object} model.Podcast
// @Failure 400 {array} string
// @Router /podcast [post]
func create(w http.ResponseWriter, r *http.Request) {
	// Space and user IDs are injected into the request context by middleware;
	// absence of either means the caller is not authorized for this space.
	sID, err := middlewarex.GetSpace(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}
	uID, err := middlewarex.GetUser(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}
	// NOTE: this variable shadows the request-body type of the same name.
	podcast := &podcast{}
	err = json.NewDecoder(r.Body).Decode(&podcast)
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.DecodeError()))
		return
	}
	validationError := validationx.Check(podcast)
	if validationError != nil {
		loggerx.Error(errors.New("validation error"))
		errorx.Render(w, validationError)
		return
	}
	// Use the client-supplied slug only when it is already in valid slug form;
	// otherwise derive one from the title.
	var podcastSlug string
	if podcast.Slug != "" && slugx.Check(podcast.Slug) {
		podcastSlug = podcast.Slug
	} else {
		podcastSlug = slugx.Make(podcast.Title)
	}
	// Get table name
	stmt := &gorm.Statement{DB: config.DB}
	_ = stmt.Parse(&model.Podcast{})
	tableName := stmt.Schema.Table
	// Check if podcast with same name exist
	if util.CheckName(uint(sID), podcast.Title, tableName) {
		loggerx.Error(errors.New(`podcast with same name exist`))
		errorx.Render(w, errorx.Parser(errorx.SameNameExist()))
		return
	}
	// Zero-valued IDs are stored as NULL so optional foreign keys stay clean.
	mediumID := &podcast.MediumID
	if podcast.MediumID == 0 {
		mediumID = nil
	}
	primaryCategoryID := &podcast.PrimaryCategoryID
	if podcast.PrimaryCategoryID == 0 {
		primaryCategoryID = nil
	}
	// Store HTML description
	// NOTE(review): test.NilJsonb() is imported from the test package in a
	// production path — consider moving that helper into a shared package.
	var description string
	if len(podcast.Description.RawMessage) > 0 && !reflect.DeepEqual(podcast.Description, test.NilJsonb()) {
		description, err = util.HTMLDescription(podcast.Description)
		if err != nil {
			loggerx.Error(err)
			errorx.Render(w, errorx.Parser(errorx.GetMessage("cannot parse podcast description", http.StatusUnprocessableEntity)))
			return
		}
	}
	result := &model.Podcast{
		Title:             podcast.Title,
		Description:       podcast.Description,
		HTMLDescription:   description,
		Slug:              slugx.Approve(&config.DB, podcastSlug, sID, tableName),
		Language:          podcast.Language,
		MediumID:          mediumID,
		PrimaryCategoryID: primaryCategoryID,
		HeaderCode:        podcast.HeaderCode,
		FooterCode:        podcast.FooterCode,
		MetaFields:        podcast.MetaFields,
		SpaceID:           uint(sID),
	}
	// Resolve requested category IDs to records so the association is created
	// with the podcast; a bare slice in Where is treated by GORM as a
	// primary-key IN clause.
	if len(podcast.CategoryIDs) > 0 {
		config.DB.Model(&coreModel.Category{}).Where(podcast.CategoryIDs).Find(&result.Categories)
	}
	// The user ID rides on the context so model hooks can stamp created/updated-by.
	tx := config.DB.WithContext(context.WithValue(r.Context(), podcastUser, uID)).Begin()
	err = tx.Model(&model.Podcast{}).Create(&result).Error
	if err != nil {
		tx.Rollback()
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.DBError()))
		return
	}
	// Reload with associations for the response payload.
	tx.Model(&model.Podcast{}).Preload("Medium").Preload("Categories").Preload("PrimaryCategory").First(&result)
	// Insert into meili index
	meiliObj := map[string]interface{}{
		"id":                  result.ID,
		"kind":                "podcast",
		"title":               result.Title,
		"slug":                result.Slug,
		"description":         result.Description,
		"language":            result.Language,
		"category_ids":        podcast.CategoryIDs,
		"space_id":            result.SpaceID,
		"primary_category_id": result.PrimaryCategoryID,
		"medium_id":           result.MediumID,
	}
	// Roll the DB transaction back if indexing fails so the database and the
	// search index do not drift apart.
	err = meilisearchx.AddDocument("dega", meiliObj)
	if err != nil {
		tx.Rollback()
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
		return
	}
	tx.Commit()
	// NOTE(review): the transaction is already committed here, so a failed
	// NATS publish returns 500 even though the podcast was in fact created.
	if util.CheckNats() {
		if err = util.NC.Publish("podcast.created", result); err != nil {
			loggerx.Error(err)
			errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
			return
		}
	}
	renderx.JSON(w, http.StatusCreated, result)
}
<file_sep>/test/service/core/post/update_test.go
package post
import (
"errors"
"net/http"
"net/http/httptest"
"regexp"
"testing"
"time"
"github.com/DATA-DOG/go-sqlmock"
"github.com/factly/dega-server/service"
"github.com/factly/dega-server/test"
"github.com/factly/dega-server/test/service/core/format"
"github.com/factly/dega-server/test/service/core/medium"
"github.com/gavv/httpexpect/v2"
"github.com/jinzhu/gorm/dialects/postgres"
"github.com/spf13/viper"
"gopkg.in/h2non/gock.v1"
)
// updatePost is the request body shared by the TestPostUpdate subtests.
// "slug" and "status" are intentionally absent here: individual subtests set
// (and mutate) them, so expectation order across subtests matters.
var updatePost = map[string]interface{}{
	"title":    "Post",
	"subtitle": "post subtitle",
	"status":   "draft",
	"excerpt":  "post excerpt",
	"is_page":  false,
	"description": postgres.Jsonb{
		RawMessage: []byte(`{"time":1617039625490,"blocks":[{"type":"paragraph","data":{"text":"Test Description"}}],"version":"2.19.0"}`),
	},
	"html_description":   "<p>Test Description</p>",
	"is_featured":        false,
	"is_sticky":          true,
	"is_highlighted":     true,
	"featured_medium_id": uint(1),
	"format_id":          uint(1),
	"category_ids":       []uint{1},
	"tag_ids":            []uint{1},
	"claim_ids":          []uint{1},
	"author_ids":         []uint{1},
}
// TestPostUpdate exercises PUT /core/posts/{post_id} against a fully mocked
// database (go-sqlmock) and mocked external services (gock). sqlmock
// expectations are strictly ordered, so the mock-setup helpers must be called
// in exactly the sequence the handler issues its queries.
func TestPostUpdate(t *testing.T) {
	mock := test.SetupMockDB()
	test.MockServer()
	defer gock.DisableNetworking()
	testServer := httptest.NewServer(service.RegisterRoutes())
	gock.New(testServer.URL).EnableNetworking().Persist()
	defer gock.DisableNetworking()
	defer testServer.Close()

	// create httpexpect instance
	e := httpexpect.New(t, testServer.URL)

	// Non-numeric path parameter is rejected before any DB access.
	t.Run("invalid post id", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		e.PUT(path).
			WithPath("post_id", "invalid_id").
			WithHeaders(headers).
			Expect().
			Status(http.StatusBadRequest)
	})

	t.Run("post record not found", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		recordNotFoundMock(mock)
		e.PUT(path).
			WithPath("post_id", "100").
			WithHeaders(headers).
			WithJSON(Data).
			Expect().
			Status(http.StatusNotFound)
	})

	// Empty body cannot be decoded into the post struct.
	t.Run("Unable to decode post data", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		e.PUT(path).
			WithPath("post_id", 1).
			WithHeaders(headers).
			Expect().
			Status(http.StatusUnprocessableEntity)
		test.ExpectationsMet(t, mock)
	})

	t.Run("Unprocessable post", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		e.PUT(path).
			WithPath("post_id", 1).
			WithHeaders(headers).
			WithJSON(invalidData).
			Expect().
			Status(http.StatusUnprocessableEntity)
		test.ExpectationsMet(t, mock)
	})

	// Happy path: full update flow, transaction committed.
	t.Run("update post", func(t *testing.T) {
		updatePost["slug"] = "post"
		test.CheckSpaceMock(mock)
		updateMock(mock, updatePost, false)
		mock.ExpectCommit()
		e.PUT(path).
			WithPath("post_id", 1).
			WithHeaders(headers).
			WithJSON(updatePost).
			Expect().
			Status(http.StatusOK).JSON().Object().ContainsMap(postData)
		test.ExpectationsMet(t, mock)
	})

	// Malformed editor-js payload: description is restored afterwards because
	// updatePost is shared state across subtests.
	t.Run("cannot parse post description", func(t *testing.T) {
		updatePost["slug"] = "post"
		test.CheckSpaceMock(mock)
		postSelectWithSpace(mock)
		updatePost["description"] = postgres.Jsonb{
			RawMessage: []byte(`{"block": "new"}`),
		}
		e.PUT(path).
			WithPath("post_id", 1).
			WithHeaders(headers).
			WithJSON(updatePost).
			Expect().
			Status(http.StatusUnprocessableEntity)
		updatePost["description"] = postgres.Jsonb{
			RawMessage: []byte(`{"time":1617039625490,"blocks":[{"type":"paragraph","data":{"text":"Test Description"}}],"version":"2.19.0"}`),
		}
		test.ExpectationsMet(t, mock)
	})

	// Second UPDATE statement fails -> handler rolls back and returns 500.
	t.Run("updating post fails", func(t *testing.T) {
		updatePost["slug"] = "post"
		test.CheckSpaceMock(mock)
		preUpdateDraftMock(mock, updatePost, false)
		mock.ExpectExec(`UPDATE \"posts\"`).
			WithArgs(test.AnyTime{}, Data["is_page"], Data["is_featured"], Data["is_sticky"], Data["is_highlighted"], 1).
			WillReturnResult(sqlmock.NewResult(1, 1))
		medium.SelectWithSpace(mock)
		format.SelectMock(mock, 1, 1)
		mock.ExpectExec(`UPDATE \"posts\"`).
			WithArgs(test.AnyTime{}, 1, Data["title"], Data["subtitle"], Data["slug"], Data["excerpt"],
				Data["description"], Data["html_description"], Data["is_sticky"], Data["is_highlighted"], Data["featured_medium_id"], Data["format_id"], 1).
			WillReturnError(errors.New("cannot update post"))
		mock.ExpectRollback()
		e.PUT(path).
			WithPath("post_id", 1).
			WithHeaders(headers).
			WithJSON(updatePost).
			Expect().
			Status(http.StatusInternalServerError)
		test.ExpectationsMet(t, mock)
	})

	t.Run("change post status back to draft", func(t *testing.T) {
		updatePost["slug"] = "post"
		test.CheckSpaceMock(mock)
		preUpdatePublishedMock(mock, updatePost, false)
		updatePublishedQueryMock(mock, updatePost, false)
		updatePostClaimsMock(mock)
		updatePostAuthorMock(mock)
		mock.ExpectCommit()
		e.PUT(path).
			WithPath("post_id", 1).
			WithHeaders(headers).
			WithJSON(updatePost).
			Expect().
			Status(http.StatusOK).JSON().Object().ContainsMap(postData)
		test.ExpectationsMet(t, mock)
	})

	// Keto answers the first authz check but denies the publish permission.
	t.Run("user does not have publish permission when changing post status back to draft", func(t *testing.T) {
		test.DisableKetoGock(testServer.URL)
		gock.New(viper.GetString("keto_url")).
			Post("/engines/acp/ory/regex/allowed").
			Reply(http.StatusOK)
		gock.New(viper.GetString("keto_url")).
			Post("/engines/acp/ory/regex/allowed").
			Reply(http.StatusForbidden)
		test.CheckSpaceMock(mock)
		postSelectPublishedWithSpace(mock)
		preUpdateMock(mock, updatePost, false)
		mock.ExpectRollback()
		updatePost["status"] = "draft"
		e.PUT(path).
			WithPath("post_id", 1).
			WithHeaders(headers).
			WithJSON(updatePost).
			Expect().
			Status(http.StatusUnauthorized)
		test.ExpectationsMet(t, mock)
		updatePost["status"] = "draft"
	})

	// Only one keto reply is queued, so the second authz call fails (keto "down").
	t.Run("keto down when changing post status back to draft", func(t *testing.T) {
		test.DisableKetoGock(testServer.URL)
		gock.New(viper.GetString("keto_url")).
			Post("/engines/acp/ory/regex/allowed").
			Reply(http.StatusOK)
		test.CheckSpaceMock(mock)
		postSelectPublishedWithSpace(mock)
		preUpdateMock(mock, updatePost, false)
		mock.ExpectRollback()
		e.PUT(path).
			WithPath("post_id", 1).
			WithHeaders(headers).
			WithJSON(updatePost).
			Expect().
			Status(http.StatusUnauthorized)
		test.ExpectationsMet(t, mock)
	})

	// Reset gock state after the keto-specific subtests above.
	gock.Off()
	test.MockServer()
	gock.New(testServer.URL).EnableNetworking().Persist()
	defer gock.DisableNetworking()
	updatePost["status"] = "draft"

	t.Run("deleting old post_claims fails", func(t *testing.T) {
		updatePost["slug"] = "post"
		test.CheckSpaceMock(mock)
		preUpdateDraftMock(mock, updatePost, false)
		updateQueryMock(mock, updatePost, false)
		mock.ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "post_claims"`)).
			WithArgs(1).
			WillReturnRows(sqlmock.NewRows([]string{"id", "created_at", "updated_at", "deleted_at", "claim_id", "post_id"}).
				AddRow(1, time.Now(), time.Now(), nil, 2, 1))
		mock.ExpectExec(regexp.QuoteMeta(`UPDATE "post_claims" SET "deleted_at"=`)).
			WithArgs(test.AnyTime{}, 1).
			WillReturnError(errors.New("cannot delete post_claims"))
		mock.ExpectRollback()
		e.PUT(path).
			WithPath("post_id", 1).
			WithHeaders(headers).
			WithJSON(updatePost).
			Expect().
			Status(http.StatusInternalServerError)
		test.ExpectationsMet(t, mock)
	})

	t.Run("adding post_claims fails", func(t *testing.T) {
		updatePost["slug"] = "post"
		test.CheckSpaceMock(mock)
		preUpdateDraftMock(mock, updatePost, false)
		updateQueryMock(mock, updatePost, false)
		mock.ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "post_claims"`)).
			WithArgs(1).
			WillReturnRows(sqlmock.NewRows([]string{"id", "created_at", "updated_at", "deleted_at", "claim_id", "post_id", "position"}).
				AddRow(1, time.Now(), time.Now(), nil, 2, 1, 1))
		mock.ExpectExec(regexp.QuoteMeta(`UPDATE "post_claims" SET "deleted_at"=`)).
			WithArgs(test.AnyTime{}, 1).
			WillReturnResult(sqlmock.NewResult(1, 1))
		mock.ExpectQuery(`INSERT INTO "post_claims"`).
			WithArgs(test.AnyTime{}, test.AnyTime{}, nil, 1, 1, 1, 1, 1).
			WillReturnError(errors.New(`cannot create post claims`))
		mock.ExpectRollback()
		e.PUT(path).
			WithPath("post_id", 1).
			WithHeaders(headers).
			WithJSON(updatePost).
			Expect().
			Status(http.StatusInternalServerError)
		test.ExpectationsMet(t, mock)
	})

	t.Run("deleting old post_authors fails", func(t *testing.T) {
		updatePost["slug"] = "post"
		test.CheckSpaceMock(mock)
		preUpdateDraftMock(mock, updatePost, false)
		updateQueryMock(mock, updatePost, false)
		updatePostClaimsMock(mock)
		mock.ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "post_authors"`)).
			WithArgs(1).
			WillReturnRows(sqlmock.NewRows([]string{"id", "created_at", "updated_at", "deleted_at", "author_id", "post_id"}).
				AddRow(1, time.Now(), time.Now(), nil, 2, 1))
		mock.ExpectExec(regexp.QuoteMeta(`UPDATE "post_authors" SET "deleted_at"=`)).
			WithArgs(test.AnyTime{}, 1).
			WillReturnError(errors.New("cannot delete post_authors"))
		mock.ExpectRollback()
		e.PUT(path).
			WithPath("post_id", 1).
			WithHeaders(headers).
			WithJSON(updatePost).
			Expect().
			Status(http.StatusInternalServerError)
		test.ExpectationsMet(t, mock)
	})

	t.Run("creating post_authors fails", func(t *testing.T) {
		updatePost["slug"] = "post"
		test.CheckSpaceMock(mock)
		preUpdateDraftMock(mock, updatePost, false)
		updateQueryMock(mock, updatePost, false)
		updatePostClaimsMock(mock)
		mock.ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "post_authors"`)).
			WithArgs(1).
			WillReturnRows(sqlmock.NewRows([]string{"id", "created_at", "updated_at", "deleted_at", "created_by_id", "updated_by_id", "author_id", "post_id"}).
				AddRow(1, time.Now(), time.Now(), nil, 1, 1, 2, 1))
		mock.ExpectExec(regexp.QuoteMeta(`UPDATE "post_authors" SET "deleted_at"=`)).
			WithArgs(test.AnyTime{}, 1).
			WillReturnResult(sqlmock.NewResult(1, 1))
		mock.ExpectQuery(`INSERT INTO "post_authors"`).
			WithArgs(test.AnyTime{}, test.AnyTime{}, nil, 1, 1, 1, 1).
			WillReturnError(errors.New("cannot create post_authors"))
		mock.ExpectRollback()
		e.PUT(path).
			WithPath("post_id", 1).
			WithHeaders(headers).
			WithJSON(updatePost).
			Expect().
			Status(http.StatusInternalServerError)
		test.ExpectationsMet(t, mock)
	})

	// Meili indexing failure rolls the DB transaction back.
	t.Run("update post when meili is down", func(t *testing.T) {
		test.DisableMeiliGock(testServer.URL)
		updatePost["slug"] = "post"
		test.CheckSpaceMock(mock)
		updateMock(mock, updatePost, false)
		mock.ExpectRollback()
		e.PUT(path).
			WithPath("post_id", 1).
			WithHeaders(headers).
			WithJSON(updatePost).
			Expect().
			Status(http.StatusInternalServerError)
		test.ExpectationsMet(t, mock)
		test.MockServer()
	})

	// Empty slug triggers slug regeneration from the title.
	t.Run("update post by id with empty slug", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		updatePost["slug"] = "post"
		updateMock(mock, updatePost, true)
		mock.ExpectCommit()
		Data["slug"] = ""
		e.PUT(path).
			WithPath("post_id", 1).
			WithHeaders(headers).
			WithJSON(Data).
			Expect().
			Status(http.StatusOK).JSON().Object().ContainsMap(postData)
		Data["slug"] = "post"
		test.ExpectationsMet(t, mock)
	})

	t.Run("update post with different slug", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		updatePost["slug"] = "post-test"
		updateMock(mock, updatePost, true)
		mock.ExpectCommit()
		postData["slug"] = "post-test"
		e.PUT(path).
			WithPath("post_id", 1).
			WithHeaders(headers).
			WithJSON(updatePost).
			Expect().
			Status(http.StatusOK).JSON().Object().ContainsMap(postData)
		test.ExpectationsMet(t, mock)
	})
}
<file_sep>/test/service/core/space/create_test.go
package space
import (
"errors"
"net/http"
"net/http/httptest"
"regexp"
"testing"
"github.com/DATA-DOG/go-sqlmock"
"github.com/factly/dega-server/service"
"github.com/factly/dega-server/test"
"github.com/factly/dega-server/test/service/core/permissions/organisation"
"github.com/gavv/httpexpect"
"gopkg.in/h2non/gock.v1"
)
// TestSpaceCreate exercises POST /core/spaces against a mocked database
// (go-sqlmock) and mocked external services (gock). sqlmock expectations are
// strictly ordered, so helper calls mirror the handler's query sequence.
func TestSpaceCreate(t *testing.T) {
	mock := test.SetupMockDB()
	test.MockServer()
	defer gock.DisableNetworking()
	testServer := httptest.NewServer(service.RegisterRoutes())
	gock.New(testServer.URL).EnableNetworking().Persist()
	defer gock.DisableNetworking()
	defer testServer.Close()

	// create httpexpect instance
	e := httpexpect.New(t, testServer.URL)

	// Happy path: insert succeeds, transaction committed.
	t.Run("create a space", func(t *testing.T) {
		insertMock(mock)
		mock.ExpectCommit()
		e.POST(basePath).
			WithHeader("X-User", "1").
			WithJSON(Data).
			Expect().
			Status(http.StatusCreated)
		test.ExpectationsMet(t, mock)
	})

	// INSERT INTO "spaces" errors -> rollback and 500.
	t.Run("creating space fails", func(t *testing.T) {
		organisation.SelectQuery(mock, 1)
		mock.ExpectQuery(countQuery).
			WithArgs(1).
			WillReturnRows(sqlmock.NewRows([]string{"count"}).AddRow(1))
		slugCheckMock(mock)
		mock.ExpectBegin()
		mock.ExpectQuery(`INSERT INTO "spaces"`).
			WithArgs(test.AnyTime{}, test.AnyTime{}, nil, 1, 1, Data["name"], Data["slug"], Data["site_title"], Data["tag_line"], Data["description"], Data["site_address"], Data["verification_codes"], Data["social_media_urls"], Data["contact_info"], Data["analytics"], Data["organisation_id"]).
			WillReturnError(errors.New("cannot create space"))
		mock.ExpectRollback()
		e.POST(basePath).
			WithHeader("X-User", "1").
			WithJSON(Data).
			Expect().
			Status(http.StatusInternalServerError)
		test.ExpectationsMet(t, mock)
	})

	// Space row inserts but the follow-up space_permissions insert fails.
	t.Run("creating space permission fails", func(t *testing.T) {
		organisation.SelectQuery(mock, 1)
		mock.ExpectQuery(countQuery).
			WithArgs(1).
			WillReturnRows(sqlmock.NewRows([]string{"count"}).AddRow(1))
		slugCheckMock(mock)
		mock.ExpectBegin()
		mock.ExpectQuery(`INSERT INTO "spaces"`).
			WithArgs(test.AnyTime{}, test.AnyTime{}, nil, 1, 1, Data["name"], Data["slug"], Data["site_title"], Data["tag_line"], Data["description"], Data["site_address"], Data["verification_codes"], Data["social_media_urls"], Data["contact_info"], Data["analytics"], Data["organisation_id"]).
			WillReturnRows(sqlmock.
				NewRows([]string{"fav_icon_id", "mobile_icon_id", "logo_id", "logo_mobile_id", "id"}).
				AddRow(1, 1, 1, 1, 1))
		mock.ExpectQuery(`INSERT INTO "space_permissions"`).
			WithArgs(test.AnyTime{}, test.AnyTime{}, nil, 1, 1, true, 1, -1, -1, true, -1).
			WillReturnError(errors.New("cannot create space permission"))
		mock.ExpectRollback()
		e.POST(basePath).
			WithHeader("X-User", "1").
			WithJSON(Data).
			Expect().
			Status(http.StatusInternalServerError)
		test.ExpectationsMet(t, mock)
	})

	// Organisation 2 has no organisation_permissions row; Data is restored after.
	t.Run("create space when no permission found", func(t *testing.T) {
		Data["organisation_id"] = 2
		mock.ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "organisation_permissions"`)).
			WithArgs(2).
			WillReturnRows(sqlmock.NewRows([]string{"id", "created_at", "updated_at", "deleted_at", "organisation_id", "spaces", "mediums", "posts"}))
		e.POST(basePath).
			WithHeader("X-User", "1").
			WithJSON(Data).
			Expect().
			Status(http.StatusUnprocessableEntity)
		Data["organisation_id"] = 1
		test.ExpectationsMet(t, mock)
	})

	// Existing space count (10) exceeds the organisation's quota.
	t.Run("create more than allowed spaces", func(t *testing.T) {
		organisation.SelectQuery(mock, 1)
		mock.ExpectQuery(countQuery).
			WithArgs(1).
			WillReturnRows(sqlmock.NewRows([]string{"count"}).AddRow(10))
		e.POST(basePath).
			WithHeader("X-User", "1").
			WithJSON(Data).
			Expect().
			Status(http.StatusUnprocessableEntity)
		test.ExpectationsMet(t, mock)
	})

	t.Run("Unprocessable space body", func(t *testing.T) {
		e.POST(basePath).
			WithHeader("X-User", "1").
			WithJSON(invalidData).
			Expect().
			Status(http.StatusUnprocessableEntity)
	})

	t.Run("unable to decode space body", func(t *testing.T) {
		e.POST(basePath).
			WithHeader("X-User", "1").
			Expect().
			Status(http.StatusUnprocessableEntity)
	})

	t.Run("invalid user id", func(t *testing.T) {
		e.POST(basePath).
			WithHeader("X-User", "invalid_id").
			WithJSON(Data).
			Expect().
			Status(http.StatusUnauthorized)
	})

	t.Run("When keto is down", func(t *testing.T) {
		test.DisableKetoGock(testServer.URL)
		e.POST(basePath).
			WithHeader("X-User", "1").
			WithJSON(Data).
			Expect().
			Status(http.StatusUnauthorized)
	})

	// Meili indexing failure rolls the DB transaction back.
	t.Run("create a space when meili is down", func(t *testing.T) {
		test.DisableMeiliGock(testServer.URL)
		insertMock(mock)
		mock.ExpectRollback()
		e.POST(basePath).
			WithHeader("X-User", "1").
			WithJSON(Data).
			Expect().
			Status(http.StatusInternalServerError)
		test.ExpectationsMet(t, mock)
	})
}
<file_sep>/service/core/model/space.go
package model
import (
"errors"
"gorm.io/gorm"
"github.com/factly/dega-server/config"
"github.com/jinzhu/gorm/dialects/postgres"
)
// Space model: a publishing space belonging to an organisation, with its
// branding media (logo, icons), site metadata, and embedded JSON settings.
type Space struct {
	config.Base
	Name      string `gorm:"column:name" json:"name"`
	Slug      string `gorm:"column:slug" json:"slug"`
	SiteTitle string `gorm:"column:site_title" json:"site_title"`
	TagLine   string `gorm:"column:tag_line" json:"tag_line"`
	// Tag fixed from `gorm:"description"` (which omitted the column: key and
	// relied on the default naming strategy coincidentally producing the same
	// name) to the explicit form used by every other field. The resolved
	// column name is unchanged.
	Description  string  `gorm:"column:description" json:"description"`
	SiteAddress  string  `gorm:"column:site_address" json:"site_address"`
	LogoID       *uint   `gorm:"column:logo_id;default:NULL" json:"logo_id"`
	Logo         *Medium `gorm:"foreignKey:logo_id" json:"logo"`
	LogoMobileID *uint   `gorm:"column:logo_mobile_id;default:NULL" json:"logo_mobile_id"`
	LogoMobile   *Medium `gorm:"foreignKey:logo_mobile_id" json:"logo_mobile"`
	FavIconID    *uint   `gorm:"column:fav_icon_id;default:NULL" json:"fav_icon_id"`
	FavIcon      *Medium `gorm:"foreignKey:fav_icon_id" json:"fav_icon"`
	MobileIconID *uint   `gorm:"column:mobile_icon_id;default:NULL" json:"mobile_icon_id"`
	MobileIcon   *Medium `gorm:"foreignKey:mobile_icon_id" json:"mobile_icon"`
	// Free-form JSON blobs configured per space.
	VerificationCodes postgres.Jsonb `gorm:"column:verification_codes" json:"verification_codes" swaggertype:"primitive,string"`
	SocialMediaURLs   postgres.Jsonb `gorm:"column:social_media_urls" json:"social_media_urls" swaggertype:"primitive,string"`
	ContactInfo       postgres.Jsonb `gorm:"column:contact_info" json:"contact_info" swaggertype:"primitive,string"`
	Analytics         postgres.Jsonb `gorm:"column:analytics" json:"analytics" swaggertype:"primitive,string"`
	HeaderCode        string         `gorm:"column:header_code" json:"header_code"`
	FooterCode        string         `gorm:"column:footer_code" json:"footer_code"`
	MetaFields        postgres.Jsonb `gorm:"column:meta_fields" json:"meta_fields" swaggertype:"primitive,string"`
	OrganisationID    int            `gorm:"column:organisation_id" json:"organisation_id"`
}
// SpacePermission model: per-space feature quotas and toggles.
// Media/Posts/Episodes appear to use -1 as "unlimited" elsewhere in the
// codebase — TODO confirm against the permission-check callers.
type SpacePermission struct {
	config.Base
	FactCheck bool   `gorm:"column:fact_check" json:"fact_check"`
	SpaceID   uint   `gorm:"column:space_id" json:"space_id"`
	Space     *Space `gorm:"foreignKey:space_id" json:"space,omitempty"`
	Media     int64  `gorm:"column:media" json:"media"`
	Posts     int64  `gorm:"column:posts" json:"posts"`
	Podcast   bool   `gorm:"column:podcast" json:"podcast"`
	Episodes  int64  `gorm:"column:episodes" json:"episodes"`
}

// Context keys under which handlers store the acting user's ID so the
// BeforeCreate hooks below can stamp created/updated-by columns.
var spaceUser config.ContextKey = "space_user"
var spacePermissionUser config.ContextKey = "space_perm_user"
// BeforeUpdate checks if all associated mediums are in same space
// (GORM hook). The previous version repeated the same lookup four times;
// the check now lives in one helper. Error strings are unchanged.
func (space *Space) BeforeUpdate(tx *gorm.DB) (e error) {
	if err := validateMediumSpace(tx, space.LogoID, space.ID, "logo do not belong to same space"); err != nil {
		return err
	}
	if err := validateMediumSpace(tx, space.LogoMobileID, space.ID, "logo mobile do not belong to same space"); err != nil {
		return err
	}
	if err := validateMediumSpace(tx, space.FavIconID, space.ID, "fav icon do not belong to same space"); err != nil {
		return err
	}
	if err := validateMediumSpace(tx, space.MobileIconID, space.ID, "mobile icon do not belong to same space"); err != nil {
		return err
	}
	return nil
}

// validateMediumSpace returns an error carrying errMsg when mediumID is set
// (non-nil and non-zero) but no medium with that ID exists in the given
// space. A nil or zero ID is treated as "not set" and passes.
func validateMediumSpace(tx *gorm.DB, mediumID *uint, spaceID uint, errMsg string) error {
	if mediumID == nil || *mediumID == 0 {
		return nil
	}
	medium := Medium{}
	// Setting the primary key on the destination makes GORM include it in
	// the query conditions alongside the space filter.
	medium.ID = *mediumID
	err := tx.Model(&Medium{}).Where(Medium{
		SpaceID: spaceID,
	}).First(&medium).Error
	if err != nil {
		return errors.New(errMsg)
	}
	return nil
}
// BeforeCreate hook: stamps CreatedByID/UpdatedByID from the user ID stored
// on the statement context under spaceUser; a missing value is a no-op.
func (space *Space) BeforeCreate(tx *gorm.DB) error {
	userID := tx.Statement.Context.Value(spaceUser)
	if userID == nil {
		return nil
	}
	id := uint(userID.(int))
	space.CreatedByID = id
	space.UpdatedByID = id
	return nil
}
// BeforeCreate hook: stamps CreatedByID/UpdatedByID from the user ID stored
// on the statement context under spacePermissionUser; a missing value is a no-op.
func (sp *SpacePermission) BeforeCreate(tx *gorm.DB) error {
	userID := tx.Statement.Context.Value(spacePermissionUser)
	if userID == nil {
		return nil
	}
	id := uint(userID.(int))
	sp.CreatedByID = id
	sp.UpdatedByID = id
	return nil
}
<file_sep>/test/service/fact-check/claimant/testvars.go
package claimant
import (
"fmt"
"regexp"
"time"
"github.com/DATA-DOG/go-sqlmock"
"github.com/factly/dega-server/test"
"github.com/factly/dega-server/test/service/core/medium"
"github.com/jinzhu/gorm/dialects/postgres"
)
// Common request headers for claimant tests (space 1, user 1).
var headers = map[string]string{
	"X-Space": "1",
	"X-User":  "1",
}

// Data is a valid claimant request body shared across the tests.
var Data = map[string]interface{}{
	"name": "TOI",
	"slug": "toi",
	"description": postgres.Jsonb{
		RawMessage: []byte(`{"time":1617039625490,"blocks":[{"type":"paragraph","data":{"text":"Test Description"}}],"version":"2.19.0"}`),
	},
	"html_description": "<p>Test Description</p>",
	"tag_line":         "sample tag line",
	"medium_id":        uint(1),
}

// resData is the expected response payload (Data without medium_id).
var resData = map[string]interface{}{
	"name": "TOI",
	"slug": "toi",
	"description": postgres.Jsonb{
		RawMessage: []byte(`{"time":1617039625490,"blocks":[{"type":"paragraph","data":{"text":"Test Description"}}],"version":"2.19.0"}`),
	},
	"html_description": "<p>Test Description</p>",
	"tag_line":         "sample tag line",
}

// invalidData fails validation (name shorter than the allowed minimum).
var invalidData = map[string]interface{}{
	"name": "a",
}

// columns mirrors the claimants table columns returned by the mocked queries.
var columns = []string{"id", "created_at", "updated_at", "deleted_at", "created_by_id", "updated_by_id", "name", "slug", "medium_id", "description", "html_description", "tag_line", "space_id"}

// Query fragments and routes used by the claimant test suite.
var selectQuery = regexp.QuoteMeta(`SELECT * FROM "claimants"`)
var deleteQuery = regexp.QuoteMeta(`UPDATE "claimants" SET "deleted_at"=`)
var paginationQuery = `SELECT \* FROM "claimants" (.+) LIMIT 1 OFFSET 1`

var basePath = "/fact-check/claimants"
var path = "/fact-check/claimants/{claimant_id}"
// slugCheckMock expects the slug-uniqueness lookup issued before a claimant
// is created, answering with no matching rows (slug is free).
func slugCheckMock(mock sqlmock.Sqlmock, claimant map[string]interface{}) {
	slugPrefix := fmt.Sprint(claimant["slug"], "%")
	emptyRows := sqlmock.NewRows(columns)
	mock.ExpectQuery(regexp.QuoteMeta(`SELECT slug, space_id FROM "claimants"`)).
		WithArgs(slugPrefix, 1).
		WillReturnRows(emptyRows)
}
// claimantInsertMock expects a successful claimant insert: transaction begin,
// medium lookup, then the INSERT returning the new row's id and medium_id.
func claimantInsertMock(mock sqlmock.Sqlmock) {
	mock.ExpectBegin()
	medium.SelectWithSpace(mock)
	insertedRows := sqlmock.NewRows([]string{"id", "medium_id"}).AddRow(1, 1)
	mock.ExpectQuery(`INSERT INTO "claimants"`).
		WithArgs(test.AnyTime{}, test.AnyTime{}, nil, 1, 1, Data["name"], Data["slug"], Data["description"], Data["html_description"], Data["tag_line"], Data["medium_id"], 1).
		WillReturnRows(insertedRows)
}
// claimantInsertError simulates a failed claimant insert: the transaction
// opens, the medium lookup returns no rows, and the transaction rolls back.
func claimantInsertError(mock sqlmock.Sqlmock) {
	mock.ExpectBegin()
	medium.EmptyRowMock(mock)
	mock.ExpectRollback()
}
// claimantUpdateMock sets up expectations for a claimant update. When err is
// nil it mocks the full success path (medium lookup, UPDATE, reload with
// medium preload); otherwise it only mocks an empty medium lookup, which
// makes the handler fail before the UPDATE.
func claimantUpdateMock(mock sqlmock.Sqlmock, claimant map[string]interface{}, err error) {
	mock.ExpectBegin()
	if err == nil {
		medium.SelectWithSpace(mock)
		mock.ExpectExec(`UPDATE \"claimants\"`).
			WithArgs(test.AnyTime{}, 1, claimant["name"], claimant["slug"], claimant["description"], claimant["html_description"], claimant["tag_line"], claimant["medium_id"], 1).
			WillReturnResult(sqlmock.NewResult(1, 1))
		SelectWithSpace(mock)
		medium.SelectWithOutSpace(mock)
	} else {
		medium.EmptyRowMock(mock)
	}
}
// SelectWithOutSpace expects a claimant lookup filtered only by ID (no space
// condition), returning one row built from the given claimant map, followed
// by the medium preload query.
func SelectWithOutSpace(mock sqlmock.Sqlmock, claimant map[string]interface{}) {
	claimantRow := sqlmock.NewRows(columns).
		AddRow(1, time.Now(), time.Now(), nil, 1, 1, claimant["name"], claimant["slug"], claimant["medium_id"], claimant["description"], claimant["html_description"], claimant["tag_line"], 1)
	mock.ExpectQuery(selectQuery).
		WithArgs(1).
		WillReturnRows(claimantRow)
	// Preload medium
	medium.SelectWithOutSpace(mock)
}
// SelectWithSpace expects a claimant lookup filtered by ID and space,
// answering with a single row built from the shared Data fixture.
func SelectWithSpace(mock sqlmock.Sqlmock) {
	row := sqlmock.NewRows(columns).
		AddRow(1, time.Now(), time.Now(), nil, 1, 1, Data["name"], Data["slug"], Data["medium_id"], Data["description"], Data["html_description"], Data["tag_line"], 1)
	mock.ExpectQuery(selectQuery).
		WithArgs(1, 1).
		WillReturnRows(row)
}
// recordNotFoundMock checks whether the claimant exists: the lookup for
// claimant 100 in space 1 returns no rows.
func recordNotFoundMock(mock sqlmock.Sqlmock) {
	noRows := sqlmock.NewRows(columns)
	mock.ExpectQuery(selectQuery).
		WithArgs(1, 100).
		WillReturnRows(noRows)
}
// claimantClaimExpect mocks the "is this claimant referenced by any claim?"
// count query run before deletion, answering with the given count.
func claimantClaimExpect(mock sqlmock.Sqlmock, count int) {
	countRow := sqlmock.NewRows([]string{"count"}).AddRow(count)
	mock.ExpectQuery(regexp.QuoteMeta(`SELECT count(*) FROM "claims"`)).
		WithArgs(1).
		WillReturnRows(countRow)
}
// claimantCountQuery mocks the total-count query over claimants used by
// paginated listings, answering with the given count.
func claimantCountQuery(mock sqlmock.Sqlmock, count int) {
	countRow := sqlmock.NewRows([]string{"count"}).AddRow(count)
	mock.ExpectQuery(regexp.QuoteMeta(`SELECT count(*) FROM "claimants"`)).
		WillReturnRows(countRow)
}
// EmptyRowMock expects the claimant lookup for ID 1 in space 1 and answers
// with zero rows (claimant not found).
func EmptyRowMock(mock sqlmock.Sqlmock) {
	noRows := sqlmock.NewRows(columns)
	mock.ExpectQuery(selectQuery).
		WithArgs(1, 1).
		WillReturnRows(noRows)
}
<file_sep>/test/models.go
package test
import "time"
// Dummy response body for the mock server requesting organisation data
// Endpoint this is sent for is /organisations
var Dummy_Org = map[string]interface{}{
	"id":         1,
	"created_at": time.Now(),
	"updated_at": time.Now(),
	"deleted_at": nil,
	"title":      "test org",
	"slug":       "test-org",
	"permission": map[string]interface{}{
		"id":              1,
		"created_at":      time.Now(),
		"updated_at":      time.Now(),
		"deleted_at":      nil,
		"user_id":         1,
		"user":            nil,
		"organisation_id": 1,
		"organisation":    nil,
		"role":            "owner",
	},
}

// PaiganatedOrg wraps Dummy_Org in the paginated list envelope.
// NOTE(review): name is a misspelling of "PaginatedOrg", but it is exported
// and referenced by other test files, so it is kept for compatibility.
var PaiganatedOrg = map[string]interface{}{
	"nodes": []interface{}{
		Dummy_Org,
	},
	"total": 1,
}
// Dummy_Org_Member_List is an organisation list where the requesting user
// holds only the "member" role (not "owner").
var Dummy_Org_Member_List = []map[string]interface{}{
	map[string]interface{}{
		"id":         1,
		"created_at": time.Now(),
		"updated_at": time.Now(),
		"deleted_at": nil,
		"title":      "test org",
		"permission": map[string]interface{}{
			"id":              1,
			"created_at":      time.Now(),
			"updated_at":      time.Now(),
			"deleted_at":      nil,
			"user_id":         1,
			"user":            nil,
			"organisation_id": 1,
			"organisation":    nil,
			"role":            "member",
		},
	},
}

// Dummy_OrgList is a single-element organisation list built from Dummy_Org.
var Dummy_OrgList = []map[string]interface{}{
	Dummy_Org,
}
// Dummy response for the mock server requesting list of authors
// Endpoint this is sent for is /organisations/[id]/users
// Two users: user 1 is an owner, user 2 a plain member.
var Dummy_AuthorList = []map[string]interface{}{
	{
		"id":         1,
		"created_at": time.Now(),
		"updated_at": time.Now(),
		"deleted_at": nil,
		"email":      "<EMAIL>",
		"kid":        "",
		"first_name": "abc",
		"last_name":  "cba",
		"birth_date": time.Now(),
		"gender":     "male",
		"permission": map[string]interface{}{
			"id":              1,
			"created_at":      time.Now(),
			"updated_at":      time.Now(),
			"deleted_at":      nil,
			"user_id":         1,
			"user":            nil,
			"organisation_id": 1,
			"organisation":    nil,
			"role":            "owner",
		},
	},
	{
		"id":         2,
		"created_at": time.Now(),
		"updated_at": time.Now(),
		"deleted_at": nil,
		"email":      "<EMAIL>",
		"kid":        "",
		"first_name": "def",
		"last_name":  "fed",
		"birth_date": time.Now(),
		"gender":     "male",
		"permission": map[string]interface{}{
			"id":              2,
			"created_at":      time.Now(),
			"updated_at":      time.Now(),
			"deleted_at":      nil,
			"user_id":         2,
			"user":            nil,
			"organisation_id": 1,
			"organisation":    nil,
			"role":            "member",
		},
	},
}
// Dummy_KetoPolicy is a canned keto policy list: one category/tag policy for
// two subjects and one policies-resource policy for a single subject.
var Dummy_KetoPolicy = []map[string]interface{}{
	{
		"id":          "id:org:1:app:dega:space:1:test-policy-4",
		"description": "",
		"subjects": []string{
			"1",
			"2",
		},
		"resources": []string{
			"resources:org:1:app:dega:space:1:categories",
			"resources:org:1:app:dega:space:1:tags",
		},
		"actions": []string{
			"actions:org:1:app:dega:space:1:categories:get",
			"actions:org:1:app:dega:space:1:categories:create",
			"actions:org:1:app:dega:space:1:tags:update",
			"actions:org:1:app:dega:space:1:tags:delete",
		},
		"effect":     "allow",
		"conditions": nil,
	},
	{
		"id":          "id:org:1:app:dega:space:1:test-policy-0",
		"description": "",
		"subjects": []string{
			"1",
		},
		"resources": []string{
			"resources:org:12:app:dega:space:18:policies",
		},
		"actions": []string{
			"actions:org:12:app:dega:space:18:policies:get",
			"actions:org:12:app:dega:space:18:policies:create",
			"actions:org:12:app:dega:space:18:policies:update",
			"actions:org:12:app:dega:space:18:policies:delete",
		},
		"effect":     "allow",
		"conditions": nil,
	},
}

// Dummy_Role is a canned keto role: org-1 admin containing user "1".
var Dummy_Role = map[string]interface{}{
	"id": "roles:org:1:admin",
	"members": []string{
		"1",
	},
}
// Dummy single policy
var Dummy_SingleMock = map[string]interface{}{
"id": "id:org:1:app:dega:space:1:test-policy-0",
"description": "",
"subjects": []string{
"1",
},
"resources": []string{
"resources:org:12:app:dega:space:18:policies",
},
"actions": []string{
"actions:org:12:app:dega:space:18:policies:get",
"actions:org:12:app:dega:space:18:policies:create",
"actions:org:12:app:dega:space:18:policies:update",
"actions:org:12:app:dega:space:18:policies:delete",
},
"effect": "allow",
"conditions": nil,
}
// ReturnUpdate is the minimal acknowledgement the mock Meili server returns
// for an index update request.
var ReturnUpdate = map[string]interface{}{
    "updateId": 1,
}

// MeiliHits mocks a Meili search response for query "test": one "post" hit
// and one "claim" hit, plus the standard pagination metadata.
var MeiliHits = map[string]interface{}{
    "hits": []map[string]interface{}{
        {
            "object_id":   "post_3",
            "kind":        "post",
            "description": "This is a test post with claim",
            "id":          1,
            "slug":        "test-post-2",
            "space_id":    1,
            "title":       "Test Post",
            "category_ids": []uint{
                2,
            },
            "excerpt":        "Test post with claim",
            "is_featured":    true,
            "is_highlighted": true,
            "is_sticky":      true,
            // -62135596800 is Go's zero time (0001-01-01) as a unix timestamp
            "published_date": -62135596800,
            "status":         "draft",
            "subtitle":       "Test Post",
            "tag_ids": []uint{
                42,
            },
            "claim_ids": []uint{
                5,
            },
            "format_id": 3,
        },
        {
            "object_id":       "claim_2",
            "kind":            "claim",
            "description":     "This is a test claim",
            "id":              2,
            "slug":            "test-claim",
            "space_id":        1,
            "title":           "Test Claim",
            "checked_date":    1598959138,
            "claim_date":      -62135596800,
            "claim_sources":   "secret sources",
            "claimant_id":     2,
            "rating_id":       2,
            "review":          "Bad review",
            "review_sources":  "Good sources",
            "review_tag_line": "Bad review good sources",
        },
    },
    "offset":           0,
    "limit":            20,
    "nbHits":           7,
    "exhaustiveNbHits": false,
    "processingTimeMs": 2,
    "query":            "test",
}

// EmptyMeili mocks a Meili search response with no hits.
var EmptyMeili = map[string]interface{}{
    "hits":             []map[string]interface{}{},
    "offset":           0,
    "limit":            20,
    "nbHits":           0,
    "exhaustiveNbHits": false,
    "processingTimeMs": 2,
    "query":            "test",
}
// GoogleResponse mocks the Google Fact Check Tools `claims:search` payload:
// a single claim with one claimReview entry, plus a nextPageToken for
// pagination.
// Cleanup: the redundant `map[string]interface{}` element types inside the
// typed slice literals are elided (gofmt -s style, matching the other
// fixtures in this file); the value is unchanged.
var GoogleResponse = map[string]interface{}{
    "claims": []map[string]interface{}{
        {
            "text":      "Shopkeepers sleeping inside shops due to Modi govt's handling of COVID-19",
            "claimant":  "Social media",
            "claimDate": "2016-06-20T00:00:00Z",
            "claimReview": []map[string]interface{}{
                {
                    // publisher keeps its explicit type: its enclosing value
                    // is interface{}, so the type cannot be elided here.
                    "publisher": map[string]interface{}{
                        "name": "Alt News",
                        "site": "altnews.in",
                    },
                    "url":           "https://www.altnews.in/congress-rohan-gupta-shares-old-images-of-shopkeeper-falling-a-sleep-to-target-pm-modi/",
                    "title":         "Photos of shopkeepers sleeping inside shops from 2019 shared as recent",
                    "reviewDate":    "2020-09-28T00:00:00Z",
                    "textualRating": "False",
                    "languageCode":  "en",
                },
            },
        },
    },
    "nextPageToken": "CBQ",
}
// IFramelyResponse mocks the iframely link-metadata payload for
// https://github.com/: page meta plus thumbnail and icon link entries.
var IFramelyResponse = map[string]interface{}{
    "meta": map[string]interface{}{
        "description": "GitHub is where over 50 million developers shape the future of software, together. Contribute to the open source community, manage your Git repositories, review code like a pro, track bugs and feat...",
        "title":       "GitHub: Where the world builds software",
        "theme-color": "#1e2327",
        "canonical":   "https://github.com/",
        "site":        "GitHub",
    },
    "links": map[string]interface{}{
        "thumbnail": []interface{}{
            map[string]interface{}{
                "href": "https://github.githubassets.com/images/modules/open_graph/github-mark.png",
                "type": "image/png",
                "rel": []interface{}{
                    "twitter",
                    "thumbnail",
                    "ssl",
                    "og",
                },
                "media": map[string]interface{}{
                    "width":  1200,
                    "height": 620,
                },
            },
        },
        "icon": []interface{}{
            map[string]interface{}{
                "href": "https://github.githubassets.com/favicons/favicon.svg",
                "type": "image/svg+xml",
            },
        },
    },
}

// OembedResponse mocks the oEmbed payload for the same URL (flattened form
// of the iframely data above).
var OembedResponse = map[string]interface{}{
    "type":             "link",
    "version":          "1.0",
    "title":            "GitHub: Where the world builds software",
    "url":              "https://github.com/",
    "provider_name":    "GitHub",
    "description":      "GitHub is where over 50 million developers shape the future of software, together. Contribute to the open source community, manage your Git repositories, review code like a pro, track bugs and feat...",
    "thumbnail_url":    "https://github.githubassets.com/images/modules/open_graph/github-mark.png",
    "thumbnail_width":  1200,
    "thumbnail_height": 620,
}
<file_sep>/service/core/action/policy/route.go
package policy
import (
"github.com/factly/dega-server/service/core/model"
"github.com/factly/dega-server/util"
"github.com/go-chi/chi"
)
// policyReq is the request body for creating/updating a policy: a named set
// of permission grants applied to a list of user IDs.
type policyReq struct {
    Name        string             `json:"name"`        // display name of the policy
    Description string             `json:"description"` // free-text description
    Permissions []model.Permission `json:"permissions"` // resource/action grants
    Users       []string           `json:"users"`       // user IDs the policy applies to
}
// Router - Group of policy router
// Registers CRUD routes for policies; mutating routes are guarded by the
// Keto policy-check middleware for the "policies" entity.
func Router() chi.Router {
    router := chi.NewRouter()

    const entity = "policies"

    router.Get("/", list)
    router.With(util.CheckKetoPolicy(entity, "create")).Post("/", create)
    router.With(util.CheckKetoPolicy(entity, "create")).Post("/default", createDefaults)

    router.Route("/{policy_id}", func(sub chi.Router) {
        sub.With(util.CheckKetoPolicy(entity, "get")).Get("/", details)
        sub.With(util.CheckKetoPolicy(entity, "update")).Put("/", update)
        sub.With(util.CheckKetoPolicy(entity, "delete")).Delete("/", delete)
    })

    return router
}
<file_sep>/service/fact-check/action/google/list.go
package google
import (
"encoding/json"
"errors"
"io/ioutil"
"net/http"
"net/url"
"github.com/spf13/viper"
"github.com/factly/x/errorx"
"github.com/factly/x/loggerx"
"github.com/factly/x/renderx"
)
// googleapis for factchecks
// GoogleURL is the base endpoint of the Google Fact Check Tools search API.
// It is a package-level var (presumably so tests can point it at a mock
// server — confirm against the test suite).
var GoogleURL = "https://factchecktools.googleapis.com/v1alpha1/claims:search"

// list response
// paging is the list envelope: total count, result nodes, and the token for
// fetching the next page of results.
type paging struct {
    Total    int           `json:"total"`
    Nodes    []interface{} `json:"nodes"`
    NextPage string        `json:"nextPage"`
}
// list - Get all google fact checks
// @Summary Show all google fact checks
// @Description Get all google fact checks
// @Tags Claimant
// @ID get-all-google-fact-checks
// @Produce json
// @Param X-User header string true "User ID"
// @Param X-Space header string true "Space ID"
// @Param query query string false "Query"
// @Param pageToken query string false "Page Token"
// @Param language query string false "language code"
// @Param sort query string false "Sort"
// @Success 200 {object} paging
// @Router /fact-check/google [get]
func list(w http.ResponseWriter, r *http.Request) {
    query := r.URL.Query().Get("query")
    language := r.URL.Query().Get("language")
    pageToken := r.URL.Query().Get("pageToken")

    // The upstream API cannot be searched without a query string.
    if query == "" {
        loggerx.Error(errors.New("query can't be empty"))
        errorx.Render(w, errorx.Parser(errorx.Message{Message: "query can't be empty",
            Code: http.StatusUnprocessableEntity}))
        return
    }

    var factChecks map[string]interface{}

    req, err := http.NewRequest("GET", GoogleURL, nil)
    if err != nil {
        loggerx.Error(err)
        errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
        return
    }

    // Build the query string; language and pageToken are optional.
    q := url.Values{}
    q.Add("key", viper.GetString("google_key"))
    q.Add("query", query)
    if language != "" {
        q.Add("languageCode", language)
    }
    if pageToken != "" {
        q.Add("pageToken", pageToken)
    }
    req.URL.RawQuery = q.Encode()

    client := &http.Client{}
    resp, err := client.Do(req)
    if err != nil {
        loggerx.Error(err)
        errorx.Render(w, errorx.Parser(errorx.NetworkError()))
        return
    }
    defer resp.Body.Close()

    // FIX: the read error was previously discarded; a truncated body would
    // have surfaced as a confusing decode error instead of being reported.
    body, err := ioutil.ReadAll(resp.Body)
    if err != nil {
        loggerx.Error(err)
        errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
        return
    }

    err = json.Unmarshal(body, &factChecks)
    if err != nil {
        loggerx.Error(err)
        errorx.Render(w, errorx.Parser(errorx.DecodeError()))
        return
    }

    result := paging{}
    result.Nodes = make([]interface{}, 0)

    // FIX: comma-ok type assertions so a malformed upstream payload cannot
    // panic the handler (the original asserted unconditionally).
    if claims, found := factChecks["claims"]; found {
        if nodes, ok := claims.([]interface{}); ok {
            result.Nodes = nodes
            result.Total = len(result.Nodes)
        }
    }
    if nextPageToken, found := factChecks["nextPageToken"]; found {
        if token, ok := nextPageToken.(string); ok {
            result.NextPage = token
        }
    }

    renderx.JSON(w, http.StatusOK, result)
}
<file_sep>/service/core/action/search/route.go
package search
import "github.com/go-chi/chi"
// searchQuery is the request body of the search endpoint: the query string
// is required (min 3 chars) and the page size is capped at 20.
type searchQuery struct {
    Query        string   `json:"q" validate:"required,min=3"`
    Limit        int64    `json:"limit" validate:"lte=20"`
    Filters      string   `json:"filters"`      // raw filter expression forwarded to the search backend
    FacetFilters []string `json:"facetFilters"` // facet filters forwarded to the search backend
}
// Router - Group of search router
// A single POST route that runs the search query against the index.
func Router() chi.Router {
    router := chi.NewRouter()
    router.Post("/", list)
    return router
}
<file_sep>/test/service/podcast/episode/testvars.go
package episode
import (
"database/sql/driver"
"fmt"
"regexp"
"time"
"github.com/DATA-DOG/go-sqlmock"
"github.com/jinzhu/gorm/dialects/postgres"
)
// headers carry the user/space context expected by the API middleware.
var headers = map[string]string{
    "X-Space": "1",
    "X-User":  "1",
}

// Data is a valid episode request body shared across the tests.
var Data = map[string]interface{}{
    "title": "Test Episode",
    "slug":  "test-episode",
    "description": postgres.Jsonb{
        RawMessage: []byte(`{"time":1617039625490,"blocks":[{"type":"paragraph","data":{"text":"Test Description"}}],"version":"2.19.0"}`),
    },
    "html_description": "<p>Test Description</p>",
    "season":           1,
    "episode":          1,
    "audio_url":        "testaudio.com",
    "published_date":   time.Now(),
    "medium_id":        1,
    "podcast_id":       1,
    "author_ids":       []uint{1},
}

// invalidData fails request validation (title too short).
var invalidData = map[string]interface{}{
    "title": "T",
}

// resData is the subset of Data expected back in API responses
// (published_date and author_ids omitted — presumably because they are not
// echoed verbatim; confirm against the episode serializer).
var resData = map[string]interface{}{
    "title": "Test Episode",
    "slug":  "test-episode",
    "description": postgres.Jsonb{
        RawMessage: []byte(`{"time":1617039625490,"blocks":[{"type":"paragraph","data":{"text":"Test Description"}}],"version":"2.19.0"}`),
    },
    "html_description": "<p>Test Description</p>",
    "season":           1,
    "episode":          1,
    "audio_url":        "testaudio.com",
    "medium_id":        1,
    "podcast_id":       1,
}

// Columns lists the "episodes" table columns in the exact order mocked rows
// supply their values.
var Columns = []string{"id", "created_at", "updated_at", "deleted_at", "created_by_id", "updated_by_id", "title", "slug", "season", "episode", "audio_url", "podcast_id", "description", "html_description", "published_date", "medium_id", "space_id"}

// Routes under test.
var basePath = "/podcast/episodes"
var path = "/podcast/episodes/{episode_id}"

// selectQuery matches any SELECT on the episodes table.
var selectQuery = regexp.QuoteMeta(`SELECT * FROM "episodes"`)
// SelectQuery registers an expectation for a SELECT on "episodes"
// (optionally constrained by args) returning one row built from the shared
// Data fixture.
func SelectQuery(mock sqlmock.Sqlmock, args ...driver.Value) {
    rows := sqlmock.NewRows(Columns)
    rows = rows.AddRow(1, time.Now(), time.Now(), nil, 1, 1, Data["title"], Data["slug"], Data["season"], Data["episode"], Data["audio_url"], Data["podcast_id"], Data["description"], Data["html_description"], Data["published_date"], Data["medium_id"], 1)
    mock.ExpectQuery(selectQuery).WithArgs(args...).WillReturnRows(rows)
}
// EpisodeAuthorSelect expects the preload query on "episode_authors" and
// returns the single mapping episode 1 -> author 1.
func EpisodeAuthorSelect(mock sqlmock.Sqlmock) {
    rows := sqlmock.NewRows([]string{"episode_id", "author_id"})
    rows = rows.AddRow(1, 1)
    mock.ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "episode_authors"`)).WillReturnRows(rows)
}
// CountQuery expects the count(*) query on "episodes" and returns the given
// count.
func CountQuery(mock sqlmock.Sqlmock, count int) {
    countRows := sqlmock.NewRows([]string{"count"}).AddRow(count)
    mock.ExpectQuery(regexp.QuoteMeta(`SELECT count(*) FROM "episodes"`)).WillReturnRows(countRows)
}
// slugCheckMock expects the slug-uniqueness lookup for the given episode and
// returns no rows (i.e. the slug is available).
func slugCheckMock(mock sqlmock.Sqlmock, episode map[string]interface{}) {
    pattern := fmt.Sprint(episode["slug"], "%")
    mock.ExpectQuery(regexp.QuoteMeta(`SELECT slug, space_id FROM "episodes"`)).
        WithArgs(pattern, 1).
        WillReturnRows(sqlmock.NewRows(Columns))
}
<file_sep>/test/service/core/tag/create_test.go
package tag
import (
"errors"
"net/http"
"net/http/httptest"
"testing"
"github.com/factly/dega-server/service"
"github.com/factly/dega-server/test"
"github.com/gavv/httpexpect/v2"
"github.com/jinzhu/gorm/dialects/postgres"
"gopkg.in/h2non/gock.v1"
)
// TestTagCreate covers POST /core/tags: validation failures, the happy
// path, DB errors, duplicate names, slug generation and a Meili outage.
func TestTagCreate(t *testing.T) {
    mock := test.SetupMockDB()

    test.MockServer()
    testServer := httptest.NewServer(service.RegisterRoutes())
    gock.New(testServer.URL).EnableNetworking().Persist()
    // FIX: gock.DisableNetworking() was deferred twice in the original
    // (once before and once after creating the test server); a single
    // deferred call is sufficient.
    defer gock.DisableNetworking()
    defer testServer.Close()

    // create httpexpect instance
    e := httpexpect.New(t, testServer.URL)

    t.Run("Unprocessable tag", func(t *testing.T) {
        test.CheckSpaceMock(mock)

        e.POST(basePath).
            WithJSON(invalidData).
            WithHeaders(headers).
            Expect().
            Status(http.StatusUnprocessableEntity)
    })

    t.Run("Unable to decode tag", func(t *testing.T) {
        test.CheckSpaceMock(mock)

        e.POST(basePath).
            WithHeaders(headers).
            Expect().
            Status(http.StatusUnprocessableEntity)
    })

    t.Run("create tag", func(t *testing.T) {
        test.CheckSpaceMock(mock)
        sameNameCount(mock, 0, Data["name"])
        slugCheckMock(mock)
        tagInsertMock(mock)
        mock.ExpectCommit()

        e.POST(basePath).
            WithHeaders(headers).
            WithJSON(Data).
            Expect().
            Status(http.StatusCreated).JSON().Object().ContainsMap(Data)
        test.ExpectationsMet(t, mock)
    })

    t.Run("creating tag fails", func(t *testing.T) {
        test.CheckSpaceMock(mock)
        sameNameCount(mock, 0, Data["name"])
        slugCheckMock(mock)
        mock.ExpectBegin()
        mock.ExpectQuery(`INSERT INTO "tags"`).
            WithArgs(test.AnyTime{}, test.AnyTime{}, nil, 1, 1, Data["name"], Data["slug"], Data["description"], Data["html_description"], Data["is_featured"], 1).
            WillReturnError(errors.New("cannot create tag"))
        mock.ExpectRollback()

        e.POST(basePath).
            WithHeaders(headers).
            WithJSON(Data).
            Expect().
            Status(http.StatusInternalServerError)
        test.ExpectationsMet(t, mock)
    })

    t.Run("tag with same name exist", func(t *testing.T) {
        test.CheckSpaceMock(mock)
        sameNameCount(mock, 1, Data["name"])

        e.POST(basePath).
            WithHeaders(headers).
            WithJSON(Data).
            Expect().
            Status(http.StatusUnprocessableEntity)
        test.ExpectationsMet(t, mock)
    })

    t.Run("create tag with slug is empty", func(t *testing.T) {
        test.CheckSpaceMock(mock)
        sameNameCount(mock, 0, Data["name"])
        slugCheckMock(mock)
        tagInsertMock(mock)
        mock.ExpectCommit()

        // a blank slug is expected to be generated from the name
        Data["slug"] = ""
        res := e.POST(basePath).
            WithHeaders(headers).
            WithJSON(Data).
            Expect().
            Status(http.StatusCreated).JSON().Object()
        Data["slug"] = "elections"
        res.ContainsMap(Data)
        test.ExpectationsMet(t, mock)
    })

    t.Run("cannot parse tag description", func(t *testing.T) {
        test.CheckSpaceMock(mock)
        sameNameCount(mock, 0, Data["name"])

        // swap in a description that is not valid editor-js JSON, then
        // restore the fixture afterwards
        Data["description"] = postgres.Jsonb{
            RawMessage: []byte(`{"block": "new"}`),
        }
        e.POST(basePath).
            WithHeaders(headers).
            WithJSON(Data).
            Expect().
            Status(http.StatusUnprocessableEntity)

        Data["description"] = postgres.Jsonb{
            RawMessage: []byte(`{"time":1617039625490,"blocks":[{"type":"paragraph","data":{"text":"Test Description"}}],"version":"2.19.0"}`),
        }
        test.ExpectationsMet(t, mock)
    })

    t.Run("create tag when meili is down", func(t *testing.T) {
        test.DisableMeiliGock(testServer.URL)
        test.CheckSpaceMock(mock)
        sameNameCount(mock, 0, Data["name"])
        slugCheckMock(mock)
        tagInsertMock(mock)
        mock.ExpectRollback()

        e.POST(basePath).
            WithHeaders(headers).
            WithJSON(Data).
            Expect().
            Status(http.StatusInternalServerError)
        test.ExpectationsMet(t, mock)
    })
}
<file_sep>/service/core/action/post/details.go
package post
import (
"fmt"
"net/http"
"strconv"
"github.com/factly/dega-server/config"
"github.com/factly/dega-server/service/core/action/author"
"github.com/factly/dega-server/service/core/model"
factCheckModel "github.com/factly/dega-server/service/fact-check/model"
"github.com/factly/x/errorx"
"github.com/factly/x/loggerx"
"github.com/factly/x/middlewarex"
"github.com/factly/x/renderx"
"github.com/go-chi/chi"
)
// details - Get post by id
// @Summary Show a post by id
// @Description Get post by ID
// @Tags Post
// @ID get-post-by-id
// @Produce json
// @Param X-User header string true "User ID"
// @Param X-Space header string true "Space ID"
// @Param post_id path string true "Post ID"
// @Success 200 {object} postData
// @Router /core/posts/{post_id} [get]
func details(w http.ResponseWriter, r *http.Request) {
    // Space ID is injected by middleware from the X-Space header.
    sID, err := middlewarex.GetSpace(r.Context())
    if err != nil {
        loggerx.Error(err)
        errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
        return
    }

    postID := chi.URLParam(r, "post_id")

    id, err := strconv.Atoi(postID)
    if err != nil {
        loggerx.Error(err)
        errorx.Render(w, errorx.Parser(errorx.InvalidID()))
        return
    }

    result := &postData{}
    result.Authors = make([]model.Author, 0)
    result.Claims = make([]factCheckModel.Claim, 0)

    postAuthors := []model.PostAuthor{}
    postClaims := []factCheckModel.PostClaim{}

    // Presetting the primary key makes the First() below fetch this id.
    result.ID = uint(id)

    // Pages are excluded: this endpoint serves posts only (is_page = false).
    err = config.DB.Model(&model.Post{}).Preload("Medium").Preload("Format").Preload("Tags").Preload("Categories").Preload("Space").Where(&model.Post{
        SpaceID: uint(sID),
    }).Where("is_page = ?", false).First(&result.Post).Error

    if err != nil {
        loggerx.Error(err)
        errorx.Render(w, errorx.Parser(errorx.RecordNotFound()))
        return
    }

    // Fact-check posts additionally carry their claims in display order.
    if result.Format.Slug == "fact-check" {
        // NOTE(review): the query error is discarded here; a DB failure
        // would silently yield an empty claim list.
        config.DB.Model(&factCheckModel.PostClaim{}).Where(&factCheckModel.PostClaim{
            PostID: uint(id),
        }).Preload("Claim").Preload("Claim.Rating").Preload("Claim.Rating.Medium").Preload("Claim.Claimant").Preload("Claim.Claimant.Medium").Find(&postClaims)

        result.ClaimOrder = make([]uint, len(postClaims))
        // appending all post claims
        // NOTE(review): the Position-1 index assumes positions are exactly
        // 1..len(postClaims); a gap or duplicate would panic or drop an
        // entry — confirm the writer side guarantees this invariant.
        for _, postClaim := range postClaims {
            result.Claims = append(result.Claims, postClaim.Claim)
            result.ClaimOrder[int(postClaim.Position-1)] = postClaim.ClaimID
        }
    }

    // fetch all authors
    // NOTE(review): this Find's error is also ignored.
    config.DB.Model(&model.PostAuthor{}).Where(&model.PostAuthor{
        PostID: uint(id),
    }).Find(&postAuthors)

    // Adding author
    // author.All presumably returns users from the external user service
    // keyed by stringified user ID (hence the fmt.Sprint below) — confirm.
    authors, err := author.All(r.Context())
    if err != nil {
        loggerx.Error(err)
        errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
        return
    }

    for _, postAuthor := range postAuthors {
        aID := fmt.Sprint(postAuthor.AuthorID)
        if author, found := authors[aID]; found {
            result.Authors = append(result.Authors, author)
        }
    }

    renderx.JSON(w, http.StatusOK, result)
}
<file_sep>/service/core/action/permissions/organisation/delete.go
package organisation
import (
"net/http"
"strconv"
"github.com/factly/dega-server/config"
"github.com/factly/dega-server/service/core/model"
"github.com/factly/x/errorx"
"github.com/factly/x/loggerx"
"github.com/factly/x/renderx"
"github.com/go-chi/chi"
)
// delete - Delete Organisation permission by id
// @Summary Delete a Organisation permission
// @Description Delete Organisation permission by ID
// @Tags Organisation_Permissions
// @ID delete-org-permission-by-id
// @Param X-User header string true "User ID"
// @Param permission_id path string true "Permission ID"
// @Param X-Space header string true "Space ID"
// @Success 200
// @Failure 400 {array} string
// @Router /core/permissions/organisations/{permission_id} [delete]
func delete(w http.ResponseWriter, r *http.Request) {
    // Parse the permission ID from the URL path.
    permissionID := chi.URLParam(r, "permission_id")
    id, err := strconv.Atoi(permissionID)
    if err != nil {
        loggerx.Error(err)
        errorx.Render(w, errorx.Parser(errorx.InvalidID()))
        return
    }

    result := model.OrganisationPermission{}
    result.ID = uint(id)

    // check record exists or not
    err = config.DB.First(&result).Error
    if err != nil {
        loggerx.Error(err)
        errorx.Render(w, errorx.Parser(errorx.RecordNotFound()))
        return
    }

    // FIX: the delete error was previously ignored, so a failed delete
    // still reported 200 OK to the caller.
    err = config.DB.Delete(&result).Error
    if err != nil {
        loggerx.Error(err)
        errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
        return
    }

    renderx.JSON(w, http.StatusOK, nil)
}
<file_sep>/service/core/model/info.go
package model
// PostCount is one row of the per-status post tally (a status/slug pair and
// the number of posts in that status).
type PostCount struct {
    Status string `json:"status"`
    Slug   string `json:"slug"`
    Count  int64  `json:"count"`
}

// Info aggregates entity counts: categories, tags, podcasts, episodes, and
// posts broken down by status.
type Info struct {
    Categories int64       `gorm:"column:categories" json:"categories"`
    Tags       int64       `gorm:"column:tags" json:"tags"`
    Posts      []PostCount `gorm:"column:posts" json:"posts"`
    Podcasts   int64       `gorm:"column:podcasts" json:"podcasts"`
    Episodes   int64       `gorm:"column:episodes" json:"episodes"`
}
<file_sep>/test/service/fact-check/claim/testvars.go
package claim
import (
"fmt"
"regexp"
"time"
"github.com/factly/dega-server/test/service/core/permissions/space"
"github.com/jinzhu/gorm/dialects/postgres"
"github.com/DATA-DOG/go-sqlmock"
"github.com/factly/dega-server/test"
"github.com/factly/dega-server/test/service/fact-check/claimant"
"github.com/factly/dega-server/test/service/fact-check/rating"
"github.com/gavv/httpexpect/v2"
)
// headers carry the user/space context expected by the API middleware.
var headers = map[string]string{
    "X-Space": "1",
    "X-User":  "1",
}

// Data is a valid claim request body shared by the tests.
var Data = map[string]interface{}{
    "claim":        "Claim",
    "slug":         "claim",
    "claim_date":   time.Now(),
    "checked_date": time.Now(),
    "claim_sources": postgres.Jsonb{
        RawMessage: []byte(`{"type":"claim sources"}`),
    },
    "description": postgres.Jsonb{
        RawMessage: []byte(`{"time":1617039625490,"blocks":[{"type":"paragraph","data":{"text":"Test Description"}}],"version":"2.19.0"}`),
    },
    "html_description": "<p>Test Description</p>",
    "claimant_id":      uint(1),
    "rating_id":        uint(1),
    "fact":             "test fact",
    "review_sources": postgres.Jsonb{
        RawMessage: []byte(`{"type":"review sources"}`),
    },
}

// claimList holds two list fixtures; note both deliberately share the slug
// "claim-test".
var claimList = []map[string]interface{}{
    {
        "claim":        "Claim 1",
        "slug":         "claim-test",
        "claim_date":   time.Time{},
        "checked_date": time.Time{},
        "claim_sources": postgres.Jsonb{
            RawMessage: []byte(`{"type":"claim sources 1"}`),
        },
        "description": postgres.Jsonb{
            RawMessage: []byte(`{"time":1617039625490,"blocks":[{"type":"paragraph","data":{"text":"Test Description 1"}}],"version":"2.19.0"}`),
        },
        "html_description": "<p>Test Description 1</p>",
        "claimant_id":      uint(1),
        "rating_id":        uint(1),
        "fact":             "test fact 1",
        "review_sources": postgres.Jsonb{
            RawMessage: []byte(`{"type":"review sources1"}`),
        },
    },
    {
        "claim":        "Claim 2",
        "slug":         "claim-test",
        "claim_date":   time.Time{},
        "checked_date": time.Time{},
        "claim_sources": postgres.Jsonb{
            RawMessage: []byte(`{"type":"claim sources 2"}`),
        },
        "description": postgres.Jsonb{
            RawMessage: []byte(`{"time":1617039625490,"blocks":[{"type":"paragraph","data":{"text":"Test Description 2"}}],"version":"2.19.0"}`),
        },
        "html_description": "<p>Test Description 2</p>",
        "claimant_id":      uint(1),
        "rating_id":        uint(1),
        "fact":             "test fact 2",
        "review_sources": postgres.Jsonb{
            RawMessage: []byte(`{"type":"review sources 2"}`),
        },
    },
}

// invalidData fails request validation (claim text too short).
var invalidData = map[string]interface{}{
    "claim": "a",
}

// columns lists the "claims" table columns in the exact order mocked rows
// supply their values.
var columns = []string{"id", "created_at", "updated_at", "deleted_at", "created_by_id", "updated_by_id", "claim", "slug", "claim_date", "checked_date", "claim_sources", "description", "html_description", "claimant_id", "rating_id", "fact", "review_sources", "space_id"}

// Common query matchers for the claims table (soft delete is an UPDATE of
// deleted_at).
var selectQuery = regexp.QuoteMeta(`SELECT * FROM "claims"`)
var deleteQuery = regexp.QuoteMeta(`UPDATE "claims" SET "deleted_at"=`)

// paginationQuery matches a paginated claim listing (LIMIT 1 OFFSET 1).
var paginationQuery = `SELECT \* FROM "claims" (.+) LIMIT 1 OFFSET 1`

// Routes under test.
var basePath = "/fact-check/claims"
var path = "/fact-check/claims/{claim_id}"
// slugCheckMock expects the slug-uniqueness lookup for the given claim and
// returns no rows (the slug is free).
func slugCheckMock(mock sqlmock.Sqlmock, claim map[string]interface{}) {
    prefix := fmt.Sprint(claim["slug"], "%")
    mock.ExpectQuery(regexp.QuoteMeta(`SELECT slug, space_id FROM "claims"`)).
        WithArgs(prefix, 1).
        WillReturnRows(sqlmock.NewRows(columns))
}
// claimInsertMock expects the transaction that creates a claim: BEGIN,
// existence checks for the associated claimant and rating, then the INSERT
// returning id 1. Expectation order matters (sqlmock is ordered by default).
func claimInsertMock(mock sqlmock.Sqlmock) {
    mock.ExpectBegin()
    claimant.SelectWithSpace(mock)
    rating.SelectWithSpace(mock)
    mock.ExpectQuery(`INSERT INTO "claims"`).
        WithArgs(test.AnyTime{}, test.AnyTime{}, nil, 1, 1, Data["claim"], Data["slug"], test.AnyTime{}, test.AnyTime{}, Data["claim_sources"], Data["description"], Data["html_description"], Data["claimant_id"], Data["rating_id"], Data["fact"], Data["review_sources"], 1).
        WillReturnRows(sqlmock.
            NewRows([]string{"id"}).
            AddRow(1))
}
// claimListMock sets up the full expectation chain for listing claims:
// space check, space-permission select, count, one claim row, then the
// claimant/rating preloads. Expectation order matters.
func claimListMock(mock sqlmock.Sqlmock) {
    test.CheckSpaceMock(mock)
    space.SelectQuery(mock, 1)
    claimCountQuery(mock, len(claimList))

    mock.ExpectQuery(selectQuery).
        WillReturnRows(sqlmock.NewRows(columns).
            AddRow(1, time.Now(), time.Now(), nil, 1, 1, claimList[0]["claim"], claimList[0]["slug"], claimList[0]["claim_date"], claimList[0]["checked_date"], claimList[0]["claim_sources"], claimList[0]["description"], claimList[0]["html_description"], claimList[0]["claimant_id"], claimList[0]["rating_id"], claimList[0]["fact"], claimList[0]["review_sources"], 1))

    claimant.SelectWithOutSpace(mock, claimant.Data)
    rating.SelectWithOutSpace(mock, rating.Data)
}
// claimantFKError simulates a foreign-key failure: the claimant lookup
// inside the transaction returns no rows, forcing a rollback.
func claimantFKError(mock sqlmock.Sqlmock) {
    mock.ExpectBegin()
    claimant.EmptyRowMock(mock)
    mock.ExpectRollback()
}
// ratingFKError simulates a foreign-key failure on the rating: the claimant
// check passes but the rating lookup returns no rows, forcing a rollback.
func ratingFKError(mock sqlmock.Sqlmock) {
    mock.ExpectBegin()
    claimant.SelectWithSpace(mock)
    rating.EmptyRowMock(mock)
    mock.ExpectRollback()
}
// claimUpdateMock expects the claim-update flow: claimant/rating FK checks,
// a first UPDATE touching timestamps, the FK checks again, the UPDATE with
// the new column values, and finally the re-select (with preloads) of the
// updated claim. Expectation order matters.
// NOTE(review): the `err` parameter is unused by this implementation; it is
// kept to avoid breaking callers.
func claimUpdateMock(mock sqlmock.Sqlmock, claim map[string]interface{}, err error) {
    mock.ExpectBegin()

    claimant.SelectWithSpace(mock)
    rating.SelectWithSpace(mock)
    // first UPDATE: timestamp columns only
    mock.ExpectExec(`UPDATE \"claims\"`).
        WithArgs(test.AnyTime{}, test.AnyTime{}, test.AnyTime{}, 1).
        WillReturnResult(sqlmock.NewResult(1, 1))

    claimant.SelectWithSpace(mock)
    rating.SelectWithSpace(mock)
    // second UPDATE: the actual column values from `claim`
    mock.ExpectExec(`UPDATE \"claims\"`).
        WithArgs(test.AnyTime{}, 1, claim["claim"], claim["slug"], claim["claim_sources"], claim["description"], claim["html_description"], claim["claimant_id"], claim["rating_id"], claim["fact"], claim["review_sources"], 1).
        WillReturnResult(sqlmock.NewResult(1, 1))

    // re-select of the updated record plus its associations
    SelectWithSpace(mock)
    claimant.SelectWithOutSpace(mock, claimant.Data)
    rating.SelectWithOutSpace(mock, rating.Data)
}
// SelectWithOutSpace expects a claim select by id only (no space filter),
// returning one row built from the given claim map, followed by the
// claimant/rating preload queries.
func SelectWithOutSpace(mock sqlmock.Sqlmock, claim map[string]interface{}) {
    mock.ExpectQuery(selectQuery).
        WithArgs(1).
        WillReturnRows(sqlmock.NewRows(columns).
            AddRow(1, time.Now(), time.Now(), nil, 1, 1, claim["claim"], claim["slug"], claim["claim_date"], claim["checked_date"], claim["claim_sources"], claim["description"], claim["html_description"], claim["claimant_id"], claim["rating_id"], claim["fact"], claim["review_sources"], 1))

    // Preload Claimant & Rating
    claimant.SelectWithOutSpace(mock, claimant.Data)
    rating.SelectWithOutSpace(mock, rating.Data)
}
// SelectWithSpace expects the claim SELECT filtered by id and space and
// returns one row built from the shared Data fixture.
func SelectWithSpace(mock sqlmock.Sqlmock) {
    rows := sqlmock.NewRows(columns)
    rows = rows.AddRow(1, time.Now(), time.Now(), nil, 1, 1, Data["claim"], Data["slug"], Data["claim_date"], Data["checked_date"], Data["claim_sources"], Data["description"], Data["html_description"], Data["claimant_id"], Data["rating_id"], Data["fact"], Data["review_sources"], 1)
    mock.ExpectQuery(selectQuery).WithArgs(1, 1).WillReturnRows(rows)
}
// recordNotFoundMock expects a lookup of a claim that does not exist
// (id 100) and returns an empty result set.
func recordNotFoundMock(mock sqlmock.Sqlmock) {
    empty := sqlmock.NewRows(columns)
    mock.ExpectQuery(selectQuery).WithArgs(1, 100).WillReturnRows(empty)
}
// claimPostExpect expects the "is this claim used by any post?" count query
// that runs before deletion, returning the given count.
func claimPostExpect(mock sqlmock.Sqlmock, count int) {
    countRows := sqlmock.NewRows([]string{"count"}).AddRow(count)
    mock.ExpectQuery(regexp.QuoteMeta(`SELECT count(*) FROM "post_claims"`)).
        WithArgs(1).
        WillReturnRows(countRows)
}
// claimCountQuery expects the count(*) query on "claims" and returns the
// given count.
func claimCountQuery(mock sqlmock.Sqlmock, count int) {
    countRows := sqlmock.NewRows([]string{"count"}).AddRow(count)
    mock.ExpectQuery(regexp.QuoteMeta(`SELECT count(*) FROM "claims"`)).WillReturnRows(countRows)
}
// validateAssociations asserts that the response embeds the expected
// claimant and rating objects. medium_id is removed from the shared
// fixtures first (presumably because the serialized response omits it —
// confirm against the claim serializer) and restored afterwards; mutating
// shared package state makes this helper unsafe for parallel tests.
func validateAssociations(result *httpexpect.Object) {
    delete(claimant.Data, "medium_id")
    delete(rating.Data, "medium_id")

    result.Value("claimant").
        Object().
        ContainsMap(claimant.Data)

    result.Value("rating").
        Object().
        ContainsMap(rating.Data)

    claimant.Data["medium_id"] = 1
    rating.Data["medium_id"] = 1
}
<file_sep>/test/service/core/format/create_test.go
package format
import (
"errors"
"net/http"
"net/http/httptest"
"regexp"
"testing"
"github.com/DATA-DOG/go-sqlmock"
"github.com/factly/dega-server/test/service/core/permissions/space"
"github.com/factly/dega-server/service"
"github.com/factly/dega-server/test"
"github.com/gavv/httpexpect/v2"
"gopkg.in/h2non/gock.v1"
)
// TestFormatCreate covers POST /core/formats: validation, the happy path,
// slug generation, DB failure, missing space permission, duplicate names
// and a Meili outage.
func TestFormatCreate(t *testing.T) {
    mock := test.SetupMockDB()

    test.MockServer()
    testServer := httptest.NewServer(service.RegisterRoutes())
    gock.New(testServer.URL).EnableNetworking().Persist()
    // FIX: gock.DisableNetworking() was deferred twice in the original
    // (once before and once after creating the test server); a single
    // deferred call is sufficient.
    defer gock.DisableNetworking()
    defer testServer.Close()

    // create httpexpect instance
    e := httpexpect.New(t, testServer.URL)

    t.Run("Unprocessable format", func(t *testing.T) {
        test.CheckSpaceMock(mock)

        e.POST(basePath).
            WithJSON(invalidData).
            WithHeaders(headers).
            Expect().
            Status(http.StatusUnprocessableEntity)
    })

    t.Run("Unable to decode format", func(t *testing.T) {
        test.CheckSpaceMock(mock)

        e.POST(basePath).
            WithHeaders(headers).
            Expect().
            Status(http.StatusUnprocessableEntity)
    })

    t.Run("create format", func(t *testing.T) {
        test.CheckSpaceMock(mock)
        space.SelectQuery(mock, 1)
        sameNameCount(mock, 0, Data["name"])
        slugCheckMock(mock)
        formatInsertMock(mock)
        mock.ExpectCommit()

        e.POST(basePath).
            WithHeaders(headers).
            WithJSON(Data).
            Expect().
            Status(http.StatusCreated).JSON().Object().ContainsMap(Data)
        test.ExpectationsMet(t, mock)
    })

    t.Run("create format with slug is empty", func(t *testing.T) {
        test.CheckSpaceMock(mock)
        space.SelectQuery(mock, 1)
        sameNameCount(mock, 0, Data["name"])
        slugCheckMock(mock)
        formatInsertMock(mock)
        mock.ExpectCommit()

        // a blank slug is expected to be generated from the name
        Data["slug"] = ""
        res := e.POST(basePath).
            WithHeaders(headers).
            WithJSON(Data).
            Expect().
            Status(http.StatusCreated).JSON().Object()
        Data["slug"] = "fact-check"
        res.ContainsMap(Data)
        test.ExpectationsMet(t, mock)
    })

    t.Run("creating format fails", func(t *testing.T) {
        test.CheckSpaceMock(mock)
        space.SelectQuery(mock, 1)
        sameNameCount(mock, 0, Data["name"])
        slugCheckMock(mock)
        mock.ExpectBegin()
        mock.ExpectQuery(`INSERT INTO "formats"`).
            WithArgs(test.AnyTime{}, test.AnyTime{}, nil, 1, 1, Data["name"], Data["slug"], "", 1).
            WillReturnError(errors.New("cannot create format"))
        mock.ExpectRollback()

        e.POST(basePath).
            WithHeaders(headers).
            WithJSON(Data).
            Expect().
            Status(http.StatusInternalServerError)
        test.ExpectationsMet(t, mock)
    })

    t.Run("create fact-check when not permitted", func(t *testing.T) {
        test.CheckSpaceMock(mock)
        // no space_permissions row -> the request is rejected
        mock.ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "space_permissions"`)).
            WithArgs(1).
            WillReturnRows(sqlmock.NewRows([]string{"id", "created_at", "updated_at", "deleted_at", "created_by_id", "updated_by_id", "space_id", "fact_check"}))

        e.POST(basePath).
            WithHeaders(headers).
            WithJSON(Data).
            Expect().
            Status(http.StatusUnprocessableEntity)
        test.ExpectationsMet(t, mock)
    })

    t.Run("format with same name exist", func(t *testing.T) {
        test.CheckSpaceMock(mock)
        space.SelectQuery(mock, 1)
        sameNameCount(mock, 1, Data["name"])

        e.POST(basePath).
            WithHeaders(headers).
            WithJSON(Data).
            Expect().
            Status(http.StatusUnprocessableEntity)
        test.ExpectationsMet(t, mock)
    })

    t.Run("create format when meili is down", func(t *testing.T) {
        test.DisableMeiliGock(testServer.URL)
        test.CheckSpaceMock(mock)
        space.SelectQuery(mock, 1)
        sameNameCount(mock, 0, Data["name"])
        slugCheckMock(mock)
        formatInsertMock(mock)
        mock.ExpectRollback()

        e.POST(basePath).
            WithHeaders(headers).
            WithJSON(Data).
            Expect().
            Status(http.StatusInternalServerError)
        test.ExpectationsMet(t, mock)
    })
}
<file_sep>/service/core/action/category/details.go
package category
import (
"net/http"
"strconv"
"github.com/factly/dega-server/config"
"github.com/factly/dega-server/service/core/model"
"github.com/factly/x/errorx"
"github.com/factly/x/loggerx"
"github.com/factly/x/middlewarex"
"github.com/factly/x/renderx"
"github.com/go-chi/chi"
)
// details - Get category by id
// @Summary Show a category by id
// @Description Get category by ID
// @Tags Category
// @ID get-category-by-id
// @Produce json
// @Param X-User header string true "User ID"
// @Param X-Space header string true "Space ID"
// @Param category_id path string true "Category ID"
// @Success 200 {object} model.Category
// @Router /core/categories/{category_id} [get]
func details(w http.ResponseWriter, r *http.Request) {
    // Space ID is injected by middleware from the X-Space header.
    spaceID, err := middlewarex.GetSpace(r.Context())
    if err != nil {
        loggerx.Error(err)
        errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
        return
    }

    // Parse the category ID from the URL path.
    id, err := strconv.Atoi(chi.URLParam(r, "category_id"))
    if err != nil {
        loggerx.Error(err)
        errorx.Render(w, errorx.Parser(errorx.InvalidID()))
        return
    }

    category := &model.Category{}
    category.ID = uint(id)

    // Fetch the category (medium preloaded), scoped to the caller's space.
    err = config.DB.Model(&model.Category{}).Preload("Medium").Where(&model.Category{
        SpaceID: uint(spaceID),
    }).First(&category).Error
    if err != nil {
        loggerx.Error(err)
        errorx.Render(w, errorx.Parser(errorx.RecordNotFound()))
        return
    }

    renderx.JSON(w, http.StatusOK, category)
}
<file_sep>/test/service/core/post/details_test.go
package post
import (
"net/http"
"net/http/httptest"
"testing"
"github.com/factly/dega-server/service"
"github.com/factly/dega-server/test"
"github.com/gavv/httpexpect/v2"
"gopkg.in/h2non/gock.v1"
)
// TestPostDetails covers GET /core/posts/{post_id}: invalid id, missing
// record, and the happy path with claims and authors attached.
// NOTE(review): unlike sibling tests this does not call test.MockServer();
// confirm the external-service mocks are genuinely not needed here.
func TestPostDetails(t *testing.T) {
    mock := test.SetupMockDB()

    testServer := httptest.NewServer(service.RegisterRoutes())
    gock.New(testServer.URL).EnableNetworking().Persist()
    defer gock.DisableNetworking()
    defer testServer.Close()

    // create httpexpect instance
    e := httpexpect.New(t, testServer.URL)

    t.Run("invalid post id", func(t *testing.T) {
        test.CheckSpaceMock(mock)

        e.GET(path).
            WithPath("post_id", "invalid_id").
            WithHeaders(headers).
            Expect().
            Status(http.StatusBadRequest)
    })

    t.Run("post record not found", func(t *testing.T) {
        test.CheckSpaceMock(mock)
        recordNotFoundMock(mock)

        e.GET(path).
            WithPath("post_id", "100").
            WithHeaders(headers).
            Expect().
            Status(http.StatusNotFound)
    })

    t.Run("get post by id", func(t *testing.T) {
        test.CheckSpaceMock(mock)
        postSelectWithSpace(mock)
        preloadMock(mock, 1)
        postClaimSelectMock(mock)
        postAuthorSelectMock(mock)

        e.GET(path).
            WithPath("post_id", 1).
            WithHeaders(headers).
            Expect().
            Status(http.StatusOK).JSON().Object().ContainsMap(postData)
    })
}
<file_sep>/test/service/core/search/list_test.go
package search
import (
"net/http"
"net/http/httptest"
"testing"
"github.com/factly/dega-server/service"
"github.com/factly/dega-server/test"
"github.com/gavv/httpexpect"
"gopkg.in/h2non/gock.v1"
)
// TestPostList exercises the POST search endpoint backed by Meilisearch:
// space-header validation, undecodable and invalid bodies, a successful
// search, and the Meili-unavailable failure path.
func TestPostList(t *testing.T) {
	mock := test.SetupMockDB()
	test.MeiliGock()
	test.KetoGock()
	test.KavachGock()

	testServer := httptest.NewServer(service.RegisterRoutes())
	gock.New(testServer.URL).EnableNetworking().Persist()
	// Fix: the original deferred gock.DisableNetworking() twice (once even
	// before the server existed); one deferred call is sufficient.
	defer gock.DisableNetworking()
	defer testServer.Close()

	// create httpexpect instance
	e := httpexpect.New(t, testServer.URL)

	t.Run("invalid space id in header", func(t *testing.T) {
		// Non-numeric X-Space header must be rejected before any DB work.
		e.POST(path).
			WithHeaders(map[string]string{
				"X-User":  "1",
				"X-Space": "abc",
			}).
			WithJSON(Data).
			Expect().
			Status(http.StatusUnauthorized)
	})

	t.Run("undecodable body", func(t *testing.T) {
		test.CheckSpaceMock(mock)

		e.POST(path).
			WithHeaders(headers).
			WithJSON(undecodableData).
			Expect().
			Status(http.StatusUnprocessableEntity)
		test.ExpectationsMet(t, mock)
	})

	t.Run("invalid body", func(t *testing.T) {
		test.CheckSpaceMock(mock)

		e.POST(path).
			WithHeaders(headers).
			WithJSON(invalidData).
			Expect().
			Status(http.StatusUnprocessableEntity)
		test.ExpectationsMet(t, mock)
	})

	t.Run("search entities with query 'test'", func(t *testing.T) {
		test.CheckSpaceMock(mock)

		e.POST(path).
			WithHeaders(headers).
			WithJSON(Data).
			Expect().
			Status(http.StatusOK)
		test.ExpectationsMet(t, mock)
	})

	t.Run("meili server is down", func(t *testing.T) {
		test.DisableMeiliGock(testServer.URL)
		test.CheckSpaceMock(mock)

		e.POST(path).
			WithHeaders(headers).
			WithJSON(Data).
			Expect().
			Status(http.StatusInternalServerError)
		test.ExpectationsMet(t, mock)
	})
}
<file_sep>/test/service/core/tag/update_test.go
package tag
import (
"fmt"
"net/http"
"net/http/httptest"
"testing"
"time"
"github.com/DATA-DOG/go-sqlmock"
"github.com/factly/dega-server/service"
"github.com/factly/dega-server/test"
"github.com/gavv/httpexpect/v2"
"github.com/jinzhu/gorm/dialects/postgres"
"gopkg.in/h2non/gock.v1"
)
// TestTagUpdate exercises PUT tag-by-id: id/record/body validation, slug
// regeneration, duplicate-name rejection, bad description JSON, and the
// Meilisearch-down rollback path. sqlmock expectations are order-sensitive,
// so each subtest sets up exactly the queries the handler will run.
func TestTagUpdate(t *testing.T) {
	mock := test.SetupMockDB()
	test.MockServer()
	// NOTE(review): DisableNetworking is deferred twice (here and below);
	// the second registration is redundant but harmless.
	defer gock.DisableNetworking()
	testServer := httptest.NewServer(service.RegisterRoutes())
	gock.New(testServer.URL).EnableNetworking().Persist()
	defer gock.DisableNetworking()
	defer testServer.Close()
	// create httpexpect instance
	e := httpexpect.New(t, testServer.URL)
	// Non-numeric path param must fail before any DB access.
	t.Run("invalid tag id", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		e.PUT(path).
			WithPath("tag_id", "invalid_id").
			WithHeaders(headers).
			Expect().
			Status(http.StatusBadRequest)
	})
	t.Run("tag record not found", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		recordNotFoundMock(mock)
		e.PUT(path).
			WithPath("tag_id", "100").
			WithHeaders(headers).
			Expect().
			Status(http.StatusNotFound)
	})
	// Missing JSON body -> decode failure after the record lookup succeeds.
	t.Run("Unable to decode tag data", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		SelectMock(mock, Data, 1, 1)
		e.PUT(path).
			WithPath("tag_id", 1).
			WithHeaders(headers).
			Expect().
			Status(http.StatusUnprocessableEntity)
	})
	t.Run("Unprocessable tag", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		SelectMock(mock, Data, 1, 1)
		e.PUT(path).
			WithPath("tag_id", 1).
			WithHeaders(headers).
			WithJSON(invalidData).
			Expect().
			Status(http.StatusUnprocessableEntity)
	})
	// Happy path: same slug, update committed, response echoes the payload.
	t.Run("update tag", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		updatedTag := map[string]interface{}{
			"name":        "Elections",
			"slug":        "elections",
			"is_featured": true,
			"description": postgres.Jsonb{
				RawMessage: []byte(`{"time":1617039625490,"blocks":[{"type":"paragraph","data":{"text":"Test Description"}}],"version":"2.19.0"}`),
			},
			"html_description": "<p>Test Description</p>",
		}
		SelectMock(mock, Data, 1, 1)
		tagUpdateMock(mock, updatedTag)
		mock.ExpectCommit()
		e.PUT(path).
			WithPath("tag_id", 1).
			WithHeaders(headers).
			WithJSON(updatedTag).
			Expect().
			Status(http.StatusOK).JSON().Object().ContainsMap(updatedTag)
	})
	// Empty slug in the payload: the handler generates one from the name and
	// suffixes "-1" because "elections" already exists in the space.
	t.Run("update tag by id with empty slug", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		updatedTag := map[string]interface{}{
			"name":        "Elections",
			"slug":        "elections-1",
			"is_featured": true,
			"description": postgres.Jsonb{
				RawMessage: []byte(`{"time":1617039625490,"blocks":[{"type":"paragraph","data":{"text":"Test Description"}}],"version":"2.19.0"}`),
			},
			"html_description": "<p>Test Description</p>",
		}
		SelectMock(mock, Data, 1, 1)
		mock.ExpectQuery(`SELECT slug, space_id FROM "tags"`).
			WithArgs("elections%", 1).
			WillReturnRows(sqlmock.NewRows(Columns).
				AddRow(1, time.Now(), time.Now(), nil, 1, 1, updatedTag["name"], "elections", updatedTag["description"], updatedTag["html_description"], false, 1))
		tagUpdateMock(mock, updatedTag)
		mock.ExpectCommit()
		Data["slug"] = ""
		e.PUT(path).
			WithPath("tag_id", 1).
			WithHeaders(headers).
			WithJSON(Data).
			Expect().
			Status(http.StatusOK).JSON().Object().ContainsMap(updatedTag)
		// Restore shared fixture for later subtests.
		Data["slug"] = "elections"
	})
	// New slug that does not collide: slug-availability query returns no rows.
	t.Run("update tag with different slug", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		updatedTag := map[string]interface{}{
			"name":        "Elections",
			"slug":        "testing-slug",
			"is_featured": true,
			"description": postgres.Jsonb{
				RawMessage: []byte(`{"time":1617039625490,"blocks":[{"type":"paragraph","data":{"text":"Test Description"}}],"version":"2.19.0"}`),
			},
			"html_description": "<p>Test Description</p>",
		}
		SelectMock(mock, Data, 1, 1)
		mock.ExpectQuery(`SELECT slug, space_id FROM "tags"`).
			WithArgs(fmt.Sprint(updatedTag["slug"], "%"), 1).
			WillReturnRows(sqlmock.NewRows([]string{"slug", "space_id"}))
		tagUpdateMock(mock, updatedTag)
		mock.ExpectCommit()
		e.PUT(path).
			WithPath("tag_id", 1).
			WithHeaders(headers).
			WithJSON(updatedTag).
			Expect().
			Status(http.StatusOK).JSON().Object().ContainsMap(updatedTag)
	})
	// Renaming to a name that already exists in the space is rejected.
	t.Run("tag with same name exist", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		updatedTag := map[string]interface{}{
			"name":        "NewElections",
			"slug":        "elections",
			"is_featured": true,
			"description": postgres.Jsonb{
				RawMessage: []byte(`{"time":1617039625490,"blocks":[{"type":"paragraph","data":{"text":"Test Description"}}],"version":"2.19.0"}`),
			},
			"html_description": "<p>Test Description</p>",
		}
		SelectMock(mock, Data, 1, 1)
		sameNameCount(mock, 1, updatedTag["name"])
		e.PUT(path).
			WithPath("tag_id", 1).
			WithHeaders(headers).
			WithJSON(updatedTag).
			Expect().
			Status(http.StatusUnprocessableEntity)
	})
	// Description JSON that the editor-js parser cannot handle -> 422.
	t.Run("cannot parse tag description", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		updatedTag := map[string]interface{}{
			"name":        "NewElections",
			"slug":        "elections",
			"is_featured": true,
			"description": postgres.Jsonb{
				RawMessage: []byte(`{"time":1617039625490,"blocks":[{"type":"paragraph","data":{"text":"Test Description"}}],"version":"2.19.0"}`),
			},
			"html_description": "<p>Test Description</p>",
		}
		SelectMock(mock, Data, 1, 1)
		sameNameCount(mock, 0, updatedTag["name"])
		updatedTag["description"] = postgres.Jsonb{
			RawMessage: []byte(`{"block": "new"}`),
		}
		e.PUT(path).
			WithPath("tag_id", 1).
			WithHeaders(headers).
			WithJSON(updatedTag).
			Expect().
			Status(http.StatusUnprocessableEntity)
		// Restore the valid description for subsequent subtests.
		updatedTag["description"] = postgres.Jsonb{
			RawMessage: []byte(`{"time":1617039625490,"blocks":[{"type":"paragraph","data":{"text":"Test Description"}}],"version":"2.19.0"}`),
		}
		test.ExpectationsMet(t, mock)
	})
	// Meili index update fails -> DB transaction must roll back with a 500.
	t.Run("update tag when meili is down", func(t *testing.T) {
		test.DisableMeiliGock(testServer.URL)
		test.CheckSpaceMock(mock)
		updatedTag := map[string]interface{}{
			"name":        "Elections",
			"slug":        "elections",
			"is_featured": true,
			"description": postgres.Jsonb{
				RawMessage: []byte(`{"time":1617039625490,"blocks":[{"type":"paragraph","data":{"text":"Test Description"}}],"version":"2.19.0"}`),
			},
			"html_description": "<p>Test Description</p>",
		}
		SelectMock(mock, Data, 1, 1)
		tagUpdateMock(mock, updatedTag)
		mock.ExpectRollback()
		e.PUT(path).
			WithPath("tag_id", 1).
			WithHeaders(headers).
			WithJSON(updatedTag).
			Expect().
			Status(http.StatusInternalServerError)
	})
}
<file_sep>/test/service/fact-check/claim/update_test.go
package claim
import (
"fmt"
"net/http"
"net/http/httptest"
"testing"
"time"
"github.com/DATA-DOG/go-sqlmock"
"github.com/factly/dega-server/service"
"github.com/factly/dega-server/test"
"github.com/factly/dega-server/test/service/core/permissions/space"
"github.com/factly/dega-server/test/service/fact-check/claimant"
"github.com/factly/dega-server/test/service/fact-check/rating"
"github.com/gavv/httpexpect/v2"
"github.com/jinzhu/gorm/dialects/postgres"
"gopkg.in/h2non/gock.v1"
)
// updatedClaim is the shared request payload used by the TestClaimUpdate
// subtests; individual subtests mutate "slug" and "description" and restore
// them afterwards.
var updatedClaim = map[string]interface{}{
	"claim":        "Claim",
	"claim_date":   time.Now(),
	"checked_date": time.Now(),
	"claim_sources": postgres.Jsonb{
		RawMessage: []byte(`{"type":"claim sources"}`),
	},
	"description": postgres.Jsonb{
		RawMessage: []byte(`{"time":1617039625490,"blocks":[{"type":"paragraph","data":{"text":"Test Description"}}],"version":"2.19.0"}`),
	},
	"html_description": "<p>Test Description</p>",
	"claimant_id":      uint(1),
	"rating_id":        uint(1),
	"fact":             "test fact",
	"review_tag_line": postgres.Jsonb{
		RawMessage: []byte(`{"type":"review tag line"}`),
	},
	"review_sources": postgres.Jsonb{
		RawMessage: []byte(`{"type":"review sources"}`),
	},
}
// TestClaimUpdate exercises PUT claim-by-id: id/record/body validation,
// slug handling, cross-space claimant/rating rejection, and the
// Meilisearch-down rollback path. sqlmock expectations are order-sensitive.
func TestClaimUpdate(t *testing.T) {
	mock := test.SetupMockDB()
	test.MockServer()
	// NOTE(review): DisableNetworking is deferred twice (here and below);
	// the second registration is redundant but harmless.
	defer gock.DisableNetworking()
	testServer := httptest.NewServer(service.RegisterRoutes())
	gock.New(testServer.URL).EnableNetworking().Persist()
	defer gock.DisableNetworking()
	defer testServer.Close()
	// create httpexpect instance
	e := httpexpect.New(t, testServer.URL)
	t.Run("invalid claim id", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)
		e.PUT(path).
			WithPath("claim_id", "invalid_id").
			WithHeaders(headers).
			Expect().
			Status(http.StatusBadRequest)
	})
	t.Run("claim record not found", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)
		recordNotFoundMock(mock)
		e.PUT(path).
			WithPath("claim_id", "100").
			WithHeaders(headers).
			WithJSON(updatedClaim).
			Expect().
			Status(http.StatusNotFound)
	})
	// Missing JSON body -> decode failure.
	t.Run("Unable to decode claim data", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)
		e.PUT(path).
			WithPath("claim_id", 1).
			WithHeaders(headers).
			Expect().
			Status(http.StatusUnprocessableEntity)
		test.ExpectationsMet(t, mock)
	})
	t.Run("Unprocessable claim", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)
		e.PUT(path).
			WithPath("claim_id", 1).
			WithHeaders(headers).
			WithJSON(invalidData).
			Expect().
			Status(http.StatusUnprocessableEntity)
		test.ExpectationsMet(t, mock)
	})
	// Happy path: update committed, associations validated on the response.
	t.Run("update claim", func(t *testing.T) {
		updatedClaim["slug"] = "claim"
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)
		SelectWithSpace(mock)
		claimUpdateMock(mock, updatedClaim, nil)
		mock.ExpectCommit()
		result := e.PUT(path).
			WithPath("claim_id", 1).
			WithHeaders(headers).
			WithJSON(updatedClaim).
			Expect().
			Status(http.StatusOK).JSON().Object()
		validateAssociations(result)
		test.ExpectationsMet(t, mock)
	})
	// Description JSON the editor-js parser cannot handle -> 422.
	t.Run("cannot parse claim description", func(t *testing.T) {
		updatedClaim["slug"] = "claim"
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)
		SelectWithSpace(mock)
		updatedClaim["description"] = postgres.Jsonb{
			RawMessage: []byte(`{"block": "new"}`),
		}
		e.PUT(path).
			WithPath("claim_id", 1).
			WithHeaders(headers).
			WithJSON(updatedClaim).
			Expect().
			Status(http.StatusUnprocessableEntity)
		// Restore the valid description for subsequent subtests.
		updatedClaim["description"] = postgres.Jsonb{
			RawMessage: []byte(`{"time":1617039625490,"blocks":[{"type":"paragraph","data":{"text":"Test Description"}}],"version":"2.19.0"}`),
		}
		test.ExpectationsMet(t, mock)
	})
	// Empty slug in the payload: handler derives one from the claim text.
	t.Run("update claim by id with empty slug", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)
		updatedClaim["slug"] = "claim"
		SelectWithSpace(mock)
		slugCheckMock(mock, Data)
		claimUpdateMock(mock, updatedClaim, nil)
		mock.ExpectCommit()
		Data["slug"] = ""
		result := e.PUT(path).
			WithPath("claim_id", 1).
			WithHeaders(headers).
			WithJSON(Data).
			Expect().
			Status(http.StatusOK).JSON().Object()
		Data["slug"] = "claim"
		validateAssociations(result)
		test.ExpectationsMet(t, mock)
	})
	// New slug that does not collide: availability query returns no rows.
	t.Run("update claim with different slug", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)
		updatedClaim["slug"] = "claim-test"
		SelectWithSpace(mock)
		mock.ExpectQuery(`SELECT slug, space_id FROM "claims"`).
			WithArgs(fmt.Sprint(updatedClaim["slug"], "%"), 1).
			WillReturnRows(sqlmock.NewRows([]string{"slug", "space_id"}))
		claimUpdateMock(mock, updatedClaim, nil)
		mock.ExpectCommit()
		result := e.PUT(path).
			WithPath("claim_id", 1).
			WithHeaders(headers).
			WithJSON(updatedClaim).
			Expect().
			Status(http.StatusOK).JSON().Object()
		validateAssociations(result)
		test.ExpectationsMet(t, mock)
	})
	// Referenced claimant exists in a different space -> rollback + 500.
	t.Run("claimant do not belong to same space", func(t *testing.T) {
		updatedClaim["slug"] = "claim"
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)
		SelectWithSpace(mock)
		mock.ExpectBegin()
		claimant.EmptyRowMock(mock)
		mock.ExpectRollback()
		e.PUT(path).
			WithPath("claim_id", 1).
			WithHeaders(headers).
			WithJSON(updatedClaim).
			Expect().
			Status(http.StatusInternalServerError)
		test.ExpectationsMet(t, mock)
	})
	// Referenced rating exists in a different space -> rollback + 500.
	t.Run("rating do not belong to same space", func(t *testing.T) {
		updatedClaim["slug"] = "claim"
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)
		SelectWithSpace(mock)
		mock.ExpectBegin()
		claimant.SelectWithSpace(mock)
		rating.EmptyRowMock(mock)
		mock.ExpectRollback()
		e.PUT(path).
			WithPath("claim_id", 1).
			WithHeaders(headers).
			WithJSON(updatedClaim).
			Expect().
			Status(http.StatusInternalServerError)
		test.ExpectationsMet(t, mock)
	})
	// Meili index update fails -> DB transaction must roll back with a 500.
	t.Run("update claim when meili is down", func(t *testing.T) {
		test.DisableMeiliGock(testServer.URL)
		updatedClaim["slug"] = "claim"
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)
		SelectWithSpace(mock)
		claimUpdateMock(mock, updatedClaim, nil)
		mock.ExpectRollback()
		e.PUT(path).
			WithPath("claim_id", 1).
			WithHeaders(headers).
			WithJSON(updatedClaim).
			Expect().
			Status(http.StatusInternalServerError)
		test.ExpectationsMet(t, mock)
	})
}
<file_sep>/service/core/action/info/details.go
package info
import (
"fmt"
"net/http"
"github.com/factly/dega-server/config"
"github.com/factly/dega-server/service/core/model"
podcastModel "github.com/factly/dega-server/service/podcast/model"
"github.com/factly/x/errorx"
"github.com/factly/x/loggerx"
"github.com/factly/x/middlewarex"
"github.com/factly/x/renderx"
)
// details - Get info
// @Summary Show content counts for the space
// @Description Get counts of categories, tags, podcasts, episodes and posts (grouped by format and status) for the space in the X-Space header
// @Tags Info
// @ID get-info-by-id
// @Produce json
// @Param X-User header string true "User ID"
// @Param X-Space header string true "Space ID"
// @Success 200 {object} model.Info
// @Router /core/info [get]
func details(w http.ResponseWriter, r *http.Request) {
	// Space id comes from the X-Space header via middleware; every count
	// below is scoped to this space.
	sID, err := middlewarex.GetSpace(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}
	// Fix: the original had a second, dead `if err != nil` check here
	// (copy-paste residue from an -by-id handler; err is always nil at
	// that point). It has been removed along with the bogus `info_id`
	// swagger parameter.
	result := &model.Info{}
	err = config.DB.Model(&model.Category{}).Where(&model.Category{
		SpaceID: uint(sID),
	}).Count(&result.Categories).Error
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
		return
	}
	err = config.DB.Model(&model.Tag{}).Where(&model.Tag{
		SpaceID: uint(sID),
	}).Count(&result.Tags).Error
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
		return
	}
	err = config.DB.Model(&podcastModel.Podcast{}).Where(&podcastModel.Podcast{
		SpaceID: uint(sID),
	}).Count(&result.Podcasts).Error
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
		return
	}
	err = config.DB.Model(&podcastModel.Episode{}).Where(&podcastModel.Episode{
		SpaceID: uint(sID),
	}).Count(&result.Episodes).Error
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
		return
	}
	result.Posts = make([]model.PostCount, 0)
	// Fix: fmt.Sprint inserts no separator between a non-string operand and
	// an adjacent string, so the original produced "space_id = 1group by"
	// (it only worked because the SQL lexer splits "1group" into two
	// tokens). The leading space in " group by" restores well-formed SQL.
	err = config.DB.Raw(fmt.Sprint("SELECT formats.slug, posts.status, COUNT (*) FROM posts JOIN formats ON posts.format_id = formats.id where posts.deleted_at IS NULL AND posts.space_id = ", sID, " group by posts.status, formats.slug")).Scan(&result.Posts).Error
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
		return
	}
	renderx.JSON(w, http.StatusOK, result)
}
<file_sep>/service/core/action/permissions/organisation/route.go
package organisation
import (
"github.com/factly/dega-server/config"
"github.com/factly/dega-server/util"
"github.com/factly/x/middlewarex"
"github.com/go-chi/chi"
)
// organisationPermission is the request body for creating/updating an
// organisation permission: the target organisation and the number of spaces
// it is allowed.
type organisationPermission struct {
	OrganisationID uint  `json:"organisation_id" validate:"required"`
	Spaces         int64 `json:"spaces"`
}

// userContext is the context key under which these handlers stash the acting
// user's id.
var userContext config.ContextKey = "org_perm_user"
// Router - routes for organisation permissions. Listing, creating, updating
// and deleting permissions require super-organisation membership; "/my" is
// open to any authenticated user.
func Router() chi.Router {
	r := chi.NewRouter()
	app := "dega"

	// The CheckSuperOrganisation middleware is identical for every guarded
	// route, so construct it once and reuse it.
	superOrgOnly := middlewarex.CheckSuperOrganisation(app, util.GetOrganisation)

	r.With(superOrgOnly).Get("/", list)
	r.With(superOrgOnly).Post("/", create)
	r.Get("/my", details)
	r.Route("/{permission_id}", func(r chi.Router) {
		r.With(superOrgOnly).Put("/", update)
		r.With(superOrgOnly).Delete("/", delete)
	})

	return r
}
<file_sep>/service/core/action/permissions/organisation/details.go
package organisation
import (
"encoding/json"
"fmt"
"net/http"
"github.com/factly/dega-server/config"
"github.com/factly/dega-server/service/core/model"
"github.com/factly/dega-server/util"
"github.com/factly/x/errorx"
"github.com/factly/x/loggerx"
"github.com/factly/x/middlewarex"
"github.com/factly/x/renderx"
)
// orgPermissionRes is the response for "my organisation permissions": the
// organisation-level record, the permission records of all its spaces, and
// whether the caller is an admin of the super organisation.
type orgPermissionRes struct {
	model.OrganisationPermission
	SpacePermissions []model.SpacePermission `json:"space_permissions"`
	IsAdmin          bool                    `json:"is_admin,omitempty"`
}
// details - Get my organisation permissions
// @Summary Show a my organisation permissions
// @Description Get my organisation permissions
// @Tags Organisation_Permissions
// @ID get-org-permission-by-id
// @Produce json
// @Param X-User header string true "User ID"
// @Param X-Space header string true "Space ID"
// @Success 200 {object} orgPermissionRes
// @Router /core/permissions/organisations/my [get]
// details returns the caller's organisation permission record, the
// permission records of every space in the organisation, and an is_admin
// flag for super-organisation owners.
func details(w http.ResponseWriter, r *http.Request) {
	oID, err := util.GetOrganisation(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}
	uID, err := middlewarex.GetUser(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}

	result := orgPermissionRes{}
	err = config.DB.Model(&model.OrganisationPermission{}).Where(&model.OrganisationPermission{
		OrganisationID: uint(oID),
	}).First(&result.OrganisationPermission).Error
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.RecordNotFound()))
		return
	}

	// Mark the caller as admin when this organisation is the configured
	// super organisation in Keto AND the user owns it in Kavach.
	// Fix: the original discarded the request error and dereferenced a
	// possibly-nil response (panic when Keto is unreachable), and never
	// closed the response body.
	resp, kerr := util.KetoGetRequest("/engines/acp/ory/regex/policies/app:dega:superorg")
	if kerr == nil && resp != nil {
		defer resp.Body.Close()
		if resp.StatusCode == http.StatusOK {
			var policy model.KetoPolicy
			if derr := json.NewDecoder(resp.Body).Decode(&policy); derr == nil && len(policy.Subjects) > 0 && policy.Subjects[0] == fmt.Sprint(oID) {
				isOwner, _ := util.CheckOwnerFromKavach(uID, oID)
				result.IsAdmin = isOwner
			}
		}
	}

	// Get all spaces of organisation
	spaceList := make([]model.Space, 0)
	config.DB.Model(&model.Space{}).Where(&model.Space{
		OrganisationID: oID,
	}).Find(&spaceList)

	spaceIDs := make([]uint, 0)
	for _, space := range spaceList {
		spaceIDs = append(spaceIDs, space.ID)
	}

	// Fetch all the spaces's permissions
	err = config.DB.Model(&model.SpacePermission{}).Where("space_id IN (?)", spaceIDs).Find(&result.SpacePermissions).Error
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.DBError()))
		return
	}

	renderx.JSON(w, http.StatusOK, result)
}
<file_sep>/service/core/model/format.go
package model
import (
"github.com/factly/dega-server/config"
"github.com/jinzhu/gorm/dialects/postgres"
"gorm.io/gorm"
)
// Format model: a post format (e.g. article, fact-check) scoped to a space.
type Format struct {
	config.Base
	// Display name; required on create/update.
	Name string `gorm:"column:name" json:"name" validate:"required"`
	// URL-safe identifier, unique within the space; required.
	Slug        string         `gorm:"column:slug" json:"slug" validate:"required"`
	Description string         `gorm:"column:description" json:"description"`
	MetaFields  postgres.Jsonb `gorm:"column:meta_fields" json:"meta_fields" swaggertype:"primitive,string"`
	SpaceID     uint           `gorm:"column:space_id" json:"space_id"`
	Space       *Space         `json:"space,omitempty"`
}

// formatUser is the context key under which handlers store the acting user's
// id for the BeforeCreate hook below.
var formatUser config.ContextKey = "format_user"
// BeforeCreate hook: stamps CreatedByID/UpdatedByID from the user id that
// the handler stored in the statement context; a no-op when absent.
func (format *Format) BeforeCreate(tx *gorm.DB) error {
	userID := tx.Statement.Context.Value(formatUser)
	if userID == nil {
		return nil
	}
	// The handlers store the id as an int; any other type panics, matching
	// the original behavior.
	id := uint(userID.(int))
	format.CreatedByID = id
	format.UpdatedByID = id
	return nil
}
<file_sep>/test/testVars.go
package test
import (
"encoding/json"
"github.com/jinzhu/gorm/dialects/postgres"
)
// NilJsonb returns a Jsonb wrapping the JSON "null" literal, used by tests
// to represent an unset/empty jsonb column.
func NilJsonb() postgres.Jsonb {
	// json.Marshal(nil) always yields []byte("null") and cannot fail.
	nullJSON, _ := json.Marshal(nil)
	return postgres.Jsonb{RawMessage: nullJSON}
}
<file_sep>/service/fact-check/action/claim/update.go
package claim
import (
"encoding/json"
"errors"
"net/http"
"reflect"
"strconv"
"github.com/factly/dega-server/config"
"github.com/factly/dega-server/service/fact-check/model"
"github.com/factly/dega-server/test"
"github.com/factly/dega-server/util"
"github.com/factly/x/errorx"
"github.com/factly/x/loggerx"
"github.com/factly/x/meilisearchx"
"github.com/factly/x/middlewarex"
"github.com/factly/x/renderx"
"github.com/factly/x/slugx"
"github.com/factly/x/validationx"
"github.com/go-chi/chi"
"gorm.io/gorm"
)
// update - Update claim by id
// @Summary Update a claim by id
// @Description Update claim by ID
// @Tags Claim
// @ID update-claim-by-id
// @Produce json
// @Consume json
// @Param X-User header string true "User ID"
// @Param X-Space header string true "Space ID"
// @Param claim_id path string true "Claim ID"
// @Param Claim body claim false "Claim"
// @Success 200 {object} model.Claim
// @Router /fact-check/claims/{claim_id} [put]
func update(w http.ResponseWriter, r *http.Request) {
	// Space and user ids come from the X-Space / X-User headers via middleware.
	sID, err := middlewarex.GetSpace(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}
	uID, err := middlewarex.GetUser(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}
	claimID := chi.URLParam(r, "claim_id")
	id, err := strconv.Atoi(claimID)
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InvalidID()))
		return
	}
	// Decode and validate the request payload.
	claim := &claim{}
	err = json.NewDecoder(r.Body).Decode(&claim)
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.DecodeError()))
		return
	}
	validationError := validationx.Check(claim)
	if validationError != nil {
		loggerx.Error(errors.New("validation error"))
		errorx.Render(w, validationError)
		return
	}
	result := &model.Claim{}
	result.ID = uint(id)
	// check record exists or not (scoped to the caller's space)
	err = config.DB.Where(&model.Claim{
		SpaceID: uint(sID),
	}).First(&result).Error
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.RecordNotFound()))
		return
	}
	// Slug handling: slugs are capped at 150 characters. Keep the existing
	// slug when unchanged; otherwise approve the supplied slug, or derive
	// one from the claim text.
	var claimSlug string
	slug := claim.Slug
	if len(slug) > 150 {
		slug = claim.Slug[:150]
	}
	// Get table name
	stmt := &gorm.Statement{DB: config.DB}
	_ = stmt.Parse(&model.Claim{})
	tableName := stmt.Schema.Table
	if result.Slug == claim.Slug {
		claimSlug = result.Slug
	} else if claim.Slug != "" && slugx.Check(slug) {
		claimSlug = slugx.Approve(&config.DB, slug, sID, tableName)
	} else {
		if len(claim.Claim) > 150 {
			claimSlug = slugx.Approve(&config.DB, slugx.Make(claim.Claim[:150]), sID, tableName)
		} else {
			claimSlug = slugx.Approve(&config.DB, slugx.Make(claim.Claim), sID, tableName)
		}
	}
	// Store HTML description (rendered from the editor-js JSON) unless the
	// description is empty/JSON null.
	var description string
	if len(claim.Description.RawMessage) > 0 && !reflect.DeepEqual(claim.Description, test.NilJsonb()) {
		description, err = util.HTMLDescription(claim.Description)
		if err != nil {
			loggerx.Error(err)
			errorx.Render(w, errorx.Parser(errorx.GetMessage("cannot parse claim description", http.StatusUnprocessableEntity)))
			return
		}
	}
	tx := config.DB.Begin()
	// Dates are updated via an explicit Select so they are written even when
	// nil/zero (gorm's Updates skips zero-valued struct fields otherwise).
	tx.Model(&result).Select("ClaimDate", "CheckedDate").Updates(model.Claim{
		ClaimDate:   claim.ClaimDate,
		CheckedDate: claim.CheckedDate,
	})
	err = tx.Model(&result).Updates(model.Claim{
		Base:            config.Base{UpdatedByID: uint(uID)},
		Claim:           claim.Claim,
		Slug:            claimSlug,
		ClaimSources:    claim.ClaimSources,
		Description:     claim.Description,
		HTMLDescription: description,
		ClaimantID:      claim.ClaimantID,
		RatingID:        claim.RatingID,
		Fact:            claim.Fact,
		ReviewSources:   claim.ReviewSources,
		MetaFields:      claim.MetaFields,
	}).Preload("Rating").Preload("Rating.Medium").Preload("Claimant").Preload("Claimant.Medium").First(&result).Error
	if err != nil {
		tx.Rollback()
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.DBError()))
		return
	}
	// Meilisearch stores dates as unix seconds; 0 means unset.
	var claimMeiliDate int64 = 0
	if result.ClaimDate != nil {
		claimMeiliDate = result.ClaimDate.Unix()
	}
	var checkedMeiliDate int64 = 0
	if result.CheckedDate != nil {
		checkedMeiliDate = result.CheckedDate.Unix()
	}
	// Update into meili index
	// NOTE(review): review_tag_line and meta_fields are not indexed here —
	// confirm whether that is intentional.
	meiliObj := map[string]interface{}{
		"id":             result.ID,
		"kind":           "claim",
		"claim":          result.Claim,
		"slug":           result.Slug,
		"description":    result.Description,
		"claim_date":     claimMeiliDate,
		"checked_date":   checkedMeiliDate,
		"claim_sources":  result.ClaimSources,
		"claimant_id":    result.ClaimantID,
		"rating_id":      result.RatingID,
		"fact":           result.Fact,
		"review_sources": result.ReviewSources,
		"space_id":       result.SpaceID,
	}
	// The DB transaction commits only after the index update succeeds, so DB
	// and search index stay consistent.
	err = meilisearchx.UpdateDocument("dega", meiliObj)
	if err != nil {
		tx.Rollback()
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
		return
	}
	tx.Commit()
	// Best-effort event publication when NATS is configured.
	if util.CheckNats() {
		if err = util.NC.Publish("claim.updated", result); err != nil {
			loggerx.Error(err)
			errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
			return
		}
	}
	renderx.JSON(w, http.StatusOK, result)
}
<file_sep>/service/core/action/page/create.go
package page
import (
"context"
"encoding/json"
"errors"
"fmt"
"net/http"
"reflect"
"github.com/factly/dega-server/config"
"github.com/factly/dega-server/service/core/action/author"
"github.com/factly/dega-server/service/core/model"
"github.com/factly/dega-server/test"
"github.com/factly/dega-server/util"
"github.com/factly/x/errorx"
"github.com/factly/x/loggerx"
"github.com/factly/x/meilisearchx"
"github.com/factly/x/middlewarex"
"github.com/factly/x/renderx"
"github.com/factly/x/slugx"
"github.com/factly/x/validationx"
"gorm.io/gorm"
)
// create - Create page
// @Summary Create page
// @Description Create page
// @Tags Page
// @ID add-page
// @Consume json
// @Produce json
// @Param X-User header string true "User ID"
// @Param X-Space header string true "Space ID"
// @Param Page body page true "Page Object"
// @Success 201 {object} pageData
// @Router /core/pages [post]
func create(w http.ResponseWriter, r *http.Request) {
	// Space and user ids come from the X-Space / X-User headers via middleware.
	sID, err := middlewarex.GetSpace(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}
	uID, err := middlewarex.GetUser(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}
	// Decode and validate the request payload.
	page := page{}
	err = json.NewDecoder(r.Body).Decode(&page)
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.DecodeError()))
		return
	}
	validationError := validationx.Check(page)
	if validationError != nil {
		loggerx.Error(errors.New("validation error"))
		errorx.Render(w, validationError)
		return
	}
	result := &pageData{}
	result.Authors = make([]model.Author, 0)
	// Get table name (pages are stored in the posts table with IsPage=true)
	stmt := &gorm.Statement{DB: config.DB}
	_ = stmt.Parse(&model.Post{})
	tableName := stmt.Schema.Table
	// Use the supplied slug when valid, otherwise derive one from the title.
	var postSlug string
	if page.Slug != "" && slugx.Check(page.Slug) {
		postSlug = page.Slug
	} else {
		postSlug = slugx.Make(page.Title)
	}
	// A zero medium id means "no featured medium": store NULL, not 0.
	featuredMediumID := &page.FeaturedMediumID
	if page.FeaturedMediumID == 0 {
		featuredMediumID = nil
	}
	// Store HTML description (rendered from the editor-js JSON) unless the
	// description is empty/JSON null.
	var description string
	if len(page.Description.RawMessage) > 0 && !reflect.DeepEqual(page.Description, test.NilJsonb()) {
		description, err = util.HTMLDescription(page.Description)
		if err != nil {
			loggerx.Error(err)
			errorx.Render(w, errorx.Parser(errorx.GetMessage("cannot parse post description", http.StatusUnprocessableEntity)))
			return
		}
	}
	result.Post = model.Post{
		Title:            page.Title,
		Slug:             slugx.Approve(&config.DB, postSlug, sID, tableName),
		Status:           page.Status,
		IsPage:           true,
		Subtitle:         page.Subtitle,
		Excerpt:          page.Excerpt,
		Description:      page.Description,
		HTMLDescription:  description,
		IsHighlighted:    page.IsHighlighted,
		IsSticky:         page.IsSticky,
		FeaturedMediumID: featuredMediumID,
		FormatID:         page.FormatID,
		SpaceID:          uint(sID),
	}
	// Resolve tag/category associations from the supplied id lists.
	if len(page.TagIDs) > 0 {
		config.DB.Model(&model.Tag{}).Where(page.TagIDs).Find(&result.Post.Tags)
	}
	if len(page.CategoryIDs) > 0 {
		config.DB.Model(&model.Category{}).Where(page.CategoryIDs).Find(&result.Post.Categories)
	}
	// The user id is put on the transaction context for the BeforeCreate hook.
	tx := config.DB.WithContext(context.WithValue(r.Context(), userContext, uID)).Begin()
	err = tx.Model(&model.Post{}).Create(&result.Post).Error
	if err != nil {
		tx.Rollback()
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.DBError()))
		return
	}
	tx.Model(&model.Post{}).Preload("Medium").Preload("Format").Preload("Tags").Preload("Categories").First(&result.Post)
	// Adding author
	authors, err := author.All(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
		return
	}
	// Link only author ids that actually exist in the organisation; failed
	// inserts are silently skipped.
	for _, id := range page.AuthorIDs {
		aID := fmt.Sprint(id)
		if _, found := authors[aID]; found && id != 0 {
			author := model.PostAuthor{
				AuthorID: id,
				PostID:   result.Post.ID,
			}
			err := tx.Model(&model.PostAuthor{}).Create(&author).Error
			if err == nil {
				result.Authors = append(result.Authors, authors[aID])
			}
		}
	}
	// Insert into meili index
	var meiliPublishDate int64
	if result.Post.Status == "publish" {
		meiliPublishDate = result.Post.PublishedDate.Unix()
	}
	// NOTE(review): "is_featured" indexes result.IsFeatured, which this
	// handler never sets from the request — confirm whether it should map
	// from the payload like is_sticky/is_highlighted.
	meiliObj := map[string]interface{}{
		"id":             result.ID,
		"kind":           "page",
		"title":          result.Title,
		"subtitle":       result.Subtitle,
		"slug":           result.Slug,
		"status":         result.Status,
		"is_page":        result.IsPage,
		"excerpt":        result.Excerpt,
		"description":    result.Description,
		"is_featured":    result.IsFeatured,
		"is_sticky":      result.IsSticky,
		"is_highlighted": result.IsHighlighted,
		"format_id":      result.FormatID,
		"published_date": meiliPublishDate,
		"space_id":       result.SpaceID,
		"tag_ids":        page.TagIDs,
		"category_ids":   page.CategoryIDs,
		"author_ids":     page.AuthorIDs,
	}
	// The DB transaction commits only after the index insert succeeds, so DB
	// and search index stay consistent.
	err = meilisearchx.AddDocument("dega", meiliObj)
	if err != nil {
		tx.Rollback()
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
		return
	}
	tx.Commit()
	// Best-effort event publication when NATS is configured.
	if util.CheckNats() {
		if err = util.NC.Publish("page.created", result); err != nil {
			errorx.Render(w, errorx.Parser(errorx.GetMessage("not able to publish event", http.StatusInternalServerError)))
			return
		}
	}
	renderx.JSON(w, http.StatusCreated, result)
}
<file_sep>/test/service/core/medium/update_test.go
package medium
import (
"errors"
"fmt"
"net/http"
"net/http/httptest"
"testing"
"time"
"github.com/DATA-DOG/go-sqlmock"
"github.com/factly/dega-server/service"
"github.com/factly/dega-server/test"
"github.com/gavv/httpexpect/v2"
"github.com/jinzhu/gorm/dialects/postgres"
"gopkg.in/h2non/gock.v1"
)
// updatedMedium is the shared request payload used by the TestMediumUpdate
// subtests; subtests mutate "slug" and restore it afterwards.
var updatedMedium = map[string]interface{}{
	"name":        "Image",
	"type":        "jpg",
	"title":       "Sample image",
	"description": "desc",
	"caption":     "sample",
	"alt_text":    "sample",
	"file_size":   100,
	"url": postgres.Jsonb{
		RawMessage: []byte(`{"raw":"http://testimage.com/test.jpg"}`),
	},
	"dimensions": "testdims",
}
// TestMediumUpdate exercises PUT /core/media/{medium_id} against a mocked
// DB (go-sqlmock) and mocked external services (gock). The sqlmock
// expectations are order-sensitive: each subtest must register exactly the
// queries the handler will issue.
func TestMediumUpdate(t *testing.T) {
	mock := test.SetupMockDB()

	test.MockServer()
	defer gock.DisableNetworking()
	testServer := httptest.NewServer(service.RegisterRoutes())
	// Allow real networking only towards the in-process test server.
	gock.New(testServer.URL).EnableNetworking().Persist()
	defer gock.DisableNetworking()
	defer testServer.Close()

	// create httpexpect instance
	e := httpexpect.New(t, testServer.URL)

	// Non-numeric id should be rejected before any DB access.
	t.Run("invalid medium id", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		e.PUT(path).
			WithPath("medium_id", "invalid_id").
			WithHeaders(headers).
			Expect().
			Status(http.StatusBadRequest)
	})

	// Valid id but no matching row -> 404.
	t.Run("medium record not found", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		recordNotFoundMock(mock)

		e.PUT(path).
			WithPath("medium_id", "100").
			WithHeaders(headers).
			WithJSON(updatedMedium).
			Expect().
			Status(http.StatusNotFound)
	})

	// Missing body -> decode failure.
	t.Run("Unable to decode medium data", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		e.PUT(path).
			WithPath("medium_id", 1).
			WithHeaders(headers).
			Expect().
			Status(http.StatusUnprocessableEntity)
		test.ExpectationsMet(t, mock)
	})

	// Body decodes but fails validation.
	t.Run("Unprocessable medium", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		e.PUT(path).
			WithPath("medium_id", 1).
			WithHeaders(headers).
			WithJSON(invalidData).
			Expect().
			Status(http.StatusUnprocessableEntity)
		test.ExpectationsMet(t, mock)
	})

	// Happy path: slug unchanged, so no slug-uniqueness query is expected.
	t.Run("update medium", func(t *testing.T) {
		updatedMedium["slug"] = "image"
		test.CheckSpaceMock(mock)
		SelectWithSpace(mock)
		mediumUpdateMock(mock, updatedMedium, nil)
		SelectWithSpace(mock)
		mock.ExpectCommit()

		e.PUT(path).
			WithPath("medium_id", 1).
			WithHeaders(headers).
			WithJSON(updatedMedium).
			Expect().
			Status(http.StatusOK).JSON().Object().ContainsMap(updatedMedium)
		test.ExpectationsMet(t, mock)
	})

	// Empty slug in the payload: handler derives the slug, so a
	// slug-check query is expected before the update.
	t.Run("update medium by id with empty slug", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		updatedMedium["slug"] = "image"
		SelectWithSpace(mock)
		slugCheckMock(mock, Data)
		mediumUpdateMock(mock, updatedMedium, nil)
		SelectWithSpace(mock)
		mock.ExpectCommit()

		Data["slug"] = ""
		e.PUT(path).
			WithPath("medium_id", 1).
			WithHeaders(headers).
			WithJSON(Data).
			Expect().
			Status(http.StatusOK).JSON().Object().ContainsMap(updatedMedium)
		// Restore the shared fixture for later subtests.
		Data["slug"] = "image"
		test.ExpectationsMet(t, mock)
	})

	// Slug differs from the stored one: expect the uniqueness query and a
	// fresh SELECT of the updated row.
	t.Run("update medium with different slug", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		updatedMedium["slug"] = "image-test"
		SelectWithSpace(mock)
		mock.ExpectQuery(`SELECT slug, space_id FROM "media"`).
			WithArgs(fmt.Sprint(updatedMedium["slug"], "%"), 1).
			WillReturnRows(sqlmock.NewRows([]string{"slug", "space_id"}))
		t.Log(updatedMedium)
		mediumUpdateMock(mock, updatedMedium, nil)

		mock.ExpectQuery(selectQuery).
			WithArgs(1, 1).
			WillReturnRows(sqlmock.NewRows(columns).
				AddRow(1, time.Now(), time.Now(), nil, 1, 1, updatedMedium["name"], updatedMedium["slug"], updatedMedium["type"], updatedMedium["title"], updatedMedium["description"], updatedMedium["caption"], updatedMedium["alt_text"], updatedMedium["file_size"], updatedMedium["url"], updatedMedium["dimensions"], 1))
		mock.ExpectCommit()

		e.PUT(path).
			WithPath("medium_id", 1).
			WithHeaders(headers).
			WithJSON(updatedMedium).
			Expect().
			Status(http.StatusOK).JSON().Object().ContainsMap(updatedMedium)
		test.ExpectationsMet(t, mock)
	})

	// NOTE(review): despite the name, this case simulates an UPDATE failure
	// ("update failed") and expects a 500 with rollback, not a 404.
	t.Run("medium not found", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		updatedMedium["slug"] = "toi-test"
		SelectWithSpace(mock)
		mock.ExpectQuery(`SELECT slug, space_id FROM "media"`).
			WithArgs(fmt.Sprint(updatedMedium["slug"], "%"), 1).
			WillReturnRows(sqlmock.NewRows([]string{"slug", "space_id"}))
		mediumUpdateMock(mock, updatedMedium, errors.New("update failed"))
		mock.ExpectRollback()

		e.PUT(path).
			WithPath("medium_id", 1).
			WithHeaders(headers).
			WithJSON(updatedMedium).
			Expect().
			Status(http.StatusInternalServerError)
		test.ExpectationsMet(t, mock)
	})

	// Meili unreachable -> handler rolls back the DB transaction.
	t.Run("update medium when meili is down", func(t *testing.T) {
		test.DisableMeiliGock(testServer.URL)
		updatedMedium["slug"] = "image"
		test.CheckSpaceMock(mock)
		SelectWithSpace(mock)
		mediumUpdateMock(mock, updatedMedium, nil)
		SelectWithSpace(mock)
		mock.ExpectRollback()

		e.PUT(path).
			WithPath("medium_id", 1).
			WithHeaders(headers).
			WithJSON(updatedMedium).
			Expect().
			Status(http.StatusInternalServerError)
		test.ExpectationsMet(t, mock)
	})
}
<file_sep>/service/core/action/category/update.go
package category
import (
"encoding/json"
"errors"
"net/http"
"reflect"
"strconv"
"github.com/factly/dega-server/config"
"github.com/factly/dega-server/service/core/model"
"github.com/factly/dega-server/test"
"github.com/factly/dega-server/util"
"github.com/factly/x/errorx"
"github.com/factly/x/loggerx"
"github.com/factly/x/meilisearchx"
"github.com/factly/x/middlewarex"
"github.com/factly/x/renderx"
"github.com/factly/x/slugx"
"github.com/factly/x/validationx"
"github.com/go-chi/chi"
"gorm.io/gorm"
)
// update - Update category by id
// @Summary Update a category by id
// @Description Update category by ID
// @Tags Category
// @ID update-category-by-id
// @Produce json
// @Consume json
// @Param X-User header string true "User ID"
// @Param category_id path string true "Category ID"
// @Param X-Space header string true "Space ID"
// @Param Category body category false "Category"
// @Success 200 {object} model.Category
// @Router /core/categories/{category_id} [put]
// update applies the request body to the category identified by
// {category_id} within the caller's space, refreshes the Meilisearch entry
// and publishes a "category.updated" NATS event. All DB writes happen
// inside one transaction that is rolled back on any failure, including a
// Meilisearch failure.
func update(w http.ResponseWriter, r *http.Request) {
	sID, err := middlewarex.GetSpace(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}

	uID, err := middlewarex.GetUser(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}

	categoryID := chi.URLParam(r, "category_id")
	id, err := strconv.Atoi(categoryID)
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InvalidID()))
		return
	}

	category := &category{}
	err = json.NewDecoder(r.Body).Decode(&category)
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.DecodeError()))
		return
	}

	validationError := validationx.Check(category)
	if validationError != nil {
		loggerx.Error(errors.New("validation error"))
		errorx.Render(w, validationError)
		return
	}

	result := model.Category{}
	result.ID = uint(id)

	// check record exists or not
	err = config.DB.Where(&model.Category{
		SpaceID: uint(sID),
	}).First(&result).Error
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.RecordNotFound()))
		return
	}

	// A category must not be its own parent.
	if result.ID == category.ParentID {
		loggerx.Error(errors.New("cannot add itself as parent"))
		errorx.Render(w, errorx.Parser(errorx.CannotSaveChanges()))
		return
	}

	// Check if parent category exist or not
	if category.ParentID != 0 {
		var parentCat model.Category
		parentCat.ID = category.ParentID
		err = config.DB.Where(&model.Category{SpaceID: uint(sID)}).First(&parentCat).Error

		if err != nil {
			loggerx.Error(err)
			errorx.Render(w, errorx.Parser(errorx.GetMessage("Parent category does not exist", http.StatusUnprocessableEntity)))
			return
		}
	}

	var categorySlug string

	// Get table name (needed by slugx/util helpers below).
	stmt := &gorm.Statement{DB: config.DB}
	_ = stmt.Parse(&model.Category{})
	tableName := stmt.Schema.Table

	// Keep the existing slug when unchanged; otherwise approve the
	// requested slug or derive one from the name.
	if result.Slug == category.Slug {
		categorySlug = result.Slug
	} else if category.Slug != "" && slugx.Check(category.Slug) {
		categorySlug = slugx.Approve(&config.DB, category.Slug, sID, tableName)
	} else {
		categorySlug = slugx.Approve(&config.DB, slugx.Make(category.Name), sID, tableName)
	}

	// Check if category with same name exist
	if category.Name != result.Name && util.CheckName(uint(sID), category.Name, tableName) {
		loggerx.Error(errors.New(`category with same name exist`))
		errorx.Render(w, errorx.Parser(errorx.SameNameExist()))
		return
	}

	// Store HTML description
	var description string
	if len(category.Description.RawMessage) > 0 && !reflect.DeepEqual(category.Description, test.NilJsonb()) {
		description, err = util.HTMLDescription(category.Description)
		if err != nil {
			loggerx.Error(err)
			errorx.Render(w, errorx.Parser(errorx.GetMessage("cannot parse category description", http.StatusUnprocessableEntity)))
			return
		}
	}

	tx := config.DB.Begin()

	// A zero MediumID means "clear the medium": gorm's struct Updates
	// skips zero values, so nil-ing the column needs an explicit map update.
	mediumID := &category.MediumID
	result.MediumID = &category.MediumID
	if category.MediumID == 0 {
		err = tx.Model(&result).Updates(map[string]interface{}{"medium_id": nil}).Error
		mediumID = nil
		if err != nil {
			tx.Rollback()
			loggerx.Error(err)
			errorx.Render(w, errorx.Parser(errorx.DBError()))
			return
		}
	}

	// Same pattern for clearing the parent category.
	parentID := &category.ParentID
	if category.ParentID == 0 {
		err = tx.Model(&result).Updates(map[string]interface{}{"parent_id": nil}).Error
		parentID = nil
		if err != nil {
			tx.Rollback()
			loggerx.Error(err)
			errorx.Render(w, errorx.Parser(errorx.DBError()))
			return
		}
	}

	// IsFeatured is a bool, so it must be force-selected to allow
	// setting it to false. NOTE(review): this update's error is ignored.
	tx.Model(&result).Select("IsFeatured").Updates(model.Category{IsFeatured: category.IsFeatured})
	err = tx.Model(&result).Updates(model.Category{
		Base:            config.Base{UpdatedByID: uint(uID)},
		Name:            category.Name,
		Slug:            categorySlug,
		Description:     category.Description,
		HTMLDescription: description,
		ParentID:        parentID,
		MediumID:        mediumID,
		MetaFields:      category.MetaFields,
	}).Preload("Medium").First(&result).Error

	if err != nil {
		tx.Rollback()
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.DBError()))
		return
	}

	// Update into meili index
	meiliObj := map[string]interface{}{
		"id":          result.ID,
		"kind":        "category",
		"name":        result.Name,
		"slug":        result.Slug,
		"description": result.Description,
		"space_id":    result.SpaceID,
		"meta_fields": result.MetaFields,
	}

	err = meilisearchx.UpdateDocument("dega", meiliObj)
	if err != nil {
		tx.Rollback()
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
		return
	}

	tx.Commit()

	// Best-effort event publish after commit; failure still returns 500
	// even though the DB change has already been committed.
	if util.CheckNats() {
		if err = util.NC.Publish("category.updated", result); err != nil {
			loggerx.Error(err)
			errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
			return
		}
	}

	renderx.JSON(w, http.StatusOK, result)
}
<file_sep>/service/core/action/request/space/reject.go
package space
import (
"context"
"net/http"
"strconv"
"github.com/factly/dega-server/config"
"github.com/factly/dega-server/service/core/model"
"github.com/factly/x/errorx"
"github.com/factly/x/loggerx"
"github.com/factly/x/middlewarex"
"github.com/factly/x/renderx"
"github.com/go-chi/chi"
)
// reject - reject space permission
// @Summary reject space permission
// @Description reject space permission
// @Tags Space_Permissions_Request
// @ID reject-space-permission
// @Consume json
// @Produce json
// @Param X-User header string true "User ID"
// @Param X-Space header string true "Space ID"
// @Param request_id path string true "Request ID"
// @Success 200
// @Failure 400 {array} string
// @Router /core/requests/spaces/{request_id}/reject [post]
// reject marks a pending space-permission request as rejected by the
// calling user. Only requests currently in "pending" status can be found;
// anything else yields a 404.
func reject(w http.ResponseWriter, r *http.Request) {
	uID, err := middlewarex.GetUser(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}

	requestID := chi.URLParam(r, "request_id")
	id, err := strconv.Atoi(requestID)
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InvalidID()))
		return
	}

	request := model.SpacePermissionRequest{}
	request.ID = uint(id)

	// Check if the request exist or not (status filter restricts this to
	// pending requests only).
	err = config.DB.Where(&model.SpacePermissionRequest{
		Request: model.Request{Status: "pending"},
	}).First(&request).Error
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.RecordNotFound()))
		return
	}

	// Mark request as rejected. The user id is injected into the context
	// so gorm hooks can record who performed the update.
	err = config.DB.WithContext(context.WithValue(r.Context(), permissionContext, uID)).Model(&request).Updates(&model.SpacePermissionRequest{
		Request: model.Request{
			Base:   config.Base{UpdatedByID: uint(uID)},
			Status: "rejected",
		},
	}).Error
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.DBError()))
		return
	}

	renderx.JSON(w, http.StatusOK, nil)
}
<file_sep>/service/core/action/medium/create.go
package medium
import (
"context"
"encoding/json"
"errors"
"net/http"
"github.com/factly/dega-server/config"
"github.com/factly/dega-server/service/core/model"
"github.com/factly/dega-server/util"
"github.com/factly/x/errorx"
"github.com/factly/x/loggerx"
"github.com/factly/x/meilisearchx"
"github.com/factly/x/middlewarex"
"github.com/factly/x/renderx"
"github.com/factly/x/slugx"
"github.com/factly/x/validationx"
"github.com/spf13/viper"
"gorm.io/gorm"
)
// create - Create medium
// @Summary Create medium
// @Description Create medium
// @Tags Medium
// @ID add-medium
// @Consume json
// @Produce json
// @Param X-User header string true "User ID"
// @Param X-Space header string true "Space ID"
// @Param Medium body []medium true "Medium Object"
// @Success 201 {object} []model.Medium
// @Failure 400 {array} string
// @Router /core/media [post]
// create inserts a batch of media records for the caller's space, enforces
// the space's media quota when running as a super organisation, indexes
// each created medium in Meilisearch, and publishes a "media.created" NATS
// event. The whole batch is created in a single transaction.
func create(w http.ResponseWriter, r *http.Request) {
	sID, err := middlewarex.GetSpace(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}

	uID, err := middlewarex.GetUser(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}

	var mediumList []medium
	err = json.NewDecoder(r.Body).Decode(&mediumList)

	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.DecodeError()))
		return
	}

	if viper.GetBool("create_super_organisation") {
		// Fetch space permissions
		permission := model.SpacePermission{}
		err = config.DB.Model(&model.SpacePermission{}).Where(&model.SpacePermission{
			SpaceID: uint(sID),
		}).First(&permission).Error

		// No permission record means no quota is granted for this space.
		if err != nil {
			loggerx.Error(err)
			errorx.Render(w, errorx.Parser(errorx.GetMessage("cannot create more media", http.StatusUnprocessableEntity)))
			return
		}

		// Fetch total number of medium in space
		var totMedia int64
		config.DB.Model(&model.Medium{}).Where(&model.Medium{
			SpaceID: uint(sID),
		}).Count(&totMedia)

		// Reject the whole batch if it would exceed the quota.
		// Media <= 0 is treated as unlimited.
		if totMedia+int64(len(mediumList)) > permission.Media && permission.Media > 0 {
			errorx.Render(w, errorx.Parser(errorx.GetMessage("cannot create more media", http.StatusUnprocessableEntity)))
			return
		}
	}

	result := paging{}
	result.Nodes = make([]model.Medium, 0)

	// Validate and normalize each entry before any DB write; the first
	// invalid entry aborts the whole request.
	for _, medium := range mediumList {
		validationError := validationx.Check(medium)
		if validationError != nil {
			loggerx.Error(errors.New("validation error"))
			errorx.Render(w, validationError)
			return
		}

		var mediumSlug string
		if medium.Slug != "" && slugx.Check(medium.Slug) {
			mediumSlug = medium.Slug
		} else {
			mediumSlug = slugx.Make(medium.Name)
		}

		// Get table name (needed for slug uniqueness approval).
		stmt := &gorm.Statement{DB: config.DB}
		_ = stmt.Parse(&model.Medium{})
		tableName := stmt.Schema.Table

		med := model.Medium{
			Name:        medium.Name,
			Slug:        slugx.Approve(&config.DB, mediumSlug, sID, tableName),
			Title:       medium.Title,
			Type:        medium.Type,
			Description: medium.Description,
			Caption:     medium.Caption,
			AltText:     medium.AltText,
			FileSize:    medium.FileSize,
			URL:         medium.URL,
			Dimensions:  medium.Dimensions,
			MetaFields:  medium.MetaFields,
			SpaceID:     uint(sID),
		}
		result.Nodes = append(result.Nodes, med)
	}

	// User id is injected into the context for gorm hooks (created_by).
	tx := config.DB.WithContext(context.WithValue(r.Context(), userContext, uID)).Begin()
	err = tx.Model(&model.Medium{}).Create(&result.Nodes).Error

	if err != nil {
		tx.Rollback()
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.DBError()))
		return
	}

	for i := range result.Nodes {
		// Insert into meili index; any failure rolls back the whole batch.
		meiliObj := map[string]interface{}{
			"id":          result.Nodes[i].ID,
			"kind":        "medium",
			"name":        result.Nodes[i].Name,
			"slug":        result.Nodes[i].Slug,
			"title":       result.Nodes[i].Title,
			"type":        result.Nodes[i].Type,
			"description": result.Nodes[i].Description,
			"space_id":    result.Nodes[i].SpaceID,
		}

		err = meilisearchx.AddDocument("dega", meiliObj)
		if err != nil {
			tx.Rollback()
			loggerx.Error(err)
			errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
			return
		}
	}

	result.Total = int64(len(result.Nodes))
	tx.Commit()

	if util.CheckNats() {
		if err = util.NC.Publish("media.created", result); err != nil {
			loggerx.Error(err)
			errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
			return
		}
	}

	renderx.JSON(w, http.StatusCreated, result)
}
<file_sep>/service/core/action/request/route.go
package request
import (
"net/http"
"github.com/factly/dega-server/service/core/action/request/organisation"
"github.com/factly/dega-server/service/core/action/request/space"
"github.com/go-chi/chi"
)
// Router - CRUD services
// Router mounts the permission-request subrouters (spaces and
// organisations) on a fresh chi router.
func Router() http.Handler {
	router := chi.NewRouter()

	// One subrouter per request resource; prefixes are disjoint, so the
	// mount order is irrelevant.
	router.Mount("/organisations", organisation.Router())
	router.Mount("/spaces", space.Router())

	return router
}
<file_sep>/test/service/core/request/organisation/reject_test.go
package organisation
import (
"net/http"
"net/http/httptest"
"testing"
"github.com/DATA-DOG/go-sqlmock"
"github.com/factly/dega-server/service"
"github.com/factly/dega-server/test"
"github.com/gavv/httpexpect"
"gopkg.in/h2non/gock.v1"
)
// TestOrganisationRequestReject exercises
// POST /core/requests/organisations/{request_id}/reject against a mocked
// DB and mocked external services.
func TestOrganisationRequestReject(t *testing.T) {
	mock := test.SetupMockDB()

	test.MockServer()
	defer gock.DisableNetworking()
	testServer := httptest.NewServer(service.RegisterRoutes())
	// Allow real networking only towards the in-process test server.
	gock.New(testServer.URL).EnableNetworking().Persist()
	defer gock.DisableNetworking()
	defer testServer.Close()

	// create httpexpect instance
	e := httpexpect.New(t, testServer.URL)

	// Non-numeric id fails before any DB access.
	t.Run("invalid request id", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		e.POST(rejectPath).
			WithPath("request_id", "invalid_id").
			WithHeaders(headers).
			Expect().
			Status(http.StatusBadRequest)
	})

	// No pending request with this id -> 404.
	t.Run("request record not found", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		mock.ExpectQuery(selectQuery).
			WithArgs("pending", 1).
			WillReturnRows(sqlmock.NewRows(Columns))

		e.POST(rejectPath).
			WithPath("request_id", "1").
			WithHeaders(headers).
			Expect().
			Status(http.StatusNotFound)
		test.ExpectationsMet(t, mock)
	})

	// Happy path: the request row is updated to status "rejected".
	t.Run("reject the request", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		SelectQuery(mock)
		mock.ExpectBegin()
		mock.ExpectExec(`UPDATE \"organisation_permission_requests\"`).
			WithArgs(test.AnyTime{}, 1, "rejected", 1).
			WillReturnResult(sqlmock.NewResult(1, 1))
		mock.ExpectCommit()

		e.POST(rejectPath).
			WithPath("request_id", "1").
			WithHeaders(headers).
			Expect().
			Status(http.StatusOK)
		test.ExpectationsMet(t, mock)
	})
}
<file_sep>/service/core/action/meta/route.go
package meta
import (
"github.com/go-chi/chi"
)
// metadata is the response envelope returned by the meta-details endpoint.
type metadata struct {
	Success uint `json:"success"` // 1 on success — TODO confirm against handler
	Meta    meta `json:"meta"`
}

// meta holds the Open-Graph-style fields extracted for a URL.
type meta struct {
	Title       string                 `json:"title"`
	SiteName    string                 `json:"site_name"`
	Description string                 `json:"description"`
	Image       map[string]interface{} `json:"image"`
}

// iFramelyRes mirrors the subset of the iframely oEmbed response that this
// package consumes. All fields are optional in the upstream payload.
type iFramelyRes struct {
	URL          string `json:"url,omitempty"`
	Type         string `json:"type,omitempty"`
	Version      string `json:"version,omitempty"`
	Title        string `json:"title,omitempty"`
	Author       string `json:"author,omitempty"`
	ProviderName string `json:"provider_name,omitempty"`
	Description  string `json:"description,omitempty"`
	ThumbnailURL string `json:"thumbnail_url,omitempty"`
}
// Router - Group of meta routes
// Router builds the meta subrouter, exposing a single GET endpoint that
// serves link metadata details.
func Router() chi.Router {
	router := chi.NewRouter()
	router.Get("/", details)
	return router
}
<file_sep>/service/core/action/menu/delete.go
package menu
import (
"net/http"
"strconv"
"github.com/factly/dega-server/config"
"github.com/factly/dega-server/service/core/model"
"github.com/factly/dega-server/util"
"github.com/factly/x/errorx"
"github.com/factly/x/loggerx"
"github.com/factly/x/meilisearchx"
"github.com/factly/x/middlewarex"
"github.com/factly/x/renderx"
"github.com/go-chi/chi"
)
// delete - Delete menu by id
// @Summary Delete a menu
// @Description Delete menu by ID
// @Tags Menu
// @ID delete-menu-by-id
// @Param X-User header string true "User ID"
// @Param menu_id path string true "Menu ID"
// @Param X-Space header string true "Space ID"
// @Success 200
// @Failure 400 {array} string
// @Router /core/menus/{menu_id} [delete]
// delete removes the menu identified by {menu_id} within the caller's
// space, removes its Meilisearch document, and publishes a "menu.deleted"
// NATS event. NOTE: the function name shadows Go's builtin delete() inside
// this package; the builtin is not used here.
func delete(w http.ResponseWriter, r *http.Request) {
	menuID := chi.URLParam(r, "menu_id")
	id, err := strconv.Atoi(menuID)
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InvalidID()))
		return
	}

	sID, err := middlewarex.GetSpace(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}

	result := &model.Menu{}
	result.ID = uint(id)

	// check record exists or not
	err = config.DB.Where(&model.Menu{
		SpaceID: uint(sID),
	}).First(&result).Error

	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.RecordNotFound()))
		return
	}

	tx := config.DB.Begin()
	err = tx.Delete(&result).Error
	if err != nil {
		tx.Rollback()
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.DBError()))
		return
	}

	// The meili delete happens before commit so a search-index failure
	// rolls the DB delete back too.
	err = meilisearchx.DeleteDocument("dega", result.ID, "menu")
	if err != nil {
		tx.Rollback()
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
		return
	}

	tx.Commit()

	if util.CheckNats() {
		if err = util.NC.Publish("menu.deleted", result); err != nil {
			loggerx.Error(err)
			errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
			return
		}
	}

	renderx.JSON(w, http.StatusOK, nil)
}
<file_sep>/service/core/action/policy/composer.go
package policy
import (
"bytes"
"encoding/json"
"fmt"
"net/http"
"github.com/spf13/viper"
"github.com/factly/dega-server/service/core/model"
"github.com/factly/x/loggerx"
)
func contains(s []string, e string) bool {
for _, a := range s {
if a == e {
return true
}
}
return false
}
// Composer create keto policy
func Composer(oID int, sID int, inputPolicy policyReq) model.KetoPolicy {
allowedResources := []string{"categories", "formats", "media", "policies", "posts", "pages", "tags", "webhooks", "claims", "claimants", "fact-checks", "ratings", "google", "menus", "episodes", "podcasts"}
allowedActions := []string{"get", "create", "update", "delete", "publish"}
result := model.KetoPolicy{}
commanPolicyString := fmt.Sprint(":org:", oID, ":app:dega:space:", sID, ":")
result.ID = "id" + commanPolicyString + inputPolicy.Name
result.Description = inputPolicy.Description
result.Effect = "allow"
result.Resources = make([]string, 0)
result.Actions = make([]string, 0)
for _, each := range inputPolicy.Permissions {
if contains(allowedResources, each.Resource) {
result.Resources = append(result.Resources, "resources"+commanPolicyString+each.Resource)
var eachActions []string
for _, action := range each.Actions {
if contains(allowedActions, action) {
eachActions = append(eachActions, "actions"+commanPolicyString+each.Resource+":"+action)
}
}
result.Actions = append(result.Actions, eachActions...)
}
}
result.Subjects = inputPolicy.Users
buf := new(bytes.Buffer)
err := json.NewEncoder(buf).Encode(&result)
if err != nil {
loggerx.Error(err)
}
req, err := http.NewRequest("PUT", viper.GetString("keto_url")+"/engines/acp/ory/regex/policies", buf)
if err != nil {
loggerx.Error(err)
}
req.Header.Set("Content-Type", "application/json")
client := &http.Client{}
resp, err := client.Do(req)
if err != nil {
return model.KetoPolicy{}
}
defer resp.Body.Close()
err = json.NewDecoder(resp.Body).Decode(&result)
if err != nil {
loggerx.Error(err)
}
return result
}
<file_sep>/test/service/core/permissions/organisation/create_test.go
package organisation
import (
"net/http"
"net/http/httptest"
"testing"
"github.com/DATA-DOG/go-sqlmock"
"github.com/factly/dega-server/service"
"github.com/factly/dega-server/test"
"github.com/gavv/httpexpect"
"gopkg.in/h2non/gock.v1"
)
// TestOrganisationPermissionCreate exercises
// POST /core/permissions/organisations against a mocked DB and mocked
// external services.
func TestOrganisationPermissionCreate(t *testing.T) {
	mock := test.SetupMockDB()

	test.MockServer()
	defer gock.DisableNetworking()
	testServer := httptest.NewServer(service.RegisterRoutes())
	// Allow real networking only towards the in-process test server.
	gock.New(testServer.URL).EnableNetworking().Persist()
	defer gock.DisableNetworking()
	defer testServer.Close()

	// create httpexpect instance
	e := httpexpect.New(t, testServer.URL)

	// Body decodes but fails validation.
	t.Run("Unprocessable permission", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		e.POST(basePath).
			WithJSON(invalidData).
			WithHeaders(headers).
			Expect().
			Status(http.StatusUnprocessableEntity)
	})

	// Missing body -> decode failure.
	t.Run("Undecodable permission", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		e.POST(basePath).
			WithHeaders(headers).
			Expect().
			Status(http.StatusUnprocessableEntity)
	})

	// Happy path: count is 0 (no existing permission), so an INSERT runs.
	t.Run("create permission", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		mock.ExpectQuery(countQuery).
			WithArgs(1).
			WillReturnRows(sqlmock.NewRows([]string{"count"}).
				AddRow(0))
		mock.ExpectBegin()
		mock.ExpectQuery(`INSERT INTO "organisation_permissions"`).
			WithArgs(test.AnyTime{}, test.AnyTime{}, nil, 1, 1, Data["organisation_id"], Data["spaces"]).
			WillReturnRows(sqlmock.
				NewRows([]string{"id"}).
				AddRow(1))
		mock.ExpectCommit()

		e.POST(basePath).
			WithJSON(Data).
			WithHeaders(headers).
			Expect().
			Status(http.StatusCreated).
			JSON().
			Object().
			ContainsMap(Data)
		test.ExpectationsMet(t, mock)
	})

	// Duplicate: count is 1, the handler refuses to create another.
	t.Run("permission of the organisation already exist", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		mock.ExpectQuery(countQuery).
			WithArgs(1).
			WillReturnRows(sqlmock.NewRows([]string{"count"}).
				AddRow(1))

		e.POST(basePath).
			WithJSON(Data).
			WithHeaders(headers).
			Expect().
			Status(http.StatusUnprocessableEntity)
		test.ExpectationsMet(t, mock)
	})
}
<file_sep>/test/mockServers.go
package test
import (
"net/http"
"github.com/factly/dega-server/service/fact-check/action/google"
"github.com/spf13/viper"
"gopkg.in/h2non/gock.v1"
)
// MockServer is created to intercept HTTP Calls outside this project. Mocking the external project servers helps with Unit Testing.
// MockServer is created to intercept HTTP Calls outside this project.
// Mocking the external project servers helps with Unit Testing.
func MockServer() {
	// Install interceptors for every external dependency, in the same
	// order as before: Kavach, Keto, Meilisearch.
	for _, install := range []func(){KavachGock, KetoGock, MeiliGock} {
		install()
	}
}
// KavachGock installs persistent gock interceptors for all Kavach
// (auth/organisation service) endpoints the server calls during tests.
// The more specific paths are registered before the generic
// "/organisations" one.
func KavachGock() {
	// Mock server to return a user from kavach
	gock.New(viper.GetString("kavach_url") + "/organisations/[0-9]+/users").
		Persist().
		Reply(http.StatusOK).
		JSON(Dummy_AuthorList)

	gock.New(viper.GetString("kavach_url") + "/organisations/my").
		Persist().
		Reply(http.StatusOK).
		JSON(Dummy_OrgList)

	// Creates a mock server for kavach URL with an appropriate dummy response.
	gock.New(viper.GetString("kavach_url") + "/organisations").
		Persist().
		Reply(http.StatusOK).
		JSON(PaiganatedOrg)

	// Creates a mock server for kavach URL with an appropriate dummy response.
	gock.New(viper.GetString("kavach_url") + "/organisations/[0-9]+/applications/dega/access").
		Persist().
		Reply(http.StatusOK)
}
// KetoGock installs persistent gock interceptors for the Keto
// (authorization service) policy, role, and allowed endpoints.
func KetoGock() {
	// <----- ALL THE KETO POLICIES (FOR POLICY TEST)------>
	// GET-details for single id,
	gock.New(viper.GetString("keto_url")).
		Get("/engines/acp/ory/regex/policies/(.+)").
		SetMatcher(gock.NewMatcher()).
		AddMatcher(func(req *http.Request, ereq *gock.Request) (bool, error) { return req.Method == "GET", nil }).
		Persist().
		Reply(http.StatusOK).
		JSON(Dummy_SingleMock)

	// DELETE AND UPDATE POLICY - get specific policy, delete and put
	// NOTE(review): the PUT and DELETE registrations below share an
	// identical method matcher accepting either verb.
	gock.New(viper.GetString("keto_url")).
		Put("/engines/acp/ory/regex/policies/(.+)").
		SetMatcher(gock.NewMatcher()).
		AddMatcher(func(req *http.Request, ereq *gock.Request) (bool, error) {
			if req.Method == "PUT" || req.Method == "DELETE" {
				return true, nil
			}
			return false, nil
		}).
		Persist().
		Reply(http.StatusOK).
		JSON(Dummy_KetoPolicy)

	gock.New(viper.GetString("keto_url")).
		Delete("/engines/acp/ory/regex/policies/(.+)").
		SetMatcher(gock.NewMatcher()).
		AddMatcher(func(req *http.Request, ereq *gock.Request) (bool, error) {
			if req.Method == "PUT" || req.Method == "DELETE" {
				return true, nil
			}
			return false, nil
		}).
		Persist().
		Reply(http.StatusOK).
		JSON(Dummy_KetoPolicy)

	// GET and POST POLICY - returns a list of policies and post policy
	gock.New(viper.GetString("keto_url") + "/engines/acp/ory/regex/policies").
		Persist().
		Reply(http.StatusOK).
		JSON(Dummy_KetoPolicy)

	gock.New(viper.GetString("keto_url") + "/engines/acp/ory/regex/roles/(.+)").
		Persist().
		Reply(http.StatusOK).
		JSON(Dummy_Role)

	// Creates a mock server for keto for provisioning Policy.Authorizer module.
	gock.New(viper.GetString("keto_url")).
		Post("/engines/acp/ory/regex/allowed").
		Persist().
		Reply(http.StatusOK)
}
// MeiliGock installs persistent gock interceptors for the Meilisearch
// search/document endpoints. All mocks require the X-Meili-API-Key header
// to be present, mirroring real requests.
func MeiliGock() {
	gock.New(viper.GetString("meili_url") + "/indexes/dega/search").
		HeaderPresent("X-Meili-API-Key").
		Persist().
		Reply(http.StatusOK).
		JSON(MeiliHits)

	// Document add (POST), update (PUT) and delete all reply 202 Accepted,
	// matching Meilisearch's async update API.
	gock.New(viper.GetString("meili_url")).
		Post("/indexes/dega/documents").
		HeaderPresent("X-Meili-API-Key").
		Persist().
		Reply(http.StatusAccepted).
		JSON(ReturnUpdate)

	gock.New(viper.GetString("meili_url")).
		Put("/indexes/dega/documents").
		HeaderPresent("X-Meili-API-Key").
		Persist().
		Reply(http.StatusAccepted).
		JSON(ReturnUpdate)

	gock.New(viper.GetString("meili_url")).
		Delete("/indexes/dega/documents/(.+)").
		HeaderPresent("X-Meili-API-Key").
		Persist().
		Reply(http.StatusAccepted).
		JSON(ReturnUpdate)
}
// GoogleFactCheckGock installs a persistent interceptor for the Google
// Fact Check API, replying with a canned response.
func GoogleFactCheckGock() {
	gock.New(google.GoogleURL).
		Persist().
		Reply(http.StatusOK).
		JSON(GoogleResponse)
}
// IFramelyGock points the iframely_url config at a fixed test address and
// installs interceptors for the /oembed and /iframely endpoints (both
// require a "url" query parameter to match).
func IFramelyGock() {
	// Side effect: overwrites the global viper config for iframely_url.
	viper.Set("iframely_url", "http://iframely:8061")
	gock.New(viper.GetString("iframely_url")).
		Get("/oembed").
		ParamPresent("url").
		Persist().
		Reply(http.StatusOK).
		JSON(OembedResponse)

	gock.New(viper.GetString("iframely_url")).
		Get("/iframely").
		ParamPresent("url").
		Persist().
		Reply(http.StatusOK).
		JSON(IFramelyResponse)
}
// DisableMeiliGock tears down all mocks and re-installs everything except
// the Meilisearch mocks, simulating a Meilisearch outage for the caller's
// test, while still allowing real networking to the test server.
// NOTE(review): this defer runs when THIS function returns (immediately),
// not at the end of the caller's test — confirm that is intended.
func DisableMeiliGock(serverURL string) {
	gock.Off()
	KavachGock()
	KetoGock()
	gock.New(serverURL).EnableNetworking().Persist()
	defer gock.DisableNetworking()
}
// DisableKavachGock tears down all mocks and re-installs everything except
// the Kavach mocks, simulating a Kavach outage.
// NOTE(review): the defer runs when this function returns (immediately),
// not at the end of the caller's test — confirm that is intended.
func DisableKavachGock(serverURL string) {
	gock.Off()
	MeiliGock()
	KetoGock()
	gock.New(serverURL).EnableNetworking().Persist()
	defer gock.DisableNetworking()
}
// DisableKetoGock tears down all mocks and re-installs everything except
// the Keto mocks, simulating a Keto outage.
// NOTE(review): the defer runs when this function returns (immediately),
// not at the end of the caller's test — confirm that is intended.
func DisableKetoGock(serverURL string) {
	gock.Off()
	MeiliGock()
	KavachGock()
	gock.New(serverURL).EnableNetworking().Persist()
	defer gock.DisableNetworking()
}
<file_sep>/test/service/core/permissions/space/testvars.go
package space
import (
"database/sql/driver"
"regexp"
"time"
"github.com/DATA-DOG/go-sqlmock"
)
// headers carries the space/user identification required by the API.
var headers = map[string]string{
	"X-Space": "1",
	"X-User":  "1",
}

// Data is a valid space-permission payload shared across subtests.
var Data = map[string]interface{}{
	"space_id":   1,
	"fact_check": true,
	"media":      1,
	"posts":      1,
	"podcast":    true,
	"episodes":   1,
}

// invalidData fails validation (fact_check must be a bool).
var invalidData = map[string]interface{}{
	"fact_check": 1,
}

// columns mirrors the space_permissions table layout used by sqlmock rows.
var columns = []string{"id", "created_at", "updated_at", "deleted_at", "created_by_id", "updated_by_id", "space_id", "fact_check", "media", "posts", "podcast", "episodes"}

// Query patterns and route paths used by the permission tests.
var selectQuery = `SELECT (.+) FROM \"space_permissions\"`
var countQuery = regexp.QuoteMeta(`SELECT count(*) FROM "space_permissions"`)

var basePath = "/core/permissions/spaces"
var path = "/core/permissions/spaces/{permission_id}"
var mypath = "/core/permissions/spaces/my"
// SelectQuery registers an expectation for a SELECT on space_permissions
// with the given args, returning one row seeded from the Data fixture.
func SelectQuery(mock sqlmock.Sqlmock, args ...driver.Value) {
	rows := sqlmock.NewRows(columns).
		AddRow(1, time.Now(), time.Now(), nil, 1, 1, Data["space_id"], Data["fact_check"], Data["media"], Data["posts"], Data["podcast"], Data["episodes"])

	mock.ExpectQuery(selectQuery).
		WithArgs(args...).
		WillReturnRows(rows)
}
<file_sep>/service/core/action/request/organisation/approve.go
package organisation
import (
"context"
"net/http"
"strconv"
"github.com/factly/dega-server/config"
"github.com/factly/dega-server/service/core/model"
"github.com/factly/x/errorx"
"github.com/factly/x/loggerx"
"github.com/factly/x/middlewarex"
"github.com/factly/x/renderx"
"github.com/go-chi/chi"
)
// approve - approve organisation permission
// @Summary approve organisation permission
// @Description approve organisation permission
// @Tags Organisation_Permissions_Request
// @ID approve-org-permission
// @Consume json
// @Produce json
// @Param X-User header string true "User ID"
// @Param X-Space header string true "Space ID"
// @Param request_id path string true "Request ID"
// @Success 201 {object} model.OrganisationPermission
// @Failure 400 {array} string
// @Router /core/requests/organisations/{request_id}/approve [post]
// approve grants a pending organisation-permission request: it creates the
// organisation permission if none exists (or updates the existing one) and
// marks the request as approved, all inside one transaction.
func approve(w http.ResponseWriter, r *http.Request) {
	uID, err := middlewarex.GetUser(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}

	requestID := chi.URLParam(r, "request_id")
	id, err := strconv.Atoi(requestID)
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InvalidID()))
		return
	}

	request := model.OrganisationPermissionRequest{}
	request.ID = uint(id)

	// Check if the request exist or not (only "pending" requests match).
	err = config.DB.Where(&model.OrganisationPermissionRequest{
		Request: model.Request{Status: "pending"},
	}).First(&request).Error
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.RecordNotFound()))
		return
	}

	orgPermission := model.OrganisationPermission{}

	// Check if the permission for the organisation already exist.
	// NOTE: err from this query deliberately drives the create-vs-update
	// branch below (err != nil => no existing permission => create).
	err = config.DB.Model(&model.OrganisationPermission{}).Where(&model.OrganisationPermission{
		OrganisationID: request.OrganisationID,
	}).First(&orgPermission).Error

	// User id is injected into the context for gorm hooks.
	tx := config.DB.WithContext(context.WithValue(r.Context(), permissionContext, uID)).Begin()
	result := model.OrganisationPermission{
		Base:           config.Base{UpdatedByID: uint(uID)},
		OrganisationID: request.OrganisationID,
		Spaces:         request.Spaces,
	}
	if err != nil {
		// Create a organisation permission
		err = tx.Model(&model.OrganisationPermission{}).Create(&result).Error
	} else {
		// Update the organisation permission
		err = tx.Model(&orgPermission).Updates(&result).First(&result).Error
	}

	if err != nil {
		tx.Rollback()
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.DBError()))
		return
	}

	// Mark request as approved
	err = tx.Model(&request).Updates(&model.OrganisationPermissionRequest{
		Request: model.Request{
			Base:   config.Base{UpdatedByID: uint(uID)},
			Status: "approved",
		},
	}).Error
	if err != nil {
		tx.Rollback()
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.DBError()))
		return
	}

	tx.Commit()

	renderx.JSON(w, http.StatusOK, result)
}
<file_sep>/service/fact-check/action/claim/create.go
package claim
import (
"context"
"encoding/json"
"errors"
"net/http"
"reflect"
"github.com/factly/dega-server/config"
"github.com/factly/dega-server/service/fact-check/model"
"github.com/factly/dega-server/test"
"github.com/factly/dega-server/util"
"github.com/factly/x/errorx"
"github.com/factly/x/loggerx"
"github.com/factly/x/meilisearchx"
"github.com/factly/x/middlewarex"
"github.com/factly/x/renderx"
"github.com/factly/x/slugx"
"github.com/factly/x/validationx"
"gorm.io/gorm"
)
// create - Create claim
// @Summary Create claim
// @Description Create claim
// @Tags Claim
// @ID add-claim
// @Consume json
// @Produce json
// @Param X-User header string true "User ID"
// @Param X-Space header string true "Space ID"
// @Param Claim body claim true "Claim Object"
// @Success 201 {object} model.Claim
// @Failure 400 {array} string
// @Router /fact-check/claims [post]
func create(w http.ResponseWriter, r *http.Request) {
	// Resolve the space and user from the request context; both are required.
	sID, err := middlewarex.GetSpace(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}
	uID, err := middlewarex.GetUser(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}
	// Decode the request payload. Note: the local variable `claim` shadows the
	// package-level request type of the same name.
	claim := &claim{}
	err = json.NewDecoder(r.Body).Decode(&claim)
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.DecodeError()))
		return
	}
	validationError := validationx.Check(claim)
	if validationError != nil {
		loggerx.Error(errors.New("validation error"))
		errorx.Render(w, validationError)
		return
	}
	// Get table name (used by slugx.Approve for uniqueness checks).
	stmt := &gorm.Statement{DB: config.DB}
	_ = stmt.Parse(&model.Claim{})
	tableName := stmt.Schema.Table
	// Slugs are capped at 150 characters. Use the client-supplied slug if it
	// is valid; otherwise derive one from the (possibly truncated) claim text.
	var claimSlug string
	slug := claim.Slug
	if len(slug) > 150 {
		slug = claim.Slug[:150]
	}
	if claim.Slug != "" && slugx.Check(slug) {
		claimSlug = slug
	} else {
		if len(claim.Claim) > 150 {
			claimSlug = slugx.Make(claim.Claim[:150])
		} else {
			claimSlug = slugx.Make(claim.Claim)
		}
	}
	// Store HTML description rendered from the editor JSON, when present.
	var description string
	if len(claim.Description.RawMessage) > 0 && !reflect.DeepEqual(claim.Description, test.NilJsonb()) {
		description, err = util.HTMLDescription(claim.Description)
		if err != nil {
			loggerx.Error(err)
			errorx.Render(w, errorx.Parser(errorx.GetMessage("cannot parse claim description", http.StatusUnprocessableEntity)))
			return
		}
	}
	result := &model.Claim{
		Claim:           claim.Claim,
		Slug:            slugx.Approve(&config.DB, claimSlug, sID, tableName),
		ClaimDate:       claim.ClaimDate,
		CheckedDate:     claim.CheckedDate,
		ClaimSources:    claim.ClaimSources,
		Description:     claim.Description,
		HTMLDescription: description,
		ClaimantID:      claim.ClaimantID,
		RatingID:        claim.RatingID,
		Fact:            claim.Fact,
		ReviewSources:   claim.ReviewSources,
		MetaFields:      claim.MetaFields,
		SpaceID:         uint(sID),
	}
	// Create inside a transaction; the user ID travels in the context for
	// audit hooks.
	tx := config.DB.WithContext(context.WithValue(r.Context(), userContext, uID)).Begin()
	err = tx.Model(&model.Claim{}).Create(&result).Error
	if err != nil {
		tx.Rollback()
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.DBError()))
		return
	}
	// Reload with associations so the response includes nested objects.
	tx.Model(&model.Claim{}).Preload("Rating").Preload("Rating.Medium").Preload("Claimant").Preload("Claimant.Medium").Find(&result)
	// Meilisearch stores dates as unix timestamps; nil dates become 0.
	var claimMeiliDate int64 = 0
	if result.ClaimDate != nil {
		claimMeiliDate = result.ClaimDate.Unix()
	}
	var checkedMeiliDate int64 = 0
	if result.CheckedDate != nil {
		checkedMeiliDate = result.CheckedDate.Unix()
	}
	// Insert into meili index
	meiliObj := map[string]interface{}{
		"id":             result.ID,
		"kind":           "claim",
		"claim":          result.Claim,
		"slug":           result.Slug,
		"description":    result.Description,
		"claim_date":     claimMeiliDate,
		"checked_date":   checkedMeiliDate,
		"claim_sources":  result.ClaimSources,
		"claimant_id":    result.ClaimantID,
		"rating_id":      result.RatingID,
		"fact":           result.Fact,
		"review_sources": result.ReviewSources,
		"space_id":       result.SpaceID,
	}
	// Indexing failure rolls back the DB write so DB and index stay in sync.
	err = meilisearchx.AddDocument("dega", meiliObj)
	if err != nil {
		tx.Rollback()
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
		return
	}
	tx.Commit()
	// Publish the event only after commit; NATS is optional.
	if util.CheckNats() {
		if err = util.NC.Publish("claim.created", result); err != nil {
			loggerx.Error(err)
			errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
			return
		}
	}
	renderx.JSON(w, http.StatusCreated, result)
}
<file_sep>/test/service/core/category/testvars.go
package category
import (
"fmt"
"regexp"
"strings"
"time"
"github.com/DATA-DOG/go-sqlmock"
"github.com/factly/dega-server/test"
"github.com/factly/dega-server/test/service/core/medium"
"github.com/jinzhu/gorm/dialects/postgres"
)
// headers carries the space/user auth headers sent with every request in
// these tests.
var headers = map[string]string{
	"X-Space": "1",
	"X-User": "1",
}
// Data is the valid category payload used for create/update requests.
var Data map[string]interface{} = map[string]interface{}{
	"name": "Test category",
	"slug": "test-category",
	"description": postgres.Jsonb{
		RawMessage: []byte(`{"time":1617039625490,"blocks":[{"type":"paragraph","data":{"text":"Test Description"}}],"version":"2.19.0"}`),
	},
	"html_description": "<p>Test Description</p>",
	"parent_id": 0,
	"medium_id": 1,
	"is_featured": true,
	"meta_fields": postgres.Jsonb{
		RawMessage: []byte(`{"type":"description"}`),
	},
}
// resData is the subset of Data expected back in responses (no parent_id or
// medium_id).
var resData map[string]interface{} = map[string]interface{}{
	"name": "Test category",
	"slug": "test-category",
	"description": postgres.Jsonb{
		RawMessage: []byte(`{"time":1617039625490,"blocks":[{"type":"paragraph","data":{"text":"Test Description"}}],"version":"2.19.0"}`),
	},
	"html_description": "<p>Test Description</p>",
	"meta_fields": postgres.Jsonb{
		RawMessage: []byte(`{"type":"description"}`),
	},
	"is_featured": true,
}
// invalidData has deliberately misspelled keys to trigger validation errors.
var invalidData map[string]interface{} = map[string]interface{}{
	"nae": "Tecateg",
	"slg": "test-category",
}
// categorylist provides two valid categories for list-endpoint tests.
var categorylist []map[string]interface{} = []map[string]interface{}{
	{
		"name": "Test category 1",
		"slug": "test-category-1",
		"description": postgres.Jsonb{
			RawMessage: []byte(`{"time":1617039625490,"blocks":[{"type":"paragraph","data":{"text":"Test Description 1"}}],"version":"2.19.0"}`),
		},
		"html_description": "<p>Test Description 1</p>",
		"parent_id": 0,
		"medium_id": 1,
		"meta_fields": postgres.Jsonb{
			RawMessage: []byte(`{"type":"description"}`),
		},
		"is_featured": true,
	},
	{
		"name": "Test category 2",
		"slug": "test-category-2",
		"description": postgres.Jsonb{
			RawMessage: []byte(`{"time":1617039625490,"blocks":[{"type":"paragraph","data":{"text":"Test Description 2"}}],"version":"2.19.0"}`),
		},
		"html_description": "<p>Test Description 2</p>",
		"parent_id": 0,
		"medium_id": 1,
		"meta_fields": postgres.Jsonb{
			RawMessage: []byte(`{"type":"description"}`),
		},
		"is_featured": true,
	},
}
// Columns mirrors the column order of the "categories" table; AddRow calls in
// this package must match this order.
var Columns []string = []string{"id", "created_at", "updated_at", "deleted_at", "created_by_id", "updated_by_id", "name", "slug", "description", "html_description", "parent_id", "meta_fields", "medium_id", "is_featured", "space_id"}
// Regex-escaped SQL fragments matched against the queries GORM issues.
var selectQuery string = regexp.QuoteMeta(`SELECT * FROM "categories"`)
var countQuery string = regexp.QuoteMeta(`SELECT count(*) FROM "categories"`)
// deleteQuery matches the soft-delete UPDATE that sets deleted_at.
var deleteQuery string = regexp.QuoteMeta(`UPDATE "categories" SET "deleted_at"=`)
// Routes under test.
const path string = "/core/categories/{category_id}"
const basePath string = "/core/categories"
// selectWithSpace expects a category SELECT filtered by two args
// (space and id) and returns one row built from Data.
func selectWithSpace(mock sqlmock.Sqlmock) {
	mock.ExpectQuery(selectQuery).
		WithArgs(sqlmock.AnyArg(), sqlmock.AnyArg()).
		WillReturnRows(sqlmock.NewRows(Columns).
			AddRow(1, time.Now(), time.Now(), nil, 1, 1, Data["name"], Data["slug"], Data["description"], Data["html_description"], Data["parent_id"], Data["meta_fields"], Data["medium_id"], Data["is_featured"], 1))
}
// SelectWithOutSpace expects a category SELECT with a single arg (no space
// filter) and returns one row built from Data. Exported for other packages.
func SelectWithOutSpace(mock sqlmock.Sqlmock) {
	mock.ExpectQuery(selectQuery).
		WithArgs(sqlmock.AnyArg()).
		WillReturnRows(sqlmock.NewRows(Columns).
			AddRow(1, time.Now(), time.Now(), nil, 1, 1, Data["name"], Data["slug"], Data["description"], Data["html_description"], Data["parent_id"], Data["meta_fields"], Data["medium_id"], Data["is_featured"], 1))
}
// slugCheckMock expects the slug-uniqueness lookup (prefix LIKE match within
// space 1) and returns no rows, i.e. the slug is available.
func slugCheckMock(mock sqlmock.Sqlmock, category map[string]interface{}) {
	mock.ExpectQuery(regexp.QuoteMeta(`SELECT slug, space_id FROM "categories"`)).
		WithArgs(fmt.Sprint(category["slug"], "%"), 1).
		WillReturnRows(sqlmock.NewRows(Columns))
}
// sameNameCount expects the category count query for space 1 matching the
// lowercased name and returns the supplied count.
func sameNameCount(mock sqlmock.Sqlmock, count int, name interface{}) {
	lowered := strings.ToLower(name.(string))
	rows := sqlmock.NewRows([]string{"count"}).AddRow(count)
	mock.ExpectQuery(countQuery).WithArgs(1, lowered).WillReturnRows(rows)
}
// insertMock expects the transaction begin, the medium existence check, and
// the category INSERT returning generated ids.
// NOTE(review): no ExpectCommit here — callers appear to add commit/rollback
// expectations themselves; confirm before reusing.
func insertMock(mock sqlmock.Sqlmock) {
	mock.ExpectBegin()
	// The create handler verifies the medium exists before inserting.
	medium.SelectWithSpace(mock)
	mock.ExpectQuery(`INSERT INTO "categories"`).
		WithArgs(test.AnyTime{}, test.AnyTime{}, nil, 1, 1, Data["name"], Data["slug"], Data["description"], Data["html_description"], Data["is_featured"], 1, Data["meta_fields"], Data["medium_id"]).
		WillReturnRows(sqlmock.
			NewRows([]string{"parent_id", "medium_id", "id"}).
			AddRow(1, 1, 1))
}
// insertWithMediumError simulates a create where the referenced medium does
// not exist: the media SELECT returns no rows, so the transaction rolls back.
func insertWithMediumError(mock sqlmock.Sqlmock) {
	mock.ExpectBegin()
	mock.ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "media"`)).
		WithArgs(1, 1).
		WillReturnRows(sqlmock.NewRows([]string{"id", "created_at", "updated_at", "deleted_at", "name", "slug", "type", "title", "description", "html_description", "caption", "alt_text", "file_size", "url", "dimensions", "space_id"}))
	mock.ExpectRollback()
}
// updateMock expects the full update sequence the category update handler
// issues. The three UPDATEs (null out parent, set is_featured, then the main
// column update) are separate statements, and sqlmock matches them in order —
// do not reorder these expectations.
func updateMock(mock sqlmock.Sqlmock) {
	mock.ExpectBegin()
	medium.SelectWithSpace(mock)
	mock.ExpectExec(`UPDATE \"categories\"`).
		WithArgs(nil, test.AnyTime{}, 1).
		WillReturnResult(sqlmock.NewResult(1, 1))
	medium.SelectWithSpace(mock)
	mock.ExpectExec(`UPDATE \"categories\"`).
		WithArgs(test.AnyTime{}, Data["is_featured"], 1).
		WillReturnResult(sqlmock.NewResult(1, 1))
	medium.SelectWithSpace(mock)
	mock.ExpectExec(`UPDATE \"categories\"`).
		WithArgs(test.AnyTime{}, 1, Data["name"], Data["slug"], Data["description"], Data["html_description"], Data["medium_id"], Data["meta_fields"], 1).
		WillReturnResult(sqlmock.NewResult(1, 1))
	// Post-update reload of the category and its medium.
	selectWithSpace(mock)
	medium.SelectWithOutSpace(mock)
}
// categoryPostAssociation expects the posts/post_categories join count query
// and returns the supplied association count.
func categoryPostAssociation(mock sqlmock.Sqlmock, count int) {
	joinCountQuery := regexp.QuoteMeta(`SELECT count(*) FROM "posts" JOIN "post_categories"`)
	countRow := sqlmock.NewRows([]string{"count"}).AddRow(count)
	mock.ExpectQuery(joinCountQuery).WithArgs(1).WillReturnRows(countRow)
}
// deleteMock expects the delete sequence: an UPDATE detaching children
// (parent_id set to nil), then the soft-delete UPDATE setting deleted_at.
// NOTE(review): no ExpectCommit here — callers appear to add it; confirm.
func deleteMock(mock sqlmock.Sqlmock) {
	mock.ExpectBegin()
	mock.ExpectExec(`UPDATE \"categories\"`).
		WithArgs(nil, 1, 1).
		WillReturnResult(sqlmock.NewResult(1, 1))
	mock.ExpectExec(deleteQuery).
		WithArgs(test.AnyTime{}, 1).
		WillReturnResult(sqlmock.NewResult(1, 1))
}
<file_sep>/service/core/action/medium/delete.go
package medium
import (
"errors"
"net/http"
"strconv"
"github.com/factly/dega-server/config"
"github.com/factly/dega-server/service/core/model"
factCheckModel "github.com/factly/dega-server/service/fact-check/model"
"github.com/factly/dega-server/util"
"github.com/factly/x/errorx"
"github.com/factly/x/loggerx"
"github.com/factly/x/meilisearchx"
"github.com/factly/x/middlewarex"
"github.com/factly/x/renderx"
"github.com/go-chi/chi"
)
// delete - Delete medium by id
// @Summary Delete a medium
// @Description Delete medium by ID
// @Tags Medium
// @ID delete-medium-by-id
// @Param X-User header string true "User ID"
// @Param medium_id path string true "Medium ID"
// @Param X-Space header string true "Space ID"
// @Success 200
// @Router /core/media/{medium_id} [delete]
// delete removes a medium after verifying it exists in the caller's space and
// is not referenced by any post, category, space, rating, or claimant. The
// DB delete and the meilisearch de-index happen in one transaction; a NATS
// event is published after commit when NATS is enabled.
func delete(w http.ResponseWriter, r *http.Request) {
	sID, err := middlewarex.GetSpace(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}
	mediumID := chi.URLParam(r, "medium_id")
	id, err := strconv.Atoi(mediumID)
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InvalidID()))
		return
	}
	result := &model.Medium{}
	result.ID = uint(id)
	// check record exists or not
	err = config.DB.Where(&model.Medium{
		SpaceID: uint(sID),
	}).First(&result).Error
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.RecordNotFound()))
		return
	}
	uintID := uint(id)
	// Refuse deletion while any entity still references this medium; each
	// check reuses totAssociated.
	var totAssociated int64
	// check if medium is associated with posts
	config.DB.Model(&model.Post{}).Where(&model.Post{
		FeaturedMediumID: &uintID,
	}).Count(&totAssociated)
	if totAssociated != 0 {
		loggerx.Error(errors.New("medium is associated with post"))
		errorx.Render(w, errorx.Parser(errorx.CannotDelete("medium", "post")))
		return
	}
	// check if medium is associated with categories
	config.DB.Model(&model.Category{}).Where(&model.Category{
		MediumID: &uintID,
	}).Count(&totAssociated)
	if totAssociated != 0 {
		loggerx.Error(errors.New("medium is associated with category"))
		errorx.Render(w, errorx.Parser(errorx.CannotDelete("medium", "category")))
		return
	}
	// check if medium is associated with spaces (any of its four image slots)
	config.DB.Model(&model.Space{}).Where(&model.Space{
		LogoID: &uintID,
	}).Or(&model.Space{
		LogoMobileID: &uintID,
	}).Or(&model.Space{
		FavIconID: &uintID,
	}).Or(&model.Space{
		MobileIconID: &uintID,
	}).Count(&totAssociated)
	if totAssociated != 0 {
		loggerx.Error(errors.New("medium is associated with space"))
		errorx.Render(w, errorx.Parser(errorx.CannotDelete("medium", "space")))
		return
	}
	// check if medium is associated with ratings
	config.DB.Model(&factCheckModel.Rating{}).Where(&factCheckModel.Rating{
		MediumID: &uintID,
	}).Count(&totAssociated)
	if totAssociated != 0 {
		loggerx.Error(errors.New("medium is associated with rating"))
		errorx.Render(w, errorx.Parser(errorx.CannotDelete("medium", "rating")))
		return
	}
	// check if medium is associated with claimants
	config.DB.Model(&factCheckModel.Claimant{}).Where(&factCheckModel.Claimant{
		MediumID: &uintID,
	}).Count(&totAssociated)
	if totAssociated != 0 {
		loggerx.Error(errors.New("medium is associated with claimant"))
		errorx.Render(w, errorx.Parser(errorx.CannotDelete("medium", "claimant")))
		return
	}
	tx := config.DB.Begin()
	// Fix: the delete error was previously ignored, so a failed delete could
	// still de-index the document and report success.
	err = tx.Delete(&result).Error
	if err != nil {
		tx.Rollback()
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.DBError()))
		return
	}
	// Remove from the search index; failure rolls back the DB delete so the
	// database and the index stay in sync.
	err = meilisearchx.DeleteDocument("dega", result.ID, "medium")
	if err != nil {
		tx.Rollback()
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
		return
	}
	tx.Commit()
	// Publish the event only after commit; NATS is optional.
	if util.CheckNats() {
		if err = util.NC.Publish("media.deleted", result); err != nil {
			loggerx.Error(err)
			errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
			return
		}
	}
	renderx.JSON(w, http.StatusOK, nil)
}
<file_sep>/service/core/action/menu/details.go
package menu
import (
"net/http"
"strconv"
"github.com/factly/dega-server/config"
"github.com/factly/dega-server/service/core/model"
"github.com/factly/x/errorx"
"github.com/factly/x/loggerx"
"github.com/factly/x/middlewarex"
"github.com/factly/x/renderx"
"github.com/go-chi/chi"
)
// details - Get menu by id
// @Summary Show a menu by id
// @Description Get menu by ID
// @Tags Menu
// @ID get-menu-by-id
// @Produce json
// @Param X-User header string true "User ID"
// @Param X-Space header string true "Space ID"
// @Param menu_id path string true "Menu ID"
// @Success 200 {object} model.Menu
// @Router /core/menus/{menu_id} [get]
// details fetches a single menu by its path ID, scoped to the caller's space,
// and writes it back as JSON.
func details(w http.ResponseWriter, r *http.Request) {
	spaceID, err := middlewarex.GetSpace(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}
	// The path parameter must be a valid integer ID.
	id, err := strconv.Atoi(chi.URLParam(r, "menu_id"))
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InvalidID()))
		return
	}
	menu := model.Menu{}
	menu.ID = uint(id)
	// The primary key set above combines with the space filter, so only a
	// menu belonging to this space is returned.
	err = config.DB.Model(&model.Menu{}).Where(&model.Menu{
		SpaceID: uint(spaceID),
	}).First(&menu).Error
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.RecordNotFound()))
		return
	}
	renderx.JSON(w, http.StatusOK, menu)
}
<file_sep>/service/core/action/event/list.go
package event
import (
"encoding/json"
"fmt"
"net/http"
"github.com/factly/dega-server/service/core/model"
"github.com/factly/x/errorx"
"github.com/factly/x/loggerx"
"github.com/factly/x/middlewarex"
"github.com/factly/x/renderx"
"github.com/factly/x/requestx"
"github.com/spf13/viper"
)
// paging is the list-response envelope mirrored from the hukz events API:
// the total number of events plus the current page of nodes.
type paging struct {
	Total int64         `json:"total"`
	Nodes []model.Event `json:"nodes"`
}
// list - Get all events
// @Summary Show all events
// @Description Get all events
// @Tags Events
// @ID get-all-events
// @Produce json
// @Param X-User header string true "User ID"
// @Param X-Space header string true "Space ID"
// @Param limit query string false "limit per page"
// @Param page query string false "page number"
// @Success 200 {object} paging
// @Router /core/events [get]
// list authenticates the caller and proxies a paginated events query to the
// hukz service, scoped to this app and the caller's space.
func list(w http.ResponseWriter, r *http.Request) {
	uID, err := middlewarex.GetUser(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}
	sID, err := middlewarex.GetSpace(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}
	// Forward limit & page untouched and tag the query with app and space.
	hukzURL := viper.GetString("hukz_url") + "/events?tag=app:dega&limit=" + r.URL.Query().Get("limit") + "&page=" + r.URL.Query().Get("page") + "&tag=space:" + fmt.Sprint(sID)
	resp, err := requestx.Request("GET", hukzURL, nil, map[string]string{
		"X-User": fmt.Sprint(uID),
	})
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
		return
	}
	// Fix: the response body was never closed, leaking the underlying
	// connection on every call.
	defer resp.Body.Close()
	if resp.StatusCode != http.StatusOK {
		errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
		return
	}
	var eventsPaging paging
	if err = json.NewDecoder(resp.Body).Decode(&eventsPaging); err != nil {
		// Log the decode failure for consistency with the other error paths.
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
		return
	}
	renderx.JSON(w, http.StatusOK, eventsPaging)
}
<file_sep>/util/healthCheckers.go
package util
import (
"errors"
"net/http"
"github.com/spf13/viper"
)
// KetoChecker reports whether keto's readiness endpoint responds without
// error. It goes through the keto-specific request helper (defined elsewhere
// in this package) rather than the generic GetRequest below.
func KetoChecker() error {
	_, err := KetoGetRequest("/health/ready")
	return err
}
// KratosChecker reports whether the Kratos public readiness endpoint
// (configured via kratos_public_url) responds without a 5xx status.
func KratosChecker() error {
	return GetRequest(viper.GetString("kratos_public_url") + "/health/ready")
}
// KavachChecker reports whether the Kavach readiness endpoint (configured
// via kavach_url) responds without a 5xx status.
func KavachChecker() error {
	return GetRequest(viper.GetString("kavach_url") + "/health/ready")
}
// MeiliChecker reports whether the Meilisearch health endpoint (configured
// via meili_url) responds without a 5xx status.
func MeiliChecker() error {
	return GetRequest(viper.GetString("meili_url") + "/health")
}
// GetRequest performs a GET against url and returns an error when the
// request fails or the server answers with a 5xx status. Non-5xx statuses
// (including 4xx) are treated as healthy.
func GetRequest(url string) error {
	req, err := http.NewRequest("GET", url, nil)
	if err != nil {
		return err
	}
	req.Header.Set("Content-Type", "application/json")
	client := &http.Client{}
	res, err := client.Do(req)
	if err != nil {
		return err
	}
	// Fix: the response body was never closed, leaking a connection on every
	// health check.
	defer res.Body.Close()
	if res.StatusCode >= 500 {
		return errors.New("cannot connect")
	}
	return nil
}
<file_sep>/service/fact-check/action/rating/list.go
package rating
import (
"net/http"
"github.com/factly/dega-server/config"
"github.com/factly/dega-server/service/fact-check/model"
"github.com/factly/x/errorx"
"github.com/factly/x/loggerx"
"github.com/factly/x/middlewarex"
"github.com/factly/x/paginationx"
"github.com/factly/x/renderx"
)
// paging is the list-response envelope: the total number of matching ratings
// (unpaginated) plus the current page of nodes.
type paging struct {
	Total int64          `json:"total"`
	Nodes []model.Rating `json:"nodes"`
}
// list - Get all ratings
// @Summary Show all ratings
// @Description Get all ratings
// @Tags Rating
// @ID get-all-ratings
// @Produce json
// @Param X-User header string true "User ID"
// @Param X-Space header string true "Space ID"
// @Param limit query string false "limit per page"
// @Param page query string false "page number"
// @Param all query string false "all"
// @Success 200 {object} paging
// @Router /fact-check/ratings [get]
func list(w http.ResponseWriter, r *http.Request) {
	sID, err := middlewarex.GetSpace(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}
	// Start with an empty (not nil) slice so the JSON response renders
	// "nodes": [] instead of null when there are no ratings.
	result := paging{}
	result.Nodes = make([]model.Rating, 0)
	// ?all=true bypasses pagination and returns every rating in the space.
	all := r.URL.Query().Get("all")
	offset, limit := paginationx.Parse(r.URL.Query())
	// Count runs before Offset/Limit are applied, so Total is the full
	// unpaginated count for this space. The chain order matters here.
	stmt := config.DB.Model(&model.Rating{}).Preload("Medium").Where(&model.Rating{
		SpaceID: uint(sID),
	}).Count(&result.Total).Order("id desc")
	if all == "true" {
		err = stmt.Find(&result.Nodes).Error
	} else {
		err = stmt.Offset(offset).Limit(limit).Find(&result.Nodes).Error
	}
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.DBError()))
		return
	}
	renderx.JSON(w, http.StatusOK, result)
}
<file_sep>/config/organisation.go
package config
import (
"bytes"
"encoding/json"
"errors"
"fmt"
"net/http"
"regexp"
"strings"
"github.com/spf13/viper"
)
// organisation is the minimal payload exchanged with kavach when creating
// the super organisation.
type organisation struct {
	Base
	Title string `json:"title"`
	Slug string `json:"slug"`
}
// flowInitResponse models the kratos self-service registration flow-init
// response; only the password method's form config is used.
type flowInitResponse struct {
	ID string `json:"id,omitempty"`
	Type string `json:"type,omitempty"`
	Methods passwordMethod `json:"methods,omitempty"`
}
// passwordMethod holds the password registration method's untyped config map.
type passwordMethod struct {
	Password map[string]interface{} `json:"password,omitempty"`
}
// ketoPolicy mirrors ORY Keto's regex access-control policy document.
type ketoPolicy struct {
	ID string `json:"id"`
	Subjects []string `json:"subjects"`
	Actions []string `json:"actions"`
	Resources []string `json:"resources"`
	Effect string `json:"effect"`
	Description string `json:"description"`
}
// OrganisationPermission model
type OrganisationPermission struct {
	Base
	// OrganisationID is the kavach organisation this permission applies to.
	OrganisationID uint `gorm:"column:organisation_id" json:"organisation_id"`
	// Spaces is the allowed number of spaces; -1 is used below for unlimited.
	Spaces int64 `gorm:"column:spaces" json:"spaces"`
}
// ketoPolicyPath is the keto regex policy engine API base path.
var ketoPolicyPath string = "/engines/acp/ory/regex/policies"
// CheckSuperOrganisation reports whether the super organisation is already
// set up: the "app:dega:superorg" policy must exist in keto with at least one
// subject, and that subject (the organisation ID) must resolve to an
// organisation in kavach.
func CheckSuperOrganisation() bool {
	// check if policy is present in keto
	req, _ := http.NewRequest("GET", viper.GetString("keto_url")+ketoPolicyPath+"/app:dega:superorg", nil)
	req.Header.Set("Content-Type", "application/json")
	client := &http.Client{}
	resp, err := client.Do(req)
	if err != nil {
		return false
	}
	// Fix: the response body was previously never closed (connection leak).
	defer resp.Body.Close()
	if resp.StatusCode != http.StatusOK {
		return false
	}
	var policy ketoPolicy
	err = json.NewDecoder(resp.Body).Decode(&policy)
	if err != nil {
		return false
	}
	if len(policy.Subjects) == 0 {
		return false
	}
	// The organisation ID is stored as the policy's first subject.
	orgID := policy.Subjects[0]
	// check if organisation is present in kavach
	req, _ = http.NewRequest("GET", viper.GetString("kavach_url")+"/organisations/"+orgID, nil)
	req.Header.Set("Content-Type", "application/json")
	orgResp, err := client.Do(req)
	if err != nil {
		return false
	}
	// Fix: this body was also leaked previously.
	defer orgResp.Body.Close()
	return orgResp.StatusCode == http.StatusOK
}
// UserConfigPresent reports whether all config params needed to create the
// default super user are set: a non-empty default email and password, plus a
// super organisation title.
func UserConfigPresent() bool {
	for _, key := range []string{"default_user_email", "default_user_password"} {
		if !viper.IsSet(key) || viper.GetString(key) == "" {
			return false
		}
	}
	return viper.IsSet("super_organisation_title")
}
// CreateSuperOrganisation creates a super user and organisation in kavach.
// It only runs when enabled by config, not already set up, and fully
// configured; otherwise it returns an error.
func CreateSuperOrganisation() error {
	if viper.GetBool("create_super_organisation") && !CheckSuperOrganisation() && UserConfigPresent() {
		// create a user in kratos through api
		resp, err := createKratosUser()
		if err != nil {
			return err
		}
		var sessionBody map[string]interface{}
		var kavachUserCheckers map[string]interface{}
		// 200 from kratos means a fresh registration: forward the session as
		// the "extra" payload. Any other status (e.g. the user already
		// exists) falls back to a minimal identity built from config.
		// NOTE(review): the map type assertions below panic if the kratos
		// response shape differs — confirm acceptable for a boot-time path.
		if resp.StatusCode == http.StatusOK {
			_ = json.NewDecoder(resp.Body).Decode(&sessionBody)
			sessionMap := sessionBody["session"].(map[string]interface{})
			kavachUserCheckers = map[string]interface{}{
				"extra": sessionMap,
			}
		} else {
			kavachUserCheckers = map[string]interface{}{
				"extra": map[string]interface{}{
					"identity": map[string]interface{}{
						"traits": map[string]interface{}{
							"email": viper.GetString("default_user_email"),
						},
					},
				},
			}
		}
		// create or fetch user in kavach at /users/checker
		resp, err = createKavachUser(kavachUserCheckers)
		if err != nil {
			return err
		}
		// create organisation in kavach with the created user as owner
		// NOTE(review): response bodies in this function are never closed —
		// minor leak on a once-per-boot path; consider deferring Close.
		var respBody map[string]interface{}
		err = json.NewDecoder(resp.Body).Decode(&respBody)
		if err != nil {
			return err
		}
		// The kavach checker response carries the user ID in a header map.
		headerMap := respBody["header"].(map[string]interface{})
		userIDArr := headerMap["X-User"].([]interface{})
		userID := userIDArr[0].(string)
		resp, err = createKavachOrganisation(userID)
		if err != nil {
			return err
		}
		var respOrganisation organisation
		err = json.NewDecoder(resp.Body).Decode(&respOrganisation)
		if err != nil {
			return err
		}
		// create permissions for super organisation
		err = createSuperOrganisationPermissions(respOrganisation.ID)
		if err != nil {
			return err
		}
		// create keto policy for super organisation
		_, err = createKetoPolicy(respOrganisation.ID)
		if err != nil {
			return err
		}
	} else {
		return errors.New("did not create super user and organisation")
	}
	return nil
}
// createKratosUser registers the default user with kratos via its
// self-service API: it first initialises a registration flow to discover the
// form action URL, then submits the configured credentials there. The caller
// owns (and must close) the returned response's body.
func createKratosUser() (*http.Response, error) {
	req, _ := http.NewRequest("GET", viper.GetString("kratos_public_url")+"/self-service/registration/api", nil)
	req.Header.Set("Content-Type", "application/json")
	client := &http.Client{}
	resp, err := client.Do(req)
	if err != nil {
		return nil, err
	}
	var body flowInitResponse
	_ = json.NewDecoder(resp.Body).Decode(&body)
	// Fix: close the flow-init body once decoded; previously it was leaked
	// when resp was reassigned below.
	resp.Body.Close()
	// Extract the password method's form action URL from the flow response.
	var actionURL string
	passwordMap := body.Methods.Password
	if config, found := passwordMap["config"]; found {
		configMap := config.(map[string]interface{})
		if action, found := configMap["action"]; found {
			actionURL = action.(string)
		}
	}
	userCredsBody := map[string]interface{}{
		"traits.email": viper.GetString("default_user_email"),
		"password":     viper.GetString("default_user_password"),
	}
	buf := new(bytes.Buffer)
	err = json.NewEncoder(buf).Encode(&userCredsBody)
	if err != nil {
		return nil, err
	}
	req, _ = http.NewRequest("POST", actionURL, buf)
	// When running behind oathkeeper, rewrite the host so the submission
	// goes through the proxy.
	if viper.IsSet("oathkeeper_host") {
		req.URL.Host = viper.GetString("oathkeeper_host")
	}
	req.Header.Set("Content-Type", "application/json")
	resp, err = client.Do(req)
	if err != nil {
		return nil, err
	}
	return resp, nil
}
// createKavachUser creates (or fetches) the default user in kavach by
// POSTing the checker payload to /users/checker. On success the caller owns
// (and must close) the returned response's body.
func createKavachUser(kavachUserCheckers map[string]interface{}) (*http.Response, error) {
	buf := new(bytes.Buffer)
	err := json.NewEncoder(buf).Encode(&kavachUserCheckers)
	if err != nil {
		return nil, err
	}
	req, _ := http.NewRequest("POST", viper.GetString("kavach_url")+"/users/checker", buf)
	req.Header.Set("Content-Type", "application/json")
	client := &http.Client{}
	resp, err := client.Do(req)
	if err != nil {
		return nil, err
	}
	if resp.StatusCode != http.StatusOK {
		// Fix: close the body before discarding the response so the
		// connection is not leaked on the error path.
		resp.Body.Close()
		return nil, errors.New("could not create user in kavach")
	}
	return resp, nil
}
// createKavachOrganisation creates the super organisation in kavach with the
// given user as owner. The slug is derived from the configured title by
// lowercasing and collapsing non-alphanumeric runs into hyphens. On success
// the caller owns (and must close) the returned response's body.
func createKavachOrganisation(userID string) (*http.Response, error) {
	var re = regexp.MustCompile("[^a-z0-9]+")
	org := organisation{
		Title: viper.GetString("super_organisation_title"),
		Slug:  strings.Trim(re.ReplaceAllString(strings.ToLower(viper.GetString("super_organisation_title")), "-"), "-"),
	}
	buf := new(bytes.Buffer)
	err := json.NewEncoder(buf).Encode(&org)
	if err != nil {
		return nil, err
	}
	req, _ := http.NewRequest("POST", viper.GetString("kavach_url")+"/organisations", buf)
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("X-User", userID)
	client := &http.Client{}
	resp, err := client.Do(req)
	if err != nil {
		return nil, err
	}
	if resp.StatusCode != http.StatusCreated {
		// Fix: close the body before discarding the response so the
		// connection is not leaked on the error path.
		resp.Body.Close()
		return nil, errors.New("could not create organisation in kavach")
	}
	return resp, nil
}
func createSuperOrganisationPermissions(oID uint) error {
return DB.Model(&OrganisationPermission{}).Create(&OrganisationPermission{
OrganisationID: oID,
Spaces: -1,
}).Error
}
// createKetoPolicy PUTs the "app:dega:superorg" policy to keto, allowing the
// organisation all of its org-scoped resources and actions. On success the
// caller owns (and must close) the returned response's body.
func createKetoPolicy(organisationID uint) (*http.Response, error) {
	policy := ketoPolicy{
		ID:        "app:dega:superorg",
		Subjects:  []string{fmt.Sprint(organisationID)},
		Resources: []string{fmt.Sprint("resources:org:", organisationID, ":<.*>")},
		Actions:   []string{fmt.Sprint("actions:org:", organisationID, ":<.*>")},
		Effect:    "allow",
	}
	buf := new(bytes.Buffer)
	err := json.NewEncoder(buf).Encode(&policy)
	if err != nil {
		return nil, err
	}
	req, _ := http.NewRequest("PUT", viper.GetString("keto_url")+ketoPolicyPath, buf)
	req.Header.Set("Content-Type", "application/json")
	client := &http.Client{}
	resp, err := client.Do(req)
	if err != nil {
		return nil, err
	}
	if resp.StatusCode != http.StatusOK {
		// Fix: close the body before discarding the response so the
		// connection is not leaked on the error path.
		resp.Body.Close()
		return nil, errors.New("could not create keto policy")
	}
	return resp, nil
}
<file_sep>/README.md
# Dega Server
**Releasability:** [](https://sonarcloud.io/dashboard?id=factly_dega-server)
**Reliability:** [](https://sonarcloud.io/dashboard?id=factly_dega-server) [](https://sonarcloud.io/dashboard?id=factly_dega-server)
**Security:** [](https://sonarcloud.io/dashboard?id=factly_dega-server) [](https://sonarcloud.io/dashboard?id=factly_dega-server)
**Maintainability:** [](https://sonarcloud.io/dashboard?id=factly_dega-server) [](https://sonarcloud.io/dashboard?id=factly_dega-server) [](https://sonarcloud.io/dashboard?id=factly_dega-server)
**Other:** [](https://sonarcloud.io/dashboard?id=factly_dega-server) [](https://sonarcloud.io/dashboard?id=factly_dega-server) [](https://sonarcloud.io/dashboard?id=factly_dega-server)
## Configurable variables
```
DATABASE_HOST=postgres
DATABASE_USER=postgres
DATABASE_PASSWORD=postgres
DATABASE_NAME=dega
DATABASE_PORT=5432
DATABASE_SSL_MODE=disable
MODE=development
OATHKEEPER_HOST=oathkeeper:4455
ENABLE_HUKZ=true
NATS_URL=nats://nats:4222
NATS_USER_NAME=natsuser
NATS_USER_PASSWORD=<PASSWORD>
HUKZ_URL=http://hukz:7790
KAVACH_URL=http://kavach-server:8000
IMAGEPROXY_URL=http://127.0.0.1:7001
KETO_URL=http://keto:4466
KRATOS_PUBLIC_URL=http://kratos:4433
IFRAMELY_URL=http://iframely:8061
GOOGLE_KEY=<google api key for google factchecks>
TEMPLATES_PATH=web/templates
MEILI_URL=http://meilisearch:7700
MEILI_KEY=<meilisearch server key>
CREATE_SUPER_ORGANISATION=true
SUPER_ORGANISATION_TITLE=<title of super organisation>
DEFAULT_NUMBER_OF_MEDIA=10
DEFAULT_NUMBER_OF_SPACES=2
DEFAULT_NUMBER_OF_POSTS=10
DEFAULT_NUMBER_OF_EPISODES=10
DEFAULT_USER_EMAIL=<super user email>
DEFAULT_USER_PASSWORD=<<PASSWORD>>
```
* The config file should be stored in the project root folder with the name `config` (the extension can be yml, json, or env).
* Configuration values can also be set through environment variables.
> If running in docker, swagger docs can be accessed at `http://localhost:7789/swagger/index.html`
## Tests
To run test cases
`go test ./test/... -coverpkg ./service/... -coverprofile=coverage.out`
To view the coverage report in your browser:
`go tool cover -html=coverage.out`
package claim
import (
"net/http"
"net/http/httptest"
"testing"
"time"
"github.com/DATA-DOG/go-sqlmock"
"github.com/factly/dega-server/service"
"github.com/factly/dega-server/test"
"github.com/factly/dega-server/test/service/core/permissions/space"
"github.com/factly/dega-server/test/service/fact-check/claimant"
"github.com/factly/dega-server/test/service/fact-check/rating"
"github.com/gavv/httpexpect/v2"
"github.com/spf13/viper"
"gopkg.in/h2non/gock.v1"
)
// TestClaimList exercises GET on the claims list endpoint: empty and
// non-empty listings, pagination, claimant/rating filters, free-text search,
// and behaviour when the meilisearch backend is unavailable.
//
// NOTE(review): the sqlmock expectations are strictly ordered; the mock setup
// sequence inside each sub-test must mirror the handler's query order exactly.
func TestClaimList(t *testing.T) {
	mock := test.SetupMockDB()
	test.MockServer()
	testServer := httptest.NewServer(service.RegisterRoutes())
	gock.New(testServer.URL).EnableNetworking().Persist()
	defer gock.DisableNetworking()
	defer testServer.Close()
	// create httpexpect instance
	e := httpexpect.New(t, testServer.URL)
	t.Run("get empty list of claims", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)
		claimCountQuery(mock, 0)
		// No rows returned -> handler should report total: 0.
		mock.ExpectQuery(selectQuery).
			WillReturnRows(sqlmock.NewRows(columns))
		e.GET(basePath).
			WithHeaders(headers).
			Expect().
			Status(http.StatusOK).
			JSON().
			Object().
			ContainsMap(map[string]interface{}{"total": 0})
		test.ExpectationsMet(t, mock)
	})
	t.Run("get non-empty list of claims", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)
		claimCountQuery(mock, len(claimList))
		// Two claim rows built from the shared claimList fixture.
		mock.ExpectQuery(selectQuery).
			WillReturnRows(sqlmock.NewRows(columns).
				AddRow(1, time.Now(), time.Now(), nil, 1, 1, claimList[0]["claim"], claimList[0]["slug"], claimList[0]["claim_date"], claimList[0]["checked_date"], claimList[0]["claim_sources"],
					claimList[0]["description"], claimList[0]["html_description"], claimList[0]["claimant_id"], claimList[0]["rating_id"], claimList[0]["fact"], claimList[0]["review_sources"], 1).
				AddRow(2, time.Now(), time.Now(), nil, 1, 1, claimList[1]["claim"], claimList[1]["slug"], claimList[1]["claim_date"], claimList[1]["checked_date"], claimList[1]["claim_sources"],
					claimList[1]["description"], claimList[1]["html_description"], claimList[1]["claimant_id"], claimList[1]["rating_id"], claimList[1]["fact"], claimList[1]["review_sources"], 1))
		// The handler also resolves each claim's claimant and rating.
		claimant.SelectWithOutSpace(mock, claimant.Data)
		rating.SelectWithOutSpace(mock, rating.Data)
		e.GET(basePath).
			WithHeaders(headers).
			Expect().
			Status(http.StatusOK).
			JSON().
			Object().
			ContainsMap(map[string]interface{}{"total": len(claimList)}).
			Value("nodes").
			Array().
			Element(0).
			Object().
			ContainsMap(claimList[0])
		test.ExpectationsMet(t, mock)
	})
	t.Run("get claims with pagination", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)
		claimCountQuery(mock, len(claimList))
		// limit=1&page=2 should surface only the second fixture row.
		mock.ExpectQuery(paginationQuery).
			WillReturnRows(sqlmock.NewRows(columns).
				AddRow(2, time.Now(), time.Now(), nil, 1, 1, claimList[1]["claim"], claimList[1]["slug"], claimList[1]["claim_date"], claimList[1]["checked_date"], claimList[1]["claim_sources"],
					claimList[1]["description"], claimList[1]["html_description"], claimList[1]["claimant_id"], claimList[1]["rating_id"], claimList[1]["fact"], claimList[1]["review_sources"], 1))
		claimant.SelectWithOutSpace(mock, claimant.Data)
		rating.SelectWithOutSpace(mock, rating.Data)
		e.GET(basePath).
			WithQueryObject(map[string]interface{}{
				"limit": "1",
				"page":  "2",
			}).
			WithHeaders(headers).
			Expect().
			Status(http.StatusOK).
			JSON().
			Object().
			ContainsMap(map[string]interface{}{"total": len(claimList)}).
			Value("nodes").
			Array().
			Element(0).
			Object().
			ContainsMap(claimList[1])
		test.ExpectationsMet(t, mock)
	})
	t.Run("get list of claims based on filters", func(t *testing.T) {
		claimListMock(mock)
		e.GET(basePath).
			WithHeaders(headers).
			WithQueryObject(map[string]interface{}{
				"claimant": "2",
				"rating":   "2",
			}).
			Expect().
			Status(http.StatusOK).
			JSON().
			Object().
			ContainsMap(map[string]interface{}{"total": len(claimList)}).
			Value("nodes").
			Array().
			Element(0).
			Object().
			ContainsMap(claimList[0])
		test.ExpectationsMet(t, mock)
	})
	t.Run("get list of claims based on filters and query", func(t *testing.T) {
		claimListMock(mock)
		e.GET(basePath).
			WithHeaders(headers).
			WithQueryObject(map[string]interface{}{
				"claimant": "2",
				"rating":   "2",
				"q":        "test",
			}).
			Expect().
			Status(http.StatusOK).
			JSON().
			Object().
			ContainsMap(map[string]interface{}{"total": len(claimList)}).
			Value("nodes").
			Array().
			Element(0).
			Object().
			ContainsMap(claimList[0])
		test.ExpectationsMet(t, mock)
	})
	t.Run("when query does not match any claim", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)
		// Replace the default meili mock with one that returns an empty
		// search result, so the handler reports zero matches.
		test.DisableMeiliGock(testServer.URL)
		gock.New(viper.GetString("meili_url") + "/indexes/dega/search").
			HeaderPresent("X-Meili-API-Key").
			Persist().
			Reply(http.StatusOK).
			JSON(test.EmptyMeili)
		e.GET(basePath).
			WithHeaders(headers).
			WithQueryObject(map[string]interface{}{
				"claimant": "2",
				"rating":   "2",
				"q":        "test",
			}).
			Expect().
			Status(http.StatusOK).
			JSON().
			Object().
			Value("total").
			Equal(0)
		test.ExpectationsMet(t, mock)
	})
	t.Run("when meili is down", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)
		// No meili mock at all: a search-backed listing degrades to total 0.
		test.DisableMeiliGock(testServer.URL)
		e.GET(basePath).
			WithHeaders(headers).
			WithQueryObject(map[string]interface{}{
				"claimant":  "2",
				"format_id": "2",
				"q":         "test",
			}).
			Expect().
			Status(http.StatusOK).
			JSON().
			Object().
			Value("total").
			Equal(0)
		test.ExpectationsMet(t, mock)
	})
}
<file_sep>/service/core/action/format/details.go
package format
import (
"net/http"
"strconv"
"github.com/factly/dega-server/config"
"github.com/factly/dega-server/service/core/model"
"github.com/factly/x/errorx"
"github.com/factly/x/loggerx"
"github.com/factly/x/middlewarex"
"github.com/factly/x/renderx"
"github.com/go-chi/chi"
)
// details - Get format by id
// @Summary Show a format by id
// @Description Get format by ID
// @Tags Format
// @ID get-format-by-id
// @Produce json
// @Param X-User header string true "User ID"
// @Param X-Space header string true "Space ID"
// @Param format_id path string true "Format ID"
// @Success 200 {object} model.Format
// @Router /core/formats/{format_id} [get]
func details(w http.ResponseWriter, r *http.Request) {
	// The space id is injected into the request context by upstream
	// middleware; without it the request is unauthorised.
	spaceID, err := middlewarex.GetSpace(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}
	// Parse the numeric format id out of the URL path.
	id, err := strconv.Atoi(chi.URLParam(r, "format_id"))
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InvalidID()))
		return
	}
	// Look the format up by primary key, scoped to the caller's space.
	format := model.Format{}
	format.ID = uint(id)
	err = config.DB.Model(&model.Format{}).Where(&model.Format{
		SpaceID: uint(spaceID),
	}).First(&format).Error
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.RecordNotFound()))
		return
	}
	renderx.JSON(w, http.StatusOK, format)
}
<file_sep>/main.go
package main
import (
"github.com/factly/dega-server/cmd"
_ "github.com/factly/dega-server/docs" // docs is generated by Swag CLI, you have to import it.
)
// @title Dega API
// @version 1.0
// @description Dega server API
// @contact.name API Support
// @contact.url http://www.swagger.io/support
// @contact.email <EMAIL>
// @license.name Apache 2.0
// @license.url http://www.apache.org/licenses/LICENSE-2.0.html
// @host localhost:7789
// @BasePath /
func main() {
	cmd.Execute() // delegate to the root command defined in the cmd package
}
<file_sep>/test/service/core/request/organisation/testvars.go
package organisation
import (
"database/sql/driver"
"regexp"
"time"
"github.com/DATA-DOG/go-sqlmock"
"github.com/jinzhu/gorm/dialects/postgres"
)
// headers carries the space/user ids the server middleware reads on every
// request.
var headers = map[string]string{
	"X-Space": "1",
	"X-User":  "1",
}

// Data is the baseline organisation-permission-request fixture used across
// the tests in this package.
var Data = map[string]interface{}{
	"title": "Test Title",
	"description": postgres.Jsonb{
		RawMessage: []byte(`{"type":"description"}`),
	},
	"status":          "pending",
	"organisation_id": 1,
	"spaces":          10,
}

// requestList holds two request fixtures for list-endpoint tests.
var requestList = []map[string]interface{}{
	{
		"title": "Test Title 1",
		"description": postgres.Jsonb{
			RawMessage: []byte(`{"type":"description1"}`),
		},
		"status":          "approved",
		"organisation_id": 1,
		"spaces":          10,
	},
	{
		"title": "Test Title 2",
		"description": postgres.Jsonb{
			RawMessage: []byte(`{"type":"description2"}`),
		},
		"status":          "pending",
		"organisation_id": 1,
		"spaces":          20,
	},
}

// invalidData fails payload validation (title too short).
var invalidData = map[string]interface{}{
	"title": "aa",
}

// Columns mirrors the organisation_permission_requests table columns in the
// order the handlers select them.
var Columns = []string{"id", "created_at", "updated_at", "deleted_at", "created_by_id", "updated_by_id", "title", "description", "status", "organisation_id", "spaces"}

// SQL fragments matched (as regexes) against the queries the handlers issue.
var selectQuery = regexp.QuoteMeta(`SELECT * FROM "organisation_permission_requests"`)
var countQuery = regexp.QuoteMeta(`SELECT count(*) FROM "organisation_permission_requests"`)

// Route paths under test.
var basePath = "/core/requests/organisations"
var path = "/core/requests/organisations/{request_id}"
var approvePath = "/core/requests/organisations/{request_id}/approve"
var rejectPath = "/core/requests/organisations/{request_id}/reject"
var myPath = "/core/requests/organisations/my"
// SelectQuery registers an expectation on mock for the request select query
// and returns a single row assembled from the shared Data fixture.
func SelectQuery(mock sqlmock.Sqlmock, args ...driver.Value) {
	rows := sqlmock.NewRows(Columns).
		AddRow(1, time.Now(), time.Now(), nil, 1, 1, Data["title"], Data["description"], Data["status"], Data["organisation_id"], Data["spaces"])
	mock.ExpectQuery(selectQuery).WithArgs(args...).WillReturnRows(rows)
}
<file_sep>/service/core/action/policy/details.go
package policy
import (
"encoding/json"
"fmt"
"net/http"
"github.com/factly/dega-server/service/core/action/author"
"github.com/factly/dega-server/service/core/model"
"github.com/factly/dega-server/util"
"github.com/factly/x/errorx"
"github.com/factly/x/loggerx"
"github.com/factly/x/middlewarex"
"github.com/factly/x/renderx"
"github.com/go-chi/chi"
"github.com/spf13/viper"
)
// details - Get policy by ID
// @Summary Get policy by ID
// @Description Get policy by ID
// @Tags Policy
// @ID get-policy-by-id
// @Consume json
// @Produce json
// @Param X-User header string true "User ID"
// @Param X-Space header string true "Space ID"
// @Param policy_id path string true "Policy ID"
// @Success 200 {object} model.Policy
// @Router /core/policies/{policy_id} [get]
func details(w http.ResponseWriter, r *http.Request) {
	// Space, user and organisation ids are all supplied via request context
	// by upstream middleware; any of them missing means unauthorised.
	spaceID, err := middlewarex.GetSpace(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}
	userID, err := middlewarex.GetUser(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}
	organisationID, err := util.GetOrganisation(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}
	// Build the fully-qualified keto policy id:
	// "id:org:<org>:app:dega:space:<space>:<policy_id>".
	policyID := chi.URLParam(r, "policy_id")
	ketoPolicyID := fmt.Sprint("id:org:", organisationID, ":app:dega:space:", spaceID, ":", policyID)
	// Fetch the policy from the keto ACP engine.
	req, err := http.NewRequest("GET", viper.GetString("keto_url")+"/engines/acp/ory/regex/policies/"+ketoPolicyID, nil)
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
		return
	}
	req.Header.Set("Content-Type", "application/json")
	// NOTE(review): this client has no timeout, so a hung keto instance can
	// block the handler indefinitely — consider http.Client{Timeout: ...}.
	client := &http.Client{}
	resp, err := client.Do(req)
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.NetworkError()))
		return
	}
	defer resp.Body.Close()
	ketoPolicy := model.KetoPolicy{}
	err = json.NewDecoder(resp.Body).Decode(&ketoPolicy)
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.DecodeError()))
		return
	}
	/* User req */
	// Resolve user details for the organisation and merge them into the
	// keto policy before rendering.
	userMap := author.Mapper(organisationID, userID)
	result := Mapper(ketoPolicy, userMap)
	renderx.JSON(w, http.StatusOK, result)
}
<file_sep>/test/service/core/medium/create_test.go
package medium
import (
"net/http"
"net/http/httptest"
"regexp"
"testing"
"github.com/DATA-DOG/go-sqlmock"
"github.com/factly/dega-server/test/service/core/permissions/space"
"github.com/factly/dega-server/service"
"github.com/factly/dega-server/test"
"github.com/gavv/httpexpect/v2"
"gopkg.in/h2non/gock.v1"
)
// TestMediumCreate exercises POST /core/media: validation failures,
// permission checks, slug generation, quota limits, DB insert errors, and
// behaviour when meilisearch is unreachable.
func TestMediumCreate(t *testing.T) {
	mock := test.SetupMockDB()
	test.MockServer()
	// BUGFIX: a duplicate `defer gock.DisableNetworking()` that used to sit
	// here (before the test server even existed) has been removed; the single
	// deferred call below is sufficient.
	testServer := httptest.NewServer(service.RegisterRoutes())
	gock.New(testServer.URL).EnableNetworking().Persist()
	defer gock.DisableNetworking()
	defer testServer.Close()
	// create httpexpect instance
	e := httpexpect.New(t, testServer.URL)
	t.Run("Unprocessable medium", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock)
		countQuery(mock, 1)
		e.POST(basePath).
			WithJSON(invalidData).
			WithHeaders(headers).
			Expect().
			Status(http.StatusUnprocessableEntity)
	})
	t.Run("Unable to decode medium", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		// Empty body -> JSON decode failure.
		e.POST(basePath).
			WithHeaders(headers).
			Expect().
			Status(http.StatusUnprocessableEntity)
	})
	t.Run("create medium", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock)
		countQuery(mock, 0)
		slugCheckMock(mock, Data)
		mediumInsertMock(mock)
		mock.ExpectCommit()
		e.POST(basePath).
			WithHeaders(headers).
			WithJSON(createArr).
			Expect().
			Status(http.StatusCreated).
			JSON().
			Object().
			Value("nodes").
			Array().
			Element(0).Object().ContainsMap(Data)
		test.ExpectationsMet(t, mock)
	})
	t.Run("create medium when permission not present", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		// No space_permissions row -> creation rejected.
		mock.ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "space_permissions"`)).
			WithArgs(1).
			WillReturnRows(sqlmock.NewRows([]string{"id", "created_at", "updated_at", "deleted_at", "space_id", "mediums", "posts"}))
		e.POST(basePath).
			WithHeaders(headers).
			WithJSON(createArr).
			Expect().
			Status(http.StatusUnprocessableEntity)
		test.ExpectationsMet(t, mock)
	})
	t.Run("create more than permitted medium", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)
		// Existing count at quota -> creation rejected.
		countQuery(mock, 100)
		e.POST(basePath).
			WithHeaders(headers).
			WithJSON(createArr).
			Expect().
			Status(http.StatusUnprocessableEntity)
		test.ExpectationsMet(t, mock)
	})
	t.Run("create medium with empty slug", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)
		countQuery(mock, 0)
		slugCheckMock(mock, Data)
		mediumInsertMock(mock)
		mock.ExpectCommit()
		// An empty slug should be auto-generated from the name ("image").
		Data["slug"] = ""
		res := e.POST(basePath).
			WithHeaders(headers).
			WithJSON(createArr).
			Expect().
			Status(http.StatusCreated).
			JSON().
			Object().
			Value("nodes").Array().Element(0).Object()
		Data["slug"] = "image"
		res.ContainsMap(Data)
		test.ExpectationsMet(t, mock)
	})
	t.Run("medium does not belong same space", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)
		countQuery(mock, 0)
		slugCheckMock(mock, Data)
		mediumInsertError(mock)
		e.POST(basePath).
			WithHeaders(headers).
			WithJSON(createArr).
			Expect().
			Status(http.StatusInternalServerError)
		test.ExpectationsMet(t, mock)
	})
	t.Run("create medium when meili is down", func(t *testing.T) {
		test.DisableMeiliGock(testServer.URL)
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)
		countQuery(mock, 0)
		slugCheckMock(mock, Data)
		mediumInsertMock(mock)
		mock.ExpectRollback()
		e.POST(basePath).
			WithHeaders(headers).
			WithJSON(createArr).
			Expect().
			Status(http.StatusInternalServerError)
		test.ExpectationsMet(t, mock)
	})
}
<file_sep>/service/core/action/permissions/space/create.go
package space
import (
"context"
"encoding/json"
"errors"
"net/http"
"github.com/factly/dega-server/config"
"github.com/factly/dega-server/service/core/model"
"github.com/factly/x/errorx"
"github.com/factly/x/loggerx"
"github.com/factly/x/middlewarex"
"github.com/factly/x/renderx"
"github.com/factly/x/validationx"
"github.com/spf13/viper"
)
// create - Create Space permission
// @Summary Create Space permission
// @Description Create Space permission
// @Tags Space_Permissions
// @ID add-space-permission
// @Consume json
// @Produce json
// @Param X-User header string true "User ID"
// @Param X-Space header string true "Space ID"
// @Param Permission body spacePermission true "Permission Object"
// @Success 201 {object} model.SpacePermission
// @Failure 400 {array} string
// @Router /core/permissions/spaces [post]
// create validates the incoming space-permission payload, rejects a
// duplicate permission for the same space, fills unset quotas from the
// configured defaults and persists the record.
func create(w http.ResponseWriter, r *http.Request) {
	uID, err := middlewarex.GetUser(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}
	permission := spacePermission{}
	err = json.NewDecoder(r.Body).Decode(&permission)
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.DecodeError()))
		return
	}
	validationError := validationx.Check(permission)
	if validationError != nil {
		loggerx.Error(errors.New("validation error"))
		errorx.Render(w, validationError)
		return
	}
	// Only one permission record may exist per space.
	var totPerms int64
	config.DB.Model(&model.SpacePermission{}).Where(&model.SpacePermission{
		SpaceID: permission.SpaceID,
	}).Count(&totPerms)
	if totPerms > 0 {
		// BUGFIX: this branch used to log `err`, which is always nil at this
		// point; log the actual reason for rejecting the request instead.
		loggerx.Error(errors.New("space's permission already exist"))
		errorx.Render(w, errorx.Parser(errorx.GetMessage("space's permission already exist", http.StatusUnprocessableEntity)))
		return
	}
	// Zero-valued quotas fall back to the configured defaults.
	if permission.Media == 0 {
		permission.Media = viper.GetInt64("default_number_of_media")
	}
	if permission.Posts == 0 {
		permission.Posts = viper.GetInt64("default_number_of_posts")
	}
	if permission.Episodes == 0 {
		permission.Episodes = viper.GetInt64("default_number_of_episodes")
	}
	result := model.SpacePermission{
		SpaceID:   permission.SpaceID,
		FactCheck: permission.FactCheck,
		Media:     permission.Media,
		Posts:     permission.Posts,
		Podcast:   permission.Podcast,
		Episodes:  permission.Episodes,
	}
	// The user id travels via context so the gorm hooks can stamp
	// created_by on the new record.
	err = config.DB.WithContext(context.WithValue(r.Context(), userContext, uID)).Create(&result).Error
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.DBError()))
		return
	}
	renderx.JSON(w, http.StatusCreated, result)
}
<file_sep>/service/core/action/permissions/route.go
package permissions
import (
"github.com/factly/dega-server/service/core/action/permissions/organisation"
"github.com/factly/dega-server/service/core/action/permissions/space"
"github.com/go-chi/chi"
)
// Router builds the permissions router, grouping the organisation and space
// permission sub-routers under their respective path prefixes.
func Router() chi.Router {
	router := chi.NewRouter()
	router.Mount("/organisations", organisation.Router())
	router.Mount("/spaces", space.Router())
	return router
}
<file_sep>/test/service/core/post/template_test.go
package post
import (
"errors"
"net/http"
"net/http/httptest"
"regexp"
"testing"
"github.com/DATA-DOG/go-sqlmock"
"github.com/factly/dega-server/service"
"github.com/factly/dega-server/test"
"github.com/factly/dega-server/test/service/core/category"
"github.com/factly/dega-server/test/service/core/format"
"github.com/factly/dega-server/test/service/core/medium"
"github.com/factly/dega-server/test/service/core/tag"
"github.com/gavv/httpexpect/v2"
"gopkg.in/h2non/gock.v1"
)
// TestPostTemplateCreate exercises POST on the post-template endpoint:
// successful template creation from an existing post, invalid/undecodable
// payloads, a missing source post, DB insert failure, and meilisearch being
// unreachable. Data["status"] is temporarily switched to "template" for the
// duration of the test and restored to "draft" at the end.
func TestPostTemplateCreate(t *testing.T) {
	mock := test.SetupMockDB()
	test.MockServer()
	// BUGFIX: a duplicate `defer gock.DisableNetworking()` that used to sit
	// here (before the test server even existed) has been removed; the single
	// deferred call below is sufficient.
	testServer := httptest.NewServer(service.RegisterRoutes())
	gock.New(testServer.URL).EnableNetworking().Persist()
	defer gock.DisableNetworking()
	defer testServer.Close()
	// create httpexpect instance
	e := httpexpect.New(t, testServer.URL)
	Data["status"] = "template"
	t.Run("Create a template from post", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		postSelectWithSpace(mock)
		// preload tags & categories
		mock.ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "post_categories"`)).
			WithArgs(sqlmock.AnyArg()).
			WillReturnRows(sqlmock.NewRows([]string{"post_id", "category_id"}).
				AddRow(1, 1))
		category.SelectWithOutSpace(mock)
		mock.ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "post_tags"`)).
			WithArgs(sqlmock.AnyArg()).
			WillReturnRows(sqlmock.NewRows([]string{"post_id", "tag_id"}).
				AddRow(1, 1))
		tag.SelectMock(mock, tag.Data, 1)
		postInsertMock(mock, Data, false)
		postSelectWithOutSpace(mock, Data)
		mock.ExpectCommit()
		e.POST(templatePath).
			WithHeaders(headers).
			WithJSON(templateData).
			Expect().
			Status(http.StatusOK)
		test.ExpectationsMet(t, mock)
	})
	t.Run("invalid template data body", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		e.POST(templatePath).
			WithHeaders(headers).
			WithJSON(invalidTemplateData).
			Expect().
			Status(http.StatusUnprocessableEntity)
	})
	t.Run("undecodable template data body", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		e.POST(templatePath).
			WithHeaders(headers).
			WithJSON(undecodableTemplateData).
			Expect().
			Status(http.StatusUnprocessableEntity)
	})
	t.Run("post record not found", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		// Source post select returns no rows -> 404.
		mock.ExpectQuery(selectQuery).
			WithArgs(1, false, 1).
			WillReturnRows(sqlmock.NewRows(columns))
		e.POST(templatePath).
			WithHeaders(headers).
			WithJSON(templateData).
			Expect().
			Status(http.StatusNotFound)
		test.ExpectationsMet(t, mock)
	})
	t.Run("cannot create template", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		postSelectWithSpace(mock)
		// preload tags & categories
		mock.ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "post_categories"`)).
			WithArgs(sqlmock.AnyArg()).
			WillReturnRows(sqlmock.NewRows([]string{"post_id", "category_id"}).
				AddRow(1, 1))
		category.SelectWithOutSpace(mock)
		mock.ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "post_tags"`)).
			WithArgs(sqlmock.AnyArg()).
			WillReturnRows(sqlmock.NewRows([]string{"post_id", "tag_id"}).
				AddRow(1, 1))
		tag.SelectMock(mock, tag.Data, 1)
		mock.ExpectBegin()
		medium.SelectWithSpace(mock)
		format.SelectMock(mock, 1, 1)
		// The insert itself fails -> 500 and rollback.
		mock.ExpectQuery(`INSERT INTO "posts"`).
			WithArgs(test.AnyTime{}, test.AnyTime{}, nil, 1, 1, Data["title"], Data["subtitle"], Data["slug"], Data["status"], Data["is_page"], Data["excerpt"], Data["description"], Data["html_description"], Data["is_featured"], Data["is_sticky"], Data["is_highlighted"], Data["format_id"], nil, 1, nil, Data["featured_medium_id"]).
			WillReturnError(errors.New("cannot create post"))
		mock.ExpectRollback()
		e.POST(templatePath).
			WithHeaders(headers).
			WithJSON(templateData).
			Expect().
			Status(http.StatusInternalServerError)
		test.ExpectationsMet(t, mock)
	})
	t.Run("create template when meili is down", func(t *testing.T) {
		test.DisableMeiliGock(testServer.URL)
		test.CheckSpaceMock(mock)
		postSelectWithSpace(mock)
		// preload tags & categories
		mock.ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "post_categories"`)).
			WithArgs(sqlmock.AnyArg()).
			WillReturnRows(sqlmock.NewRows([]string{"post_id", "category_id"}).
				AddRow(1, 1))
		category.SelectWithOutSpace(mock)
		mock.ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "post_tags"`)).
			WithArgs(sqlmock.AnyArg()).
			WillReturnRows(sqlmock.NewRows([]string{"post_id", "tag_id"}).
				AddRow(1, 1))
		tag.SelectMock(mock, tag.Data, 1)
		postInsertMock(mock, Data, false)
		postSelectWithOutSpace(mock, Data)
		mock.ExpectRollback()
		e.POST(templatePath).
			WithHeaders(headers).
			WithJSON(templateData).
			Expect().
			Status(http.StatusInternalServerError)
		test.ExpectationsMet(t, mock)
	})
	Data["status"] = "draft"
}
<file_sep>/test/service/core/menu/testvars.go
package menu
import (
"database/sql/driver"
"fmt"
"regexp"
"time"
"github.com/DATA-DOG/go-sqlmock"
"github.com/jinzhu/gorm/dialects/postgres"
)
// headers carries the space/user ids the server middleware reads on every
// request.
var headers = map[string]string{
	"X-Space": "1",
	"X-User":  "1",
}

// Data is the baseline menu fixture used across the tests in this package.
var Data = map[string]interface{}{
	"name": "Elections",
	"slug": "elections",
	"menu": postgres.Jsonb{
		RawMessage: []byte(`{"item1":"description"}`),
	},
}

// menulist holds two menu fixtures for list-endpoint tests.
var menulist = []map[string]interface{}{
	{
		"name": "Elections",
		"slug": "elections",
		"menu": postgres.Jsonb{
			RawMessage: []byte(`{"item1":"description1"}`),
		},
	},
	{
		"name": "India",
		"slug": "india",
		"menu": postgres.Jsonb{
			RawMessage: []byte(`{"item2":"description2"}`),
		},
	},
}

// invalidData fails payload validation (name too short).
var invalidData = map[string]interface{}{
	"name": "a",
}

// Columns mirrors the menus table columns in the order the handlers select
// them.
var Columns = []string{"id", "created_at", "updated_at", "deleted_at", "created_by_id", "updated_by_id", "name", "slug", "menu", "space_id"}

// SQL fragments matched (as regexes) against the queries the handlers issue;
// delete is a soft delete implemented as an UPDATE of deleted_at.
var selectQuery = regexp.QuoteMeta(`SELECT * FROM "menus"`)
var deleteQuery = regexp.QuoteMeta(`UPDATE "menus" SET "deleted_at"=`)

// Route paths under test.
var basePath = "/core/menus"
var path = "/core/menus/{menu_id}"
// SelectQuery registers an expectation on mock for the menu select query and
// returns a single row assembled from the shared Data fixture.
func SelectQuery(mock sqlmock.Sqlmock, args ...driver.Value) {
	rows := sqlmock.NewRows(Columns).
		AddRow(1, time.Now(), time.Now(), nil, 1, 1, Data["name"], Data["slug"], Data["menu"], 1)
	mock.ExpectQuery(selectQuery).WithArgs(args...).WillReturnRows(rows)
}
// slugCheckMock expects the slug-uniqueness lookup performed before a menu
// is created and yields no matching rows.
func slugCheckMock(mock sqlmock.Sqlmock) {
	emptyRows := sqlmock.NewRows([]string{"id", "created_at", "updated_at", "deleted_at", "space_id", "name", "slug"})
	mock.ExpectQuery(regexp.QuoteMeta(`SELECT slug, space_id FROM "menus"`)).
		WithArgs(fmt.Sprint(Data["slug"], "%"), 1).
		WillReturnRows(emptyRows)
}
<file_sep>/service/core/action/tag/delete.go
package tag
import (
"errors"
"net/http"
"strconv"
"github.com/factly/dega-server/config"
"github.com/factly/dega-server/service/core/model"
"github.com/factly/dega-server/util"
"github.com/factly/x/errorx"
"github.com/factly/x/loggerx"
"github.com/factly/x/meilisearchx"
"github.com/factly/x/middlewarex"
"github.com/factly/x/renderx"
"github.com/go-chi/chi"
)
// delete - Delete tag by id
// @Summary Delete a tag
// @Description Delete tag by ID
// @Tags Tag
// @ID delete-tag-by-id
// @Param X-User header string true "User ID"
// @Param tag_id path string true "Tag ID"
// @Param X-Space header string true "Space ID"
// @Success 200
// @Failure 400 {array} string
// @Router /core/tags/{tag_id} [delete]
// delete removes a tag by id after verifying it exists in the caller's space
// and is not attached to any post, then mirrors the deletion to meilisearch
// and (when NATS is configured) publishes a tag.deleted event.
func delete(w http.ResponseWriter, r *http.Request) {
	tagID := chi.URLParam(r, "tag_id")
	id, err := strconv.Atoi(tagID)
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InvalidID()))
		return
	}
	sID, err := middlewarex.GetSpace(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}
	result := &model.Tag{}
	result.ID = uint(id)
	// check record exists or not
	err = config.DB.Where(&model.Tag{
		SpaceID: uint(sID),
	}).First(&result).Error
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.RecordNotFound()))
		return
	}
	// check if tag is associated with posts
	tag := new(model.Tag)
	tag.ID = uint(id)
	totAssociated := config.DB.Model(tag).Association("Posts").Count()
	if totAssociated != 0 {
		loggerx.Error(errors.New("tag is associated with post"))
		errorx.Render(w, errorx.Parser(errorx.CannotDelete("tag", "post")))
		return
	}
	tx := config.DB.Begin()
	// BUGFIX: the delete error was previously ignored — a failed DB delete
	// would still remove the meilisearch document and commit the transaction.
	if err = tx.Delete(&result).Error; err != nil {
		tx.Rollback()
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.DBError()))
		return
	}
	// Keep the search index in sync; roll the DB delete back if it fails.
	err = meilisearchx.DeleteDocument("dega", result.ID, "tag")
	if err != nil {
		tx.Rollback()
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
		return
	}
	tx.Commit()
	// Best-effort event publication, only when NATS is reachable.
	if util.CheckNats() {
		if err = util.NC.Publish("tag.deleted", result); err != nil {
			loggerx.Error(err)
			errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
			return
		}
	}
	renderx.JSON(w, http.StatusOK, nil)
}
<file_sep>/test/service/podcast/episode/create_test.go
package episode
import (
"net/http"
"net/http/httptest"
"testing"
"github.com/DATA-DOG/go-sqlmock"
"github.com/factly/dega-server/service"
"github.com/factly/dega-server/test"
"github.com/factly/dega-server/test/service/core/medium"
"github.com/factly/dega-server/test/service/core/permissions/space"
"github.com/factly/dega-server/test/service/podcast"
"github.com/gavv/httpexpect"
"github.com/jinzhu/gorm/dialects/postgres"
"github.com/spf13/viper"
"gopkg.in/h2non/gock.v1"
)
// TestEpisodeCreate exercises POST on the podcast-episode endpoint:
// validation failures, undecodable/unparseable payloads, a successful
// create (including author association), and meilisearch being unreachable.
func TestEpisodeCreate(t *testing.T) {
	mock := test.SetupMockDB()
	viper.Set("templates_path", "../../../../web/templates/*")
	test.MockServer()
	// BUGFIX: a duplicate `defer gock.DisableNetworking()` that used to sit
	// here (before the test server even existed) has been removed; the single
	// deferred call below is sufficient.
	testServer := httptest.NewServer(service.RegisterRoutes())
	gock.New(testServer.URL).EnableNetworking().Persist()
	defer gock.DisableNetworking()
	defer testServer.Close()
	// create httpexpect instance
	e := httpexpect.New(t, testServer.URL)
	t.Run("Unprocessable episode", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)
		e.POST(basePath).
			WithJSON(invalidData).
			WithHeaders(headers).
			Expect().
			Status(http.StatusUnprocessableEntity)
		test.ExpectationsMet(t, mock)
	})
	t.Run("Undecodable episode", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)
		// Empty body -> JSON decode failure.
		e.POST(basePath).
			WithHeaders(headers).
			Expect().
			Status(http.StatusUnprocessableEntity)
		test.ExpectationsMet(t, mock)
	})
	t.Run("cannot parse episode description", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)
		// Swap in a description that is not valid editor-js content, then
		// restore the valid fixture afterwards.
		Data["description"] = postgres.Jsonb{RawMessage: []byte(`{"raw":"test"}`)}
		e.POST(basePath).
			WithHeaders(headers).
			WithJSON(Data).
			Expect().
			Status(http.StatusUnprocessableEntity)
		test.ExpectationsMet(t, mock)
		Data["description"] = postgres.Jsonb{
			RawMessage: []byte(`{"time":1617039625490,"blocks":[{"type":"paragraph","data":{"text":"Test Description"}}],"version":"2.19.0"}`),
		}
	})
	t.Run("create episode", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)
		slugCheckMock(mock, Data)
		mock.ExpectBegin()
		medium.SelectWithSpace(mock)
		podcast.SelectQuery(mock)
		mock.ExpectQuery(`INSERT INTO "episodes"`).
			WithArgs(test.AnyTime{}, test.AnyTime{}, nil, 1, 1, Data["title"], Data["slug"], Data["season"], Data["episode"], Data["audio_url"], Data["podcast_id"], Data["description"], Data["html_description"], test.AnyTime{}, 1, Data["medium_id"]).
			WillReturnRows(sqlmock.
				NewRows([]string{"medium_id", "id"}).
				AddRow(1, 1))
		mock.ExpectQuery(`INSERT INTO "episode_authors"`).
			WithArgs(test.AnyTime{}, test.AnyTime{}, nil, 0, 0, 1, 1).
			WillReturnRows(sqlmock.
				NewRows([]string{"id"}).
				AddRow(1))
		SelectQuery(mock)
		medium.SelectWithOutSpace(mock)
		podcast.SelectQuery(mock)
		mock.ExpectCommit()
		e.POST(basePath).
			WithHeaders(headers).
			WithJSON(Data).
			Expect().
			Status(http.StatusCreated).
			JSON().
			Object().ContainsMap(resData)
		test.ExpectationsMet(t, mock)
	})
	t.Run("create episode when meili is down", func(t *testing.T) {
		test.DisableMeiliGock(testServer.URL)
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)
		slugCheckMock(mock, Data)
		mock.ExpectBegin()
		medium.SelectWithSpace(mock)
		podcast.SelectQuery(mock)
		mock.ExpectQuery(`INSERT INTO "episodes"`).
			WithArgs(test.AnyTime{}, test.AnyTime{}, nil, 1, 1, Data["title"], Data["slug"], Data["season"], Data["episode"], Data["audio_url"], Data["podcast_id"], Data["description"], Data["html_description"], test.AnyTime{}, 1, Data["medium_id"]).
			WillReturnRows(sqlmock.
				NewRows([]string{"medium_id", "id"}).
				AddRow(1, 1))
		mock.ExpectQuery(`INSERT INTO "episode_authors"`).
			WithArgs(test.AnyTime{}, test.AnyTime{}, nil, 0, 0, 1, 1).
			WillReturnRows(sqlmock.
				NewRows([]string{"id"}).
				AddRow(1))
		SelectQuery(mock)
		medium.SelectWithOutSpace(mock)
		podcast.SelectQuery(mock)
		mock.ExpectRollback()
		e.POST(basePath).
			WithHeaders(headers).
			WithJSON(Data).
			Expect().
			Status(http.StatusInternalServerError)
		test.ExpectationsMet(t, mock)
	})
}
<file_sep>/test/service/fact-check/rating/list_test.go
package rating
import (
"net/http"
"net/http/httptest"
"testing"
"time"
"github.com/factly/dega-server/test/service/core/permissions/space"
"github.com/jinzhu/gorm/dialects/postgres"
"github.com/DATA-DOG/go-sqlmock"
"github.com/factly/dega-server/service"
"github.com/factly/dega-server/test"
"github.com/gavv/httpexpect/v2"
"gopkg.in/h2non/gock.v1"
)
// TestRatingList exercises GET on the ratings list endpoint: empty and
// non-empty listings and pagination.
//
// NOTE(review): unlike the other list tests in this suite, this one never
// calls test.MockServer() — presumably the ratings listing hits no external
// services, but confirm before relying on it.
func TestRatingList(t *testing.T) {
	mock := test.SetupMockDB()
	testServer := httptest.NewServer(service.RegisterRoutes())
	gock.New(testServer.URL).EnableNetworking().Persist()
	defer gock.DisableNetworking()
	defer testServer.Close()
	// create httpexpect instance
	e := httpexpect.New(t, testServer.URL)
	// Two rating fixtures used by the non-empty and pagination sub-tests.
	ratinglist := []map[string]interface{}{
		{"name": "Test Rating 1", "slug": "test-rating-1",
			"description": postgres.Jsonb{
				RawMessage: []byte(`{"time":1617039625490,"blocks":[{"type":"paragraph","data":{"text":"Test Description 1"}}],"version":"2.19.0"}`),
			},
			"html_description": "<p>Test Description 1</p>",
		},
		{"name": "Test Rating 2", "slug": "test-rating-2",
			"description": postgres.Jsonb{
				RawMessage: []byte(`{"time":1617039625490,"blocks":[{"type":"paragraph","data":{"text":"Test Description 2"}}],"version":"2.19.0"}`),
			},
			"html_description": "<p>Test Description 2</p>",
		},
	}
	t.Run("get empty list of ratings", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)
		ratingCountQuery(mock, 0)
		// No rows returned -> handler should report total: 0.
		mock.ExpectQuery(selectQuery).
			WillReturnRows(sqlmock.NewRows(columns))
		e.GET(basePath).
			WithHeaders(headers).
			Expect().
			Status(http.StatusOK).
			JSON().
			Object().
			ContainsMap(map[string]interface{}{"total": 0})
		test.ExpectationsMet(t, mock)
	})
	t.Run("get non-empty list of ratings", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)
		ratingCountQuery(mock, len(ratinglist))
		mock.ExpectQuery(selectQuery).
			WillReturnRows(sqlmock.NewRows(columns).
				AddRow(1, time.Now(), time.Now(), nil, 1, 1, ratinglist[0]["name"], ratinglist[0]["slug"], ratinglist[0]["background_colour"], ratinglist[0]["text_colour"], ratinglist[0]["medium_id"], ratinglist[0]["description"], ratinglist[0]["html_description"], ratinglist[0]["numeric_value"], 1).
				AddRow(2, time.Now(), time.Now(), nil, 1, 1, ratinglist[1]["name"], ratinglist[1]["slug"], ratinglist[0]["background_colour"], ratinglist[1]["text_colour"], ratinglist[1]["medium_id"], ratinglist[1]["description"], ratinglist[1]["html_description"], ratinglist[1]["numeric_value"], 1))
		e.GET(basePath).
			WithHeaders(headers).
			WithQuery("all", "true").
			Expect().
			Status(http.StatusOK).
			JSON().
			Object().
			ContainsMap(map[string]interface{}{"total": len(ratinglist)}).
			Value("nodes").
			Array().
			Element(0).
			Object().
			ContainsMap(ratinglist[0])
		test.ExpectationsMet(t, mock)
	})
	t.Run("get ratings with pagination", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)
		ratingCountQuery(mock, len(ratinglist))
		// limit=1&page=2 should surface only the second fixture row.
		mock.ExpectQuery(paginationQuery).
			WillReturnRows(sqlmock.NewRows(columns).
				AddRow(2, time.Now(), time.Now(), nil, 1, 1, ratinglist[1]["name"], ratinglist[1]["slug"], ratinglist[0]["background_colour"], ratinglist[1]["text_colour"], ratinglist[1]["medium_id"], ratinglist[1]["description"], ratinglist[1]["html_description"], ratinglist[1]["numeric_value"], 1))
		e.GET(basePath).
			WithQueryObject(map[string]interface{}{
				"limit": "1",
				"page":  "2",
			}).
			WithHeaders(headers).
			Expect().
			Status(http.StatusOK).
			JSON().
			Object().
			ContainsMap(map[string]interface{}{"total": len(ratinglist)}).
			Value("nodes").
			Array().
			Element(0).
			Object().
			ContainsMap(ratinglist[1])
		test.ExpectationsMet(t, mock)
	})
}
<file_sep>/test/service/core/medium/testvars.go
package medium
import (
"errors"
"fmt"
"regexp"
"time"
"github.com/DATA-DOG/go-sqlmock"
"github.com/factly/dega-server/test"
"github.com/jinzhu/gorm/dialects/postgres"
)
// headers carries the auth headers every request in this package's tests
// sends: the space and the acting user.
var headers = map[string]string{
	"X-Space": "1",
	"X-User":  "1",
}

// Data is the canonical valid medium payload shared by create/update tests.
var Data = map[string]interface{}{
	"name":        "Image",
	"slug":        "image",
	"type":        "jpg",
	"title":       "Sample image",
	"description": "desc",
	"caption":     "sample",
	"alt_text":    "sample",
	"file_size":   100,
	"url": postgres.Jsonb{
		RawMessage: []byte(`{"raw":"http://testimage.com/test.jpg"}`),
	},
	"dimensions": "testdims",
}

// createArr wraps Data for endpoints that accept a list of media.
var createArr = []map[string]interface{}{
	Data,
}

// invalidData fails validation (name shorter than the minimum length).
var invalidData = []map[string]interface{}{
	{
		"name": "a",
	},
}

// columns lists the media table columns in the order mocked rows are built.
var columns = []string{"id", "created_at", "updated_at", "deleted_at", "created_by_id", "updated_by_id", "name", "slug", "type", "title", "description", "caption", "alt_text", "file_size", "url", "dimensions", "space_id"}

// Common query fragments matched by sqlmock.
var selectQuery = regexp.QuoteMeta(`SELECT * FROM "media"`)
var deleteQuery = regexp.QuoteMeta(`UPDATE "media" SET "deleted_at"=`)

// paginationQuery matches a paged select (limit 1, second page).
var paginationQuery = `SELECT \* FROM "media" (.+) LIMIT 1 OFFSET 1`

// API paths under test.
var basePath = "/core/media"
var path = "/core/media/{medium_id}"
// slugCheckMock registers the expectation for the slug-uniqueness lookup
// that runs before a medium is created; it returns no rows, so the slug
// is considered free.
func slugCheckMock(mock sqlmock.Sqlmock, medium map[string]interface{}) {
	slugPrefix := fmt.Sprint(medium["slug"], "%")
	emptyRows := sqlmock.NewRows(columns)
	query := regexp.QuoteMeta(`SELECT slug, space_id FROM "media"`)
	mock.ExpectQuery(query).WithArgs(slugPrefix, 1).WillReturnRows(emptyRows)
}
// recordNotFoundMock expects the lookup of medium 100 in space 1 and
// returns an empty result set, simulating a missing record.
func recordNotFoundMock(mock sqlmock.Sqlmock) {
	noRows := sqlmock.NewRows(columns)
	mock.ExpectQuery(selectQuery).WithArgs(1, 100).WillReturnRows(noRows)
}
// mediumInsertError makes the INSERT for a medium fail with a simulated
// foreign-key violation so callers can assert the rollback path.
// NOTE(review): the fabricated error text names table "medium" while the
// real table is "media" — presumably copied from a live pq error; confirm
// no test asserts on the exact message before normalizing it.
func mediumInsertError(mock sqlmock.Sqlmock) {
	mock.ExpectBegin()
	mock.ExpectQuery(`INSERT INTO "media"`).
		WithArgs(test.AnyTime{}, test.AnyTime{}, nil, 1, 1, Data["name"], Data["slug"], Data["type"], Data["title"], Data["description"], Data["caption"], Data["alt_text"], Data["file_size"], Data["url"], Data["dimensions"], 1).
		WillReturnError(errors.New(`pq: insert or update on table "medium" violates foreign key constraint "media_space_id_spaces_id_foreign"`))
	mock.ExpectRollback()
}
// mediumInsertMock expects a successful transactional INSERT of Data and
// returns the generated id (1).
func mediumInsertMock(mock sqlmock.Sqlmock) {
	idRow := sqlmock.NewRows([]string{"id"}).AddRow(1)
	mock.ExpectBegin()
	mock.ExpectQuery(`INSERT INTO "media"`).
		WithArgs(test.AnyTime{}, test.AnyTime{}, nil, 1, 1, Data["name"], Data["slug"], Data["type"], Data["title"], Data["description"], Data["caption"], Data["alt_text"], Data["file_size"], Data["url"], Data["dimensions"], 1).
		WillReturnRows(idRow)
}
// mediumUpdateMock sets up the transactional UPDATE expectation for a
// medium. When err is non-nil the statement fails with "update failed"
// (the passed error's content is intentionally not used, matching the
// original behavior); otherwise it reports one affected row.
//
// Fix: the long WithArgs list was duplicated verbatim in both branches —
// a maintenance hazard when the column set changes. The expectation is
// now built once and only the outcome differs.
func mediumUpdateMock(mock sqlmock.Sqlmock, medium map[string]interface{}, err error) {
	mock.ExpectBegin()
	exec := mock.ExpectExec(`UPDATE \"media\"`).
		WithArgs(test.AnyTime{}, 1, medium["name"], medium["slug"], medium["type"], medium["title"], medium["description"], medium["caption"], medium["alt_text"], medium["file_size"], medium["url"], medium["dimensions"], 1)
	if err != nil {
		exec.WillReturnError(errors.New("update failed"))
	} else {
		exec.WillReturnResult(sqlmock.NewResult(1, 1))
	}
}
// mediumDeleteMock wires the full delete path: the handler first verifies
// the medium is not referenced by posts, categories, spaces, ratings or
// claimants (all counts return 0 here, so deletion proceeds), then
// soft-deletes inside a transaction. The expectation order below mirrors
// the handler's query order and must not be reordered.
func mediumDeleteMock(mock sqlmock.Sqlmock) {
	mediumPostExpect(mock, 0)
	mediumCategoryExpect(mock, 0)
	mediumSpaceExpect(mock, 0)
	mediumRatingExpect(mock, 0)
	mediumClaimantExpect(mock, 0)
	mock.ExpectBegin()
	mock.ExpectExec(deleteQuery).
		WithArgs(test.AnyTime{}, 1).
		WillReturnResult(sqlmock.NewResult(1, 1))
}
// SelectWithSpace expects a medium lookup scoped by both id and space
// (args 1, 1) and returns the canonical Data row.
func SelectWithSpace(mock sqlmock.Sqlmock) {
	row := sqlmock.NewRows(columns).
		AddRow(1, time.Now(), time.Now(), nil, 1, 1, Data["name"], Data["slug"], Data["type"], Data["title"], Data["description"], Data["caption"], Data["alt_text"], Data["file_size"], Data["url"], Data["dimensions"], 1)
	mock.ExpectQuery(selectQuery).WithArgs(1, 1).WillReturnRows(row)
}
// SelectWithOutSpace expects a medium lookup by id only (single arg 1)
// and returns the canonical Data row.
func SelectWithOutSpace(mock sqlmock.Sqlmock) {
	row := sqlmock.NewRows(columns).
		AddRow(1, time.Now(), time.Now(), nil, 1, 1, Data["name"], Data["slug"], Data["type"], Data["title"], Data["description"], Data["caption"], Data["alt_text"], Data["file_size"], Data["url"], Data["dimensions"], 1)
	mock.ExpectQuery(selectQuery).WithArgs(1).WillReturnRows(row)
}
// EmptyRowMock expects a medium lookup (id 1, space 1) that matches
// nothing, so the caller sees a record-not-found condition.
func EmptyRowMock(mock sqlmock.Sqlmock) {
	noRows := sqlmock.NewRows(columns)
	mock.ExpectQuery(selectQuery).WithArgs(1, 1).WillReturnRows(noRows)
}
// countQuery expects the media count query and returns the given total.
func countQuery(mock sqlmock.Sqlmock, count int) {
	countRow := sqlmock.NewRows([]string{"count"}).AddRow(count)
	query := regexp.QuoteMeta(`SELECT count(*) FROM "media"`)
	mock.ExpectQuery(query).WillReturnRows(countRow)
}
// The handlers check, before deleting a medium, that it is not referenced
// by other entities. Each expectation below registers a count(*) query on
// one referencing table and returns the given count.
//
// Fix: the five functions repeated the same expectation verbatim except
// for the table name and bound args; the shared shape now lives in
// refCountExpect so a change to the pattern happens in one place. The
// public functions and their behavior are unchanged.

// refCountExpect expects `SELECT count(*) FROM "<table>"` and wires the
// caller-supplied args/row via the returned ExpectedQuery.
func refCountExpect(mock sqlmock.Sqlmock, table string) *sqlmock.ExpectedQuery {
	return mock.ExpectQuery(regexp.QuoteMeta(`SELECT count(*) FROM "` + table + `"`))
}

// refCountRow builds a single-column count result.
func refCountRow(count int) *sqlmock.Rows {
	return sqlmock.NewRows([]string{"count"}).AddRow(count)
}

// check medium associated with any post before deleting
func mediumPostExpect(mock sqlmock.Sqlmock, count int) {
	refCountExpect(mock, "posts").WithArgs(1).WillReturnRows(refCountRow(count))
}

// check medium associated with any rating before deleting
func mediumRatingExpect(mock sqlmock.Sqlmock, count int) {
	refCountExpect(mock, "ratings").WithArgs(1).WillReturnRows(refCountRow(count))
}

// check medium associated with any claimant before deleting
func mediumClaimantExpect(mock sqlmock.Sqlmock, count int) {
	refCountExpect(mock, "claimants").WithArgs(1).WillReturnRows(refCountRow(count))
}

// check medium associated with any category before deleting
func mediumCategoryExpect(mock sqlmock.Sqlmock, count int) {
	refCountExpect(mock, "categories").WithArgs(1).WillReturnRows(refCountRow(count))
}

// check medium associated with any space before deleting
// (the spaces query binds four args in the handler, hence WithArgs(1,1,1,1))
func mediumSpaceExpect(mock sqlmock.Sqlmock, count int) {
	refCountExpect(mock, "spaces").WithArgs(1, 1, 1, 1).WillReturnRows(refCountRow(count))
}
<file_sep>/service/fact-check/action/claimant/route.go
package claimant
import (
"github.com/factly/dega-server/config"
"github.com/factly/dega-server/util"
"github.com/go-chi/chi"
"github.com/jinzhu/gorm/dialects/postgres"
)
// claimant model
// Request body for creating/updating a claimant. Slug is optional and is
// derived from Name when absent or invalid.
type claimant struct {
	Name        string         `json:"name" validate:"required,min=3,max=50"`
	Slug        string         `json:"slug"`
	Description postgres.Jsonb `json:"description" swaggertype:"primitive,string"`
	TagLine     string         `json:"tag_line"`
	MediumID    uint           `json:"medium_id"` // 0 is treated as "no medium" by the create handler
	MetaFields  postgres.Jsonb `json:"meta_fields" swaggertype:"primitive,string"`
}

// userContext is the context key under which the acting user's ID is
// stored for audit-stamping gorm hooks.
var userContext config.ContextKey = "claimant_user"
// Router - Group of claimant router
// Every route is gated by a Keto policy check for the "claimants" entity.
func Router() chi.Router {
	const entity = "claimants"
	router := chi.NewRouter()
	router.With(util.CheckKetoPolicy(entity, "get")).Get("/", list)
	router.With(util.CheckKetoPolicy(entity, "create")).Post("/", create)
	router.Route("/{claimant_id}", func(sub chi.Router) {
		sub.With(util.CheckKetoPolicy(entity, "get")).Get("/", details)
		sub.With(util.CheckKetoPolicy(entity, "update")).Put("/", update)
		sub.With(util.CheckKetoPolicy(entity, "delete")).Delete("/", delete)
	})
	return router
}
<file_sep>/service/fact-check/action/google/route.go
package google
import "github.com/go-chi/chi"
// Router - google fact checks router
// Exposes only the list endpoint.
func Router() chi.Router {
	router := chi.NewRouter()
	router.Get("/", list)
	return router
}
<file_sep>/test/service/fact-check/claimant/list_test.go
package claimant
import (
"net/http"
"net/http/httptest"
"testing"
"time"
"github.com/DATA-DOG/go-sqlmock"
"github.com/factly/dega-server/service"
"github.com/factly/dega-server/test"
"github.com/factly/dega-server/test/service/core/permissions/space"
"github.com/gavv/httpexpect/v2"
"github.com/jinzhu/gorm/dialects/postgres"
"github.com/spf13/viper"
"gopkg.in/h2non/gock.v1"
)
// TestClaimantList exercises GET /fact-check/claimants: empty list, full
// list, pagination, search via meili, empty search results, and meili
// downtime. Each subtest registers its own sqlmock expectations; their
// order mirrors the handler's query order and must not be changed.
func TestClaimantList(t *testing.T) {
	mock := test.SetupMockDB()
	test.MockServer()
	testServer := httptest.NewServer(service.RegisterRoutes())
	gock.New(testServer.URL).EnableNetworking().Persist()
	defer gock.DisableNetworking()
	defer testServer.Close()
	// create httpexpect instance
	e := httpexpect.New(t, testServer.URL)
	// Two fixture claimants; description is editorjs JSON with a
	// pre-rendered HTML counterpart.
	claimantlist := []map[string]interface{}{
		{"name": "Test Claimant 1", "slug": "test-claimant-1",
			"description": postgres.Jsonb{
				RawMessage: []byte(`{"time":1617039625490,"blocks":[{"type":"paragraph","data":{"text":"Test Description 1"}}],"version":"2.19.0"}`),
			},
			"html_description": "<p>Test Description 1</p>",
		},
		{"name": "Test Claimant 2", "slug": "test-claimant-2",
			"description": postgres.Jsonb{
				RawMessage: []byte(`{"time":1617039625490,"blocks":[{"type":"paragraph","data":{"text":"Test Description 2"}}],"version":"2.19.0"}`),
			},
			"html_description": "<p>Test Description 2</p>",
		},
	}
	t.Run("get empty list of claimants", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)
		claimantCountQuery(mock, 0)
		mock.ExpectQuery(selectQuery).
			WillReturnRows(sqlmock.NewRows(columns))
		e.GET(basePath).
			WithHeaders(headers).
			Expect().
			Status(http.StatusOK).
			JSON().
			Object().
			ContainsMap(map[string]interface{}{"total": 0})
		test.ExpectationsMet(t, mock)
	})
	t.Run("get non-empty list of claimants", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)
		claimantCountQuery(mock, len(claimantlist))
		// NOTE(review): "medium_id" and "numeric_value" are not keys of the
		// fixture maps, so these lookups yield nil (NULL columns) —
		// presumably intentional; confirm against the columns definition.
		mock.ExpectQuery(selectQuery).
			WillReturnRows(sqlmock.NewRows(columns).
				AddRow(1, time.Now(), time.Now(), nil, 1, 1, claimantlist[0]["name"], claimantlist[0]["slug"], claimantlist[0]["medium_id"], claimantlist[0]["description"], claimantlist[0]["html_description"], claimantlist[0]["numeric_value"], 1).
				AddRow(2, time.Now(), time.Now(), nil, 1, 1, claimantlist[1]["name"], claimantlist[1]["slug"], claimantlist[1]["medium_id"], claimantlist[1]["description"], claimantlist[1]["html_description"], claimantlist[1]["numeric_value"], 1))
		e.GET(basePath).
			WithHeaders(headers).
			Expect().
			Status(http.StatusOK).
			JSON().
			Object().
			ContainsMap(map[string]interface{}{"total": len(claimantlist)}).
			Value("nodes").
			Array().
			Element(0).
			Object().
			ContainsMap(claimantlist[0])
		test.ExpectationsMet(t, mock)
	})
	t.Run("get claimants with pagination", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)
		claimantCountQuery(mock, len(claimantlist))
		// Page 2 with limit 1 returns only the second claimant.
		mock.ExpectQuery(paginationQuery).
			WillReturnRows(sqlmock.NewRows(columns).
				AddRow(2, time.Now(), time.Now(), nil, 1, 1, claimantlist[1]["name"], claimantlist[1]["slug"], claimantlist[1]["medium_id"], claimantlist[1]["description"], claimantlist[1]["html_description"], claimantlist[1]["numeric_value"], 1))
		e.GET(basePath).
			WithQueryObject(map[string]interface{}{
				"limit": "1",
				"page":  "2",
			}).
			WithHeaders(headers).
			Expect().
			Status(http.StatusOK).
			JSON().
			Object().
			ContainsMap(map[string]interface{}{"total": len(claimantlist)}).
			Value("nodes").
			Array().
			Element(0).
			Object().
			ContainsMap(claimantlist[1])
		test.ExpectationsMet(t, mock)
	})
	t.Run("get list of claimants based on search query q", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)
		claimantCountQuery(mock, len(claimantlist))
		mock.ExpectQuery(selectQuery).
			WillReturnRows(sqlmock.NewRows(columns).
				AddRow(1, time.Now(), time.Now(), nil, 1, 1, claimantlist[0]["name"], claimantlist[0]["slug"], claimantlist[0]["medium_id"], claimantlist[0]["description"], claimantlist[0]["html_description"], claimantlist[0]["numeric_value"], 1).
				AddRow(2, time.Now(), time.Now(), nil, 1, 1, claimantlist[1]["name"], claimantlist[1]["slug"], claimantlist[1]["medium_id"], claimantlist[1]["description"], claimantlist[1]["html_description"], claimantlist[1]["numeric_value"], 1))
		e.GET(basePath).
			WithHeaders(headers).
			WithQueryObject(map[string]interface{}{
				"q":    "test",
				"sort": "asc",
			}).
			Expect().
			Status(http.StatusOK).
			JSON().
			Object().
			ContainsMap(map[string]interface{}{"total": len(claimantlist)}).
			Value("nodes").
			Array().
			Element(0).
			Object().
			ContainsMap(claimantlist[0])
		test.ExpectationsMet(t, mock)
	})
	t.Run("when query does not match any claimant", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)
		// Replace the default meili mock with one returning zero hits.
		test.DisableMeiliGock(testServer.URL)
		gock.New(viper.GetString("meili_url") + "/indexes/dega/search").
			HeaderPresent("X-Meili-API-Key").
			Persist().
			Reply(http.StatusOK).
			JSON(test.EmptyMeili)
		e.GET(basePath).
			WithHeaders(headers).
			WithQueryObject(map[string]interface{}{
				"q":    "test",
				"sort": "asc",
			}).
			Expect().
			Status(http.StatusOK).
			JSON().
			Object().
			Value("total").
			Equal(0)
		test.ExpectationsMet(t, mock)
	})
	t.Run("search with query q when meili is down", func(t *testing.T) {
		// With meili unreachable the handler responds with an empty result
		// rather than an error.
		test.DisableMeiliGock(testServer.URL)
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)
		e.GET(basePath).
			WithHeaders(headers).
			WithQueryObject(map[string]interface{}{
				"q":    "test",
				"sort": "asc",
			}).
			Expect().
			Status(http.StatusOK).
			JSON().
			Object().
			Value("total").
			Equal(0)
		test.ExpectationsMet(t, mock)
	})
}
<file_sep>/service/core/action/menu/route.go
package menu
import (
"github.com/factly/dega-server/config"
"github.com/factly/dega-server/util"
"github.com/go-chi/chi"
"github.com/jinzhu/gorm/dialects/postgres"
)
// menu request body
// Slug is optional and is derived from Name when absent or invalid.
type menu struct {
	Name       string         `json:"name" validate:"required,min=3,max=50"`
	Slug       string         `json:"slug"`
	Menu       postgres.Jsonb `json:"menu" swaggertype:"primitive,string"`
	MetaFields postgres.Jsonb `json:"meta_fields" swaggertype:"primitive,string"`
}

// userContext is the context key under which the acting user's ID is
// stored for audit-stamping gorm hooks.
var userContext config.ContextKey = "menu_user"
// Router - Group of menu router
// Every route is gated by a Keto policy check for the "menus" entity.
func Router() chi.Router {
	const entity = "menus"
	router := chi.NewRouter()
	router.With(util.CheckKetoPolicy(entity, "get")).Get("/", list)
	router.With(util.CheckKetoPolicy(entity, "create")).Post("/", create)
	router.Route("/{menu_id}", func(sub chi.Router) {
		sub.With(util.CheckKetoPolicy(entity, "get")).Get("/", details)
		sub.With(util.CheckKetoPolicy(entity, "update")).Put("/", update)
		sub.With(util.CheckKetoPolicy(entity, "delete")).Delete("/", delete)
	})
	return router
}
<file_sep>/service/core/action/menu/create.go
package menu
import (
"context"
"encoding/json"
"errors"
"net/http"
"github.com/factly/x/loggerx"
"gorm.io/gorm"
"github.com/factly/dega-server/config"
"github.com/factly/dega-server/service/core/model"
"github.com/factly/dega-server/util"
"github.com/factly/x/errorx"
"github.com/factly/x/meilisearchx"
"github.com/factly/x/middlewarex"
"github.com/factly/x/renderx"
"github.com/factly/x/slugx"
"github.com/factly/x/validationx"
)
// create - Create menu
// @Summary Create menu
// @Description Create menu
// @Tags Menu
// @ID add-menu
// @Consume json
// @Produce json
// @Param X-User header string true "User ID"
// @Param X-Space header string true "Space ID"
// @Param Menu body menu true "Menu Object"
// @Success 201 {object} model.Menu
// @Failure 400 {array} string
// @Router /core/menus [post]
//
// Flow: auth → decode/validate → slug & duplicate-name checks →
// transactional insert → meili index → commit → optional NATS event.
// The meili insert happens inside the DB transaction so a meili failure
// rolls the record back.
func create(w http.ResponseWriter, r *http.Request) {
	// Space and user come from the auth middleware; both are required.
	sID, err := middlewarex.GetSpace(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}
	uID, err := middlewarex.GetUser(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}
	menu := &menu{}
	err = json.NewDecoder(r.Body).Decode(&menu)
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.DecodeError()))
		return
	}
	validationError := validationx.Check(menu)
	if validationError != nil {
		loggerx.Error(errors.New("validation error"))
		errorx.Render(w, validationError)
		return
	}
	// Use the client-supplied slug only when well-formed; otherwise derive
	// it from the name.
	var menuSlug string
	if menu.Slug != "" && slugx.Check(menu.Slug) {
		menuSlug = menu.Slug
	} else {
		menuSlug = slugx.Make(menu.Name)
	}
	// Get table name
	stmt := &gorm.Statement{DB: config.DB}
	_ = stmt.Parse(&model.Menu{})
	tableName := stmt.Schema.Table
	// Check if menu with same name exist
	if util.CheckName(uint(sID), menu.Name, tableName) {
		loggerx.Error(errors.New(`menu with same name exist`))
		errorx.Render(w, errorx.Parser(errorx.SameNameExist()))
		return
	}
	result := &model.Menu{
		Name:       menu.Name,
		Menu:       menu.Menu,
		Slug:       slugx.Approve(&config.DB, menuSlug, sID, tableName),
		MetaFields: menu.MetaFields,
		SpaceID:    uint(sID),
	}
	// The user ID travels in the context so the model's BeforeCreate hook
	// can stamp created/updated-by columns.
	tx := config.DB.WithContext(context.WithValue(r.Context(), userContext, uID)).Begin()
	err = tx.Model(&model.Menu{}).Create(&result).Error
	if err != nil {
		tx.Rollback()
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.DBError()))
		return
	}
	// Re-read the row so DB-populated fields are present in the response.
	tx.Model(&model.Menu{}).First(&result)
	// Insert into meili index
	meiliObj := map[string]interface{}{
		"id":       result.ID,
		"kind":     "menu",
		"name":     result.Name,
		"slug":     result.Slug,
		"menu":     result.Menu,
		"space_id": result.SpaceID,
	}
	err = meilisearchx.AddDocument("dega", meiliObj)
	if err != nil {
		tx.Rollback()
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
		return
	}
	tx.Commit()
	// Best-effort event publication after commit.
	// NOTE(review): a NATS failure here returns 500 although the record is
	// already committed — confirm that is the intended contract.
	if util.CheckNats() {
		if err = util.NC.Publish("menu.created", result); err != nil {
			loggerx.Error(err)
			errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
			return
		}
	}
	renderx.JSON(w, http.StatusCreated, result)
}
<file_sep>/service/core/model/request.go
package model
import (
"github.com/factly/dega-server/config"
"github.com/jinzhu/gorm/dialects/postgres"
"gorm.io/gorm"
)
// Request model
// Common fields shared by permission requests.
type Request struct {
	config.Base
	Title       string         `gorm:"column:title" json:"title"`
	Description postgres.Jsonb `gorm:"column:description" json:"description" swaggertype:"primitive,string"`
	Status      string         `gorm:"column:status" json:"status"`
}

// OrganisationPermissionRequest model
// A request to change the number of spaces allowed for an organisation.
type OrganisationPermissionRequest struct {
	Request
	OrganisationID uint  `gorm:"column:organisation_id" json:"organisation_id"`
	Spaces         int64 `gorm:"column:spaces" json:"spaces"`
}

// SpacePermissionRequest model
// A request to change per-space quotas and feature flags.
type SpacePermissionRequest struct {
	Request
	Media     int64  `gorm:"column:media" json:"media"`
	Posts     int64  `gorm:"column:posts" json:"posts"`
	Episodes  int64  `gorm:"column:episodes" json:"episodes"`
	Podcast   bool   `gorm:"column:podcast" json:"podcast"`
	FactCheck bool   `gorm:"column:fact_check" json:"fact_check"`
	SpaceID   uint   `gorm:"column:space_id" json:"space_id"`
	Space     *Space `gorm:"foreignKey:space_id" json:"space,omitempty"`
}

// Context keys under which handlers store the acting user's ID for the
// BeforeCreate hooks below.
var spaceRequestUser config.ContextKey = "space_perm_user"
var orgRequestUser config.ContextKey = "org_perm_user"
// BeforeCreate hook
// Stamps CreatedByID/UpdatedByID from the user ID stored in the statement
// context under orgRequestUser; a missing value skips stamping silently.
// NOTE(review): the unchecked type assertion panics if the stored value is
// not an int — confirm all writers store an int.
func (opr *OrganisationPermissionRequest) BeforeCreate(tx *gorm.DB) error {
	ctx := tx.Statement.Context
	userID := ctx.Value(orgRequestUser)
	if userID == nil {
		return nil
	}
	uID := userID.(int)
	opr.CreatedByID = uint(uID)
	opr.UpdatedByID = uint(uID)
	return nil
}
// BeforeCreate hook
// Stamps CreatedByID/UpdatedByID from the user ID stored in the statement
// context under spaceRequestUser; a missing value skips stamping silently.
// NOTE(review): the unchecked type assertion panics if the stored value is
// not an int — confirm all writers store an int.
func (spr *SpacePermissionRequest) BeforeCreate(tx *gorm.DB) error {
	ctx := tx.Statement.Context
	userID := ctx.Value(spaceRequestUser)
	if userID == nil {
		return nil
	}
	uID := userID.(int)
	spr.CreatedByID = uint(uID)
	spr.UpdatedByID = uint(uID)
	return nil
}
<file_sep>/service/core/action/permissions/organisation/update.go
package organisation
import (
"encoding/json"
"errors"
"net/http"
"strconv"
"github.com/factly/dega-server/config"
"github.com/factly/dega-server/service/core/model"
"github.com/factly/x/errorx"
"github.com/factly/x/loggerx"
"github.com/factly/x/middlewarex"
"github.com/factly/x/renderx"
"github.com/factly/x/validationx"
"github.com/go-chi/chi"
"github.com/spf13/viper"
)
// update - Update Organisation permission by id
// @Summary Update a Organisation permission by id
// @Description Update Organisation permission by ID
// @Tags Organisation_Permissions
// @ID update-org-permission-by-id
// @Produce json
// @Consume json
// @Param X-User header string true "User ID"
// @Param permission_id path string true "Permission ID"
// @Param X-Space header string true "Space ID"
// @Param Permission body organisationPermission false "Permission Body"
// @Success 200 {object} model.OrganisationPermission
// @Router /core/permissions/organisations/{permission_id} [put]
//
// Flow: auth → parse id → decode/validate body → existence check →
// transactional update of the Spaces quota (audit-stamped with the
// acting user) → respond with the refreshed record.
func update(w http.ResponseWriter, r *http.Request) {
	// The acting user is needed for the UpdatedByID audit column.
	uID, err := middlewarex.GetUser(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}
	permissionID := chi.URLParam(r, "permission_id")
	id, err := strconv.Atoi(permissionID)
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InvalidID()))
		return
	}
	permission := organisationPermission{}
	err = json.NewDecoder(r.Body).Decode(&permission)
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.DecodeError()))
		return
	}
	validationError := validationx.Check(permission)
	if validationError != nil {
		loggerx.Error(errors.New("validation error"))
		errorx.Render(w, validationError)
		return
	}
	result := model.OrganisationPermission{}
	result.ID = uint(id)
	// check record exists or not
	err = config.DB.First(&result).Error
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.RecordNotFound()))
		return
	}
	// A zero quota means "use the configured default".
	if permission.Spaces == 0 {
		permission.Spaces = viper.GetInt64("default_number_of_spaces")
	}
	tx := config.DB.Begin()
	// Fix: dropped the redundant leading .Model(&model.OrganisationPermission{})
	// call — the subsequent .Model(&result) overrode it anyway, so it only
	// obscured which model the chain operates on.
	// NOTE(review): Updates with a struct skips zero-value fields; Spaces is
	// non-zero here unless default_number_of_spaces is configured as 0 —
	// confirm that configuration is never 0.
	err = tx.Model(&result).Updates(&model.OrganisationPermission{
		Base:   config.Base{UpdatedByID: uint(uID)},
		Spaces: permission.Spaces,
	}).First(&result).Error
	if err != nil {
		tx.Rollback()
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.DBError()))
		return
	}
	tx.Commit()
	renderx.JSON(w, http.StatusOK, result)
}
<file_sep>/service/fact-check/action/claimant/create.go
package claimant
import (
"context"
"encoding/json"
"errors"
"net/http"
"reflect"
"github.com/factly/dega-server/config"
"github.com/factly/dega-server/service/fact-check/model"
"github.com/factly/dega-server/test"
"github.com/factly/dega-server/util"
"github.com/factly/x/errorx"
"github.com/factly/x/loggerx"
"github.com/factly/x/meilisearchx"
"github.com/factly/x/middlewarex"
"github.com/factly/x/renderx"
"github.com/factly/x/slugx"
"github.com/factly/x/validationx"
"gorm.io/gorm"
)
// create - Create claimant
// @Summary Create claimant
// @Description Create claimant
// @Tags Claimant
// @ID add-claimant
// @Consume json
// @Produce json
// @Param X-User header string true "User ID"
// @Param X-Space header string true "Space ID"
// @Param Claimant body claimant true "Claimant Object"
// @Success 201 {object} model.Claimant
// @Failure 400 {array} string
// @Router /fact-check/claimants [post]
//
// Flow: auth → decode/validate → slug & duplicate-name checks → render
// HTML description → transactional insert → meili index → commit →
// optional NATS event. The meili insert happens inside the DB
// transaction so a meili failure rolls the record back.
func create(w http.ResponseWriter, r *http.Request) {
	sID, err := middlewarex.GetSpace(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}
	uID, err := middlewarex.GetUser(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}
	claimant := &claimant{}
	err = json.NewDecoder(r.Body).Decode(&claimant)
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.DecodeError()))
		return
	}
	validationError := validationx.Check(claimant)
	if validationError != nil {
		loggerx.Error(errors.New("validation error"))
		errorx.Render(w, validationError)
		return
	}
	// Get table name
	stmt := &gorm.Statement{DB: config.DB}
	_ = stmt.Parse(&model.Claimant{})
	tableName := stmt.Schema.Table
	// Use the client-supplied slug only when well-formed; otherwise derive
	// it from the name.
	var claimantSlug string
	if claimant.Slug != "" && slugx.Check(claimant.Slug) {
		claimantSlug = claimant.Slug
	} else {
		claimantSlug = slugx.Make(claimant.Name)
	}
	// A medium ID of 0 is stored as NULL.
	mediumID := &claimant.MediumID
	if claimant.MediumID == 0 {
		mediumID = nil
	}
	// Check if claimant with same name exist
	if util.CheckName(uint(sID), claimant.Name, tableName) {
		loggerx.Error(errors.New(`claimant with same name exist`))
		errorx.Render(w, errorx.Parser(errorx.SameNameExist()))
		return
	}
	var description string
	// Store HTML description
	// NOTE(review): test.NilJsonb comes from the project's test package —
	// production code importing test helpers is a smell; consider moving
	// the helper into a shared util package.
	if len(claimant.Description.RawMessage) > 0 && !reflect.DeepEqual(claimant.Description, test.NilJsonb()) {
		description, err = util.HTMLDescription(claimant.Description)
		if err != nil {
			loggerx.Error(err)
			errorx.Render(w, errorx.Parser(errorx.GetMessage("cannot parse claimant description", http.StatusUnprocessableEntity)))
			return
		}
	}
	result := &model.Claimant{
		Name:            claimant.Name,
		Slug:            slugx.Approve(&config.DB, claimantSlug, sID, tableName),
		Description:     claimant.Description,
		HTMLDescription: description,
		MediumID:        mediumID,
		SpaceID:         uint(sID),
		TagLine:         claimant.TagLine,
		MetaFields:      claimant.MetaFields,
	}
	// The user ID travels in the context so the model's BeforeCreate hook
	// can stamp created/updated-by columns.
	tx := config.DB.WithContext(context.WithValue(r.Context(), userContext, uID)).Begin()
	err = tx.Model(&model.Claimant{}).Create(&result).Error
	if err != nil {
		tx.Rollback()
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.DBError()))
		return
	}
	// Re-read with the medium preloaded for the response body.
	tx.Model(&model.Claimant{}).Preload("Medium").First(&result)
	// Insert into meili index
	meiliObj := map[string]interface{}{
		"id":          result.ID,
		"kind":        "claimant",
		"name":        result.Name,
		"slug":        result.Slug,
		"description": result.Description,
		"tag_line":    result.TagLine,
		"space_id":    result.SpaceID,
	}
	err = meilisearchx.AddDocument("dega", meiliObj)
	if err != nil {
		tx.Rollback()
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
		return
	}
	tx.Commit()
	// Best-effort event publication after commit.
	// NOTE(review): a NATS failure here returns 500 although the record is
	// already committed — confirm that is the intended contract.
	if util.CheckNats() {
		if err = util.NC.Publish("claimant.created", result); err != nil {
			loggerx.Error(err)
			errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
			return
		}
	}
	renderx.JSON(w, http.StatusCreated, result)
}
<file_sep>/service/core/action/tag/feed.go
package tag
import (
"net/http"
"strconv"
"strings"
"github.com/factly/dega-server/config"
"github.com/factly/dega-server/service/core/action/post"
"github.com/factly/dega-server/service/core/model"
"github.com/factly/x/errorx"
"github.com/factly/x/loggerx"
"github.com/factly/x/paginationx"
"github.com/go-chi/chi"
)
// Feeds serves the RSS feed of published, non-page posts for a space,
// filtered by one or more tag slugs (comma-separated path segment),
// with sort/limit/page query support.
func Feeds(w http.ResponseWriter, r *http.Request) {
	spaceID := chi.URLParam(r, "space_id")
	sID, err := strconv.Atoi(spaceID)
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InvalidID()))
		return
	}
	offset, limit := paginationx.Parse(r.URL.Query())
	// Only "asc" is honoured; anything else falls back to "desc".
	sort := r.URL.Query().Get("sort")
	if sort != "asc" {
		sort = "desc"
	}
	slugs := chi.URLParam(r, "slugs")
	tagSlugs := strings.Split(slugs, ",")
	space := model.Space{}
	space.ID = uint(sID)
	if err := config.DB.Preload("Logo").First(&space).Error; err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.RecordNotFound()))
		return
	}
	// Resolve the slugs to tag IDs.
	// NOTE(review): errors from this Find (and the posts Find below) are
	// ignored — a DB failure silently yields an empty feed; confirm that
	// is acceptable.
	tagIDs := make([]uint, 0)
	tagList := make([]model.Tag, 0)
	config.DB.Model(&model.Tag{}).Where("slug IN (?)", tagSlugs).Find(&tagList)
	for _, each := range tagList {
		tagIDs = append(tagIDs, each.ID)
	}
	feed := post.GetFeed(space)
	postList := make([]model.Post, 0)
	// NOTE(review): the post_tags join can return a post once per matching
	// tag (no DISTINCT), and the unqualified "created_at" in ORDER BY may
	// be ambiguous after the join — verify against the generated SQL.
	config.DB.Model(&model.Post{}).Joins("JOIN post_tags ON posts.id = post_tags.post_id").Where(&model.Post{
		Status:  "publish",
		SpaceID: uint(sID),
	}).Where("is_page = ?", false).Where("tag_id IN (?)", tagIDs).Order("created_at " + sort).Offset(offset).Limit(limit).Find(&postList)
	feed.Items = post.GetItemsList(postList, space)
	if err := feed.WriteRss(w); err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
		return
	}
}
<file_sep>/service/podcast/model/episode.go
package model
import (
"errors"
"time"
"github.com/factly/dega-server/config"
"github.com/factly/dega-server/service/core/model"
"github.com/jinzhu/gorm/dialects/postgres"
"gorm.io/gorm"
)
// Episode model
// A podcast episode. PodcastID and MediumID are nullable; when set they
// must belong to the same space (enforced by BeforeSave).
type Episode struct {
	config.Base
	Title           string         `gorm:"column:title" json:"title"`
	Slug            string         `gorm:"column:slug" json:"slug"`
	Season          int            `gorm:"column:season" json:"season"`
	Episode         int            `gorm:"column:episode" json:"episode"`
	AudioURL        string         `gorm:"column:audio_url" json:"audio_url"`
	PodcastID       *uint          `gorm:"column:podcast_id" json:"podcast_id"`
	Podcast         *Podcast       `json:"podcast"`
	Description     postgres.Jsonb `gorm:"column:description" json:"description" swaggertype:"primitive,string"`
	HTMLDescription string         `gorm:"column:html_description" json:"html_description,omitempty"`
	PublishedDate   *time.Time     `gorm:"column:published_date" json:"published_date" sql:"DEFAULT:NULL"`
	MediumID        *uint          `gorm:"column:medium_id;default:NULL" json:"medium_id"`
	Medium          *model.Medium  `json:"medium"`
	MetaFields      postgres.Jsonb `gorm:"column:meta_fields" json:"meta_fields" swaggertype:"primitive,string"`
	SpaceID         uint           `gorm:"column:space_id" json:"space_id"`
	Space           *model.Space   `json:"space,omitempty"`
}
// EpisodeAuthor model
// Join record linking an author to an episode.
type EpisodeAuthor struct {
	config.Base
	AuthorID  uint `gorm:"column:author_id" json:"author_id"`
	EpisodeID uint `gorm:"column:episode_id" json:"episode_id"`
}
// BeforeSave - validation for medium & podcast
// Rejects the save when the referenced medium or podcast does not exist
// in the episode's space. Runs in the same transaction as the save, so
// the lookups see uncommitted data.
func (episode *Episode) BeforeSave(tx *gorm.DB) (e error) {
	if episode.MediumID != nil && *episode.MediumID > 0 {
		medium := model.Medium{}
		medium.ID = *episode.MediumID
		err := tx.Model(&model.Medium{}).Where(model.Medium{
			SpaceID: episode.SpaceID,
		}).First(&medium).Error
		if err != nil {
			// Any lookup failure (including not-found) blocks the save.
			return errors.New("medium do not belong to same space")
		}
	}
	if episode.PodcastID != nil && *episode.PodcastID > 0 {
		podcast := Podcast{}
		podcast.ID = *episode.PodcastID
		err := tx.Model(&Podcast{}).Where(Podcast{
			SpaceID: episode.SpaceID,
		}).First(&podcast).Error
		if err != nil {
			return errors.New("podcast do not belong to same space")
		}
	}
	return nil
}
// episodeUser is the context key under which handlers store the acting
// user's ID for the hook below.
var episodeUser config.ContextKey = "episode_user"

// BeforeCreate hook
// Stamps CreatedByID/UpdatedByID from the user ID stored in the statement
// context; a missing value skips stamping silently.
// NOTE(review): the unchecked type assertion panics if the stored value is
// not an int — confirm all writers store an int.
func (episode *Episode) BeforeCreate(tx *gorm.DB) error {
	ctx := tx.Statement.Context
	userID := ctx.Value(episodeUser)
	if userID == nil {
		return nil
	}
	uID := userID.(int)
	episode.CreatedByID = uint(uID)
	episode.UpdatedByID = uint(uID)
	return nil
}
<file_sep>/test/service/core/permissions/space/list_test.go
package space
import (
"net/http"
"net/http/httptest"
"regexp"
"testing"
"github.com/DATA-DOG/go-sqlmock"
"github.com/factly/dega-server/service"
"github.com/factly/dega-server/test"
"github.com/factly/dega-server/test/service/core/space"
"github.com/gavv/httpexpect"
"gopkg.in/h2non/gock.v1"
)
// TestSpacePermissionList exercises the space-permission list endpoint for
// the empty and non-empty cases. Mock expectations are order-sensitive.
func TestSpacePermissionList(t *testing.T) {
	mock := test.SetupMockDB()
	test.MockServer()
	testServer := httptest.NewServer(service.RegisterRoutes())
	gock.New(testServer.URL).EnableNetworking().Persist()
	defer gock.DisableNetworking()
	defer testServer.Close()
	// create httpexpect instance
	e := httpexpect.New(t, testServer.URL)
	t.Run("get empty list of permissions", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		mock.ExpectQuery(regexp.QuoteMeta(`SELECT count(*) FROM "spaces"`)).
			WillReturnRows(sqlmock.NewRows([]string{"count"}).AddRow(0))
		mock.ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "spaces"`)).
			WillReturnRows(sqlmock.NewRows(space.Columns))
		e.GET(basePath).
			WithHeaders(headers).
			Expect().
			Status(http.StatusOK).
			JSON().
			Object().
			Value("total").
			Number().
			Equal(0)
		test.ExpectationsMet(t, mock)
	})
	t.Run("get list of permissions", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		mock.ExpectQuery(regexp.QuoteMeta(`SELECT count(*) FROM "spaces"`)).
			WillReturnRows(sqlmock.NewRows([]string{"count"}).AddRow(1))
		// One space row plus its permission row (this package's fixture).
		space.SelectQuery(mock)
		SelectQuery(mock)
		e.GET(basePath).
			WithHeaders(headers).
			Expect().
			Status(http.StatusOK).
			JSON().
			Object().
			Value("nodes").
			Array().
			Element(0).
			Object().
			Value("permission").
			Object().
			ContainsMap(Data)
		test.ExpectationsMet(t, mock)
	})
}
<file_sep>/test/service/core/page/details_test.go
package page
import (
"net/http"
"net/http/httptest"
"regexp"
"testing"
"time"
"github.com/DATA-DOG/go-sqlmock"
"github.com/factly/dega-server/service"
"github.com/factly/dega-server/test"
"github.com/gavv/httpexpect"
"gopkg.in/h2non/gock.v1"
)
// TestPageDetails exercises GET page-by-id: invalid id, missing record,
// and the success path with preloaded associations.
func TestPageDetails(t *testing.T) {
	mock := test.SetupMockDB()
	test.MockServer()
	testServer := httptest.NewServer(service.RegisterRoutes())
	gock.New(testServer.URL).EnableNetworking().Persist()
	defer gock.DisableNetworking()
	defer testServer.Close()
	// create httpexpect instance
	e := httpexpect.New(t, testServer.URL)
	t.Run("invalid page id", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		e.GET(path).
			WithPath("page_id", "invalid_id").
			WithHeaders(headers).
			Expect().
			Status(http.StatusBadRequest)
	})
	t.Run("page record not found", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		// Lookup args: is_page=true, space 1, page 100; no rows → 404.
		mock.ExpectQuery(selectQuery).
			WithArgs(true, 1, 100).
			WillReturnRows(sqlmock.NewRows(columns))
		e.GET(path).
			WithPath("page_id", "100").
			WithHeaders(headers).
			Expect().
			Status(http.StatusNotFound)
	})
	t.Run("get page by id", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		SelectMock(mock, true, 1, 1)
		preloadMock(mock, 1)
		// The handler also preloads the page's authors.
		mock.ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "post_authors"`)).
			WithArgs(1).
			WillReturnRows(sqlmock.NewRows([]string{"id", "created_at", "updated_at", "deleted_at", "author_id", "post_id"}).AddRow(1, time.Now(), time.Now(), nil, 1, 1))
		e.GET(path).
			WithPath("page_id", 1).
			WithHeaders(headers).
			Expect().
			Status(http.StatusOK).JSON().Object().ContainsMap(pageData)
	})
}
<file_sep>/test/service/core/post/create_test.go
package post
import (
"errors"
"net/http"
"net/http/httptest"
"regexp"
"testing"
"github.com/DATA-DOG/go-sqlmock"
"github.com/factly/dega-server/test/service/core/permissions/space"
"github.com/jinzhu/gorm/dialects/postgres"
"github.com/factly/dega-server/service"
"github.com/factly/dega-server/test"
"github.com/factly/dega-server/test/service/core/category"
"github.com/factly/dega-server/test/service/core/format"
"github.com/factly/dega-server/test/service/core/medium"
"github.com/factly/dega-server/test/service/core/tag"
"github.com/gavv/httpexpect/v2"
"gopkg.in/h2non/gock.v1"
)
// TestPostCreate exercises POST /core/posts across validation, decoding,
// permission, quota, association-failure and search-index-failure paths
// against a mocked database and mocked upstream services.
func TestPostCreate(t *testing.T) {
	mock := test.SetupMockDB()
	test.MockServer()
	testServer := httptest.NewServer(service.RegisterRoutes())
	gock.New(testServer.URL).EnableNetworking().Persist()
	// fix: a duplicate `defer gock.DisableNetworking()` that previously ran
	// before the test server was even created has been removed; this single
	// defer is sufficient.
	defer gock.DisableNetworking()
	defer testServer.Close()

	// create httpexpect instance
	e := httpexpect.New(t, testServer.URL)

	t.Run("Unprocessable post", func(t *testing.T) {
		test.CheckSpaceMock(mock)

		e.POST(basePath).
			WithJSON(invalidData).
			WithHeaders(headers).
			Expect().
			Status(http.StatusUnprocessableEntity)
	})

	t.Run("Unable to decode post", func(t *testing.T) {
		test.CheckSpaceMock(mock)

		e.POST(basePath).
			WithHeaders(headers).
			Expect().
			Status(http.StatusUnprocessableEntity)
	})

	t.Run("create post", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)
		postCountQuery(mock, 0)
		slugCheckMock(mock, Data)
		tag.SelectMock(mock, tag.Data, 1)
		category.SelectWithOutSpace(mock)
		postInsertMock(mock, Data, false)
		postSelectWithOutSpace(mock, Data)
		postClaimInsertMock(mock)
		postClaimSelectMock(mock)
		postAuthorInsertMock(mock)
		postSchemaUpdateQuery(mock)
		mock.ExpectCommit()

		e.POST(basePath).
			WithHeaders(headers).
			WithJSON(Data).
			Expect().
			Status(http.StatusCreated).JSON().Object().ContainsMap(postData)
		test.ExpectationsMet(t, mock)
	})

	t.Run("cannot parse post description", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)
		postCountQuery(mock, 0)

		// temporarily corrupt the description, then restore it afterwards
		Data["description"] = postgres.Jsonb{
			RawMessage: []byte(`{"block": "new"}`),
		}
		e.POST(basePath).
			WithHeaders(headers).
			WithJSON(Data).
			Expect().
			Status(http.StatusUnprocessableEntity)
		Data["description"] = postgres.Jsonb{
			RawMessage: []byte(`{"time":1617039625490,"blocks":[{"type":"paragraph","data":{"text":"Test Description"}}],"version":"2.19.0"}`),
		}
		test.ExpectationsMet(t, mock)
	})

	t.Run("create post when permission not found", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		mock.ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "space_permissions"`)).
			WithArgs(1).
			WillReturnRows(sqlmock.NewRows([]string{"id", "created_at", "updated_at", "deleted_at", "space_id", "media", "posts"}))

		e.POST(basePath).
			WithHeaders(headers).
			WithJSON(Data).
			Expect().
			Status(http.StatusUnprocessableEntity)
		test.ExpectationsMet(t, mock)
	})

	t.Run("create more than permitted posts", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)
		postCountQuery(mock, 100)

		e.POST(basePath).
			WithHeaders(headers).
			WithJSON(Data).
			Expect().
			Status(http.StatusUnprocessableEntity)
		test.ExpectationsMet(t, mock)
	})

	t.Run("creating post claims fail", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)
		postCountQuery(mock, 0)
		slugCheckMock(mock, Data)
		tag.SelectMock(mock, tag.Data, 1)
		category.SelectWithOutSpace(mock)
		postInsertMock(mock, Data, false)
		postSelectWithOutSpace(mock, Data)
		mock.ExpectQuery(`INSERT INTO "post_claims"`).
			WithArgs(test.AnyTime{}, test.AnyTime{}, nil, 1, 1, 1, 1, 1).
			WillReturnError(errors.New("cannot create post_claims"))
		mock.ExpectRollback()

		e.POST(basePath).
			WithHeaders(headers).
			WithJSON(Data).
			Expect().
			Status(http.StatusInternalServerError)
		test.ExpectationsMet(t, mock)
	})

	t.Run("create post with slug is empty", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)
		postCountQuery(mock, 0)
		slugCheckMock(mock, Data)
		tag.SelectMock(mock, tag.Data, 1)
		category.SelectWithOutSpace(mock)
		postInsertMock(mock, Data, false)
		postSelectWithOutSpace(mock, Data)
		postClaimInsertMock(mock)
		postClaimSelectMock(mock)
		postAuthorInsertMock(mock)
		postSchemaUpdateQuery(mock)
		mock.ExpectCommit()

		// blank slug forces the server to derive one; restore afterwards
		Data["slug"] = ""
		e.POST(basePath).
			WithHeaders(headers).
			WithJSON(Data).
			Expect().
			Status(http.StatusCreated).JSON().Object().ContainsMap(postData)
		Data["slug"] = "post"
		test.ExpectationsMet(t, mock)
	})

	t.Run("medium does not belong same space", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)
		postCountQuery(mock, 0)
		slugCheckMock(mock, Data)
		tag.SelectMock(mock, tag.Data, 1)
		category.SelectWithOutSpace(mock)
		mock.ExpectBegin()
		medium.EmptyRowMock(mock)
		mock.ExpectRollback()

		e.POST(basePath).
			WithHeaders(headers).
			WithJSON(Data).
			Expect().
			Status(http.StatusInternalServerError)
		test.ExpectationsMet(t, mock)
	})

	t.Run("format does not belong same space", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)
		postCountQuery(mock, 0)
		slugCheckMock(mock, Data)
		tag.SelectMock(mock, tag.Data, 1)
		category.SelectWithOutSpace(mock)
		mock.ExpectBegin()
		medium.SelectWithSpace(mock)
		format.EmptyRowMock(mock)
		mock.ExpectRollback()

		e.POST(basePath).
			WithHeaders(headers).
			WithJSON(Data).
			Expect().
			Status(http.StatusInternalServerError)
		test.ExpectationsMet(t, mock)
	})

	t.Run("create post when meili is down", func(t *testing.T) {
		test.DisableMeiliGock(testServer.URL)
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)
		postCountQuery(mock, 0)
		slugCheckMock(mock, Data)
		tag.SelectMock(mock, tag.Data, 1)
		category.SelectWithOutSpace(mock)
		postInsertMock(mock, Data, false)
		postSelectWithOutSpace(mock, Data)
		postClaimInsertMock(mock)
		postClaimSelectMock(mock)
		postAuthorInsertMock(mock)
		postSchemaUpdateQuery(mock)
		mock.ExpectRollback()

		e.POST(basePath).
			WithHeaders(headers).
			WithJSON(Data).
			Expect().
			Status(http.StatusInternalServerError)
		test.ExpectationsMet(t, mock)
	})
}
<file_sep>/service/core/action/policy/delete.go
package policy
import (
"fmt"
"net/http"
"github.com/factly/dega-server/util"
"github.com/factly/x/errorx"
"github.com/factly/x/loggerx"
"github.com/factly/x/meilisearchx"
"github.com/factly/x/middlewarex"
"github.com/factly/x/renderx"
"github.com/go-chi/chi"
"github.com/spf13/viper"
)
// delete - Delete policy by ID
// @Summary Delete policy by ID
// @Description Delete policy by ID
// @Tags Policy
// @ID delete-policy-by-id
// @Consume json
// @Produce json
// @Param X-User header string true "User ID"
// @Param X-Space header string true "Space ID"
// @Param policy_id path string true "Policy ID"
// @Success 200 {object} model.Policy
// @Router /core/policies/{policy_id} [delete]
func delete(w http.ResponseWriter, r *http.Request) {
	// space and organisation come from request context set by middleware
	spaceID, err := middlewarex.GetSpace(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}

	organisationID, err := util.GetOrganisation(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}

	/* delete old policy */
	policyId := chi.URLParam(r, "policy_id")

	// keto policy IDs are namespaced: id:org:<org>:app:dega:space:<space>:<id>
	policyID := fmt.Sprint("id:org:", organisationID, ":app:dega:space:", spaceID, ":"+policyId)

	req, err := http.NewRequest("DELETE", viper.GetString("keto_url")+"/engines/acp/ory/regex/policies/"+policyID, nil)
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
		return
	}
	req.Header.Set("Content-Type", "application/json")

	client := &http.Client{}
	resp, err := client.Do(req)
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.NetworkError()))
		return
	}
	// NOTE(review): resp.StatusCode is never inspected, so a failed keto
	// delete is silently treated as success — confirm whether that is
	// intentional best-effort behaviour.
	defer resp.Body.Close()

	// remove the policy document from the meilisearch index as well
	objectID := fmt.Sprint("policy_", policyId)
	_, err = meilisearchx.Client.Documents("dega").Delete(objectID)
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
		return
	}

	renderx.JSON(w, http.StatusOK, nil)
}
<file_sep>/service/core/action/page/update.go
package page
import (
"context"
"encoding/json"
"errors"
"fmt"
"net/http"
"reflect"
"strconv"
"github.com/factly/dega-server/config"
"github.com/factly/dega-server/service/core/action/author"
"github.com/factly/dega-server/service/core/model"
"github.com/factly/dega-server/test"
"github.com/factly/dega-server/util"
"github.com/factly/dega-server/util/arrays"
"github.com/factly/x/errorx"
"github.com/factly/x/loggerx"
"github.com/factly/x/meilisearchx"
"github.com/factly/x/middlewarex"
"github.com/factly/x/renderx"
"github.com/factly/x/slugx"
"github.com/factly/x/validationx"
"github.com/go-chi/chi"
"gorm.io/gorm"
)
// update - Update page by id
// @Summary Update a page by id
// @Description Update page by ID
// @Tags Page
// @ID update-page-by-id
// @Produce json
// @Consume json
// @Param X-User header string true "User ID"
// @Param X-Space header string true "Space ID"
// @Param page_id path string true "Page ID"
// @Param Page body page false "Page"
// @Success 200 {object} pageData
// @Router /core/pages/{page_id} [put]
func update(w http.ResponseWriter, r *http.Request) {
	pageID := chi.URLParam(r, "page_id")
	id, err := strconv.Atoi(pageID)
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InvalidID()))
		return
	}

	sID, err := middlewarex.GetSpace(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}

	uID, err := middlewarex.GetUser(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}

	// decode and validate the incoming page payload
	page := &page{}
	err = json.NewDecoder(r.Body).Decode(&page)
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.DecodeError()))
		return
	}

	validationError := validationx.Check(page)
	if validationError != nil {
		loggerx.Error(errors.New("validation error"))
		errorx.Render(w, validationError)
		return
	}

	result := &pageData{}
	result.ID = uint(id)
	result.Tags = make([]model.Tag, 0)
	result.Categories = make([]model.Category, 0)
	result.Authors = make([]model.Author, 0)

	// check record exists or not
	// NOTE(review): the lookup by id relies on result.ID being set above;
	// GORM's First adds the destination struct's primary key as a condition.
	// The explicit Where only constrains space_id and is_page — confirm.
	err = config.DB.Where(&model.Post{
		SpaceID: uint(sID),
		IsPage:  true,
	}).First(&result.Post).Error

	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.RecordNotFound()))
		return
	}

	// fetch all authors
	authors, err := author.All(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
		return
	}

	// the stored space wins over anything supplied in the payload
	page.SpaceID = result.SpaceID

	var pageSlug string

	// Get table name
	stmt := &gorm.Statement{DB: config.DB}
	_ = stmt.Parse(&model.Post{})
	tableName := stmt.Schema.Table

	// keep the existing slug when unchanged; otherwise approve the
	// requested slug, or derive one from the title as a fallback
	if result.Slug == page.Slug {
		pageSlug = result.Slug
	} else if page.Slug != "" && slugx.Check(page.Slug) {
		pageSlug = slugx.Approve(&config.DB, page.Slug, sID, tableName)
	} else {
		pageSlug = slugx.Approve(&config.DB, slugx.Make(page.Title), sID, tableName)
	}

	// Store HTML description
	var description string
	if len(page.Description.RawMessage) > 0 && !reflect.DeepEqual(page.Description, test.NilJsonb()) {
		description, err = util.HTMLDescription(page.Description)
		if err != nil {
			loggerx.Error(err)
			errorx.Render(w, errorx.Parser(errorx.GetMessage("cannot parse page description", http.StatusUnprocessableEntity)))
			return
		}
	}

	// all writes below run inside this transaction; the user id is put in
	// context so GORM hooks can stamp updated_by
	tx := config.DB.WithContext(context.WithValue(r.Context(), userContext, uID)).Begin()

	// NOTE(review): Replace runs on tx but the lookup (and the Clear in the
	// else branch) run on config.DB, i.e. outside the transaction — confirm
	// whether Clear should also go through tx.
	newTags := make([]model.Tag, 0)
	if len(page.TagIDs) > 0 {
		config.DB.Model(&model.Tag{}).Where(page.TagIDs).Find(&newTags)
		if err = tx.Model(&result.Post).Association("Tags").Replace(&newTags); err != nil {
			tx.Rollback()
			loggerx.Error(err)
			errorx.Render(w, errorx.Parser(errorx.DBError()))
			return
		}
	} else {
		_ = config.DB.Model(&result.Post).Association("Tags").Clear()
	}

	// same pattern for categories (same tx/config.DB caveat as tags)
	newCategories := make([]model.Category, 0)
	if len(page.CategoryIDs) > 0 {
		config.DB.Model(&model.Category{}).Where(page.CategoryIDs).Find(&newCategories)
		if err = tx.Model(&result.Post).Association("Categories").Replace(&newCategories); err != nil {
			tx.Rollback()
			loggerx.Error(err)
			errorx.Render(w, errorx.Parser(errorx.DBError()))
			return
		}
	} else {
		_ = config.DB.Model(&result.Post).Association("Categories").Clear()
	}

	// a zero FeaturedMediumID means "unset": null the column and pass nil
	// into the later Updates call so it is not re-written
	featuredMediumID := &page.FeaturedMediumID
	result.Post.FeaturedMediumID = &page.FeaturedMediumID
	if page.FeaturedMediumID == 0 {
		err = tx.Model(&result.Post).Omit("Tags", "Categories").Updates(map[string]interface{}{"featured_medium_id": nil}).Error
		featuredMediumID = nil
		if err != nil {
			tx.Rollback()
			loggerx.Error(err)
			errorx.Render(w, errorx.Parser(errorx.DBError()))
			return
		}
	}

	updatedPage := model.Post{
		Base:             config.Base{UpdatedByID: uint(uID)},
		Title:            page.Title,
		Slug:             pageSlug,
		Subtitle:         page.Subtitle,
		Status:           page.Status,
		PublishedDate:    page.PublishedDate,
		Excerpt:          page.Excerpt,
		Description:      page.Description,
		HTMLDescription:  description,
		IsHighlighted:    page.IsHighlighted,
		IsSticky:         page.IsSticky,
		FormatID:         page.FormatID,
		FeaturedMediumID: featuredMediumID,
	}

	// booleans are updated with Select so that false values are persisted
	// (struct Updates skips zero values)
	tx.Model(&result.Post).Select("IsFeatured", "IsSticky", "IsHighlighted").Omit("Tags", "Categories").Updates(model.Post{
		IsFeatured:    page.IsFeatured,
		IsSticky:      page.IsSticky,
		IsHighlighted: page.IsHighlighted,
	})
	err = tx.Model(&result.Post).Omit("Tags", "Categories").Updates(updatedPage).Preload("Medium").Preload("Format").Preload("Tags").Preload("Categories").First(&result.Post).Error

	if err != nil {
		tx.Rollback()
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.DBError()))
		return
	}

	pageAuthors := []model.PostAuthor{}
	// fetch existing post authors
	tx.Model(&model.PostAuthor{}).Where(&model.PostAuthor{
		PostID: uint(id),
	}).Find(&pageAuthors)

	prevAuthorIDs := make([]uint, 0)
	mapperPostAuthor := map[uint]model.PostAuthor{}
	postAuthorIDs := make([]uint, 0)

	for _, postAuthor := range pageAuthors {
		mapperPostAuthor[postAuthor.AuthorID] = postAuthor
		prevAuthorIDs = append(prevAuthorIDs, postAuthor.AuthorID)
	}

	// diff current vs requested author sets to know what to add/remove
	toCreateIDs, toDeleteIDs := arrays.Difference(prevAuthorIDs, page.AuthorIDs)

	// map post author ids
	for _, id := range toDeleteIDs {
		postAuthorIDs = append(postAuthorIDs, mapperPostAuthor[id].ID)
	}

	// delete post authors
	if len(postAuthorIDs) > 0 {
		err = tx.Where(&postAuthorIDs).Delete(&model.PostAuthor{}).Error
		if err != nil {
			tx.Rollback()
			loggerx.Error(err)
			errorx.Render(w, errorx.Parser(errorx.DBError()))
			return
		}
	}

	// creating new post authors
	for _, id := range toCreateIDs {
		if id != 0 {
			postAuthor := &model.PostAuthor{}
			postAuthor.AuthorID = uint(id)
			postAuthor.PostID = result.ID

			err = tx.Model(&model.PostAuthor{}).Create(&postAuthor).Error
			if err != nil {
				tx.Rollback()
				loggerx.Error(err)
				errorx.Render(w, errorx.Parser(errorx.DBError()))
				return
			}
		}
	}

	// fetch existing post authors
	updatedPostAuthors := []model.PostAuthor{}
	tx.Model(&model.PostAuthor{}).Where(&model.PostAuthor{
		PostID: uint(id),
	}).Find(&updatedPostAuthors)

	// appending previous post authors to result
	for _, postAuthor := range updatedPostAuthors {
		aID := fmt.Sprint(postAuthor.AuthorID)
		if author, found := authors[aID]; found {
			result.Authors = append(result.Authors, author)
		}
	}

	// Update into meili index
	var meiliPublishDate int64
	if result.Post.PublishedDate != nil {
		meiliPublishDate = result.Post.PublishedDate.Unix()
	}
	meiliObj := map[string]interface{}{
		"id":             result.ID,
		"kind":           "page",
		"title":          result.Title,
		"subtitle":       result.Subtitle,
		"slug":           result.Slug,
		"status":         result.Status,
		"excerpt":        result.Excerpt,
		"description":    result.Description,
		"is_featured":    result.IsFeatured,
		"is_sticky":      result.IsSticky,
		"is_highlighted": result.IsHighlighted,
		"is_page":        result.IsPage,
		"format_id":      result.FormatID,
		"published_date": meiliPublishDate,
		"space_id":       result.SpaceID,
		"tag_ids":        page.TagIDs,
		"category_ids":   page.CategoryIDs,
		"author_ids":     page.AuthorIDs,
	}

	// a meili failure rolls the whole DB transaction back
	err = meilisearchx.UpdateDocument("dega", meiliObj)
	if err != nil {
		tx.Rollback()
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
		return
	}

	tx.Commit()

	// best-effort event publish after commit
	if util.CheckNats() {
		if err = util.NC.Publish("page.updated", result); err != nil {
			loggerx.Error(err)
			errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
			return
		}
	}

	renderx.JSON(w, http.StatusOK, result)
}
<file_sep>/test/service/core/policy/testvars.go
package policy
// headers carry the space and user identity required by the policy routes.
var headers = map[string]string{
	"X-Space": "1",
	"X-User":  "1",
}

// invalidHeader omits X-User to exercise the unauthorized path.
var invalidHeader = map[string]string{
	"X-Space": "1",
}

// valid policy
var policy_test = map[string]interface{}{
	"name": "test policy",
	"permissions": []map[string]interface{}{
		{
			"resource": "policies",
			"actions":  []string{"get", "create", "update", "delete"},
		},
	},
	"users": []string{
		"<EMAIL>",
	},
}

// undecodable_policy has "permissions" as a string instead of an array,
// so JSON decoding into the policy struct fails.
var undecodable_policy = map[string]interface{}{
	"name":        "test policy",
	"permissions": "none",
	"users": []string{
		"<EMAIL>",
	},
}

// route paths exercised by the policy tests
var basePath = "/core/policies"
var defaultsPath = "/core/policies/default"
var path = "/core/policies/{policy_id}"
<file_sep>/test/service/core/permissions/organisation/testvars.go
package organisation
import (
"database/sql/driver"
"regexp"
"time"
"github.com/DATA-DOG/go-sqlmock"
)
// headers carry the space and user identity required by the permission routes.
var headers = map[string]string{
	"X-Space": "1",
	"X-User":  "1",
}

// Data is a valid organisation-permission payload/fixture.
var Data = map[string]interface{}{
	"organisation_id": 1,
	"spaces":          5,
}

// invalidData uses misspelled keys so validation fails.
var invalidData = map[string]interface{}{
	"organisationid": 1,
	"spes":           5,
}

// undecodableData has organisation_id as a string, so decoding fails.
var undecodableData = map[string]interface{}{
	"organisation_id": "1",
	"spes":            5,
}

// column list and canonical queries for the organisation_permissions table
var columns = []string{"id", "created_at", "updated_at", "deleted_at", "created_by_id", "updated_by_id", "organisation_id", "spaces"}

var selectQuery = regexp.QuoteMeta(`SELECT * FROM "organisation_permissions"`)
var countQuery = regexp.QuoteMeta(`SELECT count(*) FROM "organisation_permissions"`)
var deleteQuery = regexp.QuoteMeta(`UPDATE "organisation_permissions" SET "deleted_at"=`)

// route paths exercised by the organisation-permission tests
var basePath = "/core/permissions/organisations"
var path = "/core/permissions/organisations/{permission_id}"
var mypath = "/core/permissions/organisations/my"
// SelectQuery registers an expectation for the organisation_permissions
// select and returns one row built from the package-level Data fixture.
func SelectQuery(m sqlmock.Sqlmock, args ...driver.Value) {
	row := sqlmock.NewRows(columns).
		AddRow(1, time.Now(), time.Now(), nil, 1, 1, Data["organisation_id"], Data["spaces"])
	m.ExpectQuery(selectQuery).
		WithArgs(args...).
		WillReturnRows(row)
}
// spaceSelectQuery registers an expectation for the spaces select and
// returns a single fully-populated space row.
func spaceSelectQuery(m sqlmock.Sqlmock, args ...driver.Value) {
	spaceColumns := []string{"id", "created_at", "updated_at", "deleted_at", "created_by_id", "updated_by_id", "name", "slug", "site_title", "tag_line", "description", "site_address", "logo_id", "logo_mobile_id", "fav_icon_id", "mobile_icon_id", "verification_codes", "social_media_urls", "contact_info", "organisation_id"}
	row := sqlmock.NewRows(spaceColumns).
		AddRow(1, time.Now(), time.Now(), nil, 1, 1, "name", "slug", "site_title", "tag_line", "description", "site_address", 1, 1, 1, 1, nil, nil, nil, 1)
	m.ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "spaces"`)).
		WithArgs(args...).
		WillReturnRows(row)
}
<file_sep>/service/core/action/webhook/create.go
package webhook
import (
"encoding/json"
"errors"
"fmt"
"net/http"
"reflect"
"github.com/factly/dega-server/service/core/model"
"github.com/factly/dega-server/test"
"github.com/factly/x/errorx"
"github.com/factly/x/loggerx"
"github.com/factly/x/middlewarex"
"github.com/factly/x/renderx"
"github.com/factly/x/requestx"
"github.com/factly/x/validationx"
"github.com/spf13/viper"
)
// create - Create Webhook
// @Summary Create Webhook
// @Description Create Webhook
// @Tags Webhooks
// @ID add-webhook
// @Consume json
// @Produce json
// @Param X-User header string true "User ID"
// @Param X-Space header string true "Space ID"
// @Param Webhook body webhook true "Webhook Object"
// @Success 201 {object} model.Webhook
// @Failure 400 {array} string
// @Router /core/webhooks [post]
func create(w http.ResponseWriter, r *http.Request) {
	uID, err := middlewarex.GetUser(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}

	sID, err := middlewarex.GetSpace(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}

	// decode and validate the incoming webhook payload
	webhook := &webhook{}
	if err = json.NewDecoder(r.Body).Decode(&webhook); err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.DecodeError()))
		return
	}

	if validationError := validationx.Check(webhook); validationError != nil {
		loggerx.Error(errors.New("validation error"))
		errorx.Render(w, validationError)
		return
	}

	// append app and space tag even if not provided
	if err = AddTags(webhook, sID); err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
		return
	}

	// delegate webhook creation to the hukz service
	hukzURL := viper.GetString("hukz_url") + "/webhooks"
	resp, err := requestx.Request("POST", hukzURL, webhook, map[string]string{
		"X-User": fmt.Sprint(uID),
	})
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
		return
	}
	// fix: the response body was never closed, leaking the underlying
	// connection on every call
	defer resp.Body.Close()

	// NOTE(review): this treats a 500 status itself as success (only >500
	// fails) — confirm whether `>= 500` or `>= 400` was intended.
	if resp.StatusCode > 500 {
		errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
		return
	}

	var webhookRes model.Webhook
	if err = json.NewDecoder(resp.Body).Decode(&webhookRes); err != nil {
		errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
		return
	}

	renderx.JSON(w, http.StatusCreated, webhookRes)
}
// AddTags guarantees the webhook's tag map carries the "app" and "space"
// entries while preserving any tags the caller already supplied. The tags
// are re-marshalled back into hook.Tags.
func AddTags(hook *webhook, sID int) error {
	existing := map[string]string{}

	hasTags := len(hook.Tags.RawMessage) > 0 && !reflect.DeepEqual(hook.Tags, test.NilJsonb())
	if hasTags {
		if err := json.Unmarshal(hook.Tags.RawMessage, &existing); err != nil {
			return err
		}
	}

	existing["app"] = "dega"
	existing["space"] = fmt.Sprint(sID)

	encoded, err := json.Marshal(existing)
	if err != nil {
		return err
	}
	hook.Tags.RawMessage = encoded
	return nil
}
<file_sep>/service/core/action/permissions/organisation/create.go
package organisation
import (
"context"
"encoding/json"
"errors"
"net/http"
"github.com/spf13/viper"
"github.com/factly/dega-server/config"
"github.com/factly/dega-server/service/core/model"
"github.com/factly/x/errorx"
"github.com/factly/x/loggerx"
"github.com/factly/x/middlewarex"
"github.com/factly/x/renderx"
"github.com/factly/x/validationx"
)
// create - Create organisation permission
// @Summary Create organisation permission
// @Description Create organisation permission
// @Tags Organisation_Permissions
// @ID add-org-permission
// @Consume json
// @Produce json
// @Param X-User header string true "User ID"
// @Param X-Space header string true "Space ID"
// @Param Permission body organisationPermission true "Permission Object"
// @Success 201 {object} model.OrganisationPermission
// @Failure 400 {array} string
// @Router /core/permissions/organisations [post]
func create(w http.ResponseWriter, r *http.Request) {
	uID, err := middlewarex.GetUser(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}

	// decode and validate the permission payload
	permission := organisationPermission{}
	err = json.NewDecoder(r.Body).Decode(&permission)
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.DecodeError()))
		return
	}

	validationError := validationx.Check(permission)
	if validationError != nil {
		loggerx.Error(errors.New("validation error"))
		errorx.Render(w, validationError)
		return
	}

	// fall back to the configured default space quota
	if permission.Spaces == 0 {
		permission.Spaces = viper.GetInt64("default_number_of_spaces")
	}

	// an organisation may have at most one permission record
	var totPerms int64
	config.DB.Model(&model.OrganisationPermission{}).Where(&model.OrganisationPermission{
		OrganisationID: permission.OrganisationID,
	}).Count(&totPerms)

	if totPerms > 0 {
		// fix: previously this logged the stale (provably nil) err from the
		// earlier decode instead of describing the actual conflict
		loggerx.Error(errors.New("organisation's permission already exist"))
		errorx.Render(w, errorx.Parser(errorx.GetMessage("organisation's permission already exist", http.StatusUnprocessableEntity)))
		return
	}

	result := model.OrganisationPermission{
		OrganisationID: permission.OrganisationID,
		Spaces:         permission.Spaces,
	}

	// user id is placed in context so GORM hooks can stamp created_by
	config.DB.WithContext(context.WithValue(r.Context(), userContext, uID)).Model(&model.OrganisationPermission{}).Create(&result)

	renderx.JSON(w, http.StatusCreated, result)
}
<file_sep>/test/service/core/menu/create_test.go
package menu
import (
"net/http"
"net/http/httptest"
"regexp"
"testing"
"github.com/DATA-DOG/go-sqlmock"
"github.com/factly/dega-server/service"
"github.com/factly/dega-server/test"
"github.com/gavv/httpexpect"
"gopkg.in/h2non/gock.v1"
)
// TestMenuCreate exercises POST /core/menus: validation, decoding,
// duplicate-name, happy path, and meilisearch-failure rollback.
func TestMenuCreate(t *testing.T) {
	mock := test.SetupMockDB()
	test.MockServer()
	testServer := httptest.NewServer(service.RegisterRoutes())
	gock.New(testServer.URL).EnableNetworking().Persist()
	// fix: a duplicate `defer gock.DisableNetworking()` that previously ran
	// before the test server was even created has been removed; this single
	// defer is sufficient.
	defer gock.DisableNetworking()
	defer testServer.Close()

	// create httpexpect instance
	e := httpexpect.New(t, testServer.URL)

	t.Run("Unprocessable menu", func(t *testing.T) {
		test.CheckSpaceMock(mock)

		e.POST(basePath).
			WithJSON(invalidData).
			WithHeaders(headers).
			Expect().
			Status(http.StatusUnprocessableEntity)
		test.ExpectationsMet(t, mock)
	})

	t.Run("Undecodable menu", func(t *testing.T) {
		test.CheckSpaceMock(mock)

		e.POST(basePath).
			WithHeaders(headers).
			Expect().
			Status(http.StatusUnprocessableEntity)
		test.ExpectationsMet(t, mock)
	})

	t.Run("menu with same name exists", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		mock.ExpectQuery(regexp.QuoteMeta(`SELECT count(*) FROM "menus"`)).
			WillReturnRows(sqlmock.NewRows([]string{"count"}).AddRow(1))

		e.POST(basePath).
			WithJSON(Data).
			WithHeaders(headers).
			Expect().
			Status(http.StatusUnprocessableEntity)
		test.ExpectationsMet(t, mock)
	})

	t.Run("create menu", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		mock.ExpectQuery(regexp.QuoteMeta(`SELECT count(*) FROM "menus"`)).
			WillReturnRows(sqlmock.NewRows([]string{"count"}).AddRow(0))
		slugCheckMock(mock)
		mock.ExpectBegin()
		mock.ExpectQuery(`INSERT INTO "menus"`).
			WithArgs(test.AnyTime{}, test.AnyTime{}, nil, 1, 1, Data["name"], Data["slug"], Data["menu"], 1).
			WillReturnRows(sqlmock.
				NewRows([]string{"id"}).
				AddRow(1))
		SelectQuery(mock)
		mock.ExpectCommit()

		e.POST(basePath).
			WithJSON(Data).
			WithHeaders(headers).
			Expect().
			Status(http.StatusCreated)
		test.ExpectationsMet(t, mock)
	})

	t.Run("meili server fails", func(t *testing.T) {
		test.DisableMeiliGock(testServer.URL)
		test.CheckSpaceMock(mock)
		mock.ExpectQuery(regexp.QuoteMeta(`SELECT count(*) FROM "menus"`)).
			WillReturnRows(sqlmock.NewRows([]string{"count"}).AddRow(0))
		slugCheckMock(mock)
		mock.ExpectBegin()
		mock.ExpectQuery(`INSERT INTO "menus"`).
			WithArgs(test.AnyTime{}, test.AnyTime{}, nil, 1, 1, Data["name"], Data["slug"], Data["menu"], 1).
			WillReturnRows(sqlmock.
				NewRows([]string{"id"}).
				AddRow(1))
		SelectQuery(mock)
		mock.ExpectRollback()

		e.POST(basePath).
			WithJSON(Data).
			WithHeaders(headers).
			Expect().
			Status(http.StatusInternalServerError)
		test.ExpectationsMet(t, mock)
	})
}
<file_sep>/service/podcast/action/feed.go
package podcast
import (
"encoding/json"
"fmt"
"net/http"
"strconv"
"time"
pcast "github.com/eduncan911/podcast"
"github.com/factly/dega-server/config"
"github.com/factly/dega-server/service/core/action/author"
coreModel "github.com/factly/dega-server/service/core/model"
"github.com/factly/dega-server/service/podcast/model"
"github.com/factly/x/errorx"
"github.com/factly/x/loggerx"
"github.com/go-chi/chi"
)
// Feeds serves the podcast RSS/iTunes feed for one podcast within a space.
// Route params: space_id, podcast_slug. Query params: page, limit (capped
// at 300, default 10) and sort (asc|desc, default desc, by created_at).
func Feeds(w http.ResponseWriter, r *http.Request) {
	spaceID := chi.URLParam(r, "space_id")
	sID, err := strconv.Atoi(spaceID)
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InvalidID()))
		return
	}

	space := &coreModel.Space{}
	space.ID = uint(sID)
	if err = config.DB.First(&space).Error; err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}

	slug := chi.URLParam(r, "podcast_slug")
	if slug == "" {
		errorx.Render(w, errorx.Parser(errorx.InvalidID()))
		return
	}

	// getting page query param
	pageNo := 1
	page := r.URL.Query().Get("page")
	if page != "" {
		pageNo, _ = strconv.Atoi(page)
	}

	// getting limit query param
	limit := 10
	lim := r.URL.Query().Get("limit")
	if lim != "" {
		limit, _ = strconv.Atoi(lim)
	}
	// max limit of 300
	if limit > 300 {
		limit = 300
	}

	sort := r.URL.Query().Get("sort")
	if sort != "asc" {
		sort = "desc"
	}

	// load the podcast with its categories and media
	result := &model.Podcast{}
	err = config.DB.Model(&model.Podcast{}).Where(&model.Podcast{
		SpaceID: uint(sID),
		Slug:    slug,
	}).Preload("Categories").Preload("Medium").Preload("PrimaryCategory").First(&result).Error
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.RecordNotFound()))
		return
	}

	now := time.Now()
	p := pcast.New(
		result.Title,
		fmt.Sprint("http://factly.org/podcasts/", result.Slug),
		result.HTMLDescription,
		&now, &now,
	)

	// podcast cover image (from the medium's "raw" URL, when present)
	if result.Medium != nil {
		podcastMediumURL := map[string]interface{}{}
		_ = json.Unmarshal(result.Medium.URL.RawMessage, &podcastMediumURL)
		if rawURL, found := podcastMediumURL["raw"]; found {
			p.IImage = &pcast.IImage{
				HREF: rawURL.(string),
			}
		}
	}
	if result.PrimaryCategory != nil {
		p.Category = result.PrimaryCategory.Name
	}
	p.Description = result.HTMLDescription
	for _, cat := range result.Categories {
		icat := pcast.ICategory{
			Text: cat.Name,
		}
		p.ICategories = append(p.ICategories, &icat)
	}
	p.Language = result.Language

	// fetch all episodes related to this podcast
	episodeList := make([]model.Episode, 0)
	config.DB.Model(&model.Episode{}).Where(&model.Episode{
		PodcastID: &result.ID,
	}).Preload("Medium").Order("created_at " + sort).Limit(limit).Offset((pageNo - 1) * limit).Find(&episodeList)

	episodeIDs := make([]uint, 0)
	for _, each := range episodeList {
		episodeIDs = append(episodeIDs, each.ID)
	}

	episodeAuthors := make([]model.EpisodeAuthor, 0)
	config.DB.Model(&model.EpisodeAuthor{}).Where("episode_id IN (?)", episodeIDs).Find(&episodeAuthors)

	var authorMap map[string]coreModel.Author
	if len(episodeAuthors) > 0 {
		// fix: removed a dead `if err != nil` check that followed this call —
		// author.Mapper does not return an error and err was provably nil here.
		authorMap = author.Mapper(space.OrganisationID, int(episodeAuthors[0].AuthorID))
	}

	// remember only the first author per episode
	episodeAuthorMap := make(map[uint]uint)
	for _, ea := range episodeAuthors {
		if _, found := episodeAuthorMap[ea.EpisodeID]; !found {
			episodeAuthorMap[ea.EpisodeID] = ea.AuthorID
		}
	}

	for _, episode := range episodeList {
		description := episode.HTMLDescription
		if description == "" {
			description = "----"
		}
		item := pcast.Item{
			Title:       episode.Title,
			GUID:        fmt.Sprint(episode.ID),
			Description: description,
			Link:        fmt.Sprint("https://factly.org/podcasts/", result.ID, "/episodes/", episode.ID),
			Source:      episode.AudioURL,
		}
		if episode.PublishedDate != nil {
			item.PubDateFormatted = episode.PublishedDate.String()
		}
		if episode.Medium != nil {
			episodeMediumURL := map[string]interface{}{}
			_ = json.Unmarshal(episode.Medium.URL.RawMessage, &episodeMediumURL)
			if rawURL, found := episodeMediumURL["raw"]; found {
				item.IImage = &pcast.IImage{
					HREF: rawURL.(string),
				}
			}
		}
		if authID, found := episodeAuthorMap[episode.ID]; found {
			if author, found := authorMap[fmt.Sprint(authID)]; found {
				item.Author = &pcast.Author{
					Name:  fmt.Sprint(author.FirstName, " ", author.LastName),
					Email: author.Email,
				}
			}
		}
		// NOTE(review): on AddItem failure this returns without writing any
		// HTTP response — confirm whether an error response is wanted here.
		if _, err := p.AddItem(item); err != nil {
			fmt.Println(item.Title, ": error", err.Error())
			return
		}
	}

	w.Header().Set("Content-Type", "application/xml")
	if err := p.Encode(w); err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
	}
}
<file_sep>/service/core/action/page/list.go
package page
import (
"fmt"
"net/http"
"net/url"
"strconv"
"github.com/factly/dega-server/config"
"github.com/factly/dega-server/service/core/action/author"
"github.com/factly/dega-server/service/core/model"
"github.com/factly/x/errorx"
"github.com/factly/x/loggerx"
"github.com/factly/x/meilisearchx"
"github.com/factly/x/middlewarex"
"github.com/factly/x/paginationx"
"github.com/factly/x/renderx"
)
// paging is the list-response envelope: the total matching row count plus
// the current page of page records.
type paging struct {
	Total int64      `json:"total"`
	Nodes []pageData `json:"nodes"`
}
// list - Get all pages
// @Summary Show all pages
// @Description Get all pages
// @Tags Page
// @ID get-all-pages
// @Produce json
// @Param X-User header string true "User ID"
// @Param X-Space header string true "Space ID"
// @Param limit query string false "limit per page"
// @Param page query string false "page number"
// @Param sort query string false "sort"
// @Success 200 {array} pageData
// @Router /core/pages [get]
func list(w http.ResponseWriter, r *http.Request) {
	// Space ID is mandatory; without it the request is unauthorized.
	sID, err := middlewarex.GetSpace(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}
	// Filters
	u, _ := url.Parse(r.URL.String())
	queryMap := u.Query()
	searchQuery := r.URL.Query().Get("q")
	sort := r.URL.Query().Get("sort")
	// Build a meilisearch filter expression from tag/category/author/status params.
	filters := generateFilters(queryMap["tag"], queryMap["category"], queryMap["author"], queryMap["status"])
	filteredPageIDs := make([]uint, 0)
	if filters != "" {
		// Scope the filter to the caller's space.
		// NOTE(review): when only a search query is given (filters == ""), no
		// space_id clause is added to the search — confirm the index entries
		// are space-scoped some other way.
		filters = fmt.Sprint(filters, " AND space_id=", sID)
	}
	result := paging{}
	result.Nodes = make([]pageData, 0)
	if filters != "" || searchQuery != "" {
		// Search pages with filter
		var hits []interface{}
		var res map[string]interface{}
		if searchQuery != "" {
			hits, err = meilisearchx.SearchWithQuery("dega", searchQuery, filters, "page")
		} else {
			res, err = meilisearchx.SearchWithoutQuery("dega", filters, "page")
			if _, found := res["hits"]; found {
				hits = res["hits"].([]interface{})
			}
		}
		// A search failure degrades to an empty (but successful) response.
		if err != nil {
			loggerx.Error(err)
			renderx.JSON(w, http.StatusOK, result)
			return
		}
		filteredPageIDs = meilisearchx.GetIDArray(hits)
		if len(filteredPageIDs) == 0 {
			renderx.JSON(w, http.StatusOK, result)
			return
		}
	}
	posts := make([]model.Post, 0)
	offset, limit := paginationx.Parse(r.URL.Query())
	// Only "asc" is honoured; anything else falls back to "desc".
	if sort != "asc" {
		sort = "desc"
	}
	tx := config.DB.Preload("Medium").Preload("Format").Preload("Tags").Preload("Categories").Model(&model.Post{}).Where(&model.Post{
		SpaceID: uint(sID),
	}).Where("is_page = ?", true).Order("created_at " + sort)
	// Optional format filter from the query string.
	formatIDs := make([]uint, 0)
	for _, fid := range queryMap["format"] {
		fidStr, _ := strconv.Atoi(fid)
		formatIDs = append(formatIDs, uint(fidStr))
	}
	if len(formatIDs) > 0 {
		tx.Where("format_id IN (?)", formatIDs)
	}
	if len(filteredPageIDs) > 0 {
		// Restrict to IDs returned by the search step.
		err = tx.Where(filteredPageIDs).Count(&result.Total).Offset(offset).Limit(limit).Find(&posts).Error
	} else {
		// No search/filter: list everything except template pages.
		err = tx.Where("status != ?", "template").Count(&result.Total).Offset(offset).Limit(limit).Find(&posts).Error
	}
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.DBError()))
		return
	}
	var postIDs []uint
	for _, p := range posts {
		postIDs = append(postIDs, p.ID)
	}
	// fetch all authors
	authors, err := author.All(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
		return
	}
	// fetch all authors related to posts
	postAuthors := []model.PostAuthor{}
	config.DB.Model(&model.PostAuthor{}).Where("post_id in (?)", postIDs).Find(&postAuthors)
	// postAuthorMap: post ID -> author IDs attached to that post.
	postAuthorMap := make(map[uint][]uint)
	for _, po := range postAuthors {
		if _, found := postAuthorMap[po.PostID]; !found {
			postAuthorMap[po.PostID] = make([]uint, 0)
		}
		postAuthorMap[po.PostID] = append(postAuthorMap[po.PostID], po.AuthorID)
	}
	// Stitch author records onto each page before rendering.
	for _, post := range posts {
		pageList := &pageData{}
		pageList.Authors = make([]model.Author, 0)
		pageList.Post = post
		postAuthors, hasEle := postAuthorMap[post.ID]
		if hasEle {
			for _, postAuthor := range postAuthors {
				aID := fmt.Sprint(postAuthor)
				if author, found := authors[aID]; found {
					pageList.Authors = append(pageList.Authors, author)
				}
			}
		}
		result.Nodes = append(result.Nodes, *pageList)
	}
	renderx.JSON(w, http.StatusOK, result)
}
// generateFilters builds a meilisearch filter expression from the given ID
// lists, joining the per-field clauses with " AND ". Empty lists contribute
// nothing; an empty string is returned when no list has entries.
func generateFilters(tagIDs, categoryIDs, authorIDs, status []string) string {
	fieldValues := []struct {
		ids   []string
		field string
	}{
		{tagIDs, "tag_ids"},
		{categoryIDs, "category_ids"},
		{authorIDs, "author_ids"},
		{status, "status"},
	}

	filters := ""
	for _, fv := range fieldValues {
		if len(fv.ids) == 0 {
			continue
		}
		if filters != "" {
			filters = fmt.Sprint(filters, " AND ")
		}
		filters = fmt.Sprint(filters, meilisearchx.GenerateFieldFilter(fv.ids, fv.field))
	}
	return filters
}
<file_sep>/service/core/model/post.go
package model
import (
"errors"
"time"
"gorm.io/gorm"
"github.com/factly/dega-server/config"
"github.com/jinzhu/gorm/dialects/postgres"
)
// Post model. A row in "posts" doubles as a page when IsPage is true
// (see the page handlers that query with is_page = true).
type Post struct {
	config.Base
	Title    string `gorm:"column:title" json:"title"`
	Subtitle string `gorm:"column:subtitle" json:"subtitle"`
	Slug     string `gorm:"column:slug" json:"slug"`
	Status   string `gorm:"column:status" json:"status"`
	// IsPage distinguishes static pages from regular posts.
	IsPage  bool   `gorm:"column:is_page" json:"is_page"`
	Excerpt string `gorm:"column:excerpt" json:"excerpt"`
	// Description is stored as editor JSON; HTMLDescription is its rendered form.
	Description     postgres.Jsonb `gorm:"column:description" json:"description" sql:"jsonb" swaggertype:"primitive,string"`
	HTMLDescription string         `gorm:"column:html_description" json:"html_description,omitempty"`
	IsFeatured      bool           `gorm:"column:is_featured" json:"is_featured"`
	IsSticky        bool           `gorm:"column:is_sticky" json:"is_sticky"`
	IsHighlighted   bool           `gorm:"column:is_highlighted" json:"is_highlighted"`
	// FeaturedMediumID is nullable; validated in BeforeSave to belong to the same space.
	FeaturedMediumID *uint   `gorm:"column:featured_medium_id;default:NULL" json:"featured_medium_id"`
	Medium           *Medium `gorm:"foreignKey:featured_medium_id" json:"medium"`
	FormatID         uint    `gorm:"column:format_id" json:"format_id" sql:"DEFAULT:NULL"`
	Format           *Format `json:"format"`
	PublishedDate    *time.Time `gorm:"column:published_date" json:"published_date"`
	SpaceID          uint       `gorm:"column:space_id" json:"space_id"`
	Schemas          postgres.Jsonb `gorm:"column:schemas" json:"schemas" swaggertype:"primitive,string"`
	Meta             postgres.Jsonb `gorm:"column:meta" json:"meta" swaggertype:"primitive,string"`
	HeaderCode       string         `gorm:"column:header_code" json:"header_code"`
	FooterCode       string         `gorm:"column:footer_code" json:"footer_code"`
	MetaFields       postgres.Jsonb `gorm:"column:meta_fields" json:"meta_fields" swaggertype:"primitive,string"`
	// Tags/Categories are many-to-many via join tables; both are space-checked in BeforeSave.
	Tags       []Tag      `gorm:"many2many:post_tags;" json:"tags"`
	Categories []Category `gorm:"many2many:post_categories;" json:"categories"`
	Space      *Space     `json:"space,omitempty"`
}
// PostAuthor model — join row linking a post to one of its authors.
type PostAuthor struct {
	config.Base
	AuthorID uint `gorm:"column:author_id" json:"author_id"`
	PostID   uint `gorm:"column:post_id" json:"post_id"`
}

// postUser is the context key under which the acting user's ID (an int)
// is stored for the BeforeCreate hooks below.
var postUser config.ContextKey = "post_user"
// BeforeSave - validation for associations. Verifies that the featured
// medium, the format, and every attached tag and category belong to the
// same space as the post; returns an error describing the first mismatch.
func (post *Post) BeforeSave(tx *gorm.DB) (e error) {
	spaceID := post.SpaceID

	if post.FeaturedMediumID != nil && *post.FeaturedMediumID > 0 {
		var medium Medium
		medium.ID = *post.FeaturedMediumID
		if err := tx.Model(&Medium{}).Where(Medium{
			SpaceID: spaceID,
		}).First(&medium).Error; err != nil {
			return errors.New("medium do not belong to same space")
		}
	}

	if post.FormatID > 0 {
		var format Format
		format.ID = post.FormatID
		if err := tx.Model(&Format{}).Where(Format{
			SpaceID: spaceID,
		}).First(&format).Error; err != nil {
			return errors.New("format do not belong to same space")
		}
	}

	for _, t := range post.Tags {
		if t.SpaceID != spaceID {
			return errors.New("some tags do not belong to same space")
		}
	}

	for _, c := range post.Categories {
		if c.SpaceID != spaceID {
			return errors.New("some categories do not belong to same space")
		}
	}

	return nil
}
// BeforeCreate hook — stamps the creating/updating user (taken from the
// request context, if present) onto the new post.
func (post *Post) BeforeCreate(tx *gorm.DB) error {
	val := tx.Statement.Context.Value(postUser)
	if val == nil {
		// No user in context (e.g. system writes): leave the audit fields alone.
		return nil
	}
	id := uint(val.(int))
	post.CreatedByID = id
	post.UpdatedByID = id
	return nil
}
// BeforeCreate hook — stamps the creating/updating user (taken from the
// request context, if present) onto the new post-author link.
func (pa *PostAuthor) BeforeCreate(tx *gorm.DB) error {
	val := tx.Statement.Context.Value(postUser)
	if val == nil {
		// No user in context: leave the audit fields alone.
		return nil
	}
	id := uint(val.(int))
	pa.CreatedByID = id
	pa.UpdatedByID = id
	return nil
}
<file_sep>/cmd/serve.go
package cmd
import (
"log"
"net/http"
"github.com/dlmiddlecote/sqlstats"
"github.com/factly/dega-server/config"
"github.com/factly/dega-server/service"
"github.com/factly/dega-server/util"
"github.com/factly/x/meilisearchx"
"github.com/go-chi/chi"
"github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/client_golang/prometheus/promhttp"
"github.com/spf13/cobra"
"github.com/spf13/viper"
)
// init registers the serve subcommand on the root command.
func init() {
	rootCmd.AddCommand(serveCmd)
}
// serveCmd boots the dega-server stack: the database, the meilisearch index,
// optional NATS, a Prometheus metrics server on :8001, optional feeds on
// :8002, and the main API on :8000 (blocking).
var serveCmd = &cobra.Command{
	Use:   "serve",
	Short: "Starts server for dega-server.",
	Run: func(cmd *cobra.Command, args []string) {
		// db setup
		config.SetupDB()

		meilisearchx.SetupMeiliSearch("dega", []string{"name", "slug", "description", "title", "subtitle", "excerpt", "site_title", "site_address", "tag_line", "review", "review_tag_line"})

		if util.CheckNats() {
			util.ConnectNats()
			defer util.NC.Close()
		}

		r := service.RegisterRoutes()

		// Prometheus metrics server, including sql connection-pool stats.
		go func() {
			promRouter := chi.NewRouter()
			sqlDB, err := config.DB.DB()
			if err != nil {
				// Fix: previously this error was discarded with `_`, which
				// would hand a nil *sql.DB to the stats collector.
				log.Fatal(err)
			}
			collector := sqlstats.NewStatsCollector(viper.GetString("database_name"), sqlDB)
			prometheus.MustRegister(collector)
			promRouter.Mount("/metrics", promhttp.Handler())
			log.Fatal(http.ListenAndServe(":8001", promRouter))
		}()

		// Optional feeds server, gated by config.
		if viper.IsSet("enable_feeds") && viper.GetBool("enable_feeds") {
			go func() {
				r := service.RegisterFeedsRoutes()
				if err := http.ListenAndServe(":8002", r); err != nil {
					log.Fatal(err)
				}
			}()
		}

		// Main API server; blocks until shutdown or error.
		if err := http.ListenAndServe(":8000", r); err != nil {
			log.Fatal(err)
		}
	},
}
<file_sep>/test/service/core/author/list_test.go
package author
import (
"net/http"
"net/http/httptest"
"testing"
"github.com/factly/dega-server/service"
"github.com/factly/dega-server/test"
"github.com/gavv/httpexpect/v2"
"github.com/spf13/viper"
"gopkg.in/h2non/gock.v1"
)
// TestList exercises the author list endpoint: the happy path, both
// missing-header unauthorized paths, and a kavach outage.
func TestList(t *testing.T) {
	mock := test.SetupMockDB()
	testServer := httptest.NewServer(service.RegisterRoutes())
	gock.New(testServer.URL).EnableNetworking().Persist()
	defer gock.DisableNetworking()
	defer testServer.Close()

	// create httpexpect instance
	e := httpexpect.New(t, testServer.URL)

	t.Run("test list.go", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		// Extracts "nodes" from the response, which contains the real data that we sent in array format.
		// Checking only if the email value is equal since the data sent and data retrieved is different cause list.go manipulates it,
		// so direct comparison of the objects (sent & received) is futile.
		e.GET(basePath).
			WithHeaders(headers).
			Expect().
			Status(http.StatusOK).
			JSON().
			Object().
			Value("nodes").
			Array().
			Element(0).
			Object().
			Value("email").
			Equal(test.Dummy_AuthorList[0]["email"])
	})

	// Header missing space. This results in Unauthorization.
	t.Run("Missing space for list", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		e.GET(basePath).
			WithHeaders(missingSpace).
			Expect().
			Status(http.StatusUnauthorized)
	})

	// Header missing User ID. This results in Unauthorization.
	t.Run("Missing user for list", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		e.GET(basePath).
			WithHeaders(missingUser).
			Expect().
			Status(http.StatusUnauthorized)
	})

	t.Run("Request to kavach fails", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		// Simulate the kavach auth service being unreachable; the endpoint
		// must report 503.
		test.DisableKavachGock(testServer.URL)
		gock.New(viper.GetString("kavach_url") + "/organisations/[0-9]+/applications/dega/access").
			Persist().
			Reply(http.StatusOK)
		e.GET(basePath).
			WithHeaders(headers).
			Expect().
			Status(http.StatusServiceUnavailable)
	})
}
<file_sep>/test/service/core/format/testvars.go
package format
import (
"database/sql/driver"
"fmt"
"regexp"
"strings"
"time"
"github.com/DATA-DOG/go-sqlmock"
"github.com/factly/dega-server/test"
)
// Common request headers carrying space and user identity.
var headers = map[string]string{
	"X-Space": "1",
	"X-User":  "1",
}

// Data is a valid format payload used across the format tests.
var Data = map[string]interface{}{
	"name": "Fact Check",
	"slug": "fact-check",
}

// invalidData fails validation (name shorter than the allowed minimum).
var invalidData = map[string]interface{}{
	"name": "a",
}

// defaultData mirrors the formats created by the "default formats" endpoint.
var defaultData = []map[string]interface{}{
	{
		"name":        "Article",
		"slug":        "article",
		"description": "Article",
	},
	{
		"name":        "Fact Check",
		"slug":        "fact-check",
		"description": "Fact Check",
	},
}

// columns is the result-set shape returned by the format select mocks below.
var columns = []string{"id", "created_at", "updated_at", "deleted_at", "created_by_id", "updated_by_id", "name", "slug"}

// SQL patterns matched by sqlmock expectations.
var selectQuery = `SELECT (.+) FROM "formats"`
var deleteQuery = regexp.QuoteMeta(`UPDATE "formats" SET "deleted_at"=`)
var paginationQuery = `SELECT \* FROM "formats" (.+) LIMIT 1 OFFSET 1`

// API paths under test.
var basePath = "/core/formats"
var defaultsPath = "/core/formats/default"
var path = "/core/formats/{format_id}"
// slugCheckMock expects the slug-uniqueness lookup and returns no rows,
// i.e. the slug is free to use.
func slugCheckMock(mock sqlmock.Sqlmock) {
	emptyRows := sqlmock.NewRows([]string{"id", "created_at", "updated_at", "deleted_at", "space_id", "name", "slug"})
	query := mock.ExpectQuery(regexp.QuoteMeta(`SELECT slug, space_id FROM "formats"`))
	query.WithArgs(fmt.Sprint(Data["slug"], "%"), 1).WillReturnRows(emptyRows)
}
// formatInsertMock expects a transaction begin followed by the format
// INSERT, returning the new row's id (1).
func formatInsertMock(mock sqlmock.Sqlmock) {
	mock.ExpectBegin()
	idRow := sqlmock.NewRows([]string{"id"}).AddRow(1)
	insert := mock.ExpectQuery(`INSERT INTO "formats"`)
	insert.WithArgs(test.AnyTime{}, test.AnyTime{}, nil, 1, 1, Data["name"], Data["slug"], "", 1).
		WillReturnRows(idRow)
}
// sameNameCount expects the duplicate-name count query (lower-cased name,
// space 1) and returns the given count.
func sameNameCount(mock sqlmock.Sqlmock, count int, name interface{}) {
	lowered := strings.ToLower(name.(string))
	countRow := sqlmock.NewRows([]string{"count"}).AddRow(count)
	query := mock.ExpectQuery(regexp.QuoteMeta(`SELECT count(*) FROM "formats"`))
	query.WithArgs(1, lowered).WillReturnRows(countRow)
}
// recordNotFoundMock expects a format lookup for id 100 and returns no
// rows, simulating a missing record.
func recordNotFoundMock(mock sqlmock.Sqlmock) {
	noRows := sqlmock.NewRows(columns)
	query := mock.ExpectQuery(selectQuery)
	query.WithArgs(1, 100).WillReturnRows(noRows)
}
// SelectMock expects a format select with the given args and returns one
// row built from the shared Data fixture.
func SelectMock(mock sqlmock.Sqlmock, args ...driver.Value) {
	formatRow := sqlmock.NewRows(columns).
		AddRow(1, time.Now(), time.Now(), nil, 1, 1, Data["name"], Data["slug"])
	query := mock.ExpectQuery(selectQuery)
	query.WithArgs(args...).WillReturnRows(formatRow)
}
// formatPostExpect expects the "how many posts use this format" count
// query (run before delete) and returns the given count.
func formatPostExpect(mock sqlmock.Sqlmock, count int) {
	countRow := sqlmock.NewRows([]string{"count"}).AddRow(count)
	query := mock.ExpectQuery(regexp.QuoteMeta(`SELECT count(*) FROM "posts"`))
	query.WithArgs(1).WillReturnRows(countRow)
}
// formatUpdateMock expects a transaction begin followed by the format
// UPDATE with the given payload, reporting one affected row.
func formatUpdateMock(mock sqlmock.Sqlmock, format map[string]interface{}) {
	mock.ExpectBegin()
	update := mock.ExpectExec(`UPDATE \"formats\"`)
	update.WithArgs(test.AnyTime{}, 1, format["name"], format["slug"], 1).
		WillReturnResult(sqlmock.NewResult(1, 1))
}
// formatCountQuery expects the total-formats count query and returns the
// given count.
func formatCountQuery(mock sqlmock.Sqlmock, count int) {
	countRow := sqlmock.NewRows([]string{"count"}).AddRow(count)
	mock.ExpectQuery(regexp.QuoteMeta(`SELECT count(*) FROM "formats"`)).WillReturnRows(countRow)
}
// EmptyRowMock expects a format lookup for id 1 and returns an empty
// result set.
func EmptyRowMock(mock sqlmock.Sqlmock) {
	noRows := sqlmock.NewRows(columns)
	query := mock.ExpectQuery(selectQuery)
	query.WithArgs(1, 1).WillReturnRows(noRows)
}
// selectAfterUpdate expects the re-select that follows an update and
// returns one row carrying the updated name/slug.
func selectAfterUpdate(mock sqlmock.Sqlmock, format map[string]interface{}) {
	updatedRow := sqlmock.NewRows(columns).
		AddRow(1, time.Now(), time.Now(), nil, 1, 1, format["name"], format["slug"])
	query := mock.ExpectQuery(selectQuery)
	query.WithArgs(1, 1).WillReturnRows(updatedRow)
}
<file_sep>/service/podcast/action/episode/route.go
package episode
import (
"time"
"github.com/factly/dega-server/config"
coreModel "github.com/factly/dega-server/service/core/model"
"github.com/factly/dega-server/service/podcast/model"
"github.com/factly/dega-server/util"
"github.com/go-chi/chi"
"github.com/jinzhu/gorm/dialects/postgres"
)
// episode model — the request payload accepted by the episode create and
// update handlers. Validation tags enforce the required fields.
type episode struct {
	Title         string         `json:"title" validate:"required,min=3,max=50"`
	Slug          string         `json:"slug"`
	Season        int            `json:"season" validate:"required"`
	Episode       int            `json:"episode" validate:"required"`
	AudioURL      string         `json:"audio_url" validate:"required"`
	PodcastID     uint           `json:"podcast_id"`
	Description   postgres.Jsonb `json:"description" swaggertype:"primitive,string"`
	PublishedDate *time.Time     `json:"published_date" sql:"DEFAULT:NULL"`
	MediumID      uint           `json:"medium_id"`
	SpaceID       uint           `json:"space_id"`
	// AuthorIDs lists the authors to associate with the episode.
	AuthorIDs  []uint         `json:"author_ids"`
	MetaFields postgres.Jsonb `json:"meta_fields" swaggertype:"primitive,string"`
}
// episodeData is the response shape: an episode plus its resolved authors.
type episodeData struct {
	model.Episode
	Authors []coreModel.Author `json:"authors"`
}

// episodeUser is the context key carrying the acting user's ID for
// episode hooks/handlers.
var episodeUser config.ContextKey = "episode_user"
// Router - Group of episode router. Registers CRUD routes for episodes,
// each guarded by the corresponding keto policy check.
func Router() chi.Router {
	const entity = "episodes"
	r := chi.NewRouter()

	r.With(util.CheckKetoPolicy(entity, "get")).Get("/", list)
	r.With(util.CheckKetoPolicy(entity, "create")).Post("/", create)

	r.Route("/{episode_id}", func(sub chi.Router) {
		sub.With(util.CheckKetoPolicy(entity, "get")).Get("/", details)
		sub.With(util.CheckKetoPolicy(entity, "update")).Put("/", update)
		sub.With(util.CheckKetoPolicy(entity, "delete")).Delete("/", delete)
	})

	return r
}
<file_sep>/test/service/core/category/create_test.go
package category
import (
"net/http"
"net/http/httptest"
"testing"
"github.com/DATA-DOG/go-sqlmock"
"github.com/factly/dega-server/service"
"github.com/factly/dega-server/test"
"github.com/factly/dega-server/test/service/core/medium"
"github.com/gavv/httpexpect"
"github.com/jinzhu/gorm/dialects/postgres"
"gopkg.in/h2non/gock.v1"
)
// TestCategoryCreate drives the category create endpoint through its
// validation, happy-path, parent/medium-error, duplicate-name and
// meili-outage branches. The sqlmock expectations are strictly ordered
// and must mirror the handler's query sequence.
func TestCategoryCreate(t *testing.T) {
	mock := test.SetupMockDB()
	test.MockServer()
	defer gock.DisableNetworking()
	testServer := httptest.NewServer(service.RegisterRoutes())
	gock.New(testServer.URL).EnableNetworking().Persist()
	// NOTE(review): gock.DisableNetworking is deferred twice (here and
	// above); the second deferral is redundant.
	defer gock.DisableNetworking()
	defer testServer.Close()

	// create httpexpect instance
	e := httpexpect.New(t, testServer.URL)

	t.Run("Unprocessable category", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		e.POST(basePath).
			WithJSON(invalidData).
			WithHeaders(headers).
			Expect().
			Status(http.StatusUnprocessableEntity)
		test.ExpectationsMet(t, mock)
	})

	t.Run("Unable to decode category", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		// No body at all -> decode failure.
		e.POST(basePath).
			WithHeaders(headers).
			Expect().
			Status(http.StatusUnprocessableEntity)
		test.ExpectationsMet(t, mock)
	})

	t.Run("create category without parent", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		sameNameCount(mock, 0, Data["name"])
		slugCheckMock(mock, Data)
		insertMock(mock)
		SelectWithOutSpace(mock)
		medium.SelectWithOutSpace(mock)
		mock.ExpectCommit()
		e.POST(basePath).
			WithHeaders(headers).
			WithJSON(Data).
			Expect().
			Status(http.StatusCreated).JSON().Object().ContainsMap(resData)
		test.ExpectationsMet(t, mock)
	})

	t.Run("parent category does not exist", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		// Parent lookup returns no rows -> request rejected.
		mock.ExpectQuery(selectQuery).
			WithArgs(1, 1).
			WillReturnRows(sqlmock.NewRows(Columns))
		Data["parent_id"] = 1
		e.POST(basePath).
			WithHeaders(headers).
			WithJSON(Data).
			Expect().
			Status(http.StatusUnprocessableEntity)
		Data["parent_id"] = 0
		test.ExpectationsMet(t, mock)
	})

	t.Run("create category with empty slug", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		sameNameCount(mock, 0, Data["name"])
		slugCheckMock(mock, Data)
		insertMock(mock)
		SelectWithOutSpace(mock)
		medium.SelectWithOutSpace(mock)
		mock.ExpectCommit()
		// Empty slug should be auto-generated from the name by the handler.
		Data["slug"] = ""
		res := e.POST(basePath).
			WithHeaders(headers).
			WithJSON(Data).
			Expect().
			Status(http.StatusCreated).JSON().Object()
		Data["slug"] = "test-category"
		res.ContainsMap(resData)
		test.ExpectationsMet(t, mock)
	})

	t.Run("medium does not belong same space", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		sameNameCount(mock, 0, Data["name"])
		slugCheckMock(mock, Data)
		insertWithMediumError(mock)
		e.POST(basePath).
			WithHeaders(headers).
			WithJSON(Data).
			Expect().
			Status(http.StatusInternalServerError)
		test.ExpectationsMet(t, mock)
	})

	t.Run("medium does not exist", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		sameNameCount(mock, 0, Data["name"])
		slugCheckMock(mock, Data)
		insertWithMediumError(mock)
		e.POST(basePath).
			WithHeaders(headers).
			WithJSON(Data).
			Expect().
			Status(http.StatusInternalServerError)
		test.ExpectationsMet(t, mock)
	})

	t.Run("when category with same name exist", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		sameNameCount(mock, 1, Data["name"])
		e.POST(basePath).
			WithHeaders(headers).
			WithJSON(Data).
			Expect().
			Status(http.StatusUnprocessableEntity)
		test.ExpectationsMet(t, mock)
	})

	t.Run("cannot parse category description", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		sameNameCount(mock, 0, Data["name"])
		// Swap in a description that is not a valid editor document; restore
		// the valid one after the request.
		Data["description"] = postgres.Jsonb{
			RawMessage: []byte(`{"block": "new"}`),
		}
		e.POST(basePath).
			WithHeaders(headers).
			WithJSON(Data).
			Expect().
			Status(http.StatusUnprocessableEntity)
		Data["description"] = postgres.Jsonb{
			RawMessage: []byte(`{"time":1617039625490,"blocks":[{"type":"paragraph","data":{"text":"Test Description"}}],"version":"2.19.0"}`),
		}
		test.ExpectationsMet(t, mock)
	})

	t.Run("create category when meili is down", func(t *testing.T) {
		// Meilisearch indexing failure must roll the DB transaction back.
		test.DisableMeiliGock(testServer.URL)
		test.CheckSpaceMock(mock)
		sameNameCount(mock, 0, Data["name"])
		slugCheckMock(mock, Data)
		insertMock(mock)
		SelectWithOutSpace(mock)
		medium.SelectWithOutSpace(mock)
		mock.ExpectRollback()
		e.POST(basePath).
			WithHeaders(headers).
			WithJSON(Data).
			Expect().
			Status(http.StatusInternalServerError)
		test.ExpectationsMet(t, mock)
	})
}
<file_sep>/test/service/core/page/update_test.go
package page
import (
"database/sql/driver"
"errors"
"net/http"
"net/http/httptest"
"regexp"
"testing"
"time"
"github.com/DATA-DOG/go-sqlmock"
"github.com/factly/dega-server/service"
"github.com/factly/dega-server/test"
"github.com/factly/dega-server/test/service/core/category"
"github.com/factly/dega-server/test/service/core/format"
"github.com/factly/dega-server/test/service/core/medium"
"github.com/factly/dega-server/test/service/core/tag"
"github.com/gavv/httpexpect"
"github.com/jinzhu/gorm/dialects/postgres"
"gopkg.in/h2non/gock.v1"
)
// TestPageUpdate drives the page update endpoint through validation
// failures, tag/category update failures, the full happy path, the
// featured_medium_id = 0 (clear medium) path, and a meili outage.
// The sqlmock expectations are strictly ordered and mirror the handler's
// exact query sequence — do not reorder them.
func TestPageUpdate(t *testing.T) {
	mock := test.SetupMockDB()
	test.MockServer()
	defer gock.DisableNetworking()
	testServer := httptest.NewServer(service.RegisterRoutes())
	gock.New(testServer.URL).EnableNetworking().Persist()
	defer gock.DisableNetworking()
	defer testServer.Close()

	// create httpexpect instance
	e := httpexpect.New(t, testServer.URL)

	t.Run("invalid page id", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		e.PUT(path).
			WithPath("page_id", "invalid_id").
			WithHeaders(headers).
			Expect().
			Status(http.StatusBadRequest)
	})

	t.Run("invalid page body", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		e.PUT(path).
			WithPath("page_id", 1).
			WithJSON(invalidData).
			WithHeaders(headers).
			Expect().
			Status(http.StatusUnprocessableEntity)
		test.ExpectationsMet(t, mock)
	})

	t.Run("undecodable page body", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		e.PUT(path).
			WithPath("page_id", 1).
			WithHeaders(headers).
			Expect().
			Status(http.StatusUnprocessableEntity)
		test.ExpectationsMet(t, mock)
	})

	t.Run("page record not found", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		// Lookup for id 100 returns no rows -> 404.
		mock.ExpectQuery(selectQuery).
			WithArgs(true, 1, 100).
			WillReturnRows(sqlmock.NewRows(columns))
		e.PUT(path).
			WithPath("page_id", "100").
			WithHeaders(headers).
			WithJSON(Data).
			Expect().
			Status(http.StatusNotFound)
		test.ExpectationsMet(t, mock)
	})

	t.Run("cannot parse page description", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		SelectMock(mock, true, 1, 1)
		// Invalid editor-JSON description; restored after the request.
		Data["description"] = postgres.Jsonb{
			RawMessage: []byte(`{"invalid":"block"}`),
		}
		e.PUT(path).
			WithPath("page_id", "1").
			WithHeaders(headers).
			WithJSON(Data).
			Expect().
			Status(http.StatusUnprocessableEntity)
		test.ExpectationsMet(t, mock)
		Data["description"] = postgres.Jsonb{
			RawMessage: []byte(`{"time":1617039625490,"blocks":[{"type":"paragraph","data":{"text":"Test Description"}}],"version":"2.19.0"}`),
		}
	})

	t.Run("updating tags fail", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		SelectMock(mock, true, 1, 1)
		mock.ExpectBegin()
		// get new tags & categories to update
		tag.SelectMock(mock, tag.Data, 1)
		medium.SelectWithSpace(mock)
		format.SelectMock(mock, 1, 1)
		mock.ExpectExec(`UPDATE "posts" SET`).
			WithArgs(test.AnyTime{}, 1).
			WillReturnResult(driver.ResultNoRows)
		// Tag insert errors out -> whole transaction rolls back.
		mock.ExpectQuery(`INSERT INTO "tags"`).
			WithArgs(test.AnyTime{}, test.AnyTime{}, nil, 1, 1, tag.Data["name"], tag.Data["slug"], tag.Data["description"], tag.Data["html_description"], tag.Data["is_featured"], 1, 1).
			WillReturnError(errors.New(`cannot update post tags`))
		mock.ExpectRollback()
		e.PUT(path).
			WithPath("page_id", "1").
			WithHeaders(headers).
			WithJSON(Data).
			Expect().
			Status(http.StatusInternalServerError)
		test.ExpectationsMet(t, mock)
	})

	t.Run("updating categories fails", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		SelectMock(mock, true, 1, 1)
		mock.ExpectBegin()
		// Tag association succeeds ...
		tag.SelectMock(mock, tag.Data, 1)
		medium.SelectWithSpace(mock)
		format.SelectMock(mock, 1, 1)
		mock.ExpectExec(`UPDATE "posts" SET`).
			WithArgs(test.AnyTime{}, 1).
			WillReturnResult(driver.ResultNoRows)
		mock.ExpectQuery(`INSERT INTO "tags"`).
			WithArgs(test.AnyTime{}, test.AnyTime{}, nil, 1, 1, tag.Data["name"], tag.Data["slug"], tag.Data["description"], tag.Data["html_description"], tag.Data["is_featured"], 1, 1).
			WillReturnRows(sqlmock.
				NewRows([]string{"id"}).
				AddRow(1))
		mock.ExpectExec(`INSERT INTO "post_tags"`).
			WithArgs(1, 1).
			WillReturnResult(sqlmock.NewResult(0, 1))
		mock.ExpectExec(regexp.QuoteMeta(`DELETE FROM "post_tags"`)).
			WithArgs(1, 1).
			WillReturnResult(sqlmock.NewResult(0, 1))
		// ... but the category insert errors out -> rollback.
		category.SelectWithOutSpace(mock)
		medium.SelectWithSpace(mock)
		format.SelectMock(mock, 1, 1)
		mock.ExpectExec(`UPDATE "posts" SET`).
			WithArgs(test.AnyTime{}, 1).
			WillReturnResult(driver.ResultNoRows)
		medium.SelectWithSpace(mock)
		mock.ExpectQuery(`INSERT INTO "categories"`).
			WithArgs(test.AnyTime{}, test.AnyTime{}, nil, 1, 1, category.Data["name"], category.Data["slug"], category.Data["description"], category.Data["html_description"], category.Data["is_featured"], sqlmock.AnyArg(), sqlmock.AnyArg(), sqlmock.AnyArg(), sqlmock.AnyArg(), sqlmock.AnyArg()).
			WillReturnError(errors.New(`cannot update page categories`))
		mock.ExpectRollback()
		e.PUT(path).
			WithPath("page_id", "1").
			WithHeaders(headers).
			WithJSON(Data).
			Expect().
			Status(http.StatusInternalServerError)
		test.ExpectationsMet(t, mock)
	})

	t.Run("update page", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		SelectMock(mock, true, 1, 1)
		mock.ExpectBegin()
		// Tag association updates.
		tag.SelectMock(mock, tag.Data, 1)
		medium.SelectWithSpace(mock)
		format.SelectMock(mock, 1, 1)
		mock.ExpectExec(`UPDATE "posts" SET`).
			WithArgs(test.AnyTime{}, 1).
			WillReturnResult(driver.ResultNoRows)
		mock.ExpectQuery(`INSERT INTO "tags"`).
			WithArgs(test.AnyTime{}, test.AnyTime{}, nil, 1, 1, tag.Data["name"], tag.Data["slug"], tag.Data["description"], tag.Data["html_description"], tag.Data["is_featured"], 1, 1).
			WillReturnRows(sqlmock.
				NewRows([]string{"id"}).
				AddRow(1))
		mock.ExpectExec(`INSERT INTO "post_tags"`).
			WithArgs(1, 1).
			WillReturnResult(sqlmock.NewResult(0, 1))
		mock.ExpectExec(regexp.QuoteMeta(`DELETE FROM "post_tags"`)).
			WithArgs(1, 1).
			WillReturnResult(sqlmock.NewResult(0, 1))
		// Category association updates.
		category.SelectWithOutSpace(mock)
		medium.SelectWithSpace(mock)
		format.SelectMock(mock, 1, 1)
		mock.ExpectExec(`UPDATE "posts" SET`).
			WithArgs(test.AnyTime{}, 1).
			WillReturnResult(driver.ResultNoRows)
		medium.SelectWithSpace(mock)
		mock.ExpectQuery(`INSERT INTO "categories"`).
			WithArgs(test.AnyTime{}, test.AnyTime{}, nil, 1, 1, category.Data["name"], category.Data["slug"], category.Data["description"], category.Data["html_description"], category.Data["is_featured"], sqlmock.AnyArg(), sqlmock.AnyArg(), sqlmock.AnyArg(), sqlmock.AnyArg(), sqlmock.AnyArg()).
			WillReturnRows(sqlmock.
				NewRows([]string{"id", "parent_id", "medium_id"}).
				AddRow(1, 1, 1))
		mock.ExpectExec(`INSERT INTO "post_categories"`).
			WithArgs(1, 1).
			WillReturnResult(sqlmock.NewResult(0, 1))
		mock.ExpectExec(regexp.QuoteMeta(`DELETE FROM "post_categories"`)).
			WithArgs(1, 1).
			WillReturnResult(sqlmock.NewResult(0, 1))
		// Flag update followed by the main column update.
		medium.SelectWithSpace(mock)
		format.SelectMock(mock, 1, 1)
		mock.ExpectExec(`UPDATE \"posts\"`).
			WithArgs(test.AnyTime{}, Data["is_featured"], Data["is_sticky"], Data["is_highlighted"], 1).
			WillReturnResult(sqlmock.NewResult(1, 1))
		medium.SelectWithSpace(mock)
		format.SelectMock(mock, 1, 1)
		mock.ExpectExec(`UPDATE \"posts\"`).
			WithArgs(test.AnyTime{}, 1, Data["title"], Data["subtitle"], Data["slug"], Data["status"], Data["excerpt"],
				Data["description"], Data["html_description"], Data["is_sticky"], Data["is_highlighted"], Data["featured_medium_id"], Data["format_id"], test.AnyTime{}, 1).
			WillReturnResult(sqlmock.NewResult(1, 1))
		// Post-update reload with preloads and author links.
		SelectMock(mock)
		preloadMock(mock)
		mock.ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "post_authors"`)).
			WithArgs(1).
			WillReturnRows(sqlmock.NewRows([]string{"id", "created_at", "updated_at", "deleted_at", "author_id", "post_id"}).AddRow(1, time.Now(), time.Now(), nil, 1, 1))
		mock.ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "post_authors"`)).
			WithArgs(1).
			WillReturnRows(sqlmock.NewRows([]string{"id", "created_at", "updated_at", "deleted_at", "author_id", "post_id"}).AddRow(1, time.Now(), time.Now(), nil, 1, 1))
		mock.ExpectCommit()
		e.PUT(path).
			WithPath("page_id", "1").
			WithHeaders(headers).
			WithJSON(Data).
			Expect().
			Status(http.StatusOK).JSON().
			Object().ContainsMap(pageData)
		test.ExpectationsMet(t, mock)
	})

	t.Run("update page when featured_medium_id = 0", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		SelectMock(mock, true, 1, 1)
		mock.ExpectBegin()
		tag.SelectMock(mock, tag.Data, 1)
		medium.SelectWithSpace(mock)
		format.SelectMock(mock, 1, 1)
		mock.ExpectExec(`UPDATE "posts" SET`).
			WithArgs(test.AnyTime{}, 1).
			WillReturnResult(driver.ResultNoRows)
		mock.ExpectQuery(`INSERT INTO "tags"`).
			WithArgs(test.AnyTime{}, test.AnyTime{}, nil, 1, 1, tag.Data["name"], tag.Data["slug"], tag.Data["description"], tag.Data["html_description"], tag.Data["is_featured"], 1, 1).
			WillReturnRows(sqlmock.
				NewRows([]string{"id"}).
				AddRow(1))
		mock.ExpectExec(`INSERT INTO "post_tags"`).
			WithArgs(1, 1).
			WillReturnResult(sqlmock.NewResult(0, 1))
		mock.ExpectExec(regexp.QuoteMeta(`DELETE FROM "post_tags"`)).
			WithArgs(1, 1).
			WillReturnResult(sqlmock.NewResult(0, 1))
		category.SelectWithOutSpace(mock)
		medium.SelectWithSpace(mock)
		format.SelectMock(mock, 1, 1)
		mock.ExpectExec(`UPDATE "posts" SET`).
			WithArgs(test.AnyTime{}, 1).
			WillReturnResult(driver.ResultNoRows)
		medium.SelectWithSpace(mock)
		mock.ExpectQuery(`INSERT INTO "categories"`).
			WithArgs(test.AnyTime{}, test.AnyTime{}, nil, 1, 1, category.Data["name"], category.Data["slug"], category.Data["description"], category.Data["html_description"], category.Data["is_featured"], sqlmock.AnyArg(), sqlmock.AnyArg(), sqlmock.AnyArg(), sqlmock.AnyArg(), sqlmock.AnyArg()).
			WillReturnRows(sqlmock.
				NewRows([]string{"id", "parent_id", "medium_id"}).
				AddRow(1, 1, 1))
		mock.ExpectExec(`INSERT INTO "post_categories"`).
			WithArgs(1, 1).
			WillReturnResult(sqlmock.NewResult(0, 1))
		mock.ExpectExec(regexp.QuoteMeta(`DELETE FROM "post_categories"`)).
			WithArgs(1, 1).
			WillReturnResult(sqlmock.NewResult(0, 1))
		// Extra UPDATE that nulls featured_medium_id before the usual flow.
		format.SelectMock(mock, 1, 1)
		mock.ExpectExec(`UPDATE \"posts\"`).
			WithArgs(nil, test.AnyTime{}, 1).
			WillReturnResult(sqlmock.NewResult(1, 1))
		format.SelectMock(mock, 1, 1)
		mock.ExpectExec(`UPDATE \"posts\"`).
			WithArgs(test.AnyTime{}, Data["is_featured"], Data["is_sticky"], Data["is_highlighted"], 1).
			WillReturnResult(sqlmock.NewResult(1, 1))
		format.SelectMock(mock, 1, 1)
		mock.ExpectExec(`UPDATE \"posts\"`).
			WithArgs(test.AnyTime{}, 1, Data["title"], Data["subtitle"], Data["slug"], Data["status"], Data["excerpt"],
				Data["description"], Data["html_description"], Data["is_sticky"], Data["is_highlighted"], Data["format_id"], test.AnyTime{}, 1).
			WillReturnResult(sqlmock.NewResult(1, 1))
		SelectMock(mock)
		preloadMock(mock)
		mock.ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "post_authors"`)).
			WithArgs(1).
			WillReturnRows(sqlmock.NewRows([]string{"id", "created_at", "updated_at", "deleted_at", "author_id", "post_id"}).AddRow(1, time.Now(), time.Now(), nil, 1, 1))
		mock.ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "post_authors"`)).
			WithArgs(1).
			WillReturnRows(sqlmock.NewRows([]string{"id", "created_at", "updated_at", "deleted_at", "author_id", "post_id"}).AddRow(1, time.Now(), time.Now(), nil, 1, 1))
		mock.ExpectCommit()
		// Zero medium clears the association; fixture restored afterwards.
		Data["featured_medium_id"] = 0
		e.PUT(path).
			WithPath("page_id", "1").
			WithHeaders(headers).
			WithJSON(Data).
			Expect().
			Status(http.StatusOK).JSON().
			Object().ContainsMap(pageData)
		test.ExpectationsMet(t, mock)
		Data["featured_medium_id"] = 1
	})

	t.Run("update page when meili is down", func(t *testing.T) {
		// Same happy-path expectations, but indexing fails -> rollback + 500.
		test.DisableMeiliGock(testServer.URL)
		test.CheckSpaceMock(mock)
		SelectMock(mock, true, 1, 1)
		mock.ExpectBegin()
		tag.SelectMock(mock, tag.Data, 1)
		medium.SelectWithSpace(mock)
		format.SelectMock(mock, 1, 1)
		mock.ExpectExec(`UPDATE "posts" SET`).
			WithArgs(test.AnyTime{}, 1).
			WillReturnResult(driver.ResultNoRows)
		mock.ExpectQuery(`INSERT INTO "tags"`).
			WithArgs(test.AnyTime{}, test.AnyTime{}, nil, 1, 1, tag.Data["name"], tag.Data["slug"], tag.Data["description"], tag.Data["html_description"], tag.Data["is_featured"], 1, 1).
			WillReturnRows(sqlmock.
				NewRows([]string{"id"}).
				AddRow(1))
		mock.ExpectExec(`INSERT INTO "post_tags"`).
			WithArgs(1, 1).
			WillReturnResult(sqlmock.NewResult(0, 1))
		mock.ExpectExec(regexp.QuoteMeta(`DELETE FROM "post_tags"`)).
			WithArgs(1, 1).
			WillReturnResult(sqlmock.NewResult(0, 1))
		category.SelectWithOutSpace(mock)
		medium.SelectWithSpace(mock)
		format.SelectMock(mock, 1, 1)
		mock.ExpectExec(`UPDATE "posts" SET`).
			WithArgs(test.AnyTime{}, 1).
			WillReturnResult(driver.ResultNoRows)
		medium.SelectWithSpace(mock)
		mock.ExpectQuery(`INSERT INTO "categories"`).
			WithArgs(test.AnyTime{}, test.AnyTime{}, nil, 1, 1, category.Data["name"], category.Data["slug"], category.Data["description"], category.Data["html_description"], category.Data["is_featured"], sqlmock.AnyArg(), sqlmock.AnyArg(), sqlmock.AnyArg(), sqlmock.AnyArg(), sqlmock.AnyArg()).
			WillReturnRows(sqlmock.
				NewRows([]string{"id", "parent_id", "medium_id"}).
				AddRow(1, 1, 1))
		mock.ExpectExec(`INSERT INTO "post_categories"`).
			WithArgs(1, 1).
			WillReturnResult(sqlmock.NewResult(0, 1))
		mock.ExpectExec(regexp.QuoteMeta(`DELETE FROM "post_categories"`)).
			WithArgs(1, 1).
			WillReturnResult(sqlmock.NewResult(0, 1))
		medium.SelectWithSpace(mock)
		format.SelectMock(mock, 1, 1)
		mock.ExpectExec(`UPDATE \"posts\"`).
			WithArgs(test.AnyTime{}, Data["is_featured"], Data["is_sticky"], Data["is_highlighted"], 1).
			WillReturnResult(sqlmock.NewResult(1, 1))
		medium.SelectWithSpace(mock)
		format.SelectMock(mock, 1, 1)
		mock.ExpectExec(`UPDATE \"posts\"`).
			WithArgs(test.AnyTime{}, 1, Data["title"], Data["subtitle"], Data["slug"], Data["status"], Data["excerpt"],
				Data["description"], Data["html_description"], Data["is_sticky"], Data["is_highlighted"], Data["featured_medium_id"], Data["format_id"], test.AnyTime{}, 1).
			WillReturnResult(sqlmock.NewResult(1, 1))
		SelectMock(mock)
		preloadMock(mock)
		mock.ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "post_authors"`)).
			WithArgs(1).
			WillReturnRows(sqlmock.NewRows([]string{"id", "created_at", "updated_at", "deleted_at", "author_id", "post_id"}).AddRow(1, time.Now(), time.Now(), nil, 1, 1))
		mock.ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "post_authors"`)).
			WithArgs(1).
			WillReturnRows(sqlmock.NewRows([]string{"id", "created_at", "updated_at", "deleted_at", "author_id", "post_id"}).AddRow(1, time.Now(), time.Now(), nil, 1, 1))
		mock.ExpectRollback()
		e.PUT(path).
			WithPath("page_id", "1").
			WithHeaders(headers).
			WithJSON(Data).
			Expect().
			Status(http.StatusInternalServerError)
		test.ExpectationsMet(t, mock)
	})
}
<file_sep>/service/podcast/action/list.go
package podcast
import (
"fmt"
"net/http"
"net/url"
"github.com/factly/dega-server/config"
"github.com/factly/dega-server/service/podcast/model"
"github.com/factly/x/errorx"
"github.com/factly/x/loggerx"
"github.com/factly/x/meilisearchx"
"github.com/factly/x/middlewarex"
"github.com/factly/x/paginationx"
"github.com/factly/x/renderx"
)
// list response envelope: the total number of matching podcasts plus the
// current page of rows.
type paging struct {
	Total int64           `json:"total"`
	Nodes []model.Podcast `json:"nodes"`
}
// list - Get all podcasts
// @Summary Show all podcasts
// @Description Get all podcasts
// @Tags Podcast
// @ID get-all-podcasts
// @Produce json
// @Param X-User header string true "User ID"
// @Param X-Space header string true "Space ID"
// @Param limit query string false "limit per page"
// @Param page query string false "page number"
// @Param q query string false "Query"
// @Param category query string false "Category"
// @Param primary_category query string false "Primary Category"
// @Param language query string false "Language"
// @Param sort query string false "Sort"
// @Success 200 {object} paging
// @Router /podcast [get]
func list(w http.ResponseWriter, r *http.Request) {
	// Podcasts are space-scoped; reject requests without a valid X-Space.
	sID, err := middlewarex.GetSpace(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}
	searchQuery := r.URL.Query().Get("q")
	sort := r.URL.Query().Get("sort")
	// Filters
	u, _ := url.Parse(r.URL.String())
	queryMap := u.Query()
	// Build a Meilisearch filter expression from the category,
	// primary_category and language query parameters (empty when none given).
	filters := generateFilters(queryMap["category"], queryMap["primary_category"], queryMap["language"])
	filteredPodcastIDs := make([]uint, 0)
	if filters != "" {
		// Restrict attribute filters to the current space.
		filters = fmt.Sprint(filters, " AND space_id=", sID)
	}
	result := paging{}
	result.Nodes = make([]model.Podcast, 0)
	if filters != "" || searchQuery != "" {
		// When a query or filter is present, resolve the matching podcast IDs
		// through Meilisearch first; the rows are fetched from the DB below.
		var hits []interface{}
		var res map[string]interface{}
		if searchQuery != "" {
			hits, err = meilisearchx.SearchWithQuery("dega", searchQuery, filters, "podcast")
		} else {
			res, err = meilisearchx.SearchWithoutQuery("dega", filters, "podcast")
			if _, found := res["hits"]; found {
				hits = res["hits"].([]interface{})
			}
		}
		if err != nil {
			// NOTE(review): search failures are logged and answered with an
			// empty 200 page rather than an error — presumably deliberate
			// best-effort behaviour; confirm before changing.
			loggerx.Error(err)
			renderx.JSON(w, http.StatusOK, result)
			return
		}
		filteredPodcastIDs = meilisearchx.GetIDArray(hits)
		if len(filteredPodcastIDs) == 0 {
			renderx.JSON(w, http.StatusOK, result)
			return
		}
	}
	// Only "asc" is honoured; any other value sorts newest first.
	if sort != "asc" {
		sort = "desc"
	}
	offset, limit := paginationx.Parse(r.URL.Query())
	tx := config.DB.Model(&model.Podcast{}).Preload("Categories").Preload("Medium").Preload("PrimaryCategory").Where(&model.Podcast{
		SpaceID: uint(sID),
	}).Order("created_at " + sort)
	if len(filteredPodcastIDs) > 0 {
		err = tx.Where(filteredPodcastIDs).Count(&result.Total).Offset(offset).Limit(limit).Find(&result.Nodes).Error
	} else {
		err = tx.Count(&result.Total).Offset(offset).Limit(limit).Find(&result.Nodes).Error
	}
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.DBError()))
		return
	}
	renderx.JSON(w, http.StatusOK, result)
}
// generateFilters assembles a Meilisearch filter string out of the requested
// category IDs, primary category IDs and languages, joining the per-field
// clauses with " AND ". It returns "" when no values are supplied.
func generateFilters(categoryIDs, primaryCatID, language []string) string {
	filters := ""
	fieldValues := []struct {
		values []string
		field  string
	}{
		{categoryIDs, "category_ids"},
		{primaryCatID, "primary_category_id"},
		{language, "language"},
	}
	for _, fv := range fieldValues {
		if len(fv.values) > 0 {
			filters = fmt.Sprint(filters, meilisearchx.GenerateFieldFilter(fv.values, fv.field), " AND ")
		}
	}
	// Drop the dangling " AND " left after the final clause.
	if filters != "" && filters[len(filters)-5:] == " AND " {
		filters = filters[:len(filters)-5]
	}
	return filters
}
<file_sep>/service/core/action/space/my.go
package space
import (
"encoding/json"
"io/ioutil"
"net/http"
"strconv"
"github.com/factly/dega-server/service/core/action/user"
"github.com/spf13/viper"
"github.com/factly/dega-server/config"
"github.com/factly/dega-server/service/core/action/policy"
"github.com/factly/dega-server/service/core/model"
"github.com/factly/x/errorx"
"github.com/factly/x/loggerx"
"github.com/factly/x/middlewarex"
"github.com/factly/x/renderx"
)
// organisationUser is the caller's membership record in an organisation,
// carrying the role they hold there.
type organisationUser struct {
	config.Base
	Role string `gorm:"column:role" json:"role"`
}
// orgWithSpace is one element of the /core/spaces response: an organisation
// together with the caller's permission in it, its applications and its
// spaces (each annotated with the caller's space-level permissions).
type orgWithSpace struct {
	config.Base
	Title        string                 `gorm:"column:title" json:"title"`
	Slug         string                 `gorm:"column:slug;unique_index" json:"slug"`
	Permission   organisationUser       `json:"permission"`
	Applications []application          `json:"applications"`
	Spaces       []spaceWithPermissions `json:"spaces"`
}
// application describes an application registered under an organisation,
// with an optional medium (logo/image).
type application struct {
	config.Base
	Name        string        `gorm:"column:name" json:"name"`
	Description string        `gorm:"column:description" json:"description"`
	URL         string        `gorm:"column:url" json:"url"`
	MediumID    *uint         `gorm:"column:medium_id;default:NULL" json:"medium_id"`
	Medium      *model.Medium `gorm:"foreignKey:medium_id" json:"medium"`
}
// spaceWithPermissions pairs a space with the caller's permissions in it.
type spaceWithPermissions struct {
	model.Space
	Permissions []model.Permission `json:"permissions"`
}
// list - Get all spaces for a user
// @Summary Show all spaces
// @Description Get all spaces
// @Tags Space
// @ID get-all-spaces
// @Produce json
// @Param X-User header string true "User ID"
// @Param X-Space header string true "Space ID"
// @Success 200 {array} orgWithSpace
// @Router /core/spaces [get]
func my(w http.ResponseWriter, r *http.Request) {
	uID, err := middlewarex.GetUser(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}
	// Fetched all organisations of the user from the kavach service.
	req, err := http.NewRequest("GET", viper.GetString("kavach_url")+"/organisations/my", nil)
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
		return
	}
	req.Header.Set("X-User", strconv.Itoa(uID))
	req.Header.Set("Content-Type", "application/json")
	client := &http.Client{}
	resp, err := client.Do(req)
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.NetworkError()))
		return
	}
	defer resp.Body.Close()
	// NOTE(review): the read error is discarded here; a truncated body
	// surfaces as a DecodeError from the Unmarshal below.
	body, _ := ioutil.ReadAll(resp.Body)
	allOrg := []orgWithSpace{}
	err = json.Unmarshal(body, &allOrg)
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.DecodeError()))
		return
	}
	var allOrgIDs []int
	for _, each := range allOrg {
		allOrgIDs = append(allOrgIDs, int(each.ID))
	}
	// Fetched all the spaces related to all the organisations
	var allSpaces = make([]model.Space, 0)
	config.DB.Model(model.Space{}).Where("organisation_id IN (?)", allOrgIDs).Preload("Logo").Preload("LogoMobile").Preload("FavIcon").Preload("MobileIcon").Find(&allSpaces)
	// fetch all the keto policies
	policyList, err := policy.GetAllPolicies()
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
		return
	}
	// Owners get a blanket admin permission instead of per-policy lookups.
	adminPerm := model.Permission{
		Resource: "admin",
		Actions:  []string{"admin"},
	}
	// Attach each organisation's spaces, annotated with the caller's
	// permissions resolved from the keto policy list.
	result := make([]orgWithSpace, 0)
	for _, each := range allOrg {
		spaceWithPermArr := []spaceWithPermissions{}
		for _, space := range allSpaces {
			if space.OrganisationID == int(each.ID) {
				if each.Permission.Role != "owner" {
					permissions := user.GetPermissions(int(each.ID), int(space.ID), uID, policyList)
					spaceWithPerm := spaceWithPermissions{
						Space:       space,
						Permissions: permissions,
					}
					spaceWithPermArr = append(spaceWithPermArr, spaceWithPerm)
				} else {
					adminSpaceWithPerm := spaceWithPermissions{
						Space:       space,
						Permissions: []model.Permission{adminPerm},
					}
					spaceWithPermArr = append(spaceWithPermArr, adminSpaceWithPerm)
				}
			}
		}
		each.Spaces = spaceWithPermArr
		result = append(result, each)
	}
	renderx.JSON(w, http.StatusOK, result)
}
<file_sep>/service/fact-check/action/rating/route.go
package rating
import (
"github.com/factly/dega-server/config"
"github.com/factly/dega-server/util"
"github.com/go-chi/chi"
"github.com/jinzhu/gorm/dialects/postgres"
)
// rating model - request body accepted by the create/update rating handlers.
// Name is mandatory (3-50 chars); Slug is auto-generated when empty.
type rating struct {
	Name             string         `json:"name" validate:"required,min=3,max=50"`
	Slug             string         `json:"slug"`
	BackgroundColour postgres.Jsonb `json:"background_colour" validate:"required" swaggertype:"primitive,string"`
	TextColour       postgres.Jsonb `json:"text_colour" validate:"required" swaggertype:"primitive,string"`
	Description      postgres.Jsonb `json:"description" swaggertype:"primitive,string"`
	NumericValue     int            `json:"numeric_value" validate:"required"`
	MediumID         uint           `json:"medium_id"`
	MetaFields       postgres.Jsonb `json:"meta_fields" swaggertype:"primitive,string"`
}
// userContext is the request-context key carrying the acting user's ID for
// rating operations.
var userContext config.ContextKey = "rating_user"
// Router - Group of rating router. Every route is guarded by the keto policy
// check for the "ratings" resource with the matching action.
func Router() chi.Router {
	router := chi.NewRouter()
	resource := "ratings"
	// Collection endpoints.
	router.With(util.CheckKetoPolicy(resource, "get")).Get("/", list)
	router.With(util.CheckKetoPolicy(resource, "create")).Post("/", create)
	router.With(util.CheckKetoPolicy(resource, "create")).Post("/default", createDefaults)
	// Item endpoints addressed by rating_id.
	router.Route("/{rating_id}", func(sub chi.Router) {
		sub.With(util.CheckKetoPolicy(resource, "get")).Get("/", details)
		sub.With(util.CheckKetoPolicy(resource, "update")).Put("/", update)
		sub.With(util.CheckKetoPolicy(resource, "delete")).Delete("/", delete)
	})
	return router
}
<file_sep>/config/database.go
package config
import (
"fmt"
"log"
"time"
"gorm.io/gorm/logger"
"github.com/factly/x/loggerx"
"github.com/spf13/viper"
"gorm.io/driver/postgres"
"gorm.io/gorm"
)
// DB - gorm DB: the shared database handle, initialised once by SetupDB.
var DB *gorm.DB
// SetupDB is database setup: it assembles a postgres DSN from the database_*
// viper settings, opens a gorm connection with a slow-query logger, and
// stores the handle in DB. The process exits (log.Fatal) on failure.
func SetupDB() {
	fmt.Println("connecting to database ...")
	dbString := fmt.Sprint("host=", viper.GetString("database_host"), " ",
		"user=", viper.GetString("database_user"), " ",
		"password=", viper.GetString("database_password"), " ",
		"dbname=", viper.GetString("database_name"), " ",
		"port=", viper.GetInt("database_port"), " ",
		"sslmode=", viper.GetString("database_ssl_mode"))
	var err error
	DB, err = gorm.Open(postgres.Open(dbString), &gorm.Config{
		// Flag queries slower than 200ms; Info level logs every statement.
		Logger: loggerx.NewGormLogger(logger.Config{
			SlowThreshold: 200 * time.Millisecond,
			LogLevel:      logger.Info,
			Colorful:      true,
		}),
	})
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("connected to database ...")
}
<file_sep>/test/service/core/page/list_test.go
package page
import (
"net/http"
"net/http/httptest"
"regexp"
"testing"
"time"
"github.com/DATA-DOG/go-sqlmock"
"github.com/factly/dega-server/service"
"github.com/factly/dega-server/test"
"github.com/gavv/httpexpect"
"gopkg.in/h2non/gock.v1"
)
// TestPageList exercises GET /core/pages against a fully mocked database:
// an empty result set, a populated list, and pagination with ascending sort.
func TestPageList(t *testing.T) {
	mock := test.SetupMockDB()
	test.MockServer()
	testServer := httptest.NewServer(service.RegisterRoutes())
	gock.New(testServer.URL).EnableNetworking().Persist()
	defer gock.DisableNetworking()
	defer testServer.Close()
	// create httpexpect instance
	e := httpexpect.New(t, testServer.URL)
	t.Run("get empty list of pages", func(t *testing.T) {
		// No rows: expect total=0 and an empty nodes array.
		test.CheckSpaceMock(mock)
		pageCountQuery(mock, 0)
		mock.ExpectQuery(selectQuery).
			WillReturnRows(sqlmock.NewRows(columns))
		mock.ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "post_authors"`)).
			WillReturnRows(sqlmock.NewRows([]string{"id", "created_at", "updated_at", "deleted_at", "post_id", "author_id"}))
		e.GET(basePath).
			WithHeaders(headers).
			Expect().
			Status(http.StatusOK).
			JSON().
			Object().
			ContainsMap(map[string]interface{}{"total": 0})
		test.ExpectationsMet(t, mock)
	})
	t.Run("get non-empty list of pages", func(t *testing.T) {
		// Two pages returned; the first node must match pageList[0].
		test.CheckSpaceMock(mock)
		pageCountQuery(mock, len(pageList))
		mock.ExpectQuery(selectQuery).
			WithArgs(1, true, "template").
			WillReturnRows(sqlmock.NewRows(columns).
				AddRow(1, time.Now(), time.Now(), nil, 1, 1, pageList[0]["title"], pageList[0]["subtitle"], pageList[0]["slug"], pageList[0]["status"], pageList[0]["is_page"], pageList[0]["excerpt"],
					pageList[0]["description"], pageList[0]["html_description"], pageList[0]["is_featured"], pageList[0]["is_sticky"], pageList[0]["is_highlighted"], pageList[0]["featured_medium_id"], pageList[0]["format_id"], pageList[0]["published_date"], 1).
				AddRow(2, time.Now(), time.Now(), nil, 1, 1, pageList[1]["title"], pageList[1]["subtitle"], pageList[1]["slug"], pageList[1]["status"], pageList[1]["is_page"], pageList[1]["excerpt"],
					pageList[1]["description"], pageList[1]["html_description"], pageList[1]["is_featured"], pageList[1]["is_sticky"], pageList[1]["is_highlighted"], pageList[1]["featured_medium_id"], pageList[1]["format_id"], pageList[1]["published_date"], 1))
		preloadMock(mock)
		mock.ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "post_authors"`)).
			WithArgs(sqlmock.AnyArg(), sqlmock.AnyArg()).
			WillReturnRows(sqlmock.NewRows([]string{"id", "created_at", "updated_at", "deleted_at", "author_id", "post_id"}).
				AddRow(1, time.Now(), time.Now(), nil, 1, 1))
		e.GET(basePath).
			WithHeaders(headers).
			Expect().
			Status(http.StatusOK).
			JSON().
			Object().
			ContainsMap(map[string]interface{}{"total": len(pageList)}).
			Value("nodes").
			Array().
			Element(0).
			Object().
			ContainsMap(pageList[0])
		test.ExpectationsMet(t, mock)
	})
	t.Run("get pages with paiganation", func(t *testing.T) {
		// page=2, limit=1, ascending order: the second fixture is the only
		// row on the page, but total still reflects every record.
		test.CheckSpaceMock(mock)
		pageCountQuery(mock, len(pageList))
		mock.ExpectQuery(selectQuery).
			WithArgs(1, true, "template").
			WillReturnRows(sqlmock.NewRows(columns).
				AddRow(2, time.Now(), time.Now(), nil, 1, 1, pageList[1]["title"], pageList[1]["subtitle"], pageList[1]["slug"], pageList[1]["status"], pageList[1]["is_page"], pageList[1]["excerpt"],
					pageList[1]["description"], pageList[1]["html_description"], pageList[1]["is_featured"], pageList[1]["is_sticky"], pageList[1]["is_highlighted"], pageList[1]["featured_medium_id"], pageList[1]["format_id"], pageList[1]["published_date"], 1))
		preloadMock(mock)
		mock.ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "post_authors"`)).
			WithArgs(sqlmock.AnyArg()).
			WillReturnRows(sqlmock.NewRows([]string{"id", "created_at", "updated_at", "deleted_at", "author_id", "post_id"}).
				AddRow(1, time.Now(), time.Now(), nil, 1, 1))
		e.GET(basePath).
			WithQueryObject(map[string]interface{}{
				"page":  2,
				"limit": 1,
				"sort":  "asc",
			}).
			WithHeaders(headers).
			Expect().
			Status(http.StatusOK).
			JSON().
			Object().
			ContainsMap(map[string]interface{}{"total": len(pageList)}).
			Value("nodes").
			Array().
			Element(0).
			Object().
			ContainsMap(pageList[1])
		test.ExpectationsMet(t, mock)
	})
}
<file_sep>/service/core/action/space/update.go
package space
import (
"context"
"encoding/json"
"errors"
"net/http"
"strconv"
"github.com/factly/dega-server/config"
"github.com/factly/dega-server/service/core/model"
"github.com/factly/dega-server/util"
"github.com/factly/x/errorx"
"github.com/factly/x/loggerx"
"github.com/factly/x/meilisearchx"
"github.com/factly/x/middlewarex"
"github.com/factly/x/renderx"
"github.com/factly/x/slugx"
"github.com/factly/x/validationx"
"github.com/go-chi/chi"
)
// update - Update space
// @Summary Update space
// @Description Update space
// @Tags Space
// @ID update-space
// @Consume json
// @Produce json
// @Param X-User header string true "User ID"
// @Param space_id path string true "Space ID"
// @Param Space body space true "Space Object"
// @Success 200 {object} model.Space
// @Router /core/spaces/{space_id} [put]
func update(w http.ResponseWriter, r *http.Request) {
	uID, err := middlewarex.GetUser(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}
	spaceID := chi.URLParam(r, "space_id")
	id, err := strconv.Atoi(spaceID)
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InvalidID()))
		return
	}
	space := &space{}
	err = json.NewDecoder(r.Body).Decode(&space)
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.DecodeError()))
		return
	}
	validationError := validationx.Check(space)
	if validationError != nil {
		loggerx.Error(errors.New("validation error"))
		errorx.Render(w, validationError)
		return
	}
	// BUGFIX: a bare return here previously sent an empty 200 response.
	// A space must carry its organisation ID for the keto permission check,
	// so reject the request explicitly.
	if space.OrganisationID == 0 {
		loggerx.Error(errors.New("organisation_id is required"))
		errorx.Render(w, errorx.Parser(errorx.InvalidID()))
		return
	}
	// Only users with "update" permission on the organisation may proceed.
	err = util.CheckSpaceKetoPermission("update", uint(space.OrganisationID), uint(uID))
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.GetMessage(err.Error(), http.StatusUnauthorized)))
		return
	}
	result := model.Space{}
	result.ID = uint(id)
	// check record exists or not
	err = config.DB.First(&result).Error
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.RecordNotFound()))
		return
	}
	tx := config.DB.WithContext(context.WithValue(r.Context(), userContext, uID)).Begin()
	// For each media column a zero ID means "unset": gorm's struct Updates
	// skips zero values, so null the column with a map update and pass nil
	// into the final struct update.
	logoID := &space.LogoID
	result.LogoID = &space.LogoID
	if space.LogoID == 0 {
		err = tx.Model(&result).Updates(map[string]interface{}{"logo_id": nil}).Error
		logoID = nil
		if err != nil {
			tx.Rollback()
			loggerx.Error(err)
			errorx.Render(w, errorx.Parser(errorx.DBError()))
			return
		}
	}
	logoMobileID := &space.LogoMobileID
	result.LogoMobileID = &space.LogoMobileID
	if space.LogoMobileID == 0 {
		err = tx.Model(&result).Updates(map[string]interface{}{"logo_mobile_id": nil}).Error
		logoMobileID = nil
		if err != nil {
			tx.Rollback()
			loggerx.Error(err)
			errorx.Render(w, errorx.Parser(errorx.DBError()))
			return
		}
	}
	favIconID := &space.FavIconID
	result.FavIconID = &space.FavIconID
	if space.FavIconID == 0 {
		err = tx.Model(&result).Updates(map[string]interface{}{"fav_icon_id": nil}).Error
		favIconID = nil
		if err != nil {
			tx.Rollback()
			loggerx.Error(err)
			errorx.Render(w, errorx.Parser(errorx.DBError()))
			return
		}
	}
	mobileIconID := &space.MobileIconID
	result.MobileIconID = &space.MobileIconID
	if space.MobileIconID == 0 {
		err = tx.Model(&result).Updates(map[string]interface{}{"mobile_icon_id": nil}).Error
		mobileIconID = nil
		if err != nil {
			tx.Rollback()
			loggerx.Error(err)
			errorx.Render(w, errorx.Parser(errorx.DBError()))
			return
		}
	}
	// Keep the slug untouched when unchanged; otherwise approve the
	// requested (or name-derived) slug for uniqueness.
	var spaceSlug string
	if result.Slug == space.Slug {
		spaceSlug = result.Slug
	} else if space.Slug != "" && slugx.Check(space.Slug) {
		spaceSlug = approveSpaceSlug(space.Slug)
	} else {
		spaceSlug = approveSpaceSlug(slugx.Make(space.Name))
	}
	err = tx.Model(&result).Updates(model.Space{
		Base:              config.Base{UpdatedByID: uint(uID)},
		Name:              space.Name,
		SiteTitle:         space.SiteTitle,
		Slug:              spaceSlug,
		Description:       space.Description,
		TagLine:           space.TagLine,
		SiteAddress:       space.SiteAddress,
		LogoID:            logoID,
		FavIconID:         favIconID,
		MobileIconID:      mobileIconID,
		LogoMobileID:      logoMobileID,
		Analytics:         space.Analytics,
		VerificationCodes: space.VerificationCodes,
		SocialMediaURLs:   space.SocialMediaURLs,
		ContactInfo:       space.ContactInfo,
		HeaderCode:        space.HeaderCode,
		FooterCode:        space.FooterCode,
		MetaFields:        space.MetaFields,
	}).Preload("Logo").Preload("LogoMobile").Preload("FavIcon").Preload("MobileIcon").First(&result).Error
	if err != nil {
		tx.Rollback()
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.DBError()))
		return
	}
	// Update into meili index
	meiliObj := map[string]interface{}{
		"id":              result.ID,
		"kind":            "space",
		"name":            result.Name,
		"slug":            result.Slug,
		"description":     result.Description,
		"site_title":      result.SiteTitle,
		"site_address":    result.SiteAddress,
		"tag_line":        result.TagLine,
		"organisation_id": result.OrganisationID,
		"analytics":       result.Analytics,
	}
	err = meilisearchx.UpdateDocument("dega", meiliObj)
	if err != nil {
		tx.Rollback()
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
		return
	}
	tx.Commit()
	// Notify downstream consumers over NATS when configured.
	if util.CheckNats() {
		if err = util.NC.Publish("space.updated", result); err != nil {
			loggerx.Error(err)
			errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
			return
		}
	}
	renderx.JSON(w, http.StatusOK, result)
}
<file_sep>/service/fact-check/action/claimant/update.go
package claimant
import (
"encoding/json"
"errors"
"net/http"
"reflect"
"strconv"
"github.com/factly/dega-server/config"
"github.com/factly/dega-server/service/fact-check/model"
"github.com/factly/dega-server/test"
"github.com/factly/dega-server/util"
"github.com/factly/x/errorx"
"github.com/factly/x/loggerx"
"github.com/factly/x/meilisearchx"
"github.com/factly/x/middlewarex"
"github.com/factly/x/renderx"
"github.com/factly/x/slugx"
"github.com/factly/x/validationx"
"github.com/go-chi/chi"
"gorm.io/gorm"
)
// update - Update claimant by id
// @Summary Update a claimant by id
// @Description Update claimant by ID
// @Tags Claimant
// @ID update-claimant-by-id
// @Produce json
// @Consume json
// @Param X-User header string true "User ID"
// @Param X-Space header string true "Space ID"
// @Param claimant_id path string true "Claimant ID"
// @Param Claimant body claimant false "Claimant"
// @Success 200 {object} model.Claimant
// @Router /fact-check/claimants/{claimant_id} [put]
func update(w http.ResponseWriter, r *http.Request) {
	sID, err := middlewarex.GetSpace(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}
	uID, err := middlewarex.GetUser(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}
	claimantID := chi.URLParam(r, "claimant_id")
	id, err := strconv.Atoi(claimantID)
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InvalidID()))
		return
	}
	claimant := &claimant{}
	err = json.NewDecoder(r.Body).Decode(&claimant)
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.DecodeError()))
		return
	}
	validationError := validationx.Check(claimant)
	if validationError != nil {
		loggerx.Error(errors.New("validation error"))
		errorx.Render(w, validationError)
		return
	}
	result := model.Claimant{}
	result.ID = uint(id)
	// check record exists or not (scoped to the caller's space)
	err = config.DB.Where(&model.Claimant{
		SpaceID: uint(sID),
	}).First(&result).Error
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.RecordNotFound()))
		return
	}
	var claimantSlug string
	// Get table name
	stmt := &gorm.Statement{DB: config.DB}
	_ = stmt.Parse(&model.Claimant{})
	tableName := stmt.Schema.Table
	// Keep the slug untouched when unchanged; otherwise approve the
	// requested slug, falling back to one generated from the name.
	if result.Slug == claimant.Slug {
		claimantSlug = result.Slug
	} else if claimant.Slug != "" && slugx.Check(claimant.Slug) {
		claimantSlug = slugx.Approve(&config.DB, claimant.Slug, sID, tableName)
	} else {
		claimantSlug = slugx.Approve(&config.DB, slugx.Make(claimant.Name), sID, tableName)
	}
	// Check if claimant with same name exist
	if claimant.Name != result.Name && util.CheckName(uint(sID), claimant.Name, tableName) {
		loggerx.Error(errors.New(`claimant with same name exist`))
		errorx.Render(w, errorx.Parser(errorx.SameNameExist()))
		return
	}
	// Store HTML description
	// NOTE(review): test.NilJsonb comes from the test package — a production
	// dependency on test helpers is worth revisiting.
	var description string
	if len(claimant.Description.RawMessage) > 0 && !reflect.DeepEqual(claimant.Description, test.NilJsonb()) {
		description, err = util.HTMLDescription(claimant.Description)
		if err != nil {
			loggerx.Error(err)
			errorx.Render(w, errorx.Parser(errorx.GetMessage("cannot parse claimant description", http.StatusUnprocessableEntity)))
			return
		}
	}
	tx := config.DB.Begin()
	// A zero medium ID means "unset": gorm's struct Updates skips zero
	// values, so null the column explicitly and pass nil to the update.
	mediumID := &claimant.MediumID
	result.MediumID = &claimant.MediumID
	if claimant.MediumID == 0 {
		err = tx.Model(&result).Updates(map[string]interface{}{"medium_id": nil}).Error
		mediumID = nil
		if err != nil {
			tx.Rollback()
			loggerx.Error(err)
			errorx.Render(w, errorx.Parser(errorx.DBError()))
			return
		}
	}
	err = tx.Model(&result).Updates(model.Claimant{
		Base:            config.Base{UpdatedByID: uint(uID)},
		Name:            claimant.Name,
		Slug:            claimantSlug,
		MediumID:        mediumID,
		TagLine:         claimant.TagLine,
		Description:     claimant.Description,
		HTMLDescription: description,
		MetaFields:      claimant.MetaFields,
	}).Preload("Medium").First(&result).Error
	if err != nil {
		tx.Rollback()
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.DBError()))
		return
	}
	// Update into meili index
	meiliObj := map[string]interface{}{
		"id":          result.ID,
		"kind":        "claimant",
		"name":        result.Name,
		"slug":        result.Slug,
		"description": result.Description,
		"tag_line":    result.TagLine,
		"space_id":    result.SpaceID,
	}
	err = meilisearchx.UpdateDocument("dega", meiliObj)
	if err != nil {
		tx.Rollback()
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
		return
	}
	tx.Commit()
	// Notify downstream consumers over NATS when configured.
	if util.CheckNats() {
		if err = util.NC.Publish("claimant.updated", result); err != nil {
			loggerx.Error(err)
			errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
			return
		}
	}
	renderx.JSON(w, http.StatusOK, result)
}
<file_sep>/cmd/super-organisation.go
package cmd
import (
"log"
"github.com/factly/dega-server/config"
"github.com/spf13/cobra"
)
// init registers the create-super-org subcommand on the dega-server root command.
func init() {
	rootCmd.AddCommand(superOrgCmd)
}
// superOrgCmd seeds the super organisation: it connects to the database and
// delegates to config.CreateSuperOrganisation. Failures are logged but do
// not abort the process.
var superOrgCmd = &cobra.Command{
	Use:   "create-super-org",
	Short: "Creates super organisation for dega-server.",
	Run: func(cmd *cobra.Command, args []string) {
		// db setup
		config.SetupDB()
		err := config.CreateSuperOrganisation()
		if err != nil {
			log.Println(err)
		}
	},
}
<file_sep>/service/podcast/model/migration.go
package model
import "github.com/factly/dega-server/config"
// Migration - core models: auto-migrates the podcast service tables
// (episodes, podcasts and the episode-author join table). Any migration
// error is deliberately ignored.
func Migration() {
	_ = config.DB.AutoMigrate(
		&Episode{},
		&Podcast{},
		&EpisodeAuthor{},
	)
}
<file_sep>/service/podcast/action/episode/update.go
package episode
import (
"encoding/json"
"errors"
"fmt"
"net/http"
"reflect"
"strconv"
"github.com/factly/dega-server/config"
"github.com/factly/dega-server/service/core/action/author"
"github.com/factly/dega-server/service/podcast/model"
"github.com/factly/dega-server/test"
"github.com/factly/dega-server/util"
"github.com/factly/dega-server/util/arrays"
"github.com/factly/x/errorx"
"github.com/factly/x/loggerx"
"github.com/factly/x/meilisearchx"
"github.com/factly/x/middlewarex"
"github.com/factly/x/renderx"
"github.com/factly/x/slugx"
"github.com/factly/x/validationx"
"github.com/go-chi/chi"
"gorm.io/gorm"
)
// update - Update episode by id
// @Summary Update a episode by id
// @Description Update episode by ID
// @Tags Episode
// @ID update-episode-by-id
// @Produce json
// @Consume json
// @Param X-User header string true "User ID"
// @Param episode_id path string true "Episode ID"
// @Param X-Space header string true "Space ID"
// @Param Episode body episode false "Episode"
// @Success 200 {object} episodeData
// @Router /podcast/episodes/{episode_id} [put]
func update(w http.ResponseWriter, r *http.Request) {
	sID, err := middlewarex.GetSpace(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}
	uID, err := middlewarex.GetUser(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}
	episodeID := chi.URLParam(r, "episode_id")
	id, err := strconv.Atoi(episodeID)
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InvalidID()))
		return
	}
	result := &episodeData{}
	result.Episode.ID = uint(id)
	episode := &episode{}
	err = json.NewDecoder(r.Body).Decode(&episode)
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.DecodeError()))
		return
	}
	validationError := validationx.Check(episode)
	if validationError != nil {
		loggerx.Error(errors.New("validation error"))
		errorx.Render(w, validationError)
		return
	}
	// check record exists or not (scoped to the caller's space)
	err = config.DB.Where(&model.Episode{
		SpaceID: uint(sID),
	}).First(&result.Episode).Error
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.RecordNotFound()))
		return
	}
	var episodeSlug string
	// Get table name for the slug uniqueness check.
	stmt := &gorm.Statement{DB: config.DB}
	_ = stmt.Parse(&model.Episode{})
	tableName := stmt.Schema.Table
	// Keep the slug untouched when unchanged; otherwise approve the
	// requested (or title-derived) slug exactly once.
	if result.Slug == episode.Slug {
		episodeSlug = result.Slug
	} else if episode.Slug != "" && slugx.Check(episode.Slug) {
		episodeSlug = slugx.Approve(&config.DB, episode.Slug, sID, tableName)
	} else {
		episodeSlug = slugx.Approve(&config.DB, slugx.Make(episode.Title), sID, tableName)
	}
	// Store HTML description
	var description string
	if len(episode.Description.RawMessage) > 0 && !reflect.DeepEqual(episode.Description, test.NilJsonb()) {
		description, err = util.HTMLDescription(episode.Description)
		if err != nil {
			loggerx.Error(err)
			errorx.Render(w, errorx.Parser(errorx.GetMessage("cannot parse episode description", http.StatusUnprocessableEntity)))
			return
		}
	}
	tx := config.DB.Begin()
	// A zero medium/podcast ID means "unset": gorm's struct Updates skips
	// zero values, so null the columns explicitly and pass nil below.
	mediumID := &episode.MediumID
	result.MediumID = &episode.MediumID
	if episode.MediumID == 0 {
		err = tx.Model(&result.Episode).Updates(map[string]interface{}{"medium_id": nil}).Error
		mediumID = nil
		if err != nil {
			tx.Rollback()
			loggerx.Error(err)
			errorx.Render(w, errorx.Parser(errorx.DBError()))
			return
		}
	}
	podcastID := &episode.PodcastID
	result.PodcastID = &episode.PodcastID
	if episode.PodcastID == 0 {
		err = tx.Model(&result.Episode).Updates(map[string]interface{}{"podcast_id": nil}).Error
		podcastID = nil
		if err != nil {
			tx.Rollback()
			loggerx.Error(err)
			errorx.Render(w, errorx.Parser(errorx.DBError()))
			return
		}
	}
	// PublishedDate is updated separately with Select so it can be cleared.
	tx.Model(&result.Episode).Select("PublishedDate").Updates(model.Episode{PublishedDate: episode.PublishedDate})
	// BUGFIX: use the already-approved episodeSlug directly. The previous
	// code ran slugx.Approve a second time, which appends a numeric suffix
	// because the slug now exists in the table.
	err = tx.Model(&result.Episode).Updates(model.Episode{
		Base:            config.Base{UpdatedByID: uint(uID)},
		Title:           episode.Title,
		HTMLDescription: description,
		Description:     episode.Description,
		Slug:            episodeSlug,
		Season:          episode.Season,
		Episode:         episode.Episode,
		AudioURL:        episode.AudioURL,
		PodcastID:       podcastID,
		MediumID:        mediumID,
		MetaFields:      episode.MetaFields,
	}).Preload("Medium").Preload("Podcast").Preload("Podcast.Medium").First(&result.Episode).Error
	if err != nil {
		tx.Rollback()
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.DBError()))
		return
	}
	// fetch old authors
	prevEpisodeAuthors := make([]model.EpisodeAuthor, 0)
	tx.Model(&model.EpisodeAuthor{}).Where(&model.EpisodeAuthor{
		EpisodeID: uint(result.Episode.ID),
	}).Find(&prevEpisodeAuthors)
	prevAuthorIDs := make([]uint, 0)
	for _, each := range prevEpisodeAuthors {
		prevAuthorIDs = append(prevAuthorIDs, each.AuthorID)
	}
	toCreateIDs, toDeleteIDs := arrays.Difference(prevAuthorIDs, episode.AuthorIDs)
	if len(toDeleteIDs) > 0 {
		// BUGFIX: scope the delete to this episode. The previous query only
		// matched on author_id and therefore removed those authors'
		// associations from every episode in the table.
		tx.Model(&model.EpisodeAuthor{}).Where("author_id IN (?) AND episode_id = ?", toDeleteIDs, result.Episode.ID).Delete(&model.EpisodeAuthor{})
	}
	if len(toCreateIDs) > 0 {
		createEpisodeAuthors := make([]model.EpisodeAuthor, 0)
		for _, each := range toCreateIDs {
			epiAuth := model.EpisodeAuthor{
				EpisodeID: uint(result.Episode.ID),
				AuthorID:  each,
			}
			createEpisodeAuthors = append(createEpisodeAuthors, epiAuth)
		}
		if err = tx.Model(&model.EpisodeAuthor{}).Create(&createEpisodeAuthors).Error; err != nil {
			tx.Rollback()
			loggerx.Error(err)
			errorx.Render(w, errorx.Parser(errorx.DBError()))
			return
		}
	}
	// Fetch current authors
	authorMap, err := author.All(r.Context())
	if err != nil {
		// Roll back so the open transaction is not leaked on this path.
		tx.Rollback()
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.DBError()))
		return
	}
	authorEpisodes := make([]model.EpisodeAuthor, 0)
	tx.Model(&model.EpisodeAuthor{}).Where(&model.EpisodeAuthor{
		EpisodeID: uint(id),
	}).Find(&authorEpisodes)
	for _, each := range authorEpisodes {
		result.Authors = append(result.Authors, authorMap[fmt.Sprint(each.AuthorID)])
	}
	// Update into meili index
	var publishedDate int64
	if result.PublishedDate == nil {
		publishedDate = 0
	} else {
		publishedDate = result.PublishedDate.Unix()
	}
	meiliObj := map[string]interface{}{
		"id":             result.Episode.ID,
		"kind":           "episode",
		"title":          result.Title,
		"slug":           result.Slug,
		"season":         result.Season,
		"episode":        result.Episode,
		"audio_url":      result.AudioURL,
		"podcast_id":     result.PodcastID,
		"description":    result.Description,
		"published_date": publishedDate,
		"space_id":       result.SpaceID,
		"medium_id":      result.MediumID,
	}
	err = meilisearchx.UpdateDocument("dega", meiliObj)
	if err != nil {
		tx.Rollback()
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
		return
	}
	tx.Commit()
	// Notify downstream consumers over NATS when configured.
	if util.CheckNats() {
		if err = util.NC.Publish("episode.updated", result); err != nil {
			loggerx.Error(err)
			errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
			return
		}
	}
	renderx.JSON(w, http.StatusOK, result)
}
<file_sep>/service/core/action/request/organisation/delete.go
package organisation
import (
"net/http"
"strconv"
"github.com/factly/dega-server/config"
"github.com/factly/dega-server/service/core/model"
"github.com/factly/x/errorx"
"github.com/factly/x/loggerx"
"github.com/factly/x/renderx"
"github.com/go-chi/chi"
)
// delete - Delete Organisation permission request by id
// @Summary Delete a Organisation permission request
// @Description Delete Organisation permission request by ID
// @Tags Organisation_Permissions_Request
// @ID delete-org-permission-request-by-id
// @Param X-User header string true "User ID"
// @Param X-Space header string true "Space ID"
// @Param request_id path string true "Request ID"
// @Success 200
// @Failure 400 {array} string
// @Router /core/requests/organisations/{request_id} [delete]
func delete(w http.ResponseWriter, r *http.Request) {
	requestID := chi.URLParam(r, "request_id")
	id, err := strconv.Atoi(requestID)
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InvalidID()))
		return
	}
	request := model.OrganisationPermissionRequest{}
	request.ID = uint(id)
	// Check if the request exist or not
	err = config.DB.First(&request).Error
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.RecordNotFound()))
		return
	}
	// BUGFIX: the delete's error was previously discarded, so a failed
	// delete still answered 200. Surface it as a DB error instead.
	if err = config.DB.Delete(&request).Error; err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.DBError()))
		return
	}
	renderx.JSON(w, http.StatusOK, nil)
}
<file_sep>/test/service/core/page/create_test.go
package page
import (
"net/http"
"net/http/httptest"
"testing"
"github.com/DATA-DOG/go-sqlmock"
"github.com/factly/dega-server/service"
"github.com/factly/dega-server/test"
"github.com/factly/dega-server/test/service/core/category"
"github.com/factly/dega-server/test/service/core/format"
"github.com/factly/dega-server/test/service/core/medium"
"github.com/factly/dega-server/test/service/core/tag"
"github.com/gavv/httpexpect"
"github.com/jinzhu/gorm/dialects/postgres"
"gopkg.in/h2non/gock.v1"
)
// TestPageCreate exercises POST on the pages endpoint against a mocked DB
// (go-sqlmock, order-sensitive), mocked upstream services (gock) and the
// real router wiring from service.RegisterRoutes().
func TestPageCreate(t *testing.T) {
	mock := test.SetupMockDB()

	test.MockServer()
	// NOTE(review): this defer is registered before the test server exists and
	// is repeated three lines below — looks redundant; confirm intent.
	defer gock.DisableNetworking()
	testServer := httptest.NewServer(service.RegisterRoutes())
	gock.New(testServer.URL).EnableNetworking().Persist()
	defer gock.DisableNetworking()
	defer testServer.Close()

	// create httpexpect instance
	e := httpexpect.New(t, testServer.URL)

	// Body failing validation -> 422.
	t.Run("Unprocessable post", func(t *testing.T) {
		test.CheckSpaceMock(mock)

		e.POST(basePath).
			WithJSON(invalidData).
			WithHeaders(headers).
			Expect().
			Status(http.StatusUnprocessableEntity)
	})

	// Missing request body -> 422.
	t.Run("Undecodable post", func(t *testing.T) {
		test.CheckSpaceMock(mock)

		e.POST(basePath).
			WithHeaders(headers).
			Expect().
			Status(http.StatusUnprocessableEntity)
	})

	// Editor-JSON that the HTML converter rejects -> 422; the valid
	// description is restored afterwards so later subtests can reuse Data.
	t.Run("parsing html description fails", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		Data["description"] = postgres.Jsonb{
			RawMessage: []byte(`{"invalid":"block"}`),
		}
		e.POST(basePath).
			WithJSON(Data).
			WithHeaders(headers).
			Expect().
			Status(http.StatusUnprocessableEntity)

		Data["description"] = postgres.Jsonb{
			RawMessage: []byte(`{"time":1617039625490,"blocks":[{"type":"paragraph","data":{"text":"Test Description"}}],"version":"2.19.0"}`),
		}
	})

	// Happy path: every DB statement is expected in the exact order the
	// handler issues them (slug check, tag/category lookups, inserts for the
	// page, its tags/categories and join rows, author insert), ending in a
	// commit and a 201 whose JSON contains pageData.
	t.Run("create page", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		slugCheckMock(mock, Data)

		tag.SelectMock(mock, tag.Data, 1)

		category.SelectWithOutSpace(mock)

		mock.ExpectBegin()
		medium.SelectWithSpace(mock)
		format.SelectMock(mock, 1, 1)
		mock.ExpectQuery(`INSERT INTO "posts"`).
			WithArgs(test.AnyTime{}, test.AnyTime{}, nil, 1, 1, Data["title"], Data["subtitle"], Data["slug"], Data["status"], Data["is_page"], Data["excerpt"], Data["description"], Data["html_description"], Data["is_featured"], Data["is_sticky"], Data["is_highlighted"], Data["format_id"], nil, 1, nil, Data["featured_medium_id"]).
			WillReturnRows(sqlmock.
				NewRows([]string{"featured_medium_id", "id"}).
				AddRow(1, 1))

		mock.ExpectQuery(`INSERT INTO "tags"`).
			WithArgs(test.AnyTime{}, test.AnyTime{}, nil, 1, 1, tag.Data["name"], tag.Data["slug"], tag.Data["description"], tag.Data["html_description"], tag.Data["is_featured"], 1, 1).
			WillReturnRows(sqlmock.
				NewRows([]string{"id"}).
				AddRow(1))

		mock.ExpectExec(`INSERT INTO "post_tags"`).
			WithArgs(1, 1).
			WillReturnResult(sqlmock.NewResult(0, 1))

		medium.SelectWithSpace(mock)
		mock.ExpectQuery(`INSERT INTO "categories"`).
			WithArgs(test.AnyTime{}, test.AnyTime{}, nil, 1, 1, category.Data["name"], category.Data["slug"], category.Data["description"], category.Data["html_description"], category.Data["is_featured"], sqlmock.AnyArg(), sqlmock.AnyArg(), sqlmock.AnyArg(), sqlmock.AnyArg(), sqlmock.AnyArg()).
			WillReturnRows(sqlmock.
				NewRows([]string{"id", "parent_id", "medium_id"}).
				AddRow(1, 1, 1))

		mock.ExpectExec(`INSERT INTO "post_categories"`).
			WithArgs(1, 1).
			WillReturnResult(sqlmock.NewResult(0, 1))

		SelectMock(mock)
		preloadMock(mock)
		pageAuthorInsertMock(mock)
		mock.ExpectCommit()

		e.POST(basePath).
			WithJSON(Data).
			WithHeaders(headers).
			Expect().
			Status(http.StatusCreated).
			JSON().
			Object().ContainsMap(pageData)
		test.ExpectationsMet(t, mock)
	})

	// Same flow with meilisearch unreachable: the transaction must roll back
	// (ExpectRollback instead of ExpectCommit) and the handler answers 500.
	t.Run("create page when meili is down", func(t *testing.T) {
		test.DisableMeiliGock(testServer.URL)
		test.CheckSpaceMock(mock)
		slugCheckMock(mock, Data)

		tag.SelectMock(mock, tag.Data, 1)

		category.SelectWithOutSpace(mock)

		mock.ExpectBegin()
		medium.SelectWithSpace(mock)
		format.SelectMock(mock, 1, 1)
		mock.ExpectQuery(`INSERT INTO "posts"`).
			WithArgs(test.AnyTime{}, test.AnyTime{}, nil, 1, 1, Data["title"], Data["subtitle"], Data["slug"], Data["status"], Data["is_page"], Data["excerpt"], Data["description"], Data["html_description"], Data["is_featured"], Data["is_sticky"], Data["is_highlighted"], Data["format_id"], nil, 1, nil, Data["featured_medium_id"]).
			WillReturnRows(sqlmock.
				NewRows([]string{"featured_medium_id", "id"}).
				AddRow(1, 1))

		mock.ExpectQuery(`INSERT INTO "tags"`).
			WithArgs(test.AnyTime{}, test.AnyTime{}, nil, 1, 1, tag.Data["name"], tag.Data["slug"], tag.Data["description"], tag.Data["html_description"], tag.Data["is_featured"], 1, 1).
			WillReturnRows(sqlmock.
				NewRows([]string{"id"}).
				AddRow(1))

		mock.ExpectExec(`INSERT INTO "post_tags"`).
			WithArgs(1, 1).
			WillReturnResult(sqlmock.NewResult(0, 1))

		medium.SelectWithSpace(mock)
		mock.ExpectQuery(`INSERT INTO "categories"`).
			WithArgs(test.AnyTime{}, test.AnyTime{}, nil, 1, 1, category.Data["name"], category.Data["slug"], category.Data["description"], category.Data["html_description"], category.Data["is_featured"], sqlmock.AnyArg(), sqlmock.AnyArg(), sqlmock.AnyArg(), sqlmock.AnyArg(), sqlmock.AnyArg()).
			WillReturnRows(sqlmock.
				NewRows([]string{"id", "parent_id", "medium_id"}).
				AddRow(1, 1, 1))

		mock.ExpectExec(`INSERT INTO "post_categories"`).
			WithArgs(1, 1).
			WillReturnResult(sqlmock.NewResult(0, 1))

		SelectMock(mock)
		preloadMock(mock)
		pageAuthorInsertMock(mock)
		mock.ExpectRollback()

		e.POST(basePath).
			WithJSON(Data).
			WithHeaders(headers).
			Expect().
			Status(http.StatusInternalServerError)
		test.ExpectationsMet(t, mock)
	})
}
<file_sep>/test/service/core/meta/testvars.go
package meta
// path is the API route under test.
var path = "/meta"

// siteUrl is the dummy site URL used by the meta tests.
var siteUrl = "testsite.com"

// linkmeta mimics a successful link-metadata scrape response
// (success flag plus the scraped open-graph style fields).
var linkmeta = map[string]interface{}{
	"success": 1,
	"meta": map[string]interface{}{
		"title":       "GitHub: Where the world builds software",
		"site_name":   "GitHub",
		"description": "GitHub is where over 50 million developers shape the future of software, together. Contribute to the open source community, manage your Git repositories, review code like a pro, track bugs and feat...",
		"image": map[string]interface{}{
			"url": "https://github.githubassets.com/images/modules/open_graph/github-mark.png",
		},
	},
}
<file_sep>/test/service/core/policy/default_test.go
package policy
import (
"net/http"
"net/http/httptest"
"testing"
"github.com/factly/dega-server/service/core/action/policy"
"github.com/factly/dega-server/service"
"github.com/factly/dega-server/test"
"github.com/gavv/httpexpect/v2"
"gopkg.in/h2non/gock.v1"
)
// TestCreateDefaultPolicy exercises the "create default policies" endpoint:
// happy path plus failure paths for an unreadable and an unparsable data file
// and an unreachable meilisearch.
func TestCreateDefaultPolicy(t *testing.T) {
	mock := test.SetupMockDB()

	test.MockServer()
	testServer := httptest.NewServer(service.RegisterRoutes())
	gock.New(testServer.URL).EnableNetworking().Persist()
	defer gock.DisableNetworking()
	defer testServer.Close()

	// create httpexpect instance
	e := httpexpect.New(t, testServer.URL)

	// NOTE(review): this points the *policy* test at formats.json, while the
	// resets below use data/policies.json — looks like a copy-paste slip from
	// the format test; confirm which file is intended.
	policy.DataFile = "../../../../data/formats.json"
	// Create a policy
	t.Run("create default policies", func(t *testing.T) {
		test.CheckSpaceMock(mock)

		e.POST(defaultsPath).
			WithHeaders(headers).
			Expect().
			Status(http.StatusCreated).
			JSON().
			Object().
			Value("nodes").
			Array()
		test.ExpectationsMet(t, mock)
	})

	// Unreadable data file -> 500; DataFile is restored afterwards.
	t.Run("when cannot open data file", func(t *testing.T) {
		policy.DataFile = "nofile.json"
		test.CheckSpaceMock(mock)

		e.POST(defaultsPath).
			WithHeaders(headers).
			Expect().
			Status(http.StatusInternalServerError)
		test.ExpectationsMet(t, mock)
		policy.DataFile = "../../../../data/policies.json"
	})

	// Syntactically invalid data file -> 500; DataFile restored afterwards.
	t.Run("when cannot parse data file", func(t *testing.T) {
		policy.DataFile = "invalidData.json"
		test.CheckSpaceMock(mock)

		e.POST(defaultsPath).
			WithHeaders(headers).
			Expect().
			Status(http.StatusInternalServerError)
		test.ExpectationsMet(t, mock)
		policy.DataFile = "../../../../data/policies.json"
	})

	// Meilisearch unreachable -> 500.
	t.Run("when meili is down", func(t *testing.T) {
		test.DisableMeiliGock(testServer.URL)
		test.CheckSpaceMock(mock)

		e.POST(defaultsPath).
			WithHeaders(headers).
			Expect().
			Status(http.StatusInternalServerError)
		test.ExpectationsMet(t, mock)
	})
}
<file_sep>/service/core/action/search/list.go
package search
import (
"encoding/json"
"errors"
"fmt"
"net/http"
"github.com/factly/x/meilisearchx"
"github.com/meilisearch/meilisearch-go"
"github.com/factly/x/errorx"
"github.com/factly/x/loggerx"
"github.com/factly/x/middlewarex"
"github.com/factly/x/renderx"
"github.com/factly/x/validationx"
)
// search - Search Entities
// @Summary Global search for all entities
// @Description Global search for all entities
// @Tags Search
// @ID search-entities
// @Produce json
// @Consume json
// @Param X-User header string true "User ID"
// @Param X-Space header string true "Space ID"
// @Param Search body searchQuery false "Search"
// @Success 200
// @Router /core/search [post]
func list(w http.ResponseWriter, r *http.Request) {
	// Search is always scoped to the caller's space.
	sID, err := middlewarex.GetSpace(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}

	// Decode and validate the search request body.
	req := &searchQuery{}
	if err = json.NewDecoder(r.Body).Decode(&req); err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.DecodeError()))
		return
	}

	if validationError := validationx.Check(req); validationError != nil {
		loggerx.Error(errors.New("validation error"))
		errorx.Render(w, validationError)
		return
	}

	// Force the space restriction, ANDed after any user-supplied filters.
	spaceFilter := fmt.Sprint("space_id=", sID)
	filters := spaceFilter
	if len(req.Filters) > 0 {
		filters = fmt.Sprint(req.Filters, " AND ", spaceFilter)
	}

	// Query the shared "dega" meilisearch index.
	result, err := meilisearchx.Client.Search("dega").Search(meilisearch.SearchRequest{
		Query:        req.Query,
		Limit:        req.Limit,
		Filters:      filters,
		FacetFilters: req.FacetFilters,
	})
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
		return
	}

	// Only the hit list is returned, not the full search envelope.
	renderx.JSON(w, http.StatusOK, result.Hits)
}
<file_sep>/service/core/action/post/update.go
package post
import (
"context"
"encoding/json"
"errors"
"fmt"
"net/http"
"reflect"
"strconv"
"time"
"github.com/factly/dega-server/config"
"github.com/factly/dega-server/service/core/action/author"
"github.com/factly/dega-server/service/core/model"
factCheckModel "github.com/factly/dega-server/service/fact-check/model"
"github.com/factly/dega-server/test"
"github.com/factly/dega-server/util"
"github.com/factly/dega-server/util/arrays"
"github.com/factly/x/errorx"
"github.com/factly/x/loggerx"
"github.com/factly/x/meilisearchx"
"github.com/factly/x/middlewarex"
"github.com/factly/x/renderx"
"github.com/factly/x/schemax"
"github.com/factly/x/slugx"
"github.com/factly/x/validationx"
"github.com/go-chi/chi"
"github.com/jinzhu/gorm/dialects/postgres"
"gorm.io/gorm"
)
// update - Update post by id
// @Summary Update a post by id
// @Description Update post by ID
// @Tags Post
// @ID update-post-by-id
// @Produce json
// @Consume json
// @Param X-User header string true "User ID"
// @Param X-Space header string true "Space ID"
// @Param post_id path string true "Post ID"
// @Param Post body post false "Post"
// @Success 200 {object} postData
// @Router /core/posts/{post_id} [put]
func update(w http.ResponseWriter, r *http.Request) {
	// --- Parse path parameter and request context ---------------------------
	postID := chi.URLParam(r, "post_id")
	id, err := strconv.Atoi(postID)

	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InvalidID()))
		return
	}

	sID, err := middlewarex.GetSpace(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}

	uID, err := middlewarex.GetUser(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}

	oID, err := util.GetOrganisation(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}

	// --- Decode and validate the request body -------------------------------
	post := &post{}
	postAuthors := []model.PostAuthor{}
	postClaims := []factCheckModel.PostClaim{}

	err = json.NewDecoder(r.Body).Decode(&post)

	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.DecodeError()))
		return
	}

	validationError := validationx.Check(post)

	if validationError != nil {
		loggerx.Error(errors.New("validation error"))
		errorx.Render(w, validationError)
		return
	}

	// Response accumulator; empty slices so JSON renders [] rather than null.
	result := &postData{}
	result.ID = uint(id)
	result.Tags = make([]model.Tag, 0)
	result.Categories = make([]model.Category, 0)
	result.Authors = make([]model.Author, 0)
	result.Claims = make([]factCheckModel.Claim, 0)

	// fetch all authors (map keyed by stringified author id; see lookup below)
	authors, err := author.All(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
		return
	}

	// check record exists or not (must belong to the space and not be a page)
	err = config.DB.Where(&model.Post{
		SpaceID: uint(sID),
	}).Where("is_page = ?", false).First(&result.Post).Error

	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.RecordNotFound()))
		return
	}

	post.SpaceID = result.SpaceID

	// --- Resolve the slug ---------------------------------------------------
	// Keep the existing slug if unchanged; otherwise approve the requested
	// (or title-derived) slug for uniqueness within the space.
	var postSlug string

	// Get table name
	stmt := &gorm.Statement{DB: config.DB}
	_ = stmt.Parse(&model.Post{})
	tableName := stmt.Schema.Table

	if result.Slug == post.Slug {
		postSlug = result.Slug
	} else if post.Slug != "" && slugx.Check(post.Slug) {
		postSlug = slugx.Approve(&config.DB, post.Slug, sID, tableName)
	} else {
		postSlug = slugx.Approve(&config.DB, slugx.Make(post.Title), sID, tableName)
	}

	// Store HTML description (only when an editor-JSON description was sent)
	var description string
	if len(post.Description.RawMessage) > 0 && !reflect.DeepEqual(post.Description, test.NilJsonb()) {
		description, err = util.HTMLDescription(post.Description)
		if err != nil {
			loggerx.Error(err)
			errorx.Render(w, errorx.Parser(errorx.GetMessage("cannot parse post description", http.StatusUnprocessableEntity)))
			return
		}
	}

	// All writes below run inside this transaction (user id stashed in ctx
	// for the gorm hooks).
	tx := config.DB.WithContext(context.WithValue(r.Context(), userContext, uID)).Begin()

	// --- Replace tag associations -------------------------------------------
	newTags := make([]model.Tag, 0)
	if len(post.TagIDs) > 0 {
		config.DB.Model(&model.Tag{}).Where(post.TagIDs).Find(&newTags)
		if err = tx.Model(&result.Post).Association("Tags").Replace(&newTags); err != nil {
			tx.Rollback()
			loggerx.Error(err)
			errorx.Render(w, errorx.Parser(errorx.DBError()))
			return
		}
	} else {
		// NOTE(review): Clear runs on config.DB, not on tx — it is outside the
		// transaction and will not be rolled back; confirm this is intentional.
		_ = config.DB.Model(&result.Post).Association("Tags").Clear()
	}

	// --- Replace category associations (same pattern as tags) ---------------
	newCategories := make([]model.Category, 0)
	if len(post.CategoryIDs) > 0 {
		config.DB.Model(&model.Category{}).Where(post.CategoryIDs).Find(&newCategories)
		if err = tx.Model(&result.Post).Association("Categories").Replace(&newCategories); err != nil {
			tx.Rollback()
			loggerx.Error(err)
			errorx.Render(w, errorx.Parser(errorx.DBError()))
			return
		}
	} else {
		// NOTE(review): also outside the transaction, see tags above.
		_ = config.DB.Model(&result.Post).Association("Categories").Clear()
	}

	// Featured medium: id 0 means "unset" and is written as NULL.
	featuredMediumID := &post.FeaturedMediumID
	result.Post.FeaturedMediumID = &post.FeaturedMediumID
	if post.FeaturedMediumID == 0 {
		err = tx.Model(&result.Post).Omit("Tags", "Categories").Updates(map[string]interface{}{"featured_medium_id": nil}).Error
		featuredMediumID = nil
		if err != nil {
			tx.Rollback()
			loggerx.Error(err)
			errorx.Render(w, errorx.Parser(errorx.DBError()))
			return
		}
	}

	// Fields to persist on the post row itself.
	updatedPost := model.Post{
		Base:             config.Base{UpdatedByID: uint(uID)},
		Title:            post.Title,
		Slug:             postSlug,
		Subtitle:         post.Subtitle,
		Excerpt:          post.Excerpt,
		Description:      post.Description,
		HTMLDescription:  description,
		IsHighlighted:    post.IsHighlighted,
		IsSticky:         post.IsSticky,
		FormatID:         post.FormatID,
		FeaturedMediumID: featuredMediumID,
		Meta:             post.Meta,
		HeaderCode:       post.HeaderCode,
		FooterCode:       post.FooterCode,
		MetaFields:       post.MetaFields,
	}

	// --- Status transitions -------------------------------------------------
	// publish -> draft and * -> publish both require the keto "publish"
	// permission; ready transitions need no permission check.
	oldStatus := result.Post.Status
	// Check if post status is changed back to draft from published
	if oldStatus == "publish" && post.Status == "draft" {
		status, err := getPublishPermissions(oID, sID, uID)
		if err != nil {
			tx.Rollback()
			loggerx.Error(err)
			errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
			return
		}

		if status == http.StatusOK {
			updatedPost.Status = "draft"
			// Unpublishing clears the published date.
			tx.Model(&result.Post).Select("PublishedDate").Omit("Tags", "Categories").Updates(model.Post{PublishedDate: nil})
		} else {
			tx.Rollback()
			w.WriteHeader(http.StatusUnauthorized)
			return
		}
	} else if post.Status == "publish" {
		// Check if authors are not added while publishing post
		if len(post.AuthorIDs) == 0 {
			errorx.Render(w, errorx.Parser(errorx.GetMessage("cannot publish post without author", http.StatusUnprocessableEntity)))
			return
		}

		status, err := getPublishPermissions(oID, sID, uID)
		if err != nil {
			tx.Rollback()
			loggerx.Error(err)
			errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
			return
		}

		if status == http.StatusOK {
			updatedPost.Status = "publish"
			// Default the published date to "now" unless the caller set one.
			if post.PublishedDate == nil {
				currTime := time.Now()
				updatedPost.PublishedDate = &currTime
			} else {
				updatedPost.PublishedDate = post.PublishedDate
			}
		} else {
			tx.Rollback()
			w.WriteHeader(http.StatusUnauthorized)
			return
		}
	} else if post.Status == "ready" {
		updatedPost.Status = "ready"
	} else if oldStatus == "ready" && post.Status == "draft" {
		updatedPost.Status = "draft"
	}

	// Boolean flags are updated via Select so false values are persisted too
	// (gorm struct Updates would otherwise skip zero values).
	tx.Model(&result.Post).Select("IsFeatured", "IsSticky", "IsHighlighted", "IsPage").Omit("Tags", "Categories").Updates(model.Post{
		IsFeatured:    post.IsFeatured,
		IsSticky:      post.IsSticky,
		IsHighlighted: post.IsHighlighted,
		IsPage:        post.IsPage,
	})
	// Main update, then reload the post with all its associations.
	err = tx.Model(&result.Post).Updates(updatedPost).Preload("Medium").Preload("Format").Preload("Tags").Preload("Categories").Preload("Space").First(&result.Post).Error

	if err != nil {
		tx.Rollback()
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.DBError()))
		return
	}

	// --- Fact-check claims: full delete-and-recreate ------------------------
	var toCreateIDs []uint
	var toDeleteIDs []uint

	if result.Post.Format.Slug == "fact-check" {
		// fetch existing post claims
		tx.Model(&factCheckModel.PostClaim{}).Where(&factCheckModel.PostClaim{
			PostID: uint(id),
		}).Find(&postClaims)

		err = tx.Model(&factCheckModel.PostClaim{}).Delete(&postClaims).Error
		if err != nil {
			tx.Rollback()
			loggerx.Error(err)
			errorx.Render(w, errorx.Parser(errorx.DBError()))
			return
		}

		// Recreate the join rows, preserving the order sent by the caller.
		toCreatePostClaims := make([]factCheckModel.PostClaim, 0)
		for i, id := range post.ClaimIDs {
			postClaim := factCheckModel.PostClaim{}
			postClaim.ClaimID = uint(id)
			postClaim.PostID = result.ID
			postClaim.Position = uint(i + 1)
			toCreatePostClaims = append(toCreatePostClaims, postClaim)
		}

		err = tx.Model(&factCheckModel.PostClaim{}).Create(&toCreatePostClaims).Error
		if err != nil {
			tx.Rollback()
			loggerx.Error(err)
			errorx.Render(w, errorx.Parser(errorx.DBError()))
			return
		}

		// fetch updated post claims
		updatedPostClaims := []factCheckModel.PostClaim{}
		tx.Model(&factCheckModel.PostClaim{}).Where(&factCheckModel.PostClaim{
			PostID: uint(id),
		}).Preload("Claim").Preload("Claim.Rating").Preload("Claim.Rating.Medium").Preload("Claim.Claimant").Preload("Claim.Claimant.Medium").Find(&updatedPostClaims)

		// NOTE(review): ClaimOrder is sized from the OLD claim list but indexed
		// by the NEW claims' positions below — if more claims were added than
		// existed before, this looks like an index-out-of-range; confirm.
		result.ClaimOrder = make([]uint, len(postClaims))
		// appending previous post claims to result
		for _, postClaim := range updatedPostClaims {
			result.Claims = append(result.Claims, postClaim.Claim)
			result.ClaimOrder[int(postClaim.Position-1)] = postClaim.ClaimID
		}
	}

	// --- Post authors: diff against the existing set ------------------------
	// fetch existing post authors
	config.DB.Model(&model.PostAuthor{}).Where(&model.PostAuthor{
		PostID: uint(id),
	}).Find(&postAuthors)

	prevAuthorIDs := make([]uint, 0)
	mapperPostAuthor := map[uint]model.PostAuthor{}
	postAuthorIDs := make([]uint, 0)

	for _, postAuthor := range postAuthors {
		mapperPostAuthor[postAuthor.AuthorID] = postAuthor
		prevAuthorIDs = append(prevAuthorIDs, postAuthor.AuthorID)
	}

	toCreateIDs, toDeleteIDs = arrays.Difference(prevAuthorIDs, post.AuthorIDs)

	// map post author ids (join-row primary keys for the rows to delete)
	for _, id := range toDeleteIDs {
		postAuthorIDs = append(postAuthorIDs, mapperPostAuthor[id].ID)
	}

	// delete post authors
	if len(postAuthorIDs) > 0 {
		err = tx.Where(&postAuthorIDs).Delete(&model.PostAuthor{}).Error
		if err != nil {
			tx.Rollback()
			loggerx.Error(err)
			errorx.Render(w, errorx.Parser(errorx.DBError()))
			return
		}
	}

	// creating new post authors
	for _, id := range toCreateIDs {
		if id != 0 {
			postAuthor := &model.PostAuthor{}
			postAuthor.AuthorID = uint(id)
			postAuthor.PostID = result.ID

			err = tx.Model(&model.PostAuthor{}).Create(&postAuthor).Error
			if err != nil {
				tx.Rollback()
				loggerx.Error(err)
				errorx.Render(w, errorx.Parser(errorx.DBError()))
				return
			}
		}
	}

	// fetch existing post authors
	updatedPostAuthors := []model.PostAuthor{}
	tx.Model(&model.PostAuthor{}).Where(&model.PostAuthor{
		PostID: uint(id),
	}).Find(&updatedPostAuthors)

	// appending previous post authors to result
	for _, postAuthor := range updatedPostAuthors {
		aID := fmt.Sprint(postAuthor.AuthorID)
		if author, found := authors[aID]; found {
			result.Authors = append(result.Authors, author)
		}
	}

	// --- Regenerate structured-data schemas and persist them ----------------
	ratings := make([]factCheckModel.Rating, 0)
	config.DB.Model(&factCheckModel.Rating{}).Where(factCheckModel.Rating{
		SpaceID: uint(sID),
	}).Order("numeric_value asc").Find(&ratings)

	schemas := schemax.GetSchemas(schemax.PostData{
		Post:    result.Post,
		Authors: result.Authors,
		Claims:  result.Claims,
	}, *result.Space, ratings)

	byteArr, err := json.Marshal(schemas)
	if err != nil {
		tx.Rollback()
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
		return
	}

	tx.Model(&result.Post).Select("Schemas").Updates(&model.Post{
		Schemas: postgres.Jsonb{RawMessage: byteArr},
	})
	result.Post.Schemas = postgres.Jsonb{RawMessage: byteArr}

	// Update into meili index (published_date stays 0 for unpublished posts)
	var meiliPublishDate int64
	if result.Post.Status == "publish" {
		meiliPublishDate = result.Post.PublishedDate.Unix()
	}
	meiliObj := map[string]interface{}{
		"id":             result.ID,
		"kind":           "post",
		"title":          result.Title,
		"subtitle":       result.Subtitle,
		"slug":           result.Slug,
		"status":         result.Status,
		"excerpt":        result.Excerpt,
		"description":    result.Description,
		"is_featured":    result.IsFeatured,
		"is_sticky":      result.IsSticky,
		"is_highlighted": result.IsHighlighted,
		"is_page":        result.IsPage,
		"format_id":      result.FormatID,
		"published_date": meiliPublishDate,
		"space_id":       result.SpaceID,
		"tag_ids":        post.TagIDs,
		"category_ids":   post.CategoryIDs,
		"author_ids":     post.AuthorIDs,
	}

	if result.Format.Slug == "fact-check" {
		meiliObj["claim_ids"] = post.ClaimIDs
	}

	err = meilisearchx.UpdateDocument("dega", meiliObj)
	if err != nil {
		tx.Rollback()
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
		return
	}

	tx.Commit()

	// --- Fan out lifecycle events over NATS, if configured ------------------
	if util.CheckNats() {
		if err = util.NC.Publish("post.updated", result); err != nil {
			loggerx.Error(err)
			errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
			return
		}

		if result.Post.Status == "publish" {
			if err = util.NC.Publish("post.published", result); err != nil {
				loggerx.Error(err)
				errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
				return
			}
		}

		if oldStatus == "publish" && (result.Post.Status == "draft" || result.Post.Status == "ready") {
			if err = util.NC.Publish("post.unpublished", result); err != nil {
				loggerx.Error(err)
				errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
				return
			}
		}

		if (oldStatus == "publish" || oldStatus == "draft") && result.Post.Status == "ready" {
			if err = util.NC.Publish("post.ready", result); err != nil {
				loggerx.Error(err)
				errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
				return
			}
		}
	}

	renderx.JSON(w, http.StatusOK, result)
}
// getPublishPermissions asks keto whether user uID may publish posts in the
// given organisation and space. It returns the HTTP status code that the
// permission check replied with (http.StatusOK means allowed).
func getPublishPermissions(oID, sID, uID int) (int, error) {
	// Shared ":org:<o>:app:dega:space:<s>:" scope used by both keys.
	scope := fmt.Sprintf(":org:%d:app:dega:space:%d:", oID, sID)

	check := util.KetoAllowed{}
	check.Resource = fmt.Sprintf("resources%sposts", scope)
	check.Action = fmt.Sprintf("actions%sposts:publish", scope)
	check.Subject = strconv.Itoa(uID)

	status, err := util.IsAllowed(check)
	if err != nil {
		return 0, err
	}
	return status, nil
}
<file_sep>/service/core/action/format/default.go
package format
import (
"context"
"encoding/json"
"io/ioutil"
"net/http"
"os"
"github.com/factly/dega-server/config"
"github.com/factly/dega-server/service/core/model"
"github.com/factly/x/errorx"
"github.com/factly/x/loggerx"
"github.com/factly/x/middlewarex"
"github.com/factly/x/renderx"
)
// DataFile default json data file
var DataFile = "./data/formats.json"
// createDefaults - Create Default Formats
// @Summary Create Default Formats
// @Description Create Default Formats
// @Tags Format
// @ID add-default-formats
// @Consume json
// @Produce json
// @Param X-User header string true "User ID"
// @Param X-Space header string true "Space ID"
// @Success 201 {object} paging
// @Failure 400 {array} string
// @Router /core/formats/default [post]
func createDefaults(w http.ResponseWriter, r *http.Request) {
	// Space and user must both be resolvable from the request context.
	sID, err := middlewarex.GetSpace(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}

	uID, err := middlewarex.GetUser(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}

	// Load the seed formats from the JSON data file.
	jsonFile, err := os.Open(DataFile)
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
		return
	}

	defer jsonFile.Close()

	formats := make([]model.Format, 0)

	// FIX: the read error was previously discarded (`byteValue, _ :=`),
	// so a failed read surfaced later as a confusing unmarshal error.
	byteValue, err := ioutil.ReadAll(jsonFile)
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
		return
	}

	err = json.Unmarshal(byteValue, &formats)
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
		return
	}

	// Create each format for the current space (FirstOrCreate keeps this
	// idempotent) and index it into meilisearch; any indexing failure rolls
	// back the whole batch.
	tx := config.DB.WithContext(context.WithValue(r.Context(), userContext, uID)).Begin()
	for i := range formats {
		formats[i].SpaceID = uint(sID)
		tx.Model(&model.Format{}).FirstOrCreate(&formats[i], &formats[i])

		err = insertIntoMeili(formats[i])
		if err != nil {
			tx.Rollback()
			loggerx.Error(err)
			errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
			return
		}
	}

	result := paging{}
	result.Nodes = formats
	result.Total = int64(len(formats))

	tx.Commit()

	renderx.JSON(w, http.StatusCreated, result)
}
<file_sep>/test/service/core/request/space/create_test.go
package space
import (
"net/http"
"net/http/httptest"
"regexp"
"testing"
"time"
"github.com/DATA-DOG/go-sqlmock"
"github.com/factly/dega-server/service"
"github.com/factly/dega-server/test"
"github.com/factly/dega-server/test/service/core/space"
"github.com/gavv/httpexpect"
"github.com/spf13/viper"
"gopkg.in/h2non/gock.v1"
)
// TestSpaceRequestCreate exercises POST on the space permission request
// endpoint: validation failures, missing space, non-owner caller, and the
// happy path with order-sensitive sqlmock expectations.
func TestSpaceRequestCreate(t *testing.T) {
	mock := test.SetupMockDB()

	test.MockServer()
	// NOTE(review): this defer runs before the test server exists and is
	// duplicated three lines below — looks redundant; confirm intent.
	defer gock.DisableNetworking()
	testServer := httptest.NewServer(service.RegisterRoutes())
	gock.New(testServer.URL).EnableNetworking().Persist()
	defer gock.DisableNetworking()
	defer testServer.Close()

	// create httpexpect instance
	e := httpexpect.New(t, testServer.URL)

	// Body failing validation -> 422.
	t.Run("Unprocessable request body", func(t *testing.T) {
		e.POST(basePath).
			WithJSON(invalidData).
			WithHeaders(headers).
			Expect().
			Status(http.StatusUnprocessableEntity)
		test.ExpectationsMet(t, mock)
	})

	// Missing body -> 422.
	t.Run("Undecodable request body", func(t *testing.T) {
		e.POST(basePath).
			WithHeaders(headers).
			Expect().
			Status(http.StatusUnprocessableEntity)
		test.ExpectationsMet(t, mock)
	})

	// Space lookup returns no rows -> 404.
	t.Run("Space for the request not found", func(t *testing.T) {
		mock.ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "spaces"`)).
			WithArgs(1).
			WillReturnRows(sqlmock.NewRows(space.Columns))

		e.POST(basePath).
			WithJSON(Data).
			WithHeaders(headers).
			Expect().
			Status(http.StatusNotFound)
		test.ExpectationsMet(t, mock)
	})

	// Kavach reports the caller's role as "member" (not owner) -> 401.
	t.Run("User not owner of the organisation", func(t *testing.T) {
		space.SelectQuery(mock, 1)
		test.DisableKavachGock(testServer.URL)
		gock.New(viper.GetString("kavach_url") + "/organisations/my").
			Persist().
			Reply(http.StatusOK).
			JSON([]map[string]interface{}{
				map[string]interface{}{
					"id":         1,
					"created_at": time.Now(),
					"updated_at": time.Now(),
					"deleted_at": nil,
					"title":      "test org",
					"slug":       "test-org",
					"permission": map[string]interface{}{
						"id":              1,
						"created_at":      time.Now(),
						"updated_at":      time.Now(),
						"deleted_at":      nil,
						"user_id":         1,
						"user":            nil,
						"organisation_id": 1,
						"organisation":    nil,
						"role":            "member",
					},
				},
			})

		e.POST(basePath).
			WithJSON(Data).
			WithHeaders(headers).
			Expect().
			Status(http.StatusUnauthorized)
		test.ExpectationsMet(t, mock)
	})

	// Happy path: reset gock (the previous subtest overrode kavach), expect
	// the insert inside a committed transaction -> 201.
	t.Run("Create space request", func(t *testing.T) {
		gock.Off()
		test.MockServer()
		gock.New(testServer.URL).EnableNetworking().Persist()
		defer gock.DisableNetworking()
		space.SelectQuery(mock, 1)
		mock.ExpectBegin()
		mock.ExpectQuery(`INSERT INTO "space_permission_requests"`).
			WithArgs(test.AnyTime{}, test.AnyTime{}, nil, 1, 1, Data["title"], Data["description"], "pending", Data["media"], Data["posts"], Data["episodes"], Data["podcast"], Data["fact_check"], Data["space_id"]).
			WillReturnRows(sqlmock.
				NewRows([]string{"id"}).
				AddRow(1))
		mock.ExpectCommit()

		e.POST(basePath).
			WithJSON(Data).
			WithHeaders(headers).
			Expect().
			Status(http.StatusCreated)
		test.ExpectationsMet(t, mock)
	})
}
<file_sep>/test/service/fact-check/rating/default_test.go
package rating
import (
"net/http"
"net/http/httptest"
"testing"
"time"
"github.com/DATA-DOG/go-sqlmock"
"github.com/factly/dega-server/service/fact-check/action/rating"
"github.com/factly/dega-server/test/service/core/permissions/space"
"github.com/factly/dega-server/service"
"github.com/factly/dega-server/test"
"github.com/gavv/httpexpect/v2"
"gopkg.in/h2non/gock.v1"
)
// TestDefaultRatingCreate exercises the "create default ratings" endpoint:
// fresh creation, idempotent re-run, unreadable/unparsable data files, and
// meilisearch being unreachable.
func TestDefaultRatingCreate(t *testing.T) {
	mock := test.SetupMockDB()

	test.MockServer()
	defer gock.DisableNetworking()
	testServer := httptest.NewServer(service.RegisterRoutes())
	gock.New(testServer.URL).EnableNetworking().Persist()
	defer gock.DisableNetworking()
	defer testServer.Close()

	rating.DataFile = "./testDefault.json"
	// create httpexpect instance
	e := httpexpect.New(t, testServer.URL)

	// Fresh space: each of the 6 seed ratings misses its SELECT and is
	// inserted, all inside one committed transaction -> 201.
	t.Run("create default ratings", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)
		mock.ExpectBegin()
		for i := 0; i < 6; i++ {
			mock.ExpectQuery(selectQuery).
				WillReturnRows(sqlmock.NewRows(columns))
			mock.ExpectQuery(`INSERT INTO "ratings"`).
				WithArgs(test.AnyTime{}, test.AnyTime{}, nil, 1, 1, defaultData[i]["name"], defaultData[i]["slug"], defaultData[i]["background_colour"], defaultData[i]["text_colour"], defaultData[i]["description"], defaultData[i]["html_description"], defaultData[i]["numeric_value"], nil, 1).
				WillReturnRows(sqlmock.
					NewRows([]string{"id"}).
					AddRow(1))
		}
		mock.ExpectCommit()

		e.POST(defaultsPath).
			WithHeaders(headers).
			Expect().
			Status(http.StatusCreated).
			JSON().
			Object().
			Value("nodes").
			Array()
		test.ExpectationsMet(t, mock)
	})

	// Ratings already exist: every SELECT returns a row, so no inserts
	// happen; the call is idempotent and still answers 201.
	t.Run("default ratings already created", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)
		mock.ExpectBegin()
		for i := 0; i < 6; i++ {
			mock.ExpectQuery(selectQuery).
				WillReturnRows(sqlmock.NewRows(columns).
					AddRow(1, time.Now(), time.Now(), nil, 1, 1, defaultData[i]["name"], defaultData[i]["slug"], defaultData[i]["background_colour"], defaultData[i]["text_colour"], defaultData[i]["medium_id"], defaultData[i]["description"], defaultData[i]["html_description"], defaultData[i]["numeric_value"], 1))
		}
		mock.ExpectCommit()

		e.POST(defaultsPath).
			WithHeaders(headers).
			Expect().
			Status(http.StatusCreated).
			JSON().
			Object().
			Value("nodes").
			Array()
		test.ExpectationsMet(t, mock)
	})

	// Unreadable data file -> 500.
	// NOTE(review): the restore below uses data/ratings.json while the test's
	// initial value is ./testDefault.json — confirm which is intended.
	t.Run("when cannot open data file", func(t *testing.T) {
		rating.DataFile = "nofile.json"
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)

		e.POST(defaultsPath).
			WithHeaders(headers).
			Expect().
			Status(http.StatusInternalServerError)
		test.ExpectationsMet(t, mock)
		rating.DataFile = "../../../../data/ratings.json"
	})

	// Syntactically invalid data file -> 500; restored to testDefault.json.
	t.Run("when cannot parse data file", func(t *testing.T) {
		rating.DataFile = "invalidData.json"
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)

		e.POST(defaultsPath).
			WithHeaders(headers).
			Expect().
			Status(http.StatusInternalServerError)
		test.ExpectationsMet(t, mock)
		rating.DataFile = "./testDefault.json"
	})

	// Meilisearch unreachable: first insert succeeds but indexing fails,
	// so the transaction rolls back -> 500.
	t.Run("when meili is down", func(t *testing.T) {
		test.DisableMeiliGock(testServer.URL)
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)
		mock.ExpectBegin()
		mock.ExpectQuery(selectQuery).
			WillReturnRows(sqlmock.NewRows(columns))
		mock.ExpectQuery(`INSERT INTO "ratings"`).
			WithArgs(test.AnyTime{}, test.AnyTime{}, nil, 1, 1, defaultData[0]["name"], defaultData[0]["slug"], defaultData[0]["background_colour"], defaultData[0]["text_colour"], defaultData[0]["description"], defaultData[0]["html_description"], defaultData[0]["numeric_value"], nil, 1).
			WillReturnRows(sqlmock.
				NewRows([]string{"id"}).
				AddRow(1))
		mock.ExpectRollback()

		e.POST(defaultsPath).
			WithHeaders(headers).
			Expect().
			Status(http.StatusInternalServerError)
		test.ExpectationsMet(t, mock)
	})
}
<file_sep>/service/core/action/request/organisation/my.go
package organisation
import (
"net/http"
"github.com/factly/dega-server/config"
"github.com/factly/dega-server/service/core/model"
"github.com/factly/dega-server/util"
"github.com/factly/x/errorx"
"github.com/factly/x/loggerx"
"github.com/factly/x/paginationx"
"github.com/factly/x/renderx"
)
// my - Get all my organisation permissions requests
// @Summary Show all my organisation permissions requests
// @Description Get all my organisation permissions requests
// @Tags Organisation_Permissions_Request
// @ID get-all-my-org-permissions-requests
// @Produce json
// @Param X-User header string true "User ID"
// @Param X-Space header string true "Space ID"
// @Success 200 {array} paging
// @Router /core/requests/organisations/my [get]
func my(w http.ResponseWriter, r *http.Request) {
	// The organisation must be resolvable from the request context,
	// otherwise the caller is not authorised.
	orgID, err := util.GetOrganisation(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}

	offset, limit := paginationx.Parse(r.URL.Query())

	// Page of permission requests plus the total row count.
	response := paging{Nodes: make([]model.OrganisationPermissionRequest, 0)}

	config.DB.Model(&model.OrganisationPermissionRequest{}).Where(&model.OrganisationPermissionRequest{
		OrganisationID: uint(orgID),
	}).Count(&response.Total).Offset(offset).Limit(limit).Find(&response.Nodes)

	renderx.JSON(w, http.StatusOK, response)
}
<file_sep>/service/core/route.go
package core
import (
"net/http"
"github.com/factly/dega-server/service/core/action/event"
"github.com/factly/dega-server/service/core/action/info"
"github.com/factly/dega-server/service/core/action/menu"
"github.com/factly/dega-server/service/core/action/page"
"github.com/factly/dega-server/service/core/action/permissions"
"github.com/factly/dega-server/service/core/action/request"
"github.com/factly/dega-server/service/core/action/webhook"
"github.com/factly/dega-server/util"
"github.com/go-chi/chi"
"github.com/factly/dega-server/service/core/action/author"
"github.com/factly/dega-server/service/core/action/category"
"github.com/factly/dega-server/service/core/action/format"
"github.com/factly/dega-server/service/core/action/medium"
"github.com/factly/dega-server/service/core/action/policy"
"github.com/factly/dega-server/service/core/action/post"
"github.com/factly/dega-server/service/core/action/search"
"github.com/factly/dega-server/service/core/action/space"
"github.com/factly/dega-server/service/core/action/tag"
"github.com/factly/dega-server/service/core/action/user"
)
// Router - CRUD services for all core entities, mounted under one router.
func Router() http.Handler {
	router := chi.NewRouter()

	// Path prefix -> sub-router for every core entity.
	mounts := []struct {
		pattern string
		handler http.Handler
	}{
		{"/media", medium.Router()},
		{"/menus", menu.Router()},
		{"/categories", category.Router()},
		{"/formats", format.Router()},
		{"/tags", tag.Router()},
		{"/spaces", space.Router()},
		{"/posts", post.Router()},
		{"/pages", page.Router()},
		{"/policies", policy.Router()},
		{"/authors", author.Router()},
		{"/search", search.Router()},
		{"/users", user.Router()},
		{"/permissions", permissions.Router()},
		{"/requests", request.Router()},
		{"/info", info.Router()},
	}
	for _, m := range mounts {
		router.Mount(m.pattern, m.handler)
	}

	// Webhook/event routes only make sense when NATS is configured.
	if util.CheckNats() {
		router.Mount("/webhooks", webhook.Router())
		router.Mount("/events", event.Router())
	}

	return router
}
<file_sep>/service/core/action/event/default.go
package event
import (
"encoding/json"
"fmt"
"io/ioutil"
"net/http"
"os"
"github.com/factly/dega-server/service/core/model"
"github.com/factly/x/errorx"
"github.com/factly/x/loggerx"
"github.com/factly/x/middlewarex"
"github.com/factly/x/renderx"
"github.com/factly/x/requestx"
"github.com/spf13/viper"
)
// DataFile default json data file
// Path of the JSON file holding the default events seeded by the
// /core/events/default endpoint; presumably tests point it at fixture
// files, mirroring how rating.DataFile is overridden — TODO confirm.
var DataFile = "./data/events.json"
// create - Create default Events
// @Summary Create default Events
// @Description Create default Events
// @Tags Events
// @ID add-default-events
// @Consume json
// @Produce json
// @Param X-User header string true "User ID"
// @Param X-Space header string true "Space ID"
// @Success 201 {object} model.Event
// @Failure 400 {array} string
// @Router /core/events/default [post]
//
// defaults reads the event definitions from DataFile, tags each one for the
// current space and registers it with the hukz service. Events that hukz
// rejects as already existing (422) are skipped; any other failure aborts
// with a 500.
func defaults(w http.ResponseWriter, r *http.Request) {
	sID, err := middlewarex.GetSpace(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}

	uID, err := middlewarex.GetUser(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}

	jsonFile, err := os.Open(DataFile)
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
		return
	}
	defer jsonFile.Close()

	// Fix: the read error was previously discarded with `_`; a truncated
	// read would have surfaced only as a confusing unmarshal error.
	byteValue, err := ioutil.ReadAll(jsonFile)
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
		return
	}

	events := make([]event, 0)
	if err = json.Unmarshal(byteValue, &events); err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
		return
	}

	// Loop-invariant: the hukz endpoint does not change per event.
	hukzURL := viper.GetString("hukz_url") + "/events"

	eventsResp := make([]model.Event, 0)
	for i := range events {
		if err = AddTags(&events[i], sID); err != nil {
			loggerx.Error(err)
			errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
			return
		}

		resp, err := requestx.Request("POST", hukzURL, events[i], map[string]string{
			"X-User": fmt.Sprint(uID),
		})
		if err != nil {
			loggerx.Error(err)
			errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
			return
		}

		// 422 from hukz means the event already exists; skip it.
		if resp.StatusCode == http.StatusUnprocessableEntity {
			// Fix: the response body was previously never closed on this
			// path (or any other), leaking the underlying connection on
			// every iteration.
			resp.Body.Close()
			continue
		}
		if resp.StatusCode != http.StatusCreated {
			resp.Body.Close()
			errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
			return
		}

		var eventRes model.Event
		err = json.NewDecoder(resp.Body).Decode(&eventRes)
		// Close explicitly instead of defer: defer in a loop would hold
		// every body open until the handler returns.
		resp.Body.Close()
		if err != nil {
			errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
			return
		}
		eventsResp = append(eventsResp, eventRes)
	}

	renderx.JSON(w, http.StatusCreated, eventsResp)
}
<file_sep>/service/podcast/route.go
package podcast
import (
"net/http"
podcast "github.com/factly/dega-server/service/podcast/action"
"github.com/factly/dega-server/service/podcast/action/episode"
"github.com/go-chi/chi"
)
// Router - CRUD services for podcasts and their episodes.
func Router() http.Handler {
	router := chi.NewRouter()

	router.Mount("/", podcast.Router())         // podcast CRUD
	router.Mount("/episodes", episode.Router()) // episode CRUD

	return router
}
<file_sep>/service/core/action/medium/update.go
package medium
import (
"encoding/json"
"errors"
"net/http"
"strconv"
"github.com/factly/dega-server/config"
"github.com/factly/dega-server/service/core/model"
"github.com/factly/dega-server/util"
"github.com/factly/x/errorx"
"github.com/factly/x/loggerx"
"github.com/factly/x/meilisearchx"
"github.com/factly/x/middlewarex"
"github.com/factly/x/renderx"
"github.com/factly/x/slugx"
"github.com/factly/x/validationx"
"github.com/go-chi/chi"
"gorm.io/gorm"
)
// update - Update medium by id
// @Summary Update a medium by id
// @Description Update medium by ID
// @Tags Medium
// @ID update-medium-by-id
// @Produce json
// @Consume json
// @Param X-User header string true "User ID"
// @Param medium_id path string true "Medium ID"
// @Param X-Space header string true "Space ID"
// @Param Medium body medium false "Medium"
// @Success 200 {object} model.Medium
// @Router /core/media/{medium_id} [put]
//
// The handler runs the DB update and the meilisearch index update inside
// one transaction: a meili failure rolls the DB change back, and the
// transaction only commits after both succeed.
func update(w http.ResponseWriter, r *http.Request) {
	// Space and user must both be resolvable from the request context.
	sID, err := middlewarex.GetSpace(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}
	uID, err := middlewarex.GetUser(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}
	mediumID := chi.URLParam(r, "medium_id")
	id, err := strconv.Atoi(mediumID)
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InvalidID()))
		return
	}
	// Decode and validate the request body (the `medium` request type,
	// not the model).
	medium := &medium{}
	err = json.NewDecoder(r.Body).Decode(&medium)
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.DecodeError()))
		return
	}
	validationError := validationx.Check(medium)
	if validationError != nil {
		loggerx.Error(errors.New("validation error"))
		errorx.Render(w, validationError)
		return
	}
	result := &model.Medium{}
	result.ID = uint(id)
	// check record exists or not
	// (scoped to the caller's space so one space cannot touch another's media)
	err = config.DB.Where(&model.Medium{
		SpaceID: uint(sID),
	}).First(&result).Error
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.RecordNotFound()))
		return
	}
	var mediumSlug string
	// Get table name
	// (needed by slugx.Approve to check slug uniqueness per space)
	stmt := &gorm.Statement{DB: config.DB}
	_ = stmt.Parse(&model.Medium{})
	tableName := stmt.Schema.Table
	// Slug resolution: keep the existing slug if unchanged, otherwise
	// approve the requested slug, otherwise derive one from the name.
	if result.Slug == medium.Slug {
		mediumSlug = result.Slug
	} else if medium.Slug != "" && slugx.Check(medium.Slug) {
		mediumSlug = slugx.Approve(&config.DB, medium.Slug, sID, tableName)
	} else {
		mediumSlug = slugx.Approve(&config.DB, slugx.Make(medium.Name), sID, tableName)
	}
	tx := config.DB.Begin()
	err = tx.Model(&result).Updates(model.Medium{
		Base:        config.Base{UpdatedByID: uint(uID)},
		Name:        medium.Name,
		Slug:        mediumSlug,
		Title:       medium.Title,
		Type:        medium.Type,
		Description: medium.Description,
		AltText:     medium.AltText,
		Caption:     medium.Caption,
		FileSize:    medium.FileSize,
		URL:         medium.URL,
		Dimensions:  medium.Dimensions,
		MetaFields:  medium.MetaFields,
	}).First(&result).Error
	if err != nil {
		tx.Rollback()
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.DBError()))
		return
	}
	// Update into meili index
	meiliObj := map[string]interface{}{
		"id":          result.ID,
		"kind":        "medium",
		"name":        result.Name,
		"slug":        result.Slug,
		"title":       result.Title,
		"type":        result.Type,
		"description": result.Description,
		"space_id":    result.SpaceID,
	}
	err = meilisearchx.UpdateDocument("dega", meiliObj)
	if err != nil {
		// Meili failed: undo the DB update so DB and index stay in sync.
		tx.Rollback()
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
		return
	}
	tx.Commit()
	// Best-effort notification after commit; a publish failure is
	// reported to the caller even though the update already persisted.
	if util.CheckNats() {
		if err = util.NC.Publish("media.updated", result); err != nil {
			loggerx.Error(err)
			errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
			return
		}
	}
	renderx.JSON(w, http.StatusOK, result)
}
<file_sep>/service/core/action/category/list.go
package category
import (
"fmt"
"net/http"
"github.com/factly/dega-server/config"
"github.com/factly/dega-server/service/core/model"
"github.com/factly/x/errorx"
"github.com/factly/x/loggerx"
"github.com/factly/x/meilisearchx"
"github.com/factly/x/middlewarex"
"github.com/factly/x/paginationx"
"github.com/factly/x/renderx"
)
// list response
// paging wraps one page of categories together with the total row count
// so clients can drive pagination.
type paging struct {
	Total int64            `json:"total"` // total matching rows, not just this page
	Nodes []model.Category `json:"nodes"` // the current page of results
}
// list - Get all categories
// @Summary Show all categories
// @Description Get all categories
// @Tags Category
// @ID get-all-categories
// @Produce json
// @Param X-User header string true "User ID"
// @Param X-Space header string true "Space ID"
// @Param limit query string false "limit per page"
// @Param page query string false "page number"
// @Param q query string false "Query"
// @Param sort query string false "Sort"
// @Success 200 {object} paging
// @Router /core/categories [get]
func list(w http.ResponseWriter, r *http.Request) {
	sID, err := middlewarex.GetSpace(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}

	result := paging{Nodes: make([]model.Category, 0)}

	// When a free-text query is present, resolve it through meilisearch
	// first; the DB query is then restricted to the returned IDs. Search
	// failures and empty hits both answer with an empty (200) page.
	searchQuery := r.URL.Query().Get("q")
	filteredCategoryIDs := make([]uint, 0)
	if searchQuery != "" {
		hits, searchErr := meilisearchx.SearchWithQuery("dega", searchQuery, fmt.Sprint("space_id=", sID), "category")
		if searchErr != nil {
			loggerx.Error(searchErr)
			renderx.JSON(w, http.StatusOK, result)
			return
		}
		filteredCategoryIDs = meilisearchx.GetIDArray(hits)
		if len(filteredCategoryIDs) == 0 {
			renderx.JSON(w, http.StatusOK, result)
			return
		}
	}

	// Anything other than an explicit "asc" sorts newest-first.
	sortOrder := r.URL.Query().Get("sort")
	if sortOrder != "asc" {
		sortOrder = "desc"
	}

	offset, limit := paginationx.Parse(r.URL.Query())

	tx := config.DB.Model(&model.Category{}).Preload("Medium").Where(&model.Category{
		SpaceID: uint(sID),
	}).Order("created_at " + sortOrder)
	if len(filteredCategoryIDs) > 0 {
		tx = tx.Where(filteredCategoryIDs)
	}

	if err = tx.Count(&result.Total).Offset(offset).Limit(limit).Find(&result.Nodes).Error; err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.DBError()))
		return
	}

	renderx.JSON(w, http.StatusOK, result)
}
<file_sep>/service/core/action/info/route.go
package info
import "github.com/go-chi/chi"
// Router - info endpoints; a single read-only route.
func Router() chi.Router {
	router := chi.NewRouter()
	router.Get("/", details)
	return router
}
<file_sep>/service/core/action/space/route.go
package space
import (
"github.com/factly/dega-server/config"
"github.com/go-chi/chi"
"github.com/jinzhu/gorm/dialects/postgres"
)
// space request body
// Fields map one-to-one onto the JSON payload accepted by the space
// create/update handlers; the postgres.Jsonb fields carry opaque JSON
// blobs that are stored as-is.
type space struct {
	Name              string         `json:"name" validate:"required,min=3,max=50"`
	Slug              string         `json:"slug"`
	SiteTitle         string         `json:"site_title"`
	TagLine           string         `json:"tag_line"`
	Description       string         `json:"description"`
	SiteAddress       string         `json:"site_address"`
	LogoID            uint           `json:"logo_id"`
	LogoMobileID      uint           `json:"logo_mobile_id"`
	FavIconID         uint           `json:"fav_icon_id"`
	MobileIconID      uint           `json:"mobile_icon_id"`
	VerificationCodes postgres.Jsonb `json:"verification_codes" swaggertype:"primitive,string"`
	SocialMediaURLs   postgres.Jsonb `json:"social_media_urls" swaggertype:"primitive,string"`
	ContactInfo       postgres.Jsonb `json:"contact_info" swaggertype:"primitive,string"`
	Analytics         postgres.Jsonb `json:"analytics" swaggertype:"primitive,string"`
	HeaderCode        string         `json:"header_code"`
	FooterCode        string         `json:"footer_code"`
	MetaFields        postgres.Jsonb `json:"meta_fields" swaggertype:"primitive,string"`
	OrganisationID    int            `json:"organisation_id" validate:"required"`
}

// userContext is the context key under which space handlers stash the
// requesting user's ID.
var userContext config.ContextKey = "space_user"
// Router - Group of space endpoints.
func Router() chi.Router {
	router := chi.NewRouter()

	router.Post("/", create) // create a new space
	router.Get("/", my)      // list the requesting user's spaces

	// Routes addressing one specific space.
	router.Route("/{space_id}", func(sub chi.Router) {
		sub.Put("/", update)
		sub.Delete("/", delete)
	})

	return router
}
<file_sep>/test/service/core/category/update_test.go
package category
import (
"errors"
"net/http"
"net/http/httptest"
"regexp"
"testing"
"github.com/DATA-DOG/go-sqlmock"
"github.com/factly/dega-server/service"
"github.com/factly/dega-server/test"
"github.com/factly/dega-server/test/service/core/medium"
"github.com/gavv/httpexpect"
"github.com/jinzhu/gorm/dialects/postgres"
"gopkg.in/h2non/gock.v1"
)
// TestCategoryUpdate exercises PUT /core/categories/{category_id} against a
// sqlmock-backed server. Subtests that mutate the shared fixture maps
// (Data, resData) restore the original values before returning so later
// subtests see an unchanged fixture. The mock expectations in each subtest
// must match the handler's exact query order.
func TestCategoryUpdate(t *testing.T) {
	mock := test.SetupMockDB()
	test.MockServer()
	defer gock.DisableNetworking()
	testServer := httptest.NewServer(service.RegisterRoutes())
	gock.New(testServer.URL).EnableNetworking().Persist()
	defer gock.DisableNetworking()
	defer testServer.Close()
	// create httpexpect instance
	e := httpexpect.New(t, testServer.URL)
	t.Run("invalid category id", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		e.PUT(path).
			WithPath("category_id", "invalid_id").
			WithJSON(Data).
			WithHeaders(headers).
			Expect().
			Status(http.StatusBadRequest)
		test.ExpectationsMet(t, mock)
	})
	t.Run("category record not found", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		// Empty row set -> the handler's existence check fails with 404.
		mock.ExpectQuery(selectQuery).
			WithArgs(1, 1).
			WillReturnRows(sqlmock.NewRows(Columns))
		e.PUT(path).
			WithPath("category_id", "1").
			WithJSON(Data).
			WithHeaders(headers).
			Expect().
			Status(http.StatusNotFound)
		test.ExpectationsMet(t, mock)
	})
	t.Run("Unable to decode category data", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		e.PUT(path).
			WithPath("category_id", "1").
			WithHeaders(headers).
			Expect().
			Status(http.StatusUnprocessableEntity)
		test.ExpectationsMet(t, mock)
	})
	t.Run("Unprocessable category", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		e.PUT(path).
			WithPath("category_id", "1").
			WithJSON(invalidData).
			WithHeaders(headers).
			Expect().
			Status(http.StatusUnprocessableEntity)
		test.ExpectationsMet(t, mock)
	})
	t.Run("update category", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		selectWithSpace(mock)
		updateMock(mock)
		mock.ExpectCommit()
		e.PUT(path).
			WithPath("category_id", 1).
			WithHeaders(headers).
			WithJSON(Data).
			Expect().
			Status(http.StatusOK).JSON().Object().ContainsMap(resData)
		test.ExpectationsMet(t, mock)
	})
	t.Run("update category with empty slug", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		selectWithSpace(mock)
		// An empty slug makes the handler derive one from the name, which
		// requires an extra uniqueness-check query.
		slugCheckMock(mock, Data)
		updateMock(mock)
		mock.ExpectCommit()
		Data["slug"] = ""
		res := e.PUT(path).
			WithPath("category_id", 1).
			WithHeaders(headers).
			WithJSON(Data).
			Expect().
			Status(http.StatusOK).JSON().Object()
		Data["slug"] = "test-category"
		res.ContainsMap(resData)
		test.ExpectationsMet(t, mock)
	})
	t.Run("update category with parent set", func(t *testing.T) {
		Data["parent_id"] = 2
		test.CheckSpaceMock(mock)
		selectWithSpace(mock)
		// Second select: the handler verifies the parent category exists
		// in the same space.
		selectWithSpace(mock)
		mock.ExpectBegin()
		medium.SelectWithSpace(mock)
		mock.ExpectExec(`UPDATE \"categories\"`).
			WithArgs(test.AnyTime{}, Data["is_featured"], 1).
			WillReturnResult(sqlmock.NewResult(1, 1))
		medium.SelectWithSpace(mock)
		mock.ExpectExec(`UPDATE \"categories\"`).
			WithArgs(test.AnyTime{}, 1, Data["name"], Data["slug"], Data["description"], Data["html_description"], Data["parent_id"], Data["medium_id"], Data["meta_fields"], 1).
			WillReturnResult(sqlmock.NewResult(1, 1))
		selectWithSpace(mock)
		medium.SelectWithOutSpace(mock)
		mock.ExpectCommit()
		e.PUT(path).
			WithPath("category_id", 1).
			WithHeaders(headers).
			WithJSON(Data).
			Expect().
			Status(http.StatusOK).JSON().Object().ContainsMap(resData)
		test.ExpectationsMet(t, mock)
		Data["parent_id"] = 0
	})
	t.Run("parent category not found in space", func(t *testing.T) {
		Data["parent_id"] = 2
		test.CheckSpaceMock(mock)
		selectWithSpace(mock)
		// Parent lookup returns no rows -> 422.
		mock.ExpectQuery(selectQuery).
			WithArgs(sqlmock.AnyArg(), sqlmock.AnyArg()).
			WillReturnRows(sqlmock.NewRows(Columns))
		e.PUT(path).
			WithPath("category_id", 1).
			WithHeaders(headers).
			WithJSON(Data).
			Expect().
			Status(http.StatusUnprocessableEntity)
		test.ExpectationsMet(t, mock)
		Data["parent_id"] = 0
	})
	t.Run("update category with its own parent id", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		selectWithSpace(mock)
		// A category must not be its own parent.
		Data["parent_id"] = 1
		e.PUT(path).
			WithPath("category_id", 1).
			WithHeaders(headers).
			WithJSON(Data).
			Expect().
			Status(http.StatusUnprocessableEntity)
		Data["parent_id"] = 0
		test.ExpectationsMet(t, mock)
	})
	t.Run("update category with medium id = 0", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		selectWithSpace(mock)
		// medium_id 0 means "detach the medium": the handler first NULLs
		// the foreign keys, then performs the regular update.
		Data["medium_id"] = 0
		mock.ExpectBegin()
		mock.ExpectExec(`UPDATE \"categories\"`).
			WithArgs(nil, test.AnyTime{}, 1).
			WillReturnResult(sqlmock.NewResult(1, 1))
		mock.ExpectExec(`UPDATE \"categories\"`).
			WithArgs(nil, test.AnyTime{}, 1).
			WillReturnResult(sqlmock.NewResult(1, 1))
		mock.ExpectExec(`UPDATE \"categories\"`).
			WithArgs(test.AnyTime{}, Data["is_featured"], 1).
			WillReturnResult(sqlmock.NewResult(1, 1))
		mock.ExpectExec(`UPDATE \"categories\"`).
			WithArgs(test.AnyTime{}, 1, Data["name"], Data["slug"], Data["description"], Data["html_description"], Data["meta_fields"], 1).
			WillReturnResult(sqlmock.NewResult(1, 1))
		selectWithSpace(mock)
		mock.ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "media"`)).
			WithArgs(0).
			WillReturnRows(sqlmock.NewRows([]string{"id", "created_at", "updated_at", "deleted_at", "name", "slug", "type", "title", "description", "caption", "alt_text", "file_size", "url", "dimensions", "space_id"}))
		mock.ExpectCommit()
		res := e.PUT(path).
			WithPath("category_id", 1).
			WithHeaders(headers).
			WithJSON(Data).
			Expect().
			Status(http.StatusOK).JSON().Object()
		res.ContainsMap(resData)
		Data["medium_id"] = 1
		test.ExpectationsMet(t, mock)
	})
	t.Run("updating category fails", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		selectWithSpace(mock)
		mock.ExpectBegin()
		medium.SelectWithSpace(mock)
		mock.ExpectExec(`UPDATE \"categories\"`).
			WithArgs(nil, test.AnyTime{}, 1).
			WillReturnResult(sqlmock.NewResult(1, 1))
		medium.SelectWithSpace(mock)
		mock.ExpectExec(`UPDATE \"categories\"`).
			WithArgs(test.AnyTime{}, Data["is_featured"], 1).
			WillReturnResult(sqlmock.NewResult(1, 1))
		medium.SelectWithSpace(mock)
		// The final update errors -> the handler must roll back and 500.
		mock.ExpectExec(`UPDATE \"categories\"`).
			WithArgs(test.AnyTime{}, 1, Data["name"], Data["slug"], Data["description"], Data["html_description"], Data["medium_id"], Data["meta_fields"], 1).
			WillReturnError(errors.New(`updating category fails`))
		mock.ExpectRollback()
		e.PUT(path).
			WithPath("category_id", 1).
			WithHeaders(headers).
			WithJSON(Data).
			Expect().
			Status(http.StatusInternalServerError)
		test.ExpectationsMet(t, mock)
	})
	t.Run("category with same name exist", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		selectWithSpace(mock)
		Data["name"] = "New Category"
		sameNameCount(mock, 1, Data["name"])
		e.PUT(path).
			WithPath("category_id", 1).
			WithHeaders(headers).
			WithJSON(Data).
			Expect().
			Status(http.StatusUnprocessableEntity)
		test.ExpectationsMet(t, mock)
		Data["name"] = "Test Category"
	})
	t.Run("cannot parse category description", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		selectWithSpace(mock)
		// Not a valid editor-js document -> description parsing fails.
		Data["description"] = postgres.Jsonb{
			RawMessage: []byte(`{"block": "new"}`),
		}
		e.PUT(path).
			WithPath("category_id", 1).
			WithHeaders(headers).
			WithJSON(Data).
			Expect().
			Status(http.StatusUnprocessableEntity)
		Data["description"] = postgres.Jsonb{
			RawMessage: []byte(`{"time":1617039625490,"blocks":[{"type":"paragraph","data":{"text":"Test Description"}}],"version":"2.19.0"}`),
		}
		test.ExpectationsMet(t, mock)
	})
	t.Run("update category when meili is down", func(t *testing.T) {
		// A failed meili index update must roll the DB change back.
		test.DisableMeiliGock(testServer.URL)
		test.CheckSpaceMock(mock)
		selectWithSpace(mock)
		updateMock(mock)
		mock.ExpectRollback()
		e.PUT(path).
			WithPath("category_id", 1).
			WithHeaders(headers).
			WithJSON(Data).
			Expect().
			Status(http.StatusInternalServerError)
		test.ExpectationsMet(t, mock)
	})
}
<file_sep>/service/core/action/permissions/space/route.go
package space
import (
"github.com/factly/dega-server/config"
"github.com/factly/dega-server/util"
"github.com/factly/x/middlewarex"
"github.com/go-chi/chi"
)
// spacePermission is the request body for space permission handlers.
// The int64 counters are quota limits for the respective entity.
type spacePermission struct {
	SpaceID   uint  `json:"space_id" validate:"required"`
	FactCheck bool  `json:"fact_check"` // whether fact-check features are enabled
	Media     int64 `json:"media"`
	Posts     int64 `json:"posts"`
	Podcast   bool  `json:"podcast"` // whether podcast features are enabled
	Episodes  int64 `json:"episodes"`
}

// userContext is the context key under which space-permission handlers
// stash the requesting user's ID.
var userContext config.ContextKey = "space_perm_user"
// Router - Group of space permission endpoints. Everything except /my is
// restricted to members of the super organisation.
func Router() chi.Router {
	router := chi.NewRouter()

	// CheckSuperOrganisation returns a middleware closure; building it once
	// and reusing it on every guarded route is equivalent to constructing
	// it per route.
	superOnly := middlewarex.CheckSuperOrganisation("dega", util.GetOrganisation)

	router.With(superOnly).Get("/", list)
	router.Get("/my", my)
	router.With(superOnly).Post("/", create)

	router.Route("/{permission_id}", func(sub chi.Router) {
		sub.With(superOnly).Get("/", details)
		sub.With(superOnly).Put("/", update)
		sub.With(superOnly).Delete("/", delete)
	})

	return router
}
<file_sep>/service/core/model/author.go
package model
import (
"github.com/factly/dega-server/config"
"github.com/jinzhu/gorm/dialects/postgres"
)
// Author model
// Persisted author record; gorm column names are spelled out in the tags.
type Author struct {
	config.Base
	Email       string `gorm:"column:email;uniqueIndex" json:"email"` // unique across authors
	KID         string `gorm:"column:kid;" json:"kid"`
	FirstName   string `gorm:"column:first_name" json:"first_name"`
	LastName    string `gorm:"column:last_name" json:"last_name"`
	Slug        string `gorm:"column:slug" json:"slug"`
	DisplayName string `gorm:"column:display_name" json:"display_name"`
	BirthDate   string `gorm:"column:birth_date" json:"birth_date"`
	Gender      string `gorm:"column:gender" json:"gender"`
	// FeaturedMediumID is nullable so an author may have no featured medium.
	FeaturedMediumID *uint          `gorm:"column:featured_medium_id;default:NULL" json:"featured_medium_id"`
	Medium           *Medium        `gorm:"foreignKey:featured_medium_id" json:"medium"`
	SocialMediaURLs  postgres.Jsonb `gorm:"column:social_media_urls" json:"social_media_urls" swaggertype:"primitive,string"`
	Description      string         `gorm:"column:description" json:"description"`
}
<file_sep>/service/core/action/event/route.go
package event
import (
"github.com/factly/dega-server/util"
"github.com/factly/x/middlewarex"
"github.com/go-chi/chi"
"github.com/jinzhu/gorm/dialects/postgres"
)
// event is the request body for webhook event endpoints; Tags is an
// opaque JSON blob that is stored/forwarded as-is.
type event struct {
	Name  string         `json:"name" validate:"required"`
	Event string         `json:"event" validate:"required"`
	Tags  postgres.Jsonb `json:"tags" swaggertype:"primitive,string"`
}
// Router events endpoint router. Reads are open; every mutation requires
// membership of the super organisation.
func Router() chi.Router {
	router := chi.NewRouter()

	// CheckSuperOrganisation returns a middleware closure; build it once
	// and reuse it — equivalent to constructing it per route.
	superOnly := middlewarex.CheckSuperOrganisation("dega", util.GetOrganisation)

	router.Get("/", list)
	router.With(superOnly).Post("/", create)
	router.With(superOnly).Post("/default", defaults)

	router.Route("/{event_id}", func(sub chi.Router) {
		sub.Get("/", details)
		sub.With(superOnly).Put("/", update)
		sub.With(superOnly).Delete("/", delete)
	})

	return router
}
<file_sep>/test/service/core/space/my_test.go
package space
import (
"net/http"
"net/http/httptest"
"testing"
"github.com/factly/dega-server/service"
"github.com/factly/dega-server/test"
"github.com/factly/dega-server/test/service/core/medium"
"github.com/gavv/httpexpect"
"github.com/spf13/viper"
"gopkg.in/h2non/gock.v1"
)
// TestSpaceMy exercises GET /core/spaces (the "my spaces" listing) against a
// sqlmock-backed server, including failure modes when the dependent keto and
// kavach services are unreachable. Mock expectations must match the
// handler's query order exactly.
func TestSpaceMy(t *testing.T) {
	mock := test.SetupMockDB()
	defer gock.Disable()
	test.MockServer()
	defer gock.DisableNetworking()
	testServer := httptest.NewServer(service.RegisterRoutes())
	gock.New(testServer.URL).EnableNetworking().Persist()
	defer gock.DisableNetworking()
	defer testServer.Close()
	// create httpexpect instance
	e := httpexpect.New(t, testServer.URL)
	t.Run("get my spaces", func(t *testing.T) {
		SelectQuery(mock, 1)
		// One medium lookup per logo/icon field of the space.
		medium.SelectWithOutSpace(mock)
		medium.SelectWithOutSpace(mock)
		medium.SelectWithOutSpace(mock)
		medium.SelectWithOutSpace(mock)
		e.GET(basePath).
			WithHeader("X-User", "1").
			Expect().
			Status(http.StatusOK).
			JSON().
			Array().
			Element(0).
			Object().
			Value("spaces").
			Array().
			Element(0).
			Object().
			ContainsMap(resData)
		test.ExpectationsMet(t, mock)
	})
	t.Run("invalid space header", func(t *testing.T) {
		// Non-numeric X-User cannot be resolved -> 401.
		e.GET(basePath).
			WithHeader("X-User", "invalid").
			Expect().
			Status(http.StatusUnauthorized)
	})
	t.Run("when keto is down", func(t *testing.T) {
		test.DisableKetoGock(testServer.URL)
		SelectQuery(mock, 1)
		medium.SelectWithOutSpace(mock)
		medium.SelectWithOutSpace(mock)
		medium.SelectWithOutSpace(mock)
		medium.SelectWithOutSpace(mock)
		e.GET(basePath).
			WithHeader("X-User", "1").
			Expect().
			Status(http.StatusInternalServerError)
	})
	t.Run("when kavach is down", func(t *testing.T) {
		test.DisableKavachGock(testServer.URL)
		e.GET(basePath).
			WithHeader("X-User", "1").
			Expect().
			Status(http.StatusServiceUnavailable)
	})
	t.Run("when member requests his spaces", func(t *testing.T) {
		test.DisableKavachGock(testServer.URL)
		SelectQuery(mock, 1)
		medium.SelectWithOutSpace(mock)
		medium.SelectWithOutSpace(mock)
		medium.SelectWithOutSpace(mock)
		medium.SelectWithOutSpace(mock)
		// Re-stub the kavach "my organisations" endpoint with a member
		// (non-owner) response.
		gock.New(viper.GetString("kavach_url") + "/organisations/my").
			Persist().
			Reply(http.StatusOK).
			JSON(test.Dummy_Org_Member_List)
		e.GET(basePath).
			WithHeader("X-User", "1").
			Expect().
			Status(http.StatusOK)
	})
}
<file_sep>/service/core/action/request/organisation/create.go
package organisation
import (
"context"
"encoding/json"
"errors"
"net/http"
"github.com/factly/dega-server/config"
"github.com/factly/dega-server/service/core/model"
"github.com/factly/dega-server/util"
"github.com/factly/x/errorx"
"github.com/factly/x/loggerx"
"github.com/factly/x/middlewarex"
"github.com/factly/x/renderx"
"github.com/factly/x/validationx"
)
// request - Create organisation permission request
// @Summary Create organisation permission request
// @Description Create organisation permission request
// @Tags Organisation_Permissions_Request
// @ID add-org-permission-request
// @Consume json
// @Produce json
// @Param X-User header string true "User ID"
// @Param X-Space header string true "Space ID"
// @Param Request body organisationPermissionRequest true "Request Object"
// @Success 201 {object} model.OrganisationPermissionRequest
// @Failure 400 {array} string
// @Router /core/requests/organisations [post]
//
// Create validates the payload, verifies through kavach that the caller
// owns the target organisation, and stores the request with status
// "pending".
func Create(w http.ResponseWriter, r *http.Request) {
	uID, err := middlewarex.GetUser(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}

	request := organisationPermissionRequest{}
	err = json.NewDecoder(r.Body).Decode(&request)
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.DecodeError()))
		return
	}

	validationError := validationx.Check(request)
	if validationError != nil {
		loggerx.Error(errors.New("validation error"))
		errorx.Render(w, validationError)
		return
	}

	// Only the organisation's owner may file a permission request.
	isAdmin, err := util.CheckOwnerFromKavach(uID, int(request.OrganisationID))
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.GetMessage(err.Error(), http.StatusInternalServerError)))
		return
	}

	if !isAdmin {
		// Fix: this branch previously logged `err`, which is always nil
		// here (the non-nil case returned above), so the denial reason
		// never reached the logs.
		loggerx.Error(errors.New("user is not the owner of the organisation"))
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}

	result := model.OrganisationPermissionRequest{
		Request: model.Request{
			Title:       request.Title,
			Description: request.Description,
			Status:      "pending", // all new requests await review
		},
		OrganisationID: request.OrganisationID,
		Spaces:         request.Spaces,
	}

	// The user ID is carried in the context so model hooks can record who
	// created the request.
	err = config.DB.WithContext(context.WithValue(r.Context(), permissionContext, uID)).Model(&model.OrganisationPermissionRequest{}).Create(&result).Error
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.DBError()))
		return
	}

	renderx.JSON(w, http.StatusCreated, result)
}
<file_sep>/test/service/fact-check/rating/update_test.go
package rating
import (
"errors"
"fmt"
"net/http"
"net/http/httptest"
"testing"
"github.com/DATA-DOG/go-sqlmock"
"github.com/factly/dega-server/service"
"github.com/factly/dega-server/test"
"github.com/factly/dega-server/test/service/core/permissions/space"
"github.com/gavv/httpexpect/v2"
"github.com/jinzhu/gorm/dialects/postgres"
"gopkg.in/h2non/gock.v1"
)
// updatedRating is the request payload shared by the rating update
// subtests; individual subtests tweak single keys (e.g. "slug") and
// restore them before returning.
var updatedRating = map[string]interface{}{
	"name": "True",
	"description": postgres.Jsonb{
		RawMessage: []byte(`{"time":1617039625490,"blocks":[{"type":"paragraph","data":{"text":"Test Description"}}],"version":"2.19.0"}`),
	},
	"html_description": "<p>Test Description</p>",
	"background_colour": postgres.Jsonb{
		RawMessage: []byte(`"green"`),
	},
	"text_colour": postgres.Jsonb{
		RawMessage: []byte(`"green"`),
	},
	"numeric_value": 5,
	"medium_id":     uint(1),
}
// TestRatingUpdate exercises PUT /fact-check/ratings/{rating_id}.
//
// NOTE: sqlmock expectations are strictly ordered, so the mock setup calls in
// each sub-test must mirror the exact sequence of queries the handler issues
// (space check, space select, rating select, slug/name/numeric-value checks,
// UPDATE, commit/rollback). Sub-tests also mutate the shared updatedRating
// and Data fixtures and restore them, so test order matters.
func TestRatingUpdate(t *testing.T) {
	mock := test.SetupMockDB()

	test.MockServer()
	gock.DisableNetworking()

	testServer := httptest.NewServer(service.RegisterRoutes())
	gock.New(testServer.URL).EnableNetworking().Persist()
	defer gock.DisableNetworking()
	defer testServer.Close()

	// create httpexpect instance
	e := httpexpect.New(t, testServer.URL)

	t.Run("invalid rating id", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)
		e.PUT(path).
			WithPath("rating_id", "invalid_id").
			WithHeaders(headers).
			Expect().
			Status(http.StatusBadRequest)
	})

	t.Run("rating record not found", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)
		recordNotFoundMock(mock)

		e.PUT(path).
			WithPath("rating_id", "100").
			WithHeaders(headers).
			WithJSON(updatedRating).
			Expect().
			Status(http.StatusNotFound)
	})

	// No JSON body at all -> decode failure.
	t.Run("Unable to decode rating data", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)
		e.PUT(path).
			WithPath("rating_id", 1).
			WithHeaders(headers).
			Expect().
			Status(http.StatusUnprocessableEntity)
		test.ExpectationsMet(t, mock)
	})

	t.Run("Unprocessable rating", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)
		e.PUT(path).
			WithPath("rating_id", 1).
			WithHeaders(headers).
			WithJSON(invalidData).
			Expect().
			Status(http.StatusUnprocessableEntity)
		test.ExpectationsMet(t, mock)
	})

	t.Run("update rating", func(t *testing.T) {
		updatedRating["slug"] = "true"
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)
		SelectWithSpace(mock)
		ratingUpdateMock(mock, updatedRating, nil)
		mock.ExpectCommit()

		e.PUT(path).
			WithPath("rating_id", 1).
			WithHeaders(headers).
			WithJSON(updatedRating).
			Expect().
			Status(http.StatusOK).JSON().Object().ContainsMap(resData)
		test.ExpectationsMet(t, mock)
	})

	// An empty slug should be regenerated from the name by the handler.
	t.Run("update rating by id with empty slug", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)
		updatedRating["slug"] = "true"
		SelectWithSpace(mock)
		slugCheckMock(mock, Data)
		ratingUpdateMock(mock, updatedRating, nil)
		mock.ExpectCommit()

		Data["slug"] = ""
		e.PUT(path).
			WithPath("rating_id", 1).
			WithHeaders(headers).
			WithJSON(Data).
			Expect().
			Status(http.StatusOK).JSON().Object().ContainsMap(resData)
		Data["slug"] = "true"
		test.ExpectationsMet(t, mock)
	})

	// A changed slug triggers a prefix-match uniqueness query on "ratings".
	t.Run("update rating with different slug", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)
		updatedRating["slug"] = "true-test"
		SelectWithSpace(mock)
		mock.ExpectQuery(`SELECT slug, space_id FROM "ratings"`).
			WithArgs(fmt.Sprint(updatedRating["slug"], "%"), 1).
			WillReturnRows(sqlmock.NewRows([]string{"slug", "space_id"}))

		ratingUpdateMock(mock, updatedRating, nil)
		mock.ExpectCommit()

		e.PUT(path).
			WithPath("rating_id", 1).
			WithHeaders(headers).
			WithJSON(updatedRating).
			Expect().
			Status(http.StatusOK).JSON().Object().ContainsMap(resData)
		test.ExpectationsMet(t, mock)
	})

	// The UPDATE itself fails (medium FK lookup error) -> rollback + 500.
	t.Run("medium not found", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)
		updatedRating["slug"] = "true-test"
		SelectWithSpace(mock)
		mock.ExpectQuery(`SELECT slug, space_id FROM "ratings"`).
			WithArgs(fmt.Sprint(updatedRating["slug"], "%"), 1).
			WillReturnRows(sqlmock.NewRows([]string{"slug", "space_id"}))

		ratingUpdateMock(mock, updatedRating, errors.New("record not found"))
		mock.ExpectRollback()

		e.PUT(path).
			WithPath("rating_id", 1).
			WithHeaders(headers).
			WithJSON(updatedRating).
			Expect().
			Status(http.StatusInternalServerError)
		test.ExpectationsMet(t, mock)
	})

	// Description that is not valid editorjs content cannot be rendered to HTML.
	t.Run("cannot parse rating description", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)
		SelectWithSpace(mock)
		mock.ExpectQuery(`SELECT slug, space_id FROM "ratings"`).
			WithArgs(fmt.Sprint(updatedRating["slug"], "%"), 1).
			WillReturnRows(sqlmock.NewRows([]string{"slug", "space_id"}))

		updatedRating["description"] = postgres.Jsonb{
			RawMessage: []byte(`{"block": "new"}`),
		}
		e.PUT(path).
			WithPath("rating_id", 1).
			WithHeaders(headers).
			WithJSON(updatedRating).
			Expect().
			Status(http.StatusUnprocessableEntity)
		test.ExpectationsMet(t, mock)
		// Restore the fixture for the following sub-tests.
		updatedRating["description"] = postgres.Jsonb{
			RawMessage: []byte(`{"time":1617039625490,"blocks":[{"type":"paragraph","data":{"text":"Test Description"}}],"version":"2.19.0"}`),
		}
		updatedRating["name"] = "True"
	})

	t.Run("rating with same name exist", func(t *testing.T) {
		updatedRating["slug"] = "true"
		updatedRating["name"] = "New Rating"
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)
		SelectWithSpace(mock)
		sameNameCount(mock, 1, updatedRating["name"])

		e.PUT(path).
			WithPath("rating_id", 1).
			WithHeaders(headers).
			WithJSON(updatedRating).
			Expect().
			Status(http.StatusUnprocessableEntity)
		test.ExpectationsMet(t, mock)
		updatedRating["name"] = "True"
	})

	t.Run("rating with same numeric value exist", func(t *testing.T) {
		updatedRating["slug"] = "true"
		updatedRating["numeric_value"] = 3
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)
		SelectWithSpace(mock)
		ratingCountQuery(mock, 1)

		e.PUT(path).
			WithPath("rating_id", 1).
			WithHeaders(headers).
			WithJSON(updatedRating).
			Expect().
			Status(http.StatusUnprocessableEntity)
		test.ExpectationsMet(t, mock)
		updatedRating["numeric_value"] = 5
	})

	// Meilisearch unreachable -> DB transaction is rolled back and 500 returned.
	t.Run("update rating when meili is down", func(t *testing.T) {
		test.DisableMeiliGock(testServer.URL)
		updatedRating["slug"] = "true"
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)
		SelectWithSpace(mock)
		ratingUpdateMock(mock, updatedRating, nil)
		mock.ExpectRollback()

		e.PUT(path).
			WithPath("rating_id", 1).
			WithHeaders(headers).
			WithJSON(updatedRating).
			Expect().
			Status(http.StatusInternalServerError)
		test.ExpectationsMet(t, mock)
	})
}
<file_sep>/service/core/action/user/route.go
package user
import "github.com/go-chi/chi"
// Router - Group of user router.
// (The original comment said "currency router" — a copy-paste slip; this file
// registers the user routes.)
//
//	GET /                       -> list
//	GET /{user_id}/permissions  -> userpermissions
func Router() chi.Router {
	r := chi.NewRouter()

	r.Get("/", list)
	r.Get("/{user_id}/permissions", userpermissions)

	return r
}
<file_sep>/util/description.go
package util
import (
"encoding/json"
"github.com/factly/x/editorx"
"github.com/jinzhu/gorm/dialects/postgres"
)
// HTMLDescription renders an editorjs description, stored as a postgres Jsonb
// value, into its HTML string form. It returns an error when the raw JSON
// cannot be unmarshalled or when the editorjs-to-HTML conversion fails.
func HTMLDescription(jsonData postgres.Jsonb) (string, error) {
	var blocks map[string]interface{}
	if err := json.Unmarshal(jsonData.RawMessage, &blocks); err != nil {
		return "", err
	}
	return editorx.EditorjsToHTML(blocks)
}
<file_sep>/test/service/core/space/testvars.go
package space
import (
"database/sql/driver"
"encoding/json"
"regexp"
"time"
"github.com/DATA-DOG/go-sqlmock"
"github.com/factly/dega-server/test"
"github.com/factly/dega-server/test/service/core/medium"
"github.com/factly/dega-server/test/service/core/permissions/organisation"
"github.com/jinzhu/gorm/dialects/postgres"
)
// nilJsonb builds a Jsonb value containing JSON null, used to populate the
// optional jsonb columns in the fixtures below.
func nilJsonb() postgres.Jsonb {
	raw, _ := json.Marshal(nil)
	return postgres.Jsonb{RawMessage: raw}
}
// Data is the valid space payload used by the create/update fixtures.
var Data map[string]interface{} = map[string]interface{}{
	"name":               "Test Space",
	"slug":               "test-space",
	"site_title":         "Test site title",
	"tag_line":           "Test tagline",
	"description":        "Test Desc",
	"site_address":       "testaddress.com",
	"logo_id":            1,
	"logo_mobile_id":     1,
	"fav_icon_id":        1,
	"mobile_icon_id":     1,
	"verification_codes": nilJsonb(),
	"social_media_urls":  nilJsonb(),
	"contact_info":       nilJsonb(),
	"analytics":          nilJsonb(),
	"organisation_id":    1,
}

// resData is the subset of Data expected back in responses (media IDs omitted).
var resData map[string]interface{} = map[string]interface{}{
	"name":               "Test Space",
	"slug":               "test-space",
	"site_title":         "Test site title",
	"tag_line":           "Test tagline",
	"description":        "Test Desc",
	"site_address":       "testaddress.com",
	"verification_codes": nilJsonb(),
	"social_media_urls":  nilJsonb(),
	"contact_info":       nilJsonb(),
	"analytics":          nilJsonb(),
	"organisation_id":    1,
}

// invalidData is a payload that fails validation ("nam" typo key, zero
// organisation id).
var invalidData map[string]interface{} = map[string]interface{}{
	"nam":             "Te",
	"slug":            "test-space",
	"organisation_id": 0,
}

// Columns lists the "spaces" table columns in the exact order mock rows are
// built; keep in sync with AddRow calls below.
var Columns = []string{"id", "created_at", "updated_at", "deleted_at", "created_by_id", "updated_by_id", "name", "slug", "site_title", "tag_line", "description", "site_address", "logo_id", "logo_mobile_id", "fav_icon_id", "mobile_icon_id", "verification_codes", "social_media_urls", "contact_info", "analytics", "organisation_id"}

// Regexp-escaped SQL fragments matched against the queries the handlers emit.
var selectQuery string = regexp.QuoteMeta(`SELECT * FROM "spaces"`)
var deleteQuery string = regexp.QuoteMeta(`UPDATE "spaces" SET "deleted_at"=`)
var countQuery string = regexp.QuoteMeta(`SELECT count(*) FROM "spaces"`)

// Routes under test.
const path string = "/core/spaces/{space_id}"
const basePath string = "/core/spaces"
// SelectQuery queues a SELECT on "spaces" that returns one row built from the
// Data fixture; args are the bound query arguments the caller expects.
func SelectQuery(mock sqlmock.Sqlmock, args ...driver.Value) {
	mock.ExpectQuery(selectQuery).
		WithArgs(args...).
		WillReturnRows(sqlmock.NewRows(Columns).
			AddRow(1, time.Now(), time.Now(), nil, 1, 1, Data["name"], Data["slug"], Data["site_title"], Data["tag_line"], Data["description"], Data["site_address"], Data["logo_id"], Data["logo_mobile_id"], Data["fav_icon_id"], Data["mobile_icon_id"], Data["verification_codes"], Data["social_media_urls"], Data["contact_info"], Data["analytics"], Data["organisation_id"]))
}
// slugCheckMock queues the slug-uniqueness lookup (prefix match "<slug>%")
// returning no rows, so the handler keeps the requested slug unchanged.
func slugCheckMock(mock sqlmock.Sqlmock) {
	mock.ExpectQuery(selectQuery).
		WithArgs(Data["slug"].(string) + "%").
		WillReturnRows(sqlmock.NewRows(Columns))
}
// insertMock queues the full, ordered expectation chain for creating a space:
// organisation-permission lookup, space count, slug check, the INSERT itself,
// and the follow-up space_permissions INSERT. Order must match the handler.
func insertMock(mock sqlmock.Sqlmock) {
	organisation.SelectQuery(mock, 1)
	mock.ExpectQuery(countQuery).
		WithArgs(1).
		WillReturnRows(sqlmock.NewRows([]string{"count"}).AddRow(1))
	slugCheckMock(mock)
	mock.ExpectBegin()
	mock.ExpectQuery(`INSERT INTO "spaces"`).
		WithArgs(test.AnyTime{}, test.AnyTime{}, nil, 1, 1, Data["name"], Data["slug"], Data["site_title"], Data["tag_line"], Data["description"], Data["site_address"], Data["verification_codes"], Data["social_media_urls"], Data["contact_info"], Data["analytics"], Data["organisation_id"]).
		WillReturnRows(sqlmock.
			NewRows([]string{"fav_icon_id", "mobile_icon_id", "logo_id", "logo_mobile_id", "id"}).
			AddRow(1, 1, 1, 1, 1))
	spacePermissionCreateQuery(mock)
}
// mediumNotFound queues a media lookup that returns no rows, simulating a
// missing/foreign-space medium.
func mediumNotFound(mock sqlmock.Sqlmock) {
	mock.ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "media"`)).
		WithArgs(1, 1).
		WillReturnRows(sqlmock.NewRows([]string{"id", "created_at", "updated_at", "deleted_at", "name", "slug", "type", "title", "description", "caption", "alt_text", "file_size", "url", "dimensions", "space_id"}))
}
// updateMock queues the expectation chain for a space update: initial fetch,
// slug check, one media lookup per attached medium (logo, mobile logo,
// fav icon, mobile icon), the UPDATE, then the post-update refetch with the
// media loaded without the space filter.
func updateMock(mock sqlmock.Sqlmock) {
	mock.ExpectQuery(selectQuery).
		WithArgs(1).
		WillReturnRows(sqlmock.NewRows(Columns).
			AddRow(1, time.Now(), time.Now(), nil, 1, 1, "name", "slug", "site_title", "tag_line", nilJsonb(), "site_address", 1, 1, 1, 1, nilJsonb(), nilJsonb(), nilJsonb(), nilJsonb(), 1))
	mock.ExpectBegin()
	slugCheckMock(mock)
	// Four media FK validations, one per medium field on the space.
	medium.SelectWithSpace(mock)
	medium.SelectWithSpace(mock)
	medium.SelectWithSpace(mock)
	medium.SelectWithSpace(mock)
	mock.ExpectExec(`UPDATE \"spaces\"`).
		WithArgs(test.AnyTime{}, 1, Data["name"], Data["slug"], Data["site_title"], Data["tag_line"], Data["description"], Data["site_address"], Data["logo_id"], Data["logo_mobile_id"], Data["fav_icon_id"], Data["mobile_icon_id"], Data["verification_codes"], Data["social_media_urls"], Data["contact_info"], Data["analytics"], 1).
		WillReturnResult(sqlmock.NewResult(1, 1))
	SelectQuery(mock, 1, 1)
	medium.SelectWithOutSpace(mock)
	medium.SelectWithOutSpace(mock)
	medium.SelectWithOutSpace(mock)
	medium.SelectWithOutSpace(mock)
}
// oneMediaIDZeroMock mirrors updateMock for the case where one media ID is
// cleared: the handler first NULLs the dropped medium (the nil-arg UPDATE),
// then performs the real update with only three media validations.
// updateargs are the expected bound arguments of the second UPDATE.
func oneMediaIDZeroMock(mock sqlmock.Sqlmock, updateargs ...driver.Value) {
	mock.ExpectQuery(selectQuery).
		WithArgs(1).
		WillReturnRows(sqlmock.NewRows(Columns).
			AddRow(1, time.Now(), time.Now(), nil, 1, 1, "name", "slug", "site_title", "tag_line", nilJsonb(), "site_address", 1, 1, 1, 1, nilJsonb(), nilJsonb(), nilJsonb(), nilJsonb(), 1))
	mock.ExpectBegin()
	medium.SelectWithSpace(mock)
	medium.SelectWithSpace(mock)
	medium.SelectWithSpace(mock)
	// First UPDATE sets the dropped medium column to NULL.
	mock.ExpectExec(`UPDATE \"spaces\"`).
		WithArgs(nil, test.AnyTime{}, 1).
		WillReturnResult(sqlmock.NewResult(1, 1))
	slugCheckMock(mock)
	medium.SelectWithSpace(mock)
	medium.SelectWithSpace(mock)
	medium.SelectWithSpace(mock)
	mock.ExpectExec(`UPDATE \"spaces\"`).
		WithArgs(updateargs...).
		WillReturnResult(sqlmock.NewResult(1, 1))
	SelectQuery(mock, 1, 1)
	medium.SelectWithOutSpace(mock)
	medium.SelectWithOutSpace(mock)
	medium.SelectWithOutSpace(mock)
	medium.SelectWithOutSpace(mock)
	mock.ExpectCommit()
}
// spacePermissionCreateQuery queues the INSERT of the default
// space_permissions row created alongside a new space.
func spacePermissionCreateQuery(mock sqlmock.Sqlmock) {
	mock.ExpectQuery(`INSERT INTO "space_permissions"`).
		WithArgs(test.AnyTime{}, test.AnyTime{}, nil, 1, 1, true, 1, -1, -1, true, -1).
		WillReturnRows(sqlmock.
			NewRows([]string{"id"}).
			AddRow(1))
}
<file_sep>/service/core/action/policy/update.go
package policy
import (
"encoding/json"
"fmt"
"net/http"
"github.com/factly/dega-server/service/core/action/author"
"github.com/factly/dega-server/util"
"github.com/factly/x/errorx"
"github.com/factly/x/loggerx"
"github.com/factly/x/meilisearchx"
"github.com/factly/x/middlewarex"
"github.com/factly/x/renderx"
"github.com/go-chi/chi"
"github.com/spf13/viper"
)
// update - Update policy
// @Summary Update policy
// @Description Update policy
// @Tags Policy
// @ID update-policy
// @Consume json
// @Produce json
// @Param X-User header string true "User ID"
// @Param X-Space header string true "Space ID"
// @Param Policy body policyReq true "Policy Object"
// @Param policy_id path string true "Policy ID"
// @Success 200 {object} model.Policy
// @Router /core/policies/{policy_id} [put]
func update(w http.ResponseWriter, r *http.Request) {
	// Space, user and organisation IDs all come from middleware-populated
	// context values; any miss means the request is unauthenticated.
	spaceID, err := middlewarex.GetSpace(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}

	userID, err := middlewarex.GetUser(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}

	organisationID, err := util.GetOrganisation(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}

	/* create new policy */
	policyReq := policyReq{}
	err = json.NewDecoder(r.Body).Decode(&policyReq)
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.DecodeError()))
		return
	}

	/* delete old policy */
	// Keto policy IDs are namespaced as "id:org:<org>:app:dega:space:<space>:<id>".
	commanPolicyString := fmt.Sprint(":org:", organisationID, ":app:dega:space:", spaceID, ":")
	policyID := chi.URLParam(r, "policy_id")
	policyID = "id" + commanPolicyString + policyID

	req, err := http.NewRequest("DELETE", viper.GetString("keto_url")+"/engines/acp/ory/regex/policies/"+policyID, nil)
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
		return
	}
	req.Header.Set("Content-Type", "application/json")

	client := &http.Client{}
	resp, err := client.Do(req)
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.NetworkError()))
		return
	}

	defer resp.Body.Close()
	// NOTE(review): resp.StatusCode is never inspected, so a failed delete in
	// keto (e.g. policy not found or server error) is silently ignored and the
	// update proceeds — confirm this best-effort behaviour is intended.

	/* User req */
	// Composer builds the replacement policy from the request; Mapper combines
	// it with author details into the response model (presumably it also
	// persists the new policy in keto — verify in Mapper's definition).
	result := Mapper(Composer(organisationID, spaceID, policyReq), author.Mapper(organisationID, userID))

	// Update into meili index
	meiliObj := map[string]interface{}{
		"id":          result.ID,
		"kind":        "policy",
		"name":        result.Name,
		"description": result.Description,
	}

	err = meilisearchx.UpdateDocument("dega", meiliObj)
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
		return
	}

	// Broadcast the change when NATS is configured.
	if util.CheckNats() {
		if err = util.NC.Publish("policy.updated", result); err != nil {
			loggerx.Error(err)
			errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
			return
		}
	}

	renderx.JSON(w, http.StatusOK, result)
}
<file_sep>/test/service/core/user/testvars.go
package user
// Routes under test.
var path string = "/core/users"

// headers carry the user and space IDs the auth middleware expects.
var headers = map[string]string{
	"X-User":  "1",
	"X-Space": "1",
}

var permissionPath string = "/core/users/{user_id}/permissions"

// permissionsResponse is the per-resource permission fixture for a regular user.
var permissionsResponse = []map[string]interface{}{
	map[string]interface{}{
		"resource": "tags",
		"actions":  []string{"update", "delete"},
	},
	map[string]interface{}{
		"resource": "categories",
		"actions":  []string{"get", "create"},
	},
}

// adminPermissionsResponse is the blanket permission fixture for an admin user.
var adminPermissionsResponse = map[string]interface{}{
	"resource": "admin",
	"actions":  []string{"admin"},
}
<file_sep>/test/service/core/post/delete_test.go
package post
import (
"net/http"
"net/http/httptest"
"regexp"
"testing"
"github.com/DATA-DOG/go-sqlmock"
"github.com/factly/dega-server/service"
"github.com/factly/dega-server/test"
"github.com/factly/dega-server/test/service/core/category"
"github.com/factly/dega-server/test/service/core/tag"
"github.com/gavv/httpexpect/v2"
"gopkg.in/h2non/gock.v1"
)
// TestPostDelete exercises DELETE /core/posts/{post_id}.
//
// NOTE: sqlmock expectations are strictly ordered; each sub-test queues the
// exact query sequence the handler issues (post fetch, post_categories and
// post_tags association fetches, then the delete and commit/rollback).
func TestPostDelete(t *testing.T) {
	mock := test.SetupMockDB()

	test.MockServer()
	defer gock.DisableNetworking()

	testServer := httptest.NewServer(service.RegisterRoutes())
	gock.New(testServer.URL).EnableNetworking().Persist()
	defer gock.DisableNetworking()
	defer testServer.Close()

	// create httpexpect instance
	e := httpexpect.New(t, testServer.URL)

	t.Run("invalid post id", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		e.DELETE(path).
			WithPath("post_id", "invalid_id").
			WithHeaders(headers).
			Expect().
			Status(http.StatusBadRequest)
	})

	t.Run("post record not found", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		recordNotFoundMock(mock)

		e.DELETE(path).
			WithPath("post_id", "100").
			WithHeaders(headers).
			Expect().
			Status(http.StatusNotFound)
	})

	t.Run("post record deleted", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		postSelectWithSpace(mock)
		mock.ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "post_categories"`)).
			WithArgs(1).
			WillReturnRows(sqlmock.NewRows([]string{"post_id", "category_id"}).
				AddRow(1, 1))
		category.SelectWithOutSpace(mock)
		mock.ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "post_tags"`)).
			WithArgs(1).
			WillReturnRows(sqlmock.NewRows([]string{"post_id", "tag_id"}).
				AddRow(1, 1))
		tag.SelectMock(mock, tag.Data, 1)
		deleteMock(mock)
		mock.ExpectCommit()

		e.DELETE(path).
			WithPath("post_id", 1).
			WithHeaders(headers).
			Expect().
			Status(http.StatusOK)
	})

	// Meilisearch unreachable -> DB transaction rolled back, 500 returned.
	t.Run("delete when meili is down", func(t *testing.T) {
		test.DisableMeiliGock(testServer.URL)
		test.CheckSpaceMock(mock)
		postSelectWithSpace(mock)
		mock.ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "post_categories"`)).
			WithArgs(1).
			WillReturnRows(sqlmock.NewRows([]string{"post_id", "category_id"}).
				AddRow(1, 1))
		category.SelectWithOutSpace(mock)
		mock.ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "post_tags"`)).
			WithArgs(1).
			WillReturnRows(sqlmock.NewRows([]string{"post_id", "tag_id"}).
				AddRow(1, 1))
		tag.SelectMock(mock, tag.Data, 1)
		deleteMock(mock)
		mock.ExpectRollback()

		e.DELETE(path).
			WithPath("post_id", 1).
			WithHeaders(headers).
			Expect().
			Status(http.StatusInternalServerError)
	})
}
<file_sep>/service/core/action/request/organisation/list.go
package organisation
import (
"net/http"
"github.com/factly/dega-server/config"
"github.com/factly/x/paginationx"
"github.com/factly/x/renderx"
"github.com/factly/dega-server/service/core/model"
)
// paging is the paginated response envelope for organisation permission
// requests: Nodes holds the current page, Total the overall match count.
type paging struct {
	Nodes []model.OrganisationPermissionRequest `json:"nodes"`
	Total int64                                 `json:"total"`
}
// list - Get all organisation permissions requests
// @Summary Show all organisation permissions requests
// @Description Get all organisation permissions requests
// @Tags Organisation_Permissions_Request
// @ID get-all-org-permissions-requests
// @Produce json
// @Param X-User header string true "User ID"
// @Param X-Space header string true "Space ID"
// @Param status query string false "Status"
// @Success 200 {array} paging
// @Router /core/requests/organisations [get]
func list(w http.ResponseWriter, r *http.Request) {
	query := r.URL.Query()

	// Default to pending requests; "all" disables the status filter.
	status := query.Get("status")
	if status == "" {
		status = "pending"
	}

	offset, limit := paginationx.Parse(query)

	result := paging{Nodes: make([]model.OrganisationPermissionRequest, 0)}

	stmt := config.DB.Model(&model.OrganisationPermissionRequest{})
	if status != "all" {
		stmt = stmt.Where("status = ?", status)
	}
	stmt.Count(&result.Total).Offset(offset).Limit(limit).Find(&result.Nodes)

	renderx.JSON(w, http.StatusOK, result)
}
<file_sep>/service/core/model/category.go
package model
import (
"errors"
"gorm.io/gorm"
"github.com/factly/dega-server/config"
"github.com/jinzhu/gorm/dialects/postgres"
)
// Category model
//
// A space-scoped, optionally hierarchical grouping for posts. ParentID and
// MediumID are nullable foreign keys (default NULL), so categories may be
// top-level and have no image.
type Category struct {
	config.Base
	Name            string         `gorm:"column:name" json:"name"`
	Slug            string         `gorm:"column:slug" json:"slug"`
	Description     postgres.Jsonb `gorm:"column:description" json:"description" swaggertype:"primitive,string"`
	HTMLDescription string         `gorm:"column:html_description" json:"html_description,omitempty"`
	ParentID        *uint          `gorm:"column:parent_id;default:NULL" json:"parent_id"`
	MediumID        *uint          `gorm:"column:medium_id;default:NULL" json:"medium_id"`
	Medium          *Medium        `json:"medium"`
	IsFeatured      bool           `gorm:"column:is_featured" json:"is_featured"`
	SpaceID         uint           `gorm:"column:space_id" json:"space_id"`
	Posts           []*Post        `gorm:"many2many:post_categories;" json:"posts"`
	Space           *Space         `json:"space,omitempty"`
	MetaFields      postgres.Jsonb `gorm:"column:meta_fields" json:"meta_fields" swaggertype:"primitive,string"`
}
// BeforeSave - validation for medium
//
// GORM hook: when a medium is attached (non-nil, non-zero MediumID), verify
// that the medium exists within the category's own space; a lookup miss
// aborts the save with an error.
func (category *Category) BeforeSave(tx *gorm.DB) (e error) {
	if category.MediumID != nil && *category.MediumID > 0 {
		medium := Medium{}
		medium.ID = *category.MediumID
		// Constrain the lookup to the category's space; a miss means the
		// referenced medium is absent or belongs to another space.
		err := tx.Model(&Medium{}).Where(Medium{
			SpaceID: category.SpaceID,
		}).First(&medium).Error

		if err != nil {
			return errors.New("medium do not belong to same space")
		}
	}

	return nil
}
// categoryUser is the context key under which handlers stash the acting
// user's ID so GORM hooks can stamp audit columns.
var categoryUser config.ContextKey = "category_user"
// BeforeCreate hook
//
// Stamps CreatedByID/UpdatedByID from the user ID the handler placed in the
// statement context; when no user ID is present the stamps are left untouched.
func (category *Category) BeforeCreate(tx *gorm.DB) error {
	val := tx.Statement.Context.Value(categoryUser)
	if val == nil {
		return nil
	}

	id := uint(val.(int))
	category.CreatedByID = id
	category.UpdatedByID = id
	return nil
}
<file_sep>/service/core/action/post/create.go
package post
import (
"context"
"encoding/json"
"errors"
"fmt"
"net/http"
"reflect"
"time"
"github.com/factly/dega-server/config"
"github.com/factly/dega-server/service/core/action/author"
"github.com/factly/dega-server/service/core/model"
factCheckModel "github.com/factly/dega-server/service/fact-check/model"
"github.com/factly/dega-server/test"
"github.com/factly/dega-server/util"
"github.com/factly/x/errorx"
"github.com/factly/x/loggerx"
"github.com/factly/x/meilisearchx"
"github.com/factly/x/middlewarex"
"github.com/factly/x/renderx"
"github.com/factly/x/schemax"
"github.com/factly/x/slugx"
"github.com/factly/x/validationx"
"github.com/jinzhu/gorm/dialects/postgres"
"github.com/spf13/viper"
"gorm.io/gorm"
)
// create - Create post
// @Summary Create post
// @Description Create post
// @Tags Post
// @ID add-post
// @Consume json
// @Produce json
// @Param X-User header string true "User ID"
// @Param X-Space header string true "Space ID"
// @Param Post body post true "Post Object"
// @Success 201 {object} postData
// @Router /core/posts [post]
func create(w http.ResponseWriter, r *http.Request) {
	ctx := r.Context()

	spaceID, err := middlewarex.GetSpace(ctx)
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}

	userID, err := middlewarex.GetUser(ctx)
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}

	orgID, err := util.GetOrganisation(ctx)
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}

	req := post{}
	if err = json.NewDecoder(r.Body).Decode(&req); err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.DecodeError()))
		return
	}

	if validationError := validationx.Check(req); validationError != nil {
		loggerx.Error(errors.New("validation error"))
		errorx.Render(w, validationError)
		return
	}

	// A post is stored as a draft unless the caller asked for — and is
	// permitted — another status.
	status := "draft"
	switch req.Status {
	case "publish":
		if len(req.AuthorIDs) == 0 {
			errorx.Render(w, errorx.Parser(errorx.GetMessage("cannot publish post without author", http.StatusUnprocessableEntity)))
			return
		}
		permStatus, err := getPublishPermissions(orgID, spaceID, userID)
		if err != nil {
			loggerx.Error(err)
			errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
			return
		}
		// Without publish permission the post silently falls back to draft.
		if permStatus == http.StatusOK {
			status = "publish"
		}
	case "ready":
		status = "ready"
	}

	req.SpaceID = uint(spaceID)

	result, errMessage := createPost(ctx, req, status)
	if errMessage.Code != 0 {
		errorx.Render(w, errorx.Parser(errMessage))
		return
	}

	renderx.JSON(w, http.StatusCreated, result)
}
// createPost performs the heavy lifting of post creation: the per-space post
// quota check (super-organisation installs only), slug generation, HTML
// rendering of the editorjs description, a single DB transaction inserting
// the post with its tags/categories/claims/authors, schemax schema
// generation, meilisearch indexing, and finally NATS events. It returns the
// populated postData, or a non-zero errorx.Message on failure.
func createPost(ctx context.Context, post post, status string) (*postData, errorx.Message) {
	result := &postData{}
	result.Authors = make([]model.Author, 0)
	result.Claims = make([]factCheckModel.Claim, 0)

	sID, err := middlewarex.GetSpace(ctx)
	if err != nil {
		loggerx.Error(err)
		return nil, errorx.Unauthorized()
	}
	uID, err := middlewarex.GetUser(ctx)
	if err != nil {
		loggerx.Error(err)
		return nil, errorx.Unauthorized()
	}

	// Enforce the space's post quota only on super-organisation installs.
	if viper.GetBool("create_super_organisation") {
		// Fetch space permissions
		permission := model.SpacePermission{}
		err = config.DB.Model(&model.SpacePermission{}).Where(&model.SpacePermission{
			SpaceID: uint(sID),
		}).First(&permission).Error
		if err != nil {
			return nil, errorx.GetMessage("cannot create more posts", http.StatusUnprocessableEntity)
		}

		// Fetch total number of posts in space (templates excluded).
		var totPosts int64
		config.DB.Model(&model.Post{}).Where(&model.Post{
			SpaceID: uint(sID),
		}).Where("status != 'template'").Count(&totPosts)

		// A non-positive permission.Posts means unlimited.
		if totPosts >= permission.Posts && permission.Posts > 0 {
			return nil, errorx.GetMessage("cannot create more posts", http.StatusUnprocessableEntity)
		}
	}

	// Get table name (needed by slugx.Approve for the uniqueness query).
	stmt := &gorm.Statement{DB: config.DB}
	_ = stmt.Parse(&model.Post{})
	tableName := stmt.Schema.Table

	var postSlug string
	if post.Slug != "" && slugx.Check(post.Slug) {
		postSlug = post.Slug
	} else {
		postSlug = slugx.Make(post.Title)
	}

	// Zero medium ID is stored as NULL.
	featuredMediumID := &post.FeaturedMediumID
	if post.FeaturedMediumID == 0 {
		featuredMediumID = nil
	}

	// Store HTML description rendered from the editorjs JSON, when present.
	var description string
	if len(post.Description.RawMessage) > 0 && !reflect.DeepEqual(post.Description, test.NilJsonb()) {
		description, err = util.HTMLDescription(post.Description)
		if err != nil {
			return nil, errorx.GetMessage("cannot parse post description", http.StatusUnprocessableEntity)
		}
	}

	result.Post = model.Post{
		Title:            post.Title,
		Slug:             slugx.Approve(&config.DB, postSlug, sID, tableName),
		Status:           status,
		IsPage:           post.IsPage,
		Subtitle:         post.Subtitle,
		Excerpt:          post.Excerpt,
		Description:      post.Description,
		HTMLDescription:  description,
		IsHighlighted:    post.IsHighlighted,
		IsSticky:         post.IsSticky,
		FeaturedMediumID: featuredMediumID,
		FormatID:         post.FormatID,
		Meta:             post.Meta,
		HeaderCode:       post.HeaderCode,
		FooterCode:       post.FooterCode,
		MetaFields:       post.MetaFields,
		SpaceID:          uint(sID),
	}

	// Published date is only meaningful for published posts; default to now.
	if status == "publish" {
		if post.PublishedDate == nil {
			currTime := time.Now()
			result.Post.PublishedDate = &currTime
		} else {
			result.Post.PublishedDate = post.PublishedDate
		}
	} else {
		result.Post.PublishedDate = nil
	}

	if len(post.TagIDs) > 0 {
		config.DB.Model(&model.Tag{}).Where(post.TagIDs).Find(&result.Post.Tags)
	}
	if len(post.CategoryIDs) > 0 {
		config.DB.Model(&model.Category{}).Where(post.CategoryIDs).Find(&result.Post.Categories)
	}

	// The user ID travels in the context so GORM hooks can stamp audit fields.
	tx := config.DB.WithContext(context.WithValue(ctx, userContext, uID)).Begin()

	err = tx.Model(&model.Post{}).Create(&result.Post).Error
	if err != nil {
		tx.Rollback()
		loggerx.Error(err)
		return nil, errorx.DBError()
	}

	tx.Model(&model.Post{}).Preload("Medium").Preload("Format").Preload("Tags").Preload("Categories").Preload("Space").First(&result.Post)

	if result.Format.Slug == "fact-check" {
		// create post claim rows, preserving the order the IDs were sent in.
		for i, id := range post.ClaimIDs {
			postClaim := &factCheckModel.PostClaim{}
			postClaim.ClaimID = uint(id)
			postClaim.PostID = result.ID
			postClaim.Position = uint(i + 1)

			err = tx.Model(&factCheckModel.PostClaim{}).Create(&postClaim).Error
			if err != nil {
				tx.Rollback()
				loggerx.Error(err)
				return nil, errorx.DBError()
			}
		}

		// fetch all post claims
		postClaims := []factCheckModel.PostClaim{}
		tx.Model(&factCheckModel.PostClaim{}).Where(&factCheckModel.PostClaim{
			PostID: result.ID,
		}).Preload("Claim").Preload("Claim.Rating").Preload("Claim.Rating.Medium").Preload("Claim.Claimant").Preload("Claim.Claimant.Medium").Find(&postClaims)

		result.ClaimOrder = make([]uint, len(postClaims))
		// appending all post claims in their stored position order
		for _, postClaim := range postClaims {
			result.Claims = append(result.Claims, postClaim.Claim)
			result.ClaimOrder[int(postClaim.Position-1)] = postClaim.ClaimID
		}
	}

	// Adding author. Unknown or zero author IDs are skipped.
	authors, err := author.All(ctx)
	if err != nil {
		loggerx.Error(err)
		return nil, errorx.InternalServerError()
	}
	for _, id := range post.AuthorIDs {
		aID := fmt.Sprint(id)
		if _, found := authors[aID]; found && id != 0 {
			author := model.PostAuthor{
				AuthorID: id,
				PostID:   result.Post.ID,
			}
			// NOTE(review): a failed author-link insert is silently dropped
			// (the author simply doesn't appear in the response) — confirm
			// this best-effort behaviour is intended.
			err := tx.Model(&model.PostAuthor{}).Create(&author).Error
			if err == nil {
				result.Authors = append(result.Authors, authors[aID])
			}
		}
	}

	// Space ratings ordered by numeric value feed the schema generator.
	ratings := make([]factCheckModel.Rating, 0)
	config.DB.Model(&factCheckModel.Rating{}).Where(factCheckModel.Rating{
		SpaceID: uint(sID),
	}).Order("numeric_value asc").Find(&ratings)

	schemas := schemax.GetSchemas(schemax.PostData{
		Post:    result.Post,
		Authors: result.Authors,
		Claims:  result.Claims,
	}, *result.Space, ratings)

	byteArr, err := json.Marshal(schemas)
	if err != nil {
		tx.Rollback()
		loggerx.Error(err)
		return nil, errorx.InternalServerError()
	}

	tx.Model(&result.Post).Select("Schemas").Updates(&model.Post{
		Schemas: postgres.Jsonb{RawMessage: byteArr},
	})
	result.Post.Schemas = postgres.Jsonb{RawMessage: byteArr}

	// Insert into meili index; a meili failure rolls the whole transaction back.
	var meiliPublishDate int64
	if result.Post.Status == "publish" {
		meiliPublishDate = result.Post.PublishedDate.Unix()
	}
	meiliObj := map[string]interface{}{
		"id":             result.ID,
		"kind":           "post",
		"title":          result.Title,
		"subtitle":       result.Subtitle,
		"slug":           result.Slug,
		"status":         result.Status,
		"excerpt":        result.Excerpt,
		"description":    result.Description,
		"is_featured":    result.IsFeatured,
		"is_sticky":      result.IsSticky,
		"is_highlighted": result.IsHighlighted,
		"is_page":        result.IsPage,
		"format_id":      result.FormatID,
		"published_date": meiliPublishDate,
		"space_id":       result.SpaceID,
		"tag_ids":        post.TagIDs,
		"category_ids":   post.CategoryIDs,
		"author_ids":     post.AuthorIDs,
	}

	if result.Format.Slug == "fact-check" {
		meiliObj["claim_ids"] = post.ClaimIDs
	}

	err = meilisearchx.AddDocument("dega", meiliObj)
	if err != nil {
		tx.Rollback()
		loggerx.Error(err)
		return nil, errorx.InternalServerError()
	}

	tx.Commit()

	// Emit NATS events after the commit; a publish failure surfaces as a 500
	// even though the post was already created.
	if util.CheckNats() {
		if err = util.NC.Publish("post.created", result); err != nil {
			return nil, errorx.GetMessage("not able to publish event", http.StatusInternalServerError)
		}
		if result.Post.Status == "publish" {
			if err = util.NC.Publish("post.published", result); err != nil {
				return nil, errorx.GetMessage("not able to publish event", http.StatusInternalServerError)
			}
		}
	}

	return result, errorx.Message{}
}
<file_sep>/service/core/model/organisation.go
package model
import (
"github.com/factly/dega-server/config"
"gorm.io/gorm"
)
// Organisation model
//
// Read-side representation of an organisation; the nullable FeaturedMediumID
// points at an optional Medium.
type Organisation struct {
	config.Base
	Title            string  `json:"title"`
	Slug             string  `json:"slug"`
	Description      string  `json:"description"`
	FeaturedMediumID *uint   `json:"featured_medium_id"`
	Medium           *Medium `json:"medium"`
}
// OrganisationPermission model
//
// Per-organisation quota row; Spaces presumably caps how many spaces the
// organisation may create — confirm against the handlers that read it.
type OrganisationPermission struct {
	config.Base
	OrganisationID uint  `gorm:"column:organisation_id" json:"organisation_id"`
	Spaces         int64 `gorm:"column:spaces" json:"spaces"`
}
// organisationPermissionUser is the context key under which handlers stash
// the acting user's ID so GORM hooks can stamp audit columns.
var organisationPermissionUser config.ContextKey = "org_perm_user"
// BeforeCreate hook
//
// Stamps CreatedByID/UpdatedByID from the user ID the handler placed in the
// statement context; when no user ID is present the stamps are left untouched.
func (op *OrganisationPermission) BeforeCreate(tx *gorm.DB) error {
	val := tx.Statement.Context.Value(organisationPermissionUser)
	if val == nil {
		return nil
	}

	id := uint(val.(int))
	op.CreatedByID = id
	op.UpdatedByID = id
	return nil
}
<file_sep>/service/podcast/action/episode/delete.go
package episode
import (
"net/http"
"strconv"
"github.com/factly/dega-server/config"
"github.com/factly/dega-server/service/podcast/model"
"github.com/factly/dega-server/util"
"github.com/factly/x/errorx"
"github.com/factly/x/loggerx"
"github.com/factly/x/meilisearchx"
"github.com/factly/x/middlewarex"
"github.com/factly/x/renderx"
"github.com/go-chi/chi"
)
// delete - Delete episode by id
// @Summary Delete a episode
// @Description Delete episode by ID
// @Tags Episode
// @ID delete-episode-by-id
// @Param X-User header string true "User ID"
// @Param episode_id path string true "Episode ID"
// @Param X-Space header string true "Space ID"
// @Success 200
// @Failure 400 {array} string
// @Router /podcast/episodes/{episode_id} [delete]
func delete(w http.ResponseWriter, r *http.Request) {
	episodeID := chi.URLParam(r, "episode_id")

	id, err := strconv.Atoi(episodeID)
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InvalidID()))
		return
	}

	sID, err := middlewarex.GetSpace(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}

	result := &model.Episode{}

	result.ID = uint(id)

	// check record exists or not (scoped to the caller's space)
	err = config.DB.Where(&model.Episode{
		SpaceID: uint(sID),
	}).First(&result).Error

	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.RecordNotFound()))
		return
	}

	tx := config.DB.Begin()
	// Delete the episode and its author links in one transaction.
	// NOTE(review): errors from these two deletes are not checked; only the
	// meilisearch call below can trigger a rollback — confirm intended.
	tx.Delete(&result)

	tx.Model(&model.EpisodeAuthor{}).Where(&model.EpisodeAuthor{
		EpisodeID: uint(id),
	}).Delete(&model.EpisodeAuthor{})

	err = meilisearchx.DeleteDocument("dega", result.ID, "episode")
	if err != nil {
		tx.Rollback()
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
		return
	}

	tx.Commit()

	// Broadcast the deletion when NATS is configured.
	if util.CheckNats() {
		if err = util.NC.Publish("episode.deleted", result); err != nil {
			loggerx.Error(err)
			errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
			return
		}
	}

	renderx.JSON(w, http.StatusOK, nil)
}
<file_sep>/service/fact-check/action/rating/delete.go
package rating
import (
"errors"
"net/http"
"strconv"
"github.com/factly/dega-server/config"
"github.com/factly/dega-server/service/fact-check/model"
"github.com/factly/dega-server/util"
"github.com/factly/x/errorx"
"github.com/factly/x/loggerx"
"github.com/factly/x/meilisearchx"
"github.com/factly/x/middlewarex"
"github.com/factly/x/renderx"
"github.com/go-chi/chi"
)
// delete - Delete rating by id
// @Summary Delete a rating
// @Description Delete rating by ID
// @Tags Rating
// @ID delete-rating-by-id
// @Param X-User header string true "User ID"
// @Param X-Space header string true "Space ID"
// @Param rating_id path string true "Rating ID"
// @Success 200
// @Router /fact-check/ratings/{rating_id} [delete]
func delete(w http.ResponseWriter, r *http.Request) {
	// Space ID is injected by middleware from the X-Space header.
	sID, err := middlewarex.GetSpace(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}

	// Parse the rating ID from the URL path.
	ratingID := chi.URLParam(r, "rating_id")

	id, err := strconv.Atoi(ratingID)
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InvalidID()))
		return
	}

	result := &model.Rating{}
	result.ID = uint(id)

	// check record exists or not; space scoping also acts as authorization.
	err = config.DB.Model(&model.Rating{}).Where(&model.Rating{
		SpaceID: uint(sID),
	}).First(&result).Error

	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.RecordNotFound()))
		return
	}

	// check if rating is associated with claims; a referenced rating must
	// not be deleted.
	var totAssociated int64
	config.DB.Model(&model.Claim{}).Where(&model.Claim{
		RatingID: uint(id),
	}).Count(&totAssociated)

	if totAssociated != 0 {
		loggerx.Error(errors.New("rating is associated with claim"))
		errorx.Render(w, errorx.Parser(errorx.CannotDelete("rating", "claim")))
		return
	}

	tx := config.DB.Begin()
	// NOTE(review): the error from this delete is not checked here.
	tx.Model(&model.Rating{}).Delete(&result)

	// Remove from the search index before committing so an index failure
	// rolls the DB deletion back.
	err = meilisearchx.DeleteDocument("dega", result.ID, "rating")
	if err != nil {
		tx.Rollback()
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
		return
	}

	tx.Commit()

	if util.CheckNats() {
		// NOTE(review): a publish failure here returns 500 even though the
		// deletion has already been committed.
		if err = util.NC.Publish("rating.deleted", result); err != nil {
			loggerx.Error(err)
			errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
			return
		}
	}

	renderx.JSON(w, http.StatusOK, nil)
}
<file_sep>/service/core/action/page/delete.go
package page
import (
"net/http"
"strconv"
"github.com/factly/dega-server/config"
"github.com/factly/dega-server/service/core/model"
"github.com/factly/dega-server/util"
"github.com/factly/x/errorx"
"github.com/factly/x/loggerx"
"github.com/factly/x/meilisearchx"
"github.com/factly/x/middlewarex"
"github.com/factly/x/renderx"
"github.com/go-chi/chi"
)
// delete - Delete page by id
// @Summary Delete a page
// @Description Delete page by ID
// @Tags Page
// @ID delete-page-by-id
// @Param X-User header string true "User ID"
// @Param X-Space header string true "Space ID"
// @Param page_id path string true "Page ID"
// @Success 200
// @Router /core/pages/{page_id} [delete]
func delete(w http.ResponseWriter, r *http.Request) {
	// Space ID is injected by middleware from the X-Space header.
	sID, err := middlewarex.GetSpace(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}

	// Parse the page ID from the URL path.
	postID := chi.URLParam(r, "page_id")

	id, err := strconv.Atoi(postID)
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InvalidID()))
		return
	}

	// Pages are stored as posts with IsPage set.
	result := &model.Post{}
	result.ID = uint(id)

	// check record exists or not; Tags and Categories are preloaded so the
	// associations can be detached below.
	err = config.DB.Where(&model.Post{
		SpaceID: uint(sID),
		IsPage: true,
	}).Preload("Tags").Preload("Categories").First(&result).Error

	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.RecordNotFound()))
		return
	}

	tx := config.DB.Begin()

	// delete all associations (tag/category join rows), then author rows,
	// then the page itself — all in one transaction.
	if len(result.Tags) > 0 {
		_ = tx.Model(&result).Association("Tags").Delete(result.Tags)
	}

	if len(result.Categories) > 0 {
		_ = tx.Model(&result).Association("Categories").Delete(result.Categories)
	}

	tx.Model(&model.PostAuthor{}).Where(&model.PostAuthor{
		PostID: uint(id),
	}).Delete(&model.PostAuthor{})

	// NOTE(review): the error from this delete is not checked here.
	tx.Model(&model.Post{}).Delete(&result)

	// Remove from the search index before committing so an index failure
	// rolls the DB deletion back.
	err = meilisearchx.DeleteDocument("dega", result.ID, "page")
	if err != nil {
		tx.Rollback()
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
		return
	}

	tx.Commit()

	if util.CheckNats() {
		// NOTE(review): a publish failure here returns 500 even though the
		// deletion has already been committed.
		if err = util.NC.Publish("page.deleted", result); err != nil {
			loggerx.Error(err)
			errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
			return
		}
	}

	renderx.JSON(w, http.StatusOK, nil)
}
<file_sep>/service/core/action/space/create.go
package space
import (
"context"
"encoding/json"
"errors"
"net/http"
"strconv"
"github.com/spf13/viper"
"github.com/factly/dega-server/config"
"github.com/factly/dega-server/service/core/model"
"github.com/factly/dega-server/util"
"github.com/factly/x/errorx"
"github.com/factly/x/loggerx"
"github.com/factly/x/meilisearchx"
"github.com/factly/x/middlewarex"
"github.com/factly/x/renderx"
"github.com/factly/x/slugx"
"github.com/factly/x/validationx"
)
// create - Create space
// @Summary Create space
// @Description Create space
// @Tags Space
// @ID add-space
// @Consume json
// @Produce json
// @Param X-User header string true "User ID"
// @Param Space body space true "Space Object"
// @Success 201 {object} model.Space
// @Router /core/spaces [post]
func create(w http.ResponseWriter, r *http.Request) {
	// Acting user ID comes from the X-User header via middleware.
	uID, err := middlewarex.GetUser(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}

	// Decode and validate the request body.
	space := &space{}

	err = json.NewDecoder(r.Body).Decode(&space)
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.DecodeError()))
		return
	}

	validationError := validationx.Check(space)
	if validationError != nil {
		loggerx.Error(errors.New("validation error"))
		errorx.Render(w, validationError)
		return
	}

	// NOTE(review): a zero organisation ID returns with no response body
	// (an implicit 200 with nothing written) — presumably pre-validated
	// upstream; confirm whether an explicit error should be rendered.
	if space.OrganisationID == 0 {
		return
	}

	// Verify via Keto that the user may create spaces in this organisation.
	err = util.CheckSpaceKetoPermission("create", uint(space.OrganisationID), uint(uID))
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.GetMessage(err.Error(), http.StatusUnauthorized)))
		return
	}

	// Quota enforcement: unless this is the super organisation, the number
	// of spaces is limited by the organisation's permission record.
	var superOrgID int
	if viper.GetBool("create_super_organisation") {
		superOrgID, err = middlewarex.GetSuperOrganisationID("dega")
		if err != nil {
			loggerx.Error(err)
			errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
			return
		}

		// Fetch organisation permissions
		permission := model.OrganisationPermission{}
		err = config.DB.Model(&model.OrganisationPermission{}).Where(&model.OrganisationPermission{
			OrganisationID: uint(space.OrganisationID),
		}).First(&permission).Error
		// No permission record and not the super organisation => no quota,
		// cannot create any space.
		if err != nil && space.OrganisationID != superOrgID {
			loggerx.Error(err)
			errorx.Render(w, errorx.Parser(errorx.GetMessage("cannot create more spaces", http.StatusUnprocessableEntity)))
			return
		}

		if err == nil {
			// Fetch total number of spaces in organisation
			var totSpaces int64
			config.DB.Model(&model.Space{}).Where(&model.Space{
				OrganisationID: space.OrganisationID,
			}).Count(&totSpaces)

			// permission.Spaces <= 0 is treated as unlimited.
			if totSpaces >= permission.Spaces && permission.Spaces > 0 {
				errorx.Render(w, errorx.Parser(errorx.GetMessage("cannot create more spaces", http.StatusUnprocessableEntity)))
				return
			}
		}
	}

	// Use the caller-supplied slug if valid, otherwise derive one from the
	// name; approveSpaceSlug then de-duplicates it.
	var spaceSlug string
	if space.Slug != "" && slugx.Check(space.Slug) {
		spaceSlug = space.Slug
	} else {
		spaceSlug = slugx.Make(space.Name)
	}

	result := model.Space{
		Name: space.Name,
		SiteTitle: space.SiteTitle,
		Slug: approveSpaceSlug(spaceSlug),
		Description: space.Description,
		TagLine: space.TagLine,
		SiteAddress: space.SiteAddress,
		Analytics: space.Analytics,
		VerificationCodes: space.VerificationCodes,
		SocialMediaURLs: space.SocialMediaURLs,
		OrganisationID: space.OrganisationID,
		ContactInfo: space.ContactInfo,
		HeaderCode: space.HeaderCode,
		FooterCode: space.FooterCode,
		MetaFields: space.MetaFields,
	}

	// The user ID is placed in the transaction context so GORM BeforeCreate
	// hooks can stamp audit columns.
	tx := config.DB.WithContext(context.WithValue(r.Context(), userContext, uID)).Begin()
	err = tx.Create(&result).Error

	if err != nil {
		tx.Rollback()
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.DBError()))
		return
	}

	if viper.GetBool("create_super_organisation") {
		// Create SpacePermission for super organisation
		var spacePermission model.SpacePermission
		// Super organisation spaces get unlimited quotas (-1) and all
		// features enabled; others get configured defaults.
		if superOrgID == space.OrganisationID {
			spacePermission = model.SpacePermission{
				SpaceID: result.ID,
				Media: -1,
				Posts: -1,
				Podcast: true,
				Episodes: -1,
				FactCheck: true,
			}
		} else {
			spacePermission = model.SpacePermission{
				SpaceID: result.ID,
				Media: viper.GetInt64("default_number_of_media"),
				Posts: viper.GetInt64("default_number_of_posts"),
				Episodes: viper.GetInt64("default_number_of_episodes"),
				Podcast: false,
				FactCheck: false,
			}
		}

		var spacePermContext config.ContextKey = "space_perm_user"
		if err = tx.WithContext(context.WithValue(r.Context(), spacePermContext, uID)).Create(&spacePermission).Error; err != nil {
			tx.Rollback()
			loggerx.Error(err)
			errorx.Render(w, errorx.Parser(errorx.DBError()))
			return
		}
	}

	// Insert into meili index
	meiliObj := map[string]interface{}{
		"id": result.ID,
		"kind": "space",
		"name": result.Name,
		"slug": result.Slug,
		"description": result.Description,
		"site_title": result.SiteTitle,
		"site_address": result.SiteAddress,
		"tag_line": result.TagLine,
		"organisation_id": result.OrganisationID,
		"analytics": result.Analytics,
	}

	// Index before committing so an index failure rolls everything back.
	err = meilisearchx.AddDocument("dega", meiliObj)
	if err != nil {
		tx.Rollback()
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
		return
	}

	tx.Commit()

	if util.CheckNats() {
		// NOTE(review): a publish failure here returns 500 even though the
		// space has already been committed.
		if err = util.NC.Publish("space.created", result); err != nil {
			loggerx.Error(err)
			errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
			return
		}
	}

	renderx.JSON(w, http.StatusCreated, result)
}
// approveSpaceSlug returns a slug unique among non-deleted spaces.
// It tries "slug" first and then "slug-1", "slug-2", ... appending the
// smallest numeric suffix that does not collide with an existing space slug.
func approveSpaceSlug(slug string) string {
	// Fetch every existing slug that could collide (prefix match).
	spaceList := make([]model.Space, 0)
	config.DB.Model(&model.Space{}).Where("slug LIKE ? AND deleted_at IS NULL", slug+"%").Find(&spaceList)

	// Build a set of taken slugs once, for O(1) membership tests instead of
	// the previous rescan of the whole list per candidate.
	taken := make(map[string]bool, len(spaceList))
	for _, each := range spaceList {
		taken[each.Slug] = true
	}

	// First free candidate wins; behaviour matches the original counter loop.
	candidate := slug
	for count := 1; taken[candidate]; count++ {
		candidate = slug + "-" + strconv.Itoa(count)
	}
	return candidate
}
<file_sep>/service/core/action/policy/default.go
package policy
import (
"encoding/json"
"io/ioutil"
"net/http"
"os"
"github.com/factly/dega-server/service/core/action/author"
"github.com/factly/dega-server/service/core/model"
"github.com/factly/dega-server/util"
"github.com/factly/x/errorx"
"github.com/factly/x/loggerx"
"github.com/factly/x/middlewarex"
"github.com/factly/x/renderx"
)
// DataFile default json data file
var DataFile = "./data/policies.json"

// createDefaults - Create Default Policies
// @Summary Create Default Policies
// @Description Create Default Policies
// @Tags Policy
// @ID add-default-policies
// @Consume json
// @Produce json
// @Param X-User header string true "User ID"
// @Param X-Space header string true "Space ID"
// @Success 201 {object} paging
// @Failure 400 {array} string
// @Router /core/policies/default [post]
func createDefaults(w http.ResponseWriter, r *http.Request) {
	// User, space and organisation IDs are all supplied by middleware from
	// the request headers; each is required.
	uID, err := middlewarex.GetUser(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}

	sID, err := middlewarex.GetSpace(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}

	oID, err := util.GetOrganisation(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}

	// Load the default policy definitions shipped with the server.
	jsonFile, err := os.Open(DataFile)
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
		return
	}

	defer jsonFile.Close()

	policies := make([]policyReq, 0)

	byteValue, _ := ioutil.ReadAll(jsonFile)
	err = json.Unmarshal(byteValue, &policies)
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
		return
	}

	// Resolve organisation users so policies can reference them as authors.
	authors := author.Mapper(oID, uID)

	result := paging{}
	result.Nodes = make([]model.Policy, 0)

	// Compose each default policy for this org/space and index it.
	// NOTE(review): there is no rollback here — a meili failure leaves the
	// earlier policies of this batch already indexed.
	for _, policy := range policies {
		res := Mapper(Composer(oID, sID, policy), authors)
		result.Nodes = append(result.Nodes, res)
		if err = insertIntoMeili(res); err != nil {
			loggerx.Error(err)
			errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
			return
		}
	}
	result.Total = len(result.Nodes)

	renderx.JSON(w, http.StatusCreated, result)
}
<file_sep>/service/fact-check/action/rating/default.go
package rating
import (
"context"
"encoding/json"
"io/ioutil"
"net/http"
"os"
"github.com/factly/dega-server/config"
"github.com/factly/dega-server/service/fact-check/model"
"github.com/factly/dega-server/util"
"github.com/factly/x/errorx"
"github.com/factly/x/loggerx"
"github.com/factly/x/middlewarex"
"github.com/factly/x/renderx"
)
// DataFile default json data file
var DataFile = "./data/ratings.json"

// createDefaults - Create Default Ratings
// @Summary Create Default Ratings
// @Description Create Default Ratings
// @Tags Rating
// @ID add-default-ratings
// @Consume json
// @Produce json
// @Param X-User header string true "User ID"
// @Param X-Space header string true "Space ID"
// @Success 201 {object} paging
// @Failure 400 {array} string
// @Router /fact-check/ratings/default [post]
func createDefaults(w http.ResponseWriter, r *http.Request) {
	// Space and user IDs are supplied by middleware from request headers.
	sID, err := middlewarex.GetSpace(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}

	uID, err := middlewarex.GetUser(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}

	// Load the default rating definitions shipped with the server.
	jsonFile, err := os.Open(DataFile)
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
		return
	}

	defer jsonFile.Close()

	ratings := make([]model.Rating, 0)

	byteValue, _ := ioutil.ReadAll(jsonFile)
	err = json.Unmarshal(byteValue, &ratings)
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
		return
	}

	// The user ID is placed in the transaction context so GORM hooks can
	// stamp audit columns on the created ratings.
	tx := config.DB.WithContext(context.WithValue(r.Context(), userContext, uID)).Begin()

	for i := range ratings {
		ratings[i].SpaceID = uint(sID)
		// Pre-render the editorjs description to HTML; a malformed
		// description aborts the whole batch.
		ratings[i].HTMLDescription, err = util.HTMLDescription(ratings[i].Description)
		if err != nil {
			tx.Rollback()
			loggerx.Error(err)
			errorx.Render(w, errorx.Parser(errorx.GetMessage("cannot parse rating description", http.StatusUnprocessableEntity)))
			return
		}
		// Idempotent: re-running default creation does not duplicate rows.
		// NOTE(review): the error from FirstOrCreate is not checked here.
		tx.Model(&model.Rating{}).FirstOrCreate(&ratings[i], &ratings[i])
		// Index before commit so a meili failure rolls the batch back.
		err = insertIntoMeili(ratings[i])
		if err != nil {
			tx.Rollback()
			loggerx.Error(err)
			errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
			return
		}
	}

	result := paging{}
	result.Nodes = ratings
	result.Total = int64(len(ratings))

	tx.Commit()
	renderx.JSON(w, http.StatusCreated, result)
}
<file_sep>/test/service/fact-check/claimant/update_test.go
package claimant
import (
"errors"
"fmt"
"net/http"
"net/http/httptest"
"testing"
"github.com/DATA-DOG/go-sqlmock"
"github.com/factly/dega-server/service"
"github.com/factly/dega-server/test"
"github.com/factly/dega-server/test/service/core/permissions/space"
"github.com/gavv/httpexpect/v2"
"github.com/jinzhu/gorm/dialects/postgres"
"gopkg.in/h2non/gock.v1"
)
// updatedClaimant is the request payload used across the claimant-update
// subtests below; individual subtests mutate fields (e.g. "slug", "name",
// "description") in place and restore them afterwards.
var updatedClaimant = map[string]interface{}{
	"name": "TOI",
	"description": postgres.Jsonb{
		RawMessage: []byte(`{"time":1617039625490,"blocks":[{"type":"paragraph","data":{"text":"Test Description"}}],"version":"2.19.0"}`),
	},
	"html_description": "<p>Test Description</p>",
	"tag_line": "sample tag line",
	"medium_id": uint(1),
}
// TestClaimantUpdate exercises PUT /fact-check/claimants/{claimant_id} against
// a sqlmock-backed server: invalid/missing IDs, decode and validation errors,
// slug handling (empty, changed, duplicate name), description parse failure,
// missing medium, and meilisearch downtime. Mock expectations must be queued
// in the exact order the handler issues its queries.
func TestClaimantUpdate(t *testing.T) {
	mock := test.SetupMockDB()

	test.MockServer()
	defer gock.DisableNetworking()
	testServer := httptest.NewServer(service.RegisterRoutes())
	gock.New(testServer.URL).EnableNetworking().Persist()
	defer gock.DisableNetworking()
	defer testServer.Close()

	// create httpexpect instance
	e := httpexpect.New(t, testServer.URL)

	t.Run("invalid claimant id", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)
		e.PUT(path).
			WithPath("claimant_id", "invalid_id").
			WithHeaders(headers).
			Expect().
			Status(http.StatusBadRequest)
	})

	t.Run("claimant record not found", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)
		recordNotFoundMock(mock)

		e.PUT(path).
			WithPath("claimant_id", "100").
			WithHeaders(headers).
			WithJSON(updatedClaimant).
			Expect().
			Status(http.StatusNotFound)
	})

	t.Run("Unable to decode claimant data", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)
		// No JSON body => decode error from the handler.
		e.PUT(path).
			WithPath("claimant_id", 1).
			WithHeaders(headers).
			Expect().
			Status(http.StatusUnprocessableEntity)
		test.ExpectationsMet(t, mock)
	})

	t.Run("Unprocessable claimant", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)
		e.PUT(path).
			WithPath("claimant_id", 1).
			WithHeaders(headers).
			WithJSON(invalidData).
			Expect().
			Status(http.StatusUnprocessableEntity)
		test.ExpectationsMet(t, mock)
	})

	t.Run("update claimant", func(t *testing.T) {
		updatedClaimant["slug"] = "toi"
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)
		SelectWithSpace(mock)
		claimantUpdateMock(mock, updatedClaimant, nil)
		mock.ExpectCommit()

		e.PUT(path).
			WithPath("claimant_id", 1).
			WithHeaders(headers).
			WithJSON(updatedClaimant).
			Expect().
			Status(http.StatusOK).JSON().Object().ContainsMap(resData)
		test.ExpectationsMet(t, mock)
	})

	t.Run("claimant with same name exist", func(t *testing.T) {
		updatedClaimant["name"] = "<NAME>"
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)
		SelectWithSpace(mock)
		// Non-zero count from the name-uniqueness query triggers a 422.
		claimantCountQuery(mock, 1)

		e.PUT(path).
			WithPath("claimant_id", 1).
			WithHeaders(headers).
			WithJSON(updatedClaimant).
			Expect().
			Status(http.StatusUnprocessableEntity)
		test.ExpectationsMet(t, mock)
		updatedClaimant["name"] = "TOI"
	})

	t.Run("cannot parse claimant description", func(t *testing.T) {
		updatedClaimant["slug"] = "toi"
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)
		SelectWithSpace(mock)
		// Malformed editorjs payload: missing the "blocks" structure.
		updatedClaimant["description"] = postgres.Jsonb{
			RawMessage: []byte(`{"block": "new"}`),
		}
		e.PUT(path).
			WithPath("claimant_id", 1).
			WithHeaders(headers).
			WithJSON(updatedClaimant).
			Expect().
			Status(http.StatusUnprocessableEntity)
		test.ExpectationsMet(t, mock)
		updatedClaimant["description"] = postgres.Jsonb{
			RawMessage: []byte(`{"time":1617039625490,"blocks":[{"type":"paragraph","data":{"text":"Test Description"}}],"version":"2.19.0"}`),
		}
	})

	t.Run("update claimant by id with empty slug", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)
		updatedClaimant["slug"] = "toi"
		SelectWithSpace(mock)
		// Empty slug in the payload makes the handler generate one, which
		// requires the slug-uniqueness query mocked here.
		slugCheckMock(mock, Data)
		claimantUpdateMock(mock, updatedClaimant, nil)
		mock.ExpectCommit()

		Data["slug"] = ""
		e.PUT(path).
			WithPath("claimant_id", 1).
			WithHeaders(headers).
			WithJSON(Data).
			Expect().
			Status(http.StatusOK).JSON().Object().ContainsMap(resData)
		Data["slug"] = "toi"
		test.ExpectationsMet(t, mock)
	})

	t.Run("update claimant with different slug", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)
		updatedClaimant["slug"] = "toi-test"
		SelectWithSpace(mock)
		mock.ExpectQuery(`SELECT slug, space_id FROM "claimants"`).
			WithArgs(fmt.Sprint(updatedClaimant["slug"], "%"), 1).
			WillReturnRows(sqlmock.NewRows([]string{"slug", "space_id"}))

		claimantUpdateMock(mock, updatedClaimant, nil)
		mock.ExpectCommit()

		e.PUT(path).
			WithPath("claimant_id", 1).
			WithHeaders(headers).
			WithJSON(updatedClaimant).
			Expect().
			Status(http.StatusOK).JSON().Object().ContainsMap(resData)
		test.ExpectationsMet(t, mock)
	})

	t.Run("medium not found", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)
		updatedClaimant["slug"] = "toi-test"
		SelectWithSpace(mock)
		mock.ExpectQuery(`SELECT slug, space_id FROM "claimants"`).
			WithArgs(fmt.Sprint(updatedClaimant["slug"], "%"), 1).
			WillReturnRows(sqlmock.NewRows([]string{"slug", "space_id"}))

		// Simulate the medium lookup failing inside the update.
		claimantUpdateMock(mock, updatedClaimant, errors.New("record not found"))
		mock.ExpectRollback()

		e.PUT(path).
			WithPath("claimant_id", 1).
			WithHeaders(headers).
			WithJSON(updatedClaimant).
			Expect().
			Status(http.StatusInternalServerError)
		test.ExpectationsMet(t, mock)
	})

	t.Run("update claimant when meili is down", func(t *testing.T) {
		// Disable the meilisearch gock mock so the index call fails and
		// the handler rolls the transaction back.
		test.DisableMeiliGock(testServer.URL)
		updatedClaimant["slug"] = "toi"
		test.CheckSpaceMock(mock)
		space.SelectQuery(mock, 1)
		SelectWithSpace(mock)
		claimantUpdateMock(mock, updatedClaimant, nil)
		mock.ExpectRollback()

		e.PUT(path).
			WithPath("claimant_id", 1).
			WithHeaders(headers).
			WithJSON(updatedClaimant).
			Expect().
			Status(http.StatusInternalServerError)
		test.ExpectationsMet(t, mock)
	})
}
<file_sep>/service/core/action/author/mapper.go
package author
import (
"encoding/json"
"fmt"
"net/http"
"github.com/factly/dega-server/service/core/model"
"github.com/factly/x/loggerx"
"github.com/spf13/viper"
)
// Mapper fetches the users of organisation oID from the Kavach service and
// returns them keyed by their stringified user ID. uID is the acting user,
// passed to Kavach in the X-User header.
// On any error it returns whatever has been collected so far (possibly an
// empty map) so callers can degrade gracefully.
func Mapper(oID int, uID int) map[string]model.Author {
	userMap := make(map[string]model.Author)

	url := fmt.Sprint(viper.GetString("kavach_url"), "/organisations/", oID, "/users")
	req, err := http.NewRequest("GET", url, nil)
	if err != nil {
		// Bug fix: previously execution continued with a nil request and
		// panicked on req.Header.Set below. Bail out with the empty map.
		loggerx.Error(err)
		return userMap
	}
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("X-User", fmt.Sprint(uID))

	client := &http.Client{}
	resp, err := client.Do(req)
	if err != nil {
		// Log the transport failure (was previously silent) and return.
		loggerx.Error(err)
		return userMap
	}
	defer resp.Body.Close()

	users := []model.Author{}
	err = json.NewDecoder(resp.Body).Decode(&users)
	if err != nil {
		// A decode failure leaves users empty; fall through to return.
		loggerx.Error(err)
	}

	for _, u := range users {
		userMap[fmt.Sprint(u.ID)] = u
	}

	return userMap
}
<file_sep>/service/core/action/user/list.go
package user
import (
"encoding/json"
"fmt"
"net/http"
"strconv"
"strings"
"github.com/factly/dega-server/service/core/action/author"
"github.com/factly/dega-server/service/core/action/policy"
"github.com/factly/dega-server/service/core/model"
"github.com/factly/dega-server/util"
"github.com/factly/x/middlewarex"
"github.com/factly/x/errorx"
"github.com/factly/x/loggerx"
"github.com/factly/x/renderx"
)
// paging is the list response: all space users with their policies.
type paging struct {
	Total int `json:"total"`
	Nodes []userPolicy `json:"nodes"`
}

// userPolicy pairs a Kavach user (Author) with the Keto policies granted to
// that user in the current space.
type userPolicy struct {
	model.Author
	Policies []policyRes `json:"policies"`
}

// policyRes is the trimmed policy representation returned to clients.
type policyRes struct {
	ID string `json:"id"`
	Name string `json:"name"`
	Description string `json:"description"`
}
// list - Get users with space access
// @Summary Get users with space access
// @Description Get users with space access
// @Tags Users
// @ID get-space-users
// @Consume json
// @Produce json
// @Param X-User header string true "User ID"
// @Param X-Space header string true "Space ID"
// @Success 200 {object} paging
// @Router /core/users [get]
func list(w http.ResponseWriter, r *http.Request) {
	// Space and organisation IDs are supplied by middleware from headers.
	sID, err := middlewarex.GetSpace(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}

	oID, err := util.GetOrganisation(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}

	// userIDsMap accumulates, per user ID, the policies granted to that
	// user in this space.
	userIDsMap := make(map[uint][]policyRes)

	// get all the admins of the organisation; admins get a synthetic
	// "admin" policy entry.
	adminRoleID := fmt.Sprint("roles:org:", oID, ":admin")

	resp, err := util.KetoGetRequest("/engines/acp/ory/regex/roles/" + adminRoleID)
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.NetworkError()))
		return
	}
	defer resp.Body.Close()

	adminRole := model.KetoRole{}
	err = json.NewDecoder(resp.Body).Decode(&adminRole)
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.DecodeError()))
		return
	}

	for _, member := range adminRole.Members {
		// NOTE(review): Atoi errors are ignored; a non-numeric member
		// collapses to ID 0.
		memid, _ := strconv.Atoi(member)
		userIDsMap[uint(memid)] = []policyRes{
			policyRes{
				ID: "admin",
				Name: "admin",
			},
		}
	}

	// Get all policies
	policyList, err := policy.GetAllPolicies()
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
		return
	}

	// Keto policy IDs for this space share this prefix; the final colon
	// segment is the short policy name.
	prefixName := fmt.Sprint("id:org:", oID, ":app:dega:space:", sID, ":")

	// append subjects whose id has prefix for our space
	for _, policy := range policyList {
		if strings.HasPrefix(policy.ID, prefixName) {
			policyNameTokens := strings.Split(policy.ID, ":")
			policyID := policyNameTokens[len(policyNameTokens)-1]
			for _, subject := range policy.Subjects {
				subid, _ := strconv.Atoi(subject)
				if _, found := userIDsMap[uint(subid)]; !found {
					userIDsMap[uint(subid)] = make([]policyRes, 0)
				}

				polRes := policyRes{
					ID: policyID,
					Name: policyID,
					Description: policy.Description,
				}

				userIDsMap[uint(subid)] = append(userIDsMap[uint(subid)], polRes)
			}
		}
	}

	// Fetch all the users of the organisation from Kavach.
	userMap, err := author.All(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
		return
	}

	result := paging{}
	userlist := make([]userPolicy, 0)

	// Join policy map with user details; users unknown to Kavach are
	// silently dropped from the listing.
	for usrID, pol := range userIDsMap {
		userIDStr := fmt.Sprint(usrID)
		if user, found := userMap[userIDStr]; found {
			usrpol := userPolicy{
				Policies: pol,
				Author: user,
			}
			userlist = append(userlist, usrpol)
		}
	}

	result.Nodes = userlist
	result.Total = len(userlist)

	renderx.JSON(w, http.StatusOK, result)
}
<file_sep>/service/core/action/space/delete.go
package space
import (
"net/http"
"strconv"
"github.com/factly/dega-server/config"
"github.com/factly/dega-server/service/core/model"
"github.com/factly/dega-server/util"
"github.com/factly/x/errorx"
"github.com/factly/x/loggerx"
"github.com/factly/x/meilisearchx"
"github.com/factly/x/middlewarex"
"github.com/factly/x/renderx"
"github.com/go-chi/chi"
)
// delete - Delete space
// @Summary Delete space
// @Description Delete space
// @Tags Space
// @ID delete-space
// @Consume json
// @Produce json
// @Param X-User header string true "User ID"
// @Param space_id path string true "Space ID"
// @Success 200
// @Router /core/spaces/{space_id} [delete]
func delete(w http.ResponseWriter, r *http.Request) {
	// Acting user ID comes from the X-User header via middleware.
	uID, err := middlewarex.GetUser(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}

	// Parse the space ID from the URL path.
	spaceID := chi.URLParam(r, "space_id")

	sID, err := strconv.Atoi(spaceID)
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InvalidID()))
		return
	}

	result := &model.Space{}
	result.ID = uint(sID)

	// check record exists or not
	err = config.DB.First(&result).Error
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.RecordNotFound()))
		return
	}

	// NOTE(review): a zero organisation ID returns with no response body
	// (implicit 200, nothing deleted) — confirm whether an explicit error
	// should be rendered instead.
	if result.OrganisationID == 0 {
		return
	}

	// Verify via Keto that the user may delete spaces of this organisation.
	err = util.CheckSpaceKetoPermission("delete", uint(result.OrganisationID), uint(uID))
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.GetMessage(err.Error(), http.StatusUnauthorized)))
		return
	}

	tx := config.DB.Begin()
	// NOTE(review): the error from this delete is not checked here.
	tx.Model(&model.Space{}).Delete(&result)

	// Remove from the search index before committing so an index failure
	// rolls the DB deletion back.
	err = meilisearchx.DeleteDocument("dega", result.ID, "space")
	if err != nil {
		tx.Rollback()
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
		return
	}

	tx.Commit()

	if util.CheckNats() {
		// NOTE(review): a publish failure here returns 500 even though the
		// deletion has already been committed.
		if err = util.NC.Publish("space.deleted", result); err != nil {
			loggerx.Error(err)
			errorx.Render(w, errorx.Parser(errorx.InternalServerError()))
			return
		}
	}

	renderx.JSON(w, http.StatusOK, nil)
}
<file_sep>/service/core/action/permissions/space/update.go
package space
import (
"encoding/json"
"errors"
"net/http"
"strconv"
"github.com/spf13/viper"
"github.com/factly/dega-server/config"
"github.com/factly/dega-server/service/core/model"
"github.com/factly/x/errorx"
"github.com/factly/x/loggerx"
"github.com/factly/x/middlewarex"
"github.com/factly/x/renderx"
"github.com/factly/x/validationx"
"github.com/go-chi/chi"
)
// update - Update Space permission by id
// @Summary Update a Space permission by id
// @Description Update Space permission by ID
// @Tags Space_Permissions
// @ID update-space-permission-by-id
// @Produce json
// @Consume json
// @Param X-User header string true "User ID"
// @Param permission_id path string true "Permission ID"
// @Param X-Space header string true "Space ID"
// @Param Permission body spacePermission false "Permission Body"
// @Success 200 {object} model.SpacePermission
// @Router /core/permissions/spaces/{permission_id} [put]
func update(w http.ResponseWriter, r *http.Request) {
	// Acting user ID comes from the X-User header via middleware; it is
	// recorded as UpdatedByID below.
	uID, err := middlewarex.GetUser(r.Context())
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.Unauthorized()))
		return
	}

	// Parse the permission ID from the URL path.
	permissionID := chi.URLParam(r, "permission_id")
	id, err := strconv.Atoi(permissionID)
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.InvalidID()))
		return
	}

	// Decode and validate the request body.
	permission := spacePermission{}
	err = json.NewDecoder(r.Body).Decode(&permission)
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.DecodeError()))
		return
	}

	validationError := validationx.Check(permission)
	if validationError != nil {
		loggerx.Error(errors.New("validation error"))
		errorx.Render(w, validationError)
		return
	}

	result := model.SpacePermission{}
	result.ID = uint(id)

	// check record exists or not
	err = config.DB.First(&result).Error
	if err != nil {
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.RecordNotFound()))
		return
	}

	// Unset (zero) quotas fall back to the configured defaults.
	if permission.Media == 0 {
		permission.Media = viper.GetInt64("default_number_of_media")
	}
	if permission.Posts == 0 {
		permission.Posts = viper.GetInt64("default_number_of_posts")
	}

	tx := config.DB.Begin()
	// Boolean columns must be updated through Select because GORM's
	// struct-based Updates skips zero values (false) by default.
	// Bug fix: the error of this first update was previously discarded,
	// letting a failed write fall through to Commit.
	if err = tx.Model(&result).Select("FactCheck", "Podcast").Updates(model.SpacePermission{
		FactCheck: permission.FactCheck,
		Podcast: permission.Podcast,
	}).Error; err != nil {
		tx.Rollback()
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.DBError()))
		return
	}

	// Apply the numeric quotas and audit stamp, then re-read the row so the
	// response reflects the stored state.
	err = tx.Model(&result).Updates(&model.SpacePermission{
		Base: config.Base{UpdatedByID: uint(uID)},
		Posts: permission.Posts,
		Media: permission.Media,
		Episodes: permission.Episodes,
	}).First(&result).Error

	if err != nil {
		tx.Rollback()
		loggerx.Error(err)
		errorx.Render(w, errorx.Parser(errorx.DBError()))
		return
	}
	tx.Commit()
	renderx.JSON(w, http.StatusOK, result)
}
<file_sep>/test/service/core/category/list_test.go
package category
import (
"net/http"
"net/http/httptest"
"testing"
"time"
"github.com/DATA-DOG/go-sqlmock"
"github.com/factly/dega-server/service"
"github.com/factly/dega-server/test"
"github.com/factly/dega-server/test/service/core/medium"
"github.com/gavv/httpexpect"
"github.com/spf13/viper"
"gopkg.in/h2non/gock.v1"
)
// TestCategoryList exercises the GET /categories list endpoint end-to-end
// against a sqlmock-backed database and gock-mocked external services
// (space-membership check and the Meili search index).
func TestCategoryList(t *testing.T) {
	mock := test.SetupMockDB()
	test.MockServer()
	testServer := httptest.NewServer(service.RegisterRoutes())
	gock.New(testServer.URL).EnableNetworking().Persist()
	defer gock.DisableNetworking()
	defer testServer.Close()
	// create httpexpect instance
	e := httpexpect.New(t, testServer.URL)
	// Empty table: the endpoint must still answer 200 with total == 0.
	t.Run("get empty list of categories", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		mock.ExpectQuery(countQuery).
			WillReturnRows(sqlmock.NewRows([]string{"count"}).AddRow(0))
		mock.ExpectQuery(selectQuery).
			WillReturnRows(sqlmock.NewRows(Columns))
		e.GET(basePath).
			WithHeaders(headers).
			Expect().
			Status(http.StatusOK).
			JSON().
			Object().
			ContainsMap(map[string]interface{}{"total": 0})
		test.ExpectationsMet(t, mock)
	})
	// Two rows in the DB: response must echo the total and the first node.
	t.Run("get list of categories", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		mock.ExpectQuery(countQuery).
			WillReturnRows(sqlmock.NewRows([]string{"count"}).AddRow(len(categorylist)))
		mock.ExpectQuery(selectQuery).
			WillReturnRows(sqlmock.NewRows(Columns).
				AddRow(1, time.Now(), time.Now(), nil, 1, 1, categorylist[0]["name"], categorylist[0]["slug"], categorylist[0]["description"], categorylist[0]["html_description"], categorylist[0]["parent_id"], categorylist[0]["meta_fields"], categorylist[0]["medium_id"], categorylist[0]["is_featured"], 1).
				AddRow(2, time.Now(), time.Now(), nil, 1, 1, categorylist[1]["name"], categorylist[1]["slug"], categorylist[1]["description"], categorylist[1]["html_description"], categorylist[1]["parent_id"], categorylist[1]["meta_fields"], categorylist[1]["medium_id"], categorylist[1]["is_featured"], 1))
		medium.SelectWithOutSpace(mock)
		// these keys are resolved server-side, so drop them before comparing
		delete(categorylist[0], "parent_id")
		delete(categorylist[0], "medium_id")
		e.GET(basePath).
			WithHeaders(headers).
			Expect().
			Status(http.StatusOK).
			JSON().
			Object().
			ContainsMap(map[string]interface{}{"total": len(categorylist)}).
			Value("nodes").
			Array().
			Element(0).
			Object().
			ContainsMap(categorylist[0])
		test.ExpectationsMet(t, mock)
	})
	// Pagination: limit=1&page=2 must return the second category only.
	t.Run("get list of categories with paiganation", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		mock.ExpectQuery(countQuery).
			WillReturnRows(sqlmock.NewRows([]string{"count"}).AddRow(len(categorylist)))
		mock.ExpectQuery(selectQuery).
			WillReturnRows(sqlmock.NewRows(Columns).
				AddRow(2, time.Now(), time.Now(), nil, 1, 1, categorylist[1]["name"], categorylist[1]["slug"], categorylist[1]["description"], categorylist[1]["html_description"], categorylist[1]["parent_id"], categorylist[1]["meta_fields"], categorylist[1]["medium_id"], categorylist[1]["is_featured"], 1))
		medium.SelectWithOutSpace(mock)
		delete(categorylist[1], "parent_id")
		delete(categorylist[1], "medium_id")
		e.GET(basePath).
			WithQueryObject(map[string]interface{}{
				"limit": "1",
				"page":  "2",
			}).
			WithHeaders(headers).
			Expect().
			Status(http.StatusOK).
			JSON().
			Object().
			ContainsMap(map[string]interface{}{"total": len(categorylist)}).
			Value("nodes").
			Array().
			Element(0).
			Object().
			ContainsMap(categorylist[1])
		test.ExpectationsMet(t, mock)
	})
	// Search path: ?q= goes through the (mocked) Meili index, then the DB.
	t.Run("get list of categories based on search query q", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		mock.ExpectQuery(countQuery).
			WillReturnRows(sqlmock.NewRows([]string{"count"}).AddRow(len(categorylist)))
		mock.ExpectQuery(selectQuery).
			WillReturnRows(sqlmock.NewRows(Columns).
				AddRow(1, time.Now(), time.Now(), nil, 1, 1, categorylist[0]["name"], categorylist[0]["slug"], categorylist[0]["description"], categorylist[0]["html_description"], 0, categorylist[0]["meta_fields"], 1, categorylist[0]["is_featured"], 1).
				AddRow(2, time.Now(), time.Now(), nil, 1, 1, categorylist[1]["name"], categorylist[1]["slug"], categorylist[1]["description"], categorylist[1]["html_description"], 0, categorylist[1]["meta_fields"], 1, categorylist[1]["is_featured"], 1))
		medium.SelectWithOutSpace(mock)
		e.GET(basePath).
			WithHeaders(headers).
			WithQueryObject(map[string]interface{}{
				"q":    "test",
				"sort": "asc",
			}).
			Expect().
			Status(http.StatusOK).
			JSON().
			Object().
			ContainsMap(map[string]interface{}{"total": len(categorylist)}).
			Value("nodes").
			Array().
			Element(0).
			Object().
			ContainsMap(categorylist[0])
		test.ExpectationsMet(t, mock)
	})
	// Meili returns no hits: endpoint must report total == 0.
	t.Run("when query does not match any category", func(t *testing.T) {
		test.CheckSpaceMock(mock)
		test.DisableMeiliGock(testServer.URL)
		gock.New(viper.GetString("meili_url") + "/indexes/dega/search").
			HeaderPresent("X-Meili-API-Key").
			Persist().
			Reply(http.StatusOK).
			JSON(test.EmptyMeili)
		e.GET(basePath).
			WithHeaders(headers).
			WithQueryObject(map[string]interface{}{
				"q":    "test",
				"sort": "asc",
			}).
			Expect().
			Status(http.StatusOK).
			JSON().
			Object().
			Value("total").
			Equal(0)
		test.ExpectationsMet(t, mock)
	})
	// Meili unreachable: the search must degrade to an empty result, not 5xx.
	t.Run("search with query q when meili is down", func(t *testing.T) {
		test.DisableMeiliGock(testServer.URL)
		test.CheckSpaceMock(mock)
		e.GET(basePath).
			WithHeaders(headers).
			WithQueryObject(map[string]interface{}{
				"q":    "test",
				"sort": "asc",
			}).
			Expect().
			Status(http.StatusOK).
			JSON().
			Object().
			Value("total").
			Equal(0)
		test.ExpectationsMet(t, mock)
	})
}
| d8c30d5bbda5c2a7fd151f6f91631803b8295661 | [
"Markdown",
"Go Module",
"Go",
"INI"
] | 234 | Go | factly/dega-server | be294a22e2abaf3059bfc5318835e3bcac4f91e9 | ca99d54cd4974abf0586768cf35400dc4968e580 |
refs/heads/master | <repo_name>aadarshjain/PYTHON-PROJECT<file_sep>/sql tria.py
import sqlite3
#########################################{LIBRARIES}##########################################################
class product_info:
    """Pricing fields shared by purchase and sales records (class-level defaults)."""
    mrp=0            # maximum retail price
    pur_rate=0       # purchase rate
    sale_rate=0      # sale rate
    exp_date=" "     # expiry date, free-form string as typed by the user
    discount=0.0     # discount applied
    final_price=0.0  # price after discount
#########################################{class product_info}#################################################
class purchase(product_info):
    """Interactive flow for recording a stock purchase.

    Prompts on stdin for the supplier company, the product, and the pricing
    fields inherited from product_info.
    """

    def __init__(self):
        # BUG FIX: was misspelled `_init_`, so it never ran as the
        # constructor and the name fields stayed shared at class level.
        print("Inside Purchse")
        self.com_name = " "
        self.product_name = " "

    def input_company(self):
        """Show the supplier menu and read the chosen company name."""
        print('''+++MENU+++
MENU:-PLEASE SELECT THE COMPANY
1)AGAPPE
2)BIOMEURIEX
3)ARKRAY
4)NIHON KOHDEN
5)BIOLAB''')
        self.com_name = input()

    def input_product(self):
        """Show the product menu for the chosen company, then read the product name."""
        if self.com_name == "AGAPPE":
            print('''PRODUCTS:-
1)BIO-CHEMISTRY
2)CELL-COUNTER
3)MISPA NANO
4)MISPA NEO
''')
        elif self.com_name == "BIOMEURIEX":
            # BUG FIX: previously compared against "BIOMEURIX", which could
            # never match the "BIOMEURIEX" option offered in input_company()
            # (the sales class already uses the correct spelling).
            print('''PRODUCTS:-
1)MINI VIDAS
2)VIDAS
''')
        elif self.com_name == "ARKRAY":
            print('''PRODUCTS:-
1)PPD 10TU
2)PPD 2TU
3)PPD 5TU
''')
        elif self.com_name == "NIHON KOHDEN":
            print('''PRODUCTS:-
1)CELL COUNTER 3
2)CELL COUNTER 5
3)REAGENTS
''')
        elif self.com_name == "BIOLAB":
            print('''PRODUCTS:-
1)GIMSA STAIN
2)RAPID PAP
3)LIQUID SOLUTION
''')
        self.product_name = input()

    def products(self):
        """Read all pricing fields for the purchased product from stdin."""
        print("PLEASE ENTER THE PRODUCT INFORMATION")
        print("MRP")
        self.mrp = int(input())
        print("SALE_RATE")
        self.sale_rate = int(input())
        print("PURCHASE_RATE")
        self.pur_rate = int(input())
        print("EXPIRY DATE")
        self.exp_date = input()
        print("DISCOUNT")
        self.discount = float(input())
        print("FINAL PRICE")
        self.final_price = float(input())

    def update_database(self):
        """Persist this purchase; not implemented yet."""
        pass
#########################################{class purchase}##################################################
class sales(product_info):
    """Interactive flow for recording a sale.

    Prompts on stdin for the customer, the company, the product, and the
    pricing fields inherited from product_info.
    """

    def __init__(self):
        # BUG FIX: was misspelled `_init_`, so the constructor never ran and
        # com_name/product_name/customer_name were never initialised.
        print("Inside sales")
        self.com_name = " "
        self.product_name = " "
        self.customer_name = " "

    def input_company(self):
        """Show the supplier menu and read the chosen company name."""
        print('''+++MENU+++
MENU:-PLEASE SELECT THE COMPANY
1)AGAPPE
2)BIOMEURIEX
3)ARKRAY
4)NIHON KOHDEN
5)BIOLAB''')
        self.com_name = input()

    def input_product(self):
        """Show the product menu for the chosen company, then read the product name."""
        if self.com_name == "AGAPPE":
            print('''PRODUCTS:-
1)BIO-CHEMISTRY
2)CELL-COUNTER
3)MISPA NANO
4)MISPA NEO
''')
        elif self.com_name == "BIOMEURIEX":
            print('''PRODUCTS:-
1)MINI VIDAS
2)VIDAS
''')
        elif self.com_name == "ARKRAY":
            print('''PRODUCTS:-
1)PPD 10TU
2)PPD 2TU
3)PPD 5TU
''')
        elif self.com_name == "NIHON KOHDEN":
            print('''PRODUCTS:-
1)CELL COUNTER 3
2)CELL COUNTER 5
3)REAGENTS
''')
        elif self.com_name == "BIOLAB":
            print('''PRODUCTS:-
1)GIMSA STAIN
2)RAPID PAP
3)LIQUID SOLUTION
''')
        self.product_name = input()

    def input_customer(self):
        """Prompt for and store the customer's name."""
        print("PLEASE ENTER THE CUSTOMER NAME")
        self.customer_name = input()

    def products(self):
        """Read all pricing fields for the sold product from stdin."""
        print("PLEASE ENTER THE PRODUCT INFORMATION")
        print("MRP")
        self.mrp = int(input())
        print("SALE_RATE")
        self.sale_rate = int(input())
        print("PURCHASE_RATE")
        self.pur_rate = int(input())
        print("EXPIRY DATE")
        self.exp_date = input()
        print("DISCOUNT")
        self.discount = float(input())
        print("FINAL PRICE")
        self.final_price = float(input())
#############################################{class sales}####################################################
#if "__name__"=="main":
#print('''PLEASE SELECT FROM THE GIVEN OPTIONS
#1)BUY
#2)SALE''')
#choice=int(input())
#buy = purchase()
#if choice==1:
# buy.input_company()
# buy.input_product()
#elif choice==2:
sell = sales()
sell.input_customer()
sell.input_company()
sell.input_product()
sell.products()
##########################################{sqllite trial area}############################################
# Open (or create) the on-disk database and define the schema for recorded sales.
connection = sqlite3.connect("SALE_TABLE.db")
cur = connection.cursor()
# BUG FIX: the column list previously ended with a trailing comma before ")",
# which is a syntax error in SQLite and made this statement unusable.
# NOTE(review): "cutomername" looks like a typo for "customername" -- kept
# as-is in case other code already references the existing column name.
sql_command = """CREATE TABLE sale (
productname VARCHAR(30),
companyname VARCHAR(30),
cutomername VARCHAR(30),
mrp integer,
purchaserate integer,
salerate float,
expdate text NOT NULL,
discount float,
finalrate float
);"""
| fcf981caa4f2aa6540cb72a4ba14a816a410f291 | [
"Python"
] | 1 | Python | aadarshjain/PYTHON-PROJECT | d1218999402d6eaa279f88bc93913b86f928c27e | 9dffac738b6daad1d26740a27e7d977628de706d |
refs/heads/master | <repo_name>gapry/lamport<file_sep>/lamport.C
#include<sys/types.h>
#include <algorithm>
#include <stdlib.h>
#include<sys/stat.h>
#include<fcntl.h>
#include<cstdio>
#include<unistd.h>
#include <dirent.h>
#include<cstring>
#define MAXLEN 1000
// scandir() filters (defined below) selecting the two kinds of lock files.
int filterByChoosing(const struct dirent* entry);
int filterByNumber(const struct dirent* entry);
// Inter-process mutex implementing Lamport's bakery algorithm on top of a
// shared directory: each process advertises intent with a ".choosing" file
// and takes a ticket stored in a ".number" file.
class ProcessMutex{
private:
    char mChoosingFileName[MAXLEN]; //the path of the choosing file for this process
    char mNumberFileName[MAXLEN];   //the path to the number file of this process
    char fileDirPath[MAXLEN];       //the directory path where the lock files are placed.
    int number ;                    //the last number that this process wrote into its file.
    int pid ;                       //the process id of this process.
    bool lockAcquired ;             // true while we believe we hold the lock
public:
    ProcessMutex(const char* mFileDirPath, const char * hostname, int pid);
    ~ProcessMutex();
    void lock();
    void release();
private:
    int getNumberFromFile(const char* filepath);
    int getPidFromFileName(const char* filepath);
};
// Build the "<dir>/<pid>:<hostname>.choosing" and ".number" paths used as
// this process's lock files.
// NOTE(review): uses unchecked strcpy/strcat into MAXLEN buffers -- an
// over-long dir_path or hostname would overflow; consider snprintf.
ProcessMutex::ProcessMutex(const char *dir_path, const char* hostname, int process_id){
    this->number = 0;
    this->pid = process_id;
    //build the file name for this process
    strcpy(fileDirPath,dir_path);
    strcpy(mNumberFileName,fileDirPath);
    strcat(mNumberFileName,"/");
    char pidstring[MAXLEN];
    sprintf(pidstring,"%d:",pid);
    strcat(mNumberFileName,pidstring);
    strcat(mNumberFileName,hostname);
    strcpy(mChoosingFileName,mNumberFileName);
    strcat(mChoosingFileName,".choosing");
    strcat(mNumberFileName,".number");
    lockAcquired = false;
    printf("\nFile directory : %s",fileDirPath);
    printf("\nChoosing path : %s",mChoosingFileName);
    printf("\nNumber path : %s",mNumberFileName);
}
// Best-effort cleanup: drop both lock files when the mutex is destroyed.
ProcessMutex::~ProcessMutex(){
    remove(mChoosingFileName);
    remove(mNumberFileName);
}
// Acquire the mutex (Lamport's bakery over shared files):
//   1. create our ".choosing" marker,
//   2. take a ticket = max(all existing tickets) + 1 and write it to our
//      ".number" file, then remove the ".choosing" marker,
//   3. for every other process, wait until it has finished choosing and
//      until its ticket is no longer smaller than ours (ties broken by pid).
// Busy-waits with no back-off; blocks until the lock is acquired.
void ProcessMutex::lock(){
    char filePath[2*MAXLEN];
    //first signal your intention to acquire a lock by creating a .choosing file
    int chooseFd = open(mChoosingFileName,O_CREAT,S_IRWXU|S_IROTH);
    close(chooseFd);
    //loop through all the number files sorted in pid order, get the max number and write your own number file.
    struct dirent** numberList;
    int nfiles = scandir(fileDirPath,&numberList,filterByNumber,alphasort);
    for(int i=0; i< nfiles;i++){
        strcpy(filePath,fileDirPath);
        strcat(filePath,"/");
        strcat(filePath,numberList[i]->d_name);
        this->number = std::max(this->number,getNumberFromFile(filePath));
        free(numberList[i]);
    }
    (this->number)++;
    int numFd = open(mNumberFileName,O_CREAT|O_WRONLY,S_IRWXU|S_IROTH);
    int wc=write(numFd,&(this->number),sizeof(int));
    //printf("\nThe write code is %d",wc);
    if(wc < 0 )
        perror(NULL);
    close(numFd);
    //remove your .choosing file
    remove(mChoosingFileName);
    printf("\nChose the number %d",this->number);
    //now loop through all the .choosing files, sorted in pid order and check if they exist
    struct dirent** choosingList;
    int cfiles = scandir(fileDirPath,&choosingList,filterByChoosing,alphasort);
    for(int i =0 ; i < cfiles; ++i){
        strcpy(filePath,fileDirPath);
        strcat(filePath,"/");
        strcat(filePath,choosingList[i]->d_name);
        free(choosingList[i]);
        while(access(filePath,F_OK)!=-1){;
            /* wait as long as the choosing file for this process exists */
            printf("\nWaiting for prcess :%s to chose",filePath);
        }
        /* wait as long as processes with number smaller than yours are in their critical sections */
        int fileNumber = 0;
        int filePid = 0;
        do{
            fileNumber = getNumberFromFile(filePath);
            filePid = getPidFromFileName(filePath);
            printf("\nGot file number %d and process number %d ",fileNumber,filePid);
        }while(fileNumber!=0 && ((fileNumber<this->number)||((fileNumber==this->number)&&(filePid<this->pid))));
    }
    //if you reach here you have got the lock !
    lockAcquired = true;
    return ;
}
// Leave the critical section: deleting our ".number" file makes our ticket
// read as 0, which unblocks any process waiting on us in lock().
void ProcessMutex::release(){
    //set the number file to zero.
    this->number = 0;
    remove(mNumberFileName);
    lockAcquired = false;
}
// Extract the numeric pid prefix from a lock-file path of the form
// "<dir>/<pid>:<hostname>.<ext>" -- i.e. the text between the last '/' and
// the last ':'. Assumes the path contains both characters.
inline int ProcessMutex::getPidFromFileName(const char* filePath){
    char pidstring[MAXLEN];
    int len = strrchr(filePath,':') -strrchr(filePath,'/')-1;
    strncpy(pidstring,strrchr(filePath,'/')+1,len);
    pidstring[len] = '\0';
    //printf("\nThe pid string from filename is %s of length %d",pidstring,len);
    return atoi(pidstring);
}
// Read the integer ticket stored in a ".number" file. A ".choosing" path is
// first rewritten to its sibling ".number" path. A missing or empty file
// reads as 0, meaning "that process is not competing".
inline int ProcessMutex::getNumberFromFile(const char * chfilePath){
    char filePath[MAXLEN];
    if(strcmp(strrchr(chfilePath,'.'),".choosing")==0){
        // splice "<base>.choosing" into "<base>.number"
        int len = strrchr(chfilePath,'.')-chfilePath +1;
        strncpy(filePath,chfilePath,len);
        strcpy(filePath+len,"number");
    }
    else
        strcpy(filePath,chfilePath);
    //printf("\nThe spliced number file path is %s ",filePath);
    int fd = open(filePath,O_RDONLY);
    if(fd==-1)
        return 0;
    int readNumber=-1;
    int rc = read(fd,(void*)&readNumber,sizeof(int));
    //printf("\nThe read code is %d",rc);
    if(rc < 0 )
        perror(NULL);
    if(rc==0)
        readNumber =0;
    close(fd);
    return readNumber;
}
// scandir() filter: accept only "*.choosing" marker files.
// BUG FIX: guards against entries without any '.' -- strrchr returns NULL
// for those, and the original code passed that NULL straight to strcmp
// (undefined behaviour, typically a crash).
int filterByChoosing(const struct dirent* entry){
    const char* ext = strrchr(entry->d_name, '.');
    if(ext != NULL && strcmp(ext, ".choosing") == 0)
        return 1;
    else return 0;
}
// scandir() filter: accept only "*.number" ticket files.
// BUG FIX: same NULL-strrchr guard as filterByChoosing -- extension-less
// directory entries previously caused strcmp(NULL, ...) undefined behaviour.
int filterByNumber(const struct dirent* entry){
    const char* ext = strrchr(entry->d_name, '.');
    if(ext != NULL && strcmp(ext, ".number") == 0)
        return 1;
    else return 0;
}
// Stress driver: repeatedly acquire the mutex and probe for mutual-exclusion
// violations by exclusively creating (O_EXCL) and removing a shared resource
// file inside the critical section. argv[1] = sleep seconds (currently unused
// because the sleep calls are commented out).
int main(int argc , char * argv[]){
    // unbuffer stdout/stderr so interleaved output from racing processes is visible
    setbuf(stdout,NULL);
    setbuf(stderr,NULL);
    char hostnamestr[MAXLEN];
    int sleepsecs = atoi(argv[1]);
    gethostname(hostnamestr,MAXLEN);
    ProcessMutex mutex("/home/adityar/sbox/testMutex/",hostnamestr,getpid());
    int i = 1000;
    while(i--){
        mutex.lock();
        printf("\n---------------------Entered critical section---------------------");
        //to test for mutual exclusion , try and create and then remove the same file inside each process's critical section. only the process has the permission to do this so that
        //any other process will fail. if any of the parallel processes fails, then our critical section is not safe and the algorithm does not work.
        int testfd = open("/home/adityar/sbox/lamport/resource.file",O_CREAT|O_EXCL,S_IRWXU);
        if(testfd < 0)
            printf("\nRace condition detected");
        close(testfd);
        //sleep for a certain number of seconds to see if other processes try and enter your critical section
        //sleep(sleepsecs);
        remove("/home/adityar/sbox/lamport/resource.file");
        //FILE * resource = fopen("/home/adityar/sbox/lamport/resource.file","a");
        //printf("\n\t\t\t sleeping for %d seconds.",sleepsecs);
        //fprintf(resource,"\nProcess no: %d wrote 1",getpid());
        //fprintf(resource,"\nProcess no: %d wrote 2",getpid());
        //fclose(resource);
        // sleep(sleepsecs);
        mutex.release();
        printf("\n---------------------Left critical section-------------------------------");
    }
}
<file_sep>/README.md
lamport
=======
An implementation of Lamport's bakery algorithm (http://en.wikipedia.org/wiki/Lamport's_bakery_algorithm) on NFS file systems for synchronization. Contains the original paper with comments on how to adapt it to use for file locking
| a12872347c6c18a4ff096ce34e06e3ac2e63fadf | [
"Markdown",
"C"
] | 2 | C | gapry/lamport | 060914fcc76e362cd4b1982c53190aafb17f5f37 | 508b3c14cb8dcc684814fde29b169dced22b71cf |
refs/heads/master | <file_sep>boto==2.3.0
gevent==0.13.7
ipdb==0.6.1
ipython==0.12.1
<file_sep>#!/usr/bin/env python
from setuptools import setup, find_packages
# Package metadata for s3sync; `scripts` installs s3sync.py onto the PATH.
setup(
    name = "s3sync",
    version = "0.1",
    packages = find_packages(),
    include_package_data = True,
    author = "<NAME>",
    author_email = "<EMAIL>",
    description = "Stupid script to upload/download files from S3 bucket",
    long_description = \
"""
Stupid script to upload/download files from S3 bucket.
You can find something better, google is your friend.
""",
    license = "MIT License",
    keywords = "aws s3 boto gevent",
    classifiers = [
        'Development Status :: 2 - Pre-Alpha',
        'Environment :: Web Environment',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Topic :: Software Development :: Libraries :: Python Modules',
    ],
    platforms = ['any'],
    scripts = ['s3sync.py',],
    url = "https://github.com/aljosa/s3sync",
)
<file_sep>#!/usr/bin/env python
import os, sys, argparse, boto, gevent
from gevent.pool import Pool
from gevent import monkey
monkey.patch_all()
# Command-line interface: target bucket, local folder, and an upload/download action.
parser = argparse.ArgumentParser(description='s3sync - upload/download files to/from amazon s3')
parser.add_argument('-b', '--bucket', dest='bucket', help='target bucket', nargs="?", default=None)
parser.add_argument('-f', '--folder', dest='folder', help='target folder', nargs="?", default=None)
#parser.add_argument('-e', '--exclude', dest='exclude', help='exclude files in folder', nargs="?", default=None)
parser.add_argument('-v', '--verbose', dest='verbose', help='show extra information', nargs="?", default=None)
parser.add_argument('action', choices=["upload", "download"], help='upload or download files', nargs="?", default=None)
def local_files(root):
    """Yield the full path of every file found under *root* (depth-first)."""
    for dirpath, _subdirs, names in os.walk(root):
        for name in names:
            yield os.path.join(dirpath, name)
def download_key(folder, key, bucket):
    """Download one S3 object into *folder*, creating parent directories.

    `key` is a boto Key whose name becomes the relative path; `bucket` is
    unused here but kept so the pool can pass a uniform argument list.
    """
    path = os.path.join(folder, key.name)
    key_folder = os.path.dirname(path)
    if not os.path.isdir(key_folder):
        os.makedirs(key_folder)
    key.get_contents_to_filename(path)
def download(folder, bucket):
    """Fetch every key in *bucket* into *folder* using a 50-greenlet pool."""
    pool = Pool(size=50)
    print "Downloading files..."
    jobs = [pool.spawn(download_key, folder, key, bucket) for key in bucket.list()]
    gevent.joinall(jobs)
def upload_key(folder, filename, bucket):
    """Upload one local file; the S3 key name is the path with *folder* stripped.

    NOTE(review): str.replace removes *every* occurrence of *folder*, so this
    assumes *folder* only appears as the leading prefix of *filename*.
    """
    name = filename.replace(folder, "")
    key = bucket.new_key(name)
    key.set_contents_from_filename(filename)
def upload(folder, bucket):
    """Upload every file under *folder* to *bucket* using a 50-greenlet pool."""
    pool = Pool(size=50)
    print "Uploading files..."
    jobs = [pool.spawn(upload_key, folder, filename, bucket) for filename in local_files(folder)]
    gevent.joinall(jobs)
if __name__ == "__main__":
print "s3sync"
bucket = folder = None
try:
s3 = boto.connect_s3()
except boto.exception.NoAuthHandlerFound, ex:
print """
Do you have AWS credentials set?
Something like this:
$ export AWS_ACCESS_KEY_ID=<AWS_ACCESS_KEY_ID>
$ export AWS_SECRET_ACCESS_KEY=<AWS_SECRET_ACCESS_KEY>
"""
print "Error: \n\t", ex, "\n"
sys.exit(1)
args = parser.parse_args()
print "args: ", args
if args.folder:
if not os.path.exists(args.folder):
os.mkdir(args.folder)
print "Created folder: %s" % args.folder
folder = os.path.abspath(args.folder)
print "folder: %s" % folder
if args.bucket:
try:
bucket = s3.get_bucket(args.bucket)
except Exception, ex:
print "\nError accessing bucket '%s':\n" % args.bucket
if args.verbose:
print ex, "\n"
sys.exit(1)
print "bucket: ", bucket
if bucket and folder:
if args.action == "upload":
upload(folder, bucket)
elif args.action == "download":
download(folder, bucket)
| 4e02d28bd9c7c26c08e357bbd89043160347aee4 | [
"Python",
"Text"
] | 3 | Text | aljosa/s3stuff | 48ff8570b5de8eb3432a5b57ea8c71987818e931 | 1c7ffd1b6ea501e52fa87597161ac83613bff40c |
refs/heads/master | <repo_name>Ivan95kos/jumper<file_sep>/src/main/resources/db/migration/V3__Encode_passwords.sql
update usr set password = <PASSWORD>' where username = 'admin';<file_sep>/src/main/resources/application.properties
spring.datasource.url=jdbc:mysql://localhost:3306/jumper?autoReconnect=true&useSSL=false&useUnicode=true&useJDBCCompliantTimezoneShift=true&useLegacyDatetimeCode=false&serverTimezone=UTC
spring.datasource.username=root
spring.datasource.password=root
spring.jps.generate-ddl=false
spring.jpa.show-sql=false
spring.jpa.hibernate.ddl-auto=validate
spring.freemarker.expose-request-attributes=true
upload.path=/C:/Users/Ivan/IdeaProjects/jumper/uploads
spring.mail.host=smtp.gmail.com
spring.mail.username=<EMAIL>
spring.mail.password=<PASSWORD>
spring.mail.port=465
spring.mail.protocol=smtps
mail.debug=true
recaptcha.secret=<KEY>
spring.session.jdbc.initialize-schema=always
spring.session.jdbc.table-name=SPRING_SESSION | 55ddfe391af07b790b4864a7c4fc5bb356585cc6 | [
"SQL",
"INI"
] | 2 | SQL | Ivan95kos/jumper | 00cf66e2e8796593186389e620747080221ad96b | 576c8985c0cbf41417551cb918af2dd8bdb0cde3 |
refs/heads/master | <repo_name>jiamao/jm-ssi<file_sep>/index.js
const fs = require('fs');
const path = require('path');
const decode = require('./lib/decode');
const run = require('./lib/run');
// Parse an SSI template file.
/**
 * Run a template file.
 * file {string} path of the template file
 * options {object}
 *   data  runtime variables exposed to the template
 *   root  base directory against which `file` and includes are resolved
 */
async function parse(file, options) {
    options = Object.assign({}, options);
    return new Promise(async (resolve, reject) => {
        if(options.root) file = path.resolve(options.root || '', file);
        options.file = file;
        fs.readFile(file, 'utf8', async (err, data) => {
            if(err) {
                reject && reject(err);
            }
            else {
                let result = await run(data, options);
                resolve && resolve(result);
            }
        });
    });
}
module.exports = {
decode: decode.decode,
run,
parse
};<file_sep>/readme.md
# jm-ssi
[![NPM version][npm-image]][npm-url]
[![npm download][download-image]][download-url]
`nodejs` 解析`ssi`库。
支持以下语法:
```html
<!--# include file="path" -->
<!--# include virtual="path" -->
<!--# set var="k" value="v" -->
<!--# echo var="name" default="default" -->
<!--# if expr="$name = /text/" -->
<!--# if expr="$name != text" -->
<!--# else -->
<!--# endif -->
```
## Install
```bash
$ npm i jm-ssi --save
```
## Usage
```js
const ssi = require('jm-ssi');
ssi.run(`<!--#if expr="\${HTTP_HOST} = /www.tenganxinxi.com/"-->
<title>腾讯腾安</title>
<!--#elif expr="\${HTTP_HOST} = /www.txfund.com/-->
<title>腾讯腾安2</title>
<!--#else-->
<title>腾讯理财通</title>
<!--#endif-->`, {
// 指定执行变量
data: {
"HTTP_HOST": "www.txfund.com"
}
}).then((result) => {
console.log(result);
});
```
#### 文件模板解析
`index.shtml`
```html
<!--#if expr="${HTTP_HOST} = www.tenganxinxi.com"-->
<title>腾讯腾安</title>
<!--#else-->
<title>腾讯理财通</title>
<!--#endif-->
<div>test $ ````</div>
<!--#echo var="HTTP_USER_AGENT" -->
<!--# include virtual="/if.shtml" stub="one" -->
```
```js
const path = require('path');
const ssi = require('jm-ssi');
const root = path.join(__dirname, 'templates');
ssi.parse('index.shtml', {
// 指定执行变量
data: {
"name": 'fefeding',
"HTTP_HOST": "www.txfund.com",
"HTTP_USER_AGENT": "Mozilla/5.0 (iPhone; CPU iPhone OS 9_1 like Mac OS X) AppleWebKit/601.1.46 (KHTML, like Gecko) Version/9.0 Mobile/13B143 Safari/601.1 wechatdevtools/1.02.1907300 MicroMessenger/6.7.3 Language/zh_CN webview/1568882530032134 webdebugger port/31804",
},
root
}).then(result => {
console.log(result);
});
```
## License
[MIT](LICENSE)
[npm-image]: https://img.shields.io/npm/v/jm-ssi.svg?style=flat-square
[npm-url]: https://npmjs.org/package/jm-ssi
[download-image]: https://img.shields.io/npm/dm/jm-ssi.svg?style=flat-square
[download-url]: https://npmjs.org/package/jm-ssi
<file_sep>/lib/run.js
const vm = require("vm");
const ssidecode = require('./decode');
/**
 * Run a template string.
 * tpl {string} template source
 * options {object}
 *   data  runtime variables exposed to the template
 *   root  base directory for SSI includes
 */
module.exports = function(tpl, options) {
    options = options || {};
    options.data = options.data || {};
    options.root = options.root || '';
    return new Promise((resolve, reject) => {
        // compile the template to JS, then execute it with the given data
        ssidecode.decode(tpl, options).then(code => {
            const result = runCode(code, options.data);
            resolve && resolve(result);
        }).catch(err => {
            reject && reject(err);
        });
    });
}
// 执行模板
function runCode(code, vars) {
const params = [];
/*const parvalues = [];
if(vars) {
for(var k in vars) {
if(!vars.hasOwnProperty(k)) continue;
parvalues.push(vars[k]);
params.push(k);
}
}*/
const context = vm.createContext(vars);
const fun = vm.compileFunction(code, params, {
//filename: p
parsingContext: context
});
//return script.runInContext(context);
return fun.call(context);
}<file_sep>/lib/decode.js
const path = require('path');
const fs = require('fs');
const codeArrName = '__p__';
const syntaxReg = /<!--#\s*([^\r\n]+?)\s*-->/mg;
const includeFileReg = /^\s*include\s+(file|virtual)=(['"])([^\r\n]+?)(['"])\s*(.*)/;
const setVarReg = /^\s*set\s+var\s*=\s*['"]?([^'"]+)['"]?\s+value\s*=\s*([\w\W]*)?\s*$/;
const echoReg = /\s*echo\s+var\s*=\s*['"]?([^'"]+)['"]?(\s+default\s*=\s*(['"][\w\W]*['"])\s*)?\s*/;
const ifReg = /^\s*if\s+expr\s*=\s*['"]?([^'"]*)?['"]?\s*$/;
const elifReg = /^\s*elif\s+expr\s*=\s*['"]?([^'"]*)?['"]?\s*$/;
const elseReg = /^\s*else\s*$/;
const endifReg = /^\s*endif\s*$/;
// Split the template into an ordered list of chunks:
// literal text pieces ({content}) and SSI directives ({match, expression, index}).
function resolveTemplate(tpl) {
    if(typeof tpl != 'string') {
        return tpl;
    }
    const tags = [];
    let index = 0;
    tpl.replace(syntaxReg, (m, r, i, src) => {
        // literal text between the previous directive and this one
        if(i > index) {
            tags.push({
                content: convertContent(src.substring(index, i))
            });
        }
        // the directive itself
        tags.push({
            match: m,
            expression: r,
            index: i
        });
        index = i + m.length;// advance past the current directive
    });
    // trailing literal text after the last directive.
    // BUG FIX: the old check `index < tpl.length - 1` silently dropped the
    // final character whenever exactly one character followed the last directive.
    if(index < tpl.length) {
        tags.push({
            content: convertContent(tpl.substr(index))
        });
    }
    return tags;
}
// Compile a template string into executable JS source.
// options: { file: current file path, root: base directory for includes }
async function decode(tpl, options) {
    options = options || {};
    return new Promise(async (resolve, reject) => {
        try {
            let codeBlocks = await convertToCode(tpl, options);
            // wrap the statements so they share one output array and return its join
            let code = `if(typeof ${codeArrName}=='undefined'){var ${codeArrName}=[];} ${codeBlocks} return ${codeArrName}.join('');`;
            //console.log(code);
            resolve && resolve(code);
        }
        catch(e) {
            console.error(e);
            reject && reject(e);
        }
    });
}
// Translate the chunk list produced by resolveTemplate into JS statements
// that append rendered output to the shared accumulator array.
async function convertToCode(tpl, options) {
    const code = []; // generated statements
    const syntaxs = resolveTemplate(tpl);
    if(!syntaxs || !syntaxs.length) return "";
    for(let l of syntaxs) {
        if(!l) continue;
        if(l.content) {
            // literal text: push it verbatim (already escaped by convertContent)
            code.push(`${codeArrName}.push(\`${l.content}\`);`);
        }
        if(l.expression) {
            let block = await resolveSyntax(l.expression, options);
            code.push(block);
        }
    }
    return code.join('\n');
}
// Dispatch one SSI directive to its converter and return the generated JS.
async function resolveSyntax(expression, options) {
    return new Promise(async (resolve, reject) => {
        // match the directive against each known form in turn
        let m = null;
        let result = '';
        switch(true) {
            // set var expression
            case !!(m = expression.match(setVarReg)): {
                result = convertSetVar(m);
                break;
            }
            // echo expression
            case !!(m = expression.match(echoReg)): {
                result = convertEcho(m);
                break;
            }
            // if expression
            case !!(m = expression.match(ifReg)): {
                result = convertIF(m);
                break;
            }
            // elif expression
            case !!(m = expression.match(elifReg)): {
                result = convertELIF(m);
                break;
            }
            // else expression
            case !!(m = expression.match(elseReg)): {
                result = convertELSE(m);
                break;
            }
            // endif expression
            case !!(m = expression.match(endifReg)): {
                result = convertENDIF(m);
                break;
            }
            // include: resolved asynchronously by reading the target file
            case !!(m = expression.match(includeFileReg)): {
                result = await resolveInclude(m, options);
                break;
            }
        }
        resolve && resolve(result);
    });
}
// Handle an `include` directive: read the referenced file and recursively
// compile its contents into code. Missing files resolve to an empty string.
function resolveInclude(m, options) {
    return new Promise(async (resolve, reject) => {
        let result = '';
        if(m && m.length > 3 && m[3]) {
            let file = m[3];
            let parent = options.root || '';
            // a relative path ("./x", "../x") resolves against the including file's directory
            if(file.indexOf('.') === 0) {
                if(options.file) parent = path.dirname(options.file);
            }
            //console.log(parent,file);
            file = path.join(parent, file);
            //console.log(file);
            if(fs.existsSync(file)) {
                fs.readFile(file, 'utf8', async (err, data) => {
                    if(err) {
                        reject && reject(err);
                    }
                    else {
                        let opt = Object.assign({}, options, {file: file});
                        let ret = await convertToCode(data, opt);
                        resolve && resolve(ret);
                    }
                });
                return;
            }
        }
        resolve && resolve(result);
    });
}
// Emit a JS `var` declaration for an SSI `set var=... value=...` match.
// Returns '' when the match array does not carry both captures.
function convertSetVar(m) {
    const usable = m && m.length > 2;
    return usable ? `var ${m[1]}=${m[2]};` : '';
}
// SSI `echo var=... [default=...]` -> JS that pushes the variable's value,
// or logs an error (and pushes the quoted default, if one was given) when
// the variable is undefined at run time.
function convertEcho(m) {
    if (!(m && m.length > 1)) {
        return '';
    }
    const name = m[1];
    const hasDefault = m.length > 3 && m[3];
    const onMissing = hasDefault ? `${codeArrName}.push(${m[3]});` : '';
    return `if(typeof ${name} == 'undefined'){console.error('${name} is not defined');${onMissing} } else {${codeArrName}.push(${name});}`;
}
// Open a JS `if` block for an SSI `if expr="..."` directive; a match with
// no captured expression degenerates to an always-true branch.
function convertIF(m) {
    if (m.length < 2) {
        return 'if(true){';
    }
    const condition = convertExpr(m[1]);
    return `if(${condition}){`;
}
// Close the previous branch and open `else if(...)` for an SSI `elif`;
// without a captured expression it degenerates to an always-true branch.
function convertELIF(m) {
    if (m.length < 2) {
        return '} else if(true){';
    }
    const condition = convertExpr(m[1]);
    return `} else if(${condition}){`;
}
// SSI `else` -> close the previous branch and open the fallback branch.
function convertELSE(m) {
    return '} else {';
}
// SSI `endif` -> close the open conditional block.
function convertENDIF(m) {
    return '}';
}
// Escape backslashes, `$`, and backticks so raw template text can be
// embedded safely inside a JS template literal.
function convertContent(content) {
    let escaped = content.replace(/\\/g, '\\\\');
    escaped = escaped.replace(/\$/g, '\\$'); // prevent accidental ${...} interpolation
    return escaped.replace(/`/g, '\\`');
}
// Convert an SSI `expr` string into a JS boolean expression.
// In `a = b`, a and b may be constants or variables, and b may be a regex.
/* expr - evaluates a test; it may be a bare variable:
<!--# if expr="$name" -->
a string comparison:
<!--# if expr="${name} = text" -->
<!--# if expr="$name != text" -->
or a regex match:
<!--# if expr="$name = /text/" -->
<!--# if expr="$name != /text/" --> */
function convertExpr(expr) {
    // no '=' at all: a bare variable test -- return its JS reference
    if(expr.indexOf('=') === -1) {
        return convertVar(expr);
    }
    // comparison expression
    else if(/([^"']+)\s*([!]?=)\s*([^"']+)/.test(expr)) {
        let varname = RegExp.$1;
        let expequal = RegExp.$2 == '='? '==' : RegExp.$2; // '=' or '!='; '=' becomes '=='
        let varvalue = RegExp.$3.trim();// right-hand side: /re/ means regex, else variable or constant
        varname = convertVar(varname);
        // regex comparison -> (!)/re/.test(varname)
        if(/^\/[^'"]+\/$/.test(varvalue)) {
            return `${expequal=='!='?'!':''}${varvalue}.test(${varname})`;
        }
        else {
            // a $variable: convert to its JS name
            if(/^\$/.test(varvalue)) {
                varvalue = convertVar(varvalue);
            }
            // otherwise a constant: quote it
            else {
                varvalue = `"${varvalue}"`;
            }
            return `${varname}${expequal}${varvalue}`;
        }
    }
    return '';
}
// A template variable may be written `$name` or `${name}`; strip the SSI
// sigil so it becomes a plain JS identifier. Anything else passes through
// trimmed and unchanged.
function convertVar(v) {
    const trimmed = v.trim();
    if (!trimmed) {
        return trimmed;
    }
    const plain = trimmed.match(/\$([^{}\s]+)/);
    if (plain) {
        return plain[1];
    }
    const braced = trimmed.match(/\${([^}]+)}/);
    if (braced) {
        return braced[1];
    }
    return trimmed;
}
// Public API: only the decoder is exported; run.js consumes it.
module.exports = {
    decode
}
"JavaScript",
"Markdown"
] | 4 | JavaScript | jiamao/jm-ssi | 6b48d85d0c55f913dd6944c0c57203a3f765e3e0 | 12466176701866001af2843991a216738c4e054d |
refs/heads/main | <file_sep>//
// MovieGridCell.swift
// flixtermovies
//
// Created by <NAME> on 10/1/20.
//
import UIKit
/// Collection-view cell for the grid tab: displays a single movie's poster.
class MovieGridCell: UICollectionViewCell {
    /// Poster image view wired up from the storyboard.
    @IBOutlet weak var posterView: UIImageView!
}
<file_sep># Flixter-Movies
Flixter Movies is an app that allows users to browse movies from the [The Movie Database API](http://docs.themoviedb.apiary.io/#).
## Flix Part 2
### User Stories
#### REQUIRED (10pts)
- [x] (5pts) User can tap a cell to see more details about a particular movie.
- [x] (5pts) User can tap a tab bar button to view a grid layout of Movie Posters using a CollectionView.
#### BONUS
- [ ] (2pts) User can tap a poster in the collection view to see a detail screen of that movie.
- [x] (2pts) In the detail view, when the user taps the poster, a new screen is presented modally where they can view the trailer.
### App Walkthrough GIF
<img src="http://g.recordit.co/6psOLv9qHH.gif" width=250><br>
### Notes
Most of the challenges in creating additional features for the Flixter Movies app came from adding a bonus element. Adding the Web Kit View to my app, along with the necessary code to properly load the correct page, took a lot of trial and error. Ultimately, I was able to accomplish this task, and I am excited to take on more bonus features in the projects that lie ahead.
---
## Flix Part 1
### User Stories
#### REQUIRED (10pts)
- [x] (2pts) User sees an app icon on the home screen and a styled launch screen.
- [x] (5pts) User can view and scroll through a list of movies now playing in theaters.
- [x] (3pts) User can view the movie poster image for each movie.
#### BONUS
- [x] (2pt) User can view the app on various device sizes and orientations.
- [x] (1pt) Run your app on a real device.
### App Walkthrough GIF
<img src="http://g.recordit.co/9ChNlQLfJv.gif" width=250><br>
### Notes
With the new updates to Xcode, I did have difficulties creating the new App file with the correct properties. I faced a few challenges with my terminal and initializing pod; however, the problems were solved. I really enjoyed making this app, and I am excited to make more in the future building off what I learned in creating this one.
| 156d239dd6be3395974773acf9d9b8e236928932 | [
"Swift",
"Markdown"
] | 2 | Swift | evelynhasama/Flixter-Movies | bc747caa77a688e5202ef8be6316a78a867936b4 | 313a24f228f4c58c82b507987eac124ed6d64e52 |
refs/heads/master | <repo_name>L-u-i-s/blog1<file_sep>/public/index.php
<?php
# Include the Composer vendor autoloader
require("../vendor/autoload.php");
# Configure Idiorm ORM to use SQLite
ORM::configure('sqlite:../app.sqlite.db');
ORM::configure('logging', true);
//ORM::configure('return_result_sets', true);
# Instantiate a new Slim application with Blade templating
$app = new \Slim\Slim([
    #Adds application settings
    'view' => new \Slim\Views\Blade(),
    'templates.path' => '../templates',
]);
#setup the compiled-template cache for Blade
$view = $app->view();
$view->parserOptions = [
    'debug' => true,
    'cache' => "../html_cache"
];
# GET /todos -- list all todos joined with their human-readable status value
$app->get("/todos", function() use ($app){
    $todos = ORM::forTable('todos')
        ->select(array('todos.id', 'todos.task', 'lookup.value'))
        ->join('lookup', ['todos.status', '=', 'lookup.code'])
        //->where(['lookup.type' => 'todo.status'])
        ->findMany();
    $app->render('todos.index', compact('todos', 'app'));
    //var_dump(ORM::get_query_log(), $todos);
})->name('todos.index');
# GET /todos/:id -- show a single todo (route currently disabled)
/*$app->get("/todos/:id", function($id) use ($app){
$todo = ORM::forTable('todos')->findOne($id);
$app->render('todos.show', compact('todo'));
});*/
# GET /todos/:id/update/status/:status -- change a todo's status, then redirect
$app->get("/todos/:id/update/status/:status", function($id, $status) use ($app){
    # First resolve the status name they passed into its lookup code row
    $status = ORM::forTable('lookup')->where(['type' => 'todo.status', 'value' => $status])->findOne();
    # Then we get the corresponding todo item for the id
    $todo = ORM::forTable('todos')->findOne($id);
    # We update its code value //$todo->status = $status->code;
    $todo->set('status', $status->code);
    # We save it into the database
    $todo->save();
    //var_dump(ORM::get_query_log());
    $app->redirect('/todos');
})
->conditions(['status' => '(new|working|done|archived)'])
->name('todo.update');
# Actually runs the application
$app->run();<file_sep>/app_schema.sql
CREATE TABLE todos (
id INTEGER PRIMARY KEY AUTOINCREMENT,
task CHAR(128) NOT NULL,
status INTEGER NOT NULL default 0,
created_at DATETIME default current_timestamp,
updated_at DATETIME default current_timestamp
);
INSERT INTO todos
(id, task, status)
VALUES
(NULL, "Get milk", 0),
(NULL, "Walk the dog", 0),
(NULL, "Learn MVC", 0);
CREATE TABLE lookup (
id INTEGER PRIMARY KEY AUTOINCREMENT,
type CHAR(128) NOT NULL,
code INTEGER NOT NULL,
value CHAR(128)
);
INSERT INTO lookup
(id, type, code, value)
VALUES
(NULL, 'todo.status', 0, 'new'),
(NULL, 'todo.status', 1, 'working'),
(NULL, 'todo.status', 2, 'done'),
(NULL, 'todo.status', 3, 'archived'); | 2a3b7b4b505967bddef5784e515b9ae170828b86 | [
"SQL",
"PHP"
] | 2 | PHP | L-u-i-s/blog1 | 88fb8483a142e80720bcabfe6a790d4df3c9b258 | ffed197dd9260276ddb24339f5c60e5c025c15bf |
refs/heads/master | <repo_name>VannAtApollo/apollo-demo-1<file_sep>/performance/server/Dockerfile
FROM node:14-alpine
COPY package.json server.js schema.graphql data.json ./
RUN npm install
USER node
CMD node server.js<file_sep>/awards-apollo/Makefile
push:
apollo service:push \
--localSchemaFile=schema.graphql \
--graph=jesse-test-1xc6kq \
--variant=current \
--serviceName=awards \
--serviceURL=https://awards-dot-lovelace-presales-demo.ue.r.appspot.com
publish:
rover subgraph publish jesse-test-1xc6kq@current --schema ./schema.graphql \
--name awards --routing-url https://awards-dot-lovelace-presales-demo.ue.r.appspot.com
deploy:
gcloud app deploy
check:
apollo service:check \
--localSchemaFile=schema.graphql \
--graph=jesse-test-1xc6kq \
--variant=current \
--serviceName=awards
# --validationPeriod=20736000
rcheck:
rover subgraph check jesse-test-1xc6kq \
--schema=schema.graphql \
--name=awards<file_sep>/gateway/Makefile
deploy:
gcloud app deploy
deploy-contracts:
gcloud app deploy app-contracts.yaml<file_sep>/awards-graphene/main.py
import graphene
import logging
import json
from graphene_federation import build_schema, key, extend, external
from middleware import TracingMiddleware
from flask import Flask
from flask_graphql import GraphQLView
app = Flask(__name__)
@key(fields='awardName year')
class Award(graphene.ObjectType):
class Meta:
description = "An award for a work of literature."
bookTitle = graphene.String(description="The title of the book")
title = graphene.String(deprecation_reason="Use awardTitle for all new clients.")
year = graphene.Int(description="The year the award was given.")
authorName = graphene.String(description="The author name.")
awardTitle = graphene.String(description="The title of the award (ie, 'Best Novel').")
awardName = graphene.String(description="The name of the award (ie, '<NAME>').")
def resolve_title(self, info):
return self.awardTitle
@extend(fields='name')
class Author(graphene.ObjectType):
name = external(graphene.String(required=True))
awards = graphene.List(Award)
def resolve_awards(parent, info):
#logging.error(parent)
#logging.error(info)
author_awards = []
for a in awards:
if a.authorName == parent.name:
author_awards.append(a)
return author_awards
awards = []
award_data = json.loads(open('awards.json').read())
for award in award_data:
awards.append(Award(
bookTitle=award['bookTitle'],
year=award['year'],
awardName=award['awardName'],
awardTitle=award['awardTitle'],
authorName=award['authorName']
))
class Query(graphene.ObjectType):
awards = graphene.List(Award)
def resolve_awards(root, info):
return awards
schema = graphene.Schema(query=Query)
app.add_url_rule('/', view_func=GraphQLView.as_view(
'graphql',
schema=schema,
graphiql=True,
debug=True
#middleware=[TracingMiddleware()]
))
# Optional, for adding batch query support (used in Apollo-Client)
app.add_url_rule('/graphql/batch', view_func=GraphQLView.as_view(
'graphql_batch',
schema=schema,
batch=True,
debug=True
))
if __name__ == '__main__':
app.run()<file_sep>/awards-graphene/middleware.py
import time
from datetime import datetime
from functools import partial
class TracingMiddleware(object):
DATETIME_FORMAT = "%Y-%m-%dT%H:%M:%S.%fZ"
def __init__(self):
#print("init")
self.resolver_stats = list()
self.reset()
def reset(self):
#print("reset")
self.start_time = self.now()
self.end_time = None
self.parsing_start_time = None
self.parsing_end_time = None
self.validation_start_time = None
self.validation_end_time = None
def start(self):
#print("start")
self.reset()
self.start_time = self.now()
def end(self):
self.end_time = self.now()
def parsing_start(self):
#print("parsing st")
self.parsing_start_time = self.now()
def parsing_end(self):
self.parsing_end_time = self.now()
def validation_start(self):
#print("val st")
self.validation_start_time = self.now()
def validation_end(self):
self.validation_end_time = self.now()
def now(self):
#if PY37:
return time.time_ns()
#return int(time.time() * 1000000000)
@property
def start_time_str(self):
start_time_seconds = float(self.start_time / 1000000000)
return datetime.fromtimestamp(start_time_seconds).strftime(self.DATETIME_FORMAT)
@property
def end_time_str(self):
end_time_seconds = float(self.end_time / 1000000000)
return datetime.fromtimestamp(end_time_seconds).strftime(self.DATETIME_FORMAT)
@property
def duration(self):
if not self.end_time:
raise ValueError("Tracing has not ended yet!")
return self.end_time - self.start_time
def get_tracing_extension_dict(self):
result = dict(
version=1,
startTime=self.start_time_str,
endTime=self.end_time_str,
duration=self.duration,
parsing=dict(
startOffset=self.parsing_start_time - self.start_time,
duration=self.parsing_end_time - self.parsing_start_time,
),
execution=dict(resolvers=self.resolver_stats),
)
if self.validation_start_time and self.validation_end_time:
result["validation"] = dict(
startOffset=self.validation_start_time - self.start_time,
duration=self.validation_end_time - self.validation_start_time,
)
return result
def _after_resolve(self, start_time, resolver_stats, info, data):
end = time.time()
#print("1")
elapsed_ms = (end - start_time) * 1000
#print("2")
#print(self.start_time)
stat = {
"path": info.path,
"parentType": str(info.parent_type),
"fieldName": info.field_name,
"returnType": str(info.return_type),
"startOffset": self.now() - self.start_time,
"duration": elapsed_ms,
}
#print("3")
resolver_stats.append(stat)
#print("4")
self.reset()
return data
def resolve(self, _next, root, info, *args, **kwargs):
self.start()
start = time.time()
on_result_f = partial(self._after_resolve, start, self.resolver_stats, info)
return _next(root, info, *args, **kwargs) \
.then(on_result_f)<file_sep>/awards-apollo/server.js
const fs = require('fs');
const { ApolloServer, gql } = require('apollo-server');
const { buildFederatedSchema } = require('@apollo/federation');
const { ApolloServerPluginInlineTrace } = require('apollo-server-core');
const dotenv = require('dotenv');
const bunyan = require('bunyan');
// Imports the Google Cloud client library for Bunyan
const {LoggingBunyan} = require('@google-cloud/logging-bunyan');
// Creates a Bunyan Cloud Logging client
const loggingBunyan = new LoggingBunyan();
// Create a Bunyan logger that streams to Cloud Logging
// Logs will be written to: "projects/YOUR_PROJECT_ID/logs/bunyan_log"
const logger = bunyan.createLogger({
// The JSON payload of the log as it appears in Cloud Logging
// will contain "name": "my-service"
name: 'awards-service',
streams: [
// Log to the console at 'info' and above
{stream: process.stdout, level: 'info'},
// And log to Cloud Logging, logging at 'info' and above
loggingBunyan.stream('info'),
],
});
dotenv.config();
// A schema is a collection of type definitions (hence "typeDefs")
// that together define the "shape" of queries that are executed against
// your data.
const typeDefs = gql(fs.readFileSync('schema.graphql', 'utf8'))
const awards = require('./awards.json');
function fetchAwardsForAuthor(author) {
//logger.info('Fetch awards for author: ' + author);
//logger.info(awards);
var awardsFound = [];
for (var i = 0; i < awards.length; i++) {
if (awards[i].authorName === author) {
awardsFound.push(awards[i]);
}
}
//logger.info("Books found: " + awardsFound.length);
return awardsFound;
}
// Resolvers define the technique for fetching the types defined in the
// schema. This resolver retrieves awards from the "awards" array above.
const resolvers = {
Query: {
awards: () => awards,
},
Author: {
awards(author) {
return fetchAwardsForAuthor(author.name);
}
}
};
// The ApolloServer constructor requires two parameters: your schema
// definition and your set of resolvers.
const server = new ApolloServer({
schema: buildFederatedSchema([{ typeDefs, resolvers }]),
plugins: [ApolloServerPluginInlineTrace()]
});
const PORT = process.env.PORT || 8080;
// The `listen` method launches a web server.
server.listen({port:PORT}).then(({ url }) => {
console.log(`🚀 Server ready at ${url}`);
});
<file_sep>/awards-ariadne/README.md
# This service is not currently used.
Use /awards-graphene instead.<file_sep>/awards-graphene/README.md
# Awards Service
The awards service knows about Science Fiction literature awards like
the Hugo and Nebula.
# Setup
1. Install PIP dependencies: `pip install -r requirements.txt`
# Deploy
`make deploy`
# Run a Schema Check
1. Edit the Makefile to specify your correct graph and variant.
2. Run `make check`
# Push a Schema Change
1. Edit the Makefile to specify your correct graph and variant.
2. Run `make push`<file_sep>/fixtures/dedupe.py
import sys
import json
if __name__ == "__main__":
values = set()
if len(sys.argv) != 3:
print("Usage: dedupe.py filename.json key_name\n")
sys.exit(1)
key_to_test = sys.argv[2]
new_data = []
for item in json.loads(open(sys.argv[1], 'r').read()):
for k, v in item.items():
if k == key_to_test:
if v in values:
# Dupe, skip it
continue
else:
values.add(v)
new_data.append(item)
# Re write file with new data.
out_file = open(sys.argv[1], 'w')
out_file.write(json.dumps(new_data, indent=4))
out_file.close()
<file_sep>/README.md
# Apollo Demo
Federated schema with 3 services:
* authors/ - using GQLGen (GoLang)
* awards-graphene/ - using Graphene (Python)
* books/ - using Apollo Service (JS)
## Setup (local)
Add `.env` files in each service, gateway, and clients directory as documented here:
https://www.apollographql.com/docs/tutorial/production/#set-environment-variables
Run `npm install` for the NodeJS projects.
## Deployment
In each service:
```make deploy```
<file_sep>/awards-graphene/Makefile
push:
apollo service:push \
--endpoint=https://awards-dot-jll-apollo-test.uc.r.appspot.com \
--graph=jesse-test-1xc6kq \
--variant=current \
--serviceName=awards \
--serviceURL=https://awards-dot-jll-apollo-test.uc.r.appspot.com
check:
apollo service:check --serviceName=awards --endpoint=https://awards-dot-jll-apollo-test.uc.r.appspot.com
deploy:
gcloud app deploy<file_sep>/performance/Makefile
test:
APOLLO_KEY=service:Performance-Testing:aHO8QcP2o41mf1CgBSKdwA \
rover subgraph publish Performance-Testing@current \
--name backend --schema server/schema.graphql --convert \
--routing-url http://server:8080/
docker-compose --env-file .env up
siege:
ab -n 4000 -c 5 -T "application/json" -p gql_post.json http://localhost:10100/
siegegw:
ab -n 4000 -c 5 -T "application/json" -p gql_post.json http://localhost:10101/
<file_sep>/parse_data.py
import csv
import json
import wikipedia
import re
date_range = re.compile(r'\(([A-Za-z]+) ([0-9]{1,2}), ([0-9]{4}) – ([A-Za-z]+) ([0-9]{1,2}), ([0-9]{4})\)')
born_on = re.compile(r'\(born ([A-Za-z]+) ([0-9]{1,2}), ([0-9]{4})\)')
books = []
awards = []
authors = []
other_source1 = json.loads(open('fixtures/award_data_1.json').read())
other_source1_db = {}
# Index by work title
for work in other_source1:
other_source1_db[work['title']] = work
with open('fixtures/data.csv') as csv_file:
csv_reader = csv.reader(csv_file, delimiter=',')
line_count = 0
for row in csv_reader:
if line_count == 0:
print(f'Column names are {", ".join(row)}')
line_count += 1
else:
extra_data = None
try:
extra_data = other_source1_db[row[0]]
except:
print("Extra data miss: %s" % row[0])
pass
books.append({
'title': row[0],
'author': row[1],
'published_date': row[2]
})
author_data = {
'name': row[1],
'whereBorn': '',
'yearBorn': None,
'yearDied': None
}
try:
author = wikipedia.page("%s (author)" % row[1])
print("Found wikipedia page: ", author.title)
author_data['biography'] = author.summary
age_range = date_range.search(author.content)
if age_range:
print("Match age range.")
author_data['yearBorn'] = int(age_range.group(3))
author_data['yearDied'] = int(age_range.group(6))
else:
born = born_on.search(author.content)
if born:
author_data['yearBorn'] = int(born.group(3))
except:
pass
authors.append(author_data)
if row[3] == '1':
yearWon = None
if extra_data is not None:
if "Hugo" in extra_data['winner']:
yearWon = int(extra_data['year'])
awards.append({
'bookTitle': row[0],
'year': yearWon,
'authorName': row[1],
'awardName': '<NAME>ward',
'awardTitle': 'Best Novel'
})
if row[4] == '1':
yearWon = None
if extra_data is not None:
if "Nebula" in extra_data['winner']:
yearWon = int(extra_data['year'])
awards.append({
'bookTitle': row[0],
'year': yearWon,
'authorName': row[1],
'awardName': 'Nebula Award',
'awardTitle': 'Best Novel'
})
if row[5] == '1':
yearWon = None
if extra_data is not None:
if "Locus" in extra_data['winner']:
yearWon = int(extra_data['year'])
awards.append({
'bookTitle': row[0],
'year': yearWon,
'authorName': row[1],
'awardName': 'Locus Award',
'awardTitle': 'Best Novel'
})
line_count += 1
print(f'Processed {line_count} lines.')
with open('authors/authors.json', 'w') as authors_file:
authors_file.write(json.dumps(authors, indent=4))
with open('awards-graphene/awards.json', 'w') as awards_file:
awards_file.write(json.dumps(awards, indent=4))
with open('books/books.json','w') as books_file:
books_file.write(json.dumps(books, indent=4))<file_sep>/performance/gateway/Makefile
build:
docker build -t jesse/gateway .<file_sep>/books/README.md
# Books Service
This service uses the Apollo Server it implment the service.
# Setup
1. Run `npm install`
2. Add a `.env` file.
# Run a Schema Check
1. Edit the Makefile to specify your correct graph and variant.
2. Run `make check`
# Push a Schema Change
1. Edit the Makefile to specify your correct graph and variant.
2. Run `make push`
# Run Locally
`node server.js`<file_sep>/proxy/go.mod
module github.com/jesse-apollo/apollo-demo/gateway
go 1.16
<file_sep>/authors/Makefile
register-service:
apollo service:push \
--localSchemaFile=./graph/schema.graphqls \
--graph=jesse-test-1xc6kq \
--variant=current \
--serviceName=authors \
--serviceURL=https://authors-dot-lovelace-presales-demo.ue.r.appspot.com/query
publish:
rover subgraph publish jesse-test-1xc6kq@current --schema ./graph/schema.graphqls \
--name authors --routing-url https://authors-dot-lovelace-presales-demo.ue.r.appspot.com/query
regen:
go run github.com/99designs/gqlgen init
deploy:
gcloud app deploy
check:
apollo service:check \
--localSchemaFile=./graph/schema.graphqls \
--graph=jesse-test-1xc6kq \
--variant=current \
--serviceName=authors
rcheck:
rover subgraph check jesse-test-1xc6kq \
--schema=./graph/schema.graphqls \
--name=authors
<file_sep>/performance/gateway/gateway.js
import { ApolloGateway, RemoteGraphQLDataSource } from "@apollo/gateway";
import { ApolloServer } from "apollo-server";
import dotenv from "dotenv";
dotenv.config();
const PORT = process.env.PORT || 4000;
const APOLLO_KEY = process.env.APOLLO_KEY || 4000;
const gateway = new ApolloGateway();
const server = new ApolloServer({
gateway,
debug: true,
introspection: true, // Not for prod
playground: true, // Not for prod
subscriptions: false,
});
server.listen({ port: PORT }).then(({ url }) => {
console.log(`Server ready at ${url}`);
}); <file_sep>/client/main.py
import random
from gql import gql, Client
from gql.transport.requests import RequestsHTTPTransport
client_name = [
"iOS",
"web",
"Android",
]
versions = ['1.0', '1.1', '1.2']
# Select your transport with a defined url endpoint
transport = RequestsHTTPTransport(
url="https://lovelace-presales-demo.ue.r.appspot.com",
headers={
'apollographql-client-name': random.choice(client_name),
'apollographql-client-version': random.choice(versions)
}, verify=True, retries=3,
)
# Create a GraphQL client using the defined transport
client = Client(transport=transport, fetch_schema_from_transport=False)
client_types = []
queries = [
gql("query getAwards { awards { awardTitle, awardName, year }}"),
gql("query getAwardName { awards { awardName }}"),
gql("query getAwardDetails { awards { awardName, title }}"),
gql("query getAuthors { authors { name, yearBorn, biography }}"),
gql("query getBooks { books { title, author, publisher }}"),
gql("query getAuthorsDetail { authors { name, books { title }, awards { awardName } }}"),
gql("query getHomePageDetail { authors { name, books { title }, awards { awardName } }, books { title }, awards { awardName, title } }"),
]
for i in range(random.randint(50,100)):
# Provide a GraphQL query
query = random.choice(queries)
#print(query)
# Execute the query on the transport
result = client.execute(query)
<file_sep>/authors/go.mod
module github.com/jesse-apollo/apollo-demo/authors
go 1.15
require (
github.com/99designs/gqlgen v0.13.0
github.com/agnivade/levenshtein v1.1.0 // indirect
github.com/hashicorp/golang-lru v0.5.4 // indirect
github.com/mitchellh/mapstructure v1.4.0 // indirect
github.com/vektah/gqlparser/v2 v2.1.0
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 // indirect
google.golang.org/protobuf v1.27.1
gopkg.in/yaml.v2 v2.4.0 // indirect
)
<file_sep>/books/server.js
const fs = require('fs');
const { ApolloServer, gql } = require('apollo-server');
const { buildFederatedSchema } = require('@apollo/federation');
const { ApolloServerPluginInlineTrace } = require('apollo-server-core');
const dotenv = require('dotenv');
const bunyan = require('bunyan');
// Imports the Google Cloud client library for Bunyan
const {LoggingBunyan} = require('@google-cloud/logging-bunyan');
// Creates a Bunyan Cloud Logging client
const loggingBunyan = new LoggingBunyan();
// Create a Bunyan logger that streams to Cloud Logging
// Logs will be written to: "projects/YOUR_PROJECT_ID/logs/bunyan_log"
const logger = bunyan.createLogger({
// The JSON payload of the log as it appears in Cloud Logging
// will contain "name": "my-service"
name: 'book-service',
streams: [
// Log to the console at 'info' and above
{stream: process.stdout, level: 'info'},
// And log to Cloud Logging, logging at 'info' and above
loggingBunyan.stream('info'),
],
});
dotenv.config();
// A schema is a collection of type definitions (hence "typeDefs")
// that together define the "shape" of queries that are executed against
// your data.
const typeDefs = gql(fs.readFileSync('schema.graphql', 'utf8'))
const books = require('./books.json');
function fetchBooksForAuthor(author) {
//logger.info('Fetch books for author: ' + author);
//logger.info(books);
var booksFound = [];
for (var i = 0; i < books.length; i++) {
if (books[i].author === author) {
booksFound.push(books[i]);
}
}
//logger.info("Books found: " + booksFound.length);
return booksFound;
}
function fetchBooksForDatum(datum) {
//logger.info('Fetch books for author: ' + author);
//logger.info(books);
var booksFound = [];
for (var i = 0; i < 20; i++) {
booksFound.push(
{
"title": "My Book " + i ,
"author": "Datum " + datum._id,
"published_date": "10 July 2018"
});
}
//logger.info("Books found: " + booksFound.length);
return booksFound;
}
// Resolvers define the technique for fetching the types defined in the
// schema. This resolver retrieves books from the "books" array above.
const resolvers = {
Query: {
books: () => books,
},
Author: {
books(author) {
return fetchBooksForAuthor(author.name);
}
},
Datum: {
books(datum) {
return fetchBooksForDatum(datum._id);
}
}
};
// The ApolloServer constructor requires two parameters: your schema
// definition and your set of resolvers.
const server = new ApolloServer({
schema: buildFederatedSchema([{ typeDefs, resolvers }]),
plugins: [ApolloServerPluginInlineTrace()]
});
const PORT = process.env.PORT || 8080;
// The `listen` method launches a web server.
server.listen({port:PORT}).then(({ url }) => {
console.log(`🚀 Server ready at ${url}`);
});
<file_sep>/authors/graph/entity.resolvers.go
package graph
// This file will be automatically regenerated based on the schema, any resolver implementations
// will be copied through when generating and any unknown code will be moved to the end.
import (
"context"
"encoding/json"
"fmt"
"io/ioutil"
"os"
"github.com/jesse-apollo/apollo-demo/authors/graph/generated"
"github.com/jesse-apollo/apollo-demo/authors/graph/model"
)
func (r *entityResolver) FindAuthorByName(ctx context.Context, name string) (*model.Author, error) {
var authors []*model.Author
jsonFile, err := os.Open("authors.json")
// if we os.Open returns an error then handle it
if err != nil {
return nil, err
}
// defer the closing of our jsonFile so that we can parse it later on
defer jsonFile.Close()
byteValue, _ := ioutil.ReadAll(jsonFile)
err = json.Unmarshal(byteValue, &authors)
if err != nil {
return nil, err
}
for _, author := range authors {
if (*author).Name == name {
return author, nil
}
}
return nil, fmt.Errorf("Author not found.")
}
// Entity returns generated.EntityResolver implementation.
func (r *Resolver) Entity() generated.EntityResolver { return &entityResolver{r} }
type entityResolver struct{ *Resolver }
<file_sep>/gateway/server.js
import { ApolloGateway, RemoteGraphQLDataSource } from "@apollo/gateway";
import { ApolloServer } from "apollo-server";
import { bootstrap } from 'global-agent';
import { ApolloServerPluginInlineTrace } from "apollo-server-core";
import dotenv from "dotenv";
import fetch from "make-fetch-happen";
//console.log(process.env.GLOBAL_AGENT_HTTP_PROXY);
//console.log(process.env.GLOBAL_AGENT_HTTPS_PROXY);
//bootstrap();
class AuthenticatedDataSource extends RemoteGraphQLDataSource {
willSendRequest({ request, context }) {
request.http.headers.set('apiKey', '<KEY>');
}
}
dotenv.config();
const PORT = process.env.PORT || 4000;
const APOLLO_KEY = process.env.APOLLO_KEY;
//global.GLOBAL_AGENT.HTTP_PROXY = 'https://127.0.0.1:9090';
const gateway = new ApolloGateway({
buildService({ name, url }) {
return new AuthenticatedDataSource({ url });
},
});
const server = new ApolloServer({
gateway,
context: ({ req }) => {
// Get the Mongo API Key from the headers
const apiKey = req.headers.apiKey || '';
return { apiKey };
},
debug: false,
introspection: false, // Not for prod
playground: false, // Not for prod
subscriptions: false,
//logger: logger,
plugins: [ApolloServerPluginInlineTrace()]
});
server.listen({ port: PORT }).then(({ url }) => {
console.log(`Server ready at ${url}`);
}); <file_sep>/awards-apollo/README.md
# Awards Service - Apollo/NodeJS
The awards service knows about Science Fiction literature awards like the Hugo and Nebula.
# Setup
1. Run `npm install`
2. Add a `.env` file.
# Run a Schema Check
1. Edit the Makefile to specify your correct graph and variant.
2. Run `make check`
# Push a Schema Change
1. Edit the Makefile to specify your correct graph and variant.
2. Run `make push`
# Run Locally
`node server.js`<file_sep>/authors/README.md
# Authors Service
This services knows about Science Fiction authors.
# Setup
1. Add an Apollo `.env` file.
2. Run `go build`
# Deploy to AppEngine
`make deploy`
# Run a Schema Check
1. Edit the Makefile to specify your correct graph and variant.
2. Run `make check`
# Push a Schema Change
1. Edit the Makefile to specify your correct graph and variant.
2. Run `make push`
# Regen Project after Schema Change
1. Run `make regen`<file_sep>/performance/gateway/Dockerfile
FROM node:14-alpine
COPY package.json gateway.js ./
RUN npm install
USER node
CMD node gateway.js<file_sep>/fixtures/isbn_search.py
import sys
import json
import isbnlib
if __name__ == "__main__":
values = set()
if len(sys.argv) != 2:
print("Usage: isbn_search.py books.json\n")
sys.exit(1)
new_data = []
for item in json.loads(open(sys.argv[1], 'r').read()):
isbn = isbnlib.isbn_from_words("%s %s" % (item['title'], item['author']))
item['isbn'] = isbn
print("%s - %s" % (item['title'], item['isbn']))
new_data.append(item)
out_file = open('test.json', 'w')
out_file.write(json.dumps(new_data, indent=4))
out_file.close()<file_sep>/performance/server/Makefile
build:
docker build -t jesse/server .<file_sep>/client/Makefile
check:
apollo client:check --includes="*.py"
<file_sep>/books/Makefile
push:
apollo service:push \
--localSchemaFile=schema.graphql \
--graph=jesse-test-1xc6kq \
--variant=current \
--serviceName=books \
--serviceURL=https://books-dot-lovelace-presales-demo.ue.r.appspot.com
publish:
rover subgraph publish jesse-test-1xc6kq@current --schema ./schema.graphql \
--name books --routing-url https://books-dot-lovelace-presales-demo.ue.r.appspot.com
deploy:
gcloud app deploy
check:
apollo service:check \
--localSchemaFile=schema.graphql \
--graph=jesse-test-1xc6kq \
--variant=current \
--serviceName=books
# --validationPeriod=20736000
rcheck:
rover subgraph check jesse-test-1xc6kq \
--schema=schema.graphql \
--name=books --validation-period=2w<file_sep>/awards-ariadne/main.py
from ariadne import ObjectType, QueryType, gql
from ariadne.contrib.federation import make_federated_schema
from ariadne.contrib.tracing.apollotracing import ApolloTracingExtension
from ariadne.wsgi import GraphQL
# Define types using Schema Definition Language (https://graphql.org/learn/schema/)
# Wrapping string in gql function provides validation and better error traceback
type_defs = gql("""
type Query {
awards: [Award!]!
}
type Award {
bookTitle: String
year: Int
authorName: String
awardTitle: String
}
""")
# Map resolver functions to Query fields using QueryType
query = QueryType()
# Resolvers are simple python functions
@query.field("awards")
def resolve_award(*_):
return [
{"bookTitle": "Dune", "year": 1966, "awardName": "Hugo Award", "authorName": "<NAME>"},
{"bookTitle": "Dune", "year": 1966, "awardName": "Nebula Award", "authorName": "<NAME>"},
{"bookTitle": "The Left Hand of Darkness", "year": 1970, "awardName": "Nebula Award", "authorName": "<NAME>"},
]
# Map resolver functions to custom type fields using ObjectType
award = ObjectType("Award")
#@<EMAIL>("fullName")
#def resolve_person_fullname(person, *_):
# return "%s %s" % (person["firstName"], person["lastName"])
# Create executable GraphQL schema
schema = make_federated_schema(type_defs, query, award)
# Create an ASGI app using the schema, running in debug mode
app = GraphQL(schema, debug=True, extensions=[ApolloTracingExtension])
<file_sep>/authors/graph/schema.resolvers.go
package graph
// This file will be automatically regenerated based on the schema, any resolver implementations
// will be copied through when generating and any unknown code will be moved to the end.
import (
"context"
"encoding/json"
"io/ioutil"
"os"
"github.com/jesse-apollo/apollo-demo/authors/graph/generated"
"github.com/jesse-apollo/apollo-demo/authors/graph/model"
)
func (r *queryResolver) Authors(ctx context.Context) ([]*model.Author, error) {
var authors []*model.Author
jsonFile, err := os.Open("authors.json")
// if we os.Open returns an error then handle it
if err != nil {
return nil, err
}
// defer the closing of our jsonFile so that we can parse it later on
defer jsonFile.Close()
byteValue, _ := ioutil.ReadAll(jsonFile)
err = json.Unmarshal(byteValue, &authors)
if err != nil {
return nil, err
}
return authors, nil
}
// Query returns generated.QueryResolver implementation.
func (r *Resolver) Query() generated.QueryResolver { return &queryResolver{r} }
type queryResolver struct{ *Resolver }
<file_sep>/awards-ariadne/requirements.txt
ariadne==0.12.0
graphql-core==3.0.5
starlette==0.13.8
typing-extensions==3.7.4.3
| 01cda5e55c3880632fae0f30ea0841c8c37a3819 | [
"JavaScript",
"Markdown",
"Makefile",
"Python",
"Text",
"Go",
"Go Module",
"Dockerfile"
] | 33 | Dockerfile | VannAtApollo/apollo-demo-1 | 1bd36d33abaafe79a8a783d5a78bbc6ffa1f0499 | 966fae891c863e710c1de1d58c339bbeea6aca2d |
refs/heads/master | <repo_name>ybai62868/CS-97SI-Introduction-to-Programming-Contests<file_sep>/2020UESTC/01-DataStructure/A/main.cpp
# include <cstdio>
# include <iostream>
# include <stack>
# include <string>
using namespace std;
# define MAX 1000006
char str[MAX];
stack<char>s;
int main(void)
{
int T; scanf("%d", &T);
while (T--) {
int n; scanf("%d", &n);
if (n==0) {
printf("0\n");
continue;
}
scanf("%s", str);
s.push(str[0]);
for (int i = 1;i < n;i++) {
if(s.empty()) {
s.push(str[i]);
continue;
} else {
if (s.top()=='(' && str[i]==')') {
s.pop();
} else {
s.push(str[i]);
}
}
}
int ans = s.size();
printf("%d\n", ans);
while(!s.empty()) {
s.pop();
}
}
return 0;
}<file_sep>/README.md
# CS-97SI-Introduction-to-Programming-Contests
All of the materials include: Lecture slides && Practice Problems' code.
| b393a6c6f7790176f30cdf66c2dc1de0a18625ea | [
"Markdown",
"C++"
] | 2 | C++ | ybai62868/CS-97SI-Introduction-to-Programming-Contests | 261ad1cdfc0af50dfa6af4e25573c97fee55162b | 918884430c9e7a31edc421d83da9dffa882dbf01 |
refs/heads/master | <file_sep>package object
import (
"bytes"
"fmt"
"hash/fnv"
"monkey_interpreter/ast"
"strings"
)
type ObjectType string
type BuiltinFunction func(args ...Object) Object
const (
IntegerObj = "INTEGER"
BooleanObj = "BOOLEAN"
NullObj = "NULL"
ReturnValueObj = "RETURN_VALUE"
ErrorObj = "ERROR"
FunctionObj = "FUNCTION"
StringObj = "STRING"
BuiltinObj = "BUILTIN"
ArrayObj = "ARRAY"
HashObj = "HASH"
)
type Hashable interface {
HashKey() HashKey
}
type HashPair struct {
Key Object
Value Object
}
type Hash struct {
Pairs map[HashKey]HashPair
}
func (h *Hash) Type() ObjectType { return HashObj }
func (h *Hash) Inspect() string {
var out bytes.Buffer
var pairs []string
for _, pair := range h.Pairs {
pairs = append(pairs, fmt.Sprintf("%s: %s",
pair.Key.Inspect(), pair.Value.Inspect()))
}
out.WriteString("{")
out.WriteString(strings.Join(pairs, ", "))
out.WriteString("}")
return out.String()
}
type HashKey struct {
Type ObjectType
Value uint64
}
func (b *Boolean) HashKey() HashKey {
var value uint64
if b.Value {
value = 1
} else {
value = 0
}
return HashKey{Type: b.Type(), Value: value}
}
// HashKey for integers uses the value itself, cast to uint64.
func (i *Integer) HashKey() HashKey {
	return HashKey{Type: i.Type(), Value: uint64(i.Value)}
}

// HashKey for strings hashes the bytes with 64-bit FNV-1a.
func (s *String) HashKey() HashKey {
	h := fnv.New64a()
	h.Write([]byte(s.Value))
	return HashKey{Type: s.Type(), Value: h.Sum64()}
}
// Array is the evaluator's ordered-collection object.
type Array struct {
	Elements []Object
}

// Type implements Object.
func (ao *Array) Type() ObjectType { return ArrayObj }

// Inspect renders the array as "[e1, e2, ...]" using each element's
// own Inspect output.
func (ao *Array) Inspect() string {
	rendered := make([]string, len(ao.Elements))
	for idx, el := range ao.Elements {
		rendered[idx] = el.Inspect()
	}
	return "[" + strings.Join(rendered, ", ") + "]"
}
// Builtin wraps a Go-native function exposed to interpreted programs.
type Builtin struct {
	Fn BuiltinFunction
}

// Type implements Object.
func (b *Builtin) Type() ObjectType {
	return BuiltinObj
}

// Inspect implements Object; builtins have no printable source.
func (b *Builtin) Inspect() string {
	return "builtin function"
}

// String is the boxed string object.
type String struct {
	Value string
}

// Type implements Object.
func (s *String) Type() ObjectType {
	return StringObj
}

// Inspect returns the raw string value, unquoted.
func (s *String) Inspect() string {
	return s.Value
}
// Function is a user-defined function: its parameter identifiers, body
// AST, and the environment it closes over.
type Function struct {
	Parameters []*ast.Identifier
	Body *ast.BlockStatement
	Env *Environment
}

// Type implements Object.
func (f *Function) Type() ObjectType {
	return FunctionObj
}

// Inspect renders the function as source-like text:
//
//	fn(<params>) {
//	<body>
//	}
//
// Fix: the original wrote "fn(" and the joined parameters but never
// closed the parameter list, producing unbalanced output such as
// "fn(x<body>\n}". Emit ") {\n" before the body.
func (f *Function) Inspect() string {
	var out bytes.Buffer
	var params []string
	for _, p := range f.Parameters {
		params = append(params, p.String())
	}
	out.WriteString("fn")
	out.WriteString("(")
	out.WriteString(strings.Join(params, ","))
	out.WriteString(") {\n")
	out.WriteString(f.Body.String())
	out.WriteString("\n}")
	return out.String()
}
// Error carries a runtime error message through evaluation.
type Error struct {
	Message string
}

func (e *Error) Type() ObjectType { return ErrorObj }
func (e *Error) Inspect() string { return "ERROR: " + e.Message }

// ReturnValue wraps another object so `return` can unwind block
// evaluation while preserving the value.
type ReturnValue struct {
	Value Object
}

func (rv *ReturnValue) Type() ObjectType { return ReturnValueObj }
func (rv *ReturnValue) Inspect() string { return rv.Value.Inspect() }

// Object is the interface every interpreted value implements.
type Object interface {
	Type() ObjectType
	Inspect() string
}
// Integer is the boxed 64-bit integer object.
type Integer struct {
	Value int64
}

// Inspect prints the decimal value.
func (i *Integer) Inspect() string {
	return fmt.Sprintf("%d", i.Value)
}

// Type implements Object.
func (i *Integer) Type() ObjectType {
	return IntegerObj
}

// Boolean is the boxed boolean object.
type Boolean struct {
	Value bool
}

// Type implements Object.
func (b *Boolean) Type() ObjectType {
	return BooleanObj
}

// Inspect prints "true" or "false".
func (b *Boolean) Inspect() string {
	return fmt.Sprintf("%t", b.Value)
}

// Null is the "no value" object.
type Null struct {
}

// Type implements Object.
func (n *Null) Type() ObjectType {
	return NullObj
}

// Inspect always prints "null".
func (n *Null) Inspect() string {
	return "null"
}
| 71bcb397fa00c02fbb531ab8d7646df6694ac7dd | [
"Go"
] | 1 | Go | Solomon-8/monkey_interpreter | d1c75b8ed2fae2d29bf23f5e8c44e8f819944d67 | be07be657a95fa4b3869be83ae8281f54387bc53 |
refs/heads/master | <repo_name>LoungeCPP/TextAsteroids<file_sep>/TextAsteroids/src/main.cpp
#include <iostream>
#include <string>
#include <array>
#include <thread>
#include <mutex>
#include <vector>
#include <deque>
#include <random>
#include <cctype>
#include <conio.h>
#include <Windows.h>
// 2D integer coordinate used both for console cells and game positions.
struct point
{
    short x;
    short y;
    point() : x(0), y(0) {}
    point(short x, short y) : x(x), y(y) {}
};

// Cached Win32 handle used by every console-output helper below.
auto stdout_handle = GetStdHandle(STD_OUTPUT_HANDLE);
// Console dimensions in character cells.
static const short width = 60;
static const short height = 34;
// Serializes console writes between the input thread and the render loop.
std::mutex iomutex;
// Resize the console window and screen buffer to `size` cells and select
// an 8x14 Lucida Console font. Throws std::runtime_error if any of the
// Win32 calls fails.
void SetConsoleSize(point size)
{
    COORD coord{ size.x, size.y };
    SMALL_RECT rect{ 0, 0, size.x - 1, size.y - 1 };
    CONSOLE_FONT_INFOEX info{ sizeof(info), static_cast<DWORD>(-1),{ 8, 14 }, FF_DONTCARE, 400, L"Lucida Console" };
    auto res = SetConsoleWindowInfo(stdout_handle, true, &rect);
    res = res && SetConsoleScreenBufferSize(stdout_handle, coord);
    res = res && SetCurrentConsoleFontEx(stdout_handle, false, &info);
    if (!res) throw std::runtime_error("failed to set console attributes");
}

// Turn off input echo so typed characters only appear where our own
// code draws them.
void DisableInputEcho()
{
    auto stdin_handle = GetStdHandle(STD_INPUT_HANDLE);
    DWORD mode = 0;
    GetConsoleMode(stdin_handle, &mode);
    SetConsoleMode(stdin_handle, mode & (~ENABLE_ECHO_INPUT));
}
// Move the visible cursor (thread-safe via iomutex).
void SetCursorPos(point pos)
{
    std::unique_lock<std::mutex> lock(iomutex);
    COORD coord{pos.x, pos.y};
    SetConsoleCursorPosition(stdout_handle, { coord.X, coord.Y });
}

// Write `length` characters directly into the console buffer at `coords`
// without moving the cursor (thread-safe via iomutex).
void WriteToConsoleBuffer(const char* chars, std::size_t length, point coords)
{
    std::unique_lock<std::mutex> lock(iomutex);
    DWORD written = 0;
    WriteConsoleOutputCharacterA(stdout_handle, chars, length, { coords.x, coords.y }, &written);
}
const float pi = 3.1415926535897932f;
// Console area minus the two rows reserved below the playfield.
const point window_size = { width, height - 2 };
// Playable area: one further row is reserved for the input line.
const point game_size = { width, height - 3 };
// Where the typed command line is echoed.
const point input_coords = { 0 , 32 };
// True when |a - b| < epsilon. The default epsilon is the float machine
// epsilon; callers pass a wider tolerance for angle bucketing.
bool close_enough(float a, float b, float epsilon = std::numeric_limits<float>::epsilon())
{
    float diff = a - b;
    if (diff < 0.0f)
        diff = -diff;
    return diff < epsilon;
}
// Player ship state. Position/velocity are whole cells per tick;
// rotation is radians kept in (-pi, pi], starting pointed "up".
struct RocketShip
{
    point position = { game_size.x / 2, game_size.y / 2 };
    point velocity = { 0,0 };
    float rotation = -pi/2;
    float angular_velocity = 0;
};
// Map the ship's rotation onto one of four glyphs ('A' up, 'V' down,
// '<' left, '>' right), each covering a pi/2-wide bucket (pi/4 either
// side of the cardinal angle). 'X' is the fallback for angles that fall
// between buckets.
template<typename T>
T spaceship_cast(RocketShip& rocket)
{
    const float bucket = pi / 4;
    const float rot = rocket.rotation;
    if (close_enough(rot, -pi / 2, bucket))
        return 'A';
    if (close_enough(rot, pi / 2, bucket))
        return 'V';
    if (close_enough(rot, pi, bucket) || close_enough(rot, -pi, bucket))
        return '<';
    if (close_enough(rot, 0, bucket))
        return '>';
    return 'X';
}
// Advance the ship one tick: integrate velocity with toroidal wrap-around
// on both axes, then integrate angular velocity keeping rotation within
// (-pi, pi].
void step_simulation(RocketShip& ship)
{
    ship.position.x += ship.velocity.x;
    ship.position.y += ship.velocity.y;
    // Wrap position so the ship re-enters on the opposite edge.
    if (ship.position.x < 0)
        ship.position.x += game_size.x;
    if (ship.position.x >= game_size.x)
        ship.position.x -= game_size.x;
    if (ship.position.y < 0)
        ship.position.y += game_size.y;
    if (ship.position.y >= game_size.y)
        ship.position.y -= game_size.y;
    ship.rotation += ship.angular_velocity;
    // Keep rotation normalized for spaceship_cast's bucket tests.
    if (ship.rotation > pi)
        ship.rotation -= 2*pi;
    if (ship.rotation < -pi)
        ship.rotation += 2 * pi;
}
// Falling asteroid: circular, with hit points and a draw radius.
// Fix: `health` and `radius` were left uninitialized, so reading them
// before assignment (e.g. on a default-constructed Asteroid) was
// undefined behavior; give them zero defaults. Callers that assign all
// fields after construction are unaffected.
struct Asteroid
{
    point position;
    point velocity;
    int health = 0;
    float radius = 0.0f;
    Asteroid() {}
};
// Euclidean distance between two grid points, as a float.
float distance(point A, point B)
{
    const float dx = B.x - A.x;
    const float dy = B.y - A.y;
    return std::sqrt(dx * dx + dy * dy);
}
// Rasterize an asteroid as a filled circle of '#' characters over its
// bounding box, skipping cells outside the circle or the playfield.
void draw_asteroid(Asteroid& roid)
{
    for (short y = roid.position.y - roid.radius; y < roid.position.y + roid.radius; y++)
    {
        for (short x = roid.position.x - roid.radius; x < roid.position.x + roid.radius; x++)
        {
            if (distance({ x,y }, roid.position) < roid.radius)
            {
                // Clip against the playfield bounds.
                if (x >= 0 && x < game_size.x && y >= 0 && y < game_size.y)
                {
                    char c = '#';
                    WriteToConsoleBuffer(&c, 1, { x, y });
                }
            }
        }
    }
}
// Advance an asteroid one tick (no wrap-around; off-screen asteroids are
// culled by the main loop).
void step_simulation(Asteroid& roid)
{
    roid.position.x += roid.velocity.x;
    roid.position.y += roid.velocity.y;
}

// A fired bullet: one cell with a constant velocity.
struct Projectile
{
    point position;
    point velocity;
    Projectile() {};
};

// Advance a projectile one tick.
void step_simulation(Projectile& projectile)
{
    projectile.position.x += projectile.velocity.x;
    projectile.position.y += projectile.velocity.y;
}
// True when the infinite line through A and B passes within `radius` of
// `center`, using the standard determinant/discriminant form.
// Fix: the original ignored the `center` parameter entirely, so the test
// was only correct for circles centered on the origin; translate the
// endpoints into the circle's frame first.
bool line_circle_intersect(point A, point B, point center, float radius)
{
    const float ax = A.x - center.x, ay = A.y - center.y;
    const float bx = B.x - center.x, by = B.y - center.y;
    const float dx = bx - ax, dy = by - ay;
    const float d2 = dx * dx + dy * dy;       // squared segment length
    const float det = ax * by - bx * ay;       // 2x triangle area
    const float discr = radius * radius * d2 - det * det;
    return discr >= 0;
}
// True when two circles overlap: their centers are closer than the sum
// of the radii (tangency does not count).
bool circle_circle_intersection(point center1, float radius1, point center2, float radius2)
{
    return distance(center1, center2) < (radius1 + radius2);
}
int main()
{
const std::size_t tick_interval_ms = 500;
std::vector<char> screen_buffer(game_size.x * game_size.y);
SetConsoleSize({ width,height });
DisableInputEcho();
std::mutex simulation_mutex;
std::string buf;
std::size_t offset = 0;
std::string input;
const std::string blank(width, ' ');
bool game_over = false;
RocketShip ship;
std::vector<Asteroid> roids;
std::vector<Projectile> projectiles;
auto Render = [&]
{
//clear
for (size_t i = 0; i < game_size.y; i++)
{
WriteToConsoleBuffer(blank.data(), width, { 0, static_cast<short>(i) });
}
for (auto& roid : roids)
{
draw_asteroid(roid);
}
for (auto& proj : projectiles)
{
char c = '*';
WriteToConsoleBuffer(&c, 1, proj.position);
}
if (!game_over)
{
auto ship_char = spaceship_cast<char>(ship);
WriteToConsoleBuffer(&ship_char, 1, ship.position);
}
if (game_over)
{
const std::string game_over_screen =
"=========================GAME OVER========================="
" You got blown up by an asteroid! "
" "
" press [enter] to exit ";
WriteToConsoleBuffer(game_over_screen.data(), game_over_screen.length(), { 0, 6 });
return;
}
};
auto PollInput = [&]
{
auto ch = _getch();
switch (ch)
{
case 13: //carriage return
{
if (game_over)
std::exit(0);
input = buf;
buf.clear();
offset = 0;
break;
}
case 8: //backspace
{
if (buf.length())
{
buf.erase(buf.begin() + offset - 1);
offset--;
}
break;
}
case 224: //some keys
{
auto ch2 = _getch();
switch (ch2)
{
case 83: //DEL
{
buf.erase(buf.begin() + offset);
break;
}
case 71: //HOME
{
offset = 0;
break;
}
case 79: //END
{
offset = buf.length();
break;
}
case 75: //LEFT
{
offset = min(buf.length(), max(0, offset - 1));
break;
}
case 77: //RIGHT
{
offset = min(buf.length(), max(0, offset + 1));
break;
}
}
break;
}
case 0: //other keys
{
_getch();
break;
}
default:
{
if (std::isprint(ch))
if (buf.length() < width - 1)
{
buf.insert(buf.begin() + offset, ch);
offset++;
}
break;
}
}
};
auto ActOnInput = [&]
{
if (!input.empty())
{
std::unique_lock<std::mutex> lock(simulation_mutex);
if (input == "pew")
{
Projectile p;
p.position = ship.position;
p.velocity = { static_cast<short>(2 * cos(ship.rotation)), static_cast<short>(2 * sin(ship.rotation)) };
projectiles.push_back(p);
}
if (input == "w")
{
ship.velocity.y -= 1;
}
if (input == "a")
{
ship.velocity.x -= 1;
}
if (input == "s")
{
ship.velocity.y += 1;
}
if (input == "d")
{
ship.velocity.x += 1;
}
if (input == "e")
{
ship.angular_velocity += pi / 8;
}
if (input == "q")
{
ship.angular_velocity -= pi / 8;
}
input.clear();
}
};
std::size_t ticks_per_roid = 5;
auto SimulateRoids = [&]
{
static std::mt19937 twister(0);
static std::uniform_int_distribution<int> xdist(0, game_size.x);
for (auto& roid : roids)
{
step_simulation(roid);
}
//delete roids that have left the area
std::remove_if(roids.begin(), roids.end(), [](const Asteroid& roid)
{
return roid.position.x < 0 || roid.position.x >= game_size.x
|| roid.position.y < 0 || roid.position.y >= game_size.y;
});
//spawn new roids
static std::size_t ticks = 0;
if (ticks == 0)
{
Asteroid roid;
roid.radius = 2;
roid.position.x = xdist(twister);
roid.position.y = 0;
roid.velocity.x = std::uniform_int_distribution<int>(-2, 2)(twister);
roid.velocity.y = 1;
roid.health = std::uniform_int_distribution<int>(10, 50)(twister);
roids.push_back(roid);
}
ticks = (ticks + 1) % ticks_per_roid;
};
auto SimulateShip = [&]
{
step_simulation(ship);
for (auto& roid : roids)
{
if (circle_circle_intersection(ship.position, std::sqrt(2), roid.position, roid.radius))
{
game_over = true;
}
}
};
auto SimulateProjectiles = [&]
{
for (auto& proj : projectiles)
{
step_simulation(proj);
}
//delete projectiles that have left the area
std::remove_if(projectiles.begin(), projectiles.end(), [](const Projectile& proj)
{
return proj.position.x < 0 || proj.position.x >= game_size.x
|| proj.position.y < 0 || proj.position.y >= game_size.y;
});
for (auto& proj : projectiles)
{
for (auto& roid : roids)
{
if (circle_circle_intersection(proj.position, std::sqrt(2), roid.position, roid.radius))
{
//mark roid and projectile to be deleted next tick
proj.position.x = -1;
roid.position.x = -1;
}
}
}
};
std::thread get_input([&]
{
while (true)
{
SetCursorPos({ static_cast<short>(input_coords.x + offset), input_coords.y });
PollInput();
DWORD written = 0;
WriteToConsoleBuffer(blank.data(), blank.length(), input_coords);
WriteToConsoleBuffer(buf.data(), buf.length(), input_coords);
ActOnInput();
}
});
while (true)
{
{
std::unique_lock<std::mutex> lock(simulation_mutex);
SimulateProjectiles();
SimulateRoids();
SimulateShip();
Render();
}
std::this_thread::sleep_for(std::chrono::milliseconds(tick_interval_ms));
}
get_input.join();
return 0;
}
<file_sep>/README.md
# TextAsteroids
Asteroids are falling toward your planet,
and YOU are the captain of the only ship in range!
Unfortunately, your rocket ship only responds to the following text commands:
- q : turn left
- e : turn right
- w : accelerate upwards
- s : accelerate downwards
- a : accelerate left
- d : accelerate right
- pew : fire your weapon
Use your rocket ship to defend your home! Or don't. Whatever.

# Build Instructions
Open the solution in VS2013 or newer, build.
| 274b7f7312dc3782fae15e8673b63272231d29be | [
"Markdown",
"C++"
] | 2 | C++ | LoungeCPP/TextAsteroids | 04e98ea7f0ce7d569c1a71b0cd26ea3b1a3baa85 | 8b784b02a52294118b3809b05c74b2991102a6d1 |
refs/heads/master | <file_sep>
// three.js scene globals shared by init/animate/CreateTerrain.
var container;
var camera, scene, renderer;
var imagedata; // heightmap pixels, filled once the image finishes loading
var N = 256; // terrain grid resolution (N x N vertices)
var spotlight;
var sphere; // visible marker that tracks the orbiting light
init();
animate();
// Scene setup: perspective camera aimed at the terrain centre, WebGL
// renderer attached to #container, asynchronous heightmap load (the
// terrain is built inside the image's onload), and an orbiting point
// light with a yellow marker sphere.
// NOTE(review): setClearColor(0x000000ff, 1) — three.js colors are 24-bit
// RGB, so this literal reads as blue (0x0000ff); confirm whether an RGBA
// value was intended.
function init()
{
	container = document.getElementById('container');
	scene = new THREE.Scene();
	camera = new THREE.PerspectiveCamera(45, window.innerWidth / window.innerHeight, 1, 4000);
	camera.position.set(N/2, N*0.6, N*2.5);
	camera.lookAt(new THREE.Vector3(N/2, 0.0, N/2));
	renderer = new THREE.WebGLRenderer({antialias: false});
	renderer.setSize(window.innerWidth, window.innerHeight);
	renderer.setClearColor(0x000000ff, 1);
	container.appendChild(renderer.domElement);
	window.addEventListener('resize', onWindowResize, false);
	// Draw the heightmap into an offscreen canvas to read its pixels.
	var canvas = document.createElement('canvas');
	var context = canvas.getContext('2d');
	var img = new Image();
	img.onload = function()
	{
		canvas.width = img.width;
		canvas.height = img.height;
		context.drawImage(img, 0, 0 );
		imagedata = context.getImageData(0, 0, img.width, img.height);
		CreateTerrain();
	}
	img.src = 'js/pics/plateau.jpg';
	spotlight = new THREE.PointLight(0xffffff);
	spotlight.position.set(N/2, N*2, N/2);
	scene.add(spotlight);
	var geometry = new THREE.SphereGeometry( 5, 32, 32 );
	var material = new THREE.MeshBasicMaterial( {color: 0xffff00} );
	sphere = new THREE.Mesh( geometry, material );
	scene.add( sphere );
}
// Keep the camera aspect ratio and renderer size in sync with the window.
function onWindowResize()
{
	camera.aspect = window.innerWidth / window.innerHeight;
	camera.updateProjectionMatrix();
	renderer.setSize(window.innerWidth, window.innerHeight);
}
// Orbit angle of the light, in radians; advanced a fixed step per frame.
var alpha = 0.1;

// Per-frame loop: advance the orbit angle, render, then move the point
// light (and its marker sphere) on a circle of radius N in the x/y plane
// centred on the terrain's x-middle.
function animate()
{
	/*var clock = new THREE.Clock();
	var delta = clock.getDelta();*/
	alpha += 0.01;
	requestAnimationFrame(animate);
	render();
	var x = N/2 + N*Math.cos(alpha);
	var y = 0 + N*Math.sin(alpha);
	spotlight.position.set(x, y, N/2);
	sphere.position.copy(spotlight.position);
}

// Draw the current scene from the fixed camera.
function render()
{
	renderer.render(scene, camera);
}
// Build a heightfield mesh from the loaded `imagedata`: one vertex per
// pixel (red channel / 8 as height), two triangles per grid cell with
// matching UVs, and a tiled grass texture under Lambert shading.
// Fix: the original created a material referencing `tex` *before*
// `var tex` was assigned — var hoisting made `map: tex` undefined; the
// texture loader now runs before any material is built.
function CreateTerrain()
{
	var geometry = new THREE.Geometry();
	// One vertex per heightmap pixel; the red channel drives elevation.
	for(var i= 0; i < N; i++){
		for (var j = 0; j < N; j++){
			var y = getPixel(imagedata, i, j);
			geometry.vertices.push(new THREE.Vector3( i, y/8.0, j));
		}
	}
	// Two triangles per grid cell, with per-face UVs spanning [0,1]^2.
	for(var i= 0; i < N-1; i++){
		for (var j = 0; j < N-1; j++){
			var ind0 = i + j * N;
			var ind1 = (i + 1) + j * N;
			var ind2 = i + (j + 1) * N;
			var ind3 = (i + 1) + (j + 1) * N;
			geometry.faces.push(new THREE.Face3(ind0, ind1, ind3));
			geometry.faces.push(new THREE.Face3(ind0, ind3, ind2));
			geometry.faceVertexUvs[0].push([
				new THREE.Vector2(i/(N-1), (j/(N-1))),
				new THREE.Vector2((i+1)/(N-1), j/(N-1)),
				new THREE.Vector2((i+1)/(N-1), (j+1)/(N-1))]);
			geometry.faceVertexUvs[0].push([
				new THREE.Vector2(i/(N-1), (j/(N-1))),
				new THREE.Vector2((i+1)/(N-1), (j+1)/(N-1)),
				new THREE.Vector2(i/(N-1), (j+1)/(N-1))]);
		}
	}
	geometry.computeFaceNormals();
	geometry.computeVertexNormals();
	// Load the texture first so every material below sees a valid map.
	var loader = new THREE.TextureLoader();
	var tex = loader.load( 'js/pics/grasstile.jpg' );
	// Debug wireframe material (not attached to the final mesh).
	var triangleMaterial = new THREE.MeshBasicMaterial({
		map: tex,
		wireframe: true,
		side: THREE.DoubleSide
	});
	var mat = new THREE.MeshLambertMaterial({
		map: tex,
		wireframe: false,
		side: THREE.DoubleSide
	});
	var triangleMesh = new THREE.Mesh(geometry, mat);
	triangleMesh.position.set(0.0, 3.0, 0.0);
	scene.add(triangleMesh);
}
// Return the red-channel value (0-255) of pixel (x, y) from an
// ImageData-like object ({ width, data: flat RGBA byte array }).
function getPixel(imagedata, x, y)
{
	var bytes = imagedata.data;
	var index = (x + imagedata.width * y) * 4;
	return bytes[index];
}
"JavaScript"
] | 1 | JavaScript | thankshelp/optgLab1 | e7e37b7090d549994ef1700b752a033e73060785 | e6b701808a54002616785bc87cec04189fc81542 |
refs/heads/master | <repo_name>DeveloperMujtaba/Health-Clinic<file_sep>/HealthClinic/Models/Citations3.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
namespace HealthClinic.Models
{
    /// <summary>
    /// Form/view model for the third citation captured on the intake form.
    /// Appears to mirror the sibling CitationsN models (one class per
    /// citation slot, fields suffixed with the slot number) — confirm.
    /// "vcn" presumably abbreviates the violation code/number — verify.
    /// </summary>
    public class Citations3
    {
        public string citationnumber3 { get; set; }
        public DateTime citationdate3 { get; set; }
        public string citationcountry3 { get; set; }
        public string citationcourt3 { get; set; }
        public DateTime citationcomplydate3 { get; set; }
        public string citationvcn3 { get; set; }
    }
}
}<file_sep>/HealthClinic/Models/Citations1.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
namespace HealthClinic.Models
{
    /// <summary>
    /// Form/view model for the first citation captured on the intake form.
    /// Appears to mirror the sibling CitationsN models (one class per
    /// citation slot, fields suffixed with the slot number) — confirm.
    /// </summary>
    public class Citations1
    {
        public string citationnumber1 { get; set; }
        public DateTime citationdate1 { get; set; }
        public string citationcountry1 { get; set; }
        public string citationcourt1 { get; set; }
        public DateTime citationcomplydate1 { get; set; }
        public string citationvcn1 { get; set; }
    }
}
}<file_sep>/HealthClinic/shopping_cart/assets/js/myfunction0ff5.js
/*
*
* START validate phone
*/
/*
 * Validate that a phone number contains exactly 10 digits once every
 * non-digit character (spaces, dashes, parentheses) is stripped.
 * Fix: the original returned false for invalid input but fell off the
 * end for valid input (returning undefined); it now returns true
 * explicitly. The dead, commented-out formatting branch was removed.
 */
function validatePhone(phoneField/*, format*/) {
    var num = phoneField.replace(/[^\d]/g, '');
    if (num.length != 10) {
        return false;
    }
    return true;
}
/*
*
* END validate phone
*/
var jsArray = ["21", "16", "17", "15"];

// True when `val` parses to a number under parseFloat semantics
// (i.e. it has a usable leading numeric portion).
function IsNumeric(val) {
    return !isNaN(parseFloat(val));
}
// Accept only jpg/jpeg/bmp/pdf file extensions, case-insensitively.
function validateFileExtension(ext) {
    var normalized = (ext + '').toLowerCase();
    return ['jpg', 'jpeg', 'bmp', 'pdf'].indexOf(normalized) !== -1;
}
// PHP-style in_array: true when `needle` equals some value of `haystack`
// (an array or plain object). `argStrict` selects === over == comparison.
function in_array(needle, haystack, argStrict) {
    var strict = !!argStrict;
    for (var key in haystack) {
        var candidate = haystack[key];
        if (strict ? candidate === needle : candidate == needle) {
            return true;
        }
    }
    return false;
}
// Difference between two Date-parsable values in the requested unit.
// "years"/"months" use calendar-component arithmetic; the remaining units
// floor the millisecond delta. Returns NaN when either date fails to
// parse, and undefined for an unknown interval name.
function mydiff(date1, date2, interval) {
    var MS_SECOND = 1000,
        MS_MINUTE = MS_SECOND * 60,
        MS_HOUR = MS_MINUTE * 60,
        MS_DAY = MS_HOUR * 24,
        MS_WEEK = MS_DAY * 7;
    var start = new Date(date1);
    var end = new Date(date2);
    var delta = end - start;
    if (isNaN(delta)) return NaN;
    switch (interval) {
        case "years": return end.getFullYear() - start.getFullYear();
        case "months":
            return (end.getFullYear() * 12 + end.getMonth())
                 - (start.getFullYear() * 12 + start.getMonth());
        case "weeks": return Math.floor(delta / MS_WEEK);
        case "days": return Math.floor(delta / MS_DAY);
        case "hours": return Math.floor(delta / MS_HOUR);
        case "minutes": return Math.floor(delta / MS_MINUTE);
        case "seconds": return Math.floor(delta / MS_SECOND);
        default: return undefined;
    }
}
// Returns true when the checkbox/radio input with the given id is checked.
// NOTE(review): "[@id=...]" is pre-jQuery-1.3 XPath attribute syntax; on
// jQuery >= 1.3 this selector is invalid (throws / matches nothing).
// Confirm the jQuery version in use and, if modern, switch the selector
// to "input[id=" + id + "]:checked".
function isCheckedById(id){
	var checked = $("input[@id="+id+"]:checked").length;
	if (checked == 0){
		return false;
	} else{
		return true;
	}
}
// Validate the citation-date field whose id is `element` as MM/DD/YYYY,
// but only when the hidden flag #n equals "1"; otherwise accept anything.
// On failure: alert, refocus the field and return false.
function fLateDays(element)
{
	//var ticket_county = $('#ticket_county').val();
	var txtValdate_citation = $('#'+element).val();
	if($('#n').val()=="1"){
		if(!isDateRSC(txtValdate_citation)) {
			alert('Invalid Date, Enter correct format (MM/DD/YYYY)');
			$('#'+element).focus();
			return false;
		}
		else
			return true;
	}
	else{
		return true;
	}
}
// Shape-only date check: non-empty string of 1-2 digits "/" 1-2 digits
// "/" 2-4 digits. Range/calendar validation happens elsewhere (isDateRSC).
function validarFormatoFecha(campo) {
    var shape = /^\d{1,2}\/\d{1,2}\/\d{2,4}$/;
    return !!(campo.match(shape)) && campo != '';
}
// Full date validation for "M/D/Y" input. Normalizes to MM/DD/YYYY
// (two-digit years 00-22 become 20xx, 70-99 become 19xx, anything else
// is rejected), then re-validates the normalized string: two-digit month
// and day required, month 01-12, day 01-31 adjusted for 30-day months
// and leap-year February.
// NOTE(review): varmes/vardia/varyear/txtDateResul and dtMonth/dtDay/
// dtYear are assigned without `var` and leak into the global scope —
// confirm nothing else reads them before tightening.
function isDateRSC(txtDate){
	var mystr = txtDate;
	// Quick shape check before splitting.
	if (!validarFormatoFecha(txtDate)) {
		return false;
	}
	var myarr = mystr.split("/");
	// Left-pad month and day to two digits.
	if (myarr[0].length==1) {varmes= '0'+myarr[0];} else { varmes= myarr[0];}
	if (myarr[1].length==1) { vardia= '0'+myarr[1]; } else { vardia= myarr[1];}
	// Allowed two-digit year windows: 2000-2022 and 1970-1999.
	var jsArray2000 = ['00','01','02','03','04','05','06','07','08','09','10','11','12','13','14','15','16','17','18','19','20','21','22'];
	var jsArray1900 = [
		'70','71','72','73','74','75','76','77','78','79',
		'80','81','82','83','84','85','86','87','88','89',
		'90','91','92','93','94','95','96','97','98','99'
	];
	if (myarr[2].length==2) {
		if(in_array(myarr[2], jsArray2000)){
			varyear= '20'+myarr[2];
		}else if(in_array(myarr[2], jsArray1900)){
			varyear= '19'+myarr[2];
		} else {
			return false;
		}
	} else {
		varyear= myarr[2];
	}
	txtDateResul = varmes+'/'+vardia+'/'+varyear;
	var currVal = txtDateResul;//txtDate;
	if(currVal == '')
		return false;
	var rxDatePattern = /^(\d{1,2})(\/|-)(\d{1,2})(\/|-)(\d{4})$/; //Declare Regex
	var dtArray = currVal.match(rxDatePattern); // is format OK?
	if (dtArray == null)
		return false;
	//Checks for mm/dd/yyyy format.
	dtMonth = dtArray[1];
	dtDay= dtArray[3];
	dtYear = dtArray[5];
	// Enforce two-digit month and day (post-normalization this only
	// rejects pathological input).
	if (dtMonth.length<=1)
		return false;
	if (dtDay.length<=1)
		return false;
	if (dtMonth < 1 || dtMonth > 12)
		return false;
	else if (dtDay < 1 || dtDay> 31)
		return false;
	else if ((dtMonth==4 || dtMonth==6 || dtMonth==9 || dtMonth==11) && dtDay ==31)
		return false;
	else if (dtMonth == 2)
	{
		// February: cap at 29, and 29 only on leap years.
		var isleap = (dtYear % 4 == 0 && (dtYear % 100 != 0 || dtYear % 400 == 0));
		if (dtDay> 29 || (dtDay ==29 && !isleap))
			return false;
	}
	return true;
}
/*
 * Return the age in whole years for a birth date given as "MM/DD/YYYY".
 * Fix: the original froze "now" at hard-coded values (yearNow = 114,
 * monthNow = 7, dateNow = 04 — i.e. August 2014), so every computed age
 * drifted further from reality each year; the current date is now taken
 * from new Date(). Also declares previously-implicit globals with var.
 */
function getAge(dateString) {
    var now = new Date();
    var yearNow = now.getYear();    // years since 1900, matching dob.getYear()
    var monthNow = now.getMonth();  // 0-based month
    var dateNow = now.getDate();
    var dob = new Date(dateString.substring(6, 10),
        dateString.substring(0, 2) - 1,
        dateString.substring(3, 5));
    var yearDob = dob.getYear();
    var monthDob = dob.getMonth();
    var dateDob = dob.getDate();
    var yearAge = yearNow - yearDob;
    var monthAge;
    if (monthNow >= monthDob) {
        monthAge = monthNow - monthDob;
    } else {
        // Birthday month not reached yet this year: borrow a year.
        yearAge--;
        monthAge = 12 + monthNow - monthDob;
    }
    var dateAge;
    if (dateNow >= dateDob) {
        dateAge = dateNow - dateDob;
    } else {
        // Day-of-month borrow (31 is an approximation, as in the original).
        monthAge--;
        dateAge = 31 + dateNow - dateDob;
        if (monthAge < 0) {
            monthAge = 11;
            yearAge--;
        }
    }
    var age = {
        years: yearAge,
        months: monthAge,
        days: dateAge
    };
    return age.years;
}
// Ask the user to confirm cancellation; navigate to `url` on OK,
// otherwise return false and stay on the page.
function ConfirmCancel(url)
{
	var x = confirm("Are you sure you want to cancel?");
	if (x)
		location.href=url;
	else
		return false;
}

// Unconditional navigation helper.
function urlLocation(url)
{
	location.href=url;
}
// Keypress filter for the driver's-license field: allow backspace (8),
// hyphen (45), digits (48-57) and ASCII letters; any other code falls
// through to the legacy /1/ pattern test, kept verbatim for behavioral
// parity with the original.
function validarDriversLicenseNumber(e) {
    tecla = (document.all) ? e.keyCode : e.which;
    if (tecla == 8 || tecla == 45) return true; // backspace / hyphen
    if (tecla >= 48 && tecla <= 57) return true; // '0'-'9'
    if ((tecla >= 65 && tecla <= 90) || (tecla >= 97 && tecla <= 122)) return true; // A-Z / a-z
    patron = /1/; //ver nota
    te = String.fromCharCode(tecla);
    return patron.test(te);
}
// Payment-step validation: requires the hidden completeness flag
// (#is_complete_information), then card type, number, expiration date and
// CVV; alerts on the first missing field, toggles the #payOn/#payOff
// button states around the checks, and returns true only when complete.
function validate_cc()
{
	if($("#is_complete_information").val() == "0"){
		alert("Please complete the required fields (*)");
		return false;
	}
	else{
		if($('#payment').val()=='' || $('#card_no').val()=='' || $('#exp_date').val()=='' || $('#cvv_code').val()=='' || $('#cvv_code').val()=='') {
			$('#payOn').hide();
			$('#payOff').show();
			if ($('#payment').val()=='') {
				alert("Choose Credit Card type");
			} else if($('#card_no').val()==''){
				alert("Enter Credit Card Number");
				$('#card_no').focus();
			} else if($('#exp_date').val()==''){
				alert("Enter Expiration Date");
				$('#exp_date').focus();
			} else if($('#cvv_code').val()==''){
				alert("Enter Cvv Code");
				$('#cvv_code').focus();
			}
			$('#payOn').show();
			$('#payOff').hide();
			return false;
		}
		$('#payOn').hide();
		$('#payOff').show();
		return true;
	}
}
// First-step validation of #hireForm (name, cell phone, e-mail).
// Alerts on the first missing/invalid field, refocuses it and returns
// false; returns true only when everything passes.
// NOTE(review): `phoneField` and `email` are assigned without `var`
// (implicit globals) — confirm nothing relies on that before fixing.
function validateFormInit() {
	console.log($("#origin").val());
	if(document.hireForm.fname.value=="") {
		alert("Field required. Please enter First Name");
		document.hireForm.fname.focus();
		return false;
	}
	if(document.hireForm.lname.value=="") {
		alert("Field required. Please enter Last Name");
		document.hireForm.lname.focus();
		return false;
	}
	if(document.hireForm.cellphone.value=="") {
		alert("Field required. Please enter Cell Phone Number");
		document.hireForm.cellphone.focus();
		return false;
	}
	// Inline 10-digit check (same rule as validatePhone above).
	phoneField = $('#cellphone').val();
	var num = phoneField.replace(/[^\d]/g,'');
	if(num.length != 10) {
		alert("Please Enter a Valid Phone Number")
		//Phone.value=""
		$('#cellphone').focus();
		return false;
	}
	if(document.hireForm.email.value=="") {
		alert("Field required. Please enter E-mail");
		document.hireForm.email.focus();
		return false;
	}
	// Regex shape check on the e-mail address.
	if(document.hireForm.email.value != "") {
		var reg2=new RegExp(/^\w+((-\w+)|(\.\w+))*\@[A-Za-z0-9]+((\.|-)[A-Za-z0-9]+)*\.[A-Za-z0-9]+$/ );
		var email=document.hireForm.email.value;
		if(email.search(reg2)==-1) {
			alert("Enter a valid email address");
			document.hireForm.email.focus();
			return false;
		}
		/*else if(document.hireForm.email.value!=document.hireForm.c_email.value) {
			alert("Enter confirm your email address");
			document.hireForm.c_email.focus();
			return false;
		}*/
	}
	// Redundant fallback check for "@" and ".".
	email=hireForm.email.value;
	if(email.indexOf("@")==-1 || email.indexOf(".")==-1) {
		alert("Enter Valid Email");
		hireForm.email.focus();
		return false;
	}
	return true;
}
// Full validation of #hireForm: name, address (only when the hidden
// #edit flag is "1"), phone, e-mail + confirmation, driver's-license
// fields and a MM/DD/YYYY date of birth with an 18-year minimum age
// (computed with moment.js). Alerts on the first failure, refocuses the
// field and returns false; true when all checks pass.
// NOTE(review): the empty-cellphone check is duplicated verbatim, and
// `phoneField`/`email` are implicit globals — both kept as-is.
function validate() {
	if(document.hireForm.fname.value=="") {
		alert("Field required. Please enter First Name");
		document.hireForm.fname.focus();
		return false;
	}
	if(document.hireForm.lname.value=="") {
		alert("Field required. Please enter Last Name");
		document.hireForm.lname.focus();
		return false;
	}
	// Address block is only required in edit mode.
	if($("#edit").val()=="1"){
		if(document.hireForm.saddress.value=="") {
			alert("Field required. Please enter Street Address");
			document.hireForm.saddress.focus();
			return false;
		}
		if(document.hireForm.city.value=="") {
			alert("Field required. Please enter City");
			document.hireForm.city.focus();
			return false;
		}
		if(document.hireForm.state.value=="") {
			alert("Field required. Please enter State");
			document.hireForm.state.focus();
			return false;
		}
		if(document.hireForm.zipcode.value=="")
		{
			alert("Field required. Please enter Zip Code");
			document.hireForm.zipcode.focus();
			return false;
		}
	}
	if(document.hireForm.cellphone.value=="") {
		alert("Field required. Please enter Cell Phone Number");
		document.hireForm.cellphone.focus();
		return false;
	}
	// Duplicate of the check above (see NOTE in the header).
	if(document.hireForm.cellphone.value=="") {
		alert("Field required. Please enter Cell Phone Number");
		document.hireForm.cellphone.focus();
		return false;
	}
	/*
	if(validatePhone('cellphone')==false) {
		alert("Enter only numbers");
		$('#cellphone').focus();
		return false;
	}*/
	//if (validatePhone($('#cellphone').val())==false){
	phoneField = $('#cellphone').val();
	var num = phoneField.replace(/[^\d]/g,'');
	if(num.length != 10) {
		alert("Please Enter a Valid Phone Number")
		//Phone.value=""
		$('#cellphone').focus();
		return false;
	}
	if(document.hireForm.email.value=="") {
		alert("Field required. Please enter E-mail");
		document.hireForm.email.focus();
		return false;
	}
	// Regex shape check plus confirmation-field match.
	if(document.hireForm.email.value != "") {
		var reg2=new RegExp(/^\w+((-\w+)|(\.\w+))*\@[A-Za-z0-9]+((\.|-)[A-Za-z0-9]+)*\.[A-Za-z0-9]+$/ );
		var email=document.hireForm.email.value;
		if(email.search(reg2)==-1) {
			alert("Enter a valid email address");
			document.hireForm.email.focus();
			return false;
		}else if(document.hireForm.email.value!=document.hireForm.c_email.value) {
			alert("Enter confirm your email address");
			document.hireForm.c_email.focus();
			return false;
		}
	}
	// Redundant fallback check for "@" and ".".
	email=hireForm.email.value;
	if(email.indexOf("@")==-1 || email.indexOf(".")==-1) {
		alert("Enter Valid Email");
		hireForm.email.focus();
		return false;
	}
	if(document.hireForm.driver_li_no.value=="") {
		alert("Field required. Please enter Drivers License Number");
		document.hireForm.driver_li_no.focus();
		return false;
	}
	if(document.hireForm.state_issued.value=="") {
		alert("Choose State Issued");
		document.hireForm.state_issued.focus();
		return false;
	}
	if(document.hireForm.license_cdl.value=="") {
		alert("Specify if your license is CDL License");
		document.hireForm.license_cdl.focus();
		return false;
	}
	if(document.hireForm.dob.value==""){
		alert("Field required. Please enter Date of Birth");
		document.hireForm.dob.focus();
		return false;
	}
	// Calendar validation of the DOB, then the 18-year minimum-age rule.
	var txtVal = $.trim($('#dob').val());
	if(!isDateRSC(txtVal)) {
		alert('Invalid Date, Enter correct format (MM/DD/YYYY)');
		$('#dob').focus();
		return false;
	}
	var yearsDOB = Math.floor(moment(new Date()).diff(moment(txtVal,"MM/DD/YYYY"),'years',true));
	//console.log(yearsDOB);
	//return false;
	if(yearsDOB < 18) {
		alert('Invalid Date Of Birth, please call us at 1-844-339-8811 for further assistance.');
		return false
	}
	return true;
}
// Show or hide the additional file-upload row/agreement when the chosen
// option is "1" and the hidden #totalAddOptions count is positive.
// `element` is the raw <select> DOM node (passed from an inline handler).
function checkAdditionalUpload(element){
	//console.log(element);
	//console.log(element.options[element.selectedIndex].value)
	var optionSelect = element.options[element.selectedIndex].value;
	var totalAddOptions = $("#totalAddOptions").val();
	console.log(optionSelect);
	console.log(totalAddOptions);
	if(optionSelect == "1" && totalAddOptions > 0){
		$("#cit_file_row_additional").show();
		$("#cit_agree_additional").show();
	}
	else{
		$("#cit_file_row_additional").hide();
		$("#cit_agree_additional").hide();
	}
}
$(document).ready(
function()
{
$('#california-form').click(function(){
$( "#origin" ).val('California');
$( "#hireForm" ).submit();
});
$('#florida-form').click(function(){
$( "#origin" ).val('Florida');
$( "#hireForm" ).submit();
});
$('#other-form').click(function(){
$( "#origin" ).val('Other');
$( "#hireForm" ).submit();
});
$("#start-shopping-cart").click(function(){
window.location.href = "https://www.ticketclinic.com/shopping_cart/index.php/init";
});
$('.tooltip-ttc-container .control-label').click(function(){
$('.tooltip-ttc').hide();
$(this).next().show();
});
$('.tooltip-ttc span').click(function(){
$(this).parent().hide();
});
$( document ).tooltip();
/*********************************************************************************************************************************
*************************************************** Personal Information **************************************************
*********************************************************************************************************************************/
/*
// para california se quita la validación
$('#state_issued').change(function(){
if($('#state_issued').val()!='FL') {
$('#non_florida_div').hide();
$('#errorFLORIDA').show();
} else {
$('#non_florida_div').show();
$('#errorFLORIDA').hide();
}
});
*/
$('#county_error').hide();
$('#courthouse_error').hide();
$('#type_of_violation_error').hide();
$('#past_due_error').hide();
if($('#ticket_county').val() != null && $('#ticket_county').val()!='') {
if($("#countyFee").val() != '1'){
$('#phase3_submit').hide();
$('#county_error').show();
}
// else{
//$('#phase3_submit').hide();
// $('#county_error').hide();
// }
//console.log($("#countyFee").val());
/*alert('Please choose County.');
$('#ticket_county').focus();
return false;*/
}
/*
*Al momento de ingresar tus datos en el carrito, si respondes YES a CDL LICENSE, tiene que salir este mensaje y que se detenga la
*compra: Tickets with this type of driver license require special attention, please call 1-800-CITATION for further assistance.
*/
$('#license_cdl').change(function() {
/*if($('#license_cdl').val()==0) {
$('#btnSubmit').show();
$('#errorCDLLICENSE').hide();
} else {
$('#btnSubmit').hide();
$('#errorCDLLICENSE').show();
}*/
});
$('#ticket_courthouse').change(function() {
if($("#county_error").is(":hidden")){
$('.question_id').remove();
$("#cit_file_row_additional").hide();
$("#cit_agree_additional").hide();
$('#cit_type_val').val("0");
$('#type_of_violation_error').hide();
if((parseInt($('#ticket_courthouse').val()) >= 123 && parseInt($('#ticket_courthouse').val()) <= 132) || $('#ticket_courthouse').val()==60 || $('#ticket_courthouse').val()==62 || $('#ticket_courthouse').val()==63 || $('#ticket_courthouse').val()==133 || $('#ticket_courthouse').val()==55 || $('#ticket_courthouse').val()==56) {
$('#phase3_submit').hide();
$('#courthouse_error').show();
} else {
$('#phase3_submit').show();
$('#courthouse_error').hide();
}
}
});
/*********************************************************************************************************************************
***************************************************** Num the ticket ******************************************************
*********************************************************************************************************************************/
// Ticket-type selector: types 2 and 3 reveal the citation-number box and
// clear all notice boxes; types 1/4/5 show the criminal/other/parking
// notice respectively. Type 3 additionally fixes the quantity at one.
$('#ticket_type').change(
function()
{
if($('#ticket_type').val()!='') {
$('#num_tickets_box').show();
//$('#num_tickets option[value=""]').attr('selected','selected');
if($('#ticket_type').val()==2 || $('#ticket_type').val()==3) {
$('#ticketNumBox').show();
$('#errorGenericBox').hide();
$('#errorCriminalBox').hide();
$('#errorOtherBox').hide();
$('#errorParkingBox').hide();
if($('#ticket_type').val()==3){
// Type 3 implies exactly one ticket: hide the quantity box, preselect 1.
$('#num_tickets_box').hide();
$('#num_tickets option[value=1]').attr('selected','selected');
}
} else {
$('#ticketNumBox').hide();
if($('#ticket_type').val()==1){
// Type 1: show the criminal-ticket notice only.
$('#errorCriminalBox').show();
$('#errorGenericBox').hide();
$('#errorOtherBox').hide();
$('#errorParkingBox').hide();
}
else if($('#ticket_type').val()==4){
// Type 4: show the "other" notice only.
$('#errorOtherBox').show();
$('#errorGenericBox').hide();
$('#errorCriminalBox').hide();
$('#errorParkingBox').hide();
}
else if($('#ticket_type').val()==5){
// Type 5: show the parking notice only.
$('#errorParkingBox').show();
$('#errorGenericBox').hide();
$('#errorCriminalBox').hide();
$('#errorOtherBox').hide();
}
}
} else {
// No selection: hide the citation-number box and every notice.
$('#ticketNumBox').hide();
$('#errorGenericBox').hide();
$('#errorCriminalBox').hide();
$('#errorOtherBox').hide();
$('#errorParkingBox').hide();
}
}
); // END $('#ticket_type')
/*if($('#ticket_type').val()==2) {
$('#ticketNumBox').show();
$('#errorGenericBox').hide();
}*/
// Initial-state pass: applies the same box logic as the change handler above
// for a pre-selected ticket type on page load.
// NOTE(review): unlike the handler, type 3 here does not hide #num_tickets_box
// or force the quantity to 1 - confirm whether that divergence is intentional.
if($('#ticket_type').val()!='') {
if($('#ticket_type').val()==2 || $('#ticket_type').val()==3) {
$('#ticketNumBox').show();
$('#errorGenericBox').hide();
$('#errorCriminalBox').hide();
$('#errorOtherBox').hide();
$('#errorParkingBox').hide();
} else {
$('#ticketNumBox').hide();
if($('#ticket_type').val()==1){
$('#errorCriminalBox').show();
$('#errorGenericBox').hide();
$('#errorOtherBox').hide();
$('#errorParkingBox').hide();
}
else if($('#ticket_type').val()==4){
$('#errorOtherBox').show();
$('#errorGenericBox').hide();
$('#errorCriminalBox').hide();
$('#errorParkingBox').hide();
}
else if($('#ticket_type').val()==5){
$('#errorParkingBox').show();
// Redundant: #ticketNumBox was already hidden above for this branch.
$('#ticketNumBox').hide();
$('#errorGenericBox').hide();
$('#errorCriminalBox').hide();
$('#errorOtherBox').hide();
}
}
} else {
$('#ticketNumBox').hide();
$('#errorGenericBox').hide();
$('#errorCriminalBox').hide();
$('#errorOtherBox').hide();
$('#errorParkingBox').hide();
}
//if($('#num_tickets').val()>1 && $('#num_tickets').val()<6) {
// Initial-state pass for the quantity selector. Both live branches currently
// do the same thing (hide #same_date_box and reset the five-days label); the
// 1-vs-many split is retained from an older multi-ticket flow, see the
// commented-out #next_five_days_box lines.
if($('#num_tickets').val()>0 && $('#num_tickets').val() != ""){
////$('#next_five_days_box').show();
if($('#num_tickets').val()==1) {
$('#same_date_box').hide();
$("#lbl_next_five_days").html("Is your citation due within the next five days?<font class=\"required\"> * </font>");
//$('#next_five_days_box').show();
} else {
$('#same_date_box').hide();
$("#lbl_next_five_days").html("Is your citation due within the next five days?<font class=\"required\"> * </font>");
//$('#next_five_days_box').hide();
}
}
else{
$('#same_date_box').hide();
//$('#next_five_days_box').hide();
}
// Quantity change handler: mirrors the initial-state pass above. Both live
// branches hide #same_date_box and reset the five-days label; the old
// six-ticket cap logic is kept commented out for reference.
$('#num_tickets').change(
function()
{
/*
if($('#num_tickets').val()!='') {
if($('#num_tickets').val()==6) {
$('#errorTicketMore').show();
$('#btnBox').hide();
} else {
$('#errorTicketMore').hide();
$('#btnBox').show();
}
} else {
$('#errorTicketMore').hide();
$('#btnBox').show();
}
*/
//if($('#num_tickets').val()>1 && $('#num_tickets').val()<6) {ç
if($('#num_tickets').val()>0 && $('#num_tickets').val() != ""){
////$('#next_five_days_box').show();
//console.log($("[for=next_five_days_box]")[0]);
//console.log($('#num_tickets').val());
if($('#num_tickets').val()==1) {
$('#same_date_box').hide();
//$('#next_five_days_box').show();
$("#lbl_next_five_days").html("Is your citation due within the next five days?<font class=\"required\"> * </font>");
} else {
$('#same_date_box').hide();
$("#lbl_next_five_days").html("Is your citation due within the next five days?<font class=\"required\"> * </font>");
//$('#next_five_days_box').hide();
}
}
else{
$('#same_date_box').hide();
//// $('#next_five_days_box').hide();
}
}
); // END $('#num_tickets')
// Legacy guard kept for reference: previously hid the quantity/five-days
// boxes when the question was unanswered; now a no-op.
if($('#next_five_days').val()=='') {
//$('#num_tickets_box').hide();
////$('#next_five_days_box').hide();
}
// Citations due within the next five days cannot proceed online: answering
// "yes" (value 1) shows the generic error and hides the submit box, any
// other answer restores them.
$('#next_five_days').change(function () {
    var dueSoon = $('#next_five_days').val() == 1;
    $('#errorGenericBox').toggle(dueSoon);
    $('#btnBox').toggle(!dueSoon);
});
// Validate the two quantity questions before the form may proceed; the first
// unanswered field gets an alert plus focus and the click is cancelled.
// (The same-date check for multi-ticket orders remains disabled.)
$('#btnSubmitQty').click(function () {
    var required = [
        { id: '#next_five_days', msg: 'Please choose question(Is your citation due within the next five days?).' },
        { id: '#num_tickets', msg: 'Please choose question (How many violations did you receive at this time? ).' }
    ];
    for (var i = 0; i < required.length; i++) {
        var field = $(required[i].id);
        if (field.val() == '') {
            alert(required[i].msg);
            field.focus();
            return false;
        }
    }
});
/*********************************************************************************************************************************
*************************************************** Tickets Items ******************************************************
*********************************************************************************************************************************/
/*
if($('#cit_type_val').val()==21) {
$('#cit_other').show();
} else if($('#cit_type_val').val()==25) {
$('#speedingInfo').show();
}
*/
// On violation-type change: for non-blocked courthouses, ask the backend
// whether the selected county/violation combination carries a fee
// (urlCheckCitationTypeFee) and inject any follow-up questions configured
// for that violation type.
$('#cit_type_val').change(function() {
// BUG FIX: the original condition compared the jQuery object itself
// ($('#cit_type_val') != ""), which is always true; compare the value.
if($('#cit_type_val').val()!=""){
//if((parseInt($('#ticket_courthouse').val()) >= 123 && parseInt($('#ticket_courthouse').val()) <= 132) || $('#ticket_courthouse').val()==60)
// Skip the fee lookup entirely for blocked courthouses (123-132 and 60).
if(!((parseInt($('#ticket_courthouse').val()) >= 123 && parseInt($('#ticket_courthouse').val()) <= 132) || $('#ticket_courthouse').val()==60)) {
$.getJSON( $("#urlCheckCitationTypeFee").val()+"?county="+$('#ticket_county').val()+"&citation_type="+$('#cit_type_val').val(), function( data ) {
// console.log(data);
// Remove any questions injected by a previous selection.
$('.question_id').remove();
$("#cit_file_row_additional").hide();
$("#cit_agree_additional").hide();
if(data.estado == "0"){
// No fee configured for this combination: block submission.
$('#phase3_submit').hide();
$('#type_of_violation_error').show();
}else{
$('#phase3_submit').show();
$('#type_of_violation_error').hide();
// Widget templates: type 1 = yes/no/uncertain select, type 2 = yes/no
// select that can trigger an extra upload, type 3 = free-text input.
var tipo_1 = '<div class="form-group question_id"><label for="{id}" class="col-sm-4 control-label">{question}<font class="required"> * </font></label><div class="col-sm-4"><select name="{id}" id="{id}" class="form-control input-sm"><option value="">- Please Select -</option><option value="1">Yes</option><option value="0">No</option><option value="3">Uncertain</option></select></div></div>';
var tipo_2 = '<div class="form-group question_id"><label for="{id}" class="col-sm-4 control-label">{question}<font class="required"> * </font></label><div class="col-sm-4"><select onchange="checkAdditionalUpload(this);" name="{id}" id="{id}" class="form-control input-sm"><option value="">- Please Select -</option><option value="1">Yes</option><option value="0">No</option></select></div></div>';
var tipo_3 = '<div class="form-group question_id"><label for="{id}" class="col-sm-4 control-label">{question}<font class="required"> * </font></label><div class="col-sm-4"><input type="text" name="{id}" value="" id="{id}" placeholder="" class="form-control input-sm"></div>{label}</div>';
var total_additional_options = 0;
data.questions.forEach((question)=>{
if(question.type =="1")
$("#cit_type").after(tipo_1.replace(/{id}/g, "question_"+question.id).replace(/{question}/g, question.question));
if(question.type =="2")
$("#cit_type").after(tipo_2.replace(/{id}/g, "question_"+question.id).replace(/{question}/g, question.question));
if(question.type =="3")
$("#cit_type").after(tipo_3.replace(/{id}/g, "question_"+question.id).replace(/{question}/g, question.question).replace(/{label}/g, question.label));
// Questions flagged with additional_option also label the extra upload row.
if(question.additional_option=="1"){
total_additional_options++;
$("#lbl_file_additional").html(question.label+'<br /><span style="font-size:11px;">(.gif .jpg .png and .pdf files only)</span>');
}
});
$("#totalAddOptions").val(total_additional_options)
}
});
}
}
});
/////////////COMMENT - JCFL///////////////
// $('#cit_type_val').change(function() {
// $('#doubt_row').hide();
// $('#phase3_submit').show();
// $('#speedingInfo').hide();
// $('#cit_error').hide();
// $('#invalid_ticket').hide();
// $('#cit_other').hide();
// $('#speed_error').hide();
// $('#school_row').hide();
// if($('#cit_type_val').val()==25) {
// $('#speedingInfo').show();
// $('#school_row').show();
// }
// if($('#cit_type_val').val()==6 || $('#cit_type_val').val()==19 || $('#cit_type_val').val()==20 || $('#cit_type_val').val()==28) {
// if($('#cit_type_val').val()==6) {
// $('#error_careless_driving').show();
// $('#error_Toll_violations').hide();
// $('#error_Open_container_and_passenger_moving').hide();
// } else if($('#cit_type_val').val()==28) {
// $('#error_careless_driving').hide();
// $('#error_Toll_violations').show();
// $('#error_Open_container_and_passenger_moving').hide();
// } else if($('#cit_type_val').val()==19 || $('#cit_type_val').val()==20) {
// $('#error_careless_driving').hide();
// $('#error_Toll_violations').hide();
// $('#error_Open_container_and_passenger_moving').show();
// }
// $('#cit_file_row').hide();
// $('#cit_agree').hide();
// $('#phase3_submit').hide();
// } else if($('#cit_type_val').val()==8) {
// $('#cit_file_row').hide();
// $('#cit_or').hide();
// $('#cit_agree').hide();
// $('#cit_error').show();
// $('#phase3_submit').hide();
// } else if($('#cit_type_val').val()==31) {
// $('#cit_file_row').hide();
// $('#cit_or').hide();
// $('#cit_agree').hide();
// $('#invalid_ticket').show();
// $('#phase3_submit').hide();
// } else if($('#cit_type_val').val()==21) {
// $('#cit_file_row').show();
// $('#cit_or').show();
// $('#cit_agree').show();
// $('#cit_other').show();
// } else if($('#cit_type_val').val()==23) {
// $('#doubt_row').show();
// $('#phase3_submit').hide();
// $('#cit_file_row').hide();
// $('#cit_or').hide();
// $('#cit_agree').hide();
// } else {
// $('#error_careless_driving').hide();
// $('#error_Toll_violations').hide();
// $('#error_Open_container_and_passenger_moving').hide();
// $('#cit_file_row').show();
// $('#cit_agree').show();
// $('#phase3_submit').show();
// }
// }
// );
/////////////END - JCFL///////////////
// END $('#cit_type_val')
// Speeding details: validate both speed fields are numeric, then compute how
// far over the posted limit the driver was. 30+ mph over blocks submission;
// anything less reveals the school-zone question.
$('#speed_applicable').blur(function () {
    var unlawful = $('#unlawfull_speed').val();
    var posted = $('#speed_applicable').val();
    if (!IsNumeric(unlawful)) {
        alert('Please enter a numeric value for Unlawful Speed.');
        $('#unlawfull_speed').focus();
        return;
    }
    if (!IsNumeric(posted)) {
        alert('Please enter a numeric value for Posted Speed Limit.');
        $('#speed_applicable').focus();
        return;
    }
    $('#phase3_submit').show();
    $('#speed_error').hide();
    $('#school_row').hide();
    // Numeric strings coerce under subtraction; guarded by IsNumeric above.
    var speedDiff = unlawful - posted;
    if (speedDiff >= 30) {
        $('#speed_error').show();
        $('#phase3_submit').hide();
    } else if (speedDiff < 30) {
        $('#school_row').show();
    }
}); /* END $('#speed_applicable').blur(function()*/
/*
$( '#form_qty' ).submit( function( event ) {
//if
console.log($("#check_due").val());
if($("#check_due").val()=="0"){
event.preventDefault();
$.ajax({
'url' : $("#urlCheckCitationDueDate").val(),
'type' : 'POST', //the way you want to send data to your URL
'data' : {'date_due' : $("#date_citation_due").val()},
'success' : function(resp){ //probably this request will return anything, it'll be put in var "data"
//console.log(resp);
//return false;
if(resp.estado == "0"){
$('#county_error').hide();
if($('#ticket_county').val()=='') {
alert('Please choose County.');
$('#ticket_county').focus();
return false;
}
if($('#citations_number').val()=='') {
alert('Please choose Citation Number.');
$('#citations_number').focus();
return false;
}
if($('#date_citation').val()=='') {
alert('Please choose Date of Citation.');
$('#date_citation').focus();
return false;
}
if(!isDateRSC($('#date_citation').val())) {
alert('Invalid Date');
$('#date_citation').focus();
return false;
}
if($('#inv_acc').val()=='') {
alert('Please specify if you were involved with accident.');
$('#inv_acc').focus();
return false;
}
var eitherone=0;
if($('#cit_file').val()!='') {
eitherone=1;
var ext = /[^.]+$/.exec($('#cit_file').val());
if(!validateFileExtension(ext)) {
alert('Please upload a valid document, only bmp,jpeg,pdf are supported.');
$('#cit_file').focus();
return false;
}
}
if($('#prev_cit_file').val()!='') {
eitherone=1;
}
if (document.getElementById('agree_to_conditions').checked){
eitherone=1;
}
if(eitherone==0) {
alert('You must upload a document or agree to the terms and conditions');
$('#cit_file').focus();
return false;
}
//console.log("submit1");
//$('#form_qty').submit();
// $('#form_qty').submit();
//document.getElementById('form_qty').submit();
$("#check_due").val("1");
$('#form_qty').submit();
}
else{
//$('#phase3_submit').hide();
$('#county_error').show();
return false;
}
},
error: function(resp) { // 500 Status Header
}
});
return false;
}
});
*/
// Phase-3 submit handler. Flow:
//  1. fLateDays('date_citation_due') must accept the due date at all.
//  2. First click (#check_due == "0"): POST the due date to the backend; if
//     it is not past due (resp.estado == "0"), flip #check_due to "1" and
//     replay the click so the local validation branch runs on the second pass.
//  3. Second click: validate county, citation number/date, accident answer,
//     violation type and the upload-or-agree requirement.
$('#submit_phase3').click(function(){
if(fLateDays('date_citation_due')){
if($("#check_due").val()=="0"){
$.ajax({
'url' : $("#urlCheckCitationDueDate").val(),
'type' : 'POST', //the way you want to send data to your URL
'data' : {'date_due' : $("#date_citation_due").val()},
'success' : function(resp){ //probably this request will return anything, it'll be put in var "data"
// console.log(resp);
// return false;
// Expired citations for courthouses 36/67 stop the flow here.
if(resp.expirado == "1"){
if($('#ticket_courthouse').val()==36 || $('#ticket_courthouse').val()==67) {
//courthouse_error
$('#courthouse_error').show();
return false;
}
}
if(resp.estado == "0"){
// Not past due: mark as checked and replay the click so the
// synchronous validation branch below executes.
$("#check_due").val("1");
$('#submit_phase3').click();
//console.log("submit1");
//$('#form_qty').submit();
//$('#form_qty').submit();
//document.getElementById('form_qty').submit();
}
else{
// else{
////$('#phase3_submit').hide();
$('#past_due_error').show();
// }
return false;
}
},
error: function(resp) { // 500 Status Header
}
});
// Cancel this submit; the AJAX callback decides whether to replay it.
return false;
}
else{
$('#past_due_error').hide();
//checkFields
// #checkFields == '1' enables the identity/document validations below.
if($('#ticket_county').val()=='' && $('#checkFields').val()=='1') {
alert('Please choose County.');
$('#ticket_county').focus();
return false;
}
if($('#citations_number').val()=='' && $('#checkFields').val()=='1') {
alert('Please choose Citation Number.');
$('#citations_number').focus();
return false;
}
if($('#date_citation').val()=='' && $('#checkFields').val()=='1') {
alert('Please choose Date of Citation.');
$('#date_citation').focus();
return false;
}
if(!isDateRSC($('#date_citation').val()) && $('#checkFields').val()=='1') {
alert('Invalid Date');
$('#date_citation').focus();
return false;
}
if($('#inv_acc').val()=='') {
alert('Please specify if you were involved with accident.');
$('#inv_acc').focus();
return false;
}
// "eitherone": the user must either upload a citation document (new or
// previously uploaded) or tick the agree-to-conditions checkbox.
var eitherone=0;
if($('#cit_file').val()!='' && $('#checkFields').val()=='1') {
eitherone=1;
var ext = /[^.]+$/.exec($('#cit_file').val());
if(!validateFileExtension(ext)) {
alert('Please upload a valid document, only bmp,jpeg,pdf are supported.');
$('#cit_file').focus();
return false;
}
}
if($('#cit_type_val').val()=='0') {
alert('Please select a type of Violation.');
// $('#inv_acc').focus();
return false;
}
if($('#prev_cit_file').val()!='') {
eitherone=1;
}
if (document.getElementById('agree_to_conditions').checked){
eitherone=1;
}
if(eitherone==0 && $('#checkFields').val()=='1') {
alert('You must upload a document or agree to the terms and conditions');
$('#cit_file').focus();
return false;
}
}
}
else
// Due-date check failed outright: block submission.
return false
} // END function(){
);
/* $('#submit_phase3') */
// "Edit" link for a previously uploaded citation image: re-expose the upload
// input and the agreement row, then hide the link itself.
$('#editimage').click(function () {
    $('#cit_file').show();
    $('#cit_agree').show();
    $(this).hide();
});
// Same as #editimage, but for the additional-document upload row.
$('#editimageadditional').click(function () {
    $('#cit_file_additional').show();
    $('#cit_agree_additional').show();
    $(this).hide();
});
// Picking a new citation file invalidates any prior agreement: hide the
// agreement row and untick the checkbox.
$('#cit_file').change(function () {
    $('#cit_agree').hide();
    $('#agree_to_conditions').prop('checked', false);
});
// Same invalidation rule for the additional-document upload.
$('#cit_file_additional').change(function () {
    $('#cit_agree_additional').hide();
    $('#agree_to_conditions_additional').prop('checked', false);
});
// Accident question. Answering "yes" (1) halts the flow with an error and
// hides the violation-type controls; answering "no" restores them, resets
// the injury selects, and re-applies the per-violation-type blocking rules
// for types 6 (careless driving), 28 (toll) and 19/20 (open container /
// passenger moving), matching the notices shown below.
$('#inv_acc').change(function(){
$('#phase3_submit').show();
if($('#inv_acc').val()==1) {
$('#inj_row').hide(); /*If an accident was selected, stop the process*/
$('#inj_row_error').show(); $('#cit_file_row').hide(); $('#cit_agree').hide(); $('#phase3_submit').hide();
$('#cit_type').hide();
$('#speedingInfo').hide();
$('#cit_error').hide();
$('#cit_other').hide();
} else {
$('#inj_row_error').hide(); $('#cit_file_row').show(); $('#cit_agree').show(); $('#phase3_submit').show();// add by rsc
$('#inj_row').hide();
$('#inj_ser').hide();
$('#inj_ser_error').hide();
$('#cit_type').show();
// Reset both injury selects back to "no".
$("#acc_inj option[value=0]").attr("selected",true); /*add by rsc*/
$("#serious_inj option[value=0]").attr("selected",true); /*add by rsc*/
if($('#cit_type_val').val()==6 || $('#cit_type_val').val()==19 || $('#cit_type_val').val()==20 ||
$('#cit_type_val').val()==28) { /*add by rsc*/
if($('#cit_type_val').val()==6) {
$('#error_careless_driving').show();
$('#error_Toll_violations').hide();
$('#error_Open_container_and_passenger_moving').hide();
} else if($('#cit_type_val').val()==28) {
$('#error_careless_driving').hide();
$('#error_Toll_violations').show();
$('#error_Open_container_and_passenger_moving').hide();
} else if($('#cit_type_val').val()==19 || $('#cit_type_val').val()==20) {
$('#error_careless_driving').hide();
$('#error_Toll_violations').hide();
$('#error_Open_container_and_passenger_moving').show();
}
// These violation types cannot proceed online.
$('#cit_file_row').hide();
$('#cit_agree').hide();
$('#phase3_submit').hide();
} else {
$('#error_careless_driving').hide();
$('#error_Toll_violations').hide();
$('#error_Open_container_and_passenger_moving').hide();
$('#cit_file_row').show();
$('#cit_agree').show();
$('#phase3_submit').show();
}
}
});
/* $('#inv_acc').change */
}
/* function() */
);
/* $(document).ready */ <file_sep>/HealthClinic/Models/hireusm.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
namespace HealthClinic.Models
{
/// <summary>
/// Request model bound from the "hire us" form: applicant contact details
/// plus the list of submitted citations and a server-assigned correlation
/// token (see HomeController.HireUsNew).
/// </summary>
public class hireusm
{
public string firstname { get; set; }
public string lastname { get; set; }
public string address { get; set; }
public string city { get; set; }
public string state { get; set; }
public string zipcode { get; set; }
public string homephone { get; set; }
// Employment details.
public string placeofemp { get; set; }
public string empphone { get; set; }
public string email { get; set; }
// Driver licence number.
public string license { get; set; }
// Date of birth.
public DateTime dob { get; set; }
// How the applicant was referred.
public string preferedby { get; set; }
public string country { get; set; }
// Free-text description of the legal problem.
public string legalprb { get; set; }
// Correlation token shared with the citation rows; set server-side.
public string token { get; set; }
public List<Citations1> citations { get; set; }
}
}<file_sep>/HealthClinic/Models/Citations2.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
namespace HealthClinic.Models
{
/// <summary>
/// Form-binding model for the second (optional) citation on the "hire us"
/// form; property names carry the "2" suffix used by the form fields and
/// are mapped to a citations entity in HomeController.HireUsNew.
/// </summary>
public class Citations2
{
public string citationnumber2 { get; set; }
public DateTime citationdate2 { get; set; }
public string citationcountry2 { get; set; }
public string citationcourt2 { get; set; }
public DateTime citationcomplydate2 { get; set; }
public string citationvcn2 { get; set; }
}
}<file_sep>/HealthClinic/Controllers/HomeController.cs
using HealthClinic.Models;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.Mvc;
namespace HealthClinic.Controllers
{
/// <summary>
/// MVC controller backing the public site pages and their form submissions:
/// the professional-drivers fleet form, the contact form and the
/// multi-citation "hire us" form.
/// </summary>
public class HomeController : Controller
{
    /// <summary>Renders the home page.</summary>
    public ActionResult Index()
    {
        return View();
    }

    /// <summary>Renders the about page.</summary>
    public ActionResult About()
    {
        ViewBag.Message = "Your application description page.";
        return View();
    }

    /// <summary>Renders the contact page.</summary>
    public ActionResult Contact()
    {
        ViewBag.Message = "Your contact page.";
        return View();
    }

    /// <summary>
    /// Generic confirmation page. The message pulled from TempData has the
    /// form "text~/redirect-path" (the "~/" separator is appended by the
    /// posting actions below).
    /// </summary>
    public ActionResult Messg()
    {
        string msg = (string)TempData["Message"] ?? "Hey There!~/";
        ViewBag.Message = msg;
        return View();
    }

    /// <summary>
    /// Persists a fleet-services enquiry and redirects to the confirmation
    /// page. Field names (input_N) mirror the form markup.
    /// </summary>
    [HttpPost]
    [ActionName("drivers-fleet-form")]
    public ActionResult fleetform(driverfleet df)
    {
        using (var context = new healthclinicEntities())
        {
            fleetservices flt = new fleetservices
            {
                doyouearn = df.input_1,
                cdllicense = df.input_2,
                tt6 = df.input_3,
                recommend = df.input_4,
                ywhynot = df.input_5,
                fname = df.input_6_2_1,
                lname = df.input_6_2_2,
                addr = df.input_6_3_1,
                addr1 = df.input_6_3_2,
                city = df.input_6_3_3,
                C_state = df.input_6_3_4,
                zip = df.input_6_3_5,
                email = df.input_6_4,
                phone = df.input_6_5,
                company = df.input_6_6,
                // Treat a missing driver count as zero.
                noofdrivers = int.Parse(df.input_7 ?? "0"),
                addfb = df.input_8,
            };
            context.fleetservices.Add(flt);
            context.SaveChanges();
        }
        // Typo fix: was "You submission has been saved and being processed".
        TempData["Message"] = "Your submission has been saved and is being processed!~/professional-drivers-fleet.html";
        return RedirectToAction("messg");
    }

    /// <summary>
    /// Persists a contact-form enquiry. Unbound dates (DateTime default,
    /// which renders as "1/1/0001") are replaced with the current time.
    /// </summary>
    [HttpPost]
    [ActionName("contact-form")]
    public ActionResult ContactForm(contact_form cf)
    {
        if (cf.input_9.ToString().Contains("1/1/0001"))
        {
            cf.input_9 = DateTime.Now;
        }
        if (cf.input_10.ToString().Contains("1/1/0001"))
        {
            cf.input_10 = DateTime.Now;
        }
        using (var context = new healthclinicEntities())
        {
            contactform frm = new contactform
            {
                C_type = cf.input_1,
                name = cf.input_2,
                email = cf.input_3,
                phone = cf.input_4,
                fax = cf.input_5,
                dlicense = cf.input_6,
                tickethelp = cf.input_7,
                citationno = cf.input_8,
                citationdate = cf.input_9,
                complydate = cf.input_10,
                conticket = cf.input_11,
                vcnumber = cf.input_12,
                city = cf.input_13,
                // BUG FIX: the original `cf.input_15 ?? "" + cf.input_14 ?? ""`
                // parsed as input_15 ?? (("" + input_14) ?? "") because `+`
                // binds tighter than `??`, so input_14 was dropped whenever
                // input_15 was non-null. Concatenate both explicitly.
                info = (cf.input_15 ?? "") + (cf.input_14 ?? "")
            };
            context.contactform.Add(frm);
            context.SaveChanges();
        }
        // Typo fix: was "You submission has been saved".
        TempData["Message"] = "Your submission has been saved, We'll contact you back within 3 business days!~/contact.html";
        return RedirectToAction("messg");
    }

    /// <summary>
    /// Persists a "hire us" submission: one hireus record plus one citation
    /// row per filled-in citation (citation 1 is mandatory, 2-4 optional).
    /// All rows share a timestamp-derived token so they can be correlated,
    /// and the whole submission is committed in a single SaveChanges.
    /// </summary>
    [HttpPost]
    public ActionResult HireUsNew(hireusm hu, Citations1 c1, Citations2 c2, Citations3 c3, Citations4 c4)
    {
        try
        {
            using (var context = new healthclinicEntities())
            {
                // Correlation token shared by the applicant and citation rows.
                string tok = DateTime.Now.ToString("yyyy-MM-dd-T-HH-mm-ss-fffffffK");
                hireus hiu = new hireus
                {
                    firstname = hu.firstname,
                    lastname = hu.lastname,
                    address = hu.address,
                    city = hu.city,
                    state = hu.state,
                    zipcode = hu.zipcode,
                    homephone = hu.homephone,
                    placeofemp = hu.placeofemp,
                    empphone = hu.empphone,
                    email = hu.email,
                    license = hu.license,
                    dob = hu.dob,
                    preferedby = hu.preferedby,
                    country = hu.country,
                    legalprb = hu.legalprb,
                    token = tok
                };
                context.hireus.Add(hiu);
                // The first citation is always present.
                citations c = new citations
                {
                    citationnumber = c1.citationnumber1,
                    citationcomplydate = c1.citationcomplydate1,
                    citationcountry = c1.citationcountry1,
                    citationcourt = c1.citationcourt1,
                    citationdate = c1.citationdate1,
                    citationvcn = c1.citationvcn1,
                    token = tok
                };
                context.citations.Add(c);
                if (c2.citationnumber2 != null)
                {
                    citations cc = new citations
                    {
                        citationnumber = c2.citationnumber2,
                        citationcomplydate = c2.citationcomplydate2,
                        citationcountry = c2.citationcountry2,
                        citationcourt = c2.citationcourt2,
                        citationdate = c2.citationdate2,
                        citationvcn = c2.citationvcn2,
                        token = tok
                    };
                    context.citations.Add(cc);
                }
                if (c3.citationnumber3 != null)
                {
                    citations ccc = new citations
                    {
                        citationnumber = c3.citationnumber3,
                        citationcomplydate = c3.citationcomplydate3,
                        citationcountry = c3.citationcountry3,
                        citationcourt = c3.citationcourt3,
                        citationdate = c3.citationdate3,
                        citationvcn = c3.citationvcn3,
                        token = tok
                    };
                    context.citations.Add(ccc);
                }
                // BUG FIX: this guard previously re-checked c3.citationnumber3,
                // so citation 4 was only saved when citation 3 was filled in
                // (and could insert a row of nulls); it must check c4.
                if (c4.citationnumber4 != null)
                {
                    citations cccc = new citations
                    {
                        citationnumber = c4.citationnumber4,
                        citationcomplydate = c4.citationcomplydate4,
                        citationcountry = c4.citationcountry4,
                        citationcourt = c4.citationcourt4,
                        citationdate = c4.citationdate4,
                        citationvcn = c4.citationvcn4,
                        token = tok
                    };
                    context.citations.Add(cccc);
                }
                // Finally commit the whole submission as one transaction.
                context.SaveChanges();
                TempData["Message"] = "Data has been saved!~/hire-us-new.html";
                return RedirectToAction("messg");
            }
        }
        catch (Exception ex)
        {
            // Typo fix: was "There is problem with you submission".
            TempData["Message"] = "There is a problem with your submission, please try again: " + ex.Message + "!~/hire-us-new.html";
            return RedirectToAction("messg");
        }
    }
}
}<file_sep>/HealthClinic/Models/driverfleet.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
namespace HealthClinic.Models
{
/// <summary>
/// Form-binding model for the professional-drivers fleet form. The flat
/// input_N names mirror the form field ids and are mapped onto the
/// fleetservices entity in HomeController.fleetform.
/// </summary>
public class driverfleet
{
public string input_1 { get; set; }   // mapped to doyouearn
public string input_2 { get; set; }   // mapped to cdllicense
public string input_3 { get; set; }   // mapped to tt6
public string input_4 { get; set; }   // mapped to recommend
public string input_5 { get; set; }   // mapped to ywhynot
public string input_6_1 { get; set; } // not persisted by fleetform
public string input_6_2_1 { get; set; } // first name
public string input_6_2_2 { get; set; } // last name
public string input_6_3_1 { get; set; } // address line 1
public string input_6_3_2 { get; set; } // address line 2
public string input_6_3_3 { get; set; } // city
public string input_6_3_4 { get; set; } // state
public string input_6_3_5 { get; set; } // zip
public string input_6_3_6 { get; set; } // not persisted by fleetform
public string input_6_4 { get; set; } // email
public string input_6_5 { get; set; } // phone
public string input_6_6 { get; set; } // company
public string input_7 { get; set; }   // number of drivers (parsed to int)
public string input_8 { get; set; }   // mapped to addfb
}
}<file_sep>/HealthClinic/Models/contact_form.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
namespace HealthClinic.Models
{
/// <summary>
/// Form-binding model for the contact form. The flat input_N names mirror
/// the form field ids and are mapped onto the contactform entity in
/// HomeController.ContactForm.
/// </summary>
public class contact_form
{
public string input_1 { get; set; }  // contact type (C_type)
public string input_2 { get; set; }  // name
public string input_3 { get; set; }  // email
public string input_4 { get; set; }  // phone
public string input_5 { get; set; }  // fax
public string input_6 { get; set; }  // driver licence
public string input_7 { get; set; }  // ticket-help selection
public string input_8 { get; set; }  // citation number
public DateTime input_9 { get; set; }  // citation date (defaulted server-side if unbound)
public DateTime input_10 { get; set; } // comply date (defaulted server-side if unbound)
public string input_11 { get; set; } // contested ticket
public string input_12 { get; set; } // VC number
public string input_13 { get; set; } // city
public string input_14 { get; set; } // additional info (appended to input_15)
public string input_15 { get; set; } // additional info
}
}<file_sep>/HealthClinic/Models/Citations4.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
namespace HealthClinic.Models
{
/// <summary>
/// Form-binding model for the fourth (optional) citation on the "hire us"
/// form; property names carry the "4" suffix used by the form fields and
/// are mapped to a citations entity in HomeController.HireUsNew.
/// </summary>
public class Citations4
{
public string citationnumber4 { get; set; }
public DateTime citationdate4 { get; set; }
public string citationcountry4 { get; set; }
public string citationcourt4 { get; set; }
public DateTime citationcomplydate4 { get; set; }
public string citationvcn4 { get; set; }
}
}<file_sep>/HealthClinic/App_Start/BundleConfig.cs
using System.Collections.Generic;
using System.Web;
using System.Web.Optimization;
namespace HealthClinic
{
/// <summary>
/// IBundleOrderer that returns the bundle's files exactly as registered,
/// bypassing any re-ordering, so script load order can be controlled by
/// the registration sequence.
/// </summary>
class NonOrderingBundleOrderer : IBundleOrderer
{
public IEnumerable<BundleFile> OrderFiles(BundleContext context, IEnumerable<BundleFile> files)
{
return files;
}
}
/// <summary>
/// Extension helpers for Bundle. (The misspelled "Extentions" name is kept;
/// renaming a public-facing identifier would break existing call sites.)
/// </summary>
static class BundleExtentions
{
/// <summary>Fluently installs the pass-through orderer on a bundle.</summary>
public static Bundle NonOrdering(this Bundle bundle)
{
bundle.Orderer = new NonOrderingBundleOrderer();
return bundle;
}
}
/// <summary>
/// ASP.NET bundling configuration for the site's theme scripts.
/// </summary>
public class BundleConfig
{
// For more information on bundling, visit https://go.microsoft.com/fwlink/?LinkId=301862
/// <summary>
/// Registers the single script bundle. NonOrdering() preserves the exact
/// sequence below (jQuery first, then the plugins that extend it).
/// </summary>
public static void RegisterBundles(BundleCollection bundles)
{
bundles.Add(new ScriptBundle("~/bundles/js").NonOrdering().Include(
"~/themes/wp-content/themes/the-ticket-clinic/js/jquery.min.js",
"~/themes/wp-content/themes/the-ticket-clinic/js/app.js",
"~/themes/wp-content/themes/the-ticket-clinic/js/jquery.cookie.js",
"~/themes/wp-content/themes/the-ticket-clinic/js/jquery.cycle2.min.js",
"~/themes/wp-content/themes/the-ticket-clinic/js/jquery.cycle2.tile.js",
"~/themes/wp-content/themes/the-ticket-clinic/js/jquery.imgliquid.min.js",
"~/themes/wp-content/themes/the-ticket-clinic/js/jquery.matchheight.js",
"~/themes/wp-content/themes/the-ticket-clinic/js/slick.min.js"
// "~/themes/wp-content/plugins/contact-form-7/includes/js/scripts58e0.js?ver=5.1.4",
// "~/themes/wp-includes/js/wp-embed.min0606.js?ver=5.2.9",
// "~/themes/wp-content/plugins/js_composer/assets/js/dist/js_composer_front.mina752.js?ver=4.11.2.1"
));
//bundles.Add(new ScriptBundle("~/bundles/jqueryval").Include(
// "~/Scripts/jquery.validate*"));
//// Use the development version of Modernizr to develop with and learn from. Then, when you're
//// ready for production, use the build tool at https://modernizr.com to pick only the tests you need.
//bundles.Add(new ScriptBundle("~/bundles/modernizr").Include(
// "~/Scripts/modernizr-*"));
//bundles.Add(new ScriptBundle("~/bundles/bootstrap").Include(
// "~/Scripts/bootstrap.js"));
//bundles.Add(new StyleBundle("~/Content/css").Include(
// "~/Content/bootstrap.css",
// "~/Content/site.css"));
}
}
}
| 8631ab9dcbd0bc40e45da1bc6862da39a6dbaf93 | [
"JavaScript",
"C#"
] | 10 | C# | DeveloperMujtaba/Health-Clinic | c07ed4c3681f04d364f0f37fe7ec1fe6a759e9d6 | 41702e1442063c1f5527ede7806ec4fc1b7e65ed |
refs/heads/main | <repo_name>zywickib/beer-manager<file_sep>/src/main/java/pl/zywickib/core/domain/dao/BeerDao.java
package pl.zywickib.core.domain.dao;
import lombok.extern.slf4j.Slf4j;
import org.tkit.quarkus.jpa.daos.AbstractDAO;
import pl.zywickib.core.domain.model.Beer;
import pl.zywickib.core.domain.model.Brewery;
import javax.enterprise.context.ApplicationScoped;
@Slf4j
@ApplicationScoped
public class BeerDao extends AbstractDAO<Beer> {

    /**
     * Persists a beer and attaches it to an existing brewery. A JPA
     * reference proxy is used so the brewery row is not loaded just to
     * populate the foreign key.
     */
    public Beer create(Beer beer, String breweryId) {
        beer.setBrewery(getEntityManager().getReference(Brewery.class, breweryId));
        return super.create(beer);
    }
}
<file_sep>/src/main/java/pl/zywickib/core/rs/v1/controller/BreweryController.java
package pl.zywickib.core.rs.v1.controller;
import lombok.extern.slf4j.Slf4j;
import org.eclipse.microprofile.openapi.annotations.Operation;
import org.eclipse.microprofile.openapi.annotations.enums.SchemaType;
import org.eclipse.microprofile.openapi.annotations.media.Content;
import org.eclipse.microprofile.openapi.annotations.media.Schema;
import org.eclipse.microprofile.openapi.annotations.parameters.Parameter;
import org.eclipse.microprofile.openapi.annotations.responses.APIResponse;
import org.eclipse.microprofile.openapi.annotations.responses.APIResponses;
import org.eclipse.microprofile.openapi.annotations.tags.Tag;
import pl.zywickib.core.domain.dao.BreweryDao;
import pl.zywickib.core.domain.model.Beer;
import pl.zywickib.core.domain.model.Brewery;
import pl.zywickib.core.rs.v1.dto.BeerResultDto;
import pl.zywickib.core.rs.v1.dto.BeerUpdateDto;
import pl.zywickib.core.rs.v1.dto.BreweryDto;
import pl.zywickib.core.rs.v1.dto.BreweryResultDto;
import pl.zywickib.core.rs.v1.mapper.BeerMapper;
import pl.zywickib.core.rs.v1.mapper.BreweryMapper;
import javax.enterprise.context.ApplicationScoped;
import javax.inject.Inject;
import javax.transaction.Transactional;
import javax.validation.Valid;
import javax.validation.constraints.NotNull;
import javax.ws.rs.*;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import java.util.List;
import java.util.stream.Collectors;
@Slf4j
@ApplicationScoped
@Path("/v1/brewery")
@Tag(name = "brewery")
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
@Transactional(value = Transactional.TxType.NOT_SUPPORTED)
public class BreweryController {
// Persistence access for Brewery entities.
@Inject
BreweryDao breweryDao;
// Maps Brewery entities to/from their REST DTO representations.
@Inject
BreweryMapper breweryMapper;
@GET
@Path("{guid}")
@Operation(operationId = "getBreweryByGuid", description = "Get brewery by guid")
@APIResponses({ @APIResponse(responseCode = "200",
        description = "OK",
        content = @Content(mediaType = MediaType.APPLICATION_JSON,
                schema = @Schema(implementation = BreweryResultDto.class, type = SchemaType.OBJECT))),
        @APIResponse(responseCode = "400", description = "Bad request"),
        @APIResponse(responseCode = "404", description = "Not Found"),
        @APIResponse(responseCode = "500", description = "Internal Server Error")
})
public Response getBreweryByGuid(@PathParam("guid") String guid) {
    // Look the brewery up by primary key; a missing row yields a 404 with a
    // JSON error payload, otherwise the mapped DTO is returned.
    Brewery found = breweryDao.findById(guid);
    if (found == null) {
        return Response.status(404).entity("{\"message\":\"Brewery with guid: " + guid + " not found\"}").build();
    }
    BreweryResultDto dto = breweryMapper.map(found);
    return Response.ok(dto).build();
}
@GET
@Path("")
@Operation(operationId = "getAllBreweries", description = "Get all breweries")
@APIResponses({@APIResponse(responseCode = "200",
        description = "OK",
        content = @Content(mediaType = MediaType.APPLICATION_JSON,
                schema = @Schema(implementation = BreweryResultDto[].class))),
        @APIResponse(responseCode = "400", description = "Bad request"),
        @APIResponse(responseCode = "404", description = "Not Found"),
        @APIResponse(responseCode = "500", description = "Internal Server Error")
})
public Response getAllBreweries() {
    // Stream every brewery through the entity-to-DTO mapper and return the list.
    List<BreweryResultDto> payload = breweryDao.findAll()
            .map(breweryMapper::map)
            .collect(Collectors.toList());
    return Response.ok(payload).build();
}
@POST
@Path("")
@Operation(operationId = "addNewBrewery", description = "Add new brewery")
@APIResponses({@APIResponse(responseCode = "200",
description = "OK",
content = @Content(mediaType = MediaType.APPLICATION_JSON,
schema = @Schema(implementation = BreweryResultDto.class))),
@APIResponse(responseCode = "400", description = "Bad request"),
@APIResponse(responseCode = "404", description = "Not Found"),
@APIResponse(responseCode = "500", description = "Internal Server Error")
})
@Transactional(value = Transactional.TxType.REQUIRED)
public Response addBrewery(@Parameter @Valid @NotNull BreweryDto breweryDto) {
Brewery dbBrewery = breweryDao.findByName(breweryDto.getName());
if (dbBrewery != null) {
return Response.status(400).entity("{\"message\":\"Brewery with name: " + breweryDto.getName()
+ " already exists\"}").build();
}
Brewery createdBrewery = breweryDao.create(breweryMapper.map(breweryDto));
return Response.ok(breweryMapper.map(createdBrewery)).build();
}
@DELETE
@Path("{guid}")
@Operation(operationId = "deleteBrewery", description = "Get brewery by guid")
@APIResponses({@APIResponse(responseCode = "204", description = "No Content"),
@APIResponse(responseCode = "400", description = "Bad request"),
@APIResponse(responseCode = "404", description = "Not Found"),
@APIResponse(responseCode = "500", description = "Internal Server Error")
})
@Transactional(value = Transactional.TxType.REQUIRED)
public Response deleteBrewery(@PathParam("guid") String guid) {
Brewery brewery = breweryDao.findById(guid);
if (brewery == null) {
return Response.status(404).entity("{\"message\":\"Brewery with guid: " + guid + " not found\"}").build();
}
breweryDao.deleteQueryById(guid);
return Response.noContent().build();
}
@PUT
@Path("{guid}")
@Operation(operationId = "updateBreweryByGuid", description = "update brewery")
@APIResponses({@APIResponse(responseCode = "200",
description = "OK",
content = @Content(mediaType = MediaType.APPLICATION_JSON,
schema = @Schema(implementation = BreweryResultDto.class))),
@APIResponse(responseCode = "400", description = "Bad request"),
@APIResponse(responseCode = "404", description = "Not Found"),
@APIResponse(responseCode = "500", description = "Internal Server Error")
})
@Transactional(value = Transactional.TxType.REQUIRED)
public Response updateBrewery(@PathParam("guid") String guid, @Parameter @Valid @NotNull BreweryDto breweryDto) {
Brewery dbBreweryByName = breweryDao.findByName(breweryDto.getName());
if (dbBreweryByName != null) {
return Response.status(400).entity("{\"message\":\"Brewery with name: " + breweryDto.getName()
+ " already exists\"}").build();
}
Brewery dbBrewery = breweryDao.findById(guid);
if (dbBrewery == null) {
return Response.status(404).entity("{\"message\":\"Brewery with guid: " + guid + " not found\"}").build();
}
dbBrewery.setName(breweryDto.getName());
dbBrewery.setLogo(breweryDto.getLogo());
Brewery updatedBrewery = breweryDao.update(dbBrewery);
return Response.ok(breweryMapper.map(updatedBrewery)).build();
}
}
<file_sep>/src/main/java/pl/zywickib/core/rs/v1/dto/BeerUpdateDto.java
package pl.zywickib.core.rs.v1.dto;
import lombok.Getter;
import lombok.Setter;
import pl.zywickib.core.domain.model.Brewery;
import javax.validation.constraints.NotBlank;
/**
 * Write-side DTO carrying the mutable fields of a beer for create/update
 * requests. Lombok generates the getters and setters.
 */
@Getter
@Setter
public class BeerUpdateDto {
// Beer name; bean validation rejects null or blank values.
@NotBlank
private String name;
// Optional style label; no validation applied.
private String style;
// Presumably International Bitterness Units — confirm with the domain model.
private Integer ibu;
// Presumably alcohol by volume — confirm with the domain model.
private Double abv;
private String description;
// NOTE(review): this references the domain model class directly, whereas
// BeerResultDto uses BreweryDto — confirm the coupling is intentional.
private Brewery brewery;
}
<file_sep>/src/main/java/pl/zywickib/core/rs/v1/dto/BeerResultDto.java
package pl.zywickib.core.rs.v1.dto;
import lombok.Getter;
import lombok.Setter;
/**
 * Read-side DTO returned by the beer endpoints. Mirrors the update DTO's
 * fields plus an identifier, and exposes the brewery as a DTO.
 * Lombok generates the getters and setters.
 */
@Getter
@Setter
public class BeerResultDto {
// Identifier of the beer (not present on the write-side DTO).
private String id;
private String name;
private String style;
// Presumably International Bitterness Units — confirm with the domain model.
private Integer ibu;
// Presumably alcohol by volume — confirm with the domain model.
private Double abv;
private String description;
private BreweryDto brewery;
}
<file_sep>/src/main/java/pl/zywickib/core/rs/v1/mapper/BeerMapper.java
package pl.zywickib.core.rs.v1.mapper;
import org.mapstruct.Mapper;
import pl.zywickib.core.domain.model.Beer;
import pl.zywickib.core.rs.v1.dto.BeerResultDto;
import pl.zywickib.core.rs.v1.dto.BeerUpdateDto;
/**
 * MapStruct mapper between the Beer domain model and its REST DTOs.
 * componentModel = "cdi" makes the generated implementation a CDI bean so it
 * can be injected like any other dependency.
 */
@Mapper(componentModel = "cdi")
public interface BeerMapper {
/** Entity -> read DTO for responses. */
BeerResultDto map(Beer beer);
/** Write DTO -> entity for create/update requests. */
Beer map(BeerUpdateDto beerUpdateDto);
}
| 15676e0f3542e03b730a11b8ec4dd19a0d08cc83 | [
"Java"
] | 5 | Java | zywickib/beer-manager | 47935c3fae528f27003ff3ce487d554bd81428e3 | 1ef440713e9e403bf629f15eb75f4a82b16f1176 |
refs/heads/master | <repo_name>sunahhlee/TopHCS<file_sep>/setup.py
from setuptools import setup
# Packaging metadata for the topHCS library (TopH Count Sketch).
# Installs the single `topHCS` package; run `pip install .` from the repo root.
setup(name='topHCS',
      version='0.0.1',
      description='TopH Count Sketch',
      url='https://github.com/sunahhlee/TopHCS',
      author='<NAME>',
      author_email='<EMAIL>',
      license='MIT',
      packages=['topHCS'],
      # Install unzipped so the package is available as regular files.
      zip_safe=False)
<file_sep>/topHCS/old/run_mini.py
import math
import numpy as np
import copy
import torch
from csvec import CSVec
from topHCS import TopHCS
import matplotlib
import matplotlib.pyplot as plt
from matplotlib.ticker import MaxNLocator
from matplotlib import cm
# Experiment driver: compare a plain CSVec sketch baseline against TopHCS.
# Builds 4 random "worker" vectors, sums them, and measures how well each
# sketch recovers the top-k coordinates of the true sum for several k.
device = 'cpu'

# Four random worker vectors of length 40 with integer entries in [-10, 10).
vecs = []
for i in range(4):
    sampleVec = torch.randint(-10, 10, (40,), dtype=torch.float)
    vecs.append(sampleVec)
assert len(vecs) == 4

for q, paths in enumerate([1]):  # single pass; loop kept for easy extension
    # Exact (dense) sum of all worker vectors — the ground truth.
    summed = vecs[0].clone()
    for v in vecs[1:]:
        summed += v
    # Indices sorted by squared magnitude (ascending); the last k are the top-k.
    expectedIndices = torch.sort(summed**2)[1]
    kVals = [10, 20, 30, 40, 50, 60, 70, 80, 90, 100]
    hVals = [0, 1.0]
    # NOTE(review): the original column sweep was dead code — it was
    # immediately overwritten by the single debug value below. Kept here
    # commented out for reference.
    # cVals = [50, 100, 1000, 10000]
    cVals = [20]
    for p, cols in enumerate(cVals):
        csvec_accuracy = np.zeros(len(kVals))
        topHCS_accuracy = np.zeros((len(hVals), len(kVals)))
        for k_i, k in enumerate(kVals):
            d, c, r, numBlocks = len(summed), cols, 15, 1
            #ipdb.set_trace()
            # Dense ground truth: exact top-k entries of the true sum,
            # zero everywhere else.
            expected = torch.zeros(len(summed), device=device)
            expected[expectedIndices[-k:].to(device)] = summed[expectedIndices[-k:].to(device)]
            assert summed.size() == vecs[0].size()

            # --- Baseline: one CSVec per worker, merged by sketch addition. ---
            w_0 = CSVec(d=d, c=c, r=r, numBlocks=numBlocks, device=device)
            w_0 += vecs[0]
            print("")
            w_1 = CSVec(d=d, c=c, r=r, numBlocks=numBlocks, device=device)
            w_1 += vecs[1]
            print("")
            w_2 = CSVec(d=d, c=c, r=r, numBlocks=numBlocks, device=device)
            w_2 += vecs[2]
            print("")
            w_3 = CSVec(d=d, c=c, r=r, numBlocks=numBlocks, device=device)
            w_3 += vecs[3]
            print("")
            workers_summed = w_0 + w_1 + w_2 + w_3
            result = workers_summed.unSketch(k)
            #csvec_accuracy = (expected == result).nonzero().numel() / k
            # Fraction of true top-k coordinates the sketch also reports as
            # non-zero (the elementwise product is non-zero only where both are).
            csvec_accuracy[k_i] = (expected[expectedIndices] * result[expectedIndices]).nonzero().numel() / k
            print("k = ", k)
            #print("num of non 0 elements in expected : ", expected[expectedIndices].nonzero().numel())
            #print("num of non 0 elements in result : ", result[expectedIndices].nonzero().numel())
            print('csvec accuracy :', csvec_accuracy)
            print('expected :', expected)
            print('result :', result)

            # --- TopHCS: repeat for each heap size h, given as a fraction of k. ---
            for h_i, h in enumerate(hVals):
                result = torch.zeros(len(summed), device=device)
                h_curr = int(h*k)
                w_0 = TopHCS(d=d, c=c, r=r, h=h_curr, numBlocks=numBlocks, device=device)
                w_0.store(vecs[0])
                print(w_0.topH)
                print("")
                w_1 = TopHCS(d=d, c=c, r=r, h=h_curr, numBlocks=numBlocks, device=device)
                w_1.store(vecs[1])
                # BUG FIX: the original printed w_0.topH after storing into each
                # of w_1/w_2/w_3 (copy-paste); print each worker's own heap.
                print(w_1.topH)
                print("")
                w_2 = TopHCS(d=d, c=c, r=r, h=h_curr, numBlocks=numBlocks, device=device)
                w_2.store(vecs[2])
                print(w_2.topH)
                print("")
                w_3 = TopHCS(d=d, c=c, r=r, h=h_curr, numBlocks=numBlocks, device=device)
                w_3.store(vecs[3])
                print(w_3.topH)
                print("")
                workers = [w_0, w_1, w_2, w_3]
                result = TopHCS.topKSum(workers, k)
                #topHCS_accuracy = (expected == result).nonzero().numel() / k
                #print("num of non 0 elements in topH result : ", result[expectedIndices].nonzero().numel())
                topHCS_accuracy[h_i, k_i] = (expected[expectedIndices] * result[expectedIndices]).nonzero().numel() / k
                print('result for h =', h_curr, " :", result)
            print('topHCS accuracy :', '\n', topHCS_accuracy)
<file_sep>/topHCS/old/outputs/basic_n_1000_h_0_numBlocks_1.py
]0;IPython: topHCS/topHCSd = 1002
cols = 500 k = 10 blocks = 1
k = 100
num of non 0 elements in expected : 100
num of non 0 elements in result : 100
csvec accuracy : [1. 0.95 1. 1. 0.98 0.98333333
1. 1. 0.98888889 1. ]
num of non 0 elements in topH result : 100
h_i, h_curr = 2 100
topHCS accuracy :
[[1. 1. 1. 1. 1. 1.
1. 1. 1. 1. ]
[1. 0.95 0.96666667 1. 0.96 0.96666667
1. 1. 0.98888889 1. ]
[1. 0.95 0.96666667 0.975 1. 0.98333333
1. 1. 0.98888889 1. ]]
d = 1002
cols = 1000 k = 10 blocks = 1
k = 10
num of non 0 elements in expected : 10
num of non 0 elements in result : 10
csvec accuracy : [1. 0. 0. 0. 0. 0. 0. 0. 0. 0.]
num of non 0 elements in topH result : 10
h_i, h_curr = 0 0
topHCS accuracy :
[[1. 0. 0. 0. 0. 0. 0. 0. 0. 0.]
[0. 0. 0. 0. 0. 0. 0. 0. 0. 0.]
[0. 0. 0. 0. 0. 0. 0. 0. 0. 0.]]
num of non 0 elements in topH result : 10
h_i, h_curr = 1 5
topHCS accuracy :
[[1. 0. 0. 0. 0. 0. 0. 0. 0. 0.]
[1. 0. 0. 0. 0. 0. 0. 0. 0. 0.]
[0. 0. 0. 0. 0. 0. 0. 0. 0. 0.]]
num of non 0 elements in topH result : 10
h_i, h_curr = 2 10
topHCS accuracy :
[[1. 0. 0. 0. 0. 0. 0. 0. 0. 0.]
[1. 0. 0. 0. 0. 0. 0. 0. 0. 0.]
[1. 0. 0. 0. 0. 0. 0. 0. 0. 0.]]
d = 1002
cols = 1000 k = 20 blocks = 1
k = 20
num of non 0 elements in expected : 20
num of non 0 elements in result : 20
csvec accuracy : [1. 0.95 0. 0. 0. 0. 0. 0. 0. 0. ]
num of non 0 elements in topH result : 20
h_i, h_curr = 0 0
topHCS accuracy :
[[1. 1. 0. 0. 0. 0. 0. 0. 0. 0.]
[1. 0. 0. 0. 0. 0. 0. 0. 0. 0.]
[1. 0. 0. 0. 0. 0. 0. 0. 0. 0.]]
num of non 0 elements in topH result : 20
h_i, h_curr = 1 10
topHCS accuracy :
[[1. 1. 0. 0. 0. 0. 0. 0. 0. 0. ]
[1. 0.95 0. 0. 0. 0. 0. 0. 0. 0. ]
[1. 0. 0. 0. 0. 0. 0. 0. 0. 0. ]]
num of non 0 elements in topH result : 20
h_i, h_curr = 2 20
topHCS accuracy :
[[1. 1. 0. 0. 0. 0. 0. 0. 0. 0. ]
[1. 0.95 0. 0. 0. 0. 0. 0. 0. 0. ]
[1. 0.95 0. 0. 0. 0. 0. 0. 0. 0. ]]
d = 1002
cols = 1000 k = 30 blocks = 1
k = 30
num of non 0 elements in expected : 30
num of non 0 elements in result : 30
csvec accuracy : [1. 0.95 1. 0. 0. 0. 0. 0. 0. 0. ]
num of non 0 elements in topH result : 30
h_i, h_curr = 0 0
topHCS accuracy :
[[1. 1. 1. 0. 0. 0. 0. 0. 0. 0. ]
[1. 0.95 0. 0. 0. 0. 0. 0. 0. 0. ]
[1. 0.95 0. 0. 0. 0. 0. 0. 0. 0. ]]
num of non 0 elements in topH result : 30
h_i, h_curr = 1 15
topHCS accuracy :
[[1. 1. 1. 0. 0. 0. 0. 0. 0. 0. ]
[1. 0.95 1. 0. 0. 0. 0. 0. 0. 0. ]
[1. 0.95 0. 0. 0. 0. 0. 0. 0. 0. ]]
num of non 0 elements in topH result : 30
h_i, h_curr = 2 30
topHCS accuracy :
[[1. 1. 1. 0. 0. 0. 0. 0. 0. 0. ]
[1. 0.95 1. 0. 0. 0. 0. 0. 0. 0. ]
[1. 0.95 1. 0. 0. 0. 0. 0. 0. 0. ]]
d = 1002
cols = 1000 k = 40 blocks = 1
k = 40
num of non 0 elements in expected : 40
num of non 0 elements in result : 40
csvec accuracy : [1. 0.95 1. 1. 0. 0. 0. 0. 0. 0. ]
num of non 0 elements in topH result : 40
h_i, h_curr = 0 0
topHCS accuracy :
[[1. 1. 1. 1. 0. 0. 0. 0. 0. 0. ]
[1. 0.95 1. 0. 0. 0. 0. 0. 0. 0. ]
[1. 0.95 1. 0. 0. 0. 0. 0. 0. 0. ]]
num of non 0 elements in topH result : 40
h_i, h_curr = 1 20
topHCS accuracy :
[[1. 1. 1. 1. 0. 0. 0. 0. 0. 0. ]
[1. 0.95 1. 1. 0. 0. 0. 0. 0. 0. ]
[1. 0.95 1. 0. 0. 0. 0. 0. 0. 0. ]]
num of non 0 elements in topH result : 40
h_i, h_curr = 2 40
topHCS accuracy :
[[1. 1. 1. 1. 0. 0. 0. 0. 0. 0. ]
[1. 0.95 1. 1. 0. 0. 0. 0. 0. 0. ]
[1. 0.95 1. 1. 0. 0. 0. 0. 0. 0. ]]
d = 1002
cols = 1000 k = 50 blocks = 1
k = 50
num of non 0 elements in expected : 50
num of non 0 elements in result : 50
csvec accuracy : [1. 0.95 1. 1. 1. 0. 0. 0. 0. 0. ]
num of non 0 elements in topH result : 50
h_i, h_curr = 0 0
topHCS accuracy :
[[1. 1. 1. 1. 1. 0. 0. 0. 0. 0. ]
[1. 0.95 1. 1. 0. 0. 0. 0. 0. 0. ]
[1. 0.95 1. 1. 0. 0. 0. 0. 0. 0. ]]
num of non 0 elements in topH result : 50
h_i, h_curr = 1 25
topHCS accuracy :
[[1. 1. 1. 1. 1. 0. 0. 0. 0. 0. ]
[1. 0.95 1. 1. 1. 0. 0. 0. 0. 0. ]
[1. 0.95 1. 1. 0. 0. 0. 0. 0. 0. ]]
num of non 0 elements in topH result : 50
h_i, h_curr = 2 50
topHCS accuracy :
[[1. 1. 1. 1. 1. 0. 0. 0. 0. 0. ]
[1. 0.95 1. 1. 1. 0. 0. 0. 0. 0. ]
[1. 0.95 1. 1. 1. 0. 0. 0. 0. 0. ]]
d = 1002
cols = 1000 k = 60 blocks = 1
k = 60
num of non 0 elements in expected : 60
num of non 0 elements in result : 60
csvec accuracy : [1. 0.95 1. 1. 1. 1. 0. 0. 0. 0. ]
num of non 0 elements in topH result : 60
h_i, h_curr = 0 0
topHCS accuracy :
[[1. 1. 1. 1. 1. 1. 0. 0. 0. 0. ]
[1. 0.95 1. 1. 1. 0. 0. 0. 0. 0. ]
[1. 0.95 1. 1. 1. 0. 0. 0. 0. 0. ]]
num of non 0 elements in topH result : 60
h_i, h_curr = 1 30
topHCS accuracy :
[[1. 1. 1. 1. 1. 1. 0. 0. 0. 0. ]
[1. 0.95 1. 1. 1. 1. 0. 0. 0. 0. ]
[1. 0.95 1. 1. 1. 0. 0. 0. 0. 0. ]]
num of non 0 elements in topH result : 60
h_i, h_curr = 2 60
topHCS accuracy :
[[1. 1. 1. 1. 1. 1. 0. 0. 0. 0. ]
[1. 0.95 1. 1. 1. 1. 0. 0. 0. 0. ]
[1. 0.95 1. 1. 1. 1. 0. 0. 0. 0. ]]
d = 1002
cols = 1000 k = 70 blocks = 1
k = 70
num of non 0 elements in expected : 70
num of non 0 elements in result : 70
csvec accuracy : [1. 0.95 1. 1. 1. 1. 1. 0. 0. 0. ]
num of non 0 elements in topH result : 70
h_i, h_curr = 0 0
topHCS accuracy :
[[1. 1. 1. 1. 1. 1. 1. 0. 0. 0. ]
[1. 0.95 1. 1. 1. 1. 0. 0. 0. 0. ]
[1. 0.95 1. 1. 1. 1. 0. 0. 0. 0. ]]
num of non 0 elements in topH result : 70
h_i, h_curr = 1 35
topHCS accuracy :
[[1. 1. 1. 1. 1. 1. 1. 0. 0. 0. ]
[1. 0.95 1. 1. 1. 1. 1. 0. 0. 0. ]
[1. 0.95 1. 1. 1. 1. 0. 0. 0. 0. ]]
num of non 0 elements in topH result : 70
h_i, h_curr = 2 70
topHCS accuracy :
[[1. 1. 1. 1. 1. 1. 1. 0. 0. 0. ]
[1. 0.95 1. 1. 1. 1. 1. 0. 0. 0. ]
[1. 0.95 1. 1. 1. 1. 1. 0. 0. 0. ]]
d = 1002
cols = 1000 k = 80 blocks = 1
k = 80
num of non 0 elements in expected : 80
num of non 0 elements in result : 80
csvec accuracy : [1. 0.95 1. 1. 1. 1. 1. 1. 0. 0. ]
num of non 0 elements in topH result : 80
h_i, h_curr = 0 0
topHCS accuracy :
[[1. 1. 1. 1. 1. 1. 1. 1. 0. 0. ]
[1. 0.95 1. 1. 1. 1. 1. 0. 0. 0. ]
[1. 0.95 1. 1. 1. 1. 1. 0. 0. 0. ]]
num of non 0 elements in topH result : 80
h_i, h_curr = 1 40
topHCS accuracy :
[[1. 1. 1. 1. 1. 1. 1. 1. 0. 0. ]
[1. 0.95 1. 1. 1. 1. 1. 1. 0. 0. ]
[1. 0.95 1. 1. 1. 1. 1. 0. 0. 0. ]]
num of non 0 elements in topH result : 80
h_i, h_curr = 2 80
topHCS accuracy :
[[1. 1. 1. 1. 1. 1. 1. 1. 0. 0. ]
[1. 0.95 1. 1. 1. 1. 1. 1. 0. 0. ]
[1. 0.95 1. 1. 1. 1. 1. 1. 0. 0. ]]
d = 1002
cols = 1000 k = 90 blocks = 1
k = 90
num of non 0 elements in expected : 90
num of non 0 elements in result : 90
csvec accuracy : [1. 0.95 1. 1. 1. 1. 1. 1. 1. 0. ]
num of non 0 elements in topH result : 90
h_i, h_curr = 0 0
topHCS accuracy :
[[1. 1. 1. 1. 1. 1. 1. 1. 1. 0. ]
[1. 0.95 1. 1. 1. 1. 1. 1. 0. 0. ]
[1. 0.95 1. 1. 1. 1. 1. 1. 0. 0. ]]
num of non 0 elements in topH result : 90
h_i, h_curr = 1 45
topHCS accuracy :
[[1. 1. 1. 1. 1. 1. 1. 1. 1. 0. ]
[1. 0.95 1. 1. 1. 1. 1. 1. 1. 0. ]
[1. 0.95 1. 1. 1. 1. 1. 1. 0. 0. ]]
num of non 0 elements in topH result : 90
h_i, h_curr = 2 90
topHCS accuracy :
[[1. 1. 1. 1. 1. 1. 1. 1. 1. 0. ]
[1. 0.95 1. 1. 1. 1. 1. 1. 1. 0. ]
[1. 0.95 1. 1. 1. 1. 1. 1. 1. 0. ]]
d = 1002
cols = 1000 k = 100 blocks = 1
k = 100
num of non 0 elements in expected : 100
num of non 0 elements in result : 100
csvec accuracy : [1. 0.95 1. 1. 1. 1. 1. 1. 1. 1. ]
num of non 0 elements in topH result : 100
h_i, h_curr = 0 0
topHCS accuracy :
[[1. 1. 1. 1. 1. 1. 1. 1. 1. 1. ]
[1. 0.95 1. 1. 1. 1. 1. 1. 1. 0. ]
[1. 0.95 1. 1. 1. 1. 1. 1. 1. 0. ]]
num of non 0 elements in topH result : 100
h_i, h_curr = 1 50
topHCS accuracy :
[[1. 1. 1. 1. 1. 1. 1. 1. 1. 1. ]
[1. 0.95 1. 1. 1. 1. 1. 1. 1. 1. ]
[1. 0.95 1. 1. 1. 1. 1. 1. 1. 0. ]]
num of non 0 elements in topH result : 100
h_i, h_curr = 2 100
topHCS accuracy :
[[1. 1. 1. 1. 1. 1. 1. 1. 1. 1. ]
[1. 0.95 1. 1. 1. 1. 1. 1. 1. 1. ]
[1. 0.95 1. 1. 1. 1. 1. 1. 1. 1. ]]
d = 1002
cols = 10000 k = 10 blocks = 1
k = 10
num of non 0 elements in expected : 10
num of non 0 elements in result : 10
csvec accuracy : [1. 0. 0. 0. 0. 0. 0. 0. 0. 0.]
num of non 0 elements in topH result : 10
h_i, h_curr = 0 0
topHCS accuracy :
[[1. 0. 0. 0. 0. 0. 0. 0. 0. 0.]
[0. 0. 0. 0. 0. 0. 0. 0. 0. 0.]
[0. 0. 0. 0. 0. 0. 0. 0. 0. 0.]]
num of non 0 elements in topH result : 10
h_i, h_curr = 1 5
topHCS accuracy :
[[1. 0. 0. 0. 0. 0. 0. 0. 0. 0.]
[1. 0. 0. 0. 0. 0. 0. 0. 0. 0.]
[0. 0. 0. 0. 0. 0. 0. 0. 0. 0.]]
num of non 0 elements in topH result : 10
h_i, h_curr = 2 10
topHCS accuracy :
[[1. 0. 0. 0. 0. 0. 0. 0. 0. 0.]
[1. 0. 0. 0. 0. 0. 0. 0. 0. 0.]
[1. 0. 0. 0. 0. 0. 0. 0. 0. 0.]]
d = 1002
cols = 10000 k = 20 blocks = 1
k = 20
num of non 0 elements in expected : 20
num of non 0 elements in result : 20
csvec accuracy : [1. 1. 0. 0. 0. 0. 0. 0. 0. 0.]
num of non 0 elements in topH result : 20
h_i, h_curr = 0 0
topHCS accuracy :
[[1. 1. 0. 0. 0. 0. 0. 0. 0. 0.]
[1. 0. 0. 0. 0. 0. 0. 0. 0. 0.]
[1. 0. 0. 0. 0. 0. 0. 0. 0. 0.]]
num of non 0 elements in topH result : 20
h_i, h_curr = 1 10
topHCS accuracy :
[[1. 1. 0. 0. 0. 0. 0. 0. 0. 0.]
[1. 1. 0. 0. 0. 0. 0. 0. 0. 0.]
[1. 0. 0. 0. 0. 0. 0. 0. 0. 0.]]
num of non 0 elements in topH result : 20
h_i, h_curr = 2 20
topHCS accuracy :
[[1. 1. 0. 0. 0. 0. 0. 0. 0. 0.]
[1. 1. 0. 0. 0. 0. 0. 0. 0. 0.]
[1. 1. 0. 0. 0. 0. 0. 0. 0. 0.]]
d = 1002
cols = 10000 k = 30 blocks = 1
k = 30
num of non 0 elements in expected : 30
num of non 0 elements in result : 30
csvec accuracy : [1. 1. 1. 0. 0. 0. 0. 0. 0. 0.]
num of non 0 elements in topH result : 30
h_i, h_curr = 0 0
topHCS accuracy :
[[1. 1. 1. 0. 0. 0. 0. 0. 0. 0.]
[1. 1. 0. 0. 0. 0. 0. 0. 0. 0.]
[1. 1. 0. 0. 0. 0. 0. 0. 0. 0.]]
num of non 0 elements in topH result : 30
h_i, h_curr = 1 15
topHCS accuracy :
[[1. 1. 1. 0. 0. 0. 0. 0. 0. 0.]
[1. 1. 1. 0. 0. 0. 0. 0. 0. 0.]
[1. 1. 0. 0. 0. 0. 0. 0. 0. 0.]]
num of non 0 elements in topH result : 30
h_i, h_curr = 2 30
topHCS accuracy :
[[1. 1. 1. 0. 0. 0. 0. 0. 0. 0.]
[1. 1. 1. 0. 0. 0. 0. 0. 0. 0.]
[1. 1. 1. 0. 0. 0. 0. 0. 0. 0.]]
d = 1002
cols = 10000 k = 40 blocks = 1
k = 40
num of non 0 elements in expected : 40
num of non 0 elements in result : 40
csvec accuracy : [1. 1. 1. 1. 0. 0. 0. 0. 0. 0.]
num of non 0 elements in topH result : 40
h_i, h_curr = 0 0
topHCS accuracy :
[[1. 1. 1. 1. 0. 0. 0. 0. 0. 0.]
[1. 1. 1. 0. 0. 0. 0. 0. 0. 0.]
[1. 1. 1. 0. 0. 0. 0. 0. 0. 0.]]
num of non 0 elements in topH result : 40
h_i, h_curr = 1 20
topHCS accuracy :
[[1. 1. 1. 1. 0. 0. 0. 0. 0. 0.]
[1. 1. 1. 1. 0. 0. 0. 0. 0. 0.]
[1. 1. 1. 0. 0. 0. 0. 0. 0. 0.]]
num of non 0 elements in topH result : 40
h_i, h_curr = 2 40
topHCS accuracy :
[[1. 1. 1. 1. 0. 0. 0. 0. 0. 0.]
[1. 1. 1. 1. 0. 0. 0. 0. 0. 0.]
[1. 1. 1. 1. 0. 0. 0. 0. 0. 0.]]
d = 1002
cols = 10000 k = 50 blocks = 1
k = 50
num of non 0 elements in expected : 50
num of non 0 elements in result : 50
csvec accuracy : [1. 1. 1. 1. 1. 0. 0. 0. 0. 0.]
num of non 0 elements in topH result : 50
h_i, h_curr = 0 0
topHCS accuracy :
[[1. 1. 1. 1. 1. 0. 0. 0. 0. 0.]
[1. 1. 1. 1. 0. 0. 0. 0. 0. 0.]
[1. 1. 1. 1. 0. 0. 0. 0. 0. 0.]]
num of non 0 elements in topH result : 50
h_i, h_curr = 1 25
topHCS accuracy :
[[1. 1. 1. 1. 1. 0. 0. 0. 0. 0.]
[1. 1. 1. 1. 1. 0. 0. 0. 0. 0.]
[1. 1. 1. 1. 0. 0. 0. 0. 0. 0.]]
num of non 0 elements in topH result : 50
h_i, h_curr = 2 50
topHCS accuracy :
[[1. 1. 1. 1. 1. 0. 0. 0. 0. 0.]
[1. 1. 1. 1. 1. 0. 0. 0. 0. 0.]
[1. 1. 1. 1. 1. 0. 0. 0. 0. 0.]]
d = 1002
cols = 10000 k = 60 blocks = 1
k = 60
num of non 0 elements in expected : 60
num of non 0 elements in result : 60
csvec accuracy : [1. 1. 1. 1. 1. 1. 0. 0. 0. 0.]
num of non 0 elements in topH result : 60
h_i, h_curr = 0 0
topHCS accuracy :
[[1. 1. 1. 1. 1. 1. 0. 0. 0. 0.]
[1. 1. 1. 1. 1. 0. 0. 0. 0. 0.]
[1. 1. 1. 1. 1. 0. 0. 0. 0. 0.]]
num of non 0 elements in topH result : 60
h_i, h_curr = 1 30
topHCS accuracy :
[[1. 1. 1. 1. 1. 1. 0. 0. 0. 0.]
[1. 1. 1. 1. 1. 1. 0. 0. 0. 0.]
[1. 1. 1. 1. 1. 0. 0. 0. 0. 0.]]
num of non 0 elements in topH result : 60
h_i, h_curr = 2 60
topHCS accuracy :
[[1. 1. 1. 1. 1. 1. 0. 0. 0. 0.]
[1. 1. 1. 1. 1. 1. 0. 0. 0. 0.]
[1. 1. 1. 1. 1. 1. 0. 0. 0. 0.]]
d = 1002
cols = 10000 k = 70 blocks = 1
k = 70
num of non 0 elements in expected : 70
num of non 0 elements in result : 70
csvec accuracy : [1. 1. 1. 1. 1. 1. 1. 0. 0. 0.]
num of non 0 elements in topH result : 70
h_i, h_curr = 0 0
topHCS accuracy :
[[1. 1. 1. 1. 1. 1. 1. 0. 0. 0.]
[1. 1. 1. 1. 1. 1. 0. 0. 0. 0.]
[1. 1. 1. 1. 1. 1. 0. 0. 0. 0.]]
num of non 0 elements in topH result : 70
h_i, h_curr = 1 35
topHCS accuracy :
[[1. 1. 1. 1. 1. 1. 1. 0. 0. 0.]
[1. 1. 1. 1. 1. 1. 1. 0. 0. 0.]
[1. 1. 1. 1. 1. 1. 0. 0. 0. 0.]]
num of non 0 elements in topH result : 70
h_i, h_curr = 2 70
topHCS accuracy :
[[1. 1. 1. 1. 1. 1. 1. 0. 0. 0.]
[1. 1. 1. 1. 1. 1. 1. 0. 0. 0.]
[1. 1. 1. 1. 1. 1. 1. 0. 0. 0.]]
d = 1002
cols = 10000 k = 80 blocks = 1
k = 80
num of non 0 elements in expected : 80
num of non 0 elements in result : 80
csvec accuracy : [1. 1. 1. 1. 1. 1. 1. 1. 0. 0.]
num of non 0 elements in topH result : 80
h_i, h_curr = 0 0
topHCS accuracy :
[[1. 1. 1. 1. 1. 1. 1. 1. 0. 0.]
[1. 1. 1. 1. 1. 1. 1. 0. 0. 0.]
[1. 1. 1. 1. 1. 1. 1. 0. 0. 0.]]
num of non 0 elements in topH result : 80
h_i, h_curr = 1 40
topHCS accuracy :
[[1. 1. 1. 1. 1. 1. 1. 1. 0. 0.]
[1. 1. 1. 1. 1. 1. 1. 1. 0. 0.]
[1. 1. 1. 1. 1. 1. 1. 0. 0. 0.]]
num of non 0 elements in topH result : 80
h_i, h_curr = 2 80
topHCS accuracy :
[[1. 1. 1. 1. 1. 1. 1. 1. 0. 0.]
[1. 1. 1. 1. 1. 1. 1. 1. 0. 0.]
[1. 1. 1. 1. 1. 1. 1. 1. 0. 0.]]
d = 1002
cols = 10000 k = 90 blocks = 1
k = 90
num of non 0 elements in expected : 90
num of non 0 elements in result : 90
csvec accuracy : [1. 1. 1. 1. 1. 1. 1. 1. 1. 0.]
num of non 0 elements in topH result : 90
h_i, h_curr = 0 0
topHCS accuracy :
[[1. 1. 1. 1. 1. 1. 1. 1. 1. 0.]
[1. 1. 1. 1. 1. 1. 1. 1. 0. 0.]
[1. 1. 1. 1. 1. 1. 1. 1. 0. 0.]]
num of non 0 elements in topH result : 90
h_i, h_curr = 1 45
topHCS accuracy :
[[1. 1. 1. 1. 1. 1. 1. 1. 1. 0.]
[1. 1. 1. 1. 1. 1. 1. 1. 1. 0.]
[1. 1. 1. 1. 1. 1. 1. 1. 0. 0.]]
num of non 0 elements in topH result : 90
h_i, h_curr = 2 90
topHCS accuracy :
[[1. 1. 1. 1. 1. 1. 1. 1. 1. 0.]
[1. 1. 1. 1. 1. 1. 1. 1. 1. 0.]
[1. 1. 1. 1. 1. 1. 1. 1. 1. 0.]]
d = 1002
cols = 10000 k = 100 blocks = 1
k = 100
num of non 0 elements in expected : 100
num of non 0 elements in result : 100
csvec accuracy : [1. 1. 1. 1. 1. 1. 1. 1. 1. 1.]
num of non 0 elements in topH result : 100
h_i, h_curr = 0 0
topHCS accuracy :
[[1. 1. 1. 1. 1. 1. 1. 1. 1. 1.]
[1. 1. 1. 1. 1. 1. 1. 1. 1. 0.]
[1. 1. 1. 1. 1. 1. 1. 1. 1. 0.]]
num of non 0 elements in topH result : 100
h_i, h_curr = 1 50
topHCS accuracy :
[[1. 1. 1. 1. 1. 1. 1. 1. 1. 1.]
[1. 1. 1. 1. 1. 1. 1. 1. 1. 1.]
[1. 1. 1. 1. 1. 1. 1. 1. 1. 0.]]
num of non 0 elements in topH result : 100
h_i, h_curr = 2 100
topHCS accuracy :
[[1. 1. 1. 1. 1. 1. 1. 1. 1. 1.]
[1. 1. 1. 1. 1. 1. 1. 1. 1. 1.]
[1. 1. 1. 1. 1. 1. 1. 1. 1. 1.]]
d = 1002
cols = 100000 k = 10 blocks = 1
k = 10
num of non 0 elements in expected : 10
num of non 0 elements in result : 10
csvec accuracy : [1. 0. 0. 0. 0. 0. 0. 0. 0. 0.]
num of non 0 elements in topH result : 10
h_i, h_curr = 0 0
topHCS accuracy :
[[1. 0. 0. 0. 0. 0. 0. 0. 0. 0.]
[0. 0. 0. 0. 0. 0. 0. 0. 0. 0.]
[0. 0. 0. 0. 0. 0. 0. 0. 0. 0.]]
num of non 0 elements in topH result : 10
h_i, h_curr = 1 5
topHCS accuracy :
[[1. 0. 0. 0. 0. 0. 0. 0. 0. 0.]
[1. 0. 0. 0. 0. 0. 0. 0. 0. 0.]
[0. 0. 0. 0. 0. 0. 0. 0. 0. 0.]]
num of non 0 elements in topH result : 10
h_i, h_curr = 2 10
topHCS accuracy :
[[1. 0. 0. 0. 0. 0. 0. 0. 0. 0.]
[1. 0. 0. 0. 0. 0. 0. 0. 0. 0.]
[1. 0. 0. 0. 0. 0. 0. 0. 0. 0.]]
d = 1002
cols = 100000 k = 20 blocks = 1
k = 20
num of non 0 elements in expected : 20
num of non 0 elements in result : 20
csvec accuracy : [1. 1. 0. 0. 0. 0. 0. 0. 0. 0.]
num of non 0 elements in topH result : 20
h_i, h_curr = 0 0
topHCS accuracy :
[[1. 1. 0. 0. 0. 0. 0. 0. 0. 0.]
[1. 0. 0. 0. 0. 0. 0. 0. 0. 0.]
[1. 0. 0. 0. 0. 0. 0. 0. 0. 0.]]
num of non 0 elements in topH result : 20
h_i, h_curr = 1 10
topHCS accuracy :
[[1. 1. 0. 0. 0. 0. 0. 0. 0. 0.]
[1. 1. 0. 0. 0. 0. 0. 0. 0. 0.]
[1. 0. 0. 0. 0. 0. 0. 0. 0. 0.]]
num of non 0 elements in topH result : 20
h_i, h_curr = 2 20
topHCS accuracy :
[[1. 1. 0. 0. 0. 0. 0. 0. 0. 0.]
[1. 1. 0. 0. 0. 0. 0. 0. 0. 0.]
[1. 1. 0. 0. 0. 0. 0. 0. 0. 0.]]
d = 1002
cols = 100000 k = 30 blocks = 1
k = 30
num of non 0 elements in expected : 30
num of non 0 elements in result : 30
csvec accuracy : [1. 1. 1. 0. 0. 0. 0. 0. 0. 0.]
num of non 0 elements in topH result : 30
h_i, h_curr = 0 0
topHCS accuracy :
[[1. 1. 1. 0. 0. 0. 0. 0. 0. 0.]
[1. 1. 0. 0. 0. 0. 0. 0. 0. 0.]
[1. 1. 0. 0. 0. 0. 0. 0. 0. 0.]]
num of non 0 elements in topH result : 30
h_i, h_curr = 1 15
topHCS accuracy :
[[1. 1. 1. 0. 0. 0. 0. 0. 0. 0.]
[1. 1. 1. 0. 0. 0. 0. 0. 0. 0.]
[1. 1. 0. 0. 0. 0. 0. 0. 0. 0.]]
num of non 0 elements in topH result : 30
h_i, h_curr = 2 30
topHCS accuracy :
[[1. 1. 1. 0. 0. 0. 0. 0. 0. 0.]
[1. 1. 1. 0. 0. 0. 0. 0. 0. 0.]
[1. 1. 1. 0. 0. 0. 0. 0. 0. 0.]]
d = 1002
cols = 100000 k = 40 blocks = 1
k = 40
num of non 0 elements in expected : 40
num of non 0 elements in result : 40
csvec accuracy : [1. 1. 1. 1. 0. 0. 0. 0. 0. 0.]
num of non 0 elements in topH result : 40
h_i, h_curr = 0 0
topHCS accuracy :
[[1. 1. 1. 1. 0. 0. 0. 0. 0. 0.]
[1. 1. 1. 0. 0. 0. 0. 0. 0. 0.]
[1. 1. 1. 0. 0. 0. 0. 0. 0. 0.]]
num of non 0 elements in topH result : 40
h_i, h_curr = 1 20
topHCS accuracy :
[[1. 1. 1. 1. 0. 0. 0. 0. 0. 0.]
[1. 1. 1. 1. 0. 0. 0. 0. 0. 0.]
[1. 1. 1. 0. 0. 0. 0. 0. 0. 0.]]
num of non 0 elements in topH result : 40
h_i, h_curr = 2 40
topHCS accuracy :
[[1. 1. 1. 1. 0. 0. 0. 0. 0. 0.]
[1. 1. 1. 1. 0. 0. 0. 0. 0. 0.]
[1. 1. 1. 1. 0. 0. 0. 0. 0. 0.]]
d = 1002
cols = 100000 k = 50 blocks = 1
k = 50
num of non 0 elements in expected : 50
num of non 0 elements in result : 50
csvec accuracy : [1. 1. 1. 1. 1. 0. 0. 0. 0. 0.]
num of non 0 elements in topH result : 50
h_i, h_curr = 0 0
topHCS accuracy :
[[1. 1. 1. 1. 1. 0. 0. 0. 0. 0.]
[1. 1. 1. 1. 0. 0. 0. 0. 0. 0.]
[1. 1. 1. 1. 0. 0. 0. 0. 0. 0.]]
num of non 0 elements in topH result : 50
h_i, h_curr = 1 25
topHCS accuracy :
[[1. 1. 1. 1. 1. 0. 0. 0. 0. 0.]
[1. 1. 1. 1. 1. 0. 0. 0. 0. 0.]
[1. 1. 1. 1. 0. 0. 0. 0. 0. 0.]]
num of non 0 elements in topH result : 50
h_i, h_curr = 2 50
topHCS accuracy :
[[1. 1. 1. 1. 1. 0. 0. 0. 0. 0.]
[1. 1. 1. 1. 1. 0. 0. 0. 0. 0.]
[1. 1. 1. 1. 1. 0. 0. 0. 0. 0.]]
d = 1002
cols = 100000 k = 60 blocks = 1
k = 60
num of non 0 elements in expected : 60
num of non 0 elements in result : 60
csvec accuracy : [1. 1. 1. 1. 1. 1. 0. 0. 0. 0.]
num of non 0 elements in topH result : 60
h_i, h_curr = 0 0
topHCS accuracy :
[[1. 1. 1. 1. 1. 1. 0. 0. 0. 0.]
[1. 1. 1. 1. 1. 0. 0. 0. 0. 0.]
[1. 1. 1. 1. 1. 0. 0. 0. 0. 0.]]
num of non 0 elements in topH result : 60
h_i, h_curr = 1 30
topHCS accuracy :
[[1. 1. 1. 1. 1. 1. 0. 0. 0. 0.]
[1. 1. 1. 1. 1. 1. 0. 0. 0. 0.]
[1. 1. 1. 1. 1. 0. 0. 0. 0. 0.]]
num of non 0 elements in topH result : 60
h_i, h_curr = 2 60
topHCS accuracy :
[[1. 1. 1. 1. 1. 1. 0. 0. 0. 0.]
[1. 1. 1. 1. 1. 1. 0. 0. 0. 0.]
[1. 1. 1. 1. 1. 1. 0. 0. 0. 0.]]
d = 1002
cols = 100000 k = 70 blocks = 1
k = 70
num of non 0 elements in expected : 70
num of non 0 elements in result : 70
csvec accuracy : [1. 1. 1. 1. 1. 1. 1. 0. 0. 0.]
num of non 0 elements in topH result : 70
h_i, h_curr = 0 0
topHCS accuracy :
[[1. 1. 1. 1. 1. 1. 1. 0. 0. 0.]
[1. 1. 1. 1. 1. 1. 0. 0. 0. 0.]
[1. 1. 1. 1. 1. 1. 0. 0. 0. 0.]]
num of non 0 elements in topH result : 70
h_i, h_curr = 1 35
topHCS accuracy :
[[1. 1. 1. 1. 1. 1. 1. 0. 0. 0.]
[1. 1. 1. 1. 1. 1. 1. 0. 0. 0.]
[1. 1. 1. 1. 1. 1. 0. 0. 0. 0.]]
num of non 0 elements in topH result : 70
h_i, h_curr = 2 70
topHCS accuracy :
[[1. 1. 1. 1. 1. 1. 1. 0. 0. 0.]
[1. 1. 1. 1. 1. 1. 1. 0. 0. 0.]
[1. 1. 1. 1. 1. 1. 1. 0. 0. 0.]]
d = 1002
cols = 100000 k = 80 blocks = 1
k = 80
num of non 0 elements in expected : 80
num of non 0 elements in result : 80
csvec accuracy : [1. 1. 1. 1. 1. 1. 1. 1. 0. 0.]
num of non 0 elements in topH result : 80
h_i, h_curr = 0 0
topHCS accuracy :
[[1. 1. 1. 1. 1. 1. 1. 1. 0. 0.]
[1. 1. 1. 1. 1. 1. 1. 0. 0. 0.]
[1. 1. 1. 1. 1. 1. 1. 0. 0. 0.]]
num of non 0 elements in topH result : 80
h_i, h_curr = 1 40
topHCS accuracy :
[[1. 1. 1. 1. 1. 1. 1. 1. 0. 0.]
[1. 1. 1. 1. 1. 1. 1. 1. 0. 0.]
[1. 1. 1. 1. 1. 1. 1. 0. 0. 0.]]
num of non 0 elements in topH result : 80
h_i, h_curr = 2 80
topHCS accuracy :
[[1. 1. 1. 1. 1. 1. 1. 1. 0. 0.]
[1. 1. 1. 1. 1. 1. 1. 1. 0. 0.]
[1. 1. 1. 1. 1. 1. 1. 1. 0. 0.]]
d = 1002
cols = 100000 k = 90 blocks = 1
k = 90
num of non 0 elements in expected : 90
num of non 0 elements in result : 90
csvec accuracy : [1. 1. 1. 1. 1. 1. 1. 1. 1. 0.]
num of non 0 elements in topH result : 90
h_i, h_curr = 0 0
topHCS accuracy :
[[1. 1. 1. 1. 1. 1. 1. 1. 1. 0.]
[1. 1. 1. 1. 1. 1. 1. 1. 0. 0.]
[1. 1. 1. 1. 1. 1. 1. 1. 0. 0.]]
num of non 0 elements in topH result : 90
h_i, h_curr = 1 45
topHCS accuracy :
[[1. 1. 1. 1. 1. 1. 1. 1. 1. 0.]
[1. 1. 1. 1. 1. 1. 1. 1. 1. 0.]
[1. 1. 1. 1. 1. 1. 1. 1. 0. 0.]]
num of non 0 elements in topH result : 90
h_i, h_curr = 2 90
topHCS accuracy :
[[1. 1. 1. 1. 1. 1. 1. 1. 1. 0.]
[1. 1. 1. 1. 1. 1. 1. 1. 1. 0.]
[1. 1. 1. 1. 1. 1. 1. 1. 1. 0.]]
d = 1002
cols = 100000 k = 100 blocks = 1
k = 100
num of non 0 elements in expected : 100
num of non 0 elements in result : 100
csvec accuracy : [1. 1. 1. 1. 1. 1. 1. 1. 1. 1.]
num of non 0 elements in topH result : 100
h_i, h_curr = 0 0
topHCS accuracy :
[[1. 1. 1. 1. 1. 1. 1. 1. 1. 1.]
[1. 1. 1. 1. 1. 1. 1. 1. 1. 0.]
[1. 1. 1. 1. 1. 1. 1. 1. 1. 0.]]
num of non 0 elements in topH result : 100
h_i, h_curr = 1 50
topHCS accuracy :
[[1. 1. 1. 1. 1. 1. 1. 1. 1. 1.]
[1. 1. 1. 1. 1. 1. 1. 1. 1. 1.]
[1. 1. 1. 1. 1. 1. 1. 1. 1. 0.]]
num of non 0 elements in topH result : 100
h_i, h_curr = 2 100
topHCS accuracy :
[[1. 1. 1. 1. 1. 1. 1. 1. 1. 1.]
[1. 1. 1. 1. 1. 1. 1. 1. 1. 1.]
[1. 1. 1. 1. 1. 1. 1. 1. 1. 1.]]
d = 1002
cols = 500 k = 10 blocks = 1
k = 10
num of non 0 elements in expected : 10
num of non 0 elements in result : 10
csvec accuracy : [1. 0. 0. 0. 0. 0. 0. 0. 0. 0.]
num of non 0 elements in topH result : 10
h_i, h_curr = 0 0
topHCS accuracy :
[[1. 0. 0. 0. 0. 0. 0. 0. 0. 0.]
[0. 0. 0. 0. 0. 0. 0. 0. 0. 0.]
[0. 0. 0. 0. 0. 0. 0. 0. 0. 0.]]
num of non 0 elements in topH result : 10
h_i, h_curr = 1 5
topHCS accuracy :
[[1. 0. 0. 0. 0. 0. 0. 0. 0. 0.]
[1. 0. 0. 0. 0. 0. 0. 0. 0. 0.]
[0. 0. 0. 0. 0. 0. 0. 0. 0. 0.]]
num of non 0 elements in topH result : 10
h_i, h_curr = 2 10
topHCS accuracy :
[[1. 0. 0. 0. 0. 0. 0. 0. 0. 0.]
[1. 0. 0. 0. 0. 0. 0. 0. 0. 0.]
[1. 0. 0. 0. 0. 0. 0. 0. 0. 0.]]
d = 1002
cols = 500 k = 20 blocks = 1
k = 20
num of non 0 elements in expected : 20
num of non 0 elements in result : 20
csvec accuracy : [1. 1. 0. 0. 0. 0. 0. 0. 0. 0.]
num of non 0 elements in topH result : 20
h_i, h_curr = 0 0
topHCS accuracy :
[[1. 1. 0. 0. 0. 0. 0. 0. 0. 0.]
[1. 0. 0. 0. 0. 0. 0. 0. 0. 0.]
[1. 0. 0. 0. 0. 0. 0. 0. 0. 0.]]
num of non 0 elements in topH result : 20
h_i, h_curr = 1 10
topHCS accuracy :
[[1. 1. 0. 0. 0. 0. 0. 0. 0. 0.]
[1. 1. 0. 0. 0. 0. 0. 0. 0. 0.]
[1. 0. 0. 0. 0. 0. 0. 0. 0. 0.]]
num of non 0 elements in topH result : 20
h_i, h_curr = 2 20
topHCS accuracy :
[[1. 1. 0. 0. 0. 0. 0. 0. 0. 0.]
[1. 1. 0. 0. 0. 0. 0. 0. 0. 0.]
[1. 1. 0. 0. 0. 0. 0. 0. 0. 0.]]
d = 1002
cols = 500 k = 30 blocks = 1
k = 30
num of non 0 elements in expected : 30
num of non 0 elements in result : 30
csvec accuracy : [1. 1. 1. 0. 0. 0. 0. 0. 0. 0.]
num of non 0 elements in topH result : 30
h_i, h_curr = 0 0
topHCS accuracy :
[[1. 1. 1. 0. 0. 0. 0. 0. 0. 0.]
[1. 1. 0. 0. 0. 0. 0. 0. 0. 0.]
[1. 1. 0. 0. 0. 0. 0. 0. 0. 0.]]
num of non 0 elements in topH result : 30
h_i, h_curr = 1 15
topHCS accuracy :
[[1. 1. 1. 0. 0. 0. 0. 0. 0. 0.]
[1. 1. 1. 0. 0. 0. 0. 0. 0. 0.]
[1. 1. 0. 0. 0. 0. 0. 0. 0. 0.]]
num of non 0 elements in topH result : 30
h_i, h_curr = 2 30
topHCS accuracy :
[[1. 1. 1. 0. 0. 0. 0. 0. 0. 0.]
[1. 1. 1. 0. 0. 0. 0. 0. 0. 0.]
[1. 1. 1. 0. 0. 0. 0. 0. 0. 0.]]
d = 1002
cols = 500 k = 40 blocks = 1
k = 40
num of non 0 elements in expected : 40
num of non 0 elements in result : 40
csvec accuracy : [1. 1. 1. 0.975 0. 0. 0. 0. 0. 0. ]
num of non 0 elements in topH result : 40
h_i, h_curr = 0 0
topHCS accuracy :
[[1. 1. 1. 1. 0. 0. 0. 0. 0. 0.]
[1. 1. 1. 0. 0. 0. 0. 0. 0. 0.]
[1. 1. 1. 0. 0. 0. 0. 0. 0. 0.]]
num of non 0 elements in topH result : 40
h_i, h_curr = 1 20
topHCS accuracy :
[[1. 1. 1. 1. 0. 0. 0. 0. 0. 0. ]
[1. 1. 1. 0.975 0. 0. 0. 0. 0. 0. ]
[1. 1. 1. 0. 0. 0. 0. 0. 0. 0. ]]
num of non 0 elements in topH result : 40
h_i, h_curr = 2 40
topHCS accuracy :
[[1. 1. 1. 1. 0. 0. 0. 0. 0. 0. ]
[1. 1. 1. 0.975 0. 0. 0. 0. 0. 0. ]
[1. 1. 1. 1. 0. 0. 0. 0. 0. 0. ]]
d = 1002
cols = 500 k = 50 blocks = 1
k = 50
num of non 0 elements in expected : 50
num of non 0 elements in result : 50
csvec accuracy : [1. 1. 1. 0.975 1. 0. 0. 0. 0. 0. ]
num of non 0 elements in topH result : 50
h_i, h_curr = 0 0
topHCS accuracy :
[[1. 1. 1. 1. 1. 0. 0. 0. 0. 0. ]
[1. 1. 1. 0.975 0. 0. 0. 0. 0. 0. ]
[1. 1. 1. 1. 0. 0. 0. 0. 0. 0. ]]
num of non 0 elements in topH result : 50
h_i, h_curr = 1 25
topHCS accuracy :
[[1. 1. 1. 1. 1. 0. 0. 0. 0. 0. ]
[1. 1. 1. 0.975 1. 0. 0. 0. 0. 0. ]
[1. 1. 1. 1. 0. 0. 0. 0. 0. 0. ]]
num of non 0 elements in topH result : 50
h_i, h_curr = 2 50
topHCS accuracy :
[[1. 1. 1. 1. 1. 0. 0. 0. 0. 0. ]
[1. 1. 1. 0.975 1. 0. 0. 0. 0. 0. ]
[1. 1. 1. 1. 1. 0. 0. 0. 0. 0. ]]
d = 1002
cols = 500 k = 60 blocks = 1
k = 60
num of non 0 elements in expected : 60
num of non 0 elements in result : 60
csvec accuracy : [1. 1. 1. 0.975 1. 0.98333333
0. 0. 0. 0. ]
num of non 0 elements in topH result : 60
h_i, h_curr = 0 0
topHCS accuracy :
[[1. 1. 1. 1. 1. 1. 0. 0. 0. 0. ]
[1. 1. 1. 0.975 1. 0. 0. 0. 0. 0. ]
[1. 1. 1. 1. 1. 0. 0. 0. 0. 0. ]]
num of non 0 elements in topH result : 60
h_i, h_curr = 1 30
topHCS accuracy :
[[1. 1. 1. 1. 1. 1.
0. 0. 0. 0. ]
[1. 1. 1. 0.975 1. 0.98333333
0. 0. 0. 0. ]
[1. 1. 1. 1. 1. 0.
0. 0. 0. 0. ]]
num of non 0 elements in topH result : 60
h_i, h_curr = 2 60
topHCS accuracy :
[[1. 1. 1. 1. 1. 1.
0. 0. 0. 0. ]
[1. 1. 1. 0.975 1. 0.98333333
0. 0. 0. 0. ]
[1. 1. 1. 1. 1. 0.98333333
0. 0. 0. 0. ]]
d = 1002
cols = 500 k = 70 blocks = 1
k = 70
num of non 0 elements in expected : 70
num of non 0 elements in result : 70
csvec accuracy : [1. 1. 1. 0.975 1. 0.98333333
1. 0. 0. 0. ]
num of non 0 elements in topH result : 70
h_i, h_curr = 0 0
topHCS accuracy :
[[1. 1. 1. 1. 1. 1.
1. 0. 0. 0. ]
[1. 1. 1. 0.975 1. 0.98333333
0. 0. 0. 0. ]
[1. 1. 1. 1. 1. 0.98333333
0. 0. 0. 0. ]]
num of non 0 elements in topH result : 70
h_i, h_curr = 1 35
topHCS accuracy :
[[1. 1. 1. 1. 1. 1.
1. 0. 0. 0. ]
[1. 1. 1. 0.975 1. 0.98333333
1. 0. 0. 0. ]
[1. 1. 1. 1. 1. 0.98333333
0. 0. 0. 0. ]]
num of non 0 elements in topH result : 70
h_i, h_curr = 2 70
topHCS accuracy :
[[1. 1. 1. 1. 1. 1.
1. 0. 0. 0. ]
[1. 1. 1. 0.975 1. 0.98333333
1. 0. 0. 0. ]
[1. 1. 1. 1. 1. 0.98333333
0.98571429 0. 0. 0. ]]
d = 1002
cols = 500 k = 80 blocks = 1
k = 80
num of non 0 elements in expected : 80
num of non 0 elements in result : 80
csvec accuracy : [1. 1. 1. 0.975 1. 0.98333333
1. 0.975 0. 0. ]
num of non 0 elements in topH result : 80
h_i, h_curr = 0 0
topHCS accuracy :
[[1. 1. 1. 1. 1. 1.
1. 1. 0. 0. ]
[1. 1. 1. 0.975 1. 0.98333333
1. 0. 0. 0. ]
[1. 1. 1. 1. 1. 0.98333333
0.98571429 0. 0. 0. ]]
num of non 0 elements in topH result : 80
h_i, h_curr = 1 40
topHCS accuracy :
[[1. 1. 1. 1. 1. 1.
1. 1. 0. 0. ]
[1. 1. 1. 0.975 1. 0.98333333
1. 0.975 0. 0. ]
[1. 1. 1. 1. 1. 0.98333333
0.98571429 0. 0. 0. ]]
num of non 0 elements in topH result : 80
h_i, h_curr = 2 80
topHCS accuracy :
[[1. 1. 1. 1. 1. 1.
1. 1. 0. 0. ]
[1. 1. 1. 0.975 1. 0.98333333
1. 0.975 0. 0. ]
[1. 1. 1. 1. 1. 0.98333333
0.98571429 0.9875 0. 0. ]]
d = 1002
cols = 500 k = 90 blocks = 1
k = 90
num of non 0 elements in expected : 90
num of non 0 elements in result : 90
csvec accuracy : [1. 1. 1. 0.975 1. 0.98333333
1. 0.975 0.98888889 0. ]
num of non 0 elements in topH result : 90
h_i, h_curr = 0 0
topHCS accuracy :
[[1. 1. 1. 1. 1. 1.
1. 1. 1. 0. ]
[1. 1. 1. 0.975 1. 0.98333333
1. 0.975 0. 0. ]
[1. 1. 1. 1. 1. 0.98333333
0.98571429 0.9875 0. 0. ]]
num of non 0 elements in topH result : 90
h_i, h_curr = 1 45
topHCS accuracy :
[[1. 1. 1. 1. 1. 1.
1. 1. 1. 0. ]
[1. 1. 1. 0.975 1. 0.98333333
1. 0.975 0.98888889 0. ]
[1. 1. 1. 1. 1. 0.98333333
0.98571429 0.9875 0. 0. ]]
num of non 0 elements in topH result : 90
h_i, h_curr = 2 90
topHCS accuracy :
[[1. 1. 1. 1. 1. 1.
1. 1. 1. 0. ]
[1. 1. 1. 0.975 1. 0.98333333
1. 0.975 0.98888889 0. ]
[1. 1. 1. 1. 1. 0.98333333
0.98571429 0.9875 1. 0. ]]
d = 1002
cols = 500 k = 100 blocks = 1
k = 100
num of non 0 elements in expected : 100
num of non 0 elements in result : 100
csvec accuracy : [1. 1. 1. 0.975 1. 0.98333333
1. 0.975 0.98888889 0.96 ]
num of non 0 elements in topH result : 100
h_i, h_curr = 2 100
topHCS accuracy :
[[1. 1. 1. 1. 1. 1.
1. 1. 1. 1. ]
[1. 1. 1. 0.975 1. 0.98333333
1. 0.975 0.98888889 0.98 ]
[1. 1. 1. 1. 1. 0.98333333
0.98571429 0.9875 1. 0.98 ]]
d = 1002
cols = 1000 k = 10 blocks = 1
k = 10
num of non 0 elements in expected : 10
num of non 0 elements in result : 10
csvec accuracy : [1. 0. 0. 0. 0. 0. 0. 0. 0. 0.]
num of non 0 elements in topH result : 10
h_i, h_curr = 0 0
topHCS accuracy :
[[1. 0. 0. 0. 0. 0. 0. 0. 0. 0.]
[0. 0. 0. 0. 0. 0. 0. 0. 0. 0.]
[0. 0. 0. 0. 0. 0. 0. 0. 0. 0.]]
num of non 0 elements in topH result : 10
h_i, h_curr = 1 5
topHCS accuracy :
[[1. 0. 0. 0. 0. 0. 0. 0. 0. 0.]
[1. 0. 0. 0. 0. 0. 0. 0. 0. 0.]
[0. 0. 0. 0. 0. 0. 0. 0. 0. 0.]]
num of non 0 elements in topH result : 10
h_i, h_curr = 2 10
topHCS accuracy :
[[1. 0. 0. 0. 0. 0. 0. 0. 0. 0.]
[1. 0. 0. 0. 0. 0. 0. 0. 0. 0.]
[1. 0. 0. 0. 0. 0. 0. 0. 0. 0.]]
d = 1002
cols = 1000 k = 20 blocks = 1
k = 20
num of non 0 elements in expected : 20
num of non 0 elements in result : 20
csvec accuracy : [1. 1. 0. 0. 0. 0. 0. 0. 0. 0.]
num of non 0 elements in topH result : 20
h_i, h_curr = 0 0
topHCS accuracy :
[[1. 1. 0. 0. 0. 0. 0. 0. 0. 0.]
[1. 0. 0. 0. 0. 0. 0. 0. 0. 0.]
[1. 0. 0. 0. 0. 0. 0. 0. 0. 0.]]
num of non 0 elements in topH result : 20
h_i, h_curr = 1 10
topHCS accuracy :
[[1. 1. 0. 0. 0. 0. 0. 0. 0. 0.]
[1. 1. 0. 0. 0. 0. 0. 0. 0. 0.]
[1. 0. 0. 0. 0. 0. 0. 0. 0. 0.]]
num of non 0 elements in topH result : 20
h_i, h_curr = 2 20
topHCS accuracy :
[[1. 1. 0. 0. 0. 0. 0. 0. 0. 0.]
[1. 1. 0. 0. 0. 0. 0. 0. 0. 0.]
[1. 1. 0. 0. 0. 0. 0. 0. 0. 0.]]
d = 1002
cols = 1000 k = 30 blocks = 1
k = 30
num of non 0 elements in expected : 30
num of non 0 elements in result : 30
csvec accuracy : [1. 1. 1. 0. 0. 0. 0. 0. 0. 0.]
num of non 0 elements in topH result : 30
h_i, h_curr = 0 0
topHCS accuracy :
[[1. 1. 1. 0. 0. 0. 0. 0. 0. 0.]
[1. 1. 0. 0. 0. 0. 0. 0. 0. 0.]
[1. 1. 0. 0. 0. 0. 0. 0. 0. 0.]]
num of non 0 elements in topH result : 30
h_i, h_curr = 1 15
topHCS accuracy :
[[1. 1. 1. 0. 0. 0. 0. 0. 0. 0.]
[1. 1. 1. 0. 0. 0. 0. 0. 0. 0.]
[1. 1. 0. 0. 0. 0. 0. 0. 0. 0.]]
num of non 0 elements in topH result : 30
h_i, h_curr = 2 30
topHCS accuracy :
[[1. 1. 1. 0. 0. 0. 0. 0. 0. 0.]
[1. 1. 1. 0. 0. 0. 0. 0. 0. 0.]
[1. 1. 1. 0. 0. 0. 0. 0. 0. 0.]]
d = 1002
cols = 1000 k = 40 blocks = 1
k = 40
num of non 0 elements in expected : 40
num of non 0 elements in result : 40
csvec accuracy : [1. 1. 1. 1. 0. 0. 0. 0. 0. 0.]
num of non 0 elements in topH result : 40
h_i, h_curr = 0 0
topHCS accuracy :
[[1. 1. 1. 1. 0. 0. 0. 0. 0. 0.]
[1. 1. 1. 0. 0. 0. 0. 0. 0. 0.]
[1. 1. 1. 0. 0. 0. 0. 0. 0. 0.]]
num of non 0 elements in topH result : 40
h_i, h_curr = 1 20
topHCS accuracy :
[[1. 1. 1. 1. 0. 0. 0. 0. 0. 0.]
[1. 1. 1. 1. 0. 0. 0. 0. 0. 0.]
[1. 1. 1. 0. 0. 0. 0. 0. 0. 0.]]
num of non 0 elements in topH result : 40
h_i, h_curr = 2 40
topHCS accuracy :
[[1. 1. 1. 1. 0. 0. 0. 0. 0. 0.]
[1. 1. 1. 1. 0. 0. 0. 0. 0. 0.]
[1. 1. 1. 1. 0. 0. 0. 0. 0. 0.]]
d = 1002
cols = 1000 k = 50 blocks = 1
k = 50
num of non 0 elements in expected : 50
num of non 0 elements in result : 50
csvec accuracy : [1. 1. 1. 1. 1. 0. 0. 0. 0. 0.]
num of non 0 elements in topH result : 50
h_i, h_curr = 0 0
topHCS accuracy :
[[1. 1. 1. 1. 1. 0. 0. 0. 0. 0.]
[1. 1. 1. 1. 0. 0. 0. 0. 0. 0.]
[1. 1. 1. 1. 0. 0. 0. 0. 0. 0.]]
num of non 0 elements in topH result : 50
h_i, h_curr = 1 25
topHCS accuracy :
[[1. 1. 1. 1. 1. 0. 0. 0. 0. 0.]
[1. 1. 1. 1. 1. 0. 0. 0. 0. 0.]
[1. 1. 1. 1. 0. 0. 0. 0. 0. 0.]]
num of non 0 elements in topH result : 50
h_i, h_curr = 2 50
topHCS accuracy :
[[1. 1. 1. 1. 1. 0. 0. 0. 0. 0.]
[1. 1. 1. 1. 1. 0. 0. 0. 0. 0.]
[1. 1. 1. 1. 1. 0. 0. 0. 0. 0.]]
d = 1002
cols = 1000 k = 60 blocks = 1
k = 60
num of non 0 elements in expected : 60
num of non 0 elements in result : 60
csvec accuracy : [1. 1. 1. 1. 1. 1. 0. 0. 0. 0.]
num of non 0 elements in topH result : 60
h_i, h_curr = 0 0
topHCS accuracy :
[[1. 1. 1. 1. 1. 1. 0. 0. 0. 0.]
[1. 1. 1. 1. 1. 0. 0. 0. 0. 0.]
[1. 1. 1. 1. 1. 0. 0. 0. 0. 0.]]
num of non 0 elements in topH result : 60
h_i, h_curr = 1 30
topHCS accuracy :
[[1. 1. 1. 1. 1. 1. 0. 0. 0. 0.]
[1. 1. 1. 1. 1. 1. 0. 0. 0. 0.]
[1. 1. 1. 1. 1. 0. 0. 0. 0. 0.]]
num of non 0 elements in topH result : 60
h_i, h_curr = 2 60
topHCS accuracy :
[[1. 1. 1. 1. 1. 1. 0. 0. 0. 0.]
[1. 1. 1. 1. 1. 1. 0. 0. 0. 0.]
[1. 1. 1. 1. 1. 1. 0. 0. 0. 0.]]
d = 1002
cols = 1000 k = 70 blocks = 1
k = 70
num of non 0 elements in expected : 70
num of non 0 elements in result : 70
csvec accuracy : [1. 1. 1. 1. 1. 1. 1. 0. 0. 0.]
num of non 0 elements in topH result : 70
h_i, h_curr = 0 0
topHCS accuracy :
[[1. 1. 1. 1. 1. 1. 1. 0. 0. 0.]
[1. 1. 1. 1. 1. 1. 0. 0. 0. 0.]
[1. 1. 1. 1. 1. 1. 0. 0. 0. 0.]]
num of non 0 elements in topH result : 70
h_i, h_curr = 1 35
topHCS accuracy :
[[1. 1. 1. 1. 1. 1. 1. 0. 0. 0.]
[1. 1. 1. 1. 1. 1. 1. 0. 0. 0.]
[1. 1. 1. 1. 1. 1. 0. 0. 0. 0.]]
num of non 0 elements in topH result : 70
h_i, h_curr = 2 70
topHCS accuracy :
[[1. 1. 1. 1. 1. 1. 1. 0. 0. 0.]
[1. 1. 1. 1. 1. 1. 1. 0. 0. 0.]
[1. 1. 1. 1. 1. 1. 1. 0. 0. 0.]]
d = 1002
cols = 1000 k = 80 blocks = 1
k = 80
num of non 0 elements in expected : 80
num of non 0 elements in result : 80
csvec accuracy : [1. 1. 1. 1. 1. 1. 1. 1. 0. 0.]
num of non 0 elements in topH result : 80
h_i, h_curr = 0 0
topHCS accuracy :
[[1. 1. 1. 1. 1. 1. 1. 1. 0. 0.]
[1. 1. 1. 1. 1. 1. 1. 0. 0. 0.]
[1. 1. 1. 1. 1. 1. 1. 0. 0. 0.]]
num of non 0 elements in topH result : 80
h_i, h_curr = 1 40
topHCS accuracy :
[[1. 1. 1. 1. 1. 1. 1. 1. 0. 0. ]
[1. 1. 1. 1. 1. 1. 1. 0.9875 0. 0. ]
[1. 1. 1. 1. 1. 1. 1. 0. 0. 0. ]]
num of non 0 elements in topH result : 80
h_i, h_curr = 2 80
topHCS accuracy :
[[1. 1. 1. 1. 1. 1. 1. 1. 0. 0. ]
[1. 1. 1. 1. 1. 1. 1. 0.9875 0. 0. ]
[1. 1. 1. 1. 1. 1. 1. 1. 0. 0. ]]
d = 1002
cols = 1000 k = 90 blocks = 1
k = 90
num of non 0 elements in expected : 90
num of non 0 elements in result : 90
csvec accuracy : [1. 1. 1. 1. 1. 1. 1. 1. 1. 0.]
num of non 0 elements in topH result : 90
h_i, h_curr = 0 0
topHCS accuracy :
[[1. 1. 1. 1. 1. 1. 1. 1. 1. 0. ]
[1. 1. 1. 1. 1. 1. 1. 0.9875 0. 0. ]
[1. 1. 1. 1. 1. 1. 1. 1. 0. 0. ]]
num of non 0 elements in topH result : 90
h_i, h_curr = 1 45
topHCS accuracy :
[[1. 1. 1. 1. 1. 1.
1. 1. 1. 0. ]
[1. 1. 1. 1. 1. 1.
1. 0.9875 0.98888889 0. ]
[1. 1. 1. 1. 1. 1.
1. 1. 0. 0. ]]
num of non 0 elements in topH result : 90
h_i, h_curr = 2 90
topHCS accuracy :
[[1. 1. 1. 1. 1. 1.
1. 1. 1. 0. ]
[1. 1. 1. 1. 1. 1.
1. 0.9875 0.98888889 0. ]
[1. 1. 1. 1. 1. 1.
1. 1. 1. 0. ]]
d = 1002
cols = 1000 k = 100 blocks = 1
k = 100
num of non 0 elements in expected : 100
num of non 0 elements in result : 100
csvec accuracy : [1. 1. 1. 1. 1. 1. 1. 1. 1. 1.]
num of non 0 elements in topH result : 100
h_i, h_curr = 0 0
topHCS accuracy :
[[1. 1. 1. 1. 1. 1.
1. 1. 1. 1. ]
[1. 1. 1. 1. 1. 1.
1. 0.9875 0.98888889 0. ]
[1. 1. 1. 1. 1. 1.
1. 1. 1. 0. ]]
num of non 0 elements in topH result : 100
h_i, h_curr = 1 50
topHCS accuracy :
[[1. 1. 1. 1. 1. 1.
1. 1. 1. 1. ]
[1. 1. 1. 1. 1. 1.
1. 0.9875 0.98888889 1. ]
[1. 1. 1. 1. 1. 1.
1. 1. 1. 0. ]]
num of non 0 elements in topH result : 100
h_i, h_curr = 2 100
topHCS accuracy :
[[1. 1. 1. 1. 1. 1.
1. 1. 1. 1. ]
[1. 1. 1. 1. 1. 1.
1. 0.9875 0.98888889 1. ]
[1. 1. 1. 1. 1. 1.
1. 1. 1. 1. ]]
d = 1002
cols = 10000 k = 10 blocks = 1
k = 10
num of non 0 elements in expected : 10
num of non 0 elements in result : 10
csvec accuracy : [1. 0. 0. 0. 0. 0. 0. 0. 0. 0.]
num of non 0 elements in topH result : 10
h_i, h_curr = 0 0
topHCS accuracy :
[[1. 0. 0. 0. 0. 0. 0. 0. 0. 0.]
[0. 0. 0. 0. 0. 0. 0. 0. 0. 0.]
[0. 0. 0. 0. 0. 0. 0. 0. 0. 0.]]
num of non 0 elements in topH result : 10
h_i, h_curr = 1 5
topHCS accuracy :
[[1. 0. 0. 0. 0. 0. 0. 0. 0. 0.]
[1. 0. 0. 0. 0. 0. 0. 0. 0. 0.]
[0. 0. 0. 0. 0. 0. 0. 0. 0. 0.]]
num of non 0 elements in topH result : 10
h_i, h_curr = 2 10
topHCS accuracy :
[[1. 0. 0. 0. 0. 0. 0. 0. 0. 0.]
[1. 0. 0. 0. 0. 0. 0. 0. 0. 0.]
[1. 0. 0. 0. 0. 0. 0. 0. 0. 0.]]
d = 1002
cols = 10000 k = 20 blocks = 1
k = 20
num of non 0 elements in expected : 20
num of non 0 elements in result : 20
csvec accuracy : [1. 1. 0. 0. 0. 0. 0. 0. 0. 0.]
num of non 0 elements in topH result : 20
h_i, h_curr = 0 0
topHCS accuracy :
[[1. 1. 0. 0. 0. 0. 0. 0. 0. 0.]
[1. 0. 0. 0. 0. 0. 0. 0. 0. 0.]
[1. 0. 0. 0. 0. 0. 0. 0. 0. 0.]]
num of non 0 elements in topH result : 20
h_i, h_curr = 1 10
topHCS accuracy :
[[1. 1. 0. 0. 0. 0. 0. 0. 0. 0.]
[1. 1. 0. 0. 0. 0. 0. 0. 0. 0.]
[1. 0. 0. 0. 0. 0. 0. 0. 0. 0.]]
num of non 0 elements in topH result : 20
h_i, h_curr = 2 20
topHCS accuracy :
[[1. 1. 0. 0. 0. 0. 0. 0. 0. 0.]
[1. 1. 0. 0. 0. 0. 0. 0. 0. 0.]
[1. 1. 0. 0. 0. 0. 0. 0. 0. 0.]]
d = 1002
cols = 10000 k = 30 blocks = 1
k = 30
num of non 0 elements in expected : 30
num of non 0 elements in result : 30
csvec accuracy : [1. 1. 1. 0. 0. 0. 0. 0. 0. 0.]
num of non 0 elements in topH result : 30
h_i, h_curr = 0 0
topHCS accuracy :
[[1. 1. 1. 0. 0. 0. 0. 0. 0. 0.]
[1. 1. 0. 0. 0. 0. 0. 0. 0. 0.]
[1. 1. 0. 0. 0. 0. 0. 0. 0. 0.]]
num of non 0 elements in topH result : 30
h_i, h_curr = 1 15
topHCS accuracy :
[[1. 1. 1. 0. 0. 0. 0. 0. 0. 0.]
[1. 1. 1. 0. 0. 0. 0. 0. 0. 0.]
[1. 1. 0. 0. 0. 0. 0. 0. 0. 0.]]
num of non 0 elements in topH result : 30
h_i, h_curr = 2 30
topHCS accuracy :
[[1. 1. 1. 0. 0. 0. 0. 0. 0. 0.]
[1. 1. 1. 0. 0. 0. 0. 0. 0. 0.]
[1. 1. 1. 0. 0. 0. 0. 0. 0. 0.]]
d = 1002
cols = 10000 k = 40 blocks = 1
k = 40
num of non 0 elements in expected : 40
num of non 0 elements in result : 40
csvec accuracy : [1. 1. 1. 1. 0. 0. 0. 0. 0. 0.]
num of non 0 elements in topH result : 40
h_i, h_curr = 0 0
topHCS accuracy :
[[1. 1. 1. 1. 0. 0. 0. 0. 0. 0.]
[1. 1. 1. 0. 0. 0. 0. 0. 0. 0.]
[1. 1. 1. 0. 0. 0. 0. 0. 0. 0.]]
num of non 0 elements in topH result : 40
h_i, h_curr = 1 20
topHCS accuracy :
[[1. 1. 1. 1. 0. 0. 0. 0. 0. 0.]
[1. 1. 1. 1. 0. 0. 0. 0. 0. 0.]
[1. 1. 1. 0. 0. 0. 0. 0. 0. 0.]]
num of non 0 elements in topH result : 40
h_i, h_curr = 2 40
topHCS accuracy :
[[1. 1. 1. 1. 0. 0. 0. 0. 0. 0.]
[1. 1. 1. 1. 0. 0. 0. 0. 0. 0.]
[1. 1. 1. 1. 0. 0. 0. 0. 0. 0.]]
d = 1002
cols = 10000 k = 50 blocks = 1
k = 50
num of non 0 elements in expected : 50
num of non 0 elements in result : 50
csvec accuracy : [1. 1. 1. 1. 1. 0. 0. 0. 0. 0.]
num of non 0 elements in topH result : 50
h_i, h_curr = 0 0
topHCS accuracy :
[[1. 1. 1. 1. 1. 0. 0. 0. 0. 0.]
[1. 1. 1. 1. 0. 0. 0. 0. 0. 0.]
[1. 1. 1. 1. 0. 0. 0. 0. 0. 0.]]
num of non 0 elements in topH result : 50
h_i, h_curr = 1 25
topHCS accuracy :
[[1. 1. 1. 1. 1. 0. 0. 0. 0. 0.]
[1. 1. 1. 1. 1. 0. 0. 0. 0. 0.]
[1. 1. 1. 1. 0. 0. 0. 0. 0. 0.]]
num of non 0 elements in topH result : 50
h_i, h_curr = 2 50
topHCS accuracy :
[[1. 1. 1. 1. 1. 0. 0. 0. 0. 0.]
[1. 1. 1. 1. 1. 0. 0. 0. 0. 0.]
[1. 1. 1. 1. 1. 0. 0. 0. 0. 0.]]
d = 1002
cols = 10000 k = 60 blocks = 1
k = 60
num of non 0 elements in expected : 60
num of non 0 elements in result : 60
csvec accuracy : [1. 1. 1. 1. 1. 1. 0. 0. 0. 0.]
num of non 0 elements in topH result : 60
h_i, h_curr = 0 0
topHCS accuracy :
[[1. 1. 1. 1. 1. 1. 0. 0. 0. 0.]
[1. 1. 1. 1. 1. 0. 0. 0. 0. 0.]
[1. 1. 1. 1. 1. 0. 0. 0. 0. 0.]]
num of non 0 elements in topH result : 60
h_i, h_curr = 1 30
topHCS accuracy :
[[1. 1. 1. 1. 1. 1. 0. 0. 0. 0.]
[1. 1. 1. 1. 1. 1. 0. 0. 0. 0.]
[1. 1. 1. 1. 1. 0. 0. 0. 0. 0.]]
num of non 0 elements in topH result : 60
h_i, h_curr = 2 60
topHCS accuracy :
[[1. 1. 1. 1. 1. 1. 0. 0. 0. 0.]
[1. 1. 1. 1. 1. 1. 0. 0. 0. 0.]
[1. 1. 1. 1. 1. 1. 0. 0. 0. 0.]]
d = 1002
cols = 10000 k = 70 blocks = 1
k = 70
num of non 0 elements in expected : 70
num of non 0 elements in result : 70
csvec accuracy : [1. 1. 1. 1. 1. 1. 1. 0. 0. 0.]
num of non 0 elements in topH result : 70
h_i, h_curr = 0 0
topHCS accuracy :
[[1. 1. 1. 1. 1. 1. 1. 0. 0. 0.]
[1. 1. 1. 1. 1. 1. 0. 0. 0. 0.]
[1. 1. 1. 1. 1. 1. 0. 0. 0. 0.]]
num of non 0 elements in topH result : 70
h_i, h_curr = 1 35
topHCS accuracy :
[[1. 1. 1. 1. 1. 1. 1. 0. 0. 0.]
[1. 1. 1. 1. 1. 1. 1. 0. 0. 0.]
[1. 1. 1. 1. 1. 1. 0. 0. 0. 0.]]
num of non 0 elements in topH result : 70
h_i, h_curr = 2 70
topHCS accuracy :
[[1. 1. 1. 1. 1. 1. 1. 0. 0. 0.]
[1. 1. 1. 1. 1. 1. 1. 0. 0. 0.]
[1. 1. 1. 1. 1. 1. 1. 0. 0. 0.]]
d = 1002
cols = 10000 k = 80 blocks = 1
k = 80
num of non 0 elements in expected : 80
num of non 0 elements in result : 80
csvec accuracy : [1. 1. 1. 1. 1. 1. 1. 1. 0. 0.]
num of non 0 elements in topH result : 80
h_i, h_curr = 0 0
topHCS accuracy :
[[1. 1. 1. 1. 1. 1. 1. 1. 0. 0.]
[1. 1. 1. 1. 1. 1. 1. 0. 0. 0.]
[1. 1. 1. 1. 1. 1. 1. 0. 0. 0.]]
num of non 0 elements in topH result : 80
h_i, h_curr = 1 40
topHCS accuracy :
[[1. 1. 1. 1. 1. 1. 1. 1. 0. 0.]
[1. 1. 1. 1. 1. 1. 1. 1. 0. 0.]
[1. 1. 1. 1. 1. 1. 1. 0. 0. 0.]]
num of non 0 elements in topH result : 80
h_i, h_curr = 2 80
topHCS accuracy :
[[1. 1. 1. 1. 1. 1. 1. 1. 0. 0.]
[1. 1. 1. 1. 1. 1. 1. 1. 0. 0.]
[1. 1. 1. 1. 1. 1. 1. 1. 0. 0.]]
d = 1002
cols = 10000 k = 90 blocks = 1
k = 90
num of non 0 elements in expected : 90
num of non 0 elements in result : 90
csvec accuracy : [1. 1. 1. 1. 1. 1. 1. 1. 1. 0.]
num of non 0 elements in topH result : 90
h_i, h_curr = 0 0
topHCS accuracy :
[[1. 1. 1. 1. 1. 1. 1. 1. 1. 0.]
[1. 1. 1. 1. 1. 1. 1. 1. 0. 0.]
[1. 1. 1. 1. 1. 1. 1. 1. 0. 0.]]
num of non 0 elements in topH result : 90
h_i, h_curr = 1 45
topHCS accuracy :
[[1. 1. 1. 1. 1. 1. 1. 1. 1. 0.]
[1. 1. 1. 1. 1. 1. 1. 1. 1. 0.]
[1. 1. 1. 1. 1. 1. 1. 1. 0. 0.]]
num of non 0 elements in topH result : 90
h_i, h_curr = 2 90
topHCS accuracy :
[[1. 1. 1. 1. 1. 1. 1. 1. 1. 0.]
[1. 1. 1. 1. 1. 1. 1. 1. 1. 0.]
[1. 1. 1. 1. 1. 1. 1. 1. 1. 0.]]
d = 1002
cols = 10000 k = 100 blocks = 1
k = 100
num of non 0 elements in expected : 100
num of non 0 elements in result : 100
csvec accuracy : [1. 1. 1. 1. 1. 1. 1. 1. 1. 1.]
num of non 0 elements in topH result : 100
h_i, h_curr = 0 0
topHCS accuracy :
[[1. 1. 1. 1. 1. 1. 1. 1. 1. 1.]
[1. 1. 1. 1. 1. 1. 1. 1. 1. 0.]
[1. 1. 1. 1. 1. 1. 1. 1. 1. 0.]]
num of non 0 elements in topH result : 100
h_i, h_curr = 1 50
topHCS accuracy :
[[1. 1. 1. 1. 1. 1. 1. 1. 1. 1.]
[1. 1. 1. 1. 1. 1. 1. 1. 1. 1.]
[1. 1. 1. 1. 1. 1. 1. 1. 1. 0.]]
num of non 0 elements in topH result : 100
h_i, h_curr = 2 100
topHCS accuracy :
[[1. 1. 1. 1. 1. 1. 1. 1. 1. 1.]
[1. 1. 1. 1. 1. 1. 1. 1. 1. 1.]
[1. 1. 1. 1. 1. 1. 1. 1. 1. 1.]]
d = 1002
cols = 100000 k = 10 blocks = 1
k = 10
num of non 0 elements in expected : 10
num of non 0 elements in result : 10
csvec accuracy : [1. 0. 0. 0. 0. 0. 0. 0. 0. 0.]
num of non 0 elements in topH result : 10
h_i, h_curr = 0 0
topHCS accuracy :
[[1. 0. 0. 0. 0. 0. 0. 0. 0. 0.]
[0. 0. 0. 0. 0. 0. 0. 0. 0. 0.]
[0. 0. 0. 0. 0. 0. 0. 0. 0. 0.]]
num of non 0 elements in topH result : 10
h_i, h_curr = 1 5
topHCS accuracy :
[[1. 0. 0. 0. 0. 0. 0. 0. 0. 0.]
[1. 0. 0. 0. 0. 0. 0. 0. 0. 0.]
[0. 0. 0. 0. 0. 0. 0. 0. 0. 0.]]
num of non 0 elements in topH result : 10
h_i, h_curr = 2 10
topHCS accuracy :
[[1. 0. 0. 0. 0. 0. 0. 0. 0. 0.]
[1. 0. 0. 0. 0. 0. 0. 0. 0. 0.]
[1. 0. 0. 0. 0. 0. 0. 0. 0. 0.]]
d = 1002
cols = 100000 k = 20 blocks = 1
k = 20
num of non 0 elements in expected : 20
num of non 0 elements in result : 20
csvec accuracy : [1. 1. 0. 0. 0. 0. 0. 0. 0. 0.]
num of non 0 elements in topH result : 20
h_i, h_curr = 0 0
topHCS accuracy :
[[1. 1. 0. 0. 0. 0. 0. 0. 0. 0.]
[1. 0. 0. 0. 0. 0. 0. 0. 0. 0.]
[1. 0. 0. 0. 0. 0. 0. 0. 0. 0.]]
num of non 0 elements in topH result : 20
h_i, h_curr = 1 10
topHCS accuracy :
[[1. 1. 0. 0. 0. 0. 0. 0. 0. 0.]
[1. 1. 0. 0. 0. 0. 0. 0. 0. 0.]
[1. 0. 0. 0. 0. 0. 0. 0. 0. 0.]]
num of non 0 elements in topH result : 20
h_i, h_curr = 2 20
topHCS accuracy :
[[1. 1. 0. 0. 0. 0. 0. 0. 0. 0.]
[1. 1. 0. 0. 0. 0. 0. 0. 0. 0.]
[1. 1. 0. 0. 0. 0. 0. 0. 0. 0.]]
d = 1002
cols = 100000 k = 30 blocks = 1
k = 30
num of non 0 elements in expected : 30
num of non 0 elements in result : 30
csvec accuracy : [1. 1. 1. 0. 0. 0. 0. 0. 0. 0.]
num of non 0 elements in topH result : 30
h_i, h_curr = 0 0
topHCS accuracy :
[[1. 1. 1. 0. 0. 0. 0. 0. 0. 0.]
[1. 1. 0. 0. 0. 0. 0. 0. 0. 0.]
[1. 1. 0. 0. 0. 0. 0. 0. 0. 0.]]
num of non 0 elements in topH result : 30
h_i, h_curr = 1 15
topHCS accuracy :
[[1. 1. 1. 0. 0. 0. 0. 0. 0. 0.]
[1. 1. 1. 0. 0. 0. 0. 0. 0. 0.]
[1. 1. 0. 0. 0. 0. 0. 0. 0. 0.]]
num of non 0 elements in topH result : 30
h_i, h_curr = 2 30
topHCS accuracy :
[[1. 1. 1. 0. 0. 0. 0. 0. 0. 0.]
[1. 1. 1. 0. 0. 0. 0. 0. 0. 0.]
[1. 1. 1. 0. 0. 0. 0. 0. 0. 0.]]
d = 1002
cols = 100000 k = 40 blocks = 1
k = 40
num of non 0 elements in expected : 40
num of non 0 elements in result : 40
csvec accuracy : [1. 1. 1. 1. 0. 0. 0. 0. 0. 0.]
num of non 0 elements in topH result : 40
h_i, h_curr = 0 0
topHCS accuracy :
[[1. 1. 1. 1. 0. 0. 0. 0. 0. 0.]
[1. 1. 1. 0. 0. 0. 0. 0. 0. 0.]
[1. 1. 1. 0. 0. 0. 0. 0. 0. 0.]]
num of non 0 elements in topH result : 40
h_i, h_curr = 1 20
topHCS accuracy :
[[1. 1. 1. 1. 0. 0. 0. 0. 0. 0.]
[1. 1. 1. 1. 0. 0. 0. 0. 0. 0.]
[1. 1. 1. 0. 0. 0. 0. 0. 0. 0.]]
num of non 0 elements in topH result : 40
h_i, h_curr = 2 40
topHCS accuracy :
[[1. 1. 1. 1. 0. 0. 0. 0. 0. 0.]
[1. 1. 1. 1. 0. 0. 0. 0. 0. 0.]
[1. 1. 1. 1. 0. 0. 0. 0. 0. 0.]]
d = 1002
cols = 100000 k = 50 blocks = 1
k = 50
num of non 0 elements in expected : 50
num of non 0 elements in result : 50
csvec accuracy : [1. 1. 1. 1. 1. 0. 0. 0. 0. 0.]
num of non 0 elements in topH result : 50
h_i, h_curr = 0 0
topHCS accuracy :
[[1. 1. 1. 1. 1. 0. 0. 0. 0. 0.]
[1. 1. 1. 1. 0. 0. 0. 0. 0. 0.]
[1. 1. 1. 1. 0. 0. 0. 0. 0. 0.]]
num of non 0 elements in topH result : 50
h_i, h_curr = 1 25
topHCS accuracy :
[[1. 1. 1. 1. 1. 0. 0. 0. 0. 0.]
[1. 1. 1. 1. 1. 0. 0. 0. 0. 0.]
[1. 1. 1. 1. 0. 0. 0. 0. 0. 0.]]
num of non 0 elements in topH result : 50
h_i, h_curr = 2 50
topHCS accuracy :
[[1. 1. 1. 1. 1. 0. 0. 0. 0. 0.]
[1. 1. 1. 1. 1. 0. 0. 0. 0. 0.]
[1. 1. 1. 1. 1. 0. 0. 0. 0. 0.]]
d = 1002
cols = 100000 k = 60 blocks = 1
k = 60
num of non 0 elements in expected : 60
num of non 0 elements in result : 60
csvec accuracy : [1. 1. 1. 1. 1. 1. 0. 0. 0. 0.]
num of non 0 elements in topH result : 60
h_i, h_curr = 0 0
topHCS accuracy :
[[1. 1. 1. 1. 1. 1. 0. 0. 0. 0.]
[1. 1. 1. 1. 1. 0. 0. 0. 0. 0.]
[1. 1. 1. 1. 1. 0. 0. 0. 0. 0.]]
num of non 0 elements in topH result : 60
h_i, h_curr = 1 30
topHCS accuracy :
[[1. 1. 1. 1. 1. 1. 0. 0. 0. 0.]
[1. 1. 1. 1. 1. 1. 0. 0. 0. 0.]
[1. 1. 1. 1. 1. 0. 0. 0. 0. 0.]]
num of non 0 elements in topH result : 60
h_i, h_curr = 2 60
topHCS accuracy :
[[1. 1. 1. 1. 1. 1. 0. 0. 0. 0.]
[1. 1. 1. 1. 1. 1. 0. 0. 0. 0.]
[1. 1. 1. 1. 1. 1. 0. 0. 0. 0.]]
d = 1002
cols = 100000 k = 70 blocks = 1
k = 70
num of non 0 elements in expected : 70
num of non 0 elements in result : 70
csvec accuracy : [1. 1. 1. 1. 1. 1. 1. 0. 0. 0.]
num of non 0 elements in topH result : 70
h_i, h_curr = 0 0
topHCS accuracy :
[[1. 1. 1. 1. 1. 1. 1. 0. 0. 0.]
[1. 1. 1. 1. 1. 1. 0. 0. 0. 0.]
[1. 1. 1. 1. 1. 1. 0. 0. 0. 0.]]
num of non 0 elements in topH result : 70
h_i, h_curr = 1 35
topHCS accuracy :
[[1. 1. 1. 1. 1. 1. 1. 0. 0. 0.]
[1. 1. 1. 1. 1. 1. 1. 0. 0. 0.]
[1. 1. 1. 1. 1. 1. 0. 0. 0. 0.]]
num of non 0 elements in topH result : 70
h_i, h_curr = 2 70
topHCS accuracy :
[[1. 1. 1. 1. 1. 1. 1. 0. 0. 0.]
[1. 1. 1. 1. 1. 1. 1. 0. 0. 0.]
[1. 1. 1. 1. 1. 1. 1. 0. 0. 0.]]
d = 1002
cols = 100000 k = 80 blocks = 1
k = 80
num of non 0 elements in expected : 80
num of non 0 elements in result : 80
csvec accuracy : [1. 1. 1. 1. 1. 1. 1. 1. 0. 0.]
num of non 0 elements in topH result : 80
h_i, h_curr = 0 0
topHCS accuracy :
[[1. 1. 1. 1. 1. 1. 1. 1. 0. 0.]
[1. 1. 1. 1. 1. 1. 1. 0. 0. 0.]
[1. 1. 1. 1. 1. 1. 1. 0. 0. 0.]]
num of non 0 elements in topH result : 80
h_i, h_curr = 1 40
topHCS accuracy :
[[1. 1. 1. 1. 1. 1. 1. 1. 0. 0.]
[1. 1. 1. 1. 1. 1. 1. 1. 0. 0.]
[1. 1. 1. 1. 1. 1. 1. 0. 0. 0.]]
num of non 0 elements in topH result : 80
h_i, h_curr = 2 80
topHCS accuracy :
[[1. 1. 1. 1. 1. 1. 1. 1. 0. 0.]
[1. 1. 1. 1. 1. 1. 1. 1. 0. 0.]
[1. 1. 1. 1. 1. 1. 1. 1. 0. 0.]]
d = 1002
cols = 100000 k = 90 blocks = 1
k = 90
num of non 0 elements in expected : 90
num of non 0 elements in result : 90
csvec accuracy : [1. 1. 1. 1. 1. 1. 1. 1. 1. 0.]
num of non 0 elements in topH result : 90
h_i, h_curr = 0 0
topHCS accuracy :
[[1. 1. 1. 1. 1. 1. 1. 1. 1. 0.]
[1. 1. 1. 1. 1. 1. 1. 1. 0. 0.]
[1. 1. 1. 1. 1. 1. 1. 1. 0. 0.]]
num of non 0 elements in topH result : 90
h_i, h_curr = 1 45
topHCS accuracy :
[[1. 1. 1. 1. 1. 1. 1. 1. 1. 0.]
[1. 1. 1. 1. 1. 1. 1. 1. 1. 0.]
[1. 1. 1. 1. 1. 1. 1. 1. 0. 0.]]
num of non 0 elements in topH result : 90
h_i, h_curr = 2 90
topHCS accuracy :
[[1. 1. 1. 1. 1. 1. 1. 1. 1. 0.]
[1. 1. 1. 1. 1. 1. 1. 1. 1. 0.]
[1. 1. 1. 1. 1. 1. 1. 1. 1. 0.]]
d = 1002
cols = 100000 k = 100 blocks = 1
k = 100
num of non 0 elements in expected : 100
num of non 0 elements in result : 100
csvec accuracy : [1. 1. 1. 1. 1. 1. 1. 1. 1. 1.]
num of non 0 elements in topH result : 100
h_i, h_curr = 0 0
topHCS accuracy :
[[1. 1. 1. 1. 1. 1. 1. 1. 1. 1.]
[1. 1. 1. 1. 1. 1. 1. 1. 1. 0.]
[1. 1. 1. 1. 1. 1. 1. 1. 1. 0.]]
num of non 0 elements in topH result : 100
h_i, h_curr = 1 50
topHCS accuracy :
[[1. 1. 1. 1. 1. 1. 1. 1. 1. 1.]
[1. 1. 1. 1. 1. 1. 1. 1. 1. 1.]
[1. 1. 1. 1. 1. 1. 1. 1. 1. 0.]]
num of non 0 elements in topH result : 100
h_i, h_curr = 2 100
topHCS accuracy :
[[1. 1. 1. 1. 1. 1. 1. 1. 1. 1.]
[1. 1. 1. 1. 1. 1. 1. 1. 1. 1.]
[1. 1. 1. 1. 1. 1. 1. 1. 1. 1.]]
<file_sep>/topHCS/test_topHCS.py
import unittest
import csvec
from csvec import CSVec
from topHCS import TopHCS
import torch
class Base:
    # Use a Base wrapper class to hide TopHCSTestCase from the unittest runner:
    # only the device/numBlocks-specific subclasses below should actually run.
    class TopHCSTestCase(unittest.TestCase):
        def testInit(self):
            """A fresh TopHCS starts with an all-zero sketch table and top-H buffer."""
            d = 100
            c = 20
            r = 5
            h = 36
            a = TopHCS(h, d, c, r, **self.csvecArgs)
            zerosTable = torch.zeros(r, c).to(self.device)
            self.assertTrue(torch.allclose(a.csvec.table, zerosTable))
            zerosH = torch.zeros(h).to(self.device)
            self.assertTrue(torch.allclose(a.topH, zerosH))
            self.assertTrue(a.h == h)

        def testStoreVec1(self):
            """Storing [1..100] with h=1 keeps exactly the single largest entry in topH."""
            d = 100
            c = 20
            r = 5
            h = 1
            a = TopHCS(h, d, c, r, **self.csvecArgs)
            vec = torch.arange(1, d + 1, dtype=torch.float, device=self.device)
            a.store(vec)
            # only the largest element (value d at index d-1) should be kept exactly
            expected = torch.zeros(d, dtype=torch.float, device=self.device)
            expected[d - 1] = d
            self.assertTrue(torch.allclose(a.topH, expected))
            # everything else goes into the sketched remainder
            self.assertTrue(torch.allclose(a.bottomH, vec - expected))

        def testStoreVec2(self):
            """With h == d, the entire vector [1..100] is stored exactly in topH."""
            d = 100
            c = 20
            r = 5
            h = d
            a = TopHCS(h, d, c, r, **self.csvecArgs)
            vec = torch.arange(1, d + 1, dtype=torch.float, device=self.device)
            a.store(vec)
            self.assertTrue(torch.allclose(a.topH, vec))

        def testStoreVec3(self):
            """With h == d, a random vector is recovered exactly from topH."""
            d = 100
            c = 20
            r = 5
            h = d
            a = TopHCS(h, d, c, r, **self.csvecArgs)
            vec = torch.randn(d)
            a.store(vec)
            self.assertTrue(torch.allclose(a.topH, vec))

        def testSameBuckets(self):
            """With h == 0, TopHCS's internal sketch matches a plain CSVec of the same vector."""
            d = 100
            c = 20
            r = 5
            h = 0
            a = CSVec(d, c, r, **self.csvecArgs)
            vec = torch.randn(d)
            a += vec
            b = TopHCS(h=h, d=d, c=c, r=r, **self.csvecArgs)
            b.store(vec)
            self.assertTrue(torch.allclose(a.table, b.csvec.table))

        def testTopKSum(self):
            """topKSum over two sketches of disjoint halves recovers the elementwise sum."""
            d = 10
            c = 10000
            r = 20
            h = d
            a = TopHCS(h, d, c, r, **self.csvecArgs)
            b = TopHCS(h, d, c, r, **self.csvecArgs)
            zerosHalf = torch.zeros(d // 2, dtype=torch.float, device=self.device)
            vec = torch.cat((torch.randn(d // 2, device=self.device), zerosHalf), 0)
            vec2 = torch.cat((zerosHalf, torch.randn(d // 2, device=self.device)), 0)
            a.store(vec)
            b.store(vec2)
            result = TopHCS.topKSum([a, b], d)
            expected = vec + vec2
            self.assertTrue(torch.equal(expected, result))

        def testTopKSum2(self):
            """topKSum over three sketches of random vectors recovers the elementwise sum."""
            d = 10
            c = 10000
            r = 20
            h = d
            # Distinct names for the three sketches: the original code rebound the
            # sketch-width local `c` to a TopHCS instance (`c = TopHCS(h, d, c, r, ...)`),
            # which worked only by accident of evaluation order.
            sk1 = TopHCS(h, d, c, r, **self.csvecArgs)
            sk2 = TopHCS(h, d, c, r, **self.csvecArgs)
            sk3 = TopHCS(h, d, c, r, **self.csvecArgs)
            vec = torch.randn(d, device=self.device)
            vec2 = torch.randn(d, device=self.device)
            vec3 = torch.randn(d, device=self.device)
            sk1.store(vec)
            sk2.store(vec2)
            sk3.store(vec3)
            result = TopHCS.topKSum([sk1, sk2, sk3], d)
            expected = vec + vec2 + vec3
            self.assertTrue(torch.equal(expected, result))
class TestCaseCPU1(Base.TopHCSTestCase):
    """Run the shared TopHCS tests on CPU with a single sketch block."""

    def setUp(self):
        # hack: wipe csvec's module-level cache so state never leaks between tests
        csvec.cache = {}
        self.device = "cpu"
        self.numBlocks = 1
        self.csvecArgs = dict(numBlocks=self.numBlocks, device=self.device)
class TestCaseCPU2(Base.TopHCSTestCase):
    """Run the shared TopHCS tests on CPU with two sketch blocks."""

    def setUp(self):
        # wipe csvec's module-level cache so state never leaks between tests
        csvec.cache = {}
        self.device = "cpu"
        self.numBlocks = 2
        self.csvecArgs = dict(numBlocks=self.numBlocks, device=self.device)
@unittest.skipIf(not torch.cuda.is_available(), "CUDA not available")
class TestCaseCUDA2(Base.TopHCSTestCase):
    """Run the shared TopHCS tests on CUDA with two sketch blocks (skipped without a GPU)."""

    def setUp(self):
        # wipe csvec's module-level cache so state never leaks between tests
        csvec.cache = {}
        self.device = "cuda"
        self.numBlocks = 2
        self.csvecArgs = dict(numBlocks=self.numBlocks, device=self.device)
<file_sep>/topHCS/old/testing/run_basic_full_graphing.py
import math
import numpy as np
import copy
import torch
from csvec import CSVec
from topHCS import TopHCS
import matplotlib
import matplotlib.pyplot as plt
from matplotlib.ticker import MaxNLocator
from matplotlib import cm

# Plot hard-coded benchmark results comparing plain CSVec recovery against
# TopHCS with h = 0.5*k and h = k, over two k values, two sketch row counts,
# and two vector sources ("initial" and "accum").
# Layout: 2 rows (vector source) x 4 cols (per r: index accuracy, L2 norm).

# graphing initials
n = 3  # one curve per method: CSVec, h = 0.5*k, h = k
colors = plt.cm.plasma(np.linspace(0, 0.95, n))
fig, axs = plt.subplots(2, 4, figsize=(40, 16))
fontsize = 17
device = 'cuda'
# NOTE: removed dead assignments from the original script
# (`accumVecPaths = -1` and `hVals = [0.5, 1.0]` were never used).
kVals = [50000, 100000]
rVals = [5, 15]
c = 180000  # sketch columns, fixed for all runs

# results[p][r_i][k_i]: p = vector source index, r_i indexes rVals, k_i indexes kVals
csvec_accuracy = [[[0.1543, 0.09229], [0.3211, 0.19329]], [[0.00656, 0.00856], [0.0379, 0.03852]]]
csvec_L2 = [[[1.71890879e-05, 2.16076169e-05], [7.52577307e-06, 1.03917146e-05]], [[0.01835084, 0.02452415], [0.01133576, 0.01507866]]]
half_accuracy = [[[0.21412, 0.12725], [0.37652, 0.24214]], [[0.00744, 0.00982], [0.0417, 0.044]]]
half_L2 = [[[1.11603767e-05, 1.46940065e-05], [6.48265086e-06, 8.79577055e-06]], [[0.01785514,0.02350082], [0.0110609, 0.014491]]]
full_L2 = [[[1.07820888e-05, 1.40615657e-05], [6.25502980e-06, 8.43287489e-06]], [[0.0175926, 0.02290335], [0.01090221, 0.01415622]]]
full_accuracy = [[[0.22136, 0.13394], [0.39184, 0.25581]], [[0.00752, 0.0105], [0.0434, 0.04697]]]

for p, _source in enumerate(["initial", "accum"]):  # row index; source name itself unused
    for r_i, rows in enumerate(rVals):
        r = rows
        # left panel of the pair: index accuracy vs. k
        axs[p, 2*r_i].tick_params(labelsize=fontsize-2)
        axs[p, 2*r_i].set_xlabel("k", fontsize=fontsize-2)
        axs[p, 2*r_i].set_ylabel("index accuracy", fontsize=fontsize-2)
        axs[p, 2*r_i].set_title("index accuracy vs. k, sketch=("+str(r)+", "+str(c)+")", fontsize=fontsize)
        axs[p, 2*r_i].plot(kVals, csvec_accuracy[p][r_i], color=colors[0], label="CSVec")
        axs[p, 2*r_i].plot(kVals, half_accuracy[p][r_i], color=colors[1], label="h = 0.5*k")
        axs[p, 2*r_i].plot(kVals, full_accuracy[p][r_i], color=colors[2], label="h = k")
        axs[p, 2*r_i].legend(fontsize=fontsize-3)
        # right panel of the pair: L2 norm of the recovery error vs. k
        axs[p, 2*r_i + 1].tick_params(labelsize=fontsize-2)
        axs[p, 2*r_i + 1].set_xlabel("k", fontsize=fontsize-2)
        axs[p, 2*r_i + 1].set_ylabel("L2 norm = sqrt(sum((expected - recovered)^2))", fontsize=fontsize-2)
        axs[p, 2*r_i + 1].set_title("L2 norm vs. k, sketch=("+str(r)+", "+str(c)+")", fontsize=fontsize)
        axs[p, 2*r_i + 1].plot(kVals, csvec_L2[p][r_i], color=colors[0], label="CSVec")
        axs[p, 2*r_i + 1].plot(kVals, half_L2[p][r_i], color=colors[1], label="h = 0.5*k")
        axs[p, 2*r_i + 1].plot(kVals, full_L2[p][r_i], color=colors[2], label="h = k")
        axs[p, 2*r_i + 1].legend(fontsize=fontsize-3)

plt.savefig("graphs/basic_full_numBlocks_40.png")
<file_sep>/topHCS/old/testing/run_summed_c_15.py
import math
import numpy as np
import copy
import torch
from csvec import CSVec
from topHCS import TopHCS
import matplotlib
import matplotlib.pyplot as plt
from matplotlib.ticker import MaxNLocator
from datetime import datetime
startTime = datetime.now()

device = 'cuda'

# graphing: 2 rows (initial vs. accumulated vectors) x 3 metric columns
fig, axs = plt.subplots(2, 3, figsize=(24, 12))

# Loading vectors from file
initialVecPaths = ["../../datafiles/initialVs0.torch",
                   "../../datafiles/initialVs1.torch",
                   "../../datafiles/initialVs2.torch",
                   "../../datafiles/initialVs3.torch"]
accumVecPaths = ["../../datafiles/accumulatedVs0.torch",
                 "../../datafiles/accumulatedVs1.torch",
                 "../../datafiles/accumulatedVs2.torch",
                 "../../datafiles/accumulatedVs3.torch"]

for p, paths in enumerate([initialVecPaths, accumVecPaths]):
    vecs = []
    for path in paths:
        vec = torch.load(path, map_location=device)
        vecs.append(vec)
    # TODO: use torch.stack. I got lazy here
    # Bug fix: clone before summing. `summed = vecs[0]` aliased the first
    # worker's vector, so the in-place `+=` below corrupted vecs[0] before it
    # was sketched (the newer k_100000_h_16k.py script already uses .clone()).
    summed = vecs[0].clone()
    for lazy in vecs[1:]:
        summed += lazy
    assert(vecs[0].size() == summed.size())
    d, c, r, numBlocks = len(summed), 180000, 15, 30
    kVals = [50000, 100000, 150000, 200000, 250000, 500000]
    hVals = [1.00, .75, .50, .25]
    # init CSVec stuff
    indexAcc_1 = np.zeros(len(kVals))
    L1_1 = np.zeros(len(kVals))
    L2_1 = np.zeros(len(kVals))
    # init TopHCS stuff
    indexAcc_2 = np.zeros((len(hVals), len(kVals)))
    L1_2 = np.zeros((len(hVals), len(kVals)))
    L2_2 = np.zeros((len(hVals), len(kVals)))
    for k_i, k in enumerate(kVals):
        k = int(k) #stupid
        # Ground truth: the exact top-k (by magnitude) of the summed vector.
        expected = torch.zeros(d, device=device)
        expectedIndices = torch.sort(summed**2)[1][-k:]
        expected[expectedIndices.to(device)] = summed[expectedIndices.to(device)]
        # CSVecs
        workers = []
        for vec in vecs:
            w = CSVec(d=d, c=c, r=r, numBlocks=numBlocks, device=device)
            w += vec
            workers.append(w)
        # Summing CSVecs into 1st worker to save memory
        for w in workers[1:]:
            workers[0] += w
        recovered_1 = workers[0].unSketch(k)
        indexAcc_1[k_i] = (expected[expectedIndices] * recovered_1[expectedIndices]).nonzero().numel() / k
        # Error is measured only on the coordinates the sketch actually recovered.
        diff = recovered_1[torch.nonzero(recovered_1.view(-1).data).squeeze()] - summed[torch.nonzero(recovered_1.view(-1).data).squeeze()]
        L1_1[k_i] = torch.sum(torch.abs(diff))
        L2_1[k_i] = torch.sum(diff**2)
        for h_i, hVal in enumerate(hVals):
            h = int(k * hVal)
            workers = [] #reusing
            for vec in vecs:
                w = TopHCS(h=h, d=d, c=c, r=r, numBlocks=numBlocks, device=device)
                w.store(vec)
                workers.append(w)
            assert(len(workers) == len(vecs))
            recovered_2 = TopHCS.topKSum(workers, k)
            indexAcc_2[h_i, k_i] = (expected[expectedIndices] * recovered_2[expectedIndices]).nonzero().numel() / k
            diff = recovered_2[torch.nonzero(recovered_2.view(-1).data).squeeze()] - summed[torch.nonzero(recovered_2.view(-1).data).squeeze()]
            L1_2[h_i, k_i] = torch.sum(torch.abs(diff))
            L2_2[h_i, k_i] = torch.sum(diff**2)
        #indexAcc = (expected == recovered).nonzero().numel() / k
        # Use above only when k == d
        #print("\n", "k = %r; index accuracy = %r" % (k, indexAcc))
    colors=["#A30CE8", "#FF0000", "#E8710C", "#FFD20D"]
    axs[p, 0].plot(kVals, L1_1, color='blue', label="CSVec")
    axs[p, 1].plot(kVals, L2_1, color='blue', label="CSVec")
    axs[p, 2].plot(kVals, indexAcc_1, color='blue', label="CSVec")
    for h_i, h in enumerate(hVals):
        axs[p, 0].plot(kVals, L1_2[h_i], color=colors[h_i], label="h = k*"+str(h))
        axs[p, 1].plot(kVals, L2_2[h_i], color=colors[h_i], label="h = k*"+str(h))
        axs[p, 2].plot(kVals, indexAcc_2[h_i], color=colors[h_i], label="h = k*"+str(h))
    axs[p, 0].set_xlabel("k for topK")
    axs[p, 1].set_xlabel("k for topK")
    axs[p, 2].set_xlabel("k for topK")
    axs[p, 0].set_ylabel("sum L1 Reconstruction Error")
    axs[p, 1].set_ylabel("sum L2 Reconstruction Error")
    axs[p, 2].set_ylabel("Index Accuracy Rate")
    axs[p, 0].set_title("sum L1 Reconstruction Error vs k")
    axs[p, 1].set_title("sum L2 Reconstruction Error vs k")
    axs[p, 2].set_title("Index Accuracy Rate vs k")
    axs[p, 0].legend()
    axs[p, 1].legend()
    axs[p, 2].legend()
    print("On %r" % (paths))
    print("k Values = %r" % (kVals))
    print("\n", "CSVec: index accuracy = %r" % (indexAcc_1))
    print("CSVec: sum L1 reconstruction error = %r" % (L1_1))
    # Bug fix: this line printed L2_2 (the TopHCS errors) under the CSVec label.
    print("CSVec: sum L2 reconstruction error = %r" % (L2_1))
    print("\n", "TopHCS: index accuracy = %r" % (indexAcc_2))
    print("TopHCS: sum L1 reconstruction error = %r" % (L1_2))
    print("TopHCS: sum L2 reconstruction error = %r" % (L2_2))

plt.savefig("graphs/summedloss.png")
<file_sep>/README.md
# TopHCS: TopH Count Sketch
## Installation
Dependencies: `pytorch`, `numpy` and `CSVec` (from Sketched SGD paper, see description). Tested with `torch==1.0.1` and `numpy==1.15.3`, but this should work with a wide range of versions.
`git clone` the repository to your local machine, change into the directory that contains `setup.py`, and then run
```
pip install -e .
```
to install this package.
## Description
This package contains one main class, `TopHCS`, which computes the Count Sketch of input vectors, and can extract heavy hitters from a Count Sketch. To account for collisions, it keeps track of the top H elements (by magnitude).
Link to the Count Sketch paper -> http://www.mathcs.emory.edu/~cheung/Courses/584-StreamDB/Syllabus/papers/Frequency-count/FrequentStream.pdf
Link to the Sketched SGD paper -> http://arxiv.org/pdf/1903.04488.pdf
<file_sep>/topHCS/k_100000_h_16k.py
import math
import numpy as np
import copy
import torch
from csvec import CSVec
from topHCS import TopHCS
import matplotlib
import matplotlib.pyplot as plt
from matplotlib.ticker import MaxNLocator
from matplotlib import cm
import ipdb
import sys
torch.set_printoptions(threshold=5000)

# 2 rows (initial vs. accumulated vectors) x 2 columns (accuracy, L2 error)
fig, axs = plt.subplots(2, 2, figsize=(24, 18))
matplotlib.rcParams.update({'font.size': 16})
device = 'cuda'
graphPath = 'graphs/k_100000_h_16k.png'

initialVecPaths = ["../../datafiles/initialVs0.torch",
                   "../../datafiles/initialVs1.torch",
                   "../../datafiles/initialVs2.torch",
                   "../../datafiles/initialVs3.torch"]
accumVecPaths = ["../../datafiles/accumulatedVs0.torch",
                 "../../datafiles/accumulatedVs1.torch",
                 "../../datafiles/accumulatedVs2.torch",
                 "../../datafiles/accumulatedVs3.torch"]

for p, paths in enumerate([initialVecPaths, accumVecPaths]):
    print("Using {}".format(paths))
    vecs = []
    for path in paths:
        #vecs.append(torch.load(path, map_location=device)[::stepsize])
        vecs.append(torch.load(path, map_location=device))
        print("vector loaded")
    assert(len(vecs) == len(initialVecPaths))
    summed = vecs[0].clone()
    for v in vecs[1:]:
        summed += v
    # Indices of summed sorted by ascending magnitude; the top-k live at the tail.
    expectedIndices = torch.sort(summed**2)[1]
    kVals = [10000, 100000, 1000000, 10000000]
    hVals = [1, 2, 4, 8, 16]
    cVals = [180000]
    # Fix: the loop index was named `c` and then immediately shadowed by the
    # `c = cols` assignment below; renamed to c_i to remove the collision.
    for c_i, cols in enumerate(cVals):
        csvecAcc = np.zeros(len(kVals))
        topHCSAcc = np.zeros((len(hVals), len(kVals)))
        csvecL2 = np.zeros(len(kVals))
        topHCSL2 = np.zeros((len(hVals), len(kVals)))
        for k_i, k in enumerate(kVals):
            d, c, r, numBlocks = len(summed), cols, 15, 30
            #ipdb.set_trace()
            expected = torch.zeros(len(summed), device=device)
            expected[expectedIndices[-k:].to(device)] = summed[expectedIndices[-k:].to(device)]
            assert(summed.size() == vecs[0].size())
            # De-duplicated the copy-pasted w_0..w_3 blocks into a loop;
            # the per-worker print("") calls are kept for identical output.
            csvecWorkers = []
            for vec in vecs:
                w = CSVec(d=d, c=c, r=r, numBlocks=numBlocks, device=device)
                w += vec
                print("")
                csvecWorkers.append(w)
            workersSummed = csvecWorkers[0] + csvecWorkers[1] + csvecWorkers[2] + csvecWorkers[3]
            result = workersSummed.unSketch(k)
            csvecAcc[k_i] = (expected[expectedIndices] * result[expectedIndices]).nonzero().numel() / k
            print("k = {}".format(k))
            csvecL2[k_i] = (torch.sum((result - expected)**2))**0.5
            for h_i, h in enumerate(hVals):
                # (A dead `result = torch.zeros(...)` initialization that was
                # immediately overwritten has been removed.)
                h_curr = int(h*k)
                workers = []
                for vec in vecs:
                    w = TopHCS(d=d, c=c, r=r, h=h_curr, numBlocks=numBlocks, device=device)
                    w.store(vec)
                    print("")
                    workers.append(w)
                result = TopHCS.topKSum(workers, k)
                topHCSAcc[h_i, k_i] = (expected[expectedIndices] * result[expectedIndices]).nonzero().numel() / k
                print("h_curr = ", h_curr)
                topHCSL2[h_i, k_i] = (torch.sum((result - expected)**2))**0.5
        print('topHCS accuracy = {}'.format(topHCSAcc))
        print('CSVec accuracy = {}'.format(csvecAcc))
        numColors = len(hVals) + 1
        colors = plt.cm.plasma(np.linspace(0,1,numColors))
        axs[p, 0].set_xlabel("k")
        axs[p, 0].set_xscale('log')
        axs[p, 0].set_ylabel("index accuracy")
        axs[p, 0].set_title("index accuracy vs. k, sketch=({}, {})".format(r, c))
        axs[p, 0].plot(kVals, csvecAcc, marker='x', color=colors[0], label="CSVec (h=0)")
        axs[p, 1].set_xlabel("k")
        axs[p, 1].set_ylabel("L2 reconstruction error")
        axs[p, 1].set_xscale('log')
        axs[p, 1].set_title("L2 reconstruction error vs. k")
        axs[p, 1].plot(kVals, csvecL2, marker='x', color=colors[0], label="CSVec (h=0)")
        for h_i, h in enumerate(hVals):
            axs[p, 0].plot(kVals, topHCSAcc[h_i], marker='x', color=colors[h_i+1], label="h = k*{}".format(h))
            axs[p, 1].plot(kVals, topHCSL2[h_i], marker='x', color=colors[h_i+1], label="h = k*{}".format(h))
        axs[p, 0].legend()
        axs[p, 1].legend()

plt.savefig(graphPath)
<file_sep>/topHCS/__init__.py
from .topHCS import TopHCS
<file_sep>/topHCS/topHCS.py
import math
import numpy as np
import copy
import torch
from csvec import CSVec
def topk(vec, k):
    """Return a tensor like vec that keeps only its k largest-magnitude entries.

    All other positions are zero. For k == 0 an all-zero tensor is returned.
    """
    result = torch.zeros_like(vec)
    if k == 0:
        return result
    # Sort by squared value (== magnitude order); the last k indices are the winners.
    keepIndices = torch.sort(vec**2)[1][-k:]
    result[keepIndices] = vec[keepIndices]
    return result
class TopHCS(object):
    """One worker's compressed vector: the top-h entries kept exactly in
    self.topH, the remaining d-h entries stored in a Count Sketch (CSVec)."""

    def __init__(self, d, c, r, h, numBlocks, device='cpu'):
        # d: vector dimension; c/r: sketch columns/rows; h: number of
        # exactly-kept heavy hitters; numBlocks: CSVec blocking factor.
        self.h, self.d = h, d
        self.device = device
        self.topH = torch.zeros(d, dtype=torch.float, device=self.device)
        self.csvec = CSVec(d=d, c=c, r=r, numBlocks=numBlocks, device=self.device)

    def zero(self):
        """Clear both the sketch and the topH tensor."""
        self.csvec.zero()
        self.topH = torch.zeros(self.d, dtype=torch.float, device=self.device)

    # formerly store(...)
    def accumulateVec(self, vec):
        """Compress vec: save its top-h elements in self.topH and sketch the
        remaining d-h elements into the CSVec.

        csvec and topH should be zero before storing.
        """
        # assert(self.topH.nonzero().numel() == 0)
        # changed this for commefficient optimizer
        self.topH = topk(vec, self.h).to(self.device)
        self.csvec.accumulateVec((vec - self.topH).to(self.device))

    # Backward-compatibility fix: the scripts in this repo (k_100000_h_16k.py,
    # old/testing/*) still call store(); keep it as an alias of accumulateVec
    # so the rename doesn't break them.
    store = accumulateVec

    def accumulateTable(self, table):
        """Add a raw sketch table into this worker's CSVec.

        Raises ValueError when the table size does not match this sketch.
        """
        if table.size() != self.csvec.table.size():
            msg = "Passed in table has size {}, expecting {}"
            raise ValueError(msg.format(table.size(), self.csvec.table.size()))
        else:
            self.csvec.accumulateTable(table)

    @classmethod
    def topKSum(cls, workers, k, unSketchNum=0):
        """Sum all workers' sketches and topH tensors, then return the top-k
        of (exact topH sum + unsketched estimate).

        unSketchNum=0 means "unsketch all d coordinates".
        """
        assert isinstance(workers, list), "workers must be a list"
        sketchSum = copy.deepcopy(workers[0].csvec)
        sketchSum.zero()
        topHSum = torch.zeros_like(workers[0].topH)
        for w in workers:
            sketchSum.accumulateTable(w.csvec.table)
            topHSum += w.topH
        d = len(topHSum)
        unSketchNum = d if (unSketchNum == 0) else unSketchNum
        unSketchedSum = sketchSum.unSketch(k=unSketchNum)
        if topHSum.size() != unSketchedSum.size():
            msg = "topHSum has size {}, unSketchedSum size {}"
            raise ValueError(msg.format(topHSum.size(), unSketchedSum.size()))
        ret = topk(topHSum + unSketchedSum, k)
        return ret
| 37aceb7f0f6b594d6796d7e7ab4d6d1def8460bb | [
"Markdown",
"Python"
] | 10 | Python | sunahhlee/TopHCS | 23c29ca530582d9d61cde61426a27b71e2a38d37 | 4b811dc448a976470045bcc8fc3b9861925b3192 |
refs/heads/master | <file_sep>import pygame, sys
from game import Game
# clear the program
# update the game
# draw the game
# 1) Behaviour
# 2) Constants
# 3) Data Definitions
# 4) Functions
'''
Behaviours:-
Gameover:
- snake hits the edge of the screen
- snake touches itself
Snake Movement:
- body trails its head
Snake:
- Eats food and grows by one
Score:
- How much food has been eaten
Menu Screen:
- Shows one time at the beginning of game
- disappears on any keypress
Game over:
- Displays when snake hits wall or eats itself
- will go back a new game on any keypress
Key input:
- arrow keys and wasd change snake direction
'''
'''
Constants:-
- Colors
- Window dimensions
- size of cells
- frame rate
- Cell width and height
'''
# [{"x": 1, "y": 2}]
# Module-level pygame setup shared by the whole game.
pygame.init()
DISPLAYSURF = pygame.display.set_mode((800, 600))  # main window surface
BASICFONT = pygame.font.Font('freesansbold.ttf', 18)  # UI font (file must sit next to the script)
CLOCK = pygame.time.Clock()  # frame-rate limiter
pygame.display.set_caption('Snake Xenzia')
def main():
    """Run the game loop, then terminate the interpreter."""
    game = Game()
    game.run()
    # Bug fix: `sys.exit` was referenced without calling it, so the statement
    # did nothing; sys.exit() actually exits once the game loop returns.
    sys.exit()
print("GAME RUNS!")
main() | ce5d909a6bf1772bed1e62dc7d5745fe4d80af80 | [
"Python"
] | 1 | Python | Bilal815/SnakeXenxia | ea700f71d1646eea2b1cd4745eded573564dfbe1 | f715c788fad29ea837f8a9064aa15bf73f695ded |
refs/heads/master | <file_sep>from iqoptionapi.stable_api import IQ_Option
import time, json
from datetime import datetime
from dateutil import tz # pip install python-dateutil
import sys
import fileinput
import os
import getpass
error_password="""{"code":"invalid_credentials","message":"You entered the wrong credentials. Please check that the login/password is correct."}"""
############################################################################################
# Banner do CLI
############################################################################################
def bannerProject():
    """Clear the terminal and print the CLI banner."""
    # Consistency fix: plain "cls" only works on Windows; the rest of this
    # script already uses "cls || clear", which works on Unix shells too.
    os.system("cls || clear")
    print("""
 .______ ______ .______ ______ __
 | _ \ / __ \ | _ \ / __ \ | | _
 | |_) | | | | | | |_) | | | | | | | _| |_
 | / | | | | | _ < | | | | | | |_ _|
 | |\ \----.| `--' | | |_) | | `--' | | | |_|
 | _| `._____| \______/ |______/ \______/ |__|
 """)
############################################################################################
# Pergunta o Email e Senha do Usuário
############################################################################################
def consultInformation():
    """Prompt for the user's IQ Option e-mail and password.

    Returns (email, password); the password is read without echoing.
    """
    print("""[] Seja bem vindo ao seu robô I+.
 [] Por favor, preencha os dados abaixo. \n""")
    print("\n[ LOGIN ] Seu email:")
    email = input()
    print("\n[ LOGIN ] Sua senha:")
    # Bug fix: the scrubbed "<PASSWORD>" placeholder was invalid syntax.
    # getpass (imported at module level) reads the password without echo.
    password = getpass.getpass('')
    return email, password
############################################################################################
# Conecta o Usuário com a API
############################################################################################
def apiConnect(email, password):
    """Connect to the IQ Option API and switch to the practice account.

    Returns (API handle, loop delay in seconds, connect success flag, reason).
    """
    api = IQ_Option(email, password)
    loopDelay = 10
    ok, reason = api.connect()
    api.change_balance("PRACTICE")
    return api, loopDelay, ok, reason
############################################################################################
# Consulta informações do usuário na API (Copiei do MAV mesmo KKKKKKKKKK)
############################################################################################
def perfil():
    """Fetch the logged-in user's profile from the global API as a plain dict."""
    # round-trip through JSON to force plain built-in types
    dados = json.loads(json.dumps(API.get_profile_ansyc()))
    return dados
'''
name
first_name
last_name
email
city
nickname
currency
currency_char
address
created
postal_index
gender
birthdate
balance
'''
############################################################################################
# Carrega a lista de sinais com o arquivo (sinais.txt)
############################################################################################
def carregar_sinais():
    """Load the trading signals from sinais.txt, one signal per line.

    Blank lines are skipped. Returns a list of raw signal strings.
    """
    # Bug fixes: the old code called `arquivo.close` without parentheses (the
    # file was never closed) and deleted blank entries while enumerating the
    # list, which skips consecutive blanks. `with` + a filter handles both.
    with open('sinais.txt', encoding='UTF-8') as arquivo:
        conteudo = arquivo.read()
    return [linha for linha in conteudo.split('\n') if linha.rstrip()]
############################################################################################
# Painel de Controle Principal
############################################################################################
def painelControle(credencialPerfil):
    """Print the main control-panel header with the user's name, balance and currency.

    credencialPerfil is the profile dict returned by perfil().
    """
    print("""\n\n
 _ _ _ _ _
 ___ ___|_|___ ___| | _| |___ ___ ___ ___| |_ ___ ___| |___
| . | .'| | | -_| | | . | -_| | | _| . | | _| _| . | | -_|
| _|__,|_|_|_|___|_| |___|___| |___|___|_|_|_| |_| |___|_|___|
 |_|
 """)
    print(""" ---------------------------------------------------------------------""")
    print(" Seja bem vindo " + str(credencialPerfil['name']) + ". ")
    print(" Você tem um total de " + str(credencialPerfil['balance']) + " na sua conta.")
    print(" Atualmente sua moeda é " + str(credencialPerfil['currency']) + ".")
    print(""" ---------------------------------------------------------------------""")
############################################################################################
# A opção do menu abaixo do Painel de Controle
############################################################################################
def switchControlPainel():
    """Show the menu under the control panel and return the chosen option as int."""
    print(" Para continuar selecione uma opção.\n")
    print(""" [ Digite 1 ] Para usar o arquivo \"sinais.txt\".""")
    print(""" ---------------------------------------------------------------------""")
    # Robustness fix: int(input()) crashed with ValueError on non-numeric
    # input; re-prompt until the user types a number.
    while True:
        try:
            return int(input())
        except ValueError:
            print(" Opção inválida. Digite um número.")
############################################################################################
# Abre o arquivo sinais.txt e lê, transforma em JSON
############################################################################################
def lerSinais(lista):
    """Pretty-print each signal (date, time, pair, duration, stake, direction).

    Pauses for Enter after each signal so the user can read it.
    """
    # Bug fixes: the old code ignored its `lista` argument and re-read the
    # file via carregar_sinais(), and computed a json.dumps() whose result
    # was discarded. Both removed.
    contLine = 1  # 1-based line counter shown to the user
    for sinal in lista:
        dados = sinal.split(', ')
        os.system("cls || clear")
        print("\n\n[ TUDO CERTO ] Lendo arquivo.... \n \n")
        time.sleep(2)
        print("Informações da " + str(contLine) + "º linha. \n")
        print("Você selecionou para comprar no dia " + str(dados[0]) + ".")
        print("Na hora " + str(dados[1]) + ".")
        print("Com a paridade " + str(dados[2]) + ".")
        print("Duração de " + str(dados[3]) + ".")
        print("Com " + str(dados[4]) + " de entrada.")
        print("Com direção " + str(dados[5]) + ".")
        contLine += 1
        input()  # wait for Enter before showing the next signal
############################################################################################
# Chama as funções acima, isso aqui é a parte do Login e verificação dos Dados
############################################################################################
# NOTE(review): indentation was lost in this source; the nesting below is
# reconstructed from the statement order — verify against the original file.
bannerProject()
email, password = consultInformation()
API, while_run_time, check, reason = apiConnect(email, password)

############################################################################################
# If the initial API connection succeeded, run the main menu loop.
############################################################################################
if check:
    os.system("cls || clear")
    print("\n\n[ CONECTADO ] Bem vindo! Aguarde....")
    time.sleep(3)
    while True:
        if API.check_connect()==False:#detect the websocket is close
            print("Tentando reconectar...")
            check,reason=API.connect()
        if check:
            os.system("cls || clear")
            # Post-connection state: fetch profile, show panel, read the menu choice.
            credencialPerfil = perfil()
            painelControle(credencialPerfil)
            time.sleep(2)
            switchControlPainelOption = switchControlPainel()
            if switchControlPainelOption == 1:
                time.sleep(3)
                # Option 1: read the signal file and display every line.
                lista = carregar_sinais()
                lerSinais(lista)
                print("\n\n\nAcabaram todas as linhas. Obrigado por usar o Bot.")
                exit()
        else:
            # Reconnection failed: distinguish bad credentials from no network.
            if reason==error_password:
                print("Erro! Senha inválida.")
                exit()
            else:
                print("Sem conexão com internet!")
                exit()
else:
    # Initial connection failed: report why.
    if reason=="[Errno -2] Name or service not known":
        print("Sem conexão com internet!")
    elif reason==error_password:
        print("Erro! Senha inválida.")
<file_sep><h2>Bot IQOption</h2>
<hr>
<p>Um pequeno bot, interativo via CLI, usando API (não oficial) da IQOption.</p> | 3aee922e7375a40b10dbbcb976db5073acaf753d | [
"Markdown",
"Python"
] | 2 | Python | hugovst10/iq | c02c3661e6c25eec07fcfed3aae8541cb621bf7b | 91e9d521b0db0905f31adc14a827b8992379baca |
refs/heads/main | <file_sep>package main
import (
"bytes"
"crypto/sha256"
"fmt"
"strconv"
"time"
)
// Block is a single unit of the chain: a payload plus its own hash and a
// link (hash) to the previous block.
type Block struct {
	Timestamp     int64  // Unix time the block was created
	Data          []byte // arbitrary payload (e.g. a transaction note)
	PrevBlockHash []byte // hash of the preceding block
	Hash          []byte // SHA-256 over prev hash + data + timestamp (set by SetHash)
}
// SetHash computes the block's SHA-256 digest over PrevBlockHash, Data and
// the decimal timestamp, and stores it in b.Hash.
func (b *Block) SetHash() {
	ts := []byte(strconv.FormatInt(b.Timestamp, 10))
	payload := bytes.Join([][]byte{b.PrevBlockHash, b.Data, ts}, []byte{})
	digest := sha256.Sum256(payload)
	b.Hash = digest[:]
}
// NewBlock builds a block holding data, links it to prevBlockHash, and
// computes its hash.
func NewBlock(data string, prevBlockHash []byte) *Block {
	blk := &Block{
		Timestamp:     time.Now().Unix(),
		Data:          []byte(data),
		PrevBlockHash: prevBlockHash,
		Hash:          []byte{},
	}
	blk.SetHash()
	return blk
}
// NewGenesisBlock creates the first block of the chain, which has no
// predecessor (empty PrevBlockHash).
func NewGenesisBlock() *Block {
	return NewBlock("Genesis Block", []byte{})
}
// BlockChain is an append-only, ordered chain of blocks.
// (Doc comment fixed to start with the type name, per Go convention.)
type BlockChain struct {
	blocks []*Block // oldest (genesis) first
}
// AddBlock appends a new block carrying data, chained to the current tip.
func (bc *BlockChain) AddBlock(data string) {
	tip := bc.blocks[len(bc.blocks)-1]
	bc.blocks = append(bc.blocks, NewBlock(data, tip.Hash))
}
// NewBlockChain creates a chain seeded with the genesis block.
func NewBlockChain() *BlockChain {
	genesisBlock := NewGenesisBlock()
	return &BlockChain{[]*Block{genesisBlock}}
}
// main demos the chain: add two blocks, then dump each block's fields.
func main() {
	bc := NewBlockChain()
	bc.AddBlock("Send 1700 AKT to Adam")
	bc.AddBlock("Send 300 AKT to anon")
	for _, block := range bc.blocks {
		fmt.Printf("Data: %s\n", block.Data)
		fmt.Printf("Hash: %x\n", block.Hash)
		fmt.Printf("Previous Hash: %x\n", block.PrevBlockHash)
	}
} | 2dd645bc772489ed9e16ba3ee1459c3c83551b02 | [
"Go"
] | 1 | Go | sirajadam/go-blockchain | 97824a30db9bc6fa3ceede1daffb7abd84b0b5c6 | 207b28a05eb7e80d2c7dcd6644b1be312e62c458 |
refs/heads/master | <file_sep>DROP TABLE main.horses CASCADE;
DROP TABLE main.races CASCADE;<file_sep>import os
import re
def pdf2xml(pdfDir, xmlDir):
    """Convert every .pdf in pdfDir to a .txt in xmlDir via Ghostscript (gs).

    Non-PDF files are skipped. Both directory arguments get a trailing '/'
    appended if missing. (The os.system call on the following line executes
    the command built here.)
    """
    # Robustness fix: `pdfDir[-1]` raised IndexError on an empty string.
    if not pdfDir.endswith('/'):
        pdfDir += '/'
    if not xmlDir.endswith('/'):
        xmlDir += '/'
    files = os.listdir(pdfDir)
    print('Converting ', str(len(files)), 'files')
    for file in files:
        if re.search(r'\.pdf', file) is None:
            continue
        address = "'" + pdfDir + file + "'"
        # Bug fix: re.sub('pdf', 'txt', ...) replaced the FIRST occurrence of
        # "pdf" anywhere in the name (e.g. "pdfchart.pdf" -> "txtchart.pdf");
        # anchor the substitution to the extension instead.
        output = "'" + xmlDir + re.sub(r'\.pdf$', '.txt', file) + "'"
        command = 'gs -sDEVICE=txtwrite -dTextFormat=0 -o '+ output + ' ' + address
os.system(command)<file_sep>import psycopg2
import psycopg2.extras
import os
from convertPDF.driver import parseFullDay
def generateEntries(txtFolderAddress):
    """Parse every chart .txt file under txtFolderAddress into DB row tuples.

    Returns {'races': [...], 'horses': [...]}, where each element is a tuple
    in the exact column order expected by populateRaces / populateHorses.
    Duplicate races (same track, date, race number) are inserted only once.

    NOTE(review): indentation reconstructed from a whitespace-mangled source;
    verify nesting against the original file.
    """
    entries = {'races': [], 'horses': []}
    fileNames = os.listdir(txtFolderAddress)
    totalFiles = len(fileNames)
    print('Converting', totalFiles, 'files')
    fileCnt = 0
    alreadyPulled = []  # (track, date, raceNum) primary keys seen so far
    for fileName in fileNames:
        #if fileCnt > 1500:
        #    print(fileName)
        if fileCnt % 300 == 0:
            print(fileCnt)  # progress indicator
        fileCnt += 1
        with open(txtFolderAddress + '/' + fileName) as file:
            chart = file.readlines()
            dayEntries = parseFullDay(chart)
            for raceEntries in dayEntries:
                if raceEntries == {}:
                    continue  # parser produced nothing for this race
                # De-duplicate on the race's natural key.
                racePk = (raceEntries['general']['trackName'], str(raceEntries['general']['month']) + '/' + str(raceEntries['general']['day']) + '/' + str(raceEntries['general']['year']), str(raceEntries['general']['raceNum']))
                if racePk in alreadyPulled:
                    continue
                alreadyPulled.append(racePk)
                # One tuple per race; dictTry supplies 'N/A' for missing keys.
                entries['races'].append(
                    (
                        raceEntries['general']['trackName'],
                        str(raceEntries['general']['month']) + '/' + str(raceEntries['general']['day']) + '/' + str(raceEntries['general']['year']),
                        str(raceEntries['general']['raceNum']),
                        dictTry(raceEntries, ['general', 'stakes']),
                        dictTry(raceEntries, ['general', 'distance']),
                        dictTry(raceEntries, ['general', 'surface']),
                        dictTry(raceEntries, ['general', 'weather']),
                        dictTry(raceEntries, ['general', 'conditions']),
                        dictTry(raceEntries, ['general', 'startTime']),
                        dictTry(raceEntries, ['general', 'startNote']),
                        dictTry(raceEntries, ['general', 'segment1']),
                        dictTry(raceEntries, ['general', 'segment2']),
                        dictTry(raceEntries, ['general', 'segment3']),
                        dictTry(raceEntries, ['general', 'segment4']),
                        dictTry(raceEntries, ['general', 'segment5']),
                        dictTry(raceEntries, ['general', 'segments']),
                        dictTry(raceEntries, ['times', 'fracTime1']),
                        dictTry(raceEntries, ['times', 'fracTime2']),
                        dictTry(raceEntries, ['times', 'fracTime3']),
                        dictTry(raceEntries, ['times', 'fracTime4']),
                        dictTry(raceEntries, ['times', 'fracTime5']),
                        dictTry(raceEntries, ['times', 'finalTime']),
                        dictTry(raceEntries, ['times', 'runUp']),
                        dictTry(raceEntries, ['bet','wpsPool']),
                        dictTry(raceEntries, ['bet', 'firstPlaceWin']),
                        dictTry(raceEntries, ['bet', 'firstPlacePlace']),
                        dictTry(raceEntries, ['bet', 'firstPlaceShow']),
                        dictTry(raceEntries, ['bet', 'secondPlacePlace']),
                        dictTry(raceEntries, ['bet', 'secondPlaceShow']),
                        dictTry(raceEntries, ['bet', 'thirdPlaceShow']),
                        dictTry(raceEntries, ['bet', 'exactaBuyin']),
                        dictTry(raceEntries, ['bet', 'exactaFinish']),
                        dictTry(raceEntries, ['bet', 'exactaPayout']),
                        dictTry(raceEntries, ['bet', 'exactaPool']),
                        dictTry(raceEntries, ['bet','trifectaBuyin']),
                        dictTry(raceEntries, ['bet','trifectaFinish']),
                        dictTry(raceEntries, ['bet','trifectaPayout']),
                        dictTry(raceEntries, ['bet', 'trifectaPool']),
                        dictTry(raceEntries, ['bet','superfectaBuyin']),
                        dictTry(raceEntries, ['bet','superfectaFinish']),
                        dictTry(raceEntries, ['bet','superfectaPayout']),
                        dictTry(raceEntries, ['bet', 'superfectaPool']),
                        dictTry(raceEntries, ['bet','quinellaBuyin']),
                        dictTry(raceEntries, ['bet','quinellaFinish']),
                        dictTry(raceEntries, ['bet','quinellaPayout']),
                        dictTry(raceEntries, ['bet', 'quinellaPool']),
                    )
                )
                cnt = 0
                for horse in raceEntries['horse']:
                    program = horse['program']
                    # Locate this horse's runline row by program number;
                    # falls back to positional index when not found.
                    rlIndex = cnt
                    rlCnt = 0
                    for rlHorse in raceEntries['runline']:
                        if rlHorse['program'] == program:
                            rlIndex = rlCnt
                            break
                        rlCnt += 1
                    # NOTE(review): if no trainer/owner entry matches this
                    # program, `trainer`/`owner` is left over from the
                    # previous horse (or NameError on the first) — verify
                    # parseFullDay guarantees a match.
                    trainerCnt = 0
                    for trainerProgram in raceEntries['end'][0]['program']:
                        if trainerProgram == program:
                            trainer = raceEntries['end'][0]['trainer'][trainerCnt]
                            break
                        trainerCnt += 1
                    ownerCnt = 0
                    for ownerProgram in raceEntries['end'][1]['program']:
                        if ownerProgram == program:
                            owner = raceEntries['end'][1]['owner'][ownerCnt]
                            break
                        ownerCnt += 1
                    entries['horses'].append(
                        (
                            raceEntries['general']['trackName'],
                            str(raceEntries['general']['month']) + '/' + str(raceEntries['general']['day']) + '/' + str(raceEntries['general']['year']),
                            raceEntries['general']['raceNum'],
                            horse['program'],
                            horse['horse'],
                            horse['lastRaceDay'],
                            horse['lastRaceMonth'],
                            horse['lastRaceYear'],
                            horse['lastRaceTrack'],
                            horse['jockey'],
                            horse['weight'],
                            horse['m_e'],
                            horse['placePP'],
                            dictTry(horse, ['placeSeg1']),
                            horse['placeSeg2'],
                            horse['placeSeg3'],
                            horse['placeSeg4'],
                            horse['placeSeg5'],
                            horse['placeSeg6'],
                            horse['odds'],
                            horse['comments'],
                            horse['lastRaceNum'],
                            horse['lastRacePlace'],
                            dictTry(horse, ['lengthsSeg1'], naFlag=False),
                            horse['lengthsSeg2'],
                            horse['lengthsSeg3'],
                            horse['lengthsSeg4'],
                            horse['lengthsSeg5'],
                            horse['lengthsSeg6'],
                            dictTry(raceEntries, ['runline', 'rlLengthsSeg1'], [rlIndex]),
                            dictTry(raceEntries, ['runline', 'rlLengthsSeg2'], [rlIndex]),
                            dictTry(raceEntries, ['runline', 'rlLengthsSeg3'], [rlIndex]),
                            dictTry(raceEntries, ['runline', 'rlLengthsSeg4'], [rlIndex]),
                            dictTry(raceEntries, ['runline', 'rlLengthsSeg5'], [rlIndex]),
                            dictTry(raceEntries, ['runline', 'rlLengthsSeg6'], [rlIndex]),
                            dictTry(raceEntries, ['runline', 'rlPlaceSeg1'], [rlIndex]),
                            dictTry(raceEntries, ['runline', 'rlPlaceSeg2'], [rlIndex]),
                            dictTry(raceEntries, ['runline', 'rlPlaceSeg3'], [rlIndex]),
                            dictTry(raceEntries, ['runline', 'rlPlaceSeg4'], [rlIndex]),
                            dictTry(raceEntries, ['runline', 'rlPlaceSeg5'], [rlIndex]),
                            dictTry(raceEntries, ['runline', 'rlPlaceSeg6'], [rlIndex]),
                            trainer,
                            owner
                        )
                    )
                    cnt += 1
    return entries
def populateDB(dbConnection, entries):
    """Insert a generateEntries() result into both tables (races first)."""
    raceRows, horseRows = entries['races'], entries['horses']
    populateRaces(dbConnection, raceRows)
    populateHorses(dbConnection, horseRows)
def populateRaces(dbConnection, entries):
    """Bulk-insert race tuples (from generateEntries) into main.races.

    NOTE(review): nothing here commits the transaction — the caller must
    commit (or enable autocommit) or the inserts are rolled back.
    """
    with dbConnection.cursor() as cur:
        # 'replica' role disables triggers and FK checks for the bulk load.
        cur.execute("SET session_replication_role='replica';")
        psycopg2.extras.execute_batch(
            cur,
            """
            INSERT INTO main.races(
                track,
                date,
                race,
                stakes,
                distance,
                surface,
                weather,
                conditions,
                "startTime",
                "startNote",
                segment1,
                segment2,
                segment3,
                segment4,
                segment5,
                segments,
                "fracTime1",
                "fracTime2",
                "fracTime3",
                "fracTime4",
                "fracTime5",
                "finalTime",
                runup,
                "wpsPool",
                "firstPlaceWin",
                "firstPlacePlace",
                "firstPlaceShow",
                "secondPlacePlace",
                "secondPlaceShow",
                "thirdPlaceShow",
                "exactaBuyin",
                "exactaFinish",
                "exactaPayout",
                "exactaPool",
                "trifectaBuyin",
                "trifectaFinish",
                "trifectaPayout",
                "trifectaPool",
                "superfectaBuyin",
                "superfectaFinish",
                "superfectaPayout",
                "superfectaPool",
                "quinellaBuyin",
                "quinellaFinish",
                "quinellaPayout",
                "quinellaPool"
            )
            VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s,
                    %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s,
                    %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
            """,
            entries
        )
def populateHorses(dbConnection, entries):
    """Bulk-insert horse tuples (from generateEntries) into main.horses.

    NOTE(review): no commit here either — see populateRaces.
    """
    with dbConnection.cursor() as cur:
        # 'replica' role disables triggers and FK checks for the bulk load.
        cur.execute("SET session_replication_role='replica';")
        psycopg2.extras.execute_batch(
            cur,
            """
            INSERT INTO main.horses(
                track,
                date,
                race,
                "horseProgram",
                "horseName",
                "lastRaceDay",
                "lastRaceMonth",
                "lastRaceYear",
                "lastRaceTrack",
                jockey,
                weight,
                "ME",
                "placePP",
                "placeSeg1",
                "placeSeg2",
                "placeSeg3",
                "placeSeg4",
                "placeSeg5",
                "placeSeg6",
                odds,
                comments,
                "lastRaceNum",
                "lastRacePlace",
                "lengthsSeg1",
                "lengthsSeg2",
                "lengthsSeg3",
                "lengthsSeg4",
                "lengthsSeg5",
                "lengthsSeg6",
                "rlLengthsSeg1",
                "rlLengthsSeg2",
                "rlLengthsSeg3",
                "rlLengthsSeg4",
                "rlLengthsSeg5",
                "rlLengthsSeg6",
                "rlPlaceSeg1",
                "rlPlaceSeg2",
                "rlPlaceSeg3",
                "rlPlaceSeg4",
                "rlPlaceSeg5",
                "rlPlaceSeg6",
                trainer,
                owner
            )
            VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s,
                    %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s,
                    %s, %s, %s, %s, %s)
            """,
            entries
        )
def dictTry(dictionary, keys, listIndices=[], naFlag=True):
    """Walk a nested dict/list structure, consuming `keys` at dict levels and
    `listIndices` at list levels, in the order the structure dictates.

    Returns the value found, or 'N/A' (empty string when naFlag is False)
    if any key or index is missing along the way.
    """
    current = dict(dictionary)  # shallow copy of the top level
    keyPos = 0
    idxPos = 0
    for _ in range(len(keys) + len(listIndices)):
        if type(current) == list:
            try:
                current = current[listIndices[idxPos]]
            except IndexError:
                return 'N/A' if naFlag else ''
            idxPos += 1
        else:
            try:
                current = current[keys[keyPos]]
            except KeyError:
                return 'N/A' if naFlag else ''
            keyPos += 1
    return current
### DEBUG
# Ad-hoc manual run: parse every chart under ./../charts/txts and load the
# rows into a local Postgres instance.
if __name__ == '__main__':
    entries = generateEntries('./../charts/txts')
    # NOTE(review): hardcoded credentials — move to env vars/config before
    # sharing; also no conn.commit() follows populateDB, so verify autocommit.
    conn = psycopg2.connect(
        host = "localhost",
        database = "horses",
        user = "karisch",
        password = "<PASSWORD>",
        port = 5432
    )
    populateDB(conn, entries)
populateDB(conn, entries)<file_sep>import re
import pandas as pd
from .regexPatterns import *
def parseGenInfo(genLines):
    """Parse a chart's general-info header lines into a flat metadata dict.

    genLines[0] carries track/date/race number, genLines[1] the stakes info;
    the remaining lines are scanned for distance/surface, weather/conditions,
    start time/notes and segment labels (patterns come from regexPatterns).
    """
    dfDict = {}
    dfDict['trackName'], dfDict['month'], dfDict['day'], dfDict['year'], dfDict['raceNum'] = parseLine1(genLines[0])
    dfDict['stakes'] = parseLine2(genLines[1])
    for line in genLines[0:]:
        if re.search(distanceSurfaceLinePattern, line) is not None:
            dfDict['distance'], dfDict['surface'] = parseDistanceSurface(line)
        elif re.search(weatherConditionsLinePattern, line) is not None:
            dfDict['weather'], dfDict['conditions'] = parseWeatherConditions(line)
        elif re.search(startNotesLinePattern, line) is not None:
            dfDict['startTime'], dfDict['startNote'] = parseStart(line)
        elif re.search(segmentsLinePattern, line) is not None:
            dfDict['segment1'], dfDict['segment2'], dfDict['segment3'], dfDict['segment4'], dfDict['segment5'] = parseSegments(line)
            # Count how many segment labels are actually present
            # (an empty string means "absent").
            if dfDict['segment3'] == '':
                dfDict['segments'] = 2
            elif dfDict['segment4'] == '':
                dfDict['segments'] = 3
            elif dfDict['segment5'] == '':
                dfDict['segments'] = 4
            else:
                dfDict['segments'] = 5
    return dfDict
def parseLine1(line):
    """Parse the chart's first header line.

    Returns [trackAbbrev, month, day, year, raceNum] with the numeric parts
    as ints. Lethbridge-style charts (RMTC / CALLAWAY GARDEN markers) use a
    different line layout and therefore a different pattern.
    """
    simpleLine = re.sub(r'[^-A-Za-z]', '', line)
    if re.search(r'(RMTC|CALLAWAYGARDEN)', simpleLine) is not None: #check if track is lethbridge
        fullSearch = re.search(genInfoLine1LethbridgePattern, line)
    else:
        fullSearch = re.search(genInfoLine1TrackPattern, line)
    trackNameRaw = fullSearch.group(1)
    dateRaw = fullSearch.group(2)
    raceNumRaw = fullSearch.group(3)
    #track name -> abbreviated name (lookup table built at import time below)
    trackNameFull = re.sub('[^A-Za-z ]', '', trackNameRaw)
    trackName = trackLongToShort[trackNameFull]
    #date: "MonthName day year" -> numeric month via monthNameToNumber
    dateSearch = re.search(genInfoLine1DatePattern, dateRaw)
    monthRaw = dateSearch.group(1)
    month = monthNameToNumber[monthRaw]
    day = dateSearch.group(2)
    year = dateSearch.group(3)
    #race number
    raceNum = re.search(genInfoLine1RaceNumPattern, raceNumRaw).group(0)
    out = [trackName, int(month), int(day), int(year), int(raceNum)]
    return out
def parseLine2(line):
    """Extract the stakes grade from the chart's second header line.

    Returns the grade string, 'General' for an ungraded stakes race, or ''
    when the line is not a stakes race at all.
    """
    # The breed group is extracted only to validate the line's shape — the
    # .group(1) call raises AttributeError on a malformed header, matching
    # the original behavior. (A dead `breed = re.sub(...)` local that was
    # never used has been removed.)
    breedRaw = re.search(genInfoLine2BreedPattern, line).group(1)
    stakes = ''
    if re.search(stakesLinePattern, line) is not None:
        stakesSearch = re.search(gradePattern, line)
        if stakesSearch is None:
            stakes = 'General'
        else:
            stakes = stakesSearch.group(1)
    return stakes
def parseDistanceSurface(line):
    """Extract the race distance and surface from a general-info line.

    Returns [distance, surface]; both entries are 'ERROR' when the line
    cannot be matched.
    """
    fullSearch = re.search(distanceSurfaceFullSearchPattern, line)
    # Narrow the specific pattern down to the matched region when possible,
    # otherwise run it against the whole line.
    target = line if fullSearch is None else fullSearch.group(0)
    specSearch = re.match(distanceSurfaceSpecSearchPattern, target)
    if specSearch is None:
        print('Match error in parseDistanceSurface on line: ' + line)
        return ['ERROR', 'ERROR']
    distance = specSearch.group(1).strip()
    surface = specSearch.group(2).strip()
    # Surface changes are written like 'Turf - Originally Dirt'; keep only
    # the part before the dash.
    if re.search('- Originally', surface) is not None:
        surface = re.search(r'([A-Za-z ]+)-', surface).group(1).strip()
    surface = re.sub(r' Current', '', surface)
    return [distance, surface]
def parseWeatherConditions(line):
    """Extract the weather and track-condition words from a chart line.

    Returns [weather, conditions]; both entries are 'ERROR' when the line
    does not match.
    """
    match = re.search(weatherConditionsSearchPattern, line)
    if match is None:
        print('Match error in parseWeatherConditions on line: ' + line)
        return ['ERROR'] * 2
    return [match.group(1), match.group(2)]
def parseStart(line):
    """Extract the off time and the start note from the 'Off at:' line.

    Returns [startTime, startNote]; both entries are 'ERROR' when the line
    does not match.
    """
    match = re.search(startNotesSearchPattern, line)
    if match is None:
        print('Match error in parseStart on line: ' + line)
        return ['ERROR'] * 2
    return [match.group(1), match.group(2)]
def parseSegments(line):
    """Extract up to five point-of-call segment labels from the header line.

    Returns [segment1, ..., segment5]; unused trailing segments come back as
    empty strings.  Returns five 'ERROR' entries when the line does not
    match.
    """
    fullSearch = re.search(segmentsSearchPattern, line)
    if fullSearch is None:
        # Guard added for consistency with the sibling parse* helpers, which
        # all report and return 'ERROR' markers instead of crashing on
        # .group() of a failed match.
        print('Match error in parseSegments on line: ' + line)
        return ['ERROR'] * 5
    return [fullSearch.group(i) for i in range(1, 6)]
# Bidirectional lookup tables between abbreviated track codes and full track
# names, loaded from the tracks key CSV (no header row; column 0 = short
# code, column 1 = full name).
trackLongToShort = {}
trackShortToLong = {}
tracksDF = pd.read_csv('./../excel/tracks.csv', delimiter=',', header=None)
for rowNum in range(len(tracksDF)):
    shortName = tracksDF.iloc[rowNum, 0]
    longName = tracksDF.iloc[rowNum, 1]
    trackLongToShort[longName] = shortName
    trackShortToLong[shortName] = longName
monthNameToNumber = {
'January': 1,
'February': 2,
'March': 3,
'April': 4,
'May': 5,
'June': 6,
'July': 7,
'August': 8,
'September': 9,
'October': 10,
'November': 11,
'December': 12
}<file_sep>from convertPDF.infoFns.genInfoFns import parseDistanceSurface
from convertPDF.driver import parseFullDay
with open('./../charts/txts/eqbPDFChartPlus - 2021-06-25T165907.789.txt') as file:
full = file.read()
jack = parseFullDay(full)<file_sep>import re
import pandas as pd
from .regexPatterns import *
def parseRunlineInfo(runlineLines):
    """Parse the 'Past Performance Running Line Preview' section.

    Input: runlineLines, the chart lines of the running-line table (two
    header rows, then two lines per horse).
    Output: a list of dicts, one per horse, holding the program number plus
    the lengths ('rlLengthsSeg*') and positions ('rlPlaceSeg*') at up to six
    points of call.
    """
    # Drop the stray country-code lines such as ' (IRE)'.
    runlineLines = [x for x in runlineLines if re.search(r'^ \([A-Z]+\)$', x) is None]
    colsSearch = re.search(pointOfCallLinePattern, runlineLines[1])
    # When the first point of call is 'Start' there are no lengths for it.
    firstCallStart = colsSearch.group(1) == 'Start'
    runlineDicts = []
    priorLineBottom = True
    missingTopLineInd = []
    # Detect horses whose top (lengths) line is missing, which can only
    # happen if the horse dropped out before the first point of call: such
    # a bottom line follows directly after another bottom line.
    # NOTE(review): when the line is a bottom line but the previous one was
    # not, priorLineBottom is reset to False here (unlike the analogous loop
    # in parseHorseInfo) -- confirm that asymmetry is intended.
    for i in range(len(runlineLines)):
        line = runlineLines[i]
        if firstCallStart:
            checkSearch = re.search(firstCallStartRLSearchPattern, line)
        else:
            checkSearch = re.search(firstCallNonStartRLSearchPattern, line)
        if checkSearch is not None and priorLineBottom:
            missingTopLineInd.append(i)
            priorLineBottom = True
        else:
            priorLineBottom = False
    for i in range(len(missingTopLineInd)):
        runlineLines.insert(missingTopLineInd[i], ' ')
        # Shift the remaining indices past the newly inserted spacer.
        missingTopLineInd = [x + 1 for x in missingTopLineInd]
    # BUG FIX: the horse lines and horse count must be taken AFTER the
    # spacer insertion above; previously they were sliced beforehand, so the
    # inserted spacer lines never reached the parsing loop below.
    lines = runlineLines[2:]
    numHorses = int(len(lines) / 2)
    for i in [x * 2 for x in list(range(numHorses))]:
        runlineDict = {}
        activeHorse = lines[i:i + 2]
        topItems = parseRunlineTopLine(activeHorse[0])
        bottomItems = parseRunlineBottomLine(activeHorse[1])
        #if all 6 segments have lengths
        if [x for x in topItems if x == ''] == []:
            runlineDict['rlLengthsSeg1'] = topItems[0] #fill in the first segment with the first length
            startInd = 1 #filling in the remaining lengths will follow with the next element
        else:
            runlineDict['rlLengthsSeg1'] = '' #otherwise it will be "Start" and won't have any lengths associated
            startInd = 0 #fill in remaining elements starting from the beginning of the list
        runlineDict['rlLengthsSeg2'] = topItems[startInd]
        runlineDict['rlLengthsSeg3'] = topItems[startInd + 1]
        runlineDict['rlLengthsSeg4'] = topItems[startInd + 2]
        runlineDict['rlLengthsSeg5'] = topItems[startInd + 3]
        runlineDict['rlLengthsSeg6'] = topItems[startInd + 4]
        runlineDict['program'] = bottomItems[0]
        runlineDict['rlPlaceSeg1'] = bottomItems[1]
        runlineDict['rlPlaceSeg2'] = bottomItems[2]
        runlineDict['rlPlaceSeg3'] = bottomItems[3]
        runlineDict['rlPlaceSeg4'] = bottomItems[4]
        runlineDict['rlPlaceSeg5'] = bottomItems[5]
        runlineDict['rlPlaceSeg6'] = bottomItems[6]
        runlineDicts.append(dict(runlineDict))
    return runlineDicts
def parseRunlineTopLine(line):
    """Split a running-line top row into its six lengths fields.

    Returns the six captured groups, or six 'ERROR' entries when the line
    does not match.
    """
    match = re.search(rlTopLineSearchPattern, line)
    if match is None:
        print('Match error in parseRunlineTopLine on line: ' + line)
        return ['ERROR'] * 6
    return [match.group(i) for i in range(1, 7)]
def parseRunlineBottomLine(line):
    """Split a running-line bottom row into program number plus six places.

    Returns the seven captured groups, or seven 'ERROR' entries when the
    line does not match.
    """
    match = re.search(rlBottomLineSearchPattern, line)
    if match is None:
        print('Match error in parseRunlineBottomLine on line: ' + line)
        return ['ERROR'] * 7
    out = [match.group(i) for i in range(1, 8)]
return out<file_sep>from convertPDF.webScrape.pdf2xml import *
from convertPDF.webScrape.xml2txt import *
bulkXml2Txt('./../charts/xmls', './../charts/temptxt')<file_sep>### BUILDING BLOCKS
# Reusable fragments composed into the larger patterns below.
horseProgramPattern = r'\d?\d[ABCXabcfx]?'  # program number, e.g. '7', '12A'
horsePlacePattern = r'[-0-9*]*'  # position at a point of call, may be empty
splitTimePattern = r'[0-9.:N/A]*'  # fractional time, possibly 'N/A'
### DRIVER
# Section-boundary markers used by driver.parseRace / parseFullDay.
newRaceTest = r'Copyright 202[0-9] Equibase Company LLC. All Rights Reserved.'
cancelledRace = r'Cancelled - '
quarterHorseRace = r'(- Quarter Horse|Arabian|Mixed)'
generalInfoCutoff = r'Last Raced Pgm'
horseInfoCutoff = r'Fractional Times:|[^*]Final Time:'
timesInfoCutoff = r'Run-Up: '
betInfoCutoff = r'Past Performance Running Line Preview'
endInfoCutoff = r'Trainers: '
### GENINFOFNS
distanceSurfaceLinePattern = r'(Current Track( Record:)?|Furlongs On (The )?(Dirt|Turf)$|Miles? On (The )?(Dirt|Turf)$)'
weatherConditionsLinePattern = r'Weather: [A-Za-z]+ Track:'
startNotesLinePattern = r'Off at: [0-9:]+ Start:'
segmentsLinePattern = r'Last Raced Pgm'
genInfoLine1TrackPattern = r' *([^-]+) - ([^-]+) - (.*)'
stakesLinePattern = r'([Ss]takes|TAKES)'
gradePattern = r'([Gg]rade \d|Listed|Black Type)'
# Lethbridge tracks carry a dash inside the track name, hence the variant.
genInfoLine1LethbridgePattern = r' *([^-]+-[^-]+) - ([^-]+) - ([^-]+)'
genInfoLine1DatePattern = r'([A-Za-z]*) (\d?\d), (\d\d\d\d)'
genInfoLine1RaceNumPattern = r'\d?\d'
genInfoLine2BreedPattern = r'- (.*)$'
distanceSurfaceFullSearchPattern = r'([- A-Za-z0-9/]+)(?=(Current )?Track)'
distanceSurfaceSpecSearchPattern = r' (.*) (?=On The)On The ([-A-Za-z ]*) ?'
weatherConditionsSearchPattern = r'Weather: ([A-Z][a-z]*) Track: ([A-Z][a-z]*)'
startNotesSearchPattern = r'Off at: (\d?\d:\d\d) Start: ([A-Z0-9][a-z0-9 ]*)'
segmentsSearchPattern = r'PP ([A-Za-z0-9/]+) ?([A-Za-z0-9/]*) ?([A-Za-z0-9/]*) ?([A-Za-z0-9/]*) ?([A-Za-z0-9/]*) ?Fin'
### HORSEINFOFNS
horseInfoBottomLineCheckPattern = r'^ (\d?\d[A-Z][a-z]{2}\d\d|---)'
horseInfoTopLineSearchPattern = r'^ ([0-9/A-Za-z]*) ?([0-9/A-Za-z]*) ?([0-9/A-Za-z]*) ?([0-9/A-Za-z]*) ?([0-9/A-Za-z]*) ?([0-9/A-Za-z]*) ?([0-9/A-Za-z]*) ?([0-9/A-Za-z]*)'
horseInfoBottomLineSearchPattern = r'^ (\d?\d[A-Z][a-z]{2}\d\d [A-Z]{2,3}|---) (' + horseProgramPattern + r') ([^0-9]+) (\d?\d?\d)[»½¶]* ([ABCLM1]+|[ABCLM1]+ [23abcfghijklnopqrsvWwxyz]+|- -|[23abcfghijklnopqrsvWwxyz]+) (' + horsePlacePattern + r') ?(' + horsePlacePattern + r') ?(' + horsePlacePattern + r') ?(' + horsePlacePattern + r') ?(' + horsePlacePattern + r') ?(' + horsePlacePattern + r') ?(' + horsePlacePattern + r') ([0-9]+\.\d\d)?\*? ?(.*)$'
horseInfoDateSearchPattern = r'(\d?\d)([A-Z][a-z]{2})(\d\d) ([A-Z]{2,3})'
horseJockeySearchPattern = r"(.*) ?\(([-A-Za-z,. ']+)\)"
### TIMESINFOFNS
fractionalTimesLinePattern = r'Fractional Times:|Final Time:'
runupLinePattern = r'Run-Up:'
fractionalTimesSearchPattern = r'(Fractional Times: (' + splitTimePattern + r') ?(' + splitTimePattern + r') ?(' + splitTimePattern + r') ?(' + splitTimePattern + r') ?(' + splitTimePattern + r'))? Final Time: (' + splitTimePattern + r')'
runupSearchPattern = r'Run-Up: ([0-9.]*)'
#### BETINFOFNS
WPSLinePattern = r'Total WPS Pool'
betsLinePattern = r'Pgm Horse Win'
advancedBetsLinePattern = r'(Exacta|Trifecta|Superfecta|Quinella|Perfecta)'
buyinPattern = r'(\$\d\.\d\d)'
WPSSearchPattern = r'Total WPS Pool: \$([0-9,]*)'
# The three empty groups () keep every betLine*Pattern at four groups so
# parseBetLine can treat the layouts uniformly.
betLineOneEntryPattern = r'^ ' + horseProgramPattern + r' [^0-9]+ (\d?\d?\d\.\d\d)()()(.*)$'
betLineTwoEntryPattern = r'^ ' + horseProgramPattern + r' [^0-9]+ (\d?\d?\d\.\d\d) (\d?\d\.\d\d)()(.*)$'
betLineThreeEntryPattern = r'^ ' + horseProgramPattern + r' [^0-9]+ (\d?\d?\d\.\d\d) (\d?\d\.\d\d) (\d?\d\.\d\d)(.*)$'
betLineNoEntryPattern = r'^ ' + horseProgramPattern + r' [^0-9]+()()()(\$.*)?$'
additionalBetLineSearchPattern = r'([0-9.$ A-Za-z,]*) ([0-9-/ABCL* ]*) (\([-0-9A-Za-z ]+\) )?([0-9,.]+\.\d\d) ([0-9,.]*)( [0-9,.]*)?$'
### RUNLINEINFOFNS
pointOfCallLinePattern = r'^ Pgm Horse Name (Start|[0-9/]+)'
# NOTE(review): the following two patterns are currently identical; if the
# start / non-start first-call cases are meant to differ, one of them may be
# wrong -- confirm against the charts.
firstCallStartRLSearchPattern = r'^ (' + horseProgramPattern + r') [^0-9]+ \d?\d[ABC]?( ---)+$'
firstCallNonStartRLSearchPattern = r'^ (' + horseProgramPattern + r') [^0-9]+ \d?\d[ABC]?( ---)+$'
rlTopLineSearchPattern = r'([-0-9/A-Za-z]+) ?([-0-9/A-Za-z]*) ?([-0-9/A-Za-z]*) ?([-0-9/A-Za-z]*) ?([-0-9/A-Za-z]*) ?([-0-9/A-Za-z]*)$'
rlBottomLineSearchPattern = r'^ (' + horseProgramPattern + r') [^0-9]+ (' + horsePlacePattern + r') ?(' + horsePlacePattern + r') ?(' + horsePlacePattern + r') ?(' + horsePlacePattern + r') ?(' + horsePlacePattern + r') ?(' + horsePlacePattern + r')$'
### ENDITEMSFNS
trainerLinePattern = r'^ Trainers:'
ownerLinePattern = r'^ Owners:'
footnoteLinePattern = r'^ Footnotes$'
trainerFullSearchPattern = r'( \d?\d - [^;]+;)+'
trainerShortSearchPattern = r'(\d?\d) - (.+)$'
ownerFullSearchPattern = r'( \d?\d - ?[^;]+;)+'
ownerShortSearchPattern = r'^ (\d?\d) - ?(.+)$'<file_sep>from bs4 import BeautifulSoup
import re
import os
"""
function to return all the unique rows found in a single race pdf, sorted by page number and line number
input: soupObj, a BeautifulSoup object of the parsed xml of a race pdf
output: a list of dicts, each representing a different page, each mapping a particular line in the pdf to a line in the output txt
"""
def uniqueXmlRows(soupObj):
    """Map pdf row coordinates to output-txt line numbers, one dict per page.

    Input: soupObj, a BeautifulSoup object of the parsed xml of a race pdf.
    Output: a list of dicts, one per <page>, mapping each distinct vertical
    bbox coordinate (in ascending order) to its 0-based output line index.
    """
    # FIX: pattern is now a raw string (the plain-string \d escapes are
    # deprecated in Python); compiled once instead of per span.
    bboxPattern = re.compile(r'\d+ (\d+) \d+ (\d+)')
    pages = []
    for page in soupObj.find_all('page'):
        rowCoords = []
        for span in page.find_all('span'):
            bboxSearch = bboxPattern.search(span.get('bbox'))
            # group(1) is the second bbox number: the row's vertical position.
            rowCoords.append(int(bboxSearch.group(1)))
        uniqueCoords = sorted(set(rowCoords))
        pages.append({coord: idx for idx, coord in enumerate(uniqueCoords)})
    return pages
"""
function to generate a txt file from the xml of a pdf of a result chart from equibase
inputs:
soupObj: a BeautifulSoup object
outputFile: a string address for the output txt
outputs a txt file in the system at the address provided
"""
def createTxtFromXml(soupObj, outputFile):
    """Generate a txt file from the xml of an equibase result-chart pdf.

    Inputs:
        soupObj: a BeautifulSoup object of the pdf's xml
        outputFile: path of the txt file to write
    Writes one text line per distinct pdf row, reconstructing word spacing
    from the character bounding boxes.
    """
    # FIX: raw-string pattern (plain-string \d escapes are deprecated),
    # compiled once outside the per-character loop.
    charBboxPattern = re.compile(r'(\d+) (\d+) (\d+) \d+')  # begin-col, row, end-col
    lines = uniqueXmlRows(soupObj)
    pageList = soupObj.find_all('page')
    txt = []
    for i in range(len(pageList)):
        page = pageList[i]
        lastColInd = [0] * len(lines[i])  # rightmost column written so far, per output row
        pageTxt = [''] * len(lines[i])
        spanList = page.find_all('span')
        for span in spanList:
            charList = span.find_all('char')
            size = int(span.get('size')[0])  # first digit of the font size
            for char in charList:
                bbox = char.get('bbox')
                c = char.get('c')
                bboxSearch = charBboxPattern.search(bbox)
                if bboxSearch is None:
                    print('Error - no match')
                    print('Page =', i)
                    print('bbox =', bbox)
                    print('Character =', c)
                    # NOTE(review): blocks on stdin, so a bad bbox stalls
                    # unattended batch runs -- consider logging and
                    # continuing instead.
                    input('Press enter to continue')
                    continue
                beginCol = int(bboxSearch.group(1))
                pdfRow = int(bboxSearch.group(2))
                endCol = int(bboxSearch.group(3))
                txtRow = lines[i][pdfRow]
                # Append directly when this char abuts the previous one
                # (tiny size-6 glyphs tolerate a 2-unit gap); otherwise
                # start a new word with a space.
                if beginCol == lastColInd[txtRow] or (size == 6 and (beginCol - lastColInd[txtRow]) <= 2):
                    pageTxt[txtRow] += c
                else:
                    pageTxt[txtRow] += ' ' + c
                lastColInd[txtRow] = endCol
        txt += pageTxt
    txt = [x for x in txt if x != '']
    with open(outputFile, 'w') as file:
        for line in txt:
            file.write('%s\n' % line)
def bulkXml2Txt(xmlDir, txtDir):
    """Convert every xml chart file in xmlDir into a txt file in txtDir.

    Files already present in txtDir are skipped, as are files whose names
    do not contain '.txt'.  Progress is printed every 100 files.
    """
    if txtDir[-1] != '/':
        txtDir += '/'
    if xmlDir[-1] != '/':
        xmlDir += '/'
    filenames = os.listdir(xmlDir)
    # set for O(1) membership tests instead of scanning a list per file
    extantFiles = set(os.listdir(txtDir))
    print('Converting ' + str(len(filenames)) + ' files')
    cnt = 0
    for filename in filenames:
        cnt += 1
        # BUG FIX: the dot was previously unescaped (r'.txt'), which matched
        # any character followed by 'txt' rather than a literal '.txt'.
        if filename in extantFiles or re.search(r'\.txt', filename) is None:
            continue
        with open(xmlDir + filename) as file:
            rawXml = file.read()
        soup = BeautifulSoup(rawXml, 'html.parser')
        createTxtFromXml(soup, txtDir + filename)
        if cnt % 100 == 0:
            perc = cnt / len(filenames) * 100
print('Progress: %.1f%%' % perc)<file_sep>import re
import pandas as pd
from .regexPatterns import *
def parseHorseInfo(horseLines):
    """Parse the per-horse section of a race chart.

    Input: horseLines, two chart lines per horse (a top line with the
    lengths at each point of call, and a bottom line with program number,
    name, jockey, weight, places, odds and comments).
    Output: a list of dicts, one per horse, with the lengths merged in by
    placeLengths().
    """
    priorLineBottom = True
    missingTopLineInd = []
    # A horse's top line can be missing (only when the horse has never been
    # raced AND was in last place the whole race); detect that as two
    # consecutive bottom lines and remember where a spacer must go.
    for i in range(len(horseLines)):
        line = horseLines[i]
        if re.search(horseInfoBottomLineCheckPattern, line) is not None:
            if priorLineBottom:
                missingTopLineInd.append(i)
            priorLineBottom = True
        else:
            priorLineBottom = False
    for i in range(len(missingTopLineInd)):
        horseLines.insert(missingTopLineInd[i], ' ')
        # Shift the remaining indices past the newly inserted spacer.
        missingTopLineInd = [x + 1 for x in missingTopLineInd]
    # BUG FIX: count the horses AFTER inserting the spacer lines; counting
    # beforehand undercounted whenever a spacer was added (odd-length
    # input), silently dropping the last horse.
    numHorses = int(len(horseLines) / 2)
    topItemsList = []
    horseDicts = []
    #loop over each horse in the lines provided
    for i in [x * 2 for x in list(range(numHorses))]:
        horseDict = {}
        activeHorse = horseLines[i:i + 2]
        bottomItems = parseHorseBottomLine(activeHorse[1])
        topItems = parseHorseTopLine(activeHorse[0])
        horseDict['lastRaceDay'] = bottomItems[0]
        horseDict['lastRaceMonth'] = bottomItems[1]
        horseDict['lastRaceYear'] = bottomItems[2]
        horseDict['lastRaceTrack'] = bottomItems[3]
        horseDict['program'] = bottomItems[4]
        horseDict['horse'] = bottomItems[5].strip()
        horseDict['jockey'] = bottomItems[6]
        horseDict['weight'] = bottomItems[7]
        horseDict['m_e'] = bottomItems[8]
        horseDict['placePP'] = bottomItems[9]
        horseDict['placeSeg1'] = bottomItems[10]
        horseDict['placeSeg2'] = bottomItems[11]
        horseDict['placeSeg3'] = bottomItems[12]
        horseDict['placeSeg4'] = bottomItems[13]
        horseDict['placeSeg5'] = bottomItems[14]
        horseDict['placeSeg6'] = bottomItems[15]
        horseDict['odds'] = bottomItems[16]
        horseDict['comments'] = bottomItems[17]
        if horseDict['lastRaceDay'] == '':
            # First-time starter: the top line has no last-race number/place
            # prefix, so every top-line field is a lengths value.
            horseDict['lastRaceNum'] = ''
            horseDict['lastRacePlace'] = ''
            topItemsList.append(topItems)
        else:
            horseDict['lastRaceNum'] = topItems[0]
            horseDict['lastRacePlace'] = topItems[1]
            topItemsList.append(topItems[2:])
        horseDicts.append(horseDict)
    horseDicts = placeLengths(horseDicts, topItemsList)
    return horseDicts
def parseHorseTopLine(line):
    """Split a horse's top chart line into its eight fields.

    Returns the eight captured groups (lengths, optionally preceded by the
    last race number and place), or eight 'ERROR' entries when the line does
    not match -- a guard the sibling parse*Line helpers already had.
    """
    fullSearch = re.search(horseInfoTopLineSearchPattern, line)
    if fullSearch is None:
        print('Match error in parseHorseTopLine on line: ' + line)
        return ['ERROR'] * 8
    return [fullSearch.group(i) for i in range(1, 9)]
def parseHorseBottomLine(line):
    """Parse a horse's bottom chart line into an 18-element list.

    Layout: [day, month, year, track] of the last race (empty strings when
    the horse has no prior race, marked '---'), program number, horse name,
    jockey, then the eleven weight/equipment/place/odds/comment fields.
    Returns 18 'ERROR' entries when the line cannot be matched at all.
    """
    fullSearch = re.search(horseInfoBottomLineSearchPattern, line)
    if fullSearch is None:
        print('parseHorseBottomLine error on line: ' + line)
        out = ['ERROR'] * 18
        return out
    out = []
    dateSearch = re.search(horseInfoDateSearchPattern, fullSearch.group(1))
    if dateSearch is not None:
        out[:4] = [dateSearch.group(1), dateSearch.group(2), dateSearch.group(3), dateSearch.group(4)]
    else:
        # '---' marker: no prior race on record.
        out[:4] = [''] * 4
    out.append(fullSearch.group(2))
    horseAndJockey = fullSearch.group(3)
    hjSearch = re.search(horseJockeySearchPattern, horseAndJockey)
    if hjSearch is None:
        # BUG FIX: previously only printed the text and then crashed with
        # AttributeError on hjSearch.group(1); report and emit 'ERROR'
        # markers instead, matching the error convention above.
        print('parseHorseBottomLine horse/jockey error on: ' + horseAndJockey)
        out.extend(['ERROR', 'ERROR'])
    else:
        out.append(hjSearch.group(1))
        out.append(hjSearch.group(2))
    for i in range(4, 15):
        out.append(fullSearch.group(i))
    return out
def placeLengths(horseDicts, topItemsList):
    """Distribute each horse's lengths-behind values across its points of call.

    The chart omits a lengths figure whenever a horse was in last place at a
    point of call, so topItemsList cannot be zipped directly with the place
    columns.  This walks the points of call, works out the field size at
    each one (horses marked '---' have dropped out), and fills in the
    'lengthsSeg*' keys of each horse dict.  Mutates and returns horseDicts.
    """
    placeSegNames = ['placeSeg1','placeSeg2','placeSeg3','placeSeg4','placeSeg5','placeSeg6']
    lengthsSegNames = ['lengthsSeg1','lengthsSeg2','lengthsSeg3','lengthsSeg4','lengthsSeg5','lengthsSeg6']
    # topItemsFull: True when any horse has a value in its final top-line
    # slot, i.e. the first point of call carries lengths (not "Start").
    topItemsFull = False
    for items in topItemsList:
        if items[-1] != '':
            topItemsFull = True
            break
    #loop over the points of call of each race, looking at each horse in turn to determine if any have dropped out.
    #this is to ensure that the total number of horses in the race is known at each point of call.
    #this allows accurate placement of lengths for horses that were in last place at some point in the race.
    numHorses = []
    for seg in placeSegNames:
        tempNumHorses = len(horseDicts)
        for horseDict in horseDicts:
            if horseDict[seg] == '---': #if this horse was no longer in the race at this point of call
                tempNumHorses -= 1 #reduce horses in the race by one
        numHorses.append(str(tempNumHorses))
    # Realignment pass: when a horse has one or two more populated place
    # columns than lengths values, its lastRaceNum/lastRacePlace fields were
    # actually lengths, so shift them back into the lengths list.
    # NOTE(review): heuristic -- confirm the one- and two-column shift cases
    # against real charts.
    horseCnt = 0
    for horseDict in horseDicts:
        if topItemsFull: #if all the points of call have lengths associated (i.e. one of them is not "start")
            startSeg = 0 #start placing lengths at the first point of call
        else:
            startSeg = 1 #otherwise, start at the second point of call (the first non-start point of call)
        numHorsesInd = 0
        segCnt = 0
        for seg in placeSegNames[startSeg:]:
            if horseDict[seg] != '---' and horseDict[seg] != numHorses[numHorsesInd] and horseDict[seg] != '' and horseDict[seg] != 'N/A':
                segCnt += 1
            numHorsesInd += 1
        nonBlankCnt = sum([1 for x in topItemsList[horseCnt] if x != ''])
        if segCnt - nonBlankCnt == 1:
            topItemsList[horseCnt][1:] = topItemsList[horseCnt][:-1]
            topItemsList[horseCnt][0] = horseDict['lastRacePlace']
            horseDict['lastRacePlace'] = horseDict['lastRaceNum']
            horseDict['lastRaceNum'] = ''
        elif segCnt - nonBlankCnt == 2:
            topItemsList[horseCnt][2:] = topItemsList[horseCnt][:-2]
            topItemsList[horseCnt][0] = horseDict['lastRaceNum']
            topItemsList[horseCnt][1] = horseDict['lastRacePlace']
            horseDict['lastRacePlace'] = ''
            horseDict['lastRaceNum'] = ''
        horseCnt += 1
    ind = 0
    #loop over each point of call for each horse, inserting the appropriate lengths into the dataframe
    for horseDict in horseDicts:
        if topItemsFull: #if all the points of call have lengths associated (i.e. one of them is not "start")
            startSeg = 0 #start placing lengths at the first point of call
        else:
            startSeg = 1 #otherwise, start at the second point of call (the first non-start point of call)
        lengthsIndex = 0
        for segInd in range(startSeg, len(placeSegNames)):
            if horseDict[placeSegNames[segInd]] == numHorses[segInd]: #if horse was in last place at this point
                horseDict[lengthsSegNames[segInd]] = '' #lengths will be left blank
            else:
                horseDict[lengthsSegNames[segInd]] = topItemsList[ind][lengthsIndex] #if not, fill in the lengths of the next point
                lengthsIndex += 1 #then go to next index in the lengths list
        ind += 1
return horseDicts<file_sep>import re
import pandas as pd
from .regexPatterns import *
"""
This file contains functions that are called in order to parse the lines of the chart relating to betting info.
"""
def parseBetInfo(betLines):
    """
    Driver function to manage/delegate parsing of bet lines and generating dictionaries with info.
    Input: betLines, a list of strings, each string containing one line of the race chart
    Output: a dictionary containing the WPS pool, the win/place/show payouts
    for the top finishers, and buyin/finish/payout/pool entries for any
    exotic (exacta/trifecta/superfecta/quinella) bets found; an empty dict
    when the race had no betting.
    """
    betDict = {}
    wpsInd = -1
    ind = 0
    for line in betLines:
        if re.search(WPSLinePattern, line) is not None:
            wpsInd = ind
        elif re.search(betsLinePattern, line) is not None:
            betStartInd = ind
        ind += 1
    if wpsInd == -1:
        #print('No betting detected for race.')
        return {}
    # NOTE(review): betStartInd is only bound when a betsLinePattern line
    # was seen; a chart with a WPS line but no bets header would raise
    # UnboundLocalError below -- confirm that cannot occur.
    linesCleaned = [x for x in betLines[betStartInd+1:] if re.search(r'^ (\d?\d[ABC]?|\$\d)', x) is not None]
    wpsPool = parseWPS(betLines[wpsInd])
    betDict['wpsPool'] = wpsPool
    firstPlaceItems = parseBetLine(linesCleaned[0])
    secondPlaceItems = parseBetLine(linesCleaned[1])
    betDict['firstPlaceWin'] = firstPlaceItems[0]
    betDict['firstPlacePlace'] = firstPlaceItems[1]
    betDict['firstPlaceShow'] = firstPlaceItems[2]
    # The runner-up line has a place payout only when three payouts exist.
    if secondPlaceItems[2] != '':
        betDict['secondPlacePlace'] = secondPlaceItems[1]
        betDict['secondPlaceShow'] = secondPlaceItems[2]
    else:
        betDict['secondPlacePlace'] = secondPlaceItems[0]
        betDict['secondPlaceShow'] = secondPlaceItems[1]
    if len(linesCleaned) > 2:
        thirdPlaceItems = parseBetLine(linesCleaned[2])
        betDict['thirdPlaceShow'] = thirdPlaceItems[1] if thirdPlaceItems[1] != '' else thirdPlaceItems[0]
        # The exotic-bet lines follow the WPS block; the remainder field of
        # each payout line may also carry an exotic entry, hence the append.
        # NOTE(review): this section references thirdPlaceItems, so it is
        # kept inside the guard above; two-finisher results therefore skip
        # exotic parsing -- confirm intended.
        additionalLines = betLines[(betStartInd + 4):] + [firstPlaceItems[-1], secondPlaceItems[-1], thirdPlaceItems[-1]]
        for line in additionalLines:
            if line is None:
                line = ''
            keywordSearch = re.search(advancedBetsLinePattern, line)
            if keywordSearch is not None:
                keyword = re.sub(' ','',keywordSearch.group(1).lower())
                # 'perfecta' is an alias for exacta on some charts.
                if keyword == 'perfecta':
                    keyword = 'exacta'
                activeAdditional = parseAdditionalBetLines(line)
                if activeAdditional[0] != 'ERROR' and re.search(buyinPattern, activeAdditional[0]) is None:
                    print('Additional bet line parse error on line: ' + line)
                    betDict[keyword + 'Buyin'] = 'ERROR'
                else:
                    betDict[keyword + 'Buyin'] = re.search(buyinPattern, activeAdditional[0]).group(1) if activeAdditional[0] != 'ERROR' else 'ERROR'
                betDict[keyword + 'Finish'] = activeAdditional[1]
                betDict[keyword + 'Payout'] = activeAdditional[3]
                betDict[keyword + 'Pool'] = activeAdditional[4]
    return betDict
def parseWPS(line):
    """Extract the total WPS pool amount (comma-grouped digits) from its line.

    Returns the captured pool string, or 'ERROR' when the line does not
    match -- consistent with the other parse helpers, instead of crashing on
    .group() of a failed match.
    """
    fullSearch = re.search(WPSSearchPattern, line)
    if fullSearch is None:
        print('Match error in parseWPS on line: ' + line)
        return 'ERROR'
    return fullSearch.group(1)
def parseBetLine(line):
    """Parse a win/place/show payout line.

    Tries the three-, two-, one- and zero-entry layouts in order and returns
    the four captured groups of the first layout that matches (the payouts
    plus the trailing remainder of the line); four 'ERROR' entries when none
    match.
    """
    layouts = (
        betLineThreeEntryPattern,
        betLineTwoEntryPattern,
        betLineOneEntryPattern,
        betLineNoEntryPattern,
    )
    match = None
    for layout in layouts:
        match = re.search(layout, line)
        if match is not None:
            break
    if match is None:
        print('Error in parseBetLine on line: ' + line)
        return ['ERROR'] * 4
    return [match.group(i) for i in range(1, 5)]
def parseAdditionalBetLines(line):
    """Parse an exotic-bet line (exacta/trifecta/superfecta/quinella).

    Returns five captured fields: the bet description (containing the
    $ buyin), the finishing combination, an optional parenthesized note, the
    payout and the pool; all five are 'ERROR' when the line does not match.
    """
    fullSearch = re.search(additionalBetLineSearchPattern, line)
    out = []
    if fullSearch is None:
        print('Error in parseAdditionalBetLines on line: ' + line)
        for _ in range(1, 6):
            out.append('ERROR')
    else:
        for i in range(1, 6):
            out.append(fullSearch.group(i))
return out<file_sep>DELETE FROM main.races;
DELETE FROM main.horses;<file_sep>import re
import pandas as pd
from .regexPatterns import *
def parseEndInfo(endLines):
    """Parse the trailing section of a race chart (trainers and owners).

    Locates the 'Trainers:' and 'Owners:' runs (each may wrap over several
    lines, terminated by the owners / footnotes marker respectively), joins
    each run into a single string, and returns [trainersDict, ownersDict].
    """
    for lineNum, line in enumerate(endLines):
        if re.match(trainerLinePattern, line) is not None:
            trainerInd = [lineNum]
        elif re.match(ownerLinePattern, line) is not None:
            trainerInd.append(lineNum)
            ownerInd = [lineNum]
        elif re.match(footnoteLinePattern, line) is not None:
            ownerInd.append(lineNum)
    # Each physical line's last character is dropped when re-joining the
    # wrapped runs; the trainer run gets its trailing ';' restored.
    trainerPieces = [line[:-1] for line in endLines[trainerInd[0]:trainerInd[1]]]
    trainerLine = ''.join(trainerPieces) + ';'
    ownerPieces = [line[:-1] for line in endLines[ownerInd[0]:ownerInd[1]]]
    ownerLine = ''.join(ownerPieces)
    return [parseTrainerLine(trainerLine), parseOwnerLine(ownerLine)]
def parseTrainerLine(line):
    """Parse the joined 'Trainers:' run into parallel program/trainer lists.

    Returns {'program': [...], 'trainer': [...]} with one entry per
    ';'-separated 'NN - name' item.
    """
    endDict = {'program': [], 'trainer': []}
    fullSearch = re.search(trainerFullSearchPattern, line)
    # Drop the empty tail produced by the final ';'.
    entries = fullSearch.group(0).split(';')[:-1]
    for entry in entries:
        shortSearch = re.search(trainerShortSearchPattern, entry)
        endDict['program'].append(shortSearch.group(1))
        endDict['trainer'].append(shortSearch.group(2))
    return endDict
def parseOwnerLine(line):
    """Parse the joined 'Owners:' run into parallel program/owner lists.

    Returns {'program': [...], 'owner': [...]} with one entry per
    ';'-separated 'NN - name' item.
    """
    endDict = {'program': [], 'owner': []}
    fullSearch = re.search(ownerFullSearchPattern, line)
    # Drop the empty tail produced by the final ';'.
    split = fullSearch.group(0).split(';')[:-1]
    for item in split:
        shortSearch = re.search(ownerShortSearchPattern, item)
        endDict['program'].append(shortSearch.group(1))
        endDict['owner'].append(shortSearch.group(2))
return endDict<file_sep>import os
import re
import pandas as pd
from bs4 import BeautifulSoup
def renameHtml(folderAddress):
    """Rename every html file in folderAddress to '<date>.html'.

    The date is taken from the text after the comma in the page's first
    <center> element.
    """
    for fileName in os.listdir(folderAddress):
        oldPath = folderAddress + '/' + fileName
        with open(oldPath) as file:
            raw = file.read()
        soup = BeautifulSoup(raw, 'html.parser')
        date = re.search(r', (.*)', soup.center.text.strip()).group(1)
        os.rename(oldPath, folderAddress + '/' + date + '.html')
def generateTrackKey(htmlFileAddress, outputCsvAddress):
    """Build the tracks.csv key mapping short track codes to full names.

    Scrapes the track <option> list out of a saved equibase html page,
    appends a few tracks known to be missing from that list, and writes the
    result to <outputCsvAddress>/tracks.csv.
    """
    out = {'shortName': [], 'fullName': []}
    with open(htmlFileAddress) as file:
        raw = file.read()
    soup = BeautifulSoup(raw, 'html.parser')
    # NOTE(review): position-dependent scrape -- the track dropdown is
    # assumed to be the 89th <div> on the page; a site redesign breaks this.
    tracksRaw = soup.find_all('div')[88].find_all('option')
    # Start at 1 to skip the dropdown's placeholder option.
    for i in range(1, len(tracksRaw)):
        search = re.search(r'([A-Z0-9]{2,3}) +- (.*)', tracksRaw[i].text)
        shortName = search.group(1).strip()
        fullName = re.sub(r'[^A-Za-z ]', '', search.group(2).strip())
        out['shortName'].append(shortName)
        out['fullName'].append(fullName)
    # Tracks absent from the scraped dropdown, added by hand.
    extras = {
        'shortName': [
            'PMT',
            'UN',
            'EDR',
            'CHA',
        ],
        'fullName': [
            '<NAME>',
            'EASTERN OREGON LIVESTOCK SHO W',
            'ENERGY DOWNS',
            'CHARLESTON',
        ]
    }
    out['shortName'].extend(extras['shortName'])
    out['fullName'].extend(extras['fullName'])
    outdf = pd.DataFrame(out)
outdf.to_csv(outputCsvAddress + '/tracks.csv', index=False)<file_sep>import sys
import os
import re
import pandas as pd
if __name__ == '__main__':
from infoFns.genInfoFns import parseGenInfo
from infoFns.horseInfoFns import parseHorseInfo
from infoFns.timesInfoFns import parseTimeInfo
from infoFns.betInfoFns import parseBetInfo
from infoFns.runlineInfoFns import parseRunlineInfo
from infoFns.endInfoFns import parseEndInfo
from infoFns.regexPatterns import *
else:
from .infoFns.genInfoFns import parseGenInfo
from .infoFns.horseInfoFns import parseHorseInfo
from .infoFns.timesInfoFns import parseTimeInfo
from .infoFns.betInfoFns import parseBetInfo
from .infoFns.runlineInfoFns import parseRunlineInfo
from .infoFns.endInfoFns import parseEndInfo
from .infoFns.regexPatterns import *
def parseFullDay(fullChart):
    """Split a full day's chart into individual races and parse each one.

    Input: fullChart, an indexable sequence of chart lines.
    Output: a list with one parseRace() result per race.
    """
    # Sentinel -1 so the first race starts at index 0 after the +1 below.
    raceBoundaries = [-1]
    for lineNum in range(len(fullChart)):
        # The Equibase copyright footer marks the end of each race.
        if re.search(newRaceTest, fullChart[lineNum]) is not None:
            raceBoundaries.append(lineNum)
    allRaces = []
    for raceNum in range(len(raceBoundaries) - 1):
        raceLines = fullChart[raceBoundaries[raceNum] + 1:raceBoundaries[raceNum + 1]]
        allRaces.append(parseRace(raceLines))
    return allRaces
def parseRace(raceChart):
    """Parse one race's chart lines into a dict of section results.

    Scans the lines once to find the boundaries of each section (general
    info, per-horse info, times, betting, running line, end items), then
    delegates each slice to the matching parse* helper.  Returns {} for
    cancelled races and for non-thoroughbred (Quarter Horse/Arabian/Mixed)
    races.
    """
    cnt = 0
    #loop to find indexes for different parse sections
    # NOTE(review): the *Ind variables are only bound when their marker line
    # is found; a chart missing any marker would raise UnboundLocalError at
    # the slicing below -- presumably every non-cancelled chart has all of
    # them.
    for line in raceChart:
        if re.search(cancelledRace, line) is not None: #check for cancelled race first, if cancelled, return empty dictionary
            return {}
        elif re.search(quarterHorseRace, line) is not None: #also do not need to process quarter horses
            return {}
        elif re.search(generalInfoCutoff, line) is not None: #everything before this falls under "general info"
            genInd = cnt + 1
            horseInd = [cnt + 1]
        elif re.search(horseInfoCutoff, line) is not None: #after general info, need to process info for each horse
            horseInd.append(cnt)
            timesInd = [cnt]
        elif re.search(timesInfoCutoff, line) is not None: #after horses, timing and runup info
            timesInd.append(cnt + 1)
            betInd = [cnt + 1]
        elif re.search(betInfoCutoff, line) is not None: #after timing, betting info and additional horse info
            betInd.append(cnt)
            runLineInd = [cnt]
        elif re.search(endInfoCutoff, line) is not None: #finally, need to do trainers, owners and other ending info
            runLineInd.append(cnt)
            endInfoInd = cnt
        cnt += 1
    genItems = parseGenInfo(raceChart[:genInd])
    horseItems = parseHorseInfo(raceChart[horseInd[0]:horseInd[1]])
    timesItems = parseTimeInfo(raceChart[timesInd[0]:timesInd[1]])
    betItems = parseBetInfo(raceChart[betInd[0]:betInd[1]])
    runlineItems = parseRunlineInfo(raceChart[runLineInd[0]:runLineInd[1]])
    endItems = parseEndInfo(raceChart[endInfoInd:])
    outputDict = {
        'general': genItems,
        'horse': horseItems,
        'times': timesItems,
        'bet': betItems,
        'runline': runlineItems,
        'end': endItems
    }
    return outputDict
######### DEBUG
if __name__ == '__main__':
with open('./../charts/txts/eqbPDFChartPlus - 2021-07-10T204831.617.txt') as file:
full = file.readlines()
jack = parseFullDay(full)<file_sep>--
-- PostgreSQL database dump
--
-- Dumped from database version 12.7 (Ubuntu 12.7-0ubuntu0.20.04.1)
-- Dumped by pg_dump version 13.3
-- Started on 2021-06-22 01:08:26
SET statement_timeout = 0;
SET lock_timeout = 0;
SET idle_in_transaction_session_timeout = 0;
SET client_encoding = 'UTF8';
SET standard_conforming_strings = on;
SELECT pg_catalog.set_config('search_path', '', false);
SET check_function_bodies = false;
SET xmloption = content;
SET client_min_messages = warning;
SET row_security = off;
--
-- TOC entry 7 (class 2615 OID 16388)
-- Name: main; Type: SCHEMA; Schema: -; Owner: karisch
--
CREATE SCHEMA main;
ALTER SCHEMA main OWNER TO karisch;
SET default_tablespace = '';
SET default_table_access_method = heap;
--
-- TOC entry 204 (class 1259 OID 16397)
-- Name: horses; Type: TABLE; Schema: main; Owner: karisch
--
-- One row per horse per race; child of main.races (FK on track/date/race).
-- All fields are stored as text exactly as parsed from the charts.  The
-- "placeSeg*"/"lengthsSeg*" columns hold the position and lengths-behind at
-- each of up to six points of call; the "rl*" columns hold the same data
-- parsed from the running-line preview section of the chart.
CREATE TABLE main.horses (
    track character varying(200) NOT NULL,
    date character varying(100) NOT NULL,
    race character varying(50) NOT NULL,
    "horseProgram" character varying(50) NOT NULL,
    "horseName" character varying(200),
    "lastRaceDay" character varying(50),
    "lastRaceMonth" character varying(50),
    "lastRaceYear" character varying(50),
    "lastRaceTrack" character varying(50),
    jockey character varying(200),
    weight character varying(50),
    "ME" character varying(100),
    "placePP" character varying(50),
    "placeSeg1" character varying(50),
    "placeSeg2" character varying(50),
    "placeSeg3" character varying(50),
    "placeSeg4" character varying(50),
    "placeSeg5" character varying(50),
    "placeSeg6" character varying(50),
    odds character varying(50),
    comments character varying(300),
    "lastRaceNum" character varying(50),
    "lastRacePlace" character varying(50),
    "lengthsSeg1" character varying(50),
    "lengthsSeg2" character varying(50),
    "lengthsSeg3" character varying(50),
    "lengthsSeg4" character varying(50),
    "lengthsSeg5" character varying(50),
    "lengthsSeg6" character varying(50),
    "rlLengthsSeg1" character varying(50),
    "rlLengthsSeg2" character varying(50),
    "rlLengthsSeg3" character varying(50),
    "rlLengthsSeg4" character varying(50),
    "rlLengthsSeg5" character varying(50),
    "rlLengthsSeg6" character varying(50),
    "rlPlaceSeg1" character varying(50),
    "rlPlaceSeg2" character varying(50),
    "rlPlaceSeg3" character varying(50),
    "rlPlaceSeg4" character varying(50),
    "rlPlaceSeg5" character varying(50),
    "rlPlaceSeg6" character varying(50),
    trainer character varying(200),
    owner character varying(250)
);
ALTER TABLE main.horses OWNER TO karisch;
--
-- TOC entry 203 (class 1259 OID 16389)
-- Name: races; Type: TABLE; Schema: main; Owner: karisch
--
-- One row per race, keyed by (track, date, race).  All fields are stored as
-- text exactly as parsed from the charts; segment*/fracTime* hold the
-- point-of-call labels and fractional times, and the exacta/trifecta/
-- superfecta/quinella columns hold the exotic-wager buyin, finishing
-- combination, payout and pool.
CREATE TABLE main.races (
    track character varying(50) NOT NULL,
    date character varying(200) NOT NULL,
    race character varying(100) NOT NULL,
    stakes character varying(100),
    distance character varying(200),
    surface character varying(150),
    weather character varying(200),
    conditions character varying(200),
    "startTime" character varying(50),
    "startNote" character varying(300),
    segment1 character varying(50),
    segment2 character varying(50),
    segment3 character varying(50),
    segment4 character varying(50),
    segment5 character varying(50),
    segments character varying(50),
    "fracTime1" character varying(50),
    "fracTime2" character varying(50),
    "fracTime3" character varying(50),
    "fracTime4" character varying(50),
    "fracTime5" character varying(50),
    "finalTime" character varying(50),
    runup character varying(50),
    "wpsPool" character varying(100),
    "firstPlaceWin" character varying(50),
    "firstPlacePlace" character varying(50),
    "firstPlaceShow" character varying(50),
    "secondPlacePlace" character varying(50),
    "secondPlaceShow" character varying(50),
    "thirdPlaceShow" character varying(50),
    "exactaBuyin" character varying(100),
    "exactaFinish" character varying(100),
    "exactaPayout" character varying(100),
    "exactaPool" character varying(100),
    "trifectaBuyin" character varying(100),
    "trifectaFinish" character varying(100),
    "trifectaPayout" character varying(100),
    "trifectaPool" character varying(100),
    "superfectaBuyin" character varying(100),
    "superfectaFinish" character varying(100),
    "superfectaPayout" character varying(100),
    "superfectaPool" character varying(100),
    "quinellaBuyin" character varying(100),
    "quinellaFinish" character varying(100),
    "quinellaPayout" character varying(100),
    "quinellaPool" character varying(100)
);
ALTER TABLE main.races OWNER TO karisch;
--
-- TOC entry 2804 (class 2606 OID 16404)
-- Name: horses horses_pkey; Type: CONSTRAINT; Schema: main; Owner: karisch
--
ALTER TABLE ONLY main.horses
ADD CONSTRAINT horses_pkey PRIMARY KEY (track, date, race, "horseProgram");
--
-- TOC entry 2802 (class 2606 OID 16396)
-- Name: races races_pkey; Type: CONSTRAINT; Schema: main; Owner: karisch
--
ALTER TABLE ONLY main.races
ADD CONSTRAINT races_pkey PRIMARY KEY (track, date, race);
--
-- TOC entry 2805 (class 2606 OID 16405)
-- Name: horses fk_horses_to_races; Type: FK CONSTRAINT; Schema: main; Owner: karisch
--
ALTER TABLE ONLY main.horses
ADD CONSTRAINT fk_horses_to_races FOREIGN KEY (track, date, race) REFERENCES main.races(track, date, race) NOT VALID;
-- Completed on 2021-06-22 01:08:28
--
-- PostgreSQL database dump complete
--
<file_sep>import pandas as pd
import numpy as np
import re
def racesClean(racesdf):
    """Clean and type-convert the raw scraped races table.

    Takes the races DataFrame as read from CSV (all-string columns) and
    returns a cleaned copy: identifier columns converted to datetime /
    numeric, money columns stripped of formatting, the textual distance
    mapped to a numeric value, and fractional times converted to seconds.

    Relies on the module-level ``numClean``, ``distanceDict`` and
    ``segmentDict``.
    """
    # Drop rows whose parse failed outright (marked 'ERROR' by the scraper).
    outdf = racesdf.loc[racesdf['fracTime1'] != 'ERROR'].reset_index(drop=True).copy()
    # Combine the date and clock time into a full timestamp, then convert
    # the remaining identifier columns to their natural types.
    outdf.loc[:, 'startTime'] = pd.to_datetime(outdf.loc[:, 'date'] + ' ' + outdf.loc[:, 'startTime'])
    outdf.loc[:, 'date'] = pd.to_datetime(outdf['date'])
    outdf.loc[:, 'race'] = pd.to_numeric(outdf['race'])
    # Simple dollar columns: strip thousands separators, then convert.
    outdf.loc[:, 'wpsPool'] = outdf['wpsPool'].str.replace(',', '')
    for column in ('wpsPool', 'firstPlaceWin', 'firstPlacePlace', 'firstPlaceShow',
                   'secondPlacePlace', 'secondPlaceShow', 'thirdPlaceShow', 'runup'):
        outdf = numClean(outdf, column)
    # Exotic-bet pools/payouts all share one format, so clean them in one
    # loop (the original repeated this stanza six times): strip commas,
    # keep only the whole-dollar part before any decimal point.
    # Raw strings fix the invalid '\d' escape in the original patterns.
    # NOTE(review): this deliberately discards cents -- confirm intended.
    for column in ('exactaPool', 'trifectaPool', 'exactaPayout', 'trifectaPayout',
                   'superfectaPayout', 'quinellaPayout'):
        outdf.loc[:, column] = outdf[column].str.replace(',', '')
        outdf.loc[:, column] = outdf[column].str.extract(r'(\d+)\.?')[0]
        outdf = numClean(outdf, column)
    # Map the textual distance ('Six Furlongs', ...) via distanceDict,
    # after dropping any leading 'About ' qualifier.  Unmatched strings
    # keep the default of 0.
    outdf.loc[:, 'distance'] = pd.Series([0] * outdf.shape[0])
    aboutDistances = outdf['distanceStr'].str.extract(r'About (.*)').dropna()
    outdf.loc[aboutDistances.index, 'distanceStr'] = aboutDistances[0]
    for distanceKey, distanceValue in distanceDict.items():
        outdf.loc[outdf['distanceStr'] == distanceKey, 'distance'] = distanceValue
    # Move the new 'distance' column so it sits right after 'distanceStr'.
    columns = list(outdf.columns)
    distanceStrInd = columns.index('distanceStr')
    columns.insert(distanceStrInd + 1, columns.pop(-1))
    outdf = outdf[columns]
    # Convert fractional/final times ('1:23.45' or '23.45') into seconds,
    # and normalise each segment label to its numeric mark.
    for i in range(1, 7):
        colName = 'finalTime' if i == 6 else 'fracTime' + str(i)
        onlyPos = outdf.loc[~(outdf[colName].isnull()), colName]
        convTime = onlyPos.str.extract(r'^(\d?):?(\d\d\.\d\d)')
        # Group 0 is the minutes digit; group 1 the seconds.
        # NOTE(review): a string that matches nothing at all becomes
        # 1 min + 0 sec = 60.0 here, which looks suspect -- confirm that
        # every non-null time matches the pattern upstream.
        convTime.loc[convTime[0].isnull(), 0] = '1'
        convTime.loc[convTime[1].isnull(), 1] = '0'
        convTime.loc[convTime[0] == '', 0] = '0'
        convTime = convTime.astype({0: 'float', 1: 'float'})
        convTime.loc[:, 0] = convTime[0] * 60
        convTime = convTime.sum(1)
        # '-1' marks missing values through the numeric conversion, then
        # becomes NaN.
        outdf.loc[outdf[colName].isnull(), colName] = '-1'
        outdf.loc[convTime.index, colName] = convTime.astype('string')
        outdf.loc[:, colName] = outdf[colName].astype('float')
        outdf.loc[outdf[colName] == -1, colName] = np.nan
        if i == 6:
            break  # 'finalTime' has no matching segmentN column
        outdf.loc[outdf['segment' + str(i)].isnull(), 'segment' + str(i)] = ''
        for segKey, segVal in segmentDict.items():
            outdf.loc[outdf['segment' + str(i)] == segKey, 'segment' + str(i)] = str(segVal)
    outdf.sort_index(inplace=True)
    return outdf
def horsesClean(horsesdf):
    """Clean and type-convert the raw scraped horses table.

    Converts identifier and numeric columns, blanks out last-race fields
    for rows whose last-race track is not a known track abbreviation, and
    normalises the per-segment place/lengths columns.  Reads the track
    list from ``./../excel/tracks.csv`` (path relative to the CWD).
    Uses the module-level ``numClean`` and ``lengthsParse`` helpers.
    """
    outdf = horsesdf.copy()
    outdf.loc[:, 'date'] = pd.to_datetime(outdf['date'])
    outdf.loc[:, 'race'] = pd.to_numeric(outdf['race'])
    outdf = numClean(outdf, 'lastRaceDay')
    outdf = numClean(outdf, 'lastRaceYear')
    outdf = numClean(outdf, 'weight')
    outdf = numClean(outdf, 'placePP')
    outdf = numClean(outdf, 'odds')
    # Known track abbreviations; any lastRaceTrack value NOT in this list
    # gets its last-race number/place blanked before numeric conversion.
    tracks = pd.read_csv('./../excel/tracks.csv')
    trackAbbreves = list(tracks['shortName'])
    indices = outdf['lastRaceTrack'].copy()
    for abbreve in trackAbbreves:
        indices[indices == abbreve] = '-1'
    # 'indices' now holds only the rows with an unrecognised track.
    indices = indices[indices != '-1']
    outdf.loc[indices.index, 'lastRaceNum'] = ''
    outdf.loc[indices.index, 'lastRacePlace'] = ''
    outdf = numClean(outdf, 'lastRaceNum')
    outdf = numClean(outdf, 'lastRacePlace')
    # Per-segment columns use '---' and '*' as no-data markers; blank them
    # so numClean/lengthsParse turn them into NaN.
    for i in range(1, 7):
        outdf.loc[outdf['placeSeg' + str(i)] == '---', 'placeSeg' + str(i)] = ''
        outdf.loc[outdf['placeSeg' + str(i)] == '*', 'placeSeg' + str(i)] = ''
        outdf = numClean(outdf, 'placeSeg' + str(i))
        outdf.loc[outdf['lengthsSeg' + str(i)] == '---', 'lengthsSeg' + str(i)] = ''
        outdf.loc[outdf['lengthsSeg' + str(i)] == '*', 'lengthsSeg' + str(i)] = ''
        outdf = lengthsParse(outdf, 'lengthsSeg' + str(i))
        outdf.loc[outdf['rlPlaceSeg' + str(i)] == '---', 'rlPlaceSeg' + str(i)] = ''
        outdf.loc[outdf['rlPlaceSeg' + str(i)] == '*', 'rlPlaceSeg' + str(i)] = ''
        outdf = numClean(outdf, 'rlPlaceSeg' + str(i))
        outdf.loc[outdf['rlLengthsSeg' + str(i)] == '---', 'rlLengthsSeg' + str(i)] = ''
        outdf.loc[outdf['rlLengthsSeg' + str(i)] == '*', 'rlLengthsSeg' + str(i)] = ''
        outdf = lengthsParse(outdf, 'rlLengthsSeg' + str(i))
    return outdf
def numClean(df, colName):
    """Convert a string column to numeric in place, mapping missing markers to NaN.

    Null, empty-string and 'ERROR' entries become NaN; everything else is
    converted with ``pd.to_numeric``.  Returns the (mutated) DataFrame so
    calls can be written as ``df = numClean(df, col)``.
    """
    # Build the missing-value mask up front instead of routing through a
    # '-1' sentinel: the old sentinel silently turned genuine -1 values
    # into NaN as well.
    missing = df[colName].isnull() | (df[colName] == 'ERROR') | (df[colName] == '')
    df.loc[missing, colName] = np.nan
    df.loc[:, colName] = pd.to_numeric(df[colName])
    return df
def lengthsParse(df, colName):
    """Convert a beaten-lengths column to numeric lengths, in place.

    Handles the mixed formats used by the charts: margin words
    ('Nose'/'Head'/'Neck' -> small nominal values), plain numbers, and
    fused fractions such as '31/4' -> 3.25.  Any other value containing a
    capital letter (e.g. 'Distanced') is treated like a nose.  Missing
    entries become NaN.  Returns the mutated DataFrame.
    """
    # Word margins first, before the catch-all capital-letter rule below.
    df.loc[df[colName] == 'Nose', colName] = '0.1'
    df.loc[df[colName] == 'Head', colName] = '0.2'
    df.loc[df[colName] == 'Neck', colName] = '0.3'
    # '-1' is the missing-value sentinel; it is mapped to NaN at the end.
    df.loc[df[colName].isnull(), colName] = '-1'
    # Any remaining capitalised word is treated as a minimal margin.
    df.loc[df[colName].str.contains('[A-Z]'), colName] = '0.1'
    regNums = df.loc[df[colName].str.contains(r'\d?\d'), colName]
    slashes = df.loc[df[colName].str.contains(r'/'), colName]
    # '31/4' -> whole part '3', fraction 1/4.  The raw string fixes the
    # invalid '\d' escape sequences in the original pattern.
    # NOTE(review): assumes the whole part is fused directly onto the
    # fraction with no separator -- confirm against the chart format.
    slashesSplit = slashes.str.extract(r'(\d?\d?)(\d)/(\d)')
    slashesSplit.loc[slashesSplit[0] == '', 0] = '0'
    slashesSplit = slashesSplit.astype({0: 'int', 1: 'int', 2: 'int'})
    slashesSplit['fractions'] = slashesSplit[1] / slashesSplit[2]
    combined = (slashesSplit[0] + slashesSplit['fractions']).astype('str')
    # Plain numeric strings pass through unchanged; fraction rows are
    # overwritten with their decimal form.
    df.loc[regNums.index, colName] = regNums
    df.loc[combined.index, colName] = combined
    df.loc[:, colName] = pd.to_numeric(df[colName])
    df.loc[df[colName] == -1, colName] = np.nan
    return df
# Maps the chart's textual race distance to a numeric distance in
# furlongs (1 mile = 8 furlongs; yards-based distances are fractional).
# Bug fix: the 27.0 entry was a duplicate key 'Two And Three Eighth
# Miles', which silently overwrote the correct 19.0 mapping and left
# 'Three And Three Eighth Miles' races unmatched.
distanceDict = {
    'One Furlong': 1.0,
    'Two Furlongs': 2.0,
    'Two And One Half Furlongs': 2.5,
    'Three Furlongs': 3.0,
    'Three And One Half Furlongs': 3.5,
    'Four Furlongs': 4.0,
    'Four And One Half Furlongs': 4.5,
    'Five Furlongs': 5.0,
    'Five And One Fourth Furlongs': 5.25,
    'Five And One Half Furlongs': 5.5,
    'Six Furlongs': 6.0,
    'Six And One Half Furlongs': 6.5,
    'Seven Furlongs': 7.0,
    'Seven And One Half Furlongs': 7.5,
    'One Mile': 8.0,
    'One Mile And Forty Yards': 8.181818,
    'One Mile And Seventy Yards': 8.318181,
    'One And One Sixteenth Miles': 8.5,
    'One And One Eighth Miles': 9.0,
    'One And Three Sixteenth Miles': 9.5,
    'One And One Fourth Miles': 10.0,
    'One And Five Sixteenth Miles': 10.5,
    'One And Three Eighth Miles': 11.0,
    'One And Seven Sixteenth Miles': 11.5,
    'One And One Half Miles': 12.0,
    'One And Nine Sixteenth Miles': 12.5,
    'One And Five Eighth Miles': 13.0,
    # NOTE(review): lowercase 'and' below matches the original source --
    # confirm this is the exact casing the charts emit.
    'One and Eleven Sixteenth Miles': 13.5,
    'One And Three Fourth Miles': 14.0,
    'One And Thirteen Sixteenth Miles': 14.5,
    'One And Seven Eighth Miles': 15.0,
    'One And Fifteen Sixteenth Miles': 15.5,
    'Two Miles': 16.0,
    'Two Miles And Seventy Yards': 16.318181,
    'Two And One Sixteenth Miles': 16.5,
    'Two And One Eighth Miles': 17.0,
    'Two And Three Sixteenth Miles': 17.5,
    'Two And One Fourth Miles': 18.0,
    'Two And Five Sixteenth Miles': 18.5,
    'Two And Three Eighth Miles': 19.0,
    'Two And Seven Sixteenth Miles': 19.5,
    'Two And One Half Miles': 20.0,
    'Two And Nine Sixteenth Miles': 20.5,
    'Two And Five Eighth Miles': 21.0,
    'Two And Eleven Sixteenth Miles': 21.5,
    'Two And Three Fourth Miles': 22.0,
    'Two And Thirteen Sixteenth Miles': 22.5,
    'Two And Seven Eighth Miles': 23.0,
    'Two And Fifteen Sixteenth Miles': 23.5,
    'Three Miles': 24.0,
    'Three And One Eighth Miles': 25.0,
    'Three And One Fourth Miles': 26.0,
    'Three And Three Eighth Miles': 27.0,
    'Three And One Half Miles': 28.0,
    'Three And Five Eighth Miles': 29.0,
    'Three And Three Fourth Miles': 30.0,
    'Three And Seven Eighth Miles': 31.0,
    'Four Miles': 32.0,
}
# Maps a race-call segment label (a fraction of a mile, or 'Nm' for whole
# miles) to its distance in furlongs -- consistent with distanceDict's
# 1 mile = 8 furlongs (e.g. '1/2' mile -> 4.0, '1m' -> 8.0).
segmentDict = {
    '3/16': 1.5,
    '1/4': 2.0,
    '3/8': 3.0,
    '1/2': 4.0,
    '5/8': 5.0,
    '3/4': 6.0,
    '7/8': 7.0,
    '1m': 8.0,
    '11/8': 9.0,
    '11/4': 10.0,
    '13/8': 11.0,
    '11/2': 12.0,
    '15/8': 13.0,
    '13/4': 14.0,
    '17/8': 15.0,
    '2m': 16.0,
    '21/4': 18.0,
    '21/2': 20.0,
    '23/4': 22.0,
    '3m': 24.0,
    '31/2': 28.0,
}
# Ad-hoc entry point: load the scraped races CSV (all columns as strings,
# path relative to the CWD) and run the cleaning pass.
if __name__ == '__main__':
    df = pd.read_csv('./../outputs/races.csv', dtype='string')
jack = racesClean(df)<file_sep>from bs4 import BeautifulSoup
import re
import os
import pandas as pd
def getLinks(html, patternStr):
with open(html) as file:
raw = file.read()
soup = BeautifulSoup(raw, features='lxml')
link = []
pattern = re.compile(patternStr)
for i in soup.find_all('a'):
for key in i.attrs.keys():
if key == 'href':
jack = i.get('href')
if pattern.match(jack) is not None:
link.append(jack)
return link
def generateRaceUrlsFromLinks(dayList):
urls = []
for raw in dayList:
dateIndex = re.search('[0-9][0-9]/[0-9][0-9]/[0-9][0-9][0-9][0-9]', raw).span()
trackIndex = list(re.search('tid=...', raw).span())
trackIndex[0] += 4
if raw[trackIndex[1] - 1] == "&":
trackIndex[1] -= 1
countryIndex = list(re.search('ctry=...', raw).span())
countryIndex[0] += 5
if raw[countryIndex[1] - 1] == " ":
countryIndex[1] -= 1
first = 'https://www.equibase.com/premium/eqbPDFChartPlus.cfm?RACE=A&BorP=P&TID='
second = '&CTRY='
third = '&DT='
fourth = '&DAY=D&STYLE=EQB'
urls.append(first + raw[trackIndex[0]:trackIndex[1]] + second + raw[countryIndex[0]:countryIndex[1]] + third + raw[dateIndex[0]:dateIndex[1]] + fourth)
return urls
def saveRaceUrlsFromFiles(filesAddress):
files = os.listdir(filesAddress)
urls = []
for file in files:
dayLinks = getLinks(filesAddress+file, 'eqbPDFChartPlusIndex.cfm\?tid=')
urls += generateRaceUrlsFromLinks(dayLinks)
with open('./../excel/raceUrls.csv', 'w') as file:
for item in urls:
file.write('%s\n' % item)<file_sep>import re
import pandas as pd
from .regexPatterns import *
def parseTimeInfo(timeLines):
timeDict = {}
runUp = '0'
for line in timeLines:
if re.search(fractionalTimesLinePattern, line) is not None:
fractionalTimes = parseFractionalTimes(line)
elif re.search(runupLinePattern, line) is not None:
runUp = parseRunUp(line)
timeDict['fracTime1'] = fractionalTimes[0]
timeDict['fracTime2'] = fractionalTimes[1]
timeDict['fracTime3'] = fractionalTimes[2]
timeDict['fracTime4'] = fractionalTimes[3]
timeDict['fracTime5'] = fractionalTimes[4]
timeDict['finalTime'] = fractionalTimes[5]
timeDict['runUp'] = runUp
return timeDict
def parseFractionalTimes(line):
fullSearch = re.search(fractionalTimesSearchPattern, line)
if fullSearch is None:
print('Match error in parseFractionalTimes on line: ' + line)
return ['ERROR'] * 7
out = []
for i in range(2,8):
if fullSearch.group(i) is None or fullSearch.group(i) == 'N/A':
out.append('')
else:
out.append(fullSearch.group(i))
return out
def parseRunUp(line):
fullSearch = re.search(runupSearchPattern, line)
runUp = fullSearch.group(1)
return runUp | 79decef9dd5e03810dd90447763d380be763d2aa | [
"SQL",
"Python"
] | 19 | SQL | Ven0r/horseData | 9bf31c818e041259d72353560c329372c4e754f1 | e736a1c60e50335c145557ad4f5d979200873dd3 |
refs/heads/master | <repo_name>legitparty/sensehatjoy<file_sep>/README.md
# sensehatjoy
Raspberry Pi SenseHat Joystick controller
<file_sep>/sensehatjoy.py
#!/usr/bin/env python
class HSBColor:
    """A colour in hue/saturation/brightness space, each component in [0, 1]."""
    def __init__(self, h = 0.0, s = 0.0, b = 0.0):
        self.h = h  # hue (wraps: 0.0 and 1.0 are both red)
        self.s = s  # saturation
        self.b = b  # brightness
    def rgb(self):
        """Return this colour as an (r, g, b) tuple of ints in 0..255."""
        # Scale hue onto a 0..3 wheel; each channel peaks one unit apart,
        # with the extra hr2 term wrapping red around at the top end.
        normal_h = (float(self.h) * 3.0) % 3.0
        hr = 1.0 - (min(1.0, abs(normal_h)))
        hg = 1.0 - (min(1.0, abs(normal_h - 1.0)))
        hb = 1.0 - (min(1.0, abs(normal_h - 2.0)))
        hr2 = 1.0 - (min(1.0, abs(normal_h - 3.0)))
        hr += hr2
        # Saturation pulls each channel toward full brightness (grey/white).
        r = self.b - (self.s * self.b) * (1.0 - hr)
        g = self.b - (self.s * self.b) * (1.0 - hg)
        b = self.b - (self.s * self.b) * (1.0 - hb)
        return int(r * 255), int(g * 255), int(b * 255)
    def copy(self):
        """Return an independent copy of this colour."""
        return HSBColor(self.h, self.s, self.b)
class HSBDisplay:
    """An off-screen image of HSBColor pixels, sized for the 8x8 LED matrix."""
    def __init__(self, width = 8, height = 8, image = None):
        self.width = width
        self.height = height
        if image is None:
            # Fresh image: every pixel its own (black) HSBColor instance.
            self.image = [[HSBColor() for x in range(self.width)] for y in range(self.height)]
        else:
            self.image = image
    def rgb(self):
        """Return the image as rows of (r, g, b) tuples."""
        return [
            [x.rgb() for x in y]
            for y
            in self.image
        ]
    def get_pixel(self, x, y):
        return self.image[y][x]
    def set_pixel(self, x, y, hsb):
        # Copies the components into the existing pixel object rather
        # than storing a reference to the caller's colour.
        pixel = self.get_pixel(x, y)
        pixel.h = hsb.h
        pixel.s = hsb.s
        pixel.b = hsb.b
    def draw(self, hat):
        """Flatten the image row-major and push it to the Sense HAT."""
        pixels = []
        for row in self.rgb():
            pixels.extend(row)
        hat.set_pixels(pixels)
    def copy(self):
        """Return a deep copy (every pixel object is duplicated).

        Bug fix: the original iterated ``hsb in range(self.width)`` and
        called ``.copy()`` on the resulting ints, raising AttributeError.
        """
        image = [[self.image[y][x].copy() for x in range(self.width)]
                 for y in range(self.height)]
        return HSBDisplay(self.width, self.height, image)
class HatPen:
    """Drawing state for the Sense HAT LED matrix.

    Holds the hardware handle, a cursor (position + colour) and an
    off-screen HSBDisplay image that is pushed to the LEDs on draw().
    """
    def __init__(self):
        # Deferred import: sense_hat is only required once a pen is
        # actually constructed on real hardware.
        from sense_hat import SenseHat
        self.hat = SenseHat()
        self.hat.clear()
        self.cursor_pos = (0, 0)  # (x, y) on the 8x8 matrix
        self.cursor_color = HSBColor()
        self.display = HSBDisplay()
    def apply(self):
        # Stamp the cursor colour into the off-screen image at the cursor.
        x, y = self.cursor_pos
        self.display.set_pixel(x, y, self.cursor_color)
    def draw(self):
        # Push the whole off-screen image to the LED matrix.
        self.display.draw(self.hat)
    def draw_cursor(self):
        # Overlay the cursor directly on the hardware without modifying
        # the off-screen image.
        x, y = self.cursor_pos
        self.hat.set_pixel(x, y, self.cursor_color.rgb())
class HatJoy(HatPen):
    # Placeholder subclass: currently identical to HatPen.
    pass
def main():
    """Interactive joystick-driven pixel editor for the Sense HAT.

    The middle button cycles through three command modes; the arrow
    directions then mean:
      - cursor mode:  move the cursor around the 8x8 matrix
      - color mode:   up/down adjust the selected HSB component by 0.01,
                      left/right select which component (hue/sat/bright)
      - drawing mode: move the cursor, stamping the current colour into
                      the image after every move

    Python 2 code (print statements); blocks forever in the event loop.
    """
    from sys import stdout
    from sense_hat.stick import SenseStick
    j = SenseStick()
    # Current colour as [hue, saturation, brightness], all starting at max.
    hsb = [
        1.0,
        1.0,
        1.0,
    ]
    hsb_labels = ["hue", "saturation", "brightness"]
    c_cursor = 0
    c_color = 1
    c_drawing = 2
    c_labels = ["cursor", "color", "drawing"]
    c = 0  # command mode counter (used modulo 3)
    i = 0  # selected HSB component counter (used modulo 3)
    x = 0
    y = 0
    o = HatPen()
    o.cursor_color = HSBColor(hsb[0], hsb[1], hsb[2])
    o.cursor_pos = (x, y)
    while True:
        # Status readout on the console, then refresh the LED matrix.
        stdout.write("\r")
        stdout.flush()
        print "-" * 40
        print "In %s command mode." % c_labels[c%3]
        if c_labels[c%3] == "color":
            print "HSBColor(h=%.2f, s=%.2f, b=%.2f)." % (hsb[0], hsb[1], hsb[2])
            print "%s to %.2f" % (hsb_labels[i%3], hsb[i%3])
        print "Cursor at pos: %i, %i" % (x, y)
        o.draw()
        # In cursor/color mode the cursor is overlaid on the hardware only;
        # in drawing mode the stamped image already shows it.
        if c_labels[c%3] in ["cursor", "color"]:
            o.draw_cursor()
        # Block until the next joystick event.
        e = j.wait_for_event()
        if e.action == "pressed":# or e.action == "held":
            if e.direction == "middle":
                c += 1
                continue
            elif e.direction == "up":
                if c_labels[c%3] == "color":
                    # Bump the selected component, clamped to [0, 1].
                    hsb[i%3] = max(0.0, min(1.0, hsb[i%3] + 0.01))
                    o.cursor_color = HSBColor(hsb[0], hsb[1], hsb[2])
                else:
                    # Move the cursor, clamped to the 8x8 matrix.
                    y = max(0, min(7, y - 1))
                    o.cursor_pos = (x, y)
            elif e.direction == "down":
                if c_labels[c%3] == "color":
                    hsb[i%3] = max(0.0, min(1.0, hsb[i%3] - 0.01))
                    o.cursor_color = HSBColor(hsb[0], hsb[1], hsb[2])
                else:
                    y = max(0, min(7, y + 1))
                    o.cursor_pos = (x, y)
            elif e.direction == "left":
                if c_labels[c%3] == "color":
                    # Select the previous HSB component.
                    i -= 1
                    o.cursor_color = HSBColor(hsb[0], hsb[1], hsb[2])
                else:
                    x = max(0, min(7, x - 1))
                    o.cursor_pos = (x, y)
            elif e.direction == "right":
                if c_labels[c%3] == "color":
                    i += 1
                    o.cursor_color = HSBColor(hsb[0], hsb[1], hsb[2])
                else:
                    x = max(0, min(7, x + 1))
                    o.cursor_pos = (x, y)
            # In drawing mode every handled press stamps the pixel.
            if c_labels[c%3] == "drawing":
                o.apply()
if __name__ == '__main__':
    main()
| 05b22646aa486be5d2e8cadb0972e610ffea7894 | [
"Markdown",
"Python"
] | 2 | Markdown | legitparty/sensehatjoy | 0f921b38f829ec112eb8d3d2e0b8f43dfff75aae | 595c2ce3e69a2a6c8133197f596ae22fcddf8744 |
refs/heads/master | <repo_name>KevinAngeles/Week11-Hangman<file_sep>/letter.js
var allowedCharacters = /^[a-zA-Z0-9]+$/;
function Letter(l)
{
this.l = l;
this.visible = ( !allowedCharacters.test(this.l) );
}
/**
* Returns the stored character if it is visible, or _ if not
*/
Letter.prototype.render = function() {
return ( this.visible ) ? this.l : "_";
}
module.exports.Letter = Letter;
module.exports.allowedCharacters = allowedCharacters;<file_sep>/main.js
// Console hangman: pick a phrase, show its masked form, then prompt for
// one character per round via inquirer until the player wins or runs out
// of guesses.
var Word = require('./word.js');
var gameWord = require('./game.js');
var inquirer = require('inquirer');

var w = new Word(gameWord.selectedWord);
var gameFinished = false;
var opportunities = 15; // wrong guesses allowed

console.log("=================================================");
console.log(w.render());
play();

// One round of the game; recurses (via the inquirer promise) until the
// game ends.
function play()
{
	inquirer.prompt([
		{
			type: "input",
			message: "Guess a character",
			name: "character"
		}
	]).then(function (user) {
		//detect which key the user pressed
		var userKey = user.character;
		if(w.isAllowed(userKey))
		{
			// Only a miss costs an opportunity.
			if(!w.guess(userKey))
			{
				opportunities = opportunities-1;
				console.log("Wrong guess!");
			}
			console.log(w.render());
			console.log("=================================================");
			if(opportunities <= 0)
			{
				gameFinished = true;
				console.log("You Lost");
				// NOTE(review): depends on Word exposing a selectedWord
				// property -- confirm in word.js, else this prints
				// 'undefined'.
				console.log("The word was: "+w.selectedWord)
				console.log("=================================================");
			}
			else if(w.complete())
			{
				gameFinished = true;
				console.log("Congratulations, You Won");
				console.log("=================================================");
			}
			//If the user has more opportunities
			if(!gameFinished)
				//continue playing and call to inquirer again
				play();
		}
		else
		{
			// Rejected input (not a letter/digit) costs nothing.
			console.log("Character Not Allowed");
			console.log("=================================================");
			play();
		}
	});
}<file_sep>/game.js
var words = ["Mighty Long Fall","Bring me to Life","Low","Talk Dirty"];
//This will get a random song from the array of songs and set it to selectedWord
exports.selectedWord = words[Math.floor(Math.random()*words.length)]; | 1c8a7bf5c6a65e4c4f6236cb1b921232d4cae38b | [
"JavaScript"
] | 3 | JavaScript | KevinAngeles/Week11-Hangman | ea125a126dcc87bbc2ec0606a9795d1a002bdc67 | c5856d8eb1381371623090ce11065d4262105c9e |
refs/heads/master | <repo_name>orithena/clip2qr<file_sep>/README.md
clip2qr
=======
Grabs the contents of the X clipboard, generates a QR code image and displays it for
immediate use. Main usecase: get a URL or some text snippet to your smartphone.
You need to have an app installed on your smartphone that can read QR Codes (e.g. QRDroid).
There are no options. Save the clip2qr.sh file somewhere, make it executable and somehow
register it in your Desktop Environment.
* You might register it to run on a keyboard shortcut, e.g. Ctrl-B.
* You might assign a starter icon on your panel to this script.
* Or you might find a completely different method to invoke this script.
Dependencies
------------
On Ubuntu, the following packages need to be installed: python, python-tk, qrencode, xclip.
$ sudo apt-get install python python-tk python-imaging-tk qrencode xclip
Install
-------
Start a terminal and type the following commands:
$ wget https://raw.github.com/orithena/clip2qr/master/clip2qr.sh -O /tmp/clip2qr.sh
$ sudo cp /tmp/clip2qr.sh /usr/local/bin
$ sudo chmod +x /usr/local/bin/clip2qr.sh
Then decide on a method to run this script and register it in your Desktop Environment.
For example, locate the "Keyboard Shortcut" preferences dialog and assign a global shortcut
to `/usr/local/bin/clip2qr.sh`. Or add a starter to your desktop panel (maybe by right-clicking
on it?). You could generate an icon for the starter like this:
$ sudo qrencode -s 1 -m 1 -o /usr/share/icons/qrcode.png "Clipboard to QR Code"
Increase the value after -s if you need a bigger icon.
<file_sep>/clip2qr.sh
#!/bin/sh
#
# clip2qr.sh
# ==========
# Grabs the contents of the X clipboard, generates a QR code image and displays it for immediate use.
#
#
# Released under the terms of the MIT-License (see below).
# Copyright (c) 2013 <NAME> <<EMAIL>>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# Set up a temporary directory
TMPDIR=$(mktemp -d)
# ensure removal of said tempdir on exit
trap 'rm -rf $TMPDIR; exit 1' 0 1 2 3 13 15
# First, try to convert data from clipboard and push it into tempdir
# check if primary clipboard contains convertible data
if xclip -o | qrencode -s 2 -m 2 -o - > $TMPDIR/qrcode.png
then
TXT=$(xclip -o)
# check if clipboard contains convertible data
elif xclip -o -selection clipboard | qrencode -s 2 -m 2 -o - > $TMPDIR/qrcode.png
then
TXT=$(xclip -o -selection clipboard)
# no convertible data found -> notify user (maybe you need to insert your notificator of choice here)
else
STXT=$( echo "$(xclip -o)" | head -n 1 | cut -c 1-50 )
notify-send -i unknown "Conversion Error" "Cannot provide a QR Code for the current clipboard contents:\
\
$STXT ..."
exit 0
fi
# Write the contents of the qr code as text to tempdir
echo "$TXT" > $TMPDIR/content.txt
# Start python interpreter to display the QR code
python - <<PYEND
import Tkinter
from PIL import Image
from PIL import ImageTk
# Pull up a TK window, make it fullscreen and focusable
tk = Tkinter.Tk()
tk.geometry("%dx%d+0+0" % (tk.winfo_screenwidth(), tk.winfo_screenheight()))
tk.wm_focusmodel(Tkinter.ACTIVE)
# Quit on anything
def handler(event):
tk.quit()
tk.destroy()
tk.bind("<Key>", handler)
tk.bind("<KeyPress>", handler)
tk.bind("<Button>", handler)
tk.protocol("WM_DELETE_WINDOW", tk.destroy)
# These variables will be needed after the try block
txt = ""
tkim = None
try:
# Read QR code image, convert to byte stream
img = Image.open("$TMPDIR/qrcode.png").convert()
# Double the size of the image until it fills at least half the screen (minus a margin)
while (img.size[1] < tk.winfo_screenheight() * 0.4) and (img.size[0] < tk.winfo_screenwidth() * 0.45):
img = img.resize(([x*2 for x in img.size]), Image.NONE)
# Wrap image in Tk-Container
tkim = ImageTk.PhotoImage(img)
# Read text to display from file
txt = file("$TMPDIR/content.txt").read()
except Exception as e:
txt = "Error while retrieving data to display: " + str(e)
# Headline label widget
lh = Tkinter.Label(tk, text="QR Code from Clipboard", font=("sans-serif", 12), background="#000",
foreground="#FFF", justify=Tkinter.CENTER).pack(fill=Tkinter.BOTH, expand=1)
# QR code image label widget
li = Tkinter.Label(tk, image=tkim, background="#000", foreground="#FFF",
justify=Tkinter.CENTER).pack(fill=Tkinter.BOTH, expand=1)
# QR code contents text label widget
lt = Tkinter.Label(tk, text=txt, font=("sans-serif", 9), background="#000", foreground="#FFF", justify=Tkinter.LEFT,
wraplength=tk.winfo_screenwidth()*0.9).pack(fill=Tkinter.BOTH, expand=1)
# Lift window, grab focus
tk.overrideredirect(True)
tk.lift()
tk.focus_force()
tk.grab_set()
tk.grab_set_global()
# run event loop until destruction via handlers above
tk.mainloop()
PYEND
# Clean up
rm -rf $TMPDIR
trap 0 1 2 3 13 15
| bf6304f03eaa790ab4879e99ceef894ec00c21c4 | [
"Markdown",
"Shell"
] | 2 | Markdown | orithena/clip2qr | ef8da8074d4bded035592e4bfec5570a8bde1ae4 | d2a903eefd8ff47b3a6cd212b623e04299faa4fc |
refs/heads/main | <file_sep>import React, { Component } from 'react';
class BirthdayButton extends React.Component {
constructor(props) {
super(props);
this.state = {
position: this.props.age
};
}
render() {
return (
<fieldset>
<button onClick={ this.switch }>Birthday Button for { this.props.firstName } { this.props.lastName } </button>
</fieldset>
);
}
switch = () => {
var age = this.state.position
age++;
this.state = {
position: age
};
}
}
export default BirthdayButton<file_sep>import React from 'react';
import {
Nav,
NavLink,
Bars,
NavMenu,
NavBtn,
NavBtnLink
} from './NavbarElements';
import Logo from '../../components/Header/LOGO.png';
// Top navigation bar: site logo linking home, the primary page links and
// a call-to-action button for the contact page, all built from the
// styled components in ./NavbarElements.
// NOTE(review): `background-color` is not a valid DOM attribute (should
// be style={{backgroundColor: 'white'}}), `width='55em'` is not a valid
// pixel width, and `activeStyle` is passed as a bare boolean prop --
// confirm these against the NavLink implementation before changing.
const Header = () => {
  return (
    <>
      <Nav>
        <NavLink to='/'>
          <img src={Logo} width='55em' background-color='white' alt='' />
        </NavLink>
        <Bars />
        <NavMenu>
          <NavLink to='/about-me' activeStyle>About</NavLink>
          <NavLink to='/projects' activeStyle> Projects </NavLink>
          <NavLink to='/skills' activeStyle>Skills </NavLink>
          <NavLink to='/resume' activeStyle>Resume/ CV</NavLink>
        </NavMenu>
        <NavBtn>
          <NavBtnLink to='/contact-me'>Contact Me</NavBtnLink>
        </NavBtn>
      </Nav>
    </>
  );
};
export default Header;
<file_sep>const express = require('express');
const app = express();
// Mount the person API routes on this Express app.
require('./server/routes/person.routes')(app); // This is new
// Serve the API on port 8000.
app.listen(8000, () => {
    console.log("Listening at Port 8000")
})<file_sep>
// Create a Ninja class
// add an attribute: name
// add an attribute: health
// add a attribute: speed - give a default value of 3
// add a attribute: strength - give a default value of 3
// add a method: sayName() - This should log that Ninja's name to the console
// add a method: showStats() - This should show the Ninja's name, strength, speed, and health.
// add a method: drinkSake() - This should add +10 Health to the Ninja
//
class Ninja {
constructor(name, health, speed=3, strength=3) {
this.name = name;
this.health = health;
this.speed = speed;
this.strength = strength;
}
sayName() {
console.log(this.name)
}
showStats() {
console.log(`This is the name ${this.name}, This is the strength ${this.strength}, This is the speed ${this.speed}, This is the health ${this.health}`)
}
drinkSake() {
this.health += 10;
return "What one programmer can do in one month, two programmers can do in two months."
}
}
class Sensei extends Ninja {
constructor(name, wisdom=10) {
super(name, 200, 10);
this.wisdom = wisdom
}
speakWisdom() {
const speakWisdom = super.drinkSake();
console.log(speakWisdom);
}
showStats() {
console.log(`This is the name ${this.name}, This is the strength ${this.strength}, This is the speed ${this.speed}, This is the health ${this.health}, This is the wisdom ${this.wisdom}`);
}
}
const ninja1 = new Ninja("Hyabusa", 15);
ninja1.sayName();
ninja1.showStats();
console.log(ninja1.drinkSake());
ninja1.showStats();
const superSensei = new Sensei("Master Splinter", 15);
superSensei.speakWisdom();
// -> "What one programmer can do in one month, two programmers can do in two months."
superSensei.showStats();
// Extend the Ninja class and create the Sensei class. A Sensei should have 200 Health, 10 speed, and 10 strength by default. In addition, a Sensei should have a new attribute called wisdom, and the default should be 10. Finally, add the speakWisdom() method. speakWisdom() should call the drinkSake() method from the Ninja class, before console.logging a wise message.
// example output
// const superSensei = new Sensei("Master Splinter");
// superSensei.speakWisdom();
// -> "What one programmer can do in one month, two programmers can do in two months."
// superSensei.showStats();
// -> "Name: <NAME>, Health: 210, Speed: 10, Strength: 10"
// create a class Sensei that inherits from the Ninja class
// add an attribute: wisdom - default to 10
// create a method: speakWisdom()
// create a method: drinkSake()<file_sep>import React, { Component } from 'react';
import BirthdayButton from './BirthdayButton';
import '../App.css';
// Card showing a person's name, age and hair colour, with a button that
// increments the age held in component state.
// NOTE(review): state.position is seeded from props.age once in the
// constructor, so later prop changes will not be reflected -- confirm
// that is intended.
class PersonCard extends React.Component{
    constructor(props) {
        super(props);
        this.state = {
            position: this.props.age
        };
    }

    // Increment the displayed age via setState (triggers a re-render).
    switch = () => {
        let age = this.state.position
        age++;
        this.setState ( {
            position: age
        });
    }

    render () {
        // NOTE(review): the destructured `age` is unused -- the rendered
        // age comes from state.position instead.
        const { firstName, lastName, age, hairColor} = this.props;
        return (
            <div>
                <h1>{ lastName }, { firstName } </h1>
                <h4>Age: {this.state.position}</h4>
                <h4>Hair Color: {hairColor}</h4>
                <button className="BirthdayButton" onClick={ this.switch }>Birthday Button for { this.props.firstName } { this.props.lastName } </button>
            </div>
        );
    }
}
export default PersonCard
<file_sep>import React, { useEffect, useState } from 'react';
import axios from 'axios';
import './style.css';
import ProfilePicture from '../../frontend/src/images/profilePicture.JPG';
const Main = () => {
const [message, setMessage] = useState('Message received!');
useEffect(() => {
axios.get('http://localhost:8000/api')
.then(response => {
setMessage(response.data.message);
});
}, []);
return (
<>
<div class="container" >
<img class="profilePicture" src={ProfilePicture} alt='' />
<div class="intro">
<h2 class='introtext' >Full-Stack Web Developer</h2>
<h3 class='introtext'> Self Motivated, U.S. Marine Veteran with over 8 years in technical experience. </h3>
<h3 class='introtext'>On a journey to become a professional Software Engineer. </h3>
</div>
<h2 class="glow"><NAME></h2>
<h3 class="glow"> Los Angeles, California</h3>
</div>
<div class="about-me">
hello
</div>
</>
);
};
export default Main;
<file_sep>import './App.css';
import Header from './components/Header/Header';
import { BrowserRouter as Router, Switch, Route } from 'react-router-dom';
import HomePage from './views/HomePage';
import Main from './Main';
import Footer from './components/Footer/Footer';
// Root component: router plus page chrome.  Header, Main and Footer sit
// outside the <Switch>, so they render on every route; only the routed
// page content varies.
function App() {
  return (
    <Router>
      <Header />
      <Main />
      <Switch>
        <Route path="/" exact component={HomePage} />
        {/* <Route path="/about" exact component={AboutPage} /> */}
      </Switch>
      <Footer />
    </Router>
  );
}
export default App;
| 1453b1a55842e56254207b626d6953ab71427f59 | [
"JavaScript"
] | 7 | JavaScript | jag18729/MERNSTACK | 9ce7b70192f0c3802f0533acf0da3aba00a83c5e | 6e04d97a76e05396c634cb6baad05577b753d7af |
refs/heads/master | <repo_name>nickstew/AtTaskForJenkins<file_sep>/src/main/java/com/attask/jenkins/plugin/Visualization/TestResultPublisher.java
package com.attask.jenkins.plugin.Visualization;
import com.attask.jenkins.plugin.AtTaskForJenkins;
import hudson.Extension;
import hudson.Launcher;
import hudson.model.*;
import hudson.tasks.junit.*;
import org.kohsuke.stapler.DataBoundConstructor;
import java.io.IOException;
/**
* User: nicholasstewart
* Date: 2/22/12
* Time: 3:59 PM
*/
/**
 * {@link TestDataPublisher} that contributes AtTask information to the
 * JUnit test results Jenkins collects.
 */
public class TestResultPublisher extends TestDataPublisher {
	/** No-arg constructor so Jenkins can instantiate the publisher from the job configuration form. */
	@DataBoundConstructor
	public TestResultPublisher() {
		super();
	}
	/**
	 * Called after test results are collected by Hudson, to create a resolver for {@link hudson.tasks.junit.TestAction}s.
	 *
	 * @return can be null to indicate that there's nothing to contribute for this test result.
	 */
	@Override
	public hudson.tasks.junit.TestResultAction.Data getTestData(final AbstractBuild<?, ?> build,
																Launcher launcher,
																BuildListener listener,
																TestResult testResult) throws IOException, InterruptedException {
		// Per-test actions are resolved lazily by the returned builder.
		return new TestActionListBuilder();
	}
	/** Registers the publisher so it is selectable in the job configuration UI. */
	@Extension
	public static class DescriptorImplementator extends Descriptor<TestDataPublisher> {
		@Override
		public String getDisplayName() {
			return "Include AtTask test result information";
		}
	}
}
<file_sep>/pom.xml
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.jenkins-ci.plugins</groupId>
<artifactId>plugin</artifactId>
<version>1.413</version>
</parent>
<artifactId>AtTaskForJenkins</artifactId>
<version>0.1.1</version>
<packaging>hpi</packaging>
<name>AtTask For Jenkins</name>
<description>AtTask integration for Jenkins</description>
<url>http://wiki.jenkins-ci.org/display/JENKINS/AtTaskForJenkins</url>
<dependencies>
<dependency>
<groupId>com.attask</groupId>
<artifactId>api-sdk-internal</artifactId>
<version>1.0</version>
</dependency>
</dependencies>
<developers>
<developer>
<id>nickstew</id>
<name><NAME></name>
<email><EMAIL></email>
</developer>
</developers>
<scm>
<connection>scm:git:ssh://github.com/jenkinsci/AtTaskForJenkins.git</connection>
<developerConnection>scm:git:ssh://git@github.com/jenkinsci/AtTaskForJenkins.git</developerConnection>
<url>https://github.com/jenkinsci/AtTaskForJenkins</url>
</scm>
<repositories>
<repository>
<id>maven.jenkins-ci.org</id>
<url>http://maven.jenkins-ci.org/content/repositories/releases</url>
</repository>
<repository>
<id>localhost</id>
<url>http://maven.ops.ut.us.attask.com:8081/artifactory/ext-release-local</url>
</repository>
<repository>
<id>m.g.o-public</id>
<url>http://maven.glassfish.org/content/groups/public/</url>
</repository>
</repositories>
<pluginRepositories>
<pluginRepository>
<id>m.g.o-public</id>
<url>http://maven.glassfish.org/content/groups/public/</url>
</pluginRepository>
</pluginRepositories>
</project>
<file_sep>/src/main/java/com/attask/jenkins/plugin/Visualization/TestActionListBuilder.java
package com.attask.jenkins.plugin.Visualization;
import com.attask.jenkins.plugin.*;
import com.attask.sdk.model.OpTask;
import com.attask.sdk.model.Project;
import com.attask.sdk.model.User;
import hudson.model.AbstractBuild;
import hudson.tasks.junit.*;
import java.util.*;
import java.util.logging.Logger;
/**
 * Builds the extra per-test-case {@link TestAction}s (AtTask issue link,
 * blame/assignment action, status label) that Jenkins renders as additional
 * columns in the test report table.
 *
 * User: nicholasstewart
 * Date: 2/29/12
 * Time: 2:35 PM
 */
public class TestActionListBuilder extends hudson.tasks.junit.TestResultAction.Data {

    // Cached group members, populated on first use and never refreshed.
    // NOTE(review): static and unsynchronized — concurrent first calls may
    // each build the list; confirm that is acceptable for this plugin.
    private static List<User> _assignees;

    // Intentionally shares the TestResultPublisher logger name so all of the
    // plugin's publishing messages land under one logger.
    public static final Logger LOG = Logger.getLogger(TestResultPublisher.class.getName());

    /** Opens a connector using the globally configured admin credentials. */
    private RemoteConnector getConnector(AtTaskForJenkins.DescriptorImpl descriptor) throws Exception {
        return DependencyFactory.getConnector(descriptor.getUrl(), descriptor.getUsername(), descriptor.getPassword());
    }

    /**
     * Returns all TestActions for the testObject.
     *
     * @return Can be empty but never null. The caller must assume that the returned list is read-only.
     */
    @Override
    @SuppressWarnings("deprecation")
    public List<? extends TestAction> getTestAction(TestObject testObject) {
        List<TestAction> actions = new ArrayList<TestAction>();
        // We are not interested in TestResult or PackageResult right now, only CaseResult.
        if (!(testObject instanceof CaseResult))
            return actions;
        ConversionUtil conversionUtil = new ConversionUtil();
        try {
            AbstractBuild<?, ?> build = testObject.getOwner();
            AtTaskForJenkins.DescriptorImpl descriptor = (AtTaskForJenkins.DescriptorImpl) build.getDescriptorByName("AtTaskForJenkins");
            RemoteConnector connector = getConnector(descriptor);
            // The instanceof guard above already established this is a CaseResult,
            // so the original's redundant ternary is gone.
            String testClassName = ((CaseResult) testObject).getClassName();
            String issueName = conversionUtil.createTestName(testClassName, testObject.getDisplayName());
            List<User> assignees = getAssignees(connector, descriptor.getGroupName());
            List<User> committers = getCommitters(connector, build.getCulprits());
            //AbstractBuild<?, ?> firstFailedBuild = build.getProject().getBuildByNumber(((CaseResult) testObject).getFailedSince());
            Project project = connector.getProject(conversionUtil.createProjectName(build, descriptor));
            OpTask issue = connector.getIssue(issueName, project);
            if (issue != null) {
                // Tracked test: link to its issue, offer (re)assignment, show status.
                String attaskIssueUrl = descriptor.getUrl();
                if (!attaskIssueUrl.endsWith("/"))
                    attaskIssueUrl += "/";
                attaskIssueUrl += "issue/view?ID=" + issue.getID();
                actions.add(new AtTaskLinkAction(attaskIssueUrl, issueName));
                if (issue.getAssignedToID() != null) {
                    User user = connector.getUserByID(issue.getAssignedToID());
                    actions.add(new BlameAction(issue, user, assignees, committers, descriptor, testObject.getId()));
                } else {
                    actions.add(new BlameAction(issue, assignees, committers, descriptor, testObject.getId()));
                }
                String status = issue.getStatus();
                if (!status.equals(connector.getResolvedStatus()) && issue.getAssignments() != null && issue.getAssignments().size() > 0) {
                    status = issue.getAssignments().get(0).getStatus().getValue(); // Should be get label, bug sdk bug.
                }
                //TODO: this should work but there's a "java.lang.ClassNotFoundException: org.apache.struts.util.MessageResourcesFactory" in the sdk
                actions.add(new StatusAction(status));
            } else if (project != null) {
                // Known project but no issue yet: blaming will create the issue.
                actions.add(new BlameAction(issueName, project, assignees, committers, descriptor, testObject.getId()));
                actions.add(new StatusAction("Untracked"));
            } else {
                // No project either: placeholders keep the table layout intact.
                actions.add(new EmptyTestResultAction("testFailureAssignment unattached", ""));
                actions.add(new StatusAction("Untracked"));
            }
        } catch (Exception e) {
            // Degrade gracefully when AtTask is unreachable or misconfigured.
            // Fix: pass the throwable so the stack trace reaches the log
            // (the original LOG.info call silently dropped it).
            LOG.log(java.util.logging.Level.INFO,
                    "com.attask.jenkins.plugin.Visualization.TestActionListBuilder has caught an exception.\n" +
                    "It will now return a list of empty Actions to properly display testReport table.", e);
            actions.add(new EmptyTestResultAction("testFailureAssignment unattached", ""));
            actions.add(new StatusAction("Untracked"));
        }
        return actions;
    }

    /** Lazily builds and caches the group's members, sorted by display name. */
    private List<User> getAssignees(RemoteConnector connector, String groupName) {
        if (_assignees == null) {
            List<User> assignees = new ArrayList<User>();
            assignees.addAll(connector.getUsers(groupName));
            Collections.sort(assignees, new Comparator<User>() {
                public int compare(User user, User user1) {
                    return user.getName().compareTo(user1.getName());
                }
            });
            _assignees = assignees;
        }
        return _assignees;
    }

    /** Maps Jenkins culprits onto AtTask users, skipping unknown committers. */
    private List<User> getCommitters(RemoteConnector connector, Set<hudson.model.User> culprits) {
        List<User> committers = new ArrayList<User>();
        for (hudson.model.User culprit : culprits) {
            User committer = connector.getUser(culprit.getFullName(), false);
            if (committer != null) {
                committers.add(committer);
            }
        }
        return committers;
    }
}
<file_sep>/src/main/java/com/attask/jenkins/plugin/ConversionUtil.java
package com.attask.jenkins.plugin;
import hudson.EnvVars;
import hudson.model.AbstractBuild;
import hudson.model.TaskListener;
/**
 * Helper routines that translate Jenkins build data into the names,
 * messages and conditions used on the AtTask side.
 */
public class ConversionUtil {

    /**
     * Maps a Jenkins build result string onto an AtTask job condition,
     * defaulting to {@code Stable} for values the enum cannot parse.
     */
    public JobConditionEnum convertBuildResult(String buildResult) {
        JobConditionEnum parsed = JobConditionEnum.parse(buildResult);
        if (parsed == null) {
            return JobConditionEnum.Stable;
        }
        return parsed;
    }

    /** Joins the host and build path, percent-encoding spaces only. */
    public String createBuildURL(String hostURL, String buildURL) {
        String joined = hostURL + buildURL;
        return joined.replaceAll(" ", "%20");
    }

    /** Builds an issue name of the form {@code SimpleClassName#method()}. */
    public String createTestName(String className, String methodName) {
        String[] segments = className.split("\\.");
        String simpleName = segments[segments.length - 1];
        return simpleName + "#" + methodName + "()";
    }

    /**
     * Composes the note/update message for a build event: item name, the
     * condition's message, the build number and link, and an optional
     * change-set summary on a trailing line.
     */
    public String getMessage(String itemName, int buildNumber, String buildLink, JobConditionEnum condition, String changeSet) {
        String message = itemName + condition.getMessage() + " #" + buildNumber + "." + "\n" + buildLink;
        if (changeSet != null && changeSet.length() > 0) {
            message = message + "\n" + changeSet;
        }
        return message;
    }

    /**
     * Resolves the AtTask project name for a build: the job's custom name
     * (with build/environment variables expanded, best effort) when one is
     * configured, otherwise a default derived from the Jenkins job name.
     */
    public String createProjectName(AbstractBuild build, AtTaskForJenkins.DescriptorImpl descriptor) {
        AtTaskForJenkins job = (AtTaskForJenkins) build.getProject().getPublishersList().get(descriptor);
        String customName = job.getCustomProjectName();
        if (customName == null || customName.trim().length() == 0) {
            return "Jenkins " + build.getProject().getName() + " results";
        }
        String expanded = customName;
        try {
            EnvVars env = build.getCharacteristicEnvVars();
            for (Object key : build.getBuildVariables().keySet()) {
                env.put(key.toString(), (String) build.getBuildVariables().get(key));
            }
            expanded = env.expand(expanded);
        } catch (Exception e) {
            // Best effort: fall back to the unexpanded custom name.
        }
        return expanded;
    }
}
<file_sep>/src/main/java/com/attask/jenkins/plugin/Visualization/BlameAction.java
package com.attask.jenkins.plugin.Visualization;
import com.attask.jenkins.plugin.AtTaskForJenkins;
import com.attask.jenkins.plugin.DependencyFactory;
import com.attask.jenkins.plugin.RemoteConnector;
import com.attask.sdk.model.OpTask;
import com.attask.sdk.model.User;
import hudson.model.Hudson;
import hudson.tasks.junit.TestAction;
import org.apache.commons.io.output.NullOutputStream;
import org.kohsuke.stapler.StaplerRequest;
import org.kohsuke.stapler.StaplerResponse;
import java.io.PrintStream;
import java.util.ArrayList;
import java.util.List;
/**
 * Per-test action backing the "Assigned To" dropdown in the test report:
 * carries the current assignee plus candidate users, and handles the
 * "blame" HTTP request that (re)assigns the matching AtTask issue.
 *
 * User: nicholasstewart
 * Date: 2/21/12
 * Time: 3:58 PM
 */
@SuppressWarnings("deprecation")
public class BlameAction extends TestAction {
    private OpTask testObject = null;
    private User assignedTo;
    private List<User> committers;
    private List<User> groupMembers;
    private String testID;
    private String testName;
    private com.attask.sdk.model.Project project;
    private AtTaskForJenkins.DescriptorImpl descriptor;

    /** Action for an issue that is already assigned to {@code assignedTo}. */
    @SuppressWarnings(value = "@Deprecated")
    public BlameAction(OpTask testObject, User assignedTo, List<User> groupMembers, List<User> committers, AtTaskForJenkins.DescriptorImpl descriptor, String testID) {
        this.testObject = testObject;
        this.testName = testObject.getName();
        this.assignedTo = assignedTo;
        init(groupMembers, committers, descriptor, testID);
    }

    /** Action for a tracked issue that currently has no assignee. */
    public BlameAction(OpTask testObject, List<User> groupMembers, List<User> committers, AtTaskForJenkins.DescriptorImpl descriptor, String testID) {
        this.testObject = testObject;
        this.testName = testObject.getName();
        init(groupMembers, committers, descriptor, testID);
    }

    /** Action for a test without an issue; one is created lazily on blame. */
    public BlameAction(String testName, com.attask.sdk.model.Project project, List<User> groupMembers, List<User> committers, AtTaskForJenkins.DescriptorImpl descriptor, String testID) {
        this.testName = testName;
        this.project = project;
        init(groupMembers, committers, descriptor, testID);
    }

    // Shared constructor tail (the three constructors previously duplicated
    // it). Also guards against a null committer list, which would have
    // thrown NPE in the original's size() check.
    private void init(List<User> groupMembers, List<User> committers, AtTaskForJenkins.DescriptorImpl descriptor, String testID) {
        this.groupMembers = groupMembers;
        this.descriptor = descriptor;
        this.testID = testID;
        if (committers == null || committers.size() == 0) {
            this.committers = new ArrayList<User>();
        } else {
            this.committers = committers;
        }
    }

    /**
     * Handles the blame HTTP request: assigns (or unassigns, when blameId is
     * empty) the backing AtTask issue — creating the issue first if this test
     * was untracked — then redirects back to the originating page.
     */
    public void doBlame(StaplerRequest request, StaplerResponse response) throws Exception {
        String attaskId = request.getParameter("blameId");
        RemoteConnector connector = DependencyFactory.getConnector(descriptor.getUrl(), descriptor.getUsername(), descriptor.getPassword());
        try {
            // Try logging in as the user making the request
            User requestor = connector.getUser(Hudson.getAuthentication().getName(), false);
            if (requestor != null)
                connector = DependencyFactory.getConnector(descriptor.getUrl(), requestor.getUsername(), descriptor.getUsername(), descriptor.getPassword(), new PrintStream(new NullOutputStream()));
        } catch (Exception e) {
            // Best effort only: fall back to the admin connector.
        }
        try {
            if (testObject == null && project != null) {
                // Untracked test: look the issue up, creating it on first blame.
                testObject = connector.getIssue(testName, project);
                if (testObject == null) {
                    testObject = connector.addIssue(testName, project);
                }
            }
            if (attaskId == null || attaskId.length() == 0) {
                connector.unassignIssue(testObject);
            } else {
                User assignee = connector.getUserByID(attaskId); // TODO: check with Jesse about grabbing from TestActionListBuilder._cachedUsers
                if (assignee != null)
                    connector.assignIssue(testObject, assignee);
            }
        } catch (Exception e) {
            // Deliberately swallowed: a failed (re)assignment must not break
            // the redirect back to the test report page.
        }
        response.forwardToPreviousPage(request);
    }

    /** No icon of its own; the action is surfaced by the report page markup. */
    public String getIconFileName() {
        return null;
    }

    /** No display name; see getIconFileName. */
    public String getDisplayName() {
        return null;
    }

    /** URL segment under which doBlame is reachable. */
    public String getUrlName() {
        return "blame";
    }

    public List<User> getCommitters() {
        return committers;
    }

    public List<User> getGroupMembers() {
        return groupMembers;
    }

    /** Display name of the current assignee, or "Unassigned". */
    public String getAssignedToName() {
        return getAssignedTo() != null ? getAssignedTo().getName() : "Unassigned";
    }

    /** Relative URL the blame form posts to, built from the test id when available. */
    public String getBlameUrl() {
        StringBuilder builder = new StringBuilder();
        if (testID != null && testID.length() > 0) {
            builder.append(testID);
        } else {
            // fall back if there's no replacement
            builder.append("(root)/");
            String[] parts = testName.replace("()", "").split("#");
            builder.append(parts[0]);
            builder.append("/");
            builder.append(parts[1]);
        }
        builder.append("/blame/blame");
        return builder.toString();
    }

    public User getAssignedTo() {
        return assignedTo;
    }

    public OpTask getTestObject() {
        return testObject;
    }
}
<file_sep>/src/main/webapp/TableTransformer.js
$(document).ready(function (){
var headerRow;
$('select.blameSelect').change(function(e){
assign(makeUrl(e.target));
});
if(/history/i.test(document.URL) != true || $('td.testStatus').first().parent().index() != -1) {
if($('td.testStatus').first().parent().index() == 1) {
headerRow = $('td.testStatus').first().parent().prev();
} else if ($('td.testStatus').first().parent().index() == 0) {
headerRow = $('td.testStatus').first().parent().parent().prev().children().first();
}
var assignedToIndex = $('td.testFailureAssignment').first().index(); // Index for assigned to col
var statusIndex = $('td.testStatus').first().index(); // Index for status col
var assignedToHeader = $("<td>").addClass("pane-header").text("Assigned To");
var statusHeader = $("<td>").addClass("pane-header").text("Status");
if(assignedToIndex > 0) {
headerRow.children().eq(assignedToIndex-1).after(assignedToHeader);
}
if(assignedToIndex == -1){}
if(assignedToIndex == 0 || assignedToIndex < -1) {
headerRow.children().eq(assignedToIndex).before(assignedToHeader);
}
if(statusIndex > 0) {
headerRow.children().eq(statusIndex-1).after(statusHeader);
}
if(statusIndex == -1){}
if(statusIndex == 0 || statusIndex < -1) {
headerRow.children().eq(statusIndex).before(statusHeader);
}
var tableRows = $("td.testStatus").first().parent().parent().children();
var tableRow;
var status;
var firstContentRow = $('td.pane').first().parent().index(); // First row in the table with content. Not Headers.
for (var index = firstContentRow; index < tableRows.size(); index++) {
tableRow = tableRows.eq(index);
status = tableRow.find('td.testStatus').text();
if (status == 'Accepted') {
tableRow.css('color', '#00AA00')
} else if (status == 'Resolved' || status == 'Done') {
tableRow.css('color', '#999999')
} else if (status != 'Untracked') {
tableRow.css('color','#0000FF');
}
}
}
});
function makeUrl(target){
return document.URL.substring(0,document.URL.indexOf('testReport')+11)+$(target).parent().find('input:hidden#blameUrl').attr('value');
}
function assign(url) {
$.get(url, { blameId: $(this).val() } );
}<file_sep>/src/main/java/com/attask/jenkins/plugin/RemoteConnector.java
package com.attask.jenkins.plugin;
import com.attask.sdk.enums.Condition;
import com.attask.sdk.model.*;
import java.util.Collection;
import java.util.List;
/**
 * Facade over the AtTask remote API as used by this plugin. Implementations
 * hold an authenticated session; see DependencyFactory for caching and the
 * login-as-user behaviour.
 */
public interface RemoteConnector {
    /** Looks up a group by name. NOTE(review): the role of {@code username} is not visible from this file — confirm against the implementation. */
    Group getGroup(String groupName, String username);
    /** Assigns {@code issue} to {@code assignee}; returns the updated issue. */
    OpTask assignIssue(OpTask issue, User assignee);
    /** Removes the current assignee from {@code issue}; returns the updated issue. */
    OpTask unassignIssue(OpTask issue);
    /** Updates an issue's status/condition/assignment and attaches the stack trace and build link. */
    OpTask updateIssue(OpTask issue, String status, Condition atTaskIssueCondition, String assignedToID, String stackTrace, String buildLink);
    /** Creates a new issue named {@code issueName} under {@code project}. */
    OpTask addIssue(String issueName, Project project);
    /** Fetches the issue named {@code issueName} in {@code project}; callers treat null as "not found". */
    OpTask getIssue(String issueName, Project project);
    /** Issues belonging to the project with the given id. */
    List<OpTask> getIssues(String projectID);
    /** Presumably whether the issue has been acknowledged on the AtTask side — inferred from the name, confirm. */
    boolean isIssueAcknowledged(OpTask issue);
    /** Updates a project's condition and build link; returns the updated project. */
    Project updateProject(Project currentProject, JobConditionEnum projectCondition, String buildLink);
    /** Creates a project owned by {@code group}. */
    Project addProject(String projectName, Group group);
    /** Fetches a project by name; callers treat null as "not found". */
    Project getProject(String projectName);
    /** Adds a note to the object identified by id/objCode, notifying {@code notifiees}. */
    Note addNote(String message, String objID, String objCode, List<User> notifiees);
    /** Looks a user up by AtTask id. */
    User getUserByID(String userID);
    /** Looks a user up by username; {@code strict} semantics are implementation-defined — TODO confirm. */
    User getUser(String username, boolean strict);
    /** Members of the named group (used to populate the assignee dropdown). */
    List<User> getUsers(String groupName);
    /** Status value that marks an issue as resolved. */
    String getResolvedStatus();
    /** Switches the session to act as {@code username} (called after admin login by DependencyFactory). */
    void loginAsUser(String username);
}
<file_sep>/src/main/java/com/attask/jenkins/plugin/DependencyFactory.java
package com.attask.jenkins.plugin;
import org.apache.commons.io.output.NullOutputStream;
import java.io.PrintStream;
import java.util.HashMap;
import java.util.Map;
/**
 * Creates and caches {@link RemoteConnector} instances, one per user the
 * connector is logged in as.
 */
public class DependencyFactory {

    // Connector cache keyed by the login-as username.
    // NOTE(review): the key ignores url/credentials, so a changed global
    // configuration keeps serving stale connectors until restart — confirm.
    private static Map<String, RemoteConnector> connectorCache = new HashMap<String, RemoteConnector>();

    /** Admin connector with its log output discarded. */
    public static RemoteConnector getConnector(String url, String adminUsername, String adminPassword) throws Exception {
        return getConnector(url, adminUsername, adminPassword, new PrintStream(new NullOutputStream()));
    }

    /** Admin connector that writes its log output to {@code logger}. */
    public static RemoteConnector getConnector(String url, String adminUsername, String adminPassword, PrintStream logger) throws Exception {
        return getConnector(url, adminUsername, adminUsername, adminPassword, logger);
    }

    /**
     * Returns a cached connector logged in as {@code loginAsUsername},
     * creating it with the admin credentials on first use.
     *
     * Synchronized: the check-then-put on the shared static cache was
     * previously unguarded, and this method is reached from per-request
     * code (TestActionListBuilder, BlameAction) that may run concurrently.
     */
    public static synchronized RemoteConnector getConnector(String url, String loginAsUsername, String adminUsername, String adminPassword, PrintStream logger) throws Exception {
        if (!connectorCache.containsKey(loginAsUsername)) {
            connectorCache.put(loginAsUsername, new AtTaskConnector(adminUsername, adminPassword, url, logger));
            if (loginAsUsername != null && !loginAsUsername.equals(adminUsername))
                connectorCache.get(loginAsUsername).loginAsUser(loginAsUsername);
        }
        return connectorCache.get(loginAsUsername);
    }
}
<file_sep>/src/main/java/com/attask/jenkins/plugin/Visualization/AtTaskLinkAction.java
package com.attask.jenkins.plugin.Visualization;
import hudson.tasks.junit.TestAction;
/**
 * Test action that carries the hyperlink to the AtTask issue backing a
 * test case, for the report page to render.
 *
 * User: jenniferrumbaugh
 * Date: 2/27/12
 * Time: 2:52 PM
 */
public class AtTaskLinkAction extends TestAction {

    private final String atTaskLink;
    private final String testName;

    public AtTaskLinkAction(String atTaskLink, String testName) {
        this.atTaskLink = atTaskLink;
        this.testName = testName;
    }

    /** @return URL of the AtTask issue view page. */
    public String getLink() {
        return atTaskLink;
    }

    /** @return the issue/test name shown as the link text. */
    public String getTestName() {
        return testName;
    }

    /** No icon of its own; the link is rendered via getLink()/getTestName(). */
    public String getIconFileName() {
        return null;
    }

    /** No display name; see getIconFileName. */
    public String getDisplayName() {
        return null;
    }

    /** No URL space of its own. */
    public String getUrlName() {
        return null;
    }
}
| cc1438f7e1d811015d8008eec0a793c1d06ba87a | [
"JavaScript",
"Java",
"Maven POM"
] | 9 | Java | nickstew/AtTaskForJenkins | 3c85da9d43b4b65591bfac12e2893bc17c92727e | 97c6c3a81f6a68d54ae2a897706f5d3016ec05c7 |
refs/heads/master | <repo_name>prahladsingh44/angular6<file_sep>/src/app/app.component.ts
import { Component } from '@angular/core';
import {HelloComponent} from './hello/hello.component';
// function log(target, name, descriptor) {
// console.log(target, name, descriptor);
// const origional = descriptor.value;
// descriptor.value = function() {
// console.log('this function is hacked');
// };
// return descriptor;
// }
@Component({
selector: 'app-root',
templateUrl: './app.component.html',
styleUrls: ['./app.component.css']
})
export class AppComponent {
title = 'MyApp';
// constructor() {
// this.asimpleMethod();
// }
// @log
// asimpleMethod() {
// console.log('Hey There');
// }
}
| bbeed6518fa3de762559c4b7fa15e832811d9fe4 | [
"TypeScript"
] | 1 | TypeScript | prahladsingh44/angular6 | ebf3099cf04042fc10f7d75d4eab1081899f3092 | 6ad01006fedae65b83605bb45ca2b5bc07dd2522 |
refs/heads/master | <repo_name>zhangchengkain1/eadm<file_sep>/README.md
eadm
====
| cc1cfc07439591392a5f0a31b4bfeec9dc861c73 | [
"Markdown"
] | 1 | Markdown | zhangchengkain1/eadm | a6575c628907945763e6fdbc7fa2e4a2c19aa2bc | 7260d29213648f46ccf9b634449aee3efc518f95 |
refs/heads/master | <repo_name>bcoolidge28/Benton-and-Serenas-project<file_sep>/FINISHED PYTHON GAME.py
#project
#project opener
print 'It was long ago, precisely 45 A.D. King Certifacato sends a knight to go get you. This is where you come in. By the way what is your name?'
print ' '
userInput = raw_input()
print 'Knight: Have you seen a youg man named ' + str(userInput)
print ' '
print str(userInput) + ': That is me'
print ' '
print 'Knight: Oh... Then come with me King Certifacato needs you.'
print ' '
#Going to see the king
print '(You go with the knight to see the king)'
print '~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~'
print '(The king sees you walk in and sends his servants that are surrounding him away.)'
print 'King Certifacato: You must be ' + str(userInput)
print ' '
print str(userInput) + ': Yes that`s me your majesty. Why did you need me?'
print ' '
print 'King Certifacato: Did you know your granfather ' + str(userInput)
print ' '
userAnswer1 = raw_input()
if userAnswer1 == str('yes'):
print 'King Certifacato: Then you knew that your grandfather was a great knight.'
print ' '
print str(userInput) + ': Yes, and what does that have to do with me?'
print ' '
elif userAnswer1 == str('no'):
print 'King Certifacato: Ok well your grandfather was the greastest knight that has ever fought for this kingdom.'
print ' '
print str(userInput) + ': Ok why did I need to know that.'
print ' '
print 'King Certifacato: Well since he was a great knight I figured that you would be to. So I am offering you a chance to become rich.'
print ' '
print str(userInput) + ': Well what is it that I must do to become rich.'
print ' '
print 'King Certifacato: My crown was stolen and I need you to go to the Dark Forest and get it back from Trolly The Terrible.'
print ' '
print str(userInput) + ': Trolly The Terrible! Sorry your majesty but I can`t do it.'
print ' '
print 'King Certifacato: If you do it and bring it back to me then I will pay you 5,000,000 gold pieces.'
print ' '
print str(userInput) + ': Well I do need the money badly so...fine i`ll do it.'
print ' '
print 'King Certifacato: Great, my best knight Sir Buttocks will teach you how to fight.'
print ' '
print '(You go out with Sir Buttocks to the training fields.)'
print '~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~'
#learning how to fight with Sir Buttocks
import random
damageBySquirrel = random.randint(10,20)
damageBySlime = random.randint(12,22)
damageByBear = random.randint(20,27)
damageByTrollyTheTerrible = random.randint(30,40)
damageByWizard = random.randint(22,28)
playerHealth = 100#
playerHealth2 = 100#
playerHealth3 = 100#
playerHealth4 = 100#
playerHealth5 = 100#
squirrelHealth = 100#
slimeHealth = 100#
trollytheterribleHealth = 100#
bearHealth = 100
wizardHealth = 100
watermelonHealth = 1
punchDmg = random.randint(1,5)
swordDmg = random.randint(15,20)
maceDmg = random.randint(20,35)
lightningspellDmg = random.randint(1,40)
print 'Sir Buttocks: Okay when fighting you have 3 different attacks, a punch, a sword stab, and a mace hit. The punch is the weakest attack, the sword stab has meduim damage, and the mace has the highest damage. To attack all you have to do is hit 1,2,3. 1 is a punch, 2 is a sword stab, and 3 is a mace hit.'
print ' '
print str(userInput) + ": Okay is there anything that I can practice on?"
print ' '
print 'Sir Buttocks: Oh yeah (grabs a watermelon and puts it on a table). Smash that watermelon.'
print '~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~'
keepGoing = True
while (playerHealth > 0 and watermelonHealth > 0):
print "Choose an action. 1 will punch, 2 will use the sword, and 3 will use the mace."
userInput2 = int(raw_input())
if userInput2 == 1:
print "You punch the watermelon."
watermelonHealth = int(watermelonHealth) - punchDmg
if userInput2 == 2:
print "You stab the watermelon."
watermelonHealth = int(watermelonHealth) - swordDmg
if userInput2 == 3:
print "You hit the watermelon with your mace."
watermelonHealth = int(watermelonHealth) - maceDmg
print "Sir Buttocks: You smashed a watermelon now go and get the kings crown back from real monsters that can fight back."
print ' '
print str(userInput) + ': Great.'
print ' '
print '(You put on armour and set out for the dark forest)'
print '~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~'
print '(You stand outside the Dark Forest and see a sign that says KEEP OUT,DO NOT ENTER)'
print str(userInput) + ': Well that`s not a good sign.'
print ' '
print '(You enter The Dark Foret)'
print ' '
print '(About an our after entering he Dark Forest a giant man eating squirrel bursts out of the bushes)'
print str(userInput) + ': Wow!'
print '~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~'
#first fight
while (playerHealth > 0 and squirrelHealth > 0):
print "Choose an action. 1 will punch, 2 will use the sword, and 3 will use the mace."
userInput2 = int(raw_input())
if userInput2 == 1:
print "You punch the squirrel."
squirrelHealth = int(squirrelHealth) - punchDmg
print "The squirrel struck back"
playerHealth = playerHealth - damageBySquirrel
print 'Your health is ' + str(playerHealth)
print 'The giant man eating squirrel`s health is ' + str(squirrelHealth)
if userInput2 == 2:
print "You stab the squirrel."
squirrelHealth = int(squirrelHealth) - swordDmg
print "The squirrel struck back"
playerHealth = playerHealth - damageBySquirrel
print 'Your health is ' + str(playerHealth)
print 'The giant man eating squirrel`s health is ' + str(squirrelHealth)
if userInput2 == 3:
print "You hit the squirrel with your mace."
squirrelHealth = int(squirrelHealth) - maceDmg
print "The squirrel struck back"
playerHealth = playerHealth - damageBySquirrel
print 'Your health is ' + str(playerHealth)
print 'The giant man eating squirrel`s health is ' + str(squirrelHealth)
if (playerHealth > 0 and squirrelHealth < 0):
print 'You killed the giant man eating squirrel!'
print '~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~'
print '(You start running down the path in the forest)'
print ' '
print '(You stop in front of a tree when all of a sudden a slime ball comes out from behind the tree and fights you)'
#second fight
while (playerHealth2 > 0 and slimeHealth > 0):
print "Choose an action. 1 will punch, 2 will use the sword, and 3 will use the mace."
userInput3 = int(raw_input())
if userInput3 == 1:
print "You punch the slime."
slimeHealth = int(slimeHealth) - punchDmg
print "The Slime struck back"
playerHealth2 = playerHealth2 - damageBySlime
print 'Your health is ' + str(playerHealth2)
print 'The slime`s health is ' + str(slimeHealth)
if userInput3 == 2:
print "You stab the slime."
slimeHealth = int(slimeHealth) - swordDmg
print "The Slime struck back"
playerHealth2 = playerHealth2 - damageBySlime
print 'Your health is ' + str(playerHealth2)
print 'The slime`s health is ' + str(slimeHealth)
if userInput3 == 3:
print "You hit the slime with your mace."
slimeHealth = int(slimeHealth) - maceDmg
print "The Slime struck back"
playerHealth2 = playerHealth2 - damageBySlime
print 'Your health is ' + str(playerHealth2)
print 'The slimes health is ' + str(slimeHealth)
if (playerHealth2 > 0 and slimeHealth < 0):
print 'You slayed the slime and start moving down the path once more.'
print '(You dig a hole in the ground for safety through the night but you are awokened by the roar of a bear.)'
print '~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~'
#third fight
while (playerHealth3 > 0 and bearHealth > 0):
print "Choose an action. 1 will punch, 2 will use the sword, and 3 will use the mace."
userInput4 = int(raw_input())
if userInput4 == 1:
print "You punch the bear."
bearHealth = int(bearHealth) - punchDmg
print "The bear struck back"
playerHealth3 = playerHealth3 - damageByBear
print 'Your health is ' + str(playerHealth3)
print 'The bear`s health is ' + str(bearHealth)
if userInput4 == 2:
print "You stab the bear."
bearHealth = int(bearHealth) - swordDmg
print "The bear struck back"
playerHealth3 = playerHealth3 - damageByBear
print 'Your health is ' + str(playerHealth3)
print 'The bear`s health is ' + str(bearHealth)
if userInput4 == 3:
print "You hit the bear with your mace."
bearHealth = int(bearHealth) - maceDmg
print "The bear struck back"
playerHealth3 = playerHealth3 - damageByBear
print 'Your health is ' + str(playerHealth3)
print 'The bears`s health is ' + str(bearHealth)
if (playerHealth > 0 and bearHealth < 0):
print 'You killed the bear!'
print '~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~'
print ' '
print '(You start running when you come upon a house inside of a tree, you walk up to it and knock on the door. A wizard appears and says...'
print ' '
print 'Wizard: Let me guess you came upon my house on your journey to Trolly the Terrible.'
print str(userInput) + ': Uh yeah how did you know?'
print ' '
print 'Wizard: You are not the first knight that King Certifacto has sent on a task to defeat Trooly the Terrible.'
print 'I`ll make a deal with you, if I give you a lightning spell to defeat Trolly the Terrible you have to tell the king to never send anyone out here again.'
print ' '
print str(userInput) + ': Fine it`s a deal as long as I get the potion.'
print ' '
print '(The wizard suddenly pulls out a wand to figh you.)'
print ' '
print 'Wizard: You didn`t think that I would just give you a spell potion did you. HAHA. If you defeat me then you`ll be abble to get the spell.'
print str(userInput) + ': Oh no.'
print'~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~'
while (playerHealth4 > 0 and wizardHealth > 0):
print "Choose an action. 1 will punch, 2 will use the sword, and 3 will use the mace."
userInput5 = int(raw_input())
if userInput5 == 1:
print "You punch the wizard."
wizardHealth = int(wizardHealth) - punchDmg
print "The wizard struck back"
playerHealth4 = playerHealth4 - damageByWizard
print 'Your health is ' + str(playerHealth4)
print 'The wizards`s health is ' + str(wizardHealth)
if userInput5 == 2:
print "You stab the wizard."
wizardHealth = int(wizardHealth) - swordDmg
print "The wizard struck back"
playerHealth4 = playerHealth4 - damageByWizard
print 'Your health is ' + str(playerHealth4)
print 'The wizard`s health is ' + str(wizardHealth)
if userInput5 == 3:
print "You hit the wizard with your mace."
wizardHealth = int(wizardHealth) - maceDmg
print "The wizard struck back"
playerHealth4 = playerHealth4 - damageByWizard
print 'Your health is ' + str(playerHealth4)
print 'The wizards is ' + str(wizardHealth)
if (playerHealth > 0 and wizardHealth < 0):
print 'You killed the wizard!'
print '~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~'
print ' '
print 'You walk into the wizards house and grab the scroll with how to use the lightning spell and you read it.)'
print ' '
print 'YOU KNOW CAN USE THE LIGHTNING SPELL!!!!!!!!!!!!!!!!!'
print ' '
print '(You walk through the woods along teh path for about an hour and you see the other side of the forest and you sprint through the edge and into the open.)'
print ' '
print '(You look forward and see Trolly`s village so you walk into the village and walk up to Trolly.)'
print ' '
print str(userInput) + ': Are you Trolly the Terrible?'
print 'Trolly: Yes, and who are you you HUMAN!'
print str(userInput) + ': I`m the guy that is going to kill you and get back the kings crown!'
print 'Trolly: OH LETS SEE ABOUT THAT!'
print '(Trolly jumps from his throne and pulls out his bone sword to fight you.)'
while (playerHealth5 > 0 and trollytheterribleHealth > 0):
print "Choose an action. 1 will punch, 2 will use the sword, and 3 will use the mace, and 4 will use the lightning spell."
userInput6 = int(raw_input())
if userInput6 == 1:
print "You punch Trolly."
trollytheterribleHealth = int(trollytheterribleHealth) - punchDmg
print "Trolly struck back"
playerHealth5 = playerHealth5 - damageByTrollyTheTerrible
print 'Your health is ' + str(playerHealth5)
print 'The Trollys`s health is ' + str(trollytheterribleHealth)
if userInput6 == 2:
print "You stab Trolly."
trollytheterribleHealth = int(trollytheterribleHealth) - swordDmg
print "Trolly struck back"
playerHealth5 = playerHealth5 - damageByTrollyTheTerrible
print 'Your health is ' + str(playerHealth4)
print 'The Trolly`s health is ' + str(trollytheterribleHealth)
if userInput6 == 3:
print "You hit Trolly with your mace."
trollytheterribleHealth = int(trollytheterribleHealth) - maceDmg
print "Trolly struck back"
playerHealth5 = playerHealth5 - damageByTrollyTheTerrible
print 'Your health is ' + str(playerHealth4)
print 'Trolly`s is ' + str(trollytheterribleHealth)
if userInput6 == 4:
print "You cast the lightning spell on Trolly."
trollytheterribleHealth = int(trollytheterribleHealth) - lightningspellDmg
print "Trolly struck back"
playerHealth5 = playerHealth5 - damageByTrollyTheTerrible
print 'Your health is ' + str(playerHealth5)
print 'Trolly`s is ' + str(trollytheterribleHealth)
if (playerHealth > 0 and trollytheterribleHealth < 0):
print 'You killed Trolly the Terrible!'
print '(You go into Trolly`s tent and get King Certifacato`s crown.)'
print '(You go back to the kingdom and give the king his crown back.)'
print '(The king thenn gives you 5,000,000 gold pieces and you lie happily ever after until he came back to life.)'
print 'YOU WIN!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!'
if (playerHealth < 0 and trollytheterribleHealth >= 0):
print 'Trolly the Terrible demolidhed you.'
print 'GAME OVER'
if (playerHealth < 0 and wizardHealth >= 0):
print 'The wizard zapped you to death.'
print 'GAME OVER'
if (playerHealth <= 0 and bearHealth >= 0):
print 'The bear mauled you to death.'
print 'GAME OVER'
if (playerHealth <= 0 and slimeHealth >= 0):
print 'The slime absorbed you.'
print 'GAME OVER'
if (playerHealth <= 0 and squirrelHealth >= 0):
print 'The Giant Man Eating squirrel ate you.'
print 'GAME OVER'
keepGoing = False
| be08051c185cf5f5c915bcd6d55ce3f9dc17998e | [
"Python"
] | 1 | Python | bcoolidge28/Benton-and-Serenas-project | fb9ac8879bfee3b2ee61206b6412fa9070da9036 | c32c16fab5671a586fdbbfd92559192b8eca6a0e |
refs/heads/master | <repo_name>pratik702/Boggle-client<file_sep>/src/index.js
import React from 'react';
import ReactDOM from 'react-dom';
import './index.css';
import '../node_modules/bootstrap/dist/css/bootstrap.min.css'
import { NotificationContainer, NotificationManager } from 'react-notifications';
// Boggle game board and word-entry panel.
// Fetches a random letter board from the local boggle-service backend, lets
// the player compose a word by clicking letter tiles, validates each word
// server-side, and tallies one point per character of every accepted word.
// The `isGameOver` prop (driven by <Timer/>) disables all inputs.
class Game extends React.Component {
    constructor(props) {
        super(props);
        // alphabets:        board tiles from the service; each item carries an
        //                   `alphabet` letter and its `neighbors` letters
        // selectedAlphabet: the word currently being composed
        // wordList:         words accepted by the server so far
        // totalPoints:      running score
        // color:            NOTE(review): appears unused — confirm before removing
        this.state = {
            alphabets: [''],
            selectedAlphabet: '',
            wordList: [],
            totalPoints: 0,
            color: 'white',
        }
    }
    componentDidMount() {
        // Load a size-4 board as soon as the component mounts.
        this.getRandomAlphabets(4);
    }
    // Fetch a fresh random board for the given size and reset the word list
    // and score.  Errors are only logged; the render fallback board shows 404.
    getRandomAlphabets(boardSize) {
        fetch('http://localhost:8080/boggle-service/alphabetWordList/' + boardSize)
            .then(res => res.json())
            .then((data) => {
                this.setState({
                    alphabets: data,
                    selectedAlphabet: '',
                    wordList: [],
                    totalPoints: 0
                })
            })
            .catch(console.log)
    }
    // Return the neighbour letters of the given board letter.
    // NOTE(review): lookup is by letter value, so a board containing the same
    // letter twice always resolves to its first occurrence — confirm this is
    // acceptable.  Throws if the letter is not on the board at all.
    getNeighbors(alphabet) {
        var object = this.state.alphabets.filter(item => {
            return item.alphabet === alphabet
        })
        return object[0].neighbors;
    }
    // Ask the backend whether `word` is a dictionary word.  If so, append it
    // to the word list and add word.length points; otherwise pop a transient
    // "Invalid word" notification.
    validateWord(word) {
        var url = 'http://localhost:8080/boggle-service/validateWord/' + word.toLowerCase();
        fetch(url)
            .then(res => res.json())
            .then((data) => {
                if (data) {
                    // NOTE(review): pushes onto the state array in place before
                    // setState — works, but mutates state directly.
                    var wordList = this.state.wordList;
                    wordList.push(word)
                    var totalPoints = this.state.totalPoints
                    totalPoints += word.length
                    this.setState({
                        alphabets: this.state.alphabets,
                        selectedAlphabet: this.state.selectedAlphabet,
                        wordList: wordList,
                        totalPoints: totalPoints
                    })
                }
                else {
                    NotificationManager.info('Invalid word', '', 1000);
                }
            })
            .catch(console.log)
    }
    render() {
        // Fall back to a placeholder board spelling "SRVR ERR 404" when the
        // initial fetch has not (or could not) populate the tiles.
        if (this.state.alphabets.length > 1) {
            var alphabets = this.state.alphabets.map(item => item.alphabet);
        }
        else {
            var alphabets = ['S', 'R', 'V', 'R', 'E', 'R', 'R', ' ', '4', '0', '4', ' ']
        }
        var words = this.state.wordList
        return (
            <div className="container game" style={{marginLeft:'33%'}}>
                <div className="" style={{ height: 'auto', width: '20%', backgroundColor: 'skyblue', padding:35, textAlign:'center' }}>
                    <text style={{color:'white'}}><h1 style={{alignSelf:'right'}}>BOGGLE</h1></text>
                    <div className="row" style={{ paddingBottom: '20PX' }}>
                        {
                            alphabets.map((val, index) => {
                                return (
                                    <div style={{ padding: '0', width: '25%' }}>
                                        <button className="whiteButton" disabled={this.props.isGameOver} style={{ width: '100%', border: '3px solid skyblue' }} onClick={() => this.myfunction(val)}>
                                            <p style={{ fontSize: '20', fontWeight: 'bold',marginBottom:'0.8rem',marginTop:'0.8rem' }}>
                                                {val}
                                            </p>
                                        </button>
                                    </div>
                                );
                            })
                        }
                    </div>
                    <div className="row">
                        <input disabled={this.props.isGameOver} style={{ width: '60%', size: 40, borderRadius:'20px' }} value={this.state.selectedAlphabet}></input>
                        <button className="orangeButton" disabled={this.props.isGameOver} style={{ width: '40%' }} onClick={() => this.submitWord()}>
                            Send</button>
                    </div>
                    <div className="row" style={{ border: '3px solid skyblue', color: 'red', fontWeight:'bold' }}>
                        <NotificationContainer />
                    </div>
                </div>
                <div className="" style={{ height: 'auto', width: '20%', backgroundColor: 'skyblue', padding:'150px 35px 35px 35px', textAlign:'center' }}>
                    <div className="row" style={{ border: '3px solid skyblue', color: 'white', fontWeight:'bold', alignItems:'center' }}>
                        Total Points: <h1>{this.state.totalPoints}</h1>
                    </div>
                    <div className="row" style={{ border: '3px solid skyblue', color: 'white', fontWeight:'bold' }}>
                        Selected words:
                    </div>
                    <div className="row" style={{ border: '3px solid skyblue', color: 'white', fontWeight:'bold', textAlign:'initial' }}>
                        {
                            words.map((val, index) => {
                                return (
                                    <div style={{ width: '100%' }}>
                                        <h1>
                                            {val}
                                        </h1>
                                    </div>
                                );
                            })
                        }
                    </div>
                </div>
            </div>
        );
    }
    // Tile click handler: append the clicked letter to the word being built.
    myfunction(val) {
        let word = this.state.selectedAlphabet;
        this.setState({
            alphabets: this.state.alphabets,
            selectedAlphabet: word + val,
            wordList: this.state.wordList,
            totalPoints: this.state.totalPoints,
        });
    }
    // Validate the composed word locally — every letter must be a board
    // neighbour of the previous letter, and no letter may be reused — then
    // hand it to the server check.  The input box is cleared either way.
    submitWord() {
        var word = this.state.selectedAlphabet;
        var isValid = true;
        for (let i = 1; i < word.length; i++) {
            // Adjacency: letter i must neighbour letter i-1 on the board.
            if (!(this.getNeighbors(word[i - 1]).includes(word[i]))) {
                isValid = false;
            }
            // No-reuse: letter i must not already appear earlier in the word.
            if (word.substring(0, i).includes(word[i])) {
                isValid = false;
            }
        }
        if (isValid) {
            this.validateWord(this.state.selectedAlphabet);
        }
        else {
            NotificationManager.info('Invalid word', '', 1000);
        }
        this.setState({
            alphabets: this.state.alphabets,
            selectedAlphabet: '',
            wordList: this.state.wordList,
            totalPoints: this.state.totalPoints
        })
    }
}
export default class Timer extends React.Component {
state = {
minutes: 1,
seconds: 0,
}
componentDidMount() {
this.myInterval = setInterval(() => {
const { seconds, minutes } = this.state
if (seconds > 0) {
this.setState(({ seconds }) => ({
seconds: seconds - 1
}))
}
if (seconds === 0) {
if (minutes === 0) {
clearInterval(this.myInterval)
} else {
this.setState(({ minutes }) => ({
minutes: minutes - 1,
seconds: 59
}))
}
}
}, 1000)
}
componentWillUnmount() {
clearInterval(this.myInterval)
}
render() {
const { minutes, seconds } = this.state
if (!(minutes === 0 && seconds === 0)) {
return (
<div className="display">
<Game isGameOver={false} />
<div>
{
<h1 style={{color: 'skyblue'}}>Time Remaining: {minutes}:{seconds < 10 ? `0${seconds}` : seconds}</h1>
}
</div>
</div>
)
}
else {
return (
<div className="display">
<Game isGameOver={true} />
<div>
{
<h1 style={{color: 'skyblue'}}>Game over</h1>
}
</div>
</div>
)
}
}
}
// ========================================
ReactDOM.render(
<Timer />,
document.getElementById('root')
);
<file_sep>/README.md
# Boggle-client
**Prerequisites:**
1. Node Package manager should be installed.
2. Boggle-service should be running. (https://github.com/pratik702/Boggle-service)
**Steps to setup:**
1. Clone this project.
2. Open command prompt and traverse to the project directory.
3. Execute this command to download project dependencies: npm install
4. Execute this command to run the project: npm start
**Rules of the game:**
1. Four v Four board is loaded.
2. Time limit of 1 minute is set.
3. Only such words are valid, in which each an alphabet should lie adjacent to the next alphabet.
4. Each word give points equal to the number of characters.
| 37c4d3ae298dd36779c1dcaf9012f6448d8aa05e | [
"JavaScript",
"Markdown"
] | 2 | JavaScript | pratik702/Boggle-client | 996b307f3cb44361da64dd42778f25b61f5304c7 | ff67aa7d6864df6b4d4a67cedb02992764162448 |
refs/heads/main | <file_sep>import pytest
from flaskr.db import get_db
def test_index(client, auth):
    """Index shows auth links when logged out and the seeded post when logged in."""
    # Anonymous visitors only get the authentication links.
    anon_page = client.get("search/index").data
    assert b"Log In" in anon_page
    assert b"Register" in anon_page

    # A logged-in author additionally sees the seeded post and its edit link.
    auth.login()
    page = client.get("search/index").data
    for expected in (
        b"test title",
        b"by test on 2018-01-01",
        b"test\nbody",
        b'href="/search/1/update"',
    ):
        assert expected in page
assert b'href="/search/1/update"' in response.data
@pytest.mark.parametrize("path", ("search/create", "search/1/update", "search/1/delete"))
def test_login_required(client, path):
    """Unauthenticated writes are redirected to the login page."""
    redirect_target = client.post(path).headers["Location"]
    assert redirect_target == "http://localhost/auth/login"
def test_author_required(app, client, auth):
    """Only the author of a recipe may modify it."""
    # Reassign the seeded recipe to a different user.
    with app.app_context():
        db = get_db()
        db.execute("UPDATE recipe SET author_id = 2 WHERE id = 1")
        db.commit()

    auth.login()
    # The logged-in user may not modify somebody else's recipe ...
    for action in ("update", "delete"):
        assert client.post("search/1/" + action).status_code == 403
    # ... and gets no edit link on the index page.
    assert b'href="/search/1/update"' not in client.get("search/index").data
@pytest.mark.parametrize("path", ("search/2/update", "search/2/delete"))
def test_exists_required(client, auth, path):
    """Acting on a recipe id that was never created yields a 404."""
    auth.login()
    status = client.post(path).status_code
    assert status == 404
def test_create(client, auth, app):
    """The create form renders and a submission adds a row to the table."""
    auth.login()
    assert client.get("search/create").status_code == 200

    client.post("search/create", data={"title": "created", "body": ""})
    with app.app_context():
        total = get_db().execute("SELECT COUNT(id) FROM recipe").fetchone()[0]
        # One seeded recipe plus the one just created.
        assert total == 2
def test_update(client, auth, app):
    """The update form renders and a submission rewrites the stored title."""
    auth.login()
    assert client.get("search/1/update").status_code == 200

    client.post("search/1/update", data={"title": "updated", "body": ""})
    with app.app_context():
        row = get_db().execute("SELECT * FROM recipe WHERE id = 1").fetchone()
        assert row["title"] == "updated"
@pytest.mark.parametrize("path", ("search/create", "search/1/update"))
def test_create_update_validate(client, auth, path):
    """Submitting an empty title is rejected with a flashed validation message."""
    auth.login()
    page = client.post(path, data={"title": "", "body": ""})
    assert b"Title is required." in page.data
def test_delete(client, auth, app):
    """Deleting redirects to the blog index and removes the row."""
    auth.login()
    redirect_target = client.post("search/1/delete").headers["Location"]
    assert redirect_target == "http://localhost/blog/index"

    with app.app_context():
        row = get_db().execute("SELECT * FROM recipe WHERE id = 1").fetchone()
        assert row is None
<file_sep>
-- Inserts dummy data into ingredient_cat and ingredient tables
-- Seed the five ingredient categories; SQLite assigns ids 1-5 in insertion
-- order, which the ingredient rows below reference.
INSERT INTO ingredient_cat ('name') VALUES ('Fat'), ('Potato'), ('Salt'), ('Spice'), ('Milk');

-- ingredient columns are (id, name, category_id, then seven ratings).
-- NOTE(review): rating order assumed from recipe.py's SELECT list —
-- nourishment, value, human welfare, animal welfare, resource consumption,
-- biodiversity, global warming.  Confirm against the schema definition.
-- id is NULL so SQLite assigns the next rowid.

-- Fats (category 1)
INSERT INTO ingredient VALUES (NULL,'butter-generic', 1, 3.0, 3.5, 3.0, 2.5, 2.5, 3.0, 2.0);
INSERT INTO ingredient VALUES (NULL, 'butter-organic', 1, 3.0, 3.5, 3.0, 3.0, 3.0, 3.5, 2.0);
INSERT INTO ingredient VALUES (NULL, 'butter-vegan', 1, 3.0, 3.5, 3.0, 3.0, 4.5, 3.5, 4.0);
-- Potatoes (category 2)
INSERT INTO ingredient VALUES (NULL, 'potatoes-generic', 2, 4.0, 4.0, 3.0, 3.0, 4.5, 3.5, 4.0);
INSERT INTO ingredient VALUES (NULL, 'potatoes-organic', 2, 4.0, 4.0, 4.0, 4.0, 4.5, 3.5, 4.0);
INSERT INTO ingredient VALUES (NULL, 'potatoes-local', 2, 4.0, 4.0, 4.5, 4.5, 4.5, 4.5, 5.0);
-- Salts (category 3)
INSERT INTO ingredient VALUES (NULL, 'salt-generic', 3, 4.0, 4.0, 3.0, 3.0, 4.5, 3.5, 4.0);
INSERT INTO ingredient VALUES (NULL, 'salt-imported', 3, 4.0, 4.0, 4.0, 4.0, 4.5, 3.5, 3.0);
-- Spices (category 4)
INSERT INTO ingredient VALUES (NULL, 'pepper-generic', 4, 4.0, 4.0, 3.0, 3.0, 4.5, 3.5, 4.0);
INSERT INTO ingredient VALUES (NULL, 'pepper-organic', 4, 4.0, 4.0, 4.0, 4.0, 4.5, 3.5, 3.0);
-- Milks (category 5)
INSERT INTO ingredient VALUES (NULL, 'milk-generic', 5, 3.0, 4.0, 3.0, 2.5, 2.5, 2.5, 1.0);
INSERT INTO ingredient VALUES (NULL, 'milk-organic', 5, 4.0, 4.0, 4.0, 4.0, 4.5, 3.5, 2.0);
INSERT INTO ingredient VALUES (NULL, 'milk-soy', 5, 4.0, 3.5, 4.0, 4.0, 4.5, 3.5, 4.0);
INSERT INTO ingredient VALUES (NULL, 'milk-soy', 5, 4.0, 3.5, 4.0, 4.0, 4.5, 3.5, 4.0); <file_sep>from flask import Blueprint
from flask import flash
from flask import g
from flask import redirect
from flask import render_template
from flask import request
from flask import url_for
from werkzeug.exceptions import abort
from flaskr.auth import login_required
from flaskr.db import get_db
from flaskr.ingredient import getAlternativesByRatingAvg
import time
import sqlite3
bp = Blueprint("recipe", __name__, url_prefix="/recipe")
@bp.route("/index", methods=("GET", "POST"))
def index():
    """Show all recipes, most recent first.

    A POST narrows the listing: ``filter`` names the column to search,
    ``search`` is the substring to match, and ``category`` optionally
    restricts to one recipe category ('NULL' means no restriction).

    Aborts with 400 when ``filter`` is not a known column.
    """
    db = get_db()
    sql = (
        "SELECT * FROM recipe r "
        "JOIN user u ON r.author_id = u.id JOIN recipe_cat rc ON r.category_id = rc.id "
    )
    params = []
    if request.method == "POST":
        # SECURITY: the original code concatenated raw form input straight
        # into the SQL string, allowing SQL injection.  The column name is
        # now checked against a fixed allowlist (identifiers cannot be bound
        # as parameters) and the user-supplied values are bound with ?.
        # TODO(review): confirm the allowlist matches the <select name="filter">
        # options offered by the index template.
        allowed_filters = {"title", "body", "username", "r.title", "r.body", "u.username"}
        filter_column = request.form["filter"]
        if filter_column not in allowed_filters:
            abort(400, f"Cannot filter on {filter_column!r}.")
        sql += "WHERE " + filter_column + " LIKE ? "
        params.append("%" + request.form["search"] + "%")
        if request.form["category"] != 'NULL':
            sql += "AND rc.name = ? "
            params.append(request.form["category"])
    sql += "ORDER BY created DESC"
    recipes = db.execute(sql, params).fetchall()
    return render_template("recipe/index.html", recipes=recipes)
@bp.route("/<int:id>")
def view_recipe(id):
    """Render a single recipe page together with its ingredient list.

    Aborts with 404 (via get_recipe) when the recipe does not exist.
    """
    return render_template(
        "recipe/view.html",
        recipe=get_recipe(id),
        ingredients=get_recipe_ingredients(id),
    )
def get_recipe(id):
    """Return the recipe row joined with its author, or abort with 404.

    The returned row carries the recipe columns plus the author's user
    columns via the ``author_id`` join.

    The original implementation queried the table twice: once to check
    existence and again with the join.  A single joined query suffices —
    and it also turns the previously-unhandled case of a recipe whose
    ``author_id`` matches no user (which silently returned None and crashed
    callers) into a clean 404.
    """
    recipe = (
        get_db()
        .execute(
            "SELECT * FROM recipe r JOIN user u ON r.author_id = u.id "
            "WHERE r.id = ?",
            (id,),
        )
        .fetchone()
    )
    if recipe is None:
        abort(404, f"Recipe id {id} doesn't exist.")
    return recipe
def get_recipe_ingredients(id):
    """Return a cursor over the ingredients of the recipe with the given id.

    Each row joins recipe_ingredient (amount, unit) with the ingredient's
    name, category and sustainability ratings.  Aborts with 404 (via
    get_recipe) when the recipe does not exist.
    """
    get_recipe(id)  # existence check only; aborts 404 for unknown ids
    ingredients = get_db().execute(
        # BUG FIX: the original SELECT list was missing the comma after
        # category_id, so SQLite parsed "category_id r_nourishment" as a
        # column alias — the real r_nourishment column was silently dropped
        # from the result and category_id was mislabelled.
        "SELECT recipe_id, ingredient_id, amount, unit, name, category_id, "
        "r_nourishment, r_value, r_human_welfare, r_animal_welfare, "
        "r_resource_cons, r_biodiversity, r_global_warming "
        "FROM recipe_ingredient ri "
        "JOIN ingredient i ON i.id=ingredient_id "
        "WHERE recipe_id= ?",
        (id,),
    )
    return ingredients
@bp.route("/create", methods=("GET", "POST"))
def create():
    """Create a new recipe.

    GET renders the form with the category and ingredient choices.  A valid
    POST inserts the recipe and redirects to the index; a POST failing
    validation flashes the error and re-renders the form.  Anonymous
    submissions are stored under user id 1.
    """
    if request.method == "POST":
        title = request.form["title"]
        body = request.form["body"]
        recipe_category = request.form["category"]

        error = None
        if not title:
            error = "Title is required."
        if not recipe_category:
            error = "Recipe category is required"

        if error is not None:
            flash(error)
        else:
            # The original duplicated the INSERT for the anonymous and the
            # logged-in case; only the author id differs, so compute it once.
            # TODO(review): confirm user id 1 is a guaranteed seed row used
            # as the anonymous/placeholder author.
            author_id = 1 if g.user is None else g.user["id"]
            db = get_db()
            db.execute(
                "INSERT INTO recipe (author_id, category_id, title, body) VALUES (?, ?, ?, ?)",
                (author_id, recipe_category, title, body),
            )
            db.commit()
            return redirect(url_for("recipe.index"))

    # GET (or failed validation): render the form with its dropdown data.
    db = get_db()
    categories = db.execute("SELECT DISTINCT id, name FROM recipe_cat")
    ingredients = db.execute("SELECT DISTINCT id, name FROM ingredient")
    return render_template("recipe/create.html", categories=categories, ingredients=ingredients)
@bp.route("/<int:id>/update", methods=("GET", "POST"))
@login_required
def update(id):
    """Edit an existing recipe's title and body.

    Aborts with 404 (via get_recipe) when the recipe does not exist.  A
    valid POST saves the changes and redirects to the index; a POST with an
    empty title flashes an error and re-renders the edit form.
    """
    post = get_recipe(id)

    if request.method == "POST":
        title = request.form["title"]
        body = request.form["body"]
        error = "Title is required." if not title else None

        if error is not None:
            flash(error)
        else:
            db = get_db()
            db.execute(
                "UPDATE recipe SET title = ?, body = ? WHERE id = ?", (title, body, id)
            )
            db.commit()
            return redirect(url_for("recipe.index"))

    # GET (or failed validation): show the edit form pre-filled with `post`.
    return render_template("recipe/update.html", post=post)
@bp.route("/<int:id>/delete", methods=("POST",))
@login_required
def delete(id):
    """Delete a recipe and return to the index.

    Requires a logged-in user; aborts with 404 (via get_recipe) when the
    recipe does not exist.  NOTE(review): no author check is performed here —
    any logged-in user can delete any recipe.  Confirm whether that is
    intended.
    """
    get_recipe(id)  # 404 for unknown ids
    db = get_db()
    db.execute("DELETE FROM recipe WHERE id = ?", (id,))
    db.commit()
    return redirect(url_for("recipe.index"))
<file_sep>
-- Inserts dummy data into recipe_cat, recipe, and recipe_ingredient tables
-- Seed the five meal-type categories; SQLite assigns ids 1-5 in insertion order.
INSERT INTO recipe_cat ('name') VALUES ('Breakfast'), ('Lunch'), ('Dinner'), ('Side'), ('Dessert');

-- One sample recipe: a potato gratin filed under 'Side' (category 4),
-- authored by user id 1.
INSERT INTO recipe ('author_id', 'category_id', 'title', 'body')
VALUES (1, 4, 'Potato Gratin',
'Grease an oven safe dish. Thinly slice and layer potatoes.
Season with salt and pepper. Add milk and remaining butter.
Bake at 425 deg F for 30 min.' );

-- Link the gratin (recipe 1) to its ingredients with amounts and units.
-- Ingredient ids refer to rows seeded by the ingredient insert script.
INSERT INTO recipe_ingredient ('recipe_id', 'ingredient_id', 'amount', 'unit')
VALUES (1, 1, 3, 'Tbs'), (1, 4, 1.5, 'lbs'), (1, 7, 3, 'Tbs'), (1, 9, 3,'tsp'),
(1, 11, 1, 'c');
(1, 11, 1, 'c'); | d76a8a00f421783405eb593fb5989bb6814777fa | [
"SQL",
"Python"
] | 4 | Python | CameronVandenBerg17/CS361-Group-04 | c2e49408277b17fee3381473d8b5ac0f5653bb51 | 02f0d28034acfce181d38956ace2e1c20bbe7133 |
refs/heads/master | <file_sep># frozen_string_literal: true
source "https://rubygems.org"

# This Gemfile lives under gemfiles/; load the gemspec from the repo root.
gemspec path: Bundler.root.to_s.sub('/gemfiles', '')

# guard needs a newer Ruby, so only pull it in where it can install.
gem 'guard' if RUBY_VERSION > "2.2.5"
gem 'guard-minitest' if RUBY_VERSION > "2.2.5"
<file_sep># frozen_string_literal: true
# Entry point: loads the gem's components in dependency order.
require 'stronger_parameters/version'

require 'action_pack'
# Rails 3 does not ship strong parameters; pull in the backport gem there.
require 'strong_parameters' if ActionPack::VERSION::MAJOR == 3

require 'stronger_parameters/parameters'
require 'stronger_parameters/constraints'
require 'stronger_parameters/controller_support/permitted_parameters'
<file_sep># frozen_string_literal: true
module StrongerParameters
  # Gem release version, bumped for every published release.
  VERSION = '2.12.1'
end
| 7f58755605f831ad505757ab9da0bdc8a2ae065f | [
"Ruby"
] | 3 | Ruby | waldow90/stronger_parameters | df8385844c57242ae1fc6afa6e24bd260953cd78 | f3c1c51d0903c1ffb83eb1ace314fe7261e49300 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.