text stringlengths 184 4.48M |
|---|
/******************************************************************************
* JBoss, a division of Red Hat *
* Copyright 2006, Red Hat Middleware, LLC, and individual *
* contributors as indicated by the @authors tag. See the *
* copyright.txt in the distribution for a full listing of *
* individual contributors. *
* *
* This is free software; you can redistribute it and/or modify it *
* under the terms of the GNU Lesser General Public License as *
* published by the Free Software Foundation; either version 2.1 of *
* the License, or (at your option) any later version. *
* *
* This software is distributed in the hope that it will be useful, *
* but WITHOUT ANY WARRANTY; without even the implied warranty of *
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU *
* Lesser General Public License for more details. *
* *
* You should have received a copy of the GNU Lesser General Public *
* License along with this software; if not, write to the Free *
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA *
* 02110-1301 USA, or see the FSF site: http://www.fsf.org. *
******************************************************************************/
package org.gatein.pc.test.portlet.jsr286.tck.portletrequests;
import org.gatein.pc.test.unit.PortletTestCase;
import org.gatein.pc.test.unit.PortletTestContext;
import org.gatein.pc.test.unit.Assertion;
import org.gatein.pc.test.unit.annotations.TestCase;
import org.gatein.pc.test.unit.actions.PortletRenderTestAction;
import org.gatein.pc.test.unit.actions.PortletActionTestAction;
import org.gatein.pc.test.unit.actions.PortletEventTestAction;
import org.gatein.pc.test.unit.actions.PortletResourceTestAction;
import org.gatein.pc.test.unit.web.UTP6;
import org.gatein.pc.test.unit.protocol.response.Response;
import org.gatein.pc.test.unit.protocol.response.EndTestResponse;
import org.gatein.pc.test.unit.protocol.response.InvokeGetResponse;
import static org.gatein.pc.test.unit.Assert.*;
import javax.portlet.Portlet;
import javax.portlet.RenderRequest;
import javax.portlet.RenderResponse;
import javax.portlet.PortletException;
import javax.portlet.ActionRequest;
import javax.portlet.ActionResponse;
import javax.portlet.PortletRequest;
import javax.portlet.EventRequest;
import javax.portlet.EventResponse;
import javax.portlet.ResourceRequest;
import javax.portlet.ResourceResponse;
import java.io.IOException;
/**
* @author <a href="mailto:julien@jboss.org">Julien Viet</a>
* @version $Revision: 630 $
*/
// TCK test for JSR 286 assertion 90: during each lifecycle invocation the
// PortletRequest.LIFECYCLE_PHASE request attribute must identify the current
// phase (ACTION_PHASE, EVENT_PHASE, RENDER_PHASE, RESOURCE_PHASE).
@TestCase(Assertion.JSR286_90)
public class LifeCyclePhaseRequestAttribute
{
   // Wires one action per join point into the test sequence; frames are driven
   // by the responses returned from each step.
   public LifeCyclePhaseRequestAttribute(PortletTestCase seq)
   {
      // Frame 0: initial render only issues an action URL to start the
      // action -> event -> render chain checked below.
      seq.bindAction(0, UTP6.RENDER_JOIN_POINT, new PortletRenderTestAction()
      {
         protected Response run(Portlet portlet, RenderRequest request, RenderResponse response, PortletTestContext context) throws PortletException, IOException
         {
            return new InvokeGetResponse(response.createActionURL().toString());
         }
      });
      // Frame 1, action phase: attribute must equal ACTION_PHASE; publishing an
      // event then forces the event phase to run in the same frame.
      seq.bindAction(1, UTP6.ACTION_JOIN_POINT, new PortletActionTestAction()
      {
         protected void run(Portlet portlet, ActionRequest request, ActionResponse response, PortletTestContext context) throws PortletException, IOException
         {
            assertEquals(PortletRequest.ACTION_PHASE, request.getAttribute(PortletRequest.LIFECYCLE_PHASE));
            //
            response.setEvent("Event", null);
         }
      });
      // Frame 1, event phase: attribute must equal EVENT_PHASE.
      seq.bindAction(1, UTP6.EVENT_JOIN_POINT, new PortletEventTestAction()
      {
         protected void run(Portlet portlet, EventRequest request, EventResponse response, PortletTestContext context) throws PortletException, IOException
         {
            assertEquals(PortletRequest.EVENT_PHASE, request.getAttribute(PortletRequest.LIFECYCLE_PHASE));
         }
      });
      // Frame 1, render phase: attribute must equal RENDER_PHASE; a resource
      // URL is returned to drive the final resource-phase check.
      seq.bindAction(1, UTP6.RENDER_JOIN_POINT, new PortletRenderTestAction()
      {
         protected Response run(Portlet portlet, RenderRequest request, RenderResponse response, PortletTestContext context) throws PortletException, IOException
         {
            assertEquals(PortletRequest.RENDER_PHASE, request.getAttribute(PortletRequest.LIFECYCLE_PHASE));
            //
            return new InvokeGetResponse(response.createResourceURL().toString());
         }
      });
      // Frame 2, resource phase: attribute must equal RESOURCE_PHASE; test ends.
      seq.bindAction(2, UTP6.RESOURCE_JOIN_POINT, new PortletResourceTestAction()
      {
         protected Response run(Portlet portlet, ResourceRequest request, ResourceResponse response, PortletTestContext context) throws PortletException, IOException
         {
            assertEquals(PortletRequest.RESOURCE_PHASE, request.getAttribute(PortletRequest.LIFECYCLE_PHASE));
            //
            return new EndTestResponse();
         }
      });
   }
}
% Brooks' Law and The Mythical Man-Month
% Benjamin Roberts
% 6th April 2014
# Brooks Law
_"Adding manpower to a late project makes it later" - Fred Brooks_
# Fred Brooks
- Ph.D from Harvard in 1953.
- Joined IBM in 1956.
- Managed the development of System/360 mainframe and OS/360 system.
- 1999 winner of the Turing Award.
_“We called this project, ‘You bet your company.’” - Robert Evans (Head of Engineering, S/360) 1966_
# The Mythical Man-Month
- Lessons learnt from the OS/360 project.
- Collection of software management and construction essays.
- Still relevant today.
- Available at [archive.org][archive_book].
_"Some people have called the book the "bible of software engineering" [...] everybody quotes it, some people read it, and a few people go by it." - Fred Brooks 2005_
# Brooks Law
_"Adding manpower to a late project makes it later" - Fred Brooks_
----------------
![Perfectly Partitionable Task][perfect_project]
----------------
- Just because your job will take _"6 person months"_ doesn't mean it will take 3 people 2 months.
_"Nine women can't make a baby in one month." - Fred Brooks_
# Causes
- Ramp-Up time
- Communication overheads
# Ramp-up time
- Jumping into an unfamiliar project is difficult.
- Must learn structure, management, build process and style.
- The sudden addition of multiple persons incurs a training or ramp-up time.
# Example: HipHop-VM
- Build process: 2 days
- Find relevant code: 3 days
- Understand relevant code: 1 week
- First working modification: 4 weeks into project
# Brooks' Example:
![Initial Milestones][milestones_expectation]
-----------
![Delay Incurred][milestones_delay]
-----------
![Man power added][milestones_result]
-----------
![Oops][train_wreck]
# Communication overheads
- Tasks which can be partitioned require communication.
- As the number of partitions/developers increases communication begins to dominate the development effort.
- Communication effort can be approximated as n(n-1)/2
_"Three workers require three times as much pairwise intercommunication as two; four require six times as much as two." - Fred Brooks_
# Brooks' Example
![Complex Intercommunications][communications_scale]
# Solutions
- In source documentation helps comprehension of code
- Project documentation covering management and structure decreases lead time
- Consistent build environments.
- Can help decrease ramp-up time of added persons but cannot remove it.
----------------
- Consistent interfaces to implementations reduce the communication requirement
- Persons in support roles (documentation/quality assurance) can be added with reduced ramp-up time
- There is no silver bullet.
_"More software projects have gone awry for lack of calendar time than for all other causes combined." - Fred Brooks_
#References
- [IBM Archives: Frederick P. Brooks, Jr][ibm_brooks]
- [IBM 100: System 360][ibm_360]
- [The Mythical Man Month (Essays on Software Engineering) - Fred Brooks 1975][archive_book]
[links to]: references
[ibm_brooks]: http://www-03.ibm.com/ibm/history/exhibits/builders/builders_brooksjr.html
[archive_book]: https://archive.org/details/mythicalmanmonth00fred
[ibm_360]: http://www-03.ibm.com/ibm/history/ibm100/us/en/icons/system360/words/
[image]: links
[perfect_project]: images/perfect-partitionable.png
[communication_project]: images/complex-interaction.png
[milestones_expectation]: images/milestones.png
[milestones_delay]: images/delay.png
[milestones_result]: images/result.png
[train_wreck]: http://redpenofdoomdotcom.files.wordpress.com/2011/02/train-wreck.jpg
[communications_scale]: images/complex-interaction.png |
import { useEffect, useState } from "react";
import dayjs from "utils/dates/dayjsex";
import getLdBootstrap from "utils/launch-darkly/getLdBootstrap";
const POLLING_INTERVAL = dayjs.duration({
seconds: getLdBootstrap()?.defaultPollingIntervalSeconds ?? 15,
});
export default function usePollingFetchKey(duration = POLLING_INTERVAL) {
const [fetchKey, setFetchKey] = useState(0);
const durationMs = duration.asMilliseconds();
useEffect(() => {
const interval = setInterval(
() => setFetchKey((curr) => curr + 1),
durationMs
);
return () => clearInterval(interval);
}, [durationMs]);
return fetchKey;
} |
import { Link, NavLink } from "react-router-dom";
const NavBar = () => {
return (
<nav className="navbar navbar-expand-sm navbar-light bg-light shadow-sm">
<div className="container">
<Link to="/" className="navbar-brand">
Real<i className="bi bi-geo-fill"></i>App
</Link>
<button
className="navbar-toggler"
type="button"
data-bs-toggle="collapse"
data-bs-target="#main-navbar"
>
<span className="navbar-toggler-icon"></span>
</button>
<div className="collapse navbar-collapse" id="main-navbar">
<ul className="navbar-nav me-auto mb-2 mb-sm-0">
<li className="nav-item">
<NavLink to="/about" className="nav-link">
About
</NavLink>
</li>
<li className="nav-item">
<NavLink to="/my-card" className="nav-link">
My Cards
</NavLink>
</li>
</ul>
<ul className="navbar-nav ms-auto mb-2 mb-sm-0">
<li className="nav-item">
<NavLink to="/sign-in" className="nav-link">
Sign In
</NavLink>
</li>
<li className="nav-item">
<NavLink to="/sign-up" className="nav-link">
Sign Up
</NavLink>
</li>
</ul>
</div>
</div>
</nav>
);
};
export default NavBar; |
import React, { useState } from 'react'
import { List, ListItem, ListItemText } from '@material-ui/core'
import Input from './Input'
import Button from './Button'
function InputRow({ addTodoItem }) {
const [input, setInput] = useState('')
function addTodo() {
addTodoItem(input)
setInput('')
}
return (
<form>
<Input value={input} onChange={(e) => setInput(e.target.value)} />
<Button onClick={addTodo} color="primary" variant="outlined">
Add Item
</Button>
</form>
)
}
export function TodoList() {
const [todos, setTodos] = useState(['Improve JS skills 💪', 'Pet dog 🐶'])
function addTodoItem(todo) {
todo.length && setTodos([...todos, todo])
}
function removeTodoItem(i) {
todos.splice(i, 1)
setTodos([...todos])
}
return (
<div className="todo-list">
<h1>Todo Items</h1>
<InputRow addTodoItem={addTodoItem} />
<List>
{todos.map((todo, i) => (
<ListItem key={`${todo}-${i}`}>
<ListItemText>{todo}</ListItemText>
<Button color="secondary" onClick={() => removeTodoItem(i)}>
Remove
</Button>
</ListItem>
))}
</List>
</div>
)
} |
<!DOCTYPE html>
<html lang="en" xmlns:th="http://www.thymeleaf.org">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Paw Finder - Users List</title>
<link rel="stylesheet" href="https://stackpath.bootstrapcdn.com/bootstrap/4.5.2/css/bootstrap.min.css">
<style>
body {
background-color: #E3DAC9;
}
.navbar {
background-color: white;
}
.navbar-brand {
color: #343a40;
}
.navbar-brand img {
height: 30px;
width: 30px;
margin-right: 5px;
}
.navbar-nav .nav-link {
color: #343a40;
}
h1 {
text-align: center;
margin-top: 20px;
margin-bottom: 30px;
}
.table {
width: 100%;
border-collapse: collapse;
margin-top: 20px;
background-color: #fff;
}
.table th,
.table td {
padding: 1rem;
border-top: none;
border-bottom: 1px solid #dee2e6;
}
.table thead th {
background-color: #f5f5f5;
border-top: none;
border-bottom: 2px solid #dee2e6;
}
.table tbody tr:hover {
background-color: #E3DAC9;
color: #fff;
}
.table tbody td {
vertical-align: middle;
}
.btn-success {
background-color: #b08d57;
border-color: #b08d57;
}
.btn-success:hover {
background-color: #856b40;
border-color: #856b40;
}
.btn-primary {
background-color: #b08d57;
border-color: #b08d57;
}
.btn-primary:hover {
background-color: #856b40;
border-color: #856b40;
}
</style>
</head>
<body>
<nav class="navbar navbar-expand-lg navbar-light bg-light" style="height: 60px;">
<a class="navbar-brand" href="#">
<img src="/img/Logo2.png" alt="Logo" height="30" width="30" class="mr-2">
<span style="font-size: 20px;">Paw Finder</span>
</a>
<button class="navbar-toggler" type="button" data-toggle="collapse" data-target="#navbarNav"
aria-controls="navbarNav" aria-expanded="false" aria-label="Toggle navigation">
<span class="navbar-toggler-icon"></span>
</button>
<div class="collapse navbar-collapse" id="navbarNav">
<ul class="navbar-nav mr-auto">
<li class="nav-item">
<a class="nav-link" href="#">Home</a>
</li>
<li class="nav-item">
<a class="nav-link" th:href="@{/shelters/list}">Pet Adoption</a>
</li>
<li class="nav-item">
<a class="nav-link" href="#">Lost and Found</a>
</li>
<li class="nav-item">
<a class="nav-link" th:href="@{/showMap}">Veterinary Clinics</a>
</li>
<li class="nav-item">
<a class="nav-link" href="/shelters">Shelters</a>
</li>
<li class="nav-item">
<a class="nav-link" href="/animals/showAll">View Animals</a>
</li>
<li class="nav-item">
<a class="nav-link" href="/users">Users</a>
</li>
</ul>
<ul class="navbar-nav navbar-nav-buttons">
<li class="nav-item">
<a class="nav-link" href="/users/register">Register</a>
</li>
<li class="nav-item">
<a class="nav-link" href="/users/login">Login</a>
</li>
</ul>
</div>
</nav>
<div class="container">
<h1 class="text-center" style="color: #856b40;">Users List</h1>
<table class="table table-borderless">
<thead>
<tr>
<th scope="col">First Name</th>
<th scope="col">Last Name</th>
<th scope="col">Email</th>
<th scope="col">Phone</th>
<th scope="col">Roles</th>
<th scope="col">Action</th>
</tr>
</thead>
<tbody>
<tr th:each="user : ${users}">
<td th:text="${user.userFirstName}"></td>
<td th:text="${user.userLastName}"></td>
<td th:text="${user.userEmail}"></td>
<td th:text="${user.userPhone}"></td>
<td th:text="${user.authorities}"></td>
<td>
<a th:href="@{'/users/' + ${user.userID} + '/edit'}" class="btn btn-primary">Edit Roles</a>
</td>
</tr>
</tbody>
</table>
</div>
<footer class="text-white text-center py-3">
<div style="background-color: white; color: black;">
© 2023 Paw Finder. All rights reserved.
</div>
</footer>
<script src="https://code.jquery.com/jquery-3.5.1.slim.min.js"></script>
<script src="https://cdn.jsdelivr.net/npm/@popperjs/core@2.5.4/dist/umd/popper.min.js"></script>
<script src="https://stackpath.bootstrapcdn.com/bootstrap/4.5.2/js/bootstrap.min.js"></script>
</body>
</html> |
import { UserDB } from './../../../database/entity/user.entity';
import { ResStatus } from './../../../shared/enum/res-status.enum';
import { UpdateFoodTypeDTO, UpdateFoodTypeReqDTO } from './../dto/update-food-type.dto';
import { HttpException, HttpStatus, Inject, Injectable, OnApplicationBootstrap } from '@nestjs/common';
import { CreateFoodTypeReqDto, CreateFoodTypeResDTO } from '../dto/create-food-type.dto';
import { ResultFoodResDTO } from '../dto/findAll.dto';
import { UploadFoodTypeImageDtoRes } from '../dto/uploadFoodTypeImage-dto';
import { ApiFoodService } from './../../../api/food/service/api-food.service';
import { GlobalResDTO } from './../../../api/global-dto/global-res.dto';
import { DataBase } from './../../../database/database.providers';
import { FoodTypeDB } from './../../../database/entity/food-type.entity';
import { LogService } from './../../../helper/services/log.service';
import { FoodTypeService } from './food-type.service';
@Injectable()
export class ApiFoodTypeService implements OnApplicationBootstrap {
private logger = new LogService(ApiFoodService.name);
constructor(
@Inject(DataBase.FoodTypeDB) private readonly foodTypeRepository: typeof FoodTypeDB,
private foodTypeService: FoodTypeService,
) { }
onApplicationBootstrap() {
//
}
async api_create(user: UserDB, createFoodTypeDTO: CreateFoodTypeReqDto): Promise<CreateFoodTypeResDTO> {
const tag = this.api_create.name;
try {
const result = await this.foodTypeService.create(user, createFoodTypeDTO);
return new CreateFoodTypeResDTO(ResStatus.success, 'สร้างประเภทอาหารสำเร็จ', result);
} catch (error) {
console.error(`${tag} -> `, error);
this.logger.error(`${tag} -> `, error);
throw new HttpException(`${error}`, HttpStatus.INTERNAL_SERVER_ERROR);
}
}
async api_update(user: UserDB, id: number, updateFoodTypeDto: UpdateFoodTypeReqDTO): Promise<UpdateFoodTypeDTO> {
const tag = this.api_update.name;
try {
let res: UpdateFoodTypeDTO = null;
await this.foodTypeService
.update(id, updateFoodTypeDto)
.then((response) => {
res = new UpdateFoodTypeDTO(ResStatus.success, '😜 อัพเดตประเภทอาหารสำเร็จ 😜', response);
})
.catch((error) => {
console.error(error);
res = new UpdateFoodTypeDTO(ResStatus.fail, 'กรุณาตรวจสอบความถูกต้องของข้อมูล', null);
});
return res;
} catch (error) {
console.error(`${tag} -> `, error);
this.logger.error(`${tag} -> `, error);
throw new HttpException(`${error}`, HttpStatus.INTERNAL_SERVER_ERROR);
}
}
async api_del(user: UserDB, _id: number): Promise<GlobalResDTO> {
const tag = this.api_del.name;
try {
let res: GlobalResDTO = null;
await this.foodTypeService
.remove(_id)
.then((resp) => {
if (resp === 0) {
res = new GlobalResDTO(ResStatus.fail, 'ลบไม่สำเร็จ');
} else {
res = new GlobalResDTO(ResStatus.success, 'ลบสำเร็จ');
}
})
.catch((err) => {
res = new GlobalResDTO(ResStatus.fail, '');
});
return res;
} catch (error) {
console.error(`${tag} -> `, error);
this.logger.error(`${tag} -> `, error);
throw new HttpException(`${error}`, HttpStatus.INTERNAL_SERVER_ERROR);
}
}
async api_findAll(user: UserDB) {
const tag = this.api_findAll.name;
try {
const foodTypeFindAll = await this.foodTypeService.findAll(user);
// console.log(JSON.stringify(foodTypeFindAll, null, 2));
return new ResultFoodResDTO(ResStatus.success, '', foodTypeFindAll);
} catch (error) {
console.error(`${tag} -> `, error);
this.logger.error(`${tag} -> `, error);
throw new HttpException(`${error}`, HttpStatus.INTERNAL_SERVER_ERROR);
}
}
async uploadFoodTypeImage(image: Express.Multer.File[], id: number): Promise<UploadFoodTypeImageDtoRes> {
const tag = this.uploadFoodTypeImage.name;
try {
if (!image || image.length === 0) {
throw new HttpException(`cannot image user`, HttpStatus.INTERNAL_SERVER_ERROR);
}
const findFoodType = await this.foodTypeRepository.findByPk(id);
if (!findFoodType) throw new Error('no user found try again later');
this.logger.debug('findFoodType -> ', findFoodType)
findFoodType.image = image[0].filename ? image[0].filename : null;
await findFoodType.save();
// const afterUpdate = await this.usersService.findOne(id);
return new UploadFoodTypeImageDtoRes(ResStatus.success, '', findFoodType);
} catch (error) {
this.logger.error(`${tag} -> `, error);
throw new HttpException(`${error}`, HttpStatus.INTERNAL_SERVER_ERROR);
}
}
} |
<div class="dialog-header-bar">
<h1 mat-dialog-title>Applicant editing form</h1>
<mat-dialog-actions>
<mat-icon mat-dialog-close>close</mat-icon>
</mat-dialog-actions>
</div>
<form class="applicant-form" [formGroup]="validationGroup!" novalidate>
<!-- First Name -->
<p class="required-field">First name</p>
<mat-form-field appearance="fill" [ngClass]="validationGroup!.controls['firstName'].errors? 'invalid-input' : ''">
<input required formControlName="firstName" type="text" matInput placeholder="Enter first name" [(ngModel)]="updatedApplicant.firstName">
</mat-form-field>
<div class="validation-error" *ngIf="validationGroup!.controls['firstName'].errors?.required; else firstNamePatternError">
This field is required
</div>
<ng-template #firstNamePatternError>
<div class="validation-error" *ngIf="validationGroup!.controls['firstName'].errors?.pattern">
A first name can only contain latin letters and start with an upper case letter
</div>
</ng-template>
<!-- Last Name -->
<p class="required-field">Last name</p>
<mat-form-field appearance="fill" [ngClass]="validationGroup!.controls['lastName'].errors? 'invalid-input' : ''">
<input required formControlName="lastName" type="text" matInput placeholder="Enter last name" [(ngModel)]="updatedApplicant.lastName">
</mat-form-field>
<div class="validation-error" *ngIf="validationGroup!.controls['lastName'].errors?.required; else lastNamePatternError">
This field is required
</div>
<ng-template #lastNamePatternError>
<div class="validation-error" *ngIf="validationGroup!.controls['lastName'].errors?.pattern">
A last name can only contain latin letters and start with an upper case letter
</div>
</ng-template>
<!-- Email -->
<p class="required-field">Email</p>
<mat-form-field appearance="fill" [ngClass]="validationGroup!.controls['email'].errors? 'invalid-input' : ''">
<input required formControlName="email" type="email" matInput placeholder="Enter email" [(ngModel)]="updatedApplicant.email">
</mat-form-field>
<div class="validation-error" *ngIf="validationGroup!.controls['email'].errors?.existEmail; else emailPatternError">
This email is already in use
</div>
<div class="validation-error" *ngIf="validationGroup!.controls['email'].errors?.required; else emailPatternError">
This field is required
</div>
<ng-template #emailPatternError>
<div class="validation-error" *ngIf="validationGroup!.controls['email'].errors?.pattern">
Please enter a valid email address.
</div>
</ng-template>
<!-- Experience -->
<p>Experience</p>
<mat-form-field appearance="fill" [ngClass]="validationGroup!.controls['experience'].errors? 'invalid-input' : ''">
<input required formControlName="experience" type="number" matInput step=".1" min="0" max="60" [(ngModel)]="updatedApplicant.experience">
</mat-form-field>
<div class="validation-error" *ngIf="validationGroup!.controls['experience'].errors">
Please enter a valid experience number in range from 0 to 60
</div>
<!-- Phone -->
<p class="required-field">Phone</p>
<mat-form-field appearance="fill" [ngClass]="validationGroup!.controls['phone'].errors? 'invalid-input' : ''">
<input required formControlName="phone" type="tel" matInput placeholder="Enter phone number" [(ngModel)]="updatedApplicant.phone">
</mat-form-field>
<div class="validation-error" *ngIf="validationGroup!.controls['phone'].errors?.required; else phonePatternError">
This field is required
</div>
<ng-template #phonePatternError>
<div class="validation-error" *ngIf="validationGroup!.controls['phone'].errors?.pattern">
A phone number can only contain digits and has a length from 8 to 16. It has to match the pattern +&lt;number&gt; or &lt;number&gt;
</div>
</ng-template>
<!-- LinkedIn url -->
<p>LinkedIn url</p>
<mat-form-field appearance="fill" [ngClass]="validationGroup!.controls['linkedInUrl'].errors? 'invalid-input' : ''">
<input formControlName="linkedInUrl" type="text" matInput placeholder="Enter LinkedIn url" [(ngModel)]="updatedApplicant.linkedInUrl">
</mat-form-field>
<div class="validation-error" *ngIf="validationGroup!.controls['linkedInUrl'].errors?.pattern">
LinkedIn url must match the pattern https://www.linkedin.com/&lt;username&gt;
</div>
<mat-dialog-actions class="edit-action">
<button [disabled]="validationGroup!.invalid" (click)="updateApplicant()" mat-button>
Update
</button>
</mat-dialog-actions>
</form> |
# Application user, authenticated via Sorcery. A user may be linked to
# external OAuth providers through the +authentications+ association.
class User < ActiveRecord::Base
  authenticates_with_sorcery!

  has_many :authentications, dependent: :destroy

  # Password must be at least 3 chars, confirmed, and confirmation present;
  # email addresses are unique per account.
  validates :password, length: { minimum: 3 }
  validates :password, confirmation: true
  validates :password_confirmation, presence: true
  validates :email, uniqueness: true

  # Populate the user from an OmniAuth hash and stage the linked
  # authentication record (persisted when the user is saved).
  def apply_omniauth(omniauth)
    if email.blank?
      self.email = omniauth['info']['email']
      self.password = SecureRandom.hex(8) # password can't be left blank
      self.password_confirmation = self.password
    end
    authentications.build(:provider => omniauth['provider'], :uid => omniauth['uid'])
  end
end
package Ecom.Controller;
import java.util.List;
import lombok.RequiredArgsConstructor;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.DeleteMapping;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.PutMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import Ecom.Model.Cart;
import Ecom.Model.Product;
import Ecom.Service.CartService;
@RestController
@RequestMapping("/ecom/cart")
@RequiredArgsConstructor
public class CartController {
private final CartService cartService;
@PostMapping("/add-product")
public ResponseEntity<Cart> addProductToCart(@RequestParam Integer userId, @RequestParam Integer productId) {
Cart cart = cartService.addProductToCart(userId, productId);
return new ResponseEntity<>(cart, HttpStatus.CREATED);
}
@PutMapping("/increase-productQty/{cartId}/{productId}")
public ResponseEntity<Cart> increaseProductQuantity(
@PathVariable Integer cartId,
@PathVariable Integer productId
) {
Cart cart = cartService.increaseProductQuantity(cartId, productId);
return ResponseEntity.ok(cart);
}
@PutMapping("/decrease-productQty/{cartId}/{productId}")
public ResponseEntity<Cart> decreaseProductQuantity(
@PathVariable Integer cartId,
@PathVariable Integer productId
) {
Cart cart = cartService.decreaseProductQuantity(cartId, productId);
return ResponseEntity.ok(cart);
}
@DeleteMapping("/remove-product/{cartId}/{productId}")
public ResponseEntity<String> removeProductFromCart(@PathVariable Integer cartId, @PathVariable Integer productId) {
cartService.removeProductFromCart(cartId, productId);
String msg = "Prodcut is removed from cart";
return new ResponseEntity<String>(msg, HttpStatus.OK);
}
@DeleteMapping("/empty-Cart/{cartId}")
public ResponseEntity<String> removeAllProductFromCart(@PathVariable Integer cartId) {
cartService.removeAllProductFromCart(cartId);
String msg = "All product Remove From cart";
return new ResponseEntity<String>(msg, HttpStatus.OK);
}
@GetMapping("/products/{cartId}")
public ResponseEntity<Cart> getAllCartProducts(@PathVariable Integer cartId) {
Cart products = cartService.getAllCartProduct(cartId);
return ResponseEntity.ok(products);
}
} |
import { Arg, Authorized, Ctx, FieldResolver, Mutation, Query, Resolver, ResolverInterface, Root } from 'type-graphql'
import { Excuse } from '../entity/Excuse'
import { Context } from '../types'
import {
ExcuseCreateInput,
ExcuseCreateResponse,
createExcuse
} from './excuse/create'
import { ExcuseDeleteResponse, ExcuseDeleteInput, deleteExcuse } from './excuse/delete'
/**
 * GraphQL resolver for the Excuse entity. All operations require an
 * authenticated caller (@Authorized). Related entities are resolved through
 * DataLoader instances on the request context to batch lookups.
 */
@Resolver(Excuse)
export class ExcuseResolver implements ResolverInterface<Excuse> {
  // Resolves the student the excuse belongs to via the student loader.
  @Authorized()
  @FieldResolver()
  async student (@Root() excuse: Excuse, @Ctx() { loaders }: Context) {
    return loaders.student.load(excuse.studentId)
  }

  // Resolves the submitting user; undefined when no submitter is recorded
  // (== null also catches undefined here).
  @Authorized()
  @FieldResolver()
  async submittedBy (@Root() excuse: Excuse, @Ctx() { loaders }: Context) {
    if (excuse.submittedById == null) {
      return undefined
    }
    return loaders.user.load(excuse.submittedById)
  }

  // Fetches a single excuse by id; null when not found.
  @Authorized()
  @Query(() => Excuse, { nullable: true })
  async excuse (
    @Arg('id') id: string,
    @Ctx() { loaders }: Context
  ) {
    return loaders.excuse.load(id)
  }

  // Delegates creation to the createExcuse command module.
  @Authorized()
  @Mutation(() => ExcuseCreateResponse)
  async createExcuse (
    @Arg('data') data: ExcuseCreateInput,
    @Ctx() context: Context
  ) : Promise<ExcuseCreateResponse> {
    return createExcuse(data, context)
  }

  // Delegates deletion to the deleteExcuse command module.
  @Authorized()
  @Mutation(() => ExcuseDeleteResponse)
  async deleteExcuse (
    @Arg('data') data: ExcuseDeleteInput
  ) : Promise<ExcuseDeleteResponse> {
    return deleteExcuse(data)
  }
}
from stringprep import in_table_d2
from lxml import etree
import re
import pandas as pd
from transformers import AutoTokenizer
from datasets import Dataset
from sklearn.model_selection import train_test_split, StratifiedGroupKFold
# Tag of the XML element whose spans are converted to IOB token labels.
ELEMENT_TAG = 'IIM'

# Source corpus; the file is Latin-1 (ISO-8859-1) encoded, not UTF-8.
with open('data/Tagged ST VW 1927.xml', 'r', encoding='ISO-8859-1') as f:
    xml_content = f.read()
def _find_paragraphs_and_process(element, result_list):
    """Walk the tree rooted at ``element`` and hand every paragraph node
    (tag of the form 'P' followed by three digits, e.g. 'P042') to
    ``_process_element``; non-paragraph nodes are descended into."""
    tag = element.tag
    if isinstance(tag, str) and re.match(r'P\d{3}', tag):
        _process_element(element, tag, result_list)
        return
    # Not a paragraph: keep searching among the children.
    for child in element:
        _find_paragraphs_and_process(child, result_list)
def _process_element(element, paragraph_id, result_list, inside=False):
    """Append one ``{'paragraph_id', 'word', 'iob_tag'}`` dict per word under
    ``element`` to ``result_list``.

    Words inside an ELEMENT_TAG element are labelled B-/I-<ELEMENT_TAG>;
    everything else (including tail text) is labelled 'O'.
    """
    # update paragraph_id if the element is itself a paragraph (P###)
    match = re.match(r'P(\d{3})', element.tag)
    if match:
        paragraph_id = int(match.group(1))
    # entering an ELEMENT_TAG element: subsequent words are inside a span
    # (original comment said "SC"; the tag actually compared is ELEMENT_TAG)
    if element.tag == ELEMENT_TAG:
        inside = True
    # label the element's own text: first word gets B-, the rest I- when inside
    if element.text:
        words = element.text.split()
        for i, word in enumerate(words):
            iob_tag = f'B-{ELEMENT_TAG}' if i == 0 and inside else (f'I-{ELEMENT_TAG}' if inside else 'O')
            result_list.append({'paragraph_id': paragraph_id, 'word': word, 'iob_tag': iob_tag})
    # recurse into children, propagating the inside flag
    # NOTE(review): a child element whose text starts while inside==True also
    # emits a fresh B- tag for its first word — looks like it could split one
    # annotated span into several B-chunks; confirm this is intended.
    for child in element:
        _process_element(child, paragraph_id, result_list, inside)
    # tail text follows the element's closing tag, so it is outside the span
    if element.tail:
        tail_words = element.tail.split()
        for word in tail_words:
            result_list.append({'paragraph_id': paragraph_id, 'word': word, 'iob_tag': 'O'})
def _tokenize_and_preserve_labels(batch):
    """Tokenize a batch of paragraphs and align word-level IOB labels with the
    resulting sub-word tokens.

    Relies on the module-level globals ``tokenizer`` and ``label2id`` (defined
    later in this script, before this function is invoked via ``Dataset.map``).
    Special tokens (word id None) receive label -100 so the loss ignores them;
    every sub-word piece of a real word inherits that word's label id.
    """
    tokenized_inputs = tokenizer(batch['paragraph'], padding=True, truncation=True)
    label_ids_list = []
    for i, word_labels in enumerate(batch['word_labels']):
        # word_ids maps each sub-token back to its source word index (None for special tokens)
        word_ids = tokenized_inputs.word_ids(batch_index=i)
        word_labels = word_labels.split(',')
        label_ids = []
        for word_idx in word_ids:
            if word_idx is None:
                label_ids.append(-100)
            else:
                label = word_labels[word_idx] if word_idx < len(word_labels) else 'O'  # Default to 'O' or another default label
                label_id = label2id[label]
                label_ids.append(label_id)
        label_ids_list.append(label_ids)  # Append the list of label IDs for this sentence to the overall list
    tokenized_inputs["labels"] = label_ids_list
    return tokenized_inputs
print('Parsing XML...')
# recover=True: the scanned 1927 corpus is not fully well-formed XML, so let
# lxml skip over markup errors instead of raising.
root = etree.fromstring(xml_content, parser=etree.XMLParser(recover=True))
result_list = []
print(f'Processing {ELEMENT_TAG} elements...')
_find_paragraphs_and_process(root, result_list)
# One row per word: (paragraph_id, word, iob_tag).
df = pd.DataFrame(result_list)
# split the dataset into train and test sets using StratifiedGroupKFold:
# grouped by paragraph so no paragraph straddles the train/test boundary,
# stratified on the IOB tag distribution
stratified_group_kfold = StratifiedGroupKFold(n_splits=5)
for train_index, test_index in stratified_group_kfold.split(df, df['iob_tag'], df['paragraph_id']):
    train_df = df.iloc[train_index].copy()
    test_df = df.iloc[test_index].copy()
    # break the loop after the first iteration — only the first fold
    # (an 80/20 split) is used
    break
# Re-assemble word rows into one row per paragraph: space-joined text plus a
# comma-joined label string, then drop the duplicated per-word rows.
train_df['paragraph'] = train_df[['paragraph_id', 'word', 'iob_tag']].groupby(['paragraph_id'])['word'].transform(lambda x: ' '.join(x))
train_df['word_labels'] = train_df[['paragraph_id', 'word', 'iob_tag']].groupby(['paragraph_id'])['iob_tag'].transform(lambda x: ','.join(x))
train_df = train_df[['paragraph', 'word_labels']].drop_duplicates().reset_index(drop=True)
test_df['paragraph'] = test_df[['paragraph_id', 'word', 'iob_tag']].groupby(['paragraph_id'])['word'].transform(lambda x: ' '.join(x))
test_df['word_labels'] = test_df[['paragraph_id', 'word', 'iob_tag']].groupby(['paragraph_id'])['iob_tag'].transform(lambda x: ','.join(x))
test_df = test_df[['paragraph', 'word_labels']].drop_duplicates().reset_index(drop=True)
train_df.to_csv(f'data/train_{ELEMENT_TAG}.csv', index=False)
test_df.to_csv(f'data/test_{ELEMENT_TAG}.csv', index=False)
# Label vocabulary for token classification (used by _tokenize_and_preserve_labels).
label2id = {f'B-{ELEMENT_TAG}': 0, f'I-{ELEMENT_TAG}': 1, 'O': 2}
id2label = {0: f'B-{ELEMENT_TAG}', 1: f'I-{ELEMENT_TAG}', 2: 'O'}
train_dataset = Dataset.from_pandas(train_df)
test_dataset = Dataset.from_pandas(test_df)
model_name = "google-bert/bert-base-uncased"
tokenizer = AutoTokenizer.from_pretrained(model_name)
print('Tokenizing and encoding...')
# batch_size=None: map each split as a single batch so padding is uniform
# within the split.
tokenized_train_dataset = train_dataset.map(_tokenize_and_preserve_labels, batched=True, batch_size=None)
tokenized_test_dataset = test_dataset.map(_tokenize_and_preserve_labels, batched=True, batch_size=None)
tokenized_train_dataset.save_to_disk(f'data/tokenized_train_dataset_{ELEMENT_TAG}.hf')
tokenized_test_dataset.save_to_disk(f'data/tokenized_test_dataset_{ELEMENT_TAG}.hf')
print(f'Files data/tokenized_train_dataset_{ELEMENT_TAG}.hf and data/tokenized_test_dataset_{ELEMENT_TAG}.hf saved')
print('Preprocessing done')
# Print out a few examples from the tokenized datasets
# for i in range(100):
# print(f"Example {i+1}:")
# print(f"Tokens: {tokenizer.convert_ids_to_tokens(tokenized_train_dataset[i]['input_ids'])}")
# print(f"Labels: {tokenized_train_dataset[i]['labels']}")
# print()
# for i in range(100):
# print(f"Example {i+1}:")
# print(f"Tokens: {tokenizer.convert_ids_to_tokens(tokenized_test_dataset[i]['input_ids'])}")
# print(f"Labels: {tokenized_test_dataset[i]['labels']}")
# print() |
package cn.minih.web.core
import cn.minih.common.exception.MinihErrorCode
import cn.minih.common.exception.MinihException
import cn.minih.common.util.*
import cn.minih.core.annotation.Service
import cn.minih.core.beans.BeanFactory
import cn.minih.web.annotation.Delete
import cn.minih.web.annotation.Get
import cn.minih.web.annotation.Post
import cn.minih.web.annotation.Put
import io.vertx.core.Vertx
import io.vertx.core.http.HttpMethod
import io.vertx.core.http.HttpServerRequest
import io.vertx.ext.web.RoutingContext
import io.vertx.kotlin.coroutines.dispatcher
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.launch
import java.lang.reflect.Proxy
import kotlin.reflect.KAnnotatedElement
import kotlin.reflect.KClass
import kotlin.reflect.KFunction
import kotlin.reflect.KParameter
import kotlin.reflect.full.callSuspend
import kotlin.reflect.full.createType
import kotlin.reflect.full.memberFunctions
import kotlin.reflect.full.valueParameters
/**
 * Service registration.
 *
 * Scans service interfaces for request-mapping annotations and registers
 * both a route handler (via the supplied callback) and a mirroring
 * event-bus consumer for every mapped function.
 *
 * @author hubin
 * @since 2023-08-05 19:10:40
 */
object RegisterService {
    private lateinit var vertx: Vertx

    /**
     * Resolves every annotated function of [iservice] to a triple of
     * (function name, full request path, mapping annotation).
     *
     * The interface-level mapping (if present) contributes the parent path
     * prefix. Duplicate HTTP-method + path combinations are rejected.
     */
    private fun getNeedRegisterFunctions(iservice: KClass<*>): List<Triple<String, String, Annotation>?> {
        val pathExists = mutableListOf<String>()
        val iserviceMapping = findRequestMapping(iservice as KAnnotatedElement)
        var parentPath = ""
        iserviceMapping?.let { def -> parentPath = formatPath(def.url) }
        return iservice.members.filterIsInstance<KFunction<*>>().map { fn ->
            val requestType = findRequestMapping(fn as KAnnotatedElement)
            requestType?.let { mapping ->
                var realPath = parentPath
                if (mapping.url.isNotBlank() && mapping.url != "/") {
                    realPath = realPath.plus(formatPath(mapping.url))
                }
                // Reject two functions mapped to the same method + path.
                if (pathExists.contains("${getHttpMethod(mapping.type)?.name()}_$realPath")) {
                    throw MinihException("路径重复!")
                }
                pathExists.add("${getHttpMethod(mapping.type)?.name()}_$realPath")
                Triple(fn.name, realPath, mapping.type)
            }
        }
    }

    /** Maps a request-mapping annotation to the matching HTTP method, or null. */
    private fun getHttpMethod(n: Annotation): HttpMethod? {
        return when (n) {
            is Post -> HttpMethod.POST
            is Get -> HttpMethod.GET
            is Put -> HttpMethod.PUT
            is Delete -> HttpMethod.DELETE
            else -> null
        }
    }

    /**
     * If the first parameter is a Minih service type, resolves the backing
     * bean from the bean factory (skipping the Proxy supertype); otherwise
     * returns null.
     */
    private fun getBeanCall(params: List<KParameter>): Any? {
        if (params.isNotEmpty()) {
            val p1 = params.first()
            val clazz = p1.type.classifier as KClass<*>
            val superClasses = getSuperClassRecursion(clazz)
            if (superClasses.contains(cn.minih.web.service.Service::class)) {
                return BeanFactory.instance.getBeanFromType(clazz.supertypes.first { it != Proxy::class.createType() })
            }
        }
        return null
    }

    /**
     * Registers an event-bus consumer mirroring the HTTP route [realPath].
     * The consumer address is the project name plus the path with '/'
     * replaced by '.'. Invocation errors are replied as a [MinihException]
     * payload instead of failing the message.
     */
    private fun registerEventBusConsumer(realPath: String, fn: KFunction<Any?>) {
        val serverName = getProjectName(this.vertx.orCreateContext)
        vertx.eventBus().consumer(serverName.plus(realPath.replace("/", "."))) { p ->
            CoroutineScope(vertx.orCreateContext.dispatcher()).launch {
                var rawResult: Any?
                try {
                    val bean: Any? = getBeanCall(fn.parameters)
                    // Drop the receiver parameter when a bean instance is used.
                    val parameters = bean?.let {
                        fn.parameters.subList(1, fn.parameters.size)
                    } ?: fn.parameters
                    // Functions needing raw HTTP objects cannot be served over the event bus.
                    // NOTE(review): bodyParameters is a (possibly empty) list, never null —
                    // confirm Assert.isNull treats an empty list as "null" as intended.
                    val bodyParameters =
                        parameters.filter { it.type == RoutingContext::class.createType() || it.type == HttpServerRequest::class.createType() }
                    Assert.isNull(bodyParameters, "接口[${fn.name}]需要http请求参数,无法创建eventBusConsumer!")
                    val args = generateArgs(parameters, p.body()).map { it.second }.toTypedArray()
                    rawResult = when {
                        bean == null && parameters.isEmpty() -> if (fn.isSuspend) fn.callSuspend() else fn.call()
                        bean == null -> if (fn.isSuspend) fn.callSuspend(*args) else fn.call(*args)
                        parameters.isEmpty() -> if (fn.isSuspend) fn.callSuspend(bean) else fn.call(bean)
                        else -> if (fn.isSuspend) fn.callSuspend(bean, *args) else fn.call(bean, *args)
                    }
                } catch (e: Throwable) {
                    rawResult =
                        MinihException(
                            "远程接口调用出现错误,${e.cause}",
                            errorCode = MinihErrorCode.ERR_CODE_REMOTE_CALL_ERROR
                        )
                }
                p.reply(rawResult?.toJsonObject())
            }
        }
    }

    /**
     * Registers every mapped function of the given service interfaces.
     *
     * @param serviceList service interfaces to scan
     * @param vertx the Vert.x instance used for event-bus consumers
     * @param handler callback receiving (path, HTTP method, implementation function)
     */
    fun registerService(
        serviceList: List<KClass<*>>,
        vertx: Vertx,
        handler: (t: Triple<String, HttpMethod?, KFunction<*>>) -> Unit
    ) {
        this.vertx = vertx
        serviceList.forEach { iservice ->
            val serviceDefs = BeanFactory.instance.findBeanDefinitionByType(iservice)
                .filter { it.annotations.map { a1 -> a1.annotationClass }.contains(Service::class) }
            Assert.isTrue(serviceDefs.size == 1) {
                MinihException("${iservice.simpleName} 实例未找到或找到多个!")
            }
            val serviceDef = serviceDefs.first()
            val bean = BeanFactory.instance.getBean(serviceDef.beanName)
            val sClazz = bean::class.supertypes.first { s -> s != Proxy::class.createType() }.classifier as KClass<*>
            getNeedRegisterFunctions(iservice).filterNotNull().forEach {
                val fns = sClazz.memberFunctions.filter { fn -> fn.name == it.first }
                var fn = fns.first()
                if (fns.size > 1) {
                    // Overloaded name: pick the implementation whose signature matches
                    // the interface function carrying the same HTTP-method annotation.
                    val iFns = iservice.members.filterIsInstance<KFunction<*>>().filter { iFn -> iFn.name == it.first }
                    iFns.forEach { fnTmp ->
                        val requestType = findRequestMapping(fnTmp as KAnnotatedElement)
                        requestType?.let { mapping ->
                            if (getHttpMethod(mapping.type) == getHttpMethod(it.third)) {
                                fn = fns.first { fnTmp1 ->
                                    var flag =
                                        fnTmp1.name == fnTmp.name
                                            && fnTmp1.valueParameters.size == fnTmp.valueParameters.size
                                            // BUG FIX: previously compared `returnType::class`,
                                            // i.e. the runtime class of the KType objects, which
                                            // is always equal (a no-op check). Compare the KTypes.
                                            && fnTmp1.returnType == fnTmp.returnType
                                    if (flag) {
                                        for ((index, kParameter) in fnTmp1.valueParameters.withIndex()) {
                                            val i1 = fnTmp.valueParameters[index]
                                            if (kParameter.type != i1.type) {
                                                flag = false
                                            }
                                        }
                                    }
                                    flag
                                }
                            }
                        }
                    }
                }
                handler(Triple(it.second, getHttpMethod(it.third), fn))
                registerEventBusConsumer(it.second, fn)
            }
        }
    }
}
// SPDX-License-Identifier: MIT
pragma solidity >=0.7.0 <0.9.0;
contract multiSigWallet{
    // Events emitted by the multisig wallet operations
    event Deposit(address indexed sender, uint amount);
    event Submit(uint indexed txId);
    event Approve(address indexed owner, uint indexed txId);
    event Revoke(address indexed owner, uint indexed txId);
    event Execute(uint indexed txId);

    // A queued transaction, referenced by its index in `transactions`
    struct Transaction {
        address to;     // call target
        uint value;     // wei to forward
        bytes data;     // calldata to forward
        bool executed;  // set once executed; prevents double execution
    }

    // ----- State variables -----
    // list of wallet owners
    address[] public owners;
    // owner address => is an authorised owner
    mapping(address => bool) public isOwner;
    // number of owner approvals required before execution
    uint public required;
    // queued transactions awaiting approval/execution
    Transaction[] public transactions;
    // transaction id => owner address => has approved
    mapping(uint => mapping(address => bool)) public approved;

    // restricts a function to wallet owners
    modifier onlyOwner() {
        require(isOwner[msg.sender],"Not owner");
        _;
    }
    // the id must reference an existing transaction
    modifier txExists(uint _txId) {
        require(_txId < transactions.length, "tx does not exist");
        _;
    }
    // the caller must not have approved this transaction yet
    modifier notApproved(uint _txId){
        require(!approved[_txId][msg.sender], "tx already approved");
        _;
    }
    // the transaction must not have been executed yet
    modifier notexecuted(uint _txId){
        require(!transactions[_txId].executed, "tx already executed");
        _;
    }

    /// Stores the owner set and the approval threshold.
    constructor(address[] memory _owners, uint _required) {
        // there must be at least one owner to approve transactions
        require(_owners.length >0 , "owners required");
        // BUG FIX: validate against the `_owners` argument, not the `owners`
        // state array — `owners` is still empty here (it is filled in the loop
        // below), so the old check `_required <= owners.length` made every
        // deployment with `_required > 0` revert.
        require(
            _required >0 && _required <= _owners.length,
            "Invalid required numbers of owners");
        // register each owner, rejecting the zero address and duplicates
        for (uint i ; i< _owners.length ; i++){
            address owner = _owners[i];
            require(owner != address(0), "invalid owner");
            require(!isOwner[owner], "owner is not unique");
            isOwner[owner] = true;
            owners.push(owner);
        }
        required = _required;
    }

    // accept ether deposits into the wallet
    receive() external payable {
        emit Deposit(msg.sender, msg.value);
    }

    /// Queues a new transaction for approval. Only owners may submit.
    function submit(address _to, uint _value, bytes calldata _data)
        external
        onlyOwner{
        transactions.push(Transaction({
            to: _to,
            value: _value,
            data: _data,
            executed: false
        }));
        emit Submit(transactions.length -1);
    }

    /// Records the caller's approval of a queued transaction.
    /// (name `aapproved` kept as-is for ABI compatibility)
    function aapproved(uint _txId)
        external
        onlyOwner
        txExists(_txId)
        notApproved(_txId)
    {
        approved[_txId][msg.sender] = true;
        emit Approve(msg.sender, _txId);
    }

    /// Counts how many owners have approved transaction `_txId`.
    function _getApprovalCount(uint _txId) private view returns(uint count){
        for(uint i; i< owners.length; i++){
            if (approved[_txId][owners[i]]){
                count += 1;
            }
        }
    }

    /// Executes a sufficiently-approved transaction (marks executed first,
    /// then performs the external call).
    /// NOTE(review): callable by anyone once the approval threshold is met —
    /// confirm whether an `onlyOwner` restriction is desired here.
    function execute(uint _txId) external txExists(_txId) notexecuted(_txId){
        require(_getApprovalCount(_txId) >= required, "approval< required");
        Transaction storage transaction = transactions[_txId];
        transaction.executed = true;
        (bool success, ) = transaction.to.call{value: transaction.value}(
            transaction.data
        );
        require(success, "tx failed");
        emit Execute(_txId);
    }

    /// Withdraws the caller's prior approval of a not-yet-executed transaction.
    function revoke(uint _txId)
        external
        onlyOwner
        txExists(_txId)
        notexecuted(_txId){
        require(approved[_txId][msg.sender], "tx not approved");
        approved[_txId][msg.sender] = false;
        emit Revoke(msg.sender, _txId);
    }
}
<template>
  <!--
    App shell: header, balance summary, income/expense totals, the
    transaction list (emits on-remove-transaction) and the add form
    (emits on-submit). All children are bound to the same `transactions` ref.
  -->
  <div class="app">
    <TheHeader></TheHeader>
    <TheBalance :transactions="transactions"></TheBalance>
    <IncomeExpenses :transactions="transactions"></IncomeExpenses>
    <TransactionList
      @on-remove-transaction="removeTransaction"
      :transactions="transactions"
    ></TransactionList>
    <AddTransaction @on-submit="addTransaction"></AddTransaction>
  </div>
</template>
<script setup>
import { ref, onMounted } from "vue";
import TheHeader from "./components/TheHeader.vue";
import TheBalance from "./components/TheBalance.vue";
import IncomeExpenses from "./components/IncomeExpenses.vue";
import TransactionList from "./components/TransactionList.vue";
import AddTransaction from "./components/AddTransaction.vue";

// Append a new transaction and persist the updated list.
const addTransaction = (data) => {
  transactions.value.push(data);
  storeTransactions();
};

// Remove the transaction with the given id and persist.
// BUG FIX: use strict inequality (!==) instead of loose != so a string id
// never silently matches a numeric one.
const removeTransaction = (id) => {
  transactions.value = transactions.value.filter((t) => {
    return t.id !== id;
  });
  storeTransactions();
};

// Persist the current transaction list to localStorage.
const storeTransactions = () => {
  localStorage.setItem("transactions", JSON.stringify(transactions.value));
};

// Shared reactive transaction list, hydrated from localStorage on mount.
const transactions = ref([]);
onMounted(() => {
  const storedTransactions = JSON.parse(localStorage.getItem("transactions"));
  if (storedTransactions) {
    transactions.value = storedTransactions;
  }
});
</script>
<style lang="scss">
/* Global typography/layout for the application shell.
   NOTE(review): this selector targets the #app mount element, while the
   template's root div uses class="app" — confirm the selector matches the
   intended element. */
#app {
  font-family: Avenir, Helvetica, Arial, sans-serif;
  -webkit-font-smoothing: antialiased;
  -moz-osx-font-smoothing: grayscale;
  text-align: center;
  color: #2c3e50;
  margin-top: 60px;
}
</style>
# Log Level
My solution to the log levels exercise.
[My Exercism profile](https://exercism.org/profiles/stefanilima)
[Exercism Link](https://exercism.org/tracks/csharp/exercises/log-levels)
## Instructions
In this exercise you'll be processing log-lines.
Each log line is a string formatted as follows:
` "[<LEVEL>]: <MESSAGE>". `
There are three different log levels:
- INFO
- WARNING
- ERROR
You have three tasks, each of which will take a log line and ask you to do something with it.
## Tasks
### Task 1: Get message from a log line
Implement the (static) `LogLine.Message()` method to return a log line's message:
```
LogLine.Message("[ERROR]: Invalid operation")
// => "Invalid operation"
```
Any leading or trailing white space should be removed:
```
LogLine.Message("[WARNING]: Disk almost full\r\n")
// => "Disk almost full"
```
### Task 2: Get log level from a log line
Implement the (static) `LogLine.LogLevel()` method to return a log line's log level, which should be returned in lowercase:
```
LogLine.LogLevel("[ERROR]: Invalid operation")
// => "error"
```
### Task 3: Reformat a log line
Implement the (static) `LogLine.Reformat()` method that reformats the log line, putting the message first and the log level after it in parentheses:
```
LogLine.Reformat("[INFO]: Operation completed")
// => "Operation completed (info)"
```
import os
import logging
import time
from datetime import datetime, timedelta
from tkinter import *
from tkinter import messagebox
import shutil
import glob # For file pattern matching
import subprocess # For file opening
import win32com.client # For detecting if file is open and then closing it
import pandas as pd
import openpyxl
# ---------------------------- BACKEND SETUP ------------------------------- #
# Logging: DEBUG level with timestamp/level/message so runs are fully traceable.
logging.basicConfig(
    level=logging.DEBUG,
    format='%(asctime)s - %(levelname)s - %(message)s'
)
logger = logging.getLogger(__name__)
# Active Directory
# Use the script's own directory as the working directory so the relative
# Inputs/ and Outputs/ paths below resolve regardless of launch location.
active_directory = os.path.dirname(os.path.abspath(__file__))
os.chdir(active_directory)
# ---------------------------- FILE MANAGEMENT ----------------------------- #
# Input report paths (relative to the script directory).
tiktok_ads_path = "Inputs/Thrasio TikTok Shop-Campaign Report(2024-05-12 to 2024-05-18).xlsx"
all_orders_path = "Inputs/All order-2024-05-20-16_03.csv"
affiliate_orders_path = "Inputs/all_20240511210000_20240518205959.csv"
video_analytics_path = "Inputs/Video Performance List_20240520200916.xlsx"
insense_transactions_path = "Inputs/transactions_history (1).xlsx"
company_catalog_path = "Inputs/TikTok Shop US Product Lists.xlsx"
# Output workbook updated by the weekly tasks.
weekly_dashboard_path = "Outputs/TikTok Processor.xlsx"
def create_backup(file_path):
    """Creates a timestamped backup and deletes previous backups of the given file.

    The backup keeps the source file's own extension (previously hard-coded
    to ``.xlsx``), so non-Excel files are backed up correctly as well; for
    ``.xlsx`` inputs the behavior is unchanged.

    Args:
        file_path (string): file path for the file that will be handled.
    """
    try:
        # Build <name>_<timestamp><ext> next to the original file.
        file_dir, file_name = os.path.split(file_path)
        base_name, file_ext = os.path.splitext(file_name)
        timestamp = datetime.now().strftime('%Y-%m-%d_%H-%M-%S')
        backup_name = os.path.join(file_dir, f"{base_name}_{timestamp}{file_ext}")
        shutil.copy(file_path, backup_name)
        # Delete previous backups of the same file (same base name/extension),
        # keeping only the backup just created.
        backup_pattern = os.path.join(file_dir, f"{base_name}_*{file_ext}")
        for old_backup in glob.glob(backup_pattern):
            if old_backup != backup_name:
                try:
                    os.remove(old_backup)
                    logger.info(f"Deleted previous backup: {old_backup}")
                except Exception as e:
                    # Best-effort cleanup: a stale backup is not fatal.
                    logger.warning(f"Failed to delete backup: {old_backup}. Error: {e}")
        logger.info(f"Backup created: {backup_name}")
        logger.info(f"Backup created at: {os.path.abspath(backup_name)}")
    except Exception as e:
        logger.error(f"Backup or deletion failed: {e}")
        messagebox.showerror(title="Error", message=f"An error occurred during backup: {e}")
def open_file(file_path):
    """Open the file using the default application.

    Args:
        file_path (string): file path for the file that will be handled.
    """
    try:
        # Delegate to the Windows shell "start" verb so the OS chooses the
        # default application; the empty string fills the window-title slot.
        launch_command = ["start", "", file_path]
        subprocess.Popen(launch_command, shell=True)
        logger.info(f"open_file: Opened the file: {file_path}")
    except Exception as e:
        logger.error(f"open_file: An error occurred: {e}")
        messagebox.showerror(title="Error", message=f"An error occurred while opening the file: {e}")
def close_open_file(file_path):
    """Check if the specified file is open and close it if necessary.

    Attaches to the running Excel instance via COM, scans its open workbooks,
    and closes the one matching `file_path` without saving changes.

    Args:
        file_path (string): file path for the file that will be handled.
    """
    try:
        # Normalize and get the absolute file path for the operating system,
        # lower-cased so drive-letter/case differences don't break matching
        normalized_file_path = os.path.abspath(os.path.normpath(file_path)).lower()
        # Attach to (or start) the Excel COM application
        excel = win32com.client.Dispatch("Excel.Application")
        # Debug: Log the normalized file path
        logger.debug(f"Normalized file path to close: {normalized_file_path}")
        # Extract the filename from the file path
        target_filename = os.path.basename(normalized_file_path)
        # Iterate through the open workbooks
        for workbook in excel.Workbooks:
            workbook_path = os.path.abspath(os.path.normpath(workbook.FullName)).lower()
            # Debug: Log the workbook path
            logger.debug(f"Open workbook path: {workbook_path}")
            # Extract the filename from the workbook path
            workbook_filename = os.path.basename(workbook_path)
            # Compare filenames only (not full paths) to handle OneDrive paths
            # and case differences; close without saving on the first match
            if workbook_filename == target_filename:
                workbook.Close(SaveChanges=False)
                logger.info(f"close_open_file: Closed the open file: {normalized_file_path}")
                break
        else:
            # for/else: no open workbook matched the target filename
            logger.info(f"close_open_file: File is not open: {normalized_file_path}")
        # Release the COM reference so Excel can let go of the file
        del excel
        time.sleep(1)  # Add a short delay to ensure the Excel process releases the file
    except Exception as e:
        logger.error(f"close_open_file: An error occurred: {e}")
        messagebox.showerror(title="Error", message=f"An error occurred while closing the file: {e}")
# ---------------------------- ERROR CHECKERS ------------------------------ #
def check_write_permission(directory):
    """Check whether the current user can write to ``directory``.

    Called before creating a file backup so the caller can abort early.

    Args:
        directory (string): directory to test for write access.

    Returns:
        bool: True if write permission exists, False otherwise.
        (The previous docstring incorrectly said this returns None.)
    """
    writable = os.access(directory, os.W_OK)
    if writable:
        logger.info(f"Write permission exists for: {directory}")
    else:
        logger.error(f"No write permission for: {directory}")
    return writable
# ---------------------------- CONSTANTS ------------------------------- #
FONT_NAME = "Calibri"
FONT_SIZE = 11
WHITE = "#fcf7f9"
PLATFORM_FEE = 0.06  # Tiktok's Platform Fee (fraction of revenue)
# ---------------------------- CATALOG --------------------------------- #
# Naming convention: <Parent ASIN>_<Parent TTSID>
# Each entry: [product title, parent TTSID, [child TTSIDs], [SKU codes]]
B0CTMBJTCW_1729385211989037378 = ["Angry Orange Pet Odor Eliminator with Citrus Scent for Strong Dog or Cat Pee Smells on Carpet, Furniture & Indoor Outdoor Floors - 24 Fluid Ounces - Puppy Supplies", 1729385211989037378, [1729462219447243074, 1729485339627000130, 1729462219447308610], ["AOR-PET-PrMx-24", "AOR-PET-RTU24-2PK", "AOR-PET-PrMx-128-AB"]]
B08D7M933F_1729386030694895938 = ["Nippies Skin Reusable Covers - Sticky Adhesive Silicone Pasties - Reusable Skin Adhesive Covers for Women with Travel Box", 1729386030694895938, [1729386030694961474,1729386030695289154,1729386030695027010,1729386030695354690,1729386030695092546,1729386031247233346,1729386030695158082,1729386031247298882,1729386030695223618,1729386031247364418], ["NN-Adhesive Silicone Nippies-Lt","NN-Adhesive Silicone Nippies-Lt-Sz2","NN-Adhesive Silicone Nippies-Md","NN-Adhesive Silicone Nippies-Md-Sz2","NN-Adhesive Silicone Nippies-Dk","NN-Adhesive Silicone Nippies-Dk-Sz2","BSX-SKADES1","BSX-SKADES2","BSX-SKADHZ1","BSX-SKADHZ2"]]
# Dictionary of TTPID that should be processed (dropdown key -> catalog entry)
ACTIVE_PRODUCT_LIST = {"AO_PB0CTMBJTCW": B0CTMBJTCW_1729385211989037378,
                       "Nippies_PB08D7M933F": B08D7M933F_1729386030694895938,
                       "Nippies_B001PU9A9Q": B08D7M933F_1729386030694895938
                       }
# ---------------------------- FINANCIAL METRICS --------------------------- #
# Module-level metric accumulators, populated by process_all_orders() and the
# other weekly-task steps; None until computed.
# -- revenue --
sales = None
seller_discount = None
shipping_fee_income = None
shipping_fee_seller_discount = None
shipping_fee_net_income = None
gross_revenue = None
returns = None
net_revenue = None
# -- cost of goods sold --
cogs = None
shipping_cost = None
total_cogs = None
product_sample_cogs = None
product_sample_shipping_cost = None
# -- marketing --
affiliate_commission = None
insense_joinbrands_flat_fee = None
marketing_affiliate_total = None
tiktok_ads = None
total_marketing_expenses = None
# -- profitability --
platform_fee = None
net_income_cp = None
net_margin = None
# -- pricing / units --
asp_before_disc = None
asp_after_disc = None
total_units = None
total_units_wow_percent = None
# -- media performance --
ads_impressions = None
video_views = None
product_impressions = None
product_clicks = None
media_ctr = None
media_cvr = None
# -- samples and creator content --
num_of_samples_sent_insense = None
num_of_samples_sent_tts = None
num_of_content_posted = None
viral_video_greater_than_1mm_vv = None
viral_video_link = None
viral_video_performance = None
best_video_views = None
best_video_link = None
best_video_performance = None
lw_vv_avg = None
# -- free-text fields --
event = None
comment = None
# ---------------------------- WEEKLY TASKS -------------------------------- #
def weekly_tasks(ttpid):
    """Runs the weekly tasks for the selected product, triggered by the UI button.

    Sequence: close the dashboard workbook if open, back it up, load the
    product's sheet into a dataframe, then extract financial metrics from
    the All Orders report.

    Args:
        ttpid (string): the TTPID selected by the user from the dropdown list;
            also the name of the dashboard sheet to process.
    """
    try:
        close_open_file(weekly_dashboard_path)  # Close dashboard if it's open
        logger.info(f"Closed the dashboard file: {weekly_dashboard_path}")
        create_backup(weekly_dashboard_path)  # Create backup of the current file
        logger.info(f"Created backup for file: {weekly_dashboard_path}")
        current_df = create_dataframe_from_sheet(ttpid)  # Create df for the sheet we will be working on
        logger.info(f"Created dataframe for sheet: {ttpid}. Data shape: {current_df.shape}")
        process_all_orders()  # Extract all the financial data from the All Orders report.
        logger.info(f"Financial data extracted from all orders.")
        # open_file(weekly_dashboard_path) # Open file after handling FIXME Remove comment
        logger.info(f"Opened the dashboard file: {weekly_dashboard_path}")
    except Exception as e:
        logger.error(f"Error during weekly tasks for TTPID {ttpid}: {e}")
        messagebox.showerror(title="Error", message=f"An error occurred during weekly tasks for TTPID {ttpid}: {e}")
def create_dataframe_from_sheet(sheet_name):
    """Create a dataframe from the specified sheet starting from cell A3.

    Skips the first worksheet row, promotes the next row to column headers,
    and returns the remaining rows re-indexed from 0.

    Args:
        sheet_name (string): The name of the sheet to process.

    Returns:
        pd.DataFrame: The resulting dataframe with proper headers.

    Raises:
        Exception: re-raised from pandas/openpyxl if the sheet cannot be read.
    """
    try:
        logger.info(f"Reading sheet: {sheet_name}")
        # Skip the first worksheet row; the next row holds the column headers.
        df = pd.read_excel(weekly_dashboard_path, sheet_name=sheet_name, skiprows=1)
        logger.info(f"Sheet {sheet_name} read successfully. Data shape: {df.shape}")
        # Promote the first remaining row to column headers.
        df.columns = df.iloc[0]
        df = df[1:]  # Drop the header row from the data body
        logger.info(f"Headers set from the first row. Data shape after setting headers: {df.shape}")
        return df.reset_index(drop=True)
    except Exception as e:
        logger.error(f"Error processing sheet {sheet_name}: {e}")
        # Bare `raise` preserves the original traceback (was `raise e`).
        raise
def process_all_orders():
    """Process the all_orders CSV file to extract financial metrics.

    Reads the All Orders report, drops canceled orders, and populates the
    module-level revenue and sample-quantity globals. Errors are logged and
    surfaced via a message box rather than raised.
    """
    try:
        # Read the CSV file
        all_orders_df = pd.read_csv(all_orders_path)
        logger.info(f"Read all_orders file. Data shape: {all_orders_df.shape}")
        # Drop rows with "Canceled" in the "Order Status" column
        all_orders_df = all_orders_df[all_orders_df["Order Status"] != "Canceled"]
        logger.info(f"Filtered out 'Canceled' orders. Data shape: {all_orders_df.shape}")
        # Calculate financial metrics
        global sales, seller_discount, shipping_fee_income, shipping_fee_seller_discount
        global shipping_fee_net_income, gross_revenue, returns, net_revenue
        global product_sample_cogs, product_sample_shipping_cost, num_of_samples_sent_tts
        sales = all_orders_df["SKU Subtotal Before Discount"].sum()
        seller_discount = all_orders_df["SKU Seller Discount"].sum()
        shipping_fee_income = all_orders_df["Original Shipping Fee"].sum()
        shipping_fee_seller_discount = all_orders_df["Shipping Fee Seller Discount"].sum()
        shipping_fee_net_income = shipping_fee_income - shipping_fee_seller_discount
        gross_revenue = sales + shipping_fee_net_income - seller_discount
        returns = all_orders_df["Order Refund Amount"].sum()
        net_revenue = gross_revenue - returns
        # Rows with a zero unit price are product samples; compute the quantity
        # once instead of filtering three times.
        # NOTE(review): all three sample metrics are assigned the same quantity
        # as in the original code — confirm cogs/shipping should not be costed
        # differently.
        sample_quantity = all_orders_df[all_orders_df["SKU Unit Original Price"] == 0]["Quantity"].sum()
        product_sample_cogs = sample_quantity
        product_sample_shipping_cost = sample_quantity
        num_of_samples_sent_tts = sample_quantity
        logger.info("Financial metrics from All Orders calculated successfully.")
    except Exception as e:
        logger.error(f"Error processing all_orders file: {e}")
        messagebox.showerror(title="Error", message=f"An error occurred while processing the all_orders file: {e}")
# ---------------------------- UI SETUP ------------------------------- #
# Main window UI setup
main_window = Tk()
main_window.title("TikTok Weekly Processor")
main_window.config(padx=50, pady=50, bg=WHITE)
# Variable to store the selected TTPID
selected_ttpid = StringVar()
selected_ttpid.set(list(ACTIVE_PRODUCT_LIST.keys())[0])  # Set the default value
# OptionMenu of all available TTPIDs that can be processed
ttpid_optionmenu = OptionMenu(main_window, selected_ttpid, *ACTIVE_PRODUCT_LIST.keys())
ttpid_optionmenu.grid(column=0, row=0)
# Action button that runs the weekly tasks for the currently selected TTPID
weekly_task_button = Button(text="Perform Weekly Tasks", command=lambda: weekly_tasks(selected_ttpid.get()))
weekly_task_button.grid(column=1, row=0)
# Blocks until the window is closed
main_window.mainloop()
// Copyright 2022 ByteDance and/or its affiliates.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "monolith/native_training/runtime/ops/file_utils.h"
#include <string>
#include "gmock/gmock.h"
#include "gtest/gtest.h"
#include "tensorflow/core/lib/core/status_test_util.h"
namespace tensorflow {
namespace monolith_tf {
namespace {
using ::testing::ElementsAre;
TEST(ValidateShardedFilesTest, Basic) {
  FileSpec spec;
  // A single complete shard set validates.
  TF_EXPECT_OK(ValidateShardedFiles("a/b", {"a/b-00000-of-00001"}));
  // A complete two-shard set validates and reports its shard count via spec.
  TF_EXPECT_OK(ValidateShardedFiles(
      "a/b", {"a/b-00000-of-00002", "a/b-00001-of-00002"}, &spec));
  EXPECT_THAT(spec.nshards(), 2);
  // Files with a -tmp- suffix are tolerated alongside a complete set.
  TF_EXPECT_OK(ValidateShardedFiles(
      "a", {"a-00000-of-00001", "a-00000-of-00001-tmp-1234"}));
  TF_EXPECT_OK(ValidateShardedFiles(
      "a", {"a-00000-of-00001", "a-00000-of-00002-tmp-1234"}));
  // Missing shards, extra shards, and mixed shard counts are rejected.
  EXPECT_FALSE(ValidateShardedFiles("a/b", {"a/b-00000-of-00002"}).ok());
  EXPECT_FALSE(
      ValidateShardedFiles("a/b", {"a/b-00000-of-00001", "a/b-00001-of-00001"})
          .ok());
  EXPECT_FALSE(
      ValidateShardedFiles("a/b", {"a/b-00000-of-00001", "a/b-00000-of-00002",
                                   "a/b-00001-of-00002"})
          .ok());
  // Names that don't match the sharded-file pattern are rejected.
  EXPECT_FALSE(ValidateShardedFiles("a/b", {"random-string"}).ok());
  EXPECT_FALSE(ValidateShardedFiles("a/b", {"a/b-random-string"}).ok());
}
TEST(ValidateShardedFilesTest, FileSpecTest) {
  // ShardedFileSpec enumerates the expected shard filenames in order.
  auto spec = FileSpec::ShardedFileSpec("a/b", 2);
  EXPECT_THAT(spec.GetFilenames(),
              ElementsAre("a/b-00000-of-00002", "a/b-00001-of-00002"));
}
TEST(ValidateShardedFilesTest, LargeFileSet) {
  // A complete 100-shard set built via GetShardedFileName must validate.
  constexpr int kNumShards = 100;
  std::vector<std::string> filenames;
  filenames.reserve(kNumShards);
  for (int shard = 0; shard < kNumShards; ++shard) {
    filenames.push_back(GetShardedFileName("/a", shard, kNumShards));
  }
  TF_EXPECT_OK(ValidateShardedFiles("/a", filenames));
}
} // namespace
} // namespace monolith_tf
} // namespace tensorflow |
from collections import Counter
from enum import Enum
import pytest
from cardpicker.integrations.integrations import get_configured_game_integration
from cardpicker.integrations.mtg import MTG
class TestGetIntegration:
    """Tests resolving the configured game integration from Django settings."""

    @pytest.mark.parametrize("environment_variable, integration_class", [("MTG", MTG)])
    def test_get_integration(self, db, environment_variable, integration_class, settings):
        # Point the GAME setting at the integration name, then verify the
        # resolver returns the matching integration class.
        settings.GAME = environment_variable
        assert get_configured_game_integration() == integration_class
class TestMTGIntegration:
    """Tests for the MTG integration's deck-import and card-pair helpers."""

    # region constants
    class Decks(str, Enum):
        # Known-good deck URLs, one per supported import site.
        # all of these decks have 4x brainstorm, 3x past in flames, and 1x delver of secrets // insectile aberration
        AETHER_HUB = "https://aetherhub.com/Deck/test-796905"
        ARCHIDEKT = "https://archidekt.com/decks/3380653"
        CUBE_COBRA = "https://cubecobra.com/cube/overview/2fj4"
        DECK_STATS = "https://deckstats.net/decks/216625/2754468-test"
        MAGIC_VILLE = "https://magic-ville.com/fr/decks/showdeck?ref=948045"
        MANA_STACK = "https://manastack.com/deck/test-426"
        MOXFIELD = "https://www.moxfield.com/decks/D42-or9pCk-uMi4XzRDziQ"
        MTG_GOLDFISH = "https://www.mtggoldfish.com/deck/5149750"
        SCRYFALL = "https://scryfall.com/@mpcautofill/decks/71bb2d40-c922-4a01-a63e-7ba2dde29a5c"
        TAPPED_OUT = "https://tappedout.net/mtg-decks/09-10-22-DoY-test"
        TCG_PLAYER = "https://decks.tcgplayer.com/magic/standard/mpc-autofill/test/1398367"

        def __str__(self):
            return self.value

    # endregion

    # region tests
    def test_get_double_faced_card_pairs(self):
        # The integration should know at least one double-faced card pair.
        assert len(MTG.get_double_faced_card_pairs()) > 0

    def test_get_meld_pairs(self):
        # The integration should know at least one meld pair.
        assert len(MTG.get_meld_pairs()) > 0

    @pytest.mark.parametrize("url", list(Decks))
    def test_valid_url(self, client, django_settings, snapshot, url):
        # Import each supported site's test deck and compare the card counts
        # (order-independent, via Counter) against the stored snapshot.
        decklist = MTG.query_import_site(url)
        assert Counter(decklist.splitlines()) == snapshot
    # endregion
<!DOCTYPE html>
<!-- Portfolio page. Default document language is Russian; English phrases
     are marked with per-element lang="en" attributes below. -->
<html lang="ru">
  <head>
    <meta charset="UTF-8" />
    <meta http-equiv="X-UA-Compatible" content="IE=edge" />
    <meta name="viewport" content="width=device-width, initial-scale=1.0" />
    <title>portfolio</title>
    <link
      rel="stylesheet"
      href="https://cdn.jsdelivr.net/npm/modern-normalize@1.1.0/modern-normalize.css"
    />
    <link rel="preconnect" href="https://fonts.googleapis.com" />
    <link rel="preconnect" href="https://fonts.gstatic.com" crossorigin />
    <link rel="stylesheet" href="./css/styles.css" />
    <link
      href="https://fonts.googleapis.com/css2?family=Raleway:wght@700&family=Roboto:wght@400;500;700;900&display=swap"
      rel="stylesheet"
    />
  </head>
  <body>
    <header class="header-page">
      <div class="container header-page-container">
        <nav class="nav">
          <a class="logo header-logo link" href="/"><span class="web">Web</span>Studio</a>
          <ul class="list nav-list nav">
            <li class="site-nav-item"><a class="site-nav link" href="./index.html">Студия</a></li>
            <li class="site-nav-item">
              <a class="site-nav link" href="./portfolio.html">Портфолио</a>
            </li>
            <!-- TODO: the contacts page URL is missing; an empty href resolves
                 to the current page. Fill in the real target. -->
            <li class="site-nav-item"><a class="site-nav link" href="">Контакты</a></li>
          </ul>
        </nav>
        <ul class="list nav nav-item">
          <li class="contact-nav-item">
            <!-- mailto: scheme is required; a bare address is treated as a relative URL -->
            <a class="contact-nav link" href="mailto:info@devstudio.com"
              ><span lang="en">info@devstudio.com</span></a
            >
          </li>
          <li class="contact-nav-item">
            <a class="contact-nav link" href="tel:+380961111111">+38 096 111 11 11</a>
          </li>
        </ul>
      </div>
    </header>
    <main>
      <section class="portfolio">
        <div class="container">
          <ul class="list menu">
            <li><button type="button" class="button portfolio-button">Все</button></li>
            <li><button type="button" class="button portfolio-button">Веб-сайты</button></li>
            <li><button type="button" class="button portfolio-button">Приложения</button></li>
            <li><button type="button" class="button portfolio-button">Дизайн</button></li>
            <li><button type="button" class="button portfolio-button">Маркетинг</button></li>
          </ul>
          <ul class="list portfolio-list">
            <li class="portfolio-list-card">
              <img src="./img/img4.jpg" alt="Веб-сайт" width="370" height="294" />
              <div class="portfolio-card">
                <h3 class="portfolio-title">Технокряк</h3>
                <p class="portfolio-text">Веб-сайт</p>
              </div>
            </li>
            <li class="portfolio-list-card">
              <img src="./img/img5.jpg" alt="Дизайн" width="370" height="294" />
              <div class="portfolio-card">
                <h3 class="portfolio-title">
                  Постер <span lang="en">New Orlean vs Golden Star</span>
                </h3>
                <p class="portfolio-text">Дизайн</p>
              </div>
            </li>
            <li class="portfolio-list-card">
              <img src="./img/img6.jpg" alt="Приложение" width="370" height="294" />
              <div class="portfolio-card">
                <h3 class="portfolio-title">Ресторан <span lang="en">Seafood</span></h3>
                <p class="portfolio-text">Приложение</p>
              </div>
            </li>
            <li class="portfolio-list-card">
              <img src="./img/img7.jpg" alt="Маркетинг" width="370" height="294" />
              <div class="portfolio-card">
                <h3 class="portfolio-title">Проект <span lang="en">Prime</span></h3>
                <p class="portfolio-text">Маркетинг</p>
              </div>
            </li>
            <li class="portfolio-list-card">
              <img src="./img/img8.jpg" alt="Приложение" width="370" height="294" />
              <div class="portfolio-card">
                <h3 class="portfolio-title">Проект <span lang="en">Boxes</span></h3>
                <p class="portfolio-text">Приложение</p>
              </div>
            </li>
            <li class="portfolio-list-card">
              <img src="./img/img9.jpg" alt="Веб-сайт" width="370" height="294" />
              <div class="portfolio-card">
                <h3 class="portfolio-title"><span lang="en">Inspiration has no Border</span>s</h3>
                <p class="portfolio-text">Веб-сайт</p>
              </div>
            </li>
            <li class="portfolio-list-card">
              <img src="./img/img10.jpg" alt="Дизайн" width="370" height="294" />
              <div class="portfolio-card">
                <h3 class="portfolio-title">Издание <span lang="en">Limited Edition</span></h3>
                <p class="portfolio-text">Дизайн</p>
              </div>
            </li>
            <li class="portfolio-list-card">
              <img src="./img/img11.jpg" alt="Маркетинг" width="370" height="294" />
              <div class="portfolio-card">
                <h3 class="portfolio-title">Проект <span lang="en">LAB</span></h3>
                <p class="portfolio-text">Маркетинг</p>
              </div>
            </li>
            <li class="portfolio-list-card">
              <img src="./img/img12.jpg" alt="Приложение" width="370" height="294" />
              <div class="portfolio-card">
                <h3 class="portfolio-title"><span lang="en">Growing Business</span></h3>
                <p class="portfolio-text">Приложение</p>
              </div>
            </li>
          </ul>
        </div>
      </section>
    </main>
    <footer class="footer">
      <div class="container footer-section">
        <a class="logo link footer-logo" href="/"><span class="footer-web">Web</span>Studio</a>
        <address>
          <ul class="list">
            <li class="footer-list">
              <a
                class="footer-link link"
                href="https://www.google.com/maps/place/бул.+Леси+Украинки,+26,+Киев,+02000/@50.4265824,30.5372888,18z/data=!3m1!4b1!4m5!3m4!1s0x40d4cf0e033ecbe9:0x57a4dffefec77da0!8m2!3d50.4265807!4d30.5383858"
                >г. Киев, пр-т Леси Украинки, 26</a
              >
            </li>
            <li class="footer-list">
              <!-- mailto: scheme is required; a bare address is treated as a relative URL -->
              <a class="footer-contact link" href="mailto:info@devstudio.com">info@devstudio.com</a>
            </li>
            <li class="footer-list">
              <a class="footer-contact link" href="tel:+380961111111">+38 096 111 11 11</a>
            </li>
          </ul>
        </address>
      </div>
    </footer>
  </body>
</html>
.. _module_becke_numerical_grid:
.. program:: becke_numerical_grid
.. default-role:: option
becke_numerical_grid
This module contains all quantities needed to build Becke's grid used in general for DFT integration. Note that it can be used for whatever integration in R^3 as long as the functions to be integrated are mostly concentrated near the atomic regions.
This grid is built as the union of spherical grids centered on each atom. Each spherical grid contains
a certain number of radial and angular points. No pruning is done on the angular part of the grid.
The main keyword for that module is:
* :option:`becke_numerical_grid grid_type_sgn` which controls the precision of the grid according to the standard **SG-n** grids. This keyword controls the two providers `n_points_integration_angular` and `n_points_radial_grid`.
The main providers of that module are:
* `n_points_integration_angular` which is the number of angular integration points. WARNING: it obeys specific rules, so it cannot be an arbitrary integer. Some of the possible values are [ 50 | 74 | 170 | 194 | 266 | 302 | 590 | 1202 | 2030 | 5810 ] for instance. See :file:`angular.f` for more details.
* `n_points_radial_grid` which is the number of radial integration points. This can be any strictly positive integer. Nevertheless, a minimum of 50 is in general necessary.
* `final_grid_points` which are the (x,y,z) coordinates of the grid points.
* `final_weight_at_r_vector` which are the weights at each grid point
For a simple example of how to use the grid, see :file:`example.irp.f`.
The spherical integration uses Lebedev-Laikov grids, taken from the code distributed through CCL (http://www.ccl.net/).
See next section for explanations and citation policies.
.. code-block:: text
This subroutine is part of a set of subroutines that generate
Lebedev grids [1-6] for integration on a sphere. The original
C-code [1] was kindly provided by Dr. Dmitri N. Laikov and
translated into fortran by Dr. Christoph van Wuellen.
This subroutine was translated using a C to fortran77 conversion
tool written by Dr. Christoph van Wuellen.
Users of this code are asked to include reference [1] in their
publications, and in the user- and programmers-manuals
describing their codes.
This code was distributed through CCL (http://www.ccl.net/).
[1] V.I. Lebedev, and D.N. Laikov
"A quadrature formula for the sphere of the 131st
algebraic order of accuracy"
Doklady Mathematics, Vol. 59, No. 3, 1999, pp. 477-481.
[2] V.I. Lebedev
"A quadrature formula for the sphere of 59th algebraic
order of accuracy"
Russian Acad. Sci. Dokl. Math., Vol. 50, 1995, pp. 283-286.
[3] V.I. Lebedev, and A.L. Skorokhodov
"Quadrature formulas of orders 41, 47, and 53 for the sphere"
Russian Acad. Sci. Dokl. Math., Vol. 45, 1992, pp. 587-592.
[4] V.I. Lebedev
"Spherical quadrature formulas exact to orders 25-29"
Siberian Mathematical Journal, Vol. 18, 1977, pp. 99-107.
[5] V.I. Lebedev
"Quadratures on a sphere"
Computational Mathematics and Mathematical Physics, Vol. 16,
1976, pp. 10-24.
[6] V.I. Lebedev
"Values of the nodes and weights of ninth to seventeenth
order Gauss-Markov quadrature formulae invariant under the
octahedron group with inversion"
Computational Mathematics and Mathematical Physics, Vol. 15,
1975, pp. 44-51.
EZFIO parameters
----------------
.. option:: grid_type_sgn
Type of grid used for the Becke's numerical grid. Can be, by increasing accuracy: [ 0 | 1 | 2 | 3 ]
Default: 2
.. option:: n_points_final_grid
Total number of grid points
.. option:: thresh_grid
threshold on the weight of a given grid point
Default: 1.e-20
.. option:: my_grid_becke
if True, the number of angular and radial grid points are read from EZFIO
Default: False
.. option:: my_n_pt_r_grid
Number of radial grid points given from input
Default: 300
.. option:: my_n_pt_a_grid
Number of angular grid points given from input. Warning: this number cannot be an arbitrary integer. See file list_angular_grid
Default: 1202
.. option:: n_points_extra_final_grid
Total number of extra_grid points
.. option:: extra_grid_type_sgn
Type of extra_grid used for the Becke's numerical extra_grid. Can be, by increasing accuracy: [ 0 | 1 | 2 | 3 ]
Default: 0
.. option:: thresh_extra_grid
threshold on the weight of a given extra_grid point
Default: 1.e-20
.. option:: my_extra_grid_becke
if True, the number of angular and radial extra_grid points are read from EZFIO
Default: False
.. option:: my_n_pt_r_extra_grid
Number of radial extra_grid points given from input
Default: 300
.. option:: my_n_pt_a_extra_grid
Number of angular extra_grid points given from input. Warning: this number cannot be an arbitrary integer. See file list_angular_extra_grid
Default: 1202
.. option:: rad_grid_type
method used to sample the radial space. Possible choices are [KNOWLES | GILL]
Default: KNOWLES
.. option:: extra_rad_grid_type
method used to sample the radial space. Possible choices are [KNOWLES | GILL]
Default: KNOWLES
Providers
---------
.. c:var:: alpha_knowles
File : :file:`becke_numerical_grid/integration_radial.irp.f`
.. code:: fortran
double precision, allocatable :: alpha_knowles (100)
Recommended values for the alpha parameters according to the paper of Knowles (JCP, 104, 1996)
as a function of the nuclear charge
Needed by:
.. hlist::
:columns: 3
* :c:data:`final_weight_at_r`
* :c:data:`final_weight_at_r_extra`
* :c:data:`grid_points_extra_per_atom`
* :c:data:`grid_points_per_atom`
.. c:var:: angular_quadrature_points
File : :file:`becke_numerical_grid/angular_grid_pts.irp.f`
.. code:: fortran
double precision, allocatable :: angular_quadrature_points (n_points_integration_angular,3)
double precision, allocatable :: weights_angular_points (n_points_integration_angular)
weights and grid points for the integration on the angular variables on
the unit sphere centered on (0,0,0)
According to the LEBEDEV scheme
Needs:
.. hlist::
:columns: 3
* :c:data:`n_points_radial_grid`
Needed by:
.. hlist::
:columns: 3
* :c:data:`final_weight_at_r`
* :c:data:`grid_points_per_atom`
.. c:var:: angular_quadrature_points_extra
File : :file:`becke_numerical_grid/angular_extra_grid.irp.f`
.. code:: fortran
double precision, allocatable :: angular_quadrature_points_extra (n_points_extra_integration_angular,3)
double precision, allocatable :: weights_angular_points_extra (n_points_extra_integration_angular)
weights and grid points_extra for the integration on the angular variables on
the unit sphere centered on (0,0,0)
According to the LEBEDEV scheme
Needs:
.. hlist::
:columns: 3
* :c:data:`n_points_extra_radial_grid`
Needed by:
.. hlist::
:columns: 3
* :c:data:`final_weight_at_r_extra`
* :c:data:`grid_points_extra_per_atom`
.. c:var:: dr_radial_extra_integral
File : :file:`becke_numerical_grid/extra_grid.irp.f`
.. code:: fortran
double precision, allocatable :: grid_points_extra_radial (n_points_extra_radial_grid)
double precision :: dr_radial_extra_integral
points_extra in [0,1] to map the radial integral [0,\infty]
Needs:
.. hlist::
:columns: 3
* :c:data:`n_points_extra_radial_grid`
Needed by:
.. hlist::
:columns: 3
* :c:data:`final_weight_at_r_extra`
* :c:data:`grid_points_extra_per_atom`
.. c:var:: dr_radial_integral
File : :file:`becke_numerical_grid/grid_becke.irp.f`
.. code:: fortran
double precision, allocatable :: grid_points_radial (n_points_radial_grid)
double precision :: dr_radial_integral
points in [0,1] to map the radial integral [0,\infty]
Needs:
.. hlist::
:columns: 3
* :c:data:`n_points_radial_grid`
Needed by:
.. hlist::
:columns: 3
* :c:data:`final_weight_at_r`
* :c:data:`grid_points_per_atom`
.. c:var:: final_grid_points
File : :file:`becke_numerical_grid/grid_becke_vector.irp.f`
.. code:: fortran
double precision, allocatable :: final_grid_points (3,n_points_final_grid)
double precision, allocatable :: final_weight_at_r_vector (n_points_final_grid)
integer, allocatable :: index_final_points (3,n_points_final_grid)
integer, allocatable :: index_final_points_reverse (n_points_integration_angular,n_points_radial_grid,nucl_num)
final_grid_points(1:3,j) = (/ x, y, z /) of the jth grid point
final_weight_at_r_vector(i) = Total weight function of the ith grid point which contains the Lebedev, Voronoi and radial weights contributions
index_final_points(1:3,i) = gives the angular, radial and atomic indices associated to the ith grid point
index_final_points_reverse(i,j,k) = index of the grid point having i as angular, j as radial and k as atomic indices
Needs:
.. hlist::
:columns: 3
* :c:data:`final_weight_at_r`
* :c:data:`grid_points_per_atom`
* :c:data:`n_points_final_grid`
* :c:data:`n_points_radial_grid`
* :c:data:`nucl_num`
* :c:data:`thresh_grid`
Needed by:
.. hlist::
:columns: 3
* :c:data:`ao_abs_int_grid`
* :c:data:`ao_overlap_abs_grid`
* :c:data:`ao_prod_abs_r`
* :c:data:`ao_prod_center`
* :c:data:`ao_prod_dist_grid`
* :c:data:`aos_grad_in_r_array`
* :c:data:`aos_in_r_array`
* :c:data:`aos_lapl_in_r_array`
* :c:data:`aos_sr_vc_alpha_lda_w`
* :c:data:`aos_sr_vxc_alpha_lda_w`
* :c:data:`aos_vc_alpha_lda_w`
* :c:data:`aos_vc_alpha_pbe_w`
* :c:data:`aos_vc_alpha_sr_pbe_w`
* :c:data:`aos_vxc_alpha_lda_w`
* :c:data:`aos_vxc_alpha_pbe_w`
* :c:data:`aos_vxc_alpha_sr_pbe_w`
* :c:data:`elec_beta_num_grid_becke`
* :c:data:`energy_c_lda`
* :c:data:`energy_c_sr_lda`
* :c:data:`energy_x_lda`
* :c:data:`energy_x_pbe`
* :c:data:`energy_x_sr_lda`
* :c:data:`energy_x_sr_pbe`
* :c:data:`f_psi_cas_ab`
* :c:data:`f_psi_hf_ab`
* :c:data:`final_grid_points_transp`
* :c:data:`mo_grad_ints`
* :c:data:`mos_in_r_array`
* :c:data:`mos_in_r_array_omp`
* :c:data:`mu_average_prov`
* :c:data:`mu_grad_rho`
* :c:data:`mu_of_r_dft_average`
* :c:data:`mu_rsc_of_r`
* :c:data:`one_e_dm_and_grad_alpha_in_r`
.. c:var:: final_grid_points_extra
File : :file:`becke_numerical_grid/extra_grid_vector.irp.f`
.. code:: fortran
double precision, allocatable :: final_grid_points_extra (3,n_points_extra_final_grid)
double precision, allocatable :: final_weight_at_r_vector_extra (n_points_extra_final_grid)
integer, allocatable :: index_final_points_extra (3,n_points_extra_final_grid)
integer, allocatable :: index_final_points_extra_reverse (n_points_extra_integration_angular,n_points_extra_radial_grid,nucl_num)
final_grid_points_extra(1:3,j) = (/ x, y, z /) of the jth grid point
final_weight_at_r_vector_extra(i) = Total weight function of the ith grid point which contains the Lebedev, Voronoi and radial weights contributions
index_final_points_extra(1:3,i) = gives the angular, radial and atomic indices associated to the ith grid point
index_final_points_extra_reverse(i,j,k) = index of the grid point having i as angular, j as radial and k as atomic indices
Needs:
.. hlist::
:columns: 3
* :c:data:`final_weight_at_r_extra`
* :c:data:`grid_points_extra_per_atom`
* :c:data:`n_points_extra_final_grid`
* :c:data:`n_points_extra_radial_grid`
* :c:data:`nucl_num`
* :c:data:`thresh_extra_grid`
Needed by:
.. hlist::
:columns: 3
* :c:data:`aos_in_r_array_extra`
.. c:var:: final_grid_points_per_atom
File : :file:`becke_numerical_grid/grid_becke_per_atom.irp.f`
.. code:: fortran
double precision, allocatable :: final_grid_points_per_atom (3,n_pts_max_per_atom,nucl_num)
double precision, allocatable :: final_weight_at_r_vector_per_atom (n_pts_max_per_atom,nucl_num)
integer, allocatable :: index_final_points_per_atom (3,n_pts_max_per_atom,nucl_num)
integer, allocatable :: index_final_points_per_atom_reverse (n_points_integration_angular,n_points_radial_grid,nucl_num)
Needs:
.. hlist::
:columns: 3
* :c:data:`final_weight_at_r`
* :c:data:`grid_points_per_atom`
* :c:data:`n_points_radial_grid`
* :c:data:`n_pts_per_atom`
* :c:data:`nucl_num`
* :c:data:`thresh_grid`
.. c:var:: final_grid_points_transp
File : :file:`becke_numerical_grid/grid_becke_vector.irp.f`
.. code:: fortran
double precision, allocatable :: final_grid_points_transp (n_points_final_grid,3)
Transposed final_grid_points
Needs:
.. hlist::
:columns: 3
* :c:data:`final_grid_points`
* :c:data:`n_points_final_grid`
.. c:var:: final_weight_at_r
File : :file:`becke_numerical_grid/grid_becke.irp.f`
.. code:: fortran
double precision, allocatable :: final_weight_at_r (n_points_integration_angular,n_points_radial_grid,nucl_num)
Total weight on each grid point which takes into account all Lebedev, Voronoi and radial weights.
Needs:
.. hlist::
:columns: 3
* :c:data:`alpha_knowles`
* :c:data:`angular_quadrature_points`
* :c:data:`grid_atomic_number`
* :c:data:`grid_points_radial`
* :c:data:`m_knowles`
* :c:data:`n_points_radial_grid`
* :c:data:`nucl_num`
* :c:data:`r_gill`
* :c:data:`rad_grid_type`
* :c:data:`weight_at_r`
Needed by:
.. hlist::
:columns: 3
* :c:data:`final_grid_points`
* :c:data:`final_grid_points_per_atom`
* :c:data:`n_points_final_grid`
* :c:data:`n_pts_per_atom`
.. c:var:: final_weight_at_r_extra
File : :file:`becke_numerical_grid/extra_grid.irp.f`
.. code:: fortran
double precision, allocatable :: final_weight_at_r_extra (n_points_extra_integration_angular,n_points_extra_radial_grid,nucl_num)
Total weight on each grid point which takes into account all Lebedev, Voronoi and radial weights.
Needs:
.. hlist::
:columns: 3
* :c:data:`alpha_knowles`
* :c:data:`angular_quadrature_points_extra`
* :c:data:`extra_rad_grid_type`
* :c:data:`grid_atomic_number`
* :c:data:`grid_points_extra_radial`
* :c:data:`m_knowles`
* :c:data:`n_points_extra_radial_grid`
* :c:data:`nucl_num`
* :c:data:`r_gill`
* :c:data:`weight_at_r_extra`
Needed by:
.. hlist::
:columns: 3
* :c:data:`final_grid_points_extra`
* :c:data:`n_points_extra_final_grid`
.. c:var:: final_weight_at_r_vector
File : :file:`becke_numerical_grid/grid_becke_vector.irp.f`
.. code:: fortran
double precision, allocatable :: final_grid_points (3,n_points_final_grid)
double precision, allocatable :: final_weight_at_r_vector (n_points_final_grid)
integer, allocatable :: index_final_points (3,n_points_final_grid)
integer, allocatable :: index_final_points_reverse (n_points_integration_angular,n_points_radial_grid,nucl_num)
final_grid_points(1:3,j) = (/ x, y, z /) of the jth grid point
final_weight_at_r_vector(i) = Total weight function of the ith grid point which contains the Lebedev, Voronoi and radial weights contributions
index_final_points(1:3,i) = gives the angular, radial and atomic indices associated to the ith grid point
index_final_points_reverse(i,j,k) = index of the grid point having i as angular, j as radial and k as atomic indices
Needs:
.. hlist::
:columns: 3
* :c:data:`final_weight_at_r`
* :c:data:`grid_points_per_atom`
* :c:data:`n_points_final_grid`
* :c:data:`n_points_radial_grid`
* :c:data:`nucl_num`
* :c:data:`thresh_grid`
Needed by:
.. hlist::
:columns: 3
* :c:data:`ao_abs_int_grid`
* :c:data:`ao_overlap_abs_grid`
* :c:data:`ao_prod_abs_r`
* :c:data:`ao_prod_center`
* :c:data:`ao_prod_dist_grid`
* :c:data:`aos_grad_in_r_array`
* :c:data:`aos_in_r_array`
* :c:data:`aos_lapl_in_r_array`
* :c:data:`aos_sr_vc_alpha_lda_w`
* :c:data:`aos_sr_vxc_alpha_lda_w`
* :c:data:`aos_vc_alpha_lda_w`
* :c:data:`aos_vc_alpha_pbe_w`
* :c:data:`aos_vc_alpha_sr_pbe_w`
* :c:data:`aos_vxc_alpha_lda_w`
* :c:data:`aos_vxc_alpha_pbe_w`
* :c:data:`aos_vxc_alpha_sr_pbe_w`
* :c:data:`elec_beta_num_grid_becke`
* :c:data:`energy_c_lda`
* :c:data:`energy_c_sr_lda`
* :c:data:`energy_x_lda`
* :c:data:`energy_x_pbe`
* :c:data:`energy_x_sr_lda`
* :c:data:`energy_x_sr_pbe`
* :c:data:`f_psi_cas_ab`
* :c:data:`f_psi_hf_ab`
* :c:data:`final_grid_points_transp`
* :c:data:`mo_grad_ints`
* :c:data:`mos_in_r_array`
* :c:data:`mos_in_r_array_omp`
* :c:data:`mu_average_prov`
* :c:data:`mu_grad_rho`
* :c:data:`mu_of_r_dft_average`
* :c:data:`mu_rsc_of_r`
* :c:data:`one_e_dm_and_grad_alpha_in_r`
.. c:var:: final_weight_at_r_vector_extra
File : :file:`becke_numerical_grid/extra_grid_vector.irp.f`
.. code:: fortran
double precision, allocatable :: final_grid_points_extra (3,n_points_extra_final_grid)
double precision, allocatable :: final_weight_at_r_vector_extra (n_points_extra_final_grid)
integer, allocatable :: index_final_points_extra (3,n_points_extra_final_grid)
integer, allocatable :: index_final_points_extra_reverse (n_points_extra_integration_angular,n_points_extra_radial_grid,nucl_num)
final_grid_points_extra(1:3,j) = (/ x, y, z /) of the jth grid point
final_weight_at_r_vector_extra(i) = Total weight function of the ith grid point which contains the Lebedev, Voronoi and radial weights contributions
index_final_points_extra(1:3,i) = gives the angular, radial and atomic indices associated to the ith grid point
index_final_points_extra_reverse(i,j,k) = index of the grid point having i as angular, j as radial and k as atomic indices
Needs:
.. hlist::
:columns: 3
* :c:data:`final_weight_at_r_extra`
* :c:data:`grid_points_extra_per_atom`
* :c:data:`n_points_extra_final_grid`
* :c:data:`n_points_extra_radial_grid`
* :c:data:`nucl_num`
* :c:data:`thresh_extra_grid`
Needed by:
.. hlist::
:columns: 3
* :c:data:`aos_in_r_array_extra`
.. c:var:: final_weight_at_r_vector_per_atom
File : :file:`becke_numerical_grid/grid_becke_per_atom.irp.f`
.. code:: fortran
double precision, allocatable :: final_grid_points_per_atom (3,n_pts_max_per_atom,nucl_num)
double precision, allocatable :: final_weight_at_r_vector_per_atom (n_pts_max_per_atom,nucl_num)
integer, allocatable :: index_final_points_per_atom (3,n_pts_max_per_atom,nucl_num)
integer, allocatable :: index_final_points_per_atom_reverse (n_points_integration_angular,n_points_radial_grid,nucl_num)
Needs:
.. hlist::
:columns: 3
* :c:data:`final_weight_at_r`
* :c:data:`grid_points_per_atom`
* :c:data:`n_points_radial_grid`
* :c:data:`n_pts_per_atom`
* :c:data:`nucl_num`
* :c:data:`thresh_grid`
.. c:var:: grid_atomic_number
File : :file:`becke_numerical_grid/atomic_number.irp.f`
.. code:: fortran
integer, allocatable :: grid_atomic_number (nucl_num)
Atomic number used to adjust the grid
Needs:
.. hlist::
:columns: 3
* :c:data:`nucl_charge`
* :c:data:`nucl_num`
Needed by:
.. hlist::
:columns: 3
* :c:data:`final_weight_at_r`
* :c:data:`final_weight_at_r_extra`
* :c:data:`grid_points_extra_per_atom`
* :c:data:`grid_points_per_atom`
.. c:var:: grid_points_extra_per_atom
File : :file:`becke_numerical_grid/extra_grid.irp.f`
.. code:: fortran
double precision, allocatable :: grid_points_extra_per_atom (3,n_points_extra_integration_angular,n_points_extra_radial_grid,nucl_num)
x,y,z coordinates of grid points_extra used for integration in 3d space
Needs:
.. hlist::
:columns: 3
* :c:data:`alpha_knowles`
* :c:data:`angular_quadrature_points_extra`
* :c:data:`extra_rad_grid_type`
* :c:data:`grid_atomic_number`
* :c:data:`grid_points_extra_radial`
* :c:data:`m_knowles`
* :c:data:`n_points_extra_radial_grid`
* :c:data:`nucl_coord`
* :c:data:`nucl_num`
* :c:data:`r_gill`
Needed by:
.. hlist::
:columns: 3
* :c:data:`final_grid_points_extra`
* :c:data:`weight_at_r_extra`
.. c:var:: grid_points_extra_radial
File : :file:`becke_numerical_grid/extra_grid.irp.f`
.. code:: fortran
double precision, allocatable :: grid_points_extra_radial (n_points_extra_radial_grid)
double precision :: dr_radial_extra_integral
points_extra in [0,1] to map the radial integral [0,\infty]
Needs:
.. hlist::
:columns: 3
* :c:data:`n_points_extra_radial_grid`
Needed by:
.. hlist::
:columns: 3
* :c:data:`final_weight_at_r_extra`
* :c:data:`grid_points_extra_per_atom`
.. c:var:: grid_points_per_atom
File : :file:`becke_numerical_grid/grid_becke.irp.f`
.. code:: fortran
double precision, allocatable :: grid_points_per_atom (3,n_points_integration_angular,n_points_radial_grid,nucl_num)
x,y,z coordinates of grid points used for integration in 3d space
Needs:
.. hlist::
:columns: 3
* :c:data:`alpha_knowles`
* :c:data:`angular_quadrature_points`
* :c:data:`grid_atomic_number`
* :c:data:`grid_points_radial`
* :c:data:`m_knowles`
* :c:data:`n_points_radial_grid`
* :c:data:`nucl_coord`
* :c:data:`nucl_num`
* :c:data:`r_gill`
* :c:data:`rad_grid_type`
Needed by:
.. hlist::
:columns: 3
* :c:data:`final_grid_points`
* :c:data:`final_grid_points_per_atom`
* :c:data:`weight_at_r`
.. c:var:: grid_points_radial
File : :file:`becke_numerical_grid/grid_becke.irp.f`
.. code:: fortran
double precision, allocatable :: grid_points_radial (n_points_radial_grid)
double precision :: dr_radial_integral
points in [0,1] to map the radial integral [0,\infty]
Needs:
.. hlist::
:columns: 3
* :c:data:`n_points_radial_grid`
Needed by:
.. hlist::
:columns: 3
* :c:data:`final_weight_at_r`
* :c:data:`grid_points_per_atom`
.. c:var:: index_final_points
File : :file:`becke_numerical_grid/grid_becke_vector.irp.f`
.. code:: fortran
double precision, allocatable :: final_grid_points (3,n_points_final_grid)
double precision, allocatable :: final_weight_at_r_vector (n_points_final_grid)
integer, allocatable :: index_final_points (3,n_points_final_grid)
integer, allocatable :: index_final_points_reverse (n_points_integration_angular,n_points_radial_grid,nucl_num)
final_grid_points(1:3,j) = (/ x, y, z /) of the jth grid point
final_weight_at_r_vector(i) = Total weight function of the ith grid point which contains the Lebedev, Voronoi and radial weights contributions
index_final_points(1:3,i) = gives the angular, radial and atomic indices associated to the ith grid point
index_final_points_reverse(i,j,k) = index of the grid point having i as angular, j as radial and k as atomic indices
Needs:
.. hlist::
:columns: 3
* :c:data:`final_weight_at_r`
* :c:data:`grid_points_per_atom`
* :c:data:`n_points_final_grid`
* :c:data:`n_points_radial_grid`
* :c:data:`nucl_num`
* :c:data:`thresh_grid`
Needed by:
.. hlist::
:columns: 3
* :c:data:`ao_abs_int_grid`
* :c:data:`ao_overlap_abs_grid`
* :c:data:`ao_prod_abs_r`
* :c:data:`ao_prod_center`
* :c:data:`ao_prod_dist_grid`
* :c:data:`aos_grad_in_r_array`
* :c:data:`aos_in_r_array`
* :c:data:`aos_lapl_in_r_array`
* :c:data:`aos_sr_vc_alpha_lda_w`
* :c:data:`aos_sr_vxc_alpha_lda_w`
* :c:data:`aos_vc_alpha_lda_w`
* :c:data:`aos_vc_alpha_pbe_w`
* :c:data:`aos_vc_alpha_sr_pbe_w`
* :c:data:`aos_vxc_alpha_lda_w`
* :c:data:`aos_vxc_alpha_pbe_w`
* :c:data:`aos_vxc_alpha_sr_pbe_w`
* :c:data:`elec_beta_num_grid_becke`
* :c:data:`energy_c_lda`
* :c:data:`energy_c_sr_lda`
* :c:data:`energy_x_lda`
* :c:data:`energy_x_pbe`
* :c:data:`energy_x_sr_lda`
* :c:data:`energy_x_sr_pbe`
* :c:data:`f_psi_cas_ab`
* :c:data:`f_psi_hf_ab`
* :c:data:`final_grid_points_transp`
* :c:data:`mo_grad_ints`
* :c:data:`mos_in_r_array`
* :c:data:`mos_in_r_array_omp`
* :c:data:`mu_average_prov`
* :c:data:`mu_grad_rho`
* :c:data:`mu_of_r_dft_average`
* :c:data:`mu_rsc_of_r`
* :c:data:`one_e_dm_and_grad_alpha_in_r`
.. c:var:: index_final_points_extra
File : :file:`becke_numerical_grid/extra_grid_vector.irp.f`
.. code:: fortran
double precision, allocatable :: final_grid_points_extra (3,n_points_extra_final_grid)
double precision, allocatable :: final_weight_at_r_vector_extra (n_points_extra_final_grid)
integer, allocatable :: index_final_points_extra (3,n_points_extra_final_grid)
integer, allocatable :: index_final_points_extra_reverse (n_points_extra_integration_angular,n_points_extra_radial_grid,nucl_num)
final_grid_points_extra(1:3,j) = (/ x, y, z /) of the jth grid point
final_weight_at_r_vector_extra(i) = Total weight function of the ith grid point which contains the Lebedev, Voronoi and radial weights contributions
index_final_points_extra(1:3,i) = gives the angular, radial and atomic indices associated to the ith grid point
index_final_points_extra_reverse(i,j,k) = index of the grid point having i as angular, j as radial and k as atomic indices
Needs:
.. hlist::
:columns: 3
* :c:data:`final_weight_at_r_extra`
* :c:data:`grid_points_extra_per_atom`
* :c:data:`n_points_extra_final_grid`
* :c:data:`n_points_extra_radial_grid`
* :c:data:`nucl_num`
* :c:data:`thresh_extra_grid`
Needed by:
.. hlist::
:columns: 3
* :c:data:`aos_in_r_array_extra`
.. c:var:: index_final_points_extra_reverse
File : :file:`becke_numerical_grid/extra_grid_vector.irp.f`
.. code:: fortran
double precision, allocatable :: final_grid_points_extra (3,n_points_extra_final_grid)
double precision, allocatable :: final_weight_at_r_vector_extra (n_points_extra_final_grid)
integer, allocatable :: index_final_points_extra (3,n_points_extra_final_grid)
integer, allocatable :: index_final_points_extra_reverse (n_points_extra_integration_angular,n_points_extra_radial_grid,nucl_num)
final_grid_points_extra(1:3,j) = (/ x, y, z /) of the jth grid point
final_weight_at_r_vector_extra(i) = Total weight function of the ith grid point which contains the Lebedev, Voronoi and radial weights contributions
index_final_points_extra(1:3,i) = gives the angular, radial and atomic indices associated to the ith grid point
index_final_points_extra_reverse(i,j,k) = index of the grid point having i as angular, j as radial and k as atomic indices
Needs:
.. hlist::
:columns: 3
* :c:data:`final_weight_at_r_extra`
* :c:data:`grid_points_extra_per_atom`
* :c:data:`n_points_extra_final_grid`
* :c:data:`n_points_extra_radial_grid`
* :c:data:`nucl_num`
* :c:data:`thresh_extra_grid`
Needed by:
.. hlist::
:columns: 3
* :c:data:`aos_in_r_array_extra`
.. c:var:: index_final_points_per_atom
File : :file:`becke_numerical_grid/grid_becke_per_atom.irp.f`
.. code:: fortran
double precision, allocatable :: final_grid_points_per_atom (3,n_pts_max_per_atom,nucl_num)
double precision, allocatable :: final_weight_at_r_vector_per_atom (n_pts_max_per_atom,nucl_num)
integer, allocatable :: index_final_points_per_atom (3,n_pts_max_per_atom,nucl_num)
integer, allocatable :: index_final_points_per_atom_reverse (n_points_integration_angular,n_points_radial_grid,nucl_num)
Needs:
.. hlist::
:columns: 3
* :c:data:`final_weight_at_r`
* :c:data:`grid_points_per_atom`
* :c:data:`n_points_radial_grid`
* :c:data:`n_pts_per_atom`
* :c:data:`nucl_num`
* :c:data:`thresh_grid`
.. c:var:: index_final_points_per_atom_reverse
File : :file:`becke_numerical_grid/grid_becke_per_atom.irp.f`
.. code:: fortran
double precision, allocatable :: final_grid_points_per_atom (3,n_pts_max_per_atom,nucl_num)
double precision, allocatable :: final_weight_at_r_vector_per_atom (n_pts_max_per_atom,nucl_num)
integer, allocatable :: index_final_points_per_atom (3,n_pts_max_per_atom,nucl_num)
integer, allocatable :: index_final_points_per_atom_reverse (n_points_integration_angular,n_points_radial_grid,nucl_num)
Needs:
.. hlist::
:columns: 3
* :c:data:`final_weight_at_r`
* :c:data:`grid_points_per_atom`
* :c:data:`n_points_radial_grid`
* :c:data:`n_pts_per_atom`
* :c:data:`nucl_num`
* :c:data:`thresh_grid`
.. c:var:: index_final_points_reverse
File : :file:`becke_numerical_grid/grid_becke_vector.irp.f`
.. code:: fortran
double precision, allocatable :: final_grid_points (3,n_points_final_grid)
double precision, allocatable :: final_weight_at_r_vector (n_points_final_grid)
integer, allocatable :: index_final_points (3,n_points_final_grid)
integer, allocatable :: index_final_points_reverse (n_points_integration_angular,n_points_radial_grid,nucl_num)
final_grid_points(1:3,j) = (/ x, y, z /) of the jth grid point
final_weight_at_r_vector(i) = Total weight function of the ith grid point which contains the Lebedev, Voronoi and radial weights contributions
index_final_points(1:3,i) = gives the angular, radial and atomic indices associated to the ith grid point
index_final_points_reverse(i,j,k) = index of the grid point having i as angular, j as radial and k as atomic indices
Needs:
.. hlist::
:columns: 3
* :c:data:`final_weight_at_r`
* :c:data:`grid_points_per_atom`
* :c:data:`n_points_final_grid`
* :c:data:`n_points_radial_grid`
* :c:data:`nucl_num`
* :c:data:`thresh_grid`
Needed by:
.. hlist::
:columns: 3
* :c:data:`ao_abs_int_grid`
* :c:data:`ao_overlap_abs_grid`
* :c:data:`ao_prod_abs_r`
* :c:data:`ao_prod_center`
* :c:data:`ao_prod_dist_grid`
* :c:data:`aos_grad_in_r_array`
* :c:data:`aos_in_r_array`
* :c:data:`aos_lapl_in_r_array`
* :c:data:`aos_sr_vc_alpha_lda_w`
* :c:data:`aos_sr_vxc_alpha_lda_w`
* :c:data:`aos_vc_alpha_lda_w`
* :c:data:`aos_vc_alpha_pbe_w`
* :c:data:`aos_vc_alpha_sr_pbe_w`
* :c:data:`aos_vxc_alpha_lda_w`
* :c:data:`aos_vxc_alpha_pbe_w`
* :c:data:`aos_vxc_alpha_sr_pbe_w`
* :c:data:`elec_beta_num_grid_becke`
* :c:data:`energy_c_lda`
* :c:data:`energy_c_sr_lda`
* :c:data:`energy_x_lda`
* :c:data:`energy_x_pbe`
* :c:data:`energy_x_sr_lda`
* :c:data:`energy_x_sr_pbe`
* :c:data:`f_psi_cas_ab`
* :c:data:`f_psi_hf_ab`
* :c:data:`final_grid_points_transp`
* :c:data:`mo_grad_ints`
* :c:data:`mos_in_r_array`
* :c:data:`mos_in_r_array_omp`
* :c:data:`mu_average_prov`
* :c:data:`mu_grad_rho`
* :c:data:`mu_of_r_dft_average`
* :c:data:`mu_rsc_of_r`
* :c:data:`one_e_dm_and_grad_alpha_in_r`
.. c:var:: m_knowles
File : :file:`becke_numerical_grid/grid_becke.irp.f`
.. code:: fortran
integer :: m_knowles
value of the "m" parameter in the equation (7) of the paper of Knowles (JCP, 104, 1996)
Needed by:
.. hlist::
:columns: 3
* :c:data:`final_weight_at_r`
* :c:data:`final_weight_at_r_extra`
* :c:data:`grid_points_extra_per_atom`
* :c:data:`grid_points_per_atom`
.. c:var:: n_points_extra_final_grid
File : :file:`becke_numerical_grid/extra_grid_vector.irp.f`
.. code:: fortran
integer :: n_points_extra_final_grid
Number of points_extra which are non zero
Needs:
.. hlist::
:columns: 3
* :c:data:`final_weight_at_r_extra`
* :c:data:`n_points_extra_radial_grid`
* :c:data:`nucl_num`
* :c:data:`thresh_extra_grid`
Needed by:
.. hlist::
:columns: 3
* :c:data:`aos_in_r_array_extra`
* :c:data:`aos_in_r_array_extra_transp`
* :c:data:`final_grid_points_extra`
.. c:var:: n_points_extra_grid_per_atom
File : :file:`becke_numerical_grid/extra_grid.irp.f`
.. code:: fortran
integer :: n_points_extra_grid_per_atom
Number of grid points_extra per atom
Needs:
.. hlist::
:columns: 3
* :c:data:`n_points_extra_radial_grid`
.. c:var:: n_points_extra_integration_angular
File : :file:`becke_numerical_grid/extra_grid.irp.f`
.. code:: fortran
integer :: n_points_extra_radial_grid
integer :: n_points_extra_integration_angular
n_points_extra_radial_grid = number of radial grid points_extra per atom
n_points_extra_integration_angular = number of angular grid points_extra per atom
These numbers are automatically set by setting the grid_type_sgn parameter
Needs:
.. hlist::
:columns: 3
* :c:data:`extra_grid_type_sgn`
* :c:data:`my_extra_grid_becke`
* :c:data:`my_n_pt_a_extra_grid`
* :c:data:`my_n_pt_r_extra_grid`
Needed by:
.. hlist::
:columns: 3
* :c:data:`angular_quadrature_points_extra`
* :c:data:`final_grid_points_extra`
* :c:data:`final_weight_at_r_extra`
* :c:data:`grid_points_extra_per_atom`
* :c:data:`grid_points_extra_radial`
* :c:data:`n_points_extra_final_grid`
* :c:data:`n_points_extra_grid_per_atom`
* :c:data:`weight_at_r_extra`
.. c:var:: n_points_extra_radial_grid
File : :file:`becke_numerical_grid/extra_grid.irp.f`
.. code:: fortran
integer :: n_points_extra_radial_grid
integer :: n_points_extra_integration_angular
n_points_extra_radial_grid = number of radial grid points_extra per atom
n_points_extra_integration_angular = number of angular grid points_extra per atom
These numbers are automatically set by setting the grid_type_sgn parameter
Needs:
.. hlist::
:columns: 3
* :c:data:`extra_grid_type_sgn`
* :c:data:`my_extra_grid_becke`
* :c:data:`my_n_pt_a_extra_grid`
* :c:data:`my_n_pt_r_extra_grid`
Needed by:
.. hlist::
:columns: 3
* :c:data:`angular_quadrature_points_extra`
* :c:data:`final_grid_points_extra`
* :c:data:`final_weight_at_r_extra`
* :c:data:`grid_points_extra_per_atom`
* :c:data:`grid_points_extra_radial`
* :c:data:`n_points_extra_final_grid`
* :c:data:`n_points_extra_grid_per_atom`
* :c:data:`weight_at_r_extra`
.. c:var:: n_points_final_grid
File : :file:`becke_numerical_grid/grid_becke_vector.irp.f`
.. code:: fortran
integer :: n_points_final_grid
Number of points which are non zero
Needs:
.. hlist::
:columns: 3
* :c:data:`final_weight_at_r`
* :c:data:`n_points_radial_grid`
* :c:data:`nucl_num`
* :c:data:`thresh_grid`
Needed by:
.. hlist::
:columns: 3
* :c:data:`act_mos_in_r_array`
* :c:data:`alpha_dens_kin_in_r`
* :c:data:`ao_abs_int_grid`
* :c:data:`ao_overlap_abs_grid`
* :c:data:`ao_prod_abs_r`
* :c:data:`ao_prod_center`
* :c:data:`ao_prod_dist_grid`
* :c:data:`aos_grad_in_r_array`
* :c:data:`aos_grad_in_r_array_transp`
* :c:data:`aos_grad_in_r_array_transp_3`
* :c:data:`aos_grad_in_r_array_transp_bis`
* :c:data:`aos_in_r_array`
* :c:data:`aos_in_r_array_transp`
* :c:data:`aos_lapl_in_r_array`
* :c:data:`aos_lapl_in_r_array_transp`
* :c:data:`aos_sr_vc_alpha_lda_w`
* :c:data:`aos_sr_vxc_alpha_lda_w`
* :c:data:`aos_vc_alpha_lda_w`
* :c:data:`aos_vc_alpha_pbe_w`
* :c:data:`aos_vc_alpha_sr_pbe_w`
* :c:data:`aos_vxc_alpha_lda_w`
* :c:data:`aos_vxc_alpha_pbe_w`
* :c:data:`aos_vxc_alpha_sr_pbe_w`
* :c:data:`basis_mos_in_r_array`
* :c:data:`core_density`
* :c:data:`core_inact_act_mos_grad_in_r_array`
* :c:data:`core_inact_act_mos_in_r_array`
* :c:data:`core_inact_act_v_kl_contracted`
* :c:data:`core_mos_in_r_array`
* :c:data:`effective_alpha_dm`
* :c:data:`effective_spin_dm`
* :c:data:`elec_beta_num_grid_becke`
* :c:data:`energy_c_lda`
* :c:data:`energy_c_sr_lda`
* :c:data:`energy_x_lda`
* :c:data:`energy_x_pbe`
* :c:data:`energy_x_sr_lda`
* :c:data:`energy_x_sr_pbe`
* :c:data:`f_psi_cas_ab`
* :c:data:`f_psi_cas_ab_old`
* :c:data:`f_psi_hf_ab`
* :c:data:`final_grid_points`
* :c:data:`final_grid_points_transp`
* :c:data:`full_occ_2_rdm_cntrctd`
* :c:data:`full_occ_2_rdm_cntrctd_trans`
* :c:data:`full_occ_v_kl_cntrctd`
* :c:data:`grad_total_cas_on_top_density`
* :c:data:`inact_density`
* :c:data:`inact_mos_in_r_array`
* :c:data:`kinetic_density_generalized`
* :c:data:`mo_grad_ints`
* :c:data:`mos_grad_in_r_array`
* :c:data:`mos_grad_in_r_array_tranp`
* :c:data:`mos_grad_in_r_array_transp_3`
* :c:data:`mos_grad_in_r_array_transp_bis`
* :c:data:`mos_in_r_array`
* :c:data:`mos_in_r_array_omp`
* :c:data:`mos_in_r_array_transp`
* :c:data:`mos_lapl_in_r_array`
* :c:data:`mos_lapl_in_r_array_tranp`
* :c:data:`mu_average_prov`
* :c:data:`mu_grad_rho`
* :c:data:`mu_of_r_dft`
* :c:data:`mu_of_r_dft_average`
* :c:data:`mu_of_r_hf`
* :c:data:`mu_of_r_prov`
* :c:data:`mu_of_r_psi_cas`
* :c:data:`mu_rsc_of_r`
* :c:data:`one_e_act_density_alpha`
* :c:data:`one_e_act_density_beta`
* :c:data:`one_e_cas_total_density`
* :c:data:`one_e_dm_and_grad_alpha_in_r`
* :c:data:`pot_grad_x_alpha_ao_pbe`
* :c:data:`pot_grad_x_alpha_ao_sr_pbe`
* :c:data:`pot_grad_xc_alpha_ao_pbe`
* :c:data:`pot_grad_xc_alpha_ao_sr_pbe`
* :c:data:`pot_scal_x_alpha_ao_pbe`
* :c:data:`pot_scal_x_alpha_ao_sr_pbe`
* :c:data:`pot_scal_xc_alpha_ao_pbe`
* :c:data:`pot_scal_xc_alpha_ao_sr_pbe`
* :c:data:`potential_c_alpha_ao_lda`
* :c:data:`potential_c_alpha_ao_sr_lda`
* :c:data:`potential_x_alpha_ao_lda`
* :c:data:`potential_x_alpha_ao_sr_lda`
* :c:data:`potential_xc_alpha_ao_lda`
* :c:data:`potential_xc_alpha_ao_sr_lda`
* :c:data:`total_cas_on_top_density`
* :c:data:`virt_mos_in_r_array`
.. c:var:: n_points_grid_per_atom
File : :file:`becke_numerical_grid/grid_becke.irp.f`
.. code:: fortran
integer :: n_points_grid_per_atom
Number of grid points per atom
Needs:
.. hlist::
:columns: 3
* :c:data:`n_points_radial_grid`
.. c:var:: n_points_integration_angular
File : :file:`becke_numerical_grid/grid_becke.irp.f`
.. code:: fortran
integer :: n_points_radial_grid
integer :: n_points_integration_angular
n_points_radial_grid = number of radial grid points per atom
n_points_integration_angular = number of angular grid points per atom
These numbers are automatically set by setting the grid_type_sgn parameter
Needs:
.. hlist::
:columns: 3
* :c:data:`grid_type_sgn`
* :c:data:`my_grid_becke`
* :c:data:`my_n_pt_a_grid`
* :c:data:`my_n_pt_r_grid`
Needed by:
.. hlist::
:columns: 3
* :c:data:`angular_quadrature_points`
* :c:data:`final_grid_points`
* :c:data:`final_grid_points_per_atom`
* :c:data:`final_weight_at_r`
* :c:data:`grid_points_per_atom`
* :c:data:`grid_points_radial`
* :c:data:`n_points_final_grid`
* :c:data:`n_points_grid_per_atom`
* :c:data:`n_pts_per_atom`
* :c:data:`weight_at_r`
.. c:var:: n_points_radial_grid
File : :file:`becke_numerical_grid/grid_becke.irp.f`
.. code:: fortran
integer :: n_points_radial_grid
integer :: n_points_integration_angular
n_points_radial_grid = number of radial grid points per atom
n_points_integration_angular = number of angular grid points per atom
These numbers are automatically set by setting the grid_type_sgn parameter
Needs:
.. hlist::
:columns: 3
* :c:data:`grid_type_sgn`
* :c:data:`my_grid_becke`
* :c:data:`my_n_pt_a_grid`
* :c:data:`my_n_pt_r_grid`
Needed by:
.. hlist::
:columns: 3
* :c:data:`angular_quadrature_points`
* :c:data:`final_grid_points`
* :c:data:`final_grid_points_per_atom`
* :c:data:`final_weight_at_r`
* :c:data:`grid_points_per_atom`
* :c:data:`grid_points_radial`
* :c:data:`n_points_final_grid`
* :c:data:`n_points_grid_per_atom`
* :c:data:`n_pts_per_atom`
* :c:data:`weight_at_r`
.. c:var:: n_pts_max_per_atom
File : :file:`becke_numerical_grid/grid_becke_per_atom.irp.f`
.. code:: fortran
integer, allocatable :: n_pts_per_atom (nucl_num)
integer :: n_pts_max_per_atom
Number of points which are non zero
Needs:
.. hlist::
:columns: 3
* :c:data:`final_weight_at_r`
* :c:data:`n_points_radial_grid`
* :c:data:`nucl_num`
* :c:data:`thresh_grid`
Needed by:
.. hlist::
:columns: 3
* :c:data:`final_grid_points_per_atom`
.. c:var:: n_pts_per_atom
File : :file:`becke_numerical_grid/grid_becke_per_atom.irp.f`
.. code:: fortran
integer, allocatable :: n_pts_per_atom (nucl_num)
integer :: n_pts_max_per_atom
Number of points which are non zero
Needs:
.. hlist::
:columns: 3
* :c:data:`final_weight_at_r`
* :c:data:`n_points_radial_grid`
* :c:data:`nucl_num`
* :c:data:`thresh_grid`
Needed by:
.. hlist::
:columns: 3
* :c:data:`final_grid_points_per_atom`
.. c:var:: r_gill
File : :file:`becke_numerical_grid/grid_becke.irp.f`
.. code:: fortran
double precision :: r_gill
Needed by:
.. hlist::
:columns: 3
* :c:data:`final_weight_at_r`
* :c:data:`final_weight_at_r_extra`
* :c:data:`grid_points_extra_per_atom`
* :c:data:`grid_points_per_atom`
.. c:var:: weight_at_r
File : :file:`becke_numerical_grid/grid_becke.irp.f`
.. code:: fortran
double precision, allocatable :: weight_at_r (n_points_integration_angular,n_points_radial_grid,nucl_num)
Weight function at grid points : w_n(r) according to the equation (22)
of Becke original paper (JCP, 88, 1988)
The "n" discrete variable represents the nuclei, which in this array are
represented by the last dimension and the points are labelled by the
other dimensions.
Needs:
.. hlist::
:columns: 3
* :c:data:`grid_points_per_atom`
* :c:data:`n_points_radial_grid`
* :c:data:`nucl_coord_transp`
* :c:data:`nucl_dist_inv`
* :c:data:`nucl_num`
* :c:data:`slater_bragg_type_inter_distance_ua`
Needed by:
.. hlist::
:columns: 3
* :c:data:`final_weight_at_r`
.. c:var:: weight_at_r_extra
File : :file:`becke_numerical_grid/extra_grid.irp.f`
.. code:: fortran
double precision, allocatable :: weight_at_r_extra (n_points_extra_integration_angular,n_points_extra_radial_grid,nucl_num)
Weight function at grid points_extra : w_n(r) according to the equation (22)
of Becke original paper (JCP, 88, 1988)
The "n" discrete variable represents the nuclei, which in this array are
represented by the last dimension and the points_extra are labelled by the
other dimensions.
Needs:
.. hlist::
:columns: 3
* :c:data:`grid_points_extra_per_atom`
* :c:data:`n_points_extra_radial_grid`
* :c:data:`nucl_coord_transp`
* :c:data:`nucl_dist_inv`
* :c:data:`nucl_num`
* :c:data:`slater_bragg_type_inter_distance_ua`
Needed by:
.. hlist::
:columns: 3
* :c:data:`final_weight_at_r_extra`
.. c:var:: weights_angular_points
File : :file:`becke_numerical_grid/angular_grid_pts.irp.f`
.. code:: fortran
double precision, allocatable :: angular_quadrature_points (n_points_integration_angular,3)
double precision, allocatable :: weights_angular_points (n_points_integration_angular)
weights and grid points for the integration on the angular variables on
the unit sphere centered on (0,0,0)
According to the LEBEDEV scheme
Needs:
.. hlist::
:columns: 3
* :c:data:`n_points_radial_grid`
Needed by:
.. hlist::
:columns: 3
* :c:data:`final_weight_at_r`
* :c:data:`grid_points_per_atom`
.. c:var:: weights_angular_points_extra
File : :file:`becke_numerical_grid/angular_extra_grid.irp.f`
.. code:: fortran
double precision, allocatable :: angular_quadrature_points_extra (n_points_extra_integration_angular,3)
double precision, allocatable :: weights_angular_points_extra (n_points_extra_integration_angular)
weights and grid points_extra for the integration on the angular variables on
the unit sphere centered on (0,0,0)
According to the LEBEDEV scheme
Needs:
.. hlist::
:columns: 3
* :c:data:`n_points_extra_radial_grid`
Needed by:
.. hlist::
:columns: 3
* :c:data:`final_weight_at_r_extra`
* :c:data:`grid_points_extra_per_atom`
Subroutines / functions
-----------------------
.. c:function:: cell_function_becke:
File : :file:`becke_numerical_grid/step_function_becke.irp.f`
.. code:: fortran
double precision function cell_function_becke(r, atom_number)
atom_number :: atom on which the cell function of Becke (1988, JCP,88(4))
r(1:3) :: x,y,z coordinantes of the current point
Needs:
.. hlist::
:columns: 3
* :c:data:`nucl_coord_transp`
* :c:data:`nucl_dist_inv`
* :c:data:`nucl_num`
* :c:data:`slater_bragg_type_inter_distance_ua`
.. c:function:: derivative_knowles_function:
File : :file:`becke_numerical_grid/integration_radial.irp.f`
.. code:: fortran
double precision function derivative_knowles_function(alpha, m, x)
Derivative of the function proposed by Knowles (JCP, 104, 1996) for distributing the radial points
.. c:function:: example_becke_numerical_grid:
File : :file:`becke_numerical_grid/example.irp.f`
.. code:: fortran
subroutine example_becke_numerical_grid
subroutine that illustrates the main features available in becke_numerical_grid
Needs:
.. hlist::
:columns: 3
* :c:data:`final_grid_points`
* :c:data:`final_weight_at_r`
* :c:data:`grid_points_per_atom`
* :c:data:`n_points_final_grid`
* :c:data:`n_points_radial_grid`
* :c:data:`nucl_coord`
* :c:data:`nucl_num`
.. c:function:: f_function_becke:
File : :file:`becke_numerical_grid/step_function_becke.irp.f`
.. code:: fortran
double precision function f_function_becke(x)
.. c:function:: knowles_function:
File : :file:`becke_numerical_grid/integration_radial.irp.f`
.. code:: fortran
double precision function knowles_function(alpha, m, x)
Function proposed by Knowles (JCP, 104, 1996) for distributing the radial points :
the Log "m" function ( equation (7) in the paper )
.. c:function:: step_function_becke:
File : :file:`becke_numerical_grid/step_function_becke.irp.f`
.. code:: fortran
double precision function step_function_becke(x)
Step function of the Becke paper (1988, JCP,88(4)) |
// https://practice.geeksforgeeks.org/problems/prerequisite-tasks/1
package Graph;
import java.util.List;
import java.util.ArrayList;
public class PrerequisiteTasks_DFS {

    public static void main(String[] args) {
        int taskCount = 4;
        int[][] pairs = {{1, 0}, {2, 1}, {3, 2}};
        System.out.println(isPossible(taskCount, pairs));
    }

    /**
     * Returns true when all N tasks can be completed, i.e. the prerequisite
     * graph contains no directed cycle.
     *
     * @param N             number of tasks, labelled 0..N-1
     * @param prerequisites pairs {a, b} meaning task b must be done before task a
     * @return true iff the dependency graph is acyclic
     */
    public static boolean isPossible(int N, int[][] prerequisites) {
        List<List<Integer>> graph = buildGraph(prerequisites, N);
        boolean[] seen = new boolean[N];
        boolean[] onStack = new boolean[N];
        for (int start = 0; start < N; start++) {
            // A cycle reachable from any unvisited node makes completion impossible.
            if (!seen[start] && hasCycle(start, graph, seen, onStack)) {
                return false;
            }
        }
        return true;
    }

    /** DFS helper: reports true when a back edge (cycle) is reachable from node. */
    private static boolean hasCycle(int node, List<List<Integer>> graph,
                                    boolean[] seen, boolean[] onStack) {
        seen[node] = true;
        onStack[node] = true;
        for (int next : graph.get(node)) {
            if (onStack[next]) {
                return true; // back edge: next is on the current DFS path
            }
            if (!seen[next] && hasCycle(next, graph, seen, onStack)) {
                return true;
            }
        }
        onStack[node] = false; // done exploring this path
        return false;
    }

    /** Builds an adjacency list with an edge prerequisite -&gt; dependent task. */
    private static List<List<Integer>> buildGraph(int[][] prerequisites, int N) {
        List<List<Integer>> graph = new ArrayList<>(N);
        for (int i = 0; i < N; i++) {
            graph.add(new ArrayList<>());
        }
        for (int[] edge : prerequisites) {
            graph.get(edge[1]).add(edge[0]);
        }
        return graph;
    }
}
<?xml version='1.0' encoding='ISO-8859-1'?>
<!DOCTYPE article PUBLIC "-//OASIS//DTD DocBook XML V4.1.2//EN"
"http://www.oasis-open.org/docbook/xml/4.1.2/docbookx.dtd" >
<!--
Layout borrowed from Doug's smartmontools_scsi.xml. The following text
is also from his file.
This is DocBook XML that can be rendered into a single HTML page with a
command like 'xmlto html-nochunks <this_file_name>'. It can also be
rendered into multi-page HTML (drop the "-nochunks") or pdf, ps, txt,
etc.
-->
<article id="index">
<articleinfo>
<title>FAQ - Frequently Asked Questions</title>
<author>
<firstname>smartmontools</firstname>
<surname>developers</surname>
<affiliation>
<address>
<email>smartmontools-support@lists.sourceforge.net</email>
</address>
</affiliation>
</author>
<authorinitials>sd</authorinitials>
<pubdate>2003-09-24</pubdate>
<revhistory>
<revision>
<revnumber>1.0</revnumber>
<date>2003-10-22</date>
<authorinitials>sd</authorinitials>
<revremark>
Moved from index.html to XML
</revremark>
</revision>
</revhistory>
<copyright>
<year>2003</year>
<holder>Bruce Allen</holder>
</copyright>
<legalnotice>
<para>
Permission is granted to copy, distribute and/or modify this
document under the terms of the GNU Free Documentation License,
Version 1.1 or any later version published by the Free Software
Foundation; with no Invariant Sections, with no Front-Cover Texts,
and with no Back-Cover Texts.
</para>
<para>
For an online copy of the license see
<ulink url="http://www.fsf.org/copyleft/fdl.html">
<literal>http://www.fsf.org/copyleft/fdl.html</literal></ulink> .
</para>
</legalnotice>
<abstract>
<para>
FAQ - Frequently Asked Questions
</para>
</abstract>
</articleinfo>
<!--
<toc></toc>
-->
<sect1 id="a">
<title>What do I do if I have problems, or need support? Suppose I want
to become a developer, or suggest some new extensions?</title>
<para>First, search the support mailing list archives to see if your
question has been answered. Instructions are in the following
paragraph. If you don't find an answer there, then please send an
e-mail to the smartmontools-support mailing list. Instructions are
available at <ulink url="http://lists.sourceforge.net/mailman/listinfo/smartmontools-support">
<literal>http://lists.sourceforge.net/mailman/listinfo/smartmontools-support</literal></ulink>
. The list is moderated but you're not required to subscribe to it in
order to post your question.</para>
<para>To search the archives, first go to <ulink url="http://sourceforge.net/mailarchive/forum.php?forum=smartmontools-support">
<literal>http://sourceforge.net/mailarchive/forum.php?forum=smartmontools-support</literal></ulink>
. In the top left corner you will see a search box: use <emphasis
role="bold">Mailing List</emphasis> as the type of search. This tool
works very well.</para>
<para>Note that from time to time SourceForge has mailing problems and
you'll get a message telling you that <emphasis role="italic">Either
your mailing list name was misspelled or your mailing list has not been
archived yet. If this list has just been created, please retry in 2-4
hours</emphasis>. If this happens, you'll have to try again
later.</para>
</sect1>
<sect1 id="b">
<title>What are the future plans for smartmontools?</title>
<para>My plan is that smartmontools-5.x will support ATA/ATAPI-5 disks.
Eventually, we'll do smartmontools-6.x to support ATA/ATAPI-6 disks,
smartmontools-7.x for the ATA/ATAPI-7 standard, and so on. The "x" will
denote revision level, as bugs get found and fixed, and as enhancements
get added. If it's possible to maintain backwards compatibility, that
would be nice, but I don't know if it will be possible or
practical.</para>
</sect1>
<sect1 id="c">
<title>Why are you doing this?</title>
<para>My research group at U. Wisconsin - Milwaukee runs a beowulf
cluster - <ulink url="http://www.lsc-group.phys.uwm.edu/beowulf/medusa/">
<literal>http://www.lsc-group.phys.uwm.edu/beowulf/medusa/</literal></ulink>
- with 600 ATA-5 and -6 disks (300 IBM and 300 Maxtor). We have more
than 50 TB of data stored on the system. I also help out with a cluster
- <ulink url="http://pandora.aei.mpg.de/merlin/">
<literal>http://pandora.aei.mpg.de/merlin/</literal></ulink> - at the
Albert Einstein Institute that has another 300 IBM ATA-6 disks (36 TB
total). It's nice to have advanced warning when a disk is going to
fail.</para>
</sect1>
<sect1 id="d">
<title>I see some strange output from smartctl. What does it
mean?</title>
<para>The raw S.M.A.R.T. attributes (temperature, power-on lifetime, and
so on) are stored in vendor-specific structures. Sometime these are
strange. Hitachi disks (at least some of them) store power-on lifetime
in minutes, rather than hours (see next question below). IBM disks (at
least some of them) have three temperatures stored in the raw structure,
not just one. And so on. If you find strange output, or unknown
attributes, please send an e-mail to the mailing list and we'll help you
try and figure it out.</para>
</sect1>
<sect1 id="e">
<title>What Kernel Version is needed? (Linux)</title>
<para>Kernel versions 2.4.0 or later should work. We recommend the
latest 2.4 kernel.</para>
<para>Vanilla kernel.org 2.2.X kernels do not support the
HDIO_DRIVE_TASK ioctl(), which is needed for the ATA drive to execute
the ATA SMART RETURN STATUS command. So these kernels will not
work.</para>
<para>Vendor-supplied 2.2.X kernels, and vanilla 2.2.X kernels patched
with Andre Hedrick's IDE patches - <ulink url="http://www.funet.fi/pub/linux/kernel/people/hedrick/ide-2.2.20/">
<literal>http://www.funet.fi/pub/linux/kernel/people/hedrick/ide-2.2.20/</literal></ulink>
(also available from your local kernel.org mirror, not updated for
2.2.21 or later, and probably still containing a few bugs) may support
the needed ioctl().</para>
<para>If the configuration option CONFIG_IDE_TASK_IOCTL exists in your
2.2.X kernel source code tree, then your 2.2.X kernel will probably
support smartmontools. Note that this kernel configuration option does
<emphasis role="italic">not</emphasis> need to be enabled. Its presence
merely indicates that the required HDIO_DRIVE_TASK ioctl() is
supported.</para>
</sect1>
<sect1 id="f">
<title>What attributes does smartmontools not yet recognize?</title>
<para>From Maxtor disks (99), (100), (101)</para>
<para>If you can attach names/meanings to these attributes, please send
a note to the mailing list. If you have access to other SMART utilities
(especially manufacturer-specific ones, see below) and can send us
comparison output from smartctl and the other utility, that's especially
useful.</para>
</sect1>
<sect1 id="g">
<title>My Maxtor/Hitachi/Fujitsu disk is only a few days old, yet
smartctl reports its age (Attribute 9) as thousands of hours!</title>
<para>On some recent disks, Maxtor has started to use Attribute 9 to
store the lifetime in minutes rather than hours. In this case, use the
-m option (smartctl versions 5.0.X) or the --vendorattribute=9,minutes
(smartctl 5.1.X) option to correctly display hours and minutes.</para>
<para>Some models of Fujitsu disks are known to use Attribute 9 for
lifetime in seconds. In that case, use the --vendorattribute=9,seconds
option to correctly display hours, minutes and seconds.</para>
</sect1>
<sect1 id="h">
<title>The power-on timer (Attribute 9 raw value) on my Maxtor disk acts
strange.</title>
<para>There are three related problems with Maxtor's SMART
firmware:</para>
<para><emphasis role="bold">1 - </emphasis>On some disks from 2001/2002,
the raw value of Attribute 9 (Power On Time) is <emphasis
role="italic">supposed</emphasis> to be minutes. But it advances at an
unpredictable rate, always more slowly than one count per minute. One
(unconfirmed) theory is that when the disk is in idle mode, the counter
stops advancing. This is only supposed to happen in standby
mode.</para>
<para><emphasis role="bold">2 - </emphasis> In Maxtor disks that use the
raw value of Attribute 9 as a minutes counter, only two bytes (of the
six available) are used to store the raw value. So it resets to zero
once every 65536=2^16 minutes, or about once every 1092 hours. This is
fixed in all Maxtor disks manufactured after July 2003, where the raw
value was extended to four bytes.</para>
<para><emphasis role="bold">3 - </emphasis> In Maxtor disks that use the
raw value of Attribute 9 as a minutes counter, the hour time-stamps in
the self-test and ATA error logs are calculated by right shifting 6
bits. This is equivalent to dividing by 64 rather than by 60. As a
result, the hour time-stamps in these logs advance 7% more slowly than
they should. Thus, if you do self-tests once per week at the same time,
instead of the time-stamps being 168 hours apart, they are 157 hours
apart. This is also fixed in all Maxtor disks manufactured after July
2003.</para>
</sect1>
<sect1 id="i">
<title>Where can I find manufacturer-specific disk-testing
utilities?</title>
<para>A good listing of such utilities can be found at <ulink url="http://www.benchmarkhq.ru/english.html?/be_hdd2.html">
<literal>http://www.benchmarkhq.ru/english.html?/be_hdd2.html</literal></ulink>
. Unfortunately most of these are for MS operating systems, but most can
be run from an MS-DOS boot disk. Note: if you do run one of these
utilities, and it identifies the meanings of any SMART Attributes that
are not known to smartmontools, please report them to the mailing
list.</para>
<para>These utilities have an important role to fill. If your disk has
bad sectors (for example, as revealed by running self-tests with
smartmontools) and the disk is not able to recover the data from those
sectors, then the disk will <emphasis role="italic">not</emphasis>
automatically reallocate those damaged sectors from its set of spare
sectors, because forcing the reallocation to take place may entail some
loss of data. Because the commands that force such reallocation are
<emphasis role="italic">Vendor Specific</emphasis>, most manufacturers
provide a utility for this purpose. It may cause data loss but can
repair damaged sectors (at least, until it runs out of replacement
sectors).</para>
</sect1>
<sect1 id="j">
<title>When I run <emphasis role="tt">smartd</emphasis>,
the SYSLOG <emphasis role="tt">/var/log/messages</emphasis> contains
messages like this:</title>
<programlisting>
smartd: Reading Device /dev/sdv
modprobe: modprobe: Can't locate module block-major-65
</programlisting>
<para>This is because when <emphasis role="tt">smartd</emphasis> starts,
it looks for all ATA and SCSI devices to monitor (matching the pattern
<emphasis role="tt">/dev/hd[a-z]</emphasis> or <emphasis
role="tt">/dev/sd[a-z]</emphasis>). The log messages appear because
your system doesn't have most of these devices.</para>
<para>Recent releases of smartd can use a configuration file <emphasis
role="tt">smartd.conf</emphasis> to specify which devices to include or
exclude from start-up search.</para>
</sect1>
<sect1 id="k">
<title>What's the story on IBM SMART disks?</title>
<para>Apparently some of the older SMART firmware on IBM disks can
interfere with the regular operation of the disk. If you have this
problem, a firmware upgrade that fixes the problem is available at
<ulink url="http://www.geocities.com/dtla_update/">
<literal>http://www.geocities.com/dtla_update/</literal></ulink>
.</para>
</sect1>
<sect1 id="l">
<title>How can I check that the package hasn't been tampered
with?</title>
<para>Since the <emphasis role="tt">smartmontools</emphasis> utilities
run as root, you might be concerned about something harmful being
embedded within them. Starting with release 5.19 of <emphasis
role="tt">smartmontools</emphasis>, the .rpm files and tarball have been
GPG signed. (The tarball's fingerprint is given in the SourceForge
Release Notes.) Please verify these using the GPG Signing Key available
at <ulink url="http://smartmontools.sourceforge.net/SmartmontoolsSigningKey.txt">
<literal>http://smartmontools.sourceforge.net/SmartmontoolsSigningKey.txt</literal></ulink>
.</para>
</sect1>
<sect1 id="m">
<title>Is there a bootable standalone CD or floppy that contains
smartmontools?</title>
<para>If you have a system that is showing signs of disk trouble (for
example, it's unbootable and the console is full of disk error messages)
it can be handy to have a version of smartmontools that can be run off
of a bootable CD or floppy to examine the disk's SMART data and run
self-tests. This is also useful if you want to run Captive Self-Tests
(the <emphasis role="bold"><emphasis role="tt">-C</emphasis></emphasis>
option of <emphasis role="bold"><emphasis
role="tt">smartctl</emphasis></emphasis> ) on disks that can not easily
be unmounted, such as those hosting the Operating System files. Or you
can use this to run <emphasis role="tt">smartctl</emphasis> on computers
that don't use Linux as the day-to-day Operating System.</para>
<para>At present I am only aware of three such bootable disks:</para>
<itemizedlist>
<listitem>
<para>LNX-BBC Bootable CD - <ulink url="http://www.lnx-bbc.org/">
<literal>http://www.lnx-bbc.org/</literal></ulink></para>
</listitem>
<listitem>
<para>Stresslinux Bootable CD - <ulink url="http://www.stresslinux.org/">
<literal>http://www.stresslinux.org/</literal></ulink></para>
</listitem>
<listitem>
<para>RIP (Recovery Is Possible) Bootable CD/Floppy - <ulink url="http://www.tux.org/pub/people/kent-robotti/looplinux/rip/">
<literal>http://www.tux.org/pub/people/kent-robotti/looplinux/rip/</literal></ulink></para>
</listitem>
</itemizedlist>
<para> Please let us know if there are others, and we'll add them to
this list.</para>
</sect1>
<sect1 id="n">
<title>Can I monitor ATA disks behind SCSI RAID controllers?</title>
<para>From release 5.1-16, smartmontools supports 3ware SCSI RAID
controllers that use ATA disks internally. To pass commands through the
3ware controller, use the smartmontools <emphasis role="bold">-d
3ware,N</emphasis> option or Directive.</para>
<para>In smartmontools release 5.1-16, the SMART HEALTH STATUS
(smartmontools <emphasis role="bold">-H</emphasis>) is not returned
correctly for 3ware devices. In this release, the ENABLE AUTOMATIC
OFFLINE and ENABLE ATTRIBUTE AUTOSAVE commands (smartmontools <emphasis
role="bold">-o on</emphasis> and <emphasis role="bold">-S on</emphasis>)
are <emphasis role="italic">disabled</emphasis> for 3ware devices,
because at the time 5.1-16 was released, the 3w-xxxx driver could not
pass these commands through to the ATA disks.</para>
<para>Later smartmontools CVS code and releases <emphasis
role="italic">do</emphasis> correctly support <emphasis
role="italic">all</emphasis> of these commands. You may:</para>
<itemizedlist>
<listitem>
<para>Use version <emphasis role="bold">1.02.00.037</emphasis> or
greater of the 3w-xxxx driver, or</para>
</listitem>
<listitem>
<para>Patch earlier 3ware 3w-xxxx drivers with <ulink url="http://smartmontools.sourceforge.net/3w-xxxx.txt">
<literal>http://smartmontools.sourceforge.net/3w-xxxx.txt</literal></ulink>
so that these commands reach the disks, or</para>
</listitem>
<listitem>
<para>Use an <emphasis role="bold">unpatched</emphasis> earlier 3w-xxxx
driver (which won't pass these commands to the disks but will instead
print harmless warning messages to SYSLOG).</para>
</listitem>
</itemizedlist>
<para>Since smartmontools 3ware support is new, please report positive
or negative experiences to the mailing list, particularly for 64-bit
and/or big-endian architectures.</para></sect1>
<sect1 id="o">
<title>SCSI disks and tapes (TapeAlert)</title>
<para>smartmontools for SCSI disks and tapes (including medium changers)
is discussed at <ulink url="http://smartmontools.sourceforge.net/smartmontools_scsi.html">
<literal>http://smartmontools.sourceforge.net/smartmontools_scsi.html</literal></ulink>
.</para>
</sect1>
<sect1 id="p">
<title>FireWire, USB, and SATA disks/systems</title>
<para>As for USB and FireWire (ieee1394) disks and tape drives, the news
isn't good. They appear to Linux as SCSI devices but their
implementations do not usually support those SCSI commands needed by
smartmontools. The ieee1394 consortium recently certified the first
external enclosure (containing an ATA disk and a protocol bridge) as
being compliant to the relevant standards. Such devices have already
been on the market for about 3 years and they tend to only support the
bare minimum of commands needed for device operation (i.e. S.M.A.R.T.
support is an unsupported extra).</para>
<para>I'd be very grateful to find someone who could help me test the
smartmontools code on serial ATA (SATA) disks. They should appear as
normal ATA disks in Linux.</para>
</sect1>
<sect1 id="q">
<title>How does smartmontools differ from smartsuite?</title>
<para>The smartsuite code was originally developed as a Senior Thesis by
Michael Cornwell at the Concurrent Systems Laboratory (now part of the
Storage Systems Research Center - <ulink url="http://ssrc.soe.ucsc.edu/">
<literal>http://ssrc.soe.ucsc.edu/</literal></ulink>), Jack Baskin
School of Engineering, University of California, Santa Cruz. You can
find some information about the original smartsuite project here:</para>
<itemizedlist>
<listitem>
<para>Press Release 1 - <ulink url="http://www.ucsc.edu/news_events/press_releases/archive/99-00/09-99/smart_software.htm">
<literal>http://www.ucsc.edu/news_events/press_releases/archive/99-00/09-99/smart_software.htm</literal></ulink></para>
</listitem>
<listitem>
<para>Press Release 2 - <ulink url="http://www.santa-cruz.com/archive/1999/September/22/local/stories/5local.htm">
<literal>http://www.santa-cruz.com/archive/1999/September/22/local/stories/5local.htm</literal></ulink></para>
</listitem>
<listitem>
<para>Press Release 3 - <ulink url="http://www.ucsc.edu/currents/99-00/09-27/smart.html">
<literal>http://www.ucsc.edu/currents/99-00/09-27/smart.html</literal></ulink></para>
</listitem>
</itemizedlist>
<para>smartmontools was derived directly from smartsuite. It differs
from smartsuite in that it supports the ATA/ATAPI-5 standard. So for
example <emphasis role="tt">smartctl</emphasis> from smartsuite has no
facility for printing the SMART self-test logs, and doesn't print
timestamp information in the most usable way.</para>
<para>The <emphasis role="tt">smartctl</emphasis> utility in
smartmontools has added functionality for this (<emphasis role="tt">-q,
-l selftest,-S, -T, -v and -m</emphasis> options), updated
documentation, and also fixes small technical bugs in smartsuite. [One
example: smartsuite does not actually use the ATA SMART RETURN STATUS
command to find out the health status of a disk. It instead tries to
infer this from the SMART Attribute values.] See <ulink url="http://smartmontools.cvs.sourceforge.net/viewcvs.py/smartmontools/sm5/CHANGELOG?rev=HEAD&content-type=text/plain">
<literal>http://smartmontools.cvs.sourceforge.net/viewcvs.py/smartmontools/sm5/CHANGELOG?rev=HEAD&content-type=text/plain</literal></ulink> for a summary of what's been done.</para>
<para>The <emphasis role="tt">smartd</emphasis> utility differs from the
smartsuite <emphasis role="tt">smartd</emphasis> in major ways. First,
it prints somewhat more informative error messages to the syslog.
Second, on startup it looks for a configuration file <emphasis
role="tt">smartd.conf</emphasis>, and if <emphasis
role="tt">smartd</emphasis> finds this file, it monitors the list of
devices therein, rather than querying all IDE and SCSI devices on your
system. (If the configuration file does not exist, then it does query
all IDE and SCSI devices.) Also, it's a well-behaved daemon and doesn't
leave open file descriptors and other detritus behind.</para>
<para>In addition, the <emphasis role="tt">smartmontools</emphasis>
version of <emphasis role="tt">smartd</emphasis> can be instructed (via
Directives in the configuration file) to monitor for changes in a number
of different disk properties: the SMART status, failure or prefailure
attributes going below threshold, new errors appearing in the ATA Error
Log or the SMART Self-Test Log, and so on. <emphasis
role="tt">smartd</emphasis> can also send an e-mail warning or run a
user-specified executable if it detects a problem with the disk.</para>
<para>The other principal difference is that smartmontools is an
OpenSource development project, meaning that we keep the files in CVS,
and that other developers who wish to contribute can commit changes to
the archive. If you would like to contribute, please write to the
mailing-list.</para>
<para>But the bottom line is that the code in smartmontools is derived
directly from smartsuite and is similar. The smartsuite package can be
found at <ulink url="http://sourceforge.net/projects/smartsuite/">
<literal>http://sourceforge.net/projects/smartsuite/</literal></ulink>
.</para></sect1>
<sect1 id="r">
<title>Does it work on Windows?</title>
<para>Currently not, but we consider Cygwin - <ulink url="http://www.cygwin.com/">
<literal>http://www.cygwin.com/</literal></ulink> - the way to go,
where CVS compiles almost out of the box but still lacks any specific
code to make it work. Write to the mailing list if you're interested in
porting it. Someone already sent some S.M.A.R.T. code for Windows,
which may be of use. Porting to other platforms may be easier as well
now that any Linux specific code (like linux/hdreg.h) has been removed,
and more will be done soon in that direction.</para>
<para>A Cygwin port would probably only require an additional DLL,
cygwin1.dll, to run on plain Windows.</para></sect1>
<sect1 id="s">
<title>Why has the versioning scheme changed?</title>
<para>With the move to GNU Autoconf and GNU Automake it changed from
5.X-Y (where X and Y are one or more numbers) to 5.Y. This had to be
done because the -Y extension is used by distributions to almost always
denote a new build of the same version. So, the first version with that
change will be 5.19 and not 5.1-19.</para>
</sect1>
</article> |
/********************************************************************
created: 2012/02/04
created: 4:2:2012 11:24
filename: IDataSourceStatus.h
author: Mateusz Janiak
purpose: Obiekty pozwalające określić status danych - ich dostępnośc i użycie
*********************************************************************/
#ifndef HEADER_GUARD___IDATASOURCESTATUS_H__
#define HEADER_GUARD___IDATASOURCESTATUS_H__
namespace communication
{
	//! Storage status of the data. Values behave like a two-bit mask:
	//! high nibble (0x10) = "available locally", low nibble (0x01) = "available remotely".
	enum DataStorage {
		UnknownStorage = 0x00, /* Unknown status - problem on the server side, nothing available locally */
		Local = 0x10, /* Data fully available locally */
		Remote = 0x01, /* Data fully remote */
		PartiallyLocal = 0x11 /* Data partially available locally */
	};
	//! Usage status of the data inside the DataManager (DM). Same bitmask idea
	//! as DataStorage: 0x10 = loaded, 0x01 = not loaded, 0x11 = partially loaded.
	enum DataUsage {
		UnknownUsage = 0x00, /* Unknown status - the starting value while the final status is being computed; it must never be reported for real data */
		Loaded = 0x10, /* Data fully loaded into the DM */
		Unloaded = 0x01, /* Data not loaded into the DM */
		PartiallyLoaded = 0x11 /* Data partially loaded into the DM */
	};
	//! Interface that reports the status of individual elements of the data
	//! hierarchy, including artificially created ones.
	//! Storage state: Local, Remote, PartiallyLocal, Unknown (server/data-requirement problem).
	//! Usage state: Loaded, Unloaded, PartiallyLoaded.
	class IDataStatus
	{
	public:
		//! Virtual destructor - allows deletion through the interface pointer.
		virtual ~IDataStatus() {};
		//! \return Storage state of the data
		virtual const DataStorage storage() const = 0;
		//! \return Usage state of the data within the DM
		virtual const DataUsage usage() const = 0;
	};
}
#endif // HEADER_GUARD___IDATASOURCESTATUS_H__ |
# Walkthrough of Python's built-in data types: strings, booleans and numbers.
# Each section demonstrates one feature via print() output. The final
# statement deliberately raises ValueError to show a failed cast.
import math

# literal assignment
first = 'Dave'
last = "Mark"
# print(type(first))
# print(type(first) == str)
# print(isinstance(first, str))

# constructor function
# pizza = str("Pepperoni")
# print(type(pizza))
# print(type(pizza) == str)
# print(isinstance(pizza, str))

# concatenation
fullname = first + " " + last
print(fullname)
fullname += "!"
print(fullname)

# casting a number to a string
decade = str(1980)
print(type(decade))
print(decade)
statement = "I like rock music from the " + decade + "s."
print(statement)

# multiple lines
multiple = '''
Hey
i was just checking
all good
'''
print(multiple)

# escaping special characters
sentence = 'I\'m back at work!\tHey!\n\nWhere\'s this at\\located?'
print(sentence)

# string methods (strings are immutable: each call returns a new string)
print(first)
print(first.lower())
print(first.upper())
print(first)
print(multiple.title())
print(multiple.replace("good", "ok"))
print(multiple)
print(len(multiple))
multiple += " "
multiple = " " + multiple
print(len(multiple))
print(len(multiple.strip()))
print(len(multiple.lstrip()))
print(len(multiple.rstrip()))

# build a menu with padding/justification helpers
title = "menu".upper()
print(title.center(20, "="))
print("Coffee".ljust(16, ".") + "$1".rjust(4))
print("Muffin".ljust(16, ".") + "$2".rjust(4))
print("Cheesecake".ljust(16, ".") + "$4".rjust(4))
print("")

# string index value (negative indices count from the end; slices exclude the stop)
print(first[1])
print(first[-2])
print(first[1:-1])
print(first[1:])

# some methods return boolean data
print(first.startswith("D"))
print(first.endswith("Z"))
print("")

# boolean data type
myvalue = True
x = bool(False)
print(type(x))
print(isinstance(myvalue, bool))
print("")

# numeric data types
# integer types
price = 100
best_price = int(80)
print(type(price))
print(isinstance(best_price, int))
print("")

# float type
gpa = 3.28
y = float(1.14)
print(type(gpa))

# complex type
comp_value = 5+3j
print(type(comp_value))
print(comp_value.real)
print(comp_value.imag)

# built-in functions for numbers
print(abs(gpa))
print(abs(gpa * -1))
print(round(gpa))
print(round(gpa, 1))
print(math.pi)
print(math.sqrt(64))
print(math.ceil(gpa))
print(math.floor(gpa))

# casting a string to a number
zipcode = "10001"
zip_value = int(zipcode)
print(type(zip_value))

# error if you attempt to cast incorrect data: int() cannot parse a
# non-numeric string, so this raises ValueError and ends the script.
zip_value = int("New york")
package day03_review.demo03_面向对象.面向对象中.继承.继承性.作业2;
public class Cylinder extends Circle {
private double length;
public Cylinder(double radius, double length) {
super(radius);
this.length = length;
}
public double getLength() {
return length;
}
public void setLength(double length) {
this.length = length;
}
public double findVolume(){
// 体积, 圆面积*高
return super.findArea() * length;
}
@Override
public double findArea(){
// 计算圆柱的表面积
return 2 * Math.PI * getRadius() * length + 2 * super.findArea();
}
} |
import { readFileIntoLines } from "../../utils/readFile";
// Part I: how many distinct bag colors can (directly or transitively)
// contain at least one shiny gold bag?
describe("Day 7: Handy Haversacks", () => {
  describe("How many bag colors can eventually contain at least one shiny gold bag?", () => {
    test("Example", () => {
      const example = readFileIntoLines(`${__dirname}/fixtures/example.txt`);
      expect(countBagsThatCanContainOneShinyGoldBag(example)).toEqual(4);
    });
    test("Input", () => {
      const example = readFileIntoLines(`${__dirname}/fixtures/input.txt`);
      // Inline snapshot pins the accepted answer for the real puzzle input.
      expect(
        countBagsThatCanContainOneShinyGoldBag(example)
      ).toMatchInlineSnapshot(`316`);
    });
  });
});
// Part II: how many individual bags are required inside one shiny gold bag?
describe("Day 7: Handy Haversacks - Part II", () => {
  describe("How many individual bags are required inside your single shiny gold bag?", () => {
    test("Example", () => {
      const example = readFileIntoLines(
        `${__dirname}/fixtures/examplePart2.txt`
      );
      expect(countBagsInShinyGoldBag(example)).toEqual(126);
    });
    test("Input", () => {
      const example = readFileIntoLines(`${__dirname}/fixtures/input.txt`);
      // Inline snapshot pins the accepted answer for the real puzzle input.
      expect(countBagsInShinyGoldBag(example)).toMatchInlineSnapshot(`11310`);
    });
  });
});
// rule example: vibrant plum bags contain 5 faded blue bags, 6 dotted black bags.
// Raw rule lines, one per bag color.
type Rules = string[];
// A bag color, e.g. "shiny gold".
type Color = string;
// One containment requirement: `count` bags of `color`.
type Contain = {
  color: string;
  count: number;
};
type MustContain = Contain[];
// Maps a bag color to a lazy resolver of its full (transitively expanded)
// containment list.
type ParsedBagRules = Map<Color, () => MustContain>;
// Total number of individual bags required inside one shiny gold bag.
// The parsed rules already expand containment transitively, so a plain
// sum over the counts is sufficient.
function countBagsInShinyGoldBag(rules: Rules) {
  const bagRules = parseBagRules(rules);
  const contains = bagRules.get("shiny gold")?.() || [];
  let total = 0;
  for (const { count } of contains) {
    total += count;
  }
  return total;
}
// Number of bag colors whose (transitively expanded) contents include at
// least one shiny gold bag.
function countBagsThatCanContainOneShinyGoldBag(rules: Rules) {
  let total = 0;
  for (const [, getContains] of parseBagRules(rules)) {
    if (getContains().some((contain) => contain.color === "shiny gold")) {
      total += 1;
    }
  }
  return total;
}
// Parses one rule line into its subject color and direct containment list.
// Examples:
// - faded blue bags contain no other bags.
// - dark olive bags contain 3 faded blue bags, 4 dotted black bags.
function parseRule(ruleStr: string) {
  const [bagDescription, bagContainmentStr] = ruleStr.split("contain");
  const color = bagDescription.replace("bags", "").trim();

  let mustContain: Contain[] = [];
  if (!bagContainmentStr.includes("no other bags")) {
    mustContain = bagContainmentStr.split(",").map((part) => {
      // A part looks like " 3 faded blue bags" — colors are always two words.
      const [countStr, firstWord, secondWord] = part.trim().split(" ");
      return {
        color: `${firstWord} ${secondWord}`,
        count: Number(countStr),
      };
    });
  }

  return {
    color,
    mustContain,
  };
}
// Parses every rule into a map of color -> lazy containment resolver.
// Resolution must be lazy: a rule may reference colors whose own rules
// appear later in the input, so transitive expansion only works once the
// whole map has been built and a resolver is first invoked.
function parseBagRules(rulesAsStrings: Rules): ParsedBagRules {
  const bagRules: ParsedBagRules = new Map();
  for (const ruleStr of rulesAsStrings) {
    const currentRule = parseRule(ruleStr);
    // resolve contains dynamically when read for the first time
    // cache result for faster subsequent reads
    let resolvedContains!: MustContain;
    const getContains = (): MustContain => {
      // An empty array is truthy, so even "contains nothing" is cached.
      if (resolvedContains) return resolvedContains;
      return (resolvedContains = currentRule.mustContain.reduce<MustContain>(
        (acc, contain) => {
          // The direct requirement itself...
          acc.push(contain);
          // ...plus everything inside that bag, scaled by how many we hold.
          const subContains = bagRules.get(contain.color)?.() || [];
          subContains.forEach((subContain) =>
            acc.push({
              color: subContain.color,
              count: subContain.count * contain.count,
            })
          );
          return acc;
        },
        []
      ));
    };
    bagRules.set(currentRule.color, getContains);
  }
  return bagRules;
}
package pl.lodz.p.it.ssbd2023.ssbd02.web.selenide;
import static com.codeborne.selenide.Condition.attribute;
import static com.codeborne.selenide.Condition.text;
import static com.codeborne.selenide.Selenide.$;
import static com.codeborne.selenide.Selenide.$$;
import static com.codeborne.selenide.Selenide.localStorage;
import static com.codeborne.selenide.Selenide.open;
import static com.codeborne.selenide.Selenide.sleep;
import static com.codeborne.selenide.Selenide.webdriver;
import static com.codeborne.selenide.WebDriverConditions.urlContaining;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.equalTo;
import com.codeborne.selenide.Condition;
import com.codeborne.selenide.Configuration;
import com.codeborne.selenide.WebDriverRunner;
import java.util.Map;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.openqa.selenium.chrome.ChromeDriver;
import org.openqa.selenium.chrome.ChromeOptions;
/**
 * End-to-end Selenide test for use case MOK.19: an account becomes blocked
 * after repeated failed logins, and an administrator can re-activate it.
 */
class SelenideMOK19 {
  @BeforeAll
  public static void setUp() {
    // Chrome tuned for a local test run: no sandbox, self-signed certs
    // accepted, English locale so the text-based selectors below match.
    ChromeOptions options = new ChromeOptions();
    options.addArguments("start-maximized");
    options.addArguments("--no-sandbox");
    options.addArguments("--ignore-certificate-errors");
    options.addArguments("--lang=en");
    options.setExperimentalOption("prefs", Map.of("intl.accept_languages", "en"));
    WebDriverRunner.setWebDriver(new ChromeDriver(options));
    Configuration.baseUrl = "http://localhost:4200";
  }
  @AfterEach
  public void tearDown() {
    WebDriverRunner.closeWebDriver();
  }
  @Test
  void shouldBlockAccountAfterThreeAttempts() {
    open("/");
    // Fresh session: no auth artifacts in local storage yet.
    assertThat(localStorage().containsItem("token"), equalTo(false));
    assertThat(localStorage().containsItem("locale"), equalTo(false));
    assertThat(localStorage().containsItem("refreshToken"), equalTo(false));
    $(".title-text").shouldHave(text("Wooden Furniture"));
    $$("span").findBy(text("Login")).click();
    // Three wrong passwords should trigger the block on the "client" account.
    for (int i = 0; i < 3; i++) {
      failToAuthenticateWithWrongPassword();
    }
    // Fourth attempt: the account is blocked, so we must stay on /login.
    $$("input").findBy(attribute("data-placeholder", "login")).setValue("client");
    $$("input").findBy(attribute("data-placeholder", "password")).setValue("WrongPassword#!123");
    $(".mat-focus-indicator .login-button").click();
    webdriver().shouldHave(urlContaining("/login"));
    // Log in as admin and unblock the account from the admin panel.
    $$("input").findBy(attribute("data-placeholder", "login")).setValue("administrator");
    $$("input").findBy(attribute("data-placeholder", "password")).setValue("Student123!");
    $(".mat-focus-indicator .login-button").click();
    $$(".mat-icon").filterBy(Condition.text("menu")).first().click();
    $$(".mat-menu-item").filterBy(Condition.text("Admin Panel")).first().click();
    webdriver().shouldHave(urlContaining("/admin"));
    // NOTE(review): index 4 assumes the blocked "client" row position in the
    // seeded account table — confirm against the test fixtures.
    $$(".mat-icon").filterBy(Condition.text("remove_red_eye")).get(4).click();
    $$("small").filterBy(Condition.text("Account state")).first().shouldHave(text("Blocked"));
    $$(".mat-button").filterBy(Condition.text("Activate")).first().click();
    $$(".mat-raised-button").filterBy(Condition.text("Confirm")).first().click();
    // Fixed wait for the backend to process the activation before re-reading.
    sleep(1000);
    $$("small").filterBy(Condition.text("Account state")).first().shouldHave(text("Active"));
  }
  /** Submits the login form for "client" with a wrong password once. */
  private void failToAuthenticateWithWrongPassword() {
    $$("input").findBy(attribute("data-placeholder", "login")).setValue("client");
    $$("input").findBy(attribute("data-placeholder", "password")).setValue("WrongPassword#!123");
    $(".mat-focus-indicator .login-button").click();
    sleep(1000);
  }
}
import { Button, MantineProvider } from '@mantine/core'
import { useColorScheme } from '@mantine/hooks'
import type { ReactNode } from 'react'
import React, { Suspense, useEffect, useState } from 'react'
import { isDisconnectError } from '../../common/isDisconnectError'
import { onBackgroundDisconnection } from '../backgroundScriptConnection'
import { Alert } from './alert'
import { Panel } from './panel'
// Root devtools-panel component: configures the Mantine theme and renders
// either the panel or a disconnect alert once the background connection drops.
export function App() {
  const colorScheme = useColorScheme()
  const [isDisconnected, setIsDisconnected] = useState(false)
  useEffect(() => {
    // Flip to the disconnect UI when the background script goes away;
    // unsubscribe on unmount to avoid setting state on a dead component.
    const subscription = onBackgroundDisconnection.subscribe(() => setIsDisconnected(true))
    return () => subscription.unsubscribe()
  }, [])
  return (
    <MantineProvider
      theme={{
        colorScheme,
        globalStyles: () => ({
          body: {
            margin: 0,
          },
        }),
        cursorType: 'pointer',
        // This is the default for devtools on mac
        // https://github.com/ChromeDevTools/devtools-frontend/blob/92b3004cf9190eeb98a721ecb8c3931b45609031/front_end/ui/legacy/inspectorCommon.css#L86
        // TODO: adjust for other OS
        fontFamily: '".SFNSDisplay-Regular", "Helvetica Neue", "Lucida Grande", sans-serif',
        fontSizes: {
          xs: '11px',
          // Mantine uses the 'md' font size as a default, but some of its components is using 'sm'.
          // We want all font size to default to the same size, so let's use the same value for 'sm'
          // and 'md'.
          sm: '12px',
          md: '12px',
          lg: '16px',
          xl: '22px',
        },
        fontFamilyMonospace: 'menlo, monospace',
        other: {
          fontSizeMonospace: '11px',
        },
      }}
      withGlobalStyles
    >
      {/* ErrorBoundary catches panel crashes; Suspense covers lazy content. */}
      <ErrorBoundary>
        <Suspense fallback={<></>}>{isDisconnected ? <DisconnectAlert /> : <Panel />}</Suspense>
      </ErrorBoundary>
    </MantineProvider>
  )
}
function DisconnectAlert() {
return (
<Alert
level="error"
title="Extension disconnected!"
message="The extension has been disconnected. This can happen after an update."
button={<ReloadButton />}
/>
)
}
// Reloading the page re-initializes the devtools panel and its connections.
function ReloadButton() {
  const reload = () => location.reload()
  return <Button onClick={reload}>Reload extension</Button>
}
// Catches render-time errors below it. Disconnect errors get the dedicated
// alert; anything else shows a generic crash alert with a reload button.
class ErrorBoundary extends React.Component<{ children: ReactNode }, { error?: unknown }> {
  state = {}
  static getDerivedStateFromError(error: unknown) {
    return { error }
  }
  render() {
    // 'error' only exists in state after getDerivedStateFromError has run,
    // so its presence is the "has crashed" flag.
    if ('error' in this.state) {
      const error = this.state.error
      if (isDisconnectError(error)) {
        return <DisconnectAlert />
      }
      return (
        <Alert
          level="error"
          title="Extension crashed!"
          // Error instances already stringify as "Error: message"; other
          // thrown values get the prefix added manually.
          message={error instanceof Error ? String(error) : `Error: ${String(error)}`}
          button={<ReloadButton />}
        />
      )
    }
    return this.props.children
  }
}
import * as React from 'react';
import Button from '@mui/material/Button';
import Dialog from '@mui/material/Dialog';
import DialogActions from '@mui/material/DialogActions';
import DialogContent from '@mui/material/DialogContent';
// import DialogContentText from '@mui/material/DialogContentText';
import DialogTitle from '@mui/material/DialogTitle';
import Slide from '@mui/material/Slide';
import { TextField } from '@mui/material';
import addDeviceApi from '../../api/add_device';
import swal from 'sweetalert';
// Slide-up transition used by the dialog when it opens and closes.
const Transition = React.forwardRef(function Transition(props, ref) {
  return <Slide direction="up" ref={ref} {...props} />;
});
export default function AddDevice(props) {
const [deviceName, setDeviceName]= React.useState("")
const [open, setOpen] = React.useState(false);
const handleClickOpen = () => {
setOpen(true);
};
const handleClose = () => {
setOpen(false);
};
return (
<div>
<Button type={"button"} color={"primary"} variant={"contained"} onClick={handleClickOpen}>
Thêm thiết bị
</Button>
<Dialog
open={open}
TransitionComponent={Transition}
keepMounted
onClose={handleClose}
aria-describedby="alert-dialog-slide-description"
>
<DialogTitle>{"Thêm thiết bị"}</DialogTitle>
<DialogContent>
<TextField value={deviceName} onChange={(e)=> setDeviceName(e.target.value)} placeholder={"Tên thiết bị"} style={{height: 40, width: 400}} />
</DialogContent>
<DialogActions>
<Button onClick={handleClose}>Đóng</Button>
<Button variant={"contaiend"} onClick={async ()=> {
const result= await addDeviceApi(deviceName)
swal("Thông báo", "Tạo thành công thiết bị", "success")
.then(()=> props?.setChange(prev=> !prev))
.then(()=> handleClose())
}}>Tạo</Button>
</DialogActions>
</Dialog>
</div>
);
} |
import 'package:flutter/material.dart';
import 'package:skinsavvy/core/routes.dart';
import 'package:skinsavvy/presentation/pages/skincare_rec/models/skincare_rec_model.dart';
import 'package:skinsavvy/presentation/widgets/app_bar.dart';
import 'package:skinsavvy/presentation/widgets/button.dart';
import 'package:skinsavvy/services/shared_service.dart';
/// Shows the recommended skincare products as a scrollable list of cards,
/// with a pinned "Add to Routine" button that saves the whole selection.
class SkincareRecPage extends StatelessWidget {
  /// Products to display, in recommendation order.
  final List<ProductDetail> products;
  const SkincareRecPage({super.key, required this.products});
  @override
  Widget build(BuildContext context) {
    return Scaffold(
        appBar: appBar(context, 'Skincare Recommendation', 18, true, false),
        body: ListView(
          padding: const EdgeInsets.symmetric(horizontal: 24, vertical: 12),
          children: [
            ..._displayProducts(products),
          ],
        ),
        // Persistent call-to-action pinned below the list.
        bottomNavigationBar: Container(
            decoration: const BoxDecoration(
              color: Colors.white,
            ),
            padding: const EdgeInsets.symmetric(horizontal: 24, vertical: 12),
            child: Button(
                label: 'Add to Routine',
                onPressed: () {
                  // Persist the selection, then navigate to the home screen.
                  SharedService.saveData('saved_product', products);
                  Navigator.pushNamed(context, AppRoutes.home);
                })));
  }

  /// Builds one bordered card per product: thumbnail on the left, brand,
  /// name (max two lines) and price stacked on the right.
  List<Container> _displayProducts(List<ProductDetail> products) {
    List<Container> productContainers = [];
    for (var prod in products) {
      Container container = Container(
        padding: const EdgeInsets.all(8),
        width: double.infinity,
        margin: const EdgeInsets.only(bottom: 12),
        decoration: BoxDecoration(
          borderRadius: BorderRadius.circular(12),
          color: Colors.white,
          border: Border.all(
            color: Colors.grey.withOpacity(0.5),
            width: 1,
          ),
          boxShadow: [
            BoxShadow(
              color: Colors.grey.withOpacity(0.05),
              spreadRadius: 2,
              blurRadius: 12,
              offset: const Offset(0, 2),
            ),
          ],
        ),
        child: Row(
          crossAxisAlignment: CrossAxisAlignment.start,
          children: [
            // Product thumbnail, fetched over the network.
            Container(
              width: 80,
              height: 100,
              decoration: BoxDecoration(
                borderRadius: BorderRadius.circular(12),
                image: DecorationImage(
                  alignment: Alignment.center,
                  image: NetworkImage(prod.img),
                  fit: BoxFit.cover,
                ),
              ),
            ),
            Expanded(
              flex: 4,
              child: Container(
                padding: const EdgeInsets.only(left: 16),
                child: Column(
                  crossAxisAlignment: CrossAxisAlignment.start,
                  mainAxisAlignment: MainAxisAlignment.spaceBetween,
                  children: [
                    Text(
                      prod.brand,
                      style: const TextStyle(
                        fontSize: 14,
                        fontWeight: FontWeight.w600,
                      ),
                    ),
                    const SizedBox(height: 4),
                    Text(
                      prod.name,
                      style: const TextStyle(
                        fontSize: 12,
                        fontWeight: FontWeight.normal,
                      ),
                      maxLines: 2,
                      overflow: TextOverflow.ellipsis,
                    ),
                    const SizedBox(height: 4),
                    Text(
                      prod.price,
                      style: const TextStyle(
                        fontSize: 13,
                        fontWeight: FontWeight.w500,
                      ),
                    ),
                  ],
                ),
              ),
            ),
          ],
        ),
      );
      productContainers.add(container);
    }
    return productContainers;
  }
}
import Sortable from "../../node_modules/sortablejs/modular/sortable.complete.esm.js";
import { baseUrl, renderError, sendAPI } from "../../config.js";
/**
 * Task-board controller: renders per-person task lists (current / saved for
 * later / completed), supports adding, editing, drag-reordering, and opens a
 * results page per task. Persists everything through the REST API helpers.
 *
 * Fix in this revision: `classList.remove(".input-task-name")` passed the
 * selector dot, so the class was never removed and stale inputs stayed
 * matchable by later `document.querySelector(".input-task-name")` calls.
 * Also removed the unused private helper `#move`.
 */
class App {
  // Per-person task arrays keyed by position name.
  #data = {
    Shandilya: {
      currentTasks: [],
      savedForLaterTasks: [],
      completedTasks: [],
    },
    Aaditya: {
      currentTasks: [],
      savedForLaterTasks: [],
      completedTasks: [],
    },
  };
  #container;
  #addNewTaskButtons;

  constructor() {
    const self = this;
    // Async bootstrap: fetch all tasks, render, then wire up event handlers.
    const doEverything = async function () {
      try {
        self.#container = document.querySelector(".container");
        await self.#getTasks();
        self.#displayTasks();
        self.#enableDragging();
        self.#addNewTaskButtons = document.querySelectorAll(".add-new-task");
        self.#addNewTaskButtons.forEach((button) =>
          button.addEventListener("click", self.#addTask.bind(self))
        );
        self.#container.addEventListener("click", self.#showTasks);
        self.#container.addEventListener(
          "click",
          self.#goToResultsOrEdit.bind(self)
        );
      } catch (err) {
        renderError(self.#container, err);
        console.error(err);
      }
    };
    doEverything();
  }

  // Loads every person's task lists in parallel and fills #data with
  // { position -> tasks[] } objects built from the API responses.
  async #getTasks() {
    try {
      const requests = Object.keys(this.#data).map((key) =>
        sendAPI("GET", `${baseUrl}/positions/${key}`)
      );
      const responses = await Promise.all(requests);
      Object.keys(this.#data).forEach((key, i) => {
        this.#data[key] = Object.fromEntries(
          responses[i].data.tasks.map((el) => [el.position, el.tasks])
        );
      });
    } catch (err) {
      throw err;
    }
  }

  // Inserts an inline text input at the end of the person's current-task
  // list; saving happens on Enter via #saveTask.
  #addTask(e) {
    const person = e.target.closest(".row").querySelector(".name").textContent;
    const toDoTasks = e.target.closest(".row").querySelector(".tasks");
    const headingRow = e.target.closest(".row").querySelector(".list-group");
    const showButton = e.target.closest(".row").querySelector(".show-button");
    toDoTasks.classList.remove("hidden");
    showButton.textContent = "-";
    headingRow.insertAdjacentHTML(
      "beforeend",
      `<div class="row justify-content-center filtered">
<div class="col col-1 my-col">${
        this.#data[person].currentTasks.length + 1
      }</div>
<div class="col col-6 my-col"><input class="input-task-name" type="text" /></div>
</div>`
    );
    const inputTaskName = document.querySelector(".input-task-name");
    inputTaskName.focus();
    inputTaskName.addEventListener("keydown", this.#saveTask.bind(this));
  }

  // On Enter: persists the new task to the person's currentTasks, refreshes
  // the rendered list and creates an empty record document for the task.
  async #saveTask(e) {
    try {
      const inputTaskName = document.querySelector(".input-task-name");
      if (e.key !== "Enter" || !inputTaskName || inputTaskName.value === "")
        return;
      const person = inputTaskName
        .closest(".person-row")
        .querySelector(".name").textContent;
      const taskName = inputTaskName.value;
      this.#data[person].currentTasks.push(taskName);
      await sendAPI(
        "PATCH",
        `${baseUrl}/positions/${person}?position=currentTasks`,
        {
          tasks: this.#data[person].currentTasks,
        }
      );
      // #updateTasks renders newest-first, so flip the array before redraw.
      this.#data[person].currentTasks.reverse();
      inputTaskName.closest(".row").classList.add("hidden");
      this.#updateTasks(e);
      inputTaskName.value = "";
      // BUGFIX: pass the bare class token (no leading "."), otherwise the
      // class was never removed and the stale input stayed selectable.
      inputTaskName.classList.remove("input-task-name");
      await sendAPI("POST", `${baseUrl}/records`, {
        assignedTo: person,
        taskName: taskName,
        dates: [],
        works: [],
        startTimes: [],
        endTimes: [],
        timeElapsed: [],
        timeTaken: [],
        results: [],
        comments: [],
        status: [],
        grades: [],
      });
    } catch (err) {
      throw err;
    }
  }

  // Swaps a task row for an inline input; on Enter, renames the task across
  // positions, daily task documents and its record, then re-renders.
  #editTask(e) {
    const task = e.target.closest(".linkclass");
    const currentTask = task.querySelector(".task").textContent;
    const taskNumber = task.querySelector(".my-col").textContent;
    task.innerHTML = `
<div class="col col-1 my-col">${taskNumber}</div>
<div class="col col-6 my-col"><input class="input-task-name" type="text" value="${currentTask}" /></div>
</div>`;
    task.classList.add("filtered");
    task.classList.remove("linkclass");
    const inputTaskName = document.querySelector(".input-task-name");
    inputTaskName.focus();
    inputTaskName.addEventListener("keydown", async (e) => {
      try {
        const currentTarget = e.currentTarget;
        if (e.key !== "Enter" || !currentTarget || currentTarget.value === "")
          return;
        const closestListGroup = currentTarget.closest(".list-group");
        const person = closestListGroup.dataset.person;
        const position = closestListGroup.dataset.position;
        // Displayed order is reversed relative to storage order.
        this.#data[person][position][taskNumber - 1] = currentTarget.value;
        this.#data[person][position].reverse();
        // Rename the task inside every dated task document that mentions it.
        const res = (await sendAPI("GET", `${baseUrl}/tasks/${person}`)).data
          .requestedDateTask;
        res.forEach((task) => {
          const index = task.tasks.findIndex((el) => el === currentTask);
          task.tasks[index] = currentTarget.value;
        });
        const resArray = res.map((el) =>
          sendAPI(
            "PUT",
            `${baseUrl}/tasks/${person}/${el.date.split("/").join("-")}`,
            {
              assignedTo: person,
              date: el.date,
              tasks: el.tasks,
            }
          )
        );
        resArray.push(
          sendAPI("PATCH", `${baseUrl}/records/${person}/${currentTask}`, {
            taskName: currentTarget.value,
          })
        );
        resArray.push(
          sendAPI(
            "PATCH",
            `${baseUrl}/positions/${person}?position=${position}`,
            {
              tasks: this.#data[person][position].slice().reverse(),
            }
          )
        );
        await Promise.all(resArray);
        currentTarget.closest(".row").classList.add("hidden");
        this.#updateTasks(e);
        this.#data[person][position].reverse();
        currentTarget.value = "";
        // BUGFIX: bare class token (no leading ".") — see #saveTask.
        currentTarget.classList.remove("input-task-name");
      } catch (err) {
        throw err;
      }
    });
    inputTaskName.addEventListener("click", (e) => e.currentTarget.focus());
  }

  // Re-renders one list group from #data (stored order is reversed, so the
  // markup is built newest-first).
  #updateTasks(e) {
    const closestListGroup = e.target.closest(".list-group");
    if (!closestListGroup) return;
    let markup = "";
    const person = closestListGroup.dataset.person;
    const position = closestListGroup.dataset.position;
    this.#data[person][position].forEach((_, i) => {
      const currentIndex = this.#data[person][position].length - i - 1;
      markup += `<div class="row justify-content-center linkclass">
<div class="col col-1 my-col">${i + 1}</div>
<div class="col col-6 task my-col">${
        this.#data[person][position][currentIndex]
      }</div>
<div class="col col-1 my-col edit-col"><img src="edit.png" class="edit-icon"></div>
</div>`;
    });
    closestListGroup.innerHTML = markup;
  }

  // Builds the full board: one person row per key in #data, each with its
  // three position lists and an "add new task" button.
  #displayTasks() {
    let bigMarkup = [];
    Object.keys(this.#data).forEach((key) => {
      let markup = [];
      Object.keys(this.#data[key]).forEach((newKey) => {
        let littleMarkup = "";
        this.#data[key][newKey].forEach((task, i) => {
          littleMarkup += `<div class="row justify-content-center linkclass">
<div class="col col-1 my-col">${i + 1}</div>
<div class="col col-6 task my-col">${task}</div>
<div class="col col-1 my-col edit-col"><img src="edit.png" class="edit-icon"></div>
</div>`;
        });
        markup.push(littleMarkup);
      });
      bigMarkup.push(markup);
    });
    Object.keys(this.#data).forEach((key, i) => {
      let markup = "";
      const arr = Object.keys(this.#data[key]);
      bigMarkup[i].forEach((mark, index) => {
        // "currentTasks" -> "Current Tasks" for the section heading.
        const result = arr[index].slice().replace(/([A-Z])/g, " $1");
        markup += `
<div class="lists">
<div class="row justify-content-center">
${result.charAt(0).toUpperCase() + result.slice(1)}
</div>
<div class="row justify-content-center">
<div class="col col-1 my-col">Number</div>
<div class="col col-6 my-col">Task</div>
<div class="col col-1 my-col">Edit</div>
</div>
<div class="list-group" data-person=${key} data-position=${
          arr[index]
        }>
${mark}
</div>
</div>
`;
      });
      this.#container.insertAdjacentHTML(
        "beforeend",
        `
<div class="row person-row ${i ? "mt-5" : ""}">
<div class="col-1 column">
<button class="show-button">+</button>
</div>
<div class="col-2 name">${key}</div>
<div class="tasks hidden">
${markup}
</div>
<div class="col-3">
<button type="button" class="btn mt-2 add-new-task">
Add New Task for ${key}
</button>
</div>
`
      );
    });
  }

  // Makes every list group drag-sortable (within the same person) and
  // persists the new order after each drop.
  #enableDragging() {
    const lists = document.querySelectorAll(".list-group");
    lists.forEach((list) => {
      const self = this;
      const sortable = new Sortable(list, {
        animation: 150,
        filter: ".filtered",
        group: `shared-${list.dataset.person}`,
        async onSort(e) {
          try {
            const person = list.dataset.person;
            const position = list.dataset.position;
            self.#data[person][position] = Array.from(
              e.target.querySelectorAll(".row")
            )
              .map((node) => node.querySelector(".col-6").textContent)
              .reverse();
            self.#data[person][position].reverse();
            await sendAPI(
              "PATCH",
              `${baseUrl}/positions/${person}?position=${position}`,
              {
                tasks: self.#data[person][position],
              }
            );
            self.#data[person][position].reverse();
            self.#updateTasks(e);
          } catch (err) {
            throw err;
          }
        },
      });
    });
  }

  // Toggles visibility of a person's task lists via the +/- button.
  #showTasks(e) {
    const showButton = e.target.closest(".show-button");
    if (!showButton) return;
    const toDoTasks = showButton.closest(".row").querySelector(".tasks");
    toDoTasks.classList.toggle("hidden");
    if (showButton.textContent === "+") showButton.textContent = "-";
    else showButton.textContent = "+";
  }

  // Edit-icon clicks open the inline editor; clicks on a task row open the
  // task's results page in a new tab.
  #goToResultsOrEdit(e) {
    if (e.target.closest("div").classList.contains("edit-col")) {
      this.#editTask(e);
      return;
    }
    if (!e.target.closest(".list-group") || !e.target.closest(".linkclass"))
      return;
    const person = e.target
      .closest(".person-row")
      .querySelector(".name").textContent;
    const taskName = e.target
      .closest(".row")
      .querySelector(".task")
      .textContent.split(" ")
      .join("%");
    const url = `${window.location.href.split("/")[0]}//${
      window.location.href.split("/")[2]
    }/admin/tasks/course/index.html?${person}+${taskName}`;
    window.open(url, "_blank");
  }
}
// Bootstrap the task board as soon as the module is evaluated.
const app = new App();
/************ 1 - OFFICIAL JAVASCRIPT SITE ************/
/*
As a standard (ECMAScript), the language's official site is arguably: http://www.ecmascript.org/
However, that site only deals with standardization.
The language specification for developers lives at: https://developer.mozilla.org/en/JavaScript
*/
/*********************************************************/
/************ 2 - COMMENT TYPES ************/
// SINGLE-LINE COMMENTS
/*
COMMENTS
SPANNING
MULTIPLE
LINES
*/
/**************************************************/
/************ 3 - CREATING VARIABLES ************/
var variableVar = "Una variable usando VAR"; // VAR has been discouraged for years
let variableLet = "Una variable usando LET";
const CONSTANTE = "Una constante";
/***********************************************/
/************ 4 - PRIMITIVE DATA TYPES ************/
let tipoCadena = "Hola, soy una cadena de texto";
// number values can be integers or decimals
let tipoNumber = 42;
tipoNumber = 42.5;
let tipoBooleano = true;
let tipoIndefinido = undefined;
let tipoSimbolo = Symbol("mySymbol");
let tipoBigInt = 1234567890123456789012345678901234567890n;
let tipoNulo = null; // Although some examples classify it as a primitive, the Mozilla docs (https://developer.mozilla.org/es/docs/Glossary/Primitive) call it a "special case".
/*******************************************************/
/************ 5 - HELLO, JAVASCRIPT ************/
console.log("¡Hola, JavaScript!");
/**********************************************/
package tableau._014_Longest_Common_Prefix;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.MethodSource;
import java.util.stream.Stream;
import static org.junit.jupiter.api.Assertions.assertEquals;
/**
* @author jacka
* @version 1.0 on 2/17/2020
*/
class SolutionITest {
@ParameterizedTest
@MethodSource("solutionStream")
public void longestCommonPrefixSingleCommonMidString(Solution solution) {
final String[] input = {"flower", "flow", "flight"};
assertEquals("fl", solution.longestCommonPrefix(input));
}
@ParameterizedTest
@MethodSource("solutionStream")
public void longestCommon2(Solution solution) {
final String[] input = {"dog", "racecar", "car"};
assertEquals("", solution.longestCommonPrefix(input));
}
@ParameterizedTest
@MethodSource("solutionStream")
public void longestCommon3(Solution solution) {
final String[] input = {"", "b"};
assertEquals("", solution.longestCommonPrefix(input));
}
@ParameterizedTest
@MethodSource("solutionStream")
public void longestCommon4(Solution solution) {
final String[] input = {"a"};
assertEquals("a", solution.longestCommonPrefix(input));
}
static Stream<Solution> solutionStream() {
return Stream.of(
new SolutionI()
);
}
} |
package com.cydeo.tests.day15_data_driven_testing;
import com.cydeo.utils.ConfigurationReader;
import io.restassured.http.ContentType;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.ValueSource;
import static io.restassured.RestAssured.*;
import static org.hamcrest.Matchers.*;
import static org.hamcrest.MatcherAssert.*;
public class JUnitValueSourceTest {
@ParameterizedTest
@ValueSource(ints = {22, 56, 8, 45, 74, 123, 654 })
public void number(int num){
System.out.println("num = " + num);
assertThat(num, is(greaterThan(0)));
}
@ParameterizedTest
@ValueSource(strings = {"Vugar", "Shina", "Dzerassa", "Eda", "Kevin","Shahin","Nadir"} )
public void testNames(String name) {
System.out.println("Hi! "+ name);
assertThat(name, not(blankOrNullString()));
}
@BeforeAll
public static void setUp() {
baseURI = ConfigurationReader.getProperty("zipcode.api.url");
}
@ParameterizedTest
@ValueSource(ints = {22102, 22031, 22034, 11209, 15090, 15237,12345,20879,21224,33433})
public void zipCodeTest(int zipCode) {
//each time new zipcode value from ValueSource is assigned to zipCode variable
given().accept(ContentType.JSON)
.and().pathParam("postal-code", zipCode) //using zipCode as path parameter
.when().get("/us/{postal-code}")
.then().assertThat().statusCode(200)
.log().all();
}
} |
# coding: utf-8
"""
This module provides command-line access to pystache.
Run this script using the -h option for command-line help.
"""
# Locate a JSON implementation, oldest-Python-first (this file targets
# Python 2; note the three-argument `raise` below is Python 2-only syntax).
try:
    import json
except:
    # The json module is new in Python 2.6, whereas simplejson is
    # compatible with earlier versions.
    try:
        import simplejson as json
    except ImportError:
        # Raise an error with a type different from ImportError as a hack around
        # this issue:
        #   http://bugs.python.org/issue7559
        from sys import exc_info
        ex_type, ex_value, tb = exc_info()
        new_ex = Exception("%s: %s" % (ex_type.__name__, ex_value))
        # Python 2 three-argument raise: re-raise with the original traceback.
        raise new_ex.__class__, new_ex, tb
# The optparse module is deprecated in Python 2.7 in favor of argparse.
# However, argparse is not available in Python 2.6 and earlier.
from optparse import OptionParser
import sys
# We use absolute imports here to allow use of this script from its
# location in source control (e.g. for development purposes).
# Otherwise, the following error occurs:
#
# ValueError: Attempted relative import in non-package
#
from pystache.common import TemplateNotFoundError
from pystache.renderer import Renderer
USAGE = """\
%prog [-h] template context
Render a mustache template with the given context.
positional arguments:
template A filename or template string.
context A filename or JSON string."""
def parse_args(sys_argv, usage):
    """
    Parse command-line arguments.

    Arguments:
      sys_argv: the full argument list including the program name at
        index 0 (i.e. sys.argv).
      usage: the usage string passed to OptionParser.

    Returns the pair (template, context) of positional arguments.

    """
    args = sys_argv[1:]
    parser = OptionParser(usage=usage)
    options, args = parser.parse_args(args)
    # FIX: the docstring wrongly claimed an OptionParser was returned, and a
    # wrong argument count crashed with a bare unpacking ValueError.  Report
    # a proper usage error instead (prints usage, exits with status 2).
    if len(args) != 2:
        parser.error("expected exactly 2 arguments, got %d" % len(args))
    template, context = args
    return template, context
# TODO: verify whether the setup() method's entry_points argument
# supports passing arguments to main:
#
# http://packages.python.org/distribute/setuptools.html#automatic-script-creation
#
def main(sys_argv=sys.argv):
    """
    Render a mustache template from the command line and print the result.

    The template argument is first treated as a template name (loaded via
    Renderer.load_template); if not found, it is used as a literal template
    string.  The context argument is first treated as a JSON file path; if
    it cannot be opened, it is parsed as a JSON string.

    """
    template, context = parse_args(sys_argv, USAGE)
    if template.endswith('.mustache'):
        # Strip the '.mustache' suffix; the loader appends it itself.
        template = template[:-9]

    renderer = Renderer()

    try:
        template = renderer.load_template(template)
    except TemplateNotFoundError:
        # Fall back to treating the argument as a literal template string.
        pass

    try:
        # FIX: close the context file deterministically (was a bare open()
        # whose handle leaked until GC).
        with open(context) as f:
            context = json.load(f)
    except IOError:
        # Not a readable file: parse the argument itself as JSON.
        context = json.loads(context)

    rendered = renderer.render(template, context)

    # FIX: print(x) is valid in both Python 2 and 3 (was `print rendered`,
    # a SyntaxError under Python 3).
    print(rendered)
if __name__=='__main__':
main() |
import 'package:drift/drift.dart';
import 'package:expense_kit/model/database/database.dart';
import 'package:expense_kit/model/entity/expense_entity.dart';
/// Drift table definition for a single expense row.
class Expense extends Table {
  /// Auto-incrementing primary key.
  IntColumn get id => integer().autoIncrement()();

  /// Optional free-text description.
  TextColumn get description => text().nullable()();

  /// Monetary amount of the expense.
  RealColumn get amount => real()();

  /// Stored as the [ExpenseType] enum index.
  IntColumn get type => intEnum<ExpenseType>()();

  /// When the expense occurred; nullable.
  DateTimeColumn get date => dateTime().nullable()();

  /// Owning account's id.
  IntColumn get accountId => integer()();

  /// Category id of the expense.
  IntColumn get categoryId => integer()();

  /// Whether this row is an EMI installment (defaults to false).
  BoolColumn get isEMI => boolean().withDefault(const Constant(false))();

  /// Id of the parent EMI; -1 means "no EMI".
  IntColumn get emiId => integer().withDefault(const Constant(-1))();
}
/// Data-access helpers for the [Expense] table.
class ExpenseTable {
  /// Inserts [entity]; returns the auto-generated row id.
  Future insert(ExpenseEntity entity) async {
    final row = ExpenseCompanion.insert(
      description: Value(entity.description),
      amount: entity.amount,
      type: entity.type,
      date: Value(entity.dateTime ?? DateTime.now()),
      accountId: entity.accountId!,
      categoryId: entity.categoryId ?? 0,
    );
    return await database.into(database.expense).insert(row);
  }

  /// Deletes the row whose id matches [entity].
  Future remove(ExpenseEntity entity) async {
    return database.expense.deleteWhere((tbl) => tbl.id.isValue(entity.id));
  }

  /// Deletes every expense row belonging to the EMI [emiID].
  Future removeByEMI(int emiID) async {
    return database.expense.deleteWhere((tbl) => tbl.emiId.isValue(emiID));
  }

  /// Loads every expense row, mapped to [ExpenseEntity] values.
  Future<List<ExpenseEntity>> allExpenses() async {
    final rows = await database.select(database.expense).get();
    final entities = <ExpenseEntity>[];
    for (final row in rows) {
      entities.add(ExpenseEntity(
        id: row.id,
        description: row.description,
        amount: row.amount,
        type: row.type,
        dateTime: row.date,
        accountId: row.accountId,
        categoryId: row.categoryId,
      ));
    }
    return entities;
  }

  /// Expenses of the current calendar month.
  /// NOTE(review): despite the name, this filters by current year+month,
  /// not by "before today" — confirm which behavior is intended.
  Future<List<ExpenseEntity>> allExpensesBeforeToday() async {
    final query = database.select(database.expense)
      ..where(
        (row) =>
            row.date.year.equals(DateTime.now().year) &
            row.date.month.equals(DateTime.now().month),
      );
    final rows = await query.get();
    return [for (final row in rows) ExpenseEntity.fromMap(row.toJson())];
  }

  /// Selectable over the current month's expenses (same year+month filter).
  MultiSelectable<ExpenseData> expenseStream() {
    return database.select(database.expense)
      ..where(
        (row) =>
            row.date.year.equals(DateTime.now().year) &
            row.date.month.equals(DateTime.now().month),
      );
  }
}
import React from 'react'
import {
Checkbox,
FormControlLabel,
makeStyles,
Theme,
} from '@material-ui/core/'
// Styles injected into FormControlLabel: caption-sized label text pulled
// slightly closer to the control.
const useStyles = makeStyles((theme: Theme) => ({
  label: {
    ...theme.typography.caption,
    marginBottom: '-0.5em',
  },
}))
// Props for LcaCheckbox.  `label` and `labelPlacement` are not destructured
// explicitly below — they reach FormControlLabel via the rest spread.
interface Props {
  name: string
  label: string
  value: boolean
  // Called with a synthetic { target: { name, value } } event shape.
  onChange: (e: any) => void
  labelPlacement?: 'end'
}
/**
 * Checkbox with a caption-styled label placed on top by default.
 * Extra props (label, labelPlacement, ...) are forwarded to
 * FormControlLabel and may override the default placement.
 */
const LcaCheckbox = (props: Props) => {
  const { name, value, onChange, ...rest } = props
  const classes = useStyles({})

  // Re-shape the MUI callback into the { target: { name, value } } event
  // form that the surrounding form handlers expect.
  const handleCheck = (_event: any, checked: boolean) => {
    onChange({ target: { name, value: checked } })
  }

  return (
    <FormControlLabel
      labelPlacement="top"
      classes={classes}
      {...rest}
      control={<Checkbox name={name} checked={value} onChange={handleCheck} />}
    />
  )
}

export default LcaCheckbox
library IEEE;
use IEEE.Std_logic_1164.all;
use IEEE.Numeric_Std.all;
-- Testbench for the instruction-decode stage: writes three registers,
-- then drives a sequence of instruction words and lets the waveform
-- viewer show the decoded outputs.
entity decode_tb is
end;

architecture bench of decode_tb is

  component decode
    Port ( instruction : in std_logic_vector (31 downto 0);
           pc : in std_logic_vector (31 downto 0);
           write_data : in std_logic_vector (31 downto 0);
           write_enable : in std_logic;
           write_reg : in std_logic_vector (4 downto 0);
           out1 : out std_logic_vector (31 downto 0);
           out2 : out std_logic_vector (31 downto 0);
           sign_extended : out std_logic_vector(31 downto 0);
           next_pc : out std_logic_vector (31 downto 0);
           branch_taken : out std_logic );
  end component;

  -- Stimulus signals mirrored one-to-one onto the UUT ports.
  signal instruction: std_logic_vector (31 downto 0);
  signal pc: std_logic_vector (31 downto 0);
  signal write_data: std_logic_vector (31 downto 0);
  signal write_enable: std_logic;
  signal write_reg: std_logic_vector (4 downto 0);
  signal out1: std_logic_vector (31 downto 0);
  signal out2: std_logic_vector (31 downto 0);
  signal sign_extended: std_logic_vector(31 downto 0);
  signal next_pc: std_logic_vector (31 downto 0);
  signal branch_taken: std_logic;

begin

  uut: decode port map ( instruction => instruction,
                         pc => pc,
                         write_data => write_data,
                         write_enable => write_enable,
                         write_reg => write_reg,
                         out1 => out1,
                         out2 => out2,
                         sign_extended => sign_extended,
                         next_pc => next_pc,
                         branch_taken => branch_taken );

  stimulus: process
  begin

    -- Put initialisation code here
    -- NOTE(review): write_enable is driven '0' here and immediately '1' on
    -- the next line with no wait in between, so the '0' is never visible.
    pc <= x"00000000"; write_enable <= '0';

    -- Preload registers R1..R3 with 1..3 via the write port.
    write_data <= x"00000001"; write_reg <= "00001"; write_enable <= '1'; wait for 10 ns; write_enable <= '0';
    write_data <= x"00000002"; write_reg <= "00010"; write_enable <= '1'; wait for 10 ns; write_enable <= '0';
    write_data <= x"00000003"; write_reg <= "00011"; write_enable <= '1'; wait for 10 ns; write_enable <= '0';

    -- Put test bench stimulus code here
    -- One instruction word every 10 ns; mnemonics per the original author.
    instruction <= x"00008403"; wait for 10 ns; -- ADD R1, R0, 5
    instruction <= x"00000443"; wait for 10 ns; -- ADD R1, R2, R3
    instruction <= x"00010443"; wait for 10 ns; -- SUB R1, R2, R3
    instruction <= x"00018443"; wait for 10 ns; -- SUB R1, R2, 5
    instruction <= x"00020443"; wait for 10 ns; -- AND R1, R2, R3
    instruction <= x"00030443"; wait for 10 ns; -- OR R1, R2, R3
    instruction <= x"000404A2"; wait for 10 ns; -- LW R1, 5, R2
    instruction <= x"000504A2"; wait for 10 ns; -- SW R1, 5, R2
    instruction <= x"000604A2"; wait for 10 ns; -- JR R1, 5
    instruction <= x"000704A2"; wait for 10 ns; -- BEQZ R1, 5, R2

    wait;
  end process;

end;
import PropTypes from 'prop-types';
export const Contacts = ({ contacts, onContactDelete }) => {
if (contacts.length === 0) {
return <h4>No contacts avaliable</h4>;
}
return (
<ul>
{contacts.map(({ name, number, id }) => (
<li key={id}>
{name}: {number}{' '}
<button type="submit" onClick={() => onContactDelete(id)}>
Delete
</button>
</li>
))}
</ul>
);
};
Contacts.propTypes = {
contacts: PropTypes.array.isRequired,
onContactDelete: PropTypes.func.isRequired,
}; |
// SPDX-License-Identifier: ISC
pragma solidity >=0.8.19;
import { BaseScript } from "frax-std/BaseScript.sol";
import { console } from "frax-std/FraxTest.sol";
import "src/Constants.sol" as Constants;
import { ArbitrumDualOracle, ConstructorParams as ArbitrumDualOracleParams } from "src/ArbitrumDualOracle.sol";
/// @notice Deploys an ArbitrumDualOracle with the canonical Arbitrum
///         configuration and returns the deployment metadata.
/// @return _address            Address of the deployed oracle.
/// @return _constructorParams  ABI-encoded constructor params (for verification).
/// @return _contractName       Human-readable contract name.
function deployArbitrumDualOracle()
    returns (address _address, bytes memory _constructorParams, string memory _contractName)
{
    ArbitrumDualOracleParams memory _params = ArbitrumDualOracleParams({
        // = Timelock2Step
        timelockAddress: Constants.Arbitrum.TIMELOCK_ADDRESS,
        // = DualOracleBase
        // address(840) is a sentinel for USD (ISO 4217 currency code 840),
        // not a real token contract.
        baseToken0: address(840),
        baseToken0Decimals: 18,
        quoteToken0: Constants.Arbitrum.ARB_ERC20,
        quoteToken0Decimals: 18,
        baseToken1: address(840),
        baseToken1Decimals: 18,
        quoteToken1: Constants.Arbitrum.ARB_ERC20,
        quoteToken1Decimals: 18,
        // = UniswapV3SingleTwapOracle
        arbErc20: Constants.Arbitrum.ARB_ERC20,
        wethErc20: Constants.Arbitrum.WETH_ERC20,
        uniV3PairAddress: Constants.Arbitrum.ARB_ETH_UNI_V3_POOL,
        twapDuration: 15 minutes,
        // = ChainlinkOracleWithMaxDelay
        arbUsdChainlinkFeedAddress: Constants.Arbitrum.ARB_USD_CHAINLINK_ORACLE,
        // 5-minute grace on top of the feed's 1-day heartbeat.
        arbUsdChainlinkMaximumOracleDelay: 1 days + 5 minutes,
        // = EthUsdChainlinkOracleWithMaxDelay
        ethUsdChainlinkFeed: Constants.Arbitrum.ETH_USD_CHAINLINK_ORACLE,
        maxEthUsdOracleDelay: 1 days + 5 minutes
    });
    _constructorParams = abi.encode(_params);
    _contractName = "ArbitrumDualOracle";
    _address = address(new ArbitrumDualOracle(_params));
}
/// @notice Foundry script wrapper: broadcasts the ArbitrumDualOracle deployment.
contract DeployArbitrumDualOracle is BaseScript {
    /// @dev `broadcaster` (from BaseScript) signs and sends the deployment tx.
    function run()
        external
        broadcaster
        returns (address _address, bytes memory _constructorParams, string memory _contractName)
    {
        (_address, _constructorParams, _contractName) = deployArbitrumDualOracle();
    }
}
package ru.sladkkov.service;
import java.math.BigDecimal;
import java.time.LocalDate;
import java.time.Period;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import ru.sladkkov.dto.ScoringDataDto;
import ru.sladkkov.enums.EmploymentStatus;
import ru.sladkkov.enums.Gender;
import ru.sladkkov.enums.MartialStatus;
import ru.sladkkov.enums.PositionAtWork;
import ru.sladkkov.exception.custom.*;
@Service
@Slf4j
public class ScoringService {
@Value("${baseRate}")
private BigDecimal baseRate;
public BigDecimal scoringData(ScoringDataDto scoringDataDto) {
log.info("Скоринг занятости, rate: {}", baseRate);
BigDecimal rate = baseRate;
rate = rate.add(checkEmploymentStatus(scoringDataDto));
rate = rate.add(checkMartialStatus(scoringDataDto));
rate = rate.add(checkWorkPosition(scoringDataDto));
rate = rate.add(checkDependentAmount(scoringDataDto));
rate = rate.add(checkAgeWithGender(scoringDataDto));
rate = rate.add(checkEmployeeSalary(scoringDataDto));
rate = rate.add(calculateRate(scoringDataDto));
checkWorkExperience(scoringDataDto);
log.info("Скоринг занятости, rate: {}", rate);
return rate;
}
public BigDecimal checkEmploymentStatus(final ScoringDataDto scoringDataDto) {
BigDecimal additionalRate = BigDecimal.ZERO;
log.info("Скоринг занятости начался. Добавочный additionalRate: {}", additionalRate);
var employmentStatus = scoringDataDto.getEmploymentDto().getEmploymentStatus();
if (employmentStatus.equals(EmploymentStatus.UNEMPLOYED)) {
log.error("Рабочий статус клиента - безработный. Отказ.");
throw new UnemployedException("Отказ по причине безработный", new IllegalAccessException());
}
if (employmentStatus.equals(EmploymentStatus.EMPLOYED)) {
additionalRate = additionalRate.add(BigDecimal.valueOf(0.1));
}
if (employmentStatus.equals(EmploymentStatus.BUSINESS_OWNER)) {
additionalRate = additionalRate.add(BigDecimal.valueOf(0.3));
}
log.info("Скоринг занятости закончился. Добавочный additionalRate: {}", additionalRate);
return additionalRate;
}
public BigDecimal checkMartialStatus(final ScoringDataDto scoringDataDto) {
BigDecimal additionalRate = BigDecimal.ZERO;
log.info("Скоринг семейного положения начался. Добавочный additionalRate: {}", additionalRate);
var maritalStatus = scoringDataDto.getMaritalStatus();
if (maritalStatus.equals(MartialStatus.MARRIED)) {
additionalRate = additionalRate.subtract(BigDecimal.valueOf(0.3));
}
if (maritalStatus.equals(MartialStatus.WIDOWED)) {
additionalRate = additionalRate.subtract(BigDecimal.valueOf(0.5));
}
if (maritalStatus.equals(MartialStatus.DIVORCED)) {
additionalRate = additionalRate.add(BigDecimal.valueOf(0.3));
}
log.info(
"Скоринг семейного положения закончился. Добавочный additionalRate: {}", additionalRate);
return additionalRate;
}
public BigDecimal checkWorkPosition(final ScoringDataDto scoringDataDto) {
BigDecimal additionalRate = BigDecimal.ZERO;
log.info("Скоринг рабочей позиции начался. Добавочный additionalRate: {}", additionalRate);
var position = scoringDataDto.getEmploymentDto().getPosition();
if (position.equals(PositionAtWork.MIDDLE_MANAGER)) {
additionalRate = additionalRate.subtract(BigDecimal.valueOf(0.2));
}
if (position.equals(PositionAtWork.TOP_MANAGER)) {
additionalRate = additionalRate.subtract(BigDecimal.valueOf(0.4));
}
log.info("Скоринг рабочей позиции закончился. Добавочный additionalRate: {}", additionalRate);
return additionalRate;
}
public BigDecimal checkAgeWithGender(final ScoringDataDto scoringDataDto) {
BigDecimal additionalRate = BigDecimal.ZERO;
log.info("Скоринг возраста и пола начался. Добавочный additionalRate: {}", additionalRate);
Period period =
Period.between(
scoringDataDto.getLoanApplicationRequestDto().getBirthday(), LocalDate.now());
var years = period.getYears();
if (years < 20) {
log.error("Возраст сотрудника не подходит под условия кредита. Отказ.");
throw new AgeIsLessException(
"Возраст не подходит под условия кредита. Отказ.", new IllegalAccessException());
}
if (years > 60) {
log.error("Возраст сотрудника не подходит под условия кредита. Отказ.");
throw new AgeIsMoreException(
"Возраст не подходит под условия кредита {}. Отказ.", new IllegalAccessException());
}
if (!scoringDataDto.getGender().equals(Gender.MALE) && years > 30 || years < 55) {
additionalRate = additionalRate.subtract(BigDecimal.valueOf(0.3));
}
if (scoringDataDto.getGender().equals(Gender.FEMALE) && years > 35 || years < 60) {
additionalRate = additionalRate.subtract(BigDecimal.valueOf(0.3));
}
log.info("Скоринг возраста и пола закончился. Добавочный additionalRate: {}", additionalRate);
return additionalRate;
}
public void checkWorkExperience(final ScoringDataDto scoringDataDto) {
log.info("Скоринг опыта работы начался");
var workExperienceTotal = scoringDataDto.getEmploymentDto().getWorkExperienceTotal();
if (workExperienceTotal < 12) {
log.error(
"Недостаточно общего опыта работы, заявка отклонена workExperienceTotal {} < 12",
workExperienceTotal);
throw new WorkExperienceNotEnoughException(
"Недостаточно общего опыта работы для одобрения кредита.", new IllegalAccessException());
}
if (scoringDataDto.getEmploymentDto().getWorkExperienceCurrent() < 3) {
log.error(
"Недостаточно текущего опыта работы, заявка отклонена workExperienceTotal {} < 3",
workExperienceTotal);
throw new WorkExperienceNotEnoughException(
"Недостаточно текущего опыта работы для одобрения кредита.",
new IllegalAccessException());
}
log.info("Скоринг опыта работы закончился");
}
public BigDecimal checkDependentAmount(final ScoringDataDto scoringDataDto) {
BigDecimal additionalRate = BigDecimal.ZERO;
log.info(
"Скоринг количества иждивенцов начался. Добавочный additionalRate: {}", additionalRate);
if (scoringDataDto.getDependentAmount() > 1) {
additionalRate = additionalRate.add(BigDecimal.valueOf(0.1));
}
log.info(
"Скоринг количества иждивенцов закончился. Добавочный additionalRate: {}", additionalRate);
return additionalRate;
}
public BigDecimal checkEmployeeSalary(final ScoringDataDto scoringDataDto) {
BigDecimal additionalRate = BigDecimal.ZERO;
log.info("Скоринг зарплаты начинается. Добавочный additionalRate: {}", additionalRate);
if (scoringDataDto
.getLoanApplicationRequestDto()
.getAmount()
.compareTo(
scoringDataDto.getEmploymentDto().getSalary().multiply(BigDecimal.valueOf(20)))
>= 0) {
log.error("Размер кредита превышает 20 зарплат. Отказ");
throw new CreditIsVeryBigException(
"Размер кредита превышает 20 зарплат", new IllegalAccessException());
}
log.info("Скоринг зарплаты закончился. Добавочный additionalRate: {}", additionalRate);
return additionalRate;
}
public BigDecimal calculateRate(final ScoringDataDto scoringDataDto) {
BigDecimal additionalRate = BigDecimal.ZERO;
log.info("Скоринг начинается. Добавочный additionalRate: {}", additionalRate);
if (Boolean.TRUE.equals(scoringDataDto.getIsInsuranceEnabled())) {
additionalRate = additionalRate.subtract(BigDecimal.valueOf(0.3));
}
if (Boolean.TRUE.equals(scoringDataDto.getIsSalaryClient())) {
additionalRate = additionalRate.subtract(BigDecimal.valueOf(0.1));
}
log.info("Скоринг закончился. Добавочный additionalRate: {}", additionalRate);
return additionalRate;
}
} |
<?php
declare(strict_types=1);
require_once dirname(__FILE__) . '/functions.php';
try {
    // Nothing to search for: stop before touching the database.
    // FIX: is_string() guard — an array value (?title[]=x) would make
    // trim() throw a TypeError under strict_types.
    // NOTE(review): this `return` also skips the HTML below, so a missing
    // title renders a blank page — confirm that is intended.
    if (!isset($_GET['title']) || !is_string($_GET['title']) || trim($_GET['title']) === '') {
        return;
    }

    $pdo = connect();
    // Partial-match search; escapeLike() (functions.php) is expected to
    // escape the LIKE wildcards with '#' to match ESCAPE '#'.
    $statement = $pdo->prepare("SELECT * FROM books WHERE title LIKE :title ESCAPE '#' ORDER BY published DESC");
    $statement->bindValue(':title', '%' . escapeLike($_GET['title']) . '%', PDO::PARAM_STR);
    $statement->execute();
} catch (PDOException $e) {
    echo '本の検索に失敗しました';
    return;
}
?>
<!DOCTYPE html>
<!-- FIX: page content is Japanese, so declare lang="ja" (was "en"). -->
<html lang="ja">
<head>
<meta charset="UTF-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>SELECTの実行</title>
</head>
<body>
<h3>タイトルに「<?= escape($_GET['title']) ?>」を含む書籍の検索結果</h3>
<table border="1">
<tr>
<th>タイトル</th>
<th>価格</th>
<th>発売日</th>
</tr>
<?php while ($row = $statement->fetch(PDO::FETCH_ASSOC)) : ?>
<tr>
<td><?= escape($row['title']) ?></td>
<td><?= escape(number_format($row['price'])) ?>円</td>
<td><?= escape($row['published']) ?></td>
</tr>
<?php endwhile; ?>
</table>
</body>
</html>
"use client"
import DogForm from './dog-form';
import DogList from './dog-list';
import DogData from './dogs.json';
import { useState } from 'react';
export default function DogPage() {
const [dogList, setDogList] = useState(
DogData.map( (dog) => ({...dog}) )
);
const handleCreateDog = (newDog) => {
// dogList.push(newDog); // don't do this, we cannot mutate state variables directly
setDogList( [...dogList, newDog] );
}
const [isDogFormOpen, setDogFormOpen] = useState(false);
const openDogForm = () => setDogFormOpen(true);
const closeDogForm = () => setDogFormOpen(false);
return(
<main>
{
isDogFormOpen && (
<DogForm closeFormFunc={closeDogForm} onCreateDog={handleCreateDog} />
)
}
<h1 className='text-3xl font-bold text-center'>Dogs for Adoption</h1>
<DogList listOfDogs={dogList} />
<div className='text-center my-5'>
<button onClick={openDogForm} className="py-2 px-4 rounded-sm bg-blue-600 hover:bg-blue-500 text-white">Open Dog Form</button>
</div>
</main>
);
} |
//
// UserPermissions.swift
//
//
// Created by Stuart A. Malone on 3/22/23.
//
import Foundation
/// UserPermissions control which parts of the user interface are enabled,
/// and which server operations the user can perform.
///
/// UserPermissions are not stored in the database so they can evolve over time
/// without having to migrate any data.
public struct UserPermissions: OptionSet {
    /// Backing bitmask; one bit per permission below.
    public let rawValue: Int

    public init(rawValue: Int) {
        self.rawValue = rawValue
    }

    /// May rename the course.
    public static let renameCourse = UserPermissions(rawValue: 1 << 0)
    /// May edit the course layout.
    public static let editLayout = UserPermissions(rawValue: 1 << 1)
    /// May grant roles to other users.
    public static let grantRoles = UserPermissions(rawValue: 1 << 2)
    /// May view the course.
    public static let viewCourse = UserPermissions(rawValue: 1 << 3)
    /// May undo and redo edits.
    public static let undoRedo = UserPermissions(rawValue: 1 << 4)
    /// May delete the course.
    public static let deleteCourse = UserPermissions(rawValue: 1 << 5)
    /// May drop marks on the course.
    public static let dropMarks = UserPermissions(rawValue: 1 << 6)
    /// May set the finish flag.
    public static let setFinishFlag = UserPermissions(rawValue: 1 << 7)

    /// Convenience empty set: no permissions granted.
    public static let none: UserPermissions = []
}
import sys
import zmq
import time
import asyncio
import zmq.asyncio
import threading
import argparse
import logging
import numpy as np
from datetime import datetime
from collections import deque
import aioconsole
ctx = zmq.asyncio.Context()
async def receive_data(writer, addr, port):
    """Subscribe to the ZMQ PUB socket at tcp://addr:port and forward data
    lines to *writer*.

    Only messages whose first character is ']' are forwarded (with the
    marker stripped); everything else is discarded.
    """
    zmq_socket = ctx.socket(zmq.SUB)
    zmq_socket.setsockopt(zmq.SUBSCRIBE, b"")  # subscribe to everything
    zmq_socket.connect(f"tcp://{addr}:{port}")
    try:
        while True:
            line = (await zmq_socket.recv()).decode('utf-8')
            # FIX: startswith() — `line[0]` raised IndexError on an empty
            # message.
            if line.startswith("]"):
                writer.write(line[1:].strip() + "\n")
    except KeyboardInterrupt:
        pass
    finally:
        # FIX: release the socket on exit/cancellation (was never closed).
        zmq_socket.close()
async def read_stdin_send_cmd(reader, addr, port):
    """Forward each line read from *reader* (stdin) to the command PUSH
    socket at tcp://addr:port, until EOF."""
    push_sock = ctx.socket(zmq.PUSH)
    push_sock.connect(f"tcp://{addr}:{port}")
    try:
        # readline() returns b"" on EOF, which ends the loop.
        while (line := await reader.readline()):
            await push_sock.send(line.strip() + b"\n")
    except KeyboardInterrupt:
        pass
async def main(args):
    # Bridge stdin/stdout to the MCU: one task pushes typed commands, the
    # other prints received data lines; both run until interrupted.
    reader, writer = await aioconsole.get_standard_streams()
    await asyncio.gather(read_stdin_send_cmd(reader, args.addr, args.push_port), receive_data(writer, args.addr, args.pub_port))
if __name__ == "__main__":
    logging.basicConfig(format='%(asctime)s %(message)s', datefmt='%m/%d/%Y %I:%M:%S %p', level=logging.INFO)
    # Command-line options: server address plus the two ports (PUB for data
    # we subscribe to, PUSH for commands we send).
    parser = argparse.ArgumentParser(description="Sending and receiving commands to/from auto-alignment MCU.")
    parser.add_argument("--addr", dest='addr', metavar='ADDR', type=str,
                        help="Address to the receiver zmq server.", default="127.0.0.1")
    parser.add_argument("--pub-port", dest='pub_port', metavar='PUB_PORT', type=int,
                        help="Port to the message publisher of the receiver zmq server.", default=5333)
    parser.add_argument("--push-port", dest='push_port', metavar='PUSH_PORT', type=int,
                        help="Push port to the command transmitter of the receiver zmq server.", default=5334)
    args = parser.parse_args()
    try:
        asyncio.run(main(args))
    except KeyboardInterrupt:
        # Quiet exit on Ctrl-C.
        pass
/*
Copyright 2022 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package scaffolds
import (
"fmt"
log "github.com/sirupsen/logrus"
pluginutil "sigs.k8s.io/kubebuilder/v3/pkg/plugin/util"
"sigs.k8s.io/kubebuilder/v3/pkg/plugins/common/kustomize/v2/scaffolds/internal/templates/config/crd/patches"
"sigs.k8s.io/kubebuilder/v3/pkg/config"
"sigs.k8s.io/kubebuilder/v3/pkg/machinery"
"sigs.k8s.io/kubebuilder/v3/pkg/model/resource"
"sigs.k8s.io/kubebuilder/v3/pkg/plugins"
"sigs.k8s.io/kubebuilder/v3/pkg/plugins/common/kustomize/v2/scaffolds/internal/templates/config/certmanager"
"sigs.k8s.io/kubebuilder/v3/pkg/plugins/common/kustomize/v2/scaffolds/internal/templates/config/kdefault"
"sigs.k8s.io/kubebuilder/v3/pkg/plugins/common/kustomize/v2/scaffolds/internal/templates/config/webhook"
)
// Compile-time check that webhookScaffolder satisfies plugins.Scaffolder.
var _ plugins.Scaffolder = &webhookScaffolder{}

// webhookScaffolder scaffolds the kustomize manifests needed to enable
// webhooks for a single resource.
type webhookScaffolder struct {
	config   config.Config
	resource resource.Resource

	// fs is the filesystem that will be used by the scaffolder
	fs machinery.Filesystem

	// force indicates whether to scaffold files even if they exist.
	force bool
}
// NewWebhookScaffolder returns a new Scaffolder for v2 webhook creation operations
func NewWebhookScaffolder(config config.Config, resource resource.Resource, force bool) plugins.Scaffolder {
return &webhookScaffolder{
config: config,
resource: resource,
force: force,
}
}
// InjectFS implements cmdutil.Scaffolder by storing the filesystem the
// scaffolder will write to.
func (s *webhookScaffolder) InjectFS(fs machinery.Filesystem) {
	s.fs = fs
}
// Scaffold implements cmdutil.Scaffolder
func (s *webhookScaffolder) Scaffold() error {
log.Println("Writing kustomize manifests for you to edit...")
// Initialize the machinery.Scaffold that will write the files to disk
scaffold := machinery.NewScaffold(s.fs,
machinery.WithConfig(s.config),
machinery.WithResource(&s.resource),
)
if err := s.config.UpdateResource(s.resource); err != nil {
return fmt.Errorf("error updating resource: %w", err)
}
if err := scaffold.Execute(
&kdefault.WebhookCAInjectionPatch{},
&kdefault.ManagerWebhookPatch{},
&webhook.Kustomization{Force: s.force},
&webhook.KustomizeConfig{},
&webhook.Service{},
&certmanager.Certificate{},
&certmanager.Kustomization{},
&certmanager.KustomizeConfig{},
&patches.EnableWebhookPatch{},
&patches.EnableCAInjectionPatch{},
); err != nil {
return fmt.Errorf("error scaffolding kustomize webhook manifests: %v", err)
}
kustomizeFilePath := "config/default/kustomization.yaml"
err := pluginutil.UncommentCode(kustomizeFilePath, "#- ../webhook", `#`)
if err != nil {
hasWebHookUncommented, err := pluginutil.HasFragment(kustomizeFilePath, "- ../webhook")
if !hasWebHookUncommented || err != nil {
log.Errorf("Unable to find the target #- ../webhook to uncomment in the file "+
"%s.", kustomizeFilePath)
}
}
err = pluginutil.UncommentCode(kustomizeFilePath, "#- path: manager_webhook_patch.yaml", `#`)
if err != nil {
hasWebHookUncommented, err := pluginutil.HasFragment(kustomizeFilePath, "- path: manager_webhook_patch.yaml")
if !hasWebHookUncommented || err != nil {
log.Errorf("Unable to find the target #- path: manager_webhook_patch.yaml to uncomment in the file "+
"%s.", kustomizeFilePath)
}
}
crdKustomizationsFilePath := "config/crd/kustomization.yaml"
err = pluginutil.UncommentCode(crdKustomizationsFilePath, "#- path: patches/webhook", `#`)
if err != nil {
hasWebHookUncommented, err := pluginutil.HasFragment(crdKustomizationsFilePath, "- path: patches/webhook")
if !hasWebHookUncommented || err != nil {
log.Errorf("Unable to find the target(s) #- path: patches/webhook/* to uncomment in the file "+
"%s.", crdKustomizationsFilePath)
}
}
err = pluginutil.UncommentCode(crdKustomizationsFilePath, "#configurations:\n#- kustomizeconfig.yaml", `#`)
if err != nil {
hasWebHookUncommented, err := pluginutil.HasFragment(crdKustomizationsFilePath, "- kustomizeconfig.yaml")
if !hasWebHookUncommented || err != nil {
log.Errorf("Unable to find the target(s) #configurations:\n#- kustomizeconfig.yaml to uncomment in the file "+
"%s.", crdKustomizationsFilePath)
}
}
return nil
} |
import 'package:flutter/material.dart';
/// Password text field with show/hide toggle; reports every change
/// through [onPasswordChange].
class PasswordInput extends StatefulWidget {
  /// Current password value (not used to seed the field's text).
  final String password;

  /// Called with the new text on every keystroke.
  final Function(String) onPasswordChange;

  const PasswordInput(
      {Key? key, required this.password, required this.onPasswordChange})
      : super(key: key);

  @override
  State<PasswordInput> createState() => _PasswordInputState();
}
class _PasswordInputState extends State<PasswordInput> {
  // Whether the password characters are hidden.
  bool isObscure = true;

  @override
  Widget build(BuildContext context) {
    return Padding(
        padding: const EdgeInsets.symmetric(vertical: 10),
        child: Theme(
          data: Theme.of(context).copyWith(
            colorScheme: ThemeData().colorScheme.copyWith(
                  primary: Colors.black87,
                ),
          ),
          child: TextFormField(
            maxLength: 40,
            onChanged: (value) {
              widget.onPasswordChange(value);
            },
            validator: (value) {
              // FIX: null-safe check — `value!.length` threw when the form
              // validated with a null value.
              if (value == null || value.length < 4) {
                // FIX: grammar — "contraseña" is feminine ("corta").
                return 'Contraseña demasiado corta';
              }
              return null;
            },
            obscureText: isObscure,
            decoration: InputDecoration(
              fillColor: Colors.white,
              filled: true,
              counterText: "",
              contentPadding: const EdgeInsets.symmetric(
                vertical: 20.0,
                horizontal: 10.0,
              ),
              focusedBorder: const OutlineInputBorder(
                borderSide: BorderSide(
                    color: Color.fromARGB(184, 194, 194, 255),
                    width: 3.0,
                    style: BorderStyle.solid),
                borderRadius: BorderRadius.all(
                  Radius.circular(15.0),
                ),
              ),
              enabledBorder: const OutlineInputBorder(
                borderSide: BorderSide(
                    color: Color.fromARGB(184, 194, 194, 255),
                    width: 3.0,
                    style: BorderStyle.solid),
                borderRadius: BorderRadius.all(
                  Radius.circular(15.0),
                ),
              ),
              prefixIcon: const Icon(
                Icons.lock,
              ),
              suffixIcon: Padding(
                padding: const EdgeInsetsDirectional.only(end: 12.0),
                child: TextButton(
                  onPressed: () {
                    // FIX: removed leftover debug print().
                    setState(() {
                      isObscure = !isObscure;
                    });
                  },
                  child: const Icon(
                    Icons.remove_red_eye_outlined,
                  ),
                ),
              ),
              hintText: 'Password',
              hintStyle: const TextStyle(
                fontSize: 14.0,
                color: Colors.black38,
              ),
            ),
          ),
        ));
  }
}
import React from "react";
// Redux
import { useSelector, useDispatch } from "react-redux";
import { Icon as LegacyIcon } from "@ant-design/compatible";
// Design
import { Layout } from "antd";
import "./header.css";
// Views
import AccountMenu from "./views/AccountMenu";
import Notifications from "./views/Notifications";
// Design (const)
const { Header } = Layout;
const NavHeader = () => {
const dispatch = useDispatch();
const userInfo = useSelector(state => state.info.userInfo);
const menuCollapsed = useSelector(state => state.menu.menuCollapsed);
return (
<Header style={{ background: "#fff", padding: 0 }}>
<LegacyIcon
className="trigger"
type={menuCollapsed ? "menu-unfold" : "menu-fold"}
onClick={() => dispatch({ type: "MENU_COLLAPSE" })}
/>
<div className="right-header-menu">
<Notifications />
<AccountMenu name={"Logged User"} />
</div>
</Header>
);
};
export default NavHeader; |
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.0;
/// @notice Minimal ERC-20 surface used by this contract (pull transfers).
interface IERC20 {
    function transferFrom(address sender, address recipient, uint256 amount) external returns (bool);
}
interface INFT {
function transferFrom(address from, address to, uint256 tokenId) external;
}
contract fourthSector {
address public admin;
struct Claim {
uint256 points;
string ipfsUri;
bool verified;
address tokenAddress;
uint256 tokenId; // For NFTs
}
struct Account {
uint256 totalAttributionPoints;
bool isWorker;
Claim[] claims;
bool isSponsor;
bool isSupporter;
}
mapping(address => bool) public validNFTContracts; // Track valid NFT contracts
mapping(address => Account) public accounts;
address[] public accountList;
event AccountUpdated(address account, bool isWorker);
event ClaimSubmitted(address account, uint256 points, string ipfsUri);
event SponsorshipSubmitted(address sponsor, uint256 points, string ipfsUri);
event SupportClaimSubmitted(address supporter, uint256 points, string ipfsUri);
event ClaimVerified(address account, uint256 claimIndex, bool verified);
event SponsorshipReceived(address sponsor, uint256 amount, address tokenAddress);
event SupportReceived(address supporter, address nftAddress, uint256 tokenId);
event PointsModified(address account, uint256 claimIndex, uint256 newPoints);
event ValidNFTContractAdded(address nftAddress);
event ValidNFTContractRemoved(address nftAddress);
constructor(address _admin) {
admin = _admin;
}
modifier onlyAdmin() {
require(msg.sender == admin, "Only admin can call this function");
_;
}
function addValidNFTContract(address _nftAddress) public onlyAdmin {
validNFTContracts[_nftAddress] = true;
emit ValidNFTContractAdded(_nftAddress);
}
function removeValidNFTContract(address _nftAddress) public onlyAdmin {
validNFTContracts[_nftAddress] = false;
emit ValidNFTContractRemoved(_nftAddress);
}
function updateAccount(address _account, bool _isWorker) public onlyAdmin {
ensureAccountListed(_account);
Account storage account = accounts[_account];
account.isWorker = _isWorker;
emit AccountUpdated(_account, _isWorker);
}
// Workers submit claims with IPFS URI
function submitClaim(uint256 _points, string memory _ipfsUri) public {
require(accounts[msg.sender].isWorker, "Not a worker");
accounts[msg.sender].claims.push(Claim(_points, _ipfsUri, false, address(0), 0));
emit ClaimSubmitted(msg.sender, _points, _ipfsUri);
}
// Verify worker claim
function verifyWorkerClaim(address _account, uint256 _claimIndex, bool _verified) public {
require(accounts[_account].isWorker, "Not a worker");
require(_claimIndex < accounts[_account].claims.length, "Invalid claim index");
Claim storage claim = accounts[_account].claims[_claimIndex];
claim.verified = _verified;
if (_verified) {
// Add points to total attribution points if verified
accounts[_account].totalAttributionPoints += claim.points;
}
emit ClaimVerified(_account, _claimIndex, _verified);
}
function sponsor(uint256 _points, string memory _ipfsUri, address _tokenAddress, uint256 _tokenAmount) public payable {
ensureAccountListed(msg.sender);
Account storage sponsorAccount = accounts[msg.sender];
sponsorAccount.isSponsor = true;
if (_tokenAddress == address(0)) {
require(msg.value > 0, "Must send ETH with sponsorship");
emit SponsorshipReceived(msg.sender, msg.value, address(0));
} else {
IERC20 token = IERC20(_tokenAddress);
require(token.transferFrom(msg.sender, address(this), _tokenAmount), "Token transfer failed");
emit SponsorshipReceived(msg.sender, _tokenAmount, _tokenAddress);
}
sponsorAccount.claims.push(Claim(_points, _ipfsUri, false, _tokenAddress, 0));
}
function support(uint256 _points, string memory _ipfsUri, address _nftAddress, uint256 _tokenId) public {
require(validNFTContracts[_nftAddress], "NFT is not from a valid contract");
INFT nft = INFT(_nftAddress);
nft.transferFrom(msg.sender, address(this), _tokenId);
ensureAccountListed(msg.sender);
Account storage supporterAccount = accounts[msg.sender];
supporterAccount.isSupporter = true;
supporterAccount.claims.push(Claim(_points, _ipfsUri, false, _nftAddress, _tokenId));
emit SupportReceived(msg.sender, _nftAddress, _tokenId);
emit SupportClaimSubmitted(msg.sender, _points, _ipfsUri);
}
function verifyOrModifyClaim(address _account, uint256 _claimIndex, uint256 _newPoints, bool _verified) public onlyAdmin {
Account storage account = accounts[_account];
require(_claimIndex < account.claims.length, "Invalid claim index");
Claim storage claim = account.claims[_claimIndex];
// Verify or modify only if the account is a sponsor or a supporter
require(account.isSponsor || account.isSupporter, "Account is neither a sponsor nor a supporter");
claim.points = _newPoints;
claim.verified = _verified;
if (_verified) {
account.totalAttributionPoints += _newPoints;
}
emit PointsModified(_account, _claimIndex, _newPoints);
emit ClaimVerified(_account, _claimIndex, _verified);
}
function ensureAccountListed(address _account) internal {
if (!isAccountListed(_account)) {
accountList.push(_account);
}
}
function isAccountListed(address _account) internal view returns (bool) {
for (uint i = 0; i < accountList.length; i++) {
if (accountList[i] == _account) {
return true;
}
}
return false;
}
function calculateShare(address _account) public view returns (uint256) {
uint256 totalPoints = 0;
for (uint i = 0; i < accountList.length; i++) {
totalPoints += accounts[accountList[i]].totalAttributionPoints;
}
if (totalPoints == 0) {
return 0;
}
return (accounts[_account].totalAttributionPoints * 100) / totalPoints;
}
function getAccountsAndShares() public view returns (address[] memory, uint256[] memory) {
uint256 totalPoints = 0;
for (uint i = 0; i < accountList.length; i++) {
totalPoints += accounts[accountList[i]].totalAttributionPoints;
}
address[] memory addresses = new address[](accountList.length);
uint256[] memory shares = new uint256[](accountList.length);
for (uint i = 0; i < accountList.length; i++) {
addresses[i] = accountList[i];
shares[i] = totalPoints > 0 ? (accounts[accountList[i]].totalAttributionPoints * 100000000) / totalPoints : 0;
}
return (addresses, shares);
}
} |
const mongoose = require('mongoose');
const _ = require('underscore');
// Sanitizes a team name: HTML-escape then trim surrounding whitespace.
const setName = (name) => _.escape(name).trim();

// The six stat names shared by the IV and EV fields of every member.
const STAT_NAMES = ['HP', 'attack', 'defense', 'special_attack', 'special_defense', 'speed'];

/**
 * Builds the field-definition object for a single team member.
 * The original file copy-pasted this ~85-line structure six times
 * (member1..member6); generating it once per member keeps the six
 * sub-schemas identical by construction. Field set and order are
 * unchanged: identity fields, then IVs, then EVs, then moves.
 */
const buildMemberFields = () => {
  const fields = {
    species: { type: String, required: true, trim: true },
    nickname: { type: String, trim: true },
    level: { type: Number, required: true },
    ability: { type: String, required: true },
    nature: { type: String, required: true },
    held_item: { type: String }, // optional
  };
  STAT_NAMES.forEach((stat) => {
    fields[`${stat}_IVs`] = { type: Number, required: true };
  });
  STAT_NAMES.forEach((stat) => {
    fields[`${stat}_EVs`] = { type: Number, required: true };
  });
  ['move_1', 'move_2', 'move_3', 'move_4'].forEach((move) => {
    fields[move] = { type: String }; // moves are optional
  });
  return fields;
};

// A team: a sanitized name, up to six members, an owning Account and a
// creation timestamp.
const TeamSchema = new mongoose.Schema({
  name: {
    type: String,
    required: true,
    trim: true,
    set: setName,
  },
  team: {
    member1: buildMemberFields(),
    member2: buildMemberFields(),
    member3: buildMemberFields(),
    member4: buildMemberFields(),
    member5: buildMemberFields(),
    member6: buildMemberFields(),
  },
  owner: {
    type: mongoose.Schema.ObjectId,
    required: true,
    ref: 'Account',
  },
  createdDate: {
    type: Date,
    default: Date.now,
  },
});

// Public shape returned by the API for a team document (owner omitted).
TeamSchema.statics.toAPI = (doc) => ({
  name: doc.name,
  team: doc.team,
});

const TeamModel = mongoose.model('Team', TeamSchema);
module.exports = TeamModel;
// src/pages/CharacterDetailsPage.jsx
import React, { useEffect, useState } from 'react';
import { useParams } from 'react-router-dom';
import axios from 'axios';
import { useDispatch, useSelector } from 'react-redux';
import { addFavorite, removeFavorite, selectFavorites } from '../store/favoritesSlice';
const CharacterDetailsPage = () => {
const { id } = useParams();
const [character, setCharacter] = useState(null);
const dispatch = useDispatch();
const favorites = useSelector(selectFavorites);
useEffect(() => {
const fetchCharacterDetails = async () => {
try {
const response = await axios.get(`https://rickandmortyapi.com/api/character/${id}`);
setCharacter(response.data);
} catch (error) {
console.error('Error fetching character details:', error);
}
};
fetchCharacterDetails();
}, [id]);
const toggleFavorite = () => {
const isCharacterFavorite = favorites.some((favCharacter) => favCharacter.id === parseInt(id));
if (isCharacterFavorite) {
dispatch(removeFavorite(parseInt(id)));
} else {
dispatch(addFavorite({
id: parseInt(id),
name: character.name,
image: character.image,
//karakter özellikleri
}));
alert('Karakter favorilere eklendi!');
}
};
return (
<div>
<h2>Karakter Detayları</h2>
{character && (
<div>
<img src={character.image} alt={character.name} />
<h2>{character.name}</h2>
<p>Status: {character.status}</p>
<p>Species: {character.species}</p>
<button onClick={toggleFavorite}>
{favorites.some((favCharacter) => favCharacter.id === parseInt(id)) ?
'Favoriden Çıkart' : 'Ekle'}
</button>
</div>
)}
</div>
);
};
export default CharacterDetailsPage; |
import { Component } from 'react';
import pdfMake from 'pdfmake/build/pdfmake';
import dayjs from 'dayjs';
import { vfs } from './vfs_fonts.js';
import * as tableStructures from './columns';
import { tableLayout, fonts } from './settings';
import { getTaskTotalPriceHelper } from 'helpers';
/**
 * "PDF export" button: builds a pdfMake document from the tasks passed in
 * via props and triggers a download. Column structure comes from ./columns,
 * layout/fonts from ./settings, embedded fonts from ./vfs_fonts.
 */
class ListExport extends Component<{
  tasks: any;
  columns: any;
  stage: any;
  startDate?: Date;
  endDate?: Date;
}> {
  // Accumulated inside genRow() while rows are generated; rendered as the
  // final "total" row by generateContent(). Order matters: genRow must be
  // fully consumed before the total row is pushed.
  totalPrice = 0;
  // Human-readable report title; assigned in export() before rendering.
  stage: string | undefined;

  // Builds the pdfMake `content` array: a title line plus the task table.
  generateContent() {
    var { accessors, headers, widths } = tableStructures.getTableStructure(
      this.props.stage
    );
    const table = {
      table: {
        widths,
        body: [
          headers,
          // Spreading the generator here consumes it fully, which also
          // finishes accumulating this.totalPrice.
          ...[...this.genRow(this.props.tasks, accessors)]
            // sort by increasing SerialNumber
            .sort((a: any, b: any) => {
              if (a[1] < b[1]) {
                return -1;
              }
              if (a[1] > b[1]) {
                return 1;
              }
              return 0;
            }),
        ],
      },
    };
    // Renumber the first column after sorting (row 0 is the header row).
    for (let i = 0; i < table.table.body.length; i++) {
      if (i === 0) continue;
      table.table.body[i][0] = i;
    } // for column # make right count
    // add TotalPrice
    table.table.body.push([
      { text: '', colSpan: accessors.length - 1 }, // empty columns
      ...Array(accessors.length - 2).fill({ text: '', border: 'none' }), // required by pdkMake
      {
        alignment: 'right',
        // Thousands separated by spaces instead of commas.
        text: `€${Math.round(this.totalPrice)
          .toLocaleString()
          .replace(/,/g, ' ')}`,
      },
    ]);
    return [
      { text: this.stage, fontSize: 30, alignment: 'center', margin: [5, 4] },
      table,
    ];
  }

  // Shared style for emphasized, centered cell text.
  boldText = (text: string) => ({
    text,
    fontSize: 16,
    alignment: 'center',
    bold: true,
  });

  // Generator yielding one table row per task. Each accessor maps to a
  // specifically formatted cell; 'state.price' cells also accumulate
  // this.totalPrice as a side effect.
  *genRow(tasks: any, accessors: any) {
    for (let i = 0; i < tasks.length; i++) {
      const row: any = [];
      accessors.forEach((acc: string) => {
        switch (acc) {
          case 'createdDate':
            row.push({
              text: dayjs(tasks[i][acc]).format('DDMMMYYYY'),
            });
            break;
          case 'title':
            // Task title links back to the Bitrix24 task view.
            row.push({
              text: tasks[i][acc],
              color: 'blue',
              link: `${process.env.REACT_APP_B24_HOST}/company/personal/user/${process.env.REACT_APP_B24_USER_ID}/tasks/task/view/${tasks[i].id}/`,
            });
            break;
          case 'state.article':
            row.push({
              ...this.boldText(tasks[i].state.article),
              alignment: 'left',
            });
            break;
          case 'state.certificate':
            // One linked line per attached file.
            const files = tasks[i]['UF_TASK_WEBDAV_FILES'] || [];
            row.push(
              files.map((file: any) => ({
                text: `${file.NAME}`,
                decoration: 'underline',
                color: 'blue',
                link: `${process.env.REACT_APP_B24_HOST}${file.VIEW_URL}`,
              }))
            );
            break;
          case 'state.standardsResult':
            // Resume line colored by overall result, then one colored line
            // per individual standard.
            const colorMap: {
              [key: string]: any;
            } = {
              pass: 'green',
              fail: 'red',
              undefined: 'black',
            };
            const results = [
              {
                text: 'Result: ' + (tasks[i].state.resume || 'none'),
                color: colorMap[tasks[i].state.resume],
              },
            ];
            for (let [standard, result] of Object.entries(
              tasks[i].state.standardsResult
            )) {
              results.push({
                text: `${standard} - ${result}`,
                color: result === 'pass' ? 'green' : 'red',
              });
              // if (standard === 'EN 11612') {
              //   const passes = [];
              //   const fails = [];
              //   for (let [prop, val] of Object.entries(tasks[i].state.EN11612Detail)) {
              //     val === 'pass' ? passes.push(prop) : fails.push(prop);
              //   }
              //   results.push({ text: `PASS - (${passes})`, color: 'green' });
              //   results.push({ text: `PASS - (${fails})`, color: 'red' });
              // }
            }
            row.push(results);
            break;
          case 'state.price':
            // Side effect: feed the grand total used by generateContent().
            this.totalPrice += Number(getTaskTotalPriceHelper(tasks[i].state));
            row.push({
              alignment: 'right',
              text:
                tasks[i].state.price === ''
                  ? '€ 0'
                  : `€${Math.round(getTaskTotalPriceHelper(tasks[i].state))
                      .toLocaleString()
                      .replace(/,/g, ' ')}`,
            });
            break;
          default:
            // Accessors prefixed "state." read from task.state; anything
            // else reads the task itself, falling back to ''.
            acc.includes('state.') && !!tasks[i].state[acc.substring(6)]
              ? row.push(tasks[i].state[acc.substring(6)])
              : row.push(tasks[i][acc] || '');
        }
      });
      yield row;
    }
  }

  // Click handler: resolves the report title, builds the document
  // definition and triggers the pdfMake download.
  export = () => {
    switch (this.props.stage) {
      case 'overdue':
        this.stage = `Overdue Certifications in Testing Lab (on ${dayjs().format(
          'DD.MM.YYYY'
        )})`;
        break;
      default:
        this.stage = this.props.stage;
    }
    let docDefinition = {
      content: this.generateContent(),
      defaultStyle: {
        fontSize: 12,
        font: 'Arial',
      },
      pageOrientation: 'landscape',
      pageSize: 'A3',
    };
    return pdfMake
      .createPdf(
        // @ts-ignore
        docDefinition,
        tableLayout,
        fonts,
        vfs
      )
      .download(this.getFilename());
  };

  // "start - end" date-range filename when both dates are given,
  // otherwise a stage-based filename.
  getFilename = () =>
    this.props.startDate && this.props.endDate
      ? `Certification list for ${dayjs(this.props.startDate).format(
          'DDMMMYYYY'
        )} - ${dayjs(this.props.endDate).format('DDMMMYYYY')}.pdf`
      : `Certification list - ${this.props.stage}.pdf`;

  render() {
    return (
      <button className="btn btn-sm btn-outline-success" onClick={this.export}>
        PDF export
      </button>
    );
  }
}
export { ListExport }
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<link rel="stylesheet" href="bootstrap/css/bootstrap.css">
<link rel="stylesheet" href="css/style.css">
<title>Clase #3: Grid y Breakpoints</title>
</head>
<body class="bg-dark text-light">
<!-- Fixed top navbar: brand link plus an offcanvas drawer listing every class page. -->
<header class="mb-5 clearfix">
    <nav class="navbar navbar-light fixed-top" style="background-color: #e3f2fd;">
        <div class="container-fluid">
            <a class="navbar-brand border rounded border border-light bg-opacity-50 bold" style="background-color: #e3f2fd;" href="#">Proyectos Bootstrap</a>
            <button class="navbar-toggler" type="button" data-bs-toggle="offcanvas" data-bs-target="#offcanvasNavbar" aria-controls="offcanvasNavbar">
                <span class="navbar-toggler-icon"></span>
            </button>
            <!-- Offcanvas drawer opened by the toggler above. -->
            <div class="offcanvas offcanvas-end" tabindex="-1" id="offcanvasNavbar" aria-labelledby="offcanvasNavbarLabel">
                <div class="offcanvas-header">
                    <h5 class="offcanvas-title text-dark" id="offcanvasNavbarLabel">Proyectos</h5>
                    <button type="button" class="btn-close text-reset" data-bs-dismiss="offcanvas" aria-label="Close"></button>
                </div>
                <div class="offcanvas-body">
                    <ul class="navbar-nav justify-content-end flex-grow-1 pe-3">
                        <li class="nav-item">
                            <a class="nav-link active" aria-current="page" href="index.html">Home</a>
                        </li>
                        <!-- Dropdown with one entry per class page. -->
                        <li class="nav-item dropdown">
                            <a class="nav-link dropdown-toggle" href="#" id="offcanvasNavbarDropdown" role="button" data-bs-toggle="dropdown" aria-expanded="false">
                                Proyecto 1
                            </a>
                            <ul class="dropdown-menu" aria-labelledby="offcanvasNavbarDropdown">
                                <li><a class="dropdown-item" href="clase4.html">Clase: Columnas y Gutters</a></li>
                                <li><a class="dropdown-item" href="clase5.html">Clase: Reboot e Imagenes</a></li>
                                <li><a class="dropdown-item" href="clase6.html">Clase: Tablas</a></li>
                                <li><a class="dropdown-item" href="clase7.html">Clase: Formularios</a></li>
                                <li><a class="dropdown-item" href="clase8.html">Clase: Helpers</a></li>
                                <li><a class="dropdown-item" href="clase9.html">Clase: Utilidades</a></li>
                            </ul>
                        </li>
                        <li class="nav-item">
                            <a class="nav-link" href="proyectofinal.html">Proyecto Final</a>
                        </li>
                    </ul>
                </div>
            </div>
        </div>
    </nav>
</header>
<!-- Empty spacer section below the fixed navbar. -->
<section class="mt-5">
    <div class="container">
        <div class="row justify-content-start">
            <div class="col-10">
            </div>
        </div>
    </div>
</section>
<!-- Presentación Placeholders -->
<section>
    <div class="container mt-5">
        <div class="row justify-content-start">
            <div class="col-10">
                <h1 class="text-primary">Placeholders</h1>
                <p>Se usan los place holders en componentes o paginas para indicar que algo continua cargando.</p>
            </div>
        </div>
    </div> <!-- fix: .container div was left unclosed -->
</section>
<!-- Ejemplos de Placeholders -->
<!-- Side-by-side comparison: a loaded card vs. a placeholder skeleton card. -->
<section>
    <div class="container clearfix">
        <div class="col-11 border border-success">
            <div class="card m-2 text-dark">
                <img src="asserts/stretch.gif" class="card-img-top" alt="Prueba">
                <div class="card-body">
                    <h5 class="card-title">Card title</h5>
                    <p class="card-text">Some quick example text to build on the card title and make up the bulk of the card's content.</p>
                    <a href="#" class="btn btn-primary">Go somewhere</a>
                </div>
            </div>
            <!-- Skeleton version: every text element replaced by a .placeholder span. -->
            <div class="card m-2 text-dark" aria-hidden="true">
                <img src="..." class="card-img-top" alt="Loading...">
                <div class="card-body">
                    <h5 class="card-title placeholder-glow">
                        <span class="placeholder col-6"></span>
                    </h5>
                    <p class="card-text placeholder-glow">
                        <span class="placeholder col-7"></span>
                        <span class="placeholder col-4"></span>
                        <span class="placeholder col-4"></span>
                        <span class="placeholder col-6"></span>
                        <span class="placeholder col-8"></span>
                    </p>
                    <a href="#" tabindex="-1" class="btn btn-primary disabled placeholder col-6"></a>
                </div>
            </div>
        </div>
    </div>
</section>
<section>
    <div class="container mt-4">
        <div class="row justify-content-start">
            <div class="col-11">
                <h3 class="text-success">Width</h3>
                <!-- fix: description was copy-pasted from the tables lesson -->
                <p>Se puede cambiar el ancho de los placeholders usando clases de columnas, utilidades de ancho o estilos en línea.</p>
            </div>
        </div>
    </div> <!-- fix: .container div was left unclosed -->
</section>
<!-- Width examples: column class, width utility, and inline style. -->
<section>
    <div class="container clearfix">
        <div class="col-11 border border-success">
            <span class="placeholder col-6 m-2"></span>
            <span class="placeholder w-75 m-2"></span>
            <span class="placeholder m-2" style="width: 25%;"></span>
        </div>
    </div>
</section>
<section>
    <div class="container mt-4">
        <div class="row justify-content-start">
            <div class="col-11">
                <h3 class="text-success">Color</h3>
                <!-- fix: description mentioned tables; this section is about placeholders -->
                <p>Se usan clases contextuales de fondo para darle color a los placeholders.</p>
            </div>
        </div>
    </div> <!-- fix: .container div was left unclosed -->
</section>
<!-- Color examples: one placeholder bar per contextual background class. -->
<section>
    <div class="container clearfix">
        <div class="col-11 border border-success">
            <span class="placeholder col-12 m-2"></span>
            <span class="placeholder col-12 bg-primary m-2"></span>
            <span class="placeholder col-12 bg-secondary m-2"></span>
            <span class="placeholder col-12 bg-success m-2"></span>
            <span class="placeholder col-12 bg-danger m-2"></span>
            <span class="placeholder col-12 bg-warning m-2"></span>
            <span class="placeholder col-12 bg-info m-2"></span>
            <span class="placeholder col-12 bg-light m-2"></span>
            <span class="placeholder col-12 bg-dark m-2"></span>
        </div>
    </div>
</section>
<section>
    <div class="container mt-4">
        <div class="row justify-content-start">
            <div class="col-11">
                <h3 class="text-success">Animation</h3>
                <!-- fix: description was copy-pasted from the tables lesson -->
                <p>Se usan las clases placeholder-glow y placeholder-wave para animar los placeholders mientras se carga el contenido.</p>
            </div>
        </div>
    </div> <!-- fix: .container div was left unclosed -->
</section>
<!-- Animation examples: glow vs. wave placeholder animations. -->
<section>
    <div class="container clearfix">
        <div class="col-11 border border-success">
            <p class="placeholder-glow m-2">
                <span class="placeholder col-12"></span>
            </p>
            <p class="placeholder-wave m-2">
                <span class="placeholder col-12"></span>
            </p>
        </div>
    </div>
</section>
<!-- Page footer: author credit and year. -->
<footer class="row bg-success mt-5">
    <div class="col-5">
        <p class="ms-5">Ronald Humberto Solano Gutiérrez</p>
    </div>
    <div class="col-5">
    </div>
    <div class="col-2">
        2022
    </div>
</footer>
<script src="bootstrap/js/bootstrap.js"></script>
</body>
</html>
module Blogging
  # Home page handler: renders the article feed (a tag feed, the
  # followed-users feed, or the global feed), the popular tags list and
  # pagination state into the "blogging/home.html" template.
  class HomeHandler < Marten::Handlers::Template
    include NavBarActiveable

    # Memoized "does the current user follow anyone?" flag.
    @following_users : Bool?

    nav_bar_item :home

    template_name "blogging/home.html"

    before_render :add_user_data_to_context
    before_render :add_articles_to_context
    before_render :add_popular_tags_to_context
    before_render :add_targeted_tag_to_context

    private PAGE_PARAM = "page"
    private PAGE_SIZE = 10
    private TAGS_COUNT = 20

    # Picks the feed to show: tag feed when ?tag= is present, the user feed
    # when requested and the user follows someone, otherwise the global feed.
    private def add_articles_to_context
      if !targeted_tag.nil?
        context[:current_tab] = "tag"
        context[:articles] = paginated_tag_feed_articles
      elsif request.query_params.fetch(:articles, "user") == "user" && following_users?
        context[:current_tab] = "user"
        context[:articles] = paginated_user_feed_articles
      else
        context[:current_tab] = "global"
        context[:articles] = paginated_global_feed_articles
      end
    end

    private def add_popular_tags_to_context
      # FIX: use an exclusive range — `[..TAGS_COUNT]` is inclusive and
      # returned 21 tags instead of the intended TAGS_COUNT (20).
      context[:tags] = Tag.all[...TAGS_COUNT]
    end

    private def add_targeted_tag_to_context
      context[:targeted_tag] = targeted_tag
    end

    private def add_user_data_to_context
      context[:following_users] = following_users?
    end

    private def following_users?
      @following_users ||= request.user? && request.user!.profile!.followed_users.exists?
    end

    # Current page from the query string; defaults to 1 on absence or a
    # non-numeric value.
    private def page_number
      request.query_params[PAGE_PARAM]?.try(&.to_i) || 1
    rescue ArgumentError
      1
    end

    private def paginated_global_feed_articles
      paginator = Article.order("-created_at").paginator(PAGE_SIZE)
      paginator.page(page_number)
    end

    private def paginated_tag_feed_articles
      paginator = Article.filter(tags__label: targeted_tag).order("-created_at").paginator(PAGE_SIZE)
      paginator.page(page_number)
    end

    private def paginated_user_feed_articles
      followed_user_pks = request.user!.profile!.followed_users.pluck(:pk).flatten
      paginator = Article.filter(author_id__in: followed_user_pks).order("-created_at").paginator(PAGE_SIZE)
      paginator.page(page_number)
    end

    # The ?tag= query parameter, or nil when absent.
    private def targeted_tag
      request.query_params[:tag]?
    end
  end
end
package com.example.busManagement.controller;
import com.example.busManagement.domain.Luggage;
import com.example.busManagement.domain.Passenger;
import com.example.busManagement.domain.PassengerDTO;
import com.example.busManagement.exception.PassengerNotFoundException;
import com.example.busManagement.repository.IRepositoryLuggage;
import com.example.busManagement.repository.IRepositoryPassenger;
import org.modelmapper.ModelMapper;
import org.springframework.web.bind.annotation.*;
import java.util.List;
import java.util.stream.Collectors;
/*
{
"timesTravelled":7,
"firstName":"carina",
"lastName":"ggss",
"dateOfBirth":"2020/03/11",
"gender":"Female",
"phoneNumber":"1323454"
}
*/
@RestController
class ControllerPassenger {
private final IRepositoryPassenger passenger_repository;
private final IRepositoryLuggage luggage_repository;
ControllerPassenger(IRepositoryPassenger passenger_repository, IRepositoryLuggage luggage_repository) {
this.passenger_repository = passenger_repository;
this.luggage_repository = luggage_repository;
}
// Aggregate root
// tag::get-aggregate-root[]
@GetMapping("/passengers") //GETALL fara luggages
List<PassengerDTO> all() {
ModelMapper modelMapper = new ModelMapper();
List<Passenger> passengers = passenger_repository.findAll(); // FARA LUGGAGES, ALL
return passengers.stream()
.map(passenger -> modelMapper.map(passenger, PassengerDTO.class))
.collect(Collectors.toList());
}
// end::get-aggregate-root[]
@PostMapping("/passengers") // ADD
Passenger newPassenger(@RequestBody Passenger newPassenger) {
return passenger_repository.save(newPassenger);
}
// ~ Luggage = P
// ~ Passenger=A
@PostMapping("/passengers/{passengerId}/luggages")
Luggage newLuggageForPassenger(@PathVariable Long passengerId, @RequestBody Luggage newLuggage) {
// Find the Passenger by ID
Passenger passenger = passenger_repository.findById(passengerId)
.orElseThrow(() -> new PassengerNotFoundException(passengerId));
// Check if the Luggage with the given ID already exists
Luggage existingLuggage = null;
for (Luggage luggage : luggage_repository.findAll()) {
if (luggage.equals(newLuggage)) {
existingLuggage = luggage;
break;
}
}
if (existingLuggage != null) {
// If the Luggage already exists, set the Passenger property and return the existing Luggage
existingLuggage.setPassenger(passenger);
luggage_repository.save(existingLuggage);
return existingLuggage;
} else {
// If the Luggage does not exist, set the Passenger property for the new Luggage
newLuggage.setPassenger(passenger);
// Save the new Luggage to the database
Luggage savedLuggage = luggage_repository.save(newLuggage);
// Add the new Luggage to the Passenger
passenger.getLuggages().add(savedLuggage);
// Save the Passenger to update the Luggages list
passenger_repository.save(passenger);
return savedLuggage;
}
}
// Single item
@GetMapping("/passengers/{id}") //GET BY ID, cu LUGGAGES
Passenger one(@PathVariable Long id) {
return passenger_repository.findById(id)
.orElseThrow(() -> new PassengerNotFoundException(id));
}
@PutMapping("/passengers/{id}") //UPDATE
Passenger replacePassenger(@RequestBody Passenger newPassenger, @PathVariable Long id) {
return passenger_repository.findById(id)
.map(Passenger -> {
Passenger.setTimesTravelled(newPassenger.getTimesTravelled());
Passenger.setFirstName(newPassenger.getFirstName());
Passenger.setLastName(newPassenger.getLastName());
Passenger.setDateOfBirth(newPassenger.getDateOfBirth());
Passenger.setGender(newPassenger.getGender());
Passenger.setPhoneNumber(newPassenger.getPhoneNumber());
return passenger_repository.save(Passenger);
})
.orElseGet(() -> { // otherwise if not found, ADD IT
newPassenger.setId(id);
return passenger_repository.save(newPassenger);
});
}
@DeleteMapping("/passengers/{id}") //DELETE
void deletePassenger(@PathVariable Long id) {
passenger_repository.deleteById(id);
}
} |
const async = require('async');
const helpers = require('../../../helpers/azure');
module.exports = {
title: 'Connection Throttling Enabled',
category: 'PostgreSQL Server',
description: 'Ensures connection throttling is enabled for PostgreSQL servers',
more_info: 'Connection throttling slows the amount of query and error logs sent by the server from the same IP address, limiting DoS attacks or the slowing down of servers due to excessive legitimate user logs.',
recommended_action: 'Ensure the server parameters for each PostgreSQL server have the connection_throttling setting enabled.',
link: 'https://docs.microsoft.com/en-us/azure/postgresql/howto-configure-server-parameters-using-portal',
apis: ['servers:postgres:list', 'configurations:listByServer'],
run: function (cache, settings, callback) {
const results = [];
const source = {};
const locations = helpers.locations(settings.govcloud);
async.each(locations.servers.postgres, (location, rcb) => {
const configurations = helpers.addSource(cache, source,
['configurations', 'listByServer', location]);
if (!configurations) return rcb();
if (configurations.err || !configurations.data) {
helpers.addResult(results, 3,
'Unable to query for PostgreSQL Servers: ' + helpers.addError(configurations), location);
return rcb();
}
if (!configurations.data.length) {
helpers.addResult(results, 0, 'No existing PostgreSQL Servers found', location);
return rcb();
}
var configuration = configurations.data.filter(config => {
return config.name === "connection_throttling";
});
configuration.forEach(config => {
var configIdArr = config.id.split('/');
configIdArr.length = configIdArr.length - 2;
var configId = configIdArr.join('/');
if (config.value === 'ON' ||
config.value === 'on') {
helpers.addResult(results, 0, 'Connection throttling is enabled for the PostgreSQL Server configuration', location, configId);
} else {
helpers.addResult(results, 2, 'Connection throttling is disabled for the PostgreSQL Server configuration', location, configId);
}
});
rcb();
}, function () {
// Global checking goes here
callback(null, results, source);
});
}
}; |
import React from 'react';
import { IndexRoute, Router, Route, Redirect, browserHistory } from 'react-router';
import i18n from 'meteor/universe:i18n';
import injectTapEventPlugin from 'react-tap-event-plugin';
import { Meteor } from 'meteor/meteor';
import { Tracker } from 'meteor/tracker';
import { PrintBuddy } from '../../api/printbuddy/printbuddy';
//import gapi from 'gapi-client';
// route components
import RedirectContainer from '../../ui/containers/RedirectContainer';
import StartContainer from '../../ui/containers/StartContainer';
import AboutContainer from '../../ui/containers/AboutContainer';
import AppContainer from '../../ui/containers/AppContainer';
import ErrorContainer from '../../ui/containers/ErrorContainer';
import RequestContainer from '../../ui/containers/RequestContainer';
import PrintBudddyChatContainer from '../../ui/containers/PrintBudddyChatContainer';
import DoneContainer from '../../ui/containers/DoneContainer';
import JobsContainer from '../../ui/containers/JobsContainer';
import MyJobsContainer from '../../ui/containers/MyJobsContainer';
import MyJobListContainer from '../../ui/containers/MyJobListContainer';
import VerifiedContainer from '../../ui/containers/VerifiedContainer';
import ResetPasswordContainer from '../../ui/containers/ResetPasswordContainer';
import '../../api/user/user.js';
// Default UI locale for universe:i18n; no runtime locale switching is
// visible in this file.
i18n.setLocale('en');
// Needed for onTouchTap
// http://stackoverflow.com/a/34015469/988941
injectTapEventPlugin();
/**
 * react-router onEnter hook: redirects unauthenticated visitors to /start,
 * stashing the path they tried to reach in location state so the app can
 * return them there after login.
 */
function requireAuth(nextState, replace) {
  if (Meteor.userId()) {
    return; // logged in — let the route render
  }
  replace({
    pathname: '/start',
    state: { nextPathname: nextState.location.pathname }
  });
}
/*
* <Route path="create" component={CreateRequestContainer}/>
<Route path="pending" component={PendingRequestContainer}/>
<Route path="chat" component={ChatContainer}/>
<Route path="done" component={DoneContainer}/>
* */
// Top-level route table. Public pages (/start, /about, /verified,
// /resetpassword) need no session; everything under "/" requires a
// logged-in Meteor user (see requireAuth) and renders inside AppContainer.
// Unknown URLs fall through to /404.
export const renderRoutes = () => (
  <Router history={browserHistory}>
    <Route path="/start" component={StartContainer} />
    <Route path="/about" component={AboutContainer} />
    <Route path="/verified" component={VerifiedContainer} />
    <Route path="/resetpassword" component={ResetPasswordContainer} />
    <Route onEnter={requireAuth} path="/" component={AppContainer}>
      {/* "/" itself just forwards to the appropriate page. */}
      <IndexRoute component={RedirectContainer} />
      <Route path="request" component={RequestContainer}>
      </Route>
      <Route path="jobs" component={JobsContainer} />
      <Route path="myjobs" component={MyJobsContainer}>
        <IndexRoute component={MyJobListContainer} />
        <Route path="chat/:id" component={PrintBudddyChatContainer} />
        <Route path="done" component={DoneContainer} />
      </Route>
    </Route>
    <Route path='/404' component={ErrorContainer} />
    <Redirect from='*' to='/404' />
  </Router>
);
/******************************************************************************
*
* timer.hpp
*
* @author Copyright (C) 2015 Kotone Itaya
* @version 2.1.0
* @created 2015/11/02 Kotone Itaya -- Created!
* @@
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*****************************************************************************/
#ifndef SPIRA_TIMER_HPP
#define SPIRA_TIMER_HPP
// Guard renamed from __SPIRA_TIMER_HPP__: identifiers containing a double
// underscore are reserved for the implementation ([lex.name]).

#include <chrono>
#include <cstdint>
#include <functional>  // std::function is used below; was only included transitively
#include <memory>

#include "spira/stream.hpp"
#include "spira/source.hpp"

namespace spira {
  /**
   * Stream source emitting int64_t tick values, rate-limited by the frame
   * rate given at construction (default 1000 fps).
   * Copyable and movable; all state lives behind a shared pimpl.
   */
  class timer : public source<int64_t> {
  public:
    timer(double fps=1000);
    timer(const timer& other);
    timer(timer&& other) noexcept;
    timer& operator =(const timer& other);
    timer& operator =(timer&& other) noexcept;
    friend void swap(timer& a, timer& b);

    // Register callbacks run around each emitted tick value.
    // NOTE(review): exact invocation order is defined in the .cpp — confirm there.
    void prebind(const std::function<void(int64_t)> function);
    void postbind(const std::function<void(int64_t)> function);

    // Drive the timer; presumably emits when a frame period has elapsed —
    // TODO confirm against the implementation.
    void poll();
    void reset();
  private:
    struct impl; std::shared_ptr<impl> pimpl;
  };
}

#endif  // SPIRA_TIMER_HPP
from flask import Blueprint, request, jsonify
from flask_jwt_extended import create_access_token, jwt_required, get_jwt_identity, create_refresh_token
from app.models.user_model import User
from app.database import db
# Blueprint bundling the auth endpoints (register/login/refresh/protected);
# mounted by the app factory — URL prefix not visible here.
auth_blueprint = Blueprint('auth', __name__)
@auth_blueprint.route('/register', methods=['POST'])
def register():
    """Create a new user account.

    Expects a JSON body with ``username`` and ``password``.
    Returns 400 for a missing/incomplete body, 409 if the username is
    already taken, 201 on success.
    """
    # silent=True: a missing/invalid JSON body yields None instead of a 415/400
    # abort, so we can return a consistent error payload ourselves.
    data = request.get_json(silent=True) or {}
    username = data.get('username')
    password = data.get('password')
    # Reject incomplete payloads up front instead of failing later inside
    # set_password() or on the database unique constraint.
    if not username or not password:
        return jsonify({'message': 'username y password son requeridos'}), 400
    if User.query.filter_by(username=username).first():
        return jsonify({'message': 'El usuario ya existe'}), 409
    new_user = User(username=username)
    new_user.set_password(password)
    db.session.add(new_user)
    db.session.commit()
    return jsonify({ 'message': 'Usuario creado correctamente' }), 201
@auth_blueprint.route('/login', methods=['POST'])
def login():
    """Authenticate a user and issue an access/refresh token pair.

    Expects a JSON body with ``username`` and ``password``.
    Returns 200 with tokens on success, 401 otherwise (the same message is
    used for unknown user and wrong password to avoid user enumeration).
    """
    # silent=True keeps a malformed/missing body from raising before we can
    # answer with our own 401 instead of a framework error page.
    data = request.get_json(silent=True) or {}
    username = data.get('username')
    password = data.get('password')
    if not username or not password:
        return jsonify({ 'message': 'Credenciales invalidas' }), 401
    user = User.query.filter_by(username=username).first()
    if user and user.check_password(password):
        access_token = create_access_token(identity=user.id)
        refresh_token = create_refresh_token(identity=user.id)
        return jsonify(access_token=access_token, refresh_token=refresh_token), 200
    return jsonify({ 'message': 'Credenciales invalidas' }), 401
@auth_blueprint.route('/refresh', methods=['POST'])
@jwt_required(refresh=True)
def refresh():
    """Exchange a valid refresh token for a fresh access/refresh token pair."""
    identity = get_jwt_identity()
    return jsonify(
        access_token=create_access_token(identity=identity),
        refresh_token=create_refresh_token(identity=identity),
    ), 200
@auth_blueprint.route('/protected', methods=['GET'])
@jwt_required()
def protected():
    """Return the authenticated user's serialized profile.

    Returns 200 with the user data, or 404 when the account behind a
    still-valid token no longer exists.
    """
    current_user_id = get_jwt_identity()
    user = User.query.get(current_user_id)
    # A token can outlive its account (user deleted after login); the old
    # code crashed here with AttributeError on None.
    if user is None:
        return jsonify({ 'message': 'Usuario no encontrado' }), 404
    return jsonify(user=user.serialize()), 200
import 'react-app-polyfill/ie11';
import * as React from 'react';
import * as ReactDOM from 'react-dom';
import ProductCard, {ProductTitle, ProductImage, ProductButtons} from 'adlb-product-card';
const product = {
id: '1',
title: 'Coffee Mug - Card',
img: './coffee-mug.png'
}
const App = () => {
return (
<div>
<ProductCard
key={product}
product={product}
initialValues={{
count: 4,
maxCount: 10
}}
>
{
({ reset, count, increaseBy, isMaxCountReached }) => (
<>
<ProductImage />
<ProductTitle />
<ProductButtons />
</>
)
}
</ProductCard>
</div>
);
};
ReactDOM.render(<App />, document.getElementById('root')); |
// Fill out your copyright notice in the Description page of Project Settings.
#pragma once
#include "CoreMinimal.h"
#include "Enums/EnemyState.h"
#include "Characters/BaseCharacter.h"
#include "BaseEnemy.generated.h"
class UBehaviorTree;
class APlayerCharacter;
DECLARE_DYNAMIC_MULTICAST_DELEGATE(FEnemyDeadDelegate);
UCLASS()
class LIGHTSOULS_API ABaseEnemy : public ABaseCharacter
{
	GENERATED_BODY()

public:
	ABaseEnemy();

protected:
	// BeginPlay exists on AActor (via ABaseCharacter); 'override' was missing,
	// so a signature mismatch would silently declare a new virtual instead of
	// overriding the engine hook.
	virtual void BeginPlay() override;

public:
	virtual void Tick(float DeltaTime) override;

	FORCEINLINE FText GetEnemyName() const { return EnemyName; }

	virtual void OnGetHit(float Damage, EAttackType AttackType, EAttackIntensityType AttackIntensityType, AActor* DamageCauser, FHitResult HitResult) override;
	virtual float CalculateDamage() override;

	// Broadcast on death; who binds it is not visible here — TODO confirm.
	FEnemyDeadDelegate OnEnemyDead;

	FORCEINLINE EEnemyState GetEnemyState() const { return EnemyState; }
	void SetEnemyState(EEnemyState InEnemyState);

	FORCEINLINE UAnimMontage* GetCombatStartMontage() const { return CombatStartMontage; }
	UAnimMontage* GetAttackMontage(int Index) const;
	FORCEINLINE UBehaviorTree* GetBehaviorTree() const { return BehaviorTree; }
	FORCEINLINE APlayerCharacter* GetTargetPlayer() const { return TargetPlayer; }
	APlayerCharacter* SearchTargetPlayer();

	//virtual void BeParried();

protected:
	FText EnemyName;

	EEnemyState EnemyState = EEnemyState::EES_Idle;

	// UPROPERTY() added: raw UObject pointers without UPROPERTY are invisible
	// to the garbage collector and can be left dangling after the referenced
	// object is destroyed.
	UPROPERTY()
	class UTargetComponent* TargetComponent;

	UPROPERTY(EditDefaultsOnly, Category = "Montage")
	UAnimMontage* CombatStartMontage;

	UPROPERTY(EditDefaultsOnly, Category = "Montage")
	TArray<UAnimMontage*> AttackMontages;

	UPROPERTY(EditDefaultsOnly, Category = "AI")
	UBehaviorTree* BehaviorTree;

	// Radius used by SearchTargetPlayer — presumably centimeters (UE default); confirm.
	float SearchRadius = 2000.f;

	UPROPERTY()
	APlayerCharacter* TargetPlayer;
};
# Wzorzec projektowy Fasada (Facade) jest używany do zapewnienia uproszczonego interfejsu do
# skomplikowanego zestawu klas, biblioteki lub frameworka.
# Zalety:
# 1. Uproszczenie interfejsu: Klienci korzystają z jednego uproszczonego interfejsu zamiast bezpośrednio
# z skomplikowanego systemu klas.
# 2. Rozdzielenie kodu klienta od skomplikowanego systemu: Ułatwia utrzymanie kodu i jego rozwój.
# Wady:
# 1. Może stać się trudna w utrzymaniu, jeśli fasada będzie mieć zbyt wiele funkcjonalności.
# 2. Może ograniczać użytkowników, którzy potrzebują większej kontroli nad skomplikowanym systemem.
class KitchenSystem:
    """Subsystem: prepares the dishes that customers order."""

    def prepare_dish(self, dish_name):
        """Prepare the named dish and return a confirmation message."""
        return "Przygotowano danie: {}".format(dish_name)
class CustomerService:
    """Subsystem: records incoming customer orders."""

    def take_order(self, order_details):
        """Accept an order and return a confirmation message."""
        return "Zamówienie przyjęte: {}".format(order_details)
class InventorySystem:
    """Subsystem: checks stock levels for a requested item."""

    def check_inventory(self, item_name):
        """Report that stock was checked for ``item_name`` (always succeeds)."""
        return "Sprawdzono zapasy: {}".format(item_name)
class RestaurantFacade:
    """Facade: a single simplified entry point over the restaurant subsystems."""

    def __init__(self):
        # Compose the subsystems the facade coordinates.
        self.kitchen = KitchenSystem()
        self.customer_service = CustomerService()
        self.inventory = InventorySystem()

    def place_order(self, dish_name):
        """Run the full order flow: stock check -> order intake -> kitchen."""
        # NOTE(review): check_inventory() returns a non-empty string, so this
        # condition is always truthy and the else branch below is unreachable.
        # If real availability checks are intended, check_inventory should
        # return a bool — confirm intent before changing it.
        if self.inventory.check_inventory(dish_name):
            order_details = self.customer_service.take_order(dish_name)
            dish = self.kitchen.prepare_dish(dish_name)
            return f"{order_details}, {dish}"
        else:
            return "Danie nie jest dostępne"
# Using the Facade pattern: the client only ever talks to RestaurantFacade,
# never to the subsystems directly.
restaurant = RestaurantFacade()
print(restaurant.place_order("Pizza Margherita"))
# W tym przykładzie:
# KitchenSystem, CustomerService i InventorySystem to skomplikowane podsystemy
# wewnątrz restauracji.
# RestaurantFacade to fasada, która udostępnia uproszczony interfejs do interakcji
# z tymi podsystemami.
# Klient (w tym przypadku właściciel restauracji lub menedżer)
# interaguje z systemem za pośrednictwem fasady, co znacznie upraszcza
# proces realizacji zamówienia. |
# Lookups primarily make use of hashing and dictionaries
# If you are interested in how these work under the hood
# Check out our hashing article here: https://schulichignite.com/blog/verifying-quickly
from __future__ import annotations
import uuid
from dataclasses import dataclass
database = dict() # Maps unique user ID (uuid4 string) -> User instance
email_to_id = dict() # Maps email address -> unique user ID
# This means we only need to know someone's email to lookup
# Their information directly instead of searching a list!
@dataclass
class User:
    """A user record that self-registers in the module-level lookup tables."""
    name: str
    age: int
    birthday: str
    email: str
    phone: str

    def __post_init__(self):
        # Every new User gets a random unique ID and is indexed both by
        # that ID (database) and by email (email_to_id).
        new_id = str(uuid.uuid4())
        email_to_id[self.email] = new_id
        database[new_id] = self
def find_user_by_email(email:str) -> User:
    """Finds a User by email

    Parameters
    ----------
    email : str
        The email to find

    Returns
    -------
    User
        The user information associated with the email

    Raises
    ------
    KeyError
        If no user is registered under the given email
    """
    try:
        user_id = email_to_id[email]
    except KeyError:
        # Re-raise with a message that names the missing key; a bare
        # KeyError(email) gives callers no context.
        raise KeyError(f"No user registered with email {email!r}") from None
    return database[user_id]
if __name__ == "__main__":
    # Demo: each User self-registers in the lookup tables on construction,
    # so creating instances is enough to populate both dicts.
    User("Kieran Wood", 24, "Jan 1st 2023", "kieran@canadiancoding.ca", "+1(123)123-1234")
    print(email_to_id)
    print(database)

    User("Jamie Avernus", 26, "March 4th 2023", "jamie@example.com", "+1(403)123-1234")
    print(email_to_id)
    print(database)
package jdbc0922;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
/**
 * JDBC demo: fetch a single row from the sungjuk table by primary key and
 * print each column twice (by index, then by name).
 */
public class Test02_selectOne {
	public static void main(String[] args) {
		// sungjuk 테이블에서 한 행 가져오기 (fetch one row by key)
		int sno = 61;

		String url = "jdbc:oracle:thin:@localhost:1521:xe"; // 위치
		String user = "system"; // 아이디
		String password = "1234";
		String driver = "oracle.jdbc.driver.OracleDriver"; // ojdbc8.jar

		StringBuilder sql = new StringBuilder();
		sql.append(" SELECT sno, uname, kor, eng, mat, tot, aver, addr, wdate ");
		sql.append(" FROM sungjuk ");
		sql.append(" WHERE sno= ? ");

		try {
			Class.forName(driver); // load the JDBC driver

			// try-with-resources closes ResultSet, PreparedStatement and
			// Connection automatically, in reverse order, even on failure —
			// this replaces the manual finally chain that swallowed close()
			// exceptions.
			try (Connection con = DriverManager.getConnection(url, user, password);
			     PreparedStatement pstmt = con.prepareStatement(sql.toString())) {

				System.out.println("오라클DB 서버 연결 성공!!");
				pstmt.setInt(1, sno);

				try (ResultSet rs = pstmt.executeQuery()) {
					if (rs.next()) { // cursor 가 있는지? (did we get a row?)
						System.out.println("자료있음~~");
						// 1) access by column index (1-based)
						System.out.println(rs.getInt(1)); // sno
						System.out.println(rs.getString(2)); // uname
						System.out.println(rs.getInt(3));
						System.out.println(rs.getInt(4));
						System.out.println(rs.getInt(5));
						System.out.println(rs.getInt(6));
						System.out.println(rs.getInt(7));
						System.out.println(rs.getString(8));
						System.out.println(rs.getString(9));
						// 2) access by column name
						System.out.println(rs.getInt("sno"));
						System.out.println(rs.getString("uname"));
						System.out.println(rs.getInt("kor"));
						System.out.println(rs.getInt("eng"));
						System.out.println(rs.getInt("mat"));
						System.out.println(rs.getInt("tot"));
						System.out.println(rs.getInt("aver"));
						System.out.println(rs.getString("addr"));
						System.out.println(rs.getString("wdate"));
					} else {
						System.out.println("자료없음!!");
					}
				}
			}
		} catch (Exception e) {
			// Boundary catch for a demo program; keep the original message.
			System.out.println("실패 :" + e);
		}
	}// main() end
}// class end
import numpy as np
from scipy.spatial.distance import cdist
from collections import namedtuple, deque
import random
class FrameStacker:
    """Keeps the most recent ``num_frames`` observation frames and exposes
    them stacked along the last (channel) axis."""

    def __init__(self, num_frames, frame_shape):
        self.num_frames = num_frames
        self.frame_shape = frame_shape
        # deque(maxlen=...) silently evicts the oldest frame on append.
        self.frames_buffer = deque(maxlen=num_frames)

    def reset(self, initial_frame):
        """Fill the whole buffer with ``initial_frame`` (same reference)."""
        self.frames_buffer.extend(initial_frame for _ in range(self.num_frames))

    def stack_frames(self):
        """Return all buffered frames as one array, newest last (axis -1)."""
        return np.stack(self.frames_buffer, axis=-1)

    def update_buffer(self, new_frame):
        """Append the newest frame, evicting the oldest when full."""
        self.frames_buffer.append(new_frame)
class ExplorationMemory:
    """Fixed-capacity ring buffer of 1-D uint8 frames with a brute-force
    k-nearest-neighbour query (Euclidean distance)."""

    def __init__(self, capacity, vec_frame_shape):
        self.capacity = capacity
        self.frame_shape = vec_frame_shape
        self.frames = np.zeros((capacity,) + (vec_frame_shape,), dtype=np.uint8)
        self.current_index = 0  # next slot to overwrite
        self.is_full = False    # True once every slot has been written

    def update_memory(self, new_frame):
        """Store ``new_frame`` in the next slot, wrapping at capacity."""
        self.frames[self.current_index] = new_frame
        self.current_index = (self.current_index + 1) % self.capacity
        if not self.is_full and self.current_index == 0:
            self.is_full = True

    def get_frames(self):
        """Return only the frames that have actually been written.

        Bug fix: the previous version returned ``frames[:current_index + 1]``,
        which always included one never-written all-zero row (and returned a
        bogus zero frame even before any update).
        """
        if self.is_full:
            return self.frames
        return self.frames[:self.current_index]

    def knn_query(self, query_frame, k=1):
        """Return (distances, indices) of the ``k`` stored frames closest to
        ``query_frame``. ``indices`` has shape (k, 1) as before."""
        frames_to_query = self.get_frames()
        flattened_frames = frames_to_query.reshape((frames_to_query.shape[0], -1))
        distances = cdist(flattened_frames, [query_frame], metric='euclidean')
        indices = np.argsort(distances, axis=0)[:k]
        return distances[indices], indices
class ReplayBuffer:
    """Bounded FIFO of transitions with uniform random sampling."""

    def __init__(self, capacity):
        self.capacity = capacity
        self.memory = deque([], maxlen=capacity)
        # One record per environment step; field names mirror what the
        # agent stores.
        fields = (
            'stacked_frames',
            'state_infos',
            'action',
            'next_stacked_frames',
            'next_state_infos',
            'reward',
            'done',
        )
        self.Transition = namedtuple('Transition', fields)

    def push(self, *args):
        """Record one transition; the oldest is evicted once full."""
        self.memory.append(self.Transition(*args))

    def sample(self, batch_size):
        """Draw ``batch_size`` transitions uniformly without replacement."""
        return random.sample(self.memory, batch_size)

    def __len__(self):
        return len(self.memory)
package com.cydeo.step_definitions;
import com.cydeo.pages.WikiResultPage;
import com.cydeo.pages.WikiSearchPage;
import com.cydeo.utilities.BrowserUtils;
import com.cydeo.utilities.Driver;
import io.cucumber.java.en.Given;
import io.cucumber.java.en.Then;
import io.cucumber.java.en.When;
import io.cucumber.java.sl.In;
import org.junit.Assert;
public class Wiki_StepDefifnitions {
WikiSearchPage wikiSearchPage = new WikiSearchPage();
@Given("User is on Wikipedia home page")
public void user_is_on_wikipedia_home_page() {
Driver.getDriver().get("https://wikipedia.org");
}
@When("User types {string} in the wiki search box")
public void userTypesInTheWikiSearchBox(String searchValue) {
wikiSearchPage.searchBox.sendKeys(searchValue);
}
@When("User clicks wiki search button")
public void user_clicks_wiki_search_button() {
wikiSearchPage.searchButton.click();
}
@Then("User sees {string} is in the wiki title")
public void userSeesIsInTheWikiTitle(String expectedInTitle) {
BrowserUtils.verifyTitleContains(expectedInTitle);
}
WikiResultPage wikiResultPage = new WikiResultPage();
@Then("User sees {string} is in the main header")
public void userSeesIsInTheMainHeader(String expectedText) {
String actualHeaderText = wikiResultPage.mainHeader.getText();
Assert.assertEquals(actualHeaderText, expectedText);
}
@Then("User sees {string} is in the image header")
public void userSeesIsInTheImageHeader(String expectedText) {
Assert.assertEquals(wikiResultPage.imageHeader.getText(),expectedText);
}
} |
import React from 'react';
import { HEROBANNER_SIZE, IMAGE_BASE_URL, POSTER_SIZE } from "../config";
import { Button } from "./Button";
import { Grid } from "./Grid";
import { Herobanner } from "./HeroBanner";
import { Searchbar } from "./Searchbar";
import NoImage from '../images/no_image.jpg'
import { useHomeSeriesFetch } from '../Hooks/useHomeSeriesFetch';
import { ThumbSeries } from './ThumbSeries';
/**
 * Home page for TV series: hero banner for the first popular result, a
 * search bar, a grid of series thumbnails, and a "load more" button while
 * further pages remain. All data comes from useHomeSeriesFetch.
 */
export const HomeSeries = () => {
  const { state, loading, error, setSearchTerm, setIsLoadingMore, searchTerm } =
    useHomeSeriesFetch();

  if (error) return <div>Oulala, il y a un problème</div>;
  // Removed leftover debug statement: console.log(state)

  return (
    <>
      {state.results[0] ? (
        <Herobanner
          image={`${IMAGE_BASE_URL}${HEROBANNER_SIZE}${state.results[0].backdrop_path}`}
          title={state.results[0].original_name}
        />
      ) : null}
      <Searchbar setSearchTerm={setSearchTerm} />
      <Grid headTitle={searchTerm ? "Series recherchées" : "Les populaires"}>
        {state.results.map((serie) => (
          <ThumbSeries
            key={serie.id}
            clickable
            serieId={serie.id}
            image={
              serie.poster_path
                ? IMAGE_BASE_URL + POSTER_SIZE + serie.poster_path
                : NoImage
            }
            title={serie.name}
          />
        ))}
        {loading && <h1>Chargement</h1>}
      </Grid>
      {state.page < state.total_pages && !loading && (
        <Button text="Suivant" callback={() => setIsLoadingMore(true)} />
      )}
    </>
  )
}
import React, {useRef, useCallback} from 'react';
import {WebView, WebViewMessageEvent} from 'react-native-webview';
import {styles} from './styles';
import {injectedJavaScript} from './injection';
type MainWebViewProps = {
  uri: string;
  // Receives the raw string the page posts back (via the injected script —
  // see ./injection).
  setInformationGrab: (data: string) => void;
};

/**
 * Full-screen WebView that injects ./injection's script into the loaded
 * page and forwards every message the page posts to setInformationGrab.
 *
 * SECURITY NOTE(review): originWhitelist={['*']} combined with
 * allowFileAccessFromFileURLs / allowUniversalAccessFromFileURLs and
 * mixedContentMode="always" disables most WebView isolation. Acceptable
 * only for trusted or local content — confirm the `uri` source is trusted.
 */
export const MainWebView = ({uri, setInformationGrab}: MainWebViewProps) => {
  const webViewRef = useRef<WebView>(null);
  /**
   * Handle form event: pass the page's posted payload to the host app.
   */
  const onMessage = useCallback(
    (event: WebViewMessageEvent) => {
      setInformationGrab(event.nativeEvent.data);
    },
    [setInformationGrab],
  );
  return (
    <WebView
      ref={webViewRef}
      style={styles.fullSizeWebViewContainer}
      source={{
        uri,
      }}
      injectedJavaScript={injectedJavaScript}
      originWhitelist={['*']}
      allowFileAccess={true}
      onMessage={onMessage}
      javaScriptEnabled={true}
      domStorageEnabled={true}
      mixedContentMode="always"
      allowFileAccessFromFileURLs={true}
      allowUniversalAccessFromFileURLs={true}
    />
  );
};
# OpenAD
[](https://pypi.org/project/openad/)
[](https://pypi.org/project/openad/)
[](https://opensource.org/licenses/MIT)
[](https://github.com/psf/black)
[](https://github.com/acceleratedscience/open-ad-toolkit)
**Open Accelerated Discovery Client**<br>
[Project homepage](https://acceleratedscience.github.io/open-ad-toolkit/)
---
<br>
## Table of Contents
- [Installation](#installation)
- [Getting Started - CLI](#getting-started---cli)
- [Getting Started - Jupyter](#getting-started---jupyter)
- [Getting Access to RXN, DS4SD and Tell Me Functionality](#getting-access-to-rxn-ds4sd-and-tell-me-functionality)
- [Installation for Development](#installation-for-development)
- [Testing a Branch](#testing-a-branch)
<br>
## Notes
- Only available for Linux (including Windows 11 WSL) and MacOS
- Currently only the OpenAI API is available for the _Tell Me_ Function (WatsonX coming soon)
- If you're on Mac and not installing into a virtual environment, you may need use `pip3` and `python3` instead of `pip` and `python` respectively.<br>
<br>
## Installation
> **Note:** If you're installing a development version of OpenAD, please jump to [Installation for Development](#installation-for-development)
1. **Step 0: Before you start**<br>
Ensure you're running Python 3.10.10 or 3.11.<br>
We do not have a requirement on how to install Python on your local machine; however, we recommend pyenv.
```shell
git clone https://github.com/pyenv/pyenv.git ~/.pyenv
pyenv install 3.10
```
1. **Step 1: Set up virtual environment** (optional)<br>
python -m venv ~/ad-venv
source ~/ad-venv/bin/activate
> **Note:** To exit the virtual environment, you can run `deactivate`
> OpenAD also supports installation via Poetry.
1. **Step 2: Installation**<br>
To Install into a global or Python virtual environment<br>
pip install openad
To install into a Poetry environment
poetry add openad
1. **Launch**<br>
To enter the command shell, simply enter `openad` from the command line.
> _**Note:** To see available commands, run `?`_
<br>
## Getting Started - CLI
- **Entering the Shell Environment**<br>
Run from any directory:
openad

- **Exiting the Shell Environment**<br>
Hit `ctrl+c` or run:
exit
- **Installing Toolkits**<br>
You can install the `DS4SD`, `GT4SD`, `ST4SD` and `RXN` toolkits, however please note that at this time, only `DS4SD` and `RXN` support experimental functionality while the others are meant as placeholders.
add toolkit ds4sd
add toolkit rxn
- **Running Bash Commands**<br>
To run any command in bash mode, prepend it with `openad` and make sure to prepend any quotes with `\`.
openad show molecules using file \'base_molecules.sdf\'
<br>
## Getting Started - Jupyter
### Jupyter Setup
If you plan to use this application inside Jupyter Notebook or JupyterLab, you should set it up as follows:
1. **Activate your virtual environment**<br>
This should already be active, per [Installation](#installation) instructions on top.
source ~/ad-venv/bin/activate
2. **Create an iPython kernel**<br>
This is used to run Notebook commands inside the virtual environment:
python -m ipykernel install --user --name=ad-kernel
if you are also using Poetry we suggest naming your kernel `poetry-ad-kernel`
> **Note:** To list your installed iPython kernels, you can run `jupyter kernelspec list`<br>
> To remove the kernel you can run `jupyter kernelspec uninstall ad-kernel`
4. **Initiate the magic commands.**<br>
This copies the magic commands into the iPython startup directory for your created profile:
init_magic
> **Note:** You can also:<br>
> • Initiate in a single notebook only: `init_magic .` followed by `run openad_magic.py`<br>
> • Initiate within another iPython profile: `init_magic <profile_name>`<br>
> • Initiate without installing anything: `run openad_magic.ipynb` after installing examples (see next bullet)<br>
> --> This executes the file `~/openad_notebooks/openad_magic.ipynb` and needs to be run every time after restarting the kernel
5. **Install the Notebook examples**<br>
This creates the `~/openad_notebooks` folder with a number of demonstration notebooks.<br>
Start with `Table_of_Contents.ipynb`.
init_examples
<br>
### Jupyter Launch
- After installing the Notebook examples, open the table of contents to get an introduction and be taken through step by step how to use the tool.
jupyter lab ~/openad_notebooks/Table_of_Contents.ipynb
> **NOTE:** By launching Jupyter this way it will automatically launch the trial notebooks.
- Make sure to select your newly created "ad-kernel" iPython kernel. You can do this under _Kernel > Change Kernel_, or in the latest versions of Jupyter by clicking the kernel name in the top right hand corner. If you don't see your iPython kernel, make sure you followed the Jupyter Setup instructions listed above.
<br>
<figure>
<figcaption align="center" style="font-size:0.9em;opacity:.6;margin-bottom:-15px"><i>Jupyter Notebook</i></figcaption>
<img width="1112" alt="jupyter-notebook-kernel" src="https://github.com/acceleratedscience/open-ad-toolkit/assets/30096303/50dee4a9-80d9-4ddb-92f0-321d6c8328e5">
</figure>
<figure>
<figcaption align="center" style="font-size:0.9em;opacity:.6;margin-bottom:-15px"><i>Jupyter Lab</i></figcaption>
<img width="1112" alt="jupyter-lab-kernel" src="https://github.com/acceleratedscience/open-ad-toolkit/assets/30096303/99e9fda1-e9e3-4533-b375-267eaed5c657">
</figure>
- Magic commands are implemented by the _openad.py_ or _openad_magicnb_ files, and are invoked by the `%openad` prefix. For example:<br>
%openad list files
- An example magic command to play with DS4SD:<br>
%openad display all collections
- An example magic command to play with RXN
%openad list rxn models
<br>
## Getting Access to RXN, DS4SD and Tell Me Functionality
Below you find login instructions for RXN and DeepSearch. If you choose to use the `Tell Me` function, you will also need to obtain a OpenAI API account.<br>
<br>
### DS4SD (DeepSearch)
1. First, you'll need to generate an API key on the DeepSearch website.
- Visit the DeepSearch website and create an account: [deepsearch-experience.res.ibm.com](https://deepsearch-experience.res.ibm.com)<br>
- Once logged in, click the Toolkit/API icon in the top right hand corner, then open the HTTP section
- Click the "Generate new API key" button<br>
<br>
<img width="1112" alt="ds4sd-api-key" src="https://github.com/acceleratedscience/open-ad-toolkit/assets/30096303/60ed3108-b0f5-48eb-b0f9-eb5a9a4aa643">
1. Once inside the OpenAD client, you'll be prompted to authenticate when activating the DeepSearch (DS4SD) toolkit. When running `set context ds4sd` :
- **Hostname:** [https://sds.app.accelerate.science](https://sds.app.accelerate.science)
- **Email:** Your IBM email
- **API_key:** The DS4SD API key you obtained following the instructions above.<br>
> **Note:** Your DS4SD auth config file is saved as `~/.openad/ds-auth.ext-v2.json`. If you ever want to reset your DS4SD login information, simply delete this file.<br>
1. You should get a message saying you successfully logged in.
<br>
### RXN
1. First, you'll need to generate an API key on the RXN website.
- Sign up for an RXN account at [rxn.app.accelerate.science](https://rxn.app.accelerate.science)
- Obtain your API key by clicking the user profile icon in the top right hand corner and select "My profile".<br>
<br>
<img width="1112" alt="rxn-api-key" src="https://github.com/acceleratedscience/open-ad-toolkit/assets/30096303/d00199c2-3a94-4f11-a345-27cbde313732">
1. When setting the context to RXN using `set context rxn` you'll be prompted to create a new auth configuration file:
- **Hostname:** [https://rxn.app.accelerate.science](https://rxn.app.accelerate.science)<br>
- **API_key:** The RXN API key you obtained following the instructions above.
1. You should get a message saying you successfully logged in.<br>
> **Note:** Your RXN auth config file is saved as `~/.openad/rxn-auth.ext-v2.json`. If you ever want to reset your RXN login information, simply delete this file. You can also do this by running `set context rxn reset`<br>
<br>
### OpenAI
In order to use the "Tell me" functionality, you will need to create an account with OpenAI. There is a one month free trial.
> **Note:** WatsonX coming soon
1. Go to [platform.openai.com](https://platform.openai.com) and create an account
1. Click on the profile icon in the top right and choose "View API keys"
1. Create a new key
1. Run `tell me` to be prompted for your OpenAI API credentials
1. Your hostname is [https://api.openai.com/v1/models](https://api.openai.com/v1/models)
<img width="1112" alt="openai-api-key" src="https://github.com/acceleratedscience/open-ad-toolkit/assets/30096303/b0ce5207-a1b7-4558-9e57-18aee87baaee">
## Installation for Development
Only follow these instructions if you're contributing to the codebase.
1. **Step 0: Before you start**<br>
Ensure you're running Python 3.10.10 or above.
1. **Step 1: Set up virtual environment** (optional)<br>
python -m venv ~/ad-venv
source ~/ad-venv/bin/activate
> **Note:** To exit the virtual environment, you can run `deactivate`
1. **Step 2: Installation**<br>
[Download](https://github.com/acceleratedscience/open-ad-toolkit) or clone the right branch from GitHub:
git clone -b main https://github.com/acceleratedscience/open-ad-toolkit.git
> **Note:** To clone a particular branch, replace `main` with your branch name.
Then, enter the repo's top directory and install the requirements
cd open-ad-opentoolkit
pip install -e .
> **Note:** The `-e` flag stands for "editable". This means that instead of copying the package's files to the Python site-packages directory as in a regular installation, pip creates a symbolic link (symlink) from your package's source code directory into your Python environment.<br><br>
> This way you can make changes to the source code of the package, and those changes are immediately reflected in your Python environment. You don't need to reinstall the package every time you make a change.
<br>
## Testing a branch
To do a regular install from a particular branch, you can run:
pip install git+https://github.com/acceleratedscience/open-ad-toolkit.git@<branch_name>
<!--
For screenshots to look good, they should be small and ideally
all the same size. The script below lets you open the URLs in
the right size. Just paste this into the browser console and
press enter.
To take the screenshots with browser UI included on Mac, press
cmd+shift+4 followed by the spacebar, then click the window.
For consistency, stick to Chrome.
- - -
urls = [
'https://cps.foc-deepsearch.zurich.ibm.com',
'https://rxn.app.accelerate.science',
'https://sds.app.accelerate.science',
'https://platform.openai.com/account/api-keys'
]
for (var i=0; i< urls.length; i++) {
window.open(urls[i], '_blank', 'width=1000,height=600');
}
--> |
<?php
namespace Drupal\Tests\jsonapi\Kernel\Query;
use Drupal\Core\Field\FieldStorageDefinitionInterface;
use Drupal\Core\Http\Exception\CacheableBadRequestHttpException;
use Drupal\jsonapi\Context\FieldResolver;
use Drupal\jsonapi\Query\Filter;
use Drupal\jsonapi\ResourceType\ResourceType;
use Drupal\node\Entity\Node;
use Drupal\node\Entity\NodeType;
use Drupal\Tests\image\Kernel\ImageFieldCreationTrait;
use Drupal\Tests\jsonapi\Kernel\JsonapiKernelTestBase;
use Prophecy\Argument;
/**
* @coversDefaultClass \Drupal\jsonapi\Query\Filter
* @group jsonapi
* @group jsonapi_query
*
* @internal
*/
class FilterTest extends JsonapiKernelTestBase {
  use ImageFieldCreationTrait;

  /**
   * {@inheritdoc}
   */
  protected static $modules = [
    'field',
    'file',
    'image',
    'jsonapi',
    'node',
    'serialization',
    'system',
    'text',
    'user',
  ];

  /**
   * A node storage instance.
   *
   * @var \Drupal\Core\Entity\EntityStorageInterface
   */
  protected $nodeStorage;

  // NOTE(review): setUp() also assigns $this->fieldResolver, but no such
  // property is declared in this chunk. If it is not declared elsewhere in
  // the class, add one — dynamic properties are deprecated as of PHP 8.2.

  /**
   * The JSON:API resource type repository.
   *
   * @var \Drupal\jsonapi\ResourceType\ResourceTypeRepositoryInterface
   */
  protected $resourceTypeRepository;
  /**
   * {@inheritdoc}
   */
  public function setUp(): void {
    parent::setUp();
    $this->setUpSchemas();
    $this->savePaintingType();
    // Fixture nodes: titles encode which paintings a filter under test
    // should (FIND) or should not (DO_NOT_FIND) match for the condition
    // ((RED or CIRCLE) or (YELLOW and SQUARE)).
    $this->savePaintings([
      ['colors' => ['red'], 'shapes' => ['triangle'], 'title' => 'FIND'],
      ['colors' => ['orange'], 'shapes' => ['circle'], 'title' => 'FIND'],
      ['colors' => ['orange'], 'shapes' => ['triangle'], 'title' => 'DO_NOT_FIND'],
      ['colors' => ['yellow'], 'shapes' => ['square'], 'title' => 'FIND'],
      ['colors' => ['yellow'], 'shapes' => ['triangle'], 'title' => 'DO_NOT_FIND'],
      ['colors' => ['orange'], 'shapes' => ['square'], 'title' => 'DO_NOT_FIND'],
    ]);
    // Services exercised by the tests below.
    $this->nodeStorage = $this->container->get('entity_type.manager')->getStorage('node');
    $this->fieldResolver = $this->container->get('jsonapi.field_resolver');
    $this->resourceTypeRepository = $this->container->get('jsonapi.resource_type.repository');
  }
/**
* @covers ::queryCondition
*/
public function testInvalidFilterPathDueToMissingPropertyName() {
$this->expectException(CacheableBadRequestHttpException::class);
$this->expectExceptionMessage('Invalid nested filtering. The field `colors`, given in the path `colors` is incomplete, it must end with one of the following specifiers: `value`, `format`, `processed`.');
$resource_type = $this->resourceTypeRepository->get('node', 'painting');
Filter::createFromQueryParameter(['colors' => ''], $resource_type, $this->fieldResolver);
}
/**
* @covers ::queryCondition
*/
public function testInvalidFilterPathDueToMissingPropertyNameReferenceFieldWithMetaProperties() {
$this->expectException(CacheableBadRequestHttpException::class);
$this->expectExceptionMessage('Invalid nested filtering. The field `photo`, given in the path `photo` is incomplete, it must end with one of the following specifiers: `id`, `meta.alt`, `meta.title`, `meta.width`, `meta.height`.');
$resource_type = $this->resourceTypeRepository->get('node', 'painting');
Filter::createFromQueryParameter(['photo' => ''], $resource_type, $this->fieldResolver);
}
/**
* @covers ::queryCondition
*/
public function testInvalidFilterPathDueMissingMetaPrefixReferenceFieldWithMetaProperties() {
$this->expectException(CacheableBadRequestHttpException::class);
$this->expectExceptionMessage('Invalid nested filtering. The property `alt`, given in the path `photo.alt` belongs to the meta object of a relationship and must be preceded by `meta`.');
$resource_type = $this->resourceTypeRepository->get('node', 'painting');
Filter::createFromQueryParameter(['photo.alt' => ''], $resource_type, $this->fieldResolver);
}
/**
* @covers ::queryCondition
*/
public function testInvalidFilterPathDueToMissingPropertyNameReferenceFieldWithoutMetaProperties() {
$this->expectException(CacheableBadRequestHttpException::class);
$this->expectExceptionMessage('Invalid nested filtering. The field `uid`, given in the path `uid` is incomplete, it must end with one of the following specifiers: `id`.');
$resource_type = $this->resourceTypeRepository->get('node', 'painting');
Filter::createFromQueryParameter(['uid' => ''], $resource_type, $this->fieldResolver);
}
/**
* @covers ::queryCondition
*/
public function testInvalidFilterPathDueToNonexistentProperty() {
$this->expectException(CacheableBadRequestHttpException::class);
$this->expectExceptionMessage('Invalid nested filtering. The property `foobar`, given in the path `colors.foobar`, does not exist. Must be one of the following property names: `value`, `format`, `processed`.');
$resource_type = $this->resourceTypeRepository->get('node', 'painting');
Filter::createFromQueryParameter(['colors.foobar' => ''], $resource_type, $this->fieldResolver);
}
/**
* @covers ::queryCondition
*/
public function testInvalidFilterPathDueToElidedSoleProperty() {
$this->expectException(CacheableBadRequestHttpException::class);
$this->expectExceptionMessage('Invalid nested filtering. The property `value`, given in the path `promote.value`, does not exist. Filter by `promote`, not `promote.value` (the JSON:API module elides property names from single-property fields).');
$resource_type = $this->resourceTypeRepository->get('node', 'painting');
Filter::createFromQueryParameter(['promote.value' => ''], $resource_type, $this->fieldResolver);
}
/**
* @covers ::queryCondition
*/
public function testQueryCondition() {
// Can't use a data provider because we need access to the container.
$data = $this->queryConditionData();
$get_sql_query_for_entity_query = function ($entity_query) {
// Expose parts of \Drupal\Core\Entity\Query\Sql\Query::execute().
$o = new \ReflectionObject($entity_query);
$m1 = $o->getMethod('prepare');
$m1->setAccessible(TRUE);
$m2 = $o->getMethod('compile');
$m2->setAccessible(TRUE);
// The private property computed by the two previous private calls, whose
// value we need to inspect.
$p = $o->getProperty('sqlQuery');
$p->setAccessible(TRUE);
$m1->invoke($entity_query);
$m2->invoke($entity_query);
return (string) $p->getValue($entity_query);
};
$resource_type = $this->resourceTypeRepository->get('node', 'painting');
foreach ($data as $case) {
$parameter = $case[0];
$expected_query = $case[1];
$filter = Filter::createFromQueryParameter($parameter, $resource_type, $this->fieldResolver);
$query = $this->nodeStorage->getQuery()->accessCheck(FALSE);
// Get the query condition parsed from the input.
$condition = $filter->queryCondition($query);
// Apply it to the query.
$query->condition($condition);
// Verify the SQL query is exactly the same.
$expected_sql_query = $get_sql_query_for_entity_query($expected_query);
$actual_sql_query = $get_sql_query_for_entity_query($query);
$this->assertSame($expected_sql_query, $actual_sql_query);
// Compare the results.
$this->assertEquals($expected_query->execute(), $query->execute());
}
}
/**
* Simply provides test data to keep the actual test method tidy.
*/
protected function queryConditionData() {
// ((RED or CIRCLE) or (YELLOW and SQUARE))
$query = $this->nodeStorage->getQuery()->accessCheck(FALSE);
$or_group = $query->orConditionGroup();
$nested_or_group = $query->orConditionGroup();
$nested_or_group->condition('colors', 'red', 'CONTAINS');
$nested_or_group->condition('shapes', 'circle', 'CONTAINS');
$or_group->condition($nested_or_group);
$nested_and_group = $query->andConditionGroup();
$nested_and_group->condition('colors', 'yellow', 'CONTAINS');
$nested_and_group->condition('shapes', 'square', 'CONTAINS');
$nested_and_group->notExists('photo.alt');
$or_group->condition($nested_and_group);
$query->condition($or_group);
return [
[
[
'or-group' => ['group' => ['conjunction' => 'OR']],
'nested-or-group' => ['group' => ['conjunction' => 'OR', 'memberOf' => 'or-group']],
'nested-and-group' => ['group' => ['conjunction' => 'AND', 'memberOf' => 'or-group']],
'condition-0' => [
'condition' => [
'path' => 'colors.value',
'value' => 'red',
'operator' => 'CONTAINS',
'memberOf' => 'nested-or-group',
],
],
'condition-1' => [
'condition' => [
'path' => 'shapes.value',
'value' => 'circle',
'operator' => 'CONTAINS',
'memberOf' => 'nested-or-group',
],
],
'condition-2' => [
'condition' => [
'path' => 'colors.value',
'value' => 'yellow',
'operator' =>
'CONTAINS',
'memberOf' => 'nested-and-group',
],
],
'condition-3' => [
'condition' => [
'path' => 'shapes.value',
'value' => 'square',
'operator' => 'CONTAINS',
'memberOf' => 'nested-and-group',
],
],
'condition-4' => [
'condition' => [
'path' => 'photo.meta.alt',
'operator' => 'IS NULL',
'memberOf' => 'nested-and-group',
],
],
],
$query,
],
];
}
/**
* Sets up the schemas.
*/
protected function setUpSchemas() {
$this->installSchema('system', ['sequences']);
$this->installSchema('node', ['node_access']);
$this->installSchema('user', ['users_data']);
$this->installSchema('user', []);
foreach (['user', 'node'] as $entity_type_id) {
$this->installEntitySchema($entity_type_id);
}
}
/**
* Creates a painting node type.
*/
protected function savePaintingType() {
NodeType::create([
'type' => 'painting',
])->save();
$this->createTextField(
'node', 'painting',
'colors', 'Colors',
FieldStorageDefinitionInterface::CARDINALITY_UNLIMITED
);
$this->createTextField(
'node', 'painting',
'shapes', 'Shapes',
FieldStorageDefinitionInterface::CARDINALITY_UNLIMITED
);
$this->createImageField('photo', 'painting');
}
/**
* Creates painting nodes.
*/
protected function savePaintings($paintings) {
foreach ($paintings as $painting) {
Node::create(array_merge([
'type' => 'painting',
], $painting))->save();
}
}
/**
* @covers ::createFromQueryParameter
* @dataProvider parameterProvider
*/
public function testCreateFromQueryParameter($case, $expected) {
$resource_type = new ResourceType('foo', 'bar', NULL);
$actual = Filter::createFromQueryParameter($case, $resource_type, $this->getFieldResolverMock($resource_type));
$conditions = $actual->root()->members();
for ($i = 0; $i < count($case); $i++) {
$this->assertEquals($expected[$i]['path'], $conditions[$i]->field());
$this->assertEquals($expected[$i]['value'], $conditions[$i]->value());
$this->assertEquals($expected[$i]['operator'], $conditions[$i]->operator());
}
}
/**
* Data provider for testCreateFromQueryParameter.
*/
public function parameterProvider() {
return [
'shorthand' => [
['uid' => ['value' => 1]],
[['path' => 'uid', 'value' => 1, 'operator' => '=']],
],
'extreme shorthand' => [
['uid' => 1],
[['path' => 'uid', 'value' => 1, 'operator' => '=']],
],
];
}
/**
* @covers ::createFromQueryParameter
*/
public function testCreateFromQueryParameterNested() {
$parameter = [
'or-group' => ['group' => ['conjunction' => 'OR']],
'nested-or-group' => [
'group' => ['conjunction' => 'OR', 'memberOf' => 'or-group'],
],
'nested-and-group' => [
'group' => ['conjunction' => 'AND', 'memberOf' => 'or-group'],
],
'condition-0' => [
'condition' => [
'path' => 'field0',
'value' => 'value0',
'memberOf' => 'nested-or-group',
],
],
'condition-1' => [
'condition' => [
'path' => 'field1',
'value' => 'value1',
'memberOf' => 'nested-or-group',
],
],
'condition-2' => [
'condition' => [
'path' => 'field2',
'value' => 'value2',
'memberOf' => 'nested-and-group',
],
],
'condition-3' => [
'condition' => [
'path' => 'field3',
'value' => 'value3',
'memberOf' => 'nested-and-group',
],
],
];
$resource_type = new ResourceType('foo', 'bar', NULL);
$filter = Filter::createFromQueryParameter($parameter, $resource_type, $this->getFieldResolverMock($resource_type));
$root = $filter->root();
// Make sure the implicit root group was added.
$this->assertEquals('AND', $root->conjunction());
// Ensure the or-group and the and-group were added correctly.
$members = $root->members();
// Ensure the OR group was added.
$or_group = $members[0];
$this->assertEquals('OR', $or_group->conjunction());
$or_group_members = $or_group->members();
// Make sure the nested OR group was added with the right conditions.
$nested_or_group = $or_group_members[0];
$this->assertEquals('OR', $nested_or_group->conjunction());
$nested_or_group_members = $nested_or_group->members();
$this->assertEquals('field0', $nested_or_group_members[0]->field());
$this->assertEquals('field1', $nested_or_group_members[1]->field());
// Make sure the nested AND group was added with the right conditions.
$nested_and_group = $or_group_members[1];
$this->assertEquals('AND', $nested_and_group->conjunction());
$nested_and_group_members = $nested_and_group->members();
$this->assertEquals('field2', $nested_and_group_members[0]->field());
$this->assertEquals('field3', $nested_and_group_members[1]->field());
}
/**
* Provides a mock field resolver.
*/
protected function getFieldResolverMock(ResourceType $resource_type) {
$field_resolver = $this->prophesize(FieldResolver::class);
$field_resolver->resolveInternalEntityQueryPath($resource_type, Argument::any(), Argument::any())->willReturnArgument(1);
return $field_resolver->reveal();
}
} |
"use client";
import Link from "next/link";
import React, { useEffect } from "react";
import { useRouter } from "next/navigation";
import axios from "axios";
import toast from "react-hot-toast";
/**
 * Login form page: collects email/password, posts to /api/users/login and
 * redirects to /profile on success.
 */
export default function LoginPage() {
  const router = useRouter();
  const [user, setUser] = React.useState({
    email: "",
    password: "",
  })
  const [buttonDisabled, setButtonDisabled] = React.useState(false);
  const [loading, setLoading] = React.useState(false);

  const onLogin = async () => {
    try {
      setLoading(true);
      const response = await axios.post("/api/users/login", user);
      console.log("Login success", response.data);
      toast.success("Login success");
      router.push("/profile");
    } catch (error: any) {
      console.log("Login failed", error.message);
      // BUG FIX: surface the server's error message when present instead of
      // the generic axios message (e.g. "Request failed with status 400").
      toast.error(error.response?.data?.error ?? error.message);
    } finally {
      setLoading(false)
    }
  }

  // Disable the submit button while either field is empty.
  useEffect(() => {
    if (user.email.length > 0 && user.password.length > 0) {
      setButtonDisabled(false)
    } else {
      setButtonDisabled(true)
    }
  }, [user]);

  return (
    <div className="flex flex-col items-center
    justify-center min-h-screen py-2">
      <h1>{loading ? "processing" : "Login"}</h1>
      <hr />
      <label htmlFor="email">email</label>
      <input
        className="mt-4 p-2 border border-gray-300 rounded-md
        focus:outline-none focus:border-blue-500 text-gray-900"
        id="email"
        type="email"
        value={user.email}
        onChange={(e) => setUser({ ...user, email: e.target.value })}
        placeholder="email"
      />
      <label htmlFor="password">password</label>
      <input
        className="mt-4 p-2 border border-gray-300 rounded-md
        focus:outline-none focus:border-blue-500 text-gray-900"
        id="password"
        type="password"
        value={user.password}
        onChange={(e) => setUser({ ...user, password: e.target.value })}
        placeholder="password"
      />
      <button
        onClick={onLogin}
        // BUG FIX: buttonDisabled was computed but never applied, so the
        // button stayed clickable with empty fields.
        disabled={buttonDisabled}
        className="mt-4 px-6 py-3 bg-blue-500 text-white font-semibold
        rounded-md shadow-md hover:bg-blue-600 focus:outline-none focus:ring-2
        focus:ring-blue-500 focus:ring-opacity-50"
      >{buttonDisabled ? "NoLogin" : "Login"}
      </button>
      <Link href="/signup">visit signup page</Link>
    </div>
  )
}
<?php
namespace Drupal\{{ machine_name }}\Plugin\Validation\Constraint;
use Symfony\Component\Validator\Constraint;
/**
* Provides {{ plugin_label|article }} constraint.
*
* @Constraint(
* id = "{{ plugin_id }}",
* label = @Translation("{{ plugin_label }}", context = "Validation"),
* )
{% if input_type == 'entity' %}
*
* @DCG
* To apply this constraint, see https://www.drupal.org/docs/drupal-apis/entity-api/entity-validation-api/providing-a-custom-validation-constraint.
{% elseif input_type == 'item_list' %}
*
* @DCG
* To apply this constraint on third party entity types implement either
* hook_entity_base_field_info_alter() or hook_entity_bundle_field_info_alter().
{% elseif input_type == 'item' %}
*
* @DCG
 * To apply this constraint to third party field types, implement
 * hook_field_info_alter().
{% endif %}
*/
class {{ class }} extends Constraint {

  /**
   * The message shown when validation fails; override per use case.
   */
  public $errorMessage = 'The error message.';

}
using MediatR;
using Microsoft.Extensions.Localization;
using Taqm.Core.Bases;
using Taqm.Core.Features.Authentication.Commands.Models;
using Taqm.Core.Features.Users.Commands.Models;
using Taqm.Core.Resources;
using Taqm.Data.Responses;
using Taqm.Service.Abstracts;
namespace Taqm.Core.Features.Authentication.Commands.Handlers
{
/// <summary>
/// Handles authentication commands (sign-in, password reset, refresh-token
/// revocation), mapping service-level status strings to localized responses.
/// </summary>
public class AuthenticationCommandHandler : ResponseHandler,
    IRequestHandler<ResetPasswordCommand, Response<string>>,
    IRequestHandler<SignInCommand, Response<JwtAuthResponse>>,
    IRequestHandler<RevokeTokenCommand, Response<bool>>
{
    #region Fields
    private readonly IStringLocalizer<SharedResources> _stringLocalizer;
    private readonly IAuthenticationService _authenticationService;
    #endregion

    #region Constructors
    public AuthenticationCommandHandler(IStringLocalizer<SharedResources> stringLocalizer,
        IAuthenticationService authenticationService) : base(stringLocalizer)
    {
        _stringLocalizer = stringLocalizer;
        _authenticationService = authenticationService;
    }
    #endregion

    #region Handlers
    /// <summary>
    /// Signs the user in and returns the JWT payload on success; translates
    /// known service status strings into localized bad-request responses.
    /// </summary>
    public async Task<Response<JwtAuthResponse>> Handle(SignInCommand request, CancellationToken cancellationToken)
    {
        var response = await _authenticationService.SignInAsyns(request.Email, request.Password);
        switch (response.Message)
        {
            case "EmailNotExist":
                return BadRequest<JwtAuthResponse>(_stringLocalizer[SharedResourcesKeys.EmailIsNotExist]);
            case "ConfirmEmail":
                return BadRequest<JwtAuthResponse>(_stringLocalizer[SharedResourcesKeys.ConfirmEmail]);
            // Both statuses surface the same user-facing message, so the
            // duplicated cases are merged.
            case "IncorrectPassword":
            case "Failed":
                return BadRequest<JwtAuthResponse>(_stringLocalizer[SharedResourcesKeys.IncorrectPassword]);
            default:
                return !response.IsAuthenticated ? BadRequest<JwtAuthResponse>(response.Message!) : Success(response);
        }
    }

    /// <summary>
    /// Resets the user's password using the reset token sent by email.
    /// </summary>
    public async Task<Response<string>> Handle(ResetPasswordCommand request, CancellationToken cancellationToken)
    {
        var resetPasswordResult =
            await _authenticationService.ResetPasswordAsync(request.Password, request.Email, request.Token);
        switch (resetPasswordResult)
        {
            case "NotFound":
                // BUG FIX: previously returned EmailIsExist, which states the
                // opposite of the actual failure (the account was not found).
                return BadRequest<string>(_stringLocalizer[SharedResourcesKeys.EmailIsNotExist]);
            case "Failed":
                return BadRequest<string>(_stringLocalizer[SharedResourcesKeys.FailedToResetPassword]);
            case "Success":
                return Success<string>(_stringLocalizer[SharedResourcesKeys.PasswordChanged]);
            default:
                return BadRequest<string>(resetPasswordResult);
        }
    }

    /// <summary>
    /// Revokes the supplied refresh token.
    /// </summary>
    public async Task<Response<bool>> Handle(RevokeTokenCommand request, CancellationToken cancellationToken)
    {
        var result = await _authenticationService.RevokeTokenAsync(request.Token);
        if (result is false)
            return BadRequest<bool>(_stringLocalizer[SharedResourcesKeys.FailedToRevoke]);
        return Success(result);
    }

    /// <summary>
    /// Not implemented in this handler; the class does not declare
    /// IRequestHandler for ForgetPasswordCommand — confirm intended owner.
    /// </summary>
    public Task<Response<string>> Handle(ForgetPasswordCommand request, CancellationToken cancellationToken)
    {
        throw new NotImplementedException();
    }
    #endregion
}
} |
//
// IconView.swift
// ICNS
//
// Created by John Notaris on 7/5/24.
//
import SwiftUI
import UniformTypeIdentifiers
/// Editor for a single `Icon`: shows the source image (or a drop/click
/// target), the icon name and output directory, plus toolbar actions that
/// write an .iconset folder and convert it to an .icns file via iconutil.
struct IconView: View {
    @Binding var icon: Icon
    // Shared alert state used by every toolbar action.
    @State private var showAlert = false
    @State private var alertMessage = ""
    @State private var alertTitle = "Success"
    // Gates "Generate ICNS": the .iconset folder must be generated first,
    // and is invalidated when the icon name changes.
    @State private var iconsGenerated = false
    var body: some View {
        VStack {
            Spacer()
            VStack {
                ZStack {
                    // Show the chosen image, or a dashed drop target.
                    if let imgData = icon.image, let img = NSImage(data: imgData) {
                        Image(nsImage: img)
                            .resizable()
                            .aspectRatio(contentMode: .fit)
                            .frame(maxWidth: 300, maxHeight: 300)
                    } else {
                        VStack {
                            Image(systemName: "plus")
                                .font(.system(size: 48))
                                .foregroundColor(.gray)
                            Spacer().frame(height: 20)
                            Text("Drop Image Here")
                                .foregroundColor(.gray)
                        }
                        .frame(maxWidth: 200, maxHeight: 200)
                        .background(
                            RoundedRectangle(cornerRadius: 25)
                                .stroke(style: StrokeStyle(lineWidth: 8, dash: [25]))
                                .foregroundColor(.gray)
                        )
                        // Accept a dropped image; store its raw data on the model.
                        .onDrop(of: [UTType.image], isTargeted: nil) { providers -> Bool in
                            providers.first?.loadDataRepresentation(forTypeIdentifier: UTType.image.identifier, completionHandler: { (data, error) in
                                if let data = data {
                                    self.icon.image = data
                                }
                            })
                            return true
                        }
                        // Clicking the empty target opens a file picker instead.
                        .onTapGesture {
                            selectImage()
                        }
                    }
                }
            }
            Spacer() // This will push the above VStack to the top
            TextField("Icon Name", text: $icon.name)
            // .padding(.horizontal)
                // Renaming invalidates any previously generated .iconset.
                .onChange(of: icon.name) {
                    iconsGenerated = false
                }
            Text("Output Directory: \(formatDirectory(url: URL(string: icon.outputDirectory ?? "")))")
                .foregroundColor(.gray)
                .lineLimit(1)
                .truncationMode(.middle)
                .frame(maxWidth: .infinity, alignment: .leading)
        }
        .padding(25)
        .alert(isPresented: $showAlert) {
            Alert(title: Text(alertTitle), message: Text(alertMessage), dismissButton: .default(Text("OK")))
        }
        .toolbar {
            ToolbarItem(placement: .principal) {
                Button(action: selectOutputDirectory) {
                    Label("Select Output Directory", systemImage: "folder.badge.plus")
                }
                .help("Select Output Directory")
            }
            ToolbarItem(placement: .principal) {
                Button(action: generateIcons) {
                    Label("Generate Icons", systemImage: "gearshape.fill")
                }
                .help("Generate Icons")
                .disabled(self.icon.image == nil || icon.outputDirectory == nil)
            }
            ToolbarItem(placement: .principal) {
                Button(action: generateICNS) {
                    Label("Generate ICNS", systemImage: "doc.badge.gearshape")
                }
                .help("Generate ICNS")
                .disabled(!iconsGenerated)
            }
            ToolbarItem(placement: .principal) {
                Button(action: clearImage) {
                    Label("Clear Icon", systemImage: "trash")
                }
                .help("Clear Icon")
            }
        }
    }
    /// Renders a directory URL for display by stripping the "file://"
    /// scheme; returns "Not Selected" when no directory has been chosen.
    func formatDirectory(url: URL?) -> String {
        guard let url = url else {
            return "Not Selected"
        }
        let path = url.absoluteString
        if let range = path.range(of: "file://") {
            return String(path[range.upperBound...])
        } else {
            return path
        }
    }
    /// Opens a file picker restricted to common bitmap formats and stores
    /// the selection as TIFF data on the model.
    func selectImage() {
        let openPanel = NSOpenPanel()
        openPanel.canChooseFiles = true
        openPanel.canChooseDirectories = false
        openPanel.allowsMultipleSelection = false
        openPanel.allowedContentTypes = [UTType.jpeg, UTType.png, UTType.gif, UTType.tiff, UTType.bmp]
        openPanel.begin { (result) in
            if result == .OK {
                if let url = openPanel.url, let nsImage = NSImage(contentsOf: url) {
                    self.icon.image = nsImage.tiffRepresentation
                }
            }
        }
    }
    /// Lets the user pick the output directory and persists a
    /// security-scoped bookmark so access survives relaunches (sandbox).
    func selectOutputDirectory() {
        let openPanel = NSOpenPanel()
        openPanel.canChooseFiles = false
        openPanel.canChooseDirectories = true
        openPanel.allowsMultipleSelection = false
        openPanel.begin { (result) in
            if result == .OK {
                if let url = openPanel.url {
                    let accessGranted = url.startAccessingSecurityScopedResource()
                    if accessGranted {
                        icon.outputDirectory = url.absoluteString
                        do {
                            let bookmarkData = try url.bookmarkData(options: .withSecurityScope, includingResourceValuesForKeys: nil, relativeTo: nil)
                            UserDefaults.standard.set(bookmarkData, forKey: "outputDirectoryBookmark")
                        } catch {
                            print("Failed to save bookmark data for \(url): \(error)")
                        }
                    } else {
                        // Handle the case where access is denied.
                        print("Access to the directory was denied.")
                    }
                }
            }
        }
    }
    /// Writes a "<name>.iconset" folder containing PNGs at each size/scale
    /// pair (512@2x yields the 1024px asset). Sets `iconsGenerated` on
    /// success so "Generate ICNS" becomes available.
    func generateIcons() {
        guard let imageData = icon.image, let image = NSImage(data: imageData), let outputDirectoryString = self.icon.outputDirectory, let outputDirectoryURL = URL(string: outputDirectoryString) else { return }
        let accessGranted = outputDirectoryURL.startAccessingSecurityScopedResource()
        if !accessGranted {
            // Handle the case where access is denied.
            self.alertMessage = "Access to the directory was denied. Select the output directory and try again."
            self.alertTitle = "Error"
            self.showAlert = true
            return
        }
        let sizes = [16, 32, 128, 256, 512]
        // Create a new .iconset folder
        let iconsetFolder = outputDirectoryURL.appendingPathComponent("\(icon.name).iconset")
        do {
            try FileManager.default.createDirectory(at: iconsetFolder, withIntermediateDirectories: true, attributes: nil)
        } catch {
            // Display error alert
            // NOTE(review): this early return skips stopAccessingSecurityScopedResource()
            // — confirm whether the balanced stop call is required here.
            self.alertMessage = "Error creating .iconset folder: \(error)"
            self.alertTitle = "Error"
            self.showAlert = true
            return
        }
        for size in sizes {
            for scale in [1, 2] {
                let scaledSize = NSSize(width: size*scale, height: size*scale)
                let newImage = image.resizeImage(to: scaledSize)
                let scaleSuffix = scale == 2 ? "@2x" : ""
                let filename = "icon_\(size)x\(size)\(scaleSuffix).png"
                let fileURL = iconsetFolder.appendingPathComponent(filename)
                newImage.saveImage(as: .png, to: fileURL)
            }
        }
        outputDirectoryURL.stopAccessingSecurityScopedResource()
        // Display success alert
        self.alertMessage = "Icons have been successfully created at \(iconsetFolder.path)!"
        self.alertTitle = "Success"
        self.iconsGenerated = true
        self.showAlert = true
    }
    /// Runs /usr/bin/iconutil to convert the generated .iconset folder into
    /// a .icns file, reporting success or the captured stderr output.
    func generateICNS() {
        guard let outputDirectoryString = self.icon.outputDirectory, let outputDirectoryURL = URL(string: outputDirectoryString) else { return }
        let accessGranted = outputDirectoryURL.startAccessingSecurityScopedResource()
        if !accessGranted {
            // Handle the case where access is denied.
            self.alertMessage = "Access to the directory was denied. Select the output directory and try again."
            self.alertTitle = "Error"
            self.showAlert = true
            return
        }
        let iconsetFolder = outputDirectoryURL.appendingPathComponent("\(icon.name).iconset")
        // Convert the .iconset folder to an .icns file
        let process = Process()
        process.executableURL = URL(fileURLWithPath: "/usr/bin/iconutil")
        process.arguments = ["-c", "icns", iconsetFolder.path]
        // Setup the pipes to capture standard output and error
        let outPipe = Pipe()
        let errorPipe = Pipe()
        process.standardOutput = outPipe
        process.standardError = errorPipe
        // NOTE(review): launch() is deprecated in favor of run(); consider
        // migrating (run() throws, so it needs do/catch).
        process.launch()
        // Wait for the process to finish
        process.waitUntilExit()
        // Read the process output
        let outputData = outPipe.fileHandleForReading.readDataToEndOfFile()
        _ = String(data: outputData, encoding: .utf8)
        // Read the process error output
        let errorData = errorPipe.fileHandleForReading.readDataToEndOfFile()
        let errorOutput = String(data: errorData, encoding: .utf8)
        // Check if the process completed successfully
        if process.terminationStatus == 0 {
            // Display success alert
            let icnsFilePath = outputDirectoryURL.appendingPathComponent("\(icon.name).icns").path
            self.alertMessage = "ICNS file has been successfully created at \(icnsFilePath)!"
            self.alertTitle = "Success"
        } else {
            // Display error alert
            self.alertMessage = "Failed to create ICNS file. Error: \(errorOutput ?? "Unknown error")"
            self.alertTitle = "Error"
        }
        self.showAlert = true
        outputDirectoryURL.stopAccessingSecurityScopedResource()
    }
    /// Resets the editor state.
    /// NOTE(review): outputDirectory is set to "" rather than nil; the
    /// display path still shows "Not Selected" because URL(string: "") is
    /// nil, but nil would match the `== nil` toolbar checks — confirm.
    func clearImage() {
        self.icon.image = nil
        self.icon.outputDirectory = ""
    }
}
import React, { useEffect } from 'react';
import { Box, Stack, Typography } from '@mui/material';
import CustomButton from './CustomButton';
import UserContext from '../Context/GlobalContext';
import Cookies from 'js-cookie';
import Axios from '../AxiosInstance';
const Navbar = ({ handleOpen = () => {}, user, setUser = () => {} }) => {
useEffect(() => {
// TODO: Implement get user
const userToken=Cookies.get('UserToken');
if(userToken==null||userToken=='undefined') return;
// 1. check if cookie is set
// 2. send a request to server
Axios.get('/me',{headers:{Authorization:`Bearer ${userToken}`}})
Axios.get('/me',{
headers:{
Authorization:`Bearer ${userToken}`,
},
}).then((res)=>{
// 3. if success, set user information
setUser({
username:res.data.data.username,
email:res.data.data.email,
});
})
}, []);
const logout = () => {
setUser();
Cookies.remove('UserToken');
};
return (
<Stack
direction="row"
alignItems="center"
justifyContent="flex-end"
spacing={2}
sx={{
position: 'sticky',
zIndex: 10,
marginBottom: '8px',
padding: '16px',
}}
>
{user ? (
<Box sx={{ display: 'flex', alignItems: 'center', gap: '2rem' }}>
<Typography>{user.username}</Typography>
<CustomButton text="Log out" handle={logout} />
</Box>
) : (
<CustomButton text="Log in" handle={handleOpen} />
)}
</Stack>
);
};
export default Navbar; |
<?php
namespace App\Http\Controllers;
use App\Http\Requests\StoreSocialWelfareRequest;
use App\Http\Requests\UpdateSocialWelfareRequest;
use App\SocialWelfare;
use Illuminate\Http\Request;
use Session;
class SocialWelfareController extends Controller
{
    /**
     * Display a listing of the resource.
     *
     * @return \Illuminate\Http\Response
     */
    public function index()
    {
        $socialWelfares = SocialWelfare::all();
        return view('admin.social_welfare.index', compact('socialWelfares'));
    }

    /**
     * Show the form for creating a new resource.
     *
     * @return \Illuminate\Http\Response
     */
    public function create()
    {
        //
    }

    /**
     * Store a newly created resource in storage.
     *
     * @param \App\Http\Requests\StoreSocialWelfareRequest $request
     * @return \Illuminate\Http\Response
     */
    public function store(StoreSocialWelfareRequest $request)
    {
        $data = SocialWelfare::create([
            'name' => $request->name,
            'status' => $request->status,
        ]);
        Session::flash('success', $data->name . ' is added');
        // BUG FIX: redirect()->with() expects a key/value pair; passing only
        // the message string flashed it as a session KEY with a null value.
        // The Session::flash() above already carries the user message.
        return redirect()->route('social_welfares.index');
    }

    /**
     * Display the specified resource.
     *
     * @param \App\SocialWelfare $socialWelfare
     * @return \Illuminate\Http\Response
     */
    public function show(SocialWelfare $socialWelfare)
    {
        //
    }

    /**
     * Show the form for editing the specified resource.
     *
     * @param \App\SocialWelfare $socialWelfare
     * @return \Illuminate\Http\Response
     */
    public function edit(SocialWelfare $socialWelfare)
    {
        //
    }

    /**
     * Update the specified resource in storage.
     *
     * @param \App\Http\Requests\UpdateSocialWelfareRequest $request
     * @param \App\SocialWelfare $socialWelfare
     * @return \Illuminate\Http\Response
     */
    public function update(UpdateSocialWelfareRequest $request, SocialWelfare $socialWelfare)
    {
        $socialWelfare->name = $request->name;
        $socialWelfare->status = $request->status;
        $socialWelfare->save();
        Session::flash('success', $socialWelfare->name . ' is updated');
        // BUG FIX: same with() misuse as store(); the flash above suffices.
        return redirect()->route('social_welfares.index');
    }

    /**
     * Remove the specified resource from storage.
     *
     * @param \App\SocialWelfare $socialWelfare
     * @return \Illuminate\Http\Response
     */
    public function destroy(SocialWelfare $socialWelfare)
    {
        $socialWelfare->delete();
        Session::flash('success', $socialWelfare->name . ' is deleted');
        // BUG FIX: same with() misuse as store(); the flash above suffices.
        return redirect()->route('social_welfares.index');
    }
}
import { PROD, fetchCookies } from "../util/consts";
import { ApiResponse } from "./api.types";
/**
 * Builds a request URL from an API path and optional query params.
 * In production the path is used as-is; in development the request is
 * pointed at `<host>:80/api`.
 * NOTE(review): param values are not URL-encoded here — callers must
 * pre-encode if values can contain reserved characters; confirm.
 */
export const url = (path: string, params?: {[key: string]: string})=>{
  // remove all slashes at end of path
  path = path.trim().replace(/(\/)*$/, "");
  const strParams = params ? Object.entries(params).reduce((prev, v, i)=>{
    return prev+(i === 0 ? "?" : "&")+`${v[0]}=${v[1]}`;
  }, "") : "";
  if(PROD)
    return path+strParams;
  // BUG FIX: String.prototype.start() does not exist, so the dev branch
  // threw a TypeError. Normalize to a single leading slash explicitly.
  const normalizedPath = path.startsWith("/") ? path : `/${path}`;
  return `${window.location.protocol}//${window.location.hostname}:80/api`+normalizedPath+strParams;
};
/**
 * RequestInit variant whose body may be any serializable value; non-string
 * bodies are JSON.stringify'd by fetchApi(). `isReconnect` flags a retry
 * attempt for callers that track it.
 */
interface CustomRequestInit extends Omit<RequestInit, "body"> {
  body?: any | BodyInit | null,
  isReconnect?: boolean
}
/**
 * Unwraps an API response: returns null for error payloads (objects
 * carrying an `error_msg` field), otherwise the value itself.
 */
export const unwrapApiResponse = <T>(val: ApiResponse<T>): T | null => {
  if (!val || typeof val !== "object" || !("error_msg" in val)) {
    return val as T;
  }
  return null;
};
/**
 * Thin wrapper over fetch(): builds the URL, JSON-encodes non-string
 * bodies, optionally attaches cookie options, and (outside production)
 * logs request/response details via apiLogger. Rethrows network errors.
 */
export const fetchApi = async (path: string, reqOpts?: CustomRequestInit, useCookies=true): Promise<Response> => {
  const JSONBody = reqOpts ? typeof reqOpts.body !== "string" : false;
  const method = reqOpts?.method??"get";
  apiLogger({data: reqOpts?.body, path, method, message: "Sending request", type: "info"});
  try {
    const resp = await fetch(url(path), {
      ...(reqOpts ?? {}),
      ...(useCookies ? fetchCookies : {}),
      body: reqOpts?.body ? (JSONBody ? JSON.stringify(reqOpts.body) : reqOpts.body) : undefined,
      headers: {
        ...(JSONBody ? {"content-type": "application/json"} : {}),
        ...(reqOpts ? reqOpts.headers : {})
      }
    });
    if(!PROD) {
      // BUG FIX: content-type was compared with === "application/json",
      // which misses values like "application/json; charset=utf-8".
      const isJson = resp.headers.get("content-type")?.includes("application/json") ?? false;
      if(!resp.ok) {
        let errorMsg: LogMessage = {message: `Status Code ${resp.status}: ${resp.statusText}`, method, path, type: "error"};
        if(isJson) {
          // BUG FIX: removed a stray `await jsonResp` — jsonResp is already
          // a resolved value at this point.
          const jsonResp = await resp.clone().json();
          if("error" in jsonResp)
            errorMsg = {message: jsonResp["error"], method, path, data: jsonResp, type: "error"};
        }
        apiLogger(errorMsg);
      } else if(isJson) {
        const jsonResp = await resp.clone().json();
        apiLogger({data: jsonResp, method, path, type: "success"});
      }
    }
    return resp;
  } catch(e) {
    if(e instanceof Error) {
      apiLogger({message: e.message, method, type: "error", path});
    } else {
      console.debug(e);
    }
    throw e;
  }
};
/**
 * Payload consumed by apiLogger(): the request path and method, an
 * optional human-readable message, optional response/request data, and a
 * severity used to color the console output.
 */
interface LogMessage {
  data?: object
  path: string,
  method: string,
  message?: string
  type: "success" | "error" | "info"
}
/**
 * Console debug logger for API traffic; no-op in production. Prints a
 * colored header line and, when present, the associated data object.
 */
function apiLogger({
  data,
  path,
  message,
  method,
  type,
}: LogMessage) {
  if (PROD)
    return;
  if(!message) {
    if(type === "success")
      message = "Successful request";
  }
  // BUG FIX: the old template interpolated `message` unconditionally, so a
  // missing message printed the literal string "undefined".
  const messagePart = message ? ` ${message}` : "";
  console.debug(`%c--Request Debugger-- %c[${method.toUpperCase()} ${type.toUpperCase()}]${messagePart} %c${path}`, "color: #818181", getTitleColor(type), "color: white");
  if(data)
    console.debug(data);
}
/**
 * Maps a log severity to the CSS color used for the console header;
 * defaults to green for "success" or when no type is given.
 */
function getTitleColor(logType?: "success" | "error" | "info") {
  switch (logType) {
    case "error":
      return "color: #d93e3e;";
    case "info":
      return "color: white;";
    default:
      return "color: #548a54;";
  }
}
export const fetchApiJson = async <T=any>(path: string, reqOpts?: CustomRequestInit, useCookies?: boolean): Promise<ApiResponse<T>> => {
const resp = await fetchApi(path, reqOpts, useCookies);
const status = resp.status;
return {...(await resp.json() as ApiResponse<T>), status};
}; |
/*
* Copyright (c) 2021 Nicholas Valenti. All rights reserved.
* This project is supplied as-is with no warranty or guarantees.
*
* Licensed under GPLv3.
*/
#ifndef CONTEXTMENU_H
#define CONTEXTMENU_H
#include <QMenu>
#include <QMap>
#include <QString>
#include <QActionGroup>
#include "card.h"
#include "carddisplaylabel.h"
/**
 * Right-click menu for a CardDisplayLabel: offers card-replacement entries
 * grouped into per-suit submenus, plus a screenshot action. Card data is
 * obtained from the owner via the requestCardAtIndex signal.
 */
class ContextMenu : public QMenu
{
    Q_OBJECT
public:
    explicit ContextMenu(QWidget* parent = 0);
    // Adds a submenu for the given suit under the given display name.
    void addSuitSubmenu(eSuit Suit, QString Name);
    // Inserts a selectable card entry into the submenu of its suit.
    void insertItemBySuit(eSuit Suit, QString Name, int Index);
protected:
    // Group holding every card-selection action.
    QActionGroup* cardActions = new QActionGroup(this);
    QAction* screenshotAction = new QAction(this);
    // Suit -> submenu lookup, populated by addSuitSubmenu().
    QMap<eSuit, QMenu*> subMenus;
    // Label that spawned the menu; set in showMenu().
    // NOTE(review): no in-class initializer — confirm showMenu() always
    // runs before actionTriggered() dereferences this.
    CardDisplayLabel* contextSource;
signals:
    void screenshot(QWidget* Target);
    // Out-parameter signal: the receiver fills OutCard for the given index.
    void requestCardAtIndex(int Index, Card *&OutCard);
    void replaceCard(CardDisplayLabel* Source, Card* OutCard);
public slots:
    virtual void showMenu(CardDisplayLabel* Source);
    virtual void actionTriggered(QAction* Action);
};
/**
 * Extended context menu adding per-card misalign/flip toggles and the
 * ability to enable or disable individual card entries.
 */
class ContextMenuHeavy : public ContextMenu
{
    Q_OBJECT
public:
    explicit ContextMenuHeavy(QWidget* parent = 0);
    // Appends the misalign/flip actions to the base menu.
    void addAdditionalActions();
private:
    // TODO consider making this a modular construction...not now though
    QAction* misalignAction = new QAction(this);
    QAction* flipAction = new QAction(this);
signals:
    void toggleMisaligned(CardDisplayLabel* Source);
    void toggleFacedown(CardDisplayLabel* Source);
public slots:
    void showMenu(CardDisplayLabel* Source) override;
    void actionTriggered(QAction* Action) override;
    // Enables/disables the entry for one card within its suit submenu.
    void setCardEnabled(int Index, eSuit Suit, bool BEnabled = true);
    void enableAllCards();
};
#endif // CONTEXTMENU_H |
package wres.testing;
import java.io.IOException;
import java.net.URI;
import java.time.Duration;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import javax.net.ssl.SSLContext;
import javax.net.ssl.X509TrustManager;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;
import org.apache.commons.lang3.tuple.Pair;
import org.junit.jupiter.api.Test;
import okhttp3.OkHttpClient;
import static org.junit.jupiter.api.Assertions.assertAll;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import wres.reading.PreReadException;
import wres.reading.ReaderUtilities;
import wres.reading.wrds.nwm.NwmRootDocument;
import wres.http.WebClient;
import wres.http.WebClientUtils;
public class WrdsNwmTest
{
private static final String WRDS_HOSTNAME;
static
{
String wrdsHostname = System.getenv( "WRDS_HOSTNAME" );
if ( Objects.nonNull( wrdsHostname ) && !wrdsHostname.isBlank() )
{
WRDS_HOSTNAME = wrdsHostname;
}
else
{
throw new ExceptionInInitializerError( "The environment variable WRDS_HOSTNAME must be set." );
}
}
private static final ObjectMapper JSON_OBJECT_MAPPER =
new ObjectMapper().registerModule( new JavaTimeModule() )
.configure( DeserializationFeature.FAIL_ON_NULL_FOR_PRIMITIVES, true );
/** Custom HttpClient to use */
private static final OkHttpClient OK_HTTP_CLIENT;
static
{
try
{
Pair<SSLContext, X509TrustManager> sslContext = ReaderUtilities.getSslContextTrustingDodSignerForWrds();
OK_HTTP_CLIENT = WebClientUtils.defaultTimeoutHttpClient()
.newBuilder()
.sslSocketFactory( sslContext.getKey().getSocketFactory(),
sslContext.getRight() )
.build();
}
catch ( PreReadException e )
{
throw new ExceptionInInitializerError( "Failed to acquire the TLS context for connecting to WRDS: "
+ e.getMessage() );
}
}
private static final WebClient WEB_CLIENT = new WebClient( true, OK_HTTP_CLIENT );
private static final URI WRDS_NWM_URI_ONE =
URI.create( "https://" + WRDS_HOSTNAME
+ "/api/nwm3.0/v3.0/ops/medium_range/streamflow/nwm_feature_id/18384141/?forecast_type=ensemble" );
private static final URI WRDS_NWM_URI_TWO =
URI.create( "https://" + WRDS_HOSTNAME
+ "/api/nwm3.0/v3.0/ops/medium_range/streamflow/nwm_feature_id/5907079/?forecast_type=ensemble" );
@Test
void canGetMinimalResponseFromWrdsNwmWithWebClient() throws IOException
{
List<Integer> retryOnThese = Collections.emptyList();
try ( WebClient.ClientResponse response = WEB_CLIENT.getFromWeb( WRDS_NWM_URI_ONE,
retryOnThese ) )
{
assertAll( () -> assertTrue( response.getStatusCode() >= 200
&& response.getStatusCode() < 300,
"Expected HTTP 2XX response." ),
() -> assertNotNull( response.getResponse(),
"Expected an InputStream" )
);
}
}
@Test
void canGetAndParseResponseFromWrdsNwmWithWebClient() throws IOException
{
List<Integer> retryOnThese = Collections.emptyList();
NwmRootDocument document;
try ( WebClient.ClientResponse response = WEB_CLIENT.getFromWeb( WRDS_NWM_URI_TWO,
retryOnThese ) )
{
// Parse the stream in the way WrdsNwmReader parses a document:
document = JSON_OBJECT_MAPPER.readValue( response.getResponse(),
NwmRootDocument.class );
assertAll( () -> assertTrue( response.getStatusCode() >= 200
&& response.getStatusCode() < 300,
"Expected HTTP 2XX response." ),
() -> assertFalse( document.getForecasts()
.isEmpty(),
"Expected more than zero forecasts" ),
() -> assertFalse( document.getVariable()
.isEmpty(),
"Expected more than zero variables" )
);
}
}
} |
import mongoose from "mongoose";
import bcrypt from "bcryptjs";
import jwt from "jsonwebtoken";
// Schema for application users.
// BUG FIX: the original misspelled the `required` option as `requre`/`require`,
// so Mongoose silently ignored it and none of these fields were actually
// enforced as required.
const userSchema = new mongoose.Schema(
  {
    firstName: {
      type: String,
      required: true,
    },
    lastName: {
      type: String,
      required: true,
    },
    phone: {
      // Stored as a 9-digit number; min/max enforce the digit count.
      type: Number,
      required: true,
      unique: true,
      min: 100000000,
      max: 999999999,
    },
    email: {
      type: String,
      required: true,
      unique: true,
    },
    password: {
      // Stored as a bcrypt hash, never plaintext.
      type: String,
      required: true,
    },
    isAdmin: {
      type: Boolean,
      required: true,
      default: false,
    },
  },
  {
    // Adds createdAt / updatedAt automatically.
    timestamps: true,
  }
);
// Generate an authentication token: a JWT whose payload carries only this
// user's document id, signed with the server secret.
userSchema.methods.generateAuthToken = async function () {
  return jwt.sign({ _id: this._id.toString() }, process.env.JWT_SECRET);
};
// Hash the password before saving. Only re-hash when the password field
// actually changed, so unrelated updates don't double-hash the stored value.
userSchema.pre("save", async function (next) {
  if (this.isModified("password")) {
    this.password = await bcrypt.hash(this.password, 10);
  }
  next();
});
// Compile the schema into the User model (backed by the "users" collection).
const User = mongoose.model("User", userSchema);
export default User;
import MD5 from 'js-md5';
export type FetchImageResult = {
width: number;
height: number;
file: File;
uuid: string;
url: string;
};
export type BaseImageSize = {
width: number;
height: number;
};
// Scales an image's intrinsic size down so it fits both the viewport and a
// 960px cap, preserving the aspect ratio. Images that already fit are
// returned at their intrinsic size.
export const getImageSize = (imageInnerSize: BaseImageSize): BaseImageSize => {
  const viewport: BaseImageSize = {
    width: window.innerWidth,
    height: window.innerHeight,
  };
  const ratio: number = imageInnerSize.width / imageInnerSize.height;
  const maxSize = 960;

  const exceedsCap =
    (imageInnerSize.width > maxSize && viewport.width > maxSize) ||
    (imageInnerSize.height > maxSize && viewport.height > maxSize);

  if (exceedsCap) {
    // Clamp the longer edge to the 960px cap.
    return ratio > 1
      ? { width: maxSize, height: maxSize / ratio }
      : { width: maxSize * ratio, height: maxSize };
  }

  if (imageInnerSize.width > viewport.width || imageInnerSize.height > viewport.height) {
    // Clamp the longer edge to the viewport.
    return ratio > 1
      ? { width: viewport.width, height: viewport.width / ratio }
      : { width: viewport.height * ratio, height: viewport.height };
  }

  return {
    width: imageInnerSize.width,
    height: imageInnerSize.height,
  };
};
/**
 * Downloads an image, measures its display size, and returns the file, an
 * MD5-based uuid of its data URL, and the original url.
 *
 * Fixes over the original: the pointless `try { } catch (err) { throw err; }`
 * wrapper was removed, and the promise now rejects when the blob cannot be
 * decoded as an image or read by the FileReader (previously it hung forever).
 */
export const fetchNetlessImageByUrl = async (url: string): Promise<FetchImageResult> => {
  const res = await fetch(url);
  const blob = await res.blob();
  const contentType = blob.type;
  const image = new Image();
  const reader = new FileReader();
  const file = new File([blob], url, { type: contentType });
  const result = await new Promise((resolve, reject) => {
    reader.readAsDataURL(blob);
    reader.onload = () => {
      image.addEventListener(
        'load',
        () => {
          const uuid = MD5(reader.result!);
          const res = getImageSize(image);
          resolve({
            width: res.width,
            height: res.height,
            file: file,
            url,
            uuid,
          });
        },
        false,
      );
      // Fail fast instead of leaving the promise pending forever when the
      // downloaded data is not a decodable image.
      image.addEventListener(
        'error',
        () => reject(new Error(`Failed to decode image: ${url}`)),
        false,
      );
      image.src = reader.result as string;
    };
    reader.onerror = () => reject(reader.error);
  });
  return result as FetchImageResult;
};
// Formats an RGB triple (each channel 0-255) as a lowercase "#rrggbb" string.
export const rgbToHexColor = (r: number, g: number, b: number): string => {
  // One channel -> zero-padded two-digit hex.
  const toHex = (channel: number): string => channel.toString(16).padStart(2, '0');
  return `#${toHex(r)}${toHex(g)}${toHex(b)}`;
};
/**
 * Mimetypes
 *
 * @see http://hul.harvard.edu/ois/////systems/wax/wax-public-help/mimetypes.htm
 * @typedef Mimetypes~Kind
 * @enum
 */
// Maps a lowercase file extension to the MIME type reported to the player.
// NOTE(review): mov/m4v are deliberately mapped to video/mp4 here — confirm
// these fallbacks match the target player's container support.
export const MimeTypesKind: Record<string, string> = {
  opus: 'video/ogg',
  ogv: 'video/ogg',
  mp4: 'video/mp4',
  mov: 'video/mp4',
  m4v: 'video/mp4',
  mkv: 'video/x-matroska',
  m4a: 'audio/mp4',
  mp3: 'audio/mpeg',
  aac: 'audio/aac',
  caf: 'audio/x-caf',
  flac: 'audio/flac',
  oga: 'audio/ogg',
  wav: 'audio/wav',
  m3u8: 'application/x-mpegURL',
  jpg: 'image/jpeg',
  jpeg: 'image/jpeg',
  gif: 'image/gif',
  png: 'image/png',
  svg: 'image/svg+xml',
  webp: 'image/webp',
};
// This file is part of www.nand2tetris.org
// and the book "The Elements of Computing Systems"
// by Nisan and Schocken, MIT Press.
// File name: projects/03/b/RAM16K.hdl
/**
* Memory of 16K registers, each 16 bit-wide. Out holds the value
* stored at the memory location specified by address. If load==1, then
* the in value is loaded into the memory location specified by address
* (the loaded value will be emitted to out from the next time step onward).
*/
CHIP RAM16K {
    IN in[16], load, address[14];
    OUT out[16];

    PARTS:
    // The top two address bits select which 4K bank receives the load signal.
    DMux4Way(in=load, sel=address[12..13], a=ram0, b=ram1, c=ram2, d=ram3);
    // Four RAM4K banks, each addressed by the low 12 address bits.
    RAM4K(in=in, load=ram0, out=ram0o, address=address[0..11]);
    RAM4K(in=in, load=ram1, out=ram1o, address=address[0..11]);
    RAM4K(in=in, load=ram2, out=ram2o, address=address[0..11]);
    RAM4K(in=in, load=ram3, out=ram3o, address=address[0..11]);
    // Route the selected bank's output to out using the same two bits.
    Mux4Way16(a=ram0o, b=ram1o, c=ram2o, d=ram3o, sel=address[12..13], out=out);
}
// HTTP front-end for the blockchain node: exposes the chain, a mining
// endpoint, and a chart page, and keeps peers in sync via the p2p server.
const express = require("express");
const bodyParser = require("body-parser");
const Blockchain = require("../blockchain");
// Port is configurable so several nodes can run on one machine.
const HTTP_PORT = process.env.HTTP_PORT || 3000;
const P2pServer = require("./p2pServer");

const app = express();
const bc = new Blockchain();
const p2pServer = new P2pServer(bc);

app.use(bodyParser.json());
app.set("view engine", "pug");

// Serve the chart page.
app.get("/grafica", (req, res) => {
  res.sendFile("page.html", { root: __dirname });
});

// Return the full chain as JSON.
app.get("/blocks", (req, res) => {
  res.json(bc.chain);
});

// Mine a block from the posted data, broadcast the updated chain to peers,
// then redirect to the chain listing.
app.post("/mine", (req, res) => {
  const block = bc.addBlock(req.body.data);
  console.log(`new block added: ${block.toString()}`);
  p2pServer.syncChains();
  res.redirect("/blocks");
});

app.listen(HTTP_PORT, () => {
  console.log("http server listening on port " + HTTP_PORT);
});
p2pServer.listen();
import type { Metadata } from "next";
import { Inter } from "next/font/google";
import "./globals.css";
import {ThemeProvider} from "../components/providers/theme-provider";
import { Toaster } from "sonner"
import ConvexClientProvider from "../components/providers/ConvexClientProvider.tsx";
import {ModalProvider} from "../components/providers/modal-provider";
import { EdgeStoreProvider } from "@/lib/edgestore";
const inter = Inter({ subsets: ["latin"] });
export const metadata: Metadata = {
title: "Notion",
description: "Generated by create next app",
};
// Root layout shared by every route: applies the Inter font and nests the
// app-wide providers (Convex data/auth, EdgeStore uploads, theme, toaster,
// modals) around the page content.
export default function RootLayout({
  children,
}: Readonly<{
  children: React.ReactNode;
}>) {
  // Provider order matters: ConvexClientProvider wraps EdgeStore and the
  // theme provider so inner components can use auth, uploads and theming.
  return (
    <html lang="en">
      <body className={inter.className}>
        <ConvexClientProvider>
          <EdgeStoreProvider>
            <ThemeProvider
              attribute="class"
              defaultTheme="system"
              enableSystem
              disableTransitionOnChange
              storageKey="jotion-theme-2"
            >
              <Toaster position="bottom-center" />
              <ModalProvider />
              {children}
            </ThemeProvider>
          </EdgeStoreProvider>
        </ConvexClientProvider>
      </body>
    </html>
  );
}
package dev.gnomebot.app.server.handler;
import dev.gnomebot.app.App;
import dev.gnomebot.app.Assets;
import dev.gnomebot.app.server.HTTPResponseCode;
import dev.gnomebot.app.server.ServerRequest;
import dev.gnomebot.app.util.SnowFlake;
import dev.gnomebot.app.util.URLRequest;
import dev.latvian.apps.webutils.ImageUtils;
import dev.latvian.apps.webutils.json.JSONArray;
import dev.latvian.apps.webutils.json.JSONObject;
import dev.latvian.apps.webutils.json.JSONResponse;
import dev.latvian.apps.webutils.net.FileResponse;
import dev.latvian.apps.webutils.net.MimeType;
import dev.latvian.apps.webutils.net.Response;
import discord4j.core.util.ImageUtil;
import discord4j.rest.util.Image;
import io.javalin.http.HttpStatus;
import io.javalin.http.NotFoundResponse;
import javax.imageio.ImageIO;
import java.awt.RenderingHints;
import java.awt.image.BufferedImage;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.nio.file.Files;
import java.util.HashSet;
import static discord4j.rest.util.Image.Format.GIF;
import static discord4j.rest.util.Image.Format.PNG;
public class InfoHandlers {
public static final int[] VALID_SIZES = {16, 32, 64, 128, 256, 512, 1024, 2048, 4096};
public static Response ping(ServerRequest request) {
return JSONResponse.SUCCESS;
}
public static Response user(ServerRequest request) {
var size = request.query("size").asString("128");
var id = request.getSnowflake("user");
// App.info("Getting info for " + id.asString());
var json = JSONObject.of();
json.put("id", SnowFlake.str(id));
var userData = request.app.discordHandler.getUserData(id);
if (userData != null) {
json.put("name", userData.username());
json.put("discriminator", userData.discriminator());
json.put("avatar_url", App.url("api/info/avatar/" + id + "/" + size));
json.put("bot", userData.bot().toOptional().orElse(false));
} else {
json.put("name", "Deleted User");
json.put("discriminator", "0000");
json.put("avatar_url", App.url("api/info/avatar/" + id + "/" + size));
json.put("bot", false);
}
return JSONResponse.of(json);
}
public static Response avatar(ServerRequest request) throws Exception {
var size = Integer.parseInt(request.variable("size"));
if (size <= 0 || size > 4096) {
throw HTTPResponseCode.BAD_REQUEST.error("size_too_large");
}
var id = request.getSnowflake("user");
String url;
var sizeToRetrieve = 4096;
for (var validSize : VALID_SIZES) {
if (validSize >= size && validSize < sizeToRetrieve) {
sizeToRetrieve = validSize;
}
}
var userData = request.app.discordHandler.getUserData(id);
var avatar = userData == null ? null : userData.avatar().orElse(null);
if (avatar != null && avatar.startsWith("a_")) {
url = ImageUtil.getUrl("avatars/" + id + "/" + avatar, GIF) + "?size=" + sizeToRetrieve;
} else if (avatar != null) {
url = ImageUtil.getUrl("avatars/" + id + "/" + avatar, PNG) + "?size=" + sizeToRetrieve;
} else {
url = ImageUtil.getUrl("embed/avatars/" + (userData == null ? 0 : (Integer.parseInt(userData.discriminator()) % 5)), PNG) + "?size=" + sizeToRetrieve;
}
BufferedImage img;
try {
img = ImageUtils.resize(URLRequest.of(url).toImage().block(), size, size);
} catch (Exception ex) {
img = new BufferedImage(size, size, BufferedImage.TYPE_INT_ARGB);
for (var x = 0; x < size; x++) {
for (var y = 0; y < size; y++) {
img.setRGB(x, y, 0xFF000000);
}
}
}
return FileResponse.png(img);
}
public static Response emoji(ServerRequest request) throws Exception {
var size = Integer.parseInt(request.variable("size"));
if (size <= 0 || size > 4096) {
throw HTTPResponseCode.BAD_REQUEST.error("size_too_large");
}
var id = request.getSnowflake("emoji");
var url = ImageUtil.getUrl("emojis/" + id, Image.Format.PNG);
var sizeToRetrieve = 4096;
for (var validSize : VALID_SIZES) {
if (validSize >= size && validSize < sizeToRetrieve) {
sizeToRetrieve = validSize;
}
}
BufferedImage img;
try {
img = ImageUtils.resize(URLRequest.of(url).toImage().block(), size, size);
} catch (Exception ex) {
img = new BufferedImage(size, size, BufferedImage.TYPE_INT_ARGB);
for (var x = 0; x < size; x++) {
for (var y = 0; y < size; y++) {
img.setRGB(x, y, 0xFF000000);
}
}
}
return FileResponse.png(img);
}
public static Response define(ServerRequest request) throws Exception {
var word = request.variable("word");
var json = JSONObject.of();
json.put("found", false);
json.put("word", word);
try {
var data0 = URLRequest.of("https://api.dictionaryapi.dev/api/v2/entries/en/" + word).toJsonArray().blockEither();
if (data0.isRight() && data0.right().getMessage().startsWith("Error 404")) {
return JSONResponse.of(json);
}
var firstWord = data0.left().asObject(0).asString("word");
json.put("word", firstWord);
var phonetics = json.addArray("phonetics");
var meanings = json.addArray("meanings");
var phoneticsSet = new HashSet<String>();
for (var data1 : data0.left()) {
var data = (JSONObject) data1;
if (data.asString("word").equals(firstWord)) {
for (var o0 : data.asArray("phonetics").ofObjects()) {
var text = o0.asString("text").trim();
if (!phoneticsSet.contains(text)) {
phoneticsSet.add(text);
var o = phonetics.addObject();
o.put("text", text);
o.put("audio_url", o0.containsKey("audio") ? ("https:" + o0.asString("audio").trim()) : "");
}
}
for (var o0 : data.asArray("meanings").ofObjects()) {
var type = o0.asString("partOfSpeech").trim();
for (var o1 : o0.asArray("definitions").ofObjects()) {
var o = meanings.addObject();
o.put("type", type);
o.put("definition", o1.asString("definition").trim());
o.put("example", o1.asString("example").trim());
o.put("synonyms", o1.containsKey("synonyms") ? o1.get("synonyms") : JSONArray.of());
o.put("antonyms", o1.containsKey("antonyms") ? o1.get("antonyms") : JSONArray.of());
}
}
}
}
json.put("found", true);
} catch (Exception ex) {
ex.printStackTrace();
}
return JSONResponse.of(json);
}
public static Response videoThumbnail(ServerRequest request) throws Exception {
var message = request.app.discordHandler.client.getMessageById(SnowFlake.convert(request.getSnowflake("channel")), SnowFlake.convert(request.getSnowflake("message"))).block();
var attachmentId = request.getSnowflake("attachment");
for (var a : message.getAttachments()) {
if (a.getId().asLong() == attachmentId) {
var img = URLRequest.of(a.getProxyUrl() + "?format=jpeg").toImage().block();
var img1 = ImageIO.read(new ByteArrayInputStream(Files.readAllBytes(Assets.VIDEO.getFilePath())));
var g = img.createGraphics();
g.setRenderingHint(RenderingHints.KEY_INTERPOLATION, RenderingHints.VALUE_INTERPOLATION_BILINEAR);
var tsize = Math.min(img.getWidth(), img.getHeight()) / 4;
// draw img1 at center of img, scaled 0.25x the height of img
g.drawImage(img1, (img.getWidth() - tsize) / 2, (img.getHeight() - tsize) / 2, tsize, tsize, null);
g.dispose();
var out = new ByteArrayOutputStream();
ImageIO.write(img, "jpeg", out);
return FileResponse.of(HttpStatus.OK, MimeType.JPEG, out.toByteArray());
}
}
throw new NotFoundResponse("Attachment not found");
}
} |
import 'package:flutter/material.dart';
import 'package:supabase_flutter/supabase_flutter.dart';
import 'package:go_router/go_router.dart';
import 'package:sae_mobile/utils/supabaseService.dart';
/// Page that lets a visitor create a Supabase account with email/password.
class CreateAccountPage extends StatefulWidget {
  const CreateAccountPage({Key? key}) : super(key: key);

  @override
  _CreateAccountPageState createState() => _CreateAccountPageState();
}
class _CreateAccountPageState extends State<CreateAccountPage> {
  final TextEditingController _emailController = TextEditingController();
  final TextEditingController _passwordController = TextEditingController();
  final GlobalKey<FormState> _formKey = GlobalKey<FormState>();

  @override
  void dispose() {
    // Text controllers hold native resources and must be released
    // with the widget (the original leaked them).
    _emailController.dispose();
    _passwordController.dispose();
    super.dispose();
  }

  /// Validates the form, registers the account with Supabase and reports the
  /// outcome via a snack bar. Navigates home on success.
  Future<void> _createAccount() async {
    if (!_formKey.currentState!.validate()) {
      return;
    }

    final email = _emailController.text.trim();
    final password = _passwordController.text.trim();

    try {
      final supabaseService = SupabaseService();
      await supabaseService.client.auth.signUp(email: email, password: password);

      // The widget may have been removed while awaiting the network call;
      // using the BuildContext afterwards without this check is an error.
      if (!mounted) return;
      ScaffoldMessenger.of(context).showSnackBar(const SnackBar(
        content: Text('Un mail de confirmation vous a été envoyé.'),
      ));
      // Navigate to the home page after successful sign-up.
      context.go('/');
    } catch (error) {
      if (!mounted) return;
      ScaffoldMessenger.of(context).showSnackBar(SnackBar(
        content: Text('$error'),
      ));
    }
  }

  @override
  Widget build(BuildContext context) {
    return Scaffold(
      appBar: AppBar(
        title: Text('Création de compte'),
      ),
      body: Padding(
        padding: const EdgeInsets.all(16.0),
        child: Form(
          key: _formKey,
          child: Column(
            mainAxisAlignment: MainAxisAlignment.center,
            children: [
              TextFormField(
                controller: _emailController,
                keyboardType: TextInputType.emailAddress,
                decoration: InputDecoration(labelText: 'Email'),
                validator: (value) {
                  if (value == null || value.isEmpty) {
                    return 'Please enter your email';
                  }
                  return null;
                },
              ),
              TextFormField(
                controller: _passwordController,
                obscureText: true,
                decoration: InputDecoration(labelText: 'Password'),
                validator: (value) {
                  if (value == null || value.isEmpty) {
                    return 'Please enter your password';
                  }
                  return null;
                },
              ),
              SizedBox(height: 20),
              ElevatedButton(
                onPressed: _createAccount,
                child: Text('Créer mon compte'),
              ),
              SizedBox(height: 20),
              ElevatedButton(
                onPressed: () {
                  // Naviguer vers la page de login
                  context.go('/login');
                },
                child: Text("J'ai déjà un compte"),
              ),
            ],
          ),
        ),
      ),
    );
  }
}
// Copyright 2023 Intrinsic Innovation LLC
// Intrinsic Proprietary and Confidential
// Provided subject to written agreement between the parties.
#ifndef INTRINSIC_PLATFORM_COMMON_BUFFERS_RT_QUEUE_BUFFER_H_
#define INTRINSIC_PLATFORM_COMMON_BUFFERS_RT_QUEUE_BUFFER_H_
#include <atomic>
#include <cstddef>
#include <memory>
#include "absl/base/attributes.h"
#include "absl/functional/function_ref.h"
#include "absl/log/check.h"
#include "absl/memory/memory.h"
#include "absl/types/optional.h"
// IWYU pragma: no_forward_declare absl::FunctionRef
namespace intrinsic {
namespace internal {
// A buffer for performing spsc-queue style automatic operations.
template <typename T>
class RtQueueBuffer {
 public:
  explicit RtQueueBuffer(size_t capacity);
  // Constructs the buffer and runs init_function on every preallocated slot.
  RtQueueBuffer(size_t capacity, absl::FunctionRef<void(T*)> init_function);

  // Gets a pointer to the front element, or nullptr if empty. After a call to
  // Front(), must call DropFront() or KeepFront() prior to subsequent calls to
  // Front().
  ABSL_MUST_USE_RESULT T* Front();

  // Removes the front element; no-op if the queue is empty.
  void DropFront();

  // Keeps the front element.
  void KeepFront();

  // Gets a pointer to the next available element, or nullptr if the queue is
  // full. The element should be set and then FinishInsert must be called.
  ABSL_MUST_USE_RESULT T* PrepareInsert();

  // Make the element referenced by the return value of PrepareInsert
  // available to the reader.
  void FinishInsert();

  // Returns true when the buffer is empty. Thread-safe.
  bool Empty() const { return size_.load(std::memory_order_acquire) == 0; }

  // Returns true when the buffer is full. Thread-safe.
  bool Full() const {
    return size_.load(std::memory_order_acquire) == capacity_;
  }

  // Returns the capacity of the buffer.
  size_t Capacity() const { return capacity_; }

  // Runs init_function on every slot of the backing storage.
  void InitElements(absl::FunctionRef<void(T*)> init_function);

 private:
  // Increases the number of messages stored in the buffer by 1.
  void IncreaseSize() { size_.fetch_add(1, std::memory_order_seq_cst); }

  // Decreases the number of messages stored in the buffer by 1.
  void DecreaseSize() { size_.fetch_sub(1, std::memory_order_seq_cst); }

  // Producer-side flag: a PrepareInsert is awaiting its FinishInsert.
  bool insert_in_progress_ = false;
  // Index of the next slot to write (written only by the producer side).
  size_t head_ = 0;
  // Consumer-side flag: a Front is awaiting KeepFront/DropFront.
  bool front_accessed_ = false;
  // Index of the current front slot (written only by the consumer side).
  size_t tail_ = 0;

  // Memory used as a ring buffer.
  std::atomic_size_t size_ = 0;  // number of messages stored in the buffer
  const size_t capacity_;        // the length of the buffer
  std::unique_ptr<T[]> buffer_;
};
// Implementation of RealtimeQueue functions.
// Allocates storage for `capacity` default-constructed elements.
template <typename T>
RtQueueBuffer<T>::RtQueueBuffer(size_t capacity)
    : capacity_(capacity), buffer_(std::make_unique<T[]>(capacity_)) {}
// Allocates storage and additionally runs init_function on every slot.
template <typename T>
RtQueueBuffer<T>::RtQueueBuffer(size_t capacity,
                                absl::FunctionRef<void(T*)> init_function)
    : capacity_(capacity), buffer_(std::make_unique<T[]>(capacity_)) {
  InitElements(init_function);
}
// Applies the initializer to every slot of the backing storage.
template <typename T>
void RtQueueBuffer<T>::InitElements(absl::FunctionRef<void(T*)> init_function) {
  for (size_t slot = 0; slot < Capacity(); ++slot) {
    init_function(&buffer_[slot]);
  }
}
// Implementation of RealtimeQueue::Reader functions.
// Returns a pointer to the oldest element, or nullptr when empty.
template <typename T>
T* RtQueueBuffer<T>::Front() {
  // Enforce the Front -> KeepFront/DropFront protocol: at most one
  // outstanding front access at a time.
  CHECK(!front_accessed_)
      << "KeepFront or DropFront must be called before another "
         "call to Front is allowed.";
  if (Empty()) {
    return nullptr;
  }
  front_accessed_ = true;
  return &buffer_[tail_];
}
// Releases the outstanding Front() access without consuming the element;
// the same element will be returned by the next Front() call.
template <typename T>
void RtQueueBuffer<T>::KeepFront() {
  CHECK(front_accessed_) << "Front must be called before KeepFront.";
  front_accessed_ = false;
}
// Consumes the element returned by the preceding Front() call.
template <typename T>
void RtQueueBuffer<T>::DropFront() {
  CHECK(front_accessed_) << "Front must be called before DropFront.";
  front_accessed_ = false;
  // NOTE(review): size_ is decremented before tail_ advances; tail_ is read
  // only on the consumer side here, so the producer at worst observes a
  // momentarily smaller size — confirm against the intended spsc contract.
  DecreaseSize();
  tail_ = (tail_ + 1) % Capacity();
}
// Returns a pointer to the next writable slot, or nullptr when full.
template <typename T>
T* RtQueueBuffer<T>::PrepareInsert() {
  // Enforce the PrepareInsert -> FinishInsert protocol: at most one
  // outstanding insertion at a time.
  CHECK(!insert_in_progress_)
      << "FinishInsert must be called before another call to "
         "PrepareInsert is allowed.";
  if (Full()) {
    return nullptr;
  }
  insert_in_progress_ = true;
  return &buffer_[head_];
}
// Publishes the slot handed out by the preceding PrepareInsert() call.
template <typename T>
void RtQueueBuffer<T>::FinishInsert() {
  CHECK(insert_in_progress_)
      << "PrepareInsert must be called before FinishInsert.";
  insert_in_progress_ = false;
  // head_ advances before size_ is incremented, so the reader never sees a
  // slot that has not yet been fully handed back.
  head_ = (head_ + 1) % Capacity();
  IncreaseSize();
}
} // namespace internal
} // namespace intrinsic
#endif // INTRINSIC_PLATFORM_COMMON_BUFFERS_RT_QUEUE_BUFFER_H_ |
#include <algorithm>
#include <cstdio>
#include <cstdlib>
#include <cstring>
#include <fcntl.h>
#include <tuple>
#include <unistd.h>
#include <vector>
#include "../syscall.h"
// Opens filepath read-only and maps its whole contents into memory via OS
// syscalls. Returns {file descriptor, mapped address, file size}.
// Exits the process with an error message on any syscall failure.
std::tuple<int, char*, size_t> MapFile(const char* filepath){
  SyscallResult res = SyscallOpenFile(filepath, O_RDONLY);
  if (res.error) {
    fprintf(stderr, "%s: %s\n", strerror(res.error), filepath);
    exit(1);
  }
  const int fd = res.value;

  size_t filesize;
  res = SyscallMapFile(fd, &filesize, 0);
  if(res.error){
    fprintf(stderr, "%s\n", strerror(res.error));
    exit(1);
  }
  return {fd, reinterpret_cast<char*>(res.value), filesize};
}
// Opens a window sized for w x h character cells (8x16 pixels per cell, plus
// frame margins) and draws a recessed border around the text area.
// Exits the process on failure; otherwise returns the window's layer id.
uint64_t OpenTextWindow(int w, int h, const char* title){
  SyscallResult res = SyscallOpenWindow(8 + 8 * w, 28 + 16 * h, 10, 10, title);
  if(res.error){
    fprintf(stderr, "%s\n", strerror(res.error));
    exit(1);
  }
  const uint64_t layer_id = res.value;

  auto fill_rect = [layer_id](int x, int y, int w, int h, uint32_t c) {
    SyscallWinFillRectangle(layer_id, x, y, w, h, c);
  };
  // Dark lines on the top/left edges and light lines on the bottom/right
  // give the text area an inset look.
  fill_rect(3, 23, 1 + 8*w, 1, 0x666666);
  fill_rect(3, 24, 1, 1 + 16*h, 0x666666);
  fill_rect(4, 25 + 16*h, 1 + 8*w, 1, 0xcccccc);
  fill_rect(5 + 8*w, 24, 1, 1 + 16*h, 0xcccccc);
  return layer_id;
}
using LinesType = std::vector<std::pair<const char*, size_t>>;

// Splits the buffer [p, p+len) into lines at '\n' (terminator excluded).
// Each entry points into the original buffer. A trailing fragment without a
// final newline is included as its own line.
LinesType FindLines(const char* p, size_t len) {
  LinesType lines;
  const char* const end = p + len;
  const char* cur = p;
  while (cur < end) {
    const char* lf = std::find(cur, end, '\n');
    if (lf == end) {
      // Last line has no terminating newline.
      lines.push_back({cur, static_cast<size_t>(end - cur)});
      break;
    }
    lines.push_back({cur, static_cast<size_t>(lf - cur)});
    cur = lf + 1;
  }
  return lines;
}
// Returns the byte length of the UTF-8 sequence whose leading byte is c,
// or 0 if c is not a valid lead byte (e.g. a continuation byte 0x80-0xbf
// or the invalid range 0xf8-0xff).
int CountUTFSize(uint8_t c){
  if (c < 0x80) return 1;  // ASCII
  if (c < 0xc0) return 0;  // continuation byte, not a lead byte
  if (c < 0xe0) return 2;
  if (c < 0xf0) return 3;
  if (c < 0xf8) return 4;
  return 0;
}
// Copies at most w display columns of the UTF-8 text src into dst, expanding
// tabs to the next multiple of `tab` columns; non-ASCII characters count as
// two columns. The result is always NUL-terminated. Copying stops at
// src_size bytes, a NUL byte, the column limit, a full dst buffer, or an
// invalid UTF-8 lead byte.
//
// FIXES: the original loop condition `while(*src)` could read past the
// src_size slice (the source is a memory-mapped file slice, not necessarily
// NUL-terminated), and an invalid lead byte (CountUTFSize == 0) made the loop
// spin in place without advancing src.
void copyUTF8String(char* dst, size_t dst_size,
                    const char* src, size_t src_size,
                    int w, int tab){
  int x = 0;  // current display column
  const auto src_end = src + src_size;
  const auto dst_end = dst + dst_size;
  while(src < src_end && *src){
    if(*src == '\t'){
      // Advance to the next tab stop, emitting spaces.
      int spaces = tab - (x % tab);
      if (dst + spaces >= dst_end){
        break;
      }
      memset(dst, ' ', spaces);
      ++src;
      dst += spaces;
      x += spaces;
      continue;
    }

    if(static_cast<uint8_t>(*src) < 0x80){
      x += 1;  // ASCII: one column
    } else {
      x += 2;  // non-ASCII glyphs are treated as double-width
    }
    if (x >= w) {
      break;
    }

    int c = CountUTFSize(*src);
    if (c == 0) {
      break;  // invalid lead byte: stop instead of looping in place
    }
    if (src + c > src_end || dst + c >= dst_end){
      break;
    }
    memcpy(dst, src, c);
    src += c;
    dst += c;
  }
  *dst = '\0';
}
// Clears the text area and draws up to h lines of the file, starting at
// lines[start_line], one 8x16 character row at a time.
void DrawLines(const LinesType& lines, int start_line,
               uint64_t layer_id, int w, int h, int tab){
  char buf[1024];
  // White-fill the client area before redrawing.
  SyscallWinFillRectangle(layer_id, 4, 24, 8*w, 16*h, 0xffffff);
  for (int i = 0; i < h; ++i){
    int line_index = start_line + i;
    // Skip rows that fall outside the file.
    if(line_index < 0 || lines.size() <= line_index){
      continue;
    }
    const auto [line, line_len] = lines[line_index];
    copyUTF8String(buf, sizeof(buf), line, line_len, w, tab);
    SyscallWinWriteString(layer_id, 4, 24 + 16 * i, 0x000000, buf);
  }
}
// Blocks until the next relevant app event. Returns {true, 0} for a quit
// event, {false, keycode} for a key press, and {false, 0} on a read error.
// NOTE(review): the parameter h is unused in this function — confirm before
// removing it, since callers pass the window height.
std::tuple<bool, int> WaitEvent(int h){
  AppEvent events[1];
  while (true) {
    auto [n, err] = SyscallReadEvent(events, 1);
    if(err){
      fprintf(stderr, "ReadEvent failed: %s\n", strerror(err));
      return {false, 0};
    }
    if(events[0].type == AppEvent::kQuit){
      return {true, 0};
    } else if(events[0].type == AppEvent::kKeyPush &&
              events[0].arg.keypush.press){
      return {false, events[0].arg.keypush.keycode};
    }
  }
}
// Waits for key events and adjusts *start_line accordingly, clamped to
// [0, num_lines - height]. Returns true when the user quit, false after a
// scroll change. Keys that cannot scroll (file fits, or already at the edge)
// are ignored and the wait continues.
// NOTE(review): keycodes 75/78/81/82 appear to be half-page-up /
// half-page-down / down / up — confirm against the OS keymap.
bool UpdateStartLine(int* start_line, int height, size_t num_lines){
  while(true){
    const auto [quit, keycode] = WaitEvent(height);
    if(quit){
      return quit;
    }
    // Nothing to scroll when the whole file fits in the window.
    if(num_lines < height){
      continue;
    }

    int diff;
    switch(keycode){
    case 75: diff = -height / 2; break;
    case 78: diff = height / 2; break;
    case 81: diff = 1; break;
    case 82: diff = -1; break;
    default:
      continue;
    }

    // Already at the edge in the requested direction: wait for another key.
    if((diff < 0 && *start_line == 0) ||
       (diff > 0 && *start_line == num_lines - height)){
      continue;
    }
    *start_line += diff;
    if(*start_line < 0){
      *start_line = 0;
    } else if(*start_line > num_lines - height) {
      *start_line = num_lines - height;
    }
    return false;
  }
}
// Entry point: parses -w/-h/-t options, maps the file, opens a text window
// titled with the file's base name, and runs the draw/scroll loop until quit.
extern "C" void main(int argc, char** argv){
  auto print_help = [argv](){
    fprintf(stderr, "Usage: %s [-w WIDTH] [-h HEIGHT] [-t TAB] <file>\n",
            argv[0]);
  };

  int opt;
  int width = 80, height = 20, tab = 8;
  // BUG FIX: the original wrote (opt = getopt(...) != -1), which assigned the
  // result of the comparison to opt, so any supplied option hit the default
  // branch and aborted with the usage message. The parentheses now group the
  // assignment before the comparison.
  while((opt = getopt(argc, argv, "w:h:t:")) != -1){
    switch(opt){
    case 'w': width = atoi(optarg); break;
    case 'h': height = atoi(optarg); break;
    case 't': tab = atoi(optarg); break;
    default:
      print_help();
      exit(1);
    }
  }
  if(optind >= argc){
    print_help();
    exit(1);
  }

  const char* filepath = argv[optind];
  const auto [fd, content, filesize] = MapFile(filepath);
  // Window title: the path's base name.
  const char* last_slash = strrchr(filepath, '/');
  const char* filename = last_slash ? &last_slash[1] : filepath;

  const auto layer_id = OpenTextWindow(width, height, filename);
  const auto lines = FindLines(content, filesize);
  int start_line = 0;
  // Redraw, then block for input; UpdateStartLine returns true on quit.
  while(true){
    DrawLines(lines, start_line, layer_id, width, height, tab);
    if(UpdateStartLine(&start_line, height, lines.size())){
      break;
    }
  }
  SyscallCloseWindow(layer_id);
  exit(0);
}
import { useState, useEffect } from 'react';
import { useParams } from 'react-router-dom';
import ItemList from './ItemList';
import Spinner from '../spinner/Spinner';
import Error from '../404';
import { getProducts, getProductsByCategory } from '../../firebase';
// Lists products for the current category (from the route param), or the
// whole catalogue when no category is selected. Shows a spinner while
// loading and a 404 page when nothing was found.
const ItemListContainer = () => {
  const [products, setProducts] = useState([]);
  const [loading, setLoading] = useState(true);
  const params = useParams();

  useEffect(() => {
    setLoading(true);
    // Pick the query matching the current route.
    const request = params.categoryId
      ? getProductsByCategory(params.categoryId)
      : getProducts();
    request
      .then((data) => setProducts(data))
      .catch((error) => console.log(error))
      .finally(() => setLoading(false));
  }, [params]);

  // No products after loading finished: unknown category or empty result.
  if (!products.length && !loading) {
    return <Error />;
  }

  return (
    <div id="items">
      <h1 className="text-center text-4xl py-5 font-bold bg-gray-50">
        {params.categoryId ? params.categoryId : 'Todos los productos'}
      </h1>
      <main className="px-3">
        {loading ? <Spinner /> : <ItemList products={products} />}
      </main>
    </div>
  );
};
export default ItemListContainer;
import useNav from "@/hooks/useNav"
import { forwardRef } from "react"
import Container from "./Container"
import H2 from "./H2"
import H3 from "./H3"
import Supertitle from "./Supertitle"
// Marketing section for the Purcell Business Centre: benefits of ownership,
// exit strategies, and the path-to-ownership narrative.
// NOTE(review): the component is wrapped in forwardRef but the forwarded ref
// is never attached — the element ref comes from useNav instead, and `props`
// is unused. Confirm whether forwardRef is still required by the caller.
const Ownership = forwardRef<HTMLDivElement, {}>(props => {
// Registers this section with the site nav and yields the ref to attach.
const ref = useNav('ownership')
return (
<div ref={ref} id='ownership-section'>
{/* Intro paragraph plus three columns of ownership benefits. */}
<Container className="py-24 space-y-8">
<Supertitle className="text-black max-w-[20em]">
BENEFITS OF REAL ESTATE OWNERSHIP
</Supertitle>
<div className="flex flex-wrap lg:flex-nowrap space-y-12 lg:space-y-0 lg:space-x-12">
<div className="w-full lg:w-1/3 space-y-8">
<p className='text-black'>
Whether purchasing real estate for the certainty of your business operation, or for the opportunity to invest, Purcell Business Centre offers the opportunity to own a newly upgraded strata unit, renovated by a trusted developer in a professionally managed building. As one of the highest quality industrial buildings in the Kootenays, combined with an extensive list of newly installed building and mechanical systems, the risk of significant capital repairs has been substantially minimized over the first 10 years, providing owners with carefree ownership and management.
</p>
</div>
<div className="w-1/2 lg:w-1/3 space-y-8">
<div className="space-y-4">
<H3>Build equity</H3>
<p>Build equity through monthly mortgage payments, investing in leasehold improvements, and long-term capital appreciation.</p>
</div>
<div className="space-y-4">
<H3>Lending terms</H3>
<p>As a business purchasing for your own use, take advantage of potentially favourable lending terms, such as higher Loan-to-Value, financing for leasehold improvements, and other unique benefits.</p>
</div>
</div>
<div className="w-1/2 lg:w-1/3 space-y-8">
<div className="space-y-4">
<H3>Control</H3>
<p>Secure your long-term business location by eliminating lease renewal risk, and take full charge of the property’s operational costs.</p>
</div>
<div className="space-y-4">
<H3>Tax benefits</H3>
<p>Multiple potential tax write-offs, including rental payments as a business expense, and carrying costs from holding company income. Speak to your tax consultant for unique opportunities available.</p>
</div>
</div>
</div>
</Container>
{/* Full-width photo band with a darkening overlay (the ::before layer)
    and three numbered exit strategies on top. */}
<div
className='relative before:absolute before:[left:0] before:w-full before:h-full before:[background:rgba(0,0,0,.5)] before:[content:""]'
style={{
backgroundImage: `url(/img/purcell-business-center-wide.png)`,
backgroundRepeat: 'no-repeat',
backgroundPosition: 'center',
backgroundSize: 'cover',
}}
>
{/* z-index lifts the text above the overlay pseudo-element. */}
<Container className="[z-index:2] relative text-center py-24 space-y-16">
<H2 className="text-white max-w-[28em] mx-auto">Owning your company’s real estate provides several attractive exit strategies for business owners.</H2>
<div className="flex">
<div className="w-1/3">
<H2>1</H2>
<p className="text-white max-w-[16em] mx-auto">Selling your business and retaining the real estate as an income producing asset.</p>
</div>
<div className="w-1/3">
<H2>2</H2>
<p className="text-white max-w-[16em] mx-auto">Monetizing your built up equity by selling your real estate (and potentially leasing it back).</p>
</div>
<div className="w-1/3">
<H2>3</H2>
<p className="text-white max-w-[16em] mx-auto">Selling your real estate and the business together.</p>
</div>
</div>
</Container>
</div>
{/* "Path to ownership" band: copy on the left, photo on large screens. */}
<div className="bg-primary">
<Container className="flex py-24 items-center">
<div className="space-y-8 lg:w-1/2">
<Supertitle className="text-white">
Path to ownership
</Supertitle>
<p className="text-white">
This is an exciting and important next step for your business, and MacDonald Communities will be there to assist you at each stage of the process. During construction, regular progress updates will be provided, so you can effectively plan your financing arrangements and coordinate with legal counsel in preparation of taking ownership of your unit. During corporate signage installation and office improvement work, Macdonald Communities will assist in the coordination with your contractors, so that your unit is fully move-in ready on day one. Upon occupancy, an experienced strata property manager will be available to ensure the property is well-maintained and fully managed, so you can continue to focus on the success of your core business.
</p>
</div>
<div className="hidden lg:block lg:w-1/2">
<div className="max-w-[24em] mx-auto relative concept">
<img src='/img/purcell-entrance.jpg' />
</div>
</div>
</Container>
</div>
</div>
)
})
export default Ownership
#include <iostream>
using namespace std;
// Node of a circular singly-linked list of ints.
class NodeCL{
public:
int data;
NodeCL*next;
// Construct an isolated node holding d; next stays NULL until linked.
NodeCL(int d){
this->data=d;
next=NULL;
}
// Destructor also deletes the chain hanging off `next`.
// NOTE(review): on a still-circular list this chained delete would chase
// the ring forever; callers must break the cycle (set next=NULL) before
// deleting a node — deleteNode() below does exactly that.
~NodeCL(){
int value=this->data;
if(this->next!=NULL){
delete next;
next=NULL;
}
cout<<"Memory is free for node with data "<<value<<endl;
}
};
// Insert a new node with value d into the circular list `tail`, placed
// after the first node whose data equals `element`.
//  - Empty list: the new node becomes the sole node, pointing at itself.
//  - BUG FIX: if `element` is not present, the original scan never
//    terminated (infinite loop on a circular list). We now scan at most
//    one full cycle and, when no match is found, append after `tail`.
void insertNode(NodeCL*&tail, int element, int d){
    if(tail==NULL){
        NodeCL* newNode = new NodeCL(d);
        tail = newNode;
        newNode->next = newNode; // single node points at itself
        return;
    }
    // Search one full cycle for `element`, starting at tail.
    NodeCL* curr = tail;
    do{
        if(curr->data == element){
            break;
        }
        curr = curr->next;
    } while(curr != tail);
    // If the loop wrapped without a match, curr == tail again, so the new
    // node is simply inserted after tail.
    NodeCL* newNode = new NodeCL(d);
    newNode->next = curr->next;
    curr->next = newNode;
}
// Print every element of the circular list exactly once, starting at tail.
void print(NodeCL*&tail){
    if(tail==NULL){
        cout<<"list is empty"<<endl;
        // BUG FIX: the original fell through after the message and
        // dereferenced the NULL tail in the do-while below.
        return;
    }
    NodeCL* temp = tail;
    do{
        cout<<temp->data<<" ";
        temp = temp->next;
    } while(temp != tail);
    cout<<endl;
}
// Delete the first node whose data equals d from the circular list `tail`.
// Handles the single-node case (list becomes empty) and deleting the tail
// node itself (tail moves back to its predecessor).
// BUG FIX: the original search never terminated when d was absent; we now
// scan at most one full cycle and report when the value is not found.
void deleteNode(NodeCL*&tail, int d){
    if(tail==NULL){
        cout<<"List is empty"<<endl;
        return;
    }
    NodeCL* prev = tail;
    NodeCL* curr = prev->next;
    // Walk at most one full cycle looking for d (stop once curr wraps
    // back to tail, the last node examined).
    while(curr->data != d && curr != tail){
        prev = curr;
        curr = curr->next;
    }
    if(curr->data != d){
        cout<<"Node not found"<<endl;
        return;
    }
    prev->next = curr->next;
    if(curr == prev){
        tail = NULL;      // deleting the only node
    }
    else if(tail == curr){
        tail = prev;      // deleting the tail: predecessor becomes tail
    }
    curr->next = NULL;    // break the cycle so ~NodeCL frees only curr
    delete curr;
}
// Interactive driver for the circular linked list: shows a menu and loops
// offering insertion, deletion and traversal until the user declines.
int circularLLMain()
{
    NodeCL* tail = NULL;
    int opt, d, e;
    char ch = 'y';
    while (ch == 'y' || ch == 'Y')
    {
        cout<<"Menu"<<endl;
        cout<<"Press 1 for Insertion"<<endl;
        cout<<"Press 2 for deletion"<<endl;
        cout<<"Press 3 for Traversal"<<endl;
        cout<<"Enter your choice :: ";
        cin>>opt;
        switch (opt)
        {
        case 1:
            cout<<"enter the element after which you wish to insert :: ";
            cin>>e;
            cout<<"enter the data of new node :: ";
            cin>>d;
            insertNode(tail, e, d);
            break;
        case 2:
            cout<<"enter the data of the node to be deleted :: ";
            cin>>d;
            deleteNode(tail, d);
            break;
        case 3:
            print(tail);
            break;
        default:
            // Any other choice is ignored, matching the original if/else chain.
            break;
        }
        cout<<"Do you want to continue the program ?? Press y/n"<<endl;
        cin>>ch;
    }
    return 0;
}
<%@ page import="com.liferay.portal.kernel.model.User" %>
<%@ page import="java.util.List" %>
<%@ page import="djh.learn.builder.easyLiferay.service.PersonServiceUtil" %>
<%@ page import="djh.learn.builder.easyLiferay.model.Person" %>
<%@ include file="/init.jsp" %>
<%@ taglib uri = "http://java.sun.com/jsp/jstl/core" prefix = "c" %>
<script type="text/javascript" charset="utf8" src="https://code.jquery.com/jquery-3.3.1.js"></script>
<script type="text/javascript" charset="utf8" src="https://cdn.datatables.net/1.10.16/js/jquery.dataTables.min.js"></script>
<script type="text/javascript" charset="utf8" src="https://cdn.datatables.net/1.10.16/js/dataTables.jqueryui.min.js"></script>
<link rel="stylesheet" type="text/css" href="https://code.jquery.com/ui/1.12.1/themes/base/jquery-ui.css" />
<link rel="stylesheet" type="text/css" href="https://cdn.datatables.net/1.10.16/css/dataTables.jqueryui.min.css" />
<%-- Person listing rendered as a jQuery DataTable; each row has an Update
     link (render URL) and a Delete link that fires a serveResource AJAX
     call. --%>
<body>
<%-- The delete resource URL carries no row parameters, so it is identical
     for every row; build it and the AJAX helper ONCE, outside the loop.
     BUG FIX: the original redefined callServeResource() inside the
     c:forEach, so every Delete button invoked the LAST row's definition
     and always deleted the last person. The helper now takes the row's
     personId as an argument. --%>
<portlet:resourceURL var="deleteURL">
</portlet:resourceURL>
<script type="text/javascript">
function callServeResource(personId){
	AUI().use('aui-io-request', function(A){
		alert("person id is"+personId);
		A.io.request('<%=deleteURL.toString()%>', {
			method: 'post',
			data: {
				<portlet:namespace />personId: personId,
			}
		});
	});
}
</script>
<table id="example" class="table table-bordered table-striped" style="width:100%">
<thead>
<tr>
<th>ID</th>
<th>Name</th>
<th>Age</th>
<th>Married</th>
<th>Action</th>
</tr>
</thead>
<tbody>
<c:forEach items="${persons}" var="person">
<%-- The update URL is row-specific (carries personId), so it stays
     inside the loop. --%>
<portlet:renderURL var="updateURL">
<portlet:param name="personId" value="${person.personId}"/>
</portlet:renderURL>
<tr>
<td>${person.personId}</td>
<td>${person.name}</td>
<td>${person.age}</td>
<td>${person.married}</td>
<td><a href="${updateURL}" class="btn btn-secondary">Update</a> <a onclick="callServeResource('${person.personId}')" class="btn btn-danger">Delete</a></td>
</tr>
</c:forEach>
</tbody>
</table>
<script>
$(function(){
	$("#example").dataTable({
		"iDisplayLength":5, // default page size
		"aLengthMenu": [
			[5, 10, 20, -1], // per page record options
			[5, 10, 20, "All"]
		],
		"bLengthChange": true, //Customizable page size
		"bSort": true, // for Soring
		"order": [[0, 'asc']],
		"bFilter": true, //search box
		"aaSorting": [],
		"aoColumns": [{// Columns width
			"sWidth": "15%"
		}, {
			"sWidth": "15%"
		}, {
			"sWidth": "20%"
		}, {
			"sWidth": "20%"
		},{
			"sWidth": "30%"
		}],
		"bAutoWidth": false,
		"oLanguage": {
			"sSearch": "Search: ",
			"sEmptyTable": "<div class='portlet-msg-alert'>No User Found</div>" // default message for no data
		},
		"sPaginationType": "full_numbers"
	});
})
</script>
</body>
/*
* Copyright (c) 1997, 2023, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*
*/
#ifndef SHARE_RUNTIME_FIELDDESCRIPTOR_HPP
#define SHARE_RUNTIME_FIELDDESCRIPTOR_HPP
#include "oops/constantPool.hpp"
#include "oops/fieldInfo.hpp"
#include "oops/instanceKlass.hpp"
#include "oops/symbol.hpp"
#include "utilities/accessFlags.hpp"
#include "utilities/constantTag.hpp"
// A fieldDescriptor describes the attributes of a single field (instance or class variable).
// It needs the class constant pool to work (because it only holds indices into the pool
// rather than the actual info).
class fieldDescriptor {
private:
// Copy of the field's metadata record (index, flags, name/signature indices).
FieldInfo _fieldinfo;
// Constant pool of the holder class; needed to resolve the indices above.
constantPoolHandle _cp;
inline FieldInfo field() const { return _fieldinfo; };
public:
// Default-constructed descriptor is unusable until reinitialize() is called.
fieldDescriptor() {}
fieldDescriptor(InstanceKlass* ik, int index) {
reinitialize(ik, index);
}
inline Symbol* name() const;
inline Symbol* signature() const;
// Class that declares this field, recovered from the constant pool holder.
inline InstanceKlass* field_holder() const {return _cp->pool_holder(); };
inline ConstantPool* constants() const;
AccessFlags access_flags() const { return _fieldinfo.access_flags(); }
FieldInfo::FieldFlags field_flags() const { return _fieldinfo.field_flags(); }
// Mutable per-field status (JVMTI watch bits etc.), stored on the holder.
FieldStatus field_status() const { return field_holder()->fields_status()->at(_fieldinfo.index()); }
oop loader() const;
// Offset (in bytes) of field from start of instanceOop / Klass*
inline int offset() const;
Symbol* generic_signature() const;
int index() const { return _fieldinfo.index(); }
AnnotationArray* annotations() const;
AnnotationArray* type_annotations() const;
// Initial field value
inline bool has_initial_value() const;
inline int initial_value_index() const;
constantTag initial_value_tag() const; // The tag will return true on one of is_int(), is_long(), is_single(), is_double()
jint int_initial_value() const;
jlong long_initial_value() const;
jfloat float_initial_value() const;
jdouble double_initial_value() const;
oop string_initial_value(TRAPS) const;
// Field signature type
inline BasicType field_type() const;
// Access flags
bool is_private() const { return access_flags().is_private(); }
bool is_protected() const { return access_flags().is_protected(); }
bool is_static() const { return access_flags().is_static(); }
bool is_final() const { return access_flags().is_final(); }
bool is_stable() const { return field_flags().is_stable(); }
bool is_volatile() const { return access_flags().is_volatile(); }
bool is_transient() const { return access_flags().is_transient(); }
bool is_synthetic() const { return access_flags().is_synthetic(); }
// JVMTI field access / modification watchpoints (see field_status()).
bool is_field_access_watched() const { return field_status().is_access_watched(); }
bool is_field_modification_watched() const
{ return field_status().is_modification_watched(); }
bool has_initialized_final_update() const { return field_status().is_initialized_final_update(); }
bool has_generic_signature() const { return field_flags().is_generic(); }
bool is_trusted_final() const;
inline void set_is_field_access_watched(const bool value);
inline void set_is_field_modification_watched(const bool value);
inline void set_has_initialized_final_update(const bool value);
// Initialization
void reinitialize(InstanceKlass* ik, int index);
// Print
void print() const;
void print_on(outputStream* st) const;
void print_on_for(outputStream* st, oop obj);
};
#endif // SHARE_RUNTIME_FIELDDESCRIPTOR_HPP |
import React, { useEffect } from 'react';
import { useDispatch, useSelector } from 'react-redux';
import { getRockets, cancelReservation, reserveRocket } from '../redux/RocketSlice';
const Rockets = () => {
const dispatch = useDispatch();
const rockets = useSelector((state) => state.rockets.rockets);
useEffect(() => {
if (rockets.length === 0) {
dispatch(getRockets());
}
}, [dispatch, rockets]);
return (
<div>
<div className="card mb-3" style={{ maxWidth: '90vw' }}>
<div className="rockets__container d-flex-column">
{rockets.length === 0 ? (
<h2>Loading info...</h2>
) : (
<ul className="rocketslist">
{rockets.map((rocket) => (
<li key={rocket.id} className="rocketsitems">
<div className="row g-0">
<div className="col-md-4">
<img
src={rocket.flickr_images[0]}
alt={rocket.name}
className="img-fluid rounded-start"
/>
</div>
<div className="col-md-8">
<div className="card-body">
<h3 className="card-title">{rocket.rocket_name}</h3>
<p className="card-text">
<div className="btn-rocket">
{rocket.reserved && <span className="rockets__badge">Reserved</span>}
</div>
{rocket.description}
</p>
<div>
<button
type="button"
className={rocket.reserved ? 'rockets__cancel-btn' : 'rockets__reserve-btn btn btn-primary'}
onClick={() => {
dispatch(rocket.reserved
? cancelReservation(rocket.id)
: reserveRocket(rocket.id));
}}
>
{rocket.reserved ? 'Cancel Reservation' : 'Reserve Rocket'}
</button>
</div>
</div>
</div>
</div>
</li>
))}
</ul>
)}
</div>
</div>
</div>
);
};
export default Rockets; |
"""
Methods to handle date-strings and datetime objects.
The UTC timezone should be used in db by default.
Thus, all the methods return the dates relative to UTC.
"""
from typing import Union, Optional
from datetime import datetime, timedelta
import pytz
def get_today_utc_date_in_timezone(timezone: str) -> str:
    """
    Function that constructs date-string for today UTC date relative to a specific timezone

    Args:
        timezone (str): timezone

    Returns:
        str: date-string, format YYYY-MM-DD
    """
    # NOTE(review): datetime.now(ist).astimezone(pytz.utc) denotes the same
    # instant regardless of `timezone`, so the result is always today's UTC
    # date and the argument has no effect on the output. Confirm whether the
    # intent was the date *in* the given timezone instead.
    ist = pytz.timezone(timezone)
    return datetime.now(ist).astimezone(pytz.utc).strftime("%Y-%m-%d")
def get_array_of_past_dates(
    n_days: int,
    base_date: Optional[Union[datetime, str]] = None,
    timezone: Optional[str] = "America/New_York",
) -> list[str]:
    """
    Construct an array of consecutive past date-strings, newest first.

    Args:
        n_days (int):
            Number of dates in the resulting array.
        base_date (Optional[Union[datetime, str]], optional):
            Date to start from (datetime or "YYYY-MM-DD" string).
            Defaults to None, meaning today's UTC date.
        timezone (Optional[str], optional):
            Passed through to get_today_utc_date_in_timezone when
            base_date is None. Defaults to "America/New_York".

    Returns:
        list[str]: date-strings starting at base_date and stepping back one
        day at a time. Format is YYYY-MM-DD.
    """
    if base_date is None:
        base_date = get_today_utc_date_in_timezone(timezone)
    if isinstance(base_date, str):
        # BUG FIX: the original called .astimezone(pytz.utc) on the *naive*
        # parsed datetime, which interprets midnight in the machine's local
        # zone; on hosts east of UTC that shifted every date back a day.
        # Plain day arithmetic on the parsed date is what was intended.
        base_date = datetime.strptime(base_date, "%Y-%m-%d")
    return [(base_date - timedelta(days=x)).strftime("%Y-%m-%d") for x in range(n_days)]
def get_past_date(
    n_days: int,
    base_date: Optional[Union[datetime, str]] = None,
    timezone: Optional[str] = "America/New_York",
) -> str:
    """
    Return the date-string representing n_days before base_date.

    Args:
        n_days (int):
            Number of days to go back.
        base_date (Optional[Union[datetime, str]], optional):
            Date to start from (datetime or "YYYY-MM-DD" string).
            Defaults to None, meaning today's UTC date.
        timezone (Optional[str], optional):
            Passed through to get_today_utc_date_in_timezone when
            base_date is None. Defaults to "America/New_York".

    Returns:
        str: date-string. Format is YYYY-MM-DD.
    """
    if base_date is None:
        base_date = get_today_utc_date_in_timezone(timezone)
    if isinstance(base_date, str):
        # BUG FIX: the original's .astimezone(pytz.utc) on the naive parsed
        # datetime applied the machine-local zone and could shift the date
        # by a day on hosts east of UTC; parse naively instead.
        base_date = datetime.strptime(base_date, "%Y-%m-%d")
    return (base_date - timedelta(days=n_days)).strftime("%Y-%m-%d")
def get_future_date(
    n_days: int,
    base_date: Optional[Union[datetime, str]] = None,
    timezone: Optional[str] = "America/New_York",
) -> str:
    """
    Return the date-string representing n_days after base_date.

    Args:
        n_days (int):
            Number of days to go forward.
        base_date (Optional[Union[datetime, str]], optional):
            Date to start from (datetime or "YYYY-MM-DD" string).
            Defaults to None, meaning today's UTC date.
        timezone (Optional[str], optional):
            Passed through to get_today_utc_date_in_timezone when
            base_date is None. Defaults to "America/New_York".

    Returns:
        str: date-string. Format is YYYY-MM-DD.
    """
    if base_date is None:
        base_date = get_today_utc_date_in_timezone(timezone)
    if isinstance(base_date, str):
        # BUG FIX: the original's .astimezone(pytz.utc) on the naive parsed
        # datetime applied the machine-local zone and could shift the date
        # by a day on hosts east of UTC; parse naively instead.
        base_date = datetime.strptime(base_date, "%Y-%m-%d")
    return (base_date + timedelta(days=n_days)).strftime("%Y-%m-%d")
def is_valid_date(date_string: str, date_format: Optional[str] = "%Y-%m-%d") -> bool:
    """
    Validate a date-string against the provided format.

    Args:
        date_string (str):
            date-string to validate.
        date_format (Optional[str], optional):
            format the date-string must match. Defaults to '%Y-%m-%d'.

    Returns:
        bool: True if date_string parses under date_format, False otherwise.
    """
    # BUG FIX: the original wrapped strptime and re-raised on ANY failure,
    # so the function could never return False — defeating its purpose as a
    # validator. A parse failure (ValueError) now simply yields False;
    # genuinely unexpected errors (e.g. non-string input -> TypeError)
    # still propagate.
    try:
        datetime.strptime(date_string, date_format)
        return True
    except ValueError:
        return False
def get_epoch(date_time: Union[datetime, str]) -> int:
    """
    Convert a datetime object or a YYYY-MM-DD date-string into UNIX/epoch
    time in milliseconds, relative to UTC.

    Args:
        date_time (Union[datetime, str]):
            date-string or datetime object to be converted.

    Returns:
        int: epoch time in milliseconds.
    """
    if isinstance(date_time, str):
        date_time = datetime.strptime(date_time, "%Y-%m-%d")
    epoch = datetime.utcfromtimestamp(0)
    # BUG FIX: total_seconds() * 1000.0 is a float, but the signature
    # promises int — truncate explicitly so callers get what's declared.
    return int((date_time - epoch).total_seconds() * 1000.0)
def get_date_string(epoch: int) -> str:
    """
    Convert a UTC epoch timestamp in milliseconds to a date-string.

    Args:
        epoch (int):
            UNIX/epoch representation of datetime in milliseconds (UTC).

    Returns:
        str: date-string, format YYYY-MM-DD.
    """
    moment = datetime.utcfromtimestamp(epoch / 1000)
    return moment.strftime("%Y-%m-%d")
import React from "react";
import { Link, NavLink, Outlet } from "react-router-dom";
import useAdmin from "../hooks/useAdmin";
import useInstructor from "../hooks/useInstructor";
import useStudent from "../hooks/useStudent";
import logo from "../assets/logo.png";
import {
FaListUl,
FaBookMedical,
FaFileInvoiceDollar,
FaBars,
FaBookOpen,
FaTools,
FaUserAlt,
} from "react-icons/fa";
import useTitle from "../hooks/useTitle";
const Dashboard = () => {
useTitle("sF | Dashboard");
const [isAdmin] = useAdmin();
const [isInstructor] = useInstructor();
const [isStaudent] = useStudent();
return (
<div className="drawer lg:drawer-open">
<input id="my-drawer-2" type="checkbox" className="drawer-toggle" />
<div className="drawer-content">
<div className="dashboardRight p-3 md:p-16 w-full h-full">
<label
htmlFor="my-drawer-2"
className="btn btn-primary drawer-button lg:hidden my-6"
>
<FaBars className="text-3xl"></FaBars>
</label>
<Outlet></Outlet>
</div>
</div>
<div className="drawer-side">
<label htmlFor="my-drawer-2" className="drawer-overlay"></label>
<ul className="menu m-0 p-0 w-80 h-full bg-base-200 text-base-content">
<div className="dashboardLeft col-span-2 min-h-screen bg-[#302787] p-9 px-4 text-white">
<div className="logo py-12">
<NavLink to="/">
<img className="inline-block w-[180px]" src={logo} alt="" />
</NavLink>
</div>
{isAdmin && (
<ul className="dashboardlink">
<li>
<NavLink to="/dashboard/manageclass">
{" "}
<FaTools></FaTools> Manage Classes
</NavLink>
</li>
<li>
<NavLink to="/dashboard/manageusers">
{" "}
<FaUserAlt></FaUserAlt> Manage Users
</NavLink>
</li>
</ul>
)}
{isInstructor && (
<ul className="dashboardlink">
<li>
<NavLink to="addclass">
{" "}
<FaBookMedical></FaBookMedical>Add a Class
</NavLink>
</li>
<li>
<NavLink to="myclass">
{" "}
<FaBookOpen></FaBookOpen> My Classes
</NavLink>
</li>
</ul>
)}
{isStaudent && (
<ul className="dashboardlink">
<li>
<NavLink
to="/dashboard/selectedclass"
className={({ isActive }) => (isActive ? "active" : "")}
>
<FaListUl></FaListUl> My Selected Classes
</NavLink>
</li>
<li>
<NavLink to="/dashboard/enrolledclass">
<FaBookMedical></FaBookMedical> My Enrolled Classes
</NavLink>
</li>
<li>
<NavLink to="/dashboard/paymenthistory">
<FaFileInvoiceDollar></FaFileInvoiceDollar> My Payment
History
</NavLink>
</li>
</ul>
)}
</div>
</ul>
</div>
</div>
);
};
export default Dashboard; |
// Copyright (c) Brock Allen & Dominick Baier. All rights reserved.
// Licensed under the Apache License, Version 2.0. See LICENSE in the project root for license information.
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.WebUtilities;
using System;
using System.Collections.Generic;
using System.Collections.Specialized;
using System.Diagnostics;
using System.Linq;
using System.Text;
using System.Text.Encodings.Web;
namespace IdentityServer4.Extensions
{
internal static class StringExtensions
{
    /// <summary>Joins the elements with single spaces; a null list yields "".</summary>
    [DebuggerStepThrough]
    public static string ToSpaceSeparatedString(this IEnumerable<string> list)
    {
        if (list == null)
        {
            return string.Empty;
        }

        // Equivalent to the original StringBuilder append-with-trailing-space
        // + Trim: Join produces the same string minus the trailing space,
        // and the final Trim matches the original's edge trimming.
        return string.Join(" ", list).Trim();
    }

    /// <summary>Splits a space-separated string, dropping empty entries.</summary>
    [DebuggerStepThrough]
    public static IEnumerable<string> FromSpaceSeparatedString(this string input)
    {
        input = input.Trim();
        return input.Split(new[] { ' ' }, StringSplitOptions.RemoveEmptyEntries).ToList();
    }

    /// <summary>
    /// Parses a space-separated scope string into a sorted, de-duplicated list.
    /// Returns null when the input is missing or contains no scopes.
    /// </summary>
    public static List<string> ParseScopesString(this string scopes)
    {
        if (scopes.IsMissing())
        {
            return null;
        }

        scopes = scopes.Trim();
        var parsedScopes = scopes.Split(new[] { ' ' }, StringSplitOptions.RemoveEmptyEntries).Distinct().ToList();

        if (parsedScopes.Any())
        {
            parsedScopes.Sort();
            return parsedScopes;
        }

        return null;
    }

    /// <summary>True when the value is null, empty, or whitespace.</summary>
    [DebuggerStepThrough]
    public static bool IsMissing(this string value)
    {
        return string.IsNullOrWhiteSpace(value);
    }

    /// <summary>True when the value is missing or longer than maxLength.</summary>
    [DebuggerStepThrough]
    public static bool IsMissingOrTooLong(this string value, int maxLength)
    {
        if (string.IsNullOrWhiteSpace(value))
        {
            return true;
        }
        if (value.Length > maxLength)
        {
            return true;
        }
        return false;
    }

    /// <summary>True when the value contains non-whitespace content.</summary>
    [DebuggerStepThrough]
    public static bool IsPresent(this string value)
    {
        return !string.IsNullOrWhiteSpace(value);
    }

    /// <summary>Prefixes the URL with "/" unless it already starts with one (null passes through).</summary>
    [DebuggerStepThrough]
    public static string EnsureLeadingSlash(this string url)
    {
        if (url != null && !url.StartsWith("/"))
        {
            return "/" + url;
        }

        return url;
    }

    /// <summary>Suffixes the URL with "/" unless it already ends with one (null passes through).</summary>
    [DebuggerStepThrough]
    public static string EnsureTrailingSlash(this string url)
    {
        if (url != null && !url.EndsWith("/"))
        {
            return url + "/";
        }

        return url;
    }

    /// <summary>Strips a single leading "/" if present (null passes through).</summary>
    [DebuggerStepThrough]
    public static string RemoveLeadingSlash(this string url)
    {
        if (url != null && url.StartsWith("/"))
        {
            url = url.Substring(1);
        }

        return url;
    }

    /// <summary>Strips a single trailing "/" if present (null passes through).</summary>
    [DebuggerStepThrough]
    public static string RemoveTrailingSlash(this string url)
    {
        if (url != null && url.EndsWith("/"))
        {
            url = url.Substring(0, url.Length - 1);
        }

        return url;
    }

    /// <summary>Normalizes a path: missing becomes "/", and a trailing slash is removed (except for "/").</summary>
    [DebuggerStepThrough]
    public static string CleanUrlPath(this string url)
    {
        if (String.IsNullOrWhiteSpace(url)) url = "/";

        if (url != "/" && url.EndsWith("/"))
        {
            url = url.Substring(0, url.Length - 1);
        }

        return url;
    }

    /// <summary>
    /// True for app-local URLs ("/path" or "~/path") while rejecting
    /// protocol-relative forms ("//host", "/\host") that enable open redirects.
    /// </summary>
    [DebuggerStepThrough]
    public static bool IsLocalUrl(this string url)
    {
        if (string.IsNullOrEmpty(url))
        {
            return false;
        }

        // Allows "/" or "/foo" but not "//" or "/\".
        if (url[0] == '/')
        {
            // url is exactly "/"
            if (url.Length == 1)
            {
                return true;
            }

            // url doesn't start with "//" or "/\"
            if (url[1] != '/' && url[1] != '\\')
            {
                return true;
            }

            return false;
        }

        // Allows "~/" or "~/foo" but not "~//" or "~/\".
        if (url[0] == '~' && url.Length > 1 && url[1] == '/')
        {
            // url is exactly "~/"
            if (url.Length == 2)
            {
                return true;
            }

            // url doesn't start with "~//" or "~/\"
            if (url[2] != '/' && url[2] != '\\')
            {
                return true;
            }

            return false;
        }

        return false;
    }

    /// <summary>Appends a raw query fragment, inserting "?" or "&amp;" as needed.</summary>
    [DebuggerStepThrough]
    public static string AddQueryString(this string url, string query)
    {
        if (!url.Contains("?"))
        {
            url += "?";
        }
        else if (!url.EndsWith("&"))
        {
            url += "&";
        }

        return url + query;
    }

    /// <summary>Appends name=value to the query string, URL-encoding the value.</summary>
    [DebuggerStepThrough]
    public static string AddQueryString(this string url, string name, string value)
    {
        return url.AddQueryString(name + "=" + UrlEncoder.Default.Encode(value));
    }

    /// <summary>Appends a raw fragment after "#", adding "#" only when absent.</summary>
    [DebuggerStepThrough]
    public static string AddHashFragment(this string url, string query)
    {
        if (!url.Contains("#"))
        {
            url += "#";
        }

        return url + query;
    }

    /// <summary>Parses the query-string portion of a URL into a NameValueCollection (empty when none).</summary>
    [DebuggerStepThrough]
    public static NameValueCollection ReadQueryStringAsNameValueCollection(this string url)
    {
        if (url != null)
        {
            var idx = url.IndexOf('?');
            if (idx >= 0)
            {
                url = url.Substring(idx + 1);
            }
            var query = QueryHelpers.ParseNullableQuery(url);
            if (query != null)
            {
                return query.AsNameValueCollection();
            }
        }

        return new NameValueCollection();
    }

    /// <summary>Returns "scheme://authority" for http(s) URLs; null for anything else or unparsable input.</summary>
    public static string GetOrigin(this string url)
    {
        if (url != null)
        {
            Uri uri;
            try
            {
                uri = new Uri(url);
            }
            catch (Exception)
            {
                return null;
            }

            if (uri.Scheme == "http" || uri.Scheme == "https")
            {
                return $"{uri.Scheme}://{uri.Authority}";
            }
        }

        return null;
    }
}
} |
import 'dart:convert';
import 'package:dexter_mobile/app/shared/app_colors/app_colors.dart';
import 'package:dexter_mobile/app/shared/widgets/progress_indicator.dart';
import 'package:dexter_mobile/data/address/add_address_response_model.dart';
import 'package:dexter_mobile/data/address/address_model_responsible.dart';
import 'package:dexter_mobile/domain/remote/network_services/dio_service_config/app_config.dart';
import 'package:dexter_mobile/domain/remote/network_services/dio_service_config/dio_client.dart';
import 'package:dexter_mobile/domain/remote/network_services/dio_service_config/dio_error.dart';
import 'package:dio/dio.dart';
import 'package:get/get.dart';
/// GetX controller for the user's saved addresses: fetches the address
/// list and handles add / edit calls against the `/addresses` API.
class AddressController extends GetxController{
  // Loading flags for the address list fetch (null until first request).
  bool? isLoadingAddress;
  bool? isLoadingAddressHasError;
  List<Datum>? addressResponseModel = <Datum>[].obs;

  // Nigerian states offered in the address form's state dropdown.
  List<String> state = [
    "Abia",
    "Adamawa",
    "Akwa Ibom",
    "Anambra",
    "Bauchi",
    "Bayelsa",
    "Benue",
    "Borno",
    "CrossRiver",
    "Delta",
    "Ebonyi",
    "Edo",
    "Ekiti",
    "Enugu",
    "Gombe",
    "Imo",
    "Jigawa",
    "Kaduna",
    "Kano",
    "Katsina",
    "Kebbi",
    "Kogi",
    "Kwara",
    "Lagos",
    "Nasarawa",
    "Niger",
    "Ogun",
    "Ondo",
    "Osun",
    "Oyo",
    "Plateau",
    "Rivers",
    "Sokoto",
    "Taraba",
    "Yobe",
    "Zamfara",
    "Abuja FCT"
  ];

  // Only Nigeria is currently supported.
  List<String> country = [
    "Nigeria",
  ];

  /// Fetches the user's addresses, driving the loading/error flags so the
  /// UI can show a spinner or error state.
  Future<void> getUserAddress()async{
    isLoadingAddress = true;
    isLoadingAddressHasError = false;
    addressResponseModel = null;
    update();
    try{
      final response = await NetworkProvider().call(path: "/addresses", method: RequestMethod.get);
      addressResponseModel = AddressResponseModel.fromJson(response!.data).data;
      isLoadingAddress = false;
      isLoadingAddressHasError = false;
      update();
    }on DioError catch (err) {
      final errorMessage = Future.error(ApiError.fromDio(err));
      isLoadingAddress = false;
      isLoadingAddressHasError = true;
      update();
      throw errorMessage;
    } catch (err) {
      isLoadingAddress = false;
      isLoadingAddressHasError = true;
      update();
      throw err.toString();
    }
  }

  /// Same fetch as [getUserAddress] but without toggling the loading flags;
  /// used for silent refreshes after add/edit.
  Future<void> getUserAddressWithoutLoader()async{
    update();
    try{
      final response = await NetworkProvider().call(path: "/addresses", method: RequestMethod.get);
      addressResponseModel = AddressResponseModel.fromJson(response!.data).data;
      update();
    }on DioError catch (err) {
      final errorMessage = Future.error(ApiError.fromDio(err));
      update();
      throw errorMessage;
    } catch (err) {
      update();
      throw err.toString();
    }
  }

  /// Updates address [addressId] with the given fields, then silently
  /// refreshes the list and pops the form screen.
  /// NOTE(review): the backend is called with POST on /addresses/{id} —
  /// confirm the API really expects POST (not PUT/PATCH) for updates.
  Future<void> editAddress({required String street,required String city,required String state,required String country, required String addressId})async{
    progressIndicator(Get.context!);
    try{
      var postBody = jsonEncode({
        "street": street,
        "city": city,
        "state": state,
        "country": country,
      });
      final response = await NetworkProvider().call(path: "/addresses/$addressId", method: RequestMethod.post, body: postBody);
      final data = AddAddressResponseModel.fromJson(response!.data);
      update();
      await getUserAddressWithoutLoader().then((value){
        Get.back();
        update();
        // BUG FIX: fallback text said "Withdrawal Successful" — copy-pasted
        // from the wallet flow; this is the address-edit flow.
        Get.snackbar("Success", data.message ?? "Address updated successfully", colorText: white, backgroundColor: greenPea);
        update();
      });
    }on DioError catch (err) {
      final errorMessage = Future.error(ApiError.fromDio(err));
      Get.back();
      Get.snackbar("Error", err.response?.data['message'] ?? errorMessage, colorText: white, backgroundColor: persianRed);
      update();
      throw errorMessage;
    } catch (err) {
      Get.back();
      Get.snackbar("Something Went Wrong", err.toString(), colorText: white, backgroundColor: persianRed);
      update();
      throw err.toString();
    }
  }

  /// Creates a new address, then silently refreshes the list and pops the
  /// form screen.
  Future<void> addAddress({required String street,required String city,required String state,required String country })async{
    progressIndicator(Get.context!);
    try{
      var postBody = jsonEncode({
        "street": street,
        "city": city,
        "state": state,
        "country": country,
      });
      final response = await NetworkProvider().call(path: "/addresses", method: RequestMethod.post, body: postBody);
      final data = AddAddressResponseModel.fromJson(response!.data);
      update();
      await getUserAddressWithoutLoader().then((value){
        Get.back();
        update();
        // BUG FIX: fallback text said "Withdrawal Successful" — copy-pasted
        // from the wallet flow; this is the address-add flow.
        Get.snackbar("Success", data.message ?? "Address added successfully", colorText: white, backgroundColor: greenPea);
        update();
      });
    }on DioError catch (err) {
      final errorMessage = Future.error(ApiError.fromDio(err));
      Get.back();
      Get.snackbar("Error", err.response?.data['message'] ?? errorMessage, colorText: white, backgroundColor: persianRed);
      update();
      throw errorMessage;
    } catch (err) {
      Get.back();
      Get.snackbar("Something Went Wrong", err.toString(), colorText: white, backgroundColor: persianRed);
      update();
      throw err.toString();
    }
  }

  @override
  void onInit() {
    // Warm the address list as soon as the controller is registered.
    getUserAddressWithoutLoader();
    super.onInit();
  }

  // Future<void> deleteAddressAccount({required String bankAccountId})async{
  //   try{
  //     final response = await NetworkProvider().call(path: "/vendor/bank-accounts/$bankAccountId", method: RequestMethod.delete, context: Get.context);
  //     final data = DeleteBankAccountResponse.fromJson(response!.data);
  //     await getAllBankAccountNoState().then((value){
  //       Get.back();
  //       Get.snackbar("Success", data.message ?? "Account Successfully Deleted", backgroundColor: greenPea, colorText: white);
  //       update();
  //     });
  //   }on DioError catch (err) {
  //     final errorMessage = Future.error(ApiError.fromDio(err));
  //     Get.back();
  //     throw errorMessage;
  //   } catch (err) {
  //     Get.back();
  //     throw err.toString();
  //   }
  // }
}
/* roadmap_nmea.h - Decode a NMEA sentence.
*
* LICENSE:
*
* Copyright 2002 Pascal F. Martin
*
* This file is part of RoadMap.
*
* RoadMap is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* RoadMap is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with RoadMap; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*
* SYNOPSYS:
*
* Each module that wishes to receive NMEA information must first create
* a NMEA account (roadmap_nmea_create), then subscribe to as many sentences
* as it wishes (roadmap_nmea_subscribe).
*
* The processing of the NMEA data is handled within the roadmap_nmea_decode
* function, which is designed to be used as a roadmap_input decoder (see
* roadmap_input.h).
*/
#ifndef INCLUDED__ROADMAP_NMEA__H
#define INCLUDED__ROADMAP_NMEA__H
#include <time.h>
#include "roadmap_string.h"
#define ROADMAP_NMEA_MAX_SATELLITE 16
#define ROADMAP_NMEA_QUALITY_INVALID 0
#define ROADMAP_NMEA_QUALITY_GPS 1
#define ROADMAP_NMEA_QUALITY_DGPS 2
#define ROADMAP_NMEA_QUALITY_PPS 3
#define ROADMAP_NMEA_QUALITY_OTHER 4
/* Decoded fields of a single NMEA sentence.  This is a union: exactly one
 * member is meaningful at any time, selected by the type of the sentence
 * that was just decoded (listeners subscribe per sentence, see below).
 * NOTE(review): the integer encodings of latitude/longitude/speed/steering
 * (units, fixed-point scaling) are not visible in this header -- confirm
 * against the decoder implementation before relying on them. */
typedef union {
/* RMC -- recommended minimum navigation data. */
struct {
time_t fixtime;  /* UTC time of the fix. */
char status;     /* Receiver status flag as sent by the device. */
int latitude;
int longitude;
int speed;
int steering;    /* Course over ground (naming per decoder convention). */
} rmc;
/* GGA -- fix data. */
struct {
time_t fixtime;
int latitude;
int longitude;
int quality;     /* Presumably a ROADMAP_NMEA_QUALITY_* value (see above). */
int count;       /* Number of satellites used -- TODO confirm in decoder. */
int dilution;
int altitude;
char altitude_unit[4];
} gga;
/* GLL -- geographic position (latitude/longitude). */
struct {
char status;
int mode;
int latitude;
int longitude;
} gll;
/* GSA -- active satellites and dilution of precision. */
struct {
char automatic;  /* Mode selection flag as sent by the device. */
char dimension;  /* Fix dimension indicator (2D/3D) -- confirm encoding. */
short reserved0; /* Padding/reserved; not a decoded field. */
char satellite[ROADMAP_NMEA_MAX_SATELLITE];
float dilution_position;
float dilution_horizontal;
float dilution_vertical;
} gsa;
/* GSV -- satellites in view.  GSV data spans several sentences:
 * 'index' of 'total', each carrying up to 4 satellites. */
struct {
char total;
char index;
char count;
char reserved0;  /* Padding/reserved; not a decoded field. */
char satellite[4];
char elevation[4];
short azimuth[4];
short strength[4];
} gsv;
/* VTG -- course and speed over ground. */
struct {
int steering;
int speed;
} vtg;
/* The following structures match Garmin extensions: */
/* PGRMM -- map datum currently in use. */
struct {
char datum[256];
} pgrmm;
/* PGRME -- estimated position error, per axis, with unit strings. */
struct {
int horizontal;
char horizontal_unit[4];
int vertical;
char vertical_unit[4];
int three_dimensions;
char three_dimensions_unit[4];
} pgrme;
/* RoadMap's own extensions: */
/* PXRMADD -- add a named object with a display sprite. */
struct {
RoadMapDynamicString id;
RoadMapDynamicString name;
RoadMapDynamicString sprite;
} pxrmadd;
/* PXRMMOV -- move an existing object (position and motion). */
struct {
RoadMapDynamicString id;
int latitude;
int longitude;
int speed;
int steering;
} pxrmmov;
/* PXRMDEL -- delete the object with this id. */
struct {
RoadMapDynamicString id;
} pxrmdel;
/* PXRMSUB -- list of subscribed items (at most 16 per sentence). */
struct {
int count;       /* Number of valid entries in 'subscribed'. */
struct {
RoadMapDynamicString item;
} subscribed[16];
} pxrmsub;
/* PXRMCFG -- one configuration item (category/name/value). */
struct {
RoadMapDynamicString category;
RoadMapDynamicString name;
RoadMapDynamicString value;
} pxrmcfg;
} RoadMapNmeaFields;
/* Opaque per-subscriber handle; the record layout is private to the
 * implementation file.  Obtain one via roadmap_nmea_create(). */
struct RoadMapNmeaAccountRecord;
typedef struct RoadMapNmeaAccountRecord *RoadMapNmeaAccount;
/* Create a new NMEA account identified by 'name' (see SYNOPSIS above).
 * Ownership/lifetime of the returned handle: managed by this module --
 * no matching destroy function is declared in this header. */
RoadMapNmeaAccount roadmap_nmea_create (const char *name);
/* Callback invoked when a subscribed sentence has been decoded; 'fields'
 * holds the union member matching the subscribed sentence type. */
typedef void (*RoadMapNmeaListener) (void *context,
const RoadMapNmeaFields *fields);
/* Subscribe 'account' to one sentence type, e.g. ("GP", "RMC").
 * A NULL vendor selects the standard (non-proprietary) sentence set. */
void roadmap_nmea_subscribe (const char *vendor, /* NULL means standard. */
const char *sentence,
RoadMapNmeaListener listener,
RoadMapNmeaAccount account);
/* roadmap_input-style decoder entry point (see roadmap_input.h): decodes
 * one sentence of 'length' bytes and dispatches to subscribed listeners.
 * Return value semantics are defined by the roadmap_input contract --
 * confirm in roadmap_input.h. */
int roadmap_nmea_decode (void *user_context,
void *decoder_context, char *sentence, int length);
#endif // INCLUDED__ROADMAP_NMEA__H |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.