text
stringlengths 184
4.48M
|
|---|
import AsyncStorage from "@react-native-async-storage/async-storage";
import createDataContext from "./createDataContext";
import trackerApi from "../api/tracker";
import { navigate } from "../navigationRef";
// Reducer for auth state of shape { token, errorMessage }.
// Every case must return the full shape so consumers never see a
// missing field.
const authReducer = (state, action) => {
  switch (action.type) {
    case "add_error":
      return { ...state, errorMessage: action.payload };
    case "signin":
      return {
        errorMessage: "",
        token: action.payload,
      };
    case "clear_error_message":
      return { ...state, errorMessage: "" };
    case "signout":
      return { token: null, errorMessage: "" };
    case "isSignedIn":
      // Fix: previously returned { token } alone, silently dropping
      // errorMessage and leaving the state shape inconsistent with
      // every other case.
      return { token: action.payload, errorMessage: "" };
    default:
      return state;
  }
};
// Attempts to restore a session from a persisted token. With a token,
// dispatches "signin" and goes Home; otherwise routes to Signup.
const tryLocalSignin = (dispatch) => async () => {
  const storedToken = await AsyncStorage.getItem("token");
  if (!storedToken) {
    navigate("Signup");
    return;
  }
  dispatch({ type: "signin", payload: storedToken });
  navigate("Home");
};
const clearErrorMessage = (dispatch) => () => {
dispatch({ type: "clear_error_message" });
};
// Registers a new account, then signs the user in with the returned
// token and navigates Home. On any failure, surfaces a generic error.
const signup =
  (dispatch) =>
  async ({ email, password, username }) => {
    try {
      const { data } = await trackerApi.post("/signup", {
        email,
        password,
        username,
      });
      dispatch({ type: "signin", payload: data.token });
      navigate("Home");
    } catch (err) {
      dispatch({
        type: "add_error",
        payload: "Something went wrong while signing up",
      });
    }
  };
// Authenticates an existing user, stores the token in reducer state via
// "signin", and navigates Home. Failures surface a generic error.
const signin =
  (dispatch) =>
  async ({ email, password }) => {
    try {
      const { data } = await trackerApi.post("/signin", { email, password });
      dispatch({ type: "signin", payload: data.token });
      navigate("Home");
    } catch (err) {
      dispatch({
        type: "add_error",
        payload: "Something went wrong with sign in",
      });
    }
  };
// Signs the user out: routes to the login flow, clears the persisted
// token and "remember me" value, then resets the reducer state.
const signout = (dispatch) => async () => {
// NOTE(review): navigation happens before storage is cleared — confirm
// the ordering is intentional (an interruption here leaves the token stored).
navigate("loginFlow");
await AsyncStorage.removeItem("token");
// "rememberme" is overwritten with the JSON string "null" rather than removed.
await AsyncStorage.setItem("rememberme", JSON.stringify(null));
dispatch({ type: "signout" });
};
// Wires the reducer, action creators, and initial state into a shared
// React context. NOTE(review): initial state declares `username`, but no
// reducer case ever sets it — confirm whether it is still needed.
export const { Provider, Context } = createDataContext(
authReducer,
{ signin, signup, signout, clearErrorMessage, tryLocalSignin },
{ token: null, errorMessage: "", username: "" }
);
|
import { ComponentMeta, ComponentStory } from "@storybook/react";
import React from "react";
/** Props for the `First` demo component. */
interface ButtonProps {
text: string;
}
/**
 * Minimal demo component that renders a greeting with the supplied text.
 * NOTE(review): extra props are captured by the rest spread but never
 * forwarded to the <div> — confirm whether that is intentional.
 */
export const First = ({ text, ...props }: ButtonProps) => {
return <div>Hello world {text}</div>;
};
// Storybook metadata: registers the component under "Components/First".
export default {
title: "Components/First",
component: First,
// More on argTypes: https://storybook.js.org/docs/react/api/argtypes
} as ComponentMeta<typeof First>;
// More on component templates: https://storybook.js.org/docs/react/writing-stories/introduction#using-args
const Template: ComponentStory<typeof First> = (args) => <First {...args}/>;
// One story instance; the `args` assigned below become the component's props.
export const SomeNameYouLike = Template.bind({});
SomeNameYouLike.args = {
text:"gghhjjhjh"
};
|
% formula = chemical_formula_by_mass(masses, group_by_element)
%
% Returns a readable form for the chemical formula of a molecule,
% from input giving the MASS number (not nuclear charge) of its atoms.
%
% IMPROVEMENT: is there some rules for the order of elements,
% e.g. why is the conventional names CO_2 and H_2O not of the same form??
%
% PARAMETERS
% masses
% group_by_element (Default: true)
% when true: [16 12 16] becomes O_2C
% when false: [16 12 16] becomes OCO
% SEE ALSO
% chemical_formula_to_mass
function formula = chemical_formula_by_mass(masses, group_by_element)
% Builds a readable chemical formula string from the MASS numbers
% (not nuclear charges) of a molecule's atoms.
%
% Special call: with no inputs and one output, returns the cell array of
% known element names (used by chemical_formula_to_mass.m).

% Known elements by MASS number (Hydrogen isotopes included)
elements = {'H','D','T','He','?',...
'Li','^7Li','?','Be','^{10}B','B','C','^{13}C',...
'N','^{15}N','O','^{17}O','^{18}O','F','Ne','^{21}Ne','^{22}Ne'};
elements{23}='Na';
elements{24}='Mg';
elements{27}='Al';
elements{28}='Si';
elements{32}='S';
elements{35}='^{35}Cl';
elements{36}='^{36}Ar';
elements{37}='^{37}Cl';
elements{38}='^{38}Ar';
elements{40}='Ar';
if nargin == 0 && nargout == 1
  % Special case: if no arguments are given, return the list of known elements!
  % For use by chemical_formula_to_mass.m
  formula = elements;
  return;
end
if ischar(masses) % if a string was given, just return it
  formula = masses;
  return;
end
masses(masses == 0) = []; % remove zeroes, if they would occur
if nargin < 2
  group_by_element = true;
end
formula = '';
if isempty(masses)
  % Fix: return early for an empty list; max([]) below would otherwise
  % make false(1,max(masses)) error out.
  return;
end
used_masses = false(1,max(masses));
for m = masses
  if ~used_masses(m)
    if group_by_element
      count = sum(masses == m);
      used_masses(m) = true; % emit each element only once when grouping
    else
      count = 1;
    end
    if m <= length(elements)
      elem = elements{m};
    else
      error('The chemical element with M=%d is not named in the elements array', m);
    end
    if count > 1
      % Reuse count instead of recomputing sum(masses == m) a second time.
      formula = [formula elem '_' num2str(count)];
    else
      formula = [formula elem];
    end
  end
end
|
<!-- Copyright (c) 2021 . All Rights Reserved. -->
<div class="container-fluid">
<div class="card-block">
<form [formGroup]="submitterMasterForm" novalidate>
<h2 class="form-title card-label-admin">Submitter Master</h2>
<button type="button" *ngIf="showIcon" class="modal-close close" aria-label="Close" (click)="modalClose()">
<span aria-hidden="true">&times;</span>
</button>
<app-alert-message [alertMessage]="alertMessage"></app-alert-message>
<app-pop-up-message #popUpMesssage (buttonclickEvent)="popUpButtonHandler($event)"
[(popupMessage)]="popUpMessage"></app-pop-up-message>
<div class="row mt-3">
<menu-bar [menus]="menu" (onMenuClick)="onMenuItemClick($event)"></menu-bar>
</div>
<ng-template #loaderTemplate let-loaderValue="value">
<div class="row justify-content-center mt-4" *ngIf="loaderValue">
<div class="spinner-border text-success" role="status">
<span class="sr-only">Loading...</span>
</div>
</div>
</ng-template>
<ng-container *ngTemplateOutlet="loaderTemplate;context:{value: secProgress }"></ng-container>
<div class="form-group row">
<!-- Textbox with label of Submitter ID -->
<div class="col-md-4" [ngClass]="{'has-danger': formValidation.isValidField('submitterId') }">
<label class="col-form-label" for="submitterId" i18n>Submitter ID</label>
<input type="text" id="submitterId" [readonly]="submitterIdReadonly" [fieldLevelSec]="''"
[params]="" (keydown)="onKeyDown($event)" class="form-control" formControlName="submitterId" />
<small class="text-danger" *ngIf="formValidation.isValidField('submitterId')">
{{ formValidation.errorMessage('submitterId') }}
</small>
</div>
</div>
<ng-container *ngIf="showFullForm">
<div class="form-group row">
<label class="label-broken col-md-4 text-primary text-right">
<span>
<b><i>Address</i></b>
</span>
</label>
<label class="col-md-8 label-broken"><span></span></label>
</div>
<div class="form-group row">
<!-- Textbox with label of Submitter Name -->
<div class="col-md-6" [ngClass]="{'has-danger': formValidation.isValidField('submitterName') }">
<label class="col-form-label clr-blue" for="submitterName" i18n>Submitter Name</label>
<input type="text" id="submitterName" tabindex="1" [fieldLevelSec]="'Contact Name'" [params]=""
class="form-control" formControlName="submitterName" />
<small class="text-danger" *ngIf="formValidation.isValidField('submitterName')">
{{ formValidation.errorMessage('submitterName') }}
</small>
</div>
</div>
<div class="form-group row">
<!-- Textbox with label of Address Line 1 -->
<div class="col-md-6" [ngClass]="{'has-danger': formValidation.isValidField('addressLine1') }">
<label class="col-form-label" for="addressLine1" i18n>Address Line 1</label>
<input type="text" id="addressLine1" tabindex="2" [fieldLevelSec]="'Address Line 1'" [params]="''"
class="form-control" formControlName="addressLine1" />
<small class="text-danger" *ngIf="formValidation.isValidField('addressLine1')">
{{ formValidation.errorMessage('addressLine1') }}
</small>
</div>
</div>
<div class="form-group row">
<!-- Textbox with label of Address Line 2 -->
<div class="col-md-6" [ngClass]="{'has-danger': formValidation.isValidField('addressLine2') }">
<label class="col-form-label" for="addressLine2" i18n>Address Line 2</label>
<input type="text" id="addressLine2" tabindex="3" [fieldLevelSec]="'Address Line 2'" [params]="''"
class="form-control" formControlName="addressLine2" />
<small class="text-danger" *ngIf="formValidation.isValidField('addressLine2')">
{{ formValidation.errorMessage('addressLine2') }}
</small>
</div>
</div>
<div class="form-group row">
<!-- Textbox with label of Zip Code -->
<div class="col-md-4" [ngClass]="{'has-danger': formValidation.isValidField('zipCode') }">
<label class="col-form-label" for="zipCode" i18n>Zip Code</label>
<input type="text" id="zipCode" tabindex="4" [fieldLevelSec]="'Zip Code'" [params]="''"
(blur)="findDetailByZipCode($event)" class="form-control" formControlName="zipCode" />
<small class="text-danger" *ngIf="formValidation.isValidField('zipCode')">
{{ formValidation.errorMessage('zipCode') }}
</small>
</div>
<!-- Textbox with label of City -->
<div class="col-md-4" [ngClass]="{'has-danger': formValidation.isValidField('city') }">
<label class="col-form-label" for="city" i18n>City</label>
<input type="text" id="city" tabindex="5" [fieldLevelSec]="'City'" [params]="''" class="form-control"
formControlName="city" />
<small class="text-danger" *ngIf="formValidation.isValidField('city')">
{{ formValidation.errorMessage('city') }}
</small>
</div>
</div>
<div class="form-group row">
<!-- Textbox with label of State -->
<div class="col-md-4" [ngClass]="{'has-danger': formValidation.isValidField('state') }">
<label class="col-form-label" for="state" i18n>State</label>
<input type="text" id="state" tabindex="6" [fieldLevelSec]="'State'" [params]="''" class="form-control"
formControlName="state" />
<small class="text-danger" *ngIf="formValidation.isValidField('state')">
{{ formValidation.errorMessage('state') }}
</small>
</div>
<!-- Textbox with label of Country -->
<div class="col-md-4" [ngClass]="{'has-danger': formValidation.isValidField('country') }">
<label class="col-form-label" for="country" i18n>Country</label>
<select id="country" class="form-control" tabindex="7" formControlName="country">
<option [value]="c.systemCode" *ngFor="let c of countries;">{{c.systemCodeDesc2}}</option>
</select> <small class="text-danger" *ngIf="formValidation.isValidField('country')">
{{ formValidation.errorMessage('country') }}
</small>
</div>
</div>
<div class="form-group row">
<label class="label-broken col-md-4 text-primary text-right">
<span>
<b>Contact</b>
</span>
</label>
<label class="col-md-8 label-broken"><span></span></label>
</div>
<div class="form-group row">
<!-- Textbox with label of Contact Name -->
<div class="col-md-3" [ngClass]="{'has-danger': formValidation.isValidField('contactName') }">
<label class="col-form-label" for="contactName" i18n>Contact Name</label>
<input type="text" id="contactName" tabindex="8" [fieldLevelSec]="''" [params]="" class="form-control"
formControlName="contactName" />
<small class="text-danger" *ngIf="formValidation.isValidField('contactName')">
{{ formValidation.errorMessage('contactName') }}
</small>
</div>
<!-- Textbox with label of Phone Number -->
<div class="col-md-3" [ngClass]="{'has-danger': formValidation.isValidField('phoneNumber') }">
<label class="col-form-label" for="phoneNumber" i18n>Phone Number</label>
<input type="text" id="phoneNumber" tabindex="9" maxlength="10" [fieldLevelSec]="'Phone Number'"
[params]="''" class="form-control" formControlName="phoneNumber" />
<small class="text-danger" *ngIf="formValidation.isValidField('phoneNumber')">
{{ formValidation.errorMessage('phoneNumber') }}
</small>
</div>
</div>
<div class="form-group row">
<!-- Textbox with label of Title -->
<div class="col-md-6 " [ngClass]="{'has-danger': formValidation.isValidField('title') }">
<label class="col-form-label" for="title" i18n>Title :</label>
<select id="title" class="form-control" tabindex="10" (change)="inputEvent()" name="title"
formControlName="title" [fieldLevelSec]="'title'"
[params]="{secColDetails: secColDetails, isEditState: editVendonTIN}">
<option *ngFor="let title of titles" [ngValue]="title.contactTitle">{{title.contactTitle}}
</option>
</select>
<small class="text-danger" *ngIf="formValidation.isValidField('title')">
{{ formValidation.errorMessage('title') }}
</small>
</div>
<!-- Textbox with label of Fax Number -->
<div class="col-md-3" [ngClass]="{'has-danger': formValidation.isValidField('faxNumber') }">
<label class="col-form-label" for="faxNumber" i18n>Fax Number</label>
<input type="text" id="faxNumber" maxlength="10" tabindex="11" [fieldLevelSec]="'Fax Number'" [params]="''"
class="form-control" formControlName="faxNumber" />
<small class="text-danger" *ngIf="formValidation.isValidField('faxNumber')">
{{ formValidation.errorMessage('faxNumber') }}
</small>
</div>
</div>
<div class="form-group row">
<label class="label-broken col-md-4 text-primary text-right">
<span>
<b> Connection </b>
</span>
</label>
<label class="col-md-8 label-broken"><span></span></label>
</div>
<div class="form-group row">
<!-- Textbox with label of Computer Dialup No (control name keeps legacy spelling) -->
<div class="col-md-4" [ngClass]="{'has-danger': formValidation.isValidField('computerDailupNo') }">
<label class="col-form-label" for="computerDailupNo" i18n>Computer Dialup No</label>
<input type="text" id="computerDailupNo" tabindex="12" [fieldLevelSec]="''" [params]="" class="form-control"
formControlName="computerDailupNo" />
<small class="text-danger" *ngIf="formValidation.isValidField('computerDailupNo')">
{{ formValidation.errorMessage('computerDailupNo') }}
</small>
</div>
</div>
<div class="form-group row">
<!-- Textbox with label of Modem Make -->
<div class="col-md-4" [ngClass]="{'has-danger': formValidation.isValidField('modemMake') }">
<label class="col-form-label" for="modemMake" i18n>Modem Make</label>
<input type="text" id="modemMake" tabindex="13" [fieldLevelSec]="''" [params]="" class="form-control"
formControlName="modemMake" />
<small class="text-danger" *ngIf="formValidation.isValidField('modemMake')">
{{ formValidation.errorMessage('modemMake') }}
</small>
</div>
<!-- Textbox with label of Modem Speed -->
<div class="col-md-4" [ngClass]="{'has-danger': formValidation.isValidField('modemSpeed') }">
<label class="col-form-label" for="modemSpeed" i18n>Modem Speed</label>
<!-- <input type="text" id="modemSpeed" [fieldLevelSec]="''" [params]="" class="form-control" formControlName="modemSpeed"/> -->
<select id="modemSpeed" class="form-control" tabindex="15" formControlName="modemSpeed">
<option [value]="c.value" *ngFor="let c of modemModels;">{{c.name}}</option>
</select>
<small class="text-danger" *ngIf="formValidation.isValidField('modemSpeed')">
{{ formValidation.errorMessage('modemSpeed') }}
</small>
</div>
</div>
<div class="form-group row">
<!-- Textbox with label of Modem Model -->
<div class="col-md-4" [ngClass]="{'has-danger': formValidation.isValidField('modemModel') }">
<label class="col-form-label" for="modemModel" i18n>Modem Model</label>
<input type="text" id="modemModel" tabindex="14" [fieldLevelSec]="''" [params]="" class="form-control" formControlName="modemModel"/>
<!-- <select id="modemModel" class="form-control" formControlName="modemModel">
<option [value]="c.value" *ngFor="let c of modemModels;">{{c.name}}</option>
</select> -->
<small class="text-danger" *ngIf="formValidation.isValidField('modemModel')">
{{ formValidation.errorMessage('modemModel') }}
</small>
</div>
<!-- Textbox with label of External Reference -->
<div class="col-md-4" [ngClass]="{'has-danger': formValidation.isValidField('externalReference') }">
<label class="col-form-label" for="externalReference" i18n>External Reference</label>
<input type="text" id="externalReference" tabindex="16" [fieldLevelSec]="''" [params]="" class="form-control"
formControlName="externalReference" />
<small class="text-danger" *ngIf="formValidation.isValidField('externalReference')">
{{ formValidation.errorMessage('externalReference') }}
</small>
</div>
</div>
<div class="form-group row">
</div>
<div class="form-group row">
<label class="label-broken col-md-4 text-primary text-right">
<span>
<b> Miscellaneous </b>
</span>
</label>
<label class="col-md-8 label-broken"><span></span></label>
</div>
<div class="form-group row">
</div>
<div class="form-group row">
<!-- Textbox with label of User Defined 1 -->
<div class="col-md-4" [ngClass]="{'has-danger': formValidation.isValidField('userDefined1') }">
<label class="col-form-label" for="userDefined1" i18n>File Layout</label>
<input type="text" id="userDefined1" tabindex="17" [fieldLevelSec]="'User Defined 5'" [params]="''"
class="form-control" formControlName="userDefined1" />
<small class="text-danger" *ngIf="formValidation.isValidField('userDefined1')">
{{ formValidation.errorMessage('userDefined1') }}
</small>
</div>
<!-- Textbox with label of User Defined 2 -->
<div class="col-md-4" [ngClass]="{'has-danger': formValidation.isValidField('userDefined2') }">
<label class="col-form-label" for="fileFrequency" i18n>File Frequency</label>
<input type="text" id="userDefined2" tabindex="18" [fieldLevelSec]="''" [params]="" class="form-control"
formControlName="userDefined2" />
<small class="text-danger" *ngIf="formValidation.isValidField('userDefined2')">
{{ formValidation.errorMessage('userDefined2') }}
</small>
</div>
</div>
<div class="form-group row">
<!-- Textbox with label of File Type (userDefined3) -->
<div class="col-md-4" [ngClass]="{'has-danger': formValidation.isValidField('userDefined3') }">
<label class="col-form-label" for="userDefined3" i18n>File Type</label>
<input type="text" id="userDefined3" tabindex="19" [fieldLevelSec]="'User Defined 6'" [params]="''"
class="form-control" formControlName="userDefined3" />
<small class="text-danger" *ngIf="formValidation.isValidField('userDefined3')">
{{ formValidation.errorMessage('userDefined3') }}
</small>
</div>
<!-- Textbox with label of Media Type (userDefined4) -->
<div class="col-md-4" [ngClass]="{'has-danger': formValidation.isValidField('userDefined4') }">
<label class="col-form-label" for="userDefined4" i18n>Media Type</label>
<input type="text" id="userDefined4" tabindex="20" [fieldLevelSec]="''" [params]="" class="form-control"
formControlName="userDefined4" />
<small class="text-danger" *ngIf="formValidation.isValidField('userDefined4')">
{{ formValidation.errorMessage('userDefined4') }}
</small>
</div>
</div>
<div class="form-group row">
<!-- Textbox with label of User Defined 5 -->
<div class="col-md-4" [ngClass]="{'has-danger': formValidation.isValidField('userDefined5') }">
<label class="col-form-label" for="userDefined5" i18n>User Defined 5</label>
<input type="text" id="userDefined5" tabindex="21" [fieldLevelSec]="'User Defined 5'" [params]="''"
class="form-control" formControlName="userDefined5" />
<small class="text-danger" *ngIf="formValidation.isValidField('userDefined5')">
{{ formValidation.errorMessage('userDefined5') }}
</small>
</div>
<!-- Textbox with label of User Defined 6 -->
<div class="col-md-4" [ngClass]="{'has-danger': formValidation.isValidField('userDefined6') }">
<label class="col-form-label" for="userDefined6" i18n>User Defined 6</label>
<input type="text" id="userDefined6" tabindex="22" [fieldLevelSec]="''" [params]="" class="form-control"
formControlName="userDefined6" />
<small class="text-danger" *ngIf="formValidation.isValidField('userDefined6')">
{{ formValidation.errorMessage('userDefined6') }}
</small>
</div>
</div>
<div class="form-group row">
<div class="col-md-4" [ngClass]="{'has-danger': formValidation.isValidField('userDefined7') }">
<label class="col-form-label" for="userDefined7" i18n>User Defined 7</label>
<input type="text" id="userDefined7" tabindex="23" [fieldLevelSec]="''" [params]="" class="form-control"
formControlName="userDefined7" />
<small class="text-danger" *ngIf="formValidation.isValidField('userDefined7')">
{{ formValidation.errorMessage('userDefined7') }}
</small>
</div>
</div>
</ng-container>
</form>
</div>
</div>
<ng-keyboard-shortcuts [shortcuts]="shortcuts"></ng-keyboard-shortcuts>
<ng-keyboard-shortcuts-help [key]="'f1'" [closeKey]="'escape'" [title]="'Hotkeys Help'"></ng-keyboard-shortcuts-help>
|
# 20230912
## 非効率なスライスの初期化
- makeを使ってスライスを初期化する際に、長さと容量を指定する
- この2つのパラメータに適切な値を渡すことを忘れてしまうのは、よくある誤り
```go
func convert(foos []Foo) []Bar {
bars := make([]Bar, 0)
for _, foo := range foos{
bars = append(bars, fooToBar(foo))
}
return bars
}
```
- 最初の要素を追加すると、大きさが1の基底配列が割り当てられる
- 基底配列がいっぱいになることで、Goはその容量を2倍にして別の配列を作成する
- 基底配列がいっぱいになるたびに配列がコピーされてしまう
- 最適化のためには2つの選択肢がある
1. 容量の指定
```go
func convert(foos []Foo) []Bar {
bars := make([]Bar, 0, len(foos))
for _, foo := range foos{
bars = append(bars, fooToBar(foo))
}
return bars
}
```
2. 長さの指定
```go
func convert(foos []Foo) []Bar {
bars := make([]Bar, len(foos))
for i, foo := range foos{
bars[i] = fooToBar(foo)
}
return bars
}
```
## nilスライスと空スライスを混同する
```go
func log(i int, s []string) {
fmt.Printf("%d: empty=%t\tnil=%t\n", i, len(s) == 0, s == nil)
}
func main() {
var s []string
log(1, s)
s = []string(nil)
log(2, s)
s = []string{}
log(3, s)
s = make([]string, 0)
log(4, s)
}
/*
> go run 5.\ slice/nil-slice.go
1: empty=true nil=true
2: empty=true nil=true
3: empty=true nil=false
4: empty=true nil=false
*/
```
|
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Document</title>
</head>
<body>
<h1>Explore event Delegate</h1>
<section>
<ul id="list-container">
<li class="item">Lorem, ipsum dolor.</li>
<li class="item">Illum, dolores quisquam!</li>
<li class="item">Alias, libero similique.</li>
<li class="item">Nostrum, ut culpa.</li>
<li class="item">Neque, fugit dolorem?</li>
<li class="item">Illo, quidem natus!</li>
</ul>
<button id="add-item">Add Item</button>
</section>
<script>
// Event delegation: one listener on the parent <ul> handles clicks for
// every current and future <li>, replacing the old per-item listener
// loop (previously kept here as commented-out code).
document.getElementById('list-container').addEventListener('click', function (event) {
    // Fix: only remove actual list items. Without this guard, a click on
    // the <ul> itself (padding/gaps) made event.target the <ul>, and the
    // handler removed the entire list from the page.
    const item = event.target.closest('li.item');
    if (item) {
        item.parentNode.removeChild(item);
    }
})
document.getElementById('add-item').addEventListener('click', function () {
    const listContainer = document.getElementById('list-container');
    const li = document.createElement('li');
    li.innerText = 'brand New item';
    li.classList.add('item');
    listContainer.appendChild(li);
})
</script>
</body>
</html>
|
package a1q2;
/**
 * An associate customer: a {@link Customer} whose subscription may be paid
 * for by a linked {@link PayingCustomer}.
 *
 * @author yeojustin
 */
public class AssociateCustomer extends Customer {

    /** The paying customer who pays for this associate customer; may be null. */
    private PayingCustomer payingCustomer;

    /**
     * Creates an associate customer with no paying customer assigned yet.
     *
     * @param name  the customer's name (expected non-null)
     * @param email the customer's email (expected non-null)
     */
    public AssociateCustomer(String name, String email) {
        super(name, email);
    }

    /**
     * Creates an associate customer linked to the given paying customer.
     *
     * @param name           the customer's name (expected non-null)
     * @param email          the customer's email (expected non-null)
     * @param payingCustomer the paying customer responsible for this account
     */
    public AssociateCustomer(String name, String email, PayingCustomer payingCustomer) {
        super(name, email);
        this.payingCustomer = payingCustomer;
    }

    /**
     * @return the paying customer linked to this associate customer,
     *         or null if none has been assigned
     */
    public PayingCustomer getPayingCustomer() {
        return payingCustomer;
    }

    /**
     * Links this associate customer to a paying customer.
     *
     * @param payingCustomer the paying customer to assign (expected non-null)
     */
    public void setPayingCustomer(PayingCustomer payingCustomer) {
        this.payingCustomer = payingCustomer;
    }
}
|
use uuid::Uuid;
use std::collections::HashMap;
use std::convert::TryFrom;
use std::io::{Read, Cursor};
use std::sync::Arc;
use async_std::net;
pub use async_std::channel::{Receiver as AsyncStdReceiver, Sender as AsyncStdSender};
pub use tokio::sync::broadcast::{Sender as TokioBroadcastSender, Receiver as TokioBroadcastReceiver};
use futures::AsyncReadExt;
// pub mod room;
/// Convenience module re-exporting everything in this file.
pub mod prelude {
pub use super::*;
}
/// Models a single chatroom that is hosted by the server.
#[derive(Debug)]
pub struct Chatroom {
/// The unique id of the chatroom.
pub id: Uuid,
/// The name of the chatroom.
pub name: Arc<String>,
/// A sending half of the broadcast channel that the chatroom-sub-broker task
/// will use to broadcast messages to all subscribing clients. Used for creating new subscriptions,
/// whenever a new client wants to join.
pub client_subscriber: TokioBroadcastSender<Response>,
/// The sending half of the channel that can be cloned and sent to any client's read task,
/// that way new clients can take a clone of the sending half of this channel and start sending events.
pub client_read_sender: AsyncStdSender<Event>,
/// The sending half of a channel used for synchronizing shutdown; dropping this channel (when it is 'Some')
/// will initiate a graceful shutdown procedure for the current chatroom-sub-broker task. May or may not be set.
pub shutdown: Option<AsyncStdSender<Null>>,
/// The capacity of the chatroom, i.e. the number of clients it can serve.
pub capacity: usize,
/// The current number of clients connected with the chatroom.
pub num_clients: usize,
}
impl Chatroom {
    /// XORs the little-endian u32 encoding of the name's byte length into `tag[0..4]`.
    fn serialize_name_length(&self, tag: &mut [u8; 12]) {
        let bytes = (self.name.len() as u32).to_le_bytes();
        for (slot, byte) in tag[0..4].iter_mut().zip(bytes.iter()) {
            *slot ^= *byte;
        }
    }
    /// XORs the little-endian u32 encoding of `capacity` into `tag[4..8]`.
    fn serialize_capacity(&self, tag: &mut [u8; 12]) {
        let bytes = (self.capacity as u32).to_le_bytes();
        for (slot, byte) in tag[4..8].iter_mut().zip(bytes.iter()) {
            *slot ^= *byte;
        }
    }
    /// XORs the little-endian u32 encoding of `num_clients` into `tag[8..12]`.
    fn serialize_num_clients(&self, tag: &mut [u8; 12]) {
        let bytes = (self.num_clients as u32).to_le_bytes();
        for (slot, byte) in tag[8..12].iter_mut().zip(bytes.iter()) {
            *slot ^= *byte;
        }
    }
}
/// Fixed-size 12-byte header tag produced when encoding a `Chatroom`.
type ChatroomEncodeTag = [u8; 12];
impl SerializationTag for ChatroomEncodeTag {}
/// Decoded header fields: (name length, capacity, current client count).
type ChatroomDecodeTag = (u32, u32, u32);
impl DeserializationTag for ChatroomDecodeTag {}
impl SerAsBytes for Chatroom {
type Tag = ChatroomEncodeTag;
/// Builds the 12-byte header tag: bytes 0..4 hold the name length,
/// 4..8 the capacity, 8..12 the client count (each XORed into a zeroed tag).
fn serialize(&self) -> Self::Tag {
let mut tag = [0u8; 12];
self.serialize_name_length(&mut tag);
self.serialize_capacity(&mut tag);
self.serialize_num_clients(&mut tag);
tag
}
}
impl DeserAsBytes for Chatroom {
    type TvlTag = ChatroomDecodeTag;
    /// Reassembles `(name_len, capacity, num_clients)` from the 12-byte tag,
    /// reading each field as a little-endian u32 (inverse of `serialize`).
    fn deserialize(tag: &Self::Tag) -> Self::TvlTag {
        let read_le_u32 = |chunk: &[u8]| -> u32 {
            chunk
                .iter()
                .enumerate()
                .fold(0u32, |acc, (i, &b)| acc ^ ((b as u32) << (i * 8)))
        };
        let name_len = read_le_u32(&tag[0..4]);
        let capacity = read_le_u32(&tag[4..8]);
        let num_clients = read_le_u32(&tag[8..12]);
        (name_len, capacity, num_clients)
    }
}
impl AsBytes for Chatroom {
    /// Full wire format: the 12-byte header tag followed by the UTF-8 name bytes.
    fn as_bytes(&self) -> Vec<u8> {
        let mut out = Vec::with_capacity(12 + self.name.len());
        out.extend_from_slice(&self.serialize());
        out.extend_from_slice(self.name.as_bytes());
        out
    }
}
/// Decoded view of one chatroom entry parsed from the wire format.
#[derive(Debug)]
pub struct ChatroomFrame {
/// The chatroom's name, parsed from the bytes following the header tag.
pub name: String,
/// The chatroom's client capacity.
pub capacity: usize,
/// The number of clients currently connected.
pub num_clients: usize,
}
impl ChatroomFrame {
/// Reads one frame from `reader`: a 12-byte header tag followed by
/// `name_len` bytes of UTF-8 name. Returns a static error message on a
/// short read or invalid UTF-8.
pub fn try_parse<R: Read>(mut reader: R) -> Result<ChatroomFrame, &'static str> {
let mut tag = [0u8; 12];
reader.read_exact(&mut tag)
.map_err(|_| "error reading 'ChatroomFrame' tag from reader")?;
// Header layout is defined by Chatroom's SerAsBytes/DeserAsBytes impls.
let (name_len, capacity, num_clients) = <Chatroom as DeserAsBytes>::deserialize(&tag);
let mut name_bytes = vec![0; name_len as usize];
reader.read_exact(name_bytes.as_mut_slice())
.map_err(|_| "error reading name bytes for 'ChatroomFrame' from reader")?;
let name = String::from_utf8(name_bytes)
.map_err(|_| "error parsing 'ChatroomFrame' name bytes as valid utf")?;
Ok(ChatroomFrame { name, capacity: capacity as usize, num_clients: num_clients as usize} )
}
}
/// A collection of `ChatroomFrame`s parsed from a single byte buffer.
#[derive(Debug)]
pub struct ChatroomFrames {
pub frames: Vec<ChatroomFrame>,
}
impl TryFrom<Vec<u8>> for ChatroomFrames {
    type Error = &'static str;
    /// Parses a back-to-back sequence of chatroom frames out of `bytes`,
    /// consuming the buffer until every byte has been read.
    fn try_from(bytes: Vec<u8>) -> Result<Self, Self::Error> {
        let total_len = bytes.len() as u64;
        let mut cursor = std::io::Cursor::new(bytes);
        let mut frames = Vec::new();
        while cursor.position() < total_len {
            frames.push(ChatroomFrame::try_parse(&mut cursor)?);
        }
        Ok(ChatroomFrames { frames })
    }
}
/// An enum that represents all possible responses that can be sent from the server back to the client
///
/// The `Response` is generated by both the main broker and the chatroom broker tasks, in response
/// to `Event`s triggered by client input. `Response` is used by each client's writing task to
/// decide what logic to execute to update the state of the writing task, as well as informing the client.
#[derive(Debug, Clone, PartialEq)]
pub enum Response {
/// A response informing the client they have successfully connected to the chatroom lobby
ConnectionOk,
/// A response informing the client they have successfully subscribed to a chatroom
Subscribed { chatroom_name: String },
/// A response informing the client they have successfully created/subscribed to, a new chatroom
ChatroomCreated { chatroom_name: String },
/// A response informing the client the chatroom they attempted to create already exists
ChatroomAlreadyExists {chatroom_name: String, lobby_state: Vec<u8>},
/// A response informing the client that the chatroom they attempted to join does not exist
ChatroomDoesNotExist { chatroom_name: String, lobby_state: Vec<u8> },
/// A response informing the client the chatroom they attempted to join is full and cannot
/// be joined.
ChatroomFull {chatroom_name: String, lobby_state: Vec<u8>},
/// A response that sends a message to the client from the chatroom
Message { peer_id: Uuid, msg: String },
/// A response informing the client they have successfully created a valid username
UsernameOk { username: String, lobby_state: Vec<u8> },
/// A response informing the client that the username they entered already exists
UsernameAlreadyExists { username: String },
/// A response informing the client they have successfully exited the chatroom
ExitChatroom { chatroom_name: String},
/// A response informing the client they have rejoined the lobby
Lobby {lobby_state: Vec<u8>},
/// A response informing the client they have exited the lobby
ExitLobby,
/// A response informing the client that their read-task is synced with a new chatroom-sub-broker
ReadSync,
}
impl Response {
    /// Attempts to parse a `Response` from the `input_reader`.
    ///
    /// First reads the fixed 22-byte tag, then reads any variable-length
    /// name/data payloads whose lengths were encoded in the tag.
    ///
    /// # Panics
    /// If an invalid type flag is read from `input_reader`, the method will
    /// panic. (The panic is raised inside `Response::deserialize` while
    /// decoding the tag, so the fallback `match` arm below is unreachable.)
    pub async fn try_parse<R: AsyncReadExt + Unpin>(mut input_reader: R) -> Result<Self, &'static str> {
        // Create tag, and attempt to read from reader
        let mut tag = [0u8; 22];
        input_reader.read_exact(&mut tag)
            .await
            .map_err(|_| "unable to read tag from reader")?;
        // Deserialize tag into (type flag, peer id, name length, data length)
        let (type_byte, id, name_len, data_len) = Response::deserialize(&tag);
        match type_byte {
            1 => Ok(Response::ConnectionOk),
            2 => {
                let mut chatroom_name_bytes = vec![0; name_len as usize];
                input_reader.read_exact(chatroom_name_bytes.as_mut_slice())
                    .await
                    .map_err(|_| "unable to read chatroom name bytes from reader")?;
                let chatroom_name = String::from_utf8(chatroom_name_bytes)
                    .map_err(|_| "unable to parse chatroom name as valid utf8")?;
                Ok(Response::Subscribed {chatroom_name})
            }
            3 => {
                let mut chatroom_name_bytes = vec![0; name_len as usize];
                input_reader.read_exact(chatroom_name_bytes.as_mut_slice())
                    .await
                    .map_err(|_| "unable to read chatroom name bytes from reader")?;
                let chatroom_name = String::from_utf8(chatroom_name_bytes)
                    .map_err(|_| "unable to parse chatroom name as valid utf8")?;
                Ok(Response::ChatroomCreated {chatroom_name})
            }
            4 => {
                let mut chatroom_name_bytes = vec![0; name_len as usize];
                let mut lobby_state_bytes = vec![0; data_len as usize];
                input_reader.read_exact(chatroom_name_bytes.as_mut_slice())
                    .await
                    .map_err(|_| "unable to read chatroom name bytes from reader")?;
                let chatroom_name = String::from_utf8(chatroom_name_bytes)
                    .map_err(|_| "unable to parse chatroom name as valid utf8")?;
                input_reader.read_exact(lobby_state_bytes.as_mut_slice())
                    .await
                    .map_err(|_| "unable to read lobby state bytes from reader")?;
                Ok(Response::ChatroomAlreadyExists {chatroom_name, lobby_state: lobby_state_bytes})
            }
            5 => {
                let mut chatroom_name_bytes = vec![0; name_len as usize];
                let mut lobby_state_bytes = vec![0; data_len as usize];
                input_reader.read_exact(chatroom_name_bytes.as_mut_slice())
                    .await
                    .map_err(|_| "unable to read chatroom name bytes from reader")?;
                let chatroom_name = String::from_utf8(chatroom_name_bytes)
                    .map_err(|_| "unable to parse chatroom name as valid utf8")?;
                input_reader.read_exact(lobby_state_bytes.as_mut_slice())
                    .await
                    .map_err(|_| "unable to read lobby state bytes from reader")?;
                Ok(Response::ChatroomDoesNotExist {chatroom_name, lobby_state: lobby_state_bytes})
            }
            6 => {
                let mut chatroom_name_bytes = vec![0; name_len as usize];
                let mut lobby_state_bytes = vec![0; data_len as usize];
                input_reader.read_exact(chatroom_name_bytes.as_mut_slice())
                    .await
                    .map_err(|_| "unable to read chatroom name bytes from reader")?;
                let chatroom_name = String::from_utf8(chatroom_name_bytes)
                    .map_err(|_| "unable to parse chatroom name as valid utf8")?;
                input_reader.read_exact(lobby_state_bytes.as_mut_slice())
                    .await
                    .map_err(|_| "unable to read lobby state bytes from reader")?;
                Ok(Response::ChatroomFull {chatroom_name, lobby_state: lobby_state_bytes})
            }
            7 => {
                // The sender's id travels inside the tag itself; only the
                // message body follows on the wire.
                let mut msg_bytes = vec![0; data_len as usize];
                input_reader.read_exact(msg_bytes.as_mut_slice())
                    .await
                    .map_err(|_| "unable to read message bytes from reader")?;
                let msg = String::from_utf8(msg_bytes)
                    .map_err(|_| "unable to parse message as valid utf8")?;
                Ok(Response::Message {peer_id: Uuid::from_u128(id), msg})
            }
            8 => {
                let mut username_bytes = vec![0; name_len as usize];
                let mut lobby_state_bytes = vec![0; data_len as usize];
                input_reader.read_exact(username_bytes.as_mut_slice())
                    .await
                    .map_err(|_| "unable to read username bytes from reader")?;
                input_reader.read_exact(lobby_state_bytes.as_mut_slice())
                    .await
                    .map_err(|_| "unable to read lobby state bytes from reader")?;
                let username = String::from_utf8(username_bytes)
                    .map_err(|_| "unable to parse username bytes as valid utf8")?;
                Ok(Response::UsernameOk {username, lobby_state: lobby_state_bytes})
            }
            9 => {
                let mut username_bytes = vec![0; name_len as usize];
                input_reader.read_exact(username_bytes.as_mut_slice())
                    .await
                    .map_err(|_| "unable to read username bytes from reader")?;
                let username = String::from_utf8(username_bytes)
                    .map_err(|_| "unable to parse username bytes as valid utf8")?;
                Ok(Response::UsernameAlreadyExists {username})
            }
            10 => {
                let mut chatroom_name_bytes = vec![0; name_len as usize];
                input_reader.read_exact(chatroom_name_bytes.as_mut_slice())
                    .await
                    .map_err(|_| "unable to read chatroom name bytes from reader")?;
                let chatroom_name = String::from_utf8(chatroom_name_bytes)
                    .map_err(|_| "unable to parse chatroom name bytes as valid utf8")?;
                Ok(Response::ExitChatroom {chatroom_name})
            }
            11 => {
                let mut lobby_state_bytes = vec![0; data_len as usize];
                input_reader.read_exact(lobby_state_bytes.as_mut_slice())
                    .await
                    .map_err(|_| "unable to read lobby state bytes from reader")?;
                Ok(Response::Lobby {lobby_state: lobby_state_bytes})
            }
            12 => Ok(Response::ExitLobby),
            13 => Ok(Response::ReadSync),
            // Unreachable in practice: `Response::deserialize` has already
            // panicked on any flag outside 1..=13. Kept for exhaustiveness.
            _ => panic!("invalid type byte detected"),
        }
    }

    /// Returns `true` if this response is the `ExitLobby` variant.
    pub fn is_exit_lobby(&self) -> bool {
        matches!(self, Response::ExitLobby)
    }

    /// Returns `true` if this response is the `ExitChatroom` variant.
    pub fn is_exit_chatroom(&self) -> bool {
        matches!(self, Response::ExitChatroom { .. })
    }

    /// Returns `true` if this response is the `Subscribed` variant.
    pub fn is_subscribed(&self) -> bool {
        matches!(self, Response::Subscribed { .. })
    }

    /// Returns `true` if this response is the `ChatroomCreated` variant.
    pub fn is_chatroom_created(&self) -> bool {
        matches!(self, Response::ChatroomCreated { .. })
    }

    /// Returns `true` if this response is the `Message` variant.
    pub fn is_message(&self) -> bool {
        matches!(self, Response::Message { .. })
    }

    /// Returns `true` if this response is the `ConnectionOk` variant.
    pub fn is_connection_ok(&self) -> bool {
        matches!(self, Response::ConnectionOk)
    }

    /// Returns `true` if this response is the `ReadSync` variant.
    pub fn is_read_sync(&self) -> bool {
        matches!(self, Response::ReadSync)
    }

    /// Helper method to reduce code duplication when serializing a `Response`.
    ///
    /// Writes `length` into `tag` as 4 little-endian bytes starting at `idx`.
    /// Takes `tag`, the tag that we are serializing the `Response` into, `idx` the starting position
    /// from which to start serializing a length value and `length`, the value that represents the length
    /// we wish to serialize.
    fn serialize_data_length(tag: &mut ResponseEncodeTag, idx: usize, length: u32) {
        for i in idx ..idx + 4 {
            // XOR into a zeroed slot is equivalent to assignment; `as u8`
            // keeps only the low byte of the shifted value.
            tag[i] ^= (length >> ((i - idx) * 8)) as u8;
        }
    }

    /// Helper method to reduce code duplication when deserializing a `Response` tag.
    ///
    /// Reads 4 little-endian bytes from `tag` starting at `idx` and folds
    /// them into `length` (which the caller must initialize to zero).
    fn deserialize_data_length(tag: &ResponseEncodeTag, idx: usize, length: &mut u32) {
        for i in idx ..idx + 4 {
            *length ^= (tag[i] as u32) << ((i - idx) * 8)
        }
    }
}
// NOTE(review): this `unsafe impl` looks redundant — `Send` is auto-derived
// when every field of every variant is `Send` (the visible fields are
// `String`, `Vec<u8>` and `Uuid`, which all are). Worse, if a non-`Send`
// field were ever added, this impl would silently assert thread-safety that
// no longer holds. Consider removing it — TODO confirm no variant holds a
// non-`Send` type.
unsafe impl Send for Response {}
/// The type-length-value tag type for serializing a `Response`.
/// 22 bytes: 1 type byte followed by variant-specific id/length fields.
type ResponseEncodeTag = [u8; 22];
impl SerializationTag for ResponseEncodeTag {}
/// The type-length-value type for deserializing a `Response` tag:
/// `(type flag, peer id, name length, data length)`.
type ResponseDecodeTag = (u8, u128, u8, u32);
impl DeserializationTag for ResponseDecodeTag {}
impl SerAsBytes for Response {
    type Tag = ResponseEncodeTag;

    /// Encodes this `Response` into its fixed-size 22-byte tag.
    ///
    /// Byte 0 holds the type flag. For most variants byte 1 holds the
    /// name/username length and bytes 2-5 the little-endian lobby-state
    /// length; `Message` instead stores the peer id in bytes 1-16 and the
    /// message length starting at byte 17; `Lobby` stores its data length
    /// starting at byte 1.
    fn serialize(&self) -> Self::Tag {
        let mut tag = [0u8; 22];
        match self {
            Response::ConnectionOk => tag[0] = 1,
            Response::Subscribed { chatroom_name } => {
                tag[0] = 2;
                tag[1] = chatroom_name.len() as u8;
            }
            Response::ChatroomCreated { chatroom_name } => {
                tag[0] = 3;
                tag[1] = chatroom_name.len() as u8;
            }
            Response::ChatroomAlreadyExists { chatroom_name, lobby_state } => {
                tag[0] = 4;
                tag[1] = chatroom_name.len() as u8;
                Response::serialize_data_length(&mut tag, 2, lobby_state.len() as u32);
            }
            Response::ChatroomDoesNotExist { chatroom_name, lobby_state } => {
                tag[0] = 5;
                tag[1] = chatroom_name.len() as u8;
                Response::serialize_data_length(&mut tag, 2, lobby_state.len() as u32);
            }
            Response::ChatroomFull { chatroom_name, lobby_state } => {
                tag[0] = 6;
                tag[1] = chatroom_name.len() as u8;
                Response::serialize_data_length(&mut tag, 2, lobby_state.len() as u32);
            }
            Response::Message { peer_id, msg } => {
                tag[0] = 7;
                tag[1..17].copy_from_slice(peer_id.as_bytes());
                Response::serialize_data_length(&mut tag, 17, msg.len() as u32);
            }
            Response::UsernameOk { username, lobby_state } => {
                tag[0] = 8;
                tag[1] = username.len() as u8;
                Response::serialize_data_length(&mut tag, 2, lobby_state.len() as u32);
            }
            Response::UsernameAlreadyExists { username } => {
                tag[0] = 9;
                tag[1] = username.len() as u8;
            }
            Response::ExitChatroom { chatroom_name } => {
                tag[0] = 10;
                tag[1] = chatroom_name.len() as u8;
            }
            Response::Lobby { lobby_state } => {
                tag[0] = 11;
                Response::serialize_data_length(&mut tag, 1, lobby_state.len() as u32);
            }
            Response::ExitLobby => tag[0] = 12,
            Response::ReadSync => tag[0] = 13,
        }
        tag
    }
}
impl DeserAsBytes for Response {
    type TvlTag = ResponseDecodeTag;

    /// Decodes a 22-byte `Response` tag into
    /// `(type flag, peer id, name length, data length)`.
    ///
    /// Fields a given variant does not encode come back as zero.
    ///
    /// # Panics
    /// Panics if byte 0 of `tag` is not a recognized type flag (1..=13).
    fn deserialize(tag: &Self::Tag) -> Self::TvlTag {
        let mut data_len = 0u32;
        match tag[0] {
            1 => (1, 0, 0, 0),
            2 => (2, 0, tag[1], 0),
            3 => (3, 0, tag[1], 0),
            4 => {
                Response::deserialize_data_length(tag, 2, &mut data_len);
                (4, 0, tag[1], data_len)
            }
            5 => {
                Response::deserialize_data_length(tag, 2, &mut data_len);
                (5, 0, tag[1], data_len)
            }
            6 => {
                Response::deserialize_data_length(tag, 2, &mut data_len);
                (6, 0, tag[1], data_len)
            }
            7 => {
                // Message: bytes 1..17 carry the peer id, byte 17 onward the
                // little-endian message length.
                let mut id_bytes = [0u8; 16];
                id_bytes.copy_from_slice(&tag[1..17]);
                Response::deserialize_data_length(tag, 17, &mut data_len);
                (7, Uuid::from_bytes(id_bytes).as_u128(), 0, data_len)
            }
            8 => {
                Response::deserialize_data_length(tag, 2, &mut data_len);
                (8, 0, tag[1], data_len)
            }
            9 => (9, 0, tag[1], 0),
            10 => (10, 0, tag[1], 0),
            11 => {
                Response::deserialize_data_length(tag, 1, &mut data_len);
                (11, 0, 0, data_len)
            }
            12 => (12, 0, 0, 0),
            13 => (13, 0, 0, 0),
            _ => panic!("invalid type flag detected"),
        }
    }
}
impl AsBytes for Response {
    /// Produces the full wire representation of this `Response`: the fixed
    /// 22-byte tag followed by any variable-length payload bytes in the same
    /// order the parser reads them (name first, then lobby state).
    fn as_bytes(&self) -> Vec<u8> {
        let mut out = self.serialize().to_vec();
        match self {
            Response::ConnectionOk | Response::ExitLobby | Response::ReadSync => {}
            Response::Subscribed { chatroom_name }
            | Response::ChatroomCreated { chatroom_name }
            | Response::ExitChatroom { chatroom_name } => {
                out.extend_from_slice(chatroom_name.as_bytes());
            }
            Response::ChatroomAlreadyExists { chatroom_name, lobby_state }
            | Response::ChatroomDoesNotExist { chatroom_name, lobby_state }
            | Response::ChatroomFull { chatroom_name, lobby_state } => {
                out.extend_from_slice(chatroom_name.as_bytes());
                out.extend_from_slice(lobby_state.as_slice());
            }
            Response::Message { msg, .. } => {
                // The peer id already lives inside the tag, so only the
                // message text follows.
                out.extend_from_slice(msg.as_bytes());
            }
            Response::UsernameOk { username, lobby_state } => {
                out.extend_from_slice(username.as_bytes());
                out.extend_from_slice(lobby_state.as_slice());
            }
            Response::UsernameAlreadyExists { username } => {
                out.extend_from_slice(username.as_bytes());
            }
            Response::Lobby { lobby_state } => {
                out.extend_from_slice(lobby_state.as_slice());
            }
        }
        out
    }
}
/// Events flowing from client-handling tasks into the broker.
/// Most variants carry the `peer_id` of the client that triggered them.
#[derive(Debug)]
pub enum Event {
    /// The client identified by `peer_id` has quit.
    Quit {peer_id: Uuid},
    /// The client wants to (re)enter the lobby.
    Lobby {peer_id: Uuid},
    /// Requests a read-task sync for the client (see `Response::ReadSync`).
    ReadSync {peer_id: Uuid},
    /// The client wants to join the named chatroom.
    Join {
        chatroom_name: String,
        peer_id: Uuid
    },
    /// The client wants to create a chatroom with the given name.
    Create {
        chatroom_name: String,
        peer_id: Uuid
    },
    /// The client wants to set/change their username.
    Username {
        new_username: String,
        peer_id: Uuid
    },
    /// A newly accepted client connection.
    NewClient {
        /// Shared handle to the client's TCP stream.
        stream: Arc<net::TcpStream>,
        /// Receiver that only ever signals by closing (`Null` is uninhabited),
        /// presumably used for shutdown notification — TODO confirm.
        shutdown: AsyncStdReceiver<Null>,
        /// Channel over which the broker hands back a sender for chatroom
        /// events.
        chatroom_connection: AsyncStdSender<AsyncStdSender<Event>>,
        peer_id: Uuid,
    },
    /// A chat message sent by the client.
    Message {
        message: String,
        peer_id: Uuid,
    }
}
impl Event {
    /// Returns `true` if this event is the `Quit` variant.
    ///
    /// Uses `matches!` with `..` so the `peer_id` field is not bound,
    /// avoiding an unused-variable warning.
    pub fn is_quit(&self) -> bool {
        matches!(self, Event::Quit { .. })
    }
}
// TODO: implement parsing to and from bytes for message events
/// Requests sent from a client to the server.
#[derive(Debug, Clone, PartialEq)]
pub enum Frame {
    /// Disconnect from the server.
    Quit,
    /// Return to / request the lobby.
    Lobby,
    /// Join the named chatroom.
    Join {
        chatroom_name: String,
    },
    /// Create a chatroom with the given name.
    Create {
        chatroom_name: String,
    },
    /// Set or change the client's username.
    Username {
        new_username: String,
    },
    /// Send a chat message.
    Message {
        message: String,
    }
}
// Sound: every field is a `String`, whose equality is a total relation.
impl Eq for Frame {}
impl Frame {
    /// Helper method to reduce code duplication when serializing a `Frame`.
    ///
    /// Writes `length` into `tag` as 4 little-endian bytes starting at `idx`.
    /// Takes `tag`, the tag that we are serializing the `Frame` into, `idx` the starting position
    /// from which to start serializing a length value and `length`, the value that represents the length
    /// we wish to serialize.
    fn serialize_len(tag: &mut FrameEncodeTag, idx: usize, length: u32) {
        for i in idx ..idx + 4 {
            // XOR into a zeroed slot is equivalent to assignment; the 0xff
            // mask keeps only the low byte of the shifted value.
            tag[i] ^= ((length >> ((i - idx) * 8)) & 0xff) as u8;
        }
    }
    /// Helper method to reduce code duplication when deserializing a `Frame` tag.
    ///
    /// Reads 4 little-endian bytes from `tag` starting at `idx` and folds
    /// them into `length` (which the caller must initialize to zero).
    fn deserialize_len(tag: &FrameEncodeTag, idx: usize, length: &mut u32) {
        for i in idx .. idx + 4 {
            *length ^= (tag[i] as u32) << ((i - idx) * 8);
        }
    }
    /// Attempts to parse the `Frame` from `input_reader`.
    ///
    /// Reads the fixed 5-byte tag, then any payload whose length the tag
    /// encodes.
    ///
    /// # Errors
    /// Returns `Err` if reading from `input_reader` fails, if payload bytes
    /// are not valid UTF-8, or if the type byte is unrecognized.
    ///
    /// NOTE(review): the original doc claimed this method panics on an
    /// invalid type byte. The visible body returns
    /// `Err("invalid type of frame received")` instead; however
    /// `Frame::deserialize` (called on the tag first) panics on an invalid
    /// flag, so in practice the panic happens there and the final `else`
    /// branch is unreachable — TODO confirm and unify the two behaviors.
    pub async fn try_parse<R: AsyncReadExt + Unpin>(mut input_reader: R) -> Result<Self, &'static str> {
        // Read tag from reader
        let mut tag = [0u8; 5];
        input_reader.read_exact(&mut tag)
            .await
            .map_err(|_| "unable to read frame tag bytes from reader")?;
        // deserialize tag (panics on an invalid type byte — see note above)
        let (type_byte, length) = Frame::deserialize(&tag);
        // Attempt to parse the frame; `type_byte ^ k == 0` is equivalent to
        // `type_byte == k`
        if type_byte ^ 1 == 0 {
            Ok(Frame::Quit)
        } else if type_byte ^ 2 == 0 {
            Ok(Frame::Lobby)
        } else if type_byte ^ 3 == 0 {
            let mut chatroom_name_bytes = vec![0; length as usize];
            input_reader.read_exact(&mut chatroom_name_bytes)
                .await
                .map_err(|_| "unable to read bytes from reader")?;
            Ok(Frame::Join {chatroom_name: String::from_utf8(chatroom_name_bytes).map_err(|_| "unable to parse chatroom name as valid utf8")?})
        } else if type_byte ^ 4 == 0 {
            let mut chatroom_name_bytes = vec![0; length as usize];
            input_reader.read_exact(&mut chatroom_name_bytes)
                .await
                .map_err(|_| "unable to read bytes from reader")?;
            Ok(Frame::Create {chatroom_name: String::from_utf8(chatroom_name_bytes).map_err(|_| "unable to parse chatroom name as valid utf8")?})
        } else if type_byte ^ 5 == 0 {
            let mut new_username_bytes = vec![0; length as usize];
            input_reader.read_exact(&mut new_username_bytes)
                .await
                .map_err(|_| "unable to read bytes from reader")?;
            Ok(Frame::Username {new_username: String::from_utf8(new_username_bytes).map_err(|_| "unable to parse new username as valid utf8")?})
        } else if type_byte ^ 6 == 0 {
            let mut message_bytes = vec![0; length as usize];
            input_reader.read_exact(&mut message_bytes)
                .await
                .map_err(|_| "unable to read bytes from reader")?;
            Ok(Frame::Message {message: String::from_utf8(message_bytes).map_err(|_| "unable to parse message as valid utf8")?})
        } else {
            // Unreachable in practice: `Frame::deserialize` panics first.
            Err("invalid type of frame received")
        }
    }
}
/// The fixed-size tag used when serializing a `Frame`:
/// 1 type byte followed by a 4-byte little-endian payload length.
pub type FrameEncodeTag = [u8; 5];
impl SerializationTag for FrameEncodeTag {}
/// The `(type flag, payload length)` pair produced when deserializing a
/// `Frame` tag.
pub type FrameDecodeTag = (u8, u32);
impl DeserializationTag for FrameDecodeTag {}
impl SerAsBytes for Frame {
    type Tag = FrameEncodeTag;

    /// Encodes this `Frame` into its 5-byte tag: byte 0 carries the type
    /// flag, bytes 1-4 the little-endian payload length (zero for the
    /// payload-less `Quit` and `Lobby`).
    fn serialize(&self) -> Self::Tag {
        let mut tag = [0u8; 5];
        let (flag, payload_len) = match self {
            Frame::Quit => (1, None),
            Frame::Lobby => (2, None),
            Frame::Join { chatroom_name } => (3, Some(chatroom_name.len())),
            Frame::Create { chatroom_name } => (4, Some(chatroom_name.len())),
            Frame::Username { new_username } => (5, Some(new_username.len())),
            Frame::Message { message } => (6, Some(message.len())),
        };
        tag[0] = flag;
        if let Some(len) = payload_len {
            Frame::serialize_len(&mut tag, 1, len as u32);
        }
        tag
    }
}
impl AsBytes for Frame {
    /// Produces the full wire representation of this `Frame`: the 5-byte tag
    /// followed by the UTF-8 payload, if any.
    fn as_bytes(&self) -> Vec<u8> {
        let mut out = self.serialize().to_vec();
        match self {
            Frame::Quit | Frame::Lobby => {}
            Frame::Join { chatroom_name } | Frame::Create { chatroom_name } => {
                out.extend_from_slice(chatroom_name.as_bytes());
            }
            Frame::Username { new_username } => {
                out.extend_from_slice(new_username.as_bytes());
            }
            Frame::Message { message } => {
                out.extend_from_slice(message.as_bytes());
            }
        }
        out
    }
}
impl DeserAsBytes for Frame {
    type TvlTag = FrameDecodeTag;

    /// Decodes a 5-byte `Frame` tag into `(type flag, payload length)`.
    ///
    /// # Panics
    /// Panics if byte 0 is not a recognized type flag (1..=6).
    fn deserialize(tag: &Self::Tag) -> Self::TvlTag {
        let type_flag = tag[0];
        match type_flag {
            // Quit and Lobby carry no payload.
            1 | 2 => (type_flag, 0),
            // Join, Create, Username and Message all store their payload
            // length in bytes 1-4.
            3 | 4 | 5 | 6 => {
                let mut payload_len = 0u32;
                Frame::deserialize_len(tag, 1, &mut payload_len);
                (type_flag, payload_len)
            }
            _ => panic!("invalid type byte detected, unable to deserialize 'Frame' tag"),
        }
    }
}
/// Server-side record of a connected client.
pub struct Client {
    // Display name chosen by the client.
    pub username: String,
    // Unique identifier assigned to this client.
    pub id: Uuid,
    // The client's TCP connection; presumably `None` once the client has
    // disconnected or before the stream is attached — TODO confirm against
    // the code that constructs `Client`.
    pub stream: Option<net::TcpStream>
}
/// An uninhabited type: no value of `Null` can ever be constructed. Used as
/// a channel message type where the channel communicates only by closing
/// (e.g. the `shutdown` receiver in `Event::NewClient`).
pub enum Null {}
/// Types that can encode themselves into a fixed-size tag.
pub trait SerAsBytes {
    /// The fixed-size tag produced by `serialize`.
    type Tag: SerializationTag;
    /// Encodes `self` into its tag.
    fn serialize(&self) -> Self::Tag;
}
/// Types whose serialized tag can be decoded back into its raw fields.
pub trait DeserAsBytes: SerAsBytes {
    /// The tuple of raw fields produced by `deserialize`.
    type TvlTag: DeserializationTag;
    /// Decodes a tag into its raw `(type, lengths, ...)` fields.
    fn deserialize(tag: &Self::Tag) -> Self::TvlTag;
}
/// Types with a complete wire representation (tag plus payload bytes).
pub trait AsBytes: SerAsBytes + DeserAsBytes {
    /// Returns the full byte encoding of `self`.
    fn as_bytes(&self) -> Vec<u8>;
}
/// Marker trait for fixed-size serialization tags.
pub trait SerializationTag {}
/// Marker trait for decoded tag tuples.
pub trait DeserializationTag {}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn test_serialize_frame() {
let frame = Frame::Quit;
let frame_tag = frame.serialize();
println!("{:?}", frame_tag);
assert_eq!(frame_tag, [1, 0, 0, 0, 0]);
let frame = Frame::Lobby;
let frame_tag = frame.serialize();
println!("{:?}", frame_tag);
assert_eq!(frame_tag, [2, 0, 0, 0, 0]);
let frame = Frame::Join { chatroom_name: String::from("Test Chatroom 1") };
let frame_tag = frame.serialize();
println!("{:?}", frame_tag);
assert_eq!(frame_tag, [3, 15, 0, 0, 0]);
let frame = Frame::Create { chatroom_name: String::from("Chatroom 1") };
let frame_tag = frame.serialize();
println!("{:?}", frame_tag);
assert_eq!(frame_tag, [4, 10, 0, 0, 0]);
let frame = Frame::Username { new_username: String::from("My new username") };
let frame_tag = frame.serialize();
println!("{:?}", frame_tag);
assert_eq!(frame_tag, [5, 15, 0, 0, 0]);
let frame = Frame::Message { message: String::from("My test message") };
let frame_tag = frame.serialize();
println!("{:?}", frame_tag);
assert_eq!(frame_tag, [6, 15, 0, 0, 0]);
}
#[test]
fn test_deserialize_frame() {
let frame = Frame::Quit;
let frame_tag = frame.serialize();
let (type_byte, length) = Frame::deserialize(&frame_tag);
println!("type: {}, length: {}", type_byte, length);
assert_eq!(type_byte, 1);
assert_eq!(length, 0);
let frame = Frame::Lobby;
let frame_tag = frame.serialize();
let (type_byte, length) = Frame::deserialize(&frame_tag);
println!("type: {}, length: {}", type_byte, length);
assert_eq!(type_byte, 2);
assert_eq!(length, 0);
let frame = Frame::Join { chatroom_name: String::from("Test Chatroom 1") };
let frame_tag = frame.serialize();
let (type_byte, length) = Frame::deserialize(&frame_tag);
println!("type: {}, length: {}", type_byte, length);
assert_eq!(type_byte, 3);
assert_eq!(length, 15);
let frame = Frame::Create { chatroom_name: String::from("Chatroom 1") };
let frame_tag = frame.serialize();
let (type_byte, length) = Frame::deserialize(&frame_tag);
println!("type: {}, length: {}", type_byte, length);
assert_eq!(type_byte, 4);
assert_eq!(length, 10);
let frame = Frame::Username { new_username: String::from("My new username") };
let frame_tag = frame.serialize();
let (type_byte, length) = Frame::deserialize(&frame_tag);
println!("type: {}, length: {}", type_byte, length);
assert_eq!(type_byte, 5);
assert_eq!(length, 15);
let frame = Frame::Message { message: String::from("My test message") };
let frame_tag = frame.serialize();
let (type_byte, length) = Frame::deserialize(&frame_tag);
println!("type: {}, length: {}", type_byte, length);
assert_eq!(type_byte, 6);
assert_eq!(length, 15);
}
#[test]
fn test_frame_as_bytes() {
let frame = Frame::Quit;
let frame_bytes = frame.as_bytes();
println!("{:?}", frame_bytes);
assert_eq!(frame_bytes, vec![1, 0, 0, 0, 0]);
let frame = Frame::Lobby;
let frame_bytes = frame.as_bytes();
println!("{:?}", frame_bytes);
assert_eq!(frame_bytes, vec![2, 0, 0, 0, 0]);
let frame = Frame::Join { chatroom_name: String::from("Test Chatroom 1") };
let frame_bytes = frame.as_bytes();
println!("{:?}", frame_bytes);
let mut res_bytes = vec![3, 15, 0, 0, 0];
res_bytes.extend_from_slice("Test Chatroom 1".as_bytes());
assert_eq!(frame_bytes, res_bytes);
let frame = Frame::Create { chatroom_name: String::from("Chatroom 1") };
let frame_bytes = frame.as_bytes();
println!("{:?}", frame_bytes);
let mut res_bytes = vec![4, 10, 0, 0, 0];
res_bytes.extend_from_slice("Chatroom 1".as_bytes());
assert_eq!(frame_bytes, res_bytes);
let frame = Frame::Username { new_username: String::from("My new username") };
let frame_bytes = frame.as_bytes();
println!("{:?}", frame_bytes);
let mut res_bytes = vec![5, 15, 0, 0, 0];
res_bytes.extend_from_slice("My new username".as_bytes());
assert_eq!(frame_bytes, res_bytes);
let frame = Frame::Message { message: String::from("My test message") };
let frame_bytes = frame.as_bytes();
println!("{:?}", frame_bytes);
let mut res_bytes = vec![6, 15, 0, 0, 0];
res_bytes.extend_from_slice("My test message".as_bytes());
assert_eq!(frame_bytes, res_bytes);
}
#[test]
fn test_frame_try_parse() {
use async_std::io::Cursor;
use async_std::task::block_on;
// Test quit
let frame = Frame::Quit;
let mut input_stream = Cursor::new(frame.as_bytes());
let parsed_frame_res = block_on(Frame::try_parse(&mut input_stream));
println!("{:?}", parsed_frame_res);
assert!(parsed_frame_res.is_ok());
let parsed_frame = parsed_frame_res.unwrap();
assert_eq!(frame, parsed_frame);
// Test quit
let frame = Frame::Lobby;
let mut input_stream = Cursor::new(frame.as_bytes());
let parsed_frame_res = block_on(Frame::try_parse(&mut input_stream));
println!("{:?}", parsed_frame_res);
assert!(parsed_frame_res.is_ok());
let parsed_frame = parsed_frame_res.unwrap();
assert_eq!(frame, parsed_frame);
// Test join
let frame = Frame::Join {chatroom_name: String::from("Testing testing... Chatroom good name")};
let mut input_stream = Cursor::new(frame.as_bytes());
let parsed_frame_res = block_on(Frame::try_parse( &mut input_stream));
println!("{:?}", parsed_frame_res);
assert!(parsed_frame_res.is_ok());
let parsed_frame = parsed_frame_res.unwrap();
assert_eq!(frame, parsed_frame);
// Test create
let frame = Frame::Create {chatroom_name: String::from("Testing testing... another testing Chatroom good name")};
let mut input_stream = Cursor::new(frame.as_bytes());
let parsed_frame_res = block_on(Frame::try_parse(&mut input_stream));
println!("{:?}", parsed_frame_res);
assert!(parsed_frame_res.is_ok());
let parsed_frame = parsed_frame_res.unwrap();
assert_eq!(frame, parsed_frame);
// Test username
let frame = Frame::Username {new_username: String::from("A good testing username")};
let mut input_stream = Cursor::new(frame.as_bytes());
let parsed_frame_res = block_on(Frame::try_parse(&mut input_stream));
println!("{:?}", parsed_frame_res);
assert!(parsed_frame_res.is_ok());
let parsed_frame = parsed_frame_res.unwrap();
assert_eq!(frame, parsed_frame);
// Test message
let frame = Frame::Message {message: String::from("Testing testing... another test message")};
let mut input_stream = Cursor::new(frame.as_bytes());
let parsed_frame_res = block_on(Frame::try_parse(&mut input_stream));
println!("{:?}", parsed_frame_res);
assert!(parsed_frame_res.is_ok());
let parsed_frame = parsed_frame_res.unwrap();
assert_eq!(frame, parsed_frame);
}
#[test]
fn test_serialize_response() {
// Test ConnectionOk
let response = Response::ConnectionOk;
let tag = response.serialize();
println!("{:?}", tag);
assert_eq!(tag, [1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]);
// Test Subscribed
let response = Response::Subscribed { chatroom_name: String::from("Test Chatroom") };
let tag = response.serialize();
println!("{:?}", tag);
assert_eq!(tag, [2, 13, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]);
// Test ChatroomCreated
let response = Response::ChatroomCreated { chatroom_name: String::from("Test Chatroom 42") };
let tag = response.serialize();
println!("{:?}", tag);
assert_eq!(tag, [3, 16, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]);
// Test ChatroomAlreadyExists
let response = Response::ChatroomAlreadyExists { chatroom_name: String::from("Test Chatroom 43"), lobby_state: vec![38, 23, 1, 1, 0] };
let tag = response.serialize();
println!("{:?}", tag);
assert_eq!(tag, [4, 16, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]);
// Test ChatroomDoesNotExist
let response = Response::ChatroomDoesNotExist { chatroom_name: String::from("Test Chatroom 43"), lobby_state: vec![38, 23, 1, 1, 0] };
let tag = response.serialize();
println!("{:?}", tag);
assert_eq!(tag, [5, 16, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]);
// Test ChatroomFull
let response = Response::ChatroomFull { chatroom_name: String::from("Test Chatroom 44"), lobby_state: vec![38, 23, 1, 1, 0, 0, 0, 0, 34] };
let tag = response.serialize();
println!("{:?}", tag);
assert_eq!(tag, [6, 16, 9, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]);
// Test Message
let peer_id = Uuid::new_v4();
let response = Response::Message { peer_id, msg: String::from("Test message") };
let tag = response.serialize();
println!("{:?}", tag);
let mut expected_tag = [7, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
expected_tag[1..17].copy_from_slice(peer_id.as_bytes());
// expected_tag[17] = 13;
expected_tag[17] = 12;
assert_eq!(tag, expected_tag);
// Test UsernameOk
let response = Response::UsernameOk {username: String::from("Good test username"), lobby_state: vec![0, 0, 0] };
let tag = response.serialize();
println!("{:?}", tag);
assert_eq!(tag, [8, 18, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]);
// Test UsernameAlreadyExists
let response = Response::UsernameAlreadyExists {username: String::from("Good test username 42")};
let tag = response.serialize();
println!("{:?}", tag);
assert_eq!(tag, [9, 21, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]);
// Test ExitChatroom
let response = Response::ExitChatroom {chatroom_name: String::from("Perfect chatroom name")};
let tag = response.serialize();
println!("{:?}", tag);
assert_eq!(tag, [10, 21, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]);
// Test Lobby
let response = Response::Lobby {lobby_state: vec![42, 42, 42, 42, 42, 42, 10, 1]};
let tag = response.serialize();
println!("{:?}", tag);
assert_eq!(tag, [11, 8, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]);
// Test ExitLobby
let response = Response::ExitLobby;
let tag = response.serialize();
println!("{:?}", tag);
assert_eq!(tag, [12, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]);
// Test ReadSync
let response = Response::ReadSync;
let tag = response.serialize();
println!("{:?}", tag);
assert_eq!(tag, [13, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]);
}
#[test]
fn test_deserialize_response() {
// Test ConnectionOk
let response = Response::ConnectionOk;
let tag = response.serialize();
let (type_byte, id, name_len, data_len) = Response::deserialize(&tag);
println!("{:?}", (type_byte, id, name_len, data_len));
assert_eq!(type_byte, 1);
assert_eq!(id, 0);
assert_eq!(name_len, 0);
assert_eq!(data_len, 0);
// Test Subscribed
let response = Response::Subscribed {chatroom_name: String::from("Good test chatroom name")};
let tag = response.serialize();
let (type_byte, id, name_len, data_len) = Response::deserialize(&tag);
println!("{:?}", (type_byte, id, name_len, data_len));
assert_eq!(type_byte, 2);
assert_eq!(id, 0);
assert_eq!(name_len, 23);
assert_eq!(data_len, 0);
// Test ChatroomCreated
let response = Response::ChatroomCreated {chatroom_name: String::from("Good test chatroom name 42")};
let tag = response.serialize();
let (type_byte, id, name_len, data_len) = Response::deserialize(&tag);
println!("{:?}", (type_byte, id, name_len, data_len));
assert_eq!(type_byte, 3);
assert_eq!(id, 0);
assert_eq!(name_len, 26);
assert_eq!(data_len, 0);
// Test ChatroomCreated
let response = Response::ChatroomAlreadyExists {chatroom_name: String::from("Good test chatroom name 143"), lobby_state: vec![1, 2, 3, 4]};
let tag = response.serialize();
let (type_byte, id, name_len, data_len) = Response::deserialize(&tag);
println!("{:?}", (type_byte, id, name_len, data_len));
assert_eq!(type_byte, 4);
assert_eq!(id, 0);
assert_eq!(name_len, 27);
assert_eq!(data_len, 4);
// Test ChatroomDoesNotExist
let response = Response::ChatroomDoesNotExist {chatroom_name: String::from("Good test chatroom"), lobby_state: vec![1, 2, 3, 4, 5]};
let tag = response.serialize();
let (type_byte, id, name_len, data_len) = Response::deserialize(&tag);
println!("{:?}", (type_byte, id, name_len, data_len));
assert_eq!(type_byte, 5);
assert_eq!(id, 0);
assert_eq!(name_len, 18);
assert_eq!(data_len, 5);
// Test ChatroomFull
let response = Response::ChatroomFull {chatroom_name: String::from("A Good full test chatroom"), lobby_state: vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]};
let tag = response.serialize();
let (type_byte, id, name_len, data_len) = Response::deserialize(&tag);
println!("{:?}", (type_byte, id, name_len, data_len));
assert_eq!(type_byte, 6);
assert_eq!(id, 0);
assert_eq!(name_len, 25);
assert_eq!(data_len, 11);
// Test Message
let peer_id = Uuid::new_v4();
let response = Response::Message {peer_id, msg: String::from("A good test message") };
let tag = response.serialize();
let (type_byte, id, name_len, data_len) = Response::deserialize(&tag);
println!("{:?}", (type_byte, id, name_len, data_len));
assert_eq!(type_byte, 7);
assert_eq!(id, peer_id.as_u128());
assert_eq!(name_len, 0);
assert_eq!(data_len, 19);
// Test UsernameOk
let response = Response::UsernameOk {username: String::from("Test Username 42"), lobby_state: vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]};
let tag = response.serialize();
let (type_byte, id, name_len, data_len) = Response::deserialize(&tag);
println!("{:?}", (type_byte, id, name_len, data_len));
assert_eq!(type_byte, 8);
assert_eq!(id, 0);
assert_eq!(name_len, 16);
assert_eq!(data_len, 11);
// Test UsernameAlreadyExists
let response = Response::UsernameAlreadyExists {username: String::from("Bad Test Username 42")};
let tag = response.serialize();
let (type_byte, id, name_len, data_len) = Response::deserialize(&tag);
println!("{:?}", (type_byte, id, name_len, data_len));
assert_eq!(type_byte, 9);
assert_eq!(id, 0);
assert_eq!(name_len, 20);
assert_eq!(data_len, 0);
// Test ExitChatroom
let response = Response::ExitChatroom {chatroom_name: String::from("Decent test chatroom name")};
let tag = response.serialize();
let (type_byte, id, name_len, data_len) = Response::deserialize(&tag);
println!("{:?}", (type_byte, id, name_len, data_len));
assert_eq!(type_byte, 10);
assert_eq!(id, 0);
assert_eq!(name_len, 25);
assert_eq!(data_len, 0);
// Test Lobby
let response = Response::Lobby {lobby_state: vec![42, 42, 42, 42, 42, 42, 10, 1]};
let tag = response.serialize();
let (type_byte, id, name_len, data_len) = Response::deserialize(&tag);
println!("{:?}", (type_byte, id, name_len, data_len));
assert_eq!(type_byte, 11);
assert_eq!(id, 0);
assert_eq!(name_len, 0);
assert_eq!(data_len, 8);
// Test ExitLobby
let response = Response::ExitLobby;
let tag = response.serialize();
let (type_byte, id, name_len, data_len) = Response::deserialize(&tag);
println!("{:?}", (type_byte, id, name_len, data_len));
assert_eq!(type_byte, 12);
assert_eq!(id, 0);
assert_eq!(name_len, 0);
assert_eq!(data_len, 0);
// Test ReadSync
let response = Response::ReadSync;
let tag = response.serialize();
let (type_byte, id, name_len, data_len) = Response::deserialize(&tag);
println!("{:?}", (type_byte, id, name_len, data_len));
assert_eq!(type_byte, 13);
assert_eq!(id, 0);
assert_eq!(name_len, 0);
assert_eq!(data_len, 0);
}
#[test]
fn test_response_as_bytes() {
    // `as_bytes` must emit the serialized 22-byte header tag followed by the
    // variant's name bytes and then any lobby-state / message payload bytes.
    // Fixes: dropped unused `mut` on the ExitLobby/ReadSync expected vectors
    // (compiler warnings), removed dead commented-out lines, and replaced
    // `String::from(..).as_bytes()` with direct `&str::as_bytes` (no alloc).
    // Test ConnectionOk: type byte 1, header only.
    let response = Response::ConnectionOk;
    let response_bytes = response.as_bytes();
    println!("{:?}", response_bytes);
    assert_eq!(response_bytes, vec![1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]);
    // Test Subscribed: type byte 2, name length 13, name bytes appended.
    let response = Response::Subscribed { chatroom_name: String::from("Test Chatroom") };
    let response_bytes = response.as_bytes();
    println!("{:?}", response_bytes);
    let mut expected_bytes = vec![2, 13, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
    expected_bytes.extend_from_slice("Test Chatroom".as_bytes());
    assert_eq!(response_bytes, expected_bytes);
    // Test ChatroomCreated: type byte 3, name length 16.
    let response = Response::ChatroomCreated { chatroom_name: String::from("Test Chatroom 42") };
    let response_bytes = response.as_bytes();
    println!("{:?}", response_bytes);
    let mut expected_bytes = vec![3, 16, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
    expected_bytes.extend_from_slice("Test Chatroom 42".as_bytes());
    assert_eq!(response_bytes, expected_bytes);
    // Test ChatroomAlreadyExists: type byte 4, name length 16, payload length 3.
    let response = Response::ChatroomAlreadyExists { chatroom_name: String::from("Test Chatroom 43"), lobby_state: vec![97, 98, 99] };
    let response_bytes = response.as_bytes();
    println!("{:?}", response_bytes);
    let mut expected_bytes = vec![4, 16, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
    expected_bytes.extend_from_slice("Test Chatroom 43".as_bytes());
    expected_bytes.extend_from_slice(&[97, 98, 99]);
    assert_eq!(response_bytes, expected_bytes);
    // Test ChatroomDoesNotExist: type byte 5, name length 16, payload length 4.
    let response = Response::ChatroomDoesNotExist { chatroom_name: String::from("Test Chatroom 44"), lobby_state: vec![97, 98, 99, 100] };
    let response_bytes = response.as_bytes();
    println!("{:?}", response_bytes);
    let mut expected_bytes = vec![5, 16, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
    expected_bytes.extend_from_slice("Test Chatroom 44".as_bytes());
    expected_bytes.extend_from_slice(&[97, 98, 99, 100]);
    assert_eq!(response_bytes, expected_bytes);
    // Test ChatroomFull: type byte 6, name length 24, payload length 6.
    let response = Response::ChatroomFull { chatroom_name: String::from("Another Test Chatroom 45"), lobby_state: vec![97, 98, 99, 100, 101, 103] };
    let response_bytes = response.as_bytes();
    println!("{:?}", response_bytes);
    let mut expected_bytes = vec![6, 24, 6, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
    expected_bytes.extend_from_slice("Another Test Chatroom 45".as_bytes());
    expected_bytes.extend_from_slice(&[97, 98, 99, 100, 101, 103]);
    assert_eq!(response_bytes, expected_bytes);
    // Test Message: type byte 7, bytes 1..17 carry the peer UUID, byte 17 the
    // message length; the UTF-8 message bytes follow the 22-byte header.
    let peer_id = Uuid::new_v4();
    let response = Response::Message { peer_id, msg: String::from("Hello World!")};
    let response_bytes = response.as_bytes();
    println!("{:?}", response_bytes);
    let mut expected_bytes = vec![0; 22];
    expected_bytes[0] = 7;
    expected_bytes[1..17].copy_from_slice(peer_id.as_bytes());
    expected_bytes[17] = 12;
    expected_bytes.extend_from_slice("Hello World!".as_bytes());
    assert_eq!(response_bytes, expected_bytes);
    // Test UsernameOk: type byte 8, name length 16, payload length 6.
    let response = Response::UsernameOk { username: String::from("My test username"), lobby_state: vec![97, 98, 99, 100, 101, 103] };
    let response_bytes = response.as_bytes();
    println!("{:?}", response_bytes);
    let mut expected_bytes = vec![8, 16, 6, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
    expected_bytes.extend_from_slice("My test username".as_bytes());
    expected_bytes.extend_from_slice(&[97, 98, 99, 100, 101, 103]);
    assert_eq!(response_bytes, expected_bytes);
    // Test UsernameAlreadyExists: type byte 9, name length 20, no payload.
    let response = Response::UsernameAlreadyExists { username: String::from("My bad test username")};
    let response_bytes = response.as_bytes();
    println!("{:?}", response_bytes);
    let mut expected_bytes = vec![9, 20, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
    expected_bytes.extend_from_slice("My bad test username".as_bytes());
    assert_eq!(response_bytes, expected_bytes);
    // Test ExitChatroom: type byte 10, name length 23, no payload.
    let response = Response::ExitChatroom { chatroom_name: String::from("Test Exit chatroom name")};
    let response_bytes = response.as_bytes();
    println!("{:?}", response_bytes);
    let mut expected_bytes = vec![10, 23, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
    expected_bytes.extend_from_slice("Test Exit chatroom name".as_bytes());
    assert_eq!(response_bytes, expected_bytes);
    // Test Lobby: type byte 11, no name, payload length 6.
    let response = Response::Lobby { lobby_state: vec![97, 98, 99, 100, 101, 103]};
    let response_bytes = response.as_bytes();
    println!("{:?}", response_bytes);
    let mut expected_bytes = vec![11, 6, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
    expected_bytes.extend_from_slice(&[97, 98, 99, 100, 101, 103]);
    assert_eq!(response_bytes, expected_bytes);
    // Test ExitLobby: type byte 12, header only (no `mut` needed — never extended).
    let response = Response::ExitLobby;
    let response_bytes = response.as_bytes();
    println!("{:?}", response_bytes);
    let expected_bytes = vec![12, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
    assert_eq!(response_bytes, expected_bytes);
    // Test ReadSync: type byte 13, header only.
    let response = Response::ReadSync;
    let response_bytes = response.as_bytes();
    println!("{:?}", response_bytes);
    let expected_bytes = vec![13, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
    assert_eq!(response_bytes, expected_bytes);
}
#[test]
fn test_try_parse_response() {
    use async_std::io::Cursor;
    use async_std::task::block_on;
    // Every `Response` variant must survive an `as_bytes` -> `try_parse`
    // round trip unchanged. The original repeated the same six-line sequence
    // thirteen times; it is factored into a single helper closure here.
    let assert_round_trip = |response: Response| {
        let mut input_reader = Cursor::new(response.as_bytes());
        let parsed_response_res = block_on(Response::try_parse(&mut input_reader));
        println!("{:?}", parsed_response_res);
        assert!(parsed_response_res.is_ok());
        assert_eq!(parsed_response_res.unwrap(), response);
    };
    // Header-only variants.
    assert_round_trip(Response::ConnectionOk);
    assert_round_trip(Response::ExitLobby);
    assert_round_trip(Response::ReadSync);
    // Variants carrying only a chatroom name.
    assert_round_trip(Response::Subscribed { chatroom_name: String::from("A good test chatroom name") });
    assert_round_trip(Response::ChatroomCreated { chatroom_name: String::from("A good test chatroom name 432134") });
    assert_round_trip(Response::ExitChatroom { chatroom_name: String::from("A good test chatroom name for exiting") });
    // Variants carrying a chatroom name plus a lobby-state payload.
    assert_round_trip(Response::ChatroomAlreadyExists { chatroom_name: String::from("A good test chatroom name 432134"), lobby_state: vec![0, 1, 2, 3, 4] });
    assert_round_trip(Response::ChatroomDoesNotExist { chatroom_name: String::from("A good test chatroom name 666"), lobby_state: vec![6, 6, 6] });
    assert_round_trip(Response::ChatroomFull { chatroom_name: String::from("Another good test chatroom name 666"), lobby_state: vec![6, 6, 6, 6, 6, 6] });
    // Username variants.
    assert_round_trip(Response::UsernameOk { username: String::from("Another good test username 666"), lobby_state: vec![6, 6, 6] });
    assert_round_trip(Response::UsernameAlreadyExists { username: String::from("Another bad test username 666 already taken") });
    // A chat message tagged with the sending peer's UUID.
    assert_round_trip(Response::Message { peer_id: Uuid::new_v4(), msg: String::from("A good test message") });
    // Lobby state on its own.
    assert_round_trip(Response::Lobby { lobby_state: vec![6, 6, 6] });
}
#[test]
fn test_chatroom_serialize() {
    use tokio::sync::broadcast;
    use async_std::channel;
    // Build a chatroom with a 17-byte name and known capacity/occupancy,
    // then check the exact serialized length tag.
    let (subscriber, _) = broadcast::channel::<Response>(1);
    let (read_sender, _) = channel::unbounded::<Event>();
    let chatroom = Chatroom {
        id: Uuid::new_v4(),
        name: Arc::new(String::from("Test chatroom 666")),
        client_subscriber: subscriber,
        client_read_sender: read_sender,
        shutdown: None,
        capacity: 4798,
        num_clients: 2353,
    };
    let serialized = chatroom.serialize();
    println!("{:?}", serialized);
    // Name length 17, capacity 4798 (0x12BE -> 190, 18), clients 2353 (0x0931 -> 49, 9).
    assert_eq!(serialized, [17, 0, 0, 0, 190, 18, 0, 0, 49, 9, 0, 0])
}
#[test]
fn test_chatroom_deserialize() {
    use tokio::sync::broadcast;
    use async_std::channel;
    // Deserializing a freshly serialized tag must recover the name length,
    // capacity, and client count of the original chatroom.
    let (subscriber, _) = broadcast::channel::<Response>(1);
    let (read_sender, _) = channel::unbounded::<Event>();
    let name = Arc::new(String::from("Test chatroom 666"));
    let chatroom = Chatroom {
        id: Uuid::new_v4(),
        name: name.clone(),
        client_subscriber: subscriber,
        client_read_sender: read_sender,
        shutdown: None,
        capacity: 4798,
        num_clients: 2353,
    };
    let (name_len, capacity, num_clients) = Chatroom::deserialize(&chatroom.serialize());
    println!("{:?}", (name_len, capacity, num_clients));
    assert_eq!(name_len, name.len() as u32);
    assert_eq!(capacity, 4798);
    assert_eq!(num_clients, 2353);
}
#[test]
fn test_chatroom_as_bytes() {
    use tokio::sync::broadcast;
    use async_std::channel;
    // `as_bytes` must equal the serialized length tag with the raw name
    // bytes appended.
    let (subscriber, _) = broadcast::channel::<Response>(1);
    let (read_sender, _) = channel::unbounded::<Event>();
    let name = Arc::new(String::from("Test chatroom 666"));
    let chatroom = Chatroom {
        id: Uuid::new_v4(),
        name: name.clone(),
        client_subscriber: subscriber,
        client_read_sender: read_sender,
        shutdown: None,
        capacity: 4798,
        num_clients: 2353,
    };
    let actual = chatroom.as_bytes();
    println!("{:?}", actual);
    let mut expected = chatroom.serialize().to_vec();
    expected.extend_from_slice(name.as_bytes());
    assert_eq!(actual, expected);
}
#[test]
fn test_try_parse_chatroom_frame() {
    use tokio::sync::broadcast;
    use async_std::channel;
    use std::io::Cursor;
    // Parsing the byte encoding of a chatroom must yield a frame that
    // mirrors its name, capacity, and client count.
    let (subscriber, _) = broadcast::channel::<Response>(1);
    let (read_sender, _) = channel::unbounded::<Event>();
    let chatroom = Chatroom {
        id: Uuid::new_v4(),
        name: Arc::new(String::from("Test chatroom 666")),
        client_subscriber: subscriber,
        client_read_sender: read_sender,
        shutdown: None,
        capacity: 4798,
        num_clients: 2353,
    };
    let mut cursor = Cursor::new(chatroom.as_bytes());
    let frame_res = ChatroomFrame::try_parse(&mut cursor);
    println!("{:?}", frame_res);
    let frame = frame_res.unwrap();
    println!("{:?}", frame);
    assert_eq!(frame.name.as_str(), chatroom.name.as_str());
    assert_eq!(frame.capacity, chatroom.capacity);
    assert_eq!(frame.num_clients, chatroom.num_clients);
}
#[test]
fn test_try_from_chatroom_frames() {
    use tokio::sync::broadcast;
    use async_std::channel;
    // Parsing a concatenation of chatroom encodings must produce one frame
    // per chatroom, in order. The original triplicated ~14 lines of chatroom
    // construction and the per-frame asserts; both are factored into a
    // builder closure and a loop here.
    // Builds a chatroom with the given name and a fixed capacity/occupancy.
    let make_chatroom = |name: &str| -> Chatroom {
        let (broadcast_sender, _) = broadcast::channel::<Response>(1);
        let (chat_sender, _) = channel::unbounded::<Event>();
        Chatroom {
            id: Uuid::new_v4(),
            name: Arc::new(String::from(name)),
            client_subscriber: broadcast_sender,
            client_read_sender: chat_sender,
            shutdown: None,
            capacity: 4798,
            num_clients: 2353,
        }
    };
    let chatrooms = [
        make_chatroom("Test chatroom 666"),
        make_chatroom("Test chatroom 667"),
        make_chatroom("Test chatroom 668"),
    ];
    // Create the simulated lobby state by concatenating each encoding.
    let mut lobby_state = vec![];
    for chatroom in &chatrooms {
        lobby_state.append(&mut chatroom.as_bytes());
    }
    let chatroom_frames_res = ChatroomFrames::try_from(lobby_state);
    println!("{:?}", chatroom_frames_res);
    assert!(chatroom_frames_res.is_ok());
    let chatroom_frames = chatroom_frames_res.unwrap();
    println!("{:?}", chatroom_frames);
    assert!(chatroom_frames.frames.len() >= chatrooms.len());
    for (frame, chatroom) in chatroom_frames.frames.iter().zip(chatrooms.iter()) {
        assert_eq!(frame.name.as_str(), chatroom.name.as_str());
        assert_eq!(frame.capacity, chatroom.capacity);
        assert_eq!(frame.num_clients, chatroom.num_clients);
    }
    // An empty lobby state must also parse successfully.
    let lobby_state = vec![];
    let chatroom_frames_res = ChatroomFrames::try_from(lobby_state);
    println!("{:?}", chatroom_frames_res);
    assert!(chatroom_frames_res.is_ok());
}
}
|
//
// Payment+Methods.swift
// Stripe
//
// Created by Jonathan Holland on 6/5/22.
//
import Foundation
extension Payment {
/// Transaction-specific details about the payment method used for a payment.
public struct Details: Codable {
/// The type of transaction-specific details of the payment method used in the payment, one of `ach_credit_transfer`, `ach_debit`, `acss_debit`, `alipay`, `au_becs_debit`,` bancontact`, `card`, `card_present`, `eps`, `giropay`, `ideal`, `klarna`, `multibanco`, `p24`, `sepa_debit`, `sofort`, `stripe_account`, or `wechat`. An additional hash is included on `payment_method_details` with a name matching this value. It contains information specific to the payment method.
public let type: String
/// Details about the payment method at the time of the transaction.
public let object: MethodOption
/// Designated initializer
public init(type: String, object: MethodOption) {
self.type = type
self.object = object
}
}
/// The payment-method-specific options payload, one case per supported method.
public enum MethodOption: Codable {
/// If paying by `afterpay_clearpay`, this sub-hash contains details about the AfterpayClearpay payment method options to pass to the order’s PaymentIntent.
case afterpay_clearpay(AfterpayClearpay)
/// If paying by `card`, this sub-hash contains details about the Card payment method options to pass to the order’s PaymentIntent.
case card(PaymentCard)
/// If paying by `customer_balance`, this sub-hash contains details about the Customer Balance payment method options to pass to the order’s PaymentIntent.
case customer_balance(CustomerBalance)
/// If paying by `klarna`, this sub-hash contains the Klarna payment method options.
case klarna(Klarna)
/// If paying by `paypal`, this sub-hash contains the PayPal payment method options.
case paypal(PayPal)
/// If paying by a US bank account, this sub-hash contains the bank account details.
case usBankAccount(USBankAccount)
/// Decodes by attempting each associated-value type in turn and keeping the
/// first one that decodes successfully; the last failure is rethrown when
/// every attempt fails.
///
/// NOTE(review): several payloads share field names (e.g. `capture_method`,
/// `setup_future_usage`), so the attempt order determines which case wins for
/// ambiguous input — confirm this order matches the payloads the API sends.
/// NOTE(review): the `print(self)` calls look like leftover debug output.
public init(from decoder: Decoder) throws {
// Tracks the most recent decode failure so it can be surfaced below.
var returnableError: Error?
do {
let singleValueContainer = try decoder.singleValueContainer()
let afterpay_clearpay = try singleValueContainer.decode(AfterpayClearpay.self)
self = .afterpay_clearpay(afterpay_clearpay)
print(self)
return
} catch {
returnableError = error
}
do {
let singleValueContainer = try decoder.singleValueContainer()
let card = try singleValueContainer.decode(PaymentCard.self)
self = .card(card)
print(self)
return
} catch {
returnableError = error
}
do {
let singleValueContainer = try decoder.singleValueContainer()
let customer_balance = try singleValueContainer.decode(CustomerBalance.self)
self = .customer_balance(customer_balance)
print(self)
return
} catch {
returnableError = error
}
do {
let singleValueContainer = try decoder.singleValueContainer()
let klarna = try singleValueContainer.decode(Klarna.self)
self = .klarna(klarna)
print(self)
return
} catch {
returnableError = error
}
do {
let singleValueContainer = try decoder.singleValueContainer()
let paypal = try singleValueContainer.decode(PayPal.self)
self = .paypal(paypal)
print(self)
return
} catch {
returnableError = error
}
do {
let singleValueContainer = try decoder.singleValueContainer()
let usBankAccount = try singleValueContainer.decode(USBankAccount.self)
self = .usBankAccount(usBankAccount)
print(self)
return
} catch {
returnableError = error
}
// No candidate type decoded; `unknownType` is presumably a case on a
// project-defined `DecodingError` (Swift's built-in has no such case) — verify.
throw DecodingError.unknownType(returnableError)
}
}
/// String identifiers for every payment method type this module recognizes.
public enum MethodType: String, Codable {
case card, customer_balance, ideal, link, sepa_debit, eps, wechat_pay, oxxo, bancontact, alipay, p24, bacs_debit, giropay, sofort, au_becs_debit, fpx, klarna, paypal, acss_debit, grabpay, afterpay_clearpay
}
// MARK: Payment Method Objects
/// Options for an Afterpay/Clearpay payment.
public struct AfterpayClearpay: Codable {
/// Controls when the funds will be captured from the customer’s account.
public let captureMethod: CaptureMethod
/// Order identifier shown to the user in Afterpay’s online portal. We recommend using a value that helps you answer any questions a customer might have about the payment. The identifier is limited to 128 characters and may contain only public letters, digits, underscores, backslashes and dashes.
public let reference: String
/// Indicates that you intend to make future payments with the payment method.
///
/// Providing this parameter will [attach the payment method](https://stripe.com/docs/payments/save-during-payment) to the order’s Customer, if present, after the order’s PaymentIntent is confirmed and any required actions from the user are complete. If no Customer was provided, the payment method can still be [attached](https://stripe.com/docs/api/payment_methods/attach) to a Customer after the transaction completes.
///
/// When processing card payments, Stripe also uses `setup_future_usage` to dynamically optimize your payment flow and comply with regional legislation and network rules, such as [SCA](https://stripe.com/docs/strong-customer-authentication).
///
/// If `setup_future_usage` is already set and you are performing a request using a publishable key, you may only update the value from `on_session` to `off_session`.
public let setupFutureUsage: SetupFutureUsage
/// Designated initializer
public init(captureMethod: CaptureMethod, reference: String, setupFutureUsage: SetupFutureUsage) {
self.captureMethod = captureMethod
self.reference = reference
self.setupFutureUsage = setupFutureUsage
}
public enum CodingKeys: String, CodingKey {
case captureMethod = "capture_method", reference, setupFutureUsage = "setup_future_usage"
}
}
/// Options for a payment funded from the customer's Stripe balance.
public struct CustomerBalance: Codable {
/// Configuration for the bank transfer funding type, if the `funding_type` is set to `bank_transfer`.
public let bankTransfer: BankTransfer
/// The funding method type to be used when there are not enough funds in the customer balance. Permitted values include: `bank_transfer`.
public let fundingType: String
/// Indicates that you intend to make future payments with this PaymentIntent’s payment method.
///
/// Providing this parameter [will attach the payment method](https://stripe.com/docs/payments/save-during-payment) to the PaymentIntent’s Customer, if present, after the PaymentIntent is confirmed and any required actions from the user are complete. If no Customer was provided, the payment method can still be [attached](https://stripe.com/docs/api/payment_methods/attach) to a Customer after the transaction completes.
///
/// When processing card payments, Stripe also uses `setup_future_usage` to dynamically optimize your payment flow and comply with regional legislation and network rules, such as [SCA](https://stripe.com/docs/strong-customer-authentication).
public let setupFutureUsage: SetupFutureUsage
/// Designated initializer
public init(bankTransfer: BankTransfer, fundingType: String, setupFutureUsage: SetupFutureUsage) {
self.bankTransfer = bankTransfer
self.fundingType = fundingType
self.setupFutureUsage = setupFutureUsage
}
public enum CodingKeys: String, CodingKey {
case bankTransfer = "bank_transfer", fundingType = "funding_type", setupFutureUsage = "setup_future_usage"
}
/// Bank-transfer configuration used when `fundingType` is `bank_transfer`.
public struct BankTransfer: Codable {
/// List of address types that should be returned in the financial_addresses response. If not specified, all valid types will be returned.
///
/// Permitted values include: `sort_code`, `zengin`, `iban`, or `spei`.
public let requestedAddressTypes: [RequestedAddressType]
/// The bank transfer type that this PaymentIntent is allowed to use for funding Permitted values include: `eu_bank_transfer`, `gb_bank_transfer`, `jp_bank_transfer`, or `mx_bank_transfer`.
public let type: TransferType
/// The funding method type to be used when there are not enough funds in the customer balance.
///
/// Permitted values include: `bank_transfer`.
public let fundingType: String
/// Indicates that you intend to make future payments with this PaymentIntent’s payment method.
///
/// Providing this parameter will [attach the payment method](https://stripe.com/docs/payments/save-during-payment) to the PaymentIntent’s Customer, if present, after the PaymentIntent is confirmed and any required actions from the user are complete. If no Customer was provided, the payment method can still be [attached](https://stripe.com/docs/api/payment_methods/attach) to a Customer after the transaction completes.
///
/// When processing card payments, Stripe also uses `setup_future_usage` to dynamically optimize your payment flow and comply with regional legislation and network rules, such as [SCA](https://stripe.com/docs/strong-customer-authentication).
public let setupFutureUsage: SetupFutureUsage
/// Designated initializer
public init(requestedAddressTypes: [RequestedAddressType], type: TransferType, fundingType: String, setupFutureUsage: SetupFutureUsage) {
self.requestedAddressTypes = requestedAddressTypes
self.type = type
self.fundingType = fundingType
self.setupFutureUsage = setupFutureUsage
}
public enum CodingKeys: String, CodingKey {
case fundingType = "funding_type", requestedAddressTypes = "requested_address_types", setupFutureUsage = "setup_future_usage", type
}
/// Financial address formats that may be requested in the response.
public enum RequestedAddressType: String, Codable {
case zengin, sort_code, iban, spei
}
/// The regional bank transfer scheme used for funding.
public enum TransferType: String, Codable {
case eu_bank_transfer, gb_bank_transfer, jp_bank_transfer, mx_bank_transfer
}
}
}
/// Options for a Klarna payment.
public struct Klarna: Codable {
/// Controls when the funds will be captured from the customer’s account.
public let captureMethod: CaptureMethod
/// Preferred locale of the Klarna checkout page that the customer is redirected to.
public let preferredLocale: String
/// Indicates that you intend to make future payments with this PaymentIntent’s payment method.
///
/// Providing this parameter [will attach the payment method](https://stripe.com/docs/payments/save-during-payment) to the PaymentIntent’s Customer, if present, after the PaymentIntent is confirmed and any required actions from the user are complete. If no Customer was provided, the payment method can still be [attached](https://stripe.com/docs/api/payment_methods/attach) to a Customer after the transaction completes.
///
/// When processing card payments, Stripe also uses `setup_future_usage` to dynamically optimize your payment flow and comply with regional legislation and network rules, such as [SCA](https://stripe.com/docs/strong-customer-authentication).
public let setupFutureUsage: SetupFutureUsage
/// Designated initializer
public init(captureMethod: CaptureMethod, preferredLocale: String, setupFutureUsage: SetupFutureUsage) {
self.captureMethod = captureMethod
self.preferredLocale = preferredLocale
self.setupFutureUsage = setupFutureUsage
}
public enum CodingKeys: String, CodingKey {
case captureMethod = "capture_method", preferredLocale = "preferred_locale", setupFutureUsage = "setup_future_usage"
}
}
/// Options for a card payment.
public struct PaymentCard: Codable {
/// Controls when the funds will be captured from the customer’s account.
public let captureMethod: CaptureMethod
/// Indicates that you intend to make future payments with the payment method.
///
/// Providing this parameter will [attach the payment method](https://stripe.com/docs/payments/save-during-payment) to the order’s Customer, if present, after the order’s PaymentIntent is confirmed and any required actions from the user are complete. If no Customer was provided, the payment method can still be [attached](https://stripe.com/docs/api/payment_methods/attach) to a Customer after the transaction completes.
///
/// When processing card payments, Stripe also uses `setup_future_usage` to dynamically optimize your payment flow and comply with regional legislation and network rules, such as [SCA](https://stripe.com/docs/strong-customer-authentication).
///
/// If `setup_future_usage` is already set and you are performing a request using a publishable key, you may only update the value from `on_session` to `off_session`.
public let setupFutureUsage: SetupFutureUsage
/// Designated initializer
public init(captureMethod: CaptureMethod, setupFutureUsage: SetupFutureUsage) {
self.captureMethod = captureMethod
self.setupFutureUsage = setupFutureUsage
}
public enum CodingKeys: String, CodingKey {
case captureMethod = "capture_method", setupFutureUsage = "setup_future_usage"
}
}
/// Options for a PayPal payment.
public struct PayPal: Codable {
/// Controls when the funds will be captured from the customer’s account.
public let captureMethod: CaptureMethod
/// Preferred locale of the checkout page that the customer is redirected to.
public let preferredLocale: String
/// Designated initializer
public init(captureMethod: CaptureMethod, preferredLocale: String) {
self.captureMethod = captureMethod
self.preferredLocale = preferredLocale
}
public enum CodingKeys: String, CodingKey {
case captureMethod = "capture_method", preferredLocale = "preferred_locale"
}
}
/// Details about a US bank account used for the payment.
public struct USBankAccount: Codable {
/// Account holder type: individual or company.
public let accountHolderType: AccountHolderType
/// Account type: checking or savings. Defaults to checking if omitted.
public let accountType: AccountType
/// Name of the bank associated with the bank account.
public let bankName: String
/// Uniquely identifies this particular bank account. You can use this attribute to check whether two bank accounts are the same.
public let fingerprint: String
/// Last four digits of the bank account number.
public let last4: String
/// Routing number of the bank account.
public let routingNumber: String
/// Designated initializer
public init(accountHolderType: AccountHolderType, accountType: AccountType, bankName: String, fingerprint: String, last4: String, routingNumber: String) {
self.accountHolderType = accountHolderType
self.accountType = accountType
self.bankName = bankName
self.fingerprint = fingerprint
self.last4 = last4
self.routingNumber = routingNumber
}
public enum CodingKeys: String, CodingKey {
case accountHolderType = "account_holder_type", accountType = "account_type", bankName = "bank_name", fingerprint, last4, routingNumber = "routing_number"
}
public enum AccountHolderType: String, Codable {
/// Account belongs to a company
case company
/// Account belongs to an individual
case individual
}
public enum AccountType: String, Codable {
/// Bank account type is checking
case checking
/// Bank account type is savings
case savings
}
}
}
|
package channels
import (
"encoding/json"
"fmt"
"github.com/dgraph-io/badger/v4"
)
// Message is a single chat message persisted under a channel key.
type Message struct {
	Author  string // sender identifier
	Content string // message body
}

// Channel is a named chat channel; Id is used to build badger keys.
type Channel struct {
	Id   string
	Name string
}
// GetById loads the channel stored under "channel:<id>".
// Returns the zero Channel together with the error when the database
// cannot be opened, the key is missing, or the value fails to decode.
func GetById(id string) (Channel, error) {
	var result Channel

	db, err := badger.Open(badger.DefaultOptions("/tmp/gochatter"))
	if err != nil {
		return result, err
	}
	defer db.Close()

	viewErr := db.View(func(txn *badger.Txn) error {
		item, getErr := txn.Get([]byte(fmt.Sprintf("channel:%s", id)))
		if getErr != nil {
			return getErr
		}
		// Decode the stored JSON directly into the result.
		return item.Value(func(val []byte) error {
			return json.Unmarshal(val, &result)
		})
	})
	return result, viewErr
}
// GetMessages returns every message stored under the
// "channel:<id>:message" key prefix, in key order.
func GetMessages(id string) ([]Message, error) {
	var collected []Message

	db, err := badger.Open(badger.DefaultOptions("/tmp/gochatter"))
	if err != nil {
		return collected, err
	}
	defer db.Close()

	viewErr := db.View(func(txn *badger.Txn) error {
		it := txn.NewIterator(badger.DefaultIteratorOptions)
		defer it.Close()

		prefix := []byte(fmt.Sprintf("channel:%s:message", id))
		for it.Seek(prefix); it.ValidForPrefix(prefix); it.Next() {
			var msg Message
			if decodeErr := it.Item().Value(func(v []byte) error {
				return json.Unmarshal(v, &msg)
			}); decodeErr != nil {
				return decodeErr
			}
			collected = append(collected, msg)
		}
		return nil
	})
	return collected, viewErr
}
// CreateChannelMessage appends a new message to the channel identified by id,
// keyed as "channel:<id>:message:<n>" where n comes from a badger sequence.
func CreateChannelMessage(id, author, content string) error {
	db, err := badger.Open(badger.DefaultOptions("/tmp/gochatter"))
	if err != nil {
		return err
	}
	defer db.Close()

	seq, err := db.GetSequence([]byte(fmt.Sprintf("channel:%s", id)), 1)
	if err != nil {
		return err
	}
	// BUG FIX: the sequence was never released, leaking its leased numbers
	// on every call; badger's docs say to defer Release after GetSequence.
	defer seq.Release()

	return db.Update(func(txn *badger.Txn) error {
		messageId, err := seq.Next()
		if err != nil {
			return err
		}
		messageBytes, err := json.Marshal(Message{
			Author:  author,
			Content: content,
		})
		if err != nil {
			return err
		}
		key := []byte(fmt.Sprintf("channel:%s:message:%v", id, messageId))
		return txn.Set(key, messageBytes)
	})
}
|
/* eslint-disable jsx-a11y/anchor-is-valid */
// import logo from "./logo.svg";
import "./App.css";
import React, { useState } from "react";
import Navbar from "./Components/Navbar";
import TextForm from "./Components/TextForm";
import { Alter } from "./Components/Alter";
import { BrowserRouter as Router, Routes, Route } from "react-router-dom";
import About from "./Components/About";
import Footer from "./Components/Footer";
function App() {
const [themeState, setThemeState] = useState("light");
const [alertState, setAlertState] = useState(null);
const setAlert = (type, message) => {
setAlertState({
type: type,
message: message,
});
setTimeout(() => {
setAlertState(null);
}, 1500);
};
const setTheme = () => {
if (themeState === "light") {
setThemeState("dark");
document.body.style.background = "rgb(50 52 54)";
document.body.style.color = "white";
document.title = "TextUtils - Dark Mode";
setAlert("success", "Dark Mode is enabled");
} else {
setThemeState("light");
document.body.style.background = "white";
document.title = "TextUtils - light Mode";
document.body.style.color = "black";
setAlert("success", "Light Mode is enabled");
}
};
return (
<Router>
<Navbar
title="Text Utils"
theme={themeState}
setThemeCallback={setTheme}
/>
<div className="container">
<Alter alert={alertState} />
<Routes>
<Route path="/about" element={<About theme={themeState} />}></Route>
<Route
path="/"
element={
<TextForm
textTitle="Enter the text to Analyze"
theme={themeState}
setAlert={setAlert}
/>
}
></Route>
</Routes>
</div>
<Footer theme={themeState} />
</Router>
);
}
export default App;
|
import React, { useCallback, useEffect } from "react";
/**
 * Invokes `handler` whenever a mousedown/touchstart lands outside `ref`.
 *
 * Note: document-level listeners deliver *native* events, so the handler is
 * typed against MouseEvent | TouchEvent (the previous `React.MouseEvent`
 * type was inaccurate — synthetic events never reach these listeners).
 */
export const useOnClickOutside = (
  ref: React.MutableRefObject<HTMLElement | null>,
  handler: (event: MouseEvent | TouchEvent) => void,
) => {
  const listener = useCallback(
    (event: MouseEvent | TouchEvent) => {
      // Ignore events that originate inside the referenced element.
      if (!ref.current || ref.current.contains(event.target as Node)) {
        return;
      }
      handler(event);
    },
    [ref, handler],
  );

  useEffect(() => {
    document.addEventListener("mousedown", listener);
    document.addEventListener("touchstart", listener);
    return () => {
      document.removeEventListener("mousedown", listener);
      document.removeEventListener("touchstart", listener);
    };
    // BUG FIX: deps were previously `[]`, so when `handler` changed the
    // document kept firing a stale listener (and the cleanup removed a
    // listener that was never re-registered with the new callback).
  }, [listener]);
};
|
/*
* Copyright (C) 2001 ArsDigita Corporation. All Rights Reserved.
*
* The contents of this file are subject to the ArsDigita Public
* License (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of
* the License at http://www.arsdigita.com/ADPL.txt
*
* Software distributed under the License is distributed on an "AS
* IS" basis, WITHOUT WARRANTY OF ANY KIND, either express or
* implied. See the License for the specific language governing
* rights and limitations under the License.
*
*/
package com.arsdigita.london.search;
import com.arsdigita.domain.DomainCollection;
import com.arsdigita.domain.DomainObject;
import com.arsdigita.persistence.DataCollection;
import com.arsdigita.persistence.DataObject;
import com.arsdigita.util.Assert;
import java.math.BigDecimal;
/**
*
* @see Server
* @version $Id: ServerCollection.java 287 2005-02-22 00:29:02Z sskracic $
*/
public class ServerCollection extends DomainCollection {

    protected ServerCollection(DataCollection dataCollection) {
        super(dataCollection);
    }

    /**
     * Get the ID for the current row.
     * (Previous javadoc said "portal" — copy-paste artifact; this
     * collection iterates Server rows.)
     *
     * @return the id of the current row.
     * @post return != null
     */
    public BigDecimal getID() {
        final BigDecimal id = (BigDecimal) m_dataCollection.get("id");
        Assert.exists(id, BigDecimal.class);
        return id;
    }

    /**
     * Get the current item as a domain object.
     *
     * @return the domain object for the current row.
     * @post return != null
     */
    public DomainObject getDomainObject() {
        final DomainObject current = getServer();
        Assert.exists(current, DomainObject.class);
        return current;
    }

    /**
     * Get the current item as a Server domain object.
     *
     * @return a Server domain object.
     * @post return != null
     */
    public Server getServer() {
        final Server server = new Server(m_dataCollection.getDataObject());
        Assert.exists(server, Server.class);
        return server;
    }
}
|
import useFirstError from '@app/uiComponents/inputs/helpers/useFirstError';
import { PinInput } from '@mantine/core';
import { useState } from 'react';
import { Controller, useFormContext } from 'react-hook-form';
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore
import styles from './css/InputPin.module.css';
import type { PinInputProps } from '@mantine/core/lib';
import type { RegisterOptions } from 'react-hook-form';
interface Props extends PinInputProps {
    /** Field name registered with react-hook-form. */
    name: string;
    /** Optional callback fired with the new pin value on every change. */
    onInputChange?: (value: string) => void;
    /** react-hook-form validation rules (value-transform options excluded). */
    options?: Omit<RegisterOptions, 'valueAsNumber' | 'valueAsDate' | 'setValueAs' | 'disabled'>;
}
/**
 * PinInput wired into the surrounding react-hook-form context via Controller.
 * Mirrors the form value in local state so the Mantine input stays controlled,
 * and renders the field's first validation error underneath.
 */
export function InputPinControlled({ name, options, onInputChange, defaultValue, ...rest }: Props) {
    const { control, getValues } = useFormContext();
    // Seed from the explicit default, falling back to the current form value.
    const [pin, setPin] = useState<string | undefined>(defaultValue || getValues(name));
    const error = useFirstError(name);

    return (
        <>
            <Controller
                rules={options}
                control={control}
                name={name}
                render={({ field: { onChange } }) => (
                    <PinInput
                        value={pin}
                        error={Boolean(error)}
                        name={name}
                        onChange={(next) => {
                            // Propagate to the form first, then notify and mirror.
                            onChange(next);
                            onInputChange?.(next);
                            setPin(next);
                        }}
                        {...rest}
                    />
                )}
            />
            {error && <span className={styles.errorMessage}>{error}</span>}
        </>
    );
}
|
<html>
  <!-- BUG FIX: document metadata was wrapped in <header> (a body-level
       sectioning element); the metadata container must be <head>. -->
  <head>
    <meta charset="UTF-8" />
    <link rel="stylesheet" href="../milligram.css" />
    <style>
      button {
        margin: 0;
      }
    </style>
  </head>
<body>
  <p>Worksheet 4 part 1</p>
  <!-- Controls for the sphere subdivision level -->
  <div style="display: flex; width: 512px; align-items: center">
    <button id="dec" style="flex: auto">- decrease</button>
    <div style="padding: 0 12px; text-align: center">
      subdivisions:
      <br />
      <span id="counter"></span>
    </div>
    <button id="inc" style="flex: auto">+ increase</button>
  </div>
  <canvas id="view" width="512" height="512"></canvas>
  <!-- Course helper libraries: WebGL setup, shader compilation, vector/matrix math -->
  <script src="../common/webgl-utils.js"></script>
  <script src="../common/initShaders.js"></script>
  <script src="../common/MV.js"></script>
  <!-- Vertex shader: position transformed by a single combined matrix uniform -->
  <script id="vertex-shader" type="x-shader/x-vertex">
    attribute vec4 aPosition;
    uniform mat4 uModel;
    void main() {
      gl_Position = uModel * aPosition;
    }
  </script>
  <!-- Fragment shader: constant white -->
  <script id="fragment-shader" type="x-shader/x-fragment">
    precision mediump float;
    void main() {
      gl_FragColor = vec4(1.0,1.0, 1.0, 1.0);
    }
  </script>
  <script>
    const canvas = document.getElementById("view");
    const counter = document.getElementById("counter");
    const inc = document.getElementById("inc");
    const dec = document.getElementById("dec");
    let gl;
    let subdivision = 3;
    // Keep the on-screen counter in sync with the current level.
    const refreshCounter = () => (counter.textContent = subdivision);
    refreshCounter();
    // Four corners of a tetrahedron inscribed in the unit sphere (vec4 from MV.js).
    const va = vec4(0.0, 0.0, -1.0, 1);
    const vb = vec4(0.0, 0.942809, 0.333333, 1);
    const vc = vec4(-0.816497, -0.471405, 0.333333, 1);
    const vd = vec4(0.816497, -0.471405, 0.333333, 1);
    // Recursively split a triangle into four; edge midpoints are re-projected
    // onto the sphere by normalize(..., true). Yields 4^count triangles.
    const divideTriangle = (a, b, c, count) => {
      if (count > 0) {
        let ab = normalize(mix(a, b, 0.5), true);
        let ac = normalize(mix(a, c, 0.5), true);
        let bc = normalize(mix(b, c, 0.5), true);
        return [
          ...divideTriangle(a, ab, ac, count - 1),
          ...divideTriangle(ab, b, bc, count - 1),
          ...divideTriangle(bc, c, ac, count - 1),
          ...divideTriangle(ab, bc, ac, count - 1),
        ];
      } else {
        // Base case: emit the triangle itself.
        return [a, b, c];
      }
    };
    // Sphere approximation: subdivide each of the tetrahedron's four faces.
    const tetrahedron = (a, b, c, d, n) => {
      return [
        ...divideTriangle(a, b, c, n),
        ...divideTriangle(d, c, b, n),
        ...divideTriangle(a, d, b, n),
        ...divideTriangle(a, c, d, n),
      ];
    };
    // One-time GL setup: context, shaders, clear state, vertex buffer,
    // combined model-view-projection uniform, then first draw.
    const init = () => {
      gl = WebGLUtils.setupWebGL(canvas);
      if (!gl) {
        alert("WebGL isn’t available");
      }
      let program = initShaders(gl, "vertex-shader", "fragment-shader");
      gl.useProgram(program);
      gl.clearColor(0.3921, 0.5843, 0.9294, 1.0);
      gl.viewport(0, 0, canvas.width, canvas.height);
      gl.enable(gl.DEPTH_TEST);
      gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
      let vBuffer = gl.createBuffer();
      gl.bindBuffer(gl.ARRAY_BUFFER, vBuffer);
      let aPosition = gl.getAttribLocation(program, "aPosition");
      gl.vertexAttribPointer(aPosition, 4, gl.FLOAT, false, 0, 0);
      gl.enableVertexAttribArray(aPosition);
      let mUniform = gl.getUniformLocation(program, "uModel");
      let m = translate(0, 0, 0);
      let v = lookAt(vec3(0, 0, 6), vec3(0, 0, 0), vec3(0, 1, 0));
      let p = ortho(-2, 2, -2, 2, -10, 10);
      // uModel actually receives the full projection * view * model product.
      let mvp = mult(p, mult(v, m));
      gl.uniformMatrix4fv(mUniform, false, flatten(mvp));
      render(gl, subdivision);
    };
    // Clamp at zero so the tessellation never goes negative.
    dec.onclick = () => {
      subdivision = Math.max(0, subdivision - 1);
      refreshCounter();
      render(gl, subdivision);
    };
    inc.onclick = () => {
      subdivision++;
      refreshCounter();
      render(gl, subdivision);
    };
    // Re-tessellate, upload, and draw each triangle as an outline (LINE_LOOP
    // per triangle gives a wireframe look).
    const render = (gl, subdivision) => {
      gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
      const pointsArray = tetrahedron(va, vb, vc, vd, subdivision);
      const pointCount = pointsArray.length;
      gl.bufferData(gl.ARRAY_BUFFER, flatten(pointsArray), gl.STATIC_DRAW);
      for (var i = 0; i < pointCount; i += 3) {
        gl.drawArrays(gl.LINE_LOOP, i, 3);
      }
    };
    window.onload = init;
  </script>
</body>
</html>
|
import { resolveDynamicQuery } from './resolveDynamicQuery';
describe('resolveDynamicQuery', () => {
    it('Should return an unmodified query if no dynamic properties exist', () => {
        // A fully static query must pass through untouched.
        const staticQuery = {
            resource: 'test',
            id: '42',
            params: { page: 3, foo: 'bar' },
            data: { bar: 'baz' }
        };

        expect(resolveDynamicQuery(staticQuery, {})).toStrictEqual(staticQuery);
    });

    it('Should replace variables in all dynamic properties', () => {
        // id/params/data are functions of the variable bag and must all be
        // resolved against it.
        const dynamicQuery = {
            resource: 'test',
            id: ({ id }) => id,
            params: ({ page }) => ({ page, foo: 'bar' }),
            data: ({ bar }) => ({ bar })
        };
        const variables = { id: '42', page: 3, bar: 'baz' };

        expect(resolveDynamicQuery(dynamicQuery, variables)).toStrictEqual({
            resource: 'test',
            id: '42',
            params: { page: 3, foo: 'bar' },
            data: { bar: 'baz' }
        });
    });
});
|
<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <title>Mock2</title>
    <link rel="stylesheet" href="https://stackpath.bootstrapcdn.com/bootstrap/4.3.1/css/bootstrap.min.css" integrity="sha384-ggOyR0iXCbMQv3Xipma34MD+dH/1fQ784/j6cY/iJTQUOhcWr7x9JvoRxT2MZw1T" crossorigin="anonymous">
    <link rel="stylesheet"
          href="https://unpkg.com/font-awesome/css/font-awesome.css">
</head>
<body>
    <!-- As a heading -->
    <nav class="navbar navbar-dark bg-dark navbar-expand-md">
        <span class="navbar-brand mb-0">LOGO</span>
        <button class="navbar-toggler" type="button"
                data-toggle="collapse" data-target="#content">
            <span class="navbar-toggler-icon"></span>
        </button>
        <nav class="nav collapse navbar-collapse justify-content-end" id="content">
            <a class="nav-link active text-white" href="#">Home</a>
            <a class="nav-link text-secondary" href="#">Contact</a>
            <a class="nav-link text-secondary" href="#">About Us</a>
            <a class="nav-link text-secondary" href="#">Messages<span class="badge badge-success">12</span></a>
        </nav>
    </nav>
    <div class="container-fluid">
        <h1 class="my-4">Dashboard:</h1>
        <table class="table table-striped table-hover">
            <thead>
                <tr>
                    <th>Name</th>
                    <th>Team</th>
                    <th>All Star</th>
                    <th>Remove</th>
                </tr>
            </thead>
            <tbody>
                <tr>
                    <td>Steph</td>
                    <td>Warriors</td>
                    <td>Yes</td>
                    <td>
                        <a href="" class="btn btn-sm btn-danger"><i class="fa fa-trash"></i></a>
                    </td>
                </tr>
                <tr>
                    <td>Lebron</td>
                    <td>Cavaliers</td>
                    <td>Yes</td>
                    <td>
                        <a href="" class="btn btn-sm btn-danger"><i class="fa fa-trash"></i></a>
                    </td>
                </tr>
                <tr>
                    <td>Dwayne</td>
                    <td>Heat</td>
                    <td>Yes</td>
                    <td>
                        <a href="" class="btn btn-sm btn-danger"><i class="fa fa-trash"></i></a>
                    </td>
                </tr>
                <tr>
                    <td>Kobe</td>
                    <td>Lakers</td>
                    <td>Yes <span class="badge badge-warning">Last</span></td>
                    <td>
                        <a href="" class="btn btn-sm btn-danger"><i class="fa fa-trash"></i></a>
                    </td>
                </tr>
                <tr>
                    <td>Isaiah</td>
                    <td>Celtics</td>
                    <td>Yes <span class="badge badge-success">First</span></td>
                    <td>
                        <a href="" class="btn btn-sm btn-danger"><i class="fa fa-trash"></i></a>
                    </td>
                </tr>
            </tbody>
        </table>
        <hr>
        <h2 class="my-3">Todos:</h2>
        <div class="list-group my-2">
            <a href="#" class="list-group-item list-group-item-success">
                Feel good about Bootstrap
            </a>
            <a href="#" class="list-group-item list-group-item-info">
                Feel informed about Bootstrap
            </a>
            <a href="#" class="list-group-item list-group-item-warning">
                Feel warned about Bootstrap
            </a>
            <a href="#" class="list-group-item list-group-item-warning">
                Maybe I should read the docs...
            </a>
            <a href="#" class="list-group-item list-group-item-danger">
                Danger zone
            </a>
            <a href="#" class="list-group-item list-group-item-danger">
                I really should read the docs...
            </a>
        </div>
    </div>
    <!-- BUG FIX: these scripts previously sat *after* </html>, followed by a
         second stray </body></html> pair. They now load at the end of <body>
         (jQuery before Popper before Bootstrap, as Bootstrap 4 requires). -->
    <script src="https://code.jquery.com/jquery-3.3.1.slim.min.js" integrity="sha384-q8i/X+965DzO0rT7abK41JStQIAqVgRVzpbzo5smXKp4YfRvH+8abtTE1Pi6jizo" crossorigin="anonymous"></script>
    <script src="https://cdnjs.cloudflare.com/ajax/libs/popper.js/1.14.7/umd/popper.min.js" integrity="sha384-UO2eT0CpHqdSJQ6hJty5KVphtPhzWj9WO1clHTMGa3JDZwrnQq4sF86dIHNDz0W1" crossorigin="anonymous"></script>
    <script src="https://stackpath.bootstrapcdn.com/bootstrap/4.3.1/js/bootstrap.min.js" integrity="sha384-JjSmVgyd0p3pXB1rRibZUAYoIIy6OrQ6VrjIEaFf/nJGzIxFDsf4x0xIM+B07jRM" crossorigin="anonymous"></script>
</body>
</html>
|
import React from "react";
export type AccordionPropsType = {
    /** Heading shown in the title bar. */
    title: string
    /** When true the item list is hidden. */
    collapsed: boolean
    /** Toggles the collapsed state (state is owned by the parent). */
    setAccordionCollapsed: () => void
    onChange?: () => void
    /** Entries rendered in the body list. */
    items: Array<ItemType>
    /** Optional title colour; defaults to black. */
    color?: string
    /** Invoked with the clicked item's value. */
    onClick: (e: any) => void
}
type AccordionTitlePropsType = {
    /** Heading text. */
    title: string
    /** Toggle callback fired when the title is clicked. */
    setAccordionCollapsed: () => void
    /** Optional title colour; defaults to black. */
    color?: string
}
type AccordionBodyType = {
    /** Entries rendered as list items. */
    items: Array<ItemType>
    /** Invoked with the clicked item's value. */
    onClick: (e: any) => void
}
export type ItemType = {
    /** Visible label. */
    title: string
    /** Unique value; also used as the React key. */
    value: number
}
export const MemoAccordion = React.memo(Accordion)

// Collapsible section: clickable title plus an optional item list.
export function Accordion(props: AccordionPropsType) {
    const { title, color, collapsed, setAccordionCollapsed, items, onClick } = props
    return (
        <div>
            <MemoAccordionTitle
                title={title}
                setAccordionCollapsed={setAccordionCollapsed}
                color={color}
            />
            {!collapsed && <MemoAccordionBody onClick={onClick} items={items}/>}
        </div>
    )
}
export const MemoAccordionTitle = React.memo(AccordionTitle)
function AccordionTitle(props: AccordionTitlePropsType) {
console.log('AccordionTitle rendered')
return (
<div onClick={(e) => props.setAccordionCollapsed()}><h3
style={{color: props.color ? props.color : 'black'}}
>{props.title}</h3></div>
)
}
export const MemoAccordionBody = React.memo(AccordionBody)

// Item list; each entry reports its value through onClick when selected.
function AccordionBody(props: AccordionBodyType) {
    console.log('AccordionBody rendered')

    const renderItems = (list: Array<ItemType>) =>
        list.map((item) => {
            const handleClick = () => {
                props.onClick(item.value)
                console.log(`${item.title} clicked`)
            }
            return (
                <li onClick={handleClick} key={item.value}>{item.title}</li>
            )
        })

    return (
        <div>
            <ul>
                {props.items ? renderItems(props.items) : props.items}
            </ul>
        </div>
    )
}
/*
export function Accordion2(props: AccordionPropsType) {
console.log('Accordion rendered')
return (
<div>
<AccordionTitle title={props.title}/>
{!props.collapsed && <AccordionBody/>}
</div>
)
}*/
|
# Book Library Web App
Welcome to the Book Library Web App, a simple and elegant way to manage your personal book collection.
## Table of Contents
- [Introduction](#introduction)
- [Features](#features)
- [Getting Started](#getting-started)
  - [Prerequisites](#prerequisites)
  - [Installation](#installation)
- [Usage](#usage)
- [Enhancements](#enhancements)
- [Contributing](#contributing)
- [License](#license)
- [Acknowledgments](#acknowledgments)
## Introduction
This web app provides a visually appealing and functional book library where you can add, track, and manage your books. It offers a clean design with a focus on user experience.
## Features
- **Add Books:** Easily add new books to your library with details such as title, author, pages, and reading status.
- **Responsive Design:** The web app is designed to be responsive, ensuring a seamless experience across various devices and screen sizes.
- **Appealing Book Display:** Each book entry is represented as a book, complete with cover images and clear information sections.
- **Interactive "New Book" Modal:** The "NEW BOOK" button opens a modal with a user-friendly form for adding books.
## Getting Started
### Prerequisites
- Web browser (Chrome, Firefox, Safari, etc.)
### Installation
1. Clone the repository:
```bash
git clone https://github.com/Alpattex/book-library-web-app.git
```
2. Open `index.html` in your preferred web browser.
## Usage
1. Click on the "New Book" button to open the modal.
2. Enter the details of the new book (Author, Title, Pages, Read status).
3. Click "Add Book" to save the book to your library.
4. View your library with a table displaying all added books.
## Enhancements
- To enhance the design further, consider customizing the styles in `styles.css`.
- Add more features such as book categories, sorting options, or a search function based on your preferences.
## Contributing
Contributions are welcome! If you have ideas for improvements or find any issues, please open an issue or submit a pull request.
## License
This project is licensed under the [MIT License](LICENSE).
## Acknowledgments
* Built with HTML, CSS, and JavaScript.
* Utilizes modern web design principles for a pleasant user experience.
|
import React from "react";
import moment from "moment";
import { View } from "react-native";
import { Text } from "react-native-paper";
import { default as MaterialIcons } from "react-native-vector-icons/MaterialIcons";
import PeopleAPI from "src/api/people";
import CircleAvatar from "src/components/Avatar/Circle";
const INITIAL_STATE = { profilePicture: "" }
export default class MyBubble extends React.PureComponent{
loadProfilePicture = () => {
if(this.props.withAvatar && this.props.senderEmail) {
new PeopleAPI().getDetail(this.props.senderEmail).then(people => {
this.setState({ profilePicture: people.applicationInformation.profilePicture });
})
}
}
constructor(props){
super(props);
this.state = INITIAL_STATE;
this.loadProfilePicture = this.loadProfilePicture.bind(this);
}
componentDidMount(){ this.loadProfilePicture(); }
render(){
const sentIcon = this.props.isSent? "done-all": "done";
const sentTimeString = this.props.isSent? moment(this.props.sentTime.seconds * 1000).format("HH:mmA"):"";
return(
<View style={{ flex: 1, flexDirection: "row-reverse", marginBottom: 8, marginTop: 8 }}>
{this.props.withAvatar?(
<CircleAvatar size={32} uri={this.state.profilePicture}/>
):<View/>}
<View style={{ marginRight: this.props.withAvatar? 8: 40, marginLeft: 40 }}>
<View style={{ borderRadius: 8, padding: 8, backgroundColor: "#0EAD69", alignItems: "flex-end" }}>
<Text style={{ fontSize: 12, color: "white" }}>{this.props.message}</Text>
<View style={{ paddingTop: 4, paddingBottom: 4, flexDirection: "row",alignItems: "center" }}>
<Text style={{ textAlign: "right", fontWeight: "500", color: "#E8E8E8", fontSize: 8, marginRight: 4 }}>{sentTimeString}</Text>
<MaterialIcons name={sentIcon} color="#E8E8E8" size={10}/>
</View>
</View>
</View>
</View>
)
}
}
MyBubble.defaultProps = {
withAvatar: false, isSent: false, message: "", sentTime: null, senderEmail: null
}
|
from rest_framework.views import APIView
from rest_framework import status
from rest_framework.response import Response
from rest_framework.permissions import IsAuthenticated
from .serialization import *
from django.contrib.auth.hashers import make_password
from .models import *
from django.contrib.auth import authenticate
from rest_framework_simplejwt.tokens import RefreshToken
# Create your views here.
def get_tokens_for_user(user):
    """Return a fresh simplejwt token pair (refresh + access) for ``user``."""
    refresh_token = RefreshToken.for_user(user)
    return {
        'refresh': str(refresh_token),
        'access': str(refresh_token.access_token),
    }
class registerationview(APIView):
    """Register a new user and return a JWT token pair."""

    def post(self, request, format=None):
        serialize = registerationserialization(data=request.data)
        if not serialize.is_valid():
            # BUG FIX: validation failures are a client error — 400, not 403.
            return Response(serialize.errors, status=status.HTTP_400_BAD_REQUEST)
        # Use validated_data instead of raw request.data so a missing key
        # cannot raise KeyError (the serializer has already validated it).
        if User.objects.filter(national_id=serialize.validated_data['national_id']).exists():
            return Response({'msg': 'Account already exists'}, status=status.HTTP_400_BAD_REQUEST)
        serialize.validated_data['password'] = make_password(serialize.validated_data['password'])
        user = serialize.save()
        token = get_tokens_for_user(user)
        return Response(
            {'token': token, 'msg': 'Account created', "data": serialize.data},
            status=status.HTTP_201_CREATED,
        )
class login(APIView):
    """Authenticate by national_id/password and return tokens plus profile."""

    def post(self, request):
        serialize = loginserialize(data=request.data)
        if not serialize.is_valid():
            # BUG FIX: the errors were previously returned with no status,
            # which defaulted to 200 OK for a *failed* validation.
            return Response(serialize.errors, status=status.HTTP_400_BAD_REQUEST)
        national_id = serialize.data.get('national_id')
        password = serialize.data.get("password")
        user = authenticate(username=national_id, password=password)
        if not user:
            return Response({"msg": "user not found"}, status=status.HTTP_404_NOT_FOUND)
        token = get_tokens_for_user(user)
        request_User = User.objects.get(national_id=national_id)
        user_serialize = profileserializer(request_User)
        return Response(
            {'token': token, "msg": "login success", "user": user_serialize.data},
            status=status.HTTP_200_OK,
        )
class userprofile(APIView):
    """Return the authenticated user's own profile."""
    permission_classes = [IsAuthenticated]

    def get(self, request):
        profile = profileserializer(request.user, many=False)
        return Response(profile.data, status=status.HTTP_200_OK)
class changepassword(APIView):
    # Change the password of the currently authenticated user.
    permission_classes = [IsAuthenticated]

    def post(self, request):
        # The current user is passed through context so the serializer can act on it.
        serialize = changepasswordserialize(data=request.data, context={'user': request.user})
        if serialize.is_valid():
            # NOTE(review): no .save() is called here — this only works if
            # changepasswordserialize updates the password inside validate();
            # confirm against the serializer implementation.
            return Response({"msg": "password changed successfully"}, status=status.HTTP_200_OK)
        return Response(serialize.errors, status=status.HTTP_400_BAD_REQUEST)
class forget_password(APIView):
    """Send a password-reset email for the submitted address."""

    def post(self, request):
        serialize = sendpasswordresetemailserialize(data=request.data)
        # raise_exception=True turns validation failures into a DRF-handled
        # 400 response, so the old `else` branch was unreachable dead code.
        serialize.is_valid(raise_exception=True)
        return Response(
            {"msg": "password reset link sent.please check your email"},
            status=status.HTTP_200_OK,
        )
class userpasswordreset(APIView):
    """Complete a password reset using the uid/token from the emailed link."""

    def post(self, request, uid, token):
        serialize = userpasswordresetserialize(data=request.data, context={'uid': uid, 'token': token})
        # raise_exception=True turns validation failures into a DRF-handled
        # 400 response, so the old `else` branch was unreachable dead code.
        serialize.is_valid(raise_exception=True)
        return Response({"msg": "password reset successfully"}, status=status.HTTP_200_OK)
class Update_User(APIView):
    """Update the authenticated user's profile fields and password."""
    permission_classes = [IsAuthenticated]

    def put(self, request):
        data = request.data
        user = request.user
        user.first_name = data['first_name']
        user.last_name = data['last_name']
        user.birth_dt = data['birth_dt']
        user.email = data['email']
        user.national_id = data['national_id']
        user.phone_number = data['phone_number']

        # BUG FIX: the old code (a) tested the *stored* hash (user.password)
        # instead of the submitted password, (b) referenced `serialize.save`
        # without calling it, and (c) never saved the model — so no update
        # was ever persisted. Validate the incoming password and save.
        new_password = data.get('password')
        if not new_password or not new_password.strip():
            return Response({"msg": "password cant be none"}, status=status.HTTP_403_FORBIDDEN)
        user.password = make_password(new_password)
        user.save()
        return Response({"msg": "updated successfully"}, status=status.HTTP_202_ACCEPTED)
|
import { NgModule } from '@angular/core';
import { RouterModule, Routes } from '@angular/router';
import { LoginComponent } from './login/login.component';
import { HomeComponent } from './home/home.component'; // Import HomeComponent
import { AuthGuard } from './services/auth.guard'; // Import the guard
const routes: Routes = [
  {
    path: 'login', component: LoginComponent
  },
  {
    // The default route renders the same HomeComponent as '/home'; guard it
    // identically, otherwise unauthenticated users bypass AuthGuard simply
    // by visiting ''. (Confirm the empty path was not meant to be public.)
    path: '', component: HomeComponent, canActivate: [AuthGuard]
  },
  {
    path: 'home', component: HomeComponent, canActivate: [AuthGuard]
  },
];

@NgModule({
  imports: [RouterModule.forRoot(routes)],
  exports: [RouterModule]
})
export class AppRoutingModule { }
|
import React, { useRef } from "react";
import Link from "next/link";
import { gsap } from "gsap";
interface NavItemProps {
item: {
id: number;
name: string;
href: string;
};
}
/**
 * Single navigation link with a two-part animated underline:
 * on hover the left bar shrinks while the right bar slides in; on leave
 * the animation reverses.
 */
const NavItem: React.FC<NavItemProps> = ({ item }) => {
  const underlineLeft = useRef<HTMLSpanElement>(null);
  const underlineRight = useRef<HTMLSpanElement>(null);

  const playHover = () =>
    gsap
      .timeline({ defaults: { ease: "Power4.easeInOut", duration: 0.3 } })
      .to(underlineLeft.current, { scaleX: 0.3 })
      .to(underlineRight.current, { x: 0, delay: -0.3 });

  const playLeave = () =>
    gsap
      .timeline({ defaults: { ease: "Power4.easeInOut", duration: 0.3 } })
      .to(underlineRight.current, { x: 110 })
      .to(underlineLeft.current, { scaleX: 1, delay: -0.3 });

  return (
    <li
      onMouseEnter={playHover}
      onMouseLeave={playLeave}
      className="font-secondary text-primary text-xl nav-item opacity-0 -translate-x-[200px] clip-path-square-left-hidden relative w-full hidden md:block"
    >
      <Link href={item.href}>{item.name}</Link>
      <span
        ref={underlineLeft}
        className="bg-tertiary h-[3px] w-4 absolute bottom-0 left-0 origin-left nav-underline-left"
      ></span>
      <span
        ref={underlineRight}
        className="bg-tertiary h-[3px] w-[90%] translate-x-[110%] right-0 absolute bottom-0 nav-underline-right"
      ></span>
    </li>
  );
};

export default NavItem;
|
<script lang="ts">
	import type { DeviceInterface } from '$lib/types/devices';
	import { IndicatorStatus } from '$lib/components/StatusIndicatorBubble.svelte';
	import StatusIndicatorBubble from '$lib/components/StatusIndicatorBubble.svelte';
	import InformationElement from './InformationElement.svelte';
	import routerImg from '$lib/images/router.png';

	export let deviceInterface: DeviceInterface;

	let status: IndicatorStatus;

	// Map interface/line-protocol state onto an indicator colour:
	// up/up -> Active, up/down -> Idle, anything else -> Inactive.
	const updateStatus = (intStatus: string, lineProto: string) => {
		if (intStatus !== 'up') {
			status = IndicatorStatus.Inactive;
		} else {
			status = lineProto === 'up' ? IndicatorStatus.Active : IndicatorStatus.Idle;
		}
	};

	// Recompute whenever the bound interface changes.
	$: updateStatus(deviceInterface.status, deviceInterface.lineProtocol);
</script>
<div class="container">
	<div class="header">
		<StatusIndicatorBubble {status} size={10} />
		<h2>
			{deviceInterface.type}{deviceInterface.number}
		</h2>
		<div class="int-status">
			<span
				class="status-left"
				class:active={deviceInterface.status === 'up'}
				class:inactive={deviceInterface.status !== 'up'}>{deviceInterface.status}</span
			>
			<!-- BUG FIX: this badge displays lineProtocol but was coloured by
			     deviceInterface.status; it now keys off lineProtocol. -->
			<span
				class="lineProto-right"
				class:active={deviceInterface.lineProtocol === 'up'}
				class:inactive={deviceInterface.lineProtocol !== 'up'}
				>{deviceInterface.lineProtocol}</span
			>
		</div>
	</div>
	<div class="indent">
		<div class="section">
			<h3 class="subtitle">Basic Info</h3>
			<div class="indent">
				<InformationElement name="IP" content={deviceInterface.ip || ''} />
				<InformationElement
					name="Input Packet Loss"
					content={deviceInterface.inputPacketLoss}
				/>
				<!-- Typo fix: was "Ouput Packet Loss" -->
				<InformationElement
					name="Output Packet Loss"
					content={deviceInterface.outputPacketLoss}
				/>
			</div>
		</div>
		{#if deviceInterface.neighbors.length > 0}
			<h3 class="subtitle">Neighbors</h3>
			<div class="neighbors indent">
				{#each deviceInterface.neighbors as neighbor}
					<div class="neighbor-header">
						<img src={routerImg} alt="Network device" />
						<h4>{neighbor.deviceID}</h4>
					</div>
					<div class="neighbor-details indent">
						<InformationElement name="IP" content={neighbor.ip} />
						<InformationElement name="Int" content={neighbor.interface} />
					</div>
				{/each}
			</div>
		{/if}
	</div>
</div>
<style>
	/* Card container: each interface is separated by a light rule. */
	.container {
		margin-bottom: 1em;
		padding-bottom: 1em;
		border-bottom: 2px solid #cacaca;
	}
	.header {
		display: flex;
		align-items: center;
	}
	.header h2 {
		margin: 0 0 0 0.5em;
		font-size: 1.4em;
	}
	.section {
		margin: 0 0 0.6em 0;
	}
	/* Paired status/lineProtocol badges rendered as one pill. */
	.int-status {
		display: flex;
		margin: 0 0 0 1em;
	}
	.int-status span {
		color: white;
		margin: 0;
		padding: 0.2em 0.4em;
		font-size: 0.8em;
	}
	.status-left {
		border-radius: 6px 0 0 6px;
	}
	/* Divider between the two halves, tinted to match the state colour. */
	.status-left.inactive {
		border-right: 2px solid #902e25;
	}
	.status-left.active {
		border-right: 2px solid #219552;
	}
	.lineProto-right {
		border-radius: 0 6px 6px 0;
	}
	.active {
		background-color: #2ecc71;
	}
	.inactive {
		background-color: #e74c3c;
	}
	.subtitle {
		margin: 0.3em 0 0.5em 0;
		border-bottom: 2px solid #34495e;
		display: inline-block;
	}
	.neighbor-header {
		display: flex;
		align-items: center;
	}
	.neighbor-header h4 {
		margin: 0.3em;
		font-weight: normal;
	}
	/* Shared indentation helpers (global so child components can use them). */
	:global(.indent) {
		padding-left: 1.4em;
	}
	:global(.indent-double) {
		padding-left: 2.8em;
	}
	.neighbor-header img {
		margin: 0 0.4em 0 0;
		height: 1.1em;
		width: auto;
	}
</style>
|
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Mvc;
using Microsoft.CodeAnalysis.Elfie.Diagnostics;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Metadata.Internal;
using Shop_Bear.Models;
using Shop_Bear.Models.ViewModels;
namespace Shop_Bear.Areas.Admin.Controllers
{
[Area("Admin")]
[Authorize]
public class StatisticalController : Controller
{
    private readonly ShopBearContext _context;

    public StatisticalController(ShopBearContext context)
    {
        _context = context;
    }

    // Daily revenue/profit dashboard.
    public IActionResult Index()
    {
        var statisticalData = GetStatisticalData(); // Phương thức này lấy dữ liệu thống kê từ cơ sở dữ liệu
        return View(statisticalData);
    }

    // Monthly revenue/profit for a single year.
    public IActionResult Yearly(int year)
    {
        var yearlyStatisticalData = GetYearlyStatisticalData(year);
        return View(yearlyStatisticalData);
    }

    // Per-day totals over completed/confirmed orders (Status 1 or 2).
    private List<StatisticalViewModel> GetStatisticalData()
    {
        var query = from o in _context.Orders
                    join od in _context.OrderDetails
                        on o.Id equals od.OrderId
                    join p in _context.Products
                        on od.ProductId equals p.Id
                    where o.Status == 1 || o.Status == 2
                    select new
                    {
                        CreatedDate = o.CreateDate,
                        Quantity = od.Quantity,
                        Price = od.Price,
                        OriginalPrice = p.OriginalPrice,
                        PriceSale = p.PriceSale
                    };

        // NOTE(review): "TotalBuy" sums sale prices and "TotalSell" sums
        // original prices — the names look swapped; verify against the VM.
        var result = query.GroupBy(x => x.CreatedDate.Date).Select(x => new
        {
            Date = x.Key,
            TotalBuy = x.Sum(y => y.Quantity * y.PriceSale),
            TotalSell = x.Sum(y => y.Quantity * y.OriginalPrice),
        }).Select(x => new StatisticalViewModel
        {
            Date = x.Date,
            // BUG FIX: the previous (decimal) casts threw on null sums;
            // GetValueOrDefault() treats null as 0, matching the yearly report.
            DoanhThu = x.TotalBuy.GetValueOrDefault(),
            LoiNhuan = x.TotalBuy.GetValueOrDefault() - x.TotalSell.GetValueOrDefault()
        }).ToList();

        return result;
    }

    // Per-month totals for the given year over Status 1/2 orders.
    private List<StatisticalViewModel> GetYearlyStatisticalData(int year)
    {
        var query = from o in _context.Orders
                    join od in _context.OrderDetails on o.Id equals od.OrderId
                    join p in _context.Products on od.ProductId equals p.Id
                    where o.CreateDate.Year == year && (o.Status == 1 || o.Status == 2)
                    select new
                    {
                        CreatedDate = o.CreateDate,
                        Quantities = od.Quantity,
                        Prices = od.Price,
                        PriceSale = p.PriceSale,
                        OriginalPrices = p.OriginalPrice
                    };

        var result = query.GroupBy(x => x.CreatedDate.Month).Select(x => new
        {
            Month = x.Key,
            TotalBuy = x.Sum(y => y.Quantities * y.PriceSale),
            TotalSell = x.Sum(y => y.Quantities * y.OriginalPrices),
        }).Select(x => new StatisticalViewModel
        {
            // First day of each month stands in for the whole month.
            Date = new DateTime(year, x.Month, 1),
            DoanhThu = x.TotalBuy.GetValueOrDefault(),
            LoiNhuan = x.TotalBuy.GetValueOrDefault() - x.TotalSell.GetValueOrDefault()
        }).ToList();

        return result;
    }
}
}
|
import { ReactNode, VFC } from 'react'
import Head from 'next/head'
import Link from 'next/link'
import image from 'next/image'
interface Props {
  children: ReactNode
  title: string
}

/**
 * Shared page shell: sets the document title and renders the top
 * navigation bar above the page content.
 * Each link carries a data-testid so tests can target it directly.
 */
export const Layout: VFC<Props> = ({
  children,
  title = 'Welcome to Nextjs',
}) => {
  return (
    <div className="flex flex-col justify-center items-center min-h-screen text-gray-600 text-sm font-mono">
      <Head>
        <title>{title}</title>
      </Head>
      <header className="bg-gray-800 w-screen">
        <nav className="flex items-center pl-8 h-14">
          {/* Fixed typos: "spave-x-4" -> "space-x-4" and "test-gray-300"
              -> "text-gray-300"; the misspelled classes matched nothing in
              Tailwind, so spacing and link color were silently missing. */}
          <div className="flex space-x-4">
            <Link href="/">
              <a
                // for test
                data-testid="home-nav"
                className="text-gray-300 hover:bg-gray-700 px-3 py-2 rounded"
              >
                Home
              </a>
            </Link>
            <Link href="/local-state-a">
              <a
                data-testid="makevar-nav"
                className="text-gray-300 hover:bg-gray-700 px-3 py-2 rounded"
              >
                makeVar
              </a>
            </Link>
            <Link href="/hasura-main">
              <a
                data-testid="fetchpolicy-nav"
                className="text-gray-300 hover:bg-gray-700 px-3 py-2 rounded"
              >
                FetchPolicy(Hasura)
              </a>
            </Link>
            <Link href="/hasura-crud">
              <a
                data-testid="hasura-crud"
                className="text-gray-300 hover:bg-gray-700 px-3 py-2 rounded"
              >
                CRUD(Hasura)
              </a>
            </Link>
            <Link href="/hasura-ssg">
              <a
                data-testid="ssg-nav"
                className="text-gray-300 hover:bg-gray-700 px-3 py-2 rounded"
              >
                SSG+ISR(Hasura)
              </a>
            </Link>
            <Link href="/hooks-memo">
              <a
                data-testid="memo-nav"
                className="text-gray-300 hover:bg-gray-700 px-3 py-2 rounded"
              >
                customHooks+memo
              </a>
            </Link>
            <Link href="/lil-kt">
              <a
                data-testid="lil-kt-nav"
                className="text-gray-300 hover:bg-gray-700 px-3 py-2 rounded"
              >
                lil-kt
              </a>
            </Link>
          </div>
        </nav>
      </header>
      <main className="flex flex-1 flex-col justify-center items-center w-screen">
        {children}
      </main>
    </div>
  )
}
|
/*
* NJ.java Copyright (C) 2022 Daniel H. Huson
*
* (Some files contain contributions from other authors, who are then mentioned separately.)
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
// NOTE: apply uses the upper triangle of the dist matix
package splitstree4.algorithms.distances;
import jloda.graph.Edge;
import jloda.graph.Node;
import jloda.phylo.PhyloTree;
import jloda.util.Basic;
import jloda.util.CanceledException;
import splitstree4.core.Document;
import splitstree4.nexus.Distances;
import splitstree4.nexus.Taxa;
import splitstree4.nexus.Trees;
import java.util.HashMap;
/**
* Implements the Neighbor-Joining algorithm of Saitou and Nei (1987).
*/
public class NJ implements Distances2Trees {
public final static String DESCRIPTION = "Computes the Neighbour-Joining tree (Saitou and Nei 1987)";
/**
 * Determine whether given method can be applied to given data.
 *
 * @param doc  the document (not consulted here)
 * @param taxa the input taxa
 * @param dist the distances matrix
 * @return true iff both a taxa block and a distances block are present
 */
public boolean isApplicable(Document doc, Taxa taxa, Distances dist) {
return taxa != null && dist != null;
}
/**
 * Applies Neighbor Joining to the given distance matrix.
 * Agglomerates the two taxa minimizing the NJ criterion
 * d(i,j) - (b[i]+b[j])/(n-2) until two clusters remain, building the
 * tree bottom-up. Only the upper triangle of the input matrix is read.
 *
 * @param doc  the document (used for progress reporting; may be null)
 * @param taxa the input taxa
 * @param dist the input distances
 * @return the computed phylogenetic tree (PhyloTree) as a nexus Trees object
 */
public Trees apply(Document doc, Taxa taxa, Distances dist) throws CanceledException {
PhyloTree tree = new PhyloTree();
// ProgressDialog pd = new ProgressDialog("NJ...",""); //Set new progress bar.
// doc.setProgressListener(pd);
if (doc != null) {
doc.notifySetMaximumProgress(taxa.getNtax()); //initialize maximum progress
doc.notifySetProgress(0);
}
try {
// Maps the current (possibly merged, parenthesized) label of each
// cluster to its tree node.
HashMap<String, Node> TaxaHashMap = new HashMap<>();
int nbNtax = dist.getNtax();
StringBuffer[] tax = new StringBuffer[nbNtax + 1];
//Taxa labels are saved as a StringBuffer array
for (int t = 1; t <= nbNtax; t++) {
tax[t] = new StringBuffer();
tax[t].append(taxa.getLabel(t));
Node v = tree.newNode(); // create newNode for each Taxon
tree.setLabel(v, tax[t].toString());
tree.addTaxon(v, t);
TaxaHashMap.put(tax[t].toString(), v);
}
double[][] h = new double[nbNtax + 1][nbNtax + 1];// distance matix
double[] b = new double[nbNtax + 1];// the b variable in Neighbor Joining
int i_min = 0, j_min = 0; // needed for manipulation of h and b
double temp, dist_e, dist_f;//new edge weights
StringBuilder tax_old_i; //labels of taxa that are being merged
StringBuilder tax_old_j;
Node v;
Edge e, f; //from tax_old to new=merged edge
// Row/column 0 of h is an "active" flag, not a distance:
// 1.0 means the cluster is still live, 0.0 means it was merged away.
for (int i = 0; i <= nbNtax; i++) {
h[0][i] = 1.0; // with 1.0 marked columns indicate columns/rows
h[i][0] = 1.0;// that haven't been deleted after merging
}
// Symmetrize the matrix from the upper triangle of the input.
for (int i = 1; i <= nbNtax; i++) {
for (int j = 1; j <= nbNtax; j++) { //fill up the
if (i < j)
h[i][j] = dist.get(i, j);// distance matix h
else
h[i][j] = dist.get(j, i);
}
}
// calculate b: b[i] is the row sum of distances for cluster i
// (note h[i][0]==1.0 adds a constant 1 to every row sum; it cancels
// out in the minimization below because it is the same for all rows).
for (int i = 1; i <= nbNtax; i++) {
for (int j = 1; j <= nbNtax; j++) {
b[i] += h[i][j];
}
}
// recall: int i_min=0, j_min=0;
// actual for (finding all nearest Neighbors)
// "actual" is the number of live clusters; stop when two remain.
for (int actual = nbNtax; actual > 2; actual--) {
// find: min D (h, b, b)
// Minimize the NJ criterion d(i,j) - (b[i]+b[j])/(actual-2)
// over all live pairs i<j.
double d_min = Double.MAX_VALUE;
for (int i = 1; i < nbNtax; i++) {
if (h[0][i] == 0.0) continue;
for (int j = i + 1; j <= nbNtax; j++) {
if (h[0][j] == 0.0)
continue;
if (h[i][j] - ((b[i] + b[j]) / (actual - 2)) < d_min) {
d_min = h[i][j] - ((b[i] + b[j]) / (actual - 2));
i_min = i;
j_min = j;
}
}
}
// Branch lengths from the two merged clusters to their new parent.
dist_e = 0.5 * (h[i_min][j_min] + b[i_min] / (actual - 2)
- b[j_min] / (actual - 2));
dist_f = 0.5 * (h[i_min][j_min] + b[j_min] / (actual - 2)
- b[i_min] / (actual - 2));
// Retire cluster j_min; the merged cluster reuses slot i_min.
h[j_min][0] = 0.0;// marking
h[0][j_min] = 0.0;
// tax taxa update:
// The merged cluster's label becomes "(label_i,label_j)".
tax_old_i = new StringBuilder(tax[i_min].toString());
tax_old_j = new StringBuilder(tax[j_min].toString());
tax[i_min].insert(0, "(");
tax[i_min].append(",");
tax[i_min].append(tax[j_min]);
tax[i_min].append(")");
tax[j_min].delete(0, tax[j_min].length());
// b update:
b[i_min] = 0.0;
b[j_min] = 0.0;
// fusion of h
// double h_min = h[i_min][j_min];
// Recompute distances from every live cluster i to the merged
// cluster, and patch the row sums b incrementally.
for (int i = 1; i <= nbNtax; i++) {
if (h[0][i] == 0.0)
continue;
//temp=(h[i][i_min] + h[i][j_min] - h_min)/2; This is incorrect
temp = (h[i][i_min] + h[i][j_min] - dist_e - dist_f) / 2; // correct NJ
if (i != i_min) {
b[i] = b[i] - h[i][i_min] - h[i][j_min] + temp;
}
b[i_min] += temp;
h[i][i_min] = temp;
b[j_min] = 0.0;
}
// Mirror the updated column into the row and zero out j_min.
for (int i = 0; i <= nbNtax; i++) {
h[i_min][i] = h[i][i_min];
h[i][j_min] = 0.0;
h[j_min][i] = 0.0;
}
// generate new Node for merged Taxa:
v = tree.newNode();
TaxaHashMap.put(tax[i_min].toString(), v);
// generate Edges from two Taxa that are merged to one:
// Negative branch lengths are clamped to zero.
e = tree.newEdge(TaxaHashMap.get(tax_old_i.toString()), v);
tree.setWeight(e, Math.max(dist_e, 0.0));
f = tree.newEdge(TaxaHashMap.get(tax_old_j.toString()), v);
tree.setWeight(f, Math.max(dist_f, 0.0));
// NOTE(review): progress is reset to 0 every iteration rather than
// advanced — the progress bar never moves; confirm intended.
if (doc != null)
doc.notifySetProgress(0);
}
// evaluating last two nodes:
// Find the indices of the two remaining live clusters.
for (int i = 1; i <= nbNtax; i++) {
if (h[0][i] == 1.0) {
i_min = i;
i++;
for (; i <= nbNtax; i++) {
if (h[0][i] == 1.0) {
j_min = i;
}
}
}
}
tax_old_i = new StringBuilder(tax[i_min].toString());
tax_old_j = new StringBuilder(tax[j_min].toString());
tax[i_min].insert(0, "(");
tax[i_min].append(",");
tax[i_min].append(tax[j_min]);
tax[i_min].append(")");
tax[j_min].delete(0, tax[j_min].length()); //not neces. but sets content to NULL
// generate new Node for merged Taxa:
// generate Edges from two Taxa that are merged to one:
// Join the final two clusters by a single edge and root there.
e = tree.newEdge(TaxaHashMap.get(tax_old_i.toString()), TaxaHashMap.get(tax_old_j.toString()));
tree.setWeight(e, Math.max(h[i_min][j_min], 0.0));
tree.setRoot(e.getSource());
tree.redirectEdgesAwayFromRoot();
} catch (Exception ex) {
Basic.caught(ex);
}
//System.err.println(tree.toBracketString(true));
return new Trees("NJ", tree, taxa);
}
/**
 * gets a short description of the algorithm
 *
 * @return a description
 */
public String getDescription() {
return DESCRIPTION;
}
}
// EOF
|
import { RouterProvider, createBrowserRouter } from "react-router-dom";
import Root from "./pages/Root";
import { CssBaseline } from "@mui/material";
import Home from "./pages/Home";
import Contacts from "./pages/Contacts";
import About from "./pages/About";
import NewContact from "./pages/NewContact";
import Error from "./pages/Error";
import ContactDetail from "./pages/ContactDetail";
import ContactEdit from "./pages/ContactEdit";
import contactFormAction from "./actions/contactFormAction";
import deleteContactAction from "./actions/deleteContactAction";
const router = createBrowserRouter([
{
path: "/",
element: <Root />,
errorElement: <Error />,
children: [{
index: true,
element: <Home />,
},
{
path: 'contacts',
children: [
{
index: true,
element: <Contacts />
},
{
path: ':contactId',
id: 'contact-detail',
loader: ContactDetail.loader,
children: [
{
index: true,
element: <ContactDetail />,
action: deleteContactAction,
},
{
path: 'edit',
element: <ContactEdit />,
action: contactFormAction,
}
]
},
{
path: 'new',
element: <NewContact />,
action: contactFormAction,
}
]
},
{
path: 'about',
element: <About />,
}
],
},
]);
function App() {
return (
<>
<CssBaseline />
<RouterProvider router={router} />
</>
);
}
export default App;
|
use std::{sync::Arc, cell::RefCell};
use std::sync::{Mutex, MutexGuard};
use crate::rendering::{get_command_encoder, RenderPassInfo, build_render_pass};
use crate::{math::{Vec3, Color}, rendering::{construct_render_pipeline, RenderPipelineInfo, RenderStage}, camera::{Camera, CameraUniform}};
use crate::gpu_utils::{BindGroup, Uniform, VertexBuffer, VertexData, GPUVec3, IndexBuffer, GPUVec4};
use crate::voxel::voxel_rendering::*;
use super::{terrain::VoxelTerrain, VoxelStorage, Voxel};
/// GPU-side data needed to draw one chunk: an instance buffer with one
/// `VoxelFace` entry per visible face of the chunk's mesh.
pub struct ChunkRenderData
{
    face_instance_buffer: VertexBuffer<VoxelFace>
}

impl ChunkRenderData
{
    /// Uploads the mesh's face list to the GPU and wraps it.
    pub fn new(mesh: &VoxelMesh, device: &wgpu::Device) -> Self
    {
        let face_instance_buffer = mesh.create_buffers(device);
        Self { face_instance_buffer }
    }

    /// Read-only access to the per-face instance buffer.
    pub fn face_instance_buffer(&self) -> &VertexBuffer<VoxelFace>
    {
        &self.face_instance_buffer
    }
}
// Render stage that draws every chunk of a voxel terrain.
// Owns the pipeline, the shared bind group, and the uniforms that are
// rewritten per frame / per chunk.
pub struct TerrainRenderStage<TStorage> where TStorage : VoxelStorage<Voxel>
{
device: Arc<wgpu::Device>,
camera: Camera,
// RefCell: rewritten inside `on_draw` per frame.
camera_uniform: RefCell<Uniform<CameraUniform>>,
// Written once at construction; kept alive because the bind group refers to them.
_voxel_size_uniform: Uniform<f32>,
_voxel_color_storage: Uniform<[Color; 4]>,
// Rewritten per chunk inside `on_draw` (xyz = chunk origin, w unused).
chunk_position_uniform: RefCell<Uniform<GPUVec4<i32>>>,
// Shared quad geometry instanced once per voxel face.
vertex_buffer: VertexBuffer<VoxelVertex>,
index_buffer: IndexBuffer,
terrain: Arc<Mutex<VoxelTerrain<TStorage>>>,
terrain_bind_group: BindGroup,
render_pipeline: wgpu::RenderPipeline,
}
impl<TStorage> TerrainRenderStage<TStorage> where TStorage : VoxelStorage<Voxel> + Send + 'static
{
// Builds all GPU resources for terrain rendering: uniforms, shared quad
// geometry, the bind group, and the render pipeline.
// Locks the terrain mutex for the duration of setup.
pub fn new(terrain: Arc<Mutex<VoxelTerrain<TStorage>>>, camera: Camera, device: Arc<wgpu::Device>, config: &wgpu::SurfaceConfiguration) -> Self
{
let terrain_mutex = terrain.lock().unwrap();
let mut camera_uniform_data = CameraUniform::new();
camera_uniform_data.update_view_proj(&camera);
let camera_uniform = Uniform::new(camera_uniform_data, wgpu::ShaderStages::VERTEX, &device);
let voxel_size_uniform = Uniform::new(terrain_mutex.info().voxel_size, wgpu::ShaderStages::VERTEX, &device);
let chunk_position_uniform = Uniform::new(GPUVec4::new(0, 0, 0, 0), wgpu::ShaderStages::VERTEX, &device);
// Panics (try_into().unwrap()) unless the terrain declares exactly 4
// voxel types, since the uniform is a fixed [Color; 4].
let voxel_colors: [Color; 4] = terrain_mutex
.info().voxel_types
.iter()
.map(|v| v.color.into())
.collect::<Vec<_>>().try_into().unwrap();
let voxel_color_storage = Uniform::new(voxel_colors, wgpu::ShaderStages::VERTEX, &device);
let vertex_buffer = VertexBuffer::new(&VOXEL_FACE_VERTICES, &device, Some("Voxel Vertex Buffer"));
let index_buffer = IndexBuffer::new(&VOXEL_FACE_TRIANGLES, &device, Some("Voxel Index Buffer"));
// NOTE(review): the order of uniforms here presumably must match the
// binding order in terrain_shader — confirm against the shader source.
let terrain_bind_group = BindGroup::new(&[&camera_uniform, &voxel_size_uniform, &chunk_position_uniform, &voxel_color_storage], &device);
println!("Camera uniform size {}", camera_uniform.size());
println!("Voxel size uniform size {}", voxel_size_uniform.size());
println!("Chunk position uniform size {}", chunk_position_uniform.size());
println!("Voxel color uniform size {}", voxel_color_storage.size());
// Shader path is baked in at build time via the env var expansion.
let shader = &device.create_shader_module(wgpu::include_spirv!(env!("terrain_shader.spv")));
let render_pipeline = construct_render_pipeline(&device, config, &RenderPipelineInfo {
shader,
vs_main: "vs_main",
fs_main: "fs_main",
vertex_buffers: &[&VoxelFace::desc(), &VoxelVertex::desc()],
bind_groups: &[terrain_bind_group.layout()],
label: Some("Voxel Render Pipeline")
});
// Release the lock before moving `terrain` into the struct.
drop(terrain_mutex);
Self
{
device,
camera,
camera_uniform: RefCell::new(camera_uniform),
_voxel_size_uniform: voxel_size_uniform,
_voxel_color_storage: voxel_color_storage,
chunk_position_uniform: RefCell::new(chunk_position_uniform),
vertex_buffer,
index_buffer,
terrain_bind_group,
terrain,
render_pipeline
}
}
// Replaces the camera used for the next draw.
pub fn update(&mut self, camera: Camera)
{
self.camera = camera;
}
}
impl<TStorage> RenderStage for TerrainRenderStage<TStorage>
where TStorage : VoxelStorage<Voxel> + Send + 'static
{
// Draws every chunk that has render data. Holds the terrain lock for the
// whole pass.
fn on_draw(&mut self, device: &wgpu::Device, queue: &wgpu::Queue, view: &wgpu::TextureView, depth_texture: &crate::gpu_utils::Texture)
{
let terrain = self.terrain.lock().unwrap();
for chunk in terrain.chunks()
{
// Chunks without uploaded mesh data are skipped.
let Some(render_data) = chunk.render_data() else { continue; };
// update camera view
// NOTE(review): the camera uniform is identical for every chunk but is
// re-written each iteration — could be hoisted out of the loop.
let mut data = CameraUniform::new();
data.update_view_proj(&self.camera);
self.camera_uniform.borrow_mut().enqueue_write(data, queue);
// update chunk position
// Chunk index -> world-space origin (in voxel units), w padded with 0.
let chunk_index: Vec3<i32> = chunk.index().cast().unwrap();
let chunk_position = (chunk_index * terrain.info().chunk_length() as i32).extend(0);
self.chunk_position_uniform.borrow_mut().enqueue_write(chunk_position.into(), queue);
// One encoder + one queue submission per chunk; correctness is fine,
// but batching into a single submission would reduce overhead.
let mut command_encoder = get_command_encoder(device);
let info = RenderPassInfo
{
command_encoder: &mut command_encoder,
render_pipeline: &self.render_pipeline,
bind_groups: &[self.terrain_bind_group.bind_group()],
view,
depth_texture: Some(depth_texture),
vertex_buffers: &[render_data.face_instance_buffer().slice_all(), self.vertex_buffer.slice_all()],
index_buffer: Some(self.index_buffer.slice(..)),
index_format: wgpu::IndexFormat::Uint32,
};
let mut render_pass = build_render_pass(info);
// 6 indices = one quad, instanced once per voxel face in the buffer.
render_pass.draw_indexed(0..6, 0, 0..(render_data.face_instance_buffer().length() as u32));
drop(render_pass);
queue.submit(std::iter::once(command_encoder.finish()));
}
}
}
|
import { useState } from "react";
import s from "./doc.module.css";
import axios from "axios";
import { useNavigate } from "react-router-dom";
import { Backdrop } from "@mui/material";
import CircularProgress from "@mui/material/CircularProgress";
import Box from "@mui/material/Box";
function Doc_signin() {
const [doctor_id, setdoctor_id] = useState("");
const [password, setpassword] = useState("");
const [loading, setloading] = useState(false);
const [data, setdata] = useState([]);
const navigate = useNavigate();
const submitThis = async (e) => {
e.preventDefault();
setloading(true);
const info = { docID: doctor_id, pass: password };
try {
const response = await axios.post(
"https://ehs-q3hx.onrender.com/api/loginDoc",
info
);
console.log(response.data);
setdata(response.data);
setloading(false);
gotoPage();
} catch (error) {
console.error(error);
setloading(false);
}
};
function gotoPage() {
const name = data.docData.fullname;
console.log();
if (data.success === true) {
console.log("navigatig");
navigate("/doc_dash", { state: { docid: doctor_id, name: name } });
}
}
return (
<>
<div className={s.form}>
<form action="" method="POST" onSubmit={submitThis}>
<div className="title">Sehat Doctor Login</div>
<div className={s.inp_fields}>
<input
type="text"
name="doctor_id"
value={doctor_id}
onChange={(e) => setdoctor_id(e.target.value)}
id="doctor_id"
placeholder="Doctor ID"
required
/>
<input
type="password"
name="password"
value={password}
onChange={(e) => setpassword(e.target.value)}
id="password"
placeholder="Password"
required
/>
</div>
<div className={s.login_button}>
<button type="submit">Login</button>
</div>
</form>
{loading && (
<Box
sx={{
display: "flex",
justifyContent: "center",
alignItems: "center",
marginTop: "20px",
}}
>
<Backdrop open={loading}>
<CircularProgress />
</Backdrop>
</Box>
)}
</div>
</>
);
}
export default Doc_signin;
|
import { IActor, Actor, Size } from "../types/Actor";
import { Point } from "../types/Point";
import {
canvasWidth,
canvasHeight,
playWidth,
playHeight,
pieceUnit,
} from "../utils/CanvasMeasureVars";
/**
 * A falling game piece constrained to the playfield. Moves down under its
 * own speed and responds to arrow keys / space.
 */
export class Piece extends Actor implements IActor {
  pieceSize: Size;
  pieceColor: string;
  origin: Point;
  speed: Point;
  maxSpeed: number;

  /**
   * @param initialPos   starting position
   * @param initialSpeed NOTE(review): currently unused — maxSpeed is always
   *                     pieceUnit; confirm whether it should seed the fall
   *                     speed (kept for interface compatibility).
   * @param size         piece dimensions, one square unit by default
   */
  constructor(
    initialPos: Point,
    initialSpeed = 10,
    size: Size = { w: pieceUnit, h: pieceUnit }
  ) {
    super();
    this.pieceSize = size;
    this.pieceColor = "red";
    this.origin = { x: initialPos.x, y: initialPos.y };
    this.maxSpeed = pieceUnit;
    this.speed = { x: 0, y: this.maxSpeed };
  }

  /** Advances the piece downward while it stays inside the canvas. */
  update(delta: number) {
    const newPosY = this.origin.y + this.speed.y * delta;
    if (newPosY < canvasHeight - this.pieceSize.h && newPosY >= 0) {
      this.origin.y = newPosY;
    }
  }

  /** Handles movement keys while the piece has not landed. */
  keyboard_event(key: string) {
    switch (key) {
      case `ArrowRight`:
        if (
          this.origin.y != canvasHeight - this.pieceSize.h &&
          this.origin.x >= playWidth / 2 &&
          this.origin.x < (playWidth * 3) / 2 - this.pieceSize.w
        ) {
          // Fixed: step by half the piece WIDTH (was pieceSize.h — identical
          // for the default square piece, but width is the horizontal axis,
          // matching ArrowLeft below).
          this.origin.x += this.pieceSize.w / 2;
        }
        break;
      case `ArrowLeft`:
        if (
          this.origin.y != canvasHeight - this.pieceSize.h &&
          this.origin.x > playWidth / 2 + this.pieceSize.w / 2 &&
          this.origin.x < (playWidth * 3) / 2 - this.pieceSize.w / 2
        ) {
          this.origin.x -= this.pieceSize.w / 2;
        }
        break;
      case `ArrowDown`:
        if (this.origin.y == canvasHeight - this.pieceSize.h) {
          // Already on the floor: stop completely.
          this.speed.y = 0
          this.speed.x = 0
        } else {
          // Soft drop: accelerate the fall.
          this.speed.y += this.pieceSize.h
        }
        break;
      case ` `:
        // Hard drop: snap to the floor and stop.
        this.origin.y = canvasHeight - this.pieceSize.h;
        this.speed.x = 0;
        this.speed.y = 0;
        break;
    }
  }

  /** Renders the piece as a filled rectangle at its current origin. */
  draw(delta: number, ctx: CanvasRenderingContext2D) {
    ctx.strokeStyle = "red";
    ctx.lineWidth = 4;
    ctx.save();
    ctx.translate(this.origin.x, this.origin.y);
    ctx.beginPath();
    ctx.fillStyle = this.pieceColor;
    ctx.fillRect(0, 0, this.pieceSize.w, this.pieceSize.h);
    ctx.closePath();
    ctx.restore();
  }
}
|
<!DOCTYPE html>
<html lang="zh">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<meta http-equiv="X-UA-Compatible" content="ie=edge">
<title></title>
</head>
<body>
<div id="app">
<App></App>
</div>
<script src="../vue.js"></script>
<script>
// Button component: displays the id prop and notifies the parent when
// clicked so the parent can update the value (one-way data flow).
Vue.component('VBtn', {
    data() {
        return {
        }
    },
    template: `
<button @click = 'clickHandler'>
{{ id }}
</button>`,
    props: ['id'],
    methods: {
        clickHandler() {
            // `this` is the current component instance.
            console.log(this);
            // Emit the incremented value instead of mutating the `id` prop
            // (the old `this.id++` triggered Vue's prop-mutation warning and
            // was overwritten on the next parent render anyway). The parent
            // listens for 'clickHandler', updates post.id, and the new value
            // flows back down as the prop.
            this.$emit('clickHandler', this.id + 1);
        }
    }
})
// Header component: shows the message and post passed down from App and
// relays VBtn's click value upward via the 'fatherHandler' event.
let Vheader = {
data() {
return {
}
},
// Any prop declared here can be bound by the parent component.
props: ['msg', 'post'],
template: `
<div class="child">
<h1>我是header组件</h1>
<h2>lee</h2>
<h2>{{ msg }}</h2>
<h3>{{ post.title }}</h3>
<VBtn v-bind:id = 'post.id' @clickHandler="clickHandler"></VBtn>
</div>
`,
methods: {
// Receives the new id from VBtn and forwards it to the parent (App).
clickHandler(val) {
alert(val);
this.$emit('fatherHandler', val);
}
},
created() {
console.log(this);
}
}
// Root component: owns the post data and updates post.id when the event
// bubbled up from VBtn (via Vheader) arrives.
let App = {
data() {
return {
text: "我是父组件的数据2",
post: {
id: 1,
title: 'My Journey with Vue'
}
}
},
// <Vheader :msg='text'></Vheader> binds the parent's data to the child's
// props as custom attributes.
template: `
<div id="a">
我是父组件的{{ post.id }}
<Vheader :msg='text' v-bind:post = 'post' @fatherHandler='father_Handler'></Vheader>
</div>
`,
methods: {
// Receives the forwarded id and writes it back into the owned state.
father_Handler(val) {
this.post.id = val;
}
},
components: {
Vheader
},
created() {
console.log(this)
}
}
// Root Vue instance: mounts App into #app.
new Vue({
el: "#app", // `el` is declared once and binds the root element
data() {
return {
msg: 'steven'
}
},
components: {
App
},
created() {
console.log(this)
}
})
</script>
</body>
</html>
|
import React, { useState } from 'react';
import { View, Text, TouchableHighlight } from 'react-native';
import BoardList from '../../components/BoardList';
import data from '../../resources/data.json';
import styles from './styles'
import AddBoard from '../../components/AddBoard';
const Boards = () => {
const initialBoard = {
id: 0,
name: '',
description: '',
thumbnailPhoto: ''
};
const [boards, setBoards] = useState(data.boards);
const [isAddModalOpen, setIsAddModalOpen] = useState(false);
const [selectedBoard, setSelectedBoard] = useState(initialBoard);
const addEditBoard = (board) => {
if (selectedBoard.id === 0) {
//CREATE
board.id = Math.max(...boards.map((b) => b.id)) + 1;
setBoards([...boards, board]);
} else {
//EDIT
board.id = selectedBoard.id;
setBoards([...boards.filter(x => x.id !== selectedBoard.id), board]);
}
}
const editBoard = (board) => {
setSelectedBoard(board);
setIsAddModalOpen(true);
}
const deleteBoard = (id) => {
const newBoards = boards.filter((board) => board.id !== id);
setBoards([]);
setBoards(newBoards);
}
return (
<View>
<TouchableHighlight
style={styles.button}
onPress={() => setIsAddModalOpen(true)}>
<Text style={styles.buttonText}>Add Board</Text>
</TouchableHighlight>
<BoardList
boards={boards}
deleteBoard={(id) => deleteBoard(id)}
data={data}
editBoard={(board) => editBoard(board)}
/>
<AddBoard
board={boards}
isOpen={isAddModalOpen}
closeModal={() => setIsAddModalOpen(false)}
addEditBoard={(board) => addEditBoard(board)}
selectedBoard={selectedBoard}
/>
</View>
);
}
export default Boards;
|
# zk commands
# Create a note via the zk CLI, then open the most recently created file
# whose name matches the (dash-joined) title.
define-command -params 1..2 \
-docstring %{
zk-new <title> [<directory>]: Create a zk node with the tile. Your kakoune pwd should be root directory of a zk notebook.
} zk-new %{
nop %sh{
zk new -t "$@" --no-input
}
evaluate-commands %sh{
# This way we make sure to open the note that was created most recently
# BUG: lookup fails when the title consists of several words.
name=$(echo $1 | tr " " "-")
file=$(fd -i --type file "$name" -x stat --format="%w %n" {} | sort -n | cut -d " " -f 4 | tail -1)
# echo $file
printf %s\\n "edit $file"
}
}
# Create a note via the zk CLI, then insert a markdown link to it at the
# cursor, using a path relative to the notebook root (found by walking up
# until a .zk directory appears).
define-command -params 1..2 \
-docstring %{
zk-insert-new-node-link <title> [<directory>]: insert a link to a node that does not exist. It creates the node with the title and directory specified. Your kakoune pwd should be the root of a zk notebook
} zk-insert-new-node-link %{
nop %sh{
zk new -t "$@" --no-input
}
evaluate-commands %sh{
# This way we make sure to link the note that was created most recently
name=$(echo $1 | tr " " "-")
file=$(fd -i --type file "$name" -x stat --format="%w %n" {} | sort -n | tail -1 | cut -d " " -f 4 | cut -c 3-)
# Initialize the path variable to the current directory
path=$(dirname "$kak_buffile")
# Initialize the relative path variable
relative_path=""
# Loop until the home directory is reached
while [ "$path" != "/" ]; do
# Check if the .zk folder exists in the current directory
if [ -d "$path/.zk" ]; then
break
else
# Append "../" to the relative path
relative_path="../$relative_path"
# Move up to the parent directory
path=$(dirname "$path")
fi
done
printf %s\\n "execute-keys %{i[]()<esc>hi$relative_path$file<esc><a-f>];i}"
}
}
# Prompt for an existing note (fd-powered menu) and insert a markdown link
# to the chosen file, relative to the notebook root (located by walking up
# until a .zk directory appears).
define-command -docstring %{
Prompt all the nodes and when you select one, it inserts the link to that node where the cursor is placed. Your kakoune pwd should be the root of a zk notebook.
} zk-insert-link %{
prompt node: -menu -shell-script-candidates "fd --type=file" %{
evaluate-commands %sh{
# Initialize the path variable to the current directory
path=$(dirname "$kak_buffile")
# Initialize the relative path variable
relative_path=""
# Loop until the home directory is reached
while [ "$path" != "/" ]; do
# Check if the .zk folder exists in the current directory
if [ -d "$path/.zk" ]; then
break
else
# Append "../" to the relative path
relative_path="../$relative_path"
# Move up to the parent directory
path=$(dirname "$path")
fi
done
printf %s\\n "execute-keys %{i[]()<esc>hi$relative_path$kak_text<esc><a-f>];i}"
}
}
}
# User mode with shortcuts for the commands above.
declare-user-mode zk
map global zk n ':zk-new ' -docstring 'Create and open new zk node'
map global zk l ':zk-insert-link<ret>' -docstring 'Insert a link to a node'
map global zk k ':zk-insert-new-node-link ' -docstring 'Insert a link to a non-existent node (creates it)'
|
<?php
namespace App\Controller;
use App\Entity\Student;
use App\Form\StudentType;
use App\Repository\StudentRepository;
use Doctrine\Persistence\ManagerRegistry;
use Symfony\Bundle\FrameworkBundle\Controller\AbstractController;
use Symfony\Component\HttpFoundation\Request;
use Symfony\Component\HttpFoundation\Response;
use Symfony\Component\Routing\Annotation\Route;
class StudentController extends AbstractController
{
    #[Route('/student', name: 'app_student')]
    public function index(): Response
    {
        return $this->render('student/index.html.twig', [
            'controller_name' => 'StudentController',
        ]);
    }

    #[Route('student/list', name: 'student_list')]
    public function list(): Response
    {
        return $this->render('student/list.html.twig', [
            'list' => 'List of students',
        ]);
    }

    /**
     * Displays and processes the "add student" form.
     *
     * NOTE(review): after a successful submit this re-renders the form
     * instead of redirecting (POST/redirect/GET), so a browser refresh
     * re-submits the same student — consider redirecting to student_list.
     * Behavior kept as-is; only the missing return type was added for
     * consistency with the other actions.
     */
    #[Route('student/add', name: 'app_student_add')]
    public function add(Request $req, ManagerRegistry $m): Response
    {
        $student = new Student();
        $form = $this->createForm(StudentType::class, $student);
        $form->handleRequest($req);
        if ($form->isSubmitted() && $form->isValid()) {
            // Persist the new student through Doctrine.
            $m->getManager()->persist($student);
            $m->getManager()->flush();
        }
        return $this->render('student/add.html.twig', [
            'f' => $form->createView()
        ]);
    }
}
}
|
<template lang="html">
<div class="effect" @click="LeekWars.effectRawOpened = !LeekWars.effectRawOpened">
<tooltip v-if="icon" content-class="fluid">
<template v-slot:activator="{ on }">
<img class="icon" :src="'/image/charac/small/' + icon + '.png'" v-on="on">
</template>
<i18n path="effect.increased_by">
<b slot="charac">{{ $t('characteristic.' + icon) }}</b>
</i18n>
<div>
{{ charac }} {{ $t('characteristic.' + icon) }} :
<span v-if="Math.round(effect.value1 * boost) == Math.round((effect.value1 + effect.value2) * boost)" v-html="$t('effect.type_' + effect.id + '_fixed', [Math.round(effect.value1 * boost)])"></span>
<span v-else v-html="$t('effect.type_' + effect.id, [Math.round(effect.value1 * boost), Math.round((effect.value1 + effect.value2) * boost)])"></span>
</div>
</tooltip>
<span v-if="passive">{{ $t('effect.passive') }}</span>
<i18n v-if="effect.id == 14" path="effect.type_14_fixed">
<b slot="summon">{{ $t('effect.summon_' + effect.value1) }}</b>
</i18n>
<span v-else-if="effect.value2 == 0" v-html="$t('effect.type_' + effect.id + '_fixed', [value1])"></span>
<span v-else v-html="$t('effect.type_' + effect.id, [format(effect.value1), format(effect.value1 + effect.value2)])"></span>
<span v-if="effect.modifiers & EffectModifier.ON_CASTER">
<span v-if="effectThe">
{{ $t('effect.the_caster') }}
</span>
<span v-else>
{{ $t('effect.to_the_caster') }}
</span>
</span>
<b v-if="effect.modifiers & EffectModifier.MULTIPLIED_BY_TARGETS"> {{ $t('effect.multiplied_target') }}</b>
<b v-if="effect.turns === -1">{{ $t('effect.infinite') }}</b>
<i18n v-else-if="effect.turns > 0" path="effect.on_n_turns">
<span slot="turns" v-html="$tc('effect.n_turns', effect.turns)"></span>
</i18n>
<span v-if="effect.modifiers & EffectModifier.STACKABLE">
(<b>{{ $t('effect.stackable') }}</b>)
</span>
<span v-if="effect.modifiers & EffectModifier.NOT_REPLACEABLE">
(<b>{{ $t('effect.not_replaceable') }}</b>)
</span>
<span v-if="effect.modifiers & EffectModifier.IRREDUCTIBLE">
(<b>{{ $t('effect.irreductible') }}</b>)
</span>
<tooltip v-if="enemies && !allies">
<template v-slot:activator="{ on }">
<span class="ennemies" v-on="on" />
</template>
<span>{{ $t('effect.target_enemies') }}</span>
</tooltip>
<span>
<tooltip v-if="allies && !enemies">
<template v-slot:activator="{ on }">
<span class="allies" v-on="on"></span>
</template>
<span>{{ $t('effect.target_allies') }}</span>
</tooltip>
</span>
<span>
<tooltip v-if="!caster">
<template v-slot:activator="{ on }">
<span class="not-player" v-on="on"></span>
</template>
<span>{{ $t('effect.target_not_player') }}</span>
</tooltip>
</span>
<span>
<tooltip v-if="!nonSummons">
<template v-slot:activator="{ on }">
<span class="summons" v-on="on"></span>
</template>
<span>{{ $t('effect.target_summons') }}</span>
</tooltip>
</span>
<span>
<tooltip v-if="!summons">
<template v-slot:activator="{ on }">
<span class="not-summons" v-on="on"></span>
</template>
<span>{{ $t('effect.target_not_summons') }}</span>
</tooltip>
</span>
<lw-code v-if="LeekWars.effectRawOpened" :single="true" :code="'[' + effect.id + ' ' + EffectType[effect.id] + ', ' + format(effect.value1) + ', ' + format(effect.value1 + effect.value2) + ', ' + effect.turns + ', ' + effect.targets + ', ' + effect.modifiers + ']'" class="raw" />
</div>
</template>
<script lang="ts">
import { Effect, EffectModifier, EffectType, State } from '@/model/effect'
import { i18n } from '@/model/i18n'
import { LeekWars } from '@/model/leekwars'
import { store } from '@/model/store'
import { Component, Prop, Vue } from 'vue-property-decorator'
/**
 * Renders one effect line of an item/chip/weapon tooltip: value range,
 * duration, target flags and modifier badges. Clicking toggles the global
 * raw-effect display (LeekWars.effectRawOpened).
 */
@Component({ name: 'effect-view' })
export default class EffectView extends Vue {
	@Prop() effect!: Effect
	@Prop() passive!: boolean
	// Exposed so the template can test effect.id / effect.modifiers flags.
	EffectModifier = EffectModifier
	EffectType = EffectType
	// (Removed the unused `raw_opened` field: the template reads the shared
	// LeekWars.effectRawOpened flag instead.)

	/** First value; resolved to a translated state name for ADD_STATE. */
	get value1() {
		if (this.effect.id === EffectType.ADD_STATE) {
			return i18n.t('effect.state_' + this.effect.value1)
		}
		return this.format(this.effect.value1)
	}
	// Target bit flags of effect.targets. All typed as boolean for
	// consistency (`enemies` previously leaked the raw number).
	get enemies(): boolean { return (this.effect.targets & 1) !== 0 }
	get allies(): boolean { return (this.effect.targets & (1 << 1)) !== 0 }
	get caster(): boolean { return (this.effect.targets & (1 << 2)) !== 0 }
	get nonSummons(): boolean { return (this.effect.targets & (1 << 3)) !== 0 }
	get summons(): boolean { return (this.effect.targets & (1 << 4)) !== 0 }
	/** Heal effects use the "the caster" wording instead of "to the caster". */
	get effectThe(): boolean {
		return this.effect.id === EffectType.HEAL || this.effect.id === EffectType.RAW_HEAL
	}
	/** Two decimals for fractional values, unchanged otherwise. */
	format(n: number) {
		if (Math.floor(n) !== n) {
			return n.toFixed(2)
		}
		return n
	}
	/** Characteristic icon shown next to the effect, if it scales with one. */
	get icon() {
		if ([EffectType.DAMAGE].includes(this.effect.id)) { return 'strength' }
		if ([EffectType.LIFE_DAMAGE].includes(this.effect.id)) { return 'life' }
		if ([EffectType.HEAL, EffectType.BOOST_MAX_LIFE].includes(this.effect.id)) { return 'wisdom' }
		if ([EffectType.ABSOLUTE_SHIELD, EffectType.RELATIVE_SHIELD].includes(this.effect.id)) { return 'resistance' }
		if ([EffectType.DAMAGE_RETURN].includes(this.effect.id)) { return 'agility' }
		if ([EffectType.BUFF_STRENGTH, EffectType.BUFF_RESISTANCE, EffectType.BUFF_WISDOM, EffectType.BUFF_AGILITY, EffectType.BUFF_MP, EffectType.BUFF_TP, EffectType.AFTEREFFECT, EffectType.NOVA_DAMAGE, EffectType.NOVA_VITALITY].includes(this.effect.id)) { return 'science' }
		if ([EffectType.POISON, EffectType.SHACKLE_MP, EffectType.SHACKLE_TP, EffectType.SHACKLE_STRENGTH, EffectType.SHACKLE_MAGIC, EffectType.SHACKLE_AGILITY, EffectType.SHACKLE_WISDOM].includes(this.effect.id)) { return 'magic' }
	}
	/** The connected farmer's first leek, or null when logged out. */
	get my_leek() {
		return store.state.farmer ? LeekWars.first(store.state.farmer!.leeks) : null
	}
	/** Current value of the relevant characteristic on that leek. */
	get charac() {
		return this.icon && this.my_leek ? this.my_leek[this.icon] : 0
	}
	/** Multiplier applied to the displayed values based on the characteristic. */
	get boost() {
		if (this.icon === 'life') {
			return this.charac / 100
		} else {
			return 1 + this.charac / 100
		}
	}
}
</script>
<style lang="scss" scoped>
div img.icon {
width: 16px;
margin-bottom: 1px;
}
.effect {
cursor: pointer;
}
.raw {
font-size: 13px;
padding: 4px 0;
display: block;
border: none;
}
</style>
|
// Produces a short pseudo-random id: up to 7 lowercase hex characters.
// Not cryptographically secure — fine for local task ids.
function generateId () {
  const randomHex = Math.random().toString(16)
  return randomHex.slice(2, 9)
}
function getTasks () {
const tasks = JSON.parse(window.localStorage.getItem('tasks')) || []
return tasks
}
function checkAndDisplayEmptyMessage(taskList) {
if (taskList.children.length === 0) {
taskList.innerHTML = '<li class="fromLeft" style="text-align: center">Não há tarefas</li>';
}
}
// Render a single task object as an <li> inside its tab's list
// ('day' or 'night'), clearing the empty-state placeholder first.
function addToDOM (taskObject) {
const { completed, id, label, tab } = taskObject
const dayTasks = document.querySelector('.day-tasks')
const nightTasks = document.querySelector('.night-tasks')
// Remove the "Não há tarefas" placeholder before appending a real task.
if (dayTasks.textContent === 'Não há tarefas') {
dayTasks.textContent = ''
}
if (nightTasks.textContent === 'Não há tarefas') {
nightTasks.textContent = ''
}
// Build the <li>: a checkbox (pre-checked when completed), a label,
// and a delete button. The inline onclick attributes resolve
// toggleCompleteStatus/deleteTask as globals at click time.
const task = Object.assign(
document.createElement('li'),
{
className: 'task fromLeft',
innerHTML: `
<input
${completed ? 'checked' : ''}
id="${id}"
onclick="toggleCompleteStatus(event)"
type="checkbox"
/>
<label for="${id}" class="${completed ? 'completed' : ''}">${label}</label>
<button class="btn-delete" onclick="deleteTask(event)">x</button>
`
}
)
if (tab === 'day') {
dayTasks.appendChild(task)
} else {
nightTasks.appendChild(task)
}
// Re-show the placeholder for whichever list is (still) empty.
checkAndDisplayEmptyMessage(dayTasks);
checkAndDisplayEmptyMessage(nightTasks);
}
function addTask () {
  // Create a new task from the form fields, persist it, and render it.
  const inputTask = document.getElementById('new-task')
  const tabSelect = document.getElementById('task-tab')
  // Check that both elements exist BEFORE dereferencing .value — the
  // previous version read task-tab's .value first and threw when the
  // element was missing from the page.
  if (!inputTask || !tabSelect) return
  if (!tabSelect.value || inputTask.value.trim() === '') return
  const newTask = {
    id: generateId(),
    label: inputTask.value,
    tab: tabSelect.value,
    completed: false
  }
  const tasks = getTasks()
  const tasksUpdated = [...tasks, newTask]
  window.localStorage.setItem('tasks', JSON.stringify(tasksUpdated))
  inputTask.value = ''
  addToDOM(newTask)
}
function toggleCompleteStatus({ target }) {
  // Flip the completed flag of the clicked task in storage, then mirror
  // the change on the label's CSS class.
  const tasks = getTasks()
  const index = tasks.findIndex((task) => task.id === target.id)
  if (index === -1) {
    console.error('Task not found')
  } else {
    tasks[index].completed = !tasks[index].completed
    window.localStorage.setItem('tasks', JSON.stringify(tasks))
  }
  // Toggle the label style regardless — the checkbox state has already
  // changed in the UI by the time this handler runs.
  target.nextElementSibling.classList.toggle('completed')
}
function deleteTask({ target }) {
  // Remove the task whose delete button was clicked, from storage and DOM.
  // The task id lives on the <label> immediately before the button.
  const id = target.previousElementSibling.htmlFor
  const remaining = getTasks().filter((task) => task.id !== id)
  window.localStorage.setItem("tasks", JSON.stringify(remaining))
  target.parentNode.remove()
  // Either list may now be empty — restore placeholders if so.
  const dayTasks = document.querySelector('.day-tasks')
  const nightTasks = document.querySelector('.night-tasks')
  checkAndDisplayEmptyMessage(dayTasks);
  checkAndDisplayEmptyMessage(nightTasks);
}
(function attachTasks() {
  // On page load: render every stored task, or show the empty-state
  // placeholder in both lists when nothing is stored.
  const tasks = getTasks();
  const dayTasks = document.querySelector('.day-tasks')
  const nightTasks = document.querySelector('.night-tasks')
  if (!tasks || tasks.length === 0) {
    checkAndDisplayEmptyMessage(dayTasks);
    checkAndDisplayEmptyMessage(nightTasks);
    return
  }
  for (const task of tasks) {
    addToDOM(task)
  }
})()
|
# Backgrounds
Your character has a story that predates their involvement in the adventures to come. Whether you came to your calling willingly or not, you did not appear out of thin air... unless _you did_, and that in itself is a story! Character backgrounds represent what your character did or trained to do before starting the adventurer's life. While the backgrounds in Chapter 1 of the _Fantasy AGE Basic Rulebook_ are admirably broad, the backgrounds included here allow you to tailor your character more to the races, cultures, and circumstances of Yewdar.
## Regional Backgrounds
Yewdar backgrounds are based on where in the world your character hails from.
* [Kulturia](#kulturia)
* [Cthene](#cthene)
* [Grendian Mountains](#grendian-mountains)
* [Laurentia](#laurentia)
* [Hol Khozet](#hol-khozet)
* [Cheland](#cheland)
* [Reisian Principalities](#reisian-principalities)
### Kulturia
An old, proud nation; home to the Osan human peoples. When formerly colonized by the Alsan peoples of the Aqori Imperium, they called the land Nevrodt. The Osan natives eventually overthrew Nevrodt in the late Tempest Age and established their own kingdom. After colonizing the Laurentian Plains and discovering the arcanathyst deposits beneath Apex City, Kulturia became a technological and economic powerhouse in the world. In the aftermath of the Bloodless War and the secession of Laurentia, the kingdom's future is now in doubt. Kulturia's climate is cold-to-temperate, with rich traditions of sailing, fishing, and animal husbandry. The government is a hereditary monarchy, with a number of aristocratic houses ruling the various cities and provinces that answer to the throne. The current reigning monarch is Queen Alasce. Kulturians are proud of their history, but uniquely sensitive to individual rights, despite being ruled by the Crown.
| 1d6 Result | Background | Focus |
| :---: | :---: | :--- |
| 1 | Aristocrat | Communication \(Etiquette\) or Intelligence \(Heraldry\) |
| 2 | Scholar | Intelligence \(Historical Lore\) or Intelligence \(Religious Lore\) |
| 3 | Soldier | Accuracy \(Brawling\) or Communication \(Gambling\) |
| 4 | Laborer | Constitution \(Stamina\) or Strength \(Might\) |
| 5 | Arcanist | Intelligence \(Arcane Lore\) or Intelligence \(Research\) |
| 6 | Merchant | Communication \(Bargaining\) or Communication \(Deception\) |
### Cthene
Cthene is not technically a nation-state. A large expanse of desert south of Kulturia, it is the location of a former dragon empire that aligned itself with the overthrown Nevrodt Empire. When the Day of Broken Scales transpired, the dragons that populated Cthene disappeared, and a natural disaster turned the region from grasslands and hill country into leagues of barren desert. Many bands of Drakon, left behind by their dragon creators, eke out a living in the Cthene Wastes. They band together in nomadic tribes that take shelter from the unforgiving desert climate in a series of protected sietches: rock formations that provide shelter from the sandstorms and brutal daytime sun. These tribes are not fully aligned with each other, and there is no functioning "national" government in the desert.
| 1d6 Result | Background | Focus |
| :---: | :---: | :--- |
| 1 | Criminal | Communication \(Deception\) or Dexterity \(Lock Picking\) |
| 2 | Exile | Communication \(Bargaining\) or Intelligence \(Cultural Lore\) |
| 3 | Nomad | Constitution \(Stamina\) or Intelligence \(Navigation\) |
| 4 | Hermit | Constitution \(Stamina\) or Willpower \(Self-Discipline\) |
| 5 | Arcanist | Intelligence \(Arcane Lore\) or Intelligence \(Research\) |
| 6 | Explorer | Perception \(Searching\) or Intelligence \(Cartography\) |
### Grendian Mountains
Slicing through the continent, the Grendian mountain range is home to Osenia's dwarven peoples. The range is home to many wild animals and is a rewarding hunting ground for those brave enough to venture into the foothills. Clear mountain passes through the ridge are few, but with the advent of airships this no longer proves detrimental to continental trade.
The dwarves live in great holds beneath the mountains, their subterranean cities and villages crafted by their steady, patient hands. They are an intensely private people, and few outsiders ever get to venture beneath the surface. The dwarven government has two political entities: the Crown and a feudal council known as the Clansraed. The Clansraed is a hereditary council of feudal aristocrats that expresses the will of the clans from across the mountains. The Crown is a hereditary monarchy maintained by the current ruling clan, who leads the clans in war and negotiates with other nations on their behalf. The ruling clan can be replaced by another clan by rite of combat against the ruling monarch or unanimous discommendation by the entire Clansraed. The current ruler is Dorin Axforge, who rules the clans from his clan's ancestral hold at Rockhome in central Grendia.
| 1d6 Result | Background | Focus |
| :---: | :---: | :--- |
| 1 | Soldier | Accuracy \(Brawling\) or Communication \(Gambling\) |
| 2 | Tradesperson | Communication \(Bargaining\) or Intelligence \(Evaluation\) |
| 3 | Guilder | Communication \(Etiquette\) or Dexterity \(Crafting\) |
| 4 | Merchant | Communication \(Bargaining\) or Communication \(Deception\) |
| 5 | Aristocrat | Communication \(Etiquette\) or Intelligence \(Heraldry\) |
| 6 | Functionary | Communication \(Leadership\) or Communication \(Persuasion\) |
### Laurentia
A mercantile oligarchy that separated from Kulturia some 25 years ago. Home to Apex City, the great metropolis of the Osenian continent. Run by a council of powerful commercial enterprises and merchant houses, Laurentia is the largest producer of arcanathyst in the world. Laurentia boasts most of the world's airship yards, and the vehicles are their second-largest export. They have a large population of Talosan, artificial constructs, which were built to serve as an army in what was assumed to be a civil war against the Kulturian crown. When Kulturia stood down their forces on the eve of battle in what would be known as the Bloodless War, the Talosan were found to have developed sentience. They have since integrated (somewhat) into society. Laurentia is governed by the Board of Ministers. The current First Minister is Dimitri Vengar, owner of the Mesa Yield Mining company.
Outside of Apex, the rolling plains extend for hundreds of miles in all directions. Despite decent fertility, settlements on the plains are few; Kulturian interests focused on developing Apex City's material wealth instead of populating the countryside with settlers. The new Laurentian government has recently encouraged immigrants to settle their outlying regions with an eye on increasing domestic tax revenue, agricultural production, and the pool of potential military conscripts.
| 1d6 Result | Background | Focus |
| :---: | :---: | :--- |
| 1 | Guilder | Communication \(Etiquette\) or Dexterity \(Crafting\) |
| 2 | Laborer | Constitution \(Stamina\) or Strength \(Might\) |
| 3 | Performer | Communication \(Performance\) or Intelligence \(Musical Lore\) |
| 4 | Skysailor | Constitution \(Drinking\) or Dexterity \(Skysailing\) |
| 5 | Tradesperson | Communication \(Bargaining\) or Intelligence \(Evaluation\) |
| 6 | Scholar | Intelligence \(Historical Lore\) or Intelligence \(Religious Lore\) |
### Hol Khozet
The vast jungles of Southern Osenia. While not an officially declared nation as others might be, all other countries respect the jungles as de facto sovereign territory of the orcish people. The jungles themselves are dense, dark, and dangerous to lifeforms not prepared for its vicious cycles of life and death. The orcs evolved in this humid crucible, and from it they draw their legendary strength and tenacity. The orcs have had extensive contact with the rest of the world, but not as a single entity. There are thirteen orcish clans, and each has its own traditions and opinions on dealing with other peoples and travel to other lands. There is a history of bitter conflict between the orcs and the dwarves of Grendia, with whom they share the longest national border on the continent.
The most notable single feature of this region is Khozet Tharn, a large freshwater lake in the northern jungle. It is home to a variety of marine life as well as the only neutral ground in orcish society. Killing anything but game on its shores is taboo.
| 1d6 Result | Background | Focus |
| :---: | :---: | :--- |
| 1 | Grunt | Constitution \(Stamina\) or Strength \(Might\) |
| 2 | Wise One | Intelligence \(Religious Lore\) or Willpower \(Faith\) |
| 3 | Mystic | Intelligence \(Arcane Lore\) or Willpower \(Self-Discipline\) |
| 4 | Fist Leader | Communication \(Leadership\) or Strength \(Intimidation\) |
| 5 | Soldier | Accuracy \(Brawling\) or Communication \(Gambling\) |
| 6 | Warcaller | Communication \(Persuasion\) or Communication \(Leadership\) |
### Cheland
Cheland is not a single nation. The island sub-continent is bisected by the same mountain range that forms Grendia on the mainland. To the north of the mountains are thousands of square kilometers of prairie plains that host the most productive agriculture in the region. South of the mountains lies a dank fungal forest that contains exotic and gargantuan mushrooms and other spore-based life… including the enigmatic myconids. The mountain range itself is a jagged ridge crowned by an active volcano that the native Irsan human peoples call "the Forge." Cheland's Irsan culture is the only democracy on the continent; their monarch is elected by popular vote to a six-year term. The current queen is Rike (pronounced _REE-kuh_) Trubel, a rice farmer from the Lake Edi region.
| 1d6 Result | Background | Focus |
| :---: | :---: | :--- |
| 1 | Sailor | Constitution \(Drinking\) or Dexterity \(Sailing\) |
| 2 | Artist | Intelligence \(Cultural Lore\) or Intelligence \(Evaluation\) |
| 3 | Laborer | Constitution \(Stamina\) or Strength \(Might\) |
| 4 | Merchant | Communication \(Bargaining\) or Communication \(Deception\) |
| 5 | Innkeeper | Communication \(Bargaining\) or Perception \(Empathy\) |
| 6 | Scribe | Dexterity \(Calligraphy\) or Intelligence \(Writing\) |
### Reisian Principalities
An archipelago of seven islands off the southeast coast of Osenia, the Reisian Principalities are the political homeland of the halfling peoples. The islands were granted to their first leader, Reis the Grand Explorer, as a boon for her intrepid expedition to map the entirety of the continent. The Principalities are seven independent states that trade and negotiate with each other while interacting as a single nation with other countries. The seven islands are Hinkios, Sokos, Taros, Raxos, Korfea, Phodes, and Icalonia. Their low latitude and tropical climate make the islands a destination for well-heeled tourists, sea- and air-going merchants, and explorers of all kinds. Their capital, Hinshire, upon the big island of Hinkios, is a major port for trade with the halflings, orcs, Cheland, and northern Osenia.
| 1d6 Result | Background | Focus |
| :---: | :---: | :--- |
| 1 | Explorer | Perception \(Searching\) or Intelligence \(Cartography\) |
| 2 | Sailor | Constitution \(Drinking\) or Dexterity \(Sailing\) |
| 3 | Merchant | Communication \(Bargaining\) or Communication \(Deception\) |
| 4 | Criminal | Communication \(Deception\) or Dexterity \(Lock Picking\) |
| 5 | Guilder | Communication \(Etiquette\) or Dexterity \(Crafting\) |
| 6 | Innkeeper | Communication \(Bargaining\) or Perception \(Empathy\) |
## Background Descriptions
You are invited to freely interpret your background to suit your character's history and experiences. Even two soldiers in the same army will not have arrived to their service in the same way, and the details of their lives, fears, desires, and aspirations are different. Treat your character background as a way to help build the foundation of your character's past, not a limitation on their future.
* [Arcanist](#arcanist)
* [Aristocrat](#aristocrat)
* [Artist](#artist)
* [Criminal](#criminal)
* [Exile](#exile)
* [Explorer](#explorer)
* [Fist Leader (Orc)](#fist-leader-orc)
* [Functionary (Dwarf)](#functionary-dwarf)
* [Grunt (Orc)](#grunt-orc)
* [Guilder](#guilder)
* [Hermit](#hermit)
* [Laborer](#laborer)
* [Merchant](#merchant)
* [Mystic](#mystic)
* [Nomad](#nomad)
* [Performer](#performer)
* [Sailor](#sailor)
* [Scholar](#scholar)
* [Scribe](#scribe)
* [Skysailor](#skysailor)
* [Soldier](#soldier)
* [Tradesperson](#tradesperson)
* [Warcaller (Orc)](#warcaller-orc)
* [Wise One (Orc)](#wise-one-orc)
### Arcanist
You have studied and trained in the science of magic, honing your skills to create new arcane items, empower magical conveniences, and repair them when they fail. The so-called "pure" mages may look down on you, but where they lose themselves in their explorations of personal power, you should take pride in your service. Modern society would certainly break down without you.
### Aristocrat
You were born to a noble family, and you take your family/clan ties seriously. You were taught your letters and the arts, cultural and military history, by the best teachers your family could retain. You may be authentic and down-to-earth, or snooty and aloof, but your elevated status insulates you from consequences that cause others to hesitate.
### Artist
As the precocious child chases an enchanting butterfly, so do you seek ever after beauty. It may take a specific shape or many, but whether a poem, song, sculpture or painting, bringing beauty into the world drives you ever on. You may have apprenticed with another artist, in a studio with other aspiring creatives, or maybe you simply have exceptional natural talent. Regardless, you likely specialize in a particular style of art, and you must determine how to make your living from it.
### Criminal
Via bad decisions or bad fortune, you've wound up an outcast with no legitimate place in the world. Either by your own wits or the teaching of others, you've learned how to survive by any means possible. You know your way around the city, and how to spot a mark. You can tell which houses are worth robbing and which are a waste of time. And you know that if you're caught, you're dead.
### Exile
Maybe you did it. Maybe your family did. Regardless, _someone_ did _something_ that led to your exile. Maybe you made the wrong friends, or chose the wrong side in a conflict, or broke a taboo. Whatever it was, you've lived in the wilds for a while, and you know how to take care of yourself.
### Explorer
You are filled with wanderlust, a drive to go see what is over the next hill. Whether by sea, land, or air, journeying to new lands and finding forgotten places thrills you. Some explorers voyage for profit, some for the sheer joy of discovery. Your travels may lead to new trade routes, the preservation of history, or great wealth; you need only set out!
### Fist Leader (Orc)
You lead a fist of orcs, a small band of warriors and mystics that carry the honor of your sept and clan. Your strength, whether by spell or spear, inspires your followers. Whether scouting, hunting, or marching to war, you are the backbone of orcish might and all glory begins at the tip of your spear.
### Functionary (Dwarf)
The intricately formal society of the dwarves requires intermediaries like you who keep the cogs of their kingdoms turning. You take the orders from your superiors, be they guilders, nobles, or merchant princes, and you make sure their wishes are carried out while adhering to the dwarven law and tradition.
### Grunt (Orc)
You are an orcish laborer, one of the builders, choppers, or artisans who make your sept's holdings livable in the hostile jungle. You are strong from a life of carrying supplies and resources. Where the clan warriors are fierce, you are durable. You are the last line of defense for your people, and your broad shoulders carry the weight of all others.
### Guilder
You work for one of the powerful mercantile or craft guilds that control trade in the larger cities. You may know a trade, or you may just know how to manipulate money and people to get what you want. Your talents and connections ensure you and your family do quite well, which is how you hope to keep it.
### Hermit
You left civilization for some reason to become a hermit. Perhaps you took a holy vow, perhaps you needed time alone to ponder an important question or remove yourself from a difficult situation, or perhaps you just don’t care for people and are happy to be on your own. Time on your own has hardened you and made you self-reliant.
### Laborer
You carry, lift, tote, throw, handle, move, or otherwise deal with loads of material that have to be moved off of or onto ships or wagons or into buildings. The work isn’t difficult, but it is physically demanding, and you have the stamina and strength to show for it.
### Merchant
You have a way with people and you use that in your business dealings to become successful. You use your charisma to get the best deals or to convince others they’re getting the best deal. It's all in service to your success. You may have a small merchant stand selling odds and ends in a bazaar, or you may operate a thriving shop that specializes in carpets, foreign imports, or other valuables. Instead of a store, perhaps you travel from one place to another, buying unusual items in one place and transporting them to sell in another. Regardless of the type of merchant you are, you know how to come out ahead in any deal.
### Mystic
You have been initiated into the hidden mysteries, and know rituals to curry favor with the gods or spirits. In you is preserved the ancient knowledge, passed down in an unbroken line from the mystics of old to those they deemed worthy to carry on the sacred traditions. Some may respect you, while others may find your practices odd or objectionable. You don't care; through such practice you've touched true wisdom.
### Nomad
The open road calls to you, and you have answered. Instead of calling a single location home, you regard the entire world as yours. You love to wander. You constantly seek out new places, and meet new people. You rarely get lost, but even when you do, it usually turns out to be an adventure worth experiencing.
### Performer
You are a dancer, actor, musician, singer, or some other sort of performer. You enjoy getting in front of a room of people and affecting them in some way with your performance. You especially like it when they show their appreciation in other ways, such as with money, favors, or affection.
### Sailor
You know your way around a ship better than most. You’ve trained and lived at sea for long stretches of time, either in a navy or on a trading ship—maybe both at different times. You may call some port home, but most of your time is spent at sea, so you’ve traveled to many different parts of the world and seen things few can dream of.
### Scholar
When lords, generals, and politicians have questions, they come to you. You’ve studied extensively and if you don’t know an answer to a question, you can likely find someone else or a book that does. Others may change the course of history, but you’re the one who tells them how to do it.
### Scribe
Books, scrolls, and portfolios are what you fill your time with. The greatest libraries and universities of your time require your services to do what they do and they pay well for your services. Knowledge that no one ever sees isn’t knowledge — it’s a secret. When you get your hands on secrets, you commit them to books, then preserve and spread that knowledge across the world and through time. Do you revel in the importance of your profession or despise it as an isolating, backbreaking task?
### Skysailor
See [Sailor](#sailor)
### Soldier
You're a soldier, possibly even a guard or mercenary, who has signed up to fight for your country. You’ve trained for combat, but you also had an education in how to live and work as part of a unit as well as how to take and execute an order. A soldier who doesn’t follow orders is useless. Perhaps your time as a soldier is over and you’re on your own again, or maybe you deserted for some reason.
### Tradesperson
Resting somewhere between the laborer and guilder, the tradesperson specializes in some kind of skilled manual labor, such as masonry, carpentry, clockmaking, blacksmithing, or any of dozens of other trades. You’ve been trained in a trade and know how to talk about its practical and theoretical applications, which allows you to accurately plan and execute jobs.
### Warcaller (Orc)
By birth, selection, or rite of challenge, you lead \(or once led\) a sept of orc clanfolk. Many fists of warriors answer your call, and you are yourself a fierce combatant in your own right. While the sept serves you, you also serve the sept, and when a threat arises you may be called upon to leave your people and deal with it personally.
### Wise One (Orc)
See [Mystic](#mystic)
|
# RabbitMQ
- is a message broker
- like a postal service
- gets msgs from producer to consumer
- producer pushes msg to broker
- broker knows how to get it to its final destination -- producer doesn't need to keep checking in
- aka ASYNC COMMUNICATION
- synchronous communication -- e.g. http req -- wait around til api responds -- can't do anything till it does -- eats up resources
- consumers subscribe to msgs that come off broker
- async communication on both ends -- from producer to broker and from broker to consumer
- local runs on port 15672
# Exchange
- heart of msg broker
- knows how to route msg from producer to consumer
- diff types of exchanges for diff types of msgs -- analogous to express vs registered posts in postal service
- producer always sends msg to Exchange
- push msgs into 1+ queue(s)
- diff types (determines whether msg should be sent to just 1 queue, multiple queues, should msg be discarded, etc):
1. direct
2. topic
3. headers
4. fan-out
# Queue
- where msgs sit till read / consumed by interested consumer(s)
- like a mailbox
- tied to exchanges w/ bindings

# Binding
- Exchange can be tied to multiple Queues
- Queue can be tied to multiple Exchanges
# Connection
- every producer / consumer should open ONE TCP connection to broker
- one connection can have multiple CHANNELS
- saves resources
# Channel
- could assign one channel to each thread of a producer process
- msgs in diff channels are isolated from each other
# AMQP
- protocol which rabbitmq runs on
- "open standard" for passing msgs b/t apps
- uses RPC pattern to let one computer execute programs / methods on another computer
- like letting client execute method on broker
- 2-way communication
- both client and broker can use RPC to run programs on each other
- rabbitmq uses cmds which consist of classes and methods to enable communication b/t clients and broker
- e.g. client can send exchange_declare cmd to broker
- tells broker to create new exchange
- class = exchange; method = declare
- data required to execute the cmd is included in a "frame" (just a data structure with a standard structure)
- got multiple frame types defined by AMQP
1. method frame
2. content header frame
3. body frame
4. heartbeat frame
# Competing Consumers Pattern
- common messaging pattern
- aka Work Queue
- used to distribute time-consuming tasks (e.g. processing an img or doing some ML; sth that would take longer than HTTP window to complete) among multiple workers
- e.g. producer pops out msgs every 5s and consumer takes 10s to process 1 msg
- queue will fill up -- so have multiple consumers to avoid out-of-memory issue
- rabbitmq assigns msgs/tasks in queue to subscribed consumers in round robin manner by default
- round robin isn't ideal when consumers have very diff processing times from each other
- to overcome this, set prefetch value to 1 so if consumer has 1 msg in flight, it won't assign msg in queue to that consumer
- having a queue completely decouples producers from consumers
- so if we notice a bunch of waiting tasks in queue, we can just add another consumer to take 1/nth of the load -- ezpz
- SCALABILITY
- with this ability to add consumers easily, we make the sys more RELIABLE if 1 consumer / worker dies
# Pub/Sub Pattern
- another common messaging pattern
- like the opposite of Competing Consumers
- in CC, only 1 worker will get a certain msg
- in P/S, all workers get a copy of each msg
- like in microservices architecture, msg could be about new user
- 1 service for storing in db
- 1 service for auditing
- 1 service for handling promotions
- implementation detail: rabbitmq doesn't actually dup these msgs -- just create 1 msg and have each interested queue keep ref to that msg
- uses fan-out exchange -- will publish each msg to multiple queues
- exchange uses Bindings to determine which queues are interested in its msgs
- service can leverage Temporary Queues rather than declaring a queue upfront
- only keep the queue around when you're interested in receiving certain msgs

# Smart Routing
- enable services to subscribe to only subset of msgs
- achievable with Topic Exchange / Direct Exchange + smart usage of binding + routing keys
## Direct Exchange
- more flexible routing
- routing algo deps on binding and routing keys to route msg only to interested consumers
- binding key: specific name on some binding
- routing key: what the producer associated with a particular msg
- for Msg X to make it to Queue Y, X's routing key must = binding key b/t exchange and Y

- Payments and Shop services only get subset of msgs from Producer but Analytics service has multiple bindings on its queue which will get all msgs from Producer
## Topic Exchange
- no arbitrary routing key -- must be list of words delimited by dots w/ some meaning to sys or msg being sent
- e.g. `user.europe.payments` - denotes msg is relevant for `user` entity or any service interested in `user` entity, for region `europe`, deals w/ maybe receiving / sending payment

- illustrates how Topic Exchange is just a superset of Direct Exchange
- Topic Exchange also supports wildcards in binding keys tho
- `*` means substitute exactly 1 word
- `#` means substitute 0+ words

# Request-Reply Pattern
- another common messaging pattern
- rather than having simple producer and consumer, now have 2 entities which will both consume and produce msgs -- call em "client" and "server"
- client:
- sends req to server
- declares queue that it'll receive replies from
- pushes reqs to Exchange of unspecified type
- tells server which queue to push reply to by setting `reply_to` field
- can also specify the `message_id` and `correlation_id` so client knows which reply corresponds to which req
- server processes req and sends reply

# Exchange to Exchange Routing
- exchanges can be bound not only to queues but also other exchanges
# Headers Exchange

- `x-match: any` means if any of the headers of msg match any of the headers in Binding, route msg along that Binding
- `x-match: all` means only route msg along a Binding if Binding's headers are superset of msg's headers
# Consistent Hashing Exchange
- part of separate plugin that needs to be installed -- not part of rabbitmq installation
- install on Intel Mac with `/usr/local/opt/rabbitmq/sbin/rabbitmq-plugins enable rabbitmq_consistent_hash_exchange`
- wanna distribute msgs amongst a bunch of workers equally
- Competing Consumers pattern alr does this but this is just another option if you don't want rabbitmq to decide which queue that msg should go to
- could decide based on property of msg itself
- could assign weight to workers (e.g. assign heavier weight to worker w/ better hardware)
- bind Consistent Hashing Exchange to queues w/ `routing_key` equal to some number
- number denotes weight of msgs which should be assigned to each queue
- so if they're all 1, then all queues get same number of msgs
- setting just one of the `routing_key`s to 2 means you want the attached queue to get twice the number of msgs
- `routing_key` number is directly proportional to how much hashing space each queue gets assigned
- incoming msg gets its `routing_key` hashed to fall somewhere in this hashing space

- 2 msgs w/ the same `routing_key` will get routed to the same queue / service
- but beware of binding yet another queue to the CHE -- hashing space has changed -- msgs which used to go to Service B might not go to Service B anymore after introducing another queue
|
import UserService from "@/services/UserService";
import { CODE_ROUTE, HOME_ROUTE, REGISTRATION_ROUTE, PROFILE_ROUTE } from "@/utils/consts";
import { defineStore } from "pinia";
import { ref } from 'vue'
import { useRouter } from "vue-router";
import { GoogleAuthProvider, signInWithPopup, signInWithPhoneNumber } from "firebase/auth";
import { auth } from '@/firebase/index'
export const useAuth = defineStore('auth', () => {
  // Auth store: holds the signed-in user and their ID token, and wires
  // email, Google and phone-number sign-in flows to the router.
  const user = ref({})
  const token = ref('')
  const router = useRouter()

  // Register with email/password, cache the token, go to the profile page.
  const userRegistrationWithEmail = async (userData) => {
    try {
      const response = await UserService.registration(userData)
      token.value = response.data.idToken
      user.value = response.data.email
      sessionStorage.setItem('token', token.value)
      console.log(user.value)
      router.push(PROFILE_ROUTE)
    }
    catch (err) {
      console.log(err)
    }
  }

  // Sign in with email/password; mirrors the registration flow.
  const userLoginWithEmail = async (userData) => {
    try {
      const response = await UserService.login(userData)
      token.value = response.data.idToken
      sessionStorage.setItem('token', token.value)
      user.value = response.data.email
      router.push(PROFILE_ROUTE)
    }
    catch (err) {
      console.log(err)
    }
  }

  // Sign in through the Google popup.
  const userLoginWithGoogle = async () => {
    try {
      const provider = new GoogleAuthProvider()
      // Await the popup instead of chaining .then(): with the previous
      // .then() chain a rejected popup promise escaped this try/catch
      // and surfaced as an unhandled rejection.
      const response = await signInWithPopup(auth, provider)
      GoogleAuthProvider.credentialFromResult(response)
      token.value = response.user.accessToken
      sessionStorage.setItem('token', token.value)
      user.value = response.user
      router.push(PROFILE_ROUTE)
    }
    catch (err) {
      console.log(err)
    }
  }

  // Sign out: clear local auth state (including the sessionStorage token
  // that the sign-in flows set) and return to the registration page.
  const userLogout = async () => {
    try {
      const response = await UserService.logout()
      await router.push(REGISTRATION_ROUTE)
      user.value = {}
      token.value = ''
      sessionStorage.removeItem('token')
      return response
    } catch (err) {
      console.log(err)
    }
  }

  // Start phone-number sign-in: send the SMS code and stash the
  // confirmationResult in `user` so verifyOtp can complete the flow.
  const onPhoneLogin = async (recaptchaVerifier, number) => {
    const appVerifier = recaptchaVerifier.value;
    try {
      const confirmationResult = await signInWithPhoneNumber(auth, number.value, appVerifier);
      user.value = confirmationResult;
      router.push(CODE_ROUTE)
      console.log(user.value)
    } catch (error) {
      console.error("Error sending verification code", error);
    }
  }

  // Finish phone sign-in by confirming the one-time code.
  // NOTE(review): assumes user.value still holds the confirmationResult
  // set by onPhoneLogin — verify callers preserve that ordering.
  const verifyOtp = async (code) => {
    try {
      const response = await user.value.confirm(code.value)
      token.value = response.user.accessToken
      sessionStorage.setItem('token', token.value)
      user.value = response.user
      return response
    }
    catch (e) {
      console.log(e)
    }
  }

  return {
    user, token, userRegistrationWithEmail, userLoginWithEmail, userLogout, userLoginWithGoogle, onPhoneLogin, verifyOtp
  }
})
|
import { useMutation, useQueryClient } from "@tanstack/react-query";
import { useAuth } from "@clerk/clerk-react";
import { useNavigate } from "@tanstack/react-router";
/**
 * Deletes a team via the REST API.
 *
 * @param authToken - Bearer token placed in the Authorization header.
 * @param team_id - Identifier of the team to delete.
 * @returns The raw fetch Response on success.
 * @throws Error when the server answers with a non-2xx status. Plain fetch
 *   only rejects on network errors, so without this check HTTP failures
 *   would look like successes to react-query's onError handler.
 */
async function deleteTeam(
  authToken: string,
  team_id: string
): Promise<Response> {
  const response = await fetch(`/api/teams/${team_id}`, {
    method: "DELETE",
    headers: { Authorization: `Bearer ${authToken}` },
  });
  if (!response.ok) {
    throw new Error(
      `Failed to delete team ${team_id}: ${response.status} ${response.statusText}`
    );
  }
  return response;
}
/**
 * React-query mutation hook that deletes the given team, invalidates the
 * cached "teams" query on success and navigates back to the teams overview.
 * Errors are logged and surfaced to the user via alert().
 */
const useDeleteTeam = (team_id: string) => {
  const queryClient = useQueryClient();
  const navigate = useNavigate();
  const { getToken } = useAuth();

  // Resolve a fresh Clerk token for every mutation attempt.
  const mutationFn = async () => {
    const authToken = (await getToken()) as string;
    return deleteTeam(authToken, team_id);
  };

  return useMutation({
    mutationKey: ["teams"],
    mutationFn,
    onSuccess: () => {
      queryClient.invalidateQueries({ queryKey: ["teams"] });
      navigate({ to: "/teams" });
    },
    onError: (error) => {
      console.error(error);
      alert(error.message);
    },
  });
};
|
<template>
<section>
<v-row no-gutters>
<v-col cols="12" md="6" class="pa-0 green lighten-5">
<v-card color="transparent" flat min-height="100vh">
<v-img
class="top-bubble"
:src="require('@/assets/bubbles1.png')"
contain
max-height="200"
/>
<v-card-title
class="display-3 font-weight-light d-flex green--text text--darken-3"
>About me</v-card-title
>
<v-card-text class="title font-weight-light"
>I have been in a <strong>software development</strong> world since
2015 and I would say <strong>web</strong> development has always
been my <strong>strongest</strong> suit. I work
<strong>hard</strong> and <strong>effectively</strong> to provide
<strong>clients</strong> with what they need from idea to
<strong>final product</strong>.
</v-card-text>
<v-row no-gutters class="px-4 pb-4">
<v-col cols="12">
<div
class="display-1 font-weight-light green--text text--darken-4"
>
Skills
</div>
</v-col>
<v-col cols="12">
<v-card color="transparent" class="mt-2">
<v-row no-gutters class="pa-4">
<v-col
class="mb-2"
cols="12"
v-for="(entity, index) in skills"
:key="index"
>
<div class="d-flex flex-row justify-space-between">
<div class="text-uppercase title font-weight-light">
{{ entity.title }}
</div>
<div class="title font-weight-light">
{{ entity.percentage }}%
</div>
</div>
<v-progress-linear
:color="entity.color"
:value="entity.percentage"
height="10"
striped
></v-progress-linear>
</v-col>
<v-col cols="12" class="mt-3">
<span v-for="(entity, index) in tags" :key="index">
<v-chip class="ma-1" color="green lighten-3">{{
entity
}}</v-chip>
</span>
</v-col>
</v-row>
</v-card>
</v-col>
</v-row>
</v-card>
</v-col>
<v-col cols="12" md="6" class="pa-0">
<v-img
:src="require('@/assets/marathon.jpg')"
height="100vh"
class="hidden-sm-and-down"
/>
<v-img
:src="require('@/assets/profile_picture.jpg')"
height="100vh"
class="hidden-md-and-up"
/>
</v-col>
</v-row>
</section>
</template>
<script>
export default {
name: "SectionAbout",
data: () => ({
skills: [
{
title: "Web Developement",
percentage: 95,
color: "blue"
},
{
title: "Web Design",
percentage: 85,
color: "orange"
},
{
title: "Web Support",
percentage: 90,
color: "lime darken-2"
}
],
tags: [
"JavasScript",
"HTML",
"CSS",
"Vue",
"Docker",
"Linux",
"Django REST framework",
"REST API",
"CircleCI",
"Python",
"Node",
"Git",
"PostgreSQL",
"React",
"Java",
"Apache Solr",
"Angular",
"TypeScript",
"NGINX",
"etc."
]
}),
methods: {
calculateRandomColor() {
return (
"#" +
Math.floor(Math.random() * 16777215)
.toString(16)
.padStart(6, "0")
);
}
}
};
</script>
<style scoped></style>
|
// Copyright 2018 The Cockroach Authors.
//
// Use of this software is governed by the Business Source License
// included in the file licenses/BSL.txt.
//
// As of the Change Date specified in that file, in accordance with
// the Business Source License, use of this software will be governed
// by the Apache License, Version 2.0, included in the file
// licenses/APL.txt.
package roachprod
import (
"bytes"
"context"
"encoding/json"
"fmt"
"io"
"net"
"net/http"
"net/url"
"os"
"os/exec"
"path"
"path/filepath"
"regexp"
"runtime"
"slices"
"sort"
"strconv"
"strings"
"sync"
"time"
"github.com/DataExMachina-dev/side-eye-go/sideeyeclient"
"github.com/cockroachdb/cockroach/pkg/build"
"github.com/cockroachdb/cockroach/pkg/cli/exit"
"github.com/cockroachdb/cockroach/pkg/cmd/roachprod/grafana"
"github.com/cockroachdb/cockroach/pkg/roachprod/cloud"
"github.com/cockroachdb/cockroach/pkg/roachprod/config"
"github.com/cockroachdb/cockroach/pkg/roachprod/fluentbit"
"github.com/cockroachdb/cockroach/pkg/roachprod/install"
"github.com/cockroachdb/cockroach/pkg/roachprod/lock"
"github.com/cockroachdb/cockroach/pkg/roachprod/logger"
"github.com/cockroachdb/cockroach/pkg/roachprod/opentelemetry"
"github.com/cockroachdb/cockroach/pkg/roachprod/prometheus"
"github.com/cockroachdb/cockroach/pkg/roachprod/promhelperclient"
"github.com/cockroachdb/cockroach/pkg/roachprod/vm"
"github.com/cockroachdb/cockroach/pkg/roachprod/vm/aws"
"github.com/cockroachdb/cockroach/pkg/roachprod/vm/azure"
"github.com/cockroachdb/cockroach/pkg/roachprod/vm/gce"
"github.com/cockroachdb/cockroach/pkg/roachprod/vm/local"
"github.com/cockroachdb/cockroach/pkg/server/debug/replay"
"github.com/cockroachdb/cockroach/pkg/util/ctxgroup"
"github.com/cockroachdb/cockroach/pkg/util/httputil"
"github.com/cockroachdb/cockroach/pkg/util/retry"
"github.com/cockroachdb/cockroach/pkg/util/syncutil"
"github.com/cockroachdb/cockroach/pkg/util/timeutil"
"github.com/cockroachdb/errors"
"github.com/cockroachdb/errors/oserror"
"golang.org/x/sys/unix"
)
// MalformedClusterNameError is returned when the cluster name passed to Create is invalid.
type MalformedClusterNameError struct {
	name        string   // the rejected cluster name
	reason      string   // human-readable explanation of the rejection
	suggestions []string // acceptable alternative names, if any
}

// Error implements the error interface.
func (e *MalformedClusterNameError) Error() string {
	return fmt.Sprintf("Malformed cluster name %s, %s. Did you mean one of %s", e.name, e.reason, e.suggestions)
}
// findActiveAccounts is a hook for tests to inject their own FindActiveAccounts
// implementation. i.e. unit tests that don't want to actually access a provider.
var findActiveAccounts = vm.FindActiveAccounts

// verifyClusterName ensures that the given name conforms to
// our naming pattern of "<username>-<clustername>". The
// username must match one of the vm.Provider account names
// or the --username override.
func verifyClusterName(l *logger.Logger, clusterName, username string) error {
	if clusterName == "" {
		return fmt.Errorf("cluster name cannot be blank")
	}
	// Reject names with characters that are not DNS-safe, suggesting the
	// sanitized spelling instead.
	sanitizedName := vm.DNSSafeName(clusterName)
	if sanitizedName != clusterName {
		return &MalformedClusterNameError{name: clusterName, reason: "invalid characters", suggestions: []string{sanitizedName}}
	}
	// Local cluster names are exempt from the <username>- prefix rule.
	if config.IsLocalClusterName(clusterName) {
		return nil
	}
	// Use the vm.Provider account names, or --username.
	var accounts []string
	if len(username) > 0 {
		cleanAccount := vm.DNSSafeName(username)
		if cleanAccount != username {
			l.Printf("WARN: using `%s' as username instead of `%s'", cleanAccount, username)
		}
		accounts = []string{cleanAccount}
	} else {
		// No override: collect the deduplicated, DNS-sanitized set of active
		// provider accounts (via the test-injectable hook above).
		seenAccounts := map[string]bool{}
		active, err := findActiveAccounts(l)
		if err != nil {
			return err
		}
		for _, account := range active {
			if !seenAccounts[account] {
				seenAccounts[account] = true
				cleanAccount := vm.DNSSafeName(account)
				if cleanAccount != account {
					l.Printf("WARN: using `%s' as username instead of `%s'", cleanAccount, account)
				}
				accounts = append(accounts, cleanAccount)
			}
		}
	}
	// If we see <account>-<something>, accept it.
	for _, account := range accounts {
		if strings.HasPrefix(clusterName, account+"-") && len(clusterName) > len(account)+1 {
			return nil
		}
	}
	// Try to pick out a reasonable cluster name from the input.
	var suffix string
	var reason string
	if i := strings.Index(clusterName, "-"); i != -1 {
		// The user specified a username prefix, but it didn't match an active
		// account name. For example, assuming the account is "peter", `roachprod
		// create joe-perf` should be specified as `roachprod create joe-perf -u
		// joe`.
		suffix = clusterName[i+1:]
		reason = "username prefix does not match an active account name"
	} else {
		// The user didn't specify a username prefix. For example, assuming the
		// account is "peter", `roachprod create perf` should be specified as
		// `roachprod create peter-perf`.
		suffix = clusterName
		reason = "cluster name should start with a username prefix: <username>-<clustername>"
	}
	// Suggest acceptable cluster names.
	var suggestions []string
	for _, account := range accounts {
		suggestions = append(suggestions, fmt.Sprintf("%s-%s", account, suffix))
	}
	return &MalformedClusterNameError{name: clusterName, reason: reason, suggestions: suggestions}
}
// sortedClusters returns the names of all locally cached clusters in
// lexicographic order, holding the shared map's lock for the duration of
// the read.
func sortedClusters() []string {
	var names []string
	syncedClusters.mu.Lock()
	defer syncedClusters.mu.Unlock()
	for name := range syncedClusters.clusters {
		names = append(names, name)
	}
	slices.Sort(names)
	return names
}
// newCluster initializes a SyncedCluster for the given cluster name.
//
// The cluster name can include a node selector (e.g. "foo:1-3"). If the
// selector is missing, the returned cluster includes all the machines.
func newCluster(
	l *logger.Logger, name string, opts ...install.ClusterSettingOption,
) (*install.SyncedCluster, error) {
	clusterSettings := install.MakeClusterSettings(opts...)

	// Split off an optional ":selector" suffix; default to every node.
	nodeSelector := "all"
	switch parts := strings.Split(name, ":"); len(parts) {
	case 2:
		name, nodeSelector = parts[0], parts[1]
	case 1:
		name = parts[0]
	case 0:
		return nil, fmt.Errorf("no cluster specified")
	default:
		return nil, fmt.Errorf("invalid cluster name: %s", name)
	}

	metadata, ok := readSyncedClusters(name)
	if !ok {
		err := errors.Newf(`unknown cluster: %s`, name)
		err = errors.WithHintf(err, "\nAvailable clusters:\n %s\n", strings.Join(sortedClusters(), "\n "))
		err = errors.WithHint(err, `Use "roachprod sync" to update the list of available clusters.`)
		return nil, err
	}

	if clusterSettings.DebugDir == "" {
		clusterSettings.DebugDir = os.ExpandEnv(config.DefaultDebugDir)
	}

	c, err := install.NewSyncedCluster(metadata, nodeSelector, clusterSettings)
	if err != nil {
		return nil, err
	}
	return c, nil
}
// userClusterNameRegexp returns a regexp that matches all clusters owned by the
// current user.
func userClusterNameRegexp(l *logger.Logger) (*regexp.Regexp, error) {
	// In general, we expect that users will have the same
	// account name across the services they're using,
	// but we still want to function even if this is not
	// the case.
	accounts, err := vm.FindActiveAccounts(l)
	if err != nil {
		return nil, err
	}
	// Build one "(^account-)" alternative per distinct account name.
	seen := map[string]bool{}
	var alternatives []string
	for _, account := range accounts {
		if seen[account] {
			continue
		}
		seen[account] = true
		alternatives = append(alternatives, fmt.Sprintf("(^%s-)", regexp.QuoteMeta(account)))
	}
	return regexp.Compile(strings.Join(alternatives, "|"))
}
// Version returns version/build information.
func Version(l *logger.Logger) string {
	return build.GetInfo().Long()
}
// CachedClusters iterates over all roachprod clusters from the local cache, in
// alphabetical order, reporting each through fn as (name, VM count).
//
// FIX: a cluster missing from the cache (e.g. racing with a concurrent cache
// update) is now skipped rather than aborting the iteration — the previous
// early return silently dropped every remaining cluster.
func CachedClusters(fn func(clusterName string, numVMs int)) {
	for _, name := range sortedClusters() {
		c, ok := CachedCluster(name)
		if !ok {
			continue // skip just this entry instead of bailing out entirely
		}
		fn(c.Name, len(c.VMs))
	}
}
// CachedCluster returns the cached information about a given cluster.
// The boolean is false when the cluster is not present in the local cache.
func CachedCluster(name string) (*cloud.Cluster, bool) {
	return readSyncedClusters(name)
}
// ClearClusterCache indicates if we should ever clear the local cluster
// cache of clusters. This flag is set to false during Azure nightly runs,
// as the large amount of concurrent resources created will cause Azure.List
// to return stale VM information with no error. Similar to when there is an
// error, we do not want to remove any clusters from the cache.
// Consulted by Sync before pruning clusters missing from the cloud listing.
var ClearClusterCache = true
// Sync grabs an exclusive lock on the roachprod state and then proceeds to
// read the current state from the cloud and write it out to disk. The locking
// protects both the reading and the writing in order to prevent the hazard
// caused by concurrent goroutines reading cloud state in a different order
// than writing it to disk.
func Sync(l *logger.Logger, options vm.ListOptions) (*cloud.Cloud, error) {
	if !config.Quiet {
		l.Printf("Syncing...")
	}
	// Serialize cache reads/writes across concurrent roachprod invocations.
	unlock, err := lock.AcquireFilesystemLock(config.DefaultLockPath)
	if err != nil {
		return nil, err
	}
	defer unlock()
	cld, err := cloud.ListCloud(l, options)
	// ListCloud may fail for a provider, but we still want to continue as
	// the cluster the caller is trying to add and use may have been found.
	// Instead, we tell syncClustersCache not to remove any clusters as we
	// can't tell if a cluster was deleted or not found due to the error.
	// The next successful ListCloud call will clean it up.
	overwriteMissingClusters := err == nil && ClearClusterCache
	if err := syncClustersCache(l, cld, overwriteMissingClusters); err != nil {
		return nil, err
	}
	// Flatten every VM across all clusters for the DNS refresh below.
	var vms vm.List
	for _, c := range cld.Clusters {
		vms = append(vms, c.VMs...)
	}
	// Figure out if we're going to overwrite the DNS entries. We don't want to
	// overwrite if we don't have all the VMs of interest, so we only do it if we
	// have a list of all VMs from both AWS and GCE (so if both providers have
	// been used to get the VMs and for GCP also if we listed the VMs in the
	// default project).
	refreshDNS := true
	if p := vm.Providers[gce.ProviderName]; !p.Active() {
		refreshDNS = false
	} else {
		var defaultProjectFound bool
		for _, prj := range p.(*gce.Provider).GetProjects() {
			if prj == gce.DefaultProject() {
				defaultProjectFound = true
				break
			}
		}
		if !defaultProjectFound {
			refreshDNS = false
		}
	}
	// If there are no DNS required providers, we shouldn't refresh DNS,
	// it's probably a misconfiguration.
	if len(config.DNSRequiredProviders) == 0 {
		refreshDNS = false
	} else {
		// If any of the required providers is not active, we shouldn't refresh DNS.
		for _, p := range config.DNSRequiredProviders {
			if !vm.Providers[p].Active() {
				refreshDNS = false
				break
			}
		}
	}
	// DNS entries are maintained in the GCE DNS registry for all vms, from all
	// clouds.
	if refreshDNS {
		if !config.Quiet {
			l.Printf("Refreshing DNS entries...")
		}
		// Best effort: a DNS update failure does not fail the sync.
		if err := gce.SyncDNS(l, vms); err != nil {
			l.Errorf("failed to update DNS: %v", err)
		}
	} else {
		if !config.Quiet {
			l.Printf("Not refreshing DNS entries. We did not have all the VMs.")
		}
	}
	// Clean up stale SSH state on each provider, one provider at a time.
	if err := vm.ProvidersSequential(vm.AllProviderNames(), func(p vm.Provider) error {
		return p.CleanSSH(l)
	}); err != nil {
		return nil, err
	}
	return cld, nil
}
// List returns a cloud.Cloud struct of all roachprod clusters matching clusterNamePattern.
// Alternatively, the 'listMine' option can be provided to get the clusters that are owned
// by the current user.
func List(
	l *logger.Logger, listMine bool, clusterNamePattern string, opts vm.ListOptions,
) (cloud.Cloud, error) {
	if err := LoadClusters(); err != nil {
		return cloud.Cloud{}, err
	}

	// Resolve the name filter: everything by default, the current user's
	// clusters for listMine, or the caller-supplied pattern.
	listPattern := regexp.MustCompile(".*")
	switch {
	case clusterNamePattern == "":
		if listMine {
			var err error
			listPattern, err = userClusterNameRegexp(l)
			if err != nil {
				return cloud.Cloud{}, err
			}
		}
	default:
		if listMine {
			return cloud.Cloud{}, errors.New("'mine' option cannot be combined with 'pattern'")
		}
		compiled, err := regexp.Compile(clusterNamePattern)
		if err != nil {
			return cloud.Cloud{}, errors.Wrapf(err, "could not compile regex pattern: %s", clusterNamePattern)
		}
		listPattern = compiled
	}

	cld, err := Sync(l, opts)
	if err != nil {
		return cloud.Cloud{}, err
	}

	// Keep only the matching clusters, but always report every bad instance.
	return cloud.Cloud{
		Clusters:     cld.Clusters.FilterByName(listPattern),
		BadInstances: cld.BadInstances,
	}, nil
}
// TruncateString truncates a string to at most maxLength bytes, replacing the
// tail with "..." when the input is too long. Strings no longer than
// maxLength are returned unchanged.
//
// FIX: a maxLength of 3 or less cannot accommodate the "..." suffix; such
// inputs previously panicked via a negative slice index (s[:maxLength-3]).
// They now hard-truncate to maxLength bytes (or "" for a negative maxLength).
func TruncateString(s string, maxLength int) string {
	if len(s) <= maxLength {
		return s
	}
	if maxLength <= 3 {
		if maxLength < 0 {
			return ""
		}
		return s[:maxLength]
	}
	return s[:maxLength-3] + "..."
}
// Run runs a command on the nodes in a cluster.
func Run(
	ctx context.Context,
	l *logger.Logger,
	clusterName, SSHOptions, processTag string,
	secure bool,
	stdout, stderr io.Writer,
	cmdArray []string,
	options install.RunOptions,
) error {
	cluster, err := getClusterFromCache(l, clusterName, install.SecureOption(secure), install.TagOption(processTag))
	if err != nil {
		return err
	}

	// An empty command means the caller wants an interactive "ssh" session.
	if len(cmdArray) == 0 {
		return cluster.SSH(ctx, l, strings.Split(SSHOptions, " "), cmdArray)
	}

	// Default to the nodes implied by the cluster name when none were given.
	if len(options.Nodes) == 0 {
		options.Nodes = cluster.TargetNodes()
	}

	// The truncated form only serves as a short human-readable task title.
	joined := strings.TrimSpace(strings.Join(cmdArray, " "))
	return cluster.Run(ctx, l, stdout, stderr, options, TruncateString(joined, 30), joined)
}
// RunWithDetails runs a command on the nodes in a cluster.
// Unlike Run, it returns per-node result details to the caller instead of
// streaming output to writers.
func RunWithDetails(
	ctx context.Context,
	l *logger.Logger,
	clusterName, SSHOptions, processTag string,
	secure bool,
	cmdArray []string,
	options install.RunOptions,
) ([]install.RunResultDetails, error) {
	c, err := getClusterFromCache(l, clusterName, install.SecureOption(secure), install.TagOption(processTag))
	if err != nil {
		return nil, err
	}
	// Use "ssh" if an interactive session was requested (i.e. there is no
	// remote command to run).
	if len(cmdArray) == 0 {
		return nil, c.SSH(ctx, l, strings.Split(SSHOptions, " "), cmdArray)
	}
	// If no nodes were specified, run on nodes derived from the clusterName.
	if len(options.Nodes) == 0 {
		options.Nodes = c.TargetNodes()
	}
	// The 30-char truncation only affects the human-readable task title.
	cmd := strings.TrimSpace(strings.Join(cmdArray, " "))
	return c.RunWithDetails(ctx, l, options, TruncateString(cmd, 30), cmd)
}
// SQL runs `cockroach sql` on a remote cluster. If a single node is passed,
// an interactive session may start.
//
// NOTE: When querying a single-node in a cluster, a pseudo-terminal is attached
// to ssh which may result in an _interactive_ ssh session.
//
// CAUTION: this function should not be used by roachtest writers. Use syncedCluser.ExecSQL()
// instead.
func SQL(
	ctx context.Context,
	l *logger.Logger,
	clusterName string,
	secure bool,
	tenantName string,
	tenantInstance int,
	authMode install.PGAuthMode,
	database string,
	cmdArray []string,
) error {
	c, err := getClusterFromCache(l, clusterName, install.SecureOption(secure))
	if err != nil {
		return err
	}
	// Single target node: may hand over to an interactive session.
	if len(c.Nodes) == 1 {
		return c.ExecOrInteractiveSQL(ctx, l, tenantName, tenantInstance, authMode, database, cmdArray)
	}
	// Multiple nodes: execute everywhere and pretty-print per-node results.
	results, err := c.ExecSQL(ctx, l, c.Nodes, tenantName, tenantInstance, authMode, database, cmdArray)
	if err != nil {
		return err
	}
	for i, r := range results {
		printSQLResult(l, i, r, cmdArray)
	}
	return nil
}
// printSQLResult does a best-effort attempt to print single-result-row-per-node
// result-sets gathered from many nodes as one-line-per-node instead of header
// separated n-line blocks, to improve the overall readability, falling back to
// normal header-plus-response-block per node otherwise.
func printSQLResult(l *logger.Logger, i int, r *install.RunResultDetails, args []string) {
	// Detect a request for table-formatted output ("--format=table" or
	// "--format table").
	tableFormatted := false
	// NOTE: the loop variable deliberately shadows the parameter i inside the
	// loop; the parameter (the result index) is used again after the loop.
	for i, c := range args {
		if c == "--format=table" || c == "--format" && len(args) > i+1 && args[i+1] == "table" {
			tableFormatted = true
			break
		}
	}
	singleResultLen, resultLine := 3, 1 // 3 is header, result, empty-trailing.
	if tableFormatted {
		// table output adds separator above the result, and a trailing row count.
		singleResultLen, resultLine = 5, 2
	}
	// If we got a header line and zero or one result lines, we can print the
	// result line as one-line-per-node, rather than a header per node and then
	// its n result lines, to make the aggregate output more readable. We can
	// detect this by splitting on newline into only as many lines as we expect,
	// and seeing if the final piece is empty or has the rest of >1 results in it.
	lines := strings.SplitN(r.CombinedOut, "\n", singleResultLen)
	if len(lines) > 0 && lines[len(lines)-1] == "" {
		if i == 0 { // Print the header line of the results once.
			fmt.Printf(" %s\n", lines[0])
			if tableFormatted && len(lines) > 1 {
				fmt.Printf(" %s\n", lines[1])
			}
		}
		// Print the result line if there is one.
		if len(lines) > resultLine {
			fmt.Printf("%2d: %s\n", r.Node, lines[resultLine])
			return
		}
		// No result from this node, so print a blank for its ID.
		fmt.Printf("%2d:\n", r.Node)
		return
	}
	// Just print the roachprod header identifying the node, then the node's whole
	// response, including its internal header row.
	l.Printf("node %d:\n%s", r.Node, r.CombinedOut)
}
// IP gets the ip addresses of the nodes in a cluster.
// With external set, the public host address is returned; otherwise the
// internal IP is looked up per node.
func IP(l *logger.Logger, clusterName string, external bool) ([]string, error) {
	c, err := getClusterFromCache(l, clusterName)
	if err != nil {
		return nil, err
	}
	nodes := c.TargetNodes()
	ips := make([]string, len(nodes))
	for idx, node := range nodes {
		if external {
			ips[idx] = c.Host(node)
			continue
		}
		internal, err := c.GetInternalIP(node)
		if err != nil {
			return nil, err
		}
		ips[idx] = internal
	}
	return ips, nil
}
// Status retrieves the status of nodes in a cluster.
func Status(
	ctx context.Context, l *logger.Logger, clusterName, processTag string,
) ([]install.NodeStatus, error) {
	cluster, err := getClusterFromCache(l, clusterName, install.TagOption(processTag))
	if err != nil {
		return nil, err
	}
	return cluster.Status(ctx, l)
}
// Stage stages release and edge binaries to the cluster.
// stageOS, stageDir, version can be "" to use default values
func Stage(
	ctx context.Context,
	l *logger.Logger,
	clusterName string,
	stageOS, stageArch, stageDir, applicationName, version string,
) error {
	c, err := getClusterFromCache(l, clusterName)
	if err != nil {
		return err
	}

	// Default to linux/amd64, or the host platform for local clusters;
	// explicit stageOS/stageArch always win. (Locals renamed from os/arch so
	// they no longer shadow the os package.)
	targetOS, targetArch := "linux", "amd64"
	if c.IsLocal() {
		targetOS, targetArch = runtime.GOOS, runtime.GOARCH
	}
	if stageOS != "" {
		targetOS = stageOS
	}
	if stageArch != "" {
		targetArch = stageArch
	}

	// N.B. it's technically possible to stage a binary for a different OS/arch; e.g., emulated amd64 on mac silicon.
	// However, we don't perform any other validation, hence a warning message is appropriate.
	if c.IsLocal() && (targetOS != runtime.GOOS || targetArch != runtime.GOARCH) {
		l.Printf("WARN: locally staging %s/%s binaries on %s/%s", targetOS, targetArch, runtime.GOOS, runtime.GOARCH)
	}

	dir := "."
	if stageDir != "" {
		dir = stageDir
	}
	return install.StageApplication(ctx, l, c, applicationName, version, targetOS, vm.CPUArch(targetArch), dir)
}
// Reset resets all VMs in a cluster.
func Reset(l *logger.Logger, clusterName string) error {
	if err := LoadClusters(); err != nil {
		return err
	}
	// Local clusters have no provider VMs to reset.
	if config.IsLocalClusterName(clusterName) {
		return nil
	}
	cluster, err := getClusterFromCloud(l, clusterName)
	if err != nil {
		return err
	}
	// Fan the reset out so each provider handles its own subset of VMs.
	return vm.FanOut(cluster.VMs, func(p vm.Provider, vms vm.List) error {
		return p.Reset(l, vms)
	})
}
// SetupSSH sets up the keys and host keys for the vms in the cluster.
func SetupSSH(ctx context.Context, l *logger.Logger, clusterName string) error {
	if err := LoadClusters(); err != nil {
		return err
	}
	// Sync first so the cloud's current view of the cluster is cached locally.
	cld, err := Sync(l, vm.ListOptions{})
	if err != nil {
		return err
	}
	cloudCluster, ok := cld.Clusters[clusterName]
	if !ok {
		return fmt.Errorf("could not find %s in list of cluster", clusterName)
	}
	// Group the cluster's zones by provider so each provider only configures
	// the zones it owns.
	zones := make(map[string][]string, len(cloudCluster.VMs))
	for _, vm := range cloudCluster.VMs {
		zones[vm.Provider] = append(zones[vm.Provider], vm.Zone)
	}
	providers := make([]string, 0)
	for provider := range zones {
		providers = append(providers, provider)
	}
	// Configure SSH for machines in the zones we operate on.
	// Each provider runs under the filesystem lock to serialize against
	// concurrent roachprod invocations.
	if err := vm.ProvidersSequential(providers, func(p vm.Provider) error {
		unlock, lockErr := lock.AcquireFilesystemLock(config.DefaultLockPath)
		if lockErr != nil {
			return lockErr
		}
		defer unlock()
		return p.ConfigSSH(l, zones[p.Name()])
	}); err != nil {
		return err
	}
	if err = cloudCluster.PrintDetails(l); err != nil {
		return err
	}
	// Run ssh-keygen -R serially on each new VM in case an IP address has been recycled
	// (failures are logged but non-fatal — the key may simply not be known yet).
	for _, v := range cloudCluster.VMs {
		cmd := exec.Command("ssh-keygen", "-R", v.PublicIP)
		out, err := cmd.CombinedOutput()
		if err != nil {
			l.Printf("could not clear ssh key for hostname %s:\n%s", v.PublicIP, string(out))
		}
	}
	// Wait for the nodes in the cluster to start.
	// Reload so newCluster sees the freshly synced metadata.
	if err := LoadClusters(); err != nil {
		return err
	}
	installCluster, err := newCluster(l, clusterName)
	if err != nil {
		return err
	}
	if err := installCluster.Wait(ctx, l); err != nil {
		return err
	}
	// Fetch public keys from gcloud to set up ssh access for all users into the
	// shared ubuntu user.
	authorizedKeys, err := gce.GetUserAuthorizedKeys()
	if err != nil {
		return errors.Wrap(err, "failed to retrieve authorized keys from gcloud")
	}
	installCluster.AuthorizedKeys = authorizedKeys.AsSSH()
	return installCluster.SetupSSH(ctx, l)
}
// Extend extends the lifetime of the specified cluster to prevent it from being destroyed.
func Extend(l *logger.Logger, clusterName string, lifetime time.Duration) error {
	if err := LoadClusters(); err != nil {
		return err
	}
	cluster, err := getClusterFromCloud(l, clusterName)
	if err != nil {
		return err
	}
	if err := cloud.ExtendCluster(l, cluster, lifetime); err != nil {
		return err
	}
	// Re-fetch so the printed details reflect the new expiration.
	cluster, err = getClusterFromCloud(l, clusterName)
	if err != nil {
		return err
	}
	return cluster.PrintDetails(l)
}
// DefaultBackupSchedule is the default scheduled backup: a full backup every
// hour and an incremental every 15 minutes, with the first run starting
// immediately.
const DefaultBackupSchedule = `RECURRING '*/15 * * * *' FULL BACKUP '@hourly' WITH SCHEDULE OPTIONS first_run = 'now'`
// DefaultStartOpts returns a StartOpts populated with default values.
func DefaultStartOpts() install.StartOpts {
	return install.StartOpts{
		// Store/process defaults.
		EncryptedStores: false,
		NumFilesLimit:   config.DefaultNumFilesLimit,
		StoreCount:      1,
		// Init defaults.
		SkipInit:   false,
		InitTarget: 1,
		// Backup defaults: the schedule text is set but scheduling is off.
		ScheduleBackups:    false,
		ScheduleBackupArgs: DefaultBackupSchedule,
		// Virtual cluster and port defaults (0 = pick automatically).
		VirtualClusterID:   2,
		VirtualClusterName: install.SystemInterfaceName,
		SQLPort:            0,
		AdminUIPort:        0,
	}
}
// Start starts nodes on a cluster.
func Start(
	ctx context.Context,
	l *logger.Logger,
	clusterName string,
	startOpts install.StartOpts,
	clusterSettingsOpts ...install.ClusterSettingOption,
) error {
	cluster, err := getClusterFromCache(l, clusterName, clusterSettingsOpts...)
	if err != nil {
		return err
	}
	if err := cluster.Start(ctx, l, startOpts); err != nil {
		return err
	}
	// Best effort: errors are logged inside and ignored.
	updatePrometheusTargets(ctx, l, cluster)
	return nil
}
// UpdateTargets updates prometheus target configurations for a cluster.
func UpdateTargets(
	ctx context.Context,
	l *logger.Logger,
	clusterName string,
	clusterSettingsOpts ...install.ClusterSettingOption,
) error {
	if err := LoadClusters(); err != nil {
		return err
	}
	cluster, err := newCluster(l, clusterName, clusterSettingsOpts...)
	if err != nil {
		return err
	}
	// Best effort: errors are logged inside and ignored.
	updatePrometheusTargets(ctx, l, cluster)
	return nil
}
// updatePrometheusTargets updates the prometheus instance cluster config. Any error is logged and ignored.
func updatePrometheusTargets(ctx context.Context, l *logger.Logger, c *install.SyncedCluster) {
	nodeIPPorts := make(map[int]*promhelperclient.NodeInfo)
	// Guards nodeIPPorts, which the goroutines below fill concurrently.
	nodeIPPortsMutex := syncutil.RWMutex{}
	var wg sync.WaitGroup
	for _, node := range c.Nodes {
		// Only VMs on GCE and in a supported project get registered.
		if _, ok := promhelperclient.SupportedPromProjects[c.VMs[node-1].Project]; ok &&
			c.VMs[node-1].Provider == gce.ProviderName {
			wg.Add(1)
			// One goroutine per node; node index and VM are passed as
			// arguments to pin their values for the closure.
			go func(index int, v vm.VM) {
				defer wg.Done()
				// only gce is supported for prometheus
				desc, err := c.DiscoverService(ctx, install.Node(index), "", install.ServiceTypeUI, 0)
				if err != nil {
					// Skip this node; the others still get registered.
					l.Errorf("error getting the port for node %d: %v", index, err)
					return
				}
				nodeInfo := fmt.Sprintf("%s:%d", v.PrivateIP, desc.Port)
				nodeIPPortsMutex.Lock()
				// ensure atomicity in map update
				nodeIPPorts[index] = &promhelperclient.NodeInfo{Target: nodeInfo, CustomLabels: createLabels(v)}
				nodeIPPortsMutex.Unlock()
			}(int(node), c.VMs[node-1])
		}
	}
	wg.Wait()
	if len(nodeIPPorts) > 0 {
		// Push the collected targets; failures are logged and ignored.
		if err := promhelperclient.DefaultPromClient.UpdatePrometheusTargets(ctx,
			c.Name, false, nodeIPPorts, !c.Secure, l); err != nil {
			l.Errorf("creating cluster config failed for the ip:ports %v: %v", nodeIPPorts, err)
		}
	}
}
// regionRegEx is the regex to extract the region label from zone available as vm property
// (it strips a trailing single-letter "-<a..f>" zone suffix).
var regionRegEx = regexp.MustCompile("(^.+[0-9]+)(-[a-f]$)")

// createLabels returns the labels to be populated in the target configuration in prometheus
func createLabels(v vm.VM) map[string]string {
	labels := map[string]string{
		"job":      "cockroachdb",
		"cluster":  v.Labels["cluster"],
		"instance": v.Name,
		"host_ip":  v.PrivateIP,
		"project":  v.Project,
		"zone":     v.Zone,
		"tenant":   install.SystemInterfaceName,
	}
	// Derive the region label from the zone when the zone matches the
	// expected "<region>-<letter>" shape.
	if m := regionRegEx.FindStringSubmatch(v.Zone); len(m) > 1 {
		labels["region"] = m[1]
	}
	// the following labels are present if the test labels are added before the VM is started
	for _, key := range []string{"test_name", "test_run_id"} {
		if val, ok := v.Labels[key]; ok {
			labels[key] = val
		}
	}
	return labels
}
// Monitor monitors the status of cockroach nodes in a cluster.
func Monitor(
	ctx context.Context, l *logger.Logger, clusterName string, opts install.MonitorOpts,
) (chan install.NodeMonitorInfo, error) {
	cluster, err := newCluster(l, clusterName)
	if err != nil {
		return nil, err
	}
	return cluster.Monitor(l, ctx, opts), nil
}
// StopOpts is used to pass options to Stop.
type StopOpts struct {
	// ProcessTag restricts the stop to processes carrying this tag.
	ProcessTag string
	// Sig is the signal number to send (9 = SIGKILL by default).
	Sig int
	// If Wait is set, roachprod waits until the PID disappears (i.e. the
	// process has terminated).
	Wait bool // forced to true when Sig == 9
	// If MaxWait is set, roachprod waits that approximate number of seconds
	// until the PID disappears.
	MaxWait int
	// Options that only apply to StopServiceForVirtualCluster
	VirtualClusterID   int
	VirtualClusterName string
	SQLInstance        int
}

// DefaultStopOpts returns StopOpts populated with the default values used by Stop.
func DefaultStopOpts() StopOpts {
	return StopOpts{
		ProcessTag: "",
		Sig:        9, // SIGKILL; per the field docs this implies waiting
		Wait:       false,
		MaxWait:    0,
	}
}
// Stop stops nodes on a cluster.
func Stop(ctx context.Context, l *logger.Logger, clusterName string, opts StopOpts) error {
	cluster, err := getClusterFromCache(l, clusterName, install.TagOption(opts.ProcessTag))
	if err != nil {
		return err
	}
	return cluster.Stop(ctx, l, opts.Sig, opts.Wait, opts.MaxWait, "")
}
// Signal sends a signal to nodes in the cluster.
func Signal(ctx context.Context, l *logger.Logger, clusterName string, sig int) error {
	cluster, err := getClusterFromCache(l, clusterName)
	if err != nil {
		return err
	}
	return cluster.Signal(ctx, l, sig)
}
// Init initializes the cluster.
func Init(ctx context.Context, l *logger.Logger, clusterName string, opts install.StartOpts) error {
	cluster, err := getClusterFromCache(l, clusterName)
	if err != nil {
		return err
	}
	return cluster.Init(ctx, l, opts.GetInitTarget())
}
// Wipe wipes the nodes in a cluster.
func Wipe(ctx context.Context, l *logger.Logger, clusterName string, preserveCerts bool) error {
	cluster, err := getClusterFromCache(l, clusterName)
	if err != nil {
		return err
	}
	return cluster.Wipe(ctx, l, preserveCerts)
}
// Reformat reformats disks in a cluster to use the specified filesystem.
//
// Supported filesystems are vm.Zfs (installed on demand) and vm.Ext4; any
// other value is rejected. Any existing data1 pool/mount is torn down before
// the new filesystem is created on /dev/sdb and mounted at /mnt/data1.
func Reformat(ctx context.Context, l *logger.Logger, clusterName string, fs string) error {
	c, err := getClusterFromCache(l, clusterName)
	if err != nil {
		return err
	}

	var fsCmd string
	switch fs {
	case vm.Zfs:
		if err := install.Install(ctx, l, c, []string{vm.Zfs}); err != nil {
			return err
		}
		fsCmd = `sudo zpool create -f data1 -m /mnt/data1 /dev/sdb`
	case vm.Ext4:
		fsCmd = `sudo mkfs.ext4 -F /dev/sdb && sudo mount -o defaults /dev/sdb /mnt/data1`
	default:
		return fmt.Errorf("unknown filesystem %q", fs)
	}

	// Cleanup: return Run's error directly instead of the previous
	// assign-check-return-nil sequence (identical behavior, less noise).
	return c.Run(ctx, l, os.Stdout, os.Stderr, install.WithNodes(c.Nodes), "reformatting", fmt.Sprintf(`
set -euo pipefail
if sudo zpool list -Ho name 2>/dev/null | grep ^data1$; then
sudo zpool destroy -f data1
fi
if mountpoint -q /mnt/data1; then
sudo umount -f /mnt/data1
fi
%s
sudo chmod 777 /mnt/data1
`, fsCmd))
}
// Install installs third party software.
//
// The cluster name can include a node selector (e.g. "foo:1-3").
func Install(ctx context.Context, l *logger.Logger, clusterName string, software []string) error {
	c, err := getClusterFromCache(l, clusterName)
	if err != nil {
		return err
	}
	// As seen in #103316, this can hit a 503 Service Unavailable when
	// trying to download the package, so we retry every 30 seconds
	// for up to 5 mins below. The caller may choose to fail or skip the test.
	return retry.WithMaxAttempts(ctx, retry.Options{
		InitialBackoff: 30 * time.Second,
		Multiplier:     1,
	}, 10, func() error {
		err := install.Install(ctx, l, c, software)
		// Wrapf returns nil when err is nil, so success falls through cleanly.
		err = errors.Wrapf(err, "retryable infrastructure error: could not install %s", software)
		if err != nil {
			// Use an explicit format directive: passing err.Error() as the
			// format string would misinterpret any '%' in the message.
			l.Printf("%s", err)
		}
		return err
	})
}
// Download downloads 3rd party tools, using a GCS cache if possible.
func Download(
	ctx context.Context, l *logger.Logger, clusterName string, src, sha, dest string,
) error {
	cluster, err := getClusterFromCache(l, clusterName)
	if err != nil {
		return err
	}
	// The install package implements the cache-aware download.
	return install.Download(ctx, l, cluster, src, sha, dest)
}
// DistributeCerts distributes certificates to the nodes in a cluster.
// If the certificates already exist, no action is taken.
func DistributeCerts(ctx context.Context, l *logger.Logger, clusterName string) error {
	cluster, err := getClusterFromCache(l, clusterName)
	if err != nil {
		return err
	}
	return cluster.DistributeCerts(ctx, l)
}
// Put copies a local file to the nodes in a cluster.
func Put(
	ctx context.Context, l *logger.Logger, clusterName, src, dest string, useTreeDist bool,
) error {
	cluster, err := getClusterFromCache(l, clusterName, install.UseTreeDistOption(useTreeDist))
	if err != nil {
		return err
	}
	return cluster.Put(ctx, l, cluster.Nodes, src, dest)
}
// Get copies a remote file from the nodes in a cluster.
// If the file is retrieved from multiple nodes the destination
// file name will be prefixed with the node number.
func Get(ctx context.Context, l *logger.Logger, clusterName, src, dest string) error {
	cluster, err := getClusterFromCache(l, clusterName)
	if err != nil {
		return err
	}
	return cluster.Get(ctx, l, cluster.Nodes, src, dest)
}
// PGURLOptions groups the knobs PgURL uses when constructing connection URLs.
type PGURLOptions struct {
	// Database is passed through to the generated URL (empty for the default).
	Database string
	// Secure selects TLS connection URLs.
	Secure bool
	// External, if set, uses the VMs' public IPs instead of internal ones.
	External bool
	// VirtualClusterName selects which virtual cluster's SQL service to
	// target (empty targets the default service).
	VirtualClusterName string
	// SQLInstance disambiguates between multiple SQL instances of a
	// virtual cluster on the same node.
	SQLInstance int
	// Auth is the authentication mode embedded in the URL.
	Auth install.PGAuthMode
}
// PgURL generates pgurls for the nodes in a cluster.
//
// One URL is returned per target node, in node order. certsDir is only
// meaningful for secure clusters.
func PgURL(
	ctx context.Context, l *logger.Logger, clusterName, certsDir string, opts PGURLOptions,
) ([]string, error) {
	c, err := getClusterFromCache(l, clusterName, install.SecureOption(opts.Secure), install.PGUrlCertsDirOption(certsDir))
	if err != nil {
		return nil, err
	}
	nodes := c.TargetNodes()
	ips := make([]string, len(nodes))
	if opts.External {
		for i := 0; i < len(nodes); i++ {
			ips[i] = c.VMs[nodes[i]-1].PublicIP
		}
	} else {
		for i := 0; i < len(nodes); i++ {
			ip, err := c.GetInternalIP(nodes[i])
			if err != nil {
				// Fail fast with the underlying cause; previously the error
				// was dropped and surfaced later only as a generic
				// "empty ip" failure.
				return nil, errors.Wrapf(err, "failed to get internal IP for node n%d", nodes[i])
			}
			ips[i] = ip
		}
	}
	var urls []string
	for i, ip := range ips {
		// Resolve the SQL service on each node to find the right port.
		desc, err := c.DiscoverService(ctx, nodes[i], opts.VirtualClusterName, install.ServiceTypeSQL, opts.SQLInstance)
		if err != nil {
			return nil, err
		}
		// Defensive: external IPs may still be empty (e.g. stopped VMs).
		if ip == "" {
			return nil, errors.Errorf("empty ip: %v", ips)
		}
		urls = append(urls, c.NodeURL(ip, desc.Port, opts.VirtualClusterName, desc.ServiceMode, opts.Auth, opts.Database))
	}
	if len(urls) != len(nodes) {
		return nil, errors.Errorf("have nodes %v, but urls %v from ips %v", nodes, urls, ips)
	}
	return urls, nil
}
// urlConfig describes how urlGenerator should build per-node URLs.
type urlConfig struct {
	// path is appended to the host:port (a leading "/" is added if missing).
	path string
	// usePublicIP forces the VM's public IP instead of its DNS name.
	usePublicIP bool
	// openInBrowser additionally opens each generated URL via browserCmd.
	openInBrowser bool
	// secure selects the https scheme.
	secure bool
	// port, if non-zero, overrides UI-service discovery.
	port int
	// virtualClusterName/sqlInstance select the UI service to discover
	// when port is zero.
	virtualClusterName string
	sqlInstance        int
}
// urlGenerator builds one URL per node according to uConfig. It prefers the
// cluster's DNS name and falls back to the VM's public IP when DNS does not
// resolve (or always, for local clusters). If uConfig.port is zero, the port
// is discovered from the node's UI service. When openInBrowser is set, each
// URL is also opened with the platform's browser command.
func urlGenerator(
	ctx context.Context,
	c *install.SyncedCluster,
	l *logger.Logger,
	nodes install.Nodes,
	uConfig urlConfig,
) ([]string, error) {
	var urls []string
	for i, node := range nodes {
		host := vm.Name(c.Name, int(node)) + "." + gce.DNSDomain()
		// There are no DNS entries for local clusters.
		if c.IsLocal() {
			uConfig.usePublicIP = true
		}
		// verify DNS is working / fallback to IPs if not. Note that uConfig
		// is a copy, so flipping usePublicIP here sticks for all subsequent
		// nodes; the DNS probe itself only runs for the first node.
		if i == 0 && !uConfig.usePublicIP {
			if _, err := net.LookupHost(host); err != nil {
				l.Errorf("host %s is unreachable, falling back to public IPs. DNS entries might be outdated, run `roachprod sync`.", host)
				uConfig.usePublicIP = true
			}
		}
		if uConfig.usePublicIP {
			host = c.VMs[node-1].PublicIP
		}
		port := uConfig.port
		if port == 0 {
			// No explicit port: look up this node's UI service.
			desc, err := c.DiscoverService(
				ctx, node, uConfig.virtualClusterName, install.ServiceTypeUI, uConfig.sqlInstance,
			)
			if err != nil {
				return nil, err
			}
			port = desc.Port
		}
		scheme := "http"
		if uConfig.secure {
			scheme = "https"
		}
		if !strings.HasPrefix(uConfig.path, "/") {
			uConfig.path = "/" + uConfig.path
		}
		url := fmt.Sprintf("%s://%s:%d%s", scheme, host, port, uConfig.path)
		urls = append(urls, url)
		if uConfig.openInBrowser {
			cmd := browserCmd(url)
			if err := cmd.Run(); err != nil {
				return nil, err
			}
		}
	}
	return urls, nil
}
// browserCmd builds the platform-specific command that opens url in the
// user's default web browser.
func browserCmd(url string) *exec.Cmd {
	var program string
	var args []string
	switch runtime.GOOS {
	case "darwin":
		program = "/usr/bin/open"
	case "windows":
		program = "cmd"
		args = []string{"/c", "start"}
	default:
		// Assume a freedesktop-compatible environment (Linux/BSD).
		program = "xdg-open"
	}
	return exec.Command(program, append(args, url)...)
}
// AdminURL generates admin UI URLs for the nodes in a cluster.
func AdminURL(
	ctx context.Context,
	l *logger.Logger,
	clusterName, virtualClusterName string,
	sqlInstance int,
	path string,
	usePublicIP, openInBrowser, secure bool,
) ([]string, error) {
	cluster, err := getClusterFromCache(l, clusterName, install.SecureOption(secure))
	if err != nil {
		return nil, err
	}
	// port is left at zero so urlGenerator discovers each node's UI port.
	cfg := urlConfig{
		path:               path,
		usePublicIP:        usePublicIP,
		openInBrowser:      openInBrowser,
		secure:             secure,
		virtualClusterName: virtualClusterName,
		sqlInstance:        sqlInstance,
	}
	return urlGenerator(ctx, cluster, l, cluster.TargetNodes(), cfg)
}
// SQLPorts finds the SQL ports for a cluster.
func SQLPorts(
	ctx context.Context,
	l *logger.Logger,
	clusterName string,
	secure bool,
	virtualClusterName string,
	sqlInstance int,
) ([]int, error) {
	cluster, err := getClusterFromCache(l, clusterName, install.SecureOption(secure))
	if err != nil {
		return nil, err
	}
	// Collect one port per node, in node order.
	var ports []int
	for _, node := range cluster.Nodes {
		port, discoverErr := cluster.NodePort(ctx, node, virtualClusterName, sqlInstance)
		if discoverErr != nil {
			return nil, errors.Wrapf(discoverErr, "Error discovering SQL Port for node %d", node)
		}
		ports = append(ports, port)
	}
	return ports, nil
}
// AdminPorts finds the AdminUI ports for a cluster.
func AdminPorts(
	ctx context.Context,
	l *logger.Logger,
	clusterName string,
	secure bool,
	virtualClusterName string,
	sqlInstance int,
) ([]int, error) {
	cluster, err := getClusterFromCache(l, clusterName, install.SecureOption(secure))
	if err != nil {
		return nil, err
	}
	// Collect one UI port per node, in node order.
	var ports []int
	for _, node := range cluster.Nodes {
		port, discoverErr := cluster.NodeUIPort(ctx, node, virtualClusterName, sqlInstance)
		if discoverErr != nil {
			return nil, errors.Wrapf(discoverErr, "Error discovering UI Port for node %d", node)
		}
		ports = append(ports, port)
	}
	return ports, nil
}
// PprofOpts specifies the options needed by Pprof().
type PprofOpts struct {
	// Heap selects a heap profile instead of a CPU profile.
	Heap bool
	// Open serves each captured profile via `go tool pprof -http`.
	Open bool
	// StartingPort is the first local port used when Open is set; each
	// subsequent profile gets the next port.
	StartingPort int
	// Duration is the profile capture window passed to the pprof endpoint.
	Duration time.Duration
}
// Pprof captures a profile (heap when opts.Heap is set, CPU otherwise) from
// the debug/pprof endpoint of every target node in parallel and saves each
// one to a local pprof-*.out file. If opts.Open is set, each profile is then
// served via `go tool pprof -http` on consecutive ports starting at
// opts.StartingPort.
func Pprof(ctx context.Context, l *logger.Logger, clusterName string, opts PprofOpts) error {
	c, err := getClusterFromCache(l, clusterName)
	if err != nil {
		return err
	}
	var profType string
	var description string
	if opts.Heap {
		description = "capturing heap profile"
		profType = "heap"
	} else {
		description = "capturing CPU profile"
		profType = "profile"
	}
	outputFiles := []string{}
	// mu guards outputFiles, which is appended to from parallel workers.
	mu := &syncutil.Mutex{}
	pprofPath := fmt.Sprintf("debug/pprof/%s?seconds=%d", profType, int(opts.Duration.Seconds()))
	// Allow the HTTP fetch twice the profile duration, but at least 30s.
	minTimeout := 30 * time.Second
	timeout := 2 * opts.Duration
	if timeout < minTimeout {
		timeout = minTimeout
	}
	httpClient := httputil.NewClientWithTimeout(timeout)
	startTime := timeutil.Now().Unix()
	err = c.Parallel(ctx, l, install.WithNodes(c.TargetNodes()).WithDisplay(description),
		func(ctx context.Context, node install.Node) (*install.RunResultDetails, error) {
			res := &install.RunResultDetails{Node: node}
			host := c.Host(node)
			port, err := c.NodeUIPort(ctx, node, "" /* virtualClusterName */, 0 /* sqlInstance */)
			if err != nil {
				return nil, err
			}
			scheme := "http"
			if c.Secure {
				scheme = "https"
			}
			outputFile := fmt.Sprintf("pprof-%s-%d-%s-%04d.out", profType, startTime, c.Name, node)
			outputDir := filepath.Dir(outputFile)
			// Download into a temp file and rename into place on success so a
			// partial download never appears under the final name.
			file, err := os.CreateTemp(outputDir, ".pprof")
			if err != nil {
				res.Err = errors.Wrap(err, "create tmpfile for pprof download")
				return res, res.Err
			}
			defer func() {
				// Best-effort cleanup; on the happy path the file was already
				// closed and renamed away, so both calls are expected to fail.
				err := file.Close()
				if err != nil && !errors.Is(err, oserror.ErrClosed) {
					l.Errorf("warning: could not close temporary file")
				}
				err = os.Remove(file.Name())
				if err != nil && !oserror.IsNotExist(err) {
					l.Errorf("warning: could not remove temporary file")
				}
			}()
			pprofURL := fmt.Sprintf("%s://%s:%d/%s", scheme, host, port, pprofPath)
			resp, err := httpClient.Get(context.Background(), pprofURL)
			if err != nil {
				res.Err = err
				return res, res.Err
			}
			defer resp.Body.Close()
			if resp.StatusCode != http.StatusOK {
				res.Err = errors.Newf("unexpected status from pprof endpoint: %s", resp.Status)
				return res, res.Err
			}
			if _, err := io.Copy(file, resp.Body); err != nil {
				res.Err = err
				return res, res.Err
			}
			if err := file.Sync(); err != nil {
				res.Err = err
				return res, res.Err
			}
			if err := file.Close(); err != nil {
				res.Err = err
				return res, res.Err
			}
			if err := os.Rename(file.Name(), outputFile); err != nil {
				res.Err = err
				return res, res.Err
			}
			mu.Lock()
			outputFiles = append(outputFiles, outputFile)
			mu.Unlock()
			return res, nil
		})
	for _, s := range outputFiles {
		l.Printf("Created %s", s)
	}
	if err != nil {
		// NOTE(review): failures terminate the whole process instead of
		// returning an error — presumably intended for CLI use; confirm
		// before calling this from library code.
		exit.WithCode(exit.UnspecifiedError())
	}
	if opts.Open {
		// Serve each profile on its own port and block until every pprof
		// viewer has exited.
		waitCommands := []*exec.Cmd{}
		for i, file := range outputFiles {
			port := opts.StartingPort + i
			cmd := exec.Command("go", "tool", "pprof",
				"-http", fmt.Sprintf(":%d", port),
				file)
			waitCommands = append(waitCommands, cmd)
			if err := cmd.Start(); err != nil {
				return err
			}
		}
		for _, cmd := range waitCommands {
			err := cmd.Wait()
			if err != nil {
				return err
			}
		}
	}
	return nil
}
// Destroy destroys the named clusters (or all of the user's clusters with
// destroyAllMine, or all local clusters with destroyAllLocal), in parallel.
func Destroy(
	l *logger.Logger, destroyAllMine bool, destroyAllLocal bool, clusterNames ...string,
) error {
	if err := LoadClusters(); err != nil {
		return errors.Wrap(err, "problem loading clusters")
	}
	// We want to avoid running ListCloud() if we are only trying to destroy a
	// local cluster.
	var cld *cloud.Cloud
	switch {
	case destroyAllMine:
		if len(clusterNames) != 0 {
			return errors.New("--all-mine cannot be combined with cluster names")
		}
		if destroyAllLocal {
			return errors.New("--all-mine cannot be combined with --all-local")
		}
		destroyPattern, err := userClusterNameRegexp(l)
		if err != nil {
			return err
		}
		// ListCloud may fail due to a transient provider error, but we may have still
		// found the cluster(s) we care about. Destroy the cluster(s) we know about
		// and let the caller retry.
		cld, _ = cloud.ListCloud(l, vm.ListOptions{IncludeEmptyClusters: true})
		clusters := cld.Clusters.FilterByName(destroyPattern)
		clusterNames = clusters.Names()
	case destroyAllLocal:
		if len(clusterNames) != 0 {
			return errors.New("--all-local cannot be combined with cluster names")
		}
		clusterNames = local.Clusters()
	default:
		if len(clusterNames) == 0 {
			return errors.New("no cluster name provided")
		}
	}
	// The workers below share cld. Guard the lazy ListCloud call with a
	// sync.Once so concurrent workers neither race on the assignment (the
	// previous code let every worker write cld concurrently) nor list the
	// cloud more than once; Once.Do also publishes the write to all workers.
	var listOnce sync.Once
	if err := ctxgroup.GroupWorkers(
		context.TODO(),
		len(clusterNames),
		func(ctx context.Context, idx int) error {
			name := clusterNames[idx]
			if config.IsLocalClusterName(name) {
				return destroyLocalCluster(ctx, l, name)
			}
			listOnce.Do(func() {
				if cld == nil {
					// ListCloud may fail due to a transient provider error, but
					// we may have still found the cluster(s) we care about.
					// Destroy the cluster(s) we know about and let the caller
					// retry.
					cld, _ = cloud.ListCloud(l, vm.ListOptions{IncludeEmptyClusters: true})
				}
			})
			return destroyCluster(ctx, cld, l, name)
		}); err != nil {
		return err
	}
	l.Printf("OK")
	return nil
}
// destroyCluster tears down the named non-local cluster using the cloud
// listing in cld.
func destroyCluster(
	ctx context.Context, cld *cloud.Cloud, l *logger.Logger, clusterName string,
) error {
	cluster, ok := cld.Clusters[clusterName]
	if !ok {
		return fmt.Errorf("cluster %s does not exist", clusterName)
	}
	if cluster.IsEmptyCluster() {
		l.Printf("Destroying empty cluster %s with 0 nodes", clusterName)
	} else {
		l.Printf("Destroying cluster %s with %d nodes", clusterName, len(cluster.VMs))
	}
	return cloud.DestroyCluster(l, cluster)
}
// destroyLocalCluster wipes and then deletes the named local cluster.
func destroyLocalCluster(ctx context.Context, l *logger.Logger, clusterName string) error {
	if _, ok := readSyncedClusters(clusterName); !ok {
		return fmt.Errorf("cluster %s does not exist", clusterName)
	}
	cluster, err := newCluster(l, clusterName)
	if err != nil {
		return err
	}
	// Wipe node data before removing the cluster record itself.
	if wipeErr := cluster.Wipe(ctx, l, false); wipeErr != nil {
		return wipeErr
	}
	return local.DeleteCluster(l, clusterName)
}
// ClusterAlreadyExistsError is returned when the cluster name passed to Create is already used by another cluster.
type ClusterAlreadyExistsError struct {
	name string // the conflicting cluster name
}
// Error implements the error interface.
func (e *ClusterAlreadyExistsError) Error() string {
	return fmt.Sprintf("cluster %s already exists", e.name)
}
// cleanupFailedCreate destroys whatever VMs a failed Create call managed to
// provision for clusterName.
func cleanupFailedCreate(l *logger.Logger, clusterName string) error {
	cluster, err := getClusterFromCloud(l, clusterName)
	if err != nil {
		// If the cluster doesn't exist, we didn't manage to create any VMs
		// before failing. Not an error.
		//nolint:returnerrcheck
		return nil
	}
	return cloud.DestroyCluster(l, cluster)
}
// AddLabels applies the given labels to all VMs in the cluster, both at the
// provider and in the local cluster cache.
func AddLabels(l *logger.Logger, clusterName string, labels map[string]string) error {
	c, err := getClusterFromCache(l, clusterName)
	if err != nil {
		return err
	}
	err = vm.FanOut(c.VMs, func(p vm.Provider, vms vm.List) error {
		return p.AddLabels(l, vms, labels)
	})
	if err != nil {
		return err
	}
	// Adding labels is not supported for local clusters, we don't
	// need to update the local cluster cache.
	if config.IsLocalClusterName(clusterName) {
		return nil
	}
	// Update the tags in the local cluster cache. Iterate by index: ranging
	// by value yields copies, so (a) a nil Labels map could never be
	// replaced, and (b) writing to a nil map would panic.
	for i := range c.Cluster.VMs {
		m := &c.Cluster.VMs[i]
		if m.Labels == nil {
			m.Labels = make(map[string]string, len(labels))
		}
		for k, v := range labels {
			m.Labels[k] = v
		}
	}
	return saveCluster(l, &c.Cluster)
}
// RemoveLabels deletes the given label keys from all VMs in the cluster, both
// at the provider and in the local cluster cache.
func RemoveLabels(l *logger.Logger, clusterName string, labels []string) error {
	cluster, err := getClusterFromCache(l, clusterName)
	if err != nil {
		return err
	}
	if fanErr := vm.FanOut(cluster.VMs, func(p vm.Provider, vms vm.List) error {
		return p.RemoveLabels(l, vms, labels)
	}); fanErr != nil {
		return fanErr
	}
	// Update the tags in the local cluster cache (delete on a missing key
	// or nil map is a safe no-op).
	for _, machine := range cluster.Cluster.VMs {
		for _, key := range labels {
			delete(machine.Labels, key)
		}
	}
	return saveCluster(l, &cluster.Cluster)
}
// Create provisions a new cluster from the given per-provider create options.
// The cluster name is taken from opts[0] and must pass verifyClusterName for
// username. On a partial failure of a non-local create, any VMs that were
// created are destroyed again before returning.
func Create(
	ctx context.Context, l *logger.Logger, username string, opts ...*cloud.ClusterCreateOpts,
) (retErr error) {
	var numNodes int
	for _, o := range opts {
		numNodes = numNodes + o.Nodes
	}
	if numNodes <= 0 || numNodes >= 1000 {
		// Upper limit is just for safety.
		return fmt.Errorf("number of nodes must be in [1..999]")
	}
	// NB: the numNodes check above guarantees opts is non-empty here.
	clusterName := opts[0].CreateOpts.ClusterName
	if err := verifyClusterName(l, clusterName, username); err != nil {
		return err
	}
	isLocal := config.IsLocalClusterName(clusterName)
	if isLocal {
		// To ensure that multiple processes don't create local clusters at
		// the same time (causing port collisions), acquire the lock file.
		unlockFn, err := lock.AcquireFilesystemLock(config.DefaultLockPath)
		if err != nil {
			return err
		}
		defer unlockFn()
	}
	if err := LoadClusters(); err != nil {
		return errors.Wrap(err, "problem loading clusters")
	}
	if !isLocal {
		// ListCloud may fail due to a transient provider error, but
		// we may not even be creating a cluster with that provider.
		// If the cluster does exist, and we didn't find it, it will
		// fail on the provider's end.
		cld, _ := cloud.ListCloud(l, vm.ListOptions{})
		if _, ok := cld.Clusters[clusterName]; ok {
			return &ClusterAlreadyExistsError{name: clusterName}
		}
		// On any error below, tear down whatever VMs were created before
		// the failure so we don't leak a half-built cluster.
		defer func() {
			if retErr == nil {
				return
			}
			l.Errorf("Cleaning up partially-created cluster (prev err: %s)", retErr)
			if err := cleanupFailedCreate(l, clusterName); err != nil {
				l.Errorf("Error while cleaning up partially-created cluster: %s", err)
			} else {
				l.Printf("Cleaning up OK")
			}
		}()
	} else {
		if _, ok := readSyncedClusters(clusterName); ok {
			return &ClusterAlreadyExistsError{name: clusterName}
		}
		// If the local cluster is being created, force the local Provider to be used
		for _, o := range opts {
			o.CreateOpts.VMProviders = []string{local.ProviderName}
		}
	}
	// Reject zfs on providers that don't support it before creating anything.
	for _, o := range opts {
		if o.CreateOpts.SSDOpts.FileSystem == vm.Zfs {
			for _, provider := range o.CreateOpts.VMProviders {
				// TODO(DarrylWong): support zfs on other providers, see: #123775.
				// Once done, revisit all tests that set zfs to see if they can run on non GCE.
				if !(provider == gce.ProviderName || provider == aws.ProviderName) {
					return fmt.Errorf(
						"creating a node with --filesystem=zfs is currently not supported in %q", provider,
					)
				}
			}
		}
	}
	l.Printf("Creating cluster %s with %d nodes...", clusterName, numNodes)
	if createErr := cloud.CreateCluster(l, opts); createErr != nil {
		return createErr
	}
	if config.IsLocalClusterName(clusterName) {
		// No need for ssh for local clusters.
		return LoadClusters()
	}
	l.Printf("Created cluster %s; setting up SSH...", clusterName)
	return SetupSSH(ctx, l, clusterName)
}
// Grow adds numNodes nodes to the named cluster and re-runs SSH setup.
func Grow(ctx context.Context, l *logger.Logger, clusterName string, numNodes int) error {
	if numNodes <= 0 || numNodes >= 1000 {
		// Upper limit is just for safety.
		return fmt.Errorf("number of nodes must be in [1..999]")
	}
	cluster, err := getClusterFromCache(l, clusterName)
	if err != nil {
		return err
	}
	if growErr := cloud.GrowCluster(l, &cluster.Cluster, numNodes); growErr != nil {
		return growErr
	}
	return SetupSSH(ctx, l, clusterName)
}
// Shrink removes numNodes nodes from the named cluster and syncs host state.
func Shrink(ctx context.Context, l *logger.Logger, clusterName string, numNodes int) error {
	cluster, err := getClusterFromCache(l, clusterName)
	if err != nil {
		return err
	}
	if shrinkErr := cloud.ShrinkCluster(l, &cluster.Cluster, numNodes); shrinkErr != nil {
		return shrinkErr
	}
	_, syncErr := Sync(l, vm.ListOptions{})
	return syncErr
}
// GC garbage-collects expired clusters, unused SSH key pairs in AWS, and unused
// DNS records.
func GC(l *logger.Logger, dryrun bool) error {
	if err := LoadClusters(); err != nil {
		return err
	}
	// Use the `addOpFn` helper to run GC operations concurrently and collect
	// errors.
	errorsChan := make(chan error, 8)
	var wg sync.WaitGroup
	addOpFn := func(fn func() error) {
		wg.Add(1)
		go func() {
			defer wg.Done()
			errorsChan <- fn()
		}()
	}
	// GCAWSKeyPairs has no dependencies and can start immediately.
	addOpFn(func() error {
		return cloud.GCAWSKeyPairs(l, dryrun)
	})
	// ListCloud may fail for a provider, but we can still attempt GC on
	// the clusters we do have. Both the cluster GC and the DNS GC consume
	// the same listing.
	cld, _ := cloud.ListCloud(l, vm.ListOptions{IncludeEmptyClusters: true})
	addOpFn(func() error {
		return cloud.GCClusters(l, cld, dryrun)
	})
	addOpFn(func() error {
		return cloud.GCDNS(l, cld, dryrun)
	})
	// Wait for all operations to finish and combine all errors. Closing the
	// channel after every worker has sent terminates the range below.
	go func() {
		wg.Wait()
		close(errorsChan)
	}()
	var combinedErrors error
	for err := range errorsChan {
		combinedErrors = errors.CombineErrors(combinedErrors, err)
	}
	return combinedErrors
}
// LogsOpts bundles the parameters forwarded by Logs to Cluster.Logs.
type LogsOpts struct {
	// Dir is the remote log directory; Filter and ProgramFilter restrict
	// which entries/programs are included (semantics per Cluster.Logs).
	Dir, Filter, ProgramFilter string
	// Interval — presumably the poll interval while following logs;
	// confirm against Cluster.Logs.
	Interval time.Duration
	// From/To bound the time range of the fetched logs.
	From, To time.Time
	// Out receives the log output.
	Out io.Writer
}
// Logs fetches logs from the given cluster into dest, forwarding the options
// in logsOpts to Cluster.Logs (see there for exact semantics).
func Logs(l *logger.Logger, clusterName, dest, username string, logsOpts LogsOpts) error {
	cluster, err := getClusterFromCache(l, clusterName)
	if err != nil {
		return err
	}
	return cluster.Logs(
		l, logsOpts.Dir, dest, username, logsOpts.Filter, logsOpts.ProgramFilter,
		logsOpts.Interval, logsOpts.From, logsOpts.To, logsOpts.Out,
	)
}
// StageURL returns the URLs from which the given application/version can be
// staged for the given OS and architecture, defaulting both to the host
// platform when unset.
func StageURL(
	l *logger.Logger, applicationName, version, stageOS string, stageArch string,
) ([]*url.URL, error) {
	// Note: avoid naming a local `os` — it would shadow the os package.
	targetOS := runtime.GOOS
	if stageOS != "" {
		targetOS = stageOS
	}
	targetArch := runtime.GOARCH
	if stageArch != "" {
		targetArch = stageArch
	}
	return install.URLsForApplication(applicationName, version, targetOS, vm.CPUArch(targetArch))
}
// InitProviders initializes providers and returns a map that indicates
// if a provider is active or inactive.
func InitProviders() map[string]string {
	providersState := make(map[string]string)
	// record translates an Init result into the Active/Inactive state string.
	record := func(name string, err error) {
		if err != nil {
			providersState[name] = "Inactive - " + err.Error()
		} else {
			providersState[name] = "Active"
		}
	}
	record(aws.ProviderName, aws.Init())
	record(gce.ProviderName, gce.Init())
	record(azure.ProviderName, azure.Init())
	record(local.ProviderName, local.Init(localVMStorage{}))
	return providersState
}
// StartGrafana spins up a prometheus and grafana instance on the last node provided and scrapes
// from all other nodes.
//
// Callers either pass grafanaURL/grafanaJSON (dashboards to load into a
// default prometheus config) or a fully custom promCfg — not both.
func StartGrafana(
	ctx context.Context,
	l *logger.Logger,
	clusterName string,
	arch vm.CPUArch,
	grafanaURL string,
	grafanaJSON []string,
	promCfg *prometheus.Config, // passed iff grafanaURL is empty
) error {
	if (grafanaURL != "" || len(grafanaJSON) > 0) && promCfg != nil {
		return errors.New("cannot pass grafanaURL or grafanaJSON and a non empty promCfg")
	}
	c, err := getClusterFromCache(l, clusterName)
	if err != nil {
		return err
	}
	nodes, err := install.ListNodes("all", len(c.VMs))
	if err != nil {
		return err
	}
	if promCfg == nil {
		// No custom config given: build the default one.
		promCfg = &prometheus.Config{}
		// Configure the prometheus/grafana servers to run on the last node in the cluster
		promCfg.WithPrometheusNode(nodes[len(nodes)-1])
		// Configure scraping on all nodes in the cluster
		promCfg.WithCluster(nodes)
		promCfg.WithNodeExporter(nodes)
		// Scrape all workload prometheus ports, just in case.
		for _, i := range nodes {
			promCfg.WithWorkload(fmt.Sprintf("workload_on_n%d", i), i, 0 /* use default port */)
		}
		// By default, spin up a grafana server
		promCfg.Grafana.Enabled = true
		if grafanaURL != "" {
			promCfg.WithGrafanaDashboard(grafanaURL)
		}
		for _, str := range grafanaJSON {
			promCfg.WithGrafanaDashboardJSON(str)
		}
	}
	_, err = prometheus.Init(ctx, l, c, arch, *promCfg)
	if err != nil {
		return err
	}
	// Log the dashboard URL for convenience.
	url, err := GrafanaURL(ctx, l, clusterName, false)
	if err != nil {
		return err
	}
	l.Printf("Grafana dashboard: %s", url)
	return nil
}
// StopGrafana shuts down prometheus and grafana servers on the last node in
// the cluster, if they exist.
func StopGrafana(ctx context.Context, l *logger.Logger, clusterName string, dumpDir string) error {
	cluster, err := getClusterFromCache(l, clusterName)
	if err != nil {
		return err
	}
	nodes, err := install.ListNodes("all", len(cluster.VMs))
	if err != nil {
		return err
	}
	return prometheus.Shutdown(ctx, cluster, l, nodes, dumpDir)
}
// GrafanaURL returns a url to the grafana dashboard
func GrafanaURL(
	ctx context.Context, l *logger.Logger, clusterName string, openInBrowser bool,
) (string, error) {
	cluster, err := getClusterFromCache(l, clusterName)
	if err != nil {
		return "", err
	}
	nodes, err := install.ListNodes("all", len(cluster.VMs))
	if err != nil {
		return "", err
	}
	// grafana is assumed to be running on the last node in the target
	grafanaNode := install.Nodes{nodes[len(nodes)-1]}
	cfg := urlConfig{
		usePublicIP:   true,
		openInBrowser: openInBrowser,
		secure:        false,
		port:          3000, // grafana's default HTTP port
	}
	urls, err := urlGenerator(ctx, cluster, l, grafanaNode, cfg)
	if err != nil {
		return "", err
	}
	return urls[0], nil
}
// AddGrafanaAnnotation posts an annotation request to the grafana instance at
// host; thin wrapper around grafana.AddAnnotation.
func AddGrafanaAnnotation(
	ctx context.Context, host string, secure bool, req grafana.AddAnnotationRequest,
) error {
	return grafana.AddAnnotation(ctx, host, secure, req)
}
// PrometheusSnapshot takes a snapshot of prometheus and stores the snapshot and
// a script to spin up a docker instance for it to the given directory. We
// assume the last node contains the prometheus server.
func PrometheusSnapshot(
	ctx context.Context, l *logger.Logger, clusterName string, dumpDir string,
) error {
	cluster, err := getClusterFromCache(l, clusterName)
	if err != nil {
		return err
	}
	nodes, err := install.ListNodes("all", len(cluster.VMs))
	if err != nil {
		return err
	}
	promNode := install.Nodes{nodes[len(nodes)-1]}
	// Bound the snapshot operation to five minutes.
	snapshotCtx, cancel := context.WithTimeout(ctx, 5*time.Minute)
	defer cancel()
	if snapErr := prometheus.Snapshot(snapshotCtx, cluster, l, promNode, dumpDir); snapErr != nil {
		l.Printf("failed to get prometheus snapshot: %v", snapErr)
		return snapErr
	}
	return nil
}
// SnapshotTTL controls how long volume snapshots are kept around; snapshots
// carry it in their vm.TagLifetime label (see CreateSnapshot).
const SnapshotTTL = 30 * 24 * time.Hour // 30 days
// CreateSnapshot snapshots all the persistent volumes attached to nodes in the
// named cluster. One snapshot is created per non-boot volume; the returned
// slice is sorted.
func CreateSnapshot(
	ctx context.Context, l *logger.Logger, clusterName string, vsco vm.VolumeSnapshotCreateOpts,
) ([]vm.VolumeSnapshot, error) {
	c, err := getClusterFromCache(l, clusterName)
	if err != nil {
		return nil, err
	}
	nodes := c.TargetNodes()
	// Node status supplies the running cockroach version, embedded in the
	// snapshot names/labels below.
	nodesStatus, err := c.Status(ctx, l)
	if err != nil {
		return nil, err
	}
	// 1-indexed node IDs.
	statusByNodeID := make(map[int]install.NodeStatus)
	for _, status := range nodesStatus {
		statusByNodeID[status.NodeID] = status
	}
	// TODO(irfansharif): Add validation that we're using some released version,
	// probably the predecessor one. Also ensure that any running CRDB processes
	// have been stopped since we're taking raw disk snapshots cluster-wide.
	// volumesSnapshotMu collects results from the parallel workers below.
	volumesSnapshotMu := struct {
		syncutil.Mutex
		snapshots []vm.VolumeSnapshot
	}{}
	if err := c.Parallel(ctx, l, install.WithNodes(nodes),
		func(ctx context.Context, node install.Node) (*install.RunResultDetails, error) {
			res := &install.RunResultDetails{Node: node}
			cVM := c.VMs[node-1]
			crdbVersion := statusByNodeID[int(node)].Version
			if crdbVersion == "" {
				crdbVersion = "unknown"
			}
			crdbVersion = strings.TrimPrefix(crdbVersion, "cockroach-")
			// N.B. snapshot name cannot exceed 63 characters, so we use short sha for dev version.
			if index := strings.Index(crdbVersion, "dev-"); index != -1 {
				sha := crdbVersion[index+4:]
				if len(sha) > 7 {
					crdbVersion = crdbVersion[:index+4] + sha[:7]
				}
			}
			labels := map[string]string{
				"roachprod-node-src-spec": cVM.MachineType,
				"roachprod-cluster-node":  cVM.Name,
				"roachprod-crdb-version":  crdbVersion,
				vm.TagCluster:             clusterName,
				vm.TagRoachprod:           "true",
				vm.TagLifetime:            SnapshotTTL.String(),
				vm.TagCreated: strings.ToLower(
					strings.ReplaceAll(timeutil.Now().Format(time.RFC3339), ":", "_")), // format according to gce label naming requirements
			}
			// Caller-supplied labels override the defaults above.
			for k, v := range vsco.Labels {
				labels[k] = v
			}
			if err := vm.ForProvider(cVM.Provider, func(provider vm.Provider) error {
				volumes, err := provider.ListVolumes(l, &cVM)
				if err != nil {
					return err
				}
				if len(volumes) == 0 {
					return fmt.Errorf("node %d does not have any non-bootable persistent volumes attached", node)
				}
				for _, volume := range volumes {
					snapshotFingerprintInfix := strings.ReplaceAll(
						fmt.Sprintf("%s-n%d", crdbVersion, len(nodes)), ".", "-")
					snapshotName := fmt.Sprintf("%s-%s-%04d", vsco.Name, snapshotFingerprintInfix, node)
					if len(snapshotName) > 63 {
						return fmt.Errorf("snapshot name %q exceeds 63 characters; shorten name prefix and use description arg. for more context", snapshotName)
					}
					volumeSnapshot, err := provider.CreateVolumeSnapshot(l, volume,
						vm.VolumeSnapshotCreateOpts{
							Name:        snapshotName,
							Labels:      labels,
							Description: vsco.Description,
						})
					if err != nil {
						return err
					}
					l.Printf("created volume snapshot %s (id=%s) for volume %s on %s/n%d\n",
						volumeSnapshot.Name, volumeSnapshot.ID, volume.Name, volume.ProviderResourceID, node)
					volumesSnapshotMu.Lock()
					volumesSnapshotMu.snapshots = append(volumesSnapshotMu.snapshots, volumeSnapshot)
					volumesSnapshotMu.Unlock()
				}
				return nil
			}); err != nil {
				// Surface the provider error through the per-node result;
				// Parallel aggregates the failures.
				res.Err = err
			}
			return res, nil
		}); err != nil {
		return nil, err
	}
	sort.Sort(vm.VolumeSnapshots(volumesSnapshotMu.snapshots))
	return volumesSnapshotMu.snapshots, nil
}
// ListSnapshots returns the volume snapshots known to the named provider
// that match vslo.
func ListSnapshots(
	ctx context.Context, l *logger.Logger, provider string, vslo vm.VolumeSnapshotListOpts,
) ([]vm.VolumeSnapshot, error) {
	var snapshots []vm.VolumeSnapshot
	err := vm.ForProvider(provider, func(p vm.Provider) error {
		var listErr error
		snapshots, listErr = p.ListVolumeSnapshots(l, vslo)
		return listErr
	})
	if err != nil {
		return nil, err
	}
	return snapshots, nil
}
// DeleteSnapshots removes the given volume snapshots via the named provider.
func DeleteSnapshots(
	ctx context.Context, l *logger.Logger, provider string, snapshots ...vm.VolumeSnapshot,
) error {
	return vm.ForProvider(provider, func(p vm.Provider) error {
		return p.DeleteVolumeSnapshots(l, snapshots...)
	})
}
// ApplySnapshots attaches volumes created from the given snapshots to the
// nodes of the named cluster and mounts them at /mnt/data1. This is
// destructive: any existing non-boot volumes on the target nodes are detached
// and deleted first. The number of snapshots must equal the number of target
// nodes; snapshot i is applied to node i+1.
func ApplySnapshots(
	ctx context.Context,
	l *logger.Logger,
	clusterName string,
	snapshots []vm.VolumeSnapshot,
	opts vm.VolumeCreateOpts,
) error {
	c, err := getClusterFromCache(l, clusterName)
	if err != nil {
		return err
	}
	if n := len(c.TargetNodes()); n != len(snapshots) {
		return fmt.Errorf("mismatched number of snapshots (%d) to node count (%d)", len(snapshots), n)
		// TODO(irfansharif): Validate labels (version, instance types).
	}
	// Detach and delete existing volumes. This is destructive.
	if err := c.Parallel(ctx, l, install.WithNodes(c.TargetNodes()),
		func(ctx context.Context, node install.Node) (*install.RunResultDetails, error) {
			res := &install.RunResultDetails{Node: node}
			cVM := &c.VMs[node-1]
			if err := vm.ForProvider(cVM.Provider, func(provider vm.Provider) error {
				volumes, err := provider.ListVolumes(l, cVM)
				if err != nil {
					return err
				}
				for _, volume := range volumes {
					if err := provider.DeleteVolume(l, volume, cVM); err != nil {
						return err
					}
					l.Printf("detached and deleted volume %s from %s", volume.ProviderResourceID, cVM.Name)
				}
				return nil
			}); err != nil {
				res.Err = err
			}
			return res, nil
		}); err != nil {
		return err
	}
	// Second phase: per node, create a volume from the matching snapshot,
	// attach it, persist the cluster cache, and mount it.
	return c.Parallel(ctx, l, install.WithNodes(c.TargetNodes()),
		func(ctx context.Context, node install.Node) (*install.RunResultDetails, error) {
			res := &install.RunResultDetails{Node: node}
			volumeOpts := opts // make a copy
			// Deep-copy the labels map so per-node additions below can't
			// mutate the caller's opts.
			volumeOpts.Labels = map[string]string{}
			for k, v := range opts.Labels {
				volumeOpts.Labels[k] = v
			}
			// TODO: same issue as above if the target nodes are not sequential starting from 1
			cVM := &c.VMs[node-1]
			if err := vm.ForProvider(cVM.Provider, func(provider vm.Provider) error {
				volumeOpts.Zone = cVM.Zone
				// NB: The "-1" signifies that it's the first attached non-boot volume.
				// This is typical naming convention in GCE clusters.
				volumeOpts.Name = fmt.Sprintf("%s-%04d-1", clusterName, node)
				volumeOpts.SourceSnapshotID = snapshots[node-1].ID
				volumes, err := provider.ListVolumes(l, cVM)
				if err != nil {
					return err
				}
				// Idempotency: skip creation if a same-named volume exists.
				for _, vol := range volumes {
					if vol.Name == volumeOpts.Name {
						l.Printf(
							"volume (%s) is already attached to node %d skipping volume creation", vol.ProviderResourceID, node)
						return nil
					}
				}
				volumeOpts.Labels[vm.TagCluster] = clusterName
				volumeOpts.Labels[vm.TagLifetime] = cVM.Lifetime.String()
				volumeOpts.Labels[vm.TagRoachprod] = "true"
				volumeOpts.Labels[vm.TagCreated] = strings.ToLower(
					strings.ReplaceAll(timeutil.Now().Format(time.RFC3339), ":", "_")) // format according to gce label naming requirements
				volume, err := provider.CreateVolume(l, volumeOpts)
				if err != nil {
					return err
				}
				l.Printf("created volume %s", volume.ProviderResourceID)
				device, err := cVM.AttachVolume(l, volume)
				if err != nil {
					return err
				}
				l.Printf("attached volume %s to %s", volume.ProviderResourceID, cVM.ProviderID)
				// Save the cluster to cache.
				if err := saveCluster(l, &c.Cluster); err != nil {
					return err
				}
				var buf bytes.Buffer
				if err := c.Run(ctx, l, &buf, &buf, install.WithNodes([]install.Node{node}),
					"mounting volume", genMountCommands(device, "/mnt/data1")); err != nil {
					l.Printf(buf.String())
					return err
				}
				l.Printf("mounted %s to %s", volume.ProviderResourceID, cVM.ProviderID)
				return nil
			}); err != nil {
				res.Err = err
			}
			return res, nil
		})
}
// genMountCommands returns a single shell command string that creates
// mountDir, mounts devicePath onto it, and opens its permissions. The steps
// are chained with "&&" so any failure aborts the remainder.
func genMountCommands(devicePath, mountDir string) string {
	steps := []string{
		fmt.Sprintf("sudo mkdir -p %s", mountDir),
		fmt.Sprintf("sudo mount -o discard,defaults %s %s", devicePath, mountDir),
		fmt.Sprintf("sudo chmod 0777 %s", mountDir),
	}
	return strings.Join(steps, " && ")
}
// isWorkloadCollectorVolume reports whether the volume was created by the
// workload collector, as indicated by its "roachprod_collector" label.
// A missing label yields the empty string, which compares unequal to "true".
func isWorkloadCollectorVolume(v vm.Volume) bool {
	return v.Labels["roachprod_collector"] == "true"
}
const (
	// otelCollectorPort is the container port receiving OpenTelemetry trace
	// data (the port CRDB's trace.opentelemetry.collector setting points at).
	otelCollectorPort = 4317
	// jaegerUIPort is the container port serving the Jaeger web UI.
	jaegerUIPort = 16686
	// jaegerContainerName is the docker container name used by StartJaeger
	// and StopJaeger to address the container.
	jaegerContainerName = "jaeger"
	// jaegerImageName is the docker image run for the all-in-one deployment.
	jaegerImageName = "jaegertracing/all-in-one:latest"
)
// StartJaeger starts a jaeger instance on the last node in the given
// cluster and configures the cluster to use it.
//
// When configureNodes is non-empty, the trace.opentelemetry.collector
// cluster setting is applied on those nodes; otherwise the SET statement is
// only printed for the caller to run manually.
func StartJaeger(
	ctx context.Context,
	l *logger.Logger,
	clusterName string,
	virtualClusterName string,
	secure bool,
	configureNodes string,
) error {
	c, err := getClusterFromCache(l, clusterName, install.SecureOption(secure))
	if err != nil {
		return err
	}
	// TODO(ssd): Currently this just uses the all-in-one docker
	// container with in memory storage. Might be nicer to just
	// install from source or get linux binaries and start them
	// with systemd. For now this just matches what we've been
	// copy and pasting.
	//
	// Jaeger always lives on the last target node.
	jaegerNode := c.TargetNodes()[len(c.TargetNodes())-1:]
	err = install.InstallTool(ctx, l, c, jaegerNode, "docker", l.Stdout, l.Stderr)
	if err != nil {
		return err
	}
	// Publish both the OTLP collector port and the UI port from the container.
	startCmd := fmt.Sprintf("docker run -d --name %s -p %[2]d:%[2]d -p %[3]d:%[3]d %s",
		jaegerContainerName,
		otelCollectorPort,
		jaegerUIPort,
		jaegerImageName)
	err = c.Run(ctx, l, l.Stdout, l.Stderr, install.WithNodes(jaegerNode), "start jaegertracing/all-in-one using docker", startCmd)
	if err != nil {
		return err
	}
	// Cluster nodes reach the collector via the jaeger node's internal IP.
	otelCollectionHost, err := c.GetInternalIP(jaegerNode[0])
	if err != nil {
		return err
	}
	otelCollectionHostPort := net.JoinHostPort(otelCollectionHost, strconv.Itoa(otelCollectorPort))
	setupStmt := fmt.Sprintf("SET CLUSTER SETTING trace.opentelemetry.collector='%s'", otelCollectionHostPort)
	if configureNodes != "" {
		nodes, err := install.ListNodes(configureNodes, len(c.VMs))
		if err != nil {
			return err
		}
		_, err = c.ExecSQL(
			ctx, l, nodes, virtualClusterName, 0, install.DefaultAuthMode(), "", /* database */
			[]string{"-e", setupStmt},
		)
		if err != nil {
			return err
		}
	}
	url, err := JaegerURL(ctx, l, clusterName, false)
	if err != nil {
		return err
	}
	l.Printf("To use with CRDB: %s", setupStmt)
	l.Printf("Jaeger UI: %s", url)
	return nil
}
// StopJaeger stops and removes the jaeger container.
func StopJaeger(ctx context.Context, l *logger.Logger, clusterName string) error {
	c, err := getClusterFromCache(l, clusterName)
	if err != nil {
		return err
	}
	// Jaeger is always installed on the last target node; see StartJaeger.
	jaegerNode := c.TargetNodes()[len(c.TargetNodes())-1:]
	stopCmd := fmt.Sprintf("docker stop %s", jaegerContainerName)
	// Previously the display title duplicated the command string itself; pass
	// a human-readable title instead (matching StartJaeger's call shape).
	err = c.Run(ctx, l, l.Stdout, l.Stderr, install.WithNodes(jaegerNode), "stopping jaeger container", stopCmd)
	if err != nil {
		return err
	}
	rmCmd := fmt.Sprintf("docker rm %s", jaegerContainerName)
	return c.Run(ctx, l, l.Stdout, l.Stderr, install.WithNodes(jaegerNode), "removing jaeger container", rmCmd)
}
// JaegerURL returns a url to the jaeger UI, assuming it was installed
// on the last node in the given cluster.
//
// If openInBrowser is true the URL is also opened in the local browser.
func JaegerURL(
	ctx context.Context, l *logger.Logger, clusterName string, openInBrowser bool,
) (string, error) {
	c, err := getClusterFromCache(l, clusterName)
	if err != nil {
		return "", err
	}
	// Same "last node" convention as StartJaeger/StopJaeger.
	jaegerNode := c.TargetNodes()[len(c.TargetNodes())-1:]
	urls, err := urlGenerator(ctx, c, l, jaegerNode, urlConfig{
		usePublicIP:   true,
		openInBrowser: openInBrowser,
		secure:        false,
		port:          jaegerUIPort,
	})
	if err != nil {
		return "", err
	}
	return urls[0], nil
}
// StartFluentBit installs, configures, and starts Fluent Bit on the cluster
// identified by clusterName.
func StartFluentBit(
	ctx context.Context, l *logger.Logger, clusterName string, config fluentbit.Config,
) error {
	// Fail fast rather than deploying a config with no API key.
	// (Message fixed: it previously read "Datadog API cannot be empty".)
	if config.DatadogAPIKey == "" {
		return errors.New("Datadog API key cannot be empty")
	}
	// Use the shared cache lookup helper instead of duplicating
	// LoadClusters + newCluster inline (same behavior).
	c, err := getClusterFromCache(l, clusterName)
	if err != nil {
		return err
	}
	return fluentbit.Install(ctx, l, c, config)
}
// StopFluentBit stops Fluent Bit on the cluster identified by clusterName.
func StopFluentBit(ctx context.Context, l *logger.Logger, clusterName string) error {
	// Use the shared cache lookup helper instead of duplicating
	// LoadClusters + newCluster inline (same behavior).
	c, err := getClusterFromCache(l, clusterName)
	if err != nil {
		return err
	}
	return fluentbit.Stop(ctx, l, c)
}
// StartOpenTelemetry installs, configures, and starts the OpenTelemetry
// Collector on the cluster identified by clusterName.
func StartOpenTelemetry(
	ctx context.Context, l *logger.Logger, clusterName string, config opentelemetry.Config,
) error {
	// Fail fast rather than deploying a config with no API key.
	// (Message fixed: it previously read "Datadog API cannot be empty".)
	if config.DatadogAPIKey == "" {
		return errors.New("Datadog API key cannot be empty")
	}
	// Use the shared cache lookup helper instead of duplicating
	// LoadClusters + newCluster inline (same behavior).
	c, err := getClusterFromCache(l, clusterName)
	if err != nil {
		return err
	}
	return opentelemetry.Install(ctx, l, c, config)
}
// StopOpenTelemetry stops the OpenTelemetry Collector on the cluster
// identified by clusterName. (Doc comment fixed to start with the function
// name per Go convention; it previously began "Stop stops".)
func StopOpenTelemetry(ctx context.Context, l *logger.Logger, clusterName string) error {
	// Use the shared cache lookup helper instead of duplicating
	// LoadClusters + newCluster inline (same behavior).
	c, err := getClusterFromCache(l, clusterName)
	if err != nil {
		return err
	}
	return opentelemetry.Stop(ctx, l, c)
}
// DestroyDNS destroys the DNS records for the given cluster.
func DestroyDNS(ctx context.Context, l *logger.Logger, clusterName string) error {
	c, err := getClusterFromCache(l, clusterName)
	if err != nil {
		return err
	}
	// Fan out to each DNS provider backing the cluster's VMs and delete all
	// records under the cluster's subdomain. The VM list is unused here.
	return vm.FanOutDNS(c.VMs, func(p vm.DNSProvider, _ vm.List) error {
		return p.DeleteRecordsBySubdomain(ctx, c.Name)
	})
}
// StorageCollectionPerformAction either starts or stops workload collection on
// a target cluster.
//
// On start it attaches a volume to each of the nodes specified in the cluster
// specifications and sends an HTTP request to the nodes. The nodes must be
// started with the COCKROACH_STORAGE_WORKLOAD_COLLECTOR environment variable.
// Otherwise, the HTTP endpoint will not be setup. Once a node receives the
// request it will perform a checkpoint which can take several minutes to
// complete. Until the checkpoint finishes the request will block. See
// HandleRequest() in pkg/server/debug/replay/replay.go for additional details.
// On stop this sends an HTTP request to each of the nodes in the cluster
// specification. On list-volumes it will read the local cache for the cluster
// to output the list of volumes attached to the nodes.
func StorageCollectionPerformAction(
	ctx context.Context,
	l *logger.Logger,
	clusterName string,
	action string,
	opts vm.VolumeCreateOpts,
) error {
	c, err := getClusterFromCache(l, clusterName)
	if err != nil {
		return err
	}
	if opts.Labels == nil {
		opts.Labels = map[string]string{}
	}
	// Tag created volumes so they can later be recognized by
	// isWorkloadCollectorVolume.
	opts.Labels["roachprod_collector"] = "true"
	mountDir := "/mnt/capture/"
	switch action {
	case "start":
		if err := createAttachMountVolumes(ctx, l, c, opts, mountDir); err != nil {
			return err
		}
	case "stop":
		// No setup needed; sendCaptureCommand below stops collection.
	case "list-volumes":
		printNodeToVolumeMapping(c)
		return nil
	default:
		// Message fixed: it previously claimed only "start or stop" were
		// valid even though "list-volumes" is also accepted.
		return errors.Errorf("expected one of start, stop, or list-volumes as the action, got: %s", action)
	}
	printNodeToVolumeMapping(c)
	return sendCaptureCommand(ctx, l, c, action, mountDir)
}
// printNodeToVolumeMapping prints, for each target node, the workload
// collector volumes currently attached to it (other volumes are skipped).
func printNodeToVolumeMapping(c *install.SyncedCluster) {
	for _, n := range c.TargetNodes() {
		nodeVM := c.VMs[n-1]
		for _, vol := range nodeVM.NonBootAttachedVolumes {
			if !isWorkloadCollectorVolume(vol) {
				continue
			}
			fmt.Printf("Node ID: %d (Name: %s) -> Volume Name: %s (ID: %s)\n",
				n, nodeVM.Name, vol.Name, vol.ProviderResourceID)
		}
	}
}
// sendCaptureCommand issues the workload-capture HTTP action ("start" or
// "stop") to every target node in parallel.
//
// For each node it first GETs the node's /debug/workload_capture endpoint to
// learn the per-store collection state, then POSTs one
// WorkloadCollectorPerformActionRequest per store. When captureDir is
// non-empty, each store's capture output directory is a timestamped
// subdirectory under it. Per-node failures are recorded on the node's
// RunResultDetails and also returned, so ParallelE surfaces them.
func sendCaptureCommand(
	ctx context.Context, l *logger.Logger, c *install.SyncedCluster, action string, captureDir string,
) error {
	nodes := c.TargetNodes()
	httpClient := httputil.NewClientWithTimeout(0 /* timeout: None */)
	_, _, err := c.ParallelE(ctx, l, install.WithNodes(nodes).WithDisplay(fmt.Sprintf("Performing workload capture %s", action)),
		func(ctx context.Context, node install.Node) (*install.RunResultDetails, error) {
			port, err := c.NodeUIPort(ctx, node, "" /* virtualClusterName */, 0 /* sqlInstance */)
			if err != nil {
				return nil, err
			}
			res := &install.RunResultDetails{Node: node}
			host := c.Host(node)
			// Match the cluster's TLS mode when talking to the debug endpoint.
			scheme := "http"
			if c.Secure {
				scheme = "https"
			}
			debugUrl := url.URL{
				Scheme: scheme,
				Host:   net.JoinHostPort(host, strconv.Itoa(port)),
				Path:   "/debug/workload_capture",
			}
			// Fetch the node's current per-store collection state.
			r, err := httpClient.Get(ctx, debugUrl.String())
			if err != nil {
				res.Err = errors.New("Failed to retrieve current store workload collection state")
				return res, res.Err
			}
			storeState := replay.ResponseType{}
			err = json.NewDecoder(r.Body).Decode(&storeState)
			if err != nil {
				res.Err = errors.New("Failed to decode response from node")
				return res, res.Err
			}
			// Send one action request per store reported by the node.
			for _, info := range storeState.Data {
				wpa := replay.WorkloadCollectorPerformActionRequest{
					StoreID: info.StoreID,
					Action:  action,
				}
				if captureDir != "" {
					// e.g. <captureDir>/store_1/20060102150405
					wpa.CaptureDirectory = path.Join(
						captureDir,
						"store_"+strconv.Itoa(info.StoreID),
						timeutil.Now().Format("20060102150405"),
					)
				}
				jsonValue, err := json.Marshal(wpa)
				if err != nil {
					res.Err = err
					return res, res.Err
				}
				response, err := httpClient.Post(ctx, debugUrl.String(), httputil.JSONContentType, bytes.NewBuffer(jsonValue))
				if err != nil {
					res.Err = err
					return res, res.Err
				}
				// Non-200 responses carry the server's error message in the
				// body; surface it as this node's error.
				if response.StatusCode != http.StatusOK {
					serverErrorMessage, err := io.ReadAll(response.Body)
					if err != nil {
						res.Err = err
						return res, res.Err
					}
					res.Err = errors.Newf("%s", string(serverErrorMessage))
					return res, res.Err
				}
			}
			return res, res.Err
		})
	return err
}
// createAttachMountVolumes creates, attaches, and mounts one volume per
// target node, sequentially. A node is skipped (without error) if a volume
// with the expected name is already attached. The cluster cache is saved
// after each successful attach so partially-completed runs are recorded.
func createAttachMountVolumes(
	ctx context.Context,
	l *logger.Logger,
	c *install.SyncedCluster,
	opts vm.VolumeCreateOpts,
	mountDir string,
) error {
	nodes := c.TargetNodes()
	for idx, n := range nodes {
		// Single-node slice for the per-node Run call below.
		curNode := nodes[idx : idx+1]
		cVM := &c.VMs[n-1]
		err := vm.ForProvider(cVM.Provider, func(provider vm.Provider) error {
			// Deterministic per-node volume name, e.g. "<cluster>-n3".
			opts.Name = fmt.Sprintf("%s-n%d", c.Name, n)
			for _, vol := range cVM.NonBootAttachedVolumes {
				if vol.Name == opts.Name {
					l.Printf(
						"A volume (%s) is already attached to node %d skipping volume creation", vol.ProviderResourceID, n)
					return nil
				}
			}
			// The volume must be created in the VM's zone to be attachable.
			opts.Zone = cVM.Zone
			volume, err := provider.CreateVolume(l, opts)
			if err != nil {
				return err
			}
			l.Printf("Created Volume %s", volume.ProviderResourceID)
			device, err := cVM.AttachVolume(l, volume)
			if err != nil {
				return err
			}
			// Save the cluster to cache
			err = saveCluster(l, &c.Cluster)
			if err != nil {
				return err
			}
			l.Printf("Attached Volume %s to %s", volume.ProviderResourceID, cVM.ProviderID)
			err = c.Run(ctx, l, l.Stdout, l.Stderr, install.WithNodes(curNode),
				"Mounting volume", genMountCommands(device, mountDir))
			return err
		})
		if err != nil {
			return err
		}
		l.Printf("Successfully mounted volume to %s", cVM.ProviderID)
	}
	return nil
}
// CreateLoadBalancer creates a load balancer for the SQL service on the given
// cluster. Currently only supports GCE.
//
// On partial creation failure the load balancer is deleted again, and the
// combined create/cleanup error is returned. For secure clusters the load
// balancer IP is added to the node certificates and nodes are signalled to
// reload them.
func CreateLoadBalancer(
	ctx context.Context,
	l *logger.Logger,
	clusterName string,
	secure bool,
	virtualClusterName string,
	sqlInstance int,
) error {
	c, err := getClusterFromCache(l, clusterName, install.SecureOption(secure))
	if err != nil {
		return err
	}
	// If virtualClusterName is not provided, use the system interface name.
	if virtualClusterName == "" {
		virtualClusterName = install.SystemInterfaceName
	}
	// Find the SQL ports for the service on all nodes.
	services, err := c.DiscoverServices(
		ctx, virtualClusterName, install.ServiceTypeSQL,
		install.ServiceNodePredicate(c.TargetNodes()...), install.ServiceInstancePredicate(sqlInstance),
	)
	if err != nil {
		return err
	}
	port := config.DefaultSQLPort
	if len(services) == 0 {
		// Proceed with the default port rather than failing; the service
		// registry may simply not be populated.
		l.Errorf("WARNING: %s SQL service not found on cluster %s, using default SQL port %d",
			virtualClusterName, clusterName, port)
	} else {
		port = services[0].Port
		// Confirm that the service has the same port on all nodes.
		for _, service := range services[1:] {
			if port != service.Port {
				return errors.Errorf("service %s must share the same port on all nodes, different ports found %d and %d",
					virtualClusterName, port, service.Port)
			}
		}
	}
	// Create a load balancer for the service's port.
	err = vm.FanOut(c.VMs, func(provider vm.Provider, vms vm.List) error {
		createErr := provider.CreateLoadBalancer(l, vms, port)
		if createErr != nil {
			// Best-effort cleanup of whatever was partially created.
			l.Errorf("Cleaning up partially-created load balancer (prev err: %s)", createErr)
			cleanupErr := provider.DeleteLoadBalancer(l, vms, port)
			if cleanupErr != nil {
				l.Errorf("Error while cleaning up partially-created load balancer: %s", cleanupErr)
			} else {
				l.Printf("Cleaned up partially-created load balancer")
			}
			return errors.CombineErrors(createErr, cleanupErr)
		}
		return nil
	})
	if err != nil {
		return err
	}
	// For secure clusters, the load balancer IP needs to be added to the
	// cluster's certificate.
	if secure {
		err = c.RedistributeNodeCert(ctx, l)
		if err != nil {
			return err
		}
		// Send a SIGHUP to the nodes to reload the certificates.
		err = c.Signal(ctx, l, int(unix.SIGHUP))
		if err != nil {
			return err
		}
	}
	return nil
}
// LoadBalancerPgURL generates the postgres URL for a load balancer serving the
// given cluster.
//
// If no SQL service is discovered, the default SQL port and an external
// service mode are assumed.
func LoadBalancerPgURL(
	ctx context.Context, l *logger.Logger, clusterName, certsDir string, opts PGURLOptions,
) (string, error) {
	c, err := getClusterFromCache(l, clusterName, install.SecureOption(opts.Secure), install.PGUrlCertsDirOption(certsDir))
	if err != nil {
		return "", err
	}
	services, err := c.DiscoverServices(ctx, opts.VirtualClusterName, install.ServiceTypeSQL,
		install.ServiceInstancePredicate(opts.SQLInstance))
	if err != nil {
		return "", err
	}
	// Fallback defaults when service discovery returns nothing.
	port := config.DefaultSQLPort
	serviceMode := install.ServiceModeExternal
	if len(services) > 0 {
		port = services[0].Port
		serviceMode = services[0].ServiceMode
	}
	// The load balancer is identified by the port it serves.
	addr, err := c.FindLoadBalancer(l, port)
	if err != nil {
		return "", err
	}
	return c.NodeURL(addr.IP, port, opts.VirtualClusterName, serviceMode, opts.Auth, opts.Database), nil
}
// LoadBalancerIP resolves the IP of a load balancer serving the
// given cluster.
//
// The load balancer is located by the SQL service's port; the default SQL
// port is used when no service is discovered.
func LoadBalancerIP(
	ctx context.Context, l *logger.Logger, clusterName, virtualClusterName string, sqlInstance int,
) (string, error) {
	c, err := getClusterFromCache(l, clusterName)
	if err != nil {
		return "", err
	}
	services, err := c.DiscoverServices(ctx, virtualClusterName, install.ServiceTypeSQL,
		install.ServiceInstancePredicate(sqlInstance))
	if err != nil {
		return "", err
	}
	port := config.DefaultSQLPort
	if len(services) > 0 {
		port = services[0].Port
	}
	addr, err := c.FindLoadBalancer(l, port)
	if err != nil {
		return "", err
	}
	return addr.IP, nil
}
// Deploy deploys a new version of cockroach to the given cluster. It currently
// does not support clusters running external SQL instances.
// TODO(herko): Add support for virtual clusters (external SQL processes)
//
// The new binary is staged on every target node first, then nodes are
// restarted one at a time (stop, swap binary, start), optionally pausing
// pauseDuration between nodes. sig/wait/maxWait control how each node is
// stopped. The previous binary is kept as "cockroach.old" on each node.
func Deploy(
	ctx context.Context,
	l *logger.Logger,
	clusterName, applicationName, version string,
	pauseDuration time.Duration,
	sig int,
	wait bool,
	maxWait int,
	secure bool,
) error {
	// Stage supports `workload` as well, so it needs to be excluded here. This
	// list contains a subset that only pulls the cockroach binary.
	supportedApplicationNames := []string{"cockroach", "release", "customized"}
	if !slices.Contains(supportedApplicationNames, applicationName) {
		return errors.Errorf("unsupported application name %s, supported names are %v", applicationName, supportedApplicationNames)
	}
	c, err := getClusterFromCache(l, clusterName, install.SecureOption(secure))
	if err != nil {
		return err
	}
	// Stage the new binary into a scratch dir on every node up front, so the
	// per-node restart below only swaps files.
	stageDir := "stage-cockroach"
	err = c.Run(ctx, l, l.Stdout, l.Stderr, install.WithNodes(c.TargetNodes()), "creating staging dir",
		fmt.Sprintf("rm -rf %[1]s && mkdir -p %[1]s", stageDir))
	if err != nil {
		return err
	}
	err = Stage(ctx, l, clusterName, "", "", stageDir, applicationName, version)
	if err != nil {
		return err
	}
	l.Printf("Performing rolling restart of %d nodes on %s", len(c.VMs), clusterName)
	for _, node := range c.TargetNodes() {
		curNode := []install.Node{node}
		err = c.WithNodes(curNode).Stop(ctx, l, sig, wait, maxWait, "")
		if err != nil {
			return err
		}
		// Keep the old binary as cockroach.old for manual rollback.
		err = c.Run(ctx, l, l.Stdout, l.Stderr, install.WithNodes(curNode),
			"relocate binary", fmt.Sprintf(`
mv -f ./cockroach ./cockroach.old \
&& cp ./%[1]s/cockroach ./cockroach \
&& rm -rf %[1]s`, stageDir))
		if err != nil {
			return err
		}
		err = c.Run(ctx, l, l.Stdout, l.Stderr, install.WithNodes(curNode),
			"start cockroach", "./"+install.StartScriptPath(install.SystemInterfaceName, 0 /* sqlInstance */))
		if err != nil {
			l.Printf("Failed to start cockroach on node %d. The previous binary can be restored from 'cockroach.old'", node)
			return err
		}
		// Optional settle time between node restarts.
		if pauseDuration > 0 {
			l.Printf("Pausing for %s", pauseDuration)
			time.Sleep(pauseDuration)
		}
	}
	return nil
}
// sideEyeEnvToken caches the SIDE_EYE_API_TOKEN environment variable at
// process start; it is empty when the variable is unset.
var sideEyeEnvToken, _ = os.LookupEnv("SIDE_EYE_API_TOKEN")
// CaptureSideEyeSnapshot asks the Side-Eye service to take a snapshot of the
// cockroach processes of this cluster. All errors are logged and swallowed, and
// the call is a no-op if the SIDE_EYE_API_TOKEN is not in the env. The agents
// must previously have been installed and started with the cluster's name as
// the env name.
func CaptureSideEyeSnapshot(ctx context.Context, l *logger.Logger, sideEyeEnv string) {
	// Prefer the env var; fall back to the gcloud-stored secret.
	sideEyeToken := sideEyeEnvToken
	if sideEyeToken == "" {
		sideEyeToken = install.GetGcloudSideEyeSecret()
	}
	if sideEyeToken == "" {
		l.PrintfCtx(ctx, "Side-Eye token is not configured via SIDE_EYE_API_TOKEN or gcloud secret, skipping snapshot")
		return
	}
	l.PrintfCtx(ctx, "capturing snapshot of %s env with Side-Eye", sideEyeEnv)
	client, err := sideeyeclient.NewSideEyeClient(sideeyeclient.WithApiToken(sideEyeToken))
	if err != nil {
		l.Errorf("failed to create side-eye client: %s", err)
		return
	}
	defer client.Close()
	// Protect against the snapshot taking too long.
	snapCtx, cancel := context.WithTimeout(ctx, time.Second*30)
	defer cancel()
	snapRes, err := client.CaptureSnapshot(snapCtx, sideEyeEnv)
	if err != nil {
		msg := err.Error()
		// "No processes" usually means cockroach isn't running; hint at that.
		if errors.Is(err, sideeyeclient.NoProcessesError{}) {
			msg += "; is cockroach running?"
		}
		l.PrintfCtx(ctx, "side-eye failed to capture cluster snapshot: %s", msg)
		return
	}
	l.PrintfCtx(ctx, "captured side-eye snapshot: %s", snapRes.SnapshotURL)
}
// getClusterFromCache finds and returns a SyncedCluster from
// the local cluster cache.
//
// The cluster name can include a node selector (e.g. "foo:1-3").
func getClusterFromCache(
	l *logger.Logger, clusterName string, opts ...install.ClusterSettingOption,
) (*install.SyncedCluster, error) {
	if err := LoadClusters(); err != nil {
		return nil, err
	}
	// newCluster already returns (cluster, error); forward its result directly.
	return newCluster(l, clusterName, opts...)
}
// getClusterFromCloud finds and returns a specified cluster by querying
// provider APIs. This also syncs the local cluster cache through ListCloud.
func getClusterFromCloud(l *logger.Logger, clusterName string) (*cloud.Cluster, error) {
	// ListCloud may fail due to a transient provider error, but we may have
	// still found the cluster we care about. Only surface the listing error
	// when the cluster is genuinely absent from the results.
	cld, listErr := cloud.ListCloud(l, vm.ListOptions{})
	if c, found := cld.Clusters[clusterName]; found {
		return c, nil
	}
	if listErr != nil {
		return &cloud.Cluster{}, errors.Wrapf(listErr, "cluster %s not found", clusterName)
	}
	return &cloud.Cluster{}, fmt.Errorf("cluster %s does not exist", clusterName)
}
|
package com.example.demoauth.config
import io.jsonwebtoken.Claims
import io.jsonwebtoken.Jwts
import io.jsonwebtoken.io.Decoders
import io.jsonwebtoken.security.Keys
import org.springframework.security.core.userdetails.UserDetails
import org.springframework.stereotype.Service
import java.security.Key
import java.util.Date
@Service
class JwtService {

    // NOTE(review): hard-coded signing secret checked into source control.
    // This should be injected from configuration (env var / secrets manager).
    private val secretKey = "703273357638792F423F4528482B4B6250655368566D597133743677397A2443"

    /** Returns the subject (username) claim of [token], or null if absent/unparsable. */
    fun extractUsername(token: String): String? {
        return extractAllClaims(token)?.subject
    }

    /** Generates a signed JWT for [userDetails] with no extra claims. */
    fun generateToken(userDetails: UserDetails): String {
        return generateToken(mapOf(), userDetails)
    }

    private fun generateToken(extraClaims: Map<String, Any>, userDetails: UserDetails): String {
        return Jwts
            .builder()
            .setClaims(extraClaims)
            .setSubject(userDetails.username)
            .setIssuedAt(Date(System.currentTimeMillis()))
            // Token lifetime: 10 hours from issuance.
            .setExpiration(Date(System.currentTimeMillis() + 1000 * 60 * 60 * 10))
            .signWith(getSignInKey(), io.jsonwebtoken.SignatureAlgorithm.HS256)
            .compact()
    }

    /** True when [token]'s subject matches [userDetails] and the token is not expired. */
    fun isTokenValid(token: String, userDetails: UserDetails): Boolean {
        val username = extractUsername(token)
        return username == userDetails.username && !isTokenExpired(token)
    }

    // BUG FIX: the previous implementation stringified the expiration Date
    // claim and then called String.toLong() on it, which always threw and
    // yielded null — so every token, expired or not, was treated as valid.
    // Read the Date claim directly instead.
    private fun isTokenExpired(token: String): Boolean {
        // A token with no expiration claim is treated as non-expired,
        // preserving the original `?: false` behavior.
        return extractExpiration(token)?.before(Date()) ?: false
    }

    /** Returns the expiration Date claim, or null if absent/unparsable. */
    private fun extractExpiration(token: String): Date? {
        return extractAllClaims(token)?.expiration
    }

    private fun extractAllClaims(token: String): Claims? {
        return Jwts
            .parserBuilder()
            .setSigningKey(getSignInKey())
            .build()
            .parseClaimsJws(token)
            .body
    }

    private fun getSignInKey(): Key {
        val keyBytes = Decoders.BASE64.decode(secretKey)
        return Keys.hmacShaKeyFor(keyBytes)
    }
}
|
<template>
<form class="contactForm" ref="form" @submit.prevent="mailSubmit">
<div ref="message" class="message" v-show="message">
      <p>{{ message }}</p>
</div>
<p class="contactNdTitle">Przez formularz</p>
<p class="error" v-if="errors.name">{{ errors.name }}</p>
<input
type="hidden"
id="g-recaptcha-response"
name="g-recaptcha-response"
/>
<input type="hidden" name="action" value="validate_captcha" />
<input
type="text"
class="name"
name="name"
placeholder="Imię i nazwisko"
v-model="mail.name"
/>
    <input
      type="text"
      class="company"
      name="company"
      placeholder="Firma (opcjonalnie)"
      v-model="mail.company"
    />
<p class="error" v-if="errors.email">{{ errors.email }}</p>
    <input
      type="text"
      class="email"
      name="email"
      placeholder="Adres email"
      v-model="mail.email"
    />
<p class="error" v-if="errors.phone">{{ errors.phone }}</p>
    <input
      type="text"
      class="phone"
      name="phone"
      placeholder="Numer telefonu"
      v-model="mail.phone"
    />
<p class="error" v-if="errors.message">{{ errors.message }}</p>
<textarea
placeholder="Opisz w kilku słowach swoje zamówienie"
v-model="mail.message"
name="message"
></textarea>
<input
type="submit"
class="submit g-recaptcha"
data-sitekey="reCAPTCHA_site_key"
data-callback="onSubmit"
data-action="submit"
value="Wyślij"
/>
</form>
</template>
<script>
import emailjs from '@emailjs/browser'
export default {
  data() {
    return {
      // Per-field validation messages; an empty string means the field is valid.
      errors: {
        name: '',
        email: '',
        phone: '',
        message: '',
      },
      // Transient status toast shown after submitting.
      message: '',
      // Form model bound to the template inputs.
      mail: {
        name: '',
        company: '',
        email: '',
        phone: '',
        message: '',
      },
    }
  },
  methods: {
    // Validates required fields, sends the form via EmailJS, and shows a
    // status toast. Company is optional.
    async mailSubmit() {
      this.errors.name = this.mail.name
        ? ''
        : 'Przed wysłaniem musisz uzupełnić imię i nazwisko'
      this.errors.phone = this.mail.phone
        ? ''
        : 'Przed wysłaniem musisz uzupełnić numer telefonu'
      this.errors.email = this.mail.email
        ? ''
        : 'Przed wysłaniem musisz uzupełnić adres e-mail'
      this.errors.message = this.mail.message
        ? ''
        : 'Przed wysłaniem musisz uzupełnić treść wiadomości'
      if (
        this.errors.name ||
        this.errors.email ||
        this.errors.phone ||
        this.errors.message
      ) {
        return
      }
      try {
        // BUG FIX: sendForm returns a Promise; previously it was not awaited,
        // so the success toast appeared and the form was cleared even when
        // sending failed, and rejections were unhandled.
        await emailjs.sendForm(
          process.env.SERVICE_ID,
          process.env.TEMPLATE_ID,
          this.$refs.form,
          process.env.MAIL_KEY
        )
      } catch (err) {
        this.message = 'Nie udało się wysłać wiadomości. Spróbuj ponownie.'
        setTimeout(() => {
          this.message = ''
        }, 2000)
        return
      }
      this.message = 'Wiadomość została wysłana.'
      // Reset the form only after a successful send.
      this.mail = { name: '', company: '', email: '', phone: '', message: '' }
      setTimeout(() => {
        this.message = ''
      }, 2000)
      // grecaptcha.ready(function () {
      //   grecaptcha
      //     .execute(process.env.RECAPTCHA_SITE_KEY, { action: 'submit' })
      //     .then(function (token) {
      //       // Add your logic to submit to your backend server here.
      //       sendMail()
      //     })
      // })
    },
  },
}
</script>
<style scoped lang="scss">
.contactForm {
position: relative;
display: flex;
flex-direction: column;
width: 100%;
input {
width: 100%;
height: 40px;
background: white;
color: #6d6d6d;
border: 1px solid #6d6d6d;
margin: 8px 0;
padding: 0 8px;
font-size: 16px;
}
textarea {
font-size: 16px;
padding: 8px;
height: 200px;
border: 1px solid #6d6d6d;
margin-top: 8px;
}
input[type='submit'] {
background: #005f73;
color: white;
}
}
.message {
color: rgb(232, 229, 229);
position: fixed;
bottom: 32px;
right: 32px;
z-index: 1;
height: 40px;
background: rgb(0, 140, 172);
border: 1px solid rgb(0, 40, 54);
text-align: center;
display: flex;
justify-content: center;
align-items: center;
padding: 32px;
}
.contactNdTitle {
font-size: 22px;
font-weight: bold;
margin: 32px 0 16px 0;
color: #ee9b00;
}
@media (min-width: 1440px) {
.message {
right: 120px;
}
}
.submit {
cursor: pointer;
}
.error {
color: crimson;
}
</style>
|
-- Databricks notebook source
-- MAGIC
-- MAGIC %md-sandbox
-- MAGIC
-- MAGIC <div style="text-align: center; line-height: 0; padding-top: 9px;">
-- MAGIC <img src="https://databricks.com/wp-content/uploads/2018/03/db-academy-rgb-1200px.png" alt="Databricks Learning" style="width: 600px">
-- MAGIC </div>
-- COMMAND ----------
-- MAGIC %md
-- MAGIC # Lab 4 - Delta Lab
-- MAGIC ## Module 8 Assignment
-- MAGIC In this lab, you will continue your work on behalf of Moovio, the fitness tracker company. You will be working with a new set of files that you must move into a "gold-level" table. You will need to modify and repair records, create new columns, and merge late-arriving data.
-- COMMAND ----------
-- MAGIC %run ../Includes/Classroom-Setup
-- COMMAND ----------
-- MAGIC %md
-- MAGIC ## Exercise 1: Create a table
-- MAGIC
-- MAGIC **Summary:** Create a table from `json` files.
-- MAGIC
-- MAGIC Use this path to access the data: <br>
-- MAGIC `"dbfs:/mnt/training/healthcare/tracker/raw.json/"`
-- MAGIC
-- MAGIC Steps to complete:
-- MAGIC * Create a table named `health_tracker_data_2020`
-- MAGIC * Use optional fields to indicate the path you're reading from and express that the schema should be inferred.
-- COMMAND ----------
DROP TABLE IF EXISTS health_tracker_data_2020;
CREATE TABLE health_tracker_data_2020 USING json OPTIONS (
path 'dbfs:/mnt/training/healthcare/tracker/raw.json/',
inferSchema 'true'
);
-- COMMAND ----------
-- MAGIC %md
-- MAGIC ## Exercise 2: Preview the data
-- MAGIC
-- MAGIC **Summary:** View a sample of the data in the table.
-- MAGIC
-- MAGIC Steps to complete:
-- MAGIC * Query the table with `SELECT *` to see all columns
-- MAGIC * Sample 5 rows from the table
-- COMMAND ----------
SELECT
*
FROM
health_tracker_data_2020
LIMIT
5;
-- COMMAND ----------
-- MAGIC %md
-- MAGIC ## Exercise 3: Count Records
-- MAGIC **Summary:** Write a query to find the total number of records
-- MAGIC
-- MAGIC Steps to complete:
-- MAGIC * Count the number of records in the table
-- MAGIC
-- MAGIC **Answer the corresponding question in Coursera**
-- COMMAND ----------
SELECT COUNT(*) FROM health_tracker_data_2020;
-- COMMAND ----------
-- MAGIC %md
-- MAGIC ## Exercise 4: Create a Silver Delta table
-- MAGIC **Summary:** Create a Delta table that transforms and restructures your table
-- MAGIC
-- MAGIC Steps to complete:
-- MAGIC * Drop the existing `month` column
-- MAGIC * Isolate each property of the object in the `value` column to its own column
-- MAGIC * Cast time as timestamp **and** as a date
-- MAGIC * Partition by `device_id`
-- MAGIC * Use Delta to write the table
-- COMMAND ----------
-- NOTE(review): the exercise asks for time cast as timestamp AND as date,
-- but this statement only produces a DATE column — confirm whether a
-- separate timestamp column is expected before grading.
CREATE TABLE silver_delta USING DELTA PARTITIONED BY (deviceId) AS
SELECT
  value.device_id AS deviceId,
  value.heartrate AS heartrate,
  value.name AS name,
  CAST(FROM_UNIXTIME(value.time) AS DATE) AS time
FROM
  health_tracker_data_2020;
-- COMMAND ----------
-- MAGIC %md
-- MAGIC ## Exercise 5: Register table to the metastore
-- MAGIC **Summary:** Register your Silver table to the Metastore
-- MAGIC Steps to complete:
-- MAGIC * Be sure you can run the cell more than once without throwing an error
-- MAGIC * Write to the location: `/health_tracker/silver`
-- COMMAND ----------
DROP TABLE IF EXISTS silver_delta;
CREATE OR REPLACE TABLE silver_delta USING DELTA PARTITIONED BY (deviceId) LOCATION "/health_tracker/silver" AS (
SELECT
value.device_id AS deviceId,
value.heartrate AS heartrate,
value.name AS name,
CAST(FROM_UNIXTIME(value.time) AS DATE) AS time
FROM
health_tracker_data_2020
);
-- COMMAND ----------
-- MAGIC %md
-- MAGIC ## Exercise 6: Check the number of records
-- MAGIC **Summary:** Check to see if all devices are reporting the same number of records
-- MAGIC
-- MAGIC Steps to complete:
-- MAGIC * Write a query that counts the number of records for each device
-- MAGIC * Include your partitioned device id column and the count of those records
-- MAGIC
-- MAGIC **Answer the corresponding question in Coursera**
-- COMMAND ----------
SELECT
deviceId,
COUNT(*)
FROM
silver_delta
GROUP BY
1;
-- COMMAND ----------
-- MAGIC %md
-- MAGIC ## Exercise 7: Plot records
-- MAGIC **Summary:** Attempt to visually assess which dates may be missing records
-- MAGIC
-- MAGIC Steps to complete:
-- MAGIC * Write a query that will return records from one device that is **not** missing records, as well as the device that seems to be missing records
-- MAGIC * Plot the results to visually inspect the data
-- MAGIC * Identify dates that are missing records
-- MAGIC
-- MAGIC **Answer the corresponding question in Coursera**
-- COMMAND ----------
SELECT
COUNT(time) AS time_count,
deviceId,
time
FROM
silver_delta
GROUP BY 2,3;
-- COMMAND ----------
-- MAGIC %md
-- MAGIC ## Exercise 8: Check for Broken Readings
-- MAGIC **Summary:** Check to see if your data contains records that would indicate a device has misreported data
-- MAGIC Steps to complete:
-- MAGIC * Create a view that contains all records reporting a negative heartrate
-- MAGIC * Plot/view that data to see which days include broken readings
-- COMMAND ----------
CREATE VIEW IF NOT EXISTS broken_readings AS
SELECT
*
FROM
silver_delta
WHERE
heartrate < 0;
SELECT
heartrate,
date_format(time, "E") AS day_of_week
FROM
broken_readings;
-- COMMAND ----------
-- MAGIC %md
-- MAGIC ## Exercise 9: Repair records
-- MAGIC **Summary:** Create a view that contains interpolated values for broken readings
-- MAGIC
-- MAGIC Steps to complete:
-- MAGIC * Create a temporary view that will hold all the records you want to update.
-- MAGIC * Transform the data such that all broken readings (where heartrate is reported as less than zero) are interpolated as the mean of the the data points immediately surrounding the broken reading.
-- MAGIC * After you write the view, count the number of records in it.
-- MAGIC
-- MAGIC **Answer the corresponding question in Coursera**
-- COMMAND ----------
SELECT
CONCAT(deviceId, "-", name, "-", time),
COUNT(*) AS cnt_p_key
FROM
silver_delta
GROUP BY 1;
-- COMMAND ----------
-- TODO: Update this query
-- BUG FIX: the original had a dangling `WHERE` with no predicate before
-- `GROUP BY`, which is a syntax error; the clause is removed. The
-- interpolation of broken readings (mean of surrounding points) is still
-- left to complete per the exercise.
CREATE
OR REPLACE TEMP VIEW temp_fix_readings AS
SELECT
  deviceId,
  IF(heartrate < 0, AVG(heartrate), heartrate) AS heartrate,
  name,
  time
FROM
  silver_delta
GROUP BY
  1, 3, 4, heartrate;
CREATE OR REPLACE TEMP VIEW fix_readings AS
WITH temp_table AS (
SELECT
*,
CONCAT(deviceId, "-", name, "-", time) AS p_key
FROM
temp_fix_readings
)
SELECT
deviceId,
heartrate,
name,
time
FROM
temp_table
WHERE
p_key IN (
SELECT
CONCAT(deviceId, "-", name, "-", time) as p_key
FROM
broken_readings
);
SELECT COUNT(*) FROM fix_readings;
-- COMMAND ----------
-- MAGIC %md
-- MAGIC ## Exercise 10: Read late-arriving data
-- MAGIC **Summary:** Read in new late-arriving data
-- MAGIC
-- MAGIC Steps to complete:
-- MAGIC * Create a new table that contains the late arriving data at this path: `"dbfs:/mnt/training/healthcare/tracker/raw-late.json"`
-- MAGIC * Count the records <br/>
-- MAGIC
-- MAGIC **Answer the corresponding question in Coursera**
-- COMMAND ----------
DROP TABLE IF EXISTS late_data;
CREATE TABLE late_data USING json OPTIONS (
path "dbfs:/mnt/training/healthcare/tracker/raw-late.json",
inferSchema true
);
SELECT
COUNT(*) AS count_late_data
FROM
late_data;
-- COMMAND ----------
-- MAGIC %md
-- MAGIC ## Exercise 11: Prepare inserts
-- MAGIC **Summary:** Prepare your new, late-arriving data for insertion into the Silver table
-- MAGIC
-- MAGIC Steps to complete:
-- MAGIC * Create a temporary view that holds the new late-arriving data
-- MAGIC * Apply transformations to the data so that the schema matches our existing Silver table
-- COMMAND ----------
-- BUG FIX: `IF EXISTS` prevents an error on the first run, when the view
-- does not exist yet. (CREATE OR REPLACE below makes the drop redundant,
-- but it is kept to match the lab flow.)
DROP VIEW IF EXISTS late_data_delta;
CREATE OR REPLACE TEMP VIEW late_data_delta AS (
  SELECT
    value.device_id AS deviceId,
    value.heartrate AS heartrate,
    value.name AS name,
    CAST(FROM_UNIXTIME(value.time) AS DATE) AS time
  FROM
    late_data
);
SELECT COUNT(*) FROM late_data_delta;
-- COMMAND ----------
-- MAGIC %md
-- MAGIC ## Exercise 12: Prepare upserts
-- MAGIC **Summary:** Prepare a view to upsert to our Silver table
-- MAGIC
-- MAGIC Steps to complete:
-- MAGIC * Create a temporary view that is the `UNION` of the views that hold data you want to insert and data you want to update
-- MAGIC * Count the records
-- MAGIC
-- MAGIC **Answer the corresponding question in Coursera**
-- COMMAND ----------
CREATE TEMP VIEW union_silver_data_view AS
SELECT
*
FROM
late_data_delta
UNION
SELECT
*
FROM
fix_readings;
SELECT
COUNT(*) COUNT
FROM
union_silver_data_view;
-- COMMAND ----------
-- MAGIC %md
-- MAGIC ## Exercise 13: Perform upserts
-- MAGIC
-- MAGIC **Summary:** Merge the upserts into your Silver table
-- MAGIC
-- MAGIC Steps to complete:
-- MAGIC * Merge data on the time and device id columns from your Silver table and your upserts table
-- MAGIC * Use `MATCH`conditions to decide whether to apply an update or an insert
-- COMMAND ----------
SELECT COUNT(*) FROM silver_delta;
-- COMMAND ----------
-- MAGIC %md
-- MAGIC ## Exercise 14: Write to gold
-- MAGIC **Summary:** Create a Gold level table that holds aggregated data
-- MAGIC
-- MAGIC Steps to complete:
-- MAGIC * Create a Gold-level Delta table
-- MAGIC * Aggregate heartrate to display the average and standard deviation for each device.
-- MAGIC * Count the number of records
-- COMMAND ----------
--TODO
-- COMMAND ----------
-- MAGIC %md
-- MAGIC ## Cleanup
-- MAGIC Run the following cell to clean up your workspace.
-- COMMAND ----------
-- %run ../Includes/Classroom-Cleanup
-- COMMAND ----------
-- MAGIC %md-sandbox
-- MAGIC © 2020 Databricks, Inc. All rights reserved.<br/>
-- MAGIC Apache, Apache Spark, Spark and the Spark logo are trademarks of the <a href="http://www.apache.org/">Apache Software Foundation</a>.<br/>
-- MAGIC <br/>
-- MAGIC <a href="https://databricks.com/privacy-policy">Privacy Policy</a> | <a href="https://databricks.com/terms-of-use">Terms of Use</a> | <a href="http://help.databricks.com/">Support</a>
|
#!/usr/bin/env python3
import os, time
import rospy
import math
from dynamixel_sdk import *
from uhvat_ros_driver.srv import *
from sensor_msgs.msg import JointState
from std_srvs.srv import Empty
import threading
lock = threading.Lock()  # serializes Dynamixel bus access across service calls
import sys, tty, termios
fd = sys.stdin.fileno()  # stdin file descriptor used for raw-mode reads in getch()
old_settings = termios.tcgetattr(fd)  # saved terminal attrs, restored after each getch()
def getch():
    """Block until a single character is read from stdin in raw mode.

    The terminal attributes saved at import time are always restored,
    even if switching to raw mode or the read itself fails.
    """
    try:
        tty.setraw(sys.stdin.fileno())
        key = sys.stdin.read(1)
    finally:
        termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)
    return key
# gripper setup
LEFT_DXL_ID = 13   # Dynamixel bus ID of the left-finger servo
RIGHT_DXL_ID = 14  # Dynamixel bus ID of the right-finger servo
LEFT_CLOSED_POSITION = 512   # left goal position [tics] for the "almost closed" preset
RIGHT_CLOSED_POSITION = 512  # right goal position [tics] for the "almost closed" preset
FULL_CLOSED = 80             # extra travel [tics] added/subtracted for a full close
LEFT_OPENED_POSITION = 512 - 300   # left goal position [tics] for the "wide open" preset
RIGHT_OPENED_POSITION = 515 + 300  # NOTE(review): 515 here vs 512 above -- confirm intentional
LESS_WIDE_OPENED = 200       # offset [tics] producing the narrower open preset
# connection setup -- Dynamixel protocol 1.0 control-table addresses
ADDR_COMPLIANCE_CW = 28      # compliance setting, clockwise (per AX control table -- confirm)
ADDR_COMPLIANCE_CC = 29      # compliance setting, counter-clockwise
ADDR_TORQUE_ENABLE = 24
ADDR_GOAL_POSITION = 30
ADDR_PRESENT_POSITION = 36
PROTOCOL_VERSION = 1.0 # See which protocol version is used in the Dynamixel
BAUDRATE = 57600 # Dynamixel default baudrate : 57600
DEVICENAME = '/dev/ttyUSB0' # Check which port is being used on your controller
TORQUE_ENABLE = 1 # Value for enabling the torque
TORQUE_DISABLE = 0 # Value for disabling the torque
DXL_MINIMUM_POSITION_VALUE = 0 # Dynamixel will rotate between this value
DXL_MAXIMUM_POSITION_VALUE = 1000 # and this value (note that the Dynamixel would not move when the position value is out of movable range. Check e-manual about the range of the Dynamixel you use.)
DXL_MOVING_STATUS_THRESHOLD = 20 # Dynamixel moving status threshold
portHandler = PortHandler(DEVICENAME)            # shared serial-port handle for both servos
packetHandler = PacketHandler(PROTOCOL_VERSION)  # protocol-1.0 packet encoder/decoder
class GripperDriver(object):
    """ROS driver node for a two-finger Dynamixel gripper (protocol 1.0).

    Services:
      * ``gripper_state``  (SetGripperState): move the fingers to a preset
        position (states 0-3) or close in speed mode with a preset force
        (states 4-6).
      * ``gripper_reboot`` (Empty): reboot both servos and restore
        position mode.

    :meth:`spin` continuously publishes the finger angles on
    ``/joint_states``.  Access to the shared serial bus is coordinated via
    the module-level ``lock`` and the ``driver_state`` flag.
    """

    def __init__(self) -> None:
        super().__init__()
        rospy.init_node('driver_node')
        rospy.Service('gripper_state', SetGripperState, self.__set_gripper_state)
        rospy.Service('gripper_reboot', Empty, self.__reboot)
        # rospy.Service('gripper_change_mode', Empty, self.__change_mode)
        # state publisher
        self.pub = rospy.Publisher('/joint_states', JointState, queue_size=10)
        self.mode = 'position'  # 'position' or 'speed'
        self.left_pos, self.right_pos = 0, 0            # last accepted raw readings [tics]
        self.prev_left_pos, self.prev_right_pos = 0, 0  # previous filtered values [rad]
        self.driver_state = 'ready'  # `read` or `write` or `ready`

    def init(self):
        """
        connect to usb
        motors turn on
        """
        try:
            portHandler.openPort()
            portHandler.setBaudRate(BAUDRATE)
            print("Usb connection fine")
            self.__position_mode()
            # self.__speed_mode()
        except Exception as e:
            print(e)
            print("Gripper connection failed")
            print("Press any key to terminate...")
            getch()
            quit()
        print("Gripper driver is working")

    def __position_mode(self):
        """Configure both servos for position (joint) control and enable torque."""
        for id in [LEFT_DXL_ID, RIGHT_DXL_ID]:
            # Raw register addresses used below: 32 = moving speed,
            # 6/8 = CW/CCW angle limits (non-zero CCW limit selects joint
            # mode on AX-series servos -- confirm against the control table),
            # 34 = torque limit, 25 = LED.
            dxl_comm_result, dxl_error = packetHandler.write2ByteTxRx(portHandler, id, 32, 0)
            dxl_comm_result, dxl_error = packetHandler.write1ByteTxRx(portHandler, id, ADDR_TORQUE_ENABLE, 0)
            dxl_comm_result, dxl_error = packetHandler.write2ByteTxRx(portHandler, id, 6, 0)
            dxl_comm_result, dxl_error = packetHandler.write2ByteTxRx(portHandler, id, 8, 1023)
            dxl_comm_result, dxl_error = packetHandler.write2ByteTxRx(portHandler, id, 34, 1023)
            dxl_comm_result, dxl_error = packetHandler.write1ByteTxRx(portHandler, id, ADDR_TORQUE_ENABLE, 1)
            dxl_comm_result, dxl_error = packetHandler.write1ByteTxRx(portHandler, id, 25, 1)
            # NOTE(review): only the result of the last write is inspected;
            # earlier write failures are silently ignored -- confirm intended.
            if dxl_comm_result != COMM_SUCCESS or dxl_error != 0:
                print("%s" % packetHandler.getTxRxResult(dxl_comm_result))
                print("Press any key to terminate...")
                getch()
                quit()
            else:
                print("Servomotors has been successfully connected")
            dxl_comm_result, dxl_error = packetHandler.write1ByteTxRx(portHandler, id, ADDR_COMPLIANCE_CW, 254)
            dxl_comm_result, dxl_error = packetHandler.write1ByteTxRx(portHandler, id, ADDR_COMPLIANCE_CC, 254)

    def __speed_mode(self):
        """Configure both servos for speed (wheel) control and enable torque."""
        # Enable Dynamixel Torque
        for id in [LEFT_DXL_ID, RIGHT_DXL_ID]:
            # Both angle limits (addresses 6 and 8) set to 0 selects wheel
            # (continuous-rotation) mode -- confirm against the control table.
            dxl_comm_result, dxl_error = packetHandler.write1ByteTxRx(portHandler, id, ADDR_TORQUE_ENABLE, 0)
            dxl_comm_result, dxl_error = packetHandler.write2ByteTxRx(portHandler, id, 6, 0)
            dxl_comm_result, dxl_error = packetHandler.write2ByteTxRx(portHandler, id, 8, 0)
            dxl_comm_result, dxl_error = packetHandler.write1ByteTxRx(portHandler, id, ADDR_TORQUE_ENABLE, 1)
            dxl_comm_result, dxl_error = packetHandler.write1ByteTxRx(portHandler, id, 25, 1)
            # NOTE(review): as in __position_mode, only the last write is checked.
            if dxl_comm_result != COMM_SUCCESS or dxl_error != 0:
                print("%s" % packetHandler.getTxRxResult(dxl_comm_result))
                print("Press any key to terminate...")
                getch()
                quit()
            else:
                print("Servomotors has been successfully connected")

    def __get_gripper_state(self):
        """Read and return the raw present-position values ``(left, right)``.

        NOTE(review): this reads 4 bytes starting at ADDR_PRESENT_POSITION,
        although present position is a 2-byte register on protocol-1.0
        servos, so the result may also contain the adjacent speed register;
        the plausibility filter in spin() (``l + r < 3000``) appears to
        compensate -- confirm.
        """
        l_dxl_present_position, l_dxl_comm_result, l_dxl_error = packetHandler.read4ByteTxRx(portHandler, LEFT_DXL_ID, ADDR_PRESENT_POSITION)
        r_dxl_present_position, r_dxl_comm_result, r_dxl_error = packetHandler.read4ByteTxRx(portHandler, RIGHT_DXL_ID, ADDR_PRESENT_POSITION)
        return l_dxl_present_position, r_dxl_present_position

    def __set_gripper_state(self, req):
        """Service handler: apply the preset encoded in ``req.state``.

        0 = wide open, 1 = less-wide open, 2 = almost closed, 3 = fully
        closed (position mode); 4/5/6 = close in speed mode with
        increasing force.  Unknown states are logged and ignored.
        Returns an empty list (empty service response).
        """
        lock.acquire()
        try:
            if self.driver_state == 'ready':
                self.driver_state = 'write'
                # wait for spin() to finish any in-flight bus transaction
                while portHandler.is_using:
                    time.sleep(0.01)
                if req.state == 0:  # wide opened
                    self.__switch('position')
                    dxl_comm_result, dxl_error = packetHandler.write4ByteTxRx(portHandler, LEFT_DXL_ID, ADDR_GOAL_POSITION, LEFT_OPENED_POSITION)
                    dxl_comm_result, dxl_error = packetHandler.write4ByteTxRx(portHandler, RIGHT_DXL_ID, ADDR_GOAL_POSITION, RIGHT_OPENED_POSITION)
                elif req.state == 1:  # less wide opened
                    self.__switch('position')
                    dxl_comm_result, dxl_error = packetHandler.write4ByteTxRx(portHandler, LEFT_DXL_ID, ADDR_GOAL_POSITION, LEFT_OPENED_POSITION + LESS_WIDE_OPENED)
                    dxl_comm_result, dxl_error = packetHandler.write4ByteTxRx(portHandler, RIGHT_DXL_ID, ADDR_GOAL_POSITION, RIGHT_OPENED_POSITION - LESS_WIDE_OPENED)
                elif req.state == 2:  # before position of full closed
                    self.__switch('position')
                    dxl_comm_result, dxl_error = packetHandler.write4ByteTxRx(portHandler, LEFT_DXL_ID, ADDR_GOAL_POSITION, LEFT_CLOSED_POSITION)
                    dxl_comm_result, dxl_error = packetHandler.write4ByteTxRx(portHandler, RIGHT_DXL_ID, ADDR_GOAL_POSITION, RIGHT_CLOSED_POSITION)
                elif req.state == 3:  # full closed
                    self.__switch('position')
                    dxl_comm_result, dxl_error = packetHandler.write4ByteTxRx(portHandler, LEFT_DXL_ID, ADDR_GOAL_POSITION, LEFT_CLOSED_POSITION + FULL_CLOSED)
                    dxl_comm_result, dxl_error = packetHandler.write4ByteTxRx(portHandler, RIGHT_DXL_ID, ADDR_GOAL_POSITION, RIGHT_CLOSED_POSITION - FULL_CLOSED)
                elif req.state == 4:  # speed -- low force
                    self.__switch('speed')
                    # Register 32 is moving speed; the value written to the
                    # right servo (1023 + N) presumably sets the direction bit
                    # so both fingers close toward each other.  NOTE(review):
                    # 1023 + N equals 1024 + (N - 1) -- confirm the intended
                    # magnitude is not off by one.
                    dxl_comm_result, dxl_error = packetHandler.write2ByteTxRx(portHandler, LEFT_DXL_ID, 32, 150)
                    dxl_comm_result, dxl_error = packetHandler.write2ByteTxRx(portHandler, RIGHT_DXL_ID, 32, 1023 + 150)
                elif req.state == 5:  # speed -- more force
                    self.__switch('speed')
                    dxl_comm_result, dxl_error = packetHandler.write2ByteTxRx(portHandler, LEFT_DXL_ID, 32, 300)
                    dxl_comm_result, dxl_error = packetHandler.write2ByteTxRx(portHandler, RIGHT_DXL_ID, 32, 1023 + 300)
                elif req.state == 6:  # speed -- max force
                    self.__switch('speed')
                    dxl_comm_result, dxl_error = packetHandler.write2ByteTxRx(portHandler, LEFT_DXL_ID, 32, 450)
                    dxl_comm_result, dxl_error = packetHandler.write2ByteTxRx(portHandler, RIGHT_DXL_ID, 32, 1023 + 450)
                else:
                    rospy.logwarn(f"There is no state \"{req.state}\"")
                self.driver_state = 'ready'
        finally:
            # BUG FIX: release the lock even if a bus write raises, so the
            # service does not deadlock on every subsequent call.
            lock.release()
        return []

    def __reboot(self, req):
        """Service handler: reboot both servos and restore position mode."""
        for id in [LEFT_DXL_ID, RIGHT_DXL_ID]:
            dxl_comm_result, dxl_error = packetHandler.reboot(portHandler, id)
        self.__position_mode()
        rospy.logwarn('Rebooted!')
        # BUG FIX: a rospy service handler must not return None (rospy
        # reports "service handler returned None"); an empty list is
        # accepted as an empty response, matching __set_gripper_state.
        return []

    def __switch(self, mode):
        """Reconfigure the servos for ``mode`` ('position' or 'speed')."""
        self.mode = mode
        if mode == 'position':
            self.__position_mode()
        else:
            self.__speed_mode()
        rospy.loginfo(f"mode: {self.mode}")

    def __change_mode(self, req):
        """(Currently unregistered service handler) toggle position/speed mode."""
        if self.mode == 'position':
            self.__switch('speed')
        else:
            self.__switch('position')

    def spin(self):
        """
        The gripper illustration:
        |   |
        |.|      --- flange
        \| |/    --- fingers
        0   1    --- indexes in urdf
        left right --- view from top direction
        Opened (full) position is [tics]:
        214 810  --- let's it is a zero position
        Closed (full) position is [tics]:
        572 438  --- it is a limit position
        [tics to deg] =>
        alpha = (pos - zero_position) / 1024
        """
        LEFT_ZERO_POSITION = 214 * 300 / 1024
        RIGHT_ZERO_POSITION = 810 * 300 / 1024
        rate = rospy.Rate(300)
        while not rospy.is_shutdown():
            if self.driver_state == 'ready':
                # BUG FIX: these two state transitions previously used `==`
                # (a no-op comparison) instead of `=`, so driver_state never
                # actually changed to 'read' and back.
                self.driver_state = 'read'
                if not portHandler.is_using:
                    l, r = self.__get_gripper_state()
                    # discard implausible (corrupted) readings
                    if l + r < 3000:
                        self.left_pos, self.right_pos = l, r
                # print(self.left_pos, self.right_pos)
                self.driver_state = 'ready'
            # transform [tics] to [rad] with offset
            left_pos_rad = (LEFT_ZERO_POSITION - self.left_pos * 300 / 1024) * math.pi / 180
            right_pos_rad = (RIGHT_ZERO_POSITION - self.right_pos * 300 / 1024) * math.pi / 180
            # filtering measurements: two-sample moving average with the
            # previous published value
            left_pos_rad = (left_pos_rad + self.prev_left_pos) / 2
            right_pos_rad = (right_pos_rad + self.prev_right_pos) / 2
            self.prev_left_pos, self.prev_right_pos = left_pos_rad, right_pos_rad
            # print(left_pos_rad, right_pos_rad) # debug msg
            try:
                msg = JointState()
                msg.header.stamp.secs = rospy.get_rostime().secs
                msg.header.stamp.nsecs = rospy.get_rostime().nsecs
                # TODO using lists below
                msg.name.append('uhvat_joint_1') # left but right on the arm :)
                msg.name.append('uhvat_joint_2')
                msg.position.append(left_pos_rad) # left
                msg.position.append(right_pos_rad)
                self.pub.publish(msg)
            except Exception as e:
                print(e)
            rate.sleep()
def main():
    """Construct the driver, connect to the hardware, and publish until shutdown."""
    driver = GripperDriver()
    driver.init()
    driver.spin()

if __name__ == '__main__':
    main()
|
import React from "react";
import Link from "next/link";
import { FaPencilAlt, FaTimes } from "react-icons/fa";
import styles from "@/styles/DashboardEvent.module.css";
import { EventInterface } from "@/types/eventInterface";
/** Props for the DashboardEvent list item. */
interface DashboardEventProps {
  /** Event to render; its display fields are read from `event.attributes`. */
  event: EventInterface;
  /** Invoked with the event's id when the user clicks "Delete Event". */
  handleDelete: (id: string | number) => void;
}
/**
 * Single event row on the dashboard: links to the event page, an edit
 * page link, and a delete action that defers to the parent via
 * `handleDelete`.
 */
const DashboardEvent = ({ event, handleDelete }: DashboardEventProps) => {
  // Pull the rendered fields out of the event once, up front.
  const { id, attributes } = event;

  // Forward this event's id to the parent-supplied delete handler.
  const onDeleteClick = () => handleDelete(id);

  return (
    <div className={styles.event}>
      <h4>
        <Link href={`/events/${attributes.slug}`}>
          <a>{attributes.name}</a>
        </Link>
      </h4>
      <Link href={`/events/edit/${id}`}>
        <a className={styles.edit}>
          <FaPencilAlt /> edit event
        </a>
      </Link>
      <a className={styles.delete} onClick={onDeleteClick}>
        <FaTimes /> Delete Event
      </a>
    </div>
  );
};
export default DashboardEvent;
|
*> HEADER,COBOL,IX209A
IDENTIFICATION DIVISION.
PROGRAM-ID.
IX209A.
*> ***************************************************************
*> *
*> VALIDATION FOR:- *
*> *
*> "ON-SITE VALIDATION, NATIONAL INSTITUTE OF STD & TECH. ".
*> *
*> "COBOL 85 VERSION 4.2, Apr 1993 SSVG ".
*> *
*> ***************************************************************
*> *
*> X-CARDS USED BY THIS PROGRAM ARE :- *
*> *
*> X-55 - SYSTEM PRINTER NAME. *
*> X-82 - SOURCE COMPUTER NAME. *
*> X-83 - OBJECT COMPUTER NAME. *
*> *
*> ***************************************************************
*> "IX209A"
*> *****************************************************************
*> THE PURPOSE OF THIS PROGRAM IS TO TEST USE OF THE
*> START --- EQUAL TO --- STATEMENT USING FIRST THE PRIME
*> RECORD KEY AND THEN WITH EACH OF THE ALTERNATE RECORD KEYS
*> AS THE KEY OF REFERENCE. THE START STATEMENT NAMES,
*> IN ITS CONSTRUCT , EITHER THE DATA NAME SPECIFIED IN THE
*> KEY CLAUSE OR A DATA ITEM THAT IS SUBORDINATE TO THE
*> KEY NAME. DIFFERENT KEY VALUES ARE USED FOR TESTING.
*> IF A KEY VALUE IS PROVIDED WHICH MATCHES A RECORD IN THE FILE
*> WHEN THE START IS EXECUTED THEN THE RECORD IS EXPECTED TO
*> MADE AVAILABLE BY THE SUBSEQUENT READ STATEMENT. IF A KEY
*> VALUE IS PROVIDED WHICH DOES NOT MATCH ANY RECORD IN THE
*> FILE THEN THE INVALID KEY PATH IS EXPECTED TO BE TAKEN.
*> THE FILE STATUS CONTENTS RESULTING FROM EXECUTION OF THE
*> START TESTS ARE SAVED AND CHECKED IN LATER TESTS.
*>
*> REFERENCE AMERICAN NATIONAL STANDARD
*> PROGRAMMING LANGUAGE COBOL, X3.23-198X.
*> SECTION IX, INDEX I-O, THE START
*> STATEMENT. PARAGRAPHS 4.7.3 (3), (4);
*> 4.7.4 (1), (4), (5)
*> AND
*> THE FILE STATUS PARAGRAPH 1.3.4
*>
*> BEFORE EXECUTION OF THE START IN EACH TEST, A RECORD IS MADE
*> AVAILABLE FROM THE FILE THAT IS DIFFERENT THAN WILL RESULT
*> FROM THE TEST, AND THE RECORD KEY IS LOADED WITH A KEY VALUE.
*> DEPENDING ON THE NATURE OF THE TEST THE KEY VALUE MAY OR
*> MAY NOT BE A VALID KEY FOR THE FILE.
*>
*> THIS PROGRAM FIRST CREATES AN INDEXED SEQUENTIAL FILE
*> CONTAINING TWO ALTERNATE KEYS AND THE ONE REQUIRED RECORD
*> KEY FOR THE FILE. IMMEDIATELY FOLLOWING FILE CREATION THE
*> FILE IS READ AND THE RECORDS OF THE FILE VERIFIED FOR
*> ACCURACY. NEXT THE TESTS ARE EXECUTED USING THE START ---
*> EQUAL TO --- STATEMENT.
*> THE RECORDS IN THE FILE ARE CREATED IN SEQUENTIAL ORDER BY
*> RECORD KEY VALUE. FOLLOWING IS A SAMPLE OF THE DATA
*> CONTENTS FOR THE RECORD KEY AND TWO ALTERNATE RECORD KEYS IN
*> THE FILE.
*>
*> REC-NO RECORD-KEY ALTERNATE-KEY-1 ALTERNATE-KEY-2
*> ------ ---------- --------------- ---------------
*> 001 BBBBBBBBBC002 EEEEEEEEEF000ALTKEY1 WWWWWWWWWV398ALTKEY2
*> 002 BBBBBBBBCC004 EEEEEEEEFF004ALTKEY1 WWWWWWWWVV396ALTKEY2
*> 003 BBBBBBBCCC006 EEEEEEEFFF006ALTKEY1 WWWWWWWVVV394ALTKEY2
*> . . . .
*> . . . .
*> . . . .
*> 010 CCCCCCCCCC020 FFFFFFFFFF020ALTKEY1 VVVVVVVVVV380ALTKEY2
*> 011 CCCCCCCCCD022 FFFFFFFFFG022ALTKEY1 VVVVVVVVVV380ALTKEY2
*> 012 CCCCCCCCDD024 FFFFFFFFGG024ALTKEY1 VVVVVVVVUU376ALTKEY2
*> . . . .
*> . . . .
*> . . . .
*> 200 UUUUUUUUUU400 YYYYYYYYYY400ALTKEY1 DDDDDDDDDD000ALTKEY2
*>
*> NOTE 1 - ALTERNATE KEY NUMBER 2 CONTAINS DUPLICATE KEYS
*> EVERY 10TH AND 11TH RECORDS.
*>
*> NOTE 2 - THE FIRST 50 RECORDS AND LAST 25 RECORDS OF THE
*> FILE FOLLOW THE ABOVE SEQUENTIAL KEY PATTERN. FOR THE MIDDLE
*> 125 RECORDS ONLY THE NUMBER PART OF THE KEYS ARE VARIED
*> AND VARIED IN THE SEQUENCE SHOWN ABOVE. THAT IS, RECORD-KEY
*> AND ALTERNATE-KEY-1 ARE INCREMENTED BY 2 AND THE ALTERNATE-
*> KEY-2 IS DECREMENTED BY 2 EACH TIME A RECORD IS WRITTEN TO
*> THE FILE. THE FILE IS DESIGNED TO BE LARGE ENOUGH SO THAT
*> AN I-O OPERATION IS REQUIRED FOR EACH RECORD ACCESSED FROM
*> THE FILE.
*>
*> FILE CHARACTERISTICS ARE: FILE SIZE = 200 RECORDS
*> RECORD SIZE = 240 CHARS.
*> RECORD KEY SIZE = 13 CHARS.
*> ALTERNATE KEY 1 SIZE = 20 CHARS.
*> ALTERNATE KEY 2 SIZE = 20 CHARS.
*> ACCESS MODE = SEQUENTIAL
*>
*> A LIST OF COBOL ELEMENTS WITH THE PARAGRAPH NAME IN PARENTH-
*> ESIS THAT TESTS THE ELEMENT AND A SHORT DESCRIPTION OF THE
*> TEST FOLLOWS.
*>
*> WRITE --- INVALID KEY---. (INX-TEST-001) - THIS TEST CREATES
*> A FILE OF 200 RECORDS CONTAINING ONE RECORD KEY AND
*> TWO ALTERNATE KEYS.
*> READ ---AT END ---. (INX-TEST-002) - THIS TEST READS THE
*> FILE CREATED IN INX-TEST-001 AND VERIFIES THAT THE
*> FILE WAS CREATED CORRECTLY.
*> START --- KEY IS EQUAL TO RECORD-KEY INVALID KEY ---. (INX-
*> TEST-003.01 THRU INX-TEST-003.04) - THE START
*> STATEMENT IS EXECUTED USING THE RECORD-KEY FOR THE
*> FILE CONTAINING KEY VALUES WHICH RESPECTIVELY EQUAL A
*> RECORD IN THE FILE (.01), BETWEEN TWO EXISTING KEY
*> VALUES (.02), LESS THAN THE FIRST RECORD IN THE FILE
*> (.03) AND GREATER THAN THE LAST RECORD IN THE
*> FILE (.04).
*> START --- KEY IS EQUAL TO DATA-ITEM INVALID KEY ---. (INX-
*> TEST-003.05 THRU INX-TEST-003.09) - THE START
*> STATEMENT IS EXECUTED USING A DATA ITEM WHICH IS
*> SUBORDINATE TO THE RECORD-KEY NAME OF THE FILE
*> AND CONTAINING KEY VALUES WHICH RESPECTIVELY EQUAL A
*> RECORD IN THE FILE (.05), EQUAL A VALUE PRESENT IN
*> IN MORE THAN ONE RECORD IN THE FILE (.06),
*> NOT EQUAL TO ANY RECORD IN THE FILE (.07), LESS THAN
*> THE FIRST RECORD IN THE FILE (.08) AND GREATER THAN
*> THE LAST RECORD IN THE FILE (.09).
*> FILE STATUS. (INX-TEST-004.01 THRU INX-TEST-004.09) - THESE
*> TESTS CHECK THE CONTENTS OF THE FILE STATUS RESULTING
*> FROM THE START IN INX-TEST-003.01 THRU
*> INX-TEST-003.09.
*> START --- KEY IS EQUAL TO ALTERNATE-KEY INVALID KEY --. (INX-
*> TEST-005.01 THRU INX-TEST-005.04) - THE START
*> STATEMENT IS EXECUTED USING THE ALTERNATE-KEY FOR THE
*> FILE CONTAINING KEY VALUES WHICH RESPECTIVELY EQUAL A
*> RECORD IN THE FILE (.01), BETWEEN TWO EXISTING KEY
*> VALUES (.02), LESS THAN THE FIRST RECORD IN THE FILE
*> (.03) AND GREATER THAN THE LAST RECORD IN THE
*> FILE (.04).
*> START --- KEY IS EQUAL TO DATA-ITEM INVALID KEY ---. (INX-
*> TEST-005.05 THRU INX-TEST-005.09) - THE START
*> STATEMENT IS EXECUTED USING A DATA ITEM WHICH IS
*> SUBORDINATE TO THE ALTERNATE-KEY NAME OF THE FILE
*> AND CONTAINING KEY VALUES WHICH RESPECTIVELY EQUAL A
*> RECORD IN THE FILE (.05), EQUAL A VALUE PRESENT IN
*> IN MORE THAN ONE RECORD IN THE FILE (.06),
*> NOT EQUAL TO ANY RECORD IN THE FILE (.07), LESS THAN
*> THE FIRST RECORD IN THE FILE (.08) AND GREATER THAN
*> THE LAST RECORD IN THE FILE (.09).
*> FILE STATUS. (INX-TEST-006.01 THRU INX-TEST-006.09) - THESE
*> TESTS CHECK THE CONTENTS OF THE FILE STATUS RESULTING
*> FROM THE START IN INX-TEST-005.01 THRU
*> INX-TEST-005.09.
*> START --- KEY IS EQUAL TO ALTERNATE-KEY INVALID KEY --. (INX-
*> TEST-007.01 THRU INX-TEST-007.04) - THE START
*> STATEMENT IS EXECUTED USING THE ALTERNATE-KEY
*> WHICH SPECIFIES THE DUPLICATES OPTION FOR THE FILE
*> AND CONTAINING KEY VALUES WHICH RESPECTIVELY EQUAL A
*> RECORD IN THE FILE (.01), BETWEEN TWO EXISTING KEY
*> VALUES (.02), LESS THAN THE FIRST RECORD IN THE FILE
*> (.03) AND GREATER THAN THE LAST RECORD IN THE
*> FILE (.04).
*> START --- KEY IS EQUAL TO DATA-ITEM INVALID KEY ---. (INX-
*> TEST-007.05 THRU INX-TEST-007.09) - THE START
*> STATEMENT IS EXECUTED USING A DATA ITEM WHICH IS
*> SUBORDINATE TO THE ALTERNATE-KEY W/DUP FOR THE FILE
*> AND CONTAINING KEY VALUES WHICH RESPECTIVELY EQUAL A
*> RECORD IN THE FILE (.05), EQUAL A VALUE PRESENT IN
*> IN MORE THAN ONE RECORD IN THE FILE (.06),
*> NOT EQUAL TO ANY RECORD IN THE FILE (.07), LESS THAN
*> THE FIRST RECORD IN THE FILE (.08) AND GREATER THAN
*> THE LAST RECORD IN THE FILE (.09).
*> FILE STATUS. (INX-TEST-008.01 THRU INX-TEST-008.09) - THESE
*> TESTS CHECK THE CONTENTS OF THE FILE STATUS RESULTING
*> FROM THE START IN INX-TEST-007.01 THRU
*> INX-TEST-007.09.
*>
*> *****************************************************************
ENVIRONMENT DIVISION.
CONFIGURATION SECTION.
SOURCE-COMPUTER.
XXXXX082.
OBJECT-COMPUTER.
XXXXX083.
INPUT-OUTPUT SECTION.
FILE-CONTROL.
*> SELECT RAW-DATA ASSIGN TO
*> XXXXX062
*> ORGANIZATION IS INDEXED
*> ACCESS MODE IS RANDOM
*> RECORD KEY IS RAW-DATA-KEY.
SELECT PRINT-FILE ASSIGN TO
XXXXX055.
SELECT IX-FS1
ASSIGN TO
XXXXX024
*> XXXXX044
ACCESS MODE IS SEQUENTIAL
ORGANIZATION IS INDEXED
RECORD KEY IS IX-FS1-KEY
ALTERNATE RECORD KEY IS IX-FS1-ALTKEY1
ALTERNATE RECORD KEY IS IX-FS1-ALTKEY2 WITH DUPLICATES
FILE STATUS IS FS1-STATUS.
DATA DIVISION.
FILE SECTION.
*>
*> FD RAW-DATA.
*>
*> 01 RAW-DATA-SATZ.
*> 05 RAW-DATA-KEY PIC X(6).
*> 05 C-DATE PIC 9(6).
*> 05 C-TIME PIC 9(8).
*> 05 C-NO-OF-TESTS PIC 99.
*> 05 C-OK PIC 999.
*> 05 C-ALL PIC 999.
*> 05 C-FAIL PIC 999.
*> 05 C-DELETED PIC 999.
*> 05 C-INSPECT PIC 999.
*> 05 C-NOTE PIC X(13).
*> 05 C-INDENT PIC X.
*> 05 C-ABORT PIC X(8).
FD PRINT-FILE.
01 PRINT-REC PICTURE X(120).
01 DUMMY-RECORD PICTURE X(120).
FD IX-FS1
LABEL RECORDS ARE STANDARD
DATA RECORD IS IX-FS1R1-F-G-240
RECORD CONTAINS 240 CHARACTERS.
01 IX-FS1R1-F-G-240.
05 IX-FS1-REC-120 PICTURE X(120).
05 IX-FS1-REC-121-240.
10 FILLER PICTURE X(8).
10 IX-REC-KEY-AREA.
15 IX-FS1-KEY.
20 IX-FS1-KEY-1-10.
25 IX-FS1-KEY-1-5 PICTURE X(5).
25 IX-FS1-KEY-6-10 PICTURE X(5).
20 IX-FS1-KEY-11-13 PICTURE X(3).
15 FILLER PICTURE X(16).
10 FILLER PICTURE X(9).
10 IX-ALT-KEY1-AREA.
15 IX-FS1-ALTKEY1.
20 IX-FS1-ALTKEY1-1-10.
25 IX-FS1-ALTKEY1-1-5 PICTURE X(5).
25 IX-FS1-ALTKEY1-6-10 PICTURE X(5).
20 IX-FS1-ALTKEY1-11-13 PICTURE X(3).
20 IX-FS1-ALTKEY1-14-20 PICTURE X(7).
15 FILLER PICTURE X(9).
10 FILLER PICTURE X(9).
10 IX-ALT-KEY2-AREA.
15 IX-FS1-ALTKEY2.
20 IX-FS1-ALTKEY2-1-10.
25 IX-FS1-ALTKEY2-1-5 PICTURE X(5).
25 IX-FS1-ALTKEY2-6-10 PICTURE X(5).
20 IX-FS1-ALTKEY2-11-13 PICTURE X(3).
20 IX-FS1-ALTKEY2-14-20 PICTURE X(7).
15 FILLER PICTURE X(9).
10 FILLER PICTURE X(7).
WORKING-STORAGE SECTION.
01 WRK-FS1-RECKEY.
05 FS1-RECKEY-1-13.
10 FS1-RECKEY-1-10 PICTURE X(10).
10 FS1-RECKEY-11-13 PICTURE 9(3).
05 FILLER PICTURE X(16) VALUE SPACE.
01 WRK-FS1-ALTKEY1.
05 FS1-ALTKEY1-1-20.
10 FS1-ALTKEY1-1-10.
15 FS1-ALTKEY1-1-5 PICTURE X(5).
15 FS1-ALTKEY1-6-10 PICTURE X(5).
10 FS1-ALTKEY1-11-13 PICTURE 9(3).
10 FS1-ALTKEY1-14-20 PICTURE X(7).
05 FILLER PICTURE X(9) VALUE SPACE.
01 WRK-FS1-ALTKEY2.
05 FS1-ALTKEY2-1-20.
10 FS1-ALTKEY2-1-10.
15 FS1-ALTKEY2-1-5 PICTURE X(5).
15 FS1-ALTKEY2-6-10 PICTURE X(5).
10 FS1-ALTKEY2-11-13 PICTURE 9(3).
10 FS1-ALTKEY2-14-20 PICTURE X(7).
05 FILLER PICTURE X(9) VALUE SPACE.
01 RECNO PICTURE 9(5) VALUE ZERO.
01 FS1-STATUS PICTURE XX VALUE SPACE.
01 EXCUT-COUNTER-06V00 PICTURE 9(6) VALUE ZERO.
01 KEYSUB PICTURE 9(3) COMPUTATIONAL.
01 INVKEY-COUNTER PICTURE 9(3) COMPUTATIONAL.
01 RECORDS-WRITTEN PICTURE 9(3).
01 RECKEY-NUM PICTURE 9(3).
01 ALTKEY1-NUM PICTURE 9(3).
01 ALTKEY2-NUM PICTURE 9(3).
01 RECORD-KEY-CONTENT.
05 FILLER PIC X(53) VALUE
"BBBBBBBBBC002EEEEEEEEEF002ALTKEY1WWWWWWWWWV398ALTKEY2".
05 FILLER PIC X(53) VALUE
"BBBBBBBBCC004EEEEEEEEFF004ALTKEY1WWWWWWWWVV396ALTKEY2".
05 FILLER PIC X(53) VALUE
"BBBBBBBCCC006EEEEEEEFFF006ALTKEY1WWWWWWWVVV394ALTKEY2".
05 FILLER PIC X(53) VALUE
"BBBBBBCCCC008EEEEEEFFFF008ALTKEY1WWWWWWVVVV392ALTKEY2".
05 FILLER PIC X(53) VALUE
"BBBBBCCCCC010EEEEEFFFFF010ALTKEY1WWWWWVVVVV390ALTKEY2".
05 FILLER PIC X(53) VALUE
"BBBBCCCCCC012EEEEFFFFFF012ALTKEY1WWWWVVVVVV388ALTKEY2".
05 FILLER PIC X(53) VALUE
"BBBCCCCCCC014EEEFFFFFFF014ALTKEY1WWWVVVVVVV386ALTKEY2".
05 FILLER PIC X(53) VALUE
"BBCCCCCCCC016EEFFFFFFFF016ALTKEY1WWVVVVVVVV384ALTKEY2".
05 FILLER PIC X(53) VALUE
"BCCCCCCCCC018EFFFFFFFFF018ALTKEY1WVVVVVVVVV382ALTKEY2".
05 FILLER PIC X(53) VALUE
"CCCCCCCCCC020FFFFFFFFFF020ALTKEY1VVVVVVVVVV380ALTKEY2".
05 FILLER PIC X(53) VALUE
"CCCCCCCCCD022FFFFFFFFFG022ALTKEY1VVVVVVVVVV380ALTKEY2".
05 FILLER PIC X(53) VALUE
"CCCCCCCCDD024FFFFFFFFGG024ALTKEY1VVVVVVVVUU376ALTKEY2".
05 FILLER PIC X(53) VALUE
"CCCCCCCDDD026FFFFFFFGGG026ALTKEY1VVVVVVVUUU374ALTKEY2".
05 FILLER PIC X(53) VALUE
"CCCCCCDDDD028FFFFFFGGGG028ALTKEY1VVVVVVUUUU372ALTKEY2".
05 FILLER PIC X(53) VALUE
"CCCCCDDDDD030FFFFFGGGGG030ALTKEY1VVVVVUUUUU370ALTKEY2".
05 FILLER PIC X(53) VALUE
"CCCCDDDDDD032FFFFGGGGGG032ALTKEY1VVVVUUUUUU368ALTKEY2".
05 FILLER PIC X(53) VALUE
"CCCDDDDDDD034FFFGGGGGGG034ALTKEY1VVVUUUUUUU366ALTKEY2".
05 FILLER PIC X(53) VALUE
"CCDDDDDDDD036FFGGGGGGGG036ALTKEY1VVUUUUUUUU364ALTKEY2".
05 FILLER PIC X(53) VALUE
"CDDDDDDDDD038FGGGGGGGGG038ALTKEY1VUUUUUUUUU362ALTKEY2".
05 FILLER PIC X(53) VALUE
"DDDDDDDDDD040GGGGGGGGGG040ALTKEY1UUUUUUUUUU360ALTKEY2".
05 FILLER PIC X(53) VALUE
"DDDDDDDDDE042GGGGGGGGGH042ALTKEY1UUUUUUUUUU360ALTKEY2".
05 FILLER PIC X(53) VALUE
"DDDDDDDDEE044GGGGGGGGHH044ALTKEY1UUUUUUUUTT356ALTKEY2".
05 FILLER PIC X(53) VALUE
"DDDDDDDEEE046GGGGGGGHHH046ALTKEY1UUUUUUUTTT354ALTKEY2".
05 FILLER PIC X(53) VALUE
"DDDDDDEEEE048GGGGGGHHHH048ALTKEY1UUUUUUTTTT352ALTKEY2".
05 FILLER PIC X(53) VALUE
"DDDDDEEEEE050GGGGGHHHHH050ALTKEY1UUUUUTTTTT350ALTKEY2".
05 FILLER PIC X(53) VALUE
"DDDDEEEEEE052GGGGHHHHHH052ALTKEY1UUUUTTTTTT348ALTKEY2".
05 FILLER PIC X(53) VALUE
"DDDEEEEEEE054GGGHHHHHHH054ALTKEY1UUUTTTTTTT346ALTKEY2".
05 FILLER PIC X(53) VALUE
"DDEEEEEEEE056GGHHHHHHHH056ALTKEY1UUTTTTTTTT344ALTKEY2".
05 FILLER PIC X(53) VALUE
"DEEEEEEEEE058GHHHHHHHHH058ALTKEY1UTTTTTTTTT342ALTKEY2".
05 FILLER PIC X(53) VALUE
"EEEEEEEEEE060HHHHHHHHHH060ALTKEY1TTTTTTTTTT340ALTKEY2".
05 FILLER PIC X(53) VALUE
"EEEEEEEEEF062HHHHHHHHHI062ALTKEY1TTTTTTTTTT340ALTKEY2".
05 FILLER PIC X(53) VALUE
"EEEEEEEEFF064HHHHHHHHII064ALTKEY1TTTTTTTTSS336ALTKEY2".
05 FILLER PIC X(53) VALUE
"EEEEEEEFFF066HHHHHHHIII066ALTKEY1TTTTTTTSSS334ALTKEY2".
05 FILLER PIC X(53) VALUE
"EEEEEEFFFF068HHHHHHIIII068ALTKEY1TTTTTTSSSS332ALTKEY2".
05 FILLER PIC X(53) VALUE
"EEEEEFFFFF070HHHHHIIIII070ALTKEY1TTTTTSSSSS330ALTKEY2".
05 FILLER PIC X(53) VALUE
"EEEEFFFFFF072HHHHIIIIII072ALTKEY1TTTTSSSSSS328ALTKEY2".
05 FILLER PIC X(53) VALUE
"EEEFFFFFFF074HHHIIIIIII074ALTKEY1TTTSSSSSSS326ALTKEY2".
05 FILLER PIC X(53) VALUE
"EEFFFFFFFF076HHIIIIIIII076ALTKEY1TTSSSSSSSS324ALTKEY2".
05 FILLER PIC X(53) VALUE
"EFFFFFFFFF078HIIIIIIIII078ALTKEY1TSSSSSSSSS322ALTKEY2".
05 FILLER PIC X(53) VALUE
"FFFFFFFFFF080IIIIIIIIII080ALTKEY1SSSSSSSSSS320ALTKEY2".
05 FILLER PIC X(53) VALUE
"FFFFFFFFFG082IIIIIIIIIJ082ALTKEY1SSSSSSSSSS320ALTKEY2".
05 FILLER PIC X(53) VALUE
"FFFFFFFFGG084IIIIIIIIJJ084ALTKEY1SSSSSSSSRR316ALTKEY2".
05 FILLER PIC X(53) VALUE
"FFFFFFFGGG086IIIIIIIJJJ086ALTKEY1SSSSSSSRRR314ALTKEY2".
05 FILLER PIC X(53) VALUE
"FFFFFFGGGG088IIIIIIJJJJ088ALTKEY1SSSSSSRRRR312ALTKEY2".
05 FILLER PIC X(53) VALUE
"FFFFFGGGGG090IIIIIJJJJJ090ALTKEY1SSSSSRRRRR310ALTKEY2".
05 FILLER PIC X(53) VALUE
"FFFFGGGGGG092IIIIJJJJJJ092ALTKEY1SSSSRRRRRR308ALTKEY2".
05 FILLER PIC X(53) VALUE
"FFFGGGGGGG094IIIJJJJJJJ094ALTKEY1SSSRRRRRRR306ALTKEY2".
05 FILLER PIC X(53) VALUE
"FFGGGGGGGG096IIJJJJJJJJ096ALTKEY1SSRRRRRRRR304ALTKEY2".
05 FILLER PIC X(53) VALUE
"FGGGGGGGGG098IJJJJJJJJJ098ALTKEY1SRRRRRRRRR302ALTKEY2".
05 FILLER PIC X(53) VALUE
"GGGGGGGGGG100JJJJJJJJJJ100ALTKEY1RRRRRRRRRR300ALTKEY2".
05 FILLER PIC X(53) VALUE
"RRRRSSSSSS352VVVVWWWWWW352ALTKEY1GGGGFFFFFF048ALTKEY2".
05 FILLER PIC X(53) VALUE
"RRRSSSSSSS354VVVWWWWWWW354ALTKEY1GGGFFFFFFF046ALTKEY2".
05 FILLER PIC X(53) VALUE
"RRSSSSSSSS356VVWWWWWWWW356ALTKEY1GGFFFFFFFF044ALTKEY2".
05 FILLER PIC X(53) VALUE
"RSSSSSSSSS358VWWWWWWWWW358ALTKEY1GFFFFFFFFF042ALTKEY2".
05 FILLER PIC X(53) VALUE
"SSSSSSSSSS360WWWWWWWWWW360ALTKEY1FFFFFFFFFF040ALTKEY2".
05 FILLER PIC X(53) VALUE
"SSSSSSSSST362WWWWWWWWWX362ALTKEY1FFFFFFFFFF040ALTKEY2".
05 FILLER PIC X(53) VALUE
"SSSSSSSSTT364WWWWWWWWXX364ALTKEY1FFFFFFFFEE036ALTKEY2".
05 FILLER PIC X(53) VALUE
"SSSSSSSTTT366WWWWWWWXXX366ALTKEY1FFFFFFFEEE034ALTKEY2".
05 FILLER PIC X(53) VALUE
"SSSSSSTTTT368WWWWWWXXXX368ALTKEY1FFFFFFEEEE032ALTKEY2".
05 FILLER PIC X(53) VALUE
"SSSSSTTTTT370WWWWWXXXXX370ALTKEY1FFFFFEEEEE030ALTKEY2".
05 FILLER PIC X(53) VALUE
"SSSSTTTTTT372WWWWXXXXXX372ALTKEY1FFFFEEEEEE028ALTKEY2".
05 FILLER PIC X(53) VALUE
"SSSTTTTTTT374WWWXXXXXXX374ALTKEY1FFFEEEEEEE026ALTKEY2".
05 FILLER PIC X(53) VALUE
"SSTTTTTTTT376WWXXXXXXXX376ALTKEY1FFEEEEEEEE024ALTKEY2".
05 FILLER PIC X(53) VALUE
"STTTTTTTTT378WXXXXXXXXX378ALTKEY1FEEEEEEEEE022ALTKEY2".
05 FILLER PIC X(53) VALUE
"TTTTTTTTTT380XXXXXXXXXX380ALTKEY1EEEEEEEEEE020ALTKEY2".
05 FILLER PIC X(53) VALUE
"TTTTTTTTTU382XXXXXXXXXY382ALTKEY1EEEEEEEEEE020ALTKEY2".
05 FILLER PIC X(53) VALUE
"TTTTTTTTUU384XXXXXXXXYY384ALTKEY1EEEEEEEEDD016ALTKEY2".
05 FILLER PIC X(53) VALUE
"TTTTTTTUUU386XXXXXXXYYY386ALTKEY1EEEEEEEDDD014ALTKEY2".
05 FILLER PIC X(53) VALUE
"TTTTTTUUUU388XXXXXXYYYY388ALTKEY1EEEEEEDDDD012ALTKEY2".
05 FILLER PIC X(53) VALUE
"TTTTTUUUUU390XXXXXYYYYY390ALTKEY1EEEEEDDDDD010ALTKEY2".
05 FILLER PIC X(53) VALUE
"TTTTUUUUUU392XXXXYYYYYY392ALTKEY1EEEEDDDDDD008ALTKEY2".
05 FILLER PIC X(53) VALUE
"TTTUUUUUUU394XXXYYYYYYY394ALTKEY1EEEDDDDDDD006ALTKEY2".
05 FILLER PIC X(53) VALUE
"TTUUUUUUUU396XXYYYYYYYY396ALTKEY1EEDDDDDDDD004ALTKEY2".
05 FILLER PIC X(53) VALUE
"TUUUUUUUUU398XYYYYYYYYY398ALTKEY1EDDDDDDDDD002ALTKEY2".
05 FILLER PIC X(53) VALUE
"UUUUUUUUUU400YYYYYYYYYY400ALTKEY1DDDDDDDDDD000ALTKEY2".
01 RECORD-KEY-DATA REDEFINES RECORD-KEY-CONTENT.
05 KEY-VALUES OCCURS 75 TIMES.
10 RECKEY-VALUE PICTURE X(13).
10 ALTKEY1-VALUE PICTURE X(20).
10 ALTKEY2-VALUE PICTURE X(20).
01 INIT-FLAG PICTURE 9.
01 HOLD-FILESTATUS-RECORD.
05 FILESTATUS PICTURE XX OCCURS 10 TIMES.
01 FILE-RECORD-INFORMATION-REC.
03 FILE-RECORD-INFO-SKELETON.
05 FILLER PICTURE X(48) VALUE
"FILE= ,RECORD= /0,RECNO=000000,UPDT=00".
05 FILLER PICTURE X(46) VALUE
",ODO=0000,PGM= ,LRECL=000000,BLKSIZ =0000".
05 FILLER PICTURE X(26) VALUE
",LFIL=000000,ORG= ,LBLR= ".
05 FILLER PICTURE X(37) VALUE
",RECKEY= ".
05 FILLER PICTURE X(38) VALUE
",ALTKEY1= ".
05 FILLER PICTURE X(38) VALUE
",ALTKEY2= ".
05 FILLER PICTURE X(7) VALUE SPACE.
03 FILE-RECORD-INFO OCCURS 10 TIMES.
05 FILE-RECORD-INFO-P1-120.
07 FILLER PIC X(5).
07 XFILE-NAME PIC X(6).
07 FILLER PIC X(8).
07 XRECORD-NAME PIC X(6).
07 FILLER PIC X(1).
07 REELUNIT-NUMBER PIC 9(1).
07 FILLER PIC X(7).
07 XRECORD-NUMBER PIC 9(6).
07 FILLER PIC X(6).
07 UPDATE-NUMBER PIC 9(2).
07 FILLER PIC X(5).
07 ODO-NUMBER PIC 9(4).
07 FILLER PIC X(5).
07 XPROGRAM-NAME PIC X(5).
07 FILLER PIC X(7).
07 XRECORD-LENGTH PIC 9(6).
07 FILLER PIC X(7).
07 CHARS-OR-RECORDS PIC X(2).
07 FILLER PIC X(1).
07 XBLOCK-SIZE PIC 9(4).
07 FILLER PIC X(6).
07 RECORDS-IN-FILE PIC 9(6).
07 FILLER PIC X(5).
07 XFILE-ORGANIZATION PIC X(2).
07 FILLER PIC X(6).
07 XLABEL-TYPE PIC X(1).
05 FILE-RECORD-INFO-P121-240.
07 FILLER PIC X(8).
07 XRECORD-KEY PIC X(29).
07 FILLER PIC X(9).
07 ALTERNATE-KEY1 PIC X(29).
07 FILLER PIC X(9).
07 ALTERNATE-KEY2 PIC X(29).
07 FILLER PIC X(7).
01 TEST-RESULTS.
02 FILLER PIC X VALUE SPACE.
02 FEATURE PIC X(20) VALUE SPACE.
02 FILLER PIC X VALUE SPACE.
02 P-OR-F PIC X(5) VALUE SPACE.
02 FILLER PIC X VALUE SPACE.
02 PAR-NAME.
03 FILLER PIC X(19) VALUE SPACE.
03 PARDOT-X PIC X VALUE SPACE.
03 DOTVALUE PIC 99 VALUE ZERO.
02 FILLER PIC X(8) VALUE SPACE.
02 RE-MARK PIC X(61).
01 TEST-COMPUTED.
02 FILLER PIC X(30) VALUE SPACE.
02 FILLER PIC X(17) VALUE
" COMPUTED=".
02 COMPUTED-X.
03 COMPUTED-A PIC X(20) VALUE SPACE.
03 COMPUTED-N REDEFINES COMPUTED-A
PIC -9(9).9(9).
03 COMPUTED-0V18 REDEFINES COMPUTED-A PIC -.9(18).
03 COMPUTED-4V14 REDEFINES COMPUTED-A PIC -9(4).9(14).
03 COMPUTED-14V4 REDEFINES COMPUTED-A PIC -9(14).9(4).
03 CM-18V0 REDEFINES COMPUTED-A.
04 COMPUTED-18V0 PIC -9(18).
04 FILLER PIC X.
03 FILLER PIC X(50) VALUE SPACE.
01 TEST-CORRECT.
02 FILLER PIC X(30) VALUE SPACE.
02 FILLER PIC X(17) VALUE " CORRECT =".
02 CORRECT-X.
03 CORRECT-A PIC X(20) VALUE SPACE.
03 CORRECT-N REDEFINES CORRECT-A PIC -9(9).9(9).
03 CORRECT-0V18 REDEFINES CORRECT-A PIC -.9(18).
03 CORRECT-4V14 REDEFINES CORRECT-A PIC -9(4).9(14).
03 CORRECT-14V4 REDEFINES CORRECT-A PIC -9(14).9(4).
03 CR-18V0 REDEFINES CORRECT-A.
04 CORRECT-18V0 PIC -9(18).
04 FILLER PIC X.
03 FILLER PIC X(2) VALUE SPACE.
03 COR-ANSI-REFERENCE PIC X(48) VALUE SPACE.
01 CCVS-C-1.
02 FILLER PIC IS X(99) VALUE IS " FEATURE PASS PARAGRAPH-NAME REMARKS".
02 FILLER PIC X(20) VALUE SPACE.
01 CCVS-C-2.
02 FILLER PIC X VALUE SPACE.
02 FILLER PIC X(6) VALUE "TESTED".
02 FILLER PIC X(15) VALUE SPACE.
02 FILLER PIC X(4) VALUE "FAIL".
02 FILLER PIC X(94) VALUE SPACE.
01 REC-SKL-SUB PIC 9(2) VALUE ZERO.
01 REC-CT PIC 99 VALUE ZERO.
01 DELETE-COUNTER PIC 999 VALUE ZERO.
01 ERROR-COUNTER PIC 999 VALUE ZERO.
01 INSPECT-COUNTER PIC 999 VALUE ZERO.
01 PASS-COUNTER PIC 999 VALUE ZERO.
01 TOTAL-ERROR PIC 999 VALUE ZERO.
01 ERROR-HOLD PIC 999 VALUE ZERO.
01 DUMMY-HOLD PIC X(120) VALUE SPACE.
01 RECORD-COUNT PIC 9(5) VALUE ZERO.
01 ANSI-REFERENCE PIC X(48) VALUE SPACES.
01 CCVS-H-1.
02 FILLER PIC X(39) VALUE SPACES.
02 FILLER PIC X(42) VALUE
"OFFICIAL COBOL COMPILER VALIDATION SYSTEM".
02 FILLER PIC X(39) VALUE SPACES.
01 CCVS-H-2A.
02 FILLER PIC X(40) VALUE SPACE.
02 FILLER PIC X(7) VALUE "CCVS85 ".
02 FILLER PIC XXXX VALUE
"4.2 ".
02 FILLER PIC X(28) VALUE
" COPY - NOT FOR DISTRIBUTION".
02 FILLER PIC X(41) VALUE SPACE.
01 CCVS-H-2B.
02 FILLER PIC X(15) VALUE
"TEST RESULT OF ".
02 TEST-ID PIC X(9).
02 FILLER PIC X(4) VALUE
" IN ".
02 FILLER PIC X(12) VALUE
" HIGH ".
02 FILLER PIC X(22) VALUE
" LEVEL VALIDATION FOR ".
02 FILLER PIC X(58) VALUE
"ON-SITE VALIDATION, NATIONAL INSTITUTE OF STD & TECH. ".
01 CCVS-H-3.
02 FILLER PIC X(34) VALUE
" FOR OFFICIAL USE ONLY ".
02 FILLER PIC X(58) VALUE
"COBOL 85 VERSION 4.2, Apr 1993 SSVG ".
02 FILLER PIC X(28) VALUE
" COPYRIGHT 1985 ".
*> Fixed print lines for the end-of-run summary.  ID-AGAIN echoes the
*> program id; ERROR-TOTAL / ENDER-DESC in CCVS-E-2-2 are re-filled
*> for each summary line (failed / deleted / inspect counts), and
*> CCVS-E-4 reports "n OF m TESTS WERE EXECUTED SUCCESSFULLY".
01 CCVS-E-1.
02 FILLER PIC X(52) VALUE SPACE.
02 FILLER PIC X(14) VALUE IS "END OF TEST- ".
02 ID-AGAIN PIC X(9).
02 FILLER PIC X(45) VALUE SPACES.
01 CCVS-E-2.
02 FILLER PIC X(31) VALUE SPACE.
02 FILLER PIC X(21) VALUE SPACE.
02 CCVS-E-2-2.
03 ERROR-TOTAL PIC XXX VALUE SPACE.
03 FILLER PIC X VALUE SPACE.
03 ENDER-DESC PIC X(44) VALUE
"ERRORS ENCOUNTERED".
01 CCVS-E-3.
02 FILLER PIC X(22) VALUE
" FOR OFFICIAL USE ONLY".
02 FILLER PIC X(12) VALUE SPACE.
02 FILLER PIC X(58) VALUE
"ON-SITE VALIDATION, NATIONAL INSTITUTE OF STD & TECH. ".
02 FILLER PIC X(13) VALUE SPACE.
02 FILLER PIC X(15) VALUE
" COPYRIGHT 1985".
01 CCVS-E-4.
02 CCVS-E-4-1 PIC XXX VALUE SPACE.
02 FILLER PIC X(4) VALUE " OF ".
02 CCVS-E-4-2 PIC XXX VALUE SPACE.
02 FILLER PIC X(40) VALUE
" TESTS WERE EXECUTED SUCCESSFULLY".
*> XXINFO is the informational print line emitted by BAIL-OUT /
*> FAIL-ROUTINE; HYPHEN-LINE is the full-width separator; CCVS-PGM-ID
*> names this test program (IX209A).
01 XXINFO.
02 FILLER PIC X(19) VALUE
"*** INFORMATION ***".
02 INFO-TEXT.
04 FILLER PIC X(8) VALUE SPACE.
04 XXCOMPUTED PIC X(20).
04 FILLER PIC X(5) VALUE SPACE.
04 XXCORRECT PIC X(20).
02 INF-ANSI-REFERENCE PIC X(48).
01 HYPHEN-LINE.
02 FILLER PIC IS X VALUE IS SPACE.
02 FILLER PIC IS X(65) VALUE IS "*****************************************************************".
02 FILLER PIC IS X(54) VALUE IS "******************************************************".
01 CCVS-PGM-ID PIC X(9) VALUE
"IX209A".
PROCEDURE DIVISION.
CCVS1 SECTION.
*> Standard CCVS start-up: the RAW-DATA bookkeeping below is disabled
*> (commented out); the routine opens the report file, stamps the
*> headers with the program id, prints the page header, initialises
*> the nine FILE-RECORD-INFO table entries from the skeleton, then
*> jumps over CLOSE-FILES to CCVS1-EXIT.
OPEN-FILES.
*> OPEN I-O RAW-DATA.
*> MOVE CCVS-PGM-ID TO RAW-DATA-KEY.
*> READ RAW-DATA INVALID KEY GO TO END-E-1.
*> MOVE "ABORTED " TO C-ABORT.
*> ADD 1 TO C-NO-OF-TESTS.
*> ACCEPT C-DATE FROM DATE.
*> ACCEPT C-TIME FROM TIME.
*> REWRITE RAW-DATA-SATZ INVALID KEY GO TO END-E-1.
*> END-E-1.
*> CLOSE RAW-DATA.
OPEN OUTPUT PRINT-FILE.
MOVE CCVS-PGM-ID TO TEST-ID. MOVE CCVS-PGM-ID TO ID-AGAIN.
MOVE SPACE TO TEST-RESULTS.
PERFORM HEAD-ROUTINE THRU COLUMN-NAMES-ROUTINE.
MOVE ZERO TO REC-SKL-SUB.
PERFORM CCVS-INIT-FILE 9 TIMES.
*> Copies the skeleton into FILE-RECORD-INFO (1..9); REC-SKL-SUB is
*> the running subscript.
CCVS-INIT-FILE.
ADD 1 TO REC-SKL-SUB.
MOVE FILE-RECORD-INFO-SKELETON
TO FILE-RECORD-INFO (REC-SKL-SUB).
CCVS-INIT-EXIT.
GO TO CCVS1-EXIT.
*> End-of-run wrap-up: prints the summary block (END-ROUTINE THRU
*> END-ROUTINE-13), closes the report file and stops the run.  The
*> RAW-DATA result-recording is disabled, as in OPEN-FILES.
CLOSE-FILES.
*> OPEN I-O RAW-DATA.
*> MOVE CCVS-PGM-ID TO RAW-DATA-KEY.
*> READ RAW-DATA INVALID KEY GO TO END-E-2.
*> MOVE "OK. " TO C-ABORT.
*> MOVE PASS-COUNTER TO C-OK.
*> MOVE ERROR-HOLD TO C-ALL.
*> MOVE ERROR-COUNTER TO C-FAIL.
*> MOVE DELETE-COUNTER TO C-DELETED.
*> MOVE INSPECT-COUNTER TO C-INSPECT.
*> REWRITE RAW-DATA-SATZ INVALID KEY GO TO END-E-2.
*> END-E-2.
*> CLOSE RAW-DATA.
PERFORM END-ROUTINE THRU END-ROUTINE-13. CLOSE PRINT-FILE.
TERMINATE-CCVS.
*> EXIT PROGRAM.
*> TERMINATE-CALL.
STOP RUN.
*> Outcome-setting paragraphs PERFORMed by every test: each stamps
*> P-OR-F for the detail line and bumps the matching tally.
INSPT. MOVE "INSPT" TO P-OR-F. ADD 1 TO INSPECT-COUNTER.
PASS. MOVE "PASS " TO P-OR-F. ADD 1 TO PASS-COUNTER.
FAIL. MOVE "FAIL*" TO P-OR-F. ADD 1 TO ERROR-COUNTER.
DE-LETE. MOVE "*****" TO P-OR-F. ADD 1 TO DELETE-COUNTER.
MOVE "****TEST DELETED****" TO RE-MARK.
*> Prints one test-result detail line, appending ".nn" to the
*> paragraph name when REC-CT numbers a sub-test.  Failures get the
*> extra computed/correct lines via FAIL-ROUTINE; other outcomes go
*> through BAIL-OUT.  Clears the per-test fields afterwards.
PRINT-DETAIL.
IF REC-CT NOT EQUAL TO ZERO
MOVE "." TO PARDOT-X
MOVE REC-CT TO DOTVALUE.
MOVE TEST-RESULTS TO PRINT-REC. PERFORM WRITE-LINE.
IF P-OR-F EQUAL TO "FAIL*" PERFORM WRITE-LINE
PERFORM FAIL-ROUTINE THRU FAIL-ROUTINE-EX
ELSE PERFORM BAIL-OUT THRU BAIL-OUT-EX.
MOVE SPACE TO P-OR-F. MOVE SPACE TO COMPUTED-X.
MOVE SPACE TO CORRECT-X.
IF REC-CT EQUAL TO ZERO MOVE SPACE TO PAR-NAME.
MOVE SPACE TO RE-MARK.
*> Prints the report page header and the column-name lines.
HEAD-ROUTINE.
MOVE CCVS-H-1 TO DUMMY-RECORD. PERFORM WRITE-LINE 2 TIMES.
MOVE CCVS-H-2A TO DUMMY-RECORD. PERFORM WRITE-LINE 2 TIMES.
MOVE CCVS-H-2B TO DUMMY-RECORD. PERFORM WRITE-LINE 3 TIMES.
MOVE CCVS-H-3 TO DUMMY-RECORD. PERFORM WRITE-LINE 3 TIMES.
COLUMN-NAMES-ROUTINE.
MOVE CCVS-C-1 TO DUMMY-RECORD. PERFORM WRITE-LINE.
MOVE CCVS-C-2 TO DUMMY-RECORD. PERFORM WRITE-LINE 2 TIMES.
MOVE HYPHEN-LINE TO DUMMY-RECORD. PERFORM WRITE-LINE.
*> Summary block, PERFORMed THRU END-ROUTINE-13 from CLOSE-FILES:
*> totals all outcome counters into ERROR-HOLD, then prints the
*> pass/total line followed by failed, deleted and inspect counts.
END-ROUTINE.
MOVE HYPHEN-LINE TO DUMMY-RECORD. PERFORM WRITE-LINE 5 TIMES.
END-RTN-EXIT.
MOVE CCVS-E-1 TO DUMMY-RECORD. PERFORM WRITE-LINE 2 TIMES.
END-ROUTINE-1.
ADD ERROR-COUNTER TO ERROR-HOLD ADD INSPECT-COUNTER TO
ERROR-HOLD. ADD DELETE-COUNTER TO ERROR-HOLD.
ADD PASS-COUNTER TO ERROR-HOLD.
*> IF PASS-COUNTER EQUAL TO ERROR-HOLD GO TO END-ROUTINE-12.
MOVE PASS-COUNTER TO CCVS-E-4-1.
MOVE ERROR-HOLD TO CCVS-E-4-2.
MOVE CCVS-E-4 TO CCVS-E-2-2.
MOVE CCVS-E-2 TO DUMMY-RECORD PERFORM WRITE-LINE.
END-ROUTINE-12.
MOVE "TEST(S) FAILED" TO ENDER-DESC.
IF ERROR-COUNTER IS EQUAL TO ZERO
MOVE "NO " TO ERROR-TOTAL
ELSE
MOVE ERROR-COUNTER TO ERROR-TOTAL.
MOVE CCVS-E-2 TO DUMMY-RECORD.
PERFORM WRITE-LINE.
END-ROUTINE-13.
IF DELETE-COUNTER IS EQUAL TO ZERO
MOVE "NO " TO ERROR-TOTAL ELSE
MOVE DELETE-COUNTER TO ERROR-TOTAL.
MOVE "TEST(S) DELETED " TO ENDER-DESC.
MOVE CCVS-E-2 TO DUMMY-RECORD. PERFORM WRITE-LINE.
IF INSPECT-COUNTER EQUAL TO ZERO
MOVE "NO " TO ERROR-TOTAL
ELSE MOVE INSPECT-COUNTER TO ERROR-TOTAL.
MOVE "TEST(S) REQUIRE INSPECTION" TO ENDER-DESC.
MOVE CCVS-E-2 TO DUMMY-RECORD. PERFORM WRITE-LINE.
MOVE CCVS-E-3 TO DUMMY-RECORD. PERFORM WRITE-LINE.
*> Writes DUMMY-RECORD to the report.  The page-overflow logic
*> (re-printing headers every 42 lines) is disabled; only the line
*> count is kept.  WRT-LN does the physical write and blanks the
*> record afterwards.
WRITE-LINE.
ADD 1 TO RECORD-COUNT.
*> IF RECORD-COUNT GREATER 42
*> MOVE DUMMY-RECORD TO DUMMY-HOLD
*> MOVE SPACE TO DUMMY-RECORD
*> WRITE DUMMY-RECORD AFTER ADVANCING PAGE
*> MOVE CCVS-H-1 TO DUMMY-RECORD PERFORM WRT-LN 2 TIMES
*> MOVE CCVS-H-2A TO DUMMY-RECORD PERFORM WRT-LN 2 TIMES
*> MOVE CCVS-H-2B TO DUMMY-RECORD PERFORM WRT-LN 3 TIMES
*> MOVE CCVS-H-3 TO DUMMY-RECORD PERFORM WRT-LN 3 TIMES
*> MOVE CCVS-C-1 TO DUMMY-RECORD PERFORM WRT-LN
*> MOVE CCVS-C-2 TO DUMMY-RECORD PERFORM WRT-LN
*> MOVE HYPHEN-LINE TO DUMMY-RECORD PERFORM WRT-LN
*> MOVE DUMMY-HOLD TO DUMMY-RECORD
*> MOVE ZERO TO RECORD-COUNT.
PERFORM WRT-LN.
WRT-LN.
WRITE DUMMY-RECORD AFTER ADVANCING 1 LINES.
MOVE SPACE TO DUMMY-RECORD.
BLANK-LINE-PRINT.
PERFORM WRT-LN.
*> Failure detail printer (PERFORMed THRU FAIL-ROUTINE-EX): when
*> computed/correct fields are blank it prints a generic info line,
*> otherwise the TEST-COMPUTED / TEST-CORRECT lines with the ANSI
*> reference.  BAIL-OUT is the non-failure counterpart used by
*> PRINT-DETAIL for INSPT-style output.
FAIL-ROUTINE.
IF COMPUTED-X NOT EQUAL TO SPACE
GO TO FAIL-ROUTINE-WRITE.
IF CORRECT-X NOT EQUAL TO SPACE GO TO FAIL-ROUTINE-WRITE.
MOVE ANSI-REFERENCE TO INF-ANSI-REFERENCE.
MOVE "NO FURTHER INFORMATION, SEE PROGRAM." TO INFO-TEXT.
MOVE XXINFO TO DUMMY-RECORD. PERFORM WRITE-LINE 2 TIMES.
MOVE SPACES TO INF-ANSI-REFERENCE.
GO TO FAIL-ROUTINE-EX.
FAIL-ROUTINE-WRITE.
MOVE TEST-COMPUTED TO PRINT-REC PERFORM WRITE-LINE
MOVE ANSI-REFERENCE TO COR-ANSI-REFERENCE.
MOVE TEST-CORRECT TO PRINT-REC PERFORM WRITE-LINE 2 TIMES.
MOVE SPACES TO COR-ANSI-REFERENCE.
FAIL-ROUTINE-EX. EXIT.
BAIL-OUT.
IF COMPUTED-A NOT EQUAL TO SPACE GO TO BAIL-OUT-WRITE.
IF CORRECT-A EQUAL TO SPACE GO TO BAIL-OUT-EX.
BAIL-OUT-WRITE.
MOVE CORRECT-A TO XXCORRECT. MOVE COMPUTED-A TO XXCOMPUTED.
MOVE ANSI-REFERENCE TO INF-ANSI-REFERENCE.
MOVE XXINFO TO DUMMY-RECORD. PERFORM WRITE-LINE 2 TIMES.
MOVE SPACES TO INF-ANSI-REFERENCE.
BAIL-OUT-EX. EXIT.
CCVS1-EXIT.
EXIT.
SECT-0001-IX209A SECTION.
*> File-creation phase: loads indexed file IX-FS1 with 200 records
*> (50 from GF-01-1, 125 from GF-01-2, 25 more from GF-01-1).
*> GF-01-1 takes prime/alternate keys from the RECKEY/ALTKEY value
*> tables; GF-01-2 derives the next keys by stepping the numeric
*> portion of the previous ones (+2 / +2 / -2).  INVKEY-COUNTER
*> tallies INVALID KEY hits on WRITE.
WRITE-INT-GF-01.
OPEN OUTPUT IX-FS1.
MOVE "IX-FS1" TO XFILE-NAME (1).
MOVE "R1-F-G" TO XRECORD-NAME (1).
MOVE ZERO TO XRECORD-NUMBER (1).
MOVE "RC" TO CHARS-OR-RECORDS (1).
MOVE CCVS-PGM-ID TO XPROGRAM-NAME (1).
MOVE 240 TO XRECORD-LENGTH (1).
MOVE 001 TO XBLOCK-SIZE (1).
MOVE "IX" TO XFILE-ORGANIZATION (1).
MOVE "S" TO XLABEL-TYPE (1).
MOVE 200 TO RECORDS-IN-FILE (1).
MOVE "CREATE-FILE-FS1" TO FEATURE.
MOVE "WRITE-TEST-GF-01" TO PAR-NAME.
MOVE ZERO TO KEYSUB.
MOVE ZERO TO INVKEY-COUNTER.
WRITE-INIT-GF-01-01.
PERFORM WRITE-TEST-GF-01-1 50 TIMES.
PERFORM WRITE-TEST-GF-01-2 125 TIMES.
PERFORM WRITE-TEST-GF-01-1 25 TIMES.
GO TO WRITE-TEST-GF-01.
*> Writes one record with keys taken from the value tables.
WRITE-TEST-GF-01-1.
ADD 001 TO XRECORD-NUMBER (1).
ADD 001 TO KEYSUB.
MOVE RECKEY-VALUE (KEYSUB) TO FS1-RECKEY-1-13.
MOVE ALTKEY1-VALUE (KEYSUB) TO FS1-ALTKEY1-1-20.
MOVE ALTKEY2-VALUE (KEYSUB) TO FS1-ALTKEY2-1-20.
MOVE WRK-FS1-RECKEY TO XRECORD-KEY (1).
MOVE WRK-FS1-ALTKEY1 TO ALTERNATE-KEY1 (1).
MOVE WRK-FS1-ALTKEY2 TO ALTERNATE-KEY2 (1).
MOVE FILE-RECORD-INFO (1) TO IX-FS1R1-F-G-240.
WRITE IX-FS1R1-F-G-240
INVALID KEY ADD 001 TO INVKEY-COUNTER.
ADD 001 TO EXCUT-COUNTER-06V00.
*> Writes one record with keys derived from the previous record.
WRITE-TEST-GF-01-2.
ADD 002 TO FS1-RECKEY-11-13.
ADD 002 TO FS1-ALTKEY1-11-13.
SUBTRACT 002 FROM FS1-ALTKEY2-11-13.
ADD 001 TO XRECORD-NUMBER (1).
MOVE WRK-FS1-RECKEY TO XRECORD-KEY (1).
MOVE WRK-FS1-ALTKEY1 TO ALTERNATE-KEY1 (1).
MOVE WRK-FS1-ALTKEY2 TO ALTERNATE-KEY2 (1).
MOVE FILE-RECORD-INFO (1) TO IX-FS1R1-F-G-240.
WRITE IX-FS1R1-F-G-240
INVALID KEY ADD 001 TO INVKEY-COUNTER.
ADD 001 TO EXCUT-COUNTER-06V00.
*> Verifies that all 200 WRITEs succeeded (attempts minus
*> INVALID KEY hits).
WRITE-TEST-GF-01.
SUBTRACT INVKEY-COUNTER FROM EXCUT-COUNTER-06V00
GIVING RECORDS-WRITTEN.
MOVE 200 TO CORRECT-18V0.
MOVE RECORDS-WRITTEN TO COMPUTED-18V0.
IF RECORDS-WRITTEN EQUAL TO 200
PERFORM PASS
ELSE
PERFORM FAIL.
MOVE "RECORDS IN FILE" TO RE-MARK.
PERFORM PRINT-DETAIL.
GO TO WRITE-TEST-GF-01-END.
WRITE-DELETE-GF-01.
MOVE "WRITE-TEST-GF-01" TO PAR-NAME.
PERFORM DE-LETE.
PERFORM PRINT-DETAIL.
WRITE-TEST-GF-01-END.
CLOSE IX-FS1.
*> Read-back verification: sequentially reads all of IX-FS1, checking
*> that the numeric portions of the prime and first alternate key
*> step by 2 on every record, and that exactly 200 records are read.
*> Fixes vs. original: (1) corrected the misspelled diagnostic
*> literal "COORECTLY" -> "CORRECTLY"; (2) the generic
*> "RECORDS IN FILE" remark is now set BEFORE the pass/fail check, so
*> the failure diagnostic is no longer overwritten before printing.
READ-INIT-F1-01.
OPEN INPUT IX-FS1.
MOVE "READ-TEST-F1-01" TO PAR-NAME.
MOVE "READ FILE IX-FS1" TO FEATURE.
MOVE ZERO TO EXCUT-COUNTER-06V00.
MOVE 02 TO RECKEY-NUM.
MOVE 002 TO ALTKEY1-NUM.
*> Per-record loop: exits to READ-TEST-F1-01 at end of file.
READ-TEST-F1-01-R1.
READ IX-FS1 AT END GO TO READ-TEST-F1-01.
MOVE IX-REC-KEY-AREA TO WRK-FS1-RECKEY.
MOVE IX-ALT-KEY1-AREA TO WRK-FS1-ALTKEY1.
IF FS1-RECKEY-11-13 EQUAL TO RECKEY-NUM
AND FS1-ALTKEY1-11-13 EQUAL TO ALTKEY1-NUM
NEXT SENTENCE
ELSE
PERFORM READ-FAIL-F1-01.
ADD 001 TO EXCUT-COUNTER-06V00.
ADD 002 TO RECKEY-NUM
ADD 002 TO ALTKEY1-NUM.
GO TO READ-TEST-F1-01-R1.
*> Final count check: exactly 200 records must have been read.
READ-TEST-F1-01.
MOVE EXCUT-COUNTER-06V00 TO COMPUTED-18V0.
MOVE 200 TO CORRECT-18V0.
MOVE "RECORDS IN FILE" TO RE-MARK.
IF EXCUT-COUNTER-06V00 EQUAL TO 200
PERFORM PASS ELSE
MOVE "IX-28; 4.5.2 OR IX-41; 4.9.2 NOT CORRECTLY EXECUTED"
TO RE-MARK
PERFORM FAIL.
PERFORM PRINT-DETAIL.
GO TO READ-EXIT-F1-01.
*> Per-record mismatch report (keeps reading afterwards).
READ-FAIL-F1-01.
PERFORM FAIL.
MOVE FS1-RECKEY-11-13 TO COMPUTED-18V0.
MOVE RECKEY-NUM TO CORRECT-18V0.
MOVE "NUM EMBEDDED IN RECKEY" TO RE-MARK.
PERFORM PRINT-DETAIL.
READ-EXIT-F1-01.
CLOSE IX-FS1.
*> START ... KEY EQUAL on the PRIME record key; the resulting file
*> status of each test is saved in FILESTATUS (n) for the later
*> START-TEST-GF-10..18 status checks.
START-INIT-GF-01.
OPEN INPUT IX-FS1.
MOVE "START EQ TO RECKEY" TO FEATURE.
MOVE "START-TEST-GF-01" TO PAR-NAME.
MOVE "********************" TO HOLD-FILESTATUS-RECORD.
*>
*> THIS TEST TESTS THE "START -- EQUAL TO" FOR PROPER POSITIONING
*> OF THE RECORD POINTER FOR THE SUBSEQUENT READ STATEMENT.
*> START-TEST-GF-01 USE ONLY THE PRIME RECORD KEY FOR ESTABLISHING
*> THE CURRENT RECORD POINTER FOR THE FILE. THE FOLLOWING IS A
*> SUMMARY OF THE TEST CONDITIONS AND THE EXPECTED ACTION TO BE
*> TAKEN FOR THE TESTS.
*>
*> CONDITIONS (CONTENTS OF KEY) / ACTION
*>
*> START-TEST-GF-01 - EQUAL A RECORD IN FILE / RECORD FOUND
*> START-TEST-GF-02 - BETWEEN 2 EXISTING KEY VALUES / INVALID KEY
*> START-TEST-GF-03 - LESS THAN FIRST FILE RECORD / INVALID KEY
*> START-TEST-GF-04 - GREATER THAN LAST FILE RECORD / INVALID KEY
*> START-TEST-GF-05 - UNEQUAL SIZE OPERANDS (EQUAL) / RECORD FOUN
*> START-TEST-GF-06 - UNEQUAL SIZE OPERANDS (EQUAL) / RECORD FOUN
*> START-TEST-GF-07 - UNEQUAL SIZE OPERANDS (UNEQUAL) / INVLD KEY
*> START-TEST-GF-08 - UNEQUAL SIZE OPERANDS (UNEQUAL) / INVLD KEY
*> START-TEST-GF-09 - UNEQUAL SIZE OPERANDS (UNEQUAL) /INVLD KEY
*>
*> BEFORE EACH TEST A RECORD IS MADE AVAILABLE WHICH IS DIFFERENT
*> THAN THE ONE WHICH IS EXPECTED TO BE PRESENT FOLLOWING A TEST.
*> IF AN INVALID KEY IS EXPECTED FROM THE TEST, THE KEYS
*> ASSOCIATED WITH THE FILE WHICH ARE NOT PARTICIPATING IN THE
*> START STATEMENT WILL BE LOADED WITH VALUES WHICH WOULD
*> MATCH RECORDS IN THE FILE. IF KEY MATCH IS EXPECTED FROM
*> THE TEST, THE KEYS ASSOCIATED WITH THE FILE WHICH ARE NOT
*> PARTICIPATING IN THE START STATEMENT WILL BE LOADED WITH
*> VALUES WHICH WOULD NOT MATCH RECORDS IN THE FILE. THE FILE
*> STATUS FROM EXECUTION OF EACH START IS CAPTURED FOR LATER TESTS
*>
START-INIT-GF-01-01.
PERFORM START-INITIALIZE-RECORD.
MOVE "START-TEST-GF-01" TO PAR-NAME.
IF INIT-FLAG NOT EQUAL TO ZERO
MOVE "**" TO FILESTATUS (1)
MOVE "TEST IMPROPERLY INITIALIZED" TO RE-MARK
GO TO START-DELETE-GF-01.
MOVE "CCCCCCCCCD022" TO FS1-RECKEY-1-13.
MOVE "EEEEEFFFFF022ALTKEY1" TO FS1-ALTKEY1-1-20.
MOVE "EEEEEFFFFF022ALTKEY2" TO FS1-ALTKEY2-1-20.
MOVE WRK-FS1-RECKEY TO IX-REC-KEY-AREA.
MOVE WRK-FS1-ALTKEY1 TO IX-ALT-KEY1-AREA.
MOVE WRK-FS1-ALTKEY2 TO IX-ALT-KEY2-AREA.
START-TEST-GF-01.
*>
*> START-TEST-GF-01 - THE START SHOULD FIND A RECORD IN THE FILE
*> WHICH HAS A RECORD KEY VALUE OF
*> CCCCCCCCCD022 (RECORD NUMBER 11).
*>
START IX-FS1
KEY IS EQUAL TO IX-FS1-KEY
INVALID KEY MOVE FS1-STATUS TO FILESTATUS (1)
MOVE "INVALID KEY ON START" TO COMPUTED-A
GO TO START-FAIL-GF-01.
MOVE FS1-STATUS TO FILESTATUS (1).
READ IX-FS1 AT END
MOVE "AT END ON READ" TO COMPUTED-A
GO TO START-FAIL-GF-01.
MOVE IX-FS1R1-F-G-240 TO FILE-RECORD-INFO (1).
IF XRECORD-NUMBER (1) EQUAL TO 11
PERFORM PASS
MOVE SPACE TO RE-MARK
GO TO START-WRITE-GF-01.
MOVE 11 TO RECNO.
PERFORM DISPLAY-RECORD-KEYS.
MOVE XRECORD-NUMBER (1) TO COMPUTED-18V0.
START-FAIL-GF-01.
PERFORM FAIL.
MOVE 11 TO CORRECT-18V0.
MOVE "IX-36; 4.7.2 ETC." TO RE-MARK.
GO TO START-WRITE-GF-01.
START-DELETE-GF-01.
PERFORM DE-LETE.
START-WRITE-GF-01.
PERFORM PRINT-DETAIL.
*> GF-02: key between two existing prime keys -> INVALID KEY expected.
*> GF-03: key lower than any prime key in the file -> INVALID KEY.
START-INIT-GF-02.
MOVE "START-TEST-GF-02" TO PAR-NAME.
PERFORM START-INITIALIZE-RECORD.
IF INIT-FLAG NOT EQUAL TO ZERO
MOVE "TEST IMPROPERLY INITIALIZED" TO RE-MARK
MOVE "**" TO FILESTATUS (2)
GO TO START-DELETE-GF-02.
MOVE "EEEEEEEFFF067" TO FS1-RECKEY-1-13.
MOVE "HHHHHHHIII066ALTKEY1" TO FS1-ALTKEY1-1-20.
MOVE "TTTTTTTSSS334ALTKEY2" TO FS1-ALTKEY2-1-20.
MOVE WRK-FS1-RECKEY TO IX-REC-KEY-AREA.
MOVE WRK-FS1-ALTKEY1 TO IX-ALT-KEY1-AREA.
MOVE WRK-FS1-ALTKEY2 TO IX-ALT-KEY2-AREA.
START-TEST-GF-02.
*>
*> START-TEST-GF-02 - THE START SHOULD NOT FIND A RECORD IN THE
*> FILE WHICH HAS A RECORD KEY VALUE OF
*> "EEEEEEEFFF067". THIS KEY VALUE IS
*> SEQUENTIALLY LOCATED BETWEEN TWO CURRENTLY
*> EXISTING RECORD KEYS IN THE FILE.
*>
START IX-FS1
KEY IS EQUAL TO IX-FS1-KEY
INVALID KEY MOVE FS1-STATUS TO FILESTATUS (2)
GO TO START-PASS-GF-02.
MOVE FS1-STATUS TO FILESTATUS (2).
READ IX-FS1 AT END
MOVE "AT END PATH TAKEN ON READ" TO RE-MARK.
MOVE IX-FS1R1-F-G-240 TO FILE-RECORD-INFO (1).
PERFORM FAIL.
MOVE "IX-36; 4.7.2 ETC." TO RE-MARK.
MOVE XRECORD-KEY (1) TO COMPUTED-A.
MOVE "INVALID KEY" TO CORRECT-A.
GO TO START-WRITE-GF-02.
START-PASS-GF-02.
PERFORM PASS.
GO TO START-WRITE-GF-02.
START-DELETE-GF-02.
PERFORM DE-LETE.
START-WRITE-GF-02.
PERFORM PRINT-DETAIL.
START-INIT-GF-03.
PERFORM START-INITIALIZE-RECORD.
MOVE "START-TEST-GF-03" TO PAR-NAME.
IF INIT-FLAG NOT EQUAL TO ZERO
MOVE "TEST IMPROPERLY INITIALIZED" TO RE-MARK
MOVE "**" TO FILESTATUS (3)
GO TO START-DELETE-GF-03.
MOVE "BBBBBBBBBC001" TO FS1-RECKEY-1-13.
MOVE "EEEEEEEEEF002ALTKEY1" TO FS1-ALTKEY1-1-20.
MOVE "WWWWWWWWWV398ALTKEY2" TO FS1-ALTKEY2-1-20.
MOVE WRK-FS1-RECKEY TO IX-REC-KEY-AREA.
MOVE WRK-FS1-ALTKEY1 TO IX-ALT-KEY1-AREA.
MOVE WRK-FS1-ALTKEY2 TO IX-ALT-KEY2-AREA.
START-TEST-GF-03.
*>
*> START-TEST-GF-03 - THE START STATEMENT SHOULD NOT FIND A
*> RECORD IN THE FILE WHICH HAS A RECORD
*> KEY VALUE OF "BBBBBBBBBC001". THIS KEY
*> VALUE IS SEQUENTIALLY LOWER THAN ANY
*> CURRENTLY EXISTING KEY IN THE FILE.
*>
START IX-FS1
KEY IS EQUAL TO IX-FS1-KEY
INVALID KEY MOVE FS1-STATUS TO FILESTATUS (3)
GO TO START-PASS-GF-03.
MOVE FS1-STATUS TO FILESTATUS (3).
READ IX-FS1 AT END
MOVE "AT END PATH TAKEN ON READ" TO RE-MARK.
MOVE IX-FS1R1-F-G-240 TO FILE-RECORD-INFO (1).
MOVE "IX-36; 4.7.2 ETC." TO RE-MARK.
PERFORM FAIL.
MOVE XRECORD-KEY (1) TO COMPUTED-A.
MOVE "INVALID KEY" TO CORRECT-A.
GO TO START-WRITE-GF-03.
START-PASS-GF-03.
PERFORM PASS.
GO TO START-WRITE-GF-03.
START-DELETE-GF-03.
PERFORM DE-LETE.
START-WRITE-GF-03.
PERFORM PRINT-DETAIL.
*> GF-04: key greater than any prime key in the file -> INVALID KEY.
START-INIT-GF-04.
MOVE "START-TEST-GF-04" TO PAR-NAME.
PERFORM START-INITIALIZE-RECORD.
IF INIT-FLAG NOT EQUAL TO ZERO
MOVE "TEST IMPROPERLY INITIALIZED" TO RE-MARK
MOVE "**" TO FILESTATUS (4)
GO TO START-DELETE-GF-04.
MOVE "UUUUUUUUUU401" TO FS1-RECKEY-1-13.
MOVE "YYYYYYYYYY400ALTKEY1" TO FS1-ALTKEY1-1-20.
MOVE "DDDDDDDDDD000ALTKEY2" TO FS1-ALTKEY2-1-20.
MOVE WRK-FS1-RECKEY TO IX-REC-KEY-AREA.
MOVE WRK-FS1-ALTKEY1 TO IX-ALT-KEY1-AREA.
MOVE WRK-FS1-ALTKEY2 TO IX-ALT-KEY2-AREA.
START-TEST-GF-04.
*>
*> START-TEST-GF-04 - THE START STATEMENT SHOULD NOT FIND A
*> RECORD IN THE FILE WHICH HAS A RECORD
*> KEY VALUE OF "UUUUUUUUUU401". THIS
*> VALUE IS SEQUENTIALLY ONE GREATER THAN
*> ANY RECORD KEY CURRENTLY EXISTING IN
*> THE FILE. AN INVALID KEY CONDITION
*> IS EXPECTED WHEN THE START IS EXECUTED.
*>
START IX-FS1
KEY IS EQUAL TO IX-FS1-KEY
INVALID KEY MOVE FS1-STATUS TO FILESTATUS (4)
GO TO START-PASS-GF-04.
MOVE FS1-STATUS TO FILESTATUS (4).
READ IX-FS1 AT END
MOVE "AT END PATH TAKEN ON READ" TO RE-MARK.
MOVE IX-FS1R1-F-G-240 TO FILE-RECORD-INFO (1).
MOVE "IX-36; 4.7.2 ETC." TO RE-MARK.
PERFORM FAIL.
MOVE XRECORD-KEY (1) TO COMPUTED-A.
MOVE "INVALID KEY" TO CORRECT-A.
GO TO START-WRITE-GF-04.
START-PASS-GF-04.
PERFORM PASS.
GO TO START-WRITE-GF-04.
START-DELETE-GF-04.
PERFORM DE-LETE.
START-WRITE-GF-04.
PERFORM PRINT-DETAIL.
*> GF-05: KEY operand is a data item subordinate to the record key
*> (positions 1-5); unique prefix -> record 19 expected to be found.
START-INIT-GF-05.
MOVE "START-TEST-GF-05" TO PAR-NAME.
PERFORM START-INITIALIZE-RECORD.
IF INIT-FLAG NOT EQUAL TO ZERO
MOVE "TEST IMPROPERLY INITIALIZED" TO RE-MARK
MOVE "**" TO FILESTATUS (5)
GO TO START-DELETE-GF-05.
MOVE "CDDDDDDDDD039" TO FS1-RECKEY-1-13.
MOVE "IIIIIIIIJJ083ALTKEY1" TO FS1-ALTKEY1-1-20.
MOVE "ABCXXXXXXX400ALTKEY2" TO FS1-ALTKEY2-1-20.
MOVE WRK-FS1-RECKEY TO IX-REC-KEY-AREA.
MOVE WRK-FS1-ALTKEY1 TO IX-ALT-KEY1-AREA.
MOVE WRK-FS1-ALTKEY2 TO IX-ALT-KEY2-AREA.
START-TEST-GF-05.
*> START-TEST-GF-05 - THE START STATEMENT USES AN OPERAND
*> IN THE KEY PHRASE WHICH IS NOT THE NAME
*> OF A RECORD KEY BUT IS THE NAME OF A
*> DATA ITEM WHICH IS SUBORDINATE TO THE
*> RECORD KEY. THE CONTENTS OF THE DATA ITEM
*> (POSITIONS 1 THRU 5 OF THE RECORD KEY)
*> IS A UNIQUE KEY VALUE FOR THE FILE. THE
*> BALANCE OF THE KEY (POSITIONS 6 THRU 13) IS
*> NOT A VALID KEY VALUE FOR THE FILE. THE
*> RECORD WITH THE RECORD KEY "CDDDDDDDDD038"
*> (RECORD NUMBER 19) IS EXPECTED TO BE FOUND.
*>
START IX-FS1
KEY IS EQUAL TO IX-FS1-KEY-1-5
INVALID KEY MOVE FS1-STATUS TO FILESTATUS (5)
MOVE "INVALID KEY ON START" TO COMPUTED-A
GO TO START-FAIL-GF-05.
MOVE FS1-STATUS TO FILESTATUS (5).
READ IX-FS1 AT END
MOVE "AT END ON READ" TO COMPUTED-A
GO TO START-FAIL-GF-05.
MOVE IX-FS1R1-F-G-240 TO FILE-RECORD-INFO (1).
IF XRECORD-NUMBER (1) EQUAL TO 19
PERFORM PASS
GO TO START-WRITE-GF-05.
MOVE 19 TO RECNO.
PERFORM DISPLAY-RECORD-KEYS.
MOVE XRECORD-NUMBER (1) TO COMPUTED-18V0.
START-FAIL-GF-05.
PERFORM FAIL.
MOVE 19 TO CORRECT-18V0.
MOVE "IX-36; 4.7.2 ETC.; SUBORDINATE D-I OF KEY" TO RE-MARK.
GO TO START-WRITE-GF-05.
START-DELETE-GF-05.
PERFORM DE-LETE.
START-WRITE-GF-05.
PERFORM PRINT-DETAIL.
*> GF-06: subordinate key item (positions 1-5) matching several
*> records; pointer must land on the FIRST such record (no. 190).
START-INIT-GF-06.
MOVE "START-TEST-GF-06" TO PAR-NAME.
PERFORM START-INITIALIZE-RECORD.
IF INIT-FLAG NOT EQUAL TO ZERO
MOVE "TEST IMPROPERLY INITIALIZED" TO RE-MARK
MOVE "**" TO FILESTATUS (6)
GO TO START-DELETE-GF-06.
MOVE "TTTTTUUUUU390" TO FS1-RECKEY-1-13.
MOVE "XYYYYYYYYY399ALTKEY1" TO FS1-ALTKEY1-1-20.
MOVE "WWWWWWWWWV399ALTKEY2" TO FS1-ALTKEY2-1-20.
MOVE WRK-FS1-RECKEY TO IX-REC-KEY-AREA.
MOVE WRK-FS1-ALTKEY1 TO IX-ALT-KEY1-AREA.
MOVE WRK-FS1-ALTKEY2 TO IX-ALT-KEY2-AREA.
START-TEST-GF-06.
*>
*> START-TEST-GF-06 - THE START STATEMENT USES AN OPERAND IN THE
*> KEY PHRASE WHICH IS NOT THE NAME OF A RECORD
*> KEY BUT IS THE NAME OF A DATA ITEM THAT IS
*> SUBORDINATE TO THE RECORD KEY. THE CONTENTS
*> OF THE DATA ITEM (POSITIONS 1 THRU 5 OF THE
*> RECORD KEY) IS A DUPLICATE OF THE FIRST
*> 5 POSITIONS OF 5 OTHER RECORDS IN THE FILE.
*> THIS TEST EXPECTS THE RECORD POINTER
*> TO BE POSITIONED TO RECORD KEY TTTTTTTTTT380
*> (RECORD NUMBER 190) WHICH WAS THE
*> FIRST RECORD WRITTEN TO THE FILE THAT
*> CONTAINS TTTTT IN THE FIRST 5 POSITIONS OF
*> THE KEY. THE RECORD KEY WAS LOADED WITH THE
*> VALUE "TTTTTUUUUU390" (KEY FOR RECORD NUMBER
*> 195) BEFORE THE START WAS EXECUTED.
*>
START IX-FS1
KEY IS EQUAL TO IX-FS1-KEY-1-5
INVALID KEY MOVE FS1-STATUS TO FILESTATUS (6)
MOVE "INVALID KEY ON START" TO COMPUTED-A
GO TO START-FAIL-GF-06.
MOVE FS1-STATUS TO FILESTATUS (6).
READ IX-FS1 AT END
MOVE "AT END ON READ" TO COMPUTED-A
GO TO START-FAIL-GF-06.
MOVE IX-FS1R1-F-G-240 TO FILE-RECORD-INFO (1).
IF XRECORD-NUMBER (1) EQUAL TO 190
PERFORM PASS
GO TO START-WRITE-GF-06.
MOVE 65 TO RECNO.
PERFORM DISPLAY-RECORD-KEYS.
MOVE XRECORD-NUMBER (1) TO COMPUTED-18V0.
START-FAIL-GF-06.
MOVE "IX-36; 4.7.2 ETC.; SUBORDINATE D-I OF KEY" TO RE-MARK.
PERFORM FAIL.
MOVE 190 TO CORRECT-18V0.
GO TO START-WRITE-GF-06.
START-DELETE-GF-06.
PERFORM DE-LETE.
START-WRITE-GF-06.
PERFORM PRINT-DETAIL.
*> GF-07: subordinate key item (positions 1-10) with no matching
*> record; checks right-truncation of the longer operand -> INVALID
*> KEY expected.
START-INIT-GF-07.
MOVE "START-TEST-GF-07" TO PAR-NAME.
PERFORM START-INITIALIZE-RECORD.
IF INIT-FLAG NOT EQUAL TO ZERO
MOVE "TEST IMPROPERLY INITIALIZED" TO RE-MARK
MOVE "**" TO FILESTATUS (7)
GO TO START-DELETE-GF-07.
MOVE "CCCCCCD022 " TO FS1-RECKEY-1-13.
MOVE "FFFFFFFFFG022ALTKEY1" TO FS1-ALTKEY1-1-20.
MOVE "VVVVVVVVVV380ALTKEY2" TO FS1-ALTKEY2-1-20.
MOVE WRK-FS1-RECKEY TO IX-REC-KEY-AREA.
MOVE WRK-FS1-ALTKEY1 TO IX-ALT-KEY1-AREA.
MOVE WRK-FS1-ALTKEY2 TO IX-ALT-KEY2-AREA.
START-TEST-GF-07.
*>
*> START-TEST-GF-07 - THE START STATEMENT USES AN OPERAND IN THE
*> KEY PHRASE OF THE START STATEMENT WHICH IS
*> A DATA ITEM SUBORDINATE TO THE RECORD KEY
*> NAME. THE CONTENTS OF THE DATA ITEM
*> (POSITIONS 1 THRU 10 OF THE RECORD KEY)
*> IS LOADED WITH "CCCCCCD022". NO SUCH RECORD
*> SHOULD BE IN THE FILE. IF IN THE COMPARISON,
*> THE LONGER OPERAND IS TRUNCATED ON THE LEFT
*> INSTEAD OF ON THE RIGHT THE CONTENTS OF
*> THE DATA ITEM WILL MATCH A RECORD IN THE
*> FILE. THIS TEST EXPECTS THE LONGER OPERAND
*> TO BE TRUNCATED ON THE RIGHT CAUSING NO
*> DATA ITEM MATCH AND RESULTING IN AN INVALID
*> KEY CONDITION WHEN THE START IS EXECUTED.
*>
START IX-FS1
KEY IS EQUAL TO IX-FS1-KEY-1-10
INVALID KEY MOVE FS1-STATUS TO FILESTATUS (7)
GO TO START-PASS-GF-07.
MOVE FS1-STATUS TO FILESTATUS (7).
READ IX-FS1 AT END
MOVE "AT END PATH TAKEN ON READ" TO RE-MARK.
MOVE IX-FS1R1-F-G-240 TO FILE-RECORD-INFO (1).
MOVE "IX-36; 4.7.2 ETC." TO RE-MARK.
PERFORM FAIL.
MOVE XRECORD-KEY (1) TO COMPUTED-A.
MOVE "INVALID KEY" TO CORRECT-A.
GO TO START-WRITE-GF-07.
START-PASS-GF-07.
PERFORM PASS.
GO TO START-WRITE-GF-07.
START-DELETE-GF-07.
PERFORM DE-LETE.
START-WRITE-GF-07.
PERFORM PRINT-DETAIL.
*> GF-08: 10-character subordinate key lower than any file key ->
*> INVALID KEY.  GF-09: same but greater than any file key ->
*> INVALID KEY.  The file is closed after GF-09.
START-INIT-GF-08.
MOVE "START-TEST-GF-08" TO PAR-NAME.
PERFORM START-INITIALIZE-RECORD.
IF INIT-FLAG NOT EQUAL TO ZERO
MOVE "TEST IMPROPERLY INITIALIZED" TO RE-MARK
MOVE "**" TO FILESTATUS (8)
GO TO START-DELETE-GF-08.
MOVE "ABBBBBBBBC002" TO FS1-RECKEY-1-13.
MOVE "EEEEEEEEEF002ALTKEY1" TO FS1-ALTKEY1-1-20.
MOVE "WWWWWWWWWW400ALTKEY2" TO FS1-ALTKEY2-1-20.
MOVE WRK-FS1-RECKEY TO IX-REC-KEY-AREA.
MOVE WRK-FS1-ALTKEY1 TO IX-ALT-KEY1-AREA.
MOVE WRK-FS1-ALTKEY2 TO IX-ALT-KEY2-AREA.
START-TEST-GF-08.
*>
*> START-TEST-GF-08 - THIS TEST USES AN OPERAND IN THE
*> KEY PHRASE OF THE START STATEMENT WHICH IS
*> A DATA ITEM SUBORDINATE TO THE RECORD KEY
*> NAME. THE CONTENTS OF THE DATA ITEM
*> (POSITIONS 1 THRU 10 OF THE RECORD KEY) IS
*> LOADED WITH "ABBBBBBBBC". THIS KEY VALUE
*> IS LOWER THAN ANY RECORD KEY VALUE IN
*> POSITION 1 THRU 10 EXISTING IN THE FILE
*> THEREFORE AN INVALID KEY CONDITION IS
*> EXPECTED WHEN THE START STATEMENT IS
*> EXECUTED.
*>
START IX-FS1
KEY IS EQUAL TO IX-FS1-KEY-1-10
INVALID KEY MOVE FS1-STATUS TO FILESTATUS (8)
GO TO START-PASS-GF-08.
MOVE FS1-STATUS TO FILESTATUS (8).
READ IX-FS1 AT END
MOVE "AT END PATH TAKEN ON READ" TO RE-MARK.
MOVE IX-FS1R1-F-G-240 TO FILE-RECORD-INFO (1).
MOVE "IX-36; 4.7.2 ETC." TO RE-MARK.
PERFORM FAIL.
MOVE XRECORD-KEY (1) TO COMPUTED-A.
MOVE "INVALID KEY" TO CORRECT-A.
GO TO START-WRITE-GF-08.
START-PASS-GF-08.
PERFORM PASS.
GO TO START-WRITE-GF-08.
START-DELETE-GF-08.
PERFORM DE-LETE.
START-WRITE-GF-08.
PERFORM PRINT-DETAIL.
START-INIT-GF-09.
MOVE "START-TEST-GF-09" TO PAR-NAME.
PERFORM START-INITIALIZE-RECORD.
IF INIT-FLAG NOT EQUAL TO ZERO
MOVE "TEST IMPROPERLY INITIALIZED" TO RE-MARK
MOVE "**" TO FILESTATUS (9)
GO TO START-DELETE-GF-09.
MOVE "UUUUUUUUUV400" TO FS1-RECKEY-1-13.
MOVE "EEEEEEEEEF002ALTKEY1" TO FS1-ALTKEY1-1-20.
MOVE "WWWWWWWWWV398ALTKEY2" TO FS1-ALTKEY2-1-20.
MOVE WRK-FS1-RECKEY TO IX-REC-KEY-AREA.
MOVE WRK-FS1-ALTKEY1 TO IX-ALT-KEY1-AREA.
MOVE WRK-FS1-ALTKEY2 TO IX-ALT-KEY2-AREA.
START-TEST-GF-09.
*>
*> START-TEST-GF-09 - THIS TEST USES AN OPERAND IN THE
*> KEY PHRASE OF THE START STATEMENT WHICH IS
*> A DATA ITEM SUBORDINATE TO THE RECORD KEY
*> NAME. THE CONTENTS OF THE DATA ITEM
*> (POSITIONS 1 THRU 10 OF THE RECORD KEY) IS
*> LOADED WITH "UUUUUUUUUV". THIS KEY VALUE
*> IS GREATER THAN ANY RECORD KEY VALUE IN
*> POSITION 1 THRU 10 EXISTING IN THE FILE
*> THEREFORE AN INVALID KEY CONDITION IS
*> EXPECTED WHEN THE START STATEMENT IS
*> EXECUTED.
*>
START IX-FS1
KEY IS EQUAL TO IX-FS1-KEY-1-10
INVALID KEY MOVE FS1-STATUS TO FILESTATUS (9)
GO TO START-PASS-GF-09.
MOVE FS1-STATUS TO FILESTATUS (9).
READ IX-FS1 AT END
MOVE "AT END PATH TAKEN ON READ" TO RE-MARK.
MOVE IX-FS1R1-F-G-240 TO FILE-RECORD-INFO (1).
MOVE "IX-36; 4.7.2 ETC." TO RE-MARK.
PERFORM FAIL.
MOVE XRECORD-KEY (1) TO COMPUTED-A.
MOVE "INVALID KEY" TO CORRECT-A.
GO TO START-WRITE-GF-09.
START-PASS-GF-09.
PERFORM PASS.
GO TO START-WRITE-GF-09.
START-DELETE-GF-09.
PERFORM DE-LETE.
START-WRITE-GF-09.
PERFORM PRINT-DETAIL.
CLOSE IX-FS1.
*> File-status verification phase: GF-10..GF-18 re-check the file
*> status values captured in FILESTATUS (1..9) by GF-01..GF-09.
*> "**" marks a test that was never initialized and is DE-LETEd.
START-INIT-GF-FILE-STATUS.
MOVE "FILE STATUS START:00" TO FEATURE.
MOVE "START-TEST-GF-10" TO PAR-NAME.
*>
*> THIS SERIES OF TESTS CHECKS THE CONTENTS OF THE FILE STATUS
*>
START-TEST-GF-10.
IF FILESTATUS (1) EQUAL TO "**"
PERFORM DE-LETE
GO TO START-WRITE-GF-10.
*>
*> START-TEST-GF-10 - THIS TEST CHECKS THE FILE STATUS CONTENTS
*> RESULTING FROM START-TEST-GF-01. THE FILE
*> STATUS CONTENTS IS EXPECTED TO BE "00".
*>
IF FILESTATUS (1) EQUAL TO "00"
PERFORM PASS
ELSE
MOVE "IX-3; 1.3.4 (1) A; FROM START-TEST-GF-01 " TO RE-MARK
PERFORM FAIL
MOVE "00" TO CORRECT-A
MOVE FILESTATUS (1) TO COMPUTED-A.
START-WRITE-GF-10.
PERFORM PRINT-DETAIL.
START-TEST-GF-11.
MOVE "FILE STATUS START:23" TO FEATURE.
MOVE "START-TEST-GF-11" TO PAR-NAME.
IF FILESTATUS (2) EQUAL TO "**"
PERFORM DE-LETE
GO TO START-WRITE-GF-11.
*>
*> START-TEST-GF-11 - THIS TEST CHECKS THE FILE STATUS CONTENTS
*> RESULTING FROM START-TEST-GF-02. THE FILE
*> STATUS CONTENTS IS EXPECTED TO BE "23".
*>
IF FILESTATUS (2) EQUAL TO "23"
PERFORM PASS
ELSE PERFORM FAIL
MOVE "IX-3; 1.3.4 (1) A; FROM START-TEST-GF-02 " TO RE-MARK
MOVE "23" TO CORRECT-A
MOVE FILESTATUS (2) TO COMPUTED-A.
START-WRITE-GF-11.
PERFORM PRINT-DETAIL.
START-TEST-GF-12.
MOVE "FILE STATUS START:23" TO FEATURE.
MOVE "START-TEST-GF-12" TO PAR-NAME.
IF FILESTATUS (3) EQUAL TO "**"
PERFORM DE-LETE
GO TO START-WRITE-GF-12.
*>
*> START-TEST-GF-12 - THIS TEST CHECKS THE FILE STATUS CONTENTS
*> RESULTING FROM START-TEST-GF-03. THE FILE
*> STATUS CONTENTS IS EXPECTED TO BE "23".
*>
IF FILESTATUS (3) EQUAL TO "23"
PERFORM PASS
ELSE PERFORM FAIL
MOVE "IX-3; 1.3.4 (1) A; FROM START-TEST-GF-03 " TO RE-MARK
MOVE "23" TO CORRECT-A
MOVE FILESTATUS (3) TO COMPUTED-A.
START-WRITE-GF-12.
PERFORM PRINT-DETAIL.
START-TEST-GF-13.
MOVE "START-TEST-GF-13" TO PAR-NAME.
MOVE "FILE STATUS START:23" TO FEATURE.
IF FILESTATUS (4) EQUAL TO "**"
PERFORM DE-LETE
GO TO START-WRITE-GF-13.
*>
*> START-TEST-GF-13 - THIS TEST CHECKS THE FILE STATUS CONTENTS
*> RESULTING FROM START-TEST-GF-04. THE FILE
*> STATUS CONTENTS IS EXPECTED TO BE "23".
*>
IF FILESTATUS (4) EQUAL TO "23"
PERFORM PASS
ELSE PERFORM FAIL
MOVE "IX-3; 1.3.4 (1) A; FROM START-TEST-GF-04 " TO RE-MARK
MOVE "23" TO CORRECT-A
MOVE FILESTATUS (4) TO COMPUTED-A.
START-WRITE-GF-13.
PERFORM PRINT-DETAIL.
START-TEST-GF-14.
MOVE "START-TEST-GF-14" TO PAR-NAME.
MOVE "FILE STATUS START:00" TO FEATURE.
IF FILESTATUS (5) EQUAL TO "**"
PERFORM DE-LETE
GO TO START-WRITE-GF-14.
*>
*> START-TEST-GF-14 - THIS TEST CHECKS THE FILE STATUS CONTENTS
*> RESULTING FROM START-TEST-GF-05. THE FILE
*> STATUS CONTENTS IS EXPECTED TO BE "00"
*>
IF FILESTATUS (5) EQUAL TO "00"
PERFORM PASS
ELSE PERFORM FAIL
MOVE "IX-3; 1.3.4 (1) A; FROM START-TEST-GF-05 " TO RE-MARK
MOVE "00" TO CORRECT-A
MOVE FILESTATUS (5) TO COMPUTED-A.
START-WRITE-GF-14.
PERFORM PRINT-DETAIL.
START-TEST-GF-15.
MOVE "START-TEST-GF-15" TO PAR-NAME.
MOVE "FILE STATUS START:00" TO FEATURE.
IF FILESTATUS (6) EQUAL TO "**"
PERFORM DE-LETE
GO TO START-WRITE-GF-15.
*>
*> START-TEST-GF-15 - THIS TEST CHECKS THE FILE STATUS CONTENTS
*> RESULTING FROM START-TEST-GF-06. THE FILE
*> STATUS CONTENTS IS EXPECTED TO BE "00"
*>
IF FILESTATUS (6) EQUAL TO "00"
PERFORM PASS
ELSE PERFORM FAIL
MOVE "IX-3; 1.3.4 (1) A; FROM START-TEST-GF-06 " TO RE-MARK
MOVE "00" TO CORRECT-A
MOVE FILESTATUS (6) TO COMPUTED-A.
START-WRITE-GF-15.
PERFORM PRINT-DETAIL.
*> NOTE(review): paragraph name "START-TEST-GGF-16" looks like a typo
*> for "START-TEST-GF-16"; left unchanged since paragraph names may
*> be referenced elsewhere in the program — confirm before renaming.
START-TEST-GGF-16.
MOVE "START-TEST-GF-16" TO PAR-NAME.
MOVE "FILE STATUS START:23" TO FEATURE.
IF FILESTATUS (7) EQUAL TO "**"
PERFORM DE-LETE
GO TO START-WRITE-GF-16.
*>
*> START-TEST-GF-16 - THIS TEST CHECKS THE FILE STATUS CONTENTS
*> RESULTING FROM START-TEST-GF-07. THE FILE
*> STATUS CONTENTS IS EXPECTED TO BE "23"
*>
IF FILESTATUS (7) EQUAL TO "23"
PERFORM PASS
ELSE PERFORM FAIL
MOVE "IX-3; 1.3.4 (1) A; FROM START-TEST-GF-07 " TO RE-MARK
MOVE "23" TO CORRECT-A
MOVE FILESTATUS (7) TO COMPUTED-A.
START-WRITE-GF-16.
PERFORM PRINT-DETAIL.
START-TEST-GF-17.
MOVE "START-TEST-GF-17" TO PAR-NAME.
MOVE "FILE STATUS START:23" TO FEATURE.
IF FILESTATUS (8) EQUAL TO "**"
PERFORM DE-LETE
GO TO START-WRITE-GF-17.
*>
*> START-TEST-GF-17 - THIS TEST CHECKS THE FILE STATUS CONTENTS
*> RESULTING FROM START-TEST-GF-08. THE FILE
*> STATUS CONTENTS IS EXPECTED TO BE "23".
*>
IF FILESTATUS (8) EQUAL TO "23"
PERFORM PASS
ELSE PERFORM FAIL
MOVE "IX-3; 1.3.4 (1) A; FROM START-TEST-GF-08 " TO RE-MARK
MOVE "23" TO CORRECT-A
MOVE FILESTATUS (8) TO COMPUTED-A.
START-WRITE-GF-17.
PERFORM PRINT-DETAIL.
START-TEST-GF-18.
MOVE "START-TEST-GF-18" TO PAR-NAME.
MOVE "FILE STATUS START:23" TO FEATURE.
IF FILESTATUS (9) EQUAL TO "**"
PERFORM DE-LETE
GO TO START-WRITE-GF-18.
*>
*> START-TEST-GF-18 - THIS TEST CHECKS THE FILE STATUS CONTENTS
*> RESULTING FROM START-TEST-GF-09. THE FILE
*> STATUS CONTENTS IS EXPECTED TO BE "23".
*>
IF FILESTATUS (9) EQUAL TO "23"
PERFORM PASS
ELSE PERFORM FAIL
MOVE "IX-3; 1.3.4 (1) A; FROM START-TEST-GF-09 " TO RE-MARK
MOVE "23" TO CORRECT-A
MOVE FILESTATUS (9) TO COMPUTED-A.
START-WRITE-GF-18.
PERFORM PRINT-DETAIL.
*> *****************************************************************
*> Second START series (GF-19..27): KEY EQUAL on the first ALTERNATE
*> record key (declared without DUPLICATES).  GF-19 expects the
*> record with alternate key XXXXXXXXXY382ALTKEY1 (record 191).
START-INIT-005.
OPEN INPUT IX-FS1.
MOVE "STR EQ ALTKY W/O DUP" TO FEATURE.
MOVE "START-TEST-GF-19" TO PAR-NAME.
MOVE "********************" TO HOLD-FILESTATUS-RECORD.
*>
*> THIS TEST TESTS THE "START -- EQUAL TO" FOR PROPER POSITIONING
*> OF THE RECORD POINTER FOR THE SUBSEQUENT READ STATEMENT.
*> START-TEST-GF-19/27 USES ONLY THE ALTERNATE RECORD KEY WITHOUT
*> THE DUPLICATES OPTION FOR ESTABLISHING THE CURRENT RECORD
*> POINTER FOR THE FILE. THE FOLLOWING IS A SUMMARY OF THE TEST
*> CONDITIONS AND THE EXPECTED ACTION TO BE TAKEN FOR THE TESTS.
*>
*> CONDITIONS (CONTENTS OF KEY) / ACTION
*>
*> START-TEST-GF-19 - EQUAL A RECORD IN FILE / RECORD FOUND
*> START-TEST-GF-20 - BETWEEN 2 EXISTING KEY VALUES / INVALID KE
*> START-TEST-GF-21 - LESS THAN FIRST FILE RECORD / INVALID KEY
*> START-TEST-GF-22 - GREATER THAN LAST FILE RECORD / INVALID KE
*> START-TEST-GF-23 - UNEQUAL SIZE OPERANDS (EQUAL) / RECORD FOU
*> START-TEST-GF-24 - UNEQUAL SIZE OPERANDS (EQUAL) / RECORD FOU
*> START-TEST-GF-25 - UNEQUAL SIZE OPERANDS (UNEQUAL) / INVLD KE
*> START-TEST-GF-26 - UNEQUAL SIZE OPERANDS (UNEQUAL) / INVLD KE
*> START-TEST-GF-27 - UNEQUAL SIZE OPERANDS (UNEQUAL) /INVLD KEY
*>
*> BEFORE EACH TEST A RECORD IS MADE AVAILABLE WHICH IS DIFFERENT
*> THAN THE ONE WHICH IS EXPECTED TO BE PRESENT FOLLOWING A TEST.
*> IF AN INVALID KEY IS EXPECTED FROM THE TEST, THE KEYS
*> ASSOCIATED WITH THE FILE WHICH ARE NOT PARTICIPATING IN THE
*> START STATEMENT WILL BE LOADED WITH VALUES WHICH WOULD
*> MATCH RECORDS IN THE FILE. IF KEY MATCH IS EXPECTED FROM
*> THE TEST, THE KEYS ASSOCIATED WITH THE FILE WHICH ARE NOT
*> PARTICIPATING IN THE START STATEMENT WILL BE LOADED WITH
*> VALUES WHICH WOULD NOT MATCH RECORDS IN THE FILE. THE FILE
*> STATUS FROM EXECUTION OF EACH START IS CAPTURED FOR LATER TESTS
*>
START-INIT-GF-19.
PERFORM START-INITIALIZE-RECORD.
IF INIT-FLAG NOT EQUAL TO ZERO
MOVE "**" TO FILESTATUS (1)
MOVE "TEST IMPROPERLY INITIALIZED" TO RE-MARK
GO TO START-DELETE-GF-19.
MOVE "CCCCCDDDDD022" TO FS1-RECKEY-1-13.
MOVE "XXXXXXXXXY382ALTKEY1" TO FS1-ALTKEY1-1-20.
MOVE "EEEEEFFFFF022ALTKEY2" TO FS1-ALTKEY2-1-20.
MOVE WRK-FS1-RECKEY TO IX-REC-KEY-AREA.
MOVE WRK-FS1-ALTKEY1 TO IX-ALT-KEY1-AREA.
MOVE WRK-FS1-ALTKEY2 TO IX-ALT-KEY2-AREA.
START-TEST-GF-19.
*>
*> START-TEST-GF-19 - THE START SHOULD FIND A RECORD IN THE FILE
*> WHICH HAS AN ALTERNATE KEY VALUE OF
*> XXXXXXXXXY382ALTKEY1 (RECORD NUMBER 191).
*>
START IX-FS1
KEY IS EQUAL TO IX-FS1-ALTKEY1
INVALID KEY MOVE FS1-STATUS TO FILESTATUS (1)
MOVE "INVALID KEY ON START" TO COMPUTED-A
GO TO START-FAIL-GF-19.
MOVE FS1-STATUS TO FILESTATUS (1).
READ IX-FS1 AT END
MOVE "AT END ON READ" TO COMPUTED-A
GO TO START-FAIL-GF-19.
MOVE IX-FS1R1-F-G-240 TO FILE-RECORD-INFO (1).
IF XRECORD-NUMBER (1) EQUAL TO 191
PERFORM PASS
MOVE SPACE TO RE-MARK
GO TO START-WRITE-GF-19.
MOVE 66 TO RECNO.
PERFORM DISPLAY-ALTERNATE-KEY1.
MOVE XRECORD-NUMBER (1) TO COMPUTED-18V0.
START-FAIL-GF-19.
MOVE "IX-36; 4.7.2 ETC. " TO RE-MARK
PERFORM FAIL.
MOVE 191 TO CORRECT-18V0.
GO TO START-WRITE-GF-19.
START-DELETE-GF-19.
PERFORM DE-LETE.
START-WRITE-GF-19.
PERFORM PRINT-DETAIL.
START-INIT-GF-20.
MOVE "START-TEST-GF-20" TO PAR-NAME.
PERFORM START-INITIALIZE-RECORD.
IF INIT-FLAG NOT EQUAL TO ZERO
MOVE "TEST IMPROPERLY INITIALIZED" TO RE-MARK
MOVE "**" TO FILESTATUS (2)
GO TO START-DELETE-GF-20.
MOVE "EEEEEEEFFF066" TO FS1-RECKEY-1-13.
MOVE "HHHHHHHIII067ALTKEY1" TO FS1-ALTKEY1-1-20.
MOVE "TTTTTTTSSS334ALTKEY2" TO FS1-ALTKEY2-1-20.
MOVE WRK-FS1-RECKEY TO IX-REC-KEY-AREA.
MOVE WRK-FS1-ALTKEY1 TO IX-ALT-KEY1-AREA.
MOVE WRK-FS1-ALTKEY2 TO IX-ALT-KEY2-AREA.
START-TEST-GF-20.
*>
*> START-TEST-GF.02 - THE START SHOULD NOT FIND A RECORD IN THE
*> FILE WHICH HAS AN ALTERNATE KEY VALUE OF
*> HHHHHHHIII067ALTKEY1. THIS KEY VALUE IS
*> SEQUENTIALLY LOCATED BETWEEN TWO CURRENTLY
*> EXISTING ALTERNATE KEYS IN THE FILE.
*>
START IX-FS1
KEY IS EQUAL TO IX-FS1-ALTKEY1
INVALID KEY MOVE FS1-STATUS TO FILESTATUS (2)
GO TO START-PASS-GF-20.
MOVE FS1-STATUS TO FILESTATUS (2).
READ IX-FS1 AT END
MOVE "AT END PATH TAKEN ON READ" TO RE-MARK.
MOVE IX-FS1R1-F-G-240 TO FILE-RECORD-INFO (1).
MOVE "IX-36; 4.7.2 ETC. " TO RE-MARK
PERFORM FAIL.
MOVE ALTERNATE-KEY1 (1) TO COMPUTED-A.
MOVE "INVALID KEY" TO CORRECT-A.
GO TO START-WRITE-GF-20.
START-PASS-GF-20.
PERFORM PASS.
MOVE "INVALID KEY" TO RE-MARK.
GO TO START-WRITE-GF-20.
START-DELETE-GF-20.
PERFORM DE-LETE.
START-WRITE-GF-20.
PERFORM PRINT-DETAIL.
START-INIT-GF-21.
MOVE "START-TEST-GF-21" TO PAR-NAME.
PERFORM START-INITIALIZE-RECORD.
IF INIT-FLAG NOT EQUAL TO ZERO
MOVE "TEST IMPROPERLY INITIALIZED" TO RE-MARK
MOVE "**" TO FILESTATUS (3)
GO TO START-DELETE-GF-21.
MOVE "BBBBBBBBBC002" TO FS1-RECKEY-1-13.
MOVE "EEEEEEEEEF001ALTKEY1" TO FS1-ALTKEY1-1-20.
MOVE "WWWWWWWWWV398ALTKEY2" TO FS1-ALTKEY2-1-20.
MOVE WRK-FS1-RECKEY TO IX-REC-KEY-AREA.
MOVE WRK-FS1-ALTKEY1 TO IX-ALT-KEY1-AREA.
MOVE WRK-FS1-ALTKEY2 TO IX-ALT-KEY2-AREA.
START-TEST-GF-21.
*>
*> START-TEST-GF-21 - THE START STATEMENT SHOULD NOT FIND A
*> RECORD IN THE FILE WHICH HAS AN ALTERNATE
*> KEY VALUE OF EEEEEEEEEF001ALTKEY1. THIS KEY
*> VALUE IS SEQUENTIALLY LOWER THAN ANY
*> CURRENTLY EXISTING KEY IN THE FILE.
*>
START IX-FS1
KEY IS EQUAL TO IX-FS1-ALTKEY1
INVALID KEY MOVE FS1-STATUS TO FILESTATUS (3)
GO TO START-PASS-GF-21.
MOVE FS1-STATUS TO FILESTATUS (3).
READ IX-FS1 AT END
MOVE "AT END PATH TAKEN ON READ" TO RE-MARK.
MOVE IX-FS1R1-F-G-240 TO FILE-RECORD-INFO (1).
MOVE "IX-36; 4.7.2 ETC. " TO RE-MARK
PERFORM FAIL.
MOVE ALTERNATE-KEY1 (1) TO COMPUTED-A.
MOVE "INVALID KEY" TO CORRECT-A.
GO TO START-WRITE-GF-21.
START-PASS-GF-21.
PERFORM PASS.
MOVE "INVALID KEY" TO RE-MARK.
GO TO START-WRITE-GF-21.
START-DELETE-GF-21.
PERFORM DE-LETE.
START-WRITE-GF-21.
PERFORM PRINT-DETAIL.
START-INIT-GF-22.
MOVE "START-TEST-GF-22" TO PAR-NAME.
PERFORM START-INITIALIZE-RECORD.
IF INIT-FLAG NOT EQUAL TO ZERO
MOVE "TEST IMPROPERLY INITIALIZED" TO RE-MARK
MOVE "**" TO FILESTATUS (4)
GO TO START-DELETE-GF-22.
MOVE "UUUUUUUUUU400" TO FS1-RECKEY-1-13.
MOVE "YYYYYYYYYY401ALTKEY1" TO FS1-ALTKEY1-1-20.
MOVE "DDDDDDDDDD000ALTKEY2" TO FS1-ALTKEY2-1-20.
MOVE WRK-FS1-RECKEY TO IX-REC-KEY-AREA.
MOVE WRK-FS1-ALTKEY1 TO IX-ALT-KEY1-AREA.
MOVE WRK-FS1-ALTKEY2 TO IX-ALT-KEY2-AREA.
START-TEST-GF-22.
*>
*> START-TEST-GF-22 - THE START STATEMENT SHOULD NOT FIND A
*> RECORD IN THE FILE WHICH HAS AN ALTERNATE
*> KEY VALUE OF YYYYYYYYYY401ALTKEY1. THIS
*> VALUE IS SEQUENTIALLY GREATER THAN
*> ANY ALTERNATE KEY CURRENTLY EXISTING IN
*> THE FILE. AN INVALID KEY CONDITION
*> IS EXPECTED WHEN THE START IS EXECUTED.
*>
START IX-FS1
KEY IS EQUAL TO IX-FS1-ALTKEY1
INVALID KEY MOVE FS1-STATUS TO FILESTATUS (4)
GO TO START-PASS-GF-22.
MOVE FS1-STATUS TO FILESTATUS (4).
READ IX-FS1 AT END
MOVE "AT END PATH TAKEN ON READ" TO RE-MARK.
MOVE IX-FS1R1-F-G-240 TO FILE-RECORD-INFO (1).
MOVE "IX-36; 4.7.2 ETC. " TO RE-MARK
PERFORM FAIL.
MOVE ALTERNATE-KEY1 (1) TO COMPUTED-A.
MOVE "INVALID KEY" TO CORRECT-A.
GO TO START-WRITE-GF-22.
START-PASS-GF-22.
PERFORM PASS.
GO TO START-WRITE-GF-22.
START-DELETE-GF-22.
PERFORM DE-LETE.
START-WRITE-GF-22.
PERFORM PRINT-DETAIL.
*>
*> PARAGRAPH GROUP GF-23 THRU GF-27: "START ... KEY IS EQUAL TO"
*> WHERE THE KEY OPERAND IS A DATA ITEM SUBORDINATE TO ALTERNATE
*> KEY 1 (LEADING 5 OR 10 CHARACTERS), EXERCISING UNEQUAL-SIZE
*> OPERAND COMPARISON.  STATUS SLOTS FILESTATUS (5) THRU (9) ARE
*> FILLED FOR RE-CHECKING BY TESTS GF-32 THRU GF-36 BELOW.
*>
START-INIT-GF-23.
MOVE "START-TEST-GF-23" TO PAR-NAME.
PERFORM START-INITIALIZE-RECORD.
IF INIT-FLAG NOT EQUAL TO ZERO
MOVE "TEST IMPROPERLY INITIALIZED" TO RE-MARK
MOVE "**" TO FILESTATUS (5)
GO TO START-DELETE-GF-23.
MOVE "CDDDDDDDDD039" TO FS1-RECKEY-1-13.
MOVE "GGGGHXXXXX052ALTKEY1" TO FS1-ALTKEY1-1-20.
MOVE "ABCXXXXXXX400ALTKEY2" TO FS1-ALTKEY2-1-20.
MOVE WRK-FS1-RECKEY TO IX-REC-KEY-AREA.
MOVE WRK-FS1-ALTKEY1 TO IX-ALT-KEY1-AREA.
MOVE WRK-FS1-ALTKEY2 TO IX-ALT-KEY2-AREA.
START-TEST-GF-23.
*> START-TEST-GF-23 - THE START STATEMENT USES AN OPERAND
*> IN THE KEY PHRASE WHICH IS NOT THE NAME
*> OF AN ALTERNATE KEY BUT IS THE NAME OF A
*> DATA ITEM WHICH IS SUBORDINATE TO THE
*> ALTERNATE KEY. THE CONTENTS OF THE DATA ITEM
*> (POSITIONS 1 THRU 5 OF THE ALTERNATE KEY)
*> IS A UNIQUE KEY VALUE FOR THE FILE. THE
*> BALANCE OF THE KEY (POSITIONS 6 THRU 13 OF
*> THE ALTERNATE KEY IS NOT A VALID KEY VALUE
*> FOR THE FILE. THE
*> RECORD WITH THE ALTERNATE KEY "GGGGHHHHHH052
*> ALTKEY1 (RECORD NUMBER 26) IS EXPECTED TO
*> BE FOUND.
*>
START IX-FS1
KEY IS EQUAL TO IX-FS1-ALTKEY1-1-5
INVALID KEY MOVE FS1-STATUS TO FILESTATUS (5)
MOVE "INVALID KEY ON START" TO COMPUTED-A
GO TO START-FAIL-GF-23.
MOVE FS1-STATUS TO FILESTATUS (5).
*> SEQUENTIAL READ VERIFIES WHERE THE START LEFT THE RECORD POINTER.
READ IX-FS1 AT END
MOVE "AT END ON READ" TO COMPUTED-A
GO TO START-FAIL-GF-23.
MOVE IX-FS1R1-F-G-240 TO FILE-RECORD-INFO (1).
IF XRECORD-NUMBER (1) EQUAL TO 26
PERFORM PASS
GO TO START-WRITE-GF-23.
MOVE 26 TO RECNO.
PERFORM DISPLAY-ALTERNATE-KEY1.
MOVE XRECORD-NUMBER (1) TO COMPUTED-18V0.
START-FAIL-GF-23.
MOVE "IX-36; 4.7.2 ETC. " TO RE-MARK
PERFORM FAIL.
MOVE 26 TO CORRECT-18V0.
GO TO START-WRITE-GF-23.
START-DELETE-GF-23.
PERFORM DE-LETE.
START-WRITE-GF-23.
PERFORM PRINT-DETAIL.
START-INIT-GF-24.
MOVE "START-TEST-GF-24" TO PAR-NAME.
PERFORM START-INITIALIZE-RECORD.
IF INIT-FLAG NOT EQUAL TO ZERO
MOVE "TEST IMPROPERLY INITIALIZED" TO RE-MARK
MOVE "**" TO FILESTATUS (6)
GO TO START-DELETE-GF-24.
MOVE "YYYYYUUUUU390" TO FS1-RECKEY-1-13.
MOVE "XXXXXYYYYY390ALTKEY1" TO FS1-ALTKEY1-1-20.
MOVE "WWWWWWWWWV399ALTKEY2" TO FS1-ALTKEY2-1-20.
MOVE WRK-FS1-RECKEY TO IX-REC-KEY-AREA.
MOVE WRK-FS1-ALTKEY1 TO IX-ALT-KEY1-AREA.
MOVE WRK-FS1-ALTKEY2 TO IX-ALT-KEY2-AREA.
START-TEST-GF-24.
*>
*> START-TEST-GF-24 - THE START STATEMENT USES AN OPERAND IN THE
*> KEY PHRASE WHICH IS NOT THE NAME OF AN
*> ALTERNATE KEY BUT IS THE NAME OF A DATA ITEM
*> THAT IS SUBORDINATE TO THE KEY. THE CONTENTS
*> OF THE DATA ITEM (POSITIONS 1 THRU 5 OF THE
*> ALTERNATE KEY) IS A DUPLICATE OF THE FIRST
*> 5 POSITIONS OF 5 OTHER RECORDS IN THE FILE.
*> THIS TEST EXPECTS THE RECORD POINTER
*> TO BE POSITIONED TO RECORD KEY XXXXXXXXXX380
*> ALTKEY1 (RECORD NUMBER 190) WHICH WAS THE
*> FIRST RECORD WRITTEN TO THE FILE THAT
*> CONTAINS XXXXX IN THE FIRST 5 POSITIONS OF
*> THE KEY. THE ALTERNATE KEY WAS LOADED WITH THE
*> VALUE XXXXXYYYYY390ALTKEY1 (KEY FOR RECORD
*> NUMBER 195) BEFORE THE START WAS EXECUTED.
*>
START IX-FS1
KEY IS EQUAL TO IX-FS1-ALTKEY1-1-5
INVALID KEY MOVE FS1-STATUS TO FILESTATUS (6)
MOVE "INVALID KEY ON START" TO COMPUTED-A
GO TO START-FAIL-GF-24.
MOVE FS1-STATUS TO FILESTATUS (6).
READ IX-FS1 AT END
MOVE "AT END ON READ" TO COMPUTED-A
GO TO START-FAIL-GF-24.
MOVE IX-FS1R1-F-G-240 TO FILE-RECORD-INFO (1).
IF XRECORD-NUMBER (1) EQUAL TO 190
PERFORM PASS
GO TO START-WRITE-GF-24.
*> NOTE(REVIEW): RECNO IS SET TO 65 HERE ALTHOUGH THE EXPECTED
*> RECORD IS 190; RECNO APPEARS TO BE AN INDEX USED BY THE
*> DISPLAY ROUTINE, NOT THE RECORD NUMBER - CONFIRM.
MOVE 65 TO RECNO.
PERFORM DISPLAY-ALTERNATE-KEY1.
MOVE XRECORD-NUMBER (1) TO COMPUTED-18V0.
START-FAIL-GF-24.
MOVE "IX-36; 4.7.2 ETC. " TO RE-MARK
PERFORM FAIL.
MOVE 190 TO CORRECT-18V0.
GO TO START-WRITE-GF-24.
START-DELETE-GF-24.
PERFORM DE-LETE.
START-WRITE-GF-24.
PERFORM PRINT-DETAIL.
START-INIT-GF-25.
MOVE "START-TEST-GF-25" TO PAR-NAME.
PERFORM START-INITIALIZE-RECORD.
IF INIT-FLAG NOT EQUAL TO ZERO
MOVE "TEST IMPROPERLY INITIALIZED" TO RE-MARK
MOVE "**" TO FILESTATUS (7)
GO TO START-DELETE-GF-25.
MOVE "CCCCCCCCCD022" TO FS1-RECKEY-1-13.
MOVE "022ALTKEY1 " TO FS1-ALTKEY1-1-20.
MOVE "VVVVVVVVVV380ALTKEY2" TO FS1-ALTKEY2-1-20.
MOVE WRK-FS1-RECKEY TO IX-REC-KEY-AREA.
MOVE WRK-FS1-ALTKEY1 TO IX-ALT-KEY1-AREA.
MOVE WRK-FS1-ALTKEY2 TO IX-ALT-KEY2-AREA.
START-TEST-GF-25.
*>
*> START-TEST-GF-25 - THE START STATEMENT USES AN OPERAND IN THE
*> KEY PHRASE OF THE START STATEMENT WHICH IS
*> A DATA ITEM SUBORDINATE TO THE ALTERNATE KEY
*> NAME. THE CONTENTS OF THE DATA ITEM
*> POSITIONS 1 THRU 10 OF THE ALTERNATE KEY)
*> IS LOADED WITH "022ALTKEY1". NO SUCH RECORD
*> SHOULD BE IN THE FILE. IF IN THE COMPARSION,
*> THE LONGER OPERAND IS TRUNCATED ON THE LEFT
*> INSTEAD OF ON THE RIGHT THE CONTENTS OF
*> THE DATA ITEM WILL MATCH A RECORD IN THE
*> FILE. THIS TEST EXPECTS THE LONGER OPERAND
*> TO BE TRUNCATED ON THE RIGHT CAUSING NO
*> DATA ITEM MATCH AND RESULTING IN AN INVALID
*> KEY CONDITION WHEN THE START IS EXECUTED.
*>
START IX-FS1
KEY IS EQUAL TO IX-FS1-ALTKEY1-1-10
INVALID KEY MOVE FS1-STATUS TO FILESTATUS (7)
GO TO START-PASS-GF-25.
MOVE FS1-STATUS TO FILESTATUS (7).
READ IX-FS1 AT END
MOVE "AT END PATH TAKEN ON READ" TO RE-MARK.
MOVE IX-FS1R1-F-G-240 TO FILE-RECORD-INFO (1).
MOVE "IX-36; 4.7.2 ETC. " TO RE-MARK
PERFORM FAIL.
MOVE XRECORD-KEY (1) TO COMPUTED-A.
MOVE "INVALID KEY" TO CORRECT-A.
GO TO START-WRITE-GF-25.
START-PASS-GF-25.
PERFORM PASS.
GO TO START-WRITE-GF-25.
START-DELETE-GF-25.
PERFORM DE-LETE.
START-WRITE-GF-25.
PERFORM PRINT-DETAIL.
START-INIT-GF-26.
MOVE "START-TEST-GF-26" TO PAR-NAME.
PERFORM START-INITIALIZE-RECORD.
IF INIT-FLAG NOT EQUAL TO ZERO
MOVE "TEST IMPROPERLY INITIALIZED" TO RE-MARK
MOVE "**" TO FILESTATUS (8)
GO TO START-DELETE-GF-26.
MOVE "BBBBBBBBBC002" TO FS1-RECKEY-1-13.
MOVE "EEEEEEEEEE002ALTKEY1" TO FS1-ALTKEY1-1-20.
MOVE "WWWWWWWWWV398ALTKEY2" TO FS1-ALTKEY2-1-20.
MOVE WRK-FS1-RECKEY TO IX-REC-KEY-AREA.
MOVE WRK-FS1-ALTKEY1 TO IX-ALT-KEY1-AREA.
MOVE WRK-FS1-ALTKEY2 TO IX-ALT-KEY2-AREA.
START-TEST-GF-26.
*>
*> START-TEST-GF-26 - THIS TEST USES AN OPERAND IN THE
*> KEY PHRASE OF THE START STATEMENT WHICH IS
*> A DATA ITEM SUBORDINATE TO THE ALTERNATE KEY
*> NAME. THE CONTENTS OF THE DATA ITEM
*> (POSITIONS 1 THRU 10 OF THE RECORD KEY) IS
*> LOADED WITH "EEEEEEEEEE". THIS KEY VALUE
*> IS LOWER THAN ANY ALTERNATE KEY VALUE IN
*> POSITION 1 THRU 10 EXISTING IN THE FILE
*> THEREFORE AN INVALID KEY CONDITION IS
*> EXPECTED WHEN THE START STATEMENT IS
*> EXECUTED.
*>
START IX-FS1
KEY IS EQUAL TO IX-FS1-ALTKEY1-1-10
INVALID KEY MOVE FS1-STATUS TO FILESTATUS (8)
GO TO START-PASS-GF-26.
MOVE FS1-STATUS TO FILESTATUS (8).
READ IX-FS1 AT END
MOVE "AT END PATH TAKEN ON READ" TO RE-MARK.
MOVE IX-FS1R1-F-G-240 TO FILE-RECORD-INFO (1).
MOVE "IX-36; 4.7.2 ETC. " TO RE-MARK
PERFORM FAIL.
MOVE XRECORD-KEY (1) TO COMPUTED-A.
MOVE "INVALID KEY" TO CORRECT-A.
GO TO START-WRITE-GF-26.
START-PASS-GF-26.
PERFORM PASS.
GO TO START-WRITE-GF-26.
START-DELETE-GF-26.
PERFORM DE-LETE.
START-WRITE-GF-26.
PERFORM PRINT-DETAIL.
START-INIT-GF-27.
MOVE "START-TEST-GF-27" TO PAR-NAME.
PERFORM START-INITIALIZE-RECORD.
IF INIT-FLAG NOT EQUAL TO ZERO
MOVE "TEST IMPROPERLY INITIALIZED" TO RE-MARK
MOVE "**" TO FILESTATUS (9)
GO TO START-DELETE-GF-27.
MOVE "UUUUUUUUUU400" TO FS1-RECKEY-1-13.
MOVE "YYYYYZYYYY400ALTKEY1" TO FS1-ALTKEY1-1-20.
MOVE "WWWWWWWWWV398ALTKEY2" TO FS1-ALTKEY2-1-20.
MOVE WRK-FS1-RECKEY TO IX-REC-KEY-AREA.
MOVE WRK-FS1-ALTKEY1 TO IX-ALT-KEY1-AREA.
MOVE WRK-FS1-ALTKEY2 TO IX-ALT-KEY2-AREA.
START-TEST-GF-27.
*>
*> START-TEST-GF-27 - THIS TEST USES AN OPERAND IN THE
*> KEY PHRASE OF THE START STATEMENT WHICH IS
*> A DATA ITEM SUBORDINATE TO THE ALTERNATE KEY
*> NAME. THE CONTENTS OF THE DATA ITEM
*> (POSITIONS 1 THRU 10 OF THE RECORD KEY) IS
*> LOADED WITH "YYYYYZYYYY". THIS KEY VALUE
*> IS GREATER THAN ANY ALTERNATE KEY VALUE IN
*> POSITION 1 THRU 10 EXISTING IN THE FILE
*> THEREFORE AN INVALID KEY CONDITION IS
*> EXPECTED WHEN THE START STATEMENT IS
*> EXECUTED.
*>
START IX-FS1
KEY IS EQUAL TO IX-FS1-ALTKEY1-1-10
INVALID KEY MOVE FS1-STATUS TO FILESTATUS (9)
GO TO START-PASS-GF-27.
MOVE FS1-STATUS TO FILESTATUS (9).
READ IX-FS1 AT END
MOVE "AT END PATH TAKEN ON READ" TO RE-MARK.
MOVE IX-FS1R1-F-G-240 TO FILE-RECORD-INFO (1).
MOVE "IX-36; 4.7.2 ETC. " TO RE-MARK
PERFORM FAIL.
MOVE XRECORD-KEY (1) TO COMPUTED-A.
MOVE "INVALID KEY" TO CORRECT-A.
GO TO START-WRITE-GF-27.
START-PASS-GF-27.
PERFORM PASS.
GO TO START-WRITE-GF-27.
START-DELETE-GF-27.
PERFORM DE-LETE.
START-WRITE-GF-27.
PERFORM PRINT-DETAIL.
*> ALTKEY1 SERIES DONE; FILE IS CLOSED AND REOPENED AT GF-37-ETC.
CLOSE IX-FS1.
*>
*> PARAGRAPH GROUP GF-28 THRU GF-36: RE-CHECK THE FILE STATUS
*> VALUES SAVED IN FILESTATUS (1)..(9) BY TESTS GF-19 THRU GF-27.
*> A SLOT HOLDING "**" MEANS THE ORIGINATING TEST NEVER RAN
*> (IMPROPER INITIALIZATION), SO THE STATUS CHECK IS DELETED.
*> EXPECTED VALUES: "00" FOR THE RECORD-FOUND TESTS, "23" FOR THE
*> INVALID-KEY TESTS.
*>
START-INIT-FILE-STATUS-2.
MOVE "FILE STATUS START:00" TO FEATURE.
MOVE "START-TEST-GF-28" TO PAR-NAME.
*>
*> THIS SERIES OF TESTS CHECKS THE CONTENTS OF THE FILE STATUS
*> CAPTURED FROM THE LAST NINE TESTS.
*>
START-TEST-GF-28.
IF FILESTATUS (1) EQUAL TO "**"
PERFORM DE-LETE
GO TO START-WRITE-GF-28.
*>
*> START-TEST-GF-28 - THIS TEST CHECKS THE FILE STATUS CONTENTS
*> RESULTING FROM START-TEST-GF-19. THE FILE
*> STATUS CONTENTS IS EXPECTED TO BE "00".
*>
IF FILESTATUS (1) EQUAL TO "00"
PERFORM PASS
ELSE
MOVE "IX-3; 1.3.4 (1) A FROM START-TEST-GF-19 " TO RE-MARK
PERFORM FAIL
MOVE "00" TO CORRECT-A
MOVE FILESTATUS (1) TO COMPUTED-A.
START-WRITE-GF-28.
PERFORM PRINT-DETAIL.
START-TEST-GF-29.
MOVE "FILE STATUS START:23" TO FEATURE.
MOVE "START-TEST-GF-29" TO PAR-NAME.
IF FILESTATUS (2) EQUAL TO "**"
PERFORM DE-LETE
GO TO START-WRITE-GF-29.
*>
*> START-TEST-GF-29 - THIS TEST CHECKS THE FILE STATUS CONTENTS
*> RESULTING FROM START-TEST-GF-20. THE FILE
*> STATUS CONTENTS IS EXPECTED TO BE "23".
*>
*> ALL STATEMENTS AFTER "ELSE" UP TO THE PERIOD RUN ONLY ON THE
*> FAIL PATH (NO END-IF; THE PERIOD TERMINATES THE IF).
IF FILESTATUS (2) EQUAL TO "23"
PERFORM PASS
ELSE PERFORM FAIL
MOVE "IX-4; 1.3.4 (3) C ; SEE START-TEST-GF-20 " TO RE-MARK
MOVE "23" TO CORRECT-A
MOVE FILESTATUS (2) TO COMPUTED-A.
START-WRITE-GF-29.
PERFORM PRINT-DETAIL.
START-TEST-GF-30.
MOVE "FILE STATUS START:23" TO FEATURE.
MOVE "START-TEST-GF-30" TO PAR-NAME.
IF FILESTATUS (3) EQUAL TO "**"
PERFORM DE-LETE
GO TO START-WRITE-GF-30.
*>
*> START-TEST-GF-30 - THIS TEST CHECKS THE FILE STATUS CONTENTS
*> RESULTING FROM START-TEST-GF-21. THE FILE
*> STATUS CONTENTS IS EXPECTED TO BE "23".
*>
IF FILESTATUS (3) EQUAL TO "23"
PERFORM PASS
ELSE PERFORM FAIL
MOVE "IX-4; 1.3.4 (3) C; SEE START-TEST-GF-21 " TO RE-MARK
MOVE "23" TO CORRECT-A
MOVE FILESTATUS (3) TO COMPUTED-A.
START-WRITE-GF-30.
PERFORM PRINT-DETAIL.
START-TEST-GF-31.
MOVE "FILE STATUS START:23" TO FEATURE.
MOVE "START-TEST-GF-31" TO PAR-NAME.
IF FILESTATUS (4) EQUAL TO "**"
PERFORM DE-LETE
GO TO START-WRITE-GF-31.
*>
*> START-TEST-GF-31 - THIS TEST CHECKS THE FILE STATUS CONTENTS
*> RESULTING FROM START-TEST-GF-22. THE FILE
*> STATUS CONTENTS IS EXPECTED TO BE "23".
*>
IF FILESTATUS (4) EQUAL TO "23"
PERFORM PASS
ELSE PERFORM FAIL
MOVE "IX-4; 1.3.4 (3) C; SEE START-TEST-GF-22 " TO RE-MARK
MOVE "23" TO CORRECT-A
MOVE FILESTATUS (4) TO COMPUTED-A.
START-WRITE-GF-31.
PERFORM PRINT-DETAIL.
START-TEST-GF-32.
MOVE "FILE STATUS START:00" TO FEATURE.
MOVE "START-TEST-GF-32" TO PAR-NAME.
IF FILESTATUS (5) EQUAL TO "**"
PERFORM DE-LETE
GO TO START-WRITE-GF-32.
*>
*> START-TEST-GF.05 - THIS TEST CHECKS THE FILE STATUS CONTENTS
*> RESULTING FROM START-TEST-GF-23. THE FILE
*> STATUS CONTENTS IS EXPECTED TO BE "00"
*>
IF FILESTATUS (5) EQUAL TO "00"
PERFORM PASS
ELSE PERFORM FAIL
MOVE "IX-3; 1.3.4 (1) A; SEE START-TEST-GF-23 " TO RE-MARK
MOVE "00" TO CORRECT-A
MOVE FILESTATUS (5) TO COMPUTED-A.
START-WRITE-GF-32.
PERFORM PRINT-DETAIL.
START-TEST-GF-33.
MOVE "FILE STATUS START:00" TO FEATURE.
MOVE "START-TEST-GF-33" TO PAR-NAME.
IF FILESTATUS (6) EQUAL TO "**"
PERFORM DE-LETE
GO TO START-WRITE-GF-33.
*>
*> START-TEST-GF-33 - THIS TEST CHECKS THE FILE STATUS CONTENTS
*> RESULTING FROM START-TEST-GF-24. THE FILE
*> STATUS CONTENTS IS EXPECTED TO BE "00"
*>
IF FILESTATUS (6) EQUAL TO "00"
PERFORM PASS
ELSE PERFORM FAIL
MOVE "IX-3; 1.3.4 (1) A; SEE START-TEST-GF-24 " TO RE-MARK
MOVE "00" TO CORRECT-A
MOVE FILESTATUS (6) TO COMPUTED-A.
START-WRITE-GF-33.
PERFORM PRINT-DETAIL.
START-TEST-GF-34.
MOVE "FILE STATUS START:23" TO FEATURE.
MOVE "START-TEST-GF-34" TO PAR-NAME.
IF FILESTATUS (7) EQUAL TO "**"
PERFORM DE-LETE
GO TO START-WRITE-GF-34.
*>
*> START-TEST-GF-34 - THIS TEST CHECKS THE FILE STATUS CONTENTS
*> RESULTING FROM START-TEST-GF-25. THE FILE
*> STATUS CONTENTS IS EXPECTED TO BE "23"
*>
IF FILESTATUS (7) EQUAL TO "23"
PERFORM PASS
ELSE PERFORM FAIL
MOVE "IX-4; 1.3.4 (3) C; SEE START-TEST-GF-25 " TO RE-MARK
MOVE "23" TO CORRECT-A
MOVE FILESTATUS (7) TO COMPUTED-A.
START-WRITE-GF-34.
PERFORM PRINT-DETAIL.
START-TEST-GF-35.
MOVE "FILE STATUS START:23" TO FEATURE.
MOVE "START-TEST-GF-35" TO PAR-NAME.
IF FILESTATUS (8) EQUAL TO "**"
PERFORM DE-LETE
GO TO START-WRITE-GF-35.
*>
*> START-TEST-GF-35 - THIS TEST CHECKS THE FILE STATUS CONTENTS
*> RESULTING FROM START-TEST-GF-26. THE FILE
*> STATUS CONTENTS IS EXPECTED TO BE "23".
*>
IF FILESTATUS (8) EQUAL TO "23"
PERFORM PASS
ELSE PERFORM FAIL
MOVE "IX-4; 1.3.4 (3) C; SEE START-TEST-GF-26 " TO RE-MARK
MOVE "23" TO CORRECT-A
MOVE FILESTATUS (8) TO COMPUTED-A.
START-WRITE-GF-35.
PERFORM PRINT-DETAIL.
START-TEST-GF-36.
MOVE "FILE STATUS START:23" TO FEATURE.
MOVE "START-TEST-GF-36" TO PAR-NAME.
IF FILESTATUS (9) EQUAL TO "**"
PERFORM DE-LETE
GO TO START-WRITE-GF-36.
*>
*> START-TEST-GF-36 - THIS TEST CHECKS THE FILE STATUS CONTENTS
*> RESULTING FROM START-TEST-GF-27. THE FILE
*> STATUS CONTENTS IS EXPECTED TO BE "23".
*>
IF FILESTATUS (9) EQUAL TO "23"
PERFORM PASS
ELSE PERFORM FAIL
MOVE "IX-4; 1.3.4 (3) C; SEE START-TEST-GF-27 " TO RE-MARK
MOVE "23" TO CORRECT-A
MOVE FILESTATUS (9) TO COMPUTED-A.
START-WRITE-GF-36.
PERFORM PRINT-DETAIL.
*>
*> PARAGRAPH GROUP GF-37 THRU GF-40: SAME "START ... KEY IS EQUAL
*> TO" PATTERN AS GF-19..GF-22, BUT AGAINST ALTERNATE KEY 2
*> (IX-FS1-ALTKEY2, THE KEY DECLARED WITH DUPLICATES - SEE
*> FEATURE TEXT BELOW).  FILESTATUS SLOTS (1)..(9) ARE REUSED AND
*> RE-CHECKED BY GF-46 ONWARD.
*>
START-INIT-GF-37-ETC.
*> FILE WAS CLOSED AFTER GF-27; REOPEN FOR THE ALTKEY2 SERIES.
OPEN INPUT IX-FS1.
MOVE "STRT EQ ALTKY W/DUP" TO FEATURE.
MOVE "START-TEST-GF-37" TO PAR-NAME.
MOVE "********************" TO HOLD-FILESTATUS-RECORD.
*>
*> THIS TEST TESTS THE "START -- EQUAL TO" FOR PROPER POSITIONING
*> OF THE RECORD POINTER FOR THE SUBSEQUENT READ STATEMENT.
*> START-TEST-007 USES ONLY THE ALTERNATE RECORD KEY WITH DUPLI-
*> CATES OPTION (ALTERNATE-KEY2) FOR ESTABLISHING
*> THE CURRENT RECORD POINTER FOR THE FILE. THE FOLLOWING IS A
*> SUMMARY OF THE TEST CONDITIONS AND THE EXPECTED ACTION TO BE
*> TAKEN FOR THE TESTS.
*>
*> CONDITIONS (CONTENTS OF KEY) / ACTION
*>
*> START-TEST-GF-37 - EQUAL A RECORD IN FILE / RECORD FOUND
*> START-TEST-GF-38 - BETWEEN 2 EXISTING KEY VALUES / INVALID KEY
*> START-TEST-GF-39 - LESS THAN FIRST FILE RECORD / INVALID KEY
*> START-TEST-GF-40 - GREATER THAN LAST FILE RECORD / INVALID KEY
*> START-TEST-GF-41 - UNEQUAL SIZE OPERANDS (EQUAL) / RECORD FOUN
*> START-TEST-GF-42 - UNEQUAL SIZE OPERANDS (EQUAL) / RECORD FOUN
*> START-TEST-GF-43 - UNEQUAL SIZE OPERANDS (UNEQUAL) / INVLD KEY
*> START-TEST-GF-44 - UNEQUAL SIZE OPERANDS (UNEQUAL) / INVLD KEY
*> START-TEST-GF-45 - UNEQUAL SIZE OPERANDS (UNEQUAL) /INVLD KEY
*>
*> BEFORE EACH TEST A RECORD IS MADE AVAILABLE WHICH IS DIFFERENT
*> THAN THE ONE WHICH IS EXPECTED TO BE PRESENT FOLLOWING A TEST.
*> IF AN INVALID KEY IS EXPECTED FROM THE TEST, THE KEYS
*> ASSOCIATED WITH THE FILE WHICH ARE NOT PARTICIPATING IN THE
*> START STATEMENT WILL BE LOADED WITH VALUES WHICH WOULD MATCH
*> RECORDS IN THE FILE. IF A KEY MATCH IS EXPECTED FROM
*> THE TEST, THE KEYS ASSOCIATED WITH THE FILE WHICH ARE NOT
*> PARTICIPATING IN THE START STATEMENT WILL BE LOADED WITH
*> VALUES WHICH WOULD NOT MATCH RECORDS IN THE FILE. THE FILE
*> STATUS FROM EXECUTION OF EACH START IS CAPTURED FOR LATER TESTS
*>
START-INIT-GF-37.
MOVE "START-TEST-GF-37" TO PAR-NAME.
PERFORM START-INITIALIZE-RECORD.
IF INIT-FLAG NOT EQUAL TO ZERO
MOVE "**" TO FILESTATUS (1)
MOVE "TEST IMPROPERLY INITIALIZED" TO RE-MARK
GO TO START-DELETE-GF-37.
MOVE "CCCCCDDDDD022" TO FS1-RECKEY-1-13.
MOVE "EEEEEFFFFF022ALTKEY1" TO FS1-ALTKEY1-1-20.
MOVE "VVVVVVVVUU376ALTKEY2" TO FS1-ALTKEY2-1-20.
MOVE WRK-FS1-RECKEY TO IX-REC-KEY-AREA.
MOVE WRK-FS1-ALTKEY1 TO IX-ALT-KEY1-AREA.
MOVE WRK-FS1-ALTKEY2 TO IX-ALT-KEY2-AREA.
START-TEST-GF-37.
*>
*> START-TEST-GF-37 - THE START SHOULD FIND A RECORD IN THE FILE
*> WHICH HAS AN ALTERNATE RECORD KEY VALUE OF
*> VVVVVVVVUU376ALTKEY2 (RECORD NUMBER 12).
*>
START IX-FS1
KEY IS EQUAL TO IX-FS1-ALTKEY2
INVALID KEY MOVE FS1-STATUS TO FILESTATUS (1)
MOVE "INVALID KEY ON START" TO COMPUTED-A
GO TO START-FAIL-GF-37.
MOVE FS1-STATUS TO FILESTATUS (1).
*> SEQUENTIAL READ VERIFIES WHERE THE START LEFT THE RECORD POINTER.
READ IX-FS1 AT END
MOVE "AT END ON READ" TO COMPUTED-A
GO TO START-FAIL-GF-37.
MOVE IX-FS1R1-F-G-240 TO FILE-RECORD-INFO (1).
IF XRECORD-NUMBER (1) EQUAL TO 12
PERFORM PASS
MOVE SPACE TO RE-MARK
GO TO START-WRITE-GF-37.
MOVE 12 TO RECNO.
PERFORM DISPLAY-ALTERNATE-KEY2.
MOVE XRECORD-NUMBER (1) TO COMPUTED-18V0.
START-FAIL-GF-37.
PERFORM FAIL.
MOVE 12 TO CORRECT-18V0.
MOVE "IX-36; 4.7.2 ETC. " TO RE-MARK
GO TO START-WRITE-GF-37.
START-DELETE-GF-37.
PERFORM DE-LETE.
START-WRITE-GF-37.
PERFORM PRINT-DETAIL.
START-INIT-GF-38.
MOVE "START-TEST-GF-38" TO PAR-NAME.
PERFORM START-INITIALIZE-RECORD.
IF INIT-FLAG NOT EQUAL TO ZERO
MOVE "TEST IMPROPERLY INITIALIZED" TO RE-MARK
MOVE "**" TO FILESTATUS (2)
GO TO START-DELETE-GF-38.
MOVE "EEEEEEEEFF064" TO FS1-RECKEY-1-13.
MOVE "HHHHHHHIII066ALTKEY1" TO FS1-ALTKEY1-1-20.
MOVE "TTTTTTTSSS335ALTKEY2" TO FS1-ALTKEY2-1-20.
MOVE WRK-FS1-RECKEY TO IX-REC-KEY-AREA.
MOVE WRK-FS1-ALTKEY1 TO IX-ALT-KEY1-AREA.
MOVE WRK-FS1-ALTKEY2 TO IX-ALT-KEY2-AREA.
START-TEST-GF-38.
*>
*> START-TEST-GF-38- THE START SHOULD NOT FIND A RECORD IN THE
*> FILE WHICH HAS AN ALTERNATE RECORD KEY VALUE
*> OF TTTTTTTSSS335ALTKEY2. THIS KEY VALUE IS
*> SEQUENTIALLY LOCATED BETWEEN TWO CURRENTLY
*> EXISTING ALTERNATE KEYS IN THE FILE.
*>
*> THE INVALID KEY PATH IS THE EXPECTED (PASS) OUTCOME HERE.
START IX-FS1
KEY IS EQUAL TO IX-FS1-ALTKEY2
INVALID KEY MOVE FS1-STATUS TO FILESTATUS (2)
GO TO START-PASS-GF-38.
MOVE FS1-STATUS TO FILESTATUS (2).
READ IX-FS1 AT END
MOVE "AT END PATH TAKEN ON READ" TO RE-MARK.
MOVE IX-FS1R1-F-G-240 TO FILE-RECORD-INFO (1).
MOVE "IX-36; 4.7.2 ETC. " TO RE-MARK
PERFORM FAIL.
MOVE ALTERNATE-KEY2 (1) TO COMPUTED-A.
MOVE "INVALID KEY" TO CORRECT-A.
GO TO START-WRITE-GF-38.
START-PASS-GF-38.
PERFORM PASS.
GO TO START-WRITE-GF-38.
START-DELETE-GF-38.
PERFORM DE-LETE.
START-WRITE-GF-38.
PERFORM PRINT-DETAIL.
START-INIT-GF-39.
MOVE "START-TEST-GF-39" TO PAR-NAME.
PERFORM START-INITIALIZE-RECORD.
IF INIT-FLAG NOT EQUAL TO ZERO
MOVE "TEST IMPROPERLY INITIALIZED" TO RE-MARK
MOVE "**" TO FILESTATUS (3)
GO TO START-DELETE-GF-39.
MOVE "UUUUUUUUUU400" TO FS1-RECKEY-1-13.
MOVE "YYYYYYYYYY400ALTKEY1" TO FS1-ALTKEY1-1-20.
MOVE "DDDDDDDDDC000ALTKEY2" TO FS1-ALTKEY2-1-20.
MOVE WRK-FS1-RECKEY TO IX-REC-KEY-AREA.
MOVE WRK-FS1-ALTKEY1 TO IX-ALT-KEY1-AREA.
MOVE WRK-FS1-ALTKEY2 TO IX-ALT-KEY2-AREA.
START-TEST-GF-39.
*>
*> START-TEST-GF-39 - THE START STATEMENT SHOULD NOT FIND A
*> RECORD IN THE FILE WHICH HAS AN ALTERNATE
*> KEY VALUE OF DDDDDDDDDC000ALTKEY2. THIS KEY
*> VALUE IS SEQUENTIALLY LOWER THAN ANY
*> CURRENTLY EXISTING KEY IN THE FILE.
*>
START IX-FS1
KEY IS EQUAL TO IX-FS1-ALTKEY2
INVALID KEY MOVE FS1-STATUS TO FILESTATUS (3)
GO TO START-PASS-GF-39.
MOVE FS1-STATUS TO FILESTATUS (3).
READ IX-FS1 AT END
MOVE "AT END PATH TAKEN ON READ" TO RE-MARK.
MOVE IX-FS1R1-F-G-240 TO FILE-RECORD-INFO (1).
MOVE "IX-36; 4.7.2 ETC. " TO RE-MARK
PERFORM FAIL.
MOVE ALTERNATE-KEY2 (1) TO COMPUTED-A.
MOVE "INVALID KEY" TO CORRECT-A.
GO TO START-WRITE-GF-39.
START-PASS-GF-39.
PERFORM PASS.
GO TO START-WRITE-GF-39.
START-DELETE-GF-39.
PERFORM DE-LETE.
START-WRITE-GF-39.
PERFORM PRINT-DETAIL.
START-INIT-GF-40.
MOVE "START-TEST-GF-40" TO PAR-NAME.
PERFORM START-INITIALIZE-RECORD.
IF INIT-FLAG NOT EQUAL TO ZERO
MOVE "TEST IMPROPERLY INITIALIZED" TO RE-MARK
MOVE "**" TO FILESTATUS (4)
GO TO START-DELETE-GF-40.
MOVE "BBBBBBBBBC002" TO FS1-RECKEY-1-13.
MOVE "YYYYYYYYYY400ALTKEY1" TO FS1-ALTKEY1-1-20.
MOVE "WWWWWWWWWV399ALTKEY2" TO FS1-ALTKEY2-1-20.
MOVE WRK-FS1-RECKEY TO IX-REC-KEY-AREA.
MOVE WRK-FS1-ALTKEY1 TO IX-ALT-KEY1-AREA.
MOVE WRK-FS1-ALTKEY2 TO IX-ALT-KEY2-AREA.
START-TEST-GF-40.
*>
*> START-TEST-GF-40 - THE START STATEMENT SHOULD NOT FIND A
*> RECORD IN THE FILE WHICH HAS AN ALTERNATE
*> KEY VALUE OF WWWWWWWWWV399ALTKEY2. THIS
*> VALUE IS SEQUENTIALLY ONE GREATER THAN
*> ANY ALTERNATE KEY CURRENTLY EXISTING IN
*> THE FILE. AN INVALID KEY CONDITION
*> IS EXPECTED WHEN THE START IS EXECUTED.
*>
START IX-FS1
KEY IS EQUAL TO IX-FS1-ALTKEY2
INVALID KEY MOVE FS1-STATUS TO FILESTATUS (4)
GO TO START-PASS-GF-40.
MOVE FS1-STATUS TO FILESTATUS (4).
READ IX-FS1 AT END
MOVE "AT END PATH TAKEN ON READ" TO RE-MARK.
MOVE IX-FS1R1-F-G-240 TO FILE-RECORD-INFO (1).
MOVE "IX-36; 4.7.2 ETC. " TO RE-MARK
PERFORM FAIL.
MOVE ALTERNATE-KEY2 (1) TO COMPUTED-A.
MOVE "INVALID KEY" TO CORRECT-A.
GO TO START-WRITE-GF-40.
START-PASS-GF-40.
PERFORM PASS.
GO TO START-WRITE-GF-40.
START-DELETE-GF-40.
PERFORM DE-LETE.
START-WRITE-GF-40.
PERFORM PRINT-DETAIL.
*>
*> PARAGRAPH GROUP GF-41 THRU GF-44: "START ... KEY IS EQUAL TO"
*> WITH A DATA ITEM SUBORDINATE TO ALTERNATE KEY 2 (LEADING 5 OR
*> 10 CHARACTERS), EXERCISING UNEQUAL-SIZE OPERAND COMPARISON ON
*> THE DUPLICATES KEY.  STATUS SLOTS (5) THRU (8) ARE FILLED FOR
*> THE LATER FILE-STATUS CHECKS.
*>
START-INIT-GF-41.
MOVE "START-TEST-GF-41" TO PAR-NAME.
PERFORM START-INITIALIZE-RECORD.
IF INIT-FLAG NOT EQUAL TO ZERO
MOVE "TEST IMPROPERLY INITIALIZED" TO RE-MARK
MOVE "**" TO FILESTATUS (5)
GO TO START-DELETE-GF-41.
MOVE "CCCCCCCCCC038" TO FS1-RECKEY-1-13.
MOVE "IIIIIIIIJJ083ALTKEY1" TO FS1-ALTKEY1-1-20.
MOVE "VUUUUVVVVV362ALTKEY2" TO FS1-ALTKEY2-1-20.
MOVE WRK-FS1-RECKEY TO IX-REC-KEY-AREA.
MOVE WRK-FS1-ALTKEY1 TO IX-ALT-KEY1-AREA.
MOVE WRK-FS1-ALTKEY2 TO IX-ALT-KEY2-AREA.
START-TEST-GF-41.
*>
*> START-TEST-GF-41 - THE START STATEMENT USES AN OPERAND
*> IN THE KEY PHRASE WHICH IS NOT THE NAME
*> OF AN ALTERNATE KEY BUT IS THE NAME OF A
*> DATA ITEM WHICH IS SUBORDINATE TO THE
*> ALTERNATE KEY. THE CONTENTS OF THE DATA ITEM
*> (POSITIONS 1 THRU 5 OF THE ALTERNATE KEY)
*> IS A UNIQUE KEY VALUE FOR THE FILE. THE
*> BALANCE OF THE ALTERNATE KEY (POSITIONS 6
*> THRU 20) IN NOT A VALID KEY VALUE FOR THE
*> FILE. THE
*> RECORD WITH THE ALTERNATE KEY
*> VUUUUUUUUU362ALTKEY2 (RECORD NUMBER 19) IS
*> EXPECTED TO BE FOUND.
*>
START IX-FS1
KEY IS EQUAL TO IX-FS1-ALTKEY2-1-5
INVALID KEY MOVE FS1-STATUS TO FILESTATUS (5)
MOVE "INVALID KEY ON START" TO COMPUTED-A
GO TO START-FAIL-GF-41.
MOVE FS1-STATUS TO FILESTATUS (5).
*> SEQUENTIAL READ VERIFIES WHERE THE START LEFT THE RECORD POINTER.
READ IX-FS1 AT END
MOVE "AT END ON READ" TO COMPUTED-A
GO TO START-FAIL-GF-41.
MOVE IX-FS1R1-F-G-240 TO FILE-RECORD-INFO (1).
IF XRECORD-NUMBER (1) EQUAL TO 19
PERFORM PASS
GO TO START-WRITE-GF-41.
MOVE 19 TO RECNO.
PERFORM DISPLAY-ALTERNATE-KEY2.
MOVE XRECORD-NUMBER (1) TO COMPUTED-18V0.
START-FAIL-GF-41.
MOVE "IX-36; 4.7.2 ETC. " TO RE-MARK
PERFORM FAIL.
MOVE 19 TO CORRECT-18V0.
GO TO START-WRITE-GF-41.
START-DELETE-GF-41.
PERFORM DE-LETE.
START-WRITE-GF-41.
PERFORM PRINT-DETAIL.
START-INIT-GF-42.
MOVE "START-TEST-GF-42" TO PAR-NAME.
PERFORM START-INITIALIZE-RECORD.
IF INIT-FLAG NOT EQUAL TO ZERO
MOVE "TEST IMPROPERLY INITIALIZED" TO RE-MARK
MOVE "**" TO FILESTATUS (6)
GO TO START-DELETE-GF-42.
MOVE "TTTTTTTTTT390" TO FS1-RECKEY-1-13.
MOVE "XYYYYYYYYY399ALTKEY1" TO FS1-ALTKEY1-1-20.
MOVE "EEEEEDDDDD010ALTKEY2" TO FS1-ALTKEY2-1-20.
MOVE WRK-FS1-RECKEY TO IX-REC-KEY-AREA.
MOVE WRK-FS1-ALTKEY1 TO IX-ALT-KEY1-AREA.
MOVE WRK-FS1-ALTKEY2 TO IX-ALT-KEY2-AREA.
START-TEST-GF-42.
*>
*> START-TEST-GF-42 - THE START STATEMENT USES AN OPERAND IN THE
*> KEY PHRASE WHICH IS NOT THE NAME OF AN ALTER-
*> NATE KEY BUT IS THE NAME OF A DATA ITEM THAT
*> SUBORDINATE TO THE ALTERNATE KEY. THE CONTENT
*> OF THE DATA ITEM (POSITIONS 1 THRU 5 OF THE
*> ALTERNATE KEY) IS A DUPLICATE OF THE FIRST
*> 5 POSITIONS OF 5 OTHER RECORDS IN THE FILE.
*> THIS TEST EXPECTS THE RECORD POINTER
*> TO BE POSITIONED TO ALTERNATE KEY
*> EEEEEDDDDD020ALTKEY2 (RECORD NO 195) WHICH
*> IS THE FIRST RECORD ALPHABETICALLY IN THE
*> FILE THAT CONTAINS EEEEE IN THE FIRST 5
*> POSITIONS OF THE KEY. NOTE THIS IS ALSO
*> A RECORD IN WHICH THE VALUE OF THE FULL
*> 20 POSITION KEY IS A DUPLICATE OF ANOTHER
*> RECORD (RECORD NUMBER 191). THE ALTERNATE
*> KEY WAS LOADED WITH THE VALUE
*> EEEEEDDDDD010ALTKEY2 (KEY FOR RECORD NUMBER
*> 195) BEFORE THE START WAS EXECUTED.
*>
START IX-FS1
KEY IS EQUAL TO IX-FS1-ALTKEY2-1-5
INVALID KEY MOVE FS1-STATUS TO FILESTATUS (6)
MOVE "INVALID KEY ON START" TO COMPUTED-A
GO TO START-FAIL-GF-42.
MOVE FS1-STATUS TO FILESTATUS (6).
READ IX-FS1 AT END
MOVE "AT END ON READ" TO COMPUTED-A
GO TO START-FAIL-GF-42.
MOVE IX-FS1R1-F-G-240 TO FILE-RECORD-INFO (1).
IF XRECORD-NUMBER (1) EQUAL TO 195
PERFORM PASS
GO TO START-WRITE-GF-42.
*> NOTE(REVIEW): RECNO IS SET TO 65 ALTHOUGH THE EXPECTED RECORD
*> IS 195; RECNO APPEARS TO BE AN INDEX USED BY THE DISPLAY
*> ROUTINE, NOT THE RECORD NUMBER - CONFIRM.
MOVE 65 TO RECNO.
PERFORM DISPLAY-ALTERNATE-KEY2.
MOVE XRECORD-NUMBER (1) TO COMPUTED-18V0.
START-FAIL-GF-42.
MOVE "IX-36; 4.7.2 ETC. " TO RE-MARK
PERFORM FAIL.
MOVE 195 TO CORRECT-18V0.
GO TO START-WRITE-GF-42.
START-DELETE-GF-42.
PERFORM DE-LETE.
START-WRITE-GF-42.
PERFORM PRINT-DETAIL.
START-INIT-GF-43.
MOVE "START-TEST-GF-43" TO PAR-NAME.
PERFORM START-INITIALIZE-RECORD.
IF INIT-FLAG NOT EQUAL TO ZERO
MOVE "TEST IMPROPERLY INITIALIZED" TO RE-MARK
MOVE "**" TO FILESTATUS (7)
GO TO START-DELETE-GF-43.
MOVE "CCCCCCCCCD022" TO FS1-RECKEY-1-13.
MOVE "FFFFFFFFFG022ALTKEY1" TO FS1-ALTKEY1-1-20.
MOVE "380ALTKEY2 " TO FS1-ALTKEY2-1-20.
MOVE WRK-FS1-RECKEY TO IX-REC-KEY-AREA.
MOVE WRK-FS1-ALTKEY1 TO IX-ALT-KEY1-AREA.
MOVE WRK-FS1-ALTKEY2 TO IX-ALT-KEY2-AREA.
START-TEST-GF-43.
*>
*> START-TEST-GF-43 - THE START STATEMENT USES AN OPERAND IN THE
*> KEY PHRASE OF THE START STATEMENT WHICH IS
*> A DATA ITEM SUBORDINATE TO THE ALTERNATE KEY
*> NAME. THE CONTENTS OF THE DATA ITEM
*> (POSITIONS 1 THRU 10 OF THE RECORD KEY)
*> IS LOADED WITH "380ALTKEY2". NO SUCH RECORD
*> SHOULD BE IN THE FILE. IF IN THE COMPARSION,
*> THE LONGER OPERAND IS TRUNCATED ON THE LEFT
*> INSTEAD OF ON THE RIGHT THE CONTENTS OF
*> THE DATA ITEM WILL MATCH A RECORD IN THE
*> FILE. THIS TEST EXPECTS THE LONGER OPERAND
*> TO BE TRUNCATED ON THE RIGHT CAUSING NO
*> DATA ITEM MATCH AND RESULTING IN AN INVALID
*> KEY CONDITION WHEN THE START IS EXECUTED.
*>
START IX-FS1
KEY IS EQUAL TO IX-FS1-ALTKEY2-1-10
INVALID KEY MOVE FS1-STATUS TO FILESTATUS (7)
GO TO START-PASS-GF-43.
MOVE FS1-STATUS TO FILESTATUS (7).
READ IX-FS1 AT END
MOVE "AT END PATH TAKEN ON READ" TO RE-MARK.
MOVE IX-FS1R1-F-G-240 TO FILE-RECORD-INFO (1).
MOVE "IX-36; 4.7.2 ETC. " TO RE-MARK
PERFORM FAIL.
MOVE ALTERNATE-KEY2 (1) TO COMPUTED-A.
MOVE "INVALID KEY" TO CORRECT-A.
GO TO START-WRITE-GF-43.
START-PASS-GF-43.
PERFORM PASS.
GO TO START-WRITE-GF-43.
START-DELETE-GF-43.
PERFORM DE-LETE.
START-WRITE-GF-43.
PERFORM PRINT-DETAIL.
START-INIT-GF-44.
MOVE "START-TEST-GF-44" TO PAR-NAME.
PERFORM START-INITIALIZE-RECORD.
IF INIT-FLAG NOT EQUAL TO ZERO
MOVE "TEST IMPROPERLY INITIALIZED" TO RE-MARK
MOVE "**" TO FILESTATUS (8)
GO TO START-DELETE-GF-44.
MOVE "UUUUUUUUUU400" TO FS1-RECKEY-1-13.
MOVE "YYYYYYYYYY400ALTKEY1" TO FS1-ALTKEY1-1-20.
MOVE "DDDDDDDDDC000ALTKEY2" TO FS1-ALTKEY2-1-20.
MOVE WRK-FS1-RECKEY TO IX-REC-KEY-AREA.
MOVE WRK-FS1-ALTKEY1 TO IX-ALT-KEY1-AREA.
MOVE WRK-FS1-ALTKEY2 TO IX-ALT-KEY2-AREA.
START-TEST-GF-44.
*>
*> START-TEST-GF-44 - THIS TEST USES AN OPERAND IN THE
*> KEY PHRASE OF THE START STATEMENT WHICH IS
*> A DATA ITEM SUBORDINATE TO THE ALTERNATE KEY
*> NAME. THE CONTENTS OF THE DATA ITEM
*> (POSITIONS 1 THRU 10 OF THE ALTERNATE KEY) IS
*> LOADED WITH "DDDDDDDDDC". THIS KEY VALUE
*> IS LOWER THAN ANY ALTERNATE KEY VALUE IN
*> POSITION 1 THRU 10 EXISTING IN THE FILE
*> THEREFORE AN INVALID KEY CONDITION IS
*> EXPECTED WHEN THE START STATEMENT IS
*> EXECUTED.
*>
START IX-FS1
KEY IS EQUAL TO IX-FS1-ALTKEY2-1-10
INVALID KEY MOVE FS1-STATUS TO FILESTATUS (8)
GO TO START-PASS-GF-44.
MOVE FS1-STATUS TO FILESTATUS (8).
READ IX-FS1 AT END
MOVE "AT END PATH TAKEN ON READ" TO RE-MARK.
MOVE IX-FS1R1-F-G-240 TO FILE-RECORD-INFO (1).
MOVE "IX-36; 4.7.2 ETC. " TO RE-MARK
PERFORM FAIL.
MOVE ALTERNATE-KEY2 (1) TO COMPUTED-A.
MOVE "INVALID KEY" TO CORRECT-A.
GO TO START-WRITE-GF-44.
START-PASS-GF-44.
PERFORM PASS.
GO TO START-WRITE-GF-44.
START-DELETE-GF-44.
PERFORM DE-LETE.
START-WRITE-GF-44.
PERFORM PRINT-DETAIL.
*> GF-45 setup: prime the work record with an alternate-key-2 value
*> ("WWWWWWWWWW") higher than any key on file, so the START in
*> START-TEST-GF-45 must raise INVALID KEY.
START-INIT-GF-45.
MOVE "START-TEST-GF-45" TO PAR-NAME.
PERFORM START-INITIALIZE-RECORD.
IF INIT-FLAG NOT EQUAL TO ZERO
MOVE "TEST IMPROPERLY INITIALIZED" TO RE-MARK
MOVE "**" TO FILESTATUS (9)
GO TO START-DELETE-GF-45.
MOVE "UUUUUUUUUU400" TO FS1-RECKEY-1-13.
MOVE "YYYYYYYYYY400ALTKEY1" TO FS1-ALTKEY1-1-20.
MOVE "WWWWWWWWWW400ALTKEY2" TO FS1-ALTKEY2-1-20.
*> Fix: the original moved WRK-FS1-ALTKEY1 to IX-ALT-KEY1-AREA twice
*> and never refreshed IX-REC-KEY-AREA (copy/paste slip - compare the
*> parallel setup in START-INIT-GF-44).
MOVE WRK-FS1-RECKEY TO IX-REC-KEY-AREA.
MOVE WRK-FS1-ALTKEY1 TO IX-ALT-KEY1-AREA.
MOVE WRK-FS1-ALTKEY2 TO IX-ALT-KEY2-AREA.
START-TEST-GF-45.
*>
*> START-TEST-GF-45 - THIS TEST USES AN OPERAND IN THE
*> KEY PHRASE OF THE START STATEMENT WHICH IS
*> A DATA ITEM SUBORDINATE TO THE ALTERNATE KEY
*> NAME. THE CONTENTS OF THE DATA ITEM
*> (POSITIONS 1 THRU 10 OF THE ALTERNATE KEY) IS
*> LOADED WITH "WWWWWWWWWW". THIS KEY VALUE
*> IS GREATER THAN ANY ALTERNATE KEY VALUE IN
*> POSITION 1 THRU 10 EXISTING IN THE FILE
*> THEREFORE AN INVALID KEY CONDITION IS
*> EXPECTED WHEN THE START STATEMENT IS
*> EXECUTED.
*>
START IX-FS1
KEY IS EQUAL TO IX-FS1-ALTKEY2-1-10
INVALID KEY MOVE FS1-STATUS TO FILESTATUS (9)
GO TO START-PASS-GF-45.
*> Falling through means the START unexpectedly succeeded:
*> capture the status, show the record it positioned to, and FAIL.
MOVE FS1-STATUS TO FILESTATUS (9).
READ IX-FS1 AT END
MOVE "AT END PATH TAKEN ON READ" TO RE-MARK.
MOVE IX-FS1R1-F-G-240 TO FILE-RECORD-INFO (1).
MOVE "IX-36; 4.7.2 ETC. " TO RE-MARK
PERFORM FAIL.
MOVE XRECORD-KEY (1) TO COMPUTED-A.
MOVE "INVALID KEY" TO CORRECT-A.
GO TO START-WRITE-GF-45.
START-PASS-GF-45.
PERFORM PASS.
GO TO START-WRITE-GF-45.
START-DELETE-GF-45.
PERFORM DE-LETE.
START-WRITE-GF-45.
PERFORM PRINT-DETAIL.
*> All START tests against the file are done; close it before the
*> file-status verification series below.
CLOSE IX-FS1.
START-INIT-FILE-STATUS-03.
MOVE "FILE STATUS START:00" TO FEATURE.
MOVE "START-TEST-GF-46" TO PAR-NAME.
*>
*> THIS SERIES OF TESTS CHECKS THE CONTENTS OF THE FILE STATUS
*> CAPTURED FROM THE NINE TESTS BEFORE.
*>
START-TEST-GF-46.
*> "**" means the originating test never ran; treat as deleted.
IF FILESTATUS (1) EQUAL TO "**"
PERFORM DE-LETE
GO TO START-WRITE-GF-46.
*>
*> START-TEST-GF-046 - THIS TEST CHECKS THE FILE STATUS CONTENTS
*> RESULTING FROM START-TEST-GF-37. THE FILE
*> STATUS CONTENTS IS EXPECTED TO BE "00".
*>
IF FILESTATUS (1) EQUAL TO "00"
PERFORM PASS
ELSE
MOVE "IX-3; 1.3.4 (1) A; SEE START-TEST-GF-37 " TO RE-MARK
PERFORM FAIL
MOVE "00" TO CORRECT-A
MOVE FILESTATUS (1) TO COMPUTED-A.
START-WRITE-GF-46.
PERFORM PRINT-DETAIL.
START-TEST-GF-47.
MOVE "FILE STATUS START:23" TO FEATURE.
MOVE "START-TEST-GF-47" TO PAR-NAME.
IF FILESTATUS (2) EQUAL TO "**"
PERFORM DE-LETE
GO TO START-WRITE-GF-47.
*>
*> START-TEST-GF-47 - THIS TEST CHECKS THE FILE STATUS CONTENTS
*> RESULTING FROM START-TEST-GF-38. THE FILE
*> STATUS CONTENTS IS EXPECTED TO BE "23".
*>
IF FILESTATUS (2) EQUAL TO "23"
PERFORM PASS
ELSE PERFORM FAIL
MOVE "IX-4; 1.3.4 (3) C; SEE START-TEST-GF-38 " TO RE-MARK
MOVE "23" TO CORRECT-A
MOVE FILESTATUS (2) TO COMPUTED-A.
START-WRITE-GF-47.
PERFORM PRINT-DETAIL.
START-TEST-GF-48.
MOVE "FILE STATUS START:23" TO FEATURE.
MOVE "START-TEST-GF-48" TO PAR-NAME.
IF FILESTATUS (3) EQUAL TO "**"
PERFORM DE-LETE
GO TO START-WRITE-GF-48.
*>
*> START-TEST-GF-48 - THIS TEST CHECKS THE FILE STATUS CONTENTS
*> RESULTING FROM START-TEST-GF-39. THE FILE
*> STATUS CONTENTS IS EXPECTED TO BE "23".
*>
IF FILESTATUS (3) EQUAL TO "23"
PERFORM PASS
ELSE PERFORM FAIL
MOVE "IX-4; 1.3.4 (3) C; SEE START-TEST-GF-39 " TO RE-MARK
MOVE "23" TO CORRECT-A
MOVE FILESTATUS (3) TO COMPUTED-A.
START-WRITE-GF-48.
PERFORM PRINT-DETAIL.
START-TEST-GF-49.
MOVE "FILE STATUS START:23" TO FEATURE.
MOVE "START-TEST-GF-49" TO PAR-NAME.
IF FILESTATUS (4) EQUAL TO "**"
PERFORM DE-LETE
GO TO START-WRITE-GF-49.
*>
*> START-TEST-GF-49 - THIS TEST CHECKS THE FILE STATUS CONTENTS
*> RESULTING FROM START-TEST-GF-40. THE FILE
*> STATUS CONTENTS IS EXPECTED TO BE "23".
*>
IF FILESTATUS (4) EQUAL TO "23"
PERFORM PASS
ELSE PERFORM FAIL
MOVE "IX-4; 1.3.4 (3) C; SEE START-TEST-GF-40 " TO RE-MARK
MOVE "23" TO CORRECT-A
MOVE FILESTATUS (4) TO COMPUTED-A.
START-WRITE-GF-49.
PERFORM PRINT-DETAIL.
START-TEST-GF-50.
MOVE "FILE STATUS START:00" TO FEATURE.
MOVE "START-TEST-GF-50" TO PAR-NAME.
IF FILESTATUS (5) EQUAL TO "**"
PERFORM DE-LETE
GO TO START-WRITE-GF-50.
*>
*> START-TEST-GF-50 - THIS TEST CHECKS THE FILE STATUS CONTENTS
*> RESULTING FROM START-TEST-GF-41. THE FILE
*> STATUS CONTENTS IS EXPECTED TO BE "00"
*>
*> NOTE(review): unlike the sibling tests, the RE-MARK move below
*> sits in the PASS branch rather than the FAIL branch, and it is
*> overwritten unconditionally in START-WRITE-GF-50 anyway - verify
*> against the original test suite before relying on the remark text.
IF FILESTATUS (5) EQUAL TO "00"
PERFORM PASS
MOVE "IX-3; 1.3.4 (1) A; SEE START-TEST-GF-41 " TO RE-MARK
ELSE PERFORM FAIL
MOVE "00" TO CORRECT-A
MOVE FILESTATUS (5) TO COMPUTED-A.
START-WRITE-GF-50.
MOVE "FROM START-TEST-007.05" TO RE-MARK.
PERFORM PRINT-DETAIL.
START-TEST-GF-51.
MOVE "FILE STATUS START:00" TO FEATURE.
MOVE "START-TEST-GF-51" TO PAR-NAME.
IF FILESTATUS (6) EQUAL TO "**"
PERFORM DE-LETE
GO TO START-WRITE-GF-51.
*>
*> START-TEST-GF-51 - THIS TEST CHECKS THE FILE STATUS CONTENTS
*> RESULTING FROM START-TEST-GF-42. THE FILE
*> STATUS CONTENTS IS EXPECTED TO BE "00"
*>
IF FILESTATUS (6) EQUAL TO "00"
PERFORM PASS
ELSE PERFORM FAIL
MOVE "IX-3; 1.3.4 (1) A; SEE START-TEST-GF-42 " TO RE-MARK
MOVE "00" TO CORRECT-A
MOVE FILESTATUS (6) TO COMPUTED-A.
START-WRITE-GF-51.
PERFORM PRINT-DETAIL.
START-TEST-GF-52.
MOVE "FILE STATUS START:23" TO FEATURE.
MOVE "START-TEST-GF-52" TO PAR-NAME.
IF FILESTATUS (7) EQUAL TO "**"
PERFORM DE-LETE
GO TO START-WRITE-GF-52.
*>
*> START-TEST-GF-52 - THIS TEST CHECKS THE FILE STATUS CONTENTS
*> RESULTING FROM START-TEST-GF-43. THE FILE
*> STATUS CONTENTS IS EXPECTED TO BE "23"
*>
IF FILESTATUS (7) EQUAL TO "23"
PERFORM PASS
ELSE PERFORM FAIL
MOVE "IX-4; 1.3.4 (3) C; SEE START-TEST-GF-43 " TO RE-MARK
MOVE "23" TO CORRECT-A
MOVE FILESTATUS (7) TO COMPUTED-A.
START-WRITE-GF-52.
PERFORM PRINT-DETAIL.
START-TEST-GF-53.
MOVE "FILE STATUS START:23" TO FEATURE.
MOVE "START-TEST-GF-53" TO PAR-NAME.
IF FILESTATUS (8) EQUAL TO "**"
PERFORM DE-LETE
GO TO START-WRITE-GF-53.
*>
*> START-TEST-GF-53 - THIS TEST CHECKS THE FILE STATUS CONTENTS
*> RESULTING FROM START-TEST-GF-44. THE FILE
*> STATUS CONTENTS IS EXPECTED TO BE "23".
*>
IF FILESTATUS (8) EQUAL TO "23"
PERFORM PASS
ELSE PERFORM FAIL
MOVE "IX-4; 1.3.4 (3) C; SEE START-TEST-GF-44 " TO RE-MARK
MOVE "23" TO CORRECT-A
MOVE FILESTATUS (8) TO COMPUTED-A.
START-WRITE-GF-53.
PERFORM PRINT-DETAIL.
START-TEST-GF-54.
MOVE "FILE STATUS START:23" TO FEATURE.
MOVE "START-TEST-GF-54" TO PAR-NAME.
IF FILESTATUS (9) EQUAL TO "**"
PERFORM DE-LETE
GO TO START-WRITE-GF-54.
*>
*> START-TEST-GF-54 - THIS TEST CHECKS THE FILE STATUS CONTENTS
*> RESULTING FROM START-TEST-GF-45. THE FILE
*> STATUS CONTENTS IS EXPECTED TO BE "23".
*>
IF FILESTATUS (9) EQUAL TO "23"
PERFORM PASS
ELSE PERFORM FAIL
MOVE "IX-4; 1.3.4 (3) C; SEE START-TEST-GF-45 " TO RE-MARK
MOVE "23" TO CORRECT-A
MOVE FILESTATUS (9) TO COMPUTED-A.
START-WRITE-GF-54.
PERFORM PRINT-DETAIL.
*> End of the file-status verification series; skip the retired
*> close/write paragraphs (left commented out) and finish.
*> START-WRITE-008.
GO TO START-TEST-COMPLETE.
*> START-CLOSE-FILES.
*> GO TO START-TEST-COMPLETE.
*> Common setup: position the file on record "GGGGGGGGGG200" and read
*> it; any failure (bad START, AT END, or wrong record number) sets
*> INIT-FLAG to 1 so the caller can mark its test as not runnable.
START-INITIALIZE-RECORD.
MOVE "GGGGGGGGGG200" TO FS1-RECKEY-1-13.
MOVE ZERO TO INIT-FLAG.
MOVE 9999 TO XRECORD-NUMBER (1).
MOVE SPACE TO IX-FS1R1-F-G-240.
MOVE WRK-FS1-RECKEY TO IX-REC-KEY-AREA.
START IX-FS1 KEY IS EQUAL TO IX-FS1-KEY INVALID KEY
MOVE 1 TO INIT-FLAG.
READ IX-FS1 INTO FILE-RECORD-INFO (1)
AT END MOVE 1 TO INIT-FLAG.
IF XRECORD-NUMBER (1) NOT EQUAL TO 100
MOVE 1 TO INIT-FLAG.
*> "**" marks the status as "test never executed" until a real
*> file status is captured by the test itself.
MOVE "**" TO FS1-STATUS.
*> Reporting helpers: show computed vs expected key values on the
*> detail line without recording a pass/fail verdict.
DISPLAY-RECORD-KEYS.
MOVE XRECORD-KEY (1) TO WRK-FS1-RECKEY.
MOVE FS1-RECKEY-1-13 TO COMPUTED-A.
MOVE RECKEY-VALUE (RECNO) TO CORRECT-A.
MOVE SPACE TO P-OR-F.
MOVE "RECORD KEY VALUES" TO RE-MARK.
PERFORM PRINT-DETAIL.
DISPLAY-ALTERNATE-KEY1.
MOVE ALTERNATE-KEY1 (1) TO WRK-FS1-ALTKEY1.
MOVE FS1-ALTKEY1-1-20 TO COMPUTED-A.
MOVE ALTKEY1-VALUE (RECNO) TO CORRECT-A.
MOVE SPACE TO P-OR-F.
MOVE "ALTERNATE RECORD KEY1 VALUES" TO RE-MARK.
PERFORM PRINT-DETAIL.
DISPLAY-ALTERNATE-KEY2.
MOVE ALTERNATE-KEY2 (1) TO WRK-FS1-ALTKEY2.
MOVE FS1-ALTKEY2-1-20 TO COMPUTED-A.
MOVE ALTKEY2-VALUE (RECNO) TO CORRECT-A.
MOVE SPACE TO P-OR-F.
MOVE "ALTERNATE RECORD KEY2 VALUES" TO RE-MARK.
PERFORM PRINT-DETAIL.
START-TEST-COMPLETE.
EXIT.
CCVS-EXIT SECTION.
CCVS-999999.
GO TO CLOSE-FILES.
*> END-OF,IX209A
|
# Loading relevant packages
library(psychTools)
library(lavaan) # for SEM fit and model functions
library(semPlot) # for semPaths()
library(semptools) # for set_sem_layout
library(tidyverse) # for tidy code
library(CompQuadForm) # for multiple linearity
library(ICS) # mvnorm.kur.test
library(formattable) # for formattable() table below (was missing -> runtime error)

# Load Data
data <- holzinger.swineford

# The ten observed indicators used throughout (named once to avoid the
# duplicated literal vectors of the original script).
indicators <- c("t01_visperc", "t02_cubes", "t03_frmbord", "t04_lozenges",
                "t06_paracomp", "t07_sentcomp", "t09_wordmean",
                "t10_addition", "t12_countdot", "t13_sccaps")

# Model building: three correlated latent factors
model.A <- '
visual.percep =~ t01_visperc + t02_cubes + t03_frmbord + t04_lozenges
verbal.ability =~ t06_paracomp + t07_sentcomp + t09_wordmean
process.speed =~ t10_addition + t12_countdot + t13_sccaps
visual.percep ~~ verbal.ability
visual.percep ~~ process.speed
verbal.ability ~~ process.speed
'

# Checking assumptions for ML estimator
# Multivariate normality
mvnorm.kur.test(data[, indicators])
mvnorm.skew.test(data[, indicators])
# both p-values are significant -> non-normality
# MLM estimator (ML with Satorra-Bentler corrections) is used
# -> normality-adjusted robust standard errors should be used
fit.A <- sem(model.A, data = data, estimator = "MLM")
plot.A <- semPaths(fit.A, label.scale=F, nCharNodes = 8,
         sizeMan2=3.5, sizeMan=9, asize=3, edge.color="black", residuals = F, fixedStyle = 1,
         whatLabels = "est")
summary(fit.A, fit.measures = T)
# Model Chi-Squared: 84.004 (df = 32, p = 0.000)
# CFI = 0.941
# TLI = 0.917
# RMSEA 0.075 (90 Percent confidence interval - lower = 0.056, - upper = 0.095)
# AIC = 8296.856
# BIC = 8309.177
# -> no sufficient model fit

# Model B: same structure plus a residual covariance between the two
# speeded numerical tasks
model.B <- '
visual.percep =~ t01_visperc + t02_cubes + t03_frmbord + t04_lozenges
verbal.ability =~ t06_paracomp + t07_sentcomp + t09_wordmean
process.speed =~ t10_addition + t12_countdot + t13_sccaps
visual.percep ~~ verbal.ability
visual.percep ~~ process.speed
verbal.ability ~~ process.speed
t10_addition ~~ t12_countdot
'
fit.B <- sem(model.B, data = data, estimator = "MLM")
plot.B <- semPaths(fit.B, label.scale=F, nCharNodes = 8,
         sizeMan2=3.5, sizeMan=9, asize=3, edge.color="black", residuals = F, fixedStyle = 1)
summary(fit.B, fit.measures = T)
# Test statistic = 53.971 (df = 31, p = 0.006)
# CFI = 0.971
# TLI = 0.958
# RMSEA = 0.05 ( 90 Percent confidence interval - lower = 0.03,-upper = 0.074)
# AIC = 8267.652
# BIC = 8356.623

# Since models are nested we can also run a Chi-squared difference test
anova <- as.matrix(anova(fit.A, fit.B))
# NOTE(review): 0.974 here disagrees with the CFI = 0.971 reported for
# model B above - confirm which value came from the actual output.
CFI <- as.data.frame(matrix(c(0.974, 0.941), ncol = 1))
colnames(CFI) <- "CFI"
anova <- round(data.frame(anova, CFI), digits = 3)
rownames(anova) <- c("Model.B", "Model.A")
formattable(as.data.frame(anova))
# significant on a 0.001 level
summary(fit.B, standardized = T)
standardizedsolution(fit.B)
plot.B <- semPaths(fit.B, label.scale=F, nCharNodes = 8,
         sizeMan2=3.5, sizeMan=9, asize=3, edge.color="black", residuals = F, fixedStyle = 1,
         whatLabels = "std")
|
import os
import requests
from bs4 import BeautifulSoup
from dotenv import load_dotenv
load_dotenv()
# Target URL and request headers come from the environment (.env), so the
# scraper can be reconfigured without code changes.
url_sporting = os.getenv("URL_SPORTING")
headers = {
    'Accept': os.getenv("HEADER_ACCEPT"),
    'User-Agent': os.getenv("USER_AGENT")
}
def sporting_parse():
    """Scrape the ticket page and return upcoming football events.

    Returns:
        list[str]: one ``"<date> - <home> - <away>"`` string per match, or a
        single placeholder message when no football tickets are listed.
    """
    # timeout added so a stalled server cannot hang the caller forever
    req_sporting = requests.get(url_sporting, headers=headers, timeout=10)
    src_sporting = req_sporting.text
    soup_sporting = BeautifulSoup(src_sporting, 'lxml')
    # Find all menu-title blocks that may contain ticket information
    bilhetes_blocks = soup_sporting.find_all('div', class_='mmsubmenu__title')
    football_events = []
    for block in bilhetes_blocks:
        if 'bilhetes futebol' in block.text.lower():
            # Football-ticket block found; the match list follows it
            matches_info = block.find_next('ul', class_='mmproximojogo')
            if matches_info is None:
                # Layout changed or no matches listed under this block
                continue
            matches = matches_info.find_all('li', class_='mmproximojogo__item')
            for match in matches:
                date_time_tag = match.find('div', class_='item__date')
                teams = match.find_all('div', class_='equipa')
                # Skip malformed entries instead of raising AttributeError/IndexError
                if date_time_tag is None or len(teams) < 2:
                    continue
                date_time = date_time_tag.text.strip()
                home_team = teams[0].text.strip()
                away_team = teams[1].text.strip()
                football_events.append(f'{date_time} - {home_team} - {away_team}')
    if not football_events:
        football_events.append('Спортинг на этой неделе не играет')
    print(football_events[0])
    return football_events


if __name__ == "__main__":
    sporting_parse()
|
//
// OnBoardingView.swift
// Restart
//
// Created by Kunth Shah on 15/01/24.
//
import SwiftUI
struct OnBoardingView: View {
    // MARK: PROPERTY

    /// Persisted flag; set to false when the user completes the swipe,
    /// which dismisses onboarding on the next launch.
    @AppStorage("onBoarding") var isonBoardingViewActive:Bool = true

    /// Full width of the swipe track (screen width minus 40pt margins each side).
    @State private var buttonWidth: Double = UIScreen.main.bounds.width - 80
    /// Current horizontal offset of the draggable knob (0 = resting).
    @State private var buttonOffSet: CGFloat = 0
    /// Drives the entrance animations; flipped to true in onAppear.
    @State private var isAnimating = false

    // MARK: BODY
    var body: some View {
        ZStack {
            Color("ColorBlue")
                .ignoresSafeArea()
            VStack {
                // MARK: - HEADER
                Spacer()
                VStack(spacing: 0) {
                    Text("Wizardry.")
                        .font(.system(size: 60))
                        .fontWeight(.heavy)
                        .foregroundStyle(Color.white)
                    Text("""
                    Cast your own spells, because life's more magical when you're the wizard.
                    """)
                    .font(.title3)
                    .fontWeight(.light)
                    .foregroundStyle(Color.white)
                    .multilineTextAlignment(.center)
                    .padding(.horizontal, 10)
                } //: HEADER
                .opacity(isAnimating ? 1: 0)
                .offset(y: isAnimating ? 0 : -40)
                .animation(.easeOut(duration: 1), value: isAnimating)

                // MARK: - CENTER
                ZStack {
                    Circle()
                        .stroke(Color.white.opacity(0.2), lineWidth: 40)
                        .frame(width: 260, height: 260, alignment: .center)
                    Circle()
                        .stroke(Color.white.opacity(0.2), lineWidth: 80)
                        .frame(width: 260, height: 260, alignment: .center)
                    Image("character-3")
                        .resizable()
                        .scaledToFit()
                        .opacity(isAnimating ? 1 : 0)
                        .animation(.easeOut(duration: 0.5), value: isAnimating)
                } //: CENTER
                Spacer()

                // MARK: - FOOTER
                ZStack {
                    ZStack { // MARK: LOWER VIEW
                        Capsule()
                            .fill(Color.white.opacity(0.2))
                        Capsule()
                            .fill(Color.white.opacity(0.2)).padding(8)
                        Text("Launch Battle")
                            .font(.system(.title3, design: .rounded))
                            .fontWeight(.bold)
                            .foregroundStyle(Color.white)
                            .offset(x: 20, y:-1)
                        HStack {
                            // Red fill trails the knob to show swipe progress
                            Capsule()
                                .fill(Color("ColorRed"))
                                .frame(width: buttonOffSet + 80)
                            Spacer()
                        }
                    } // LOWER VIEW
                    HStack { // MARK: - UPPER BUTTON
                        ZStack {
                            Capsule()
                                .fill(Color("ColorRed"))
                            Capsule()
                                .fill(.black.opacity(0.15))
                                .padding(8)
                            Image(systemName: "chevron.right.2")
                                .font(
                                    .system(size: 24)
                                    .weight(.bold)
                                )
                        }
                        .foregroundStyle(Color.white)
                        .frame(width: 80)
                        .offset(x: buttonOffSet)
                        .gesture(
                            DragGesture().onChanged { gesture in
                                // Fix: clamp the offset to the track end. The original
                                // checked the stale offset before assigning, so the
                                // 80pt knob could overshoot past `buttonWidth - 80`.
                                if gesture.translation.width > 0 {
                                    buttonOffSet = min(gesture.translation.width, buttonWidth - 80)
                                }
                            }
                                .onEnded { _ in
                                    withAnimation(Animation.easeIn(duration: 1)) {
                                        // Snap back below the halfway point,
                                        // otherwise complete and leave onboarding.
                                        if buttonOffSet < buttonWidth / 2 {
                                            buttonOffSet = 0
                                            isonBoardingViewActive = true
                                        }
                                        else {
                                            buttonOffSet = buttonWidth - 80
                                            isonBoardingViewActive = false
                                        }
                                    }
                                }
                        ) //: GESTURE
                        Spacer()
                    } // UPPER BUTTON
                }.frame(width:buttonWidth, height: 80, alignment: .center)
                    .padding()
                    .opacity(isAnimating ? 1 : 0)
                    .offset(x:isAnimating ? 0: 40)
                    .animation(.easeOut(duration: 1), value: isAnimating)
                // FOOTER
            }
        } //: ZSTACK
        .onAppear(perform: {
            isAnimating = true
        })
    }
}
// Xcode canvas preview for the onboarding screen.
#Preview {
    OnBoardingView()
}
|
"""This module contains the CacheHandler for serializing and deserializing data using joblib."""
from pathlib import Path
from typing import Any
import joblib
from .base_cache_handler import CacheHandler
def write_joblib(cache_file_path: Path, output: Any) -> None:
    """Serialize ``output`` with joblib and persist it at ``cache_file_path``.

    Args:
        cache_file_path: Destination path for the serialized cache file.
        output: Arbitrary Python object to serialize and store.
    """
    joblib.dump(output, cache_file_path)
def read_joblib(cache_file_path: Path) -> Any:
    """Load and return the object stored in the joblib cache file.

    Args:
        cache_file_path: Location of an existing joblib cache file.

    Returns:
        Whatever Python object was previously written to the file.
    """
    return joblib.load(cache_file_path)
# Shared handler instance wiring the joblib read/write callbacks to the
# "joblib" format tag; can_handle_none=True means None is a cacheable value.
JOBLIB_CACHE_HANDLER = CacheHandler(write_joblib, read_joblib, "joblib", can_handle_none=True)
"""A CacheHandler for Python objects using joblib."""
|
import ScrollLeftArrowIcon from "@/components/Icons/ScrollLeftArrowIcon";
import ScrollRightArrowIcon from "@/components/Icons/ScrollRightArrowIcon";
import ProductCard from "@/components/Store/ProductCard";
import Product from "@/hooks/entities/Product";
import useCarousel from "@/hooks/uses/useCarousel";
/** Props for the SimpleCarousel component. */
interface SimpleCarouselProps {
  products: Product[], // items rendered one per carousel slide
  id: string, // unique id used to build the per-slide DOM element ids
}
/**
 * A single-visible-slide carousel with previous/next arrows.
 * Slide visibility, current-slide tracking and scroll direction all come
 * from the useCarousel hook, keyed by `props.id`.
 */
export default function SimpleCarousel(props: SimpleCarouselProps) {
  const carousel = useCarousel(props.products.length, props.id);

  // Resolve the fade animation class for the slide at `index` based on
  // whether it is the current slide and which way the user last scrolled.
  const animationClass = (index: number): string => {
    const direction = carousel.getScrollDirection();
    if (carousel.isItCurrentSlide(index)) {
      if (direction === 'previous') return 'animate-fade-in-left';
      if (direction === 'next') return 'animate-fade-in-right';
      return '';
    }
    if (direction === 'previous') return 'animate-fade-out-right';
    if (direction === 'next') return 'animate-fade-out-left';
    return '';
  };

  // NOTE(review): the "previous" control renders ScrollRightArrowIcon and
  // the "next" control renders ScrollLeftArrowIcon, matching the original
  // markup — confirm the icon components are named after their glyph.
  return (
    <div className="flex items-center space-x-2">
      <span
        className={carousel.isTherePrevious() ? `cursor-pointer` : 'opacity-50'}
        onClick={() =>
          carousel.isTherePrevious() &&
          carousel.scrollToSlide('previous')
        }
      >
        <ScrollRightArrowIcon />
      </span>
      <div className="w-52 overflow-hidden">
        <ul className="flex justify-center">
          {props.products.map((product, index) => (
            <li
              id={`carousel-${props.id}-item-${index}`}
              key={index}
              className={`
                ${carousel.isSlideVisible(index) ? 'visible' : 'hidden'}
                ${animationClass(index)}
              `}
            >
              <ProductCard product={product} />
            </li>
          ))}
        </ul>
      </div>
      <span
        className={carousel.isThereNext() ? `cursor-pointer` : 'opacity-50'}
        onClick={() =>
          carousel.isThereNext() &&
          carousel.scrollToSlide("next")
        }
      >
        <ScrollLeftArrowIcon />
      </span>
    </div>
  )
}
|
import 'package:flutter_test/flutter_test.dart';
import 'package:weather_forecast/domain/models/weather_model.dart';
import 'package:weather_forecast/infra/weather_api/models/weather_api_weather_response.dart';
import 'package:weather_forecast/infra/weather_api/translates/translate_weather_api.dart';
import '../mocks/weather_api_mocks.dart';
void main() {
  // NOTE(review): "Weater" in the group description is a typo for
  // "Weather" — it is a runtime string, so it is only flagged here.
  group("Translate Weater Api Tests", () {
    // Happy path: every field of the API response maps onto the model.
    test("Ensure that a Weather API Response object is converted to Model without errors", () {
      // Arrange
      TranslateWeatherApi translateWeatherApi = TranslateWeatherApi();
      WeatherApiWeatherResponse response = WeatherApiMocks.weatherApiWeatherResponse();
      // Action
      WeatherModel? weatherModel = translateWeatherApi.toEntity(response);
      // Assert
      expect(weatherModel, isNotNull);
      expect(weatherModel.dateTime, DateTime(2023, 10, 14, 21, 0, 0));
      expect(weatherModel.summary, response.weather?.main);
      expect(weatherModel.description, response.weather?.description);
      expect(weatherModel.icon, response.weather?.icon);
      expect(weatherModel.temperatureCurrent, response.main?.temp);
      expect(weatherModel.temperatureCurrentFeelsLike, response.main?.feelsLike);
      expect(weatherModel.temperatureMin, response.main?.tempMin);
      expect(weatherModel.temperatureMax, response.main?.tempMax);
    });
    // Failure path: a null mandatory field must surface as a TypeError.
    test("Ensure that throws a Type Error when mandatory response field is null", () {
      // Arrange
      TranslateWeatherApi translateWeatherApi = TranslateWeatherApi();
      WeatherApiWeatherResponse response = WeatherApiMocks.weatherApiWeatherResponse(weatherMainValueNull: true);
      // Action and Assert
      expect(() => translateWeatherApi.toEntity(response), throwsA(isA<TypeError>()));
    });
  });
}
|
package com.ozgen.telegrambinancebot.model.binance;
import lombok.Data;
import lombok.ToString;
import org.hibernate.annotations.GenericGenerator;
import javax.persistence.CollectionTable;
import javax.persistence.ElementCollection;
import javax.persistence.Embeddable;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.PrePersist;
import javax.persistence.PreUpdate;
import java.util.Date;
import java.util.List;
/**
 * JPA entity persisting an exchange order response.
 * Numeric-looking amounts (price, quantities) are stored as strings,
 * matching the wire format, to avoid floating-point precision loss.
 */
@Entity
@Data
@ToString
public class OrderResponse {

    // Surrogate primary key generated as a UUID string.
    @Id
    @GeneratedValue(generator = "UUID")
    @GenericGenerator(name = "UUID", strategy = "org.hibernate.id.UUIDGenerator")
    private String id;

    private String symbol;
    private Long orderId;
    private Long orderListId;
    private String clientOrderId;
    private Long transactTime;
    private String price;
    private String origQty;
    private String executedQty;
    private String cummulativeQuoteQty;
    private String status;
    private String timeInForce;
    private String type;
    private String side;
    private Long workingTime;
    private String selfTradePreventionMode;

    // Partial fills of this order, stored in a separate collection table.
    @ElementCollection
    @CollectionTable(name = "order_fills", joinColumns = @JoinColumn(name = "order_response_id"))
    private List<Fill> fills;

    // Audit timestamps maintained by the JPA lifecycle callbacks below.
    private Date createdAt;
    private Date updatedAt;

    /** Stamps both audit fields just before the first insert. */
    @PrePersist
    protected void onCreate() {
        createdAt = new Date();
        updatedAt = new Date();
    }

    /** Refreshes the update timestamp before every update. */
    @PreUpdate
    protected void onUpdate() {
        updatedAt = new Date();
    }
}
/**
 * Embeddable value object for a single partial fill of an order;
 * rows live in the "order_fills" collection table.
 */
@Embeddable
@Data
class Fill {
    private String price;
    private String qty;
    private String commission;
    private String commissionAsset;
    private Long tradeId;
}
|
import type { NextPage } from "next";
import { withUrqlClient } from "next-urql";
import Banner from "../components/Banner";
import Navbar from "../components/Navbar";
import Text from "../content/landing";
import { createUrqlClient } from "../utils/createUrqlClient";
/** Landing page: static marketing sections sourced from the landing content module. */
const Landing: NextPage = () => {
    return (
        <div className='section'>
            <Navbar />
            <Banner page='Welcome'>Welcome</Banner>
            <div className='container'>
                <div className='landingContainer'>
                    <div className='aboutPTC landing-box'>
                        <div className='aboutTitle'>
                            <h2 className='landingTitle'>About PTC</h2>
                            <div className='welcomeContentContainer'>
                                <p className='welcomeContent'>
                                    {Text.aboutParaOne}
                                </p>
                                <p className='welcomeContent'>
                                    {Text.aboutParaTwo}
                                </p>
                            </div>
                        </div>
                    </div>
                    <div className='codingChallenge landing-box'>
                        <div className='whatTitle'>
                            <h2 className='landingTitle'>
                                PTC’s Coding Challenge?
                            </h2>
                        </div>
                        <div className='welcomeContentContainer'>
                            <p className='welcomeContent'>{Text.whatParaOne}</p>
                        </div>
                    </div>
                    <div className='learningGrowth landing-box'>
                        <div className='learningTitle'>
                            <h2 className='landingTitle'>
                                Learning and Growth
                            </h2>
                        </div>
                        <div className='welcomeContentContainer'>
                            <p className='welcomeContent'>
                                {Text.learningParaOne}
                            </p>
                            <ul>
                                <li>{Text.learningParaTwo}</li>
                                <li>{Text.learningParaThree}</li>
                            </ul>
                        </div>
                    </div>
                    <div className='getStarted landing-box'>
                        <div className='getStartedTitle'>
                            <h2 className='landingTitle'>Get Started!</h2>
                        </div>
                        <div className='welcomeContentContainer'>
                            {/* NOTE(review): this section reuses learningParaOne, the same
                                text as "Learning and Growth" above; the numbered items use
                                getStartedParaTwo..Four, so a getStarted intro paragraph was
                                probably intended here — confirm against the content module. */}
                            <p className='welcomeContent'>
                                {Text.learningParaOne}
                            </p>
                            <ol>
                                <li>
                                    {Text.getStartedParaTwo}
                                    <a href='rules'>Rules</a>
                                </li>
                                <li>
                                    {Text.getStartedParaThree}
                                    <a href='challenges/1'>Challenges</a>
                                </li>
                                <li>{Text.getStartedParaFour}</li>
                            </ol>
                        </div>
                    </div>
                    <div className='landing-goodluck landing-box'>
                        Good Luck!
                    </div>
                </div>
            </div>
        </div>
    );
};
// Wrap with the urql GraphQL client; ssr: true enables server-side data fetching.
export default withUrqlClient(createUrqlClient, { ssr: true })(Landing);
|
import React, { useState } from "react";
import emailjs from "@emailjs/browser";
import { useNavigate } from "react-router-dom";
import { createUserWithEmailAndPassword } from "firebase/auth";
import { auth } from "../firebase";
export default function SignUpForm() {
const navigate = useNavigate();
const [name, setName] = useState("");
const [email, setEmail] = useState("");
const [password, setPassword] = useState("");
const [phone, setPhone] = useState("");
//Sending Email
const handleSubmit = async (e) => {
e.preventDefault();
try {
await createUserWithEmailAndPassword(auth, email, password);
navigate("/SignInForm");
console.log("Account Successfully Created");
} catch {
console.log("Sorry, something went wrong. Please try again.");
}
const serviceId = "service_er49brg";
const templateId = "template_wqmjn78";
const publicKey = "gpoRMpYBn1tyNN7kH";
const templateParams = {
from_name: name,
from_email: email,
to_name: name,
user_password: password,
phone: phone,
};
emailjs
.send(serviceId, templateId, templateParams, publicKey)
.then((response) => {
console.log("Email Sent Successfully", response);
navigate("/SignInForm");
setName("");
setEmail("");
setPassword("");
setPhone("");
})
.catch((error) => {
console.error("Error Sending email : ", error);
});
};
return (
<div>
<form className="signupfom d-flex flex-column" >
<div>
<label>Name : </label>
<input
type="text"
value={name}
onChange={(e) => setName(e.target.value)}
style={{height:"30px", width:"200px"}}
/>
</div>
<br />
<div>
<label>Email : </label>
<input
type="email"
value={email}
onChange={(e) => setEmail(e.target.value)}
style={{height:"30px", width:"200px"}}
/>
</div>
<br />
<div>
<label>Password : </label>
<input
type="password"
value={password}
onChange={(e) => setPassword(e.target.value)}
style={{height:"30px", width:"200px"}}
/>
</div>
<br />
<div className="d-flex justify-content-center">
<label>Phone : </label>
<input
type="phone"
value={phone}
onChange={(e) => setPhone(e.target.value)}
style={{height:"30px", width:"200px"}}
/>
</div>
<br />
<div>
<button className="btn border" type="submit" onClick={(e) => handleSubmit(e)}>
Send Email
</button>
</div>
<br />
</form>
</div>
);
}
|
/*
* Tencent is pleased to support the open source community by making BK-JOB蓝鲸智云作业平台 available.
*
* Copyright (C) 2021 THL A29 Limited, a Tencent company. All rights reserved.
*
* BK-JOB蓝鲸智云作业平台 is licensed under the MIT License.
*
* License for BK-JOB蓝鲸智云作业平台:
* --------------------------------------------------------------------
* Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
* documentation files (the "Software"), to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and
* to permit persons to whom the Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all copies or substantial portions of
* the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO
* THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
* CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
* IN THE SOFTWARE.
*/
package com.tencent.bk.job.common.model.dto;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.tencent.bk.job.common.annotation.PersistenceObject;
import com.tencent.bk.job.common.model.openapi.v4.OpenApiHostDTO;
import com.tencent.bk.job.common.model.vo.CloudAreaInfoVO;
import com.tencent.bk.job.common.model.vo.HostInfoVO;
import com.tencent.bk.job.common.util.ip.IpUtils;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.Setter;
import lombok.ToString;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import java.util.Objects;
import java.util.StringJoiner;
/**
* 作业执行对象-主机模型
*/
@Setter
@Getter
@NoArgsConstructor
@JsonInclude(JsonInclude.Include.NON_NULL)
@ToString
@PersistenceObject
@Slf4j
public class HostDTO implements Cloneable {
/**
* 主机ID
*/
@JsonProperty("hostId")
private Long hostId;
/**
* 主机 Agent ID
*/
@JsonProperty("agentId")
private String agentId;
/**
* 云区域ID
*/
@JsonProperty("cloudAreaId")
private Long bkCloudId;
/**
* 云区域名称
*/
@JsonProperty("bkCloudName")
private String bkCloudName;
/**
* 主机IP - IPv4
*/
@JsonProperty("ip")
private String ip;
/**
* 主机IP - IPv6
*/
@JsonProperty("ipv6")
private String ipv6;
/**
* agent存活状态,0-异常,1-正常
*/
@JsonInclude(JsonInclude.Include.NON_NULL)
private Integer alive;
/**
* 操作系统名称
*/
private String osName;
/**
* 操作系统类型
*/
private String osType;
/**
* 操作系统类型名称
*/
private String osTypeName;
/**
* 主机名称
*/
private String hostname;
/**
* 所属云厂商ID
*/
private String cloudVendorId;
/**
* 所属云厂商名称
*/
private String cloudVendorName;
@Deprecated
public HostDTO(Long bkCloudId, String ip) {
this.bkCloudId = bkCloudId;
this.ip = ip;
}
public HostDTO(Long hostId) {
this.hostId = hostId;
}
public HostDTO(Long hostId, Long bkCloudId, String ip) {
this.hostId = hostId;
this.bkCloudId = bkCloudId;
this.ip = ip;
}
public static HostDTO fromHostId(Long hostId) {
HostDTO hostDTO = new HostDTO();
hostDTO.setHostId(hostId);
return hostDTO;
}
@Deprecated
public static HostDTO fromCloudIp(String cloudIp) {
if (!IpUtils.checkCloudIp(cloudIp)) {
throw new IllegalArgumentException("Invalid cloudIp : " + cloudIp);
}
String[] ipProps = cloudIp.split(IpUtils.COLON);
return new HostDTO(Long.valueOf(ipProps[0]), ipProps[1]);
}
@Deprecated
public static HostDTO fromHostIdOrCloudIp(Long hostId, String cloudIp) {
HostDTO host = new HostDTO();
host.setHostId(hostId);
if (StringUtils.isNotEmpty(cloudIp)) {
String[] ipProps = cloudIp.split(IpUtils.COLON);
host.setBkCloudId(Long.valueOf(ipProps[0]));
host.setIp(ipProps[1]);
}
return host;
}
/**
* 返回主机 云区域:ipv4
*/
public String toCloudIp() {
if (StringUtils.isEmpty(ip)) {
return null;
} else {
return bkCloudId + ":" + ip;
}
}
/**
* 返回主机 云区域:ipv6
*/
public String toCloudIpv6() {
if (StringUtils.isEmpty(ipv6)) {
return null;
} else {
return bkCloudId + ":" + ipv6;
}
}
public HostInfoVO toHostInfoVO() {
HostInfoVO hostInfoVO = new HostInfoVO();
hostInfoVO.setHostId(hostId);
hostInfoVO.setCloudArea(new CloudAreaInfoVO(bkCloudId, bkCloudName));
hostInfoVO.setIp(ip);
hostInfoVO.setIpv6(ipv6);
hostInfoVO.setHostName(hostname);
hostInfoVO.setOsName(osName);
hostInfoVO.setOsTypeName(osTypeName);
hostInfoVO.setAlive(alive);
hostInfoVO.setAgentId(agentId);
hostInfoVO.setCloudVendorName(cloudVendorName);
return hostInfoVO;
}
public static HostDTO fromHostInfoVO(HostInfoVO hostInfoVO) {
if (hostInfoVO == null) {
return null;
}
HostDTO hostDTO = new HostDTO();
hostDTO.setHostId(hostInfoVO.getHostId());
hostDTO.setIp(hostInfoVO.getIp());
hostDTO.setIpv6(hostInfoVO.getIpv6());
CloudAreaInfoVO cloudAreaInfo = hostInfoVO.getCloudArea();
if (cloudAreaInfo != null) {
hostDTO.setBkCloudId(cloudAreaInfo.getId());
hostDTO.setBkCloudName(cloudAreaInfo.getName());
}
hostDTO.setAlive(hostInfoVO.getAgentStatus());
hostDTO.setOsName(hostInfoVO.getOsName());
hostDTO.setOsTypeName(hostInfoVO.getOsTypeName());
return hostDTO;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
HostDTO otherHost = (HostDTO) o;
if (hostId != null && otherHost.getHostId() != null) {
return hostId.equals(otherHost.getHostId());
} else if (this.getIp() != null && otherHost.getIp() != null) {
// 兼容没有hostId,只有ip的的场景
return bkCloudId.equals(otherHost.bkCloudId) &&
ip.equals(otherHost.ip);
} else {
return false;
}
}
@Override
public int hashCode() {
if (hostId != null) {
return Objects.hashCode(hostId);
} else {
// 兼容没有hostId的场景
return Objects.hash(bkCloudId, ip);
}
}
@SuppressWarnings("all")
public HostDTO clone() {
HostDTO clone = new HostDTO();
clone.setHostId(hostId);
clone.setAgentId(agentId);
clone.setBkCloudId(bkCloudId);
clone.setBkCloudName(bkCloudName);
clone.setIp(ip);
clone.setIpv6(ipv6);
clone.setAlive(alive);
return clone;
}
/**
* 获取主机的唯一KEY,用于去重等操作
*
* @return 主机KEY
*/
@JsonIgnore
public String getUniqueKey() {
if (hostId != null) {
return "HOST_ID:" + hostId;
} else {
return "HOST_IP:" + toCloudIp();
}
}
/**
* 获取主机的ip,优先返回ipv4
*
* @return 主机ipv4/ipv6, ipv4 优先
*/
@JsonIgnore
public String getPrimaryIp() {
return StringUtils.isNotEmpty(ip) ? ip : ipv6;
}
public String toStringBasic() {
return new StringJoiner(", ", HostDTO.class.getSimpleName() + "[", "]")
.add("hostId=" + hostId)
.add("bkCloudId=" + bkCloudId)
.add("ip='" + ip + "'")
.add("ipv6='" + ipv6 + "'")
.toString();
}
/**
 * Overwrites this host's fields with the values from the given host.
 * A null argument is a no-op. Unlike clone(), this copies the full field
 * set, including os*, cloud vendor and hostname.
 *
 * @param host source host; may be null
 */
public void updateByHost(HostDTO host) {
    if (host == null) {
        return;
    }
    this.hostId = host.getHostId();
    this.agentId = host.getAgentId();
    this.bkCloudId = host.getBkCloudId();
    this.bkCloudName = host.getBkCloudName();
    this.ip = host.getIp();
    this.ipv6 = host.getIpv6();
    this.osName = host.getOsName();
    this.osType = host.getOsType();
    this.osTypeName = host.getOsTypeName();
    this.alive = host.getAlive();
    this.cloudVendorId = host.getCloudVendorId();
    this.cloudVendorName = host.getCloudVendorName();
    this.hostname = host.getHostname();
}
/**
 * Converts this host to the DTO exposed through the open API, copying the
 * identity and network fields only.
 *
 * @return a new OpenApiHostDTO mirroring this host
 */
public OpenApiHostDTO toOpenApiHostDTO() {
    OpenApiHostDTO result = new OpenApiHostDTO();
    result.setHostId(hostId);
    result.setBkCloudId(bkCloudId);
    result.setBkCloudName(bkCloudName);
    result.setIp(ip);
    result.setIpv6(ipv6);
    result.setAlive(alive);
    result.setAgentId(agentId);
    return result;
}
}
|
#pragma once
#include <iostream>
#include <string>
#include "Employee.h"
class Fresher : public Employee
{
private:
string graDate;
string graRank;
string education;
public:
Fresher(string _graDate,string _graRank,string _education , int _id, string _fullName, string _phone, string _email, EmployeeType _employeeType, const vector<Certificate *> &_certificates)
: Employee(_id, _fullName, _phone, _email, _employeeType, _certificates), graDate(_graDate), graRank(_graRank), education(_education) {}
~Fresher() {}
void setGraDate(string _graDate)
{
graDate = _graDate;
}
string getGraDate() const
{
return graDate;
}
void setGraRank(string _graRank)
{
graRank = _graRank;
}
string getGraRank() const
{
return graRank;
}
void setEducation(string _education)
{
education = _education;
}
string getEducation() const
{
return education;
}
void showInfo() const
{
Employee::showInfo();
cout << "Graduation Date: " << graDate << endl;
cout << "Graduation Rank: " << graRank << endl;
cout << "Education: " << education << endl;
}
};
|
// truyền dữ liệu từ cha xuống con có 2 cách :
// 1. sử dụng thằng props
// 2. sử dụng hook context
// ví dụ: có màn A => màn B => màn C
import { createContext, useState } from "react";
import ParagraphUseContext from "./useContextChild";
export const ThemeContext = createContext()
function AppUseContext(){
const [theme, setTheme] = useState('blue')
const toggleTheme = () => {
setTheme(theme === 'yellow' ? 'blue': 'yellow');
}
return (
<ThemeContext.Provider value={theme}> <div>
<button style={{padding: 20}} onClick={toggleTheme}>
Toggle theme
</button>
<ParagraphUseContext/>
</div>
</ThemeContext.Provider>
)
}
export default AppUseContext
|
<div class="sticky-top">
  <!-- Desktop-only top strip (lg and up): country placeholder + cart badge -->
  <div class="shopping-header bg-dark d-none d-sm-none d-md-none d-lg-block">
    <div class="d-flex justify-content-between">
      <div class="country">
        <!-- placeholder for a future country/locale selector -->
      </div>
      <div class="shopping-cart bg-warning d-flex">
        <div class="p-4 align-self-center fs-1" [routerLink]="['/cart']">
          <div class="base">
            <span class="badge1">
              <!-- live cart item count from the component's items array -->
              <span class="badger">{{items.length}}</span>
              <i class="fas fa-shopping-bag" [routerLink]="['/cart']"></i>
            </span>
          </div>
        </div>
      </div>
    </div>
  </div>
  <!-- Desktop header (lg and up): logo, primary navigation, search box -->
  <div class="general-header bg-white d-none d-sm-none d-md-none d-lg-block">
    <div class="d-flex justify-content-between" >
      <div class="header-icon ps-3">
        <span class="fs-2 fw-bold" [routerLink]="['/']">
          <img class="img-fluid" src="assets/img/logo_white.png" alt="">
        </span>
      </div>
      <div class="d-flex
        align-self-center justify-content-between header-nav">
        <span class="header-nav-item">
          <a class=""
            routerLinkActive="router-link-active"
            [routerLink]="['/']"
            [routerLinkActiveOptions]="{ exact: true }"
          >
            Home
          </a>
        </span>
        <span class="header-nav-item">
          <a class="header-nav-item" [routerLink]="['/about']"
            routerLinkActive="router-link-active">
            About
          </a>
        </span>
        <span class="header-nav-item">
          <a class="" [routerLink]="['/shop']"
            routerLinkActive="router-link-active">
            Shop
          </a>
        </span>
        <span class="header-nav-item">
          <!-- "Services" opens an inline dropdown toggled by the component -->
          <a (click)="toggleAllServices()" class="">
            Services
          </a>
          <div class="header-nav-sub-menu p-3"
            [@fadeInOnEnter] *ngIf="all_services_show">
            <p class="sub-item">
              <a [routerLink]="['/services']" class="">All Services</a>
            </p>
            <p class="sub-item">
              <a [routerLink]="['/service/shopping-request']" class="">Personal Request</a>
            </p>
            <p class="sub-item">
              <a [routerLink]="['/service/gift-voucher']"
                class="">Gift Vouchers</a>
            </p>
            <p class="sub-item">
              <a [routerLink]="['/service/gift-box']" class="">Gift Box</a>
            </p>
            <p class="sub-item">
              <a [routerLink]="['/service/resale']" class="">Clothing Resale</a>
            </p>
          </div>
        </span>
        <span class="header-nav-item">
          <!-- NOTE(review): routerLink is 'contact' (relative); sibling links use
               absolute paths like '/about' — confirm this is intentional -->
          <a class="" [routerLink]="['contact']"
            routerLinkActive="router-link-active">
            Contact
          </a>
        </span>
      </div>
      <div class="searchbar d-flex align-self-center">
        <span class="searchbar-space">
          <input type="text" class="search-input">
        </span>
      </div>
    </div>
  </div>
<!-- ------------------ This is the mobile view -------------------------------- -->
<div class="shopping-header-mobile bg-white d-block d-sm-block d-md-none">
<div class="justify-content-between d-flex">
<div class="sidebar-button d-flex">
<div class="p-1 ps-4 align-self-center fs-1" (click)="sidebar_show=!sidebar_show">
<i *ngIf="!sidebar_show" class="fas fa-bars"></i>
<i *ngIf="sidebar_show" class="fas fa-times"></i>
</div>
</div>
<div class="header-icon d-flex">
<div class="p-1 align-self-center" [routerLink]="['/']">
<img class="img-fluid" src="assets/img/logo_white.png" alt="">
</div>
</div>
<div class="shopping-cart bg-warning d-flex">
<div class="p-4 align-self-center fs-1 ">
<span class="badge1" data-badge="27">
<i class="fas fa-shopping-bag" [routerLink]="['/cart']"></i>
</span>
</div>
</div>
</div>
</div>
<!-- ------------------ This is the mobile view -------------------------------- -->
</div>
<!-- Global notification/toast outlet rendered on every page -->
<div class="notification">
  <app-notification></app-notification>
</div>
<!-- Mobile slide-in sidebar; visibility driven by sidebar_show (hamburger in the
     mobile header), with enter/leave fade animations -->
<div class="sidebar d-block d-sm-block d-md-none" *ngIf="sidebar_show" [@fadeInOnEnter] [@fadeOutOnLeave]>
  <div class="sidebar-space">
    <div class="sidebar-item mb-4">
      <p [routerLink]="['/']" [routerLinkActiveOptions]="{ exact: true }"
        routerLinkActive="router-link-active" class="">Home</p>
    </div>
    <div class="sidebar-item mb-4">
      <p [routerLink]="['/about']" routerLinkActive="router-link-active" class="">About</p>
    </div>
    <div class="sidebar-item mb-4">
      <p [routerLink]="['/services']"
        routerLinkActive="router-link-active" class="">Services</p>
      <div class="sidebar-sub-item" [routerLink]="['/service/shopping-request']" >
        <p class="">Personal Request</p>
      </div>
      <div class="sidebar-sub-item" [routerLink]="['/service/gift-voucher']">
        <p class="">Gift Vouchers</p>
      </div>
      <div class="sidebar-sub-item" [routerLink]="['/service/gift-box']" >
        <p class="">Gift Box</p>
      </div>
      <div class="sidebar-sub-item" [routerLink]="['/service/resale']">
        <p class="">Clothing Resale</p>
      </div>
    </div>
    <div class="sidebar-item mb-4">
      <p [routerLink]="['/shop']" class="">Shop</p>
      <!-- Shop sub-categories kept for future use:
      <div class="sidebar-sub-item">
        <p class="">Clothing</p>
      </div>
      <div class="sidebar-sub-item">
        <p class="">Accessories</p>
      </div>
      <div class="sidebar-sub-item">
        <p class="">Sales</p>
      </div> -->
    </div>
    <div class="sidebar-item mb-4">
      <p [routerLink]="['contact']" class="">Contact Us</p>
    </div>
  </div>
</div>
|
# 0x0B. C - malloc, free
#### Automatic and dynamic allocation, malloc and free
The malloc function is used to allocate a certain amount of memory during the execution of a program. It will request a block of memory from the heap. If the request is granted, the operating system will reserve the requested amount of memory and malloc will return a pointer to the reserved space.
When the amount of memory is not needed anymore, you must return it to the operating system by calling the function free.
Automatic allocation
When you declare variables or when you use strings within double quotes, the program is taking care of all the memory allocation. You do not have to think about it.
~~~
julien@ubuntu:~/c/malloc$ head -n 14 cisfun.c
/**
* cisfun - function used for concept introduction
* @n1: number of projects
* @n2: number of tasks
*
* Return: nothing.
*/
void cisfun(unsigned int n1, unsigned int n2)
{
int n;
char c;
int *ptr;
char array[3];
}
julien@ubuntu:~/c/malloc$
~~~
In the above example, the arguments and the local variables are stored automatically in memory when the function is called. The program reserves space and uses it without you having to think about it.
By default, the memory used to store those variables can be read and written. When the program leaves the function, the memory used for all the above variables is released for future use.
### Resources
##### Read or watch:
- 0x0a - malloc & free - quick overview.pdf
- Dynamic memory allocation in C - malloc calloc realloc free (stop at 6:50)
### Learning Objectives
At the end of this project, you are expected to be able to explain to anyone, without the help of Google:
**General**
- What is the difference between automatic and dynamic allocation
- What is malloc and free and how to use them
- Why and when use malloc
- How to use valgrind to check for memory leak
#### Requirements
##### General
- Allowed editors: vi, vim, emacs
- All your files will be compiled on Ubuntu 20.04 LTS using gcc, using the options ```-Wall -Werror -Wextra -pedantic -std=gnu89```
- All your files should end with a new line
- A ```README.md``` file, at the root of the folder of the project is mandatory
- Your code should use the ```Betty``` style. It will be checked using ```betty-style.pl``` and ```betty-doc.pl```
- The only C standard library functions allowed are ```malloc``` and ```free```. Any use of functions like ```printf```, ```puts```, ```calloc```, ```realloc``` etc… is forbidden
- You are not allowed to use global variables
- No more than 5 functions per file
- The prototypes of all your functions and the prototype of the function ```_putchar``` should be included in your header file called main.h
- Don’t forget to push your header file
- You are allowed to use the standard library
#### Task 0 - Float like a butterfly, sting like a bee
Write a function that creates an array of chars, and initializes it with a specific char.
- Prototype: char *create_array(unsigned int size, char c);
- Returns NULL if size = 0
- Returns a pointer to the array, or NULL if it fails
~~~
julien@ubuntu:~/0x0a. malloc, free$ cat 0-main.c
#include "main.h"
#include <stdio.h>
#include <stdlib.h>
/**
* simple_print_buffer - prints buffer in hexa
* @buffer: the address of memory to print
* @size: the size of the memory to print
*
* Return: Nothing.
*/
void simple_print_buffer(char *buffer, unsigned int size)
{
unsigned int i;
i = 0;
while (i < size)
{
if (i % 10)
{
printf(" ");
}
if (!(i % 10) && i)
{
printf("\n");
}
printf("0x%02x", buffer[i]);
i++;
}
printf("\n");
}
/**
* main - check the code for ALX School students.
*
* Return: Always 0.
*/
int main(void)
{
char *buffer;
buffer = create_array(98, 'H');
if (buffer == NULL)
{
printf("failed to allocate memory\n");
return (1);
}
simple_print_buffer(buffer, 98);
free(buffer);
return (0);
}
julien@ubuntu:~/0x0a. malloc, free$ gcc -Wall -pedantic -Werror -Wextra -std=gnu89 0-main.c 0-create_array.c -o a
julien@ubuntu:~/0x0a. malloc, free$ ./a
0x48 0x48 0x48 0x48 0x48 0x48 0x48 0x48 0x48 0x48
0x48 0x48 0x48 0x48 0x48 0x48 0x48 0x48 0x48 0x48
0x48 0x48 0x48 0x48 0x48 0x48 0x48 0x48 0x48 0x48
0x48 0x48 0x48 0x48 0x48 0x48 0x48 0x48 0x48 0x48
0x48 0x48 0x48 0x48 0x48 0x48 0x48 0x48 0x48 0x48
0x48 0x48 0x48 0x48 0x48 0x48 0x48 0x48 0x48 0x48
0x48 0x48 0x48 0x48 0x48 0x48 0x48 0x48 0x48 0x48
0x48 0x48 0x48 0x48 0x48 0x48 0x48 0x48 0x48 0x48
0x48 0x48 0x48 0x48 0x48 0x48 0x48 0x48 0x48 0x48
0x48 0x48 0x48 0x48 0x48 0x48 0x48 0x48
julien@ubuntu:~/0x0a. malloc, free$
~~~
#### Task 1 - The woman who has no imagination has no wings
Write a function that returns a pointer to a newly allocated space in memory, which contains a copy of the string given as a parameter.
- Prototype: ```char *_strdup(char *str);```
- The ```_strdup()``` function returns a pointer to a new string which is a duplicate of the string str. Memory for the new string is obtained with malloc, and can be freed with free.
- Returns NULL if ```str = NULL```
- On success, the ```_strdup function``` returns a pointer to the duplicated string. It returns NULL if insufficient memory was available
- FYI: The standard library provides a similar function: strdup. Run man strdup to learn more.
~~~
julien@ubuntu:~/0x0a. malloc, free$ cat 1-main.c
#include "main.h"
#include <stdio.h>
#include <stdlib.h>
/**
* main - check the code for ALX School students.
*
* Return: Always 0.
*/
int main(void)
{
char *s;
s = _strdup("ALX SE");
if (s == NULL)
{
printf("failed to allocate memory\n");
return (1);
}
printf("%s\n", s);
free(s);
return (0);
}
julien@ubuntu:~/0x0a. malloc, free$ gcc -Wall -pedantic -Werror -Wextra -std=gnu89 1-main.c 1-strdup.c -o s
julien@ubuntu:~/0x0a. malloc, free$ ./s
ALX SE
julien@ubuntu:~/0x0a. malloc, free$
~~~
#### Task 2 - He who is not courageous enough to take risks will accomplish nothing in life
Write a function that concatenates two strings.
- Prototype: char *str_concat(char *s1, char *s2);
- The returned pointer should point to a newly allocated space in memory which contains the contents of s1, followed by the contents of s2, and null terminated
- if NULL is passed, treat it as an empty string
- The function should return NULL on failure
~~~
julien@ubuntu:~/0x0a. malloc, free$ cat 2-main.c
#include "main.h"
#include <stdio.h>
#include <stdlib.h>
/**
* main - check the code for ALX School students.
*
* Return: Always 0.
*/
int main(void)
{
char *s;
s = str_concat("Betty ", "Holberton");
if (s == NULL)
{
printf("failed\n");
return (1);
}
printf("%s\n", s);
free(s);
return (0);
}
julien@ubuntu:~/0x0a. malloc, free$ gcc -Wall -pedantic -Werror -Wextra -std=gnu89 2-main.c 2-str_concat.c -o c
julien@ubuntu:~/c/curriculum_by_julien/holbertonschool-low_level_programming/0x0a. malloc, free$ ./c | cat -e
Betty Holberton$
julien@ubuntu:~/c/curriculum_by_julien/holbertonschool-low_level_programming/0x0a. malloc, free$
~~~
#### Task 3 - If you even dream of beating me you'd better wake up and apologize
Write a function that returns a pointer to a 2 dimensional array of integers.
- Prototype: int **alloc_grid(int width, int height);
- Each element of the grid should be initialized to 0
- The function should return NULL on failure
- If width or height is 0 or negative, return NULL
~~~
julien@ubuntu:~/0x0a. malloc, free$ cat 3-main.c
#include "main.h"
#include <stdio.h>
#include <stdlib.h>
/**
* print_grid - prints a grid of integers
* @grid: the address of the two dimensional grid
* @width: width of the grid
* @height: height of the grid
*
* Return: Nothing.
*/
void print_grid(int **grid, int width, int height)
{
int w;
int h;
h = 0;
while (h < height)
{
w = 0;
while (w < width)
{
printf("%d ", grid[h][w]);
w++;
}
printf("\n");
h++;
}
}
/**
* main - check the code for ALX School students.
*
* Return: Always 0.
*/
int main(void)
{
int **grid;
grid = alloc_grid(6, 4);
if (grid == NULL)
{
return (1);
}
print_grid(grid, 6, 4);
printf("\n");
grid[0][3] = 98;
grid[3][4] = 402;
print_grid(grid, 6, 4);
return (0);
}
julien@ubuntu:~/0x0a. malloc, free$ gcc -Wall -pedantic -Werror -Wextra -std=gnu89 3-main.c 3-alloc_grid.c -o g
julien@ubuntu:~/0x0a. malloc, free$ ./g
0 0 0 0 0 0
0 0 0 0 0 0
0 0 0 0 0 0
0 0 0 0 0 0
0 0 0 98 0 0
0 0 0 0 0 0
0 0 0 0 0 0
0 0 0 0 402 0
julien@ubuntu:~/0x0a. malloc, free$
~~~
#### Task 4 - It's not bragging if you can back it up
Write a function that frees a 2 dimensional grid previously created by your alloc_grid function.
- Prototype: ```void free_grid(int **grid, int height);```
- Note that we will compile with your alloc_grid.c file. Make sure it compiles.
~~~
julien@ubuntu:~/0x0a. malloc, free$ cat 4-main.c
#include "main.h"
#include <stdio.h>
#include <stdlib.h>
/**
* print_grid - prints a grid of integers
* @grid: the address of the two dimensional grid
* @width: width of the grid
* @height: height of the grid
*
* Return: Nothing.
*/
void print_grid(int **grid, int width, int height)
{
int w;
int h;
h = 0;
while (h < height)
{
w = 0;
while (w < width)
{
printf("%d ", grid[h][w]);
w++;
}
printf("\n");
h++;
}
}
/**
* main - check the code for ALX School students.
*
* Return: Always 0.
*/
int main(void)
{
int **grid;
grid = alloc_grid(6, 4);
if (grid == NULL)
{
return (1);
}
print_grid(grid, 6, 4);
printf("\n");
grid[0][3] = 98;
grid[3][4] = 402;
print_grid(grid, 6, 4);
free_grid(grid, 4);
return (0);
}
julien@ubuntu:~/0x0a. malloc, free$ gcc -Wall -pedantic -Werror -Wextra -std=gnu89 4-main.c 3-alloc_grid.c 4-free_grid.c -o f
julien@ubuntu:~/0x0a. malloc, free$ valgrind ./f
==5013== Memcheck, a memory error detector
==5013== Copyright (C) 2002-2015, and GNU GPL'd, by Julian Seward et al.
==5013== Using Valgrind-3.11.0 and LibVEX; rerun with -h for copyright info
==5013== Command: ./f
==5013==
0 0 0 0 0 0
0 0 0 0 0 0
0 0 0 0 0 0
0 0 0 0 0 0
0 0 0 98 0 0
0 0 0 0 0 0
0 0 0 0 0 0
0 0 0 0 402 0
==5013==
==5013== HEAP SUMMARY:
==5013== in use at exit: 0 bytes in 0 blocks
==5013== total heap usage: 6 allocs, 6 frees, 1,248 bytes allocated
==5013==
==5013== All heap blocks were freed -- no leaks are possible
==5013==
==5013== For counts of detected and suppressed errors, rerun with: -v
==5013== ERROR SUMMARY: 0 errors from 0 contexts (suppressed: 0 from 0)
julien@ubuntu:~/0x0a. malloc, free$
~~~
|
import React from 'react';
import { useUserAuth } from "../_utils/auth-context"; // Adjust the path as necessary
import Link from 'next/link';
const HomePage = () => {
const { user, gitHubSignIn, firebaseSignOut } = useUserAuth();
const handleSignIn = async () => {
try {
await gitHubSignIn();
} catch (error) {
console.error("Error signing in with GitHub:", error);
}
};
const handleSignOut = async () => {
try {
await firebaseSignOut();
} catch (error) {
console.error("Error signing out:", error);
}
};
return (
<div>
{user ? (
<>
<p>Welcome, {user.displayName} ({user.email})</p>
<button onClick={handleSignOut}>Logout</button>
<Link href="/week8/shopping-list">
<a>Go to Shopping List</a>
</Link>
</>
) : (
<button onClick={handleSignIn}>Login with GitHub</button>
)}
</div>
);
};
export default HomePage;
|
# Authorization Extension tools
This CLI program takes data from the Authorization Extension (groups, roles, permissions), combines it with data obtained from the tenant (users, applications) and generates different reports based on the data.
## Prerequisites
1. **NodeJS**. Get the LTS version from https://nodejs.org/en/download
## Setup
1. Clone the repository to a local folder using `git` or download a copy directly from GitHub.
```
git clone https://github.com/nicosabena/authorization-extension-tool
```
2. Install the required packages:
```
cd authorization-extension-tool
npm install
```
3. Make the tool (a CLI named `ae-tool`) accessible globally:
```
npm install -g
```
4. The command line tool can now be accessed using `ae-tool`. E.g. to obtain help:
```
ae-tool help
```
## Download data from the tenant
The tool relies on data exported from the tenant that has the Authorization Extension installed to analyze it. To obtain this data automatically:
1. Create a M2M application in the tenant where the Authorization Extension is installed, and assign permissions for:
1. Auth0 Management API: authorize the scopes `read:users` and `read:applications` so that the code can read users and applications.
2. `auth0-authorization-extension-api`: authorize the scope `read:configuration` so that the code can read the full configuration for the Authorization Extension, and `update:groups` if you want to delete inactive groups.
2. In a working folder (e.g. `~/ae-tool-working-folder/{tenant}`) create a `.env` file (using `.env.example` as a template) and configure all the values.
For `CLIENT_ID` and `CLIENT_SECRET`, use the values for the application created in step #1.
To download the data from the tenant into the working folder:
```
ae-tool download
```
The download will generate three files:
- `users.ndjson`
- `authorization_extension.json`
- `applications.json`
These three files will be the input to generate the reports
## Generating reports
To generate a report, type:
```
ae-tool report <report-name> [--flat] [--group <group-name>] [--json]
```
where `<report-name>` can be one of the following:
- `roles-without-permissions`: provides a list of roles that have no permissions associated.
- `empty-groups`: provides a list of groups that have no members and no nested groups.
- `groups-without-roles`: provides a list of groups that don't have any roles assigned, either directly or through nested groups
- `permissions`: provides a list of permissions
- `roles`: a list of all roles in the system, with permissions for each (*)
- `groups`: a list of groups in the system
- `groups-and-roles`: a list of groups, with roles assigned to them (*)
- `groups-and-members`: a list of groups with all the members in it (*)
- `users`: a list of users in the system
- `groups-with-users-not-found`: a list of groups with users that are not present in the users export.
- `groups-with-inactive-members`: a list of groups with users that have not logged in since the `--cutoff` date.
The `--cutoff` option needs to be provided in the `yyyy-mm-dd` format.
- `groups-and-emails`: a list of user emails in the group. Use combined with `--group` to specify the group name to filter.
> (*) For reports where there's a one-to-many relationship (e.g. "roles" lists permissions for each role) use `--flat` to
> generate one row per combination.
You can get a list of report types by typing:
```
ae-tool report --help
```
Since all reports are in `.csv` format, it will usually make sense to send the output to a `.csv` file:
```
ae-tool report users > users.csv
ae-tool report empty-groups > empty-groups.csv
ae-tool report groups > groups.csv
ae-tool report roles --flat >roles-and-permissions-flat.csv
ae-tool report roles > roles-and-permissions.csv
ae-tool report roles-without-permissions > roles-without-permissions.csv
ae-tool report groups > groups.csv
ae-tool report groups-and-members --flat >groups-and-members.csv
ae-tool report groups-with-users-not-found --flat >groups-with-users-not-found.csv
ae-tool report groups-and-roles --flat >groups-and-roles-flat.csv
ae-tool report groups-and-roles >groups-and-roles.csv
ae-tool report permissions >permissions.csv
ae-tool report groups-with-inactive-members --flat --cutoff 2023-10-01 >groups-with-inactive-members-since-2023-10-01-flat.csv
ae-tool report nested-groups --flat >nested-groups-flat.csv
ae-tool report groups-and-emails --group "My user group"
```
# Other commands
## Copy group members
Copy all group members from one group to another:
```
ae-tool copy-group-members <source-group> <target-group> [--yes]
```
If your group name has spaces, use double quotes. E.g.:
```
ae-tool copy-group-members "my source group" "my target group"
```
`--yes` skips the confirmation prompt.
## Remove all group members
Remove all members from a group:
```
ae-tool remove-all-group-members <group-name> [--yes]
```
If your group name has spaces, use double quotes. E.g.:
```
ae-tool remove-all-group-members "my group"
```
`--yes` skips the confirmation prompt.
## Add a group mapping
Create a new mapping for a group membership from an external connection.
```
ae-tool add-group-mapping <source-group> <connection-name> <external-group-name> [--yes]
```
Adds a mapping so that if a user logs in with `<connection-name>` and the `groups` claim/attribute includes `<external-group-name>`,
then the user is also considered a member of `source-group`.
If your group name has spaces, use double quotes. E.g.:
```
ae-tool add-group-mapping "my group" enterprise-idp "Managers and Directors"
```
`--yes` skips the confirmation prompt.
|
using LinearAlgebra;
using FFTW;
using SparseArrays;
using Random, Distributions;
include("mapping.jl")
include("Mtgeneration.jl")
include("Mperptz.jl")
# compute function value, gradient, Newton direction

# compute function value
# @param t      The parameter of the central path in the interior point method (scalar)
# @param beta   (a 1-dimensional vector)
# @param c      (a 1-dimensional vector)
#               (beta, c) is feasible
# @param paramf A tuple of 6 entries: (dim, size, M_perptz, d, idx_missing, Mt)
#               dim          dimension of the signal
#               size         problem size per dimension
#               M_perptz     M_{\perp}^{\top}*z
#               d            The constraint on the l1 norm of beta (scalar)
#               idx_missing  indices of the missing entries
#               Mt           M^{\top}: the transpose of M
#               (this function only reads entries 6, 3 and 4)
# @details Compute the function value of \phi at (t, beta, c)
# @return fval Function value of \phi (scalar)
# @example
# >t = 1; dim = 1; size1 = 10; d = 5;
# >beta = zeros(size1);
# >c = (d/(2*size1)).*ones(size1);
# >missing_idx = [2; 6];
# >z = randn(size1); z_zero = z; z_zero[missing_idx].= 0;
# >Mt = generate_Mt(dim, size1, missing_idx);
# >M_perptz = M_perp_tz(z_zero, dim, size1);
# >paramf = (dim, size1, M_perptz, d, missing_idx, Mt);
# >fval(t, beta, c, paramf)
function fval(t, beta, c, paramf)
    Mt = paramf[6];
    M_perptz = paramf[3];
    d = paramf[4];
    # compute l, u, h, g: barrier terms, all strictly negative while feasible
    l, u, h, g = auxiliary_func(beta, c, d);
    # t * (quadratic data-fit term) + log-barrier of the inequality constraints
    fval = t * sum((beta' * beta).- (Mt' * beta)'*(Mt' * beta).-((M_perptz'*beta).*2)) - sum(log.((-1).*l)) - sum(log.((-1).*u)) - sum(log.((-1).*h)) - log((-1)*g);
    return fval
end
# Matrix-free variant of fval: applies M_perp implicitly through M_perp_beta
# instead of materializing Mt, so it avoids the dense matrix products.
# Same contract as fval; reads paramf entries 1-5 (dim, size, M_perptz, d, idx_missing).
function fval2(t, beta, c, paramf)
    dim = paramf[1];
    size = paramf[2];  # NOTE: shadows Base.size inside this function
    M_perptz = paramf[3];
    d = paramf[4];
    idx_missing = paramf[5];
    # compute l, u, h, g
    l, u, h, g = auxiliary_func(beta, c, d);
    fval = t * sum((sum((M_perp_beta(dim, size, beta, idx_missing)).^2)).-((M_perptz'*beta).*2)) - sum(log.((-1).*l)) - sum(log.((-1).*u)) - sum(log.((-1).*h)) - log((-1)*g);
    return fval
end
# compute gradient
# @param t      The parameter of the central path in the interior point method (scalar)
# @param beta   (a 1-dimensional vector)
# @param c      (a 1-dimensional vector)
#               (beta, c) is feasible
# @param paramf A tuple of 6 entries: (dim, size, M_perptz, d, idx_missing, Mt)
#               (this function only reads entries 6, 3 and 4 — see fval)
# @details Compute gradient of \phi on beta and c respectively at (t, beta, c)
# @return gbeta, gc Gradient of \phi on beta and c respectively
#                   (2 vectors each with length of problem size)
# @example
# >t = 1; dim = 1; size1 = 10; d = 5;
# >beta = zeros(size1);
# >c = (d/(2*size1)).*ones(size1);
# >missing_idx = [2; 6];
# >z = randn(size1); z_zero = z; z_zero[missing_idx].= 0;
# >Mt = generate_Mt(dim, size1, missing_idx);
# >M_perptz = M_perp_tz(z_zero, dim, size1);
# >paramf = (dim, size1, M_perptz, d, missing_idx, Mt);
# >gbeta, gc = fgrad(t, beta, c, paramf);
function fgrad(t, beta, c, paramf)
    Mt = paramf[6];
    M_perptz = paramf[3];
    d = paramf[4];
    l, u, h, g = auxiliary_func(beta, c, d);
    n = length(l);
    # gradient of the data-fit term plus the barrier derivatives
    gbeta = ((2*t).*beta).- ((2*t).*Mt*(Mt'*beta)).- ((2*t).*M_perptz).+ (inv.(l)).- (inv.(u));
    gc = (inv.(l)).+ (inv.(u)).+ (inv.(h)).- ((1/g).*ones(n));
    return gbeta, gc
end
# Matrix-free variant of fgrad: uses M_perpt_M_perp_vec to apply
# M_perp^T M_perp implicitly instead of the explicit Mt products.
# Reads paramf entries 1-5 (dim, size, M_perptz, d, idx_missing).
function fgrad2(t, beta, c, paramf)
    dim = paramf[1];
    size = paramf[2];  # NOTE: shadows Base.size inside this function
    M_perptz = paramf[3];
    d = paramf[4];
    idx_missing = paramf[5];
    #Mt = paramf[6];
    #M_perpt = paramf[7];
    l, u, h, g = auxiliary_func(beta, c, d);
    n = length(l);
    gbeta = ((2*t).*M_perpt_M_perp_vec(dim, size, beta, idx_missing)).- ((2*t).*M_perptz).+ (inv.(l)).- (inv.(u));
    gc = (inv.(l)).+ (inv.(u)).+ (inv.(h)).- ((1/g).*ones(n));
    # debug checks against the explicit-matrix formulation, kept for reference:
    #println(norm(M_perpt*M_perpt'*beta - M_perpt_M_perp_vec(dim, size, beta, idx_missing)))
    #println(norm(beta - Mt*Mt'*beta - M_perpt_M_perp_vec(dim, size, beta, idx_missing)))
    return gbeta, gc
end
# compute Newton direction
# @param t      The parameter of the central path in the interior point method (scalar)
# @param beta   (a 1-dimensional vector)
# @param c      (a 1-dimensional vector)
#               (beta, c) is feasible
# @param gradb  Gradient of \phi on beta (a 1-dimensional vector)
# @param gradc  Gradient of \phi on c (a 1-dimensional vector)
# @param paramf A tuple of 6 entries: (dim, size, M_perptz, d, idx_missing, Mt)
#               (this function only reads entries 6 and 4 — see fval)
# @details Compute the Newton direction of \phi on beta and c at (t, beta, c)
#          by forming the blockwise inverse of the diagonal part of the
#          Hessian analytically and solving the remaining m x m system
#          (m = number of columns of Mt) with a Cholesky factorization.
# @return delta_beta, delta_c The Newton direction of \phi on beta and c
#                             (2 vectors each with length of problem size)
# @example
# >paramf = (dim, size1, M_perptz, d, missing_idx, Mt);
# >gbeta, gc = fgrad(t, beta, c, paramf);
# >delta_beta, delta_c = NT_direction(t, beta, c, gbeta, gc, paramf);
function NT_direction(t, beta, c, gradb, gradc, paramf)
    Mt = paramf[6];
    d = paramf[4];
    l, u, h, g = auxiliary_func(beta, c, d);
    Mt = Float64.(Mt);
    (n,m) = size(Mt);
    # diagonals of the 2x2 block Hessian: a = H11 diag, b = H12 diag, d = H22 diag
    a = ((2t).*ones(n)).+ (inv.(l.^2)).+ (inv.(u.^2));
    b = (inv.(l.^2)).- (inv.(u.^2));
    # NOTE: `d` is reassigned here — the l1-bound read above is no longer needed
    d = (inv.(l.^2)).+ (inv.(u.^2)).+ (inv.(h.^2));
    # blockwise inverse of the diagonal part via the Schur complement of a
    l22_tilde_inv = inv.(d.- (inv.(a)).*(b.^2));
    l11_tilde_inv = (inv.(a)).+ (((inv.(a)).*b).^2).*l22_tilde_inv;
    l12_tilde_inv = (-1).*(inv.(a)).*b.*l22_tilde_inv;
    # apply the inverse (with the rank-one 1/g^2 correction) to the gradient and to Mt
    L11_invgb_L12inv_gc = (l11_tilde_inv.*gradb).+ (l12_tilde_inv.*gradc).- ((1/(g^2+sum(l22_tilde_inv))).*((l12_tilde_inv'*gradb).+ (l22_tilde_inv'*gradc)).*l12_tilde_inv);
    L21_invgb_L22inv_gc = (l12_tilde_inv.*gradb).+ (l22_tilde_inv.*gradc).- ((1/(g^2+sum(l22_tilde_inv))).*((l12_tilde_inv'*gradb).+ (l22_tilde_inv'*gradc)).*l22_tilde_inv);
    L11_invMt = (l11_tilde_inv.*Mt).- ((1/(g^2+sum(l22_tilde_inv))).*(l12_tilde_inv).*(l12_tilde_inv'*Mt));
    L21_invMt = (l12_tilde_inv.*Mt).- ((1/(g^2+sum(l22_tilde_inv))).*(l22_tilde_inv).*(l12_tilde_inv'*Mt));
    # small m x m system solved by Cholesky (check=false: caller trusts feasibility)
    block = Symmetric(Matrix(I, m, m).- ((2t).*(Mt'*L11_invMt)));
    temp = cholesky(block, check=false)\(Mt'*L11_invgb_L12inv_gc);
    delta_beta = (-1).*(L11_invgb_L12inv_gc.+ (2t).*(L11_invMt*temp));
    delta_c = (-1).*(L21_invgb_L22inv_gc.+ (2t).*(L21_invMt*temp));
    return delta_beta, delta_c
end
# Apply the inverse of the 2x2 diagonal-block preconditioner L to r, where
# L = [Diag(l11) Diag(l12); Diag(l12) Diag(l22)] with the barrier-based
# diagonals below. Returns L \ r as a single stacked vector of length 2n.
function L_inv_r(t, l, u, h, r)
    n = length(l)
    rb, rc = r[1:n], r[n+1:2n]
    il2 = inv.(l .^ 2)
    iu2 = inv.(u .^ 2)
    ih2 = inv.(h .^ 2)
    a = il2 .+ iu2 .+ 2t        # diagonal of L11
    b = il2 .- iu2              # diagonal of L12 (= L21)
    c = il2 .+ iu2 .+ ih2       # diagonal of L22
    # entrywise blockwise inverse via the Schur complement of a
    s = inv.(c .- b .^ 2 ./ a)      # (L^-1)22 diagonal
    off = -(b ./ a) .* s            # (L^-1)12 diagonal
    top = inv.(a) .+ (b ./ a) .^ 2 .* s  # (L^-1)11 diagonal
    return [top .* rb .+ off .* rc; off .* rb .+ s .* rc]
end
# Hessian-vector product H * p without forming H explicitly. H is the 2x2
# block Hessian of \phi with diagonal barrier blocks, the -2t*Mt*Mt' data-fit
# term in the (1,1) block, and the rank-one (1/g^2) * ones*ones' term in (2,2).
function Hessian_vec(t, l, u, h, g, Mt, p)
    n = length(l)
    pb = p[1:n]
    pc = p[n+1:2n]
    il2, iu2, ih2 = inv.(l .^ 2), inv.(u .^ 2), inv.(h .^ 2)
    d11 = il2 .+ iu2 .+ 2t
    d12 = il2 .- iu2
    d22 = il2 .+ iu2 .+ ih2
    top = d11 .* pb .- 2t .* (Mt * (Mt' * pb)) .+ d12 .* pc
    bottom = d12 .* pb .+ d22 .* pc .+ fill(sum(pc) / g^2, n)
    return [top; bottom]
end
# Matrix-free companion of Hessian_vec: the (1,1) data-fit block applies
# 2t * M_perp^T M_perp through M_perpt_M_perp_vec instead of Mt products,
# so the 2t identity shift is folded into that implicit operator.
function Hessian_vec2(t, l, u, h, g, dim, size, idx_missing, p)
    n = length(l)
    pb = p[1:n]
    pc = p[n+1:2n]
    il2, iu2, ih2 = inv.(l .^ 2), inv.(u .^ 2), inv.(h .^ 2)
    d11 = il2 .+ iu2
    d12 = il2 .- iu2
    d22 = il2 .+ iu2 .+ ih2
    top = d11 .* pb .+ 2t .* M_perpt_M_perp_vec(dim, size, pb, idx_missing) .+ d12 .* pc
    bottom = d12 .* pb .+ d22 .* pc .+ fill(sum(pc) / g^2, n)
    return [top; bottom]
end
# Preconditioned conjugate gradient solve of H * x = -[gradb; gradc] for the
# Newton step, using Hessian-vector products (Hessian_vec) and the blockwise
# preconditioner L_inv_r. Returns (delta_beta, delta_c, iter).
#
# @param t      central-path parameter (scalar)
# @param beta   current beta iterate (sizes the system; barrier terms come
#               from auxiliary_func(beta, c, d))
# @param c      current c iterate
# @param gradb  gradient of \phi w.r.t. beta
# @param gradc  gradient of \phi w.r.t. c
# @param paramf 6-tuple (dim, size, M_perptz, d, idx_missing, Mt); this
#               variant reads Mt = paramf[6] and d = paramf[4]
# @param CG_esp residual tolerance. NOTE(review): the default 10e-6 equals
#               1e-5 — confirm 1e-6 was not intended.
function CG(t, beta, c, gradb, gradc, paramf, CG_esp = 10e-6)
    Mt = paramf[6];
    d = paramf[4];
    l, u, h, g = auxiliary_func(beta, c, d);
    Mt = Float64.(Mt);
    n = length(beta);
    b = [gradb; gradc].*(-1);
    x0 = zeros(2*n);
    r0 = Hessian_vec(t, l, u, h, g, Mt, x0).-b;
    y0 = L_inv_r(t, l, u, h, r0);
    p0 = y0.*(-1);
    iter = 0
    while(norm(r0)>CG_esp)
        iter = iter + 1;
        # Evaluate the Hessian-vector product once per iteration; the original
        # computed it twice (for alpha and for the residual update). Matches CG2.
        Hes_p = Hessian_vec(t, l, u, h, g, Mt, p0);
        alpha = (r0'*y0)/(p0'*Hes_p);
        x1 = x0.+(p0.*alpha);
        r1 = r0.+(Hes_p.*alpha);
        y1 = L_inv_r(t, l, u, h, r1);
        # CG update coefficient (previously stored in `beta`, shadowing the argument)
        cg_beta = (r1'*y1)/(r0'*y0);
        p0 = (p0.*cg_beta).-y1;
        r0 = r1;
        y0 = y1;
        x0 = x1;
    end
    return x0[1:n], x0[(n+1):(2*n)], iter
end
# Matrix-free variant of CG: uses Hessian_vec2 (implicit M_perp^T M_perp)
# instead of explicit Mt products. Reads paramf entries 1, 2, 4, 5.
# Returns (delta_beta, delta_c, iter).
# NOTE(review): the default tolerance 10e-6 equals 1e-5 — confirm 1e-6 was
# not intended.
function CG2(t, beta, c, gradb, gradc, paramf, CG_esp = 10e-6)
    dim = paramf[1];
    size = paramf[2];  # NOTE: shadows Base.size inside this function
    d = paramf[4];
    idx_missing = paramf[5];
    #Mt = paramf[6];
    #Mt = Float64.(Mt);
    l, u, h, g = auxiliary_func(beta, c, d);
    n = length(beta);
    b = [gradb; gradc].*(-1);
    x0 = zeros(2*n);
    r0 = Hessian_vec2(t, l, u, h, g, dim, size, idx_missing, x0).-b;
    y0 = L_inv_r(t, l, u, h, r0);
    p0 = y0.*(-1);
    iter = 0
    while(norm(r0)>CG_esp)
        iter = iter + 1;
        # one Hessian-vector product per iteration, reused for alpha and r1
        Hes_p = Hessian_vec2(t, l, u, h, g, dim, size, idx_missing, p0);
        alpha = (r0'*y0)/(p0'*Hes_p);
        #println("t=", t, " norm =", norm(Hessian_vec2(t, l, u, h, g, dim, size, idx_missing, p0)-Hessian_vec(t, l, u, h, g, Mt, p0)))
        x1 = x0.+(p0.*alpha);
        r1 = r0.+(Hes_p.*alpha);
        y1 = L_inv_r(t, l, u, h, r1);
        # CG update coefficient; reuses (and clobbers) the name `beta`
        beta = (r1'*y1)/(r0'*y0);
        p0 = (p0.*beta).-y1;
        r0 = r1;
        y0 = y1;
        x0 = x1;
    end
    return x0[1:n], x0[(n+1):(2*n)], iter
end
# Dense reference implementation of preconditioned CG: builds the full
# Hessian H and preconditioner L explicitly and inverts L each iteration.
# O(n^3) per call — presumably a debugging/validation variant of CG; the
# matrix-free CG above is the production path. Returns (x_beta, x_c).
# NOTE(review): paramf layout here is Mt = paramf[1], d = paramf[3], which
# differs from CG/CG2 (indices 6 and 4) — confirm callers pass the right tuple.
function CG_precond(t, beta, c, gradb, gradc, paramf, CG_esp = 10e-6)
Mt = paramf[1];
d = paramf[3];
l, u, h, g = auxiliary_func(beta, c, d);
Mt = Float64.(Mt);
n = length(beta);
# Right-hand side is the negated stacked gradient.
b = [gradb; gradc].*(-1);
# Assemble the dense 2n-by-2n Hessian block by block.
H11 = diagm((inv.(l.^2)).+(inv.(u.^2))).+((2*t).*Matrix(I, n, n)).-((Mt*Mt').*(2*t));
H12 = diagm((inv.(l.^2)).-(inv.(u.^2)));
H22 = diagm((inv.(l.^2)).+(inv.(u.^2)).+(inv.(h.^2))).+(ones(n, n)/g^2);
H = [H11 H12; H12 H22];
# Assemble the dense diagonal-block preconditioner L.
l11 = (inv.(l.^2)).+(inv.(u.^2)).+(2*t);
l12 = (inv.(l.^2)).-(inv.(u.^2));
l22 = (inv.(l.^2)).+(inv.(u.^2)).+(inv.(h.^2));
L = [diagm(l11) diagm(l12); diagm(l12) diagm(l22)];
x0 = zeros(2*n);
r0 = H*x0-b;
# NOTE(review): inv(L) is recomputed every iteration below — expensive but
# tolerable for a reference implementation.
y0 = inv(L)*r0;
p0 = y0.*(-1);
while(norm(r0)>CG_esp)
println("norm:", norm(r0))
alpha = (r0'*y0)/(p0'*H*p0);
x1 = x0.+(p0.*alpha);
r1 = r0.+((H*p0).*alpha);
y1 = inv(L)*r1;
# CG beta ratio; shadows the `beta` argument (already consumed above).
beta = (r1'*y1)/(r0'*y0);
p0 = (p0.*beta).-y1;
r0 = r1;
y0 = y1;
x0 = x1;
end
return x0[1:n], x0[(n+1):(2*n)]
end
# Dense UNpreconditioned CG reference implementation: builds the full Hessian
# explicitly and iterates on the raw residual. Returns (x_beta, x_c).
# NOTE(review): same paramf layout as CG_precond (Mt = paramf[1], d = paramf[3]).
function CG_unprecond(t, beta, c, gradb, gradc, paramf, CG_esp = 10e-6)
Mt = paramf[1];
d = paramf[3];
l, u, h, g = auxiliary_func(beta, c, d);
Mt = Float64.(Mt);
n = length(beta);
# Right-hand side is the negated stacked gradient.
b = [gradb; gradc].*(-1);
# Assemble the dense 2n-by-2n Hessian block by block.
H11 = diagm((inv.(l.^2)).+(inv.(u.^2))).+((2*t).*Matrix(I, n, n)).-((Mt*Mt').*(2*t));
H12 = diagm((inv.(l.^2)).-(inv.(u.^2)));
H22 = diagm((inv.(l.^2)).+(inv.(u.^2)).+(inv.(h.^2))).+(ones(n, n)./(g^2));
H = [H11 H12; H12 H22];
x0 = zeros(2*n);
r0 = H*x0-b;
p0 = r0.*(-1);
while(norm(r0)>CG_esp)
println("norm:", norm(r0))
alpha = (r0'*r0)/(p0'*H*p0);
x1 = x0.+(p0.*alpha);
r1 = r0.+((H*p0).*alpha);
# CG beta ratio; shadows the `beta` argument (already consumed above).
beta = (r1'*r1)/(r0'*r0);
p0 = (p0.*beta).-r1;
r0 = r1;
x0 = x1;
end
return x0[1:n], x0[(n+1):(2*n)]
end
# compute l, u, h, g
function auxiliary_func(beta, c, d)
    # Lower slack, upper slack, slack on c, and the scalar budget gap.
    lower = -beta .- c
    upper = beta .- c
    slack = -c
    gap = sum(c) - d
    return (lower, upper, slack, gap)
end
|
/*
* MIT License
*
* Copyright (c) 2022 Fairy Project
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package io.fairyproject.mc.version;
import io.fairyproject.mc.version.impl.MCVersionImpl;
/**
 * A Minecraft version triple (major.minor.patch) with comparison helpers.
 */
public interface MCVersion extends Comparable<MCVersion> {
/** Creates a version from explicit major, minor and patch components. */
static MCVersion of(int major, int minor, int patch) {
return new MCVersionImpl(major, minor, patch);
}
/** Creates a 1.x.y version (major fixed to 1). */
static MCVersion of(int minor, int patch) {
return of(1, minor, patch);
}
/** Creates a 1.x.0 version (major fixed to 1, patch fixed to 0). */
static MCVersion of(int minor) {
return of(1, minor, 0);
}
/** @return the major component (e.g. 1 in 1.19.4) */
int getMajor();
/** @return the minor component (e.g. 19 in 1.19.4) */
int getMinor();
/** @return the patch component (e.g. 4 in 1.19.4) */
int getPatch();
/** @return a human-readable rendering of this version */
String getFormatted();
/** @return true if this version is strictly higher than {@code version} */
boolean isHigherThan(MCVersion version);
/** @return true if this version is higher than or equal to {@code version} */
boolean isHigherOrEqual(MCVersion version);
/** @return true if this version is strictly lower than {@code version} */
boolean isLowerThan(MCVersion version);
/** @return true if this version is lower than or equal to {@code version} */
boolean isLowerOrEqual(MCVersion version);
/** @return true if this version equals {@code version} */
boolean isEqual(MCVersion version);
/** @return true if this version lies strictly between {@code lower} and {@code higher} */
boolean isBetween(MCVersion lower, MCVersion higher);
/** @return true if this version lies between {@code lower} and {@code higher}, inclusive */
boolean isBetweenOrEqual(MCVersion lower, MCVersion higher);
}
|
/** A genre tag attached to a title. */
export interface Genre {
id: number;
name: string;
}
/**
 * A movie or TV item from a listing endpoint.
 * NOTE(review): field names look like TMDB response fields — `title` /
 * `release_date` for movies vs `name` / `first_air_date` for TV; confirm
 * which endpoints populate which before tightening the optionality.
 */
export interface Movie {
title: string;
backdrop_path: string;
media_type?: string;
release_date?: string;
first_air_date: string;
genre_ids: number[];
id: number;
name: string;
origin_country: string[];
original_language: string;
original_name: string;
overview: string;
popularity: number;
poster_path: string;
vote_average: number;
vote_count: number;
}
/** The set of video categories a clip can be labelled with. */
export interface Element {
type:
| "Bloopers"
| "Featurette"
| "Behind the Scenes"
| "Clip"
| "Trailer"
| "Teaser";
}
/** Detail payload for a single title, including its attached video clips. */
export interface MovieDataResponse {
videos: { results: VideoResponse[] | undefined };
vote_average: number;
release_date: string;
first_air_date: string;
overview: string;
genres: Genre[];
original_language: string;
vote_count: number;
}
/** A single video entry (e.g. a trailer) attached to a title. */
export interface VideoResponse {
id: string;
// ISO 639-1 language code (e.g. "en").
iso_639_1: string;
// ISO 3166-1 region code (e.g. "US").
iso_3166_1: string;
// Provider-specific video key (e.g. the YouTube video id when site is "YouTube").
key: string;
name: string;
official: boolean;
published_at: Date;
site: string;
size: number;
type: Element["type"];
}
/** A Netflix subscription tier as displayed on the plans page. */
export interface NetflixPlans {
type: string;
videoQuality: string;
resolution: string;
price: string;
portability: boolean;
}
|
import React, { useState } from 'react';
import { useNavigate } from 'react-router';
// Sign-up form: collects fullname / email / username / password, POSTs them
// to the backend, and redirects to /login on success.
// Props:
//   updateUserRole — callback invoked with the backend's `isShopOwner` flag.
function Signup({ updateUserRole }) {
const navigate = useNavigate();
// Controlled-form state: one key per input field below.
const [input, setInput] = useState({
fullname: '',
email: '',
username: '',
password: ''
});
// Generic change handler: updates the state key matching the input's name.
function handleChange(e) {
const { name, value } = e.target;
setInput(prevState => ({
...prevState,
[name]: value
}));
}
// Submits the form. On a 2xx response: alerts, reports the shop-owner flag
// to the parent, and navigates to the login page. Any failure (network or
// non-OK status) falls through to the catch and alerts.
function handleSubmit(e) {
e.preventDefault();
// Send signup data to backend
fetch('https://online-marketing.onrender.com/signup', {
method: 'POST',
credentials:"include",
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify(input),
})
.then(response => {
if (response.ok) {
return response.json();
}
throw new Error('Signup failed');
})
.then(data => {
// If signup successful, navigate to the home page
alert(`Thanks ${input.fullname} for joining us`);
updateUserRole(data.isShopOwner);
navigate('/login');
})
.catch(error => {
alert('Signup failed');
console.error('Error signing up:', error);
});
}
return (
<div className="container d-flex flex-column min-vh-100">
<div className="row justify-content-center align-items-center flex-grow-1">
<div className="col-md-6">
<form onSubmit={handleSubmit} className="signup-form p-4 shadow rounded">
<h2 className="mb-4 text-center">Sign Up</h2>
<div className="mb-3">
<label htmlFor="fullname" className="form-label">Full Name</label>
<input
type="text"
id="fullname"
name="fullname"
className="form-control form-control-lg"
placeholder="Enter your full name"
value={input.fullname}
onChange={handleChange}
required
/>
</div>
<div className="mb-3">
<label htmlFor="email" className="form-label">Email</label>
<input
type="email"
id="email"
name="email"
className="form-control form-control-lg"
placeholder="Input your email"
value={input.email}
onChange={handleChange}
required
/>
</div>
<div className="mb-3">
<label htmlFor="username" className="form-label">Username</label>
<input
type="text"
id="username"
name="username"
className="form-control form-control-lg"
placeholder="Enter a username"
value={input.username}
onChange={handleChange}
required
/>
</div>
<div className="mb-3">
<label htmlFor="password" className="form-label">Password</label>
<input
type="password"
id="password"
name="password"
className="form-control form-control-lg"
placeholder="Enter a password"
value={input.password}
onChange={handleChange}
required
/>
</div>
<div className="text-center">
<button type="submit" className="btn btn-primary btn-lg">Sign Up</button>
</div>
</form>
</div>
</div>
</div>
);
}
export default Signup;
|
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Document</title>
<script type="text/javascript">
// Per-student subject scores, keyed by student number.
const scores = [
{ number: "N1047", chinese: 95, math: 79, english: 98 },
{ number: "N1176", chinese: 84, math: 72, english: 66 },
{ number: "N1087", chinese: 82, math: 98, english: 69 },
{ number: "N1808", chinese: 77, math: 87, english: 85 },
{ number: "N1365", chinese: 93, math: 80, english: 71 },
{ number: "N1416", chinese: 90, math: 77, english: 97 },
{ number: "N1048", chinese: 74, math: 75, english: 93 },
{ number: "N1126", chinese: 74, math: 89, english: 88 },
{ number: "N1386", chinese: 77, math: 99, english: 84 },
{ number: "N1869", chinese: 80, math: 92, english: 89 },
{ number: "N1666", chinese: 63, math: 81, english: 90 },
];
// Rank every student by total score, highest first, and print the table.
const ranking = scores
.map(({ number, chinese, math, english }) => ({ number, total: chinese + math + english }))
.sort((a, b) => b.total - a.total);
console.log('学号\t总分');
ranking.forEach(({ number, total }) => console.log(`${number}\t${total}`));
// Top three students in one subject, descending by that subject's score.
// Sorting a copy leaves the source array untouched.
const topThree = (subject) =>
[...scores].sort((a, b) => b[subject] - a[subject]).slice(0, 3);
console.log('语文\t学号\t成绩');
topThree('chinese').forEach((s) => console.log(`\t\t${s.number}\t${s.chinese}`));
console.log('数学\t学号\t成绩');
topThree('math').forEach((s) => console.log(`\t\t${s.number}\t${s.math}`));
console.log('英语\t学号\t成绩');
topThree('english').forEach((s) => console.log(`\t\t${s.number}\t${s.english}`));
</script>
</head>
<body>
</body>
</html>
|
import { Injectable } from '@angular/core';
import { IClients } from './interface/iclients';
import { ICollab } from './interface/icollab';
import { IProject } from './interface/iproject';
import { ITeams } from './interface/iteams';
@Injectable({
  providedIn: 'root'
})
export class AppService {
  /** Base endpoint for collaborator resources. */
  url = 'http://localhost:3000/collaborateurs';

  /** Fetches all collaborators. */
  async getAllCollab(): Promise<ICollab[]> {
    // Note: Response.json() returns a Promise and is never nullish, so the
    // original `?? []` fallback could never fire and has been removed.
    const response = await fetch(this.url);
    return response.json();
  }

  /** Fetches a single collaborator by id. */
  async getCollabById(id: number): Promise<ICollab | undefined> {
    const response = await fetch(`${this.url}/${id}`);
    return response.json();
  }

  /** Creates a collaborator. Fire-and-forget: the outcome is only logged. */
  submitCollab(
    lastName: string,
    firstName: string,
    team: string,
    dateStart: string,
    dateEnd: string,
    project: number,
    TOG: number,
    dayLeft: number
  ) {
    this.postJson(this.url, {
      lastName,
      firstName,
      team,
      dateStart,
      dateEnd,
      project,
      TOG,
      dayLeft
    });
  }

  /** Base endpoint for project resources. */
  urlProject = 'http://localhost:3000/project';

  /** Fetches all projects. */
  async getAllProject(): Promise<IProject[]> {
    const response = await fetch(this.urlProject);
    return response.json();
  }

  /** Fetches a single project by id. */
  async getProjectById(id: string): Promise<IProject | undefined> {
    const response = await fetch(`${this.urlProject}/${id}`);
    return response.json();
  }

  /** Creates a project. Fire-and-forget: the outcome is only logged. */
  submitProject(
    client: string,
    project: string,
    type: string,
    status: string,
    dayHuman: string,
    team: string,
    dateStart: string,
    dateEnd: string
  ) {
    this.postJson(this.urlProject, {
      client,
      project,
      type,
      status,
      dayHuman,
      team,
      dateStart,
      dateEnd
    });
  }

  /** Base endpoint for team resources. */
  urlTeams = 'http://localhost:3000/teams';

  /** Fetches all teams. */
  async getAllTeams(): Promise<ITeams[]> {
    const response = await fetch(this.urlTeams);
    return response.json();
  }

  /** Fetches a single team by id. */
  async getTeamsById(id: number): Promise<ITeams | undefined> {
    const response = await fetch(`${this.urlTeams}/${id}`);
    return response.json();
  }

  /** Creates a team. Fire-and-forget: the outcome is only logged. */
  submitTeams(teamName: string, collab: any, collabId: any, projectId: any) {
    this.postJson(this.urlTeams, {
      teamName,
      collab,
      collabId,
      projectId
    });
  }

  /** Base endpoint for client resources. */
  urlClients = 'http://localhost:3000/clients';

  /** Fetches all clients. */
  async getAllClients(): Promise<IClients[]> {
    const response = await fetch(this.urlClients);
    return response.json();
  }

  /** Creates a client. Fire-and-forget: the outcome is only logged. */
  submitClient(clientName: string, projectName: string) {
    this.postJson(this.urlClients, {
      clientName,
      projectName
    });
  }

  /**
   * Shared POST helper: sends `data` as JSON to `endpoint` and logs the
   * outcome. Replaces four copies of identical fetch boilerplate that also
   * duplicated the endpoint strings already held in the url fields above.
   */
  private postJson(endpoint: string, data: unknown) {
    fetch(endpoint, {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json'
      },
      body: JSON.stringify(data)
    })
      .then((response) => response.json())
      .then((result) => {
        console.log('Success:', result);
      })
      .catch((error) => {
        console.error('Error:', error);
      });
  }
}
|
package me.bossm0n5t3r.leetcode.sudokuSolver
// Brute-force sudoku solver (LeetCode 37 style): depth-first search over the
// empty cells with per-cell candidate pruning.
class SudokuSolver {
class Solution {
// Solves `board` in place: runs the DFS, then copies the recorded
// solution back into the caller's array.
fun solveSudoku(board: Array<CharArray>) {
val sudokuWrapper = SudokuWrapper(result = Array(9) { CharArray(9) { '.' } })
dfs(board, 0, sudokuWrapper)
(0 until 9).forEach { r ->
(0 until 9).forEach { c ->
board[r][c] = sudokuWrapper.result[r][c]
}
}
}
// Mutable holder for the solved grid. equals/hashCode compare the grid
// by deep content rather than array identity.
private data class SudokuWrapper(
val result: Array<CharArray>,
) {
override fun equals(other: Any?): Boolean {
if (this === other) return true
if (javaClass != other?.javaClass) return false
other as SudokuWrapper
if (!result.contentDeepEquals(other.result)) return false
return true
}
override fun hashCode(): Int {
return result.contentDeepHashCode()
}
}
// Backtracking DFS over cell indices 0..80 (row-major). When all 81 cells
// are filled, the board is recorded as the solution — but only once:
// `!isValid(result)` means no solution has been stored yet.
private fun dfs(
board: Array<CharArray>,
cur: Int,
sudokuWrapper: SudokuWrapper,
) {
if (cur == 81) {
if (!isValid(sudokuWrapper.result) && isValid(board)) {
(0 until 9).forEach { r ->
(0 until 9).forEach { c ->
sudokuWrapper.result[r][c] = board[r][c]
}
}
}
return
}
val r = cur / 9
val c = cur % 9
// Pre-filled cell: nothing to choose, advance to the next cell.
if (board[r][c] != '.') {
dfs(board, cur + 1, sudokuWrapper)
return
}
val candidates = getCandidates(board, r, c)
val tmp = board[r][c]
// Try each legal digit, then restore the cell (backtracking).
candidates.forEach { candidate ->
board[r][c] = candidate
dfs(board, cur + 1, sudokuWrapper)
}
board[r][c] = tmp
}
// Digits '1'..'9' not already present in cell (r, c)'s row, column, or
// enclosing 3x3 box. Returns the empty set for pre-filled cells.
private fun getCandidates(
board: Array<CharArray>,
r: Int,
c: Int,
): Set<Char> {
if (board[r][c] != '.') return emptySet()
val result = ('1'..'9').toMutableSet()
// Eliminate digits seen in the same row or the same column.
(0 until 9).forEach { i ->
if (i != c && board[r][i] != '.') {
result.remove(board[r][i])
}
if (i != r && board[i][c] != '.') {
result.remove(board[i][c])
}
}
// Eliminate digits seen in the enclosing 3x3 box.
val tmpR = (r / 3) * 3
val tmpC = (c / 3) * 3
(tmpR until tmpR + 3).forEach { insideR ->
(tmpC until tmpC + 3).forEach { insideC ->
val tmpChar = board[insideR][insideC]
if (tmpChar != '.' && result.contains(tmpChar)) {
result.remove(tmpChar)
}
}
}
return result
}
// Heuristic full-board check: every row, 3x3 box, and column must be fully
// filled with digits summing to at least 45 (1+2+...+9 = 45).
// NOTE(review): a sum check is necessary but not sufficient for sudoku
// validity (duplicate digits can also reach 45); presumably this is
// tolerable because getCandidates already prevents duplicates — confirm.
fun isValid(board: Array<CharArray>): Boolean {
board.forEachIndexed { index, chars ->
if (chars.contains('.')) return false
val row = chars.map { c -> c.toString().toInt() }
if (row.sum() < 45) return false
// On each band's first row (index 0, 3, 6), also check the three boxes.
if (index % 3 == 0) {
(0 until 3).forEach { tmpC ->
val tmp =
listOf(
board[index][tmpC * 3],
board[index][tmpC * 3 + 1],
board[index][tmpC * 3 + 2],
board[index + 1][tmpC * 3],
board[index + 1][tmpC * 3 + 1],
board[index + 1][tmpC * 3 + 2],
board[index + 2][tmpC * 3],
board[index + 2][tmpC * 3 + 1],
board[index + 2][tmpC * 3 + 2],
)
.filter { it != '.' }
.map { it.toString().toInt() }
if (tmp.sum() < 45) return false
}
}
}
// Column sums.
(0 until 9).forEach { c ->
val column =
listOf(
board[0][c],
board[1][c],
board[2][c],
board[3][c],
board[4][c],
board[5][c],
board[6][c],
board[7][c],
board[8][c],
)
.filter { it != '.' }
.map { it.toString().toInt() }
if (column.sum() < 45) return false
}
return true
}
}
}
|
import 'reflect-metadata'; // We need this in order to use @Decorators
import config from './config';
import express from 'express';
import Logger from './loaders/logger';
const path = require('path');
// Boots the Express app: static assets, Swagger UI, file download/view
// routes, the project's loaders, then starts listening on config.port.
async function startServer() {
const app = express();
const swaggerUi = require('swagger-ui-express');
const swaggerDocument = require('./swagger.json');
app.use(express.static('public'));
// Interactive API docs rendered from the bundled swagger.json.
app.use('/api-docs', swaggerUi.serve, swaggerUi.setup(swaggerDocument));
// app.use('/upload',express.static(path.join(__dirname, '/uploads')));
/**
* A little hack here
* Import/Export can only be used in 'top-level code'
* Well, at least in node 10 without babel and at the time of writing
* So we are using good old require.
**/
//route to download a file
// GET /download/<path> — streams the requested file from ./src/uploads.
// NOTE(review): `file` comes straight from the URL; confirm that path
// traversal ("../") is mitigated before exposing this publicly.
app.get('/download/:file(*)', (req, res) => {
var file = req.params.file;
var fileLocation = path.join('./src/uploads', file);
console.log(fileLocation);
res.download(fileLocation, file);
});
// GET /view/<path> — NOTE(review): computes fileLocation from the URL but
// then always serves uploads/Privacy.html; looks like leftover debug code —
// confirm whether the dynamic path was meant to be served instead.
app.get('/view/:file(*)', (req, res) => {
var file = req.params.file;
var fileLocation = path.join('./uploads', file);
console.log(__dirname + "/uploads/Privacy.html");
res.sendFile(__dirname + "/uploads/Privacy.html");
});
// Wire up the rest of the app (middleware, routes, DB) before listening.
await require('./loaders').default({ expressApp: app });
app
.listen(config.port, () => {
Logger.info(`
################################################
🛡️ Server listening on port: ${config.port} 🛡️
################################################
`);
})
.on('error', err => {
Logger.error(err);
process.exit(1);
});
}
startServer();
|
#include<stdio.h>
#include<stdlib.h>
/*
 * Reads integers from stdin into a dynamically grown array until the
 * sentinel value 22 (or end of input) is seen, then prints them back.
 */
int main() {
    int capacity = 10;                      /* currently allocated slots */
    int *p = (int *)malloc(capacity * sizeof(int));
    if (!p) {                               /* always check allocation */
        printf("Memory allocation failed\n");
        return 1;
    }

    int count = 0;                          /* number of stored values */
    printf("Input :\n");
    while (1) {
        /* Bug fix: the original ignored scanf's return value, so EOF or
         * non-numeric input left p[count] unchanged and looped forever. */
        if (scanf("%d", &p[count]) != 1) {
            break;
        }
        if (p[count] == 22) {               /* sentinel ends the input */
            break;
        }
        count++;
        if (count >= capacity) {            /* grow the buffer as needed */
            capacity += 10;
            int *newPtr = realloc(p, capacity * sizeof(int));
            if (!newPtr) {                  /* keep p valid on failure */
                printf("Memory reallocation failed\n");
                free(p);
                return 1;
            }
            p = newPtr;
        }
    }

    printf("\nOutput :\n");
    for (int j = 0; j < count; j++) {       /* sentinel is not printed */
        printf("%d\n", p[j]);
    }
    free(p);
    return 0;
}
|
import { useState, useEffect } from "react";
import Image from "next/image";
import Form from "./Form";
import {
Container,
Input,
Select,
Option,
InputContainer,
InputWrapper,
ButtonWrapper,
} from "./Register.styles";
import {
CloseButton,
Field,
Label,
SearchButton,
Count,
} from "./Sidebar.styles";
import Button from "./Button";
import { useRouter } from 'next/router';
import DeleteIcon from "../public/delete-icon.png";
import AddIcon from "../public/add-icon.png";
/** Props for the Register screen: options for the church <select>. */
interface RegisterProps {
selectOptions: any;
}
/** One leader-registration row: leader name plus contact info. */
interface formObjectInterface {
lider: string;
contato: string;
}
// Blank row used as the template when adding a new leader input.
const formObject: formObjectInterface = {
lider: "",
contato: "",
};
function Register({ selectOptions }: RegisterProps) {
const router = useRouter();
useEffect(() => {
if (typeof window !== undefined) {
if (!window.localStorage.getItem("user")) {
router.push("/login")
} else {
const userData = window.localStorage.getItem("user");
setUser(JSON.parse(userData))
}
}
}, [])
function logout() {
if (typeof window !== undefined) {
router.push("/login");
window.localStorage.removeItem("user");
}
}
const [inputValues, setInputValues] = useState<formObjectInterface[]>([
formObject,
]);
const [selectValue, setSelectValue] = useState("");
const [register, setRegister] = useState(false);
const [user, setUser] = useState([]);
async function send() {
if (!isFormValid(selectValue, inputValues)) {
alert("Preencha todos os campos");
setRegister(false);
return;
}
const user = window.localStorage.getItem("user")
const updateData = { value: inputValues, column: "Líderes" };
const searchData = {
searchValue: selectValue,
searchColumn: "address",
};
const response = await fetch("/api/update", {
method: "POST",
body: JSON.stringify({ searchData, updateData,user }),
});
}
function isFormValid(selectValue: string, data: formObjectInterface[]) {
let isValid = true;
setRegister(true);
for (const item of data) {
if (item.lider != "" && item.contato == "") {
isValid = false;
break;
}
}
if (selectValue == "") isValid = false;
return isValid;
}
function handleSelectChange(e: any) {
setSelectValue(e.target.value);
}
function addInput(e: any) {
e.preventDefault();
const values = [...inputValues, formObject];
setInputValues(values);
}
function handleInputChange(index: number, e: any) {
const value = e.target.value;
const key = e.target.name;
const newValue = { [key]: value };
const currentInputValues = [...inputValues];
const currentInput = currentInputValues[index];
const newInputValue = { ...currentInput, ...newValue };
currentInputValues[index] = newInputValue;
setInputValues(currentInputValues);
}
function removeInput(index: number) {
const values = [...inputValues];
values.splice(index, 1);
setInputValues(values);
}
return (
<>
<section style={register ? {display: 'flex', width: "400px", flexDirection: "column", justifyContent: "center", margin: "0 auto"} : {display: 'none'}}>
<h1 style={{fontSize: '30px', fontFamily: 'Courier', color: 'white', textAlign: 'center', marginTop: '200px', marginBottom: "30px"}}>Registro feito com sucesso!</h1>
<Button
color={"#28eb8d"}
fontColor={"#000"}
onClick={() => setRegister(false)}
>
Retornar à página de registro
</Button>
<p style={{fontFamily: "courier", fontWeight: "bold", color: "#fff", margin: "10px 0px 10px 0px", textAlign: "center"}}>ou</p>
<Button
color={"#720800"}
fontColor={"#fff"}
onClick={() => router.push('/')}
>
Consultar mapa
</Button>
</section>
<section style={register ? {display: 'none'} : {display: 'block'}}>
<h1 style={{fontSize: '30px', fontFamily: 'Courier', color: 'white', textAlign: 'center', marginTop: '40px'}}>Registre os líderes</h1>
<Container>
<div style={{display: 'flex', flexDirection: 'column', marginRight: '20px', alignSelf: 'flex-start'}}>
<p style={{fontFamily: 'courier, arial', fontWeight: 'bold', fontSize: '18px', marginBottom: '10px'}}>Usuário <span style={{display: "block", color: '#fff', fontFamily: 'courier, arial', fontWeight: 'bold', fontSize: '24px'}}>{user.name}</span></p>
<Button
color={"#eb3b28"}
fontColor={"#000"}
onClick={() => logout()}
>
Sair
</Button>
<p style={{fontFamily: 'courier, arial', fontWeight: 'bold', fontSize: '18px', marginTop: '20px'}}>Associação <span style={{display: "block", color: '#fff', fontFamily: 'courier, arial', fontWeight: 'bold', fontSize: '24px'}}>{user.guild}</span></p>
<Label style={{marginTop: "20px"}}>Igreja</Label>
<Select onChange={handleSelectChange} value={selectValue}>
<Option value="">Selecione:</Option>
{selectOptions.map((option: any) => (
<Option key={option.address} value={option.address}>
{`${option.name}, ${option.address}`}
</Option>
))}
</Select>
</div>
<InputContainer>
{inputValues.map((inputValue: formObjectInterface, index: number) => {
return (
<InputWrapper key={index}>
<Label>Líder</Label>
<Input
name="lider"
value={inputValue.lider}
onChange={(e) => handleInputChange(index, e)}
/>
<Label>Contato</Label>
<Input
name="contato"
value={inputValue.contato}
onChange={(e) => handleInputChange(index, e)}
/>
<Button
color={"#eb3b28"}
fontColor={"#000"}
onClick={() => removeInput(index)}
>
<Image src={DeleteIcon} width={24} height={24} alt="Remover" />
</Button>
</InputWrapper>
);
})}
<ButtonWrapper>
<Button
color={"#28eb8d"}
fontColor={"#000"}
onClick={(e) => addInput(e)}
>
<Image src={AddIcon} width={20} height={20} alt="Adicionar" />
</Button>
</ButtonWrapper>
</InputContainer>
<Button color={"#720800"} fontColor={"#fff"} onClick={send}>
Registrar
</Button>
</Container>
</section>
</>
);
}
export default Register;
|
import { Button } from '@mui/material'
import { studentQuizzes } from 'helpers/APIs/student'
import { QuizModel } from 'helpers/types'
import Link from 'next/link'
import React, { useEffect, useState } from 'react'
import { useAppSelector } from 'redux/hooks'
// Student dashboard widget: a horizontally scrollable list of upcoming
// quizzes, each linking to its attempt page.
const Batches = () => {
const [quizzes, setQuizzes] = useState<QuizModel[]>([])
const { isAuthenticated } = useAppSelector((state) => state.auth)
// Reload the quiz list whenever the auth state flips.
// NOTE(review): studentQuizzes() is called even when isAuthenticated is
// false — confirm the API tolerates unauthenticated requests.
useEffect(() => {
studentQuizzes().then(setQuizzes)
}, [isAuthenticated])
return (
<div className="mt-5 w-full max-w-3xl">
<h1 className="mb-3 text-2xl">Upcoming Quizzes</h1>
<div className="hide-scroll-x flex w-full overflow-x-scroll">
{quizzes.map((quiz) => (
<div
key={quiz._id}
className="mr-4 flex-shrink-0 rounded-lg bg-primary-main px-3 py-2 text-fg"
>
<h3 className="m-0 mb-2">{quiz.title}</h3>
<p className="m-0 mb-2">Teacher: {quiz.teacher.name}</p>
<p className="m-0 mb-2">Duration {quiz.duration} hours</p>
<Link href={`/quiz/${quiz._id}`} passHref>
<Button variant="contained" color="secondary">
Attempt Quiz
</Button>
</Link>
</div>
))}
</div>
</div>
)
}
export default Batches
|
import Router from "koa-router"
import { endpoints } from "../../../shared/constants/api-endpoints"
import { TokenCredentials } from "../../../shared/user/types/token-credentials"
import { HttpException } from "../common/http-exception"
import { UserModel } from "../user/user.model"
import { CharacterModel } from "./character.model"
import { CharacterFields } from "./types/character-fields"
// Registers the REST routes for the character resource and returns the
// composed Koa middleware stack.
export function characterRoutes() {
const router = new Router()
// POST /characters — create a character owned by the authenticated user.
router.post(endpoints.characters, async ctx => {
const { name } = await validateTokenCredentials(ctx.request.headers)
const fields = ctx.request.body as CharacterFields
const character = await CharacterModel.create({ fields, ownerName: name })
ctx.body = character.serialize()
})
// GET /characters/:id — fetch one character (no auth required).
router.get(endpoints.character(":id"), async ctx => {
const id = ctx.params.id as string
const character = await findCharacter(id)
ctx.body = character.serialize()
})
// GET /characters — list characters, optionally filtered by ?owner=,
// capped at ?limit= (default 10).
router.get(endpoints.characters, async ctx => {
const { owner, limit } = ctx.query
let query = CharacterModel.find().limit(Number(limit) || 10)
if (owner) {
query = query.find({ ownerName: owner })
}
const queryResult = await query
ctx.body = { characters: queryResult.map(c => c.serialize()) }
})
// PUT /characters/:id — replace a character's fields.
// SECURITY NOTE(review): the ownership check below is commented out, so
// any holder of a valid token can edit ANY character — confirm intent.
router.put(endpoints.character(":id"), async ctx => {
await validateTokenCredentials(ctx.request.headers)
const id = ctx.params.id as string
const character = await findCharacter(id)
// if (!await characters.isCharacterOwner(name, id)) {
// throw new HttpException("You do not own this character.", 403)
// }
const fields = ctx.request.body as CharacterFields
character.fields = fields
await character.save()
ctx.body = character.serialize()
})
// DELETE /characters/:id — remove a character.
// SECURITY NOTE(review): ownership check is likewise commented out here.
router.delete(endpoints.character(":id"), async ctx => {
await validateTokenCredentials(ctx.request.headers)
const id = ctx.params.id as string
// if (!await characters.isCharacterOwner(name, id)) {
// throw new HttpException("You do not own this character.", 403)
// }
const result = await CharacterModel.findByIdAndRemove(id)
if (!result) {
throw new HttpException("Character not found", 404)
}
ctx.body = {}
})
return router.routes()
}
/** Loads the character with the given id, or fails with HTTP 404. */
async function findCharacter(id: string) {
  const found = await CharacterModel.findById(id)
  if (found == null) {
    throw new HttpException("Character not found", 404)
  }
  return found
}
/**
 * Resolves the user for the given name/token pair and returns it;
 * fails with HTTP 401 when the user is unknown or the token is invalid.
 */
async function validateTokenCredentials({ name, token }: TokenCredentials) {
  const account = await UserModel.findOne({ name })
  if (account == null || !account.isTokenValid(token)) {
    throw new HttpException("Invalid or expired token", 401)
  }
  return account
}
|
import type { NextApiRequest, NextApiResponse } from 'next';
import { ApiLogger, DefaultApiLoggerOption } from './api-logger';
import { DEFAULT_MIDDLEWARE_ROUTER_METHOD } from './constants';
import { ApiErrorHandler } from './error-handler';
/**
 * An object with key-value pairs, where keys are strings and values are of type `T`.
 */
export type TypedObject<T = unknown> = Record<string, T>;
/**
 * The Next.js API request with `req.middleware` available.
 * @typeparam M The shape of the accumulated middleware data.
 */
export interface NextApiRequestWithMiddleware<
M extends TypedObject = TypedObject,
> extends NextApiRequest {
/**
 * An object containing middleware data.
 */
middleware: M;
}
/**
 * A standard Next.js API handler with `req.middleware` available.
 * @typeparam T The type of the response data.
 * @typeparam M The type of the middleware data.
 * @param req The Next.js API request object.
 * @param res The Next.js API response object.
 * @returns The response data, a Promise resolving to it, or `void` when the
 * handler writes to `res` directly.
 */
export type NextApiHandlerWithMiddleware<
T = unknown,
M extends TypedObject = TypedObject,
> = (
req: NextApiRequestWithMiddleware<M>,
res: NextApiResponse
) => T | Promise<T> | void;
/**
 * An HTTP API response: either a success envelope with data or an error
 * envelope with a message (discriminated on `success`).
 * @typeparam T The type of the response data.
 */
export type ApiResponse<T = unknown> = SuccessApiResponse<T> | ErrorApiResponse;
/**
 * An HTTP API success response with data.
 * @typeparam T The type of the response data.
 */
export type SuccessApiResponse<T> = { success: true; data: T };
/**
 * An HTTP API error response with a message.
 */
export type ErrorApiResponse = { success: false; message: string };
/**
 * A router method under which middleware can be registered: either the
 * catch-all default method or a concrete HTTP method.
 */
export type MiddlewareRouterMethod =
| typeof DEFAULT_MIDDLEWARE_ROUTER_METHOD
| RouterMethod;
/**
 * The options for creating a router builder.
 * @property {ApiErrorHandler} error - The error handler to use.
 * @property {boolean} showMessage - Whether to show error messages in the response. Defaults to `true`.
 * @property {ApiLogger} logger - The logger to use.
 * @property {DefaultApiLoggerOption} loggerOption - The logger options to use.
 */
export type RouterBuilderOptions = Partial<{
error: ApiErrorHandler;
showMessage: boolean;
logger: ApiLogger;
loggerOption: DefaultApiLoggerOption;
}>;
/**
 * The available HTTP methods for adding routes to a router builder.
 */
export type RouterMethod = 'GET' | 'PATCH' | 'DELETE' | 'POST' | 'PUT';
/**
 * An object representing middleware that can be added to a router builder.
 * The keys represent the middleware router method (e.g. 'ALL') and the values are arrays of middleware functions.
 * @typeparam T The type of the response data produced by the middleware.
 * @typeparam M The type of the middleware data the handlers accumulate.
 */
export type InternalMiddlewareMap<
T extends TypedObject = TypedObject,
M extends TypedObject = TypedObject,
> = Partial<
Record<MiddlewareRouterMethod, NextApiHandlerWithMiddleware<T | void, M>[]>
>;
|
#!/usr/bin/env python
# This node send twist msg to regulate the drone in position on the x axis.
#The servo point can be mobile
from concurrent.futures import thread
from telnetlib import LOGOUT
import threading
import rospy
from geometry_msgs.msg import PoseStamped
from geometry_msgs.msg import Twist
# Shared poses of the two drones, updated by the VRPN callbacks below.
x_drone1 = y_drone1 = z_drone1 = x_drone2 = y_drone2 = z_drone2 = 0.0
##PID SETTINGS##
# Fixed controller time step in seconds.
# NOTE(review): assumed to match the pose-callback rate — confirm.
TIME_STEP = 0.01
# With Kd = Ki = 0 this is effectively a pure proportional controller.
Kp = 1
Kd = 0
Ki = 0
# Output saturation bounds handed to PID.setLims.
MAX_VAR = 1.0
MIN_VAR = -1.0
##############
class PID():
    """Discrete PID controller with optional output saturation.

    Args:
        KP, KI, KD: proportional, integral and derivative gains.
        target: set-point the controller regulates towards (default 0).
    """

    def __init__(self, KP, KI, KD, target=0):
        self.kp = KP
        self.ki = KI
        self.kd = KD
        self.sp = target
        self.error_last = 0          # previous error, for the derivative term
        self.integral_error = 0      # accumulated error, for the integral term
        self.saturation_max = None   # upper output clamp (None = unlimited)
        self.saturation_min = None   # lower output clamp (None = unlimited)

    def compute(self, pos, dt):
        """Return the control output for measurement `pos` over time-step `dt`."""
        error = self.sp - pos  # compute the error
        # How the error changes with time (derivative term).
        derivative_error = (error - self.error_last) / dt
        self.integral_error += error * dt  # error build-up over time
        output = self.kp * error + self.ki * self.integral_error + self.kd * derivative_error
        self.error_last = error
        # Bug fix: the None-check must come FIRST. The original evaluated
        # `output > self.saturation_max` before checking for None, which
        # raises TypeError in Python 3 whenever no limits were set.
        if self.saturation_max is not None and output > self.saturation_max:
            output = self.saturation_max
        elif self.saturation_min is not None and output < self.saturation_min:
            output = self.saturation_min
        return output

    def setLims(self, min, max):
        """Set the output saturation limits (lower `min`, upper `max`)."""
        self.saturation_max = max
        self.saturation_min = min
# Pose callback for drone 1 (Tello01): regulate its x-position towards
# drone 2's current x (a moving set-point) and publish a Twist command.
def callback1(data):
global x_drone2
# NOTE(review): the Publisher and PID are re-created on every message, so
# integral/derivative state never persists between callbacks — with
# Ki = Kd = 0 this is pure proportional control; confirm that is intended.
pub = rospy.Publisher("/cmd_vel", Twist, queue_size=10)
cmd = Twist()
pid = PID(Kp,Ki,Kd, x_drone2)
pid.setLims(MIN_VAR,MAX_VAR)
# Negative sign: the command opposes the position error along x.
cmd.linear.x = -1 * pid.compute(data.pose.position.x, TIME_STEP)
# NOTE(review): compute() is invoked a second time just for logging, which
# mutates the controller's internal state twice per message.
rospy.loginfo("commande : %s" %str(-1 *pid.compute(data.pose.position.x, TIME_STEP)))
pub.publish(cmd)
def callback2(data):
    """Cache the latest pose of drone 2 (Tello02) in the module globals."""
    global x_drone2
    global y_drone2
    global z_drone2
    position = data.pose.position
    x_drone2, y_drone2, z_drone2 = position.x, position.y, position.z
def regul():
    """Start the regulation node, subscribe to both drones' poses, and spin."""
    rospy.init_node('regul_X_PID', anonymous=True)
    rospy.Subscriber('/vrpn_client_node/Tello01/pose', PoseStamped, callback1)
    rospy.Subscriber('/vrpn_client_node/Tello02/pose', PoseStamped, callback2)
    rospy.spin()
if __name__ == '__main__':
    regul()  # run the regulation node when executed as a script
|
<!DOCTYPE html>
<html xmlns:th="http://www.thymeleaf.org"
      xmlns:layout="http://www.ultraq.net.nz/thymeleaf/layout"
layout:decorator="template">
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
<body>
<div layout:fragment="content">
<div class="wrapper">
<div class="content-wrapper">
<!-- Content Header (Page header) -->
<section class="content-header">
<div class="container-fluid">
<div class="row mb-2">
<div class="col-sm-6">
<h1>LISTE ELEVES EN FONCTION DE CLASSE</h1>
</div>
<!-- <div class="col-sm-6">
<ol class="breadcrumb float-sm-right">
<li class="breadcrumb-item"><a href="#">Home</a></li>
<li class="breadcrumb-item active">LISTE ELEVES EN FONCTION DE CLASSE</li>
</ol>
</div> -->
</div>
</div>
<!-- /.container-fluid -->
</section>
<!-- Main content -->
<section class="content">
<div class="container-fluid">
<div class="row">
<div class="col-md-3">
<div class="card">
<div class="card-header">
<h3 class="card-title">LISTE DES CLASSES</h3>
<div class="card-tools">
<button type="button" class="btn btn-tool"
data-card-widget="collapse">
<i class="fas fa-minus"></i>
</button>
</div>
</div>
<div class="card-body p-0" >
<ul class="nav nav-pills flex-column" >
<li class="nav-item" th:each="c: ${listeClasse}"><a class="nav-link"
th:value="${c.idClasse}"
th:text="${c.classeName}" th:href="@{/findByClasseEl(idClasse=${c.idClasse})}"> <i class=" nav-link fas fa-inbox"></i>
<span class="badge bg-primary float-right"></span>
</a></li>
</ul>
</div>
</div>
</div>
<!-- /.col -->
<div class="col-md-12">
<div class="card card-primary card-outline">
<div class="card-header">
<h3 class="card-title">LISTE DES ELEVES</h3>
<div class="card-header float-right">
<!-- <h3 class="card-title">DataTable with default features</h3> -->
<a th:href="@{inscription}"><button type="button" class="btn btn-primary"
data-toggle="modal" data-target="#addClasses">add
new</button></a>
</div>
<!-- <div class="col-md-2 ml-auto mr-auto">
<div class="input-group input-group-sm">
<input type="text" th:object="${classe1}" th:field="*{classeName}" class=" border-0" readonly >
</div>
</div> -->
</div>
<form >
<div th:if="${listeEleve}">
<!-- Main content -->
<section class="content">
<div class="container-fluid">
<div class="row">
<div class="col-12">
<div class="card">
<!-- <div class="card-header">
<h3 class="card-title">Date:</h3>
</div> -->
<!-- /.card-header -->
<div class="card-body">
<table id="listePres" class="table table-bordered table-striped">
<thead>
<tr>
<th>ID</th>
<th>PHOTOS</th>
<th>CODE ELEVES</th>
<th>NOM</th>
<th>PRENOM</th>
<th>SEXE</th>
<th>DATE NAIS.</th>
<th>LIEU NAIS.</th>
<th>DERNIER ETAB.</th>
<th>RELIGION</th>
<th>DERNIER CLASSE FREQ.</th>
<th>TEL</th>
<th>PIECE FOURN.</th>
<th>ADRESSE</th>
<th>CLASSE</th>
<th>ANNEE ACA.</th>
<!-- <th>TYPE BOURSE</th> -->
<th>ACTION</th>
</tr>
</thead>
<tbody>
<tr th:each="el:${listeEleve}">
<td th:text="${el.idPerson}"></td>
<td>
<img class="img-circle" th:src="@{/getPhotoEl(idPerson=${el.idPerson})}" width="50" height="50"/>
</td>
<td th:text="${el.codeEl}"></td>
<td th:text="${el.nom}"></td>
<td th:text="${el.prenom}"></td>
<td th:text="${el.sexe}"></td>
<td th:text="${el.dateNaissance}"></td>
<td th:text="${el.lieuDeNaissance}"></td>
<td th:text="${el.dernierEtablissement}"></td>
<td th:text="${el.religion}"></td>
<td th:text="${el.dernierClasse}"></td>
<td th:text="${el.telephone}"></td>
<td th:text="${el.pieceFournies}"></td>
<td th:text="${el.adresse}"></td>
<td th:text="${el.classe.classeName}"></td>
<td th:text="${el.AnneeAcade.anneeAC}"></td>
<!-- <td th:text="${el.typeDeBou.typeBourse}"></td> -->
<td>
<a th:href="@{/editeEleves(idPerson=${el.idPerson})}" class="btn btn-info"><i class="fas fa-edit"></i></a>
<a th:href="@{/deleteEleves(idPerson=${el.idPerson})}" class="btn btn-danger"
id="deleteButtonCat"><i class="fas fa-trash"></i></a>
<a th:href="@{/Details(idPerson=${el.idPerson})}" class="btn btn-info"><i class="fas fa-list"></i></a>
</td>
</tr>
</tbody>
<tfoot>
<tr>
<th>ID</th>
<th>PHOTOS</th>
<th>CODE ELEVES</th>
<th>NOM</th>
<th>PRENOM</th>
<th>SEXE</th>
<th>DATE NAIS.</th>
<th>LIEU NAIS.</th>
<th>DERNIER ETAB.</th>
<th>RELIGION</th>
<th>DERNIER CLASSE FREQ.</th>
<th>TEL</th>
<th>PIECE FOURN.</th>
<th>ADRESSE</th>
<th>CLASSE</th>
<th>ANNEE ACA.</th>
<!-- <th>TYPE BOURSE</th> -->
<th>ACTION</th>
</tr>
</tfoot>
</table>
</div>
<!-- /.card-body -->
</div>
<!-- /.card -->
</div>
<!-- /.col -->
</div>
<!-- /.row -->
</div>
<!-- /.container-fluid -->
</section>
</div>
</form>
</div>
</div>
<!-- /.col -->
</div>
<!-- /.row -->
</div>
<!-- /.container-fluid -->
</section>
</div>
</div>
</div>
  <script src="https://www.kryogenix.org/code/browser/sorttable/sorttable.js"></script>
</body>
</html>
|
@extends('layouts.main_layout')
@section('title')
{{ env('APP_NAME') }} | {{ trans('translation.numerotation_form_manage_numerotations') }} |
{{ trans('translation.numerotation_action_add') }}
@stop
@section('css')
@include('layouts.includes.form_css')
@endsection
@section('page-header')
@include('components.new_breadcrumb', [
'title' => trans('translation.numerotation_form_manage_numerotations'),
'subtitle' => trans('translation.numerotation_action_add'),
'route' => route('numerotations.index'),
'text' => trans('translation.numerotation_form_numerotations_list'),
'permission' => 'numerotation-list',
'icon' => 'lab la-stack-exchange',
])
@endsection
@section('content')
@if ($errors->any())
<div class="alert alert-danger">
<ul>
@foreach ($errors->all() as $error)
<li>{{ $error }}</li>
@endforeach
</ul>
</div>
@endif
<form action="{{ route('numerotations.store') }}" method="post" id="userForm">
@csrf
<div class="row">
<div class="col-12">
<div class="card card-body">
<div class="row">
@include('form.input', [
'cols' => 'col-md-4',
'column' => 'doc_type',
'model' => 'numerotation',
'optional' => 'text-danger',
'input_type' => 'text',
'class_name' => '',
'column_id' => 'doc_type',
'column_value' => old('doc_type'),
'readonly' => 'false',
])
@include('form.input', [
'cols' => 'col-md-4',
'column' => 'prefix',
'model' => 'numerotation',
'optional' => 'text-danger',
'input_type' => 'text',
'class_name' => '',
'column_id' => 'prefix',
'column_value' => old('prefix'),
'readonly' => 'false',
])
@include('form.input', [
'cols' => 'col-md-4',
'column' => 'increment_num',
'model' => 'numerotation',
'optional' => 'text-danger',
'input_type' => 'number',
'class_name' => '',
'column_id' => 'increment_num',
'column_value' => old('increment_num'),
'readonly' => 'false',
])
<div class="col-md-12 col-xl-12 col-xs-12 col-sm-12">
<div class="form-group">
<label for="content">{{ trans('translation.numerotation_form_comments') }}
<span class="text-secondary">*</span></label>
                            <textarea class="form-control ckeditor" name="comments" id="comments" style="height: 213px">{{ old('comments') }}</textarea>
</div>
</div>
</div>
</div>
</div>
</div>
<div class="col-lg-12">
<div class="text-start">
<button type="submit" class="btn btn-primary">{{ trans('translation.general_general_save') }}</button>
</div>
</div>
</form>
</div>
@endsection
@section('js')
@include('layouts.includes.form_js')
<script src="{{ asset('assets/custom_js/validate_number.js') }}"></script>
<script src="{{ asset('assets/custom_js/ckeditor.js') }}"></script>
{!! JsValidator::formRequest('App\Http\Requests\StoreNumerotationRequest') !!}
@endsection
|
/* eslint-disable import/no-anonymous-default-export */
import {makeAutoObservable} from 'mobx';
/**
 * Observable store holding the tab captions and the currently selected
 * tab. Made reactive with MobX's makeAutoObservable.
 */
class TabsInfo{
    constructor(){
        // Captions of the regular tabs, keyed by an internal id.
        this._tabsNames = {
            events: "План мероприятий",
            grands: "Гранты",
            championship: "Чемпионат",
            winners: "ЛМР",
            study: "Аспирантура/Докторантура",
            projectGroups: "Проектные группы",
            scienificManagement: "Научное руководство"
        };
        // Captions of the admin-only tabs.
        this._adminTabsNames = {
            users: "Пользователи"
        };
        this._defaultState = this._tabsNames.events;
        this._currentState = this._defaultState;
        makeAutoObservable(this);
    }
    get tabsNames(){
        return this._tabsNames;
    }
    get adminTabsNames(){
        return this._adminTabsNames;
    }
    get defaultState(){
        return this._defaultState;
    }
    get currentState(){
        return this._currentState;
    }
    /**
     * Select a tab by its caption; unknown captions fall back to the
     * default tab.
     */
    set currentState(value){
        const tabsHasValue = Object.values(this._tabsNames).includes(value);
        const adminsHasValue = Object.values(this._adminTabsNames).includes(value);
        // BUG FIX: the original assigned the default state unconditionally
        // after this if-block, so every valid selection was immediately
        // overwritten. Only reset to the default for unknown values.
        if(tabsHasValue || adminsHasValue){
            this._currentState = value;
        } else {
            this._currentState = this.defaultState;
        }
    }
}
export default TabsInfo;
|
// Package chat 对话插件
package chat
import (
"math/rand"
"strconv"
"time"
zero "github.com/wdvxdr1123/ZeroBot"
"github.com/wdvxdr1123/ZeroBot/extension/rate"
"github.com/wdvxdr1123/ZeroBot/message"
)
// poke rate-limits the poke replies: a budget of 8 tokens per 5 minutes per user.
var poke = rate.NewManager(time.Minute*5, 8) // poke limiter

func init() { // plugin body
	// Reply when the bot is addressed by name (empty full match, to-me only).
	zero.OnFullMatch("", zero.OnlyToMe).SetBlock(false).FirstPriority().
		Handle(func(ctx *zero.Ctx) {
			var nickname = zero.BotConfig.NickName[0]
			time.Sleep(time.Second * 1) // short delay so the reply feels natural
			ctx.SendChain(message.Text(
				[]string{
					nickname + "在此,有何贵干~",
					"(っ●ω●)っ在~",
					"这里是" + nickname + "(っ●ω●)っ",
					nickname + "不在呢~",
				}[rand.Intn(4)],
			))
		})
	// React to the "poke" notice event, rate-limited per user.
	zero.On("notice/notify/poke", zero.OnlyToMe).SetBlock(false).FirstPriority().
		Handle(func(ctx *zero.Ctx) {
			var nickname = zero.BotConfig.NickName[0]
			switch {
			case poke.Load(ctx.Event.UserID).AcquireN(3):
				// Budget is 8 tokens per 5 minutes; this branch spends 3.
				time.Sleep(time.Second * 1)
				ctx.SendChain(message.Text("请不要戳", nickname, " >_<"))
			case poke.Load(ctx.Event.UserID).Acquire():
				// Fallback branch spends 1 token.
				time.Sleep(time.Second * 1)
				ctx.SendChain(message.Text("喂(#`O′) 戳", nickname, "干嘛!"))
			default:
				// Triggered too frequently: stay silent.
			}
		})
	// Group "air conditioner" toy: per-group on/off switch and temperature.
	var AirConditTemp = map[int64]int{}
	var AirConditSwitch = map[int64]bool{}
	zero.OnFullMatch("空调开").SetBlock(true).FirstPriority().
		Handle(func(ctx *zero.Ctx) {
			AirConditSwitch[ctx.Event.GroupID] = true
			ctx.SendChain(message.Text("❄️哔~"))
		})
	zero.OnFullMatch("空调关").SetBlock(true).FirstPriority().
		Handle(func(ctx *zero.Ctx) {
			AirConditSwitch[ctx.Event.GroupID] = false
			delete(AirConditTemp, ctx.Event.GroupID) // forget the temperature when switched off
			ctx.SendChain(message.Text("💤哔~"))
		})
	zero.OnRegex(`设置温度(\d+)`).SetBlock(true).FirstPriority().
		Handle(func(ctx *zero.Ctx) {
			// Default the group temperature to 26 the first time it is referenced.
			if _, exist := AirConditTemp[ctx.Event.GroupID]; !exist {
				AirConditTemp[ctx.Event.GroupID] = 26
			}
			if AirConditSwitch[ctx.Event.GroupID] {
				// Switch is on: update the temperature from the regex capture.
				temp := ctx.State["regex_matched"].([]string)[1]
				AirConditTemp[ctx.Event.GroupID], _ = strconv.Atoi(temp)
				ctx.SendChain(message.Text(
					"❄️风速中", "\n",
					"群温度 ", AirConditTemp[ctx.Event.GroupID], "℃",
				))
			} else {
				// Switch is off: report the (unchanged) temperature only.
				ctx.SendChain(message.Text(
					"💤", "\n",
					"群温度 ", AirConditTemp[ctx.Event.GroupID], "℃",
				))
			}
		})
	zero.OnFullMatch(`群温度`).SetBlock(true).FirstPriority().
		Handle(func(ctx *zero.Ctx) {
			if _, exist := AirConditTemp[ctx.Event.GroupID]; !exist {
				AirConditTemp[ctx.Event.GroupID] = 26
			}
			if AirConditSwitch[ctx.Event.GroupID] {
				ctx.SendChain(message.Text(
					"❄️风速中", "\n",
					"群温度 ", AirConditTemp[ctx.Event.GroupID], "℃",
				))
			} else {
				ctx.SendChain(message.Text(
					"💤", "\n",
					"群温度 ", AirConditTemp[ctx.Event.GroupID], "℃",
				))
			}
		})
}
|
import React, { useState } from "react";
import "../styles/LoginSignup.css";
import { Link, useNavigate } from "react-router-dom";
import { getAuth, createUserWithEmailAndPassword } from 'firebase/auth'
import { ToastContainer, toast } from "react-toastify";
function Singup() {
const [name, setName] = useState("");
const [email, setEmail] = useState("");
const [password, setPassword] = useState("");
const [confirmPass, setConfirmPass] = useState("");
const auth = getAuth();
const navigate = useNavigate();
const signUp = async (e) => {
e.preventDefault();
if (!name) {
return toast.error("Please Enter Your Full Name")
}
else if (!email) {
return toast.error("Please Enter Your Email")
}
else if (!password) {
return toast.error("Please Enter Your Password")
}
else if (password !== confirmPass) {
return toast.error("Password do not match.")
}
try {
const userCredential = await createUserWithEmailAndPassword(auth, email, password)
const user = userCredential.user;
console.log(user);
toast.success("Account Successfully Created.")
navigate("/") //Navigate to login page after successfully create account
} catch (error) {
toast.error(error + "")
}
};
return (
<div className="container">
<div className="signupForm">
<form onSubmit={signUp}>
<div className="signupHeader">
<p>Create an Account</p>
</div>
<div className="signupInput">
<input type="text" placeholder="Full Name" value={name} onChange={(e) => setName(e.target.value)} required />
<input type="email" placeholder="Email" value={email} onChange={(e) => setEmail(e.target.value)} required />
<input type="password" placeholder="Password" value={password} onChange={(e) => setPassword(e.target.value)} required autoComplete="off" />
<input type="password" placeholder="Confirm Password" value={confirmPass} onChange={(e) => setConfirmPass(e.target.value)} required autoComplete="off" />
<button onClick={signUp} type="submit">Sign Up</button>
</div>
<div className="createAccount">
<p>
Already have an account? <Link to="/">Log In</Link>
</p>
</div>
</form>
</div>
<ToastContainer />
</div>
);
}
export default Singup;
|
import 'package:clean_arch_v2/features/profile/data/datasources/local_datasource.dart';
import 'package:clean_arch_v2/features/profile/data/datasources/remote_datasource.dart';
import 'package:clean_arch_v2/features/profile/data/repositories/profile_repository_impl.dart';
import 'package:clean_arch_v2/features/profile/domain/repositories/profile_repository.dart';
import 'package:clean_arch_v2/features/profile/domain/usecases/get_all_users.dart';
import 'package:clean_arch_v2/features/profile/presentation/bloc/profile_bloc.dart';
import 'package:get_it/get_it.dart';
import 'package:hive/hive.dart';
/// Contract for building the [GetAllUsers] use case together with all of
/// its transitive data-source/repository dependencies.
abstract class GetAllUserDependencies {
  Future<GetAllUsers> getAllUserDependencies();
}

/// Manual (service-locator-free) wiring of the [GetAllUsers] use case.
class GetAllUserDependenciesImpl implements GetAllUserDependencies {
  @override
  Future<GetAllUsers> getAllUserDependencies() async {
    // Build the whole chain by hand: Hive -> data sources -> repository
    // -> use case.
    final HiveInterface hive = Hive;
    final ProfileRemoteDataSource profileRemoteDataSource = ProfileRemoteDataSourceImpl();
    final ProfileLocalDataSource profileLocalDataSource = ProfileLocalDataSourceImpl(hive);
    final ProfileRepository profileRepository = ProfileRepositoryImpl(remoteDataSource: profileRemoteDataSource, localDataSource: profileLocalDataSource);
    final GetAllUsers getAllUsers = GetAllUsers(profileRepository);
    return getAllUsers;
  }
}
// Global service-locator instance.
final sl = GetIt.instance;

/// Registers every profile-feature dependency in [sl].
/// Order matters: registerSingleton constructs the instance immediately,
/// so each sl() argument must already be registered at that point.
Future<void> initializeDependencies() async {
  sl.registerSingleton<HiveInterface>(Hive);
  sl.registerSingleton<ProfileRemoteDataSource>(
    ProfileRemoteDataSourceImpl()
  );
  sl.registerSingleton<ProfileLocalDataSource>(
    ProfileLocalDataSourceImpl(sl())
  );
  sl.registerSingleton<ProfileRepository>(
    ProfileRepositoryImpl(
      remoteDataSource: sl(),
      localDataSource: sl()
    )
  );
  sl.registerSingleton<GetAllUsers>(
    GetAllUsers(sl())
  );
  // The bloc is a factory: a fresh instance per resolution.
  sl.registerFactory<ProfileBloc>(
    () => ProfileBloc(sl())
  );
}
|
import { useState, useEffect } from "react";
import { getPosts } from "./../api/user";
/**
 * Hook that loads the posts of `userId` and exposes a curried remover.
 *
 * Returns `{ posts, deleteItem }`; `deleteItem(i)` produces a click
 * handler that drops the post at index `i` from local state only.
 */
export const useGetPosts = (userId: number) => {
  const [posts, setPosts] = useState([]);

  useEffect(() => {
    (async () => {
      const listPosts = await getPosts(userId);
      setPosts(listPosts);
    })();
    // BUG FIX: depend on userId so the posts are re-fetched when it
    // changes; the original empty array kept showing the first user's
    // posts forever.
  }, [userId]);

  // Curried: deleteItem(index) returns the actual event handler.
  const deleteItem = (index: number) => () => {
    setPosts((posts) => posts.filter((_, i) => i !== index));
  };

  return {
    posts,
    deleteItem,
  };
};
|
import { sendEmailVerification } from 'firebase/auth'
import { $h1, $Vertical } from 'lib/components/Generics'
import { useAuth } from 'lib/hooks/useAuth'
import { auth } from 'lib/api/firebase/app'
import LogRocket from 'logrocket'
import { useState } from 'react'
import { $Link, $ProfileSectionContainer, Oopsies } from '../common'
import { COLORS } from '@wormgraph/helpers'
import { FormattedMessage, useIntl } from 'react-intl'
import useWords from 'lib/hooks/useWords'
import useWindowSize from 'lib/hooks/useScreenSize'
import { manifest } from 'manifest'
// Profile onboarding checklist: prompts the user to verify their email
// and/or add a phone number; renders nothing once both are done.
const Onboarding = () => {
  const { user } = useAuth()
  console.log(`onboarding user <Profile/Onboarding>`, user)
  const { screen } = useWindowSize()
  // const isOnboardYoutube = localStorage.getItem('user.onboard.youtube')
  console.log('user', user)
  // Show the email step only when an email exists but is not verified yet.
  const showEmailVerification = !!user?.email && !user?.isEmailVerified
  const showPhone = !user?.phone
  const [isEmailSent, setIsEmailSent] = useState(false)
  const [errorSendingEmail, setErrorSendingEmail] = useState(false)
  const intl = useIntl()
  const words = useWords()
  const verifyEmailHeader = intl.formatMessage({
    id: 'profile.onboarding.verifyEmailHeader',
    defaultMessage: 'Verify your email',
    description: 'Header for onboarding step. Part of onboarding is a user needs to verify their email address.',
  })
  // const learnHeader = intl.formatMessage({
  //   id: 'profile.onboarding.learnHeader',
  //   defaultMessage: 'Learn about Lootbox',
  //   description: 'Header for onboarding step. Part of onboarding is a user needs to learn how to use Lootbox.',
  // })
  // Nothing left to onboard.
  if (!showEmailVerification && !showPhone) {
    return null
  }
  // Sends the Firebase verification email at most once per mount
  // (isEmailSent guards against repeat clicks).
  const handleSendVerificationEmail = () => {
    const user = auth.currentUser
    if (user && !isEmailSent) {
      sendEmailVerification(user)
        .then(() => {
          // Prevent spam, only allow one email to be sent
          setIsEmailSent(true)
          setErrorSendingEmail(false)
        })
        .catch((err) => {
          setErrorSendingEmail(true)
          LogRocket.captureException(err)
        })
    }
  }
  // const handleYoutubeClick = () => {
  //   localStorage.setItem('user.onboard.youtube', 'true')
  // }
  return (
    <$ProfileSectionContainer screen={screen}>
      <$Vertical spacing={4}>
        <$h1 style={{ fontStyle: 'italic' }}>{words.yourAlmostSetup}</$h1>
        {/* Anonymous accounts get a reminder about the login-link email. */}
        {user?.isAnonymous ? (
          <Oopsies
            icon="🥷"
            title="Your account is not verified"
            message={
              <span>
                You should have received a login email. Please check your email for a link to verify your account. This
                email expires after 4 hours. Check your spam folder.
              </span>
            }
          />
        ) : null}
        {/* Email verification step with a resend link. */}
        {showEmailVerification ? (
          <Oopsies
            icon="📧"
            title={verifyEmailHeader}
            message={
              <span>
                <FormattedMessage
                  id="profile.onboarding.verifyEmailText"
                  defaultMessage="You should have received an email from us. {checkSpamFolderText}{newlineCharacter}Can't find it? {resendVerificationEmail}"
                  description="This is the message that tells the user to verify their email address."
                  values={{
                    checkSpamFolderText: (
                      <mark>
                        <FormattedMessage
                          id="profile.onboarding.checkSpamFolderText"
                          defaultMessage="Check your spam folder."
                          description="This is the text that tells the user to check their spam folder."
                        />
                      </mark>
                    ),
                    newlineCharacter: <br />,
                    resendVerificationEmail: (
                      <$Link
                        target="_blank"
                        onClick={handleSendVerificationEmail}
                        style={{ textDecoration: 'underline' }}
                      >
                        <FormattedMessage
                          id="profile.onboarding.resendVerificationEmailLink"
                          defaultMessage="Resend verification email."
                          description="This is the link that will send the user a verification email."
                        />
                      </$Link>
                    ),
                  }}
                />
                {isEmailSent && ' ✅'}
                {errorSendingEmail && (
                  <span style={{ fontStyle: 'italic', color: `${COLORS.surpressedFontColor}77` }}>
                    {' '}
                    {words.anErrorOccured}. {words.pleaseTryAgainLater}.
                  </span>
                )}
              </span>
            }
          />
        ) : null}
        {/* Phone-number step: links out to the anonymous-signup flow. */}
        {showPhone ? (
          <Oopsies
            icon="📲"
            title="Add Phone Number"
            message={
              <span>
                <b>You cannot claim your rewards</b> until you add a phone number to your account.
                <$Link href={manifest.microfrontends.webflow.anonSignup} style={{ textDecoration: 'underline' }}>
                  Click here to add your Phone Number.
                </$Link>
              </span>
            }
          />
        ) : null}
        {/* {!isOnboardYoutube ? (
          <Oopsies
            icon="👨‍🎓"
            title={learnHeader}
            message={
              <FormattedMessage
                id="profile.onboarding.learnText"
                defaultMessage="Start off by checking out our youtube channel. Find us here 👉 {lootboxYoutubeLink}"
                description="This is the message that tells the user to learn about Lootbox via our Youtube channel."
                values={{
                  lootboxYoutubeLink: (
                    <$Link
                      target="_blank"
                      onClick={handleYoutubeClick}
                      href="https://www.youtube.com/playlist?list=PL9j6Okee96W4rEGvlTjAQ-DdW9gJZ1wjC"
                    >
                      <FormattedMessage
                        id="profile.onboarding.lootboxYoutubeLink"
                        defaultMessage="Getting started with Lootbox."
                        description="This is the link that will take the user to the Youtube channel."
                      />
                    </$Link>
                  ),
                }}
              />
            }
          />
        ) : null} */}
      </$Vertical>
    </$ProfileSectionContainer>
  )
}
export default Onboarding
|
<template>
  <!-- Selectable list; `level`/`group` classes toggle indent/group styling -->
  <div class="ivu-cell-group" :class="{ level: level, group: isGroup }">
    <LiItem
      v-for="(item, index) in currentList"
      :key="index"
      :liData="item"
      :textName="textName"
      :valueName="valueName"
      :childName="childName"
      :isGroup="isGroup"
    ></LiItem>
  </div>
</template>
<script>
import LiItem from './li-item';
// Selectable (single- or multi-select) list component. Child items emit
// 'on-click' with their value; selection state is stored on the items
// themselves as the `_isSelected` flag, and 'li-click' re-emits the
// current selection to the parent.
export default {
  name: 'TsUlList',
  components: { LiItem },
  props: {
    // Source data; may be nested via `childName`.
    dataList: Array,
    // Property name used for the display text of an item.
    textName: {
      type: String,
      default: 'name'
    },
    // Property name used as the unique value/key of an item.
    valueName: {
      type: String,
      default: 'id'
    },
    // Property name holding an item's children.
    childName: {
      type: String,
      default: 'children'
    },
    // When true, several items may be selected at once.
    multiple: {
      type: Boolean,
      default: false
    },
    level: {
      type: Boolean,
      default: false
    },
    isGroup: {
      type: Boolean,
      default: false
    },
    groupTextName: {
      type: String,
      default: 'name'
    },
    groupValueName: {
      type: String,
      default: 'id'
    },
    onClick: null
  },
  data() {
    let _this = this;
    return {
      currentList: _this.dataList || [],
      // Map from item value -> item, for O(1) lookup on click.
      flatList: [],
      // Array of values when `multiple`, a single value otherwise.
      selectedValue: _this.multiple ? [] : ''
    };
  },
  created() {},
  mounted() {
    let _this = this;
    _this.$on('on-click', this.handleClick);
    _this.filterData();
  },
  methods: {
    // Toggle selection for the clicked value and emit 'li-click'.
    handleClick(val) {
      let _this = this;
      let item = _this.flatList.get(val);
      if (_this.multiple) {
        // Multi-select: add or remove the value from the selection array.
        if (!item._isSelected) {
          _this.selectedValue.push(val);
        } else {
          _this.selectedValue.splice(_this.selectedValue.indexOf(val), 1);
        }
      } else if (!_this.multiple && _this.selectedValue == val) {
        // Single-select: clicking the already-selected item is a no-op.
        return;
      } else {
        // Single-select: clear the previous item's flag, then switch.
        (_this.selectedValue != null && _this.selectedValue != undefined) && _this.$set(_this.flatList.get(_this.selectedValue), '_isSelected', false);
        _this.selectedValue = val;
      }
      _this.$set(item, '_isSelected', !item._isSelected);
      _this.$emit('li-click', _this.selectedValue);
    },
    // Rebuild flatList from currentList and restore `_isSelected` flags.
    filterData() {
      let _this = this;
      _this.flatList = new Map();
      // NOTE(review): the 4th parameter `value` is never passed nor used;
      // recursion always calls with 3 arguments — confirm it can be dropped.
      function compileFlatList(dataList, valueName, childName, value) {
        dataList.forEach(item => {
          _this.multiple ? (_this.selectedValue.indexOf(item[valueName]) >= 0 ? _this.$set(item, '_isSelected', true) : '') : _this.selectedValue == item[valueName] && _this.$set(item, '_isSelected', true);
          _this.flatList.set(item[valueName], item);
          if (item[childName] && item[childName].length > 0) {
            compileFlatList(item[childName], valueName, childName);
          }
        });
      }
      if (_this.currentList && _this.currentList.length) {
        compileFlatList(_this.currentList, _this.valueName, _this.childName);
      }
    }
  },
  computed: {},
  watch: {
    // NOTE(review): Vue passes (newValue, oldValue) — these parameter
    // names are swapped, but only `this.dataList` is read, so behavior
    // is unaffected.
    dataList(oldValue, newValue) {
      let _this = this;
      _this.currentList = _this.dataList || [];
      _this.filterData();
    }
  }
};
</script>
|
import { Box, Button, Flex, Heading, Link } from "@chakra-ui/react";
import React from "react";
import NextLink from "next/link";
import { useLogoutMutation, useMeQuery } from "../generated/graphql";
import { isServer } from "../utils/isServer";
import { useRouter } from "next/router";
interface NavBarProps {}
const NavBar: React.FC<NavBarProps> = ({}) => {
const [{ data, fetching }] = useMeQuery({
pause: isServer(), // Don't run this on SSR.
});
const [{ fetching: logoutFetching }, logout] = useLogoutMutation();
const router = useRouter();
let body = null;
if (fetching) {
} else if (!data?.me) {
body = (
<>
<NextLink href="/login" passHref>
<Link mr={2} color="white">
Login
</Link>
</NextLink>
<NextLink href="/register" passHref>
<Link color="white">Register</Link>
</NextLink>
</>
);
} else {
body = (
<>
<Flex alignItems={"center"}>
<NextLink href="/create-post" passHref>
<Button as={Link} mr={2} variant={"solid"} color={"black"}>
Create Post
</Button>
</NextLink>
<Box mr={2}>{data.me.username}</Box>
<Button
variant={"link"}
onClick={async () => {
await logout();
router.reload();
}}
isLoading={logoutFetching}
>
Logout
</Button>
</Flex>
</>
);
}
return (
<Flex
bg="tomato"
p={4}
ml={"auto"}
position={"sticky"}
top={0}
zIndex={1}
color={"white"}
>
<Flex maxW={800} alignItems={"center"} flex={1} m={"auto"}>
<NextLink href={"/"} passHref>
<Link>
<Heading color={"white"}>LiReddit</Heading>
</Link>
</NextLink>
<Box ml={"auto"}>{body}</Box>
</Flex>
</Flex>
);
};
export default NavBar;
|
/**
 * Strategy implementation that sorts an int array in place using
 * quicksort with the Lomuto partition scheme (last element as pivot).
 */
public class QuickSortStrategy implements SortingStrategy {

    /** Sorts the whole array in ascending order, in place. */
    @Override
    public void sort(int[] arr) {
        quicksort(arr, 0, arr.length - 1);
    }

    /** Recursively sorts the inclusive range [lo, hi]. */
    private void quicksort(int[] a, int lo, int hi) {
        if (lo >= hi) {
            return; // ranges of size 0 or 1 are already sorted
        }
        int p = partition(a, lo, hi);
        quicksort(a, lo, p - 1);
        quicksort(a, p + 1, hi);
    }

    /**
     * Lomuto partition: uses a[hi] as the pivot and returns its final
     * index; on exit every element left of that index is smaller than
     * the pivot.
     */
    private int partition(int[] a, int lo, int hi) {
        final int pivot = a[hi];
        int boundary = lo;
        for (int cursor = lo; cursor < hi; cursor++) {
            if (a[cursor] < pivot) {
                exchange(a, boundary, cursor);
                boundary++;
            }
        }
        exchange(a, boundary, hi);
        return boundary;
    }

    /** Swaps the elements at positions i and j. */
    private void exchange(int[] a, int i, int j) {
        int tmp = a[i];
        a[i] = a[j];
        a[j] = tmp;
    }
}
|
#include "stackdraft.h"
#include <stdio.h>
#include <stdlib.h>
/* ************ Prototypes ************ */
/* *** Constructor/creator *** */
void CreateEmptyStackDraft(StackDraft* S, int capacity, Word authorDraft) {
  /* Initial state: S may be anything. */
  /* Final state: S is an empty stack able to hold `capacity` elements, */
  /* with valid indices 0..capacity. */
  /* An empty stack is characterised by TOP == Nil. */
  Table(*S) =
      (InfoTypeStackDraft*)malloc(capacity * sizeof(InfoTypeStackDraft));
  Top(*S) = Nil;
  CapacityStackDraft(*S) = capacity;
  AuthorDraft(*S) = authorDraft;
}
void dealocateStackDraft(StackDraft* s) {
  /* Initial state: s is initialised. */
  /* Final state: the element table is returned to the system; */
  /* Top(s) == Nil and CapacityStackDraft(s) == 0. */
  free(Table(*s));
  Top(*s) = Nil;
  CapacityStackDraft(*s) = 0;
  AuthorDraft(*s).Length = 0;
}
/* ************ State predicates ************ */
boolean IsEmptyStackDraft(StackDraft S) {
  /* Returns true iff the stack is empty (TOP == Nil, see above). */
  return Nil == Top(S);
}
boolean IsFullStackDraft(StackDraft S) {
  /* Returns true iff the element table is full. */
  return Top(S) == CapacityStackDraft(S) - 1;
}
/* ************ Pushing an element ************ */
void Push(StackDraft* S, InfoTypeStackDraft X) {
  /* Adds X as the new top element of stack S. */
  /* Initial state: S may be empty; the element table is NOT full. */
  /* Final state: X is the new TOP; TOP is incremented by 1. */
  if (IsEmptyStackDraft(*S)) {
    Top(*S) = 0;
  } else {
    Top(*S)++;
  }
  InfoTop(*S) = X;
}
/* ************ Popping an element ************ */
void Pop(StackDraft* S, InfoTypeStackDraft* X) {
  /* Removes the top element of S into X. */
  /* Initial state: S must not be empty. */
  /* Final state: X holds the old TOP value; TOP is decremented by 1 */
  /* (back to Nil when the last element is removed). */
  *X = InfoTop(*S);
  if (Top(*S) == 0) {
    Top(*S) = Nil;
  } else {
    Top(*S)--;
  }
}
/* ********* Resizing the stack ********* */
void expandStackDraft(StackDraft* s, int num) {
  /* Process: grows the capacity of s by num elements. */
  /* Initial state: the stack is initialised. */
  /* Final state: the stack capacity has grown by num. */
  /* NOTE(review): the realloc result is not checked for NULL — on */
  /* failure the old table pointer is overwritten and leaked. */
  Table(*s) = (InfoTypeStackDraft*)realloc(
      Table(*s), (CapacityStackDraft(*s) + num) * sizeof(InfoTypeStackDraft));
  CapacityStackDraft(*s) += num;
}
void shrinkStackDraft(StackDraft* s, int num) {
  /* Process: shrinks the capacity of s by num elements. */
  /* Initial state: the stack is initialised, capacity > num, and */
  /* nEff < capacity - num. */
  /* Final state: the stack capacity has shrunk by num. */
  Table(*s) = (InfoTypeStackDraft*)realloc(
      Table(*s), (CapacityStackDraft(*s) - num) * sizeof(InfoTypeStackDraft));
  CapacityStackDraft(*s) -= num;
}
|
<?php
namespace App\Components\Wildberries;
use App\Components\MarketplaceApi;
use App\Models\Shop;
use Carbon\CarbonImmutable;
use GuzzleHttp\Client;
use GuzzleHttp\Exception\GuzzleException;
/**
 * Wildberries supplier-API client for one shop.
 */
class Api implements MarketplaceApi
{
    // Path segment of the Wildberries "stocks" endpoint.
    protected const METHOD_STOCKS = 'stocks';

    // Maps raw Wildberries responses into application stock objects.
    protected WildberriesStockMapper $mapper;

    public function __construct(protected Shop $shop)
    {
        $this->mapper = new WildberriesStockMapper();
    }

    /**
     * Fetch the stocks report starting from the given date.
     *
     * The base URL comes from config('wildberries.supplier.url'); the date
     * is passed as the dateFrom query parameter (date part only).
     *
     * @throws GuzzleException
     * @throws \JsonException
     */
    public function getStocks(CarbonImmutable $from)
    {
        $stocksUrl = \config('wildberries.supplier.url');
        $method = self::METHOD_STOCKS;
        $url = $stocksUrl . "/{$method}?dateFrom={$from->toDateString()}";
        $response = $this->doRequest('GET', $url);
        return $this->mapper->mapFromResponse($response);
    }

    /**
     * Perform an HTTP request authorized with the shop's active token and
     * decode the JSON response body.
     *
     * @param string $method HTTP verb, e.g. 'GET'
     * @param string $url absolute request URL
     * @return mixed decoded JSON payload
     * @throws GuzzleException
     * @throws \JsonException on invalid JSON in the response body
     */
    protected function doRequest(string $method, string $url): mixed
    {
        $client = new Client();
        $response = $client->request($method, $url, [
            'headers' => [
                'Authorization' => $this->shop->getActiveToken()->token
            ],
        ]);
        return json_decode($response->getBody()->getContents(), true, 512, JSON_THROW_ON_ERROR);
    }
}
|
import pymysql
import db.dbConnections as db
from helpers.http_response import http_response
def append_user_fields(user):
    """
    Get the user fields from the harmony database and append them to the user.

    Fields already present in ``user`` are left untouched; the internal
    ``user_id`` column is stripped.  If no database record matches the
    email, ``user`` is left unchanged.

    Args:
        user: dict with at least an "email" key
    """
    user_h = get_user_by_email(user["email"])
    # BUG FIX: get_user_by_email returns None for an unknown email (and
    # after a handled DB error); iterating None raised a TypeError here.
    if not user_h:
        return
    # append all the fields from user_h to user if not already present
    for key in user_h:
        if key not in user:
            user[key] = user_h[key]
    # remove unnecessary user_id field
    if "user_id" in user:
        user.pop("user_id")
def get_user_by_email(email):
    """
    Search user by email in the personnel's and students tables in the harmony database.

    Personnels are matched on ``school_email``, students on ``email``.

    Args:
        email (str): the email of the user
    Returns:
        dict: user data, with an extra "table" key naming the source table
        and "email" set to the lookup email; None when no row matches.
        NOTE(review): on a DB error http_response(500, ...) is called and
        the function implicitly returns None — confirm http_response
        aborts the request, otherwise errors look like "not found".
    """
    # get a connection to the database (harmony)
    connection = db.get_harmony_connection()
    try:  # try to search the user by email
        with connection:  # use the connection (closed on block exit)
            with connection.cursor() as cursor:  # get a cursor
                sql = """
                    select *
                    from personnels
                    where school_email = %s;
                """  # SQL query to search the user by email
                cursor.execute(sql, email)  # execute the query
                result = cursor.fetchone()  # get the result
                if result:
                    # add table name to the result
                    result["table"] = "personnels"
                    result["email"] = email
                    return result
                else:
                    # no personnel matched: fall back to the students table
                    sql = """
                        select *
                        from students
                        where email = %s;
                    """  # SQL query to search the user by email
                    cursor.execute(sql, email)  # execute the query
                    result = cursor.fetchone()  # get the result
                    if result:
                        # add table name to the result
                        result["table"] = "students"
                        result["email"] = email
                        return result
                    else:
                        return None
    except pymysql.MySQLError:  # handle exceptions
        http_response(500, "An error occurred while searching the user credentials from database.")
def search_users(search):
    """
    Search users by keyword in the personnel's and students tables in the harmony database.

    The keyword is matched (SQL LIKE, substring) against the email,
    first name, family name and "first family" full name.

    Args:
        search (str): the keyword to search
    Returns:
        list: matching rows from both tables, each tagged with a "table"
        key naming its source table; None is implicitly returned after a
        handled DB error (http_response is called first).
    """
    # wrap the keyword for a substring LIKE match
    search_string = f"%{search}%"
    # get a connection to the database (harmony)
    connection = db.get_harmony_connection()
    try:  # try to search the user by keyword
        with connection:  # use the connection (closed on block exit)
            with connection.cursor() as cursor:  # get a cursor
                sql = """
                    select *
                    from personnels
                    where school_email like %s
                    or first_name like %s
                    or family_name like %s
                    or concat(first_name, ' ', family_name) like %s
                    ;
                """  # SQL query to search the user by keyword
                cursor.execute(sql, (search_string, search_string, search_string, search_string))  # execute the query
                results = cursor.fetchall()  # get the results
                for result in results:
                    # add table name to the result
                    result["table"] = "personnels"
                sql = """
                    select *
                    from students
                    where email like %s
                    or first_name like %s
                    or family_name like %s
                    or concat(first_name, ' ', family_name) like %s
                    ;
                """  # SQL query to search the user by keyword
                cursor.execute(sql, (search_string, search_string, search_string, search_string))  # execute the query
                student_results = cursor.fetchall()
                for result in student_results:
                    # add table name to the result
                    result["table"] = "students"
                # merge both result sets (handles an empty personnel set)
                if results:
                    results += student_results  # get the results
                else:
                    results = student_results
                return results
    except pymysql.MySQLError:  # handle exceptions
        http_response(500, "An error occurred while searching the user credentials from database.")
|
import React from "react";
import { useEffect } from "react";
import Image from "next/image";
import Link from "next/link";
import { useState } from "react";
import { IoCloseSharp } from "react-icons/io5";
import { MdAccountCircle } from "react-icons/md";
import { BsCartCheckFill } from "react-icons/bs";
import { FaRegSurprise } from "react-icons/fa";
import {
AiOutlineShoppingCart,
AiOutlinePlusCircle,
AiOutlineMinusCircle,
} from "react-icons/ai";
import { useRef } from "react";
import { useRouter } from "next/router";
// Top navigation bar: logo, product-category links, an account dropdown for
// logged-in users, and a slide-in shopping-cart sidebar. All cart state and
// its mutators are owned by the parent component and passed down as props.
const Navbar = ({
  logout,
  user,
  cart,
  addToCart,
  removeFromCart,
  clearCart,
  subTotal,
}) => {
  // dropdown: account-menu visibility; sidebar: cart-panel visibility.
  const [dropdown, setDropdown] = useState(false);
  const [sidebar, setSidebar] = useState(false);
  const router = useRouter();
  useEffect(() => {
    // On mount, auto-open the cart when it already holds items, but keep it
    // closed on routes where it would obstruct the flow (checkout, auth, ...).
    // NOTE(review): empty dependency array — later cart/pathname changes do
    // not re-run this effect; confirm that is intended.
    Object.keys(cart).length !== 0 && setSidebar(true);
    let cool = ["/checkout", "/", "/orders", "/order", "/myaccount","/forgot","/login","/signup"];
    if (cool.includes(router.pathname)) {
      setSidebar(false);
    }
  }, []);
  // const toggleDropdown = () => {
  //   setDropdown(!dropdown);
  // };
  // Show/hide the cart sidebar. The commented classList juggling below was
  // superseded by the `sidebar` state driving the className ternary in JSX.
  const toggleCart = () => {
    setSidebar(!sidebar);
    // if (ref.current.classList.contains("translate-x-full")) {
    //   ref.current.classList.remove("translate-x-full");
    //   ref.current.classList.add("translate-x-0");
    // } else if (!ref.current.classList.contains("translate-x-full")) {
    //   ref.current.classList.remove("translate-x-0");
    //   ref.current.classList.add("translate-x-full");
    // }
  };
  // NOTE(review): `ref` is attached to the sidebar div but never read —
  // leftover from the classList approach above.
  const ref = useRef();
  return (
    <div className="flex flex-col md:flex-row md:justify-start justify-center items-center py-2 shadow-2xl mb-1 sticky top-0 bg-gray-400 z-10">
      <div className="logo mx-5">
        <Link href={"/"}>
          <Image
            src="/logo.webp"
            alt="asd"
            width={"300"}
            height={"300"}
            priority
          ></Image>
        </Link>
      </div>
      <div className="nav">
        <ul className="flex space-x-14 font-bold md:text-md">
          <Link href={"/tshirts"}>
            <li>Tshirts</li>
          </Link>
          <Link href={"/hoodies"}>
            <li>Hoodies</li>
          </Link>
          <Link href={"/stickers"}>
            <li>Stickers</li>
          </Link>
          <Link href={"/mugs"}>
            <li>Mugs</li>
          </Link>
        </ul>
      </div>
      {/* Account icon + hover dropdown, login button, and cart toggle */}
      <div className="cart absolute right-4 top-4 cursor-pointer flex">
        <span
          onMouseOver={() => {
            setDropdown(true);
          }}
          onMouseLeave={() => {
            setDropdown(false);
          }}
        >
          {dropdown && (
            <div
              onMouseOver={() => {
                setDropdown(true);
              }}
              onMouseLeave={() => {
                setDropdown(false);
              }}
              className="absolute right-8 top-7 rounded-md shadow-lg border shadow-slate-500 w-56 px-5 py-2 bg-slate-600"
            >
              <ul>
                <Link href={"/myaccount"}>
                  <li className="py-1 hover:text-white text-blue-100 font-bold">
                    My Account
                  </li>
                </Link>
                <Link href={"/orders"}>
                  <li className="py-1 hover:text-white text-blue-100 font-bold">
                    My Orders
                  </li>
                </Link>
                <li
                  onClick={logout}
                  className="py-1 hover:text-white text-blue-100 font-bold"
                >
                  Logout
                </li>
              </ul>
            </div>
          )}
          {user.value && (
            <MdAccountCircle className=" text-xl md:text-3xl mx-3" />
          )}
        </span>
        {!user.value && (
          <Link href={"/login"}>
            <button className=" text-sm md:text-lg mx-3">Login</button>
          </Link>
        )}
        <AiOutlineShoppingCart
          onClick={toggleCart}
          className=" text-xl md:text-3xl"
        />
      </div>
      {/* Cart is start */}
      {/* Sidebar slides in/out via the right-0 / -right-full class toggle */}
      <div
        ref={ref}
        className={`sideCart px-2 absolute top-0 bg-slate-400 w-full h-fit md:w-3/5 lg:w-2/5 transform transition-all ${
          sidebar ? "right-0" : "-right-full"
        }`}
      >
        <h2 className="text-2xl font-bold my-5 mx-2">Shopping Cart</h2>
        <span
          onClick={toggleCart}
          className="absolute top-2 right-4 cursor-pointer"
        >
          <IoCloseSharp className="text-2xl font-extrabold mt-2" />
        </span>
        <hr />
        {/* One list item per cart entry, keyed by the cart-object key */}
        <ol className="list-disc ml-4 mr-2 font-serif font-semibold ">
          {Object.keys(cart).length == 0 && (
            <div className="text-2xl flex text-center justify-center mt-5 font-bold text-red-500">
              Oh! <FaRegSurprise className="mx-2 mt-1 " /> Your Cart is Empty...
            </div>
          )}
          {Object.keys(cart).map((k) => {
            return (
              <li key={k}>
                <div className="flex">
                  <div className="bg-red-500 w-2/3">
                    {cart[k].name}({cart[k].size}/{cart[k].varient})
                  </div>
                  <div className="bg-green-500 w-1/3 flex items-center justify-center text-lg">
                    <AiOutlineMinusCircle
                      onClick={() => {
                        removeFromCart(
                          k,
                          1,
                          cart[k].name,
                          cart[k].size,
                          cart[k].varient,
                          cart[k].price
                        );
                      }}
                      className="cursor-pointer"
                    />
                    <span className="mx-2">{cart[k].qty}</span>
                    <AiOutlinePlusCircle
                      onClick={() => {
                        addToCart(
                          k,
                          1,
                          cart[k].name,
                          cart[k].size,
                          cart[k].varient,
                          cart[k].price
                        );
                      }}
                      className="cursor-pointer"
                    />{" "}
                  </div>
                </div>
              </li>
            );
          })}
        </ol>
        {/* <hr /> */}
        <div className="subTotal">Subtotal : ₹{subTotal}</div>
        <div className="flex space-x-5">
          <Link href={"/checkout"}>
            <button
              disabled={Object.keys(cart).length === 0}
              className="disabled:bg-gray-400 flex text-white bg-green-900 border-0 py-2 pl-3 pr-4 my-5 focus:outline-none hover:bg-green-800 rounded"
            >
              <BsCartCheckFill className="mx-1 text-lg" />
              checkout
            </button>
          </Link>
          <button
            disabled={Object.keys(cart).length === 0}
            onClick={clearCart}
            className="disabled:bg-gray-400 flex text-white bg-green-900 border-0 py-2 px-4 my-5 focus:outline-none hover:bg-green-800 rounded"
          >
            clear cart
          </button>
        </div>
      </div>
    </div>
  );
};
export default Navbar;
|
from sqlalchemy import UUID, delete, select, update
from core.database import AsyncDatabase
from models.user import User
class UserRepository:
    """Data-access layer for ``User`` rows backed by an async SQLAlchemy session factory."""

    db: AsyncDatabase  # provides session_maker() used by every method

    def __init__(self, db: AsyncDatabase):
        self.db = db

    async def create_user(self, data: User) -> UUID:
        """Persist a new user and return its generated primary key.

        The id is read after an explicit flush, while the session is still
        open: reading ``data.id`` after commit would touch an expired
        attribute outside the session in async SQLAlchemy.
        """
        async with self.db.session_maker() as session:
            session.add(data)
            await session.flush()
            new_id = data.id
            await session.commit()
            return new_id

    async def get_users(self) -> list[User]:
        """Return every user in the table."""
        stmt = select(User)
        async with self.db.session_maker() as session:
            result = await session.scalars(stmt)
            # Materialize while the session is open; a lazy ScalarResult
            # would not satisfy the annotated list[User] contract.
            return list(result)

    async def get_user(self, id: UUID) -> User | None:
        """Fetch a user by primary key, or ``None`` if absent."""
        async with self.db.session_maker() as session:
            return await session.get(User, id)

    async def get_user_by_email(self, email: str) -> User | None:
        """Fetch a user by email, or ``None`` if absent."""
        stmt = select(User).where(User.email == email)
        async with self.db.session_maker() as session:
            return await session.scalar(stmt)

    async def update_user(self, id: UUID, data: dict) -> None:
        """Apply ``data`` as column updates to the user with ``id``."""
        stmt = update(User).where(User.id == id).values(**data)
        async with self.db.session_maker() as session:
            await session.execute(stmt)
            # Without an explicit commit the UPDATE is rolled back when
            # the session closes.
            await session.commit()

    async def delete_user(self, id: UUID) -> None:
        """Delete the user with ``id`` (no-op if it does not exist)."""
        stmt = delete(User).where(User.id == id)
        async with self.db.session_maker() as session:
            await session.execute(stmt)
            await session.commit()
|
/*
* Boomega - A modern book explorer & catalog application
* Copyright (C) 2020-2022 Daniel Gyoerffy
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package com.dansoftware.boomega.update;
import org.jetbrains.annotations.NotNull;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.CsvSource;
import static org.assertj.core.api.AssertionsForClassTypes.assertThat;
public class UpdateSearcherTest {

    /** Every pair (base, update) where update is newer must yield a result. */
    @ParameterizedTest
    @CsvSource({
            "0.0.0,0.0.1",
            "0.0.1,0.0.2",
            "0.1.0,0.2.0",
            "1.0.0,2.0.0",
            "2.1.6,2.1.7",
            "4.2.1,8.0.0"
    })
    void shouldHaveUpdate(String baseVersion, String updateVersion) {
        UpdateSearcher searcher = newSearcher(baseVersion, updateVersion);
        assertThat(searcher.search()).isNotNull();
    }

    /** Every pair (base, update) where update is older must yield nothing. */
    @ParameterizedTest
    @CsvSource({
            "0.0.1,0.0.0",
            "1.0.0,0.0.1",
            "2.0.1,2.0.0",
            "3.4.1,3.4.0",
            "8.1.22,8.1.0",
            "18.12.3,10.0.0"
    })
    void shouldHaveNoUpdate(String baseVersion, String updateVersion) {
        UpdateSearcher searcher = newSearcher(baseVersion, updateVersion);
        assertThat(searcher.search()).isNull();
    }

    /** Builds a searcher whose fetcher always reports the given version. */
    private UpdateSearcher newSearcher(String baseVersion, String updateVersion) {
        return new UpdateSearcher(new FixedReleasesFetcher(updateVersion), baseVersion);
    }

    /** Stub fetcher that returns exactly one release with a fixed version. */
    private static class FixedReleasesFetcher implements ReleasesFetcher {

        private final String version;

        FixedReleasesFetcher(String version) {
            this.version = version;
        }

        @NotNull
        @Override
        public Releases fetchReleases() {
            Release release = new Release();
            release.setVersion(version);
            Releases releases = new Releases();
            releases.add(release);
            return releases;
        }
    }
}
|
import { ChangeDetectionStrategy, Component, Inject } from '@angular/core';
import { AbstractControl, FormArray, FormControl, FormGroup, Validators } from '@angular/forms';
import { MAT_DIALOG_DATA, MatDialogRef } from '@angular/material/dialog';
import { QuestionModel, QuestionType } from '../../models/question.model';
import { FormUtils } from '../../ultils/form.utils';
@Component({
selector: 'assessment-form-question-builder-dialog',
templateUrl: './form-question-builder-dialog.component.html',
styleUrls: ['./form-question-builder-dialog.component.scss'],
changeDetection: ChangeDetectionStrategy.OnPush
})
export class FormQuestionBuilderDialogComponent {
  /** Question types offered in the type selector. */
  public questionTypes: string[] = [QuestionType.Paragraph, QuestionType.Checkbox];

  /** Reactive form backing the dialog. */
  public addQuestionForm: FormGroup = new FormGroup({
    questionType: new FormControl(QuestionType.Paragraph),
    isRequired: new FormControl(true),
    questionTitle: new FormControl('', Validators.required),
    questionOptions: new FormArray([])
  });

  /** Typed accessor for the options FormArray. */
  public get questionOptions(): FormArray {
    return this.addQuestionForm.controls['questionOptions'] as FormArray;
  }

  /** Currently selected question type. */
  public get questionType(): string {
    return this.addQuestionForm.controls['questionType'].value;
  }

  constructor(public dialogRef: MatDialogRef<FormQuestionBuilderDialogComponent>, @Inject(MAT_DIALOG_DATA) public data: QuestionModel) {
    // Paragraph questions carry no options, so drop any previously added ones.
    // NOTE(review): this subscription is never torn down; acceptable for a
    // dialog-scoped component, but confirm the dialog is destroyed with it.
    this.addQuestionForm.controls['questionType'].valueChanges.subscribe((type: QuestionType) => {
      if (type === QuestionType.Paragraph && this.questionOptions.length > 0) {
        // Fix: clear the FormArray in place. The previous code overwrote the
        // `controls` map entry with a fresh FormArray, which bypasses
        // FormGroup registration (parent link, value/validity recalculation).
        this.questionOptions.clear();
      }
    });
  }

  /** Appends an empty, required option control. */
  public addOption() {
    this.questionOptions.push(new FormControl('', Validators.required));
  }

  /** Casts an AbstractControl for strongly-typed template binding. */
  public getControls(value: AbstractControl): FormControl {
    return value as FormControl;
  }

  /** Validates the form and closes the dialog with the built question. */
  public submit(): void {
    if (FormUtils.validateForm(this.addQuestionForm)) {
      const raw = this.addQuestionForm.getRawValue();
      const question: QuestionModel = {
        title: raw.questionTitle,
        type: raw.questionType,
        isRequired: raw.isRequired,
        options: raw.questionOptions.map((value: string) => ({ title: value }))
      };
      // Checkbox questions must define at least one option.
      if (question.type === QuestionType.Checkbox && question.options.length <= 0) {
        return;
      }
      this.dialogRef.close(question);
    }
  }
}
|
<?php
namespace App\Observers;
use App\Models\Event\Event;
use App\Models\WebPush;
use App\Notifications\Event\EventCreatedNotification;
class EventObserver
{
    /**
     * Handle the Event "created" event.
     *
     * Sends a delayed push notification about the new event to every
     * stored web-push subscription.
     *
     * @param \App\Models\Event\Event $event
     * @return void
     */
    public function created(Event $event)
    {
        $notification = new EventCreatedNotification($event);
        $notification->delay(5);
        WebPush::all()
            ->each(fn (WebPush $web) => $web->notify($notification));
    }

    /**
     * Handle the Event "updated" event.
     *
     * @param \App\Models\Event\Event $event
     * @return void
     */
    public function updated(Event $event)
    {
        //
    }

    /**
     * Handle the Event "deleted" event.
     *
     * @param \App\Models\Event\Event $event
     * @return void
     */
    public function deleted(Event $event)
    {
        $this->deleteRelated($event);
    }

    /**
     * Delete every record owned by the event (images, address, ticket,
     * schedules, tags, organizers).
     *
     * Renamed from the misspelled `deletetion`.
     * NOTE(review): images are fetched and deleted one model at a time,
     * presumably so per-model delete hooks (e.g. file cleanup) run — confirm.
     *
     * @param \App\Models\Event\Event $event
     * @return void
     */
    private function deleteRelated(Event $event)
    {
        $event->images()->get()->each(fn ($img) => $img->delete());
        $event->address()->delete();
        $event->ticket()->delete();
        $event->schedules()->delete();
        $event->tags()->delete();
        $event->organizers()->delete();
    }

    /**
     * Handle the Event "restored" event.
     *
     * @param \App\Models\Event\Event $event
     * @return void
     */
    public function restored(Event $event)
    {
        //
    }

    /**
     * Handle the Event "force deleted" event.
     *
     * @param \App\Models\Event\Event $event
     * @return void
     */
    public function forceDeleted(Event $event)
    {
        $this->deleteRelated($event);
    }
}
|
use crate::{ctx::Context, Error, Result};
use chrono::Utc;
use serde::Deserialize;
use sqlx::{PgPool, Postgres};
use std::sync::Mutex;
/// Represents the Ticket that is visualized and sent to the client
#[derive(Clone, Debug, sqlx::FromRow)]
pub struct Ticket {
    /// Primary key of the ticket row.
    pub id: i32,
    /// `user_id` of the creating user.
    pub creator_id: i32,
    /// Creator's username, joined in from the `users` table.
    pub creator_name: String,
    pub title: String,
    /// Free-text workflow status (new tickets start as "to-do").
    pub status: String,
    pub description: String,
    /// UTC timestamp set when the row was inserted.
    pub created_at: chrono::DateTime<Utc>,
}
/// Represents a Ticket Stored in 'tickets' table
#[derive(Clone, Debug, sqlx::FromRow)]
pub struct TicketC {
    /// Primary key of the ticket row.
    pub id: i32,
    /// `user_id` of the creating user (join against `users` for the name).
    pub creator_id: i32,
    pub title: String,
    /// Free-text workflow status (new tickets start as "to-do").
    pub status: String,
    pub description: String,
    /// UTC timestamp set when the row was inserted.
    pub created_at: chrono::DateTime<Utc>,
}
/// This helps create tickets, thus it needs Deserialize
#[derive(Deserialize)]
pub struct TicketFC {
    pub title: String,
    /// Workflow status; `TicketFC::new` defaults this to "to-do".
    pub status: String,
    pub description: String,
}
impl Ticket {
pub fn from_ticket_c(ticket_c: TicketC, creator_name: &str) -> Self {
Ticket {
id: ticket_c.id,
creator_id: ticket_c.creator_id,
creator_name: creator_name.to_string(),
title: ticket_c.title,
status: ticket_c.status,
description: ticket_c.description,
created_at: ticket_c.created_at,
}
}
}
impl TicketFC {
pub fn new(title: impl Into<String>, description: impl Into<String>) -> Self {
TicketFC {
title: title.into(),
description: description.into(),
status: "to-do".to_string(),
}
}
}
/// Parameters for inserting a new row into the `users` table.
pub struct UserCreate {
    pub username: String,
    /// Stored verbatim into the `password` column; expected to be already
    /// hashed by the caller (per the field name) — the repository does not
    /// hash it.
    pub hashed_password: String,
}
/// model controller
///
/// Shared handle to the application's Postgres connection pool; cloning the
/// controller clones the `Arc`, so all clones share one pool.
#[derive(Clone)]
pub struct ModelController {
    // NOTE(review): PgPool is already internally synchronized and cheap to
    // clone; the Mutex only guards the handle itself, not query execution.
    db: std::sync::Arc<Mutex<PgPool>>,
}
impl ModelController {
pub fn new(db: PgPool) -> Result<Self> {
Ok(Self {
db: std::sync::Arc::new(Mutex::new(db)),
})
}
pub async fn create_ticket(&self, ctx: Context, ticket_fc: TicketFC) -> Result<Ticket> {
let creator_id = ctx.user_id();
let pool = self.db.lock().unwrap().to_owned();
match sqlx::query_as::<Postgres, TicketC>(
r#"INSERT INTO tickets (title, status, description, creator_id, created_at) VALUES ($1,$2,$3,$4,$5) RETURNING *;"#,
)
.bind(ticket_fc.title)
.bind(ticket_fc.status)
.bind(ticket_fc.description)
.bind(creator_id)
.bind(Utc::now())
.fetch_one(&pool)
.await
{
Ok(ticket) => {
println!("ticket {} - New ticket has been created", ticket.id);
Ok( Ticket {
creator_name: ctx.username().to_string(),
id: ticket.id,
creator_id: ticket.creator_id,
title: ticket.title,
status: ticket.status,
description: ticket.description,
created_at: ticket.created_at,
})
}
Err(e) => {
eprintln!("Failed to execute query: {:?}", e);
return Err(Error::TicketCreationFailed);
}
}
}
pub async fn get_tickets(&self, ctx: &Context) -> Result<Vec<Ticket>> {
let _creator_id = ctx.user_id();
let pool = self.db.lock().unwrap().to_owned();
match sqlx::query_as::<Postgres, Ticket>(r#"
SELECT tickets.id, tickets.title, tickets.status, tickets.description, tickets.created_at, tickets.creator_id, users.username AS creator_name
FROM tickets
INNER JOIN users ON tickets.creator_id = users.user_id;"#)
.fetch_all(&pool)
.await
{
Ok(tickets_result) => Ok(tickets_result),
Err(_) => Err(Error::NoTicketsFound),
}
}
pub async fn get_user_tickets(&self, ctx: &Context) -> Result<Vec<Ticket>> {
let pool = self.db.lock().unwrap().to_owned();
let _creator_id = ctx.user_id();
let username = ctx.username();
match sqlx::query_as::<Postgres, TicketC>("SELECT * FROM tickets WHERE creator_id=($1)")
.bind(_creator_id)
.fetch_all(&pool)
.await
{
Ok(tickets_result) => Ok(tickets_result
.iter()
.map(|ticket| Ticket::from_ticket_c(ticket.to_owned(), username))
.collect()),
Err(_) => Err(Error::NoTicketsFound),
}
}
pub async fn delete_ticket(&self, ctx: Context, id: i32) -> Result<()> {
let pool = self.db.lock().unwrap().to_owned();
let _creator_id = ctx.user_id();
match sqlx::query("DELETE from tickets WHERE id = $1")
.bind(id)
.execute(&pool)
.await
{
Ok(_) => {
return Ok(());
}
Err(e) => {
eprint!("ERROR: {:?}", e);
return Err(Error::TicketNotFound);
}
}
}
pub async fn register_new(&self, payload: UserCreate) -> Result<i32> {
let pool = self.db.lock().unwrap().to_owned();
let user_id: i32 = sqlx::query_scalar(
r#"INSERT INTO users (username, password) VALUES ($1, $2) RETURNING user_id ;"#,
)
.bind(&payload.username)
.bind(&payload.hashed_password)
.fetch_one(&pool)
.await
.map_err(|err| {
eprintln!("Error creating user: {:?}", err);
Error::UserCreateFail
})?;
Ok(user_id)
}
pub async fn get_pwd(&self, username: &str) -> Option<String> {
let pool = self.db.lock().unwrap().to_owned();
sqlx::query_scalar(r#"SELECT password FROM users WHERE username=($1);"#)
.bind(&username)
.fetch_optional(&pool)
.await
.unwrap()
}
pub async fn get_user_id(&self, username: &str) -> Result<i32> {
let pool = self.db.lock().unwrap().to_owned();
let user_id: i32 = sqlx::query_scalar(r#"SELECT user_id FROM users WHERE username=($1);"#)
.bind(username)
.fetch_one(&pool)
.await
.map_err(|_| Error::UserCreateFail)?;
Ok(user_id)
}
pub async fn get_username(&self, user_id: i32) -> Result<String> {
let pool = self.db.lock().unwrap().to_owned();
let s: String = sqlx::query_scalar(r#"SELECT username FROM users where user_id=($1);"#)
.bind(user_id)
.fetch_one(&pool)
.await
.map_err(|_| Error::UserIdNotFound)?;
Ok(s)
}
pub async fn username_exists(&self, username: &str) -> bool {
let pool = self.db.lock().unwrap().to_owned();
match sqlx::query_scalar(r#"SELECT EXISTS(SELECT 1 FROM users WHERE username=($1));"#)
.bind(username)
.fetch_one(&pool)
.await
{
Ok(exists) => exists,
Err(_) => false,
}
}
pub async fn update_ticket(&self, _ctx: &Context, id: i32, arg: &str) -> Result<()> {
let pool = self.db.lock().unwrap().to_owned();
let transaction = pool
.begin()
.await
.map_err(|_| Error::DatabaseError)
.unwrap();
// Perform the UPDATE operation
match sqlx::query("UPDATE tickets SET status = ($1) WHERE id = ($2);")
.bind(arg)
.bind(id)
.execute(&pool)
.await
{
Ok(_) => Ok(()),
Err(e) => {
// Rollback the transaction on UPDATE error
let _ = transaction
.rollback()
.await
.map_err(|_| Error::DatabaseError);
eprintln!("{:?}", e);
Err(Error::UpdateTicketError)
}
}
}
pub async fn get_ticket(&self, ctx: &Context, id: i32) -> Result<Ticket> {
let _creator_id = ctx.user_id();
let pool = self.db.lock().unwrap().to_owned();
match sqlx::query_as::<Postgres, Ticket>(r#"
SELECT tickets.id, tickets.title, tickets.status, tickets.description, tickets.created_at, tickets.creator_id, users.username AS creator_name
FROM tickets
INNER JOIN users ON tickets.creator_id = users.user_id
WHERE tickets.id = $1;
"#)
.bind(id)
.fetch_one(&pool)
.await
{
Ok(ticket) => Ok(ticket),
Err(_) => Err(Error::NoTicketsFound),
}
}
pub async fn update_ticket_description(
&self,
_ctx: &Context,
id: i32,
description: String,
) -> Result<()> {
let pool = self.db.lock().unwrap().to_owned();
dbg!(id);
let transaction = pool
.begin()
.await
.map_err(|_| Error::DatabaseError)
.unwrap();
// Perform the UPDATE operation
match sqlx::query("UPDATE tickets SET description = ($1) WHERE id = ($2)")
.bind(description)
.bind(id)
.execute(&pool)
.await
{
Ok(_) => Ok(()),
Err(e) => {
// Rollback the transaction on UPDATE error
let _ = transaction
.rollback()
.await
.map_err(|_| Error::DatabaseError);
eprintln!("{:?}", e);
Err(Error::UpdateTicketError)
}
}
}
}
|
import { sentenceCase } from 'change-case';
import { filter } from 'lodash';
import { useEffect, useState } from 'react';
import { Helmet } from 'react-helmet-async';
// @mui
import {
Avatar,
Box,
Button,
Card,
Checkbox,
CircularProgress,
Container,
IconButton,
MenuItem,
Paper,
Popover,
Stack,
Table,
TableBody,
TableCell,
TableContainer,
TablePagination,
TableRow,
Typography
} from '@mui/material';
// components
import Iconify from '../components/iconify';
import Label from '../components/label';
import Scrollbar from '../components/scrollbar';
// sections
import { UserListHead, UserListToolbar } from '../sections/@dashboard/user';
// mock
import { useGetAllVideosQuery } from '../redux/features/video/videoApi';
import VideoForm from './VideoForm';
// ----------------------------------------------------------------------
// Column definitions for the video table; `id` doubles as the sort key.
// The two trailing empty ids reserve the action/menu columns.
const TABLE_HEAD = [
  { id: 'title', label: 'Title', alignRight: false },
  { id: 'viewsCount', label: 'Views', alignRight: false },
  { id: 'likesCount', label: 'Likes', alignRight: false },
  { id: 'visibility', label: 'visibility', alignRight: false },
  { id: 'status', label: 'Status', alignRight: false },
  { id: 'createdAt', label: 'Created at', alignRight: false },
  { id: '' },
  { id: '' },
];
// ----------------------------------------------------------------------
// Three-way comparison of two rows on `orderBy`, descending:
// -1 when a sorts before b (a greater), 1 when after, 0 when equal.
function descendingComparator(a, b, orderBy) {
  const left = a[orderBy];
  const right = b[orderBy];
  if (right < left) return -1;
  if (right > left) return 1;
  return 0;
}
// Returns a comparator for `orderBy`: descending when order === 'desc',
// otherwise the negated (ascending) form.
function getComparator(order, orderBy) {
  const desc = (a, b) => descendingComparator(a, b, orderBy);
  if (order === 'desc') {
    return desc;
  }
  return (a, b) => -desc(a, b);
}
// Stable-sorts `array` with `comparator`, then (optionally) keeps only rows
// whose title contains `query` (case-insensitive).
// Fix: the previous version filtered the ORIGINAL unsorted array whenever a
// query was present, silently discarding the requested sort order; it also
// used lodash's `filter` where the native Array method suffices.
function applySortFilter(array, comparator, query) {
  // Decorate with the original index to make the sort stable.
  const stabilizedThis = array.map((el, index) => [el, index]);
  stabilizedThis.sort((a, b) => {
    const order = comparator(a[0], b[0]);
    if (order !== 0) return order;
    return a[1] - b[1];
  });
  const sorted = stabilizedThis.map((el) => el[0]);
  if (query) {
    const q = query.toLowerCase();
    return sorted.filter((item) => item.title.toLowerCase().indexOf(q) !== -1);
  }
  return sorted;
}
// Admin page listing the current user's videos in a sortable, searchable,
// paginated MUI table, with a per-row action menu and an edit dialog.
export default function UserPage() {
  // `open` holds the action-menu anchor element (doubles as the open flag).
  const [open, setOpen] = useState(null);
  const [page, setPage] = useState(0);
  const [order, setOrder] = useState('asc');
  const [selected, setSelected] = useState([]);
  const [orderBy, setOrderBy] = useState('createdAt');
  const [filterName, setFilterName] = useState('');
  const [rowsPerPage, setRowsPerPage] = useState(5);
  const [debouncedFilterName, setDebouncedFilterName] = useState(filterName);
  const [isEditing, setIsEditing] = useState(false); // Add state for editing
  const [editingId, setEditingId] = useState(null);
  // NOTE(review): `anchorEl` is set but the Popover anchors on `open` — this
  // state appears redundant.
  const [anchorEl, setAnchorEl] = useState(null);
  useEffect(() => {
    // Debounce the search box: push the term into the server query only
    // 800ms after the user stops typing.
    const handler = setTimeout(() => {
      setDebouncedFilterName(filterName);
    }, 800);
    return () => {
      clearTimeout(handler);
    };
  }, [filterName]);
  // Server-side query parameters (pagination, sorting, search).
  const params = {
    page: page,
    pageSize: rowsPerPage,
    sortBy: orderBy || 'createdAt',
    sortOrder: order,
    searchTerm: debouncedFilterName || "",
  };
  console.log(params, 'params from user page');
  const { isFetching, isLoading, isError, error, data, refetch } = useGetAllVideosQuery(params, { refetchOnReconnect: true, refetchOnMountOrArgChange: true, refetchOnFocus: true, });
  // NOTE(review): `content`, `isError`, `error`, `refetch` are unused here.
  let content;
  let USERLIST = data?.data || [];
  // Open the row action menu and remember which row it belongs to.
  const handleOpenMenu = (event, id) => {
    setAnchorEl(event.currentTarget);
    setOpen(event.currentTarget);
    setEditingId(id)
  };
  const handleCloseMenu = () => {
    setAnchorEl(null);
    setOpen(null);
  };
  // Toggle sort direction when re-clicking the active column.
  const handleRequestSort = (event, property) => {
    const isAsc = orderBy === property && order === 'asc';
    setOrder(isAsc ? 'desc' : 'asc');
    setOrderBy(property);
  };
  // Select/deselect every visible row (selection is keyed by title).
  const handleSelectAllClick = (event) => {
    if (event.target.checked) {
      const newSelecteds = USERLIST.map((n) => n.title);
      setSelected(newSelecteds);
      return;
    }
    setSelected([]);
  };
  // Toggle a single row in the selection list.
  const handleClick = (event, title) => {
    console.log(title, 'title from the clinck');
    const selectedIndex = selected.indexOf(title);
    let newSelected = [];
    if (selectedIndex === -1) {
      newSelected = newSelected.concat(selected, title);
    } else if (selectedIndex === 0) {
      newSelected = newSelected.concat(selected.slice(1));
    } else if (selectedIndex === selected.length - 1) {
      newSelected = newSelected.concat(selected.slice(0, -1));
    } else if (selectedIndex > 0) {
      newSelected = newSelected.concat(selected.slice(0, selectedIndex), selected.slice(selectedIndex + 1));
    }
    setSelected(newSelected);
  };
  const handleChangePage = (event, newPage) => {
    setPage(newPage);
  };
  const handleChangeRowsPerPage = (event) => {
    setPage(0);
    setRowsPerPage(parseInt(event.target.value, 10));
  };
  // Reset to the first page whenever the search term changes.
  const handleFilterByName = (event) => {
    setPage(0);
    setFilterName(event.target.value);
    console.log(event.target.value, 'event.target.value');
  };
  // Close the menu and open the edit dialog for the chosen video.
  const handleEdit = (id) => {
    console.log('edit here ', id);
    handleCloseMenu();
    setEditingId(id);
    setIsEditing(true);
  }
  const handleCloseEdit = () => {
    setIsEditing(false);
    setEditingId(null);
  }
  const emptyRows = page > 0 ? Math.max(0, (1 + page) * rowsPerPage - USERLIST.length) : 0;
  // NOTE(review): the data is already paginated/sorted/searched server-side
  // via `params`, yet it is sorted, filtered and sliced again below — for
  // page > 0 the slice start exceeds the returned page's length. Confirm
  // whether client-side processing should be removed.
  const filteredUsers = applySortFilter(USERLIST, getComparator(order, orderBy), filterName);
  const isNotFound = data?.data?.length === 0;
  return (
    <>
      <Helmet>
        <title> VideoList </title>
      </Helmet>
      <Container>
        <Stack direction="row" alignItems="center" justifyContent="space-between" mb={5}>
          <Typography variant="h4" gutterBottom>
            My Videos
          </Typography>
          <Button variant="contained" startIcon={<Iconify icon="eva:plus-fill" />}>
            Upload New Video
          </Button>
        </Stack>
        <Card>
          <UserListToolbar numSelected={selected.length} filterName={filterName} onFilterName={handleFilterByName} />
          <Scrollbar>
            <TableContainer sx={{ minWidth: 800 }}>
              <Table>
                <UserListHead
                  order={order}
                  orderBy={orderBy}
                  headLabel={TABLE_HEAD}
                  rowCount={USERLIST.length}
                  numSelected={selected.length}
                  onRequestSort={handleRequestSort}
                  onSelectAllClick={handleSelectAllClick}
                />
                <TableBody>
                  {
                    isFetching || isLoading ? (
                      <TableRow>
                        <TableCell colSpan={6}>
                          <Box display="flex" justifyContent="center">
                            <CircularProgress />
                          </Box>
                        </TableCell>
                      </TableRow>
                    ) : (
                      filteredUsers.slice(page * rowsPerPage, page * rowsPerPage + rowsPerPage).map((row) => {
                        const { _id: id, title, createdAt, status, viewsCount, likesCount, visibility } = row;
                        const selectedVideo = selected.indexOf(title) !== -1;
                        return (
                          <TableRow hover key={id} tabIndex={-1} role="checkbox" selected={selectedVideo}>
                            <TableCell padding="checkbox">
                              <Checkbox checked={selectedVideo} onChange={(event) => handleClick(event, title)} />
                            </TableCell>
                            <TableCell component="th" scope="row" padding="none">
                              <Stack direction="row" alignItems="center" spacing={2}>
                                <Avatar alt={title} src={"avatarUrl"} />
                                <Typography variant="subtitle2" noWrap>
                                  {title}
                                </Typography>
                              </Stack>
                            </TableCell>
                            <TableCell align="left">{viewsCount}</TableCell>
                            <TableCell align="left">{likesCount}</TableCell>
                            <TableCell align="left">{visibility}</TableCell>
                            <TableCell align="left">
                              <Label color={(status === 'banned' && 'error') || 'success'}>{sentenceCase(status)}</Label>
                            </TableCell>
                            <TableCell align="left">{new Date(createdAt).toLocaleString()}</TableCell>
                            <TableCell align="right">
                              <IconButton size="large" color="inherit" onClick={(event) => handleOpenMenu(event, id)}>
                                <Iconify icon={'eva:more-vertical-fill'} />
                              </IconButton>
                            </TableCell>
                            <TableCell align="right">
                              {/* NOTE(review): this Popover is rendered once per row but
                                  all instances share the single `open` state, so every
                                  row's popover opens at once; consider hoisting one
                                  Popover outside the map. */}
                              <Popover
                                open={Boolean(open)}
                                anchorEl={open}
                                onClose={handleCloseMenu}
                                anchorOrigin={{ vertical: 'top', horizontal: 'left' }}
                                transformOrigin={{ vertical: 'top', horizontal: 'right' }}
                                PaperProps={{
                                  sx: {
                                    p: 1,
                                    width: 140,
                                    '& .MuiMenuItem-root': {
                                      px: 1,
                                      typography: 'body2',
                                      borderRadius: 0.75,
                                    },
                                  },
                                }}
                              >
                                <MenuItem onClick={() => handleEdit(USERLIST.find(video => video._id === editingId)?._id)}>
                                  <Iconify icon={'eva:edit-fill'} sx={{ mr: 2 }} />
                                  {USERLIST.find(video => video._id === editingId)?.title}
                                </MenuItem>
                                <MenuItem sx={{ color: 'error.main' }}>
                                  <Iconify icon={'eva:trash-2-outline'} sx={{ mr: 2 }} />
                                  Delete
                                </MenuItem>
                              </Popover>
                            </TableCell>
                          </TableRow>
                        );
                      })
                    )
                  }
                  {emptyRows > 0 && (
                    <TableRow style={{ height: 53 * emptyRows }}>
                      <TableCell colSpan={6} />
                    </TableRow>
                  )}
                </TableBody>
                {isNotFound && (
                  <TableBody>
                    <TableRow>
                      <TableCell align="center" colSpan={6} sx={{ py: 3 }}>
                        <Paper
                          sx={{
                            textAlign: 'center',
                          }}
                        >
                          <Typography variant="h6" paragraph>
                            Not found
                          </Typography>
                          <Typography variant="body2">
                            No results found for
                            <strong>"{filterName}"</strong>.
                            <br /> Try checking for typos or using complete words.
                          </Typography>
                        </Paper>
                      </TableCell>
                    </TableRow>
                  </TableBody>
                )}
              </Table>
            </TableContainer>
          </Scrollbar>
          {/* NOTE(review): `count` uses the returned page's length, not the
              server's total row count — pagination beyond page 0 may be
              unreachable; confirm against the API response shape. */}
          <TablePagination
            rowsPerPageOptions={[5, 10, 25]}
            component="div"
            count={USERLIST.length}
            rowsPerPage={rowsPerPage}
            page={page}
            onPageChange={handleChangePage}
            onRowsPerPageChange={handleChangeRowsPerPage}
          />
        </Card>
      </Container>
      {
        isEditing && (
          <VideoForm id={editingId} onClose={handleCloseEdit} />
        )
      }
    </>
  );
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.