text stringlengths 1 1.05M |
|---|
import { Component } from '@angular/core';
import {LoadingController, NavController , ToastController} from 'ionic-angular';
import { DashboardPage } from '../dashboard/dashboard';
import { MenuController } from 'ionic-angular';
import 'rxjs/add/operator/map';
import {Http} from "@angular/http";
import {error} from "@angular/compiler/src/util";
import {Observable} from "rxjs";
// Login / registration page ("home") for an Ionic 3 application.
//
// NOTE(review): this file uses the deprecated @angular/http Http service and
// ionic-angular (Ionic 3) APIs. Several string literals below contain
// "<PASSWORD>" placeholders left by an anonymisation pass; they are preserved
// verbatim here, including one syntactically-broken expression in onEvent().
@Component({
  selector: 'page-home',
  templateUrl: 'home.html'
})
export class HomePage {
  // Static UI strings (Arabic) consumed by home.html.
  data: any;
  // Event hooks invoked from onEvent(); currently they only log to console.
  events: any;

  // --- Login form model and per-field validity flags (set by validateLogin) ---
  public usernameLogin: string;
  public passwordLogin: string;
  private isUsernameValidLogin: boolean = true;
  private isPasswordValidLogin: boolean = true;

  // --- Registration form model ---
  public usernameRegister: string;
  public passwordRegister: string;
  public countryRegister: string;
  public emailRegister: string;
  public mobileRegister: string;
  public genderRegister: string;
  public nationalityRegister: string;
  public identificationtypeRegister: string;
  public idnumberRegister: string;
  public addresssRegister: string;
  public stateRegister: string;
  public educationLevelRegister: string;
  public majorRegister: string;
  public universityRegister: string;
  public graduationYearRegister: string;
  public workplaceRegister: string;
  public voulntrySideRegister: string;

  // --- Registration per-field validity flags (reset by validateRegister) ---
  private isEmailValidRegister: boolean = true;
  private isUsernameValidRegister: boolean = true;
  private isPasswordValidRegister: boolean = true;
  private isCountryValidRegister: boolean = true;
  private isMobileValidRegister: boolean = true;
  private isNationalityValidRegister: boolean = true;
  private isIdnumberValidRegister: boolean = true;
  private isAddresssValidRegister: boolean = true;
  private isStateValidRegister: boolean = true;
  private isEducationLevelValidRegister: boolean = true;
  private isMajorValidRegister: boolean = true;
  private isUniversityValidRegister: boolean = true;
  private isGraduationYearValidRegister: boolean = true;
  private isWorkplaceValidRegister: boolean = true;
  private isVoulntrySideValidRegister: boolean = true;

  // Email-format pattern; currently unused because the regex check in
  // validateRegister() is commented out.
  private regex = /^(([^<>()\[\]\\.,;:\s@"]+(\.[^<>()\[\]\\.,;:\s@"]+)*)|(".+"))@((\[[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}])|(([a-zA-Z\-0-9]+\.)+[a-zA-Z]{2,}))$/;

  // Which segment is shown ("login" / "register"); see isEnabled().
  public selectedItem = "login";

  constructor( public menuCtrl: MenuController , public navCtrl: NavController,private http:Http , private loadingCtrl: LoadingController , private toastCtrl: ToastController ) {
    this.menuCtrl.enable(false, 'logMenu');
    // Users already persisted in localStorage skip straight to the dashboard.
    if (localStorage.getItem("user") === null) {
    }else{
      this.navCtrl.setRoot(DashboardPage);
    }
    this.data = {
      "segmentLogin" : "الدخول",
      "segmentRegister" : "تسجيل",
      "background" : "assets/images/background/39.jpg",
      "skip" : "تخطى",
      "logo" : "assets/logo of app.png",
      "boy" : "assets/icon/imoji_boy.png",
      "girl" : "assets/icon/imoji_girl.png",
      "login" : {
        "username" : " أدخل البريد الإلكتروني الخاص بك ",
        "password" : "<PASSWORD> ",
        "labelUsername" : "ال<PASSWORD> الإلكتر<PASSWORD>",
        "labelPassword" : "<PASSWORD> ",
        "forgotPassword" : "<PASSWORD>؟",
        "facebookLogin" : "تسجيل الدخول مع",
        "login" : "الدخول",
        "title" : "تسجيل الدخول إلى حسابك",
        "errorUser" : "لا يمكن أن يكون الحقل خاليًا",
        "errorPassword" : "<PASSWORD> أن يكون الحقل خاليًا"
      },
      "register": {
        "title" : "تسجيل",
        "username" : "أدخل اسمك الكامل",
        "city" : "مسقط رأسك",
        "password" : "<PASSWORD> <PASSWORD> ",
        "email" : "عنوان بريدك الإلكتروني",
        "Mobile" : "أدخل هاتفك المحمول",
        "Nationality" : "أدخل جنسيتك",
        "Identificationtype" : "ادخل رقمك السري",
        "IdNumber" : "أدخل رقم الهوية الخاص بك",
        "Addresss" : "من أي بلد أنت؟",
        "State" : "اعزب أو متزوج",
        "EducationLevel" : "أدخل مستوى التعليم الخاص بك",
        "Major" : "أدخل تخصصك",
        "University" : "أدخل جامعتك",
        "GraduationYear" : "ادخل سنة التخرج",
        "Workplace" : "أدخل مكان عملك",
        "VoulntrySide" : "الجانب التطوعي",
        "register" : "تسجيل",
        "lableUsername" : "الاسم الكامل",
        "lablePassword" : "<PASSWORD> ",
        "lableEmail" : "البريد الإلكتروني",
        "lableMobile" : "هاتفك المحمول",
        "lableNationality" : "الجنسية",
        "lableIdentificationtype" : "نوع الهوية",
        "lableIdNumber" : "رقم الهوية",
        "lableAddresss" : "عنوان",
        "lableState" : "حالة",
        "lableEducationLevel" : "مستوى التعليم",
        "lableMajor" : "تخصص",
        "lableUniversity" : "جامعة",
        "lableGraduationYear" : "سنة التخرج",
        "lableWorkplace" : "مكان العمل",
        "lableVoulntrySide" : " الجانب التطوعي",
        "lablePersonalImage" : "صورة شخصية",
        "errorUser" : "لا يمكن أن يكون الحقل خاليًا",
        "errorPassword" : "<PASSWORD>",
        "errorEmail" : "عنوان البريد الإلكتروني غير صالح",
        "errorCountry" : "لا يمكن أن يكون الحقل خاليًا",
        "errorCity" : "لا يمكن أن يكون الحقل خاليًا"
      }
    };
    // Placeholder hooks; onEvent() also calls these after its own handling.
    this.events = {
      onLogin: function (params) {
        console.log("onLogin");
      },
      onRegister: function (params) {
        console.log("onRegister");
      },
      onSkip: function (params) {
        console.log("onSkip");
      },
      onFacebook: function (params) {
        console.log("onFacebook");
      }
    };
  }

  url : any;
  // Last raw response body received from the API.
  dataa: any;

  // Central event dispatcher wired from the template's (click) bindings.
  // "onLogin" and "onRegister" perform the actual HTTP calls; any event name
  // additionally triggers the matching this.events hook.
  onEvent = (event: string): void => {
    if (event == "onLogin" ) {
      if (this.validateLogin()) {
        let datta = {
          useremail: this.usernameLogin,
          userpass: this.passwordLogin
        }
        console.log('clicked');
        const loader = this.loadingCtrl.create({
          content: " ... جلب البيانات",
        });
        loader.present();
        // NOTE(review): credentials are sent as GET query parameters over
        // plain HTTP, so they can end up in server/proxy logs.
        // NOTE(review): Observable.create(error.json()) looks like a misuse --
        // create() expects a subscriber function, not a value; the
        // conventional form is Observable.throw(...). Confirm before changing.
        this.http.get('http://api.hemam.online/loginuser' ,{params: datta} ).map(res => res.json()).catch(error => Observable.create(error.json())).subscribe(dataa => {
          var result = dataa;
          this.dataa= dataa;
          loader.dismiss();
          // The API apparently returns 0 for bad credentials and the user
          // record otherwise -- TODO confirm against the server code.
          if (result != 0 ){
            localStorage.setItem('user',JSON.stringify(result));
            this.navCtrl.setRoot(DashboardPage);
          }else{
            const toast = this.toastCtrl.create({
              message: 'بيانات الاعتماد الخاصة بك ليست صحيحة',
              duration: 4000
            });
            toast.present();
          }
        },
        err => {
          const toast = this.toastCtrl.create({
            message: 'شيء ما خطأ. حاول مرة اخرى',
            duration: 4000
          });
          toast.present();
          loader.dismiss();
        });
      }
      if (this.events[event]) {
        this.events[event]({
          'username' : this.usernameLogin,
          // NOTE(review): the expression below was redacted to "<PASSWORD>"
          // by an anonymisation pass (likely this.passwordLogin); as written
          // this line does not compile.
          'password' : <PASSWORD>
        });
      }
    } else if (event == "onRegister") {
      if (this.validateRegister()) {
        console.log('clicked ======');
        let datta = {
          usernameR : this.usernameRegister,
          passwordR : this.passwordRegister,
          countryR : this.countryRegister,
          emailR : this.emailRegister,
          mobileR : this.mobileRegister,
          genderR : this.genderRegister,
          nationalityR : this.nationalityRegister,
          identificationtypeR : this.identificationtypeRegister,
          idNumberR : this.idnumberRegister,
          addresssR : this.addresssRegister,
          stateR : this.stateRegister,
          educationLevelR : this.educationLevelRegister,
          majorR : this.majorRegister,
          universityR : this.universityRegister,
          graduationYearR : this.graduationYearRegister,
          workplaceR : this.workplaceRegister,
          voulntrySideR : this.voulntrySideRegister,
        }
        console.log('clicked');
        const loader = this.loadingCtrl.create({
          content: "... جلب البيانات",
        });
        loader.present();
        // NOTE(review): registration also goes out as a GET with the password
        // in the query string -- same concerns as the login call above.
        this.http.get('http://api.hemam.online/registeruser' ,{params: datta} ).map(res => res.json()).catch(error => Observable.create(error.json())).subscribe(dataa => {
          var result = dataa;
          this.dataa= dataa;
          loader.dismiss();
          // 1 signals "email already registered"; anything else is treated as
          // the created user record.
          if (result == 1 ){
            const toast = this.toastCtrl.create({
              message: 'البريد الالكتروني موجود بالفعل',
              duration: 4000
            });
            toast.present();
          }else{
            localStorage.setItem('user',JSON.stringify(result));
            this.navCtrl.setRoot(DashboardPage);
          }
        }, err => {
          const toast = this.toastCtrl.create({
            message: 'شيء ما خطأ. حاول مرة اخرى',
            duration: 4000
          });
          toast.present();
          loader.dismiss();
        });
      }
      if (this.events[event]) {
        this.events[event]({
        });
      }
    }
  }

  // Flags empty login fields and returns true only when both are filled.
  validateLogin():boolean {
    this.isUsernameValidLogin = true;
    this.isPasswordValidLogin = true;
    if (!this.usernameLogin ||this.usernameLogin.length == 0) {
      this.isUsernameValidLogin = false;
    }
    if (!this.passwordLogin || this.passwordLogin.length == 0) {
      this.isPasswordValidLogin = false;
    }
    return this.isPasswordValidLogin && this.isUsernameValidLogin;
  }

  // Flags empty registration fields and returns true when all checked fields
  // are filled. Country, gender and identification-type checks are commented
  // out, so their validity flags always stay true; the email regex check is
  // also disabled, so "email valid" currently only means "non-empty".
  validateRegister():boolean {
    this.isEmailValidRegister = true;
    this.isUsernameValidRegister = true;
    this.isPasswordValidRegister = true;
    this.isCountryValidRegister = true;
    this.isMobileValidRegister = true;
    this.isNationalityValidRegister = true;
    this.isIdnumberValidRegister = true;
    this.isAddresssValidRegister = true;
    this.isStateValidRegister = true;
    this.isEducationLevelValidRegister = true;
    this.isMajorValidRegister = true;
    this.isUniversityValidRegister = true;
    this.isGraduationYearValidRegister = true;
    this.isWorkplaceValidRegister = true;
    this.isVoulntrySideValidRegister = true;
    if (!this.usernameRegister ||this.usernameRegister.length == 0) {
      this.isUsernameValidRegister = false;
    }
    if (!this.emailRegister ||this.emailRegister.length == 0) {
      this.isEmailValidRegister = false;
    }
    if (!this.passwordRegister || this.passwordRegister.length == 0) {
      this.isPasswordValidRegister = false;
    }
    if (!this.mobileRegister || this.mobileRegister.length == 0) {
      this.isMobileValidRegister = false;
    }
    // if (!this.genderRegister || this.genderRegister.length == 0) {
    // this.isgenderValidRegister = false;
    // }
    // if (!this.countryRegister || this.countryRegister.length == 0) {
    // this.isCountryValidRegister = false;
    // }
    if (!this.nationalityRegister || this.nationalityRegister.length == 0) {
      this.isNationalityValidRegister = false;
    }
    // if (!this.identificationtypeRegister || this.identificationtypeRegister.length == 0) {
    // this.isIdentificationtypeValidRegister = false;
    // }
    if (!this.idnumberRegister || this.idnumberRegister.length == 0) {
      this.isIdnumberValidRegister = false;
    }
    if (!this.addresssRegister || this.addresssRegister.length == 0) {
      this.isAddresssValidRegister = false;
    }
    if (!this.stateRegister || this.stateRegister.length == 0) {
      this.isStateValidRegister = false;
    }
    if (!this.educationLevelRegister || this.educationLevelRegister.length == 0) {
      this.isEducationLevelValidRegister = false;
    }
    if (!this.majorRegister || this.majorRegister.length == 0) {
      this.isMajorValidRegister = false;
    }
    if (!this.universityRegister || this.universityRegister.length == 0) {
      this.isUniversityValidRegister = false;
    }
    if (!this.graduationYearRegister || this.graduationYearRegister.length == 0) {
      this.isGraduationYearValidRegister = false;
    }
    if (!this.workplaceRegister || this.workplaceRegister.length == 0) {
      this.isWorkplaceValidRegister = false;
    }
    if (!this.voulntrySideRegister || this.voulntrySideRegister.length == 0) {
      this.isVoulntrySideValidRegister = false;
    }
    // this.isEmailValidRegister = this.regex.test(this.emailRegister);
    return this.isEmailValidRegister &&
      this.isPasswordValidRegister &&
      this.isUsernameValidRegister &&
      this.isCountryValidRegister&&
      this.isMobileValidRegister &&
      this.isNationalityValidRegister &&
      this.isIdnumberValidRegister &&
      this.isAddresssValidRegister &&
      this.isStateValidRegister &&
      this.isEducationLevelValidRegister &&
      this.isMajorValidRegister &&
      this.isUniversityValidRegister &&
      this.isGraduationYearValidRegister &&
      this.isWorkplaceValidRegister &&
      this.isVoulntrySideValidRegister;
  }

  // True when `value` names the currently selected segment.
  isEnabled(value:string): boolean {
    return this.selectedItem == value;
  }
}
|
#!/bin/bash
# Kernel network tuning: back up /etc/sysctl.conf, append TCP settings,
# then apply them. Comments inside the heredoc are valid sysctl.conf syntax.

# Back up the current file with a timestamp suffix.
/bin/cp /etc/sysctl.conf /etc/sysctl.conf.`date +%F_%T`

cat >> /etc/sysctl.conf<<EOF
# FIX: the original wrote "net.ipv4.tcp_fintimeout", which is not a valid
# sysctl key; the correct name is net.ipv4.tcp_fin_timeout.
net.ipv4.tcp_fin_timeout = 2
net.ipv4.tcp_tw_reuse = 1
# NOTE: tcp_tw_recycle is unsafe behind NAT and was removed in Linux 4.12;
# on newer kernels this line is simply rejected (errors are discarded below).
net.ipv4.tcp_tw_recycle = 1
net.ipv4.tcp_syncookies = 1
net.ipv4.tcp_keepalive_time = 600
net.ipv4.ip_local_port_range = 4000 65000
net.ipv4.tcp_max_syn_backlog = 16384
net.ipv4.tcp_max_tw_buckets = 36000
net.ipv4.route.gc_timeout = 100
net.ipv4.tcp_syn_retries = 1
net.ipv4.tcp_synack_retries = 1
net.core.somaxconn = 16384
net.core.netdev_max_backlog = 16384
net.ipv4.tcp_max_orphans = 16384
#end
EOF

# Apply the new settings; suppress output (and errors for unknown keys).
sysctl -p &> /dev/null
|
// "Like" counters: clicking a thumbs-up icon bumps the number displayed
// after the leading "赞" character (e.g. "赞12" -> "赞13").
var zans=document.querySelectorAll("div.zan>span.fans_zan>i");
for(var fan of zans){
  fan.onclick=function(){
    var fan=this;
    // innerHTML is "赞<n>"; drop the first character to parse the count.
    var n=parseInt(fan.innerHTML.slice(1));
    n++;
    fan.innerHTML="赞"+n;
  }
}

// Video playback: show the overlay player and start <video id="v3">;
// the close button pauses and hides it again.
var play=$(".img_play");
var media=$(".media");
var close=$(".close");
var p=document.getElementById("v3");
play.click(function(){
  p.play();
  media.css({"display":"block"});
  close.css({"display":"block"});
})
close.click(function(){
  p.pause();
  media.css({"display":"none"});
  close.css({"display":"none"});
})

// Start the banner carousel once the page has loaded.
window.onload=function(){
  autoMove('img','span');
}
// Banner carousel: fades between images and highlights the matching
// indicator dot, auto-advancing every 2.5s. Clicking a dot jumps straight
// to its slide and restarts the timer.
// (tagImg/tagSpan are unused by the current implementation -- the selectors
// are hard-coded -- but the parameters are kept for caller compatibility.)
function autoMove(tagImg,tagSpan){
  var imgs=document.querySelectorAll("#banner>.ido_banner>img");
  var spans=document.querySelectorAll("#banner>.item>span");

  // Show only the slide at `index` and highlight its dot.
  function InitMove(index){
    for(var i=0;i<imgs.length;i++){
      imgs[i].style.opacity='0';
      spans[i].style.background='#ccc';
    }
    imgs[index].style.opacity='1';
    spans[index].style.background='#9a826d';
  }

  // Initial state: first slide visible.
  InitMove(0);

  // Advance to the next slide, wrapping back to 0 at the end.
  var count=1;
  function fMove(){
    if(count==imgs.length){
      count=0;
    }
    InitMove(count);
    count++;
  }

  // Auto-rotate timer.
  var scollMove=setInterval(fMove,2500);

  // BUG FIX: the original reset loop used `=` (assignment) instead of a
  // comparison, so the condition was always truthy and every slide was
  // reset anyway; we now reset all slides unconditionally, which is the
  // intended behavior. The original's redundant re-query of imgs/spans and
  // re-highlight of slide 0 (already done by InitMove(0)) was also dropped.
  for(var span of spans){
    span.onclick=function(){
      clearInterval(scollMove);
      for(var i=0;i<spans.length;i++){
        spans[i].style.background="#ccc";
        imgs[i].style.opacity="0";
      }
      this.style.background="#9a826d";
      // Each dot carries data-target = id of the image it controls.
      var id=this.getAttribute("data-target");
      var pic=document.getElementById(id);
      pic.style.opacity="1";
      scollMove=setInterval(fMove,2500);
    }
  }
}
// Fix the nav bar once the page scrolls past a threshold (see onscroll).
// Return the window's vertical scroll offset, preferring the standards-mode
// document element and falling back to document.body (quirks mode), or 0.
function getScrollTop(){
  if(document.documentElement&&document.documentElement.scrollTop){
    return document.documentElement.scrollTop;
  }
  if(document.body){
    return document.body.scrollTop;
  }
  return 0;
}
// Pin the fixed menu and the right-hand toolbar once the page has scrolled
// past the header (185px); hide them again above that point.
window.onscroll=function(){
  var menuFixed=document.getElementById("menu-fixed");
  var rightFixed=$(".right_fixed");
  //if(window.scrollY>185){
  if(getScrollTop()>185){
    menuFixed.style.display="block";
    rightFixed.css({"display":"block"});
  }else{
    menuFixed.style.display="none";
    rightFixed.css({"display":"none"});
  }
}
// Fetch the logged-in user's name from the query string (currently disabled).
// if(location.search!==""){
// var uid=location.search.split("=")[1];
// $.ajax({
// url:"http://127.0.0.1:3000/user/index",
// type:"get",
// data:{uid:uid},
// dataType:"json",
// success:function(result){
// // console.log(result);
// var uname=result[0].uname;
// $("#user").children().first().html(`${uname}`);
// }
// })
// }

// Home-page product recommendations: fetch the list and render one card
// per product into the last child of .new_pro.
$(function(){
  $.ajax({
    url:"http://127.0.0.1:3000/product/indexRecommend",
    type:"get",
    dataType:"json",
    success:function(result){
      // console.log(result.data);
      var results=result.data;
      var html="";
      for(var result of results){
        html+=`<div class="rever">
<div class="before">
<img src="${result.rming}"/>
</div>
<div class="after">
${result.r_abstract}
<p><a href="product_detail.html?lid=${result.rid}">查看详情</a></p>
</div>
</div>`
      }
      var recommend=$(".new_pro").children().last();
      recommend.html(html);
    }
  })
})
package com.pearson.docussandra.plugininterfaces;

import com.pearson.docussandra.domain.objects.Document;

/**
 * Interface that gets called anytime a document gets mutated. Be <b>warned</b>:
 * this could happen quite frequently, and if you are not careful you could
 * substantially reduce the performance of Docussandra.
 *
 * Although this is an abstract class, it should be treated like an interface.
 *
 * All implementing classes should be thread safe and provide a no argument constructor.
 *
 * Multiple implementations are allowed, however, they will run in an arbitrary order.
 *
 * @author https://github.com/JeffreyDeYoung
 */
public abstract class NotifierPlugin implements Plugin
{
    /**
     * Types of possible mutations.
     */
    public enum MutateType
    {
        CREATE,
        UPDATE,
        DELETE
    }

    /**
     * This method will get called any time a document is mutated. Be careful
     * about the amount of overhead this method produces, as it will be called
     * frequently.
     *
     * @param type Type of mutation that has occurred.
     * @param document Updated document for this mutation. Will be null if the
     * mutation was a delete, be sure to check for null.
     */
    public abstract void doNotify(MutateType type, Document document);
}
|
package com.coltsoftware.liquidsledgehammer;

import java.io.PrintStream;

/**
 * Implemented by components that can describe their command-line usage.
 */
public interface UsagePrinter {

    /**
     * Writes usage/help text to the given stream.
     *
     * @param out the stream to print usage information to (e.g. System.out)
     */
    // The redundant "public abstract" modifiers were dropped: interface
    // methods are implicitly public and abstract (JLS 9.4).
    void printUsage(PrintStream out);
}
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.ic_admin_panel_settings_outline = void 0;
var ic_admin_panel_settings_outline = {
"viewBox": "0 0 24 24",
"children": [{
"name": "g",
"attribs": {},
"children": [{
"name": "rect",
"attribs": {
"fill": "none",
"height": "24",
"width": "24"
},
"children": [{
"name": "rect",
"attribs": {
"fill": "none",
"height": "24",
"width": "24"
},
"children": []
}]
}]
}, {
"name": "g",
"attribs": {},
"children": [{
"name": "g",
"attribs": {},
"children": [{
"name": "g",
"attribs": {},
"children": [{
"name": "circle",
"attribs": {
"cx": "17",
"cy": "15.5",
"fill-rule": "evenodd",
"r": "1.12"
},
"children": [{
"name": "circle",
"attribs": {
"cx": "17",
"cy": "15.5",
"fill-rule": "evenodd",
"r": "1.12"
},
"children": []
}]
}, {
"name": "path",
"attribs": {
"d": "M17,17.5c-0.73,0-2.19,0.36-2.24,1.08c0.5,0.71,1.32,1.17,2.24,1.17 s1.74-0.46,2.24-1.17C19.19,17.86,17.73,17.5,17,17.5z",
"fill-rule": "evenodd"
},
"children": [{
"name": "path",
"attribs": {
"d": "M17,17.5c-0.73,0-2.19,0.36-2.24,1.08c0.5,0.71,1.32,1.17,2.24,1.17 s1.74-0.46,2.24-1.17C19.19,17.86,17.73,17.5,17,17.5z",
"fill-rule": "evenodd"
},
"children": []
}]
}, {
"name": "path",
"attribs": {
"d": "M18,11.09V6.27L10.5,3L3,6.27v4.91c0,4.54,3.2,8.79,7.5,9.82 c0.55-0.13,1.08-0.32,1.6-0.55C13.18,21.99,14.97,23,17,23c3.31,0,6-2.69,6-6C23,14.03,20.84,11.57,18,11.09z M11,17 c0,0.56,0.08,1.11,0.23,1.62c-0.24,0.11-0.48,0.22-0.73,0.3c-3.17-1-5.5-4.24-5.5-7.74v-3.6l5.5-2.4l5.5,2.4v3.51 C13.16,11.57,11,14.03,11,17z M17,21c-2.21,0-4-1.79-4-4c0-2.21,1.79-4,4-4s4,1.79,4,4C21,19.21,19.21,21,17,21z",
"fill-rule": "evenodd"
},
"children": [{
"name": "path",
"attribs": {
"d": "M18,11.09V6.27L10.5,3L3,6.27v4.91c0,4.54,3.2,8.79,7.5,9.82 c0.55-0.13,1.08-0.32,1.6-0.55C13.18,21.99,14.97,23,17,23c3.31,0,6-2.69,6-6C23,14.03,20.84,11.57,18,11.09z M11,17 c0,0.56,0.08,1.11,0.23,1.62c-0.24,0.11-0.48,0.22-0.73,0.3c-3.17-1-5.5-4.24-5.5-7.74v-3.6l5.5-2.4l5.5,2.4v3.51 C13.16,11.57,11,14.03,11,17z M17,21c-2.21,0-4-1.79-4-4c0-2.21,1.79-4,4-4s4,1.79,4,4C21,19.21,19.21,21,17,21z",
"fill-rule": "evenodd"
},
"children": []
}]
}]
}]
}]
}]
};
exports.ic_admin_panel_settings_outline = ic_admin_panel_settings_outline; |
<filename>rasa_nlu/featurizers/mitie_featurizer.py
from rasa_nlu.components import Component
from rasa_nlu.featurizers import Featurizer
from rasa_nlu.training_data import TrainingData
class MitieFeaturizer(Featurizer, Component):
    """Featurizer producing dense intent features from MITIE word vectors.

    Sentence/token features are the average of the per-token vectors from a
    ``mitie.total_word_feature_extractor`` supplied via the pipeline context.
    """

    name = "intent_featurizer_mitie"

    # What this component contributes to the pipeline context at each stage.
    context_provides = {
        "train": ["intent_features"],
        "process": ["intent_features"],
    }

    def ndim(self, feature_extractor):
        # type: (mitie.total_word_feature_extractor) -> int
        """Return the dimensionality of the extractor's word vectors."""
        import mitie
        return feature_extractor.num_dimensions

    def train(self, training_data, mitie_feature_extractor):
        # type: (TrainingData, mitie.total_word_feature_extractor) -> dict
        """Compute sentence features for every intent example."""
        import mitie
        sentences = [e["text"] for e in training_data.intent_examples]
        features = self.features_for_sentences(sentences, mitie_feature_extractor)
        return {
            "intent_features": features
        }

    def process(self, tokens, mitie_feature_extractor):
        # type: ([str], mitie.total_word_feature_extractor) -> dict
        """Compute features for a single, already-tokenized message."""
        import mitie
        features = self.features_for_tokens(tokens, mitie_feature_extractor)
        return {
            "intent_features": features
        }

    def features_for_tokens(self, tokens, feature_extractor):
        # type: ([str], mitie.total_word_feature_extractor) -> np.ndarray
        """Average the extractor's word vectors over ``tokens``.

        Returns a zero vector for an empty token list, avoiding a division
        by zero.
        """
        import numpy as np
        import mitie
        vec = np.zeros(self.ndim(feature_extractor))
        for token in tokens:
            vec += feature_extractor.get_feature_vector(token)
        if tokens:
            return vec / len(tokens)
        else:
            return vec

    def features_for_sentences(self, sentences, feature_extractor):
        # type: ([str], mitie.total_word_feature_extractor) -> np.ndarray
        """Tokenize each sentence with MITIE and stack the per-sentence
        feature vectors into a (num_sentences, ndim) matrix."""
        import mitie
        import numpy as np
        X = np.zeros((len(sentences), self.ndim(feature_extractor)))
        for idx, sentence in enumerate(sentences):
            tokens = mitie.tokenize(sentence)
            X[idx, :] = self.features_for_tokens(tokens, feature_extractor)
        return X
|
#!/bin/bash
# Run a single Dieharder RNG test with a fixed seed for reproducibility.
# Presumably: -d 12 selects test #12, -g 60 selects generator #60, and
# -S sets the seed -- TODO confirm against the installed dieharder's man page.
dieharder -d 12 -g 60 -S 2295580544
|
#!/bin/sh
# Meson post-install helper: installs the built xinstall binary as "install"
# under the configured bindir. DESTDIR/MESON_* are provided by Meson.
BINDIR="${1}"
# install -d creates the directory tree itself, so the original's preceding
# "mkdir -p" of the same path was redundant and has been dropped.
install -d "${DESTDIR}/${MESON_INSTALL_PREFIX}/${BINDIR}"
# Quote the build-root path too, in case it contains spaces.
install -m 0755 "${MESON_BUILD_ROOT}/src/xinstall/xinstall" "${DESTDIR}/${MESON_INSTALL_PREFIX}/${BINDIR}/install"
|
const AWS = require('aws-sdk')
const inquirer = require('inquirer')
// Extract the value of the "Name" tag from an AWS Tags array.
// FIX: the original was `Tags.find(...).Value`, which threw a TypeError when
// the resource had no tags or no Name tag; now returns undefined instead.
const getName = Tags => {
  const nameTag = (Tags || []).find( Tag => Tag.Key === 'Name' )
  return nameTag ? nameTag.Value : undefined
}
// Reduce an AWS Vpc description to the fields the prompts need.
const cleanVpcObject = Vpc => {
  const VpcId = Vpc.VpcId,
    CidrBlock = Vpc.CidrBlock,
    Name = getName(Vpc.Tags)
  return { VpcId, Name, CidrBlock, IsDefault: Vpc.IsDefault }
}
// Reduce an AWS Subnet description to the fields the prompts need.
const cleanSubnetObject = Subnet => {
  const SubnetId = Subnet.SubnetId,
    Name = getName(Subnet.Tags),
    CidrBlock = Subnet.CidrBlock,
    AvailabilityZone = Subnet.AvailabilityZone;
  return { SubnetId, Name, CidrBlock, AvailabilityZone }
}
// List all VPCs in the configured account/region, reduced to
// prompt-friendly objects. Resolves to an array of cleanVpcObject results.
const Vpcs = () => new AWS.EC2()
  .describeVpcs({}).promise()
  .then( data => data.Vpcs.map(cleanVpcObject) )
// List the subnets belonging to one VPC, reduced to prompt-friendly objects.
const Subnets = VpcId => new AWS.EC2()
  .describeSubnets({
    Filters: [{ Name: 'vpc-id', Values: [VpcId] }]
  }).promise()
  .then( data => data.Subnets.map(cleanSubnetObject) )
// Build the inquirer checkbox question used to pick one or more subnets.
// The answer is stored under "SubnetIds"; at least one selection is required.
const subnetsQuestion = subnets => {
  const toChoice = subnet => ({
    name: `(${subnet.SubnetId}) ${subnet.Name} - ${subnet.CidrBlock} on ${subnet.AvailabilityZone}`,
    value: subnet.SubnetId
  })
  return {
    type: 'checkbox',
    name: 'SubnetIds',
    message: 'Select subnets to deploy the lambda (recommend 3, one each availability zone)',
    choices: subnets.map(toChoice),
    validate: selection => selection.length ? true : 'Select at least one'
  }
}
// Prompt the user to choose subnets for the selected VPC and merge the
// answer ({SubnetIds}) into a copy of the running options object.
const configureSubnets = options =>
  Subnets(options.VpcId)
  .then(
    subnets => inquirer.prompt(subnetsQuestion(subnets))
  )
  .then( answer => Object.assign({}, options, answer))
// List the security groups belonging to one VPC (raw AWS objects).
const SecurityGroups = VpcId => new AWS.EC2()
  .describeSecurityGroups({
    Filters: [{ Name: 'vpc-id', Values: [VpcId] }]
  }).promise()
  .then( data => data.SecurityGroups )
// Build the inquirer checkbox question used to pick security groups.
// The answer is stored under "SecurityGroupsIds"; at least one is required.
const securityGroupsQuestions = groups => {
  const toChoice = group => ({
    name: `(${group.GroupId}) ${group.GroupName} - ${group.Description}`,
    value: group.GroupId
  })
  return {
    type: 'checkbox',
    name: 'SecurityGroupsIds',
    choices: groups.map(toChoice),
    message: 'Select security groups to use',
    validate: selection => selection.length ? true : 'Select at least one'
  }
}
// Prompt for security groups in the selected VPC and merge the answer
// into a copy of the options object.
const configureSecurityGroups = options =>
  SecurityGroups(options.VpcId)
  .then( s => inquirer.prompt(securityGroupsQuestions(s)) )
  .then( a => Object.assign({}, options, a))
// Single-choice list question for picking the VPC.
const vpcIdQuestion = vpcs => ({
  type: 'list',
  name: 'VpcId',
  choices: vpcs.map( vpc => ({
    name: `${vpc.Name} - ${vpc.CidrBlock} (${vpc.VpcId})`,
    value: vpc.VpcId
  })),
  message: 'Select vpc to use'
})
// Full VPC wizard: pick a VPC, then its subnets, then its security groups,
// accumulating answers onto the options object.
const configureVpc = options =>
  Vpcs().then( vpcs => inquirer.prompt(vpcIdQuestion(vpcs)) )
  .then( answer => Object.assign({}, options, answer))
  .then( configureSubnets )
  .then( configureSecurityGroups )
// Entry point: ask whether to connect to a VPC; if yes, run the wizard.
// Resolves to the (possibly augmented) options object.
module.exports = options =>
  inquirer.prompt({
    type: 'confirm',
    message: 'Do you want to connect with a VPC',
    default: false,
    name: 'connectWithVpc'
  }).then( answers => {
    if (answers.connectWithVpc) {
      return configureVpc(options)
    }
    return options;
  })
|
/*
* ftp4j - A pure Java FTP client library
*
* Copyright (C) 2008-2010 <NAME> (www.sauronsoftware.it)
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License version
* 2.1, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License 2.1 for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License version 2.1 along with this program.
* If not, see <http://www.gnu.org/licenses/>.
*/
package it.sauronsoftware.ftp4j;
import java.io.IOException;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.util.StringTokenizer;
/**
 * This is an NVT-ASCII character stream writer.
 *
 * @author <NAME>
 * @version 1.1
 */
class NVTASCIIWriter extends Writer {

	/**
	 * NVT line separator (CR LF).
	 */
	private static final String LINE_SEPARATOR = "\r\n";

	/**
	 * The wrapped stream.
	 */
	private OutputStream stream;

	/**
	 * The underlying writer.
	 */
	private Writer writer;

	/**
	 * Builds the writer.
	 *
	 * @param stream
	 *            The underlying stream.
	 * @param charsetName
	 *            The name of a supported charset.
	 * @throws IOException
	 *             If an I/O error occurs.
	 */
	public NVTASCIIWriter(OutputStream stream, String charsetName)
			throws IOException {
		this.stream = stream;
		this.writer = new OutputStreamWriter(stream, charsetName);
	}

	/**
	 * Causes this writer to be closed.
	 *
	 * @throws IOException
	 */
	public void close() throws IOException {
		synchronized (this) {
			writer.close();
		}
	}

	/**
	 * Flushes the underlying writer.
	 *
	 * @throws IOException
	 *             If an I/O error occurs.
	 */
	public void flush() throws IOException {
		synchronized (this) {
			writer.flush();
		}
	}

	/**
	 * Writes a portion of a character array to the underlying writer.
	 *
	 * @throws IOException
	 *             If an I/O error occurs.
	 */
	public void write(char[] cbuf, int off, int len) throws IOException {
		synchronized (this) {
			writer.write(cbuf, off, len);
		}
	}

	/**
	 * Changes the current charset.
	 *
	 * @param charsetName
	 *            The new charset.
	 * @throws IOException
	 *             If I/O error occurs.
	 * @since 1.1
	 */
	public void changeCharset(String charsetName) throws IOException {
		synchronized (this) {
			// Rewrap the original stream with the new charset; the old
			// OutputStreamWriter is abandoned without closing (closing it
			// would close the shared underlying stream).
			writer = new OutputStreamWriter(stream, charsetName);
		}
	}

	/**
	 * Writes a line in the stream.
	 *
	 * @param str
	 *            The line.
	 * @throws IOException
	 *             If an I/O error occurs.
	 */
	public void writeLine(String str) throws IOException {
		StringBuffer buffer = new StringBuffer();
		boolean atLeastOne = false;
		// Split on CR/LF. Internal line breaks are re-encoded below as
		// CR NUL (the NVT encoding for a bare carriage return), so the
		// trailing CR LF written at the end stays the only line terminator.
		StringTokenizer st = new StringTokenizer(str, LINE_SEPARATOR);
		int count = st.countTokens();
		for (int i = 0; i < count; i++) {
			String line = st.nextToken();
			if (line.length() > 0) {
				if (atLeastOne) {
					buffer.append('\r');
					buffer.append((char) 0);
				}
				buffer.append(line);
				atLeastOne = true;
			}
		}
		// Nothing is sent for an empty/separator-only input.
		if (buffer.length() > 0) {
			String statement = buffer.toString();
			// Sends the statement to the server.
			writer.write(statement);
			writer.write(LINE_SEPARATOR);
			writer.flush();
		}
	}

}
|
<filename>src/icons/Sort.tsx
// Generated by script, don't edit it please.
import createSvgIcon from '../createSvgIcon';
import SortSvg from '@rsuite/icon-font/lib/direction/Sort';

// Icon component wrapping the rsuite "Sort" SVG glyph (direction category).
const Sort = createSvgIcon({
  as: SortSvg,
  ariaLabel: 'sort',
  category: 'direction',
  displayName: 'Sort'
});

export default Sort;
|
#!/bin/bash
# conda-forge style build script for binutils: patch, configure, build.
set -e

# Apply the bundled patches for this binutils version.
# (Quoted "$file" so paths with spaces don't word-split.)
for file in ./crosstool_ng/packages/binutils/${PKG_VERSION}/*.patch; do
  patch -p1 < "$file"
done

# Fix permissions on license files--not sure why these are world-writable, but that's how
# they come from the upstream tarball
chmod og-w COPYING*

mkdir build
cd build

if [[ "$target_platform" == "osx-64" ]]; then
  export CPPFLAGS="$CPPFLAGS -mmacosx-version-min=${MACOSX_DEPLOYMENT_TARGET}"
  export CFLAGS="$CFLAGS -mmacosx-version-min=${MACOSX_DEPLOYMENT_TARGET}"
  export CXXFLAGS="$CXXFLAGS -mmacosx-version-min=${MACOSX_DEPLOYMENT_TARGET}"
  export LDFLAGS="$LDFLAGS -Wl,-pie -Wl,-headerpad_max_install_names -Wl,-dead_strip_dylibs"
fi

export LDFLAGS="$LDFLAGS -Wl,-rpath,$PREFIX/lib"
export HOST="${ctng_cpu_arch}-conda-linux-gnu"

# BUG FIX: the original left a trailing backslash after --with-sysroot,
# which made the "make -j..." line a continuation of the ./configure
# command (make and -j were passed as configure arguments, and make never
# ran as its own step).
../configure \
  --prefix="$PREFIX" \
  --target=$HOST \
  --enable-ld=default \
  --enable-gold=yes \
  --enable-plugins \
  --disable-multilib \
  --disable-sim \
  --disable-gdb \
  --disable-nls \
  --enable-default-pie \
  --with-sysroot=$PREFIX/$HOST/sysroot

make -j${CPU_COUNT}
|
<gh_stars>100-1000
/**
 * The Form Builder Field Base Component
 *
 * @module aui-form-field
 * @submodule aui-form-field-required
 */

var CSS_FIELD_REQUIRED = A.getClassName('form', 'field', 'required'),
    CSS_FIELD_TITLE = A.getClassName('form', 'field', 'title');

/**
 * An augmentation class which adds the required funcionality to form fields.
 *
 * @class A.FormFieldRequired
 * @param {Object} config Object literal specifying widget configuration
 *     properties.
 * @constructor
 */
A.FormFieldRequired = function() {};

A.FormFieldRequired.prototype = {
    // Markup appended after the field title when the field is required.
    TPL_REQUIRED: '<span class="' + CSS_FIELD_REQUIRED + '">*</span>',

    /**
     * Constructor for the `A.FormFieldRequired` component. Lifecycle.
     *
     * @method initializer
     * @protected
     */
    initializer: function() {
        this._uiSetRequired(this.get('required'));

        this.after({
            requiredChange: this._afterRequiredChange
        });
    },

    /**
     * Fired after the `required` attribute is set.
     *
     * @method _afterRequiredChange
     * @protected
     */
    _afterRequiredChange: function() {
        this._uiSetRequired(this.get('required'));
    },

    /**
     * Updates the ui according to the value of the `required` attribute.
     *
     * @method _uiSetRequired
     * @param {String} required
     * @protected
     */
    _uiSetRequired: function(required) {
        var titleNode = this.get('content').one('.' + CSS_FIELD_TITLE);
        if (required) {
            titleNode.append(this.TPL_REQUIRED);
        } else {
            // remove(true) detaches AND destroys the asterisk node.
            if (titleNode.one('.' + CSS_FIELD_REQUIRED)) {
                titleNode.one('.' + CSS_FIELD_REQUIRED).remove(true);
            }
        }
    }
};

/**
 * Static property used to define the default attribute configuration
 * for the `A.FormFieldRequired`.
 *
 * @property ATTRS
 * @type Object
 * @static
 */
A.FormFieldRequired.ATTRS = {
    /**
     * Flag indicating if this field is required.
     *
     * @attribute required
     * @default false
     * @type {Boolean}
     */
    required: {
        validator: A.Lang.isBoolean,
        value: false
    }
};
-- Most common restaurant type in Chicago: count restaurants per type and
-- keep only the single type with the highest count.
SELECT Restaurant_Type, COUNT(*)
FROM Restaurants
WHERE City = 'Chicago'
GROUP BY Restaurant_Type
ORDER BY COUNT(*) DESC
LIMIT 1
<reponame>googleapis/googleapis-gen
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/ads/googleads/v8/resources/product_bidding_category_constant.proto

require 'google/ads/googleads/v8/enums/product_bidding_category_level_pb'
require 'google/ads/googleads/v8/enums/product_bidding_category_status_pb'
require 'google/api/field_behavior_pb'
require 'google/api/resource_pb'
require 'google/api/annotations_pb'
require 'google/protobuf'

# Registers the ProductBiddingCategoryConstant message (proto3; several
# fields use explicit presence via proto3_optional) in the generated pool.
Google::Protobuf::DescriptorPool.generated_pool.build do
  add_file("google/ads/googleads/v8/resources/product_bidding_category_constant.proto", :syntax => :proto3) do
    add_message "google.ads.googleads.v8.resources.ProductBiddingCategoryConstant" do
      optional :resource_name, :string, 1
      proto3_optional :id, :int64, 10
      proto3_optional :country_code, :string, 11
      proto3_optional :product_bidding_category_constant_parent, :string, 12
      optional :level, :enum, 5, "google.ads.googleads.v8.enums.ProductBiddingCategoryLevelEnum.ProductBiddingCategoryLevel"
      optional :status, :enum, 6, "google.ads.googleads.v8.enums.ProductBiddingCategoryStatusEnum.ProductBiddingCategoryStatus"
      proto3_optional :language_code, :string, 13
      proto3_optional :localized_name, :string, 14
    end
  end
end

module Google
  module Ads
    module GoogleAds
      module V8
        module Resources
          # Ruby message class backing the descriptor registered above.
          ProductBiddingCategoryConstant = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.ads.googleads.v8.resources.ProductBiddingCategoryConstant").msgclass
        end
      end
    end
  end
end
|
<reponame>lephuongbg/lephuongbg.github.io<gh_stars>0
"use strict";
// vCard single-page app: a localized business card with a click ripple effect
// and animated resizing when switching languages.
(function() {
var app;
app = angular.module("vCard", ["ngAnimate"]);
// Controller: holds per-language card text and tracks the active language key.
app.controller("VCardController", [
function() {
this.data = {
en: {
name: "<NAME>",
position: "WEB DEVELOPER",
callToAction: "CONTACT ME"
},
vi: {
name: "<NAME>",
position: "NHÀ PHÁT TRIỂN WEB",
// Fixed typo: "LIÊN HẸ" -> "LIÊN HỆ" (Vietnamese for "contact").
callToAction: "LIÊN HỆ"
},
ja: {
name: "<NAME>",
position: "ウェブ開発者",
callToAction: "メールを送る"
}
};
return this.active = "en";
}
]);
// Directive: on click, spawns a one-shot .ripple element positioned at the
// trigger; the element removes itself when its CSS animation finishes.
app.directive("cardRippleTrigger", [
function() {
return {
restrict: "A",
link: function(scope, element) {
return element.on("click", function(event) {
var ripple;
ripple = document.createElement("div");
ripple.className = "ripple";
ripple.style.top = element[0].offsetTop + "px";
ripple.style.left = element[0].offsetLeft + "px";
ripple.addEventListener("animationend", function(event) {
return ripple.remove();
});
return angular.element(document.getElementById("card")).append(ripple);
});
}
};
}
]);
// Animation: tweens the parent's width/height from the outgoing sibling's
// size to the incoming element's size, then clears the inline size so CSS
// takes over again.
return app.animation(".lang-content", [
"$animateCss", function($animateCss) {
return {
enter: function(element) {
var parent, sibling;
parent = element.parent();
sibling = element.siblings();
$animateCss(parent, {
from: {
width: sibling.width(),
height: sibling.height()
},
to: {
width: element.width(),
height: element.height()
},
transitionStyle: '.5s cubic-bezier(.55, 0, .1, 1) all'
}).start().done(function() {
parent.css('width', '');
return parent.css('height', '');
});
return $animateCss(element, {
event: "enter",
structural: true
});
},
leave: function(element) {
return $animateCss(element, {
event: "leave",
structural: true
});
}
};
}
]);
})();
|
<gh_stars>10-100
package chylex.hee.packets.server;
import gnu.trove.set.hash.TShortHashSet;
import io.netty.buffer.ByteBuf;
import java.util.Collection;
import net.minecraft.entity.player.EntityPlayerMP;
import net.minecraft.server.MinecraftServer;
import chylex.hee.game.save.SaveData;
import chylex.hee.game.save.types.player.CompendiumFile;
import chylex.hee.mechanics.compendium.content.KnowledgeFragment;
import chylex.hee.packets.AbstractServerPacket;
import chylex.hee.packets.PacketPipeline;
import chylex.hee.packets.client.C09SimpleEvent;
import chylex.hee.packets.client.C09SimpleEvent.EventType;
// Server-bound packet: the client reports which Compendium knowledge fragments
// the player has read, so the server can persist them in the player's save data.
public class S01CompendiumReadFragments extends AbstractServerPacket{
// Global IDs of the fragments to mark as read, stored as shorts.
private TShortHashSet fragments = new TShortHashSet();
public S01CompendiumReadFragments(){}
public S01CompendiumReadFragments(Collection<KnowledgeFragment> fragments){
fragments.forEach(fragment -> this.fragments.add((short)fragment.globalID));
}
@Override
public void write(ByteBuf buffer){
// NOTE(review): the count is serialized as a single byte and read back with
// the signed readByte(), so more than 127 fragments in one packet would not
// round-trip correctly — confirm callers never batch that many.
buffer.writeByte(fragments.size());
for(short val:fragments.toArray())buffer.writeShort(val);
}
@Override
public void read(ByteBuf buffer){
int amt = buffer.readByte();
for(int a = 0; a < amt; a++)fragments.add(buffer.readShort());
}
@Override
protected void handle(EntityPlayerMP player){
// Persist each reported fragment into the player's Compendium save file.
for(short id:fragments.toArray())SaveData.player(player, CompendiumFile.class).markFragmentAsRead(id);
// On a non-dedicated (integrated) server, tell the client to restore the
// Compendium pause state once the fragments are recorded.
if (!MinecraftServer.getServer().isDedicatedServer())PacketPipeline.sendToPlayer(player, new C09SimpleEvent(EventType.RESTORE_COMPENDIUM_PAUSE));
}
}
|
package com.netcracker.ncstore.model;
import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.Setter;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;
import java.util.UUID;
/**
 * JPA entity for a single line in a shopping cart: a product reference plus a
 * quantity, owned by a {@link Cart}.
 */
@Entity
@Getter
@Setter
@AllArgsConstructor
@NoArgsConstructor
public class CartItem {
// Surrogate primary key, generated by the persistence provider.
@Id
@GeneratedValue
private UUID id;
// ID of the referenced product (kept as a plain UUID, not a JPA relation).
private UUID productId;
// Number of units of the product in this cart line.
private int count;
// Owning cart; mapped through the "cart_id" foreign-key column.
@ManyToOne
@JoinColumn(name = "cart_id")
private Cart cart;
/**
 * Creates a cart item without an ID; the ID is assigned when persisted.
 */
public CartItem(UUID productId, int count, Cart cart) {
this.productId = productId;
this.count = count;
this.cart = cart;
}
}
|
package co.com.bancolombia.commons.jms.api;
import javax.jms.Destination;
import javax.jms.JMSContext;
/**
 * Strategy for resolving the JMS {@link Destination} (queue or topic) that a
 * messaging component should use, given an active {@link JMSContext}.
 */
public interface MQDestinationProvider {
/** Creates or looks up the destination using the given JMS context. */
Destination create(JMSContext context);
}
|
import React from 'react'
import Link from 'gatsby-link'
import Helmet from 'react-helmet'
import { CSSTransitionGroup } from 'react-transition-group'
import Sidebar from '../layouts/sidebar'
// "/now" page: a static status blurb rendered beside the shared sidebar,
// wrapped in a one-shot fade-in transition (appear only; enter/leave disabled).
export default class Now extends React.Component {
// Renders static markup only; no props or state are read.
render() {
return (
<div className="container">
<Sidebar/>
<div className="blog-wrapper">
<CSSTransitionGroup
transitionName="fade"
transitionAppear={true}
transitionAppearTimeout={500}
transitionEnter={false}
transitionLeave={false}>
<h1>/now</h1>
<p>hello! i am currently redoing this website. see you in a month from now. check out my progress <a href="https://github.com/stylate/portfolio/tree/dev">here</a>. </p>
<p>also currently interning as a software engineer @ <a href="https://www.akamai.com/">akamai</a>. </p>
</CSSTransitionGroup>
</div>
</div>
);
}
}
|
<reponame>learnforpractice/micropython-cpp
# test passing addresses to viper
# NOTE: @micropython.viper compiles these functions to native code; ptr/ptr8
# are viper pointer types, so this script only runs under MicroPython, not
# CPython.
@micropython.viper
def get_addr(x:ptr) -> ptr:
    # Returns the raw buffer address of the object passed in.
    return x
@micropython.viper
def memset(dest:ptr8, c:int, n:int):
    # Byte-wise fill, like C memset: writes c into dest[0..n).
    for i in range(n):
        dest[i] = c
# create array and get its address
ar = bytearray('0000')
addr = get_addr(ar)
print(type(ar))
print(type(addr))
print(ar)
# pass array as an object
memset(ar, ord('1'), len(ar))
print(ar)
# pass direct pointer to array buffer
memset(addr, ord('2'), len(ar))
print(ar)
# pass direct pointer to array buffer, with offset
memset(addr + 2, ord('3'), len(ar) - 2)
print(ar)
|
from typing import List
def unique_paths_with_obstacles(obstacle_grid: List[List[int]]) -> int:
    """Count distinct right/down paths from the top-left to the bottom-right
    of a grid, where cells equal to 1 are obstacles that cannot be entered.

    Unlike the previous version, the caller's grid is NOT mutated: the DP runs
    over a separate table. An empty grid (or empty rows) yields 0 instead of
    raising IndexError.

    :param obstacle_grid: rectangular grid of 0 (free) / 1 (obstacle) cells
    :return: number of obstacle-avoiding monotone paths (0 if none exist)
    """
    if not obstacle_grid or not obstacle_grid[0]:
        return 0
    m, n = len(obstacle_grid), len(obstacle_grid[0])
    # If the starting cell has an obstacle, there is no path.
    if obstacle_grid[0][0] == 1:
        return 0
    # dp[i][j] = number of paths reaching cell (i, j).
    dp = [[0] * n for _ in range(m)]
    dp[0][0] = 1
    for i in range(m):
        for j in range(n):
            if obstacle_grid[i][j] == 1:
                dp[i][j] = 0  # obstacles admit no paths through them
            elif i or j:  # skip the seeded start cell
                # Paths arrive only from above or from the left.
                dp[i][j] = (dp[i - 1][j] if i else 0) + (dp[i][j - 1] if j else 0)
    return dp[m - 1][n - 1]
def process_error_codes(error_list):
    """Build a mapping from error code to error message.

    :param error_list: iterable of (code, message) pairs
    :return: dict keyed by code; a later duplicate code overwrites an earlier
        one, exactly as in the original loop-based construction
    """
    return {code: message for code, message in error_list}
<gh_stars>1-10
package implementation;
import java.io.BufferedReader;
import java.io.InputStreamReader;
/**
 * BOJ 9655 (stone game): players alternately remove 1 or 3 stones; the player
 * who takes the last stone wins. SK moves first, so SK wins exactly when the
 * initial count N is odd; otherwise CY wins.
 */
public class Boj9655 {
public static void main(String[] args) throws Exception{
BufferedReader reader = new BufferedReader(new InputStreamReader(System.in));
int stones = Integer.parseInt(reader.readLine());
// Parity alone decides the winner of the 1-or-3 stone game.
System.out.println(stones % 2 == 0 ? "CY" : "SK");
}
}
|
#!/bin/sh
# LSF batch job: trains the "beacon" model for 500 epochs on one V100 GPU.
# The #BSUB lines below are scheduler directives, not comments to edit freely.
#BSUB -q gpuv100
#BSUB -gpu "num=1:mode=exclusive_process"
#BSUB -J train_beacon
#BSUB -n 1
#BSUB -W 24:00
#BSUB -B
#BSUB -N
#BSUB -R "rusage[mem=4GB]"
#BSUB -o logs/%J.out
#BSUB -e logs/%J.err
# Load the exact toolchain versions the training script expects.
module load python3/3.8.4
module load cuda/8.0
module load cudnn/v7.0-prod-cuda8
module load ffmpeg/4.2.2
python3 src/train_model.py --n-epochs=500 beacon
|
#shellcheck shell=sh disable=SC2004,SC2016
% FIXTURE: "$SHELLSPEC_HELPERDIR/fixture"
% LIB: "$SHELLSPEC_HELPERDIR/fixture/lib"
% BIN: "$SHELLSPEC_HELPERDIR/fixture/bin"
% TMPBASE: "$SHELLSPEC_TMPBASE"
% MOCKDIR: "$SHELLSPEC_HELPERDIR/fixture/mock"
# Do not place this Include inside a Describe block; posh fails otherwise.
Include "$SHELLSPEC_LIB/core/dsl.sh"
Describe "core/dsl.sh"
# shellspec_group_id: stores the given group id and block number in globals.
Describe "shellspec_group_id()"
setup() {
SHELLSPEC_GROUP_ID="" SHELLSPEC_BLOCK_NO=""
}
check() {
echo "$SHELLSPEC_GROUP_ID"
echo "$SHELLSPEC_BLOCK_NO"
}
BeforeRun setup
AfterRun check
It 'sets group id'
When run shellspec_group_id 10 20
The line 1 of stdout should eq 10
The line 2 of stdout should eq 20
End
End
# shellspec_example_id: stores example id, example number and block number.
Describe "shellspec_example_id()"
setup() {
SHELLSPEC_EXAMPLE_ID="" SHELLSPEC_EXAMPLE_NO="" SHELLSPEC_BLOCK_NO=""
}
check() {
echo "$SHELLSPEC_EXAMPLE_ID"
echo "$SHELLSPEC_EXAMPLE_NO"
echo "$SHELLSPEC_BLOCK_NO"
}
BeforeRun setup
AfterRun check
It 'sets group id'
When run shellspec_example_id 10 20 30
The line 1 of stdout should eq 10
The line 2 of stdout should eq 20
The line 3 of stdout should eq 30
End
End
# shellspec_metadata: emits the METADATA protocol line via shellspec_output.
Describe "shellspec_metadata()"
mock() { shellspec_output() { echo "$1"; }; }
BeforeRun mock
It 'outputs METADATA'
When run shellspec_metadata
The stdout should eq 'METADATA'
End
End
# shellspec_finished: emits the FINISHED protocol line via shellspec_output.
Describe "shellspec_finished()"
mock() { shellspec_output() { echo "$1"; }; }
BeforeRun mock
It 'outputs FINISHED'
When run shellspec_finished
The stdout should eq 'FINISHED'
End
End
# shellspec_yield: dispatches to the shellspec_yield<N> function selected by
# SHELLSPEC_BLOCK_NO, forwarding any arguments.
Describe "shellspec_yield()"
shellspec_yield12345() { echo "yield12345 $#"; }
echo_lineno() { echo "[$SHELLSPEC_LINENO]"; }
BeforeRun "SHELLSPEC_BLOCK_NO=12345"
AfterRun echo_lineno
It 'calls current block'
When run shellspec_yield
The line 1 of stdout should eq "yield12345 0"
The line 2 of stdout should eq "[]"
End
It 'calls current block with arguments'
When run shellspec_yield arg
The line 1 of stdout should eq "yield12345 1"
The line 2 of stdout should eq "[]"
End
End
# shellspec_begin: emits BEGIN and records the specfile name and spec number.
Describe "shellspec_begin()"
mock() { shellspec_output() { echo "$1"; }; }
echo_specfile_specno() { echo "$SHELLSPEC_SPECFILE $SHELLSPEC_SPEC_NO"; }
BeforeRun mock
AfterRun echo_specfile_specno
It 'outputs BEGIN'
When run shellspec_begin specfile "$SHELLSPEC_SPEC_NO"
The line 1 of stdout should eq "BEGIN"
The line 2 of stdout should eq "specfile $SHELLSPEC_SPEC_NO"
End
End
# shellspec_execdir: resolves @project/@basedir/@specfile directory aliases to
# absolute paths and cd's there (cd is mocked to just print its target).
Describe "shellspec_execdir()"
Before SHELLSPEC_SPECFILE="helper/fixture/spec_structure/basedir/dir1/dir2/test_spec.sh"
cd() { echo "$1"; }
Parameters
@project ""
@project/ "/"
@project/dir "/dir"
@basedir "/helper/fixture/spec_structure/basedir"
@basedir/ "/helper/fixture/spec_structure/basedir/"
@basedir/dir "/helper/fixture/spec_structure/basedir/dir"
@specfile "/helper/fixture/spec_structure/basedir/dir1/dir2"
@specfile/ "/helper/fixture/spec_structure/basedir/dir1/dir2/"
@specfile/dir "/helper/fixture/spec_structure/basedir/dir1/dir2/dir"
other "" # May change the specifications in the future
End
It 'changes the execution directory'
When run shellspec_execdir "$1"
The output should eq "${SHELLSPEC_PROJECT_ROOT}$2"
End
End
# shellspec_perform: stores the enabled flag and filter for the next example.
Describe "shellspec_perform()"
echo_enabled_filter() { echo "$SHELLSPEC_ENABLED $SHELLSPEC_FILTER"; }
AfterRun echo_enabled_filter
It 'sets filter variables'
When run shellspec_perform enabled filter
The stdout should eq "enabled filter"
End
End
# shellspec_include_pack: packs a source path and a quoted argument list into
# the two named variables, for later consumption by Include.
Describe "shellspec_include_pack()"
It 'packs data for include'
When call shellspec_include_pack src args "$FIXTURE/source.sh" a b c
The variable src should eq "$FIXTURE/source.sh"
The variable args should eq "'a' 'b' 'c'"
End
End
# shellspec_before_first_block: runs the "before all" hooks once before the
# first block, skips them in dry-run mode or when already skipped, and reports
# a BEFORE_ALL_ERROR when a hook fails.
Describe "shellspec_before_first_block()"
BeforeRun SHELLSPEC_BLOCK_NO=12345
before_first_block() {
shellspec_call_before_hooks() { echo "$@"; }
shellspec_before_first_block
shellspec_call_before_hooks() { :; }
}
It 'calls before all hooks'
When run before_first_block
The stdout should eq "ALL"
End
Context 'when dry-run mode'
BeforeRun SHELLSPEC_DRYRUN=1
It 'does not call before all hooks'
When run before_first_block
The stdout should eq ""
End
End
Context 'when skipeed'
BeforeRun "shellspec_on SKIP"
It 'does not call before all hooks'
When run before_first_block
The stdout should eq ""
End
End
Context 'when shellspec_call_before_hooks failed'
before_first_block() {
shellspec_call_before_hooks() { echo "$@"; return 123; }
shellspec_output() { echo "$@"; }
shellspec_before_first_block
shellspec_call_before_hooks() { :; }
}
It 'outputs hook error'
When run before_first_block
The line 1 should eq "ALL"
The line 2 should eq "BEFORE_ALL_ERROR"
End
End
End
# shellspec_after_last_block: runs the "after all" hooks once after the last
# block and reports an AFTER_ALL_ERROR when a hook fails.
Describe "shellspec_after_last_block()"
BeforeRun SHELLSPEC_BLOCK_NO=12345
after_last_block() {
shellspec_call_after_hooks() { echo "$@"; }
shellspec_after_last_block
shellspec_call_after_hooks() { :; }
}
It 'calls after all hooks'
When run after_last_block
The stdout should eq "ALL"
End
Context 'when shellspec_call_after_hooks failed'
after_last_block() {
shellspec_call_after_hooks() { echo "$@"; return 123; }
shellspec_output() { echo "$@"; }
shellspec_after_last_block
shellspec_call_after_hooks() { :; }
}
It 'outputs hook error'
When run after_last_block
The line 1 should eq "ALL"
The line 2 should eq "AFTER_ALL_ERROR"
End
End
End
# shellspec_after_block: runs the mock-teardown hooks at the end of each block.
Describe "shellspec_after_block()"
mock() {
shellspec_call_after_hooks() { echo "$@"; }
}
BeforeRun mock
It 'calls after mock hooks'
When run shellspec_after_block
The stdout should eq "MOCK"
End
End
# shellspec_end: emits END followed by the final example count.
Describe "shellspec_end()"
mock() { shellspec_output() { echo "$1"; }; }
echo_example_count() { echo "$SHELLSPEC_EXAMPLE_COUNT"; }
BeforeRun mock
AfterRun echo_example_count
It 'outputs END'
When run shellspec_end 1234
The line 1 of stdout should eq "END"
The line 2 of stdout should eq "1234"
End
End
# shellspec_description: accumulates the description string; a literal "@" is
# replaced with the current example's line-number range.
Describe "shellspec_description()"
BeforeRun SHELLSPEC_DESCRIPTION=
BeforeRun SHELLSPEC_LINENO_BEGIN=10 SHELLSPEC_LINENO_END=20
AfterRun 'echo "$SHELLSPEC_DESCRIPTION"'
It 'builds description'
When run shellspec_description example_group desc
The stdout should eq "desc$SHELLSPEC_VT"
End
It 'translates @ to example lineno'
When run shellspec_description example @
The stdout should eq "<example:10-20>"
End
End
# shellspec_example_group: yields into the group's block body.
Describe "shellspec_example_group()"
mock() {
shellspec_output() { echo "$1"; }
shellspec_yield() { echo 'yield'; }
}
It 'calls yield block'
BeforeRun mock
When run shellspec_example_group
The stdout should include 'yield'
End
End
# shellspec_example_block: dispatches to shellspec_parameters when the block
# is parameterized, otherwise directly to the shellspec_example<N> function.
Describe "shellspec_example_block()"
mock() {
shellspec_parameters() { echo "called shellspec_parameters" "$@"; }
shellspec_example123() { echo "called shellspec_example123"; }
}
It 'calls shellspec_parameters if not defined SHELLSPEC_PARAMETER_NO exists'
BeforeRun mock SHELLSPEC_PARAMETER_NO=1000 SHELLSPEC_BLOCK_NO=123
When run shellspec_example_block
The stdout should eq 'called shellspec_parameters 1'
End
It 'calls shellspec_example if defined SHELLSPEC_PARAMETER_NO'
BeforeRun mock SHELLSPEC_PARAMETER_NO= SHELLSPEC_BLOCK_NO=123
When run shellspec_example_block
The stdout should eq 'called shellspec_example123'
End
End
# shellspec_parameters: invokes every shellspec_parameters<N> function from
# the given start index up to SHELLSPEC_PARAMETER_NO, in order.
Describe "shellspec_parameters()"
shellspec_parameters1000() { echo shellspec_parameters1000; }
shellspec_parameters1001() { echo shellspec_parameters1001; }
shellspec_parameters1002() { echo shellspec_parameters1002; }
It 'calls shellspec_parameters if not defined SHELLSPEC_PARAMETER_NO exists'
BeforeRun SHELLSPEC_PARAMETER_NO=1002
When run shellspec_parameters 1000
The line 1 of stdout should eq 'shellspec_parameters1000'
The line 2 of stdout should eq 'shellspec_parameters1001'
The line 3 of stdout should eq 'shellspec_parameters1002'
The lines of stdout should eq 3
End
End
# shellspec_parameterized_example: runs one parameterized example, bumping the
# example number and the "#<n>" suffix of the stdio file base on each call.
Describe "shellspec_parameterized_example()"
shellspec_example0() { IFS=' '; echo "shellspec_example ${*:-}"; }
setup() {
SHELLSPEC_BLOCK_NO=0
SHELLSPEC_EXAMPLE_NO=123
SHELLSPEC_STDIO_FILE_BASE=1-2-3
}
check() {
echo $SHELLSPEC_EXAMPLE_NO
echo $SHELLSPEC_STDIO_FILE_BASE
}
BeforeRun setup
AfterRun check
It 'calls shellspec_example0'
When run shellspec_parameterized_example
The line 1 of stdout should eq 'shellspec_example '
The line 2 of stdout should eq 124
The line 3 of stdout should eq "1-2-3#1"
End
It 'calls shellspec_example0 with arguments'
When run shellspec_parameterized_example arg
The line 1 of stdout should eq 'shellspec_example arg'
The line 2 of stdout should eq 124
The line 3 of stdout should eq "1-2-3#1"
End
It 'increments SHELLSPEC_STDIO_FILE_BASE number'
BeforeRun "SHELLSPEC_STDIO_FILE_BASE=1-2-3#1"
When run shellspec_parameterized_example arg
The line 1 of stdout should eq 'shellspec_example arg'
The line 2 of stdout should eq 124
The line 3 of stdout should eq "1-2-3#2"
End
End
# shellspec_example: top-level example driver. Decides whether to invoke the
# example (enabled/filter/dry-run), handles both errexit settings, emits the
# ABORTED/FAILED protocol on abnormal exit, and strips tags ("-- tag") before
# forwarding parameters.
Describe "shellspec_example()"
mock() {
shellspec_profile_start() { :; }
shellspec_profile_end() { :; }
shellspec_output() { echo "$1"; }
}
BeforeRun mock prepare
Context 'when example is execution target'
prepare() { shellspec_invoke_example() { echo 'invoke_example'; }; }
BeforeRun SHELLSPEC_ENABLED=1 SHELLSPEC_FILTER=1 SHELLSPEC_DRYRUN=''
foo() { printf 1; false; printf 2; }
Context 'errexit is on'
Set 'errexit:on'
It 'invokes example'
When run shellspec_example 'description'
The stdout should include 'invoke_example'
End
It 'invokes example with arguments'
When run shellspec_example 'description' -- tag
The stdout should include 'invoke_example'
End
Specify "The foo() stops with 'false' with run evaluation"
Skip if 'shell flag handling broken' posh_shell_flag_bug
When run foo
The stdout should eq '1'
The status should be failure
End
Specify "The foo() does NOT stop with 'false' with call evaluation"
When call foo
The stdout should eq '12'
The status should be success
End
End
Context 'errexit is off'
Set 'errexit:off'
It 'invokes example'
When run shellspec_example 'description'
The stdout should include 'invoke_example'
End
It 'invokes example with arguments'
When run shellspec_example 'description' -- tag
The stdout should include 'invoke_example'
End
Specify "The foo() does not stop with 'false' with run evaluation"
When run foo
The stdout should eq '12'
The status should be success
End
Specify "The foo() does not stop with 'false' with run evaluation"
When call foo
The stdout should eq '12'
The status should be success
End
End
Context 'errexit is off (by default)'
Before "SHELLSPEC_ERREXIT=+e"
Specify "The foo() does not stop with 'false' with run evaluation"
When run foo
The stdout should eq '12'
The status should be success
End
Specify "The foo() does not stop with 'false' with run evaluation"
When call foo
The stdout should eq '12'
The status should be success
End
End
End
Context 'when example is aborted'
prepare() { shellspec_invoke_example() { return 12; }; }
BeforeRun SHELLSPEC_ENABLED=1 SHELLSPEC_FILTER=1 SHELLSPEC_DRYRUN=''
It 'outputs abort protocol'
When run shellspec_example
The stdout should include 'ABORTED'
The stdout should include 'FAILED'
End
End
Context 'when example is not execution target'
prepare() { shellspec_invoke_example() { echo 'invoke_example'; }; }
BeforeRun SHELLSPEC_ENABLED='' SHELLSPEC_FILTER='' SHELLSPEC_DRYRUN=''
It 'not invokes example'
When run shellspec_example
The stdout should not include 'invoke_example'
End
End
Context 'when dry-run mode'
prepare() { shellspec_invoke_example() { echo 'invoke_example'; }; }
BeforeRun SHELLSPEC_ENABLED=1 SHELLSPEC_FILTER=1 SHELLSPEC_DRYRUN=1
It 'always succeeds'
When run shellspec_example
The stdout should not include 'invoke_example'
The stdout should include 'EXAMPLE'
The stdout should include 'SUCCEEDED'
End
End
Context 'with tag and parameters'
prepare() { shellspec_invoke_example() { IFS=' '; echo "$*"; }; }
It 'passes parameters only'
When run shellspec_example 'description' tag1 tag2 -- a b c
The stdout should eq 'a b c'
End
End
End
# shellspec_invoke_example: runs one example body and classifies the outcome
# (SKIPPED, FAILED, TODO, FIXED, WARNED, SUCCEEDED), covering hook errors,
# DSL-check errors, unhandled status/stdout/stderr, pending examples and
# stderr leaks.
Describe "shellspec_invoke_example()"
expectation() { shellspec_off NOT_IMPLEMENTED NO_EXPECTATION; }
mock() {
dsl_check=1
shellspec_output() { echo "$1"; }
shellspec_yield0() { echo "yield $#"; block; }
shellspec_dsl_check() { [ "$dsl_check" ]; }
}
BeforeRun SHELLSPEC_BLOCK_NO=0 mock
It 'skippes the all if skipped outside of example'
prepare() { shellspec_on SKIP; }
BeforeRun prepare
When run shellspec_invoke_example
The stdout line 1 should equal 'EXAMPLE'
The stdout line 2 should equal 'SKIP'
The stdout line 3 should equal 'SKIPPED'
End
It 'fails if hook error occurred'
BeforeRun SHELLSPEC_HOOK_ERROR=1
When run shellspec_invoke_example
The stdout line 1 should equal 'EXAMPLE'
The stdout line 2 should equal 'HOOK_ERROR'
The stdout line 3 should equal 'FAILED'
End
It 'fails if dsl_check error occurred'
BeforeRun dsl_check=''
When run shellspec_invoke_example
The stdout line 1 should equal 'EXAMPLE'
The stdout line 2 should equal 'ERROR'
End
It 'skipps the rest if skipped inside of example'
block() { shellspec_skip 1; }
When run shellspec_invoke_example 1
The stdout line 1 should equal 'EXAMPLE'
The stdout line 2 should equal 'yield 1'
The stdout line 3 should equal 'SKIP'
The stdout line 4 should equal 'SKIPPED'
End
It 'is fail if failed before skipping'
block() { expectation; shellspec_on FAILED; shellspec_skip 1; }
When run shellspec_invoke_example
The stdout line 1 should equal 'EXAMPLE'
The stdout line 2 should equal 'yield 0'
The stdout line 3 should equal 'SKIP'
The stdout line 4 should equal 'FAILED'
End
It 'is unimplemented if there is nothing inside of example'
block() { :; }
When run shellspec_invoke_example
The stdout line 1 should equal 'EXAMPLE'
The stdout line 2 should equal 'yield 0'
The stdout line 3 should equal 'NOT_IMPLEMENTED'
The stdout line 4 should equal 'TODO'
End
It 'is failed if FAILED switch is on'
block() { expectation; shellspec_on FAILED; }
When run shellspec_invoke_example
The stdout line 1 should equal 'EXAMPLE'
The stdout line 2 should equal 'yield 0'
The stdout line 3 should equal 'FAILED'
End
It 'is warned and be status unhandled if UNHANDLED_STATUS switch is on'
block() { expectation; shellspec_on UNHANDLED_STATUS; }
When run shellspec_invoke_example
The stdout line 1 should equal 'EXAMPLE'
The stdout line 2 should equal 'yield 0'
The stdout line 3 should equal 'UNHANDLED_STATUS'
The stdout line 4 should equal 'WARNED'
End
It 'is warned and be stdout unhandled if UNHANDLED_STDOUT switch is on'
block() { expectation; shellspec_on UNHANDLED_STDOUT; }
When run shellspec_invoke_example
The stdout line 1 should equal 'EXAMPLE'
The stdout line 2 should equal 'yield 0'
The stdout line 3 should equal 'UNHANDLED_STDOUT'
The stdout line 4 should equal 'WARNED'
End
It 'is warned and be stderr unhandled if UNHANDLED_STDOUT switch is on'
block() { expectation; shellspec_on UNHANDLED_STDERR; }
When run shellspec_invoke_example
The stdout line 1 should equal 'EXAMPLE'
The stdout line 2 should equal 'yield 0'
The stdout line 3 should equal 'UNHANDLED_STDERR'
The stdout line 4 should equal 'WARNED'
End
It 'is success if example ends successfully'
block() { expectation; }
When run shellspec_invoke_example
The stdout line 1 should equal 'EXAMPLE'
The stdout line 2 should equal 'yield 0'
The stdout line 3 should equal 'SUCCEEDED'
End
It 'is todo if FAILED and PENDING switch is on'
block() { expectation; shellspec_on FAILED PENDING; }
When run shellspec_invoke_example
The stdout line 1 should equal 'EXAMPLE'
The stdout line 2 should equal 'yield 0'
The stdout line 3 should equal 'TODO'
End
It 'is fixed if PENDING switch is on but not FAILED'
block() { expectation; shellspec_on PENDING; }
When run shellspec_invoke_example
The stdout line 1 should equal 'EXAMPLE'
The stdout line 2 should equal 'yield 0'
The stdout line 3 should equal 'FIXED'
End
Context 'when --warning-as-failure'
BeforeRun SHELLSPEC_WARNING_AS_FAILURE=1
It 'is todo if PENDING switch is on and WARNED'
block() { expectation; shellspec_on PENDING WARNED; }
When run shellspec_invoke_example
The stdout line 1 should equal 'EXAMPLE'
The stdout line 2 should equal 'yield 0'
The stdout line 3 should equal 'TODO'
End
End
Context 'when --no-warning-as-failure'
BeforeRun SHELLSPEC_WARNING_AS_FAILURE=''
It 'is todo if PENDING switch is on and FIXED'
block() { expectation; shellspec_on PENDING WARNED; }
When run shellspec_invoke_example
The stdout line 1 should equal 'EXAMPLE'
The stdout line 2 should equal 'yield 0'
The stdout line 3 should equal 'FIXED'
End
End
It 'is failure if shellspec_call_before_each_hooks failed'
mock_hooks() { shellspec_before 'return 1'; }
BeforeRun mock_hooks
block() { expectation; }
When run shellspec_invoke_example
The stdout line 1 should equal 'EXAMPLE'
The stdout line 2 should equal 'BEFORE_EACH_ERROR'
The stdout line 3 should equal 'FAILED'
The stdout should not include 'yield'
End
It 'is failure if shellspec_call_after_each_hooks failed'
mock_hooks() { shellspec_after 'return 1'; }
BeforeRun mock_hooks
block() { expectation; }
When run shellspec_invoke_example
The stdout line 1 should equal 'EXAMPLE'
The stdout line 2 should equal 'yield 0'
The stdout line 3 should equal 'AFTER_EACH_ERROR'
The stdout line 4 should equal 'FAILED'
The stdout should include 'yield'
End
It 'is error if something outputs to stderr'
block() { expectation; echo "error message" >&2; }
AfterRun ': > "$SHELLSPEC_LEAK_FILE"'
When run shellspec_invoke_example
The stdout line 1 should equal 'EXAMPLE'
The stdout line 2 should equal 'yield 0'
The stdout line 3 should equal 'LEAK'
The stdout line 4 should equal 'FAILED'
End
End
# shellspec_dsl_check: aggregate pre-example sanity checks (delegates to the
# file-descriptor check).
Describe "shellspec_dsl_check()"
mock() {
shellspec_fds_check() { echo "fds_check"; }
}
BeforeRun mock
It 'calls shellspec_fds_check'
When run shellspec_dsl_check
The line 1 of stdout should eq "fds_check"
End
End
# shellspec_fds_check: validates SHELLSPEC_USE_FDS — numeric fds are accepted,
# and fd variable names only when the current shell supports fd variables.
Describe "shellspec_fds_check()"
mock() {
shellspec_output() { echo "$@"; }
}
BeforeRun mock
BeforeRun SHELLSPEC_FDVAR_AVAILABLE=''
It 'passes the check when give the correct fd'
BeforeRun SHELLSPEC_USE_FDS=3:4
When run shellspec_fds_check
The status should be success
End
It 'not passes the check when give the incorrect fd'
BeforeRun SHELLSPEC_USE_FDS=@
When run shellspec_fds_check
The output should eq "DSL_ERROR UseFD: Invalid file descriptor: @"
The status should be failure
End
It 'not passes the check when give the fd variable name'
BeforeRun SHELLSPEC_USE_FDS=FD
When run shellspec_fds_check
The output should eq "DSL_ERROR UseFD: Assigning file descriptors to variables is not supported in the current shell"
The status should be failure
End
Context "For shells that can assign file descriptors to variables"
BeforeRun SHELLSPEC_FDVAR_AVAILABLE=1
It 'passes the check when give the fd variable name'
BeforeRun SHELLSPEC_USE_FDS=FD
When run shellspec_fds_check
The status should be success
End
End
End
# shellspec_around_call: wraps a "call" evaluation with before/after evaluation
# hooks; a failing before-hook suppresses the statement and after-hook, and a
# failing after-hook propagates its status.
Describe "shellspec_around_call()"
_around_call() {
shellspec_call_before_evaluation_hooks() { echo "before" "$@"; }
shellspec_call_after_evaluation_hooks() { echo "after" "$@"; }
shellspec_around_call "$@" &&:
set -- $?
shellspec_call_before_evaluation_hooks() { :; }
shellspec_call_after_evaluation_hooks() { :; }
return "$1"
}
It 'calls statement'
When run _around_call echo ok
The line 1 of stdout should eq "before CALL"
The line 2 of stdout should eq "ok"
The line 3 of stdout should eq "after CALL"
End
Context "when error occurred in before evaluation"
_around_call() {
# shellcheck disable=SC2034
SHELLSPEC_HOOK="hook name"
shellspec_call_before_evaluation_hooks() { echo "before" "$@"; return 12; }
shellspec_call_after_evaluation_hooks() { echo "after" "$@"; }
shellspec_around_call "$@" &&:
set -- $?
shellspec_call_before_evaluation_hooks() { :; }
shellspec_call_after_evaluation_hooks() { :; }
return "$1"
}
It 'calls statement'
When run _around_call echo ok
The line 1 of stdout should eq "before CALL"
The line 2 of stdout should not eq "ok"
The line 3 of stdout should not eq "after CALL"
The status should eq 12
End
End
Context "when error occurred in after evaluation"
_around_call() {
# shellcheck disable=SC2034
SHELLSPEC_HOOK="hook name"
shellspec_call_before_evaluation_hooks() { echo "before" "$@"; }
shellspec_call_after_evaluation_hooks() { echo "after" "$@"; return 12; }
shellspec_around_call "$@" &&:
set -- $?
shellspec_call_before_evaluation_hooks() { :; }
shellspec_call_after_evaluation_hooks() { :; }
return "$1"
}
It 'calls statement'
When run _around_call echo ok
The line 1 of stdout should eq "before CALL"
The line 2 of stdout should eq "ok"
The line 3 of stdout should eq "after CALL"
The status should eq 12
End
End
End
# shellspec_around_run: same hook-wrapping contract as shellspec_around_call,
# but for "run" evaluations (hooks receive RUN instead of CALL).
Describe "shellspec_around_run()"
_around_run() {
shellspec_call_before_evaluation_hooks() { echo "before" "$@"; }
shellspec_call_after_evaluation_hooks() { echo "after" "$@"; }
shellspec_around_run "$@" &&:
set -- $?
shellspec_call_before_evaluation_hooks() { :; }
shellspec_call_after_evaluation_hooks() { :; }
return "$1"
}
It 'runs statement'
When run _around_run echo ok
The line 1 of stdout should eq "before RUN"
The line 2 of stdout should eq "ok"
The line 3 of stdout should eq "after RUN"
End
Context "when error occurred in before evaluation"
_around_run() {
# shellcheck disable=SC2034
SHELLSPEC_HOOK="hook name"
shellspec_call_before_evaluation_hooks() { echo "before" "$@"; return 12; }
shellspec_call_after_evaluation_hooks() { echo "after" "$@"; }
shellspec_around_run "$@" &&:
set -- $?
shellspec_call_before_evaluation_hooks() { :; }
shellspec_call_after_evaluation_hooks() { :; }
return "$1"
}
It 'runs statement'
When run _around_run echo ok
The line 1 of stdout should eq "before RUN"
The line 2 of stdout should not eq "ok"
The line 3 of stdout should not eq "after RUN"
The status should eq 12
End
End
Context "when error occurred in after evaluation"
_around_run() {
# shellcheck disable=SC2034
SHELLSPEC_HOOK="hook name"
shellspec_call_before_evaluation_hooks() { echo "before" "$@"; }
shellspec_call_after_evaluation_hooks() { echo "after" "$@"; return 12; }
shellspec_around_run "$@" &&:
set -- $?
shellspec_call_before_evaluation_hooks() { :; }
shellspec_call_after_evaluation_hooks() { :; }
return "$1"
}
It 'runs statement'
When run _around_run echo ok
The line 1 of stdout should eq "before RUN"
The line 2 of stdout should eq "ok"
The line 3 of stdout should eq "after RUN"
The status should eq 12
End
End
End
# shellspec_when: the "When" DSL entry point — toggles the EVALUATION switch,
# dispatches to the evaluation, and flags syntax errors for a missing
# evaluation type, missing evaluation, or duplicate evaluation/expectation.
Describe "shellspec_when()"
init() {
shellspec_off EVALUATION
shellspec_on NOT_IMPLEMENTED NO_EXPECTATION
}
mock() {
shellspec_output() { echo "output:$1"; }
shellspec_statement_evaluation() { :; }
eval 'shellspec_on() { echo "on:$*"; }'
eval 'shellspec_off() { echo "off:$*"; }'
}
It 'calls evaluation'
BeforeRun init mock
When run shellspec_when call true
The stdout should include 'off:NOT_IMPLEMENTED'
The stdout should include 'on:EVALUATION'
The stdout should include 'output:EVALUATION'
End
It 'is syntax error when evaluation type missing'
BeforeRun init mock
When run shellspec_when
The stdout should include 'off:NOT_IMPLEMENTED'
The stdout should include 'on:EVALUATION'
The stdout should include 'on:FAILED'
The stdout should include 'output:SYNTAX_ERROR'
End
It 'is syntax error when evaluation missing'
BeforeRun init mock
When run shellspec_when call
The stdout should include 'off:NOT_IMPLEMENTED'
The stdout should include 'on:EVALUATION'
The stdout should include 'on:FAILED'
The stdout should include 'output:SYNTAX_ERROR'
End
It 'is syntax error when already executed evaluation'
prepare() { shellspec_on EVALUATION; }
BeforeRun init prepare mock
When run shellspec_when call true
The stdout line 1 should equal 'off:NOT_IMPLEMENTED'
The stdout line 2 should equal 'output:SYNTAX_ERROR_EVALUATION'
The stdout line 3 should equal 'on:FAILED'
End
It 'is syntax error when already executed expectation'
prepare() { shellspec_off NO_EXPECTATION; }
BeforeRun init prepare mock
When run shellspec_when
The stdout should include 'off:NOT_IMPLEMENTED'
The stdout should include 'on:EVALUATION'
The stdout should include 'on:FAILED'
The stdout should include 'output:SYNTAX_ERROR'
End
End
# shellspec_statement: dispatches to the shellspec_<statement> function,
# propagating SYNTAX_ERROR/FAILED switches and skipping when already skipped.
Describe "shellspec_statement()"
shellspec__statement_() { echo 'called'; }
inspect() {
shellspec_if SYNTAX_ERROR && echo 'SYNTAX_ERROR:on' || echo 'SYNTAX_ERROR:off'
shellspec_if FAILED && echo 'FAILED:on' || echo 'FAILED:off'
}
AfterRun inspect
It 'calls statement'
When run shellspec_statement _statement_ dummy
The stdout should include 'SYNTAX_ERROR:off'
The stdout should include 'FAILED:off'
The stdout should include 'called'
End
It 'is syntax error when statement raises syntax error'
shellspec__statement_() { shellspec_on SYNTAX_ERROR; }
When run shellspec_statement _statement_ dummy
The stdout should include 'SYNTAX_ERROR:on'
The stdout should include 'FAILED:on'
The stdout should not include 'called'
End
It 'does not call statement when already skipped'
prepare() { shellspec_on SKIP; }
BeforeRun prepare
When run shellspec_statement _statement_ dummy
The stdout should not include 'called'
End
End
# shellspec_the: the "The" DSL entry point — clears the not-implemented /
# no-expectation switches and forwards to the preposition parser; a missing
# expectation is a syntax error.
Describe "shellspec_the()"
prepare() { shellspec_on NOT_IMPLEMENTED; }
mock() {
shellspec_statement_preposition() { echo expectation; }
shellspec_output() { echo "output:$1"; }
eval 'shellspec_on() { echo "on:$@"; }'
eval 'shellspec_off() { echo "off:$@"; }'
}
It 'calls expectation'
BeforeRun prepare mock
When run shellspec_the expectation
The stdout should not include 'output:SYNTAX_ERROR_EXPECTATION'
The stdout should include 'off:NOT_IMPLEMENTED NO_EXPECTATION'
The stdout should not include 'on:FAILED'
The stdout should include 'expectation'
End
It 'calls expectation'
BeforeRun prepare mock
When run shellspec_the
The stdout should include 'output:SYNTAX_ERROR_EXPECTATION'
The stdout should include 'off:NOT_IMPLEMENTED NO_EXPECTATION'
The stdout should include 'on:FAILED'
The stdout should not include 'expectation'
End
End
# shellspec_assert: runs an arbitrary assertion command — MATCHED on success,
# ASSERT_ERROR on failure, ASSERT_WARN when it succeeds but writes to stderr,
# and a syntax error when no assertion is given.
Describe "shellspec_assert()"
prepare() { shellspec_on NOT_IMPLEMENTED; }
mock() {
shellspec_output() { echo "output:$1"; }
eval 'shellspec_on() { echo "on:$@"; }'
eval 'shellspec_off() { echo "off:$@"; }'
}
Context "when errexit on"
Set errexit:on
It 'output unmatch when assertion succeeds'
BeforeRun prepare mock
When run shellspec_assert echo ok
The stdout should include 'off:NOT_IMPLEMENTED NO_EXPECTATION'
The stdout should include 'output:MATCHED'
The stdout should include 'ok'
End
End
Context "when errexit off"
Set errexit:off
It 'output unmatch when assertion succeeds'
BeforeRun prepare mock
When run shellspec_assert echo ok
The stdout should include 'off:NOT_IMPLEMENTED NO_EXPECTATION'
The stdout should include 'output:MATCHED'
The stdout should include 'ok'
End
End
It 'output unmatch when assertion fails'
BeforeRun prepare mock
When run shellspec_assert false
The stdout should include 'off:NOT_IMPLEMENTED NO_EXPECTATION'
The stdout should include 'on:FAILED'
The stdout should include 'output:ASSERT_ERROR'
End
It 'output warning when assertion succeeds but output to stderr'
warn() { echo warn >&2; return 0; }
BeforeRun prepare mock
When run shellspec_assert warn
The stdout should include 'off:NOT_IMPLEMENTED NO_EXPECTATION'
The stdout should include 'on:WARNED'
The stdout should include 'output:ASSERT_WARN'
End
It 'raises error without assertion'
BeforeRun prepare mock
When run shellspec_assert
The stdout should include 'output:SYNTAX_ERROR_EXPECTATION'
The stdout should include 'off:NOT_IMPLEMENTED NO_EXPECTATION'
The stdout should include 'on:FAILED'
The stdout should not include 'ok'
End
End
# shellspec_path: joins its arguments into the "|"-delimited path alias list.
Describe "shellspec_path()"
echo_path_alias() { echo "$SHELLSPEC_PATH_ALIAS"; }
AfterRun echo_path_alias
It 'sets path alias'
When run shellspec_path path1 path2 path3
The stdout should eq "|path1|path2|path3|"
End
End
Describe "shellspec_skip()"
init() { SHELLSPEC_EXAMPLE_NO=1; }
mock() {
shellspec_output() { echo "output:$1"; }
}
inspect() {
shellspec_if SKIP && echo 'SKIP:on' || echo 'SKIP:off'
echo "skip_id:${SHELLSPEC_SKIP_ID-[unset]}"
echo "skip_reason:${SHELLSPEC_SKIP_REASON-[unset]}"
echo "example_no:${SHELLSPEC_EXAMPLE_NO-[unset]}"
}
BeforeRun init mock
AfterRun inspect
It 'skips example when inside of example'
When run shellspec_skip 123 "reason"
The stdout should include 'output:SKIP'
The stdout should include 'SKIP:on'
The stdout should include 'skip_id:123'
The stdout should include 'skip_reason:reason'
The stdout should include 'example_no:1'
End
It 'skips example when outside of example'
init() { SHELLSPEC_EXAMPLE_NO=; }
When run shellspec_skip 123 "skip reason"
The stdout line 1 should equal 'SKIP:on'
End
It 'do nothing when already skipped'
prepare() { shellspec_on SKIP; }
BeforeRun prepare
When run shellspec_skip 123 "skip reason"
The stdout should not include 'output:SKIP'
The stdout should include 'SKIP:on'
The stdout should include 'skip_id:'
The stdout should include 'skip_reason:'
The stdout should include 'example_no:1'
End
It 'skips example when satisfy condition'
When run shellspec_skip 123 if "reason" true
The stdout should include 'output:SKIP'
The stdout should include 'SKIP:on'
End
It 'does not skip example when not satisfy condition'
When run shellspec_skip 123 if "reason" false
The stdout should not include 'output:SKIP'
The stdout should include 'SKIP:off'
End
End
Describe "shellspec_pending()"
init() { SHELLSPEC_EXAMPLE_NO=1; }
mock() {
shellspec_output() { echo "output:$1"; }
}
inspect() {
shellspec_if PENDING && echo 'pending:on' || echo 'pending:off'
}
BeforeRun init mock
AfterRun inspect
It 'pending example when inside of example'
When run shellspec_pending
The stdout should include 'output:PENDING'
The stdout should include 'pending:on'
End
It 'does not pending example when already failed'
prepare() { shellspec_on FAILED; }
BeforeRun prepare
When run shellspec_pending
The stdout should include 'output:PENDING'
The stdout should include 'pending:off'
End
It 'does not pending example when already skipped'
prepare() { shellspec_on SKIP; }
BeforeRun prepare
When run shellspec_pending
The stdout should not include 'output:PENDING'
The stdout should include 'pending:off'
End
It 'does not pending example when outside of example'
prepare() { SHELLSPEC_EXAMPLE_NO=; }
BeforeRun prepare
When run shellspec_pending
The stdout should not include 'output:PENDING'
The stdout should include 'pending:on'
End
End
Describe "Include"
Include "$LIB/include.sh" # comment
Before 'unset __SOURCED__ ||:'
It 'includes script'
The result of "foo()" should eq "foo"
End
It 'supplies __SOURCED__ variable'
The output should be blank
The result of "get_sourced()" should eq "$LIB/include.sh"
End
End
Describe "shellspec_logger()"
It 'outputs to logfile'
logger_test() {
shellspec_logger "logger test1"
shellspec_logger "logger test2"
}
sleep() { :; }
Path log="$SHELLSPEC_TMPBASE/test-logfile"
BeforeCall SHELLSPEC_LOGFILE="$SHELLSPEC_TMPBASE/test-logfile"
When call logger_test
The line 1 of contents of file log should eq "logger test1"
The line 2 of contents of file log should eq "logger test2"
End
It 'sleeps to make the log easy to read'
shellspec_sleep() { echo sleep; }
BeforeCall SHELLSPEC_LOGFILE=/dev/null
When call shellspec_logger "logger test"
The stdout should eq "sleep"
End
It 'outputs to stdout when SHELLSPEC_LOGFILE not specified'
BeforeCall SHELLSPEC_LOGFILE=''
When call shellspec_logger "logger test"
The stdout should eq "logger test"
End
End
Describe "shellspec_deprecated()"
# shellcheck disable=SC2034
setup() {
SHELLSPEC_SPECFILE=spec.sh
SHELLSPEC_LINENO=10
SHELLSPEC_DEPRECATION_LOGFILE="$SHELLSPEC_TMPBASE/test-deprecation.log"
}
It 'outputs to logfile'
Path log="$SHELLSPEC_TMPBASE/test-deprecation.log"
BeforeRun "setup"
When run shellspec_deprecated "deprecated test"
The contents of file log should eq "spec.sh:10 deprecated test"
End
End
Describe "shellspec_intercept()"
It 'registor interceptor with default name'
When call shellspec_intercept foo
The variable SHELLSPEC_INTERCEPTOR should eq "|foo:__foo__|"
End
It 'registor interceptor with specified name'
When call shellspec_intercept foo:bar
The variable SHELLSPEC_INTERCEPTOR should eq "|foo:bar|"
End
It 'registor interceptor with same name'
When call shellspec_intercept foo:
The variable SHELLSPEC_INTERCEPTOR should eq "|foo:foo|"
End
It 'registor multiple interceptors at once'
When call shellspec_intercept foo bar
The variable SHELLSPEC_INTERCEPTOR should eq "|foo:__foo__|bar:__bar__|"
End
End
Describe "shellspec_set()"
shellspec_append_shell_option() { echo "$1 $2"; }
It 'calls shellspec_append_shell_option'
When run shellspec_set errexit:on noglob:off
The line 1 of stdout should eq "SHELLSPEC_SHELL_OPTIONS errexit:on"
The line 2 of stdout should eq "SHELLSPEC_SHELL_OPTIONS noglob:off"
End
End
Describe "shellspec_marker()"
It 'outputs maker'
When run shellspec_marker specfile 1234
The stderr should eq "${SHELLSPEC_SYN}shellspec_marker:specfile 1234"
End
End
Describe "shellspec_abort()"
It 'aborts'
When run shellspec_abort
The stderr should be blank
The status should eq 1
End
It 'aborts with exit status'
When run shellspec_abort 12
The stderr should be blank
The status should eq 12
End
It 'aborts with message'
When run shellspec_abort 1 'error'
The stderr should eq 'error'
The status should be failure
End
It 'aborts with extra message'
When run shellspec_abort 1 'error' 'extra'
The line 1 of stderr should eq 'error'
The line 2 of stderr should eq 'extra'
The status should be failure
End
End
Describe "shellspec_is_temporary_skip()"
Parameters
"" success
"# comment" success
"reason" failure
End
temporary_skip() {
SHELLSPEC_SKIP_REASON=$1
shellspec_is_temporary_skip
}
It "detects temporary skip"
When run temporary_skip "$1"
The status should be "$2"
End
End
Describe "shellspec_is_temporary_pending()"
Parameters
"" success
"# comment" success
"reason" failure
End
temporary_pending() {
# shellcheck disable=SC2034
SHELLSPEC_PENDING_REASON=$1
shellspec_is_temporary_pending
}
It "detects temporary pending"
When run temporary_pending "$1"
The status should be "$2"
End
End
Describe "shellspec_cat"
Data
#|test1
#|test2
End
It "outputs data"
When call shellspec_cat
The line 1 of stdout should eq "test1"
The line 2 of stdout should eq "test2"
End
End
Describe 'BeforeCall / AfterCall'
before() { echo before; }
after() { echo after; }
foo() { echo foo; }
BeforeCall before
AfterCall after
It 'called before / after expectation'
When call foo
The line 1 of stdout should eq before
The line 2 of stdout should eq foo
The line 3 of stdout should eq after
End
It 'can be specified multiple'
BeforeCall 'echo before2'
AfterCall 'echo after2'
When call foo
The line 1 of stdout should eq before
The line 2 of stdout should eq before2
The line 3 of stdout should eq foo
The line 4 of stdout should eq after2
The line 5 of stdout should eq after
End
It 'calls same scope with evaluation'
before() { value='before'; }
foo() { value="$value foo"; }
after() { echo "$value after"; }
When call foo
The stdout should eq "before foo after"
End
Describe 'BeforeCall'
It 'failed and evaluation not call'
before() { return 123; }
When call foo
The stdout should not include 'foo'
The status should eq 123
End
End
Describe 'AfterCall'
Context 'errexit is on'
Set errexit:on
It 'not called when evaluation failure'
foo() { echo foo; false; }
When call foo
The line 1 of stdout should eq before
The line 2 of stdout should eq foo
The line 3 of stdout should be undefined
The status should be failure
End
End
It 'fails cause evaluation to be failure'
after() { return 123; }
When call foo
The status should eq 123
The line 1 of stdout should eq 'before'
The line 2 of stdout should eq 'foo'
End
Context 'errexit is off'
Set errexit:off
It 'not called when evaluation failure'
foo() { echo foo; false; }
When call foo
The line 1 of stdout should eq before
The line 2 of stdout should eq foo
The line 3 of stdout should be undefined
The status should be failure
End
End
It 'fails cause evaluation to be failure'
after() { return 123; }
When call foo
The status should eq 123
The line 1 of stdout should eq 'before'
The line 2 of stdout should eq 'foo'
End
End
End
# Specs for BeforeRun/AfterRun hooks: mirrors the BeforeCall/AfterCall
# suite above but for "When run" evaluations (subshell execution).
Describe 'BeforeRun / AfterRun'
  before() { echo before; }
  after() { echo after; }
  foo() { echo foo; }
  BeforeRun before
  AfterRun after
  It 'run before / after expectation'
    When run foo
    The line 1 of stdout should eq before
    The line 2 of stdout should eq foo
    The line 3 of stdout should eq after
  End
  It 'can be specified multiple'
    BeforeRun 'echo before2'
    AfterRun 'echo after2'
    When run foo
    The line 1 of stdout should eq before
    The line 2 of stdout should eq before2
    The line 3 of stdout should eq foo
    The line 4 of stdout should eq after2
    The line 5 of stdout should eq after
  End
  It 'runs same scope with evaluation'
    before() { value='before'; }
    foo() { value="$value foo"; }
    after() { echo "$value after"; }
    When run foo
    The stdout should eq "before foo after"
  End
  Describe 'BeforeRun'
    It 'failed and evaluation not run'
      before() { return 123; }
      When run foo
      The stdout should not include 'foo'
      The status should eq 123
    End
  End
  Describe 'AfterRun'
    Context 'errexit is on'
      Set errexit:on
      It 'not run when evaluation failure'
        foo() { echo foo; false; }
        When run foo
        The line 1 of stdout should eq before
        The line 2 of stdout should eq foo
        The line 3 of stdout should be undefined
        The status should be failure
      End
    End
    It 'fails cause evaluation to be failure'
      after() { return 123; }
      When run foo
      The status should eq 123
      The line 1 of stdout should eq 'before'
      The line 2 of stdout should eq 'foo'
    End
    Context 'errexit is off'
      Set errexit:off
      It 'not run when evaluation failure'
        foo() { echo foo; false; }
        When run foo
        The line 1 of stdout should eq before
        The line 2 of stdout should eq foo
        The line 3 of stdout should be undefined
        The status should be failure
      End
    End
    It 'fails cause evaluation to be failure'
      after() { return 123; }
      When run foo
      The status should eq 123
      The line 1 of stdout should eq 'before'
      The line 2 of stdout should eq 'foo'
    End
  End
End
Describe "shellspec_filter()"
setup() {
SHELLSPEC_ENABLED="" SHELLSPEC_FILTER="" SHELLSPEC_FOCUSED=""
}
check() {
echo "$SHELLSPEC_ENABLED"
echo "$SHELLSPEC_FOCUSED"
echo "$SHELLSPEC_FILTER"
}
BeforeRun setup
AfterRun check
It 'sets enabled flag'
When run shellspec_filter 1
The line 1 of stdout should be present
The line 2 of stdout should be blank
The line 3 of stdout should be blank
End
It 'sets focused flag'
When run shellspec_filter "" 1
The line 1 of stdout should be blank
The line 2 of stdout should be present
The line 3 of stdout should be blank
End
It 'sets filter flag'
When run shellspec_filter "" "" 1
The line 1 of stdout should be blank
The line 2 of stdout should be blank
The line 3 of stdout should be present
End
End
Describe "shellspec_dump()"
BeforeRun SHELLSPEC_STDOUT=stdout SHELLSPEC_STDERR=stderr
BeforeRun SHELLSPEC_STATUS=123
BeforeRun SHELLSPEC_SPECFILE=specfile SHELLSPEC_AUX_LINENO=10
BeforeRun SHELLSPEC_FD_3=fd3 SHELLSPEC_FD_AA=fdaa
BeforeRun SHELLSPEC_USE_FDS=3:AA:
It 'dumps stdout/stderr/status'
When run shellspec_dump
The line 1 should eq ""
The line 2 should eq "[Dump] specfile:10 (exit status: 123)"
The line 3 should eq "- stdout:"
The line 4 should eq "stdout"
The line 5 should eq "- stderr:"
The line 6 should eq "stderr"
The line 7 should eq "- fd 3:"
The line 8 should eq "fd3"
The line 9 should eq "- fd AA:"
The line 10 should eq "fdaa"
The lines of stdout should eq 10
End
End
Describe "shellspec_dump_file()"
BeforeRun mock
It 'dumps <unset> when the file not exist'
mock() { shellspec_readfile_once() { unset "$1" ||:; }; }
When run shellspec_dump_file var VAR varfile
The line 1 should eq ""
The line 2 should eq "- var: <unset>"
End
It 'dumps <empty> when the file is empty'
mock() { shellspec_readfile_once() { eval "$1=''"; }; }
When run shellspec_dump_file var VAR varfile
The line 1 should eq ""
The line 2 should eq "- var: <empty>"
End
It 'dumps data when the file is not empty'
mock() { shellspec_readfile_once() { eval "$1='data'"; }; }
When run shellspec_dump_file var VAR varfile
The line 1 should eq ""
The line 2 should eq "- var:"
The line 3 should eq "data"
End
End
Describe "shellspec_preserve()"
shellspec_clone() { echo shellspec_clone "$@"; }
BeforeRun 'SHELLSPEC_VARS_FILE=$TMPBASE/vars'
It 'calls shellspec_clone'
Path vars-file="$TMPBASE/vars"
When run shellspec_preserve var:var2
The contents of file vars-file should eq "shellspec_clone var:var2"
End
End
Describe "shellspec_mock()"
shellspec_gen_mock_code() { echo "gen_mock_code"; }
shellspec_create_mock_file() { echo "create mock file" "$@"; }
shellspec_chmod() { echo chmod "$@"; }
shellspec_mv() { echo mv "$@"; }
BeforeRun 'SHELLSPEC_MOCK_BINDIR=$MOCKDIR'
It 'create first mock'
When run shellspec_mock first-mock
The line 1 should eq "create mock file $MOCKDIR/first-mock"
The line 2 should eq "chmod +x $MOCKDIR/first-mock"
End
It 'create second mock'
When run shellspec_mock second-mock
The line 1 should eq "mv $MOCKDIR/second-mock $MOCKDIR/second-mock#1"
The line 2 should eq "create mock file $MOCKDIR/second-mock"
The line 3 should eq "chmod +x $MOCKDIR/second-mock"
End
It 'create second mock'
When run shellspec_mock second-mock
The line 1 should eq "mv $MOCKDIR/second-mock $MOCKDIR/second-mock#1"
The line 2 should eq "create mock file $MOCKDIR/second-mock"
The line 3 should eq "chmod +x $MOCKDIR/second-mock"
End
It 'create third mock'
When run shellspec_mock third-mock
The line 1 should eq "mv $MOCKDIR/third-mock $MOCKDIR/third-mock#2"
The line 2 should eq "create mock file $MOCKDIR/third-mock"
The line 3 should eq "chmod +x $MOCKDIR/third-mock"
End
End
Describe "shellspec_create_mock_file()"
Data
#|mock code
End
It "creates a mock file"
When call shellspec_create_mock_file "$TMPBASE/mock"
The file "$TMPBASE/mock" should be exist
End
End
Describe "shellspec_unmock()"
shellspec_rm() { echo rm "$@"; }
shellspec_mv() { echo mv "$@"; }
BeforeRun 'SHELLSPEC_MOCK_BINDIR=$MOCKDIR'
It 'delete first mock'
When run shellspec_unmock first-mock
The output should be blank
End
It 'delete second mock'
When run shellspec_unmock second-mock
The output should eq "rm $MOCKDIR/second-mock"
End
It 'delete third mock'
When run shellspec_unmock third-mock
The line 1 should eq "rm $MOCKDIR/third-mock"
The line 2 should eq "mv $MOCKDIR/third-mock#1 $MOCKDIR/third-mock"
End
It 'delete fourth mock'
When run shellspec_unmock fourth-mock
The line 1 should eq "rm $MOCKDIR/fourth-mock"
The line 2 should eq "mv $MOCKDIR/fourth-mock#2 $MOCKDIR/fourth-mock"
End
End
Describe "shellspec_usefd()"
preserve() { %preserve SHELLSPEC_USE_FDS:USE_FDS; }
BeforeRun SHELLSPEC_USE_FDS=1
AfterRun preserve
It 'appends to the SHELLSPEC_USE_FDS variable'
When run shellspec_usefd 2
The variable USE_FDS should eq "1:2"
End
End
End
|
'use strict';
const arrayShift = require('../array-shift.js');
// In-place insertion: arrayShift mutates its argument, so assert on the
// same array object after the call.
describe('Array Shift', () => {
  it('should shift the array', () => {
    const input = [1, 2, 4, 5];
    const expected = [1, 2, 3, 4, 5];
    arrayShift(input, 3);
    expect(input).toEqual(expected);
  });
});
// Odd-length input: the new value lands in the middle slot.
describe('Array Shift', () => {
  it('should shift the array', () => {
    const input = [1, 2, 4, 5, 6];
    const expected = [1, 2, 4, 3, 5, 6];
    arrayShift(input, 3);
    expect(input).toEqual(expected);
  });
});
import subprocess
def control_switch(switch_number: int, action: str) -> None:
    """Spawn the shell script that turns the given switch on or off.

    Args:
        switch_number: Numeric id interpolated into the script filename.
        action: ``'on'`` or ``'off'`` (case-insensitive).

    Raises:
        ValueError: If ``action`` is neither ``'on'`` nor ``'off'``.
    """
    # Normalize once instead of calling .lower() in every branch, and use a
    # table to avoid duplicating the Popen call per action.
    normalized = action.lower()
    scripts = {
        "on": f'../Switches/Switch{switch_number}_On.sh',
        "off": f'../Switches/Switch{switch_number}_Off.sh',
    }
    if normalized not in scripts:
        raise ValueError("Invalid action. Please use 'on' or 'off'.")
    # NOTE(review): relative path — assumes the process cwd is a sibling of
    # the Switches/ directory; confirm against the caller.
    subprocess.Popen(['sh', scripts[normalized]])
package com.dam.user.rest.message;
import com.dam.user.model.entity.User;
public class UpdateRequest extends RestRequest {
private User userStored = new User();
private User userUpdate = new User();
public UpdateRequest(User userStored, User userUpdate) {
super("CS 0.0.1");
this.userStored = userStored;
this.userUpdate = userUpdate;
}
// public UpdateRequest(String userName, String password, String givenName, String lastName) {
// super("CS 0.0.1");
// user = new User(userName, password, givenName, lastName);
// }
public User getUserStored() {
return userStored;
}
public User getUserUpdate() {
return userUpdate;
}
} |
#!/usr/bin/env bash
# Rebuild the zone-service Docker image from scratch:
# 1. force-remove any running container (errors tolerated if absent),
# 2. delete the stale image,
# 3. build a fresh image from the current directory's Dockerfile.
docker rm -f zone-service
docker rmi zone-service
docker build -t zone-service .
package maker
import "cf/models"
// appGuid yields sequential fake application GUIDs (prefixed "app") for
// test fixtures; wired up in init so each call returns a fresh value.
var appGuid func() string

func init() {
	appGuid = guidGenerator("app")
}
// NewAppFields builds an ApplicationFields test fixture. Name, Guid and
// State get defaults, each individually replaceable via the overrides map.
func NewAppFields(overrides Overrides) (app models.ApplicationFields) {
	name, guid, state := "app-name", appGuid(), "started"

	if overrides.Has("guid") {
		guid = overrides.Get("guid").(string)
	}
	if overrides.Has("name") {
		name = overrides.Get("name").(string)
	}
	if overrides.Has("state") {
		state = overrides.Get("state").(string)
	}

	app.Name, app.Guid, app.State = name, guid, state
	return
}
// NewApp builds a full models.Application fixture whose embedded
// ApplicationFields come from NewAppFields with the same overrides.
func NewApp(overrides Overrides) (app models.Application) {
	fields := NewAppFields(overrides)
	app.ApplicationFields = fields
	return
}
|
#!/usr/bin/env bash
# Render a tutorial's slides and hand off to ari to produce a narrated video.
# Usage: make-video.sh <topic> <tutorial-name>
if [[ -z $1 || -z $2 ]] ; then
  echo "USAGE make-video.sh topic tutorial-name"
  exit 1
fi
topic=$1
tutor=$2
# Pin the AWS region used by the narration toolchain.
aws configure set region us-east-1
# Build the PDF slides first. Expansions are quoted so topic/tutorial names
# containing spaces or glob characters cannot split or expand the paths.
make "_site/training-material/topics/$topic/tutorials/$tutor/slides.pdf" ACTIVATE_ENV=pwd
bin/ari-quick.sh "topics/$topic/tutorials/$tutor/slides.html"
|
require_relative 'cluster_factory'
require_relative 'point'
module Geometry
=begin
The {Obround} class cluster represents a rectangle with semicircular end caps
{http://en.wiktionary.org/wiki/obround}
=end
  # Factory facade: Obround.new dispatches to CenteredObround, SizedObround
  # or the plain two-point Obround depending on the argument pattern.
  class Obround
    include ClusterFactory

    # @overload new(width, height)
    #   Creates a {Obround} of the given width and height, centered on the origin
    #   @param [Number] height Height
    #   @param [Number] width Width
    #   @return [CenteredObround]
    # @overload new(size)
    #   Creates a {Obround} of the given {Size} centered on the origin
    #   @param [Size] size Width and height
    #   @return [CenteredObround]
    # @overload new(point0, point1)
    #   Creates a {Obround} using the given {Point}s
    #   @param [Point] point0 A corner
    #   @param [Point] point1 The other corner
    # @overload new(origin, size)
    #   Creates a {Obround} from the given origin and size
    #   @param [Point] origin Lower-left corner
    #   @param [Size] size Width and height
    #   @return [SizedObround]
    # @overload new(left, bottom, right, top)
    #   Creates a {Obround} from the locations of each side
    #   @param [Number] left X-coordinate of the left side
    #   @param [Number] bottom Y-coordinate of the bottom edge
    #   @param [Number] right X-coordinate of the right side
    #   @param [Number] top Y-coordinate of the top edge
    #   @return [Obround]
    def self.new(*args)
      case args.size
      when 1
        CenteredObround.new(args[0])
      when 2
        if args.all? { |a| a.is_a?(Numeric) }
          CenteredObround.new(Size[*args])
        elsif args.all? { |a| a.is_a?(Array) || a.is_a?(Point) }
          original_new(*args)
        elsif (args[0].is_a?(Point) or args[0].is_a?(Array)) and args[1].is_a?(Size)
          SizedObround.new(*args)
        else
          raise ArgumentError, "Invalid arguments #{args}"
        end
      when 4
        raise ArgumentError unless args.all? { |a| a.is_a?(Numeric) }
        left, bottom, right, top = *args
        original_new(Point[left, bottom], Point[right, top])
      end
    end

    # Create a {Obround} using the given {Point}s
    # @param [Point0] point0 The bottom-left corner (closest to the origin)
    # @param [Point1] point1 The top-right corner (farthest from the origin)
    def initialize(point0, point1)
      point0, point1 = Point[point0], Point[point1]
      raise(ArgumentError, "Point sizes must match") unless point0.size == point1.size
      # Reorder the points to get lower-left and upper-right
      if (point0.x > point1.x) && (point0.y > point1.y)
        point0, point1 = point1, point0
      else
        p0x, p1x = [point0.x, point1.x].minmax
        p0y, p1y = [point0.y, point1.y].minmax
        point0 = Point[p0x, p0y]
        point1 = Point[p1x, p1y]
      end
      # Invariant after normalization: @points = [lower-left, upper-right]
      @points = [point0, point1]
    end

    # Equality is based on the two defining corner points.
    def eql?(other)
      self.points == other.points
    end
    alias :== :eql?

    # @group Accessors
    # @return [Point] The {Obround}'s center
    def center
      min, max = @points.minmax { |a, b| a.y <=> b.y }
      Point[(max.x + min.x) / 2, (max.y + min.y) / 2]
    end

    # @!attribute closed?
    #   @return [Bool] always true
    def closed?
      true
    end

    # @return [Array<Point>] The {Obround}'s four points (counterclockwise)
    def points
      point0, point2 = *@points
      point1 = Point[point2.x, point0.y]
      point3 = Point[point0.x, point2.y]
      [point0, point1, point2, point3]
    end

    # @return [Point] the lower-left corner (minimum x and minimum y)
    def origin
      minx = @points.min { |a, b| a.x <=> b.x }
      miny = @points.min { |a, b| a.y <=> b.y }
      Point[minx.x, miny.y]
    end

    def height
      min, max = @points.minmax { |a, b| a.y <=> b.y }
      max.y - min.y
    end

    def width
      min, max = @points.minmax { |a, b| a.x <=> b.x }
      max.x - min.x
    end
    # @endgroup
  end

  # An {Obround} defined by a center point and a {Size}.
  class CenteredObround < Obround
    # @return [Point] The {Obround}'s center
    attr_accessor :center
    # NOTE(review): @origin is never assigned anywhere in this class, so this
    # reader always returns nil — confirm whether origin should instead be
    # derived from center and size.
    attr_reader :origin
    # @return [Size] The {Size} of the {Obround}
    attr_accessor :size

    # @overload new(width, height)
    #   Creates a {Obround} of the given width and height, centered on the origin
    #   @param [Number] height Height
    #   @param [Number] width Width
    #   @return [CenteredObround]
    # @overload new(size)
    #   Creates a {Obround} of the given {Size} centered on the origin
    #   @param [Size] size Width and height
    #   @return [CenteredObround]
    # @overload new(center, size)
    #   Creates a {Obround} with the given center point and size
    #   @param [Point] center
    #   @param [Size] size
    def initialize(*args)
      if args[0].is_a?(Size)
        @center = Point[0, 0]
        @size = args[0]
      elsif args[0].is_a?(Geometry::Point) and args[1].is_a?(Geometry::Size)
        @center, @size = args[0, 1]
      elsif (2 == args.size) and args.all? { |a| a.is_a?(Numeric) }
        @center = Point[0, 0]
        @size = Geometry::Size[*args]
      end
    end

    def eql?(other)
      (self.center == other.center) && (self.size == other.size)
    end
    alias :== :eql?

    # @group Accessors
    # @return [Array<Point>] The {Obround}'s four points (clockwise)
    def points
      point0 = @center - @size / 2.0
      point2 = @center + @size / 2.0
      point1 = Point[point0.x, point2.y]
      point3 = Point[point2.x, point0.y]
      [point0, point1, point2, point3]
    end

    def height
      @size.height
    end

    def width
      @size.width
    end
    # @endgroup
  end

  # An {Obround} defined by an origin (lower-left) point and a {Size}.
  class SizedObround < Obround
    # @return [Point] The {Obround}'s origin
    attr_accessor :origin
    # @return [Size] The {Size} of the {Obround}
    attr_accessor :size

    # @overload new(width, height)
    #   Creates an {Obround} of the given width and height with its origin at [0,0]
    #   @param [Number] height Height
    #   @param [Number] width Width
    #   @return SizedObround
    # @overload new(size)
    #   Creates an {Obround} of the given {Size} with its origin at [0,0]
    #   @param [Size] size Width and height
    #   @return SizedObround
    # @overload new(origin, size)
    #   Creates an {Obround} with the given origin point and size
    #   @param [Point] origin
    #   @param [Size] size
    #   @return SizedObround
    def initialize(*args)
      if args[0].is_a?(Size)
        @origin = Point[0, 0]
        @size = args[0]
      elsif (args[0].is_a?(Point) or args[0].is_a?(Array)) and args[1].is_a?(Geometry::Size)
        @origin, @size = Point[args[0]], args[1]
      elsif (2 == args.size) and args.all? { |a| a.is_a?(Numeric) }
        @origin = Point[0, 0]
        @size = Geometry::Size[*args]
      end
    end

    def eql?(other)
      (self.origin == other.origin) && (self.size == other.size)
    end
    alias :== :eql?

    # @group Accessors
    # @return [Point] The {Obround}'s center
    def center
      @origin + @size / 2
    end

    # @return [Array<Point>] The {Obround}'s four points (clockwise)
    def points
      point0 = @origin
      point2 = @origin + @size
      point1 = Point[point0.x, point2.y]
      point3 = Point[point2.x, point0.y]
      [point0, point1, point2, point3]
    end

    def height
      @size.height
    end

    def width
      @size.width
    end
    # @endgroup
  end
end
|
#! /bin/bash
# Install the Aqua Enterprise scanner stack alongside an existing Harbor
# deployment: pulls scannercli, generates an internal TLS cert for the
# adapter, writes a docker-compose override, starts it, and registers the
# adapter with Harbor's scanner API.
# NOTE(review): admin/database passwords are hardcoded below — this looks
# like a lab/demo script; do not reuse these credentials in production.
if [ -z "$AQUA_REGISTRY_USERNAME" ]; then echo "AQUA_REGISTRY_USERNAME env is unset" && exit 1; fi
if [ -z "$AQUA_REGISTRY_PASSWORD" ]; then echo "AQUA_REGISTRY_PASSWORD env is unset" && exit 1; fi
if [ -z "$AQUA_VERSION" ]; then echo "AQUA_VERSION env is unset" && exit 1; else echo "AQUA_VERSION env is set to '$AQUA_VERSION'"; fi
HARBOR_HOME="/opt/harbor"
HARBOR_PKI_DIR="/etc/harbor/pki/internal"
HARBOR_SCANNER_AQUA_VERSION="0.12.0"
SCANNER_UID=1000
SCANNER_GID=1000
mkdir -p $HARBOR_HOME/common/config/aqua-adapter
mkdir -p /data/aqua-adapter/reports
mkdir -p /data/aqua-adapter/opt
mkdir -p /var/lib/aqua-db/data
# Login to Aqua registry.
echo $AQUA_REGISTRY_PASSWORD | docker login registry.aquasec.com \
  --username $AQUA_REGISTRY_USERNAME \
  --password-stdin
# Copy the scannercli binary from the registry.aquasec.com/scanner image.
docker run --rm --entrypoint "" \
  --volume $HARBOR_HOME/common/config/aqua-adapter:/out registry.aquasec.com/scanner:$AQUA_VERSION \
  cp /opt/aquasec/scannercli /out
# Generate a private key.
openssl genrsa -out $HARBOR_PKI_DIR/aqua_adapter.key 4096
# Generate a certificate signing request (CSR).
openssl req -sha512 -new \
  -subj "/C=CN/ST=Beijing/L=Beijing/O=example/OU=Personal/CN=aqua-adapter" \
  -key $HARBOR_PKI_DIR/aqua_adapter.key \
  -out $HARBOR_PKI_DIR/aqua_adapter.csr
# Generate an x509 v3 extension file.
cat > $HARBOR_PKI_DIR/aqua_adapter_v3.ext <<-EOF
authorityKeyIdentifier=keyid,issuer
basicConstraints=CA:FALSE
keyUsage = digitalSignature, nonRepudiation, keyEncipherment, dataEncipherment
extendedKeyUsage = serverAuth
subjectAltName = @alt_names
[alt_names]
DNS.1=aqua-adapter
EOF
# Use the v3.ext file to generate a certificate for your Harbor host.
openssl x509 -req -sha512 -days 365 \
  -extfile $HARBOR_PKI_DIR/aqua_adapter_v3.ext \
  -CA $HARBOR_PKI_DIR/harbor_internal_ca.crt \
  -CAkey $HARBOR_PKI_DIR/harbor_internal_ca.key \
  -CAcreateserial \
  -in $HARBOR_PKI_DIR/aqua_adapter.csr \
  -out $HARBOR_PKI_DIR/aqua_adapter.crt
# Hand ownership of the adapter's writable paths and TLS material to the
# uid/gid the scanner container runs as.
chown $SCANNER_UID:$SCANNER_GID /data/aqua-adapter/reports
chown $SCANNER_UID:$SCANNER_GID /data/aqua-adapter/opt
chown $SCANNER_UID:$SCANNER_GID $HARBOR_HOME/common/config/aqua-adapter/scannercli
chown $SCANNER_UID:$SCANNER_GID $HARBOR_PKI_DIR/aqua_adapter.key
chown $SCANNER_UID:$SCANNER_GID $HARBOR_PKI_DIR/aqua_adapter.crt
# Environment file consumed by the aqua-adapter container.
cat << EOF > $HARBOR_HOME/common/config/aqua-adapter/env
SCANNER_LOG_LEVEL=debug
SCANNER_API_ADDR=:8443
SCANNER_API_TLS_KEY=/etc/pki/aqua_adapter.key
SCANNER_API_TLS_CERTIFICATE=/etc/pki/aqua_adapter.crt
SCANNER_AQUA_USERNAME=administrator
SCANNER_AQUA_PASSWORD=@Aqua12345
SCANNER_AQUA_HOST=https://aqua-console:8443
SCANNER_CLI_NO_VERIFY=true
SCANNER_AQUA_REGISTRY=Harbor
SCANNER_AQUA_USE_IMAGE_TAG=false
SCANNER_AQUA_REPORTS_DIR=/var/lib/scanner/reports
SCANNER_STORE_REDIS_URL=redis://redis:6379
SCANNER_CLI_OVERRIDE_REGISTRY_CREDENTIALS=false
EOF
# Compose override adding the adapter, Aqua DB, console and gateway to the
# existing Harbor stack/network.
cat << EOF > $HARBOR_HOME/docker-compose.override.yml
version: '2.3'
services:
  aqua-adapter:
    networks:
      - harbor
    container_name: aqua-adapter
    # image: docker.io/aquasec/harbor-scanner-aqua:dev
    # image: docker.io/aquasec/harbor-scanner-aqua:$HARBOR_SCANNER_AQUA_VERSION
    image: public.ecr.aws/aquasecurity/harbor-scanner-aqua:$HARBOR_SCANNER_AQUA_VERSION
    restart: always
    cap_drop:
      - ALL
    depends_on:
      - redis
    volumes:
      - type: bind
        source: $HARBOR_PKI_DIR/aqua_adapter.key
        target: /etc/pki/aqua_adapter.key
      - type: bind
        source: $HARBOR_PKI_DIR/aqua_adapter.crt
        target: /etc/pki/aqua_adapter.crt
      - type: bind
        source: $HARBOR_HOME/common/config/aqua-adapter/scannercli
        target: /usr/local/bin/scannercli
      - type: bind
        source: /data/aqua-adapter/reports
        target: /var/lib/scanner/reports
      - type: bind
        source: /data/aqua-adapter/opt
        target: /opt/aquascans
    logging:
      driver: "syslog"
      options:
        syslog-address: "tcp://127.0.0.1:1514"
        tag: "aqua-adapter"
    env_file:
      $HARBOR_HOME/common/config/aqua-adapter/env
  aqua-db:
    networks:
      - harbor
    image: registry.aquasec.com/database:$AQUA_VERSION
    container_name: aqua-db
    environment:
      - POSTGRES_PASSWORD=lunatic0
    volumes:
      - /var/lib/aqua-db/data:/var/lib/postgresql/data
  aqua-console:
    networks:
      - harbor
    ports:
      - 9080:8080
    image: registry.aquasec.com/console:$AQUA_VERSION
    container_name: aqua-console
    environment:
      - ADMIN_PASSWORD=@Aqua12345
      - SCALOCK_DBHOST=aqua-db
      - SCALOCK_DBNAME=scalock
      - SCALOCK_DBUSER=postgres
      - SCALOCK_DBPASSWORD=lunatic0
      - SCALOCK_AUDIT_DBHOST=aqua-db
      - SCALOCK_AUDIT_DBNAME=slk_audit
      - SCALOCK_AUDIT_DBUSER=postgres
      - SCALOCK_AUDIT_DBPASSWORD=lunatic0
      - AQUA_DOCKERLESS_SCANNING=1
    volumes:
      - /var/run/docker.sock:/var/run/docker.sock
    depends_on:
      - aqua-db
  aqua-gateway:
    image: registry.aquasec.com/gateway:$AQUA_VERSION
    container_name: aqua-gateway
    environment:
      - SCALCOK_LOG_LEVEL=DEBUG
      - AQUA_CONSOLE_SECURE_ADDRESS=aqua-console:8443
      - SCALOCK_DBHOST=aqua-db
      - SCALOCK_DBNAME=scalock
      - SCALOCK_DBUSER=postgres
      - SCALOCK_DBPASSWORD=lunatic0
      - SCALOCK_AUDIT_DBHOST=aqua-db
      - SCALOCK_AUDIT_DBNAME=slk_audit
      - SCALOCK_AUDIT_DBUSER=postgres
      - SCALOCK_AUDIT_DBPASSWORD=lunatic0
    networks:
      - harbor
    depends_on:
      - aqua-db
      - aqua-console
EOF
cd /opt/harbor
docker-compose up --detach
# Use Harbor 2.0 REST API to register aqua-adapter as an Interrogation Service.
cat << EOF > /tmp/aqua-adapter.registration.json
{
  "name": "Aqua Enterprise $AQUA_VERSION",
  "url": "https://aqua-adapter:8443",
  "description": "Aqua Enterprise $AQUA_VERSION vulnerability scanner."
}
EOF
curl --include \
  --user admin:Harbor12345 \
  --request POST \
  --header "accept: application/json" \
  --header "Content-Type: application/json" \
  --data-binary "@/tmp/aqua-adapter.registration.json" \
  "http://localhost:8080/api/v2.0/scanners"
|
#!/usr/bin/env python
import datetime


def get_day_of_week(date):
    """Return the ISO-style day of week (1=Monday ... 7=Sunday) for a
    date string in 'DD-MM-YYYY' format."""
    day, month, year = [int(d) for d in date.split('-')]
    # weekday() is 0-based (Monday=0); shift to 1-based ISO numbering.
    day_of_week = datetime.date(year, month, day).weekday()
    return day_of_week + 1


# BUG FIX: the original demo passed "2020-05-15" (YYYY-MM-DD) to this
# DD-MM-YYYY parser, producing date(15, 5, 2020) and crashing with
# ValueError; it also claimed the output would be 6.
print(get_day_of_week("15-05-2020"))  # Outputs 5 (Friday)
#!/usr/bin/env bash
# Abort on any error, unset variable, or pipeline failure.
set -euo pipefail
# Compile the package, then hand off to run.bash only if the build succeeded.
go build && ./run.bash
/// <summary>
/// Scans all unordered pairs (in index order) and returns the first pair of
/// elements whose sum equals <paramref name="sum"/>, or null if none exists.
/// </summary>
public static int[] FindPair(int[] array, int sum)
{
    for (int first = 0; first < array.Length; first++)
    {
        // In wrapping (unchecked) int arithmetic, a + b == sum is
        // equivalent to b == sum - a, so precompute the complement.
        int needed = sum - array[first];
        for (int second = first + 1; second < array.Length; second++)
        {
            if (array[second] == needed)
            {
                return new[] { array[first], array[second] };
            }
        }
    }
    return null;
}
// Bootstrap script: loads the Babel polyfill, registers on-the-fly
// transpilation targeting Node 6.10, then runs the built entry point.
require('babel-polyfill')
require('babel-register')({
  presets: [
    [ "env", {
      "targets": {
        "node": "6.10"
      }
    }]
  ],
  plugins: [
    'transform-object-rest-spread',
    'transform-runtime',
  ]
})
// Must come after babel-register so the entry point is transpiled on require.
require('./build/index')
package luohuayu.anticheat;
import cpw.mods.fml.common.Mod;
import cpw.mods.fml.common.event.FMLPreInitializationEvent;
import cpw.mods.fml.common.network.NetworkRegistry;
import cpw.mods.fml.common.network.simpleimpl.SimpleNetworkWrapper;
import cpw.mods.fml.relauncher.Side;
import cpw.mods.fml.relauncher.SideOnly;
import luohuayu.anticheat.message.*;
import net.minecraftforge.common.MinecraftForge;
/**
 * Client-side entry point for the CatAntiCheat Forge mod. Sets up the
 * "CatAntiCheat" network channel and registers all packet handlers during
 * pre-initialization.
 */
@Mod(modid = "catanticheat", version = "1.2.7", name = "CatAntiCheat")
@SideOnly(Side.CLIENT)
public class CatAntiCheatMod {
    public static CatAntiCheatMod instance;
    // NOTE(review): presumably a protocol/compatibility version, distinct from
    // the @Mod version string above — confirm against the server counterpart.
    public static int version = 2;
    public SimpleNetworkWrapper networkChannel;
    static {
        // Runs at class-load time, before preInit.
        RuntimeInjectCheck.monitorLaunchClassLoader();
    }
    @Mod.EventHandler
    public void preInit(final FMLPreInitializationEvent event) {
        instance = this;
        MinecraftForge.EVENT_BUS.register(this);
        networkChannel = NetworkRegistry.INSTANCE.newSimpleChannel("CatAntiCheat");
        // NOTE(review): discriminator ids 0-10 must stay in sync with the
        // matching registrations on the other side — keep this order stable.
        networkChannel.registerMessage(new AntiCheatPacketMessageHandler.SPacketHelloHandler(), SPacketHello.class, 0, Side.CLIENT);
        networkChannel.registerMessage(new AntiCheatPacketMessageHandler.SPacketFileCheckHandler(), SPacketFileCheck.class, 1, Side.CLIENT);
        networkChannel.registerMessage(new AntiCheatPacketMessageHandler.SPacketClassCheckHandler(), SPacketClassCheck.class, 2, Side.CLIENT);
        networkChannel.registerMessage(new AntiCheatPacketMessageHandler.SPacketScreenshotHandler(), SPacketScreenshot.class, 3, Side.CLIENT);
        networkChannel.registerMessage(new AntiCheatPacketMessageHandler.CPacketHelloReplyHandler(), CPacketHelloReply.class, 4, Side.SERVER);
        networkChannel.registerMessage(new AntiCheatPacketMessageHandler.CPacketFileHashHandler(), CPacketFileHash.class, 5, Side.SERVER);
        networkChannel.registerMessage(new AntiCheatPacketMessageHandler.CPacketClassFoundHandler(), CPacketClassFound.class, 6, Side.SERVER);
        networkChannel.registerMessage(new AntiCheatPacketMessageHandler.CPacketInjectDetectHandler(), CPacketInjectDetect.class, 7, Side.SERVER);
        networkChannel.registerMessage(new AntiCheatPacketMessageHandler.CPacketImageDataHandler(), CPacketImageData.class, 8, Side.SERVER);
        networkChannel.registerMessage(new AntiCheatPacketMessageHandler.SPacketDataCheckHandler(), SPacketDataCheck.class, 9, Side.CLIENT);
        networkChannel.registerMessage(new AntiCheatPacketMessageHandler.CPacketVanillaDataHandler(), CPacketVanillaData.class, 10, Side.SERVER);
    }
}
|
import torch
import torch.nn as nn
import torch.optim as optim
import torchvision.datasets as datasets
import torchvision.transforms as transforms
from torch.utils.data import DataLoader
# Define the neural network model
class SimpleNN(nn.Module):
    """Minimal MLP classifier for 28x28 MNIST digits: 784 -> 128 -> 10."""

    def __init__(self):
        super(SimpleNN, self).__init__()
        self.fc1 = nn.Linear(784, 128)
        self.fc2 = nn.Linear(128, 10)

    def forward(self, x):
        # Flatten (N, 1, 28, 28) -> (N, 784), keeping the batch dimension.
        x = torch.flatten(x, 1)
        x = torch.relu(self.fc1(x))
        # Return raw logits. The original applied torch.softmax here, but this
        # model is trained with nn.CrossEntropyLoss, which applies log-softmax
        # internally — softmaxing twice distorts the loss and hurts training.
        # Argmax-based evaluation is unaffected (softmax is monotonic).
        return self.fc2(x)
# Load the MNIST dataset
# Normalize with mean/std 0.5 so pixel values land roughly in [-1, 1].
transform = transforms.Compose([transforms.ToTensor(), transforms.Normalize((0.5,), (0.5,))])
train_dataset = datasets.MNIST(root='./data', train=True, transform=transform, download=True)
test_dataset = datasets.MNIST(root='./data', train=False, transform=transform, download=True)
train_loader = DataLoader(dataset=train_dataset, batch_size=64, shuffle=True)
test_loader = DataLoader(dataset=test_dataset, batch_size=1000, shuffle=False)
# Initialize the model, loss function, and optimizer
model = SimpleNN()
criterion = nn.CrossEntropyLoss()
optimizer = optim.Adam(model.parameters(), lr=0.001)
# Train the model
num_epochs = 5
for epoch in range(num_epochs):
    for batch_idx, (data, targets) in enumerate(train_loader):
        optimizer.zero_grad()
        outputs = model(data)
        loss = criterion(outputs, targets)
        loss.backward()
        optimizer.step()
    # Reports only the loss of the last batch of the epoch, not an epoch average.
    print(f'Epoch [{epoch+1}/{num_epochs}], Loss: {loss.item():.4f}')
# Evaluate the model on the test dataset
model.eval()
correct = 0
total = 0
with torch.no_grad():
    for data, targets in test_loader:
        outputs = model(data)
        # Class prediction = argmax over the 10 output scores.
        _, predicted = torch.max(outputs.data, 1)
        total += targets.size(0)
        correct += (predicted == targets).sum().item()
print(f'Accuracy on the test dataset: {100 * correct / total:.2f}%')
package test
import (
"strings"
"golang.org/x/text/unicode/rangetable"
. "gopkg.in/check.v1"
"github.com/dmolesUC3/cos/internal/logging"
"github.com/dmolesUC3/cos/internal/suite"
)
// UnicodeSuite groups the Unicode-fixture tests for the suite package.
type UnicodeSuite struct {
}

var _ = Suite(&UnicodeSuite{})

// TestNonCharacter verifies that the NonCharacter range table covers every
// Unicode noncharacter code point.
func (s *UnicodeSuite) TestNonCharacter(c *C) {
	var count = 0
	rangetable.Visit(suite.NonCharacter, func(rune) {
		count += 1
	})
	// should be exactly 66 noncharacters, per
	// https://www.unicode.org/faq/private_use.html#noncharacters
	c.Assert(count, Equals, 66)
}

// TestUTF8InvalidSequences checks that converting each invalid byte sequence
// to a Go string yields at least one U+FFFD replacement rune.
func (s *UnicodeSuite) TestUTF8InvalidSequences(c *C) {
	const badChar = rune(0xfffd)
	for i, bb := range suite.UTF8InvalidSequences {
		bytesStr := logging.FormatByteArray(bb)
		asString := string(bb)
		c.Check(strings.ContainsRune(asString, badChar), Equals, true,
			Commentf("%d: %v: expected %#x (%#v), got %#v", i, bytesStr, badChar, string(badChar), asString))
	}
}
|
#!/usr/bin/env bats
# Smoke test: sourcing platform.include must initialize logging/caching and
# report every core module as loaded.
@test "SDAAS platform inclusion" {
  # Point the platform at a throwaway cache so session files land under /tmp.
  SD_CACHE='/tmp/ftest'
  SD_LOG_FILE="$SD_CACHE/session.log"
  SD_DEBUG_FILE="$SD_CACHE/session.debug"
  # Source the platform under test into this shell.
  . "$BATS_TEST_DIRNAME/../../scripts/platform.include"
  run echo "OK"
  # Inclusion must not create a stray .env, and must create the session files.
  [ ! -f .env ]
  [ -f /tmp/ftest/session.log ]
  [ -f /tmp/ftest/session.debug ]
  [ ! -z "$SD_DEBUG" ]
  # Each module flag is set to 1 once its module has loaded.
  [ $__module_caching -eq 1 ]
  [ $__module_logging -eq 1 ]
  [ $__module_bg_reasoning -eq 1 ]
  [ $__module_kb -eq 1 ]
  [ $__module_learning -eq 1 ]
}
|
#!/bin/sh -eux
# Provision a CentOS 7 host with the Docker 1.13 CS engine backed by a
# devicemapper thin pool on /dev/sdb, plus docker-compose. Expects root.
rpm --import "https://keyserver.ubuntu.com/pks/lookup?op=get&search=0xee6d536cf7dc86e2d7d56f59a178ac6c6238f52e"
yum install -y yum-utils
yum-config-manager --add-repo https://packages.docker.com/1.13/yum/repo/main/centos/7
yum makecache fast
# nfs utils required by docker netshare
# nfs driver
yum install -y nfs-utils
yum install -y docker-engine
systemctl enable docker.service
systemctl start docker.service
#Configure direct lvm
yum install -y lvm2
pvcreate /dev/sdb
vgcreate docker /dev/sdb
# Thin pool takes 95% of the VG; 1% is reserved for its metadata volume.
lvcreate --wipesignatures y -n thinpool docker -l 95%VG
lvcreate --wipesignatures y -n thinpoolmeta docker -l 1%VG
lvconvert -y --zero n -c 512K --thinpool docker/thinpool --poolmetadata docker/thinpoolmeta
# Auto-extend the pool by 20% whenever usage crosses 80%.
cat > /etc/lvm/profile/docker-thinpool.profile <<EOF
activation {
thin_pool_autoextend_threshold=80
thin_pool_autoextend_percent=20
}
EOF
lvchange --metadataprofile docker-thinpool docker/thinpool
lvs -o+seg_monitor
cat > /etc/docker/daemon.json <<EOF
{
"storage-driver": "devicemapper",
"storage-opts": [
"dm.thinpooldev=/dev/mapper/docker-thinpool",
"dm.use_deferred_removal=true",
"dm.use_deferred_deletion=true"
]
}
EOF
systemctl daemon-reload
systemctl restart docker.service
# Create user sigma that can use docker without sudo
# NOTE(review): hard-coded password "sigma" is acceptable only for throwaway
# lab VMs — never reuse this on a shared or reachable host. Membership in the
# docker group is root-equivalent.
useradd sigma
echo "sigma" | passwd --stdin sigma
usermod -aG docker sigma
usermod -aG docker vagrant
# ====================================================================
# Install docker-compose its installation requires docker epel and pip
# ====================================================================
yum -y install epel-release
yum -y install python-pip
pip install docker-compose
# My License
# https://storebits.docker.com/ee/centos/sub-04aa1f97-9df6-4a12-8049-e9f1a93f50e4
<gh_stars>0
import React, { Component } from 'react'
import { Redirect } from 'react-router-dom'
import { Elements, Forms, Message } from '@ludwigjs/ui'
import { Constants as constants } from '../constants/index'
class Login extends Component {
constructor(props) {
super(props)
}
componentDidMount() {
const { actions, match } = this.props
if (match.params.sid) {
actions.account.login(match.params.sid)
}
}
render() {
const { account, actions } = this.props
if (account.status === constants.ACCOUNT_STATUS.LOGGED_IN) {
return (<Redirect to='/account' />)
}
else if (account.status === constants.ACCOUNT_STATUS.LOGGING_IN) {
return (
<div>
<section className='t-mrg bolt-btm bolt-top'>
<Elements.PageTitle titleText={`Check Your Email`} />
</section>
<section>
<Message
title={`A One Time Login Was Sent`}
message={`Click it to login`} />
</section>
</div>
)
}
else if (account.status === constants.ACCOUNT_STATUS.NON_EXISTENT) {
return (
<div>
<section className='t-mrg bolt-btm bolt-top'>
<Elements.PageTitle titleText={`Whoops`} />
</section>
<section>
<Message
title={`Something went wrong`}
message={`Please refresh this page and try again`} />
</section>
</div>
)
}
else {
return (
<div>
<section className='t-mrg bolt-btm bolt-top'>
<Elements.PageTitle titleText={`Login`} />
</section>
<section>
<Forms.Login {...this.props} />
</section>
</div>
)
}
}
}
export default Login
|
import { createAsyncThunk, createSelector, createSlice } from "@reduxjs/toolkit";
import { Contract, ethers } from "ethers";
import { RootState } from "src/store";
import { IERC20, SPHM } from "src/typechain";
import { abi as ierc20Abi } from "../abi/IERC20.json";
import PhantomStorageAbi from "../abi/PhantomStorage.json";
import { abi as sPHMABI } from "../abi/sPHM.json";
import { addresses } from "../constants";
import { getMarketPrice, getTokenPrice, setAll } from "../helpers";
import { IBaseAsyncThunk } from "./interfaces";
// Initial shape of the app slice: cached treasury address, loading flags for
// the two async thunks below, and headline metrics filled in by loadAppDetails.
const initialState = {
  phantomTreasuryAddress: "",
  loading: false,
  loadingMarketPrice: false,
  apy: "",
  tvl: "",
  fiveDayRate: "",
};
/**
 * Resolve the treasury address: reuse the value already cached in app state
 * when present, otherwise read it from the PhantomStorage contract.
 */
export const getOrLoadTreasuryAddress = async ({ networkID, provider }: any, { dispatch, getState }: any) => {
  const cachedAddress = getState().app?.phantomTreasuryAddress;
  if (cachedAddress) {
    return cachedAddress;
  }
  return await loadTreasuryAddress({ networkID, provider });
};
// Reads the treasury address out of the PhantomStorage registry contract,
// keyed by keccak256("phantom.contracts.treasury").
export const loadTreasuryAddress = async ({ networkID, provider }: any) => {
  const phantomStorage = new Contract(addresses[networkID].PhantomStorage, PhantomStorageAbi, provider.getSigner());
  const phantomTreasuryAddress = await phantomStorage.getAddress(
    ethers.utils.keccak256(ethers.utils.solidityPack(["string"], ["phantom.contracts.treasury"])),
  );
  return phantomTreasuryAddress;
};
/**
 * Thunk that gathers headline app metrics (current block, treasury address,
 * APY / reward yield / five-day rate from the sPHM contract). The payload it
 * returns is merged into the slice by the fulfilled reducer below.
 */
export const loadAppDetails = createAsyncThunk(
  "app/loadAppDetails",
  async ({ networkID, provider }: IBaseAsyncThunk, { dispatch }) => {
    let marketPrice;
    try {
      // Market-price lookup is currently disabled; the catch below is
      // unreachable until the commented dispatch is restored.
      // const originalPromiseResult = await dispatch(
      //   loadMarketPrice({ networkID: networkID, provider: provider }),
      // ).unwrap();
      // marketPrice = originalPromiseResult?.marketPrice;
    } catch (rejectedValueOrSerializedError) {
      // handle error here
      console.error("Returned a null response from dispatch(loadMarketPrice)");
      return;
    }
    // Without a provider nothing on-chain can be read; bail out early.
    if (!provider) {
      console.error("failed to connect to provider, please connect your wallet");
      return {
        marketPrice,
      };
    }
    const [currentBlock, phantomTreasuryAddress] = await Promise.all([
      provider.getBlockNumber(),
      loadTreasuryAddress({ networkID, provider }),
    ]);
    // sPHM exposes the yield accounting; the others are plain ERC20 handles
    // (currently unused below — presumably kept for upcoming TVL/market-cap math).
    const sPHMContract = new ethers.Contract(addresses[networkID].sPHM as string, sPHMABI, provider) as SPHM;
    const gPHMContract = new ethers.Contract(addresses[networkID].gPHM as string, ierc20Abi, provider) as IERC20;
    const fPHMContract = new ethers.Contract(addresses[networkID].fPHM as string, ierc20Abi, provider) as IERC20;
    const PHMContract = new ethers.Contract(addresses[networkID].PHM as string, ierc20Abi, provider) as IERC20;
    // APY
    const [apy, index, rewardYield, periodsPerYear] = await Promise.all([
      sPHMContract.apy(),
      sPHMContract.scalingFactor(),
      sPHMContract.rewardYield(),
      sPHMContract.periodsPerYear(),
    ]);
    // Five day rate
    // Math.pow(1 + sPHM.rewardYield(), 5 * sPHM.periodsPerYear() / 365) - 1
    const nextRewardMult = +rewardYield.toString() / 1e18;
    const rebasesPer5days = 5 * (+periodsPerYear.toString() / 365);
    const fiveDayRate = Math.pow(1 + nextRewardMult, rebasesPer5days) - 1;
    return {
      // On-chain values are fixed-point BigNumbers; divisors below
      // (1e18 / 1e16) convert them to floats — confirm against sPHM decimals.
      currentIndex: +index.toString() / 1e18,
      currentBlock,
      phantomTreasuryAddress,
      apy: +apy.toString() / 1e16,
      nextRewardYield: +rewardYield.toString() / 1e16,
      fiveDayRate: fiveDayRate * 100,
      // stakingAPY,
      // stakingTVL,
      // stakingRebase,
      // marketCap,
      // marketPrice,
    };
  },
);
/**
* checks if app.slice has marketPrice already
* if yes then simply load that state
* if no then fetches via `loadMarketPrice`
*
* `usage`:
* ```
* const originalPromiseResult = await dispatch(
* findOrLoadMarketPrice({ networkID: networkID, provider: provider }),
* ).unwrap();
* originalPromiseResult?.whateverValue;
* ```
*/
export const findOrLoadMarketPrice = createAsyncThunk(
  "app/findOrLoadMarketPrice",
  async ({ networkID, provider }: IBaseAsyncThunk, { dispatch, getState }) => {
    const state: any = getState();
    let marketPrice;
    // check if we already have loaded market price
    if (state.app.loadingMarketPrice === false && state.app.marketPrice) {
      // go get marketPrice from app.state
      marketPrice = state.app.marketPrice;
    } else {
      // we don't have marketPrice in app.state, so go get it
      try {
        const originalPromiseResult = await dispatch(
          loadMarketPrice({ networkID: networkID, provider: provider }),
        ).unwrap();
        marketPrice = originalPromiseResult?.marketPrice;
      } catch (rejectedValueOrSerializedError) {
        // handle error here; returning undefined leaves the caller's
        // marketPrice unset rather than throwing.
        console.error("Returned a null response from dispatch(loadMarketPrice)");
        return;
      }
    }
    return { marketPrice };
  },
);
/**
* - fetches the OHM price from CoinGecko (via getTokenPrice)
* - falls back to fetch marketPrice from ohm-dai contract
* - updates the App.slice when it runs
*/
/**
 * Fetch the market price: try the on-chain lookup first (raw value scaled
 * down by 1e9), falling back to the aggregator price for "olympus".
 */
const loadMarketPrice = createAsyncThunk("app/loadMarketPrice", async ({ networkID, provider }: IBaseAsyncThunk) => {
  let marketPrice: number;
  try {
    const rawPrice = await getMarketPrice({ networkID, provider });
    marketPrice = rawPrice / Math.pow(10, 9);
  } catch (e) {
    // On-chain lookup failed; use the external token-price source instead.
    marketPrice = await getTokenPrice("olympus");
  }
  return { marketPrice };
});
// Slice wiring: the two thunks above drive the loading flags; fulfilled
// payloads are merged wholesale into state via setAll.
const appSlice = createSlice({
  name: "app",
  initialState,
  reducers: {
    fetchAppSuccess(state, action) {
      setAll(state, action.payload);
    },
  },
  extraReducers: builder => {
    builder
      .addCase(loadAppDetails.pending, state => {
        state.loading = true;
      })
      .addCase(loadAppDetails.fulfilled, (state, action) => {
        setAll(state, action.payload);
        state.loading = false;
      })
      .addCase(loadAppDetails.rejected, (state, { error }) => {
        // Clear the flag even on failure so the UI doesn't spin forever.
        state.loading = false;
        console.error(error.name, error.message, error.stack);
      })
      .addCase(loadMarketPrice.pending, (state, action) => {
        state.loadingMarketPrice = true;
      })
      .addCase(loadMarketPrice.fulfilled, (state, action) => {
        setAll(state, action.payload);
        state.loadingMarketPrice = false;
      })
      .addCase(loadMarketPrice.rejected, (state, { error }) => {
        state.loadingMarketPrice = false;
        console.error(error.name, error.message, error.stack);
      });
  },
});
// Root selector for this slice.
const baseInfo = (state: RootState) => state.app;
export default appSlice.reducer;
export const { fetchAppSuccess } = appSlice.actions;
// Memoized selector over the whole app slice.
export const getAppState = createSelector(baseInfo, app => app);
|
import re


def parse_setup_file(file_path):
    """Extract metadata from a setuptools ``setup.py`` via regex scanning.

    Returns a dict with keys ``url``, ``authors``, ``author_emails``,
    ``description`` and ``install_requires``. Missing string fields are
    None; missing list fields are []. Note this is a lightweight regex
    scan, not a real Python parse — it only matches single-quoted values.
    """
    with open(file_path, 'r') as file:
        setup_content = file.read()
    url_match = re.search(r"url='(.*?)'", setup_content)
    author_match = re.search(r"author='(.*?)'", setup_content)
    email_match = re.search(r"author_email='(.*?)'", setup_content)
    description_match = re.search(r"description='(.*?)'", setup_content)
    # re.DOTALL lets the list span multiple lines — the common setup.py
    # layout — which the original pattern silently failed to match.
    install_requires_match = re.search(r"install_requires=\[(.*?)\]", setup_content, re.DOTALL)
    url = url_match.group(1) if url_match else None
    authors = author_match.group(1).split(', ') if author_match else []
    author_emails = email_match.group(1).split(', ') if email_match else []
    description = description_match.group(1) if description_match else None
    install_requires = []
    if install_requires_match:
        for pkg in install_requires_match.group(1).split(','):
            # Strip surrounding whitespace/newlines first, then quotes; drop
            # empty entries (empty list, trailing comma) that the original
            # returned as [''].
            name = pkg.strip().strip("'\"")
            if name:
                install_requires.append(name)
    return {
        'url': url,
        'authors': authors,
        'author_emails': author_emails,
        'description': description,
        'install_requires': install_requires
    }
#!/bin/bash
# --------------------------------------------------------------------------
# This script deploys rendered graphs of the Synthea modules to github pages
# --------------------------------------------------------------------------
set -o errexit -o nounset
# Only the master branch publishes; PR/feature builds exit successfully.
if [ "$TRAVIS_BRANCH" != "master" ]
then
  echo "Skipping publication of module graphs for non-master branches."
  exit 0
fi
rev=$(git rev-parse --short HEAD)
# Render the module graphs into ./output.
./gradlew graphviz
cd output
# Build a fresh repo in ./output and graft it onto the gh-pages branch so
# only the graphviz artifacts are committed.
git init
git config user.name "Jason Walonoski"
git config user.email "jwalonoski@mitre.org"
# GH_TOKEN is injected by CI; keep it out of logs (push uses -q below).
git remote add upstream "https://$GH_TOKEN@github.com/synthetichealth/synthea.git"
git fetch upstream
git reset upstream/gh-pages
# echo "synthea.org" > CNAME
touch graphviz
git add -A graphviz/
git commit -m "rebuild graphs at ${rev}"
git push -q upstream HEAD:gh-pages
# Define a function to find numbers divisible by 7 but not multiples of 5
def find_numbers():
    """Print, comma-separated, every number in [2000, 3200] that is
    divisible by 7 but not a multiple of 5."""
    matches = []
    for candidate in range(2000, 3201):
        if candidate % 7 == 0 and candidate % 5 != 0:
            matches.append(str(candidate))
    print(",".join(matches))


# Call the function to find and print the numbers
find_numbers()
<reponame>MarkGartner/mgas<gh_stars>0
package ru.job4j.array;
public class Turn {
    /**
     * Reverses the given array in place and returns the same instance.
     *
     * Fixes two issues in the original: an ArrayIndexOutOfBoundsException on
     * empty input (the even-length branch computed midpoint = -1 and indexed
     * it), and an unnecessarily convoluted two-phase swap scheme — a single
     * two-pointer sweep reverses any length correctly.
     *
     * @param paramArray array to reverse; modified in place (may be empty)
     * @return the same array, reversed
     */
    public int[] reverse(int[] paramArray) {
        for (int i = 0, j = paramArray.length - 1; i < j; i++, j--) {
            int temp = paramArray[i];
            paramArray[i] = paramArray[j];
            paramArray[j] = temp;
        }
        return paramArray;
    }
}
|
<filename>frontend/src/components/Header/HeaderStyles.js<gh_stars>1-10
import { makeStyles } from "@material-ui/core";
// JSS styles for the header: two brand variants (mobile/desktop) that both
// stretch (flexGrow) and render as unstyled white links.
export default makeStyles((theme) => ({
  container: { display: "flex" },
  BrandMobile: {
    flexGrow: 1,
    textDecoration: "none",
    color: "#fff",
    alignSelf: "center",
  },
  BrandDesktop: {
    textDecoration: "none",
    color: "#fff",
    flexGrow: 1,
    alignSelf: "center",
    // From the md breakpoint up, lay the desktop brand out as a flex row.
    [theme.breakpoints.up("md")]: {
      display: "flex",
    },
  },
}));
<gh_stars>0
'use strict';
// Compiled (Babel + Vue template compiler) output for the van-coupon-list
// component. NOTE(review): generated artifact — do not hand-edit the render
// function below; regenerate from the component source instead.
exports.__esModule = true;
var _create = require('../utils/create');
var _create2 = _interopRequireDefault(_create);
var _Item = require('./Item');
var _Item2 = _interopRequireDefault(_Item);
var _field = require('../field');
var _field2 = _interopRequireDefault(_field);
var _button = require('../button');
var _button2 = _interopRequireDefault(_button);
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
exports.default = (0, _create2.default)({
  // Compiled template: exchange bar (field + button), coupon list, disabled
  // coupon list, empty placeholder, and a close bar.
  render: function render() {
    var _vm = this;var _h = _vm.$createElement;var _c = _vm._self._c || _h;return _c('div', { staticClass: "van-coupon-list" }, [_vm.showExchangeBar ? _c('cell-group', { staticClass: "van-coupon-list__top" }, [_c('field', { staticClass: "van-coupon-list__filed van-hairline--surround", attrs: { "placeholder": _vm.inputPlaceholder || _vm.$t('placeholder'), "maxlength": 20 }, model: { value: _vm.currentCode, callback: function callback($$v) {
      _vm.currentCode = $$v;
    }, expression: "currentCode" } }), _c('van-button', { staticClass: "van-coupon-list__exchange", attrs: { "size": "small", "type": "danger", "text": _vm.exchangeButtonText || _vm.$t('exchange'), "loading": _vm.exchangeButtonLoading, "disabled": _vm.buttonDisabled }, on: { "click": _vm.onClickExchangeButton } })], 1) : _vm._e(), _c('div', { ref: "list", staticClass: "van-coupon-list__list", class: { 'van-coupon-list--with-exchange': _vm.showExchangeBar } }, [_vm._l(_vm.coupons, function (item, index) {
      return _c('coupon-item', { key: item.id || item.name, ref: "card", refInFor: true, attrs: { "data": item, "chosen": index === _vm.chosenCoupon }, nativeOn: { "click": function click($event) {
        _vm.$emit('change', index);
      } } });
    }), _vm.disabledCoupons.length ? _c('h3', [_vm._v(_vm._s(_vm.disabledListTitle || _vm.$t('disabled')))]) : _vm._e(), _vm._l(_vm.disabledCoupons, function (item) {
      return _c('coupon-item', { key: item.id || item.name, attrs: { "disabled": "", "data": item } });
    }), !_vm.coupons.length && !_vm.disabledCoupons.length ? _c('div', { staticClass: "van-coupon-list__empty" }, [_c('img', { attrs: { "src": "https://img.yzcdn.cn/v2/image/wap/trade/new_order/empty@2x.png" } }), _c('p', [_vm._v(_vm._s(_vm.$t('empty')))])]) : _vm._e()], 2), _c('div', { directives: [{ name: "show", rawName: "v-show", value: _vm.showCloseButton, expression: "showCloseButton" }], staticClass: "van-coupon-list__close van-hairline--top", domProps: { "textContent": _vm._s(_vm.closeButtonText || _vm.$t('close')) }, on: { "click": function click($event) {
      _vm.$emit('change', -1);
    } } })], 1);
  },
  name: 'coupon-list',
  components: {
    VanButton: _button2.default,
    Field: _field2.default,
    CouponItem: _Item2.default
  },
  // v-model binds to the `code` prop (mirrored into data.currentCode).
  model: {
    prop: 'code'
  },
  props: {
    code: String,
    closeButtonText: String,
    inputPlaceholder: String,
    disabledListTitle: String,
    exchangeButtonText: String,
    exchangeButtonLoading: Boolean,
    exchangeButtonDisabled: Boolean,
    exchangeMinLength: {
      type: Number,
      default: 1
    },
    chosenCoupon: {
      type: Number,
      default: -1
    },
    coupons: {
      type: Array,
      default: function _default() {
        return [];
      }
    },
    disabledCoupons: {
      type: Array,
      default: function _default() {
        return [];
      }
    },
    displayedCouponIndex: {
      type: Number,
      default: -1
    },
    showExchangeBar: {
      type: Boolean,
      default: true
    },
    showCloseButton: {
      type: Boolean,
      default: true
    }
  },
  data: function data() {
    return {
      currentCode: this.code || ''
    };
  },
  computed: {
    // Exchange is disabled while not loading if explicitly disabled or the
    // entered code is shorter than exchangeMinLength.
    buttonDisabled: function buttonDisabled() {
      return !this.exchangeButtonLoading && (this.exchangeButtonDisabled || this.currentCode.length < this.exchangeMinLength);
    }
  },
  watch: {
    code: function code(_code) {
      this.currentCode = _code;
    },
    currentCode: function currentCode(code) {
      this.$emit('input', code);
    },
    displayedCouponIndex: function displayedCouponIndex(val) {
      this.scrollToShowCoupon(val);
    }
  },
  mounted: function mounted() {
    this.scrollToShowCoupon(this.displayedCouponIndex);
  },
  methods: {
    onClickExchangeButton: function onClickExchangeButton() {
      this.$emit('exchange', this.currentCode);
      // auto clear currentCode when not use v-model
      if (!this.code) {
        this.currentCode = '';
      }
    },
    // scroll to show specific coupon
    scrollToShowCoupon: function scrollToShowCoupon(index) {
      var _this = this;
      if (index === -1) {
        return;
      }
      this.$nextTick(function () {
        var _$refs = _this.$refs,
            card = _$refs.card,
            list = _$refs.list;
        if (list && card && card[index]) {
          list.scrollTop = card[index].$el.offsetTop - 100;
        }
      });
    }
  }
});
<reponame>nemanjam/next-prisma-boilerplate
/// <reference types="cypress" />
//
import { Routes } from 'lib-client/constants';
const fakeUser = require('../fixtures/fakeUser');
const cookieName = Cypress.env('COOKIE_NAME');
// E2E coverage for the single-post page: edit and delete flows. The DB is
// seeded via cy.task('db:seed') and all tests run as the admin user with the
// session cookie preserved between tests.
describe('Post:id page', () => {
  before(() => {
    cy.clearCookies();
    cy.getCookies().should('be.empty');
    // cy.seedDbViaUI();
    cy.task('db:seed');
    cy.loginAsAdmin();
  });
  beforeEach(() => {
    Cypress.Cookies.preserveOnce(cookieName);
  });
  after(() => {
    cy.task('db:teardown');
  });
  // just for this test and postTitle var
  context('post page', () => {
    beforeEach(() => {
      // home page, must be logged in
      cy.visit('/');
      // assert logged in as admin
      cy.findByText(/^log out$/i).should('exist');
      // Stash the first post's title as a test-scoped alias (this.postTitle).
      cy.get('.home__list .post-item:first-child h2').invoke('text').as('postTitle');
    });
    // MUST use function() instead of () => {} for this
    it('edit post button works', function () {
      // remember title
      const postTitle = this.postTitle as string;
      // click title
      cy.findByRole('heading', { name: RegExp(postTitle, 'i') })
        .should('exist')
        .click();
      // assert we left Home page because of same heading
      cy.findByRole('heading', { name: /home/i }).should('not.exist');
      // assert post page
      cy.url().should('match', RegExp(`/${fakeUser.username}/post/\\d+`, 'i'));
      cy.findByRole('heading', { name: RegExp(postTitle, 'i') }).should('exist');
      cy.log('arrived on Post page');
      cy.log('Edit postTitle:' + postTitle);
      // click edit
      cy.findByText(/^edit$/i)
        .should('exist')
        .click();
      // assert create/edit post page
      cy.url().should('match', /\/post\/create\/\d+/i);
      cy.log('arrived on Edit page');
      cy.findByRole('heading', { name: /edit post/i }).should('exist');
      cy.findByRole('textbox', { name: /title/i }).should('have.value', postTitle);
      // must be before click()
      cy.intercept('PATCH', `${Routes.API.POSTS}*`).as('patchPost');
      // edit title
      const editedTitle = `Edited: ${postTitle}`;
      cy.findByRole('textbox', { name: /title/i }).clear().type(editedTitle);
      cy.findByRole('button', { name: /update/i }).click();
      cy.wait('@patchPost');
      // assert post page with edited title
      cy.url().should('match', RegExp(`/${fakeUser.username}/post/\\d+`, 'i'));
      cy.findByRole('heading', { name: RegExp(editedTitle, 'i') }).should('exist');
    });
    it('delete post button works', function () {
      // remember title
      const postTitle = this.postTitle as string;
      // click title
      cy.findByRole('heading', { name: RegExp(postTitle, 'i') })
        .should('exist')
        .click();
      // assert we left Home page because of same heading
      cy.findByRole('heading', { name: /home/i }).should('not.exist');
      // assert post page
      cy.url().should('match', RegExp(`/${fakeUser.username}/post/\\d+`, 'i'));
      cy.findByRole('heading', { name: RegExp(postTitle, 'i') }).should('exist');
      cy.log('arrived on Post page');
      cy.log('Delete postTitle:' + postTitle);
      cy.intercept('DELETE', `${Routes.API.POSTS}*`).as('deletePost');
      // click delete
      cy.findByText(/^delete$/i)
        .should('exist')
        .click();
      cy.wait('@deletePost');
      // assert Home page
      cy.findByRole('heading', { name: /home/i }).should('exist');
      // assert first post doesnt exist
      cy.findByRole('heading', { name: RegExp(postTitle, 'i') }).should('not.exist');
    });
  });
});
|
<reponame>jing-si/plant
package kr.co.gardener.util;
import java.lang.reflect.Field;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.HashMap;
public class GridSystem {
    /**
     * Builds a map of this object's declared field values keyed "C0", "C1", ...
     * in declaration order. Date-typed fields are rendered with the pattern
     * "yyyy.MM.dd HH:mm:ss".
     *
     * Fix: the original called format.format(field.get(this)) without a null
     * check, so any null Date field threw a NullPointerException; null values
     * are now stored as-is.
     *
     * @return map from "C&lt;index&gt;" to the (possibly formatted) field value
     */
    public HashMap<String, Object> getGrid() {
        HashMap<String, Object> grid = new HashMap<String, Object>();
        Field[] list = this.getClass().getDeclaredFields();
        for (int a = 0; a < list.length; a++) {
            try {
                Field field = list[a];
                field.setAccessible(true);
                Object value = field.get(this);
                // Format only non-null Date-typed values; everything else is stored raw.
                if (value != null && field.getType().toString().indexOf("Date") > 0) {
                    DateFormat format = new SimpleDateFormat("yyyy.MM.dd HH:mm:ss");
                    grid.put("C" + a, format.format(value));
                } else {
                    grid.put("C" + a, value);
                }
            } catch (IllegalArgumentException e) {
                e.printStackTrace();
            } catch (IllegalAccessException e) {
                e.printStackTrace();
            }
        }
        return grid;
    }
}
|
import React from 'react';
// Map a stock count to its display rating: description, bar colour class and
// fill percentage. Bands: <=5 Low, <=10 Medium, otherwise High.
const countRating = count => {
  const bands = [
    { max: 5, desc: 'Low', colour: 'bg-red', percentage: 17 },
    { max: 10, desc: 'Medium', colour: 'bg-yellow', percentage: 50 },
  ];
  const band = bands.find(b => count <= b.max);
  if (band) {
    const { desc, colour, percentage } = band;
    return { desc, colour, percentage };
  }
  return {
    desc: 'High',
    colour: 'bg-aqua',
    percentage: 90
  };
};
// Renders the textual stock rating plus a proportional progress bar.
export const StockLevel = ({ count }) => {
  const { desc, colour, percentage } = countRating(count);
  return (
    <div>
      <p>
        Stock Level: <b>{desc}</b>
      </p>
      {/* Outer track: fixed-height, clipped container for the coloured fill. */}
      <div
        className="bg-lightgray flex rounded mt2 mx-auto col-2"
        style={{
          height: '0.5rem',
          overflow: 'hidden'
        }}
      >
        <div
          className={`inline-block col-2 ${colour}`}
          style={{
            width: `${percentage}%`
          }}
        />
      </div>
    </div>
  );
};
|
// My Controllers File
angular.module('my-controllers',[])
.controller('oneCtrl',[...])
.controller('twoCtrl',[...]);
// My Services File
angular.module('my-services',[])
.factory('oneSrc',[...])
.facotry('twoSrc',[...]);
// My Directives File
angular.module('my-directives',[])
.directive('oneDrct',[...])
.directive('twoDrct',[...]);
// My Main Application File
angular.module('my-app',['my-controllers','my-services','my-directives',...]); |
#!/bin/bash
# Please update these carefully, some versions won't work under Wine
# Pinned installer downloads, each verified below against a sha256 before use.
NSIS_FILENAME=nsis-3.03-setup.exe
NSIS_URL=https://prdownloads.sourceforge.net/nsis/$NSIS_FILENAME?download
NSIS_SHA256=bd3b15ab62ec6b0c7a00f46022d441af03277be893326f6fea8e212dc2d77743
ZBAR_FILENAME=zbarw-20121031-setup.exe
ZBAR_URL=https://sourceforge.net/projects/zbarw/files/$ZBAR_FILENAME/download
ZBAR_SHA256=177e32b272fa76528a3af486b74e9cb356707be1c5ace4ed3fcee9723e2c2c02
LIBUSB_FILENAME=libusb-1.0.22.7z
LIBUSB_URL=https://prdownloads.sourceforge.net/project/libusb/libusb-1.0/libusb-1.0.22/$LIBUSB_FILENAME?download
LIBUSB_SHA256=671f1a420757b4480e7fadc8313d6fb3cbb75ca00934c417c1efa6e77fb8779b
PYTHON_VERSION=3.5.4
## These settings probably don't need change
export WINEPREFIX=/opt/wine64
#export WINEARCH='win32'
# Windows-side Python home and the interpreter invocation used throughout.
PYHOME=c:/python$PYTHON_VERSION
PYTHON="wine $PYHOME/python.exe -OO -B"
# based on https://superuser.com/questions/497940/script-to-verify-a-signature-with-gpg
# verify_signature FILE KEYRING — succeed only if gpg reports a VALIDSIG for
# FILE against KEYRING; otherwise dump gpg's output and abort the script.
verify_signature() {
    local file=$1 keyring=$2 out=
    if out=$(gpg --no-default-keyring --keyring "$keyring" --status-fd 1 --verify "$file" 2>/dev/null) &&
       echo "$out" | grep -qs "^\[GNUPG:\] VALIDSIG "; then
        return 0
    else
        echo "$out" >&2
        exit 1
    fi
}
# verify_hash FILE EXPECTED_SHA256 — succeed if FILE has the expected sha256;
# on mismatch, delete the file (so a corrupted download is never reused) and
# abort the whole script.
verify_hash() {
    local file=$1 expected_hash=$2
    # Quote "$file" so paths containing spaces hash the intended file;
    # the original unquoted expansion word-split such paths.
    actual_hash=$(sha256sum "$file" | awk '{print $1}')
    if [ "$actual_hash" == "$expected_hash" ]; then
        return 0
    else
        echo "$file $actual_hash (unexpected hash)" >&2
        rm "$file"
        exit 1
    fi
}
# download_if_not_exist FILE URL — fetch URL into ./FILE unless it is already
# present (downloads are content-verified separately via verify_hash).
download_if_not_exist() {
    local file_name=$1 url=$2
    if [ ! -e $file_name ] ; then
        wget -O $PWD/$file_name "$url"
    fi
}
# https://github.com/travis-ci/travis-build/blob/master/lib/travis/build/templates/header.sh
# retry CMD... — run CMD up to 3 times with a 1s pause between attempts,
# returning its last exit status. The `! { ...; }` wrapper keeps a failing
# attempt from tripping `set -e` while still capturing $?.
retry() {
    local result=0
    local count=1
    while [ $count -le 3 ]; do
        [ $result -ne 0 ] && {
            echo -e "\nThe command \"$@\" failed. Retrying, $count of 3.\n" >&2
        }
        ! { "$@"; result=$?; }
        [ $result -eq 0 ] && break
        count=$(($count + 1))
        sleep 1
    done
    [ $count -gt 3 ] && {
        echo -e "\nThe command \"$@\" failed 3 times.\n" >&2
    }
    return $result
}
# Let's begin!
# Main build flow: reset the Wine prefix, install a signature-verified Python,
# the build-time pip packages, then the ZBar/NSIS/libusb Windows dependencies.
here=$(dirname $(readlink -e $0))
set -e
# Clean up Wine environment
echo "Cleaning $WINEPREFIX"
rm -rf $WINEPREFIX
echo "done"
wine 'wineboot'
cd /tmp/electrum-btcp-build
# Install Python
# note: you might need "sudo apt-get install dirmngr" for the following
# keys from https://www.python.org/downloads/#pubkeys
KEYLIST_PYTHON_DEV="531F072D39700991925FED0C0EDDC5F26A45C816 26DEA9D4613391EF3E25C9FF0A5B101836580288 CBC547978A3964D14B9AB36A6AF053F07D9DC8D2 C01E1CAD5EA2C4F0B8E3571504C367C218ADD4FF 12EF3DC38047DA382D18A5B999CDEA9DA4135B38 8417157EDBE73D9EAC1E539B126EB563A74B06BF DBBF2EEBF925FAADCF1F3FFFD9866941EA5BBD71 2BA0DB82515BBB9EFFAC71C5C9BE28DEE6DF025C 0D96DF4D4110E5C43FBFB17F2D347EA6AA65421D C9B104B3DD3AA72D7CCB1066FB9921286F5E1540 97FC712E4C024BBEA48A61ED3A5CA953F73C700D 7ED10B6531D7C8E1BC296021FC624643487034E5"
KEYRING_PYTHON_DEV="keyring-electrum-build-python-dev.gpg"
KEYSERVER_PYTHON_DEV="hkp://pool.sks-keyservers.net"
retry gpg --no-default-keyring --keyring $KEYRING_PYTHON_DEV --keyserver $KEYSERVER_PYTHON_DEV --recv-keys $KEYLIST_PYTHON_DEV
# Each Python MSI is GPG-verified before being installed into the prefix.
for msifile in core dev exe lib pip tools; do
    echo "Installing $msifile..."
    wget -N -c "https://www.python.org/ftp/python/$PYTHON_VERSION/win32/${msifile}.msi"
    wget -N -c "https://www.python.org/ftp/python/$PYTHON_VERSION/win32/${msifile}.msi.asc"
    verify_signature "${msifile}.msi.asc" $KEYRING_PYTHON_DEV
    wine msiexec /i "${msifile}.msi" /qb TARGETDIR=C:/python$PYTHON_VERSION
done
# upgrade pip
$PYTHON -m pip install pip --upgrade
# Install pywin32-ctypes (needed by pyinstaller)
$PYTHON -m pip install pywin32-ctypes==0.1.2
# install PySocks
$PYTHON -m pip install win_inet_pton==1.0.1
$PYTHON -m pip install -r $here/../deterministic-build/requirements-binaries.txt
# Install PyInstaller
$PYTHON -m pip install https://github.com/ecdsa/pyinstaller/archive/fix_2952.zip
# Install ZBar
download_if_not_exist $ZBAR_FILENAME "$ZBAR_URL"
verify_hash $ZBAR_FILENAME "$ZBAR_SHA256"
wine "$PWD/$ZBAR_FILENAME" /S
# Upgrade setuptools (so Electrum can be installed later)
$PYTHON -m pip install setuptools --upgrade
# Install NSIS installer
download_if_not_exist $NSIS_FILENAME "$NSIS_URL"
verify_hash $NSIS_FILENAME "$NSIS_SHA256"
wine "$PWD/$NSIS_FILENAME" /S
download_if_not_exist $LIBUSB_FILENAME "$LIBUSB_URL"
verify_hash $LIBUSB_FILENAME "$LIBUSB_SHA256"
7z x -olibusb $LIBUSB_FILENAME -aos
cp libusb/MS32/dll/libusb-1.0.dll $WINEPREFIX/drive_c/python$PYTHON_VERSION/
# add dlls needed for pyinstaller:
cp $WINEPREFIX/drive_c/python$PYTHON_VERSION/Lib/site-packages/PyQt5/Qt/bin/* $WINEPREFIX/drive_c/python$PYTHON_VERSION/
echo "Wine is configured."
|
# Run by hand to copy latest files from node_modules to ui/vendor after updating package.json.
# You might want to delete ui/vendor subdirectories first to remove clutter if there is a signficant change.
# ace-diff.js is not copied by this script. It is an older version than what is in npm.
# References to ace-diff.js in a Notebook example should be updated to use latest files in the ace-diff folder.
mkdir -p src/ui/vendor/ace-src-noconflict/
cp -R node_modules/ace-builds/src-noconflict/* src/ui/vendor/ace-src-noconflict/
mkdir -p src/ui/vendor/ace-diff/
cp -R node_modules/ace-diff/dist/* src/ui/vendor/ace-diff/
# diff-match-patch is renamed to the filename the UI's loader expects.
cp node_modules/diff-match-patch/index.js src/ui/vendor/diff_match_patch.js
cp node_modules/dompurify/dist/purify.js src/ui/vendor/
cp node_modules/js-sha256/src/sha256.js src/ui/vendor/
cp node_modules/mithril/mithril.js src/ui/vendor/
mkdir -p src/ui/vendor/font-awesome/css
mkdir -p src/ui/vendor/font-awesome/fonts
cp -R node_modules/font-awesome/css/font-awesome.css src/ui/vendor/font-awesome/css
cp -R node_modules/font-awesome/fonts/* src/ui/vendor/font-awesome/fonts
cp node_modules/marked/lib/marked.js src/ui/vendor/
cp node_modules/pepjs/dist/pep.js src/ui/vendor/
cp node_modules/push.js/bin/push.js src/ui/vendor/
cp node_modules/requirejs/require.js src/ui/vendor/
cp node_modules/requirejs-text/text.js src/ui/vendor/
cp node_modules/socket.io-client/dist/socket.io.js src/ui/vendor/
cp node_modules/tachyons/css/tachyons.css src/ui/vendor/
cp node_modules/three/build/three.js src/ui/vendor/
|
import { isEmpty , Reg } from '../../../shared/utils'
/** Reject when the value is empty; any non-empty value passes. */
export function required(v: unknown, args: any, next: () => void, reject: (d?: any) => void) {
  if (isEmpty(v)) {
    reject({ valid: false, tip: "该字段不可为空" })
    return
  }
  next()
}
/** Numeric-format check; empty values pass (combine with `required` to enforce presence). */
export function number(v: unknown, args: any, next: () => void, reject: (d?: any) => void) {
  const ok = isEmpty(v) || Reg.Number.test(v as string)
  ok ? next() : reject({ valid: false, tip: "非法数字" })
}
/** Email-format check; empty values pass. */
export function email(v: unknown, args: any, next: () => void, reject: (d?: any) => void) {
  if (!isEmpty(v) && !Reg.email.test(v as string)) {
    reject({ valid: false, tip: "非法邮箱" })
    return
  }
  next()
}
/** URL-format check; empty values pass. */
export function url(v: unknown, args: any, next: () => void, reject: (d?: any) => void) {
  const ok = isEmpty(v) || Reg.url.test(v as string)
  ok ? next() : reject({ valid: false, tip: "url地址格式错误" })
}
/** Mobile-phone-number format check; empty values pass. */
export function mobile(v: unknown, args: any, next: () => void, reject: (d?: any) => void) {
  if (!isEmpty(v) && !Reg.mobile.test(v as string)) {
    reject({ valid: false, tip: "手机号码格式错误" })
    return
  }
  next()
}
/** Chinese-character check; empty values pass. */
export function chinese(v: unknown, args: any, next: () => void, reject: (d?: any) => void) {
  const ok = isEmpty(v) || Reg.chinese.test(v as string)
  ok ? next() : reject({ valid: false, tip: "请输入中文" })
}
/** Validate against a caller-supplied RegExp; empty values pass. */
export function regexp(v: unknown, args: RegExp, next: () => void, reject: (d?: any) => void) {
  if (isEmpty(v) || args.test(v as string)) {
    next()
  } else {
    reject({ valid: false, tip: `正则匹配失败${args}.test(${v}) -> false` })
  }
}
|
'use strict'
// Seed script: loads data/initial/ListOfInterests.csv (one interest per line)
// into the `interests` table of the Postgres database configured via .env.
const fs = require('fs')
const path = require('path')
require('dotenv').config({ path: path.join(__dirname, '/.env') })
const { Client } = require('pg')
const client = new Client()
const interestsFile = path.join(
  __dirname,
  'data',
  'initial',
  'ListOfInterests.csv'
)
fs.readFile(interestsFile, 'utf8', (err, data) => {
  // Fix: previously the error was only logged and execution fell through to
  // data.split(...), crashing on `data` being undefined.
  if (err) {
    console.error(err)
    return
  }
  // Split on any newline convention, escape apostrophes for SQL, and keep
  // only meaningful entries.
  const values = data
    .split(/\r?\n|\r/)
    // Fix: use a global regex — String.replace("'", "''") only escaped the
    // FIRST apostrophe, producing broken SQL for values like "rock 'n' roll".
    .map((line) => line.replace(/'/g, "''").trim())
    // Fix: the first line was previously inserted unconditionally (even when
    // blank); apply the same length filter to every line.
    .filter((line) => line.length > 3)
    .map((line) => `('${line}')`)
  if (values.length === 0) {
    console.error('No interests found in CSV; nothing to insert.')
    return
  }
  const sqlQuery = `INSERT INTO interests(interest) VALUES ${values.join(', ')};`
  client.connect()
  client
    .query(sqlQuery)
    .then(() => console.log('Loaded interests successfully!')) // fix: typo "interetsts"
    .catch((e) => console.error(e.stack))
    .then(() => client.end())
})
|
from django.shortcuts import render
from .models import OperatingSystem, Service, HardwareComponent, Server
def main(request):
    """Render the inventory landing page listing every operating system,
    service and hardware component."""
    os_list = OperatingSystem.objects.all()
    svc_list = Service.objects.all()
    hardware_list = HardwareComponent.objects.all()
    return render(
        request,
        "inventory/main.html",
        {"os_list": os_list, "svc_list": svc_list, "hardware_list": hardware_list},
    )
def categorized(request, category, category_id):
    """Render the servers belonging to one category.

    category: "os", "svc" or "hw"; category_id: primary key in that model.
    Unknown categories now render an empty list — previously `category_name`
    was unbound (NameError) and `category_dict[category]` raised KeyError.
    May raise <Model>.DoesNotExist for an unknown category_id.
    """
    category_dict = {"os": "Operating System", "svc": "Service", "hw": "Hardware"}
    category_name = None  # fix: was unbound for unrecognized categories
    if category == "os":
        server_list = Server.objects.filter(os__exact=category_id)
        category_name = OperatingSystem.objects.get(id=category_id)
    elif category == "svc":
        server_list = Server.objects.filter(services__exact=category_id)
        category_name = Service.objects.get(id=category_id)  # detail record
    elif category == "hw":
        server_list = Server.objects.filter(hardware_component__exact=category_id)
        category_name = HardwareComponent.objects.get(id=category_id)
    else:
        server_list = []
    return render(
        request,
        "inventory/categoried.html",
        {
            "server_list": server_list,
            # fix: .get() with a default instead of [] so an unknown
            # category no longer raises KeyError.
            "category": category_dict.get(category, "Unknown"),
            "category_name": category_name,  # detail object (or None)
        },
    )
def server_detail(request, server_id):
    """Render the detail page for one server.

    Raises Server.DoesNotExist for an unknown server_id (no get_object_or_404
    here — NOTE(review): consider it so bad ids return 404 instead of 500).
    """
    server = Server.objects.get(id=server_id)
    return render(request, "inventory/server_detail.html", {"server": server})
|
"""
This audio library helps to detect and remove the common cameracrew commands.
Please complete it by using the required packages or modules from the oto-helper library, and the logic for it to work.
"""
# Add the import statements below
# Write the functions definitions that are required for this module below
def detect_cameracrew_commands(**args, **kwargs):
# Please add the docstrings for this function
def remove_cameracrew_commands(**args, **kwargs):
# Please add the doctrings for this function
|
package com.java.study.algorithm.zuo.bbasic.class_07;
// Placeholder for the class-07 "best arrange" exercise; no implementation yet.
public class Code_06_BestArrange{
}
# Fetch macOS app installers from pinned URLs (run by hand).
wget https://crystalidea.com/macs-fan-control/download # macs fan control
wget https://protonvpn.com/download/ProtonVPN.dmg # protonvpn
wget https://cdn-fastly.obsproject.com/downloads/obs-mac-25.0.8.dmg # obs
wget https://download.visualstudio.microsoft.com/download/pr/5fd170f1-05d8-4126-ae8f-9ff9dc683466/997547015dac7edcb155e5ac917b0c72/aspnetcore-runtime-3.1.9-osx-x64.tar.gz # dotnet core 3.1
<gh_stars>0
package com.md.appuserconnect.core.services.internal.apps;
import java.io.IOException;
import java.util.HashMap;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import com.md.appuserconnect.core.model.QNObjectManager;
import com.md.appuserconnect.core.model.apps.App;
import com.md.appuserconnect.core.utils.RRServices;
@SuppressWarnings("serial")
public class AppList extends HttpServlet {
    private QNObjectManager objmgr = QNObjectManager.getInstance();

    /**
     * GET handler: for an authenticated account, returns the apps registered
     * under the "qnid" request parameter as a JSON array, or a JSON error
     * when the parameter is missing or no apps exist.
     */
    public void doGet(HttpServletRequest req, HttpServletResponse resp) throws IOException {
        // Guard-clause style: bail out early on each failure condition.
        if (!RRServices.checkUserHasValidAccount(req, resp)) {
            return;
        }
        HashMap<String, Object> params = RRServices.loadInputParameters(req, false);
        String qnid = (String) params.get("qnid");
        if (qnid == null) {
            RRServices.repsondErrorAsJSON(resp, "No parameters supplied");
            return;
        }
        App[] apps = objmgr.getAppMgr().getAllAppsofQNID(qnid);
        if (apps.length == 0) {
            RRServices.repsondErrorAsJSON(resp, "No apps found");
            return;
        }
        App.sendArrayAsJSON(resp, apps);
    }
}
|
def binary_search(arr, x):
    """Return the index of x in the sorted sequence arr, or -1 if absent."""
    lo, hi = 0, len(arr) - 1
    while lo <= hi:
        mid = (lo + hi) // 2
        value = arr[mid]
        if value == x:
            return mid
        if value < x:
            lo = mid + 1
        else:
            hi = mid - 1
    # x is not present in arr
    return -1
def verify_non_recoverable_states(sut):
    """Check the SUT reports exactly the expected non-recoverable states.

    Comparison is order-insensitive; `sut` is assumed to expose a
    get_non_recoverable_states() method (matching the original's note).
    """
    expected = {"ROLLBACK_COMPLETE", "DELETE_COMPLETE"}
    actual = sut.get_non_recoverable_states()
    return expected == set(actual)
#!/bin/bash
# Give the ec2-user account ownership of the app and local-bin trees, then
# apply only the security-related package updates.
chown -R ec2-user:ec2-user /opt/app/
chown -R ec2-user:ec2-user /usr/local/bin/
yum update-minimal --security -y
#!/bin/bash
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Source distro-provided HBase defaults if present.
BIGTOP_DEFAULTS_DIR=${BIGTOP_DEFAULTS_DIR-/etc/default}
[ -n "${BIGTOP_DEFAULTS_DIR}" -a -r ${BIGTOP_DEFAULTS_DIR}/hbase ] && . ${BIGTOP_DEFAULTS_DIR}/hbase
# Autodetect JAVA_HOME if not defined
if [ -e /usr/libexec/bigtop-detect-javahome ]; then
. /usr/libexec/bigtop-detect-javahome
elif [ -e /usr/lib/bigtop-utils/bigtop-detect-javahome ]; then
. /usr/lib/bigtop-utils/bigtop-detect-javahome
fi
export HBASE_HOME=${HBASE_HOME:-/usr/hdp/current/hbase-client}
# NOTE(review): ${project.version} is a Maven resource-filtering placeholder;
# presumably this script is filtered at build time — confirm it is not
# shipped verbatim.
export METRON_VERSION=${project.version}
export METRON_HOME=/usr/metron/$METRON_VERSION
export DM_JAR=metron-maas-service-$METRON_VERSION-uber.jar
# Extra JVM flags can be injected via METRON_JVMFLAGS (empty when unset).
export HADOOP_OPTS="$HADOOP_OPTS $METRON_JVMFLAGS"
CP=$METRON_HOME/lib/$DM_JAR
HADOOP_CLASSPATH=$(echo $CP )
# Build a comma-separated jar list from the classpath entries.
# NOTE(review): LIBJARS is computed but never exported or passed to the
# `yarn jar` invocation below — confirm whether it is dead code.
for jar in $(echo $HADOOP_CLASSPATH | sed 's/:/ /g');do
if [ -f $jar ];then
LIBJARS="$jar,$LIBJARS"
fi
done
export HADOOP_CLASSPATH
# Launch the MaaS model-submission tool, forwarding all CLI arguments.
yarn jar $METRON_HOME/lib/$DM_JAR org.apache.metron.maas.submit.ModelSubmission "$@"
|
#!/bin/bash
# Run a single dieharder RNG test (-d 1) against generator number 402,
# with a fixed seed (-S) so the run is reproducible.
dieharder -d 1 -g 402 -S 1366070118
{% extends 'premain.sh' %}
{# Specializes premain.sh: adds the dlfcn script dir to std_box, selects C++
   as the premain language, and pipes the generated export-symbol list
   through dl_stubs for the export library. #}
{% block std_box %}
lib/dlfcn/scripts
{{super()}}
{% endblock %}
{% block premain_lang %}
c++
{% endblock %}
{% block premain_code_gen %}
(
set -eu
cat << EOF
{% block export_symbols %}
{% endblock %}
EOF
{% block export_symbols_sh %}
{% endblock %}
) | dl_stubs {{self.export_lib().strip()}}
{% endblock %}
-- phpMyAdmin SQL Dump
-- version 4.7.4
-- https://www.phpmyadmin.net/
--
-- Host: 127.0.0.1
-- Generation Time: Nov 08, 2018 at 08:19 AM
-- Server version: 10.1.28-MariaDB
-- PHP Version: 7.0.25
SET SQL_MODE = "NO_AUTO_VALUE_ON_ZERO";
SET AUTOCOMMIT = 0;
START TRANSACTION;
SET time_zone = "+00:00";
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!40101 SET NAMES utf8mb4 */;
--
-- Database: `blackjack_delivery`
--
-- --------------------------------------------------------
--
-- Table structure for table `admin`
--
-- Back-office users; rows are soft-deleted via `is_deleted`, never removed.
CREATE TABLE `admin` (
  `id` int(11) NOT NULL,
  `name` varchar(100) NOT NULL DEFAULT '',
  `username` varchar(20) NOT NULL DEFAULT '',
  `password` varchar(255) NOT NULL DEFAULT '',
  `email` varchar(100) NOT NULL DEFAULT '',
  `is_deleted` tinyint(1) NOT NULL DEFAULT '0'
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
--
-- Dumping data for table `admin`
--
-- Fix: user 1's password value was redacted to <PASSWORD> by a scrubbing
-- tool and lost its opening quote, making this statement a syntax error;
-- restored as a quoted placeholder string.
INSERT INTO `admin` (`id`, `name`, `username`, `password`, `email`, `is_deleted`) VALUES
(1, 'Kenji', 'kenji', '<PASSWORD>', '', 0),
(2, 'Vanji', 'vanji', '', '', 0),
(3, 'Reza', 'reza', '', '', 0),
(4, 'Dini', 'dini', '', '', 0),
(5, 'Cika', 'cika', '', '', 0),
(6, 'Adrian', 'adrian', '', '', 0);
-- --------------------------------------------------------
--
-- Table structure for table `customer_order`
--
-- One row per checkout; `ongkir_setting_id` references the shipping-fee
-- configuration in effect at order time.
CREATE TABLE `customer_order` (
  `id` int(11) NOT NULL,
  `ongkir_setting_id` int(11) NOT NULL,
  `customer_name` varchar(100) NOT NULL DEFAULT '',
  `shipping_address` text NOT NULL,
  `shipping_method` varchar(20) NOT NULL,
  `order_date` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
--
-- Dumping data for table `customer_order`
--
INSERT INTO `customer_order` (`id`, `ongkir_setting_id`, `customer_name`, `shipping_address`, `shipping_method`, `order_date`) VALUES
(12, 3, '', '', 'GOJEK', '2018-11-06 10:59:11'),
(13, 3, 'Kenji', 'Jl. Jendral Sudirman 503', 'GOJEK', '2018-11-06 11:00:00'),
(14, 3, 'Kenji', 'Jl. Jendral Sudirman 503', 'GOJEK', '2018-11-06 11:00:38'),
(15, 3, 'Kenji', 'Jl. Jendral Sudirman 503', 'GOJEK', '2018-11-06 11:01:08'),
(16, 3, 'Kenji', 'Jl. Jendral Sudirman 503', 'TIKI', '2018-11-06 12:28:32'),
(17, 3, '<NAME>', 'Jl. Jendral Sudirman 503', 'GOJEK', '2018-11-06 12:29:28'),
(18, 3, 'Vanj', 'Di blackjack itu sendiri', 'GOJEK', '2018-11-06 20:08:50'),
(19, 3, '<NAME>', 'Jl. Jendral Sudirman 503', 'GOJEK', '2018-11-06 20:34:15'),
(20, 3, '<NAME>', 'Jl. Jendral Sudirman 503', 'GOJEK', '2018-11-06 20:34:39'),
(21, 3, '<NAME>', 'Jl. Jendral Sudirman 503', 'GOJEK', '2018-11-06 20:34:56'),
(22, 3, '<NAME>', 'Jl. Jendral Sudirman 503', 'GOJEK', '2018-11-06 20:36:06'),
(23, 3, '<NAME>', 'Jl. Jendral Sudirman 503', 'GOJEK', '2018-11-06 20:36:55'),
(24, 3, '<NAME>', 'Jl. Jendral Sudirman 503', 'GOJEK', '2018-11-06 20:50:28');
-- --------------------------------------------------------
--
-- Table structure for table `item`
--
CREATE TABLE `item` (
`id` int(11) NOT NULL,
`name` varchar(50) NOT NULL DEFAULT '',
`image_path` text NOT NULL,
`sub_name_1` varchar(20) NOT NULL DEFAULT '',
`sub_name_2` varchar(20) NOT NULL DEFAULT '',
`description_long` text NOT NULL,
`stock` int(11) NOT NULL DEFAULT '0',
`price` int(11) NOT NULL DEFAULT '0',
`is_new` tinyint(1) NOT NULL DEFAULT '0',
`is_best_seller` tinyint(1) NOT NULL DEFAULT '0',
`created_by` varchar(20) NOT NULL DEFAULT '',
`created_date` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP,
`updated_by` varchar(20) NOT NULL DEFAULT '',
`updated_date` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
`is_deleted` tinyint(1) NOT NULL DEFAULT '0'
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
--
-- Dumping data for table `item`
--
INSERT INTO `item` (`id`, `name`, `image_path`, `sub_name_1`, `sub_name_2`, `description_long`, `stock`, `price`, `is_new`, `is_best_seller`, `created_by`, `created_date`, `updated_by`, `updated_date`, `is_deleted`) VALUES
(4, 'Tiramisu Frostie', 'img/upload/4/default.jpg', '480ml', '', 'Frostie rasa Tiramisu', 10, 28000, 1, 1, 'kenji', '2018-11-06 09:08:09', 'kenji', '2018-11-06 20:50:28', 0),
(5, 'Galaxy Tab 3', 'img/upload/5/default.jpg', 'RAM 4GB', 'ROM 8GB', 'Tahun 2016 tapi masih bagus', 1, 250000, 0, 1, 'kenji', '2018-11-06 10:22:12', 'kenji', '2018-11-06 20:08:51', 0),
(6, 'Samsung J5 Putih', 'img/upload/6/default.jpg', 'RAM 4GB', 'ROM 16GB', 'Edisi tahun 2018, bagus', 0, 240000, 1, 0, 'kenji', '2018-11-06 10:24:35', 'kenji', '2018-11-06 12:28:32', 0),
(7, 'Samsung J5 Silver', 'img/upload/7/default.jpg', 'RAM 4GB', 'ROM 16GB', 'Edisi tahun 2018, bagus', 0, 240000, 1, 1, 'kenji', '2018-11-06 10:25:01', 'kenji', '2018-11-06 11:49:33', 0);
-- --------------------------------------------------------
--
-- Table structure for table `ongkir_setting`
--
CREATE TABLE `ongkir_setting` (
`id` int(11) NOT NULL,
`minimum_order` int(11) NOT NULL DEFAULT '0',
`free_value` int(11) NOT NULL DEFAULT '0',
`per_price` int(11) NOT NULL DEFAULT '0',
`maximum_free` int(11) NOT NULL DEFAULT '0',
`description` text NOT NULL,
`created_by` varchar(20) NOT NULL DEFAULT '',
`created_date` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
--
-- Dumping data for table `ongkir_setting`
--
INSERT INTO `ongkir_setting` (`id`, `minimum_order`, `free_value`, `per_price`, `maximum_free`, `description`, `created_by`, `created_date`) VALUES
(3, 100000, 10000, 100000, 50000, '', 'kenji', '2018-11-06 10:59:00');
-- --------------------------------------------------------
--
-- Table structure for table `order_item`
--
CREATE TABLE `order_item` (
`id` int(11) NOT NULL,
`customer_order_id` int(11) NOT NULL,
`item_id` int(11) NOT NULL,
`quantity` int(11) NOT NULL DEFAULT '0',
`price` int(11) NOT NULL DEFAULT '0'
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
--
-- Dumping data for table `order_item`
--
INSERT INTO `order_item` (`id`, `customer_order_id`, `item_id`, `quantity`, `price`) VALUES
(14, 15, 7, 1, 240000),
(15, 16, 6, 1, 240000),
(16, 17, 4, 1, 28000),
(17, 18, 4, 2, 28000),
(18, 18, 5, 1, 250000),
(19, 19, 4, 1, 28000),
(20, 20, 4, 1, 28000),
(21, 21, 4, 1, 28000),
(22, 22, 4, 1, 28000),
(23, 23, 4, 1, 28000),
(24, 24, 4, 1, 28000);
--
-- Indexes for dumped tables
--
--
-- Indexes for table `admin`
--
ALTER TABLE `admin`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `customer_order`
--
ALTER TABLE `customer_order`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `item`
--
ALTER TABLE `item`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `ongkir_setting`
--
ALTER TABLE `ongkir_setting`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `order_item`
--
ALTER TABLE `order_item`
ADD PRIMARY KEY (`id`);
--
-- AUTO_INCREMENT for dumped tables
--
--
-- AUTO_INCREMENT for table `admin`
--
ALTER TABLE `admin`
MODIFY `id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=7;
--
-- AUTO_INCREMENT for table `customer_order`
--
ALTER TABLE `customer_order`
MODIFY `id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=25;
--
-- AUTO_INCREMENT for table `item`
--
ALTER TABLE `item`
MODIFY `id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=8;
--
-- AUTO_INCREMENT for table `ongkir_setting`
--
ALTER TABLE `ongkir_setting`
MODIFY `id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=4;
--
-- AUTO_INCREMENT for table `order_item`
--
ALTER TABLE `order_item`
MODIFY `id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=25;
COMMIT;
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
|
import numpy as np

# Solve the linear system:
#   2x + 5y = 10
#   x  + 2y = 8
#
# Fix: the original called np.symbols / np.matrix — np.symbols does not exist
# (it is a sympy function), so the script crashed with AttributeError before
# doing any work. The trailing comment also claimed x = 6.0, y = 1.0, which
# does not satisfy either equation; the true solution is x = 20, y = -6.

# Coefficient matrix and right-hand side.
A = np.array([[2.0, 5.0], [1.0, 2.0]])
b = np.array([10.0, 8.0])

# A is square and non-singular, so solve directly. (For this case the
# original normal-equations formula inv(A.T A) A.T b gives the same result.)
solution = np.linalg.solve(A, b)
x_solution, y_solution = solution[0], solution[1]
print(f'x = {x_solution}')  # x = 20.0
print(f'y = {y_solution}')  # y = -6.0
#!/bin/bash
# Usage: <script> INPUT OUTPUT
# Re-encode an image with ImageMagick: remove all embedded profiles/metadata
# (-strip) and rotate pixel data to match the EXIF orientation (-auto-orient).
convert -strip $1 -auto-orient $2
#!/bin/bash
# Runs the nvidia triton inference server
# https://github.com/triton-inference-server/server/blob/r21.08/docs/quickstart.md
# Ports: 8000 HTTP, 8001 gRPC, 8002 metrics. The local model repo for
# hcgesture_tlt is mounted read-write at /models inside the container.
docker run --gpus=1 --rm \
    -p8000:8000 -p8001:8001 -p8002:8002 \
    --volume $PWD/trtis_model_repo/hcgesture_tlt/1:/models \
    nvcr.io/nvidia/tritonserver:21.08-py3 \
    tritonserver --model-repository=/models
########################
########################
# Install kubectx/kubens from source, with bash and zsh completions.
echo "kubectx"
sudo git clone -b master --single-branch https://github.com/ahmetb/kubectx.git /opt/kubectx
sudo ln -s /opt/kubectx/kubectx /usr/local/bin/kubectx
sudo ln -s /opt/kubectx/kubens /usr/local/bin/kubens
# Bash completions
COMPDIR=$(pkg-config --variable=completionsdir bash-completion)
sudo ln -sf /opt/kubectx/completion/kubens.bash $COMPDIR/kubens
sudo ln -sf /opt/kubectx/completion/kubectx.bash $COMPDIR/kubectx
# Zsh completions
mkdir -p ~/.oh-my-zsh/completions
chmod -R 755 ~/.oh-my-zsh/completions
ln -s /opt/kubectx/completion/kubectx.zsh ~/.oh-my-zsh/completions/_kubectx.zsh
ln -s /opt/kubectx/completion/kubens.zsh ~/.oh-my-zsh/completions/_kubens.zsh
########################
########################
# Bootstrap krew from its release tarball in a throwaway temp dir, then make
# the plugin binary available system-wide.
echo "krew (kubectl krew package manager)"
tmpdir="$(mktemp -d)"
cd $tmpdir
curl -fsSLO "https://github.com/kubernetes-sigs/krew/releases/download/v0.3.2/krew.{tar.gz,yaml}"
tar zxvf krew.tar.gz
./krew-"$(uname | tr '[:upper:]' '[:lower:]')_amd64" install \
    --manifest=krew.yaml --archive=krew.tar.gz
cd -
rm -rf $tmpdir
sudo cp ~/.krew/bin/kubectl-krew /usr/local/bin
########################
########################
# Install kubeval (Kubernetes manifest validator) straight into PATH.
echo "kubeval"
curl -sSL https://github.com/instrumenta/kubeval/releases/download/0.14.0/kubeval-linux-amd64.tar.gz | sudo tar -xzf - -C /usr/local/bin/
sudo chmod +x /usr/local/bin/kubeval
#pragma warning(disable : 4996)
#include <cstdio>
#include <cstdlib>
#include <cstring>
#include <cctype>
#include <cmath>
#include <ctime>
// A whitespace-delimited token of the source sentence, stored as an
// offset/length pair into the original string (no copy is made).
struct __Token
{
    int loc;   // starting character offset within the source string
    int len;   // token length in characters
};
// NOTE(review): identifiers beginning with a double underscore are reserved
// for the implementation in C++ — consider renaming these helpers.
// Random integer helper; see definition below for range caveats.
static inline int
__getrandom(int min, int max);
// Tokenize `buffer`, print it and a randomly reordered copy.
static void
__do(char *buffer);
// Split `str` into a malloc'd array of {loc,len} token records.
static void
__split(const char *str, struct __Token **tokens, int *nCount);
// Return a malloc'd string with the words of `str` in random order.
static char*
__RandomWords(const char *str, struct __Token *tokens, int nCount);
int
main(int argc, char **argv)
{
    // Candidate sentences; one is picked at random and its words shuffled.
    // NOTE(review): string literals bound to non-const char* — mutating them
    // would be undefined behavior; __do only reads, but prefer const char*.
    static char *str[] = {
        "Today I go to the movies with my mom.",
        "Hello world."
    };
    // Seed the PRNG from the wall clock so each run differs.
    std::srand(static_cast<unsigned int>(std::time(nullptr)));
    int selectedId = __getrandom(0, sizeof(str) / sizeof(str[0]) - 1);
    __do(str[selectedId]);
    return EXIT_SUCCESS;
}
// Tokenize the sentence, print it alongside a randomly reordered copy, and
// release every allocation made on this path.
void
__do(char *buffer)
{
    struct __Token *tokens = nullptr;
    int nCount = 0;
    __split(buffer, &tokens, &nCount);
    auto result = __RandomWords(buffer, tokens, nCount);
    fprintf(stdout, "Selected: %s\n", buffer);
    fprintf(stdout, "Result : %s\n", result);
    std::free(result);
    std::free(tokens);   // fix: the token array from __split was leaked
    system("pause");     // Windows-only pause; fails silently elsewhere
}
// Advance the cursor `In` past consecutive whitespace, also bumping the
// enclosing function's character index `id` (captured from the call site).
// NOTE(review): iswspace() is the wide-character classifier from <cwctype>;
// this file only includes <cctype> — presumably it compiles via a transitive
// include. Confirm, or switch to isspace().
#define SKIPWS(In) \
while (*(In) != 0 && iswspace( (int)(*(In)) )) { \
++(In); \
++id; \
}
// Tokenize `in` on whitespace: append a {loc,len} record for each token to
// the realloc-grown *tokens array and bump *nCount. The caller owns and must
// free(*tokens). malloc/realloc return values are not checked.
void
__split(const char *in, struct __Token **tokens, int *nCount)
{
    char *it = const_cast<char *>(in);   // cursor over the input
    char *next = it;                     // start of the current token
    int id = 0;                          // absolute character index of `it`
    (*tokens) = nullptr;
    (*nCount) = 0;
    SKIPWS(it);   // ignore leading whitespace
    while (*it != 0) {
        if (iswspace((int)(*it))) {
            // End of a token: record its length and starting offset.
            int len = it - next;
            if ((*tokens) == nullptr) {
                (*tokens) = (struct __Token *)malloc(sizeof(struct __Token));
                (*nCount) = 1;
            } else {
                (*nCount) += 1;
                (*tokens) = (struct __Token *)realloc((*tokens), (*nCount) * sizeof(struct __Token));
            }
            (*tokens)[(*nCount) - 1].len = len;
            (*tokens)[(*nCount) - 1].loc = id - len;
            SKIPWS(it);
            next = it;
        }
        ++it;
        ++id;
    }
    // Flush the trailing token (input that does not end with whitespace).
    if (it != nullptr && next != nullptr) {
        int len = it - next;
        if ((*tokens) == nullptr) {
            (*tokens) = (struct __Token *)malloc(sizeof(struct __Token));
            (*nCount) = 1;
        }
        else {
            (*nCount) += 1;
            (*tokens) = (struct __Token *)realloc((*tokens), (*nCount) * sizeof(struct __Token));
        }
        (*tokens)[(*nCount) - 1].len = len;
        (*tokens)[(*nCount) - 1].loc = id - len;
    }
}
// Return a pseudo-random integer in the inclusive range [min, max].
// Fix: the original `return min = (std::rand() % (max - min + 1));`
// ASSIGNED instead of adding, so it returned a value in [0, max - min] and
// ignored `min` entirely. All current call sites pass min == 0, so their
// behavior is unchanged, but the function now honors its contract.
static inline int
__getrandom(int min, int max){
    return min + (std::rand() % (max - min + 1));
}
// Build a new heap-allocated string containing the words of `str` (as
// delimited by `tokens`) in random order, separated by single spaces.
// The caller must free() the returned buffer.
char*
__RandomWords(const char *str, struct __Token *tokens, int nCount)
{
    // Permutation of token indices, shuffled below.
    int *pId = (int *)std::malloc(nCount * sizeof(int));
    // calloc zero-fills, so the buffer is always NUL-terminated for strcat.
    char *newString = (char *)std::calloc(1, std::strlen(str) + 1);
    for (auto i = 0; i < nCount; ++i)
        pId[i] = i;
    // Shuffle by swapping each slot with a freshly drawn random slot.
    // NOTE(review): this is not Fisher-Yates (the swap index spans the whole
    // range each pass), so the permutation distribution is slightly biased —
    // presumably acceptable for this toy program.
    for (auto i = 0; i < nCount; i++) {
        int id = __getrandom(0, nCount - 1);
        {
            auto temp = pId[id];
            pId[id] = pId[i];
            pId[i] = temp;
        }
    }
    int it = 0;   // write offset into newString
    for (auto i = 0; i < nCount; ++i) {
        if (i != 0) {
            // Single-space separator; strcat appends at the current end of
            // the zero-filled buffer, then strncpy writes the word at `it`.
            std::strcat(newString, " ");
        }
        auto token = tokens[ pId[i] ];
        auto ptr = str + token.loc;
        std::strncpy(newString + it, ptr, token.len);
        it += token.len + 1;   // advance past the word plus its separator
    }
    std::free(pId);
    return newString;
}
|
/**
* Copyright (C) 2012 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.maths.lowlevelapi.linearalgebra.lapack.general.auxiliary;
/**
* Computes planar rotations (often called Given's or Jacobi rotations)
* | cos sin | . | a | = | r |
* | -sin cos | | b | | 0 |
* where cos^2 + sin^2 = 1.
*
* Algorithm is taken from:
* "Discontinuous Plane Rotations and the Symmetric Eigenvalue Problem", <NAME>, Lockheed Martin Services Inc., <EMAIL>
* http://www.netlib.org/lapack/lawnspdf/lawn150.pdf
* It forms part of the LAPACK library which is provided under a modified BSD licensed.
* This algorithm is chosen as it produces a more continuous set of rotations whilst also avoiding under/overflow
* (as outlined in "Matrix Computations, Third Edition" Golub and Van Loan p216, Alg. 5.1.3,
* which is essentially the same idea but with the discontinuity is the sign still present).
*/
public class DLARTG {
  /**
   * A rather Fortran like call to compute givens rotations.
   * For parameter meaning see class headers.
   *
   * Fix: the mixed branch previously selected on the signed comparison
   * {@code a > b} rather than on magnitudes, so a tiny positive {@code a}
   * with a huge negative {@code b} computed {@code t = b / a} and overflowed
   * to infinity/NaN. LAWN 150 (the cited algorithm) branches on
   * {@code |f| > |g|}, which keeps {@code |t| <= 1} and avoids intermediate
   * overflow; for values of moderate magnitude the results are unchanged.
   *
   * @param a the distance in the first dimension
   * @param b the distance in the second dimension
   * @return a 3 long double [] containing {cos, sin, r} to match the definition in the headers.
   */
  public static double[] givens(double a, double b) {
    double c, s, u, r, t;
    if (b == 0) {
      // Degenerate case: sign-corrected identity rotation.
      c = Math.copySign(1, a);
      s = 0;
      r = Math.abs(a);
    } else if (a == 0) {
      // Degenerate case: pure swap of the two components.
      c = 0;
      s = Math.signum(b);
      r = Math.abs(b);
    } else if (Math.abs(a) > Math.abs(b)) {   // fix: compare magnitudes
      t = b / a;                              // |t| < 1, cannot overflow
      u = Math.signum(a) * Math.sqrt(1 + t * t);
      c = 1 / u;
      s = t * c;
      r = a * u;
    } else {
      t = a / b;                              // |t| <= 1, cannot overflow
      u = Math.signum(b) * Math.sqrt(1 + t * t);
      s = 1 / u;
      c = t * s;
      r = b * u;
    }
    double[] ret = {c, s, r };
    return ret;
  }
}
|
<reponame>yulrizka/grproxy
import {Box, Button, Tab, Tabs, Typography} from "@material-ui/core";
import React, {FC} from "react";
import {Call} from "./Call";
interface TabPanelProps {
children?: React.ReactNode;
index: any;
value: any;
}
/**
 * Renders its children only while `value` (the currently selected tab index)
 * equals this panel's `index`. Hidden panels stay mounted with the `hidden`
 * attribute set, so their state is preserved across tab switches. Any extra
 * props are spread onto the wrapping div.
 */
function TabPanel(props: TabPanelProps) {
    const {children, value, index, ...other} = props;
    return (
        <div
            role="tabpanel"
            hidden={value !== index}
            id={`simple-tabpanel-${index}`}
            aria-labelledby={`simple-tab-${index}`}
            {...other}
        >
            {value === index && (
                <Box p={3}>
                    <Typography>{children}</Typography>
                </Box>
            )}
        </div>
    );
}
/**
 * Accessibility props for a Tab at the given index.
 *
 * Fix: the `id` must match the `aria-labelledby` value that TabPanel emits
 * (`simple-tab-${index}`); previously it was just `${index}`, so the
 * tab/tabpanel ARIA linkage was broken.
 */
function a11yProps(index: any) {
    return {
        id: `simple-tab-${index}`,
        'aria-controls': `simple-tabpanel-${index}`,
    };
}
interface TabDetailsProps {
currentCall: Call
}
/**
 * Two-tab detail view showing the raw request and response JSON of
 * `currentCall`. The "Reply" action is a stub (alerts "not implemented").
 */
export const TabDetails:FC<TabDetailsProps> = ({currentCall}) => {
    // Index of the currently selected tab (0 = Request, 1 = Response).
    const [value, setValue] = React.useState(0);
    const handleChange = (event: React.ChangeEvent<{}>, newValue: number) => {
        setValue(newValue);
    };
    const handleReplyClicked = () => {
        alert("not implemented")
    }
    return (
        <div>
            <Tabs value={value} onChange={handleChange} aria-label="simple tabs example">
                <Tab label="Request" {...a11yProps(0)} />
                <Tab label="Response" {...a11yProps(1)} />
            </Tabs>
            <TabPanel value={value} index={0}>
                {JSON.stringify(currentCall.request)}
                <br/>
                <Button variant="contained" onClick={handleReplyClicked}>Reply</Button>
            </TabPanel>
            <TabPanel value={value} index={1}>
                {JSON.stringify(currentCall.response)}
            </TabPanel>
        </div>
    )
}
|
#!/bin/bash
set -e
# MAKE SURE EVERY PARTITION IS PICKED UP AND UPDATED
partprobe
# Ask the kernel to rescan sda so newly provisioned partitions appear.
echo 1 > /sys/block/sda/device/rescan
sleep 2
# Mount the root partition at /mnt; the partition layout depends on the
# distro family passed as $1 (RHEL 8 images use the third partition).
if [[ "$1" == "centos"* ]] || [[ "$1" == "rhel7"* ]] || [[ "$1" == "rocky"* ]]; then
    # IN CASE UPSTREAM PARTITION HAS SAME UUID AS HOST
    mount -o nouuid /dev/sda1 /mnt
elif [[ "$1" == "rhel8"* ]]; then
    mount -o nouuid /dev/sda3 /mnt
else
    mount /dev/sda1 /mnt
fi
# Bind the host pseudo-filesystems so a chroot into /mnt works.
mount -o bind /dev /mnt/dev
mount -o bind /proc /mnt/proc
mount -o bind /sys /mnt/sys
|
-- Rows where fieldName contains the substring "Apple" anywhere.
-- (Case sensitivity depends on the column collation; the leading wildcard
-- prevents index use, so this is a full scan on large tables.)
SELECT *
FROM tableName
WHERE fieldName LIKE '%Apple%';
#!/bin/bash
SHOULD_SIGN=$1
if [ "$SHOULD_SIGN" ]
then
echo "Will sign the APP"
fi
# by Andy Maloney
# http://asmaloney.com/2013/07/howto/packaging-a-mac-os-x-application-using-a-dmg/
# make sure we are in the correct dir when we double-click a .command file
dir=${0%/*}
if [ -d "$dir" ]; then
cd "$dir"
fi
# set up your app name, architecture, and background image file name
APP_NAME="Bitzec"
rm dmg-background.tiff
ln -s ../resources/copay/mac/dmg-background.tiff dmg-background.tiff
rm volume-icon.icns
ln -s ../resources/copay/mac/volume-icon.icns volume-icon.icns
DMG_VOLUME_ICON="volume-icon.icns"
DMG_BACKGROUND_IMG="dmg-background.tiff"
PATH_NAME="${APP_NAME}/osx64/"
# you should not need to change these
APP_EXE="${PATH_NAME}${APP_NAME}.app/Contents/MacOS/nwjs"
VOL_NAME="${APP_NAME}"
DMG_TMP="${VOL_NAME}-temp.dmg"
DMG_FINAL="${VOL_NAME}.dmg"
STAGING_DIR="tmp"
# Check the background image DPI and convert it if it isn't 72x72
_BACKGROUND_IMAGE_DPI_H=`sips -g dpiHeight ${DMG_BACKGROUND_IMG} | grep -Eo '[0-9]+\.[0-9]+'`
_BACKGROUND_IMAGE_DPI_W=`sips -g dpiWidth ${DMG_BACKGROUND_IMG} | grep -Eo '[0-9]+\.[0-9]+'`
if [ $(echo " $_BACKGROUND_IMAGE_DPI_H != 72.0 " | bc) -eq 1 -o $(echo " $_BACKGROUND_IMAGE_DPI_W != 72.0 " | bc) -eq 1 ]; then
echo "WARNING: The background image's DPI is not 72. This will result in distorted backgrounds on Mac OS X 10.7+."
echo " I will convert it to 72 DPI for you."
_DMG_BACKGROUND_TMP="${DMG_BACKGROUND_IMG%.*}"_dpifix."${DMG_BACKGROUND_IMG##*.}"
sips -s dpiWidth 72 -s dpiHeight 72 ${DMG_BACKGROUND_IMG} --out ${_DMG_BACKGROUND_TMP}
DMG_BACKGROUND_IMG="${_DMG_BACKGROUND_TMP}"
fi
# clear out any old data
rm -rf "${STAGING_DIR}" "${DMG_TMP}" "${DMG_FINAL}"
# copy over the stuff we want in the final disk image to our staging dir
mkdir -p "${STAGING_DIR}"
cp -rpf "${PATH_NAME}${APP_NAME}.app" "${STAGING_DIR}"
# ... cp anything else you want in the DMG - documentation, etc.
pushd "${STAGING_DIR}"
popd
# DMG size fixed at 250 MB.
# Fix: removed a leftover `if [ $? -ne 0 ]` guard — it tested the exit status
# of the plain assignment above (always 0), so the error branch was dead code
# left over from an earlier size computation.
SIZE=250
# Sign Code (MATIAS)
if [ $SHOULD_SIGN ]
then
echo "Signing ${APP_NAME} DMG"
export IDENTITY="3rd Party Mac Developer Application: BitPay, Inc. (884JRH5R93)"
# not need for 'out of app store' distribution (?)
# export PARENT_PLIST=parent.plist
# export CHILD_PLIST=child.plist
export APP_PATH=${STAGING_DIR}/${APP_NAME}.app
codesign --deep -s "${IDENTITY}" $APP_PATH"/Contents/Versions/52.0.2743.82/nwjs Helper.app" && echo "Sign 1"
codesign --deep -s "${IDENTITY}" $APP_PATH"/Contents/Versions/52.0.2743.82/nwjs Framework.framework/Resources/app_mode_loader.app" && echo "Sign 2"
codesign --deep -s "${IDENTITY}" $APP_PATH && echo "Sign 3"
echo "Signing Done"
fi
# create the temp DMG file
hdiutil create -srcfolder "${STAGING_DIR}" -volname "${VOL_NAME}" -fs HFS+ \
-fsargs "-c c=64,a=16,e=16" -format UDRW -megabytes ${SIZE} "${DMG_TMP}"
echo "Created DMG: ${DMG_TMP}"
# mount it and save the device
DEVICE=$(hdiutil attach -readwrite -noverify "${DMG_TMP}" | \
egrep '^/dev/' | sed 1q | awk '{print $1}')
sleep 2
# add a link to the Applications dir
echo "Adding link to /Applications"
pushd /Volumes/"${VOL_NAME}"
# We name the symlink with a *non-breaking space* to avoid displaying extra text
ln -s /Applications " " # <- not your ordinary space
popd
# "bless" the folder to open it in Finder automatically when the volume is mounted
echo "Blessing disk image"
bless --folder /Volumes/"${VOL_NAME}" --openfolder /Volumes/"${VOL_NAME}"
# add a background image
echo "Adding background to disk image"
mkdir /Volumes/"${VOL_NAME}"/.background
cp "${DMG_BACKGROUND_IMG}" /Volumes/"${VOL_NAME}"/.background/
echo "Adding volume icon to disk image"
# we install this here to avoid trying to install it on linux or windows, where
# it fails to install
npm install fileicon
# use fileicon node_module
cp "${DMG_VOLUME_ICON}" /Volumes/"${VOL_NAME}"/.VolumeIcon.icns
`npm bin`/fileicon set /Volumes/"${VOL_NAME}"/ /Volumes/"${VOL_NAME}"/.VolumeIcon.icns
# tell the Finder to resize the window, set the background,
# change the icon size, place the icons in the right position, etc.
echo "Designing the unboxing experience..."
WINDOW_X=400
WINDOW_Y=100
WINDOW_WIDTH=500
WINDOW_HEIGHT=375
ICON_SIZE=100
ICON_LR_PADDING=140
ICON_Y=185
WINDOW_RIGHT=$(expr $WINDOW_X + $WINDOW_WIDTH)
WINDOW_BOTTOM=$(expr $WINDOW_Y + $WINDOW_HEIGHT)
RIGHT_ICON_PADDING_RIGHT=$(expr $WINDOW_WIDTH - $ICON_LR_PADDING)
HIDE_X=100 # no need to exceed WINDOW_WIDTH – will only create another scrollbar
HIDE_Y=$(expr $WINDOW_HEIGHT + $ICON_SIZE)
echo '
tell application "Finder"
tell disk "'${VOL_NAME}'"
open
set current view of container window to icon view
set toolbar visible of container window to false
set statusbar visible of container window to false
set the bounds of container window to {'${WINDOW_X}', '${WINDOW_Y}', '${WINDOW_RIGHT}', '${WINDOW_BOTTOM}'}
set viewOptions to the icon view options of container window
set arrangement of viewOptions to not arranged
set icon size of viewOptions to '${ICON_SIZE}'
set background picture of viewOptions to file ".background:'${DMG_BACKGROUND_IMG}'"
set position of item "'${APP_NAME}'.app" of container window to {'${ICON_LR_PADDING}', '${ICON_Y}'}
set position of item " " of container window to {'${RIGHT_ICON_PADDING_RIGHT}', '${ICON_Y}'}
set position of item ".background" of container window to {'${HIDE_X}', '${HIDE_Y}'}
set position of item ".VolumeIcon.icns" of container window to {'${HIDE_X}', '${HIDE_Y}'}
set position of item ".fseventsd" of container window to {'${HIDE_X}', '${HIDE_Y}'}
set position of item "Icon?" of container window to {'${HIDE_X}', '${HIDE_Y}'}
close
open
update without registering applications
delay 2
end tell
end tell
' | osascript
sync
# unmount it
hdiutil detach "${DEVICE}"
# now make the final image a compressed disk image
echo "Creating compressed image"
hdiutil convert "${DMG_TMP}" -format UDZO -imagekey zlib-level=9 -o "${DMG_FINAL}"
# clean up
rm -rf "${DMG_TMP}"
rm -rf "${STAGING_DIR}"
echo 'Done.'
exit
|
# coding: utf-8
# In[ ]:
import json
from bs4 import BeautifulSoup
import requests
from pprint import pprint
import re
import html5lib
import numpy as np
import pandas as pd
def pop_type(dens):
    """Classify a population density into 'rural', 'suburban' or 'urban'.

    Densities in [300, 1000] (inclusive) are 'suburban'.
    """
    if dens > 1000:
        return 'urban'
    if dens < 300:
        return 'rural'
    return 'suburban'
def parse_density_table(s):
    """Extract the population density from a Wikipedia city infobox.

    s: BeautifulSoup document. Scans the 'infobox geography vcard' table for
    a header cell containing "Density" and returns the numeric part before
    the '/' (thousands separators stripped) as a float, or None when no
    density row exists.
    Raises AttributeError when the page has no infobox table at all.
    """
    table = s.find('table', attrs={'class':'infobox geography vcard'})
    rows = table.find_all('tr')
    for row in rows:
        th_row = row.find_all('th')
        for ele in th_row:
            if re.search("Density", ele.text):
                # The value cell follows the header; next_sibling twice skips
                # the whitespace text node between them.
                density = ele.next_sibling.next_sibling.text
                density = density.replace(",", "")
                find = re.compile(r"^(.*?)\/")
                return float(re.search(find, density).group(1))
    return None
def check_city(city, state, file):
    """Return True if (city, state) already appears in the loaded table.

    `file` is a header-less two-column table (e.g. a pandas DataFrame) where
    column 0 holds city names and column 1 holds state codes.
    """
    return any(c == city and s == state for c, s in zip(file[0], file[1]))
def parse_climate_table(s):
    """Extract average high/low temperatures from a Wikipedia climate table.

    s: BeautifulSoup document. Finds the table whose first header starts with
    "climate data" and reads the "Average high" / "Average low" rows at cell
    index 12 (which appears to be the annual/year column — TODO confirm).

    Returns (high, low) as floats, or (None, None) when the table or the
    rows are missing. Fix: the original crashed with AttributeError when no
    climate table existed (''.find_all), with AttributeError on rows lacking
    a <th>, and with UnboundLocalError when the temperature rows were absent.
    """
    climate_table = None
    for table in s.find_all('table'):
        try:
            header = table.find('tr').find('th')
            if header and header.text.lower().find('climate data') == 0:
                climate_table = table
        except Exception:
            print("No Climate Data")
            continue
    if climate_table is None:
        return None, None
    temp_high = None
    temp_low = None
    for row in climate_table.find_all('tr'):
        th = row.find('th')
        if th is None:
            continue
        if re.search("Average high", th.text):
            temp_high = float(row.find_all('td')[12].text.split()[0])
        if re.search("Average low", th.text):
            temp_low = float(row.find_all('td')[12].text.split()[0])
            break  # the low row comes after the high row; we are done
    return temp_high, temp_low
with open('C:/Users/David/Downloads/all_cities_data.json', 'r') as data_file:
    # One JSON object per line.
    all_cities = [json.loads(line) for line in data_file]

# Cities already scraped in a previous run; used to resume without
# re-fetching (check for cost > 0, job == true, zipcode == true below).
file = pd.read_csv('densityTable.csv', header=None)

for entry in all_cities:
    if check_city(entry['name'], entry['state'], file):
        print('Skipping: ' + entry['name'] + ', ' + entry['state'])
        continue
    if "job" in entry and entry["cost"] > 0 and len(entry["zipcode"]) != 0:
        try:
            city = entry['name']
            state = entry['state']
            # BUG FIX: str.replace returns a new string; the original
            # discarded the result, so multi-word city names produced
            # broken Wikipedia URLs. Underscore only the URL component so
            # the CSV keeps the raw name check_city compares against.
            url_city = city.replace(' ', '_')
            url = ("https://en.wikipedia.org/w/index.php?title="
                   + url_city + ",_" + state + "&printable=yes")
        except Exception:
            # entry may lack 'name'/'state'; use .get so this message
            # cannot itself raise NameError.
            print('No Result:', entry.get('name', '?') + ',', entry.get('state', '?'))
            continue
        # HTML parsing
        handle = requests.get(url)
        soup = BeautifulSoup(handle.text, 'html.parser')
        # Parse table for DENSITY; fall back to 0 when the infobox is
        # missing or malformed so the row is still recorded.
        DENSITY_OUT = 0
        try:
            DENSITY_OUT = parse_density_table(soup)
        except Exception:
            print('No Density:', city + ',', state)
        # Append immediately so the run can be resumed after a crash.
        output = city + ',' + state + ',' + str(DENSITY_OUT) + '\n'
        print(output)
        with open('densityTable.csv', 'a') as fd:
            fd.write(output)
# In[ ]:
# In[ ]:
|
<reponame>nabeelkhan/Oracle-DBA-Life<filename>SCRIPTS/objwait.sql
REM FILE NAME: objwait.sql
REM LOCATION: System Monitoring\Reports
REM FUNCTION: Report the objects that your users are accessing
REM when they are forced to wait for disk I/O.
REM TESTED ON: 7.3.3.5, 8.0.4.1, 8.1.5, 8.1.7, 9.0.1
REM PLATFORM: non-specific
REM REQUIRES: v$session, v$sqlarea, v$session_wait, dba_extents
REM
REM This is a part of the Knowledge Xpert for Oracle Administration library.
REM Copyright (C) 2001 Quest Software
REM All rights reserved.
REM
REM******************** Knowledge Xpert for Oracle Administration ********************
REM
REM Suppress headings, paging, verification, feedback and echo so the
REM spooled output is a clean, directly runnable SQL*Plus script.
SET heading off
SET pagesize 999
SET verify off
SET feedback off
SET echo off
COLUMN aa newline
REM The SELECT below does not run the report itself: it GENERATES a second
REM script (one dba_extents lookup per waiting session) into this spool file,
REM which is then executed by the @ command at the bottom.
SPOOL rep_out\objwait.sql
COLUMN nl newline
REM For every session currently waiting on a buffer/read/write event, emit
REM the user, SID and SQL text, plus a query that maps the wait's p1 (file#)
REM and p2 (block#) to the owning segment via dba_extents.
SELECT 'doc ' nl,
'User '
|| v$session.username
|| '('
|| v$session_wait.sid
|| ')' nl,
v$sqlarea.sql_text nl, '#' nl, 'select segment_name, segment_type ' nl,
'from dba_extents ' nl, 'where file_id='
|| v$session_wait.p1 nl,
' and '
|| v$session_wait.p2
|| ' between block_id and block_id
+ blocks -1);'
FROM v$session, v$sqlarea, v$session_wait
WHERE ( v$session_wait.event LIKE '%buffer%'
OR v$session_wait.event LIKE '%write%'
OR v$session_wait.event LIKE '%read%'
)
AND v$session_wait.sid = v$session.sid
AND v$session.sql_address = v$sqlarea.address
AND v$session.sql_hash_value = v$sqlarea.hash_value
/
SPOOL off
REM Execute the generated per-session lookups to produce the report.
@rep_out\objwait.sql
|
<filename>src/core/utils/hitTest.ts
import SheetBasic from 'core/SheetBasic';
import { getColSize } from './col';
import { getRowSize } from './row';
import { distanceOfCellToCanvasOrigin } from './distance';
import { DRAGGER_SIZE } from 'consts';
/**
 * Hit-tests the fill dragger ("serie box") handle.
 *
 * The dragger is a small square of DRAGGER_SIZE pixels centered on the
 * bottom-right corner of the selected viewport. Returns true when the
 * mouse coordinates fall inside that square, false otherwise (including
 * when the anchor cell has no resolvable canvas position).
 */
export const hitSeriebox = (
  sheet: SheetBasic,
  mouseX: number,
  mouseY: number
) => {
  const state = sheet.getState();
  const { x, y, xEnd, yEnd } = state.selectedGroupViewport;
  // Anchor on the bottom-right cell of the selection regardless of the
  // direction the selection was dragged in.
  const col = Math.max(x, xEnd);
  const row = Math.max(y, yEnd);
  const width = getColSize(sheet, col);
  const height = getRowSize(sheet, row);
  const origin = distanceOfCellToCanvasOrigin(sheet, col, row);
  if (!origin) {
    return false;
  }
  const half = DRAGGER_SIZE / 2;
  const left = origin[0] + width - half;
  const top = origin[1] + height - half;
  const insideX = mouseX >= left && mouseX <= left + DRAGGER_SIZE;
  const insideY = mouseY >= top && mouseY <= top + DRAGGER_SIZE;
  return insideX && insideY;
};
/**
 * Locates the merge region containing a cell, if any.
 *
 * Scans state.merges in order and returns the first viewport whose
 * inclusive [x, xEnd] x [y, yEnd] range contains (cellX, cellY), along
 * with its index in the merges array. Returns null when the cell is not
 * inside any merge.
 */
export const isCellInMergeViewport = (
  sheet: SheetBasic,
  cellX: number,
  cellY: number
) => {
  const { merges } = sheet.getState();
  for (let index = 0; index < merges.length; index += 1) {
    const candidate = merges[index];
    const insideX = cellX >= candidate.x && cellX <= candidate.xEnd;
    const insideY = cellY >= candidate.y && cellY <= candidate.yEnd;
    if (insideX && insideY) {
      return { viewport: candidate, index: index };
    }
  }
  return null;
};
|
#!/usr/bin/env bash
set -euo pipefail
main() {
# Provision a Kafka service and a Postgres database, wait for everything
# to reach RUNNING, then create the topics. The order matters: topics can
# only be created once the Kafka service is up.
create_stream
create_database
wait_until_running
create_topics
}
create_stream() {
# Create the Kafka service named "stream" on a startup-2 plan in
# google-europe-west3.
avn service create stream \
--service-type kafka \
--plan startup-2 \
--cloud google-europe-west3
}
create_database() {
# Create the PostgreSQL service named "database" on the hobbyist plan in
# the same region as the Kafka service.
avn service create database \
--service-type pg \
--plan hobbyist \
--cloud google-europe-west3
}
wait_until_running() {
# Poll every service until none report a state other than RUNNING.
while true; do
# Count services whose state is not RUNNING. The trailing `|| true` is a
# BUG FIX: `grep --count` exits 1 when the count is 0, and under the
# script's `set -euo pipefail` that killed the script at the exact moment
# all services became RUNNING.
pending_services="$(
avn service list --format '{service_name}' \
| xargs -n1 avn service get --format '{state}' \
| grep --count --invert-match RUNNING || true
)"
# BUG FIX: the original tested "$pending_service" (missing trailing "s"),
# an unset variable that aborts under `set -u` and would otherwise make
# the numeric comparison fail.
if [ "$pending_services" -eq 0 ]; then
break
else
sleep 2
fi
done
}
create_topics() {
# Create the production and test topics on the "stream" Kafka service,
# each with a single partition and a replication factor of 2.
avn service topic-create stream \
website_monitor \
--partitions 1 \
--replication 2
avn service topic-create stream \
website_monitor_test \
--partitions 1 \
--replication 2
}
main |
package io.opensphere.subterrain.xraygoggles.ui;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import javax.swing.JCheckBoxMenuItem;
import javax.swing.JMenu;
import org.easymock.EasyMock;
import org.easymock.EasyMockSupport;
import org.junit.Test;
import io.opensphere.core.MapManager;
import io.opensphere.core.Toolbox;
import io.opensphere.core.control.ControlContext;
import io.opensphere.core.control.ControlRegistry;
import io.opensphere.core.control.ui.MenuBarRegistry;
import io.opensphere.core.control.ui.UIRegistry;
import io.opensphere.core.geometry.GeometryRegistry;
import io.opensphere.core.util.swing.EventQueueUtilities;
import io.opensphere.core.viewer.ViewChangeSupport;
import io.opensphere.core.viewer.impl.DynamicViewer;
import io.opensphere.core.viewer.impl.ScreenViewer;
import io.opensphere.subterrain.xraygoggles.model.XrayGogglesModel;
/**
* Unit test for the {@link XrayGogglesMenuProvider}.
*/
public class XrayGogglesMenuProviderTest
{
/**
 * Tests the menu provider.
 *
 * Verifies that constructing the provider installs an "Underground"
 * check box item in the View menu, and that toggling it populates /
 * clears the model's lower-left coordinate.
 */
@Test
public void test()
{
EasyMockSupport support = new EasyMockSupport();
JMenu viewMenu = new JMenu("View");
UIRegistry uiRegistry = createUIRegistry(support, viewMenu);
ScreenViewer viewer = createViewer(support);
MapManager mapManager = createMapManager(support, viewer);
GeometryRegistry geomRegistry = support.createNiceMock(GeometryRegistry.class);
Toolbox toolbox = createToolbox(support, uiRegistry, mapManager, geomRegistry);
support.replayAll();
XrayGogglesModel model = new XrayGogglesModel();
XrayGogglesMenuProvider provider = new XrayGogglesMenuProvider(toolbox, model);
// NOTE(review): the runnable is intentionally empty — presumably this
// just flushes pending EDT work so the menu item is installed before
// the assertions below run; confirm against EventQueueUtilities docs.
EventQueueUtilities.runOnEDTAndWait(() ->
{
});
JCheckBoxMenuItem menuItem = (JCheckBoxMenuItem)viewMenu.getItem(0);
assertEquals("Underground", menuItem.getText());
// Toggling the menu item on sets the model's corner coordinates;
// toggling it back off clears them again.
assertNull(model.getLowerLeft());
menuItem.doClick();
assertNotNull(model.getLowerLeft());
menuItem.doClick();
assertNull(model.getLowerLeft());
provider.close();
support.verifyAll();
}
/**
 * Creates an easy mocked {@link ControlRegistry}.
 *
 * The expectation counts (2 GLUI, 4 globe context fetches) mirror how
 * often the provider is expected to request each context over the
 * on/off/close cycle exercised above.
 *
 * @param support Used to create the mock.
 * @return The mocked {@link ControlRegistry}.
 */
private ControlRegistry createControlRegistry(EasyMockSupport support)
{
ControlContext glui = support.createNiceMock(ControlContext.class);
ControlContext globe = support.createNiceMock(ControlContext.class);
ControlRegistry controlRegistry = support.createMock(ControlRegistry.class);
EasyMock.expect(controlRegistry.getControlContext(ControlRegistry.GLUI_CONTROL_CONTEXT)).andReturn(glui).times(2);
EasyMock.expect(controlRegistry.getControlContext(ControlRegistry.GLOBE_CONTROL_CONTEXT)).andReturn(globe).times(4);
return controlRegistry;
}
/**
 * Creates an easy mocked {@link MapManager}.
 *
 * @param support Used to create the mock.
 * @param viewer A mocked {@link ScreenViewer} to return.
 * @return The mocked {@link MapManager}.
 */
private MapManager createMapManager(EasyMockSupport support, ScreenViewer viewer)
{
DynamicViewer standardViewer = support.createNiceMock(DynamicViewer.class);
// Real (non-mock) change support so view-change listeners can register.
ViewChangeSupport changeSupport = new ViewChangeSupport();
MapManager mapManager = support.createNiceMock(MapManager.class);
EasyMock.expect(mapManager.getViewChangeSupport()).andReturn(changeSupport).atLeastOnce();
EasyMock.expect(mapManager.getScreenViewer()).andReturn(viewer);
EasyMock.expect(mapManager.getStandardViewer()).andReturn(standardViewer);
return mapManager;
}
/**
 * Creates an easy mocked {@link Toolbox}.
 *
 * @param support Used to create the mock.
 * @param uiRegistry The mocked {@link UIRegistry} to return.
 * @param mapManager The mocked {@link MapManager} to return.
 * @param geomRegistry The mocked {@link GeometryRegistry} to return.
 * @return The mocked {@link Toolbox}.
 */
private Toolbox createToolbox(EasyMockSupport support, UIRegistry uiRegistry, MapManager mapManager,
GeometryRegistry geomRegistry)
{
ControlRegistry controlRegistry = createControlRegistry(support);
Toolbox toolbox = support.createMock(Toolbox.class);
EasyMock.expect(toolbox.getUIRegistry()).andReturn(uiRegistry);
EasyMock.expect(toolbox.getMapManager()).andReturn(mapManager);
EasyMock.expect(toolbox.getGeometryRegistry()).andReturn(geomRegistry);
EasyMock.expect(toolbox.getControlRegistry()).andReturn(controlRegistry);
return toolbox;
}
/**
 * Creates an easy mocked {@link UIRegistry}.
 *
 * @param support Used to create the mock.
 * @param viewMenu The test view menu.
 * @return The mocked {@link UIRegistry}.
 */
private UIRegistry createUIRegistry(EasyMockSupport support, JMenu viewMenu)
{
MenuBarRegistry menuBarRegistry = support.createMock(MenuBarRegistry.class);
EasyMock.expect(menuBarRegistry.getMenu(MenuBarRegistry.MAIN_MENU_BAR, MenuBarRegistry.VIEW_MENU)).andReturn(viewMenu);
UIRegistry uiRegistry = support.createMock(UIRegistry.class);
EasyMock.expect(uiRegistry.getMenuBarRegistry()).andReturn(menuBarRegistry);
return uiRegistry;
}
/**
 * Creates an easy mocked {@link ScreenViewer}.
 *
 * Reports a fixed 1920x1080 viewport.
 *
 * @param support Used to create the mock.
 * @return The mocked screen viewer.
 */
private ScreenViewer createViewer(EasyMockSupport support)
{
ScreenViewer viewer = support.createMock(ScreenViewer.class);
EasyMock.expect(Integer.valueOf(viewer.getViewportHeight())).andReturn(Integer.valueOf(1080));
EasyMock.expect(Integer.valueOf(viewer.getViewportWidth())).andReturn(Integer.valueOf(1920));
return viewer;
}
}
|
#!/bin/bash
# Generates a one-frame JPEG screenshot/thumbnail for every video found
# under a starting folder and assigns it as the file's custom icon (gio).
sudo echo 'Starting video screenshot and thumbnails generator' #sudo because find uses sudo
# Folder comes from $1 when given, otherwise prompt interactively.
if [ "$#" -eq 1 ];
then
starting_folder=$1
else
echo 'Please enter folder'
read -r starting_folder
fi
# resume where previously left off? "n" will
# not overwrite prior files created
resume_prior_ops="n"; # "y" for yes and "n" for no
find_results="";
# Case-insensitive match of common video extensions.
# NOTE(review): iterating $find_results below splits on whitespace, so
# paths containing spaces will break — run only on space-free trees.
command="find $starting_folder -regextype posix-awk -iregex '.+(\.mp4|\.wmv|\.mov|\.avi|\.flv|\.mpg|\.mpeg)'";
find_results=$(eval $command);
for video_file_orig in $find_results
do
fullpath=`realpath $video_file_orig`;
pathonly=`dirname $fullpath`;
add_star="/*";
# Extract the extension (without the dot) from the path.
command="echo $video_file_orig | sed 's/.*[.]\(mp4\|wmv\|mov\|avi\|flv\|mpg\|mpeg\)$/\/\1/g' | sed 's/\///'g";
video_name_ext=$(eval $command);
# Path without the extension.
command="echo $video_file_orig | sed 's/^\/\(.*\)[.]$video_name_ext$/\/\1/'";
video_file=$(eval $command);
# Bare file name without directory or extension.
command="echo $video_file_orig | sed 's/^.*\/\(.*\)[.]$video_name_ext$/\/\1/'";
video_name_no_ext=$(eval $command);
icon_folder="$pathonly/icon";
thumbpath="$pathonly/icon/videos";
thumb_ext="_thumb.jpg";
thumb_path_name="$thumbpath$video_name_no_ext$thumb_ext";
eval "mkdir $icon_folder";
eval "mkdir $thumbpath";
# 60 sec
# Grab a single frame 60s in, then resize to 1080px wide and crop.
# "-$resume_prior_ops" expands to ffmpeg's -n (never overwrite) or -y
# (always overwrite) flag, implementing the resume toggle above.
resize_cmd="convert $thumb_path_name -resize 1080 $thumb_path_name";
crop_cmd="mogrify -crop 650x1080+250+0 $thumb_path_name";
command="ffmpeg -$resume_prior_ops -itsoffset -60 -i $video_file_orig -vcodec mjpeg -vframes 1 -an -f rawvideo $thumb_path_name && $resize_cmd && $crop_cmd";
echo $command;
eval $command;
#eval "convert $thumb_path_name -resize 1080 $thumb_path_name";
#eval "mogrify -crop 650x1080+250+0 $thumb_path_name";
#eval "mv $video_file$thumb_ext $thumbpath";
#command="/media/veracrypt10/other/General/software/custom_icon/auto_icon_assign_both_args.sh $starting_folder";
#command="xterm -e \"sleep 10 && /usr/bin/gio set \\\"$video_file_orig\\\" metadata::custom-icon \\\"file://$thumb_path_name\\\"\" && echo \"done\" && sleep 10";
# Point the file manager's custom-icon metadata at the new thumbnail.
/usr/bin/gio set "$video_file_orig" metadata::custom-icon "file://$thumb_path_name";
#echo $command;
#eval $command;
echo "changing permissions";
command="sudo chmod -R 777 $thumbpath$add_star";
eval $command;
echo $command;
command="sudo chmod -R 777 $thumbpath";
eval $command;
done
|
<gh_stars>0
var async = require("async");
var assert = require('assert');
var Amount = require("divvy-lib").Amount;
var Remote = require("divvy-lib").Remote;
var Server = require("./server").Server;
var testutils = require("./testutils");
var config = testutils.init_config();
// Payment and trust-line (credit limit) behavior against a fresh test
// ledger. Each test drives the remote through async steps; engine_result
// codes are asserted at each submit.
suite('Sending', function() {
var $ = { };
setup(function(done) {
testutils.build_setup().call($, done);
});
teardown(function(done) {
testutils.build_teardown().call($, done);
});
// A tiny XDV payment to an unfunded account must fail with
// tecNO_DST_INSUF_XDV both at submit and when finalized.
test("send XDV to non-existent account with insufficent fee", function (done) {
var self = this;
var ledgers = 20;
var got_proposed;
$.remote.transaction()
.payment('root', 'alice', "1")
.once('submitted', function (m) {
// Transaction got an error.
// console.log("proposed: %s", JSON.stringify(m));
assert.strictEqual(m.engine_result, 'tecNO_DST_INSUF_XDV');
got_proposed = true;
$.remote.ledger_accept(); // Move it along.
})
.once('final', function (m) {
// console.log("final: %s", JSON.stringify(m, undefined, 2));
assert.strictEqual(m.engine_result, 'tecNO_DST_INSUF_XDV');
done();
})
.submit();
});
// Also test transaction becomes lost after tecNO_DST.
// Setting a credit limit toward an account that does not exist must be
// rejected with tecNO_DST.
test("credit_limit to non-existent account = tecNO_DST", function (done) {
$.remote.transaction()
.divvy_line_set("root", "100/USD/alice")
.once('submitted', function (m) {
//console.log("proposed: %s", JSON.stringify(m));
assert.strictEqual(m.engine_result, 'tecNO_DST');
done();
})
.submit();
});
// Full credit-limit lifecycle: create, read back, modify, reject a
// negative limit, zero it (deleting the line), then set limits on both
// sides and verify from each party's point of view.
test("credit_limit", function (done) {
var self = this;
var steps = [
function (callback) {
self.what = "Create accounts.";
testutils.create_accounts($.remote, "root", "10000.0", ["alice", "bob", "mtgox"], callback);
},
function (callback) {
self.what = "Check a non-existent credit limit.";
$.remote.request_divvy_balance("alice", "mtgox", "USD", 'CURRENT')
.on('divvy_state', function (m) {
callback(new Error(m));
})
.on('error', function(m) {
// console.log("error: %s", JSON.stringify(m));
assert.strictEqual('remoteError', m.error);
assert.strictEqual('entryNotFound', m.remote.error);
callback();
})
.request();
},
function (callback) {
self.what = "Create a credit limit.";
testutils.credit_limit($.remote, "alice", "800/USD/mtgox", callback);
},
function (callback) {
$.remote.request_divvy_balance("alice", "mtgox", "USD", 'CURRENT')
.on('divvy_state', function (m) {
// console.log("BALANCE: %s", JSON.stringify(m));
// console.log("account_balance: %s", m.account_balance.to_text_full());
// console.log("account_limit: %s", m.account_limit.to_text_full());
// console.log("peer_balance: %s", m.peer_balance.to_text_full());
// console.log("peer_limit: %s", m.peer_limit.to_text_full());
assert(m.account_balance.equals("0/USD/alice"));
assert(m.account_limit.equals("800/USD/mtgox"));
assert(m.peer_balance.equals("0/USD/mtgox"));
assert(m.peer_limit.equals("0/USD/alice"));
callback();
})
.request();
},
function (callback) {
self.what = "Modify a credit limit.";
testutils.credit_limit($.remote, "alice", "700/USD/mtgox", callback);
},
function (callback) {
$.remote.request_divvy_balance("alice", "mtgox", "USD", 'CURRENT')
.on('divvy_state', function (m) {
assert(m.account_balance.equals("0/USD/alice"));
assert(m.account_limit.equals("700/USD/mtgox"));
assert(m.peer_balance.equals("0/USD/mtgox"));
assert(m.peer_limit.equals("0/USD/alice"));
callback();
})
.request();
},
// Set negative limit.
function (callback) {
$.remote.transaction()
.divvy_line_set("alice", "-1/USD/mtgox")
.once('submitted', function (m) {
assert.strictEqual('temBAD_LIMIT', m.engine_result);
callback();
})
.submit();
},
// function (callback) {
// self.what = "Display ledger";
//
// $.remote.request_ledger('current', true)
// .on('success', function (m) {
// console.log("Ledger: %s", JSON.stringify(m, undefined, 2));
//
// callback();
// })
// .request();
// },
function (callback) {
self.what = "Zero a credit limit.";
testutils.credit_limit($.remote, "alice", "0/USD/mtgox", callback);
},
function (callback) {
self.what = "Make sure line is deleted.";
$.remote.request_divvy_balance("alice", "mtgox", "USD", 'CURRENT')
.on('divvy_state', function (m) {
// Used to keep lines.
// assert(m.account_balance.equals("0/USD/alice"));
// assert(m.account_limit.equals("0/USD/alice"));
// assert(m.peer_balance.equals("0/USD/mtgox"));
// assert(m.peer_limit.equals("0/USD/mtgox"));
callback(new Error(m));
})
.on('error', function (m) {
// console.log("error: %s", JSON.stringify(m));
assert.strictEqual('remoteError', m.error);
assert.strictEqual('entryNotFound', m.remote.error);
callback();
})
.request();
},
// TODO Check in both owner books.
function (callback) {
self.what = "Set another limit.";
testutils.credit_limit($.remote, "alice", "600/USD/bob", callback);
},
function (callback) {
self.what = "Set limit on other side.";
testutils.credit_limit($.remote, "bob", "500/USD/alice", callback);
},
function (callback) {
self.what = "Check divvy_line's state from alice's pov.";
$.remote.request_divvy_balance("alice", "bob", "USD", 'CURRENT')
.on('divvy_state', function (m) {
// console.log("proposed: %s", JSON.stringify(m));
assert(m.account_balance.equals("0/USD/alice"));
assert(m.account_limit.equals("600/USD/bob"));
assert(m.peer_balance.equals("0/USD/bob"));
assert(m.peer_limit.equals("500/USD/alice"));
callback();
})
.request();
},
function (callback) {
self.what = "Check divvy_line's state from bob's pov.";
$.remote.request_divvy_balance("bob", "alice", "USD", 'CURRENT')
.on('divvy_state', function (m) {
assert(m.account_balance.equals("0/USD/bob"));
assert(m.account_limit.equals("500/USD/alice"));
assert(m.peer_balance.equals("0/USD/alice"));
assert(m.peer_limit.equals("600/USD/bob"));
callback();
})
.request();
}
]
// self.what names the current step so a failure message points at it.
async.waterfall(steps, function(error) {
assert(!error, self.what);
done();
});
});
});
// Direct IOU payments between two accounts with mutual trust lines:
// balances are pushed to each side's limit and one payment past the
// limit must fail with tecPATH_DRY.
suite('Sending future', function() {
var $ = { };
setup(function(done) {
testutils.build_setup().call($, done);
});
teardown(function(done) {
testutils.build_teardown().call($, done);
});
test('direct divvy', function(done) {
var self = this;
// $.remote.set_trace();
var steps = [
function (callback) {
self.what = "Create accounts.";
testutils.create_accounts($.remote, "root", "10000.0", ["alice", "bob"], callback);
},
function (callback) {
self.what = "Set alice's limit.";
testutils.credit_limit($.remote, "alice", "600/USD/bob", callback);
},
function (callback) {
self.what = "Set bob's limit.";
testutils.credit_limit($.remote, "bob", "700/USD/alice", callback);
},
function (callback) {
self.what = "Set alice send bob partial with alice as issuer.";
$.remote.transaction()
.payment('alice', 'bob', "24/USD/alice")
.once('submitted', function (m) {
// console.log("proposed: %s", JSON.stringify(m));
callback(m.engine_result !== 'tesSUCCESS');
})
.once('final', function (m) {
assert(m.engine_result !== 'tesSUCCESS');
})
.submit();
},
function (callback) {
self.what = "Verify balance.";
$.remote.request_divvy_balance("alice", "bob", "USD", 'CURRENT')
.once('divvy_state', function (m) {
assert(m.account_balance.equals("-24/USD/alice"));
assert(m.peer_balance.equals("24/USD/bob"));
callback();
})
.request();
},
function (callback) {
self.what = "Set alice send bob more with bob as issuer.";
$.remote.transaction()
.payment('alice', 'bob', "33/USD/bob")
.once('submitted', function (m) {
// console.log("proposed: %s", JSON.stringify(m));
callback(m.engine_result !== 'tesSUCCESS');
})
.once('final', function (m) {
assert(m.engine_result !== 'tesSUCCESS');
})
.submit();
},
function (callback) {
self.what = "Verify balance from bob's pov.";
$.remote.request_divvy_balance("bob", "alice", "USD", 'CURRENT')
.once('divvy_state', function (m) {
assert(m.account_balance.equals("57/USD/bob"));
assert(m.peer_balance.equals("-57/USD/alice"));
callback();
})
.request();
},
function (callback) {
self.what = "Bob send back more than sent.";
$.remote.transaction()
.payment('bob', 'alice', "90/USD/bob")
.once('submitted', function (m) {
// console.log("proposed: %s", JSON.stringify(m));
callback(m.engine_result !== 'tesSUCCESS');
})
.once('final', function (m) {
assert(m.engine_result !== 'tesSUCCESS');
})
.submit();
},
function (callback) {
self.what = "Verify balance from alice's pov: 1";
$.remote.request_divvy_balance("alice", "bob", "USD", 'CURRENT')
.once('divvy_state', function (m) {
assert(m.account_balance.equals("33/USD/alice"));
callback();
})
.request();
},
function (callback) {
self.what = "Alice send to limit.";
$.remote.transaction()
.payment('alice', 'bob', "733/USD/bob")
.once('submitted', function (m) {
// console.log("submitted: %s", JSON.stringify(m));
callback(m.engine_result !== 'tesSUCCESS');
})
.once('final', function (m) {
assert(m.engine_result !== 'tesSUCCESS');
})
.submit();
},
function (callback) {
self.what = "Verify balance from alice's pov: 2";
$.remote.request_divvy_balance("alice", "bob", "USD", 'CURRENT')
.once('divvy_state', function (m) {
assert(m.account_balance.equals("-700/USD/alice"));
callback();
})
.request();
},
function (callback) {
self.what = "Bob send to limit.";
$.remote.transaction()
.payment('bob', 'alice', "1300/USD/bob")
.once('submitted', function (m) {
// console.log("submitted: %s", JSON.stringify(m));
callback(m.engine_result !== 'tesSUCCESS');
})
.once('final', function (m) {
assert(m.engine_result !== 'tesSUCCESS');
})
.submit();
},
function (callback) {
self.what = "Verify balance from alice's pov: 3";
$.remote.request_divvy_balance("alice", "bob", "USD", 'CURRENT')
.once('divvy_state', function (m) {
assert(m.account_balance.equals("600/USD/alice"));
callback();
})
.request();
},
function (callback) {
// If this gets applied out of order, it could stop the big payment.
self.what = "Bob send past limit.";
$.remote.transaction()
.payment('bob', 'alice', "1/USD/bob")
.once('submitted', function (m) {
// console.log("submitted: %s", JSON.stringify(m));
callback(m.engine_result !== 'tecPATH_DRY');
})
.submit();
},
function (callback) {
self.what = "Verify balance from alice's pov: 4";
$.remote.request_divvy_balance("alice", "bob", "USD", 'CURRENT')
.once('divvy_state', function (m) {
assert(m.account_balance.equals("600/USD/alice"));
callback();
})
.request();
},
// function (callback) {
// // Make sure all is good after canonical ordering.
// self.what = "Close the ledger and check balance.";
//
// $.remote
// .once('ledger_closed', function (message) {
// // console.log("LEDGER_CLOSED: A: %d: %s", ledger_closed_index, ledger_closed);
// callback();
// })
// .ledger_accept();
// },
// function (callback) {
// self.what = "Verify balance from alice's pov: 5";
//
// $.remote.request_divvy_balance("alice", "bob", "USD", 'CURRENT')
// .once('divvy_state', function (m) {
// console.log("account_balance: %s", m.account_balance.to_text_full());
// console.log("account_limit: %s", m.account_limit.to_text_full());
// console.log("peer_balance: %s", m.peer_balance.to_text_full());
// console.log("peer_limit: %s", m.peer_limit.to_text_full());
//
// assert(m.account_balance.equals("600/USD/alice"));
//
// callback();
// })
// .request();
// },
]
// self.what names the current step so a failure message points at it.
async.waterfall(steps, function(error) {
assert(!error, self.what);
done();
});
});
});
suite('Gateway', function() {
var $ = { };
setup(function(done) {
testutils.build_setup().call($, done);
});
teardown(function(done) {
testutils.build_teardown().call($, done);
});
test("customer to customer with and without transfer fee", function (done) {
var self = this;
// $.remote.set_trace();
var steps = [
function (callback) {
self.what = "Create accounts.";
testutils.create_accounts($.remote, "root", "10000.0", ["alice", "bob", "mtgox"], callback);
},
function (callback) {
self.what = "Set credit limits.";
testutils.credit_limits($.remote, {
"alice" : "100/AUD/mtgox",
"bob" : "100/AUD/mtgox",
},
callback);
},
function (callback) {
self.what = "Distribute funds.";
testutils.payments($.remote, {
"mtgox" : [ "1/AUD/alice" ],
},
callback);
},
function (callback) {
self.what = "Verify balances.";
testutils.verify_balances($.remote, {
"alice" : "1/AUD/mtgox",
"mtgox" : "-1/AUD/alice",
},
callback);
},
function (callback) {
self.what = "Alice sends Bob 1 AUD";
$.remote.transaction()
.payment("alice", "bob", "1/AUD/mtgox")
.once('proposed', function (m) {
// console.log("proposed: %s", JSON.stringify(m));
callback(m.engine_result !== 'tesSUCCESS');
})
.submit();
},
function (callback) {
self.what = "Verify balances 2.";
testutils.verify_balances($.remote, {
"alice" : "0/AUD/mtgox",
"bob" : "1/AUD/mtgox",
"mtgox" : "-1/AUD/bob",
},
callback);
},
function (callback) {
self.what = "Set transfer rate.";
$.remote.transaction()
.account_set("mtgox")
.transfer_rate(1e9*1.1)
.once('proposed', function (m) {
// console.log("proposed: %s", JSON.stringify(m));
callback(m.engine_result !== 'tesSUCCESS');
})
.submit();
},
function (callback) {
self.what = "Bob sends Alice 0.5 AUD";
$.remote.transaction()
.payment("bob", "alice", "0.5/AUD/mtgox")
.send_max("0.55/AUD/mtgox") // !!! Very important.
.once('proposed', function (m) {
// console.log("proposed: %s", JSON.stringify(m));
callback(m.engine_result !== 'tesSUCCESS');
})
.submit();
},
function (callback) {
self.what = "Verify balances 3.";
testutils.verify_balances($.remote, {
"alice" : "0.5/AUD/mtgox",
"bob" : "0.45/AUD/mtgox",
"mtgox" : [ "-0.5/AUD/alice","-0.45/AUD/bob" ],
},
callback);
},
]
async.waterfall(steps, function(error) {
assert(!error, self.what);
done();
});
});
test("customer to customer, transfer fee, default path with and without specific issuer for Amount and SendMax", function (done) {
var self = this;
// $.remote.set_trace();
var steps = [
function (callback) {
self.what = "Create accounts.";
testutils.create_accounts($.remote, "root", "10000.0", ["alice", "bob", "mtgox"], callback);
},
function (callback) {
self.what = "Set transfer rate.";
$.remote.transaction()
.account_set("mtgox")
.transfer_rate(1e9*1.1)
.once('submitted', function (m) {
// console.log("submitted: %s", JSON.stringify(m));
callback(m.engine_result !== 'tesSUCCESS');
})
.submit();
},
function (callback) {
self.what = "Set credit limits.";
testutils.credit_limits($.remote, {
"alice" : "100/AUD/mtgox",
"bob" : "100/AUD/mtgox",
},
callback);
},
function (callback) {
self.what = "Distribute funds.";
testutils.payments($.remote, {
"mtgox" : [ "4.4/AUD/alice" ],
},
callback);
},
function (callback) {
self.what = "Verify balances.";
testutils.verify_balances($.remote, {
"alice" : "4.4/AUD/mtgox",
},
callback);
},
function (callback) {
self.what = "Alice sends 1.1/AUD/mtgox Bob 1/AUD/mtgox";
$.remote.transaction()
.payment("alice", "bob", "1/AUD/mtgox")
.send_max("1.1/AUD/mtgox")
.once('submitted', function (m) {
// console.log("submitted: %s", JSON.stringify(m));
callback(m.engine_result !== 'tesSUCCESS');
})
.submit();
},
function (callback) {
self.what = "Verify balances 2.";
testutils.verify_balances($.remote, {
"alice" : "3.3/AUD/mtgox",
"bob" : "1/AUD/mtgox",
},
callback);
},
function (callback) {
self.what = "Alice sends 1.1/AUD/mtgox Bob 1/AUD/bob";
$.remote.transaction()
.payment("alice", "bob", "1/AUD/bob")
.send_max("1.1/AUD/mtgox")
.once('submitted', function (m) {
// console.log("submitted: %s", JSON.stringify(m));
callback(m.engine_result !== 'tesSUCCESS');
})
.submit();
},
function (callback) {
self.what = "Verify balances 3.";
testutils.verify_balances($.remote, {
"alice" : "2.2/AUD/mtgox",
"bob" : "2/AUD/mtgox",
},
callback);
},
function (callback) {
self.what = "Alice sends 1.1/AUD/alice Bob 1/AUD/mtgox";
$.remote.transaction()
.payment("alice", "bob", "1/AUD/mtgox")
.send_max("1.1/AUD/alice")
.once('submitted', function (m) {
// console.log("submitted: %s", JSON.stringify(m));
callback(m.engine_result !== 'tesSUCCESS');
})
.submit();
},
function (callback) {
self.what = "Verify balances 4.";
testutils.verify_balances($.remote, {
"alice" : "1.1/AUD/mtgox",
"bob" : "3/AUD/mtgox",
},
callback);
},
function (callback) {
// Must fail, doesn't know to use the mtgox
self.what = "Alice sends 1.1/AUD/alice Bob 1/AUD/bob";
$.remote.transaction()
.payment("alice", "bob", "1/AUD/bob")
.send_max("1.1/AUD/alice")
.once('submitted', function (m) {
// console.log("submitted: %s", JSON.stringify(m));
callback(m.engine_result !== 'tecPATH_DRY');
})
.submit();
},
function (callback) {
self.what = "Verify balances 5.";
testutils.verify_balances($.remote, {
"alice" : "1.1/AUD/mtgox",
"bob" : "3/AUD/mtgox",
},
callback);
}
]
async.waterfall(steps, function(error) {
assert(!error, self.what);
done();
});
});
test("subscribe test customer to customer with and without transfer fee", function (done) {
var self = this;
// $.remote.set_trace();
var steps = [
function (callback) {
self.what = "Create accounts.";
testutils.create_accounts($.remote, "root", "10000.0", ["alice", "bob", "mtgox"], callback);
},
function (callback) { testutils.ledger_close($.remote, callback); },
function (callback) {
self.what = "Set credit limits.";
testutils.credit_limits($.remote, {
"alice" : "100/AUD/mtgox",
"bob" : "100/AUD/mtgox",
},
callback);
},
function (callback) { testutils.ledger_close($.remote, callback); },
function (callback) {
self.what = "Distribute funds.";
testutils.payments($.remote, {
"mtgox" : [ "1/AUD/alice" ],
},
callback);
},
function (callback) { testutils.ledger_close($.remote, callback); },
function (callback) {
self.what = "Verify balances.";
testutils.verify_balances($.remote, {
"alice" : "1/AUD/mtgox",
"mtgox" : "-1/AUD/alice",
},
callback);
},
function (callback) {
self.what = "Alice sends Bob 1 AUD";
$.remote.transaction()
.payment("alice", "bob", "1/AUD/mtgox")
.on('proposed', function (m) {
// console.log("proposed: %s", JSON.stringify(m));
callback(m.engine_result !== 'tesSUCCESS');
})
.submit();
},
function (callback) { testutils.ledger_close($.remote, callback); },
function (callback) {
self.what = "Verify balances 2.";
testutils.verify_balances($.remote, {
"alice" : "0/AUD/mtgox",
"bob" : "1/AUD/mtgox",
"mtgox" : "-1/AUD/bob",
},
callback);
},
function (callback) {
self.what = "Set transfer rate.";
$.remote.transaction()
.account_set("mtgox")
.transfer_rate(1e9*1.1)
.once('proposed', function (m) {
// console.log("proposed: %s", JSON.stringify(m));
callback(m.engine_result !== 'tesSUCCESS');
})
.submit();
},
function (callback) { testutils.ledger_close($.remote, callback); },
function (callback) {
self.what = "Bob sends Alice 0.5 AUD";
$.remote.transaction()
.payment("bob", "alice", "0.5/AUD/mtgox")
.send_max("0.55/AUD/mtgox") // !!! Very important.
.on('proposed', function (m) {
// console.log("proposed: %s", JSON.stringify(m));
callback(m.engine_result !== 'tesSUCCESS');
})
.submit();
},
function (callback) {
self.what = "Verify balances 3.";
testutils.verify_balances($.remote, {
"alice" : "0.5/AUD/mtgox",
"bob" : "0.45/AUD/mtgox",
"mtgox" : [ "-0.5/AUD/alice","-0.45/AUD/bob" ],
},
callback);
},
function (callback) {
self.what = "Subscribe and accept.";
self.count = 0;
self.found = 0;
$.remote
.on('transaction', function (m) {
// console.log("ACCOUNT: %s", JSON.stringify(m));
self.found = 1;
})
.on('ledger_closed', function (m) {
// console.log("LEDGER_CLOSE: %d: %s", self.count, JSON.stringify(m));
if (self.count) {
callback(!self.found);
} else {
self.count = 1;
$.remote.ledger_accept();
}
})
.request_subscribe().accounts("mtgox")
.request();
$.remote.ledger_accept();
}
]
async.waterfall(steps, function(error) {
assert(!error, self.what);
done();
});
});
// Regression test for transaction-retry logic: a ledger_accept step is
// inserted so all prior transactions apply before the next one is submitted.
test("subscribe test: customer to customer with and without transfer fee: transaction retry logic", function (done) {
  var self = this;
  // $.remote.set_trace();
  var steps = [
    function (callback) {
      self.what = "Create accounts.";
      testutils.create_accounts($.remote, "root", "10000.0", ["alice", "bob", "mtgox"], callback);
    },
    function (callback) {
      self.what = "Set credit limits.";
      testutils.credit_limits($.remote,
        {
          "alice" : "100/AUD/mtgox",
          "bob" : "100/AUD/mtgox",
        },
        callback);
    },
    function (callback) {
      self.what = "Distribute funds.";
      testutils.payments($.remote, {
        "mtgox" : [ "1/AUD/alice" ],
      },
      callback);
    },
    function (callback) {
      self.what = "Verify balances.";
      testutils.verify_balances($.remote, {
        "alice" : "1/AUD/mtgox",
        "mtgox" : "-1/AUD/alice",
      },
      callback);
    },
    function (callback) {
      self.what = "Alice sends Bob 1 AUD";
      $.remote.transaction()
        .payment("alice", "bob", "1/AUD/mtgox")
        .on('proposed', function (m) {
          // console.log("proposed: %s", JSON.stringify(m));
          // Fail the step (truthy arg) unless the engine accepted the txn.
          callback(m.engine_result !== 'tesSUCCESS');
        })
        .submit();
    },
    function (callback) {
      self.what = "Verify balances 2.";
      testutils.verify_balances($.remote, {
        "alice" : "0/AUD/mtgox",
        "bob" : "1/AUD/mtgox",
        "mtgox" : "-1/AUD/bob",
      },
      callback);
    },
    // function (callback) {
    //   self.what = "Set transfer rate.";
    //
    //   $.remote.transaction()
    //     .account_set("mtgox")
    //     .transfer_rate(1e9*1.1)
    //     .once('proposed', function (m) {
    //       // console.log("proposed: %s", JSON.stringify(m));
    //       callback(m.engine_result !== 'tesSUCCESS');
    //     })
    //     .submit();
    // },
    // We now need to ensure that all prior transactions have executed before
    // the next transaction is submitted, as txn application logic has
    // changed.
    function(next){$.remote.ledger_accept(function(){next();});},
    function (callback) {
      self.what = "Bob sends Alice 0.5 AUD";
      $.remote.transaction()
        .payment("bob", "alice", "0.5/AUD/mtgox")
        .on('proposed', function (m) {
          // console.log("proposed: %s", JSON.stringify(m));
          callback(m.engine_result !== 'tesSUCCESS');
        })
        .submit();
    },
    function (callback) {
      self.what = "Verify balances 3.";
      testutils.verify_balances($.remote, {
        "alice" : "0.5/AUD/mtgox",
        "bob" : "0.5/AUD/mtgox",
        "mtgox" : [ "-0.5/AUD/alice","-0.5/AUD/bob" ],
      },
      callback);
    },
    function (callback) {
      self.what = "Subscribe and accept.";
      // Subscribe to mtgox account events, then close a ledger; the second
      // ledger_closed event checks a transaction notification was seen.
      self.count = 0;
      self.found = 0;
      $.remote
        .on('transaction', function (m) {
          // console.log("ACCOUNT: %s", JSON.stringify(m));
          self.found = 1;
        })
        .on('ledger_closed', function (m) {
          // console.log("LEDGER_CLOSE: %d: %s", self.count, JSON.stringify(m));
          if (self.count) {
            callback(!self.found);
          } else {
            self.count = 1;
            $.remote.ledger_accept();
          }
        })
        .request_subscribe().accounts("mtgox")
        .request();
      $.remote.ledger_accept();
    },
    function (callback) {
      self.what = "Verify balances 4.";
      testutils.verify_balances($.remote, {
        "alice" : "0.5/AUD/mtgox",
        "bob" : "0.5/AUD/mtgox",
        "mtgox" : [ "-0.5/AUD/alice","-0.5/AUD/bob" ],
      },
      callback);
    },
  ]
  async.waterfall(steps, function (error) {
    // self.what names the step that failed, if any.
    assert(!error, self.what);
    done();
  });
});
});
// Payments routed through the issuer (and through intermediary accounts),
// including explicit paths, multiple paths, and issuer transfer fees.
suite('Indirect divvy', function() {
  var $ = { };
  setup(function(done) {
    testutils.build_setup().call($, done);
  });
  teardown(function(done) {
    testutils.build_teardown().call($, done);
  });
  test("indirect divvy", function (done) {
    var self = this;
    // $.remote.set_trace();
    var steps = [
      function (callback) {
        self.what = "Create accounts.";
        testutils.create_accounts($.remote, "root", "10000.0", ["alice", "bob", "mtgox"], callback);
      },
      function (callback) {
        self.what = "Set credit limits.";
        testutils.credit_limits($.remote, {
          "alice" : "600/USD/mtgox",
          "bob" : "700/USD/mtgox",
        },
        callback);
      },
      function (callback) {
        self.what = "Distribute funds.";
        testutils.payments($.remote, {
          "mtgox" : [ "70/USD/alice", "50/USD/bob" ],
        },
        callback);
      },
      function (callback) {
        self.what = "Verify alice balance with mtgox.";
        testutils.verify_balance($.remote, "alice", "70/USD/mtgox", callback);
      },
      function (callback) {
        self.what = "Verify bob balance with mtgox.";
        testutils.verify_balance($.remote, "bob", "50/USD/mtgox", callback);
      },
      // Overspending must fail with tecPATH_PARTIAL, both to the issuer and
      // to another customer.
      function (callback) {
        self.what = "Alice sends more than has to issuer: 100 out of 70";
        $.remote.transaction()
          .payment("alice", "mtgox", "100/USD/mtgox")
          .once('submitted', function (m) {
            // console.log("proposed: %s", JSON.stringify(m));
            callback(m.engine_result !== 'tecPATH_PARTIAL');
          })
          .submit();
      },
      function (callback) {
        self.what = "Alice sends more than has to bob: 100 out of 70";
        $.remote.transaction()
          .payment("alice", "bob", "100/USD/mtgox")
          .once('submitted', function (m) {
            //console.log("proposed: %s", JSON.stringify(m));
            callback(m.engine_result !== 'tecPATH_PARTIAL');
          })
          .submit();
      }
    ]
    async.waterfall(steps, function(error) {
      assert(!error, self.what);
      done();
    });
  });
  test("indirect divvy with path", function (done) {
    var self = this;
    var steps = [
      function (callback) {
        self.what = "Create accounts.";
        testutils.create_accounts($.remote, "root", "10000.0", ["alice", "bob", "mtgox"], callback);
      },
      function (callback) {
        self.what = "Set credit limits.";
        testutils.credit_limits($.remote, {
          "alice" : "600/USD/mtgox",
          "bob" : "700/USD/mtgox",
        },
        callback);
      },
      function (callback) {
        self.what = "Distribute funds.";
        testutils.payments($.remote, {
          "mtgox" : [ "70/USD/alice", "50/USD/bob" ],
        },
        callback);
      },
      function (callback) {
        self.what = "Alice sends via a path";
        // Explicit single-hop path through the issuer.
        $.remote.transaction()
          .payment("alice", "bob", "5/USD/mtgox")
          .pathAdd( [ { account: "mtgox" } ])
          .on('proposed', function (m) {
            // console.log("proposed: %s", JSON.stringify(m));
            callback(m.engine_result !== 'tesSUCCESS');
          })
          .submit();
      },
      function (callback) {
        self.what = "Verify alice balance with mtgox.";
        testutils.verify_balance($.remote, "alice", "65/USD/mtgox", callback);
      },
      function (callback) {
        self.what = "Verify bob balance with mtgox.";
        testutils.verify_balance($.remote, "bob", "55/USD/mtgox", callback);
      }
    ]
    async.waterfall(steps, function(error) {
      assert(!error, self.what);
      done();
    });
  });
  test("indirect divvy with multi path", function (done) {
    var self = this;
    var steps = [
      function (callback) {
        self.what = "Create accounts.";
        testutils.create_accounts($.remote, "root", "10000.0", ["alice", "bob", "carol", "amazon", "mtgox"], callback);
      },
      function (callback) {
        self.what = "Set credit limits.";
        testutils.credit_limits($.remote, {
          "amazon" : "2000/USD/mtgox",
          "bob" : [ "600/USD/alice", "1000/USD/mtgox" ],
          "carol" : [ "700/USD/alice", "1000/USD/mtgox" ],
        },
        callback);
      },
      function (callback) {
        self.what = "Distribute funds.";
        testutils.payments($.remote, {
          "mtgox" : [ "100/USD/bob", "100/USD/carol" ],
        },
        callback);
      },
      function (callback) {
        self.what = "Alice pays amazon via multiple paths";
        // 150 USD is routed over two paths: bob (exhausted at 100) then carol.
        $.remote.transaction()
          .payment("alice", "amazon", "150/USD/mtgox")
          .pathAdd( [ { account: "bob" } ])
          .pathAdd( [ { account: "carol" } ])
          .on('proposed', function (m) {
            // console.log("proposed: %s", JSON.stringify(m));
            callback(m.engine_result !== 'tesSUCCESS');
          })
          .submit();
      },
      function (callback) {
        self.what = "Verify balances.";
        testutils.verify_balances($.remote, {
          "alice" : [ "-100/USD/bob", "-50/USD/carol" ],
          "amazon" : "150/USD/mtgox",
          "bob" : "0/USD/mtgox",
          "carol" : "50/USD/mtgox",
        },
        callback);
      },
    ]
    async.waterfall(steps, function(error) {
      assert(!error, self.what);
      done();
    });
  });
  test("indirect divvy with path and transfer fee", function (done) {
    var self = this;
    var steps = [
      function (callback) {
        self.what = "Create accounts.";
        testutils.create_accounts($.remote, "root", "10000.0", ["alice", "bob", "carol", "amazon", "mtgox"], callback);
      },
      function (callback) {
        self.what = "Set mtgox transfer rate.";
        // 1.1e9 = 10% issuer transfer fee (1e9 is the neutral rate).
        testutils.transfer_rate($.remote, "mtgox", 1.1e9, callback);
      },
      function (callback) {
        self.what = "Set credit limits.";
        testutils.credit_limits($.remote, {
          "amazon" : "2000/USD/mtgox",
          "bob" : [ "600/USD/alice", "1000/USD/mtgox" ],
          "carol" : [ "700/USD/alice", "1000/USD/mtgox" ],
        },
        callback);
      },
      function (callback) {
        self.what = "Distribute funds.";
        testutils.payments($.remote, {
          "mtgox" : [ "100/USD/bob", "100/USD/carol" ],
        },
        callback);
      },
      function (callback) {
        self.what = "Alice pays amazon via multiple paths";
        $.remote.transaction()
          .payment("alice", "amazon", "150/USD/mtgox")
          .send_max("200/USD/alice")
          .pathAdd( [ { account: "bob" } ])
          .pathAdd( [ { account: "carol" } ])
          .on('proposed', function (m) {
            // console.log("proposed: %s", JSON.stringify(m));
            callback(m.engine_result !== 'tesSUCCESS');
          })
          .submit();
      },
      // function (callback) {
      //   self.what = "Display ledger";
      //
      //   $.remote.request_ledger('current', true)
      //     .on('success', function (m) {
      //       console.log("Ledger: %s", JSON.stringify(m, undefined, 2));
      //
      //       callback();
      //     })
      //     .request();
      // },
      function (callback) {
        self.what = "Verify balances.";
        // 65.00000000000001 is correct.
        // This is result of limited precision.
        testutils.verify_balances($.remote, {
          "alice" : [ "-100/USD/bob", "-65.00000000000001/USD/carol" ],
          "amazon" : "150/USD/mtgox",
          "bob" : "0/USD/mtgox",
          "carol" : "35/USD/mtgox",
        },
        callback);
      }
    ]
    async.waterfall(steps, function(error) {
      assert(!error, self.what);
      done();
    });
  })
});
// Verifies that a short InvoiceID is accepted and zero-padded to 256 bits
// in the submitted transaction JSON.
suite('Invoice ID', function() {
  var $ = { };
  setup(function(done) {
    testutils.build_setup().call($, done);
  });
  teardown(function(done) {
    testutils.build_teardown().call($, done);
  });
  test('set InvoiceID on payment', function(done) {
    var self = this;
    var steps = [
      function (callback) {
        self.what = 'Create accounts';
        testutils.create_accounts($.remote, 'root', '10000.0', [ 'alice' ], callback);
      },
      function (callback) {
        self.what = 'Send a payment with InvoiceID';
        var tx = $.remote.transaction();
        tx.payment('root', 'alice', '10000');
        tx.invoiceID('DEADBEEF');
        tx.once('submitted', function(m) {
          assert.strictEqual(m.engine_result, 'tesSUCCESS');
          // 'DEADBEEF' is right-padded with zeros to a full 64 hex digits.
          assert.strictEqual(m.tx_json.InvoiceID, 'DEADBEEF00000000000000000000000000000000000000000000000000000000');
          callback();
        });
        tx.submit();
      }
    ]
    async.series(steps, function(err) {
      assert(!err, self.what + ': ' + err);
      done();
    });
  });
});
// vim:sw=2:sts=2:ts=8:et
|
const path = require('path');
const processModel = require('./src/model/index.js');
// Gatsby createPages hook: expands the content model for the current
// language and registers one page per model entry, all rendered with the
// generic Page template.
// NOTE(review): `graphql` is accepted but unused in the visible code.
module.exports.createPages = ({ actions, graphql }) => {
  const { createPage } = actions;
  const langs = ['ru', 'en']; // TODO: where to store the language list, and how to pass it in here?
  const lang = 'ru'; // TODO: how to pass the current language here?
  const processedModel = processModel.expand(require('./model/model'), lang);
  const model = processedModel.model;
  model.forEach(page => {
    let template;
    page.type = page.type || 'article';
    // Every page type currently falls through to the default Page template.
    switch (page.type) {
      default:
        template = path.resolve(`src/components/Page/Page.tsx`);
    }
    // Context carries the page fields plus the full model and language info.
    return createPage({
      path: page.url,
      component: template,
      context: {
        ...page,
        model,
        langs,
        lang
      },
    });
  });
}
|
<gh_stars>1-10
import { call, fork, put, select, takeEvery, all } from 'redux-saga/effects';
import { get, log, post } from '@navikt/digisyfo-npm';
import { browserHistory } from 'react-router';
import * as actions from './sykepengesoknader_actions';
import * as actiontyper from '../../../data/actiontyper';
import history from '../../../history';
import { finnSoknad } from './sykepengesoknader';
import logger from '../../../logging';
import { skalHenteSykepengesoknader } from './sykepengesoknaderSelectors';
import { getUrlTilKvittering, getUrlTilSoknad } from '../../../utils/urlUtils';
// Navigates to the receipt ("kvittering") page for the given søknad id.
const gaTilKvittering = (sykepengesoknadsId) => {
    browserHistory.push(getUrlTilKvittering(sykepengesoknadsId));
};
// Fetches all sykepengesøknader from the REST backend and dispatches
// hentet (success) or feilet (failure) actions.
export function* oppdaterSykepengesoknader() {
    yield put(actions.henterSykepengesoknader());
    try {
        const data = yield call(get, `${process.env.REACT_APP_SYFOREST_ROOT}/soknader`);
        yield put(actions.sykepengesoknaderHentet(data));
    } catch (e) {
        log(e);
        logger.error(`Kunne ikke hente sykepengesoknader. URL: ${window.location.href} - ${e.message}`);
        yield put(actions.hentSykepengesoknaderFeilet());
    }
}
// Fetches søknader only when the selector reports they are not loaded yet.
export function* hentSykepengesoknaderHvisIkkeHentet() {
    const skalHente = yield select(skalHenteSykepengesoknader);
    if (skalHente) {
        // Delegates to the fetch saga by yielding its iterator directly.
        yield oppdaterSykepengesoknader();
    }
}
// POSTs the søknad to its .../actions/send endpoint and dispatches
// sendt (success) or feilet (failure) actions.
export function* sendSykepengesoknad(action) {
    yield put(actions.senderSykepengesoknad());
    try {
        const sykepengesoknad = yield call(post, `${process.env.REACT_APP_SYFOREST_ROOT}/soknader/${action.sykepengesoknad.id}/actions/send`, action.sykepengesoknad);
        yield put(actions.sykepengesoknadSendt(action.sykepengesoknad.id, sykepengesoknad));
    } catch (e) {
        log(e);
        logger.error(`Kunne ikke sende sykepengesøknad. URL: ${window.location.href} - ${e.message}`);
        yield put(actions.sendSykepengesoknadFeilet());
    }
}
// Sends the søknad to the employer ("arbeidsgiver") via its dedicated
// action endpoint; identified by id only (no body posted).
export function* sendSykepengesoknadTilArbeidsgiver(action) {
    yield put(actions.senderSykepengesoknad());
    try {
        const sykepengesoknad = yield call(post, `${process.env.REACT_APP_SYFOREST_ROOT}/soknader/${action.sykepengesoknadsId}/actions/send-til-arbeidsgiver`);
        yield put(actions.sykepengesoknadSendtTilArbeidsgiver(action.sykepengesoknadsId, sykepengesoknad));
    } catch (e) {
        log(e);
        logger.error(`Kunne ikke sende sykepengesøknad til arbeidsgiver. URL: ${window.location.href} - ${e.message}`);
        yield put(actions.sendSykepengesoknadFeilet());
    }
}
// Sends the søknad to NAV via its dedicated action endpoint.
export function* sendSykepengesoknadTilNAV(action) {
    yield put(actions.senderSykepengesoknad());
    try {
        const sykepengesoknad = yield call(post, `${process.env.REACT_APP_SYFOREST_ROOT}/soknader/${action.sykepengesoknadsId}/actions/send-til-nav`);
        yield put(actions.sykepengesoknadSendtTilNAV(action.sykepengesoknadsId, sykepengesoknad));
    } catch (e) {
        log(e);
        logger.error(`Kunne ikke sende sykepengesøknad til NAV. URL: ${window.location.href} - ${e.message}`);
        yield put(actions.sendSykepengesoknadFeilet());
    }
}
// Starts a correction ("korriger") of a submitted søknad, then navigates
// to the newly created draft søknad.
export function* startEndring(action) {
    try {
        const sykepengesoknad = yield call(post, `${process.env.REACT_APP_SYFOREST_ROOT}/soknader/${action.sykepengesoknadsId}/actions/korriger`);
        yield put(actions.endringStartet(sykepengesoknad));
        yield history.push(getUrlTilSoknad(sykepengesoknad.id));
    } catch (e) {
        log(e);
        // Report to the error logger as well, consistent with every other
        // saga in this module (previously this failure was only log()-ed).
        logger.error(`Kunne ikke starte endring av søknad. URL: ${window.location.href} - ${e.message}`);
        yield put(actions.startEndringFeilet());
    }
}
// Fetches enrichment ("berikelse") data for a søknad, refetching the søknad
// list first when the søknad is not present locally.
export function* hentBerikelse(action) {
    const soknad = yield select(finnSoknad, action.sykepengesoknadsId);
    // finnSoknad may yield undefined when the søknad is not in the store;
    // previously `!soknad.id` then crashed the saga with a TypeError
    // instead of triggering the refetch.
    if (!soknad || !soknad.id) {
        yield call(oppdaterSykepengesoknader);
    }
    yield put(actions.henterBerikelse());
    try {
        const data = yield call(get, `${process.env.REACT_APP_SYFOREST_ROOT}/soknader/${action.sykepengesoknadsId}/berik`);
        yield put(actions.berikelseHentet(data, action.sykepengesoknadsId));
    } catch (e) {
        log(e);
        logger.error(`Kunne ikke hente berikelse av søknaden. URL: ${window.location.href} - ${e.message}`);
        yield put(actions.hentBerikelseFeilet());
    }
}
// Cancels ("avbryt") a søknad and, on success, navigates to the receipt page.
export function* avbrytSoknad(action) {
    yield put(actions.avbryterSoknad());
    try {
        yield call(post, `${process.env.REACT_APP_SYFOREST_ROOT}/soknader/${action.sykepengesoknad.id}/actions/avbryt`);
        yield put(actions.soknadAvbrutt(action.sykepengesoknad.id));
        gaTilKvittering(action.sykepengesoknad.id);
    } catch (e) {
        log(e);
        logger.error(`Kunne ikke avbryte søknad. URL: ${window.location.href} - ${e.message}`);
        yield put(actions.avbrytSoknadFeilet());
    }
}
// Reopens ("gjenåpne") a previously cancelled søknad.
export function* gjenapneSoknad(action) {
    yield put(actions.gjenapnerSoknad());
    try {
        yield call(post, `${process.env.REACT_APP_SYFOREST_ROOT}/soknader/${action.sykepengesoknad.id}/actions/gjenapne`);
        yield put(actions.soknadGjenapnet(action.sykepengesoknad.id));
    } catch (e) {
        log(e);
        logger.error(`Kunne ikke gjenåpne søknad. URL: ${window.location.href} - ${e.message}`);
        yield put(actions.gjenapneSoknadFeilet());
    }
}
// Watcher sagas: each one maps a single action type to its worker saga.
function* watchHentBerikelse() {
    yield takeEvery(actiontyper.SYKEPENGESOKNAD_BERIKELSE_FORESPURT, hentBerikelse);
}
function* watchHentSykepengesoknader() {
    yield takeEvery(actiontyper.HENT_SYKEPENGESOKNADER_FORESPURT, hentSykepengesoknaderHvisIkkeHentet);
}
function* watchSendSykepengesoknad() {
    yield takeEvery(actiontyper.SEND_SYKEPENGESOKNAD_FORESPURT, sendSykepengesoknad);
}
function* watchSendSykepengesoknadTilNAV() {
    yield takeEvery(actiontyper.SEND_SYKEPENGESOKNAD_TIL_NAV_FORESPURT, sendSykepengesoknadTilNAV);
}
function* watchSendSykepengesoknadTilArbeidsgiver() {
    yield takeEvery(actiontyper.SEND_SYKEPENGESOKNAD_TIL_ARBEIDSGIVER_FORESPURT, sendSykepengesoknadTilArbeidsgiver);
}
function* watchEndreSykepengesoknad() {
    yield takeEvery(actiontyper.START_ENDRING_SYKEPENGESOKNAD_FORESPURT, startEndring);
}
function* watchAvbrytSoknad() {
    yield takeEvery(actiontyper.AVBRYT_SYKEPENGESOKNAD_FORESPURT, avbrytSoknad);
}
function* watchGjenapneSoknad() {
    yield takeEvery(actiontyper.GJENAPNE_SYKEPENGESOKNAD_FORESPURT, gjenapneSoknad);
}
// Root saga for this module: runs all watcher sagas concurrently.
export default function* sykepengesoknadSagas() {
    yield all([
        fork(watchHentSykepengesoknader),
        fork(watchSendSykepengesoknad),
        fork(watchSendSykepengesoknadTilNAV),
        fork(watchSendSykepengesoknadTilArbeidsgiver),
        fork(watchEndreSykepengesoknad),
        fork(watchHentBerikelse),
        fork(watchAvbrytSoknad),
        fork(watchGjenapneSoknad),
    ]);
}
|
import { DrawerContentScrollView } from "@react-navigation/drawer"
// Compiled once at module load. Semantically identical to the original
// string-built pattern, but a regex literal avoids the double-escaping trap
// (in the original, "\(" inside a double-quoted string collapsed to "(",
// so the intended escapes never reached the RegExp).
// Note: the pattern is intentionally unanchored, so any string that
// *contains* an http(s):// URL validates.
const URL_REGEX = /http[s]?:\/\/(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*(),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+/;

export const validators = {
    // Returns true when `s` contains an http(s) URL, false for the empty
    // string or any non-matching input.
    "URLValidator": (s) => {
        if (s === "") {
            return false;
        }
        return URL_REGEX.test(s);
    }
}
#include <assert.h>
#include <fstream>
#include <iostream>
#include <math.h>
#include <sstream>
#include <sys/stat.h>
#include <yaml-cpp/yaml.h>
#define NINT(x) ((x) < 0.0 ? (int)((x)-0.5) : (int)((x) + 0.5))
// Computes 2D Van Hove (self and collective) space-time correlation
// histograms for filament positions read from an ".msd.analysis" file,
// optionally split by filament handedness.
class VanHove {
private:
  float **posits_;  // [n_rows_][2 * n_filaments_]: x,y per filament per frame
  float ***hist_;   // [n_lags_][n_bins_1d_][n_bins_1d_]: 2D displacement histogram
  float **hist1d_;  // [n_lags_][2 * n_bins_1d_]: radial displacement histogram
  float *time_;     // [n_rows_]: simulation time of each frame
  int *lag_times_;  // [n_lags_]: log-spaced frame offsets up to max_lag_
  int *n_avg_;      // [n_lags_]: number of time origins sampled per lag
  int *handedness_; // [n_filaments_]: handedness value read per filament
  bool handedness_analysis_ = false; // split collective analysis by handedness
  bool handedness_similar_ = true;   // current pass: similar vs dissimilar pairs
  float system_size_ = -1; // periodic box edge (2 * system_radius)
  float inv_size_ = -1;    // 1 / system_size_
  float density_ = -1;     // filaments per area; set in ReadFileHeader (unused in visible code)
  // TODO Initialize the next few parameters from yaml file
  float equil_percentage_ = 0.1; // fraction of leading frames dropped as equilibration
  float bin_resolution_ = 0.5;   // target spatial bin width
  int n_lags_ = 30;              // number of lag times
  int sample_freq_ = 100;        // stride between time origins
  int max_lag_ = 15000;          // maximum lag (frames); clamped to n_rows_-1
  int n_bins_1d_ = -1;           // bins per axis; forced odd in LoadParams
  int n_rows_ = -1;              // data rows remaining after equilibration cut
  int n_filaments_ = -1;
  std::string file_root_;  // run_name + "_filament_" + species name
  std::string input_file_; // file_root_ + ".msd.analysis"
  void LoadParams(const YAML::Node &node);
  void BinDist(int t, float *r1, float *r2, bool double_count);
  void VerifyFile() const;
  void ReadLineNumber();
  void ReadFileHeader(std::ifstream &in);
  void RunSelfAnalysis();
  void RunCollectiveAnalysis();
  void AverageHistogram();
  void WriteOutputHeader();
  void OutputHistogram();
  void ClearHistogram();
  bool CheckHandedness(int i, int j);
  std::ofstream output_;   // current 2D histogram output stream
  std::ofstream output1d_; // current radial histogram output stream
  VanHove(const VanHove &) = delete; // non construction-copyable
  VanHove &operator=(const VanHove &) = delete; // non copyable
public:
  VanHove(const YAML::Node &node);
  ~VanHove();
  void RunAnalysis();
};
// Loads parameters, reads the full trajectory into memory, and allocates
// the lag-time arrays and histograms.
VanHove::VanHove(const YAML::Node &node) {
  LoadParams(node);
  VerifyFile();
  ReadLineNumber();
  std::ifstream input(input_file_);
  ReadFileHeader(input); // consumes the 3 header lines; data lines follow
  printf("File has %d rows of data and %d filaments\n", n_rows_, n_filaments_);
  // Drop the leading equilibration fraction of the data.
  int n_equil = static_cast<int>(equil_percentage_ * n_rows_);
  n_rows_ -= n_equil;
  if (max_lag_ >= n_rows_) {
    max_lag_ = n_rows_ - 1;
  }
  lag_times_ = new int[n_lags_];
  n_avg_ = new int[n_lags_];
  // Log-spaced lags from 0 to max_lag_, forced strictly increasing.
  float lag_dist = log(max_lag_) / (n_lags_ - 1);
  lag_times_[0] = 0;
  for (int i = 1; i < n_lags_; ++i) {
    lag_times_[i] = (int)exp(i * lag_dist);
    if (lag_times_[i] <= lag_times_[i - 1]) {
      lag_times_[i] = lag_times_[i - 1] + 1;
    }
  }
  time_ = new float[n_rows_];
  // (N * (N-1) / 2 min distances, with 2 variables for each
  posits_ = new float *[n_rows_];
  for (int i = 0; i < n_rows_; ++i) {
    posits_[i] = new float[2 * n_filaments_];
  }
  handedness_ = new int[n_filaments_];
  hist_ = new float **[n_lags_];
  for (int i = 0; i < n_lags_; ++i) {
    hist_[i] = new float *[n_bins_1d_];
    for (int j = 0; j < n_bins_1d_; ++j) {
      hist_[i][j] = new float[n_bins_1d_];
    }
  }
  hist1d_ = new float *[n_lags_];
  for (int i = 0; i < n_lags_; ++i) {
    hist1d_[i] = new float[2 * n_bins_1d_];
  }
  // time, then x, y, z, ux, uy, uz for each filament
  std::string line;
  std::string z; // dummy variable
  // Start negative so the first n_equil data lines are skipped.
  int t = -n_equil;
  while (std::getline(input, line)) {
    if (t >= 0) {
      std::stringstream ss(line);
      ss >> time_[t];
      for (int i = 0; i < n_filaments_; ++i) {
        ss >> posits_[t][2 * i];
        ss >> posits_[t][2 * i + 1];
        // Ignore third dimension and orientations
        for (int j = 0; j < 4; ++j) {
          ss >> z;
        }
      }
    }
    t++;
  }
  assert(t == n_rows_);
  input.close();
  if (handedness_analysis_) {
    // Handedness file: two header/parameter line pairs, then one value per
    // filament on a single line.
    input.open(file_root_ + ".handedness.analysis");
    std::getline(input, line); // header
    std::getline(input, line); // n_filaments, intrinsic_curvature
    std::getline(input, line); // header
    std::getline(input, line); // handednesses
    std::stringstream ss(line);
    for (int i = 0; i < n_filaments_; ++i) {
      ss >> handedness_[i];
    }
    input.close();
  }
}
// Frees every heap array allocated in the constructor.
VanHove::~VanHove() {
  for (int i = 0; i < n_rows_; ++i) {
    delete[] posits_[i];
  }
  for (int i = 0; i < n_lags_; ++i) {
    for (int j = 0; j < n_bins_1d_; ++j) {
      delete[] hist_[i][j];
    }
    delete[] hist_[i];
    delete[] hist1d_[i];
  }
  delete[] hist_;
  delete[] hist1d_;
  delete[] posits_;
  delete[] time_;
  delete[] lag_times_;
  delete[] n_avg_;
  delete[] handedness_;
  // NOTE(review): a status message in a destructor is unusual but harmless
  // here, since the object lives for the whole program run.
  printf("Analysis complete\n");
}
// Bins the minimum-image displacement between positions r1 and r2 into the
// 2D histogram hist_[t] and the radial histogram hist1d_[t]. Coordinates
// are scaled into box units, so each displacement component lies in
// [-0.5, 0.5]. With double_count set, the mirrored displacement -ds is also
// binned (used for unordered pairs in the collective analysis).
void VanHove::BinDist(int t, float *r1, float *r2, bool double_count) {
  float ds[2];
  float dr_mag = 0;
  for (int i = 0; i < 2; ++i) {
    // Wrap each scaled coordinate into the primary periodic image.
    float s1 = inv_size_ * r1[i];
    float s2 = inv_size_ * r2[i];
    s1 -= NINT(s1);
    s2 -= NINT(s2);
    ds[i] = s2 - s1;
    ds[i] -= NINT(ds[i]); // minimum-image convention
    dr_mag += ds[i] * ds[i];
  }
  /* Rescale dr_mag to 0-1 */
  dr_mag = sqrt(2 * dr_mag);
  /* I've encountered that ds[i] = 0.5 exactly, within float precision,
     so make sure we bin this value properly if it occurs */
  int i = static_cast<int>(n_bins_1d_ * (ds[0] + 0.5));
  if (i == n_bins_1d_) {
    i = n_bins_1d_ - 1;
  }
  int j = static_cast<int>(n_bins_1d_ * (ds[1] + 0.5));
  if (j == n_bins_1d_) {
    j = n_bins_1d_ - 1;
  }
  hist_[t][i][j] += 1.0;
  i = static_cast<int>(2 * n_bins_1d_ * dr_mag);
  if (i == 2 * n_bins_1d_) {
    i = 2 * n_bins_1d_ - 1;
  }
  hist1d_[t][i] += 1.0;
  /* For collective analysis, count the other's min distance vector */
  if (double_count) {
    // Radial magnitude is symmetric, so the same 1D bin is incremented.
    hist1d_[t][i] += 1.0;
    i = static_cast<int>(n_bins_1d_ * (-ds[0] + 0.5));
    if (i == n_bins_1d_) {
      i = n_bins_1d_ - 1;
    }
    j = static_cast<int>(n_bins_1d_ * (-ds[1] + 0.5));
    if (j == n_bins_1d_) {
      j = n_bins_1d_ - 1;
    }
    hist_[t][i][j] += 1.0;
  }
}
// Reads run parameters from the YAML node, derives file names, bin counts
// and box size, and validates dimensionality/boundary settings.
void VanHove::LoadParams(const YAML::Node &node) {
  file_root_ = node["run_name"].as<std::string>();
  // TODO generalize to more than one filament type
  YAML::Node fnode = node["filament"][0];
  std::string spec_name = fnode["name"].as<std::string>();
  file_root_ = file_root_ + "_filament_" + spec_name;
  input_file_ = file_root_ + ".msd.analysis";
  system_size_ = 2.0 * node["system_radius"].as<float>();
  // Handedness analysis only applies when handedness is randomized and
  // the filaments actually have intrinsic curvature.
  if (fnode["randomize_intrinsic_curvature_handedness"].as<bool>() &&
      (fnode["intrinsic_curvature"].as<float>() != 0 ||
       fnode["radius_of_curvature"].as<float>() > 0)) {
    handedness_analysis_ = true;
  }
  inv_size_ = 1.0 / system_size_;
  n_bins_1d_ = static_cast<int>(system_size_ / bin_resolution_);
  // Force an odd bin count, presumably so zero displacement falls in a
  // central bin -- TODO confirm.
  if (n_bins_1d_ % 2 == 0) {
    n_bins_1d_++;
  }
  if (node["n_dim"].as<int>() != 2) {
    printf("Error: VanHove2D called on 3D data\n");
    exit(1);
  } else if (node["n_periodic"].as<int>() != 2) {
    printf("Error: VanHove2D analysis must be with periodic boundary"
           " conditions\n");
    exit(1);
  }
}
// Aborts the program unless the MSD analysis input file exists.
void VanHove::VerifyFile() const {
  struct stat buffer;
  const bool exists = (stat(input_file_.c_str(), &buffer) == 0);
  if (!exists) {
    printf("Error: Failed to open MSD analysis file %s\n", input_file_.c_str());
    exit(1);
  }
  printf("Reading from file %s\n", input_file_.c_str());
}
// Counts data rows in the input file: total line count minus the
// three header lines.
void VanHove::ReadLineNumber() {
  std::ifstream input(input_file_);
  int line_count = 0;
  for (std::string line; std::getline(input, line);) {
    ++line_count;
  }
  input.close();
  n_rows_ = line_count - 3;
  assert(n_rows_ > 0);
}
// Consumes the three header lines of the input stream, extracting the
// filament count from the second line and deriving the areal density.
void VanHove::ReadFileHeader(std::ifstream &in) {
  std::string line;
  std::getline(in, line); // line 1: skipped
  std::getline(in, line); // line 2: nsteps nspec delta n_filaments
  std::string dummy;
  std::stringstream ss(line);
  ss >> dummy >> dummy >> dummy >> n_filaments_;
  std::getline(in, line); // line 3: skipped
  density_ = n_filaments_ * inv_size_ * inv_size_;
}
// Runs the self analysis, then the collective analysis; when handedness is
// analyzed, the collective pass is repeated for the other handedness mode.
void VanHove::RunAnalysis() {
  RunSelfAnalysis();
  RunCollectiveAnalysis();
  /* Run again on dissimilar handedness particles if we are distinguishing
     between similar and dissimilar handedness */
  if (handedness_analysis_) {
    handedness_similar_ = !handedness_similar_;
    RunCollectiveAnalysis();
  }
}
// Writes bin/frame counts and the lag times (relative to the first lag) to
// both output streams. Since lag_times_[0] == 0, the first entry is a
// self-subtraction and is always 0.
void VanHove::WriteOutputHeader() {
  output_ << "n_bins_1d n_frames\n";
  output_ << n_bins_1d_ << " " << n_lags_ << "\n";
  output_ << "lag_times\n";
  output_ << time_[lag_times_[0]] - time_[lag_times_[0]];
  for (int i = 1; i < n_lags_; ++i) {
    output_ << " " << time_[lag_times_[i]] - time_[lag_times_[0]];
  }
  output_ << "\n";
  // The radial file uses 2 * n_bins_1d_ bins, matching hist1d_.
  output1d_ << "n_bins_1d n_frames\n";
  output1d_ << 2 * n_bins_1d_ << " " << n_lags_ << "\n";
  output1d_ << "lag_times\n";
  output1d_ << time_[lag_times_[0]] - time_[lag_times_[0]];
  for (int i = 1; i < n_lags_; ++i) {
    output1d_ << " " << time_[lag_times_[i]] - time_[lag_times_[0]];
  }
  output1d_ << "\n";
}
// Normalizes both histograms per lag by (filament count x number of time
// origins sampled at that lag).
void VanHove::AverageHistogram() {
  for (int t = 0; t < n_lags_; ++t) {
    const float norm = n_filaments_ * n_avg_[t];
    for (int i = 0; i < n_bins_1d_; ++i) {
      for (int j = 0; j < n_bins_1d_; ++j) {
        hist_[t][i][j] /= norm;
      }
    }
    for (int i = 0; i < 2 * n_bins_1d_; ++i) {
      hist1d_[t][i] /= norm;
    }
  }
}
// Writes the per-lag sample counts followed by the 2D histogram (one row of
// bins per line) and the radial histogram (one lag per line).
void VanHove::OutputHistogram() {
  output_ << "n_samples_per_frame_per_filament\n";
  output_ << n_avg_[0];
  output1d_ << "n_samples_per_frame_per_filament\n";
  output1d_ << n_avg_[0];
  for (int i = 1; i < n_lags_; ++i) {
    output_ << " " << n_avg_[i];
    output1d_ << " " << n_avg_[i];
  }
  output_ << "\n";
  output1d_ << "\n";
  for (int t = 0; t < n_lags_; ++t) {
    for (int i = 0; i < n_bins_1d_; ++i) {
      for (int j = 0; j < n_bins_1d_; ++j) {
        if (j > 0)
          output_ << " ";
        output_ << hist_[t][i][j];
      }
      output_ << "\n";
    }
  }
  for (int t = 0; t < n_lags_; ++t) {
    for (int i = 0; i < 2 * n_bins_1d_; ++i) {
      if (i > 0)
        output1d_ << " ";
      output1d_ << hist1d_[t][i];
    }
    output1d_ << "\n";
  }
}
// Resets both histograms to zero before a new analysis pass.
void VanHove::ClearHistogram() {
  for (int t = 0; t < n_lags_; ++t) {
    for (int i = 0; i < n_bins_1d_; ++i) {
      for (int j = 0; j < n_bins_1d_; ++j)
        hist_[t][i][j] = 0.0f;
    }
    for (int i = 0; i < 2 * n_bins_1d_; ++i)
      hist1d_[t][i] = 0.0f;
  }
}
// Collective (distinct-pair) Van Hove analysis: bins displacements between
// filament i at time origin t0 and filament j at t0 + lag, for all unordered
// pairs i < j. When handedness is analyzed, the output file names and the
// pair filter depend on the current similar/dissimilar mode.
void VanHove::RunCollectiveAnalysis() {
  fflush(stdout);
  std::string collective_output_name =
      file_root_ + ".van_hove_collective.analysis";
  std::string collective_output_name_1d =
      file_root_ + ".van_hove_collective_1d.analysis";
  if (handedness_analysis_ && handedness_similar_) {
    printf("Beginning Van Hove Collective Distribution analysis of %s for"
           " filaments with similar handedness\n",
           file_root_.c_str());
    collective_output_name =
        file_root_ + ".van_hove_collective_similar.analysis";
    collective_output_name_1d =
        file_root_ + ".van_hove_collective_similar_1d.analysis";
  } else if (handedness_analysis_) {
    printf("Beginning Van Hove Collective Distribution analysis of %s for"
           " filaments with dissimilar handedness\n",
           file_root_.c_str());
    collective_output_name =
        file_root_ + ".van_hove_collective_dissimilar.analysis";
    collective_output_name_1d =
        file_root_ + ".van_hove_collective_dissimilar_1d.analysis";
  } else {
    printf("Beginning Van Hove Collective Distribution analysis of %s\n",
           file_root_.c_str());
  }
  output_.open(collective_output_name);
  output1d_.open(collective_output_name_1d);
  WriteOutputHeader();
  std::fill(n_avg_, n_avg_ + n_lags_, 0);
  ClearHistogram();
  // Time origins advance by sample_freq_; lags running past the data are
  // skipped (so longer lags accumulate fewer samples, tracked in n_avg_).
  for (int t0 = 0; t0 < n_rows_ - sample_freq_; t0 += sample_freq_) {
    for (int t = 0; t < n_lags_; ++t) {
      int T = t0 + lag_times_[t];
      if (T > n_rows_ - 1)
        continue;
      n_avg_[t]++;
      for (int i = 0; i < n_filaments_ - 1; ++i) {
        float *r1 = &posits_[t0][2 * i];
        for (int j = i + 1; j < n_filaments_; ++j) {
          if (handedness_analysis_ && CheckHandedness(i, j))
            continue;
          float *r2 = &posits_[T][2 * j];
          // double_count: also bin the mirrored displacement for pair (j, i).
          BinDist(t, r1, r2, true);
        }
      }
    }
  }
  AverageHistogram();
  OutputHistogram();
  output_.close();
  output1d_.close();
}
// Self Van Hove analysis: bins the displacement of each filament between
// time origin t0 and t0 + lag, for every lag time.
void VanHove::RunSelfAnalysis() {
  printf("Beginning Van Hove Self Distribution analysis of %s\n",
         file_root_.c_str());
  fflush(stdout);
  std::string self_output_name = file_root_ + ".van_hove_self.analysis";
  std::string self_output_name_1d = file_root_ + ".van_hove_self_1d.analysis";
  output_.open(self_output_name);
  output1d_.open(self_output_name_1d);
  WriteOutputHeader();
  std::fill(n_avg_, n_avg_ + n_lags_, 0);
  ClearHistogram();
  // Same origin/lag sampling scheme as the collective analysis.
  for (int t0 = 0; t0 < n_rows_ - sample_freq_; t0 += sample_freq_) {
    for (int t = 0; t < n_lags_; ++t) {
      int T = t0 + lag_times_[t];
      if (T > n_rows_ - 1)
        continue;
      n_avg_[t]++;
      for (int i = 0; i < n_filaments_; ++i) {
        float *r1 = &posits_[t0][2 * i];
        float *r2 = &posits_[T][2 * i];
        BinDist(t, r1, r2, false);
      }
    }
  }
  AverageHistogram();
  OutputHistogram();
  output_.close();
  output1d_.close();
}
// Returns true when the caller should skip ('continue') the pair (i, j):
// pairs whose handedness relation does not match the current mode.
bool VanHove::CheckHandedness(int i, int j) {
  const bool same = (handedness_[i] == handedness_[j]);
  // Similar mode skips dissimilar pairs; dissimilar mode skips similar ones.
  return handedness_similar_ ? !same : same;
}
int main(int argc, char *argv[]) {
if (argc != 2) {
printf("Missing <parameters yaml file> argument.\n");
printf("Usage: %s <parameters yaml file>\n", argv[0]);
return 1;
}
std::string fname(argv[1]);
YAML::Node node = YAML::LoadFile(fname);
VanHove vh(node);
vh.RunAnalysis();
return 0;
}
|
class InvalidAmount(Exception):
    """Raised when a monetary amount is zero or negative."""
    pass
class BankAccount:
    """A bank account identified by an account number, with a cached balance."""

    def __init__(self, account_number, balance):
        self.account_number = account_number
        self.balance = balance

    def deposit(self):
        """Prompt for a deposit amount and credit it to the account.

        Relies on module-level helpers ``update_balance``, ``info`` and
        ``warn`` that are not visible in this file -- TODO confirm they are
        imported/defined elsewhere.
        """
        msg = "Enter amount to deposit: \n"
        try:
            amount = int(input(msg))
            if amount <= 0:
                raise InvalidAmount(amount)
            else:
                # Persists via update_balance; self.balance itself is not
                # updated here -- the message below computes the new balance
                # for display only. NOTE(review): confirm update_balance
                # refreshes the stored balance.
                update_balance(self.account_number, amount)
                info(f"${amount} has been credited to your account and new balance is {self.balance + amount}\n")
        except ValueError:
            # int() failed: non-numeric input.
            warn("Invalid input. Please enter a valid positive integer amount to deposit.")
        except InvalidAmount as e:
            warn(f"Invalid amount: {e}. Please enter a valid positive amount to deposit.")
import scrapy
class ArticleSpider(scrapy.Spider):
    """Crawl the latest-articles listing, yielding title/url per article and
    following pagination until no "next" link remains."""
    name = 'articles'
    start_urls = ['https://www.some-website.com/latest']

    def parse(self, response):
        # One item per <article>; the descendant selectors pull the heading
        # text and the first link href.
        for article in response.css('article'):
            yield {
                'title': article.css('h2 ::text').extract_first(),
                'url': article.css('a ::attr(href)').extract_first(),
            }
        # Follow the pagination link, reusing this callback.
        next_page = response.css('li.next a ::attr(href)').extract_first()
        if next_page is not None:
            yield response.follow(next_page, callback=self.parse)
<filename>src/cct720/control/MenuControl.java
package cct720.control;
import java.awt.Desktop;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import cct720.view.MenuView;
/**
 * Controller for the main menu view: wires button actions to application
 * exit, opening the help web page, and starting the game.
 */
public class MenuControl {
    private MenuView menuView;

    public MenuControl(){
        this.menuView = new MenuView(this);
    }

    /** Terminates the application. */
    public void btExitControl(){
        System.exit(0);
    }

    /** Opens the project help page in the system's default browser. */
    public void btHelpControl(){
        try {
            URI u = new URI("http://fcgomes92.wix.com/teste1");
            Desktop.getDesktop().browse(u);
        } catch (IOException | URISyntaxException e) {
            // Multi-catch replaces two catch blocks with identical bodies;
            // opening the browser is best-effort, so just report the failure.
            e.printStackTrace();
        }
    }

    /** Starts a new game and hides the menu window. */
    public void btStartControl(){
        new BeginGameControl();
        this.menuView.getFrame().setVisible(false);
    }
}
|
#! /bin/sh -e
# tup - A file-based build system
#
# Copyright (C) 2011-2018 Mike Shal <marfey@gmail.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

# The monitor changes its working directory to the directory of a file
# modification. In this test, we get the monitor to have its current directory
# set to a location that is then deleted. A future modification to another
# valid file should allow the autoupdate to work, instead of trying to run it
# from a deleted directory.
. ./tup.sh
check_monitor_supported
monitor --autoupdate

# First build: a Tupfile in dir2 compiling every .c file.
mkdir dir2
cat > dir2/Tupfile << HERE
: foreach *.c |> gcc -c %f -o %o |> %B.o
HERE
echo 'int foo(void) {return 7;}' > dir2/ok.c
tup flush
sym_check dir2/ok.o foo

# Second build in tmp/ moves the monitor's working directory into tmp.
mkdir tmp
cat > tmp/Tupfile << HERE
: foreach *.c |> gcc -c %f -o %o |> %B.o
HERE
echo 'int foo(void) {return 7;}' > tmp/ok.c
tup flush
sym_check tmp/ok.o foo

# Monitor now has CWD as tmp, so remove that directory.
rm -rf tmp
tup flush

# Make a change to the still existing file - the autoupdate should work
# successfully.
echo 'int bar(void) {return 7;}' > dir2/ok.c
tup flush
sym_check dir2/ok.o bar

eotup
|
<reponame>FinFlow-Labs/rediculous
package io.chrisdavenport.rediculous
import cats._
import cats.implicits._
import cats.data._
import cats.effect._
import scodec.bits.ByteVector
/**
* For When you don't trust automatic pipelining.
*
* ClusterMode: Multi Key Operations Will use for the first key
* provided.
*
* [[pipeline]] method converts the Pipeline state to the Redis Monad. This will error
* if you pipeline and have not actually entered any redis commands.
**/
// Wraps the same queued-command state a transaction uses; the difference is
// only in how it is sent (see RedisPipeline.pipeline below).
final case class RedisPipeline[A](value: RedisTransaction.RedisTxState[RedisTransaction.Queued[A]]){
  // Compile every queued command into a single Redis action that sends them
  // as one batch over the connection.
  def pipeline[F[_]: Concurrent]: Redis[F, A] = RedisPipeline.pipeline[F](this)
}
object RedisPipeline {

  // RedisCtx instance: issuing a command against a RedisPipeline records it in
  // the state instead of sending it. The state tuple is
  // (next reply index, commands accumulated in reverse, first key seen).
  implicit val ctx: RedisCtx[RedisPipeline] = new RedisCtx[RedisPipeline]{
    def keyedBV[A: RedisResult](key: ByteVector, command: NonEmptyList[ByteVector]): RedisPipeline[A] =
      RedisPipeline(RedisTransaction.RedisTxState{for {
        s1 <- State.get[(Int, List[NonEmptyList[ByteVector]], Option[ByteVector])]
        (i, base, value) = s1
        // Keep the first key seen: per the class docs, cluster-mode routing
        // uses the first key provided.
        _ <- State.set((i + 1, command :: base, value.orElse(Some(key))))
      } yield RedisTransaction.Queued(l => RedisResult[A].decode(l(i)))})
    def unkeyedBV[A: RedisResult](command: NonEmptyList[ByteVector]): RedisPipeline[A] = RedisPipeline(RedisTransaction.RedisTxState{for {
      out <- State.get[(Int, List[NonEmptyList[ByteVector]], Option[ByteVector])]
      (i, base, value) = out
      // No key for this command, so the routing key (if any) is unchanged.
      _ <- State.set((i + 1, command :: base, value))
    } yield RedisTransaction.Queued(l => RedisResult[A].decode(l(i)))})
  }

  // Applicative composition: merges both command states and pairs the queued
  // reply decoders, without sending anything.
  implicit val applicative: Applicative[RedisPipeline] = new Applicative[RedisPipeline]{
    def pure[A](a: A) = RedisPipeline(Monad[RedisTransaction.RedisTxState].pure(Monad[RedisTransaction.Queued].pure(a)))
    override def ap[A, B](ff: RedisPipeline[A => B])(fa: RedisPipeline[A]): RedisPipeline[B] =
      RedisPipeline(RedisTransaction.RedisTxState(
        Nested(ff.value.value).ap(Nested(fa.value.value)).value
      ))
  }

  // Pipelines and transactions share the same underlying state, so converting
  // between them is just re-wrapping the value.
  val fromTransaction = new (RedisTransaction ~> RedisPipeline){
    def apply[A](fa: RedisTransaction[A]): RedisPipeline[A] = RedisPipeline(fa.value)
  }
  val toTransaction = new (RedisPipeline ~> RedisTransaction){
    def apply[A](fa: RedisPipeline[A]): RedisTransaction[A] = RedisTransaction(fa.value)
  }

  // Partially-applied form so callers can write RedisPipeline.pipeline[F](tx)
  // with only the effect type supplied explicitly.
  def pipeline[F[_]] = new SendPipelinePartiallyApplied[F]

  class SendPipelinePartiallyApplied[F[_]]{
    def apply[A](tx: RedisPipeline[A])(implicit F: Concurrent[F]): Redis[F, A] = {
      Redis(Kleisli{(c: RedisConnection[F]) =>
        // Run the accumulated state from (index 0, no commands, no key).
        val ((_, commandsR, key), RedisTransaction.Queued(f)) = tx.value.value.run((0, List.empty, None)).value
        // Commands were prepended during accumulation; reverse restores issue
        // order. toNel is None when nothing was ever queued.
        val commands = commandsR.reverse.toNel
        commands.traverse(nelCommands => RedisConnection.runRequestInternal(c)(fs2.Chunk.seq(nelCommands.toList), key) // We Have to Actually Send A Command
          .flatMap{nel =>
            val l = nel.toList
            val c = fs2.Chunk.seq(l)
            // f decodes each queued reply out of the response chunk by index.
            val resp = f(c)
            RedisConnection.closeReturn[F, A](resp)}
        ).flatMap{
          case Some(a) => a.pure[F]
          // Pipelining zero commands is a usage error (see class docs).
          case None => F.raiseError(RedisError.Generic("Rediculous: Attempted to Pipeline Empty Command"))
        }
      })
    }
  }
}
|
package dev.fiki.forgehax.main.services;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import dev.fiki.forgehax.api.cmd.settings.LongSetting;
import dev.fiki.forgehax.api.event.SubscribeListener;
import dev.fiki.forgehax.api.events.entity.LocalPlayerUpdateEvent;
import dev.fiki.forgehax.api.mod.ServiceMod;
import dev.fiki.forgehax.api.modloader.RegisterMod;
import dev.fiki.forgehax.api.spam.SpamMessage;
import dev.fiki.forgehax.main.Common;
import joptsimple.internal.Strings;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicLong;
@RegisterMod
public class SpamService extends ServiceMod {
  /** Pending messages; copy-on-write so the tick handler can iterate while others add/remove. */
  private static final List<SpamMessage> SENDING = Lists.newCopyOnWriteArrayList();

  /**
   * Queues a message for sending.
   *
   * @param spam the message to enqueue
   * @return true if it was added, false if an equal message is already queued
   */
  public static boolean send(SpamMessage spam) {
    if (!SENDING.contains(spam)) {
      return SENDING.add(spam);
    } else {
      return false;
    }
  }

  /**
   * Checks whether any queued message uses the given activator (case-insensitive).
   *
   * @param activator activator string to look for; null returns false
   * @return true if a queued message has a matching activator
   */
  public static boolean isActivatorPresent(String activator) {
    if (activator == null) {
      return false;
    }
    for (SpamMessage msg : SENDING) {
      if (activator.equalsIgnoreCase(msg.getActivator())) {
        return true;
      }
    }
    return false;
  }

  /** @return true if no messages are queued */
  public static boolean isEmpty() {
    return SENDING.isEmpty();
  }

  /** Global minimum delay between consecutive sends, in milliseconds. */
  public final LongSetting delay = newLongSetting()
      .name("delay")
      .description("Delay between each message in ms")
      .defaultTo(5000L)
      .changedListener(
          (from, to) -> {
            // Changing the delay resets the cooldown so the new value takes effect immediately.
            nextSendMs = 0L;
          })
      .build();

  /**
   * Next time to send a message
   */
  private long nextSendMs = 0L;

  /** Per-message-type cooldowns (ConcurrentHashMap: keys must be non-null). */
  private final Map<String, AtomicLong> customDelays = Maps.newConcurrentMap();

  {
    newSimpleCommand()
        .name("reset")
        .description("Resets spam delay and send list")
        .executor(
            args -> {
              // Block sends while clearing, then re-enable with a fresh cooldown.
              nextSendMs = Long.MAX_VALUE;
              SENDING.clear();
              customDelays.clear();
              nextSendMs = 0;
              args.inform("Reset chat spam");
            })
        .build();
  }

  /**
   * Once per tick: if the global cooldown has elapsed, sends the first queued
   * message whose per-type cooldown (if it has a type) has also elapsed.
   */
  @SubscribeListener
  public void onTick(LocalPlayerUpdateEvent event) {
    if (!SENDING.isEmpty() && System.currentTimeMillis() > nextSendMs) {
      SENDING
          .stream()
          .filter(
              msg -> {
                if (!Strings.isNullOrEmpty(msg.getType())) {
                  long time = customDelays.getOrDefault(msg.getType(), new AtomicLong(0)).get();
                  return System.currentTimeMillis() > time;
                } else {
                  // Untyped messages are only limited by the global cooldown.
                  return true;
                }
              })
          .sorted()
          .findFirst()
          .ifPresent(
              msg -> {
                Common.getLocalPlayer().chat(msg.getMessage());
                // BUGFIX: only record a per-type cooldown when the message has a
                // type. The filter above deliberately admits messages with a
                // null/empty type, and customDelays is a ConcurrentHashMap,
                // which throws NullPointerException on null keys.
                if (!Strings.isNullOrEmpty(msg.getType())) {
                  customDelays.computeIfAbsent(msg.getType(), t -> new AtomicLong(0L))
                      .set(System.currentTimeMillis() + msg.getDelay());
                }
                nextSendMs = System.currentTimeMillis() + delay.getValue();
                SENDING.remove(msg);
              });
    }
  }
}
|
package com.example.android.miwok;
import android.app.Activity;
import android.content.Context;
import android.media.AudioManager;
import android.media.MediaPlayer;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.view.View;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.LinearLayout;
import android.widget.ListView;
import android.widget.TextView;
import android.widget.Toast;
import java.util.ArrayList;
public class PhrasesActivity extends AppCompatActivity {
  /** Plays the pronunciation for the tapped phrase; null whenever idle/released. */
  MediaPlayer mp;
  AudioManager audioManager;

  /**
   * Audio-focus handler: release on permanent loss, pause+rewind on transient
   * loss, resume on regain. BUGFIX: every branch that touches mp now checks
   * for null — after AUDIOFOCUS_LOSS releases the player (mp = null), a later
   * TRANSIENT or GAIN callback would otherwise throw NullPointerException.
   */
  AudioManager.OnAudioFocusChangeListener afChangeListener =
      new AudioManager.OnAudioFocusChangeListener() {
        public void onAudioFocusChange(int focusChange) {
          if (focusChange == AudioManager.AUDIOFOCUS_LOSS) {
            releaseMediaPlayer();
          } else if (focusChange == AudioManager.AUDIOFOCUS_LOSS_TRANSIENT
              || focusChange == AudioManager.AUDIOFOCUS_LOSS_TRANSIENT_CAN_DUCK) {
            if (mp != null) {
              mp.pause();
              // Phrases are short; restart from the beginning on resume.
              mp.seekTo(0);
            }
          } else if (focusChange == AudioManager.AUDIOFOCUS_GAIN) {
            if (mp != null) {
              mp.start();
            }
          }
        }
      };

  @Override
  protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    // NOTE(review): reuses the numbers layout — all category screens share the
    // same list layout; confirm this is intentional.
    setContentView(R.layout.activity_numbers);
    this.setTitle("Phrases");

    // Build the list of phrase words (default translation, Miwok translation, audio).
    ArrayList<word> phrases = new ArrayList<word>();
    phrases.add(new word("Where are you going?", "minto wuksus", R.raw.phrase_where_are_you_going));
    phrases.add(new word("What is your name?", "tinnә oyaase'nә", R.raw.phrase_what_is_your_name));
    phrases.add(new word("My name is...", "oyaaset...", R.raw.phrase_my_name_is));
    phrases.add(new word("How are you feeling?", "michәksәs?", R.raw.phrase_how_are_you_feeling));
    phrases.add(new word("I’m feeling good.", "kuchi achit", R.raw.phrase_im_feeling_good));
    phrases.add(new word("Are you coming?", "әәnәs'aa?", R.raw.phrase_are_you_coming));
    phrases.add(new word("Yes, I’m coming.", "hәә’ әәnәm", R.raw.phrase_yes_im_coming));
    phrases.add(new word("I’m coming.", "әәnәm", R.raw.phrase_im_coming));
    phrases.add(new word("Let’s go.", "yoowutis", R.raw.phrase_lets_go));
    phrases.add(new word("Come here.", "әnni'nem", R.raw.phrase_come_here));

    final WordAdapter Adapter = new WordAdapter(this, phrases, R.color.category_phrases);
    ListView listView = (ListView) findViewById(R.id.list);
    listView.setAdapter(Adapter);
    listView.setOnItemClickListener(new AdapterView.OnItemClickListener() {
      @Override
      public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
        // Free any previous player before starting a new clip.
        releaseMediaPlayer();
        word obj = Adapter.getItem(position);
        audioManager = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
        // Request transient focus is not used here; GAIN requests full focus.
        int result = audioManager.requestAudioFocus(afChangeListener, AudioManager.STREAM_MUSIC, AudioManager.AUDIOFOCUS_GAIN);
        if (result == AudioManager.AUDIOFOCUS_REQUEST_GRANTED) {
          mp = MediaPlayer.create(PhrasesActivity.this, obj.getAudioId());
          mp.start();
          mp.setOnCompletionListener(new MediaPlayer.OnCompletionListener() {
            @Override
            public void onCompletion(MediaPlayer mp) {
              releaseMediaPlayer();
            }
          });
        }
      }
    });
  }

  /** Releases the MediaPlayer and abandons audio focus. Safe to call repeatedly. */
  private void releaseMediaPlayer() {
    if (mp != null) {
      mp.release();
      mp = null;
      // audioManager is non-null here: mp is only set after a successful
      // requestAudioFocus, which assigns audioManager first.
      audioManager.abandonAudioFocus(afChangeListener);
    }
  }

  @Override
  protected void onStop() {
    super.onStop();
    // Stop playback and free resources when the activity is no longer visible.
    releaseMediaPlayer();
  }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.