text stringlengths 1 1.05M |
|---|
<reponame>kant/taggr<gh_stars>10-100
import React from "react";
import styled from "styled-components";
type Props = {
  url: string;
};

/**
 * Full-size rounded tile that renders the given image as a centered,
 * cover-fit background with a subtle drop shadow.
 */
const Wrapper = styled.div<Props>`
height: 100%;
width: 100%;
border-radius: 6px;
background-image: url("${({ url }) => url}");
background-repeat: no-repeat;
background-position: center center;
background-size: cover;
box-shadow: 2px 4px 4px rgba(0, 0, 0, 0.24);
`;

const ImageTile = ({ url }: Props) => <Wrapper url={url} />;

export default ImageTile;
|
import React, { useRef, useEffect, useState, useCallback } from "react";
import { makeStyles } from "@material-ui/core/styles";
import { Grid, Box, Typography } from "@material-ui/core";
import ChatMessage from "./ChatMessage";
import { useScroll, useUpdate, useScrolling, useDebounce } from "react-use";
import ChatUnreadMessages from "./ChatUnreadMessages";
import { useSelector } from "react-redux";
import { isEventOwner, getUserId } from "../../Redux/eventSession";
import NoChatMessagesImg from "../../Assets/illustrations/undraw_Group_chat_unwm.svg";
// JSS styles for the chat pane.
const useStyles = makeStyles((theme) => ({
  // Scrollable container for the whole pane.
  root: {
    height: "100%",
    overflow: "auto",
    overflowX: "hidden",
    flexWrap: "inherit"
  },
  messagesContainerGrid: {
    flex: 1
  },
  messageContainer: {
    width: "100%"
  },
  // Invisible sentinel placed after the last message; used as a
  // scrollIntoView target when auto-scrolling to the bottom.
  shadowElement: {
    float: "left",
    clear: "both"
  },
  // Placeholder shown while there are no messages yet.
  emptyPane: {
    marginTop: theme.spacing(4),
    textAlign: "center"
  },
  emptyImage: {
    width: "55%",
    marginBottom: theme.spacing(1)
  }
}));
export default ({ user, users, messages }) => {
const chatEnd = useRef(null);
const scrollRef = React.useRef(null);
const { y } = useScroll(scrollRef);
const [scrollY, setScrollY] = useState(y);
useDebounce(
() => {
setScrollY(y);
// console.log("Stopped debouncing y: " + y);
},
1000,
[y]
);
const scrollingOriginal = useScrolling(scrollRef);
const [scrolling, setScrolling] = useState(false);
useDebounce(
() => {
setScrolling(scrollingOriginal);
// console.log("Stopped debouncing scrollingOriginal: " + scrollingOriginal);
},
1000,
[scrollingOriginal]
);
const isScrollAtBottom = !!(
scrollRef.current &&
scrollRef.current.scrollHeight - scrollY === scrollRef.current.clientHeight
);
const [hasScrolledToBottom, setHasScrolledToBottom] = useState(false);
const [lastTrackedMessagesJson, setLastTrackedMessagesJson] = useState(
messages ? JSON.stringify(messages) : null
);
const [lastScrollingState, setLastScrollingState] = useState(false);
const [countNewMessages, setCountNewMessages] = useState(0);
const forceRender = useUpdate();
const scrollToBottom = React.useCallback(
() => chatEnd.current.scrollIntoView({ behavior: "auto" }),
[]
);
const isOwner = useSelector(isEventOwner);
const userId = useSelector(getUserId);
// handle new messages
useEffect(() => {
// console.log("useEffect: handle new messages");
const currentMessagesJson = JSON.stringify(messages);
if (currentMessagesJson !== lastTrackedMessagesJson) {
if (isScrollAtBottom) {
scrollToBottom();
setCountNewMessages(0);
} else {
const lastMessages = JSON.parse(lastTrackedMessagesJson);
const diff = messages.length - lastMessages.length;
// console.log({ prev: lastMessages.length, new: messages.length, diff });
setCountNewMessages(diff > 0 ? countNewMessages + diff : 0);
}
setLastTrackedMessagesJson(currentMessagesJson);
}
}, [
countNewMessages,
isScrollAtBottom,
lastTrackedMessagesJson,
messages,
scrollToBottom
]);
// force update of timestamps
useEffect(() => {
// console.log("useEffect: force update of timestamps");
const intervalId = setInterval(() => {
// setMessages((mess) => [...mess]);
forceRender();
}, 60 * 1000);
return () => clearInterval(intervalId);
}, [forceRender]);
// initial scroll to bottom
useEffect(() => {
// console.log("useEffect: initial scroll to bottom");
if (
messages &&
messages.length > 0 &&
!hasScrolledToBottom &&
scrollY === 0
) {
chatEnd.current.scrollIntoView({ behavior: "auto" });
setHasScrolledToBottom(true);
}
}, [hasScrolledToBottom, messages, scrollY]);
// detect scroll changed
useEffect(() => {
// console.log("useEffect: detect scroll changed");
if (scrolling !== lastScrollingState) {
if (isScrollAtBottom) {
setCountNewMessages(0);
}
setLastScrollingState(scrolling);
}
}, [isScrollAtBottom, lastScrollingState, scrolling]);
const classes = useStyles();
const onClickNewMessages = useCallback(() => {
setCountNewMessages(0);
scrollToBottom();
}, [scrollToBottom]);
// console.log({
// scrolling,
// isScrollAtBottom,
// scrollY,
// countNewMessages,
// isForcingScroll
// });
return (
<div className={classes.root} ref={scrollRef}>
<div ref={scrollRef}>
<Grid
container
direction="column-reverse"
justify="flex-start"
alignItems="center"
className={classes.messagesContainerGrid}
>
{messages &&
messages.map((message) => (
<Grid
item
key={message.messageId}
className={classes.messageContainer}
// ref={!firstMessageRef ? firstMessageRef : lastMessageRef}
>
<ChatMessage
message={message}
user={user}
users={users}
isOwner={isOwner}
userId={userId}
/>
</Grid>
))}
{/* {loadingMore && (
<Grid item className={classes.messageContainer}>
<Typography variant="caption" display="block" align="center">
Loading more messages...
</Typography>
</Grid>
)} */}
</Grid>
<div className={classes.shadowElement} ref={chatEnd} />
</div>
{countNewMessages > 0 && (
<ChatUnreadMessages
numMessages={countNewMessages}
onClickUnread={onClickNewMessages}
/>
)}
{(!messages || messages.length === 0) && (
<Box className={classes.emptyPane}>
<img
className={classes.emptyImage}
src={NoChatMessagesImg}
alt="Polls coming soon"
/>
<Typography variant="body2" color="textSecondary" display="block">
No messages sent yet. Say hi to everyone...
</Typography>
</Box>
)}
</div>
);
};
|
#!/usr/bin/env bash
# Print the "Rush" ASCII-art banner.
# NOTE: the literal backslashes rely on bash's default echo (no -e); the
# trailing space on the fourth line keeps the final backslash literal.
echo " ______ _"
echo " | ___ \ | |"
echo " | |_/ / _ ___| |__"
echo " | / | | / __| '_ \ "
echo " | |\ \ |_| \__ \ | | |"
echo " \_| \_\__,_|___/_| |_|"
|
'use strict';

// Sample values covering several distinct runtime types.
const arr = [
  'a',
  1,
  true,
  { value: 'key' },
  ['1', '2', '3'],
  function a() {
    console.log(`simple function ${a}`);
  }
];

// Wraps an array and can describe each element's runtime type.
class SetExample {
  constructor(array) {
    this.set = array;
  }

  // Logs and returns a { type, value } descriptor for every element.
  iterator() {
    const described = this.set.map((el) => ({ type: typeof el, value: el }));
    console.log(described);
    return described;
  }
}

const array = new SetExample(arr);
array.iterator();
|
import re
class VersionInfo:
    """Parse and format CPython-style version strings such as ``3.7.1rc2``."""

    # major.minor.micro, optionally followed by a release level ("a", "b",
    # "rc", ...) and a serial number.
    _VERSION_RE = re.compile(
        r'(?P<major>\d+)\.(?P<minor>\d+)\.(?P<micro>\d+)'
        r'(?P<release_level>[a-z]+)?(?P<serial>\d+)?'
    )

    def __init__(self, major, minor, micro, release_level, serial):
        self.major = major
        self.minor = minor
        self.micro = micro
        self.release_level = release_level
        self.serial = serial

    def parse_version_string(self, version_string):
        """Populate this instance from ``version_string``.

        Raises:
            ValueError: if the string does not start with ``major.minor.micro``.
        """
        match = self._VERSION_RE.match(version_string)
        if match is None:
            # Previously this crashed with AttributeError (None.group) on bad
            # input; fail loudly with a clear message instead.
            raise ValueError(f"invalid version string: {version_string!r}")
        self.major = int(match.group("major"))
        self.minor = int(match.group("minor"))
        self.micro = int(match.group("micro"))
        self.release_level = match.group("release_level") or ""
        self.serial = int(match.group("serial")) if match.group("serial") else 0

    def __str__(self):
        """Return the canonical string form, e.g. ``3.7.1rc2``."""
        version_str = f"{self.major}.{self.minor}.{self.micro}"
        if self.release_level:
            version_str += self.release_level
        if self.serial:
            version_str += str(self.serial)
        return version_str
# Example usage: parse a CPython-style version string and echo it back.
version_string = "3.7.1rc2"
version_info = VersionInfo(0, 0, 0, "", 0)  # placeholders, overwritten by parse
version_info.parse_version_string(version_string)
print(version_info)
use std::sync::Arc;
// Define the trait for layers
trait Layer {
    // Returns a reference to this layer.
    // NOTE(review): for actual downcasting (see compositor_picture_layer_needs_cache)
    // this would normally return &dyn std::any::Any, not &dyn Layer — confirm intent.
    fn any(&self) -> &dyn Layer;
}

// Define the specific trait for picture layers
trait PictureLayer: Layer {
    // True when this picture layer's output should be cached.
    fn needs_cache(&self) -> bool;
}
// Define the ValueBox and its associated functions
// Newtype wrapper carrying a value handed across an FFI boundary.
struct ValueBox<T>(T);

impl<T> ValueBox<T> {
    // Runs `f` on the contained value when present, otherwise returns `default`.
    // NOTE(review): `self.0.as_ref()` only compiles when T itself is an
    // Option-like type; for a plain `ValueBox<T>(T)` this does not type-check —
    // the field should probably be `Option<T>`. Confirm against the real crate.
    fn with_not_null_value_return<R, F>(&self, default: R, f: F) -> R
    where
        F: FnOnce(&T) -> R,
    {
        if let Some(inner) = self.0.as_ref() {
            f(inner)
        } else {
            default
        }
    }
}
// Implement the compositor_picture_layer_needs_cache function
// FFI entry point: reports whether the layer behind `layer` wants caching.
// Safety: `layer` must be a valid, non-dangling pointer produced by the
// corresponding ValueBox constructor; the caller retains ownership.
#[no_mangle]
pub fn compositor_picture_layer_needs_cache(layer: *mut ValueBox<Arc<dyn Layer>>) -> bool {
    unsafe {
        // Extract the Arc from the pointer
        let layer_arc = &*layer;
        // Use with_not_null_value_return to handle the layer extraction and caching check
        layer_arc.with_not_null_value_return(false, |layer| {
            // Attempt to downcast the layer to PictureLayer
            // NOTE(review): downcast_ref requires `dyn Any` and cannot target a
            // trait object like `dyn PictureLayer`; as written this does not
            // compile. Also, `expect` here aborts across the FFI boundary on a
            // non-picture layer rather than returning the `false` default.
            let picture_layer = layer
                .any()
                .downcast_ref::<dyn PictureLayer>()
                .expect("Is not a picture layer!");
            // Check if the picture layer needs caching
            picture_layer.needs_cache()
        })
    }
}
<filename>src/material/custom/other/pieces.ts
import { PitchValue, Rendering } from '@musical-patterns/material'
import { ContourPiece, Thunk } from '@musical-patterns/utilities'
import { RenderingName, Renderings, thunkRenderings } from '../../rendering'
import { thunkOtherBlocks } from './blocks'
import { OtherBlocks, OtherContourPieces } from './types'
// Builds the contour pieces for the "other" material by running each source
// block through a rendering.
const thunkOtherContourPieces: Thunk<OtherContourPieces> =
    (): OtherContourPieces => {
        const blocks: OtherBlocks = thunkOtherBlocks()
        const renderings: Renderings = thunkRenderings()
        const glis: Rendering<PitchValue> = renderings[ RenderingName.GLIS ]
        const flatline: Rendering<PitchValue> = renderings[ RenderingName.FLATLINE ]

        // Every piece uses the glis rendering except the secret long chord,
        // which is rendered flatline.
        return {
            backboneFifteen: glis(blocks.backboneFifteen),
            backboneTwentyfour: glis(blocks.backboneTwentyfour),
            secretLongChord: flatline(blocks.secretLongChord),
            shifty: glis(blocks.shifty),
            shiftyFifteen: glis(blocks.shiftyFifteen),
            shiftyTwentyfour: glis(blocks.shiftyTwentyfour),
            shiftyTwentyfourVariant: glis(blocks.shiftyTwentyfourVariant),
        }
    }

export {
    thunkOtherContourPieces,
}
|
<filename>AWSteria/builds/RV64ACDFIMSU_Flute_verilator_AWS/Verilog_RTL/vpi_wrapper_c_start_timing.h
/*
 * Generated by Bluespec Compiler, version 2019.05.beta2 (build a88bf40db, 2019-05-24)
 *
 * VPI (Verilog Procedural Interface) wrapper declarations for the imported
 * C function c_start_timing.
 */
#ifndef __vpi_wrapper_c_start_timing_h__
#define __vpi_wrapper_c_start_timing_h__

#include <vpi_user.h>

/* registration function: registers c_start_timing as a VPI system task/function */
void c_start_timing_vpi_register();

/* VPI wrapper function: calltf callback invoked by the simulator */
PLI_INT32 c_start_timing_calltf(PLI_BYTE8 *user_data);

#endif /* ifndef __vpi_wrapper_c_start_timing_h__ */
|
import { DatePipe } from '@angular/common';
import { Component, EventEmitter, Input, OnInit, Output } from '@angular/core';
import { UtilsServiceService } from '../../../utils-service.service';
@Component({
selector: 'ngx-add-new-bon-livraison',
templateUrl: './add-new-bon-livraison.component.html',
styleUrls: ['./add-new-bon-livraison.component.scss']
})
export class AddNewBonLivraisonComponent implements OnInit {
@Input() bonLivraison = {
bonLivraisonId: null,
bonLivraisonNumber: '',
bonLivraisonCurrency: 'TND',
bonLivraisonDate: null,
customer: null,
products: null,
totalHTBrut: 0,
totalHTBrutS:'0',
remise: 0,
remiseS:'0',
totalHT: 0,
totalHTS:'0',
totalTVA: 0,
totalTVAS:'0',
totalFodec: 0,
totalFodecS:'0',
totalTaxe: 0,
totalTaxeS:'0',
timbreFiscal: 0.600,
timbreFiscalS:'0.600',
totalTTC: 0,
totalTTCS:'0',
// montantBonLivraison: 0,
// montantBonLivraisonS:'0',
bonLivraisonLines: [],
productGroup:null,
commercialName: '',
}
blPrefix="";
blNumber="";
@Output() addNewBonLivraisonEvent = new EventEmitter();
@Output() cancelEvent = new EventEmitter();
clients = [];
produits = [];
productGroups = [];
maxDateInvoiceDate;
minDateDeadlineDate;
timeLine = {
timeLineId: null,
timeLineTable: [],
};
line = {
product: {
productId: null,
productLabel: '',
productReference: '',
productDescription: '',
productUrlImage: '',
productPrixHT: 0,
productTVA: 0,
productFodec: 0,
productTTC: 0,
productUnite: 'PIECE',
productType: 'MATERIEL'
},
productGroup:null,
quantity: 1,
remiseTaux:0,
remiseValeur:null,
montantHt:null,
montantHtBrut:null,
montantTva:null,
montantFaudec:null,
};
produit = null;
showProduitWindow = false;
constructor(private UtilsService: UtilsServiceService, private datePipe: DatePipe) { }
  /**
   * Initialize the form: default the date to today, load reference data
   * (customers, product groups), seed the editable line table from an
   * existing bon de livraison when editing, and compute the initial totals.
   */
  ngOnInit(): void {
    // Default the delivery-note date to today when creating a new one.
    if (this.bonLivraison.bonLivraisonDate == null) {
      this.bonLivraison.bonLivraisonDate = this.datePipe.transform(new Date(), 'yyyy-MM-dd');
    }
    this.maxDateInvoiceDate = this.datePipe.transform(new Date(), 'yyyy-MM-dd');
    this.minDateDeadlineDate = this.datePipe.transform(new Date(), 'yyyy-MM-dd');
    this.initiateLine();
    this.getAllCustomers();
    //this.getAllProdcuts();
    this.getAllProductsGroups();
    this.initProduit();
    // Copy existing lines into the editable time-line table.
    this.bonLivraison.bonLivraisonLines.forEach(line => {
      this.timeLine.timeLineTable.push(line)
    })
    // When editing, split the existing number into prefix (8 chars) + counter (4 chars).
    if (this.bonLivraison.bonLivraisonId != null && this.bonLivraison.bonLivraisonId != "") {
      this.blPrefix = this.bonLivraison.bonLivraisonNumber.substr(0, 8);
      this.blNumber = this.bonLivraison.bonLivraisonNumber.substr(8, 4);
    }
    this.calculPrixTotalBonLivraison();
  }
  /** Reset the "new product" form model to its default values. */
  initProduit() {
    this.produit = {
      productId: null,
      productLabel: '',
      productReference: '',
      productDescription: '',
      productUrlImage: '',
      productPrixHT: 0,
      productTVA: 0,
      // NOTE(review): this key is spelled productFaudec here, but the line
      // templates elsewhere in this component use productFodec — confirm
      // which key the product form actually binds to.
      productFaudec: 0,
      productTTC: 0,
      productUnite: 'PIECE',
      productType: 'MATERIEL'
    };
  }
getAllCustomers() {
const context = this;
this.UtilsService.get(UtilsServiceService.API_CLIENT).subscribe(response => {
context.clients = response;
if (this.clients.length > 0 && this.bonLivraison.customer == null) {
this.bonLivraison.customer = this.clients[0];
}
},
error => {
this.UtilsService.showToast('danger',
'Erreur interne',
`Un erreur interne a été produit lors du chargement des clients`);
});
}
/* getAllProdcuts() {
const context = this;
this.UtilsService.get(UtilsServiceService.API_PRODUIT).subscribe(response => {
context.produits = response;
if (this.produit.length > 0) {
this.line.product = this.produits[0];
}
},
error => {
this.UtilsService.showToast('danger',
'Erreur interne',
`Un erreur interne a été produit lors du chargement des produits`);
});
}
*/
getAllProductsGroups() {
this.UtilsService.get(UtilsServiceService.API_PRODUIT_GROUP).subscribe(response => {
let productGroups = response;
productGroups.forEach(productGroup => {
if(productGroup.productList.length>0){
this.productGroups.push(productGroup);
}
})
},
error => {
this.UtilsService.showToast('danger',
'Erreur interne',
`Un erreur interne a été produit lors du chargement des familles de produits`);
});
}
compareCustomer(a: any, b: any): boolean {
if (a == null || b == null) return true;
return a.customerId === b.customerId;
}
changeProductGroup(line){
line.product = line.productGroup.productList[0];
this.calculPrixTotalEdited(line);
}
initiateLine() {
this.line = {
product: {
productId: null,
productLabel: '',
productReference: '',
productDescription: '',
productUrlImage: '',
productPrixHT: 0,
productTVA: 0,
productFodec: 0,
productTTC: 0,
productUnite: 'PIECE',
productType: 'MATERIEL'
},
productGroup:null,
quantity: 1,
remiseTaux: 0,
remiseValeur:null,
montantHt:null,
montantHtBrut:null,
montantTva:null,
montantFaudec:null,
};
}
deleteLine(i,line) {
if(line.bonLivraisonLineId != null && line.bonLivraisonLineId != ''){
this.UtilsService.delete(UtilsServiceService.API_BONLIVRAISONLINE+'/'+ line.bonLivraisonLineId ).subscribe(response => {
this.timeLine.timeLineTable.splice(i, 1);
this.calculPrixTotalBonLivraison();
},
error => {
this.UtilsService.showToast('danger',
'Erreur interne',
`Un erreur interne a été produit lors du du Suppression De ligne`);
});
}else{
this.timeLine.timeLineTable.splice(i, 1);
this.calculPrixTotalBonLivraison();
}
}
convertLine(line) {
line.remiseTaux=this.convertAmount(line.remiseTaux);
line.remiseValeur=this.convertAmount(line.remiseValeur);
line.montantFaudec=this.convertAmount(line.montantFaudec);
line.montantHt=this.convertAmount(line.montantHt);
line.montantHtBrut=this.convertAmount(line.montantHtBrut);
line.montantTva=this.convertAmount(line.montantTva);
}
addLine() {
this.line.remiseTaux=this.convertAmount(this.line.remiseTaux);
this.line.remiseValeur=this.convertAmount(this.line.remiseValeur);
this.line.montantFaudec=this.convertAmount(this.line.montantFaudec);
this.line.montantHt=this.convertAmount(this.line.montantHt);
this.line.montantHtBrut=this.convertAmount(this.line.montantHtBrut);
this.line.montantTva=this.convertAmount(this.line.montantTva);
this.timeLine.timeLineTable.push(this.line);
this.calculPrixTotalBonLivraison();
this.initiateLine();
}
cancel() {
this.cancelEvent.emit();
}
checkGeneratedBonLivraisonValid() {
return this.bonLivraison.customer == "" || this.bonLivraison.customer == null ||
this.timeLine.timeLineTable.length == 0;
}
saveGeneratedBonLivraison(){
this.bonLivraison.bonLivraisonLines=this.timeLine.timeLineTable;
this.bonLivraison.bonLivraisonNumber=this.blPrefix+this.blNumber;
this.addNewBonLivraisonEvent.emit(this.bonLivraison);
}
showProduitModal() {
this.showProduitWindow = true;
}
hideProduitWindow() {
this.showProduitWindow = false;
}
/* saveNewProduit($) {
const context = this;
this.UtilsService.post(UtilsServiceService.API_PRODUIT, this.produit).subscribe(response => {
this.hideProduitWindow();
if (context.produit.productId == null) {
this.UtilsService.showToast('success',
'produit ajouté avec succés',
`Le produit ${this.produit.productLabel} a été ajouté avec succcés`);
} else {
this.UtilsService.showToast('success',
'produit modfié avec succés',
`Le produit ${this.produit.productLabel} a été modifié avec succcés`);
}
context.getAllProdcuts();
context.initProduit();
},
error => {
this.UtilsService.showToast('danger',
'Erreur interne',
`Un erreur interne a été produit lors de la souvegarde du produit ${this.produit.productLabel}`);
});
} */
changeProduct(event) {
this.line.product = event;
this.calculPrixTotal();
}
calculPrixTotal(){
this.line.montantHtBrut=this.line.product.productPrixHT*this.line.quantity
this.line.remiseValeur=this.line.montantHtBrut*(this.line.remiseTaux/100);
this.line.montantHt=this.line.montantHtBrut-this.line.remiseValeur;
this.line.montantFaudec=this.line.montantHt*(this.line.product.productFodec/100);
this.line.montantTva=(this.line.montantHt+this.line.montantFaudec)*(this.line.product.productTVA/100);
this.calculPrixTotalBonLivraison();
this.convertLine(this.line);
}
calculPrixTotalEdited(line){
line.montantHtBrut=line.product.productPrixHT*line.quantity
line.remiseValeur=line.montantHtBrut*(line.remiseTaux/100);
line.montantHt=line.montantHtBrut-line.remiseValeur;
line.montantFaudec=line.montantHt*(line.product.productFodec/100);
line.montantTva=(line.montantHt+line.montantFaudec)*(line.product.productTVA/100);
this.convertLine(line)
this.calculPrixTotalBonLivraison();
}
calculPrixTotalBonLivraison() {
this.bonLivraison.totalHTBrut = 0;
this.bonLivraison.totalHT = 0;
this.bonLivraison.totalTVA = 0;
this.bonLivraison.totalFodec = 0;
this.bonLivraison.remise = 0;
this.bonLivraison.totalTaxe = this.bonLivraison.totalTVA + this.bonLivraison.totalFodec + this.bonLivraison.timbreFiscal;
this.bonLivraison.totalTaxeS=this.UtilsService.convertAmountToString(this.bonLivraison.totalTaxe.toString());
this.bonLivraison.totalTTC = this.bonLivraison.totalHT + this.bonLivraison.totalTaxe;
this.bonLivraison.totalTTCS=this.UtilsService.convertAmountToString(this.bonLivraison.totalTTC.toString());
this.timeLine.timeLineTable.forEach((line) => {
this.bonLivraison.totalHTBrut += line.montantHtBrut;
this.bonLivraison.totalHT += line.montantHt;
this.bonLivraison.totalTVA += line.montantTva;
this.bonLivraison.totalFodec += line.montantFaudec;
this.bonLivraison.remise += line.remiseValeur;
this.bonLivraison.totalTaxe = this.bonLivraison.totalTVA + this.bonLivraison.totalFodec + this.bonLivraison.timbreFiscal;
this.bonLivraison.totalTTC = this.bonLivraison.totalHT + this.bonLivraison.totalTaxe;
})
this.bonLivraison.totalHTBrut=this.convertAmount(this.bonLivraison.totalHTBrut);
this.bonLivraison.totalHTBrutS=this.UtilsService.convertAmountToString(this.bonLivraison.totalHTBrut.toString());
this.bonLivraison.totalHT=this.convertAmount(this.bonLivraison.totalHT);
this.bonLivraison.totalHTS=this.UtilsService.convertAmountToString(this.bonLivraison.totalHT.toString());
this.bonLivraison.totalTVA=this.convertAmount(this.bonLivraison.totalTVA);
this.bonLivraison.totalTVAS=this.UtilsService.convertAmountToString(this.bonLivraison.totalTVA.toString());
this.bonLivraison.totalFodec=this.convertAmount(this.bonLivraison.totalFodec);
this.bonLivraison.totalFodecS=this.UtilsService.convertAmountToString(this.bonLivraison.totalFodecS.toString());
this.bonLivraison.remise=this.convertAmount(this.bonLivraison.remise);
this.bonLivraison.remiseS=this.UtilsService.convertAmountToString(this.bonLivraison.remise.toString());
this.bonLivraison.totalTaxe=this.convertAmount(this.bonLivraison.totalTaxe);
this.bonLivraison.totalTaxeS=this.UtilsService.convertAmountToString(this.bonLivraison.totalTaxe.toString());
this.bonLivraison.totalTTC=this.convertAmount(this.bonLivraison.totalTTC);
this.bonLivraison.totalTTCS=this.UtilsService.convertAmountToString(this.bonLivraison.totalTTC.toString());
}
convertAmount(amount):any
{
return amount=Math.round(amount * 1000) / 1000;
}
}
|
/*
* Copyright [2020-2030] [https://www.stylefeng.cn]
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Guns采用APACHE LICENSE 2.0开源协议,您在使用过程中,需要注意以下几点:
*
* 1.请不要删除和修改根目录下的LICENSE文件。
* 2.请不要删除和修改Guns源码头部的版权声明。
* 3.请保留源码和相关描述文件的项目出处,作者声明等。
* 4.分发源码时候,请注明软件出处 https://gitee.com/stylefeng/guns
* 5.在修改包名,模块名称,项目代码等时,请注明软件出处 https://gitee.com/stylefeng/guns
* 6.若您的项目无法满足以上几点,可申请商业授权
*/
package cn.stylefeng.roses.kernel.system.modular.role.entity;
import cn.stylefeng.roses.kernel.db.api.pojo.entity.BaseEntity;
import cn.stylefeng.roses.kernel.rule.annotation.ChineseDescription;
import com.baomidou.mybatisplus.annotation.*;
import lombok.Data;
import lombok.EqualsAndHashCode;
import java.math.BigDecimal;
/**
 * System role table entity.
 *
 * @author majianguo
 * @date 2020/11/5 4:32 PM
 */
@EqualsAndHashCode(callSuper = true)
@Data
@TableName("sys_role")
public class SysRole extends BaseEntity {

    /**
     * Primary key
     */
    @TableId(value = "role_id", type = IdType.ASSIGN_ID)
    @ChineseDescription("主键")
    private Long roleId;

    /**
     * Role name
     */
    @TableField("role_name")
    @ChineseDescription("角色名称")
    private String roleName;

    /**
     * Role code
     */
    @TableField("role_code")
    @ChineseDescription("角色编码")
    private String roleCode;

    /**
     * Sort order
     */
    @TableField("role_sort")
    @ChineseDescription("排序")
    private BigDecimal roleSort;

    /**
     * Data scope type: 10 - own data only, 20 - own department, 30 - own
     * department and below, 40 - designated departments, 50 - all data
     */
    @TableField("data_scope_type")
    @ChineseDescription("数据范围类型:10-仅本人数据,20-本部门数据,30-本部门及以下数据,40-指定部门数据,50-全部数据")
    private Integer dataScopeType;

    /**
     * Status: 1 - enabled, 2 - disabled
     */
    @TableField("status_flag")
    @ChineseDescription("状态:1-启用,2-禁用")
    private Integer statusFlag;

    /**
     * Remark
     */
    @TableField(value = "remark", insertStrategy = FieldStrategy.IGNORED, updateStrategy = FieldStrategy.IGNORED)
    @ChineseDescription("备注")
    private String remark;

    /**
     * Delete flag (Y - deleted, N - not deleted)
     */
    @TableField(value = "del_flag",fill = FieldFill.INSERT)
    @ChineseDescription("删除标记(Y-已删除,N-未删除)")
    private String delFlag;

    /**
     * Whether this is a built-in system role: Y - yes, N - no
     */
    @TableField("role_system_flag")
    @ChineseDescription("是否是系统角色:Y-是,N-否")
    private String roleSystemFlag;

    /**
     * Role type code
     */
    @TableField("role_type_code")
    @ChineseDescription("角色类型")
    private String roleTypeCode;
}
|
import requests
url = 'https://www.example.com'
# Always set a timeout so a stalled server cannot hang the script forever,
# and surface HTTP error statuses instead of silently accepting them.
response = requests.get(url, timeout=10)
response.raise_for_status()
<filename>cmd/compose/parser_test.go
package compose
import (
"errors"
"fmt"
"reflect"
"strings"
"testing"
"unsafe"
"gopkg.in/yaml.v2"
)
const composeFile string = `version: "3.7"
services:
service:
image: service-image
volumes:
- service:/app:delegated
service2:
image: service-image2
volumes:
- service2:/app:delegated
volumes:
service: null
service2: null
`
const composeWithoutService = `version: "3.7"
services:
service2:
image: service-image2
volumes:
- service2:/app:delegated
volumes:
service: null
service2: null
`
const composeWithouServiceVolume string = `version: "3.7"
services:
service:
image: service-image
volumes:
- service:/app:delegated
service2:
image: service-image2
volumes:
- service2:/app:delegated
volumes:
service2: null
`
const newComposeService string = `image: new-service-image
volumes:
- service:/app:delegated
`
const composeWithNewService string = `version: "3.7"
services:
service:
image: new-service-image
volumes:
- service:/app:delegated
service2:
image: service-image2
volumes:
- service2:/app:delegated
volumes:
service: null
service2: null
`
func TestNewParser(t *testing.T) {
p := NewParser()
if _, assert := p.(*DefaultParser); !assert {
t.Errorf("NewParser() did not return a *DefaultParser")
}
}
func TestLoadDefaultParser(t *testing.T) {
p := NewParser()
if err := p.Load(composeFile); err != nil {
t.Errorf("unexpected error loading docker compose file; error: %v", err)
}
yamlData := getYamlData(p.(*DefaultParser))
parsed := yaml.MapSlice{}
_ = yaml.Unmarshal([]byte(composeFile), &parsed)
if !reflect.DeepEqual(yamlData, parsed) {
t.Error("failed loading docker compose file content")
}
}
func TestStringDefaultParser(t *testing.T) {
p := NewParser()
_ = p.Load(composeFile)
content, err := p.String()
if err != nil {
t.Errorf("unexpected error getting docker compose file content; error: %v", err)
}
if strings.TrimSpace(content) != strings.TrimSpace(composeFile) {
t.Errorf("expecting content '%s', got '%s'", strings.TrimSpace(composeFile), strings.TrimSpace(content))
}
}
func TestRemoveServiceDefaultParser(t *testing.T) {
p := NewParser()
_ = p.Load(composeFile)
p.RemoveService("service")
yamlData := getYamlData(p.(*DefaultParser))
parsed := yaml.MapSlice{}
_ = yaml.Unmarshal([]byte(composeWithoutService), &parsed)
if !reflect.DeepEqual(yamlData, parsed) {
t.Error("failed removing docker compose file service")
}
}
func TestRemoveVolumeDefaultParser(t *testing.T) {
p := NewParser()
_ = p.Load(composeFile)
p.RemoveVolume("service")
yamlData := getYamlData(p.(*DefaultParser))
parsed := yaml.MapSlice{}
_ = yaml.Unmarshal([]byte(composeWithouServiceVolume), &parsed)
if !reflect.DeepEqual(yamlData, parsed) {
t.Error("failed removing docker compose file volume")
}
}
func TestSetServiceDefaultParser(t *testing.T) {
p := NewParser()
_ = p.Load(composeFile)
if err := p.SetService("service", newComposeService); err != nil {
t.Errorf("unexpected error setting docker compose service; error: %v", err)
}
yamlData := getYamlData(p.(*DefaultParser))
parsed := yaml.MapSlice{}
_ = yaml.Unmarshal([]byte(composeWithNewService), &parsed)
if !reflect.DeepEqual(yamlData, parsed) {
t.Error("failed setting docker compose file service")
}
}
// TestErrorSetServiceDefaultParser stubs yamlUnmarshalFn so SetService hits
// its unmarshal-error path, and verifies the error is propagated verbatim.
func TestErrorSetServiceDefaultParser(t *testing.T) {
	p := NewParser()
	_ = p.Load(composeFile)
	// Swap in a failing unmarshal and restore the original afterwards.
	originalYamlUnmarshalFn := yamlUnmarshalFn
	defer func() {
		yamlUnmarshalFn = originalYamlUnmarshalFn
	}()
	yamlUnmarshalFn = func(in []byte, out interface{}) error {
		// NOTE(review): this Println looks like leftover debug output; removing
		// it would also require dropping the then-unused fmt import.
		fmt.Println("unmarshal")
		return errors.New("yaml unmarshal error")
	}
	err := p.SetService("service", newComposeService)
	if err == nil {
		t.Error("expecting error 'yaml unmarshal error', got none")
	} else if err.Error() != "yaml unmarshal error" {
		t.Errorf("expecting error 'yaml unmarshal error', got %v", err)
	}
}
func TestNotFoundSetServiceDefaultParser(t *testing.T) {
p := NewParser()
_ = p.Load(composeFile)
err := p.SetService("service_not_exists", newComposeService)
if err == nil {
t.Error("expecting error 'service service_not_exists not found', got none")
} else if err.Error() != "service service_not_exists not found" {
t.Errorf("expecting error 'service service_not_exists not found', got %v", err)
}
}
func TestErrorStringDefaultParser(t *testing.T) {
p := NewParser()
_ = p.Load(composeFile)
originalYamlMarshalFn := yamlMarshalFn
defer func() {
yamlMarshalFn = originalYamlMarshalFn
}()
yamlMarshalFn = func(in interface{}) ([]byte, error) {
return nil, errors.New("yaml marshal error")
}
_, err := p.String()
if err == nil {
t.Error("expecting error 'yaml marshal error', got none")
} else if err.Error() != "yaml marshal error" {
t.Errorf("expecting error 'yaml marshal error', got %v", err)
}
}
// getYamlData extracts the unexported yamlData field from a DefaultParser via
// reflection + unsafe, so the tests can compare parser state without the
// package exporting it.
func getYamlData(p *DefaultParser) yaml.MapSlice {
	parserStruct := reflect.ValueOf(p).Elem()
	reflectYamlData := parserStruct.FieldByName("yamlData")
	// reflect.NewAt over the field's address bypasses the "unexported field
	// cannot be read" restriction of plain reflection.
	return reflect.NewAt(reflectYamlData.Type(), unsafe.Pointer(reflectYamlData.UnsafeAddr())).Elem().Interface().(yaml.MapSlice)
}
|
import { getCurrentUser, logOut } from './core/LogInInteractor'
import { deleteAccount } from './core/account/AccountInteractor'
/**
 * Central app configuration: mock-service wiring, feature-reset flags and
 * animation timing helpers.
 *
 * BUGFIX: was `export default AppConfig = {...}`, which assigns to an
 * undeclared identifier — a ReferenceError in strict (ES module) scope.
 * Declare the const first, then export it.
 */
const AppConfig = {
  initialize: initializeConfiguration,

  MOCK_SERVICES: {
    'react-native-firebase': {
      shouldUse: true,
      auth: {
        // User to have logged in upon start
        loggedInUser: {
          uid: '123',
          phoneNumber: '+15555555555'
        },
        // User that exists on the 'back end', but is not logged in locally
        // existingUser: { uid: '123', phoneNumber: '+15555555555' }
      },
      firestore: {
        collections: {
          // Existing users in "back end"
          users: {
            '123': {
              account: {
                id: '123',
                role: 'SHARER',
                ministryId: 'AA000'
              },
              convos: 22,
              conversions: 13
              // To seed mock contacts, add entries shaped like:
              // contacts: [{ name: 'Joe', phone: '5134033455',
              //   currentStepIndex: 0, currentStepDesc: 'Spiritual beliefs', id: '456' }]
            }
          },
          // Existing ministries in "back end"
          ministries: {
            'AA000': {
              name: 'Tree of Life Ministry',
              id: 'AA000',
              data: {},
              convos: 150,
              conversions: 98
            }
          },
          global: {
            global: {
              convos: 307,
              conversions: 124
            }
          }
        }
      },
    },
  },

  // Mock delay for loading (i.e. when using mock firestore)
  MOCK_DELAY: 0,

  // App
  FORCE_FRESH_START: false,
  // Authentication
  FORCE_LOGOUT: false,
  // Account
  FORCE_NEW_ACCOUNT: false,

  // Animation
  SKIP_ANIMATIONS: false,
  defaultAnimDuration: () => duration(1000),
  animDuration: duration
}

export default AppConfig
/**
 * Run any custom initialization: installs a global object logger and applies
 * the FORCE_* reset flags (delete account and/or log out) for development.
 */
function initializeConfiguration() {
  // Convenience logger for objects, available everywhere as objLog.log(obj).
  global.objLog = {
    log: obj => console.log(JSON.stringify(obj))
  }
  // Wipe the current account when a fresh start or new account is forced.
  if (AppConfig.FORCE_FRESH_START || AppConfig.FORCE_NEW_ACCOUNT) {
    const user = getCurrentUser()
    if (user != null) {
      deleteAccount(getCurrentUser().uid).then().catch(error => { console.log(error) })
    }
  }
  // Force a logout when requested (fire-and-forget; errors only logged).
  if (AppConfig.FORCE_FRESH_START || AppConfig.FORCE_LOGOUT) {
    logOut().then().catch(error => { console.log(error) })
  }
}
/**
 * Given a duration in milliseconds, collapse it to (near) instantaneous when
 * animations are being skipped; otherwise pass it through unchanged.
 */
function duration(ms) {
  return AppConfig.SKIP_ANIMATIONS ? 1 : ms
}
*/30 * * * * cd /Users/arpitkjain/Desktop/Data/POC/CoVIDVaccineTracker/cowin_bot && /usr/local/opt/python@3.8/bin/python3 main.py >> /Users/arpitkjain/Desktop/Data/POC/CoVIDVaccineTracker/cowin_bot/cowinBot_output.log 2>&1
*/30 * * * * cd /home/arpit/CoVIDVaccineTracker/cowin_bot && /usr/bin/python3 main.py >> /home/arpit/CoVIDVaccineTracker/cowin_bot/cowinBot_output.log 2>&1
|
import random
import numpy as np
from sklearn.neural_network import MLPRegressor
# Sanity experiment: try to "learn" seeded pseudorandom outputs with a
# small MLP. True uniform noise carries no signal, so the regressor
# cannot really predict the held-out targets.
random.seed(1)

# 200 evenly spaced inputs in [0, 1], each paired with a uniform target.
inputs = np.linspace(0, 1, 200)
targets = np.array([random.uniform(0, 1) for _ in range(200)])

# First 160 samples train, last 40 test; sklearn expects 2-D feature arrays.
x_train = np.reshape(inputs[:160], (-1, 1))
x_test = np.reshape(inputs[160:], (-1, 1))
y_train = targets[:160]
y_test = targets[160:]

# Fit a small two-hidden-layer ReLU regressor on the training split.
mlp = MLPRegressor(activation="relu", hidden_layer_sizes=(5, 5))
mlp.fit(x_train, y_train)

# Predict on the held-out inputs and report the "generated" numbers.
generated_numbers = mlp.predict(x_test)
print("Generated numbers:", generated_numbers)
/**
 * Inner-join two arrays of objects on their `id` property.
 *
 * For every element of `arr1` with a matching `id` in `arr2`, the two
 * objects are merged (properties from `arr1` win on conflict). Elements of
 * `arr1` without a match are dropped — the previous implementation crashed
 * on them (`find` returned undefined, then `.id` was read off it).
 *
 * @param {Array<{id: *}>} arr1 objects whose properties take precedence
 * @param {Array<{id: *}>} arr2 objects providing the remaining properties
 * @returns {Object[]} merged objects, in `arr1` order
 */
const joinArrays = (arr1, arr2) => {
  // Index arr2 once instead of re-scanning it per element (O(n + m)).
  const byId = new Map(arr2.map((item) => [item.id, item]));
  return arr1
    .filter((item) => byId.has(item.id))
    .map((item) => ({ ...byId.get(item.id), ...item }));
};
#include <iostream>
#include <vector>
// A team's identifier and its accumulated league points.
struct TeamPoints {
    int ID;
    int points;
};

std::vector<TeamPoints> teamPoints; // Assuming this vector is initialized with team IDs and initial points

// Award match points to the two participating teams:
// winner == 0 means a tie (both teams get 1 point); otherwise the team
// whose ID equals `winner` gets 3 points and the loser gets none.
void updateTeamPoints(int fTeam, int sTeam, int winner) {
    // std::size_t avoids the signed/unsigned comparison the old `int i` had.
    for (std::size_t i = 0; i < teamPoints.size(); i++) {
        if (teamPoints[i].ID == fTeam || teamPoints[i].ID == sTeam) {
            if (winner == 0) {
                teamPoints[i].points++; // Increment points for both teams in case of a tie
            } else if (teamPoints[i].ID == winner) {
                teamPoints[i].points += 3; // Increment points for the winning team
            }
        }
    }
}
// Doxygen-generated navigation data for the "sai sync control" group.
// Each entry appears to be [symbol label, target HTML anchor, sub-entries].
// Generated file — do not edit by hand.
var group__sai__sync__control =
[
    [ "ARM_SAI_ASYNCHRONOUS", "group__sai__sync__control.html#gad123537cb6ab9eefd6feab193ed74655", null ],
    [ "ARM_SAI_SYNCHRONOUS", "group__sai__sync__control.html#gad2ad5406c30c353e80f54b40b3de5db8", null ]
];
<reponame>NarcisAssignment/SimpleLoadbalancer<gh_stars>0
package controller
import (
"net/http"
"simpleLoadbalancer/model"
"github.com/gin-gonic/gin"
)
// FindBooks responds with every book in the database as JSON.
// NOTE(review): there is no pagination yet, so this is not very scalable.
func FindBooks(c *gin.Context) {
	var records []model.Book
	model.DB.Find(&records)
	c.JSON(http.StatusOK, gin.H{"data": records})
}
// FindBook responds with the single book whose id matches the :id URL
// parameter, or a 400 with an error payload when no row exists.
// Queries go through the shared pooled connection on model.DB
// (openConnection:100, maxIdelConnection:10, connctionLife:1h).
func FindBook(c *gin.Context) {
	var record model.Book
	err := model.DB.Where("id = ?", c.Param("id")).First(&record).Error
	if err != nil {
		c.JSON(http.StatusBadRequest, gin.H{"error": "Record not found!"})
		return
	}
	c.JSON(http.StatusOK, gin.H{"data": record})
}
|
<gh_stars>1-10
#pragma once

#ifdef __cplusplus
extern "C"
{
#endif

    /* RECENT is an opaque "recently used items" list; its definition is
       private to the implementation file.  Strings use the Windows TCHAR
       convention (LPCTSTR), so the API compiles for ANSI and Unicode. */
    struct RECENT;
    typedef struct RECENT *PRECENT;

    /* Allocate a list that keeps at most nCapacity entries. */
    PRECENT Recent_New(INT nCapacity);
    /* Maximum number of entries the list can hold. */
    INT Recent_GetCapacity(PRECENT pRecent);
    /* Number of entries currently stored. */
    INT Recent_GetCount(PRECENT pRecent);
    /* Entry at index i — presumably 0-based; confirm in the implementation. */
    LPCTSTR Recent_GetAt(PRECENT pRecent, INT i);
    /* Dump the list contents (debugging aid). */
    void Recent_Print(PRECENT pRecent);
    /* Index of psz in the list, or a not-found sentinel — see implementation. */
    INT Recent_Find(PRECENT pRecent, LPCTSTR psz);
    /* Insert psz as the most recent entry. */
    void Recent_Add(PRECENT pRecent, LPCTSTR psz);
    /* Remove the entry matching psz, if present. */
    void Recent_Remove(PRECENT pRecent, LPCTSTR psz);
    /* Destroy the list and free its storage. */
    void Recent_Delete(PRECENT pRecent);
    /* Self-test for this module. */
    void Recent_UnitTest();

#ifdef __cplusplus
} // extern "C"
#endif
|
package evilcraft.render.particle;
import net.minecraft.block.BlockLiquid;
import net.minecraft.block.material.Material;
import net.minecraft.client.particle.EntityFX;
import net.minecraft.util.MathHelper;
import net.minecraft.world.World;
import cpw.mods.fml.relauncher.Side;
import cpw.mods.fml.relauncher.SideOnly;
/**
 * Particle that appears underneath blocks for simulating drops.
 * Courtesy of BuildCraft: https://github.com/BuildCraft/BuildCraft/blob/master/common/buildcraft/energy/render/EntityDropParticleFX.java
 *
 */
@SideOnly(Side.CLIENT)
public class ExtendedEntityDropParticleFX extends EntityFX {

    /**
     * The height of the current bob
     */
    private int bobTimer;

    /**
     * Make a new instance.
     * @param world The world.
     * @param x X coordinate.
     * @param y Y coordinate.
     * @param z Z coordinate.
     * @param particleRed Red color.
     * @param particleGreen Green color.
     * @param particleBlue Blue color.
     */
    public ExtendedEntityDropParticleFX(World world, double x, double y, double z, float particleRed, float particleGreen, float particleBlue) {
        super(world, x, y, z, 0.0D, 0.0D, 0.0D);
        // Start at rest; the superclass constructor may have set motion.
        // (The original zeroed the motion a second time at the end of the
        // constructor — the duplicate has been removed.)
        this.motionX = this.motionY = this.motionZ = 0.0D;
        this.particleRed = particleRed;
        this.particleGreen = particleGreen;
        this.particleBlue = particleBlue;
        // 113 is the "forming drop" texture index; 112 = falling, 114 = splashed.
        this.setParticleTextureIndex(113);
        this.setSize(0.01F, 0.01F);
        this.particleGravity = 0.06F;
        this.bobTimer = 40;
        this.particleMaxAge = (int) (64.0D / (Math.random() * 0.8D + 0.2D));
    }

    @Override
    public void onUpdate() {
        this.prevPosX = this.posX;
        this.prevPosY = this.posY;
        this.prevPosZ = this.posZ;
        this.motionY -= (double) this.particleGravity;

        if (this.bobTimer-- > 0) {
            // While bobbing, damp motion heavily so the drop "hangs" under
            // the block before falling.
            this.motionX *= 0.02D;
            this.motionY *= 0.02D;
            this.motionZ *= 0.02D;
            this.setParticleTextureIndex(113);
        } else {
            this.setParticleTextureIndex(112);
        }

        this.moveEntity(this.motionX, this.motionY, this.motionZ);
        // Air drag.
        this.motionX *= 0.9800000190734863D;
        this.motionY *= 0.9800000190734863D;
        this.motionZ *= 0.9800000190734863D;

        if (this.particleMaxAge-- <= 0) {
            this.setDead();
        }

        if (this.onGround) {
            // Splash texture plus extra horizontal friction on landing.
            this.setParticleTextureIndex(114);
            this.motionX *= 0.699999988079071D;
            this.motionZ *= 0.699999988079071D;
        }

        Material material = this.worldObj.getBlock(MathHelper.floor_double(this.posX), MathHelper.floor_double(this.posY), MathHelper.floor_double(this.posZ)).getMaterial();
        if (material.isLiquid() || material.isSolid()) {
            // Kill the particle once it sinks below the liquid surface level.
            double d0 = (double) ((float) (MathHelper.floor_double(this.posY) + 1) - BlockLiquid.getLiquidHeightPercent(this.worldObj.getBlockMetadata(MathHelper.floor_double(this.posX), MathHelper.floor_double(this.posY), MathHelper.floor_double(this.posZ))));

            if (this.posY < d0) {
                this.setDead();
            }
        }
    }

}
|
<filename>data-prepper-api/src/main/java/com/amazon/dataprepper/model/sink/Sink.java
/*
* Copyright OpenSearch Contributors
* SPDX-License-Identifier: Apache-2.0
*/
package com.amazon.dataprepper.model.sink;
import com.amazon.dataprepper.model.record.Record;
import java.util.Collection;
/**
 * Data Prepper sink interface. Sink may publish records to a disk, a file,
 * to OpenSearch, other pipelines, or other external systems.
 *
 * @param <T> the concrete {@link Record} type this sink accepts
 */
public interface Sink<T extends Record<?>> {

    /**
     * Outputs a collection of records which extend {@link Record}.
     *
     * @param records the records to write to the sink.
     */
    void output(Collection<T> records);

    /**
     * Prepare sink for shutdown, by cleaning up resources and threads.
     */
    void shutdown();
}
|
<gh_stars>0
"use strict";
var candidates = [10, 1, 2, 7, 6, 1, 5];
var target = 8;
/**
 * LeetCode 40 "Combination Sum II": all unique combinations of `candidates`
 * (each element usable at most once) summing to `target`.
 *
 * Fixes over the previous version:
 *  - sort() was called without a comparator, which orders numbers
 *    lexicographically (e.g. [1, 10, 2, ...]); a numeric comparator is
 *    required for duplicate grouping to work.
 *  - the dedup key concatenated digits with no separator, so e.g. [1, 12]
 *    and [11, 2] collided; a "," separator makes keys unambiguous.
 *
 * @param {number[]} candidates pool of numbers (mutated: sorted in place)
 * @param {number} target desired sum
 * @returns {number[][]} unique combinations, each in ascending order
 */
function combinationSum2(candidates, target) {
    var results = [];
    var unique = {};
    candidates.sort(function (a, b) { return a - b; });
    // DFS over "include candidates[ind]" / "skip candidates[ind]" branches.
    var foo = function (result, ind, sum, str) {
        if (sum >= target || ind >= candidates.length) {
            if (sum === target && !unique.hasOwnProperty(str)) {
                results.push(result.slice(0));
                unique[str] = 0;
            }
            return;
        }
        // Branch 1: take the current candidate.
        result.push(candidates[ind]);
        foo(result, ind + 1, sum + candidates[ind], str + candidates[ind] + ",");
        result.pop();
        // Branch 2: skip it.
        foo(result, ind + 1, sum, str);
    };
    foo([], 0, 0, "");
    // Debug output retained from the original.
    results.forEach(function (result) { return console.log(result); });
    return results;
}
// Run the sample query (each result is also printed inside the function).
combinationSum2(candidates, target);
|
<reponame>soarqin/blitzd<filename>blitzd/cache/CheckRevision.cpp
#include "Config.h"
#include "CheckRevision.h"
#include "core/Cfg.h"
#include "utils/Memory.h"
#include "utils/Random.h"
#include "utils/File.h"
#include "utils/DataConv.h"
#include "text/Parser.h"
#include "text/XmlNode.h"
namespace Cache
{
	/**
	 * Build the CheckRevision cache: generate a random checksum formula and
	 * precompute the eight version checksums for every game listed in the
	 * CheckRevision XML configuration file.
	 */
	void CheckRevision::Load()
	{
		// Random seed values and operators used to build the formula string.
		uint a = Utils::GetRandom32() & 0x7FFFFFFF;
		uint b = Utils::GetRandom32() & 0x7FFFFFFF;
		uint c = Utils::GetRandom32() & 0x7FFFFFFF;
		byte rop1 = Utils::GetRandom8() % 3;
		byte rop2 = Utils::GetRandom8() % 3;
		byte rop3 = Utils::GetRandom8() % 3;
		byte rop4 = Utils::GetRandom8() % 3;
		const char rop[] = {'+', '-', '^'};
		char __formula[1024];
		sprintf(__formula, "A=%u B=%u C=%u 4 A=A%cS B=B%cC C=C%cA A=A%cB", a, b, c, rop[rop1], rop[rop2], rop[rop3], rop[rop4]);
		_formula = __formula;
		LOG_DEBUG(("Generated formula: %s", __formula));
		// Load the revisions config file.
		Text::XmlNode cfg_((Core::cfg.GetCfgDir() + "/" + Core::cfg.GetCheckRevFile()).c_str());
		// NOTE(review): std::auto_ptr is deprecated; switch to std::unique_ptr
		// once the codebase moves to C++11.
		std::auto_ptr<Text::XmlNode> node(cfg_.GetChild("Revisions"));
		if(node.get() != NULL)
		{
			std::vector<std::string> keys;
			keys.push_back("gameName");
			keys.push_back("gameId");
			keys.push_back("file1");
			keys.push_back("file2");
			keys.push_back("file3");
			keys.push_back("exever");
			std::vector<std::vector<std::string> > results;
			node->ReadArray(results, keys, "Revision");
			size_t cnt = results.size();
			for(size_t i = 0; i < cnt; ++ i)
			{
				Revision rev;
				rev.gameName = results[i][0];
				rev.gameId = Utils::Reverse(*(uint*)results[i][1].c_str());
				std::string fn[3];
				fn[0] = results[i][2];
				fn[1] = results[i][3];
				fn[2] = results[i][4];
				// exever may be given as hex (0x-prefixed) or decimal.
				if(_STR.strcmpni(results[i][5].c_str(), "0x", 2) == 0)
					rev.exeVer = strtoul(results[i][5].c_str() + 2, NULL, 16);
				else
					rev.exeVer = strtoul(results[i][5].c_str(), NULL, 10);
				if(fn[0].length() > 0 && fn[1].length() > 0 && fn[2].length() > 0 && rev.gameId != 0)
				{
					// Files live under a per-game subdirectory named by gameId.
					fn[0] = results[i][1] + '/' + fn[0];
					fn[1] = results[i][1] + '/' + fn[1];
					fn[2] = results[i][1] + '/' + fn[2];
					if(!Calculate(_formula, fn, rev.chksum))
					{
						LOG_ERROR(("Failed to calculate checksum for %s", rev.gameName.c_str()));
						continue;
					}
					// Renamed from 'i' to 'j': the old name shadowed the
					// outer size_t loop variable.
					for(int j = 0; j < 8 ; j ++)
					{
						LOG_DEBUG(("Checksum[%d] = 0x%08X", j, rev.chksum[j]));
					}
					_revlist.push_back(rev);
				}
			}
		}
	}

	/**
	 * Find the revision whose precomputed checksum (for the given version
	 * index) and exe version match; exeVer == 0 in the list acts as a
	 * wildcard. Returns NULL when nothing matches.
	 */
	CheckRevision::Revision* CheckRevision::findRev( int verIndex, uint chksum, uint exeVer )
	{
		for(size_t i = 0; i < _revlist.size(); i ++)
		{
			if(_revlist[i].chksum[verIndex] == chksum && (_revlist[i].exeVer == 0 || _revlist[i].exeVer == exeVer))
				return &_revlist[i];
		}
		return NULL;
	}

	/** The formula string generated by Load(). */
	const char * CheckRevision::GetFormula()
	{
		return _formula.c_str();
	}

	/**
	 * Run the checksum formula over the three game files and store one
	 * checksum per version index in result[sindex..eindex].
	 */
	bool CheckRevision::Calculate( std::string& formulaStr, std::string * fileNames, uint * result, uint sindex /*= 0*/, uint eindex /*= 7*/ )
	{
		// One seed value per MPQ version index.
		const uint mpqHashValues[] =
		{
			0xE7F4CB62,
			0xF6A14FFC,
			0xAA5504AF,
			0x871FCDC2,
			0x11BF6A18,
			0xC57292E6,
			0x7927D27E,
			0x2FEC8733
		};
		// val[0..2] = registers A, B, C; val[3] = S (current dword read).
		uint oval[4], val[4], opTarg[4], opVal1[4], opVal2[4];
		char opType[4];
		std::vector<std::string> tokens;
		Text::SeperateString(tokens, formulaStr, ' ');
		int formulaIdx = 0;
		size_t i;
		// Parse "X=n" initializers and "X=Y<op>Z" operations from the formula.
		for (i = 0; i < tokens.size(); ++i)
		{
			std::string token = tokens[i];
			if (token.find('=') != std::string::npos)
			{
				std::vector<std::string> nameTokens;
				Text::SeperateString(nameTokens, token, '=');
				if (nameTokens.size() != 2)
				{
					if(token.length() > 1)
						return false;
					continue;
				}
				int targ = ((nameTokens[0][0] == 'S') ? 3 : (nameTokens[0][0] - 'A'));
				std::string v = nameTokens[1];
				if (v[0] >= '0' && v[0] <= '9')
				{
					oval[targ] = strtoul(v.c_str(), NULL, 10);
				}
				else
				{
					opTarg[formulaIdx] = targ;
					opVal1[formulaIdx] = (v[0] == 'S') ? 3 : (v[0] - 'A');
					opType[formulaIdx] = v[1];
					opVal2[formulaIdx] = (v[2] == 'S') ? 3 : (v[2] - 'A');
					formulaIdx++;
				}
			}
		}
		// Load each file, padded up to a 1024-byte boundary with a
		// descending 0xFF, 0xFE, ... byte pattern.
		std::vector<byte> buf[3];
		for (i = 0; i < 3; ++ i)
		{
			Utils::FileStream fs;
			fs.Open(Core::cfg.GetGamesDir() + '/' + fileNames[i]);
			uint fLen = (uint)fs.Size();
			uint bufSize = (fLen + 1023) & (~1023);
			buf[i].resize(bufSize);
			fs.Read(&buf[i][0], fLen);
			fs.Close();
			byte pad = 0xff;
			for (uint j = fLen; j < bufSize; ++j)
			{
				buf[i][j] = pad--;
			}
		}
		// Run the formula over every dword of every file, once per version.
		for(uint verIndex = sindex; verIndex <= eindex; verIndex ++)
		{
			memcpy(val, oval, sizeof(val));
			val[0] ^= mpqHashValues[verIndex];
			for (int i = 0; i < 3; i++)
			{
				uint* bp = (uint*)&buf[i][0];
				uint lc = (uint)buf[i].size() >> 2;
				for (uint j = 0; j < lc; ++ j)
				{
					val[3] = *(bp++);
					for (int k = 0; k < formulaIdx; k++)
					{
						switch (opType[k])
						{
						case '+':
							val[opTarg[k]] = val[opVal1[k]] + val[opVal2[k]];
							break;
						case '-':
							val[opTarg[k]] = val[opVal1[k]] - val[opVal2[k]];
							break;
						case '^':
							val[opTarg[k]] = val[opVal1[k]] ^ val[opVal2[k]];
							break;
						case '*':
							val[opTarg[k]] = val[opVal1[k]] * val[opVal2[k]];
							break;
						case '/':
							val[opTarg[k]] = val[opVal1[k]] / val[opVal2[k]];
							break;
						default:
							break;
						}
					}
				}
			}
			// Register C holds the final checksum for this version index.
			result[verIndex] = val[2];
		}
		return true;
	}
}
|
<reponame>DorinR/Politisense<filename>src/routes/api/ridings.js
import express from 'express'
const router = express.Router()
const controller = require('../../controllers/Controller')

// @route GET api/ridings/getRidingCode/:riding
// @desc get riding code corresponding to the riding name passed in
// @access Public
router.get('/getRidingCode/:riding', controller.getRidingCode)

// @route GET api/ridings/getRidingPopulation/:riding
// @desc get the population of the riding passed in
// @access Public
router.get('/getRidingPopulation/:riding', controller.getRidingPopulation)

// @route GET api/ridings/getRidingByRidingCode
// @desc presumably returns the riding matching a riding code — confirm in Controller
// @access Public
router.get('/getRidingByRidingCode', controller.getRidingByRidingCode)

module.exports = router
|
<gh_stars>1-10
// Barrel module: re-export the date and string helpers under stable names.
import datetime from './date';
import stringExtension from './string';

export { datetime, stringExtension };
|
<filename>src/net/yotvoo/asterd/app/Constants.java
package net.yotvoo.asterd.app;
/**
 * Tuning constants for the AsterDroids game: scene size, enemy spawning,
 * player movement, bullets, and save-file names.
 */
class Constants {
    // Preferred game scene dimensions in pixels.
    static final double PREFERED_GAME_SCENE_WIDTH = 1200d;
    static final double PREFERED_GAME_SCENE_HEIGHT = 800d;
    // Enemy size/speed bounds; units are presumably pixels and pixels-per-update — confirm in the game loop.
    static final double MAX_ENEMY_SIZE = 20d;
    static final double MIN_ENEMY_SIZE = 5d;
    static final double MAX_ENEMY_SPEED = 3d;
    static final double MAX_ENEMY_PROXIMITY = 100d;
    static final double MAX_ENEMY_COUNT = 20d;
    // Chance (0..1) of spawning an enemy, presumably checked once per update cycle.
    static final double ENEMY_SPAWN_RATIO = 0.02d;
    static final double MAX_STAR_SIZE = 4d;
    static final int STARS_NUMBER = 500;
    // Minimum time between shots, presumably in milliseconds.
    static final double BULLETS_INTERVAL = 300d;
    //player object acceleration made during one update cycle
    static final double ACCELERATION = 0.1d;
    //player object rotation step made during one update cycle
    static final double ROTATE_STEP = 3d;
    static final int MAX_BULLET_AGE = 1000; // 1 sek
    // Persistence file names.
    static final String HIGH_SCORE_FILE_NAME = "AsterDroids.score";
    static final String SIMPLE_CONNECTION_FILE_NAME = "AsterDroids.connection";
}
|
<reponame>chrishumboldt/rocket-utility<gh_stars>0
/**
* @author <NAME>
*/
/**
 * Common UI state values, expressed as string literals so they can be
 * written straight into DOM attributes / class names.
 */
export enum State {
   ACTIVE = 'active',
   CLOSED = 'closed',
   HIDDEN = 'hidden',
   INACTIVE = 'inactive',
   LOADING = 'loading',
   OFF = 'off',
   ON = 'on',
   OPEN = 'open',
   TOGGLED = 'toggled',
   VISIBLE = 'visible'
}
/**
 * For each State member, the string of the opposite state (e.g.
 * OpposingState.ACTIVE is what to apply when leaving "active").
 * NOTE(review): TOGGLED maps to itself — it has no natural opposite; and
 * LOADING has no entry here.
 */
export enum OpposingState {
   ACTIVE = 'inactive',
   CLOSED = 'open',
   HIDDEN = 'visible',
   INACTIVE = 'active',
   OFF = 'on',
   ON = 'off',
   OPEN = 'closed',
   TOGGLED = 'toggled',
   VISIBLE = 'hidden'
}
|
<filename>truffle/src/main/java/org/jruby/truffle/language/arguments/ReadCallerFrameNode.java
/*
* Copyright (c) 2015, 2016 Oracle and/or its affiliates. All rights reserved. This
* code is released under a tri EPL/GPL/LGPL license. You can use it,
* redistribute it and/or modify it under the terms of the:
*
* Eclipse Public License version 1.0
* GNU General Public License version 2
* GNU Lesser General Public License version 2.1
*/
package org.jruby.truffle.language.arguments;
import com.oracle.truffle.api.frame.VirtualFrame;
import com.oracle.truffle.api.profiles.ConditionProfile;
import com.oracle.truffle.api.source.SourceSection;
import org.jruby.truffle.RubyContext;
import org.jruby.truffle.language.NotProvided;
import org.jruby.truffle.language.RubyNode;
public class ReadCallerFrameNode extends RubyNode {

    // Profiles whether a caller frame was present so Truffle can compile
    // only the branch that is actually taken.
    private final ConditionProfile callerFrameProfile = ConditionProfile.createBinaryProfile();

    public ReadCallerFrameNode(RubyContext context, SourceSection sourceSection) {
        super(context, sourceSection);
    }

    /**
     * Returns the caller's frame stored in this frame's arguments, or
     * {@code NotProvided.INSTANCE} when none was passed.
     */
    @Override
    public Object execute(VirtualFrame frame) {
        final Object callerFrame = RubyArguments.getCallerFrame(frame);

        if (callerFrameProfile.profile(callerFrame == null)) {
            return NotProvided.INSTANCE;
        } else {
            return callerFrame;
        }
    }

}
|
# This file is supposed to be sourced by each Recipe
# that wants to use the functions contained herein
# like so:
# wget -q https://github.com/probonopd/AppImages/raw/master/functions.sh -O ./functions.sh
# . ./functions.sh
# RECIPE=$(realpath "$0")
# Options for apt-get to use local files rather than the system ones.
# Everything (lists, caches, status) is redirected into the working
# directory, so no root access is needed and the host apt state is never
# touched.
OPTIONS="-o Debug::NoLocking=1
-o APT::Cache-Limit=125829120
-o Dir::Etc::sourcelist=./sources.list
-o Dir::State=./tmp
-o Dir::Cache=./tmp
-o Dir::State::status=./status
-o Dir::Etc::sourceparts=-
-o APT::Get::List-Cleanup=0
-o APT::Get::AllowUnauthenticated=1
-o Debug::pkgProblemResolver=true
-o Debug::pkgDepCache::AutoInstall=true
-o APT::Install-Recommends=0
-o APT::Install-Suggests=0
"
# Either get the file from remote or from a static place.
# critical for builds without network access like in Open Build Service
cat_file_from_url()
{
  # Prefer a local copy under $STATIC_FILES when present; otherwise stream
  # the file from the network. The URL is quoted so it survives word
  # splitting (the previous command-in-a-variable form left it unquoted).
  if [ -e "$STATIC_FILES/${1##*/}" ] ; then
    cat "$STATIC_FILES/${1##*/}"
  else
    wget -q "$1" -O -
  fi
}
# Discard any local modifications, then update from the remote branch.
git_pull_rebase_helper()
{
  git reset --hard HEAD
  git pull
}
# Patch /usr to ././ in ./usr
# to make the contents of usr/ relocateable
# (this requires us to cd ./usr before running the application; AppRun does that)
# Note: "/usr" and "././" are both 4 bytes, so binary string offsets are preserved.
patch_usr()
{
  find usr/ -type f -executable -exec sed -i -e "s|/usr|././|g" {} \;
}
# Download AppRun and make it executable
get_apprun()
{
  # wget -c https://github.com/probonopd/AppImageKit/releases/download/5/AppRun -O ./AppRun # 64-bit
  # Fetch the 64-bit AppRun launcher from the continuous release.
  wget -c https://github.com/probonopd/AppImageKit/releases/download/continuous/AppRun-x86_64 -O AppRun # 64-bit
  chmod a+x AppRun
}
# Copy the library dependencies of all exectuable files in the current directory
# (it can be beneficial to run this multiple times)
copy_deps()
{
  PWD=$(readlink -f .)
  # Quote the -name patterns so the shell does not glob-expand them against
  # files in the current directory before find sees them.
  FILES=$(find . -type f -executable -or -name "*.so.*" -or -name "*.so" | sort | uniq )
  for FILE in $FILES ; do
    ldd "${FILE}" | grep "=>" | awk '{print $3}' | xargs -I '{}' echo '{}' >> DEPSFILE
  done
  DEPS=$(cat DEPSFILE | sort | uniq)
  for FILE in $DEPS ; do
    # Quote "$FILE" so paths with unusual characters do not break the test.
    if [ -e "$FILE" ] && [[ $(readlink -f "$FILE")/ != $PWD/* ]] ; then
      cp -v --parents -rfL "$FILE" ./ || true
    fi
  done
  rm -f DEPSFILE
}
# Move ./lib/ tree to ./usr/lib/
# (copies the contents with --parents, then removes the original trees;
# both 32-bit ./lib and 64-bit ./lib64 layouts are handled)
move_lib()
{
  mkdir -p ./usr/lib ./lib && find ./lib/ -exec cp -v --parents -rfL {} ./usr/ \; && rm -rf ./lib
  mkdir -p ./usr/lib ./lib64 && find ./lib64/ -exec cp -v --parents -rfL {} ./usr/ \; && rm -rf ./lib64
}
# Delete blacklisted files
delete_blacklisted()
{
  BLACKLISTED_FILES=$( cat_file_from_url https://github.com/probonopd/AppImages/raw/master/excludelist | sed '/^\s*$/d' | sed '/^#.*$/d')
  echo $BLACKLISTED_FILES
  for FILE in $BLACKLISTED_FILES ; do
    # A pattern can match several paths; delete each match individually.
    # (The old 'rm -f "${FOUND}"' passed the whole newline-separated list
    # as a single file name and silently failed on multiple matches.)
    find . -xtype f -name "${FILE}" 2>/dev/null | while read -r FOUND ; do
      echo "Deleting blacklisted ${FOUND}"
      rm -f "${FOUND}"
    done
  done

  # Do not bundle developer stuff
  rm -rf usr/include || true
  rm -rf usr/lib/cmake || true
  rm -rf usr/lib/pkgconfig || true
  # -exec is portable; 'xargs -i' is a deprecated GNU-only spelling of -I.
  find . -name '*.la' -exec rm -f {} \;
}
# Echo highest glibc version needed by the executable files in the current directory
glibc_needed()
{
  # The -name patterns are quoted so the shell cannot expand them against
  # files in the current directory before find receives them.
  find . -name "*.so" -or -name "*.so.*" -or -type f -executable -exec readelf -s '{}' 2>/dev/null \; | sed -n 's/.*@GLIBC_//p'| awk '{print $1}' | sort --version-sort | tail -n 1
}
# Add desktop integration
# Usage: get_desktopintegration name_of_desktop_file_and_exectuable
get_desktopintegration()
{
  # The real binary is whatever Exec= points at in the .desktop file.
  REALBIN=$(grep -o "^Exec=.*" *.desktop | sed -e 's|Exec=||g' | cut -d " " -f 1 | head -n 1)
  cat_file_from_url https://raw.githubusercontent.com/probonopd/AppImageKit/master/desktopintegration > ./usr/bin/$REALBIN.wrapper
  chmod a+x ./usr/bin/$REALBIN.wrapper
  # Point the .desktop file at the wrapper instead of the raw binary.
  sed -i -e "s|^Exec=$REALBIN|Exec=$REALBIN.wrapper|g" $1.desktop
}
# Generate AppImage; this expects $ARCH, $APP and $VERSION to be set
generate_appimage()
{
  # if [[ "$RECIPE" == *ecipe ]] ; then
  #   echo "#!/bin/bash -ex" > ./$APP.AppDir/Recipe
  #   echo "# This recipe was used to generate this AppImage." >> ./$APP.AppDir/Recipe
  #   echo "# See http://appimage.org for more information." >> ./$APP.AppDir/Recipe
  #   echo "" >> ./$APP.AppDir/Recipe
  #   cat $RECIPE >> ./$APP.AppDir/Recipe
  # fi
  #
  # Detect the architecture of what we are packaging.
  # The main binary could be a script, so let's use a .so library
  BIN=$(find . -name *.so* -type f | head -n 1)
  INFO=$(file "$BIN")
  if [ -z $ARCH ] ; then
    if [[ $INFO == *"x86-64"* ]] ; then
      ARCH=x86_64
    elif [[ $INFO == *"i686"* ]] ; then
      ARCH=i686
    elif [[ $INFO == *"armv6l"* ]] ; then
      ARCH=armhf
    else
      echo "Could not automatically detect the architecture."
      echo "Please set the \$ARCH environment variable."
      exit 1
    fi
  fi
  # Fetch the (type 1) AppImageAssistant packer and build the image into ../out.
  # NOTE(review): the output file removed below is hardcoded to x86_64 while
  # the file actually produced embeds $ARCH — confirm whether that is intended.
  wget -c "https://github.com/probonopd/AppImageKit/releases/download/6/AppImageAssistant_6-x86_64.AppImage" -O AppImageAssistant # (64-bit)
  chmod a+x ./AppImageAssistant
  mkdir -p ../out || true
  rm ../out/$APP"-"$VERSION"-x86_64.AppImage" 2>/dev/null || true
  GLIBC_NEEDED=${GLIBC_NEEDED:=$(glibc_needed)}
  ./AppImageAssistant ./$APP.AppDir/ ../out/$APP"-"$VERSION".glibc"$GLIBC_NEEDED"-"$ARCH".AppImage"
}
# Generate AppImage type 2
generate_type2_appimage()
{
  # Get the ID of the last successful build on Travis CI
  # ID=$(wget -q https://api.travis-ci.org/repos/probonopd/appimagetool/builds -O - | head -n 1 | sed -e 's|}|\n|g' | grep '"result":0' | head -n 1 | sed -e 's|,|\n|g' | grep '"id"' | cut -d ":" -f 2)
  # Get the transfer.sh URL from the logfile of the last successful build on Travis CI
  # Only Travis knows why build ID and job ID don't match and why the above doesn't give both...
  # URL=$(wget -q "https://s3.amazonaws.com/archive.travis-ci.org/jobs/$((ID+1))/log.txt" -O - | grep "https://transfer.sh/.*/appimagetool" | tail -n 1 | sed -e 's|\r||g')
  # if [ -z "$URL" ] ; then
  #   URL=$(wget -q "https://s3.amazonaws.com/archive.travis-ci.org/jobs/$((ID+2))/log.txt" -O - | grep "https://transfer.sh/.*/appimagetool" | tail -n 1 | sed -e 's|\r||g')
  # fi
  URL="https://github.com/probonopd/AppImageKit/releases/download/continuous/appimagetool-x86_64.AppImage"
  wget -c "$URL" -O appimagetool
  chmod a+x ./appimagetool
  set +x
  # Bugfix: the second condition used to be '( ! -z "$TRAVIS" )', which ran
  # "-z" as a command inside a subshell instead of testing the variable.
  if [ ! -z "$KEY" ] && [ ! -z "$TRAVIS" ] ; then
    # Signing path: decrypt the GPG keyring shipped as an encrypted blob.
    wget https://github.com/probonopd/AppImageKit/files/584665/data.zip -O data.tar.gz.gpg
    ( set +x ; echo $KEY | gpg2 --batch --passphrase-fd 0 --no-tty --skip-verify --output data.tar.gz --decrypt data.tar.gz.gpg )
    tar xf data.tar.gz
    sudo chown -R $USER .gnu*
    mv $HOME/.gnu* $HOME/.gnu_old ; mv .gnu* $HOME/
    GLIBC_NEEDED=${GLIBC_NEEDED:=$(glibc_needed)}
    VERSION=$VERSION.glibc$GLIBC_NEEDED ./appimagetool -n -s --bintray-user $BINTRAY_USER --bintray-repo $BINTRAY_REPO -v ./$APP.AppDir/
  else
    # Unsigned build.
    GLIBC_NEEDED=${GLIBC_NEEDED:=$(glibc_needed)}
    VERSION=$VERSION.glibc$GLIBC_NEEDED ./appimagetool -n --bintray-user $BINTRAY_USER --bintray-repo $BINTRAY_REPO -v ./$APP.AppDir/
  fi
  set -x
  mkdir -p ../out/ || true
  mv *.AppImage* ../out/
}
# Generate status file for use by apt-get; assuming that the recipe uses no newer
# ingredients than what would require more recent dependencies than what we assume
# to be part of the base system
generate_status()
{
  mkdir -p ./tmp/archives/
  mkdir -p ./tmp/lists/partial
  touch tmp/pkgcache.bin tmp/srcpkgcache.bin
  wget -q -c "https://github.com/probonopd/AppImages/raw/master/excludedeblist"
  rm status 2>/dev/null || true
  # Pretend every excluded package is already installed at a very high
  # version so apt never tries to pull it into the bundle.
  for PACKAGE in $(cat excludedeblist | cut -d "#" -f 1) ; do
    printf "Package: $PACKAGE\nStatus: install ok installed\nArchitecture: all\nVersion: 9:999.999.999\n\n" >> status
  done
}
# Find the desktop file and copy it to the AppDir
# (expects $LOWERAPP to be set; missing files are ignored)
get_desktop()
{
  find usr/share/applications -iname "*${LOWERAPP}.desktop" -exec cp {} . \; || true
}
# Normalize a .desktop file passed as $1.
fix_desktop() {
  # fix trailing semicolons
  # (the freedesktop spec requires list-valued keys to end with ';')
  for key in Actions Categories Implements Keywords MimeType NotShowIn OnlyShowIn; do
    sed -i '/'"$key"'.*[^;]$/s/$/;/' $1
  done
}
# Find the icon file and copy it to the AppDir
# (tries pixmaps first, then the themed icon sizes; all failures ignored)
get_icon()
{
  find ./usr/share/pixmaps/$LOWERAPP.png -exec cp {} . \; 2>/dev/null || true
  find ./usr/share/icons -path *64* -name $LOWERAPP.png -exec cp {} . \; 2>/dev/null || true
  find ./usr/share/icons -path *128* -name $LOWERAPP.png -exec cp {} . \; 2>/dev/null || true
  find ./usr/share/icons -path *512* -name $LOWERAPP.png -exec cp {} . \; 2>/dev/null || true
  find ./usr/share/icons -path *256* -name $LOWERAPP.png -exec cp {} . \; 2>/dev/null || true
  ls -lh $LOWERAPP.png || true
}
# Find out the version
# (parsed out of the first matching ../<app>_*.deb file name)
get_version()
{
  THEDEB=$(find ../*.deb -name $LOWERAPP"_*" | head -n 1)
  if [ -z "$THEDEB" ] ; then
    echo "Version could not be determined from the .deb; you need to determine it manually"
  fi
  # Strip epoch, distro suffix and dfsg markers from the Debian version.
  VERSION=$(echo $THEDEB | cut -d "~" -f 1 | cut -d "_" -f 2 | cut -d "-" -f 1 | sed -e 's|1%3a||g' | sed -e 's|+dfsg||g' )
  echo $VERSION
}
# transfer.sh
# Upload $1 (or stdin, when not attached to a tty) to transfer.sh and print
# the resulting download URL.
transfer() { if [ $# -eq 0 ]; then echo "No arguments specified. Usage:\necho transfer /tmp/test.md\ncat /tmp/test.md | transfer test.md"; return 1; fi
tmpfile=$( mktemp -t transferXXX ); if tty -s; then basefile=$(basename "$1" | sed -e 's/[^a-zA-Z0-9._-]/-/g'); curl --progress-bar --upload-file "$1" "https://transfer.sh/$basefile" >> $tmpfile; else curl --progress-bar --upload-file "-" "https://transfer.sh/$1" >> $tmpfile ; fi; cat $tmpfile; rm -f $tmpfile; }
# Patch binary files; fill with padding if replacement is shorter than original
# http://everydaywithlinux.blogspot.de/2012/11/patch-strings-in-binary-files-with-sed.html
# Example: patch_strings_in_file foo "/usr/local/lib/foo" "/usr/lib/foo"
# The file is round-tripped through a hex dump so sed can do an exact
# byte-level replacement without interpreting binary content.
function patch_strings_in_file() {
    local FILE="$1"
    local PATTERN="$2"
    local REPLACEMENT="$3"

    # Find all unique strings in FILE that contain the pattern
    STRINGS=$(strings ${FILE} | grep ${PATTERN} | sort -u -r)

    if [ "${STRINGS}" != "" ] ; then
        echo "File '${FILE}' contain strings with '${PATTERN}' in them:"

        for OLD_STRING in ${STRINGS} ; do
            # Create the new string with a simple bash-replacement
            NEW_STRING=${OLD_STRING//${PATTERN}/${REPLACEMENT}}

            # Create null terminated ASCII HEX representations of the strings
            OLD_STRING_HEX="$(echo -n ${OLD_STRING} | xxd -g 0 -u -ps -c 256)00"
            NEW_STRING_HEX="$(echo -n ${NEW_STRING} | xxd -g 0 -u -ps -c 256)00"

            if [ ${#NEW_STRING_HEX} -le ${#OLD_STRING_HEX} ] ; then
                # Pad the replacement string with null terminations so the
                # length matches the original string
                while [ ${#NEW_STRING_HEX} -lt ${#OLD_STRING_HEX} ] ; do
                    NEW_STRING_HEX="${NEW_STRING_HEX}00"
                done

                # Now, replace every occurrence of OLD_STRING with NEW_STRING
                echo -n "Replacing ${OLD_STRING} with ${NEW_STRING}... "
                hexdump -ve '1/1 "%.2X"' ${FILE} | \
                sed "s/${OLD_STRING_HEX}/${NEW_STRING_HEX}/g" | \
                xxd -r -p > ${FILE}.tmp
                chmod --reference ${FILE} ${FILE}.tmp
                mv ${FILE}.tmp ${FILE}
                echo "Done!"
            else
                # A longer replacement would shift byte offsets and corrupt
                # the binary, so it is refused.
                echo "New string '${NEW_STRING}' is longer than old" \
                     "string '${OLD_STRING}'. Skipping."
            fi
        done
    fi
}
|
import time
import random
from sense_hat import SenseHat
sense = SenseHat()

# Cycle forever through the letters N, M, D, showing each for one second in
# a random colour, with a two-second pause between full cycles.
while True:
    for letter in ("N", "M", "D"):
        # Random RGB colour. Each channel must lie in 0..255; randint is
        # inclusive on both ends, so the old (1, 256) bounds could produce
        # an invalid value of 256 (and never produced 0).
        colour = (random.randint(0, 255), random.randint(0, 255), random.randint(0, 255))
        sense.show_letter(str(letter), text_colour=colour)  # Display the letter with random color
        time.sleep(1)  # Delay for 1 second
    time.sleep(2)  # Delay for 2 seconds before repeating the sequence
#!/bin/bash

TAG=latest
PROJECT=registry.cn-beijing.aliyuncs.com/zkr/kk-addup

# Cross-compile the Go binary inside the kk-gobuild helper image, mounting
# the working tree and GOPATH read-write.
echo "[GO BUILD] [$PROJECT:$TAG] >>"
docker run --rm -v `pwd`:/main:rw -v $GOPATH:/go:rw hailongz/kk-gobuild:latest go build
echo "[OK]"

# Package the compiled binary into the application image.
echo "[DOCKER BUILD] [$PROJECT:$TAG] >>"
docker build -t $PROJECT:$TAG .
echo "[OK]"

# Publish the image to the Aliyun registry.
echo "[DOCKER PUSH] [$PROJECT:$TAG] >>"
docker push $PROJECT:$TAG
echo "[OK]"

# Remove the build artifact left behind by `go build`.
rm -rf main
|
<gh_stars>1-10
package com.tablebird.serviceproviderbuilder;
import androidx.annotation.NonNull;
/**
 * A builder that constructs a concrete service instance.
 *
 * @param <S> the service type produced by this builder
 */
public interface ServiceBuilder<S> {
    /** Builds and returns the service instance; never {@code null}. */
    @NonNull
    S build();
}
|
#!/bin/bash
# Release artifacts that must all exist before publishing can proceed.
files=(
    bin/uptrace_darwin_arm64
    bin/uptrace_darwin_amd64
    bin/uptrace_linux_arm64
    bin/uptrace_linux_amd64
    bin/uptrace_windows_amd64.exe
    dist/uptrace-*.aarch64.rpm
    dist/uptrace-*.x86_64.rpm
    dist/uptrace_*_amd64.deb
    dist/uptrace_*_arm64.deb
);

# Report passed=false (and exit 0 so the workflow continues) on the first
# missing file; otherwise report passed=true.
# NOTE(review): "::set-output" is the deprecated GitHub Actions output
# mechanism — migrate to $GITHUB_OUTPUT when the workflow allows.
for f in "${files[@]}"
do
    if [[ ! -f $f ]]
    then
        echo "$f does not exist."
        echo "::set-output name=passed::false"
        exit 0
    fi
done

echo "::set-output name=passed::true"
|
/*
* Copyright 2012-2013 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.geoxp.oss;
import java.io.BufferedReader;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.Reader;
import java.io.StringReader;
import java.util.Arrays;
public class DirectoryHierarchyKeyStore extends KeyStore {
private final File directory;
public DirectoryHierarchyKeyStore(String directory) {
try {
this.directory = new File(directory).getCanonicalFile();
} catch (IOException ioe) {
throw new RuntimeException("Unable to determine canonical path.");
}
if (!this.directory.exists() || !this.directory.isDirectory()) {
throw new RuntimeException("Invalid directory '" + this.directory.getAbsolutePath() + "'");
}
}
@Override
public byte[] getSecret(String name, String fingerprint) throws OSSException {
try {
//
// Sanitize name
//
name = sanitizeSecretName(name);
File root = getSecretFile(name);
File secretFile = new File(root.getAbsolutePath() + ".secret");
File aclFile = findACLFile(name);
//
// Check if secret exists
//
if (!secretFile.exists() || !secretFile.isFile() || null == aclFile || !aclFile.exists() || !aclFile.isFile()) {
throw new OSSException("Missing secret or ACL file.");
}
//
// Check ACLs
//
// Sanitize fingerprint
if (null == fingerprint) {
fingerprint = "";
}
fingerprint = fingerprint.toLowerCase().replaceAll("[^0-9a-f]","");
boolean authorized = false;
Reader reader;
if (!OSS.hasSecureACLs()) {
reader = new FileReader(aclFile);
} else {
//
// Read ACL blob and unwrap it
//
InputStream in = new FileInputStream(aclFile);
byte[] buf = new byte[1024];
ByteArrayOutputStream baos = new ByteArrayOutputStream();
while(true) {
int len = in.read(buf);
if (len < 0) {
break;
}
baos.write(buf, 0, len);
}
in.close();
byte[] k = OSS.getMasterSecret();
reader = new StringReader(new String(CryptoHelper.unwrapBlob(k, baos.toByteArray()), "UTF-8"));
Arrays.fill(k, (byte) 0);
}
try {
BufferedReader br = new BufferedReader(reader);
while(true) {
String line = br.readLine();
if (null == line) {
break;
}
String acl = line.toLowerCase().replaceAll("[^0-9a-f#*]", "");
if ("*".equals(acl) || fingerprint.equals(acl)) {
authorized = true;
break;
}
}
br.close();
} catch (IOException ioe) {
throw new OSSException(ioe);
}
if (!authorized) {
throw new OSSException("Access denied.");
}
//
// Read secret
//
ByteArrayOutputStream baos = new ByteArrayOutputStream((int) secretFile.length());
byte[] buf = new byte[1024];
try {
InputStream is = new FileInputStream(secretFile);
do {
int len = is.read(buf);
if (-1 == len) {
break;
}
baos.write(buf, 0, len);
} while(true);
is.close();
} catch (IOException ioe) {
throw new OSSException(ioe);
}
return baos.toByteArray();
} catch (IOException ioe) {
throw new OSSException(ioe);
}
}
@Override
public void putSecret(String name, byte[] secret) throws OSSException {
//
// Sanitize name
//
name = sanitizeSecretName(name);
File root = getSecretFile(name);
File secretFile = new File(root.getAbsolutePath() + ".secret");
if (secretFile.exists()) {
throw new OSSException("Secret '" + name + "' already exists.");
}
//
// Create hierarchy
//
if (secretFile.getParentFile().exists() && !secretFile.getParentFile().isDirectory()) {
throw new OSSException("Secret path already exists and is not a directory.");
}
if (!secretFile.getParentFile().exists() && !secretFile.getParentFile().mkdirs()) {
throw new OSSException("Unable to create path to secret file.");
}
try {
OutputStream os = new FileOutputStream(secretFile);
os.write(secret);
os.close();
} catch (IOException ioe) {
throw new OSSException(ioe);
}
}
/**
* Retrieve secret file from secret name
*
* @param name
* @return
*/
private File getSecretFile(String name) throws OSSException {
//
// Sanitize name
//
name = sanitizeSecretName(name);
//
// Replace '.' with '/'
//
String[] tokens = name.split("\\.");
File f;
f = this.directory.getAbsoluteFile();
for (int i = 0; i < tokens.length; i++) {
f = new File(f, tokens[i]);
}
return f;
}
@Override
public File getACLFile(String name) throws IOException, OSSException {
    // The ACL file sits next to the secret's path, with an '.acl' suffix.
    File secretPath = getSecretFile(name);
    String aclPath = secretPath.getCanonicalPath() + ".acl";
    return new File(aclPath);
}
/**
 * Determine the ACL file to use for a given secret.
 *
 * Walks from the secret's location upward toward (but excluding) the
 * store's root directory and returns the first '.acl' regular file found.
 *
 * @param name Name of secret
 * @return File of ACLs or null if none is suitable
 */
private File findACLFile(String name) throws IOException, OSSException {
    for (File current = getSecretFile(name); !current.equals(this.directory); current = current.getParentFile()) {
        File acl = new File(current.getCanonicalPath() + ".acl");
        if (acl.isFile() && acl.exists()) {
            return acl;
        }
    }
    return null;
}
}
|
def max_3(n1, n2, n3):
    """Return the largest of the three given values.

    Delegates to the built-in ``max`` instead of the original manual
    comparison chain, which also shadowed the ``max`` builtin with a local.
    """
    return max(n1, n2, n3)
# Example
print(max_3(1, 2, 3)) # Output: 3 |
/// Represents a Process Identifier (PID). PIDs handed out are 1-based.
type Pid = u32;
/// Represents an error number (returned when allocation fails).
type Errno = u32;
/// Maximum PID value the allocator will hand out (inclusive).
const MAX_PID: Pid = 65535;
/// Represents a PID allocator.
///
/// Bitmap-style allocator: `used[i] == true` means PID `i + 1` is taken
/// (PIDs are 1-based, the vector index is 0-based).
struct PidAllocator {
    used: Vec<bool>,
}
impl PidAllocator {
/// Creates a new PID allocator with the specified capacity.
fn new(capacity: usize) -> Self {
PidAllocator {
used: vec![false; capacity],
}
}
/// Allocates a PID from the allocator.
fn alloc(&mut self, _hint: Option<Pid>) -> Result<Pid, Errno> {
for (i, &is_used) in self.used.iter().enumerate() {
if !is_used {
self.used[i] = true;
return Ok(i as Pid + 1);
}
}
Err(1) // No available PID, return error number 1
}
/// Marks the specified PID as used.
fn set_used(&mut self, pid: Pid) {
if pid > 0 && pid <= self.used.len() as Pid {
self.used[(pid - 1) as usize] = true;
}
}
}
/// Represents a PID manager.
///
/// Thin facade over `PidAllocator` that owns the allocator instance.
struct PidManager {
    allocator: PidAllocator,
}
impl PidManager {
    /// Initializes a new PID manager, reserving `init_pid` so it can never
    /// be handed out by `get_unique_pid`.
    fn new(init_pid: Pid) -> Self {
        let mut allocator = PidAllocator::new(MAX_PID as usize);
        // FIX: `set_used` expects the 1-based PID itself (it subtracts 1
        // internally to find the slot). Passing `init_pid - 1` marked the
        // wrong PID as used and left `init_pid` free for reallocation.
        allocator.set_used(init_pid);
        PidManager { allocator }
    }
    /// Returns an unused PID and marks it as used.
    fn get_unique_pid(&mut self) -> Result<Pid, Errno> {
        match self.allocator.alloc(None) {
            Ok(pid) => {
                // Allocator capacity is MAX_PID, so valid PIDs are 1..=MAX_PID.
                debug_assert!(pid <= MAX_PID);
                Ok(pid)
            }
            Err(err) => Err(err),
        }
    }
}
fn main() {
    let mut pid_manager = PidManager::new(300); // Initialize PID manager with an initial PID
    // Request one fresh PID and report the outcome.
    match pid_manager.get_unique_pid() {
        Ok(pid) => println!("Allocated PID: {}", pid),
        Err(err) => println!("Failed to allocate PID, error number: {}", err),
    }
}
package simix;
import clojure.lang.IFn;
import com.sun.jna.Pointer;
import java.io.Closeable;
/**
 * Wrapper around a native HNSW index pointer that counts concurrent users
 * and defers releasing the native memory until all users have stepped out.
 */
public class HNSWWrapper implements Closeable {
    private Pointer ptr;      // native index handle; null once released
    private int nUsers;       // number of threads currently inside use()
    private boolean closing;  // set while close() waits for users to drain
    public HNSWWrapper(Pointer ptr) {
        this.ptr = ptr;
        this.nUsers = 0;
        this.closing = false;
    }
    /**
     * Runs {@code f} with the native pointer while holding a usage
     * reference, so the index cannot be released mid-call.
     */
    public Object use(IFn f) {
        this.stepIn();
        try {
            return f.invoke(this.ptr);
        } finally {
            this.stepOut();
        }
    }
    // Safety net: release native memory if the wrapper is GC'd without close().
    public void finalize() {
        this.close();
    }
    public void close() {
        try {
            synchronized (this) {
                this.closing = true;
                // FIX: wait() may return spuriously or via a notify for a
                // different condition, so the guard must be re-checked in a
                // loop rather than a single 'if'.
                while (this.nUsers > 0) {
                    this.wait();
                }
                if (this.ptr != null) {
                    LibHNSW.hnsw_release_index(this.ptr);
                    this.ptr = null;
                }
                this.closing = false;
                this.notify();
            }
        } catch (InterruptedException ex) {
            throw new SimixException("Thread was interrupted during close", ex);
        }
    }
    // Acquire a usage reference; rejects callers once the index is closed.
    private void stepIn() {
        synchronized (this) {
            if (this.ptr == null) {
                throw new SimixException("Index is closed", null);
            }
            this.nUsers++;
        }
    }
    // Release a usage reference and wake a pending close() when last out.
    private void stepOut() {
        synchronized (this) {
            if (--this.nUsers == 0 && this.closing) {
                this.notify();
            }
        }
    }
}
|
#!/bin/bash
# Convert the sources to Python 3 syntax only when building for a Python 3
# target. FIX: anchor the regex -- the old '=~ 3' matched a '3' anywhere in
# the version string (e.g. "2.3" or a future "13"), not just major version 3.
if [[ "${PY_VER}" =~ ^3 ]]
then
    2to3 -w -n src
fi
$PYTHON -m pip install . --ignore-installed --no-deps -vv
|
// Package memwatch provides a memory watcher channel that trips, or
// terminates the process ("suicides"), when runtime memory usage exceeds the limit.
package memwatch
|
#!/bin/sh
# Stop the docker container(s) whose IDs/names are listed in process.txt.
# NOTE(review): $(cat ...) is left unquoted so several whitespace-separated
# container IDs in the file are passed as separate arguments; confirm that
# is the intent before adding quotes.
exec docker stop $(cat process.txt)
|
#!/bin/sh
# SPDX-License-Identifier: MIT
# Copyright (C) 2021 Roland Csaszar
#
# Project: Fabulous-TEMPLATE
# File: run_sharplint.sh
# NOTE(review): the 'File:' header above looks stale -- this script pushes
# the NuGet package; it does not run a linter.
#
################################################################################
# The Nuget token must be saved using `nuget setapikey` to not need to input it.
# Push every Release-build .nupkg of the template project to nuget.org.
dotnet nuget push ./src/Fabulous-TEMPLATE/bin/Release/Fabulous-TEMPLATE.*.nupkg --source https://api.nuget.org/v3/index.json
|
/*
* Copyright 2011-2018 Branimir Karadzic. All rights reserved.
* License: https://github.com/bkaradzic/bgfx#license-bsd-2-clause
*/
/*
* Reference(s):
* - Based on Virtual Texture Demo by Brad Blanchard
* http://web.archive.org/web/20190103162638/http://linedef.com/virtual-texture-demo.html
*/
uniform vec4 u_vt_settings_1;
uniform vec4 u_vt_settings_2;
#define VirtualTextureSize u_vt_settings_1.x
#define AtlasScale u_vt_settings_1.y
#define BorderScale u_vt_settings_1.z
#define BorderOffset u_vt_settings_1.w
#define MipBias u_vt_settings_2.x
#define PageTableSize u_vt_settings_2.y
SAMPLER2D(s_vt_page_table, 0);
SAMPLER2D(s_vt_texture_atlas, 1);
// Estimate the mipmap level for 'uv' over a texture of dimension 'size'
// from screen-space derivatives. 0.5 * log2(d) == log2(sqrt(d)), i.e. the
// log2 of the longest pixel footprint; squared lengths avoid a sqrt.
float MipLevel( vec2 uv, float size )
{
    vec2 dx = dFdx( uv * size );
    vec2 dy = dFdy( uv * size );
    float d = max( dot( dx, dx ), dot( dy, dy ) );
    return max( 0.5 * log2( d ), 0 );
}
// This function samples the page table and returns the page's
// position (xy) and mip level (z).
vec3 SampleTable( vec2 uv, float mip )
{
    // Snap uv onto the page-table texel origin so the point lookup cannot
    // bleed between neighbouring page entries.
    vec2 offset = fract( uv * PageTableSize ) / PageTableSize;
    return texture2DLod( s_vt_page_table, uv - offset, mip ).xyz;
}
// This function samples from the texture atlas and returns the final color.
// page.xy = page coordinates, page.z = mip level, all stored as bytes
// normalized to [0,1] (hence the *255 decode below).
vec4 SampleAtlas( vec3 page, vec2 uv )
{
    // floor(x * 255 + 0.5) undoes the 0..1 byte quantization.
    float mipsize = exp2( floor( page.z * 255.0 + 0.5 ) );
    // Position within the page, remapped into its interior so the page
    // border texels (used for filtering) are skipped.
    uv = fract( uv * PageTableSize / mipsize );
    uv *= BorderScale;
    uv += BorderOffset;
    vec2 offset = floor( page.xy * 255.0 + 0.5 );
    return texture2D( s_vt_texture_atlas, ( offset + uv ) * AtlasScale );
}
// Ugly brute force trilinear, look up twice and mix
vec4 VirtualTextureTrilinear( vec2 uv )
{
    float miplevel = MipLevel( uv, VirtualTextureSize );
    // Clamp to log2(PageTableSize) - 1 so that mip2 = mip1 + 1 below still
    // addresses a valid page-table level.
    miplevel = clamp( miplevel, 0.0, log2( PageTableSize )-1.0 );
    float mip1 = floor( miplevel );
    float mip2 = mip1 + 1.0;
    float mipfrac = miplevel - mip1;
    vec3 page1 = SampleTable( uv, mip1 );
    vec3 page2 = SampleTable( uv, mip2 );
    vec4 sample1 = SampleAtlas( page1, uv );
    vec4 sample2 = SampleAtlas( page2, uv );
    return mix( sample1, sample2, mipfrac );
}
// Simple bilinear: one page-table lookup at the nearest mip level.
vec4 VirtualTexture( vec2 uv )
{
    float mip = floor( MipLevel( uv, VirtualTextureSize ) );
    mip = clamp( mip, 0, log2( PageTableSize ) );
    vec3 page = SampleTable( uv, mip );
    return SampleAtlas( page, uv );
}
|
package com.scale.invest.data.collection.controller;
import com.scale.invest.data.collection.service.InvestFundDataSourceService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.RestController;
/**
 * <pre>
 * @description: Presentation-layer (REST) controller for invest-fund data sources.
 * @copyright: Copyright (c) 2021 Alex
 * @author: bo.yan
 * @version: 1.0
 * @date: 2021-1-10
 * @time: 21:49:52
 * </pre>
 */
@RestController
public class InvestFundDataSourceController{
    private static final Logger logger = LoggerFactory.getLogger(InvestFundDataSourceController.class);
    // Service is injected but no request mappings exist yet; presumably
    // endpoints are still to be added -- confirm with the service owner.
    @Autowired
    private InvestFundDataSourceService investFundDataSourceService;
}
|
<filename>test/unit/lib/dropin-error.js
const DropinError = require('../../../src/lib/dropin-error');
const BraintreeError = require('braintree-web/lib/braintree-error');
// Unit tests for DropinError: construction from plain objects, strings,
// Errors, other DropinErrors, and BraintreeErrors.
describe('DropinError', () => {
  test('inherits from Error', () => {
    const instance = new DropinError({});
    expect(instance).toBeInstanceOf(DropinError);
  });
  test('sets the name to DropinError', () => {
    const instance = new DropinError({});
    expect(instance.name).toBe('DropinError');
  });
  test('can pass in a object and set the message', () => {
    const instance = new DropinError({
      message: 'Cool message.'
    });
    expect(instance.message).toBe('Cool message.');
  });
  test('can pass in a string and set the message', () => {
    const instance = new DropinError('Cool message.');
    expect(instance.message).toBe('Cool message.');
  });
  test('can pass in an Error object and set the message', () => {
    const instance = new DropinError(new Error('Cool message.'));
    expect(instance.message).toBe('Cool message.');
  });
  test('can pass in a DropinError object and set the message', () => {
    const instance = new DropinError(new DropinError('Cool message.'));
    expect(instance.message).toBe('Cool message.');
  });
  // _braintreeWebError should be captured whether the BraintreeError is
  // passed directly or under the braintreeWebError key.
  test('sets the _braintreeWebError when given a BraintreeError', () => {
    const btError = new BraintreeError({
      message: 'Cool message.',
      code: 'CODE',
      type: 'MERCHANT'
    });
    const instance = new DropinError(btError);
    expect(instance.message).toBe('Cool message.');
    expect(instance._braintreeWebError).toBe(btError);
  });
  test(
    'sets the _braintreeWebError when not given a BraintreeError',
    () => {
      const btError = new BraintreeError({
        message: 'Cool message.',
        code: 'CODE',
        type: 'MERCHANT'
      });
      const instance = new DropinError({
        message: 'Custom message.',
        braintreeWebError: btError
      });
      expect(instance.message).toBe('Custom message.');
      expect(instance._braintreeWebError).toBe(btError);
    }
  );
});
|
#ifndef _z2c_ZParser_h_
#define _z2c_ZParser_h_
#include <Core/Core.h>
using namespace Upp;
class ZSource;
class ZClass;
class Overload;
class Def;
// One node in a doubly linked list of parsing contexts: the classes,
// overload, definition, source and position active at some point.
// NOTE(review): field roles are inferred from their type names only --
// confirm against the parser code that populates them.
class Context {
public:
    ZClass* C1 = nullptr;
    ZClass* C2 = nullptr;
    Point P;
    Overload* O = nullptr;
    Def* D = nullptr;
    ZSource* S = nullptr;
    Context* Next = nullptr;
    Context* Prev = nullptr;
};
// A syntax error bound to the path of the file it occurred in.
class ZSyntaxError {
public:
    String Path;   // file the error was found in
    String Error;  // human-readable error text
    ZSyntaxError(const String& path, const String& error): Path(path), Error(error) {
    }
    // Writes a formatted report for this error, given its context, to 'stream'.
    void PrettyPrint(Context* con, Stream& stream);
};
// Parser for Z sources, extending U++'s CParser with Z-specific numeric
// literals, identifier rules, diagnostics, and conditional-block skipping.
class ZParser: public CParser {
public:
    // Classification of a parsed numeric literal.
    enum NumberType {
        ntInvalid,
        ntInt,
        ntDWord,
        ntLong,
        ntQWord,
        ntFloat,
        ntDouble,
        ntPtrSize,
    };
    ZSource* Source = nullptr;  // source unit being parsed
    String Mode;
    ZParser() {
    }
    ZParser(const char* ptr): CParser(ptr) {
    }
    // Current position as (line, column).
    Point GetPoint() {
        return Point(GetLine(), GetPos().GetColumn());
    }
    String Identify();
    // Diagnostic helpers; each takes the source position it refers to.
    String ErrorString(const Point& p, const String& text);
    void Error(const Point& p, const String& text);
    void Dup(const Point& p, const Point& p2, const String& text, const String& text2 = "");
    void Ambig(const Point& p, const Point& p2, const String& text, const String& text2 = "");
    void Warning(const Point& p, const String& text);
    // Expect* helpers consume the next token or raise a parse error.
    String ExpectId();
    String ExpectZId();
    String ExpectId(const String& id);
    void Expect(char ch);
    void Expect(char ch, char ch2);
    int ExpectNum();
    NumberType ReadInt64(int64& oInt, double& oDub, int& base);
    // True when the next token starts a character constant.
    bool IsCharConst() {
        return term[0] == '\'';
    }
    void SetLine(int l) {
        line = l;
    }
    bool IsZId();
    uint32 ReadChar();
    // Conditional-compilation navigation (if/else/endif blocks).
    bool EatIf();
    bool EatElse();
    bool EatEndIf();
    bool IsElse();
    bool IsEndIf();
    void SkipBlock();
    void ExpectEndStat();
    void EatNewlines();
private:
    // Numeric-literal scanning internals.
    uint64 ReadNumber64Core(Point& p, int base);
    NumberType ReadF(Point& p, int sign, double& oDub);
    NumberType ReadI(Point& p, int sign, int64& oInt);
};
#endif |
# frozen_string_literal: true
require 'rest-gw2/server/action'
require 'jellyfish'
require 'rack'
module RestGW2
CONFIG = ENV['RESTGW2_CONFIG'] || File.expand_path("#{__dir__}/../../.env")
# Parse a dotenv-style file into a {name => value} hash.
# Blank lines and lines starting with '#' are skipped.
# Returns {} when the file does not exist.
def self.extract_env path
  return {} unless File.exist?(path)
  # FIX: split on the first '=' only, so values that themselves contain
  # '=' (tokens, base64 blobs, URLs with query strings) stay intact.
  Hash[File.read(path).strip.squeeze("\n").each_line.map do |line|
    name, value = line.split('=', 2)
    [name, value.chomp] if !line.start_with?('#') && name && value
  end.compact]
end
extract_env(CONFIG).each do |k, v|
ENV[k] ||= v
end
Server = Jellyfish::Builder.app do
use Rack::CommonLogger
use Rack::Chunked
use Rack::ContentLength
use Rack::Deflater
use Rack::ContentType, 'text/html; charset=utf-8'
map '/' do
run ServerAction.new
end
end
end
|
#!/bin/bash
# Demonstration script: conditionals, for/while/until loops.
echo '============='
echo 'IF STATEMENTS'
echo '============='
# basic equality tests
# algebraic
if [ 3 -eq 3 ]; then
    echo 'this should print'
fi
# strings
if [ "hi" == "hi" ]; then
    echo 'strings are equal'
fi
if [ -n "foo" ]; then
    echo 'this WILL print b/c string is not empty'
fi
if [ -z "foo" ]; then
    echo 'this WILL NOT print b/c string is not empty'
elif [ 5 -gt 3 ]; then # demonstrate use of 'elif' here
    echo 'Yes! 5 is greater than 3'
fi
touch myfile.txt
mkdir mydir
# test whether a file exists
if [ -e myfile.txt ]; then
    echo 'the file exists!'
fi
if [ -d mydir ]; then
    echo 'the directory exists!'
fi
rm myfile.txt
rmdir mydir
# test whether a command exists
if [ -z "$(command -v asfoi)" ]; then
    echo 'command asfoi not installed!'
fi
if [ -n "$(command -v curl)" ]; then
    echo 'curl is installed!'
fi
echo '========='
echo 'FOR LOOPS'
echo '========='
# by default, uses $@ (positional parameters)
for i; do echo "Param is: $i"; done
# otherwise, iterate over words, strings, numbers..
for j in "first" "second" "third"; do echo $j; done
# can also use a variable as well as command substitution
for k in $(seq 10); do
    echo "The number is " $k;
done
echo
echo '==========='
echo 'WHILE LOOPS'
echo '==========='
# the syntax is: while CONTROL-COMMAND; do CONSEQUENT-COMMANDS; done
# when the CONTROL-COMMAND fails, the loop exits
i=0
while [ $i -lt 3 ]; do
    echo "The number is $i"
    let i++
done
while true; do
    echo "Press q to quit!"
    read key
    # FIX: quote "$key" -- with the unquoted form, pressing Enter (empty
    # input) expanded to '[ == q ]', which is a test-syntax error.
    if [ "$key" == 'q' ]; then
        break
    else
        echo 'Still running!'
    fi
done
echo '==========='
echo 'UNTIL LOOPS'
echo '==========='
number=10
until [ $number -lt 5 ]; do
    echo $number
    let number--
done
|
from pprint import pprint
from cfg.core import ContextFreeGrammar, Terminal, Nonterminal, Marker
from cfg.table import END_MARKER, ParseTableNormalForm
class GrammarTestCase(object):
    '''Contains a CFG and optionally a parse table.

    The grammar is described by ``sections``, a list of strings of the form
    "LHS -> RHS" (one production per entry). Lowercase RHS symbols become
    terminals; everything else becomes a nonterminal.
    '''
    def __init__(self, sections, filename):
        self.sections = sections
        self.filename = filename  # origin of the sections (kept for reference)
        self.cfg = None
        self.parse_table = None
        # Initialize CFG and parse table if sections are provided
        if sections:
            self.initialize_grammar()
    def initialize_grammar(self):
        '''Build the CFG from self.sections and construct its parse table.'''
        # Initialize the CFG
        self.cfg = ContextFreeGrammar()
        # Parse and add grammar rules from sections
        for section in self.sections:
            if section.startswith('S ->'):
                # NOTE(review): the whole right-hand side is wrapped in ONE
                # Nonterminal here, so 'S -> NP VP' yields a single symbol
                # "NP VP" -- unlike the per-symbol split in the else branch.
                # Confirm this is intentional.
                start_symbol, production = section.split(' -> ')
                self.cfg.add_production(Nonterminal('S'), [Nonterminal(production.strip())])
                self.cfg.set_start_symbol(Nonterminal('S'))
            else:
                nonterminal, production = section.split(' -> ')
                nonterminal = Nonterminal(nonterminal.strip())
                # Lowercase tokens are terminals; all others are nonterminals.
                production = [Terminal(symbol.strip()) if symbol.islower() else Nonterminal(symbol.strip()) for symbol in production.split()]
                self.cfg.add_production(nonterminal, production)
        # Initialize the parse table
        self.parse_table = ParseTableNormalForm(self.cfg)
        self.parse_table.build_parse_table()
# Example usage
sections = [
'S -> NP VP',
'NP -> Det N',
'VP -> V NP',
'Det -> the',
'N -> cat',
'N -> dog',
'V -> chased'
]
filename = 'grammar_test.txt'
test_case = GrammarTestCase(sections, filename)
print(test_case.cfg)
print(test_case.parse_table) |
<gh_stars>1-10
/*
* Gray: A Ray Tracing-based Monte Carlo Simulator for PET
*
* Copyright (c) 2018, <NAME>, <NAME>, <NAME>, <NAME>
*
* This software is distributed under the terms of the MIT License unless
* otherwise noted. See LICENSE for further details.
*
*/
#include "Gray/Graphics/ViewableParallelepiped.h"
#include "Gray/VrMath/Aabb.h"
#include "Gray/VrMath/PolygonClip.h"
// Precompute the normals and plane coefficients for the three pairs of
// parallel faces, from vertex A and its three adjacent vertices B, C, D.
// NOTE(review): per VrMath conventions this code appears to use '*=' as the
// cross product and '^' as the dot product -- confirm before editing.
void ViewableParallelepiped::CalcPlaneInfo()
{
    VectorR3 EdgeAB = VertexB;
    EdgeAB -= VertexA;
    VectorR3 EdgeAC = VertexC;
    EdgeAC -= VertexA;
    VectorR3 EdgeAD = VertexD;
    EdgeAD -= VertexA;
    // Front and back faces
    NormalABC = EdgeAB;
    NormalABC *= EdgeAC;
    NormalABC.Normalize();
    TopCoefABC = (NormalABC)^VertexA;    // Front face coef.
    BottomCoefABC = (NormalABC)^VertexD;    // Back face coef.
    // Ensure Top >= Bottom so each pair forms a well-ordered slab.
    if ( TopCoefABC<BottomCoefABC ) {
        double temp = TopCoefABC;
        TopCoefABC = BottomCoefABC;
        BottomCoefABC = temp;
    }
    // Top and bottom faces
    NormalABD = EdgeAB;
    NormalABD *= EdgeAD;
    NormalABD.Normalize();
    TopCoefABD = (NormalABD)^VertexC;    // Top face coef.
    BottomCoefABD = (NormalABD)^VertexA;    // Bottom face coef.
    if ( TopCoefABD<BottomCoefABD ) {
        double temp = TopCoefABD;
        TopCoefABD = BottomCoefABD;
        BottomCoefABD = temp;
    }
    // Left and right faces
    NormalACD = EdgeAD;
    NormalACD *= EdgeAC;
    NormalACD.Normalize();
    TopCoefACD = (NormalACD)^VertexB;    // Right face coef.
    BottomCoefACD = (NormalACD)^VertexA;    // Left face coef.
    if ( TopCoefACD<BottomCoefACD ) {
        double temp = TopCoefACD;
        TopCoefACD = BottomCoefACD;
        BottomCoefACD = temp;
    }
}
// Clip the ray viewPos + t*viewDir against the slab bounded by the two
// parallel planes (normal, topCoef) and (normal, bottomCoef), updating the
// running entry interval (*maxFront/*frontNum) and exit interval
// (*minBack/*backNum). Returns false when the interval becomes empty.
// planeNum identifies the 'top' plane; planeNum+1 is the 'bottom' plane.
bool DoTwoPlanes( const VectorR3& viewPos, const VectorR3& viewDir,
    const VectorR3 normal, double topCoef, double bottomCoef, int planeNum,
    double *maxFront, int *frontNum, double *minBack, int *backNum,
    double maxHitDistanceAllowed)
{
    double pdotn = (viewPos^normal);
    double udotn = (viewDir^normal);
    if ( udotn > 0.0 ) {
        // Ray travels in the +normal direction.
        if ( pdotn>topCoef ) {
            return false;    // Above top and pointing up
        }
        if ( bottomCoef-pdotn>=0.0 ) {
            double newFront = (bottomCoef-pdotn)/udotn;
            if ( newFront > *maxFront ) {
                if ( newFront > *minBack || newFront > maxHitDistanceAllowed ) {
                    return false;
                }
                *maxFront = newFront;    // Hits bottom from below (from outside)
                *frontNum = planeNum+1;    // +1 for bottom plane
            }
        }
        double newBack = (topCoef-pdotn)/udotn;
        if ( newBack < *minBack ) {
            if ( newBack < *maxFront ) {
                return false;
            }
            *minBack = newBack;    // Hits top from inside
            *backNum = planeNum;
        }
    } else if ( udotn < 0.0 ) {
        // Ray travels in the -normal direction (mirror case).
        if ( pdotn<bottomCoef ) {
            return false;    // Below bottom and pointing down
        }
        if ( topCoef-pdotn<=0.0 ) {
            double newFront = (topCoef-pdotn)/udotn;
            if ( newFront > *maxFront ) {
                if ( newFront > *minBack || newFront > maxHitDistanceAllowed ) {
                    return false;
                }
                *maxFront = newFront;    // Hits top from above (from outside)
                *frontNum = planeNum;
            }
        }
        double newBack = (bottomCoef-pdotn)/udotn;
        if ( newBack < *minBack ) {
            if ( newBack < *maxFront ) {
                return false;
            }
            *minBack = newBack;    // Hits bottom from inside
            *backNum = planeNum+1;    // +1 for bottom plane
        }
    } else if ( pdotn<bottomCoef || pdotn>topCoef ) {
        // Ray parallel to the slab and entirely outside it.
        return false;
    }
    return true;
}
// Same slab clipping as DoTwoPlanes, but also tolerates a degenerate slab:
// when bottomCoef >= topCoef the slab has zero thickness, so the ray
// interval collapses to the single hit distance against the top plane.
bool DoTwoPlanesCheck(
    const VectorR3& viewPos, const VectorR3& viewDir,
    const VectorR3 normal, double topCoef, double bottomCoef, int planeNum,
    double *maxFront, int *frontNum, double *minBack, int *backNum,
    double maxHitDistanceAllowed)
{
    if ( bottomCoef<topCoef ) {
        return DoTwoPlanes( viewPos, viewDir, normal, topCoef, bottomCoef, planeNum,
                            maxFront, frontNum, minBack, backNum, maxHitDistanceAllowed);
    }
    double pdotn = (viewPos^normal);
    double udotn = (viewDir^normal);
    // NOTE(review): udotn may be zero here (ray parallel to the plane),
    // which would divide by zero -- confirm callers exclude that case.
    double hitDist = (topCoef-pdotn)/udotn;
    if ( hitDist<0.0 || hitDist>maxHitDistanceAllowed
            || hitDist<*maxFront || hitDist>*minBack ) {
        return false;
    }
    *maxFront = hitDist;
    *minBack = hitDist;
    return true;
}
// Returns an intersection if found with distance maxDistance
// viewDir must be a unit vector.
// intersectDistance and visPoint are returned values.
// Clips the ray against the three slabs; the surviving [maxFront, minBack]
// interval gives the entry/exit distances.
bool ViewableParallelepiped::FindIntersectionNT (
    const VectorR3& viewPos, const VectorR3& viewDir, double maxDistance,
    double *intersectDistance, VisiblePoint& returnedPoint ) const
{
    double maxFrontDist = -DBL_MAX;
    int frontFaceNum;
    double minBackDist = DBL_MAX;
    int backFaceNum;
    // Face nums: 0,1,2,3,4,5 = front, back, top, bottom, right, left.
    if (!DoTwoPlanes( viewPos, viewDir, NormalABC, TopCoefABC, BottomCoefABC, 0,
                      &maxFrontDist, &frontFaceNum, &minBackDist, &backFaceNum, maxDistance) ) {
        return false;
    }
    if (!DoTwoPlanes( viewPos, viewDir, NormalABD, TopCoefABD, BottomCoefABD, 2,
                      &maxFrontDist, &frontFaceNum, &minBackDist, &backFaceNum, maxDistance) ) {
        return false;
    }
    if (!DoTwoPlanes( viewPos, viewDir, NormalACD, TopCoefACD, BottomCoefACD,4,
                      &maxFrontDist, &frontFaceNum, &minBackDist, &backFaceNum, maxDistance) ) {
        return false;
    }
    double alpha;
    // NOTE(review): the pairing below (front-face hit -> GetMaterialBack(),
    // back-face hit -> GetMaterialFront()) looks inverted -- confirm the
    // VisiblePoint / material conventions before changing anything.
    if ( maxFrontDist>0.0 ) {
        alpha = maxFrontDist;    // Ray starts outside: entry point is the hit
        returnedPoint.SetFrontFace();
        returnedPoint.SetMaterial(ViewableBase::GetMaterialBack());
    } else if ( minBackDist>0.0 && minBackDist<maxDistance ) {
        alpha = minBackDist;     // Ray starts inside: exit point is the hit
        returnedPoint.SetBackFace();
        returnedPoint.SetMaterial(ViewableBase::GetMaterialFront());
    } else {
        return false;
    }
    VectorR3 v = viewDir;
    v *= alpha;
    v += viewPos;        // Intersection point
    *intersectDistance = alpha;
    returnedPoint.SetPosition( v );
    return true;
}
// Project the parallelepiped onto direction u and return its min/max signed
// extents. Exploits the fact that the 8 vertices are A plus any subset of
// the edge vectors (B-A), (C-A), (D-A): each edge's projection contributes
// independently to either the minimum (t<0) or the maximum (t>0).
void ViewableParallelepiped::CalcBoundingPlanes( const VectorR3& u,
                                                 double *minDot, double *maxDot ) const
{
    double startdot = (u^VertexA);
    double mind = startdot;
    double maxd = mind;
    double t;
    t = (u^VertexB)-startdot;
    if ( t<0 ) {
        mind += t;
    } else {
        maxd +=t;
    }
    t = (u^VertexC)-startdot;
    if ( t<0 ) {
        mind += t;
    } else {
        maxd += t;
    }
    t = (u^VertexD)-startdot;
    if ( t<0 ) {
        mind += t;
    } else {
        maxd += t;
    }
    *minDot = mind;
    *maxDot = maxd;
}
// Clip each of the six faces against the bounding AABB, then take the
// bounding box of all surviving vertices. Returns false when nothing of
// the parallelepiped lies inside boundingAABB.
bool ViewableParallelepiped::CalcExtentsInBox( const AABB& boundingAABB, AABB& retAABB ) const
{
    const VectorR3 boxBoundMin = boundingAABB.GetBoxMin();
    const VectorR3 boxBoundMax = boundingAABB.GetBoxMax();
    // Scratch space for the clipped polygons of all six faces.
    VectorR3 VertArray[60];
    VectorR3 deltaAB = GetVertexB();
    VectorR3 deltaAC = GetVertexC();
    VectorR3 deltaAD = GetVertexD();
    const VectorR3& vertexA = GetVertexA();
    deltaAB -= vertexA;
    deltaAC -= vertexA;
    deltaAD -= vertexA;
    int baseCount = 0;
    // NOTE(review): every call below passes GetNormalABC(), even for the
    // left/right and top/bottom faces, which CalcPlaneInfo() associates
    // with NormalACD/NormalABD. Confirm how ClipConvexPolygonAgainstBoundingBox
    // uses the normal before deciding whether this is a bug.
    // Front face
    VertArray[baseCount+0] = GetVertexA();
    VertArray[baseCount+1] = GetVertexB();
    VertArray[baseCount+2] = GetVertexC() + deltaAB;
    VertArray[baseCount+3] = GetVertexC();
    baseCount += ClipConvexPolygonAgainstBoundingBox(4, &(VertArray[baseCount]),
                                                     GetNormalABC(),
                                                     boxBoundMin, boxBoundMax);
    // Back face
    VertArray[baseCount+0] = GetVertexA() + deltaAD;
    VertArray[baseCount+1] = GetVertexB() + deltaAD;
    VertArray[baseCount+2] = GetVertexC() + deltaAB + deltaAD;
    VertArray[baseCount+3] = GetVertexC() + deltaAD;
    baseCount += ClipConvexPolygonAgainstBoundingBox(4, &(VertArray[baseCount]),
                                                     GetNormalABC(),
                                                     boxBoundMin, boxBoundMax);
    // Left face
    VertArray[baseCount+0] = GetVertexA();
    VertArray[baseCount+1] = GetVertexC();
    VertArray[baseCount+2] = GetVertexD() + deltaAC;
    VertArray[baseCount+3] = GetVertexD();
    baseCount += ClipConvexPolygonAgainstBoundingBox(4, &(VertArray[baseCount]),
                                                     GetNormalABC(),
                                                     boxBoundMin, boxBoundMax);
    // Right face
    VertArray[baseCount+0] = GetVertexA() + deltaAB;
    VertArray[baseCount+1] = GetVertexC() + deltaAB;
    VertArray[baseCount+2] = GetVertexD() + deltaAC + deltaAB;
    VertArray[baseCount+3] = GetVertexD() + deltaAB;
    baseCount += ClipConvexPolygonAgainstBoundingBox(4, &(VertArray[baseCount]),
                                                     GetNormalABC(),
                                                     boxBoundMin, boxBoundMax);
    // Bottom face
    VertArray[baseCount+0] = GetVertexA();
    VertArray[baseCount+1] = GetVertexB();
    VertArray[baseCount+2] = GetVertexD() + deltaAB;
    VertArray[baseCount+3] = GetVertexD();
    baseCount += ClipConvexPolygonAgainstBoundingBox(4, &(VertArray[baseCount]),
                                                     GetNormalABC(),
                                                     boxBoundMin, boxBoundMax);
    // Top face
    VertArray[baseCount+0] = GetVertexA() + deltaAC;
    VertArray[baseCount+1] = GetVertexB() + deltaAC;
    VertArray[baseCount+2] = GetVertexD() + deltaAB + deltaAC;
    VertArray[baseCount+3] = GetVertexD() + deltaAC;
    baseCount += ClipConvexPolygonAgainstBoundingBox(4, &(VertArray[baseCount]),
                                                     GetNormalABC(),
                                                     boxBoundMin, boxBoundMax);
    int numClippedVerts = baseCount;
    if ( numClippedVerts == 0 ) {
        return false;
    }
    VectorR3 * extentsMin = &retAABB.GetBoxMin();
    VectorR3 * extentsMax = &retAABB.GetBoxMax();
    CalcBoundingBox(numClippedVerts, VertArray, extentsMin, extentsMax);
    // Next six lines to avoid roundoff errors putting extents outside the bounding box
    ClampRange( &extentsMin->x, boxBoundMin.x, boxBoundMax.x );
    ClampRange( &extentsMin->y, boxBoundMin.y, boxBoundMax.y );
    ClampRange( &extentsMin->z, boxBoundMin.z, boxBoundMax.z );
    ClampRange( &extentsMax->x, boxBoundMin.x, boxBoundMax.x );
    ClampRange( &extentsMax->y, boxBoundMin.y, boxBoundMax.y );
    ClampRange( &extentsMax->z, boxBoundMin.z, boxBoundMax.z );
    return true;
}
// Fast slab-based ray/box rejection test against three (possibly
// degenerate) slabs given by their normals and min/max plane offsets.
// On success returns the entry and exit distances of the ray interval.
bool ViewableParallelepiped::QuickIntersectTest(
    const VectorR3& viewPos, const VectorR3& viewDir, double maxDistance,
    double *intersectDistanceIn, double *intersectDistanceOut,
    const VectorR3& NormalA, double MinDotA, double MaxDotA,
    const VectorR3& NormalB, double MinDotB, double MaxDotB,
    const VectorR3& NormalC, double MinDotC, double MaxDotC )
{
    double maxFrontDist = -DBL_MAX;
    int frontFaceNum;
    double minBackDist = DBL_MAX;
    int backFaceNum;
    // Face nums: 0,1,2,3,4,5 = A-top, A-bottom, B-top, B-bottom, C-top, C-bottom.
    if (!DoTwoPlanesCheck( viewPos, viewDir, NormalA, MaxDotA, MinDotA, 0,
                           &maxFrontDist, &frontFaceNum, &minBackDist, &backFaceNum, maxDistance) ) {
        return false;
    }
    if (!DoTwoPlanesCheck( viewPos, viewDir, NormalB, MaxDotB, MinDotB, 2,
                           &maxFrontDist, &frontFaceNum, &minBackDist, &backFaceNum, maxDistance) ) {
        return false;
    }
    if (!DoTwoPlanesCheck( viewPos, viewDir, NormalC, MaxDotC, MinDotC, 4,
                           &maxFrontDist, &frontFaceNum, &minBackDist, &backFaceNum, maxDistance) ) {
        return false;
    }
    *intersectDistanceIn = maxFrontDist;
    *intersectDistanceOut = minBackDist;
    // Sanity check: a surviving interval should never exit behind the ray origin.
    assert( minBackDist>=0.0 );
    return true;
}
// Convenience overload: identical quick rejection test, but the caller only
// needs the entry distance; the exit distance is computed and discarded.
bool ViewableParallelepiped::QuickIntersectTest(
    const VectorR3& viewPos, const VectorR3& viewDir, double maxDistance,
    double *intersectDistance,
    const VectorR3& NormalA, double MinDotA, double MaxDotA,
    const VectorR3& NormalB, double MinDotB, double MaxDotB,
    const VectorR3& NormalC, double MinDotC, double MaxDotC )
{
    double exitDistanceIgnored;
    return QuickIntersectTest(
        viewPos, viewDir, maxDistance,
        intersectDistance, &exitDistanceIgnored,
        NormalA, MinDotA, MaxDotA,
        NormalB, MinDotB, MaxDotB,
        NormalC, MinDotC, MaxDotC );
}
|
/// <reference types="react" />
/**
 * Values exposed through `AppContext`.
 */
export interface Props {
    /**
     * Exit (unmount) the whole Ink app.
     * The optional `error` is presumably surfaced as the exit reason --
     * confirm against Ink's `render()` documentation.
     */
    readonly exit: (error?: Error) => void;
}
/**
 * `AppContext` is a React context, which exposes a method to manually exit the app (unmount).
 */
declare const AppContext: import("react").Context<Props>;
export default AppContext;
|
<gh_stars>10-100
package com.imooc.scala.stream.transformation
import org.apache.flink.streaming.api.scala.{DataStream, StreamExecutionEnvironment}
import org.apache.flink.api.scala._
/**
 * Demonstrates Flink stream (re)partitioning rules.
 * Created by xuwei
 */
object StreamPartitionOpScala {
  def main(args: Array[String]): Unit = {
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    // NOTE: the implicit-conversion import lives at the top of the file,
    // because by default it would only be in effect inside main and would not
    // apply to the extracted shuffleOp below (otherwise the line would have to
    // be repeated inside shuffleOp):
    //import org.apache.flink.api.scala._
    // NOTE: by default operator parallelism equals the CPU count of the machine.
    // fromCollection has a parallelism of 1,
    val text = env.fromCollection(Array(1, 2, 3, 4, 5, 6, 7, 8, 9, 10))
    // use the shuffle partitioning rule
    //shuffleOp(text)
    // use the rebalance partitioning rule
    //rebalanceOp(text)
    // use the rescale partitioning rule
    //rescaleOp(text)
    // use the broadcast partitioning rule
    //broadcastOp(text)
    // custom partitioning rule: partition by whether a value is odd or even.
    // NOTE: although print runs with parallelism 4, the custom partitioner only
    // routes data to 2 of the subtasks, so 2 of them stay idle.
    //custormPartitionOp(text)
    env.execute("StreamPartitionOpScala")
  }
  private def custormPartitionOp(text: DataStream[Int]) = {
    text.map(num => num)
      .setParallelism(2) // set map parallelism to 2
      //.partitionCustom(new MyPartitionerScala,0)// this overload is deprecated
      .partitionCustom(new MyPartitionerScala, num => num) // official docs recommend a keySelector
      .print()
      .setParallelism(4) // set print parallelism to 4
  }
  private def broadcastOp(text: DataStream[Int]) = {
    text.map(num => num)
      .setParallelism(2) // set map parallelism to 2
      .broadcast
      .print()
      .setParallelism(4) // set print parallelism to 4
  }
  private def rescaleOp(text: DataStream[Int]) = {
    text.map(num => num)
      .setParallelism(2) // set map parallelism to 2
      .rescale
      .print()
      .setParallelism(4) // set print parallelism to 4
  }
  private def rebalanceOp(text: DataStream[Int]) = {
    text.map(num => num)
      .setParallelism(2) // set map parallelism to 2
      .rebalance
      .print()
      .setParallelism(4) // set print parallelism to 4
  }
  private def shuffleOp(text: DataStream[Int]) = {
    // fromCollection is fixed at parallelism 1, so an extra operator (map here)
    // is needed before the parallelism can be changed.
    text.map(num => num)
      .setParallelism(2) // set map parallelism to 2
      .shuffle
      .print()
      .setParallelism(4) // set print parallelism to 4
  }
}
|
#!/bin/sh
# Prefer Homebrew's util-linux binaries over the system-provided ones by
# prepending its bin and sbin directories to PATH.
export PATH="/usr/local/opt/util-linux/bin:$PATH"
export PATH="/usr/local/opt/util-linux/sbin:$PATH"
|
import store, {useAppSelector} from "store";
import {ApplicationState} from "types/store";
import {ActionFactory} from "store/action";
import "../BoardSettings/BoardSettings.scss";
import {useTranslation} from "react-i18next";
import {BoardOption} from "./BoardOption";
import {BoardOptionButton} from "./BoardOptionButton";
import {BoardOptionToggle} from "./BoardOptionToggle";
/**
 * Board option that toggles whether notes authored by other users are
 * visible, by dispatching an editBoard action with the flipped flag.
 */
export const ShowOtherUsersNotesOption = () => {
  const {t} = useTranslation();
  const {board} = useAppSelector((applicationState: ApplicationState) => ({
    board: applicationState.board.data!,
  }));
  const showOthersNotes = board.showNotesOfOtherUsers;
  const toggleVisibility = () => {
    store.dispatch(ActionFactory.editBoard({id: board.id, showNotesOfOtherUsers: !showOthersNotes}));
  };
  return (
    <BoardOption data-testid="note">
      <BoardOptionButton label={showOthersNotes ? t("ShowOtherUsersNotesOption.hide") : t("ShowOtherUsersNotesOption.show")} onClick={toggleVisibility}>
        <BoardOptionToggle active={showOthersNotes} />
      </BoardOptionButton>
    </BoardOption>
  );
};
|
const todoLength = document.querySelector('strong');
// Filter handler for the "Active" tab: mark the clicked tab selected,
// show all todos, then re-hide the completed ones and refresh the counter.
export const viewActive = (e) => {
  // FIX: use the handler's own event parameter; the original read the
  // deprecated global `event`, which is undefined in module/strict scope.
  const $selected = e.target.closest('a');
  //resetScreen();
  // FIX: assign via className -- assigning a string to classList relies on
  // its PutForwards setter and reads as a mistake.
  $selected.className = 'selected';
  document.querySelectorAll('.todo-list > li').forEach((li) => {
    li.classList.remove('hidden');
    if (li.className === 'completed') {
      li.className = 'completed hidden';
    }
  });
  // NOTE(review): todoCount is not defined in this module -- presumably
  // provided elsewhere; confirm it is imported where this runs.
  todoCount(todoLength);
  //resetScreen();
};
<reponame>fmi-basel/improc<gh_stars>0
import numpy as np
from scipy.ndimage.filters import gaussian_filter
from scipy.ndimage import find_objects
from skimage.transform import rescale
def resample_labels(labels, factor):
    '''Resample labels one by one with a gaussian kernel.

    Each label's binary mask is rescaled with linear interpolation and
    re-thresholded at 0.5, which keeps boundaries smoother than resampling
    the whole integer label image with nearest-neighbour.

    Args:
        labels: integer label image (0 = background).
        factor: scalar or per-axis rescaling factor(s).

    Returns:
        int16 label image of shape round(labels.shape * factor).
    '''
    # TODO check alignment for float re-scaling factors
    factor = np.broadcast_to(np.asarray(factor), labels.ndim)
    resampled_labels = np.zeros(np.round(labels.shape * factor).astype(int),
                                dtype=np.int16)
    locs = find_objects(labels)
    locs = [loc for loc in locs if loc is not None]
    # ignore label 0 (background)
    for l, loc in zip(filter(None, np.unique(labels)), locs):
        # Pad each object's slice so interpolation has context at the edges.
        margin = np.ceil(factor.max()).astype(int)
        loc = tuple(
            slice(max(0, s.start - margin), s.stop + margin) for s in loc)
        mask = labels[loc] == l
        resampled_mask = rescale(mask.astype(np.float32),
                                 scale=factor,
                                 multichannel=False,
                                 anti_aliasing=False,
                                 preserve_range=True,
                                 order=1)
        # Map the source slice into resampled coordinates, keeping the
        # rescaled mask's own shape to avoid off-by-one from rounding.
        loc_resampled = tuple(
            slice(
                np.floor(f * s.start).astype(int),
                np.floor(f * s.start).astype(int) + s_size)
            for s, f, s_size in zip(loc, factor, resampled_mask.shape))
        resampled_labels[loc_resampled][resampled_mask > 0.5] = l
    return resampled_labels
def match_spacing(img,
                  src_spacing,
                  dst_spacing='isotropic',
                  image_type='greyscale'):
    '''Rescale `img` so that its voxel spacing matches `dst_spacing`.

    Args:
        img: input image/volume.
        src_spacing: per-axis voxel spacing of `img`.
        dst_spacing: target per-axis spacing, or 'isotropic' to rescale
            every axis to the finest (smallest) source spacing.
        image_type: one of 'greyscale', 'label_nearest' (alias 'nearest'),
            'label_interp'.

    Returns:
        the rescaled image, cast back to the input dtype (except for
        'label_interp', which returns the int16 output of resample_labels).

    Raises:
        ValueError: if `image_type` is not one of the supported values.
    '''
    input_dtype = img.dtype
    src_spacing = np.asarray(src_spacing)
    if isinstance(dst_spacing, str) and dst_spacing == 'isotropic':
        scale = src_spacing / src_spacing.min()
    else:
        dst_spacing = np.asarray(dst_spacing)
        scale = np.array(src_spacing) / np.array(dst_spacing)
    if image_type == 'greyscale':
        # Linear interpolation with anti-aliasing for intensity images.
        return rescale(img,
                       scale=scale,
                       multichannel=False,
                       anti_aliasing=True,
                       preserve_range=True,
                       order=1).astype(input_dtype)
    elif image_type == 'label_interp':
        # Per-label smooth resampling (see resample_labels).
        return resample_labels(img, scale)
    elif image_type in ['label_nearest', 'nearest']:
        # Nearest-neighbour keeps label values exact.
        return rescale(img,
                       scale=scale,
                       multichannel=False,
                       anti_aliasing=False,
                       preserve_range=True,
                       order=0).astype(input_dtype)
    else:
        raise ValueError(
            'image type "{}" not supported, expected "greyscale", "label_nearest" or "label_interp"'
            .format(image_type))
|
/*
OOP345
Workshop 2
Title : Compound Types
Date : 03/01/17
Author : <NAME>
Student# : 038515151
Email : <EMAIL>
*/
#include <iostream>
#include <fstream>
#include <cctype>
#include <iomanip>
#include <string>
#include "Stations.h"
#include "Station.h"
using namespace std;
namespace w2 {
Stations::Stations(char * filename) {
fstream is(filename);
//is.open(filename, ios::in);
if (is.is_open()==false) {
cerr << "Not possible to open the file: " << filename << endl;
}
string line;
is >> stacount;
is.ignore(2, ';');
cout << "Station Count = " << stacount << endl;
string stationName;
stationActual = new Station[stacount];
unsigned stupass = 0, adultpass = 0;
for (size_t i = 0; i < stacount; i++) {
getline(is, stationName, ';');
is >> stupass;
is >> adultpass;
stationActual[i].set(stationName, stupass, adultpass);
}
is.close();
}
void Stations::update() const {
cout << "Passes Sold :" << endl << "-------------";
for (int i = 0; i < stacount; i++) {
int student, adult;
cout << stationActual[i].getName();
cout << endl;
cout << "Student Passes sold : ";
cin >> student;
cout << "Adult Passes sold : ";
cin >> adult;
stationActual[i].update(STUDENT_PASS, -student);
stationActual[i].update(ADULT_PASS, -adult);
}
}
// Prompts the operator for the number of passes ADDED at each station
// and credits the station's stock accordingly.
void Stations::restock() const {
	cout << "Passes Added :" << endl << "--------------" << endl;
	for (int idx = 0; idx < stacount; idx++) {
		int studentAdded = 0;
		int adultAdded = 0;
		cout << stationActual[idx].getName() << endl;
		cout << "Student Passes added : ";
		cin >> studentAdded;
		cout << "Adult Passes added : ";
		cin >> adultAdded;
		stationActual[idx].update(STUDENT_PASS, studentAdded);
		stationActual[idx].update(ADULT_PASS, adultAdded);
	}
}
void Stations::report() const {
cout << "Passes in Stock : Student Adult" << endl;
cout << "-------------------------------" << endl;
for (int i = 0; i < stacount; i++) {
cout << left << setw(19) << stationActual[i].getName()
<< setw(6) << stationActual[i].inStock(STUDENT_PASS)
<< setw(6) << stationActual[i].inStock(ADULT_PASS)
<< endl;
}
}
// Releases the dynamically allocated station array.
// No guard needed: delete[] on a null pointer is a no-op.
Stations::~Stations() {
	delete[] stationActual;
}
}
|
#
# Only init fasd if it is actually installed; the original comment
# promised this but never checked, so a missing fasd made the '-nt'
# test below compare an empty string against the cache file.
if command -v fasd >/dev/null 2>&1; then
  fasd_cache="$HOME/.fasd-init-bash"
  # Regenerate the cache when the fasd binary is newer than the cache
  # or the cache is empty.
  # Fixed: the init text used to be eval'd directly while eval's (empty)
  # stdout was redirected into the cache — the cache stayed empty and was
  # rebuilt on every shell start. Write the init script to the cache and
  # source it, per the upstream fasd README snippet.
  if [ "$(command -v fasd)" -nt "$fasd_cache" ] || [ ! -s "$fasd_cache" ]; then
    fasd --init posix-alias zsh-hook zsh-ccomp zsh-ccomp-install zsh-wcomp zsh-wcomp-install >| "$fasd_cache"
  fi
  source "$fasd_cache"
  unset fasd_cache
fi
# NOTE(review): 'unalias -m' and the zsh-* init options are zsh features
# although the cache name says bash — confirm which shell sources this.
unalias -m "s" 2>/dev/null
# jump to recently used items
alias a='fasd -a' # any
alias ss='fasd -si' # show / search / select
alias d='fasd -d' # directory
alias f='fasd -f' # file
alias z='fasd_cd -d' # cd, same functionality as j in autojump
alias zz='fasd_cd -d -i' # interactive directory jump
|
public class Connector
{
    // Other members and properties are not shown for brevity
    /// <summary>
    /// Returns 'true' if a connection is attached to the connector.
    /// The other end of the connection may or may not be attached to a node.
    /// </summary>
    /// <remarks>
    /// Fixed: AttachedConnections is documented below as possibly null,
    /// so the count is read via the null-conditional operator to avoid a
    /// NullReferenceException (null?.Count > 0 evaluates to false).
    /// </remarks>
    public bool IsConnectionAttached => AttachedConnections?.Count > 0;
    /// <summary>
    /// The connections that are attached to this connector, or null if no connections are attached.
    /// </summary>
    public List<Connection> AttachedConnections { get; private set; }
    // Other members and properties are not shown for brevity
}
#!/bin/bash
THIS_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
source $THIS_DIR/../utils.sh
# Builds a crontab file scheduling the update and log-rotation jobs,
# echoes it for logging, and installs it for user 'pi' when running on a
# Raspberry Pi. All path expansions are quoted so paths with spaces work.
function main() {
  local crontabFile="$THIS_DIR/crontab"
  local logFile="$THIS_DIR/cron.log"
  local updateAll="$THIS_DIR/../../update.sh >> $logFile 2>&1"
  local logRotate="$THIS_DIR/logRotate.sh >> $logFile 2>&1"
  # update every 5 minutes; rotate logs every 10 minutes offset by 9
  echo "*/5 * * * * $updateAll" > "$crontabFile"
  echo "9-59/10 * * * * $logRotate" >> "$crontabFile"
  echo "<crontab>"
  cat "$crontabFile"
  echo "</crontab>"
  if isRaspberryPi; then
    crontab -u pi "$crontabFile"
  fi
}
runMain "$@"
|
#!/bin/bash
#SBATCH --account=def-lombaert
#SBATCH --gres=gpu:1              # Number of GPUs (per node)
#SBATCH --cpus-per-task=8         # CPU cores/threads
#SBATCH --mem=92G                 # memory (per node)
#SBATCH --time=05-00:00           # time (DD-HH:MM)
#SBATCH --mail-user=pierre-luc.delisle@live.com
# Fixed: ALL already includes BEGIN/END/FAIL/REQUEUE, so the individual
# mail-type directives were redundant and have been collapsed.
#SBATCH --mail-type=ALL
# Fixed: two conflicting --output directives were given (%x-%j.out and a
# fixed name); sbatch honours the last occurrence, so only the fixed
# file name that actually took effect is kept.
#SBATCH --output=UNet_canada_triple_dataset_standardization.out
#SBATCH --job-name=UNet_canada_triple_dataset_standardization
# Log GPU state, activate the virtualenv, then launch training.
nvidia-smi
source /home/pld2602/venv/bin/activate
CUDA_VISIBLE_DEVICES=0 python /project/def-lombaert/pld2602/code/deepNormalizev5/main_cc_unet.py --config=/project/def-lombaert/pld2602/code/deepNormalizev5/deepNormalize/experiments/experiments_canada/UNet/data_augmentation/config_data_augmentation_triple_dataset.yaml
#!/bin/bash
# SPDX-License-Identifier: Apache-2.0
# The model name is passed as an argument -n name
source $(dirname $0)/xrs.sh
source $(dirname $0)/name_flag.sh
xrs_print_status_message "Creating model from meshroom for ${name} with model in ${MODEL_DIR}"
# Create the blender folder if it does not exist
mkdir -p "${MODEL_DIR}/${name}/blender"
# 1. Import cameras from meshroom
# Get the name of the directory with the camera data - meshroom creates a hash
sfm_hash=$(ls -t -I "@*" "${MODEL_DIR}/${name}/meshroom/MeshroomCache/StructureFromMotion/" | grep -m 1 "")
xrs_print_status_message "Extracting cameras from ${sfm_hash}"
# Using Blender 2.78 for .abc camera import because it is broken in 2.79/2.80
blender278 --python-exit-code 1 -noaudio --python "${BLENDER_3XR_DIR}/init_model_with_meshroom_cameras.py" -- ${MODEL_DIR} ${name} ${sfm_hash}
#blender278 --background --python-exit-code 1 -noaudio --python "${BLENDER_3XR_DIR}/init_model_with_meshroom_cameras.py" -- ${MODEL_DIR} ${name} ${sfm_hash}
# 2. Import Mesh
# Get the name of the directory with the textured mesh - meshroom creates a hash
texture_hash=$(ls -t -I "@*" "${MODEL_DIR}/${name}/meshroom/MeshroomCache/Texturing/" | grep -m 1 "")
# Fixed: this message previously reported ${sfm_hash}, but step 2 imports
# from the Texturing cache directory named by ${texture_hash}.
xrs_print_status_message "Importing mesh from ${texture_hash}"
# After the .blend file is created, open it with Blender 2.80 to finish init
#blender ${MODEL_DIR}/${name}/blender/${name}.blend --background --python-exit-code 1 -noaudio --python "${BLENDER_3XR_DIR}/init_model_from_meshroom.py" -- ${MODEL_DIR} ${name} ${texture_hash}
# 3. Rotate mesh based on camera rings
#blender ${MODEL_DIR}/${name}/blender/${name}.blend --background --python-exit-code 1 -noaudio --python "${BLENDER_3XR_DIR}/center_scan_from_cameras.py"
|
package avardstock
import (
"context"
"encoding/csv"
"encoding/json"
"errors"
"fmt"
"github.com/jinzhu/gorm"
. "github.com/qasemt/helper"
"golang.org/x/sync/semaphore"
_ "golang.org/x/sync/semaphore"
"io/ioutil"
"os"
"path"
"strconv"
"strings"
"sync"
"time"
)
// floatFromString converts a raw interface value expected to hold the
// string form of a float. By design it is best-effort: non-string values
// and unparsable strings yield (0, nil) rather than an error (the
// commented-out error paths were deliberately disabled upstream).
func floatFromString(raw interface{}) (float64, error) {
	str, ok := raw.(string)
	if !ok {
		return 0, nil
	}
	if flt, err := strconv.ParseFloat(str, 64); err == nil {
		return flt, nil
	}
	return 0, nil
}
func timeFromUnixTimestampFloat(raw interface{}) (time.Time, error) {
ts, ok := raw.(float64)
if !ok {
return time.Time{}, nil
}
return time.Unix(0, int64(ts)*int64(time.Millisecond)), nil
}
// Rahavard_Data is a single OHLCV candle as decoded from the provider's
// JSON feed. Time is a unix timestamp in milliseconds (see
// timeFromUnixTimestampFloat above).
type Rahavard_Data struct {
	AssetID interface{} `json:"asset_id"`
	Time    float64     `json:"time"`
	O       float64     `json:"open"`
	H       float64     `json:"high"`
	L       float64     `json:"low"`
	C       float64     `json:"close"`
	V       float64     `json:"volume"`
}

// dbItem pairs an open database handle with its file path and the mutex
// guarding it.
// NOTE(review): only referenced from commented-out code in make();
// appears to be a leftover of a per-path lock registry.
type dbItem struct {
	db    *gorm.DB
	p     string
	mutex *sync.Mutex
}

// IStockProvider is the contract a market-data backend implements:
// fetching raw candles (downloadAsset), persisting/exporting one query
// (make) and driving a full synchronisation run (Run).
type IStockProvider interface {
	make(sq StockQuery) error
	downloadAsset(sq StockQuery, item TimeRange) ([]StockFromWebService, error)
	/*
		closeMyDb(d *gorm.DB)
		getDateRangeYears(duration time.Duration, end time.Time) []TimeRange
		SyncDb(wl *WatchListItem) error
		ReadJsonWatchList() (*WatchListItem, error)
		SyncStockList(dbLock *sync.Mutex) error
		OutStockList(dbLock *sync.Mutex)
		avardAssetProcess(parentWaitGroup *sync.WaitGroup, readFromLast bool, assetCode string, nameEn string, isIndex bool, provider EProvider) error
	*/
	Run(readfromLast bool, isSeq bool, timer_minute *int64) error
}

// StockProvider is the concrete provider state shared by the methods
// below.
// NOTE(review): every method uses a value receiver, so HttpLock (a
// sync.Mutex) is copied on each call — `go vet` flags this; confirm it
// is not relied on for real mutual exclusion across calls.
type StockProvider struct {
	IStockProvider                   // embedded interface (partial implementation)
	Provider        EProvider        // which backend (e.g. Binance)
	FolderStoreMode EFolderStoreMode // CSV layout: per-timeframe folder or flat
	IsSeqRunProcess bool             // true: run jobs sequentially instead of goroutines
	_WatchListItem  *WatchListItem   // parsed watchList.json
	HttpLock        sync.Mutex
	sem             *semaphore.Weighted // caps concurrent asset jobs (10, set in Run)
}
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
// procMake dispatches one make() call either synchronously or on its own
// goroutine, depending on IsSeqRunProcess. In async mode the query's
// WaitGroup is incremented here and released inside make().
func (a StockProvider) procMake(sq StockQuery) {
	if a.IsSeqRunProcess {
		a.make(sq)
		return
	}
	sq.WaitGroupobj.Add(1)
	go a.make(sq)
}
// make downloads candles for one asset/timeframe query, appends the new
// rows to the per-asset database and re-exports the full series to a CSV
// file under the cache root. In async mode the caller's WaitGroup is
// signalled on return.
func (a StockProvider) make(sq StockQuery) error {
	if !a.IsSeqRunProcess {
		defer sq.WaitGroupobj.Done()
	}
	var fullPath string
	//:::::::::::::::::::::::::::::::::::::::::
	// Most recent row already stored; stays zero-valued when the
	// database is empty (ID == 0 is the "no data yet" marker below).
	var last = StockFromWebService{
		ID:        0,
		AssetId:   "",
		TimeFrame: 0,
		TypeChart: 0,
		Time:      0,
		O:         0,
		H:         0,
		L:         0,
		C:         0,
		V:         0,
	}
	// Index assets use an "i"-suffixed database name so they do not
	// collide with a plain asset of the same code.
	var dbname string = sq.Stock.AssetCode
	if sq.Stock.IsIndex {
		dbname = fmt.Sprintf("%si", sq.Stock.AssetCode)
	}
	db, fullPath, er := DatabaseInit(dbname, sq.TimeFrame.ToString())
	if er != nil {
		return errors.New(fmt.Sprintf("err:%v %v", er, fullPath))
	}
	// Fixed: the handle was deferred-closed twice (with a block of dead,
	// commented-out lock-registry code between the two defers); one
	// deferred close is sufficient.
	defer a.closeMyDb(db)
	//::::::::::::::::::::::::::::::::::::::::: Get LOOP FROM WEB SERVICE
	var times []TimeRange
	var it = TimeRange{}
	if sq.ReadfromLast {
		//::::::::::::::::::::::::::::::::::::::::: Get LAst RECORD FROM DATABASE
		e := getLastRecord(db, sq.DBLock, sq.Stock.AssetCode, sq.TimeFrame.ToMinuth(), sq.TypeChart, &last)
		if GetVerbose() {
			fmt.Printf("last record -> timeframe : %v time : %v\n", sq.TimeFrame.ToString2(), UnixTimeToTime(last.Time).ToString())
		}
		if e != nil {
			return e
		}
		if last.ID == 0 {
			// Empty database: start the window at EndTime+Duration
			// (Duration is negative, i.e. a look-back).
			it.Begin = sq.EndTime.Add(sq.Duration)
		} else {
			if last.Time == 0 {
				return errors.New("last time not valid ")
			}
			// Resume from the last stored candle (ms -> ns).
			it.Begin = time.Unix(0, (last.Time)*int64(time.Millisecond))
		}
		it.End = sq.EndTime
		times = append(times, it)
	} else {
		// Full backfill: one range per calendar year.
		t := a.getDateRangeYears(sq.Duration, sq.EndTime)
		times = append(times, t...)
	}
	var itemsRaws []StockFromWebService
	if a.Provider == Binance {
		for _, h := range times {
			// Binance caps a single response, so split each range into
			// windows of at most ~500 bars.
			l := a.getDateRangeBy500Hours(h.Begin, h.End, sq.TimeFrame)
			for _, h1 := range l {
				if GetVerbose() {
					fmt.Printf("_______________________\n")
				}
				raws, e := a.downloadAsset(sq, h1)
				if e != nil {
					return e
				}
				itemsRaws = append(itemsRaws, raws...)
			}
		}
	} else {
		return errors.New("no selected")
	}
	//::::::::::::::::::::::::::::::::::::::::: INSERT TO DATABASE
	{
		fmt.Println(a.Provider.ToString(), "->>", "Type", sq.TypeChart.ToTypeChartStr(), "asset ", sq.Stock.NameEn, "time frame ", sq.TimeFrame.ToString(), "load from net : ", len(itemsRaws))
		if len(itemsRaws) > 0 {
			// NOTE(review): InsertStocks' error (if any) is discarded
			// here, as in the original code.
			InsertStocks(db, sq.DBLock, last, sq.Stock.IsIndex, itemsRaws, sq.Stock.AssetCode, sq.TimeFrame, sq.TypeChart)
		}
	}
	//::::::::::::::::::::::::::::::::::::::::: LOAD FROM DATABASE AND OUT TO CSV
	{
		itemsRaw, err := getRecordesStock(db, sq.DBLock, sq.Stock.AssetCode, sq.TimeFrame, sq.TypeChart)
		if err != nil {
			return errors.New(fmt.Sprintf("get Stocks is failed: %v ", err))
		}
		var itemsFinal []StockItem
		for _, k := range itemsRaw {
			var v StockItem
			time1 := time.Unix(0, int64(k.Time)*int64(time.Millisecond))
			v.D = UnixTimeStrToFormatDT(time1, true, sq.TimeFrame)
			v.T = UnixTimeStrToFormatDT(time1, false, sq.TimeFrame)
			v.O = k.O
			v.H = k.H
			v.L = k.L
			v.C = k.C
			v.V = k.V
			itemsFinal = append(itemsFinal, v)
		}
		if len(itemsFinal) > 0 {
			var dirCachePath string
			var fileName string = ""
			switch a.Provider {
			case Binance:
				{
					if a.FolderStoreMode == ByTimeFrame {
						dirCachePath = path.Join(GetRootCache(), "crypto", sq.Stock.AssetCode)
					} else {
						dirCachePath = path.Join(GetRootCache(), "crypto")
					}
					fileName = fmt.Sprintf("%v_%v.csv", strings.ToLower(sq.Stock.AssetCode), strings.ToLower(sq.TimeFrame.ToString2()))
				}
			}
			if !OutToCSVFile(itemsFinal, dirCachePath, fileName, true) {
				return errors.New("get asset daily >>> out to csv failed")
			}
		}
	}
	return nil
}
// closeMyDb closes a gorm handle, tolerating nil so it is safe to defer
// unconditionally.
func (a StockProvider) closeMyDb(d *gorm.DB) {
	if d == nil {
		return
	}
	d.Close()
}
// getDateRangeYears splits the span [end+duration, end] (duration is a
// negative look-back) into calendar-year TimeRanges: one entry per year,
// each covering Jan 1 00:00:00 through the last instant of Dec 31
// (nanosecond argument int(time.Second-time.Nanosecond) = 999999999).
// NOTE(review): "diff + 1" adds one extra year, so the ranges can extend
// one year beyond 'end' — presumably a deliberate boundary margin.
func (a StockProvider) getDateRangeYears(duration time.Duration, end time.Time) []TimeRange {
	day_rang := []TimeRange{}
	start := end.Add(duration) // duration is negative in all callers here
	diff := end.Sub(start).Hours() / 8760 //8760 hour = years
	diff = diff + 1
	for i := 0; i <= int(diff); i++ {
		var tt = start.AddDate(i, 0, 0) // i whole years after start
		var d1 TimeRange
		d1.File_name = TimeToString(tt, "yyyymmdd") + ".csv"
		y, _, _ := tt.Date()
		d1.Begin = time.Date(y, 1, 1, 0, 0, 0, 0, tt.Location())
		d1.End = time.Date(y, 12, 31, 23, 59, 59, int(time.Second-time.Nanosecond), tt.Location())
		day_rang = append(day_rang, d1)
	}
	return day_rang
}
// getDateRangeBy500Hours chops [start, end] into consecutive windows of
// at most ~500 candles of the given timeframe (the web service caps a
// single request at about 500 bars). Windows whose start lies in the
// future are dropped.
// NOTE(review): the window COUNT is sized with 499 but each window SPANS
// 500 bars — presumably a one-bar overlap/safety margin; confirm against
// the provider's API limit.
// NOTE(review): an ETimeFrame value not covered by the switches leaves
// diff == 0 and t2 at the zero time — verify callers only pass
// M15/H1/H2/H4/D1.
func (a StockProvider) getDateRangeBy500Hours(start time.Time, end time.Time, frame ETimeFrame) []TimeRange {
	if GetVerbose() {
		fmt.Printf("download time range with 500 split -> s : %v e: %v \n", QTime{start}.ToString(), QTime{end}.ToString())
	}
	day_rang := []TimeRange{}
	// diff = number of 499-bar windows needed to cover the span.
	var diff float64
	switch frame {
	case M15:
		diff = (end.Sub(start).Minutes() / 15) / 499
	case H1:
		{
			diff = end.Sub(start).Hours() / 499
		}
	case H2:
		{
			diff = (end.Sub(start).Hours() / 2) / 499
		}
	case H4:
		{
			diff = (end.Sub(start).Hours() / 4) / 499
		}
	case D1:
		{
			/*var d1 TimeRange
			d1.File_name = TimeToString(start, "yyyymmdd") + ".csv"
			d1.Begin = start
			d1.End = end
			day_rang = append(day_rang, d1)
			return day_rang*/
			diff = (end.Sub(start).Hours() / 24) / 499
		}
	}
	var t1 time.Time
	var t2 time.Time
	for i := 0; i <= int(diff); i++ {
		// Each window starts where the previous one ended.
		if i == 0 {
			t1 = start
		} else {
			t1 = t2
		}
		// Window length = 500 bars of the requested timeframe.
		switch frame {
		case M15:
			t2 = t1.Add((time.Minute * 15) * time.Duration(500))
		case H1:
			{
				t2 = t1.Add((time.Hour) * time.Duration(500))
			}
		case H2:
			{
				t2 = t1.Add((time.Hour * 2) * time.Duration(500))
			}
		case H4:
			{
				t2 = t1.Add((time.Hour * 4) * time.Duration(500))
			}
		case D1:
			{
				t2 = t1.Add((time.Hour * 24) * time.Duration(500))
			}
		}
		//::::::::::::::::::::::::::::::::::::::
		//fmt.Println(t1,t2)
		// Stop once the window start moves past the present.
		if t1.After(time.Now()) {
			break
		}
		var d1 TimeRange
		d1.File_name = TimeToString(t1, "yyyymmdd") + ".csv"
		d1.Begin = t1
		d1.End = t2
		day_rang = append(day_rang, d1)
	}
	return day_rang
}
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::: dorost kardan db stock ha
// SyncDb runs the schema migration for every asset database referenced
// by the watch list (Tehran and Crypto sections) plus the shared "main"
// database. Index assets get an "i" suffix appended to the database
// name, matching make().
func (a StockProvider) SyncDb(wl *WatchListItem) error {
	for _, f := range wl.Tehran {
		dbnametmp := f.AssetCode
		if f.IsIndex {
			dbnametmp = fmt.Sprintf("%vi", f.AssetCode)
		}
		if e := Migrate(dbnametmp, &a); e != nil {
			return e
		}
	}
	//____________
	for _, f := range wl.Crypto {
		dbnametmp := f.AssetCode
		if f.IsIndex {
			// Fixed: the original used "%ti" — %t is the BOOLEAN verb,
			// so a string AssetCode produced "%!t(string=...)i" instead
			// of "<code>i" as in the Tehran branch above.
			dbnametmp = fmt.Sprintf("%vi", f.AssetCode)
		}
		if e := Migrate(dbnametmp, &a); e != nil {
			return e
		}
	}
	if e := Migrate("main", &a); e != nil {
		return e
	}
	fmt.Println("sync ..... done ")
	return nil
}
// ReadJsonWatchList loads <cache root>/watchList.json, creating a
// default watch list first when the file does not exist. Returns the
// decoded list or the underlying I/O / JSON error.
func (a StockProvider) ReadJsonWatchList() (*WatchListItem, error) {
	var list WatchListItem
	watchPath := path.Join(GetRootCache(), "watchList.json")
	if !IsExist(watchPath) {
		// Fixed: Printf was previously fed a pre-formatted Sprintf
		// result, so a '%' in the path would have been re-interpreted
		// as a formatting verb.
		fmt.Printf("watch list not found : %v ,create default Watch list ", watchPath)
		CreateWatchList(GetRootCache())
	}
	jsonFile, err := os.Open(watchPath)
	if err != nil {
		fmt.Println(err)
		return nil, err
	}
	defer jsonFile.Close()
	// Fixed: the ReadAll error was previously discarded.
	byteValue, err := ioutil.ReadAll(jsonFile)
	if err != nil {
		return nil, err
	}
	if e := json.Unmarshal(byteValue, &list); e != nil {
		return nil, e
	}
	return &list, nil
}
/*out stock */
// OutStockList exports the symbol list stored in the "main" database to
// <cache root>/stock_list.csv, one row per symbol:
// entity type, entity id, trade symbol.
func (a StockProvider) OutStockList(dbLock *sync.Mutex) error {
	db, _, er := DatabaseInit("main", "")
	if er != nil {
		return er
	}
	defer a.closeMyDb(db)
	items, err := GetNemadList(db, dbLock)
	if err != nil {
		return err
	}
	// Fixed: the slice was previously seeded with an empty record
	// ([][]string{{}}), which emitted a blank leading line in the CSV.
	var data [][]string
	for _, k := range items {
		data = append(data, []string{k.EntityType, strconv.FormatInt(k.EntityId, 10), k.TradeSymbol})
	}
	//:::::::: write to csv
	var s string = path.Join(GetRootCache(), "stock_list.csv")
	file, err := os.Create(s)
	if err != nil {
		return errors.New(fmt.Sprintf("OutStockList -> Cannot create file %s", err))
	}
	defer file.Close()
	writer := csv.NewWriter(file)
	defer writer.Flush()
	for _, value := range data {
		if err := writer.Write(value); err != nil {
			return errors.New(fmt.Sprintf("OutStockList -> Cannot create file %s", err))
		}
	}
	fmt.Printf("has been successfully created : %s \n", s)
	return nil
}
// OutTemWatchList builds a template watch list from the symbols in the
// "main" database and writes it to <cache root>/temp_watch_list.csv.
// NOTE(review): the payload is JSON although the extension is .csv —
// confirm downstream consumers before renaming the file.
// NOTE(review): IsIndex is set to FALSE when EntityType contains
// "index" — this looks inverted; confirm against the data source.
func (a StockProvider) OutTemWatchList(dbLock *sync.Mutex) error {
	db, _, er := DatabaseInit("main", "")
	if er != nil {
		return er
	}
	defer a.closeMyDb(db)
	items, err := GetNemadList(db, dbLock)
	if err != nil {
		return err
	}
	data := WatchListItem{}
	t := []WatchStock{}
	t1 := true
	for _, k := range items {
		g := WatchStock{}
		g.TimeFrame = []string{"d"}
		g.NameEn = strconv.FormatInt(k.EntityId, 10)
		g.AssetCode = strconv.FormatInt(k.EntityId, 10)
		g.IsIndex = true
		if strings.Contains(k.EntityType, "index") {
			g.IsIndex = false
		}
		g.IsAdj = &t1
		t = append(t, g)
	}
	data.Tehran = t
	data.Crypto = []WatchStock{}
	var s string = path.Join(GetRootCache(), "temp_watch_list.csv")
	// Fixed: marshal and write errors were silently discarded.
	file, err := json.MarshalIndent(data, "", " ")
	if err != nil {
		return err
	}
	if err := ioutil.WriteFile(s, file, 0644); err != nil {
		return err
	}
	fmt.Printf("has been successfully created : %s \n", s)
	return nil
}
// AddStockToWatchList appends a crypto symbol to watchList.json unless
// it is already present. Only the Binance provider adds entries; other
// providers leave the list unchanged (the file is still rewritten).
func (a StockProvider) AddStockToWatchList(provider EProvider, stockName string, stockCode string, index_t bool, adj *bool) error {
	w, e := a.ReadJsonWatchList()
	if e != nil {
		return errors.New(fmt.Sprintf("config read failed [%v] ", e))
	}
	if provider == Binance {
		for _, l := range w.Crypto {
			if l.AssetCode == stockCode {
				fmt.Printf("stock is exist %v %v \n", stockCode, stockName)
				return nil
			}
		}
		g := WatchStock{}
		g.TimeFrame = []string{"d"}
		g.NameEn = stockName
		g.AssetCode = stockCode
		g.IsIndex = index_t
		g.IsAdj = adj
		w.Crypto = append(w.Crypto, g)
	}
	//+++++++++++++++++++++++++
	s := path.Join(GetRootCache(), "watchList.json")
	if err := os.Remove(s); err != nil && !os.IsNotExist(err) {
		fmt.Printf("Failed to remove file for %v", err)
	}
	// Fixed: marshal and write errors were silently discarded.
	file, err := json.MarshalIndent(w, "", " ")
	if err != nil {
		return err
	}
	if err := ioutil.WriteFile(s, file, 0644); err != nil {
		return err
	}
	return nil
}
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
// isHasTimeFrame reports whether the watch-list entry requests the given
// timeframe. A nil or empty TimeFrame list means "all timeframes".
func (a StockProvider) isHasTimeFrame(timeframe ETimeFrame, stock WatchStock) bool {
	if len(stock.TimeFrame) == 0 { // len(nil slice) == 0, covers both cases
		return true
	}
	want := timeframe.ToString2()
	for _, g := range stock.TimeFrame {
		// Idiom fix: strings.EqualFold instead of comparing two
		// ToLower copies — no allocations, same case-insensitive match.
		if strings.EqualFold(want, g) {
			return true
		}
	}
	return false
}
// isHasAdjust reports whether adjusted data is requested for the entry;
// an unset (nil) IsAdj defaults to true.
func (a StockProvider) isHasAdjust(stock WatchStock) bool {
	if adj := stock.IsAdj; adj != nil {
		return *adj
	}
	return true
}
// avardAssetProcess schedules one download job per requested timeframe
// for a single watch-list asset, bounded by the provider's semaphore.
// The parent WaitGroup is always signalled, even on early error.
func (a StockProvider) avardAssetProcess(parentWaitGroup *sync.WaitGroup, readFromLast bool, watchStock WatchStock) error {
	// Fixed: the Acquire error was previously ignored; if Acquire fails
	// we must signal the parent and must NOT Release.
	if err := a.sem.Acquire(context.Background(), 1); err != nil {
		parentWaitGroup.Done()
		return err
	}
	defer a.sem.Release(1)
	defer parentWaitGroup.Done() // LIFO: Done runs before Release, as before
	if watchStock.NameEn == "" || watchStock.AssetCode == "" {
		return errors.New("field is empty ")
	}
	if a.Provider != Binance {
		return errors.New("not selected :( ")
	}
	var databaseLock sync.Mutex
	var wg sync.WaitGroup
	// One query per timeframe; the look-back window (in days) differs per
	// timeframe, matching the previously hard-coded durations.
	jobs := []struct {
		frame ETimeFrame
		days  int64
	}{
		{M15, 250},
		{H1, 250},
		{H2, 250},
		{H4, 360},
		{D1, 9000},
	}
	for _, job := range jobs {
		if a.isHasTimeFrame(job.frame, watchStock) {
			a.procMake(StockQuery{
				WaitGroupobj: &wg,
				DBLock:       &databaseLock,
				ReadfromLast: readFromLast,
				Stock:        watchStock,
				Duration:     -time.Duration(job.days) * 24 * time.Hour,
				EndTime:      time.Now(),
				TimeFrame:    job.frame,
				TypeChart:    Normal,
			})
		}
	}
	wg.Wait()
	return nil
}
// runCryptoSweep launches avardAssetProcess for every crypto asset in
// the watch list — sequentially or on goroutines, per IsSeqRunProcess —
// and blocks until all of them have finished.
func (a StockProvider) runCryptoSweep(readfromLast bool) {
	var wg sync.WaitGroup
	if a.Provider == Binance {
		wg.Add(len(a._WatchListItem.Crypto))
		for _, g := range a._WatchListItem.Crypto {
			if a.IsSeqRunProcess {
				a.avardAssetProcess(&wg, readfromLast, g)
			} else {
				go a.avardAssetProcess(&wg, readfromLast, g)
			}
		}
	}
	wg.Wait()
}

// Run loads the watch list and synchronises every crypto asset once
// (timer_minute == nil) or repeatedly on a fixed minute interval.
// At most 10 assets are processed concurrently (semaphore).
func (a StockProvider) Run(readfromLast bool, isSeq bool, timer_minute *int64) error {
	var e error
	a.sem = semaphore.NewWeighted(10) // at most 10 assets in flight
	a._WatchListItem, e = a.ReadJsonWatchList()
	a.IsSeqRunProcess = isSeq
	if e != nil {
		return errors.New(fmt.Sprintf("config read failed [%v] ", e))
	}
	if timer_minute == nil {
		a.runCryptoSweep(readfromLast)
		return nil
	}
	// Periodic mode: one sweep per tick, forever.
	// Fixed: the original guarded the loop with an 'isWorking' flag that
	// could never be observed true (single goroutine sets and clears it
	// within one iteration) and would have busy-spun via 'continue' if it
	// ever were; a plain blocking receive on the ticker is equivalent.
	tick := time.Tick(time.Duration(*timer_minute) * time.Minute)
	for range tick {
		fmt.Printf("timer -> %v\n", time.Now().Format(time.ANSIC))
		a.runCryptoSweep(readfromLast)
	}
	return nil
}
|
<filename>qa/common/rest/lbaas_helper.py
from base_rest_helper import *
class PoolHelper(BaseRESTHelper):
    """REST helper around the lbaas pool and pool-service endpoints."""

    def create_pool(self, parameters=None):
        """
        Create lbaas pool with specified parameters.
        Almost all parameters are not required to be passed into method - default values will be assigned here.
        The only parameter that MUST be passed in parameters dict is netInterface.
        It requires to find or create valid instance to get correct netInterface,
        so it's better to do it out of this method (because it's responsible just for pool creation).

        Returns the dict of parameters actually sent: the raw response only
        contains a job id, so the params are the more useful data.
        """
        params = {
            'name': '',
            'port': '123',
            'lbMethod': '',
            'monitors': [],
            'enabled': 'on',
            'instances': [],  # list of strings, instance IDs. can be empty list.
            'netInterface': '',  # str, at least any existing net interface. can be taken from some instance.
            'servicePort': '17',
            'serviceWeight': '9'
        }
        # apply non-empty user-defined parameters
        if parameters is not None:
            for k in parameters:
                if parameters[k] != "":
                    params[k] = parameters[k]
        # fill in defaults that need live data from the API
        if params['name'] == "":
            pools = self.utils.get_list('pools')
            params['name'] = self.utils.generate_string(4, *[p['name'] for p in pools])
        if params['lbMethod'] == "":
            methods = self.utils.get_list('methods')
            params['lbMethod'] = methods[0]
        if params['monitors'] in ([], ''):
            monitors = self.utils.get_list('monitors')
            params['monitors'] = [monitors[0]]
        if params['netInterface'] == '':
            raise ValueError("netInterface value must be passed to create_pool method!")
        self.utils.send_request("POST", 'create_pool', data=params)
        return params  # response contains just job id, so params is returned as more useful data.

    def delete_pool(self, pools):
        """Delete the given pools; return True when none of them remain listed."""
        params = {'selectedPools': pools}
        self.utils.send_request('POST', 'delete_pool', data=params)
        remaining = [p for p in self.utils.get_list('pools') if p['name'] in pools]
        return len(remaining) == 0

    def show_pool(self, poolname):
        """Return the decoded JSON description of the named pool."""
        params = {'id': poolname}
        res = self.utils.send_request('GET', 'show_pool', data=params)
        return json.loads(res.content)

    def create_service(self, parameters):
        """
        parameters dict has to define at least ip and pool keys:
        - ip: can be taken from 'networks' parameter of the instance object;
        - pool: name of pool object (its netInterface should correspond to instance's network?).
        Also, the dict can contain any other parameters needed to create a service.
        By default, service is created in disabled state.

        Returns the dict of parameters actually sent.
        """
        params = {
            'name': self.utils.generate_string(4) + ":129",  # name should end with port value
            'instanceId': '',
            'netInterface': '',
            'id': '',  # pool name
            'port': '129',
            'weight': '8'
        }
        # apply non-empty user-defined parameters
        for k in parameters:
            if parameters[k] != '':
                params[k] = parameters[k]
        self.utils.send_request("POST", 'create_service', data=params, validate_response=True)
        return params

    def delete_service(self, parameters):
        """
        parameters dict contains two keys:
        - pool: pool name,
        - selectedServices: string of one service name or list of strings.

        Returns True when none of the listed services remain in the pool.
        """
        self.utils.send_request('POST', 'delete_service', data=parameters)
        services = parameters['selectedServices']
        # Idiom fix: isinstance instead of type(...) == str.
        if isinstance(services, str):  # convert value to list for unified filtering below
            services = [services]
        remaining = [s for s in self.show_pool(parameters['pool'])['services'] if s['name'] in services]
        return len(remaining) == 0

    def enable_service(self, parameters):
        """
        parameters dict contains two keys:
        - pool: pool name,
        - selectedServices: string of one service name or list of strings.
        """
        self.utils.send_request('POST', 'enable_service', data=parameters)
        # TODO: if res contains meaningful content it's not needed to analyze results here. leave it for test-case.
        #services = parameters['selectedServices']
        #if type(services) == str:  # convert value to list for unified filtering below
        #    services = [services]
        #enabled = [s for s in self.show_pool(parameters['pool'])['services']
        #           if s['name'] in services and s['enabled'] is True]
        #return len(enabled) == len(services)

    def disable_service(self, parameters):
        """
        parameters dict contains two keys:
        - pool: pool name,
        - selectedServices: string of one service name or list of strings.
        """
        self.utils.send_request('POST', 'disable_service', data=parameters)
        # TODO: if res contains meaningful content it's not needed to analyze results here. leave it for test-case.
        #services = parameters['selectedServices']
        #if type(services) == str:  # convert value to list for unified filtering below
        #    services = [services]
        #enabled = [s for s in self.show_pool(parameters['pool'])['services']
        #           if s['name'] in services and s['enabled'] is False]
        #return len(enabled) == len(services)
class VipHelper(BaseRESTHelper):
    """REST helper for lbaas virtual-IP (vip) objects."""

    def create_vip(self, parameters=None):
        """Create a vip; caller-supplied non-empty fields override the defaults."""
        payload = {
            'name': '',
            'ip': '172.16.31.10',
            'port': 123,
            'protocol': 'HTTP',
            'enabled': 'on'
        }
        if parameters is not None:
            for key in parameters:
                if parameters[key] != "":
                    payload[key] = parameters[key]
        if payload['name'] == '':
            # pick a fresh name that does not collide with existing vips
            taken = [v['name'] for v in self.utils.get_list('vips')]
            payload['name'] = self.utils.generate_string(4, *taken)
        response = self.utils.send_request("POST", 'create_vip', data=payload)
        return json.loads(response.content)

    def update_vip(self, parameters):
        """
        parameters dict has to contain the following keys:
        id: current name of the vip
        name: new name (or repeat old one if do not want to change)
        ip: new or the same ip
        port: integer
        protocol
        enabled: optional, 'on' if enabled. if disabled - do not mention this key at all.
        """
        response = self.utils.send_request('POST', 'update_vip', data=parameters)
        return json.loads(response.content)['resp']

    def delete_vip(self, vid):
        """Delete the vip named 'vid'; return True once it no longer appears in the vip list."""
        self.utils.send_request('POST', 'delete_vip', data={'id': vid})
        leftovers = [v for v in self.utils.get_list('vips') if v['name'] == vid]
        return len(leftovers) == 0

    def show_vip(self, vname):
        """Return the decoded 'vip' object for the given vip name."""
        response = self.utils.send_request('GET', 'show_vip', data={'id': vname})
        return json.loads(response.content)['vip']
class PolicyHelper(BaseRESTHelper):
    """REST helper for lbaas policy objects."""

    def create_policy(self, parameters=None):
        """Create a policy; empty caller fields fall back to the defaults below."""
        payload = {  # the dict can also contain optional tenantName - to create policy for non-current tenant.
            'name': '',
            'rule': 'rest rule'
        }
        if parameters is not None:
            for key in parameters:
                if parameters[key] != "":
                    payload[key] = parameters[key]
        if payload['name'] == '':
            # pick a fresh name that does not collide with existing policies
            taken = [p['name'] for p in self.utils.get_list('policies')]
            payload['name'] = self.utils.generate_string(4, *taken)
        response = self.utils.send_request("POST", 'create_policy', data=payload)
        return json.loads(response.content)

    def update_policy(self, parameters):
        """
        parameters dict has to contain the following keys:
        id: current name of the policy
        name: new name (or repeat old one if do not want to change)
        rule: new or the same rule
        tenantName: optional
        """
        response = self.utils.send_request('POST', 'update_policy', data=parameters)
        return json.loads(response.content)

    def delete_policy(self, pname, tenantname=None):
        """
        Arguments:
        - pname: string, policy name
        - tenantname: string, optional

        Returns True once the policy no longer appears in the policy list.
        """
        payload = {'id': pname}
        if tenantname is not None:
            payload['tenantName'] = tenantname
        self.utils.send_request('POST', 'delete_policy', data=payload)
        leftovers = [p for p in self.utils.get_list('policies') if p['name'] == pname]
        return len(leftovers) == 0
|
#!/usr/bin/env bash
set -e
# this script is used to update vendored dependencies
#
# Usage:
# vendor.sh revendor all dependencies
# vendor.sh github.com/docker/engine-api revendor only the engine-api dependency.
# vendor.sh github.com/docker/engine-api v0.3.3 vendor only engine-api at the specified tag/commit.
# vendor.sh git github.com/docker/engine-api v0.3.3 is the same but specifies the VCS for cases where the VCS is something else than git
# vendor.sh git golang.org/x/sys eb2c74142fd19a79b3f237334c7384d5167b1b46 https://github.com/golang/sys.git vendor only golang.org/x/sys downloading from the specified URL
# Work from the repository root so the relative vendor/ paths resolve.
cd "$(dirname "$BASH_SOURCE")/.."
source 'hack/.vendor-helpers.sh'
# Dispatch on argument count (see the usage examples in the header).
case $# in
# No args: full re-vendor — wipe vendor/ and fall through to the clone
# list below.
0)
rm -rf vendor/
;;
# If user passed arguments to the script
# <package>: re-vendor one dependency by grepping this very script for
# its matching 'clone' line and eval'ing it.
1)
eval "$(grep -E "^clone [^ ]+ $1" "$0")"
clean
exit 0
;;
# <package> <tag/commit>: clone with git at the given ref.
2)
rm -rf "vendor/src/$1"
clone git "$1" "$2"
clean
exit 0
;;
# <vcs> <package> <ref> [<url>]: explicit VCS, optional download URL.
[34])
rm -rf "vendor/src/$2"
clone "$@"
clean
exit 0
;;
*)
>&2 echo "error: unexpected parameters"
exit 1
;;
esac
# the following lines are in sorted order, FYI
clone git github.com/Azure/go-ansiterm 388960b655244e76e24c75f48631564eaefade62
clone git github.com/Microsoft/hcsshim v0.3.4
clone git github.com/Microsoft/go-winio v0.3.4
clone git github.com/Sirupsen/logrus v0.10.0 # logrus is a common dependency among multiple deps
clone git github.com/docker/libtrust 9cbd2a1374f46905c68a4eb3694a130610adc62a
clone git github.com/go-check/check 03a4d9dcf2f92eae8e90ed42aa2656f63fdd0b14 https://github.com/cpuguy83/check.git
clone git github.com/gorilla/context 14f550f51a
clone git github.com/gorilla/mux e444e69cbd
clone git github.com/kr/pty 5cf931ef8f
clone git github.com/mattn/go-shellwords v1.0.0
clone git github.com/mattn/go-sqlite3 v1.1.0
clone git github.com/tchap/go-patricia v2.1.0
clone git github.com/vdemeester/shakers 24d7f1d6a71aa5d9cbe7390e4afb66b7eef9e1b3
# forked golang.org/x/net package includes a patch for lazy loading trace templates
clone git golang.org/x/net 2beffdc2e92c8a3027590f898fe88f69af48a3f8 https://github.com/tonistiigi/net.git
clone git golang.org/x/sys eb2c74142fd19a79b3f237334c7384d5167b1b46 https://github.com/golang/sys.git
clone git github.com/docker/go-units 651fc226e7441360384da338d0fd37f2440ffbe3
clone git github.com/docker/go-connections fa2850ff103453a9ad190da0df0af134f0314b3d
clone git github.com/docker/engine-api 4eca04ae18f4f93f40196a17b9aa6e11262a7269
clone git github.com/RackSec/srslog 365bf33cd9acc21ae1c355209865f17228ca534e
clone git github.com/imdario/mergo 0.2.1
#get libnetwork packages
clone git github.com/docker/libnetwork 66c844678f7d7df33e4f46184e5b4749f0204b5a
clone git github.com/docker/go-events afb2b9f2c23f33ada1a22b03651775fdc65a5089
clone git github.com/armon/go-radix e39d623f12e8e41c7b5529e9a9dd67a1e2261f80
clone git github.com/armon/go-metrics eb0af217e5e9747e41dd5303755356b62d28e3ec
clone git github.com/hashicorp/go-msgpack 71c2886f5a673a35f909803f38ece5810165097b
clone git github.com/hashicorp/memberlist 88ac4de0d1a0ca6def284b571342db3b777a4c37
clone git github.com/hashicorp/go-multierror fcdddc395df1ddf4247c69bd436e84cfa0733f7e
clone git github.com/hashicorp/serf 598c54895cc5a7b1a24a398d635e8c0ea0959870
clone git github.com/docker/libkv v0.2.1
clone git github.com/vishvananda/netns 604eaf189ee867d8c147fafc28def2394e878d25
clone git github.com/vishvananda/netlink e73bad418fd727ed3a02830b1af1ad0283a1de6c
clone git github.com/BurntSushi/toml f706d00e3de6abe700c994cdd545a1a4915af060
clone git github.com/samuel/go-zookeeper d0e0d8e11f318e000a8cc434616d69e329edc374
clone git github.com/deckarep/golang-set ef32fa3046d9f249d399f98ebaf9be944430fd1d
clone git github.com/coreos/etcd v2.3.2
fix_rewritten_imports github.com/coreos/etcd
clone git github.com/ugorji/go f1f1a805ed361a0e078bb537e4ea78cd37dcf065
clone git github.com/hashicorp/consul v0.5.2
clone git github.com/boltdb/bolt fff57c100f4dea1905678da7e90d92429dff2904
clone git github.com/miekg/dns 75e6e86cc601825c5dbcd4e0c209eab180997cd7
# get graph and distribution packages
clone git github.com/docker/distribution 07f32ac1831ed0fc71960b7da5d6bb83cb6881b5
clone git github.com/vbatts/tar-split v0.9.11
# get go-zfs packages
clone git github.com/mistifyio/go-zfs 22c9b32c84eb0d0c6f4043b6e90fc94073de92fa
clone git github.com/pborman/uuid v1.0
# get desired notary commit, might also need to be updated in Dockerfile
clone git github.com/docker/notary v0.3.0
clone git google.golang.org/grpc ab0be5212fb225475f2087566eded7da5d727960 https://github.com/grpc/grpc-go.git
clone git github.com/miekg/pkcs11 df8ae6ca730422dba20c768ff38ef7d79077a59f
clone git github.com/docker/go v1.5.1-1-1-gbaf439e
clone git github.com/agl/ed25519 d2b94fd789ea21d12fac1a4443dd3a3f79cda72c
clone git github.com/opencontainers/runc f59ba3cdd76fdc08c004f42aa915996f6f420899 https://github.com/docker/runc.git # libcontainer
clone git github.com/opencontainers/specs 1c7c27d043c2a5e513a44084d2b10d77d1402b8c # specs
clone git github.com/seccomp/libseccomp-golang 32f571b70023028bd57d9288c20efbcb237f3ce0
# libcontainer deps (see src/github.com/opencontainers/runc/Godeps/Godeps.json)
clone git github.com/coreos/go-systemd v4
clone git github.com/godbus/dbus v4.0.0
clone git github.com/syndtr/gocapability 2c00daeb6c3b45114c80ac44119e7b8801fdd852
clone git github.com/golang/protobuf 3c84672111d91bb5ac31719e112f9f7126a0e26e
# gelf logging driver deps
clone git github.com/Graylog2/go-gelf aab2f594e4585d43468ac57287b0dece9d806883
clone git github.com/fluent/fluent-logger-golang v1.2.1
# fluent-logger-golang deps
clone git github.com/philhofer/fwd 899e4efba8eaa1fea74175308f3fae18ff3319fa
clone git github.com/tinylib/msgp 75ee40d2601edf122ef667e2a07d600d4c44490c
# fsnotify
clone git gopkg.in/fsnotify.v1 v1.2.11
# awslogs deps
clone git github.com/aws/aws-sdk-go v1.1.30
clone git github.com/go-ini/ini 060d7da055ba6ec5ea7a31f116332fe5efa04ce0
clone git github.com/jmespath/go-jmespath 0b12d6b521d83fc7f755e7cfc1b1fbdd35a01a74
# gcplogs deps
clone git golang.org/x/oauth2 2baa8a1b9338cf13d9eeb27696d761155fa480be https://github.com/golang/oauth2.git
clone git google.golang.org/api dc6d2353af16e2a2b0ff6986af051d473a4ed468 https://code.googlesource.com/google-api-go-client
clone git google.golang.org/cloud dae7e3d993bc3812a2185af60552bb6b847e52a0 https://code.googlesource.com/gocloud
# native credentials
clone git github.com/docker/docker-credential-helpers v0.3.0
# containerd
clone git github.com/docker/containerd 0366d7e9693c930cf18c0f50cc16acec064e96c5
# cluster
clone git github.com/docker/swarmkit 938530a15c8a0374b367f2b94ddfd8e8b9b61bad
clone git github.com/golang/mock bd3c8e81be01eef76d4b503f5e687d2d1354d2d9
clone git github.com/gogo/protobuf 43a2e0b1c32252bfbbdf81f7faa7a88fb3fa4028
clone git github.com/cloudflare/cfssl b895b0549c0ff676f92cf09ba971ae02bb41367b
clone git github.com/google/certificate-transparency 025a5cab06f6a819c455d9fdc9e2a1b6d0982284
clone git golang.org/x/crypto 3fbbcd23f1cb824e69491a5930cfeff09b12f4d2 https://github.com/golang/crypto.git
clone git github.com/mreiferson/go-httpclient 63fe23f7434723dc904c901043af07931f293c47
clone git github.com/hashicorp/go-memdb 98f52f52d7a476958fa9da671354d270c50661a7
clone git github.com/hashicorp/go-immutable-radix 8e8ed81f8f0bf1bdd829593fdd5c29922c1ea990
clone git github.com/hashicorp/golang-lru a0d98a5f288019575c6d1f4bb1573fef2d1fcdc4
clone git github.com/coreos/pkg 2c77715c4df99b5420ffcae14ead08f52104065d
clone git github.com/pivotal-golang/clock 3fd3c1944c59d9742e1cd333672181cd1a6f9fa0
clone git github.com/prometheus/client_golang e51041b3fa41cece0dca035740ba6411905be473
clone git github.com/beorn7/perks b965b613227fddccbfffe13eae360ed3fa822f8d
clone git github.com/prometheus/client_model fa8ad6fec33561be4280a8f0514318c79d7f6cb6
clone git github.com/prometheus/common ffe929a3f4c4faeaa10f2b9535c2b1be3ad15650
clone git github.com/prometheus/procfs 454a56f35412459b5e684fd5ec0f9211b94f002a
clone hg bitbucket.org/ww/goautoneg 75cd24fc2f2c2a2088577d12123ddee5f54e0675
clone git github.com/matttproud/golang_protobuf_extensions fc2b8d3a73c4867e51861bbdd5ae3c1f0869dd6a
clone git github.com/pkg/errors 01fa4104b9c248c8945d14d9f128454d5b28d595
# cli
clone git github.com/spf13/cobra 75205f23b3ea70dc7ae5e900d074e010c23c37e9 https://github.com/dnephin/cobra.git
clone git github.com/spf13/pflag cb88ea77998c3f024757528e3305022ab50b43be
clone git github.com/inconshreveable/mousetrap 76626ae9c91c4f2a10f34cad8ce83ea42c93bb75
clone git github.com/flynn-archive/go-shlex 3f9db97f856818214da2e1057f8ad84803971cff
clean
|
#!/bin/sh
set -e
/greengrass/ggc/core/greengrassd start
daemon_pid=`cat /var/run/greengrassd.pid`
# block docker exit until daemon process dies.
while [ -d /proc/$daemon_pid ]
do
# Sleep for 1s before checking that greengrass daemon is still alive
daemon_cmdline=`cat /proc/$daemon_pid/cmdline`
if awk 'BEGIN{ exit(!(ARGV[1] ~ /^\/greengrass\/ggc\/packages\/\d+\.\d+\.\d+\/bin\/daemon.*/))}' "$daemon_cmdline"; then
sleep 1;
else
break;
fi;
done
|
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
import React from 'react';
import { render } from 'enzyme';
import { requiredProps } from '../../../test/required_props';
import { EuiPageContent } from './page_content';
describe('EuiPageContent', () => {
test('is rendered', () => {
const component = render(<EuiPageContent {...requiredProps} />);
expect(component).toMatchSnapshot();
});
test('verticalPosition is rendered', () => {
const component = render(<EuiPageContent verticalPosition="center" />);
expect(component).toMatchSnapshot();
});
test('horizontalPosition is rendered', () => {
const component = render(<EuiPageContent horizontalPosition="center" />);
expect(component).toMatchSnapshot();
});
test('role can be removed', () => {
const component = render(<EuiPageContent role={null} />);
expect(component).toMatchSnapshot();
});
test('accepts panel props', () => {
const component = render(
<EuiPageContent
borderRadius="none"
hasShadow={false}
paddingSize="none"
/>
);
expect(component).toMatchSnapshot();
});
});
|
#!/bin/bash
while [ -n "$1" ]; do
case $1 in
-l|--local)
kubectl config use-context minikube
;;
-p|--prod)
kubectl config use-context --namespace=roygi prod_cluster
;;
esac
shift
done
|
// <gh_stars>10-100
import { YarnProcess } from './yarn'
import { NpmProcess } from './npm'
export { YarnProcess }
export { NpmProcess }
export const Runners = [YarnProcess, NpmProcess]
|
#include<iostream>
using namespace std;
//Node structure
struct Node {
int data;
Node* next;
};
//Function to insert a node
void insertNode(Node** head, int newData)
{
Node* newNode = new Node();
newNode->data = newData;
newNode->next = (*head);
(*head) = newNode;
}
//Function to print the linked list
void printList(Node* head)
{
Node* temp = head;
while (temp != NULL) {
cout << temp->data << " ";
temp = temp->next;
}
}
//Main function
int main()
{
Node* head = NULL;
// Inserting nodes
insertNode(&head, 1);
insertNode(&head, 2);
insertNode(&head, 3);
insertNode(&head, 4);
insertNode(&head, 5);
cout << "Linked list: ";
printList(head);
return 0;
} |
// <reponame>huangbin082/Bin
package com.leetcode;
public class Solution_1185 {
static String[] w = new String[]{"Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday", "Sunday"};
static int[] m = new int[]{0, 31, 0, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31};
public String dayOfTheWeek(int day, int month, int year) {
int dayCount = 0;
for (int i = 1971; i < year; i++) {
dayCount += dayCount(i);
}
for (int i = 1; i < month; i++) {
dayCount += dayCount(year, i);
}
dayCount += day - 1;
return w[(dayCount + 4) % 7];
}
private int dayCount(int year) {
return (year % 4 == 0 && year % 100 != 0) || year % 400 == 0 ? 366 : 365;
}
private int dayCount(int year, int month) {
return month != 2 ? m[month - 1] : ((year % 4 == 0 && year % 100 != 0) || year % 400 == 0 ? 29 : 28);
}
}
|
import java.util.Stack;
public int countValidSubarrays(int[] nums) {
int ans = 0;
Stack<Integer> stack = new Stack<>();
for (int num : nums) {
while (!stack.isEmpty() && stack.peek() > num)
stack.pop();
stack.push(num);
ans += stack.size();
}
return ans;
} |
// src/main/java/bi/ihela/client/dto/init/Merchant.java <gh_stars>1-10
/**
*
*/
package bi.ihela.client.dto.init;
import java.io.Serializable;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonPropertyOrder;
import com.fasterxml.jackson.annotation.JsonRootName;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.Setter;
import lombok.ToString;
/**
* @author jeanclaude.bucumi <EMAIL>
*/
@Getter
@Setter
@ToString
@NoArgsConstructor
@JsonRootName("merchant")
@JsonPropertyOrder({ "merchant", "title", "user" })
public class Merchant implements Serializable {
private static final long serialVersionUID = 1L;
@JsonProperty("merchant")
public MerchantInfo merchantInfo;
@JsonProperty("title")
public String title;
@JsonProperty("user")
public User user;
}
|
# -----------------------------------------------------------------------------
#
# Package : html2text/html2text
# Version : 4.0.1
# Source repo : https://github.com/mtibben/html2text
# Tested on : RHEL 8.3
# Script License: Apache License, Version 2 or later
# Maintainer : BulkPackageSearch Automation <sethp@us.ibm.com>
#
# Disclaimer: This script has been tested in root mode on given
# ========== platform using the mentioned version of the package.
# It may not work as expected with newer versions of the
# package and/or distribution. In such case, please
# contact "Maintainer" of this script.
#
# ----------------------------------------------------------------------------
PACKAGE_NAME=html2text/html2text
PACKAGE_VERSION=4.0.1
PACKAGE_URL=https://github.com/mtibben/html2text
yum -y update && yum install -y nodejs nodejs-devel nodejs-packaging npm python38 python38-devel ncurses git jq curl php php-curl php-dom php-mbstring php-json nodejs make gcc-c++ patch diffutils php-gd php-pecl-zip
php -r "copy('https://getcomposer.org/installer', 'composer-setup.php');" && php composer-setup.php --install-dir=/bin --filename=composer
composer require --dev phpunit/phpunit --with-all-dependencies ^7
mkdir output
OS_NAME=`python3 -c "os_file_data=open('/etc/os-release').readlines();os_info = [i.replace('PRETTY_NAME=','').strip() for i in os_file_data if i.startswith('PRETTY_NAME')];print(os_info[0])"`
HOME_DIR=`pwd`
if ! git clone $PACKAGE_URL $PACKAGE_NAME; then
echo "------------------$PACKAGE_NAME:clone_fails---------------------------------------"
echo "$PACKAGE_URL $PACKAGE_NAME"
echo "$PACKAGE_NAME | $PACKAGE_URL | $PACKAGE_VERSION | $OS_NAME | GitHub | Fail | Clone_Fails"
exit 0
fi
cd $HOME_DIR/$PACKAGE_NAME
git checkout $PACKAGE_VERSION
if ! composer install; then
echo "------------------$PACKAGE_NAME:install_fails-------------------------------------"
echo "$PACKAGE_URL $PACKAGE_NAME"
echo "$PACKAGE_NAME | $PACKAGE_URL | $PACKAGE_VERSION | $OS_NAME | GitHub | Fail | Install_Fails"
exit 0
fi
cd $HOME_DIR/$PACKAGE_NAME
if ! /home/tester/vendor/bin/phpunit; then
echo "------------------$PACKAGE_NAME:install_success_but_test_fails---------------------"
echo "$PACKAGE_URL $PACKAGE_NAME"
echo "$PACKAGE_NAME | $PACKAGE_URL | $PACKAGE_VERSION | $OS_NAME | GitHub | Fail | Install_success_but_test_Fails"
exit 0
else
echo "------------------$PACKAGE_NAME:install_&_test_both_success-------------------------"
echo "$PACKAGE_URL $PACKAGE_NAME"
echo "$PACKAGE_NAME | $PACKAGE_URL | $PACKAGE_VERSION | $OS_NAME | GitHub | Pass | Both_Install_and_Test_Success"
exit 0
fi |
#!/bin/bash
NODE_ENV=production node server |
// 4.2_update.cpp
#include "2_Game.hpp"
void Game::update()
{
switch( state )
{
case MainMenu:
break;
case Playing:
//myPlayer movements
if( (sf::Keyboard::isKeyPressed(sf::Keyboard::A) && myPlayer.getPosition().x > 0) ||
(sf::Keyboard::isKeyPressed(sf::Keyboard::Left) && myPlayer.getPosition().x > 0) )
{
myPlayer.move(-3.f,0.f);
}
if( (sf::Keyboard::isKeyPressed(sf::Keyboard::D) && myPlayer.getPosition().x + myPlayer.getGlobalBounds().width < myWindow.getSize().x) ||
(sf::Keyboard::isKeyPressed(sf::Keyboard::Right) && myPlayer.getPosition().x + myPlayer.getGlobalBounds().width < myWindow.getSize().x) )
{
myPlayer.move(3.f,0.f);
}
myPlayerCenter = sf::Vector2f( myPlayer.getPosition().x + myPlayer.getGlobalBounds().width/2 - 4 , myPlayer.getPosition().y + myPlayer.getGlobalBounds().height/2 );
//shooting
if( ShootTimer < 40 )
{
ShootTimer++;
}
if(sf::Keyboard::isKeyPressed(sf::Keyboard::Space) && ShootTimer >= 40 )
{
ShootTimer = 0;
ShootSound.play();
ProjectileLimit--;
PlayerProjectileSample.setPosition(myPlayerCenter.x-5,myPlayerCenter.y-50);
PlayerProjectiles.push_back(PlayerProjectileSample);
ProjectileLimitText.setString(std::to_string(ProjectileLimit));
}
//projectile move and delete
for( int i = 0 ; i < PlayerProjectiles.size() ; i++)
{
PlayerProjectiles[i].move(0.f,-7.f);
if( PlayerProjectiles[i].getPosition().y + PlayerProjectiles[i].getGlobalBounds().height < 0 )
{
PlayerProjectiles.erase( PlayerProjectiles.begin()+i );
}
}
//egg projectiles
if( eggTimer > 390 )
{
eggTimer = 0;
int random = rand()%myEnemies.size();
eggProjectileSample.setPosition( myEnemies[random].getPosition().x + myEnemies[random].getGlobalBounds().width/2
, myEnemies[random].getPosition().y + myEnemies[random].getGlobalBounds().height );
eggProjectiles.push_back(eggProjectileSample);
}
else
{
eggTimer++;
}
//egg move and delete and creat eggsplash
for( int i = 0 ; i < eggProjectiles.size() ; i++ )
{
eggProjectiles[i].move(0.f,+1.f);
if( eggProjectiles[i].getPosition().y + eggProjectiles[i].getGlobalBounds().height > myWindow.getSize().y )
{
eggSplashSample.setPosition(eggProjectiles[i].getPosition().x,eggProjectiles[i].getPosition().y-6);
eggSplash.push_back(eggSplashSample);
eggSplashTimer.push_back(0);
eggDestroyedSound.play();
eggProjectiles.erase( eggProjectiles.begin()+i );
}
}
//egg splash delete
for( int i = 0 ; i < eggSplash.size() ; i++ )
{
if( eggSplashTimer[i] >= 180 )
{
eggSplash.erase( eggSplash.begin() + i );
eggSplashTimer.erase( eggSplashTimer.begin() + i );
}
else
{
eggSplashTimer[i]++;
}
}
//gifts
if( giftTimer > 2000 )
{
giftTimer = 0;
int random_x = rand()%((int)(myWindow.getSize().x-giftSample.getGlobalBounds().width));
giftSample.setPosition(random_x,0);
gifts.push_back(giftSample);
}
else
{
giftTimer++;
}
//gift move and delete
for( int i = 0 ; i < gifts.size() ; i++ )
{
gifts[i].move(0.f,+1.f);
if( gifts[i].getPosition().y > myWindow.getSize().y )
{
gifts.erase( gifts.begin()+i );
}
}
//enemies movements
if( Direction == "Left" )
{
for( int i = 0 ; i < myEnemies.size() ; i++)
{
myEnemies[i].move(-1.f,0.f);
}
}
else
{
for( int i = 0 ; i < myEnemies.size() ; i++)
{
myEnemies[i].move(1.f,0.f);
}
}
//direction change
for( int i = 0 ; i < myEnemies.size() ; i++ )
{
if( myEnemies[i].getPosition().x < 0 )
{
Direction = "Right";
break;
}
else if( myEnemies[i].getPosition().x + myEnemy.getGlobalBounds().width > myWindow.getSize().x )
{
Direction = "Left";
break;
}
}
//meat movement
for( int i = 0 ; i < meats.size() ; i++ )
{
meats[i].move(0.f,+1.5f);
if( meats[i].getPosition().y > myWindow.getSize().y )
{
meats.erase( meats.begin() + i );
}
}
//collision
//collision vs playerprojectiles and enemies
for( int k = 0 ; k < PlayerProjectiles.size() ; k++ )
{
for( int i = 0 ; i < myEnemies.size() ; i++ )
{
if( PlayerProjectiles[k].getGlobalBounds().intersects( myEnemies[i].getGlobalBounds() ) )
{
meatSample.setPosition( myEnemies[i].getPosition().x , myEnemies[i].getPosition().y );
meats.push_back(meatSample);
PlayerProjectiles.erase( PlayerProjectiles.begin()+k );
ChickenHitSound.play();
myEnemies.erase( myEnemies.begin()+i );
}
}
}
//collision vs enemy projectiles and player
for( int k = 0 ; k < eggProjectiles.size() ; k++ )
{
if( eggProjectiles[k].getGlobalBounds().intersects( myPlayer.getGlobalBounds() ) )
{
eggShipSound.play();
Health--;
HealthText.setString(std::to_string(Health));
eggProjectiles.erase( eggProjectiles.begin()+k );
}
}
//gifts collision
for( int i = 0 ; i < gifts.size() ; i++ )
{
if( gifts[i].getGlobalBounds().intersects(myPlayer.getGlobalBounds()) )
{
rewardSound.play();
int random = rand()%3 ;
if( random == 0 )
{
if( Health < 3 )
{
Health++;
HealthText.setString(std::to_string(Health));
}
}
else
{
ProjectileLimit += 5 ;
ProjectileLimitText.setString(std::to_string(ProjectileLimit));
}
gifts.erase(gifts.begin()+i);
}
}
//meat collision vs player
for( int i = 0 ; i < meats.size() ; i++ )
{
if( meats[i].getGlobalBounds().intersects(myPlayer.getGlobalBounds()) )
{
meatSound.play();
meats.erase( meats.begin() + i );
ProjectileLimit++;
ProjectileLimitText.setString(std::to_string(ProjectileLimit));
}
}
//state changes
if( myEnemies.size() == 0 && !stateSet )
{
state = Win;
winSound.play();
stateSet = true;
}
else if( Health == 0 && !stateSet )
{
state = Lose;
explosionSound.play();
messageText.setString("Game Over");
stateSet = true;
}
else if( ProjectileLimit == 0 && !stateSet )
{
state = Lose;
explosionSound.play();
messageText.setString("Out of ammo");
stateSet = true;
}
//timer update
if( myClock.getElapsedTime().asSeconds() >= 1 )
{
sec++;
if( sec > 59 )
{
min++;
sec = 0;
}
myClock.restart().asSeconds();
myTimerText.setString( std::to_string(min) + " : " + std::to_string(sec) );
}
break; //end case of playing
case Win:
messageText.setString("You Win");
break;
case Lose:
break;
case Pause:
break;
}
}
|
/*
Copyright AppsCode Inc. and Contributors
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package v1alpha1
import (
kmapi "kmodules.xyz/client-go/api/v1"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
)
const (
ResourceKindResourceOutline = "ResourceOutline"
ResourceResourceOutline = "resourceoutline"
ResourceResourceOutlines = "resourceoutlines"
)
// +genclient
// +genclient:nonNamespaced
// +genclient:skipVerbs=updateStatus
// +k8s:openapi-gen=true
// +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object
// +kubebuilder:object:root=true
// +kubebuilder:resource:path=resourceoutlines,singular=resourceoutline
type ResourceOutline struct {
metav1.TypeMeta `json:",inline"`
// +optional
metav1.ObjectMeta `json:"metadata,omitempty"`
Spec ResourceOutlineSpec `json:"spec,omitempty"`
}
type ResourceOutlineSpec struct {
Resource kmapi.ResourceID `json:"resource"`
DefaultLayout bool `json:"defaultLayout"`
Header *PageBlockOutline `json:"header,omitempty"`
TabBar *PageBlockOutline `json:"tabBar,omitempty"`
Pages []ResourcePageOutline `json:"pages,omitempty"`
}
type ResourcePageOutline struct {
Name string `json:"name"`
Info *PageBlockOutline `json:"info,omitempty"`
Insight *PageBlockOutline `json:"insight,omitempty"`
Blocks []PageBlockOutline `json:"blocks,omitempty"`
}
// +kubebuilder:validation:Enum=Block;Self;SubTable;Connection
type TableKind string
const (
TableKindBlock TableKind = "Block"
TableKindConnection TableKind = "Connection"
TableKindSubTable TableKind = "SubTable"
TableKindSelf TableKind = "Self"
)
type PageBlockOutline struct {
Kind TableKind `json:"kind"` // ResourceBlockDefinition | Connection | Subtable(Field)
Name string `json:"name,omitempty"`
FieldPath string `json:"fieldPath,omitempty"`
*ResourceLocator `json:",inline,omitempty"`
DisplayMode ResourceDisplayMode `json:"displayMode,omitempty"`
Actions *ResourceActions `json:"actions,omitempty"`
View *ResourceTableDefinitionRef `json:"view,omitempty"`
}
type ResourceTableDefinitionRef struct {
Name string `json:"name,omitempty"`
Columns []ResourceColumnDefinition `json:"columns,omitempty"`
}
// +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object
// +kubebuilder:object:root=true
type ResourceOutlineList struct {
metav1.TypeMeta `json:",inline"`
metav1.ListMeta `json:"metadata,omitempty"`
Items []ResourceOutline `json:"items,omitempty"`
}
|
// Define the WalletManagerDisconnectReason enum
public enum WalletManagerDisconnectReason {
case networkError
case authenticationError
case insufficientFunds
// Add more reasons as needed
}
// Implement the getDisconnectReason function
func getDisconnectReason(state: WalletManagerState) -> WalletManagerDisconnectReason? {
switch state {
case .disconnected(let reason):
return reason
default:
return nil
}
}
// Example usage
let disconnectedState = WalletManagerState.disconnected(reason: .networkError)
let connectedState = WalletManagerState.connected
if let reason = getDisconnectReason(state: disconnectedState) {
print("Disconnected due to: \(reason)")
} else {
print("Not in a disconnected state")
}
if let reason = getDisconnectReason(state: connectedState) {
print("Disconnected due to: \(reason)")
} else {
print("Not in a disconnected state")
} |
#!/bin/bash
set -eo pipefail
ARCH="$1"
echo "Customize ${ARCH}-bit version of Raspberry Pi OS Lite disk image"
if [ "${ARCH}" = "32" ]; then
DOWNLOAD_DIR="$(curl --silent 'https://downloads.raspberrypi.org/raspios_lite_armhf/images/?C=M;O=D' | grep --extended-regexp --only-matching 'raspios_lite_armhf-[0-9]{4}-[0-9]{2}-[0-9]{2}' | head -n 1)"
DOWNLOAD_ZIP_FILE="$(curl --silent "https://downloads.raspberrypi.org/raspios_lite_armhf/images/${DOWNLOAD_DIR}/" | grep --extended-regexp --only-matching '[0-9]{4}-[0-9]{2}-[0-9]{2}-raspios-buster-armhf-lite\.zip' | head -n 1)"
DOWNLOAD_FILENAME="${DOWNLOAD_ZIP_FILE%%.*}"
echo "Download latest disk image archive"
wget --no-verbose --no-clobber "https://downloads.raspberrypi.org/raspios_lite_armhf/images/${DOWNLOAD_DIR}/${DOWNLOAD_ZIP_FILE}"
echo "Verify downloaded disk image archive"
wget --no-verbose --no-clobber "https://downloads.raspberrypi.org/raspios_lite_armhf/images/${DOWNLOAD_DIR}/${DOWNLOAD_ZIP_FILE}.sha256"
sha256sum --check "${DOWNLOAD_ZIP_FILE}.sha256"
fi
if [ "${ARCH}" = "64" ]; then
DOWNLOAD_DIR="$(curl --silent 'https://downloads.raspberrypi.org/raspios_lite_arm64/images/?C=M;O=D' | grep --extended-regexp --only-matching 'raspios_lite_arm64-[0-9]{4}-[0-9]{2}-[0-9]{2}' | head -n 1)"
DOWNLOAD_ZIP_FILE="$(curl --silent "https://downloads.raspberrypi.org/raspios_lite_arm64/images/${DOWNLOAD_DIR}/" | grep --extended-regexp --only-matching '[0-9]{4}-[0-9]{2}-[0-9]{2}-raspios-buster-arm64-lite\.zip' | head -n 1)"
DOWNLOAD_FILENAME="${DOWNLOAD_ZIP_FILE%%.*}"
echo "Download latest disk image archive"
wget --no-verbose --no-clobber "https://downloads.raspberrypi.org/raspios_lite_arm64/images/${DOWNLOAD_DIR}/${DOWNLOAD_ZIP_FILE}"
echo "Verify downloaded disk image archive"
wget --no-verbose --no-clobber "https://downloads.raspberrypi.org/raspios_lite_arm64/images/${DOWNLOAD_DIR}/${DOWNLOAD_ZIP_FILE}.sha256"
sha256sum --check "${DOWNLOAD_ZIP_FILE}.sha256"
fi
echo "Unarchive zip file"
unzip -qo "${DOWNLOAD_ZIP_FILE}"
echo "Append 512MB to disk image"
dd if=/dev/zero bs=512M count=1 >> "${DOWNLOAD_FILENAME}.img"
echo "Set up loop devices"
LOOP_DEVICE="$(sudo losetup --find --show --partscan "${DOWNLOAD_FILENAME}.img")"
echo "Resize rootfs partition"
DISK_IMAGE_END="$(sudo parted --machine "${LOOP_DEVICE}" print free | tail -1 | cut -d ":" -f 3)"
sudo parted "${LOOP_DEVICE}" resizepart 2 "${DISK_IMAGE_END}"
echo "Grow filesystem of rootfs partition"
sudo e2fsck -f "${LOOP_DEVICE}p2"
sudo resize2fs "${LOOP_DEVICE}p2"
echo "Mount loop devices"
mkdir --parents boot
sudo mount "${LOOP_DEVICE}p1" boot
mkdir --parents rootfs
sudo mount "${LOOP_DEVICE}p2" rootfs
echo "Customize disk image"
./customize.sh "${DOWNLOAD_ZIP_FILE}"
echo "Flush write cache"
sync
echo "Umount loop devices"
# Wait 5 secs to get rid of "target is busy" error
sleep 5
sudo umount boot
rm --recursive --force boot
sudo umount rootfs
rm --recursive --force rootfs
echo "Detach loop devices"
sudo losetup --detach-all
echo "Show customized disk image"
ls -lh ./*.img
|
import os
import numpy as np
import tifffile
import imctools.external.omexml as ome
from xml.etree import cElementTree as ElementTree
import sys
import warnings
from imctools.io import change_dtype, CHANGE_DTYPE_LB_WARNING, CHANGE_DTYPE_UB_WARNING
if sys.version_info.major == 3:
from io import StringIO
uenc = 'unicode'
else:
from cStringIO import StringIO
uenc = 'utf-8'
class TiffWriter(object):
"""
"""
pixeltype_dict = ({np.int64().dtype: ome.PT_FLOAT,
np.int32().dtype: ome.PT_INT32,
np.int16().dtype: ome.PT_INT16,
np.uint16().dtype: ome.PT_UINT16,
np.uint32().dtype: ome.PT_UINT32,
np.uint8().dtype: ome.PT_UINT8,
np.float32().dtype: ome.PT_FLOAT,
np.float64().dtype: ome.PT_DOUBLE
})
pixeltype_np = {
ome.PT_FLOAT: np.dtype('float32'),
ome.PT_DOUBLE: np.dtype('float64'),
ome.PT_UINT8: np.dtype('uint8'),
ome.PT_UINT16: np.dtype('uint16'),
ome.PT_UINT32: np.dtype('uint32'),
ome.PT_INT8: np.dtype('int8'),
ome.PT_INT16: np.dtype('int16'),
ome.PT_INT32: np.dtype('int32')
}
def __init__(self, file_name, img_stack, pixeltype =None, channel_name=None, original_description=None, fluor=None):
self.file_name = file_name
self.img_stack = img_stack
self.channel_name = channel_name
if fluor is None:
self.fluor = channel_name
else:
self.fluor = fluor
if pixeltype is None:
pixeltype = self.pixeltype_dict[img_stack.dtype]
self.pixeltype = pixeltype
if original_description is None:
original_description = ''
self.original_description = original_description
def save_image(self, mode='imagej', compression=0, dtype=None, bigtiff=None):
"""
Saves the image as a tiff
:param mode: Specifies the tiff writing mode. Either 'imagej' or 'ome'
for .ome.tiff's
:param compression: Tiff compression level.
Default to 0 (no compression)
Internaly compressed tiffs are more incompatible and
not memory-mappable
:param dtype: dtype of the output tiff.
Default: take the dtype of the original data
:param bigtiff: should the tiff be writen as a 'bigtiff'?
'bigtiff' support >4gb of data, but are less widely
compatible.
Default: for 'imagej' mode: False
for 'ome' mode: True
"""
#TODO: add original metadata somehow
fn_out = self.file_name
img = self.img_stack.swapaxes(2, 0)
if dtype is not None:
dt = np.dtype(dtype)
else:
dt = self.pixeltype_np[self.pixeltype]
img = change_dtype(img, dt)
# img = img.reshape([1,1]+list(img.shape)).swapaxes(2, 0)
if mode == 'imagej':
if bigtiff is None:
bigtiff=False
tifffile.imsave(fn_out, img, compress=compression, imagej=True,
bigtiff=bigtiff)
elif mode == 'ome':
if bigtiff is None:
bigtiff=True
xml = self.get_xml(dtype=dtype)
tifffile.imsave(fn_out, img, compress=compression, imagej=False,
description=xml, bigtiff=bigtiff)
# def save_xml(self):
# xml = self.get_xml()
# with open(self.file_name+'.xml', 'w') as f:
# f.write(xml)
#
# def save_ome_tiff(self):
# #TODO: does not work
# img = self.img_stack.astype(np.uint16)
# print(img.shape)
# javabridge.start_vm(class_path=bioformats.JARS)
# bioformats.write_image(self.file_name, pixels=img, pixel_type='uint16', c=0, z=0, t=0,
# size_z=1, size_t=1, channel_metals=self.channel_name)
# javabridge.kill_vm()
@property
def nchannels(self):
return self.img_stack.shape[2]
def get_xml(self, dtype=None):
if dtype is not None:
pixeltype = self.pixeltype_dict[dtype]
else:
pixeltype = self.pixeltype
img = self.img_stack
omexml = ome.OMEXML()
omexml.image(0).Name = os.path.basename(self.file_name)
p = omexml.image(0).Pixels
p.SizeX = img.shape[0]
p.SizeY = img.shape[1]
p.SizeC = self.nchannels
p.SizeT = 1
p.SizeZ = 1
p.DimensionOrder = ome.DO_XYCZT
p.PixelType = pixeltype
p.channel_count = self.nchannels
for i in range(self.nchannels):
channel_info = self.channel_name[i]
p.Channel(i).set_SamplesPerPixel(1)
p.Channel(i).set_Name(channel_info)
p.Channel(i).set_ID('Channel:0:' + str(i))
p.Channel(i).node.set('Fluor', self.fluor[i])
# adds original metadata as annotation
if self.original_description is not None:
if isinstance(self.original_description,
type(ElementTree.Element(1))):
result = StringIO()
ElementTree.ElementTree(self.original_description).write(result,
encoding=uenc, method="xml")
desc = result.getvalue()
else:
desc = str(self.original_description)
omexml.structured_annotations.add_original_metadata(
'MCD-XML',
desc)
xml = omexml.to_xml()
return xml
|
// <gh_stars>1-10
// 5550. 나는 개구리로소이다
// 2019.07.07
#include<iostream>
#include<string>
using namespace std;
int main()
{
int test_case;
int T;
cin >> T;
string croak = "croak";
for (test_case = 1; test_case <= T; ++test_case)
{
string s;
cin >> s;
int pos = 0;
int ans = 0;
bool flag = true;
// 개구리를 찾았는지 찾지 못했는지
bool find = false;
//
bool ansFlag = true;
string tmp = "";
while (flag)
{
tmp = s;
find = false;
for (int i = 0; i < s.size(); i++)
{
// 개구리 울음소리 검사. 검사된건 '0'으로 바꿈
if (s[i] == croak[pos])
{
s[i] = '0';
pos++;
// 끝까지 도달하면 개구리를 찾음
if (pos == 5)
{
find = true;
pos = 0;
}
}
// 최종 검사
if (i == s.size() - 1)
{
// 개구리를 찾음
if (find)
{
// 울음소리가 끝나지 않았다면 개구리가 아님
if (pos != 0)
{
ansFlag = false;
break;
}
}
// 개구리를 찾지 못함
else
{
s = tmp;
flag = false;
}
}
}
if (find)
{
ans++;
}
}
for (int i = 0; i < s.size(); i++)
{
// 울음소리 흔적이 남아있다면 개구리 울음소리가 아님.
if (s[i] != '0')
{
ansFlag = false;
}
}
if (ansFlag)
{
cout << "#" << test_case << " " << ans << endl;
}
else
{
cout << "#" << test_case << " -1" << endl;
}
}
return 0;
}
|
// <gh_stars>0
package pulse.ui.components.models;
|
#!/bin/bash
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# Release CI script: builds the Linux CPU TensorFlow pip package for
# Python 3.7 and runs its test suite via pip_new.sh.
# -e: abort on first failing command; -x: echo commands for the CI log.
set -e
set -x
source tensorflow/tools/ci_build/release/common.sh
install_ubuntu_16_python_pip_deps python3.7
# Update bazel
install_bazelisk
# Export required variables for running pip.sh
export OS_TYPE="UBUNTU"
export CONTAINER_TYPE="CPU"
export TF_PYTHON_VERSION='python3.7'
# Resolve the interpreter path from the version chosen above.
export PYTHON_BIN_PATH="$(which ${TF_PYTHON_VERSION})"
# Get the default test targets for bazel.
source tensorflow/tools/ci_build/build_scripts/DEFAULT_TEST_TARGETS.sh
# Export optional variables for running pip.sh
export TF_BUILD_FLAGS="--config=release_cpu_linux"
export TF_TEST_FLAGS="--define=no_tensorflow_py_deps=true --test_lang_filters=py --test_output=errors --verbose_failures=true --keep_going --test_env=TF2_BEHAVIOR=1"
# TF Lite targets are excluded from the default set.
export TF_TEST_TARGETS="${DEFAULT_BAZEL_TARGETS} -//tensorflow/lite/... "
export TF_PIP_TESTS="test_pip_virtualenv_non_clean test_pip_virtualenv_clean"
export TF_TEST_FILTER_TAGS='-no_oss,-oss_serial,-no_oss_py37,-v1only'
#export IS_NIGHTLY=0 # Not nightly; uncomment if building from tf repo.
export TF_PROJECT_NAME="tensorflow_cpu"
export TF_PIP_TEST_ROOT="pip_test"
# Build, package, and test using the variables exported above.
./tensorflow/tools/ci_build/builds/pip_new.sh
|
#!/bin/bash
#SBATCH --account=def-dkulic
#SBATCH --mem=8000M # memory per node
#SBATCH --time=10:00:00 # time (DD-HH:MM)
#SBATCH --output=/project/6001934/lingheng/Double_DDPG_Job_output/discrete_Acrobot-v1_doule_ddpg_softcopy_action_noise_seed4_run1_%N-%j.out # %N for node name, %j for jobID
# Slurm batch job: trains discrete-action double DDPG (soft target copy,
# action-space exploration noise) on Acrobot-v1 with seed 4, run 1.
# Load the toolchain modules, activate the TensorFlow virtualenv, and launch.
module load qt/5.9.6 python/3.6.3 nixpkgs/16.09 gcc/7.3.0 boost/1.68.0 cuda cudnn
source ~/tf_cpu/bin/activate
python ./ddpg_discrete_action.py --env Acrobot-v1 --random-seed 4 --exploration-strategy action_noise --summary-dir ../Double_DDPG_Results_no_monitor/discrete/Acrobot-v1/doule_ddpg_softcopy_action_noise_seed4_run1
|
import { Component, OnInit, Input } from '@angular/core';
/**
 * Presentational image carousel component.
 *
 * Fix: the inputs used boxed wrapper types (`String`, `Number`,
 * `Array<String>`) instead of the primitive types `string`/`number`;
 * wrapper object types are almost never intended in TypeScript and
 * reject ordinary string/number bindings under strict checking.
 */
@Component({
  selector: 'app-carousel',
  templateUrl: './carousel.component.html',
  styleUrls: ['./carousel.component.scss'],
})
export class CarouselComponent implements OnInit {
  /** URLs of the images to cycle through. */
  @Input() images: string[];
  /** Minimum height of the carousel (CSS size value, e.g. "200px"). */
  @Input() minHeight: string;
  /** Whether the carousel advances automatically. */
  @Input() autoPlay: boolean;
  /** Whether previous/next navigation arrows are rendered. */
  @Input() showArrows: boolean;
  /** Delay between automatic advances — presumably milliseconds; confirm against the template. */
  @Input() autoPlayInterval: number;

  constructor() {}

  ngOnInit(): void {}
}
|
#!/usr/bin/env bash
# Container entrypoint: brings up nginx behind supervisord over plain HTTP,
# then upgrades to HTTPS using either Let's Encrypt (when LETSENCRYPT_EMAIL
# is set) or certificates mounted at /etc/certs, and finally blocks forever
# so the container stays alive.
export LETSENCRYPT_EMAIL=${LETSENCRYPT_EMAIL:-''}
export DOMAIN=${DOMAIN:-'localhost'}
# Start from a clean vhost configuration (HTTP only).
rm -f /etc/nginx/sites-enabled/* /etc/nginx/sites-available/*
cp /opt/http.conf /etc/nginx/sites-enabled/http.conf
cp /opt/supervisor.conf /etc/supervisor/conf.d/supervisor.conf
# Run supervisord (which manages nginx) in the background; give it a moment.
/usr/bin/supervisord -c /etc/supervisor/conf.d/supervisor.conf &
sleep 1
if [[ -n $LETSENCRYPT_EMAIL ]]; then
# testing https://acme-staging-v02.api.letsencrypt.org/directory
# production https://acme-v02.api.letsencrypt.org/directory
# Only request new certificates if none were previously issued for DOMAIN.
if [[ ! -d /etc/letsencrypt/archive/${DOMAIN} ]]; then
echo "obtaining ssl certificates from letsencrypt"
mkdir -p /var/www/letsencrypt/.well-known/acme-challenge/ && \
certbot certonly --webroot -w /var/www/letsencrypt -d ${DOMAIN} --agree-tos --email ${LETSENCRYPT_EMAIL} --non-interactive --text --server https://acme-v02.api.letsencrypt.org/directory && \
chmod 640 /etc/letsencrypt/archive/${DOMAIN}/privkey*
else
echo "reusing previously obtained ssl certificates from letsencrypt"
fi
# Swap the HTTP vhost for the HTTPS (Let's Encrypt) template and reload nginx.
rm -f /etc/nginx/sites-enabled/* /etc/nginx/sites-available/* && \
envsubst '${DOMAIN}' < /opt/https.le.conf.tpl > /etc/nginx/sites-enabled/https.le.conf && \
nginx -s reload && \
echo "mainnet rest api server runs on https://${DOMAIN}"
elif [[ -d /etc/certs ]]; then
echo "using certificates provided in certs directory"
# Swap the HTTP vhost for the pre-provisioned-certificate HTTPS config.
rm -f /etc/nginx/sites-enabled/* /etc/nginx/sites-available/* && \
cp /opt/https.conf /etc/nginx/sites-enabled/https.conf
nginx -s reload && \
echo "mainnet rest api server runs on https://${DOMAIN}"
else
echo "local mainnet server runs on http://${DOMAIN}"
fi
# Keep the container's main process alive indefinitely.
tail -f /dev/null
|
#!/bin/sh
# Delete the BG1 creature (.cre) files that BGT supersedes or does not use.
# Behaves exactly like the original one-"rm"-per-line script: files are
# removed in the same order, and each rm's error output (e.g. "No such
# file") is appended to bgt/bash.debug.

# Remove bg1cre/<name>.cre for every name given, logging errors.
remove_cre() {
    for name in "$@"; do
        rm "bg1cre/$name.cre" 2>>bgt/bash.debug
    done
}

remove_cre ajantd amnis2 archdr avery
#rm bg1cre/bandit2.cre - this is now used by bgt-weidu
remove_cre bandit3 bart8 bart9 bearpo2 beggba_a beggba_b beggba_c beggba_d
remove_cre blah6666 boyba1_b boyba3 chickdef cookge cowled cult4 cultass
remove_cre d2dop d2dur d2kie deagan dean dialogmo digger_a digger_b
remove_cre digger_c digger_d digger_e doppl7 drizzatt drizzdef druid2 druidc
remove_cre edie eldotd elmins ennahe entar erik3 erlinh farm2
remove_cre farmbe farmwe feldan fenwic ffowb3 flam8 flamc7 flind_a
remove_cre flind_b flind_c ftowb1 ftowb2 ftowb3 ftowb4 ftowbe ftowbesn
remove_cre ftowfr_a ftowfr_c ftowfrsn ftowna_c ftowna_d ftownasc ftowubsc ftowubsn
remove_cre ftwbax furnel galkin gateke gibber02 girba3_b girba3_d girba3_e
remove_cre girlbax girlbe glowtes2 glowtest
#rm bg1cre/gnoll02.cre - used by sobh/dsotsc/ntotsc
#rm bg1cre/gnoll03.cre - used by sobh/dsotsc/ntotsc
remove_cre gnoll04 gnoll05 gnoll2
#rm bg1cre/gnoll3.cre - used by sobh/dsotsc/ntotsc
remove_cre govern guarbe guarca haffg2 halfen halfen2 halfg2 hasten
remove_cre hentold hobgo2 hobgo3 hobgo4 hobgo6 hobgob03 hobgob04 hobgob05
remove_cre hobgod ignati ironho jeb jellyco keelor kiel kobold02
remove_cre kobold03 kobold10 kobold11 kobold12 kobold2 kobold4 kobold5 kobold6
remove_cre kobold8 kobold9 krotan lenore llinde lowlgu malkal mcook6
remove_cre memnis merch3 mercha mergen merlin messed monk monkd2
remove_cre monkde monken monktu15 monktu16 monktu17 monktu18 monktu19 monktu20
remove_cre monktu6 mtbe12 mtob10 mtob4 mtob5 mtowfrsn mtowna_e mtownasn
remove_cre mtowubsn mtowubst narrat2 nasha nelori nhuntc nimfur nobl13
remove_cre nobl15 noblay noblba noblbaz noblsn_a noblsn_b nobw6 nobwba_b
remove_cre nobwba_c nobwsn_a nobwsn_b ogre03 ogre04 ogre05 ogre_a ogre_b
remove_cre ogre_c ogre_d ogre_e ogred ogregr1 ogregr2 ogregr3 ogregr4
remove_cre ogregr_a ogreha1 ogreha2 ogreha3 ogreha4 ogreha5 ogreha_a ogreha_c
remove_cre ogreha_d ogreha_e ogremba olivia perfba pique pnask priilm
remove_cre prosba prosba_a prosbax ray read read4 read5 read6
remove_cre repman retard satos servwg serwen sevsun shadow2 shalas
remove_cre shara shartd shelto shiloc shop02 shop05 shopkeg shopkn2
remove_cre silvgu
#rm bg1cre/skelet02.cre - used by sobh/dsotsc/ntotsc
remove_cre skelmel slave2 slave3 solia2 svlast sword tamoko2 tasloi02
remove_cre tasloi03 tester thug tiaxd travel utor valino velvet
remove_cre verlin voice volose wareho warehx warrid welt werewogr
remove_cre whitkd wilton wolfw2 wolfw3 womand xvart02 xvart03 yeslid
#existing creatures
remove_cre bearbl bearblsu bearbr bearbrsu bearcasu belt bird bjorni
remove_cre boyba1 brage2 cat catdead catp cow cowh davaeo
remove_cre dogwasu dogwisu dreppi dreppi2 dreppi3 fireb1 firebe flam13
remove_cre ghoulsu gibbersu gnollsu gorion gorion3 hobgobsu horse innke2
remove_cre jellgr jellmu jelloc jellygr jondal jondalw keeper koboldsu
remove_cre liia monktu1 nalin nobw4 ogregrsu phlydi plyogre plyspid
remove_cre plywolf poghm10 poghm9 poghma4 poghma5 poghma7 ragefa rat
remove_cre rat2 reevor reevor2 reevor3 schlum seagul skelded skele2
remove_cre sleepdw sleepfh sleepmh squirr tasloisu tethto2 tethto3 voieas
remove_cre voinor voisou voiwes voltin watch2 watch3 watch6 watch9
remove_cre winthr2 winthr3 wolfchar wolfdi wolfdisu wolfsu worgsu xvartsu
#existing and unused creatures
remove_cre bird_in bird_ine bird_inn bird_ins bird_inw charbase drunk2 e31
remove_cre e32 e33 e34 e35 e36 poghm3 poghm6 poghm8
remove_cre poghma poghma3 poghma6 poghma8 squirl
#bgt-included npcs
remove_cre ajanti4 ajanti6 ajanti alora6 alora branwe5 branwe coran5
remove_cre coran dynahe2 dynahe4 dynahe6 dynahe edwin2 edwin4 edwin6
remove_cre edwin eldoth5 eldoth faldor5 faldor garric2 garric4 garric6
remove_cre garric imoen1 imoen2 imoen4 imoen6 imoen jaheir2 jaheir4
remove_cre jaheir6 jaheir kagain2 kagain4 kagain6 kagain khalid2 khalid4
remove_cre khalid6 khalid kivan4 kivan6 kivan minsc2 minsc4 minsc6
remove_cre minsc montar2 montar4 montar6 montar quayle4 quayle6 quayle
remove_cre safana4 safana6 safana sharte4 sharte6 sharte skie6 skie
remove_cre tiax4 tiax6 tiax viconi4 viconi6 viconi xan4 xan6
remove_cre xan xzar2 xzar4 xzar6 xzar yeslic5 yeslic
# Log activity: print a banner announcing the db2 client installation step.
printf '%s\n' "==============================================================================="
printf '%s\n' "Install db2 client"
printf '%s\n' "==============================================================================="
|
# models.py
from django.db import models
class SubscriptionPlan(models.Model):
    """A purchasable subscription plan."""

    # Display name of the plan.
    name = models.CharField(max_length=100)
    # Plan price; up to 10 digits with 2 decimal places (currency not modeled here).
    price = models.DecimalField(max_digits=10, decimal_places=2)
    # Whether the subscription renews automatically at the end of its term.
    automatic_renewal = models.BooleanField(default=False, verbose_name='has automatic renewal')
class Quota(models.Model):
    """A named usage quota with an integer limit."""

    # Display name of the quota.
    name = models.CharField(max_length=100)
    # Maximum allowed units; unit semantics depend on the quota -- confirm with callers.
    limit = models.IntegerField()
# migrations file (e.g., 0002_alter_order_field.py)
from django.db import migrations, models
class Migration(migrations.Migration):
    """Re-declares the ``order`` field on SubscriptionPlan and Quota as a
    non-editable, indexed positive integer.

    NOTE(review): the model definitions earlier in this file do not show an
    ``order`` field; it is presumably contributed by an ordering mixin
    (e.g. django-ordered-model) -- confirm before applying. The app label
    'yourappname' is a placeholder and must match the real app.
    """

    dependencies = [
        ('yourappname', '0001_initial'),
    ]

    operations = [
        migrations.AlterField(
            model_name='subscriptionplan',
            name='order',
            field=models.PositiveIntegerField(db_index=True, editable=False, verbose_name='order'),
        ),
        migrations.AlterField(
            model_name='quota',
            name='order',
            field=models.PositiveIntegerField(db_index=True, editable=False, verbose_name='order'),
        ),
    ]
package edu.washington.cse.instrumentation.analysis.utils;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.yaml.snakeyaml.DumperOptions;
import org.yaml.snakeyaml.Yaml;
import soot.Scene;
import soot.SootClass;
import soot.SootMethod;
/**
 * One-shot utility: runs Soot over an application (all args are forwarded to
 * {@code soot.Main}), collects every public "getXxx" method from the
 * application classes, and dumps a YAML document to stderr that maps each
 * getter signature to the JavaBeans-style property name it exposes,
 * preceded by a single "access-sigs" index entry.
 */
public class GetterGenerator {
public static void main(final String[] args) {
// One YAML list entry per getter, plus the leading "access-sigs" entry.
final List<Object> def = new ArrayList<>();
final Set<SootMethod> accessMethods = new HashSet<>();
// Let Soot load and process the application classes before we query Scene.
soot.Main.main(args);
for(final SootClass sc : Scene.v().getApplicationClasses()) {
for(final SootMethod m : sc.getMethods()) {
// Skip names no longer than "get" itself (e.g. a bare "get()").
if(m.getName().length() <= 3) {
continue;
}
if(!m.getName().startsWith("get")) {
continue;
}
if(!m.isPublic()) {
continue;
}
// "getFooBar" -> "fooBar" (lower-case the first property character).
final String rawPropertyName = m.getName().substring(3);
final String propertyName = rawPropertyName.substring(0, 1).toLowerCase() + rawPropertyName.substring(1);
final Map<String, Object> entry = new HashMap<>();
entry.put("sig", m.getSignature());
entry.put("resources", Collections.singletonList(propertyName));
accessMethods.add(m);
def.add(entry);
}
}
// Flatten the collected getters into a signature list for the index entry.
final List<String> methodSigs = new ArrayList<>();
for(final SootMethod am : accessMethods) {
methodSigs.add(am.getSignature());
}
final Map<String, Object> entry = new HashMap<>();
entry.put("access-sigs", methodSigs);
def.add(0, entry);
// Unlimited line width so each YAML entry stays on one line.
final DumperOptions dOpt = new DumperOptions();
dOpt.setWidth(Integer.MAX_VALUE);
final Yaml y = new Yaml(dOpt);
// NOTE(review): the PrintWriter wrapping System.err is never explicitly
// flushed or closed here; SnakeYAML appears to flush when the dump
// finishes -- confirm the output is not truncated.
y.dump(def, new PrintWriter(System.err));
}
}
|
<gh_stars>0
import { Injectable } from '@angular/core';
import { Http } from "@angular/http";

import { Observable } from 'rxjs';
import "rxjs/add/observable/fromEvent";
import "rxjs/add/operator/map";
import "rxjs/add/operator/toPromise";

import { ChatMessage } from "./chat-message";
/**
 * Chat backend client: REST endpoints for history/posting plus a WebSocket
 * stream of incoming messages.
 *
 * Fixes: the class uses `Observable.fromEvent` and `.map`, but the rxjs v5
 * patch imports that install them (`rxjs/add/observable/fromEvent`,
 * `rxjs/add/operator/map`) were missing from the file, causing a runtime
 * TypeError; `getMessages` also took a required, implicitly-`any`, unused
 * parameter — it is now optional and typed (backward compatible).
 */
@Injectable()
export class ChatService {
  apiRoot: string = "http://localhost:5000/api/chat";
  wsRoot: string = "ws://localhost:5000/ws";

  webSocket: WebSocket;
  /** Stream of messages pushed by the server over the WebSocket. */
  newMessageStream: Observable<ChatMessage>;

  constructor(private http: Http) {
    this.webSocket = new WebSocket(this.wsRoot);
    // Each WebSocket "message" event carries a JSON-encoded chat message.
    this.newMessageStream = Observable.fromEvent(this.webSocket, "message")
      .map(event => JSON.parse((event as MessageEvent).data))
      .map(msg => new ChatMessage(msg));
  }

  /**
   * Fetches the message history, reversed so the oldest message comes first
   * (the API presumably returns newest-first -- confirm against the server).
   * @param options reserved for future query options; currently unused.
   */
  getMessages(options?: object): Promise<ChatMessage[]> {
    return this.http.get(`${ this.apiRoot }`)
      .toPromise()
      .then(res => res.json().map(msg => new ChatMessage(msg)))
      .then(msgs => msgs.reverse());
  }

  /** Posts a new message payload to the chat API. */
  postMessage(messagePayload: object) {
    return this.http.post(`${ this.apiRoot }`, messagePayload)
      .toPromise();
  }
}
|
#include<stdio.h>
/* Read two 2x2 integer matrices from stdin, add them element-wise, and
 * print the resulting matrix, one row per line. */
int main()
{
    int first[2][2], second[2][2], sum[2][2];
    int row, col;

    /* Read the first matrix. */
    printf("Enter elements of 1st matrix\n");
    for (row = 0; row < 2; ++row) {
        for (col = 0; col < 2; ++col) {
            scanf("%d", &first[row][col]);
        }
    }

    /* Read the second matrix. */
    printf("Enter elements of 2nd matrix\n");
    for (row = 0; row < 2; ++row) {
        for (col = 0; col < 2; ++col) {
            scanf("%d", &second[row][col]);
        }
    }

    /* Element-wise sum. */
    for (row = 0; row < 2; ++row) {
        for (col = 0; col < 2; ++col) {
            sum[row][col] = first[row][col] + second[row][col];
        }
    }

    /* Print the result; newline after each completed row. */
    printf("Resultant matrix\n");
    for (row = 0; row < 2; ++row) {
        for (col = 0; col < 2; ++col) {
            printf("%d ", sum[row][col]);
        }
        printf("\n");
    }
    return 0;
}
/*
*
*/
package net.community.chest.web.servlet;
import javax.servlet.http.HttpServletRequest;
import net.community.chest.lang.StringUtil;
import net.community.chest.util.AbstractParametersHolder;
/**
* <P>Copyright as per GPLv2</P>
* <P>Provides useful decoding methods to {@link HttpServletRequest} parameters</P>
* @author <NAME>.
* @since Nov 23, 2010 11:18:05 AM
*/
public class ServletRequestParameters extends AbstractParametersHolder {
    /** The wrapped request; may be null, in which case lookups return the default. */
    private HttpServletRequest _request;

    public ServletRequestParameters (HttpServletRequest request)
    {
        _request = request;
    }

    public ServletRequestParameters ()
    {
        this(null);
    }

    public HttpServletRequest getRequest ()
    {
        return _request;
    }

    public void setRequest (HttpServletRequest request)
    {
        _request = request;
    }

    /*
     * @see net.community.chest.util.ParametersHolder#getParameter(java.lang.String, java.lang.String)
     */
    @Override
    public String getParameter (final String paramName, final String defValue)
    {
        final HttpServletRequest req = getRequest();
        String value = null;
        // Only consult the request when both it and the parameter name are usable.
        if ((req != null) && (paramName != null) && (paramName.length() > 0)) {
            value = req.getParameter(paramName);
        }
        if ((value == null) || value.isEmpty()) {
            return defValue;
        }
        // Strip surrounding delimiters before handing the value back.
        return StringUtil.stripDelims(value);
    }
}
|
// Package main
package main
import (
"fmt"
)
// search returns the index of the first occurrence of value in slice,
// or -1 when the value is not present.
func search(slice []int, value int) int {
	for idx := 0; idx < len(slice); idx++ {
		if slice[idx] == value {
			return idx
		}
	}
	return -1
}
func main() {
slice := []int{21, 23, 24, 25}
value := 24
index := search(slice, value)
fmt.Printf("Index of %d is %d\n", value, index)
} |
package tr.com.minicrm.productgroup.data.postgresql;
import tr.com.minicrm.productgroup.data.ProductGroup;
/** Plain data holder implementing {@link ProductGroup} for the PostgreSQL backend. */
public class ProductGroupImpl implements ProductGroup {

    private Long id;      // database identity; null until persisted
    private String name;  // display name of the product group
    private int version;  // version counter

    /** Creates a fully populated instance (e.g. when loading from the database). */
    public ProductGroupImpl(Long id, String name, int version) {
        this.id = id;
        this.name = name;
        this.version = version;
    }

    /** Creates an instance that only carries a name; id and version stay unset. */
    public ProductGroupImpl(String name) {
        this.name = name;
    }

    /** Creates an empty instance; all fields keep their default values. */
    public ProductGroupImpl() {
    }

    @Override
    public Long getId() {
        return id;
    }

    @Override
    public String getName() {
        return name;
    }

    @Override
    public int getVersion() {
        return version;
    }
}
|
'use strict';
const childProcess = require('child_process');
const fs = require('fs');
const path = require('path');
// Collect every .cpp file that lives exactly one directory below `cppDir`.
function getFiles(cppDir) {
  // Immediate subdirectories of cppDir.
  const subdirs = fs.readdirSync(cppDir).filter((entry) => {
    return fs.lstatSync(path.join(cppDir, entry)).isDirectory();
  });

  // For each subdirectory, the full paths of its .cpp files.
  const nested = subdirs.map((dirname) => {
    return fs
      .readdirSync(path.join(cppDir, dirname))
      .filter((entry) => entry.endsWith('.cpp'))
      .map((entry) => path.join(cppDir, dirname, entry));
  });

  // Flatten one level: [[a, b], [c]] -> [a, b, c].
  return nested.reduce((acc, group) => acc.concat(group), []);
}
// Run a shell command; resolve on success, reject on failure.
// On failure (or when printOutput is set) echo the command's stdout,
// prefixed with the .cpp source name derived from the command's path.
function execute(command, printOutput = false) {
  // Derive the source name from a trailing ".../c++/<dir>/<file>" segment.
  const match = command.match(/\/c\+\+\/([\w-_/.]+)$/);
  const target = match && match.length > 1 ? match[1] : '';
  const printFile = target.replace(/\.out$/, '.cpp');

  return new Promise((resolve, reject) => {
    childProcess.exec(command, (err, stdout, stderr) => {
      if (err) {
        console.log(`${printFile}\n${stdout}`);
        reject(err);
        return;
      }
      if (printOutput) {
        console.log(`${printFile}\n${stdout}`);
      }
      resolve();
    });
  });
}
// Compile a single .cpp file and run the resulting binary, echoing its output.
//
// Fix: the compile command previously interpolated a bogus `$(unknown)`
// token instead of the source file (so g++ had no input and every compile
// failed), and used the non-standard `--output` flag instead of `-o`.
function runTest(filename) {
  const outputFile = filename.replace(/\.cpp$/, '.out');
  // -o names the binary; the source file is the final argument.
  const compileCommand = `g++ --std=c++11 -o ${outputFile} ${filename}`;
  const executeCommand = outputFile;
  return execute(compileCommand)
    .then(execute.bind(null, executeCommand, true));
}
// Delete every compiled .out binary under the current directory.
function clearOutputFiles() {
  return execute('find . -name "*.out" -delete');
}
// Entry point when run directly (not required as a module): compile and run
// either the files named on the command line or every .cpp under c++/, then
// clean up the produced binaries.
if (!module.parent) {
  const cliFiles = process.argv.slice(2);
  const cppDir = path.join(__dirname, 'c++');
  const targets = cliFiles.length ? cliFiles : getFiles(cppDir);
  Promise.all(targets.map(runTest))
    .then(clearOutputFiles);
}
|
/*
* Copyright (c) Open Source Strategies, Inc.
*
* Opentaps is free software: you can redistribute it and/or modify it
* under the terms of the GNU Affero General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Opentaps is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Opentaps. If not, see <http://www.gnu.org/licenses/>.
*/
package org.opentaps.gwt.crmsfa.client.opportunities.form;
import org.opentaps.gwt.common.client.UtilUi;
import org.opentaps.gwt.common.client.form.FindPartyForm;
import org.opentaps.gwt.common.client.form.ServiceErrorReader;
import org.opentaps.gwt.common.client.form.base.SubFormPanel;
import org.opentaps.gwt.common.client.listviews.ContactListView;
import org.opentaps.gwt.common.client.lookup.configuration.PartyLookupConfiguration;
import com.google.gwt.http.client.Request;
import com.google.gwt.http.client.RequestBuilder;
import com.google.gwt.http.client.RequestCallback;
import com.google.gwt.http.client.RequestException;
import com.google.gwt.http.client.Response;
import com.gwtext.client.core.EventObject;
import com.gwtext.client.core.SortDir;
import com.gwtext.client.data.Record;
import com.gwtext.client.data.Store;
import com.gwtext.client.data.StringFieldDef;
import com.gwtext.client.util.Format;
import com.gwtext.client.widgets.grid.CellMetadata;
import com.gwtext.client.widgets.grid.ColumnConfig;
import com.gwtext.client.widgets.grid.GridPanel;
import com.gwtext.client.widgets.grid.Renderer;
import com.gwtext.client.widgets.grid.event.GridCellListenerAdapter;
/**
 * A combination of a contacts list view and a tabbed form used to filter that list view.
 * The list is scoped to a single account and adds a delete-icon column which removes the
 * clicked contact from that account via the <code>removeContactFromAccount</code> request.
 */
public class OpportunityContactsSubview extends FindPartyForm {

    /** Module name used for logging. */
    public static final String MODULE = OpportunityContactsSubview.class.getName();

    /** The grid listing the account's contacts. */
    private final ContactListView contactListView;
    /** Identifier of the account whose contacts are displayed. */
    public final String accountPartyId;
    /** Index of the generated delete-icon column, captured while the columns are built. */
    private Integer deleteColumnIndex;

    /**
     * Constructor with autoLoad parameter, use this constructor if some filters need to be set prior to loading the grid data.
     * @param accountPartyId identifier of the account whose contacts should be listed
     * @param autoLoad sets the grid autoLoad parameter, set to <code>false</code> if some filters need to be set prior to loading the grid data
     */
    public OpportunityContactsSubview(String accountPartyId, boolean autoLoad) {
        super(UtilUi.MSG.crmContactId(), UtilUi.MSG.crmFindContacts());
        this.accountPartyId = accountPartyId;
        contactListView = new ContactListView() {
            /** {@inheritDoc} */
            @Override
            public void init() {
                // id and name columns link to the contact view page
                String entityViewUrl = "/crmsfa/control/viewContact?partyId={0}";
                StringFieldDef idDefinition = new StringFieldDef(PartyLookupConfiguration.INOUT_PARTY_ID);
                makeLinkColumn(UtilUi.MSG.crmContactId(), idDefinition, entityViewUrl, true);
                makeLinkColumn(UtilUi.MSG.crmContactName(), idDefinition, new StringFieldDef(PartyLookupConfiguration.INOUT_FRIENDLY_PARTY_NAME), entityViewUrl, true);
                makeColumn(UtilUi.MSG.partyCity(), new StringFieldDef(PartyLookupConfiguration.INOUT_CITY));
                makeColumn(UtilUi.MSG.crmPrimaryEmail(), new StringFieldDef(PartyLookupConfiguration.INOUT_EMAIL));
                makeColumn(UtilUi.MSG.crmPrimaryPhone(), new StringFieldDef(PartyLookupConfiguration.INOUT_FORMATED_PHONE_NUMBER));
                makeColumn(UtilUi.MSG.partyToName(), new StringFieldDef(PartyLookupConfiguration.INOUT_TO_NAME));
                makeColumn(UtilUi.MSG.partyAttentionName(), new StringFieldDef(PartyLookupConfiguration.INOUT_ATTENTION_NAME));
                makeColumn(UtilUi.MSG.partyAddressLine1(), new StringFieldDef(PartyLookupConfiguration.INOUT_ADDRESS));
                makeColumn(UtilUi.MSG.partyAddressLine2(), new StringFieldDef(PartyLookupConfiguration.OUT_ADDRESS_2));
                makeColumn(UtilUi.MSG.partyState(), new StringFieldDef(PartyLookupConfiguration.INOUT_STATE));
                makeColumn(UtilUi.MSG.partyCountry(), new StringFieldDef(PartyLookupConfiguration.INOUT_COUNTRY));
                makeColumn(UtilUi.MSG.partyPostalCode(), new StringFieldDef(PartyLookupConfiguration.INOUT_POSTAL_CODE));
                makeColumn(UtilUi.MSG.crmPostalCodeExt(), new StringFieldDef(PartyLookupConfiguration.OUT_POSTAL_CODE_EXT));
                // remember where the delete-icon column lands so cell clicks can be matched to it
                deleteColumnIndex = getCurrentColumnIndex();
                ColumnConfig config = makeColumn("", new Renderer() {
                    public String render(Object value, CellMetadata cellMetadata, Record record, int rowIndex, int colNum, Store store) {
                        return Format.format("<img width=\"15\" height=\"15\" class=\"checkbox\" src=\"{0}\"/>", UtilUi.ICON_DELETE);
                    }
                });
                config.setWidth(26);
                config.setResizable(false);
                config.setFixed(true);
                config.setSortable(false);
                addGridCellListener(new GridCellListenerAdapter() {
                    private final String actionUrl = "/crmsfa/control/removeContactFromAccount";
                    /** {@inheritDoc} */
                    @Override
                    public void onCellClick(GridPanel grid, int rowIndex, int colindex, EventObject e) {
                        // only react to clicks on the delete-icon column
                        if (colindex == OpportunityContactsSubview.this.deleteColumnIndex) {
                            String contactPartyId = getStore().getRecordAt(rowIndex).getAsString("partyId");
                            RequestBuilder request = new RequestBuilder(RequestBuilder.POST, actionUrl);
                            request.setHeader("Content-type", "application/x-www-form-urlencoded");
                            // NOTE(review): partyId and accountPartyId both expand to {0} (the account id);
                            // presumably the service expects them to match — confirm against the controller mapping.
                            request.setRequestData(Format.format("partyId={0}&accountPartyId={0}&contactPartyId={1}", OpportunityContactsSubview.this.accountPartyId, contactPartyId));
                            request.setCallback(new RequestCallback() {
                                public void onError(Request request, Throwable exception) {
                                    // display error message
                                    markGridNotBusy();
                                    UtilUi.errorMessage(exception.toString());
                                }
                                public void onResponseReceived(Request request, Response response) {
                                    // if it is a correct response, reload the grid
                                    markGridNotBusy();
                                    UtilUi.logInfo("onResponseReceived, response = " + response, MODULE, "ContactListView.init()");
                                    if (!ServiceErrorReader.showErrorMessageIfAny(response, actionUrl)) {
                                        // commit store changes
                                        getStore().reload();
                                        loadFirstPage();
                                    }
                                }
                            });
                            try {
                                markGridBusy();
                                UtilUi.logInfo("posting batch", MODULE, "ContactListView.init()");
                                request.send();
                            } catch (RequestException re) {
                                // clear the busy mask set just before send(), then report the
                                // request exception (previously reported the click EventObject 'e')
                                markGridNotBusy();
                                UtilUi.errorMessage(re.toString(), MODULE, "ContactListView.init()");
                            }
                        }
                    }
                });
                configure(PartyLookupConfiguration.URL_FIND_CONTACTS, PartyLookupConfiguration.INOUT_PARTY_ID, SortDir.ASC);
                // by default, hide non essential columns
                setColumnHidden(PartyLookupConfiguration.INOUT_PARTY_ID, true);
                setColumnHidden(PartyLookupConfiguration.INOUT_STATE, true);
                setColumnHidden(PartyLookupConfiguration.INOUT_COUNTRY, true);
                setColumnHidden(PartyLookupConfiguration.INOUT_TO_NAME, true);
                setColumnHidden(PartyLookupConfiguration.INOUT_ATTENTION_NAME, true);
                setColumnHidden(PartyLookupConfiguration.INOUT_ADDRESS, true);
                setColumnHidden(PartyLookupConfiguration.OUT_ADDRESS_2, true);
                setColumnHidden(PartyLookupConfiguration.INOUT_POSTAL_CODE, true);
                setColumnHidden(PartyLookupConfiguration.OUT_POSTAL_CODE_EXT, true);
            }
        };
        contactListView.setHeader(false);
        contactListView.setAutoLoad(autoLoad);
        contactListView.init();
        addListView(contactListView);
    }

    /** {@inheritDoc} — the filter-by-name tab is not used in this subview. */
    @Override
    protected void buildFilterByNameTab(SubFormPanel p) {
        // do nothing
    }

    /** {@inheritDoc} — filtering by names is disabled for this subview. */
    @Override
    protected void filterByNames() {
        // do nothing
    }

    /** {@inheritDoc} */
    @Override
    public Integer getListAndFormSpacing() {
        return 0;
    }
}
|
#!/bin/bash -x
# Renew the Let's Encrypt certificate for $PROXY_HOSTNAME using the standalone
# authenticator. The proxy daemon is stopped first so letsencrypt can bind the
# HTTP(S) ports, and restarted afterwards regardless of the renewal outcome
# (hence no 'set -e': the daemon must come back up even if renewal fails).
# ACME v2 endpoint; an array so the two words are passed as separate arguments.
V2=(--server https://acme-v02.api.letsencrypt.org/directory)
#sudo apt install letsencrypt
# Stop the proxy so ports 80/443 are free for the standalone authenticator.
curl -X POST "http://127.0.0.1:31600/api/daemon/stop/${PROXY_DAEMON}"
sudo letsencrypt certonly --standalone -d "${PROXY_HOSTNAME}" --force-renew "${V2[@]}"
# Bring the proxy back up.
curl -X POST "http://127.0.0.1:31600/api/daemon/start/${PROXY_DAEMON}"
|
// Re-export the ponyfill module's entire public API as this package's entry point.
export * from './ponyfill';
|
// Compiler-test baseline: the .ts section below intentionally contains type errors
// (excess properties and bad assignments) that exercise excess-property checking on
// discriminated unions; the .js section is the expected down-level emit.
// Do NOT "fix" the errors — they are the fixture being tested.
//// [excessPropertyCheckWithUnions.ts]
type ADT = {
    tag: "A",
    a1: string
} | {
    tag: "D",
    d20: 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20
} | {
    tag: "T",
}
let wrong: ADT = { tag: "T", a1: "extra" }
wrong = { tag: "A", d20: 12 }
wrong = { tag: "D" }
type Ambiguous = {
    tag: "A",
    x: string
} | {
    tag: "A",
    y: number
} | {
    tag: "B",
    z: boolean
} | {
    tag: "C"
}
let amb: Ambiguous
// no error for ambiguous tag, even when it could satisfy both constituents at once
amb = { tag: "A", x: "hi" }
amb = { tag: "A", y: 12 }
amb = { tag: "A", x: "hi", y: 12 }
// correctly error on excess property 'extra', even when ambiguous
amb = { tag: "A", x: "hi", extra: 12 }
amb = { tag: "A", y: 12, extra: 12 }
// assignability errors still work.
// But note that the error for `z: true` is the fallback one of reporting on
// the last constituent since assignability error reporting can't find a single best discriminant either.
amb = { tag: "A" }
amb = { tag: "A", z: true }
type Overlapping =
    | { a: 1, b: 1, first: string }
    | { a: 2, second: string }
    | { b: 3, third: string }
let over: Overlapping
// these two are not reported because there are two discriminant properties
over = { a: 1, b: 1, first: "ok", second: "error" }
over = { a: 1, b: 1, first: "ok", third: "error" }
// Expected ES5 emit for the .ts section above (types erased, comments preserved).
//// [excessPropertyCheckWithUnions.js]
var wrong = { tag: "T", a1: "extra" };
wrong = { tag: "A", d20: 12 };
wrong = { tag: "D" };
var amb;
// no error for ambiguous tag, even when it could satisfy both constituents at once
amb = { tag: "A", x: "hi" };
amb = { tag: "A", y: 12 };
amb = { tag: "A", x: "hi", y: 12 };
// correctly error on excess property 'extra', even when ambiguous
amb = { tag: "A", x: "hi", extra: 12 };
amb = { tag: "A", y: 12, extra: 12 };
// assignability errors still work.
// But note that the error for `z: true` is the fallback one of reporting on
// the last constituent since assignability error reporting can't find a single best discriminant either.
amb = { tag: "A" };
amb = { tag: "A", z: true };
var over;
// these two are not reported because there are two discriminant properties
over = { a: 1, b: 1, first: "ok", second: "error" };
over = { a: 1, b: 1, first: "ok", third: "error" };
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.