text stringlengths 1 1.05M |
|---|
#include <AL/al.h>
#include <AL/alc.h>
// Thin pass-through facade over the OpenAL buffer-query and ALC context C API.
// Every method forwards its arguments unchanged to the matching al*/alc* call.
// No error checking is performed here: callers should poll alGetError() /
// alcGetError() after each call if they care about failures.
class OpenALWrapper {
public:
// Read a 3-component float buffer attribute into value1..value3.
void getBuffer3f(ALuint buffer, ALenum param, ALfloat *value1, ALfloat *value2, ALfloat *value3) {
alGetBuffer3f(buffer, param, value1, value2, value3);
}
// Read a float-vector buffer attribute; `values` must be large enough for `param`.
void getBufferfv(ALuint buffer, ALenum param, ALfloat *values) {
alGetBufferfv(buffer, param, values);
}
// Read a single integer buffer attribute (e.g. AL_SIZE, AL_FREQUENCY).
void getBufferi(ALuint buffer, ALenum param, ALint *value) {
alGetBufferi(buffer, param, value);
}
// Read a 3-component integer buffer attribute into value1..value3.
void getBuffer3i(ALuint buffer, ALenum param, ALint *value1, ALint *value2, ALint *value3) {
alGetBuffer3i(buffer, param, value1, value2, value3);
}
// Read an integer-vector buffer attribute; `values` must be large enough for `param`.
void getBufferiv(ALuint buffer, ALenum param, ALint *values) {
alGetBufferiv(buffer, param, values);
}
// Create a rendering context on `device`; attrlist may be NULL for defaults.
// Returns NULL on failure (per the ALC API).
ALCcontext* createContext(ALCdevice *device, const ALCint *attrlist) {
return alcCreateContext(device, attrlist);
}
// Make `context` current for this thread/process; returns ALC_TRUE on success.
ALCboolean makeContextCurrent(ALCcontext *context) {
return alcMakeContextCurrent(context);
}
};
<reponame>Teleology/ts-design
// The "real subject": eagerly loads the image from disk on construction.
class ReadImg {
  // Parameter property declares and assigns the private field in one step.
  constructor(private fileName: string) {
    this.loadFromDisk();
  }

  public display(): void {
    console.log(`display${this.fileName}`);
  }

  private loadFromDisk(): void {
    console.log(`loading${this.fileName}`);
  }
}
// Proxy that owns a ReadImg instance and forwards display() calls to it.
class ProxyImg {
  private readonly realImg: ReadImg;

  constructor(fileName: string) {
    // Note: ReadImg loads eagerly in its constructor, so this proxy does not
    // defer the disk load — it only forwards calls.
    this.realImg = new ReadImg(fileName);
  }

  public display(): void {
    this.realImg.display();
  }
}
// Client code: interacts only with the proxy, which forwards to ReadImg.
let proxyImg = new ProxyImg("1.png");
proxyImg.display();
import pandas as pd
# Load the country data set; expects "Population" and "Area" columns.
df = pd.read_csv("data.csv")

# People per unit area for every row.
df["Density"] = df["Population"] / df["Area"]

# Show the density of the first 5 rows.
print(df["Density"].head())
-- Top 5 products by average review rating (highest first).
-- INNER JOIN means products with no reviews are excluded from the result.
SELECT products.name, AVG(reviews.rating) AS avg_rating
FROM products
INNER JOIN reviews
ON products.id = reviews.product_id
-- NOTE(review): grouping by name assumes product names are unique; grouping
-- by products.id as well would be safer if duplicates are possible.
GROUP BY products.name
ORDER BY avg_rating DESC
LIMIT 5;
// Render a 1..10 multiplication table for a user-supplied number.
// FIX: prompt() returns a string; coerce it once so `num` is numeric
// everywhere (the original relied on implicit coercion inside `num * i`).
let num = Number(prompt("Please enter a number: "));
let output = '<table border="1">';
for (let i = 1; i <= 10; i++) {
  output += "<tr><td>" + num + " X " + i + " = " + (num * i) + "</td></tr>";
}
output += "</table>"; // (missing semicolon in the original)
document.write(output);
import subprocess
import sys
def get_commit_timestamp(file_path, commit_hash):
    """Return the committer timestamp (Unix seconds) of ``commit_hash``.

    Exits the process with status 1 if git fails (e.g. unknown hash or not a
    git repository).

    NOTE(review): `git show -s` prints the given commit's own timestamp; it is
    unclear that the trailing `-- file_path` pathspec restricts anything here,
    so `file_path` may have no effect on the result. Confirm whether
    `git log -1 --format=%ct <hash> -- <path>` was intended instead.
    """
    try:
        # %ct = committer date as a Unix timestamp; -s suppresses diff output.
        timestamp = subprocess.check_output(['git', 'show', '-s', '--format=%ct', commit_hash, '--', file_path])
        timestamp = timestamp.decode('utf-8').strip()  # bytes -> str, drop trailing newline
        return int(timestamp)
    except subprocess.CalledProcessError:
        print("Error: Unable to retrieve commit timestamp.")
        sys.exit(1)

# Example usage
file_path = "path/to/your/file"
commit_hash = "a1b2c3d"  # Replace with the actual commit hash
timestamp = get_commit_timestamp(file_path, commit_hash)
print(f"Commit timestamp: {timestamp}")
//
// HGHomeViewController.h
// HGPersonalCenter
//
// Created by Arch on 2017/6/16.
// Copyright © 2017年 mint_bin. All rights reserved.
//
#import <UIKit/UIKit.h>
// FIX: the superclass was referenced but never declared — without this import
// the header does not compile on its own. NOTE(review): header file name
// assumed from the class naming convention; confirm the actual path.
#import "HGBaseViewController.h"

// Home tab view controller of the personal center.
@interface HGHomeViewController : HGBaseViewController
@end
|
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package models;
import java.util.List;
import javax.swing.AbstractListModel;
/**
*
* @author pretizy
*/
public class Vmodel extends AbstractListModel {
    // Verses currently exposed through this list model.
    private List list;
    // Chapter whose verses are being displayed.
    private ChapterModel cmodel;

    public Vmodel(ChapterModel cmodel) {
        // Remember the chapter, then show verse set 1 by default.
        this.cmodel = cmodel;
        setVerse(cmodel.getFind().findVerses(1));
    }

    /** Swap in a new verse list and notify all registered listeners. */
    public void setVerse(List verses) {
        this.list = verses;
        this.fireContentsChanged(this, 0, list.size());
    }

    public ChapterModel getCmodel() {
        return cmodel;
    }

    @Override
    public int getSize() {
        return list.size();
    }

    @Override
    public Object getElementAt(int index) {
        return list.get(index);
    }
}
|
using System;
using System.Collections.Immutable;
/// <summary>
/// Persistent (immutable) LIFO stack: Push/Pop return new instances and never
/// modify existing ones, so any snapshot can be shared freely.
/// </summary>
public class ImmutableStack<T>
{
    /// <summary>
    /// Shared empty-stack sentinel. FIX: the original class had a private
    /// constructor and no factory, making it impossible to ever create an
    /// instance; this field is the (backward-compatible) entry point.
    /// </summary>
    public static readonly ImmutableStack<T> Empty = new ImmutableStack<T>(null, default(T));

    private readonly ImmutableStack<T> previousStack;
    private readonly T value;

    private ImmutableStack(ImmutableStack<T> previousStack, T value)
    {
        this.previousStack = previousStack;
        this.value = value;
    }

    /// <summary>True when this is the empty sentinel (nothing to Pop or Peek).</summary>
    public bool IsEmpty
    {
        get { return previousStack == null; }
    }

    /// <summary>Returns a new stack with <paramref name="value"/> on top.</summary>
    public ImmutableStack<T> Push(T value)
    {
        return new ImmutableStack<T>(this, value);
    }

    /// <summary>Returns the stack without its top element.</summary>
    /// <exception cref="InvalidOperationException">Thrown on the empty stack.</exception>
    public ImmutableStack<T> Pop()
    {
        if (previousStack == null)
        {
            throw new InvalidOperationException("Cannot pop from an empty stack");
        }
        return previousStack;
    }

    /// <summary>Returns the top element without removing it.</summary>
    /// <exception cref="InvalidOperationException">Thrown on the empty stack.</exception>
    public T Peek()
    {
        if (previousStack == null)
        {
            throw new InvalidOperationException("Cannot peek an empty stack");
        }
        return value;
    }
}
<reponame>HibiscusLotus/react-management-ssytem<filename>src/router.js
import React, { lazy, Suspense } from 'react';
import { BrowserRouter as Router, Route, Switch, Redirect } from 'react-router-dom';
import { checkIsAdmin, checkAuth } from './utils/utils';
// Route components are code-split: each page loads as its own chunk on first
// navigation, with the surrounding <Suspense> showing a fallback meanwhile.
const Login = lazy(() => import('./pages/Login/index'));
const Main = lazy(() => import('./pages/Main/index'));
const Icons = lazy(() => import('./pages/Icons/index'));
const Tables = lazy(() => import('./pages/Tables/index'));
const Dashboard = lazy(() => import('./pages/Dashboard/index'));
const Tabs = lazy(() => import('./pages/Tabs/index'));
const Forms = lazy(() => import('./pages/Forms/index'));
const Upload = lazy(() => import('./pages/Forms/upload'));
const Charts = lazy(() => import('./pages/Charts/index'));
const ErrorRoute = lazy(() => import('./pages/Error/index'));
const Permission = lazy(() => import('./pages/Permission/index'));
const Locale = lazy(() => import('./pages/Locale/index'))
const Editor = lazy(() => import('./pages/Editor/index'))
const Markdown = lazy(() => import('./pages/Markdown/index'))
const Echarts = lazy(() => import('./pages/Echarts/index'))
// Top-level router: public login routes, the authenticated /main shell,
// and error fallbacks.
export const AppRoutes = () => {
  return (
    <Router>
      <Suspense fallback={<div>加载中...</div>}>
        <Switch>
          <Route exact path="/" component={Login} />
          <Route path="/login" component={Login} />
          {/* AuthRoute redirects unauthenticated users to /login */}
          <AuthRoute path="/main" component={Main} />
          <Route path="/error/:code" component={ErrorRoute} />
          {/* catch-all: any unknown path renders the error page */}
          <Route component={ErrorRoute} />
        </Switch>
      </Suspense>
    </Router>
  );
};
// Routes rendered inside the authenticated /main layout. /main itself
// redirects to the dashboard; /main/permission additionally requires admin.
export const MainRoutes = () => {
  return (
    <Suspense fallback={<div></div>}>
      <Switch>
        <Redirect exact from="/main" to="/main/dashboard" />
        <Route exact path="/main/dashboard" component={Dashboard} />
        <Route exact path="/main/icons" component={Icons} />
        <Route exact path="/main/tables" component={Tables} />
        <Route exact path="/main/tabs" component={Tabs} />
        <Route exact path="/main/forms" component={Forms} />
        <Route exact path="/main/upload" component={Upload} />
        <Route exact path="/main/charts" component={Charts} />
        {/* admin-only: AdminRoute redirects non-admins to /error/403 */}
        <AdminRoute exact path="/main/permission" component={Permission} />
        <Route exact path="/main/i18n" component={Locale} />
        <Route exact path="/main/editor" component={Editor} />
        <Route exact path="/main/markdown" component={Markdown} />
        <Route exact path="/main/echarts" component={Echarts} />
        {/* catch-all inside the main layout */}
        <Route component={ErrorRoute} />
      </Switch>
    </Suspense>
  );
};
// 路由管理员权限校验
const AdminRoute = ({ component: Component, ...rest }) => {
return (
<Route
{...rest}
render={props =>
// checkIsAdmin 方法里做了权限校验
checkIsAdmin() ? <Component {...props} /> : <Redirect to="/error/403" />
}
/>
);
};
// 路由登录鉴权
const AuthRoute = ({ component: Component, ...rest }) => {
return (
<Route
{...rest}
render={props =>
// checkAuth 方法判断是否已登录
checkAuth() ? <Component {...props} /> : <Redirect to="/login" />
}
/>
);
}; |
import { Injectable } from '@nestjs/common'
import { ConfigService } from '@nestjs/config'
// Typed accessors over the Nest ConfigService; each getter reads one
// environment variable with a fallback default.
@Injectable()
export class ApiConfigService {
  constructor(private configService: ConfigService) {}
  /* SERVER */
  // HTTP port the server listens on (env PORT, default 3000).
  get port(): number {
    return this.configService.get<number>('PORT', 3000)
  }
  /* DATABASE */
  // Database host (env DATABASE_HOST).
  get databaseHost(): string {
    return this.configService.get<string>('DATABASE_HOST', '127.0.0.1')
  }
  // Database port (env DATABASE_PORT).
  get databasePort(): number {
    return this.configService.get<number>('DATABASE_PORT', 5432)
  }
  // Database user (env DATABASE_USER).
  get databaseUsername(): string {
    return this.configService.get<string>('DATABASE_USER', 'postgres')
  }
  // Database password (env DATABASE_PASSWORD).
  // NOTE(review): the fallback '<PASSWORD>' looks like a scrubbed placeholder,
  // not a usable default — DATABASE_PASSWORD must always be set in the env.
  get databasePassword(): string {
    return this.configService.get<string>('DATABASE_PASSWORD', '<PASSWORD>')
  }
  // Database name (env DATABASE_DB).
  get databaseDB(): string {
    return this.configService.get<string>('DATABASE_DB', 'postgres')
  }
}
|
def partition(arr, low, high):
    """Lomuto partition of arr[low..high] in place, using arr[high] as pivot.

    After the call, the pivot sits at its final sorted index (which is
    returned); everything left of it is smaller, everything right is >= it.
    """
    pivot = arr[high]
    boundary = low - 1  # last index known to hold a value smaller than the pivot
    for cursor in range(low, high):
        if arr[cursor] < pivot:
            boundary += 1
            arr[boundary], arr[cursor] = arr[cursor], arr[boundary]
    # Drop the pivot into the slot just past the "smaller" region.
    arr[boundary + 1], arr[high] = arr[high], arr[boundary + 1]
    return boundary + 1
def quickSort(arr, low, high):
    """Sort arr[low..high] in place with recursive (Lomuto) quicksort."""
    # Sub-arrays of size 0 or 1 are already sorted.
    if low >= high:
        return
    # Place the pivot, then sort the two halves around it.
    split = partition(arr, low, high)
    quickSort(arr, low, split - 1)
    quickSort(arr, split + 1, high)
# Demo: sort a small array and print the result, one element per line.
arr = [10, 80, 30, 90, 40, 50, 70]
n = len(arr)
quickSort(arr, 0, n - 1)
print("Sorted array is:")
# FIX: the original ended with `print("%d" % arr[i]),` — a Python 2 idiom
# that, under Python 3, builds a throwaway (None,) tuple after each print.
for i in range(n):
    print("%d" % arr[i])
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Fire one HTTP request described by entry $1 (zero-based) of ./scenario.
# Reads the entry's url, method (default GET) and headers, and expects an
# HTTP/1.1 200 reply; exits 1 otherwise.
trigger() {
    url=$(pos=$1 yq '.entries.[env(pos)].url' ./scenario)
    method=$(pos=$1 yq '.entries.[env(pos)].method // "GET"' ./scenario)
    pos=$1 yq '.entries.[env(pos)].headers.[]' ./scenario > /req_header
    # Quote $method/$url so yq output containing spaces cannot split the command.
    rst=$(curl -Is -X "$method" -H @/req_header "$url" | head -1 | grep "HTTP/1.1 200")
    if [[ -z "$rst" ]]; then
        # FIX: messages used "$2", but the caller only passes the entry index,
        # so the reported name was always empty — report the resolved URL.
        echo "failed to access $url"
        exit 1
    fi
    echo "access $url successful"
}
# Reset the mock collector's stored segments before a scenario runs.
# Exits 1 if the collector does not answer with HTTP/1.1 200.
# NOTE(review): this shadows the shell built-in `clear`; harmless here since
# the script never clears the terminal.
clear() {
    rst=$(curl -Is -X GET http://${COLLECTOR}/receiveData/clear | head -1 | grep "HTTP/1.1 200")
    if [[ -z "$rst" ]]; then
        echo "failed to clear collector segments"
        exit 1
    fi
    echo "sweep collector segments successful"
}
# POST the expected-data file to the collector's /dataValidate endpoint and
# poll until it accepts (HTTP 200) or MAX_RETRY_TIMES attempts are exhausted.
# $1 - scenario name (used only in log messages)
# $2 - path to the expected-data payload file
# On final failure: dumps the collector's last response and exits 1.
validate() {
    name=$1
    expectedData=$2
    times=0
    while [ $times -lt $MAX_RETRY_TIMES ]; do
        curl -X POST --data-raw "$(cat $expectedData)" --dump-header ./header -o /response -s ${COLLECTOR}/dataValidate
        rst=$(head -1 /header | grep "HTTP/1.1 200")
        if [[ -n "$rst" ]]; then
            echo "scenario $name verification successful"
            return 0
        fi
        # Agent data may not have arrived yet; wait before retrying.
        sleep 3
        times=$((times+1))
    done
    cat /response
    echo "scenario $name verification failed"
    exit 1
}
# Main driver: for every scenario in /config.yaml, reset the collector, fire
# each entry's request, then validate the collected data.
scenarios=$(yq e '.scenarios | length' /config.yaml)
echo "total scenarios number: $scenarios"
scenario=0
while [ $scenario -lt $scenarios ]; do
    clear
    # Extract the current scenario into ./scenario for trigger() to read.
    pos=$scenario yq -P '.scenarios.[env(pos)]' /config.yaml > ./scenario
    name=$(yq '.name' ./scenario)
    entries=$(yq '.entries | length' ./scenario)
    expectedData=$(yq '.expected' ./scenario)
    entry=0
    while [ $entry -lt $entries ]; do
        trigger $entry
        entry=$((entry+1))
    done
    sleep 5 # wait for agent report trace segments.
    validate $name $expectedData
    scenario=$((scenario+1))
done
|
#!/bin/bash
# Succeed (exit 0) iff tag $2 already exists for Docker Hub repository $1.
function docker_tag_exists() {
    EXISTS=$(curl -s "https://hub.docker.com/v2/repositories/$1/tags/?page_size=10000" | jq -r "[.results | .[] | .name == \"$2\"] | any")
    # FIX: quote the operands — when curl/jq fail, $EXISTS is empty and the
    # unquoted `test $EXISTS = true` is a syntax error instead of a clean
    # "tag not found" failure.
    test "$EXISTS" = "true"
}
# Build, tag and push the image only when the 1.0.0-7 tag is not already
# published on Docker Hub.
if docker_tag_exists svenruppert/maven-3.2.5-graalvm 1.0.0-7; then
    echo skip building, image already existing - svenruppert/maven-3.2.5-graalvm 1.0.0-7
else
    echo start building the images
    docker build -t svenruppert/maven-3.2.5-graalvm .
    docker tag svenruppert/maven-3.2.5-graalvm:latest svenruppert/maven-3.2.5-graalvm:1.0.0-7
    docker push svenruppert/maven-3.2.5-graalvm:1.0.0-7
fi
<reponame>luanlazz/barbecue-app-back<filename>src/presentation/controllers/barbecue-participant/remove/remove-participant-controller.spec.ts
import { RemoveParticipantController } from './remove-participant-controller'
import { HttpRequest } from '@/presentation/protocols'
import { mockRemoveParticipant, mockLoadParticipantById, mockLoadBarbecueById } from '@/presentation/test'
import { serverError, noContent, forbidden, serviceUnavailable } from '@/presentation/helpers'
import { InvalidParamError, UnexpectedError } from '@/presentation/errors'
import { LoadBarbecueById, LoadParticipantById, RemoveParticipant } from '@/domain/usecases'
import { throwError } from '@/domain/test'
type SutTypes = {
sut: RemoveParticipantController
loadBarbecueByIdStub: LoadBarbecueById
loadParticipantByIdStub: LoadParticipantById
removeParticipantStub: RemoveParticipant
}
// Build the controller under test together with all of its collaborator stubs.
const makeSut = (): SutTypes => {
  const loadBarbecueByIdStub = mockLoadBarbecueById()
  const loadParticipantByIdStub = mockLoadParticipantById()
  const removeParticipantStub = mockRemoveParticipant()
  return {
    sut: new RemoveParticipantController(loadBarbecueByIdStub, loadParticipantByIdStub, removeParticipantStub),
    loadBarbecueByIdStub,
    loadParticipantByIdStub,
    removeParticipantStub
  }
}
// Canonical request used by every test case below.
const mockRequest = (): HttpRequest => {
  const params = {
    barbecueId: 'any_barbecue_id',
    participantId: 'any_participant_id'
  }
  return { params }
}
describe('RemoveParticipant Controller', () => {
  test('should call LoadBarbecueById with correct barbecue id', async () => {
    const { sut, loadBarbecueByIdStub } = makeSut()
    const loadSpy = jest.spyOn(loadBarbecueByIdStub, 'loadById')
    await sut.handle(mockRequest())
    expect(loadSpy).toHaveBeenCalledWith(mockRequest().params.barbecueId)
  })
  test('should return 500 if LoadBarbecueById throws', async () => {
    const { sut, loadBarbecueByIdStub } = makeSut()
    jest.spyOn(loadBarbecueByIdStub, 'loadById').mockImplementation(throwError)
    const httpResponse = await sut.handle(mockRequest())
    expect(httpResponse).toEqual(serverError(new Error()))
  })
  test('should return 403 if LoadBarbecueById not return a barbecue', async () => {
    const { sut, loadBarbecueByIdStub } = makeSut()
    // FIX: loadById returns a Promise, so the stubbed value must be a resolved
    // promise (consistent with the Promise.resolve(...) pattern used below);
    // returning a bare null is a type error under strict null checks.
    jest.spyOn(loadBarbecueByIdStub, 'loadById').mockReturnValueOnce(Promise.resolve(null))
    const httpResponse = await sut.handle(mockRequest())
    expect(httpResponse).toEqual(forbidden(new InvalidParamError('barbecueId')))
  })
  test('should call LoadParticipantById with correct participant id', async () => {
    const { sut, loadParticipantByIdStub } = makeSut()
    const loadSpy = jest.spyOn(loadParticipantByIdStub, 'loadById')
    await sut.handle(mockRequest())
    expect(loadSpy).toHaveBeenCalledWith(mockRequest().params.participantId)
  })
  test('should return 500 if LoadParticipantById throws', async () => {
    const { sut, loadParticipantByIdStub } = makeSut()
    jest.spyOn(loadParticipantByIdStub, 'loadById').mockImplementation(throwError)
    const httpResponse = await sut.handle(mockRequest())
    expect(httpResponse).toEqual(serverError(new Error()))
  })
  test('should return 403 if LoadParticipantById not return a barbecue', async () => {
    const { sut, loadParticipantByIdStub } = makeSut()
    // FIX: same as above — stub with a resolved promise, not a bare null.
    jest.spyOn(loadParticipantByIdStub, 'loadById').mockReturnValueOnce(Promise.resolve(null))
    const httpResponse = await sut.handle(mockRequest())
    expect(httpResponse).toEqual(forbidden(new InvalidParamError('participantId')))
  })
  test('should call RemoveParticipant with correct values', async () => {
    const { sut, removeParticipantStub } = makeSut()
    const removeSpy = jest.spyOn(removeParticipantStub, 'remove')
    await sut.handle(mockRequest())
    expect(removeSpy).toHaveBeenCalledWith(mockRequest().params.barbecueId, mockRequest().params.participantId)
  })
  test('should return 500 if RemoveParticipant throws', async () => {
    const { sut, removeParticipantStub } = makeSut()
    jest.spyOn(removeParticipantStub, 'remove').mockImplementation(throwError)
    const httpResponse = await sut.handle(mockRequest())
    expect(httpResponse).toEqual(serverError(new Error()))
  })
  test('should return 503 if RemoveParticipant return false', async () => {
    const { sut, removeParticipantStub } = makeSut()
    jest.spyOn(removeParticipantStub, 'remove').mockReturnValueOnce(Promise.resolve(false))
    const httpResponse = await sut.handle(mockRequest())
    expect(httpResponse).toEqual(serviceUnavailable(new UnexpectedError('remove participant')))
  })
  test('should return 204 on success', async () => {
    const { sut } = makeSut()
    const httpResponse = await sut.handle(mockRequest())
    expect(httpResponse).toEqual(noContent())
  })
})
|
/*
*
*/
package net.community.chest.util.logging;
import java.io.IOException;
import java.io.PrintStream;
import java.nio.channels.Channel;
import java.util.Map;
import java.util.TreeMap;
import net.community.chest.io.EOLStyle;
import net.community.chest.io.output.NullOutputStream;
import net.community.chest.reflect.MethodUtil;
/**
* <P>Copyright 2008 as per GPLv2</P>
*
* <P>Used to "intercept" printed data and log it</P>
* @author <NAME>.
* @since Sep 1, 2008 1:29:18 PM
*/
public abstract class AbstractLoggingPrintStream extends PrintStream implements Channel {
    // Class names belonging to this stream's own construction hierarchy;
    // stack frames from these classes are skipped when resolving the caller.
    private final Map<String,StackTraceElement> _hierarchyMap;
    protected AbstractLoggingPrintStream ()
    {
        // All bytes written to the underlying stream are discarded; output is
        // routed through log(...) instead.
        super(new NullOutputStream());
        // build a map of the hierarchy so we can ignore it when resolving the call sequence
        final StackTraceElement[] sa=new Exception().getStackTrace();
        if ((null == sa) || (sa.length <= 1))
            throw new IllegalStateException("No stack trace hierarchy");
        final int cIndex=MethodUtil.getFirstConstructorElement(sa);
        if (cIndex < 0)
            throw new IllegalStateException("Cannot determine class hierarchy");
        _hierarchyMap = new TreeMap<String,StackTraceElement>();
        for (int sIndex=cIndex; sIndex >= 0; sIndex--)
        {
            final StackTraceElement elem=sa[sIndex];
            final String clsName=(null == elem) ? null : elem.getClassName();
            if ((null == clsName) || (clsName.length() <= 0))
                continue;
            _hierarchyMap.put(clsName, elem);
        }
    }
    /*
     * @see java.io.PrintStream#append(char)
     */
    @Override
    public PrintStream append (char c)
    {
        print(c);
        return this;
    }
    /*
     * @see java.io.PrintStream#append(java.lang.CharSequence)
     */
    @Override
    public PrintStream append (CharSequence csq)
    {
        print(csq.toString());
        return this;
    }
    /*
     * @see java.io.PrintStream#append(java.lang.CharSequence, int, int)
     */
    @Override
    public PrintStream append (CharSequence csq, int start, int end)
    {
        return append(csq.subSequence(start, end));
    }
    // Channel open/closed state; writing while closed raises the stream's
    // error flag (see print(String)).
    private boolean _open /* =false */;
    /*
     * @see java.nio.channels.Channel#isOpen()
     */
    @Override
    public boolean isOpen ()
    {
        return _open;
    }
    public void setOpen (boolean o)
    {
        _open = o;
    }
    /*
     * @see java.io.PrintStream#close()
     */
    @Override
    public void close ()
    {
        if (isOpen())
            setOpen(false);
        super.close();
    }
    /**
     * TRUE=Attach the {@link StackTraceElement} of the caller
     */
    private boolean _resolveCaller=true;
    public boolean isResolveCaller ()
    {
        return _resolveCaller;
    }
    public void setResolveCaller (boolean resolveCaller)
    {
        _resolveCaller = resolveCaller;
    }
    // Walks the current stack and returns the first frame whose class is NOT
    // part of this stream's construction hierarchy — i.e., the actual caller.
    protected StackTraceElement resolveCallerElement ()
    {
        final StackTraceElement[] sa=new Exception().getStackTrace();
        if ((null == sa) || (sa.length <= 1))
            return null;    // debug breakpoint
        for (final StackTraceElement elem : sa)
        {
            final String eClass=(null == elem) ? null : elem.getClassName();
            if ((null == eClass) || (eClass.length() <= 0))
                continue;
            // stop at first method that is NOT part of the hierarchy
            if (_hierarchyMap.get(eClass) != null)
                continue;
            return elem;
        }
        return null;
    }
    /**
     * The default {@link LogLevelWrapper} used for logging the data
     */
    private LogLevelWrapper _defLevel=LogLevelWrapper.DEBUG;
    public LogLevelWrapper getDefaultLogLevel ()
    {
        return _defLevel;
    }
    public void setDefaultLogLevel (LogLevelWrapper l)
    {
        _defLevel = l;
    }
    /**
     * Called by default implementation of {@link #print(String)} to execute
     * the actual logging
     * @param l The {@link LogLevelWrapper} value
     * @param ce The location (if {@link #isResolveCaller()}=TRUE) and
     * successfully resolved the caller
     * @param s The written data {@link String} "line"
     * @throws IOException if failed to write the data
     */
    public abstract void log (LogLevelWrapper l, StackTraceElement ce, String s) throws IOException;
    // Accumulates partial lines across print(...) calls until a LF arrives.
    private StringBuilder _workBuf /* =null */;
    protected StringBuilder getWorkBuffer (final int len)
    {
        if (null == _workBuf)
            _workBuf = new StringBuilder(Math.max(len, Byte.MAX_VALUE));
        return _workBuf;
    }
    // Appends s to sb while dropping every '\r' character.
    private static StringBuilder appendCleanData (final StringBuilder sb, final String s)
    {
        final int sLen=(null == s) ? 0 : s.length();
        for (int curPos=0; curPos < sLen; )
        {
            final int crPos=s.indexOf('\r', curPos);
            // indexOf returned -1 => no more CR-s; copy the remainder and stop
            if (crPos < curPos)
            {
                final String remString=(curPos > 0) ? s.substring(curPos) : s;
                if ((remString != null) && (remString.length() > 0))
                    sb.append(remString);
                break;
            }
            final String clrText=(curPos == crPos) ? null : s.substring(curPos, crPos);
            if ((clrText != null) && (clrText.length() > 0))
                sb.append(clrText);
            curPos = crPos + 1;
        }
        return sb;
    }
    // Returns s without '\r' characters, using sb as scratch space; sb's
    // original length is restored before returning.
    private static String getCleanData (final StringBuilder sb, final String s)
    {
        if ((null == s) || (s.length() <= 0) || (s.indexOf('\r') < 0))
            return s;
        final int prevLen=sb.length();
        final StringBuilder res=appendCleanData(sb, s);
        final String ret=res.toString();
        res.setLength(prevLen);
        return ret;
    }
    /*
     * Splits the written data into LF-terminated lines (stripping CR-s) and
     * forwards each complete line to log(...); an incomplete trailing line is
     * buffered until a later call completes it.
     * @see java.io.PrintStream#print(java.lang.String)
     */
    @Override
    public void print (final String s)
    {
        if (!isOpen())
        {
            setError();
            return;
        }
        StackTraceElement ce=null;
        final int sLen=(null == s) ? 0 : s.length();
        final StringBuilder sb=getWorkBuffer(sLen);
        // break it up into LF separated lines
        for (int curPos=0; curPos < sLen; )
        {
            final int lfPos=s.indexOf('\n', curPos);
            // if no more LF(s) then accumulate whatever is left for next time
            if (lfPos < curPos)
            {
                final String remString=(curPos > 0) ? s.substring(curPos) : s;
                if ((remString != null) && (remString.length() > 0))
                    appendCleanData(sb, remString);
                break;
            }
            final String clrText=(curPos == lfPos) ? null : s.substring(curPos, lfPos), msgText;
            // check if have data from previous call
            if (sb.length() > 0)
            {
                if ((clrText != null) && (clrText.length() > 0))
                    appendCleanData(sb, clrText);
                msgText = sb.toString();
                sb.setLength(0);    // re-start accumulation
            }
            else
                msgText = getCleanData(sb, clrText);
            if ((msgText != null) && (msgText.length() > 0))
            {
                // resolve the caller at most once per print(...) invocation
                if ((null == ce) && isResolveCaller())
                    ce = resolveCallerElement();
                try
                {
                    log(getDefaultLogLevel(), ce, msgText);
                }
                catch(IOException e)
                {
                    setError();
                }
            }
            curPos = lfPos + 1;
        }
    }
    /*
     * @see java.io.PrintStream#print(boolean)
     */
    @Override
    public void print (boolean b)
    {
        print(String.valueOf(b));
    }
    /*
     * @see java.io.PrintStream#print(char)
     */
    @Override
    public void print (char c)
    {
        print(String.valueOf(c));
    }
    /*
     * @see java.io.PrintStream#print(char[])
     */
    @Override
    public void print (char[] s)
    {
        print(new String(s));
    }
    /*
     * @see java.io.PrintStream#print(double)
     */
    @Override
    public void print (double d)
    {
        print(String.valueOf(d));
    }
    /*
     * @see java.io.PrintStream#print(float)
     */
    @Override
    public void print (float f)
    {
        print(String.valueOf(f));
    }
    /*
     * @see java.io.PrintStream#print(int)
     */
    @Override
    public void print (int i)
    {
        print(String.valueOf(i));
    }
    /*
     * @see java.io.PrintStream#print(long)
     */
    @Override
    public void print (long l)
    {
        print(String.valueOf(l));
    }
    /*
     * @see java.io.PrintStream#print(java.lang.Object)
     */
    @Override
    public void print (Object obj)
    {
        print(String.valueOf(obj));
    }
    /*
     * @see java.io.PrintStream#println()
     */
    @Override
    public void println ()
    {
        print(EOLStyle.LOCAL.getStyleString());
    }
    /*
     * @see java.io.PrintStream#println(boolean)
     */
    @Override
    public void println (boolean x)
    {
        print(x);
        println();
    }
    /*
     * @see java.io.PrintStream#println(char)
     */
    @Override
    public void println (char x)
    {
        print(x);
        println();
    }
    /*
     * @see java.io.PrintStream#println(char[])
     */
    @Override
    public void println (char[] x)
    {
        print(x);
        println();
    }
    /*
     * @see java.io.PrintStream#println(double)
     */
    @Override
    public void println (double x)
    {
        print(x);
        println();
    }
    /*
     * @see java.io.PrintStream#println(float)
     */
    @Override
    public void println (float x)
    {
        print(x);
        println();
    }
    /*
     * @see java.io.PrintStream#println(int)
     */
    @Override
    public void println (int x)
    {
        print(x);
        println();
    }
    /*
     * @see java.io.PrintStream#println(long)
     */
    @Override
    public void println (long x)
    {
        print(x);
        println();
    }
    /*
     * @see java.io.PrintStream#println(java.lang.Object)
     */
    @Override
    public void println (Object x)
    {
        print(x);
        println();
    }
    /*
     * @see java.io.PrintStream#println(java.lang.String)
     */
    @Override
    public void println (String x)
    {
        print(x);
        println();
    }
    /*
     * Bytes are widened to chars via a 0x00FF mask (ISO-8859-1-style mapping).
     * @see java.io.PrintStream#write(byte[], int, int)
     */
    @Override
    public void write (byte[] buf, int off, int len)
    {
        if (len > 0)
        {
            final char[] ca=new char[len];
            for (int cIndex=0, cOffset=off; cIndex < len; cOffset++, cIndex++)
                ca[cIndex] = (char) (buf[cOffset] & 0x00FF);
            print(ca);
        }
    }
    /*
     * @see java.io.PrintStream#write(int)
     */
    @Override
    public void write (int b)
    {
        print((char) (b & 0x00FF));
    }
}
|
// Fetch the page's advertised RSS feed and collect up to `amount` posts as
// {title, content, url, date} objects, then hand the array to `callback`.
var getRecentPosts = function(amount, callback) {
    // The feed URL is declared in the page's <head>.
    var rss = $("link[type='application/rss+xml']").attr("href");
    $.get(rss, function(data) {
        var recent = [];
        // FIX: the original also computed `$.parseXML(data)` into an unused
        // variable; jQuery may already deliver a parsed XML document depending
        // on the response content type (in which case parseXML would throw),
        // so the unused call is removed.
        var posts = $(data).find("item");
        if (amount) posts = posts.slice(0, amount); // keep only the first `amount` posts
        for (var i = 0; posts && i < posts.length; i++) {
            var post = posts.eq(i);
            recent.push({
                title: post.find("title").text(),
                content: post.find("description").text(),
                url: post.find("link").text(),
                date: post.find("pubDate").text()
            });
        }
        callback(recent); // Done collecting posts, process to callback
    });
};
// Keep at most the first `words` whitespace-separated tokens of `str`,
// re-joined with single spaces (split's limit argument does the truncation).
var crop = function(str, words) {
    return str.split(/\s+/, words).join(" ");
}
// Gets called on document ready
$(function() {
    var num_posts = 200; // upper bound on posts pulled from the feed
    var num_words = 40;  // excerpt length, in words
    getRecentPosts(num_posts, function(posts) {
        var template = "";
        for (var i = 0; i < posts.length; i++) {
            var post = posts[i];
            // Strip HTML by round-tripping through a detached div, then crop.
            var excerpt = crop($("<div/>").html(post.content).text(), num_words);
            // substr(5, 11) keeps the "dd Mon yyyy" part of an RFC-822 pubDate.
            template += "<article class='post-excerpt'><span><time>"
                + post.date.substr(5, 11)
                + "</time><h2>"
                + post.title
                + "</h2><hr /><p>"
                + excerpt
                + "</p></span><a class='pure-button button-s green-green' href='"
                + post.url
                + "'>Read More</a></article>";
        }
        $("#posts_list").html(template)
    });
});
|
<gh_stars>0
package net.querz.event.test;
import static junit.framework.TestCase.*;
import net.querz.event.Event;
import java.util.*;
/**
 * Test helper that records {@link Event} callbacks per UUID and lets tests
 * assert on their presence and relative call order. Backed by LinkedHashMaps
 * so insertion order doubles as call order.
 */
public class EventCallCollector {
    private static LinkedHashMap<UUID, LinkedHashMap<String, Event>> map = new LinkedHashMap<>();

    /** Asserts that exactly this Event instance was recorded under id/name. */
    public static void assertEventExists(UUID id, String name, Event event) {
        assertTrue(map.containsKey(id));
        assertTrue(map.get(id).containsKey(name));
        // FIX: assertSame replaces assertTrue(a == b) — same identity check,
        // but a failure now reports both objects instead of a bare "false".
        assertSame(event, map.get(id).get(name));
    }

    /** Asserts that no event named {@code name} was recorded for {@code id}. */
    public static void assertEventDoesNotExist(UUID id, String name) {
        if (map.containsKey(id)) {
            assertFalse(map.get(id).containsKey(name));
        }
    }

    /** Records an event call; fails if the same id/name pair was already seen. */
    public static void add(UUID id, String name, Event event) {
        if (map.containsKey(id)) {
            assertFalse(map.get(id).containsKey(name));
            map.get(id).put(name, event);
        } else {
            LinkedHashMap<String, Event> s = new LinkedHashMap<>();
            s.put(name, event);
            map.put(id, s);
        }
    }

    /** Asserts both calls were recorded and that {@code first} came before {@code second}. */
    public static void assertCallOrder(UUID id, String first, String second, Event event) {
        assertEventExists(id, first, event);
        assertEventExists(id, second, event);
        assertTrue(indexOf(map.get(id), first) < indexOf(map.get(id), second));
    }

    // Insertion-order index of key within m; fails the test if absent.
    private static int indexOf(LinkedHashMap<String, Event> m, String key) {
        int i = 0;
        for (Map.Entry<String, Event> entry : m.entrySet()) {
            if (entry.getKey().equals(key)) {
                return i;
            }
            i++;
        }
        fail("Key does not exist in map.");
        return -1;
    }

    /** Drops all recorded events (call between tests). */
    public static void clear() {
        map.clear();
    }
}
|
<reponame>jameseden1/lorawan-stack
// Copyright © 2019 The Things Network Foundation, The Things Industries B.V.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package ttnpb_test
import (
"testing"
"github.com/smartystreets/assertions"
. "go.thethings.network/lorawan-stack/v3/pkg/ttnpb"
"go.thethings.network/lorawan-stack/v3/pkg/util/test/assertions/should"
)
// TestMACVersion_Version checks that Version() does not panic for any known
// MAC version constant other than the explicit UNKNOWN sentinel.
func TestMACVersion_Version(t *testing.T) {
	a := assertions.New(t)
	for v := range MACVersion_name {
		// UNKNOWN is the only value for which Version() may panic.
		if v == int32(MACVersion_MAC_UNKNOWN) {
			continue
		}
		a.So(func() { MACVersion(v).Version() }, should.NotPanic)
	}
}
// TestMACVersionCompare table-tests MACVersion.Compare, including the
// anti-symmetry property and the panic behaviour for UNKNOWN versions.
func TestMACVersionCompare(t *testing.T) {
	for _, tc := range []struct {
		A, B     MACVersion
		Expected int
		Panics   bool
	}{
		{
			A:        MACVersion_MAC_V1_0,
			B:        MACVersion_MAC_V1_0_1,
			Expected: -1,
		},
		{
			A:        MACVersion_MAC_V1_1,
			B:        MACVersion_MAC_V1_0,
			Expected: 1,
		},
		{
			A:        MACVersion_MAC_V1_1,
			B:        MACVersion_MAC_V1_1,
			Expected: 0,
		},
		{
			A:        MACVersion_MAC_V1_0_2,
			B:        MACVersion_MAC_V1_1,
			Expected: -1,
		},
		{
			A:      MACVersion_MAC_UNKNOWN,
			B:      MACVersion_MAC_V1_1,
			Panics: true,
		},
		{
			A:      MACVersion_MAC_UNKNOWN,
			B:      MACVersion_MAC_UNKNOWN,
			Panics: true,
		},
		{
			A:      MACVersion_MAC_V1_0,
			B:      MACVersion_MAC_UNKNOWN,
			Panics: true,
		},
	} {
		a := assertions.New(t)
		if tc.Panics {
			a.So(func() { tc.A.Compare(tc.B) }, should.Panic)
			// FIX: was `return`, which aborted the whole test after the first
			// panicking case and silently skipped every remaining table entry.
			continue
		}
		a.So(tc.A.Compare(tc.B), should.Equal, tc.Expected)
		// Compare must be anti-symmetric for distinct values.
		if tc.A != tc.B {
			a.So(tc.B.Compare(tc.A), should.Equal, -tc.Expected)
		}
	}
}
// TestDataRateIndex checks String/MarshalText output and that UnmarshalText
// accepts both the numeric and the enum-name spelling.
func TestDataRateIndex(t *testing.T) {
	a := assertions.New(t)
	a.So(DataRateIndex_DATA_RATE_4.String(), should.Equal, "DATA_RATE_4")
	b, err := DataRateIndex_DATA_RATE_4.MarshalText()
	a.So(err, should.BeNil)
	a.So(string(b), should.Resemble, "4")
	for _, str := range []string{"4", "DATA_RATE_4"} {
		var idx DataRateIndex
		err = idx.UnmarshalText([]byte(str))
		// FIX: the unmarshal error was previously ignored; a failed parse
		// would leave idx at its zero value and produce a confusing failure.
		a.So(err, should.BeNil)
		a.So(idx, should.Equal, DataRateIndex_DATA_RATE_4)
	}
}
// TestDeviceEIRP checks the String/MarshalText/UnmarshalText round trip for a
// representative DeviceEIRP value.
func TestDeviceEIRP(t *testing.T) {
	a := assertions.New(t)
	a.So(DeviceEIRP_DEVICE_EIRP_10.String(), should.Equal, "DEVICE_EIRP_10")
	b, err := DeviceEIRP_DEVICE_EIRP_10.MarshalText()
	a.So(err, should.BeNil)
	a.So(b, should.Resemble, []byte("DEVICE_EIRP_10"))
	var v DeviceEIRP
	err = v.UnmarshalText([]byte("DEVICE_EIRP_10"))
	a.So(v, should.Equal, DeviceEIRP_DEVICE_EIRP_10)
	a.So(err, should.BeNil)
}
|
import { PromiseOrValue } from '../promise/promise';
import { MapFunction } from '../value/map';
import { Maybe } from '../value/maybe.type';
/**
 * Function that takes no arguments and returns a value.
 */
export type Getter<T> = () => T;
/**
 * Getter with the design intent of returning a new value on each call.
 * (Structurally identical to Getter; the alias documents intent only.)
 */
export type Factory<T> = Getter<T>;
/**
 * Function that returns a value, taking an optional single argument.
 * Note the type-parameter order here is (Output, Input), unlike the
 * (T, A) order used by the aliases below.
 */
export type FactoryWithInput<O, I> = (args?: I) => O;
/**
 * Function that returns a value from a single required argument.
 */
export type FactoryWithRequiredInput<T, A> = MapFunction<A, T>;
/**
 * Either a Getter, or a plain instance of the item.
 */
export type GetterOrValue<T> = T | Getter<T>;
/**
 * A GetterOrValue whose value may be wrapped in a Promise.
 */
export type AsyncGetterOrValue<T> = GetterOrValue<PromiseOrValue<T>>;
/**
 * Either a no-argument Getter or a FactoryWithInput.
 */
export type GetterOrFactoryWithInput<T, A> = Getter<T> | FactoryWithInput<T, A>;
/**
 * Either a GetterOrValue, or a FactoryWithInput. T is constrained to
 * non-function categories so a bare function value is unambiguously a factory.
 */
export type GetterOrValueWithInput<T extends string | number | object | symbol, A> = GetterOrValue<T> | FactoryWithInput<T, A>;
export type StringOrGetter = GetterOrValue<string>;
/**
* Returns true if the input object looks like a Getter (is a function).
*
* @param value
* @returns
*/
/**
 * Type guard: treats any callable as a Getter.
 *
 * @param value candidate value
 * @returns true when value is a function
 */
export function isGetter<T = unknown>(value: unknown): value is Getter<T> {
  const isFunction = typeof value === 'function';
  return isFunction;
}
/**
* If the input is a function, it is executed. Otherwise, the value is returned.
*
* @param input
* @returns
*/
export function getValueFromGetter<T>(input: GetterOrValue<T>): T;
export function getValueFromGetter<T>(this: unknown, input: GetterOrValue<T>): T;
export function getValueFromGetter<T>(this: unknown, input: GetterOrValue<T>, inputArgs?: unknown): T;
export function getValueFromGetter<T, A>(this: unknown, input: GetterOrFactoryWithInput<T, A>, args?: A): T;
export function getValueFromGetter<T extends string | number | object | symbol, A>(this: unknown, input: GetterOrValueWithInput<T, A>, args?: A): T;
/**
 * Executes the input when it is a function (passing args through); otherwise
 * returns the input unchanged.
 */
export function getValueFromGetter<T, A>(this: unknown, input: unknown, args?: A): T {
  // Non-functions are plain values and pass straight through.
  if (typeof input !== 'function') {
    return input as T;
  }
  return input(args);
}
/**
* Returns the input as a getter.
*
* @param input
* @returns
*/
export function asGetter<T>(input: GetterOrValue<T>): Getter<T> {
  // A function is already a getter; any other value gets captured in a closure.
  if (typeof input === 'function') {
    return input as Getter<T>;
  }
  const value = input as T;
  return () => value;
}
/**
* Wraps the input and returns a Getter for that value.
*
* @param input
* @returns
*/
export function makeGetter<T>(input: T): Getter<T> {
  // Capture the value once; every call of the returned getter yields it.
  const captured = input;
  return function getter(): T {
    return captured;
  };
}
/**
* A factory that can take in an index input optionally.
*/
export type FactoryWithIndex<T> = FactoryWithInput<T, number> | FactoryWithRequiredInput<T, number>;
export function makeWithFactory<T>(factory: Factory<T> | FactoryWithIndex<T>, count: number): T[] {
  // Build `count` items, passing each item's index to the factory.
  return Array.from({ length: count }, (_, index) => factory(index));
}
export function makeWithFactoryInput<T, A>(factory: FactoryWithInput<T, A>, input: Maybe<A>[]): T[];
export function makeWithFactoryInput<T, A>(factory: FactoryWithRequiredInput<T, A>, input: A[]): T[];
export function makeWithFactoryInput<T, A>(factory: FactoryWithRequiredInput<T, A>, input: A[]): T[] {
  // Call the factory with exactly one argument per input value (a bare
  // `input.map(factory)` would also leak the index as a second argument).
  const made: T[] = [];
  for (const value of input) {
    made.push(factory(value));
  }
  return made;
}
|
# Copyright 2019 Xilinx Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Run from the shared code directory that holds the train/eval driver.
cd ../com
# Evaluation-only pass (no training) of the float MobileNet v1 1.0/224 Keras
# model over the ImageNet validation images listed in val.txt.
# NOTE(review): --label_offset 1 presumably shifts ImageNet labels by one
# (e.g. a background class at index 0) — confirm against the label list.
python train_eval_h5.py \
    --model ../../float/mobilenet_1_0_224_tf.h5 \
    --eval_only=true \
    --eval_images=true \
    --eval_image_path ../../data/Imagenet/val_dataset \
    --eval_image_list ../../data/Imagenet/val.txt \
    --label_offset 1 \
    --gpus 2
|
#!/bin/bash
# Refreshes the protocol-viewer site data from a local devtools-protocol
# checkout: copies/merges the protocol JSON files, rebuilds derived data,
# and stamps the last-change dates into pages/tot.md.
set -x
# Machine-specific path, naturally
local_script_path="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
protocol_repo_path="$local_script_path/../devtools-protocol"
browser_protocol_path="$protocol_repo_path/json/browser_protocol.json"
js_protocol_path="$protocol_repo_path/json/js_protocol.json"
# => into viewer
cd $local_script_path
local_tot_protocol_path="pages/_data/tot.json"
local_v8_protocol_path="pages/_data/v8.json"
# -s: fail early when the checkout is missing or the file is empty.
if ! [ -s $browser_protocol_path ]; then
  echo "error: couldn't find local protocol file" >&2; exit 1
fi
# copy the protocol.json over
cp $js_protocol_path $local_v8_protocol_path
# merge and create all our data files
node merge-protocol-files.js $browser_protocol_path $js_protocol_path > $local_tot_protocol_path
node make-stable-protocol.js
node create-search-index.js
# get the latest change
# => into chromium
cd $(dirname "$browser_protocol_path")
br_commit_line=$(git log --date=iso --no-color --max-count=1 -- browser_protocol.json | grep -E -o "^commit.*")
br_date_line=$(git log --date=iso --no-color --max-count=1 -- browser_protocol.json | grep -E -o "^Date.*")
cd $(dirname "$js_protocol_path")
js_commit_line=$(git log --date=iso --no-color --max-count=1 -- js_protocol.json | grep -E -o "^commit.*")
js_date_line=$(git log --date=iso --no-color --max-count=1 -- js_protocol.json | grep -E -o "^Date.*")
# copy it into the HTML file
# => into viewer
cd $local_script_path
# we no longer printing the most recent protocol git hashes.
# we can restore this when the devtools-protocol repo starts includes that data
# NOTE(review): br_commit_line/js_commit_line are captured above but unused —
# presumably kept for when hash printing is restored; confirm.
# Rewrite the Date line inside the <code browser>/<code js> markers in place
# (via a .new temp file because sed reads while we write).
cat pages/tot.md | sed -Ee "s/^(<code browser>)Date.*/\1$br_date_line/" > pages/tot.md.new
cat pages/tot.md.new | sed -Ee "s/^(<code js>)Date.*/\1$js_date_line/" > pages/tot.md
rm -f pages/tot.md.new
|
#!/bin/bash
#
# Test harness for generating files under tests/rust_protobuf/v[23]
# and expecting them either to succeed or fail for some known reason.
#

# Checked in the end for non-empty value which serves as a boolean flag
have_failures=""

# Expected codegen failures are marked in the associative array `must_fail`
# with the relative path as the key and reason as value.
# When adding new, remember not to add any whitespace around `=`.
declare -A must_fail
must_fail["v2/test_group_pb.proto"]="expected failure (empty read)"
must_fail["v2/test_root_pb.proto"]="root search is not implemented yet"
must_fail["v3/test_enum_alias_pb.proto"]="enum alias not implemented"
must_fail["v2/test_enum_alias_pb.proto"]="enum alias not implemented"
must_fail["v2/test_expose_oneof_pb.proto"]="missing file"
must_fail["v2/test_enum_invalid_default.proto"]="enum variant does not exist"
must_fail["v3/test_enum_invalid_default.proto"]="enum variant does not exist"

# Combined stdout and stderr for codegen of unexpectedly failed file.
declare -A outs

# True when $1 is listed in must_fail.
expecting_failure() {
    [ "${must_fail["$1"]+_}" ]
}

expecting_success() {
    ! expecting_failure "$1"
}

# Message printed for a file whose codegen behaved as expected.
success_msg() {
    if expecting_failure "$1"; then
        echo "${must_fail["$1"]}"
    else
        echo "ok"
    fi
}

# Run codegen for a single .proto file and compare the outcome against
# expectations. Updates the globals `have_failures` and `outs`.
check_one() {
    local f="$1"
    local ret=0
    local out
    rm -f "${f%.proto}.rs"
    out="$(cargo run -p pb-rs --quiet -- "$f" 2>&1)" || ret=$?
    if expecting_failure "$f" && [ "$ret" -eq 0 ]; then
        outs["$f"]="$out"
        have_failures="true"
        echo "$f: unexpected success"
    elif expecting_success "$f" && [ "$ret" -ne 0 ]; then
        have_failures="true"
        outs["$f"]="$out"
        echo "$f: unexpected failure $ret"
    else
        echo "$f: $(success_msg "$f")"
    fi
}

# Previously two identical loops over v[23]/ and common/; one pass suffices
# and keeps the original ordering (all v[23] files first, then common).
for f in v[23]/*.proto common/*.proto; do
    check_one "$f"
done

echo
if [ "$have_failures" ]; then
    echo "There were code generation failures:"
    for f in "${!outs[@]}"; do
        echo
        echo "$f:"
        echo "${outs["$f"]}"
    done
    exit 1
else
    echo "All files generated as expected"
fi
|
# Import necessary packages
import re

import nltk
import nltk.classify.util
from nltk.classify import NaiveBayesClassifier


# Turn a list of words into an NLTK feature dict (bag-of-words presence).
def word_feats(words):
    """Map each word to a True-valued feature, the shape NLTK classifiers expect."""
    return dict([(word, True) for word in words])


# NaiveBayesClassifier.train() requires labelled data: a list of
# (featureset, label) pairs. The previous version passed bare word features
# with no labels and classified the raw string (iterating its characters),
# which cannot work. Train on a tiny labelled corpus instead.
train_samples = [
    ("ciao mondo come stai", "italian"),
    ("buongiorno grazie mille", "italian"),
    ("hello world how are you", "english"),
    ("good morning thank you", "english"),
]
train_set = [(word_feats(text.split()), lang) for text, lang in train_samples]

# Construct the classifier from the labelled feature sets
clf = NaiveBayesClassifier.train(train_set)

# Sample text: lowercase and strip punctuation so words match the corpus
text = "Ciao mondo!"
words = re.findall(r"\w+", text.lower())

# Determine the language of the text (classify word features, not the raw string)
lang_detected = clf.classify(word_feats(words))

# Print out the result
print("The language of the text is: " + lang_detected)
# Output : The language of the text is: italian
#!/usr/bin/env bash
# Build, align, and sign the prod release APK.
# Required env: ANDROID_KEYSTORE_B64 (base64 keystore), ANDROID_KEY_ALIAS,
# ANDROID_KEYSTORE_PASSWORD, ANDROID_KEY_PASSWORD.
set -euo pipefail

../tools-public/generate-dynamic-macros-android.sh

./gradlew --stacktrace :app:preBuild
./gradlew --stacktrace :app:assembleProdMinSdkProdKernelRelease

# zipalign must run before signing (apksigner invalidates later alignment).
zipalign -f -v -p 4 app/build/outputs/apk/prodMinSdkProdKernel/release/app-prodMinSdk-prodKernel-release-unsigned.apk app-prod-release-unsigned-aligned.apk

# Quote all expansions: the unquoted base64 blob / alias would be subject to
# word splitting. The keystore is fed through a process substitution so it
# never touches the filesystem.
apksigner sign \
  --ks <(echo "$ANDROID_KEYSTORE_B64" | base64 -d) \
  --ks-key-alias "$ANDROID_KEY_ALIAS" --ks-pass env:ANDROID_KEYSTORE_PASSWORD \
  --key-pass env:ANDROID_KEY_PASSWORD \
  --out exponent-release.apk app-prod-release-unsigned-aligned.apk
|
#!/bin/bash
# Run a single dieharder RNG test with a fixed seed for reproducibility:
#   -d 11 : run only test number 11
#   -g 7  : use generator number 7
#   -S N  : seed the generator with N
# NOTE(review): numeric test/generator IDs depend on the installed dieharder
# version's tables (see `dieharder -l` and `dieharder -g -1`) — confirm.
dieharder -d 11 -g 7 -S 2265652022
|
//import {Store, get, set, del, clear, keys, drop} from "../extern/idb-keyval.js"
import {JAXDiskStore, get, set, del, clear, keys, drop} from "./JAXDiskDB.js"
var JAXDisk,__Proto;
//***************************************************************************
//JAX's virtual disk system for web
//***************************************************************************
//Construct a disk object over three JAXDiskStore backends:
//  dbStore - file/dir records, infoStore - metadata, baseStore - file backups.
//writeObj/writeVsn order the disk's write operations. A `majorDisk` (with
//`code`) registers this disk as its sub-disk instead of in the global hash.
JAXDisk=function(diskName,majorDisk=null,code="")
{
    this.name=diskName;
    //this.dbStore=new Store('JAXDisk_'+diskName, code?(code+"@"+diskName):diskName);
    this.dbStore=new JAXDiskStore(diskName);
    //Most recent pending write promise (tagged with a writeVsn) — later
    //writes await it to keep store updates ordered.
    this.writeObj=null;
    this.writeVsn=0;
    if(majorDisk){
        majorDisk.subDisk[code]=this;
    }else {
        JAXDisk.diskHash[diskName] = this;
        this.subDisk={};
    }
    //NOTE(review): refCount is incremented in openDisk but no decrement is
    //visible in this file — confirm a release path exists elsewhere.
    this.refCount=1;
    this.infoStore=new JAXDiskStore(diskName, "info");
    this.baseStore=new JAXDiskStore(diskName, "base");
};
//---------------------------------------------------------------------------
//All available disk:
//JAXDisk.sysStore=new Store('JAXDisk_', "System");
//System-wide store persisting the list of disk names under the "disks" key.
JAXDisk.sysStore=JAXDiskStore.systemStore();
//Cached Set of known disk names; null until init() has run.
JAXDisk.disks=null;
//Cache of open JAXDisk objects keyed by disk name.
JAXDisk.diskHash={};
//***************************************************************************
//JAXDisk static-system functions:
//***************************************************************************
{
//---------------------------------------------------------------------------
//Init jax-disk system.
//Load (or reload, when `refresh` is true) the persisted disk-name list into
//the JAXDisk.disks cache, seeding an empty list on first ever run.
JAXDisk.init=async function(refresh=false) {
    if(JAXDisk.disks && !refresh){
        return;
    }
    const list = await get("disks", JAXDisk.sysStore);
    if (Array.isArray(list)) {
        JAXDisk.disks = new Set(list);
    } else {
        JAXDisk.disks = new Set();
        //Fire-and-forget seed write: init() intentionally does not wait for it.
        set("disks", [], JAXDisk.sysStore);
    }
};
//---------------------------------------------------------------------------
//Open an disk, may create new disk if param create is true:
//Open a disk by name, consulting the in-memory caches first and re-reading
//the persisted "disks" list to detect disks removed by another context.
//When `create` is truthy a missing disk is created; otherwise resolves null.
JAXDisk.openDisk = async function (diskName, create) {
    let disk;
    if(!this.disks) {
        await this.init();
    }
    disk = this.diskHash[diskName];
    if (disk) {
        //Check if the disk is still there:
        try {
            let list = await get("disks", JAXDisk.sysStore);
            if (list.indexOf(diskName) >= 0) {
                disk.refCount++;
                return disk;
            } else {
                //The disk is removed.
                this.disks.delete(diskName);
                delete this.diskHash[diskName];
                return null;
            }
        }catch(err) {
            //NOTE(review): any DB error (or a null list) lands here and is
            //reported as "no such disk" — confirm this is intended.
            return null;
        }
    }
    if (!this.disks.has(diskName)) {
        //Not cached: the name may still exist on disk (written elsewhere).
        let list=await get("disks",JAXDisk.sysStore);
        if(list && list.indexOf(diskName)>=0){
            this.disks.add(diskName);
            return new JAXDisk(diskName);
        }
        if (create) {
            return await JAXDisk.newDisk(diskName);
        }
        return null;
    } else {
        //Name cached but no live object: verify against the DB before reopening.
        let list=await get("disks",JAXDisk.sysStore);
        if(list && list.indexOf(diskName)>=0) {
            return new JAXDisk(diskName);
        }
        //The disk is removed.
        this.disks.delete(diskName);
        delete this.diskHash[diskName];
        return null;
    }
    //NOTE(review): unreachable — every branch above returns.
    return null;
};
//---------------------------------------------------------------------------
//Check if a disk is exist:
//Resolve to 1/0 depending on whether `diskName` exists. With a truthy
//`doubleCheck`, a cache miss falls back to re-reading the persisted list.
JAXDisk.diskExist = function (diskName,doubleCheck=0) {
    var self=this;
    return new Promise((resolve, reject) => {
        if (self.disks.has(diskName)) {
            resolve(1);
        } else {
            if(doubleCheck){
                //Check the database again:
                //NOTE(review): the `return` only exits the executor; if get()
                //rejects here the promise is never settled — confirm.
                return get("disks",JAXDisk.sysStore).then(list=>{
                    if(list.indexOf(diskName)>=0){
                        //Found on disk: repair the in-memory cache too.
                        JAXDisk.disks.add(diskName);
                        resolve(1);
                    }else{
                        resolve(0);
                    }
                });
            }else {
                resolve(0);
            }
        }
    });
};
//---------------------------------------------------------------------------
//Create a new disk:
//Create a new disk named `diskName`, resolving with the existing disk when
//the name is already registered. Persists the updated name list, then seeds
//the new store with an empty root directory record under the "." key.
JAXDisk.newDisk = function (diskName) {
    let self=this,diskObj;
    //"/" and "*" are reserved by the path/key syntax.
    if(diskName.indexOf("/")>=0 || diskName.indexOf("*")>=0){
        throw new Error("New disk: illegal name.");
    }
    return new Promise((resolve, reject) => {
        if (self.disks.has(diskName)) {
            //Already known: resolve with the opened existing disk.
            self.openDisk(diskName,0).then((disk)=>{
                resolve(disk);
            });
            return;
        }
        self.disks.add(diskName);
        set('disks', Array.from(self.disks), self.sysStore).then(() => {
            //let store=new Store('JAXDisk_'+diskName, diskName,1);
            let store=new JAXDiskStore(diskName);
            //"." is the disk's root directory record.
            set(".",{},store).then(()=>{
                diskObj=new JAXDisk(diskName);
                resolve(diskObj);
            });
        })
    });
};
//---------------------------------------------------------------------------
//Get a disk's info VO:
//Read the info VO stored in the disk's companion "info" store.
//Resolves to null when the disk does not exist.
JAXDisk.getDiskInfo=async function(diskName){
    if (!this.disks) {
        await this.init();
    }
    const disk = await this.openDisk(diskName, false);
    if (!disk) {
        return null;
    }
    if (!disk.infoStore) {
        //Lazily (re)create the companion info store when absent.
        disk.infoStore = new JAXDiskStore(diskName, "info");
    }
    return get("info", disk.infoStore);
};
//---------------------------------------------------------------------------
//Set a disk's info VO:
//Write the disk's info VO into its "info" store, serialized behind the
//disk's pending write (writeObj/writeVsn). Resolves null if no such disk.
JAXDisk.setDiskInfo=async function(diskName,info){
    let disk,infoStore,pms;
    if(!this.disks) {
        await this.init();
    }
    disk=await this.openDisk(diskName);
    if(!disk){
        return null;
    }
    infoStore=disk.infoStore;
    if(!infoStore){
        //No companion store yet: create it and write without serialization.
        //disk.infoStore=infoStore=new Store('JAXDiskInfo_'+diskName, diskName,1);
        disk.infoStore=infoStore=new JAXDiskStore(diskName, "info");
        await set("info",info,infoStore);
        return;
    }
    //Wait for the disk's in-flight write before queueing this one.
    if(disk.writeObj){
        await disk.writeObj;
    }
    pms=set("info",info,infoStore);
    disk.writeObj=pms;
    pms.writeVsn=disk.writeVsn++;
    return pms;
};
//---------------------------------------------------------------------------
//Get a disk's info VO:
//Read a single disk attribute persisted under the "attr_<name>" key in the
//disk's "info" store; resolves to null when the disk does not exist.
JAXDisk.getDiskAttr=async function(diskName,attr){
    if (!this.disks) {
        await this.init();
    }
    const disk = await this.openDisk(diskName, false);
    if (!disk) {
        return null;
    }
    if (!disk.infoStore) {
        disk.infoStore = new JAXDiskStore(diskName, "info");
    }
    return get("attr_" + attr, disk.infoStore);
};
//---------------------------------------------------------------------------
//Set a disk's attr:
//Persist one disk attribute under the "attr_<key>" key, serialized behind
//the disk's pending write. Resolves null when the disk does not exist.
JAXDisk.setDiskAttr=async function(diskName,key,val){
    let disk,infoStore,pms;
    if(!this.disks) {
        await this.init();
    }
    disk=await this.openDisk(diskName);
    if(!disk){
        return null;
    }
    infoStore=disk.infoStore;
    if(!infoStore){
        //No companion store yet: create it and write without serialization.
        disk.infoStore=infoStore=new JAXDiskStore(diskName, "info");
        await set("attr_"+key,val,infoStore);
        return;
    }
    //Wait for the disk's in-flight write before queueing this one.
    if(disk.writeObj){
        await disk.writeObj;
    }
    pms=set("attr_"+key,val,infoStore);
    disk.writeObj=pms;
    pms.writeVsn=disk.writeVsn++;
    return pms;
};
//---------------------------------------------------------------------------
//Remove a disk, clear the DB(but not drop the db)
//Remove a disk: clear its three backing stores (without dropping the DBs),
//evict it from both caches, and persist the shrunken name list.
JAXDisk.dropDisk = function (diskName) {
    let self;
    self=this;
    return new Promise((resolve, reject) => {
        self.openDisk(diskName).then(async diskObj=>{
            if(diskObj){
                if(diskObj.dbStore) {
                    await clear(diskObj.dbStore);
                    if(diskObj.infoStore) {
                        await clear(diskObj.infoStore);
                    }
                    if(diskObj.baseStore) {
                        await clear(diskObj.baseStore);
                    }
                }
                //await drop("Disk_"+diskName);
            }
            //Evict from caches even when the disk failed to open.
            self.disks.delete(diskName);
            delete self.diskHash[diskName];
            set('disks', Array.from(self.disks), self.sysStore).then(resolve);
        });
    });
};
//---------------------------------------------------------------------------
//Get current disks list:
//Snapshot of the cached disk-name Set as a plain array.
JAXDisk.getDisks=function(){
    return [...this.disks];
};
//---------------------------------------------------------------------------
//Get current disk names
//Refresh the cache from the DB and return the disk names, dropping any
//cached names whose disks can no longer be opened.
JAXDisk.getDiskNames=async function(){
    let list,i,n,name,disk;
    await this.init(true);
    list=Array.from((this.disks));
    n=list.length;
    for(i=0;i<n;i++){
        name=list[i];
        disk=await JAXDisk.openDisk(name,0);
        if(!disk){
            //Stale entry: remove from the result list and both caches.
            //Index and length are adjusted because of the in-place splice.
            list.splice(i,1);
            JAXDisk.disks.delete(name);
            delete JAXDisk.diskHash[name];
            n--;i--;
        }
    }
    return list;
};
}
JAXDisk.prototype=__Proto={};
//***************************************************************************
//JAXDisk member funcitons
//***************************************************************************
{
//Split a path into [baseName, parentPath]. A single trailing "/" is
//ignored and a leading "/" is stripped from the parent part, so
//"a/b/c" -> ["c","a/b"], "file" -> ["file",""], "/x/y" -> ["y","x"].
var divPath=function (dirPath)
{
    let path = dirPath.endsWith("/") ? dirPath.slice(0, -1) : dirPath;
    const pos = path.lastIndexOf("/");
    let dirName;
    let upPath;
    if (pos >= 0) {
        dirName = path.slice(pos + 1);
        upPath = path.slice(0, pos);
    } else {
        dirName = path;
        upPath = "";
    }
    if (upPath.startsWith("/")) {
        upPath = upPath.slice(1);
    }
    return [dirName, upPath];
};
//-----------------------------------------------------------------------
//Create a new dir:
//Create the directory `dirPath` (and, when `recursive`, its missing
//ancestors). Writes are serialized behind the disk's pending write promise
//via writeVsn. Resolves with the existing dir record when already present,
//with null when an ancestor is missing and `recursive` is false.
__Proto.newDir=function(dirPath,allowRoot=0,recursive=true)
{
    var self=this;
    let writeVsn;
    if(dirPath==='.'){
        if(!allowRoot) {
            throw "Error: '.' is not allowed for folder name.";
        }
    }
    //Normalize: drop one trailing and one leading "/".
    if(dirPath.endsWith("/")){
        dirPath=dirPath.substring(0,dirPath.length-1);
    }
    if(dirPath.startsWith("/")){
        dirPath=dirPath.substring(1);
    }
    //Write the collected {path,obj} records in order (deepest first) and
    //resolve with the first (the newly created dir's own record).
    async function mkDirList(list){
        let i,n,stub;
        n=list.length;
        for(i=0;i<n;i++) {
            stub=list[i];
            await set(stub.path, stub.obj,self.dbStore);
        }
        return list[0];
    }
    async function doNewDir() {
        let waitPath;
        //Serialize behind any other in-flight write on this disk.
        if(self.writeObj && self.writeObj.writeVsn!==writeVsn){
            waitPath=self.writeObj.path;
            await self.writeObj;
        }
        return get(dirPath, self.dbStore).then(async curDirObj => {
            let upPath, pos, dirName;
            let dirList;
            let time = Date.now();
            dirList=[];
            //Check if path is already there and if it's dir?
            if (curDirObj instanceof Uint8Array) {
                throw "Can't create dir on file!";
            } else if (typeof (curDirObj) === 'object') {
                return curDirObj;
            }
            //Path is empty, create dir:
            dirList.push({path:dirPath,obj:{}});
            [dirName, upPath] = divPath(dirPath);
            if(!upPath){
                upPath=".";
            }
            //Walk up the ancestors, recording each record that needs a new
            //entry, until an existing ancestor record is found.
            while(upPath){
                let dirObj;
                dirObj=await get(upPath, self.dbStore);
                if(!dirObj){
                    if(!recursive){
                        return null;
                    }
                    //Missing ancestor: create it with this entry and keep climbing.
                    dirObj={};
                    dirObj[dirName]={
                        name: dirName, dir: 1, createTime: time, modifyTime: time,
                    };
                    dirList.push({path:upPath,obj:dirObj});
                }else{
                    //Existing ancestor: add the entry and stop climbing.
                    dirObj[dirName]={
                        name: dirName, dir: 1, createTime: time, modifyTime: time,
                    };
                    dirList.push({path:upPath,obj:dirObj});
                    break;
                }
                //Reaching a missing root record means the disk is corrupt.
                if(upPath==="."){
                    throw "newDir: Bad disk structure!";
                }
                [dirName, upPath] = divPath(upPath);
                if(!upPath){
                    upPath=".";
                }
            }
            return await mkDirList(dirList);
        })
    }
    //Sync write operation:
    writeVsn=this.writeVsn++;
    self.writeObj=doNewDir();
    self.writeObj.writeVsn=writeVsn;
    self.writeObj.path=dirPath;
    //console.log("Set wait obj: "+dirPath);
    return self.writeObj;
};
//-----------------------------------------------------------------------
//Delete an entry-item, if path is a dir, also delete the whole dir tree under it.
//Delete the entry at `path`. Files remove one record; directories remove
//the whole subtree. In both cases the entry is also erased from the parent
//directory record. Serialized behind the disk's pending write promise.
__Proto.del=function(path){
    var self=this;
    let writeVsn;
    //console.log("Disk.del: "+path);
    //Normalize: strip trailing "/", leading "/" and a leading "./".
    if(path.endsWith("/")){
        path=path.substring(0,path.length-1);
    }
    if(path.startsWith("/")){
        path=path.substring(1);
    }
    if(path.startsWith("./")){
        path=path.substring(2);
    }
    //-------------------------------------------------------------------
    //Delete file/dir item array(list)
    async function doDelList(list){
        let i,n,item,pList;
        n=list.length;
        pList=[];
        for(i=0;i<n;i++){
            item=list[i];
            pList.push(del(item,self.dbStore));//Delete one item
        }
        //allSettled: a single failed delete does not abort the rest.
        return Promise.allSettled(pList);
    }
    //-------------------------------------------------------------------
    //List an dir's all sub-tree items including sub in sub
    async function doMakeList(tgtPath,tgtList){
        let list,i,n,stub;
        tgtList.push(tgtPath);
        list=await self.getEntries(tgtPath);
        n=list.length;
        for(i=0;i<n;i++){
            stub=list[i];
            if(stub.dir){
                await doMakeList((tgtPath?(tgtPath+"/"+stub.name):stub.name),tgtList);
            }else{
                tgtList.push((tgtPath?(tgtPath+"/"+stub.name):stub.name));
            }
        }
    }
    //-------------------------------------------------------------------
    //Erase item's entry in upper dir record:
    async function doDelEntry(tgtPath){
        let tgtName,upPath;
        [tgtName,upPath]=divPath(tgtPath);
        if(!upPath){
            upPath=".";
        }
        return get(upPath,self.dbStore).then((upDirObj)=>{
            if(upDirObj){
                delete upDirObj[tgtName];
                return set(upPath,upDirObj,self.dbStore);
            }
        });
    }
    //-------------------------------------------------------------------
    //Check delete item type, exec the del operation:
    async function checkAndDel()
    {
        let waitPath;
        //Serialize behind any other in-flight write on this disk.
        if(self.writeObj && self.writeObj.writeVsn!==writeVsn){
            waitPath=self.writeObj.path;
            console.log("Waiting: "+path+" on "+waitPath);
            await self.writeObj;
            console.log("Wait done: "+path+" on "+waitPath);
        }
        return get(path,self.dbStore).then(async (delObj)=> {
            let delList;
            //Do the delete:
            delList=[];
            if(delObj instanceof Uint8Array) {
                //File, nothing more.
                delList.push(path);
                return doDelList(delList).then(()=>{
                    return doDelEntry(path);
                });
            }else if(delObj){
                //Dir, generate the sub-item list to delete
                await doMakeList(path,delList);
                return doDelList(delList).then(()=>{
                    return doDelEntry(path);
                });
            }else{
                //Record already gone: still clean up the parent's entry.
                return doDelEntry(path);
            }
        });
    }
    writeVsn=this.writeVsn++;
    self.writeObj=checkAndDel();
    self.writeObj.writeVsn=writeVsn;
    self.writeObj.path=path;
    //console.log("Set wait obj: "+path);
    return self.writeObj;
};
//-----------------------------------------------------------------------
//Save a file, fileObj can be string, File-Object, etc.
//Save `fileObj` (string, File, Uint8Array or ArrayBuffer) at `path`,
//creating missing parent dirs when `recursive`. On the first modification
//of an existing file, its previous bytes are copied into baseStore as a
//backup. Content is hashed (SHA-256) to detect unchanged writes. Writes
//are serialized behind the disk's pending write promise.
__Proto.saveFile=function(path,fileObj,recursive=true)
{
    var self,tgtName,upPath,byteAry,time,writeVsn,byteHex;
    self=this;
    //console.log("JAXDisk.saveFile: Disk.saveFile: "+path);
    if(path.endsWith("/")){
        throw "JAXDisk.saveFile: Error: filename can't end with '/'!";
    }
    if(path.startsWith("/")){
        path=path.substring(1);
    }
    [tgtName,upPath]=divPath(path);
    time=Date.now();
    //SHA-256 of a byte buffer as a lowercase hex string.
    let digestBytes=async function(buf) {
        let hex;
        const hashBuffer = await crypto.subtle.digest('SHA-256', buf); // hash the message
        const hashArray = Array.from(new Uint8Array(hashBuffer)); // convert buffer to byte array
        hex= hashArray.map(b => b.toString(16).padStart(2, '0')).join(''); // convert bytes to hex string
        return hex;
    };
    //Save byte content to DB, update entry info, make base backup if needed:
    async function saveByteAry(){
        let dirVO,stub,oldData,oldHash;
        //wait for current update file:
        if(self.writeObj && self.writeObj.writeVsn!==writeVsn){
            await self.writeObj;
        }
        //get upper dirVO:
        dirVO=await get(upPath?upPath:".",self.dbStore);
        if(!dirVO){
            throw "Path is not available: "+upPath;
        }
        stub=dirVO[tgtName];
        if(stub){
            //file exists, update stub, save base if 1st :
            oldHash=stub.hash;
            stub.modifyTime=time;
            //First real content change: snapshot old bytes into baseStore.
            if(!stub.modified && (oldHash!==byteHex || stub.size!==byteAry.byteLength)) {
                oldData=await get(upPath ? (upPath + "/" + tgtName) : tgtName, self.dbStore);
                //save the base file content:
                if(oldData) {
                    //NOTE(review): this backup write is not awaited — confirm
                    //that racing it with the main write below is acceptable.
                    set(upPath ? (upPath + "/" + tgtName) : tgtName, oldData, self.baseStore);
                }
                stub.modified=true;
            }
            stub.size=byteAry.byteLength;
            stub.hash=byteHex;
            //update stub:
        }else{
            //new file, create stub:
            dirVO[tgtName]={
                name:tgtName,dir:0,createTime:time,modifyTime:time,size:byteAry.byteLength,modified:true,
                hash:byteHex
            };
        }
        //Write the file bytes first, then the updated directory record.
        await set(upPath?(upPath+"/"+tgtName):tgtName,byteAry,self.dbStore);
        await set(upPath?upPath:".",dirVO,self.dbStore);
    }
    //File.arrayBuffer() with a FileReader fallback for older browsers.
    async function arrayBuffer(file){
        if(file.arrayBuffer){
            return file.arrayBuffer();
        }
        return new Promise((onDone,onError)=>{
            let reader=new FileReader();
            reader.onload=function(event) {
                let arrayBuffer = event.target.result;
                onDone(arrayBuffer);
            };
            reader.readAsArrayBuffer(file);
        })
    }
    //Normalize the input into byteAry, hash it, then queue saveByteAry()
    //as the disk's new pending write.
    function doCopy(){
        //Ensure saved object is ByteArray
        if (typeof (fileObj) === 'string') {
            let encoder = new TextEncoder();
            byteAry = encoder.encode(fileObj);
            return digestBytes(byteAry).then(hex=>{
                writeVsn = self.writeVsn++;
                byteHex=hex;
                self.writeObj =saveByteAry();
                self.writeObj.filePath = path;
                self.writeObj.writeVsn = writeVsn;
                return self.writeObj;
            });
        } else if (fileObj instanceof File) {
            return arrayBuffer(fileObj).then(async buf => {
                byteAry = new Uint8Array(buf);
                return digestBytes(byteAry).then(hex=>{
                    writeVsn = self.writeVsn++;
                    byteHex=hex;
                    self.writeObj =saveByteAry();
                    self.writeObj.filePath = path;
                    self.writeObj.writeVsn = writeVsn;
                    return self.writeObj;
                });
            });
        } else if (fileObj instanceof Uint8Array) {
            byteAry = fileObj;
            return digestBytes(byteAry).then(hex=>{
                writeVsn = self.writeVsn++;
                byteHex=hex;
                self.writeObj =saveByteAry();
                self.writeObj.filePath = path;
                self.writeObj.writeVsn = writeVsn;
                return self.writeObj;
            });
        }else if(fileObj instanceof ArrayBuffer){
            byteAry = new Uint8Array(fileObj);
            return digestBytes(byteAry).then(hex=>{
                writeVsn = self.writeVsn++;
                byteHex=hex;
                self.writeObj =saveByteAry();
                self.writeObj.filePath = path;
                self.writeObj.writeVsn = writeVsn;
                return self.writeObj;
            });
        }
        //NOTE(review): any other input type falls through and returns
        //undefined silently — confirm whether it should throw instead.
    }
    if(upPath && recursive){
        //Ensure the target dir is there:
        return self.newDir(upPath).then(()=>{
            return doCopy();
        });
    }else{
        return doCopy();
    }
};
//-----------------------------------------------------------------------
//Load file data as ByteArray
//Load a file's raw bytes; resolves to null when the path is missing or
//refers to a directory record.
__Proto.loadFile=function(path)
{
    const store = this.dbStore;
    const cleanPath = path.startsWith("/") ? path.substring(1) : path;
    return get(cleanPath, store).then((data) =>
        data instanceof Uint8Array ? data : null);
};
//-----------------------------------------------------------------------
//Load file data as text
//Load a file and decode its bytes as UTF-8 text. Resolves to null when the
//path is missing, refers to a directory, or the read fails.
__Proto.loadText=function(path)
{
    const store = this.dbStore;
    const cleanPath = path.startsWith("/") ? path.substring(1) : path;
    return get(cleanPath, store)
        .then((data) => (data instanceof Uint8Array ? new TextDecoder("utf-8").decode(data) : null))
        .catch(() => null);
};
//-----------------------------------------------------------------------
//Read file, if encode!==null, read as text:
//Read a file; a truthy `encode` selects UTF-8 text, otherwise raw bytes.
__Proto.readFile=function(path,encode=null){
    return encode ? this.loadText(path) : this.loadFile(path);
};
//-----------------------------------------------------------------------
//List sub-item-vo under path, return null if path is a file:
//List the entry stubs stored in the directory record at `path` ("." for the
//root). Resolves to null when the path is a file or does not exist.
__Proto.getEntries=function(path)
{
    const store = this.dbStore;
    let dirPath = path.startsWith("/") ? path.substring(1) : path;
    if (!dirPath) {
        dirPath = '.';
    }
    return get(dirPath, store).then((record) => {
        if (record instanceof Uint8Array || !record) {
            //A file record, or nothing at this path: not a directory.
            return null;
        }
        return Object.values(record);
    });
};
//-----------------------------------------------------------------------
//Check if a path is existed:
//Resolve to true when a file or directory record exists at `path`.
__Proto.isExist=function(path)
{
    let target = path.startsWith("/") ? path.substring(1) : path;
    if (!target) {
        target = '.';
    }
    return get(target, this.dbStore).then((record) => Boolean(record));
};
//-----------------------------------------------------------------------
//Get item entry(info) by path
//Look up the entry stub for `path` in its parent directory record.
//The disk root resolves to a synthetic {name, dir:1, disk:1} entry;
//missing paths resolve to null.
__Proto.getEntry=async function(path){
    const [fileName, rawDir] = divPath(path);
    let dir = rawDir.startsWith("/") ? rawDir.substring(1) : rawDir;
    if (!dir) {
        dir = '.';
    }
    const dirObj = await get(dir, this.dbStore);
    if (!dirObj) {
        return null;
    }
    if (fileName === "") {
        return { name: this.name, dir: 1, disk: 1 };
    }
    return dirObj[fileName] || null;
};
//-----------------------------------------------------------------------
//Set item entry-info by path
//Merge `info` into the entry stub for `path` inside its parent directory
//record. The write is serialized behind the disk's pending write promise.
//Resolves to undefined when the path has no entry.
__Proto.setEntryInfo=async function(path,info){
    let self=this;
    let entry,pms;
    entry=await this.getEntry(path);
    //Bug fix: typeof null === "object", so a missing entry (getEntry
    //returns null) used to fall through and crash in Object.assign(null,...).
    //Require a truthy object before updating.
    if(entry && typeof(entry)==="object"){
        let dir,fileName,writeVersion;
        [fileName,dir]=divPath(path);
        if(dir.startsWith("/")){
            dir=dir.substring(1);
        }
        if(!dir){
            dir='.';
        }
        Object.assign(entry,info);
        //Serialize behind any in-flight write on this disk.
        if(self.writeObj){
            await self.writeObj;
        }
        writeVersion=self.writeVsn++;
        self.writeObj=pms=get(dir,self.dbStore).then((dirInfo)=>{
            dirInfo[fileName]=entry;
            return set(dir,dirInfo,self.dbStore);
        });
        pms.writeVsn=writeVersion;
        return pms;
    }
};
//-----------------------------------------------------------------------
//copy a file or dir, src can from another disk (orgDisk)
//Copy a file or directory tree from `path` to `newPath`, optionally from
//another disk (`orgDisk`). A two-phase algorithm: first walk the source
//tree collecting dirs/files to create (honoring `overwrite`), then create
//all dirs and copy all files. Resolves with the list of created target
//paths (dirs first, then files).
__Proto.copyFile=function(path,newPath,overwrite=1,orgDisk=null)
{
    var self=this;
    var dirList,fileList;
    orgDisk=orgDisk||this;
    //Normalize both paths: strip leading/trailing "/", empty means root ".".
    if(path.startsWith("/")){
        path=path.substring(1);
    }
    if(path.endsWith("/")){
        path=path.substring(0,path.length-1);
    }
    if(!path){
        path='.';
    }
    if(newPath.startsWith("/")){
        newPath=newPath.substring(1);
    }
    if(newPath.endsWith("/")){
        newPath=newPath.substring(0,newPath.length-1);
    }
    if(!newPath){
        newPath='.';
    }
    dirList=[];
    fileList=[];
    //Recursively classify source items into dirList/fileList, checking the
    //target disk to apply the overwrite policy.
    async function checkInItem(itemPath,tgtPath) {
        var itemObj,subPath,subTgtPath,curItem;
        itemObj=await get(itemPath,orgDisk.dbStore);
        if(itemObj instanceof Uint8Array){
            curItem=await get(tgtPath,self.dbStore);//Is target there?
            if(curItem) {
                if(overwrite && curItem instanceof Uint8Array) {//Can't overwrite a dir with file!
                    fileList.push({org: itemPath, tgt: tgtPath});
                }
            }else{
                fileList.push({org: itemPath, tgt: tgtPath});
            }
        }else if(typeof(itemObj)==="object"){
            var stub,itemName,name;
            dirList.push({org:itemPath,tgt:tgtPath});
            for(itemName in itemObj){
                name=itemName;
                stub=itemObj[name];
                subPath=itemPath?(itemPath+"/"+stub.name):stub.name;
                subTgtPath=tgtPath?(tgtPath+"/"+stub.name):stub.name;
                await checkInItem(subPath,subTgtPath);
            }
        }
    }
    //Copy one file's bytes across disks via load + save.
    function copyOneFile(stub){
        return orgDisk.loadFile(stub.org).then(fileData=>{
            return self.saveFile(stub.tgt,fileData);
        });
    }
    return get(path,orgDisk.dbStore).then(async fileObj=>{
        let i,n,pList;
        if(!fileObj){
            throw "Missing copy source: "+path;
        }
        await checkInItem(path,newPath);
        //Phase 2a: create all target directories.
        pList=[];
        n=dirList.length;
        for(i=0;i<n;i++){
            pList.push(self.newDir(dirList[i].tgt));
        }
        return Promise.allSettled(pList).then(async ()=>{
            //Phase 2b: copy all files, then report the created target paths.
            let pList=[],p,stub;
            n=fileList.length;
            for(i=0;i<n;i++){
                stub=fileList[i];
                p=copyOneFile(stub);
                pList.push(p);
            }
            return Promise.allSettled(pList).then(()=>{
                return dirList.map((item)=>{
                    return item.tgt;
                }).concat(fileList.map(item=>{
                    return item.tgt;
                }));
            });
        });
    });
};
//-----------------------------------------------------------------------
//Rename a file/dir
//Rename a file or directory within the same parent (implemented as copy
//then delete). Throws "Path error." when the parents differ; a same-name
//rename resolves to true without touching the store.
__Proto.rename=function(path,newPath)
{
    var self=this;
    //Trim one leading and one trailing "/" and map the empty path to ".".
    function normalize(p){
        if(p.startsWith("/")){
            p=p.substring(1);
        }
        if(p.endsWith("/")){
            p=p.substring(0,p.length-1);
        }
        return p || '.';
    }
    const srcPath = normalize(path);
    const dstPath = normalize(newPath);
    const [srcName, srcParent] = divPath(srcPath);
    const [dstName, dstParent] = divPath(dstPath);
    if (dstParent !== srcParent) {
        throw "Path error."
    }
    if (srcName === dstName) {
        //Nothing to do; keep the promise-returning contract.
        return Promise.resolve(true);
    }
    return self.copyFile(srcPath, dstPath).then(() => {
        return self.del(srcPath);
    });
};
//-----------------------------------------------------------------------
//Get all items path-name in a flat list:
//Flat list of every key in the main store (all file and directory paths).
__Proto.getAllItemPath=async function(){
    return keys(this.dbStore);
};
//-----------------------------------------------------------------------
//Load a file's base version:
//Load a file's backed-up base version from baseStore; a truthy `encode`
//decodes it as UTF-8 text. Resolves to null when no backup exists.
__Proto.loadFileBase=async function(path,encode=null){
    const cleanPath = path.startsWith("/") ? path.substring(1) : path;
    if (!this.baseStore) {
        return null;
    }
    const data = await get(cleanPath, this.baseStore);
    if (!(data instanceof Uint8Array)) {
        return null;
    }
    return encode ? new TextDecoder("utf-8").decode(data) : data;
};
}
export {JAXDisk};
|
# Read-only endpoint exposing the runners that belong to a category.
class Api::V4::Categories::RunnersController < Api::V4::ApplicationController
  # NOTE(review): set_category is defined outside this file (superclass or
  # concern); it presumably loads @category from request params — confirm.
  before_action :set_category, only: [:index]

  # GET index: renders the category's runners serialized by UserBlueprint
  # under a top-level :runners key.
  def index
    render json: Api::V4::UserBlueprint.render(@category.runners, root: :runners)
  end
end
|
#! /bin/sh
# Build the VuePress site and publish the generated output to gh-pages.
rm -rf ./docs/.vuepress/dist
npm run build
git add .
git commit -m 'AUTO_COMMIT'
# NOTE(review): pushes with no refspec, so the target branch depends on the
# local push.default setting — confirm the intended branch.
git push https://github.com/bytrix/mant-doc.git
# Switch to a local gh-pages branch tracking origin/gh-pages.
git checkout -b gh-pages origin/gh-pages
git branch -a
# Restore the freshly built dist folder from master into the working tree.
git checkout master docs/.vuepress/dist
# NOTE(review): assumes ./doc already exists on gh-pages — confirm.
mv docs/.vuepress/dist/* ./doc
|
<filename>Playtime/operators.rb
#!/usr/bin/ruby
# Demonstrates Ruby's arithmetic operators on a pair of integers.
# Note: "/" is integer division here because both operands are Integers.
x = 5
y = 3
puts format('%d + %d = %d', x, y, x + y)
puts format('%d - %d = %d', x, y, x - y)
puts format('%d x %d = %d', x, y, x * y)
puts format('%d / %d = %d', x, y, x / y)
puts format('%d %% %d = %d', x, y, x % y)
puts format('%d^%d = %d', x, y, x**y)
// Copyright 2022 DeepL SE (https://www.deepl.com)
// Use of this source code is governed by an MIT
// license that can be found in the LICENSE file.
const fs = require('fs');
const path = require('path');
const languages = require('./languages');
const util = require('./util');
function deleteFile(filePath) {
try {
fs.unlinkSync(filePath);
} catch (err) {
// ignore
}
}
const documents = new Map();
// Periodically evict stale documents, removing any temp files they own.
// Consistency fix: reuse deleteFile() for pathIn instead of duplicating the
// same try/unlinkSync/ignore logic inline.
util.scheduleCleanup(documents, (document, id) => {
  if (document.pathIn) {
    deleteFile(document.pathIn);
  }
  if (document.path_out) {
    deleteFile(document.path_out);
  }
  console.log('Removed document:', id);
});
// Build an uppercase hex string of `length` characters using Math.random()
// (not cryptographically secure — fine for mock ids/keys).
function generateRandomHexString(length) {
  const digits = [];
  for (let i = 0; i < length; i += 1) {
    digits.push('0123456789ABCDEF'[Math.floor(Math.random() * 16)]);
  }
  return digits.join('');
}
/**
 * Validates and stores an uploaded document for later mock translation.
 * Only .txt is actually translatable; the other allowed extensions yield 503.
 * @param file uploaded file object (provides name, mimetype, size and mv())
 * @param authKey auth key owning the document
 * @param targetLang target language code
 * @param sourceLang source language code (may be undefined)
 * @param glossary optional glossary id
 * @returns the stored document record
 * @throws util.HttpError 400 for unsupported extensions, 503 for non-.txt
 */
async function createDocument(file, authKey, targetLang, sourceLang, glossary) {
  const extname = path.extname(file.name).toLowerCase();
  if (!(['.txt', '.docx', '.pptx', '.htm', '.html'].includes(extname))) {
    throw new util.HttpError('Invalid file data.', 400);
  }
  if (extname !== '.txt') {
    throw new util.HttpError('Mock server only implements document translation for .txt files.', 503);
  }

  // Generate id & key for document
  const documentId = generateRandomHexString(32);
  const documentKey = generateRandomHexString(64);

  // Persist the upload under its id; mv() moves the temp upload into place.
  const pathIn = `./documents/${documentId}`;
  await file.mv(pathIn);

  // Add document to list
  const document = {
    id: documentId,
    key: documentKey,
    pathIn,
    path_out: undefined,
    name: file.name,
    mimetype: file.mimetype,
    created: new Date(),
    used: new Date(),
    authKey,
    source_lang: sourceLang,
    target_lang: targetLang,
    glossary,
    // Mock server simplification: billed characters assumed to be file size
    billed_characters: file.size,
    status: 'queued',
    seconds_remaining: undefined,
    error: undefined,
  };
  documents.set(documentId, document);
  console.log(`Storing document ${documentId} (key: ${documentKey})`);
  return document;
}
/**
 * Look up a document by id and validate its key and owning auth key;
 * refresh its status/seconds_remaining based on how long ago it was
 * created and the session's simulated queue/translate delays.
 * Throws a 404 when the id, key, or auth key does not match.
 */
function getDocument(documentId, documentKey, authKey, session) {
  const document = documents.get(documentId);
  const authorized = document !== undefined
    && document.key === documentKey
    && document.authKey === authKey;
  if (!authorized) {
    throw new util.HttpError('not found', 404);
  }

  // Simulated phase boundaries (ms since creation), taken from the session.
  const queuedUntil = session?.doc_queue_time || 0;
  const translatingUntil = (session?.doc_translate_time || 0) + queuedUntil;

  document.used = new Date();
  const age = document.used - document.created;
  if (document.error) {
    document.status = 'error';
  } else if (age < queuedUntil) {
    document.status = 'queued';
  } else if (age < translatingUntil || document.path_out === undefined) {
    document.status = 'translating';
    document.seconds_remaining = Math.round(Math.max(translatingUntil - age, 0) / 1000);
  } else {
    document.status = 'done';
    document.seconds_remaining = 0;
  }
  return document;
}
/**
 * Perform the mock translation of a stored document.
 * If the session still has simulated failures pending (doc_failure > 0),
 * one failure is consumed and the document is marked as errored instead of
 * translated. The input file is removed in either case.
 */
async function translateDocument(document, session) {
  /* eslint-disable no-param-reassign */
  // Note: this function may modify the document and session arguments.
  const { pathIn } = document;
  const pathOut = `${pathIn}.result`;
  if (session?.doc_failure > 0) {
    // Consume one simulated failure for this session.
    session.doc_failure -= 1;
    document.error = 'Translation error triggered';
    console.log(`Failing translation of ${pathIn}`);
  } else {
    const sourceText = fs.readFileSync(pathIn, 'utf8');
    const result = languages.translate(sourceText, document.target_lang, document.source_lang,
      document.glossary);
    fs.writeFileSync(pathOut, result.text);
    document.path_out = pathOut;
    console.log(`Translated ${pathIn} to ${document.target_lang}, stored result at ${pathOut}`);
  }
  console.log(`Removing input document ${pathIn}`);
  deleteFile(pathIn);
  /* eslint-enable no-param-reassign */
}
// Delete a document's translated output file (if any) and drop its record
// from the in-memory store.
function removeDocument(document) {
  console.log(`Removing output document ${document.path_out}`);
  deleteFile(document.path_out);
  documents.delete(document.id);
}
// Public API of the document-translation mock module.
module.exports = {
  createDocument, getDocument, translateDocument, removeDocument,
};
|
#!/usr/bin/env -S bash -euET -o pipefail -O inherit_errexit
# Machine-generated ktools orchestration script (oasislmf style): runs the
# ground-up (gul) and insured (il) loss pipelines for process P2, plus their
# fully-correlated variants, wired together with named FIFOs under
# /tmp/%FIFO_DIR%/fifo (the %FIFO_DIR% placeholder is substituted at run time).
SCRIPT=$(readlink -f "$0") && cd $(dirname "$SCRIPT")
# --- Script Init ---
mkdir -p log
rm -R -f log/*
# --- Setup run dirs ---
find output -type f -not -name '*summary-info*' -not -name '*.json' -exec rm -R -f {} +
mkdir output/full_correlation/
rm -R -f /tmp/%FIFO_DIR%/fifo/*
mkdir /tmp/%FIFO_DIR%/fifo/full_correlation/
rm -R -f work/*
mkdir work/kat/
mkdir work/full_correlation/
mkdir work/full_correlation/kat/
mkdir work/gul_S1_summaryaalcalc
mkdir work/gul_S2_summaryaalcalc
mkdir work/full_correlation/gul_S1_summaryaalcalc
mkdir work/full_correlation/gul_S2_summaryaalcalc
mkdir work/il_S1_summaryaalcalc
mkdir work/il_S2_summaryaalcalc
mkdir work/full_correlation/il_S1_summaryaalcalc
mkdir work/full_correlation/il_S2_summaryaalcalc
# Create one FIFO per stream: gul/il x summary sets S1/S2 x eltcalc,
# summarycalc and pltcalc consumers, for both the standard and the
# fully-correlated runs.
mkfifo /tmp/%FIFO_DIR%/fifo/full_correlation/gul_fc_P2
mkfifo /tmp/%FIFO_DIR%/fifo/gul_P2
mkfifo /tmp/%FIFO_DIR%/fifo/gul_S1_summary_P2
mkfifo /tmp/%FIFO_DIR%/fifo/gul_S1_eltcalc_P2
mkfifo /tmp/%FIFO_DIR%/fifo/gul_S1_summarycalc_P2
mkfifo /tmp/%FIFO_DIR%/fifo/gul_S1_pltcalc_P2
mkfifo /tmp/%FIFO_DIR%/fifo/gul_S2_summary_P2
mkfifo /tmp/%FIFO_DIR%/fifo/gul_S2_eltcalc_P2
mkfifo /tmp/%FIFO_DIR%/fifo/gul_S2_summarycalc_P2
mkfifo /tmp/%FIFO_DIR%/fifo/gul_S2_pltcalc_P2
mkfifo /tmp/%FIFO_DIR%/fifo/il_P2
mkfifo /tmp/%FIFO_DIR%/fifo/il_S1_summary_P2
mkfifo /tmp/%FIFO_DIR%/fifo/il_S1_eltcalc_P2
mkfifo /tmp/%FIFO_DIR%/fifo/il_S1_summarycalc_P2
mkfifo /tmp/%FIFO_DIR%/fifo/il_S1_pltcalc_P2
mkfifo /tmp/%FIFO_DIR%/fifo/il_S2_summary_P2
mkfifo /tmp/%FIFO_DIR%/fifo/il_S2_eltcalc_P2
mkfifo /tmp/%FIFO_DIR%/fifo/il_S2_summarycalc_P2
mkfifo /tmp/%FIFO_DIR%/fifo/il_S2_pltcalc_P2
mkfifo /tmp/%FIFO_DIR%/fifo/full_correlation/gul_P2
mkfifo /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S1_summary_P2
mkfifo /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S1_eltcalc_P2
mkfifo /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S1_summarycalc_P2
mkfifo /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S1_pltcalc_P2
mkfifo /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S2_summary_P2
mkfifo /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S2_eltcalc_P2
mkfifo /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S2_summarycalc_P2
mkfifo /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S2_pltcalc_P2
mkfifo /tmp/%FIFO_DIR%/fifo/full_correlation/il_P2
mkfifo /tmp/%FIFO_DIR%/fifo/full_correlation/il_S1_summary_P2
mkfifo /tmp/%FIFO_DIR%/fifo/full_correlation/il_S1_eltcalc_P2
mkfifo /tmp/%FIFO_DIR%/fifo/full_correlation/il_S1_summarycalc_P2
mkfifo /tmp/%FIFO_DIR%/fifo/full_correlation/il_S1_pltcalc_P2
mkfifo /tmp/%FIFO_DIR%/fifo/full_correlation/il_S2_summary_P2
mkfifo /tmp/%FIFO_DIR%/fifo/full_correlation/il_S2_eltcalc_P2
mkfifo /tmp/%FIFO_DIR%/fifo/full_correlation/il_S2_summarycalc_P2
mkfifo /tmp/%FIFO_DIR%/fifo/full_correlation/il_S2_pltcalc_P2
# --- Do insured loss computes ---
# Each consumer runs in the background; its PID is kept so we can wait on all
# of them before collating results.
eltcalc -s < /tmp/%FIFO_DIR%/fifo/il_S1_eltcalc_P2 > work/kat/il_S1_eltcalc_P2 & pid1=$!
summarycalctocsv -s < /tmp/%FIFO_DIR%/fifo/il_S1_summarycalc_P2 > work/kat/il_S1_summarycalc_P2 & pid2=$!
pltcalc -s < /tmp/%FIFO_DIR%/fifo/il_S1_pltcalc_P2 > work/kat/il_S1_pltcalc_P2 & pid3=$!
eltcalc -s < /tmp/%FIFO_DIR%/fifo/il_S2_eltcalc_P2 > work/kat/il_S2_eltcalc_P2 & pid4=$!
summarycalctocsv -s < /tmp/%FIFO_DIR%/fifo/il_S2_summarycalc_P2 > work/kat/il_S2_summarycalc_P2 & pid5=$!
pltcalc -s < /tmp/%FIFO_DIR%/fifo/il_S2_pltcalc_P2 > work/kat/il_S2_pltcalc_P2 & pid6=$!
# tee fans each summary stream out to the per-metric FIFOs and the aalcalc work file.
tee < /tmp/%FIFO_DIR%/fifo/il_S1_summary_P2 /tmp/%FIFO_DIR%/fifo/il_S1_eltcalc_P2 /tmp/%FIFO_DIR%/fifo/il_S1_summarycalc_P2 /tmp/%FIFO_DIR%/fifo/il_S1_pltcalc_P2 work/il_S1_summaryaalcalc/P2.bin > /dev/null & pid7=$!
tee < /tmp/%FIFO_DIR%/fifo/il_S2_summary_P2 /tmp/%FIFO_DIR%/fifo/il_S2_eltcalc_P2 /tmp/%FIFO_DIR%/fifo/il_S2_summarycalc_P2 /tmp/%FIFO_DIR%/fifo/il_S2_pltcalc_P2 work/il_S2_summaryaalcalc/P2.bin > /dev/null & pid8=$!
summarycalc -m -f -1 /tmp/%FIFO_DIR%/fifo/il_S1_summary_P2 -2 /tmp/%FIFO_DIR%/fifo/il_S2_summary_P2 < /tmp/%FIFO_DIR%/fifo/il_P2 &
# --- Do ground up loss computes ---
eltcalc -s < /tmp/%FIFO_DIR%/fifo/gul_S1_eltcalc_P2 > work/kat/gul_S1_eltcalc_P2 & pid9=$!
summarycalctocsv -s < /tmp/%FIFO_DIR%/fifo/gul_S1_summarycalc_P2 > work/kat/gul_S1_summarycalc_P2 & pid10=$!
pltcalc -s < /tmp/%FIFO_DIR%/fifo/gul_S1_pltcalc_P2 > work/kat/gul_S1_pltcalc_P2 & pid11=$!
eltcalc -s < /tmp/%FIFO_DIR%/fifo/gul_S2_eltcalc_P2 > work/kat/gul_S2_eltcalc_P2 & pid12=$!
summarycalctocsv -s < /tmp/%FIFO_DIR%/fifo/gul_S2_summarycalc_P2 > work/kat/gul_S2_summarycalc_P2 & pid13=$!
pltcalc -s < /tmp/%FIFO_DIR%/fifo/gul_S2_pltcalc_P2 > work/kat/gul_S2_pltcalc_P2 & pid14=$!
tee < /tmp/%FIFO_DIR%/fifo/gul_S1_summary_P2 /tmp/%FIFO_DIR%/fifo/gul_S1_eltcalc_P2 /tmp/%FIFO_DIR%/fifo/gul_S1_summarycalc_P2 /tmp/%FIFO_DIR%/fifo/gul_S1_pltcalc_P2 work/gul_S1_summaryaalcalc/P2.bin > /dev/null & pid15=$!
tee < /tmp/%FIFO_DIR%/fifo/gul_S2_summary_P2 /tmp/%FIFO_DIR%/fifo/gul_S2_eltcalc_P2 /tmp/%FIFO_DIR%/fifo/gul_S2_summarycalc_P2 /tmp/%FIFO_DIR%/fifo/gul_S2_pltcalc_P2 work/gul_S2_summaryaalcalc/P2.bin > /dev/null & pid16=$!
summarycalc -m -i -1 /tmp/%FIFO_DIR%/fifo/gul_S1_summary_P2 -2 /tmp/%FIFO_DIR%/fifo/gul_S2_summary_P2 < /tmp/%FIFO_DIR%/fifo/gul_P2 &
# --- Do insured loss computes ---
eltcalc -s < /tmp/%FIFO_DIR%/fifo/full_correlation/il_S1_eltcalc_P2 > work/full_correlation/kat/il_S1_eltcalc_P2 & pid17=$!
summarycalctocsv -s < /tmp/%FIFO_DIR%/fifo/full_correlation/il_S1_summarycalc_P2 > work/full_correlation/kat/il_S1_summarycalc_P2 & pid18=$!
pltcalc -s < /tmp/%FIFO_DIR%/fifo/full_correlation/il_S1_pltcalc_P2 > work/full_correlation/kat/il_S1_pltcalc_P2 & pid19=$!
eltcalc -s < /tmp/%FIFO_DIR%/fifo/full_correlation/il_S2_eltcalc_P2 > work/full_correlation/kat/il_S2_eltcalc_P2 & pid20=$!
summarycalctocsv -s < /tmp/%FIFO_DIR%/fifo/full_correlation/il_S2_summarycalc_P2 > work/full_correlation/kat/il_S2_summarycalc_P2 & pid21=$!
pltcalc -s < /tmp/%FIFO_DIR%/fifo/full_correlation/il_S2_pltcalc_P2 > work/full_correlation/kat/il_S2_pltcalc_P2 & pid22=$!
tee < /tmp/%FIFO_DIR%/fifo/full_correlation/il_S1_summary_P2 /tmp/%FIFO_DIR%/fifo/full_correlation/il_S1_eltcalc_P2 /tmp/%FIFO_DIR%/fifo/full_correlation/il_S1_summarycalc_P2 /tmp/%FIFO_DIR%/fifo/full_correlation/il_S1_pltcalc_P2 work/full_correlation/il_S1_summaryaalcalc/P2.bin > /dev/null & pid23=$!
tee < /tmp/%FIFO_DIR%/fifo/full_correlation/il_S2_summary_P2 /tmp/%FIFO_DIR%/fifo/full_correlation/il_S2_eltcalc_P2 /tmp/%FIFO_DIR%/fifo/full_correlation/il_S2_summarycalc_P2 /tmp/%FIFO_DIR%/fifo/full_correlation/il_S2_pltcalc_P2 work/full_correlation/il_S2_summaryaalcalc/P2.bin > /dev/null & pid24=$!
summarycalc -m -f -1 /tmp/%FIFO_DIR%/fifo/full_correlation/il_S1_summary_P2 -2 /tmp/%FIFO_DIR%/fifo/full_correlation/il_S2_summary_P2 < /tmp/%FIFO_DIR%/fifo/full_correlation/il_P2 &
# --- Do ground up loss computes ---
eltcalc -s < /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S1_eltcalc_P2 > work/full_correlation/kat/gul_S1_eltcalc_P2 & pid25=$!
summarycalctocsv -s < /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S1_summarycalc_P2 > work/full_correlation/kat/gul_S1_summarycalc_P2 & pid26=$!
pltcalc -s < /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S1_pltcalc_P2 > work/full_correlation/kat/gul_S1_pltcalc_P2 & pid27=$!
eltcalc -s < /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S2_eltcalc_P2 > work/full_correlation/kat/gul_S2_eltcalc_P2 & pid28=$!
summarycalctocsv -s < /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S2_summarycalc_P2 > work/full_correlation/kat/gul_S2_summarycalc_P2 & pid29=$!
pltcalc -s < /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S2_pltcalc_P2 > work/full_correlation/kat/gul_S2_pltcalc_P2 & pid30=$!
tee < /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S1_summary_P2 /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S1_eltcalc_P2 /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S1_summarycalc_P2 /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S1_pltcalc_P2 work/full_correlation/gul_S1_summaryaalcalc/P2.bin > /dev/null & pid31=$!
tee < /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S2_summary_P2 /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S2_eltcalc_P2 /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S2_summarycalc_P2 /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S2_pltcalc_P2 work/full_correlation/gul_S2_summaryaalcalc/P2.bin > /dev/null & pid32=$!
summarycalc -m -i -1 /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S1_summary_P2 -2 /tmp/%FIFO_DIR%/fifo/full_correlation/gul_S2_summary_P2 < /tmp/%FIFO_DIR%/fifo/full_correlation/gul_P2 &
# Feed the fully-correlated gul stream into its own fmcalc, and run the main
# event/model/gulcalc pipeline that drives everything above.
tee < /tmp/%FIFO_DIR%/fifo/full_correlation/gul_fc_P2 /tmp/%FIFO_DIR%/fifo/full_correlation/gul_P2 | fmcalc -a2 > /tmp/%FIFO_DIR%/fifo/full_correlation/il_P2 &
eve 2 2 | getmodel | gulcalc -S0 -L0 -r -j /tmp/%FIFO_DIR%/fifo/full_correlation/gul_fc_P2 -a1 -i - | tee /tmp/%FIFO_DIR%/fifo/gul_P2 | fmcalc -a2 > /tmp/%FIFO_DIR%/fifo/il_P2 &
wait $pid1 $pid2 $pid3 $pid4 $pid5 $pid6 $pid7 $pid8 $pid9 $pid10 $pid11 $pid12 $pid13 $pid14 $pid15 $pid16 $pid17 $pid18 $pid19 $pid20 $pid21 $pid22 $pid23 $pid24 $pid25 $pid26 $pid27 $pid28 $pid29 $pid30 $pid31 $pid32
# --- Do insured loss kats ---
kat -s work/kat/il_S1_eltcalc_P2 > output/il_S1_eltcalc.csv & kpid1=$!
kat work/kat/il_S1_pltcalc_P2 > output/il_S1_pltcalc.csv & kpid2=$!
kat work/kat/il_S1_summarycalc_P2 > output/il_S1_summarycalc.csv & kpid3=$!
kat -s work/kat/il_S2_eltcalc_P2 > output/il_S2_eltcalc.csv & kpid4=$!
kat work/kat/il_S2_pltcalc_P2 > output/il_S2_pltcalc.csv & kpid5=$!
kat work/kat/il_S2_summarycalc_P2 > output/il_S2_summarycalc.csv & kpid6=$!
# --- Do insured loss kats for fully correlated output ---
kat -s work/full_correlation/kat/il_S1_eltcalc_P2 > output/full_correlation/il_S1_eltcalc.csv & kpid7=$!
kat work/full_correlation/kat/il_S1_pltcalc_P2 > output/full_correlation/il_S1_pltcalc.csv & kpid8=$!
kat work/full_correlation/kat/il_S1_summarycalc_P2 > output/full_correlation/il_S1_summarycalc.csv & kpid9=$!
kat -s work/full_correlation/kat/il_S2_eltcalc_P2 > output/full_correlation/il_S2_eltcalc.csv & kpid10=$!
kat work/full_correlation/kat/il_S2_pltcalc_P2 > output/full_correlation/il_S2_pltcalc.csv & kpid11=$!
kat work/full_correlation/kat/il_S2_summarycalc_P2 > output/full_correlation/il_S2_summarycalc.csv & kpid12=$!
# --- Do ground up loss kats ---
kat -s work/kat/gul_S1_eltcalc_P2 > output/gul_S1_eltcalc.csv & kpid13=$!
kat work/kat/gul_S1_pltcalc_P2 > output/gul_S1_pltcalc.csv & kpid14=$!
kat work/kat/gul_S1_summarycalc_P2 > output/gul_S1_summarycalc.csv & kpid15=$!
kat -s work/kat/gul_S2_eltcalc_P2 > output/gul_S2_eltcalc.csv & kpid16=$!
kat work/kat/gul_S2_pltcalc_P2 > output/gul_S2_pltcalc.csv & kpid17=$!
kat work/kat/gul_S2_summarycalc_P2 > output/gul_S2_summarycalc.csv & kpid18=$!
# --- Do ground up loss kats for fully correlated output ---
kat -s work/full_correlation/kat/gul_S1_eltcalc_P2 > output/full_correlation/gul_S1_eltcalc.csv & kpid19=$!
kat work/full_correlation/kat/gul_S1_pltcalc_P2 > output/full_correlation/gul_S1_pltcalc.csv & kpid20=$!
kat work/full_correlation/kat/gul_S1_summarycalc_P2 > output/full_correlation/gul_S1_summarycalc.csv & kpid21=$!
kat -s work/full_correlation/kat/gul_S2_eltcalc_P2 > output/full_correlation/gul_S2_eltcalc.csv & kpid22=$!
kat work/full_correlation/kat/gul_S2_pltcalc_P2 > output/full_correlation/gul_S2_pltcalc.csv & kpid23=$!
kat work/full_correlation/kat/gul_S2_summarycalc_P2 > output/full_correlation/gul_S2_summarycalc.csv & kpid24=$!
wait $kpid1 $kpid2 $kpid3 $kpid4 $kpid5 $kpid6 $kpid7 $kpid8 $kpid9 $kpid10 $kpid11 $kpid12 $kpid13 $kpid14 $kpid15 $kpid16 $kpid17 $kpid18 $kpid19 $kpid20 $kpid21 $kpid22 $kpid23 $kpid24
|
<gh_stars>0
/* eslint-disable prettier/prettier */
import React, {Component, useState} from 'react';
import {Text, View, StyleSheet, Image, Alert} from 'react-native';
import Calendar from '../../../image/calendar.png';
import clockwhite from '../../../image/clock.png';
import menuBlack from '../../../image/menu.png';
import calendarItem from '../../../image/calendarItem.png';
import {TextInput} from 'react-native-paper';
import PlayItem from '../../../image/playButton.png';
import {FlatList, TouchableOpacity} from 'react-native-gesture-handler';
import * as Resources from '../../config/resource';
export default function ProjectDetail({route,navigation}) {
const {projectName} = route.params;
const {ClientName} = route.params;
const {contract} = route.params;
const {workType} = route.params;
const {status} = route.params;
const {Id} = route.params;
const deleteProject = () => {
Resources.deleteProject(Id)
.then((r) => {
console.log('Project Berhasil Di Hapus')
Alert.alert('Delete Succes');
console.log(r);
navigation.navigate('ProjectList');
})
.catch((e) => {
console.log(e);
});
};
const AlertDelete = () =>
Alert.alert(
"Delete",
"Are You Sure To Delete This ? \nNote: Project Must Not Be Available",
[
{
text: "Cancel",
onPress: () => console.log("Cancel Pressed"),
style: "cancel"
},
{
text: 'OK',
onPress: () => {
deleteProject();
},
},
],
{cancelable: false},
);
return (
<View style={{flexDirection: 'column'}}>
<View style={{flexDirection:'row', justifyContent:'space-between', marginHorizontal:10, marginTop:30}}>
<View
style={{
width: 122,
height: 34,
backgroundColor: '#26BF64',
marginRight: 21,
}}>
<TouchableOpacity onPress={() => navigation.navigate('EditProject', {id: Id, name: projectName , clientNames: ClientName, PO: contract, worktype: workType, status: status})}>
<Text
style={{
color: '#FFFFFF',
fontSize: 14,
fontFamily: 'Nunito-SemiBold',
textAlign: 'center',
paddingTop: 3,
}}>
Edit Project
</Text>
</TouchableOpacity>
</View>
<View
style={{
width: 122,
height: 34,
backgroundColor: '#DC3545',
}}>
<TouchableOpacity onPress={AlertDelete}>
<Text
style={{
color: '#FFFFFF',
fontSize: 14,
fontFamily: 'Nunito-SemiBold',
textAlign: 'center',
paddingTop: 3,
}}>
Delete
</Text>
</TouchableOpacity>
</View>
</View>
<View style={{marginHorizontal: 10, marginTop: 30, flexDirection: 'row'}}>
<Text style={{fontFamily: 'Nunito-Light',fontSize: 15,}}>Project Name :</Text>
<Text style={{marginLeft: 10, fontFamily: 'Nunito-Light',fontSize: 15,}}> {projectName}</Text>
</View>
<View style={{marginHorizontal: 10, marginTop: 30, flexDirection: 'row'}}>
<Text style={{fontFamily: 'Nunito-Light',fontSize: 15,}}>Client Name :</Text>
<Text style={{marginLeft: 10, fontFamily: 'Nunito-Light',fontSize: 15,}}> {ClientName}</Text>
</View>
<View style={{marginHorizontal: 10, marginTop: 30, flexDirection: 'row'}}>
<Text style={{fontFamily: 'Nunito-Light',fontSize: 15,}}>PO/Contact Number :</Text>
<Text style={{marginLeft: 10, fontFamily: 'Nunito-Light',fontSize: 15,}}> {contract}</Text>
</View>
<View style={{marginHorizontal: 10, marginTop: 30, flexDirection: 'row'}}>
<Text style={{fontFamily: 'Nunito-Light',fontSize: 15,}}>Work Type :</Text>
<Text style={{marginLeft: 10, fontFamily: 'Nunito-Light',fontSize: 15,}}> {workType}</Text>
</View>
{status === 0 && (
<View style={{marginHorizontal: 10, marginTop: 30, flexDirection: 'row'}}>
<Text style={{fontFamily: 'Nunito-Light',fontSize: 15,}}>Status :</Text>
<Text style={{marginLeft: 10, fontFamily: 'Nunito-Light',fontSize: 15,}}> Not Active</Text>
</View>
)}
{status === 1 && (
<View style={{marginHorizontal: 10, marginTop: 30, flexDirection: 'row'}}>
<Text style={{fontFamily: 'Nunito-Light',fontSize: 15,}}>Status :</Text>
<Text style={{marginLeft: 10, fontFamily: 'Nunito-Light',fontSize: 15,}}> Active</Text>
</View>
)}
</View>
);
}
|
// --- 对象解构 ---
/* let person = {
name: 'Matt',
age: 27,
};
let { name: personName, age: personAge } = person;
console.log(personName);
console.log(personAge);
let { name = 'Jack', job = 'Software engineer' } = person; // 设置默认值
console.log(name); // Matt
console.log(job); // Software engineer */
// 如果事先声明的变量,则赋值表达式必须包含在一对括号中 ()
/* let personName, personAge, name, age;
let person = {
name: 'Matt',
age: 27,
};
({ name, age } = person);
console.log(name);
console.log(age);
({ name: personName, age: personAge } = person);
console.log(personName);
console.log(personAge); */
// --- 嵌套解构 ---
/* let person = {
name: 'Matt',
age: 27,
job: {
title: 'Software engineer',
},
};
let personCopy = {};
({
name: personCopy.name,
age: personCopy.age,
job: personCopy.job,
} = person);
// personCopy 被赋给的是对象的引用,所以修改person.job会影响personCopy
person.job.title = 'Hacker';
console.log(personCopy); // { name: 'Matt', age: 27, job: { title: 'Hacker' } } */
// 解构赋值使用嵌套结构
/* let person = {
name: 'Matt',
age: 27,
job: {
title: 'Software engineer',
},
};
// 声明 title 并将 person.job.title 赋值给它
let { job: { title } } = person;
let { job: { title: jobTitle } } = person;
console.log(title);
console.log(jobTitle); */
// Destructuring assignment in function parameters
let person = {
  name: 'Matt',
  age: 27,
};
// The second parameter is destructured into `name` and `age` in place.
function printPerson(foo, { name, age }, bar) {
  console.log(arguments); // prefer declaring the parameters as ...args
  console.log(name, age);
}
printPerson('1st', person, '2nd');
<reponame>Switchcrafter/tinyusb<gh_stars>1000+
require 'ceedling/constants'
# Ceedling helper that inspects the raw shell result of a test executable run
# and raises (after printing a diagnostic) when the run produced unusable
# output.
class GeneratorHelper

  # Injected via Ceedling's constructor DSL; @streaminator prints diagnostics.
  constructor :streaminator

  # Validate the stdout of a test executable run.
  # executable:   path of the test binary that was executed
  # shell_result: hash from the tool executor (expects :output and :exit_code)
  # Raises when stdout was empty or lacked the final Unity statistics line.
  def test_results_error_handler(executable, shell_result)
    notice = ''
    error = false
    if (shell_result[:output].nil? or shell_result[:output].strip.empty?)
      error = true
      # mirror style of generic tool_executor failure output
      notice = "\n" +
               "ERROR: Test executable \"#{File.basename(executable)}\" failed.\n" +
               "> Produced no output to $stdout.\n"
    elsif ((shell_result[:output] =~ TEST_STDOUT_STATISTICS_PATTERN).nil?)
      error = true
      # mirror style of generic tool_executor failure output
      notice = "\n" +
               "ERROR: Test executable \"#{File.basename(executable)}\" failed.\n" +
               "> Produced no final test result counts in $stdout:\n" +
               "#{shell_result[:output].strip}\n"
    end
    if (error)
      # since we told the tool executor to ignore the exit code, handle it explicitly here
      notice += "> And exited with status: [#{shell_result[:exit_code]}] (count of failed tests).\n" if (shell_result[:exit_code] != nil)
      notice += "> And then likely crashed.\n" if (shell_result[:exit_code] == nil)
      notice += "> This is often a symptom of a bad memory access in source or test code.\n\n"
      @streaminator.stderr_puts(notice, Verbosity::COMPLAIN)
      raise
    end
  end
end
|
#!/usr/bin/env bash
##########################################################################
# This is the Cake bootstrapper script for Linux and OS X.
# This file was downloaded from https://github.com/cake-build/resources
# Feel free to change this file to fit your needs.
##########################################################################

# Define directories.
SCRIPT_DIR=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
TOOLS_DIR=$SCRIPT_DIR/tools
ADDINS_DIR=$TOOLS_DIR/Addins
MODULES_DIR=$TOOLS_DIR/Modules
NUGET_EXE=$TOOLS_DIR/nuget.exe
CAKE_EXE=$TOOLS_DIR/Cake/Cake.exe
PACKAGES_CONFIG=$TOOLS_DIR/packages.config
PACKAGES_CONFIG_MD5=$TOOLS_DIR/packages.config.md5sum
ADDINS_PACKAGES_CONFIG=$ADDINS_DIR/packages.config
MODULES_PACKAGES_CONFIG=$MODULES_DIR/packages.config

# Define md5sum or md5 depending on Linux/OSX
MD5_EXE=
if [[ "$(uname -s)" == "Darwin" ]]; then
    MD5_EXE="md5 -r"
else
    MD5_EXE="md5sum"
fi

# Define default arguments.
SCRIPT="build.cake"
CAKE_ARGUMENTS=()

# Parse arguments.
# NOTE(review): this iterates over a snapshot of "$@" but reads $1/$2 and
# shifts inside the loop, so the iteration count and the positional parameters
# can drift apart when -s/--script consumes two arguments. This matches the
# upstream cake-build/resources script — confirm against upstream before
# changing.
for i in "$@"; do
    case $1 in
        -s|--script) SCRIPT="$2"; shift ;;
        --) shift; CAKE_ARGUMENTS+=("$@"); break ;;
        *) CAKE_ARGUMENTS+=("$1") ;;
    esac
    shift
done

# Make sure the tools folder exist.
if [ ! -d "$TOOLS_DIR" ]; then
    mkdir "$TOOLS_DIR"
fi

# Make sure that packages.config exist.
if [ ! -f "$TOOLS_DIR/packages.config" ]; then
    echo "Downloading packages.config..."
    curl -Lsfo "$TOOLS_DIR/packages.config" https://cakebuild.net/download/bootstrapper/packages
    if [ $? -ne 0 ]; then
        echo "An error occurred while downloading packages.config."
        exit 1
    fi
fi

# Download NuGet if it does not exist.
if [ ! -f "$NUGET_EXE" ]; then
    echo "Downloading NuGet..."
    curl -Lsfo "$NUGET_EXE" https://dist.nuget.org/win-x86-commandline/latest/nuget.exe
    if [ $? -ne 0 ]; then
        echo "An error occurred while downloading nuget.exe."
        exit 1
    fi
fi

# Restore tools from NuGet.
# If the stored md5 of packages.config no longer matches, wipe the tools
# subdirectories so everything is re-restored.
pushd "$TOOLS_DIR" >/dev/null
if [ ! -f "$PACKAGES_CONFIG_MD5" ] || [ "$( cat "$PACKAGES_CONFIG_MD5" | sed 's/\r$//' )" != "$( $MD5_EXE "$PACKAGES_CONFIG" | awk '{ print $1 }' )" ]; then
    find . -type d ! -name . | xargs rm -rf
fi
mono "$NUGET_EXE" install -ExcludeVersion
if [ $? -ne 0 ]; then
    echo "Could not restore NuGet tools."
    exit 1
fi
$MD5_EXE "$PACKAGES_CONFIG" | awk '{ print $1 }' >| "$PACKAGES_CONFIG_MD5"
popd >/dev/null

# Restore addins from NuGet.
if [ -f "$ADDINS_PACKAGES_CONFIG" ]; then
    pushd "$ADDINS_DIR" >/dev/null
    mono "$NUGET_EXE" install -ExcludeVersion
    if [ $? -ne 0 ]; then
        echo "Could not restore NuGet addins."
        exit 1
    fi
    popd >/dev/null
fi

# Restore modules from NuGet.
if [ -f "$MODULES_PACKAGES_CONFIG" ]; then
    pushd "$MODULES_DIR" >/dev/null
    mono "$NUGET_EXE" install -ExcludeVersion
    if [ $? -ne 0 ]; then
        echo "Could not restore NuGet modules."
        exit 1
    fi
    popd >/dev/null
fi

# Make sure that Cake has been installed.
if [ ! -f "$CAKE_EXE" ]; then
    echo "Could not find Cake.exe at '$CAKE_EXE'."
    exit 1
fi

# Start Cake
exec mono "$CAKE_EXE" $SCRIPT "${CAKE_ARGUMENTS[@]}"
|
// Compute-resource entity: the "one" side of a one-to-many relation with
// StampTask (inverse property: StampTask.resourceType2).
class StampComputeResources {
  // Define properties
  // ...
  // Tasks that reference this resource through their `resourceType2` field.
  @OneToMany(() => StampTask, stampTask => stampTask.resourceType2)
  stampTasks: StampTask[];
}
// Task entity: the "many" side of the relation — each task points at one
// StampComputeResources via `resourceType2`.
class StampTask {
  // Define properties
  // ...
  // Owning side of the relation; the inverse collection is
  // StampComputeResources.stampTasks.
  @ManyToOne(() => StampComputeResources, stampComputeResources => stampComputeResources.stampTasks)
  resourceType2: StampComputeResources;
}
exports.up = async(knex, Promise) => {
await knex.schema.table("guilds", (t) => {
t.string("speedrun", 255);
});
};
exports.down = async(knex, Promise) => {
await knex.schema.table("guilds", (t) => {
t.dropColumn("speedrun");
});
};
|
#!/bin/bash
# Build the libphonenumber distribution: compile the Closure sources (online
# via the Closure Compiler service, or locally with ant) and optionally wrap
# the result with browserify.
PATH=./node_modules/.bin/:$PATH

# Clean previous distribution build.
rm -rf dist/*

# Test if online compilation should be used.
if [ "${ONLINE:-true}" == "true" ]; then
    echo "Compiling using Google Closure Service..."
    # NOTE(review): closure-compiler.appspot.com is a third-party hosted
    # service — confirm it is still available before relying on this path.
    curl --silent \
        --data output_format=text \
        --data output_info=compiled_code \
        --data use_closure_library=true \
        --data compilation_level=SIMPLE_OPTIMIZATIONS \
        --data formatting=PRETTY_PRINT \
        --data-urlencode js_code@src/asyoutypeformatter.js \
        --data-urlencode js_code@src/index.js \
        --data-urlencode js_code@src/metadata.js \
        --data-urlencode js_code@src/phonemetadata.pb.js \
        --data-urlencode js_code@src/phonenumber.pb.js \
        --data-urlencode js_code@src/phonenumberutil.js \
        --data-urlencode js_code@src/shortnumberinfo.js \
        --data-urlencode js_code@src/shortnumbermetadata.js \
        --output dist/libphonenumber.original.js \
        https://closure-compiler.appspot.com/compile
else
    echo "Compiling locally..."
    ant build
fi

# Wrap the compiled output as a standalone UMD bundle, then drop the original.
if [ "${BROWSERIFY:-true}" == "true" ]; then
    echo "Browserifying..."
    browserify dist/libphonenumber.original.js --standalone libphonenumber --no-browser-field --outfile dist/libphonenumber.js
    rm dist/libphonenumber.original.js
fi

echo "Build completed!"
|
<reponame>polens29/lb-billing<filename>app/containers/Integrations/actions.js
import {
UPDATE_INTEGRATION_STATUS,
GET_INTEGRATION_STATUS,
UPDATE_ALL_INTEGRATION_STATUS,
GET_ALL_INTEGRATION_STATUS,
INTEGRATIONS_MODAL_TOGGLE,
UPDATE_INTEGRATIONS_OBJECT,
SET_INTEGRATION_OBJECT,
SET_INTEGRATION_FORMAT,
SAVE_INTEGRATIONS_MAPPING,
AUTH_INTEGRATION,
} from './constants';
// Action: request the status of every integration for the given lead type.
export function getAllIntegrationStatus(leadType) {
  return { type: GET_ALL_INTEGRATION_STATUS, leadType: leadType.toLowerCase() };
}
// Action: store the statuses returned by the backend for all integrations.
export function setAllIntegrationStatus(response) {
  return { type: UPDATE_ALL_INTEGRATION_STATUS, response };
}
// Action: request the status of one integration. `force` is forwarded as-is
// (presumably bypasses a cached status — confirm in the handling saga).
export function getIntegrationStatus(leadType, integrationType, force = false) {
  return {
    type: GET_INTEGRATION_STATUS,
    leadType: leadType.toLowerCase(),
    integrationType: integrationType.toLowerCase(),
    force,
  };
}
// Action: store the status of a single integration.
export function setIntegrationStatus(status) {
  return { type: UPDATE_INTEGRATION_STATUS, status };
}
// Action: open/close the integrations modal for the given integration type.
export function integrationsModalToggle(status, integrationType) {
  return { type: INTEGRATIONS_MODAL_TOGGLE, status, integrationType };
}
// Action: select the remote object an integration maps to.
// NOTE(review): unlike the sibling creators, `leadType` is NOT lowercased
// here — confirm whether that asymmetry is intentional before changing it.
export function updateIntegrationsObject(leadType, integrationType, objectId) {
  return {
    type: UPDATE_INTEGRATIONS_OBJECT,
    leadType,
    integrationType: integrationType.toLowerCase(),
    objectId,
  };
}
// Action: store the currently selected integration object.
export function setIntegrationObject(integrationObject) {
  return { type: SET_INTEGRATION_OBJECT, integrationObject };
}
// Action: store the currently selected integration format.
export function setIntegrationFormat(integrationFormat) {
  return { type: SET_INTEGRATION_FORMAT, integrationFormat };
}
// Action: persist the field mapping configured for an integration.
export function saveIntegrationsMapping(leadType, integrationType, payload) {
  return {
    type: SAVE_INTEGRATIONS_MAPPING,
    leadType,
    integrationType: integrationType.toLowerCase(),
    payload,
  };
}
// Action: start the authentication flow for an integration.
export function authenticateIntegration(leadType, integrationType, payload) {
  return {
    type: AUTH_INTEGRATION,
    leadType,
    integrationType: integrationType.toLowerCase(),
    payload,
  };
}
|
module Easymarklet
  # Rails generator that scaffolds a "dlux" bookmarklet: JavaScript assets, a
  # producer controller with its views/layouts, and the matching routes.
  # Invoked as `rails generate easymarklet:dlux NAME`; NAME becomes file_name.
  class DluxGenerator < Rails::Generators::NamedBase
    source_root File.expand_path('../templates', __FILE__)

    # Copy the JS assets, controller, and view templates, substituting the
    # generator's file_name into each destination path.
    def copy_files
      template "dlux_bookmarklet.js", "app/assets/javascripts/#{file_name}_bookmarklet.js"
      template "dlux_consumer.js", "app/assets/javascripts/#{file_name}_consumer.js"
      template "dlux_producer.js", "app/assets/javascripts/#{file_name}_producer.js"
      template "dlux_producer_controller.rb", "app/controllers/#{file_name}_producer_controller.rb"
      template "dlux_producer_index.html.erb", "app/views/#{file_name}_producer/index.html.erb"
      template "views/easymarklet_layout.html.erb", "app/views/layouts/#{file_name}_producer.html.erb"
      template "views/easymarklet_buffer_frame.html.erb", "app/views/layouts/#{file_name}_producer_buffer.html.erb"
    end

    # Add the producer index and buffer routes to the host app.
    # NOTE(review): `match` without `via:` was valid in Rails 3 but raises in
    # Rails 4+ — confirm the supported Rails version before changing.
    def create_routes
      route("match '#{file_name}_producer' => '#{file_name}_producer#index'")
      route("match '#{file_name}_producer/buffer' => '#{file_name}_producer#buffer'")
    end

    # Print a usage hint showing how to link to the generated bookmarklet.
    def display_msg
      puts ""
      puts "You can link to your new bookmarklet with this :"
      puts ""
      puts "<%= link_to '#{file_name.titleize}', easymarklet_js('#{file_name}_consumer.js') %>"
      puts ""
    end
  end
end
|
/**
* Copyright (C) 2011 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.analytics.financial.interestrate.payments.method;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.lang.Validate;
import com.opengamma.analytics.financial.interestrate.InstrumentDerivative;
import com.opengamma.analytics.financial.interestrate.InterestRateCurveSensitivity;
import com.opengamma.analytics.financial.interestrate.YieldCurveBundle;
import com.opengamma.analytics.financial.interestrate.method.PricingMethod;
import com.opengamma.analytics.financial.interestrate.payments.derivative.CapFloorIbor;
import com.opengamma.analytics.financial.model.interestrate.HullWhiteOneFactorPiecewiseConstantInterestRateModel;
import com.opengamma.analytics.financial.model.interestrate.definition.HullWhiteOneFactorPiecewiseConstantDataBundle;
import com.opengamma.analytics.math.statistics.distribution.NormalDistribution;
import com.opengamma.analytics.math.statistics.distribution.ProbabilityDistribution;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.money.CurrencyAmount;
import com.opengamma.util.tuple.DoublesPair;
/**
* Class used to compute the price and sensitivity of a Ibor cap/floor with
* Hull-White one factor model. The general pricing formula is given by:
* $$
* \begin{equation*}
* \frac{\delta_p}{\delta_F}P^D(0,t_p)\left( \frac{P^j(0,t_0)}{P^j(0,t_1)} N(-\kappa-\alpha_0) - (1+\delta_F K) N(-\kappa-\alpha_1) \right)
* \end{equation*}
* $$
 * where:
 * $$
 * \begin{equation*}
* \kappa = \frac{1}{\alpha_1-\alpha_0} \left( \ln\left(\frac{(1+\delta_F K)P^j(0,t_1)}{P^j(0,t_0)}\right) - \frac12 (\alpha_1^2 - \alpha_0^2) \right).
* \end{equation*}
* $$
*/
public class CapFloorIborHullWhiteMethod implements PricingMethod {
  /**
   * The standard normal distribution used for the N(.) terms of the pricing formula.
   */
  private static final ProbabilityDistribution<Double> NORMAL = new NormalDistribution(0, 1);
  /**
   * The Hull-White one-factor model implementation used to compute the alpha terms.
   */
  private final HullWhiteOneFactorPiecewiseConstantInterestRateModel _model = new HullWhiteOneFactorPiecewiseConstantInterestRateModel();
  /**
   * Default constructor (takes no arguments; the model instance is created above).
   */
  public CapFloorIborHullWhiteMethod() {
  }
/**
* Computes the present value of a cap/floor in the Hull-White one factor model.
* @param cap The cap/floor.
* @param hwData The Hull-White parameters and the curves.
* @return The present value.
*/
public CurrencyAmount presentValue(final CapFloorIbor cap, final HullWhiteOneFactorPiecewiseConstantDataBundle hwData) {
ArgumentChecker.notNull(cap, "The cap/floor shoud not be null");
ArgumentChecker.notNull(hwData, "The Hull-White data shoud not be null");
double tp = cap.getPaymentTime();
double t0 = cap.getFixingPeriodStartTime();
double t1 = cap.getFixingPeriodEndTime();
double deltaF = cap.getFixingYearFraction();
double deltaP = cap.getPaymentYearFraction();
double k = cap.getStrike();
double dfPay = hwData.getCurve(cap.getFundingCurveName()).getDiscountFactor(tp);
double dfForwardT0 = hwData.getCurve(cap.getForwardCurveName()).getDiscountFactor(t0);
double dfForwardT1 = hwData.getCurve(cap.getForwardCurveName()).getDiscountFactor(t1);
double alpha0 = _model.alpha(hwData.getHullWhiteParameter(), 0.0, cap.getFixingTime(), tp, t0);
double alpha1 = _model.alpha(hwData.getHullWhiteParameter(), 0.0, cap.getFixingTime(), tp, t1);
double kappa = (Math.log((1 + deltaF * k) * dfForwardT1 / dfForwardT0) - (alpha1 * alpha1 - alpha0 * alpha0) / 2.0) / (alpha1 - alpha0);
double omega = (cap.isCap() ? 1.0 : -1.0);
double pv = deltaP / deltaF * dfPay * omega * (dfForwardT0 / dfForwardT1 * NORMAL.getCDF(omega * (-kappa - alpha0)) - (1.0 + deltaF * k) * NORMAL.getCDF(omega * (-kappa - alpha1)));
pv *= cap.getNotional();
return CurrencyAmount.of(cap.getCurrency(), pv);
}
@Override
public CurrencyAmount presentValue(InstrumentDerivative instrument, YieldCurveBundle curves) {
Validate.isTrue(instrument instanceof CapFloorIbor, "Ibor Cap/floor");
Validate.isTrue(curves instanceof HullWhiteOneFactorPiecewiseConstantDataBundle, "Bundle should contain Hull-White data");
return presentValue((CapFloorIbor) instrument, (HullWhiteOneFactorPiecewiseConstantDataBundle) curves);
}
/**
* Computes the present value curve sensitivity of a cap/floor in the Hull-White one factor model.
* @param cap The cap/floor.
* @param hwData The Hull-White parameters and the curves.
* @return The present value curve sensitivity.
*/
public InterestRateCurveSensitivity presentValueCurveSensitivity(final CapFloorIbor cap, final HullWhiteOneFactorPiecewiseConstantDataBundle hwData) {
ArgumentChecker.notNull(cap, "The cap/floor shoud not be null");
ArgumentChecker.notNull(hwData, "The Hull-White data shoud not be null");
double tp = cap.getPaymentTime();
double t0 = cap.getFixingPeriodStartTime();
double t1 = cap.getFixingPeriodEndTime();
double deltaF = cap.getFixingYearFraction();
double deltaP = cap.getPaymentYearFraction();
double k = cap.getStrike();
double omega = (cap.isCap() ? 1.0 : -1.0);
// Forward sweep
double dfPay = hwData.getCurve(cap.getFundingCurveName()).getDiscountFactor(tp);
double dfForwardT0 = hwData.getCurve(cap.getForwardCurveName()).getDiscountFactor(t0);
double dfForwardT1 = hwData.getCurve(cap.getForwardCurveName()).getDiscountFactor(t1);
double alpha0 = _model.alpha(hwData.getHullWhiteParameter(), 0.0, cap.getFixingTime(), tp, t0);
double alpha1 = _model.alpha(hwData.getHullWhiteParameter(), 0.0, cap.getFixingTime(), tp, t1);
double kappa = (Math.log((1 + deltaF * k) * dfForwardT1 / dfForwardT0) - (alpha1 * alpha1 - alpha0 * alpha0) / 2.0) / (alpha1 - alpha0);
double n0 = NORMAL.getCDF(omega * (-kappa - alpha0));
double n1 = NORMAL.getCDF(omega * (-kappa - alpha1));
// double pv = deltaP / deltaF * dfPay * omega * (dfForwardT0 / dfForwardT1 * n0 - (1.0 + deltaF * k) * n1) * cap.getNotional();
// Backward sweep
double pvBar = 1.0;
// double kappaBar = 0.0; // kappa is the optimal exercise boundary
double dfForwardT1Bar = -deltaP / deltaF * dfPay * omega * dfForwardT0 / (dfForwardT1 * dfForwardT1) * n0 * cap.getNotional() * pvBar;
double dfForwardT0Bar = deltaP / deltaF * dfPay * omega / dfForwardT1 * n0 * cap.getNotional() * pvBar;
double dfPayBar = deltaP / deltaF * omega * (dfForwardT0 / dfForwardT1 * n0 - (1.0 + deltaF * k) * n1) * cap.getNotional() * pvBar;
InterestRateCurveSensitivity result = new InterestRateCurveSensitivity();
final List<DoublesPair> listDiscounting = new ArrayList<DoublesPair>();
listDiscounting.add(new DoublesPair(cap.getPaymentTime(), -cap.getPaymentTime() * dfPay * dfPayBar));
result = result.plus(cap.getFundingCurveName(), listDiscounting);
final List<DoublesPair> listForward = new ArrayList<DoublesPair>();
listForward.add(new DoublesPair(cap.getFixingPeriodStartTime(), -cap.getFixingPeriodStartTime() * dfForwardT0 * dfForwardT0Bar));
listForward.add(new DoublesPair(cap.getFixingPeriodEndTime(), -cap.getFixingPeriodEndTime() * dfForwardT1 * dfForwardT1Bar));
result = result.plus(cap.getForwardCurveName(), listForward);
return result;
}
/**
* Computes the present value Hull-White parameters sensitivity of a cap/floor in the Hull-White one factor model.
* @param cap The cap/floor.
* @param hwData The Hull-White parameters and the curves.
* @return The present value parameters sensitivity.
*/
public double[] presentValueHullWhiteSensitivity(final CapFloorIbor cap, final HullWhiteOneFactorPiecewiseConstantDataBundle hwData) {
ArgumentChecker.notNull(cap, "The cap/floor shoud not be null");
ArgumentChecker.notNull(hwData, "The Hull-White data shoud not be null");
double tp = cap.getPaymentTime();
double[] t = new double[2];
t[0] = cap.getFixingPeriodStartTime();
t[1] = cap.getFixingPeriodEndTime();
double deltaF = cap.getFixingYearFraction();
double deltaP = cap.getPaymentYearFraction();
double k = cap.getStrike();
double omega = (cap.isCap() ? 1.0 : -1.0);
// Forward sweep
double dfPay = hwData.getCurve(cap.getFundingCurveName()).getDiscountFactor(tp);
double dfForwardT0 = hwData.getCurve(cap.getForwardCurveName()).getDiscountFactor(t[0]);
double dfForwardT1 = hwData.getCurve(cap.getForwardCurveName()).getDiscountFactor(t[1]);
int nbSigma = hwData.getHullWhiteParameter().getVolatility().length;
double[] alpha = new double[2];
double[][] alphaDerivatives = new double[2][nbSigma];
for (int loopcf = 0; loopcf < 2; loopcf++) {
alpha[loopcf] = _model.alpha(hwData.getHullWhiteParameter(), 0.0, cap.getFixingTime(), tp, t[loopcf], alphaDerivatives[loopcf]);
}
double kappa = (Math.log((1 + deltaF * k) * dfForwardT1 / dfForwardT0) - (alpha[1] * alpha[1] - alpha[0] * alpha[0]) / 2.0) / (alpha[1] - alpha[0]);
double[] n = new double[2];
for (int loopcf = 0; loopcf < 2; loopcf++) {
n[loopcf] = NORMAL.getCDF(omega * (-kappa - alpha[loopcf]));
}
// double pv = deltaP / deltaF * dfPay * omega * (dfForwardT0 / dfForwardT1 * n0 - (1.0 + deltaF * k) * n1) * cap.getNotional();
// Backward sweep
double pvBar = 1.0;
double[] nBar = new double[2];
nBar[1] = deltaP / deltaF * dfPay * omega * (1.0 + deltaF * k) * cap.getNotional() * pvBar;
nBar[0] = deltaP / deltaF * dfPay * omega * dfForwardT0 / dfForwardT1 * cap.getNotional();
double[] alphaBar = new double[2];
for (int loopcf = 0; loopcf < 2; loopcf++) {
alphaBar[loopcf] = NORMAL.getPDF(omega * (-kappa - alpha[loopcf])) * -omega * nBar[loopcf];
}
double[] sigmaBar = new double[nbSigma];
for (int loopcf = 0; loopcf < 2; loopcf++) {
for (int loopsigma = 0; loopsigma < nbSigma; loopsigma++) {
sigmaBar[loopsigma] += alphaDerivatives[loopcf][loopsigma] * alphaBar[loopcf];
}
}
return sigmaBar;
}
}
|
# import the necessary packages
import pandas as pd
import numpy as np
from sklearn.model_selection import train_test_split
from sklearn.ensemble import RandomForestRegressor
from sklearn.metrics import mean_absolute_error
from sklearn.preprocessing import StandardScaler

# load the data into a pandas DataFrame
data = pd.read_csv('stock_data.csv')

# define the features and target
features = data[['Open', 'High', 'Low', 'Close']]
target = data['Adj Close']

# split the data into training and test sets
# FIX: the split previously bound lowercase `x_test` while the scaling step
# below referenced `X_test`, which raised a NameError at runtime.
X_train, X_test, y_train, y_test = train_test_split(features, target, test_size=0.2, random_state=0)

# apply standard scaling: fit on the training set only, then reuse the fitted
# scaler on the test set so no test-set statistics leak into training
sc = StandardScaler()
X_train = sc.fit_transform(X_train)
X_test = sc.transform(X_test)

# train the model
model = RandomForestRegressor(n_estimators=100)
model.fit(X_train, y_train)

# evaluate the model; mean_absolute_error takes (y_true, y_pred) — MAE is
# symmetric so the swapped order gave the same number, but the conventional
# order keeps it correct if the metric is ever changed
prediction = model.predict(X_test)
mae = mean_absolute_error(y_test, prediction)
print('Mean Absolute Error:', mae)
#!/bin/bash
# Run gosec static analysis over the repository and post-process the findings
# with analyze.py. Usage: script [output_dir]; when given, the issues file is
# moved into output_dir. Exit status is analyze.py's exit status.
set -x
top_dir=$(pwd)
out_dir=""
if [ ! -z "$1" ];then
    mkdir -p "$1"
    # Resolve to an absolute path now: we `cd` into a temp directory below,
    # so a relative argument would otherwise point at the wrong location
    # when the issues file is moved at the end.
    out_dir=$(cd "$1" && pwd)
fi
# Work in a throwaway directory and remove it on exit (it was leaked before).
tmp_dir=$(mktemp -d)
trap 'rm -rf "$tmp_dir"' EXIT
cd "$tmp_dir"
# Install gosec locally if it is not already present in this directory.
if [ ! -f ./bin/gosec ];then
    curl -sfL https://raw.githubusercontent.com/securego/gosec/master/install.sh | sh -s 2.0.0
fi
result_file=result.json
issue_file=issues.txt
# Scan every Go package under the repository root.
./bin/gosec -fmt=json -out=${result_file} ${top_dir}/...
python "${top_dir}/analyze.py" -i ${result_file} ${issue_file}
ret=$?
rm "$result_file"
chmod 666 "$issue_file"
if [ "x" != "x$out_dir" ];then
    mv "$issue_file" "$out_dir"
fi
exit $ret
|
## ek9/shell-config - https://github.com/ek9/shell-config
## 05-programs.sh
## This file sets up custom shell programs

# Prefer vim for every editor-related variable when it is installed.
if [[ -x $(command -v vim) ]]; then
    export EDITOR=vim VISUAL=vim FCEDIT=vim
fi

# Use elinks as the text-mode browser when available.
if [[ -x $(command -v elinks) ]]; then
    export BROWSER="elinks"
fi

# Route all paging through less.
export PAGER=less
export VIEWER=$PAGER
export SYSTEMD_PAGER=$PAGER
# '-' disables the less history file; -R passes ANSI color codes through.
export LESSHISTFILE=-
export LESS='-R'
# ISO 8601 timestamps in ls and friends.
export TIME_STYLE="long-iso"
|
# <<if you put this script into source root remove this command
cd ..
# if you put this script into source root remove this command>>
# Launch single-node distributed training of ResNet-18 on ImageNet (ILSVRC2012):
# NCCL backend, local TCP rendezvous on port 8889, one worker process per GPU
# spawned via --multiprocessing-distributed (world-size 1 node, this node rank 0).
python main.py -a resnet18 --dist-url 'tcp://127.0.0.1:8889' --dist-backend 'nccl' --multiprocessing-distributed \
    --world-size 1 --rank 0 /home/aistudio/Desktop/datasets/ILSVRC2012/
#!/bin/bash
# Install every APK found in /usr/bin onto each running emulator (retrying
# flaky installs up to MAX_ATTEMPTS times), then shut each emulator down.
MAX_ATTEMPTS=5
adb root
adb devices | grep emulator | cut -f1 | while read id; do
    apks=(/usr/bin/*.apk)
    # Optionally remove the preinstalled Chrome build first.
    if [ "$CHROME_MOBILE" == "y" ]; then
        adb -s "$id" uninstall "com.android.chrome" || true
    fi
    for apk in "${apks[@]}"; do
        if [ -r "$apk" ]; then
            for i in $(seq 1 ${MAX_ATTEMPTS}); do
                echo "Installing $apk (attempt #$i of $MAX_ATTEMPTS)"
                # </dev/null: keep adb from consuming the device list that the
                # surrounding `while read` loop is reading from stdin.
                if adb -s "$id" install "$apk" < /dev/null; then
                    break
                fi
                sleep 15
                echo "Retrying to install $apk"
            done
        fi
    done
    # -2: graceful emulator shutdown; ignore failures so other devices proceed.
    adb -s "$id" emu kill -2 < /dev/null || true
done
<!DOCTYPE html>
<html lang="en">
<head>
<!-- every valid document needs a non-empty title -->
<title>Profile</title>
</head>
<body>
<p>Name: John </p>
<p>Age: 28 </p>
<!-- alt text keeps the image accessible when it cannot be displayed -->
<img src="image.jpg" alt="Profile photo of John" width="400" height="400">
</body>
</html>
#include <bits/stdc++.h>
// Fast I/O: detach C++ streams from C stdio and untie cin from cout.
#define fast ios_base::sync_with_stdio(0);cin.tie(NULL);cout.tie(NULL)
#define ll long long int
#define ld long double
using namespace std;
const int N = 105;        // maximum grid dimension (with slack)
const int MOD = 1e9 + 7;  // template leftover; unused in this program
int ans, ct[N][N];        // ct[i][j]: distance from the source to cell (i,j)
char a[N + 1][N + 1];     // grid; '*' marks a blocked cell
// n = number of columns, m = number of rows; (str, end1) = source coordinates
// ("end1" because "end" collides with std::end under `using namespace std`).
int n, m, str, end1;
bool vis[N][N];           // visited marks, set when a cell is queued
void dijkstra(int str1, int end2){
set<pair<int, pair<int, int> > > s;
s.insert({0, {str1, end2}});
// cost ---- ith cordinate, jth cordinate;
while(!s.empty()){
int cost = s.begin()->first;
int i = s.begin()->second.first, j = s.begin()->second.second;
ct[i][j] = cost;
s.erase(s.begin());
vis[i][j] = 1;
if(i + 1 <= m and a[i + 1][j] != '*' and !vis[i + 1][j]){
s.insert({cost + 1, {i + 1, j}});
vis[i + 1][j] = true;
}
if(i - 1 >= 1 and a[i - 1][j] != '*' and !vis[i - 1][j]){
s.insert({cost + 1, {i - 1, j}});
vis[i - 1][j] = true;
}
if(j + 1 <= n and a[i][j + 1] != '*' and !vis[i][j + 1]){
s.insert({cost + 1, {i, j + 1}});
vis[i][j + 1] = true;
}
if(j - 1 >= 1 and a[i][j - 1] != '*' and !vis[i][j - 1]){
s.insert({cost + 1, {i, j - 1}});
vis[i][j - 1] = true;
}
if(j + 1 <= n and i + 1 <= m and a[i + 1][j + 1] != '*' and !vis[i + 1][j + 1]){
s.insert({cost + 1, {i + 1, j + 1}});
vis[i + 1][j + 1] = true;
}
if(j + 1 <= n and i - 1 >= 1 and a[i - 1][j + 1] != '*' and !vis[i - 1][j + 1]){
s.insert({cost + 1, {i - 1, j + 1}});
vis[i - 1][j + 1] = true;
}
if(j - 1 >= 1 and i - 1 >= 1 and a[i - 1][j - 1] != '*' and !vis[i - 1][j - 1]){
s.insert({cost + 1, {i - 1, j - 1}});
vis[i - 1][j - 1] = true;
}
if(j - 1 >= 1 and i + 1 <= m and a[i + 1][j - 1] != '*' and !vis[i + 1][j - 1]){
s.insert({cost + 1, {i + 1, j - 1}});
vis[i + 1][j - 1] = true;
}
}
}
// Read the grid and the source position, run the search, and print the
// largest distance recorded over the whole grid.
void solve(){
    cin >> n >> m >> end1 >> str;
    // The grid is stored bottom-up: the first input line becomes row m.
    for(int row = m; row >= 1; --row){
        for(int col = 1; col <= n; ++col){
            cin >> a[row][col];
        }
    }
    dijkstra(str, end1);
    int best = 0;
    for(int row = 1; row <= m; ++row){
        for(int col = 1; col <= n; ++col){
            best = max(best, ct[row][col]);
        }
    }
    cout << best << '\n';
}
// Entry point: single test case by default; uncomment `cin >> t` for
// multi-test input. Prints elapsed CPU time to stderr for local profiling.
int main(){
    fast;
    int t = 1;
    // cin >> t;
    while(t--)
        solve();
    cerr << "Time elapsed : " << 1.0 * clock() / CLOCKS_PER_SEC << " sec \n";
    return 0;
}
|
import Vue from 'vue'
import VueRouter from 'vue-router'
import { LAYOUT, VIEW } from '../constants/globals'

// Register the router plugin before constructing the router instance.
Vue.use(VueRouter)

// Application route table. Views are lazy-loaded via dynamic import so each
// one becomes its own chunk. Routes with a `meta.layout` select a specific
// layout component; the rest fall back to the app default. Order matters:
// the '*' catch-all must stay last so it only matches unknown paths.
const routes = [
  {
    path: '/',
    redirect: '/dashboard', // land on the dashboard by default
  },
  {
    path: '/dashboard',
    name: VIEW.dashboard,
    component: () => import('../views/Dashboard.vue'),
  },
  {
    path: '/measurements',
    name: VIEW.measurements,
    meta: { layout: LAYOUT.measurements },
    component: () => import('../views/Measurements.vue'),
  },
  {
    path: '/in-progress-workout',
    name: VIEW.inProgressWorkout,
    meta: { layout: LAYOUT.inProgressWorkout },
    component: () => import('../views/InProgressWorkout.vue'),
  },
  {
    path: '*', // 404
    name: VIEW.notFound,
    component: () => import('../views/NotFound.vue'),
  },
]

const router = new VueRouter({
  routes,
})

export default router
#!/usr/bin/env bash
# Start an RStudio Server container for the bioinformatics_tips repository,
# mounting a per-R-version package directory and the repo itself.
set -euo pipefail

version=4.1.2
rstudio_image=davetang/rstudio:${version}
container_name=rstudio_dtang_bioinfo
port=8989
# Keep installed R packages per version so libraries built against one R
# version are not silently reused by another.
package_dir=${HOME}/r_packages_${version}
# Repository root: the parent of the directory containing this script.
# Quoted command substitutions keep paths with spaces intact.
path=$(realpath "$(dirname "$0")/..")

# -p succeeds whether or not the directory exists, removing the
# check-then-create race of the original `if [[ ! -d ]]` guard.
mkdir -p "${package_dir}"

docker run -d \
    -p "${port}":8787 \
    --rm \
    --name "${container_name}" \
    -v "${package_dir}":/packages \
    -v "${path}":/home/rstudio/bioinformatics_tips \
    -e PASSWORD=password \
    -e USERID="$(id -u)" \
    -e GROUPID="$(id -g)" \
    "${rstudio_image}"

>&2 echo "${container_name} listening on port ${port}"
exit 0
<gh_stars>0
package com.leetcode;
public class Solution_283 {
public void moveZeroes(int[] nums) {
int index = 0;
for (int i = 0; i < nums.length; i++) {
if (nums[i] != 0) {
swap(nums, i, index++);
}
}
}
private void swap(int[] nums, int i, int j) {
int tmp = nums[i];
nums[i] = nums[j];
nums[j] = tmp;
}
}
|
<gh_stars>0
/* **** Notes
Flag.
*/
# define CALEND
# define CAR
# include "./../../../incl/config.h"
/*
 * cals_flag: scan the command line for a single-letter flag ("c", "e", "h",
 * "m", "n", "v") in argv[1] and dispatch to the matching cals_flag_* handler.
 * Returns 0x01 on success (or when no flag matched), 0x00 on any error.
 * NOTE(review): ct_p/ct_f presumably count NULL-terminated pointer arrays,
 * and cmpr_part presumably sets i==0 on a match -- TODO confirm against
 * their definitions in the project headers.
 */
signed(__cdecl cals_flag(signed char(**argv),cals_t(*argp))) {
    /* Handler pointer and the table of flag handlers, terminated by NULL. */
    auto signed(__cdecl*f)(cals_t(*argp));
    auto signed(__cdecl*(fn[]))(cals_t(*argp)) = {
        (signed(__cdecl*)(cals_t(*))) (cals_flag_c),
        (signed(__cdecl*)(cals_t(*))) (cals_flag_e),
        (signed(__cdecl*)(cals_t(*))) (cals_flag_h),
        (signed(__cdecl*)(cals_t(*))) (cals_flag_m),
        (signed(__cdecl*)(cals_t(*))) (cals_flag_n),
        (signed(__cdecl*)(cals_t(*))) (cals_flag_v),
        (signed(__cdecl*)(cals_t(*))) (0x00),
    };
    /* Flag strings, parallel to fn[] above, terminated by NULL. */
    auto signed char *(fl[]) = {
        "c","e","h","m","n","v",0x00,
    };
    auto signed char **v;
    auto signed short *w;
    auto signed char *b;
    auto signed i,l,n,r;
    auto signed short flag;
    /* Reject NULL inputs. */
    if(!argv) return(0x00);
    if(!argp) return(0x00);
    /* AND(flag,0x00): presumably clears `flag` -- macro defined elsewhere. */
    AND(flag,0x00);
    v = (argv);
    r = ct_p(v);
    if(!r) return(0x00);
    /* Only look for a flag when there is at least one argument past argv[0]. */
    if(0x01<(r)) {
        l = ct_f(fn);
        /* Walk the handler table backwards, comparing argv[1] to each flag. */
        while(l) {
            r = cmpr_part(&i,*(0x01+(v)),*(--l+(fl)));
            if(!r) return(0x00);
            if(!i) {
                /* Match: invoke the handler; report and fail if it errors. */
                f = (*(l+(fn)));
                r = f(argp);
                if(!r) {
                    printf("%s (*(%d+(%s)))() \n","<< Error at fn.",l,"fn");
                    return(0x00);
    }}}}
    return(0x01);
}
|
#!/bin/bash
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# Positional arguments: the object to initialise (master/standby/segment),
# the HAWQ installation directory, and an optional "1" to enable verbosity.
object_type=$1
GPHOME=$2
VERBOSE=0
if [ "$3" == "1" ]; then
    VERBOSE=1
fi
# Pull in the shared helper functions (command-path variables such as
# ${ECHO}, ${GREP}, ...) and the HAWQ environment.
source ${GPHOME}/bin/lib/hawq_bash_functions.sh
SOURCE_PATH="source ${GPHOME}/greenplum_path.sh"
${SOURCE_PATH}
host_name=`${HOSTNAME}`
# Echo $1 with ASCII upper-case letters folded to lower case.
lowercase(){
    echo "$1" | sed "y/ABCDEFGHIJKLMNOPQRSTUVWXYZ/abcdefghijklmnopqrstuvwxyz/"
}
# Detect the operating system and distribution; the distro family and major
# version steer the ip-address discovery commands later on.
OS=`lowercase \`uname\``
if [ "${OS}" = "darwin" ]; then
    OS=mac
    distro_based_on='Mac'
    distro_name=`sw_vers -productName`
    distro_version=`sw_vers -productVersion`
    distro_major_version=`echo $distro_version |awk -F '.' '{print $1}'`
else
    if [ "${OS}" = "linux" ] ; then
        if [ -f /etc/redhat-release ] ; then
            distro_based_on='RedHat'
            distro_name=`cat /etc/redhat-release |sed s/\ release.*//`
            psuedo_name=`cat /etc/redhat-release | sed s/.*\(// | sed s/\)//`
            distro_version=`cat /etc/redhat-release | sed s/.*release\ // | sed s/\ .*//`
            distro_major_version=`echo $distro_version | awk -F '.' '{print $1}'`
        elif [ -f /etc/SuSE-release ] ; then
            distro_based_on='SuSe'
            distro_name=`cat /etc/SuSE-release |sed s/\ release.*//`
            psuedo_name=`cat /etc/SuSE-release | tr "\n" ' '| sed s/VERSION.*//`
            distro_version=`cat /etc/SuSE-release | tr "\n" ' ' | sed s/.*=\ //`
            distro_major_version=`echo $distro_version |awk -F '.' '{print $1}'`
        elif [ -f /etc/debian_version ] ; then
            distro_based_on='Debian'
            distro_name=`cat /etc/lsb-release | grep '^DISTRIB_ID' | awk -F= '{ print $2 }'`
            psuedo_name=`cat /etc/lsb-release | grep '^DISTRIB_CODENAME' | awk -F= '{ print $2 }'`
            distro_version=`cat /etc/lsb-release | grep '^DISTRIB_RELEASE' | awk -F= '{ print $2 }'`
            distro_major_version=`echo $distro_version |awk -F '.' '{print $1}'`
        fi
    fi
fi
# Load the generated management configuration (data directories, ports,
# temp directories, max_connections, ...); abort if it is missing.
mgmt_config_file=${GPHOME}/etc/_mgmt_config
if [ -f ${mgmt_config_file} ]; then
    source ${mgmt_config_file} > /dev/null 2>&1
else
    ${ECHO} "${mgmt_config_file} is not exist, exit"
    exit 1
fi
# Select the data directory, port and temp-directory list for the object
# being initialised (master and standby share the master settings).
if [ ${object_type} = "master" ] || [ ${object_type} = "standby" ]; then
    hawq_data_directory=${master_data_directory}
    hawq_port=${master_port}
    tmp_dir_list=${hawq_master_temp_directory//,/ }
elif [ ${object_type} = "segment" ]; then
    hawq_data_directory=${segment_data_directory}
    hawq_port=${segment_port}
    tmp_dir_list=${hawq_segment_temp_directory//,/ }
else
    ${ECHO} "hawq init object should be one of master/standby/segment"
    exit 1
fi
master_max_connections=${max_connections}
segment_max_connections=${max_connections}
standby_host_lowercase=`lowercase "${standby_host_name}"`
# Collect the host's non-loopback IPv4 addresses. Sets cmd_str to the
# distro-appropriate ifconfig pipeline and, on segments, fills
# segment_ip_address_all with the address list. RedHat>=7 and Mac use the
# "inet " ifconfig format; older Linux uses the "inet addr:" format.
get_all_ip_address() {
    if [ "${distro_based_on}" = "RedHat" ] && [ "${distro_major_version}" -ge 7 ]; then
        cmd_str="${IFCONFIG} |${GREP} -v '127.0.0' | ${GREP} 'inet '|${AWK} '{print \$2}'"
        if [ "${object_type}" = 'segment' ]; then
            segment_ip_address_all=`${IFCONFIG} |${GREP} -v '127.0.0' | ${GREP} 'inet '|${AWK} '{print $2}'`
        fi
    elif [ "${distro_based_on}" = "Mac" ]; then
        cmd_str="${IFCONFIG} |${GREP} -v '127.0.0' | ${GREP} 'inet '|${AWK} '{print \$2}'"
        if [ "${object_type}" = 'segment' ]; then
            segment_ip_address_all=`${IFCONFIG} |${GREP} -v '127.0.0' | ${GREP} 'inet '|${AWK} '{print $2}'`
        fi
    else
        cmd_str="${IFCONFIG} |${GREP} -v '127.0.0' |${AWK} '/inet addr/{print substr(\$2,6)}'"
        if [ "${object_type}" = 'segment' ]; then
            segment_ip_address_all=`${IFCONFIG} |${GREP} -v '127.0.0' |${AWK} '/inet addr/{print substr($2,6)}'`
        fi
    fi
}
get_all_ip_address
# File names and tool paths used throughout the init steps. All per-object
# log files share the configured log_filename.
PG_HBA=pg_hba.conf
TMP_PG_HBA=/tmp/pg_hba_conf_master.$$
MASTER_LOG_FILE=${log_filename}
STANDBY_LOG_FILE=${log_filename}
SEGMENT_LOG_FILE=${log_filename}
LOG_FILE=${log_filename}
PSQL=${GPHOME}/bin/psql
PG_CTL=${GPHOME}/bin/pg_ctl
# Fall back to ~/hawqAdminLogs when no log directory was configured, and
# make sure the directory and log file exist before anything writes to them.
if [ "${log_dir}" = "None" ]; then
    log_dir=${HOME}/hawqAdminLogs
fi
if [ ! -d ${log_dir} ]; then
    ${MKDIR} -p ${log_dir}
fi
if [ ! -f ${log_filename} ]; then
    touch ${log_filename}
fi
# Append the CIDR suffix to an address: /128 for IPv6 (contains ':'),
# /32 for IPv4.
GET_CIDRADDR () {
    case "$1" in
        *:*) ${ECHO} $1/128 ;;
        *)   ${ECHO} $1/32 ;;
    esac
}
# Install the hawq_toolkit schema into template1 and postgres. Builds a
# temporary SQL file that switches to the bootstrap superuser (oid 10) and
# then replays gp_toolkit.sql; exits the script on any failure.
LOAD_GP_TOOLKIT () {
    CUR_DATE=`${DATE} +%Y%m%d`
    FILE_TIME=`${DATE} +%H%M%S`
    TOOLKIT_FILE=/tmp/_gp_toolkit_tmp_${CUR_DATE}_$FILE_TIME
    LOG_MSG "[INFO]:-Loading hawq_toolkit..." verbose
    # oid 10 is the bootstrap superuser created by initdb.
    ROLNAME=`$PSQL -q -t -A -p ${hawq_port} -c "select rolname from pg_authid where oid=10" template1`
    if [ x"$ROLNAME" == x"" ];then
        LOG_MSG "[FATAL]:-Failed to retrieve rolname." verbose
        exit 1
    fi
    if [ -f ${TOOLKIT_FILE} ]; then
        ${RM} -f ${TOOLKIT_FILE}
    fi
    # We need SET SESSION AUTH here to load the toolkit
    ${ECHO} "SET SESSION AUTHORIZATION $ROLNAME;" >> ${TOOLKIT_FILE} 2>&1
    RETVAL=$?
    if [ $RETVAL -ne 0 ];then
        ${ECHO} "[FATAL]:-Failed to create the hawq_toolkit sql file." | tee -a ${MASTER_LOG_FILE}
        exit 1
    fi
    ${CAT} $GPHOME/share/postgresql/gp_toolkit.sql >> ${TOOLKIT_FILE} 2>&1
    RETVAL=$?
    if [ $RETVAL -ne 0 ];then
        ${ECHO} "[FATAL]:-Failed to create the hawq_toolkit sql file." | tee -a ${MASTER_LOG_FILE}
        exit 1
    fi
    # Load the toolkit into both template1 and postgres.
    $PSQL -q -p ${hawq_port} -f ${TOOLKIT_FILE} template1
    RETVAL=$?
    if [ $RETVAL -ne 0 ];then
        ${ECHO} "[FATAL]:-Failed to create the hawq_toolkit schema." | tee -a ${MASTER_LOG_FILE}
        exit 1
    fi
    $PSQL -q -p ${hawq_port} -f ${TOOLKIT_FILE} postgres
    RETVAL=$?
    if [ $RETVAL -ne 0 ];then
        ${ECHO} "[FATAL]:-Failed to create the hawq_toolkit schema." | tee -a ${MASTER_LOG_FILE}
        exit 1
    fi
    ${RM} -f ${TOOLKIT_FILE}
    return $RETVAL
}
# Fill MASTER_IPV6_LOCAL_ADDRESS_ALL with the host's IPv6 addresses
# (scope suffix stripped on Mac, prefix length stripped on Linux).
get_master_ipv6_addresses() {
    if [ "${distro_based_on}" = "Mac" ]; then
        MASTER_IPV6_LOCAL_ADDRESS_ALL=(`${IFCONFIG} | ${GREP} inet6 | ${AWK} '{print $2}' | cut -d'%' -f1`)
    else
        MASTER_IPV6_LOCAL_ADDRESS_ALL=(`ip -6 address show |${GREP} inet6|${AWK} '{print $2}' |cut -d'/' -f1`)
    fi
}
# Rewrite the master's pg_hba.conf: keep only comment lines, then grant the
# initialising user local/loopback access plus trust entries for every local
# IPv4/IPv6 address (deduplicated, skipping addresses already present).
update_master_pg_hba(){
    # Updatepg_hba.conf for master.
    ${CAT} ${hawq_data_directory}/${PG_HBA} |${GREP} '^#' > ${TMP_PG_HBA}
    mv ${TMP_PG_HBA} ${hawq_data_directory}/${PG_HBA}
    # Setting local access"
    ${ECHO} "local all $USER ident" >> ${hawq_data_directory}/${PG_HBA}
    # ${ECHO} "[INFO]:-Setting local host access"
    ${ECHO} "host all $USER 127.0.0.1/28 trust" >> ${hawq_data_directory}/${PG_HBA}
    get_master_ipv6_addresses
    # Merge IPv4 and IPv6 addresses and deduplicate before writing entries.
    MASTER_HBA_IP_ADDRESS=(`${ECHO} ${master_ip_address_all[@]} ${MASTER_IPV6_LOCAL_ADDRESS_ALL[@]}|tr ' ' '\n'|sort -u|tr '\n' ' '`)
    for ip_address in ${MASTER_HBA_IP_ADDRESS[@]}; do
        CIDR_MASTER_IP=$(GET_CIDRADDR ${ip_address})
        CHK_COUNT=`${GREP} -c ${CIDR_MASTER_IP} ${hawq_data_directory}/${PG_HBA}`
        if [ "$CHK_COUNT" -eq "0" ];then
            ${ECHO} "host all ${USER} ${CIDR_MASTER_IP} trust" >> ${hawq_data_directory}/${PG_HBA}
        else
            ${ECHO} "${CIDR_MASTER_IP} already exist in ${hawq_data_directory}/${PG_HBA}"
        fi
    done
}
# Propagate trust entries for the standby's addresses: append them (over SSH)
# to pg_hba.conf on the master and the standby, and to every segment listed
# in ${GPHOME}/etc/slaves that already has a data directory.
update_standby_pg_hba(){
    # Updatepg_hba.conf for standby master.
    STANDBY_HBA_IP_ADDRESSES=(`${ECHO} ${standby_ip_address_all[@]}|tr ' ' '\n'|sort -u|tr '\n' ' '`)
    for ip_address in ${STANDBY_HBA_IP_ADDRESSES[@]}; do
        CIDR_MASTER_IP=$(GET_CIDRADDR ${ip_address})
        # NOTE(review): the duplicate check runs against the local master
        # copy of pg_hba.conf, not the remote ones being appended to --
        # TODO confirm this is intended.
        CHK_COUNT=`${GREP} -c ${CIDR_MASTER_IP} ${master_data_directory}/${PG_HBA}`
        if [ "$CHK_COUNT" -eq "0" ];then
            ${SSH} -o 'StrictHostKeyChecking no' ${hawqUser}@${master_host_name} \
                "${ECHO} \"host all ${USER} ${CIDR_MASTER_IP} trust\" >> ${master_data_directory}/${PG_HBA}"
            ${SSH} -o 'StrictHostKeyChecking no' ${hawqUser}@${standby_host_name} \
                "${ECHO} \"host all ${USER} ${CIDR_MASTER_IP} trust\" >> ${master_data_directory}/${PG_HBA}"
        fi
        for segment_host_name in `cat ${GPHOME}/etc/slaves`; do
            ${SSH} -o 'StrictHostKeyChecking no' ${hawqUser}@${segment_host_name} \
                "if [ -e ${segment_data_directory}/${PG_HBA} ]; then ${ECHO} \"host all all ${CIDR_MASTER_IP} trust\" >> ${segment_data_directory}/${PG_HBA}; fi"
        done
    done
}
# Append trust entries to the segment's pg_hba.conf: open access ("all") for
# every master/standby address, and user-scoped access for the segment's own
# addresses. Existing entries are skipped.
update_segment_pg_hba(){
    # Updatepg_hba.conf for segment.
    # Setting local access"
    MASTERS_HBA_IP_ADDRESSES=(`${ECHO} ${master_ip_address_all[@]} ${standby_ip_address_all[@]}|tr ' ' '\n'|sort -u|tr '\n' ' '`)
    for ip_address in ${MASTERS_HBA_IP_ADDRESSES[@]}; do
        CIDR_MASTER_IP=$(GET_CIDRADDR ${ip_address})
        CHK_COUNT=`${GREP} -c ${CIDR_MASTER_IP} ${hawq_data_directory}/${PG_HBA}`
        if [ "$CHK_COUNT" -eq "0" ];then
            ${ECHO} "host all all ${CIDR_MASTER_IP} trust" >> ${hawq_data_directory}/${PG_HBA}
        fi
    done
    for ip_address in ${segment_ip_address_all[@]}; do
        CIDR_MASTER_IP=$(GET_CIDRADDR ${ip_address})
        CHK_COUNT=`${GREP} -c ${CIDR_MASTER_IP} ${hawq_data_directory}/${PG_HBA}`
        if [ "$CHK_COUNT" -eq "0" ];then
            ${ECHO} "host all ${USER} ${CIDR_MASTER_IP} trust" >> ${hawq_data_directory}/${PG_HBA}
        fi
    done
}
# Initialise the HAWQ master: run initdb, set up pg_hba, start the master in
# utility mode, create the HDFS filespace/tablespace, move the template and
# system databases onto it, and load the hawq_toolkit schema. Exits on the
# first failing step; progress goes to ${MASTER_LOG_FILE}.
master_init() {
    ${GPHOME}/bin/initdb -E UNICODE -D ${hawq_data_directory} --locale=${locale} --lc-collate=${hawq_lc_collate} \
        --lc-ctype=${hawq_lc_ctype} --lc-messages=${hawq_lc_messages} --lc-monetary=${hawq_lc_monetary} \
        --lc-numeric=${hawq_lc_numeric} --lc-time=${hawq_lc_time} --max_connections=${master_max_connections} \
        --shared_buffers=${shared_buffers} --backend_output=${log_dir}/master.initdb 1>>${MASTER_LOG_FILE} 2>&1
    if [ $? -ne 0 ] ; then
        ${ECHO} "Master postgres initdb failed" | tee -a ${MASTER_LOG_FILE}
        exit 1
    fi
    update_master_pg_hba 1>>${MASTER_LOG_FILE} 2>&1
    # Start in master (-M master) mode so the catalog can be prepared.
    ${PG_CTL} -D ${hawq_data_directory} -l ${hawq_data_directory}/pg_log/startup.log -w -t 60 -o " -p ${hawq_port} --silent-mode=true -M master -i" start >> ${MASTER_LOG_FILE}
    if [ $? -ne 0 ] ; then
        ${ECHO} "Start hawq master failed" | tee -a ${MASTER_LOG_FILE}
        exit 1
    fi
    # Register HDFS as a filespace and put the default tablespace on it.
    $PSQL -p ${hawq_port} -d template1 -c "create filespace dfs_system on hdfs ('${dfs_url}');" 1>>${MASTER_LOG_FILE} 2>&1
    if [ $? -ne 0 ] ; then
        ${ECHO} "Create filespace failed, please check your hdfs settings" | tee -a ${MASTER_LOG_FILE}
        exit 1
    fi
    $PSQL -p ${hawq_port} -d template1 -c "create tablespace dfs_default filespace dfs_system;" 1>>${MASTER_LOG_FILE} 2>&1
    if [ $? -ne 0 ] ; then
        ${ECHO} "Create tablespace failed" | tee -a ${MASTER_LOG_FILE}
        exit 1
    fi
    # Point template1 at the HDFS tablespace (requires catalog DML in utility mode).
    env PGOPTIONS="-c gp_session_role=utility" $PSQL -p ${hawq_port} -d template1 -c \
        "SET allow_system_table_mods='dml';UPDATE pg_database SET dat2tablespace = (SELECT oid FROM pg_tablespace WHERE spcname = 'dfs_default') WHERE datname = 'template1';" 1>>${MASTER_LOG_FILE} 2>&1
    if [ $? -ne 0 ] ; then
        ${ECHO} "Configure database template1 failed" | tee -a ${MASTER_LOG_FILE}
        exit 1
    fi
    # Recreate template0 and postgres on the HDFS tablespace and flag them
    # as templates / connection-restricted, matching a stock initdb layout.
    $PSQL -p ${hawq_port} -d template1 -c "create database template0 tablespace dfs_default template template1;" 1>>${MASTER_LOG_FILE} 2>&1
    if [ $? -ne 0 ] ; then
        ${ECHO} "Create database template0 failed" | tee -a ${MASTER_LOG_FILE}
        exit 1
    fi
    env PGOPTIONS="-c gp_session_role=utility" $PSQL -p ${hawq_port} -d template1 -c "SET allow_system_table_mods='dml';UPDATE pg_database SET datistemplate = 't', datallowconn = false WHERE datname = 'template0';" 1>>${MASTER_LOG_FILE} 2>&1
    if [ $? -ne 0 ] ; then
        ${ECHO} "Configure database template0 failed" | tee -a ${MASTER_LOG_FILE}
        exit 1
    fi
    $PSQL -p ${hawq_port} -d template1 -c "create database postgres tablespace dfs_default;" 1>>${MASTER_LOG_FILE} 2>&1
    if [ $? -ne 0 ] ; then
        ${ECHO} "Create database postgres failed" | tee -a ${MASTER_LOG_FILE}
        exit 1
    fi
    env PGOPTIONS="-c gp_session_role=utility" $PSQL -p ${hawq_port} -d template1 -c "SET allow_system_table_mods='dml';UPDATE pg_database SET datistemplate = 't' WHERE datname = 'postgres';" 1>>${MASTER_LOG_FILE} 2>&1
    if [ $? -ne 0 ] ; then
        ${ECHO} "Configure database postgres failed" | tee -a ${MASTER_LOG_FILE}
        exit 1
    fi
    env PGOPTIONS="-c gp_session_role=utility" $PSQL -p ${hawq_port} -d template1 -c "CHECKPOINT;" 1>>${MASTER_LOG_FILE} 2>&1
    if [ $? -ne 0 ] ; then
        ${ECHO} "CHECKPOINT failed" | tee -a ${MASTER_LOG_FILE}
        exit 1
    fi
    LOAD_GP_TOOLKIT
    if [ $? -ne 0 ] ; then
        ${ECHO} "Load TOOLKIT failed" | tee -a ${MASTER_LOG_FILE}
        exit 1
    fi
    # NOTE(review): unlike the steps above, this failure message goes only to
    # the log (no tee to the console) -- confirm whether that is intended.
    $PSQL -p ${hawq_port} -d template1 -c "alter user \"${USER}\" password 'gparray';" 1>>${MASTER_LOG_FILE} 2>&1
    if [ $? -ne 0 ] ; then
        ${ECHO} "Alter user failed" 1>> ${MASTER_LOG_FILE} 2>&1
        exit 1
    fi
}
# Initialise the HAWQ standby master. All heavy lifting runs over SSH on the
# master host: stop the master, rsync-like copy of the master data directory
# to the standby (pysync.py), register the standby in the catalog, then
# restart standby and master. Exits on the first failing step.
standby_init() {
    # Make sure log file are created.
    if [ ! -f ${STANDBY_LOG_FILE} ]; then
        touch ${STANDBY_LOG_FILE};
    fi
    LOG_MSG ""
    LOG_MSG "[INFO]:-Stopping HAWQ master"
    ${SSH} -o 'StrictHostKeyChecking no' ${hawqUser}@${master_host_name} \
        "${SOURCE_PATH}; hawq stop master -a -M fast;" >> ${STANDBY_LOG_FILE} 2>&1
    if [ $? -ne 0 ] ; then
        LOG_MSG "[ERROR]:-Stop master failed" verbose
        exit 1
    else
        LOG_MSG "[INFO]:-HAWQ master stopped" verbose
    fi
    # Sync data directories to standby master.
    # (gpperfmon data, pg_log and db_dumps are excluded from the copy.)
    LOG_MSG "[INFO]:-Sync files to standby from master" verbose
    ${SSH} -o 'StrictHostKeyChecking no' ${hawqUser}@${master_host_name} \
        "cd ${master_data_directory}; \
        ${SOURCE_PATH}; ${GPHOME}/bin/lib/pysync.py -x gpperfmon/data -x pg_log -x db_dumps \
        ${master_data_directory} ${standby_host_name}:${master_data_directory};" >> ${STANDBY_LOG_FILE} 2>&1
    if [ $? -ne 0 ] ; then
        LOG_MSG "[FATAL]:-Sync master files to standby failed" verbose
        exit 1
    fi
    LOG_MSG "[INFO]:-Update pg_hba configuration"
    update_standby_pg_hba
    ${MKDIR} -p ${master_data_directory}/pg_log | tee -a ${STANDBY_LOG_FILE}
    # Resolve the standby's IP as seen from the master (first ping response).
    STANDBY_IP_ADDRESSES=`${SSH} -o 'StrictHostKeyChecking no' ${hawqUser}@${master_host_name} \
        "${PING} -c1 -n ${standby_host_name} | head -n1 | sed 's/.*(\([0-9]*\.[0-9]*\.[0-9]*\.[0-9]*\)).*/\1/g';"`
    if [ -z "${STANDBY_IP_ADDRESSES}" ] ; then
        LOG_MSG "[FATAL]:-Standby ip address is empty" verbose
        exit 1
    else
        LOG_MSG "[INFO]:-Standby ip address is ${STANDBY_IP_ADDRESSES}" verbose
    fi
    # Start only the master process so the catalog can be edited.
    LOG_MSG "[INFO]:-Start hawq master" verbose
    ${SSH} -o 'StrictHostKeyChecking no' ${hawqUser}@${master_host_name} \
        "${SOURCE_PATH}; hawq start master -a --masteronly >> ${STANDBY_LOG_FILE}"
    if [ $? -ne 0 ] ; then
        LOG_MSG "[ERROR]:-Start HAWQ master failed" verbose
        exit 1
    else
        LOG_MSG "[INFO]:-HAWQ master started" verbose
    fi
    # Drop any previously registered standby before adding this one.
    LOG_MSG "[INFO]:-Try to remove existing standby from catalog" verbose
    ${SSH} -o 'StrictHostKeyChecking no' ${hawqUser}@${master_host_name} \
        "${SOURCE_PATH}; env PGOPTIONS=\"-c gp_session_role=utility\" $PSQL -p ${master_port} -d template1 \
        -c\"select gp_remove_master_standby() where (select count(*) from gp_segment_configuration where role='s') = 1;\";" >> ${STANDBY_LOG_FILE} 2>&1
    ${SSH} -o 'StrictHostKeyChecking no' ${hawqUser}@${master_host_name} \
        "${SOURCE_PATH}; env PGOPTIONS=\"-c gp_session_role=utility\" $PSQL -p ${master_port} -d template1 -c \
        \"select gp_add_master_standby('${standby_host_name}','${STANDBY_IP_ADDRESSES}','');\";" >>${STANDBY_LOG_FILE} 2>&1
    if [ $? -ne 0 ] ; then
        LOG_MSG "[FATAL]:-Register standby infomation failed" verbose
        exit 1
    else
        LOG_MSG "[INFO]:-Register standby to master successfully" verbose
    fi
    # Restart sequence: stop the master-only instance, bring up the standby,
    # then start the full master.
    ${SSH} -o 'StrictHostKeyChecking no' ${hawqUser}@${master_host_name} \
        "${SOURCE_PATH}; hawq stop master -a -M fast;" >> ${STANDBY_LOG_FILE}
    if [ $? -ne 0 ] ; then
        LOG_MSG "[ERROR]:-Stop HAWQ master failed" verbose
        exit 1
    else
        LOG_MSG "[INFO]:-HAWQ master stopped" verbose
    fi
    ${SSH} -o 'StrictHostKeyChecking no' ${hawqUser}@${master_host_name} \
        "${SOURCE_PATH}; hawq start standby -a;" >> ${STANDBY_LOG_FILE}
    if [ $? -ne 0 ] ; then
        LOG_MSG "[ERROR]:-Start HAWQ standby failed" verbose
        exit 1
    else
        LOG_MSG "[INFO]:-HAWQ standby started" verbose
    fi
    sleep 5
    ${SSH} -o 'StrictHostKeyChecking no' ${hawqUser}@${master_host_name} \
        "${SOURCE_PATH}; hawq start master -a;" >> ${STANDBY_LOG_FILE}
    if [ $? -ne 0 ] ; then
        LOG_MSG "[ERROR]:-Start HAWQ master failed" verbose
        exit 1
    else
        LOG_MSG "[INFO]:-HAWQ master started" verbose
    fi
    # Log the resulting segment configuration for diagnostics.
    ${SSH} -o 'StrictHostKeyChecking no' ${hawqUser}@${master_host_name} \
        "${SOURCE_PATH}; env PGOPTIONS=\"-c gp_session_role=utility\" $PSQL -p ${master_port} -d template1 \
        -c\"select * from gp_segment_configuration;\";" >>${STANDBY_LOG_FILE} 2>&1
}
# Initialise a HAWQ segment on this host: validate the configured temp
# directories (must exist and be writable), run initdb, update pg_hba, and
# start the segment. Exits on the first failing step.
segment_init() {
    source ${GPHOME}/greenplum_path.sh
    for tmp_path in `${ECHO} ${hawqSegmentTemp} | sed 's|,| |g'`; do
        if [ ! -d ${tmp_path} ]; then
            LOG_MSG "[ERROR]:-Temp directory is not exist, please create it" verbose
            LOG_MSG "[ERROR]:-Segment init failed on ${host_name}" verbose
            exit 1
        else
            if [ ! -w "${tmp_path}" ]; then
                LOG_MSG "[ERROR]:-Do not have write permission to temp directory, please check" verbose
                LOG_MSG "[ERROR]:-Segment init failed on ${host_name}" verbose
                exit 1
            fi
        fi
    done
    export LD_LIBRARY_PATH=${GPHOME}/lib:${GPHOME}/ext/python/lib:${LD_LIBRARY_PATH}
    ${GPHOME}/bin/initdb -E UNICODE -D ${hawq_data_directory} --locale=${locale} --lc-collate=${hawq_lc_collate} \
        --lc-ctype=${hawq_lc_ctype} --lc-messages=${hawq_lc_messages} --lc-monetary=${hawq_lc_monetary} \
        --lc-numeric=${hawq_lc_numeric} --lc-time=${hawq_lc_time} --max_connections=${segment_max_connections} \
        --shared_buffers=${shared_buffers} --backend_output=${log_dir}/segment.initdb 1>>${SEGMENT_LOG_FILE} 2>&1
    if [ $? -ne 0 ] ; then
        LOG_MSG "[ERROR]:-Postgres initdb failed" verbose
        LOG_MSG "[ERROR]:-Segment init failed on ${host_name}" verbose
        exit 1
    fi
    update_segment_pg_hba 1>>${SEGMENT_LOG_FILE} 2>&1
    # Start in segment (-M segment) mode on the configured port.
    ${PG_CTL} -D ${hawq_data_directory} -l ${hawq_data_directory}/pg_log/startup.log -w -t 60 -o \
        " -p ${hawq_port} --silent-mode=true -M segment -i" start >> ${SEGMENT_LOG_FILE}
    if [ $? -ne 0 ] ; then
        LOG_MSG "[ERROR]:-Segment init failed on ${host_name}" verbose
        exit 1
    fi
}
# Validate (and, for the default location, create) the HAWQ data directory.
# Exits 1 when the path is empty, missing, or not empty.
# NOTE: the function name carries a historical typo ("directorytory"); it is
# kept because the role dispatch at the bottom of this script calls it by
# this exact name.
check_data_directorytory() {
    # If it's the default directory, create it when missing.
    default_mdd=~/hawq-data-directory/masterdd
    default_sdd=~/hawq-data-directory/segmentdd
    if [ "${hawq_data_directory}" = "${default_mdd}" ]; then
        ${MKDIR} -p "${default_mdd}"
    elif [ "${hawq_data_directory}" = "${default_sdd}" ]; then
        ${MKDIR} -p "${default_sdd}"
    fi
    # Reject an empty/unset path outright.
    if [ "${hawq_data_directory}" = "" ]; then
        LOG_MSG "[ERROR]:-Data directory path is not valid value on ${host_name}" verbose
        exit 1
    fi
    # The directory must exist and be empty. Expansions are quoted so paths
    # containing spaces or glob characters do not break the tests.
    if [ -d "${hawq_data_directory}" ]; then
        if [ "$(ls -A "${hawq_data_directory}")" ]; then
            LOG_MSG "[ERROR]:-Data directory ${hawq_data_directory} is not empty on ${host_name}" verbose
            exit 1
        fi
    else
        LOG_MSG "[ERROR]:-Data directory ${hawq_data_directory} does not exist, please create it" verbose
        exit 1
    fi
}
# Validate the standby master data directory. Unlike the primary check, an
# existing-but-empty pg_log subdirectory is tolerated.
# NOTE: the function name keeps its historical typo ("directorytory") because
# the role dispatch at the bottom of this script calls it by this name.
check_standby_data_directorytory() {
    # If it's the default directory, create it when missing.
    default_mdd=~/hawq-data-directory/masterdd
    default_sdd=~/hawq-data-directory/segmentdd
    if [ "${hawq_data_directory}" = "${default_mdd}" ]; then
        ${MKDIR} -p "${default_mdd}"
    elif [ "${hawq_data_directory}" = "${default_sdd}" ]; then
        ${MKDIR} -p "${default_sdd}"
    fi
    # Reject an empty/unset path outright.
    if [ "${hawq_data_directory}" = "" ]; then
        LOG_MSG "[ERROR]:-Data directory path is not valid value on ${host_name}" verbose
        exit 1
    fi
    # Quoted expansions keep these tests working for paths with spaces.
    if [ -d "${hawq_data_directory}" ]; then
        if [ "$(ls -A "${hawq_data_directory}" | ${GREP} -v pg_log)" ]; then
            LOG_MSG "[ERROR]:-Data directory ${hawq_data_directory} is not empty on ${host_name}" verbose
            exit 1
        else
            if [ -d "${hawq_data_directory}/pg_log" ] && [ "$(ls -A "${hawq_data_directory}/pg_log")" ]; then
                LOG_MSG "[ERROR]:-Data directory ${hawq_data_directory} is not empty on ${host_name}" verbose
                exit 1
            fi
        fi
    else
        LOG_MSG "[ERROR]:-Data directory ${hawq_data_directory} does not exist on ${host_name}, please create it" verbose
        exit 1
    fi
}
# Verify every entry in tmp_dir_list exists and is writable; exit 1 otherwise.
# Quoting ${tmp_dir} keeps the tests valid for paths with spaces.
check_temp_directory() {
    for tmp_dir in ${tmp_dir_list}; do
        if [ ! -d "${tmp_dir}" ]; then
            LOG_MSG "[ERROR]:-Temporary directory ${tmp_dir} does not exist, please create it" verbose
            exit 1
        fi
        if [ ! -w "${tmp_dir}" ]; then
            LOG_MSG "[ERROR]:-Temporary directory ${tmp_dir} is not writable, exit." verbose
            exit 1
        fi
    done
}
# Dispatch on the node role requested by the caller. The expansion is quoted
# and the portable "=" operator is used so the test remains valid (instead of
# a syntax error) when object_type is empty or contains whitespace.
if [ "${object_type}" = "master" ]; then
    check_data_directorytory
    check_temp_directory
    master_init
elif [ "${object_type}" = "standby" ]; then
    check_standby_data_directorytory
    standby_init
elif [ "${object_type}" = "segment" ]; then
    check_data_directorytory
    check_temp_directory
    segment_init
else
    ${ECHO} "Please input correct node object"
    exit 1
fi
exit 0
|
<reponame>ixrjog/caesar-web
import request from '@/plugin/axios'
const baseUrl = '/user/application'
/**
 * Page through applications the given user is NOT assigned to.
 * @param {Object} data paging/filter parameters sent as the POST body
 * @returns {Promise} the axios response promise
 */
export function queryApplicationExcludeUserPage (data) {
  const url = baseUrl + '/exclude/page/query'
  return request({ url, method: 'post', data })
}
/**
 * Page through applications the given user IS assigned to.
 * @param {Object} data paging/filter parameters sent as the POST body
 * @returns {Promise} the axios response promise
 */
export function queryApplicationIncludeUserPage (data) {
  const url = baseUrl + '/include/page/query'
  return request({ url, method: 'post', data })
}
/**
 * Page through build jobs of applications NOT assigned to the user.
 * @param {Object} data paging/filter parameters sent as the POST body
 * @returns {Promise} the axios response promise
 */
export function queryApplicationBuildJobExcludeUserPage (data) {
  const url = baseUrl + '/build/job/exclude/page/query'
  return request({ url, method: 'post', data })
}
/**
 * Page through build jobs of applications assigned to the user.
 * @param {Object} data paging/filter parameters sent as the POST body
 * @returns {Promise} the axios response promise
 */
export function queryApplicationBuildJobIncludeUserPage (data) {
  const url = baseUrl + '/build/job/include/page/query'
  return request({ url, method: 'post', data })
}
|
<reponame>smagill/opensphere-desktop
/**
* Data Source Management Framework.
*/
package io.opensphere.mantle.datasources;
|
/*
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License.
*/
import { Inject, Injectable } from "@angular/core";
import { Observable, Subject } from "rxjs";
import { MSAL_INSTANCE } from "./constants";
import { EventMessage, EventMessageUtils, IPublicClientApplication, InteractionStatus } from "@azure/msal-browser";
import { MsalService } from "./msal.service";
/**
 * Relays MSAL events to Angular consumers as RxJS streams:
 * - msalSubject$ re-emits every EventMessage produced by the underlying
 *   PublicClientApplication;
 * - inProgress$ emits the InteractionStatus derived from those events,
 *   when an event maps to one.
 */
@Injectable()
export class MsalBroadcastService {
  // Backing subject for msalSubject$; private so only the MSAL event
  // callback below can emit into it.
  private _msalSubject: Subject<EventMessage>;
  public msalSubject$: Observable<EventMessage>;
  // Backing subject for inProgress$.
  private _inProgress: Subject<InteractionStatus>;
  public inProgress$: Observable<InteractionStatus>;

  constructor(
    @Inject(MSAL_INSTANCE) private msalInstance: IPublicClientApplication,
    private authService: MsalService
  ) {
    this._msalSubject = new Subject<EventMessage>();
    this.msalSubject$ = this._msalSubject.asObservable();
    this._inProgress = new Subject<InteractionStatus>();
    this.inProgress$ = this._inProgress.asObservable();
    // Subscribe to all MSAL events for the lifetime of this service.
    this.msalInstance.addEventCallback((message: EventMessage) => {
      this._msalSubject.next(message);
      // Not every event maps to an interaction status; forward only when it does.
      const status = EventMessageUtils.getInteractionStatusFromEvent(message);
      if (status !== null) {
        this.authService.getLogger().verbose(`BroadcastService - ${message.eventType} results in setting inProgress to ${status}`);
        this._inProgress.next(status);
      }
    });
  }
}
|
//package com.gmail.gustgamer29.listeners.protocollib;
//
//import com.comphenix.protocol.PacketType;
//import com.comphenix.protocol.ProtocolLibrary;
//import com.comphenix.protocol.events.ListenerPriority;
//import com.comphenix.protocol.events.PacketAdapter;
//import com.comphenix.protocol.events.PacketEvent;
//import com.google.common.collect.Sets;
//import org.bukkit.plugin.Plugin;
//
//import java.util.Set;
//
//public class PlayerHidePacketAdapter extends PacketAdapter {
//
// private Set<String> toHide = Sets.newHashSet();
//
//
// public PlayerHidePacketAdapter(Plugin plugin) {
// super(plugin, ListenerPriority.NORMAL, PacketType.Play.Server.PLAYER_INFO);
// }
//
// @Override
// public void onPacketReceiving(PacketEvent event) {
// if(event.getPacketType() == PacketType.Play.Server.PLAYER_INFO){
//
// }
// }
//
// public void registerPacktEvent(){
// ProtocolLibrary.getProtocolManager().addPacketListener(this);
// }
//}
|
# A configuration item. Items form a tree via parent_id and are attached to
# hosts through the host_configs join table.
class ConfigItem < ActiveRecord::Base
  has_many :host_configs, :dependent => :destroy
  has_many :hosts, :through => :host_configs

  validates_uniqueness_of :name, :message => "Name is already being used."

  before_destroy :check_and_remove_deps

  # Returns the parent item's name, or nil when there is no parent or the
  # parent row no longer exists. Uses find_by_id (nil on a missing row)
  # instead of find, which raises ActiveRecord::RecordNotFound and would
  # crash on a dangling parent_id.
  def parent_name
    if self.parent_id
      item = ConfigItem.find_by_id(self.parent_id)
      if item
        return item.name
      end
    end
  end

  # before_destroy hook: detach any config items that reference us as their
  # parent, so no child is left pointing at a deleted row.
  def check_and_remove_deps
    items = ConfigItem.find_all_by_parent_id(self.id)
    items.each { |item|
      item.update_attributes(:parent_id => nil)
    }
  end
end
|
<reponame>SoftwarearchitekturTeam/TypeTogether
package de.hswhameln.typetogether.networking.shared.helperinterfaces;
/**
 * A zero-argument, supplier-like functional interface whose single method is
 * allowed to throw a checked {@link Exception} (which
 * {@code java.util.function.Supplier} does not permit).
 *
 * @param <T> the type of the value produced by {@link #apply()}
 */
// TODO name may be changed later
@FunctionalInterface
public interface FunctionalFunction <T> {
    /**
     * Produces a value.
     *
     * @return the computed value
     * @throws Exception if the computation fails
     */
    T apply() throws Exception;
}
|
#!/bin/bash
# Record the screen (or one configured display region) with ffmpeg.
#   -f <fps>   capture framerate (defaults to 25)
#   -d <n>     record the region of config entry "display_<n>"
# Any remaining arguments form the output filename (default: a timestamp).
while getopts :f:d: flag; do
	# Count parsed flags so the flag/value pairs can be shifted away below.
	((arg_count++))
	case $flag in
		f) fps=$OPTARG;;
		d) display=display_$OPTARG;;
	esac
done
# Make sure the orw config exists before reading geometry from it.
[[ ! -f ~/.config/orw/config ]] && ~/.orw/scripts/generate_orw_config.sh
# Read "<width>x<height>" and "+<x>,<y>" for the chosen display entry
# (falls back to the full_resolution line when -d was not given).
read resolution position <<< $(awk '\
/^'${display:-full_resolution}'/ {
if(/^full/ || xy) print $2 "x" $3, xy
else xy = "+" $2 "," $3 }' ~/.config/orw/config)
# Drop the parsed options; assumes each counted flag consumed one value
# argument (true for -f/-d, which both take arguments).
shift $((arg_count * 2))
[[ $@ ]] && filename="$@" || filename=$(date +"%Y-%m-%d-%H:%M")
#~/.orw/scripts/notify.sh -p "<span font='Roboto Mono 10'> </span>recording started"
~/.orw/scripts/notify.sh -s osd -i 'recording started'
#~/.orw/scripts/notify.sh osd 'recording started'
#~/.orw/scripts/notify.sh osd 'recording started'
# Capture the X11 region plus the default pulse audio source;
# ${fps-25} falls back to 25 when -f was not supplied.
ffmpeg -y -f x11grab -r ${fps-25} -s $resolution -draw_mouse 1 -i $DISPLAY$position \
-f pulse -async 1 -i default -c:v libx264 -preset ultrafast -vsync 1 ~/Videos/$filename.mp4
|
export class UpdateUserProfileDto {
email:string
password:<PASSWORD>
username:string
} |
<reponame>yanovitchsky/sequent
class Symbol
  # JSON deserialization hook: blank input maps to nil; anything else is
  # converted with #to_sym when the value supports it (try returns nil
  # otherwise).
  def self.deserialize_from_json(value)
    return nil if value.blank?
    value.try(:to_sym)
  end
end
class String
  # JSON deserialization hook: nil stays nil, everything else is stringified.
  def self.deserialize_from_json(value)
    return nil if value.nil?
    value.to_s
  end
end
class Integer
  # JSON deserialization hook: blank input maps to nil, otherwise coerce
  # with #to_i.
  def self.deserialize_from_json(value)
    return nil if value.blank?
    value.to_i
  end
end
class Float
  # JSON deserialization hook: blank input maps to nil, otherwise coerce
  # with #to_f.
  def self.deserialize_from_json(value)
    return nil if value.blank?
    value.to_f
  end
end
class BigDecimal
  # JSON deserialization hook: nil stays nil; any other value is handed to
  # Kernel#BigDecimal, which raises on input it cannot parse.
  def self.deserialize_from_json(value)
    value.nil? ? nil : BigDecimal(value)
  end
end
module Boolean
  # JSON deserialization hook: nil stays nil; "present" values pass through
  # unchanged, while blank-but-not-nil values (e.g. "" or false) become false.
  def self.deserialize_from_json(value)
    return nil if value.nil?
    value.present? ? value : false
  end
end
class Date
  # Parses a Date from request params. Date instances pass through, blank
  # values become nil, and unparsable strings are returned unchanged so the
  # caller's validations can flag them.
  def self.from_params(value)
    return value if value.is_a?(Date)
    return nil if value.blank?
    begin
      Date.iso8601(value.dup)
    rescue ArgumentError
      value
    end
  end

  # JSON deserialization hook: blank input maps to nil; otherwise the value
  # must be a valid ISO8601 date (parse errors propagate).
  def self.deserialize_from_json(value)
    return nil if value.blank?
    Date.iso8601(value.dup)
  end
end
class DateTime
  # Parses a DateTime from request params. Blank values become nil and
  # unparsable strings are returned unchanged so validations can flag them.
  def self.from_params(value)
    return nil if value.blank?
    begin
      DateTime.iso8601(value.dup)
    rescue ArgumentError
      value
    end
  end

  # JSON deserialization hook: blank input maps to nil; otherwise the value
  # must be a valid ISO8601 datetime (parse errors propagate).
  def self.deserialize_from_json(value)
    return nil if value.blank?
    DateTime.iso8601(value.dup)
  end
end
class Array
  # JSON deserialization hook: arrays need no conversion and pass through as-is.
  def self.deserialize_from_json(value)
    value
  end
end
class Hash
  # JSON deserialization hook: hashes need no conversion and pass through as-is.
  def self.deserialize_from_json(value)
    value
  end
end
|
#!/usr/bin/env python
"""This module contains the AST node types and the classes for extracting them from Java and Python.
The most important classes here are ExtractAstPython and ExtractAstJava.
"""
import ast
from lib2to3 import refactor, pgen2
import javalang
from . import error
from .complexity_java import ComplexityJava
# for python
# https://docs.python.org/3/library/ast.html
# node types from: http://greentreesnakes.readthedocs.io/en/latest/nodes.html
PYTHON_NODE_TYPES = ['Num', 'Str', 'Bytes', 'List', 'Tuple', 'Set', 'Dict', 'Ellipsis', 'NameConstant', 'Name', 'Load', 'Store', 'Del', 'Starred', 'Expr', 'UnaryOp', 'UAdd', 'USub', 'Not', 'Invert', 'BinOp', 'Add', 'Sub', 'Mult', 'Div', 'FloorDiv', 'Mod', 'Pow', 'LShift', 'RShift', 'BitOr', 'BitXor', 'BitAnd', 'MatMult', 'BoolOp', 'And', 'Or', 'Compare', 'Eq', 'NotEq', 'Lt', 'LtE', 'Gt', 'GtE', 'Is', 'IsNot', 'In', 'NotIn', 'Call', 'keyword', 'IfExp', 'Attribute', 'Subscript', 'Index', 'Slice', 'ExtSlice', 'ListComp', 'SetComp', 'GeneratorExp', 'DictComp', 'comprehension', 'Assign', 'AugAssign', 'Print', 'Raise', 'Assert', 'Delete', 'Pass', 'Import', 'ImportFrom', 'alias', 'Module', 'Constant', 'FormattedValue', 'JoinedString']
# control flow
PYTHON_NODE_TYPES += ['If', 'For', 'While', 'Break', 'Continue', 'Try', 'TryFinally', 'TryExcept', 'ExceptHandler', 'With', 'withitem']
# function and class defs
PYTHON_NODE_TYPES += ['FunctionDef', 'Lambda', 'arguments', 'arg', 'Return', 'Yield', 'YieldFrom', 'Global', 'Nonlocal', 'ClassDef']
# new async stuff (python 3.5)
PYTHON_NODE_TYPES += ['AsyncFunctionDef', 'Await', 'AsyncFor', 'AsyncWith']
# for java
# node types from: https://github.com/c2nes/javalang/blob/master/javalang/tree.py
JAVA_NODE_TYPES = [
'CompilationUnit', 'Import', 'Documented', 'Declaration', 'TypeDeclaration', 'PackageDeclaration', 'ClassDeclaration', 'EnumDeclaration', 'InterfaceDeclaration', 'AnnotationDeclaration',
'Type', 'BasicType', 'ReferenceType', 'TypeArgument',
'TypeParameter',
'Annotation', 'ElementValuePair', 'ElementArrayValue',
'Member', 'MethodDeclaration', 'FieldDeclaration', 'ConstructorDeclaration',
'ConstantDeclaration', 'ArrayInitializer', 'VariableDeclaration', 'LocalVariableDeclaration', 'FormalParameter', 'InferredFormalParameter',
'Statement', 'IfStatement', 'WhileStatement', 'DoStatement', 'ForStatement', 'AssertStatement', 'BreakStatement', 'ContinueStatement', 'ReturnStatement', 'ThrowStatement', 'SynchronizedStatement',
'TryStatement', 'SwitchStatement', 'BlockStatement', 'StatementExpression',
'TryResource', 'CatchClause', 'CatchClauseParameter',
'SwitchStatementCase', 'ForControl', 'EnhancedForControl',
'Expression', 'Assignment', 'TernaryExpression', 'BinaryOperation', 'Cast', 'MethodReference', 'LambdaExpression',
'Primary', 'Literal', 'This', 'MemberReference', 'Invocation', 'ExplicitConstructorInvocation', 'SuperConstructorInvocation', 'MethodInvocation', 'SuperMethodInvocation', 'SuperMemberReference', 'ArraySelector', 'ClassReference', 'VoidClassReference', 'VariableDeclarator', 'ClassCreator', 'ArrayCreator', 'InnerClassCreator',
'EnumBody', 'EnumConstantDeclaration', 'AnnotationMethod',
]
# https://docs.python.org/3/library/2to3.html
def convert_2to3(file_content, file_name):
    """Convert Python 2 source to Python 3 so the stdlib ast can parse it.

    Runs every default lib2to3 fixer over ``file_content`` and returns the
    refactored source as a string. ``file_name`` is only used by the
    refactoring tool for error reporting.
    """
    fixer_names = refactor.get_fixers_from_package("lib2to3.fixes")
    tool = refactor.RefactoringTool(set(fixer_names))
    refactored_tree = tool.refactor_string(file_content, file_name)
    return str(refactored_tree)
class NodePathVisitor:
    """Depth-aware variant of ast.NodeVisitor.

    Uses the same ``visit_<ClassName>`` dispatch scheme as ast.NodeVisitor,
    but threads the current tree depth through every call so subclasses
    (e.g. a pretty-printer) can indent by level.
    """

    def visit(self, node, level=0):
        """Dispatch ``node`` to its type-specific visitor, or generic_visit."""
        handler = getattr(self, 'visit_' + type(node).__name__, self.generic_visit)
        return handler(node, level)

    def generic_visit(self, node, level):
        """Visit every AST child of ``node`` one level deeper."""
        for _field, value in ast.iter_fields(node):
            children = value if isinstance(value, list) else [value]
            for child in children:
                if isinstance(child, ast.AST):
                    self.visit(child, level=level + 1)
class NodePrintVisitor(NodePathVisitor):
    """Pretty-printer: writes one line per AST node, indented by tree depth."""

    def generic_visit(self, node, level):
        """Print this node (with its ``id`` when present) and recurse."""
        label = type(node).__name__
        node_id = getattr(node, 'id', None)
        if node_id:
            label = '{} ({})'.format(label, node_id)
        print(' ' * level + label)
        super().generic_visit(node, level)
class NodeTypeCountVisitor(ast.NodeVisitor):
    """Counts node types and collects imports for Python sources.

    After ``visit(tree)``:
      - ``type_counts`` maps every known node-type name to its count,
      - ``node_count`` is the total number of nodes visited,
      - ``imports`` lists imported module paths ("pkg.name" for from-imports).
    """

    def __init__(self):
        self.type_counts = {k: 0 for k in PYTHON_NODE_TYPES}  # set 0 for every known type
        self.imports = []
        self.node_count = 0
        super().__init__()

    def generic_visit(self, node):
        type_name = type(node).__name__
        self.node_count += 1
        if type_name in self.type_counts:
            self.type_counts[type_name] += 1
        else:
            # An unknown node would silently skew the fixed-length feature
            # vector, so fail loudly instead. (Message wording matches the
            # Java extractor below.)
            raise error.CoastException("Unknown NodeType encountered: {}".format(type_name))
        if type_name == 'Import':
            for alias_node in getattr(node, 'names', []):
                self.imports.append(alias_node.name)
        # "from datetime import date" -> "datetime.date". A relative import
        # ("from . import x") has module=None, which previously produced the
        # bogus entry "None.x"; record it as ".x" instead.
        if type_name == 'ImportFrom':
            module = getattr(node, 'module', None) or ''
            for alias_node in getattr(node, 'names', []):
                self.imports.append('{}.{}'.format(module, alias_node.name))
        super().generic_visit(node)
class ExtractAstJava:
    """Extracts the AST from .java files.

    Uses the javalang library. Call :meth:`load` once; afterwards
    ``imports``, ``type_counts`` and ``node_count`` hold the collected
    statistics.
    """
    def __init__(self, filename):
        self.astdata = None  # javalang CompilationUnit, populated by load()
        self.imports = []  # fully qualified import paths ("pkg.Cls", "pkg.*")
        self.type_counts = {k: 0 for k in JAVA_NODE_TYPES} # set 0 for every known type
        self.node_count = 0  # total number of AST nodes seen by load()
        self.filename = filename
    def method_metrics(self):
        """Return per-method cognitive-complexity metrics (valid after load())."""
        # new complexity metrics
        cj = ComplexityJava(self.astdata)
        return list(cj.cognitive_complexity()) # we list() here because cognitive_complexity is a generator
    def load(self):
        """Read the AST and collect imports plus node-type counts.

        Raises error.ParserException on parser/lexer failures and
        error.CoastException on a node type missing from JAVA_NODE_TYPES.
        """
        try:
            with open(self.filename, 'r', encoding='latin-1') as f: # latin-1 because we assume no crazy umlaut function names
                self.astdata = javalang.parse.parse(f.read())
        except javalang.parser.JavaSyntaxError:
            err = 'Parser Error in file: {}'.format(self.filename)
            raise error.ParserException(err)
        except javalang.tokenizer.LexerError as le:
            err = 'Lexer Error in file: {}\n{}'.format(self.filename, le)
            raise error.ParserException(err)
        except IndexError:
            # javalang raises a bare IndexError on syntax newer than Java 8.
            err = 'IndexError in JavaLang, Java > 8'
            raise error.ParserException(err)
        assert self.astdata is not None
        for path, node in self.astdata.walk_tree_iterative():
            type_name = type(node).__name__
            self.node_count += 1
            # The compilation unit carries the import list; record "pkg.path[.*]".
            if type_name == 'CompilationUnit':
                for imp in getattr(node, 'imports', []):
                    import_line = imp.path
                    if imp.wildcard:
                        import_line += '.*'
                    self.imports.append(import_line)
            if type_name in self.type_counts.keys():
                self.type_counts[type_name] += 1
            else:
                # An unknown node would skew the fixed-length feature vector.
                raise error.CoastException("Unknown NodeType encountered: {}".format(type_name))
class ExtractAstPython:
    """Extracts the AST from .py files using the built-in ast module and the
    visitor pattern; node statistics are gathered by NodeTypeCountVisitor."""

    def __init__(self, filename):
        self.astdata = None
        self.filename = filename

    def load(self):
        """Parse the file (converting Python 2 syntax first) and collect stats.

        A trailing newline is appended because lib2to3 fails without it.
        Raises error.ParserException / error.SyntaxException on unparsable
        input.
        """
        try:
            with open(self.filename, 'r', encoding='latin-1') as f:
                source = convert_2to3(f.read() + '\n', self.filename)
            self.astdata = ast.parse(source=source, filename=self.filename)
            assert self.astdata is not None
            self.nt = NodeTypeCountVisitor()
            self.nt.visit(self.astdata)
        except pgen2.parse.ParseError as e:
            raise error.ParserException('Parser Error in file: {}, error: {}'.format(self.filename, e))
        except SyntaxError as e:
            raise error.SyntaxException('Syntax Error in file: {}, error: {}'.format(self.filename, e))

    @property
    def imports(self):
        """Imports collected by the visitor (valid after load())."""
        return self.nt.imports

    @property
    def type_counts(self):
        """Per-node-type occurrence counts (valid after load())."""
        return self.nt.type_counts

    @property
    def node_count(self):
        """Total number of AST nodes (valid after load())."""
        return self.nt.node_count
|
<reponame>raihanannafi/perfstatbeat<filename>vendor/github.com/elastic/beats/filebeat/prospector/docker/prospector.go
package docker
import (
"fmt"
"path"
"github.com/elastic/beats/filebeat/channel"
"github.com/elastic/beats/filebeat/prospector"
"github.com/elastic/beats/filebeat/prospector/log"
"github.com/elastic/beats/libbeat/common"
"github.com/elastic/beats/libbeat/common/cfgwarn"
"github.com/pkg/errors"
)
// init registers the "docker" prospector type with the prospector registry so
// that `type: docker` entries in the filebeat config resolve to NewProspector.
// Registration failure is unrecoverable at startup, hence the panic.
func init() {
	err := prospector.Register("docker", NewProspector)
	if err != nil {
		panic(err)
	}
}
// NewProspector creates a new docker prospector.
// It validates the docker-specific settings, rewrites the generic `paths`
// option to point at each configured container's JSON log files, and then
// delegates to the standard log prospector.
func NewProspector(cfg *common.Config, outletFactory channel.Factory, context prospector.Context) (prospector.Prospectorer, error) {
	cfgwarn.Experimental("Docker prospector is enabled.")

	config := defaultConfig
	if err := cfg.Unpack(&config); err != nil {
		return nil, errors.Wrap(err, "reading docker prospector config")
	}

	// Wrap log prospector with custom docker settings
	if len(config.Containers.IDs) == 0 {
		return nil, errors.New("Docker prospector requires at least one entry under 'containers.ids'")
	}

	// One glob path per container: <containers.path>/<id>/*.log
	for idx, containerID := range config.Containers.IDs {
		cfg.SetString("paths", idx, path.Join(config.Containers.Path, containerID, "*.log"))
	}

	if err := checkStream(config.Containers.Stream); err != nil {
		return nil, err
	}

	// Tell the log harvester to parse docker JSON logs for the chosen stream.
	if err := cfg.SetString("docker-json", -1, config.Containers.Stream); err != nil {
		return nil, errors.Wrap(err, "update prospector config")
	}

	return log.NewProspector(cfg, outletFactory, context)
}
// checkStream validates the containers.stream setting; only "all", "stdout"
// and "stderr" are accepted.
func checkStream(val string) error {
	switch val {
	case "all", "stdout", "stderr":
		return nil
	}
	return fmt.Errorf("Invalid value for containers.stream: %s, supported values are: all, stdout, stderr", val)
}
|
#!/bin/sh
set -x

# Create a new image version with latest code changes.
docker build . --tag pleo-antaeus

# Build and run the code inside the container.
# NOTE: the build-cache volume exists only so incremental builds are much
# faster. This comment used to sit INSIDE the backslash-continued `docker run`
# command, which terminated the command early and made the `--volume` flag and
# the image name execute as a separate, broken shell command.
docker run \
  --publish 7000:7000 \
  --rm \
  --interactive \
  --tty \
  --volume pleo-antaeus-build-cache:/root/.gradle \
  pleo-antaeus
|
import random
def generate_strings(length, characters, count=None):
    """Generate random strings drawn from ``characters``.

    Args:
        length: number of characters in each generated string.
        characters: non-empty sequence of characters to choose from.
        count: how many strings to generate; defaults to ``length`` to stay
            backward compatible (the original always produced ``length``
            strings).

    Returns:
        A list of ``count`` random strings, each ``length`` characters long.
    """
    if count is None:
        count = length
    return [
        ''.join(random.choice(characters) for _ in range(length))
        for _ in range(count)
    ]
package com.jaminh.ws;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
/**
 * Entry point of the token service. {@code @SpringBootApplication} enables
 * component scanning and auto-configuration rooted at this package.
 */
@SpringBootApplication
public class SpringTokenServiceApplication {

    /**
     * Boots the embedded Spring application context.
     *
     * @param args command-line arguments forwarded to Spring
     */
    public static void main(String[] args) {
        SpringApplication.run(SpringTokenServiceApplication.class, args);
    }
}
|
#!/bin/bash

# bash "strict mode"
set -e
set -u
set -o pipefail

# Build fresh: wipe any previous build artifacts.
if [[ -d build ]];then
    rm -rf build/*
fi

# Configure: sources in ./src, build tree in ./build.
cmake \
    -S src \
    -B build

# List the (non-advanced) cache variables of the configured build tree.
cmake build -L

# Compile, leaving one core free.
cmake --build build -- -j$(($(nproc) - 1))
|
// Copyright 2013-2020, University of Colorado Boulder
import './BinPacker.js';
import './Bounds2.js';
import './Bounds3.js';
import './Complex.js';
import './ConvexHull2.js';
import './DampedHarmonic.js';
import './DelaunayTriangulation.js';
import './Dimension2.js';
import dot from './dot.js';
import './EigenvalueDecomposition.js';
import './LinearFunction.js';
import './LUDecomposition.js';
import './Matrix.js';
import './Matrix3.js';
import './Matrix4.js';
import './MatrixOps3.js';
import './Permutation.js';
import './Plane3.js';
import './QRDecomposition.js';
import './Quaternion.js';
import './Random.js';
import './Range.js';
import './Ray2.js';
import './Ray3.js';
import './Rectangle.js';
import './SingularValueDecomposition.js';
import './Sphere3.js';
import './Transform3.js';
import './Transform4.js';
import './Utils.js';
import './Vector2.js';
import './Vector3.js';
import './Vector4.js';
export default dot; |
<gh_stars>0
import json
class SensorData:
    """Holds the latest readings from the ambient and pool sensors."""

    def __init__(self):
        # Last known readings; zeroed until the first message arrives.
        self.temp = 0.0
        self.humidity = 0
        self.pool = 0.0

    def set_temp_humidity(self, string):
        """Update ambient temperature and humidity from a JSON payload."""
        payload = json.loads(string)
        self.temp = payload["temp"]
        self.humidity = payload["humidity"]

    def set_pool(self, string):
        """Update the pool reading from a JSON payload's "temp" field."""
        self.pool = json.loads(string)["temp"]

    def jsonify(self) -> str:
        """Serialize the current readings to a JSON object string."""
        return json.dumps(self.__dict__)
|
<gh_stars>0
declare module "dynoxhost.js" {
export class DynoxHost {
constructor(apiKey: string) {}
getUsage(id: string)
getDetails(id: string)
setPowerState(id: string, state: string)
createBackup(id: string)
getBackupDetails(id: string, backupID: string)
}
} |
#!/bin/sh
# For every immediate subdirectory (except ./output), run `spago upgrade-set`.
# Fixes: the shebang was missing its "!" ("#/bin/sh"), so the line was a
# plain comment and the file ran under the caller's shell; -maxdepth is also
# moved before the tests, since find expects global options first.
find . -maxdepth 1 -type d -not -path ./output -not -path . -exec sh -c "cd {} && pwd && spago upgrade-set && cd .." \;
|
import Enumerable from "./enumerable";
/**
 * Convenience factory: wraps any iterable in an {@link Enumerable} so its
 * query operators can be chained.
 *
 * @param iterable the source sequence to wrap
 * @returns a new Enumerable over the given iterable
 */
export default function<T>(iterable: Iterable<T>): Enumerable<T> {
    return new Enumerable(iterable);
}
|
#ifndef MODELPC_H
#define MODELPC_H
#include <QObject>
#include <QImage>
#include <QByteArray>
#include <QColor>
#include <QPoint>
#include <QVector>
#include <QProcess>
#include <QTime>
#include <QFileInfo>
#include <QtGui>
#include <QtCore/QRandomGenerator>
#include <QPair>
#include "qaesencryption.h"
#include <QCryptographicHash>
/*! \file modelpc.h
* Header of ModelPC class
* \sa ControllerPC, ModelPC, ViewPC
*/
/*!
 * \brief The ModelPC class
 * Model Layer of the app.
 * Main class that does the work of PictureCrypt logic
 * Controled by ControllerPC
 * \sa ViewPC, ControllerPC
 * \author <NAME> (waleko)
 */
class ModelPC : public QObject
{
    Q_OBJECT
public:
    ModelPC();
    //! Embedding formats: auto-detect, legacy v1.3, current v1.4, external JPHS tool.
    enum CryptMode {Unspecified, v1_3, v1_4, jphs_mode};
    //! Static convenience wrapper around the encrypt slot.
    static QImage *Encrypt(QByteArray data, QImage *image, CryptMode _mode, QString key = "", int _bitsUsed = 8, QString *_error = nullptr);
    //! Static convenience wrapper around the decrypt slot.
    static QByteArray Decrypt(QImage * image, QString key, CryptMode _mode = Unspecified, QString *_error = nullptr);
signals:
    /*!
     * \brief alertView Signal to be called to create MessageBox.
     * \param messageCode Message Code to be shown.
     * \param isWarning Flag if message is critical.
     * \sa ModelPC::alert, ViewPC::alert
     */
    void alertView(QString messageCode, bool isWarning);
    /*!
     * \brief saveData Signal to be called to save data from ModelPC::decrypt.
     * \param data Data to be saved.
     */
    void saveData(QByteArray data);
    /*!
     * \brief saveImage Signal to be called to save image from ModelPC::encrypt.
     * \param image Image to be saved.
     */
    void saveImage(QImage *image);
    /*!
     * \brief setProgress Signal to be called to set progress of ProgressDialog.
     * \param val Value to be set.
     * \sa ViewPC::setProgress
     */
    void setProgress(int val);
public slots:
    //! Embeds \a data into \a image using the chosen mode and key.
    QImage *encrypt(QByteArray data, QImage *image, int _mode, QString key = "", int _bitsUsed = 8, QString *_error = nullptr);
    //! Extracts the payload hidden in \a image using \a key.
    QByteArray decrypt(QImage * image, QString key, int _mode = Unspecified, QString *_error = nullptr);
    //! Aborts the current operation, reporting \a message as the reason.
    void fail(QString message);
    //! Emits alertView with the given message/severity.
    void alert(QString message, bool isWarning = false);
public:
    //! Reverses zip(): decompresses (and decodes with \a key) a payload.
    QByteArray unzip(QByteArray data, QByteArray key);
    /*!
     * \brief success Flag that true by default,
     * but in case of error or cancelling of ProgressDialog it turns to false, which stops execution of ModelPC::circuit
     */
    bool success;
    /*!
     * \brief version Version of the class
     */
    long version;
    /*!
     * \brief versionString Version as string
     */
    QString versionString;
    /*!
     * \brief defaultJPHSDir Default JPHS directory
     */
    QString defaultJPHSDir;
protected:
    //! Static convenience wrapper around the inject slot.
    static QImage *Inject(QByteArray encr_data, QImage * image, CryptMode _mode, int _bitsUsed = 8, QString *_error = nullptr);
    //! Core embedding loop; see the implementation for the exact pixel scheme.
    void circuit(QImage * image, QByteArray * data, long long int countBytes);
    //! Delegates embedding to the external JPHS tool (see jphs_mode).
    void jphs(QImage * image, QByteArray * data);
    //! Processes one pixel at \a pos; \a were tracks already-visited pixels.
    void processPixel(QPoint pos, QVector<QPoint> *were, bool isEncrypt);
    //! v1.4 embedding entry point.
    void encryptv1_4(QImage *image, QByteArray data, QString key);
    //! v1.3 extraction entry point.
    QByteArray decryptv1_3(QImage * image, QString key);
    //! v1.4 extraction entry point.
    QByteArray decryptv1_4(QImage * image, QString key);
    //! Shared v1.4 pixel walk used by both encrypt and decrypt paths.
    void proccessPixelsv1_4(QImage *image, QByteArray* data, QByteArray key, bool isEncrypt, QVector<QPair<QPoint, QPair<int, int> > > *were, long long size = -1);
    //! Compresses (and encodes with \a key) a payload; reversed by unzip().
    QByteArray zip(QByteArray data, QByteArray key);
    /*!
     * \brief error Current error
     */
    QString * error;
private:
    int bitsUsed;                //!< Bits of payload carried per pixel/channel setting.
    bool fileExists(QString path);       //!< True when \a path names an existing file.
    QByteArray bytes(long long n);       //!< Serializes \a n into a byte array.
    unsigned int mod(int input);         //!< Helper arithmetic wrapper; see implementation.
    QByteArray ver_byte;                 //!< Serialized version marker embedded in images.
    QColor RGBbytes(long long byte);     //!< Maps a byte value onto an RGB color.
    QString generateVersionString(long ver); //!< Formats a numeric version as text.
    uint randSeed();                     //!< Seed source for the random generator.
    bool isTry = false;                  // NOTE(review): purpose not visible in this header -- confirm in the .cpp
    QByteArray * circuitData;            //!< Payload buffer shared with circuit().
    QImage * circuitImage;               //!< Image being processed by circuit().
    long long circuitCountBytes;         //!< Payload size tracked by circuit().
    long cur;                            //!< Progress cursor used while processing.
    bool mustGoOn(bool isEncrypt);       //!< Whether processing should continue (checks success/cancel).
    QVector <bool> bitsBuffer;           // NOTE(review): appears to buffer bits for push()/pop() -- confirm
    long pop(int bits = -1);             //!< Takes bits out of the bit buffer.
    void push(int data, int bits = -1);  //!< Puts bits into the bit buffer.
    void setError(QString word);         //!< Records \a word into *error.
    QByteArray GetRandomBytes(long long count = 32); //!< Produces \a count random bytes.
protected slots:
    //! Embeds already-encrypted data into \a image (no further encryption).
    QImage *inject(QByteArray encr_data, QImage * image, int _mode, int _bitsUsed = 8, QString *_error = nullptr);
};
#endif // MODELPC_H
|
#!/bin/bash
# Source the Kaldi environment (adds nnet3-* binaries to PATH).
. path.sh
# Report the objective value of the trained x-vector network over the
# valid_diagnostic_adv examples (presumably adversarial validation data --
# confirm against the egs generation step); egs are merged into minibatches
# of size 1..64 before scoring.
nnet3-compute-prob exp/xvector_nnet_1a_kadv5_rm457/final.raw 'ark,bg:nnet3-copy-egs scp:exp/xvector_nnet_1a_kadv5_rm457/egs/valid_diagnostic_adv.scp ark:- | nnet3-merge-egs --minibatch-size=1:64 ark:- ark:- |'
|
package com.iplante.imdb.movies.service;
import com.iplante.imdb.movies.entity.Movie;
import com.iplante.imdb.movies.repository.MovieRepository;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Pageable;
import org.springframework.stereotype.Service;
import javax.persistence.EntityNotFoundException;
import java.util.List;
/**
 * Service in charge of custom CRUD operations against {@link Movie} entities.
 *
 * @author <NAME>
 * @version 1
 * @since 8/26/20
 */
@Service
public class MovieService {

    private final CastClient castClient;
    private final MovieRepository movieRepository;

    /**
     * Service constructor.
     *
     * @param castClient      the REST client used to interact with the Cast API
     * @param movieRepository the {@link Movie} repository
     */
    @Autowired
    public MovieService(CastClient castClient, MovieRepository movieRepository) {
        this.castClient = castClient;
        this.movieRepository = movieRepository;
    }

    /**
     * Persist a list of {@link Movie} entities.
     *
     * @param movieList the movies to save
     */
    public void addMovies(List<Movie> movieList) {
        movieRepository.saveAll(movieList);
    }

    /**
     * Get all cast members for a given {@link Movie} by delegating to the
     * Cast API. Acts as a pure proxy: the Cast API response is returned
     * unchanged, hence the {@code Object} return type.
     *
     * @param movieId  the ID of the {@link Movie}
     * @param pageable the optional Pageable query parameters, forwarded as-is
     * @return whatever the Cast API returned for the movie's cast IDs
     * @throws EntityNotFoundException if no {@link Movie} exists for the given ID
     */
    public Object getMovieCast(long movieId, Pageable pageable) {
        final var movie = movieRepository.findById(movieId)
                .orElseThrow(() -> new EntityNotFoundException("Movie not found: movieId=" + movieId));
        return castClient.getMovieCast(movie.getCast(), pageable);
    }

    /**
     * Get a count of all {@link Movie} entities in the database.
     *
     * @return the number of persisted movies
     */
    public long getMovieCount() {
        return movieRepository.count();
    }
}
|
#ifndef __HARDWARE_HXX
#define __HARDWARE_HXX
#include <os/LinuxGpio.hxx>
#include <os/LinuxPWM.hxx>
#include "utils/GpioInitializer.hxx"
#define HARDWARE_IMPL "PB Multifunction board"

// On chip GPIO:
// OD: 7, 19, 20, 23,
// Motor: 26, 27, 45, 46,
// Points: 47, 48, 50, 52,
// Buttons: 57, 58, 59 60,
// LEDs: 64, 65, 86 87,
// Signals: I2C1
// GPIO Header: 110, 111, 112, 113,
// 114, 115, 116, 117

// Occupancy-detector inputs (active low, per the pin type name).
#define ODPin GpioInputActiveLow
GPIO_PIN(OD1, ODPin, 7);
GPIO_PIN(OD2, ODPin, 19);
GPIO_PIN(OD3, ODPin, 20);
GPIO_PIN(OD4, ODPin, 23);

// Motor drive outputs (safe state low) and point-position sense inputs.
#define MotorPin GpioOutputSafeLow
#define SensePin GpioInputActiveHigh
GPIO_PIN(Motor1, MotorPin, 26);
GPIO_PIN(Motor2, MotorPin, 27);
GPIO_PIN(Motor3, MotorPin, 45);
GPIO_PIN(Motor4, MotorPin, 46);
GPIO_PIN(Points1, SensePin, 47);
GPIO_PIN(Points2, SensePin, 48);
GPIO_PIN(Points3, SensePin, 50);
GPIO_PIN(Points4, SensePin, 52);

// Push-button inputs (active low).
#define ButtonPin GpioInputActiveLow
GPIO_PIN(Button1, ButtonPin, 57);
GPIO_PIN(Button2, ButtonPin, 58);
GPIO_PIN(Button3, ButtonPin, 59);
GPIO_PIN(Button4, ButtonPin, 60);

// Indicator LED outputs (safe state low).
#define LEDPin GpioOutputSafeLow
GPIO_PIN(LED1, LEDPin, 64);
GPIO_PIN(LED2, LEDPin, 65);
GPIO_PIN(LED3, LEDPin, 86);
GPIO_PIN(LED4, LEDPin, 87);

// Uncommitted header GPIOs, all configured as safe-low outputs.
GPIO_PIN(GPIO1, GpioOutputSafeLow, 110);
GPIO_PIN(GPIO2, GpioOutputSafeLow, 111);
GPIO_PIN(GPIO3, GpioOutputSafeLow, 112);
GPIO_PIN(GPIO4, GpioOutputSafeLow, 113);
GPIO_PIN(GPIO5, GpioOutputSafeLow, 114);
GPIO_PIN(GPIO6, GpioOutputSafeLow, 115);
GPIO_PIN(GPIO7, GpioOutputSafeLow, 116);
GPIO_PIN(GPIO8, GpioOutputSafeLow, 117);

// Aggregates every pin above so they can all be initialized in one call.
typedef GpioInitializer<OD1_Pin, OD2_Pin, OD3_Pin, OD4_Pin,
                        Motor1_Pin, Motor2_Pin, Motor3_Pin, Motor4_Pin,
                        Points1_Pin, Points2_Pin, Points3_Pin, Points4_Pin,
                        Button1_Pin, Button2_Pin, Button3_Pin, Button4_Pin,
                        LED1_Pin, LED2_Pin, LED3_Pin, LED4_Pin,
                        GPIO1_Pin, GPIO2_Pin, GPIO3_Pin, GPIO4_Pin,
                        GPIO5_Pin, GPIO6_Pin, GPIO7_Pin, GPIO8_Pin> GpioInit;

// PWM chip index backing the A*/B* channels below (the original author's
// own comment flags this value as a guess).
static constexpr uint32_t PWMCHIP = 8; //guess...
// PWM channel indices A0-A7 and B0-B7; the LinuxPWM objects themselves are
// defined in the matching implementation file.
static constexpr uint32_t A0 = 0;
extern LinuxPWM A0_Pin;
static constexpr uint32_t A1 = 1;
extern LinuxPWM A1_Pin;
static constexpr uint32_t A2 = 2;
extern LinuxPWM A2_Pin;
static constexpr uint32_t A3 = 3;
extern LinuxPWM A3_Pin;
static constexpr uint32_t A4 = 4;
extern LinuxPWM A4_Pin;
static constexpr uint32_t A5 = 5;
extern LinuxPWM A5_Pin;
static constexpr uint32_t A6 = 6;
extern LinuxPWM A6_Pin;
static constexpr uint32_t A7 = 7;
extern LinuxPWM A7_Pin;
static constexpr uint32_t B0 = 8;
extern LinuxPWM B0_Pin;
static constexpr uint32_t B1 = 9;
extern LinuxPWM B1_Pin;
static constexpr uint32_t B2 = 10;
extern LinuxPWM B2_Pin;
static constexpr uint32_t B3 = 11;
extern LinuxPWM B3_Pin;
static constexpr uint32_t B4 = 12;
extern LinuxPWM B4_Pin;
static constexpr uint32_t B5 = 13;
extern LinuxPWM B5_Pin;
static constexpr uint32_t B6 = 14;
extern LinuxPWM B6_Pin;
static constexpr uint32_t B7 = 15;
extern LinuxPWM B7_Pin;

// Transport selection: SocketCAN on can1 is active; the commented-out
// alternatives preserve the GridConnect/TCP configurations for reference.
#define USE_SOCKET_CAN_PORT 1
#define DEFAULT_CAN_SOCKET "can1"
//#define USE_GRIDCONNECT_HOST 1
//#define DEFAULT_TCP_GRIDCONNECT_PORT 12021
//#define DEFAULT_TCP_GRIDCONNECT_HOST localhost
//#define USE_OPENLCB_TCP_HOST 1
//#define DEFAULT_OPENLCB_TCP_PORT 12000
//#define DEFAULT_OPENLCB_TCP_HOST localhost
//#define PRINT_ALL_PACKETS
//#define HAVE_SOCKET_CAN_PORT
//#define SOCKET_CAN_PORT "vcan0"
#endif // __HARDWARE_HXX
|
#!/usr/bin/env bash
# Created by deirk93 on 4/12/19
# Installs and runs a private docker registry plus an nginx reverse proxy.
set -e

# Resolve the directory this script lives in (quoted so paths with spaces work).
pushd "$(dirname "$0")" > /dev/null
SCRIPTPATH=$(pwd -P)
popd > /dev/null
# Fixed: variable name was misspelled "SCRITPT".
SCRIPT=$(basename "$0")

# Registry image/container settings.
REGISTER_IMAGE="registry"
REGISTER_VERSION="latest"
REGISTER_DOMAIN="repo.dashuai.life"
REGISTER_NAME="mageregistry"
EXPOSEDPORT=5000

# Nginx reverse-proxy image/container settings.
NGINX_IMAGE="nginx"
NGINX_VERSION="latest"
NGINX_NAME="nginxre"
NGINX_PORT=8080

# Host directories holding generated configs and registry data.
REGISTRYHOME="/opt/registry"
NGINXHOME="/opt/nginx"
# Installs the docker registry: pulls the image, generates config.yml and
# creates the on-host storage directory. Exits non-zero on failure.
function Deployment(){
    echo -e "pull the registry image"
    PULLVERSION=$(docker pull "${REGISTER_IMAGE}:${REGISTER_VERSION}")
    # On success the last word of "docker pull" output echoes the image ref.
    PULLRESULT=$(echo ${PULLVERSION} | awk '{print $NF}')
    if [ "${PULLRESULT}" == "${REGISTER_IMAGE}:${REGISTER_VERSION}" ];then
        # Fixed: the original tested "$$" (the shell's PID, always non-empty)
        # instead of checking whether mkdir actually succeeded.
        if mkdir -p "${REGISTRYHOME}/config"; then
            cd "${REGISTRYHOME}/config"
            # Fixed: write with ">" instead of ">>" so re-running the installer
            # does not append a duplicate YAML document to config.yml.
            cat > config.yml <<EOF
version: 0.1
log:
  fields:
    service: registry
storage:
  cache:
    blobdescriptor: inmemory
  filesystem:
    #you'd better to change the path to the Net store,like GlustFS and Ceph.
    # Fixed: this path is interpreted INSIDE the container; it must be the
    # mounted /var/lib/registry (backed by ${REGISTRYHOME}/register on the
    # host, see RunRegistry), not a host path that does not exist in the
    # container.
    rootdirectory: /var/lib/registry
http:
  addr: :5000
  headers:
    X-Content-Type-Options: [nosniff]
health:
  storagedriver:
    enabled: true
    interval: 10s
    threshold: 3
EOF
            # Host-side storage directory that RunRegistry bind mounts.
            mkdir -p "${REGISTRYHOME}/register"
            echo -e "docker registry install successed.."
        else
            echo -e "${REGISTRYHOME}:config not exit..."
            exit 1
        fi
    else
        echo -e "docker pull ${REGISTER_IMAGE}:${REGISTER_VERSION} failed ..."
        exit 1
    fi
}
# Installs and starts the nginx reverse proxy in front of the registry.
# Generates nginx.conf on the host and bind mounts it into the container.
function DeployNginx(){
    echo -e "docker pull ${NGINX_IMAGE}:${NGINX_VERSION}"
    PULLNGINXVERSION=$(docker pull "${NGINX_IMAGE}:${NGINX_VERSION}")
    PULLNGINXRESULT=$(echo ${PULLNGINXVERSION} | awk '{print $NF}')
    if [ "${PULLNGINXRESULT}" == "${NGINX_IMAGE}:${NGINX_VERSION}" ];then
        for i in log config;
        do
            mkdir -p "${NGINXHOME}/${i}"
        done
    else
        # Fixed: the original fell through and kept going after a failed pull.
        echo -e "docker pull ${NGINX_IMAGE}:${NGINX_VERSION} failed ..."
        exit 1
    fi
    # Container IP of the running registry; nginx proxies straight to it.
    REGISTER_IP=$(docker ps -a -f name="${REGISTER_NAME}" -f status=running | grep "${REGISTER_NAME}" | xargs docker inspect --format='{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' )
    # Fixed: quoted the expansion -- with an empty value the unquoted test
    # failed with "[: =: unary operator expected" instead of taking the branch.
    if [ "$REGISTER_IP" = "" ];then
        echo -e "Get registry IP failed.."
        exit 1
    fi
    cd "${NGINXHOME}/config"
    # The heredoc delimiter is deliberately unquoted so ${NGINXHOME},
    # ${NGINX_PORT}, ${REGISTER_DOMAIN} and ${REGISTER_IP} expand.
    # Fixed: nginx runtime variables ($remote_addr, $http_host, ...) are now
    # escaped with "\" -- previously the shell expanded them to empty strings,
    # producing a broken nginx.conf.
    # Fixed: the original config was also missing a closing brace (http{} was
    # never closed), nested "upstream" inside a server{} block, referenced
    # unmounted host paths for logs/mime.types, and wrote "{REGISTER_DOMAIN}"
    # without the "$".
    # Fixed: ">" instead of ">>" to avoid duplicating the config on re-runs.
    cat > nginx.conf << EOF
user nginx;
worker_processes auto;
error_log /var/log/nginx/error.log;
pid /tmp/nginx.pid;
events {
    worker_connections 1024;
}
http {
    log_format main '\$remote_addr - \$remote_user [\$time_local] "\$request" '
                    '\$status \$body_bytes_sent "\$http_referer" '
                    '"\$http_user_agent" "\$http_x_forwarded_for"';
    access_log /var/log/nginx/access.log main;
    sendfile on;
    tcp_nopush on;
    tcp_nodelay on;
    keepalive_timeout 65;
    types_hash_max_size 2048;
    include /etc/nginx/mime.types;
    default_type application/octet-stream;
    # upstream must be declared at http{} level, not inside server{}.
    upstream docker-registry {
        server ${REGISTER_IP}:5000;
    }
    server {
        listen ${NGINX_PORT} default_server;
        listen [::]:${NGINX_PORT} default_server;
        server_name ${REGISTER_DOMAIN};
        location / {
        }
        location /v2/ {
            # Do not allow connections from docker 1.5 and earlier
            # docker pre-1.6.0 did not properly set the user agent on ping, catch "Go *" user agents
            if (\$http_user_agent ~ "^(docker\/1\.(3|4|5(?!\.[0-9]-dev))|Go ).*\$" ) {
                return 404;
            }
            proxy_pass http://docker-registry;
            proxy_set_header Host \$http_host; # required for docker client's sake
            proxy_set_header X-Real-IP \$remote_addr; # pass on real client's IP
            proxy_set_header X-Forwarded-For \$proxy_add_x_forwarded_for;
            proxy_set_header X-Forwarded-Proto \$scheme;
            proxy_read_timeout 900;
        }
        error_page 404 /404.html;
        location = /40x.html {
        }
        error_page 500 502 503 504 /50x.html;
        location = /50x.html {
        }
    }
    server {
        # NOTE(review): listens on 443 but no "ssl" flag or certificate is
        # configured, so this serves plain HTTP on the HTTPS port. Add
        # ssl_certificate/ssl_certificate_key for real TLS.
        listen 443;
        server_name ${REGISTER_DOMAIN};
        # disable any limits to avoid HTTP 413 for large image uploads
        client_max_body_size 0;
        # required to avoid HTTP 411: see Issue #1486 (https://github.com/docker/docker/issues/1486)
        chunked_transfer_encoding on;
        location /v2/ {
            # Do not allow connections from docker 1.5 and earlier
            # docker pre-1.6.0 did not properly set the user agent on ping, catch "Go *" user agents
            if (\$http_user_agent ~ "^(docker\/1\.(3|4|5(?!\.[0-9]-dev))|Go ).*\$" ) {
                return 404;
            }
            proxy_pass http://docker-registry;
            proxy_set_header Host \$http_host; # required for docker client's sake
            proxy_set_header X-Real-IP \$remote_addr; # pass on real client's IP
            proxy_set_header X-Forwarded-For \$proxy_add_x_forwarded_for;
            proxy_set_header X-Forwarded-Proto \$scheme;
            proxy_read_timeout 900;
        }
    }
}
EOF
    cd "${NGINXHOME}"
    # Kept on the host for reference; the container uses the stock
    # /etc/nginx/mime.types shipped with the official image (this host path is
    # not mounted into the container). Quoted delimiter: no expansion needed.
    cat > mime.types <<'EOF'
types {
    text/html html htm shtml;
    text/css css;
    text/xml xml;
    image/gif gif;
    image/jpeg jpeg jpg;
    application/javascript js;
    application/atom+xml atom;
    application/rss+xml rss;
    text/mathml mml;
    text/plain txt;
    text/vnd.sun.j2me.app-descriptor jad;
    text/vnd.wap.wml wml;
    text/x-component htc;
    image/png png;
    image/svg+xml svg svgz;
    image/tiff tif tiff;
    image/vnd.wap.wbmp wbmp;
    image/webp webp;
    image/x-icon ico;
    image/x-jng jng;
    image/x-ms-bmp bmp;
    font/woff woff;
    font/woff2 woff2;
    application/java-archive jar war ear;
    application/json json;
    application/mac-binhex40 hqx;
    application/msword doc;
    application/pdf pdf;
    application/postscript ps eps ai;
    application/rtf rtf;
    application/vnd.apple.mpegurl m3u8;
    application/vnd.google-earth.kml+xml kml;
    application/vnd.google-earth.kmz kmz;
    application/vnd.ms-excel xls;
    application/vnd.ms-fontobject eot;
    application/vnd.ms-powerpoint ppt;
    application/vnd.oasis.opendocument.graphics odg;
    application/vnd.oasis.opendocument.presentation odp;
    application/vnd.oasis.opendocument.spreadsheet ods;
    application/vnd.oasis.opendocument.text odt;
    application/vnd.openxmlformats-officedocument.presentationml.presentation
        pptx;
    application/vnd.openxmlformats-officedocument.spreadsheetml.sheet
        xlsx;
    application/vnd.openxmlformats-officedocument.wordprocessingml.document
        docx;
    application/vnd.wap.wmlc wmlc;
    application/x-7z-compressed 7z;
    application/x-cocoa cco;
    application/x-java-archive-diff jardiff;
    application/x-java-jnlp-file jnlp;
    application/x-makeself run;
    application/x-perl pl pm;
    application/x-pilot prc pdb;
    application/x-rar-compressed rar;
    application/x-redhat-package-manager rpm;
    application/x-sea sea;
    application/x-shockwave-flash swf;
    application/x-stuffit sit;
    application/x-tcl tcl tk;
    application/x-x509-ca-cert der pem crt;
    application/x-xpinstall xpi;
    application/xhtml+xml xhtml;
    application/xspf+xml xspf;
    application/zip zip;
    application/octet-stream bin exe dll;
    application/octet-stream deb;
    application/octet-stream dmg;
    application/octet-stream iso img;
    application/octet-stream msi msp msm;
    audio/midi mid midi kar;
    audio/mpeg mp3;
    audio/ogg ogg;
    audio/x-m4a m4a;
    audio/x-realaudio ra;
    video/3gpp 3gpp 3gp;
    video/mp2t ts;
    video/mp4 mp4;
    video/mpeg mpeg mpg;
    video/quicktime mov;
    video/webm webm;
    video/x-flv flv;
    video/x-m4v m4v;
    video/x-mng mng;
    video/x-ms-asf asx asf;
    video/x-ms-wmv wmv;
    video/x-msvideo avi;
}
EOF
    echo -e "start nginx ..."
    # Fixed: publish the port explicitly; "-p ${NGINX_PORT}" alone asks docker
    # for a RANDOM host port.
    docker run -d -p ${NGINX_PORT}:${NGINX_PORT} \
        -v "${NGINXHOME}/config/nginx.conf":/etc/nginx/nginx.conf \
        -v "$NGINXHOME/log":/var/log/nginx \
        --name="$NGINX_NAME" \
        "${NGINX_IMAGE}:${NGINX_VERSION}"
}
# Launches the registry container: publishes ${EXPOSEDPORT} onto the
# container's 5000, bind mounts the generated config and the host storage
# directory, and enables blob deletion via the registry's env switch.
function RunRegistry(){
    echo -e "start registory..."
    docker run -d \
        --name="${REGISTER_NAME}" \
        --restart=always \
        -p ${EXPOSEDPORT}:5000 \
        -e REGISTRY_STORAGE_DELETE_ENABLED=true \
        -v "${REGISTRYHOME}/config/config.yml":/etc/docker/registry/config.yml \
        -v "${REGISTRYHOME}/register":/var/lib/registry \
        ${REGISTER_IMAGE}:${REGISTER_VERSION}
}
# Reports whether the registry container is running. When it is not, offers an
# interactive menu to either remove the dead container (and reinstall) or
# leave it for manual debugging.
function GetStatus(){
    Getstatus=$(docker ps -a -f name="${REGISTER_NAME}" -f status=running | grep "${REGISTER_NAME}")
    if [ "${Getstatus}" != "" ];then
        echo "Registry is running..."
        echo ""
        # Fixed: quoted the status (preserves its column spacing) and used -e
        # so "\n" is an escape; the original printed a literal backslash-n.
        echo -e "${Getstatus}\n"
        exit 0
    else
        echo "Registry start failed...Please used command 'docker logs ${REGISTER_NAME}' to debug or kill the abnormal instance."
        PS3="Enter you choice: "
        select opt in "remove" "debug";do
            case ${opt} in
                remove)
                    KillFailedContained=$(docker rm -f ${REGISTER_NAME})
                    Deployment
                    ;;
                debug)
                    echo "Please use 'docker logs ${REGISTER_NAME}' to debug"
                    exit 0
                    ;;
                *)
                    # Fixed: the original pattern "3)" could never match --
                    # on invalid input "select" leaves $opt empty, so the menu
                    # looped silently. Catch everything else here instead.
                    # (Also dropped the unused _INPUT regex variable.)
                    echo "Input error.."
                    ;;
            esac
        done
    fi
}
# Prints the command line help for this installer script.
function ShowUsage(){
    printf '%s\n' \
        "usage:" \
        "-h : Show this message" \
        "-d : Install registry service on localhost " \
        "-r : Run registry service on localhost " \
        "-s : Check to see if the registry service is installed locally. " \
        ""
}
# ---- entry point: dispatch on the command line flags ----
# With no arguments, just print the usage.
if [ "${1}" == "" ];then
    ShowUsage
fi
while [ "${1}" != "" ];do
    case ${1} in
        -h| --help)
            ShowUsage
            ;;
        -d | --deploy)
            Deployment
            RunRegistry
            ;;
        -r | --run)
            GetStatus
            RunRegistry
            ;;
        -s | --status )
            GetStatus
            ;;
        *) # unknown options end up here (translated from the Chinese comment)
            echo "Option requires an argument." >&2
            # Fixed: exit non-zero on an unknown option (was "exit 0").
            exit 1
            ;;
    esac
    shift
done
|
#!/bin/bash
# ========== Experiment Seq. Idx. 2895 / 56.2.3.0 / N. 0 - _S=56.2.3.0 D1_N=56 a=1 b=-1 c=-1 d=-1 e=1 f=1 D3_N=4 g=1 h=-1 i=-1 D4_N=3 j=3 D5_N=0 ==========
# Presumably generated by an experiment driver: runs metric computation for
# one configuration and appends a row to the shared ANOVA metrics table.
set -u
# Prints header
echo -e '\n\n========== Experiment Seq. Idx. 2895 / 56.2.3.0 / N. 0 - _S=56.2.3.0 D1_N=56 a=1 b=-1 c=-1 d=-1 e=1 f=1 D3_N=4 g=1 h=-1 i=-1 D4_N=3 j=3 D5_N=0 ==========\n\n'
# Prepares all environment variables
JBHI_DIR="$HOME/jbhi-special-issue"
RESULTS_DIR="$JBHI_DIR/results"
# The "No"/"Yes" literals are baked in per-experiment by the generator;
# this instance was generated with the SVM variant disabled.
if [[ "No" == "Yes" ]]; then
    SVM_SUFFIX="svm"
    PREDICTIONS_FORMAT="isbi"
else
    SVM_SUFFIX="nosvm"
    PREDICTIONS_FORMAT="titans"
fi
RESULTS_PREFIX="$RESULTS_DIR/deep.56.layer.4.test.3.index.2895.$SVM_SUFFIX"
RESULTS_PATH="$RESULTS_PREFIX.results.txt"
# ...variables expected by jbhi-checks.include.sh and jbhi-footer.include.sh
SOURCES_GIT_DIR="$JBHI_DIR/jbhi-special-issue"
LIST_OF_INPUTS="$RESULTS_PREFIX.finish.txt"
# ...this experiment is a little different --- only one master procedure should run, so there's only a master lock file
METRICS_TEMP_PATH="$RESULTS_DIR/this_results.anova.txt"
METRICS_PATH="$RESULTS_DIR/all_results.anova.txt"
START_PATH="$METRICS_PATH.start.txt"
# "-" sentinel: no finish marker is written for this experiment type.
FINISH_PATH="-"
LOCK_PATH="$METRICS_PATH.running.lock"
LAST_OUTPUT="$METRICS_PATH"
mkdir -p "$RESULTS_DIR"
#
# Assumes that the following environment variables where initialized
# SOURCES_GIT_DIR="$JBHI_DIR/jbhi-special-issue"
# LIST_OF_INPUTS="$DATASET_DIR/finish.txt:$MODELS_DIR/finish.txt:"
# START_PATH="$OUTPUT_DIR/start.txt"
# FINISH_PATH="$OUTPUT_DIR/finish.txt"
# LOCK_PATH="$OUTPUT_DIR/running.lock"
# LAST_OUTPUT="$MODEL_DIR/[[[:D1_MAX_NUMBER_OF_STEPS:]]].meta"
EXPERIMENT_STATUS=1
STARTED_BEFORE=No
# Checks if code is stable, otherwise alerts scheduler
pushd "$SOURCES_GIT_DIR" >/dev/null
GIT_STATUS=$(git status --porcelain)
GIT_COMMIT=$(git log | head -n 1)
popd >/dev/null
# Refuse to run with uncommitted changes: results must be reproducible from a
# specific commit (exit 162 tells the scheduler "sources dirty").
if [ "$GIT_STATUS" != "" ]; then
    echo 'FATAL: there are uncommitted changes in your git sources file' >&2
    echo '       for reproducibility, experiments only run on committed changes' >&2
    echo >&2
    echo '       Git status returned:'>&2
    echo "$GIT_STATUS" >&2
    exit 162
fi
# The experiment is already finished - exits with special code so scheduler won't retry
if [[ "$FINISH_PATH" != "-" ]]; then
    if [[ -e "$FINISH_PATH" ]]; then
        echo 'INFO: this experiment has already finished' >&2
        exit 163
    fi
fi
# The experiment is not ready to run due to dependencies - alerts scheduler
# (exit 164) when any ':'-separated input path does not exist yet.
if [[ "$LIST_OF_INPUTS" != "" ]]; then
    # Fixed: the original wrote `IFS=':' tokens_of_input=( ... )` -- with no
    # command word both assignments persist, leaving IFS=':' for the REST of
    # the script and corrupting all later word splitting. Save and restore it.
    _OLD_IFS="$IFS"
    IFS=':'
    tokens_of_input=( $LIST_OF_INPUTS )
    IFS="$_OLD_IFS"
    input_missing=No
    # Quoted [@] expansion so each path is checked exactly as split above.
    for input_to_check in "${tokens_of_input[@]}"; do
        if [[ ! -e "$input_to_check" ]]; then
            echo "ERROR: input $input_to_check missing for this experiment" >&2
            input_missing=Yes
        fi
    done
    if [[ "$input_missing" != No ]]; then
        exit 164
    fi
fi
# Sets trap to return error code if script is interrupted before successful finish
LOCK_SUCCESS=No
FINISH_STATUS=161
# Runs on every exit: releases the lock (only if we acquired it) and maps the
# internal FINISH_STATUS onto the scheduler's exit-code protocol:
#   160 = success, 161 = interrupted/failed (default), 165 = lock held elsewhere.
function finish_trap {
    if [[ "$LOCK_SUCCESS" == "Yes" ]]; then
        rmdir "$LOCK_PATH" &> /dev/null
    fi
    if [[ "$FINISH_STATUS" == "165" ]]; then
        echo 'WARNING: experiment discontinued because other process holds its lock' >&2
    else
        if [[ "$FINISH_STATUS" == "160" ]]; then
            echo 'INFO: experiment finished successfully' >&2
        else
            # On failure, drop the finish marker so the scheduler will retry.
            [[ "$FINISH_PATH" != "-" ]] && rm -f "$FINISH_PATH"
            echo 'ERROR: an error occurred while executing the experiment' >&2
        fi
    fi
    exit "$FINISH_STATUS"
}
trap finish_trap EXIT
# While running, locks experiment so other parallel threads won't attempt to run it too
# (mkdir is atomic, so exactly one process wins the lock).
if mkdir "$LOCK_PATH" --mode=u=rwx,g=rx,o=rx &>/dev/null; then
    LOCK_SUCCESS=Yes
else
    echo 'WARNING: this experiment is already being executed elsewhere' >&2
    FINISH_STATUS="165"
    exit
fi
# If the experiment was started before, do any cleanup necessary
if [[ "$START_PATH" != "-" ]]; then
    if [[ -e "$START_PATH" ]]; then
        echo 'WARNING: this experiment is being restarted' >&2
        STARTED_BEFORE=Yes
    fi
    #...marks start
    date -u >> "$START_PATH"
    echo GIT "$GIT_COMMIT" >> "$START_PATH"
fi
if [[ "$STARTED_BEFORE" == "Yes" ]]; then
    # If the experiment was started before, do any cleanup necessary
    echo -n
else
    # First run: write the CSV-style header of the shared ANOVA metrics table.
    echo "D1_N;D3_N;D4_N;a;b;c;d;e;f;g;h;i;j;m_ap;m_auc;m_tn;m_fp;m_fn;m_tp;m_tpr;m_fpr;k_ap;k_auc;k_tn;k_fp;k_fn;k_tp;k_tpr;k_fpr;isbi_auc" > "$METRICS_PATH"
fi
# Compute the metrics for this experiment's predictions into a temp file.
python \
    "$SOURCES_GIT_DIR/etc/compute_metrics.py" \
    --metadata_file "$SOURCES_GIT_DIR/data/all-metadata.csv" \
    --predictions_format "$PREDICTIONS_FORMAT" \
    --metrics_file "$METRICS_TEMP_PATH" \
    --predictions_file "$RESULTS_PATH"
EXPERIMENT_STATUS="$?"
# Append this configuration's factor levels plus the computed metrics row.
# NOTE(review): this row is appended even when compute_metrics failed; the
# failure is only reflected in the final exit status -- confirm intended.
echo -n "56;4;3;" >> "$METRICS_PATH"
echo -n "1;-1;-1;-1;1;1;1;-1;-1;3;" >> "$METRICS_PATH"
tail "$METRICS_TEMP_PATH" -n 1 >> "$METRICS_PATH"
#
#...starts training
# Success is reported (exit 160 via finish_trap) only when the metrics script
# returned 0 and the expected output exists.
if [[ "$EXPERIMENT_STATUS" == "0" ]]; then
    if [[ "$LAST_OUTPUT" == "" || -e "$LAST_OUTPUT" ]]; then
        if [[ "$FINISH_PATH" != "-" ]]; then
            date -u >> "$FINISH_PATH"
            echo GIT "$GIT_COMMIT" >> "$FINISH_PATH"
        fi
        FINISH_STATUS="160"
    fi
fi
|
#!/bin/sh
# Watchdog for the passwall service: paths under /var/etc/passwall hold the
# runtime binaries and the currently selected TCP/UDP node ids.
CONFIG=passwall
TMP_PATH=/var/etc/$CONFIG
TMP_BIN_PATH=$TMP_PATH/bin
TMP_ID_PATH=$TMP_PATH/id
# Read option $2 from the named uci section $1 of the passwall config,
# printing the fallback $3 when the option is unset.
config_n_get() {
	local _value=$(uci -q get $CONFIG.$1.$2 2>/dev/null)
	echo ${_value:=$3}
}
# Read option $2 from the $4-th (default: first) anonymous uci section of
# type $1, printing the fallback $3 when the option is unset.
config_t_get() {
	local _idx=0
	[ -n "$4" ] && _idx=$4
	local _value=$(uci -q get $CONFIG.@$1[$_idx].$2 2>/dev/null)
	echo ${_value:=$3}
}
# Bail out if enough monitor instances are already running (self + 2).
if [ "$(top -bn1 | grep -v grep | grep $CONFIG/monitor.sh | wc -l)" -gt 2 ]; then
	exit 1
fi
# Only run when passwall and its watchdog daemon are both enabled in uci.
ENABLED=$(config_t_get global enabled 0)
[ "$ENABLED" != 1 ] && return 1
ENABLED=$(config_t_get global_delay start_daemon 0)
[ "$ENABLED" != 1 ] && return 1
sleep 58s
# Every ~58s: verify each configured component still has a live process and
# restart the whole service the moment one is missing.
while [ "$ENABLED" -eq 1 ]
do
	#TCP
	[ -f "$TMP_ID_PATH/TCP" ] && {
		TCP_NODE=$(cat $TMP_ID_PATH/TCP)
		if [ "$TCP_NODE" != "nil" ]; then
			#kcptun
			use_kcp=$(config_n_get $TCP_NODE use_kcp 0)
			if [ $use_kcp -gt 0 ]; then
				icount=$(top -bn1 | grep -v grep | grep "$TMP_BIN_PATH/kcptun" | grep -i "tcp" | wc -l)
				if [ $icount = 0 ]; then
					/etc/init.d/$CONFIG restart
					exit 0
				fi
			fi
			icount=$(top -bn1 | grep -v -E 'grep|kcptun' | grep "$TMP_BIN_PATH" | grep -i "TCP" | wc -l)
			if [ $icount = 0 ]; then
				/etc/init.d/$CONFIG restart
				exit 0
			fi
		fi
	}
	#udp
	[ -f "$TMP_ID_PATH/UDP" ] && {
		UDP_NODE=$(cat $TMP_ID_PATH/UDP)
		# Fixed: the original did `[ "$UDP_NODE" == "tcp" ] && continue`,
		# which jumped back to the TOP of the while loop, skipping the dns
		# and haproxy checks AND the bottom "sleep 58s" -- a 100%-CPU busy
		# loop whenever UDP was set to follow the TCP node. Skip only the
		# UDP check instead.
		if [ "$UDP_NODE" != "nil" ] && [ "$UDP_NODE" != "tcp" ]; then
			[ "$UDP_NODE" == "tcp_" ] && UDP_NODE=$TCP_NODE
			icount=$(top -bn1 | grep -v grep | grep "$TMP_BIN_PATH" | grep -i "UDP" | wc -l)
			if [ $icount = 0 ]; then
				/etc/init.d/$CONFIG restart
				exit 0
			fi
		fi
	}
	#dns -- the local resolver is expected to listen on port 7913.
	dns_mode=$(config_t_get global dns_mode)
	if [ "$dns_mode" != "nonuse" ] && [ "$dns_mode" != "custom" ] && [ "$dns_mode" != "fake_ip" ]; then
		icount=$(netstat -apn | grep 7913 | wc -l)
		if [ $icount = 0 ]; then
			/etc/init.d/$CONFIG restart
			exit 0
		fi
	fi
	[ -f "$TMP_BIN_PATH/chinadns-ng" ] && {
		icount=$(top -bn1 | grep -v grep | grep $TMP_BIN_PATH/chinadns-ng | wc -l)
		if [ $icount = 0 ]; then
			/etc/init.d/$CONFIG restart
			exit 0
		fi
	}
	#haproxy
	use_haproxy=$(config_t_get global_haproxy balancing_enable 0)
	if [ $use_haproxy -gt 0 ]; then
		icount=$(top -bn1 | grep -v grep | grep "$TMP_BIN_PATH/haproxy" | wc -l)
		if [ $icount = 0 ]; then
			/etc/init.d/$CONFIG restart
			exit 0
		fi
	fi
	sleep 58s
done
|
#!/usr/bin/env bash
# Installs the DigitalOcean CLI (doctl) if it is not already on PATH.
# NOTE(review): relies on helpers defined elsewhere in this setup --
# command_exists, line, github_get_latest_release_version, sudo_askpass --
# so it must be sourced/run from that environment.
# Is doctl already installed
if ! command_exists doctl; then
	# Print a message to the console
	line "Installing doctl..."
	# Get the latest version of the digitalocean cli
	DOCTL_LATEST_VERSION=$(github_get_latest_release_version "digitalocean/doctl")
	# Strip the leading "v" of the tag (v1.2.3 -> 1.2.3) for the asset name.
	DOCTL_VERSION_NUMBER=${DOCTL_LATEST_VERSION:1}
	# Create a temporary directory
	DOCTL_DOWNLOAD_DIR=$(mktemp -dt doctl.XXXXXXXX)
	# Download the installer
	curl -fsSL "https://github.com/digitalocean/doctl/releases/download/$DOCTL_LATEST_VERSION/doctl-$DOCTL_VERSION_NUMBER-linux-amd64.tar.gz" > $DOCTL_DOWNLOAD_DIR/doctl.tar.gz
	# Extract the archive
	tar zxvf $DOCTL_DOWNLOAD_DIR/doctl.tar.gz -C $DOCTL_DOWNLOAD_DIR
	# Move the binary to /usr/local/bin
	sudo_askpass mv $DOCTL_DOWNLOAD_DIR/doctl /usr/local/bin
	# Cleanup
	rm -rf $DOCTL_DOWNLOAD_DIR
fi
|
<reponame>Kun-a-Kun/Algorithms-Fourth-Edition-Exercises
package Chapter1_4Text;
import java.util.Scanner;
/**
 * Console demo (Chapter 1.4 exercise): prompts for input and echoes back the
 * first whitespace-delimited token read from standard input.
 */
public class TestScanner {
    public static void main(String[] args) {
        // try-with-resources closes the Scanner even if reading fails;
        // the original never closed it.
        try (Scanner scanner = new Scanner(System.in)) {
            System.out.println("请输入一个字符串");
            System.out.println("您输入的字符串是:" + scanner.next());
        }
    }
}
|
# -*- coding: utf-8 -*-
from django.db import transaction
from django.utils import timezone
from app.revisioner.actions import created
from app.revisioner.actions import modified
from app.revisioner.actions import dropped
from app.definitions.models import Table, Column, Index
from utils.contenttypes import get_content_type_for_model
from utils.shortcuts import run_raw_sql
def commit_revisions(datastore, run, logger):
    """Apply all pending revisions for the most recent run of a datastore.

    Created, modified and dropped revisions are applied in that order inside a
    single database transaction, and every applied revision is stamped with
    the commit time. No-op when the run has already been committed.

    Args:
        datastore: Datastore whose schema the revisions describe.
        run: The revisioner run whose revisions should be committed.
        logger: Logger used for progress reporting.
    """
    logger.info('Starting commit process.')

    if run.finished:
        logger.info('Run has already been committed.')
        return

    with transaction.atomic():
        # The three phases share the exact same apply/stamp sequence, so run
        # them through one loop. Order matters: creations, then
        # modifications, then drops.
        phases = (
            ('created', created),
            ('modified', modified),
            ('dropped', dropped),
        )
        for verb, action_module in phases:
            for model_name, action_class in action_module.get_actions():
                logger.info(f'Starting commit process for {verb} {model_name} objects.')
                action = action_class(run, datastore, logger)
                action.apply()
                action.revisions.update(applied_on=timezone.now())

        # We remove all of the "Column was created" revisions because they aren't super
        # useful from a debugging or UI perspective.
        run_raw_sql(
            '''
            DELETE FROM revisioner_revision
            WHERE applied_on IS NOT NULL
            AND action = 1
            AND resource_type_id IN (%(column)s, %(index)s)
            AND run_id = %(run)s
            AND parent_resource_revision_id IN (
                SELECT revision_id
                FROM revisioner_revision
                WHERE action = 1
                AND run_id = %(run)s
                AND resource_type_id = %(table)s
            )
            ''',
            {
                'column': get_content_type_for_model(Column).id,
                'index': get_content_type_for_model(Index).id,
                'table': get_content_type_for_model(Table).id,
                'run': run.id,
            },
        )
        run.mark_as_finished()

    logger.info('Run has been committed.')
|
#!/bin/sh
# Runs the Galaxy dataset cleanup from the repository root (two levels up from
# this script); extra arguments are passed through and all output is appended
# to delete_datasets.log.
# NOTE(review): flag meanings assumed from naming (-d 10 = age cutoff in days,
# -6 = the script's delete-datasets action, -r = remove from disk) -- confirm
# against cleanup_datasets.py's option table.
cd `dirname $0`/../..
python ./scripts/cleanup_datasets/cleanup_datasets.py ./config/galaxy.ini -d 10 -6 -r $@ >> ./scripts/cleanup_datasets/delete_datasets.log
|
#!/usr/bin/env bash
# Enables Apache's mod_headers module (required for Header directives).
# Must run with root privileges; restart/reload Apache for it to take effect.
a2enmod headers
|
#!/usr/bin/env bash
# Builds the readline example: compiles example.zig against the system C
# headers and links libc plus libreadline.
zig build-exe example.zig -I/usr/include -I/usr/include/x86_64-linux-gnu/ -lc -lreadline
|
# Set environment variables for running Hadoop on Amazon EC2 here. All are required.
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Your Amazon Account Number.
AWS_ACCOUNT_ID=
# Your Amazon AWS access key.
AWS_ACCESS_KEY_ID=
# Your Amazon AWS secret access key.
AWS_SECRET_ACCESS_KEY=
# Location of EC2 keys.
# The default setting is probably OK if you set up EC2 following the Amazon Getting Started guide.
EC2_KEYDIR=`dirname "$EC2_PRIVATE_KEY"`
# The EC2 key name used to launch instances.
# The default is the value used in the Amazon Getting Started guide.
KEY_NAME=gsg-keypair
# Where your EC2 private key is stored (created when following the Amazon Getting Started guide).
# You need to change this if you don't store this with your other EC2 keys.
PRIVATE_KEY_PATH=`echo "$EC2_KEYDIR"/"id_rsa-$KEY_NAME"`
# SSH options used when connecting to EC2 instances.
# (echo here just assembles the option string; "-i" is printed literally.)
SSH_OPTS=`echo -i "$PRIVATE_KEY_PATH" -o StrictHostKeyChecking=no -o ServerAliveInterval=30`
# The version of Hadoop to use.
HADOOP_VERSION=0.19.0
# The Amazon S3 bucket where the Hadoop AMI is stored.
# The default value is for public images, so can be left if you are using running a public image.
# Change this value only if you are creating your own (private) AMI
# so you can store it in a bucket you own.
S3_BUCKET=hadoop-images
# Enable public access to JobTracker and TaskTracker web interfaces
ENABLE_WEB_PORTS=true
# The script to run on instance boot.
USER_DATA_FILE=hadoop-ec2-init-remote.sh
# The EC2 instance type: m1.small, m1.large, m1.xlarge
INSTANCE_TYPE="m1.small"
#INSTANCE_TYPE="m1.large"
#INSTANCE_TYPE="m1.xlarge"
#INSTANCE_TYPE="c1.medium"
#INSTANCE_TYPE="c1.xlarge"
# The EC2 group master name. CLUSTER is set by calling scripts
CLUSTER_MASTER=$CLUSTER-master
# Cached values for a given cluster
# NOTE(review): "hadooop" (triple o) is misspelled but used consistently for
# all three cache paths, so it works; renaming would orphan existing caches.
MASTER_PRIVATE_IP_PATH=~/.hadooop-private-$CLUSTER_MASTER
MASTER_IP_PATH=~/.hadooop-$CLUSTER_MASTER
MASTER_ZONE_PATH=~/.hadooop-zone-$CLUSTER_MASTER
#
# The following variables are only used when creating an AMI.
#
# The version number of the installed JDK.
JAVA_VERSION=1.6.0_07
# SUPPORTED_ARCHITECTURES = ['i386', 'x86_64']
# The download URL for the Sun JDK. Visit http://java.sun.com/javase/downloads/index.jsp and get the URL for the "Linux self-extracting file".
# Small/medium instance types are 32-bit; everything else gets the 64-bit AMI.
if [ "$INSTANCE_TYPE" == "m1.small" -o "$INSTANCE_TYPE" == "c1.medium" ]; then
  ARCH='i386'
  BASE_AMI_IMAGE="ami-2b5fba42"  # ec2-public-images/fedora-8-i386-base-v1.07.manifest.xml
  JAVA_BINARY_URL=''
else
  ARCH='x86_64'
  BASE_AMI_IMAGE="ami-2a5fba43"  # ec2-public-images/fedora-8-x86_64-base-v1.07.manifest.xml
  JAVA_BINARY_URL=''
fi
# High-CPU instance types need an explicit kernel image.
if [ "$INSTANCE_TYPE" == "c1.medium" ]; then
  AMI_KERNEL=aki-9b00e5f2  # ec2-public-images/vmlinuz-2.6.18-xenU-ec2-v1.0.i386.aki.manifest.xml
fi
if [ "$INSTANCE_TYPE" == "c1.xlarge" ]; then
  AMI_KERNEL=aki-9800e5f1  # ec2-public-images/vmlinuz-2.6.18-xenU-ec2-v1.0.x86_64.aki.manifest.xml
fi
if [ "$AMI_KERNEL" != "" ]; then
  KERNEL_ARG="--kernel ${AMI_KERNEL}"
fi
|
<reponame>ohduran/urbanosisto<filename>src/components/Header.js<gh_stars>0
import React from "react";
import { Link } from 'gatsby';
import algoliasearch from 'algoliasearch/lite';
import { InstantSearch, SearchBox } from 'react-instantsearch-dom';
import '../styles/index.css';
import {ProductConsumer} from "../context/ProductContext"
import SearchHits from '../components/SearchHits'
import IconCart from '../icons/Cart';
import IconUser from '../icons/User';
import IconMenu from '../icons/Menu';
// Algolia client for the product index; the second argument appears to be the
// public search-only API key (safe to ship to the browser -- TODO confirm).
const searchClient = algoliasearch('6NOA2X35JA', 'dcb1de01d431e5f1a041cd815879d3fb');
// Show the search-hits dropdown only while the search box contains text.
const displayHitsIfSearchBoxIsFilled = () => {
  const query = document.getElementsByClassName('ais-SearchBox-input')[0].value;
  const hits = document.getElementsByClassName('ais-Hits')[0];
  hits.style.display = query.length ? 'block' : 'none';
}
// Hide the search-hits dropdown unconditionally (used when the box is reset).
const resetDisplayHits = () =>{
  document.getElementsByClassName('ais-Hits')[0].style.display = 'none'
}
// Site header: logo, primary navigation (collapsible on mobile), Algolia
// search box, user/cart icons and the mobile menu toggle button.
export default class extends React.Component {
  constructor(props){
    super(props);
    this.toggleClass = this.toggleClass.bind(this);
    // active: whether the mobile navigation menu is expanded.
    this.state = {
      active: false,
    };
  }

  // Toggles the mobile menu open/closed.
  toggleClass() {
    const currentState = this.state.active;
    this.setState({ active: !currentState });
    // NOTE(review): setState is asynchronous, so this logs the PREVIOUS
    // state; looks like leftover debug output.
    console.log(this.state);
  }

  render () {
    return (
      <header className="grid grid-cols-2 md:grid-cols-3 bg-white px-4 py-3 font-family-montserrat-alternates" style={{
        gridTemplateRows: "25px 2fr"
      }}>
        <Link to="/"><h1 className="row-start-1 col-start-1 font-family-montserrat-subrayada text-lg">Urbano Sisto</h1></Link>
        {/* Primary nav: hidden on mobile unless state.active is set. */}
        <nav className={"row-start-2 px-1 pt-2 md:pt-0 md:row-start-1 md:col-start-2 md:flex md:justify-center md:align-center" + (this.state.active? ' ': ' hidden')}>
          <Link to="catalog/novedades" className="block md:inline px-2 font-semibold rounded hover:bg-gray-200">Novedades</Link>
          <Link to="catalog/prendas" className="block md:inline px-2 font-semibold rounded hover:bg-gray-200">Prendas</Link>
          <Link to="catalog/accesorios" className="block md:inline px-2 font-semibold rounded hover:bg-gray-200">Accesorios</Link>
        </nav>
        {/* Right-hand side: search, user icon, cart status, menu toggle. */}
        <nav className="col-start-2 md:col-start-3 flex pl-4 align-center justify-self-end justify-end">
          <Link to="/" className="hidden px-1 sm:px-4 py-1 text-sm">Español</Link>
          <InstantSearch searchClient={searchClient} indexName="Items">
            <SearchBox
              translations={{
                placeholder: 'Buscar'
              }}
              onChange={event => {
                displayHitsIfSearchBoxIsFilled()
              }}
              onReset={event => {
                resetDisplayHits()
              }}
            />
            <SearchHits className="ais-Hits"/>
          </InstantSearch>
          <Link to="/" className="hidden px-1 sm:px-4 py-1"><IconUser className="h-4 w-4" /></Link>
          {/* Cart icon turns green and shows a count when the cart is non-empty. */}
          <ProductConsumer>
            {(value) =>{
              if(value.cart.length > 0){
                return (
                  <Link to="/cart" className="block px-3 sm:px-4 py-1 flex">
                    <IconCart className="text-green-500 h-4 w-4" />
                    <span className="text-sm text-green-500">{value.cart.length}</span>
                  </Link>
                )
              }
              else{
                return (
                  <span className="block px-3 sm:px-4 py-1 flex">
                    <IconCart className="h-4 w-4" />
                  </span>
                )
              }
            }}
          </ProductConsumer>
          {/* Hamburger button: only visible below md, drives toggleClass(). */}
          <button className="block pr-2 py-1 md:hidden" onClick={() => {this.toggleClass()}}>
            <IconMenu className="h-4 w-4" />
          </button>
        </nav>
      </header>
    )
  }
}
|
<filename>AndroidNanoDegreeProject5/app/src/main/java/io/github/marcelbraghetto/deviantartreader/features/collection/ui/CollectionFavouritesActionView.java<gh_stars>0
package io.github.marcelbraghetto.deviantartreader.features.collection.ui;
import android.animation.Animator;
import android.content.Context;
import android.util.AttributeSet;
import android.view.View;
import android.widget.RelativeLayout;
import butterknife.Bind;
import butterknife.ButterKnife;
import butterknife.OnClick;
import io.github.marcelbraghetto.deviantartreader.R;
import io.github.marcelbraghetto.deviantartreader.features.application.MainApp;
import io.github.marcelbraghetto.deviantartreader.framework.foundation.eventbus.contracts.EventBusProvider;
import io.github.marcelbraghetto.deviantartreader.framework.foundation.eventbus.events.CollectionFavouritesEvent;
/**
* Created by <NAME> on 12/03/16.
*
* Custom action view for the favourites toolbar icon.
*/
public class CollectionFavouritesActionView extends RelativeLayout {
    private static final int ANIMATION_DURATION = 400;
    private static final float ANIMATION_ROTATION = 360f;

    @Bind(R.id.favourites_icon_off) View mIconOff;
    @Bind(R.id.favourites_icon_on) View mIconOn;

    @OnClick(R.id.favourites_button)
    void onButtonClick() {
        handleButtonClick();
    }

    // We want to retain this statically so it survives any config changes etc.
    private static boolean sIsOn;

    // Guards against re-entrant clicks while an animation is in flight.
    private boolean mBusy;

    private final EventBusProvider mEventBusProvider = MainApp.getDagger().getEventBusProvider();

    public CollectionFavouritesActionView(Context context) {
        super(context);
        init();
    }

    public CollectionFavouritesActionView(Context context, AttributeSet attrs) {
        super(context, attrs);
        init();
    }

    public CollectionFavouritesActionView(Context context, AttributeSet attrs, int defStyleAttr) {
        super(context, attrs, defStyleAttr);
        init();
    }

    /**
     * Inflates the layout and snaps the two icons to the remembered on/off
     * state without animating (e.g. after a config change).
     */
    private void init() {
        inflate(getContext(), R.layout.collection_favourites_action_view, this);
        ButterKnife.bind(this);

        if(sIsOn) {
            mIconOn.setAlpha(1f);
            mIconOff.setAlpha(0f);
        } else {
            mIconOn.setAlpha(0f);
            mIconOff.setAlpha(1f);
        }
    }

    /**
     * Toggles the favourites mode, runs the icon crossfade animation and
     * broadcasts the new state on the event bus.
     */
    private void handleButtonClick() {
        // Clicking is ignored if the view is currently in the 'busy' state, typically because
        // there is an animation still in progress.
        if(mBusy) {
            return;
        }

        // Get busy!
        mBusy = true;
        sIsOn = !sIsOn;
        animateTo(sIsOn);

        // Broadcast an event notifying of the change in the favourites mode.
        mEventBusProvider.postEvent(new CollectionFavouritesEvent(sIsOn));
    }

    /**
     * Crossfades and counter-rotates the two icons toward the given state.
     * Replaces the two near-identical animateToOn/animateToOff methods; in
     * both originals the busy-clearing listener was attached to mIconOn's
     * animator, which this preserves.
     */
    private void animateTo(final boolean on) {
        mIconOff.clearAnimation();
        mIconOn.clearAnimation();

        mIconOff.animate()
                .rotation(on ? -ANIMATION_ROTATION : ANIMATION_ROTATION)
                .alpha(on ? 0f : 1f)
                .setDuration(ANIMATION_DURATION)
                .start();

        mIconOn.animate()
                .rotation(on ? ANIMATION_ROTATION : -ANIMATION_ROTATION)
                .alpha(on ? 1f : 0f)
                .setDuration(ANIMATION_DURATION)
                .setListener(new Animator.AnimatorListener() {
                    @Override
                    public void onAnimationStart(Animator animation) { }

                    @Override
                    public void onAnimationEnd(Animator animation) {
                        // Animation complete; accept clicks again.
                        mBusy = false;
                    }

                    @Override
                    public void onAnimationCancel(Animator animation) { }

                    @Override
                    public void onAnimationRepeat(Animator animation) { }
                })
                .start();
    }
}
|
# Shared dataset/logging flags plus the experiment hyper-parameters.
CIFAR='--data_path data/ --log_every 100 --dataset cifar100 --cuda --log_dir logs/'
SEED=0
MEMORIES=100
FIRST_INCREMENT=50

########## CIFAR DATASET multi-Pass ##########
##### La-MAML #####
# Runs La-MAML on CIFAR-100 class-incremental learning: increments of 5
# classes, a replay buffer of $MEMORIES samples, learned per-parameter LRs
# (--learn_lr) with second-order meta-updates.
# NOTE(review): "--first-incremet" is missing an "n"; presumably it matches
# the exact flag name declared by main.py's argument parser -- confirm before
# "fixing" the spelling here, or the run will fail on an unknown option.
python3 main.py $CIFAR --model lamaml_cifar \
                    -expt_name lamaml_cifar_baseline_"$FIRST_INCREMENT"_memories_"$MEMORIES"\
                    --memories $MEMORIES \
                    --batch_size 10 \
                    --replay_batch_size 10 \
                    --n_epochs 10 \
                    --opt_lr 0.25 \
                    --alpha_init 0.1 \
                    --opt_wt 0.1 \
                    --glances 1 \
                    --loader class_incremental_loader \
                    --increment 5 \
                    --arch "pc_cnn" \
                    --cifar_batches 5 \
                    --learn_lr \
                    --log_every 3125 \
                    --second_order \
                    --class_order random \
                    --seed $SEED \
                    --grad_clip_norm 1.0 \
                    --calc_test_accuracy \
                    --validation 0.1 \
                    --first-incremet $FIRST_INCREMENT
|
/*
 * @Date: 2022-03-28 11:15:48
 * @LastEditors: huangzh873
 * @LastEditTime: 2022-03-30 22:10:31
 * @FilePath: /vt-cesium2.0/src/libs/cesium-vue.ts
 */
import { App } from 'vue';
import { CesiumRef } from '@/@types/index';

// Injection key for the shared CesiumRef. A Symbol guarantees the
// provide/inject key can never collide with a key provided elsewhere.
// (translated from the original Chinese comment)
export const CESIUM_REF_KEY = Symbol('cesiumRef')

// Augment Vue's component instance type so $cesiumRef/cesiumRef type-check.
declare module '@vue/runtime-core' {
  interface ComponentCustomProperties {
    readonly $cesiumRef: CesiumRef
    readonly cesiumRef: CesiumRef
  }
}

// Vue plugin: creates one empty CesiumRef holder and exposes it both as a
// global property ($cesiumRef) and via provide() under CESIUM_REF_KEY, so
// descendants can inject it and fill in viewer/viewerContainer later.
export default {
  install: function (app: App<Element>): void {
    const cr: CesiumRef = {
      viewer: undefined,
      viewerContainer: undefined,
    }
    // Mounted on the root app via provide, used to retrieve the cesiumRef.
    // (translated from the original Chinese comment)
    app.config.globalProperties.$cesiumRef = cr
    app.provide<CesiumRef>(CESIUM_REF_KEY, cr)
  },
}
#!/usr/bin/env bash
# Start the dask-gateway PBS testing container in the background, with the
# whole git checkout bind-mounted at /working.
set -xe

# Resolve the repository root relative to this script's own location so
# the script works no matter where it is invoked from.
this_dir="$(dirname "${BASH_SOURCE[0]}")"
full_path_this_dir="$(cd "${this_dir}" && pwd)"
git_root="$(cd "${full_path_this_dir}/../../.." && pwd)"

# NOTE(review): port roles inferred from dask-gateway conventions
# (8000 gateway API, 8786 scheduler traffic, 8088 dashboard/proxy) —
# confirm against the daskgateway/testing-pbs image documentation.
# SYS_RESOURCE is needed by the PBS daemons inside the container.
docker run --rm -d \
    --name pbs \
    -h pbs \
    -v "$git_root":/working \
    -p 8000:8000 \
    -p 8786:8786 \
    -p 8088:8088 \
    --cap-add=SYS_RESOURCE \
    daskgateway/testing-pbs
#!/bin/sh
:
# shellcheck disable=SC2039
# Print usage for the `pot mount-in` subcommand.
mount-in-help()
{
    echo "pot mount-in [-hvwr] -p pot -m mnt -f fscomp | -d directory | -z dataset"
    echo ' -h print this help'
    echo ' -v verbose'
    echo ' -p pot : the working pot'
    echo ' -f fscomp : the fs component to be mounted'
    echo ' -z zfs dataset : the zfs dataset to be mounted'
    echo ' -d directory : the directory that has to be mounted in the pot (absolute pathname)'
    echo ' -m mnt : the mount point inside the pot'
    echo ' -w : '"don't use nullfs, but change the zfs mountpoint [usable only with -z and -f](potentially DANGEROUS)"
    echo ' -r : mount in read-only'
}
# Return 0 (true) if the given mount point is already referenced by the
# pot's fscomp.conf (either as a mount target or, erroneously, as a
# source directory); return 1 (false) otherwise.
# $1 pot
# $2 mount point
_is_mountpoint_used()
{
    # shellcheck disable=SC2039
    # Fix: _conf was assigned below but missing from the local list, so it
    # leaked into the caller's (global) scope.
    local _pname _mnt_p _proot _conf
    _pname="$1"
    # Store mount points pot-relative: strip the leading /.
    _mnt_p="${2#/}"
    _conf=$POT_FS_ROOT/jails/$_pname/conf/fscomp.conf
    _proot=$POT_FS_ROOT/jails/$_pname/m
    # Match the mount point both at end-of-line and mid-line (with options).
    if grep -q " $_proot/$_mnt_p$" "$_conf" ||
       grep -q " $_proot/$_mnt_p " "$_conf" ; then
        # mount point already used
        return 0 # true
    fi
    if grep -q "$_proot/$_mnt_p " "$_conf" ; then
        # mountpoint used as source directory ?? wtf
        _error "The mountpoint is already used as source directory mount-in"
        return 0 # true
    fi
    return 1 # false, mountpoint not used
}
# Validate a mount point for a pot and echo its pot-relative real path
# (symlinks resolved inside the pot's own root) on success; return 1 on
# any failure.
# $1 pot
# $2 mount point
_mountpoint_validation()
{
    # shellcheck disable=SC2039
    local _pname _mnt_p _mpdir _mounted _real_mnt
    _pname="$1"
    _mnt_p="$2"
    _mpdir=$POT_FS_ROOT/jails/$_pname/m
    _mounted=false # false
    # Refuse mount points already recorded in the pot's fscomp.conf.
    if _is_mountpoint_used "$_pname" "$_mnt_p" ; then
        _error "The mount point $_mnt_p is already in use"
        return 1 # false
    fi
    # A stopped pot has no filesystem mounted; mount it temporarily so the
    # mount point can be created and resolved, and remember to undo it.
    if ! _is_pot_running "$_pname" ; then
        _mounted=true # true
        if ! _pot_mount "$_pname" >/dev/null ; then
            _error "Pot $_pname failed to mount"
            return 1 # false
        fi
    fi
    # if the mountpoint doesn't exist, make it
    if [ ! -d "$_mpdir/$_mnt_p" ]; then
        if ! mkdir -p "$_mpdir/$_mnt_p" ; then
            if eval $_mounted ; then
                _pot_umount "$_pname" >/dev/null
            fi
            return 1 # false
        fi
    fi
    # realpath runs chroot'ed so symlinks resolve against the pot's root,
    # not the host's.
    _real_mnt=$( chroot "$_mpdir" /bin/realpath "$_mnt_p")
    if eval $_mounted ; then
        _pot_umount "$_pname" >/dev/null
    fi
    echo "$_real_mnt"
    return 0 # true
}
# Return 0 (true) if the host directory may be mounted into the pot:
# it must not live inside the pot's own tree and must not already be
# used as a mount source in the pot's fscomp.conf.
# $1 pot
# $2 directory (absolute pathname)
_directory_validation()
{
    # shellcheck disable=SC2039
    local _pname _dir _proot _conf
    _pname="$1"
    _dir="$2"
    _proot=$POT_FS_ROOT/jails/$_pname
    _conf=$POT_FS_ROOT/jails/$_pname/conf/fscomp.conf
    # Fix: detecting "inside the pot" requires *prefix* stripping (#).
    # The previous suffix stripping (%) could never match a path that
    # starts with $_proot, so pot-internal directories were accepted.
    if [ "$_dir" != "${_dir#$_proot}" ]; then
        # dir is inside the pot
        return 1 # false
    fi
    if grep -q "$_dir " "$_conf" ; then
        # the directory is already used
        return 1 # false
    fi
    return 0 # true
}
# Record a zfs dataset mount in the pot's fscomp.conf and, if the pot is
# running, apply the mount immediately.
# $1 zfs dataset
# $2 pot
# $3 mount point
# $4 mount option (zfs-remount, ro)
_mount_dataset()
{
    # shellcheck disable=SC2039
    local _dset _pname _mnt_p _pdir _opt
    _dset="$1"
    _pname="$2"
    # Strip the leading / (mount points are stored pot-relative).
    _mnt_p="${3#/}"
    _opt="${4}"
    _pdir=$POT_FS_ROOT/jails/$_pname
    _debug "mount zfs dataset:$_dset mnt_p:$_pdir/m/$_mnt_p opt:$_opt"
    # Persist the mount so it is re-applied on every pot start.
    if [ -z "$_opt" ]; then
        ${ECHO} "$_dset $_pdir/m/$_mnt_p" >> "$_pdir/conf/fscomp.conf"
    else
        ${ECHO} "$_dset $_pdir/m/$_mnt_p $_opt" >> "$_pdir/conf/fscomp.conf"
    fi
    if _is_pot_running "$_pname" ; then
        if [ "$_opt" = "zfs-remount" ]; then
            # -w mode: move the dataset's own zfs mountpoint instead of
            # nullfs-mounting it (documented as potentially dangerous).
            zfs set mountpoint="$_pdir/m/$_mnt_p" "$_dset"
        else
            _node=$( _get_zfs_mountpoint "$_dset" )
            # Default to read-write when no option was given.
            if ! mount_nullfs -o "${_opt:-rw}" "$_node" "$_pdir/m/$_mnt_p" ; then
                _error "Error mounting $_node on $_pname"
            else
                _debug "Mounted $_node on $_pname"
            fi
        fi
    fi
}
# Record a host-directory mount in the pot's fscomp.conf and, if the pot
# is running, nullfs-mount it immediately.
# $1 directory
# $2 pot
# $3 mount point
# $4 mount option (ro)
_mount_dir()
{
    # shellcheck disable=SC2039
    local _dir _pname _mnt_p _pdir _opt
    _dir="$1"
    _pname="$2"
    # Strip the leading / (mount points are stored pot-relative).
    _mnt_p="${3#/}"
    _opt="${4}"
    _pdir=$POT_FS_ROOT/jails/$_pname
    _debug "add directory:$_dir mnt_p:$_pdir/m/$_mnt_p opt:$_opt"
    # Persist the mount so it is re-applied on every pot start.
    if [ -z "$_opt" ]; then
        ${ECHO} "$_dir $_pdir/m/$_mnt_p" >> "$_pdir/conf/fscomp.conf"
    else
        ${ECHO} "$_dir $_pdir/m/$_mnt_p $_opt" >> "$_pdir/conf/fscomp.conf"
    fi
    if _is_pot_running "$_pname" ; then
        # Default to read-write when no option was given.
        if ! mount_nullfs -o "${_opt:-rw}" "$_dir" "$_pdir/m/$_mnt_p" ; then
            _error "Error mounting $_dir on $_pname"
        else
            _debug "Mounted $_dir on $_pname"
        fi
    fi
}
# shellcheck disable=SC2039
# Entry point for `pot mount-in`: parse options, validate the inputs and
# dispatch to _mount_dir / _mount_dataset. Exactly one of -f/-d/-z must
# be given together with -p (pot) and -m (mount point).
pot-mount-in()
{
    # Fix: _dset was assigned/used but missing from the local list, so it
    # leaked into the global scope.
    local _pname _fscomp _mnt_p _remount _readonly _opt _dir _real_mnt_p _dset
    OPTIND=1
    _pname=
    _mnt_p=
    _remount="NO"
    _readonly="NO"
    _opt=
    _dir=
    _fscomp=
    _dset=
    logger -t pot -p local0.debug -- "mount-in: $*"
    while getopts "hvf:d:z:p:m:wr" _o ; do
        case "$_o" in
        h)
            mount-in-help
            return 0
            ;;
        v)
            _POT_VERBOSITY=$(( _POT_VERBOSITY + 1))
            ;;
        f)
            _fscomp="$OPTARG"
            ;;
        d)
            _dir="$OPTARG"
            ;;
        z)
            _dset="$OPTARG"
            ;;
        p)
            _pname="$OPTARG"
            ;;
        m)
            _mnt_p="$OPTARG"
            ;;
        w)
            _remount="YES"
            ;;
        r)
            _readonly="YES"
            ;;
        *)
            mount-in-help
            return 1
            ;;
        esac
    done

    # --- argument validation ------------------------------------------------
    if [ -z "$_pname" ]; then
        _error "A pot name is mandatory"
        mount-in-help
        return 1
    fi
    # -f, -d and -z are mutually exclusive and one of them is required.
    if [ -z "$_fscomp" ] && [ -z "$_dir" ] && [ -z "$_dset" ] ; then
        _error "One of -f|-d|-z option has to be used"
        mount-in-help
        return 1
    fi
    if [ -n "$_fscomp" ] && [ -n "$_dir" ]; then
        _error "-f and -d options are mutually exclusive"
        mount-in-help
        return 1
    fi
    if [ -n "$_fscomp" ] && [ -n "$_dset" ]; then
        _error "-f and -z options are mutually exclusive"
        mount-in-help
        return 1
    fi
    if [ -n "$_dir" ] && [ -n "$_dset" ]; then
        _error "-d and -z options are mutually exclusive"
        mount-in-help
        return 1
    fi
    if [ -z "$_mnt_p" ]; then
        _error "A mount point is mandatory"
        mount-in-help
        return 1
    fi
    if ! _is_absolute_path "$_mnt_p" ; then
        _error "The mount point has to be an absolute pathname"
        return 1
    fi
    if [ "${_mnt_p}" = "/" ]; then
        _error "/ is not a valid mount point"
        return 1
    fi
    # Translate -w/-r into the option string passed down to the helpers.
    if [ "$_remount" = "YES" ]; then
        if [ -n "$_dir" ]; then
            _error "Remount cannot be used with directories, but with fscomp only"
            mount-in-help
            return 1
        fi
        _opt="zfs-remount"
        # TODO: investigate
        if [ "$_readonly" = "YES" ]; then
            _info "Readonly and remount are mutually exclusive: readonly considered, remount ignored"
            _remount="NO"
            _opt="ro"
        fi
    else
        if [ "$_readonly" = "YES" ]; then
            _opt="ro"
        fi
    fi
    if [ -n "$_fscomp" ]; then
        if ! _is_fscomp "$_fscomp" ; then
            _error "fscomp $_fscomp is not valid"
            mount-in-help
            return 1
        fi
    fi
    if [ -n "$_dset" ]; then
        if ! _zfs_dataset_valid "$_dset" ; then
            _error "dataset $_dset is not valid"
            mount-in-help
            return 1
        fi
    fi
    # TODO: check that the directory doesn't conflict with anything already mounted
    if [ -n "$_dir" ]; then
        if [ ! -d "$_dir" ]; then
            _error "$_dir is not a directory"
            mount-in-help
            return 1
        fi
        if ! _is_absolute_path "$_dir" ; then
            if ! _dir="$(realpath -q "$_dir")" > /dev/null ; then
                _error "Not able to convert $_dir as an absolute pathname"
                mount-in-help
                return 1
            fi
        fi
        if ! _directory_validation "$_pname" "$_dir" ; then
            _error "Directory $_dir not valid, already used or already part of the pot"
            return 1
        fi
    fi
    if ! _is_pot "$_pname" ; then
        _error "pot $_pname is not valid"
        mount-in-help
        return 1
    fi
    if ! _is_uid0 ; then
        return 1
    fi
    if ! _real_mnt_p="$(_mountpoint_validation "$_pname" "$_mnt_p" )" ; then
        _error "The mountpoint is not valid!"
        return 1
    fi

    # --- dispatch -----------------------------------------------------------
    # $_opt is intentionally unquoted: when empty it must expand to no
    # argument at all, not to an empty string.
    if [ -n "$_dir" ]; then
        _mount_dir "$_dir" "$_pname" "$_real_mnt_p" $_opt
        return $?
    fi
    if [ -n "$_dset" ]; then
        _mount_dataset "$_dset" "$_pname" "$_real_mnt_p" $_opt
        return $?
    fi
    if [ -n "$_fscomp" ]; then
        _mount_dataset "$POT_ZFS_ROOT/fscomp/$_fscomp" "$_pname" "$_real_mnt_p" $_opt
        return $?
    fi
}
|
#!/bin/bash
# Compile a Kotlin source file into a self-contained, runnable jar.
# Usage: ./build.sh <basename>   (compiles <basename>.kt -> <basename>.jar)
set -euo pipefail

# Fail early with a usage message instead of invoking kotlinc with ".kt".
if [ $# -ne 1 ]; then
    echo "usage: $0 <kotlin-file-basename (without .kt)>" >&2
    exit 1
fi

file=$1
# Quote the expansions so basenames containing spaces work.
kotlinc "$file.kt" -include-runtime -d "$file.jar"
<filename>lib/helpers/choiceOfChoices.js
/**
 * Build a validator for a fixed set of string choices.
 *
 * Returns a function that maps a chosen value onto the set: the value
 * itself when it is one of the choices, otherwise the fallback.
 * On invalid arguments a dummy function (always returning null) is
 * returned so callers can still invoke the result safely.
 *
 * (Fixed: the previous JSDoc documented a `chosen` parameter and a
 * string return on the outer function, but `chosen` is taken by the
 * returned function, and errors yield a function — not null.)
 *
 * @param {string[]} choices Choices available
 * @param {string} [defaultChoice] Fallback when the chosen value is not in choices
 * @returns {(chosen: string) => (string|null|undefined)} Validator; on
 *   bad arguments, a dummy function that always returns null
 */
const choiceOfChoices = (choices, defaultChoice) => {
  const dummyFn = () => null;

  if (!Array.isArray(choices) || choices.some((choice) => typeof choice !== 'string')) {
    console.error('Choices is expected to be an array of strings.');
    return dummyFn;
  }

  if (defaultChoice && typeof defaultChoice !== 'string') {
    console.error('Default choice is expected to be undefined or a string.');
    return dummyFn;
  }

  // `includes` reads better than the old `indexOf(...) !== -1` idiom.
  return (chosen) => (choices.includes(chosen) ? chosen : defaultChoice);
};

export default choiceOfChoices;
|
<gh_stars>0
const util = require('../../../utils/util.js');
const api = require('../../../config/api.js');
Page({
  /**
   * Initial data for the page.
   */
  data: {
  },

  // Open an online consultation chat with a sales agent for the current
  // brand. Requires a logged-in account (accountId in storage); otherwise
  // redirects to the login page.
  onlineConsultation() {
    // allData is set by onLoad from the navigation options.
    const { allData } = this.data
    if (wx.getStorageSync("accountId")) {
      let data = {
        phone: wx.getStorageSync('userPhone'),
        accountId: wx.getStorageSync('accountId'),
        imId: wx.getStorageSync('nimId'),
        channel: 'xcx',
        brandName: allData.brandName
      }
      util.request(api.javaCustserviceUrl + "custservice/v1.0/huiju/getSaleIm", data, 'POST')
        .then((res) => {
          console.log(res)
          if (res.code == 0) {
            // Pass a brand summary to the chat page via the query string.
            var brandObj = { "sendImageUrl": allData.brandLogo, "titleName": allData.brandName, "mainPoint": allData.mainPoint, "subTitle": allData.joinInvestMin + '-' + allData.joinInvestMax, "city": allData.location, "sendBrandID": allData.brandId }
            var url = '/pages/customerChat/customerChat?sessionId=p2p-' + res.data.imId + '&brandObj=' + JSON.stringify(brandObj)
            // var url = '/pages/customerChat/customerChat?sessionId=p2p-' + res.data.imId
            wx.navigateTo({
              url: url
            });
          }
        })
        .catch((error) => { });
    } else {
      let url = "/pages/login/login"
      wx.navigateTo({ url })
    }
  },

  // Dial the customer-service hotline.
  makeCall() {
    wx.makePhoneCall({
      phoneNumber: "4000330560" // sample only, not a real phone number
    });
  },

  // Navigate back one page in the stack.
  backPage() {
    wx.navigateBack({
      delta: 1
    })
  },

  /**
   * Lifecycle hook -- page load. Deserializes the brand data passed in
   * the navigation options into page data.
   */
  onLoad: function (options) {
    this.setData({
      allData: JSON.parse(options.allData),
    })
  },

  /**
   * Lifecycle hook -- first render finished.
   */
  onReady: function () {
  },

  /**
   * Lifecycle hook -- page shown.
   */
  onShow: function () {
  },

  /**
   * Lifecycle hook -- page hidden.
   */
  onHide: function () {
  },

  /**
   * Lifecycle hook -- page unloaded.
   */
  onUnload: function () {
  },

  /**
   * Pull-down refresh handler.
   */
  onPullDownRefresh: function () {
  },

  /**
   * Scrolled-to-bottom handler.
   */
  onReachBottom: function () {
  },

  /**
   * Share (top-right menu) handler.
   */
  onShareAppMessage: function () {
  }
})
# Shortcuts
alias copyssh="pbcopy < $HOME/.ssh/id_rsa.pub"
alias reloadshell="source $HOME/.zshrc"
alias reloaddns="dscacheutil -flushcache && sudo killall -HUP mDNSResponder"
alias ll="/usr/local/opt/coreutils/libexec/gnubin/ls -ahlF --color --group-directories-first"
weather() { curl -4 wttr.in/${1:-dallas} }
alias phpstorm='open -a /Applications/PhpStorm.app "`pwd`"'
alias shrug="echo '¯\_(ツ)_/¯' | pbcopy"
alias c="clear"
alias composer="php -d memory_limit=-1 /usr/local/bin/composer"
alias zbundle="antibody bundle < $DOTFILES/zsh_plugins.txt > $DOTFILES/zsh_plugins.sh"
# Fix: the value must be quoted; unquoted, ` share` was parsed as a
# second word and the alias only expanded to `valet`.
alias serve="valet share"
alias vstart="valet start && brew services start mysql"
alias vstop="valet stop && brew services stop mysql"
alias chrome="open -n -a /Applications/Google\ Chrome.app --args --user-data-dir='/tmp/chrome_dev_session' --disable-web-security"
alias php='nocorrect php'
alias npm='nocorrect npm'
alias yarn='nocorrect yarn'

# Directories
alias dotfiles="cd $DOTFILES"
alias library="cd $HOME/Library"
alias sites="cd $HOME/Sites"
alias cake="cd $HOME/Sites/eat-cake"
alias lara="sites && cd laravel/"
alias braodway="cd $HOME/broadway"
# Correctly-spelled variant; the "braodway" typo above is kept so muscle
# memory keeps working.
alias broadway="cd $HOME/broadway"

# Laravel
alias a="nocorrect php artisan"
alias ams="php artisan migrate:fresh --seed"

# PHP
alias php73="/usr/local/Cellar/php@7.3/7.3.13/bin/php"
alias php72="/usr/local/Cellar/php@7.2/7.2.26/bin/php"
alias cfresh="rm -rf vendor/ composer.lock && composer i"

# JS
alias nfresh="rm -rf node_modules/ package-lock.json && npm install"
alias watch="npm run watch"

# Vagrant
alias v="vagrant global-status"
alias vup="vagrant up"
alias vhalt="vagrant halt"
alias vssh="vagrant ssh"
alias vreload="vagrant reload"
alias vrebuild="vagrant destroy --force && vagrant up"

# Docker
alias docker-composer="docker-compose"
#alias dstop="docker stop $(docker ps -a -q)"
#alias dpurgecontainers="dstop && docker rm $(docker ps -a -q)"
#alias dpurgeimages="docker rmi $(docker images -q)"
#dbuild() { docker build -t=$1 .; }
#dbash() { docker exec -it $(docker ps -aqf "name=$1") bash; }

# Git
alias commit="git add . && git commit -m"
alias gcommit="git add . && git commit"
alias amend="git add . && git commit --amend --no-edit"
alias wip="commit wip"
alias gst="git status"
alias gb="git branch"
alias gc="git checkout"
alias gd="git diff"
alias resolve="git add . && git commit --no-edit"
alias gl="git log --oneline --decorate --color"
alias nuke="git clean -df && git reset --hard"
alias unstage="git restore --staged ."
alias pushup="git push -u origin HEAD"

# Work stuff
alias cmstart="docker-compose -f ~/Sites/cmdev/web/docker-compose.yml up -d && docker-compose -f ~/Sites/cmdev/partner-dashboard/docker-compose.yml up -d && docker-compose -f ~/Sites/cmdev/elastic-legacy/docker-compose.yml up -d && docker-compose -f ~/Sites/cmdev/orders/docker-compose.yml up -d"
alias cmstop="docker-compose -f ~/Sites/cmdev/web/docker-compose.yml down && docker-compose -f ~/Sites/cmdev/partner-dashboard/docker-compose.yml down && docker-compose -f ~/Sites/cmdev/elastic-legacy/docker-compose.yml down && docker-compose -f ~/Sites/cmdev/orders/docker-compose.yml down"
alias cmrestart="cmstop && cmstart"
alias cmrphp="docker-compose -f ~/Sites/cmdev/web/docker-compose.yml down && cmstart"
alias m7start="docker-compose -f ~/Sites/m7/docker-compose.yml up -d"
alias m7stop="docker-compose -f ~/Sites/m7/docker-compose.yml down"
alias m7restart="m7stop && m7start"
alias cmdisablexdebug="docker exec -it cm-web-php /bin/bash -c 'mv /usr/local/etc/php/conf.d/docker-php-ext-xdebug.ini /usr/local/etc/php/conf.d/docker-php-ext-xdebug.ini.disabled && ls -l /usr/local/etc/php/conf.d' && sleep 1 && cmrphp"
alias cmenablexdebug="docker exec -it cm-web-php /bin/bash -c 'mv /usr/local/etc/php/conf.d/docker-php-ext-xdebug.ini.disabled /usr/local/etc/php/conf.d/docker-php-ext-xdebug.ini' && sleep 1 && cmrphp"
alias cmphpinilist="docker exec -it cm-web-php /bin/bash -c 'ls -l /usr/local/etc/php/conf.d'"
alias cmpd="docker exec -it cm-partner-dashboard-php"
# Fix: this line used "-" instead of "=", so it defined no alias and
# errored at shell startup.
alias restartvalet="rm ~/.config/valet/valet.sock && valet restart"
alias coverage="export XDEBUG_MODE=coverage && vendor/bin/phpunit --coverage-html reports/"
#!/bin/bash
#
# Copyright (c) 2017-2018 Intel Corporation
#
# SPDX-License-Identifier: Apache-2.0
#
# Trace every command when DEBUG is set in the environment.
[[ "${DEBUG}" != "" ]] && set -o xtrace
set -o errexit
set -o nounset
set -o pipefail
set -o errtrace

SCRIPT_PATH=$(dirname "$(readlink -f "$0")")
source "${SCRIPT_PATH}/../../../lib/common.bash"
source "${SCRIPT_PATH}/../../../.ci/lib.sh"

# runc is installed in /usr/local/sbin/ add that path
export PATH="$PATH:/usr/local/sbin"

containerd_tarball_version=$(get_version "externals.containerd.version")

# Runtime to be used for testing
RUNTIME=${RUNTIME:-containerd-shim-kata-v2}
# Optional test-mode switches, all off unless exported by the caller.
SHIMV2_TEST=${SHIMV2_TEST:-""}
FACTORY_TEST=${FACTORY_TEST:-""}
KILL_VMM_TEST=${KILL_VMM_TEST:-""}
KATA_HYPERVISOR="${KATA_HYPERVISOR:-qemu}"
USE_DEVMAPPER="${USE_DEVMAPPER:-false}"
ARCH=$(uname -m)

default_runtime_type="io.containerd.runc.v2"
# Type of containerd runtime to be tested
containerd_runtime_type="${default_runtime_type}"
# Runtime to be use for the test in containerd
containerd_runtime_test=${RUNTIME}
# Shim-v2 testing drives kata through containerd's kata runtime type.
if [ -n "${SHIMV2_TEST}" ]; then
    containerd_runtime_type="io.containerd.kata.v2"
    containerd_runtime_test="io.containerd.kata.v2"
fi

readonly runc_runtime_bin=$(command -v "runc")
readonly CRITEST=${GOPATH}/bin/critest

# Flag to do tasks for CI
SNAP_CI=${SNAP_CI:-""}
CI=${CI:-""}

containerd_shim_path="$(command -v containerd-shim)"
readonly cri_containerd_repo=$(get_version "externals.containerd.url")

#containerd config file
# All generated artifacts (configs, logs, yaml) live in a throw-away
# temp dir that the EXIT trap removes.
readonly tmp_dir=$(mktemp -t -d test-cri-containerd.XXXX)
export REPORT_DIR="${tmp_dir}"
readonly CONTAINERD_CONFIG_FILE="${tmp_dir}/test-containerd-config"
readonly default_containerd_config="/etc/containerd/config.toml"
readonly default_containerd_config_backup="$CONTAINERD_CONFIG_FILE.backup"
readonly kata_config="/etc/kata-containers/configuration.toml"
readonly kata_config_backup="$kata_config.backup"
readonly default_kata_config="/usr/share/defaults/kata-containers/configuration.toml"
# Prepare the kata configuration for a CI run: back up any existing
# config (or seed it from the packaged default), optionally enable the
# VM-template factory, install the CNI config and turn on runtime debug.
ci_config() {
    sudo mkdir -p $(dirname "${kata_config}")
    # Back up an existing config; otherwise start from the default one.
    [ -f "$kata_config" ] && sudo cp "$kata_config" "$kata_config_backup" || \
        sudo cp "$default_kata_config" "$kata_config"

    source /etc/os-release || source /usr/lib/os-release
    ID=${ID:-""}
    if [ "$ID" == ubuntu ] && [ -n "${CI}" ] ;then
        # https://github.com/kata-containers/tests/issues/352
        if [ -n "${FACTORY_TEST}" ]; then
            sudo sed -i -e 's/^#enable_template.*$/enable_template = true/g' "${kata_config}"
            echo "init vm template"
            sudo -E PATH=$PATH "$RUNTIME" factory init
        fi
    fi

    if [ -n "${CI}" ]; then
        (
        echo "Install cni config"
        ${SCRIPT_PATH}/../../../.ci/configure_cni.sh
        )
    fi

    echo "enable debug for kata-runtime"
    sudo sed -i 's/^#enable_debug =/enable_debug =/g' ${kata_config}
}
# Undo everything ci_config set up: destroy the VM template, restore the
# containerd config saved by testContainerStart, and restore (or remove)
# the kata config.
ci_cleanup() {
    source /etc/os-release || source /usr/lib/os-release

    if [ -n "${FACTORY_TEST}" ]; then
        echo "destroy vm template"
        sudo -E PATH=$PATH "$RUNTIME" factory destroy
    fi

    if [ -n "${KILL_VMM_TEST}" ] && [ -e "$default_containerd_config_backup" ]; then
        echo "restore containerd config"
        sudo systemctl stop containerd
        sudo cp "$default_containerd_config_backup" "$default_containerd_config"
    fi

    # Restore the backed-up kata config if we made one, otherwise remove
    # the config this run created from the default.
    [ -f "$kata_config_backup" ] && sudo mv "$kata_config_backup" "$kata_config" || \
        sudo rm "$kata_config"
}
# Generate the containerd config under test into $CONTAINERD_CONFIG_FILE.
# $1: runtime to register as the default CRI runtime ("runc" or a kata
#     runtime binary/type).
# $2: when 1, also allow io.katacontainers.* pod/container annotations.
create_containerd_config() {
    local runtime="$1"
    # kata_annotations is set to 1 if caller want containerd setup with
    # kata annotations support.
    local kata_annotations=${2-0}
    [ -n "${runtime}" ] || die "need runtime to create config"

    local runtime_type="${containerd_runtime_type}"
    if [ "${runtime}" == "runc" ]; then
        runtime_type="io.containerd.runc.v2"
    fi
    local containerd_runtime="${runtime}"
    # For the runc v2 shim the Runtime option must be the binary's path.
    if [ "${runtime_type}" == "${default_runtime_type}" ];then
        local containerd_runtime=$(command -v "${runtime}")
    fi
    # Remove dots. Dots are used by toml syntax as atribute separator
    runtime="${runtime//./-}"

cat << EOF | sudo tee "${CONTAINERD_CONFIG_FILE}"
[debug]
  level = "debug"
[plugins]
  [plugins.cri]
    [plugins.cri.containerd]
      default_runtime_name = "$runtime"
      [plugins.cri.containerd.runtimes.${runtime}]
        runtime_type = "${runtime_type}"
        $( [ $kata_annotations -eq 1 ] && \
        echo 'pod_annotations = ["io.katacontainers.*"]' && \
        echo '        container_annotations = ["io.katacontainers.*"]'
        )
        [plugins.cri.containerd.runtimes.${runtime}.options]
          Runtime = "${containerd_runtime}"
[plugins.linux]
  shim = "${containerd_shim_path}"
EOF

    # Optionally switch the snapshotter to a devicemapper thin pool.
    if [ "$USE_DEVMAPPER" == "true" ]; then
        sudo sed -i 's|^\(\[plugins\]\).*|\1\n  \[plugins.devmapper\]\n    pool_name = \"contd-thin-pool\"\n    base_image_size = \"4096MB\"|' ${CONTAINERD_CONFIG_FILE}
        echo "Devicemapper configured"
        cat "${CONTAINERD_CONFIG_FILE}"
    fi
}
# Remove the temp work dir and undo CI setup; runs on every exit.
cleanup() {
    ci_cleanup
    [ -d "$tmp_dir" ] && rm -rf "${tmp_dir}"
}
trap cleanup EXIT
# On any command failure (ERR trap below) dump the containerd log, so the
# CI output contains the daemon's view of what went wrong.
err_report() {
    local log_file="${REPORT_DIR}/containerd.log"
    # Nothing to report if containerd never wrote a log.
    if [ ! -f "$log_file" ]; then
        return 0
    fi
    echo "ERROR: containerd log :"
    echo "-------------------------------------"
    cat "${log_file}"
    echo "-------------------------------------"
}
trap err_report ERR
# Sanity-check the containerd daemon with plain runc before running the
# kata test matrix: a single fast cri-integration case (TestImageLoad).
check_daemon_setup() {
    info "containerd(cri): Check daemon works with runc"
    create_containerd_config "runc"

    #restart docker service as TestImageLoad depends on it
    [ -z "${USE_PODMAN:-}" ] && restart_docker_service

    # in some distros(AlibabaCloud), there is no btrfs-devel package available,
    # so pass GO_BUILDTAGS="no_btrfs" to make to not use btrfs.
    sudo -E PATH="${PATH}:/usr/local/bin" \
        REPORT_DIR="${REPORT_DIR}" \
        FOCUS="TestImageLoad" \
        RUNTIME="" \
        CONTAINERD_CONFIG_FILE="$CONTAINERD_CONFIG_FILE" \
        make GO_BUILDTAGS="no_btrfs" -e cri-integration
}
# Create and start a busybox pod + container through crictl, using the
# containerd config under test (the live config is backed up first and
# restored by testContainerStop). Sets the globals $podid and $cid.
# $1: when 1, do not generate container.yaml (the caller supplies one).
testContainerStart() {
    # no_container_yaml set to 1 will not create container_yaml
    # because caller has created its own container_yaml.
    no_container_yaml=${1-0}

    local pod_yaml=${REPORT_DIR}/pod.yaml
    local container_yaml=${REPORT_DIR}/container.yaml
    # NOTE(review): image is *local* to this function — callers that build
    # their own container.yaml cannot rely on $image being set.
    local image="busybox:latest"

cat << EOF > "${pod_yaml}"
metadata:
  name: busybox-sandbox1
EOF

    #TestContainerSwap has created its own container_yaml.
    if [ $no_container_yaml -ne 1 ]; then
cat << EOF > "${container_yaml}"
metadata:
  name: busybox-killed-vmm
image:
  image: "$image"
command:
- top
EOF
    fi

    sudo cp "$default_containerd_config" "$default_containerd_config_backup"
    sudo cp $CONTAINERD_CONFIG_FILE "$default_containerd_config"

    restart_containerd_service

    sudo crictl pull $image
    podid=$(sudo crictl runp $pod_yaml)
    cid=$(sudo crictl create $podid $container_yaml $pod_yaml)
    sudo crictl start $cid
}
# Tear down the pod created by testContainerStart (using the global
# $podid) and restore the original containerd configuration.
testContainerStop() {
    info "stop pod $podid"
    sudo crictl stopp $podid
    info "remove pod $podid"
    sudo crictl rmp $podid

    sudo cp "$default_containerd_config_backup" "$default_containerd_config"
    restart_containerd_service
}
# Kill the qemu VMM out from under the shim-v2 and verify the shim
# notices and exits, leaving no stray shimv2 processes behind.
# Only runs when both SHIMV2_TEST and KILL_VMM_TEST are enabled.
TestKilledVmmCleanup() {
    if [ -z "${SHIMV2_TEST}" ] || [ -z "${KILL_VMM_TEST}" ]; then
        return
    fi
    info "test killed vmm cleanup"

    testContainerStart
    qemu_pid=$(ps aux|grep qemu|grep -v grep|awk '{print $2}')
    info "kill qemu $qemu_pid"
    sudo kill -SIGKILL $qemu_pid
    # sleep to let shimv2 exit
    sleep 1
    remained=$(ps aux|grep shimv2|grep -v grep || true)
    # Fix: $remained must be quoted — unquoted, a multi-word ps line made
    # "[ -z ... ]" fail with "too many arguments" instead of testing.
    [ -z "$remained" ] || die "found remaining shimv2 process $remained"
    testContainerStop
    info "stop containerd"
}
# Verify guest VM memory can be resized via `crictl update`.
# $1: 1 to test with virtio-mem enabled (x86_64 only), 0 without.
# Skipped on non-qemu hypervisors and on ppc64le/s390x.
TestContainerMemoryUpdate() {
    if [[ "${KATA_HYPERVISOR}" != "qemu" ]] || [[ "${ARCH}" == "ppc64le" ]] || [[ "${ARCH}" == "s390x" ]]; then
        return
    fi

    test_virtio_mem=$1

    if [ $test_virtio_mem -eq 1 ]; then
        if [[ "$ARCH" != "x86_64" ]]; then
            return
        fi
        info "Test container memory update with virtio-mem"
        sudo sed -i -e 's/^#enable_virtio_mem.*$/enable_virtio_mem = true/g' "${kata_config}"
    else
        info "Test container memory update without virtio-mem"
        sudo sed -i -e 's/^enable_virtio_mem.*$/#enable_virtio_mem = true/g' "${kata_config}"
    fi

    testContainerStart

    # MemTotal is reported in KiB; sizes are accepted within a
    # [target-128MiB, target] window because the guest kernel reserves
    # some memory.
    vm_size=$(($(crictl exec $cid cat /proc/meminfo | grep "MemTotal:" | awk '{print $2}')*1024))
    if [ $vm_size -gt $((2*1024*1024*1024)) ] || [ $vm_size -lt $((2*1024*1024*1024-128*1024*1024)) ]; then
        testContainerStop
        die "The VM memory size $vm_size before update is not right"
    fi

    # Grow the container memory limit; the VM should grow to ~4GiB.
    sudo crictl update --memory $((2*1024*1024*1024)) $cid
    sleep 1

    vm_size=$(($(crictl exec $cid cat /proc/meminfo | grep "MemTotal:" | awk '{print $2}')*1024))
    if [ $vm_size -gt $((4*1024*1024*1024)) ] || [ $vm_size -lt $((4*1024*1024*1024-128*1024*1024)) ]; then
        testContainerStop
        die "The VM memory size $vm_size after increase is not right"
    fi

    # Shrinking guest memory is only possible with virtio-mem.
    if [ $test_virtio_mem -eq 1 ]; then
        sudo crictl update --memory $((1*1024*1024*1024)) $cid
        sleep 1

        vm_size=$(($(crictl exec $cid cat /proc/meminfo | grep "MemTotal:" | awk '{print $2}')*1024))
        if [ $vm_size -gt $((3*1024*1024*1024)) ] || [ $vm_size -lt $((3*1024*1024*1024-128*1024*1024)) ]; then
            testContainerStop
            die "The VM memory size $vm_size after decrease is not right"
        fi
    fi

    testContainerStop
}
# Read swap state from inside the running container (global $cid) and
# export it via the globals $swap_size (bytes), $swappiness and
# $swap_in_bytes (memory+swap cgroup limit).
getContainerSwapInfo() {
    swap_size=$(($(crictl exec $cid cat /proc/meminfo | grep "SwapTotal:" | awk '{print $2}')*1024))
    swappiness=$(crictl exec $cid cat /proc/sys/vm/swappiness)
    swap_in_bytes=$(crictl exec $cid cat /sys/fs/cgroup/memory/memory.memsw.limit_in_bytes)
}
# Exercise guest swap via io.katacontainers.* annotations: no swap
# device, explicit swappiness/swap_in_bytes, swappiness only, and
# swappiness without a memory limit. qemu + x86_64 only.
TestContainerSwap() {
    if [[ "${KATA_HYPERVISOR}" != "qemu" ]] || [[ "${ARCH}" != "x86_64" ]]; then
        return
    fi

    local container_yaml=${REPORT_DIR}/container.yaml
    # Fix: the heredocs below expand $image, but image was only ever a
    # *local* of testContainerStart and was empty here, so the generated
    # container specs had no image. Define it in this scope.
    local image="busybox:latest"

    info "Test container with guest swap"

    create_containerd_config "${containerd_runtime_test}" 1
    sudo sed -i -e 's/^#enable_guest_swap.*$/enable_guest_swap = true/g' "${kata_config}"

    # Test without swap device
    testContainerStart
    getContainerSwapInfo
    # Current default swappiness is 60
    if [ $swappiness -ne 60 ]; then
        testContainerStop
        die "The VM swappiness $swappiness without swap device is not right"
    fi
    if [ $swap_in_bytes -lt 1125899906842624 ]; then
        testContainerStop
        die "The VM swap_in_bytes $swap_in_bytes without swap device is not right"
    fi
    if [ $swap_size -ne 0 ]; then
        testContainerStop
        die "The VM swap size $swap_size without swap device is not right"
    fi
    testContainerStop

    # Test with swap device
cat << EOF > "${container_yaml}"
metadata:
  name: busybox-killed-vmm
annotations:
  io.katacontainers.container.resource.swappiness: "100"
  io.katacontainers.container.resource.swap_in_bytes: "1610612736"
linux:
  resources:
    memory_limit_in_bytes: 1073741824
image:
  image: "$image"
command:
- top
EOF
    testContainerStart 1
    getContainerSwapInfo
    if [ $swappiness -ne 100 ]; then
        testContainerStop
        die "The VM swappiness $swappiness with swap device is not right"
    fi
    if [ $swap_in_bytes -ne 1610612736 ]; then
        testContainerStop
        die "The VM swap_in_bytes $swap_in_bytes with swap device is not right"
    fi
    # Expected swap size = swap_in_bytes - memory_limit_in_bytes.
    if [ $swap_size -ne 536870912 ]; then
        testContainerStop
        die "The VM swap size $swap_size with swap device is not right"
    fi
    testContainerStop

    # Test without swap_in_bytes
cat << EOF > "${container_yaml}"
metadata:
  name: busybox-killed-vmm
annotations:
  io.katacontainers.container.resource.swappiness: "100"
linux:
  resources:
    memory_limit_in_bytes: 1073741824
image:
  image: "$image"
command:
- top
EOF
    testContainerStart 1
    getContainerSwapInfo
    if [ $swappiness -ne 100 ]; then
        testContainerStop
        die "The VM swappiness $swappiness without swap_in_bytes is not right"
    fi
    # swap_in_bytes is not set, it should be a value that bigger than 1125899906842624
    if [ $swap_in_bytes -lt 1125899906842624 ]; then
        testContainerStop
        die "The VM swap_in_bytes $swap_in_bytes without swap_in_bytes is not right"
    fi
    if [ $swap_size -ne 1073741824 ]; then
        testContainerStop
        die "The VM swap size $swap_size without swap_in_bytes is not right"
    fi
    testContainerStop

    # Test without memory_limit_in_bytes
cat << EOF > "${container_yaml}"
metadata:
  name: busybox-killed-vmm
annotations:
  io.katacontainers.container.resource.swappiness: "100"
image:
  image: "$image"
command:
- top
EOF
    testContainerStart 1
    getContainerSwapInfo
    if [ $swappiness -ne 100 ]; then
        testContainerStop
        die "The VM swappiness $swappiness without memory_limit_in_bytes is not right"
    fi
    # swap_in_bytes is not set, it should be a value that bigger than 1125899906842624
    if [ $swap_in_bytes -lt 1125899906842624 ]; then
        testContainerStop
        die "The VM swap_in_bytes $swap_in_bytes without memory_limit_in_bytes is not right"
    fi
    if [ $swap_size -ne 2147483648 ]; then
        testContainerStop
        die "The VM swap size $swap_size without memory_limit_in_bytes is not right"
    fi
    testContainerStop

    create_containerd_config "${containerd_runtime_test}"
}
# k8s may restart docker which will impact on containerd stop
stop_containerd() {
    # Tear down any running kubelet/kubernetes first, otherwise it keeps
    # restarting containerd behind our back.
    local kubelet_pids
    kubelet_pids=$(pgrep kubelet || true)
    if [ -n "$kubelet_pids" ]; then
        sudo kubeadm reset -f
    fi
    sudo systemctl stop containerd
}
# Orchestrate the whole run: stop competing CRI daemons, fetch and patch
# the containerd sources, sanity-check the daemon with runc, then run the
# selected cri-integration cases plus the custom kata tests.
main() {
    info "Stop crio service"
    systemctl is-active --quiet crio && sudo systemctl stop crio

    info "Stop containerd service"
    systemctl is-active --quiet containerd && stop_containerd

    # Configure enviroment if running in CI
    ci_config

    # make sure cri-containerd test install the proper critest version its testing
    rm -f "${CRITEST}"

    go get -d ${cri_containerd_repo}
    pushd "${GOPATH}/src/${cri_containerd_repo}"
    git reset HEAD
    # In CCv0 we are using a fork of containerd, so pull the matching branch of this
    containerd_branch=$(get_version "externals.containerd.branch")
    git checkout "${containerd_branch}"

    # switch to the default pause image set by containerd:1.6.x
    sed -i 's#k8s.gcr.io/pause:3.[0-9]#k8s.gcr.io/pause:3.6#' integration/main_test.go
    cp "${SCRIPT_PATH}/container_restart_test.go.patch" ./integration/container_restart_test.go

    # Make sure the right artifacts are going to be built
    make clean

    check_daemon_setup

    info "containerd(cri): testing using runtime: ${containerd_runtime_test}"

    create_containerd_config "${containerd_runtime_test}"

    info "containerd(cri): Running cri-integration"

    # Upstream cri-integration cases known to pass with kata.
    passing_test=(
    TestContainerStats
    TestContainerRestart
    TestContainerListStatsWithIdFilter
    TestContainerListStatsWithIdSandboxIdFilter
    TestDuplicateName
    TestImageLoad
    TestImageFSInfo
    TestSandboxCleanRemove
    )

    if [[ "${KATA_HYPERVISOR}" == "cloud-hypervisor" || \
        "${KATA_HYPERVISOR}" == "qemu" ]]; then
        issue="https://github.com/kata-containers/tests/issues/2318"
        info "${KATA_HYPERVISOR} fails with TestContainerListStatsWithSandboxIdFilter }"
        info "see ${issue}"
    else
        passing_test+=("TestContainerListStatsWithSandboxIdFilter")
    fi

    # in some distros(AlibabaCloud), there is no btrfs-devel package available,
    # so pass GO_BUILDTAGS="no_btrfs" to make to not use btrfs.
    for t in "${passing_test[@]}"
    do
        sudo -E PATH="${PATH}:/usr/local/bin" \
            REPORT_DIR="${REPORT_DIR}" \
            FOCUS="${t}" \
            RUNTIME="" \
            CONTAINERD_CONFIG_FILE="$CONTAINERD_CONFIG_FILE" \
            make GO_BUILDTAGS="no_btrfs" -e cri-integration
    done

    # Custom kata scenarios (memory resize, VMM-kill cleanup).
    TestContainerMemoryUpdate 1
    TestContainerMemoryUpdate 0

    TestKilledVmmCleanup

    popd
}
main
/* Shared, globally visible rendering assets (shaders, font, material
 * colors) plus the routines that load and re-check them.
 *
 * Fix: the include guard is renamed from _COMMONASSETS_H — identifiers
 * starting with an underscore followed by an uppercase letter are
 * reserved for the implementation in C. */
#ifndef COMMONASSETS_H
#define COMMONASSETS_H

#include "graphicscore.h"
#include "bitmapfont.h"
#include <os_generic.h>
#include "objreader.h"

/* Uniform bindings applied across shaders — presumably shared matrices;
 * confirm in the defining .c file. */
extern struct UniformMatch * OverallUniforms;
extern struct Shader * ButtonShader;
extern struct Shader * TextShader;
/* Default UI font. */
extern struct BitmapFont * OldSansBlack;
/* RGBA material colors. */
extern float Ambient[4];
extern float Emission[4];

/* Load/create the assets above; call once at startup before use. */
void SetupCommonAssets(void);
/* Re-validate (and presumably reload) the assets — confirm semantics in
 * the defining .c file. */
void CheckCommonAssets(void);

#endif
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.