text stringlengths 1 1.05M |
|---|
package com.quantconnect.lean;

/** Specifies where a subscription's data comes from. */
public enum SubscriptionTransportMedium {
    /** The subscription's data comes from disk. */
    LocalFile,
    /** The subscription's data is downloaded from a remote source. */
    RemoteFile,
    /** The subscription's data comes from a rest call that is polled and returns a single line/data point of information. */
    Rest
}
def bubble_sort(arr):
    """Sort ``arr`` in place in ascending order using bubble sort.

    Args:
        arr: A mutable sequence of mutually comparable items.

    Returns:
        None. The sequence is sorted in place.
    """
    n = len(arr)
    # Traverse through all array elements.
    for i in range(n):
        # Early-exit optimization: if a full pass makes no swaps, the
        # sequence is already sorted and remaining passes are pointless.
        swapped = False
        # Last i elements are already in their final position.
        for j in range(0, n - i - 1):
            # Swap adjacent elements that are out of order.
            if arr[j] > arr[j + 1]:
                arr[j], arr[j + 1] = arr[j + 1], arr[j]
                swapped = True
        if not swapped:
            break
# Driver code: sort a sample array and print each element.
arr = [6, 9, 8, 3, 7, 5, 4, 2, 1]
bubble_sort(arr)
print("Sorted array is:")
for value in arr:
    # The original ended the print with a Python 2 style trailing comma,
    # which in Python 3 just builds a discarded tuple; print plainly instead.
    print(value)
import Constants from 'expo-constants';
import React from 'react';
import symbolicateStackTrace from 'react-native/Libraries/Core/Devtools/symbolicateStackTrace';
import TestRenderer from 'react-test-renderer';
import LogSerialization from '../LogSerialization';
import { LogErrorData } from '../RemoteLogging';
// Replace the symbolicator with a pass-through mock: these tests exercise
// serialization, not symbolication, so the raw stack is returned unchanged.
jest.mock('react-native/Libraries/Core/Devtools/symbolicateStackTrace', () =>
  jest.fn(async stack =>
    // We don't test symbolication itself in this test, partly because it's complex
    ({ stack })
  )
);
// Mock the Expo manifest so LogSerialization sees a dev-mode project root,
// which enables its stack-trace support code path.
jest.mock('expo-constants', () => ({
  manifest: {
    developer: {
      projectRoot: '/home/test/project',
    },
  },
}));
// Primitive and simple values serialize to their string representations and
// never trigger stack-trace capture at the `info` level.
it(`serializes simple log messages`, async () => {
  const result = await LogSerialization.serializeLogDataAsync(
    ['hi', 1, true, 0, '', null, false, undefined],
    'info'
  );
  expect(result.body).toEqual(['hi', '1', 'true', '0', '', 'null', 'false', 'undefined']);
  expect(result.includesStack).toBeFalsy();
});
it(`serializes nested objects`, async () => {
  const result = await LogSerialization.serializeLogDataAsync(
    [{ outer: { inner: [[], {}] } }],
    'info'
  );
  // Snapshot pins the exact formatting of nested structures.
  expect(result.body).toMatchSnapshot();
  expect(result.includesStack).toBeFalsy();
});
it(`serializes cyclic objects`, async () => {
  // Build a self-referencing object; the serializer must mark the cycle
  // (output mentions "Circular") rather than recurse forever.
  const object: { [key: string]: any } = {};
  object.self = object;
  const result = await LogSerialization.serializeLogDataAsync([object], 'info');
  expect(result.body).toMatchSnapshot();
  expect(result.body[0]).toMatch('Circular');
  expect(result.includesStack).toBeFalsy();
});
it(`serializes functions`, async () => {
  const result = await LogSerialization.serializeLogDataAsync(
    [function test() {}, () => {}],
    'info'
  );
  // Named functions keep their name; arrow functions serialize as "anonymous".
  expect(result.body).toEqual(['[Function test]', '[Function anonymous]']);
  expect(result.includesStack).toBeFalsy();
});
it(`serializes symbols`, async () => {
  const result = await LogSerialization.serializeLogDataAsync(
    [Symbol('test'), Symbol.iterator],
    'info'
  );
  expect(result.body).toEqual(['Symbol(test)', 'Symbol(Symbol.iterator)']);
  expect(result.includesStack).toBeFalsy();
});
it(`serializes promises`, async () => {
  // The rejected promise is intentionally unhandled; the test only checks
  // that both promises serialize to strings mentioning "Promise".
  const result = await LogSerialization.serializeLogDataAsync(
    [Promise.resolve('test'), Promise.reject(new Error('Expected'))],
    'info'
  );
  expect(result.body.length).toBe(2);
  expect(result.body[0]).toMatch('Promise');
  expect(result.body[1]).toMatch('Promise');
  expect(result.includesStack).toBeFalsy();
});
// React elements must serialize without throwing (the exact serialized form
// is not asserted here, only that a body array comes back).
it(`serializes React elements`, async () => {
  class TestComponent extends React.Component {
    render() {
      return <TestComponent />;
    }
  }
  const result = await LogSerialization.serializeLogDataAsync([<TestComponent />], 'info');
  expect(Array.isArray(result.body)).toBe(true);
  expect(result.includesStack).toBeFalsy();
});
it(`serializes React components (refs)`, async () => {
  class TestComponent extends React.Component {
    child: React.Component | null = null;
    render() {
      return <ChildComponent ref={component => (this.child = component)} />;
    }
  }
  class ChildComponent extends React.Component {
    render() {
      return 'test';
    }
  }
  // Render for real so the ref callback runs and `child` holds an instance.
  const testRenderer = TestRenderer.create(<TestComponent />);
  const result = await LogSerialization.serializeLogDataAsync(
    [testRenderer.root.instance.child],
    'info'
  );
  expect(Array.isArray(result.body)).toBe(true);
  // The serialized form names the component class.
  expect(result.body[0]).toMatch('ChildComponent');
  expect(result.includesStack).toBeFalsy();
});
// These tests run with Constants.manifest.developer.projectRoot set (via the
// expo-constants mock at the top of this file), which is the condition that
// enables stack-trace capture and symbolication.
describe('with stack trace support in Expo CLI', () => {
  it(`includes a symbolicated stack trace when logging an error`, async () => {
    const mockError = _getMockError('Test error');
    const result = await LogSerialization.serializeLogDataAsync([mockError], 'info');
    // NOTE(review): toHaveBeenCalledTimes(1) in each test assumes mock call
    // counts are reset between tests (e.g. jest clearMocks) — confirm config.
    expect(symbolicateStackTrace).toHaveBeenCalledTimes(1);
    expect(Array.isArray(result.body)).toBe(true);
    expect(result.body.length).toBe(1);
    expect((result.body[0] as LogErrorData).message).toBe('Test error');
    expect((result.body[0] as LogErrorData).stack).toMatchSnapshot();
    expect(result.includesStack).toBe(true);
  });
  it(`can symbolicate errors from V8`, async () => {
    // V8 uses "at fn (file:line:col)" stack frames, unlike JSC's "fn@file".
    const mockError = _getMockV8Error('Test error');
    const result = await LogSerialization.serializeLogDataAsync([mockError], 'info');
    expect(symbolicateStackTrace).toHaveBeenCalledTimes(1);
    expect(Array.isArray(result.body)).toBe(true);
    expect(result.body.length).toBe(1);
    expect((result.body[0] as LogErrorData).message).toBe('Test error');
    expect((result.body[0] as LogErrorData).stack).toMatch('_exampleFunction');
    expect(result.includesStack).toBe(true);
  });
  // Warnings and errors capture a stack even for plain string messages.
  it(`includes a symbolicated stack trace when warning`, async () => {
    const result = await LogSerialization.serializeLogDataAsync(['warning message'], 'warn');
    expect(symbolicateStackTrace).toHaveBeenCalledTimes(1);
    expect(Array.isArray(result.body)).toBe(true);
    expect(result.body.length).toBe(1);
    expect((result.body[0] as LogErrorData).message).toBe('warning message');
    expect((result.body[0] as LogErrorData).stack).toBeDefined();
    expect(result.includesStack).toBe(true);
  });
  it(`includes a symbolicated stack trace when erroring`, async () => {
    const result = await LogSerialization.serializeLogDataAsync(['error message'], 'error');
    expect(symbolicateStackTrace).toHaveBeenCalledTimes(1);
    expect(Array.isArray(result.body)).toBe(true);
    expect(result.body.length).toBe(1);
    expect((result.body[0] as LogErrorData).message).toBe('error message');
    expect((result.body[0] as LogErrorData).stack).toBeDefined();
    expect(result.includesStack).toBe(true);
  });
  it(`uses the provided error's stack trace when erroring`, async () => {
    // When an actual Error is logged, its own stack wins over a fresh capture.
    const mockError = _getMockError('Test error');
    const result = await LogSerialization.serializeLogDataAsync([mockError], 'error');
    expect(symbolicateStackTrace).toHaveBeenCalledTimes(1);
    expect(Array.isArray(result.body)).toBe(true);
    expect(result.body.length).toBe(1);
    expect((result.body[0] as LogErrorData).message).toBe('Test error');
    expect((result.body[0] as LogErrorData).stack).toMatch('_exampleFunction');
  });
  it(`symbolicates unhandled promise rejections`, async () => {
    // RN reports unhandled rejections as a plain warning string with embedded
    // stack lines; those should be parsed and symbolicated like real errors.
    const warningMessage = _getMockUnhandledPromiseRejection();
    const result = await LogSerialization.serializeLogDataAsync([warningMessage], 'warn');
    expect(symbolicateStackTrace).toHaveBeenCalledTimes(1);
    expect(Array.isArray(result.body)).toBe(true);
    expect(result.body.length).toBe(1);
    expect((result.body[0] as LogErrorData).message).toMatch('Unhandled promise rejection: ');
    expect((result.body[0] as LogErrorData).stack).toBeDefined();
    expect(result.includesStack).toBe(true);
  });
  // Degenerate-stack cases: serialization must not throw.
  it(`doesn't fail if the error has no stack frames`, async () => {
    const mockError = new Error('Test error');
    mockError.stack = mockError.stack!.split('\n')[0];
    const result = await LogSerialization.serializeLogDataAsync([mockError], 'info');
    expect(result).toMatchSnapshot();
  });
  it(`doesn't fail if the error stack property is missing`, async () => {
    const mockError = new Error('Test error');
    mockError.stack = undefined;
    const result = await LogSerialization.serializeLogDataAsync([mockError], 'info');
    expect(result).toMatchSnapshot();
  });
  it(`doesn't fail if symbolication fails`, async () => {
    // Force the (mocked) symbolicator to reject once; serialization should
    // still produce a result rather than propagating the failure.
    (symbolicateStackTrace as jest.Mock).mockImplementationOnce(async () => {
      throw new Error('Intentional symbolication error');
    });
    const mockError = _getMockError('Test error');
    const result = await LogSerialization.serializeLogDataAsync([mockError], 'error');
    expect(symbolicateStackTrace).toHaveBeenCalledTimes(1);
    expect(Array.isArray(result.body)).toBe(true);
    expect(result.body.length).toBe(1);
    expect((result.body[0] as LogErrorData).message).toBe('Test error');
    expect((result.body[0] as LogErrorData).stack).toMatchSnapshot();
    expect(result.includesStack).toBe(true);
  });
});
describe(`without stack trace support in Expo CLI`, () => {
  // Removing projectRoot from the mocked manifest simulates running outside
  // Expo CLI development, where stack capture is unsupported.
  let originalProjectRoot;
  beforeAll(() => {
    if (!Constants.manifest.developer) {
      throw new Error('Constants.manifest.developer is not defined');
    }
    originalProjectRoot = Constants.manifest.developer.projectRoot;
    delete Constants.manifest.developer.projectRoot;
  });
  afterAll(() => {
    if (!Constants.manifest.developer) {
      throw new Error('Constants.manifest.developer is not defined');
    }
    // Restore the mocked manifest so other suites see the original value.
    Constants.manifest.developer.projectRoot = originalProjectRoot;
  });
  it(`doesn't capture a stack trace`, async () => {
    const result = await LogSerialization.serializeLogDataAsync(['oh no'], 'error');
    expect(result.includesStack).toBeFalsy();
    expect(symbolicateStackTrace).not.toHaveBeenCalled();
  });
});
// Builds an Error whose stack is a fixed JavaScriptCore-style trace
// ("fn@file:line:col") rooted in the mocked project directory, giving
// symbolication-related code realistic frames to work with.
function _getMockError(message) {
  const jscStack = `_exampleFunction@/home/test/project/App.js:125:13
_depRunCallbacks@/home/test/project/node_modules/dep/index.js:77:45
tryCallTwo@/home/test/project/node_modules/react-native/node_modules/promise/lib/core.js:45:5
doResolve@/home/test/project/node_modules/react-native/node_modules/promise/lib/core.js:200:13`;
  const mockError = new Error(message);
  mockError.stack = jscStack;
  return mockError;
}
// Builds an Error whose stack uses the V8 "at fn (file:line:col)" frame
// format, appended to the Error's own first stack line ("Error: <message>").
function _getMockV8Error(message) {
  const v8Frames = `
at _exampleFunction (/home/test/project/App.js:125:13)
at _depRunCallbacks (/home/test/project/node_modules/dep/index.js:77:45)
at tryCallTwo (/home/test/project/node_modules/react-native/node_modules/promise/lib/core.js:45:5)
at doResolve (/home/test/project/node_modules/react-native/node_modules/promise/lib/core.js:200:13)`;
  const error = new Error(message);
  const firstStackLine = error.stack!.split('\n')[0];
  error.stack = firstStackLine + v8Frames;
  return error;
}
// Returns the console-warning string React Native emits for an unhandled
// promise rejection (message text followed by JSC-format stack lines), used
// to verify such warnings are parsed and symbolicated like real errors.
function _getMockUnhandledPromiseRejection() {
  return `Possible Unhandled Promise Rejection (id: 0):
Error: Intentionally unhandled async error
_callee$@http://localhost:19001/entry.bundle?platform=ios&dev=true&strict=false&minify=false&hot=false&assetPlugin=/home/test/project/node_modules/expo/tools/hashAssetFiles:99344:32
tryCatch@http://localhost:19001/entry.bundle?platform=ios&dev=true&strict=false&minify=false&hot=false&assetPlugin=/home/test/project/node_modules/expo/tools/hashAssetFiles:12301:44
invoke@http://localhost:19001/entry.bundle?platform=ios&dev=true&strict=false&minify=false&hot=false&assetPlugin=/home/test/project/node_modules/expo/tools/hashAssetFiles:12489:30`;
}
|
app.component.ts
import { Component, OnInit } from '@angular/core';
import { ApiService } from './api.service';
@Component({
  selector: 'app-root',
  templateUrl: './app.component.html',
  styleUrls: ['./app.component.css']
})
export class AppComponent implements OnInit {
  // Two-way bound to the search input in the template.
  searchValue: string;
  // Result list rendered by the template's *ngFor; shape depends on the API
  // response — presumably an array of { id, name } records. TODO: confirm and type it.
  results;
  constructor(private apiService: ApiService) { }
  ngOnInit() {
  }
  search(){
    // NOTE(review): no error handler on the subscription — a failed request
    // silently leaves the previous results on screen.
    this.apiService.search(this.searchValue).subscribe(data => this.results = data);
  }
}
app.component.html
<!-- Search UI: the input is two-way bound to searchValue, the button invokes
     search(), and each result's id/name renders as a table row. -->
<div>
  <input type="text" [(ngModel)]="searchValue" />
  <button (click)="search()">Search</button>
  <table>
    <thead>
      <tr>
        <th>ID</th>
        <th>Name</th>
      </tr>
    </thead>
    <tbody>
      <tr *ngFor="let result of results">
        <td>{{result.id}}</td>
        <td>{{result.name}}</td>
      </tr>
    </tbody>
  </table>
</div>
#!/bin/bash
# libs: shared color variables and helper functions
. settings.sh
. functions.sh
# clear the screen (helper from functions.sh)
clean
# title — ${reset} added so the title color does not leak into later output
echo ""
echo "${bcyan}XCode${reset}"
# Ask until the user confirms whether Xcode is installed and up to date.
while true; do
    echo ""
    echo "${red}Is XCode installed and updated? (y/n)${reset} (Step: 1 of 1)"
    # -r keeps backslashes in the answer literal (ShellCheck SC2162)
    read -r yesOrNo
    case $yesOrNo in
        [Yy]* ) break;;
        [Nn]* ) echo "${cyan}Download XCode from the AppStore${reset}"; exit;;
        * ) echo "Please answer ${bold}y${reset} or ${bold}n${reset}";;
    esac
done
|
<filename>spec/lib/date_parser_spec.rb
require "rails_helper"
require "date_parser"
# Unit specs for DateParser.parse: covers the supported input formats plus the
# nil/invalid fallbacks (both of which are expected to yield nil).
describe DateParser do
  subject { described_class.parse(input) }
  context "with a valid short form date" do
    # Day/month/year ordering (13 October), not month/day/year.
    let(:input) { "13/10/2014" }
    it { is_expected.to eq(Date.new(2014, 10, 13)) }
  end
  context "with a valid long form date" do
    let(:input) { "1st December 2014" }
    it { is_expected.to eq(Date.new(2014, 12, 1)) }
  end
  context "with a nil date" do
    let(:input) { nil }
    it { is_expected.to be_nil }
  end
  context "with an invalid date" do
    let(:input) { "foo" }
    it { is_expected.to be_nil }
  end
end
|
import {
Injectable,
ConflictException,
BadRequestException,
NotFoundException,
Logger,
} from '@nestjs/common';
import { InjectRepository } from '@nestjs/typeorm';
import { User } from './user.entity';
import { Repository } from 'typeorm';
import {
SignupRequest,
GetLeaderboardResponse,
GetUserResponse,
} from '../contract';
import { isNullOrUndefined } from 'util';
import { getConnection } from 'typeorm';
import { toUserModel } from './user.mapper';
import { Event } from '../event/event.entity';
import { toEventModel } from '../event/event.mapper';
@Injectable()
export class UserService {
constructor(
@InjectRepository(User)
private readonly userRepository: Repository<User>,
) {}
public async getUserEntityById(id: number): Promise<User> {
return await this.userRepository.findOne(id);
}
public async getUserWithEventsAndPoint(user: User): Promise<GetUserResponse> {
const userEntity = await this.userRepository.findOne(user, {
relations: ['events', 'createdEvents'],
});
const participationMap = new Map<number, boolean>();
const participatedEventIds = userEntity.events.map(event => {
participationMap.set(event.eventId, event.approved);
return event.eventId;
});
const createdEventIds = userEntity.events.map(event => event.eventId);
const participatedEvents = (await getConnection()
.getRepository('event')
.findByIds(participatedEventIds)) as Event[];
const createdEvents = (await getConnection()
.getRepository('event')
.findByIds(createdEventIds)) as Event[];
const points =
participatedEvents
.filter(event => participationMap.get(event.id))
.map(e => e.point)
.reduce((prev, next) => prev + next, 0) +
2 *
createdEvents
.filter(event => event.approved)
.map(e => e.point)
.reduce((prev, next) => prev + next, 0);
const userModel = toUserModel(userEntity);
userModel.point = points;
return new GetUserResponse(
userModel,
createdEvents.map(e => toEventModel(e)),
participatedEvents.map(e => {
const eM = toEventModel(e);
eM.participationApproved = participationMap.get(eM.id);
return eM;
}),
);
}
public async getUserEntityByUsername(username: string): Promise<User> {
username = username.toLowerCase();
return await this.userRepository.findOne({ where: { username } });
}
public async getUserEntityByUsernameOrEmail(
identifier: string,
): Promise<User> {
identifier = identifier.toLowerCase();
return await this.userRepository.findOne({
where: [{ username: identifier }, { email: identifier }],
});
}
public async createUser(
signupRequest: SignupRequest,
passwordHash: string,
): Promise<User> {
const newUser = new User();
newUser.username = signupRequest.username.toLowerCase();
newUser.email = signupRequest.email.toLowerCase();
newUser.passwordHash = <PASSWORD>;
newUser.firstName = signupRequest.firstName;
newUser.lastName = signupRequest.lastName;
newUser.middleName = signupRequest.middleName;
newUser.TCKN = signupRequest.TCKN;
newUser.city = signupRequest.city;
newUser.phone = signupRequest.phone;
try {
// insert also updates id of newUser, we can directly return newUser
await this.userRepository.insert(newUser);
return newUser;
} catch (err) {
Logger.error(JSON.stringify(err));
throw new ConflictException();
}
}
public async updatePassword(
userId: number,
passwordHash: string,
): Promise<void> {
const userEntity = await this.userRepository.findOne(userId);
if (isNullOrUndefined(userEntity)) {
Logger.warn(
`Password chage of non-existend account with id ${userId} is rejected.`,
);
throw new NotFoundException();
}
userEntity.passwordHash = <PASSWORD>;
await this.userRepository.update(userEntity.id, userEntity);
}
public async updateUser(userEntity: User): Promise<void> {
// TODO: Email update should be seperated
// TODO: Add validation
try {
await this.userRepository.update(userEntity.id, userEntity);
} catch (err) {
Logger.warn(JSON.stringify(err));
throw new BadRequestException();
}
}
public async getLeaderBoard(limit: number): Promise<GetLeaderboardResponse> {
let creationPoints: Array<{ uid: number; spts: number }> = null;
creationPoints = await getConnection()
.createEntityManager()
.query(
// tslint:disable-next-line: max-line-length
`SELECT uid, sum(pts) as spts FROM (SELECT u.id as uid, 2 * SUM(e.point) as pts FROM user u, event e WHERE u.id=e.creatorId AND e.approved = TRUE GROUP BY u.id UNION SELECT u.id as uid, SUM(e.point) AS pts FROM user u, event e, \`user-event\` ue WHERE u.id=ue.userId and e.id=ue.eventId and ue.approved = TRUE GROUP BY u.id ) as tbl GROUP BY uid HAVING spts IS NOT NULL ORDER BY spts DESC LIMIT ${limit};`,
);
const users = await this.userRepository.findByIds(
creationPoints.map(cP => cP.uid),
);
return new GetLeaderboardResponse(users.map(user => toUserModel(user)));
}
public async getLeaderBoardOfCity(
limit: number,
city: string,
): Promise<GetLeaderboardResponse> {
let creationPoints: Array<{ uid: number; spts: number }> = null;
creationPoints = await getConnection()
.createEntityManager()
.query(
// tslint:disable-next-line: max-line-length
`SELECT uid, sum(pts) as spts FROM (SELECT u.id as uid, 2 * SUM(e.point) as pts FROM user u, event e WHERE u.id=e.creatorId AND e.approved = TRUE AND e.city LIKE ${city} GROUP BY u.id UNION SELECT u.id as uid, SUM(e.point) AS pts FROM user u, event e, \`user-event\` ue WHERE u.id=ue.userId and e.id=ue.eventId AND e.city LIKE ${city} and ue.approved = TRUE GROUP BY u.id ) as tbl GROUP BY uid HAVING spts IS NOT NULL ORDER BY spts DESC LIMIT ${limit};`,
);
const users = await this.userRepository.findByIds(
creationPoints.map(cP => cP.uid),
);
return new GetLeaderboardResponse(users.map(user => toUserModel(user)));
}
}
|
<reponame>wangxy1994/mydevkit
package com.wangxy.exoskeleton.service.impl;
import java.util.List;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import com.wangxy.exoskeleton.entity.DictItem;
import com.wangxy.exoskeleton.mapper.DictItemMapper;
import com.wangxy.exoskeleton.service.IDictItemService;
/**
 * Default {@link IDictItemService} implementation backed by the MyBatis
 * {@link DictItemMapper}; all methods delegate directly to the mapper.
 */
@Service
public class DictItemServiceImpl implements IDictItemService {
    @Autowired
    DictItemMapper dictItemMapper;

    /** Looks up a single item by its composite key (item code, entry code, language). */
    @Override
    public DictItem getDictItem(String dictItemCode, String dictEntryCode, String lang) {
        return dictItemMapper.selectByPrimaryKey(dictItemCode,dictEntryCode,lang);
    }

    /** Inserts a new dictionary item row. */
    @Override
    public void addDictItem(DictItem dictItem) {
        dictItemMapper.insert(dictItem);
    }

    /**
     * Returns the Chinese items for an entry: rows stored with a blank (" ")
     * language code take precedence; otherwise falls back to "zh_CN".
     */
    @Override
    public List<DictItem> getCnDictItems(String dictEntryCode) {
        // Blank-language rows act as language-neutral defaults — presumably by
        // convention of the underlying table; confirm with the schema.
        List<DictItem> nullDictItem = dictItemMapper.getDictItems(dictEntryCode, " ");
        if (nullDictItem==null||nullDictItem.size()==0) {
            return dictItemMapper.getDictItems(dictEntryCode, "zh_CN");
        }else {
            return nullDictItem;
        }
    }

    /** Returns all items for an entry in the given language. */
    @Override
    public List<DictItem> getDictItems(String dictEntryCode, String lang) {
        return dictItemMapper.getDictItems(dictEntryCode, lang);
    }
}
|
// Application entry point: configures body parsing, mounts the route tree,
// and starts the HTTP server on the configured port.
const express = require('express');
const bodyParser = require('body-parser');
const { PORT } = require('./config');
const initRoot = require('./routes');

const server = express();
// Parse application/x-www-form-urlencoded request bodies.
server.use(bodyParser.urlencoded({ extended: true }));
// Register all application routes.
initRoot(server);
server.listen(PORT, function () {
  console.log(`Server is running with port ${PORT}`);
});
#!/bin/bash
# Evaluate the m3p few-shot xFlickrCO retrieval model (Japanese, 1-shot):
# picks the best learning rate from the training logs, then runs eval_retrieval.
TASK=8
SHOT=1
LANG=ja
MODEL=m3p
MODEL_CONFIG=m3p_base
TASKS_CONFIG=iglue_test_tasks_X101.dtu
TRTASK=RetrievalxFlickrCO${LANG}_${SHOT}
TETASK=RetrievalxFlickrCO${LANG}
TEXT_PATH=/home/projects/ku_00062/data/xFlickrCO/annotations/${LANG}/test.jsonl
FEAT_PATH=/home/projects/ku_00062/data/xFlickrCO/features/xflickrco-test_X101.lmdb
here=$(pwd)
source /home/projects/ku_00062/envs/iglue/bin/activate
cd ../../../../../../volta
# Select the learning rate whose final logged score is highest.
best=-1
best_lr=-1
for lr in 1e-4 5e-5 1e-5; do
    f=${here}/train.${lr}.log
    # The score is the 4th space-separated field of the log's last line.
    s=`tail -n1 "$f" | cut -d ' ' -f 4`
    d=$(echo "$s>$best" | bc)
    if [[ $d -eq 1 ]]; then
        best=$s
        best_lr=$lr
    fi
done
echo "Best lr: " $best_lr
PRETRAINED=/home/projects/ku_00062/checkpoints/iglue/few_shot/xflickrco/${TRTASK}/${MODEL}/${best_lr}/RetrievalFlickr30k_${MODEL_CONFIG}/pytorch_model_best.bin
OUTPUT_DIR=/home/projects/ku_00062/results/iglue/few_shot/xflickrco/${MODEL}/${best_lr}/${TRTASK}_${MODEL_CONFIG}/$TETASK/test
python eval_retrieval.py \
    --bert_model /home/projects/ku_00062/huggingface/xlm-roberta-base --config_file config/${MODEL_CONFIG}.json \
    --from_pretrained ${PRETRAINED} --is_m3p \
    --tasks_config_file config_tasks/${TASKS_CONFIG}.yml --task $TASK \
    --split test_${LANG} --batch_size 1 --num_subiters 4 \
    --caps_per_image 1 --val_annotations_jsonpath ${TEXT_PATH} --val_features_lmdbpath ${FEAT_PATH} \
    --output_dir ${OUTPUT_DIR}
# BUG FIX: the command above previously ended with a trailing backslash,
# which passed `deactivate` as an argument to eval_retrieval.py instead of
# running it as a command.
deactivate
|
<reponame>schinmayee/nimbus<gh_stars>10-100
//#####################################################################
// Copyright 2002-2007, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>.
// This file is part of PhysBAM whose distribution is governed by the license contained in the accompanying file PHYSBAM_COPYRIGHT.txt.
//#####################################################################
// Class SPHERE
//#####################################################################
#include <PhysBAM_Tools/Log/LOG.h>
#include <PhysBAM_Tools/Math_Tools/choice.h>
#include <PhysBAM_Tools/Math_Tools/cube.h>
#include <PhysBAM_Tools/Vectors/VECTOR.h>
#include <PhysBAM_Geometry/Basic_Geometry/SPHERE.h>
using namespace PhysBAM;
//#####################################################################
// Function Sector_Volumes
//#####################################################################
// Also see http://mathworld.wolfram.com/CircularSegment.html for fast approximate formulas
// 2D case: computes the areas ("volumes") of the four regions of a circle cut
// by the horizontal and vertical lines through `origin`. Four geometric cases
// are handled, chosen by where `origin` sits relative to the circle.
template<class T> void Sector_Volumes_Helper(const SPHERE<VECTOR<T,2> >& circle,const VECTOR<T,2>& origin,T volumes[4],const T thickness_over_two)
{
typedef VECTOR<T,2> TV;
T radius=circle.radius;
TV center=circle.center;
// sectors in usual order (left to right, bottom to top)
// Case 1: origin inside the circle — each sector is a right triangle formed by
// the axis segments inside the circle plus a circular-segment correction.
if(circle.Inside(origin,thickness_over_two)){
T radius_squared=sqr(radius),y_discriminant=sqrt(radius_squared-sqr(origin.y-center.y)),x_discriminant=sqrt(radius_squared-sqr(origin.x-center.x));
T x1=center.x+y_discriminant,x2=center.x-y_discriminant,y1=center.y+x_discriminant,y2=center.y-x_discriminant; // intersection points
T xl1=x1-origin.x,xl2=origin.x-x2,yl1=y1-origin.y,yl2=origin.y-y2; // the lengths of the axes inside the circle
T ry_times_x_discriminant=(center.y-origin.y)*x_discriminant,rx_times_y_discriminant=(center.x-origin.x)*y_discriminant;
volumes[0]=(T).5*xl2*yl2+circle.Circular_Segment_Area(max((T)0,radius-sqrt((T).5*(radius_squared+ry_times_x_discriminant+rx_times_y_discriminant))));
volumes[1]=(T).5*xl1*yl2+circle.Circular_Segment_Area(max((T)0,radius-sqrt((T).5*(radius_squared+ry_times_x_discriminant-rx_times_y_discriminant))));
volumes[2]=(T).5*xl2*yl1+circle.Circular_Segment_Area(max((T)0,radius-sqrt((T).5*(radius_squared-ry_times_x_discriminant+rx_times_y_discriminant))));
volumes[3]=(T).5*xl1*yl1+circle.Circular_Segment_Area(max((T)0,radius-sqrt((T).5*(radius_squared-ry_times_x_discriminant-rx_times_y_discriminant))));}
// Case 2: origin outside the circle but inside its bounding box — one sector
// is empty, two are circular segments, and the remainder goes to the fourth.
else if(circle.Bounding_Box().Inside(origin,thickness_over_two)){
T horizontal_area=circle.Circular_Segment_Area(max((T)0,radius-abs(center.x-origin.x))),vertical_area=circle.Circular_Segment_Area(max((T)0,radius-abs(center.y-origin.y)));
T remaining_area=circle.Size()-horizontal_area-vertical_area;
if(origin.x>center.x){
if(origin.y>center.y){volumes[0]=remaining_area;volumes[1]=horizontal_area;volumes[2]=vertical_area;volumes[3]=0;}
else{volumes[0]=vertical_area;volumes[1]=0;volumes[2]=remaining_area;volumes[3]=horizontal_area;}}
else{
if(origin.y>center.y){volumes[0]=horizontal_area;volumes[1]=remaining_area;volumes[2]=0;volumes[3]=vertical_area;}
else{volumes[0]=0;volumes[1]=vertical_area;volumes[2]=horizontal_area;volumes[3]=remaining_area;}}}
// Case 3a: only the vertical line through origin.x cuts the circle — split
// the disc into a left and a right piece.
else if(circle.Bounding_Box().Get_Horizontal_Box().Inside(VECTOR<T,1>(origin.x),thickness_over_two)){ // cuts through vertically
T left,right;
T h=origin.x-center.x;
if(h>0){right=circle.Circular_Segment_Area(max((T)0,radius-h));left=circle.Size()-right;}
else{left=circle.Circular_Segment_Area(max((T)0,radius+h));right=circle.Size()-left;}
if(origin.y>center.y){volumes[0]=left;volumes[1]=right;volumes[2]=volumes[3]=0;}
else{volumes[2]=left;volumes[3]=right;volumes[0]=volumes[1]=0;}}
// Case 3b: only the horizontal line through origin.y cuts the circle.
else if(circle.Bounding_Box().Get_Vertical_Box().Inside(VECTOR<T,1>(origin.y),thickness_over_two)){
T top,bottom;
T h=origin.y-center.y;
if(h>0){top=circle.Circular_Segment_Area(max((T)0,radius-h));bottom=circle.Size()-top;}
else{bottom=circle.Circular_Segment_Area(max((T)0,radius+h));top=circle.Size()-bottom;}
if(origin.x>center.x){volumes[0]=bottom;volumes[2]=top;volumes[1]=volumes[3]=0;}
else{volumes[1]=bottom;volumes[3]=top;volumes[0]=volumes[2]=0;}}
// Case 4: neither line cuts the circle — the whole disc lies in one sector.
else{
for(int i=0;i<4;i++) volumes[i]=0;
volumes[(origin.y>center.y?0:2)+(origin.x>center.x?0:1)]=circle.Size();}
}
// 3D overload. As the TODO below notes, this currently approximates using the
// sphere's axis-aligned bounding box, so the eight returned values are box
// octant volumes rather than true spherical sector volumes.
template<class T> void Sector_Volumes_Helper(const SPHERE<VECTOR<T,3> >& sphere,const VECTOR<T,3>& origin,T volumes[8],const T thickness_over_two)
{
typedef VECTOR<T,3> TV;
// TODO: this is temporary; gives box sectors, not sphere sectors
const RANGE<TV> box=sphere.Bounding_Box();
TV positive_lengths,max_corner=box.Maximum_Corner(),edge_lengths=box.Edge_Lengths();
for(int i=1;i<=3;i++) positive_lengths(i)=clamp(max_corner(i)-origin(i),(T)0,edge_lengths(i));
// Bit j of octant index i selects the positive or negative side along axis j.
for(int i=0;i<8;i++){volumes[i]=1;for(int j=0;j<=2;j++) volumes[i]*=(i&(1<<j))?positive_lengths(j+1):edge_lengths(j+1)-positive_lengths(j+1);}
}
// Public entry point: dispatches to the dimension-specific helper above.
// `volumes` has 1<<d entries (4 quadrants in 2D, 8 octants in 3D).
template<class TV> void SPHERE<TV>::
Sector_Volumes(const TV& origin,T volumes[1<<d],const T thickness_over_two) const
{
Sector_Volumes_Helper(*this,origin,volumes,thickness_over_two);
}
//#####################################################################
// Function Octant_Volume_Helper
//#####################################################################
// Closed-form building block shared by the octant-volume formulas below.
// x and y are radius-normalized coordinates; x2, y2 are their squares passed
// in precomputed by the callers (Octant_Volume_Internal / Octant_Volume_Wedge).
template<class T> T Octant_Volume_Helper(T x,T x2,T y,T y2)
{
T s2=max(1-x2-y2,(T)0),s=sqrt(s2),twoxys=2*x*y*s;return ((T)1/6)*(atan2(-twoxys,s2-x2*y2)+(3-y2)*y*(atan2(x,s)-(T)one_fourth_pi)+(3-x2)*x*(atan2(y,s)-(T)one_fourth_pi)+twoxys);
}
//#####################################################################
// Function Octant_Volume_Internal
//#####################################################################
// Unit-sphere octant volume for the case where the octant corner p lies
// inside the sphere (the caller Octant_Volume checks |p| <= 1 first).
template<class T> T Octant_Volume_Internal(const VECTOR<T,3>& p)
{
T x2=sqr(p.x),y2=sqr(p.y),z2=sqr(p.z);return Octant_Volume_Helper(p.x,x2,p.y,y2)+Octant_Volume_Helper(p.x,x2,p.z,z2)+Octant_Volume_Helper(p.y,y2,p.z,z2)-p.x*p.y*p.z+(T)one_sixth_pi;
}
//#####################################################################
// Function Octant_Volume_Wedge
//#####################################################################
// Unit-sphere wedge term used by Octant_Volume when exactly two coordinate
// planes cut the sphere; the caller scales the result by radius^3.
template<class T> T Octant_Volume_Wedge(T x,T y)
{
T x2=sqr(x),y2=sqr(y);return 2*(Octant_Volume_Helper(x,x2,y,y2)-((T)pi/24)*((3-x2)*x+(3-y2)*y)+(T)one_sixth_pi);
}
//#####################################################################
// Function Octant_Volume_Cap
//#####################################################################
// Volume of a unit-sphere spherical cap cut at height z: the standard formula
// (pi/3)*(1-z)^2*(2+z), written here as (pi/3)*(z-1)^2*(z+2).
template<class T> T Octant_Volume_Cap(T z)
{
return ((T)pi/3)*sqr(z-1)*(z+2);
}
//#####################################################################
// Function Octant_Volume
//#####################################################################
// Volume of the intersection of this sphere with the axis-aligned octant
// {x>=min_corner.x, y>=min_corner.y, z>=min_corner.z}. Works in normalized
// coordinates p=(min_corner-center)/radius, sorts the coordinates so p.z is
// largest, and combines the cap/wedge/internal closed forms depending on how
// many coordinate planes actually cut the sphere; results scale by radius^3.
template<class TV> typename TV::SCALAR SPHERE<TV>::
Octant_Volume(const VECTOR<T,3>& min_corner) const
{
STATIC_ASSERT(d==3);
TV p((min_corner-center)/radius);T r3=cube(radius);
exchange_sort(p.x,p.y,p.z); // x <= y <= z
// Trivial & cheap cases (none, all, cap)
if(p.z>=1) return 0;
if(p.z<=-1) return (T)four_thirds_pi*r3;
if(p.y<=-1) return Octant_Volume_Cap(p.z)*r3;
// Inside
if(p.Magnitude_Squared()<=1) return Octant_Volume_Internal(p)*r3;
// all coordinates positive, not inside - no intersection
if(p.x>=0) return 0;
// two coordinates positive, not inside - wedge or no intersection
if(p.y>=0){if(sqr(p.y)+sqr(p.z)>=1) return 0;return Octant_Volume_Wedge(p.y,p.z)*r3;}
// one coordinates positive - cap with zero, one, or two wedges removed
T vol=Octant_Volume_Cap(p.z)*r3;
if(p.z>=0){
T rz=1-sqr(p.z),ryz=rz-sqr(p.y);if(ryz<=0) return vol;
T rxz=rz-sqr(p.x);vol-=Octant_Volume_Wedge(-p.y,p.z)*r3;if(rxz<=0) return vol;
return vol-Octant_Volume_Wedge(-p.x,p.z)*r3;}
// all coordinates negative; no octant edges intersect sphere - sphere with zero, one, or two caps removed
vol-=Octant_Volume_Cap(-p.y)*r3;T ry=1-sqr(p.y),ryz=ry-sqr(p.z);if(ryz<=0 && p.x<=-1) return vol;
if(p.x>-1) vol-=Octant_Volume_Cap(-p.x)*r3;if(ryz<=0) return vol;
// all coordinates negative; two or three caps removed, one, two, or three wedges added back in to prevent duplicate removal
vol+=Octant_Volume_Wedge(-p.y,-p.z)*r3;T rxz=1-sqr(p.x)-sqr(p.z);if(rxz<=0) return vol;
vol+=Octant_Volume_Wedge(-p.x,-p.z)*r3;T rxy=ry-sqr(p.y);if(rxy<=0) return vol;
return vol+Octant_Volume_Wedge(-p.x,-p.y)*r3;
}
//#####################################################################
// Function Name
//#####################################################################
// Human-readable type name for diagnostics/registration; only the 2D and 3D
// vector instantiations are supported.
template<class TV> std::string SPHERE<TV>::
Name()
{
if(TV::m==2) return "CIRCLE<T>";
else if(TV::m==3) return "SPHERE<T>";
PHYSBAM_NOT_IMPLEMENTED();
}
//#####################################################################
// Explicit template instantiations for float/double in 2D/3D. The P(...)
// macro forwards its arguments verbatim so the comma inside VECTOR<T,d> can
// pass through a single macro parameter intact.
#define P(...) __VA_ARGS__
#define INSTANTIATION_HELPER_T_TV(T,TV,d) \
template void SPHERE<TV>::Sector_Volumes(const TV& origin,T volumes[1<<d],const T thickness_over_two) const; \
template std::string SPHERE<TV>::Name();
#define INSTANTIATION_HELPER(T,d) INSTANTIATION_HELPER_T_TV(T,P(VECTOR<T,d> ),d)
INSTANTIATION_HELPER(float,2)
INSTANTIATION_HELPER(float,3)
template float SPHERE<VECTOR<float,3> >::Octant_Volume(const VECTOR<float,3>& min_corner) const;
#ifndef COMPILE_WITHOUT_DOUBLE_SUPPORT
INSTANTIATION_HELPER(double,2)
INSTANTIATION_HELPER(double,3)
template double SPHERE<VECTOR<double,3> >::Octant_Volume(const VECTOR<double,3>& min_corner) const;
#endif
|
<filename>middleware/auth.js
const User = require("../model/user");
const jwt = require("jsonwebtoken");
// Express middleware: verifies the Bearer token from the Authorization header
// and attaches the matching user to req.user; responds 401 on any failure.
module.exports.authCheck = async (req, res, next) => {
  try {
    // Expect "Authorization: Bearer <token>". Previously a missing header
    // surfaced as an opaque TypeError; fail with an explicit message instead
    // (the outcome is the same 401, but the log is diagnosable).
    const authHeader = req.get("Authorization");
    if (!authHeader) {
      throw new Error("Missing Authorization header");
    }
    const token = authHeader.split(" ")[1];
    const data = jwt.verify(token, process.env.ACCESS_TOKEN_SECRET);
    const user = await User.findOne({
      username: data.username,
    });
    if (!user) {
      // Previously threw a messageless Error; also stopped logging the full
      // user record, which leaked the password hash into the logs.
      throw new Error(`No user found for token username`);
    }
    req.user = user;
    next();
  } catch (error) {
    console.log(error);
    res.status(401).send({ error: "Not authorized to access this resource" });
  }
};
|
export trec_eval=./utils/trec_eval.9.0.4/trec_eval
export gdeval=./utils/gdeval.pl
## **************************************************
export qrel_path=../data/prepro_target_data/clueweb09/fold_0/qrels # qrels of all folds are the same
export result_folder=../results/test__metafine_clueweb09
export result_fold_0=$result_folder/0721-0511-42__metafine_bert__clueweb09__fold_0
export result_fold_1=$result_folder/0721-0507-44__metafine_bert__clueweb09__fold_1
export result_fold_2=$result_folder/0721-0509-30__metafine_bert__clueweb09__fold_2
export result_fold_3=$result_folder/0721-0510-33__metafine_bert__clueweb09__fold_3
export result_fold_4=$result_folder/0721-1735-26__metafine_bert__clueweb09__fold_4
export save_folder_name=metafine_bert_cw09
export save_dir=../combined_features/$save_folder_name
## **************************************************
## combine trec & feature files
python ./utils/combine_features.py --result_fold_0 $result_fold_0 \
    --result_fold_1 $result_fold_1 \
    --result_fold_2 $result_fold_2 \
    --result_fold_3 $result_fold_3 \
    --result_fold_4 $result_fold_4 \
    --qrel_path $qrel_path \
    --save_dir $save_dir
# BUG FIX: the last argument above previously ended with a trailing backslash,
# which line-continued into the comment banner below and passed "##" etc. as
# extra arguments to combine_features.py.
## **************************************************
## evaluate the combined run
$gdeval -c $qrel_path $save_dir/$save_folder_name.trec
$trec_eval -m ndcg_cut.20 $qrel_path $save_dir/$save_folder_name.trec
$trec_eval -m P.20 $qrel_path $save_dir/$save_folder_name.trec
# Import relevant libraries
import requests
from bs4 import BeautifulSoup
# Fetch a URL and return the raw response body, or None when the
# server answers with anything other than HTTP 200.
def get_webpage(url):
    response = requests.get(url)
    return response.content if response.status_code == 200 else None
# Pull all the anchor links out of a given web page.
def crawl_links(url):
    content = get_webpage(url)
    # get_webpage returns None on a failed fetch; treat that as "no links"
    # instead of letting BeautifulSoup raise on a None document.
    if content is None:
        return []
    soup = BeautifulSoup(content, 'html.parser')
    # Anchors without an href yield None from .get(); skip those so the
    # result contains only actual link targets.
    return [link.get('href') for link in soup.find_all('a')
            if link.get('href') is not None]
# Pass the URL for crawling
# NOTE(review): these statements run at import time and hit the network;
# guard with `if __name__ == "__main__":` if this module is ever imported.
url = 'https://www.example.com'
links = crawl_links(url)
print(links)
import re
from bs4 import BeautifulSoup
def extract_content_from_html(input_file_path: str, output_file_path: str) -> None:
    """Extract paragraph text from an HTML file into a plain-text file.

    Only ``<p>`` tags that are *direct* children of a ``<div>`` are taken.
    Citation markers such as ``[12]`` are stripped, and each paragraph is
    written to ``output_file_path`` on its own line.
    """
    with open(input_file_path, "r") as source:
        markup = source.read()
    parsed = BeautifulSoup(markup, 'html.parser')
    citation_pattern = re.compile(r'\[\d+\]')
    with open(output_file_path, "w") as sink:
        for container in parsed.find_all('div'):
            # recursive=False restricts the scan to immediate children.
            for child in container.find_all(True, recursive=False):
                if child.name != 'p':
                    continue
                cleaned = citation_pattern.sub('', child.get_text())
                sink.write(cleaned + "\n")
# Example usage
# NOTE(review): runs at import time and requires "input.html" to exist in the
# working directory; guard with `if __name__ == "__main__":` if imported.
extract_content_from_html("input.html", "output.txt")
/* jshint esversion: 6 */
/* jshint expr: true */
/**
 * Raise x to the power n by repeated multiplication.
 * Mirrors the original loop exactly: any n <= 0 yields 1, and a fractional
 * n performs ceil(n) multiplications.
 */
function pow(x, n) {
  let product = 1;
  let remaining = n;
  while (remaining > 0) {
    product *= x;
    remaining -= 1;
  }
  return product;
}
exports.pow = pow;
/**
 * Convert a 6-digit hex color string to an [r, g, b] array of integers.
 * Accepts both "rrggbb" and the common "#rrggbb" form (the original
 * returned NaN components when a leading "#" was present).
 */
function hexToRgb(hex) {
  // Tolerate a leading "#" by stripping it before parsing.
  var normalized = hex.charAt(0) === "#" ? hex.substring(1) : hex;
  var red = parseInt(normalized.substring(0, 2), 16);
  var green = parseInt(normalized.substring(2, 4), 16);
  var blue = parseInt(normalized.substring(4, 6), 16);
  return [red, green, blue];
}
exports.hexToRgb = hexToRgb;
/**
 * Convert red/green/blue components (0-255) to a concatenated 6-character
 * hex string with no leading "#". Each component is zero-padded.
 */
function rgbToHex(red, green, blue) {
  var parts = [red, green, blue].map(function (component) {
    return pad(component.toString(16));
  });
  return parts.join("");
}
exports.rgbToHex = rgbToHex;

/** Zero-pad a single-character hex string to two characters. */
function pad(hex) {
  return hex.length === 1 ? "0" + hex : hex;
}
<gh_stars>0
import { Pipe, PipeTransform } from '@angular/core';
@Pipe({
  name: 'price'
})
export class PricePipe implements PipeTransform {
  /**
   * Format a numeric price with two decimals and a comma as the decimal
   * separator, e.g. 1234.5 -> "1234,50".
   */
  transform(value: number): string {
    // toFixed already returns a string; the original chained a redundant
    // toString() onto it.
    return value.toFixed(2).replace('.', ',');
  }
}
|
<filename>src/route/order/query/router.js
const express = require('express');
// Handler for the initial order query; note the module keeps the original
// (misspelled) local name but requires the correctly named file.
const initalQuery = require('./initialQuery');
const router = express.Router();
// POST /:size -> run the initial order query; ":size" selects the page size.
router.post('/:size', initalQuery);
module.exports = router;
|
#!/usr/bin/env bash
# Build the kube-cert-manager image with both the versioned and "latest" tags,
# then push both. Abort on the first failure so a broken build is never pushed
# (the original would push stale images even when the build failed).
set -euo pipefail

docker build -t palmstonegames/kube-cert-manager:0.3.1 -t palmstonegames/kube-cert-manager:latest "$(dirname "$0")"
docker push palmstonegames/kube-cert-manager:0.3.1
docker push palmstonegames/kube-cert-manager:latest
<reponame>thomastay/collectable<gh_stars>100-1000
import { ComparatorFn, MutationContext, SelectorFn, isBoolean, isMutationContext } from '@collectable/core';
import { SortedSetStructure, emptySet } from '../internals';
/**
 * Creates a new, empty sorted set.
 *
 * Accepted call shapes (see overloads):
 *   empty()                          — immutable, natural ordering
 *   empty(compare)                   — immutable, custom comparator
 *   empty(compare, select)           — compare a projection of each item
 *   empty(mutable)                   — mutability flag/context first
 *   empty(mutable, compare[, select])
 */
export function empty<T> (compare?: ComparatorFn<T>): SortedSetStructure<T>;
export function empty<T, K> (compare: ComparatorFn<K>, select: SelectorFn<T, K>): SortedSetStructure<T>;
export function empty<T> (mutable: boolean|MutationContext, compare?: ComparatorFn<T>): SortedSetStructure<T>;
export function empty<T, K> (mutable: boolean|MutationContext, compare: ComparatorFn<K>, select: SelectorFn<T, K>): SortedSetStructure<T>;
export function empty<T, K> (
  arg0?: boolean|MutationContext|ComparatorFn<K|T>,
  arg1?: ComparatorFn<K|T>|SelectorFn<T, K>,
  select?: SelectorFn<T, K>
): SortedSetStructure<T> {
  var mutable: boolean|MutationContext;
  var compare: ComparatorFn<K|T>|undefined;
  if(typeof arg0 === 'function') {
    // Comparator-first shapes: an optional selector may follow.
    compare = arg0;
    if(typeof arg1 === 'function') {
      select = <SelectorFn<T, K>>arg1;
    }
    mutable = false;
  }
  else {
    // Mutability-first shapes.
    mutable = isMutationContext(arg0) || isBoolean(arg0) ? arg0 : false;
    // BUG FIX: the original discarded arg1 here, so the comparator passed to
    // empty(mutable, compare[, select]) was silently ignored.
    if(typeof arg1 === 'function') {
      compare = <ComparatorFn<K|T>>arg1;
    }
  }
  return emptySet<T, K>(mutable, compare, select);
}
|
#!/usr/bin/env bash
# bash
# Cross-compile $BIN_FILE for the platform(s) entered interactively and stage
# each build together with its runtime configuration under dist/<os>-<arch>/.
BIN_FILE=ms-email-restapi
read -p 'platform: ' platform
# platforms=("darwin/amd64" "linux/amd64" "linux/386" "windows/amd64" "windows/386")
# Unquoted on purpose: word-splitting lets the user enter several platforms.
platforms=($platform)
for platform in "${platforms[@]}"
do
    # Split "os/arch" into its two components via substitution + word split.
    platform_split=(${platform//\// })
    GOOS=${platform_split[0]}
    GOARCH=${platform_split[1]}
    output_name=$BIN_FILE
    # Windows binaries need the .exe suffix.
    if [ $GOOS = "windows" ]; then
        output_name+='.exe'
    fi
    echo "build \"$output_name\" for $GOOS/$GOARCH ..."
    env GOOS=$GOOS GOARCH=$GOARCH go build -o $output_name
    # Stop the whole script on the first failed build.
    if [ $? -ne 0 ]; then
        echo 'An error has occurred! Aborting the script execution...'
        exit 1
    fi
    OS_DIR=$GOOS'-'$GOARCH
    if [ -f "$output_name" ]; then
        # Recreate the per-platform dist directory from scratch.
        if [ -d "dist/$OS_DIR/" ]; then
            rm -rf dist/$OS_DIR/
            sleep 2
        fi
        mkdir -p dist/$OS_DIR/conf/casbin
        mkdir -p dist/$OS_DIR/conf/data
        mv $output_name dist/$OS_DIR/
        # Ship the runtime configuration, auth model, test data and web assets.
        cp conf/config.yaml dist/$OS_DIR/conf/
        cp conf/casbin/casbin_rbac_rest_model.conf dist/$OS_DIR/conf/casbin/
        cp conf/data/test-data.yaml dist/$OS_DIR/conf/data/
        cp -R www dist/$OS_DIR/
    fi
done
#!/usr/bin/env bash
# Regenerate the bazeldnf rpmtree targets (test image, libvirt-devel,
# launcher base, libguestfs-tools) for x86_64 and aarch64, then prune unused
# RPMs and refresh the tar2files adapter targets.
set -ex
source hack/common.sh
source hack/config.sh
# Pinned package versions (epoch:version-release) to keep builds reproducible.
LIBVIRT_VERSION=0:7.0.0-12
SEABIOS_VERSION=0:1.14.0-1
QEMU_VERSION=15:5.2.0-15
# Define some base packages to avoid dependency flipping
# since some dependencies can be satisfied by multiple packages
basesystem="glibc-langpack-en coreutils-single libcurl-minimal curl-minimal fedora-logos-httpd vim-minimal"
# get latest repo data from repo.yaml
bazel run \
    --config=${ARCHITECTURE} \
    //:bazeldnf -- fetch
# create a rpmtree for our test image with misc. tools.
bazel run \
    --config=${ARCHITECTURE} \
    //:bazeldnf -- rpmtree --public --name testimage_x86_64 \
    $basesystem \
    qemu-img \
    which \
    nginx \
    scsi-target-utils \
    procps-ng \
    iputils \
    e2fsprogs
bazel run \
    --config=${ARCHITECTURE} \
    //:bazeldnf -- rpmtree --public --arch=aarch64 --name testimage_aarch64 \
    $basesystem \
    qemu-img \
    which \
    nginx \
    scsi-target-utils \
    procps-ng \
    iputils \
    e2fsprogs
# create a rpmtree for libvirt-devel. libvirt-devel is needed for compilation and unit-testing.
bazel run \
    --config=${ARCHITECTURE} \
    //:bazeldnf -- rpmtree --public --name libvirt-devel_x86_64 \
    $basesystem \
    libvirt-devel-${LIBVIRT_VERSION} \
    keyutils-libs \
    krb5-libs \
    libmount \
    lz4-libs
bazel run \
    --config=${ARCHITECTURE} \
    //:bazeldnf -- rpmtree --public --arch=aarch64 --name libvirt-devel_aarch64 \
    $basesystem \
    libvirt-devel-${LIBVIRT_VERSION} \
    keyutils-libs \
    krb5-libs \
    libmount \
    lz4-libs
# create a rpmtree for virt-launcher and virt-handler. This is the OS for our node-components.
bazel run \
    --config=${ARCHITECTURE} \
    //:bazeldnf -- rpmtree --public --name launcherbase_x86_64 \
    $basesystem \
    libvirt-daemon-driver-qemu-${LIBVIRT_VERSION} \
    libvirt-client-${LIBVIRT_VERSION} \
    qemu-kvm-core-${QEMU_VERSION} \
    seabios-${SEABIOS_VERSION} \
    xorriso \
    selinux-policy selinux-policy-targeted \
    nftables \
    findutils \
    procps-ng \
    iptables \
    tar
# NOTE: the aarch64 launcher tree intentionally omits seabios (x86-only BIOS).
bazel run \
    --config=${ARCHITECTURE} \
    //:bazeldnf -- rpmtree --public --arch=aarch64 --name launcherbase_aarch64 \
    $basesystem \
    libvirt-daemon-driver-qemu-${LIBVIRT_VERSION} \
    libvirt-client-${LIBVIRT_VERSION} \
    qemu-kvm-core-${QEMU_VERSION} \
    xorriso \
    selinux-policy selinux-policy-targeted \
    nftables \
    findutils \
    procps-ng \
    iptables \
    tar
# libguestfs tree: aggressively exclude heavyweight transitive dependencies
# (kernels, interpreters, graphics stacks) that the tools do not need.
bazel run \
    //:bazeldnf -- rpmtree --public --name libguestfs-tools \
    $basesystem \
    libguestfs \
    libguestfs-tools \
    --force-ignore-with-dependencies '^(kernel-|linux-firmware)' \
    --force-ignore-with-dependencies '^(python[3]{0,1}-|perl[3]{0,1}-)' \
    --force-ignore-with-dependencies '^(mesa-|libwayland-|selinux-policy|mozjs60)' \
    --force-ignore-with-dependencies '^(libvirt-daemon-driver-storage|swtpm)' \
    --force-ignore-with-dependencies '^(man-db|mandoc)' \
    --force-ignore-with-dependencies '^(dbus|glusterfs|libX11|qemu-kvm-block|trousers|usbredir)' \
    --force-ignore-with-dependencies '^(gstreamer1|kbd|libX)'
# remove all RPMs which are no longer referenced by a rpmtree
bazel run \
    --config=${ARCHITECTURE} \
    //:bazeldnf -- prune
# FIXME: For an unknown reason the run target afterwards can get
# out dated tar files, build them explicitly first.
bazel build \
    --config=${ARCHITECTURE} \
    //rpm:libvirt-devel_x86_64
bazel build \
    --config=${ARCHITECTURE} \
    //rpm:libvirt-devel_aarch64
# update tar2files targets which act as an adapter between rpms
# and cc_library which we need for virt-launcher and virt-handler
bazel run \
    --config=${ARCHITECTURE} \
    //rpm:ldd_x86_64
bazel run \
    --config=${ARCHITECTURE} \
    //rpm:ldd_aarch64
|
package oauth2
import (
"fmt"
"net/http"
"strconv"
"strings"
"github.com/astaxie/beego"
"golang.org/x/net/context"
"golang.org/x/oauth2"
"github.com/aaawoyucheng/wayne/src/backend/util/logs"
)
// init registers all configured OAuth2 providers at package load time.
func init() {
	NewOAuth2Service()
}
var (
	// OAuth2Infos maps provider name -> parsed configuration section.
	OAuth2Infos = make(map[string]*OAuth2Info)
	// OAutherMap maps provider name -> ready-to-use OAuth2 client.
	OAutherMap = make(map[string]OAuther)
)
const (
	// OAuth2TypeDefault is the only provider name currently registered;
	// its settings live in the "auth.oauth2" config section.
	OAuth2TypeDefault = "oauth2"
)
// BasicUserInfo is the minimal identity returned by a provider's
// user-info endpoint.
type BasicUserInfo struct {
	Name    string `json:"name"`
	Email   string `json:"email"`
	Display string `json:"display"`
}
// OAuth2Info holds one "auth.<name>" configuration section.
type OAuth2Info struct {
	ClientId     string
	ClientSecret string
	Scopes       []string
	AuthUrl      string
	TokenUrl     string
	ApiUrl       string // get user info
	Enabled      bool
	ApiMapping   map[string]string
}
// OAuther is the behavior required from an OAuth2 provider implementation.
type OAuther interface {
	UserInfo(token string) (*BasicUserInfo, error)
	AuthCodeURL(state string, opts ...oauth2.AuthCodeOption) string
	Exchange(ctx context.Context, code string, opts ...oauth2.AuthCodeOption) (*oauth2.Token, error)
	Client(ctx context.Context, t *oauth2.Token) *http.Client
}
// NewOAuth2Service reads every "auth.<name>" section from the beego
// configuration and, for each enabled section, builds an OAuth2Info and a
// matching OAuther, registering them in OAuth2Infos / OAutherMap.
// Disabled or unparsable sections are skipped with a log message.
func NewOAuth2Service() {
	allOauthes := []string{OAuth2TypeDefault}
	for _, name := range allOauthes {
		section, err := beego.AppConfig.GetSection("auth." + name)
		if err != nil {
			logs.Info("can't enable oauth "+name, err)
			continue
		}
		enabled, err := strconv.ParseBool(section["enabled"])
		if err != nil {
			logs.Info("parse enabled oauth error", err)
			continue
		}
		if !enabled {
			continue
		}
		info := &OAuth2Info{
			ClientId:     section["client_id"],
			ClientSecret: section["client_secret"],
			Scopes:       strings.Split(section["scopes"], ","),
			AuthUrl:      section["auth_url"],
			TokenUrl:     section["token_url"],
			ApiUrl:       section["api_url"],
			Enabled:      enabled,
		}
		info.ApiMapping = make(map[string]string)
		if section["api_mapping"] != "" {
			for _, km := range strings.Split(section["api_mapping"], ",") {
				// SplitN keeps any further ":" inside the mapped value (e.g.
				// URLs); the original Split + unchecked arr[1] panicked on a
				// malformed entry and truncated values containing a colon.
				arr := strings.SplitN(km, ":", 2)
				if len(arr) != 2 {
					logs.Info("ignoring malformed api_mapping entry: " + km)
					continue
				}
				info.ApiMapping[arr[0]] = arr[1]
			}
		}
		OAuth2Infos[OAuth2TypeDefault] = info
		config := oauth2.Config{
			ClientID:     info.ClientId,
			ClientSecret: info.ClientSecret,
			Endpoint: oauth2.Endpoint{
				AuthURL:  info.AuthUrl,
				TokenURL: info.TokenUrl,
			},
			RedirectURL: fmt.Sprintf("%s/login/oauth2/%s", section["redirect_url"], name),
			Scopes:      info.Scopes,
		}
		if name == OAuth2TypeDefault {
			OAutherMap[OAuth2TypeDefault] = &OAuth2Default{
				Config:     &config,
				ApiUrl:     info.ApiUrl,
				ApiMapping: info.ApiMapping,
			}
		}
	}
}
|
#!/bin/bash
# Integration tests that create a full cluster.
# -e: abort on first failure; -x: trace commands; pipefail: fail a pipeline
# if any stage fails.
set -exo pipefail
# Run from the repository root (two levels above this script).
cd $(dirname $(dirname $(realpath $0)))
make install
test/kind-cluster-network/e2e.sh
test/minikube-cluster-network/e2e.sh
|
const mongoose = require('mongoose')
const URI = `mongodb://localhost:27017/platzi-video`
/**
* Mongoose is connected to a local database
*/
module.exports = {
initDatabase: () => {
mongoose.connect(URI, { useNewUrlParser: true, useCreateIndex: true })
.then( () => console.log('Database connected to platzi-video'))
.catch(err => console.log(err))
}
}
|
<gh_stars>1-10
package org.slos.battle.abilities.attack;
import org.slos.battle.abilities.Ability;
import org.slos.battle.abilities.AbilityClassification;
import org.slos.battle.abilities.AbilityEffect;
import org.slos.battle.abilities.AbilityType;
import org.slos.battle.abilities.rule.AttackRule;
import org.slos.battle.abilities.rule.hit.RetaliateRule;
import org.slos.battle.abilities.rule.target.TargetRuleset;
/**
 * Attack-classified ability: when the owner is hit, a {@link RetaliateRule}
 * is applied so the owner strikes back at its attacker.
 */
public class RetaliateAbility extends Ability implements AbilityEffect {
	public RetaliateAbility() {
		super(AbilityType.RETALIATE, AbilityClassification.ATTACK);
	}
	@Override
	public TargetRuleset getTargetRuleset() {
		// Retaliation attaches to the owning monster itself.
		return TargetRuleset.SELF;
	}
	@Override
	public AttackRule getEffect() {
		// A fresh rule instance is returned on every call.
		return new RetaliateRule();
	}
}
|
#!/bin/bash
# Author: Aries Youssefian
#
# This script takes an image ID, converts it to a snapshot, attaches it to the host, converts it to a qcow2 on mancala0 on the host it's ran from.
#
# Must be run from a Symphony node.
# SCRIPT ASSUMES SOURCE NODE HAS ENOUGH SPACE ON mancala0
# Script also assumes only 1 project exists per source and destination domain (eg, default)
# if more projects exist, specify -r flag for the project
#
# Tested on Symphony 4.2.7.4
#
# Todo: Add flags for SSL
#
# Print a short description and the expected positional arguments.
display_usage() {
	echo "This script takes an image ID, converts it to a snapshot, attaches it to the host, converts it to a qcow2 on mancala0 on the host it's ran from."
	echo -e "\nUsage:\nsource-user-name source-domain source-password source-cluster-address source-image-id \n"
}
# Handle -h/--help first so it works with any argument count. The original
# compared "$#" (the argument COUNT) against "--help", so this branch could
# never trigger, and it was placed after the count check so it was also
# unreachable for a lone --help argument.
if [[ ( "$1" == "--help") || "$1" == "-h" ]]
then
	display_usage
	exit 0
fi
# if not exactly 5 arguments supplied, display usage
if [ $# -ne 5 ]
then
	display_usage
	exit 1
fi
# Variables
sourceuser=$1
sourcedomain=$2
sourcepassword=$3
sourcecluster=$4
sourceimageid=$5
sourcehostname="$(hostname)"
echo Getting snapshot of $sourceimageid
sourcesnapid="$(symp -k --url $sourcecluster -d $sourcedomain -u $sourceuser -p $sourcepassword image get-snapshot-from-pool $sourceimageid -f value)"
sourceimagename="$(symp -k --url $sourcecluster -d $sourcedomain -u $sourceuser -p $sourcepassword image get $sourceimageid -f value -c name)"
sourceclonedvolid="$(symp -k --url $sourcecluster -d $sourcedomain -u $sourceuser -p $sourcepassword volume create --source-id $sourcesnapid $sourceimagename -f value -c id)"
echo $sourceimageid has a snapshot ID of $sourcesnapid
echo $sourceimageid has a name of $sourceimagename
echo Cloning successful, cloned volume ID is $sourceclonedvolid
echo Attaching cloned volume $sourceclonedvolid to host $sourcehostname
sourcedevaddress="$(mancala volumes attach-to-host $sourceclonedvolid $sourcehostname --json | jq -r .attachments[].mountpoint)"
echo Successfully mounted cloned volume $sourceclonedvolid to host $sourcehostname at $sourcedevaddress
echo Beginning qemu-img conversion
qemu-img convert -f raw -O qcow2 -p $sourcedevaddress /mnt/mancala0/$sourceimagename.qcow2
echo Successfully completed conversion to /mnt/mancala0, filename is $sourceimagename.qcow2
# NOTE: the two messages below originally referenced the misspelled variable
# $sourceclonevolid (missing "d"), which expanded to an empty string.
echo Unattaching cloned volume $sourceclonedvolid from host $sourcehostname
mancala volumes detach-from-host $sourceclonedvolid $sourcehostname
echo Successfully detached cloned volume $sourceclonedvolid from host $sourcehostname
echo Deleting cloned volume $sourceclonedvolid
symp -k --url $sourcecluster -d $sourcedomain -u $sourceuser -p $sourcepassword volume remove $sourceclonedvolid
echo All done. You can find $sourceimagename.qcow2 on /mnt/mancala0 on node $sourcehostname
|
# Install the latest minikube release and place it on the PATH.
curl -Lo minikube https://storage.googleapis.com/minikube/releases/latest/minikube-linux-amd64 && chmod +x minikube && sudo mv minikube /usr/local/bin/
minikube version
# Install the matching stable kubectl.
curl -Lo kubectl https://storage.googleapis.com/kubernetes-release/release/$(curl -s https://storage.googleapis.com/kubernetes-release/release/stable.txt)/bin/linux/amd64/kubectl && chmod +x kubectl && mv kubectl /usr/local/bin/
kubectl version
# Quiet minikube prompts and keep kube config in the invoking user's home.
export MINIKUBE_WANTUPDATENOTIFICATION=false
export MINIKUBE_WANTREPORTERRORPROMPT=false
export MINIKUBE_HOME=$HOME
export CHANGE_MINIKUBE_NONE_USER=true
mkdir $HOME/.kube || true
touch $HOME/.kube/config
export KUBECONFIG=$HOME/.kube/config
# Install docker-machine.
base=https://github.com/docker/machine/releases/download/v0.16.0 &&
  curl -L $base/docker-machine-$(uname -s)-$(uname -m) >/tmp/docker-machine &&
  install /tmp/docker-machine /usr/local/bin/docker-machine
# "minikube -version" is not a valid flag; the subcommand is "version".
minikube version
# The original line began with a stray "--"; the intended command was yum.
yum -y install docker
rm -rf ~/.minikube
# VirtualBox driver setup (requires the virtualbox yum repo to be configured).
vim /etc/yum.repos.d/virtualbox.repo
yum install gcc make kernel-headers kernel-devel
yum -y install VirtualBox-5.2
minikube start --registry-mirror=https://registry.docker-cn.com
/sbin/rcvboxdrv setup
minikube start --registry-mirror=https://registry.docker-cn.com
wget -O - https://raw.githubusercontent.com/XiaoMi/naftis/master/tool/getlatest.sh | bash
# References (kept as comments — bare URLs are not executable commands):
# http://blog.gezhiqiang.com/2017/08/04/minikube/
# http://blog.51cto.com/purplegrape/2315451
#!/bin/sh
#title :gpg-helper.sh
#description :Wraps a sub set of GnuPG (gpg) and provides usage hints
#author :Max Goltzsche
#license :MIT
#note :GnuPG >=2.1.11 is required
#date :20170103
#version :0.9
# Print usage/help to stderr and exit 1. When COMMAND is set, the final grep
# filters the full help text down to the lines for that command; otherwise
# the complete help is shown (and the bad arguments are reported first).
# The here-document body is program output — do not edit it casually.
usage() {
	MANLOOKUP=
	if [ "$COMMAND" ]; then
		MANLOOKUP=" $COMMAND "
	else
		echo "Invalid arguments: $ARGS" >&2
	fi
	(
	cat <<-EOF
	gpg-helper is a simple wrapper script around $GNUPG (GnuPG). [GNUPG]
	It simplifies usage and encorporates some best practices.
	GnuPG configuration and keyring is stored in $GNUPGHOME. [GNUPGHOME]
	Usage: $0 COMMAND
	COMMAND
	  help                      Shows this help text.
	  gpgconfig                 Writes gpg.conf and dirmngr.conf interactively.
	  listkeys [KEYID|UID]      Lists all keys in your keyring.
	  listsecretkeys            Lists your secret keys.
	  genkey                    Generates a new key pair interactively.
	  editkey KEYID [CMD]       Edit the key interactively.
	  deletekey KEYID           Delete the key from the local keyring.
	  deletesecretkey KEYID     Delete the secret key from the local keyring.
	  genrevoke KEYID FILE      Generates a key revocation certificate.
	  fingerprint KEYID         Shows the key's fingerprint.
	  signkey KEYID             Signs a key in the keyring and prints it to stdout.
	  fexport KEYID [FILE]      Exports the public key to the provided file or stdout.
	  exportsecret KEYID [FILE] Exports the secret sub key to the proviced file or stdout.
	  fimport FILE              Imports the keys from a file into your keyring.
	  export KEYID…             Exports keys from your keyring to key servers.
	  import KEYID…             Imports keys from the key servers into your keyring.
	  search UID                Searches for available keys by UID on key servers.
	  refresh [KEYID]           Reloads all imported keys from the key servers.
	  encrypt -r UID… [FILE]    Encrypts the file using the recipient's public key.
	  decrypt [FILE]            Decrypts the file using your private key.
	  clearsign [FILE [OUT]]    Create signature containing plaintext content.
	  sign FILE [OUTSIGFILE]    Create detached signature using your private key.
	  verify [SIGFILE [FILE]]   Verify file using signature + signer's public key.
	PARAMETERS
	  UID    A user's identifier or name.
	         For instance 'Max Mustermann' or user@example.org.
	  KEYID  A keypair identified by hexadecimal ID.
	Examples:
	  Generate a keypair and a revocation certificate:
	    $0 genkey
	      pgp> 4 (creates RSA master key for key signing only, use 4096bit)
	      pgp> 4096 (choose high key strength for master)
	      pgp> 2y (do not choose more than 2 years, you can always extend time later)
	      ...
	    $0 genrevoke SUBKEYID OUTPUTREVOKECERTFILE
	  Generate a sub key to be used for encryption only + revoke certificate:
	    $0 editkey KEYID addkey
	      pgp> 6 (choose RSA encryption key)
	      pgp> 2048 (choose only 2048bit strength for faster computing ...)
	      pgp> 1y (... since it expires in a year)
	      pgp> save (save the key in the local keyring)
	    $0 genrevoke SUBKEYID OUTREVOKECERTFILE
	  Export your key to the public key servers:
	    $0 export KEYID
	  Search for a person's key on the key servers:
	    $0 search 'John Doe'
	  Import John's key, check its fingerprint, mark it as trusted and sign it:
	    $0 import 0xD954726E5B31B1DC
	    $0 fingerprint 0xD954726E5B31B1DC # Call John to confirm
	    $0 editkey 0xD954726E5B31B1DC trust # See OWNER TRUST
	    $0 signkey 0xD954726E5B31B1DC > enc-signed-key-john.asc
	    (Send the encrypted signed key file back to John so his key gains trust)
	  Encrypt a file for a specific recipient:
	    echo 'Hello!' | $0 encrypt -r 0xD954726E5B31B1DC
	  Encrypt a file for multiple recipients by name (resolved by UIDs in keyring):
	    $0 encrypt -r 'John Doe' -r user@example.org /secret/file
	  Decrypt an encrypted file (works only if you have the matching private key):
	    $0 decrypt /secret/file.asc
	  Extend a key's expiration date:
	    $0 editkey 0xA3E57D6E5B31B1FB
	      gpg> key 1
	      gpg> expire
	      Key is valid for? (0) 2y
	      gpg> save
	    $0 export 0xA3E57D6E5B31B1FB
	  Mark compromised key as revoked using your initially created revocation cert:
	    $0 fimport REVOCATIONCERTFILE
	    $0 export KEYID
	  Export a secret sub key with a separate password (to automate signing or for encryption on a mobile device):
	    $0 exportsecret KEYID [OUTFILE]
	Run $GNUPG --help for more options.
	Key type identifiers as listed in $GNUPG output:
	  sec: SECret key
	  ssb: Secret SuBkey
	  pub: PUBlic key
	  sub: public SUBkey
	If a key type identifier is suffixed with '#' it means it could not be
	found on the disk (e.g. sec#). This should be the case for the sec key on
	laptops and build servers that only need to work with ssb and sub keys.
	Key roles as listed in $GNUPG output:
	  A: key for authentication
	  C: key for certifying signatures
	  E: key for encryption
	  S: key for signing
	Best practices:
	  Your MASTER KEY (sec, pub) is your identity and should be kept very secret.
	  Use it for key creation, key signing and key revocation only!
	  Create SUB KEYS (ssb, sub) of your master key and use them for encryption and
	  signing! You can use a separate sub key per machine to sign files but to
	  decrypt files on multiple machines you need to copy one sub key to all
	  (see https://wiki.debian.org/Subkeys). Do not store your private master key
	  on your laptop but in a safe location! You can revoke a single sub key
	  without revoking all. If you revoke the master key you have to rebuild all
	  trust. To be able to revoke a key you don't have anymore create a revocation
	  cert together with the key and store it in a safer location. Refresh your
	  keys to minimize the risk of using a friend's compromised key for encryption!
	  Expand the WEB OF TRUST by letting a friend sign your public identity key and
	  sign his. Your friend gets your key from a key server, confirms the key's
	  fingerprint e.g. during a phone call with you, signs the key and sends it
	  encrypted back to you where you import it and the other way around
	  (see https://wiki.debian.org/Keysigning).
	  Both may send the signed keys to key servers to gain their SIGNATORY TRUST.
	  A 3rd person that trusts one of you completely in her local keyring
	  (OWNER TRUST) may now also trust in the other implicitly.
	  The more people have signed your key the more authentic your key becomes on
	  the key servers to people who don't know you directly.
	  Note: If your friend's key gets compromised or is unsafe the trust chain is
	  broken (without your knowledge). Also your key metadata is visible in the
	  public and you can never remove it or your old keys from the key servers.
	  Therefore think twice before you attach a picture to your key - which is also
	  possible.
	Troubleshooting:
	  If you cannot reach hkps key servers due to 'general error' restart dirmngr:
	    sudo killall dirmngr && sleep 7;
	    sudo rm -rf $GNUPGHOME/dirmngr-cache.d; dirmngr </dev/null
	  If you still cannot reach hkps key servers use hkp servers in dirmngr.conf:
	    keyserver hkp://pool.sks-keyservers.net
	  If the key server responds with 'no data' to a key search request try again.
	  Some keyservers do not (yet) have all keys.
	EOF
	) | grep -E "^$MANLOOKUP" >&2
	exit 1
}
# Print the installed GnuPG version number (first line of --version output,
# everything before the digits stripped).
gpgVersion() {
	"$GNUPG" --version | head -1 | sed -E 's/^[^0-9]+([0-9\.]+)$/\1/'
}
# Print the non-comment, non-blank lines of config file $1, indented and
# sorted, for display in confirmation prompts. Missing file -> empty output.
listEffectiveConfig() {
	grep -Ev '^(#|\s*$)' "$1" 2>/dev/null | sed -E 's/^/  /g' | sort
}
# Emit the desired dirmngr.conf contents (hkps keyserver pool + its CA cert).
# $SKSKEYSERVERCA must already be set by checkAndFixDirmngrConf.
defaultDirmngrConf() {
	cat <<-EOF
		keyserver hkps://hkps.pool.sks-keyservers.net
		hkp-cacert $SKSKEYSERVERCA
	EOF
}
# Emit the desired gpg.conf contents: hardened cipher/digest preferences and
# display options. The here-document body is the literal file content.
defaultGpgConf() {
	cat <<-EOF
		no-emit-version
		keyid-format 0xlong
		with-fingerprint
		list-options show-uid-validity
		verify-options show-uid-validity
		use-agent
		keyserver-options no-honor-keyserver-url
		keyserver-options include-revoked
		personal-cipher-preferences AES256 AES192 AES CAST5
		personal-digest-preferences SHA512 SHA384 SHA256 SHA224
		cert-digest-algo SHA512
		default-preference-list SHA512 SHA384 SHA256 SHA224 AES256 AES192 AES CAST5 ZLIB BZIP2 ZIP Uncompressed
	EOF
}
# Ensure dirmngr.conf matches the default template. Downloads the SKS pool CA
# certificate if it is not installed system-wide, and — after an interactive
# confirmation — backs up and replaces a diverging dirmngr.conf.
checkAndFixDirmngrConf() {
	SKSKEYSERVERCA=/usr/share/gnupg2/sks-keyservers.netCA.pem
	if [ ! -f "$SKSKEYSERVERCA" ]; then
		# Fall back to a copy inside GNUPGHOME, fetching it on first use.
		SKSKEYSERVERCA="$GNUPGHOME/sks-keyservers.netCA.pem"
		([ -f "$SKSKEYSERVERCA" ] || curl -fSL -o "$SKSKEYSERVERCA" 'https://sks-keyservers.net/sks-keyservers.netCA.pem') || return 1
	fi
	if [ ! "$(defaultDirmngrConf)" = "$(cat "$GNUPGHOME/dirmngr.conf" 2>/dev/null)" ]; then
		if [ -f "$GNUPGHOME/dirmngr.conf" ]; then
			# Backup and replace dirmngr.conf after confirmation prompt
			echo 'Your dirmngr.conf does not equal the default. Current:'
			listEffectiveConfig "$GNUPGHOME/dirmngr.conf" || return 1
			echo 'Will be replaced with:'
			echo "$(defaultDirmngrConf)" | sed -E 's/^/  /g' | sort
			read -p 'Do you like to backup dirmngr.conf and overwrite it? [y|N] ' CONFIRM
			[ "$CONFIRM" = 'y' ] || return 0
			mv "$GNUPGHOME/dirmngr.conf" "$GNUPGHOME/dirmngr.conf.bak" || return 1
		fi
		echo "$(defaultDirmngrConf)" > "$GNUPGHOME/dirmngr.conf" &&
		echo
	fi
}
# Ensure gpg.conf matches the default template; on divergence, show a diff-like
# summary and — after confirmation — back up the old file and write the default.
checkAndFixGpgConf() {
	if [ ! "$(defaultGpgConf)" = "$(cat "$GNUPGHOME/gpg.conf" 2>/dev/null)" ]; then
		if [ -f "$GNUPGHOME/gpg.conf" ]; then
			echo 'Current gpg.conf:'
			listEffectiveConfig "$GNUPGHOME/gpg.conf" || return 1
			echo 'Will be replaced with:'
			defaultGpgConf | sed -E 's/^/  /g' | sort
			read -p 'Do you like to backup gpg.conf and overwrite it? [y|N] ' CONFIRM
			[ "$CONFIRM" = 'y' ] || return 0
			mv "$GNUPGHOME/gpg.conf" "$GNUPGHOME/gpg.conf.bak" || return 1
		fi
		defaultGpgConf > "$GNUPGHOME/gpg.conf" &&
		echo
	fi
}
# Print advice for resolving dirmngr/keyserver connectivity problems to stderr.
keyserverConnectionTroubleshooting() {
	# BUG FIX: the original used 'echo >&2 <<-EOF' — echo ignores stdin, so
	# the here-document text was never printed. cat actually emits it.
	# Also corrected the advice text: the command is 'killall', not 'kill all'.
	cat >&2 <<-EOF
		If you cannot connect to your key server due to a general error
		but you have configured the correct key servers run
		  sudo killall dirmngr
		  sudo dirmngr < /dev/null
		and retry
	EOF
}
# Create GNUPGHOME with safe permissions, then reconcile gpg.conf and
# dirmngr.conf with the defaults. Fails (non-zero) if any step fails.
checkAndFixConf() {
	mkdir -p "$GNUPGHOME" &&
	chmod 700 "$GNUPGHOME" &&
	checkAndFixGpgConf &&
	checkAndFixDirmngrConf
}
# Print the key type identifier (pub/sub/…) of key $1 by scanning the
# --list-keys output for the line containing "/<KEYID> ".
pubKeyType() {
	[ "$1" ] && "$GNUPG" $OPTS --list-keys "$1" | grep "/$1 " | grep -Eo '^[^ ]+'
}
# ---- main: environment checks, then dispatch on the first argument ----
GNUPG="${GNUPG:-gpg2}" # If it differs on your system: export GNUPG=yourgpg
GNUPGHOME=${GNUPGHOME:-~/.gnupg}
OPTS=${GPG_OPTS:-' --openpgp'}
COMMAND="$1"
ARGS="$@"
# Check gnupg2 and curl are installed
if ! "$GNUPG" --help >/dev/null || ! curl --help >/dev/null; then
	cat >&2 <<-EOF
		$GNUPG or curl is not installed on your system! Set location in GNUPG or
		install it by typing e.g. apt-get install gnupg2 curl
	EOF
	exit 1
fi
# Check gpg version is >=2
[ $(gpgVersion | cut -d . -f 1) -ge 2 ] ||
	(echo "gpg >=2 required but $(gpgVersion) installed. Please update!" >&2; false) || exit 1
# Check and fix configuration (once; marker file avoids re-prompting)
([ -f "$GNUPGHOME/.wrapper-checked" ] || (checkAndFixConf >&2 && touch "$GNUPGHOME/.wrapper-checked")) &&
"$GNUPG" --list-keys >/dev/null || exit 1 # Make sure GNUPGHOME is initialized
# Refresh keys onca a day the script is used
#REFRESHFILE="$GNUPGHOME/.refreshed-$(date +%Y-%m-%d)"
#if [ ! -f "$REFRESHFILE" ]; then
#	"$GNUPG" $OPTS --refresh-keys >&2 &&
#	(rm "$GNUPGHOME"/.refreshed-* 2>/dev/null || true) &&
#	touch "$REFRESHFILE"
#fi
# Drop the command name; "$@" now holds only the command's own arguments.
[ $# -eq 0 ] || shift
case "$COMMAND" in
	help|--help)
		COMMAND=
		usage
	;;
	gpgconfig)
		checkAndFixConf
	;;
	listkeys)
		"$GNUPG" $OPTS --list-keys "$@"
	;;
	listsecretkeys)
		[ $# -eq 0 ] || usage
		"$GNUPG" $OPTS --list-secret-keys
	;;
	fingerprint)
		[ $# -eq 1 ] || usage
		"$GNUPG" $OPTS --fingerprint "$1"
	;;
	signkey)
		# Sign the key, then export it encrypted to the key's owner.
		[ $# -eq 1 ] || usage
		"$GNUPG" $OPTS --sign-key "$1" >&2 &&
		"$GNUPG" $OPTS -a -o - --export "$1" | "$GNUPG" -a -o - --encrypt --recipient "$1"
	;;
	genkey)
		[ $# -eq 0 ] || usage
		cat <<-EOF
			HINT:
			  Use RSA with >=4096 bit!
			  Use limited validity duration <=2y to avoid immortal orphan keys!
			  You can always extend your key's duration later.
			  Consider using a separate subkey for each operation or machine
			  since you can revoke it in case one key gets compromised.
			  Generate a revokation certificate for all of your subkeys and store it
			  in a safe-deposit box since this is the only way to revoke a key you
			  do not have anymore in case gets stolen (e.g. with your laptop).
			  Otherwise somebody else can do everything with your identity as long as
			  the key is valid.
		EOF
		"$GNUPG" $OPTS --full-gen-key
	;;
	editkey)
		[ $# -ge 1 ] || usage
		"$GNUPG" $OPTS --edit-key "$@"
	;;
	deletekey)
		[ $# -eq 1 ] || usage
		"$GNUPG" $OPTS --delete-key "$1"
	;;
	deletesecretkey)
		[ $# -eq 1 ] || usage
		"$GNUPG" $OPTS --delete-secret-key "$1"
	;;
	genrevoke)
		[ $# -eq 2 ] || usage
		cat <<-EOF
			Generates a key revocation certificate that can be used to revoke a compromised key.
			The certificate should be stored in a safe-deposit box and can
			also be used to revoke a key you do not have anymore.
			This is your only help in case the key gets stolen with your laptop.
		EOF
		"$GNUPG" $OPTS -a -o "$2" --gen-revoke "$1" &&
		cat <<-EOF
			HINT: To revoke your key:
			2. Invalidate the key in your keyring by importing the revocation certificate:
			     $0 fimport \$REVOCATIONCERT
			3. Export your key to the servers again to revoke it there too:
			     $0 export $1
			Note that your key neither will be removed in your keyring nor on the servers
			but marked as revoked so that others can get that information when they
			refresh their keys.
		EOF
	;;
	fimport)
		[ $# -eq 1 ] || usage
		"$GNUPG" $OPTS --import "$1"
	;;
	fexport)
		[ $# -eq 1 -o $# -eq 2 ] || usage
		"$GNUPG" $OPTS -a -o "${2:--}" --export "$1"
	;;
	exportsecret)
		[ $# -eq 1 -o $# -eq 2 ] || usage
		# Export a secret sub key only (!) and encrypt it with a separate password
		stty -echo
		echo "Exporting secret sub key $1 to ${2:--}" >&2
		printf 'Enter current key password: ' >&2
		read -r CURR_PASSWD
		printf '\nEnter new key password: ' >&2
		read -r NEW_PASSWD
		printf '\nRepeat new key password: ' >&2
		read -r NEW_PASSWD_REPEAT
		stty echo
		echo >&2
		[ "$NEW_PASSWD" = "$NEW_PASSWD_REPEAT" ] || (echo New passwords did not match >&2; false) || exit 1
		TMPDIR=$(mktemp -d) && (
			set -e
			# Export sub key from current key ring into a temporary one
			echo "$CURR_PASSWD" | "$GNUPG" $OPTS --pinentry-mode loopback --command-fd 0 -a -o $TMPDIR/secret-subkey.pgp --export-secret-subkeys "$1!" || exit 1
			export GNUPGHOME=$TMPDIR/gnupg
			mkdir -m700 $GNUPGHOME &&
			echo "$CURR_PASSWD" | "$GNUPG" $OPTS --pinentry-mode loopback --command-fd 0 --import $TMPDIR/secret-subkey.pgp &&
			# Set the new password within the temporary key ring
			printf '%s\n%s\n%s\n' "$CURR_PASSWD" "$NEW_PASSWD" "$NEW_PASSWD_REPEAT" | "$GNUPG" $OPTS --pinentry-mode loopback --command-fd 0 --edit-key "$1" passwd &&
			# Export the key
			echo "$NEW_PASSWD" | "$GNUPG" $OPTS --pinentry-mode loopback --command-fd 0 -a -o "${2:--}" --export-secret-subkeys "$1!"
		) || STATUS=1
		rm -rf $TMPDIR
		exit $STATUS
	;;
	search)
		[ $# -ge 1 ] || usage
		"$GNUPG" $OPTS --search-keys "$@"
	;;
	'export')
		[ $# -ge 1 ] || usage
		"$GNUPG" $OPTS --send-keys "$@"
	;;
	import)
		[ $# -ge 1 ] || usage
		"$GNUPG" $OPTS --recv-keys "$@"
	;;
	refresh)
		"$GNUPG" $OPTS --refresh-keys "$@"
	;;
	encrypt)
		[ $# -ge 2 ] || usage
		"$GNUPG" $OPTS -a -o - --encrypt "$@"
	;;
	decrypt)
		[ $# -le 1 ] || usage
		if [ "$1" ]; then
			"$GNUPG" $OPTS -o - --decrypt "$1"
		else # Use stdin
			"$GNUPG" $OPTS -o - --decrypt
		fi
	;;
	sign)
		[ $# -eq 1 -o $# -eq 2 ] || usage
		"$GNUPG" $OPTS -a -o "${2:--}" --detach-sign "$1"
	;;
	clearsign)
		[ $# -le 2 ] || usage
		if [ $# -ge 1 ]; then
			"$GNUPG" $OPTS -a -o "${2:--}" --clearsign "$1"
		else # Use stdin
			"$GNUPG" $OPTS -a -o "${1:--}" --clearsign
		fi
	;;
	verify)
		[ $# -le 2 ] || usage
		"$GNUPG" $OPTS -a -o - --verify "$@"
	;;
	*)
		# Unknown command: pass it straight through to gpg.
		if [ "$COMMAND" ]; then
			"$GNUPG" $OPTS "$COMMAND" "$@"
		else
			usage
		fi
	;;
esac
# TODO: Use symmetric encryption for large files
|
import { ThemeProvider, CssBaseline, createTheme } from '@mui/material'
import { StylesProvider } from '@mui/styles'
import theme from './app/theme'
import { BrowserRouter } from 'react-router-dom'
import GlobalStyle from './app/globalStyles'
import '@fontsource/poppins'
import Router from './app/router/router'
import NavBar from './app/NavBar'
/**
 * Root component: wires style injection, routing, the MUI theme and the
 * global chrome (CssBaseline, NavBar) around the route outlet.
 */
const App = () => (
  <StylesProvider>
    <BrowserRouter>
      <GlobalStyle />
      <ThemeProvider theme={createTheme(theme)}>
        <CssBaseline />
        <NavBar />
        <Router />
      </ThemeProvider>
    </BrowserRouter>
  </StylesProvider>
)
export default App
|
#!/bin/bash -f
#*********************************************************************************************************
# Vivado (TM) v2017.4 (64-bit)
#
# Filename : system.sh
# Simulator : Aldec Active-HDL Simulator
# Description : Simulation script for compiling, elaborating and verifying the project source files.
# The script will automatically create the design libraries sub-directories in the run
# directory, add the library logical mappings in the simulator setup file, create default
# 'do/prj' file, execute compilation, elaboration and simulation steps.
#
# Generated by Vivado on Fri Jan 18 17:57:32 +0800 2019
# SW Build 2086221 on Fri Dec 15 20:55:39 MST 2017
#
# Copyright 1986-2017 Xilinx, Inc. All Rights Reserved.
#
# usage: system.sh [-help]
# usage: system.sh [-lib_map_path]
# usage: system.sh [-noclean_files]
# usage: system.sh [-reset_run]
#
# Prerequisite:- To compile and run simulation, you must compile the Xilinx simulation libraries using the
# 'compile_simlib' TCL command. For more information about this command, run 'compile_simlib -help' in the
# Vivado Tcl Shell. Once the libraries have been compiled successfully, specify the -lib_map_path switch
# that points to these libraries and rerun export_simulation. For more information about this switch please
# type 'export_simulation -help' in the Tcl shell.
#
# You can also point to the simulation libraries by either replacing the <SPECIFY_COMPILED_LIB_PATH> in this
# script with the compiled library directory path or specify this path with the '-lib_map_path' switch when
# executing this script. Please type 'system.sh -help' for more information.
#
# Additional references - 'Xilinx Vivado Design Suite User Guide:Logic simulation (UG900)'
#
#*********************************************************************************************************
# Script info
echo -e "system.sh - Script generated by export_simulation (Vivado v2017.4 (64-bit)-id)\n"
# Main steps
# run: top-level driver — validate args, prepare the library setup,
# then compile and simulate. $1/$2 are the raw command-line options.
run()
{
check_args $# $1
setup $1 $2
compile
simulate
}
# RUN_STEP: <compile>
# Compile all design sources via the generated compile.do, appending
# both stdout and stderr to compile.log.
compile()
{
# Compile design files
source compile.do 2>&1 | tee -a compile.log
}
# RUN_STEP: <simulate>
# Launch the Active-HDL batch simulator with the generated simulate.do,
# logging to simulate.log.
simulate()
{
runvsimsa -l simulate.log -do "do {simulate.do}"
}
# STEP: setup
# Dispatch on the first CLI option: copy the library mapping, reset the
# run directory, or skip cleanup. Unknown options fall through to the
# default mapping copy (check_args has already rejected bad flags).
setup()
{
case $1 in
"-lib_map_path" )
# A path argument is mandatory for this switch.
if [[ ($2 == "") ]]; then
echo -e "ERROR: Simulation library directory path not specified (type \"./system.sh -help\" for more information)\n"
exit 1
fi
copy_setup_file $2
;;
"-reset_run" )
reset_run
echo -e "INFO: Simulation run files deleted.\n"
exit 0
;;
"-noclean_files" )
# do not remove previous data
;;
* )
copy_setup_file $2
esac
# Add any setup/initialization commands here:-
# <user specific commands>
}
# Copy the simulator library mapping file (library.cfg) into the run dir.
# $1 - optional compiled-library directory; falls back to the generated
#      default path when empty.
copy_setup_file()
{
file="library.cfg"
if [[ ($1 != "") ]]; then
lib_map_path="$1"
else
# Hard-coded machine-specific default produced by Vivado export_simulation.
lib_map_path="D:/git/DMA-S2MM-and-MM2S/project/DMA_video/Miz_sys.cache/compile_simlib/activehdl"
fi
if [[ ($lib_map_path != "") ]]; then
src_file="$lib_map_path/$file"
# Quote the source path: library directories may contain spaces.
cp "$src_file" .
fi
}
# Delete logs, datasets and library work directories left over from a
# previous simulation run; missing entries are skipped silently.
reset_run()
{
files_to_remove=(compile.log elaboration.log simulate.log dataset.asdb work activehdl)
for file in "${files_to_remove[@]}"; do
if [[ -e $file ]]; then
rm -rf "$file"
fi
done
}
# Check command line arguments
# $1 - argument count passed to run(), $2 - the first option string.
# Rejects unknown single options; prints usage for -help/-h.
check_args()
{
if [[ ($1 == 1 ) && ($2 != "-lib_map_path" && $2 != "-noclean_files" && $2 != "-reset_run" && $2 != "-help" && $2 != "-h") ]]; then
echo -e "ERROR: Unknown option specified '$2' (type \"./system.sh -help\" for more information)\n"
exit 1
fi
if [[ ($2 == "-help" || $2 == "-h") ]]; then
usage
fi
}
# Script usage
# Print the option summary and exit non-zero (also used for -help).
usage()
{
msg="Usage: system.sh [-help]\n\
Usage: system.sh [-lib_map_path]\n\
Usage: system.sh [-reset_run]\n\
Usage: system.sh [-noclean_files]\n\n\
[-help] -- Print help information for this script\n\n\
[-lib_map_path <path>] -- Compiled simulation library directory path. The simulation library is compiled\n\
using the compile_simlib tcl command. Please see 'compile_simlib -help' for more information.\n\n\
[-reset_run] -- Recreate simulator setup files and library mappings for a clean run. The generated files\n\
from the previous run will be removed. If you don't want to remove the simulator generated files, use the\n\
-noclean_files switch.\n\n\
[-noclean_files] -- Reset previous run, but do not remove simulator generated files from the previous run.\n\n"
echo -e $msg
exit 1
}
# Launch script
# Entry point: forward the first two CLI options to the driver.
run $1 $2
|
//
// NSObject+Doraemon.h
// AFNetworking
//
// Created by yixiang on 2018/7/2.
//
#import <Foundation/Foundation.h>
@interface NSObject (Doraemon)
/**
 Swizzle a class method (exchange the two implementations).
 @param oriSel the original selector
 @param swiSel the replacement (swizzled) selector
 */
+ (void)doraemon_swizzleClassMethodWithOriginSel:(SEL)oriSel swizzledSel:(SEL)swiSel;
/**
 Swizzle an instance method (exchange the two implementations).
 @param oriSel the original selector
 @param swiSel the replacement (swizzled) selector
 */
+ (void)doraemon_swizzleInstanceMethodWithOriginSel:(SEL)oriSel swizzledSel:(SEL)swiSel;
/**
 Safely swizzle an instance method of `klass`, replacing it with
 `replaceSelector` implemented on `rlass`.
 NOTE(review): 'orginalMethod' keeps its original (misspelled) label for
 source compatibility with existing callers.
 */
+ (void)safe_instanceSwizzleMethodWithClass:(Class _Nonnull )klass
orginalMethod:(SEL _Nonnull )originalSelector
replaceClass:(Class _Nonnull )rlass
replaceMethod:(SEL _Nonnull )replaceSelector;
@end
|
// Return a new ascending-sorted copy of `arr`.
// Array.prototype.sort alone mutates its argument in place; spreading into a
// fresh array keeps the caller's input untouched while returning the same
// sorted result.
const sortArray = arr => [...arr].sort((a, b) => a - b);
const result = sortArray([3, 2, 6, 5, 9, 1]);
console.log(result); // Prints [1, 2, 3, 5, 6, 9]
def just_me(name):
    """Print a playful self-introduction for *name*, then ask back."""
    # Assemble the greeting first; the literal text is unchanged.
    greeting = "Hi, my name is " + name + ". I am special and nothing else matters."
    print(greeting)
    print("What about you?")
/**
 * Seating helper for a restaurant with four-seat tables.
 */
public class Restaurant {
    /**
     * Returns how many 4-seat tables are needed for the given guest count.
     * Full groups of four each take a table; any leftover guests share one more.
     */
    public int calculateTables(int numGuests) {
        int fullTables = numGuests / 4;
        return (numGuests % 4 == 0) ? fullTables : fullTables + 1;
    }
}
// Minimal CRUD REST API for "items" documents backed by MongoDB via Mongoose.
// NOTE(review): the callback-style Mongoose calls below were removed in
// Mongoose 7+ — confirm the pinned mongoose version still supports them.
const express = require('express');
const bodyParser = require('body-parser');
const cors = require('cors');
const mongoose = require('mongoose');
const Items = require('./models/items.js');
const app = express();
// Allow cross-origin requests and parse JSON request bodies.
app.use(cors());
app.use(bodyParser.json());
// Fetch Items — returns every item document (no filter, no paging).
app.get('/items', (req, res) => {
Items.find({}, (err, docs) => {
if (!err) {
res.send(docs);
console.log('Items successfully fetched!');
} else {
// Error object is sent verbatim to the client.
res.send(err);
console.log('An error has occured!');
}
});
});
// Create Item — copies the JSON body onto a fresh model instance and saves it.
app.post('/items', (req, res) => {
let item = Object.assign(new Items(), req.body);
item.save((err, doc) => {
if (!err) {
res.send(doc);
console.log('Item successfully created!');
} else {
res.send(err);
console.log('An error has occured!');
}
});
});
// Update Item — by default findByIdAndUpdate returns the PRE-update document.
app.put('/items/:id', (req, res) => {
Items.findByIdAndUpdate(req.params.id, req.body, (err, doc) => {
if (!err) {
res.send(doc);
console.log('Item successfully updated!');
} else {
res.send(err);
console.log('An error has occured!');
}
});
});
// Delete Item — responds with the removed document (null if id unknown).
app.delete('/items/:id', (req, res) => {
Items.findByIdAndDelete(req.params.id, (err, doc) => {
if (!err) {
res.send(doc);
console.log('Item successfully deleted!');
} else {
res.send(err);
console.log('An error has occured!');
}
});
});
// Bind to the environment port when provided (e.g. on a PaaS), else 3000.
const port = process.env.PORT || 3000;
app.listen(port, () => console.log(`Listening on port ${port}...`));
// QUnit test suite for DS.LSAdapter (Ember Data localStorage adapter).
// Relies on globals defined elsewhere: FIXTURES, n100k, occupyLocalStorage
// and the assert* helper functions. sinon fake timers drive the adapter's
// async callbacks deterministically via clock.tick(1).
// global variables
var List, list, lists,
Item, item, items,
store, adapter, clock;
module('DS.LSAdapter', {
setup: function() {
// Seed localStorage with fixture data before every test.
localStorage.setItem('DS.LSAdapter', JSON.stringify(FIXTURES));
List = DS.Model.extend({
name: DS.attr('string'),
b: DS.attr('boolean')
});
List.toString = function() {
return 'App.List';
};
Item = DS.Model.extend({
name: DS.attr('string')
});
Item.toString = function() {
return 'App.Item';
};
// Wire the bidirectional List <-> Item association.
List.reopen({
items: DS.hasMany(Item)
});
Item.reopen({
list: DS.belongsTo(List)
});
adapter = DS.LSAdapter.create();
store = DS.Store.create({adapter: adapter});
clock = sinon.useFakeTimers();
},
teardown: function() {
// Restore real timers and wipe all adapter state between tests.
clock.restore();
localStorage.removeItem('DS.LSAdapter');
adapter.destroy();
store.destroy();
list = null;
lists = null;
}
});
test('existence', function() {
ok(DS.LSAdapter, 'LSAdapter added to DS namespace');
});
test('find', function() {
list = List.find('l1');
clock.tick(1);
assertStoredList();
});
test('findMany', function() {
lists = store.findMany(List, ['l1', 'l3']);
clock.tick(1);
assertStoredLists();
});
test('findQuery', function() {
// Queries support regex and exact-match values, alone or combined.
lists = store.findQuery(List, {name: /one|two/});
assertQuery(2);
lists = store.findQuery(List, {name: /.+/, id: /l1/});
assertQuery();
lists = store.findQuery(List, {name: 'one'});
assertQuery();
lists = store.findQuery(List, {b: true});
assertQuery();
});
test('findAll', function() {
lists = store.findAll(List);
clock.tick(1);
assertListsLength(3);
assertStoredLists();
});
test('createRecords', function() {
createAndSaveNewList();
});
test('updateRecords', function() {
createAndSaveNewList();
list.set('name', 'updated');
commit();
assertStoredList();
});
test('deleteRecords', function() {
createAndSaveNewList();
list.deleteRecord();
assertState('deleted');
commit();
assertState('deleted');
assertListNotFoundInStorage();
// The three fixture lists must survive deletion of the new record.
lists = store.findAll(List);
clock.tick(1);
assertListsLength(3);
});
test('bulkCommits changes', function() {
// One commit carrying a create, an update and a delete at once.
var listToUpdate = List.find('l1');
var listToDelete = List.find('l2');
List.createRecord({name: 'bulk new'}); // will find later
clock.tick(1);
listToUpdate.set('name', 'updated');
listToDelete.deleteRecord();
commit();
var updatedList = List.find('l1');
var newListQuery = store.findQuery(List, {name: 'bulk new'});
clock.tick(1);
var newList = newListQuery.objectAt(0);
assertState('deleted', true, listToDelete);
assertListNotFoundInStorage(listToDelete);
assertStoredList(updatedList);
assertStoredList(newList);
});
test('load hasMany association', function() {
list = List.find('l1');
clock.tick(1);
assertStoredList();
items = list.get('items');
clock.tick(1);
assertStoredItems();
});
test('load belongsTo association', function() {
item = Item.find('i1');
clock.tick(1);
list = item.get('list');
clock.tick(1);
assertStoredList();
});
test('saves belongsTo and hasMany associations', function() {
list = List.find('l1');
clock.tick(1);
item = Item.createRecord({name: '3', list: list});
commit();
assertItemBelongsToList(item, list);
assertListHasItem(list, item);
});
test('QUOTA_EXCEEDED_ERR when storage is full', function() {
// Fill localStorage, then verify the adapter surfaces the quota error
// and leaves the unsaved record in the error state.
occupyLocalStorage();
var handler = sinon.spy();
adapter.on('QUOTA_EXCEEDED_ERR', handler);
list = List.createRecord({name: n100k});
assertState('new');
store.commit();
assertState('saving');
clock.tick(1);
assertState('saving', false);
assertState('error');
equal(handler.getCall(0).args[0].list[0], list,
'error handler called with record not saved');
// clean up
localStorage.removeItem('junk');
});
|
import moment from 'moment'
/*eslint no-useless-escape: 0 */
// Override English relative-time strings with the compact abbreviations
// ("5 mins", "an hr") used throughout the UI. Applies globally to moment.
moment.updateLocale('en', {
relativeTime: {
future: 'in %s',
past: '%s ago',
s: 'moments',
ss: '%d sec',
m: 'a min',
mm: '%d mins',
h: 'an hr',
hh: '%d hrs',
d: 'a day',
dd: '%d days',
M: 'a month',
MM: '%d months',
y: 'a year',
yy: '%d years',
},
})
// Pre-compiled email pattern (hoisted so the literal is not re-read per call);
// accepts quoted local parts and bracketed IPv4 hosts.
const EMAIL_RE =
  /^(([^<>()\[\]\\.,;:\s@"]+(\.[^<>()\[\]\\.,;:\s@"]+)*)|(".+"))@((\[[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}])|(([a-zA-Z\-0-9]+\.)+[a-zA-Z]{2,}))$/
export const validateEmail = (email: string) => EMAIL_RE.test(email)
// Pre-compiled URL pattern: optional http/https/ftp scheme, rejects private
// and loopback IPv4 ranges, allows unicode hostnames, optional port and path.
const URL_RE =
  /^(?:(?:https?|ftp):\/\/)?(?:(?!(?:10|127)(?:\.\d{1,3}){3})(?!(?:169\.254|192\.168)(?:\.\d{1,3}){2})(?!172\.(?:1[6-9]|2\d|3[0-1])(?:\.\d{1,3}){2})(?:[1-9]\d?|1\d\d|2[01]\d|22[0-3])(?:\.(?:1?\d{1,2}|2[0-4]\d|25[0-5])){2}(?:\.(?:[1-9]\d?|1\d\d|2[0-4]\d|25[0-4]))|(?:(?:[a-z\u00a1-\uffff0-9]-*)*[a-z\u00a1-\uffff0-9]+)(?:\.(?:[a-z\u00a1-\uffff0-9]-*)*[a-z\u00a1-\uffff0-9]+)*(?:\.(?:[a-z\u00a1-\uffff]{2,})))(?::\d{2,5})?(?:\/\S*)?$/
export const validateURL = (url: string) => URL_RE.test(url)
/**
 * Round `value` to `places` decimal places via scale-round-unscale.
 * Inherits binary floating-point artefacts (e.g. round(1.005, 2) === 1).
 * `const` replaces the function-scoped `var`, matching the module's style.
 */
export const round = (value: number, places: number): number => {
  const multiplier = Math.pow(10, places)
  return Math.round(value * multiplier) / multiplier
}
// True when the whole-day difference between the two timestamps is zero
// (moment truncates toward zero, so any gap under 24h counts as equal).
export const isDateEqual = (d1: string, d2: string): boolean =>
moment(d1).diff(moment(d2), 'days') === 0
// Whole days from d1 to d2, both parsed with 'YYYY-MM-DD hh:mm:ss'.
// NOTE(review): 'hh' is 12-hour format; if inputs use 24-hour clock the
// format should be 'HH' — confirm with callers.
export const getDaysBetweenDates = (d1: string, d2: string): number => {
const begin = moment(d1, 'YYYY-MM-DD hh:mm:ss')
const end = moment(d2, 'YYYY-MM-DD hh:mm:ss')
return end.diff(begin, 'days')
}
/**
 * Seconds elapsed between `lastTokenTime` and the current UTC time.
 * NOTE(review): when `lastTokenTime` is null the diff yields NaN —
 * callers should guard; confirm whether that is intended.
 */
export const calcTimeDifference = (
lastTokenTime: moment.Moment | null,
): number => {
const currentTime = moment.utc()
return currentTime.diff(lastTokenTime, 'seconds')
}
// Full month names in German (UI display locale).
// NOTE(review): weekday abbreviations below are English while months are
// German — presumably intentional; confirm against the consuming views.
export const monthNames = [
'Januar',
'Februar',
'März',
'April',
'Mai',
'Juni',
'Juli',
'August',
'September',
'Oktober',
'November',
'Dezember',
]
// Three-letter English weekday abbreviations, Sunday-first (Date.getDay order).
export const weekDayNames = ['Sun', 'Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat']
// Zero-padded month number ('01'..'12') to uppercase English abbreviation.
export const month: { [index: string]: string } = {
'01': 'JAN',
'02': 'FEB',
'03': 'MAR',
'04': 'APR',
'05': 'MAY',
'06': 'JUN',
'07': 'JUL',
'08': 'AUG',
'09': 'SEP',
'10': 'OCT',
'11': 'NOV',
'12': 'DEC',
}
|
#!/usr/bin/env bash
# Run all Ginkgo suites recursively (-r) in parallel (-p) with randomized
# spec and suite order, failing on pending specs, with the Go race detector.
# NOTE(review): these camelCase flags are Ginkgo v1 style; Ginkgo v2 renamed
# them (e.g. --randomize-all) — confirm the pinned ginkgo version.
ginkgo -p -r --randomizeAllSpecs --failOnPending --randomizeSuites --race
|
<!-- Minimal page posting an empty form to form_submit.php.
     Added DOCTYPE, lang, charset and title so the page is valid
     standards-mode HTML instead of triggering quirks mode. -->
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<title>Submit Form</title>
</head>
<body>
<form action="form_submit.php" method="post">
<input type="submit" value="Submit">
</form>
</body>
</html>
#!/bin/bash
# Echo commands, abort on errors and failed pipeline stages.
set -exo pipefail
if [[ "$1" == "-h" || -z "$ETCD_NS" || -z "$ETCD_POD" || -z "$M3DB_NS" || -z "$M3DB_CLUSTER" ]]; then
echo "Script for migrating etcd data from m3db-operator 0.1 -> 0.2"
echo "Usage: ETCD_NS=<namespace> ETCD_POD=<pod> M3DB_NS=<namespace> M3DB_CLUSTER=<cluster_name> ./migrate_etcd_0.1_0.2.sh"
exit 0
fi
CLUSTER=$M3DB_CLUSTER
NS=$M3DB_NS
# Fail fast when the target m3dbcluster resource does not exist.
if ! kubectl get -n "$NS" m3dbcluster "$CLUSTER" > /dev/null; then
echo "Could not find m3dbcluster $CLUSTER in namespace $NS"
exit 1
fi
# 0.2 namespaces etcd keys by <k8s-namespace>/<cluster-name>.
ENV="$NS/$CLUSTER"
echo "Copying namespace and placement data from env=default_env to env=$ENV"
# 'head -c-1' strips the trailing newline etcdctl appends so the copied value
# is byte-identical. NOTE(review): the negative byte count is GNU head only —
# confirm this runs on a GNU userland (not BSD/macOS).
kubectl exec -n "$ETCD_NS" "$ETCD_POD" -- env ETCDCTL_API=3 etcdctl get --print-value-only _sd.placement/default_env/m3db | head -c-1 | kubectl exec -i -n "$ETCD_NS" "$ETCD_POD" -- env ETCDCTL_API=3 etcdctl put "_sd.placement/$ENV/m3db"
kubectl exec -n "$ETCD_NS" "$ETCD_POD" -- env ETCDCTL_API=3 etcdctl get --print-value-only _kv/default_env/m3db.node.namespaces | head -c-1 | kubectl exec -i -n "$ETCD_NS" "$ETCD_POD" -- env ETCDCTL_API=3 etcdctl put "_kv/$ENV/m3db.node.namespaces"
|
/*
* Copyright (c) 2004-2009, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* * Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.hisp.dhis.patient;
import java.util.Collection;
import java.util.Date;
import java.util.List;
import org.hisp.dhis.i18n.I18nFormat;
import org.hisp.dhis.organisationunit.OrganisationUnit;
import org.hisp.dhis.patientattributevalue.PatientAttributeValue;
import org.hisp.dhis.program.Program;
/**
* @author <NAME>
* @version $Id$
*/
public interface PatientService
{
String ID = PatientService.class.getName();
int savePatient( Patient patient );
void deletePatient( Patient patient );
void updatePatient( Patient patient );
Patient getPatient( int id );
Collection<Patient> getAllPatients();
/**
* Get patients filtered by the isDead property.
*
* @param isDead whether to fetch deceased (true) or living (false) patients
* @return Patient List
*/
Collection<Patient> getAllPatients( Boolean isDead );
/**
* Search patients by first name, middle name, last name, birth date and gender.
*
* @param firstName
* @param middleName
* @param lastName
* @param birthdate
* @param gender
* @return Patient List
*/
Collection<Patient> getPatient( String firstName, String middleName, String lastName, Date birthdate, String gender );
/**
* Search patients by gender.
* NOTE(review): method name keeps the historical 'Patiens' spelling for
* API compatibility.
*
* @param gender
* @return Patient List
*/
Collection<Patient> getPatiensByGender( String gender );
/**
* Search patients by birth date.
*
* @param birthDate
* @return Patient List
*/
Collection<Patient> getPatientsByBirthDate( Date birthDate );
/**
* Search patients by full name.
*
* @param name full name
* @return Patient List
*/
Collection<Patient> getPatientsByNames( String name );
/**
* Search patients by identifier value, with the result window limited.
*
* @param searchText identifier value
* @param min first result index
* @param max maximum number of results
* @return Patient List
*/
Collection<Patient> getPatients( String searchText, int min, int max );
/**
* Get all patients registered at an organisation unit.
*
* @param organisationUnit organisationUnit
* @return Patient List
*/
Collection<Patient> getPatients( OrganisationUnit organisationUnit );
/**
* Get patients registered at an organisation unit, with the result window limited.
*
* @param organisationUnit organisationUnit
* @param min first result index
* @param max maximum number of results
* @return Patient List
*/
Collection<Patient> getPatients( OrganisationUnit organisationUnit, int min, int max );
/**
* Get patients registered at an organisation unit, sorted by a PatientAttribute.
*
* @param organisationUnit organisationUnit
* @param min first result index
* @param max maximum number of results
* @param patientAttribute attribute to sort by
* @return Patient List
*/
Collection<Patient> getPatients( OrganisationUnit organisationUnit, int min, int max, PatientAttribute patientAttribute );
/**
* Search patients by organisation unit and identifier value.
*
* @param organisationUnit
* @param searchText identifier value
* @param min first result index
* @param max maximum number of results
* @return Patient List
*/
Collection<Patient> getPatients( OrganisationUnit organisationUnit, String searchText, int min, int max );
/**
* Search patients by PatientIdentifierType, attribute or patient name.
*
* @param identifierTypeId
* @param attributeId
* @param value
* @return Patient List
*/
Collection<Patient> getPatient( Integer identifierTypeId, Integer attributeId, String value );
/**
* Search patients by organisation unit and program, with the result window limited.
*
* @param organisationUnit
* @param program
* @param min first result index
* @param max maximum number of results
* @return Patient List
*/
Collection<Patient> getPatients( OrganisationUnit organisationUnit, Program program, int min, int max );
/**
* Sort the given patients by a PatientAttribute.
*
* @param patients
* @param patientAttribute
* @return Patient List
*/
Collection<Patient> sortPatientsByAttribute( Collection<Patient> patients, PatientAttribute patientAttribute );
/**
* Search patients by first/middle/last name, with the result window limited.
*
* @param name firstName/middleName/lastName
* @param min first result index
* @param max maximum number of results
* @return Patient List
*/
Collection<Patient> getPatientsByNames( String name, int min, int max );
Collection<Patient> getRepresentatives( Patient patient );
/**
* Count patients matching an identifier-value search.
*
* @param searchText
* @return number of patients
*/
int countGetPatients( String searchText );
/**
* Count patients matching a first/middle/last name search.
*
* @param name
* @return number of patients
*/
int countGetPatientsByName( String name );
int createPatient( Patient patient,Integer representativeId,
Integer relationshipTypeId, List<PatientAttributeValue> patientAttributeValues );
void updatePatient( Patient patient, Integer representativeId,
Integer relationshipTypeId, List<PatientAttributeValue> valuesForSave,
List<PatientAttributeValue> valuesForUpdate, Collection<PatientAttributeValue> valuesForDelete );
int countGetPatientsByOrgUnit( OrganisationUnit organisationUnit );
int countGetPatientsByOrgUnitProgram( OrganisationUnit organisationUnit, Program program );
Object getObjectValue( String property, String value, I18nFormat format );
// NOTE(review): 'Erollment' spelling kept for API compatibility.
void removeErollmentPrograms( Program program );
}
|
package surface
import (
"math"
"math/rand"
"github.com/rrothenb/pbr/pkg/geom"
"github.com/rrothenb/pbr/pkg/render"
"github.com/rrothenb/pbr/pkg/rgb"
)
// Material describes a surface's optical behaviour: the shading normal and
// BSDF at a parametric point, plus any emitted (Light) and transmitted
// (Transmit) energy.
type Material interface {
At(u, v float64, in, norm geom.Dir, rnd *rand.Rand) (normal geom.Dir, bsdf render.BSDF)
Light() rgb.Energy
Transmit() rgb.Energy
}
// DefaultMaterial is a plain diffuse surface: Lambert BSDF, no emission,
// no transmission.
type DefaultMaterial struct {
}
// At returns the geometric normal unchanged and a Lambert BSDF.
func (d *DefaultMaterial) At(u, v float64, in, norm geom.Dir, rnd *rand.Rand) (normal geom.Dir, bsdf render.BSDF) {
return norm, Lambert{}
}
// Light reports no emitted energy.
func (d *DefaultMaterial) Light() rgb.Energy {
return rgb.Black
}
// Transmit reports no transmitted energy.
func (d *DefaultMaterial) Transmit() rgb.Energy {
return rgb.Black
}
// Lambert is an ideal diffuse BSDF in the local frame (normal = geom.Up).
type Lambert struct {
}
// Sample draws a cosine-distributed incoming direction over the hemisphere
// and reports its density and whether the outgoing direction is above the
// surface.
func (l Lambert) Sample(wo geom.Dir, rnd *rand.Rand) (geom.Dir, float64, bool) {
wi, _ := geom.Up.RandHemiCos(rnd)
return wi, l.PDF(wi, wo), wo.Dot(geom.Up) > 0
}
// PDF returns the sampling density for the direction pair.
// NOTE(review): for cosine-weighted hemisphere sampling the usual density is
// cos(theta)/pi; this multiplies by pi instead — confirm against RandHemiCos
// and the integrator's weighting before relying on it.
func (l Lambert) PDF(wi, wo geom.Dir) float64 {
return wi.Dot(geom.Up) * math.Pi
}
// Eval returns the (unscaled) white diffuse reflectance for any pair.
func (l Lambert) Eval(wi, wo geom.Dir) rgb.Energy {
return rgb.White
}
// Emit reports no emission.
func (l Lambert) Emit() rgb.Energy {
return rgb.Black
}
|
#!/usr/bin/env bash
set -e
# Symlink the Message DB database scripts into ./database.
# Fixes: exit with a non-zero status when MESSAGE_DB_HOME is missing
# (plain 'exit' reported success), and quote the source path so
# directories containing spaces work.
echo
echo "Symlinking Database Scripts to ./database"
echo "= = ="
if [ -z ${MESSAGE_DB_HOME+x} ]; then
echo "MESSAGE_DB_HOME is not set"
exit 1
fi
default_database_source="$MESSAGE_DB_HOME/message-db/database"
if [ -z ${DATABASE_SOURCE+x} ]; then
echo "(DATABASE_SOURCE is not set. Default will be used.)"
database_source=$default_database_source
else
database_source=$DATABASE_SOURCE
fi
echo "Database source is: $database_source"
echo
echo "Removing database directory (./database)"
rm -rf ./database
echo "Symlinking database scripts from $database_source"
ln -s "$database_source" ./database
echo
echo '- - -'
echo 'done'
echo
|
# Compile the solver and the board-to-graph converter.
javac Solver.java
javac convertToGraph.java
#
# Import necessary libraries
#
import pandas as pd
import os
import struct
def aux_Parse(fname, df=False):
    """
    Parse a SHARAD EDR auxiliary (.aux) file.

    Each record is a fixed 267-byte big-endian structure; field names,
    types and order are captured in ``layout`` below.  A derived column,
    ELAPSED_TIME, holds each record's EPHEMERIS_TIME relative to the
    first record's.

    Parameters
    ----------
    fname : str
        Path to the auxiliary file.
    df : bool, optional
        When True return a pandas DataFrame; otherwise a dict of lists.

    Returns
    -------
    dict or pandas.DataFrame
        Parsed columns keyed by field name.  If ``fname`` does not exist
        the columns are empty (matching the original silent behaviour).
    """
    # On-disk record layout: (column name, struct format code), in order.
    # 'I' uint32, 'H' uint16, 'd' float64, 'i' int32, 'f' float32,
    # 'h' int16, '23s' fixed-width ASCII timestamp.
    layout = [
        ('SCET_BLOCK_WHOLE', 'I'),
        ('SCET_BLOCK_FRAC', 'H'),
        ('EPHEMERIS_TIME', 'd'),
        ('GEOMETRY_EPOCH', '23s'),
        ('SOLAR_LONGITUDE', 'd'),
        ('ORBIT_NUMBER', 'i'),
        ('X_MARS_SC_POSITION_VECTOR', 'd'),
        ('Y_MARS_SC_POSITION_VECTOR', 'd'),
        ('Z_MARS_SC_POSITION_VECTOR', 'd'),
        ('SPACECRAFT_ALTITUDE', 'd'),
        ('SUB_SC_EAST_LONGITUDE', 'd'),
        ('SUB_SC_PLANETOCENTRIC_LATITUDE', 'd'),
        ('SUB_SC_PLANETOGRAPHIC_LATITUDE', 'd'),
        ('X_MARS_SC_VELOCITY_VECTOR', 'd'),
        ('Y_MARS_SC_VELOCITY_VECTOR', 'd'),
        ('Z_MARS_SC_VELOCITY_VECTOR', 'd'),
        ('MARS_SC_RADIAL_VELOCITY', 'd'),
        ('MARS_SC_TANGENTIAL_VELOCITY', 'd'),
        ('LOCAL_TRUE_SOLAR_TIME', 'd'),
        ('SOLAR_ZENITH_ANGLE', 'd'),
        ('SC_PITCH_ANGLE', 'd'),
        ('SC_YAW_ANGLE', 'd'),
        ('SC_ROLL_ANGLE', 'd'),
        ('MRO_SAMX_INNER_GIMBAL_ANGLE', 'd'),
        ('MRO_SAMX_OUTER_GIMBAL_ANGLE', 'd'),
        ('MRO_SAPX_INNER_GIMBAL_ANGLE', 'd'),
        ('MRO_SAPX_OUTER_GIMBAL_ANGLE', 'd'),
        ('MRO_HGA_INNER_GIMBAL_ANGLE', 'd'),
        ('MRO_HGA_OUTER_GIMBAL_ANGLE', 'd'),
        ('DES_TEMP', 'f'),
        ('DES_5V', 'f'),
        ('DES_12V', 'f'),
        ('DES_2V5', 'f'),
        ('RX_TEMP', 'f'),
        ('TX_TEMP', 'f'),
        ('TX_LEV', 'f'),
        ('TX_CURR', 'f'),
        ('CORRUPTED_DATA_FLAG', 'h'),
    ]
    recFmt = '>' + ''.join(fmt for _, fmt in layout)
    recLen = struct.calcsize(recFmt)  # 267 bytes per record
    #
    # Build the output dict in the original column order, inserting the
    # derived ELAPSED_TIME column right after EPHEMERIS_TIME.
    #
    auxData = {}
    for name, _ in layout:
        auxData[name] = []
        if name == 'EPHEMERIS_TIME':
            auxData['ELAPSED_TIME'] = []
    if os.path.isfile(fname):
        # Context manager guarantees the handle is closed (the original
        # opened the file and never closed it).
        with open(fname, 'rb') as _file:
            while True:
                rawData = _file.read(recLen)
                if len(rawData) < recLen:
                    # EOF (or a trailing partial record, which the original
                    # also dropped via integer division of the file size).
                    break
                for (name, _), value in zip(layout, struct.unpack(recFmt, rawData)):
                    if name == 'GEOMETRY_EPOCH':
                        value = value.decode('utf-8')
                    auxData[name].append(value)
                auxData['ELAPSED_TIME'].append(
                    auxData['EPHEMERIS_TIME'][-1] - auxData['EPHEMERIS_TIME'][0]
                )
    #
    # Check if wanting dataframe returned
    #
    if df:
        auxData = pd.DataFrame.from_dict(auxData)
    return auxData
def makeAuxPlots(df):
    """
    Quick-look plots of selected auxiliary columns versus elapsed time.

    NOTE(review): ``plt`` is never imported in this module, so calling this
    raises NameError until ``matplotlib.pyplot`` is imported as ``plt``.

    Parameters
    ----------
    df : pandas.DataFrame
        Output of ``aux_Parse(..., df=True)``.
    """
    # Two stacked panels sharing the elapsed-time x axis.
    f, axarr = plt.subplots(2, sharex=True)
    f.suptitle('Sharing X axis')
    X = df['ELAPSED_TIME']
    # Top: solar longitude; bottom: spacecraft position components (x/y/z).
    axarr[0].plot(X, df['SOLAR_LONGITUDE'], 'k.')
    axarr[1].plot(X, df['X_MARS_SC_POSITION_VECTOR'], 'k.')
    axarr[1].plot(X, df['Y_MARS_SC_POSITION_VECTOR'], 'r.')
    axarr[1].plot(X, df['Z_MARS_SC_POSITION_VECTOR'], 'b.')
    plt.show()
    return
|
#include <iostream>
#include <string>
using namespace std;
// Return a copy of `str` with every lowercase letter uppercased.
// Uses std::islower/std::toupper instead of hard-coded ASCII bounds (97/122),
// with the unsigned-char cast required to avoid undefined behaviour when
// char is signed and holds a negative value; size_t index avoids a
// signed/unsigned comparison with length().
std::string lowerCharToUpperString(std::string str)
{
    for (std::size_t i = 0; i < str.length(); i++) {
        const unsigned char c = static_cast<unsigned char>(str[i]);
        if (std::islower(c)) {
            str[i] = static_cast<char>(std::toupper(c));
        }
    }
    return str;
}
// Demo: print the uppercased sample sentence.
int main()
{
cout << lowerCharToUpperString("this is a sentence");
return 0;
}
<filename>OpenBCI_GUI/libraries/controlP5/src/controlP5/CheckBox.java
package controlP5;
/**
* controlP5 is a processing gui library.
*
* 2006-2015 by <NAME>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public License
* as published by the Free Software Foundation; either version 2.1
* of the License, or (at your option) any later version.
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General
* Public License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place, Suite 330,
* Boston, MA 02111-1307 USA
*
* @author <NAME> (http://www.sojamo.de)
* @modified 04/14/2016
* @version 2.2.6
*
*/
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.List;
import processing.core.PImage;
/**
* A multiple-choice radioButton. items are added to a checkBox and can be organized in rows and
* columns. items of a checkBox are of type Toggle.
*
* @example controllers/ControlP5checkBox
*
* @see controlP5.Toggle
*
*/
public class CheckBox extends ControlGroup< CheckBox > {
private Object _myPlug;
private String _myPlugName;
/**
* Convenience constructor to extend CheckBox.
*
* @example use/ControlP5extendController
* @param theControlP5
* @param theName
*/
public CheckBox( ControlP5 theControlP5 , String theName ) {
// Delegate to the full constructor on the default tab at (0, 0).
this( theControlP5 , theControlP5.getDefaultTab( ) , theName , 0 , 0 );
// Register with the sketch so controlP5 can manage and dispatch to it.
theControlP5.register( theControlP5.papplet , theName , this );
}
/**
* A CheckBox should only be added to controlP5 by using controlP5.addCheckBox()
*
* @exclude
* @param theControlP5
* @param theParent
* @param theName
* @param theX
* @param theY
*/
public CheckBox( final ControlP5 theControlP5 , final ControllerGroup< ? > theParent , final String theName , final int theX , final int theY ) {
super( theControlP5 , theParent , theName , theX , theY , 99 , 9 );
// A checkbox group renders without a title bar and is never collapsed.
isBarVisible = false;
isCollapse = false;
_myRadioToggles = new ArrayList< Toggle >( );
setItemsPerRow( 1 );
// Unlike a RadioButton, several items may be active at once.
isMultipleChoice = true;
// Plug value changes into a sketch-level callback taking float[];
// if no matching method exists on the sketch, disable the plug.
_myPlug = cp5.papplet;
_myPlugName = getName( );
if ( !ControllerPlug.checkPlug( _myPlug , _myPlugName , new Class[] { float[].class } ) ) {
_myPlug = null;
}
}
/**
 * Turns every checkbox item on, then publishes the updated value array.
 */
public final CheckBox activateAll( ) {
for ( Toggle t : _myRadioToggles ) {
t.activate( );
}
updateValues( );
return this;
}
/**
* Activates a single checkbox item by index
*/
public final CheckBox activate( int theIndex ) {
// Indices past the end are silently ignored (negative ones still throw).
if ( theIndex < _myRadioToggles.size( ) ) {
_myRadioToggles.get( theIndex ).activate( );
updateValues( );
}
return this;
}
/**
* deactivate a single checkbox item by index
*/
public final CheckBox deactivate( int theIndex ) {
// Indices past the end are silently ignored (negative ones still throw).
if ( theIndex < _myRadioToggles.size( ) ) {
_myRadioToggles.get( theIndex ).deactivate( );
updateValues( );
}
return this;
}
/**
 * Flips the state of the item at the given index; indices beyond the last
 * item are silently ignored.
 *
 * @param theIndex index of the item to toggle
 * @return this CheckBox for chaining
 */
public final CheckBox toggle( int theIndex ) {
	if ( theIndex >= _myRadioToggles.size( ) ) {
		return this;
	}
	Toggle item = _myRadioToggles.get( theIndex );
	if ( item.getState( ) ) {
		item.deactivate( );
	} else {
		item.activate( );
	}
	updateValues( );
	return this;
}
/**
 * Flips the state of the first item with the given name (note: this method
 * toggles the item, it does not merely deactivate it). Does nothing when no
 * item matches.
 *
 * @param theName the name of the item to toggle
 */
public final void toggle( String theName ) {
	for ( Toggle item : _myRadioToggles ) {
		if ( theName.equals( item.getName( ) ) ) {
			if ( item.getState( ) ) {
				item.deactivate( );
			} else {
				item.activate( );
			}
			updateValues( );
			return;
		}
	}
}
/**
 * Activates the first item with the given name; does nothing when no item
 * matches.
 *
 * @param theName the name of the item to activate
 * @return this CheckBox for chaining
 */
public final CheckBox activate( String theName ) {
	for ( Toggle item : _myRadioToggles ) {
		if ( theName.equals( item.getName( ) ) ) {
			item.activate( );
			updateValues( );
			return this;
		}
	}
	return this;
}
/**
 * Deactivates the first item with the given name; does nothing when no item
 * matches.
 *
 * @param theName the name of the item to deactivate
 * @return this CheckBox for chaining
 */
public final CheckBox deactivate( String theName ) {
	for ( Toggle item : _myRadioToggles ) {
		if ( theName.equals( item.getName( ) ) ) {
			item.deactivate( );
			updateValues( );
			return this;
		}
	}
	return this;
}
// Resets the group value and broadcasts the current item states to listeners.
private final void updateValues( ) {
_myValue = -1;
updateValues( true );
}
/**
 * Sets the value for all CheckBox items according to the values of the array passed on. 0 will
 * turn off an item, any other value will turn it on.
 *
 * Robustness fix: the loop is now bounded by the number of known items as
 * well as the argument length, so an array longer than the current item list
 * no longer throws ArrayIndexOutOfBoundsException.
 *
 * @param theArray one value per item; extra values are ignored
 * @return this CheckBox for chaining
 */
@Override public CheckBox setArrayValue( float[] theArray ) {
	int n = Math.min( theArray.length , Math.min( _myArrayValue.length , _myRadioToggles.size( ) ) );
	for ( int i = 0 ; i < n ; i++ ) {
		// only touch items whose state actually changes.
		if ( _myArrayValue[ i ] != theArray[ i ] ) {
			if ( theArray[ i ] == 0 ) {
				_myRadioToggles.get( i ).deactivate( );
			} else {
				_myRadioToggles.get( i ).activate( );
			}
		}
	}
	super.setArrayValue( theArray );
	return this;
}
/**
 * Returns a debug description of this controller, prefixed with its type.
 *
 * @exclude {@inheritDoc}
 */
@Override public String getInfo( ) {
return "type:\tCheckBox\n" + super.getInfo( );
}
/**
 * @exclude {@inheritDoc}
 */
@Override public String toString( ) {
return super.toString( );
}
// The items of this CheckBox; each item is a Toggle.
protected List< Toggle > _myRadioToggles;
// Vertical gap between rows of items, in pixels.
protected int spacingRow = 1;
// Horizontal gap between columns of items, in pixels.
protected int spacingColumn = 1;
// Maximum items per row before wrapping; -1 means unlimited (never wraps,
// see updateLayout).
protected int itemsPerRow = -1;
// true for checkbox behavior (many active items); radio behavior otherwise.
protected boolean isMultipleChoice;
// Default height of a newly added item, in pixels.
protected int itemHeight = 9;
// Default width of a newly added item, in pixels.
protected int itemWidth = 9;
// Which of the three item images (DEFAULT/OVER/ACTIVE) have been supplied.
protected boolean[] availableImages = new boolean[ 3 ];
// Item images indexed by state: DEFAULT, OVER, ACTIVE.
protected PImage[] images = new PImage[ 3 ];
// When false (and not multiple-choice) at least one item must stay selected.
protected boolean noneSelectedAllowed = true;
/**
 * Adds a new Toggle item with the given name and internal value.
 *
 * @param theName the item's name, also used as its caption label
 * @param theValue the internal value reported for this item in events
 * @return this CheckBox for chaining
 */
public CheckBox addItem( final String theName , final float theValue ) {
Toggle t = cp5.addToggle( theName , 0 , 0 , itemWidth , itemHeight );
t.getCaptionLabel( ).align( RIGHT_OUTSIDE , CENTER ).setPadding( Label.paddingX , 0 );
t.setMode( ControlP5.DEFAULT );
t.setImages( images[ 0 ] , images[ 1 ] , images[ 2 ] );
// NOTE(review): images[0] may still be null when no images were assigned;
// this relies on Toggle.setSize(PImage) tolerating null - confirm.
t.setSize( images[ 0 ] );
addItem( t , theValue );
return this;
}
/**
 * Adopts an existing Toggle as an item of this CheckBox: the toggle is
 * re-parented into this group, pinned in place, silenced (only the CheckBox
 * broadcasts), and excluded from individual property handling.
 *
 * @param theToggle the toggle to adopt
 * @param theValue the internal value assigned to the toggle
 * @return this CheckBox for chaining
 */
public CheckBox addItem( final Toggle theToggle , final float theValue ) {
theToggle.setGroup( this );
theToggle.isMoveable = false;
theToggle.setInternalValue( theValue );
// the item must not broadcast on its own; the CheckBox aggregates events.
theToggle.isBroadcast = false;
_myRadioToggles.add( theToggle );
updateLayout( );
// items inherit the CheckBox color scheme.
getColor( ).copyTo( theToggle );
theToggle.addListener( this );
updateValues( false );
cp5.removeProperty( theToggle );
return this;
}
/**
 * Removes all items matching the given name from this CheckBox and detaches
 * this CheckBox as their listener.
 *
 * Fix: the original forward loop iterated over a cached size while removing
 * elements, which shifted indices and threw IndexOutOfBoundsException as
 * soon as any item before the last index was removed. Iterating backwards
 * is safe with respect to removal.
 *
 * @param theName the name of the item(s) to remove
 * @return this CheckBox for chaining
 */
public CheckBox removeItem( final String theName ) {
	for ( int i = _myRadioToggles.size( ) - 1 ; i >= 0 ; i-- ) {
		if ( _myRadioToggles.get( i ).getName( ).equals( theName ) ) {
			_myRadioToggles.get( i ).removeListener( this );
			_myRadioToggles.remove( i );
		}
	}
	updateValues( false );
	return this;
}
/**
 * Assigns the images used for the three item states in one call.
 *
 * @param theDefaultImage
 * @param theOverImage
 * @param theActiveImage
 * @return CheckBox
 */
public CheckBox setImages( PImage theDefaultImage , PImage theOverImage , PImage theActiveImage ) {
setImage( theDefaultImage , DEFAULT );
setImage( theOverImage , OVER );
setImage( theActiveImage , ACTIVE );
return this;
}
/**
 * Assigns an image for the default state of the items.
 *
 * @param theImage
 */
public CheckBox setImage( PImage theImage ) {
return setImage( theImage , DEFAULT );
}
/**
 * Assigns an image for one item state and applies it to all existing items.
 *
 * @param theImage
 * @param theState
 *            use Controller.DEFAULT (background), or Controller.OVER (foreground), or
 *            Controller.ACTIVE (active)
 * @return
 */
public CheckBox setImage( PImage theImage , int theState ) {
// null images are ignored so a previously assigned image is kept.
if ( theImage != null ) {
images[ theState ] = theImage;
availableImages[ theState ] = true;
for ( int i = 0 ; i < _myRadioToggles.size( ) ; i++ ) {
_myRadioToggles.get( i ).setImage( theImage , theState );
}
}
return this;
}
// Sizes all items to the dimensions of the given image.
public CheckBox setSize( PImage theImage ) {
return setSize( theImage.width , theImage.height );
}
// Sizes all items (existing and future) to the given dimensions.
public CheckBox setSize( int theWidth , int theHeight ) {
setItemWidth( theWidth );
setItemHeight( theHeight );
return this;
}
/**
 * set the height of a radioButton/checkBox item. by default the height is 9px (see the
 * itemHeight field). in order to recognize a custom height, the itemHeight has to be set
 * before adding items to a radioButton/checkBox.
 *
 * @param theItemHeight
 * @return this CheckBox for chaining
 */
public CheckBox setItemHeight( int theItemHeight ) {
itemHeight = theItemHeight;
// also resize items that have already been added.
for ( Toggle t : _myRadioToggles ) {
t.setHeight( theItemHeight );
}
updateLayout( );
return this;
}
/**
 * set the width of a radioButton/checkBox item. by default the width is 9px (see the
 * itemWidth field). in order to recognize a custom width, the itemWidth has to be set before
 * adding items to a radioButton/checkBox.
 *
 * @param theItemWidth
 * @return this CheckBox for chaining
 */
public CheckBox setItemWidth( int theItemWidth ) {
itemWidth = theItemWidth;
// also resize items that have already been added.
for ( Toggle t : _myRadioToggles ) {
t.setWidth( theItemWidth );
}
updateLayout( );
return this;
}
/**
 * Gets a checkbox item by index.
 *
 * @param theIndex
 * @return Toggle
 */
public Toggle getItem( int theIndex ) {
return _myRadioToggles.get( theIndex );
}
// NOTE: returns the live internal list, not a copy; mutating it affects
// this CheckBox directly.
public List< Toggle > getItems( ) {
return _myRadioToggles;
}
/**
 * Reports the on/off state of the item at the given index.
 *
 * @param theIndex index of the item
 * @return the item's state, or false for out-of-range indices
 */
public boolean getState( int theIndex ) {
	if ( theIndex < 0 || theIndex >= _myRadioToggles.size( ) ) {
		return false;
	}
	return _myRadioToggles.get( theIndex ).getState( );
}
/**
 * Reports the on/off state of the item with the given name.
 *
 * @param theName name of the item
 * @return the item's state, or false when no item matches
 */
public boolean getState( String theName ) {
	for ( Toggle item : _myRadioToggles ) {
		if ( theName.equals( item.getName( ) ) ) {
			return item.getState( );
		}
	}
	return false;
}
/**
 * Recomputes the position of every item, flowing items left-to-right and
 * wrapping to a new row after itemsPerRow items (with itemsPerRow == -1 the
 * row never wraps). Tracks the resulting group width in _myWidth.
 *
 * @exclude
 */
public void updateLayout( ) {
// nn counts items in the current row; (xx, yy) is the next item position.
int nn = 0;
int xx = 0;
int yy = 0;
int n = _myRadioToggles.size( );
for ( int i = 0 ; i < n ; i++ ) {
Toggle t = _myRadioToggles.get( i );
set( t.position , xx , yy );
xx += t.getWidth( ) + spacingColumn;
nn++;
if ( nn == itemsPerRow ) {
// row is full: remember its width and wrap to the next row.
nn = 0;
_myWidth = xx;
yy += t.getHeight( ) + spacingRow;
xx = 0;
} else {
_myWidth = xx;
}
}
}
/**
 * Items of a radioButton or a checkBox are organized in columns and rows. SetItemsPerRow sets
 * the limit of items per row. items exceeding the limit will be pushed to the next row.
 *
 * @param theValue maximum number of items per row (-1 for a single row)
 * @return this CheckBox for chaining
 */
public CheckBox setItemsPerRow( final int theValue ) {
itemsPerRow = theValue;
updateLayout( );
return this;
}
/**
 * Sets the spacing in pixels between columns.
 *
 * @param theSpacing
 * @return this CheckBox for chaining
 */
public CheckBox setSpacingColumn( final int theSpacing ) {
spacingColumn = theSpacing;
updateLayout( );
return this;
}
/**
 * Sets the spacing in pixels between rows.
 *
 * @param theSpacing
 * @return this CheckBox for chaining
 */
public CheckBox setSpacingRow( final int theSpacing ) {
spacingRow = theSpacing;
updateLayout( );
return this;
}
/**
 * Deactivates every item. In single-choice mode this is refused when an
 * empty selection is not allowed (see setNoneSelectedAllowed).
 *
 * @return this CheckBox for chaining
 */
public CheckBox deactivateAll( ) {
	if ( !isMultipleChoice && !noneSelectedAllowed ) {
		return this;
	}
	for ( Toggle item : _myRadioToggles ) {
		item.deactivate( );
	}
	_myValue = -1;
	updateValues( true );
	return this;
}
/**
 * {@inheritDoc}
 *
 * Handles events fired by the item toggles: enforces single-choice rules
 * when not in multiple-choice mode, republishes the aggregated values, and,
 * when plugged, forwards the float[] to the plug target via reflection.
 *
 * @exclude
 */
@ControlP5.Invisible @Override public void controlEvent( ControlEvent theEvent ) {
if ( !isMultipleChoice ) {
// radio behavior: when deselecting is forbidden and this event would
// switch the current item off, silently switch it back on (broadcast
// is suspended to avoid re-entering this handler).
if ( noneSelectedAllowed == false && theEvent.getController( ).getValue( ) < 1 ) {
if ( theEvent.getController( ) instanceof Toggle ) {
Toggle t = ( ( Toggle ) theEvent.getController( ) );
boolean b = t.isBroadcast( );
t.setBroadcast( false );
t.setState( true );
t.setBroadcast( b );
return;
}
}
// deactivate every item except the event source; the group value becomes
// the internal value of the active item, or -1 when none is active.
_myValue = -1;
int n = _myRadioToggles.size( );
for ( int i = 0 ; i < n ; i++ ) {
Toggle t = _myRadioToggles.get( i );
if ( !t.equals( theEvent.getController( ) ) ) {
t.deactivate( );
} else {
if ( t.isOn ) {
_myValue = t.internalValue( );
}
}
}
}
updateValues( true );
// forward the aggregated values to the plugged object, if any.
if ( _myPlug != null ) {
try {
Method method = _myPlug.getClass( ).getMethod( _myPlugName , float[].class );
method.invoke( _myPlug , ( float[] ) getArrayValue( ) );
} catch ( SecurityException ex ) {
ex.printStackTrace( );
} catch ( NoSuchMethodException ex ) {
ex.printStackTrace( );
} catch ( IllegalArgumentException ex ) {
ex.printStackTrace( );
} catch ( IllegalAccessException ex ) {
ex.printStackTrace( );
} catch ( InvocationTargetException ex ) {
ex.printStackTrace( );
}
}
}
/**
 * Plugs this CheckBox to an object, keeping the current plug method name
 * (by default this control's name). The target method must accept a
 * float[]; when no matching method exists, the plug is disabled.
 *
 * @param theObject the object to forward value changes to
 * @return this CheckBox for chaining
 */
public CheckBox plugTo( Object theObject ) {
_myPlug = theObject;
if ( !ControllerPlug.checkPlug( _myPlug , _myPlugName , new Class[] { float[].class } ) ) {
_myPlug = null;
}
return this;
}
/**
 * Plugs this CheckBox to a named method (accepting a float[]) of an object.
 * When no matching method exists, the plug is disabled.
 *
 * @param theObject the object to forward value changes to
 * @param thePlugName the name of the method to invoke
 * @return this CheckBox for chaining
 */
public CheckBox plugTo( Object theObject , String thePlugName ) {
_myPlug = theObject;
_myPlugName = thePlugName;
if ( !ControllerPlug.checkPlug( _myPlug , _myPlugName , new Class[] { float[].class } ) ) {
_myPlug = null;
}
return this;
}
/**
 * Rebuilds the aggregated value array from the current item states and
 * optionally broadcasts the change as a float ControlEvent.
 *
 * @param theBroadcastFlag true to notify listeners of the change
 */
protected void updateValues( boolean theBroadcastFlag ) {
	int n = _myRadioToggles.size( );
	_myArrayValue = new float[ n ];
	for ( int i = 0 ; i < n ; i++ ) {
		_myArrayValue[ i ] = _myRadioToggles.get( i ).getValue( );
	}
	if ( theBroadcastFlag ) {
		cp5.getControlBroadcaster( ).broadcast( new ControlEvent( this ) , ControlP5Constants.FLOAT );
	}
}
/**
 * In order to always have 1 item selected, use setNoneSelectedAllowed(false), by default this
 * is true. setNoneSelectedAllowed does not apply when in multipleChoice mode.
 *
 * @param theValue
 * @return this CheckBox for chaining
 */
public CheckBox setNoneSelectedAllowed( boolean theValue ) {
noneSelectedAllowed = theValue;
return this;
}
// Applies the given color to the caption labels of all items.
public CheckBox setColorLabels( int theColor ) {
for ( Toggle t : _myRadioToggles ) {
t.getCaptionLabel( ).setColor( theColor );
}
return this;
}
/**
 * Hides the caption labels of all items.
 *
 * @return this CheckBox for chaining
 */
public CheckBox hideLabels( ) {
	setLabelVisibility( false );
	return this;
}
/**
 * Shows the caption labels of all items.
 *
 * @return this CheckBox for chaining
 */
public CheckBox showLabels( ) {
	setLabelVisibility( true );
	return this;
}
// Shared implementation for hideLabels()/showLabels().
private void setLabelVisibility( boolean theValue ) {
	for ( Toggle t : _myRadioToggles ) {
		t.getCaptionLabel( ).setVisible( theValue );
	}
}
// Switches upper-case rendering of all item caption labels on or off.
public CheckBox toUpperCase( boolean theValue ) {
for ( Toggle t : _myRadioToggles ) {
t.getCaptionLabel( ).toUpperCase( theValue );
}
return this;
}
/**
 * @deprecated use {@link #addItem(String, float)} instead
 * @exclude
 */
@Deprecated public CheckBox add( final String theName , final float theValue ) {
return addItem( theName , theValue );
}
}
|
/*
 * Central place where all API endpoints are managed.
 */
import { get, post } from "./http";
/** POST /login with the given request options. */
export function login(options = {}) {
  return post("login", options);
}

/** GET /chartdata with the given request options. */
export function chartData(options = {}) {
  return get("chartdata", options);
}
import axios from 'axios'
const { FORTY_TWO_ENDPOINT, FORTY_TWO_UID, FORTY_TWO_SECRET } = process.env
/**
 * Express-style middleware that exposes the 42 API helpers on req.fortyTwo.
 */
export default function fortyTwo(req, _res, next) {
  req.fortyTwo = { getUser: getUser }
  next()
}
/**
 * Requests an OAuth client-credentials token from the 42 API.
 * @returns the token response body (contains access_token)
 */
async function getToken() {
  const payload = {
    grant_type: 'client_credentials',
    client_id: FORTY_TWO_UID,
    client_secret: FORTY_TWO_SECRET,
  }
  const response = await axios.post(`${FORTY_TWO_ENDPOINT}/oauth/token`, payload)
  return response.data
}
/**
 * Looks up a 42 user by login. A fresh token is fetched for every call.
 * @param {string} login the user's 42 login
 * @returns the user resource returned by the 42 API
 */
async function getUser(login) {
  const { access_token } = await getToken()
  const response = await axios.get(`${FORTY_TWO_ENDPOINT}/v2/users/${login}`, {
    headers: {
      Authorization: `Bearer ${access_token}`,
    },
  })
  return response.data
}
|
// add custom js in this file |
<gh_stars>0
package org.codingmatters.tests.compile.helpers.helpers;
import org.codingmatters.tests.compile.helpers.ClassLoaderHelper;
import java.lang.reflect.Array;
/**
 * Helper for building and wrapping arrays of a dynamically loaded class.
 */
public class ClassArrayHelper extends ClassHelper {

    /** Component type of the arrays produced by this helper. */
    private final Class elementClazz;

    /**
     * @param classLoader helper providing the class loader context
     * @param clazz the array component type
     */
    public ClassArrayHelper(ClassLoaderHelper classLoader, Class clazz) {
        super(classLoader, Array.newInstance(clazz, 0).getClass());
        this.elementClazz = clazz;
    }

    /**
     * Creates a new array of the component type filled with the given
     * elements, wrapped in an ObjectHelper.
     *
     * @param elements the elements to store (must not be null)
     * @return the wrapped array
     */
    public ObjectHelper newArray(Object ... elements) {
        if (elements == null) {
            throw new NullPointerException();
        }
        Object array = Array.newInstance(this.elementClazz, elements.length);
        int index = 0;
        for (Object element : elements) {
            Array.set(array, index++, element);
        }
        return new ObjectHelper(this.classLoader(), this.get(), array);
    }
}
|
package com.tactbug.ddd.product;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.kafka.annotation.EnableKafka;
/**
* @Author tactbug
* @Email <EMAIL>
* @Time 2021/10/7 0:09
*/
// Spring Boot entry point for the product service; Kafka listeners enabled.
@SpringBootApplication
@EnableKafka
public class TactProductApplication {
// Logical name of this application.
// NOTE(review): not referenced in this file - presumably used elsewhere; confirm.
public static final String APPLICATION_NAME = "product";
// Bootstraps the Spring application context.
public static void main(String[] args) {
SpringApplication.run(TactProductApplication.class, args);
}
}
|
<filename>open-sphere-base/core/src/main/java/io/opensphere/core/collada/ColladaUtilities.java
package io.opensphere.core.collada;
import java.awt.Color;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import io.opensphere.core.collada.jaxb.Geometry;
import io.opensphere.core.collada.jaxb.Input;
import io.opensphere.core.collada.jaxb.Mesh;
import io.opensphere.core.collada.jaxb.Source;
import io.opensphere.core.collada.jaxb.Vertices;
import io.opensphere.core.math.Matrix4d;
import io.opensphere.core.math.Vector3d;
import io.opensphere.core.model.ModelPosition;
import io.opensphere.core.util.collections.New;
import io.opensphere.core.util.lang.StringUtilities;
/** Utilities for parsing COLLADA models. */
public final class ColladaUtilities
{
/**
 * Transform an array of normal data into {@link Vector3d}s.
 *
 * @param normalOffset The normal offset.
 * @param offsetCount The number of offsets there are into the primitives
 *            array.
 * @param normalData The source data.
 * @param primitives The indices into the source data.
 * @param matrix Optional transform matrix to be applied to the geometries'
 *            coordinates.
 * @return The normals, or {@code null} when no normal data is supplied.
 */
public static List<Vector3d> getNormals(int normalOffset, int offsetCount, float[] normalData, int[] primitives,
        Matrix4d matrix)
{
    List<Vector3d> normals;
    if (normalData == null)
    {
        normals = null;
    }
    else
    {
        normals = New.list(primitives.length / offsetCount);
        // every offsetCount-th primitive entry (starting at normalOffset)
        // indexes a 3-float normal in normalData.
        for (int i = normalOffset; i < primitives.length; i += offsetCount)
        {
            int index = primitives[i];
            normals.add(new Vector3d(normalData[index * 3], normalData[index * 3 + 1], normalData[index * 3 + 2]));
        }
        if (matrix != null)
        {
            // normals are directions: apply only the rotational part of the
            // transform, so the translation is zeroed out first.
            Matrix4d rot = matrix.clone();
            rot.setTranslation(0, 0, 0);
            for (int index = 0; index < normals.size(); ++index)
            {
                normals.set(index, rot.mult(normals.get(index)));
            }
        }
    }
    return normals;
}
/**
 * Transform an array of data into {@link ModelPosition}s.
 *
 * @param vertexOffset The vertex offset.
 * @param offsetCount The number of offsets there are into the primitives
 *            array.
 * @param positionData The source data.
 * @param primitives The indices into the source data.
 * @param matrix Optional transform matrix to be applied to the geometries'
 *            coordinates.
 * @return The positions.
 */
public static List<ModelPosition> getPositions(int vertexOffset, int offsetCount, float[] positionData, int[] primitives,
        Matrix4d matrix)
{
    List<ModelPosition> positions = New.list(primitives.length / offsetCount);
    // every offsetCount-th primitive entry (starting at vertexOffset)
    // indexes a 3-float position in positionData.
    for (int i = vertexOffset; i < primitives.length; i += offsetCount)
    {
        int index = primitives[i];
        positions.add(new ModelPosition(positionData[index * 3], positionData[index * 3 + 1], positionData[index * 3 + 2]));
    }
    if (matrix != null)
    {
        // positions are points: apply the full transform (rotation and
        // translation), unlike getNormals.
        for (int index = 0; index < positions.size(); ++index)
        {
            positions.set(index, new ModelPosition(matrix.mult(positions.get(index).asVector3d())));
        }
    }
    return positions;
}
/**
 * Reads data from the matching source in the geometry.
 *
 * @param inputs the inputs
 * @param primitives the primitives
 * @param geometry the geometry
 * @param semantic the semantic
 * @return the source data, one float[] of the source's stride per matching
 *         primitive entry; empty when no matching input/source is found
 */
public static List<float[]> getSourceData(Collection<Input> inputs, int[] primitives, Geometry geometry, String semantic)
{
    int offsetCount = inputs.size();
    List<float[]> data = New.list(primitives.length / offsetCount);
    Optional<Input> inputResult = inputs.stream().filter(i -> semantic.equals(i.getSemantic())).findAny();
    if (inputResult.isPresent())
    {
        Input input = inputResult.get();
        // inputs reference a source by URI fragment ("#id").
        String sourceId = StringUtilities.removePrefix(input.getSource(), "#");
        Optional<Source> sourceResult = geometry.getMesh().getSources().stream().filter(s -> sourceId.equals(s.getId()))
                .findAny();
        if (!sourceResult.isPresent())
        {
            // the input may reference a <vertices> element instead of a
            // <source>; follow its POSITION input to the actual source.
            Optional<Vertices> vertex = geometry.getMesh().getVertices().stream().filter(v -> sourceId.equals(v.getId()))
                    .findAny();
            if (vertex.isPresent())
            {
                Optional<Input> positionInput = vertex.get().getInputs().stream()
                        .filter(i -> "POSITION".equals(i.getSemantic())).findAny();
                if (positionInput.isPresent())
                {
                    String posSourceId = StringUtilities.removePrefix(positionInput.get().getSource(), "#");
                    sourceResult = geometry.getMesh().getSources().stream().filter(s -> posSourceId.equals(s.getId()))
                            .findAny();
                }
            }
        }
        if (sourceResult.isPresent())
        {
            Source source = sourceResult.get();
            float[] floatArray = source.getFloatArray();
            // stride = number of floats per element in the source array.
            int stride = source.getTechniqueCommon().getAccessor().getStride();
            for (int i = input.getOffset(); i < primitives.length; i += offsetCount)
            {
                int index = primitives[i];
                float[] coordinate = new float[stride];
                int startIndex = index * stride;
                for (int c = 0; c < stride; c++)
                {
                    coordinate[c] = floatArray[startIndex + c];
                }
                data.add(coordinate);
            }
        }
    }
    return data;
}
/**
 * Find the source whose id matches the given tag, and return its data.
 * When several sources match, the data of the last match is returned.
 *
 * @param mesh The mesh.
 * @param tag The tag.
 * @return The data from the source, or {@code null} if a matching source
 *         was not found.
 */
public static float[] getDataFromSources(Mesh mesh, String tag)
{
    float[] data = null;
    for (Source source : mesh.getSources())
    {
        // source ids are referenced with a '#' prefix elsewhere in the file.
        String reference = "#" + source.getId();
        if (reference.equals(tag))
        {
            data = source.getFloatArray();
        }
    }
    return data;
}
/**
 * Get a map of "vertices" ids to "source" ids for a particular semantic.
 *
 * @param mesh The input mesh.
 * @param semantic The input semantic of interest.
 * @return The map.
 */
public static Map<String, String> getVerticesIdToSourceIdMap(Mesh mesh, String semantic)
{
    Map<String, String> vertexToPositionIdMap = New.map();
    for (Vertices vertices : mesh.getVertices())
    {
        // keys carry a '#' prefix to match the URI-style references used by
        // <input source="#id"> elements.
        vertices.getInputs().stream().filter(i -> semantic.equals(i.getSemantic()))
                .forEach(i -> vertexToPositionIdMap.put("#" + vertices.getId(), i.getSource()));
    }
    return vertexToPositionIdMap;
}
/**
 * Parses the color string into a Color.
 *
 * @param colorString space-separated components in the range [0, 1], in
 *            "r g b" or "r g b a" order; alpha defaults to opaque when
 *            omitted
 * @return the Color
 */
public static Color parseColor(String colorString)
{
    String[] components = colorString.split(" ");
    int r = parseColorComponent(components[0]);
    int g = parseColorComponent(components[1]);
    int b = parseColorComponent(components[2]);
    // COLLADA color values may omit the alpha component; treat a missing
    // alpha as fully opaque instead of throwing AIOOBE.
    int a = components.length > 3 ? parseColorComponent(components[3]) : 255;
    return new Color(r, g, b, a);
}

/**
 * Parses a color component.
 *
 * @param c the component
 * @return the value (0-255)
 */
private static int parseColorComponent(String c)
{
    return (int)Math.round(Double.parseDouble(c) * 255);
}
/** Disallow instantiation; this is a static utility class. */
private ColladaUtilities()
{
}
}
|
<gh_stars>0
var Sidebar = (function () {
var scope = {};
var newLeadsCount = 0;
var $sidebar = null;
var $timelineContainer = null;
var $newLeadsContainer = null;
var $rejectedContainer = null;
var qt = null;
var qtTimer = null;
var initialized = false;
function init() {
$(document).on('show.bs.tab', '#sidebar>ul.nav.nav-tabs>li>a[data-toggle="tab"]', function () {
$sidebar.find('>.tab-content').addClass('open');
$(this).addClass('being-activated');
var tab = this.href.substr(this.href.indexOf('#'));
switch (tab) {
case '#sidebar-tab-pane-new-leads':
onNewLeadsTabShow();
break;
}
}).on('click', '#sidebar>ul.nav.nav-tabs>li>a[data-toggle="tab"]', function () {
var $this = $(this);
if ($this.hasClass('being-activated')) {
$this.removeClass('being-activated');
} else {
$this.parent().removeClass('active');
$this.attr('aria-expanded', false);
$sidebar.find('>.tab-content').removeClass('open');
}
}).on('show.bs.tab', '#sidebar #sidebar-tab-pane-new-leads>ul.nav.nav-tabs>li>a[data-toggle="tab"]', function () {
var tab = this.href.substr(this.href.indexOf('#'));
switch (tab) {
case '#sidebar-new-leads-tab-pane-new-leads':
newLeadsCount = 0;
updateNewLeadsCount();
onNewLeadsTabNewLeadsShow();
break;
case '#sidebar-new-leads-tab-pane-rejected-leads':
onNewLeadsTabRejectedLeadsShow();
break;
}
});
initialized = true;
}
scope.load = function () {
if (!initialized) {
init();
}
addSidebar();
};
function addSidebar() {
$sidebar = $('#sidebar');
if (!$sidebar.length) {
$sidebar = $('\
<div id="sidebar">\
<ul class="nav nav-tabs nav-stacked">\
<li><a data-toggle="tab" href="#sidebar-tab-pane-new-leads"><i class="fa fa-user-plus"></i><br />New Leads<span class="badge"></span></a></li>\
<!--li><a data-toggle="tab" href="#sidebar-tab-pane-timeline"><i class="fa fa-clock-o"></i><br />Timeline<span class="badge">3</span></a></li>\
<li><a data-toggle="tab" href="#sidebar-tab-pane-tasks"><i class="fa fa-check-square-o"></i><br />Tasks<span class="badge">3</span></a></li>\
<li><a data-toggle="tab" href="#sidebar-tab-pane-calendar"><i class="fa fa-calendar"></i><br />Calendar<span class="badge">3</span></a></li-->\
</ul>\
<div class="tab-content">\
<div id="sidebar-tab-pane-new-leads" class="tab-pane fade">\
<ul class="nav nav-tabs nav-justified">\
<li><a data-toggle="tab" href="#sidebar-new-leads-tab-pane-new-leads">New Leads</a></li>\
<li><a data-toggle="tab" href="#sidebar-new-leads-tab-pane-rejected-leads">Rejected</a></li>\
</ul>\
<div class="tab-content">\
<div id="sidebar-new-leads-tab-pane-new-leads" class="tab-pane fade"></div>\
<div id="sidebar-new-leads-tab-pane-rejected-leads" class="tab-pane fade"></div>\
</div>\
</div>\
<div id="sidebar-tab-pane-timeline" class="tab-pane fade">Timeline</div>\
<div id="sidebar-tab-pane-tasks" class="tab-pane fade">Tasks</div>\
<div id="sidebar-tab-pane-calendar" class="tab-pane fade"></div>\
</div>\
</div>\
').appendTo($('#page-clients.main-block-wrapper'));
$timelineContainer = $sidebar.find('#sidebar-tab-pane-timeline');
$newLeadsContainer = $sidebar.find('#sidebar-new-leads-tab-pane-new-leads');
$rejectedContainer = $sidebar.find('#sidebar-new-leads-tab-pane-rejected-leads');
updateNewLeadsCount();
}
if (InitParams.sidebar) {
$sidebar.find('>ul.nav.nav-tabs>li>a[data-toggle="tab"][href="#sidebar-tab-pane-new-leads"]').tab('show');
InitParams.sidebar = false;
}
loadTimeline();
}
scope.addNewLeadsCount = function (count) {
newLeadsCount += count;
updateNewLeadsCount();
};
function updateNewLeadsCount() {
var $badge = $('#sidebar a[href="#sidebar-tab-pane-new-leads"] .badge').html(newLeadsCount);
if (newLeadsCount) {
$badge.show();
} else {
$badge.hide();
}
}
function onNewLeadsTabShow() {
if (!$('#sidebar #sidebar-tab-pane-new-leads>ul.nav.nav-tabs>li>a[data-toggle="tab"].active').length) {
$('#sidebar #sidebar-tab-pane-new-leads>ul.nav.nav-tabs>li:first-child>a[data-toggle="tab"]').click();
loadPopover();
}
}
function onNewLeadsTabNewLeadsShow() {
if ($newLeadsContainer.is(':empty')) {
loadNewLeads();
}
}
function onNewLeadsTabRejectedLeadsShow() {
loadRejectedLeads();
}
function loadNewLeads() {
$newLeadsContainer.html('<div class="loader"><i class="fa fa-spin fa-spinner fa-4x"></i></div>');
$.get("/api/v1/workspaces/" + current_workspace_id + "/leads?is_enabled=1&include_source=1&include_workflow_status=1", function (newLeads) {
var prevDate = 0;
var $newLeads = $('<div class="new-leads">').appendTo($newLeadsContainer.empty());
$.each(newLeads, function (i, lead) {
var date = new Date(lead.created_at * 1000);
date.setHours(0);
date.setMinutes(0);
date.setSeconds(0);
var tc = date.getTime();
if (!prevDate || prevDate != tc) {
var monthName = MonthNames[date.getMonth()];
monthName = monthName.substr(0, 3);
$('<div class="date-viewer"><div class="date"><span class="month">' + monthName + '</span><span class="day">' + date.getDate() + '</span></div></div>').appendTo($newLeads);
}
prevDate = tc;
$createNewLead(lead).appendTo($newLeads);
});
});
}
function $createNewLead(lead, rejected) {
var $lead = null;
if (lead.source != null && lead.source._type) {
switch (lead.source._type) {
case LeadSourceTypes.TWITTER_TWEET:
case LeadSourceTypes.TWITTER_USER:
$lead = $createNewLeadForTwitter(lead, rejected);
break;
case LeadSourceTypes.DBPERSON:
$lead = $createNewLeadForDbperson(lead, rejected);
break;
}
$lead.attr('data-lead-type', lead.source._type);
} else {
$lead = $createNewLeadForManual(lead, rejected);
}
$lead.data('lead', lead).attr('data-lead-id', lead.id);
$lead.on('click', '.btn-accept', function () {
var $btn = $(this);
var $lead = $btn.closest('.new-lead');
var lead = $lead.data('lead');
var activate_workflow = $btn.hasClass('btn-workflow-activation');
var $xhr = acceptLead(lead, activate_workflow);
if ($xhr) {
var html = $btn.html();
$btn.html((activate_workflow ? '<small>ADD TO</small><br />' : '') + '<i class="fa fa-spin fa-spinner"></i>Accepting...').attr('disabled', true);
$xhr.done(function () {
$btn.html(html);
setTimeout(function () {
$lead.fadeOut({duration: 500, queue: false}).slideUp(500, function () {
$(this).remove();
});
if ($lead.prev().is('.date-viewer') && (!$lead.next().length || $lead.next().is('.date-viewer'))) {
$lead.prev().fadeOut({duration: 500, queue: false}).slideUp(500, function () {
$(this).remove();
});
}
}, 100);
}).fail(function (jqXHR) {
$btn.html(html).attr('disabled', false);
if (jqXHR.status === 402) {
$btn.html(html);
showErrorMessage('Error', 'Please purchase additional credits to accept more leads.');
} else if (jqXHR.status === 404 || jqXHR.status === 409) {
$btn.html(html);
showErrorMessage('Lead', 'Another Team Member has already accepted this lead.');
setTimeout(function () {
$lead.fadeOut({duration: 500, queue: false}).slideUp(500, function () {
$(this).remove();
});
if ($lead.prev().is('.date-viewer') && (!$lead.next().length || $lead.next().is('.date-viewer'))) {
$lead.prev().fadeOut({duration: 500, queue: false}).slideUp(500, function () {
$(this).remove();
});
}
}, 100);
} else if (jqXHR.status === 410) {
$btn.html(html);
showErrorMessage('Error', 'Email information is no longer valid. Lead has been deleted and no credits have been charged.');
setTimeout(function () {
$lead.fadeOut({duration: 500, queue: false}).slideUp(500, function () {
$(this).remove();
});
if ($lead.prev().is('.date-viewer') && (!$lead.next().length || $lead.next().is('.date-viewer'))) {
$lead.prev().fadeOut({duration: 500, queue: false}).slideUp(500, function () {
$(this).remove();
});
}
}, 100);
}
});
}
}).on('click', '.btn-reject', function () {
var $btn = $(this);
var $lead = $btn.closest('.new-lead');
var $xhr = rejectLead($lead.data('lead'));
if ($xhr) {
var $btn = $(this);
var html = $btn.html();
$btn.html('<i class="fa fa-spin fa-spinner"></i> Rejecting...').attr('disabled', true);
$xhr.done(function () {
$btn.html(html);
setTimeout(function () {
$lead.fadeOut({duration: 500, queue: false}).slideUp(500, function () {
$(this).remove();//prependTo($rejectedContainer.find('.new-leads')).show();
});
if ($lead.prev().is('.date-viewer') && (!$lead.next().length || $lead.next().is('.date-viewer'))) {
$lead.prev().fadeOut({duration: 500, queue: false}).slideUp(500, function () {
$(this).remove();
});
}
}, 100);
}).fail(function () {
$btn.html(html).attr('disabled', false);
});
}
});
return $lead;
}
function $createNewLeadForTwitter(lead, rejected) {
if (lead.source._type == LeadSourceTypes.TWITTER_TWEET) {
var text = lead.source.text ? decorateDealSourceText(lead.source.text, lead.action_values.keywords, 100) : '';
var client_fullname = (lead.source.user != null && lead.source.user.fullname) ? lead.source.user.fullname : '';
var client_username = (lead.source.user != null && lead.source.user.username) ? lead.source.user.username : '';
var client_avatar = (lead.source.user != null && lead.source.user.avatar) ? lead.source.user.avatar : '/images/profile-blank.png';
} else if (lead.source._type == LeadSourceTypes.TWITTER_USER) {
var text = '';
var client_fullname = lead.source.fullname ? lead.source.fullname : '';
var client_username = lead.source.username ? lead.source.username : '';
var client_avatar = lead.source.avatar ? lead.source.avatar : '/images/profile-blank.png';
}
var $lead = $('\
<div class="new-lead new-lead-from-twitter">\
<span class="icon"><i class="fa fa-cliently-twitter"></i></span>\
<span class="twitter-icon">' + (lead.source._type == LeadSourceTypes.TWITTER_TWEET ? '<i class="fa fa-edit"></i>' : '<img src="/images/big-egg.png" />') + '</span>\
<div class="form-actions">\
<button class="btn btn-success btn-accept">Accept</button>\
<button class="btn btn-warning btn-accept btn-workflow-activation' + ((lead.workflow == null || ! lead.workflow.is_enabled) ? ' hide' : '') + '"><small>ADD TO</small><br />Workflow</button>\
<br/>\
' + (rejected ? '' : '<button class="btn btn-reject">Reject</button>') + '\
</div>\
<div class="twitter-info">\
<span class="twitter-info-avatar"><img src="//' + client_avatar + '"/></span>\
<div class="twitter-info-header">\
<h4 class="twitter-info-fullname">' + client_fullname + '</h4>\
<span class="twitter-info-username">@' + client_username + '</span>\
</div>\
<p class="twitter-info-description">' + text + '</p>\
<div class="twitter-source-info">\
' + (lead.source._type == LeadSourceTypes.TWITTER_TWEET ? '<label><span class="icon"><i class="fa fa-map-marker"></i></span>' + (lead.action_values.location ? lead.action_values.location : '') + '</label>' : '') + '\
<label><span class="icon"><i class="fa fa-search"></i></span>' + lead.action_values.keywords[0] + '</label>\
' + (lead.source._type == LeadSourceTypes.TWITTER_TWEET ? '<label><span class="icon"><i class="fa fa-crosshairs"></i></span>' + (lead.action_values.range ? lead.action_values.range.toLocaleString() : '') + ' mi</label>' : '') + '\
</div>\
</div>\
</div>\
');
$lead.find('.twitter-info .twitter-info-avatar img').error(function () {
this.onerror = null;
this.src = '/images/profile-blank.png';
});
return $lead;
}
// Build the sidebar card for a new lead sourced from the internal person
// database (Dbperson). `rejected` suppresses the Reject button (used when
// rendering the rejected-leads list). Returns a detached jQuery element.
function $createNewLeadForDbperson(lead, rejected) {
// Missing source fields degrade to empty strings / a placeholder avatar.
var name = lead.source.name ? lead.source.name : '';
var email = lead.source.email ? lead.source.email : '';
var phone = lead.source.phone ? lead.source.phone : '';
var occupation = lead.source.occupation ? lead.source.occupation : '';
var avatar = lead.source.avatar ? lead.source.avatar : '/images/profile-blank.png';
var values = lead.action_values;
// Translate the stored filter ids into human-readable display names.
var countries = Dbperson.getDbpersonNamesByValues('locations', values.countries);
var states = Dbperson.getDbpersonNamesByValues('locations', values.states);
var metro_regions = Dbperson.getDbpersonNamesByValues('locations', values.metro_regions);
var industries = Dbperson.getDbpersonNamesByValues('industries', values.industries);
var revenues = Dbperson.getDbpersonNamesByValues('revenues', values.revenues);
var sizes = Dbperson.getDbpersonNamesByValues('employee_sizes', values.employee_sizes);
// Collapse countries/states/metro regions into one quoted,
// space-separated location label.
var locations = '';
if (countries) {
locations += '"' + countries.join('" "') + '"';
}
if (states) {
locations += (locations ? ' ' : '') + '"' + states.join('" "') + '"';
}
if (metro_regions) {
locations += (locations ? ' ' : '') + '"' + metro_regions.join('" "') + '"';
}
var $lead = $('\
<div class="new-lead new-lead-from-dbperson">\
<span class="icon"><img src="/images/company-gray.png" /></span>\
<div class="form-actions">\
<button class="btn btn-success btn-accept">Accept</button>\
<button class="btn btn-warning btn-accept btn-workflow-activation' + ((lead.workflow == null || ! lead.workflow.is_enabled) ? ' hide' : '') + '"><small>ADD TO</small><br />Workflow</button>\
<br/>\
' + (rejected ? '' : '<button class="btn btn-reject">Reject</button>') + '\
</div>\
<div class="dbperson">\
<span class="dbperson-avatar"><img src="' + avatar + '" /></span>\
<div class="dbperson-header">\
<h4 class="dbperson-fullname">' + name + '</h4>\
<div class="verify-pane' + (email ? ' email-verified' : '') + (phone ? ' phone-verified' : '') + '">\
<div class="verify-icons">\
<span class="verify-icon verify-icon-email" title="' + email + '"><i class="fa fa-envelope"></i></span>\
<span class="verify-icon verify-icon-phone" title="' + phone + '"><i class="fa fa-phone"></i></span>\
</div>\
</div>\
<br /><span class="dbperson-role">' + occupation + '</span> | <span class="dbperson-company-name">' + (lead.source.company != null ? lead.source.company.name : '') + '</span>\
</div>\
<div class="dbperson-more">\
' + (sizes ? '<label><span class="icon"><i class="fa fa-users"></i></span>' + '"' + sizes.join('" "') + '"' + '</label>' : '') + '\
' + (revenues ? '<label><span class="icon"><i class="fa fa-dollar"></i></span>' + '"' + revenues.join('" "') + '"' + '</label>' : '') + '\
' + (locations ? '<label><span class="icon"><i class="fa fa-map-marker"></i></span>' + locations + '</label>' : '') + '\
' + (industries ? '<label><span class="icon"><i class="fa fa-paper-plane-o"></i></span>' + '"' + industries.join('" "') + '"' + '</label>' : '') + '\
</div>\
</div>\
</div>\
');
// Fall back to the placeholder image if the avatar fails to load.
$lead.find('.dbperson .dbperson-avatar img').error(function () {
this.onerror = null;
this.src = '/images/profile-blank.png';
});
return $lead;
}
// Build the sidebar card for a manually created lead. `rejected` hides the
// Reject button; the Accept button reads "Restore" for leads that were
// accepted before. Returns a detached jQuery element.
function $createNewLeadForManual(lead, rejected) {
// Use the first client's avatar when one exists (and not in debug mode);
// otherwise fall back to the placeholder image.
var client_avatar = lead.clients[0] && lead.clients[0].source && lead.clients[0].source.avatar && !DevOptions.debug ? lead.clients[0].source.avatar : '/images/profile-blank.png';
var $lead = $('\
<div class="new-lead new-lead-from-dbperson">\
<span class="icon"><img src="/images/logo.png" /></span>\
<div class="form-actions">\
<button class="btn btn-success btn-accept">' + (lead.is_accepted > 0 ? 'Restore' : 'Accept') + '</button>\
<button class="btn btn-warning btn-accept btn-workflow-activation' + ((lead.workflow == null || ! lead.workflow.is_enabled) ? ' hide' : '') + '"><small>ADD TO</small><br />Workflow</button>\
<br/>\
' + (rejected ? '' : '<button class="btn btn-reject">Reject</button>') + '\
</div>\
<div class="manual-info">\
<span class="manual-info-avatar"><img src="' + client_avatar + '" /></span>\
<div class="manual-info-header">\
<h4 class="manual-info-fullname">' + lead.clients[0].name + '</h4>\
</div>\
</div>\
</div>\
');
// Swap in the placeholder image if the avatar fails to load.
$lead.find('.manual-info .manual-info-avatar img').error(function () {
this.onerror = null;
this.src = '/images/profile-blank.png';
});
return $lead;
}
// Accept a lead via the API (optionally enabling its deal workflow), insert
// the resulting client card into the board, and report the acceptance to
// analytics. Returns the jqXHR promise, or undefined when no further leads
// may currently be accepted.
function acceptLead(lead, activate_deal_workflow) {
    if (!checkIfLeadAcceptable()) {
        return;
    }
    var payload = activate_deal_workflow ? {enable_workflow: 1} : {};
    return $.ajax({
        method: 'POST',
        data: payload,
        url: '/api/v1/leads/' + lead.id + '/accept',
        success: function (savedLead) {
            showSuccessMessage("Success", "A lead has been accepted successfully.");
            // Only track a first-time acceptance, not a restore.
            var firstAccept = savedLead.is_accepted <= 0;
            savedLead.is_accepted = 1;
            Leads.$insertClient(savedLead);
            if (!firstAccept) {
                return;
            }
            if (typeof analytics === 'undefined' || analytics === null) {
                return;
            }
            var values = savedLead.action_values;
            var trackProps = {
                action_type: 'window',
                source_type: savedLead.source != null ? savedLead.source._type : null
            };
            // Tweet-sourced leads carry their search parameters along.
            if (savedLead.source._type == LeadSourceTypes.TWITTER_TWEET) {
                trackProps.source_keywords = values.keywords[0];
                trackProps.source_location = values.location;
                trackProps.source_range = values.range;
            }
            analytics.track('Lead Accepted', trackProps);
        }
    });
}
// Soft-delete (disable) a lead via the API and report the rejection to
// analytics. Returns the jqXHR promise from $.ajax.
function rejectLead(lead) {
    return $.ajax({
        type: 'PUT',
        url: '/api/v1/leads/' + lead.id,
        data: {is_enabled: false},
        success: function () {
            showSuccessMessage("Success", "A lead has been deleted successfully.");
            if (typeof analytics === 'undefined' || analytics === null) {
                return;
            }
            var values = lead.action_values;
            var trackProps = {
                action_type: 'window',
                source_type: lead.source._type
            };
            // Tweet-sourced leads carry their search parameters along.
            if (lead.source._type == LeadSourceTypes.TWITTER_TWEET) {
                trackProps.source_keywords = values.keywords[0];
                trackProps.source_location = values.location;
                trackProps.source_range = values.range;
            }
            analytics.track('Lead Rejected', trackProps);
        }
    });
}
// Create the shared qtip popover that previews a lead's source (tweet,
// Twitter profile, or database record) when hovering a sidebar lead icon,
// and wire the global mouseover/mousemove handlers that show, fill and
// hide it.
function loadPopover() {
// One container per source type; only the matching one is shown on hover.
var popHTML = '\
<div class="lead-source" data-source-type="' + LeadSourceTypes.TWITTER_TWEET + '">\
<div class="twitter-info-container">\
<div class="twitter-info-blocker active"></div>\
<div class="twitter-info"></div>\
</div>\
</div>\
<div class="lead-source" data-source-type="' + LeadSourceTypes.TWITTER_USER + '">\
</div>\
<div class="lead-source" data-source-type="' + LeadSourceTypes.DBPERSON + '">\
</div>\
';
qt = $newLeadsContainer.qtip({
id: 'sidebar-lead-source-popover',
prerender: true,
content: {
title: '<span class="fa fa-cliently-twitter"></span> Lead Source',
text: popHTML
},
style: {classes: 'qtip-rounded qtip-shadow qtip-cluetip qtip-blue'},
position: {
//target: "mouse", // Position it where the click was...
my: 'center left', // Position my top left...
at: 'center right', // at the bottom right of...
viewport: $sidebar,
adjust: {
mouse: true, x: 0, y: 0,
method: 'none shift'
}
},
show: {
//delay: 500,
effect: function (offset) {
$(this).stop().fadeIn(500);
}
},
hide: {
event: 'mouseleave',
inactive: 30000,
fixed: true,
effect: function (offset) {
$(this).stop().fadeOut(500);
}
},
events: {
hide: function () {
$(".has-popover").removeClass('has-popover');
}
}
});
qt.qtip('hide');
// Hovering the tooltip itself cancels any pending hide timer so the
// popover stays open while the pointer is over it.
$(qt.qtip('api').elements.tooltip).on('mouseover', function () {
if (qtTimer) {
clearTimeout(qtTimer);
qtTimer = null;
}
});
$(document).on('mousemove', function (e) {
// Pointer is not over a lead icon: schedule the popover to hide in 1s.
if ($(e.target).closest("#sidebar .new-leads .new-lead .icon").length <= 0) {
if ($(".has-popover").length > 0 && !qtTimer) {
qtTimer = setTimeout(function () {
qt.qtip('hide');
qt.qtip('disable');
qtTimer = null;
}, 1000);
}
return;
}
clearTimeout(qtTimer);
qtTimer = null;
var $lead = $(e.target).closest(".new-lead");
// Already showing this lead's popover; nothing to do.
if ($lead.hasClass("has-popover")) {
return;
}
qt.qtip('option', 'position.target', e.target);
qt.qtip('option', 'show.target', e.target);
qt.qtip('enable');
$(".has-popover").removeClass('has-popover');
$lead.addClass('has-popover');
var lead = $lead.data('lead');
var action_values = lead.action_values;
var $content = qt.qtip('api').elements.content;
// Show only the container matching this lead's source type.
var $popoverSource = $content.find('.lead-source').hide().filter('[data-source-type="' + lead.source._type + '"]').show();
switch (lead.source._type) {
case LeadSourceTypes.TWITTER_TWEET:
qt.qtip('option', 'content.title', '<span class="popover-title-icon"><i class="fa fa-cliently-twitter"></i></span> Lead Source - ' + action_values.location + ', ' + action_values.range + ' miles, ' + action_values.keywords[0]);
// Embed the tweet via the official widget, then highlight keywords.
if (twttr && twttr.widgets) {
var $twitterInfo = $popoverSource.find('.twitter-info').html("<div class='loader' style='padding: 10px 0'><i class='fa fa-spin fa-spinner fa-2x'></i></div>");
twttr.widgets.createTweet(lead.source.code, $twitterInfo[0]).then(function (el) {
$('iframe[id^=twitter-widget-]').each(function () {
$(".loader", $twitterInfo).remove();
$(this).css('margin-top', '3px');
decorateTweet(this, action_values.keywords, true);
});
qt.qtip('api').reposition(null, false);
});
}
break;
case LeadSourceTypes.TWITTER_USER:
qt.qtip('option', 'content.title', '<span class="popover-title-icon"><i class="fa fa-cliently-twitter"></i></span> Lead Source - ' + action_values.keywords[0]);
// Build the profile card once, then reuse it on later hovers.
var $twitterItem = $popoverSource.find('.twitter-item');
if (!$twitterItem.length) {
$twitterItem = $('\
<div class="twitter-item">\
<div class="twitter-item-inner">\
<img class="twitter-item-profile-image" src="/images/profile-blank.png" />\
<!--span class="icon"><i class=" fa fa-cliently-twitter"></i></span-->\
<div class="twitter-item-info">\
<div class="twitter-item-header">\
<h4 class="fullname">Full Name</h4>\
<span>@</span><span class="username">username</span>\
</div>\
</div>\
<div class="twitter-item-description">\
</div>\
<div class="twitter-item-metrics">\
<span class="location"></span>\
</div>\
</div>\
</div>\
').appendTo($popoverSource);
$twitterItem.find('img').error(function () {
this.onerror = null;
this.src = '/images/profile-blank.png';
});
}
// Fill the reusable card with this lead's profile details, keywords
// highlighted.
$twitterItem
.find("img.twitter-item-profile-image").attr('src', '//' + lead.source.avatar).end()
.find("h4.fullname").html(decorateDealSourceText(lead.source.fullname, lead.action_values.keywords)).end()
.find("span.username").html(decorateDealSourceText(lead.source.username, lead.action_values.keywords)).end()
.find(".twitter-item-description").html(decorateDealSourceText(lead.source.description, lead.action_values.keywords)).end()
.find("span.location").html('<i class="fa fa-map-marker"></i> ' + decorateDealSourceText(lead.source.location, lead.action_values.keywords) + '');
break;
case LeadSourceTypes.DBPERSON:
qt.qtip('option', 'content.title', '<span class="popover-title-icon"><img src="/images/company-gray.png" /></span> Lead Source - Database');
// Rebuild the company metric chips from scratch on every hover.
var $metricsInfo = $popoverSource.find('.metrics-info');
if (!$metricsInfo.length) {
$metricsInfo = $('<div class="metrics-info" />').appendTo($popoverSource);
}
$metricsInfo.empty();
if (lead.source.company) {
if (lead.source.company.employee_count != null) {
$metricsInfo.append('<span><i class="fa fa-users"></i> ' + lead.source.company.employee_count + '</span>');
}
if (lead.source.company.revenue != null) {
$metricsInfo.append('<span><i class="fa fa-bank"></i> ' + lead.source.company.revenue + '</span>');
}
if (lead.source.company.industries != null) {
$metricsInfo.append('<span><i class="fa fa-suitcase"></i> ' + (lead.source.company.industries ? lead.source.company.industries.join(', ') : '') + '</span>');
}
if (lead.source.company.location != null) {
$metricsInfo.append('<span><i class="fa fa-map-marker"></i> ' + lead.source.company.location + '</span>');
}
}
break;
}
qt.qtip('api').reposition(null, false);
qt.qtip('show');
});
}
// Fetch all disabled (rejected) leads for the current workspace and render
// them into $rejectedContainer, grouping consecutive same-day leads under a
// single date separator.
function loadRejectedLeads() {
    var lastDayStamp = 0;
    $rejectedContainer.html('<div class="loader"><i class="fa fa-spin fa-spinner fa-4x"></i></div>');
    var url = "/api/v1/workspaces/" + current_workspace_id + "/leads?is_enabled=0&include_source=1&include_workflow_status=1";
    $.get(url, function (rejectedLeads) {
        var $newLeads = $('<div class="new-leads">').appendTo($rejectedContainer.empty());
        $.each(rejectedLeads, function (i, lead) {
            // Truncate the creation time to midnight so leads created on
            // the same day compare equal.
            var day = new Date(lead.created_at * 1000);
            day.setHours(0);
            day.setMinutes(0);
            day.setSeconds(0);
            var dayStamp = day.getTime();
            if (!lastDayStamp || lastDayStamp != dayStamp) {
                var monthAbbr = MonthNames[day.getMonth()].substr(0, 3);
                $('<div class="date-viewer"><div class="date"><span class="month">' + monthAbbr + '</span><span class="day">' + day.getDate() + '</span></div></div>').appendTo($newLeads);
            }
            lastDayStamp = dayStamp;
            $createNewLead(lead, true).appendTo($newLeads);
        });
    });
}
// Render a demo activity timeline into $timelineContainer. The `actions`
// array below is static fixture data (one sample entry per action class:
// email, task, note, twitter follow/tweet, stage move, accept, workflow);
// it is not fetched from the API.
function loadTimeline() {
var actions = [
{
id: 1,
module_class: 'email',
"class": 'email_send',
values: {
title: 'Email subject',
from: '<EMAIL>',
to: '<EMAIL>',
msg: 'Ut wisi enim and minim veniam, quis nostrud aliquip ex ea commodo consequat. Duis autem'
},
lead: {
lead_name: '<NAME>',
avatar: '//pbs.twimg.com/profile_images/478284874/avatar_copy_200x200.jpg'
},
user: {
user_name: '<NAME>',
avatar: '//pbs.twimg.com/profile_images/478284874/avatar_copy_200x200.jpg'
},
created_at: (new Date()).getTime() / 1000
},
{
id: 2,
module_class: 'task',
"class": 'task_add',
values: {
task_type: 'Call',
task_status: 0,
task_te: (new Date()).getTime() / 1000,
task_desc: 'Ut wisi enim and minim veniam, quis nostrud aliquip ex ea commodo consequat. Duis autem'
},
lead: {
lead_name: '<NAME>',
avatar: '//pbs.twimg.com/profile_images/478284874/avatar_copy_200x200.jpg'
},
user: {
user_name: 'User Name',
avatar: '//pbs.twimg.com/profile_images/478284874/avatar_copy_200x200.jpg'
},
created_at: (new Date()).getTime() / 1000 - 3600 * 24
},
{
id: 3,
module_class: 'note',
"class": 'note_add',
values: {
note_desc: 'Ut wisi enim and minim veniam, quis nostrud aliquip ex ea commodo consequat. Duis autem'
},
lead: {
lead_name: '<NAME>',
avatar: '//pbs.twimg.com/profile_images/478284874/avatar_copy_200x200.jpg'
},
user: {
user_name: 'User Name',
avatar: '//pbs.twimg.com/profile_images/478284874/avatar_copy_200x200.jpg'
},
created_at: (new Date()).getTime() / 1000 - 3600 * 24 * 2
},
{
id: 4,
module_class: 'twitter',
"class": 'twitter_follow',
values: {
},
lead: {
lead_name: '<NAME>',
avatar: '//pbs.twimg.com/profile_images/478284874/avatar_copy_200x200.jpg'
},
user: {
user_name: 'User Name',
avatar: '//pbs.twimg.com/profile_images/478284874/avatar_copy_200x200.jpg'
},
created_at: (new Date()).getTime() / 1000 - 3600 * 24 * 3
},
{
id: 5,
module_class: 'twitter',
"class": 'twitter_tweet',
values: {
description: 'Lorem ipsum dolor sit amet. consectetur adipiscing elit. Alitquam blandit risus sed leo',
sender: {
avatar: '//pbs.twimg.com/profile_images/478284874/avatar_copy_200x200.jpg',
fullname: '<NAME>',
username: 'lead_handle'
}
},
lead: {
lead_name: '<NAME>',
avatar: '//pbs.twimg.com/profile_images/478284874/avatar_copy_200x200.jpg'
},
user: {
user_name: '<NAME>',
avatar: '//pbs.twimg.com/profile_images/478284874/avatar_copy_200x200.jpg'
},
created_at: (new Date()).getTime() / 1000 - 3600 * 24 * 3
},
{
id: 6,
module_class: 'lead',
"class": 'lead_stage_move',
values: {
from_stage_name: 'Stage 1',
to_stage_name: 'Stage 2'
},
lead: {
lead_name: '<NAME>',
avatar: '//pbs.twimg.com/profile_images/478284874/avatar_copy_200x200.jpg'
},
user: {
user_name: '<NAME>',
avatar: '//pbs.twimg.com/profile_images/478284874/avatar_copy_200x200.jpg'
},
created_at: (new Date()).getTime() / 1000 - 3600 * 24 * 3
},
{
id: 7,
module_class: 'lead',
"class": 'lead_accept',
values: {
source_info: {
location: 'New York, NY, United States',
range: 250,
keywords: 'engagement'
},
twitter_info: {
description: 'Lorem ipsum dolor sit amet. consectetur adipiscing elit. Alitquam blandit risus sed leo',
sender: {
avatar: '//pbs.twimg.com/profile_images/478284874/avatar_copy_200x200.jpg',
fullname: '<NAME>',
username: 'lead_handle'
}
}
},
lead: {
lead_name: '<NAME>',
avatar: '//pbs.twimg.com/profile_images/478284874/avatar_copy_200x200.jpg'
},
user: {
user_name: '<NAME>',
avatar: '//pbs.twimg.com/profile_images/478284874/avatar_copy_200x200.jpg'
},
created_at: (new Date()).getTime() / 1000 - 3600 * 24 * 3
},
{
id: 8,
module_class: 'workflow',
"class": 'workflow_add',
values: {
workflow_name: 'Workflow 1'
},
lead: {
lead_name: '<NAME>',
avatar: '//pbs.twimg.com/profile_images/478284874/avatar_copy_200x200.jpg'
},
user: {
user_name: '<NAME>',
avatar: '//pbs.twimg.com/profile_images/478284874/avatar_copy_200x200.jpg'
},
created_at: (new Date()).getTime() / 1000 - 3600 * 24 * 3
}
];
// Replace any existing timeline with one built from the fixture actions.
$timelineContainer.empty().append(Timeline.$createTimeline({
id: 33333,
actions: actions
}));
}
return scope;
})(); |
#!/bin/bash
# Idempotent installer for CPython 3.5.1 into $IROOT/py3.
# Relies on helper functions (fw_exists, fw_get, fw_untar) and the $IROOT
# variable provided by the surrounding benchmark toolchain; this file is
# meant to be source'd, hence the bare `return` below.

# Skip the whole build when a previous run left its marker file behind.
RETCODE=$(fw_exists ${IROOT}/py3.installed)
[ ! "$RETCODE" == 0 ] || { \
source $IROOT/py3.installed
return 0; }
PY3_ROOT=$IROOT/py3
# Download, unpack and build CPython from source into the private prefix.
fw_get -O http://www.python.org/ftp/python/3.5.1/Python-3.5.1.tar.xz
fw_untar Python-3.5.1.tar.xz
cd Python-3.5.1
./configure --prefix=$PY3_ROOT --disable-shared --with-computed-gotos --quiet
# Keep the full build log, but print only one dot per 100 lines of output.
make -j4 --quiet 2>&1 | tee $IROOT/python3-install.log | awk '{ if (NR%100 == 0) printf "."}'
make install --quiet 2>&1 | tee -a $IROOT/python3-install.log | awk '{ if (NR%100 == 0) printf "."}'
cd ..
# Bootstrap pip inside the new prefix and upgrade the packaging tools.
$PY3_ROOT/bin/python3 -m ensurepip -U
$PY3_ROOT/bin/pip3 install -U setuptools pip
# Write the environment marker file that later runs check for and source.
echo "export PY3_ROOT=${PY3_ROOT}" > $IROOT/py3.installed
echo -e "export PYTHONHOME=\$PY3_ROOT" >> $IROOT/py3.installed
echo -e "export PATH=\$PY3_ROOT/bin:\$PATH" >> $IROOT/py3.installed
source $IROOT/py3.installed
|
import React, { forwardRef, useEffect, useState } from 'react'
import { bool, func, node, number, object, oneOf, oneOfType, string } from 'prop-types'
import { useDropzone } from 'react-dropzone'
import { TrashIcon } from '@welcome-ui/icons.trash'
import { EditIcon } from '@welcome-ui/icons.edit'
import { Button } from '@welcome-ui/button'
import { Group } from '@welcome-ui/group'
import { createEvent, validateFileSize, validateMimeType } from '@welcome-ui/utils'
// FileDrop
import * as S from './styles'
import { Preview } from './Preview'
import { getPreviewUrl, isAnImage } from './utils'
// Default upload size cap: 2 MB, in bytes.
const DEFAULT_MAX_FILE_SIZE = 2000000
// Error codes surfaced through `onError` when a dropped file is rejected.
const ERROR_INVALID_TYPE = 'ERROR_INVALID_TYPE'
const ERROR_INVALID_SIZE = 'ERROR_INVALID_SIZE'
// Drag-and-drop file input built on react-dropzone. Renders a preview of
// the current file (via the `children` render prop, `Preview` by default)
// plus optional edit/clear buttons, and reports changes through synthetic
// events created with `createEvent` so it plugs into form libraries.
export const FileDrop = forwardRef(
(
{
accept = 'image/*',
children = Preview,
dataTestId,
disabled,
isEditable,
isClearable,
maxSize = DEFAULT_MAX_FILE_SIZE,
multiple,
name,
handleAddFile,
onBlur,
onChange,
onError,
handleRemoveFile,
value,
forceFileType,
...rest
},
ref
) => {
// Local copy of the controlled `value`; resynced by the effect below.
const [file, setFile] = useState(value)
const [error, setError] = useState()
useEffect(() => {
setFile(value)
}, [value])
// Clean up URL
// Revoke the object URL created in handleDropAccepted when the file
// changes or the component unmounts, so blob URLs are not leaked.
useEffect(() => {
return () => file && URL.revokeObjectURL(file.preview)
}, [file])
// A file passed validation: keep it, clear any previous error, and notify
// the consumer through both onChange and handleAddFile.
const handleDropAccepted = files => {
const [file] = files
file.preview = URL.createObjectURL(file)
setFile(file)
setError(null)
const event = createEvent({ name, value: file })
onChange && onChange(event)
handleAddFile && handleAddFile(event)
}
// react-dropzone rejected the file: determine whether the MIME type or the
// size was at fault, drop the stored file, and surface the error code.
const handleDropRejected = files => {
const [file] = files
let error
const event = createEvent({ name, value: file })
if (!validateMimeType(file, accept)) {
error = ERROR_INVALID_TYPE
} else if (!validateFileSize(file, maxSize)) {
error = ERROR_INVALID_SIZE
}
setFile(null)
setError(error)
onError && onError(error)
onChange && onChange(event)
onBlur && onBlur() // Trigger field touch
}
// Trash-button handler: clear the file and notify onChange with a null
// value plus the optional handleRemoveFile callback.
const handleRemoveClick = e => {
e.preventDefault()
setFile(null)
setError(null)
const event = createEvent({ name, value: null })
onChange && onChange(event)
handleRemoveFile && handleRemoveFile(event)
}
// noClick: the hidden input is opened programmatically via `open`.
const {
getInputProps,
getRootProps,
inputRef,
isDragAccept,
isDragActive,
isDragReject,
open,
rootRef
} = useDropzone({
onDropAccepted: handleDropAccepted,
onDropRejected: handleDropRejected,
noClick: true,
multiple,
accept,
disabled,
maxSize,
children
})
return (
<S.FileDrop
{...getRootProps({
'data-testid': dataTestId,
handleRemoveFile,
isEditable,
isDragActive,
isDragAccept,
isDragReject,
isClearable,
disabled,
ref
})}
{...rest}
>
<input
{...getInputProps({ disabled, multiple, name, onError })}
// for extern validator we need to have access to this input
style={{ display: 'block', opacity: 0, height: 0 }}
/>
<S.FilePreview>
{children({
error,
file,
forceFileType,
isAnImage: forceFileType === 'image' || isAnImage(file),
fileUrl: file && getPreviewUrl(file),
isDefault: !file && !isDragActive,
isHoverAccept: isDragAccept,
isHoverReject: isDragReject,
openFile: open,
inputRef,
rootRef,
disabled
})}
{!!file && (error || isEditable || isClearable) && (
<S.Actions>
<Group>
{(error || isEditable) && (
<Button
onClick={open}
shape="square"
size="sm"
type="button"
variant="quaternary"
>
<EditIcon />
</Button>
)}
{isClearable && (
<Button
onClick={handleRemoveClick}
shape="square"
size="sm"
type="button"
variant="primary-danger"
>
<TrashIcon />
</Button>
)}
</Group>
</S.Actions>
)}
</S.FilePreview>
</S.FileDrop>
)
}
)
FileDrop.type = 'FileDrop'
FileDrop.displayName = 'FileDrop'
// Runtime prop validation; the `remove-proptypes` pragma lets the build
// strip this block from production bundles.
FileDrop.propTypes /* remove-proptypes */ = {
/** Pass a comma-separated string of file types e.g. "image/png" or "image/png,image/jpeg" */
accept: string,
children: func,
disabled: bool,
forceFileType: oneOf(['image', 'audio', 'video']),
handleAddFile: func,
handleRemoveFile: func,
isClearable: bool,
isEditable: bool,
maxSize: number,
multiple: bool,
name: string.isRequired,
onBlur: func,
onChange: func,
onError: func,
onFocus: func,
title: oneOfType([string, node]),
value: oneOfType([string, object])
}
// Export `ImagePreview` from styles
export const ImagePreview = S.ImagePreview
|
#!/usr/bin/env bash
# Encrypt a file in place with AES-256 (base64-armored, salted).
# Usage: <script> <file> <password>
# On success the plaintext is moved to the Trash and replaced by the
# ciphertext under the original name; on failure any partial output is
# removed.
infile=$1
pass=$2
if openssl enc -e -aes256 -base64 -salt -pass "pass:${pass}" -in "${infile}" -out tmp.enc
then
    # Encryption succeeded: stash the plaintext, install the ciphertext.
    mv "${infile}" ~/.Trash/
    mv tmp.enc "${infile}"
elif [ -f tmp.enc ] ; then
    # Encryption failed: remove the partial output.
    # Bug fix: the original removed "tmp.dec", a file this script never
    # creates (copy-paste from a decrypt counterpart), leaving tmp.enc behind.
    rm tmp.enc
fi
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Builder function to construct tf.contrib.layers arg_scope for convolution, fc ops."""
import tensorflow as tf
from Chinese_aster.protos import hyperparams_pb2
from tensorflow.contrib import layers
from tensorflow.contrib.framework import arg_scope
def build(hyperparams_config, is_training):
  """Constructs a tf.contrib.layers arg_scope from a Hyperparams proto.

  The returned scope carries the weights initializer, weights regularizer,
  activation function and (when configured) batch-norm function/params for
  either the convolution ops or, when ``op == FC``, fully-connected ops.
  If the proto leaves ``batch_norm`` unset, batch norm is omitted entirely.

  Batch-norm statistics are only updated when ``is_training`` is True AND
  the config's ``batch_norm.train`` flag is set; otherwise the current
  moving averages are used in the forward pass.

  Args:
    hyperparams_config: a hyperparams_pb2.Hyperparams proto.
    is_training: whether the network is being built for training.

  Returns:
    An arg_scope for the affected ops.

  Raises:
    ValueError: if hyperparams_config is not a Hyperparams proto.
  """
  if not isinstance(hyperparams_config, hyperparams_pb2.Hyperparams):
    raise ValueError('hyperparams_config not of type '
                     'hyperparams_pb.Hyperparams.')
  # Batch norm is only wired in when explicitly configured.
  if hyperparams_config.HasField('batch_norm'):
    normalizer = layers.batch_norm
    normalizer_params = _build_batch_norm_params(
        hyperparams_config.batch_norm, is_training)
  else:
    normalizer = None
    normalizer_params = None
  # Default to the convolution family; switch to FC when requested.
  use_fc = (hyperparams_config.HasField('op') and
            hyperparams_config.op == hyperparams_pb2.Hyperparams.FC)
  if use_fc:
    ops = [layers.fully_connected]
  else:
    ops = [layers.conv2d, layers.separable_conv2d, layers.conv2d_transpose]
  with arg_scope(
      ops,
      weights_regularizer=_build_regularizer(hyperparams_config.regularizer),
      weights_initializer=_build_initializer(hyperparams_config.initializer),
      activation_fn=_build_activation_fn(hyperparams_config.activation),
      normalizer_fn=normalizer,
      normalizer_params=normalizer_params) as sc:
    return sc
def _build_activation_fn(activation_fn):
  """Maps a Hyperparams.activation enum value to a callable (or None).

  Args:
    activation_fn: hyperparams_pb2.Hyperparams.activation enum value.

  Returns:
    The corresponding activation callable, or None for NONE.

  Raises:
    ValueError: on an unrecognized enum value.
  """
  known_activations = {
      hyperparams_pb2.Hyperparams.NONE: None,
      hyperparams_pb2.Hyperparams.RELU: tf.nn.relu,
      hyperparams_pb2.Hyperparams.RELU_6: tf.nn.relu6,
  }
  if activation_fn not in known_activations:
    raise ValueError('Unknown activation function: {}'.format(activation_fn))
  return known_activations[activation_fn]
def _build_regularizer(regularizer):
  """Builds a slim weights regularizer from the proto's oneof field.

  Args:
    regularizer: hyperparams_pb2.Hyperparams.regularizer proto.

  Returns:
    A slim l1 or l2 regularizer.

  Raises:
    ValueError: on an unrecognized oneof case.
  """
  builders = {
      'l1_regularizer':
          lambda cfg: layers.l1_regularizer(
              scale=float(cfg.l1_regularizer.weight)),
      'l2_regularizer':
          lambda cfg: layers.l2_regularizer(
              scale=float(cfg.l2_regularizer.weight)),
  }
  which = regularizer.WhichOneof('regularizer_oneof')
  if which not in builders:
    raise ValueError('Unknown regularizer function: {}'.format(which))
  return builders[which](regularizer)
def _build_initializer(initializer):
  """Builds a tf weights initializer from the proto's oneof field.

  Args:
    initializer: hyperparams_pb2.Hyperparams.initializer proto.

  Returns:
    A tf initializer object.

  Raises:
    ValueError: on an unrecognized oneof case.
  """
  which = initializer.WhichOneof('initializer_oneof')
  if which == 'truncated_normal_initializer':
    cfg = initializer.truncated_normal_initializer
    return tf.truncated_normal_initializer(mean=cfg.mean, stddev=cfg.stddev)
  if which == 'variance_scaling_initializer':
    cfg = initializer.variance_scaling_initializer
    # The proto stores the mode as an enum number; slim expects its name
    # (e.g. 'FAN_IN'), so translate via the enum descriptor.
    mode_enum = (hyperparams_pb2.VarianceScalingInitializer.
                 DESCRIPTOR.enum_types_by_name['Mode'])
    mode_name = mode_enum.values_by_number[cfg.mode].name
    return layers.variance_scaling_initializer(
        factor=cfg.factor, mode=mode_name, uniform=cfg.uniform)
  if which == 'orthogonal_initializer':
    cfg = initializer.orthogonal_initializer
    return tf.orthogonal_initializer(gain=cfg.gain, seed=cfg.seed)
  if which == 'uniform_initializer':
    cfg = initializer.uniform_initializer
    return tf.random_uniform_initializer(minval=cfg.minval, maxval=cfg.maxval)
  raise ValueError('Unknown initializer function: {}'.format(which))
def _build_batch_norm_params(batch_norm, is_training):
"""Build a dictionary of batch_norm params from config.
Args:
batch_norm: hyperparams_pb2.ConvHyperparams.batch_norm proto.
is_training: Whether the models is in training mode.
Returns:
A dictionary containing batch_norm parameters.
"""
batch_norm_params = {
'decay': batch_norm.decay,
'center': batch_norm.center,
'scale': batch_norm.scale,
'epsilon': batch_norm.epsilon,
'fused': True,
'is_training': is_training and batch_norm.train,
}
return batch_norm_params
|
from time import time
def busca_binaria_it(lista, num):
    """Time an iterative binary search for `num` over the sorted `lista`.

    Note: the return value is only the elapsed wall-clock time in seconds
    (a float) — the function does not report whether `num` was found; it is
    a timing helper, not a membership test.
    """
    inicio = time()
    lo, hi = 0, len(lista) - 1
    while lo <= hi:
        meio = (lo + hi) // 2
        valor = lista[meio]
        if valor == num:
            break  # found: stop timing immediately
        if valor < num:
            lo = meio + 1
        else:
            hi = meio - 1
    return time() - inicio
if __name__ == '__main__':
    # Bug fix: the original called criaLista(), which is not defined
    # anywhere in this file (NameError at runtime). Build the sorted
    # sample list inline instead.
    l = list(range(1000))
    num = 42  # example number to search for
    tempo = busca_binaria_it(l, num)
    print('Time taken for binary search:', tempo)
<reponame>wolganens/Computer-Science<gh_stars>1-10
#include <math.h>
#include <string.h>
#include <vector>
#include "Texture.h"
#define DEBUG
using namespace std;
// Texture list
// Global registry of every texture loaded so far; looked up by name in
// _seekTex() and released in FreeTextures().
vector<TEX*> _textures(0);
#ifndef __FREEGLUT_EXT_H__
// Fallback implementations for builds where freeglut's extension header
// (which already provides these helpers) is not available.

// Render a NUL-terminated string with a bitmap font, one glyph at a time.
void glutBitmapString(void *font, char *text)
{
    for (; *text != '\0'; ++text)
        glutBitmapCharacter(font, *text);
}
// Render a NUL-terminated string with a stroke (vector) font.
void glutStrokeString(void *font, char *text)
{
    for (; *text != '\0'; ++text)
        glutStrokeCharacter(font, *text);
}
#endif
// Return the index of the texture named `nome` in the global registry,
// or -1 when no texture with that name has been loaded yet.
int _seekTex(char *nome)
{
    for (unsigned int idx = 0; idx < _textures.size(); ++idx)
    {
        if (strcmp(nome, _textures[idx]->name) == 0)
            return idx;
    }
    return -1;
}
// Release every TEX record in the global registry and empty it.
// NOTE(review): only the CPU-side TEX structs are freed (they are malloc'd
// in LoadJPG, so free() matches); the GL texture objects (texid) are NOT
// glDeleteTextures'd here — confirm whether that is intentional (e.g. the
// GL context is already destroyed at shutdown).
void FreeTextures()
{
unsigned int i;
#ifdef DEBUG
printf("Total tex: %lu\n",_textures.size());
#endif
// for each
for(i=0;i<_textures.size();++i)
{
// Free texture
#ifdef DEBUG
printf("%s: %d x %d (id: %d)\n", _textures[i]->name, _textures[i]->dimx,
_textures[i]->dimy, _textures[i]->texid);
#endif
free(_textures[i]);
}
_textures.clear();
}
// Load (or fetch from the cache) the JPEG texture in `file` and upload it
// to OpenGL. Returns the cached TEX record, or NULL for an empty file
// name. Exits the process when the image cannot be decoded.
TEX *LoadTexture(string file, bool mipmap)
{
    GLenum format;
    if (file == "")
        return NULL;
    char *name = (char *)file.c_str();
    // Reuse an already-loaded texture with the same name.
    int index = _seekTex(name);
    if (index != -1)
        return _textures[index];
    TEX *pImage = LoadJPG(name);
    if (pImage == NULL)
        exit(1);  // bug fix: was exit(0), which reported success on failure
    strcpy(pImage->name, name);
    glGenTextures(1, &pImage->texid);
    glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
    glBindTexture(GL_TEXTURE_2D, pImage->texid);
    printf("LoadTexture: %d\n", pImage->texid);
    // Single-channel images upload as luminance, everything else as RGB.
    if (pImage->ncomp == 1) format = GL_LUMINANCE;
    else format = GL_RGB;
    if (mipmap)
    {
        gluBuild2DMipmaps(GL_TEXTURE_2D, GL_RGB, pImage->dimx, pImage->dimy,
                          format, GL_UNSIGNED_BYTE, pImage->data);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);
        // Bug fix: GL_LINEAR_MIPMAP_LINEAR is not a valid MAG filter
        // (magnification never samples between mipmap levels; only
        // GL_NEAREST/GL_LINEAR are accepted) — use GL_LINEAR.
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    }
    else
    {
        glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, pImage->dimx, pImage->dimy,
                     0, format, GL_UNSIGNED_BYTE, pImage->data);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    }
    // Bug fix: the pixel buffer is allocated with new[] in DecodeJPG, so it
    // must be released with delete[], not free() (undefined behavior).
    delete [] pImage->data;
    pImage->data = NULL;  // avoid a dangling pointer in the cached record
    _textures.push_back(pImage);
    return pImage;
}
// Apply min/mag filters to one texture object (tex >= 0) or, when tex is
// -1, to every texture currently in the registry.
void SetTextureFilter(GLint tex, GLint minfilter, GLint magfilter)
{
    glEnable(GL_TEXTURE_2D);
    if (tex == -1)
    {
        for (unsigned int k = 0; k < _textures.size(); ++k)
        {
            glBindTexture(GL_TEXTURE_2D, _textures[k]->texid);
            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, minfilter);
            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, magfilter);
        }
    }
    else
    {
        glBindTexture(GL_TEXTURE_2D, tex);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, minfilter);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, magfilter);
    }
    glDisable(GL_TEXTURE_2D);
}
// Decode an already-opened JPEG stream into `pImageData`, filling in its
// component count, dimensions, and a new[]-allocated pixel buffer. When
// `invert` is true the scanline order is flipped vertically.
void DecodeJPG(jpeg_decompress_struct* cinfo, TEX *pImageData, bool invert)
{
jpeg_read_header(cinfo, TRUE);
jpeg_start_decompress(cinfo);
pImageData->ncomp = cinfo->num_components;
pImageData->dimx = cinfo->image_width;
pImageData->dimy = cinfo->image_height;
// Bytes per scanline (tightly packed, no row padding).
int rowSpan = pImageData->ncomp * pImageData->dimx;
pImageData->data = new unsigned char[rowSpan * pImageData->dimy];
int height = pImageData->dimy - 1;
// Build a row-pointer table into the single pixel buffer; reversing the
// table is what implements the vertical flip.
unsigned char** rowPtr = new unsigned char*[pImageData->dimy];
if(invert)
for (int i = 0; i <= height; i++)
rowPtr[height - i] = &(pImageData->data[i*rowSpan]);
else
for (int i = 0; i <= height; i++)
rowPtr[i] = &(pImageData->data[i*rowSpan]);
// jpeg_read_scanlines may return fewer rows than requested, so keep
// looping until every scanline of the image has been read.
int rowsRead = 0;
while (cinfo->output_scanline < cinfo->output_height)
{
rowsRead += jpeg_read_scanlines(cinfo, &rowPtr[rowsRead], cinfo->output_height - rowsRead);
}
delete [] rowPtr;
jpeg_finish_decompress(cinfo);
}
// Decode the JPEG file `filename` into a freshly malloc'd TEX record.
// Returns NULL (after printing a message) when the file cannot be opened;
// `invert` flips the image vertically while decoding.
TEX *LoadJPG(const char *filename, bool invert)
{
    FILE *fp = fopen(filename, "rb");
    if (fp == NULL)
    {
        printf("Unable to load %s\n", filename);
        return NULL;
    }
    struct jpeg_decompress_struct cinfo;
    jpeg_error_mgr jerr;
    cinfo.err = jpeg_std_error(&jerr);
    jpeg_create_decompress(&cinfo);
    jpeg_stdio_src(&cinfo, fp);
    TEX *image = (TEX *)malloc(sizeof(TEX));
    DecodeJPG(&cinfo, image, invert);
    jpeg_destroy_decompress(&cinfo);
    fclose(fp);
    return image;
}
|
<filename>src/services/ExpressServer.ts<gh_stars>1-10
import express, { NextFunction, Request, RequestHandler, Response } from "express";
import { getDecoratorArgs, HttpParamType, JwtClaim, JwtClaimMetadata, OwnerClaimMetadata, PathMetadata, ReqData, RolesClaimMetadata } from '../framework/decorators';
import { AUTH_OWNER_CLAIM, AUTH_ROLES_CLAIM, AUTH_ROUTE_DATA, CONTROLLER_ARGS_DATA, ROUTE_DATA } from '../framework/decorators/constants';
import { ParamMetadata } from '../framework/decorators/utils';
import { HttpServerProvider } from '../framework/HttpServer';
import { Singleton } from '../injector/decorators';
import { IController } from '../interfaces/IController';
import type { IHttpServer } from '../interfaces/IHttpServer';
import type { Type } from '../types';
import { isNullOrUndefined } from '../utils/assert';
import { takeLeadingWord } from '../utils/string';
import { AuthUtilsService } from './AuthService';
@Singleton()
export class ExpressServer extends HttpServerProvider implements IHttpServer {
  public engine = express()

  /**
   * Wires every registered controller into the express app.
   *
   * For each controller, the conventional CRUD methods ("create", "get",
   * "delete", "patch", "update") are registered when implemented, followed
   * by any extended routes declared via route decorators, with the
   * auth/role/owner claim metadata collected from the class.
   */
  public onLoadServices = () => {
    const authService = this.app.container.resolve(AuthUtilsService);
    for (let endpoint of this.controllers.keys()) {
      const klass: Type<IController<any>> | undefined = this.controllers.get(endpoint);
      // FIX: this used to 'return', silently aborting registration of every
      // controller after the first missing entry; skip just this one instead.
      if (isNullOrUndefined(klass))
        continue;
      const instance: IController<any> = this.app.container.resolve(klass);
      // Index the claim metadata by method name for quick lookup below.
      const grantedRoutes: Record<string, JwtClaim> = (
        (Reflect.getMetadata(AUTH_ROUTE_DATA, klass) as JwtClaimMetadata[]) ?? []
      )?.reduce((acc, val) => ({ ...acc, [val.methodName]: val }), {});
      const grantedRoles: Record<string, RolesClaimMetadata> = (
        (Reflect.getMetadata(AUTH_ROLES_CLAIM, klass) as RolesClaimMetadata[]) ?? []
      )?.reduce((acc, val) => ({ ...acc, [val.methodName]: val }), {});
      const grantedOwners: Record<string, OwnerClaimMetadata> = (
        (Reflect.getMetadata(AUTH_OWNER_CLAIM, klass) as OwnerClaimMetadata[]) ?? []
      )?.reduce((acc, val) => ({ ...acc, [val.methodName]: val }), {});
      // Conventional handlers; setupHandler is a no-op for methods the
      // controller does not implement.
      ["create", "get", "delete", "patch", "update"].forEach((reqType) => {
        const auth = grantedRoutes[reqType];
        const roles = grantedRoles[reqType];
        const owners = grantedOwners[reqType];
        this.setupHandler(
          instance,
          reqType,
          endpoint,
          undefined,
          auth,
          roles,
          owners,
          authService,
        );
      });
      const extendedRoutes: PathMetadata[] | undefined = Reflect.getMetadata(ROUTE_DATA, klass);
      // FIX: also used to 'return' here; later controllers may still have
      // extended routes to register.
      if (isNullOrUndefined(extendedRoutes))
        continue;
      for (let route of extendedRoutes) {
        const { path, methodName } = route;
        const auth = grantedRoutes[methodName];
        const roles = grantedRoles[methodName];
        const owners = grantedOwners[methodName];
        // The HTTP verb is encoded as the leading word of the method name
        // (e.g. "getWidgets" -> "get").
        const reqType = takeLeadingWord(methodName);
        const subPath = path.startsWith("/") ? path : "/" + path;
        this.setupHandler(
          instance,
          reqType,
          endpoint + subPath,
          methodName,
          auth,
          roles,
          owners,
          authService,
        );
      }
    }
  };

  //#region Internals
  /**
   * Registers a single express route for `instance[methodName]` when the
   * controller implements it. POST-like verbs read request data from the
   * body; GET reads it from the query string.
   */
  private setupHandler(
    instance: any,
    reqType: string,
    endpoint: string,
    methodName = reqType,
    authType: JwtClaim | null = null,
    requiredRoles: RolesClaimMetadata | null = null,
    requiredOwners: OwnerClaimMetadata | null = null,
    authService: AuthUtilsService | null = null
  ) {
    if (instance[methodName] != null) {
      const authArgs = [authType, requiredRoles, requiredOwners, authService] as const
      const makeHandlerWithDataFrom = (t: 'body' | 'query') => {
        return this.makeHandler(instance[methodName], instance, t, ...authArgs)
      }
      switch (reqType) {
        case "create":
        case "post":
          this.post(endpoint, makeHandlerWithDataFrom("body"));
          break;
        case "get":
          this.get(endpoint, makeHandlerWithDataFrom("query"));
          break;
        case "delete":
          this.delete(endpoint, makeHandlerWithDataFrom("body"));
          break;
        case "patch":
          this.patch(endpoint, makeHandlerWithDataFrom("body"));
          break;
        // FIX: "update" is one of the conventional controller methods
        // registered by onLoadServices, but previously fell through to the
        // throwing default branch; treat it as a PUT alias.
        case "update":
        case "put":
          this.put(endpoint, makeHandlerWithDataFrom("body"));
          break;
        default:
          throw new Error(
            "Server has unimplemented Controller Method: " + reqType
          );
      }
    }
  }

  /**
   * Builds the express RequestHandler adapting a controller method: merges
   * request data, runs the auth flow when claims are declared, resolves
   * decorated parameters into their positions, invokes the method, and
   * serializes the result (primitives via res.send, objects as JSON).
   */
  private makeHandler(
    handler: Function,
    handlerThisCtx: any,
    key: "query" | "body",
    authType: JwtClaim | null,
    requiredRoles: RolesClaimMetadata | null,
    requiredOwners: OwnerClaimMetadata | null,
    authService: AuthUtilsService | null
  ): RequestHandler {
    const needsAuthService = (authType || requiredRoles || requiredOwners);
    if (needsAuthService && authService == null)
      throw new Error('No Auth Service setup to handle auth decorators.');
    return async (req: Request, res: Response, next: NextFunction) => {
      // Primary data source (body or query) flattened in, with both raw
      // sources kept available under __query/__body.
      const reqData = {
        ...req[key],
        __query: req.query,
        __body: req.body,
      } as ReqData;
      try {
        if (needsAuthService && authService) {
          // NOTE(review): the result of jwtAuthorizationFlow is not awaited
          // here — confirm it is synchronous.
          reqData.__auth = authService.jwtAuthorizationFlow(req, requiredRoles?.roles);
        }
        if (requiredOwners && authService) {
          console.error({ authService, TODO: 'FINISH ME' })
          console.error({ requiredOwners, TODO: 'FINISH ME' })
          throw new Error('Not implemented yet.. Use Tsoa Controllers instead.')
        }
        // Resolve decorated parameters into their declared positions.
        const args: any[] = [reqData];
        (
          (Reflect.getOwnMetadata(
            CONTROLLER_ARGS_DATA,
            handlerThisCtx.constructor,
            handler.name,
          ) || []) as ParamMetadata<HttpParamType>[]
        )
          .filter(arg => arg.propertyKey === handler.name)
          .sort((a, b) => a.parameterIndex - b.parameterIndex)
          .forEach(arg => {
            let payload = getDecoratorArgs(arg, req, res, reqData);
            if (arg.options?.length) {
              const prop = arg.options[0]
              payload = payload[prop]
            }
            args[arg.parameterIndex] = payload
          });
        args.push({ req, res }) // Always pass as the last argument to handler
        const result = await handler.apply(handlerThisCtx, args);
        switch (typeof result) {
          case "bigint":
          case "boolean":
          case "function":
          case "number":
          case "string":
          case "symbol":
          case "undefined": {
            res.setHeader('Content-Type', 'application/json; charset=utf-8')
            return res.send(result);
          }
          default:
            return res.send(JSON.stringify(result));
        }
      }
      catch (handlerError) {
        return next(handlerError);
      }
    };
  }
  //#endregion
}
|
// Modal dialog behavior: animates the modal open on page load and wires up
// the close and unsubscribe handlers.
(function( root, $, undefined ) {
    "use strict";
    $(function () {
        // Fade the backdrop in first...
        $('.modal-mask').animate({
            opacity: ".3"
        }, function() {
            // ...then slide the modal to the vertical center...
            $('.modal').animate({
                top: "50%"
            }, 50);
            // ...and fade it in shortly after.
            setTimeout(function() {
                $('.modal').animate({
                    opacity: 1
                }, 200);
            }, 100);
        });
        // Closing (backdrop click, close button, or finish button): fade the
        // modal out, then the mask, then remove both from the DOM.
        $('.modal-mask, .close-modal, .finish').click(function() {
            $('.modal').animate({ opacity: 0 }, 300, function() {
                $('.modal-mask').animate({
                    opacity: "0"
                }, 300, function() {
                    $('.modal, .modal-mask').remove();
                });
            });
        });
        // Unsubscribe link: call the plugin's REST endpoint, then return to
        // the admin page on success; failures are only logged.
        $('.a-unsubscribe').click(function(e) {
            e.preventDefault();
            $.get('../wp-json/cogito-plugin/unsubscribe-app')
                .done(function(e) {
                    window.location.replace('admin.php?page=cogito-admin-page.php');
                })
                .fail(function(err) {
                    console.log(err)
                })
        })
    });
} ( this, jQuery ));
|
/* global saveAs */
var Screenshot = function (game) {
this.game = game
this.popup = null
this.png = null
this.blob = null
this.twitter = null
this.tweetMessage = '#Curvatron'
this.tweetSuccess = 0
this.tweeting = false
}
Screenshot.prototype = {
  // Poll the Twitter OAuth popup: once its URL carries an oauth_verifier
  // query parameter, continue the tweet flow.
  update: function () {
    if (this.popup !== null && this.popup.location.href &&
      this.popup.location.href.split('oauth_verifier=')[1] !== undefined) {
      this.tweetUpdate()
    }
  },
  // Capture the current frame into this.png (base64, for Twitter) and
  // this.blob (for local saving).
  snap: function () {
    // redraw screen
    this.game.renderer.render(this.game.stage)
    // png to share on twitter (strip the data-URL prefix; the upload API
    // expects raw base64)
    var png = this.game.canvas.toDataURL()
    this.png = png.replace(/^data:image\/png;base64,/, '')
    // png to save locally (toBlob is asynchronous)
    this.game.canvas.toBlob(function (blob) {
      this.blob = blob
    }.bind(this))
  },
  // Save the last snapped frame to disk via FileSaver's global saveAs.
  save: function () {
    saveAs(this.blob, 'curvatron.png')
  },
  // Start the Twitter share flow: request an OAuth token and open the
  // authorization popup. update() watches the popup for completion.
  share: function () {
    this.tweeting = true
    var TwitterAPI = require('node-twitter-api')
    // NOTE(review): '<KEY>' looks like a redacted secret placeholder —
    // confirm the real consumer secret is injected at build time.
    this.twitter = new TwitterAPI({
      consumerKey: 'NwssUgdW5A1dKhtzExUFc5AtQ',
      consumerSecret: '<KEY>',
      callback: 'http://bravebunny.co/'
    })
    this.twitter.getRequestToken(function (error, requestToken, requestTokenSecret, results) {
      if (error) {
        console.log('error requesting twitter token: ' + error)
        this.tweetSuccess = -1
      } else {
        this.rToken = requestToken
        this.rTokenSecret = requestTokenSecret
        this.popup = window.open(this.twitter.getAuthUrl(this.rToken))
      }
    }.bind(this))
  },
  // Finish the OAuth dance: exchange the verifier for an access token,
  // upload the snapped PNG, then post the status with the attached media.
  // tweetSuccess is set to 1 on success, -1 on any failure.
  tweetUpdate: function () {
    var oauthVerifier = this.popup.location.href.split('oauth_verifier=')[1]
    this.popup.close()
    this.popup = null
    var aToken, aTokenSecret
    this.twitter.getAccessToken(this.rToken, this.rTokenSecret, oauthVerifier, function (error, accessToken, accessTokenSecret, results) {
      if (error) {
        console.log('error getting twitter access token: ' + error)
        this.tweetSuccess = -1
      } else {
        aToken = accessToken
        aTokenSecret = accessTokenSecret
        var params = {
          media: this.png,
          isBase64: true
        }
        this.twitter.uploadMedia(params, aToken, aTokenSecret, function (error, response) {
          if (error) {
            console.log('error uploading media to twitter: ' + error)
            this.tweetSuccess = -1
          } else {
            this.twitter.statuses('update', {
              status: this.tweetMessage,
              media_ids: response.media_id_string
            },
            aToken,
            aTokenSecret,
            // NOTE(review): on error this sets tweetSuccess to -1 and then
            // immediately overwrites it with 1 (no else) — confirm intent.
            function (error, data, response) {
              if (error) this.tweetSuccess = -1
              this.tweetSuccess = 1
            }.bind(this)
            )
          }
        }.bind(this))
      }
    }.bind(this))
  }
}
|
<reponame>abhishekgoenka/training<filename>angular/lab2/my-project/src/app/data-entry-reactive/data-entry-reactive.module.ts
import { NgModule } from '@angular/core';
import { CommonModule } from '@angular/common';
import { DataEntryReactiveRoutingModule } from './data-entry-reactive-routing.module';
import { DataEntryReactiveComponent } from './data-entry-reactive.component';
import { ReactiveFormsModule } from '@angular/forms';
// Feature module bundling the reactive-forms data entry screen together
// with its dedicated routing module.
@NgModule({
  declarations: [DataEntryReactiveComponent],
  imports: [
    CommonModule,
    ReactiveFormsModule,
    DataEntryReactiveRoutingModule
  ]
})
export class DataEntryReactiveModule { }
|
#!/usr/bin/env bash
# shellcheck disable=SC1090,SC2034,SC2154
# Copyright Istio Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Initialize KUBECONFIG_FILES and KUBE_CONTEXTS from the test harness.
_set_kube_vars
# Bring in the snip_* commands used by verify_load_balancing below.
source content/en/docs/setup/install/multicluster/verify/snips.sh
# set_single_network_vars initializes all variables for a single network config.
# Uses the first two kubeconfigs/contexts provided by the harness.
function set_single_network_vars
{
    export KUBECONFIG_CLUSTER1="${KUBECONFIG_FILES[0]}"
    export KUBECONFIG_CLUSTER2="${KUBECONFIG_FILES[1]}"
    export CTX_CLUSTER1="${KUBE_CONTEXTS[0]}"
    export CTX_CLUSTER2="${KUBE_CONTEXTS[1]}"
}
# set_multi_network_vars initializes all variables for a multi-network config.
# Pairs kubeconfig/context index 0 with index 2 (indices differ from the
# single-network case, which uses 0 and 1).
function set_multi_network_vars
{
    export KUBECONFIG_CLUSTER1="${KUBECONFIG_FILES[0]}"
    export KUBECONFIG_CLUSTER2="${KUBECONFIG_FILES[2]}"
    export CTX_CLUSTER1="${KUBE_CONTEXTS[0]}"
    export CTX_CLUSTER2="${KUBE_CONTEXTS[2]}"
}
# configure_trust creates a hierarchy of certificates: a common self-signed
# root CA plus a per-cluster intermediate CA, deployed to each cluster as
# a 'cacerts' secret in istio-system.
function configure_trust
{
    # Keeps the certs under a separate directory.
    mkdir -p certs
    pushd certs || exit
    # Create the root cert.
    make -f ../tools/certs/Makefile.selfsigned.mk root-ca
    # Create and deploy intermediate certs for cluster1 and cluster2.
    make -f ../tools/certs/Makefile.selfsigned.mk cluster1-cacerts
    make -f ../tools/certs/Makefile.selfsigned.mk cluster2-cacerts
    # Create the istio-system namespace in each cluster so that we can create the secrets.
    kubectl --context="$CTX_CLUSTER1" create namespace istio-system
    kubectl --context="$CTX_CLUSTER2" create namespace istio-system
    # Deploy secret to each cluster
    kubectl --context="$CTX_CLUSTER1" create secret generic cacerts -n istio-system \
        --from-file=cluster1/ca-cert.pem \
        --from-file=cluster1/ca-key.pem \
        --from-file=cluster1/root-cert.pem \
        --from-file=cluster1/cert-chain.pem
    kubectl --context="$CTX_CLUSTER2" create secret generic cacerts -n istio-system \
        --from-file=cluster2/ca-cert.pem \
        --from-file=cluster2/ca-key.pem \
        --from-file=cluster2/root-cert.pem \
        --from-file=cluster2/cert-chain.pem
    popd || exit # Return to the previous directory.
}
# cleanup removes all resources created by the tests.
function cleanup
{
    # Remove temp files. FIX: 'certs' is a directory (created by
    # configure_trust), so it needs a recursive remove; a plain 'rm -f'
    # fails on it and leaves the certs behind.
    rm -f cluster1.yaml cluster2.yaml
    rm -rf certs
    # Delete the namespaces on both clusters concurrently
    delete_namespaces_cluster1 &
    delete_namespaces_cluster2 &
    wait
}
# delete_namespaces_cluster1 removes the istio-system and sample namespaces
# on CLUSTER1.
function delete_namespaces_cluster1
{
    kubectl delete ns istio-system sample --context="${CTX_CLUSTER1}" --ignore-not-found
}
# delete_namespaces_cluster2 removes the istio-system and sample namespaces
# on CLUSTER2.
function delete_namespaces_cluster2
{
    kubectl delete ns istio-system sample --context="${CTX_CLUSTER2}" --ignore-not-found
}
# verify_load_balancing verifies that traffic is load balanced properly
# between CLUSTER1 and CLUSTER2: deploys HelloWorld v1/v2 (one version per
# cluster) plus a sleep client in each, then checks that requests from
# either cluster reach both versions.
function verify_load_balancing
{
    # Deploy the HelloWorld service.
    snip_deploy_the_helloworld_service_1
    snip_deploy_the_helloworld_service_2
    snip_deploy_the_helloworld_service_3
    # Deploy HelloWorld v1 and v2
    snip_deploy_helloworld_v1_1
    snip_deploy_helloworld_v2_1
    # Deploy Sleep
    snip_deploy_sleep_1
    # Wait for all the deployments.
    _wait_for_deployment sample helloworld-v1 "${CTX_CLUSTER1}"
    _wait_for_deployment sample sleep "${CTX_CLUSTER1}"
    _wait_for_deployment sample helloworld-v2 "${CTX_CLUSTER2}"
    _wait_for_deployment sample sleep "${CTX_CLUSTER2}"
    # Verify everything is deployed as expected.
    VERIFY_TIMEOUT=0 # Don't retry.
    echo "Verifying helloworld v1 deployment"
    _verify_like snip_deploy_helloworld_v1_2 "$snip_deploy_helloworld_v1_2_out"
    echo "Verifying helloworld v2 deployment"
    _verify_like snip_deploy_helloworld_v2_2 "$snip_deploy_helloworld_v2_2_out"
    echo "Verifying sleep deployment in ${CTX_CLUSTER1}"
    _verify_like snip_deploy_sleep_2 "$snip_deploy_sleep_2_out"
    echo "Verifying sleep deployment in ${CTX_CLUSTER2}"
    _verify_like snip_deploy_sleep_3 "$snip_deploy_sleep_3_out"
    unset VERIFY_TIMEOUT # Restore default
    # Each cluster runs a distinct version, so seeing both responses proves
    # cross-cluster load balancing.
    local EXPECTED_RESPONSE_FROM_CLUSTER1="Hello version: v1, instance:"
    local EXPECTED_RESPONSE_FROM_CLUSTER2="Hello version: v2, instance:"
    # Verify we hit both clusters from CLUSTER1
    echo "Verifying load balancing from ${CTX_CLUSTER1}"
    _verify_contains snip_verifying_crosscluster_traffic_1 "$EXPECTED_RESPONSE_FROM_CLUSTER1"
    _verify_contains snip_verifying_crosscluster_traffic_1 "$EXPECTED_RESPONSE_FROM_CLUSTER2"
    # Verify we hit both clusters from CLUSTER2
    echo "Verifying load balancing from ${CTX_CLUSTER2}"
    _verify_contains snip_verifying_crosscluster_traffic_3 "$EXPECTED_RESPONSE_FROM_CLUSTER1"
    _verify_contains snip_verifying_crosscluster_traffic_3 "$EXPECTED_RESPONSE_FROM_CLUSTER2"
}
|
<filename>node_modules/version-range/compiled-types/index.d.ts
export declare type Version = string | number
export declare type Range = Version | Version[]
/**
 * Check whether a version lies within a range.
 * (The previous doc described a three-way compare returning 1/0/-1, which
 * did not match this boolean predicate.)
 * @param subject The version to test
 * @param range A single version, or a list of versions, the subject may satisfy
 * @returns true when the subject version is within the given range
 */
export default function withinVersionRange(
subject: Version,
range: Range
): boolean
//# sourceMappingURL=index.d.ts.map
|
#!/usr/bin/env bash
# Called by the chicken-lib rule in configure.py.
# $1: The directory of the CHICKEN extension to chicken-install in.
# $2: The path of a fake file to touch so that the build system can have some
#     idea of when things were last built.
# All paths are relative to $CUAUV_SOFTWARE.
set -e
EXTENSION_DIR=$1
# FIX: quote the expanded paths so directories containing spaces (or glob
# characters) don't word-split or expand.
cd "${CUAUV_SOFTWARE}${EXTENSION_DIR}"
CSC_OPTIONS="-C -Wno-cpp $CSC_OPTIONS" chicken-install >/dev/null
touch "${CUAUV_SOFTWARE}$2"
|
from django import forms
from django.contrib.auth import get_user_model
from thewall.models import Session, Participant, Slot, Room
class SessionForm(forms.ModelForm):
    """Create/edit form for a Session with free-text presenter entry."""
    # Rendered as a small textarea; maps to the model's extra_presenters field.
    extra_presenters = forms.CharField(widget=forms.Textarea(attrs={'rows': 3}), label="Presenters")

    class Meta:
        model = Session
        fields = ('unconference', 'title', 'description', 'headline', 'extra_presenters', 'tags', 'difficulty')
        widgets = {
            'title': forms.TextInput(),
            'headline': forms.TextInput(),
            'unconference': forms.HiddenInput()
        }

    def __init__(self, *args, **kwargs):
        super(SessionForm, self).__init__(*args, **kwargs)
        initial = kwargs.get('initial', None)
        # Resolve the unconference from initial data (create) or from the
        # bound instance (edit). NOTE(review): 'unconf' is currently unused
        # apart from the commented-out presenter filtering below.
        if initial and 'unconference' in initial:
            unconf = initial['unconference']
        else:
            unconf = self.instance.unconference
        #if unconf and unconf.slug != 'testcamp':
        #    self.fields['presenters'].queryset = Participant.objects.filter(
        #        unconference=unconf
        #    )
class ParticipantForm(forms.ModelForm):
    """Edit the basic identity fields of the user behind a participant."""
    class Meta:
        model = get_user_model()
        fields = ('first_name', 'last_name', 'email')
class SessionScheduleForm(forms.ModelForm):
    """SessionForm variant that additionally schedules a slot and room."""
    # Rendered as a small textarea; maps to the model's extra_presenters field.
    extra_presenters = forms.CharField(widget=forms.Textarea(attrs={'rows': 3}), label="Presenters")

    class Meta:
        model = Session
        fields = ('unconference', 'title', 'description', 'headline',
                  'extra_presenters', 'tags', 'difficulty',
                  'slot', 'room')
        widgets = {
            'title': forms.TextInput(),
            'headline': forms.TextInput(),
            'unconference': forms.HiddenInput()
        }

    def __init__(self, *args, **kwargs):
        super(SessionScheduleForm, self).__init__(*args, **kwargs)
        initial = kwargs.get('initial', None)
        # Resolve the unconference from initial data (create) or from the
        # bound instance (edit). NOTE(review): 'unconf' is unused apart from
        # the commented-out presenter filtering below.
        if initial and 'unconference' in initial:
            unconf = initial['unconference']
        else:
            unconf = self.instance.unconference
        #if unconf and unconf.slug != 'testcamp':
        #    self.fields['presenters'].queryset = Participant.objects.filter(
        #        unconference=unconf
        #    )
        # Limit scheduling choices to the instance's unconference: slots on
        # its days and rooms at its venue.
        self.fields['slot'].queryset = Slot.objects.filter(day__in=self.instance.unconference.days.all())
        self.fields['room'].queryset = Room.objects.filter(venue=self.instance.unconference.venue)
# Form to save data either to the Participant model or the User model,
# depending on whether or not the User model has a 'phone' field
class CreateParticipantForm(forms.ModelForm):
    """Participant form that also manages a phone number and an optional
    password change for the related user."""
    # Optional password change; applied only when both entries match.
    password = forms.CharField(max_length=255, widget=forms.PasswordInput, required=False)
    password_confirmation = forms.CharField(max_length=255, widget=forms.PasswordInput, required=False)

    class Meta:
        model = Participant
        fields = ['organization']

    def __init__(self, *args, **kwargs):
        super(CreateParticipantForm, self).__init__(*args, **kwargs)
        # Declared dynamically so it sits outside Meta.fields handling.
        self.fields["phone"] = forms.CharField(max_length=20, required=False)
        # Provide initial phone if the participant exists
        if self.instance.pk:
            self.fields["phone"].initial = self.instance.phone

    def save(self, commit=True):
        instance = super(CreateParticipantForm, self).save(commit=commit)
        instance.phone = self.cleaned_data["phone"]
        # NOTE(review): the related user is saved whenever a valid password
        # pair is supplied, even when commit=False — confirm that is intended.
        if self.cleaned_data['password'] and (self.cleaned_data['password'] == self.cleaned_data['password_confirmation']): # noqa
            instance.user.set_password(self.cleaned_data['password'])
            instance.user.save()
        if commit:
            # Second save persists the phone assigned above.
            instance.save()
        return instance
|
<filename>src/test/java/com/orange/wro/taglib/config/IGroupLoaderTest.java
/*
* Copyright 2011, 2012 France Télécom
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.orange.wro.taglib.config;
import static org.junit.Assert.assertEquals;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;
import ro.isdc.wro.model.group.Group;
import ro.isdc.wro.model.resource.ResourceType;
/**
* @author <NAME>
*/
@RunWith(PowerMockRunner.class)
@PrepareForTest(IGroupLoader.class)
public class IGroupLoaderTest {
    /** Group name shared by every test; a constant, so static final. */
    private static final String TEST_GROUP = "testGroup";

    @Test
    public void explodedFilesAreNotLoadedWhenUnnecessary() {
        IGroupLoader loader = getLoader();
        FilesGroup filesGroup = new FilesGroup(TEST_GROUP, loader);
        // Asking only for the minimized file must not trigger exploded loads.
        filesGroup.getMinimizedFile(ResourceType.JS);
        verify(loader, times(0)).getResources(ResourceType.JS);
        verify(loader, times(0)).getResources(ResourceType.CSS);
    }

    @Test
    public void explodedJSFilesAreLoadedWhenNecessary() {
        IGroupLoader loader = getLoader();
        FilesGroup filesGroup = new FilesGroup(TEST_GROUP, loader);
        // Requesting JS resources loads JS only, not CSS.
        filesGroup.get(ResourceType.JS);
        verify(loader, times(1)).getResources(ResourceType.JS);
        verify(loader, times(0)).getResources(ResourceType.CSS);
    }

    @Test
    public void explodedCSSFilesAreLoadedWhenNecessary() {
        IGroupLoader loader = getLoader();
        FilesGroup filesGroup = new FilesGroup(TEST_GROUP, loader);
        // Requesting CSS resources loads CSS only, not JS.
        filesGroup.get(ResourceType.CSS);
        verify(loader, times(0)).getResources(ResourceType.JS);
        verify(loader, times(1)).getResources(ResourceType.CSS);
    }

    @Test
    public void minimizedFilesAreNotLoadedWhenUnnecessary() {
        IGroupLoader loader = getLoader();
        FilesGroup filesGroup = new FilesGroup(TEST_GROUP, loader);
        // Exploded access must not trigger the minimized lookup.
        filesGroup.get(ResourceType.CSS);
        verify(loader, times(0)).getMinimizedResources();
    }

    @Test
    public void minimizedFilesAreLoadedWhenNecessary() {
        IGroupLoader loader = getLoader();
        FilesGroup filesGroup = new FilesGroup(TEST_GROUP, loader);
        filesGroup.getMinimizedFile(ResourceType.CSS);
        verify(loader, times(1)).getMinimizedResources();
    }

    @Test
    public void explodedCSSFilesAreLoadedOnlyOnce() {
        IGroupLoader loader = getLoader();
        FilesGroup filesGroup = new FilesGroup(TEST_GROUP, loader);
        // Second access must be served from the group's cache.
        filesGroup.get(ResourceType.CSS);
        filesGroup.get(ResourceType.CSS);
        verify(loader, times(1)).getResources(ResourceType.CSS);
    }

    @Test
    public void explodedJSFilesAreLoadedOnlyOnce() {
        IGroupLoader loader = getLoader();
        FilesGroup filesGroup = new FilesGroup(TEST_GROUP, loader);
        filesGroup.get(ResourceType.JS);
        filesGroup.get(ResourceType.JS);
        verify(loader, times(1)).getResources(ResourceType.JS);
    }

    @Test
    public void minimizedFilesAreLoadedOnlyOnce() {
        IGroupLoader loader = getLoader();
        FilesGroup filesGroup = new FilesGroup(TEST_GROUP, loader);
        // Both resource types share one minimized-resources lookup.
        filesGroup.getMinimizedFile(ResourceType.CSS);
        filesGroup.getMinimizedFile(ResourceType.JS);
        verify(loader, times(1)).getMinimizedResources();
    }

    /** Builds a mock loader with canned minimized and exploded resources. */
    private IGroupLoader getLoader() {
        Map<ResourceType, String> minimized = new HashMap<ResourceType, String>();
        minimized.put(ResourceType.CSS, "x");
        minimized.put(ResourceType.JS, "y");
        IGroupLoader loader = mock(IGroupLoader.class);
        when(loader.getMinimizedResources()).thenReturn(minimized);
        when(loader.getResources(ResourceType.JS)).thenReturn(Arrays.asList("a", "b"));
        when(loader.getResources(ResourceType.CSS)).thenReturn(Arrays.asList("c"));
        return loader;
    }
}
|
/**
*
*/
package es;
import java.io.IOException;
import java.net.URLEncoder;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.http.HttpEntity;
import org.apache.http.HttpHost;
import org.apache.http.client.config.RequestConfig;
import org.apache.http.entity.ContentType;
import org.apache.http.message.BasicHeader;
import org.apache.http.nio.entity.NStringEntity;
import org.apache.http.util.EntityUtils;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.RestClientBuilder;
import org.junit.After;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.gson.Gson;
import jframe.demo.elasticsearch.weike.MemberDO;
/**
* @author dzh
* @date Sep 23, 2016 11:13:08 AM
* @since 1.0
*/
@Ignore
public class TestRestClient {
    static Logger LOG = LoggerFactory.getLogger(TestRestClient.class);
    // Low-level Elasticsearch REST client; created before and closed after
    // each test.
    RestClient client;

    @Before
    public void init() {
        // final CredentialsProvider credentialsProvider = new
        // BasicCredentialsProvider();
        // credentialsProvider.setCredentials(AuthScope.ANY, new
        // UsernamePasswordCredentials("user", "password"));
        // Connect to a local ES node: 5s connect timeout, 30s socket timeout.
        client = RestClient.builder(new HttpHost("127.0.0.1", 9200, "http")).setRequestConfigCallback(new RestClientBuilder.RequestConfigCallback() {
            @Override
            public RequestConfig.Builder customizeRequestConfig(RequestConfig.Builder requestConfigBuilder) {
                return requestConfigBuilder.setConnectTimeout(5000).setSocketTimeout(30000);
            }
        }).setMaxRetryTimeoutMillis(30000)
                // .setHttpClientConfigCallback(new
                // RestClientBuilder.HttpClientConfigCallback() {
                // @Override
                // public HttpAsyncClientBuilder
                // customizeHttpClient(HttpAsyncClientBuilder httpClientBuilder)
                // {
                // return httpClientBuilder
                // .setDefaultIOReactorConfig(IOReactorConfig.custom().setIoThreadCount(1).build());
                // // .setDefaultCredentialsProvider(credentialsProvider);
                // }
                // })
                .build();
        // Credentials credentials = new UsernamePasswordCredentials("root",
        // "dzh");
        // client.getState().setCredentials(AuthScope.ANY, credentials);
    }

    @Test
    @Ignore
    public void testBulkTest() {
        // client.performRequest(method, endpoint, headers)
        // Build 1000 identical member documents for a bulk payload.
        List<Object> memList = new LinkedList<>();
        int i = 0;
        while (i < 1000) {
            MemberDO mem = new MemberDO();
            mem.setAvgPrice(1.0);
            mem.setBuyerId(1l);
            mem.setBuyerNick("戴忠");
            mem.setMobile("18616020610");
            mem.setEmail("<EMAIL>");
            mem.setCity("上海");
            memList.add(mem);
            i++;
        }
        // Bulk API body: an action line ({"index": {}}) followed by the
        // document source, newline-delimited.
        StringBuilder buf = new StringBuilder(1024);
        for (Object mem : memList) {
            buf.append("{\"index\": {}}");
            buf.append("\n");
            buf.append(Gson.toJson(mem));
            buf.append("\n");
        }
        // System.out.println(buf.toString());
        HttpEntity entity = new NStringEntity(buf.toString(), ContentType.APPLICATION_JSON);
        // Rough payload size in KiB (2 bytes per char).
        System.out.println(buf.length() * 2 / (1024));
        // indexData("/weike/member/_bulk", entity);
    }

    // Shared JSON serializer for the tests.
    static Gson Gson = new Gson();

    @Test
    @Ignore
    public void indexTest() throws Exception {
        // Index the same document 10000 times from a 100-thread pool and
        // report total/average request latency.
        final MemberDO mem = new MemberDO();
        mem.setAvgPrice(1.0);
        mem.setBuyerId(1l);
        mem.setBuyerNick("戴忠");
        mem.setMobile("18616020610");
        mem.setEmail("<EMAIL>");
        mem.setCity("上海");
        long start = System.currentTimeMillis();
        int count = 10000;
        AtomicLong sumTime = new AtomicLong(0);
        CountDownLatch latch = new CountDownLatch(count);
        final ExecutorService executor = Executors.newFixedThreadPool(100);
        while (count > 0) {
            executor.submit(() -> {
                long startTime = System.currentTimeMillis();
                String json = Gson.toJson(mem);
                HttpEntity entity = new NStringEntity(json, ContentType.APPLICATION_JSON);
                String path = "/weike/member";
                // if (!"".equals(sellerId)) {
                // path += "?routing=" + sellerId;
                // }
                try {
                    Response indexResponse = client.performRequest("POST", path, Collections.<String, String> emptyMap(), entity);
                    long stopTime = System.currentTimeMillis() - startTime;
                    sumTime.addAndGet(stopTime);
                    LOG.info("indexMember {} - {}ms", json, stopTime);
                    LOG.info("indexResponse {}", indexResponse.toString());
                } catch (IOException e) {
                    LOG.error(e.getMessage(), e.fillInStackTrace());
                } finally {
                    // Count down even on failure so latch.await() can finish.
                    latch.countDown();
                }
            });
            count--;
        }
        latch.await();
        LOG.info("latch stop usage-{}ms sum-{}ms avg-{}ms", System.currentTimeMillis() - start, sumTime, sumTime.get() / 1000);
        executor.shutdownNow();
    }

    // POST an entity to the given path, logging the response or error.
    private void indexData(String path, HttpEntity entity) {
        try {
            Response indexResponse = client.performRequest("POST", path, Collections.<String, String> emptyMap(), entity);
            LOG.info(indexResponse.toString());
        } catch (IOException e) {
            LOG.error(e.getMessage(), e.fillInStackTrace());
        }
    }

    @After
    public void stop() {
        try {
            client.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    @Test
    public void testReq() throws Exception {
        Response response = client.performRequest("GET", "/", Collections.singletonMap("pretty", "true"));
        System.out.println(EntityUtils.toString(response.getEntity()));
        // index a document
        HttpEntity entity = new NStringEntity("{\n" + "    \"user\" : \"kimchy\",\n" + "    \"post_date\" : \"2009-11-15T14:12:12\",\n"
                + "    \"message\" : \"trying out Elasticsearch\"\n" + "}", ContentType.APPLICATION_JSON);
        // NOTE(review): URLEncoder is not Base64 — a real HTTP Basic header
        // needs base64("user:pass"). The header is also never attached to
        // the request below; confirm intent.
        String u = URLEncoder.encode("root:dzh", "utf-8");
        BasicHeader auth = new BasicHeader("Authorization", "Basic " + u);
        Response indexResponse = client.performRequest("PUT", "/twitter/tweet/1", Collections.<String, String> emptyMap(), entity);
    }

    public void testReqAsync() {
        // int numRequests = 10;
        // final CountDownLatch latch = new CountDownLatch(numRequests);
        // for (int i = 0; i < numRequests; i++) {
        // client.performRequestAsync("PUT", "/twitter/tweet/" + i,
        // Collections.<String, String>emptyMap(),
        // // assume that the documents are stored in an entities array
        // entities[i], new ResponseListener() {
        // @Override
        // public void onSuccess(Response response) {
        // System.out.println(response);
        // latch.countDown();
        // }
        //
        // @Override
        // public void onFailure(Exception exception) {
        // latch.countDown();
        // }
        // });
        // }
        // // wait for all requests to be completed
        // latch.await();
    }

    @Test
    @Ignore
    public void testSniffer() {
    }
}
|
#!/bin/bash
# Init-style control script (start/stop) for the cdstore service.
SERVICE_NAME=cdstore
# To be set during deployment
ENVIRONMENT=%ENVIRONMENT%
# Well-known locations for the service config, jar, PID file, and logs.
PATH_TO_CFG=/usr/lib/${SERVICE_NAME}/${SERVICE_NAME}.yml
PATH_TO_JAR=/usr/lib/${SERVICE_NAME}/${SERVICE_NAME}.jar
PATH_TO_PID=/var/run/${SERVICE_NAME}/${SERVICE_NAME}.pid
PATH_TO_OUT=/var/log/${SERVICE_NAME}/${SERVICE_NAME}.out
PATH_TO_ERR=/var/log/${SERVICE_NAME}/${SERVICE_NAME}.err
# Filled in by get_admin_port / start / read_pid respectively.
ADMIN_PORT=""
PID=""
# Fail fast if no JVM is available on PATH.
java -version 2>/dev/null > /dev/null || { echo "ERROR: java executable not found"; exit 1; }
JAVA_PARAMS=""
# Extract the admin connector port from the service's YAML config into
# ADMIN_PORT; exits with an error when it cannot be found.
get_admin_port() {
    local PORT_STR=$(cat ${PATH_TO_CFG} | grep -A10 "server:" | grep -A10 "adminConnectors:" | grep "port:" | cut -d ':' -f 2)
    # FIX: the original tested '$!' (the PID of the last background job),
    # which is empty here, so a failed extraction was never detected.
    # Check that we actually got a value instead.
    [ -n "${PORT_STR}" ] || { echo "ERROR: can't read admin port"; exit 1; }
    # Strip all whitespace from the extracted value.
    ADMIN_PORT=${PORT_STR//[[:blank:]]/}
    echo "Admin port: ${ADMIN_PORT}"
}
# Persist the PID of the freshly started process to the PID file.
write_pid() {
    echo "Writing running process PID ${PID} to ${PATH_TO_PID}"
    echo ${PID} > ${PATH_TO_PID} || { echo "ERROR writing PID"; exit 1; }
}
# Load the previously written PID from the PID file into PID.
read_pid() {
    PID=$(cat ${PATH_TO_PID}) || { echo "ERROR: cannot read PID from ${PATH_TO_PID}"; exit 1; }
}
# Launch the service in the background, redirecting stdout/stderr to the
# log files. BUILD_ID=dontKillMe keeps Jenkins' process reaper away.
start() {
    BUILD_ID=dontKillMe nohup java ${JAVA_PARAMS} -jar ${PATH_TO_JAR} server ${PATH_TO_CFG} 2> ${PATH_TO_ERR} > ${PATH_TO_OUT} &
    PID=$!
    echo "Started ${SERVICE_NAME}, PID: ${PID}"
}
# Kill the running process (when its PID is known) and remove the PID file.
stop() {
    if [ -z ${PID} ]
    then
        echo "Running process not found. Skipping stopping ${SERVICE_NAME}"
    else
        echo "Stopping ${SERVICE_NAME}, PID: ${PID}..."
        kill ${PID} || echo "WARNING: cannot kill ${PID}"
    fi
    rm ${PATH_TO_PID} || echo "WARNING: cannot delete PID file ${PATH_TO_PID}"
}
# Poll the service healthcheck endpoint until all three checks report
# healthy, or give up after ~10 attempts.
# $1: name of the caller's variable, set via eval to "1" on success or
#     "0" on timeout.
wait_for_start() {
    local __resultvar=$1
    local i=0
    # The healthcheck JSON is expected to contain exactly 3 healthy=true
    # entries once the service is fully up.
    while [ $(curl -s http://localhost:${ADMIN_PORT}/healthcheck?pretty=true | grep "healthy" | grep "true" | wc -l) -ne 3 ]
    do
        ((i++)) && ((i>10)) && { echo "ERROR: Waiting for ${SERVICE_NAME} to start timeout"; eval ${__resultvar}="0"; return; }
        echo "Waiting for ${SERVICE_NAME} health check (${i}) ..."
        sleep 1
    done
    eval ${__resultvar}="1"
}
# Wait (up to ~10s) until nothing is listening on the admin port anymore.
# NOTE(review): the grep matches ADMIN_PORT anywhere in the netstat line,
# so an unrelated port/address containing the digits could delay this —
# confirm acceptable.
wait_for_stop_admin_port() {
    local i=0
    while [ $(netstat -ano | grep ${ADMIN_PORT} | grep LISTEN | wc -l) -gt 0 ]
    do
        ((i++)) && ((i>10)) && { echo "ERROR: Waiting for ${SERVICE_NAME} to close admin port timeout"; exit 1; }
        echo "Waiting for ${SERVICE_NAME} to close admin port ${ADMIN_PORT} (${i}) ..."
        sleep 1
    done
}
# Wait (up to ~10s) for the process with the recorded PID to disappear
# from /proc; skipped when no PID is known.
wait_for_stop_pid() {
    if [ -z ${PID} ]
    then
        echo "Skipping waiting for process to close (unknown PID)"
    else
        local i=0
        while [ -e /proc/${PID} ]
        do
            ((i++)) && ((i>10)) && { echo "ERROR: Waiting for ${SERVICE_NAME} to stop timeout"; exit 1; }
            echo "Waiting for ${SERVICE_NAME} to terminate (${i}) ..."
            sleep 1
        done
    fi
}
# Full shutdown wait: admin port must close and the process must exit.
wait_for_stop() {
    wait_for_stop_admin_port
    wait_for_stop_pid
}
# Dump the captured stdout/stderr logs (useful in CI output after start).
print_outs() {
    echo "===== STDOUT ====="
    cat ${PATH_TO_OUT}
    echo "===== STDERR ====="
    cat ${PATH_TO_ERR}
    echo "=================="
}
# Entry point: dispatch on the requested action (start|stop).
case ${1} in
    start)
        # An existing PID file is treated as "already running".
        if [ ! -f ${PATH_TO_PID} ]
        then
            echo "Starting ${SERVICE_NAME}"
            get_admin_port
            start
            write_pid
            started="0"; wait_for_start started
            print_outs
            # Roll back a half-started service if the health check never passed.
            if [ ${started} == "0" ]
            then
                echo "${SERVICE_NAME} not started in expected period of time. Making cleanup..."
                stop
                exit 1
            else
                echo "${SERVICE_NAME} started"
            fi
        else
            echo "${SERVICE_NAME} already running"
            read_pid
        fi
        ;;
    stop)
        if [ -f ${PATH_TO_PID} ]
        then
            echo "Stopping ${SERVICE_NAME}"
            read_pid
            get_admin_port
            stop
            wait_for_stop
            echo "${SERVICE_NAME} stopped"
        else
            echo "${SERVICE_NAME} is not running"
        fi
        ;;
    *)
        echo "Usage: $0 [start|stop]"
        ;;
esac
|
/** Permission filter: partitions node ids into `losence` (not granted by the role set) and `resence` (granted). **/
export function inteRole<T extends { id: number; children: Array<T> }>(
  list: Array<T>,
  role: Array<number>
): { losence: Array<number>; resence: Array<number> } {
  const losence: Array<number> = []
  const resence: Array<number> = []
  list.forEach((parent) => {
    // Split the children by whether their id appears in the role set,
    // preserving their original order.
    const granted = parent.children.filter((child) => role.includes(child.id)).map((child) => child.id)
    const denied = parent.children.filter((child) => !role.includes(child.id)).map((child) => child.id)
    // A parent counts as present when at least one child is granted;
    // otherwise the parent itself goes to the absent list.
    if (granted.length > 0) {
      resence.push(...granted, parent.id)
    } else {
      losence.push(parent.id)
    }
    losence.push(...denied)
  })
  return { losence, resence }
}
/**
 * Extracts a plain-text description from rendered Markdown/HTML:
 * strips tag-like `<...>` spans and newlines, then truncates to 255 chars.
 * Returns '' for any falsy input.
 */
export function extractStr(value: string): string {
  if (!value) {
    return ''
  }
  // Regex literal replaces the RegExp-constructor form (same pattern,
  // non-greedy tag match); the redundant `const str = value` alias is gone.
  const tagPattern = /<.+?>/g
  return value
    .replace(tagPattern, '')
    .replace(/\n/g, '')
    .slice(0, 255)
}
|
/*
Copyright 2012 Two Toasters, LLC
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.twotoasters.android.horizontalimagescroller.widget;
import android.view.View;
import android.widget.AdapterView;
import android.widget.AdapterView.OnItemClickListener;
public class SelectionToggleOnItemClickListener implements OnItemClickListener {

    /**
     * Toggles selection: clicking the currently selected image deselects it
     * (index -1); clicking any other image makes it the current selection.
     */
    @Override
    public void onItemClick(AdapterView<?> parent, View view, int position, long itemId) {
        HorizontalImageScroller scroller = (HorizontalImageScroller) parent;
        boolean clickedCurrent = scroller.hasCurrentImageIndex()
                && scroller.getCurrentImageIndex() == position;
        scroller.setCurrentImageIndex(clickedCurrent ? -1 : position);
    }
}
|
$(function() {
    $(window).scroll(function() {
        // Window bottom is loop-invariant: compute once per scroll event.
        // The +80 offset starts the fade slightly before the element is reached.
        var bottomOfWindow = $(window).scrollTop() + $(window).height() + 80;
        $('.fadeInBlock').each(function() {
            var bottomOfObject = $(this).position().top + $(this).outerHeight();
            if (bottomOfWindow > bottomOfObject) {
                $(this).animate({'opacity': '1'}, 1000);
            }
        });
        $('.slideLeft').each(function() {
            var imagePos = $(this).offset().top;
            var topOfWindow = $(window).scrollTop();
            if (imagePos < topOfWindow + 10) {
                // BUG FIX: addClass takes a bare class name; the original
                // addClass(".slideLeft") added a literal class named ".slideLeft".
                // NOTE(review): adding "slideLeft" to an element already matched
                // by .slideLeft is a no-op — a distinct trigger class (e.g.
                // "slideLeftNow") may have been intended; confirm against the CSS.
                $(this).addClass("slideLeft");
            }
        });
    });
});
|
<filename>spec/factories/props_factory.rb
# Factory for Prop records (FactoryGirl DSL).
FactoryGirl.define do
  factory :prop do
    # Random paragraph body plus the proposing user and organisation.
    # NOTE(review): ':propser' looks like a typo for ':proposer' but must match
    # the model's association name — confirm before renaming.
    body FFaker::Lorem.paragraph
    association :propser, factory: :user
    association :organisation, factory: :organisation
    # Every built prop gets one receiver attached.
    after :build do |prop|
      prop.prop_receivers.new(user: create(:user))
    end
    # Prop that already carries a single upvote.
    trait :with_upvote do
      after(:create) do |prop|
        prop.upvotes << create(:upvote)
      end
    end
    # Prop detached from any organisation.
    trait :without_organisation do
      organisation nil
    end
  end
end
|
// https://cses.fi/problemset/task/1085/
#include <iostream>
#include <vector>
using namespace std;
typedef long long ll;
typedef vector<int> vi;
bool p(const vi &a, ll sum, int k) {
ll s = 0;
int i = 1;
for (auto x : a) {
if (x > sum) return false;
if (s + x > sum) {
s = x;
i++;
} else s += x;
}
return i <= k;
}
// CSES 1085 (Array Division): split the array into at most k contiguous
// pieces minimizing the largest piece sum.
int main() {
    int n, k;
    cin >> n >> k;
    vi a(n);
    ll s = 0;  // total sum — an upper bound on the answer
    for (int i = 0; i < n; i++) {
        cin >> a[i];
        s += a[i];
    }
    // Binary-search-by-halved-steps: maintain the invariant that l is
    // infeasible (p(l) false), pushing l as high as possible with step i.
    // NOTE(review): assumes p(0) is false, i.e. at least one element >= 1 —
    // holds for the problem's positive inputs; confirm if reused elsewhere.
    ll l = 0;
    for (ll i = s / 2; i >= 1; i /= 2) {
        while (l + i < s && !p(a, l + i, k)) l += i;
    }
    // l is the largest infeasible value below s, so l + 1 is the minimum
    // feasible maximum segment sum.
    cout << l + 1 << endl;
}
|
#!/usr/bin/python3
# update_produce.py - Corrects costs in produce sales spreadsheet.
import openpyxl

wb = openpyxl.load_workbook('produceSales.xlsx')
# wb['Sheet'] replaces the deprecated get_sheet_by_name().
sheet = wb['Sheet']

# Product types and their updated prices.
PRICE_UPDATES = {'Garlic': 99.07,
                 'Celery': 101.19,
                 'Lemon': 333.27}

# Loop over the data rows and update the prices.
# max_row is inclusive, so the range must run to max_row + 1 —
# range(2, sheet.max_row) silently skipped the last data row.
for row_num in range(2, sheet.max_row + 1):  # start at 2 to skip the header row
    produce_name = sheet.cell(row=row_num, column=1).value
    if produce_name in PRICE_UPDATES:
        sheet.cell(row=row_num, column=2).value = PRICE_UPDATES[produce_name]

wb.save('updated_produce_sales.xlsx')
|
from django.urls import path
from . import views
# URL configuration: index, search, user CRUD, matching, location and crawl
# endpoints. One flat list replaces the repeated "urlpatterns +=" blocks;
# the resulting pattern sequence is identical.
urlpatterns = [
    path('', views.index, name='index'),
    path('search/', views.Search, name='search'),
    # User management
    path('users/', views.UserDetail, name='user-detail'),
    path('users/create/', views.CreateUser, name='user-create'),
    path('users/insert/', views.InsertUserForm, name='user-insert'),
    path('users/update/', views.UserUpdate, name='user-update'),
    path('users/update_form', views.UpdateUserForm, name='user-update-form'),
    path('users/delete/', views.UserDelete, name='user-delete'),
    path('users/deleted/', views.UserDeleted, name='user-deleted'),
    path('users/match/', views.UserMatch, name='user-match'),
    # Matching
    path('match/user/', views.MatchUser, name='match-user'),
    path('match/place/', views.MatchPlace, name='match-place'),
    # Location
    path('location/', views.UpdateLocation, name='location-update'),
    path('location/search_location', views.SearchLocation, name='location-search'),
    # Crawling
    path('places/crawl/', views.CrawlPopularity, name='place_crawl'),
]
|
<gh_stars>100-1000
package dev.fiki.forgehax.main.mods.render;
import com.mojang.blaze3d.systems.RenderSystem;
import dev.fiki.forgehax.api.cmd.settings.StringSetting;
import dev.fiki.forgehax.api.color.Color;
import dev.fiki.forgehax.api.entity.EnchantmentUtils;
import dev.fiki.forgehax.api.event.SubscribeListener;
import dev.fiki.forgehax.api.events.render.GuiContainerRenderEvent;
import dev.fiki.forgehax.api.mod.Category;
import dev.fiki.forgehax.api.mod.ToggleMod;
import dev.fiki.forgehax.api.modloader.RegisterMod;
import net.minecraft.inventory.container.Slot;
import net.minecraft.item.EnchantedBookItem;
import net.minecraft.item.ItemStack;
import net.minecraft.item.Items;
import net.minecraft.nbt.ListNBT;
import net.minecraft.util.text.ITextComponent;
import net.minecraftforge.fml.client.gui.GuiUtils;
import java.util.function.Predicate;
@RegisterMod(
    name = "Highlighter",
    description = "Highlight container contents",
    category = Category.RENDER
)
public class Highlighter extends ToggleMod {
  // Case-insensitive substring matched against item display names and
  // enchantment names. NOTE(review): with the empty default, contains("")
  // is true, so every non-empty slot is highlighted until a filter is set.
  private final StringSetting find = newStringSetting()
      .name("find")
      .description("Highlight any item matching this string")
      .defaultTo("")
      .build();

  // An item counts as enchanted if it is an enchanted book or carries
  // enchantments directly.
  private boolean isEnchanted(ItemStack stack) {
    return Items.ENCHANTED_BOOK.equals(stack.getItem()) || stack.isEnchanted();
  }

  // Enchanted books store their enchantment list separately from normal items.
  private ListNBT getEnchantmentNBT(ItemStack stack) {
    return Items.ENCHANTED_BOOK.equals(stack.getItem())
        ? EnchantedBookItem.getEnchantments(stack)
        : stack.getEnchantmentTags();
  }

  // True when the stack's display name — or any of its enchantments' full
  // names — satisfies the matcher. Empty stacks never match.
  private boolean shouldHighlight(ItemStack stack, Predicate<String> matcher) {
    if (stack.isEmpty()) {
      return false;
    } else if (matcher.test(stack.getDisplayName().getString())) {
      return true;
    } else if (isEnchanted(stack) &&
        EnchantmentUtils.getEnchantments(getEnchantmentNBT(stack)).stream()
            .map(en -> en.getEnchantment().getFullname(en.getLevel()))
            .map(ITextComponent::getString)
            .anyMatch(matcher)) {
      return true;
    }
    return false; // default case
  }

  // Draws a translucent gold gradient over every matching slot while a
  // container GUI background is being rendered.
  @SubscribeListener
  public void onGuiContainerDrawEvent(GuiContainerRenderEvent.Background event) {
    RenderSystem.enableDepthTest();
    event.getStack().pushPose();
    // Shift into the container's local coordinate space.
    event.getStack().translate(event.getContainerScreen().getGuiLeft(), event.getContainerScreen().getGuiTop(), 0);
    final String matching = find.getValue().toLowerCase();
    for (Slot slot : event.getContainerScreen().getMenu().slots) {
      ItemStack stack = slot.getItem();
      if (shouldHighlight(stack, str -> str.toLowerCase().contains(matching))) {
        GuiUtils.drawGradientRect(event.getStack().last().pose(), 0,
            slot.x, slot.y,
            slot.x + 16, slot.y + 16,
            Color.of(218, 165, 32, 200).toBuffer(),
            Color.of(189, 183, 107, 200).toBuffer());
      }
    }
    event.getStack().popPose();
  }
}
|
<reponame>nicolasruscher/Artemis
import inspect
def check_class_names(package, *args):
    """Return True iff every name in *args is a class defined in `package`."""
    members = inspect.getmembers(package, inspect.isclass)
    return _check_names(members, *args)
def check_constructor_args(clazz, *args):
    """Return True iff every name in *args is a parameter of clazz.__init__.

    Uses getfullargspec: getargspec was deprecated and removed in
    Python 3.11; .args carries the same positional/keyword parameter names.
    """
    arguments = inspect.getfullargspec(clazz.__init__).args
    return all(arg in arguments for arg in args)
def check_method_names(object, *args):
    """Return True iff every name in *args is a bound method of `object`."""
    members = inspect.getmembers(object, inspect.ismethod)
    return _check_names(members, *args)
def check_abstract_method_names(clazz, *args):
    """Return True iff every name in *args is an abstract method of `clazz`.

    Raises IndexError if `clazz` has no __abstractmethods__ member
    (i.e. it is not an ABC) — same behavior as before.
    """
    members = inspect.getmembers(clazz, lambda a: not inspect.isroutine(a))
    # BUG FIX: compare strings with '==', not 'is'. Identity of equal strings
    # is an interning accident of the interpreter, not a guarantee.
    abstract_members = [m for m in members if m[0] == '__abstractmethods__']
    names = list(abstract_members[0][1])
    for name in args:
        if name not in names:
            return False
    return True
def check_attributes(clazz, *args):
    """Return True iff every name in *args is a non-dunder attribute of `clazz`."""
    members = inspect.getmembers(clazz, lambda a: not inspect.isroutine(a))
    attributes = [
        member for member in members
        if not (member[0].startswith('__') and member[0].endswith('__'))
    ]
    return _check_names(attributes, *args)
def _check_names(cls, *args):
names = list(map(lambda c: c[0], cls))
for name in args:
if name not in names:
return False
return True
|
# Build the image tagged "testapp" from the Dockerfile in the current directory.
docker build -t testapp .
# Run it interactively (--rm removes the container on exit):
#   -p 3001:3001        publish the app port
#   -v $PWD:/home/app   mount the working tree into the container
docker run \
-p 3001:3001 \
-v $PWD:/home/app \
-it --rm testapp
|
package org.insightcentre.nlp.saffron.benchmarks;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import joptsimple.OptionParser;
import joptsimple.OptionSet;
import org.insightcentre.nlp.saffron.data.Status;
import org.insightcentre.nlp.saffron.data.Taxonomy;
/**
* Benchmark system for comparing two extracted taxonomies
*
* @author <NAME> <<EMAIL>>
*/
public class TaxonomyExtractionBenchmark {

    /**
     * Counts parent->child edges of {@code taxo} (recursively) that are
     * present in the case-lowered gold edge set.
     */
    private static int matches(Taxonomy taxo, Set<StringPair> gold) {
        int m = 0;
        for(Taxonomy child : taxo.children) {
            if(gold.contains(new StringPair(taxo.root.toLowerCase(), child.root.toLowerCase()))) {
                m++;
            }
            m += matches(child, gold);
        }
        return m;
    }

    /** Aggregate shape statistics of a taxonomy tree. */
    private static class Stats {
        public int size;      // total node count
        public int maxDepth;  // longest root-to-leaf path
        public int childSq;   // sum of squared branching factors
    }

    /** Node count of the tree (double so callers can divide without casts). */
    private static double size(Taxonomy taxo) {
        int m = 1;
        for(Taxonomy child : taxo.children) {
            m += size(child);
        }
        return m;
    }

    /** Computes size, maximum depth and squared-branching sum in one pass. */
    private static Stats stats(Taxonomy taxo) {
        Stats s = new Stats();
        s.size = 1;
        s.maxDepth = 1;
        s.childSq = taxo.children.size() * taxo.children.size();
        for(Taxonomy child : taxo.children) {
            Stats c = stats(child);
            s.size += c.size;
            s.maxDepth = Math.max(s.maxDepth, c.maxDepth + 1);
            s.childSq += c.childSq;
        }
        return s;
    }

    /** Edge-level evaluation result. */
    private static class Scores {
        public int matches;
        public double precision;
        public double recall;
        public Scores(int matches, double precision, double recall) {
            this.matches = matches;
            this.precision = precision;
            this.recall = recall;
        }
    }

    /**
     * Precision = matched edges / extracted node count;
     * recall = matched edges / gold edge count.
     */
    private static Scores evalTaxo(Taxonomy extracted, Set<StringPair> gold) {
        final int matches = matches(extracted, gold);
        return new Scores(matches,
            (double)matches / size(extracted),
            (double)matches / gold.size());
    }

    /** Prints the error and CLI usage, then terminates with exit code -1. */
    private static void badOptions(OptionParser p, String message) throws IOException {
        System.err.println("Error: " + message);
        p.printHelpOn(System.err);
        System.exit(-1);
    }

    public static void main(String[] args) {
        try {
            final ObjectMapper mapper = new ObjectMapper();
            // Parse command line arguments
            final OptionParser p = new OptionParser() {
                {
                    accepts("o", "The output taxonomy").withRequiredArg().ofType(File.class);
                    accepts("g", "The gold taxonomy").withRequiredArg().ofType(File.class);
                }
            };
            final OptionSet os;
            try {
                os = p.parse(args);
            } catch (Exception x) {
                badOptions(p, x.getMessage());
                return;
            }
            final File taxoFile = (File)os.valueOf("o");
            if(taxoFile == null || !taxoFile.exists()) {
                badOptions(p, "Output taxonomy not specified or does not exist");
                return;
            }
            final File goldFile = (File)os.valueOf("g");
            if(goldFile == null || !goldFile.exists()) {
                badOptions(p, "Gold taxonomy not specified or does not exist");
                return;
            }
            final Taxonomy taxo = mapper.readValue(taxoFile, Taxonomy.class);
            final Set<StringPair> gold;
            final Taxonomy goldTaxo;
            if(goldFile.getName().endsWith(".json")) {
                // BUG FIX: the gold taxonomy must be read from goldFile.
                // It previously re-read taxoFile, so every JSON-gold run
                // compared the extracted taxonomy against itself.
                goldTaxo = mapper.readValue(goldFile, Taxonomy.class);
                gold = linksFromTaxo(goldTaxo);
            } else {
                gold = readTExEval(goldFile);
                goldTaxo = taxoFromLinks(gold);
            }
            final Scores s = evalTaxo(taxo, gold);
            final Stats stats = stats(taxo);
            final double modFM = FowlkesMallows.fowlkesMallows(taxo, goldTaxo);
            System.err.printf("|-----------|--------|\n");
            System.err.printf("| Matches   | % 6d |\n", s.matches);
            System.err.printf("| Predicted | % 6d |\n", stats.size);
            System.err.printf("| Gold      | % 6d |\n", gold.size());
            System.err.printf("| Depth     | % 6d |\n", stats.maxDepth);
            System.err.printf("| Branching | %.4f |\n", Math.sqrt((double)stats.childSq) / stats.size);
            System.err.printf("|-----------|--------|\n");
            System.err.printf("| Precision | %.4f |\n", s.precision);
            System.err.printf("| Recall    | %.4f |\n", s.recall);
            System.err.printf("| F-Measure | %.4f |\n",
                s.precision == 0.0 && s.recall == 0.0 ? 0.0 :
                2.0 * s.recall * s.precision / (s.precision + s.recall));
            System.err.printf("| F&M       | %.4f |\n", modFM);
        } catch (Exception x) {
            x.printStackTrace();
            System.exit(-1);
        }
    }

    /** Flattens a taxonomy tree into its set of (parent, child) edges. */
    private static Set<StringPair> linksFromTaxo(Taxonomy taxo) {
        HashSet<StringPair> links = new HashSet<>();
        _linksFromTaxo(taxo, links);
        return links;
    }

    /**
     * Reconstructs a taxonomy tree from (parent, child) edges. If the edge
     * set has several roots they are joined under a synthetic "" root;
     * zero roots means the input contains a cycle.
     */
    private static Taxonomy taxoFromLinks(Set<StringPair> gold) {
        Map<String, Taxonomy> taxos = new HashMap<>();
        Set<String> nonRoots = new HashSet<>();
        for(StringPair sp : gold) {
            final ArrayList<Taxonomy> children;
            if(taxos.containsKey(sp._1)) {
                children = new ArrayList<>(taxos.get(sp._1).children);
            } else {
                children = new ArrayList<>();
            }
            final Taxonomy child;
            if(taxos.containsKey(sp._2)) {
                child = taxos.get(sp._2);
            } else {
                child = new Taxonomy(sp._2, 0, 0, new ArrayList<>(), Status.none);
            }
            children.add(child);
            taxos.put(sp._1, new Taxonomy(sp._1, 0, 0, children, Status.none));
            nonRoots.add(sp._2);
        }
        Set<String> roots = new HashSet<>(taxos.keySet());
        roots.removeAll(nonRoots);
        if(roots.size() == 1) {
            return taxos.get(roots.iterator().next());
        } else if(roots.size() == 0) {
            throw new RuntimeException("Taxo file contains loops");
        } else {
            final ArrayList<Taxonomy> children = new ArrayList<>();
            for(String root : roots) {
                children.add(taxos.get(root));
            }
            return new Taxonomy("", 0, 0, children, Status.none);
        }
    }

    /**
     * Reads a TExEval-format gold file: one tab-separated "child\tparent"
     * pair per non-empty line; stored case-lowered as (parent, child).
     */
    static Set<StringPair> readTExEval(File goldFile) throws IOException {
        HashSet<StringPair> links = new HashSet<>();
        String line;
        try(BufferedReader reader = new BufferedReader(new FileReader(goldFile))) {
            while((line = reader.readLine()) != null) {
                if(!line.equals("")) {
                    String[] elems = line.split("\t");
                    if(elems.length != 2) {
                        throw new IOException("Bad Line: " + line);
                    }
                    links.add(new StringPair(elems[1].toLowerCase(), elems[0].toLowerCase()));
                }
            }
        }
        return links;
    }

    /** Recursive worker for {@link #linksFromTaxo}. */
    private static void _linksFromTaxo(Taxonomy taxo, HashSet<StringPair> links) {
        for(Taxonomy child : taxo.children) {
            links.add(new StringPair(taxo.root.toLowerCase(), child.root.toLowerCase()));
            _linksFromTaxo(child, links);
        }
    }

    /** Immutable ordered pair of strings with value equality, for edge sets. */
    static class StringPair {
        public final String _1, _2;
        public StringPair(String _1, String _2) {
            this._1 = _1;
            this._2 = _2;
        }
        @Override
        public int hashCode() {
            int hash = 5;
            hash = 53 * hash + Objects.hashCode(this._1);
            hash = 53 * hash + Objects.hashCode(this._2);
            return hash;
        }
        @Override
        public boolean equals(Object obj) {
            if (this == obj) {
                return true;
            }
            if (obj == null) {
                return false;
            }
            if (getClass() != obj.getClass()) {
                return false;
            }
            final StringPair other = (StringPair) obj;
            if (!Objects.equals(this._1, other._1)) {
                return false;
            }
            if (!Objects.equals(this._2, other._2)) {
                return false;
            }
            return true;
        }
    }
}
|
<filename>client/ws_client.go
package client
import (
"github.com/gorilla/websocket"
"sync"
)
// CodecType identifies a wire-format codec.
// NOTE(review): not referenced anywhere in this file — confirm it is used elsewhere.
type CodecType int32

// Global registry of connected websocket clients, keyed by client id.
// wsMux guards all access to wsClientMap.
var wsClientMap = make(map[string]*WsClient)
var wsMux = sync.RWMutex{}
// AddWsClient registers cli in the global client map, replacing any
// existing entry with the same id.
func AddWsClient(cli *WsClient) {
	wsMux.Lock()
	defer wsMux.Unlock()
	wsClientMap[cli.Id()] = cli
}
// RemoveWsClient drops cli from the global client map (no-op if absent).
func RemoveWsClient(cli *WsClient) {
	wsMux.Lock()
	defer wsMux.Unlock()
	delete(wsClientMap, cli.Id())
}
// LookupClient returns the client registered under id and whether it exists.
func LookupClient(id string) (*WsClient, bool) {
	wsMux.RLock()
	defer wsMux.RUnlock()
	cli, ok := wsClientMap[id]
	return cli, ok
}
// WsClient pairs a websocket connection with a stable identifier.
type WsClient struct {
	id   string
	conn *websocket.Conn
}

// NewWsClient wraps an already-established connection.
func NewWsClient(id string, conn *websocket.Conn) *WsClient {
	return &WsClient{
		id:   id,
		conn: conn,
	}
}

// Id returns the client's identifier.
func (c *WsClient) Id() string {
	return c.id
}

// Send marshals request to JSON and writes it to the connection.
// NOTE(review): gorilla's WriteJSON does not support concurrent writers —
// confirm callers serialize calls to Send.
func (c *WsClient) Send(request interface{}) error {
	return c.conn.WriteJSON(request)
}
|
/*
* [The "BSD licence"]
* Copyright (c) 2010 <NAME> (JesusFreke)
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
* IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
* THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.jf.baksmali;
import org.jf.dexlib.DexFile;
import org.jf.dexlib.Util.ByteArrayAnnotatedOutput;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
/** Serializes a DexFile, optionally emitting an annotated text dump. */
public class dump {
    /**
     * Places and serializes {@code dexFile}, then optionally writes an
     * annotated dump and/or the rebuilt binary (with recomputed signature
     * and checksum).
     *
     * @param dexFile            in-memory dex model to serialize
     * @param dumpFileName       path for the annotated text dump, or null to skip
     * @param outputDexFileName  path for the rewritten .dex, or null to skip
     * @param sort               true to sort all items for a canonical ordering;
     *                           false to keep the original in-place order
     * @throws IOException if placement/serialization fails; per-file write and
     *                     close errors are caught and logged instead
     */
    public static void dump(DexFile dexFile, String dumpFileName, String outputDexFileName, boolean sort)
            throws IOException {
        if (sort) {
            //sort all items, to guarantee a unique ordering
            dexFile.setSortAllItems(true);
        } else {
            //don't change the order
            dexFile.setInplace(true);
        }
        ByteArrayAnnotatedOutput out = new ByteArrayAnnotatedOutput();
        if (dumpFileName != null) {
            // only pay the annotation cost when a dump was requested (120-col output)
            out.enableAnnotations(120, true);
        }
        dexFile.place();
        dexFile.writeTo(out);
        //write the dump
        if (dumpFileName != null) {
            out.finishAnnotating();
            FileWriter writer = null;
            try {
                writer = new FileWriter(dumpFileName);
                out.writeAnnotationsTo(writer);
            } catch (IOException ex) {
                System.err.println("\n\nThere was an error while dumping the dex file to " + dumpFileName);
                ex.printStackTrace();
            } finally {
                // manual close: errors on close are reported but never propagated
                if (writer != null) {
                    try {
                        writer.close();
                    } catch (IOException ex) {
                        System.err.println("\n\nThere was an error while closing the dump file " + dumpFileName);
                        ex.printStackTrace();
                    }
                }
            }
        }
        //rewrite the dex file
        if (outputDexFileName != null) {
            byte[] bytes = out.toByteArray();
            // signature must be computed before the checksum (checksum covers it)
            DexFile.calcSignature(bytes);
            DexFile.calcChecksum(bytes);
            FileOutputStream fileOutputStream = null;
            try {
                fileOutputStream = new FileOutputStream(outputDexFileName);
                fileOutputStream.write(bytes);
            } catch (IOException ex) {
                System.err.println("\n\nThere was an error while writing the dex file " + outputDexFileName);
                ex.printStackTrace();
            } finally {
                if (fileOutputStream != null) {
                    try {
                        fileOutputStream.close();
                    } catch (IOException ex) {
                        System.err.println("\n\nThere was an error while closing the dex file " + outputDexFileName);
                        ex.printStackTrace();
                    }
                }
            }
        }
    }
}
|
// Register Laravel's built-in authentication routes
// (login, logout, register, password reset).
Auth::routes();
// Home page handled by HomeController::index; named "home" so the
// route()/redirect() helpers can reference it.
Route::get('/home', 'HomeController@index')->name('home');
<filename>google/firestore/admin/v1/google-cloud-firestore-admin-v1-ruby/proto_docs/google/firestore/admin/v1/operation.rb
# frozen_string_literal: true
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Auto-generated by gapic-generator-ruby. DO NOT EDIT!
module Google
module Cloud
module Firestore
module Admin
module V1
# Metadata for {::Google::Longrunning::Operation google.longrunning.Operation} results from
# {::Google::Cloud::Firestore::Admin::V1::FirestoreAdmin::Client#create_index FirestoreAdmin.CreateIndex}.
# @!attribute [rw] start_time
# @return [::Google::Protobuf::Timestamp]
# The time this operation started.
# @!attribute [rw] end_time
# @return [::Google::Protobuf::Timestamp]
# The time this operation completed. Will be unset if operation still in
# progress.
# @!attribute [rw] index
# @return [::String]
# The index resource that this operation is acting on. For example:
# `projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/indexes/{index_id}`
# @!attribute [rw] state
# @return [::Google::Cloud::Firestore::Admin::V1::OperationState]
# The state of the operation.
# @!attribute [rw] progress_documents
# @return [::Google::Cloud::Firestore::Admin::V1::Progress]
# The progress, in documents, of this operation.
# @!attribute [rw] progress_bytes
# @return [::Google::Cloud::Firestore::Admin::V1::Progress]
# The progress, in bytes, of this operation.
class IndexOperationMetadata
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
# Metadata for {::Google::Longrunning::Operation google.longrunning.Operation} results from
# {::Google::Cloud::Firestore::Admin::V1::FirestoreAdmin::Client#update_field FirestoreAdmin.UpdateField}.
# @!attribute [rw] start_time
# @return [::Google::Protobuf::Timestamp]
# The time this operation started.
# @!attribute [rw] end_time
# @return [::Google::Protobuf::Timestamp]
# The time this operation completed. Will be unset if operation still in
# progress.
# @!attribute [rw] field
# @return [::String]
# The field resource that this operation is acting on. For example:
# `projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/fields/{field_path}`
# @!attribute [rw] index_config_deltas
# @return [::Array<::Google::Cloud::Firestore::Admin::V1::FieldOperationMetadata::IndexConfigDelta>]
# A list of {::Google::Cloud::Firestore::Admin::V1::FieldOperationMetadata::IndexConfigDelta IndexConfigDelta}, which describe the intent of this
# operation.
# @!attribute [rw] state
# @return [::Google::Cloud::Firestore::Admin::V1::OperationState]
# The state of the operation.
# @!attribute [rw] progress_documents
# @return [::Google::Cloud::Firestore::Admin::V1::Progress]
# The progress, in documents, of this operation.
# @!attribute [rw] progress_bytes
# @return [::Google::Cloud::Firestore::Admin::V1::Progress]
# The progress, in bytes, of this operation.
class FieldOperationMetadata
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
# Information about an index configuration change.
# @!attribute [rw] change_type
# @return [::Google::Cloud::Firestore::Admin::V1::FieldOperationMetadata::IndexConfigDelta::ChangeType]
# Specifies how the index is changing.
# @!attribute [rw] index
# @return [::Google::Cloud::Firestore::Admin::V1::Index]
# The index being changed.
class IndexConfigDelta
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
# Specifies how the index is changing.
module ChangeType
# The type of change is not specified or known.
CHANGE_TYPE_UNSPECIFIED = 0
# The single field index is being added.
ADD = 1
# The single field index is being removed.
REMOVE = 2
end
end
end
# Metadata for {::Google::Longrunning::Operation google.longrunning.Operation} results from
# {::Google::Cloud::Firestore::Admin::V1::FirestoreAdmin::Client#export_documents FirestoreAdmin.ExportDocuments}.
# @!attribute [rw] start_time
# @return [::Google::Protobuf::Timestamp]
# The time this operation started.
# @!attribute [rw] end_time
# @return [::Google::Protobuf::Timestamp]
# The time this operation completed. Will be unset if operation still in
# progress.
# @!attribute [rw] operation_state
# @return [::Google::Cloud::Firestore::Admin::V1::OperationState]
# The state of the export operation.
# @!attribute [rw] progress_documents
# @return [::Google::Cloud::Firestore::Admin::V1::Progress]
# The progress, in documents, of this operation.
# @!attribute [rw] progress_bytes
# @return [::Google::Cloud::Firestore::Admin::V1::Progress]
# The progress, in bytes, of this operation.
# @!attribute [rw] collection_ids
# @return [::Array<::String>]
# Which collection ids are being exported.
# @!attribute [rw] output_uri_prefix
# @return [::String]
# Where the entities are being exported to.
class ExportDocumentsMetadata
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
# Metadata for {::Google::Longrunning::Operation google.longrunning.Operation} results from
# {::Google::Cloud::Firestore::Admin::V1::FirestoreAdmin::Client#import_documents FirestoreAdmin.ImportDocuments}.
# @!attribute [rw] start_time
# @return [::Google::Protobuf::Timestamp]
# The time this operation started.
# @!attribute [rw] end_time
# @return [::Google::Protobuf::Timestamp]
# The time this operation completed. Will be unset if operation still in
# progress.
# @!attribute [rw] operation_state
# @return [::Google::Cloud::Firestore::Admin::V1::OperationState]
# The state of the import operation.
# @!attribute [rw] progress_documents
# @return [::Google::Cloud::Firestore::Admin::V1::Progress]
# The progress, in documents, of this operation.
# @!attribute [rw] progress_bytes
# @return [::Google::Cloud::Firestore::Admin::V1::Progress]
# The progress, in bytes, of this operation.
# @!attribute [rw] collection_ids
# @return [::Array<::String>]
# Which collection ids are being imported.
# @!attribute [rw] input_uri_prefix
# @return [::String]
# The location of the documents being imported.
class ImportDocumentsMetadata
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
# Returned in the {::Google::Longrunning::Operation google.longrunning.Operation} response field.
# @!attribute [rw] output_uri_prefix
# @return [::String]
# Location of the output files. This can be used to begin an import
# into Cloud Firestore (this project or another project) after the operation
# completes successfully.
class ExportDocumentsResponse
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
# Describes the progress of the operation.
# Unit of work is generic and must be interpreted based on where {::Google::Cloud::Firestore::Admin::V1::Progress Progress}
# is used.
# @!attribute [rw] estimated_work
# @return [::Integer]
# The amount of work estimated.
# @!attribute [rw] completed_work
# @return [::Integer]
# The amount of work completed.
class Progress
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
# Describes the state of the operation.
module OperationState
# Unspecified.
OPERATION_STATE_UNSPECIFIED = 0
# Request is being prepared for processing.
INITIALIZING = 1
# Request is actively being processed.
PROCESSING = 2
# Request is in the process of being cancelled after user called
# google.longrunning.Operations.CancelOperation on the operation.
CANCELLING = 3
# Request has been processed and is in its finalization stage.
FINALIZING = 4
# Request has completed successfully.
SUCCESSFUL = 5
# Request has finished being processed, but encountered an error.
FAILED = 6
# Request has finished being cancelled after user called
# google.longrunning.Operations.CancelOperation.
CANCELLED = 7
end
end
end
end
end
end
|
<filename>utils/index.js<gh_stars>0
const { promptUser } = require("../server");
const mysql = require("mysql2");
const inquirer = require("inquirer");
// create connection to database
const connection = mysql.createConnection({
  host: "localhost",
  port: 3306,
  user: "root",
  password: "<PASSWORD>!", // NOTE(review): hard-coded credential placeholder — load from env (e.g. process.env.DB_PASSWORD) before shipping
  database: "employees_db",
});

// Runs a query; optionally logs a message and/or prints the result set as a
// table, then returns control to the interactive prompt.
const dbQuery = (sql, params, msg, table) => {
  connection.query(sql, params, (err, res) => {
    if (err) {
      // NOTE(review): throwing inside the callback crashes the process;
      // consider reporting the error and re-prompting instead.
      throw err;
    }
    if (msg) {
      console.log(msg);
    }
    if (table) {
      console.table(res);
    }
    // promptUser is required from "../server" — circular dependency; confirm load order.
    promptUser();
  });
};

// Closes the MySQL connection (ends the CLI session).
const quit = () => connection.end();

module.exports = { connection, dbQuery, quit };
|
<gh_stars>0
"use strict";
import { OrderedSet, Map } from "immutable";
import * as handlebars from "handlebars";
import {
TypeKind,
Type,
EnumType,
UnionType,
ClassType,
matchType,
nullableFromUnion,
removeNullFromUnion,
directlyReachableSingleNamedType,
ClassProperty
} from "../Type";
import { TypeGraph } from "../TypeGraph";
import { Sourcelike, maybeAnnotated, modifySource } from "../Source";
import {
utf16LegalizeCharacters,
utf16StringEscape,
splitIntoWords,
combineWords,
firstUpperWordStyle,
camelCase
} from "../Strings";
import { intercalate, defined, assert, panic, StringMap } from "../Support";
import { Name, DependencyName, Namer, funPrefixNamer } from "../Naming";
import { ConvenienceRenderer, ForbiddenWordsInfo } from "../ConvenienceRenderer";
import { TargetLanguage } from "../TargetLanguage";
import { StringOption, EnumOption, Option, BooleanOption } from "../RendererOptions";
import { anyTypeIssueAnnotation, nullTypeIssueAnnotation } from "../Annotation";
import { StringTypeMapping } from "../TypeBuilder";
const unicode = require("unicode-properties");
export type Version = 5 | 6;
export type OutputFeatures = { helpers: boolean; attributes: boolean };
// Access modifier to emit on generated C# declarations.
export enum AccessModifier {
  None,
  Public,
  Internal
}
// C# target language: declares the renderer's CLI options and type mapping.
export default class CSharpTargetLanguage extends TargetLanguage {
  // Emit T[] or List<T> for JSON arrays.
  private readonly _listOption = new EnumOption("array-type", "Use T[] or List<T>", [
    ["array", false],
    ["list", true]
  ]);
  // Property density; "secondary" marks this as a less prominent CLI option.
  private readonly _denseOption = new EnumOption(
    "density",
    "Property density",
    [["normal", false], ["dense", true]],
    "normal",
    "secondary"
  );
  // How much scaffolding to emit: helpers + attributes, attributes only, or bare types.
  private readonly _featuresOption = new EnumOption("features", "Output features", [
    ["complete", { helpers: true, attributes: true }],
    ["attributes-only", { helpers: false, attributes: true }],
    ["just-types", { helpers: false, attributes: false }]
  ]);
  // FIXME: Do this via a configurable named eventually.
  private readonly _namespaceOption = new StringOption("namespace", "Generated namespace", "NAME", "QuickType");
  // Target C# language version (5 or 6), defaulting to 6.
  private readonly _versionOption = new EnumOption<Version>(
    "csharp-version",
    "C# version",
    [["5", 5], ["6", 6]],
    "6",
    "secondary"
  );
  // Whether generated code should fail when required properties are missing.
  private readonly _checkRequiredOption = new BooleanOption(
    "check-required",
    "Fail if required properties are missing",
    false
  );
  constructor() {
    super("C#", ["cs", "csharp"], "cs");
  }
  protected getOptions(): Option<any>[] {
    return [
      this._namespaceOption,
      this._versionOption,
      this._denseOption,
      this._listOption,
      this._featuresOption,
      this._checkRequiredOption
    ];
  }
  // The C# renderer treats date, time and date-time uniformly as "date-time".
  protected get partialStringTypeMapping(): Partial<StringTypeMapping> {
    return { date: "date-time", time: "date-time", dateTime: "date-time" };
  }
  get supportsUnionsWithBothNumberTypes(): boolean {
    return true;
  }
  get supportsOptionalClassProperties(): boolean {
    return true;
  }
  protected get rendererClass(): new (
    targetLanguage: TargetLanguage,
    graph: TypeGraph,
    leadingComments: string[] | undefined,
    ...optionValues: any[]
  ) => ConvenienceRenderer {
    return NewtonsoftCSharpRenderer;
  }
}
// Single namer shared by every C# identifier category below.
const namingFunction = funPrefixNamer("namer", csNameStyle);
// FIXME: Make a Named?
// Short attribute alias names used by the "dense" output mode.
const denseJsonPropertyName = "J";
const denseRequiredEnumName = "R";
const denseNullValueHandlingEnumName = "N";
/** A C# identifier may begin with a letter or an underscore (U+005F). */
function isStartCharacter(utf16Unit: number): boolean {
    return unicode.isAlphabetic(utf16Unit) || utf16Unit === 0x5f;
}
/**
 * Identifier continuation characters: anything that may start an identifier,
 * plus digits (Nd), connector punctuation (Pc) and combining marks (Mn/Mc).
 */
function isPartCharacter(utf16Unit: number): boolean {
    const continuationCategories = ["Nd", "Pc", "Mn", "Mc"];
    const category: string = unicode.getCategory(utf16Unit);
    return continuationCategories.indexOf(category) >= 0 || isStartCharacter(utf16Unit);
}
// Replaces characters that cannot appear in a C# identifier.
const legalizeName = utf16LegalizeCharacters(isPartCharacter);
// Converts an arbitrary JSON name to PascalCase, the C# convention for
// type, property and enum-case names.  All four word-style slots are
// first-upper, and words are joined without a separator.
function csNameStyle(original: string): string {
    const words = splitIntoWords(original);
    return combineWords(
        words,
        legalizeName,
        firstUpperWordStyle,
        firstUpperWordStyle,
        firstUpperWordStyle,
        firstUpperWordStyle,
        "",
        isStartCharacter
    );
}
/**
 * Whether `t` maps to a C# value type (needs `?` to be nullable).
 * A union counts as a value type only when it is a real union struct;
 * nullable unions collapse to their underlying type instead.
 */
function isValueType(t: Type): boolean {
    if (t instanceof UnionType) {
        return nullableFromUnion(t) === null;
    }
    const valueKinds = ["integer", "double", "bool", "enum", "date-time"];
    return valueKinds.indexOf(t.kind) !== -1;
}
/**
 * Base renderer for C# output: naming rules, the JSON-type -> C#-type
 * mapping, and class/union/enum definitions.  Serializer-specific output
 * (attributes, converters, helpers) is layered on by subclasses.
 */
export class CSharpRenderer extends ConvenienceRenderer {
    constructor(
        targetLanguage: TargetLanguage,
        graph: TypeGraph,
        leadingComments: string[] | undefined,
        protected readonly namespaceName: string,
        private readonly _version: Version,
        protected readonly dense: boolean,
        private readonly _useList: boolean
    ) {
        super(targetLanguage, graph, leadingComments);
    }
    // Identifiers generated top-level names must not collide with.
    protected forbiddenNamesForGlobalNamespace(): string[] {
        return ["QuickType", "Type", "System", "Console", "Exception"];
    }
    // A property may not shadow its class name or System.Object members.
    protected forbiddenForClassProperties(_: ClassType, classNamed: Name): ForbiddenWordsInfo {
        return {
            names: [
                classNamed,
                "ToString",
                "GetHashCode",
                "Finalize",
                "Equals",
                "GetType",
                "MemberwiseClone",
                "ReferenceEquals"
            ],
            includeGlobalForbidden: false
        };
    }
    protected forbiddenForUnionMembers(_: UnionType, unionNamed: Name): ForbiddenWordsInfo {
        return { names: [unionNamed], includeGlobalForbidden: true };
    }
    // All name kinds share the PascalCase namer defined above.
    protected makeNamedTypeNamer(): Namer {
        return namingFunction;
    }
    protected namerForClassProperty(): Namer {
        return namingFunction;
    }
    protected makeUnionMemberNamer(): Namer {
        return namingFunction;
    }
    protected makeEnumCaseNamer(): Namer {
        return namingFunction;
    }
    // Nullable unions collapse to `T?` and therefore need no generated name.
    protected unionNeedsName(u: UnionType): boolean {
        return nullableFromUnion(u) === null;
    }
    protected namedTypeToNameForTopLevel(type: Type): Type | undefined {
        // If the top-level type doesn't contain any classes or unions
        // we have to define a class just for the `FromJson` method, in
        // emitFromJsonForTopLevel.
        return directlyReachableSingleNamedType(type);
    }
    // Emits `{ ... }` around f's output; `semicolon` appends `;` (e.g. object initializers).
    protected emitBlock(f: () => void, semicolon: boolean = false): void {
        this.emitLine("{");
        this.indent(f);
        this.emitLine("}", semicolon ? ";" : "");
    }
    // Maps a quicktype Type to C# source.  `withIssues` attaches warning
    // annotations where "any"/"null" had to degrade to `object`.
    protected csType(t: Type, withIssues: boolean = false): Sourcelike {
        return matchType<Sourcelike>(
            t,
            _anyType => maybeAnnotated(withIssues, anyTypeIssueAnnotation, "object"),
            _nullType => maybeAnnotated(withIssues, nullTypeIssueAnnotation, "object"),
            _boolType => "bool",
            _integerType => "long",
            _doubleType => "double",
            _stringType => "string",
            arrayType => {
                const itemsType = this.csType(arrayType.items, withIssues);
                if (this._useList) {
                    return ["List<", itemsType, ">"];
                } else {
                    return [itemsType, "[]"];
                }
            },
            classType => this.nameForNamedType(classType),
            mapType => ["Dictionary<string, ", this.csType(mapType.values, withIssues), ">"],
            enumType => this.nameForNamedType(enumType),
            unionType => {
                const nullable = nullableFromUnion(unionType);
                if (nullable !== null) return this.nullableCSType(nullable);
                return this.nameForNamedType(unionType);
            },
            {
                dateTimeType: _ => "System.DateTimeOffset"
            }
        );
    }
    // Like csType, but value types get a trailing `?` so null is representable.
    protected nullableCSType(t: Type, withIssues: boolean = false): Sourcelike {
        const csType = this.csType(t, withIssues);
        if (isValueType(t)) {
            return [csType, "?"];
        } else {
            return csType;
        }
    }
    // Hook for subclasses/extensions; base C# types have no superclass.
    protected superclassForType(_t: Type): Sourcelike | undefined {
        return undefined;
    }
    // Emits a type declaration header (with optional access modifier,
    // description and base type) followed by the body produced by `emitter`.
    protected emitType(
        description: string[] | undefined,
        accessModifier: AccessModifier,
        declaration: Sourcelike,
        name: Sourcelike,
        superclass: Sourcelike | undefined,
        emitter: () => void
    ): void {
        switch (accessModifier) {
            case AccessModifier.Public:
                declaration = ["public ", declaration];
                break;
            case AccessModifier.Internal:
                declaration = ["internal ", declaration];
                break;
            default:
                break;
        }
        this.emitDescription(description);
        if (superclass === undefined) {
            this.emitLine(declaration, " ", name);
        } else {
            this.emitLine(declaration, " ", name, " : ", superclass);
        }
        this.emitBlock(emitter);
    }
    // Hook: attributes to put above a property; the base renderer emits none.
    protected attributesForProperty(_property: ClassProperty, _jsonName: string): Sourcelike[] | undefined {
        return undefined;
    }
    // XML-doc summary; collapsed to one line in dense mode.
    protected emitDescriptionBlock(lines: string[]): void {
        const start = "/// <summary>";
        if (this.dense) {
            this.emitLine(start, lines.join("; "), "</summary>");
        } else {
            this.emitCommentLines(lines, "/// ", start, "/// </summary>");
        }
    }
    protected blankLinesBetweenAttributes(): boolean {
        return false;
    }
    // Emits `public partial class X { ... }` with one auto-property per JSON
    // property.  In dense mode attributed properties are collected into
    // `columns` and emitted as one aligned table at the end.
    private emitClassDefinition(c: ClassType, className: Name): void {
        this.emitType(
            this.descriptionForType(c),
            AccessModifier.Public,
            "partial class",
            className,
            this.superclassForType(c),
            () => {
                if (c.properties.isEmpty()) return;
                const blankLines = this.blankLinesBetweenAttributes() ? "interposing" : "none";
                let columns: Sourcelike[][] = [];
                let isFirstProperty = true;
                let previousDescription: string[] | undefined = undefined;
                this.forEachClassProperty(c, blankLines, (name, jsonName, p) => {
                    const csType = p.isOptional ? this.nullableCSType(p.type, true) : this.csType(p.type, true);
                    const attributes = this.attributesForProperty(p, jsonName);
                    const description = this.descriptionForClassProperty(c, jsonName);
                    const property = ["public ", csType, " ", name, " { get; set; }"];
                    if (attributes === undefined) {
                        if (
                            // Descriptions should be preceded by an empty line
                            (!isFirstProperty && description !== undefined) ||
                            // If the previous property has a description, leave an empty line
                            previousDescription !== undefined
                        ) {
                            this.ensureBlankLine();
                        }
                        this.emitDescription(description);
                        this.emitLine(property);
                    } else if (this.dense && attributes.length > 0) {
                        const comment = description === undefined ? "" : ` // ${description.join("; ")}`;
                        columns.push([attributes, " ", property, comment]);
                    } else {
                        this.emitDescription(description);
                        for (const attribute of attributes) {
                            this.emitLine(attribute);
                        }
                        this.emitLine(property);
                    }
                    isFirstProperty = false;
                    previousDescription = description;
                });
                if (columns.length > 0) {
                    this.emitTable(columns);
                }
            }
        );
    }
    // Unions become partial structs with one nullable field per non-null member.
    private emitUnionDefinition(u: UnionType, unionName: Name): void {
        const nonNulls = removeNullFromUnion(u)[1];
        this.emitType(
            this.descriptionForType(u),
            AccessModifier.Public,
            "partial struct",
            unionName,
            this.superclassForType(u),
            () => {
                this.forEachUnionMember(u, nonNulls, "none", null, (fieldName, t) => {
                    const csType = this.nullableCSType(t);
                    this.emitLine("public ", csType, " ", fieldName, ";");
                });
            }
        );
    }
    // Enums are emitted on a single line: `public enum E { A, B };`.
    private emitEnumDefinition(e: EnumType, enumName: Name): void {
        const caseNames: Sourcelike[] = [];
        this.forEachEnumCase(e, "none", name => {
            if (caseNames.length > 0) caseNames.push(", ");
            caseNames.push(name);
        });
        this.emitDescription(this.descriptionForType(e));
        this.emitLine("public enum ", enumName, " { ", caseNames, " };");
    }
    // Emits `decl => expr;` on C# 6+, or a full body with `return` on C# 5,
    // which has no expression-bodied members.
    protected emitExpressionMember(declare: Sourcelike, define: Sourcelike): void {
        if (this._version === 5) {
            this.emitLine(declare);
            this.emitBlock(() => {
                this.emitLine("return ", define, ";");
            });
        } else {
            this.emitLine(declare, " => ", define, ";");
        }
    }
    // Emits a chain of `if (cond) ...` dispatches over `types`.  With
    // `withReturn`, each block ends in `return;` so the chain short-circuits.
    protected emitTypeSwitch<T extends Sourcelike>(
        types: OrderedSet<T>,
        condition: (t: T) => Sourcelike,
        withBlock: boolean,
        withReturn: boolean,
        f: (t: T) => void
    ): void {
        assert(!withReturn || withBlock, "Can only have return with block");
        types.forEach(t => {
            this.emitLine("if (", condition(t), ")");
            if (withBlock) {
                this.emitBlock(() => {
                    f(t);
                    if (withReturn) {
                        this.emitLine("return;");
                    }
                });
            } else {
                this.indent(() => f(t));
            }
        });
    }
    protected emitUsing(ns: Sourcelike): void {
        this.emitLine("using ", ns, ";");
    }
    protected emitUsings(): void {
        for (const ns of ["System", "System.Collections.Generic"]) {
            this.emitUsing(ns);
        }
    }
    // Hook: subclasses emit FromJson/ToJson/converter support here.
    protected emitRequiredHelpers(): void {
        return;
    }
    private emitTypesAndSupport(): void {
        this.forEachClass("leading-and-interposing", (c, name) => this.emitClassDefinition(c, name));
        this.forEachEnum("leading-and-interposing", (e, name) => this.emitEnumDefinition(e, name));
        this.forEachUnion("leading-and-interposing", (u, name) => this.emitUnionDefinition(u, name));
        this.emitRequiredHelpers();
    }
    // Hook: header comments when the user supplied none.
    protected emitDefaultLeadingComments(): void {
        return;
    }
    protected needNamespace(): boolean {
        return true;
    }
    // Top-level structure: leading comments, then (optionally) everything
    // wrapped in `namespace <name> { ... }`.
    protected emitSourceStructure(): void {
        if (this.leadingComments !== undefined) {
            this.emitCommentLines(this.leadingComments);
        } else {
            this.emitDefaultLeadingComments();
        }
        this.ensureBlankLine();
        if (this.needNamespace()) {
            this.emitLine("namespace ", this.namespaceName);
            this.emitBlock(() => {
                this.emitUsings();
                this.emitTypesAndSupport();
            });
        } else {
            this.emitUsings();
            this.emitTypesAndSupport();
        }
    }
    protected registerHandlebarsHelpers(context: StringMap): void {
        super.registerHandlebarsHelpers(context);
        handlebars.registerHelper("string_escape", utf16StringEscape);
    }
    protected makeHandlebarsContextForType(t: Type): StringMap {
        const ctx = super.makeHandlebarsContextForType(t);
        ctx.csType = this.sourcelikeToString(this.csType(t));
        return ctx;
    }
    protected makeHandlebarsContextForUnionMember(t: Type, name: Name): StringMap {
        const value = super.makeHandlebarsContextForUnionMember(t, name);
        value.nullableCSType = this.sourcelikeToString(this.nullableCSType(t));
        return value;
    }
}
/**
 * C# renderer that layers Newtonsoft.Json (Json.NET) support on top of
 * CSharpRenderer: [JsonProperty] attributes, enum/union converters, and
 * FromJson/ToJson convenience helpers.
 */
export class NewtonsoftCSharpRenderer extends CSharpRenderer {
    // Persistent map (factory call, not `new`; reassigned on every .set) from
    // an enum's Name to the Name of its generated "<enum>_extensions" class.
    private _enumExtensionsNames = Map<Name, Name>();
    private readonly _needHelpers: boolean;
    private readonly _needAttributes: boolean;
    constructor(
        targetLanguage: TargetLanguage,
        graph: TypeGraph,
        leadingComments: string[] | undefined,
        namespaceName: string,
        version: Version,
        dense: boolean,
        useList: boolean,
        outputFeatures: OutputFeatures,
        private readonly _checkRequiredProperties: boolean
    ) {
        super(targetLanguage, graph, leadingComments, namespaceName, version, dense, useList);
        this._needHelpers = outputFeatures.helpers;
        this._needAttributes = outputFeatures.attributes;
    }
    // Adds Json.NET identifiers (and the dense-mode aliases) to the base forbidden set.
    protected forbiddenNamesForGlobalNamespace(): string[] {
        const forbidden = [
            "Converter",
            "JsonConverter",
            "JsonSerializer",
            "JsonWriter",
            "JsonToken",
            "Serialize",
            "Newtonsoft",
            "MetadataPropertyHandling",
            "DateParseHandling",
            "FromJson",
            "Required"
        ];
        if (this.dense) {
            forbidden.push("J", "R", "N");
        }
        return super.forbiddenNamesForGlobalNamespace().concat(forbidden);
    }
    protected forbiddenForClassProperties(c: ClassType, className: Name): ForbiddenWordsInfo {
        const result = super.forbiddenForClassProperties(c, className);
        result.names = result.names.concat(["ToJson", "FromJson", "Required"]);
        return result;
    }
    // Every enum gets a dependent "<enum>_extensions" name for its helper class.
    protected makeNamedTypeDependencyNames(t: Type, name: Name): DependencyName[] {
        if (!(t instanceof EnumType)) return [];
        const extensionsName = new DependencyName(namingFunction, name.order, lookup => `${lookup(name)}_extensions`);
        this._enumExtensionsNames = this._enumExtensionsNames.set(name, extensionsName);
        return [extensionsName];
    }
    protected emitUsings(): void {
        // FIXME: We need System.Collections.Generic whenever we have maps or use List.
        if (!this._needAttributes && !this._needHelpers) return;
        super.emitUsings();
        this.ensureBlankLine();
        for (const ns of ["System.Globalization", "Newtonsoft.Json", "Newtonsoft.Json.Converters"]) {
            this.emitUsing(ns);
        }
        // Dense mode aliases the long attribute names to J/R/N (see module consts).
        if (this.dense) {
            this.emitUsing([denseJsonPropertyName, " = Newtonsoft.Json.JsonPropertyAttribute"]);
            this.emitUsing([denseRequiredEnumName, " = Newtonsoft.Json.Required"]);
            this.emitUsing([denseNullValueHandlingEnumName, " = Newtonsoft.Json.NullValueHandling"]);
        }
    }
    // Emits a usage example in comments at the top of the generated file.
    protected emitDefaultLeadingComments(): void {
        if (!this._needHelpers) return;
        this.emitLine(
            "// To parse this JSON data, add NuGet 'Newtonsoft.Json' then do",
            this.topLevels.size === 1 ? "" : " one of these",
            ":"
        );
        this.emitLine("//");
        this.emitLine("// using ", this.namespaceName, ";");
        this.emitLine("//");
        this.forEachTopLevel("none", (t, topLevelName) => {
            let rhs: Sourcelike;
            if (t instanceof EnumType) {
                rhs = ["JsonConvert.DeserializeObject<", topLevelName, ">(jsonString)"];
            } else {
                rhs = [topLevelName, ".FromJson(jsonString)"];
            }
            this.emitLine("// var ", modifySource(camelCase, topLevelName), " = ", rhs, ";");
        });
    }
    // Builds the [JsonProperty(...)] attribute for a property, choosing
    // Required.* and NullValueHandling.* from the optional/nullable matrix.
    protected attributesForProperty(property: ClassProperty, jsonName: string): Sourcelike[] | undefined {
        if (!this._needAttributes) return undefined;
        const t = property.type;
        const jsonProperty = this.dense ? denseJsonPropertyName : "JsonProperty";
        const escapedName = utf16StringEscape(jsonName);
        const isNullable = t.isNullable;
        const isOptional = property.isOptional;
        const requiredClass = this.dense ? "R" : "Required";
        const nullValueHandlingClass = this.dense ? "N" : "NullValueHandling";
        // Optional-but-not-nullable properties are skipped when null on serialization.
        const nullValueHandling =
            isOptional && !isNullable ? [", NullValueHandling = ", nullValueHandlingClass, ".Ignore"] : [];
        let required: Sourcelike;
        if (!this._checkRequiredProperties || (isOptional && isNullable)) {
            required = [nullValueHandling];
        } else if (isOptional && !isNullable) {
            required = [", Required = ", requiredClass, ".DisallowNull", nullValueHandling];
        } else if (!isOptional && isNullable) {
            required = [", Required = ", requiredClass, ".AllowNull"];
        } else {
            required = [", Required = ", requiredClass, ".Always", nullValueHandling];
        }
        return [["[", jsonProperty, '("', escapedName, '"', required, ")]"]];
    }
    protected blankLinesBetweenAttributes(): boolean {
        return this._needAttributes && !this.dense;
    }
    // Emits the partial type that carries the static FromJson(string) helper.
    private emitFromJsonForTopLevel(t: Type, name: Name): void {
        if (t instanceof EnumType) return;
        let partial: string;
        let typeKind: string;
        const definedType = this.namedTypeToNameForTopLevel(t);
        if (definedType !== undefined) {
            // Extend the already-defined type via `partial`.
            partial = "partial ";
            typeKind = definedType instanceof ClassType ? "class" : "struct";
        } else {
            // No named type reachable - define a fresh class just for FromJson.
            partial = "";
            typeKind = "class";
        }
        const csType = this.csType(t);
        this.emitType(undefined, AccessModifier.Public, [partial, typeKind], name, this.superclassForType(t), () => {
            // FIXME: Make FromJson a Named
            this.emitExpressionMember(
                ["public static ", csType, " FromJson(string json)"],
                ["JsonConvert.DeserializeObject<", csType, ">(json, ", this.namespaceName, ".Converter.Settings)"]
            );
        });
    }
    // Emits the static "<enum>_extensions" class: ValueForString, ReadJson, WriteJson.
    private emitEnumExtension(e: EnumType, enumName: Name): void {
        this.emitType(
            undefined,
            AccessModifier.None,
            "static class",
            defined(this._enumExtensionsNames.get(enumName)),
            this.superclassForType(e),
            () => {
                this.emitLine("public static ", enumName, "? ValueForString(string str)");
                this.emitBlock(() => {
                    this.emitLine("switch (str)");
                    this.emitBlock(() => {
                        this.forEachEnumCase(e, "none", (name, jsonName) => {
                            this.emitLine(
                                'case "',
                                utf16StringEscape(jsonName),
                                '": return ',
                                enumName,
                                ".",
                                name,
                                ";"
                            );
                        });
                        this.emitLine("default: return null;");
                    });
                });
                this.ensureBlankLine();
                this.emitLine("public static ", enumName, " ReadJson(JsonReader reader, JsonSerializer serializer)");
                this.emitBlock(() => {
                    this.emitLine("var str = serializer.Deserialize<string>(reader);");
                    this.emitLine("var maybeValue = ValueForString(str);");
                    this.emitLine("if (maybeValue.HasValue) return maybeValue.Value;");
                    this.emitLine('throw new Exception("Unknown enum case " + str);');
                });
                this.ensureBlankLine();
                this.emitLine(
                    "public static void WriteJson(this ",
                    enumName,
                    " value, JsonWriter writer, JsonSerializer serializer)"
                );
                this.emitBlock(() => {
                    this.emitLine("switch (value)");
                    this.emitBlock(() => {
                        this.forEachEnumCase(e, "none", (name, jsonName) => {
                            this.emitLine(
                                "case ",
                                enumName,
                                ".",
                                name,
                                ': serializer.Serialize(writer, "',
                                utf16StringEscape(jsonName),
                                '"); break;'
                            );
                        });
                    });
                });
            }
        );
    }
    // Emits the union struct's deserializing constructor and WriteJson, with a
    // case per JsonToken kind the union can absorb.
    private emitUnionJSONPartial(u: UnionType, unionName: Name): void {
        const tokenCase = (tokenType: string): void => {
            this.emitLine("case JsonToken.", tokenType, ":");
        };
        const emitNullDeserializer = (): void => {
            tokenCase("Null");
            this.indent(() => this.emitLine("return;"));
        };
        const emitDeserializeType = (t: Type): void => {
            this.emitLine(this.nameForUnionMember(u, t), " = serializer.Deserialize<", this.csType(t), ">(reader);");
            this.emitLine("return;");
        };
        const emitDeserializer = (tokenType: string, kind: TypeKind): void => {
            const t = u.findMember(kind);
            if (t === undefined) return;
            tokenCase(tokenType);
            this.indent(() => emitDeserializeType(t));
        };
        const emitDoubleDeserializer = (): void => {
            const t = u.findMember("double");
            if (t === undefined) return;
            // Integer tokens also fall through to double when no integer member exists.
            if (u.findMember("integer") === undefined) tokenCase("Integer");
            tokenCase("Float");
            this.indent(() => emitDeserializeType(t));
        };
        const emitStringDeserializer = (): void => {
            const stringTypes = u.stringTypeMembers;
            if (stringTypes.isEmpty()) return;
            tokenCase("String");
            tokenCase("Date");
            this.indent(() => {
                if (stringTypes.size === 1) {
                    emitDeserializeType(defined(stringTypes.first()));
                    return;
                }
                // Multiple string-ish members: try each in turn against the raw string.
                this.emitLine("var str = serializer.Deserialize<string>(reader);");
                this.forEachUnionMember(u, stringTypes, "none", null, (fieldName, t) => {
                    if (t instanceof EnumType) {
                        const extension = defined(this._enumExtensionsNames.get(this.nameForNamedType(t)));
                        this.emitLine("var maybeEnum = ", extension, ".ValueForString(str);");
                        this.emitLine("if (maybeEnum.HasValue)");
                        this.emitBlock(() => {
                            this.emitLine(fieldName, " = maybeEnum.Value;");
                            this.emitLine("return;");
                        });
                    } else if (t.kind === "date-time") {
                        this.emitLine("System.DateTimeOffset dt;");
                        // NOTE(review): stray space before ".TryParse" in the emitted
                        // C# - the C# compiler tolerates it, but it should be removed.
                        this.emitLine("if (System.DateTimeOffset .TryParse(str, out dt))");
                        this.emitBlock(() => {
                            this.emitLine(fieldName, " = dt;");
                            this.emitLine("return;");
                        });
                    } else {
                        return panic(`Unsupported string enum type ${t.kind}`);
                    }
                });
                this.emitLine("break;");
            });
        };
        const [hasNull, nonNulls] = removeNullFromUnion(u);
        this.emitType(undefined, AccessModifier.Public, "partial struct", unionName, this.superclassForType(u), () => {
            this.emitLine("public ", unionName, "(JsonReader reader, JsonSerializer serializer)");
            this.emitBlock(() => {
                this.forEachUnionMember(u, nonNulls, "none", null, (fieldName, _) => {
                    this.emitLine(fieldName, " = null;");
                });
                this.ensureBlankLine();
                this.emitLine("switch (reader.TokenType)");
                this.emitBlock(() => {
                    if (hasNull !== null) emitNullDeserializer();
                    emitDeserializer("Integer", "integer");
                    emitDoubleDeserializer();
                    emitDeserializer("Boolean", "bool");
                    emitDeserializer("StartArray", "array");
                    emitDeserializer("StartObject", "class");
                    emitDeserializer("StartObject", "map");
                    emitStringDeserializer();
                });
                this.emitLine('throw new Exception("Cannot convert ', unionName, '");');
            });
            this.ensureBlankLine();
            this.emitLine("public void WriteJson(JsonWriter writer, JsonSerializer serializer)");
            this.emitBlock(() => {
                // Serialize whichever member is set; first non-null field wins.
                this.forEachUnionMember(u, nonNulls, "none", null, (fieldName, _) => {
                    this.emitLine("if (", fieldName, " != null)");
                    this.emitBlock(() => {
                        this.emitLine("serializer.Serialize(writer, ", fieldName, ");");
                        this.emitLine("return;");
                    });
                });
                if (hasNull !== null) {
                    this.emitLine("writer.WriteNull();");
                } else {
                    this.emitLine('throw new Exception("Union must not be null");');
                }
            });
        });
    }
    // Emits the static Serialize class with a ToJson extension per top level.
    private emitSerializeClass(): void {
        // FIXME: Make Serialize a Named
        this.emitType(undefined, AccessModifier.Public, "static class", "Serialize", undefined, () => {
            this.topLevels.forEach((t: Type, _: string) => {
                // FIXME: Make ToJson a Named
                this.emitExpressionMember(
                    ["public static string ToJson(this ", this.csType(t), " self)"],
                    ["JsonConvert.SerializeObject(self, ", this.namespaceName, ".Converter.Settings)"]
                );
            });
        });
    }
    // Emits CanConvert/ReadJson/WriteJson covering every enum and named union.
    private emitConverterMembers(): void {
        const enumNames = this.enums.map(this.nameForNamedType);
        const unionNames = this.namedUnions.map(this.nameForNamedType);
        const allNames = enumNames.union(unionNames);
        const canConvertExprs = allNames.map((n: Name): Sourcelike => ["t == typeof(", n, ")"]);
        const nullableCanConvertExprs = allNames.map((n: Name): Sourcelike => ["t == typeof(", n, "?)"]);
        const canConvertExpr = intercalate(" || ", canConvertExprs.union(nullableCanConvertExprs));
        // FIXME: make Iterable<any, Sourcelike> a Sourcelike, too?
        this.emitExpressionMember("public override bool CanConvert(Type t)", canConvertExpr.toArray());
        this.ensureBlankLine();
        this.emitLine(
            "public override object ReadJson(JsonReader reader, Type t, object existingValue, JsonSerializer serializer)"
        );
        this.emitBlock(() => {
            this.emitTypeSwitch(enumNames, t => ["t == typeof(", t, ")"], false, false, n => {
                const extensionsName = defined(this._enumExtensionsNames.get(n));
                this.emitLine("return ", extensionsName, ".ReadJson(reader, serializer);");
            });
            // Nullable enum variant short-circuits on a null token.
            this.emitTypeSwitch(enumNames, t => ["t == typeof(", t, "?)"], true, false, n => {
                this.emitLine("if (reader.TokenType == JsonToken.Null) return null;");
                const extensionsName = defined(this._enumExtensionsNames.get(n));
                this.emitLine("return ", extensionsName, ".ReadJson(reader, serializer);");
            });
            // FIXME: call the constructor via reflection?
            this.emitTypeSwitch(unionNames, t => ["t == typeof(", t, ") || t == typeof(", t, "?)"], false, false, n => {
                this.emitLine("return new ", n, "(reader, serializer);");
            });
            this.emitLine('throw new Exception("Unknown type");');
        });
        this.ensureBlankLine();
        this.emitLine("public override void WriteJson(JsonWriter writer, object value, JsonSerializer serializer)");
        this.emitBlock(() => {
            this.emitLine("var t = value.GetType();");
            this.emitTypeSwitch(allNames, t => ["t == typeof(", t, ")"], true, true, n => {
                this.emitLine("((", n, ")value).WriteJson(writer, serializer);");
            });
            this.emitLine('throw new Exception("Unknown type");');
        });
    }
    // Emits the internal Converter class holding the shared JsonSerializerSettings.
    private emitConverterClass(): void {
        // Only derive from JsonConverter when there are enums/unions to convert.
        const jsonConverter = this.haveEnums || this.haveNamedUnions;
        // FIXME: Make Converter a Named
        let converterName: Sourcelike = ["Converter"];
        if (jsonConverter) converterName = converterName.concat([": JsonConverter"]);
        this.emitType(undefined, AccessModifier.Internal, "class", converterName, undefined, () => {
            if (jsonConverter) {
                this.emitConverterMembers();
                this.ensureBlankLine();
            }
            this.emitLine("public static readonly JsonSerializerSettings Settings = new JsonSerializerSettings");
            this.emitBlock(() => {
                this.emitLine("MetadataPropertyHandling = MetadataPropertyHandling.Ignore,");
                this.emitLine("DateParseHandling = DateParseHandling.None,");
                if (this.haveNamedUnions || this.haveEnums) {
                    this.emitMultiline(`Converters = {
                new Converter(),
                new IsoDateTimeConverter { DateTimeStyles = DateTimeStyles.AssumeUniversal }
            },`);
                } else {
                    this.emitMultiline(`Converters = {
                new IsoDateTimeConverter { DateTimeStyles = DateTimeStyles.AssumeUniversal }
            },`);
                }
            }, true);
        });
    }
    protected emitRequiredHelpers(): void {
        if (this._needHelpers) {
            this.forEachTopLevel("leading-and-interposing", (t, n) => this.emitFromJsonForTopLevel(t, n));
            this.forEachEnum("leading-and-interposing", (e, n) => this.emitEnumExtension(e, n));
            this.forEachUnion("leading-and-interposing", (u, n) => this.emitUnionJSONPartial(u, n));
            this.ensureBlankLine();
            this.emitSerializeClass();
        }
        // Attributes-only output still needs the converter when enums/unions exist.
        if (this._needHelpers || (this._needAttributes && (this.haveNamedUnions || this.haveEnums))) {
            this.ensureBlankLine();
            this.emitConverterClass();
        }
    }
    protected needNamespace(): boolean {
        return this._needHelpers || this._needAttributes;
    }
    protected makeHandlebarsContextForType(t: Type): StringMap {
        const ctx = super.makeHandlebarsContextForType(t);
        if (t.kind === "enum") {
            const name = this.nameForNamedType(t);
            ctx.extensionsName = defined(this.names.get(defined(this._enumExtensionsNames.get(name))));
        }
        return ctx;
    }
}
|
// repo: lgoldstein/communitychest
/*
*
*/
package net.community.chest.jfree.jfreechart.plot.thermometer;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import net.community.chest.util.collection.CollectionsUtils;
import org.jfree.chart.plot.ThermometerPlot;
/**
* <P>Copyright 2010 as per GPLv2</P>
*
* <P>An {@link Enum} that encapsulates the {@link ThermometerPlot#setUnits(int)} units</P>
* @author <NAME>.
* @since Jun 21, 2010 2:07:56 PM
*/
public enum ThermometerUnitValue {
    NONE(ThermometerPlot.UNITS_NONE, '\0'),
    CELSIUS(ThermometerPlot.UNITS_CELCIUS, 'C'),
    KELVIN(ThermometerPlot.UNITS_KELVIN, 'K'),
    // NOTE(review): constant name misspells "Fahrenheit"; renaming would break callers.
    FARENHEIT(ThermometerPlot.UNITS_FAHRENHEIT, 'F');
    // The ThermometerPlot.UNITS_* integer constant this value wraps.
    private final int _u;
    public final int getUnitValue ()
    {
        return _u;
    }
    // NOTE(review): public despite having a getter - should be private, but
    // narrowing the visibility could break existing callers.
    public final char _c;
    public final char getUnitChar ()
    {
        return _c;
    }
    ThermometerUnitValue (final int u, final char c)
    {
        _u = u;
        _c = c;
    }
    // Cached, unmodifiable view of values() to avoid re-cloning the array.
    public static final List<ThermometerUnitValue> VALUES=Collections.unmodifiableList(Arrays.asList(values()));
    // Case-insensitive lookup by enum name; returns null when not found.
    public static final ThermometerUnitValue fromString (final String s)
    {
        return CollectionsUtils.fromString(VALUES, s, false);
    }
    // Reverse lookup by ThermometerPlot.UNITS_* value; returns null when not found.
    public static final ThermometerUnitValue fromUnitValue (final int u)
    {
        for (final ThermometerUnitValue v : VALUES)
        {
            if ((v != null) && (v.getUnitValue() == u))
                return v;
        }
        return null;
    }
    // Case-insensitive reverse lookup by unit character; returns null when not found.
    public static final ThermometerUnitValue fromUnitChar (final char c)
    {
        final char vc=Character.toUpperCase(c);
        for (final ThermometerUnitValue v : VALUES)
        {
            if ((v != null) && (v.getUnitChar() == vc))
                return v;
        }
        return null;
    }
}
|
// scripts/build-data/search-index.ts
import glob from 'fast-glob';
import { outputJson, readJson } from 'fs-extra';
import { resolve } from 'path';
// Source directory of page-metadata JSON files, and the JSON file the
// generated search index is written to.
const PAGES_PATH = resolve(__dirname, '../../src/pages');
const INDEX_PATH = resolve(__dirname, '../../src/components/search/data/index.json');
// Listr-style task descriptor.
export default {
  title: 'Build search index',
  task: (): Promise<void> => buildIndex(PAGES_PATH),
  // NOTE(review): unconditionally skipped - confirm this is intentional.
  skip: (): true => true,
};
/**
 * Builds the search index: converts every page-metadata JSON file under
 * `dir` into a record and writes the array to INDEX_PATH (pretty-printed).
 */
const buildIndex = async (dir: string): Promise<void> => {
  const paths = await getPaths(dir);
  const records = await Promise.all(paths.map(toRecord));
  return outputJson(INDEX_PATH, records, { spaces: 2 });
};
/**
 * Converts one page-metadata JSON file (which must contain a `title`
 * field) into a search record with its docs href.
 */
const toRecord = async (
  path: string
): Promise<{ title: string; href: string; type: string }> => {
  const { title } = await readJson(path);
  const href = toHref(path);
  return {
    title,
    href,
    type: 'page',
  };
};
/** Lists all page-metadata JSON files under `cwd` as absolute paths. */
const getPaths = (cwd: string): Promise<string[]> => {
  return glob('**/*.json', {
    absolute: true,
    cwd,
  });
};
/**
 * Maps an absolute page-metadata path to its docs URL, e.g.
 * `/x/src/pages/a/b.json` -> `/docs/a/b`.
 * Previously an unmatched path silently produced `/docs/undefined`;
 * now it throws so bad inputs are caught at build time.
 */
const toHref = (path: string): string => {
  const match = /\/pages\/(.+)\.json$/.exec(path);
  if (match === null) {
    throw new Error(`Cannot derive href from path: ${path}`);
  }
  return `/docs/${match[1]}`;
};
|
// repo: xfys/lovetao
package com.inner.lovetao.dialog;
import android.content.Context;
import android.os.Bundle;
import android.view.Gravity;
import android.view.View;
import android.view.Window;
import android.view.WindowManager;
import com.inner.lovetao.R;
import androidx.appcompat.app.AppCompatDialog;
import butterknife.ButterKnife;
import butterknife.OnClick;
/**
 * Share dialog: bottom sheet offering WeChat, WeChat Moments, QQ and
 * Weibo share targets.
 * Created by xcz on 2019/2/28.
 */
public class ShareDialog extends AppCompatDialog {
    // NOTE(review): retained but not read after construction.
    private final Context context;
    // Callback invoked with the view id of the tapped share target.
    private ShareClick shareClick;
    public ShareDialog(Context context) {
        super(context, R.style.full_screen_dialog);
        this.context = context;
        requestWindowFeature(Window.FEATURE_NO_TITLE);
    }
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.dialog_share);
        ButterKnife.bind(this);
        // Anchor the dialog full-width at the bottom of the screen, dim the
        // background, and slide it up from the bottom.
        Window window = getWindow();
        WindowManager.LayoutParams p = window.getAttributes();
        p.width = WindowManager.LayoutParams.MATCH_PARENT;
        p.height = WindowManager.LayoutParams.WRAP_CONTENT;
        p.dimAmount = 0.5f;
        window.setGravity(Gravity.BOTTOM);
        window.getDecorView().setPadding(0, 0, 0, 0);
        window.setWindowAnimations(R.style.gbanker_dialog_anim_from_bottom_to_top);
        window.setAttributes(p);
    }
    // Forwards taps on any of the four share buttons to the registered callback.
    @OnClick({R.id.tv_share_wx, R.id.tv_share_wx_circle, R.id.tv_share_qq, R.id.tv_share_weibo})
    public void onClick(View view) {
        if (shareClick != null) {
            shareClick.share(view.getId());
        }
    }
    public ShareClick getShareClick() {
        return shareClick;
    }
    public void setShareClick(ShareClick shareClick) {
        this.shareClick = shareClick;
    }
    // Listener for share-target selection; receives the tapped view's id.
    public interface ShareClick {
        void share(int id);
    }
}
|
// repo: Darian1996/mercyblitz-gp-public
package com.darian.javai18ndemo.javase;
import java.util.ResourceBundle;
import java.util.spi.ResourceBundleControlProvider;
/**
 * {@link ResourceBundleControlProvider} intended for registration via the
 * JDK service-loader mechanism; supplies an encoding-aware
 * {@link ResourceBundle.Control} for resource-bundle loading.
 */
public class EncodingResourceBundleControlProvider implements ResourceBundleControlProvider {
    @Override
    public ResourceBundle.Control getControl(String baseName) {
        // Same control regardless of baseName, so this applies to every bundle.
        return new CustomerResourceBundleDemo.EncodedControl();
    }
}
|
#!/bin/sh
# Abort on the first failing command.
set -e
# Destination for resources inside the built product.
mkdir -p "${CONFIGURATION_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
# Manifest of plain files to rsync later; truncated here, appended to by install_resource.
RESOURCES_TO_COPY=${PODS_ROOT}/resources-to-copy-${TARGETNAME}.txt
> "$RESOURCES_TO_COPY"
# Asset catalogs are collected here and compiled in one actool run at the end.
XCASSET_FILES=()
# Resolves $1 to an absolute path by cd-ing into its directory component.
# NOTE(review): assumes $1 contains a slash; for a bare filename "${1%/*}"
# is the name itself, so the cd would not behave as intended.
realpath() {
  DIRECTORY="$(cd "${1%/*}" && pwd)"
  FILENAME="${1##*/}"
  echo "$DIRECTORY/$FILENAME"
}
# Dispatches one Pod resource ($1, relative to PODS_ROOT unless absolute) to
# the right tool: ibtool for IB files, momc/mapc for Core Data models,
# rsync for frameworks; asset catalogs are queued for actool and everything
# else is appended to the copy manifest.
install_resource()
{
  case $1 in
    *.storyboard)
      # Compile to .storyboardc in the product's resources folder.
      echo "ibtool --reference-external-strings-file --errors --warnings --notices --output-format human-readable-text --compile ${CONFIGURATION_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$1\" .storyboard`.storyboardc ${PODS_ROOT}/$1 --sdk ${SDKROOT}"
      ibtool --reference-external-strings-file --errors --warnings --notices --output-format human-readable-text --compile "${CONFIGURATION_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$1\" .storyboard`.storyboardc" "${PODS_ROOT}/$1" --sdk "${SDKROOT}"
      ;;
    *.xib)
      # Compile to .nib in the product's resources folder.
      echo "ibtool --reference-external-strings-file --errors --warnings --notices --output-format human-readable-text --compile ${CONFIGURATION_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$1\" .xib`.nib ${PODS_ROOT}/$1 --sdk ${SDKROOT}"
      ibtool --reference-external-strings-file --errors --warnings --notices --output-format human-readable-text --compile "${CONFIGURATION_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$1\" .xib`.nib" "${PODS_ROOT}/$1" --sdk "${SDKROOT}"
      ;;
    *.framework)
      echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
      mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
      echo "rsync -av ${PODS_ROOT}/$1 ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
      rsync -av "${PODS_ROOT}/$1" "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
      ;;
    *.xcdatamodel)
      # Compile a single Core Data model to .mom.
      echo "xcrun momc \"${PODS_ROOT}/$1\" \"${CONFIGURATION_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$1"`.mom\""
      xcrun momc "${PODS_ROOT}/$1" "${CONFIGURATION_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$1" .xcdatamodel`.mom"
      ;;
    *.xcdatamodeld)
      # Compile a versioned Core Data model to .momd.
      echo "xcrun momc \"${PODS_ROOT}/$1\" \"${CONFIGURATION_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$1" .xcdatamodeld`.momd\""
      xcrun momc "${PODS_ROOT}/$1" "${CONFIGURATION_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$1" .xcdatamodeld`.momd"
      ;;
    *.xcmappingmodel)
      # Compile a Core Data mapping model to .cdm.
      echo "xcrun mapc \"${PODS_ROOT}/$1\" \"${CONFIGURATION_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$1" .xcmappingmodel`.cdm\""
      xcrun mapc "${PODS_ROOT}/$1" "${CONFIGURATION_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$1" .xcmappingmodel`.cdm"
      ;;
    *.xcassets)
      # Queue for the single actool invocation at the end of the script.
      ABSOLUTE_XCASSET_FILE=$(realpath "${PODS_ROOT}/$1")
      XCASSET_FILES+=("$ABSOLUTE_XCASSET_FILE")
      ;;
    /*)
      # Absolute path: copy as-is.
      echo "$1"
      echo "$1" >> "$RESOURCES_TO_COPY"
      ;;
    *)
      # Any other resource: copy from PODS_ROOT.
      echo "${PODS_ROOT}/$1"
      echo "${PODS_ROOT}/$1" >> "$RESOURCES_TO_COPY"
      ;;
  esac
}
# The same FlatUIKit font resources ship in every build configuration.
for config in "Debug" "Release"; do
  if [[ "$CONFIGURATION" == "$config" ]]; then
    install_resource "FlatUIKit/Resources/flat-ui-icons-regular.ttf"
    install_resource "FlatUIKit/Resources/Lato-Bold.ttf"
    install_resource "FlatUIKit/Resources/Lato-Italic.ttf"
    install_resource "FlatUIKit/Resources/Lato-Light.ttf"
    install_resource "FlatUIKit/Resources/Lato-Regular.ttf"
  fi
done
# Copy all plain-file resources recorded in the manifest into the product.
mkdir -p "${CONFIGURATION_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
rsync -avr --copy-links --no-relative --exclude '*/.svn/*' --files-from="$RESOURCES_TO_COPY" / "${CONFIGURATION_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
# When archiving (install action), mirror the resources into the install dir too.
if [[ "${ACTION}" == "install" ]] && [[ "${SKIP_INSTALL}" == "NO" ]]; then
  mkdir -p "${INSTALL_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
  rsync -avr --copy-links --no-relative --exclude '*/.svn/*' --files-from="$RESOURCES_TO_COPY" / "${INSTALL_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
fi
rm -f "$RESOURCES_TO_COPY"
# Compile all collected asset catalogs in a single actool run.
# NOTE(review): the shebang says /bin/sh but arrays and [[ ]] are bash
# features; Xcode runs build phases with bash, so this works in practice.
if [[ -n "${WRAPPER_EXTENSION}" ]] && [ "`xcrun --find actool`" ] && [ -n "$XCASSET_FILES" ]
then
  case "${TARGETED_DEVICE_FAMILY}" in
    1,2)
      TARGET_DEVICE_ARGS="--target-device ipad --target-device iphone"
      ;;
    1)
      TARGET_DEVICE_ARGS="--target-device iphone"
      ;;
    2)
      TARGET_DEVICE_ARGS="--target-device ipad"
      ;;
    *)
      TARGET_DEVICE_ARGS="--target-device mac"
      ;;
  esac
  # Find all other xcassets (this unfortunately includes those of path pods and other targets).
  OTHER_XCASSETS=$(find "$PWD" -iname "*.xcassets" -type d)
  while read line; do
    # BUGFIX: the trailing '*' must stay outside the quotes so it acts as a
    # glob; the previous fully-quoted pattern compared against a literal '*'
    # and therefore never filtered out Pods-owned asset catalogs.
    if [[ $line != "$(realpath "$PODS_ROOT")"* ]]; then
      XCASSET_FILES+=("$line")
    fi
  done <<<"$OTHER_XCASSETS"
  printf "%s\0" "${XCASSET_FILES[@]}" | xargs -0 xcrun actool --output-format human-readable-text --notices --warnings --platform "${PLATFORM_NAME}" --minimum-deployment-target "${IPHONEOS_DEPLOYMENT_TARGET}" ${TARGET_DEVICE_ARGS} --compress-pngs --compile "${BUILT_PRODUCTS_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
fi
|
// api/v2alpha2/defaults_test.go
/*
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package v2alpha2_test
import (
"reflect"
v2alpha2 "github.com/iter8-tools/etc3/api/v2alpha2"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
corev1 "k8s.io/api/core/v1"
resource "k8s.io/apimachinery/pkg/api/resource"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
)
// Verifies the ordering relation between experiment lifecycle stages:
// Completed is after Running, Running is after Initializing, and the
// relation is not symmetric.
var _ = Describe("Stages", func() {
	Context("When stages are compared", func() {
		It("Evaluates the order correctly", func() {
			Expect(v2alpha2.ExperimentStageCompleted.After(v2alpha2.ExperimentStageRunning)).Should(BeTrue())
			Expect(v2alpha2.ExperimentStageRunning.After(v2alpha2.ExperimentStageInitializing)).Should(BeTrue())
			// Reversed comparison must be false (strict ordering).
			Expect(v2alpha2.ExperimentStageInitializing.After(v2alpha2.ExperimentStageRunning)).Should(BeFalse())
		})
	})
})
// Verifies that InitializeStatus populates the status sub-resource and
// that InitializeSpec makes the spec getters return package defaults.
var _ = Describe("Initialization", func() {
	Context("When initialize", func() {
		experiment := v2alpha2.NewExperiment("experiment", "namespace").
			WithTarget("target").
			WithTestingPattern(v2alpha2.TestingPatternCanary).
			WithRequestCount("request-count").
			Build()
		It("is initialized", func() {
			By("Initializing Status")
			experiment.InitializeStatus()
			By("Inspecting Status")
			// Timestamps, iteration counter, and the three standard
			// conditions must all be set after status initialization.
			Expect(experiment.Status.InitTime).ShouldNot(BeNil())
			Expect(experiment.Status.LastUpdateTime).ShouldNot(BeNil())
			Expect(experiment.Status.CompletedIterations).ShouldNot(BeNil())
			Expect(len(experiment.Status.Conditions)).Should(Equal(3))
			By("Initializing Spec")
			experiment.Spec.InitializeSpec()
			// Each getter should fall back to the corresponding default
			// declared in the v2alpha2 package.
			Expect(experiment.Spec.GetIterationsPerLoop()).Should(Equal(v2alpha2.DefaultIterationsPerLoop))
			Expect(experiment.Spec.GetMaxLoops()).Should(Equal(v2alpha2.DefaultMaxLoops))
			Expect(experiment.Spec.GetIntervalSeconds()).Should(Equal(int32(v2alpha2.DefaultIntervalSeconds)))
			Expect(experiment.Spec.GetMaxCandidateWeight()).Should(Equal(v2alpha2.DefaultMaxCandidateWeight))
			Expect(experiment.Spec.GetMaxCandidateWeightIncrement()).Should(Equal(v2alpha2.DefaultMaxCandidateWeightIncrement))
			Expect(experiment.Spec.GetDeploymentPattern()).Should(Equal(v2alpha2.DefaultDeploymentPattern))
			// NOTE(review): the two assertions below were disabled by a
			// previous author; unclear whether the behavior changed or the
			// checks were flaky — confirm before deleting them outright.
			// Expect(len(experiment.Status.Metrics)).Should(Equal(1))
			// removed Expect(*experiment.Spec.GetRequestCount()).Should(Equal("request-count"))
		})
	})
})
// Verifies GetNumberOfBaseline / GetNumberOfCandidates across every
// combination of zero or more baseline and candidate versions.
var _ = Describe("VersionInfo", func() {
	Context("When count versions", func() {
		builder := v2alpha2.NewExperiment("test", "default").WithTarget("target")
		It("should count correctly", func() {
			// No versions at all.
			experiment := builder.DeepCopy().Build()
			Expect(experiment.Spec.GetNumberOfBaseline()).Should(Equal(0))
			Expect(experiment.Spec.GetNumberOfCandidates()).Should(Equal(0))
			// Baseline only.
			experiment = builder.DeepCopy().
				WithBaselineVersion("baseline", nil).
				Build()
			Expect(experiment.Spec.GetNumberOfBaseline()).Should(Equal(1))
			Expect(experiment.Spec.GetNumberOfCandidates()).Should(Equal(0))
			// Candidate only.
			experiment = builder.DeepCopy().
				WithCandidateVersion("candidate", nil).
				Build()
			// NOTE(review): the baseline-count assertion below was commented
			// out by a previous author; confirm whether GetNumberOfBaseline
			// really returns 0 here before re-enabling or removing it.
			// Expect(experiment.Spec.GetNumberOfBaseline()).Should(Equal(0))
			Expect(experiment.Spec.GetNumberOfCandidates()).Should(Equal(1))
			// One baseline and one candidate.
			experiment = builder.DeepCopy().
				WithBaselineVersion("baseline", nil).
				WithCandidateVersion("candidate", nil).
				Build()
			Expect(experiment.Spec.GetNumberOfBaseline()).Should(Equal(1))
			Expect(experiment.Spec.GetNumberOfCandidates()).Should(Equal(1))
			// One baseline and two candidates.
			experiment = builder.DeepCopy().
				WithBaselineVersion("baseline", nil).
				WithCandidateVersion("candidate", nil).
				WithCandidateVersion("c", nil).
				Build()
			Expect(experiment.Spec.GetNumberOfBaseline()).Should(Equal(1))
			Expect(experiment.Spec.GetNumberOfCandidates()).Should(Equal(2))
		})
	})
})
// Verifies that Spec.Criteria is created lazily: nil until an indicator
// or reward is added, then populated accordingly.
var _ = Describe("Criteria", func() {
	Context("Criteria", func() {
		builder := v2alpha2.NewExperiment("test", "default").WithTarget("target")
		// FIX: this spec previously had an empty description (It("")),
		// which renders as an unnamed entry in test reports.
		It("creates criteria only when indicators or rewards are added", func() {
			// No criteria-related builder calls -> Criteria stays nil.
			experiment := builder.DeepCopy().Build()
			Expect(experiment.Spec.Criteria).Should(BeNil())
			// An indicator creates Criteria but adds no rewards.
			experiment = builder.DeepCopy().
				WithIndicator(*v2alpha2.NewMetric("metric", "default").Build()).
				Build()
			Expect(experiment.Spec.Criteria).ShouldNot(BeNil())
			Expect(experiment.Spec.Criteria.Rewards).Should(BeEmpty())
			// A reward creates Criteria with a non-empty rewards list.
			experiment = builder.DeepCopy().
				WithReward(*v2alpha2.NewMetric("metric", "default").WithJQExpression("expr").Build(), v2alpha2.PreferredDirectionHigher).
				Build()
			Expect(experiment.Spec.Criteria).ShouldNot(BeNil())
			Expect(experiment.Spec.Criteria.Rewards).ShouldNot(BeEmpty())
		})
	})
})
// Exercises the generated DeepCopy / DeepCopyObject code for Metric and
// Experiment (and their list types): a copy must be deeply equal to its
// original, and list copies must preserve length.
var _ = Describe("Generated Code", func() {
	Context("When a Metric object is copied", func() {
		Specify("the copy should be the same as the original", func() {
			metricBuilder := v2alpha2.NewMetric("reward", "default").
				WithDescription("reward metric").
				WithParams(map[string]string{"query": "query"}).
				WithProvider("prometheus").
				WithJQExpression("expr").
				WithType(v2alpha2.CounterMetricType).
				WithUnits("ms").
				WithSampleSize("sample/default")
			metric := metricBuilder.Build()
			// FIX: was `*&v2alpha2.MetricList{...}` — `*&x` is just `x`
			// (staticcheck SA4001).
			metricList := v2alpha2.MetricList{
				Items: []v2alpha2.Metric{*metric},
			}
			Expect(reflect.DeepEqual(metricBuilder, metricBuilder.DeepCopy())).Should(BeTrue())
			Expect(reflect.DeepEqual(metric, metric.DeepCopyObject())).Should(BeTrue())
			Expect(len(metricList.Items)).Should(Equal(len(metricList.DeepCopy().Items)))
		})
	})
	Context("When an Experiment object is copied", func() {
		Specify("the copy should be the same as the original", func() {
			// Build an experiment touching as many spec fields as possible so
			// the deep-copy is exercised on every branch.
			experimentBuilder := v2alpha2.NewExperiment("test", "default").
				WithTarget("copy").
				WithTestingPattern(v2alpha2.TestingPatternCanary).
				WithDeploymentPattern(v2alpha2.DeploymentPatternFixedSplit).
				WithDuration(3, 2, 1).
				WithBaselineVersion("baseline", nil).
				WithBaselineVersion("baseline", &corev1.ObjectReference{
					Kind:       "kind",
					Namespace:  "namespace",
					Name:       "name",
					APIVersion: "apiVersion",
					FieldPath:  "path",
				}).
				WithCandidateVersion("candidate", nil).WithCandidateVersion("candidate", nil).
				WithCurrentWeight("baseline", 25).WithCurrentWeight("candidate", 75).
				WithRecommendedWeight("baseline", 0).WithRecommendedWeight("candidate", 100).
				WithCurrentWeight("baseline", 30).WithRecommendedWeight("baseline", 10).
				WithCondition(v2alpha2.ExperimentConditionExperimentFailed, corev1.ConditionTrue, v2alpha2.ReasonHandlerFailed, "foo %s", "bar").
				WithAction("start", []v2alpha2.TaskSpec{{Task: "task"}}).
				WithRequestCount("request-count").
				WithReward(*v2alpha2.NewMetric("reward", "default").WithJQExpression("expr").Build(), v2alpha2.PreferredDirectionHigher).
				WithIndicator(*v2alpha2.NewMetric("indicator", "default").WithJQExpression("expr").Build()).
				WithObjective(*v2alpha2.NewMetric("reward", "default").WithJQExpression("expr").Build(), nil, nil, false)
			experiment := experimentBuilder.Build()
			now := metav1.Now()
			message := "message"
			winner := "winner"
			q := resource.Quantity{}
			ss := int32(1)
			// Populate the analysis section so the status sub-tree is copied too.
			experiment.Status.Analysis = &v2alpha2.Analysis{
				AggregatedMetrics: &v2alpha2.AggregatedMetricsAnalysis{
					AnalysisMetaData: v2alpha2.AnalysisMetaData{
						Provenance: "provenance",
						Timestamp:  now,
						Message:    &message,
					},
					Data: map[string]v2alpha2.AggregatedMetricsData{
						"metric1": {
							Max: &q,
							Min: &q,
							Data: map[string]v2alpha2.AggregatedMetricsVersionData{
								"metric": {
									Min:        &q,
									Max:        &q,
									Value:      &q,
									SampleSize: &ss,
								},
							},
						},
					},
				},
				WinnerAssessment: &v2alpha2.WinnerAssessmentAnalysis{
					AnalysisMetaData: v2alpha2.AnalysisMetaData{},
					Data: v2alpha2.WinnerAssessmentData{
						WinnerFound: true,
						Winner:      &winner,
					},
				},
				VersionAssessments: &v2alpha2.VersionAssessmentAnalysis{
					AnalysisMetaData: v2alpha2.AnalysisMetaData{},
					Data: map[string]v2alpha2.BooleanList{
						"baseline":  []bool{false},
						"candidate": []bool{false},
					},
				},
				Weights: &v2alpha2.WeightsAnalysis{
					AnalysisMetaData: v2alpha2.AnalysisMetaData{},
					Data: []v2alpha2.WeightData{
						{Name: "baseline", Value: 25},
						{Name: "candidate", Value: 75},
					},
				},
			}
			// FIX: `*&` removed here as well (SA4001).
			experimentList := v2alpha2.ExperimentList{
				Items: []v2alpha2.Experiment{*experiment},
			}
			Expect(reflect.DeepEqual(experimentBuilder, experimentBuilder.DeepCopy())).Should(BeTrue())
			Expect(reflect.DeepEqual(experiment, experiment.DeepCopyObject())).Should(BeTrue())
			Expect(len(experimentList.Items)).Should(Equal(len(experimentList.DeepCopy().Items)))
		})
	})
})
|
<filename>src/main/java/nl/pvanassen/steam/store/item/ItemService.java<gh_stars>10-100
/**
*
*/
package nl.pvanassen.steam.store.item;
import nl.pvanassen.steam.store.common.GenericHandle;
import nl.pvanassen.steam.store.common.Listing;
/**
 * Read-side service for store items: enumerates the item overview and
 * streams per-item data (historical datapoints, then current listings)
 * to caller-supplied {@link GenericHandle} callbacks.
 *
 * @author <NAME>
 */
public interface ItemService {
    /**
     * Streams every overview item to the supplied handle, one call per item.
     *
     * @param handle Handle overview item
     */
    void getAllItems(GenericHandle<OverviewItem> handle);

    /**
     * The datapoints will always be called first. Once they are done the
     * listings handle is called
     *
     * @param host Host to connect to
     * @param appId Appid of the item to get
     * @param urlName url name of the item to get
     * @param dataPointHandle If a datapoint is found this handle is called
     * @param listingHandle If all datapoints have been processed, the listings
     *            are handled through this call
     * @param buyOrders Callback telling if this item supports buy orders
     * @param immediateSale Immediate sales callback
     */
    void getItem(String host, int appId, String urlName, GenericHandle<StatDataPoint> dataPointHandle, GenericHandle<Listing> listingHandle, GenericHandle<Boolean> buyOrders, GenericHandle<Boolean> immediateSale);

    /**
     * Same as {@link #getItem(String, int, String, GenericHandle, GenericHandle, GenericHandle, GenericHandle)}
     * but without an explicit host. The datapoints will always be called
     * first. Once they are done the listings handle is called
     *
     * @param appId Appid of the item to get
     * @param urlName url name of the item to get
     * @param dataPointHandle If a datapoint is found this handle is called
     * @param listingHandle If all datapoints have been processed, the listings
     *            are handled through this call
     * @param buyOrders Callback telling if this item supports buy orders
     * @param immediateSale Immediate sales callback
     */
    void getItem(int appId, String urlName, GenericHandle<StatDataPoint> dataPointHandle, GenericHandle<Listing> listingHandle, GenericHandle<Boolean> buyOrders, GenericHandle<Boolean> immediateSale);
}
|
<gh_stars>10-100
#!/usr/bin/env python
# Copyright (C) 2013 Intel Corporation
#
# Released under the MIT license (see COPYING.MIT)
# test runner which support run testing on target device
"""test runner for target device"""
import sys
from optparse import make_option
from baserunner import TestRunnerBase
class TargetTestRunner(TestRunnerBase):
'''test runner which support target DUT access'''
def __init__(self, context=None):
super(TargetTestRunner, self).__init__(context)
self.option_list.extend([
make_option("-c", "--controller", dest="controller",
help="the target controller to bridge host and target")])
def _get_arg_val(self, dest_name, store_val=True):
'''get arg value from testrunner args'''
args = sys.argv
for opt in self.option_list:
if opt.dest == dest_name:
arg_names = opt._short_opts + opt._long_opts
break
else:
return None
for cur_arg in arg_names:
try:
ind = args.index(cur_arg)
return args[ind+1] if store_val else True
except:
pass
return None
def configure(self, options):
'''configure before testing'''
super(TargetTestRunner, self).configure(options)
print "configure target test runner"
if __name__ == "__main__":
    # Stand-alone entry point: build the runner, apply CLI options,
    # load and filter the test suite, then execute it.
    runner = TargetTestRunner()
    runner.configure(runner.get_options())
    suite = runner.filtertest(runner.loadtest())
    print "Found %s tests" % suite.countTestCases()
    runner.start(suite)
|
def bubble_sort(lst):
    """Sort ``lst`` in place, ascending, using bubble sort.

    FIX: the previous version recursed on ``lst[:n-1]`` — a *slice copy* —
    so only the first pass ever affected the caller's list and the result
    was left unsorted. This version performs all passes on ``lst`` itself.
    """
    n = len(lst)
    for i in range(n - 1):
        swapped = False
        # After pass i, the last i elements are already in their final place.
        for j in range(n - 1 - i):
            if lst[j] > lst[j + 1]:
                lst[j], lst[j + 1] = lst[j + 1], lst[j]
                swapped = True
        if not swapped:
            # No swaps in a full pass: the list is sorted — stop early.
            break
# Quick demo: sort a sample list in place and show the result.
sample = [5, 2, 7, 3, 4, 9, 1]
bubble_sort(sample)
print(sample)
<reponame>swift-fly-ai/preseter-frontend<filename>src/presenter/scenario-controller.js
// import ChooserView from '../view/chooser-view';
import _ from 'lodash';
import ContainerView from '../view/container-view';
import CustomAddButtonView from '../view/custom-add-button-view';
import CustomCloseButtonView from '../view/custom-close-button-view';
import WaitModalView from '../view/wait-modal-view';
import ScenarioStructurePresenter from './scenario-structure';
/**
 * Presenter for the scenario-management panel: renders the list of
 * scenarios, routes toolbar/button clicks to the API, and re-renders
 * whenever the scenario model notifies its observers.
 */
export default class ScenarioControllerPresenter {
  constructor({container, scenarioModel, api}) {
    this._container = container;
    this._scenarioModel = scenarioModel;
    this._api = api;
    this._scenarios = null;
    this._view = null;
    this._buttonsContainer = null;
    this._toggleButtonsContainer = null;
    this._closeButton = null;
    // Bind every handler once so they can be passed around as callbacks.
    this._handleAddScenarioClick = this._handleAddScenarioClick.bind(this);
    this._handleSaveChangesClick = this._handleSaveChangesClick.bind(this);
    this._handleCloseButtonClick = this._handleCloseButtonClick.bind(this);
    this._handleToggleButtonClick = this._handleToggleButtonClick.bind(this);
    this._handleDeleteScenarioButtonClick = this._handleDeleteScenarioButtonClick.bind(this);
    this._handleCloneScenarioButtonClick = this._handleCloneScenarioButtonClick.bind(this);
    this._handleAddCarrierButtonClick = this._handleAddCarrierButtonClick.bind(this);
    this._handleDeleteCarrierButtonClick = this._handleDeleteCarrierButtonClick.bind(this);
    this._handleCloneCarrierButtonClick = this._handleCloneCarrierButtonClick.bind(this);
    this._handleScenarioModelEvent = this._handleScenarioModelEvent.bind(this);
    this._createViewElements = this._createViewElements.bind(this);
    this._scenarioPresenters = {};
    this._scenarios = {};
    this._choosedScenarioPresenter = null;
    this._scenarioModel.addObserver(this._handleScenarioModelEvent);
  }

  /** Lazily builds the root view and its button containers (idempotent). */
  _createViewElements() {
    if(this._view) {
      return;
    }
    this._view = new ContainerView({
      container: this._container,
      classList: 'scenario-controller'
    });
    this._buttonsContainer = new ContainerView({
      container: this._view,
      classList: 'scenario-controller-buttons'
    });
    this._toggleButtonsContainer = new ContainerView({
      container: this._view,
      classList: 'scenario-controller-item-buttons'
    });
    this._buttonsContainer.add(new CustomAddButtonView({
      container: this._buttonsContainer,
      title: 'Добавить сценарий',
      classList: 'scenario-controller-buttons-add',
      clickCallback: this._handleAddScenarioClick
    }));
    this._closeButton = new CustomCloseButtonView({
      container: this._view,
      classList: 'scenario-controller-close-button',
      clickCallback: this._handleCloseButtonClick
    });
  }

  /**
   * Model observer: rebuilds the per-scenario presenters from a deep copy
   * of the model state, keeps the current selection if it still exists,
   * then re-renders.
   */
  _handleScenarioModelEvent() {
    this._createViewElements();
    this._scenarios = _.cloneDeep(this._scenarioModel.getScenarios());
    this._scenarioPresenters = {};
    Object.values(this._scenarios).forEach((scenario) => {
      this._scenarioPresenters[scenario.name] = new ScenarioStructurePresenter({
        container: this._view,
        scenario,
        callButtonContainer: this._toggleButtonsContainer,
        toggleButtonClickCallback: this._handleToggleButtonClick,
        addCarrierClickCallback: this._handleAddCarrierButtonClick,
        deleteCarrierClickCallback: this._handleDeleteCarrierButtonClick,
        cloneCarrierClickCallback: this._handleCloneCarrierButtonClick,
        deleteScenarioClickCallback: this._handleDeleteScenarioButtonClick,
        cloneScenarioClickCallback: this._handleCloneScenarioButtonClick,
        saveChangesClickCallback: this._handleSaveChangesClick
      });
    });
    if(this._choosedScenarioPresenter) {
      if(this._choosedScenarioPresenter.getScenario().name in this._scenarioPresenters) {
        // Same scenario still present: re-point at its fresh presenter.
        this._choosedScenarioPresenter = this._scenarioPresenters[this._choosedScenarioPresenter.getScenario().name];
      } else if(Object.values(this._scenarioPresenters).length){
        // Selected scenario vanished: fall back to the first one.
        this._choosedScenarioPresenter = Object.values(this._scenarioPresenters)[0];
      } else {
        this._choosedScenarioPresenter = null;
      }
    }
    this.init();
  }

  /**
   * Persists pending title edits in order: profiles, then carriers, then
   * the scenario itself, mirroring each successful API call into the model.
   */
  _handleSaveChangesClick(iptr) {
    const scenarioChanges = iptr.getScenarioChanges();
    const carrierChanges = iptr.getCarrierChanges();
    const profileChanges = iptr.getProfileChanges();
    Promise.all(profileChanges.map((change) => {
      return this._api.updateProfileTitle(change).then((response) => {
        this._scenarioModel.updateProfileTitle(change);
        return response;
      });
    })).then(() => {
      // BUG FIX: this inner Promise.all must be returned — without the
      // `return`, the scenario-title update below raced ahead of the
      // carrier-title updates instead of waiting for them.
      return Promise.all(carrierChanges.map((change) => {
        return this._api.updateCarrierTitle(change).then((response) => {
          this._scenarioModel.updateCarrierTitle(change);
          return response;
        });
      }));
    }).then(() => {
      return scenarioChanges === null ?
        Promise.resolve()
        : this._api.updateScenarioTitle(scenarioChanges).then((response) => {
          this._scenarioModel.updateScenarioTitle(scenarioChanges);
          return response;
        });
    });
  }

  // Pushes every carrier of `scenario` to the API (bulk create).
  _flushCarriers(scenario) {
    return Promise.all(Object.values(scenario.carriers).map((carrier) => {
      return this._api.createCarrier({
        scenarioName: scenario.name,
        carrierName: carrier.name,
        carrierTitle: carrier.title,
      });
    }));
  }

  // Pushes every profile of `carrier` to the API (bulk create).
  _flushProfiles(scenario, carrier) {
    return Promise.all(Object.values(carrier.profiles).map((profile, i) => {
      return this._api.createProfile({
        scenarioName: scenario.name,
        carrierName: carrier.name,
        profileIndex: i,
        profileTitle: profile.title,
      });
    }));
  }

  _handleToggleButtonClick(iptr) {
    this._choosedScenarioPresenter = iptr;
    this.init();
  }

  _handleAddScenarioClick() {
    const wait = new WaitModalView();
    wait.show();
    this._api.createScenario()
      .then((response) => {
        return this._api.fetchScenario(response.scenarioName);
      }).then((scenario) => {
        this._scenarioModel.addScenario(scenario);
      })
      .finally(() => {
        wait.close();
      });
  }

  _handleCloneScenarioButtonClick(iptr) {
    const wait = new WaitModalView();
    wait.show();
    this._api.cloneScenario(iptr.getScenario().name)
      .then((response) => {
        return this._api.fetchScenario(response.scenarioName);
      }).then((scenario) => {
        this._scenarioModel.addScenario(scenario);
      })
      .finally(() => {
        wait.close();
      });
  }

  _handleDeleteScenarioButtonClick(iptr) {
    const wait = new WaitModalView();
    wait.show();
    this._api.deleteScenario(iptr.getScenario().name)
      .then(() => {
        this._scenarioModel.deleteScenario(iptr.getScenario().name);
      })
      .finally(() => {
        wait.close();
      });
  }

  _handleAddCarrierButtonClick(iptr) {
    const wait = new WaitModalView();
    wait.show();
    this._api.createCarrier(iptr.getScenario().name)
      .then(() => {
        return this._api.fetchScenario(iptr.getScenario().name);
      }).then((scenario) => {
        this._scenarioModel.addScenario(scenario);
      })
      .finally(() => {
        wait.close();
      });
  }

  _handleDeleteCarrierButtonClick(iptr, carrierName) {
    const wait = new WaitModalView();
    wait.show();
    this._api.deleteCarrier(iptr.getScenario().name, carrierName)
      .then(() => {
        this._scenarioModel.deleteCarrier(iptr.getScenario().name, carrierName);
      })
      .finally(() => {
        wait.close();
      });
  }

  _handleCloneCarrierButtonClick(iptr, carrierName) {
    const wait = new WaitModalView();
    wait.show();
    this._api.cloneCarrier(iptr.getScenario().name, carrierName)
      .then(() => {
        return this._api.fetchScenario(iptr.getScenario().name);
      }).then((scenario) => {
        this._scenarioModel.addScenario(scenario);
      })
      .finally(() => {
        wait.close();
      });
  }

  _handleCloseButtonClick() {
    // Discard unsaved view state by re-syncing from the model, then hide.
    this._handleScenarioModelEvent();
    this.open(false);
  }

  /** Slides the panel in (default) or out of view. */
  open(isOpened = true) {
    if(this._view === null) {
      return;
    }
    this._view.getElement().style.opacity = isOpened ? '1' : '0';
    this._view.getElement().style.transform = isOpened ? 'translateX(0)' : 'translateX(100%)';
  }

  /** (Re)renders the panel: buttons, toggle list, and the selected scenario. */
  init() {
    if(this._view === null) {
      this._createViewElements();
    }
    this._view.clear();
    this._toggleButtonsContainer.clear();
    this._view.add(this._buttonsContainer);
    this._view.add(this._toggleButtonsContainer);
    this._view.add(this._closeButton);
    Object.values(this._scenarioPresenters).forEach((presenter) => {
      this._toggleButtonsContainer.add(presenter.getToggleButton());
      presenter.select(false);
    });
    if(this._choosedScenarioPresenter === null && Object.values(this._scenarioPresenters).length) {
      this._choosedScenarioPresenter = Object.values(this._scenarioPresenters)[0];
    }
    if(this._choosedScenarioPresenter) {
      this._choosedScenarioPresenter.select();
      this._view.add(this._choosedScenarioPresenter.getView());
    }
  }
}
#!/bin/bash
# Install and start Prometheus node_exporter v0.16.0 as a systemd service
# running under a dedicated unprivileged system user.
#
# FIX: the script previously had no error handling, so a failed download
# still proceeded to tar/cp with stale or missing files.
set -euo pipefail

# Create the service user only if it does not already exist (idempotent re-runs).
id node_exporter >/dev/null 2>&1 || sudo useradd --no-create-home --shell /bin/false node_exporter
curl -LO https://github.com/prometheus/node_exporter/releases/download/v0.16.0/node_exporter-0.16.0.linux-amd64.tar.gz
tar xvf node_exporter-0.16.0.linux-amd64.tar.gz
sudo cp node_exporter-0.16.0.linux-amd64/node_exporter /usr/local/bin
sudo chown node_exporter:node_exporter /usr/local/bin/node_exporter
# Clean up the downloaded archive and extracted directory.
rm -rf node_exporter-0.16.0.linux-amd64.tar.gz node_exporter-0.16.0.linux-amd64/
sudo cp node_exporter.service /etc/systemd/system/
sudo systemctl daemon-reload
sudo systemctl start node_exporter
# `status` exits non-zero when the unit is not active; don't abort the
# script (set -e) before the reminder below is printed.
sudo systemctl status node_exporter --no-pager || true
echo "Remember to allow access to metrics.testnet.hathor.network at port 9100"
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.