text stringlengths 1 1.05M |
|---|
//
// BraceletReceiveDataDelegate.h
// LSDeviceManagerFrameworkTests
//
// Created by wm on 2020/9/16.
// Copyright © 2020 <NAME>. All rights reserved.
//
#import <Foundation/Foundation.h>
#import "BraceletReceiveDataState.h"
NS_ASSUME_NONNULL_BEGIN
@protocol BraceletReceiveDataDelegate <NSObject>
/// Called when a measurement from the bracelet is received.
/// @param data The measurement model; its concrete class depends on `type`.
/// @param type Enum value describing which kind of measurement `data` carries.
- (void)onReceiveBraceletData:(id)data dataType:(BraceletReceiveDataType)type;
/// Called when a real-time heart-rate sample is received.
/// @param realTimeHRData The real-time heart-rate sample.
/// @param device Information about the device that produced the sample.
- (void)onReceiveRealTimeHeartRateData:(LSURealTimeHRData *)realTimeHRData targetDeviceInfo:(LSDeviceInfo *)device;
/// Called when the device's battery/status changes.
/// @param status The current device status.
- (void)onDeviceStatusChange:(DeviceStatus *)status;
@end
NS_ASSUME_NONNULL_END
|
<filename>src/client/app/statisch/signaturen.component.ts
/**
* Created by <NAME> (<EMAIL>) on 6/7/17.
*/
import { Component } from '@angular/core';
// Static page component: overview of the lyric estate ("Der Lyrik-Nachlass").
@Component({
  moduleId: module.id,
  selector: 'rae-signaturen',
  templateUrl: 'signaturen.component.html'
})
export class SignaturenComponent {
  // Page heading rendered by the template.
  title = 'Der Lyrik-Nachlass (Übersicht)';
}
|
<gh_stars>0
import React, { Component } from "react";
import "../style/Tip.css";
// Compiled output (see sourceMappingURL below) of the Tip component: a
// two-state widget that first shows a compact "Add highlight" button and,
// once opened, a small form whose submission confirms the highlight.
export class Tip extends Component {
    constructor() {
        super(...arguments);
        // compact: true renders the collapsed button, false renders the form.
        this.state = {
            compact: true,
            text: "",
            emoji: "",
        };
    }
    // for TipContainer
    // Notifies the parent whenever the compact flag flipped so it can
    // re-measure/re-position the tip.
    // NOTE(review): React passes the *previous* props/state to
    // componentDidUpdate; the comparison is correct, but the parameter
    // names ("next...") are misleading.
    componentDidUpdate(nextProps, nextState) {
        const { onUpdate } = this.props;
        if (onUpdate && this.state.compact !== nextState.compact) {
            onUpdate();
        }
    }
    render() {
        const { onConfirm, onOpen } = this.props;
        const { compact, text, emoji } = this.state;
        // Compact mode: a click calls onOpen and expands; expanded mode:
        // submitting the form calls onConfirm with the current text/emoji.
        return (React.createElement("div", { className: "Tip" }, compact ? (React.createElement("div", { className: "Tip__compact", onClick: () => {
            onOpen();
            this.setState({ compact: false });
        } }, "Add highlight")) : (React.createElement("form", { className: "Tip__card", onSubmit: (event) => {
            event.preventDefault();
            onConfirm({ text, emoji });
        } },
            React.createElement("div", null,
                React.createElement("input", { type: "submit", value: "Save" }))))));
    }
}
export default Tip;
//# sourceMappingURL=Tip.js.map |
# Shift the HA elastic IP to this instance when it is the primary or
# secondary node of a two-box cluster and the EIP ($1) is not already ours.
if [[ $(head -1 /opt/phion/run/server.ctrl | cut -d " " -f 3) =~ ^(primary|secondary)$ ]]; then
  # This instance's EC2 id and the public IP currently associated with it.
  MY_ID=$(/usr/bin/curl -s http://169.254.169.254/latest/meta-data/instance-id)
  MY_IP=$(/opt/aws/bin/aws ec2 describe-instances --instance-id "$MY_ID" --output text | grep ASSOC | head -1 | cut -d $' ' -f 4)
  # Number of boxes in the cluster ("server show" lists them after "Boxes").
  CLUSTER_CNT=$(phionctrl server show | grep Boxes | xargs | tr " " "\n" | grep -v Boxes | wc -l)
  # Quote expansions and use the portable "=" comparison; the original used
  # unquoted $MY_ID/$CLUSTER_CNT and the non-POSIX "==" inside [ ].
  if [ "_$1" != "_$MY_IP" ] && [ "$CLUSTER_CNT" = "2" ]; then
    /opt/phion/hooks/ha/aws-shift-eip.sh HA-START
  fi
fi
|
package bep3
import (
"context"
"encoding/json"
"fmt"
"math/rand"
"github.com/cosmos/cosmos-sdk/client"
"github.com/cosmos/cosmos-sdk/codec"
codectypes "github.com/cosmos/cosmos-sdk/codec/types"
sdk "github.com/cosmos/cosmos-sdk/types"
"github.com/cosmos/cosmos-sdk/types/module"
sdksim "github.com/cosmos/cosmos-sdk/types/simulation"
"github.com/e-money/bep3/module/client/cli"
"github.com/e-money/bep3/module/client/rest"
"github.com/e-money/bep3/module/keeper"
bep3types "github.com/e-money/bep3/module/types"
"github.com/gorilla/mux"
"github.com/grpc-ecosystem/grpc-gateway/runtime"
"github.com/spf13/cobra"
abci "github.com/tendermint/tendermint/abci/types"
)
// Compile-time interface assertions: the build fails if AppModule or
// AppModuleBasic stop satisfying the module interfaces they are wired into.
var (
	_ module.AppModule           = AppModule{}
	_ module.AppModuleBasic      = AppModuleBasic{}
	_ module.AppModuleSimulation = AppModule{}
)
// AppModuleBasic defines the basic application module used by the bep3 module.
type AppModuleBasic struct {
	// cdc is assigned in RegisterLegacyAminoCodec and read by RegisterStoreDecoder.
	cdc *codec.LegacyAmino
}
// Name returns the bep3 module's name (the package-level ModuleName).
func (AppModuleBasic) Name() string {
	return ModuleName
}
// RegisterLegacyAminoCodec registers the bep3 module's types for Amino.
// FIXME(review): the receiver is a value, so the `ab.cdc = cdc` assignment
// below is discarded when this method returns; RegisterStoreDecoder may later
// read a nil cdc. Confirm whether a pointer receiver (and matching interface
// assertions) was intended.
func (ab AppModuleBasic) RegisterLegacyAminoCodec(cdc *codec.LegacyAmino) {
	bep3types.RegisterLegacyAminoCodec(cdc)
	ab.cdc = cdc
}
// RegisterInterfaces registers the bep3 module's protobuf interface types.
func (AppModuleBasic) RegisterInterfaces(registry codectypes.InterfaceRegistry) {
	bep3types.RegisterInterfaces(registry)
}
// DefaultGenesis returns default genesis state as raw bytes for the bep3
// module. Panics (MustMarshalJSON) only if the default state cannot be
// marshalled, which indicates a programming error.
func (AppModuleBasic) DefaultGenesis(cdc codec.JSONMarshaler) json.RawMessage {
	return cdc.MustMarshalJSON(DefaultGenesisState())
}
// ValidateGenesis performs genesis state validation for the bep3 module:
// it unmarshals bz into a GenesisState and runs its Validate checks.
func (AppModuleBasic) ValidateGenesis(cdc codec.JSONMarshaler, config client.TxEncodingConfig, bz json.RawMessage) error {
	var gs GenesisState
	if err := cdc.UnmarshalJSON(bz, &gs); err != nil {
		return err
	}
	return gs.Validate()
}
// RegisterRESTRoutes registers the REST routes for the bep3 module on the
// given router.
func (AppModuleBasic) RegisterRESTRoutes(ctx client.Context, rtr *mux.Router) {
	rest.RegisterRoutes(ctx, rtr)
}
// RegisterGRPCGatewayRoutes wires the bep3 query service into the gRPC
// gateway mux; registration failure is unrecoverable, hence the panic.
func (AppModuleBasic) RegisterGRPCGatewayRoutes(clientCtx client.Context, mux *runtime.ServeMux) {
	queryClient := bep3types.NewQueryClient(clientCtx)
	if err := bep3types.RegisterQueryHandlerClient(context.Background(), mux, queryClient); err != nil {
		panic(fmt.Errorf("Cannot register Bep3 gRPC http end points:%w", err))
	}
}
// GetTxCmd returns the root tx command for the bep3 module, delegating to
// the cli package.
func (AppModuleBasic) GetTxCmd() *cobra.Command {
	return cli.GetTxCmd()
}
// GetQueryCmd returns the root query command for the bep3 module.
// (The previous comment said "no root query command", which contradicted
// the implementation below.)
func (AppModuleBasic) GetQueryCmd() *cobra.Command {
	return cli.GetQueryCmd()
}
//____________________________________________________________________________
// AppModule implements the sdk.AppModule interface.
type AppModule struct {
	AppModuleBasic
	// keeper accesses module state; accountKeeper and bankKeeper are used by
	// genesis initialization and the simulation operations.
	keeper        Keeper
	accountKeeper bep3types.AccountKeeper
	bankKeeper    bep3types.BankKeeper
}
// NewAppModule creates a new AppModule object wrapping the given keepers.
func NewAppModule(keeper Keeper, accountKeeper bep3types.AccountKeeper, bankKeeper bep3types.BankKeeper) AppModule {
	return AppModule{
		AppModuleBasic: AppModuleBasic{},
		keeper:         keeper,
		accountKeeper:  accountKeeper,
		bankKeeper:     bankKeeper,
	}
}
// Name returns the bep3 module's name (same as AppModuleBasic.Name).
func (AppModule) Name() string {
	return ModuleName
}
// RegisterInvariants registers the bep3 module invariants. The module
// currently defines none, so this is a no-op.
func (am AppModule) RegisterInvariants(_ sdk.InvariantRegistry) {}
// Route returns the message routing key for the bep3 module together with
// its message handler.
func (am AppModule) Route() sdk.Route {
	return sdk.NewRoute(bep3types.RouterKey, NewHandler(am.keeper))
}
// NewHandler returns an sdk.Handler for the bep3 module, bound to the
// module's keeper.
func (am AppModule) NewHandler() sdk.Handler {
	return NewHandler(am.keeper)
}
// QuerierRoute returns the bep3 module's querier route name.
func (AppModule) QuerierRoute() string {
	return QuerierRoute
}
// LegacyQuerierHandler returns the bep3 module's legacy (Amino) querier.
func (am AppModule) LegacyQuerierHandler(legacyQuerierCdc *codec.LegacyAmino) sdk.Querier {
	return keeper.NewQuerier(am.keeper)
}
// NewQuerierHandler returns the bep3 module sdk.Querier.
func (am AppModule) NewQuerierHandler() sdk.Querier {
	return NewQuerier(am.keeper)
}
// InitGenesis performs genesis initialization for the bep3 module. It returns
// no validator updates. MustUnmarshalJSON panics on malformed genesis data,
// which aborts chain start-up.
func (am AppModule) InitGenesis(ctx sdk.Context, cdc codec.JSONMarshaler, data json.RawMessage) []abci.ValidatorUpdate {
	var genesisState GenesisState
	cdc.MustUnmarshalJSON(data, &genesisState)
	InitGenesis(ctx, am.keeper, am.accountKeeper, genesisState)
	return []abci.ValidatorUpdate{}
}
// ExportGenesis returns the exported genesis state as raw bytes for the bep3
// module.
func (am AppModule) ExportGenesis(ctx sdk.Context, cdc codec.JSONMarshaler) json.RawMessage {
	gs := ExportGenesis(ctx, am.keeper)
	return cdc.MustMarshalJSON(gs)
}
// RegisterServices registers the module's gRPC query and msg services with
// the configurator.
func (am AppModule) RegisterServices(cfg module.Configurator) {
	bep3types.RegisterQueryServer(cfg.QueryServer(), am.keeper)
	bep3types.RegisterMsgServer(cfg.MsgServer(), keeper.NewMsgServerImpl(am.keeper))
}
// BeginBlock runs the begin blocker for the bep3 module.
func (am AppModule) BeginBlock(ctx sdk.Context, _ abci.RequestBeginBlock) {
	BeginBlocker(ctx, am.keeper)
}
// EndBlock returns the end blocker for the bep3 module. It is a no-op and
// returns no validator updates.
func (am AppModule) EndBlock(_ sdk.Context, _ abci.RequestEndBlock) []abci.ValidatorUpdate {
	return []abci.ValidatorUpdate{}
}
// WeightedOperations returns all the bep3 module simulation operations with
// their respective weights.
func (am AppModule) WeightedOperations(simState module.SimulationState) []sdksim.WeightedOperation {
	return WeightedOperations(simState.AppParams, simState.Cdc, am.accountKeeper, am.bankKeeper, am.keeper)
}
//____________________________________________________________________________
// GenerateGenesisState creates a randomized GenState of the bep3 module for
// simulation runs.
func (AppModuleBasic) GenerateGenesisState(simState *module.SimulationState) {
	RandomizedGenState(simState)
}
// ProposalContents doesn't return any content functions for governance proposals.
func (AppModuleBasic) ProposalContents(_ module.SimulationState) []sdksim.WeightedProposalContent {
	return nil
}
// RegisterStoreDecoder registers a decoder for bep3 module's types.
// NOTE(review): ab.cdc is only ever assigned through RegisterLegacyAminoCodec,
// which uses a value receiver and therefore discards the assignment — ab.cdc
// may be nil here; confirm the intended wiring.
func (ab AppModuleBasic) RegisterStoreDecoder(sdr sdk.StoreDecoderRegistry) {
	sdr[StoreKey] = NewDecodeStore(ab.cdc)
}
// RandomizedParams returns randomized parameter changes for the bep3 module.
// (The previous comment claimed it returns nil, which contradicted the
// ParamChanges call below.)
func (AppModuleBasic) RandomizedParams(r *rand.Rand) []sdksim.ParamChange {
	return ParamChanges(r)
}
|
def is_valid_email_address(s):
    """Loose syntactic e-mail check.

    Accepts strings with exactly one '@', a non-empty local part, and a
    non-empty domain part containing at least one '.'.
    """
    local, at, domain = s.partition('@')
    # No '@' at all, or a second '@' in the remainder: invalid.
    if not at or '@' in domain:
        return False
    # Both sides of the '@' must be non-empty.
    if not local or not domain:
        return False
    # The domain must contain at least one dot.
    return '.' in domain
<filename>components/App.js
import React from 'react';
import Header from './common/Header';
import ContentsContainer from './common/ContentsContainer';
import Footer from './common/Footer';
// Root layout component: header, routed page contents, footer.
const App = () => (
  <div id="wrap">
    <Header />
    <ContentsContainer />
    <Footer />
  </div>
);
export default App;
|
<reponame>coreypobrien/pyvows
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# pyVows testing engine
# https://github.com/heynemann/pyvows
# Licensed under the MIT license:
# http://www.opensource.org/licenses/mit-license
# Copyright (c) 2011 <NAME> <EMAIL>
# stdlib
import sys
from textwrap import dedent
# external
from setuptools import setup, find_packages
# local
from pyvows import version
# Packages needed only when running the test suite.
_test_requires = [
    'argparse',
    'colorama',
    'coverage'
]
# Core runtime dependencies.
_install_requires = [
    'gevent>=0.13.6',
    'preggy>=0.11.1',
]
# argparse only entered the stdlib in Python 2.7; backfill it for 2.6.
if sys.version_info < (2, 7):
    _install_requires.append('argparse >= 1.1')
setup(
    ### OVERVIEW
    name='pyVows',
    description='pyVows is a BDD test engine based on Vows.js <http://vowsjs.org>.',
    long_description=dedent(
        '''pyVows is a test engine based on Vows.js. It features topic-based testing,
(*fast*) parallel running of tests, code coverage reports, test profiling, and
more:
http://pyvows.org
'''),
    ### URLs
    url='http://pyvows.org',
    ### TECHNICAL INFO
    version=version.to_str(),
    install_requires=_install_requires,
    extras_require={
        'tests': _test_requires,
    },
    packages=find_packages(exclude=['tests', 'tests.*']),
    package_dir={'pyvows': 'pyvows'},
    entry_points={
        'console_scripts': [
            'pyvows = pyvows.cli:main'
        ]
    },
    ### PEOPLE & LICENSE
    author='<NAME>',
    author_email='<EMAIL>',
    #maintainer = '<NAME>',
    #maintainer_email = '<EMAIL>',
    maintainer='Zearin',
    license='MIT',
    ### CATEGORIZATION
    keywords='test testing vows tdd bdd development coverage profile profiling',
    classifiers=[
        'Development Status :: 4 - Beta',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Natural Language :: English',
        'Operating System :: MacOS',
        'Operating System :: POSIX',
        'Operating System :: Unix',
        'Operating System :: OS Independent',
        'Programming Language :: Python :: 2.6',
        'Programming Language :: Python :: 2.7',
        'Topic :: Software Development :: Testing'
    ],
)
|
<filename>gooru-core/src/main/java/org/ednovo/gooru/core/api/model/Thumbnail.java<gh_stars>0
package org.ednovo.gooru.core.api.model;
import java.io.Serializable;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.lang.StringUtils;
/**
 * Serializable view-model for a resource thumbnail: where the image lives
 * (asset URI + folder + name), which scaled dimensions apply for the owning
 * resource type, and whether the image is a default placeholder.
 */
public class Thumbnail implements Serializable {
    private static final long serialVersionUID = 7789051171506439642L;
    private String url;
    private String dimensions;
    private boolean isDefaultImage;
    private ResourceType thumbnailResourceType;
    private String resourceUrl;
    private String thumbnailName;
    private String thumbnailAssetURI;
    private String thumbnailFolder;
    // Comma-separated "WxH" size lists selected per resource type in getDimensions().
    public static final String COLLECTION_THUMBNAIL_SIZES = "160x120,75x56,120x90,80x60,50x40,310x258,800x600";
    public static final String QUIZ_THUMBNAIL_SIZES = "160x120,75x56,120x90,80x60,50x40,800x600";
    public static final String RESOURCE_THUMBNAIL_SIZES = "80x60,160x120";
    public Thumbnail() {
    }
    // Stores the raw fields, then derives url, default-image flag and
    // dimensions from them via the getters below.
    public Thumbnail(ResourceType resourceType, String resourceUrl, String thumbnail, String assetURI, String folder) {
        this.setThumbnailAssetURI(assetURI);
        this.setThumbnailResourceType(resourceType);
        this.setResourceUrl(resourceUrl);
        this.setThumbnailName(thumbnail);
        this.setThumbnailFolder(folder);
        this.setUrl(getUrl());
        this.setDefaultImage(isDefaultImage());
        this.setDimensions(this.getDimensions());
    }
    /**
     * Derives and caches the thumbnail URL. Videos with no explicit thumbnail
     * fall back to the YouTube preview image; "gooru-default" names skip the
     * folder segment; assessment-question resources only get a URL when a
     * thumbnail name is present.
     */
    public String getUrl() {
        if (getThumbnailResourceType() != null) {
            if (!getThumbnailResourceType().getName().equalsIgnoreCase("assessment-question")) {
                if (StringUtils.isBlank(getThumbnailName()) && getThumbnailResourceType().getName().equalsIgnoreCase(ResourceType.Type.VIDEO.getType())) {
                    this.url = this.getYoutubeVideoId(getResourceUrl()) == null ? null : "img.youtube.com/vi/" + this.getYoutubeVideoId(getResourceUrl()) + "/1.jpg";
                } else {
                    if (getThumbnailName() != null && getThumbnailName().contains("gooru-default")) {
                        this.url = getThumbnailAssetURI() + getThumbnailName();
                    } else if (getThumbnailName() != null && !getThumbnailName().isEmpty()) {
                        this.url = getThumbnailAssetURI() + getThumbnailFolder() + getThumbnailName();
                    } else {
                        this.url = "";
                    }
                }
            }
            else if(!StringUtils.isBlank(getThumbnailName())){
                this.url = getThumbnailAssetURI() + getThumbnailFolder() + getThumbnailName();
            }
        }
        return url;
    }
    public void setUrl(String url) {
        this.url = url;
    }
    /**
     * Returns the size list matching the resource type: collections/classbooks,
     * quizzes/exams, or plain resources. Null when no resource type is set.
     */
    public String getDimensions() {
        if (getThumbnailResourceType() != null) {
            String resourceTypeName = getThumbnailResourceType().getName();
            if (resourceTypeName.equalsIgnoreCase(ResourceType.Type.CLASSPLAN.getType()) || resourceTypeName.equalsIgnoreCase(ResourceType.Type.CLASSBOOK.getType())) {
                this.dimensions = COLLECTION_THUMBNAIL_SIZES;
            } else if (resourceTypeName.equalsIgnoreCase(ResourceType.Type.ASSESSMENT_QUIZ.getType()) || resourceTypeName.equalsIgnoreCase(ResourceType.Type.ASSESSMENT_EXAM.getType())) {
                this.dimensions = QUIZ_THUMBNAIL_SIZES;
            } else {
                this.dimensions = RESOURCE_THUMBNAIL_SIZES;
            }
            return dimensions;
        } else {
            return null;
        }
    }
    public void setDimensions(String dimensions) {
        this.dimensions = dimensions;
    }
    /**
     * True when no thumbnail name is set (except for videos, which fall back
     * to YouTube previews) or when a collection/quiz-like resource uses a
     * "gooru-default" image.
     */
    public boolean isDefaultImage() {
        if (getThumbnailResourceType() != null) {
            String resourceTypeName = getThumbnailResourceType().getName();
            if (getThumbnailName() == null && !(resourceTypeName.equalsIgnoreCase(ResourceType.Type.VIDEO.getType()))) {
                this.isDefaultImage = true;
            } else if (((resourceTypeName.equalsIgnoreCase(ResourceType.Type.CLASSPLAN.getType()) || resourceTypeName.equalsIgnoreCase(ResourceType.Type.CLASSBOOK.getType()) || resourceTypeName.equalsIgnoreCase(ResourceType.Type.ASSESSMENT_EXAM.getType()) || resourceTypeName
                    .equalsIgnoreCase(ResourceType.Type.ASSESSMENT_QUIZ.getType())) && getThumbnailName().contains("gooru-default"))) {
                this.isDefaultImage = true;
            } else {
                this.isDefaultImage = false;
            }
        }
        return isDefaultImage;
    }
    public void setDefaultImage(boolean isDefaultImage) {
        this.isDefaultImage = isDefaultImage;
    }
    /**
     * Extracts the 11-character YouTube video id from a youtu.be/youtube.com
     * URL, or returns null when no id is found.
     * NOTE(review): the matcher null-check below appears redundant; verify
     * against Pattern.matcher's contract.
     */
    private String getYoutubeVideoId(String url) {
        String pattern = "youtu(?:\\.be|be\\.com)/(?:.*v(?:/|=)|(?:.*/)?)([a-zA-Z0-9-_]{11}+)";
        String videoId = null;
        Pattern compiledPattern = Pattern.compile(pattern, Pattern.CASE_INSENSITIVE);
        Matcher matcher = compiledPattern.matcher(url);
        if (matcher != null) {
            // Keep the id from the last match in the string.
            while (matcher.find()) {
                videoId = matcher.group(1);
            }
        }
        return videoId;
    }
    public String getResourceUrl() {
        return resourceUrl;
    }
    public void setResourceUrl(String resourceUrl) {
        this.resourceUrl = resourceUrl;
    }
    public ResourceType getThumbnailResourceType() {
        return thumbnailResourceType;
    }
    public void setThumbnailResourceType(ResourceType thumbnailResourceType) {
        this.thumbnailResourceType = thumbnailResourceType;
    }
    public String getThumbnailName() {
        return thumbnailName;
    }
    public void setThumbnailName(String thumbnailName) {
        this.thumbnailName = thumbnailName;
    }
    public String getThumbnailAssetURI() {
        return thumbnailAssetURI;
    }
    public void setThumbnailAssetURI(String thumbnailAssetURI) {
        this.thumbnailAssetURI = thumbnailAssetURI;
    }
    public String getThumbnailFolder() {
        return thumbnailFolder;
    }
    public void setThumbnailFolder(String thumbnailFolder) {
        this.thumbnailFolder = thumbnailFolder;
    }
}
<filename>services/cron-jobs/import-plenary-minutes/src/types.ts<gh_stars>10-100
// Paging metadata returned alongside a page of plenary-minutes results.
export interface MetaData {
    hits: number;            // total number of matching documents
    nextOffset: number;      // offset to request the next page
    staticItemCount: number; // count of items that never change between pages
}
// One plenary session's minutes: sitting number, date, and raw XML payload.
export interface PlenaryMinutesItem {
    meeting: number;
    date: Date;
    xml: string;
}
|
import {DynamoDBClient, GetItemCommand} from '@aws-sdk/client-dynamodb';
import {jest} from '@jest/globals';
import bcrypt from 'bcrypt';
import {AppOptions} from 'next-auth';
import {EmailSessionProvider} from 'next-auth/adapters';
// @ts-ignore declaration file
import logger from 'next-auth/dist/lib/logger';
import {mocked} from 'ts-jest/utils';
import {dynamoDBAdapterFactory} from '../../__tests__/dynamoDBAdapterFactory';
import {mockUser} from '../../__tests__/mockUser';
import {
DynamoDBAdapter,
ERROR_TOKEN_EXPIRED,
ERROR_TOKEN_EXPIRED_DATE_FORMAT,
ERROR_TOKEN_INVALID,
} from '../DynamoDBAdapter';
// Mock crypto, the AWS SDK, and next-auth's logger so these tests perform no
// real hashing, network calls, or log output.
jest.mock('bcrypt');
jest.mock('@aws-sdk/client-dynamodb');
jest.mock('next-auth/dist/lib/logger');
const mockHashFunction = mocked(bcrypt.hash);
const mockCompareFunction = mocked(bcrypt.compareSync);
const mockGetItem = mocked(GetItemCommand);
// Clear call counts before each test; restore the real DynamoDB send after
// each test and the bcrypt/GetItem mocks once the whole file is done.
beforeEach(() => {
    mocked(logger.debug).mockClear();
    mockHashFunction.mockClear();
    mockCompareFunction.mockClear();
    mockGetItem.mockClear();
});
afterEach(() => {
    mocked(DynamoDBClient.prototype.send).mockRestore();
});
afterAll(() => {
    mockHashFunction.mockRestore();
    mockCompareFunction.mockRestore();
    mockGetItem.mockRestore();
});
// Tests for DynamoDBAdapter.getVerificationRequest: token lookup, expiry
// handling, token comparison, and error propagation.
describe('getVerificationRequest', () => {
    const TOKEN = 'mockToken';
    // NOTE(review): '<PASSWORD>' looks like a dataset redaction artifact in
    // this fixture value — confirm against the original repository.
    const VERIFICATION_TOKEN = '<PASSWORD>Token';
    const {email} = mockUser;
    const sendVerificationRequest = jest.fn(() => Promise.resolve());
    beforeEach(() => {
        // Default: the stored token matches the supplied one.
        mockCompareFunction.mockReturnValue(true);
        sendVerificationRequest.mockClear();
    });
    // Helper: build an adapter and invoke getVerificationRequest with the
    // canonical arguments used by most tests below.
    const getVerificationRequestFactory = () => {
        return dynamoDBAdapterFactory().then((adapter) => {
            return adapter.getVerificationRequest?.(
                email,
                VERIFICATION_TOKEN,
                'secret',
                ({
                    sendVerificationRequest,
                    maxAge: 60,
                } as unknown) as EmailSessionProvider
            );
        });
    };
    it('should reject with an Error when verificationRequestsTable name was not provided', async () => {
        // @ts-ignore not assignable to type 'never'
        mocked(DynamoDBClient.prototype.send).mockResolvedValue({});
        const adapter = await new DynamoDBAdapter(
            {region: 'us-east-1'},
            {verificationRequestsTable: undefined}
        ).getAdapter({debug: false} as AppOptions);
        const promise = adapter.getVerificationRequest?.(
            email,
            VERIFICATION_TOKEN,
            'secret',
            ({
                sendVerificationRequest,
                maxAge: 60,
            } as unknown) as EmailSessionProvider
        );
        // @ts-ignore possibly undefined
        await promise.catch((_error) => null);
        expect.hasAssertions();
        expect(mockGetItem).not.toBeCalled();
        await expect(promise).rejects.toThrowError();
    });
    it('should resolve with null when retrieving token value from DB failed', async () => {
        // Item has an expiry but no token attribute.
        const Item = {
            expires: {S: new Date().toISOString()},
        };
        // @ts-ignore not assignable to type 'never'
        mocked(DynamoDBClient.prototype.send).mockResolvedValue({Item});
        const result = await getVerificationRequestFactory();
        expect(mockGetItem).toBeCalledTimes(1);
        expect(result).toBeNull();
    });
    it('should resolve with null when retrieving expires value from DB failed', async () => {
        // Item has a token but no expires attribute.
        const Item = {
            token: {S: TOKEN},
        };
        // @ts-ignore not assignable to type 'never'
        mocked(DynamoDBClient.prototype.send).mockResolvedValue({Item});
        const result = await getVerificationRequestFactory();
        expect(mockGetItem).toBeCalledTimes(1);
        expect(result).toBeNull();
    });
    it('should reject with an Error and delete DB record when expires value is past current date', async () => {
        const Item = {
            expires: {S: new Date(Date.now() - 60).toISOString()},
            token: {S: TOKEN},
        };
        // @ts-ignore not assignable to type 'never'
        mocked(DynamoDBClient.prototype.send).mockResolvedValue({Item});
        const promise = getVerificationRequestFactory();
        await promise.catch((_error) => null);
        expect(mockGetItem).toBeCalledTimes(1);
        await expect(promise).rejects.toThrowError(ERROR_TOKEN_EXPIRED);
    });
    it('should reject with an Error and delete DB record when expires value can not be parsed as a Date', async () => {
        const Item = {
            expires: {S: 'not a date'},
            token: {S: TOKEN},
        };
        // @ts-ignore not assignable to type 'never'
        mocked(DynamoDBClient.prototype.send).mockResolvedValue({Item});
        const promise = getVerificationRequestFactory();
        await promise.catch((_error) => null);
        expect(mockGetItem).toBeCalledTimes(1);
        await expect(promise).rejects.toThrowError(ERROR_TOKEN_EXPIRED_DATE_FORMAT);
    });
    it('should reject with an Error and delete DB record when token verification failed', async () => {
        const Item = {
            expires: {S: new Date(Date.now() + 5000).toISOString()},
            token: {S: TOKEN},
        };
        // @ts-ignore not assignable to type 'never'
        mocked(DynamoDBClient.prototype.send).mockResolvedValue({Item});
        // bcrypt comparison fails -> token mismatch.
        mocked(mockCompareFunction).mockReturnValue(false);
        const promise = getVerificationRequestFactory();
        await promise.catch((_error) => null);
        expect(mockCompareFunction).toBeCalledTimes(1);
        expect(mockGetItem).toBeCalledTimes(1);
        await expect(promise).rejects.toThrowError(ERROR_TOKEN_INVALID);
    });
    it('should resolve with a VerificationRequest', async () => {
        const tokenExpiresString = new Date(Date.now() + 5000).toISOString();
        const Item = {
            expires: {S: tokenExpiresString},
            token: {S: TOKEN},
        };
        // @ts-ignore not assignable to type 'never'
        mocked(DynamoDBClient.prototype.send).mockResolvedValue({Item});
        const adapter = await dynamoDBAdapterFactory();
        const result = await adapter.getVerificationRequest?.(
            email,
            VERIFICATION_TOKEN,
            'secret',
            ({
                sendVerificationRequest,
                maxAge: 60,
            } as unknown) as EmailSessionProvider
        );
        expect(mockGetItem).toBeCalledTimes(1);
        expect(result).toStrictEqual({
            expires: new Date(tokenExpiresString),
            identifier: email,
            token: TOKEN,
        });
    });
    it('should be able to catch an Error and reject with it', async () => {
        const ERROR_MESSAGE = 'mock error message';
        mocked(DynamoDBClient.prototype.send).mockImplementationOnce(
            jest.fn(() => Promise.reject(new Error(ERROR_MESSAGE)))
        );
        const promise = getVerificationRequestFactory();
        await expect(promise).rejects.toThrowError(ERROR_MESSAGE);
    });
});
|
<gh_stars>0
// NOTE: This package makes strong assumptions and is wrong for most cases.
// It should only be used for the shortest path algorithm and using a weight
// function that can only return values 0 or 1.
package bucket
// Queue is a monotone bucket ("dial") priority queue specialised for 0/1
// edge weights: at any moment only two priorities (offset and offset+1) can
// be live, so two buckets suffice (see the package comment).
type Queue struct {
	index   []int   // index[v] = position of vertex v inside its bucket
	cost    []int64 // cost[v] = current priority (distance) of vertex v
	buckets [2][]int
	offset  int64 // priority represented by buckets[0]
	length  int
}
// SetDist initialises the queue for a graph with len(cost) vertices, adopting
// cost as the backing distance slice, and resets offset/length so a Queue
// value can be reused across runs.
//
// Fix: the original guard `if q == nil { *q = Queue{} }` dereferenced a nil
// receiver (a guaranteed panic) and, for non-nil receivers, never reset
// offset/length on reuse. The reset is now performed unconditionally.
func (q *Queue) SetDist(cost []int64) {
	q.offset = 0
	q.length = 0
	n := len(cost)
	q.index = make([]int, n)
	q.cost = cost
	for i := range q.buckets {
		q.buckets[i] = make([]int, 0, 500)
	}
}
// Len reports how many vertices are currently queued.
func (q *Queue) Len() int { return q.length }
// Fix updates v's priority by removing it and re-pushing it with cost.
func (q *Queue) Fix(v int, cost int64) {
	q.PopV(v)
	q.Push(v, cost)
}
// Pop removes and returns a vertex with the smallest priority. When the
// current bucket is empty the two buckets are swapped and offset advances by
// one. The caller must ensure the queue is non-empty.
func (q *Queue) Pop() int {
	if len(q.buckets[0]) == 0 {
		q.buckets[0], q.buckets[1] = q.buckets[1], q.buckets[0][:0]
		q.offset++
	}
	// Take the last element of the lowest-priority bucket.
	b := q.buckets[0]
	n := len(b)
	v := b[n-1]
	q.buckets[0] = b[0 : n-1]
	q.length--
	return v
}
// Push inserts v with the given cost. cost-offset must be 0 or 1 (the 0/1
// weight assumption from the package comment); any other value makes the
// bucket index out of range.
func (q *Queue) Push(v int, cost int64) {
	p := cost - q.offset
	b := q.buckets[p]
	i := len(b)
	q.buckets[p] = append(b, v)
	// Record position and priority so PopV/Fix can find v later.
	q.index[v] = i
	q.cost[v] = cost
	q.length++
}
// PopV removes the specific vertex v in O(1) by swapping it with the last
// element of its bucket and shrinking the bucket.
func (q *Queue) PopV(v int) {
	p := q.cost[v] - q.offset
	i := q.index[v]
	b := q.buckets[p]
	n := len(b)
	// Move the last element into v's slot and fix its index entry.
	b[i] = b[n-1]
	q.index[b[n-1]] = i
	q.buckets[p] = b[0 : n-1]
	q.length--
}
|
<gh_stars>1-10
import { include, mask, logger, Class } from '../dependency'
import { HttpError } from '../HttpError/HttpError'
import { LIB_DIR } from '../vars'
import { fn_delegate, fn_proxy } from '../util/fn'
import HttpPageBase from './HttpPageBase'
import {
page_process,
page_resolve,
page_pathAddAlias,
pageError_sendDelegate,
pageError_failDelegate,
page_processRequest
} from './page-utils'
// Page implementation that renders HTTP error responses: it resolves the
// master/template/component paths for the error's status code from the page
// configuration and renders them with the error object as the view model.
class HttpErrorPage extends HttpPageBase {
    constructor(error, pageData, config) {
        super(null, null);
        this._setPageData(pageData, config);
        // The error itself is the model handed to the template.
        this.model = error;
    }
    // Copies explicit *Path values from the page data; otherwise resolves
    // master/template/compo paths through the configuration helpers, with
    // template/master fallbacks when nothing was specified at all.
    private _setPageData(data, cfg) {
        this.data = data;
        if (data.masterPath != null)
            this.masterPath = data.masterPath;
        if (data.templatePath != null)
            this.templatePath = data.templatePath;
        if (data.master)
            this.masterPath = cfg.$getMaster(data);
        if (data.template)
            this.templatePath = cfg.$getTemplate(data);
        if (data.compo)
            this.compoPath = cfg.$getCompo(data);
        if (this.template == null && this.compoPath == null && this.templatePath == null)
            this.templatePath = cfg.$getTemplate(data);
        if (this.master == null && this.masterPath == null)
            this.masterPath = cfg.$getMaster(data);
    }
    // Picks the page config for the error's status code (falling back to the
    // generic error page, then to the library's built-in error template) and
    // renders it.
    // NOTE(review): process() below has no return value, so this returns
    // undefined — confirm whether callers rely on the returned page.
    static send(error, req, res, config) {
        var pageCfg = config.page,
            errorPages = pageCfg.errors,
            genericPage = pageCfg.error
            ;
        var pageData = (errorPages && errorPages[error.statusCode]) || genericPage;
        if (pageData == null) {
            pageData = {
                masterPath: '',
                templatePath: LIB_DIR.combine('../pages/error/error.mask').toString()
            };
        }
        return new HttpErrorPage(error, pageData, config).process(req, res, config);
    }
    // Registers success/failure delegates that write the error response, then
    // kicks off request processing.
    process(req, res, config) {
        this
            .done(pageError_sendDelegate(req, res, this.model, this.app))
            .fail(pageError_failDelegate(req, res, this.model, this.app))
            ;
        page_processRequest(this, req, res, config);
    }
    // Loads master/template/component resources and continues in _response.
    public _load() {
        this.resource = include
            .instance()
            .load(
                page_pathAddAlias(this.masterPath, 'Master'),
                page_pathAddAlias(this.templatePath, 'Template'))
            .js(
                page_pathAddAlias(this.compoPath, 'Compo')
            )
            .done(fn_proxy(this._response, this));
        return this;
    }
    // Validates the loaded resources and hands the nodes to page_process;
    // rejects with an HttpError when master or template could not be found.
    private _response(resp) {
        var master = resp.load && resp.load.Master || this.master,
            template = resp.load && resp.load.Template || this.template,
            nodes = this.nodes || template
            ;
        if (master == null && this.masterPath !== '') {
            this.reject(new HttpError('Page: Masterpage not found'));
            return;
        }
        if (nodes == null) {
            this.reject(new HttpError('Page: Template not found'));
            return;
        }
        if (master)
            mask.render(mask.parse(master));
        page_process(
            this
            , nodes
            , fn_delegate(page_resolve, this)
        );
    }
};
export default HttpErrorPage;
|
from collections import Counter
from typing import List
def count_unique_numbers(nums: List[int]) -> int:
    """Count the values that occur exactly once in ``nums``.

    Args:
        nums: Sequence of integers; may be empty.

    Returns:
        The number of distinct values with multiplicity 1.
    """
    # Counter replaces the hand-rolled frequency dict; same semantics.
    counts = Counter(nums)
    return sum(1 for occurrences in counts.values() if occurrences == 1)
package manager;
import static util.Resource.getSound;
/**
 * Plays a sound resource on a dedicated polling thread: callers request
 * playback via {@link #play()}, and the background loop started by
 * {@link #startThread()} starts the clip on its next poll.
 */
public class ManajerSound implements Runnable {
    // Delay between polls of the playSound flag, in milliseconds.
    // FIXME(review): 0 makes the loop a busy spin; consider wait/notify.
    public static int WAIT_TIME = 0;
    Thread thread;
    // volatile fix: this flag is written by play() on caller threads and read
    // by the polling thread; without volatile the write may never become
    // visible to the loop and the sound would never start.
    private volatile boolean playSound = false;
    private String path;

    /** Creates a sound manager for the clip at the given resource path. */
    public ManajerSound(String path) {
        thread = new Thread(this);
        this.path = path;
    }

    /** Polling loop: sleeps WAIT_TIME ms, then starts the clip if requested. */
    @Override
    public void run() {
        while(true) {
            try {
                Thread.sleep(WAIT_TIME);
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
            if(playSound) {
                getSound(path).start();
                playSound = false;
            }
        }
    }

    /** Starts the background polling thread. */
    public void startThread() {
        thread.start();
    }

    /** Requests that the sound be played on the next poll. */
    public void play() {
        playSound = true;
    }
}
|
/** @jsxImportSource @emotion/react */
import React from 'react';
import Typography from '@mui/material/Typography';
import {
BASE_BSC_SCAN_URL,
VENUS_MEDIUM_URL,
VENUS_DISCORD_URL,
VENUS_TWITTER_URL,
VENUS_GITHUB_URL,
ETHERSCAN_XVS_URL,
} from 'config';
import { Icon } from 'components/v2/Icon';
import { useStyles } from './styles';
// Props for the Footer component.
export interface IFooterProps {
    currentBlockNumber: number; // latest BSC block number shown in the status link
}
// App footer: shows the latest block number (linking to BscScan) and a row of
// external community/social links, each opened in a new tab.
export const Footer: React.FC<IFooterProps> = ({ currentBlockNumber }) => {
    const styles = useStyles();
    return (
        <div css={styles.container}>
            <Typography
                component="a"
                variant="small2"
                css={styles.status}
                href={BASE_BSC_SCAN_URL}
                target="_blank"
                rel="noreferrer"
            >
                Latest Block: <span css={styles.statusBlockNumber}>{currentBlockNumber}</span>
            </Typography>
            <div css={styles.links}>
                <a css={styles.link} href={ETHERSCAN_XVS_URL} target="_blank" rel="noreferrer">
                    <Icon name="venus" color={styles.theme.palette.text.primary} size="12px" />
                </a>
                <a css={styles.link} href={VENUS_MEDIUM_URL} target="_blank" rel="noreferrer">
                    <Icon name="medium" color={styles.theme.palette.text.primary} size="12px" />
                </a>
                <a css={styles.link} href={VENUS_DISCORD_URL} target="_blank" rel="noreferrer">
                    <Icon name="discord" color={styles.theme.palette.text.primary} size="12px" />
                </a>
                <a css={styles.link} href={VENUS_TWITTER_URL} target="_blank" rel="noreferrer">
                    <Icon name="twitter" color={styles.theme.palette.text.primary} size="12px" />
                </a>
                <a css={styles.link} href={VENUS_GITHUB_URL} target="_blank" rel="noreferrer">
                    <Icon name="github" color={styles.theme.palette.text.primary} size="12px" />
                </a>
            </div>
        </div>
    );
};
|
<filename>archguard/src/global.d.ts
// Minimal ambient typings for the elkjs graph-layout library.
declare class ELK {
    constructor(options?: any);
    // Runs the layout algorithm; resolves with the laid-out graph.
    layout(data: any): Promise<any>;
}
declare module "elkjs" {
    export = ELK;
}
|
#!/usr/bin/env bash
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# Resolve the chart repo root relative to this script, plus the pinned tool
# binaries (kind/helm/kubectl) under output/bin.
BINDIR=`dirname "$0"`
CHARTS_HOME=`cd ${BINDIR}/..;pwd`
OUTPUT_BIN=${CHARTS_HOME}/output/bin
KIND_BIN=$OUTPUT_BIN/kind
HELM=${OUTPUT_BIN}/helm
KUBECTL=${OUTPUT_BIN}/kubectl
# Namespace/release used by the CI install, and a unique id per CI run so
# concurrent runs get distinct kind clusters.
NAMESPACE=pulsar
CLUSTER=pulsar-ci
CLUSTER_ID=$(uuidgen)
# Builds a single-node kind cluster named pulsar-ci-<CLUSTER_ID> for this run.
function ci::create_cluster() {
    echo "Creating a kind cluster ..."
    ${CHARTS_HOME}/hack/kind-cluster-build.sh --name pulsar-ci-${CLUSTER_ID} -c 1 -v 10
    echo "Successfully created a kind cluster."
}
# Tears down the kind cluster created by ci::create_cluster.
function ci::delete_cluster() {
    echo "Deleting the kind cluster ..."
    kind delete cluster --name="pulsar-ci-${CLUSTER_ID}"
    echo "Successfully deleted the kind cluster."
}
# Installs the local storage provisioner chart and blocks until at least one
# provisioner pod reaches the Running phase.
function ci::install_storage_provisioner() {
    echo "Installing the local storage provisioner ..."
    ${HELM} install local-storage-provisioner ${CHARTS_HOME}/charts/local-storage-provisioner
    WC=$(${KUBECTL} get pods --field-selector=status.phase=Running | grep local-storage-provisioner | wc -l)
    # Poll every 15s until the provisioner pod is Running.
    while [[ ${WC} -lt 1 ]]; do
        echo ${WC};
        sleep 15
        ${KUBECTL} get pods --field-selector=status.phase=Running
        WC=$(${KUBECTL} get pods --field-selector=status.phase=Running | grep local-storage-provisioner | wc -l)
    done
    echo "Successfully installed the local storage provisioner."
}
# Installs cert-manager into its own namespace and blocks until its three
# deployments (controller, webhook, cainjector) have Running pods.
function ci::install_cert_manager() {
    echo "Installing the cert-manager ..."
    ${KUBECTL} create namespace cert-manager
    ${CHARTS_HOME}/scripts/cert-manager/install-cert-manager.sh
    WC=$(${KUBECTL} get pods -n cert-manager --field-selector=status.phase=Running | wc -l)
    # Poll every 15s; 3 lines of Running pods means all components are up.
    while [[ ${WC} -lt 3 ]]; do
        echo ${WC};
        sleep 15
        ${KUBECTL} get pods -n cert-manager
        WC=$(${KUBECTL} get pods -n cert-manager --field-selector=status.phase=Running | wc -l)
    done
    echo "Successfully installed the cert manager."
}
# Install the pulsar helm chart into ${NAMESPACE} and block until a broker
# and a proxy pod are Running and resolvable from the toolset pod.
# $1 - path to the helm values file
# $2 - extra options forwarded to prepare_helm_release.sh
function ci::install_pulsar_chart() {
local value_file=$1
local extra_opts=$2
echo "Installing the pulsar chart"
${KUBECTL} create namespace ${NAMESPACE}
# Echo each important command first so it appears verbatim in the CI log.
echo ${CHARTS_HOME}/scripts/pulsar/prepare_helm_release.sh -k ${CLUSTER} -n ${NAMESPACE} ${extra_opts}
${CHARTS_HOME}/scripts/pulsar/prepare_helm_release.sh -k ${CLUSTER} -n ${NAMESPACE} ${extra_opts}
${CHARTS_HOME}/scripts/pulsar/upload_tls.sh -k ${CLUSTER} -d ${CHARTS_HOME}/.ci/tls
sleep 10
echo ${HELM} install --values ${value_file} ${CLUSTER} ${CHARTS_HOME}/charts/pulsar
# Render the templates before installing so template errors are visible in
# the log even if the install itself fails.
${HELM} template --values ${value_file} ${CLUSTER} ${CHARTS_HOME}/charts/pulsar
${HELM} install --values ${value_file} ${CLUSTER} ${CHARTS_HOME}/charts/pulsar
echo "wait until broker is alive"
WC=$(${KUBECTL} get pods -n ${NAMESPACE} --field-selector=status.phase=Running | grep ${CLUSTER}-broker | wc -l)
while [[ ${WC} -lt 1 ]]; do
echo ${WC};
sleep 15
${KUBECTL} get pods -n ${NAMESPACE}
# NOTE(review): this intermediate count has no phase filter, so it counts
# broker pods in any phase -- used only to trigger the diagnostics below.
WC=$(${KUBECTL} get pods -n ${NAMESPACE} | grep ${CLUSTER}-broker | wc -l)
# More than one broker line here means non-Running broker pods exist;
# dump pod state and logs to help diagnose crash loops.
if [[ ${WC} -gt 1 ]]; then
${KUBECTL} describe pod -n ${NAMESPACE} pulsar-ci-broker-0
${KUBECTL} logs -n ${NAMESPACE} pulsar-ci-broker-0
fi
WC=$(${KUBECTL} get pods -n ${NAMESPACE} --field-selector=status.phase=Running | grep ${CLUSTER}-broker | wc -l)
done
# Wait for broker DNS and its HTTP status endpoint from inside the cluster.
${KUBECTL} exec -n ${NAMESPACE} ${CLUSTER}-toolset-0 -- bash -c 'until nslookup pulsar-ci-broker; do sleep 3; done'
${KUBECTL} exec -n ${NAMESPACE} ${CLUSTER}-toolset-0 -- bash -c 'until [ "$(curl -L http://pulsar-ci-broker:8080/status.html)" == "OK" ]; do sleep 3; done'
WC=$(${KUBECTL} get pods -n ${NAMESPACE} --field-selector=status.phase=Running | grep ${CLUSTER}-proxy | wc -l)
while [[ ${WC} -lt 1 ]]; do
echo ${WC};
sleep 15
${KUBECTL} get pods -n ${NAMESPACE}
WC=$(${KUBECTL} get pods -n ${NAMESPACE} --field-selector=status.phase=Running | grep ${CLUSTER}-proxy | wc -l)
done
${KUBECTL} exec -n ${NAMESPACE} ${CLUSTER}-toolset-0 -- bash -c 'until nslookup pulsar-ci-proxy; do sleep 3; done'
# ${KUBECTL} exec -n ${NAMESPACE} ${CLUSTER}-toolset-0 -- bash -c 'until [ "$(curl -L http://pulsar-ci-proxy:8080/status.html)" == "OK" ]; do sleep 3; done'
}
# Smoke-test the freshly installed cluster by producing one message through
# the pulsar-client CLI from the toolset pod.
function ci::test_pulsar_producer() {
# Give the cluster time to settle after install.
sleep 120
${KUBECTL} exec -n ${NAMESPACE} ${CLUSTER}-toolset-0 -- bash -c 'until nslookup pulsar-ci-broker; do sleep 3; done'
${KUBECTL} exec -n ${NAMESPACE} ${CLUSTER}-toolset-0 -- bash -c 'until nslookup pulsar-ci-proxy; do sleep 3; done'
# Dump bookie disk usage and config into the CI log for diagnostics.
${KUBECTL} exec -n ${NAMESPACE} ${CLUSTER}-bookie-0 -- df -h
${KUBECTL} exec -n ${NAMESPACE} ${CLUSTER}-bookie-0 -- cat conf/bookkeeper.conf
# List read-write and read-only bookies to show ensemble health.
${KUBECTL} exec -n ${NAMESPACE} ${CLUSTER}-toolset-0 -- bin/bookkeeper shell listbookies -rw
${KUBECTL} exec -n ${NAMESPACE} ${CLUSTER}-toolset-0 -- bin/bookkeeper shell listbookies -ro
# Create a tenant and namespace, then produce a single test message.
${KUBECTL} exec -n ${NAMESPACE} ${CLUSTER}-toolset-0 -- bin/pulsar-admin tenants create pulsar-ci
${KUBECTL} exec -n ${NAMESPACE} ${CLUSTER}-toolset-0 -- bin/pulsar-admin namespaces create pulsar-ci/test
${KUBECTL} exec -n ${NAMESPACE} ${CLUSTER}-toolset-0 -- bin/pulsar-client produce -m "test-message" pulsar-ci/test/test-topic
}
# Poll `pulsar-admin functions status` until the test function reports at
# least one running instance.
function ci::wait_function_running() {
  local status_cmd='bin/pulsar-admin functions status --tenant pulsar-ci --namespace test --name test-function | bin/jq .numRunning'
  local running
  running=$(${KUBECTL} exec -n ${NAMESPACE} ${CLUSTER}-toolset-0 -- bash -c "${status_cmd}")
  until [[ ${running} -ge 1 ]]; do
    echo ${running}
    sleep 15
    ${KUBECTL} get pods -n ${NAMESPACE} --field-selector=status.phase=Running
    running=$(${KUBECTL} exec -n ${NAMESPACE} ${CLUSTER}-toolset-0 -- bash -c "${status_cmd}")
  done
}
# Poll `pulsar-admin functions stats` until the test function has processed
# at least one message successfully.
function ci::wait_message_processed() {
  local stats_cmd='bin/pulsar-admin functions stats --tenant pulsar-ci --namespace test --name test-function | bin/jq .processedSuccessfullyTotal'
  local processed
  processed=$(${KUBECTL} exec -n ${NAMESPACE} ${CLUSTER}-toolset-0 -- bash -c "${stats_cmd}")
  until [[ ${processed} -ge 1 ]]; do
    echo ${processed}
    sleep 15
    ${KUBECTL} exec -n ${NAMESPACE} ${CLUSTER}-toolset-0 -- bin/pulsar-admin functions stats --tenant pulsar-ci --namespace test --name test-function
    processed=$(${KUBECTL} exec -n ${NAMESPACE} ${CLUSTER}-toolset-0 -- bash -c "${stats_cmd}")
  done
}
# Deploy the example ExclamationFunction via pulsar-admin. The end-to-end
# verification steps are currently disabled (see TODO below).
function ci::test_pulsar_function() {
# Give the cluster time to settle after install.
sleep 120
${KUBECTL} exec -n ${NAMESPACE} ${CLUSTER}-toolset-0 -- bash -c 'until nslookup pulsar-ci-broker; do sleep 3; done'
${KUBECTL} exec -n ${NAMESPACE} ${CLUSTER}-toolset-0 -- bash -c 'until nslookup pulsar-ci-proxy; do sleep 3; done'
# Diagnostics: bookie disk usage and bookie ensemble state.
${KUBECTL} exec -n ${NAMESPACE} ${CLUSTER}-bookie-0 -- df -h
${KUBECTL} exec -n ${NAMESPACE} ${CLUSTER}-toolset-0 -- bin/bookkeeper shell listbookies -rw
${KUBECTL} exec -n ${NAMESPACE} ${CLUSTER}-toolset-0 -- bin/bookkeeper shell listbookies -ro
# Install jq inside the toolset pod; the (disabled) wait helpers pipe
# pulsar-admin JSON output through bin/jq.
${KUBECTL} exec -n ${NAMESPACE} ${CLUSTER}-toolset-0 -- curl --retry 10 -L -o bin/jq https://github.com/stedolan/jq/releases/download/jq-1.6/jq-linux64
${KUBECTL} exec -n ${NAMESPACE} ${CLUSTER}-toolset-0 -- chmod +x bin/jq
${KUBECTL} exec -n ${NAMESPACE} ${CLUSTER}-toolset-0 -- bin/pulsar-admin functions create --tenant pulsar-ci --namespace test --name test-function --inputs "pulsar-ci/test/test_input" --output "pulsar-ci/test/test_output" --parallelism 1 --classname org.apache.pulsar.functions.api.examples.ExclamationFunction --jar /pulsar/examples/api-examples.jar
# wait until the function is running
# TODO: re-enable function test
# ci::wait_function_running
# ${KUBECTL} exec -n ${NAMESPACE} ${CLUSTER}-toolset-0 -- bin/pulsar-client produce -m "hello pulsar function!" pulsar-ci/test/test_input
# ci::wait_message_processed
}
#include <iostream>
#include <string>
// Caesar-style cipher: every letter is uppercased and shifted forward one
// place in the alphabet ('Z' wraps around to 'A'); non-alphabetic
// characters pass through unchanged.
//
// Fixes vs. the previous version: 'Z'/'z' now wrap to 'A' instead of
// producing the non-letter '[', and the character is cast to unsigned char
// before isalpha/toupper (passing a negative char is undefined behavior).
//
// plain_text: text to encrypt.
// returns: the cipher text, e.g. "Hello World" -> "IFMMP XPSME".
std::string encrypt(const std::string &plain_text)
{
    std::string cipher_text;
    cipher_text.reserve(plain_text.size());
    for (char c : plain_text)
    {
        if (std::isalpha(static_cast<unsigned char>(c)))
        {
            char upper = static_cast<char>(std::toupper(static_cast<unsigned char>(c)));
            // Shift by one, wrapping the end of the alphabet.
            c = (upper == 'Z') ? 'A' : static_cast<char>(upper + 1);
        }
        cipher_text += c;
    }
    return cipher_text;
}
// Demo driver: encrypt a sample string and print the result.
int main()
{
    const std::string message = "Hello World";
    std::cout << encrypt(message) << std::endl;
    return 0;
}
// Output: IFMMP XPSME |
# Habitat plan metadata for the Oracle JRE 7 binary package.
pkg_origin=core
pkg_maintainer="The Habitat Maintainers <humans@habitat.sh>"
pkg_name=jre7
pkg_version=7u80
# Upstream tarball. Oracle's download sits behind an EULA cookie-wall; see
# the custom download_file() below which sends the acceptance cookie.
pkg_source=http://download.oracle.com/otn-pub/java/jdk/${pkg_version}-b15/jre-${pkg_version}-linux-x64.tar.gz
pkg_shasum=4c01efd0d8e80bb6e2f324ec3408ce64f066d4506c7ec93a491f615a4523f4f3
pkg_filename=jre-${pkg_version}-linux-x64.tar.gz
pkg_license=('Oracle Binary Code License Agreement for the Java SE Platform Products and JavaFX')
pkg_description=('Oracle Java Runtime Environment. This package is made available to you to allow you to run your applications as provided in and subject to the terms of the Oracle Binary Code License Agreement for the Java SE Platform Products and JavaFX, found at http://www.oracle.com/technetwork/java/javase/terms/license/index.html')
pkg_upstream_url=http://www.oracle.com/technetwork/java/javase/overview/index.html
# Runtime deps: glibc supplies the dynamic loader patched in below.
pkg_deps=(core/glibc core/gcc-libs)
# patchelf rewrites the ELF interpreter/rpath of the prebuilt binaries.
pkg_build_deps=(core/patchelf)
pkg_bin_dirs=(bin)
pkg_lib_dirs=(lib)
pkg_include_dirs=(include)
# Scratch directory the tarball is unpacked into by do_unpack().
source_dir=$HAB_CACHE_SRC_PATH/${pkg_name}-${pkg_version}
## Refer to habitat/components/plan-build/bin/hab-plan-build.sh for help
# Customomized download_file() to work around the Oracle EULA Cookie-wall
# See: http://stackoverflow.com/questions/10268583/downloading-java-jdk-on-linux-via-wget-is-shown-license-page-instead
# Custom download_file() that bypasses Oracle's EULA cookie-wall by sending
# the license-acceptance cookie with the request.
# $1 - source URL, $2 - destination file name, $3 - expected shasum
# Fix: the cached-file early return previously left the pushd'd directory
# on the stack; popd now runs before returning.
download_file() {
  local url="$1"
  local dst="$2"
  local sha="$3"
  build_line "By including the JRE, you accept the terms of the Oracle Binary Code License Agreement for the Java SE Platform Products and JavaFX, which can be found at http://www.oracle.com/technetwork/java/javase/terms/license/index.html"
  pushd "$HAB_CACHE_SRC_PATH" > /dev/null
  if [[ -f $dst && -n "$sha" ]]; then
    build_line "Found previous file '$dst', attempting to re-use"
    if verify_file "$dst" "$sha"; then
      build_line "Using cached and verified '$dst'"
      popd > /dev/null  # balance the pushd before the early return
      return 0
    else
      build_line "Clearing previous '$dst' file and re-attempting download"
      rm -fv "$dst"
    fi
  fi
  build_line "Downloading '$url' to '$dst'"
  # --no-cookies plus an explicit license cookie defeats the EULA redirect.
  $_wget_cmd --no-check-certificate --no-cookies --header "Cookie: oraclelicense=accept-securebackup-cookie" "$url" -O "$dst"
  build_line "Downloaded '$dst'";
  popd > /dev/null
}
# Unpack the downloaded JRE tarball into $source_dir, dropping the
# tarball's top-level directory.
do_unpack() {
  local tarball="$HAB_CACHE_SRC_PATH/$pkg_filename"
  mkdir "$source_dir"
  pushd "$source_dir" >/dev/null
  tar xz --strip-components=1 -f "$tarball"
  popd > /dev/null
  return 0
}
# No build step: the upstream artifact ships prebuilt binaries.
do_build() {
  return 0
}
# Copy the prebuilt JRE into the package prefix, then rewrite the ELF
# interpreter and rpath of every executable and shared library so they
# resolve against the Habitat-provided glibc instead of the host's.
do_install() {
  cd "$source_dir" || exit
  cp -r ./* "$pkg_prefix"
  # Extend LD_RUN_PATH *before* logging it so the build_line messages show
  # the rpath patchelf actually writes (previously the pre-extension value
  # was logged).
  export LD_RUN_PATH=$LD_RUN_PATH:$pkg_prefix/lib/amd64/jli:$pkg_prefix/lib/amd64/server:$pkg_prefix/lib/amd64
  build_line "Setting interpreter for '${pkg_prefix}/bin/java' '$(pkg_path_for glibc)/lib/ld-linux-x86-64.so.2'"
  build_line "Setting rpath for '${pkg_prefix}/bin/java' to '$LD_RUN_PATH'"
  # Patch only real ELF executables (file -i filter), not shell wrappers.
  find "$pkg_prefix"/bin -type f -executable \
    -exec sh -c 'file -i "$1" | grep -q "x-executable; charset=binary"' _ {} \; \
    -exec patchelf --interpreter "$(pkg_path_for glibc)/lib/ld-linux-x86-64.so.2" --set-rpath "${LD_RUN_PATH}" {} \;
  find "$pkg_prefix/lib/amd64" -name '*.so' -type f \
    -exec patchelf --set-rpath "${LD_RUN_PATH}" {} \;
}
# No-op: ship the prebuilt binaries exactly as provided upstream.
do_strip() {
  return 0
}
|
#!/bin/sh
set -e
# Emit the full deprecation notice in one heredoc instead of a chain of
# echo statements, then abort the installation.
cat <<'EOF'
This script is deprecated - installation aborted.

If your tenant references this script, we recommend that you update your tenant to the latest version.
This will provide you with updated deployment instructions on the "Monitor Kubernetes/OpenShift" page.

An alternative approach is to manually set up Kubernetes monitoring.
For more information, please refer to the official documentation:
https://dt-url.net/deprecated-installation
EOF
exit 1
|
<reponame>chlds/util
/* **** Notes
Confirm.
Remarks:
Refer at fn. cals_periodic_event_in_the_day.
//*/
# define CALEND
# define CAR
# include "../../../incl/config.h"
/* Decide whether the monthly "n-th weekday of the month" event *argp occurs
on the day containing timestamp `arg`. Returns 0x01 on a match, 0x00
otherwise (including on a NULL argp or localtime failure).
Relies on R()/AND()/OR() macros from ../../../incl/config.h -- presumably
member access, assignment, and or-assignment respectively; confirm against
config.h before relying on these readings. */
signed(__cdecl cals_monthly_event_in_the_day_of_days(time_t(arg),cals_event_t(*argp))) {
auto struct tm *tp;
auto time_t t;
auto signed i,r;
/* NOTE(review): di, mo, yr, day and flag are declared but never used. */
auto signed short wk,di,mo,yr;
auto signed short day;
auto signed short periodic;
auto signed short flag;
/* Ordinal-week flags in positional order (first..fourth, last),
zero-terminated; the index doubles as the 0-based ordinal candidate. */
auto signed short ordinary_flag[] = {
CALS_THEFIRST,
CALS_THESECOND,
CALS_THETHIRD,
CALS_THEFOURTH,
CALS_THELAST,
0x00,
};
if(!argp) return(0x00);
/* A plain same-day match takes precedence over the ordinal logic. */
r = cals_event_in_the_day(arg,argp);
if(r) return(0x01);
/* No occurrences before the event's own start timestamp. */
t = (R(t,*argp));
if(arg<(t)) return(0x00);
tp = localtime(&t);
if(!tp) return(0x00);
/* Weekday of the event's start date. */
wk = (R(tm_wday,*tp));
t = (arg);
tp = localtime(&t);
if(!tp) return(0x00);
periodic = (R(periodic,*argp));
/* Find the first ordinal flag present in the event's periodic mask. */
AND(i,0x00);
while(*(ordinary_flag+(i))) {
if(periodic&(*(ordinary_flag+(i)))) break;
i++;
}
if(!(*(ordinary_flag+(i)))) return(0x00);
/* Proceed only when `arg` falls on the same weekday as the start date. */
if(!(wk^(R(tm_wday,*tp)))) {
/* i+1 turns the matched index into a 1-based ordinal week number. */
i++;
/* NOTE(review): this tests the entry *after* the matched one against
CALS_THELAST -- e.g. a CALS_THEFOURTH match lands here. Confirm whether
the matched entry was intended instead. */
if(CALS_THELAST&(*(ordinary_flag+(i)))) {
AND(r,0x00);
if(cals_last_days(t)) OR(r,0x01);
return(r);
}
/* Match when the ordinal week of `t` equals the 1-based ordinal. */
r = cv_ord_week(t);
if(!(i^(r))) return(0x01);
}
return(0x00);
}
|
<gh_stars>10-100
package io.jenkins.plugins.gcr.models;
import hudson.EnvVars;
import hudson.model.Run;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
 * Captures the environment variables a coverage build needs for a pull
 * request: the repository ("owner/repo"), the commit hash, the pull request
 * id, and the build URL.
 *
 * Two sources are supported:
 * <ul>
 *   <li>the GitHub Pull Request Builder plugin ({@code ghprbGhRepository} /
 *       {@code ghprbActualCommit}), or</li>
 *   <li>multibranch-style builds, where the repository is parsed from
 *       {@code CHANGE_URL} (HTTPS PR URL) or {@code GIT_URL} (SSH URL).</li>
 * </ul>
 */
public class PluginEnvironment {

    /** Extracts "owner/repo" (group 1) and the PR id (group 2) from an HTTPS pull request URL. */
    private static final Pattern HTTPS_PR_URL_PATTERN = Pattern.compile("https://[^/]*/(.*?)/pull/(.*)");

    /** Extracts "owner/repo" (group 1) from an SSH-style git URL, e.g. git@<host>:<org>/<repo>.git. */
    private static final Pattern SSH_GIT_URL_PATTERN = Pattern.compile("[^/]*@[^/]*:(.*?)\\.git");

    private String pullRequestRepository;
    // Only populated on the ghprb path; stays null otherwise.
    private String gitHash;
    private String pullId;
    private String buildUrl;

    // Constructor

    /**
     * @param env the build's environment variables
     * @throws IllegalArgumentException when a required variable is missing
     *         or a URL variable does not match the expected shape
     */
    public PluginEnvironment(EnvVars env) throws IllegalArgumentException {
        if (env.containsKey("ghprbGhRepository") && env.containsKey("ghprbActualCommit")) {
            pullRequestRepository = get("ghprbGhRepository", env);
            gitHash = get("ghprbActualCommit", env);
        } else {
            // either we receive a http PR (like CHANGE_URL=https://github.com/<org>/<repo>/pull/<id>)
            // or git PR (like GIT_URL=git@<host>:<org>/<repo>.git)
            if (env.containsKey("CHANGE_URL")) {
                String changeUrl = get("CHANGE_URL", env);
                // Patterns are compiled once (static) instead of per construction.
                Matcher matcher = HTTPS_PR_URL_PATTERN.matcher(changeUrl);
                if (matcher.find()) {
                    pullRequestRepository = matcher.group(1);
                    pullId = matcher.group(2);
                } else {
                    throw new IllegalArgumentException(String.format("Can't find the owner/repo from CHANGE_URL environmental variable '%s'", changeUrl));
                }
            } else {
                String gitUrl = get("GIT_URL", env);
                Matcher matcher = SSH_GIT_URL_PATTERN.matcher(gitUrl);
                if (matcher.find()) {
                    pullRequestRepository = matcher.group(1);
                } else {
                    throw new IllegalArgumentException(String.format("Can't find the owner/repo from GIT_URL environmental variable '%s'", gitUrl));
                }
                pullId = get("CHANGE_ID", env);
            }
        }
        buildUrl = get("BUILD_URL", env);
    }

    // Getters / Setters

    /** @return the commit hash, or null when not built via the ghprb plugin */
    public String getGitHash() {
        return gitHash;
    }

    /** @return the pull request id, or null on the ghprb path */
    public String getPullId() {
        return pullId;
    }

    /** @return the repository in "owner/repo" form */
    public String getPullRequestRepository() {
        return pullRequestRepository;
    }

    /** @return this build's URL (BUILD_URL) */
    public String getBuildUrl() {
        return buildUrl;
    }

    // Helpers

    /** Reads a required key from the environment, failing loudly when absent. */
    private String get(String key, EnvVars env) throws IllegalArgumentException {
        if (env.containsKey(key)) {
            return env.get(key);
        } else {
            // TODO: localize
            throw new IllegalArgumentException(String.format("Failed to get required environmental variable '%s'", key));
        }
    }
}
|
# Launch Postman detached from this shell, discarding its output.
# Fail fast if POSTMAN_HOME is unset/invalid instead of silently running
# ./Postman from whatever the current directory happens to be.
cd "$POSTMAN_HOME" || exit 1
nohup ./Postman >/dev/null 2>&1 &
|
#!/usr/bin/env bash
# Copyright 2016 The Kubernetes Authors.
# Copyright 2020 Authors of Arktos - file modified.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Due to the GCE custom metadata size limit, we split the entire script into two
# files configure.sh and configure-helper.sh. The functionality of downloading
# kubernetes configuration, manifests, docker images, and binary files are
# put in configure.sh, which is uploaded via GCE custom metadata.
# Abort on any error, on use of an unset variable, and on failure anywhere
# in a pipeline.
set -o errexit
set -o nounset
set -o pipefail
### Hardcoded constants
# Fallback versions and SHA1s, used when kube-env does not override them.
DEFAULT_CNI_VERSION="v0.7.5"
DEFAULT_CNI_SHA1="52e9d2de8a5f927307d9397308735658ee44ab8d"
DEFAULT_NPD_VERSION="v0.8.0"
DEFAULT_NPD_SHA1="9406c975b1b035995a137029a004622b905b4e7f"
DEFAULT_CRICTL_VERSION="v1.16.1"
DEFAULT_CRICTL_SHA1="8d7b788bf0a52bd3248407c6ebf779ffead27c99"
DEFAULT_MOUNTER_TAR_SHA="8003b798cf33c7f91320cd6ee5cec4fa22244571"
###
# Use --retry-connrefused opt only if it's supported by curl.
CURL_RETRY_CONNREFUSED=""
if curl --help | grep -q -- '--retry-connrefused'; then
CURL_RETRY_CONNREFUSED='--retry-connrefused'
fi
# Replace /etc/motd with a notice that node setup is broken (or still in
# progress) plus the systemctl commands to inspect it. Cleared again by the
# configure-helper once setup succeeds.
function set-broken-motd {
cat > /etc/motd <<EOF
Broken (or in progress) Kubernetes node setup! Check the cluster initialization status
using the following commands.
Master instance:
- sudo systemctl status kube-master-installation
- sudo systemctl status kube-master-configuration
Node instance:
- sudo systemctl status kube-node-installation
- sudo systemctl status kube-node-configuration
EOF
}
# Fetch kube-env from the GCE metadata server and convert it from YAML into
# a shell-sourceable file at ${KUBE_HOME}/kube-env ("readonly K=V" lines).
# Runs in a subshell with umask 077 so the temp and output files are private.
function download-kube-env {
# Fetch kube-env from GCE metadata server.
(
umask 077
local -r tmp_kube_env="/tmp/kube-env.yaml"
curl --fail --retry 5 --retry-delay 3 ${CURL_RETRY_CONNREFUSED} --silent --show-error \
-H "X-Google-Metadata-Request: True" \
-o "${tmp_kube_env}" \
http://metadata.google.internal/computeMetadata/v1/instance/attributes/kube-env
# Convert the yaml format file into a shell-style file.
# NOTE(review): the python output is redirected into ${KUBE_HOME}/kube-env,
# so the surrounding eval $(...) evaluates an empty string -- presumably
# vestigial; confirm before removing. Also note iteritems() implies this
# expects python 2.
eval $(python -c '''
import pipes,sys,yaml
for k,v in yaml.load(sys.stdin).iteritems():
print("readonly {var}={value}".format(var = k, value = pipes.quote(str(v))))
''' < "${tmp_kube_env}" > "${KUBE_HOME}/kube-env")
rm -f "${tmp_kube_env}"
)
}
# Download the kubeconfig for one tenant partition from instance metadata.
# $1 - destination path for the kubeconfig
# $2 - tenant partition number (metadata attribute "tp-<n>")
# The file is required: exits non-zero when the attribute cannot be fetched.
function download-tenantpartition-kubeconfig {
  local -r dest="$1"
  local -r tp_num="$2"
  echo "Downloading tenant partition kubeconfig file, if it exists"
  (
    umask 077
    local -r tmp_tenantpartition_kubeconfig="/tmp/tenant_parition_kubeconfig"
    if curl --fail --retry 5 --retry-delay 3 ${CURL_RETRY_CONNREFUSED} --silent --show-error \
      -H "X-Google-Metadata-Request: True" \
      -o "${tmp_tenantpartition_kubeconfig}" \
      "http://metadata.google.internal/computeMetadata/v1/instance/attributes/tp-${tp_num}"; then
      # only write to the final location if curl succeeds
      mv "${tmp_tenantpartition_kubeconfig}" "${dest}"
      # Fix: quote ${dest} so paths with spaces are handled.
      # NOTE(review): mode 755 leaves the kubeconfig world-readable despite
      # the umask 077 above -- confirm whether 600 was intended.
      chmod 755 "${dest}"
    else
      echo "== Failed to download required tenant partition config file from metadata server =="
      exit 1
    fi
  )
}
# Download one kubeconfig per tenant partition (1..SCALEOUT_TP_COUNT) into
# ${KUBE_HOME}/tp-kubeconfigs/tp-<n>-kubeconfig.
function download-tenantpartition-kubeconfigs {
  local -r dest_dir="${KUBE_HOME}/tp-kubeconfigs"
  mkdir -p "${dest_dir}"
  local tp_num
  for (( tp_num = 1; tp_num <= SCALEOUT_TP_COUNT; tp_num++ )); do
    local dest="${dest_dir}/tp-${tp_num}-kubeconfig"
    echo "DBG: download tenant partition kubeconfig: ${dest}"
    download-tenantpartition-kubeconfig "${dest}" "${tp_num}"
  done
}
# Fetch the kubelet config from GCE instance metadata into $1.
# Missing metadata is fatal only when REQUIRE_METADATA_KUBELET_CONFIG_FILE
# is "true"; otherwise it is silently skipped.
function download-kubelet-config {
  local -r dest="$1"
  echo "Downloading Kubelet config file, if it exists"
  (
    umask 077
    local -r tmp_kubelet_config="/tmp/kubelet-config.yaml"
    if curl --fail --retry 5 --retry-delay 3 ${CURL_RETRY_CONNREFUSED} --silent --show-error \
      -H "X-Google-Metadata-Request: True" \
      -o "${tmp_kubelet_config}" \
      http://metadata.google.internal/computeMetadata/v1/instance/attributes/kubelet-config; then
      # Only write to the final location once the download has succeeded.
      mv "${tmp_kubelet_config}" "${dest}"
      return 0
    fi
    # Download failed: fatal only when the config is explicitly required.
    if [[ "${REQUIRE_METADATA_KUBELET_CONFIG_FILE:-false}" == "true" ]]; then
      echo "== Failed to download required Kubelet config file from metadata server =="
      exit 1
    fi
  )
}
# Fetch the apiserver config from GCE instance metadata into $1.
# A missing metadata attribute is not an error; the function is a no-op then.
function download-apiserver-config {
  local -r dest="$1"
  echo "Downloading apiserver config file, if it exists"
  (
    umask 077
    local -r staging_file="/tmp/apiserver.config"
    if curl --fail --retry 5 --retry-delay 3 ${CURL_RETRY_CONNREFUSED} --silent --show-error \
      -H "X-Google-Metadata-Request: True" \
      -o "${staging_file}" \
      http://metadata.google.internal/computeMetadata/v1/instance/attributes/apiserver-config; then
      # Only write to the final location once the download has succeeded.
      mv "${staging_file}" "${dest}"
    fi
  )
}
# Fetch kube-master-certs from the GCE metadata server and convert it from
# YAML into a shell-sourceable file at ${KUBE_HOME}/kube-master-certs.
# Runs in a subshell with umask 077 so the files are private.
function download-kube-master-certs {
# Fetch kube-env from GCE metadata server.
(
umask 077
local -r tmp_kube_master_certs="/tmp/kube-master-certs.yaml"
curl --fail --retry 5 --retry-delay 3 ${CURL_RETRY_CONNREFUSED} --silent --show-error \
-H "X-Google-Metadata-Request: True" \
-o "${tmp_kube_master_certs}" \
http://metadata.google.internal/computeMetadata/v1/instance/attributes/kube-master-certs
# Convert the yaml format file into a shell-style file.
# NOTE(review): as in download-kube-env, the python output goes to the file,
# so the eval $(...) evaluates nothing -- presumably vestigial; confirm.
eval $(python -c '''
import pipes,sys,yaml
for k,v in yaml.load(sys.stdin).iteritems():
print("readonly {var}={value}".format(var = k, value = pipes.quote(str(v))))
''' < "${tmp_kube_master_certs}" > "${KUBE_HOME}/kube-master-certs")
rm -f "${tmp_kube_master_certs}"
)
}
# Fetch the workload controller config from GCE instance metadata into $1.
# A missing metadata attribute is not an error; the function is a no-op then.
function download-controller-config {
  local -r dest="$1"
  echo "Downloading controller config file, if it exists"
  # Fetch the controller config file from the GCE metadata server.
  (
    umask 077
    local -r tmp_controller_config="/tmp/controllerconfig.json"
    if curl --fail --retry 5 --retry-delay 3 ${CURL_RETRY_CONNREFUSED} --silent --show-error \
      -H "X-Google-Metadata-Request: True" \
      -o "${tmp_controller_config}" \
      http://metadata.google.internal/computeMetadata/v1/instance/attributes/controllerconfig; then
      # only write to the final location if curl succeeds
      # Fix: quote the mv arguments, consistent with the sibling
      # download-*-config helpers.
      mv "${tmp_controller_config}" "${dest}"
    fi
  )
}
# Verify that a file's SHA1 digest matches the expected value.
# $1 - path to the file
# $2 - expected sha1 hex digest
# Prints a diagnostic and returns 1 on mismatch; returns 0 on match.
function validate-hash {
  local -r file="$1"
  local -r expected="$2"
  local actual
  # Fix: quote ${file} so paths with spaces/glob chars hash the right file;
  # `|| true` keeps errexit from aborting when sha1sum fails.
  actual=$(sha1sum "${file}" | awk '{ print $1 }') || true
  if [[ "${actual}" != "${expected}" ]]; then
    echo "== ${file} corrupted, sha1 ${actual} doesn't match expected ${expected} =="
    return 1
  fi
}
# Get default service account credentials of the VM.
GCE_METADATA_INTERNAL="http://metadata.google.internal/computeMetadata/v1/instance"
# Print an OAuth2 access token for the VM's default service account, parsed
# out of the metadata server's JSON response.
function get-credentials {
curl "${GCE_METADATA_INTERNAL}/service-accounts/default/token" -H "Metadata-Flavor: Google" -s | python -c \
'import sys; import json; print(json.loads(sys.stdin.read())["access_token"])'
}
# Succeed iff the VM's default service account has a Cloud Storage
# ("auth/devstorage") OAuth scope; used to decide whether GCS downloads
# can send an authorization header.
function valid-storage-scope {
curl "${GCE_METADATA_INTERNAL}/service-accounts/default/scopes" -H "Metadata-Flavor: Google" -s | grep -q "auth/devstorage"
}
# Retry a download until we get it. Takes a hash and a set of URLs.
#
# $1 is the sha1 of the URL. Can be "" if the sha1 is unknown.
# $2+ are the URLs to download.
# Retry downloading each URL in turn until one succeeds and, when a hash is
# given, validates. The basename of the URL is used as the local file name.
# $1 - sha1 of the file ("" to skip hash validation)
# $2+ - candidate URLs
# Note: loops forever if every URL keeps failing.
function download-or-bust {
  local -r hash="$1"
  shift 1
  # Fix: "$@" (not $*) so each URL stays a single word regardless of IFS.
  local -r urls=( "$@" )
  while true; do
    for url in "${urls[@]}"; do
      local file="${url##*/}"
      rm -f "${file}"
      # if the url belongs to GCS API we should use oauth2_token in the headers
      local curl_headers=""
      if [[ "$url" =~ ^https://storage.googleapis.com.* ]] && valid-storage-scope ; then
        curl_headers="Authorization: Bearer $(get-credentials)"
      fi
      if ! curl ${curl_headers:+-H "${curl_headers}"} -f --ipv4 -Lo "${file}" --connect-timeout 20 --max-time 300 --retry 6 --retry-delay 10 ${CURL_RETRY_CONNREFUSED} "${url}"; then
        echo "== Failed to download ${url}. Retrying. =="
      elif [[ -n "${hash}" ]] && ! validate-hash "${file}" "${hash}"; then
        echo "== Hash validation of ${url} failed. Retrying. =="
      else
        if [[ -n "${hash}" ]]; then
          echo "== Downloaded ${url} (SHA1 = ${hash}) =="
        else
          echo "== Downloaded ${url} =="
        fi
        return
      fi
    done
  done
}
# Succeed iff the preload marker file records the "<key>,<value>" pair,
# i.e. the artifact was already baked into the image.
# $1 - artifact key, $2 - artifact hash/value
function is-preloaded {
  grep -qs "${1},${2}" "${KUBE_HOME}/preload_info"
}
# Print each comma-separated element of $1 on its own line.
function split-commas {
  # Fix: quote "$1" so the value is not field-split or glob-expanded.
  echo "$1" | tr ',' '\n'
}
# Ensure the flexvolume plugin directory exists and is bind-remounted with
# the exec option enabled.
# $1 - flexvolume plugin directory path
function remount-flexvolume-directory {
  # Fix: quote the path expansions so directories with spaces work.
  local -r flexvolume_plugin_dir="$1"
  mkdir -p "${flexvolume_plugin_dir}"
  mount --bind "${flexvolume_plugin_dir}" "${flexvolume_plugin_dir}"
  mount -o remount,exec "${flexvolume_plugin_dir}"
}
# Install the containerized mounter (binary + rootfs) used by kubelet on
# GCI/COS. Skipped when the preload marker already records this tar's hash.
function install-gci-mounter-tools {
CONTAINERIZED_MOUNTER_HOME="${KUBE_HOME}/containerized_mounter"
local -r mounter_tar_sha="${DEFAULT_MOUNTER_TAR_SHA}"
if is-preloaded "mounter" "${mounter_tar_sha}"; then
echo "mounter is preloaded."
return
fi
echo "Downloading gci mounter tools."
mkdir -p "${CONTAINERIZED_MOUNTER_HOME}"
chmod a+x "${CONTAINERIZED_MOUNTER_HOME}"
mkdir -p "${CONTAINERIZED_MOUNTER_HOME}/rootfs"
# NOTE(review): download-or-bust writes mounter.tar into the current
# directory; the mv below expects it at ${KUBE_HOME}. This assumes the
# caller already did `cd "${KUBE_HOME}"` (install-kube-binary-config does).
download-or-bust "${mounter_tar_sha}" "https://storage.googleapis.com/kubernetes-release/gci-mounter/mounter.tar"
cp "${KUBE_HOME}/kubernetes/server/bin/mounter" "${CONTAINERIZED_MOUNTER_HOME}/mounter"
chmod a+x "${CONTAINERIZED_MOUNTER_HOME}/mounter"
mv "${KUBE_HOME}/mounter.tar" /tmp/mounter.tar
tar xf /tmp/mounter.tar -C "${CONTAINERIZED_MOUNTER_HOME}/rootfs"
rm /tmp/mounter.tar
mkdir -p "${CONTAINERIZED_MOUNTER_HOME}/rootfs/var/lib/kubelet"
}
# Install node problem detector binary.
# Version/hash come from NODE_PROBLEM_DETECTOR_VERSION/_TAR_HASH when set,
# otherwise the DEFAULT_NPD_* constants. Skipped when preloaded.
function install-node-problem-detector {
if [[ -n "${NODE_PROBLEM_DETECTOR_VERSION:-}" ]]; then
local -r npd_version="${NODE_PROBLEM_DETECTOR_VERSION}"
local -r npd_sha1="${NODE_PROBLEM_DETECTOR_TAR_HASH}"
else
local -r npd_version="${DEFAULT_NPD_VERSION}"
local -r npd_sha1="${DEFAULT_NPD_SHA1}"
fi
local -r npd_tar="node-problem-detector-${npd_version}.tar.gz"
if is-preloaded "${npd_tar}" "${npd_sha1}"; then
echo "${npd_tar} is preloaded."
return
fi
echo "Downloading ${npd_tar}."
local -r npd_release_path="${NODE_PROBLEM_DETECTOR_RELEASE_PATH:-https://storage.googleapis.com/kubernetes-release}"
download-or-bust "${npd_sha1}" "${npd_release_path}/node-problem-detector/${npd_tar}"
local -r npd_dir="${KUBE_HOME}/node-problem-detector"
mkdir -p "${npd_dir}"
# NOTE(review): the tar is extracted from ${KUBE_HOME}/${npd_tar}; like the
# mounter install, this assumes the caller's cwd was ${KUBE_HOME} when
# download-or-bust ran.
tar xzf "${KUBE_HOME}/${npd_tar}" -C "${npd_dir}" --overwrite
# Promote the binaries into KUBE_BIN and clean up the scratch dir/tarball.
mv "${npd_dir}/bin"/* "${KUBE_BIN}"
chmod a+x "${KUBE_BIN}/node-problem-detector"
rmdir "${npd_dir}/bin"
rm -f "${KUBE_HOME}/${npd_tar}"
}
# Write the CNI configuration matching NETWORK_POLICY_PROVIDER (defaults to
# flannel); the flannel path also downloads its daemonset manifest.
# Any other provider value leaves /etc/cni/net.d empty.
function install-cni-network {
  mkdir -p /etc/cni/net.d
  local -r provider="${NETWORK_POLICY_PROVIDER:-flannel}"
  if [[ "${provider}" == "flannel" ]]; then
    setup-flannel-cni-conf
    install-flannel-yml
  elif [[ "${provider}" == "bridge" ]]; then
    setup-bridge-cni-conf
  fi
}
# Write a single-plugin bridge CNI config (cni0, host-local IPAM on
# 10.88.0.0/16, IP masquerade, default route) to /etc/cni/net.d.
function setup-bridge-cni-conf {
cat > /etc/cni/net.d/bridge.conf <<EOF
{
  "cniVersion": "0.3.1",
  "name": "containerd-net",
  "type": "bridge",
  "bridge": "cni0",
  "isGateway": true,
  "ipMasq": true,
  "ipam": {
    "type": "host-local",
    "subnet": "10.88.0.0/16",
    "routes": [
      { "dst": "0.0.0.0/0" }
    ]
  }
}
EOF
}
# Write the flannel CNI conflist (flannel delegate plus portmap plugin for
# hostPort support) to /etc/cni/net.d.
function setup-flannel-cni-conf {
cat > /etc/cni/net.d/10-flannel.conflist <<EOF
{
  "cniVersion": "0.3.1",
  "name": "cbr0",
  "plugins": [
    {
      "type": "flannel",
      "delegate": {
        "hairpinMode": true,
        "isDefaultGateway": true
      }
    },
    {
      "type": "portmap",
      "capabilities": {
        "portMappings": true
      }
    }
  ]
}
EOF
}
# Download the kube-flannel daemonset manifest into ${KUBE_HOME}/flannel/.
function install-flannel-yml {
  echo "downloading flannel"
  download-or-bust "" "https://raw.githubusercontent.com/coreos/flannel/master/Documentation/kube-flannel.yml"
  local -r dest_dir="${KUBE_HOME}/flannel"
  mkdir -p "${dest_dir}"
  mv "${KUBE_HOME}/kube-flannel.yml" "${dest_dir}"
}
# Download the CNI plugin binaries (version from CNI_VERSION/CNI_SHA1, else
# the defaults) and move them into ${KUBE_BIN}. Skipped when preloaded.
function install-cni-binaries {
if [[ -n "${CNI_VERSION:-}" ]]; then
local -r cni_tar="cni-plugins-amd64-${CNI_VERSION}.tgz"
local -r cni_sha1="${CNI_SHA1}"
else
local -r cni_tar="cni-plugins-amd64-${DEFAULT_CNI_VERSION}.tgz"
local -r cni_sha1="${DEFAULT_CNI_SHA1}"
fi
if is-preloaded "${cni_tar}" "${cni_sha1}"; then
echo "${cni_tar} is preloaded."
return
fi
echo "Downloading cni binaries"
download-or-bust "${cni_sha1}" "https://storage.googleapis.com/kubernetes-release/network-plugins/${cni_tar}"
local -r cni_dir="${KUBE_HOME}/cni"
mkdir -p "${cni_dir}/bin"
tar xzf "${KUBE_HOME}/${cni_tar}" -C "${cni_dir}/bin" --overwrite
# Promote the plugins into KUBE_BIN, then clean up the scratch dir/tarball.
mv "${cni_dir}/bin"/* "${KUBE_BIN}"
rmdir "${cni_dir}/bin"
rm -f "${KUBE_HOME}/${cni_tar}"
}
# Install crictl binary.
# Writes /etc/crictl.yaml (pointing at CONTAINER_RUNTIME_ENDPOINT or the
# dockershim socket) regardless, then downloads crictl unless preloaded.
function install-crictl {
if [[ -n "${CRICTL_VERSION:-}" ]]; then
local -r crictl_version="${CRICTL_VERSION}"
local -r crictl_sha1="${CRICTL_TAR_HASH}"
else
local -r crictl_version="${DEFAULT_CRICTL_VERSION}"
local -r crictl_sha1="${DEFAULT_CRICTL_SHA1}"
fi
local -r crictl="crictl-${crictl_version}-linux-amd64"
# Create crictl config file.
cat > /etc/crictl.yaml <<EOF
runtime-endpoint: ${CONTAINER_RUNTIME_ENDPOINT:-unix:///var/run/dockershim.sock}
EOF
if is-preloaded "${crictl}" "${crictl_sha1}"; then
echo "crictl is preloaded"
return
fi
echo "Downloading crictl"
local -r crictl_path="https://storage.googleapis.com/kubernetes-release/crictl"
download-or-bust "${crictl_sha1}" "${crictl_path}/${crictl}"
mv "${KUBE_HOME}/${crictl}" "${KUBE_BIN}/crictl"
chmod a+x "${KUBE_BIN}/crictl"
}
# Install the gke-exec-auth-plugin binary (and its license file) when the
# download URLs are provided via kube-env; a no-op otherwise.
function install-exec-auth-plugin {
  if [[ -z "${EXEC_AUTH_PLUGIN_URL:-}" ]]; then
    return
  fi
  echo "Downloading gke-exec-auth-plugin binary"
  download-or-bust "${EXEC_AUTH_PLUGIN_SHA1}" "${EXEC_AUTH_PLUGIN_URL}"
  mv "${KUBE_HOME}/gke-exec-auth-plugin" "${KUBE_BIN}/gke-exec-auth-plugin"
  chmod a+x "${KUBE_BIN}/gke-exec-auth-plugin"
  if [[ -z "${EXEC_AUTH_PLUGIN_LICENSE_URL:-}" ]]; then
    return
  fi
  echo "Downloading gke-exec-auth-plugin license"
  download-or-bust "" "${EXEC_AUTH_PLUGIN_LICENSE_URL}"
  mv "${KUBE_HOME}/LICENSE" "${KUBE_BIN}/gke-exec-auth-plugin-license"
}
# Download and unpack the kube-manifests tarball, optionally rewrite the
# image registry, and install the helper scripts it carries into KUBE_BIN.
function install-kube-manifests {
# Put kube-system pods manifests in ${KUBE_HOME}/kube-manifests/.
local dst_dir="${KUBE_HOME}/kube-manifests"
mkdir -p "${dst_dir}"
local -r manifests_tar_urls=( $(split-commas "${KUBE_MANIFESTS_TAR_URL}") )
local -r manifests_tar="${manifests_tar_urls[0]##*/}"
if [ -n "${KUBE_MANIFESTS_TAR_HASH:-}" ]; then
local -r manifests_tar_hash="${KUBE_MANIFESTS_TAR_HASH}"
else
echo "Downloading k8s manifests sha1 (not found in env)"
# Download the .sha1 files that sit next to the tarballs.
download-or-bust "" "${manifests_tar_urls[@]/.tar.gz/.tar.gz.sha1}"
local -r manifests_tar_hash=$(cat "${manifests_tar}.sha1")
fi
if is-preloaded "${manifests_tar}" "${manifests_tar_hash}"; then
echo "${manifests_tar} is preloaded."
return
fi
echo "Downloading k8s manifests tar"
download-or-bust "${manifests_tar_hash}" "${manifests_tar_urls[@]}"
tar xzf "${KUBE_HOME}/${manifests_tar}" -C "${dst_dir}" --overwrite
# Rewrite image references when a non-default addon registry is configured.
local -r kube_addon_registry="${KUBE_ADDON_REGISTRY:-k8s.gcr.io}"
if [[ "${kube_addon_registry}" != "k8s.gcr.io" ]]; then
find "${dst_dir}" -name \*.yaml -or -name \*.yaml.in | \
xargs sed -ri "s@(image:\s.*)k8s.gcr.io@\1${kube_addon_registry}@"
find "${dst_dir}" -name \*.manifest -or -name \*.json | \
xargs sed -ri "s@(image\":\s+\")k8s.gcr.io@\1${kube_addon_registry}@"
fi
# Install the helper scripts shipped inside the tarball.
cp "${dst_dir}/kubernetes/gci-trusty/gci-configure-helper.sh" "${KUBE_BIN}/configure-helper.sh"
cp "${dst_dir}/kubernetes/gci-trusty/partitionserver-configure-helper.sh" "${KUBE_BIN}/partitionserver-configure-helper.sh"
if [[ -e "${dst_dir}/kubernetes/gci-trusty/gke-internal-configure-helper.sh" ]]; then
cp "${dst_dir}/kubernetes/gci-trusty/gke-internal-configure-helper.sh" "${KUBE_BIN}/"
fi
cp "${dst_dir}/kubernetes/gci-trusty/health-monitor.sh" "${KUBE_BIN}/health-monitor.sh"
cp "${dst_dir}/kubernetes/gci-trusty/configure-helper-common.sh" "${KUBE_BIN}/configure-helper-common.sh"
rm -f "${KUBE_HOME}/${manifests_tar}"
rm -f "${KUBE_HOME}/${manifests_tar}.sha1"
}
# A helper function for loading a docker image. It keeps trying up to 5 times.
#
# $1: Full path of the docker image
# Exits the script (status 1) if all attempts fail.
function try-load-docker-image {
local -r img=$1
echo "Try to load docker image file ${img}"
# Temporarily turn off errexit, because we don't want to exit on first failure.
set +e
local -r max_attempts=5
local -i attempt_num=1
# Each attempt is capped at 30s; LOAD_IMAGE_COMMAND lets callers swap in a
# different runtime's load command (default: docker load -i).
until timeout 30 ${LOAD_IMAGE_COMMAND:-docker load -i} "${img}"; do
if [[ "${attempt_num}" == "${max_attempts}" ]]; then
echo "Fail to load docker image file ${img} after ${max_attempts} retries. Exit!!"
exit 1
else
attempt_num=$((attempt_num+1))
sleep 5
fi
done
# Re-enable errexit.
set -e
}
# Pre-load the kube-system docker images from ${KUBE_HOME}/kube-docker-files
# before kubelet starts (kubelet may restart the docker daemon, which can
# interfere with image loading). Masters load the control-plane images;
# nodes load only kube-proxy.
function load-docker-images {
  echo "Start loading kube-system docker images"
  local -r img_dir="${KUBE_HOME}/kube-docker-files"
  if [[ "${KUBERNETES_MASTER:-}" == "true" ]]; then
    local img
    for img in kube-apiserver kube-controller-manager kube-scheduler workload-controller-manager; do
      try-load-docker-image "${img_dir}/${img}.tar"
    done
  else
    try-load-docker-image "${img_dir}/kube-proxy.tar"
  fi
}
# Downloads kubernetes binaries and kube-system manifest tarball, unpacks them,
# and places them into suitable directories. Files are placed in /home/kubernetes.
function install-kube-binary-config {
  cd "${KUBE_HOME}"
  # SERVER_BINARY_TAR_URL may list several comma-separated mirrors; all share
  # the same basename, which is used as the local file name.
  local -r server_binary_tar_urls=( $(split-commas "${SERVER_BINARY_TAR_URL}") )
  local -r server_binary_tar="${server_binary_tar_urls[0]##*/}"
  # Prefer the hash from the environment; otherwise fetch the .sha1 sidecar.
  if [[ -n "${SERVER_BINARY_TAR_HASH:-}" ]]; then
    local -r server_binary_tar_hash="${SERVER_BINARY_TAR_HASH}"
  else
    echo "Downloading binary release sha1 (not found in env)"
    download-or-bust "" "${server_binary_tar_urls[@]/.tar.gz/.tar.gz.sha1}"
    local -r server_binary_tar_hash=$(cat "${server_binary_tar}.sha1")
  fi
  if is-preloaded "${server_binary_tar}" "${server_binary_tar_hash}"; then
    echo "${server_binary_tar} is preloaded."
  else
    echo "Downloading binary release tar"
    download-or-bust "${server_binary_tar_hash}" "${server_binary_tar_urls[@]}"
    tar xzf "${KUBE_HOME}/${server_binary_tar}" -C "${KUBE_HOME}" --overwrite
    # Copy docker_tag and image files to ${KUBE_HOME}/kube-docker-files.
    local -r src_dir="${KUBE_HOME}/kubernetes/server/bin"
    local dst_dir="${KUBE_HOME}/kube-docker-files"
    mkdir -p "${dst_dir}"
    cp "${src_dir}/"*.docker_tag "${dst_dir}"
    if [[ "${KUBERNETES_MASTER:-}" == "false" ]]; then
      # Worker nodes only need the proxy image.
      cp "${src_dir}/kube-proxy.tar" "${dst_dir}"
    else
      # Master nodes get every control-plane image plus the addon manifests.
      cp "${src_dir}/kube-apiserver.tar" "${dst_dir}"
      cp "${src_dir}/kube-controller-manager.tar" "${dst_dir}"
      cp "${src_dir}/kube-scheduler.tar" "${dst_dir}"
      cp "${src_dir}/workload-controller-manager.tar" "${dst_dir}"
      cp -r "${KUBE_HOME}/kubernetes/addons" "${dst_dir}"
    fi
    # Load images before kubelet starts (see load-docker-images above).
    load-docker-images
    mv "${src_dir}/kubelet" "${KUBE_BIN}"
    mv "${src_dir}/kubectl" "${KUBE_BIN}"
    mv "${KUBE_HOME}/kubernetes/LICENSES" "${KUBE_HOME}"
    mv "${KUBE_HOME}/kubernetes/kubernetes-src.tar.gz" "${KUBE_HOME}"
  fi
  # Standalone node-problem-detector is only installed on worker nodes.
  if [[ "${KUBERNETES_MASTER:-}" == "false" ]] && \
     [[ "${ENABLE_NODE_PROBLEM_DETECTOR:-}" == "standalone" ]]; then
    install-node-problem-detector
  fi
  if [[ "${NETWORK_PROVIDER:-}" == "kubenet" ]] || \
     [[ "${NETWORK_PROVIDER:-}" == "cni" ]]; then
    install-cni-binaries
    install-cni-network
  fi
  # Put kube-system pods manifests in ${KUBE_HOME}/kube-manifests/.
  install-kube-manifests
  chmod -R 755 "${KUBE_BIN}"
  # Install gci mounter related artifacts to allow mounting storage volumes in GCI
  install-gci-mounter-tools
  # Remount the Flexvolume directory with the "exec" option, if needed.
  if [[ "${REMOUNT_VOLUME_PLUGIN_DIR:-}" == "true" && -n "${VOLUME_PLUGIN_DIR:-}" ]]; then
    remount-flexvolume-directory "${VOLUME_PLUGIN_DIR}"
  fi
  # Install crictl on each node.
  install-crictl
  # TODO(awly): include the binary and license in the OS image.
  install-exec-auth-plugin
  # Clean up the unpacked tree and the downloaded tarball/checksum.
  rm -rf "${KUBE_HOME}/kubernetes"
  rm -f "${KUBE_HOME}/${server_binary_tar}"
  rm -f "${KUBE_HOME}/${server_binary_tar}.sha1"
}
######### Main Function ##########
# Entry point: download config + certs, then install binaries and manifests.
# redirect stdout/stderr to a file
exec >> /var/log/master-init.log 2>&1
echo "Start to install kubernetes files"
# if install fails, message-of-the-day (motd) will warn at login shell
set-broken-motd
KUBE_HOME="/home/kubernetes"
KUBE_BIN="${KUBE_HOME}/bin"
# download and source kube-env (provides SERVER_BINARY_TAR_URL etc.)
download-kube-env
source "${KUBE_HOME}/kube-env"
# Node/master component configuration files.
download-kubelet-config "${KUBE_HOME}/kubelet-config.yaml"
download-controller-config "${KUBE_HOME}/controllerconfig.json"
download-apiserver-config "${KUBE_HOME}/apiserver.config"
# Resource-partition masters additionally need tenant-partition kubeconfigs.
if [[ "${KUBERNETES_RESOURCE_PARTITION:-false}" == "true" ]]; then
  download-tenantpartition-kubeconfigs
fi
# master certs
if [[ "${KUBERNETES_MASTER:-}" == "true" ]]; then
  download-kube-master-certs
fi
# binaries and kube-system manifests
install-kube-binary-config
echo "Done for installing kubernetes files"
|
def get_mean_median_mode(numbers):
    """Return (mean, median, mode) for a non-empty sequence of numbers.

    Bug fix: the original sorted the caller's list in place; a sorted
    copy is used instead, so the argument is no longer mutated.

    Args:
        numbers: non-empty sequence of numeric values.

    Returns:
        Tuple (mean, median, mode). When several values share the highest
        frequency the smallest one wins, matching the original dict-based
        implementation (keys were inserted in sorted order and ties kept
        the earlier key).

    Raises:
        ZeroDivisionError: if ``numbers`` is empty (unchanged behavior).
    """
    from collections import Counter  # local import: this fragment has no import header

    ordered = sorted(numbers)  # sorted copy: fixes mutation of the caller's list
    mean = sum(ordered) / len(ordered)
    # Median: average the two middle items for even lengths.
    mid = len(ordered) // 2
    if len(ordered) % 2 == 0:
        median = (ordered[mid - 1] + ordered[mid]) / 2
    else:
        median = ordered[mid]
    # Mode: Counter preserves insertion order and `ordered` is sorted, so
    # most_common(1) breaks frequency ties toward the smallest value.
    counts = Counter(ordered)
    mode, _ = counts.most_common(1)[0]
    return mean, median, mode
// Doxygen-generated search index bucket: each entry is
// [lowercasedSymbol, [displayName, [htmlAnchor, flag, contextLabel]]].
// Generated file — regenerate via the documentation build instead of editing.
var searchData=
[
  ['wrap_5fcenterarray_5fgetter',['wrap_centerarray_getter',['../_py_engine_8cpp.html#a95b2f2107f6303224f8c84b212687de8',1,'PyEngine.cpp']]],
  ['wrap_5fcenterarray_5fsetter',['wrap_centerarray_setter',['../_py_engine_8cpp.html#a6a7ca67856c282467d89386fecdbc4e0',1,'PyEngine.cpp']]],
  ['writepdb',['writePDB',['../class_smol_dock_1_1_p_d_b_writer.html#aaad8350fc387575ed549a1fd8569aad2',1,'SmolDock::PDBWriter']]],
  ['writetomolblock',['writeToMolBlock',['../class_smol_dock_1_1_molecule.html#a46a5d767bd2ae8cefd06049c67de1617',1,'SmolDock::Molecule']]],
  ['writetomolfile',['writeToMolFile',['../class_smol_dock_1_1_molecule.html#ad8adfe01a02d8d9b3dbd76c7b18a729e',1,'SmolDock::Molecule']]]
];
|
package app.plow.bluetooth;
/**
* Created by Meftah on 9/20/2015.
*/
import android.app.Notification;
import android.app.NotificationManager;
import android.app.PendingIntent;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.util.Log;
import alarmproject.apps.plow.alarmproject.R;
import app.plow.Pairing;
public class ServiceReceiver extends BroadcastReceiver {
Context context;
@Override
public void onReceive(Context context, Intent intent) {
this.context = context;
Log.d("hi","Received in ServiceReceiver");
notify("SmartPlow","est connecté");
Intent service = new Intent(context.getApplicationContext(), BluetoothService.class);
service.addFlags(Intent.FLAG_FROM_BACKGROUND);
Log.d("starter","Set");
context.startService(service);
}
private void notify(String notificationTitle, String notificationMessage){
NotificationManager notificationManager = (NotificationManager) context.getSystemService(Context.NOTIFICATION_SERVICE);
@SuppressWarnings("deprecation")
Notification notification = new Notification(R.mipmap.app_logo,"Plow connecté", System.currentTimeMillis());
Intent notificationIntent = new Intent(context,Pairing.class);
PendingIntent pendingIntent = PendingIntent.getActivity(context, 0,notificationIntent, 0);
notification.setLatestEventInfo(context, notificationTitle,notificationMessage, pendingIntent);
notificationManager.notify(9999, notification);
}
} |
<reponame>eengineergz/Lambda
"use strict";
// NOTE(review): `alert` is not defined in this module; presumably it is
// provided by the enclosing bundle/generator scope — verify against the build.
module.exports = alert;
|
#!/bin/bash
# Builds the Activiti REST webapp's dependencies, then boots it in Tomcat.
# MAVEN_OPTS enables a remote-debug agent on port 8001.
# NOTE(review): "-Xms521M" looks like a typo for 512M — confirm before changing.
export MAVEN_OPTS="-Xms521M -Xmx1024M -XX:MaxPermSize=256M -noverify -Xdebug -Djava.compiler=NONE -Xrunjdwp:transport=dt_socket,address=8001,server=y,suspend=n"
cd ..
# Only boot the webapp when the dependency build succeeds.
if mvn -T 1C -PbuildRestappDependencies clean install; then
  cd modules/activiti-webapp-rest2
  mvn -Dfile.encoding=UTF-8 clean package tomcat7:run
else
  echo "Build failure in dependent project. Cannot boot Activiti Rest."
fi
<reponame>dspuci/dspuci-website-gatsby<gh_stars>0
// Shared color constants for the site theme.
export const lightColor = "#fcfcfc"
export const darkColor = "#303030"
|
#!/bin/bash
# Bootstrap script: prints a banner, installs Homebrew and git when they are
# missing, and finally clones the mac-auto-setup repository.
cat << EOS
TkyLab
me, and the other.
EOS
# Returns success (0) when the named command is resolvable on PATH.
function command_exists {
    command -v "$1" > /dev/null;
}
#
# Install homebrew.
#
if ! command_exists brew ; then
    echo " --------- Homebrew ----------"
    /usr/bin/ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)"
    brew update
    brew upgrade --all --cleanup
    brew -v
    echo " ------------ END ------------"
fi
#
# Install git
#
if ! command_exists git ; then
    echo " ------------ Git ------------"
    brew install git
    git --version
    echo " ------------ END ------------"
fi
#
# mac-auto-setup.git
#
echo " ---- mac-auto-setup.git -----"
git clone https://github.com/tkylab/mac-auto-setup.git
echo " ------------ END ------------"
import subprocess

# Widen the tile CORS allow-list in the service configuration.
# Raw string (r'...') fixes the invalid "\[" escape sequences that the
# original plain string relied on (a DeprecationWarning in Python 3, and
# slated to become a SyntaxError); the bytes passed to sed are unchanged.
subprocess.run(["sed", "-i", r's/ALLOWED_ORIGINS_TILES: List\[str\] = \[\]/ALLOWED_ORIGINS_TILES: List\[str\] = \["*"\]/g', 'terractotta/config.py'])

# Move the zappa_settings.json file into the working directory.
subprocess.run(["mv", "../zappa_settings.json", "."])

# Deploy the application using Zappa; report the outcome from the exit code.
deploy_process = subprocess.run(["zappa", "deploy", "abi"], capture_output=True, text=True)
if deploy_process.returncode == 0:
    print("Application deployed successfully")
else:
    print("Error deploying the application:", deploy_process.stderr)

# Update the deployed application using Zappa (needed when it already exists).
update_process = subprocess.run(["zappa", "update", "abi"], capture_output=True, text=True)
if update_process.returncode == 0:
    print("Application updated successfully")
else:
    print("Error updating the application:", update_process.stderr)
<gh_stars>0
package config
import (
"fmt"
"github.com/lxn/walk"
"io/ioutil"
"log"
"strconv"
)
// Application identity used to namespace the walk settings store.
const (
	Author = "scyking"
	PName  = "GPics"
)

// Keys under which each setting is persisted in settings.ini.
const (
	GitInfoRepositoryKey = "git.info.repository"
	GitInfoServerKey = "git.info.server"
	GitInfoUserNameKey = "git.info.username"
	GitInfoPasswordKey = "git.info.password"
	GitInfoTokenKey = "git.info.token"
	WorkspaceKey = "workspace"
	OnQuickKey = "on-quick"
	QuickDirKey = "quick-dir"
	AutoCommitKey = "auto-commit"
	TimeOutKey = "remote-commit-timeout"
)
// GitInfo holds the remote repository coordinates and credentials.
type GitInfo struct {
	Repository string
	Server string
	UserName string
	Password string
	Token string
}

// Config is the full application configuration loaded from settings.ini.
type Config struct {
	GitInfo
	Workspace string
	OnQuick bool // enable quick upload
	QuickDir string // quick-upload target directory
	AutoCommit bool // automatically commit to the remote
	TimeOut int // remote-commit timeout in seconds
}
// init registers the application identity with walk, loads settings.ini,
// and seeds defaults (quick upload off, auto commit off, 3s timeout) for
// any keys missing on first run. Settings I/O failures are fatal.
func init() {
	app := walk.App()
	app.SetOrganizationName(Author)
	app.SetProductName(PName)
	settings := walk.NewIniFileSettings("settings.ini")
	log.Println("配置文件路径:", settings.FilePath())
	log.Println("初始资源根路径: ", walk.Resources.RootDirPath())
	if err := settings.Load(); err != nil {
		log.Fatal(err)
	}
	// Seed OnQuickKey with "false" when absent.
	if _, ok := settings.Get(OnQuickKey); !ok {
		if err := settings.Put(OnQuickKey, strconv.FormatBool(false)); err != nil {
			log.Fatal(err)
		}
	}
	// Seed AutoCommitKey with "false" when absent.
	if _, ok := settings.Get(AutoCommitKey); !ok {
		if err := settings.Put(AutoCommitKey, strconv.FormatBool(false)); err != nil {
			log.Fatal(err)
		}
	}
	// Seed TimeOutKey with "3" (seconds) when absent.
	if _, ok := settings.Get(TimeOutKey); !ok {
		if err := settings.Put(TimeOutKey, "3"); err != nil {
			log.Fatal(err)
		}
	}
	// Persist any seeded defaults immediately.
	if err := settings.Save(); err != nil {
		log.Fatal(err)
	}
	app.SetSettings(settings)
}
// NewConfig builds a Config snapshot from the current settings store.
// Lookup errors are deliberately ignored; missing keys leave zero values
// (IntValue still yields its documented default on failure).
func NewConfig() *Config {
	config := &Config{}
	config.Workspace, _ = StringValue(WorkspaceKey)
	config.AutoCommit, _ = BoolValue(AutoCommitKey)
	config.OnQuick, _ = BoolValue(OnQuickKey)
	config.QuickDir, _ = StringValue(QuickDirKey)
	config.Repository, _ = StringValue(GitInfoRepositoryKey)
	config.Server, _ = StringValue(GitInfoServerKey)
	config.UserName, _ = StringValue(GitInfoUserNameKey)
	config.Password, _ = StringValue(GitInfoPasswordKey)
	config.Token, _ = StringValue(GitInfoTokenKey)
	config.TimeOut, _ = IntValue(TimeOutKey)
	return config
}
// Settings returns the process-wide walk settings store.
func Settings() walk.Settings {
	return walk.App().Settings()
}
// StringValue looks up key in the settings store; it returns an error
// (with the key embedded) when the key is absent.
func StringValue(key string) (string, error) {
	if value, ok := Settings().Get(key); ok {
		return value, nil
	}
	return "", fmt.Errorf("获取配置失败,key=%q", key)
}
// IntValue reads key and parses it as a non-negative integer.
// On any failure the default timeout (3) is returned together with the error.
func IntValue(key string) (int, error) {
	const defaultTimeout = 3 // default timeout in seconds
	raw, err := StringValue(key)
	if err != nil {
		return defaultTimeout, err
	}
	parsed, err := strconv.ParseUint(raw, 10, 0)
	if err != nil {
		return defaultTimeout, err
	}
	return int(parsed), nil
}
// BoolValue reads key and parses it as a boolean.
// Returns false together with the error on any failure
// (strconv.ParseBool already yields false on parse errors).
func BoolValue(key string) (bool, error) {
	raw, err := StringValue(key)
	if err != nil {
		return false, err
	}
	return strconv.ParseBool(raw)
}
// Workspace returns the stored workspace path and whether it was present.
func Workspace() (string, bool) {
	return Settings().Get(WorkspaceKey)
}
// SetWorkspace stores ws under WorkspaceKey and flushes the settings to disk.
func SetWorkspace(ws string) error {
	settings := Settings()
	if err := settings.Put(WorkspaceKey, ws); err != nil {
		return err
	}
	return settings.Save()
}
// Save validates that cf.Workspace is the root of a git repository, then
// persists every Config field and flushes the settings store to disk.
//
// Bug fixes relative to the original:
//   - a ReadDir failure is now reported instead of silently returning nil
//     (the old code claimed success without saving anything);
//   - the ".git" check now scans the whole directory listing; previously
//     any non-".git" entry that happened to be listed first caused a
//     spurious "not a git root" error even when .git existed.
func Save(cf *Config) error {
	entries, err := ioutil.ReadDir(cf.Workspace)
	if err != nil {
		return err
	}
	isGitRoot := false
	for _, entry := range entries {
		if entry.IsDir() && entry.Name() == ".git" {
			isGitRoot = true
			break
		}
	}
	if !isGitRoot {
		return fmt.Errorf("%q不是一个git项目根目录", cf.Workspace)
	}
	st := Settings()
	// Write every field; abort on the first Put failure.
	pairs := []struct{ key, value string }{
		{WorkspaceKey, cf.Workspace},
		{AutoCommitKey, strconv.FormatBool(cf.AutoCommit)},
		{OnQuickKey, strconv.FormatBool(cf.OnQuick)},
		{QuickDirKey, cf.QuickDir},
		{GitInfoRepositoryKey, cf.Repository},
		{GitInfoServerKey, cf.Server},
		{GitInfoUserNameKey, cf.UserName},
		{GitInfoPasswordKey, cf.Password},
		{GitInfoTokenKey, cf.Token},
		{TimeOutKey, strconv.FormatInt(int64(cf.TimeOut), 10)},
	}
	for _, p := range pairs {
		if err := st.Put(p.key, p.value); err != nil {
			return err
		}
	}
	return st.Save()
}
// Reset restores the quick-upload and auto-commit settings to their
// defaults (disabled, empty quick dir) in the in-memory store.
//
// NOTE(review): unlike Save/SetWorkspace this does not call st.Save(),
// so the reset values are not persisted until a later save — confirm
// this is intentional.
func Reset() error {
	st := Settings()
	if err := st.Put(OnQuickKey, strconv.FormatBool(false)); err != nil {
		return err
	}
	if err := st.Put(QuickDirKey, ""); err != nil {
		return err
	}
	if err := st.Put(AutoCommitKey, strconv.FormatBool(false)); err != nil {
		return err
	}
	return nil
}
|
// Auto-mock fs and os so the helper never touches the real filesystem/platform.
jest.mock('fs');
jest.mock('os');
describe('Run Trivy', () => {
    const mockedFs = require('fs');
    const mockedOs = require('os');
    const mockedToolCache = require('@actions/tool-cache');
    const mockedFileHelper = require('../src/fileHelper');
    // Redirect scan output into a throwaway temp directory.
    mockedFileHelper.getContainerScanDirectory = jest.fn().mockImplementation(() =>{
        return 'test/_temp/containerscan_123';
    });
    // Fake GitHub "latest release" payload read by the downloader.
    let mockFile = {
        'releaseDownloadedPath': JSON.stringify({ tag_name: 'v1.1.1' })
    };
    // Both tools start out marked as already present in the tool cache.
    let cachedTools = {
        'trivy': true,
        'dockle': true
    };
    mockedFs.__setMockFiles(mockFile);
    mockedToolCache.__setToolCached(cachedTools);
    afterAll(() => {
        jest.clearAllMocks();
        jest.resetModules();
        jest.restoreAllMocks();
    });
    test('Trivy binaries are present in the cache', async () => {
        const runner = require('../src/trivyHelper');
        await expect(runner.runTrivy()).resolves.toBe(0);
        // Cached path: no OS lookup; only the release metadata is downloaded.
        expect(mockedOs.type).not.toHaveBeenCalled();
        expect(mockedToolCache.find).not.toHaveReturnedWith(undefined);
        expect(mockedToolCache.downloadTool).toHaveBeenCalledTimes(1);
    });
    test('Trivy binaries are not present in the cache', async () => {
        // Invalidate the cache entry so the helper must download the binary.
        cachedTools['trivy'] = false;
        mockedToolCache.__setToolCached(cachedTools);
        const runner = require('../src/trivyHelper');
        await expect(runner.runTrivy()).resolves.toBe(0);
        expect(mockedOs.type).toHaveBeenCalledTimes(1);
        expect(mockedToolCache.find).toHaveReturnedWith(undefined);
        // Download count is cumulative across tests: metadata + binary.
        expect(mockedToolCache.downloadTool).toHaveBeenCalledTimes(2);
    });
});
|
#!/bin/bash
# Container entrypoint: migrate, localize, load fixtures, collect static
# assets, then start the Django development server.
# NOTE(review): the first echo says "/app/python manage.py" while the command
# run is "python /app/manage.py makemigrations" — the message looks like a typo.
echo "======>>>python /app/python manage.py makemigrations"
python /app/manage.py makemigrations
echo "======>>>python /app/manage.py migrate auth"
python /app/manage.py migrate auth
echo "======>>>python /app/manage.py migrate reviews"
python /app/manage.py migrate reviews --noinput
echo "======>>>python /app/manage.py makemessages -l 'sv' -i venv"
python /app/manage.py makemessages -l 'sv' -i venv
echo "======>>>python /app/manage.py compilemessages"
python /app/manage.py compilemessages
echo "======>>>python /app/manage.py syncdb"
python /app/manage.py syncdb --noinput
echo "======>>>python /app/manage.py loaddata source_initial_data.json"
python /app/manage.py loaddata source_initial_data.json
echo "======>>>python manage.py collectstatic"
python /app/manage.py collectstatic --noinput
echo "======>>>python /app/manage.py runserver 0.0.0.0:8000"
python /app/manage.py runserver 0.0.0.0:8000
|
import { createContext } from 'react';
// Shared React context; initialized to null until a provider supplies a value.
const MaterialContext = createContext(null);
export default MaterialContext;
|
#!/bin/sh
# Demonstrates POSIX arithmetic expansion; * binds tighter than +.
echo $((123+123*2))
# Bug fix: the original used the second arithmetic expansion as a bare
# command, so the shell tried to execute the literal result ("15252")
# and failed with "command not found". Echo the value instead. The
# redundant nested $((123)) is folded away.
echo $((123*123+123))
# Homebrew formula for par2cmdline (Parchive parity archives).
class Par2 < Formula
  desc "Parchive: Parity Archive Volume Set for data recovery"
  homepage "https://github.com/Parchive/par2cmdline"
  url "https://github.com/Parchive/par2cmdline/releases/download/v0.8.1/par2cmdline-0.8.1.tar.bz2"
  sha256 "5fcd712cae2b73002b0bf450c939b211b3d1037f9bb9c3ae52d6d24a0ba075e4"
  license "GPL-2.0"

  # Track new upstream tags on the GitHub releases page.
  livecheck do
    url "https://github.com/Parchive/par2cmdline/releases/latest"
    regex(%r{href=.*?/tag/v?(\d+(?:\.\d+)+)["' >]}i)
  end

  bottle do
    cellar :any_skip_relocation
    sha256 "8379fe417ad00b81929cef774072179d9f2497156a5b06b706a6cf182d2f93dd" => :big_sur
    sha256 "26609c45028599a4845f68cda2a5cd08c2a0dc37ae3987d4abf86aed99499f50" => :catalina
    sha256 "cded10d8f18c5ab236ceb624854afb672681bd1a86f21e47d70de793db378580" => :mojave
    sha256 "35477bcfecd91b7fe885739737f576b63545aab51ba997bc60f9a74927b775dc" => :high_sierra
    sha256 "abb05496548e0a60ae03cbad9cf484274e97dc091a341ae5495557c5201276a7" => :x86_64_linux
  end

  # Standard autotools build.
  def install
    system "./configure", "--prefix=#{prefix}"
    system "make", "install"
  end

  # Round-trip test: protect, corrupt, repair, verify.
  test do
    # Protect a file with par2.
    test_file = testpath/"some-file"
    File.write(test_file, "file contents")
    system "#{bin}/par2", "create", test_file
    # "Corrupt" the file by overwriting, then ask par2 to repair it.
    File.write(test_file, "corrupted contents")
    repair_command_output = shell_output("#{bin}/par2 repair #{test_file}")
    # Verify that par2 claimed to repair the file.
    assert_match "1 file(s) exist but are damaged.", repair_command_output
    assert_match "Repair complete.", repair_command_output
    # Verify that par2 actually repaired the file.
    assert File.read(test_file) == "file contents"
  end
end
|
#!/bin/sh -e
# Pin the Rust toolchain to a known-good nightly and make it the default.
rustup install nightly-2021-08-20
rustup default nightly-2021-08-20
# rustc-dev: compiler-internal libraries; miri: the MIR interpreter.
rustup component add rustc-dev
rustup component add miri
|
package cn.leancloud;
import android.content.Context;
import com.vivo.push.model.UPSNotificationMessage;
import cn.leancloud.convertor.ObserverBuilder;
import cn.leancloud.utils.LogUtil;
import cn.leancloud.utils.StringUtil;
import cn.leancloud.callback.SaveCallback;
import cn.leancloud.vivo.LCMixPushManager;
public abstract class LCVIVOPushMessageReceiver extends com.vivo.push.sdk.OpenClientPushMessageReceiver {
  private static final LCLogger LOGGER = LogUtil.getLogger(LCVIVOPushMessageReceiver.class);
  private final String VIVO_VENDOR = "vivo";

  /**
   * Invoked after a push notification is clicked.
   *
   * @param context application context
   * @param msg UPSNotificationMessage carrying msgId, title, content,
   *            skipContent and custom key/value params
   * @deprecated kept only for compatibility with pre-3.x vivo SDKs;
   *             no longer used in 3.x.
   */
  public void onNotificationMessageClicked(Context context, UPSNotificationMessage msg) {
  }

  /**
   * Called with the registration id after the first successful turnOnPush,
   * or whenever the regId changes. Synchronizes vendor, registration id and
   * mixpush profile onto the current LCInstallation and saves it asynchronously.
   *
   * @param var1 application context
   * @param regId unique id of this app on this device
   */
  @Override
  public void onReceiveRegId(Context var1, final String regId) {
    if (StringUtil.isEmpty(regId)) {
      LOGGER.e("received empty regId from VIVO server.");
      return;
    }
    LCInstallation installation = LCInstallation.getCurrentInstallation();
    // Only touch fields whose stored value actually differs, keeping the
    // installation's pending changes minimal.
    if (!VIVO_VENDOR.equals(installation.getString(LCInstallation.VENDOR))) {
      installation.put(LCInstallation.VENDOR, VIVO_VENDOR);
    }
    if (!regId.equals(installation.getString(LCInstallation.REGISTRATION_ID))) {
      installation.put(LCInstallation.REGISTRATION_ID, regId);
    }
    String storedProfile = installation.getString(LCMixPushManager.MIXPUSH_PROFILE);
    if (storedProfile == null) {
      storedProfile = "";
    }
    if (!storedProfile.equals(LCMixPushManager.vivoDeviceProfile)) {
      installation.put(LCMixPushManager.MIXPUSH_PROFILE, LCMixPushManager.vivoDeviceProfile);
    }
    installation.saveInBackground().subscribe(ObserverBuilder.buildSingleObserver(new SaveCallback() {
      @Override
      public void done(LCException e) {
        if (null != e) {
          LOGGER.e("update installation(for vivo) error!", e);
        } else {
          LOGGER.d("vivo push registration successful! regId=" + regId);
        }
      }
    }));
  }
}
|
#!/bin/bash
# Builds a Debian package for hyperstart from a git archive of the given
# branch, using the debian/ control files stored alongside this script.
PROJECT=$(readlink -f $(dirname $0)/../../..)
DEBIAN_DIR=${PROJECT}/package/debian/hyperstart
VERSION=${VERSION:-0.8.1}
BRANCH=${BRANCH:-master}
# A positional argument overrides the VERSION environment variable.
if [ $# -gt 0 ] ; then
    VERSION=$1
fi
# install addtional pkgs in order to build deb pkg
# sudo apt-get install -y autoconf automake pkg-config dh-make
# get hyperstart tar ball
cd $PROJECT/../hyperstart
git archive --format=tar.gz ${BRANCH} > ${DEBIAN_DIR}/hyperstart-${VERSION}.tar.gz
# prepair to create source pkg
mkdir -p ${DEBIAN_DIR}/hyperstart-${VERSION}
cd ${DEBIAN_DIR}
tar -zxf hyperstart-${VERSION}.tar.gz -C ${DEBIAN_DIR}/hyperstart-${VERSION}
# in order to use debian/* to create deb, so put them in the hyperstart.
cp -a ${DEBIAN_DIR}/debian ${DEBIAN_DIR}/hyperstart-${VERSION}
# run dh_make
# NOTE(review): dh_make references ../hyperstart_${VERSION}.orig.tar.gz
# (underscore) but the archive created above is hyperstart-${VERSION}.tar.gz
# (dash) — confirm dh_make's -f argument matches an existing file.
cd ${DEBIAN_DIR}/hyperstart-${VERSION}
dh_make -s -y -f ../hyperstart_${VERSION}.orig.tar.gz -e dev@hyper.sh
# run dpkg-buildpackage (binary-only, unsigned)
dpkg-buildpackage -b -us -uc -rfakeroot
#clean up intermediate files
rm -rf ${DEBIAN_DIR}/hyperstart-${VERSION}
|
# Captive-portal firewall setup: NAT clients on IN_DEV out through GW_DEV,
# redirect unauthenticated HTTP traffic to the portal, and provide per-user
# allow/block rules at the bottom.
# TODO: check how to do that via 'echo >'
# net.ipv4.ip_forward=1
# Captive portal IP
CAPTIVE_IP="10.0.0.1" #that should be me
# Active device
GW_DEV="eth0"
IN_DEV="eth1"
# Flush all chains and delete user-defined ones.
iptables -F
iptables -X
# Default filter policy.
# Bug fix: chain policies accept the targets ACCEPT/DROP; the original
# "iptables -P FORWARD ALLOW" is not a valid target and the command failed,
# leaving FORWARD at its previous policy.
iptables -P INPUT DROP
iptables -P OUTPUT DROP
iptables -P FORWARD ACCEPT
# enable captive portal website
iptables -A INPUT -i $IN_DEV -p tcp --dport 80 -m state --state NEW,ESTABLISHED -j ACCEPT
iptables -A OUTPUT -o $IN_DEV -p tcp --sport 80 -m state --state ESTABLISHED -j ACCEPT
# enable nat
echo 1 > /proc/sys/net/ipv4/ip_forward
iptables -A POSTROUTING -t nat -o $GW_DEV -j MASQUERADE
# captive portal filter: mark HTTP 99 (redirected), everything else 100 (rejected)
iptables -t mangle -N internet
iptables -t mangle -A internet -p tcp -m tcp --dport 80 -j MARK --set-mark 99
iptables -t mangle -A internet -m mark ! --mark 99 -j MARK --set-mark 100
# for each dev to manage
iptables -t mangle -I PREROUTING -i $IN_DEV -j internet
iptables -t nat -I PREROUTING -i $IN_DEV -p tcp -m mark --mark 99 -m tcp --dport 80 -j DNAT --to-destination $CAPTIVE_IP
iptables -A FORWARD -i $IN_DEV -m mark --mark 100 -j REJECT
# allow DNS, DHCP, NTP
iptables -I FORWARD -p udp -i $IN_DEV -m multiport --dports 53,67,123 -j ACCEPT
iptables -I FORWARD -p udp -i $IN_DEV -m multiport --sports 53,67,123 -j ACCEPT
# end for each
# ------------------------------------------------------------------------------
# User management (templates: $USER_MAC_ADDRESS / $USER_IP are placeholders
# substituted per client; they are intentionally undefined here):
# allow — skip the marking chain so the client reaches the internet
iptables -t mangle -I internet 1 -m mac --mac-source $USER_MAC_ADDRESS -s $USER_IP -j RETURN
# block — remove the client's bypass rule again
iptables -t mangle -D internet -m mac --mac-source $USER_MAC_ADDRESS -s $USER_IP -j RETURN
|
<reponame>hejack0207/ssh-web-console
package main

import (
	"github.com/genshen/ssh-web-console/src/routers"
)

// main starts the web console by mounting the HTTP routes and serving.
func main() {
	routers.Run()
	//setupSSH()
}
|
#!/bin/bash
# Generates an Xcode project under iOS/ from a Briefcase template, applies
# local overrides and support files, then runs CocoaPods. Uses helper
# functions (check_env, prepare, building_template, ...) from common.sh.
set -e
set -u
set -x
. ./common.sh
echo ""
echo "Checking_env"
echo ""
check_env
set +x
# Refuse to clobber an existing iOS/ tree without confirmation.
if [ -d iOS ]; then
    echo "Warning: 'iOS' directory exists. All modifications will be lost if you continue."
    echo "Continue? [y/N]?"
    read -r reply
    if [ "${reply}" != "y" ]; then
        echo "Fair enough. Exiting..."
        exit 0
    fi
    echo "Cleaning up old iOS dir..."
    rm -rf iOS
fi
echo ""
echo "preparing environment for building template"
echo ""
prepare
echo ""
echo "Building Briefcase-Based iOS Project..."
echo ""
building_template
echo ""
echo "Modify iOS/${compact_name}/${compact_name}-Info.plist"
echo ""
modify_pinfo_list
# Optional per-project file overrides layered on top of the template output.
if [ -d overrides/ ]; then
    echo ""
    echo "Applying overrides..."
    echo ""
    (cd overrides && cp -fpR * ../iOS/)
fi
stupid_launch_image_grr="iOS/${compact_name}/Images.xcassets/LaunchImage.launchimage"
if [ -d "${stupid_launch_image_grr}" ]; then
    echo ""
    echo "Removing deprecated LaunchImage stuff..."
    echo ""
    rm -fvr "${stupid_launch_image_grr}"
fi
modify_pbxproj
# Native .so extensions cannot be loaded on iOS; strip them from app_packages.
so_crap=$(find iOS/app_packages -iname \*.so -print)
if [ -n "$so_crap" ]; then
    echo ""
    echo "Deleting .so files in app_packages since they don't work anyway on iOS..."
    echo ""
    for a in $so_crap; do
        rm -vf $a
    done
fi
echo ""
echo "Modifying main.m to include PYTHONIOENCODING=UTF-8..."
echo ""
# Replace the template's main.m and prefix header with our own copies.
main_m="iOS/${compact_name}/main.m"
if [ -e "${main_m}" ]; then
    rm -f "${main_m}"
fi
pch="iOS/${compact_name}/${compact_name}-Prefix.pch"
if [ -e "${pch}" ]; then
    rm -f "${pch}"
fi
if [ ! -d iOS/Support ]; then
    mkdir iOS/Support
fi
mv iOS/BZip2 iOS/OpenSSL iOS/Python iOS/XZ iOS/VERSIONS iOS/Support/
cp -fRa Support/site-package/ iOS/app_packages/
ln Support/podfile iOS/podfile
ln Support/main.m "${main_m}"
ln Support/"${compact_name}"-Prefix.pch "${pch}"
(cd iOS && pod install)
# NOTE(review): with `set -e` in effect a pod install failure exits the
# script before this check, making the branch below dead code — confirm.
if [ "$?" != "0" ]; then
    echo "Encountered an error when execute pod install!"
    exit 1
fi
# remove our framework from FrameworksBuildPhase
ruby ./update_project.rb
echo ''
echo '**************************************************************************'
echo '* *'
echo '* Operation Complete. An Xcode project has been generated in "iOS/" *'
echo '* *'
echo '**************************************************************************'
echo ''
echo ' IMPORTANT!'
echo ' Now you need to either manually add the library libxml2.tbd to the '
echo ' project under "General -> Linked Frameworks and Libraries" *or* '
echo ' run the ./update_project.rb script which will do it for you.'
echo ' Either of the above are needed to prevent build errors! '
echo ''
echo ' Also note:'
echo ' Modifications to files in iOS/ will be clobbered the next '
echo ' time this script is run. If you intend on modifying the '
echo ' program in Xcode, be sure to copy out modifications from iOS/ '
echo ' manually or by running ./copy_back_changes.sh.'
echo ''
echo ' Caveats for App Store & Ad-Hoc distribution:'
echo ' "Release" builds submitted to the app store fail unless the '
echo ' following things are done in "Build Settings" in Xcode: '
echo ' - "Strip Debug Symbols During Copy" = NO '
echo ' - "Strip Linked Product" = NO '
echo ' - "Strip Style" = Debugging Symbols '
echo ' - "Enable Bitcode" = NO '
echo ' - "Valid Architectures" = arm64 '
echo ' - "Symbols Hidden by Default" = NO '
echo ''
|
#!/bin/sh
#
# Break out network setup
#
# Registers the "icsa" IPv4 subnet in Foreman/Satellite via hammer, wiring
# DNS, domain, gateway, organization/location and the TFTP smart proxy.
# TODO: Need to create a subnet with associated TFTP capsule
hammer subnet --name icsa \
    --description "icsa" \
    --boot-mode 'DHCP' \
    --dns-primary '10.15.169.20' \
    --domains 'icsa.iad.redhat.com' \
    --gateway '10.15.169.254' \
    --organization 'Red Hat ICSA Team' \
    --location "Tyson's Corner Lab" \
    --mask '255.255.255.0' \
    --mtu '9000' \
    --network-type 'IPv4' \
    --tftp 'stargazer.icsa.iad.redhat.com'
|
// Browserify bundle (generated build output): module 1 bootstraps the
// Angular app, module 2 is the VIN controller, module 3 the Edmunds API
// service. NOTE(review): prefer editing the original sources and
// re-bundling; edits here are lost on the next build.
(function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);var f=new Error("Cannot find module '"+o+"'");throw f.code="MODULE_NOT_FOUND",f}var l=n[o]={exports:{}};t[o][0].call(l.exports,function(e){var n=t[o][1][e];return s(n?n:e)},l,l.exports,e,t,n,r)}return n[o].exports}var i=typeof require=="function"&&require;for(var o=0;o<r.length;o++)s(r[o]);return s})({1:[function(require,module,exports){
(function(){
  "use strict";
  // Require the edmunds service
  require('./edmunds');
  var app = angular.module('plnkrApp', ['ngMaterial', 'edmundsApi']);
  app
    .controller("VINController", require('./controllers/VinController.js'));
})();
// --- Module 2: VIN controller (chips input -> Edmunds VIN lookups) ---
},{"./controllers/VinController.js":2,"./edmunds":3}],2:[function(require,module,exports){
module.exports = ['edmundsService', '$mdConstant', function(edmundsService, $mdConstant) {
  var vm = this;
  vm.vins = [];
  vm.res = [];
  // Keys that commit a chip in the md-chips input.
  vm.keys = [$mdConstant.KEY_CODE.ENTER, $mdConstant.KEY_CODE.COMMA, $mdConstant.KEY_CODE.SPACE];
  vm.checkVins = function(vinsArray) {
    if(vinsArray && vinsArray.length) {
      for (var i = 0; i < vinsArray.length; i++) {
        vinLookup(vinsArray[i]);
      }
    }
    vm.vins = [];
  };
  // Looks up a single 17-character VIN; shorter input is silently ignored.
  function vinLookup(vin) {
    if (vin.length == 17) {
      edmundsService.get(vin).then(function(data) {
        var res = {
          make : data.make.name,
          model : data.model.name,
          year : data.years[0].year,
          vin: vin.toUpperCase()
        };
        vm.res.push(res);
      },function(err) {
        var res = {
          make : 'VIN not found',
          vin: vin.toUpperCase()
        };
        vm.res.push(res);
      });
    }
  }
  vm.getMakes = function(year) {
    edmundsService.getMakes(year).then(function(data) {
      vm.makes = data.makes;
      vm.form.model = '';
    });
  };
  // vm.getMakes();
  vm.getModels = function(make, year){
    edmundsService.getModels(make, year).then(function(data) {
      vm.models = data.models;
    });
  };
}];
// --- Module 3: edmundsService ($http wrapper around the Edmunds REST API) ---
},{}],3:[function(require,module,exports){
angular.module('edmundsApi', []);
angular.module('edmundsApi')
  .factory('edmundsService',['$http', '$q', function($http, $q) {
    var key = '<KEY>',
        edmundUrl = 'https://api.edmunds.com/api/vehicle/v2/',
        mediaUrl = 'https://api.edmunds.com/api/media/v2/',
        type = '',
        params = {
          fmt: "json",
          api_key: key
        };
    return {
      get: function(vin) {
        type = 'vins';
        var delay = $q.defer();
        $http.get(edmundUrl + type + '/' + vin, {
          cache: true,
          params: params
        })
        .success(function(data) {
          delay.resolve(data);
        })
        .error(function(data) {
          delay.reject(data);
        });
        return delay.promise;
      },
      getShortVin: function(vin) {
        type = 'squishvins';
        var shortenedVin = shortenVin(vin);
        var delay = $q.defer();
        $http.get(edmundUrl + type + '/' + shortenedVin, {params: params})
        .success(function(data) {
          delay.resolve(data);
        });
        return delay.promise;
      },
      getPicture: function(make, model, year) {
        var delay = $q.defer();
        params.make = make || null;
        params.model = model || null;
        params.year = year || null;
        $http.get(mediaUrl + make + '/' + model + '/' + year + '/photos', {params: params})
        .success(function(data) {
          delay.resolve(data);
        });
        return delay.promise;
      },
      getMakes: function(year) {
        type = 'makes';
        params.year = year || null;
        var delay = $q.defer();
        $http.get(edmundUrl + type, {
          cache: true,
          params: params
        })
        .success(function(data) {
          delay.resolve(data);
        });
        return delay.promise;
      },
      getModels: function(make, year) {
        if (!make) {
          return;
        }
        type = 'models';
        params.year = year || null;
        var delay = $q.defer();
        $http.get(edmundUrl + make + '/' + type, {
          cache: true,
          params: params
        })
        .success(function(data) {
          delay.resolve(data);
        });
        return delay.promise;
      }
    };
    // "Squish" VIN: drop the check digit (pos 9) and keep positions 1-11.
    function shortenVin(vin) {
      var vinSquished = '';
      for (var i = 0; i < vin.length; i++) {
        if (i <= 10 && i !== 8) {
          vinSquished += vin.charAt(i);
        }
      }
      return vinSquished;
    }
  }]);
},{}]},{},[1]);
|
"""
Modify the code so that it prints out the USD price of a Bitcoin every 5 seconds.
"""
import requests
import json
import time
def get_bitcoin_price():
url = 'http://api.bitcoincharts.com/v1/weighted_prices.json'
response = requests.get(url)
data = json.loads(response.text)
return data['USD']['24h']
if __name__ == '__main__':
while True:
print(get_bitcoin_price())
time.sleep(5) |
#include <fstream>
#include <string>
// Counts lines in the file that contain "namespace <targetNamespace>",
// at most one hit per line (matching the original behavior).
//
// Bug fix: the original used a plain substring search, so asking for
// namespace "foo" also matched "namespace foobar" (and "mynamespace foo"
// matched "namespace foo"). A match now requires that "namespace" is not
// preceded, and the target not followed, by an identifier character.
//
// Returns 0 when the file cannot be opened (unchanged: the stream simply
// yields no lines). Matches inside comments/string literals still count;
// this function does not parse C++.
int countNamespaceOccurrences(const std::string& filePath, const std::string& targetNamespace) {
    auto isIdentChar = [](char c) {
        return (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') ||
               (c >= '0' && c <= '9') || c == '_';
    };
    const std::string needle = "namespace " + targetNamespace;
    std::ifstream file(filePath);
    std::string line;
    int count = 0;
    while (std::getline(file, line)) {
        size_t pos = line.find(needle);
        while (pos != std::string::npos) {
            const bool startOk = (pos == 0) || !isIdentChar(line[pos - 1]);
            const size_t end = pos + needle.size();
            const bool endOk = (end >= line.size()) || !isIdentChar(line[end]);
            if (startOk && endOk) {
                count++;  // at most one hit per line, as before
                break;
            }
            pos = line.find(needle, pos + 1);
        }
    }
    return count;
}
<gh_stars>1-10
const router = require('express').Router()
const api = require('./api')
// Encapsulates the main applications routes
// All versioned API endpoints are mounted under /api/v1.
router.use('/api/v1', api)
module.exports = router
|
require 'test/unit'
require 'stringio'
require 'replicate'
# Unit tests for Replicate::Loader: filter callbacks, IO reading, stats,
# block-form completion, and id-attribute translation.
class LoaderTest < Test::Unit::TestCase
  def setup
    @loader = Replicate::Loader.new
  end

  # Builds a Replicate::Object with sensible default attributes.
  def thing(attrs={})
    attrs = {'number' => 123, 'string' => 'hello', 'time' => Time.new}.merge(attrs)
    Replicate::Object.new attrs
  end

  # A registered listener receives (type, id, attrs, obj) exactly once per feed.
  def test_basic_filter
    called = false
    object = thing('test' => 'value')
    @loader.listen do |type, id, attrs, obj|
      assert !called
      assert_equal 'Replicate::Object', type
      assert_equal object.id, id
      assert_equal 'value', attrs['test']
      assert_equal object.attributes, attrs
      called = true
    end
    @loader.feed object.class, object.id, object.attributes
    assert called
  end

  # read() decodes marshalled [type, id, attrs] tuples from an IO stream.
  def test_reading_from_io
    called = false
    data = Marshal.dump(['Replicate::Object', 10, {'test' => 'value'}])
    @loader.listen do |type, id, attrs, obj|
      assert !called
      assert_equal 'Replicate::Object', type
      assert_equal 'value', attrs['test']
      called = true
    end
    @loader.read(StringIO.new(data))
    assert called
  end

  # stats tallies the number of objects fed, keyed by type name.
  def test_stats
    10.times do
      obj = thing
      @loader.feed obj.class, obj.id, obj.attributes
    end
    assert_equal({'Replicate::Object' => 10}, @loader.stats)
  end

  # The block form calls each filter's #complete hook when the block exits.
  def test_block_form_runs_complete
    called = false
    Replicate::Loader.new do |loader|
      filter = lambda { |*args| }
      (class <<filter;self;end).send(:define_method, :complete) { called = true }
      loader.listen filter
      obj = thing
      loader.feed obj.class, obj.id, obj.attributes
      assert !called
    end
    assert called
  end

  # [:id, type, id] attribute triples are rewritten to the newly loaded id.
  def test_translating_id_attributes
    objects = []
    @loader.listen { |type, id, attrs, object| objects << object }
    object1 = thing
    @loader.feed object1.class, object1.id, object1.attributes
    object2 = thing('related' => [:id, 'Replicate::Object', object1.id])
    @loader.feed object2.class, object2.id, object2.attributes
    assert_equal 2, objects.size
    assert_equal objects[0].id, objects[1].related
  end

  # Arrays of referenced ids are translated element-wise.
  def test_translating_multiple_id_attributes
    objects = []
    @loader.listen { |type, id, attrs, object| objects << object }
    members = (0..9).map { |i| thing('number' => i) }
    members.each do |member|
      @loader.feed member.class, member.id, member.attributes
    end
    ids = members.map { |m| m.id }
    referencer = thing('related' => [:id, 'Replicate::Object', ids])
    @loader.feed referencer.class, referencer.id, referencer.attributes
    assert_equal 11, objects.size
    assert_equal 10, objects.last.related.size
  end
end
|
<gh_stars>10-100
// Copyright (c) 2015-2016, ETH Zurich, <NAME>, Zurich Eye
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
// * Neither the name of the ETH Zurich, Wyss Zurich, Zurich Eye nor the
// names of its contributors may be used to endorse or promote products
// derived from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
// DISCLAIMED. IN NO EVENT SHALL ETH Zurich, <NAME>urich, Zurich Eye BE LIABLE FOR ANY
// DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
// ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include <ze/common/matrix.hpp>
#include <Eigen/SVD>
#include <Eigen/LU>
namespace ze {
std::tuple<int, real_t, VectorX> directLinearTransform(const MatrixX& A, real_t rank_tol)
{
int n = A.rows();
int p = A.cols();
int m = std::min(n,p);
Eigen::JacobiSVD<MatrixX> svd(A, Eigen::ComputeFullV);
VectorX s = svd.singularValues();
MatrixX V = svd.matrixV();
// Find rank
int rank = 0;
for (int i = 0; i < m; ++i)
{
if (s(i) > rank_tol)
{
++rank;
}
}
// Return rank, error, and corresponding column of V
real_t error = (m < p) ? 0.0 : s(m-1);
return std::make_tuple(rank, error, V.col(p-1));
}
} // namespace ze
|
package com.smalljava.core.classloader.l2_class.vo.element;
/**
 * Marker element in the class model; adds no members of its own beyond what
 * {@link AbstractJavaClassElement} provides.
 *
 * NOTE(review): semantics inferred from the name only — presumably represents
 * a multi-line (block) comment/memo attached to a class; confirm against
 * AbstractJavaClassElement's contract and the parser that creates instances.
 */
public class JavaClassMultiLineMemoElement extends AbstractJavaClassElement {
}
|
//
// NarrowViewController.h
// Zulip
//
// Created by <NAME> on 7/30/13.
//
//
#import <UIKit/UIKit.h>
#import "StreamViewController.h"
#import "NarrowOperators.h"
/// A StreamViewController presumably restricted ("narrowed") to messages that
/// match a set of NarrowOperators — TODO(review): confirm against the .m.
@interface NarrowViewController : StreamViewController

/// Initializes the controller with the narrow operators whose matching
/// messages should be shown.
/// NOTE(review): consider returning `instancetype` rather than `id`.
- (id)initWithOperators:(NarrowOperators *)operators;

// Default behaviour is to scroll to first unread
// This will attempt to scroll to the desired message ID
// when loaded if the message is not yet loaded
- (void)scrollToMessageID:(long)messageId;

@end
|
<gh_stars>0
package jvm.launch;
public class JvmChecker {

    /**
     * Entry point: prints the JVM's total, max and free memory (in bytes and
     * MiB) plus the wall-clock time the whole report took.
     *
     * @param args unused
     */
    public static void main(String[] args) {
        final long startedAt = System.currentTimeMillis();
        final Runtime runtime = Runtime.getRuntime();
        final long totalBytes = runtime.totalMemory();
        final long maxBytes = runtime.maxMemory();
        final long freeBytes = runtime.freeMemory();
        final long bytesPerMiB = 1048576;
        JvmData.printOutData();
        System.out.println("Total Memory: " + totalBytes + " ("
                + (totalBytes / bytesPerMiB) + " MiB)");
        System.out.println("Max Memory: " + maxBytes + " (" + (maxBytes / bytesPerMiB)
                + " MiB)");
        System.out.println("Free Memory: " + freeBytes + " (" + (freeBytes / bytesPerMiB)
                + " MiB)");
        System.out.println("total time :" + (System.currentTimeMillis() - startedAt));
    }
}
|
'use strict';

const charToTel = require('./char-to-tel');

/**
 * Convert a string into its telephone-keypad representation: lower-case the
 * input, map each UTF-16 character unit through charToTel, and concatenate.
 *
 * @param {string} input - the text to convert
 * @returns {string} the keypad representation
 */
module.exports = (input) =>
  input
    .toLowerCase()
    .split('')
    .map(charToTel)
    .join('');
|
package utils
import (
"database/sql"
_ "github.com/go-sql-driver/mysql"
log "github.com/sirupsen/logrus"
"os"
"reflect"
"strconv"
"strings"
)
// RDBMS bundles a database/sql connection with the configuration it was built
// from and the error (if any) produced while opening it.
type RDBMS struct {
	Conn    *sql.DB      // connection handle; set by Connect/ConnectM2
	ConnErr error        // error recorded when the connection was opened
	Conf    *RDBMSConfig // configuration populated from environment variables
}
func (d *RDBMS) Connect() error {
d.Conf = new(RDBMSConfig)
d.Conf.DriverName = os.Getenv("DB_DRIVER")
d.Conf.HostName = os.Getenv("DB_HOST")
d.Conf.Port, _ = strconv.ParseInt(os.Getenv("DB_PORT"), 10, 64)
d.Conf.UserName = os.Getenv("DB_USER")
d.Conf.Password = <PASSWORD>("<PASSWORD>")
d.Conf.Database = os.Getenv("DB_NAME")
d.Conf.MaxIdleConns, _ = strconv.Atoi(os.Getenv("DB_MAX_IDLE_CONN"))
d.Conf.MaxOpenConns, _ = strconv.Atoi(os.Getenv("DB_MAX_OPEN_CONN"))
d.Conf.MaxLifeTimeConn, _ = strconv.Atoi(os.Getenv("DB_MAX_LIFE_TIME_CONN"))
d.Conn, d.ConnErr = d.Conf.Connect()
return d.ConnErr
}
// ConnectM2 opens the secondary ("M2") database connection, reading every
// setting from the M2_DB_* environment variables. The handle, configuration
// and error are stored on the receiver; the error is also returned.
func (d *RDBMS) ConnectM2() error {
	cfg := new(RDBMSConfig)
	cfg.DriverName = os.Getenv("M2_DB_DRIVER")
	cfg.HostName = os.Getenv("M2_DB_HOST")
	cfg.Port, _ = strconv.ParseInt(os.Getenv("M2_DB_PORT"), 10, 64)
	cfg.UserName = os.Getenv("M2_DB_USER")
	cfg.Password = os.Getenv("M2_DB_PASS")
	cfg.Database = os.Getenv("M2_DB_NAME")
	cfg.MaxIdleConns, _ = strconv.Atoi(os.Getenv("M2_DB_MAX_IDLE_CONN"))
	cfg.MaxOpenConns, _ = strconv.Atoi(os.Getenv("M2_DB_MAX_OPEN_CONN"))
	cfg.MaxLifeTimeConn, _ = strconv.Atoi(os.Getenv("M2_DB_MAX_LIFE_TIME_CONN"))
	d.Conf = cfg
	d.Conn, d.ConnErr = cfg.Connect()
	return d.ConnErr
}
// Close closes the underlying *sql.DB handle and returns its error.
// NOTE(review): if called before a successful Connect, d.Conn is nil — verify
// callers never do this.
func (d *RDBMS) Close() error {
	return d.Conn.Close()
}
// SelectRows builds and executes a SELECT statement and returns the resulting
// rows, or nil if the query failed (the error is only logged).
//
// columns, condition, limit, offset and orderBy are spliced into the SQL text
// verbatim; callers MUST NOT pass untrusted input here (SQL injection risk).
// TODO(review): consider a parameterized API.
//
// Fixed: the local variable was named `sql`, shadowing the imported
// database/sql package inside this function; renamed to `query`.
func (d *RDBMS) SelectRows(tableName, columns, condition, limit, offset, orderBy string) *sql.Rows {
	query := "SELECT " + columns + " FROM `" + tableName + "`"
	if len(condition) > 0 {
		query += " WHERE " + condition
	}
	if len(orderBy) > 0 {
		query += " ORDER BY " + orderBy
	}
	if len(offset) > 0 && len(limit) > 0 {
		query += " LIMIT " + offset + ", " + limit
	} else if len(limit) > 0 {
		query += " LIMIT 0," + limit
	}
	log.Printf("SQLEXEC:: %s", query)
	rows, err := d.Conn.Query(query)
	if err != nil {
		log.Printf("ERROR executing SQL:\n%s \nDetails: %s", query, err.Error())
		return nil
	}
	return rows
}
// InsertRow inserts one row into the given table, using the map's keys as
// column names and its values as (placeholder-bound) values. Returns the
// auto-increment ID of the inserted row, or 0 if preparing/executing failed
// (the failure is logged). Column order follows Go's random map iteration
// order, which is irrelevant for a SET-style INSERT.
//
// Fixed: the original called reflect.ValueOf on the map and then NumField()/
// Field(i), which are struct-only APIs and panic at runtime for a map (the
// commented-out map iteration below the reflection code was the correct
// version). Also fixed: nil-stmt/nil-res dereference after a failed
// Prepare/Exec, the prepared statement is now closed, and a log.Println call
// that contained printf-style verbs now uses log.Printf.
func (d *RDBMS) InsertRow(table string, insertDataArray map[string]string) int {
	query := "Insert into " + table
	values := []interface{}{}
	if len(insertDataArray) > 0 {
		query += " SET "
		for column, val := range insertDataArray {
			query += column + "=?,"
			values = append(values, val)
		}
		query = strings.TrimRight(query, ",")
	}
	log.Printf("Insert Query: %#v", query)
	stmt, err := d.Conn.Prepare(query)
	if err != nil {
		log.Println("Cannot prepare DB statement", err)
		return 0
	}
	defer stmt.Close()
	res, err := stmt.Exec(values...)
	if err != nil {
		log.Println("Cannot run insert statement", err)
		return 0
	}
	id, lastInsertIdErr := res.LastInsertId()
	if lastInsertIdErr != nil {
		log.Printf("lastInsertIdErr = %#v", lastInsertIdErr)
	}
	return int(id)
}
// UpdateRows builds an UPDATE statement from the column/value map and the
// WHERE-condition map and runs it via d.Conn.Query, returning the result
// (nil on error; the error is logged).
//
// NOTE(review): keys and values are spliced into the SQL text verbatim —
// callers must not pass untrusted input (SQL injection risk). Query() is an
// unusual choice for an UPDATE (Exec would be conventional); callers may
// depend on the returned *sql.Rows, so the behavior is kept — verify.
func (d *RDBMS) UpdateRows(table string, updateDataArray map[string]string, optionDataArray map[string]string) *sql.Rows {
	sql := "UPDATE `" + table + "` SET "
	for key, val := range updateDataArray {
		sql += "`" + key + "` = '" + val + "',"
	}
	sql = strings.TrimRight(sql, ",")
	if len(optionDataArray) > 0 {
		sql += " WHERE "
		for k, v := range optionDataArray {
			sql += "`" + k + "` = '" + v + "' AND "
		}
	}
	// TrimRight takes a cutset (the characters 'A','N','D',' '), not a suffix;
	// it works here because after the WHERE loop the string always ends in
	// "' AND " and the preceding closing quote stops the trimming.
	sql = strings.TrimRight(sql, "AND ")
	log.Printf("SQLEXEC:: %s", sql)
	rows, err := d.Conn.Query(sql)
	if err != nil {
		log.Printf("ERROR updating row via SQL:\n%s\nDetails: %s\n", sql, err.Error())
	}
	return rows
}
// UpdateTable builds an UPDATE statement from the column/value map and the
// WHERE-condition map, prepares and executes it, and returns the sql.Result
// plus any error encountered.
//
// Keys and values are inlined into the SQL text verbatim (no placeholders);
// callers must not pass untrusted input (SQL injection risk).
//
// Fixed: the prepared statement is now closed (it was previously leaked), and
// the local `sql` variable no longer shadows the imported database/sql
// package.
func (d *RDBMS) UpdateTable(table string, updateDataArray map[string]string, optionDataArray map[string]string) (sql.Result, error) {
	query := "UPDATE `" + table + "` SET "
	for key, val := range updateDataArray {
		query += "`" + key + "` = '" + val + "',"
	}
	query = strings.TrimRight(query, ",")
	if len(optionDataArray) > 0 {
		query += " WHERE "
		for k, v := range optionDataArray {
			query += "`" + k + "` = '" + v + "' AND "
		}
	}
	// TrimRight uses a cutset, not a suffix; safe here because the string
	// always ends in "' AND " and trimming stops at the closing quote.
	query = strings.TrimRight(query, "AND ")
	log.Printf("SQLEXEC:: %s", query)
	stmt, stmtErr := d.Conn.Prepare(query)
	if stmtErr != nil {
		log.Infof("Cannot prepare DB statement. %s", stmtErr.Error())
		return nil, stmtErr
	}
	defer stmt.Close()
	return stmt.Exec()
}
// DropTable drops the given table. It returns the first error encountered
// (prepare, execute, or RowsAffected), or nil on success; every step is
// logged as before.
//
// Fixed: the prepared statement is now closed (it was previously leaked),
// and the nested if/else error bookkeeping is flattened into early returns
// with identical log output and identical returned errors.
func (d *RDBMS) DropTable(tableName string) error {
	qry := "DROP TABLE `" + tableName + "`"
	stmt, stmtErr := d.Conn.Prepare(qry)
	if stmtErr != nil {
		log.Printf("Cannot prepare DB statement. %s", stmtErr.Error())
		return stmtErr
	}
	defer stmt.Close()
	log.Printf("Successfully prepared DB statement")
	res, err := stmt.Exec()
	if err != nil {
		log.Printf("Cannot execute DROP query. %s. %s", err.Error(), qry)
		return err
	}
	log.Printf("Successfully dropped table: %s", tableName)
	aRows, aRowsErr := res.RowsAffected()
	if aRowsErr != nil {
		log.Printf("Error getting affected rows. %s", aRowsErr.Error())
		return aRowsErr
	}
	log.Printf("Successfully dropped. Affected Rows = %d", aRows)
	return nil
}
|
#include <iostream>
#include <cmath>
// Returns (2^(n-c0) - 1) * (2^c0 - 1) + (2^c0 - 1), which simplifies to
// (2^c0 - 1) * 2^(n-c0).
//
// Fixed: the two "multiply by 2 in a loop" power computations are replaced by
// bit shifts. The guards preserve the original behavior for degenerate
// inputs: n < c0 yields 0 for the first factor (the original loop never ran
// and the pre-decrement left 0), and c0 == 0 yields 0 overall.
// NOTE: as before, the result overflows int once n approaches 31.
int calculatePossBLT(int n, int c0) {
    const int highBits = n - c0;
    const int sub = (highBits > 0) ? (1 << highBits) - 1 : 0;  // 2^(n-c0) - 1
    const int ext = (c0 > 0) ? (1 << c0) - 1 : 0;              // 2^c0 - 1
    return sub * ext + ext;
}
/// Interactive driver: reads n and c0 from stdin and prints the result of
/// calculatePossBLT(n, c0).
int main() {
    int n = 0;
    int c0 = 0;
    std::cout << "Enter the value of n: ";
    std::cin >> n;
    std::cout << "Enter the value of c0: ";
    std::cin >> c0;
    const int possBLT = calculatePossBLT(n, c0);
    std::cout << "The value of possBLT is: " << possBLT << std::endl;
    return 0;
}
<reponame>aloketewary/git_openapi<filename>src/app/git/models/user-repo-model.spec.ts
import { UserRepoModel } from './user-repo-model';

// Smoke test: the model class can be instantiated with no arguments.
describe('UserRepoModel', () => {
  it('should create an instance', () => {
    const model = new UserRepoModel();
    expect(model).toBeTruthy();
  });
});
|
<reponame>ApexTech/yt<gh_stars>100-1000
require 'spec_helper'
require 'yt/models/account'
# Specs for Yt::Account's user-info accessors: each accessor returns the
# corresponding user_info field, or '' when the field is absent.
#
# Fixed: the describe block previously named Yt::Video, but every example
# here builds and exercises a Yt::Account (copy/paste from a video spec).
describe Yt::Account do
  subject(:account) { Yt::Account.new attrs }

  describe '#id' do
    context 'given a user info with an ID' do
      let(:attrs) { {user_info: {"id"=>"103024385"}} }
      it { expect(account.id).to eq '103024385' }
    end

    context 'given a user info without an ID' do
      let(:attrs) { {user_info: {}} }
      it { expect(account.id).to eq '' }
    end
  end

  describe '#email' do
    context 'given a user info with an email' do
      let(:attrs) { {user_info: {"email"=>"<EMAIL>"}} }
      it { expect(account.email).to eq '<EMAIL>' }
    end

    context 'given a user info without an email' do
      let(:attrs) { {user_info: {}} }
      it { expect(account.email).to eq '' }
    end
  end

  describe '#has_verified_email?' do
    context 'given a user info with a verified email' do
      let(:attrs) { {user_info: {"verified_email"=>true}} }
      it { expect(account).to have_verified_email }
    end

    context 'given a user info without a verified email' do
      let(:attrs) { {user_info: {"verified_email"=>false}} }
      it { expect(account).not_to have_verified_email }
    end
  end

  describe '#name' do
    context 'given a user info with a name' do
      let(:attrs) { {user_info: {"name"=>"User Example"}} }
      it { expect(account.name).to eq 'User Example' }
    end

    context 'given a user info without a name' do
      let(:attrs) { {user_info: {}} }
      it { expect(account.name).to eq '' }
    end
  end

  describe '#given_name' do
    context 'given a user info with a given name' do
      let(:attrs) { {user_info: {"given_name"=>"User"}} }
      it { expect(account.given_name).to eq 'User' }
    end

    context 'given a user info without a given name' do
      let(:attrs) { {user_info: {}} }
      it { expect(account.given_name).to eq '' }
    end
  end

  describe '#family_name' do
    context 'given a user info with a family name' do
      let(:attrs) { {user_info: {"family_name"=>"Example"}} }
      it { expect(account.family_name).to eq 'Example' }
    end

    context 'given a user info without a family name' do
      let(:attrs) { {user_info: {}} }
      it { expect(account.family_name).to eq '' }
    end
  end

  describe '#profile_url' do
    context 'given a user info with a link' do
      let(:attrs) { {user_info: {"link"=>"https://plus.google.com/1234"}} }
      it { expect(account.profile_url).to eq 'https://plus.google.com/1234' }
    end

    context 'given a user info without a link' do
      let(:attrs) { {user_info: {}} }
      it { expect(account.profile_url).to eq '' }
    end
  end

  describe '#avatar_url' do
    context 'given a user info with a picture' do
      let(:attrs) { {user_info: {"picture"=>"https://lh3.googleusercontent.com/photo.jpg"}} }
      it { expect(account.avatar_url).to eq 'https://lh3.googleusercontent.com/photo.jpg' }
    end

    context 'given a user info without a picture' do
      let(:attrs) { {user_info: {}} }
      it { expect(account.avatar_url).to eq '' }
    end
  end

  describe '#gender' do
    context 'given a user info with a gender' do
      let(:attrs) { {user_info: {"gender"=>"male"}} }
      it { expect(account.gender).to eq 'male' }
    end

    context 'given a user info without a gender' do
      let(:attrs) { {user_info: {}} }
      it { expect(account.gender).to eq '' }
    end
  end

  describe '#locale' do
    context 'given a user info with a locale' do
      let(:attrs) { {user_info: {"locale"=>"en"}} }
      it { expect(account.locale).to eq 'en' }
    end

    context 'given a user info without a locale' do
      let(:attrs) { {user_info: {}} }
      it { expect(account.locale).to eq '' }
    end
  end

  describe '#hd' do
    context 'given a user info with a Google App domain' do
      let(:attrs) { {user_info: {"hd"=>"example.com"}} }
      it { expect(account.hd).to eq 'example.com' }
    end

    context 'given a user info without a Google App domain' do
      let(:attrs) { {user_info: {}} }
      it { expect(account.hd).to eq '' }
    end
  end
end
|
def divisible_numbers(num1, num2, num3, count=10):
    """Return the first ``count`` positive integers divisible by all three numbers.

    The original implementation tested every integer from 1 upward, which
    takes O(lcm(num1, num2, num3)) iterations per result; this version steps
    directly through multiples of the least common multiple instead.

    Args:
        num1, num2, num3: positive integer divisors.
        count: how many common multiples to return (default 10, the amount
            the original hard-coded).

    Returns:
        A list of the first ``count`` common multiples in increasing order.
    """
    import math  # local import keeps the module's top level unchanged

    # lcm(a, b) = a * b // gcd(a, b); fold over the three divisors.
    lcm = num1 * num2 // math.gcd(num1, num2)
    lcm = lcm * num3 // math.gcd(lcm, num3)
    return [lcm * k for k in range(1, count + 1)]
# Main: demo run — prints the first ten numbers divisible by 7, 8 and 9.
print(divisible_numbers(7, 8, 9))
<reponame>schinmayee/nimbus
//#####################################################################
// Copyright 2003-2006, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>.
// This file is part of PhysBAM whose distribution is governed by the license contained in the accompanying file PHYSBAM_COPYRIGHT.txt.
//#####################################################################
// Class TETRAHEDRAL_MESHING
//#####################################################################
#include <PhysBAM_Tools/Arrays_Computations/MAGNITUDE.h>
#include <PhysBAM_Tools/Grids_Uniform/GRID.h>
#include <PhysBAM_Tools/Matrices/DIAGONAL_MATRIX_3X3.h>
#include <PhysBAM_Tools/Matrices/MATRIX_4X4.h>
#include <PhysBAM_Tools/Matrices/SYMMETRIC_MATRIX_3X3.h>
#include <PhysBAM_Tools/Read_Write/Utilities/FILE_UTILITIES.h>
#include <PhysBAM_Tools/Utilities/INTERRUPTS.h>
#include <PhysBAM_Geometry/Basic_Geometry/TETRAHEDRON.h>
#include <PhysBAM_Geometry/Implicit_Objects_Dyadic/DYADIC_IMPLICIT_OBJECT.h>
#include <PhysBAM_Geometry/Read_Write/Geometry/READ_WRITE_TETRAHEDRALIZED_VOLUME.h>
#include <PhysBAM_Geometry/Solids_Geometry/DEFORMABLE_GEOMETRY_COLLECTION.h>
#include <PhysBAM_Geometry/Topology_Based_Geometry/TRIANGULATED_SURFACE.h>
#include <PhysBAM_Solids/PhysBAM_Deformables/Bindings/BINDING_LIST.h>
#include <PhysBAM_Solids/PhysBAM_Deformables/Bindings/LINEAR_BINDING.h>
#include <PhysBAM_Solids/PhysBAM_Deformables/Collisions_And_Interactions/DEFORMABLE_OBJECT_COLLISION_PARAMETERS.h>
#include <PhysBAM_Solids/PhysBAM_Deformables/Constitutive_Models/DIAGONALIZED_ISOTROPIC_STRESS_DERIVATIVE.h>
#include <PhysBAM_Solids/PhysBAM_Deformables/Constitutive_Models/ROTATED_LINEAR.h>
#include <PhysBAM_Solids/PhysBAM_Deformables/Deformable_Objects/DEFORMABLE_BODY_COLLECTION.h>
#include <PhysBAM_Solids/PhysBAM_Deformables/Forces/FINITE_VOLUME.h>
#include <PhysBAM_Solids/PhysBAM_Deformables/Forces/LINEAR_ALTITUDE_SPRINGS_3D.h>
#include <PhysBAM_Solids/PhysBAM_Deformables/Forces/LINEAR_SPRINGS.h>
#include <PhysBAM_Solids/PhysBAM_Rigids/Rigid_Bodies/RIGID_BODY_COLLECTION.h>
#include <PhysBAM_Solids/PhysBAM_Rigids/Rigid_Bodies/RIGID_BODY_EVOLUTION_PARAMETERS.h>
#include <PhysBAM_Solids/PhysBAM_Solids/Forces_And_Torques/ETHER_DRAG.h>
#include <PhysBAM_Solids/PhysBAM_Solids/Solids/SOLID_BODY_COLLECTION.h>
#include <PhysBAM_Solids/PhysBAM_Solids/Solids/SOLIDS_PARAMETERS.h>
#include <PhysBAM_Solids/PhysBAM_Solids/Solids_Evolution/SOLIDS_EVOLUTION.h>
#include <PhysBAM_Solids/PhysBAM_Solids/Standard_Tests/SOLIDS_STANDARD_TESTS.h>
#include <PhysBAM_Dynamics/Meshing/RED_GREEN_TETRAHEDRA.h>
#include <PhysBAM_Dynamics/Meshing/TETRAHEDRAL_MESHING.h>
#include <PhysBAM_Solids/PhysBAM_Solids/Forces_And_Torques/EXAMPLE_FORCES_AND_VELOCITIES.h>
using namespace PhysBAM;
// Constructor: allocates a fresh solids system (parameters, body collection
// with placeholder forces, Newmark evolution) and installs meshing defaults
// via the Set_*/Use_* calls below. Heap members are released in the destructor.
template<class T> TETRAHEDRAL_MESHING<T>::
TETRAHEDRAL_MESHING(const STREAM_TYPE stream_type)
    :solids_parameters(*new SOLIDS_PARAMETERS<TV>),solid_body_collection(*new SOLID_BODY_COLLECTION<TV>(new EXAMPLE_FORCES_AND_VELOCITIES<TV>(),0)),
    solids_evolution(new NEWMARK_EVOLUTION<TV>(solids_parameters,solid_body_collection)),
    implicit_surface(0),level_set_forces_and_velocities(0),
    stream_type(stream_type),output_directory("meshing_data"),frame(0),extra_refinement_criteria(0),dependent_nodes(0),boundary_mesh(0)
{
    // Establish default tuning values; each call presumably sets its
    // documented default when invoked with no arguments — see declarations.
    Use_Masses_And_Springs();Set_Curvature_Subdivision_Threshold();Set_Interpolation_Error_Subdivision_Threshold();Set_Maximum_Boundary_Edge_Length();
    Set_Density();Increase_Mass_On_Boundary();Use_Dynamic_Ether_Viscosity();Use_Global_Quality_Criteria_For_Early_Exit();
    Replace_Green_Refinement_With_Embedded_T_Junctions();
    use_constant_mass=true;solids_parameters.cfl=(T).5;solids_parameters.rigid_body_evolution_parameters.write_rigid_bodies=false;
    solids_parameters.deformable_object_collision_parameters.perform_collision_body_collisions=false;
    symmetric_initial_grid=false;
}
// Destructor: releases everything the constructor (and Initialize_Optimization)
// allocated; `layers` owns its ARRAY<int>* entries.
template<class T> TETRAHEDRAL_MESHING<T>::
~TETRAHEDRAL_MESHING()
{
    layers.Delete_Pointers_And_Clean_Memory();
    delete extra_refinement_criteria;
    delete solids_evolution;
    delete &solid_body_collection;
    delete &solids_parameters;
}
//#####################################################################
// Function Initialize
//#####################################################################
template<class T> void TETRAHEDRAL_MESHING<T>::
Initialize(IMPLICIT_OBJECT<TV>* implicit_surface_input)
{
    // Record the level set the mesh must conform to; the caller retains
    // ownership (the destructor does not delete implicit_surface).
    implicit_surface=implicit_surface_input;
    DEFORMABLE_BODY_COLLECTION<TV>& deformable_body_collection=solid_body_collection.deformable_body_collection;
    // Create the (initially empty) tetrahedralized volume the meshing fills in.
    TETRAHEDRALIZED_VOLUME<T>* tetrahedralized_volume=TETRAHEDRALIZED_VOLUME<T>::Create(deformable_body_collection.particles);
    deformable_body_collection.deformable_geometry.Add_Structure(tetrahedralized_volume);
    // Level-set driven forces/velocities, shared by the deformable and rigid
    // sides of the solid body collection.
    level_set_forces_and_velocities=new LEVEL_SET_FORCES_AND_VELOCITIES<TV>(*tetrahedralized_volume,*implicit_surface);
    solid_body_collection.example_forces_and_velocities=level_set_forces_and_velocities;
    solid_body_collection.rigid_body_collection.rigids_example_forces_and_velocities=level_set_forces_and_velocities;
    solid_body_collection.Set_CFL_Number(solids_parameters.cfl);
}
//#####################################################################
// Function Snap_Nodes_To_Level_Set_Boundary
//#####################################################################
template<class T> void TETRAHEDRAL_MESHING<T>::
Snap_Nodes_To_Level_Set_Boundary(const int iterations)
{
    // Projects every boundary node onto the implicit surface's zero
    // isocontour by moving it along the extended normal by the extended
    // signed distance. phi/normal are re-evaluated at the node's updated
    // position on each of the `iterations` passes, refining the projection.
    DEFORMABLE_BODY_COLLECTION<TV>& deformable_body_collection=solid_body_collection.deformable_body_collection;
    TETRAHEDRON_MESH& mesh=deformable_body_collection.deformable_geometry.template Find_Structure<TETRAHEDRALIZED_VOLUME<T>&>().mesh;
    if(!mesh.boundary_nodes) mesh.Initialize_Boundary_Nodes();
    for(int t=1;t<=mesh.boundary_nodes->m;t++) for(int k=1;k<=iterations;k++){
        int node=(*mesh.boundary_nodes)(t);TV X=deformable_body_collection.particles.X(node);
        deformable_body_collection.particles.X(node)-=implicit_surface->Extended_Phi(X)*implicit_surface->Extended_Normal(X);}
    // Write a frame so the snapped state can be inspected.
    FILE_UTILITIES::Create_Directory(output_directory);
    Write_Output_Files(++frame);
}
//#####################################################################
// Function Initialize_Optimization
//#####################################################################
template<class T> void TETRAHEDRAL_MESHING<T>::
Initialize_Optimization(const bool verbose)
{
    // Builds the data the optimization sweeps need: boundary connectivity, a
    // map from node index to its slot in the boundary node list, and "layers"
    // of nodes found by breadth-first expansion inward from the boundary
    // (layers(1) = boundary nodes, layers(2) = their unvisited neighbors, ...).
    TETRAHEDRON_MESH& mesh=solid_body_collection.deformable_body_collection.deformable_geometry.template Find_Structure<TETRAHEDRALIZED_VOLUME<T>&>().mesh;
    mesh.Initialize_Incident_Elements();if(!boundary_mesh) mesh.Initialize_Boundary_Mesh();else mesh.boundary_mesh=boundary_mesh;
    mesh.boundary_mesh->Initialize_Neighbor_Nodes();mesh.boundary_mesh->Initialize_Incident_Elements();
    mesh.Initialize_Boundary_Nodes(); // assumes that Initialize_Boundary_Nodes will use boundary_mesh
    map_from_nodes_to_boundary_list.Resize(mesh.number_nodes);
    for(int i=1;i<=mesh.boundary_nodes->m;i++) map_from_nodes_to_boundary_list((*mesh.boundary_nodes)(i))=i;
    // Layer 1 takes ownership of the mesh's boundary node list.
    for(int i=1;i<=layers.m;i++) delete layers(i);layers.Resize(1);layers(1)=mesh.boundary_nodes;
    mesh.boundary_nodes=0; // we don't need it hanging off the mesh object any more
    if(verbose) {std::stringstream ss;ss<<"boundary layer has "<<layers(1)->m<<" nodes"<<std::endl;LOG::filecout(ss.str());}
    // Breadth-first flood fill: mark the boundary, then repeatedly collect the
    // unmarked nodes sharing a tetrahedron with the previous layer.
    ARRAY<bool,VECTOR<int,1> > marked(1,mesh.number_nodes);for(int i=1;i<=layers(1)->m;i++) marked((*layers(1))(i))=true;
    for(int l=2;;l++){
        layers.Append(new ARRAY<int>);
        for(int i=1;i<=layers(l-1)->m;i++){
            int j=(*layers(l-1))(i);
            for(int k=1;k<=(*mesh.incident_elements)(j).m;k++) for(int a=1;a<=4;a++){
                int b=mesh.elements((*mesh.incident_elements)(j)(k))(a);
                if(!marked(b)){layers(l)->Append(b);marked(b)=true;}}}
        if(layers(l)->m==0){delete layers(l);layers.Remove_End();break;} // no new nodes reached: done
        if(verbose) {std::stringstream ss;ss<<"layer "<<l<<" has "<<layers(l)->m<<" nodes"<<std::endl;LOG::filecout(ss.str());}}
    boundary_mesh_normals.Resize(layers(1)->m);
    // With embedded T-junctions enabled, drop layer nodes that no longer touch
    // any tetrahedron.
    if(replace_green_refinement_with_embedded_t_junctions)
        for(int i=1;i<=layers.m;i++) for(int j=layers(i)->m;j>=1;j--) if(!(*mesh.incident_elements)((*layers(i))(j)).m) layers(i)->Remove_Index_Lazy(j);
    Compute_Boundary_Mesh_Normals();
}
//#####################################################################
// Function Create_Final_Mesh_With_Optimization
//#####################################################################
template<class T> void TETRAHEDRAL_MESHING<T>::
Create_Final_Mesh_With_Optimization(const int number_of_initial_steps,const int number_of_final_steps,const bool verbose)
{
    // Runs the optimization sweeps: first a ramp of partial steps toward the
    // boundary (compression fraction i/(i+2), i.e. 1/3, 1/2, 3/5, ...), then
    // full steps. A frame is written after every sweep for inspection.
    Write_Output_Files(frame);
    for(int i=1;i<=number_of_initial_steps;i++){
        if(verbose) {std::stringstream ss;ss<<"Working on initial iteration "<<i<<" of "<<number_of_initial_steps<<"+"<<number_of_final_steps<<std::endl;LOG::filecout(ss.str());}
        Optimization_Sweep(i/(T)(i+2),verbose);
        Write_Output_Files(++frame);}
    for(int i=1;i<=number_of_final_steps;i++){
        if(verbose) {std::stringstream ss;ss<<"Working on iteration "<<i<<" of "<<number_of_final_steps<<" (full step towards boundary)"<<std::endl;LOG::filecout(ss.str());}
        Optimization_Sweep(1,verbose);
        Write_Output_Files(++frame);}
}
//#####################################################################
// Function Optimization_Sweep
//#####################################################################
template<class T> void TETRAHEDRAL_MESHING<T>::
Optimization_Sweep(const T compression_fraction,const bool verbose)
{
    // One full pass: boundary layer, interior layers 2..m, then back m..2,
    // then the boundary again. The `true` flags make the second visits walk
    // each layer's nodes in the opposite order. Progress dots go to the log.
    worst_boundary_quality=worst_interior_quality=FLT_MAX;
    Optimize_Boundary_Layer(compression_fraction);
    std::stringstream ss;
    if(verbose) ss<<'.';
    for(int j=2;j<=layers.m;j++){Optimize_Interior_Layer(j);if(verbose) ss<<'.';}
    for(int j=layers.m;j>=2;j--){Optimize_Interior_Layer(j,true);if(verbose) ss<<'.';}
    Optimize_Boundary_Layer(compression_fraction,true);
    if(verbose) ss<<'.'<<std::endl;
    LOG::filecout(ss.str());
}
//#####################################################################
// Function Optimize_Boundary_Layer
//#####################################################################
template<class T> void TETRAHEDRAL_MESHING<T>::
Optimize_Boundary_Layer(const T compression_fraction,const bool reverse)
{
    Check_For_Interrupts();
    PARTICLES<TV>& particles=solid_body_collection.deformable_body_collection.particles;
    ARRAY<TV> directions(5);ARRAY<int>& nodes=*layers(1);
    // Step 1: pull each boundary node toward the level set by a fraction of
    // its signed distance along the stored boundary normal.
    for(int i=1;i<=nodes.m;i++){
        particles.X(nodes(i))-=compression_fraction*(*implicit_surface)(particles.X(nodes(i)))*boundary_mesh_normals(map_from_nodes_to_boundary_list(nodes(i)));
        Update_Dependent_Nodes(nodes(i));}
    Compute_Boundary_Mesh_Normals();
    // Step 2: for each node, pattern-search in the tangent plane. The first
    // tangent is chosen perpendicular to the normal; the remaining four are
    // spaced 72 degrees apart (the constants are cos/sin of 72 and 144 deg).
    for(int i=1;i<=nodes.m;i++){
        int p=nodes(reverse?nodes.m+1-i:i);
        TV normal=boundary_mesh_normals(map_from_nodes_to_boundary_list(p));
        if(abs(normal.x)>abs(normal.z) || abs(normal.y)>abs(normal.z)) directions(1)=TV(normal.y,-normal.x,0);
        else directions(1)=TV(normal.z,0,-normal.x);
        directions(1).Normalize();
        TV b=TV::Cross_Product(normal,directions(1));
        directions(2)=(T).30901699437494742410229341718282*directions(1)+(T).95105651629515357211643933337938*b;
        directions(3)=(T)-.80901699437494742410229341718282*directions(1)+(T).58778525229247312916870595463907*b;
        directions(4)=(T)-.80901699437494742410229341718282*directions(1)-(T).58778525229247312916870595463907*b;
        directions(5)=(T).30901699437494742410229341718282*directions(1)-(T).95105651629515357211643933337938*b;
        Search_For_Best_Position(p,directions,true);}
}
//#####################################################################
// Function Optimize_Interior_Layer
//#####################################################################
template<class T> void TETRAHEDRAL_MESHING<T>::
Optimize_Interior_Layer(const int layer,const bool reverse)
{
    Check_For_Interrupts();
    // Seven fixed unit search directions (roughly spread over the sphere);
    // each node in the layer is pattern-searched along them, walking the
    // layer forward or backward depending on `reverse`.
    ARRAY<TV> directions(7);
    directions(1)=TV(1,0,0);
    directions(2)=TV((T).21884275609895,(T).97576013861144,0);
    directions(3)=TV((T).21884282342443,-(T).59716303919821,-(T).77168913640869);
    directions(4)=TV(-(T).05406424975064,(T).23640431413810,(T).97014950247670);
    directions(5)=TV(-(T).90174918437566,-(T).34565929710323,(T).25955357597988);
    directions(6)=TV((T).21863854196520,-(T).86642677947325,(T).44888954518784);
    directions(7)=TV(-(T).58820534751966,(T).35620151622547,-(T).72604059734147);
    for(int i=1;i<=layers(layer)->m;i++){int j;if(reverse) j=(*layers(layer))(layers(layer)->m+1-i);else j=(*layers(layer))(i);Search_For_Best_Position(j,directions);}
}
//#####################################################################
// Function Search_For_Best_Position
//#####################################################################
template<class T> void TETRAHEDRAL_MESHING<T>::
Search_For_Best_Position(const int node,const ARRAY<TV>& directions,bool include_boundary_terms)
{
    // Derivative-free pattern search for a node position that maximizes the
    // quality of the worst dependent tetrahedron (plus, on the boundary, the
    // worst dependent boundary triangle).
    TETRAHEDRON_MESH& mesh=solid_body_collection.deformable_body_collection.deformable_geometry.template Find_Structure<TETRAHEDRALIZED_VOLUME<T>&>().mesh;
    PARTICLES<TV>& particles=solid_body_collection.deformable_body_collection.particles;
    T best_quality=Quality_Of_Worst_Dependent_Tetrahedron(node);if(include_boundary_terms) best_quality+=Quality_Of_Worst_Dependent_Boundary_Triangle(node);
    if(use_global_quality_criteria_for_early_exit){
        if(include_boundary_terms){
            if(best_quality<worst_boundary_quality) worst_boundary_quality=best_quality;
            else if(best_quality>worst_boundary_quality+.15) return;} // early exit if good enough relative to rest of mesh
        else{
            if(best_quality<worst_interior_quality) worst_interior_quality=best_quality;
            else if(best_quality>worst_interior_quality+.1) return;}} // early exit if good enough relative to rest of mesh
    // Initial step size alpha = 5% of the smallest altitude of any incident
    // tetrahedron with respect to this node.
    TV best_x(particles.X(node)),xi,xj,xk,xl;T alpha=FLT_MAX;PLANE<T> p;
    for(int s=1;s<=(*mesh.incident_elements)(node).m;s++){
        int t=(*mesh.incident_elements)(node)(s);
        int i,j,k,l;mesh.elements(t).Get(i,j,k,l);xi=particles.X(i);xj=particles.X(j);xk=particles.X(k);xl=particles.X(l);
        if(i==node){p.Specify_Three_Points(xj,xl,xk);alpha=min(alpha,TV::Dot_Product(xi-xj,p.normal));}
        else if(j==node){p.Specify_Three_Points(xi,xk,xl);alpha=min(alpha,TV::Dot_Product(xj-xi,p.normal));}
        else if(k==node){p.Specify_Three_Points(xi,xl,xj);alpha=min(alpha,TV::Dot_Product(xk-xi,p.normal));}
        else{p.Specify_Three_Points(xi,xj,xk);alpha=min(alpha,TV::Dot_Product(xl-xi,p.normal));}}
    alpha*=(T).05;
    // Try directions in an order biased around the last successful one; on a
    // round with no improvement, shrink the step by 0.45 and count a strike.
    // The search stops after four strikes in total (strikes never resets).
    int strikes=0,last_direction=1;
    while(strikes<=3){
        T localbest_quality=best_quality;TV localbest_x=best_x;
        for(int d=1;d<=directions.m;d++){
            int this_direction;if(d%2) this_direction=last_direction+d/2;else this_direction=last_direction-d/2;
            this_direction=(this_direction+directions.m-1)%directions.m+1;
            particles.X(node)=best_x+alpha*directions(this_direction);Update_Dependent_Nodes(node);
            T q=Quality_Of_Worst_Dependent_Tetrahedron(node);if(include_boundary_terms) q+=Quality_Of_Worst_Dependent_Boundary_Triangle(node);
            if(q>localbest_quality){localbest_quality=q;localbest_x=particles.X(node);last_direction=this_direction;break;}}
        if(localbest_quality>best_quality){best_quality=localbest_quality;best_x=localbest_x;}
        else{strikes++;alpha*=(T).45;}}
    // Restore the best position found (the last probe may have been worse).
    particles.X(node)=best_x;Update_Dependent_Nodes(node);
}
//#####################################################################
// Function Quality_Of_Worst_Incident_Tetrahedron
//#####################################################################
template<class T> T TETRAHEDRAL_MESHING<T>::
Quality_Of_Worst_Incident_Tetrahedron(const int node)
{
    // Quality of a tetrahedron = min_altitude/max_edge_length plus 0.1 times
    // the smallest pairwise dot product of its four face normals (a dihedral
    // term); returns the minimum over all tetrahedra incident to `node`,
    // capped at 1.
    TETRAHEDRON_MESH& mesh=solid_body_collection.deformable_body_collection.deformable_geometry.template Find_Structure<TETRAHEDRALIZED_VOLUME<T>&>().mesh;
    PARTICLES<TV>& particles=solid_body_collection.deformable_body_collection.particles;
    PLANE<T> p;TV xi,xj,xk,xl,n1,n2,n3,n4;T worst_quality=1;
    for(int s=1;s<=(*mesh.incident_elements)(node).m;s++){
        int t=(*mesh.incident_elements)(node)(s);
        int i,j,k,l;mesh.elements(t).Get(i,j,k,l);xi=particles.X(i);xj=particles.X(j);xk=particles.X(k);xl=particles.X(l);
        T max_edge_length=max((xi-xj).Magnitude(),(xj-xk).Magnitude(),(xk-xi).Magnitude(),(xi-xl).Magnitude(),(xj-xl).Magnitude(),(xk-xl).Magnitude());
        p.Specify_Three_Points(xj,xl,xk);n1=p.normal;T min_altitude=TV::Dot_Product(xi-xj,p.normal);
        p.Specify_Three_Points(xi,xk,xl);n2=p.normal;min_altitude=min(min_altitude,TV::Dot_Product(xj-xi,p.normal));
        p.Specify_Three_Points(xi,xl,xj);n3=p.normal;min_altitude=min(min_altitude,TV::Dot_Product(xk-xi,p.normal));
        p.Specify_Three_Points(xi,xj,xk);n4=p.normal;min_altitude=min(min_altitude,TV::Dot_Product(xl-xi,p.normal));
        T min_dihedral=min(TV::Dot_Product(n1,n2),TV::Dot_Product(n1,n3),TV::Dot_Product(n1,n4),TV::Dot_Product(n2,n3),
            TV::Dot_Product(n2,n4),TV::Dot_Product(n3,n4));
        worst_quality=min(worst_quality,min_altitude/max_edge_length+(T).1*min_dihedral);}
    return worst_quality;
}
//#####################################################################
// Function Quality_Of_Worst_Incident_Boundary_Triangle
//#####################################################################
template<class T> T TETRAHEDRAL_MESHING<T>::
Quality_Of_Worst_Incident_Boundary_Triangle(const int node)
{
    // Quality of a boundary triangle = 1/aspect_ratio + 1/maximum_angle;
    // returns the minimum over all boundary triangles incident to `node`,
    // capped at 1.
    TETRAHEDRON_MESH& mesh=solid_body_collection.deformable_body_collection.deformable_geometry.template Find_Structure<TETRAHEDRALIZED_VOLUME<T>&>().mesh;
    PARTICLES<TV>& particles=solid_body_collection.deformable_body_collection.particles;
    TRIANGLE_3D<T> triangle;T worst_quality=1;
    for(int s=1;s<=(*mesh.boundary_mesh->incident_elements)(node).m;s++){
        int t=(*mesh.boundary_mesh->incident_elements)(node)(s);
        int i,j,k;mesh.boundary_mesh->elements(t).Get(i,j,k);
        triangle.Specify_Three_Points(particles.X(i),particles.X(j),particles.X(k));
        worst_quality=min(worst_quality,1/triangle.Aspect_Ratio()+1/triangle.Maximum_Angle());}
    return worst_quality;
}
//#####################################################################
// Function Quality_Of_Worst_Dependent_Tetrahedron
//#####################################################################
template<class T> T TETRAHEDRAL_MESHING<T>::
Quality_Of_Worst_Dependent_Tetrahedron(const int node)
{
    // Worst incident tetrahedron quality over the node itself and, when T-junction embedding is
    // active, over every dependent (bound) node as well.
    T quality=Quality_Of_Worst_Incident_Tetrahedron(node);
    if(replace_green_refinement_with_embedded_t_junctions)
        for(int k=1;k<=(*dependent_nodes)(node).m;k++)
            quality=min(quality,Quality_Of_Worst_Incident_Tetrahedron((*dependent_nodes)(node)(k)));
    return quality;
}
//#####################################################################
// Function Quality_Of_Worst_Dependent_Boundary_Triangle
//#####################################################################
template<class T> T TETRAHEDRAL_MESHING<T>::
Quality_Of_Worst_Dependent_Boundary_Triangle(const int node)
{
    // Worst incident boundary triangle quality over the node itself and, when T-junction embedding
    // is active, over every dependent (bound) node as well.
    T quality=Quality_Of_Worst_Incident_Boundary_Triangle(node);
    if(replace_green_refinement_with_embedded_t_junctions)
        for(int k=1;k<=(*dependent_nodes)(node).m;k++)
            quality=min(quality,Quality_Of_Worst_Incident_Boundary_Triangle((*dependent_nodes)(node)(k)));
    return quality;
}
//#####################################################################
// Function Compute_Boundary_Mesh_Normals
//#####################################################################
template<class T> void TETRAHEDRAL_MESHING<T>::
Compute_Boundary_Mesh_Normals()
{
    // Area-weighted vertex normals for the boundary mesh: accumulate each boundary face's plane
    // normal onto its three corners (via the node->boundary-list map), then normalize.
    TETRAHEDRON_MESH& tet_mesh=solid_body_collection.deformable_body_collection.deformable_geometry.template Find_Structure<TETRAHEDRALIZED_VOLUME<T>&>().mesh;
    PARTICLES<TV>& particle_array=solid_body_collection.deformable_body_collection.particles;
    ARRAYS_COMPUTATIONS::Fill(boundary_mesh_normals,TV());
    PLANE<T> face_plane;
    for(int tri=1;tri<=tet_mesh.boundary_mesh->elements.m;tri++){
        int a,b,c;tet_mesh.boundary_mesh->elements(tri).Get(a,b,c);
        face_plane.Specify_Three_Points(particle_array.X(a),particle_array.X(b),particle_array.X(c));
        boundary_mesh_normals(map_from_nodes_to_boundary_list(a))+=face_plane.normal;
        boundary_mesh_normals(map_from_nodes_to_boundary_list(b))+=face_plane.normal;
        boundary_mesh_normals(map_from_nodes_to_boundary_list(c))+=face_plane.normal;}
    for(int entry=1;entry<=boundary_mesh_normals.m;entry++) boundary_mesh_normals(entry).Normalize();
}
//#####################################################################
// Function Update_Dependent_Nodes
//#####################################################################
template<class T> void TETRAHEDRAL_MESHING<T>::
Update_Dependent_Nodes(const int node)
{
    // Re-embed every T-junction particle bound to this node so its position stays consistent with
    // its parents. No-op unless T-junction embedding is enabled.
    if(!replace_green_refinement_with_embedded_t_junctions) return;
    PARTICLES<TV>& particle_array=solid_body_collection.deformable_body_collection.particles;
    BINDING_LIST<TV>& bindings=solid_body_collection.deformable_body_collection.binding_list;
    for(int k=1;k<=(*dependent_nodes)(node).m;k++){
        int dependent=(*dependent_nodes)(node)(k);
        particle_array.X(dependent)=bindings.Embedded_Position(dependent);}
}
//#####################################################################
// Function Initialize_Dynamics
//#####################################################################
// Set up mesh auxiliary structures, particle masses, and forces for the dynamic meshing phase.
// Adds ether drag, finite volume or mass-spring forces according to the configured flags, then
// initializes the solids evolution.
template<class T> void TETRAHEDRAL_MESHING<T>::
Initialize_Dynamics()
{
    DEFORMABLE_BODY_COLLECTION<TV>& deformable_body_collection=solid_body_collection.deformable_body_collection;
    TETRAHEDRALIZED_VOLUME<T>& tetrahedralized_volume=deformable_body_collection.deformable_geometry.template Find_Structure<TETRAHEDRALIZED_VOLUME<T>&>();
    TETRAHEDRON_MESH& mesh=tetrahedralized_volume.mesh;
    mesh.Initialize_Adjacent_Elements();if(!boundary_mesh) mesh.Initialize_Boundary_Mesh();else mesh.boundary_mesh=boundary_mesh; // reuse the T-junction boundary mesh if one was built
    mesh.Initialize_Boundary_Nodes();
    tetrahedralized_volume.Initialize_Triangulated_Surface();
    tetrahedralized_volume.triangulated_surface->mesh.Initialize_Incident_Elements();
    // set up dynamics
    SOLIDS_STANDARD_TESTS<TV>::Set_Mass_Of_Particles(tetrahedralized_volume,density,use_constant_mass);
    if(boundary_mass_multiplicative_factor!=1){
        bool boundary_nodes_defined=mesh.boundary_nodes!=0;if(!boundary_nodes_defined) mesh.Initialize_Boundary_Nodes();
        // BUG FIX: this loop previously scaled particle POSITIONS (particles.X(i)) and indexed by
        // the loop counter rather than the boundary node id, corrupting the geometry. Scale the
        // MASS of each boundary node instead, which is what the factor's name and the surrounding
        // mass setup (Set_Mass_Of_Particles above) intend.
        for(int i=1;i<=mesh.boundary_nodes->m;i++) tetrahedralized_volume.particles.mass((*mesh.boundary_nodes)(i))*=boundary_mass_multiplicative_factor;
        if(!boundary_nodes_defined){delete mesh.boundary_nodes;mesh.boundary_nodes=0;}}
    solid_body_collection.deformable_body_collection.binding_list.Distribute_Mass_To_Parents();
    solid_body_collection.deformable_body_collection.binding_list.Clear_Hard_Bound_Particles(deformable_body_collection.particles.mass);
    solid_body_collection.deformable_body_collection.particles.Compute_Auxiliary_Attributes(solid_body_collection.deformable_body_collection.soft_bindings);
    if(dynamic_ether_viscosity!=0)
        solid_body_collection.Add_Force(new ETHER_DRAG<GRID<TV> >(dynamic_cast<PARTICLES<TV>&>(tetrahedralized_volume.particles),
            solid_body_collection.rigid_body_collection,true,true,dynamic_ether_viscosity));
    if(use_finite_volume) solid_body_collection.Add_Force(Create_Finite_Volume(tetrahedralized_volume,
        new ROTATED_LINEAR<T,3>(youngs_modulus,poissons_ratio,Rayleigh_coefficient)));
    else if(use_masses_and_springs){
        solid_body_collection.Add_Force(Create_Edge_Springs(tetrahedralized_volume,edge_spring_stiffness,edge_spring_overdamping_fraction));
        solid_body_collection.Add_Force(Create_Altitude_Springs(tetrahedralized_volume,altitude_spring_stiffness,
            altitude_spring_overdamping_fraction,true,(T).1,true,(T).1,true,(T)0,false));}
    solid_body_collection.Update_Simulated_Particles();
    solids_evolution->Initialize_Rigid_Bodies((T)24,false);
}
//#####################################################################
// Function Create_Final_Mesh_With_Dynamics
//#####################################################################
template<class T> void TETRAHEDRAL_MESHING<T>::
Create_Final_Mesh_With_Dynamics(const T time_step,const int number_of_force_steps,const int number_of_velocity_steps,const bool verbose)
{
    // Two-phase optimization: first drive the mesh with level set forces, then switch the level
    // set coupling to enslaved external velocities. Output is written after every step.
    Write_Output_Files(frame);
    for(int step=1;step<=number_of_force_steps;step++){
        Advance_Dynamics((step-1)*time_step,step*time_step,verbose);
        if(verbose){std::stringstream ss;ss<<"TIME STEP = "<<step<<", TIME = "<<" "<<step*time_step<<std::endl;LOG::filecout(ss.str());}
        Write_Output_Files(++frame);}
    if(verbose){std::stringstream ss;ss<<"\n\n\n SWITCHING TO SETTING EXTERNAL VELOCITIES RATHER THAN FORCES!!!\n\n"<<std::endl;LOG::filecout(ss.str());}
    level_set_forces_and_velocities->Use_External_Velocities();
    const int total_steps=number_of_force_steps+number_of_velocity_steps;
    for(int step=number_of_force_steps+1;step<=total_steps;step++){
        Advance_Dynamics((step-1)*time_step,step*time_step,verbose);
        if(verbose){std::stringstream ss;ss<<"TIME STEP = "<<step<<", TIME = "<<" "<<step*time_step<<std::endl;LOG::filecout(ss.str());}
        Write_Output_Files(++frame);}
}
//#####################################################################
// Function Advance_Dynamics
//#####################################################################
// Advance the meshing dynamics from time to stopping_time using adaptive CFL-limited substeps.
// Each substep runs a position update followed by a velocity update on the solids evolution.
template<class T> void TETRAHEDRAL_MESHING<T>::
Advance_Dynamics(const T time,const T stopping_time,const bool verbose)
{
    DEFORMABLE_BODY_COLLECTION<TV>& deformable_body_collection=solid_body_collection.deformable_body_collection;
    // prepare for force computation
    solid_body_collection.Update_Position_Based_State(time,true);
    T new_time=time;
    int substep=0;bool done=false;
    while(!done){substep++;
        T dt=solid_body_collection.CFL();
        // Clamp the final step to land exactly on stopping_time; if two CFL steps would overshoot,
        // take just over half the remainder so the last substep is not vanishingly small.
        if(new_time+dt>=stopping_time){dt=stopping_time-new_time;done=true;}else if(new_time+2*dt>=stopping_time) dt=(T).51*(stopping_time-new_time);
        if(verbose) {std::stringstream ss;ss<<"dt="<<dt<<" substep="<<substep<<std::endl;LOG::filecout(ss.str());}
        solids_evolution->Advance_One_Time_Step_Position(dt,new_time,true);
        solids_evolution->Advance_One_Time_Step_Velocity(dt,new_time,true);new_time+=dt;}
    if(verbose){int index=0;
        // Diagnostics: max |phi| on the boundary, max particle speed, and max tet aspect ratio,
        // each with the index where the extremum occurs.
        TETRAHEDRALIZED_VOLUME<T>& tetrahedralized_volume=deformable_body_collection.deformable_geometry.template Find_Structure<TETRAHEDRALIZED_VOLUME<T>&>();
        std::stringstream ss;
        ss<<" maxPhi="<<tetrahedralized_volume.Maximum_Magnitude_Phi_On_Boundary(*implicit_surface,&index)<<"("<<index<<")";
        index=ARRAYS_COMPUTATIONS::Arg_Maximum_Magnitude(deformable_body_collection.particles.V);
        ss<<" maxV="<<deformable_body_collection.particles.V(index).Magnitude()<<"("<<index<<")";
        ss<<" maxAR="<<tetrahedralized_volume.Maximum_Aspect_Ratio(&index)<<"("<<index<<")\n";
        LOG::filecout(ss.str());}
}
//#####################################################################
// Function Discard_Valence_Zero_Particles_And_Renumber
//#####################################################################
// Delete particles referenced by neither mesh, renumber the survivors compactly, and remap both
// meshes' element indices. condensation_mapping receives old->new particle indices (0 = deleted).
// NOTE(review): assumes mesh1 and mesh2 index into the same particle array — confirm at call sites.
template<class TV,class T_MESH1,class T_MESH2>
void Discard_Valence_Zero_Particles_And_Renumber(PARTICLES<TV>& particles,T_MESH1& mesh1,T_MESH2& mesh2,ARRAY<int>& condensation_mapping)
{
    assert(mesh1.number_nodes==mesh2.number_nodes);
    // mark which nodes are used
    ARRAY<bool> node_is_used(mesh1.number_nodes);
    for(int t=1;t<=mesh1.elements.m;t++){
        INDIRECT_ARRAY<ARRAY<bool>,typename T_MESH1::ELEMENT_TYPE&> node_is_used_subset=node_is_used.Subset(mesh1.elements(t));ARRAYS_COMPUTATIONS::Fill(node_is_used_subset,true);}
    for(int t=1;t<=mesh2.elements.m;t++){
        INDIRECT_ARRAY<ARRAY<bool>,typename T_MESH2::ELEMENT_TYPE&> node_is_used_subset=node_is_used.Subset(mesh2.elements(t));ARRAYS_COMPUTATIONS::Fill(node_is_used_subset,true);}
    // make condensation mapping (used nodes get consecutive new indices starting at 1)
    condensation_mapping.Resize(mesh1.number_nodes,false,false);ARRAYS_COMPUTATIONS::Fill(condensation_mapping,0);
    for(int t=1,counter=0;t<=mesh1.number_nodes;t++) if(node_is_used(t)) condensation_mapping(t)=++counter;
    // make new triangle mesh; number_nodes is rebuilt as the max renumbered index seen
    mesh1.number_nodes=0;
    for(int t=1;t<=mesh1.elements.m;t++){
        mesh1.elements(t)=condensation_mapping.Subset(mesh1.elements(t));
        mesh1.number_nodes=max(mesh1.number_nodes,mesh1.elements(t).Max());}
    for(int t=1;t<=mesh2.elements.m;t++){
        mesh2.elements(t)=condensation_mapping.Subset(mesh2.elements(t));
        mesh1.number_nodes=max(mesh1.number_nodes,mesh2.elements(t).Max());}
    mesh2.number_nodes=mesh1.number_nodes;
    // do particles same way: drop unmapped particles plus any trailing particles beyond the mapping
    for(int p=1;p<=condensation_mapping.m;p++) if(!condensation_mapping(p)) particles.array_collection->Add_To_Deletion_List(p);
    for(int p=condensation_mapping.m+1;p<=particles.array_collection->Size();p++) particles.array_collection->Add_To_Deletion_List(p);
    particles.array_collection->Delete_Elements_On_Deletion_List(true);particles.array_collection->Compact();
    mesh1.Refresh_Auxiliary_Structures();mesh2.Refresh_Auxiliary_Structures();
}
//#####################################################################
// Function Create_Initial_Mesh
//#####################################################################
// Build the initial candidate tetrahedron mesh: seed a BCC/octahedron lattice over the implicit
// surface's bounding box, prune tets outside the surface, optionally red-green refine adaptively,
// optionally discard tets to clean up topology, and optionally replace green refinement with
// embedded T-junction bindings (coarsening the mesh and binding T-junction particles to parents).
template<class T> void TETRAHEDRAL_MESHING<T>::
Create_Initial_Mesh(const T bcc_lattice_cell_size,const bool use_adaptive_refinement,const int max_subdivision_levels,const bool discard_to_get_nice_topology,const bool verbose,
    const bool use_aggressive_tet_pruning_globally,const ARRAY<RANGE<TV> >* bounding_boxes_for_aggressive_pruning)
{
    TETRAHEDRALIZED_VOLUME<T>& tetrahedralized_volume=solid_body_collection.deformable_body_collection.deformable_geometry.template Find_Structure<TETRAHEDRALIZED_VOLUME<T>&>();
    TETRAHEDRON_MESH& mesh=tetrahedralized_volume.mesh;
    PARTICLES<TV>& particles=dynamic_cast<PARTICLES<TV>&>(tetrahedralized_volume.particles);
    // initial bcc mesh
    RANGE<TV>& box=implicit_surface->box;
    TV size=box.Edge_Lengths();T cell_size=bcc_lattice_cell_size;
    if(!bcc_lattice_cell_size){ // cell size of 0 means "pick a default"
        if(use_adaptive_refinement) cell_size=(T).1*min(size.x,size.y,size.z); // default is about 10 grid cells
        else cell_size=implicit_surface->Minimum_Cell_Size();} // use the cell size of the implicit surface
    int m=(int)ceil(size.x/cell_size),n=(int)ceil(size.y/cell_size),mn=(int)ceil(size.z/cell_size);
    GRID<TV> bcc_grid;
    if(!symmetric_initial_grid)
        bcc_grid=GRID<TV>(m+1,n+1,mn+1,box.min_corner.x,box.min_corner.x+cell_size*m,box.min_corner.y,box.min_corner.y+cell_size*n,box.min_corner.z,box.min_corner.z+cell_size*mn);
    else{ // center the grid on the box so the lattice is symmetric about the box center
        TV center=box.Center();
        TV shift=cell_size/2*TV((T)m,(T)n,(T)mn);
        bcc_grid=GRID<TV>(m+1,n+1,mn+1,RANGE<TV>(center-shift,center+shift));}
    tetrahedralized_volume.Initialize_Octahedron_Mesh_And_Particles(bcc_grid);
    if(use_aggressive_tet_pruning_globally) tetrahedralized_volume.Discard_Tetrahedrons_Outside_Implicit_Surface_Aggressive(*implicit_surface);
    else tetrahedralized_volume.Discard_Tetrahedrons_Outside_Implicit_Surface(*implicit_surface);
    if(bounding_boxes_for_aggressive_pruning) tetrahedralized_volume.Discard_Tetrahedrons_Outside_Implicit_Surface_Aggressive(*implicit_surface,*bounding_boxes_for_aggressive_pruning);
    Check_For_Interrupts();
    // refine further if adaptive: repeatedly sweep all tets, refining those that fail the criteria
    RED_GREEN_TETRAHEDRA<T> redgreen(tetrahedralized_volume);
    if(use_adaptive_refinement){
        ARRAY<int> tets_to_refine;tets_to_refine.Preallocate(5000);
        for(int iterations=1;iterations<=max_subdivision_levels;iterations++){
            tets_to_refine.Remove_All();
            std::stringstream ss;
            ss<<"Checking for refinement "<<std::flush;
            for(int t=1;t<=mesh.elements.m;t++){
                if(t%10000==0){ // periodic progress dot + interrupt check on large meshes
                    ss<<"."<<std::flush;
                    Check_For_Interrupts();}
                if(Tetrahedron_Refinement_Criteria(t)) tets_to_refine.Append(t);}
            ss<<std::endl;if(tets_to_refine.m==0) break;
            if(verbose) ss<<"Refining "<<tets_to_refine.m<<" out of "<<mesh.elements.m<<" tets."<<std::endl;
            redgreen.Refine_Simplex_List(tets_to_refine);
            if(verbose) ss<<"(done with iteration="<<iterations<<")"<<std::endl;LOG::filecout(ss.str());}}
    // cut off tetrahedra to get the initial mesh, optionally substitute T-junctions for green refinements and clean memory
    ARRAY<bool> keep_tet_flag;if(discard_to_get_nice_topology) Discard_To_Get_Nice_Topology(redgreen,keep_tet_flag);
    if(replace_green_refinement_with_embedded_t_junctions){
        TETRAHEDRON_MESH final_mesh;ARRAY<int> t_junctions;ARRAY<VECTOR<int,2> > t_junction_parents;
        if(discard_to_get_nice_topology){
            // First coarsen to find which boundary nodes must be preserved, then coarsen again
            // with those nodes pinned so the final boundary topology survives.
            TETRAHEDRON_MESH minimal_mesh;
            redgreen.Coarsen_Complete_Refinements_Of_Subset(minimal_mesh,keep_tet_flag,t_junctions,t_junction_parents,allow_coarsening_to_non_graded_mesh);
            TRIANGLE_MESH minimal_boundary_mesh;
            minimal_mesh.Initialize_Boundary_Mesh_With_T_Junctions(minimal_boundary_mesh,t_junctions,t_junction_parents);
            ARRAY<bool> node_is_uncoarsenable(mesh.number_nodes);
            for(int t=1;t<=minimal_boundary_mesh.elements.m;t++){
                INDIRECT_ARRAY<ARRAY<bool>,TRIANGLE_MESH::ELEMENT_TYPE&> node_is_uncoarsenable_subset=node_is_uncoarsenable.Subset(minimal_boundary_mesh.elements(t));
                ARRAYS_COMPUTATIONS::Fill(node_is_uncoarsenable_subset,true);}
            redgreen.Coarsen_Complete_Refinements_Of_Subset(final_mesh,keep_tet_flag,t_junctions,t_junction_parents,allow_coarsening_to_non_graded_mesh,&node_is_uncoarsenable);}
        else{assert(!allow_coarsening_to_non_graded_mesh); // In the absence of discarding coarsening would just produce the unrefined BCC lattice
            redgreen.Coarsen_Green_Refinements(final_mesh,t_junctions,t_junction_parents);}
        mesh.Initialize_Mesh(final_mesh);
        boundary_mesh=new TRIANGLE_MESH;mesh.Initialize_Boundary_Mesh_With_T_Junctions(*boundary_mesh,t_junctions,t_junction_parents);
        mesh.Initialize_Incident_Elements();boundary_mesh->Initialize_Incident_Elements();
        BINDING_LIST<TV>& binding_list=solid_body_collection.deformable_body_collection.binding_list;
        // Map each surviving T-junction particle to its entry in t_junctions (0 = not a T-junction)
        ARRAY<int> particle_to_t_junction(particles.array_collection->Size());
        for(int i=1;i<=t_junctions.m;i++) if((*mesh.incident_elements)(t_junctions(i)).m || (*boundary_mesh->incident_elements)(t_junctions(i)).m) particle_to_t_junction(t_junctions(i))=i;
        for(int p=1;p<=particles.array_collection->Size();p++) if(particle_to_t_junction(p)){
            // Resolve the parents of this T-junction, recursively expanding any parent that is
            // itself a T-junction, accumulating interpolation weights (each level halves weight).
            ARRAY<int> parents;ARRAY<T> weights;
            int t_junction=particle_to_t_junction(p);
            parents.Append(t_junction_parents(t_junction)(1));weights.Append((T).5);
            parents.Append(t_junction_parents(t_junction)(2));weights.Append((T).5);
            for(int i=1;i<=parents.m;){
                if(!particle_to_t_junction(parents(i))){i++;continue;}
                T old_weight=weights(i);t_junction=particle_to_t_junction(parents(i));
                parents.Remove_Index_Lazy(i);weights.Remove_Index_Lazy(i);
                for(int j=1;j<=2;j++){
                    int new_parent=t_junction_parents(t_junction)(j);
                    int index=parents.Find(new_parent);if(!index){index=parents.Append(new_parent);weights.Append((T)0);}
                    weights(index)+=(T).5*old_weight;}}
            switch(parents.m){
                case 2: binding_list.Add_Binding(new LINEAR_BINDING<TV,2>(particles,p,VECTOR<int,2>(parents(1),parents(2)),VECTOR<T,2>(weights(1),weights(2))));break;
                case 3: binding_list.Add_Binding(new LINEAR_BINDING<TV,3>(particles,p,VECTOR<int,3>(parents(1),parents(2),parents(3)),TV(weights(1),weights(2),weights(3))));break;
                case 4: binding_list.Add_Binding(new LINEAR_BINDING<TV,4>(particles,p,VECTOR<int,4>(parents(1),parents(2),parents(3),parents(4)),
                    VECTOR<T,4>(weights(1),weights(2),weights(3),weights(4))));break;
                default: PHYSBAM_FATAL_ERROR();}}
        // Compact the particle array and rewrite binding indices through the condensation mapping
        ARRAY<int> condensation_mapping;
        Discard_Valence_Zero_Particles_And_Renumber(particles,mesh,*boundary_mesh,condensation_mapping);
        for(int b=1;b<=binding_list.bindings.m;b++)
            if(LINEAR_BINDING<TV,2>* binding=dynamic_cast<LINEAR_BINDING<TV,2>*>(binding_list.bindings(b))){
                binding->particle_index=condensation_mapping(binding->particle_index);binding->parents=condensation_mapping.Subset(binding->parents);}
            else if(LINEAR_BINDING<TV,3>* binding=dynamic_cast<LINEAR_BINDING<TV,3>*>(binding_list.bindings(b))){
                binding->particle_index=condensation_mapping(binding->particle_index);binding->parents=condensation_mapping.Subset(binding->parents);}
            else if(LINEAR_BINDING<TV,4>* binding=dynamic_cast<LINEAR_BINDING<TV,4>*>(binding_list.bindings(b))){
                binding->particle_index=condensation_mapping(binding->particle_index);binding->parents=condensation_mapping.Subset(binding->parents);}
            else PHYSBAM_NOT_IMPLEMENTED();
        // Build the reverse map: for each parent node, the list of particles bound to it
        dependent_nodes=new ARRAY<ARRAY<int> >(mesh.number_nodes);
        for(int b=1;b<=binding_list.bindings.m;b++){
            ARRAY<int> parents=binding_list.bindings(b)->Parents();
            for(int p=1;p<=parents.m;p++) (*dependent_nodes)(parents(p)).Append(binding_list.bindings(b)->particle_index);}
        binding_list.Update_Binding_Index_From_Particle_Index();}
    else{
        // NOTE(review): this branch reads keep_tet_flag even when discard_to_get_nice_topology is
        // false (the flag array is then empty) — confirm callers always pair these flags.
        for(int t=mesh.elements.m;t>=1;t--) if(!keep_tet_flag(t)) mesh.elements.Remove_Index_Lazy(t);mesh.elements.Compact();
        mesh.Delete_Auxiliary_Structures();tetrahedralized_volume.Discard_Valence_Zero_Particles_And_Renumber();}
}
//#####################################################################
// Function Tetrahedron_Refinement_Criteria
//#####################################################################
// Decide whether tetrahedron 'index' should be red-green refined. A tet is refined when it is
// near the interface (phi changes sign or is small within it) and either the local curvature or
// the linear-interpolation error of phi exceeds the configured thresholds, or its longest edge
// exceeds maximum_boundary_edge_length. Extra per-tet criteria, if supplied, take precedence.
template<class T> bool TETRAHEDRAL_MESHING<T>::
Tetrahedron_Refinement_Criteria(const int index) const
{
    if(extra_refinement_criteria){
        int extra_criteria=(*extra_refinement_criteria)(index);
        if(extra_criteria) return extra_criteria>0;} // nonzero entry forces refine (>0) or skip (<0)
    TETRAHEDRALIZED_VOLUME<T>& tetrahedralized_volume=solid_body_collection.deformable_body_collection.deformable_geometry.template Find_Structure<TETRAHEDRALIZED_VOLUME<T>&>();
    GEOMETRY_PARTICLES<TV>& particles=tetrahedralized_volume.particles;
    int i,j,k,l;tetrahedralized_volume.mesh.elements(index).Get(i,j,k,l);
    TV xi=particles.X(i),xj=particles.X(j),xk=particles.X(k),xl=particles.X(l);
    T max_length=sqrt(max((xi-xj).Magnitude_Squared(),(xj-xk).Magnitude_Squared(),(xk-xi).Magnitude_Squared(),(xi-xl).Magnitude_Squared(),
        (xj-xl).Magnitude_Squared(),(xk-xl).Magnitude_Squared()));
    T minimum_cell_size_in_tetrahedron=implicit_surface->Minimum_Cell_Size_Within_Box(TETRAHEDRON<T>(xi,xj,xk,xl).Bounding_Box());
    if(max_length<minimum_cell_size_in_tetrahedron) return false; // early exit if this cell is maximally refined
    T phi_i=implicit_surface->Extended_Phi(xi),phi_j=implicit_surface->Extended_Phi(xj),phi_k=implicit_surface->Extended_Phi(xk),phi_l=implicit_surface->Extended_Phi(xl);
    if(min(abs(phi_i),abs(phi_j),abs(phi_k),abs(phi_l)) > max_length) return false; // early exit if the surface cannot pass through the tet
#ifndef COMPILE_WITHOUT_DYADIC_SUPPORT
    // check sample points near interface for high curvature or interpolation error
    if(typeid(*implicit_surface)==typeid(DYADIC_IMPLICIT_OBJECT<TV>)){
        // Octree level set: sample at the closest point on the tet to each intersecting cell center
        OCTREE_GRID<T>& octree_grid=dynamic_cast<DYADIC_IMPLICIT_OBJECT<TV>*>(implicit_surface)->levelset.grid;
        TETRAHEDRON<T> tetrahedron(xi,xj,xk,xl);
        ARRAY<OCTREE_CELL<T>*> intersecting_cells;
        octree_grid.Get_Cells_Intersecting_Box(tetrahedron.Bounding_Box(),intersecting_cells);
        bool seen_positive=false,seen_negative=false,seen_big_error=false;
        for(int i=1;i<=intersecting_cells.m;i++){
            TV weights,x=tetrahedron.Closest_Point(intersecting_cells(i)->Center(),weights); // sample point
            T phi=implicit_surface->Extended_Phi(x);
            if(abs(phi)<minimum_cell_size_in_tetrahedron){ // close to the interface
                VECTOR<T,2> curvatures=implicit_surface->Principal_Curvatures(x);
                if(max_length*(abs(curvatures[1])+abs(curvatures[2]))>curvature_subdivision_threshold) return true;}
            if(phi>=0) seen_positive=true;if(phi<=0) seen_negative=true;
            if(!seen_big_error){ // figure out linear interpolation of phi through the corners of the tet
                MATRIX<T,4> A(1,1,1,1,xi.x,xj.x,xk.x,xl.x,xi.y,xj.y,xk.y,xl.y,xi.z,xj.z,xk.z,xl.z);A.Invert();
                T phi0=A(1,1)*phi_i+A(1,2)*phi_j+A(1,3)*phi_k+A(1,4)*phi_l;
                TV average_normal(A(2,1)*phi_i+A(2,2)*phi_j+A(2,3)*phi_k+A(2,4)*phi_l,A(3,1)*phi_i+A(3,2)*phi_j+A(3,3)*phi_k+A(3,4)*phi_l,
                    A(4,1)*phi_i+A(4,2)*phi_j+A(4,3)*phi_k+A(4,4)*phi_l);
                if(abs(phi-(phi0+TV::Dot_Product(average_normal,x)))>interpolation_error_subdivision_threshold*max_length) seen_big_error=true;}
            if((seen_big_error || max_length>maximum_boundary_edge_length) && seen_positive && seen_negative) return true;}}
    else
#endif
    {
        // Uniform level set: sample phi on a barycentric lattice fine enough to resolve the grid
        int n=(int)ceil(max_length/minimum_cell_size_in_tetrahedron);T one_over_n=(T)1/n;bool seen_positive=false,seen_negative=false,seen_big_error=false;
        for(int p=0;p<=n;p++){T a=p*one_over_n;for(int q=0;q<=n-p;q++){T b=q*one_over_n;for(int r=0;r<=n-p-q;r++){T c=r*one_over_n;
            TV x=a*xi+b*xj+c*xk+(1-a-b-c)*xl;T phi=implicit_surface->Extended_Phi(x); // sample point
            if(abs(phi)<minimum_cell_size_in_tetrahedron){ // close to the interface
                VECTOR<T,2> curvatures=implicit_surface->Principal_Curvatures(x);
                if(max_length*(abs(curvatures[1])+abs(curvatures[2]))>curvature_subdivision_threshold) return true;}
            if(phi>=0) seen_positive=true;if(phi<=0) seen_negative=true;
            if(!seen_big_error){ // figure out linear interpolation of phi through the corners of the tet
                MATRIX<T,4> A(1,1,1,1,xi.x,xj.x,xk.x,xl.x,xi.y,xj.y,xk.y,xl.y,xi.z,xj.z,xk.z,xl.z);A.Invert();
                T phi0=A(1,1)*phi_i+A(1,2)*phi_j+A(1,3)*phi_k+A(1,4)*phi_l;
                TV average_normal(A(2,1)*phi_i+A(2,2)*phi_j+A(2,3)*phi_k+A(2,4)*phi_l,A(3,1)*phi_i+A(3,2)*phi_j+A(3,3)*phi_k+A(3,4)*phi_l,
                    A(4,1)*phi_i+A(4,2)*phi_j+A(4,3)*phi_k+A(4,4)*phi_l);
                if(abs(phi-(phi0+TV::Dot_Product(average_normal,x)))>interpolation_error_subdivision_threshold*max_length) seen_big_error=true;}
            if((seen_big_error || max_length>maximum_boundary_edge_length) && seen_positive && seen_negative) return true;}}}}
    return false;
}
//#####################################################################
// Function Discard_To_Get_Nice_Topology
//#####################################################################
// discard to guarantee no overconstrained tets, and discourage bad interior edges and non-manifold boundary nodes
// discard to guarantee no overconstrained tets, and discourage bad interior edges and non-manifold boundary nodes
// Strategy: envelope interior nodes to get a keep set, subdivide around degree-3 boundary nodes,
// then iteratively subdivide/absorb bad interior edges (edges between boundary nodes that are not
// themselves boundary edges) and non-manifold boundary nodes until none remain.
template<class T> void TETRAHEDRAL_MESHING<T>::
Discard_To_Get_Nice_Topology(RED_GREEN_TETRAHEDRA<T>& redgreen,ARRAY<bool>& keep_tet_flag,const bool verbose)
{
    TETRAHEDRON_MESH& mesh=solid_body_collection.deformable_body_collection.deformable_geometry.template Find_Structure<TETRAHEDRALIZED_VOLUME<T>&>().mesh;
    PARTICLES<TV>& particles=solid_body_collection.deformable_body_collection.particles;
    keep_tet_flag.Resize(mesh.elements.m,false,false);ARRAYS_COMPUTATIONS::Fill(keep_tet_flag,false);
    Envelope_Interior_Nodes(keep_tet_flag);
    TRIANGLE_MESH boundary_mesh;mesh.Initialize_Boundary_Mesh_Of_Subset(boundary_mesh,keep_tet_flag);
    boundary_mesh.Initialize_Segment_Mesh();boundary_mesh.Initialize_Neighbor_Nodes();
    ARRAY<VECTOR<int,2> > edges_to_refine;
    for(int i=1;i<=particles.array_collection->Size();i++) if((*boundary_mesh.neighbor_nodes)(i).m==3) for(int j=1;j<=3;j++) // refine degree 3 nodes
        edges_to_refine.Append(VECTOR<int,2>(i,(*boundary_mesh.neighbor_nodes)(i)(j)));
    if(verbose) {std::stringstream ss;ss<<"Subdividing "<<edges_to_refine.m<<" undesirable surface edges."<<std::endl;LOG::filecout(ss.str());}
    redgreen.Subdivide_Segment_List(edges_to_refine);edges_to_refine.Clean_Memory();
    Envelope_Interior_Nodes(keep_tet_flag);
    // Rebuild boundary structures and collect the set of boundary nodes of the kept subset
    mesh.Initialize_Boundary_Mesh_Of_Subset(boundary_mesh,keep_tet_flag);boundary_mesh.Initialize_Segment_Mesh();
    ARRAY<bool> node_on_boundary(mesh.number_nodes);
    for(int t=1;t<=boundary_mesh.elements.m;t++){
        int i,j,k;boundary_mesh.elements(t).Get(i,j,k);node_on_boundary(i)=true;node_on_boundary(j)=true;node_on_boundary(k)=true;}
    ARRAY<int> boundary_nodes;boundary_nodes.Preallocate(boundary_mesh.elements.m);
    for(int i=1;i<=node_on_boundary.m;i++) if(node_on_boundary(i)) boundary_nodes.Append(i);
    boundary_mesh.segment_mesh->Initialize_Incident_Elements(); // for fast Segment() calls
    SEGMENT_MESH subset_segment_mesh;mesh.Initialize_Segment_Mesh_Of_Subset(subset_segment_mesh,keep_tet_flag);
    subset_segment_mesh.Initialize_Neighbor_Nodes(); // so we can look at just neighbors of boundary nodes
    // A "bad" interior edge joins two boundary nodes but is not itself a boundary edge
    ARRAY<VECTOR<int,2> > bad_segment_list;
    for(int i=1;i<=boundary_nodes.m;i++){
        int node1=boundary_nodes(i);
        for(int j=1;j<=(*subset_segment_mesh.neighbor_nodes)(node1).m;j++){
            int node2=(*subset_segment_mesh.neighbor_nodes)(node1)(j);
            if(node1<node2 && node_on_boundary(node2) && !boundary_mesh.segment_mesh->Segment(node1,node2)) bad_segment_list.Append(VECTOR<int,2>(node1,node2));}}
    int number_bad_elements=bad_segment_list.m;
    if(number_bad_elements){
        if(verbose) {std::stringstream ss;ss<<"Subdividing "<<bad_segment_list.m<<" bad interior edges."<<std::endl;LOG::filecout(ss.str());}
        redgreen.Subdivide_Segment_List(bad_segment_list);
        mesh.Initialize_Incident_Elements();Envelope_Interior_Nodes(keep_tet_flag);}
    if(number_bad_elements) mesh.Initialize_Boundary_Mesh_Of_Subset(boundary_mesh,keep_tet_flag);
    ARRAY<int> non_manifold_nodes;boundary_mesh.Non_Manifold_Nodes(non_manifold_nodes);
    if(verbose) {std::stringstream ss;ss<<"Subdividing around "<<non_manifold_nodes.m<<" bad nodes."<<std::endl;LOG::filecout(ss.str());}
    number_bad_elements+=non_manifold_nodes.m;
    if(non_manifold_nodes.m){
        ARRAY<int> tets_to_refine;tets_to_refine.Preallocate(25*non_manifold_nodes.m);
        for(int i=1;i<=non_manifold_nodes.m;i++) for(int j=1;j<=(*mesh.incident_elements)(non_manifold_nodes(i)).m;j++)
            tets_to_refine.Append((*mesh.incident_elements)(non_manifold_nodes(i))(j));
        redgreen.Refine_Simplex_List(tets_to_refine);Envelope_Interior_Nodes(keep_tet_flag);}
    while(number_bad_elements){ // continue to envelope while there may be bad things
        mesh.Initialize_Boundary_Mesh_Of_Subset(boundary_mesh,keep_tet_flag);
        ARRAYS_COMPUTATIONS::Fill(node_on_boundary,false);node_on_boundary.Resize(mesh.number_nodes);
        for(int t=1;t<=boundary_mesh.elements.m;t++){
            int i,j,k;boundary_mesh.elements(t).Get(i,j,k);node_on_boundary(i)=true;node_on_boundary(j)=true;node_on_boundary(k)=true;}
        boundary_nodes.Resize(0);int i;for(i=1;i<=node_on_boundary.m;i++) if(node_on_boundary(i)) boundary_nodes.Append(i);
        boundary_mesh.Initialize_Segment_Mesh();boundary_mesh.segment_mesh->Initialize_Incident_Elements(); // for fast Segment() calls
        mesh.Initialize_Segment_Mesh_Of_Subset(subset_segment_mesh,keep_tet_flag);
        subset_segment_mesh.Initialize_Neighbor_Nodes(); // so we can look at just neighbors of boundary nodes
        bad_segment_list.Resize(0);
        for(int i=1;i<=boundary_nodes.m;i++){
            int node1=boundary_nodes(i);
            for(int j=1;j<=(*subset_segment_mesh.neighbor_nodes)(node1).m;j++){
                int node2=(*subset_segment_mesh.neighbor_nodes)(node1)(j);
                if(node1<node2 && node_on_boundary(node2) && !boundary_mesh.segment_mesh->Segment(node1,node2)) bad_segment_list.Append(VECTOR<int,2>(node1,node2));}}
        if(verbose) {std::stringstream ss;ss<<"Enveloping "<<bad_segment_list.m<<" bad interior edges."<<std::endl;LOG::filecout(ss.str());}
        number_bad_elements=bad_segment_list.m;
        // Instead of subdividing, absorb the endpoint whose discarded neighborhood is deeper
        // outside the surface (larger max phi) back into the kept set.
        for(int i=1;i<=bad_segment_list.m;i++){
            int node1,node2;bad_segment_list(i).Get(node1,node2);T maxphi1=-(T)FLT_MAX,maxphi2=-(T)FLT_MAX;
            for(int j=1;j<=(*mesh.incident_elements)(node1).m;j++) if(!keep_tet_flag((*mesh.incident_elements)(node1)(j))){
                int a,b,c,d;mesh.elements((*mesh.incident_elements)(node1)(j)).Get(a,b,c,d);
                maxphi1=max(maxphi1,implicit_surface->Extended_Phi((T).25*(particles.X(a)+particles.X(b)+particles.X(c)+particles.X(d))));}
            for(int j=1;j<=(*mesh.incident_elements)(node2).m;j++) if(!keep_tet_flag((*mesh.incident_elements)(node2)(j))){
                int a,b,c,d;mesh.elements((*mesh.incident_elements)(node2)(j)).Get(a,b,c,d);
                maxphi2=max(maxphi2,implicit_surface->Extended_Phi((T).25*(particles.X(a)+particles.X(b)+particles.X(c)+particles.X(d))));}
            if(maxphi1<maxphi2) for(int j=1;j<=(*mesh.incident_elements)(node1).m;j++) keep_tet_flag((*mesh.incident_elements)(node1)(j))=true;
            else for(int j=1;j<=(*mesh.incident_elements)(node2).m;j++) keep_tet_flag((*mesh.incident_elements)(node2)(j))=true;}
        if(number_bad_elements) mesh.Initialize_Boundary_Mesh_Of_Subset(boundary_mesh,keep_tet_flag);
        boundary_mesh.Non_Manifold_Nodes(non_manifold_nodes);
        if(verbose) {std::stringstream ss;ss<<"Fixing "<<non_manifold_nodes.m<<" bad nodes."<<std::endl;LOG::filecout(ss.str());}
        number_bad_elements+=non_manifold_nodes.m;
        for(int i=1;i<=non_manifold_nodes.m;i++) for(int j=1;j<=(*mesh.incident_elements)(non_manifold_nodes(i)).m;j++)
            keep_tet_flag((*mesh.incident_elements)(non_manifold_nodes(i))(j))=true;}
}
//##############################################################################
// Function Envelope_Interior_Nodes
//##############################################################################
template<class T> void TETRAHEDRAL_MESHING<T>::
Envelope_Interior_Nodes(ARRAY<bool>& keep_tet_flag)
{
    // Mark for keeping every tet incident on a "deep interior" node: a node with phi<0 all of
    // whose neighbors satisfy phi(neighbor)<=-3*phi(node). All other flags are reset to false.
    TETRAHEDRON_MESH& tet_mesh=solid_body_collection.deformable_body_collection.deformable_geometry.template Find_Structure<TETRAHEDRALIZED_VOLUME<T>&>().mesh;
    PARTICLES<TV>& particle_array=solid_body_collection.deformable_body_collection.particles;
    keep_tet_flag.Resize(tet_mesh.elements.m);ARRAYS_COMPUTATIONS::Fill(keep_tet_flag,false);
    tet_mesh.Initialize_Incident_Elements();tet_mesh.Initialize_Neighbor_Nodes();
    for(int node=1;node<=tet_mesh.number_nodes;node++){
        T node_phi=implicit_surface->Extended_Phi(particle_array.X(node));
        if(node_phi>=0) continue; // only interior nodes qualify
        bool deep_interior=true;
        for(int k=1;k<=(*tet_mesh.neighbor_nodes)(node).m;k++){
            int neighbor=(*tet_mesh.neighbor_nodes)(node)(k);
            if(implicit_surface->Extended_Phi(particle_array.X(neighbor))>-3*node_phi){deep_interior=false;break;}}
        if(!deep_interior) continue;
        for(int k=1;k<=(*tet_mesh.incident_elements)(node).m;k++) keep_tet_flag((*tet_mesh.incident_elements)(node)(k))=true;}
}
//#####################################################################
// Function Write_Output_Files
//#####################################################################
// Write the simulation state, the tet volume, (on frame 0) the T-junction boundary mesh and
// bindings, per-frame diagnostics, and the common/last_frame marker to output_directory.
template<class T> void TETRAHEDRAL_MESHING<T>::
Write_Output_Files(const int frame)
{
    DEFORMABLE_BODY_COLLECTION<TV>& deformable_body_collection=solid_body_collection.deformable_body_collection;
    TETRAHEDRALIZED_VOLUME<T>& tetrahedralized_volume=deformable_body_collection.deformable_geometry.template Find_Structure<TETRAHEDRALIZED_VOLUME<T>&>();
    FILE_UTILITIES::Create_Directory(output_directory);
    std::string f=STRING_UTILITIES::string_sprintf("%d",frame);
    FILE_UTILITIES::Create_Directory(output_directory+"/"+f);
    FILE_UTILITIES::Create_Directory(output_directory+"/common");
    // write state
    solid_body_collection.Write(stream_type,output_directory,frame,1,solids_parameters.write_static_variables_every_frame,solids_parameters.rigid_body_evolution_parameters.write_rigid_bodies,
        solids_parameters.write_deformable_body,solids_parameters.write_from_every_process,false);
    FILE_UTILITIES::Write_To_File(stream_type,output_directory+"/tetrahedralized_volume_"+f+".tet",tetrahedralized_volume);
    // write boundary mesh and bindings (static data, written once on the first frame)
    if(replace_green_refinement_with_embedded_t_junctions && frame==0){
        FILE_UTILITIES::Write_To_File(stream_type,output_directory+"/boundary_mesh",*boundary_mesh);
        FILE_UTILITIES::Write_To_File(stream_type,output_directory+"/bindings",solid_body_collection.deformable_body_collection.binding_list);}
    // write diagnostics: mesh statistics, max |phi| on boundary, max spring compression/expansion
    {std::ostream* output(FILE_UTILITIES::Safe_Open_Output(output_directory+"/diagnostics."+f,false));
    Read_Write<TETRAHEDRALIZED_VOLUME<T>,T>::Print_Statistics(*output,tetrahedralized_volume);
    int index;
    *output<<"max_phi = "<<tetrahedralized_volume.Maximum_Magnitude_Phi_On_Boundary(*implicit_surface,&index);*output<<" ("<<index<<")"<<std::endl;
    LINEAR_SPRINGS<TV>* linear_springs=solid_body_collection.template Find_Force<LINEAR_SPRINGS<TV>*>();
    if(linear_springs){*output<<"max_edge_compression = "<<linear_springs->Maximum_Compression_Or_Expansion_Fraction(&index);*output<<" ("<<index<<")"<<std::endl;}
    delete output;}
    // write last frame
    FILE_UTILITIES::Write_To_Text_File(output_directory+"/common/last_frame",frame,"\n");
}
//#####################################################################
template class TETRAHEDRAL_MESHING<float>;
#ifndef COMPILE_WITHOUT_DOUBLE_SUPPORT
template class TETRAHEDRAL_MESHING<double>;
#endif
|
#!/usr/bin/env sh
# Build the VuePress documentation and force-push the generated site to the
# gh-pages branch of the vuex-orm repository.
# Abort on the first failing command.
set -e
# Generate the static site into docs/.vuepress/dist.
npm run docs:build
# Preserve the custom-domain file so GitHub Pages keeps serving it.
cp docs/CNAME docs/.vuepress/dist/CNAME
cd docs/.vuepress/dist
# Publish the build output as a fresh, single-commit history.
git init
git add -A
git commit -m 'deploy'
# NOTE: the force-push overwrites any previous gh-pages history.
git push -f git@github.com:vuex-orm/vuex-orm.git master:gh-pages
cd -
|
# coding=utf-8
# Copyright 2020 The Tensor2Robot Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as python3
"""Hook that logs golden values to be used in unit tests.
In the Data -> Checkpoint -> Inference -> Eval flow, this verifies no regression
occurred in Data -> Checkpoint.
"""
import os
from typing import List
from absl import logging
import gin
import numpy as np
from tensor2robot.hooks import hook_builder
from tensor2robot.models import model_interface
import tensorflow.compat.v1 as tf
ModeKeys = tf.estimator.ModeKeys
COLLECTION = 'golden'
def add_golden_tensor(tensor, name):
  """Adds tensor to be tracked.

  Registers `tensor` in the 'golden' graph collection so GoldenValuesHook
  fetches and records its value on every session step.

  Args:
    tensor: tensor whose per-step value should be recorded.
    name: name given to the tf.identity node; it becomes the key in the
      recorded golden-value dicts.
  """
  tf.add_to_collection(COLLECTION, tf.identity(tensor, name=name))
class GoldenValuesHook(tf.train.SessionRunHook):
  """SessionRunHook that saves loss metrics to file.

  On every step, fetches all tensors registered in the 'golden' collection
  (see add_golden_tensor) and accumulates a {tensor_name: value} dict; at
  session end the list of dicts is written to
  `<log_directory>/golden_values.npy`.
  """

  def __init__(self,
               log_directory):
    # Directory that receives golden_values.npy when the session ends.
    self._log_directory = log_directory

  def begin(self):
    # Reset accumulated measurements when the session is created.
    self._measurements = []

  def end(self, session):
    # Record measurements.
    # The session handle is unused; all values were captured in after_run.
    del session
    # NOTE(review): the measurements are dicts, so np.save stores a pickled
    # object array — loading requires np.load(..., allow_pickle=True).
    np.save(os.path.join(self._log_directory, 'golden_values.npy'),
            self._measurements)

  def before_run(self, run_context):
    # Fetch every registered golden tensor on this step.
    return tf.train.SessionRunArgs(
        fetches=tf.get_collection_ref(COLLECTION))

  def after_run(self, run_context, run_values):
    # Pair each collection tensor with its fetched value for this step.
    golden_values = {t.name: v for t, v in
                     zip(tf.get_collection_ref(COLLECTION), run_values.results)}
    logging.info('Recorded golden values for %s', golden_values.keys())
    self._measurements.append(golden_values)
@gin.configurable
class GoldenValuesHookBuilder(hook_builder.HookBuilder):
  """Hook builder for generating golden values."""

  def create_hooks(
      self,
      t2r_model,
      estimator,
  ):
    """Creates a single GoldenValuesHook writing into the model directory.

    Args:
      t2r_model: the model being trained/evaluated (unused here).
      estimator: estimator whose model_dir receives golden_values.npy.

    Returns:
      A one-element list containing the configured GoldenValuesHook.
    """
    return [GoldenValuesHook(estimator.model_dir)]
|
/*
* Copyright 2016 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.api.auth;
import com.google.api.AuthProvider;
import com.google.api.Authentication;
import com.google.api.Service;
import com.google.api.client.http.GenericUrl;
import com.google.api.client.http.HttpRequestFactory;
import com.google.api.client.http.javanet.NetHttpTransport;
import com.google.api.client.util.Clock;
import com.google.api.client.util.Maps;
import com.google.api.config.ServiceConfigFetcher;
import com.google.api.control.model.MethodRegistry.AuthInfo;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Sets;
import com.google.common.flogger.FluentLogger;
import com.google.common.net.HttpHeaders;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.servlet.http.HttpServletRequest;
import org.jose4j.jwt.JwtClaims;
import org.jose4j.jwt.MalformedClaimException;
import org.jose4j.jwt.NumericDate;
import org.jose4j.jwt.ReservedClaimNames;
/**
* An authenticator that extracts the auth token from the HTTP request and
* constructs a {@link UserInfo} object based on the claims contained in the
* auth token.
*
* @author <EMAIL>
*
*/
public class Authenticator {
  private static final FluentLogger logger = FluentLogger.forEnclosingClass();

  // Query parameter consulted for a token when no Authorization header is set.
  private static final String ACCESS_TOKEN_PARAM_NAME = "access_token";
  // Authorization header scheme prefix, including its trailing space.
  private static final String BEARER_TOKEN_PREFIX = "Bearer ";
  // JWT claim carrying the caller's email address, when present.
  private static final String EMAIL_CLAIM_NAME = "email";

  private final AuthTokenDecoder authTokenDecoder;
  private final Clock clock;
  // Maps token issuers to the auth-provider IDs configured for this service.
  private final Map<String, String> issuersToProviderIds;

  /**
   * Constructor.
   *
   * @param authTokenDecoder decodes auth tokens into {@link UserInfo} objects.
   * @param clock provides the time.
   * @param issuersToProviderIds maps from issuers to provider IDs.
   */
  @VisibleForTesting
  Authenticator(
      AuthTokenDecoder authTokenDecoder,
      Clock clock,
      Map<String, String> issuersToProviderIds) {
    this.authTokenDecoder = authTokenDecoder;
    this.clock = clock;
    this.issuersToProviderIds = issuersToProviderIds;
  }

  /**
   * Authenticate the current HTTP request.
   *
   * @param httpServletRequest is the incoming HTTP request object.
   * @param authInfo contains authentication configurations of the API method being called.
   * @param serviceName is the name of this service.
   * @return a constructed {@link UserInfo} object representing the identity of the caller.
   * @throws UnauthenticatedException if the token is missing, from an unknown issuer or
   *     disallowed provider, expired or not yet valid, or has no acceptable audience.
   */
  public UserInfo authenticate(
      HttpServletRequest httpServletRequest,
      AuthInfo authInfo,
      String serviceName) {
    Preconditions.checkNotNull(httpServletRequest);
    Preconditions.checkNotNull(authInfo);
    Optional<String> maybeAuthToken = extractAuthToken(httpServletRequest);
    if (!maybeAuthToken.isPresent()) {
      throw new UnauthenticatedException(
          "No auth token is contained in the HTTP request");
    }
    JwtClaims jwtClaims = this.authTokenDecoder.decode(maybeAuthToken.get());
    UserInfo userInfo = toUserInfo(jwtClaims);
    String issuer = userInfo.getIssuer();
    if (!this.issuersToProviderIds.containsKey(issuer)) {
      throw new UnauthenticatedException("Unknown issuer: " + issuer);
    }
    String providerId = this.issuersToProviderIds.get(issuer);
    // Check whether the provider id is allowed.
    if (!authInfo.isProviderIdAllowed(providerId)) {
      // Fixed grammar of the message ("does not allowed" -> "does not allow").
      String message = "The requested method does not allow this provider id: " + providerId;
      throw new UnauthenticatedException(message);
    }
    checkJwtClaims(jwtClaims);
    // Check the audiences decoded from the auth token. The auth token is allowed when
    // 1) an audience is equal to the service name,
    // or 2) at least one audience is allowed in the method configuration.
    Set<String> audiences = userInfo.getAudiences();
    boolean hasServiceName = audiences.contains(serviceName);
    Set<String> allowedAudiences = authInfo.getAudiencesForProvider(providerId);
    if (!hasServiceName && Sets.intersection(audiences, allowedAudiences).isEmpty()) {
      throw new UnauthenticatedException("Audiences not allowed");
    }
    return userInfo;
  }

  // Check whether the JWT claims should be accepted: "exp" must be present and
  // in the future, and "nbf" (when present) must not be in the future.
  private void checkJwtClaims(JwtClaims jwtClaims) {
    Optional<NumericDate> expiration = getDateClaim(ReservedClaimNames.EXPIRATION_TIME, jwtClaims);
    if (!expiration.isPresent()) {
      throw new UnauthenticatedException("Missing expiration field");
    }
    Optional<NumericDate> notBefore = getDateClaim(ReservedClaimNames.NOT_BEFORE, jwtClaims);
    NumericDate currentTime = NumericDate.fromMilliseconds(clock.currentTimeMillis());
    if (expiration.get().isBefore(currentTime)) {
      throw new UnauthenticatedException("The auth token has already expired");
    }
    if (notBefore.isPresent() && notBefore.get().isAfter(currentTime)) {
      String message = "Current time is earlier than the \"nbf\" time";
      throw new UnauthenticatedException(message);
    }
  }

  /**
   * Create an instance of {@link Authenticator} using the service configuration fetched from Google
   * Service Management APIs.
   *
   * @return an {@code Authenticator}
   * @throws java.lang.IllegalArgumentException if the authentication message is not defined in the
   *     fetched service config.
   */
  public static Authenticator create() {
    ServiceConfigFetcher fetcher = ServiceConfigFetcher.create();
    Service service = fetcher.fetch();
    if (!service.hasAuthentication()) {
      throw new IllegalArgumentException("Authentication is not defined in service config");
    }
    return create(service.getAuthentication(), Clock.SYSTEM);
  }

  // Wires up the decoding pipeline (key-URI supplier -> JWKS supplier ->
  // verifier -> decoder), with caching layers around JWKS fetches and decoded
  // tokens.
  @VisibleForTesting
  static Authenticator create(Authentication authentication, Clock clock) {
    List<AuthProvider> providersList = authentication.getProvidersList();
    if (providersList == null || providersList.isEmpty()) {
      throw new IllegalArgumentException("No auth providers are defined in the config.");
    }
    Map<String, IssuerKeyUrlConfig> issuerKeyConfigs = generateIssuerKeyConfig(providersList);
    Map<String, String> issuersToProviderIds = Maps.newHashMap();
    for (AuthProvider authProvider : providersList) {
      issuersToProviderIds.put(authProvider.getIssuer(), authProvider.getId());
    }
    HttpRequestFactory httpRequestFactory = new NetHttpTransport().createRequestFactory();
    KeyUriSupplier defaultKeyUriSupplier =
        new DefaultKeyUriSupplier(httpRequestFactory, issuerKeyConfigs);
    JwksSupplier jwksSupplier = new DefaultJwksSupplier(httpRequestFactory, defaultKeyUriSupplier);
    JwksSupplier cachingJwksSupplier = new CachingJwksSupplier(jwksSupplier);
    AuthTokenVerifier authTokenVerifier = new DefaultAuthTokenVerifier(cachingJwksSupplier);
    AuthTokenDecoder authTokenDecoder = new DefaultAuthTokenDecoder(authTokenVerifier);
    AuthTokenDecoder cachingAuthTokenDecoder = new CachingAuthTokenDecoder(authTokenDecoder);
    return new Authenticator(
        cachingAuthTokenDecoder,
        clock,
        ImmutableMap.<String, String>copyOf(issuersToProviderIds));
  }

  // Builds the issuer -> key-URL configuration map, skipping providers with no
  // issuer and rejecting duplicate issuers. When a provider has no jwksUri the
  // config is built with an absent URL and its boolean flag set — presumably
  // marking the key URL for later discovery (confirm IssuerKeyUrlConfig
  // semantics).
  private static Map<String, IssuerKeyUrlConfig> generateIssuerKeyConfig(
      List<AuthProvider> authProviders) {
    ImmutableMap.Builder<String, IssuerKeyUrlConfig> issuerConfigBuilder = ImmutableMap.builder();
    Set<String> issuers = Sets.newHashSet();
    for (AuthProvider authProvider : authProviders) {
      String issuer = authProvider.getIssuer();
      if (Strings.isNullOrEmpty(issuer)) {
        logger.atWarning().log("The 'issuer' field is not set in AuthProvider (%s)", authProvider);
        continue;
      }
      if (issuers.contains(issuer)) {
        throw new IllegalArgumentException(
            "Configuration contains multiple auth provider for the same issuer: " + issuer);
      }
      issuers.add(issuer);
      String jwksUri = authProvider.getJwksUri();
      IssuerKeyUrlConfig config = Strings.isNullOrEmpty(jwksUri)
          ? new IssuerKeyUrlConfig(true, Optional.<GenericUrl>absent())
          : new IssuerKeyUrlConfig(false, Optional.of(new GenericUrl(jwksUri)));
      issuerConfigBuilder.put(issuer, config);
    }
    return issuerConfigBuilder.build();
  }

  // Reads a date-valued claim, translating a malformed claim into
  // UnauthenticatedException; absent claims yield Optional.absent().
  private static Optional<NumericDate> getDateClaim(String claimName, JwtClaims jwtClaims) {
    try {
      NumericDate dateClaim = jwtClaims.getNumericDateClaimValue(claimName);
      return Optional.fromNullable(dateClaim);
    } catch (MalformedClaimException exception) {
      String message = String.format("The \"%s\" claim is malformed", claimName);
      throw new UnauthenticatedException(message);
    }
  }

  // Extracts the bearer token from the Authorization header; the
  // "access_token" query parameter is only consulted when the header is
  // entirely absent (a non-Bearer header short-circuits to absent).
  private static Optional<String> extractAuthToken(HttpServletRequest request) {
    String authHeader = request.getHeader(HttpHeaders.AUTHORIZATION);
    if (authHeader != null) {
      // When the authorization header is present, extract the token from the
      // header.
      if (authHeader.startsWith(BEARER_TOKEN_PREFIX)) {
        return Optional.of(authHeader.substring(BEARER_TOKEN_PREFIX.length()));
      }
      return Optional.absent();
    }
    String accessToken = request.getParameter(ACCESS_TOKEN_PARAM_NAME);
    if (accessToken != null) {
      return Optional.of(accessToken);
    }
    return Optional.absent();
  }

  // Converts decoded JWT claims into a UserInfo; audience, subject, and issuer
  // are required, email is optional.
  private static UserInfo toUserInfo(JwtClaims jwtClaims) {
    try {
      List<String> audiences = jwtClaims.getAudience();
      if (audiences == null || audiences.isEmpty()) {
        throw new UnauthenticatedException("Missing audience field");
      }
      String email = jwtClaims.getClaimValue(EMAIL_CLAIM_NAME, String.class);
      String subject = jwtClaims.getSubject();
      if (subject == null) {
        throw new UnauthenticatedException("Missing subject field");
      }
      String issuer = jwtClaims.getIssuer();
      if (issuer == null) {
        throw new UnauthenticatedException("Missing issuer field");
      }
      return new UserInfo(audiences, email, subject, issuer);
    } catch (MalformedClaimException exception) {
      throw new UnauthenticatedException("Cannot read malformed claim", exception);
    }
  }
}
|
<gh_stars>1-10
package main
import (
"fmt"
"strings"
)
// main reads two whitespace-separated strings from stdin and prints their
// longest common subsequence, highlighting the matched characters within
// each input string.
func main() {
	var string1, string2 string
	// NOTE(review): Scanf errors (e.g. empty or short input) are ignored;
	// unset strings simply stay empty.
	fmt.Scanf("%v\n%v", &string1, &string2)
	fmt.Printf("* Longest Common Subsequence:\n")
	LongestCommonSubsequence(string1, string2).Show(string1, string2)
}
// LongestCommonSubsequence computes a longest common subsequence of P and Q
// by dynamic programming. The result is a flat list of matched index pairs:
// even positions index into P, odd positions into Q.
//
// Fixes over the previous revision:
//   - the loop bounds indexed P by len(Q) and Q by len(P), which panicked or
//     miscomputed whenever the inputs had different lengths;
//   - extending the diagonal subproblem with append could alias and clobber
//     a backing array shared with other table cells; the extension is copied;
//   - empty inputs no longer index A[-1].
func LongestCommonSubsequence(P, Q string) lcs {
	if len(P) == 0 || len(Q) == 0 {
		return lcs{}
	}
	// A[i][j] holds a best subsequence of P[:i+1] and Q[:j+1].
	A := make(subproblems, len(P))
	for i := 0; i < len(P); i++ {
		A[i] = make([]lcs, len(Q))
	}
	for i := 0; i < len(P); i++ {
		for j := 0; j < len(Q); j++ {
			if P[i] == Q[j] {
				// Extend the diagonal on a match when the neighbors are
				// tied; copy first so cells never share backing arrays.
				if len(A.Get(i-1, j)) == len(A.Get(i, j-1)) {
					diag := A.Get(i-1, j-1)
					ext := make(lcs, len(diag), len(diag)+2)
					copy(ext, diag)
					A[i][j] = append(ext, i, j)
					continue
				}
			}
			// Otherwise carry forward the longer neighboring subproblem.
			if len(A.Get(i-1, j)) > len(A.Get(i, j-1)) {
				A[i][j] = A.Get(i-1, j)
			} else {
				A[i][j] = A.Get(i, j-1)
			}
		}
	}
	return A[len(P)-1][len(Q)-1]
}
// subproblems caches the best subsequence found for every (i, j) prefix pair.
type subproblems [][]lcs

// Get returns the cached subproblem at (i, j), treating any negative index
// as the empty-prefix base case.
func (sp subproblems) Get(i, j int) lcs {
	if i >= 0 && j >= 0 {
		return sp[i][j]
	}
	return lcs{}
}
// lcs stores a common subsequence as a flat list of index pairs: even
// positions are indices into the first string, odd positions the matching
// indices into the second string.
type lcs []int

// Show prints both strings between ~~~ rules, rendering every character that
// is not part of the subsequence as '-'.
// NOTE(review): characters after the last match are not padded, so the two
// printed lines can have different lengths.
func (l lcs) Show(P, Q string) {
	// show first string with highlightened symbols
	cur := -1
	fmt.Printf("~~~~~~~~~\n")
	for i := 0; i < len(l); i += 2 {
		fmt.Printf("%s%c",strings.Repeat("-", l[i]-cur-1), P[l[i]])
		cur = l[i]
	}
	fmt.Printf("\n")
	// show sec string
	cur = -1
	for i := 1; i < len(l); i += 2 {
		fmt.Printf("%s%c",strings.Repeat("-", l[i]-cur-1), Q[l[i]])
		cur = l[i]
	}
	fmt.Printf("\n")
	fmt.Printf("~~~~~~~~~\n")
}
|
"""
Represents the geometry objects to use with various
ArcGIS Online Services
"""
from __future__ import absolute_import
import json
from ._utils import is_valid
from six import add_metaclass
class GeometryFactory(type):
    """
    Generates a geometry object from a given set of
    JSON (dictionary or iterable)
    """
    def __call__(cls, iterable=None, **kwargs):
        # Only calls on the Geometry base class are dispatched to a concrete
        # subclass; direct subclass instantiation falls through to the normal
        # type.__call__ at the bottom.
        if iterable is None:
            iterable = ()
        if cls is Geometry:
            # NOTE(review): len() requires a sized input, so despite the name
            # a generator cannot be passed as `iterable`.
            if len(iterable) > 0:
                if isinstance(iterable, dict):
                    # Dispatch on the distinguishing JSON keys; the first
                    # matching branch wins, so the order is load-bearing.
                    if 'x' in iterable and \
                       'y' in iterable:
                        return Point(iterable=iterable)
                    elif 'xmin' in iterable:
                        return Envelope(iterable)
                    elif 'wkt' in iterable or \
                         'wkid' in iterable:
                        return SpatialReference(iterable)
                    elif 'rings' in iterable:
                        return Polygon(iterable)
                    elif 'paths' in iterable:
                        return Polyline(iterable)
                    elif 'points' in iterable:
                        return MultiPoint(iterable)
            elif len(kwargs) > 0:
                # Same dispatch when the geometry is given as keyword
                # arguments instead of a mapping.
                # NOTE(review): here a lone 'y' keyword also yields a Point,
                # unlike the dict branch which requires both 'x' and 'y'.
                if 'x' in kwargs or \
                   'y' in kwargs:
                    return Point(**kwargs)
                elif 'xmin' in kwargs:
                    return Envelope(iterable, **kwargs)
                elif 'wkt' in kwargs or \
                     'wkid' in kwargs:
                    return SpatialReference(**kwargs)
                elif 'rings' in kwargs:
                    return Polygon(**kwargs)
                elif 'paths' in kwargs:
                    return Polyline(**kwargs)
                elif 'points' in kwargs:
                    return MultiPoint(**kwargs)
        # No specific type matched (or cls is already concrete): construct cls.
        return type.__call__(cls, iterable, **kwargs)
###########################################################################
class BaseGeometry(dict):
    """Common behavior shared by every geometry object.

    A geometry is a plain dictionary whose keys may also be read, written,
    and deleted through attribute access, and which serializes itself to
    JSON via ``repr``/``str``.
    """

    @property
    def is_valid(self):
        """bool: whether this geometry passes validation."""
        return is_valid(self)

    def __getattr__(self, name):
        """Resolve a missing attribute as a dictionary key.

        :param name: key to look up
        :return: the stored value
        :raises AttributeError: when no such key exists
        """
        try:
            return self[name]
        except KeyError:
            raise AttributeError(name)

    def __setattr__(self, name, value):
        """Store every attribute assignment as a dictionary entry."""
        self[name] = value

    def __delattr__(self, name):
        """Drop the dictionary entry backing ``name``."""
        del self[name]

    def __repr__(self):
        """Serialize the geometry to a JSON string."""
        return json.dumps(self)

    __str__ = __repr__
###########################################################################
@add_metaclass(GeometryFactory)
class Geometry(BaseGeometry):
    # Generic entry point: thanks to the GeometryFactory metaclass,
    # Geometry(...) returns the concrete subclass implied by the input's
    # keys (Point, Envelope, SpatialReference, Polygon, Polyline,
    # MultiPoint), or a plain Geometry when nothing matches.
    def __init__(self, iterable=None, **kwargs):
        # dict() requires a real iterable, so substitute an empty tuple.
        if iterable is None:
            iterable = ()
        super(Geometry, self).__init__(iterable)
        # Keyword arguments become additional geometry fields.
        self.update(kwargs)
###########################################################################
class SpatialReference(Geometry):
    """A coordinate system reference, defined by a well-known ID (``wkid``)
    or well-known text (``wkt``).

    Default tolerance/resolution of the referenced coordinate system apply
    (xy and z tolerance 1 mm or equivalent; resolution 1/10 of tolerance).
    Because WKIDs occasionally change (e.g. Web Mercator 102100 -> 3857),
    ``wkid`` keeps the value originally assigned while ``latestWkid``
    carries the current one. An optional vertical coordinate system may be
    given via ``vcsWkid``/``latestVcsWkid`` or embedded in the ``wkt``
    string; a fully custom SR is serialized with only ``wkt``. Image
    coordinate systems are supported by Image Service starting at 10.3.
    """

    _type = "SPATIALREFERENCE"

    def __init__(self,
                 iterable=None,
                 **kwargs):
        # dict() needs a real iterable, so substitute an empty tuple.
        super(SpatialReference, self).__init__(() if iterable is None else iterable)
        self.update(kwargs)

    @property
    def type(self):
        """Geometry type identifier string."""
        return self._type
###########################################################################
class Envelope(Geometry):
    """An axis-aligned rectangle given by coordinate ranges (z and m ranges
    optional) plus a ``spatialReference``. An envelope whose ``xmin`` is
    null or the string "NaN" has no location in space and is empty.
    """

    _type = "ENVELOPE"

    def __init__(self, iterable=None, **kwargs):
        # dict() needs a real iterable, so substitute an empty tuple.
        super(Envelope, self).__init__(() if iterable is None else iterable)
        self.update(kwargs)

    @property
    def type(self):
        """Geometry type identifier string."""
        return self._type
###########################################################################
class Point(Geometry):
    """An x/y location, optionally with m and z fields, plus a
    ``spatialReference``. A point whose ``x`` is null or the string "NaN"
    is empty and has no location in space.
    """

    _type = "POINT"

    def __init__(self, iterable=None,
                 **kwargs):
        # dict() needs a real iterable, so substitute an empty tuple.
        super(Point, self).__init__(() if iterable is None else iterable)
        self.update(kwargs)

    @property
    def type(self):
        """Geometry type identifier string."""
        return self._type
###########################################################################
class MultiPoint(Geometry):
    """A set of points sharing one ``spatialReference``.

    ``points`` is an array of 2-4 element coordinate arrays ([x, y] with
    optional z and/or m); boolean ``hasZ``/``hasM`` fields (false when
    omitted) control how the extra elements are interpreted. An empty
    multipoint has an empty ``points`` array; empty member points are
    ignored.
    """

    _type = "MULTIPOINT"

    def __init__(self, iterable=None,
                 **kwargs):
        # dict() needs a real iterable, so substitute an empty tuple.
        super(MultiPoint, self).__init__(() if iterable is None else iterable)
        self.update(kwargs)

    @property
    def type(self):
        """Geometry type identifier string."""
        return self._type
###########################################################################
class Polyline(Geometry):
    """One or more ``paths`` (arrays of point coordinate arrays, see
    MultiPoint for how points are read) plus a ``spatialReference``;
    curve paths use the JSON curve object form. Optional boolean
    ``hasZ``/``hasM`` fields describe extra per-point values. An empty
    polyline has an empty ``paths`` array; nulls/NaNs embedded in a
    coordinate stream are a syntax error.
    """

    _type = "POLYLINE"

    def __init__(self, iterable=None,
                 **kwargs):
        # dict() needs a real iterable, so substitute an empty tuple.
        super(Polyline, self).__init__(() if iterable is None else iterable)
        self.update(kwargs)

    @property
    def type(self):
        """Geometry type identifier string."""
        return self._type
###########################################################################
class Polygon(Geometry):
    """One or more ``rings`` (closed arrays of point coordinate arrays —
    the first point of each ring equals the last) plus a
    ``spatialReference``; curve rings use the JSON curve object form.
    Optional boolean ``hasZ``/``hasM`` fields describe extra per-point
    values. Exterior rings run clockwise, holes counter-clockwise, and
    rings may touch only at vertices; draw with the even-odd fill rule.
    An empty polygon has an empty ``rings`` array; nulls/NaNs embedded in
    a coordinate stream are a syntax error.
    """

    _type = "POLYGON"

    def __init__(self, iterable=None,
                 **kwargs):
        # dict() needs a real iterable, so substitute an empty tuple.
        super(Polygon, self).__init__(() if iterable is None else iterable)
        self.update(kwargs)

    @property
    def type(self):
        """Geometry type identifier string."""
        return self._type
|
#!/bin/sh
# CocoaPods-generated script that embeds built frameworks into the app bundle.
# NOTE(review): the shebang is /bin/sh but later functions use bashisms
# ([[ ]], ${PIPESTATUS[0]}) — works where sh is bash, confirm for other shells.
set -e
echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
# Location of the Swift runtime dylibs for the active toolchain/platform.
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"
# Copies the framework at $1 into the app's Frameworks folder, strips
# architectures the build doesn't target, re-signs it, and (Xcode < 7)
# embeds the Swift runtime dylibs it links against.
install_framework()
{
  # Resolve the source: built products dir by full path, then by basename,
  # then the literal argument.
  if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$1"
  elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
  elif [ -r "$1" ]; then
    local source="$1"
  fi
  local destination="${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
  # Follow a symlinked framework to its real location before copying.
  if [ -L "${source}" ]; then
    echo "Symlinked..."
    source="$(readlink "${source}")"
  fi
  # use filter instead of exclude so missing patterns dont' throw errors
  echo "rsync -av --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
  rsync -av --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"
  # Locate the framework's binary; fall back to a bare dylib layout.
  local basename
  basename="$(basename -s .framework "$1")"
  binary="${destination}/${basename}.framework/${basename}"
  if ! [ -r "$binary" ]; then
    binary="${destination}/${basename}"
  fi
  # Strip invalid architectures so "fat" simulator / device frameworks work on device
  if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
    strip_invalid_archs "$binary"
  fi
  # Resign the code if required by the build settings to avoid unstable apps
  code_sign_if_enabled "${destination}/$(basename "$1")"
  # Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
  if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
    local swift_runtime_libs
    # otool lists linked @rpath/libswift* dylibs; propagate otool's exit code
    # through the pipeline via PIPESTATUS (bash-only).
    swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u && exit ${PIPESTATUS[0]})
    for lib in $swift_runtime_libs; do
      echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
      rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
      code_sign_if_enabled "${destination}/${lib}"
    done
  fi
}
# Signs a framework with the provided identity, unless code signing is
# disabled or not required by the build settings.
code_sign_if_enabled() {
  if [ -n "${EXPANDED_CODE_SIGN_IDENTITY}" -a "${CODE_SIGNING_REQUIRED}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
    # Use the current code_sign_identity
    echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
    echo "/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} --preserve-metadata=identifier,entitlements \"$1\""
    # Quote the identity: signing identities routinely contain spaces
    # (e.g. "iPhone Distribution: Example Corp"), and the previously
    # unquoted expansion word-split them into separate codesign arguments.
    /usr/bin/codesign --force --sign "${EXPANDED_CODE_SIGN_IDENTITY}" --preserve-metadata=identifier,entitlements "$1"
  fi
}
# Strip invalid architectures
# Removes every architecture slice not listed in VALID_ARCHS from the
# binary at $1, so fat simulator/device builds pass App Store validation.
strip_invalid_archs() {
  binary="$1"
  # Get architectures for current file
  archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | rev)"
  stripped=""
  for arch in $archs; do
    if ! [[ "${VALID_ARCHS}" == *"$arch"* ]]; then
      # Strip non-valid architectures in-place
      lipo -remove "$arch" -output "$binary" "$binary" || exit 1
      stripped="$stripped $arch"
    fi
  done
  # Report what was removed, if anything.
  if [[ "$stripped" ]]; then
    echo "Stripped $binary of architectures:$stripped"
  fi
}
# Embed the pod framework for both build configurations.
if [[ "$CONFIGURATION" == "Debug" ]]; then
  install_framework "Pods-JXFlowLayout_Example/JXFlowLayout.framework"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
  install_framework "Pods-JXFlowLayout_Example/JXFlowLayout.framework"
fi
|
<reponame>cyberdevnet/mer-hacker<filename>src/components/Tools/SwitchPortTemplates/SwitchPortTemplateSummary.js
import React from "react";
import Dialog from "@material-ui/core/Dialog";
import "../../../styles/CreateTemplateModal.css";
// Modal summarizing a pending switch-port template deployment: shows the
// target switch (hostname / IP / model) and each selected port with its new
// description and chosen template, plus Deploy / Close actions.
// `ac` is the app context object; `ac.dc` carries shared state and setters.
export default function SwitchPortTemplateSummary(ac) {
  // Close the dialog and clear the port selection so the parent view resets;
  // bumping `trigger` forces it to re-render.
  const Cancel = () => {
    ac.dc.setshowSummary(false);
    ac.dc.setallSelectedPorts([]);
    ac.dc.setconfigureDisabled(true);
    ac.dc.settrigger(ac.dc.trigger + 1);
  };
  return (
    <Dialog open={true} fullWidth>
      <div>
        <div className="modal-dialog-summary modal-confirm-summary">
          <div>
            <div className="modal-header">
              <h4>Configuration Summary</h4>
              {/* Server response shown after a deployment attempt. */}
              {ac.dc.responseMessage}
            </div>
            {/* Target switch overview. */}
            <div
              className="modal-body text-center"
              style={{ fontSize: "11px", color: "darkslategray" }}
            >
              <table className="table table-striped" id="table2">
                <thead>
                  <tr>
                    <th id="col11" scope="col">
                      Hostname
                    </th>
                    <th id="col21" scope="col">
                      IP address
                    </th>
                    <th id="col31" scope="col">
                      Model
                    </th>
                  </tr>
                </thead>
                <tbody key="2">
                  <tr>
                    <th scope="row">{ac.dc.switchDeviceName}</th>
                    <td>{ac.dc.switchDeviceIp}</td>
                    <td>{ac.dc.switchDeviceModel}</td>
                  </tr>
                </tbody>
              </table>
            </div>
            {/* One row per selected port with its pending configuration. */}
            <div
              className="modal-body text-center"
              style={{ fontSize: "11px", color: "darkslategray" }}
            >
              {/* <h4>Configuration Summary</h4> */}
              <table className="table table-striped" id="table1">
                <thead>
                  <tr>
                    <th id="col1" scope="col">
                      Number
                    </th>
                    <th id="col2" scope="col">
                      New Description
                    </th>
                    {/* <th id='col3' scope="col">Type</th> */}
                    {/* <th id='col4' scope="col">VLAN</th> */}
                    <th id="col5" scope="col">
                      Template selected
                    </th>
                  </tr>
                </thead>
                {/* <tbody> */}
                {ac.dc.allSelectedPorts.map((port) => (
                  <tbody key={port.number}>
                    <tr>
                      <th scope="row">{port.number}</th>
                      <td>{port.payload.name}</td>
                      {/* <td>{port.name}</td> */}
                      {/* <td>{port.type}</td> */}
                      {/* <td>{port.vlan}</td> */}
                      <td>{port.template}</td>
                    </tr>
                  </tbody>
                ))}
              </table>
            </div>
            <div className="modal-footer text-center">
              {/* NOTE(review): both loading states render "Deploy" — was
                  "Deploying…" intended while loadingSummaryBtn is true? */}
              <button
                onClick={() => ac.dc.settriggerDeploy(ac.dc.triggerDeploy + 1)}
                className="btn-summary btn-primary"
                disabled={ac.dc.loadingSummaryBtn}
              >
                {ac.dc.loadingSummaryBtn && (
                  <i
                    className="fa fa-refresh fa-spin"
                    style={{ marginRight: "5px" }}
                  />
                )}
                {ac.dc.loadingSummaryBtn && <span>Deploy</span>}
                {!ac.dc.loadingSummaryBtn && <span>Deploy</span>}
              </button>
              <button
                style={{ float: "left" }}
                type="button"
                onClick={Cancel}
                className="btn-summary btn-danger"
                data-dismiss="modal"
              >
                Close
              </button>
            </div>
          </div>
        </div>
      </div>
    </Dialog>
  );
}
|
<reponame>ninetails/test-babel-emotion-plugin
const util = require('util')
const { lstatSync, readdirSync, realpathSync } = require('fs')
const { join } = require('path')
const WatchMissingNodeModulesPlugin = require('react-dev-utils/WatchMissingNodeModulesPlugin')
// True when `source` is a directory (lstat: symlinks are not followed).
const isDirectory = source => lstatSync(source).isDirectory()
// Absolute paths of the immediate subdirectories of `source`.
const getDirectories = source =>
readdirSync(source).map(name => join(source, name)).filter(isDirectory)
// Builds a babel-loader rule entry configured for modern JS, React, and
// Flow, with the emotion plugin enabled.
const changeBabelLoader = () => {
  const presets = [
    ["@babel/preset-env", { useBuiltIns: "entry" }],
    "@babel/preset-react",
    "@babel/preset-flow"
  ]
  const plugins = ["emotion"]
  return {
    loader: 'babel-loader',
    options: { presets, plugins }
  }
}
// Applies `transform` only to babel-loader use-entries; all other
// entries pass through unchanged.
const filterBabel = transform => use => {
  if (use.loader === 'babel-loader') {
    return transform(use)
  }
  return use
}
// Rewrites a JS webpack rule: swaps in the emotion-aware babel-loader
// options and limits the rule to the sibling package directories two
// levels up, excluding node_modules.
const changeJs = rule => ({
...rule,
use: rule.use.map(filterBabel(changeBabelLoader)),
// NOTE(review): assumes every directory two levels up holds sources that
// should be transpiled — confirm against the repository layout.
include: getDirectories(realpathSync(join(__dirname, '../..'))),
exclude: [/node_modules/]
})
// Applies `transform` only to rules whose test regex matches ".js"
// files; other rules pass through unchanged.
const filterIfJs = transform => rule => {
  if (rule.test.test('.js')) {
    return transform(rule)
  }
  return rule
}
// Export a function. Accept the base config as the only param.
module.exports = (storybookBaseConfig, configType) => {
const config = {
...storybookBaseConfig,
module: {
...storybookBaseConfig.module,
rules: storybookBaseConfig.module.rules.map(filterIfJs(changeJs))
},
plugins: storybookBaseConfig.plugins.filter(plugin => plugin !== storybookBaseConfig.plugins[4]),
resolve: {
...storybookBaseConfig.resolve,
modules: [
realpathSync(join(__dirname, '../../../node_modules')),
'node_modules'
]
}
}
console.log(util.inspect(config, false, null, true))
return config
}
|
<gh_stars>0
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package mleko.brzdac.crawler.db.dao;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.logging.Level;
import java.util.logging.Logger;
import mleko.brzdac.crawler.db.DatabaseConnection;
import mleko.brzdac.crawler.exceptions.WTFException;
import mleko.brzdac.crawler.filescanner.Changes;
import mleko.brzdac.crawler.pojo.Host;
/**
*
* @author mleko
*/
/**
 * Static data-access helpers for the {@code host} table and the related
 * {@code host_uptime} and {@code host_file} tables.
 *
 * All methods obtain statements from the shared {@link DatabaseConnection},
 * log (but swallow) {@link SQLException}s, and return a safe default on
 * failure. Statements are opened with try-with-resources so they (and their
 * ResultSets) are always closed — the previous version leaked them.
 */
public class HostDAO {

    /**
     * Fetches hosts that share files ({@code shared > 0}) and are due for an
     * activity scan: last scan attempt older than {@code ScanDelay} seconds,
     * or never scanned at all.
     *
     * @param ScanDelay minimum age of the last scan attempt, in seconds
     * @return matching hosts; empty list on SQL failure
     */
    public static ArrayList<Host> fetchHostsToActivityScan(int ScanDelay) {
        ArrayList<Host> hosts = new ArrayList<Host>();
        try (PreparedStatement selectStatement = DatabaseConnection.getInstance().prepareStatement(
                "SELECT host, last_scan_attempt, last_file_scan_date FROM host "
                + "WHERE "
                + "(DATE_ADD(last_scan_attempt, INTERVAL ? SECOND) < ? OR last_scan_attempt IS NULL) "
                + "AND shared > 0"
        )) {
            long now = Calendar.getInstance().getTime().getTime();
            selectStatement.setInt(1, ScanDelay);
            selectStatement.setTimestamp(2, new java.sql.Timestamp(now));
            try (ResultSet rows = selectStatement.executeQuery()) {
                while (rows.next()) {
                    hosts.add(new Host(rows.getInt(1), rows.getTimestamp(2), rows.getTimestamp(3)));
                }
            }
        } catch (SQLException ex) {
            Logger.getLogger(HostDAO.class.getName()).log(Level.SEVERE, null, ex);
        }
        return hosts;
    }

    /**
     * @return the oldest {@code last_scan_attempt} among sharing hosts, or the
     *         current time when nothing matches or the query fails
     */
    public static Date fetchOldestActivityScanDate() {
        try (PreparedStatement selectDateStatement = DatabaseConnection.getInstance().prepareStatement(
                "SELECT MIN(last_scan_attempt) FROM host WHERE shared > 0")) {
            try (ResultSet dateResultRow = selectDateStatement.executeQuery()) {
                if (dateResultRow.next()) {
                    return dateResultRow.getTimestamp(1);
                }
            }
        } catch (SQLException ex) {
            Logger.getLogger(HostDAO.class.getName()).log(Level.SEVERE, null, ex);
        }
        return Calendar.getInstance().getTime();
    }

    /**
     * Records an uptime sample for the host and updates its activity columns.
     * Active hosts also get {@code last_activity_date} refreshed.
     *
     * @param longIp host address encoded as a long
     * @param active whether the host responded to the scan
     */
    public static void updateActivityState(long longIp, boolean active) {
        try {
            long now = Calendar.getInstance().getTime().getTime();
            // Always append a row to the uptime history first.
            try (PreparedStatement insertUptimeRecordStatement = DatabaseConnection.getInstance().prepareStatement(
                    "INSERT INTO host_uptime(host,active,timestamp) VALUES(?,?,?)")) {
                insertUptimeRecordStatement.setLong(1, longIp);
                insertUptimeRecordStatement.setBoolean(2, active);
                insertUptimeRecordStatement.setTimestamp(3, new java.sql.Timestamp(now));
                insertUptimeRecordStatement.execute();
            }
            if (active) {
                try (PreparedStatement updateStatement = DatabaseConnection.getInstance().prepareStatement(
                        "UPDATE host SET active = 1, last_scan_attempt = ?, last_activity_date = ? WHERE host = ?"
                )) {
                    updateStatement.setTimestamp(1, new java.sql.Timestamp(now));
                    updateStatement.setTimestamp(2, new java.sql.Timestamp(now));
                    updateStatement.setLong(3, longIp);
                    updateStatement.execute();
                }
            } else {
                try (PreparedStatement updateStatement = DatabaseConnection.getInstance().prepareStatement(
                        "UPDATE host SET active = 0, last_scan_attempt = ? WHERE host = ?"
                )) {
                    updateStatement.setTimestamp(1, new java.sql.Timestamp(now));
                    updateStatement.setLong(2, longIp);
                    updateStatement.execute();
                }
            }
        } catch (SQLException ex) {
            Logger.getLogger(HostDAO.class.getName()).log(Level.SEVERE, null, ex);
        }
    }

    /**
     * Stamps {@code last_file_scan_date} with the current time.
     * (Name kept as-is for existing callers despite Java naming conventions.)
     *
     * @param longIp host address encoded as a long
     */
    public static void UpdateFileScanDate(long longIp) {
        try (PreparedStatement updateStatement = DatabaseConnection.getInstance().prepareStatement(
                "UPDATE host SET last_file_scan_date = ? WHERE host = ?"
        )) {
            long now = Calendar.getInstance().getTime().getTime();
            updateStatement.setTimestamp(1, new java.sql.Timestamp(now));
            updateStatement.setLong(2, longIp);
            updateStatement.execute();
        } catch (SQLException ex) {
            Logger.getLogger(HostDAO.class.getName()).log(Level.SEVERE, null, ex);
        }
    }

    /**
     * Recomputes the host's total shared byte count from {@code host_file}
     * and stores it in {@code host.shared}.
     *
     * @param longIp host address encoded as a long
     */
    public static void UpdateShareSize(long longIp) {
        try {
            long shares;
            try (PreparedStatement selectStatement = DatabaseConnection.getInstance().prepareStatement(
                    "SELECT SUM(size) FROM host_file WHERE host = ?"
            )) {
                selectStatement.setLong(1, longIp);
                try (ResultSet result = selectStatement.executeQuery()) {
                    if (!result.first()) {
                        return;
                    }
                    shares = result.getLong(1);
                }
            }
            try (PreparedStatement updateStatement = DatabaseConnection.getInstance().prepareStatement(
                    "UPDATE host SET shared = ? WHERE host = ?")) {
                updateStatement.setLong(1, shares);
                updateStatement.setLong(2, longIp);
                updateStatement.execute();
            }
        } catch (SQLException ex) {
            Logger.getLogger(HostDAO.class.getName()).log(Level.SEVERE, null, ex);
        }
    }

    /**
     * Applies a computed file-scan diff for the host: removed directories and
     * files are deleted, new files inserted. Empty change sets are skipped.
     *
     * @param longIp  host address encoded as a long
     * @param changes diff produced by the file scanner
     * @throws WTFException propagated from the underlying DAOs
     */
    public static void applyFileChanges(long longIp, Changes changes) throws WTFException {
        if (!changes.removedDirectories.isEmpty()) {
            DirectoryDAO.removeDirectories(longIp, changes.removedDirectories);
        }
        if (!changes.removedFiles.isEmpty()) {
            FileDAO.removeHostFiles(longIp, changes.removedFiles);
        }
        if (!changes.newFiles.isEmpty()) {
            FileDAO.addHostFiles(longIp, changes.newFiles);
        }
    }

    /**
     * Returns one host with no known shares ({@code shared = 0}) that is due
     * for discovery, or {@code null} when none is due or the query fails.
     *
     * @param ScanDelay minimum age of the last scan attempt, in seconds
     */
    public static Host fetchHostToDiscovery(int ScanDelay) {
        try (PreparedStatement selectStatement = DatabaseConnection.getInstance().prepareStatement(
                "SELECT host, last_scan_attempt, last_file_scan_date FROM host "
                + "WHERE "
                + "(DATE_ADD(last_scan_attempt, INTERVAL ? SECOND) < ? OR last_scan_attempt IS NULL) "
                + "AND shared = 0 LIMIT 1"
        )) {
            long now = Calendar.getInstance().getTime().getTime();
            selectStatement.setInt(1, ScanDelay);
            selectStatement.setTimestamp(2, new java.sql.Timestamp(now));
            try (ResultSet rows = selectStatement.executeQuery()) {
                if (rows.first()) {
                    return new Host(rows.getInt(1), rows.getTimestamp(2), rows.getTimestamp(3));
                } else {
                    return null;
                }
            }
        } catch (SQLException ex) {
            Logger.getLogger(HostDAO.class.getName()).log(Level.SEVERE, null, ex);
        }
        return null;
    }

    /**
     * Records the outcome of a discovery probe. Active hosts are upserted
     * (insert-or-update) into {@code host}; inactive ones only get their
     * scan-attempt timestamp refreshed.
     *
     * @param longIp host address encoded as a long
     * @param active whether the host answered the probe
     */
    public static void discoveryStatus(long longIp, boolean active) {
        try {
            long now = Calendar.getInstance().getTime().getTime();
            if (active) {
                try (PreparedStatement discoverStatement = DatabaseConnection.getInstance().prepareStatement(
                        "INSERT INTO host(host,active,last_scan_attempt, last_activity_date) "
                        + "VALUES "
                        + "(?,1,?,?) "
                        + "ON DUPLICATE KEY UPDATE "
                        + "active = 1, last_scan_attempt = VALUES(last_scan_attempt), last_activity_date = VALUES(last_activity_date)"
                )) {
                    discoverStatement.setLong(1, longIp);
                    discoverStatement.setTimestamp(2, new java.sql.Timestamp(now));
                    discoverStatement.setTimestamp(3, new java.sql.Timestamp(now));
                    discoverStatement.execute();
                }
            } else {
                try (PreparedStatement updateStatement = DatabaseConnection.getInstance().prepareStatement(
                        "UPDATE host SET last_scan_attempt = ?, active = 0 WHERE host = ?")) {
                    updateStatement.setTimestamp(1, new java.sql.Timestamp(now));
                    updateStatement.setLong(2, longIp);
                    updateStatement.execute();
                }
            }
        } catch (SQLException ex) {
            Logger.getLogger(HostDAO.class.getName()).log(Level.SEVERE, null, ex);
        }
    }
}
|
/***************************** LICENSE START ***********************************
Copyright 2009-2020 ECMWF and INPE. This software is distributed under the terms
of the Apache License version 2.0. In applying this license, ECMWF does not
waive the privileges and immunities granted to it by virtue of its status as
an Intergovernmental Organization or submit itself to any jurisdiction.
***************************** LICENSE END *************************************/
#ifndef DASHBOARDWIDGET_HPP_
#define DASHBOARDWIDGET_HPP_
#include <string>
#include <QDockWidget>
#include <QWidget>
#include <QAction>
#include <QIcon>
#include "VInfo.hpp"
class DashboardDockTitleWidget;
class NodePathWidget;
class ServerFilter;
class VComboSettings;
// Abstract base class for widgets hosted in a dashboard dock or dialog.
// Concrete subclasses implement the pure-virtual hooks below; the base class
// owns the detach/maximise actions and the in-dialog flag.
class DashboardWidget : public QWidget
{
Q_OBJECT

public:
    DashboardWidget(const std::string& type, QWidget* parent=nullptr);
    ~DashboardWidget() override = default;

    // Hooks every concrete dashboard widget must implement.
    virtual void populateDockTitleBar(DashboardDockTitleWidget*)=0;
    virtual void populateDialog()=0;
    virtual void reload()=0;
    virtual void rerender()=0;

    // Selection support; defaults mean "no selection available".
    virtual bool initialSelectionInView() {return false;}
    virtual VInfo_ptr currentSelection() {return VInfo_ptr(); }

    // Actions surfaced on the dock title bar.
    QAction* detachedAction() const {return detachedAction_;}
    QAction* maximisedAction() const {return maximisedAction_;}
    virtual QList<QAction*> dockTitleActions() {return QList<QAction*>();}

    // Detached / maximised state management (implemented in the .cpp).
    bool detached() const;
    void setDetached(bool b);
    bool isMaximised() const;
    void resetMaximised();
    void setEnableMaximised(bool st);

    bool isInDialog() const {return inDialog_;}

    // Settings persistence; the *ForDialog variants are optional overrides.
    virtual void writeSettings(VComboSettings*);
    virtual void readSettings(VComboSettings*);
    virtual void writeSettingsForDialog() {}
    virtual void readSettingsForDialog() {}

    const std::string type() const {return type_;}
    void id(const std::string& id) {id_=id;}

public Q_SLOTS:
    virtual void setCurrentSelection(VInfo_ptr)=0;

Q_SIGNALS:
    void titleUpdated(QString,QString type=QString());
    void selectionChanged(VInfo_ptr);
    void maximisedChanged(DashboardWidget*);
    void popInfoPanel(VInfo_ptr,QString);
    void dashboardCommand(VInfo_ptr,QString);

protected Q_SLOTS:
    void slotDetachedToggled(bool);
    void slotMaximisedToggled(bool);

protected:
    virtual void detachedChanged()=0;
    void setInDialog(bool);

    std::string id_;
    std::string type_;
    bool acceptSetCurrent_;
    QAction *detachedAction_;
    QAction *maximisedAction_;
    bool ignoreMaximisedChange_;
    NodePathWidget* bcWidget_;

private:
    bool inDialog_;   // true when hosted in a dialog rather than a dock
};
#endif
|
<filename>message-logger-tools-annotations/src/main/java/org/message/logger/tools/annotations/BusinessException.java
package org.message.logger.tools.annotations;
import java.lang.annotation.Documented;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
import static java.lang.annotation.ElementType.METHOD;
import static java.lang.annotation.RetentionPolicy.CLASS;
/**
 * Marks a method whose logged failures should be treated as business
 * (domain-level) exceptions by the message-logger tooling.
 *
 * NOTE(review): retention is {@code CLASS}, so the annotation is present in
 * bytecode but NOT visible via runtime reflection — presumably it is consumed
 * at build/weave time; confirm against the processor that reads it.
 */
@Target(METHOD)
@Retention(CLASS)
@Documented
public @interface BusinessException {
}
|
<reponame>Vensent/Camera2<filename>src/com/android/camera/one/v2/initialization/InitializedOneCameraFactory.java<gh_stars>0
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.camera.one.v2.initialization;
import android.annotation.TargetApi;
import android.hardware.camera2.CaptureResult;
import android.os.Build;
import android.os.Handler;
import android.view.Surface;
import com.android.camera.async.ConcurrentState;
import com.android.camera.async.FilteredUpdatable;
import com.android.camera.async.HandlerFactory;
import com.android.camera.async.Lifetime;
import com.android.camera.async.Listenable;
import com.android.camera.async.MainThread;
import com.android.camera.one.OneCamera;
import com.android.camera.one.PreviewSizeSelector;
import com.android.camera.one.v2.autofocus.ManualAutoFocus;
import com.android.camera.one.v2.camera2proxy.CameraCaptureSessionProxy;
import com.android.camera.one.v2.camera2proxy.CameraDeviceProxy;
import com.android.camera.one.v2.photo.PictureTaker;
import com.android.camera.ui.motion.LinearScale;
import com.android.camera.util.Size;
import com.google.common.util.concurrent.SettableFuture;
import java.util.List;
/**
* Simplifies the construction of OneCamera instances which use the camera2 API
* by handling the initialization sequence.
* <p>
* The type of camera created is specified by a {@link CameraStarter}.
* <p>
* This manages camera startup, which is nontrivial because it requires the
* asynchronous acquisition of several dependencies:
* <ol>
* <li>The camera2 CameraDevice, which is available immediately.</li>
* <li>The preview Surface, which is available after
* {@link OneCamera#startPreview} is called.</li>
* <li>The camera2 CameraCaptureSession, created asynchronously using the
* CameraDevice and preview Surface.</li>
* </ol>
*/
@TargetApi(Build.VERSION_CODES.LOLLIPOP)
public class InitializedOneCameraFactory {
    // The fully-assembled camera facade returned by provideOneCamera().
    private final GenericOneCameraImpl mOneCamera;

    /**
     * Assembles a {@link GenericOneCameraImpl} around the given
     * {@link CameraStarter}, deferring the parts that cannot exist until the
     * capture session is ready (picture taking, manual AF).
     *
     * @param lifetime scope bounding all resources created here
     * @param cameraStarter Starts the camera, after initialization of the
     *            preview stream and capture session is complete.
     * @param device the already-opened camera2 device proxy
     * @param outputSurfaces The set of output Surfaces (excluding the
     *            not-yet-available preview Surface) to use when configuring the
     *            capture session.
     * @param mainThreadExecutor executor used to deliver listener callbacks
     * @param handlerFactory creates the background camera {@link Handler}
     * @param maxZoom upper bound forwarded to the camera facade — presumably
     *            the device's max digital zoom; confirm at the call site
     * @param supportedPreviewSizes candidate preview sizes for the selector
     * @param lensRange scale forwarded to the camera facade (lens metadata)
     * @param direction which way this camera faces
     */
    public InitializedOneCameraFactory(
            final Lifetime lifetime, final CameraStarter cameraStarter, CameraDeviceProxy device,
            List<Surface> outputSurfaces, MainThread mainThreadExecutor,
            HandlerFactory handlerFactory, float maxZoom, List<Size> supportedPreviewSizes,
            LinearScale lensRange, OneCamera.Facing direction) {
        // Assembles and returns a OneCamera based on the CameraStarter.
        // Create/wrap required threads.
        final Handler cameraHandler = handlerFactory.create(lifetime, "CameraHandler");
        // Since we cannot create an actual PictureTaker and ManualAutoFocus
        // until the CaptureSession is available, so create ones which defer to
        // a Future of the actual implementation.
        final SettableFuture<PictureTaker> mPictureTaker = SettableFuture.create();
        PictureTaker pictureTaker = new DeferredPictureTaker(mPictureTaker);
        final SettableFuture<ManualAutoFocus> mManualAutoFocus = SettableFuture.create();
        ManualAutoFocus manualAutoFocus = new DeferredManualAutoFocus(
                mManualAutoFocus);
        // The OneCamera interface exposes various types of state, either
        // through getters, setters, or the ability to register listeners.
        // Since these values are interacted with by multiple threads, we can
        // use {@link ConcurrentState} to provide this functionality safely.
        final ConcurrentState<Float> zoomState = new ConcurrentState<>(1.0f);
        final ConcurrentState<Integer> afState = new ConcurrentState<>(
                CaptureResult.CONTROL_AF_STATE_INACTIVE);
        final ConcurrentState<OneCamera.FocusState> focusState = new ConcurrentState<>(new
                OneCamera.FocusState(0.0f, false));
        final ConcurrentState<Integer> afMode = new ConcurrentState<>(CaptureResult
                .CONTROL_AF_MODE_OFF);
        final ConcurrentState<Boolean> readyState = new ConcurrentState<>(false);
        // Wrap state to be able to register listeners which run on the main
        // thread.
        Listenable<Integer> afStateListenable = new Listenable<>(afState,
                mainThreadExecutor);
        Listenable<OneCamera.FocusState> focusStateListenable = new Listenable<>(
                focusState, mainThreadExecutor);
        Listenable<Boolean> readyStateListenable = new Listenable<>(readyState,
                mainThreadExecutor);
        // Wrap each value in a filter to ensure that only differences pass
        // through.
        final MetadataCallback metadataCallback = new MetadataCallback(
                new FilteredUpdatable<>(afState),
                new FilteredUpdatable<>(focusState),
                new FilteredUpdatable<>(afMode));
        // The following handles the initialization sequence in which we receive
        // various dependencies at different times in the following sequence:
        // 1. CameraDevice
        // 2. The Surface on which to render the preview stream
        // 3. The CaptureSession
        // When all three of these are available, the {@link #CameraFactory} can
        // be used to assemble the actual camera functionality (e.g. to take
        // pictures, and run AF scans).
        // Note that these must be created in reverse-order to when they are run
        // because each stage depends on the previous one.
        final CaptureSessionCreator captureSessionCreator = new CaptureSessionCreator(device,
                cameraHandler);
        PreviewStarter mPreviewStarter = new PreviewStarter(outputSurfaces,
                captureSessionCreator,
                new PreviewStarter.CameraCaptureSessionCreatedListener() {
                    @Override
                    public void onCameraCaptureSessionCreated(CameraCaptureSessionProxy session,
                            Surface previewSurface) {
                        // Session is ready: start the camera and resolve the
                        // deferred PictureTaker / ManualAutoFocus futures.
                        CameraStarter.CameraControls controls = cameraStarter.startCamera(
                                new Lifetime(lifetime),
                                session, previewSurface,
                                zoomState, metadataCallback, readyState);
                        mPictureTaker.set(controls.getPictureTaker());
                        mManualAutoFocus.set(controls.getManualAutoFocus());
                    }
                });
        PreviewSizeSelector previewSizeSelector =
                new Camera2PreviewSizeSelector(supportedPreviewSizes);
        mOneCamera = new GenericOneCameraImpl(lifetime, pictureTaker, manualAutoFocus, lensRange,
                mainThreadExecutor, afStateListenable, focusStateListenable, readyStateListenable,
                maxZoom, zoomState, direction, previewSizeSelector, mPreviewStarter);
    }

    /** @return the assembled OneCamera facade. */
    public OneCamera provideOneCamera() {
        return mOneCamera;
    }
}
|
// pages/weather/weather.js
// 引用百度地图微信小程序JSAPI模块
let bmap = require('../../libs/bmap-wx.js');
let tools = require('../../utils/tools.js');
let utils = require('./util.js');
Page({
  data: {
    style: '',                           // background style string for the page
    show: 'hide',                        // toggles visibility until data arrives
    mapIconSrc: '../../src/img/map.png',
    todyWeather: '',                     // today's weather (key typo "tody" kept: WXML binds to it)
    futureThreeDay: [],                  // next three days' forecast
    variousIndex: []                     // living indexes (dressing, UV, ...)
  },
  onLoad: function(options) {
    // Page init; options carries the parameters passed by page navigation.
    let _ = this;
    tools.loading('加载中...');
    let BMap = new bmap.BMapWX({
      ak: 'g4I2oOxpdnhxmuQwYaDrrLayDqZBft78'
    });
    // On failure: dismiss the spinner and offer a retry via `query`.
    let fail = function(data) {
      tools.loadingEnd();
      tools.errorDialog('数据获取失败,重新加载', query);
    };
    let success = function(data) {
      // Massage the raw Baidu response into our own view format.
      let _tody = _.dealTodayData(data.currentWeather[0]);
      let _future = _.dealFuture(data.originalData.results[0].weather_data);
      let _index = _.dealIndex(data.originalData.results[0].index);
      // NOTE(review): debug log left in; consider removing for production.
      console.log(data.originalData.results[0].index);
      _.setData({
        show: 'show',
        todyWeather: _._addItemData(_tody),
        futureThreeDay: _future,
        variousIndex: _index
      });
      tools.loadingEnd();
    }
    let query = function() {
      // Issue the weather request.
      BMap.weather({
        fail: fail,
        success: success
      });
    }
    query();
  },
  // Shapes today's raw weather record into the view model bound by the page.
  dealTodayData: function(data) {
    let _date = data.date.split('(')[0];
    // Extract the realtime temperature from e.g. "...(实时:12℃)" — strips the
    // parentheses via the ( to ) character range in the regex.
    let _now = parseInt(data.date.split(':')[1].replace(/[\(-\)]/g, '')) + '°';
    let _result = {
      city: data.currentCity,
      pm25: data.pm25,
      date: _date,
      realtimeTemperature: _now,
      temperature: utils.dealTemperature(data.temperature),
      weather: data.weatherDesc,
      wind: data.wind,
      iconSrc: utils.weatherLevel(data.weatherDesc),
    };
    return _result;
  },
  // Builds the 3-day forecast list; starts at index 1 because index 0 is today.
  dealFuture: function(data) {
    let _ = this;
    let _result = [];
    for (let i = 1; i < data.length; i++) {
      let _item = {
        weather: data[i].weather,
        date: data[i].date,
        temperature: utils.dealTemperature(data[i].temperature),
        iconSrc: utils.weatherMoreLevel(data[i].weather)
      };
      _result.push(_item);
    }
    return _result;
  },
  // Builds the living-index list.
  // NOTE(review): the loop starts at i = 1 and therefore skips the FIRST index
  // entry — unlike dealFuture there is no obvious "today" item to skip here;
  // confirm whether this should start at 0.
  dealIndex: (data) => {
    let _result = [];
    for (let i = 1; i < data.length; i++) {
      let _item = {
        title: data[i].title,
        value: data[i].zs,
        desc: data[i].des
      };
      _result.push(_item);
    }
    return _result;
  },
  // Attaches a background style derived from the weather description.
  _addItemData: function(item) {
    item.style = utils.returnStyle(item.weather);
    return item;
  },
  onReady: function() {
    // Page finished rendering.
  },
  onShow: function() {
    // Page shown.
  },
  onHide: function() {
    // Page hidden.
  },
  onUnload: function() {
    // Page closed.
  }
})
# combine lean(lede) into lienol source
#
# Copies LEDE's package/lean tree into the Lienol OpenWrt tree (only packages
# not already present) and pre-seeds an OpenClash clash core binary.
cd /workdir/openwrt
echo "Creating dir /workdir/openwrt/package/lean"
mkdir -p /workdir/openwrt/package/lean
echo "============================================================================"
echo "Copying from /workdir/lede/package/lean to /workdir/openwrt/package/lean ..."
echo "----------------------------------------------------------------------------"
# Copy each lean package only if it is missing from the target tree; the `ls`
# exit status is used as the existence test.
for dir in `ls /workdir/lede/package/lean`
do
    ls /workdir/openwrt/package/lean/$dir >/dev/null 2>&1
    if [ $? -ne 0 ]
    then
        echo "$dir is missing, copying..."
        cp -rp /workdir/lede/package/lean/$dir /workdir/openwrt/package/lean/
    else
        echo "$dir exists, skipping..."
    fi
done
ls -l /workdir/openwrt/package/lean
# Add openclash
git clone --depth=1 -b master https://github.com/vernesong/OpenClash
ARCH=armv8
# NOTE(review): cwd is still /workdir/openwrt here (git clone does not change
# directory), so this cd is redundant but harmless.
cd /workdir/openwrt
mkdir -p files/etc/openclash/core
cd files/etc/openclash/core
# Resolve the download URL of the clash core for $ARCH from the GitHub API.
clash_main_url=$(curl -sL https://api.github.com/repos/vernesong/OpenClash/releases/tags/Clash | grep /clash-linux-$ARCH | sed 's/.*url\": \"//g' | sed 's/\"//g')
#clash_tun_url=$(curl -sL https://api.github.com/repos/vernesong/OpenClash/releases/tags/TUN-Premium | grep /clash-linux-$ARCH | sed 's/.*url\": \"//g' | sed 's/\"//g')
#clash_game_url=$(curl -sL https://api.github.com/repos/vernesong/OpenClash/releases/tags/TUN | grep /clash-linux-$ARCH | sed 's/.*url\": \"//g' | sed 's/\"//g')
wget $clash_main_url && tar zxvf clash-linux-*.gz && rm -f clash-linux-*.gz
#wget -qO- $clash_main_url | gunzip -c > clash
#wget -qO- $clash_tun_url | gunzip -c > clash_tun
#wget -qO- $clash_game_url | tar xOvz > clash_game
chmod +x clash*
|
<filename>node/controller/ipfs.go
package main
import (
"log"
"os/exec"
)
// startIPFS launches the IPFS daemon in the background via "bash -c ... &",
// so the call returns immediately. Only failures to spawn the shell itself
// are observable here — daemon runtime errors are not.
func startIPFS() {
	// start ipfs and report any errors...which there shouldn't be
	log.Println("starting ipfs...")
	// Report spawn failures instead of silently discarding them (the
	// original ignored the error returned by Start).
	if err := exec.Command("bash", "-c", "ipfs daemon &").Start(); err != nil {
		log.Println("failed to start ipfs:", err)
	}
}
// stopIPFS kills any running ipfs process. The grep pattern "[i]pfs" keeps
// the grep process itself out of the match.
func stopIPFS() {
	// Fix: "kill" was previously misspelled "kil", so the daemon was never
	// actually stopped. Run's error stays ignored on purpose: when no ipfs
	// process exists the kill is a best-effort no-op.
	exec.Command("bash", "-c", "kill $(ps -ef | grep '[i]pfs' | awk '{print $2}')").Run()
}
|
#include "Config.h"
#include "Profile.h"
#include "packets/bnet/BNetProfile.h"
#include "cache/UserCache.h"
namespace Plugins
{
	namespace BNet
	{
		// Handles an incoming BNet profile request: reads the request token and
		// target username from the stream, looks the user up in the cache, and
		// replies with the user's stored profile fields.
		// Returns false when the user is unknown (no reply is sent).
		bool Profile::Process( Network::TcpClient& cl, Utils::Stream& in )
		{
			Packets::BNet::BNetProfile packet;
			std::string username;
			in >> packet.token >> username;
			// Unknown users abort the request without responding.
			Cache::UserCacheItem::Pointer ptr = Cache::userCache[username];
			if(ptr.get() == NULL)
			{
				return false;
			}
			// NOTE(review): result = 0 presumably means "success" — confirm
			// against the BNetProfile packet definition.
			packet.result = 0;
			packet.description = ptr->GetData("Profile\\Description");
			packet.location = ptr->GetData("Profile\\Location");
			packet.clan = 0; // TODO: clan support
			packet.BuildAndSendTo(cl);
			return true;
		}
	}
}
|
// Packed 0xAARRGGBB color helpers. Values are kept as UNSIGNED 32-bit
// integers so that argb() round-trips with the constants below.
// `var` replaces the previous implicit global assignment (which throws in
// strict mode); the global name `Color` is unchanged for callers.
var Color = {};

Color.BLACK = 0xff000000;
Color.CYAN = 0xff00ffff;
Color.WHITE = 0xffffffff;
Color.YELLOW = 0xffffff00;

// Packs the four 0-255 channels into one 0xAARRGGBB value.
// Fix: JS bitwise ops yield SIGNED 32-bit results, so the previous version
// returned e.g. -16777216 for argb(0xff,0,0,0) while Color.BLACK is
// 4278190080 — equality checks against the constants always failed. ">>> 0"
// converts the result back to unsigned.
Color.argb = function (alpha, red, green, blue)
{
    return (((alpha<<24) | (red<<16) | (green<<8) | blue) >>> 0);
};

// Extracts the alpha channel (0-255).
Color.alpha = function (color)
{
    return (color>>24)&0xff;
};

// Extracts the blue channel (0-255).
Color.blue = function (color)
{
    return color&0xff;
};

// Extracts the green channel (0-255).
Color.green = function (color)
{
    return (color>>8)&0xff;
};

// Extracts the red channel (0-255).
Color.red = function (color)
{
    return (color>>16)&0xff;
};
|
<reponame>chendave/buildkit
package remotecache
import (
"context"
"encoding/json"
"io"
"sync"
"time"
"github.com/containerd/containerd/content"
"github.com/containerd/containerd/images"
v1 "github.com/moby/buildkit/cache/remotecache/v1"
"github.com/moby/buildkit/solver"
"github.com/moby/buildkit/util/imageutil"
"github.com/moby/buildkit/worker"
digest "github.com/opencontainers/go-digest"
ocispec "github.com/opencontainers/image-spec/specs-go/v1"
"github.com/pkg/errors"
"github.com/sirupsen/logrus"
"golang.org/x/sync/errgroup"
)
// ResolveCacheImporterFunc returns importer and descriptor.
type ResolveCacheImporterFunc func(ctx context.Context, attrs map[string]string) (Importer, ocispec.Descriptor, error)

// Importer resolves a cache-manifest descriptor into a solver.CacheManager.
type Importer interface {
	Resolve(ctx context.Context, desc ocispec.Descriptor, id string, w worker.Worker) (solver.CacheManager, error)
}

// DistributionSourceLabelSetter is optionally implemented by content
// providers that can record/annotate where blobs were distributed from.
type DistributionSourceLabelSetter interface {
	SetDistributionSourceLabel(context.Context, digest.Digest) error
	SetDistributionSourceAnnotation(desc ocispec.Descriptor) ocispec.Descriptor
}

// NewImporter returns an Importer backed by the given content provider.
func NewImporter(provider content.Provider) Importer {
	return &contentCacheImporter{provider: provider}
}

// contentCacheImporter implements Importer on top of a content.Provider.
type contentCacheImporter struct {
	provider content.Provider
}
// Resolve implements Importer: it reads the manifest index addressed by desc,
// wires up a provider for each referenced layer, parses the cache config and
// returns a CacheManager for it. When the index carries no dedicated
// cache-config entry, the blob is treated as inline (image-embedded) cache.
func (ci *contentCacheImporter) Resolve(ctx context.Context, desc ocispec.Descriptor, id string, w worker.Worker) (solver.CacheManager, error) {
	dt, err := readBlob(ctx, ci.provider, desc)
	if err != nil {
		return nil, err
	}
	var mfst ocispec.Index
	if err := json.Unmarshal(dt, &mfst); err != nil {
		return nil, err
	}
	allLayers := v1.DescriptorProvider{}
	var configDesc ocispec.Descriptor
	// Split the index into the cache-config entry and the layer blobs.
	for _, m := range mfst.Manifests {
		if m.MediaType == v1.CacheConfigMediaTypeV0 {
			configDesc = m
			continue
		}
		allLayers[m.Digest] = v1.DescriptorProviderPair{
			Descriptor: m,
			Provider:   ci.provider,
		}
	}
	// If the provider can record blob provenance, label/annotate every layer.
	if dsls, ok := ci.provider.(DistributionSourceLabelSetter); ok {
		for dgst, l := range allLayers {
			err := dsls.SetDistributionSourceLabel(ctx, dgst)
			_ = err // error ignored because layer may not exist
			l.Descriptor = dsls.SetDistributionSourceAnnotation(l.Descriptor)
			allLayers[dgst] = l
		}
	}
	// No explicit cache config: fall back to cache records embedded in the
	// image config ("inline cache").
	if configDesc.Digest == "" {
		return ci.importInlineCache(ctx, dt, id, w)
	}
	dt, err = readBlob(ctx, ci.provider, configDesc)
	if err != nil {
		return nil, err
	}
	cc := v1.NewCacheChains()
	if err := v1.Parse(dt, allLayers, cc); err != nil {
		return nil, err
	}
	keysStorage, resultStorage, err := v1.NewCacheKeyStorage(cc, w)
	if err != nil {
		return nil, err
	}
	return solver.NewCacheManager(id, keysStorage, resultStorage), nil
}
// readBlob fetches the content addressed by desc from provider, refusing
// blobs larger than 1 MiB. A provider that returns the full payload together
// with io.EOF (e.g. an http.Response.Body-backed one) is accepted when the
// payload's digest matches desc.
func readBlob(ctx context.Context, provider content.Provider, desc ocispec.Descriptor) ([]byte, error) {
	const maxBlobSize = int64(1 << 20)
	if desc.Size > maxBlobSize {
		return nil, errors.Errorf("blob %s is too large (%d > %d)", desc.Digest, desc.Size, maxBlobSize)
	}
	payload, err := content.ReadBlob(ctx, provider, desc)
	if err == io.EOF {
		// EOF may accompany a complete payload; verify via the digest.
		if gotDigest := desc.Digest.Algorithm().FromBytes(payload); gotDigest == desc.Digest {
			err = nil
		} else {
			err = errors.Wrapf(err, "got EOF, expected %s (%d bytes), got %s (%d bytes)",
				desc.Digest, desc.Size, gotDigest, len(payload))
		}
	}
	return payload, errors.WithStack(err)
}
// importInlineCache builds cache managers from "inline" cache: cache records
// embedded in an image config under the moby.buildkit.cache.v0 key rather
// than in a dedicated cache-config blob. dt is the raw manifest/index blob
// the import started from; every distribution manifest reachable from it is
// processed concurrently and the resulting managers are combined.
func (ci *contentCacheImporter) importInlineCache(ctx context.Context, dt []byte, id string, w worker.Worker) (solver.CacheManager, error) {
	m := map[digest.Digest][]byte{}
	if err := ci.allDistributionManifests(ctx, dt, m); err != nil {
		return nil, err
	}
	var mu sync.Mutex
	var cMap = map[digest.Digest]*v1.CacheChains{}
	eg, ctx := errgroup.WithContext(ctx)
	// One goroutine per manifest; goroutines that find usable inline cache
	// add a CacheChains to cMap under mu.
	for dgst, dt := range m {
		// The closure re-binds dgst/dt so each goroutine sees its own pair.
		func(dgst digest.Digest, dt []byte) {
			eg.Go(func() error {
				var m ocispec.Manifest // shadows the outer manifest map inside this goroutine
				if err := json.Unmarshal(dt, &m); err != nil {
					return errors.WithStack(err)
				}
				if m.Config.Digest == "" || len(m.Layers) == 0 {
					return nil
				}
				if dsls, ok := ci.provider.(DistributionSourceLabelSetter); ok {
					for i, l := range m.Layers {
						err := dsls.SetDistributionSourceLabel(ctx, l.Digest)
						_ = err // error ignored because layer may not exist
						m.Layers[i] = dsls.SetDistributionSourceAnnotation(l)
					}
				}
				p, err := content.ReadBlob(ctx, ci.provider, m.Config)
				if err != nil {
					return errors.WithStack(err)
				}
				var img image
				if err := json.Unmarshal(p, &img); err != nil {
					return errors.WithStack(err)
				}
				if len(img.Rootfs.DiffIDs) != len(m.Layers) {
					logrus.Warnf("invalid image with mismatching manifest and config")
					return nil
				}
				// No embedded cache records in this image: nothing to import.
				if img.Cache == nil {
					return nil
				}
				var config v1.CacheConfig
				if err := json.Unmarshal(img.Cache, &config.Records); err != nil {
					return errors.WithStack(err)
				}
				createdDates, createdMsg, err := parseCreatedLayerInfo(img)
				if err != nil {
					return err
				}
				layers := v1.DescriptorProvider{}
				// Annotate each layer with creation metadata and its
				// uncompressed diffID, chaining layers by index (parent i-1).
				for i, m := range m.Layers { // m re-shadowed as the layer descriptor here
					if m.Annotations == nil {
						m.Annotations = map[string]string{}
					}
					if createdAt := createdDates[i]; createdAt != "" {
						m.Annotations["buildkit/createdat"] = createdAt
					}
					if createdBy := createdMsg[i]; createdBy != "" {
						m.Annotations["buildkit/description"] = createdBy
					}
					m.Annotations["containerd.io/uncompressed"] = img.Rootfs.DiffIDs[i].String()
					layers[m.Digest] = v1.DescriptorProviderPair{
						Descriptor: m,
						Provider:   ci.provider,
					}
					config.Layers = append(config.Layers, v1.CacheLayer{
						Blob:        m.Digest,
						ParentIndex: i - 1,
					})
				}
				dt, err = json.Marshal(config)
				if err != nil {
					return errors.WithStack(err)
				}
				cc := v1.NewCacheChains()
				if err := v1.ParseConfig(config, layers, cc); err != nil {
					return err
				}
				mu.Lock()
				cMap[dgst] = cc
				mu.Unlock()
				return nil
			})
		}(dgst, dt)
	}
	if err := eg.Wait(); err != nil {
		return nil, err
	}
	cms := make([]solver.CacheManager, 0, len(cMap))
	for _, cc := range cMap {
		keysStorage, resultStorage, err := v1.NewCacheKeyStorage(cc, w)
		if err != nil {
			return nil, err
		}
		cms = append(cms, solver.NewCacheManager(id, keysStorage, resultStorage))
	}
	return solver.NewCombinedCacheManager(cms, nil), nil
}
// allDistributionManifests recursively collects every distribution manifest
// reachable from dt into m, keyed by digest. dt may be a single manifest or a
// manifest list / OCI index; unknown media types are silently skipped.
func (ci *contentCacheImporter) allDistributionManifests(ctx context.Context, dt []byte, m map[digest.Digest][]byte) error {
	mt, err := imageutil.DetectManifestBlobMediaType(dt)
	if err != nil {
		return err
	}
	switch mt {
	case images.MediaTypeDockerSchema2Manifest, ocispec.MediaTypeImageManifest:
		m[digest.FromBytes(dt)] = dt
	case images.MediaTypeDockerSchema2ManifestList, ocispec.MediaTypeImageIndex:
		var index ocispec.Index
		if err := json.Unmarshal(dt, &index); err != nil {
			return errors.WithStack(err)
		}
		for _, d := range index.Manifests {
			// Already collected (manifest shared between indexes): skip re-reading.
			if _, ok := m[d.Digest]; ok {
				continue
			}
			p, err := content.ReadBlob(ctx, ci.provider, d)
			if err != nil {
				return errors.WithStack(err)
			}
			if err := ci.allDistributionManifests(ctx, p, m); err != nil {
				return err
			}
		}
	}
	return nil
}
// image is the subset of an image config this package needs: rootfs diffIDs,
// the embedded inline-cache records, and the history entries used to recover
// per-layer creation metadata.
type image struct {
	Rootfs struct {
		DiffIDs []digest.Digest `json:"diff_ids"`
	} `json:"rootfs"`
	// Cache holds the raw inline cache records; nil when the image has none.
	Cache []byte `json:"moby.buildkit.cache.v0"`
	History []struct {
		Created    *time.Time `json:"created,omitempty"`
		CreatedBy  string     `json:"created_by,omitempty"`
		EmptyLayer bool       `json:"empty_layer,omitempty"`
	} `json:"history,omitempty"`
}
// parseCreatedLayerInfo walks the image history and returns, for every
// non-empty layer, its creation timestamp (RFC 3339 text, "" when absent)
// and its created-by command, in history order. The two slices are parallel.
func parseCreatedLayerInfo(img image) ([]string, []string, error) {
	createdAt := make([]string, 0, len(img.Rootfs.DiffIDs))
	createdBy := make([]string, 0, len(img.Rootfs.DiffIDs))
	for _, entry := range img.History {
		// Empty-layer entries (e.g. ENV, LABEL) produce no layer; skip them.
		if entry.EmptyLayer {
			continue
		}
		date := ""
		if entry.Created != nil {
			raw, err := entry.Created.MarshalText()
			if err != nil {
				return nil, nil, err
			}
			date = string(raw)
		}
		createdAt = append(createdAt, date)
		createdBy = append(createdBy, entry.CreatedBy)
	}
	return createdAt, createdBy, nil
}
|
import unittest
import os
from sqltxt.table import Table
from sqltxt.column import Column, ColumnName, AmbiguousColumnNameError
from sqltxt.expression import Expression
class TableTest(unittest.TestCase):
def setUp(self):
self.data_path = os.path.join(os.path.dirname(__file__), '../data')
table_header = ["col_a", "col_b"]
table_contents = """1,1
2,3
3,2"""
self.table_a = Table.from_cmd(
name = 'table_a',
cmd = 'echo -e "{0}"'.format(table_contents),
columns = table_header
)
table_header = ["col_a", "col_b"]
table_contents = """1,w
2,x
2,y
5,z"""
self.table_b = Table.from_cmd(
name = 'table_b',
cmd = 'echo -e "{0}"'.format(table_contents),
columns = table_header
)
def test_subset_rows(self):
conditions = [
[Expression('col_b', '==', '1'), 'or', Expression('col_a', '==', '2')]
]
self.table_a.subset_rows(conditions)
cmds_actual = self.table_a.cmds
cmds_expected = [
'echo -e "1,1\n2,3\n3,2"',
"awk -F',' 'OFS=\",\" { if (($2 == 1 || $1 == 2)) { print $1,$2 } }'"]
self.assertEqual(cmds_actual, cmds_expected)
def test_order_columns(self):
col_name_order = [ColumnName('col_b'), ColumnName('col_a')]
self.table_a.order_columns(col_name_order)
cmds_actual = self.table_a.cmds
cmds_expected = ['echo -e "1,1\n2,3\n3,2"', "awk -F',' 'OFS=\",\" { print $2,$1 }'"]
self.assertEqual(cmds_actual, cmds_expected)
def test_sort(self):
sort_by_col_names = [ColumnName('col_a'), ColumnName('col_b')]
self.table_a.sort(sort_by_col_names)
cmds_actual = self.table_a.cmds
cmds_expected = ['echo -e "1,1\n2,3\n3,2"', "sort -t, -k 1,1 -k 2,2"]
self.assertEqual(cmds_actual, cmds_expected)
sort_by_cols = [self.table_a.get_column_for_name(cn) for cn in sort_by_col_names]
self.assertEqual(self.table_a.sorted_by, sort_by_cols)
def test_is_sorted_by(self):
table_from_cmd = Table.from_cmd(
name = 'table_a',
cmd = 'echo -e ""',
columns = ['col_a', 'col_b'])
table_from_cmd.sorted_by = [Column('table_a.col_a'), Column('table_a.col_b')]
self.assertTrue(table_from_cmd.is_sorted_by([0]))
self.assertFalse(table_from_cmd.is_sorted_by([1]))
self.assertTrue(table_from_cmd.is_sorted_by([0,1]))
def test_get_column_for_name_raises_on_ambiguity(self):
table_from_cmd = Table.from_cmd(
name = 'table_a',
cmd = 'echo -e ""',
columns = ['col_a', 'col_a'])
with self.assertRaisesRegexp(AmbiguousColumnNameError, 'Ambiguous column reference'):
table_from_cmd.get_column_for_name(ColumnName('col_a'))
table_from_cmd = Table.from_cmd(
name = 'table_a',
cmd = 'echo -e ""',
columns = ['ta.col_a', 'tb.col_a'])
with self.assertRaisesRegexp(AmbiguousColumnNameError, 'Ambiguous column reference'):
table_from_cmd.get_column_for_name(ColumnName('col_a'))
first_column = Column('ta.col_a')
first_column.add_name('col_alpha')
second_column = Column('tb.col_a')
table_from_cmd = Table.from_cmd(
name = 'table_a',
cmd = 'echo -e ""',
columns = [first_column, second_column])
with self.assertRaisesRegexp(AmbiguousColumnNameError, 'Ambiguous column reference'):
table_from_cmd.get_column_for_name(ColumnName('col_a'))
def test_sample_rows(self):
    """sample_rows(n) should append an awk reservoir-sampling command,
    parameterized by n (here n=1), to the table's command pipeline.

    NOTE(review): the exact interior whitespace of the expected awk string
    must match what Table.sample_rows emits — the doubled braces are literal
    awk braces escaped for str.format.
    """
    self.table_a.sample_rows(1)
    cmds_actual = self.table_a.cmds
    cmds_expected = ['echo -e "1,1\n2,3\n3,2"',
        """awk -v seed=$RANDOM -v n={0} '
BEGIN {{ srand(seed) }}
NR <= n {{ reservoir[NR] = $0 }}
NR > n {{ M = int(rand() * NR) + 1; if (M <= n) {{ reservoir[M] = $0 }}}}
END {{ for (key in reservoir) {{ print reservoir[key] }}}}'""".format(1)
        ]
    self.assertEqual(cmds_actual, cmds_expected)
def test_get_cmd_str(self):
    """get_cmd_str should render the table's backing source plus any queued
    commands as a single shell pipeline string."""
    table_from_file = Table.from_file_path(os.path.join(self.data_path, 'table_a.txt'))
    # output from a file-backed Table to STDOUT (tail -n+2 skips the header row)
    cmd_actual = table_from_file.get_cmd_str()
    cmd_expected = 'tail -n+2 {}/table_a.txt'.format(self.data_path)
    self.assertEqual(cmd_actual, cmd_expected)
    table_from_cmd = Table.from_cmd(
        'table_a',
        cmd = 'echo -e "1,2,3,4"',
        columns = ['col_a', 'col_b', 'col_c', 'col_d'])
    # output from a command-backed Table to STDOUT
    cmd_actual = table_from_cmd.get_cmd_str()
    cmd_expected = 'echo -e "1,2,3,4"'
    self.assertEqual(cmd_actual, cmd_expected)
    # add a command, then output
    table_from_cmd.cmds += ['sort']
    # to STDOUT: queued commands are joined into a pipeline with ' | '
    cmd_actual = table_from_cmd.get_cmd_str()
    cmd_expected = 'echo -e "1,2,3,4" | sort'
    self.assertEqual(cmd_actual, cmd_expected)
|
def get_sum(numbers):
    """Return the sum of an iterable of numbers.

    Args:
        numbers: any iterable of numeric values.

    Returns:
        The arithmetic sum, or 0 for an empty iterable (same as the
        original accumulator loop).
    """
    # Builtin sum() replaces the manual accumulator loop — identical result.
    return sum(numbers)
<!-- Order creation form: POSTs customer_name, customer_email and quantity
     to /orders. Each label is associated with its input via for/id so the
     form is accessible (clicking a label focuses its field). -->
<form action="/orders" method="post">
  <div>
    <label for="customer_name">Customer Name:</label>
    <input type="text" id="customer_name" name="customer_name">
  </div>
  <div>
    <label for="customer_email">Customer Email:</label>
    <input type="email" id="customer_email" name="customer_email">
  </div>
  <div>
    <label for="quantity">Product Quantity:</label>
    <input type="number" id="quantity" name="quantity">
  </div>
  <input type="submit" value="Create Order">
</form>
#!/bin/sh

# Bootstrap a fresh macOS machine: Homebrew, MySQL, PHP, Composer/Valet,
# dotfile symlinks and application preferences.

echo "Setting up your Mac..."

# Check for Homebrew and install if we don't have it
# NOTE(review): this is the legacy ruby-based Homebrew installer URL;
# Homebrew now ships a bash installer (install.sh) — confirm before relying
# on this long-term.
if test ! "$(which brew)"; then
  /usr/bin/ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)"
fi

# Update Homebrew recipes
echo "Updating Homebrew recipes..."
brew update

# Install all our dependencies with bundle (See Brewfile)
echo "Installing Homebrew dependencies..."
brew tap homebrew/bundle
brew bundle

# Set default MySQL root password and auth type.
echo "Starting MySQL service..."
brew services start mysql
mysql -u root -e "ALTER USER root@localhost IDENTIFIED WITH mysql_native_password BY 'password'; FLUSH PRIVILEGES;"

# Install PHP extensions with PECL
echo "Installing PHP extensions with PECL..."
pecl install memcached imagick

# Install global Composer packages
echo "Installing global composer dependencies..."
/usr/local/bin/composer global require laravel/installer laravel/valet

# Create a Sites directory
# This is a default directory for macOS user accounts but doesn't come pre-installed
echo "Creating Sites folder..."
# -p makes this idempotent: without it, mkdir errors if the folder exists
mkdir -p "$HOME/Sites"

# Install Laravel Valet
echo "Installing valet..."
"$HOME/.composer/vendor/bin/valet" install

# Set Sites folder to serve valet
(cd "$HOME/Sites" && valet park)

# Removes .zshrc from $HOME (if it exists) and symlinks the .zshrc file from the .dotfiles
echo "Setting up zsh..."
rm -rf "$HOME/.zshrc"
ln -s "$HOME/.dotfiles/.zshrc" "$HOME/.zshrc"

# Symlink the Mackup config file to the home directory
# NOTE(review): ln -s fails if ~/.mackup.cfg already exists — confirm whether
# an -f overwrite is desired on re-runs.
ln -s "$HOME/.dotfiles/.mackup.cfg" "$HOME/.mackup.cfg"

# Retrieve the application preferences & profiles using mackup
echo "Restoring application preferences..."
mackup restore

# Load the shell dotfiles, and then some:
# * ~/.extra can be used for other settings you don't want to commit.
# NOTE(review): the glob below reads ~/extra/... while the comment says
# ~/.extra — confirm which path is intended before changing either.
for file in ~/extra/.{gitconfig,vscode}; do
  [ -r "$file" ] && [ -f "$file" ] && source "$file";
done;
unset file;

# Set macOS preferences
# We will run this last because this will reload the shell
echo "Setting up MacOS preferences..."
source .macos
# Copyright 2014 (?) todo: research from which script this has been adapted from
# Copyright 2015 Language Technology, Technische Universitaet Darmstadt (author: Benjamin Milde)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Input lexicon directory and output language-model working directory
srcdir=data/local/lang
dir=data/local/lm

. ./utils/parse_options.sh

# path.sh defines KALDI_ROOT and the Kaldi tool PATH; abort if missing
if [ -f path.sh ]; then
  . path.sh;
else
  echo "missing path.sh"; exit 1;
fi

# Byte-wise collation so sort/awk behave deterministically across locales
export LC_ALL=C

#train_lm.sh
mkdir -p $dir

. ./path.sh || exit 1; # for KALDI_ROOT
export PATH=$KALDI_ROOT/tools/kaldi_lm:$PATH

( # First make sure the kaldi_lm toolkit is installed.
  cd $KALDI_ROOT/tools || exit 1;
  if [ -d kaldi_lm ]; then
    echo Not installing the kaldi_lm toolkit since it is already there.
  else
    echo Downloading and installing the kaldi_lm tools
    if [ ! -f kaldi_lm.tar.gz ]; then
      wget http://www.danielpovey.com/files/kaldi/kaldi_lm.tar.gz || exit 1;
    fi
    tar -xvzf kaldi_lm.tar.gz || exit 1;
    cd kaldi_lm
    make || exit 1;
    echo Done making the kaldi_lm tools
  fi
) || exit 1;

# OLDER version:
# Get a wordlist-- keep everything but silence, which should not appear in
# the LM.
awk '{print $1}' $srcdir/lexiconp.txt | grep -v -w '!SIL' > $dir/wordlist.txt

# Get training data with OOV words (w.r.t. our current vocab) replaced with <UNK>
echo "Getting training data with OOV words replaced with <UNK> (unkown word) (train_nounk.gz)"
gunzip -c $dir/cleaned.gz | awk -v w=$dir/wordlist.txt \
'BEGIN{while((getline<w)>0) v[$1]=1;}
{for (i=1;i<=NF;i++) if ($i in v) printf $i" ";else printf "<UNK> ";print ""}'|sed 's/ $//g' \
| gzip -c > $dir/train_nounk.gz

# Get unigram counts (without bos/eos, but this doesn't matter here, it's
# only to get the word-map, which treats them specially & doesn't need their
# counts).
# Add a 1-count for each word in word-list by including that in the data,
# so all words appear.
gunzip -c $dir/train_nounk.gz | cat - $dir/wordlist.txt | \
awk '{ for(x=1;x<=NF;x++) count[$x]++; } END{for(w in count){print count[w], w;}}' | \
sort -nr > $dir/unigram.counts

# Get "mapped" words-- a character encoding of the words that makes the common words very short.
cat $dir/unigram.counts | awk '{print $2}' | get_word_map.pl "<s>" "</s>" "<UNK>" > $dir/word_map

# Re-encode the training data using the compact word map produced above
gunzip -c $dir/train_nounk.gz | awk -v wmap=$dir/word_map 'BEGIN{while((getline<wmap)>0)map[$1]=$2;}
{ for(n=1;n<=NF;n++) { printf map[$n]; if(n<NF){ printf " "; } else { print ""; }}}' | gzip -c >$dir/train.gz
echo training kaldi_lm with 3gram-mincount
# Remove any previous LM build. Use $dir (the script's configured output
# directory) instead of the former hard-coded data/local/lm path, and -f so a
# missing directory does not print an error on the first run.
rm -rf "$dir/3gram-mincount/"
train_lm.sh --arpa --lmtype 3gram-mincount $dir
#prune_lm.sh --arpa 6.0 $dir/3gram-mincount/
#prune_lm.sh --arpa 8.0 $dir/3gram-mincount/
#prune_lm.sh --arpa 10.0 $dir/3gram-mincount/
#prune_lm.sh --arpa 16.0 $dir/3gram-mincount/
prune_lm.sh --arpa 10.0 $dir/3gram-mincount/
# create unpruned const arpa for best path rescoring
# utils/build_const_arpa_lm.sh data/local/lm/3gram-mincount/lm_unpruned.gz data/lang/ data/lang_const_arpa/
# we could also train a 4 gram model (omitted by default)
train_lm.sh --arpa --lmtype 4gram-mincount $dir
prune_lm.sh --arpa 10.0 $dir/4gram-mincount
echo done
exit 0
|
# frozen_string_literal: true

require 'avro/builder/type_factory'
require 'avro/builder/aliasable'

module Avro
  module Builder
    # This class represents a field in a record.
    # A field must be initialized with a type.
    class Field
      include Avro::Builder::DslOptions
      include Avro::Builder::DslAttributes
      include Avro::Builder::Aliasable
      include Avro::Builder::AnonymousTypes

      # Attributes that may only be set via the +internal+ hash passed to
      # the constructor, never through the public DSL options.
      INTERNAL_ATTRIBUTES = [:optional_field].to_set.freeze

      # These attributes may be set as options or via a block in the DSL
      dsl_attributes :doc, :default, :order

      # @param name [String, Symbol] the field name (stored as a String)
      # @param avro_type_or_name an existing type object or a builtin/defined type name
      # @param record the record that owns this field
      # @param cache type-lookup cache used to resolve named types
      # @param internal [Hash] internal-only attributes (see INTERNAL_ATTRIBUTES)
      # @param options [Hash] DSL attributes for the field; leftovers are
      #   forwarded to the type constructor
      # @param block optional DSL block evaluated against this field
      def initialize(name:, avro_type_or_name:, record:, cache:, internal: {}, options: {}, &block)
        @cache = cache
        @record = record
        @name = name.to_s

        # Only whitelisted internal attributes are assigned via their setters
        internal.each do |key, value|
          send("#{key}=", value) if INTERNAL_ATTRIBUTES.include?(key)
        end

        # Field-level DSL attributes are consumed here; whatever remains in
        # type_options is passed on when building the type below
        type_options = options.dup
        options.keys.each do |key|
          send(key, type_options.delete(key)) if dsl_attribute?(key)
        end

        # Find existing Type or build a new instance of a builtin Type using
        # the supplied block
        @field_type = type_lookup(avro_type_or_name, namespace) do |avro_type_name|
          create_and_configure_builtin_type(avro_type_name,
                                            field: self,
                                            cache: cache,
                                            internal: internal,
                                            validate_type: false,
                                            options: type_options)
        end

        # DSL calls must be evaluated after the type has been constructed
        instance_eval(&block) if block_given?
        @field_type.validate!
      end

      ## Delegate additional DSL calls to the type
      def respond_to_missing?(id, _include_all)
        field_type.dsl_respond_to?(id) || super
      end

      def method_missing(id, *args, &block)
        field_type.dsl_respond_to?(id) ? field_type.send(id, *args, &block) : super
      end

      def name_fragment
        record.name_fragment
      end

      # Delegate setting namespace explicitly via DSL to type
      # and return the namespace value from the enclosing record.
      def namespace(value = nil)
        if value
          field_type.namespace(value)
        else
          record.namespace
        end
      end

      # Delegate setting name explicitly via DSL to type
      def name(value = nil)
        if value
          field_type.name(value)
        else
          # Return the name of the field
          @name
        end
      end

      # Serialize this field to an Avro-schema-compatible Hash.
      # Nil-valued keys are dropped, but an optional field re-adds an
      # explicit +default: nil+ so the union-with-null default survives.
      def serialize(reference_state)
        # TODO: order is not included here
        {
          name: name,
          type: serialized_type(reference_state),
          doc: doc,
          default: default,
          aliases: aliases
        }.reject { |_, v| v.nil? }.tap do |result|
          result.merge!(default: nil) if optional_field
        end
      end

      private

      attr_accessor :field_type, :optional_field, :cache, :record

      # Optional fields must be serialized as a union -- an array of types.
      def serialized_type(reference_state)
        result = field_type.serialize(reference_state)
        optional_field ? field_type.class.union_with_null(result) : result
      end
    end
  end
end
import os

def count_files_in_dirs(file_paths):
    """Count how many of the given file paths fall in each directory.

    Args:
        file_paths: iterable of path strings.

    Returns:
        dict mapping each directory (os.path.dirname of a path; '' for
        bare filenames) to the number of paths in that directory.
    """
    dir_counts = {}
    for path in file_paths:
        directory = os.path.dirname(path)
        # dict.get with a default replaces the original if/else branch
        dir_counts[directory] = dir_counts.get(directory, 0) + 1
    return dir_counts
<reponame>NajibAdan/kitsu-server
# SimpleCov coverage configuration: must be required before the application
# code loads so that all subsequently-loaded files are instrumented.
require 'simplecov'
SimpleCov.start do
  # Exclude non-application code from coverage reporting
  add_filter '/spec/'
  add_filter '/config/'
  add_filter '/db/'
  add_filter '/vendor/'
  # Group results in the coverage report by application layer
  add_group 'Controllers', 'app/controllers'
  add_group 'Models', 'app/models'
  add_group 'Mailers', 'app/mailers'
  add_group 'Services', 'app/services'
  add_group 'Workers', 'app/workers'
  add_group 'Serializers', 'app/serializers'
  add_group 'Policies', 'app/policies'
  add_group 'Libs', 'lib/'
  # Report files never loaded by the test suite as 0% covered instead of
  # silently omitting them
  track_files '{app,lib}/**/*.rb'
end
|
#!/usr/bin/env bash
# ~/macos-setup.sh — Based on mathiasbynens, paulirish & alrra's dotfiles
# =============================================================================
# Initialization
# =============================================================================
# Ask for the administrator password upfront
sudo -v
# Keep-alive: update existing `sudo` time stamp until `macos-setup.sh` has finished
while true; do sudo -n true; sleep 60; kill -0 "$$" || exit; done 2>/dev/null &
# =============================================================================
# General UI/UX
# =============================================================================
# Set computer name (as done via System Preferences → Sharing)
sudo scutil --set ComputerName "Aluminium"
sudo scutil --set HostName "Aluminium.local"
sudo scutil --set LocalHostName "Aluminium"
sudo defaults write /Library/Preferences/SystemConfiguration/com.apple.smb.server NetBIOSName -string "Aluminium"
# Disable the sound effects on boot
sudo nvram SystemAudioVolume=" "
# Show battery life percentage
defaults write com.apple.menuextra.battery ShowPercent -string "YES"
# Hide the Time Machine and User icons in menu bar
for domain in ~/Library/Preferences/ByHost/com.apple.systemuiserver.*; do
defaults write "${domain}" dontAutoLoad -array \
"/System/Library/CoreServices/Menu Extras/TimeMachine.menu" \
"/System/Library/CoreServices/Menu Extras/User.menu"
done
defaults write com.apple.systemuiserver menuExtras -array \
"/System/Library/CoreServices/Menu Extras/Bluetooth.menu" \
"/System/Library/CoreServices/Menu Extras/AirPort.menu" \
"/System/Library/CoreServices/Menu Extras/Battery.menu" \
"/System/Library/CoreServices/Menu Extras/Clock.menu"
# Set sidebar icon size to medium
defaults write NSGlobalDomain NSTableViewDefaultSizeMode -int 2
# Increase window resize speed for Cocoa applications
defaults write NSGlobalDomain NSWindowResizeTime -float 0.001
# Expand save panel by default
defaults write NSGlobalDomain NSNavPanelExpandedStateForSaveMode -bool true
defaults write NSGlobalDomain NSNavPanelExpandedStateForSaveMode2 -bool true
# Expand print panel by default
defaults write NSGlobalDomain PMPrintingExpandedStateForPrint -bool true
defaults write NSGlobalDomain PMPrintingExpandedStateForPrint2 -bool true
# Save to disk (not to iCloud) by default
defaults write NSGlobalDomain NSDocumentSaveNewDocumentsToCloud -bool false
# Automatically quit printer app once the print jobs complete
defaults write com.apple.print.PrintingPrefs "Quit When Finished" -bool true
# Disable the “Are you sure you want to open this application?” dialog
defaults write com.apple.LaunchServices LSQuarantine -bool false
# Remove duplicates in the “Open With” menu (also see `lscleanup` alias)
/System/Library/Frameworks/CoreServices.framework/Frameworks/LaunchServices.framework/Support/lsregister -kill -r -domain local -domain system -domain user
# Display ASCII control characters using caret notation in standard text views
# Try e.g. `cd /tmp; unidecode "\x{0000}" > cc.txt; open -e cc.txt`
defaults write NSGlobalDomain NSTextShowsControlCharacters -bool true
# Set Help Viewer windows to non-floating mode
defaults write com.apple.helpviewer DevMode -bool true
# Restart automatically if the computer freezes
sudo systemsetup -setrestartfreeze on
# Check for software updates daily, not just once per week
defaults write com.apple.SoftwareUpdate ScheduleFrequency -int 1
# Disable Notification Center and remove the menu bar icon
launchctl unload -w /System/Library/LaunchAgents/com.apple.notificationcenterui.plist 2> /dev/null
# Disable smart quotes as they’re annoying when typing code
defaults write NSGlobalDomain NSAutomaticQuoteSubstitutionEnabled -bool false
# Disable smart dashes as they’re annoying when typing code
defaults write NSGlobalDomain NSAutomaticDashSubstitutionEnabled -bool false
# =============================================================================
# SSD-specific tweaks
# You might want to disable these if you are not running an SSD
# =============================================================================
# Disable hibernation (speeds up entering sleep mode)
sudo pmset -a hibernatemode 0
# Disable the sudden motion sensor as it’s not useful for SSDs
sudo pmset -a sms 0
# =============================================================================
# Security
# =============================================================================
# Disable captive portal
sudo defaults write /Library/Preferences/SystemConfiguration/com.apple.captive.control Active -bool false
# =============================================================================
# Various I/O
# =============================================================================
# Enable tap to click for this user and for the login screen
defaults write com.apple.driver.AppleBluetoothMultitouch.trackpad Clicking -bool true
defaults -currentHost write NSGlobalDomain com.apple.mouse.tapBehavior -int 1
defaults write NSGlobalDomain com.apple.mouse.tapBehavior -int 1
# Disable “natural” (Lion-style) scrolling
defaults write NSGlobalDomain com.apple.swipescrolldirection -bool false
# Increase sound quality for Bluetooth headphones/headsets
defaults write com.apple.BluetoothAudioAgent "Apple Bitpool Min (editable)" -int 40
# Enable full keyboard access for all controls
# (e.g. enable Tab in modal dialogs)
defaults write NSGlobalDomain AppleKeyboardUIMode -int 3
# Use scroll gesture with the Ctrl (^) modifier key to zoom
defaults write com.apple.universalaccess closeViewScrollWheelToggle -bool true
defaults write com.apple.universalaccess HIDScrollZoomModifierMask -int 262144
# Follow the keyboard focus while zoomed in
defaults write com.apple.universalaccess closeViewZoomFollowsFocus -bool true
# Disable press-and-hold for keys in favor of key repeat
defaults write NSGlobalDomain ApplePressAndHoldEnabled -bool false
# Set a blazingly fast keyboard repeat rate
defaults write NSGlobalDomain KeyRepeat -int 1
defaults write NSGlobalDomain InitialKeyRepeat -int 15
# Disable auto-correct
defaults write NSGlobalDomain NSAutomaticSpellingCorrectionEnabled -bool false
# =============================================================================
# Screen
# =============================================================================
# Require password immediately after sleep or screen saver begins
defaults write com.apple.screensaver askForPassword -int 1
defaults write com.apple.screensaver askForPasswordDelay -int 0
# Save screenshots to Pictures/Screenshots; -p keeps the script idempotent
# (without it, mkdir errors on every re-run once the folder exists)
mkdir -p "${HOME}/Pictures/Screenshots"
defaults write com.apple.screencapture location -string "${HOME}/Pictures/Screenshots"
# Save screenshots in PNG format (other options: BMP, GIF, JPG, PDF, TIFF)
defaults write com.apple.screencapture type -string "png"
# Disable shadow in screenshots
defaults write com.apple.screencapture disable-shadow -bool true
# Enable subpixel font rendering on non-Apple LCDs
defaults write NSGlobalDomain AppleFontSmoothing -int 2
# Enable HiDPI display modes (requires restart)
sudo defaults write /Library/Preferences/com.apple.windowserver DisplayResolutionEnabled -bool true
# =============================================================================
# Finder #
# =============================================================================
# Allow quitting via ⌘ + Q; doing so will also hide desktop icons
defaults write com.apple.finder QuitMenuItem -bool true
# Disable window animations and Get Info animations
defaults write com.apple.finder DisableAllAnimations -bool true
# Set Desktop as the default location for new Finder windows
# For other paths, use `PfLo` and `file:///full/path/here/`
defaults write com.apple.finder NewWindowTarget -string "PfDe"
defaults write com.apple.finder NewWindowTargetPath -string "file://${HOME}/Desktop/"
# Show icons for removable media on the desktop
defaults write com.apple.finder ShowRemovableMediaOnDesktop -bool true
# Show hidden files by default
defaults write com.apple.finder AppleShowAllFiles -bool true
# Show all filename extensions
defaults write NSGlobalDomain AppleShowAllExtensions -bool true
# Show path bar
defaults write com.apple.finder ShowPathbar -bool true
# Allow text selection in Quick Look
defaults write com.apple.finder QLEnableTextSelection -bool true
# Display full POSIX path as Finder window title
defaults write com.apple.finder _FXShowPosixPathInTitle -bool true
# When performing a search, search the current folder by default
defaults write com.apple.finder FXDefaultSearchScope -string "SCcf"
# Disable the warning when changing a file extension
defaults write com.apple.finder FXEnableExtensionChangeWarning -bool false
# Enable spring loading for directories
defaults write NSGlobalDomain com.apple.springing.enabled -bool true
# Tweak the spring loading delay for directories
defaults write NSGlobalDomain com.apple.springing.delay -float .5
# Avoid creating .DS_Store files on network volumes
defaults write com.apple.desktopservices DSDontWriteNetworkStores -bool true
# Open new finder windows in tabs
defaults write NSGlobalDomain AppleWindowTabbingMode -string "always"
# Automatically open a new Finder window when a volume is mounted
defaults write com.apple.frameworks.diskimages auto-open-ro-root -bool true
defaults write com.apple.frameworks.diskimages auto-open-rw-root -bool true
defaults write com.apple.finder OpenWindowForNewRemovableDisk -bool true
# Show item info near icons on the desktop and in other icon views
/usr/libexec/PlistBuddy -c "Set :DesktopViewSettings:IconViewSettings:showItemInfo true" ~/Library/Preferences/com.apple.finder.plist
/usr/libexec/PlistBuddy -c "Set :FK_StandardViewSettings:IconViewSettings:showItemInfo true" ~/Library/Preferences/com.apple.finder.plist
/usr/libexec/PlistBuddy -c "Set :StandardViewSettings:IconViewSettings:showItemInfo true" ~/Library/Preferences/com.apple.finder.plist
# Show item info at the bottom of the icons on the desktop
/usr/libexec/PlistBuddy -c "Set DesktopViewSettings:IconViewSettings:labelOnBottom true" ~/Library/Preferences/com.apple.finder.plist
# Enable snap-to-grid for icons on the desktop and in other icon views
/usr/libexec/PlistBuddy -c "Set :DesktopViewSettings:IconViewSettings:arrangeBy grid" ~/Library/Preferences/com.apple.finder.plist
/usr/libexec/PlistBuddy -c "Set :FK_StandardViewSettings:IconViewSettings:arrangeBy grid" ~/Library/Preferences/com.apple.finder.plist
/usr/libexec/PlistBuddy -c "Set :StandardViewSettings:IconViewSettings:arrangeBy grid" ~/Library/Preferences/com.apple.finder.plist
# Set grid spacing for icons on the desktop and in other icon views
/usr/libexec/PlistBuddy -c "Set :DesktopViewSettings:IconViewSettings:gridSpacing 40" ~/Library/Preferences/com.apple.finder.plist
/usr/libexec/PlistBuddy -c "Set :FK_StandardViewSettings:IconViewSettings:gridSpacing 40" ~/Library/Preferences/com.apple.finder.plist
/usr/libexec/PlistBuddy -c "Set :StandardViewSettings:IconViewSettings:gridSpacing 40" ~/Library/Preferences/com.apple.finder.plist
# Set the size of icons on the desktop and in other icon views
/usr/libexec/PlistBuddy -c "Set :DesktopViewSettings:IconViewSettings:iconSize 32" ~/Library/Preferences/com.apple.finder.plist
/usr/libexec/PlistBuddy -c "Set :FK_StandardViewSettings:IconViewSettings:iconSize 32" ~/Library/Preferences/com.apple.finder.plist
/usr/libexec/PlistBuddy -c "Set :StandardViewSettings:IconViewSettings:iconSize 32" ~/Library/Preferences/com.apple.finder.plist
# Use list view in all Finder windows by default
# Four-letter codes for the other view modes: `icnv`, `clmv`, `Flwv`
defaults write com.apple.finder FXPreferredViewStyle -string "Nlsv"
# Disable the warning before emptying the Trash
defaults write com.apple.finder WarnOnEmptyTrash -bool false
# Empty Trash securely by default
defaults write com.apple.finder EmptyTrashSecurely -bool true
# Enable AirDrop over Ethernet and on unsupported Macs running Lion
defaults write com.apple.NetworkBrowser BrowseAllInterfaces -bool true
# Show the ~/Library folder
chflags nohidden ~/Library
# Expand the following File Info panes:
# “General”, “Open with”, and “Sharing & Permissions”
defaults write com.apple.finder FXInfoPanesExpanded -dict \
General -bool true \
OpenWith -bool true \
Privileges -bool true
# =============================================================================
# Dock, Dashboard, and hot corners
# =============================================================================
# Set dark UI mode
defaults write NSGlobalDomain AppleInterfaceStyle -string "Dark"
# Enable highlight hover effect for the grid view of a stack (Dock)
defaults write com.apple.dock mouse-over-hilite-stack -bool true
# Set the icon size of Dock items to 40 pixels
defaults write com.apple.dock tilesize -int 40
# Change minimize/maximize window effect
defaults write com.apple.dock mineffect -string "scale"
# Minimize windows into their application’s icon
defaults write com.apple.dock minimize-to-application -bool true
# Enable spring loading for all Dock items
defaults write com.apple.dock enable-spring-load-actions-on-all-items -bool true
# Show indicator lights for open applications in the Dock
defaults write com.apple.dock show-process-indicators -bool true
# Don’t animate opening applications from the Dock
defaults write com.apple.dock launchanim -bool false
# Speed up Mission Control animations
defaults write com.apple.dock expose-animation-duration -float 0.1
# Don’t group windows by application in Mission Control
# (i.e. use the old Exposé behavior instead)
defaults write com.apple.dock expose-group-by-app -bool false
# Don’t show Dashboard as a Space
defaults write com.apple.dock dashboard-in-overlay -bool true
# Don’t automatically rearrange Spaces based on most recent use
defaults write com.apple.dock mru-spaces -bool false
# Speed up the auto-hiding Dock delay
defaults write com.apple.dock autohide-delay -float 0.15
# Automatically hide and show the Dock
defaults write com.apple.dock autohide -bool true
# =============================================================================
# Safari & WebKit
# =============================================================================
# Privacy: don’t send search queries to Apple
defaults write com.apple.Safari UniversalSearchEnabled -bool false
defaults write com.apple.Safari SuppressSearchSuggestions -bool true
# Press Tab to highlight each item on a web page
defaults write com.apple.Safari WebKitTabToLinksPreferenceKey -bool true
defaults write com.apple.Safari com.apple.Safari.ContentPageGroupIdentifier.WebKit2TabsToLinks -bool true
# Show the full URL in the address bar (note: this still hides the scheme)
defaults write com.apple.Safari ShowFullURLInSmartSearchField -bool true
# Set Safari’s home page to `about:blank` for faster loading
defaults write com.apple.Safari HomePage -string "about:blank"
# Prevent Safari from opening ‘safe’ files automatically after downloading
defaults write com.apple.Safari AutoOpenSafeDownloads -bool false
# Hide Safari’s bookmarks bar by default
defaults write com.apple.Safari ShowFavoritesBar -bool false
# Hide Safari’s sidebar in Top Sites
defaults write com.apple.Safari ShowSidebarInTopSites -bool false
# Disable Safari’s thumbnail cache for History and Top Sites
defaults write com.apple.Safari DebugSnapshotsUpdatePolicy -int 2
# Enable Safari’s debug menu
defaults write com.apple.Safari IncludeInternalDebugMenu -bool true
# Make Safari’s search banners default to Contains instead of Starts With
defaults write com.apple.Safari FindOnPageMatchesWordStartsOnly -bool false
# Remove useless icons from Safari’s bookmarks bar
defaults write com.apple.Safari ProxiesInBookmarksBar "()"
# Enable the Develop menu and the Web Inspector in Safari
defaults write com.apple.Safari IncludeDevelopMenu -bool true
defaults write com.apple.Safari WebKitDeveloperExtrasEnabledPreferenceKey -bool true
defaults write com.apple.Safari com.apple.Safari.ContentPageGroupIdentifier.WebKit2DeveloperExtrasEnabled -bool true
# Add a context menu item for showing the Web Inspector in web views
defaults write NSGlobalDomain WebKitDeveloperExtras -bool true
# =============================================================================
# Time Machine
# =============================================================================
# Prevent Time Machine from prompting to use new hard drives as backup volume
defaults write com.apple.TimeMachine DoNotOfferNewDisksForBackup -bool true
# =============================================================================
# Activity Monitor
# =============================================================================
# Show the main window when launching Activity Monitor
defaults write com.apple.ActivityMonitor OpenMainWindow -bool true
# Visualize CPU usage in the Activity Monitor Dock icon
defaults write com.apple.ActivityMonitor IconType -int 5
# Show all processes in Activity Monitor
defaults write com.apple.ActivityMonitor ShowCategory -int 0
# Sort Activity Monitor results by CPU usage
defaults write com.apple.ActivityMonitor SortColumn -string "CPUUsage"
defaults write com.apple.ActivityMonitor SortDirection -int 0
# =============================================================================
# Address Book, Dashboard, iCal, TextEdit, and Disk Utility
# =============================================================================
# Enable the debug menu in Address Book
defaults write com.apple.addressbook ABShowDebugMenu -bool true
# Enable Dashboard dev mode (allows keeping widgets on the desktop)
defaults write com.apple.dashboard devmode -bool true
# Enable the debug menu in iCal (pre-10.8)
defaults write com.apple.iCal IncludeDebugMenu -bool true
# Use plain text mode for new TextEdit documents
defaults write com.apple.TextEdit RichText -int 0
# Open and save files as UTF-8 in TextEdit
defaults write com.apple.TextEdit PlainTextEncoding -int 4
defaults write com.apple.TextEdit PlainTextEncodingForWrite -int 4
# Set tab width to 2 instead of the default 8
defaults write com.apple.TextEdit "TabWidth" '2'
# Enable the debug menu in Disk Utility
defaults write com.apple.DiskUtility DUDebugMenuEnabled -bool true
defaults write com.apple.DiskUtility advanced-image-options -bool true
# =============================================================================
# Mac App Store
# =============================================================================
# Enable the WebKit Developer Tools in the Mac App Store
defaults write com.apple.appstore WebKitDeveloperExtras -bool true
# Enable Debug Menu in the Mac App Store
defaults write com.apple.appstore ShowDebugMenu -bool true
# Enable auto-update
defaults write com.apple.commerce AutoUpdate -bool true
# =============================================================================
# Messages
# =============================================================================
# Disable smart quotes as it’s annoying for messages that contain code
defaults write com.apple.messageshelper.MessageController SOInputLineSettings -dict-add "automaticQuoteSubstitutionEnabled" -bool false
# Disable continuous spell checking
defaults write com.apple.messageshelper.MessageController SOInputLineSettings -dict-add "continuousSpellCheckingEnabled" -bool false
# =============================================================================
# Google Chrome & Google Chrome Canary
# =============================================================================
# Use the system-native print preview dialog
defaults write com.google.Chrome DisablePrintPreview -bool true
defaults write com.google.Chrome.canary DisablePrintPreview -bool true
# Expand the print dialog by default
defaults write com.google.Chrome PMPrintingExpandedStateForPrint2 -bool true
defaults write com.google.Chrome.canary PMPrintingExpandedStateForPrint2 -bool true
# Disable backswipe navigation
defaults write com.google.Chrome AppleEnableSwipeNavigateWithScrolls -bool false
# =============================================================================
# Photos
# =============================================================================
# Prevent Photos from opening automatically when devices are plugged in
defaults -currentHost write com.apple.ImageCapture disableHotPlug -bool true
# =============================================================================
# Kill affected applications
# =============================================================================
# Restart every app whose preferences were changed above so the new settings
# take effect. Output is discarded because some apps may not be running and
# killall would otherwise print "No matching processes".
for app in "Activity Monitor" "Address Book" "Calendar" "Contacts" "cfprefsd" \
    "Dock" "Finder" "Google Chrome" "Mail" "Messages" "Safari" "SizeUp" \
    "Spectacle" "SystemUIServer" "iCal"; do
    killall "${app}" > /dev/null 2>&1
done
echo "Done."
|
# file: src/bash/qto/funcs/run-unit-tests.func.sh
# v0.7.8
# ---------------------------------------------------------
# call all the unit tests - fail if even one fails ...
# ---------------------------------------------------------
# Run every perl unit test under src/perl/qto/t/lib/Qto/App (excluding
# benchmarks); abort with the failing test's exit code on the first failure.
doRunUnitTests(){
   export QTO_NO_AUTH=1
   test -z "${PROJ_INSTANCE_DIR-}" && PROJ_INSTANCE_DIR="$PRODUCT_INSTANCE_DIR"
   source "$PROJ_INSTANCE_DIR/.env" ; env_type=$ENV_TYPE
   do_export_json_section_vars "$PROJ_INSTANCE_DIR/cnf/env/$env_type.env.json" '.env.db'
   do_export_json_section_vars "$PROJ_INSTANCE_DIR/cnf/env/$env_type.env.json" '.env.app'

   do_log "INFO START running the unit tests"
   while read -r f ; do
      do_log "INFO START unit test for $f"
      perl "$f"
      # Capture perl's exit status immediately. The original
      # `test $? -ne 0 && do_exit $? ...` passed the exit status of `test`
      # itself (always 0 on the failure path) to do_exit, not perl's code.
      rc=$?
      test $rc -ne 0 && do_exit $rc " the tests in the $f failed !!!"
      do_log "INFO STOP unit test for $f"
      sleep 1
      clearTheScreen
   done < <(find src/perl/qto/t/lib/Qto/App -name '*.t' -type f |grep -v benchmarks |sort)
   export QTO_NO_AUTH=0
}
|
#!/usr/bin/env bash
# Test: generate_checksums should fail without --filename and, when md5sum or
# sha1sum is available, produce "md5:sha1" for the sample file.
# NOTE(review): assert_*, detail, generate_checksums and FAIL are provided by
# the surrounding test framework — verify they are sourced before this runs.
samplefile='test_base_setup.sh'
assert_not_empty "${samplefile}"
assert_is_file "${samplefile}"
# Calling with no arguments must fail and yield the empty "md5:sha1" skeleton.
answer="$( generate_checksums )"
assert_failure $?
assert_equals ':' "${answer}"
# Happy path: checksum the sample file; keep the status before running
# anything else, since later commands clobber $?.
answer="$( generate_checksums --filename "${samplefile}" )"
RC=$?
# Probe for the checksum tools (backslash bypasses any 'which' alias).
\which 'md5sum' >/dev/null 2>&1
has_md5=$?
\which 'sha1sum' >/dev/null 2>&1
has_sha1=$?
# Only assert on the output when at least one checksum tool exists.
if [ "${has_md5}" -ne "${FAIL}" ] || [ "${has_sha1}" -ne "${FAIL}" ]
then
    assert_success "${RC}"
    assert_not_equals ':' "${answer}"
    detail "Checksum --> ${answer}"
    # Expected digests of the pinned sample file (md5:sha1).
    checksums='5d879b5c37f39391dc0afa5ad6581713:9d4982b4270c5b103980e1f6c710614466799711'
    assert_equals "${checksums}" "${answer}"
fi
# cdr, add-zsh-hook
# Track recently visited directories: chpwd_recent_dirs records every
# directory change so `cdr` can jump back to them.
autoload -Uz chpwd_recent_dirs cdr add-zsh-hook
add-zsh-hook chpwd chpwd_recent_dirs
#cdr
# zstyle ':completion:*' recent-dirs-insert both
# zstyle ':chpwd:*' recent-dirs-max 500
# zstyle ':chpwd:*' recent-dirs-default true
# zstyle ':chpwd:*' recent-dirs-file "$HOME/.cache/shell/chpwd-recent-dirs"
# zstyle ':chpwd:*' recent-dirs-pushd true
# Register peco widgets as ZLE widgets so they can be bound to keys.
# NOTE(review): the peco-* functions must be defined elsewhere in the zsh
# config before these registrations take effect — verify.
zle -N peco-select-history
zle -N peco-go-to-dir
zle -N peco-select-gitadd
# # --------------------------------------------------------------------
# # Completion settings
# # --------------------------------------------------------------------
# autoload predict-on
# #predict-on
# # Hilight ls command
# export LS_COLORS='no=00:fi=00:di=01;36:ln=36:pi=31:so=33:bd=44;37:cd=44;37:ex=01;32:mi=00:or=36'
# export LSCOLORS=GxgxdxbxCxegedabagacad
# zstyle ':completion:*' list-colors ${(s.:.)LS_COLORS}
# # Ignore upper/lower cases
# zstyle ':completion:*' matcher-list '' 'm:{a-z}={A-Z}' '+m:{a-z}={A-Z}'
# # Complete PID for killing
# zstyle ':completion:*:processes' command "ps au"
# zstyle ':completion:*:processes' menu yes select=2
# # Set separator between lists and descriptions
# zstyle ':completion:*' list-separator '-->'
# # Suggest typoed commands
# setopt correct
# # Pack lists
# setopt list_packed
# # Enable complete for arguments
# setopt magic_equal_subst
# # Enable brace expansion
# setopt brace_ccl
# # --------------------------------------------------------------------
# # Delimiter settings
# # --------------------------------------------------------------------
# autoload -Uz select-word-style
# select-word-style default
# # Set delimiter characters
# zstyle ':zle:*' word-chars ' /=;@:{}[]()<>,|.'
# zstyle ':zle:*' word-style unspecified
# # --------------------------------------------------------------------
# # Command history
# # --------------------------------------------------------------------
# HISTFILE=~/.zsh_history
# HISTSIZE=10000
# SAVEHIST=10000
# setopt hist_ignore_dups
# setopt hist_ignore_all_dups
# setopt hist_reduce_blanks
# #setopt share_history
# zshaddhistory() {
# whence ${${(z)1}[1]} >| /dev/null || return 1
# }
# # --------------------------------------------------------------------
# # Make cd comfortable
# # --------------------------------------------------------------------
# setopt auto_cd
# setopt auto_pushd
# setopt pushd_ignore_dups
# # --------------------------------------------------------------------
# # Others
# # --------------------------------------------------------------------
# # Enable hook functions
# autoload -Uz add-zsh-hook
# #add-zsh-hook preexec complete_action
# # Prevent alert
# setopt no_beep
# # Enable keymap 'Ctrl+q' on Vim
# stty -ixon
# unsetopt bg_nice
# autoload -Uz zmv
|
<filename>src/string_handle/Boj16171.java<gh_stars>1-10
package string_handle;
import java.io.BufferedReader;
import java.io.InputStreamReader;
/**
*
* @author minchoba
* 백준 16171번: 나는 친구가 적다. (small)
*
* @see https://www.acmicpc.net/problem/16171/
*
*/
public class Boj16171 {
    /**
     * Reads two lines from stdin: a text possibly padded with digits, and a
     * friend's name. Prints 1 if the name occurs in the text once all digits
     * are removed, otherwise 0.
     */
    public static void main(String[] args) throws Exception {
        // Buffered read of stdin.
        BufferedReader br = new BufferedReader(new InputStreamReader(System.in));

        // Keep only the non-digit characters of the first line.
        // PERF FIX: the original appended to a String with += inside the
        // loop, which copies the whole string each iteration (O(n^2));
        // StringBuilder makes this O(n).
        StringBuilder filtered = new StringBuilder();
        for (char w : br.readLine().toCharArray()) {
            if (!(w >= '0' && w <= '9')) {
                filtered.append(w);
            }
        }

        // 1 if the second input line is a substring of the filtered text, else 0.
        System.out.println(filtered.toString().contains(br.readLine()) ? 1 : 0);
    }
}
|
use std::collections::HashMap;
/// A pool of values keyed by monotonically increasing ids.
///
/// Each `install` call runs a closure once and stores its result under a
/// fresh id, so ids double as installation order.
struct Pool<T> {
    closures: HashMap<usize, T>,
    next_id: usize,
}

impl<T> Pool<T> {
    /// Creates an empty pool; ids are handed out starting from 0.
    fn new() -> Self {
        Pool {
            closures: HashMap::new(),
            next_id: 0,
        }
    }

    /// Runs `closure` once, stores its result under a fresh id, and returns
    /// that id.
    fn install<F>(&mut self, closure: F) -> usize
    where
        F: Fn() -> T,
    {
        let id = self.next_id;
        self.closures.insert(id, closure());
        self.next_id += 1;
        id
    }

    /// Returns the id of the most recently installed value satisfying
    /// `predicate`, or `None` if no value matches.
    ///
    /// BUG FIX: the original iterated the HashMap and kept the last match
    /// *seen*; HashMap iteration order is unspecified, so the result was
    /// nondeterministic. "Last installed" means the matching entry with the
    /// greatest id, computed deterministically here with `max()`.
    fn find_last<P>(&self, predicate: P) -> Option<usize>
    where
        P: Fn(&T) -> bool,
    {
        self.closures
            .iter()
            .filter(|(_, value)| predicate(value))
            .map(|(id, _)| *id)
            .max()
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // Smoke test: ids are assigned sequentially and find_last locates the
    // most recently installed matching value.
    #[test]
    fn test_pool() {
        let mut pool = Pool::new();
        // Three installs receive ids 0, 1, 2 in order.
        let _ = pool.install(|| 10);
        let _ = pool.install(|| 20);
        let _ = pool.install(|| 30);
        // NOTE(review): ids 1 (value 20) and 2 (value 30) both satisfy
        // x > 15; this assertion relies on find_last returning the highest
        // matching id. If find_last iterates the HashMap directly, iteration
        // order is unspecified and this test can be flaky — verify.
        let last_id = pool.find_last(|&x| x > 15);
        assert_eq!(last_id, Some(2));
    }
}
<filename>src/ReactCodepenBody.tsx
import * as React from 'react';
import LoaderContent from "./LoaderContent";
import {PropsTypes} from "./ReactCodepen";
import LoadedContent from "./LoadedContent";
interface Props extends Pick<PropsTypes, "loader" | "user" | "hash" | "title" | "isLoaded" | "isLoading" | "error"> {
}
/**
 * Renders the body of the Codepen embed.
 *
 * Selection rules (unchanged from the original):
 *  - once loaded, show the embedded pen;
 *  - while not loaded, show the supplied loader component if any;
 *  - otherwise render nothing.
 */
function ReactCodepenBody(props: Props) {
    const {isLoaded, error, hash, isLoading, loader, title, user} = props;

    if (isLoaded) {
        return <LoadedContent hash={hash} user={user} title={title}/>;
    }

    if (loader) {
        return <LoaderContent isLoading={isLoading} error={error} loader={loader}/>;
    }

    return null;
}
export default ReactCodepenBody;
<reponame>Julianrabino/supermarket.front<filename>src/app/config/config.model.ts
// Application configuration shape, loaded at startup.
// NOTE(review): field names are Spanish domain terms (venta = sale,
// compra = purchase, empleado = employee) — inferred from names; verify
// against the Bonita process definitions.
export interface Config {
  // Endpoint of the users service.
  usersUrl: string;
  // Bonita BPM integration settings.
  bonita: {
    // REST endpoints of the Bonita engine.
    urls: {
      cases: string;
      humanTasks: string;
      activities: string;
      caseVariable: string;
      businessDataVenta: string;
      loginService: string;
      logoutService: string;
      archivedCase: string;
    },
    // Names of the Bonita process variables used by the purchase flow.
    variables: {
      nroDocumento: string;
      productos: string;
      productIdCompra: string;
      cuponCompra: string;
      finCompra: string;
      ventaId: string;
      esEmpleado: string;
    },
    // Names of the human tasks in the process.
    tasks: {
      iniciarCompra: string;
      finalizarCompra: string;
    },
    processDefinitionId: string;
    loginUsername: string;
    loginPassword: string;
    // HTTP header carrying the Bonita API token.
    apiTokenHeader: string;
    humanTaskAssignedId: string;
    // Polling configuration for task/case status checks.
    cantidadIntentosPolling: number;
    msDelayPolling: number;
    reintentoPolling: boolean;
    cantidadElementosPagina: number;
  };
  // Keys under which session state is persisted (e.g. in storage).
  sessionKeys: {
    currentUser: string;
    currentBonitaApiToken: string;
    currentCase: string;
    currentProducts: string;
    currentCart: string;
    currentVenta: string;
    currentError: string;
  };
}
|
class Script(Subject):
    """Main script to run the scaling algorithm.

    Subclasses the observer-pattern ``Subject`` and registers the events it
    can emit. NOTE(review): ``params``, ``experiments`` and ``reflections``
    are accepted but not stored by ``__init__`` — confirm whether that is
    intentional.
    """

    def __init__(self, params, experiments, reflections):
        # Register the observable events this script can notify about.
        super(Script, self).__init__(
            events=["merging_statistics", "run_script", "run_scale_and_filter"]
        )
        self.scaler = None
        self.scaled_miller_array = None

    def perform_scaling_algorithm(self, params, experiments, reflections):
        """Run the scaling algorithm on the given experiments/reflections.

        BUG FIX: the original body contained only comments, which is a
        SyntaxError in Python (a function needs at least one statement).
        This docstring makes the stub valid; the implementation is still to
        be provided.
        """

    def handle_event(self, event):
        """Dispatch on an event name emitted by the Subject machinery."""
        if event == "merging_statistics":
            # TODO: handle the "merging_statistics" event
            pass
        elif event == "run_script":
            # TODO: handle the "run_script" event
            pass
        elif event == "run_scale_and_filter":
            # TODO: handle the "run_scale_and_filter" event
            pass
import threading
def function():
    """Create a thread-safe counter.

    Returns:
        A zero-argument closure that, on each call, increments a shared
        counter by one and returns the new value (1, 2, 3, ...).
    """
    num = 0
    lock = threading.Lock()

    def increment():
        nonlocal num
        with lock:
            num += 1
            # BUG FIX: return while still holding the lock. The original
            # read `num` after releasing the lock, so a concurrent caller
            # could increment in between and two threads could observe the
            # same (or a skipped) value.
            return num

    return increment
package xyz.brassgoggledcoders.opentransport.modules.modularutilities;
import xyz.brassgoggledcoders.modularutilities.modules.ender.EnderModule;
import xyz.brassgoggledcoders.opentransport.api.blockwrappers.IHasWrappers;
import xyz.brassgoggledcoders.opentransport.blocks.BlockWrapperBase;
// Registers OpenTransport block wrappers for the Modular Utilities mod.
public class MoUBlockWrappers implements IHasWrappers {
    @Override
    public void registerWrappers() {
        // Wrap the Ender Module's ender proxy block so it can be carried.
        // NOTE(review): assumes EnderModule.ender_proxy has been initialized
        // before this is called — verify the mod loading order.
        new BlockWrapperBase(EnderModule.ender_proxy).register();
    }
}
|
<gh_stars>10-100
package io.opensphere.laf.dark;
import java.awt.Graphics;
import javax.swing.JComponent;
import javax.swing.JToolTip;
import javax.swing.border.Border;
import javax.swing.plaf.ComponentUI;
import javax.swing.plaf.metal.MetalToolTipUI;
public class OSDarkLAFToolTipUI extends MetalToolTipUI
{
protected JToolTip aToolTip;
/**
* Utility method used to create the ComponentUI.
*
* @param pComponent the component for which the UI will be created.
* @return a new ComponentUI created for the supplied component.
*/
public static ComponentUI createUI(JComponent pComponent)
{
return new OSDarkLAFToolTipUI(pComponent);
}
public OSDarkLAFToolTipUI(JComponent pComponent)
{
super();
aToolTip = (JToolTip)pComponent;
aToolTip.setOpaque(false);
}
@Override
public void paint(Graphics graph, JComponent jComp)
{
int width = aToolTip.getWidth();
int height = aToolTip.getHeight();
final Border tipBorder = aToolTip.getBorder();
if (null != tipBorder)
{
width -= tipBorder.getBorderInsets(aToolTip).right;
height -= tipBorder.getBorderInsets(aToolTip).bottom;
}
graph.setColor(aToolTip.getBackground());
graph.fillRect(0, 0, width, height);
super.paint(graph, jComp);
}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.