text stringlengths 1 1.05M |
|---|
/*
* Copyright © 2019 Lisk Foundation
*
* See the LICENSE file at the top-level directory of this distribution
* for licensing information.
*
* Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation,
* no part of this software, including this file, may be copied, modified,
* propagated, or distributed except according to the terms contained in the
* LICENSE file.
*
* Removal or modification of this copyright notice is prohibited.
*/
import { KVStore, formatInt, getFirstPrefix, getLastPrefix, NotFoundError } from '@liskhq/lisk-db';
import { codec } from '@liskhq/lisk-codec';
import { getAddressFromPublicKey, hash } from '@liskhq/lisk-cryptography';
import { RawBlock, StateDiff } from '../types';
import { StateStore } from '../state_store';
import {
DB_KEY_BLOCKS_ID,
DB_KEY_BLOCKS_HEIGHT,
DB_KEY_TRANSACTIONS_BLOCK_ID,
DB_KEY_TRANSACTIONS_ID,
DB_KEY_TEMPBLOCKS_HEIGHT,
DB_KEY_ACCOUNTS_ADDRESS,
DB_KEY_CHAIN_STATE,
DB_KEY_CONSENSUS_STATE,
DB_KEY_DIFF_STATE,
} from './constants';
import { keyString } from '../utils';
import { stateDiffSchema } from '../schema';
/**
 * Low-level persistence layer for the chain. Wraps a {@link KVStore} and owns
 * the key layout for block headers, full blocks, transactions, temporary
 * blocks, accounts, chain/consensus state, and per-height state diffs.
 */
export class Storage {
	private readonly _db: KVStore;

	public constructor(db: KVStore) {
		this._db = db;
	}

	/*
	Block headers
	*/
	/** Returns the encoded block header stored under the given block ID. Throws NotFoundError if absent. */
	public async getBlockHeaderByID(id: Buffer): Promise<Buffer> {
		const block = await this._db.get(`${DB_KEY_BLOCKS_ID}:${keyString(id)}`);
		return block;
	}

	/**
	 * Returns the encoded headers for the given block IDs.
	 * IDs that do not exist are silently skipped, so the result may be
	 * shorter than the input list; order of found headers follows the input.
	 */
	public async getBlockHeadersByIDs(arrayOfBlockIds: ReadonlyArray<Buffer>): Promise<Buffer[]> {
		const blocks = [];
		for (const id of arrayOfBlockIds) {
			try {
				const block = await this._db.get(`${DB_KEY_BLOCKS_ID}:${keyString(id)}`);
				blocks.push(block);
			} catch (dbError) {
				if (dbError instanceof NotFoundError) {
					continue;
				}
				throw dbError;
			}
		}
		return blocks;
	}

	/** Resolves a height to its block ID via the height index, then fetches the header. */
	public async getBlockHeaderByHeight(height: number): Promise<Buffer> {
		const stringHeight = formatInt(height);
		const id = await this._db.get(`${DB_KEY_BLOCKS_HEIGHT}:${stringHeight}`);
		return this.getBlockHeaderByID(id);
	}

	/**
	 * Returns headers for heights in [fromHeight, toHeight], inclusive.
	 * The read stream is reversed, so results come back in DESCENDING
	 * height order. Missing headers are skipped (see getBlockHeadersByIDs).
	 */
	public async getBlockHeadersByHeightBetween(
		fromHeight: number,
		toHeight: number,
	): Promise<Buffer[]> {
		const stream = this._db.createReadStream({
			gte: `${DB_KEY_BLOCKS_HEIGHT}:${formatInt(fromHeight)}`,
			lte: `${DB_KEY_BLOCKS_HEIGHT}:${formatInt(toHeight)}`,
			reverse: true,
		});
		// Drain the height index stream into a list of block IDs.
		const blockIDs = await new Promise<Buffer[]>((resolve, reject) => {
			const ids: Buffer[] = [];
			stream
				.on('data', ({ value }: { value: Buffer }) => {
					ids.push(value);
				})
				.on('error', error => {
					reject(error);
				})
				.on('end', () => {
					resolve(ids);
				});
		});
		return this.getBlockHeadersByIDs(blockIDs);
	}

	/**
	 * Returns headers for an arbitrary list of heights.
	 * Heights with no stored block are silently skipped.
	 */
	public async getBlockHeadersWithHeights(heightList: ReadonlyArray<number>): Promise<Buffer[]> {
		const blocks = [];
		for (const height of heightList) {
			try {
				const block = await this.getBlockHeaderByHeight(height);
				blocks.push(block);
			} catch (dbError) {
				if (dbError instanceof NotFoundError) {
					continue;
				}
				throw dbError;
			}
		}
		return blocks;
	}

	/**
	 * Returns the header of the highest stored block.
	 * Throws NotFoundError when the height index is empty.
	 */
	public async getLastBlockHeader(): Promise<Buffer> {
		// Reverse scan with limit 1 yields the entry with the greatest height.
		const stream = this._db.createReadStream({
			gte: getFirstPrefix(DB_KEY_BLOCKS_HEIGHT),
			lte: getLastPrefix(DB_KEY_BLOCKS_HEIGHT),
			reverse: true,
			limit: 1,
		});
		const [blockID] = await new Promise<Buffer[]>((resolve, reject) => {
			const ids: Buffer[] = [];
			stream
				.on('data', ({ value }: { value: Buffer }) => {
					ids.push(value);
				})
				.on('error', error => {
					reject(error);
				})
				.on('end', () => {
					resolve(ids);
				});
		});
		if (!blockID) {
			throw new NotFoundError('Last block header not found');
		}
		return this.getBlockHeaderByID(blockID);
	}

	/*
	Extended blocks with transaction payload
	*/
	/** Returns the raw block (header + transaction payload) for a block ID. */
	public async getBlockByID(id: Buffer): Promise<RawBlock> {
		const blockHeader = await this.getBlockHeaderByID(id);
		const transactions = await this._getTransactions(id);
		return {
			header: blockHeader,
			payload: transactions,
		};
	}

	/** Returns raw blocks for the given IDs; missing blocks are silently skipped. */
	public async getBlocksByIDs(arrayOfBlockIds: ReadonlyArray<Buffer>): Promise<RawBlock[]> {
		const blocks = [];
		for (const id of arrayOfBlockIds) {
			try {
				const block = await this.getBlockByID(id);
				blocks.push(block);
			} catch (dbError) {
				if (dbError instanceof NotFoundError) {
					continue;
				}
				throw dbError;
			}
		}
		return blocks;
	}

	/**
	 * Returns the raw block at the given height.
	 * The block ID is recomputed as hash(header) rather than read from the
	 * index, avoiding a second index lookup.
	 */
	public async getBlockByHeight(height: number): Promise<RawBlock> {
		const header = await this.getBlockHeaderByHeight(height);
		const blockID = hash(header);
		const transactions = await this._getTransactions(blockID);
		return {
			header,
			payload: transactions,
		};
	}

	/**
	 * Returns raw blocks for heights in [fromHeight, toHeight], in the same
	 * (descending) order produced by getBlockHeadersByHeightBetween.
	 */
	public async getBlocksByHeightBetween(fromHeight: number, toHeight: number): Promise<RawBlock[]> {
		const headers = await this.getBlockHeadersByHeightBetween(fromHeight, toHeight);
		const blocks = [];
		for (const header of headers) {
			const blockID = hash(header);
			const transactions = await this._getTransactions(blockID);
			blocks.push({ header, payload: transactions });
		}
		return blocks;
	}

	/** Returns the highest stored block with its transaction payload. */
	public async getLastBlock(): Promise<RawBlock> {
		const header = await this.getLastBlockHeader();
		const blockID = hash(header);
		const transactions = await this._getTransactions(blockID);
		return {
			header,
			payload: transactions,
		};
	}

	/**
	 * Returns all temporary (fully-encoded) blocks in descending height order.
	 * Temp blocks are full blocks saved during deleteBlock(..., saveToTemp=true).
	 */
	public async getTempBlocks(): Promise<Buffer[]> {
		const stream = this._db.createReadStream({
			gte: getFirstPrefix(DB_KEY_TEMPBLOCKS_HEIGHT),
			lte: getLastPrefix(DB_KEY_TEMPBLOCKS_HEIGHT),
			reverse: true,
		});
		const tempBlocks = await new Promise<Buffer[]>((resolve, reject) => {
			const blocks: Buffer[] = [];
			stream
				.on('data', ({ value }: { value: Buffer }) => {
					blocks.push(value);
				})
				.on('error', error => {
					reject(error);
				})
				.on('end', () => {
					resolve(blocks);
				});
		});
		return tempBlocks;
	}

	/** True when no temporary blocks are stored (checks at most one entry). */
	public async isTempBlockEmpty(): Promise<boolean> {
		const stream = this._db.createReadStream({
			gte: getFirstPrefix(DB_KEY_TEMPBLOCKS_HEIGHT),
			lte: getLastPrefix(DB_KEY_TEMPBLOCKS_HEIGHT),
			limit: 1,
		});
		const tempBlocks = await new Promise<Buffer[]>((resolve, reject) => {
			const blocks: Buffer[] = [];
			stream
				.on('data', ({ value }: { value: Buffer }) => {
					blocks.push(value);
				})
				.on('error', error => {
					reject(error);
				})
				.on('end', () => {
					resolve(blocks);
				});
		});
		return tempBlocks.length === 0;
	}

	/** Deletes every temporary block entry. */
	public async clearTempBlocks(): Promise<void> {
		await this._db.clear({
			gte: getFirstPrefix(DB_KEY_TEMPBLOCKS_HEIGHT),
			lte: getLastPrefix(DB_KEY_TEMPBLOCKS_HEIGHT),
		});
	}

	/** True when a block header exists under the given block ID. */
	public async isBlockPersisted(blockID: Buffer): Promise<boolean> {
		return this._db.exists(`${DB_KEY_BLOCKS_ID}:${keyString(blockID)}`);
	}

	/*
	ChainState
	*/
	/** Returns the chain-state value for a key, or undefined when the key is absent. */
	public async getChainState(key: string): Promise<Buffer | undefined> {
		try {
			const value = await this._db.get(`${DB_KEY_CHAIN_STATE}:${key}`);
			return value;
		} catch (error) {
			if (error instanceof NotFoundError) {
				return undefined;
			}
			throw error;
		}
	}

	/*
	ConsensusState
	*/
	/** Returns the consensus-state value for a key, or undefined when the key is absent. */
	public async getConsensusState(key: string): Promise<Buffer | undefined> {
		try {
			const value = await this._db.get(`${DB_KEY_CONSENSUS_STATE}:${key}`);
			return value;
		} catch (error) {
			if (error instanceof NotFoundError) {
				return undefined;
			}
			throw error;
		}
	}

	// TODO: Remove in next version
	// Warning: This function should never be used. This exist only for migration purpose.
	// Specifically, only to set genesis state between 5.1.2 => 5.1.3
	public async setConsensusState(key: string, val: Buffer): Promise<void> {
		await this._db.put(`${DB_KEY_CONSENSUS_STATE}:${key}`, val);
	}

	/*
	Accounts
	*/
	/** Returns the encoded account stored under the given address. Throws NotFoundError if absent. */
	public async getAccountByAddress(address: Buffer): Promise<Buffer> {
		const account = await this._db.get(`${DB_KEY_ACCOUNTS_ADDRESS}:${keyString(address)}`);
		return account;
	}

	/** Derives addresses from the public keys and fetches the matching accounts. */
	public async getAccountsByPublicKey(arrayOfPublicKeys: ReadonlyArray<Buffer>): Promise<Buffer[]> {
		const addresses = arrayOfPublicKeys.map(getAddressFromPublicKey);
		return this.getAccountsByAddress(addresses);
	}

	/** Returns accounts for the given addresses; unknown addresses are silently skipped. */
	public async getAccountsByAddress(arrayOfAddresses: ReadonlyArray<Buffer>): Promise<Buffer[]> {
		const accounts = [];
		for (const address of arrayOfAddresses) {
			try {
				const account = await this.getAccountByAddress(address);
				accounts.push(account);
			} catch (dbError) {
				if (dbError instanceof NotFoundError) {
					continue;
				}
				throw dbError;
			}
		}
		return accounts;
	}

	/*
	Transactions
	*/
	/** Returns the encoded transaction stored under the given ID. Throws NotFoundError if absent. */
	public async getTransactionByID(id: Buffer): Promise<Buffer> {
		const transaction = await this._db.get(`${DB_KEY_TRANSACTIONS_ID}:${keyString(id)}`);
		return transaction;
	}

	/** Returns transactions for the given IDs; unknown IDs are silently skipped. */
	public async getTransactionsByIDs(
		arrayOfTransactionIds: ReadonlyArray<Buffer>,
	): Promise<Buffer[]> {
		const transactions = [];
		for (const id of arrayOfTransactionIds) {
			try {
				const transaction = await this.getTransactionByID(id);
				transactions.push(transaction);
			} catch (dbError) {
				if (dbError instanceof NotFoundError) {
					continue;
				}
				throw dbError;
			}
		}
		return transactions;
	}

	/** True when a transaction exists under the given ID. */
	public async isTransactionPersisted(transactionId: Buffer): Promise<boolean> {
		return this._db.exists(`${DB_KEY_TRANSACTIONS_ID}:${keyString(transactionId)}`);
	}

	/*
	Save Block
	*/
	/**
	 * Atomically persists a block: header by ID, ID by height, each
	 * transaction by ID, and the concatenated transaction-ID list by block
	 * ID. The StateStore's pending changes (including the state diff for
	 * this height) are finalized into the same batch. Optionally removes the
	 * temp-block entry at this height.
	 *
	 * After the batch commits, state diffs below finalizedHeight are pruned
	 * (outside the batch — see _cleanUntil).
	 */
	public async saveBlock(
		id: Buffer,
		height: number,
		finalizedHeight: number,
		header: Buffer,
		payload: { id: Buffer; value: Buffer }[],
		stateStore: StateStore,
		removeFromTemp = false,
	): Promise<void> {
		const heightStr = formatInt(height);
		const batch = this._db.batch();
		batch.put(`${DB_KEY_BLOCKS_ID}:${keyString(id)}`, header);
		batch.put(`${DB_KEY_BLOCKS_HEIGHT}:${heightStr}`, id);
		if (payload.length > 0) {
			const ids = [];
			for (const { id: txID, value } of payload) {
				ids.push(txID);
				batch.put(`${DB_KEY_TRANSACTIONS_ID}:${keyString(txID)}`, value);
			}
			// Block-to-transactions index: raw concatenation of the fixed-size IDs.
			batch.put(`${DB_KEY_TRANSACTIONS_BLOCK_ID}:${keyString(id)}`, Buffer.concat(ids));
		}
		if (removeFromTemp) {
			batch.del(`${DB_KEY_TEMPBLOCKS_HEIGHT}:${heightStr}`);
		}
		stateStore.finalize(heightStr, batch);
		await batch.write();
		await this._cleanUntil(finalizedHeight);
	}

	/**
	 * Atomically deletes a block and reverts the state changes it applied,
	 * using the stored diff for its height: created keys are deleted,
	 * deleted/updated keys are restored to their previous values. Optionally
	 * stores the full encoded block as a temp block (used during chain sync).
	 *
	 * Returns the decoded diff that was reverted.
	 */
	public async deleteBlock(
		id: Buffer,
		height: number,
		txIDs: Buffer[],
		fullBlock: Buffer,
		stateStore: StateStore,
		saveToTemp = false,
	): Promise<StateDiff> {
		const batch = this._db.batch();
		const heightStr = formatInt(height);
		batch.del(`${DB_KEY_BLOCKS_ID}:${keyString(id)}`);
		batch.del(`${DB_KEY_BLOCKS_HEIGHT}:${heightStr}`);
		if (txIDs.length > 0) {
			for (const txID of txIDs) {
				batch.del(`${DB_KEY_TRANSACTIONS_ID}:${keyString(txID)}`);
			}
			batch.del(`${DB_KEY_TRANSACTIONS_BLOCK_ID}:${keyString(id)}`);
		}
		if (saveToTemp) {
			batch.put(`${DB_KEY_TEMPBLOCKS_HEIGHT}:${heightStr}`, fullBlock);
		}
		// Take the diff to revert back states
		const diffKey = `${DB_KEY_DIFF_STATE}:${heightStr}`;
		// If there is no diff, the key might not exist
		// NOTE(review): despite the comment above, a missing diff key makes
		// this get() throw NotFoundError uncaught — presumably saveBlock
		// always writes a diff for every height; confirm against callers.
		const stateDiff = await this._db.get(diffKey);
		const {
			created: createdStates,
			updated: updatedStates,
			deleted: deletedStates,
		} = codec.decode<StateDiff>(stateDiffSchema, stateDiff);
		// Delete all the newly created states
		for (const key of createdStates) {
			batch.del(key);
		}
		// Revert all deleted values
		for (const { key, value: previousValue } of deletedStates) {
			batch.put(key, previousValue);
		}
		// Restore updated keys to their pre-block values.
		for (const { key, value: previousValue } of updatedStates) {
			batch.put(key, previousValue);
		}
		stateStore.finalize(heightStr, batch);
		// Delete stored diff at particular height
		batch.del(diffKey);
		// Persist the whole batch
		await batch.write();
		return {
			deleted: deletedStates,
			created: createdStates,
			updated: updatedStates,
		};
	}

	// This function is out of batch, but even if it fails, it will run again next time
	// Prunes state diffs for every height strictly below `height` (finalized
	// blocks can no longer be reverted, so their diffs are dead weight).
	private async _cleanUntil(height: number): Promise<void> {
		await this._db.clear({
			gte: `${DB_KEY_DIFF_STATE}:${formatInt(0)}`,
			lt: `${DB_KEY_DIFF_STATE}:${formatInt(height)}`,
		});
	}

	/**
	 * Returns the encoded transactions of a block. The block-to-transactions
	 * index stores the IDs as one concatenated buffer sliced in 32-byte
	 * chunks (transaction IDs are 32-byte hashes). A missing index entry
	 * means the block has no payload and yields an empty array.
	 */
	private async _getTransactions(blockID: Buffer): Promise<Buffer[]> {
		const txIDs: Buffer[] = [];
		try {
			const ids = await this._db.get(`${DB_KEY_TRANSACTIONS_BLOCK_ID}:${keyString(blockID)}`);
			const idLength = 32;
			for (let i = 0; i < ids.length; i += idLength) {
				txIDs.push(ids.slice(i, i + idLength));
			}
		} catch (error) {
			if (!(error instanceof NotFoundError)) {
				throw error;
			}
		}
		if (txIDs.length === 0) {
			return [];
		}
		const transactions = [];
		for (const txID of txIDs) {
			const tx = await this._db.get(`${DB_KEY_TRANSACTIONS_ID}:${keyString(txID)}`);
			transactions.push(tx);
		}
		return transactions;
	}
}
|
def longest_zeros_seq(bin_string):
    """Return the length of the longest run of consecutive '0' characters.

    Args:
        bin_string: An iterable of characters, typically a binary string
            such as "100110". Any character other than '0' terminates a run.

    Returns:
        int: Length of the longest zero run; 0 for an empty string or a
        string containing no zeros.
    """
    longest_seq = 0
    zero_seq_len = 0
    for bit in bin_string:
        if bit == '0':
            zero_seq_len += 1
        else:
            longest_seq = max(longest_seq, zero_seq_len)
            zero_seq_len = 0
    # Bug fix: a zero run ending at the end of the string was never compared
    # against longest_seq (e.g. "100" previously returned 0 instead of 2).
    return max(longest_seq, zero_seq_len)
<gh_stars>100-1000
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.sshd.common.util;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Proxy;
/**
* @author <a href="mailto:<EMAIL>">Apache MINA SSHD Project</a>
*/
public final class ProxyUtils {
private ProxyUtils() {
throw new UnsupportedOperationException("No instance");
}
public static <T> T newProxyInstance(Class<T> type, InvocationHandler handler) {
return newProxyInstance(type.getClassLoader(), type, handler);
}
public static <T> T newProxyInstance(ClassLoader cl, Class<T> type, InvocationHandler handler) {
Class<?>[] interfaces = { type };
Object wrapper = Proxy.newProxyInstance(cl, interfaces, handler);
return type.cast(wrapper);
}
public static Throwable unwrapInvocationThrowable(Throwable t) {
if (t instanceof InvocationTargetException) {
return unwrapInvocationThrowable(((InvocationTargetException) t).getTargetException());
} else {
return t;
}
}
}
|
# Build the production bundle and stage the publishable package in ./dist.
# Fail fast on any error: without `set -e`, a failed tsc/webpack build would
# still copy package.json/README/LICENSE into dist and publish stale output.
set -e

rm -rf ./dist
mkdir dist
NODE_ENV=production tsc --build
NODE_ENV=production webpack --display
cp ./packages/semantic-validator/package.json ./dist/package.json
cp ./README.md ./dist/README.md
cp ./LICENSE ./dist/LICENSE
from typing import Union, Dict, Tuple
def parse_config_file(file_path: str, setting_name: str) -> Union[str, Tuple[str, Dict[str, str]]]:
    """Locate a ``CONFIG_<SETTING_NAME>(...)`` block in a config file and
    return its default value, plus question defaults when the setting
    requires a value.

    Args:
        file_path: Path of the configuration file to read.
        setting_name: Setting name; matched case-insensitively via ``upper()``.

    Returns:
        The literal string ``"Setting not found"`` when no block matches;
        otherwise the block's ``'default'`` attribute, paired with a dict of
        question-name -> question-default when the block sets
        ``'require_value'``.
    """
    with open(file_path, 'r') as file:
        config_content = file.read()
    # Find the setting block with the given name.
    start_index = config_content.find(f"CONFIG_{setting_name.upper()}(")
    if start_index == -1:
        return "Setting not found"
    # NOTE(review): the first ')' is taken as the block end — a ')' nested
    # inside the block would truncate it early. Assumes blocks contain no
    # nested parentheses; TODO confirm against the actual config format.
    end_index = config_content.find(")", start_index)
    setting_block = config_content[start_index:end_index + 1]
    # SECURITY: exec() runs arbitrary code taken from the config file. Only
    # use this on fully trusted input; a real parser (regex or
    # ast.literal_eval on a restricted syntax) is strongly recommended.
    #
    # Bug fix: exec() cannot rebind a *local* variable inside a function in
    # Python 3, so the previous `exec(f"attributes = {setting_block}")` left
    # `attributes` as the empty dict it was initialised to and the function
    # always returned "". Capture the result through an explicit namespace.
    namespace: Dict[str, object] = {}
    exec(f"attributes = {setting_block}", globals(), namespace)
    attributes = namespace.get('attributes', {})
    # Extract the value and questions (if any) from the attributes.
    setting_value = attributes.get('default', "")
    questions = {}
    if 'questions' in attributes:
        for question_block in attributes['questions']:
            question_name = question_block[0]
            question_default = question_block[1].get('default', "")
            questions[question_name] = question_default
    if 'require_value' in attributes and attributes['require_value']:
        return setting_value, questions
    else:
        return setting_value
// Code generated by protoc-gen-go. DO NOT EDIT.
// source: daprclient.proto
package daprclient
import (
context "context"
fmt "fmt"
math "math"
proto "github.com/golang/protobuf/proto"
any "github.com/golang/protobuf/ptypes/any"
duration "github.com/golang/protobuf/ptypes/duration"
empty "github.com/golang/protobuf/ptypes/empty"
grpc "google.golang.org/grpc"
)
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf
// This is a compile-time assertion to ensure that this generated file
// is compatible with the proto package it is being compiled against.
// A compilation error at this line likely means your copy of the
// proto package needs to be updated.
const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package
type CloudEventEnvelope struct {
Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"`
Source string `protobuf:"bytes,2,opt,name=source,proto3" json:"source,omitempty"`
Type string `protobuf:"bytes,3,opt,name=type,proto3" json:"type,omitempty"`
SpecVersion string `protobuf:"bytes,4,opt,name=specVersion,proto3" json:"specVersion,omitempty"`
DataContentType string `protobuf:"bytes,5,opt,name=dataContentType,proto3" json:"dataContentType,omitempty"`
Topic string `protobuf:"bytes,6,opt,name=topic,proto3" json:"topic,omitempty"`
Data *any.Any `protobuf:"bytes,7,opt,name=data,proto3" json:"data,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *CloudEventEnvelope) Reset() { *m = CloudEventEnvelope{} }
func (m *CloudEventEnvelope) String() string { return proto.CompactTextString(m) }
func (*CloudEventEnvelope) ProtoMessage() {}
func (*CloudEventEnvelope) Descriptor() ([]byte, []int) {
return fileDescriptor_127d5244029ccc8f, []int{0}
}
func (m *CloudEventEnvelope) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_CloudEventEnvelope.Unmarshal(m, b)
}
func (m *CloudEventEnvelope) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_CloudEventEnvelope.Marshal(b, m, deterministic)
}
func (m *CloudEventEnvelope) XXX_Merge(src proto.Message) {
xxx_messageInfo_CloudEventEnvelope.Merge(m, src)
}
func (m *CloudEventEnvelope) XXX_Size() int {
return xxx_messageInfo_CloudEventEnvelope.Size(m)
}
func (m *CloudEventEnvelope) XXX_DiscardUnknown() {
xxx_messageInfo_CloudEventEnvelope.DiscardUnknown(m)
}
var xxx_messageInfo_CloudEventEnvelope proto.InternalMessageInfo
func (m *CloudEventEnvelope) GetId() string {
if m != nil {
return m.Id
}
return ""
}
func (m *CloudEventEnvelope) GetSource() string {
if m != nil {
return m.Source
}
return ""
}
func (m *CloudEventEnvelope) GetType() string {
if m != nil {
return m.Type
}
return ""
}
func (m *CloudEventEnvelope) GetSpecVersion() string {
if m != nil {
return m.SpecVersion
}
return ""
}
func (m *CloudEventEnvelope) GetDataContentType() string {
if m != nil {
return m.DataContentType
}
return ""
}
func (m *CloudEventEnvelope) GetTopic() string {
if m != nil {
return m.Topic
}
return ""
}
func (m *CloudEventEnvelope) GetData() *any.Any {
if m != nil {
return m.Data
}
return nil
}
type BindingEventEnvelope struct {
Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
Data *any.Any `protobuf:"bytes,2,opt,name=data,proto3" json:"data,omitempty"`
Metadata map[string]string `protobuf:"bytes,3,rep,name=metadata,proto3" json:"metadata,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *BindingEventEnvelope) Reset() { *m = BindingEventEnvelope{} }
func (m *BindingEventEnvelope) String() string { return proto.CompactTextString(m) }
func (*BindingEventEnvelope) ProtoMessage() {}
func (*BindingEventEnvelope) Descriptor() ([]byte, []int) {
return fileDescriptor_127d5244029ccc8f, []int{1}
}
func (m *BindingEventEnvelope) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_BindingEventEnvelope.Unmarshal(m, b)
}
func (m *BindingEventEnvelope) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_BindingEventEnvelope.Marshal(b, m, deterministic)
}
func (m *BindingEventEnvelope) XXX_Merge(src proto.Message) {
xxx_messageInfo_BindingEventEnvelope.Merge(m, src)
}
func (m *BindingEventEnvelope) XXX_Size() int {
return xxx_messageInfo_BindingEventEnvelope.Size(m)
}
func (m *BindingEventEnvelope) XXX_DiscardUnknown() {
xxx_messageInfo_BindingEventEnvelope.DiscardUnknown(m)
}
var xxx_messageInfo_BindingEventEnvelope proto.InternalMessageInfo
func (m *BindingEventEnvelope) GetName() string {
if m != nil {
return m.Name
}
return ""
}
func (m *BindingEventEnvelope) GetData() *any.Any {
if m != nil {
return m.Data
}
return nil
}
func (m *BindingEventEnvelope) GetMetadata() map[string]string {
if m != nil {
return m.Metadata
}
return nil
}
type BindingResponseEnvelope struct {
Data *any.Any `protobuf:"bytes,1,opt,name=data,proto3" json:"data,omitempty"`
To []string `protobuf:"bytes,2,rep,name=to,proto3" json:"to,omitempty"`
State []*State `protobuf:"bytes,3,rep,name=state,proto3" json:"state,omitempty"`
Concurrency string `protobuf:"bytes,4,opt,name=concurrency,proto3" json:"concurrency,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *BindingResponseEnvelope) Reset() { *m = BindingResponseEnvelope{} }
func (m *BindingResponseEnvelope) String() string { return proto.CompactTextString(m) }
func (*BindingResponseEnvelope) ProtoMessage() {}
func (*BindingResponseEnvelope) Descriptor() ([]byte, []int) {
return fileDescriptor_127d5244029ccc8f, []int{2}
}
func (m *BindingResponseEnvelope) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_BindingResponseEnvelope.Unmarshal(m, b)
}
func (m *BindingResponseEnvelope) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_BindingResponseEnvelope.Marshal(b, m, deterministic)
}
func (m *BindingResponseEnvelope) XXX_Merge(src proto.Message) {
xxx_messageInfo_BindingResponseEnvelope.Merge(m, src)
}
func (m *BindingResponseEnvelope) XXX_Size() int {
return xxx_messageInfo_BindingResponseEnvelope.Size(m)
}
func (m *BindingResponseEnvelope) XXX_DiscardUnknown() {
xxx_messageInfo_BindingResponseEnvelope.DiscardUnknown(m)
}
var xxx_messageInfo_BindingResponseEnvelope proto.InternalMessageInfo
func (m *BindingResponseEnvelope) GetData() *any.Any {
if m != nil {
return m.Data
}
return nil
}
func (m *BindingResponseEnvelope) GetTo() []string {
if m != nil {
return m.To
}
return nil
}
func (m *BindingResponseEnvelope) GetState() []*State {
if m != nil {
return m.State
}
return nil
}
func (m *BindingResponseEnvelope) GetConcurrency() string {
if m != nil {
return m.Concurrency
}
return ""
}
type InvokeEnvelope struct {
Method string `protobuf:"bytes,1,opt,name=method,proto3" json:"method,omitempty"`
Data *any.Any `protobuf:"bytes,2,opt,name=data,proto3" json:"data,omitempty"`
Metadata map[string]string `protobuf:"bytes,3,rep,name=metadata,proto3" json:"metadata,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *InvokeEnvelope) Reset() { *m = InvokeEnvelope{} }
func (m *InvokeEnvelope) String() string { return proto.CompactTextString(m) }
func (*InvokeEnvelope) ProtoMessage() {}
func (*InvokeEnvelope) Descriptor() ([]byte, []int) {
return fileDescriptor_127d5244029ccc8f, []int{3}
}
func (m *InvokeEnvelope) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_InvokeEnvelope.Unmarshal(m, b)
}
func (m *InvokeEnvelope) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_InvokeEnvelope.Marshal(b, m, deterministic)
}
func (m *InvokeEnvelope) XXX_Merge(src proto.Message) {
xxx_messageInfo_InvokeEnvelope.Merge(m, src)
}
func (m *InvokeEnvelope) XXX_Size() int {
return xxx_messageInfo_InvokeEnvelope.Size(m)
}
func (m *InvokeEnvelope) XXX_DiscardUnknown() {
xxx_messageInfo_InvokeEnvelope.DiscardUnknown(m)
}
var xxx_messageInfo_InvokeEnvelope proto.InternalMessageInfo
func (m *InvokeEnvelope) GetMethod() string {
if m != nil {
return m.Method
}
return ""
}
func (m *InvokeEnvelope) GetData() *any.Any {
if m != nil {
return m.Data
}
return nil
}
func (m *InvokeEnvelope) GetMetadata() map[string]string {
if m != nil {
return m.Metadata
}
return nil
}
type GetTopicSubscriptionsEnvelope struct {
Topics []string `protobuf:"bytes,1,rep,name=topics,proto3" json:"topics,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *GetTopicSubscriptionsEnvelope) Reset() { *m = GetTopicSubscriptionsEnvelope{} }
func (m *GetTopicSubscriptionsEnvelope) String() string { return proto.CompactTextString(m) }
func (*GetTopicSubscriptionsEnvelope) ProtoMessage() {}
func (*GetTopicSubscriptionsEnvelope) Descriptor() ([]byte, []int) {
return fileDescriptor_127d5244029ccc8f, []int{4}
}
func (m *GetTopicSubscriptionsEnvelope) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_GetTopicSubscriptionsEnvelope.Unmarshal(m, b)
}
func (m *GetTopicSubscriptionsEnvelope) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_GetTopicSubscriptionsEnvelope.Marshal(b, m, deterministic)
}
func (m *GetTopicSubscriptionsEnvelope) XXX_Merge(src proto.Message) {
xxx_messageInfo_GetTopicSubscriptionsEnvelope.Merge(m, src)
}
func (m *GetTopicSubscriptionsEnvelope) XXX_Size() int {
return xxx_messageInfo_GetTopicSubscriptionsEnvelope.Size(m)
}
func (m *GetTopicSubscriptionsEnvelope) XXX_DiscardUnknown() {
xxx_messageInfo_GetTopicSubscriptionsEnvelope.DiscardUnknown(m)
}
var xxx_messageInfo_GetTopicSubscriptionsEnvelope proto.InternalMessageInfo
func (m *GetTopicSubscriptionsEnvelope) GetTopics() []string {
if m != nil {
return m.Topics
}
return nil
}
type GetBindingsSubscriptionsEnvelope struct {
Bindings []string `protobuf:"bytes,1,rep,name=bindings,proto3" json:"bindings,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *GetBindingsSubscriptionsEnvelope) Reset() { *m = GetBindingsSubscriptionsEnvelope{} }
func (m *GetBindingsSubscriptionsEnvelope) String() string { return proto.CompactTextString(m) }
func (*GetBindingsSubscriptionsEnvelope) ProtoMessage() {}
func (*GetBindingsSubscriptionsEnvelope) Descriptor() ([]byte, []int) {
return fileDescriptor_127d5244029ccc8f, []int{5}
}
func (m *GetBindingsSubscriptionsEnvelope) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_GetBindingsSubscriptionsEnvelope.Unmarshal(m, b)
}
func (m *GetBindingsSubscriptionsEnvelope) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_GetBindingsSubscriptionsEnvelope.Marshal(b, m, deterministic)
}
func (m *GetBindingsSubscriptionsEnvelope) XXX_Merge(src proto.Message) {
xxx_messageInfo_GetBindingsSubscriptionsEnvelope.Merge(m, src)
}
func (m *GetBindingsSubscriptionsEnvelope) XXX_Size() int {
return xxx_messageInfo_GetBindingsSubscriptionsEnvelope.Size(m)
}
func (m *GetBindingsSubscriptionsEnvelope) XXX_DiscardUnknown() {
xxx_messageInfo_GetBindingsSubscriptionsEnvelope.DiscardUnknown(m)
}
var xxx_messageInfo_GetBindingsSubscriptionsEnvelope proto.InternalMessageInfo
func (m *GetBindingsSubscriptionsEnvelope) GetBindings() []string {
if m != nil {
return m.Bindings
}
return nil
}
type State struct {
Key string `protobuf:"bytes,1,opt,name=key,proto3" json:"key,omitempty"`
Value *any.Any `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"`
Etag string `protobuf:"bytes,3,opt,name=etag,proto3" json:"etag,omitempty"`
Metadata map[string]string `protobuf:"bytes,4,rep,name=metadata,proto3" json:"metadata,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
Options *StateOptions `protobuf:"bytes,5,opt,name=options,proto3" json:"options,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *State) Reset() { *m = State{} }
func (m *State) String() string { return proto.CompactTextString(m) }
func (*State) ProtoMessage() {}
func (*State) Descriptor() ([]byte, []int) {
return fileDescriptor_127d5244029ccc8f, []int{6}
}
func (m *State) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_State.Unmarshal(m, b)
}
func (m *State) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_State.Marshal(b, m, deterministic)
}
func (m *State) XXX_Merge(src proto.Message) {
xxx_messageInfo_State.Merge(m, src)
}
func (m *State) XXX_Size() int {
return xxx_messageInfo_State.Size(m)
}
func (m *State) XXX_DiscardUnknown() {
xxx_messageInfo_State.DiscardUnknown(m)
}
var xxx_messageInfo_State proto.InternalMessageInfo
func (m *State) GetKey() string {
if m != nil {
return m.Key
}
return ""
}
func (m *State) GetValue() *any.Any {
if m != nil {
return m.Value
}
return nil
}
func (m *State) GetEtag() string {
if m != nil {
return m.Etag
}
return ""
}
func (m *State) GetMetadata() map[string]string {
if m != nil {
return m.Metadata
}
return nil
}
func (m *State) GetOptions() *StateOptions {
if m != nil {
return m.Options
}
return nil
}
type StateOptions struct {
Concurrency string `protobuf:"bytes,1,opt,name=concurrency,proto3" json:"concurrency,omitempty"`
Consistency string `protobuf:"bytes,2,opt,name=consistency,proto3" json:"consistency,omitempty"`
RetryPolicy *RetryPolicy `protobuf:"bytes,3,opt,name=retryPolicy,proto3" json:"retryPolicy,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *StateOptions) Reset() { *m = StateOptions{} }
func (m *StateOptions) String() string { return proto.CompactTextString(m) }
func (*StateOptions) ProtoMessage() {}
func (*StateOptions) Descriptor() ([]byte, []int) {
return fileDescriptor_127d5244029ccc8f, []int{7}
}
func (m *StateOptions) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_StateOptions.Unmarshal(m, b)
}
func (m *StateOptions) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_StateOptions.Marshal(b, m, deterministic)
}
func (m *StateOptions) XXX_Merge(src proto.Message) {
xxx_messageInfo_StateOptions.Merge(m, src)
}
func (m *StateOptions) XXX_Size() int {
return xxx_messageInfo_StateOptions.Size(m)
}
func (m *StateOptions) XXX_DiscardUnknown() {
xxx_messageInfo_StateOptions.DiscardUnknown(m)
}
var xxx_messageInfo_StateOptions proto.InternalMessageInfo
func (m *StateOptions) GetConcurrency() string {
if m != nil {
return m.Concurrency
}
return ""
}
func (m *StateOptions) GetConsistency() string {
if m != nil {
return m.Consistency
}
return ""
}
func (m *StateOptions) GetRetryPolicy() *RetryPolicy {
if m != nil {
return m.RetryPolicy
}
return nil
}
type RetryPolicy struct {
Threshold int32 `protobuf:"varint,1,opt,name=threshold,proto3" json:"threshold,omitempty"`
Pattern string `protobuf:"bytes,2,opt,name=pattern,proto3" json:"pattern,omitempty"`
Interval *duration.Duration `protobuf:"bytes,3,opt,name=interval,proto3" json:"interval,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *RetryPolicy) Reset() { *m = RetryPolicy{} }
func (m *RetryPolicy) String() string { return proto.CompactTextString(m) }
func (*RetryPolicy) ProtoMessage() {}
func (*RetryPolicy) Descriptor() ([]byte, []int) {
return fileDescriptor_127d5244029ccc8f, []int{8}
}
func (m *RetryPolicy) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_RetryPolicy.Unmarshal(m, b)
}
func (m *RetryPolicy) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_RetryPolicy.Marshal(b, m, deterministic)
}
func (m *RetryPolicy) XXX_Merge(src proto.Message) {
xxx_messageInfo_RetryPolicy.Merge(m, src)
}
func (m *RetryPolicy) XXX_Size() int {
return xxx_messageInfo_RetryPolicy.Size(m)
}
func (m *RetryPolicy) XXX_DiscardUnknown() {
xxx_messageInfo_RetryPolicy.DiscardUnknown(m)
}
var xxx_messageInfo_RetryPolicy proto.InternalMessageInfo
func (m *RetryPolicy) GetThreshold() int32 {
if m != nil {
return m.Threshold
}
return 0
}
func (m *RetryPolicy) GetPattern() string {
if m != nil {
return m.Pattern
}
return ""
}
func (m *RetryPolicy) GetInterval() *duration.Duration {
if m != nil {
return m.Interval
}
return nil
}
// init registers every generated message (and map) type with the proto
// registry under its fully-qualified "daprclient.*" name.
func init() {
	proto.RegisterType((*CloudEventEnvelope)(nil), "daprclient.CloudEventEnvelope")
	proto.RegisterType((*BindingEventEnvelope)(nil), "daprclient.BindingEventEnvelope")
	proto.RegisterMapType((map[string]string)(nil), "daprclient.BindingEventEnvelope.MetadataEntry")
	proto.RegisterType((*BindingResponseEnvelope)(nil), "daprclient.BindingResponseEnvelope")
	proto.RegisterType((*InvokeEnvelope)(nil), "daprclient.InvokeEnvelope")
	proto.RegisterMapType((map[string]string)(nil), "daprclient.InvokeEnvelope.MetadataEntry")
	proto.RegisterType((*GetTopicSubscriptionsEnvelope)(nil), "daprclient.GetTopicSubscriptionsEnvelope")
	proto.RegisterType((*GetBindingsSubscriptionsEnvelope)(nil), "daprclient.GetBindingsSubscriptionsEnvelope")
	proto.RegisterType((*State)(nil), "daprclient.State")
	proto.RegisterMapType((map[string]string)(nil), "daprclient.State.MetadataEntry")
	proto.RegisterType((*StateOptions)(nil), "daprclient.StateOptions")
	proto.RegisterType((*RetryPolicy)(nil), "daprclient.RetryPolicy")
}

// init registers the raw (gzipped) descriptor for daprclient.proto.
func init() { proto.RegisterFile("daprclient.proto", fileDescriptor_127d5244029ccc8f) }

// fileDescriptor_127d5244029ccc8f is the gzipped FileDescriptorProto for
// daprclient.proto. Machine-generated binary data; never edit by hand.
var fileDescriptor_127d5244029ccc8f = []byte{
	// 759 bytes of a gzipped FileDescriptorProto
	0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x55, 0xdd, 0x6a, 0xdb, 0x4a,
	0x10, 0xb6, 0xe4, 0xdf, 0x8c, 0x73, 0x7c, 0x72, 0x86, 0x9c, 0x44, 0x51, 0xce, 0x49, 0x8d, 0x7a,
	0x51, 0xb7, 0x14, 0x07, 0x5c, 0x4a, 0x7f, 0x02, 0x81, 0x26, 0x31, 0x69, 0x0b, 0xc5, 0x41, 0x09,
	0x85, 0x42, 0x6f, 0x64, 0x79, 0xeb, 0x88, 0xc8, 0xbb, 0x62, 0xb5, 0x36, 0x08, 0x7a, 0xdf, 0xab,
	0xbe, 0x42, 0x5f, 0xa2, 0xcf, 0xd2, 0xbb, 0xf6, 0x5d, 0x8a, 0x56, 0x2b, 0x7b, 0xe3, 0x9f, 0x84,
	0x52, 0x7a, 0xa7, 0x9d, 0xef, 0x9b, 0x9d, 0xf9, 0xe6, 0x67, 0x05, 0x1b, 0x03, 0x2f, 0xe2, 0x7e,
	0x18, 0x10, 0x2a, 0xda, 0x11, 0x67, 0x82, 0x21, 0xcc, 0x2c, 0xf6, 0xce, 0x90, 0xb1, 0x61, 0x48,
	0xf6, 0x25, 0xd2, 0x1f, 0x7f, 0xd8, 0xf7, 0x68, 0x92, 0xd1, 0xec, 0xdd, 0x79, 0x88, 0x8c, 0x22,
	0x91, 0x83, 0x7b, 0xf3, 0xe0, 0x60, 0xcc, 0x3d, 0x11, 0x30, 0x9a, 0xe1, 0xce, 0x77, 0x03, 0xf0,
	0x38, 0x64, 0xe3, 0x41, 0x77, 0x42, 0xa8, 0xe8, 0xd2, 0x09, 0x09, 0x59, 0x44, 0xb0, 0x01, 0x66,
	0x30, 0xb0, 0x8c, 0xa6, 0xd1, 0x5a, 0x73, 0xcd, 0x60, 0x80, 0x5b, 0x50, 0x89, 0xd9, 0x98, 0xfb,
	0xc4, 0x32, 0xa5, 0x4d, 0x9d, 0x10, 0xa1, 0x24, 0x92, 0x88, 0x58, 0x45, 0x69, 0x95, 0xdf, 0xd8,
	0x84, 0x7a, 0x1c, 0x11, 0xff, 0x2d, 0xe1, 0x71, 0xc0, 0xa8, 0x55, 0x92, 0x90, 0x6e, 0xc2, 0x16,
	0xfc, 0x3d, 0xf0, 0x84, 0x77, 0xcc, 0xa8, 0x20, 0x54, 0x5c, 0xa4, 0x17, 0x94, 0x25, 0x6b, 0xde,
	0x8c, 0x9b, 0x50, 0x16, 0x2c, 0x0a, 0x7c, 0xab, 0x22, 0xf1, 0xec, 0x80, 0x2d, 0x28, 0xa5, 0x44,
	0xab, 0xda, 0x34, 0x5a, 0xf5, 0xce, 0x66, 0x3b, 0xd3, 0xd8, 0xce, 0x35, 0xb6, 0x5f, 0xd0, 0xc4,
	0x95, 0x0c, 0xe7, 0x87, 0x01, 0x9b, 0x47, 0x01, 0x1d, 0x04, 0x74, 0x78, 0x5d, 0x20, 0x42, 0x89,
	0x7a, 0x23, 0xa2, 0x24, 0xca, 0xef, 0xe9, 0xb5, 0xe6, 0x6d, 0xd7, 0xe2, 0x6b, 0xa8, 0x8d, 0x88,
	0xf0, 0x24, 0xbb, 0xd8, 0x2c, 0xb6, 0xea, 0x9d, 0x76, 0x5b, 0x6b, 0xdf, 0xb2, 0x88, 0xed, 0x37,
	0xca, 0xa1, 0x4b, 0x05, 0x4f, 0xdc, 0xa9, 0xbf, 0x7d, 0x00, 0x7f, 0x5d, 0x83, 0x70, 0x03, 0x8a,
	0x57, 0x24, 0x51, 0x99, 0xa5, 0x9f, 0x69, 0x15, 0x26, 0x5e, 0x38, 0xce, 0x8b, 0x9f, 0x1d, 0x9e,
	0x9b, 0x4f, 0x0d, 0xe7, 0x8b, 0x01, 0xdb, 0x2a, 0x9a, 0x4b, 0xe2, 0x88, 0xd1, 0x98, 0x4c, 0x25,
	0xe6, 0x72, 0x8c, 0x5b, 0xe5, 0x34, 0xc0, 0x14, 0xcc, 0x32, 0x9b, 0xc5, 0xb4, 0xdb, 0x82, 0xe1,
	0x3d, 0x28, 0xc7, 0xc2, 0x13, 0x44, 0x69, 0xfb, 0x47, 0xd7, 0x76, 0x9e, 0x02, 0x6e, 0x86, 0xa7,
	0xad, 0xf6, 0x19, 0xf5, 0xc7, 0x9c, 0x13, 0xea, 0x27, 0x79, 0xab, 0x35, 0x93, 0xf3, 0xcd, 0x80,
	0xc6, 0x2b, 0x3a, 0x61, 0x57, 0xb3, 0xbc, 0xb6, 0xa0, 0x32, 0x22, 0xe2, 0x92, 0xe5, 0xf3, 0xa5,
	0x4e, 0xbf, 0x50, 0xfe, 0x93, 0x85, 0xf2, 0xb7, 0xf4, 0x14, 0xaf, 0xc7, 0xfb, 0x33, 0x85, 0x7f,
	0x02, 0xff, 0x9f, 0x12, 0x71, 0x91, 0x8e, 0xe3, 0xf9, 0xb8, 0x1f, 0xfb, 0x3c, 0x88, 0xd2, 0xad,
	0x8a, 0x75, 0x95, 0x72, 0x58, 0x63, 0xcb, 0x90, 0x75, 0x55, 0x27, 0xe7, 0x10, 0x9a, 0xa7, 0x44,
	0xa8, 0x9e, 0xc5, 0xcb, 0x7d, 0x6d, 0xa8, 0xf5, 0x15, 0x41, 0x79, 0x4f, 0xcf, 0xce, 0x27, 0x13,
	0xca, 0xb2, 0x07, 0x4b, 0xd2, 0x7d, 0xa0, 0xa7, 0xbb, 0xaa, 0x84, 0x19, 0x25, 0x5d, 0x00, 0x22,
	0xbc, 0x61, 0xbe, 0xb9, 0xe9, 0x37, 0x1e, 0x68, 0x75, 0x2d, 0xc9, 0xba, 0xde, 0x59, 0x68, 0xfd,
	0xaa, 0x72, 0x62, 0x07, 0xaa, 0x2c, 0xd3, 0x21, 0x97, 0xb9, 0xde, 0xb1, 0x16, 0x7c, 0x7b, 0x19,
	0xee, 0xe6, 0xc4, 0xdf, 0x6b, 0xc1, 0x67, 0x03, 0xd6, 0xf5, 0x6b, 0xe7, 0xa7, 0xd1, 0x58, 0x98,
	0x46, 0xc5, 0x88, 0x83, 0x58, 0x48, 0x86, 0x39, 0x65, 0xe4, 0x26, 0x7c, 0x06, 0x75, 0x4e, 0x04,
	0x4f, 0xce, 0x58, 0x18, 0xf8, 0x89, 0xac, 0x4e, 0xbd, 0xb3, 0xad, 0x2b, 0x71, 0x67, 0xb0, 0xab,
	0x73, 0x9d, 0x8f, 0x50, 0xd7, 0x30, 0xfc, 0x0f, 0xd6, 0xc4, 0x25, 0x27, 0xf1, 0x25, 0x0b, 0xb3,
	0x49, 0x2f, 0xbb, 0x33, 0x03, 0x5a, 0x50, 0x8d, 0x3c, 0x21, 0x08, 0xa7, 0x2a, 0x8b, 0xfc, 0x88,
	0x8f, 0xa1, 0x16, 0x50, 0x41, 0xf8, 0xc4, 0x0b, 0x55, 0xf8, 0x9d, 0x85, 0x3e, 0x9e, 0xa8, 0x47,
	0xdc, 0x9d, 0x52, 0x3b, 0x5f, 0x8b, 0x00, 0x27, 0x5e, 0xc4, 0x8f, 0x65, 0x96, 0x78, 0x08, 0xb5,
	0x1e, 0xcd, 0x16, 0x01, 0xed, 0xd5, 0xcb, 0x61, 0x2f, 0x9d, 0x11, 0xa7, 0x80, 0xef, 0xe1, 0xdf,
	0xa5, 0xf3, 0x8d, 0x5b, 0x0b, 0x0e, 0xdd, 0xf4, 0x77, 0x63, 0xdf, 0xd7, 0x83, 0xdc, 0xb8, 0x1a,
	0x4e, 0x01, 0xfb, 0x60, 0xad, 0x5a, 0x82, 0x95, 0x01, 0x1e, 0xce, 0x05, 0xb8, 0x71, 0x85, 0x9c,
	0x02, 0xbe, 0x83, 0x46, 0x8f, 0xea, 0x2f, 0x31, 0x36, 0x6f, 0x7b, 0xa3, 0xed, 0xbb, 0x4b, 0x18,
	0xf3, 0xef, 0xaa, 0x53, 0xc0, 0x97, 0xb0, 0xde, 0xa3, 0x52, 0x60, 0x76, 0xf1, 0x9e, 0xee, 0xb6,
	0xf8, 0x37, 0xb5, 0x57, 0x48, 0x72, 0x0a, 0x47, 0xbb, 0x50, 0x0d, 0x98, 0xf4, 0x3e, 0xda, 0x98,
	0x75, 0xef, 0x2c, 0xa5, 0xc5, 0xfd, 0x8a, 0xa4, 0x3f, 0xfa, 0x19, 0x00, 0x00, 0xff, 0xff, 0x30,
	0x41, 0xfe, 0xa9, 0x1a, 0x08, 0x00, 0x00,
}
// Reference imports to suppress errors if they are not otherwise used.
var _ context.Context
var _ grpc.ClientConn

// This is a compile-time assertion to ensure that this generated file
// is compatible with the grpc package it is being compiled against.
const _ = grpc.SupportPackageIsVersion4

// DaprClientClient is the client API for DaprClient service.
//
// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream.
type DaprClientClient interface {
	OnInvoke(ctx context.Context, in *InvokeEnvelope, opts ...grpc.CallOption) (*any.Any, error)
	GetTopicSubscriptions(ctx context.Context, in *empty.Empty, opts ...grpc.CallOption) (*GetTopicSubscriptionsEnvelope, error)
	GetBindingsSubscriptions(ctx context.Context, in *empty.Empty, opts ...grpc.CallOption) (*GetBindingsSubscriptionsEnvelope, error)
	OnBindingEvent(ctx context.Context, in *BindingEventEnvelope, opts ...grpc.CallOption) (*BindingResponseEnvelope, error)
	OnTopicEvent(ctx context.Context, in *CloudEventEnvelope, opts ...grpc.CallOption) (*empty.Empty, error)
}

// daprClientClient is the generated gRPC client implementation of
// DaprClientClient; each method issues a unary RPC over cc.
type daprClientClient struct {
	cc *grpc.ClientConn
}

// NewDaprClientClient wraps an existing connection in the generated client.
func NewDaprClientClient(cc *grpc.ClientConn) DaprClientClient {
	return &daprClientClient{cc}
}

func (c *daprClientClient) OnInvoke(ctx context.Context, in *InvokeEnvelope, opts ...grpc.CallOption) (*any.Any, error) {
	out := new(any.Any)
	err := c.cc.Invoke(ctx, "/daprclient.DaprClient/OnInvoke", in, out, opts...)
	if err != nil {
		return nil, err
	}
	return out, nil
}

func (c *daprClientClient) GetTopicSubscriptions(ctx context.Context, in *empty.Empty, opts ...grpc.CallOption) (*GetTopicSubscriptionsEnvelope, error) {
	out := new(GetTopicSubscriptionsEnvelope)
	err := c.cc.Invoke(ctx, "/daprclient.DaprClient/GetTopicSubscriptions", in, out, opts...)
	if err != nil {
		return nil, err
	}
	return out, nil
}

func (c *daprClientClient) GetBindingsSubscriptions(ctx context.Context, in *empty.Empty, opts ...grpc.CallOption) (*GetBindingsSubscriptionsEnvelope, error) {
	out := new(GetBindingsSubscriptionsEnvelope)
	err := c.cc.Invoke(ctx, "/daprclient.DaprClient/GetBindingsSubscriptions", in, out, opts...)
	if err != nil {
		return nil, err
	}
	return out, nil
}

func (c *daprClientClient) OnBindingEvent(ctx context.Context, in *BindingEventEnvelope, opts ...grpc.CallOption) (*BindingResponseEnvelope, error) {
	out := new(BindingResponseEnvelope)
	err := c.cc.Invoke(ctx, "/daprclient.DaprClient/OnBindingEvent", in, out, opts...)
	if err != nil {
		return nil, err
	}
	return out, nil
}

func (c *daprClientClient) OnTopicEvent(ctx context.Context, in *CloudEventEnvelope, opts ...grpc.CallOption) (*empty.Empty, error) {
	out := new(empty.Empty)
	err := c.cc.Invoke(ctx, "/daprclient.DaprClient/OnTopicEvent", in, out, opts...)
	if err != nil {
		return nil, err
	}
	return out, nil
}
// DaprClientServer is the server API for DaprClient service.
type DaprClientServer interface {
	OnInvoke(context.Context, *InvokeEnvelope) (*any.Any, error)
	GetTopicSubscriptions(context.Context, *empty.Empty) (*GetTopicSubscriptionsEnvelope, error)
	GetBindingsSubscriptions(context.Context, *empty.Empty) (*GetBindingsSubscriptionsEnvelope, error)
	OnBindingEvent(context.Context, *BindingEventEnvelope) (*BindingResponseEnvelope, error)
	OnTopicEvent(context.Context, *CloudEventEnvelope) (*empty.Empty, error)
}

// RegisterDaprClientServer attaches srv to the gRPC server under the
// generated service descriptor.
func RegisterDaprClientServer(s *grpc.Server, srv DaprClientServer) {
	s.RegisterService(&_DaprClient_serviceDesc, srv)
}

// The _DaprClient_*_Handler functions below are generated unary handlers:
// each decodes the request, then either calls the service directly or
// routes the call through the configured server interceptor.
func _DaprClient_OnInvoke_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	in := new(InvokeEnvelope)
	if err := dec(in); err != nil {
		return nil, err
	}
	if interceptor == nil {
		return srv.(DaprClientServer).OnInvoke(ctx, in)
	}
	info := &grpc.UnaryServerInfo{
		Server:     srv,
		FullMethod: "/daprclient.DaprClient/OnInvoke",
	}
	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
		return srv.(DaprClientServer).OnInvoke(ctx, req.(*InvokeEnvelope))
	}
	return interceptor(ctx, in, info, handler)
}

func _DaprClient_GetTopicSubscriptions_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	in := new(empty.Empty)
	if err := dec(in); err != nil {
		return nil, err
	}
	if interceptor == nil {
		return srv.(DaprClientServer).GetTopicSubscriptions(ctx, in)
	}
	info := &grpc.UnaryServerInfo{
		Server:     srv,
		FullMethod: "/daprclient.DaprClient/GetTopicSubscriptions",
	}
	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
		return srv.(DaprClientServer).GetTopicSubscriptions(ctx, req.(*empty.Empty))
	}
	return interceptor(ctx, in, info, handler)
}

func _DaprClient_GetBindingsSubscriptions_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	in := new(empty.Empty)
	if err := dec(in); err != nil {
		return nil, err
	}
	if interceptor == nil {
		return srv.(DaprClientServer).GetBindingsSubscriptions(ctx, in)
	}
	info := &grpc.UnaryServerInfo{
		Server:     srv,
		FullMethod: "/daprclient.DaprClient/GetBindingsSubscriptions",
	}
	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
		return srv.(DaprClientServer).GetBindingsSubscriptions(ctx, req.(*empty.Empty))
	}
	return interceptor(ctx, in, info, handler)
}

func _DaprClient_OnBindingEvent_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	in := new(BindingEventEnvelope)
	if err := dec(in); err != nil {
		return nil, err
	}
	if interceptor == nil {
		return srv.(DaprClientServer).OnBindingEvent(ctx, in)
	}
	info := &grpc.UnaryServerInfo{
		Server:     srv,
		FullMethod: "/daprclient.DaprClient/OnBindingEvent",
	}
	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
		return srv.(DaprClientServer).OnBindingEvent(ctx, req.(*BindingEventEnvelope))
	}
	return interceptor(ctx, in, info, handler)
}

func _DaprClient_OnTopicEvent_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	in := new(CloudEventEnvelope)
	if err := dec(in); err != nil {
		return nil, err
	}
	if interceptor == nil {
		return srv.(DaprClientServer).OnTopicEvent(ctx, in)
	}
	info := &grpc.UnaryServerInfo{
		Server:     srv,
		FullMethod: "/daprclient.DaprClient/OnTopicEvent",
	}
	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
		return srv.(DaprClientServer).OnTopicEvent(ctx, req.(*CloudEventEnvelope))
	}
	return interceptor(ctx, in, info, handler)
}

// _DaprClient_serviceDesc maps each RPC name to its generated handler.
// All five RPCs are unary (no streams).
var _DaprClient_serviceDesc = grpc.ServiceDesc{
	ServiceName: "daprclient.DaprClient",
	HandlerType: (*DaprClientServer)(nil),
	Methods: []grpc.MethodDesc{
		{
			MethodName: "OnInvoke",
			Handler:    _DaprClient_OnInvoke_Handler,
		},
		{
			MethodName: "GetTopicSubscriptions",
			Handler:    _DaprClient_GetTopicSubscriptions_Handler,
		},
		{
			MethodName: "GetBindingsSubscriptions",
			Handler:    _DaprClient_GetBindingsSubscriptions_Handler,
		},
		{
			MethodName: "OnBindingEvent",
			Handler:    _DaprClient_OnBindingEvent_Handler,
		},
		{
			MethodName: "OnTopicEvent",
			Handler:    _DaprClient_OnTopicEvent_Handler,
		},
	},
	Streams:  []grpc.StreamDesc{},
	Metadata: "daprclient.proto",
}
|
#!/bin/sh
# Generate a kcov coverage report by running shellspec inside a throwaway
# Docker container and copying the results out to ./coverage.
# Development-only helper: provided as is, without support, and may change
# without notice.
set -eu
image="shellspec:coverage"
docker build -t "$image" -f "dockerfiles/.coverage" "$@" .
command="./shellspec --task fixture:stat:prepare; ./shellspec --kcov"
cid=$(docker create -it "$image" sh -c "$command")
docker start -ai "$cid"
rm -rf coverage
docker cp "$cid:/root/coverage" "coverage"
# Remove the helper container so repeated runs don't accumulate stopped
# containers (previously it was leaked after every run).
docker rm "$cid" >/dev/null
|
#include "rr.h"
namespace rr {
  // Cached Ruby VALUE handles for the four V8 primitive singletons.
  // Initialised to Qnil in Init() and registered with Ruby's GC so the
  // cached objects are never collected.
  VALUE Constants::_Undefined;
  VALUE Constants::_Null;
  VALUE Constants::_True;
  VALUE Constants::_False;

  // Init exposes V8::C.Undefined/Null/True/False as singleton methods and
  // prepares the GC-protected caches backing them.
  void Constants::Init() {
    ModuleBuilder("V8::C").
      defineSingletonMethod("Undefined", &Undefined).
      defineSingletonMethod("Null", &Null).
      defineSingletonMethod("True", &True).
      defineSingletonMethod("False", &False);
    // Registering the addresses pins the slots, not the current values, so
    // later assignments in cached<>() stay protected.
    _Undefined = _Null = _True = _False = Qnil;
    rb_gc_register_address(&_Undefined);
    rb_gc_register_address(&_Null);
    rb_gc_register_address(&_True);
    rb_gc_register_address(&_False);
  }

  // Each accessor lazily wraps the corresponding V8 primitive via the
  // cached<> helper and memoises it in the slot above.
  VALUE Constants::Undefined(VALUE self) {
    return cached<Primitive, v8::Primitive>(&_Undefined, v8::Undefined());
  }
  VALUE Constants::Null(VALUE self) {
    return cached<Primitive, v8::Primitive>(&_Null, v8::Null());
  }
  VALUE Constants::True(VALUE self) {
    return cached<Bool, v8::Boolean>(&_True, v8::True());
  }
  VALUE Constants::False(VALUE self) {
    return cached<Bool, v8::Boolean>(&_False, v8::False());
  }
}
<filename>persistence/src/main/java/com/ctrip/persistence/repository/LinkHistoryRepository.java
package com.ctrip.persistence.repository;
import com.ctrip.persistence.entity.Link;
import com.ctrip.persistence.entity.LinkHistory;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.JpaSpecificationExecutor;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
import java.util.List;
/**
* @author <NAME>
*/
public interface LinkHistoryRepository
        extends JpaRepository<LinkHistory, Long>, JpaSpecificationExecutor<LinkHistory> {
    // Ids of the LinkHistory rows belonging to the given flow.
    // NOTE(review): derived query — relies on Spring Data parsing "Id" as a
    // property expression; confirm it projects only the id column as intended.
    List<Long> findIdByMlFlowId(Long id);
    // All link-history rows recorded for the given flow id.
    List<LinkHistory> findByMlFlowId(Long id);
    // Source node ids of links in a flow that point at the given target node.
    @Query("select sourceId from LinkHistory l where l.mlFlowId = :mlFlowId and l.targetId = :targetId")
    List<Long> getSourceIdByMlFlowIdAndTargetId(@Param("mlFlowId") Long mlFlowId, @Param("targetId") Long targetId);
}
|
#!/bin/bash
# Compile the Akoma Ntoso stylesheet twice: a minified build for
# production and an uncompressed build for debugging.
set -e
set -x
sass --style compressed scss/akoma-ntoso.scss css/akoma-ntoso.min.css
sass scss/akoma-ntoso.scss css/akoma-ntoso.css
|
// Random-access iterator over the elements of an InplaceArray<T, N>.
// Wraps a raw element pointer; dereference and member access simply
// forward to that pointer.
template <typename T, size_t N>
class InplaceArray_Iterator {
 public:
  // Standard iterator traits so this works with <algorithm> and iterator
  // adaptors.
  using value_type = T;
  using reference = T&;
  using pointer = T*;
  using iterator_category = std::random_access_iterator_tag;
  using difference_type = std::ptrdiff_t;

  // Construct an iterator positioned at the given element.
  InplaceArray_Iterator(T* ptr) : current_(ptr) {}

  // Access the element this iterator currently refers to.
  reference operator*() const { return *current_; }

  // Member access through the iterator.
  pointer operator->() const { return current_; }

  // Other necessary iterator methods (not relevant to this problem) are already implemented

 private:
  T* current_;  // the element currently referenced
};
<filename>create_backup_task_test.go
package akeebabackup
import (
"log"
"testing"
)
// init configures the standard logger to prefix each message with
// file:line, which makes test diagnostics easier to trace.
func init() {
	log.SetFlags(log.Lshortfile)
}
// TestCreateBackupTask drives a backup task against a live Akeeba
// endpoint. The URL and frontend key below are placeholders and must be
// filled in for the test to pass.
func TestCreateBackupTask(t *testing.T) {
	websiteURL := "https://" // NOTE change for successful test execution
	frontendKey := ""

	backupTask := NewCreateBackupTask(websiteURL, frontendKey)
	if ok := backupTask.Execute(); !ok {
		t.Fatal("execution failed")
	}
}
|
package log
import (
"fmt"
"io"
"time"
"github.com/apex/log"
)
// thisIsAStackerLog is a unique sentinel value; its *address* marks log
// entries produced by this package's helpers.
var thisIsAStackerLog struct{}

// addStackerLogSentinel tags an entry so the filter handler can later
// recognise it as stacker-originated.
func addStackerLogSentinel(e *log.Entry) *log.Entry {
	return e.WithField("isStacker", &thisIsAStackerLog)
}

// isStackerLog reports whether the entry carries the sentinel. Comparing
// against the sentinel's address (not just the field name) prevents other
// code from spoofing the marker.
func isStackerLog(e *log.Entry) bool {
	v, ok := e.Fields["isStacker"]
	return ok && v == &thisIsAStackerLog
}
// stackerLogFilterer forwards only sentinel-tagged entries to the
// underlying handler and silently drops everything else.
type stackerLogFilterer struct {
	underlying log.Handler
}

// HandleLog drops non-stacker entries; for stacker entries it strips the
// internal marker field (mutating e.Fields) before delegating.
func (h stackerLogFilterer) HandleLog(e *log.Entry) error {
	if !isStackerLog(e) {
		return nil
	}
	// Remove the marker so it never shows up in formatted output.
	delete(e.Fields, "isStacker")
	return h.underlying.HandleLog(e)
}

// FilterNonStackerLogs installs the filtering handler and the given level
// on the global apex logger.
func FilterNonStackerLogs(handler log.Handler, level log.Level) {
	log.SetHandler(stackerLogFilterer{handler})
	log.SetLevel(level)
}
// Debugf logs a sentinel-tagged debug message through the global apex
// logger, so it survives FilterNonStackerLogs.
func Debugf(msg string, v ...interface{}) {
	addStackerLogSentinel(log.NewEntry(log.Log.(*log.Logger))).Debugf(msg, v...)
}

// Infof logs a sentinel-tagged info message through the global apex logger.
func Infof(msg string, v ...interface{}) {
	addStackerLogSentinel(log.NewEntry(log.Log.(*log.Logger))).Infof(msg, v...)
}
// TextHandler renders each entry as one plain-text line, optionally
// prefixed with an RFC 3339 timestamp.
type TextHandler struct {
	out io.StringWriter
	timestamp bool
}

// NewTextHandler builds a TextHandler writing to out; timestamp controls
// whether each line starts with the entry's time.
func NewTextHandler(out io.StringWriter, timestamp bool) log.Handler {
	return &TextHandler{out, timestamp}
}
// HandleLog writes one entry as a single line: an optional RFC 3339
// timestamp, the message, then each field as " name=value", terminated by
// a newline. Returns the first write error encountered.
func (th *TextHandler) HandleLog(e *log.Entry) error {
	if th.timestamp {
		_, err := th.out.WriteString(fmt.Sprintf("%s ", e.Timestamp.Format(time.RFC3339)))
		if err != nil {
			return err
		}
	}
	// Write the message verbatim. The previous fmt.Sprintf(e.Message)
	// treated the (already formatted) message as a format string, so any
	// message containing "%" verbs was mangled (go vet: printf).
	_, err := th.out.WriteString(e.Message)
	if err != nil {
		return err
	}
	for _, name := range e.Fields.Names() {
		_, err = th.out.WriteString(fmt.Sprintf(" %s=%s", name, e.Fields.Get(name)))
		if err != nil {
			return err
		}
	}
	_, err = th.out.WriteString("\n")
	if err != nil {
		return err
	}
	return nil
}
|
#!/bin/bash
#Copyright (c) 2016, Virginia Tech
#All rights reserved.
#
#Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
# following conditions are met:
#1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
#disclaimer.
#2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
#disclaimer in the documentation and/or other materials provided with the distribution.
#
#THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
#INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
#DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
#SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
#SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
#WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
#OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#The views and conclusions contained in the software and documentation are those of the authors and should not be
#interpreted as representing official policies, either expressed or implied, of the FreeBSD Project.
#
#This material was prepared as an account of work sponsored by an agency of the United States Government. Neither the
#United States Government nor the United States Department of Energy, nor Virginia Tech, nor any of their employees,
#nor any jurisdiction or organization that has cooperated in the development of these materials, makes any warranty,
#express or implied, or assumes any legal liability or responsibility for the accuracy, completeness, or usefulness or
#any information, apparatus, product, software, or process disclosed, or represents that its use would not infringe
#privately owned rights.
#
#Reference herein to any specific commercial product, process, or service by trade name, trademark, manufacturer, or
#otherwise does not necessarily constitute or imply its endorsement, recommendation, favoring by the United States
#Government or any agency thereof, or Virginia Tech - Advanced Research Institute. The views and opinions of authors
#expressed herein do not necessarily state or reflect those of the United States Government or any agency thereof.
#
#VIRGINIA TECH – ADVANCED RESEARCH INSTITUTE
#under Contract DE-EE0006352
#
#__author__ = "BEMOSS Team"
#__credits__ = ""
#__version__ = "3.5"
#__maintainer__ = "BEMOSS Team"
#__email__ = "aribemoss@gmail.com"
#__website__ = "www.bemoss.org"
#__created__ = "2014-09-12 12:04:50"
#__lastUpdated__ = "2016-08-17 11:23:33"
# BEMOSS startup: locate the project, optionally re-initialise the
# platform, start Cassandra, then build and launch the Volttron agents.
export VOLTTRON_HOME=~/.volttron
# Quote all path expansions so the script survives a checkout path that
# contains spaces (previously every expansion was unquoted).
PROJECT_DIR=$(dirname "$(readlink -f "$0")")
cd "$PROJECT_DIR"
. env/bin/activate
#Step1: Find own IP
python bemoss_lib/utils/find_own_ip.py
#Step2: Run Platform Initiator
# read exits non-zero on timeout, so a 15 s silence keeps the old state.
read -t 15 -p "Press ENTER to start fresh BEMOSS within 15 seconds...." input
if [[ $? -ne 0 ]]
then
	echo "Retaining previous state of BEMOSS..."
else
	echo "Performing fresh restart of BEMOSS..."
	python bemoss_lib/utils/platform_initiator.py
	sleep 2
fi
PYTHONPATH='.' python bemoss_lib/databases/cassandraAPI/startCassandra.py
sleep 2
#Step3: Build agents
source bemoss_lib/utils/buildAgents.sh "$PROJECT_DIR"
#Step4: Run Volttron Platform and Agents
cd "$PROJECT_DIR"
source bemoss_lib/utils/runPlatform.sh "$PROJECT_DIR"
#Step5: Configure webserver to run on own IP and Bind BACnet with IP
#source start_webserver.sh
|
######### Stylus
# Button 1 is the stylus tip response; best not to change it, otherwise
# the stylus will stop working.
# Default - mouse / pointing
#xsetwacom --set 'GAOMON Gaomon Tablet Pen stylus' Button 1 "***"
# bottom stylus button
# CTRL + V (press ctrl, type v, release ctrl)
xsetwacom --set 'GAOMON Gaomon Tablet Pen stylus' Button 2 "key +ctrl v -ctrl"
# top stylus button
# RIGHT CLICK (plain button 3, no key sequence)
xsetwacom --set 'GAOMON Gaomon Tablet Pen stylus' Button 3 3
######### Pad
# pad key 1
# ALT+TAB
xsetwacom --set 'GAOMON Gaomon Tablet Pad pad' Button 1 "key +alt +tab -tab -alt"
# pad key 2
# CTRL+C
xsetwacom --set 'GAOMON Gaomon Tablet Pad pad' Button 2 "key +ctrl c -ctrl"
# pad key 3
# Enter (keysym 0xff0d = Return)
xsetwacom --set 'GAOMON Gaomon Tablet Pad pad' Button 3 "key 0xff0d"
# pad key 4
# ESC — mapped to Button 8; presumably buttons 4-7 are skipped because X
# reserves them for scroll events (confirm for this tablet).
xsetwacom --set 'GAOMON Gaomon Tablet Pad pad' Button 8 "key +esc -esc"
|
<reponame>elp2/advent_of_code_2020<gh_stars>1-10
from collections import defaultdict
def return_default():
    """Default factory used by dd(): every missing key maps to 0."""
    return 0


def dd():
    """Build a defaultdict whose missing entries default to 0."""
    return defaultdict(return_default)
CHALLENGE_DAY = "18"
# Real and sample puzzle inputs are read once at import time.
REAL = open(CHALLENGE_DAY + ".txt").read()
SAMPLE = open(CHALLENGE_DAY + ".sample.txt").read()
# Expected total for the sample file; setting this to None skips the
# sample assertion further down.
SAMPLE_EXPECTED = 13632
# SAMPLE_EXPECTED = 71 + 51 + 26 + 437 + 12240 +
def parse_lines(raw):
    """Split the raw puzzle input into one string per line."""
    return raw.split("\n")
from collections import deque
def calc(line):
    """Evaluate one expression where + and * have equal precedence and
    apply left to right (AoC 2020 day 18, part 1); parentheses group as
    usual. Returns the integer result.
    """
    # Pad parentheses with spaces so a plain split tokenises everything.
    line = line.replace("(", " ( ")
    line = line.replace(")", " ) ")
    tokens = [tok for tok in line.split(" ") if tok]
    return calc_tokens(tokens)


def calc_tokens(tokens):
    """Reduce a token list (ints / "+" / "*" / parens) with a stack.

    Numbers are folded into a pending operator immediately, which gives
    the strict left-to-right order of part 1. (Debug prints that dumped
    the stack on every token have been removed.)
    """
    q = []
    for t in tokens:
        t = str(t)
        ops = []
        if t in "+*":
            q.append(t)
        elif t == "(":
            q.append(t)
            continue
        elif t == ")":
            # Pop back to the matching "("; eager folding means the group
            # has already been reduced to a single value.
            while True:
                h = q.pop()
                if h == "(":
                    break
                ops.append(h)
        else:
            # A number: combine with a pending operator right away.
            num = int(t)
            if len(q) and q[-1] in "+*":
                ops.append(num)
                ops.append(q.pop())
                ops.append(q.pop())
            else:
                ops = [num]
        if len(ops) == 1:
            q.append(ops[0])
        elif len(ops) == 3:
            a, op, b = ops
            if op == "+":
                q.append(a + b)
            elif op == "*":
                q.append(a * b)
            else:
                assert False
    if len(q) != 1:
        # A value produced by ")" may still have an operator pending to
        # its left; rerun the reduced token list until one value remains.
        return calc_tokens(q)
    return q[0]
def solve(raw):
    """Evaluate every input line with calc() and return the grand total,
    echoing each intermediate result for debugging."""
    total = 0
    for line in parse_lines(raw):
        value = calc(line)
        total += value
        print("!!! ", line, " => ", value)
        print("-----------------")
    return total
def test_parsing(lines):
if isinstance(lines, list):
for i in range(min(5, len(lines))):
print(lines[i])
elif isinstance(lines, dict) or isinstance(lines, defaultdict):
nd = {}
for k in list(lines.keys())[0: 5]:
print("\"" + k + "\": " + str(lines[k]))
# Eyeball the parsed sample, check it against the known answer, then solve
# the real input and copy the result to the clipboard.
test_parsing(parse_lines(SAMPLE))
print("^^^^^^^^^PARSED SAMPLE SAMPLE^^^^^^^^^")
# Sample expressions the expected total was derived from:
# 1 + 2 * 3 + 4 * 5 + 6
# 1 + (2 * 3) + (4 * (5 + 6))
# 2 * 3 + (4 * 5)
# 5 + (8 * 3 + 9 + 3 * 4 * 3)
# 5 * 9 * (7 * 3 * 3 + 9 * 3 + (8 + 6 * 4))
sample = solve(SAMPLE)
if SAMPLE_EXPECTED is None:
    print("*** SKIPPING SAMPLE! ***")
else:
    assert sample == SAMPLE_EXPECTED
    print("*** SAMPLE PASSED ***")
solved = solve(REAL)
print("SOLUTION: ", solved)
# pandas is imported late and only used for its clipboard helper.
import pandas as pd
df=pd.DataFrame([str(solved)])
df.to_clipboard(index=False,header=False)
print("COPIED TO CLIPBOARD")
# assert solved
|
#!/usr/bin/env bash
# Build the bpf-rust-noop example with the BPF SDK and copy the resulting
# shared object next to this script.
set -ex
cd "$(dirname "$0")"
../../../bpf-sdk/rust/build.sh ../../../examples/bpf-rust-noop
cp ../../../examples/bpf-rust-noop/target/bpfel-unknown-unknown/release/bitconch_bpf_rust_noop.so .
|
/** PSP helper library ***************************************/
/** **/
/** perf.h **/
/** **/
/** This file contains declarations for performance-related **/
/** PSP routines **/
/** **/
/** Copyright (C) <NAME> 2007 **/
/** You are not allowed to distribute this software **/
/** commercially. Please, notify me, if you make any **/
/** changes to this file. **/
/*************************************************************/
#ifndef _PSP_PERF_H
#define _PSP_PERF_H
#include <psptypes.h>
#ifdef __cplusplus
extern "C" {
#endif
/* Rolling frames-per-second counter state. Field meanings are inferred
   from the names — confirm against the perf implementation. */
typedef struct PspFpsCounter
{
  float TicksPerSecond; /* presumably the system tick frequency */
  int FrameCount;       /* frames counted since the last FPS update */
  u64 LastTick;         /* tick value at the last FPS update */
  float Fps;            /* most recently computed FPS value */
} PspFpsCounter;
/* Initialise a counter before its first use. */
void pspPerfInitFps(PspFpsCounter *counter);
/* Account for one frame and return the current FPS estimate. */
float pspPerfGetFps(PspFpsCounter *counter);
#ifdef __cplusplus
}
#endif
#endif // _PSP_PERF_H
|
package mezz.jei.network.packets;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.network.PacketBuffer;
import net.minecraft.util.text.TextFormatting;
import mezz.jei.config.Config;
import mezz.jei.network.IPacketId;
import mezz.jei.network.PacketIdClient;
import mezz.jei.util.CommandUtilServer;
public class PacketCheatPermission extends PacketJei {
	// The server's verdict on whether this player may use cheat mode.
	private final boolean hasPermission;
	public PacketCheatPermission(boolean hasPermission) {
		this.hasPermission = hasPermission;
	}
	@Override
	public IPacketId getPacketId() {
		return PacketIdClient.CHEAT_PERMISSION;
	}
	@Override
	public void writePacketData(PacketBuffer buf) {
		buf.writeBoolean(hasPermission);
	}
	// Receiving side: if permission was denied while cheat mode is enabled
	// locally, notify the player in chat, switch cheat mode off, and close
	// the player's open screen.
	public static void readPacketData(PacketBuffer buf, EntityPlayer player) {
		boolean hasPermission = buf.readBoolean();
		if (!hasPermission && Config.isCheatItemsEnabled()) {
			CommandUtilServer.writeChatMessage(player, "jei.chat.error.no.cheat.permission.1", TextFormatting.RED);
			CommandUtilServer.writeChatMessage(player, "jei.chat.error.no.cheat.permission.2", TextFormatting.RED);
			Config.setCheatItemsEnabled(false);
			player.closeScreen();
		}
	}
}
|
const webpack = require('webpack')
const path = require('path')
const ExtractTextPlugin = require('extract-text-webpack-plugin')
const CopyWebpackPlugin = require('copy-webpack-plugin')
const BabiliPlugin = require('babili-webpack-plugin')
const commonPlugins = [
  // Inline NODE_ENV at build time (defaults to 'production').
  new webpack.DefinePlugin({
    'process.env.NODE_ENV': JSON.stringify(
      process.env.NODE_ENV || 'production'
    )
  }),
  // Collect all imported CSS into a single styles.css.
  new ExtractTextPlugin('styles.css'),
  // Extension assets copied straight into the output directory.
  new CopyWebpackPlugin([
    {
      from: 'src/manifest.json',
      to: 'manifest.json'
    },
    {
      from: 'src/options.html',
      to: 'options.html'
    },
    {
      from: 'src/icon.png',
      to: 'icon.png'
    }
  ])
]
// Babili minification is currently disabled.
// NOTE(review): BabiliPlugin is still required at the top of the file
// even though nothing uses it while this stays commented out.
// const prodPlugins = [new BabiliPlugin()]
const prodPlugins = []
const isProd = process.env.NODE_ENV === 'production'
module.exports = {
  entry: {
    inject: './src/inject.ts',
    options: './src/options.ts'
  },
  output: {
    path: path.join(__dirname, 'dist'),
    filename: '[name].js'
  },
  // Watch mode only for development builds.
  watch: !isProd,
  plugins: isProd ? [...commonPlugins, ...prodPlugins] : commonPlugins,
  module: {
    // `loaders` (not `rules`) — this config targets an older webpack line.
    loaders: [
      {
        test: /\.css$/,
        use: ExtractTextPlugin.extract({
          use: ['css-loader', 'resolve-url-loader']
        })
      },
      {
        test: /\.ts$/,
        loader: 'awesome-typescript-loader'
      }
    ]
  },
  // Stub Node built-ins the browser bundle cannot provide.
  node: {
    fs: 'empty',
    module: 'empty'
  },
  resolve: {
    extensions: ['.ts', '.js', '.json']
  }
}
|
#!/bin/bash
# Re-attach to the project's development container: read the container id
# recorded next to this script, start the container if it is stopped, and
# open an interactive shell inside it.
current_dir=`pwd -P`
script_dir="$( cd "$(dirname "$0")" ; pwd -P )"
# Use $(...) instead of legacy backticks for the id lookup (SC2006).
container_id=$(cat "${script_dir}/docker_id")
if [ "${container_id}" == "" ]
then
	echo "Error: No docker id found in '${script_dir}/docker_id'"
	exit 1
fi
# Check if the container is running
if [ "$(sudo docker ps -qf "id=${container_id}")" == "" ]
then
	echo "Starting previously stopped container..."
	sudo docker start "${container_id}"
fi
# Joining the container. Quote the id so an empty/garbled file can't
# expand into stray arguments (SC2086; previously unquoted).
sudo docker exec -ti "${container_id}" bash
|
<reponame>Cecilxx/echarts-taro3-react<filename>src/pages/line/index.tsx
// import Taro from "@tarojs/taro";
import React, { Component } from "react";
import { View } from "@tarojs/components";
import { EChart } from "../../echarts-taro3-react";
import "./index.less";
export default class Line extends Component {
  // Imperative handle to the mounted EChart, assigned by refLineChart.
  // Typed `any` because the chart component's instance type is not
  // exported here — TODO confirm and tighten.
  lineChart: any;
  componentDidMount() {
    // Minimal chart option: default axes plus one line series of [x, y]
    // points, pushed to the chart once it has mounted.
    const option = {
      xAxis: {},
      yAxis: {},
      series: [
        {
          data: [
            [20, 120],
            [50, 200],
            [40, 50],
          ],
          type: "line",
        },
      ],
    };
    this.lineChart.refresh(option);
  }
  // Ref callback storing the chart instance for the refresh call above.
  refLineChart = (node) => (this.lineChart = node);
  render() {
    return (
      <View className='line-chart'>
        <EChart ref={this.refLineChart} canvasId='line-chart' />
      </View>
    );
  }
}
|
import itertools
from itertools import chain, combinations
from scipy.optimize import linprog
import numpy as np
def powerset(iterable):
    """powerset([1,2,3]) --> () (1,) (2,) (3,) (1,2) (1,3) (2,3) (1,2,3)"""
    items = list(iterable)
    by_size = (combinations(items, size) for size in range(len(items) + 1))
    return chain.from_iterable(by_size)


def mix_nash_powerset(x, y):
    """Yield candidate support pairs (rows, cols) for an x-by-y game,
    largest supports first, skipping pairs that are empty on either side
    or whose combined size is at most 2."""
    row_supports = list(powerset(range(x)))[::-1]
    col_supports = list(powerset(range(y)))[::-1]
    for rows, cols in itertools.product(row_supports, col_supports):
        if rows and cols and len(rows) + len(cols) > 2:
            yield rows, cols
def giveProperTable(table, row_profile, column_profile):
    """Return a copy of `table` with the listed rows (axis 0) and columns
    (axis 1) removed."""
    without_rows = np.delete(table.copy(), row_profile, 0)
    return np.delete(without_rows, column_profile, 1)
def find_mix_equilibrium_specified_strategy(table, row_profile, column_profile, row_size, column_size):
    # Feasibility LP for a mixed equilibrium whose supports are exactly
    # row_profile / column_profile. Variable order is
    # [column probabilities..., row probabilities...], matching how the
    # caller unpacks a.x.
    not_row_profile = set(range(row_size)) - set(row_profile)
    not_column_profile = set(range(column_size)) - set(column_profile)
    row_profile_size = len(row_profile)
    column_profile_size = len(column_profile)
    # Pure feasibility: zero objective.
    c = [0] * (row_profile_size + column_profile_size)
    A_eq = []
    b_eq = []
    A_ub = []
    b_ub = []
    # Every supported row strategy must earn the same expected payoff
    # (differences against the first supported row are forced to 0)...
    for row in row_profile:
        temp = []
        for column in column_profile:
            temp.append(table[row, column, 0] - table[row_profile[0], column, 0])
        temp += ([0] * row_profile_size)
        A_eq += [temp]
        b_eq.append(0)
    # ...and likewise every supported column strategy.
    for column in column_profile:
        temp = [0] * column_profile_size
        for row in row_profile:
            temp.append(table[row, column, 1] - table[row, column_profile[0], 1])
        A_eq += [temp]
        b_eq.append(0)
    # Unsupported rows must not do strictly better than supported ones.
    for row in not_row_profile:
        temp = []
        for column in column_profile:
            temp.append(table[row, column, 0] - table[row_profile[0], column, 0])
        temp += ([0] * row_profile_size)
        A_ub += [temp]
        b_ub += [0]
    # Same for unsupported columns.
    for column in not_column_profile:
        temp = [0] * column_profile_size
        for row in row_profile:
            temp.append(table[row, column, 1] - table[row, column_profile[0], 1])
        A_ub += [temp]
        b_ub += [0]
    # Each support's probabilities must sum to 1.
    A_eq += [[1] * column_profile_size + [0] * row_profile_size]
    A_eq += [[0] * column_profile_size + [1] * row_profile_size]
    b_eq += [1, 1]
    # tabled = giveProperTable(table, list(not_row_profile), list(not_column_profile))
    # NOTE(review): pairwise fancy indexing — this pairs row_profile[k]
    # with column_profile[k] instead of taking the cross-product
    # (np.ix_(row_profile, column_profile) would), and requires the two
    # profiles to be the same length. Confirm this is intended; the
    # commented-out giveProperTable call suggests a cross-product was.
    tabled = table[row_profile, column_profile, :]
    a = linprog(c, A_eq=A_eq, b_eq=b_eq,
                A_ub=A_ub if A_ub else None,
                b_ub=b_ub if b_ub else None,
                )
    # Accept only if the LP is feasible AND the restricted sub-game has no
    # pure equilibrium.
    b = find_nash_equilibrium(tabled)
    return a.x, (a.success and not b)
def find_mixed_nash_equilibrium(table):
    """Search all candidate support pairs and return a pair of mixed
    strategies (p1, p2) as probability lists over rows and columns.

    The loop does not break early: if several supports are feasible, the
    last one found (smallest supports, given the enumeration order) wins.
    Returns ([], []) when no feasible support pair exists.
    """
    payoff = np.array(table)
    n_rows, n_cols, _ = payoff.shape
    p1, p2 = [], []
    for rows, cols in mix_nash_powerset(n_rows, n_cols):
        solution, ok = find_mix_equilibrium_specified_strategy(payoff, rows, cols, n_rows, n_cols)
        if not ok:
            continue
        p1 = [0] * n_rows
        p2 = [0] * n_cols
        split = len(cols)
        # Solution vector layout: column-player probs first, then row-player.
        for idx, prob in zip(cols, solution[:split].tolist()):
            p2[idx] = prob
        for idx, prob in zip(rows, solution[split:].tolist()):
            p1[idx] = prob
    return p1, p2
def find_nash_equilibrium(table):
    """Return every pure-strategy Nash equilibrium of a bimatrix game as
    1-based [row, column] pairs, in row-major order.

    A cell is an equilibrium when the row player's payoff is maximal in its
    column and the column player's payoff is maximal in its row.
    """
    payoff = np.array(table)
    row_payoff = payoff[:, :, 0]
    col_payoff = payoff[:, :, 1]
    best_for_row = row_payoff.max(axis=0)   # row player's best reply per column
    best_for_col = col_payoff.max(axis=1)   # column player's best reply per row
    is_nash = (row_payoff == best_for_row[np.newaxis, :]) & \
              (col_payoff == best_for_col[:, np.newaxis])
    return [[r + 1, c + 1] for (r, c), hit in np.ndenumerate(is_nash) if hit]
def main(table):
    """Compute both pure and mixed Nash equilibria for a bimatrix game.

    Returns [pure_equilibria, p1_mixed_strategy, p2_mixed_strategy].
    """
    pure = find_nash_equilibrium(table)
    p1_mix, p2_mix = find_mixed_nash_equilibrium(table)
    return [pure, p1_mix, p2_mix]
#
# main([[[3, 4], [7, 6], [1, 5]], [[2, 4], [1, 4], [2, 6]]])
# table = [
# [[1, -1], [-1, 1]],
# [[-1, 1], [1, -1]]
# ]
# table2 = [
# [[9, 1], [2, 8]],
# [[3, 7], [6, 4]]
# ]
# table3 = [
# [[2, -3], [1, 2]],
# [[1, 1], [4, -1]]
# ]
# table4 = [
# [[3, 1], [1, 2]],
# [[2, 3], [3, 4]],
# [[2, 4], [3, 1]],
# ]
#
# print(main(table4))
# table = np.array(table)
# table2 = np.array(table2)
# table3 = np.array(table3)
# table4 = np.array(table4)
# Manual smoke tests: each call prints the computed result
# [pure equilibria, P1 mixed strategy, P2 mixed strategy] followed by the
# hard-coded expected value for eyeball comparison (nothing is asserted).
print("new test")
print(main([[[3, 2], [2, 2], [3, 1]],
            [[2, 4], [3, 1], [1, 3]],
            [[3, 1], [3, 3], [2, 4]],
            [[4, 4], [3, 3], [3, 1]]]))
print([[[4, 1]], [0, 0, 0.5, 0.5], [0, 1.0, 0]])
print("new test")
print(main([[[3, 1], [3, 3], [2, 3], [2, 1]],
            [[3, 4], [3, 1], [1, 4], [3, 2]],
            [[3, 1], [3, 2], [4, 2], [2, 4]]]))
print([[[1, 2], [2, 1]], [0, 0.5, 0.5], [0, 0, 0.25, 0.75]])
print("new test")
print(main([[[3, 2], [4, 1]],
            [[4, 2], [3, 4]],
            [[3, 1], [4, 3]]]))
print([[[3, 2]], [0.6666666666666666, 0.3333333333333333, 0], [0.5, 0.5]])
print("new test")
print(main([[[3, 2], [3, 4], [3, 2]],
            [[3, 3], [1, 2], [3, 4]],
            [[3, 3], [3, 4], [1, 1]]]))
print([[[1, 2], [2, 3], [3, 2]], [0, 0.5, 0.5], [1.0, 0, 0]])
print("new test")
print(main([[[1, 4], [4, 3], [3, 3], [4, 1]],
            [[4, 4], [1, 3], [3, 1], [2, 4]],
            [[4, 2], [4, 2], [4, 3], [3, 4]]]))
print([[[2, 1]], [0.33333333333333337, 0, 0.6666666666666666], [0, 0, 0.5, 0.5]])
print("new test")
print(main([[[2, 1], [4, 2], [1, 2]],
            [[1, 1], [1, 4], [3, 4]],
            [[2, 3], [4, 3], [2, 3]],
            [[3, 4], [4, 3], [1, 1]]]))
print([[[1, 2], [2, 3], [3, 2], [4, 1]], [0, 0, 1.0, 0], [0.5, 0, 0.5]])
print("new test")
print(main([[[4, 4], [2, 3], [3, 4], [4, 1]],
            [[1, 3], [3, 2], [4, 3], [3, 4]],
            [[4, 1], [2, 3], [1, 2], [3, 3]]]))
print([[[1, 1]], [0.25, 0.75, 0], [0, 0, 0.5, 0.5]])
print("new test")
print(main([[[3, 4], [1, 4], [1, 2], [3, 3]],
            [[3, 2], [4, 2], [2, 4], [3, 3]]]))
print([[[1, 1], [2, 3]], [0.5, 0.5], [0, 0, 0, 1.0]])
print("new test")
print(main([[[2, 1], [3, 2], [1, 3]],
            [[2, 4], [2, 1], [4, 1]],
            [[1, 4], [3, 3], [1, 1]]]))
print([[[2, 1]], [0.5, 0, 0.5], [0, 1.0, 0]])
print("new test")
print(main([[[1, 2], [4, 3], [2, 3]],
            [[1, 4], [2, 3], [3, 3]],
            [[2, 1], [4, 1], [2, 2]]]))
print([[[1, 2]], [0, 0.5, 0.5], [0.5, 0, 0.5]])
|
#!/bin/bash
# Registers the monitoring-pn Angular plugin's route in plugins.routing.ts by
# inserting a route object just before the "// INSERT ROUTES HERE" marker.
if [ ! -d "/var/www/microting/" ]; then
    export plugin_count=0
else
    export current_folder=`pwd`
    cd /var/www/microting
    # Count Angular plugin folders already installed on this host.
    export plugin_count=`ls -lah | grep angular | grep plugin | wc -l`
    cd $current_folder
fi
export TAB=$'\t'
# With other plugins already registered the new entry must start with a comma
# so it extends the existing routes array; otherwise it is the first entry.
if (( $plugin_count > 1 )); then
    sed '/\/\/ INSERT ROUTES HERE/i ,{' src/app/plugins/plugins.routing.ts -i
    sed '/\/\/ INSERT ROUTES HERE/i path: "monitoring-pn",' src/app/plugins/plugins.routing.ts -i
    sed '/\/\/ INSERT ROUTES HERE/i canActivate: [AuthGuard],' src/app/plugins/plugins.routing.ts -i
    sed '/\/\/ INSERT ROUTES HERE/i loadChildren: "./modules/monitoring-pn/monitoring-pn.module#MonitoringPnModule"' src/app/plugins/plugins.routing.ts -i
    sed '/\/\/ INSERT ROUTES HERE/i }' src/app/plugins/plugins.routing.ts -i
else
    sed '/\/\/ INSERT ROUTES HERE/i {' src/app/plugins/plugins.routing.ts -i
    sed '/\/\/ INSERT ROUTES HERE/i path: "monitoring-pn",' src/app/plugins/plugins.routing.ts -i
    sed '/\/\/ INSERT ROUTES HERE/i canActivate: [AuthGuard],' src/app/plugins/plugins.routing.ts -i
    sed '/\/\/ INSERT ROUTES HERE/i loadChildren: "./modules/monitoring-pn/monitoring-pn.module#MonitoringPnModule"' src/app/plugins/plugins.routing.ts -i
    sed '/\/\/ INSERT ROUTES HERE/i }' src/app/plugins/plugins.routing.ts -i
    # Normalize double quotes to single quotes for the project's lint style.
    # NOTE(review): this normalization runs only in the first-plugin branch -
    # confirm whether that asymmetry is intentional.
    sed "s/\"/'/g" src/app/plugins/plugins.routing.ts -i
fi
|
<reponame>MarcwL22/react-native-template-ts-styled<filename>template/src/pages/home.tsx
import React from 'react';
import { Typography, Box, Button } from 'components';
// Store
import { useStore } from 'store';
import { useActions } from '@actions';
/**
 * Home page: shows static typography samples and a button that toggles the
 * app theme between 'dark' and 'white' via the store's actions.
 */
const HomePage: React.FC = () => {
  const { dispatch, state } = useStore();
  const { setTheme } = useActions({ dispatch });

  // Flip to the other theme based on the current store state.
  const changeTheme = () =>
    state.theme === 'dark' ? setTheme('white') : setTheme('dark');

  return (
    <Box backgroundColor="background" height="100%">
      <Typography.H1>Title</Typography.H1>
      <Typography.H2>SubTitle</Typography.H2>
      <Typography.Text>CommonText</Typography.Text>
      <Button onPress={changeTheme} title="Change Theme" />
    </Box>
  );
};

export default HomePage;
|
#!/bin/sh

#
# Vivado(TM)
# runme.sh: a Vivado-generated Runs Script for UNIX
# Copyright 1986-2019 Xilinx, Inc. All Rights Reserved.
#

# Generated on Windows: the unconditional exit below makes the script a no-op
# until the Windows-style tool paths further down are replaced for this host.
echo "This script was generated under a different operating system."
echo "Please update the PATH and LD_LIBRARY_PATH variables below, before executing this script"
exit

# NOTE(review): the generated PATH values below still contain Windows drive
# letters and ';' separators - they must be rewritten before removing the
# exit above.
if [ -z "$PATH" ]; then
  PATH=C:/Xilinx/SDK/2019.1/bin;C:/Xilinx/Vivado/2019.1/ids_lite/ISE/bin/nt64;C:/Xilinx/Vivado/2019.1/ids_lite/ISE/lib/nt64:C:/Xilinx/Vivado/2019.1/bin
else
  PATH=C:/Xilinx/SDK/2019.1/bin;C:/Xilinx/Vivado/2019.1/ids_lite/ISE/bin/nt64;C:/Xilinx/Vivado/2019.1/ids_lite/ISE/lib/nt64:C:/Xilinx/Vivado/2019.1/bin:$PATH
fi
export PATH

if [ -z "$LD_LIBRARY_PATH" ]; then
  LD_LIBRARY_PATH=
else
  LD_LIBRARY_PATH=:$LD_LIBRARY_PATH
fi
export LD_LIBRARY_PATH

# Run directory and log file for this synthesis run.
HD_PWD='C:/Users/magallardo1/Desktop/Lab0_Fixed/Lab0_Fixed.runs/synth_1'
cd "$HD_PWD"

HD_LOG=runme.log
/bin/touch $HD_LOG

ISEStep="./ISEWrap.sh"
# Run one tool step through ISEWrap.sh, appending output to the run log and
# aborting the whole script on a non-zero exit status.
EAStep()
{
     $ISEStep $HD_LOG "$@" >> $HD_LOG 2>&1
     if [ $? -ne 0 ]
     then
         exit
     fi
}

EAStep vivado -log top.vds -m64 -product Vivado -mode batch -messageDb vivado.pb -notrace -source top.tcl
|
#!/bin/sh
set -e

# CocoaPods-generated embed-frameworks helper: copies each built framework
# into the app bundle, strips invalid architectures and re-signs.
echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"

# Copies the framework at $1 into the app's Frameworks directory, resolving
# symlinks, stripping unwanted architectures and code-signing the result.
install_framework()
{
  # Locate the framework: prefer the built-products path, then its basename
  # there, then the literal argument.
  if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$1"
  elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
  elif [ -r "$1" ]; then
    local source="$1"
  fi

  local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

  # Resolve a symlinked build product to its real path before copying.
  if [ -L "${source}" ]; then
      echo "Symlinked..."
      source="$(readlink "${source}")"
  fi

  # use filter instead of exclude so missing patterns dont' throw errors
  echo "rsync -av --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
  rsync -av --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"

  # Find the executable inside the copied bundle (or the bare binary).
  local basename
  basename="$(basename -s .framework "$1")"
  binary="${destination}/${basename}.framework/${basename}"
  if ! [ -r "$binary" ]; then
    binary="${destination}/${basename}"
  fi

  # Strip invalid architectures so "fat" simulator / device frameworks work on device
  if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
    strip_invalid_archs "$binary"
  fi

  # Resign the code if required by the build settings to avoid unstable apps
  code_sign_if_enabled "${destination}/$(basename "$1")"

  # Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
  # NOTE(review): [[ ]] and ${PIPESTATUS[0]} are bashisms under a /bin/sh
  # shebang - fine on macOS where /bin/sh is bash, but confirm before porting.
  if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
    local swift_runtime_libs
    swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u && exit ${PIPESTATUS[0]})
    for lib in $swift_runtime_libs; do
      echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
      rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
      code_sign_if_enabled "${destination}/${lib}"
    done
  fi
}
# Signs a framework with the provided identity
# $1 - path to the bundle/binary to sign. Honors CODE_SIGNING_REQUIRED /
# CODE_SIGNING_ALLOWED and backgrounds the codesign invocation when
# COCOAPODS_PARALLEL_CODE_SIGN is "true" (the caller waits at script end).
code_sign_if_enabled() {
  # BUGFIX/portability: the script's shebang is /bin/sh, but the original
  # used the bashisms '-a' and '==' inside [ ]; use POSIX '=' and chained
  # [ ] && [ ] so the test also works under dash/posh.
  if [ -n "${EXPANDED_CODE_SIGN_IDENTITY}" ] && [ "${CODE_SIGNING_REQUIRED}" != "NO" ] && [ "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
    # Use the current code_sign_identity
    echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
    local code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS} --preserve-metadata=identifier,entitlements '$1'"
    if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" = "true" ]; then
      code_sign_cmd="$code_sign_cmd &"
    fi
    echo "$code_sign_cmd"
    eval "$code_sign_cmd"
  fi
}
# Strip invalid architectures
# Removes from binary $1 every architecture not listed in VALID_ARCHS so a
# fat simulator/device framework can be embedded in a device build.
strip_invalid_archs() {
  binary="$1"
  # Get architectures for current file
  archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | rev)"
  stripped=""
  for arch in $archs; do
    if ! [[ "${VALID_ARCHS}" == *"$arch"* ]]; then
      # Strip non-valid architectures in-place
      lipo -remove "$arch" -output "$binary" "$binary" || exit 1
      stripped="$stripped $arch"
    fi
  done
  # Report what was removed, if anything.
  if [[ "$stripped" ]]; then
    echo "Stripped $binary of architectures:$stripped"
  fi
}
# Embed the pod frameworks for the active build configuration.
if [[ "$CONFIGURATION" == "Debug" ]]; then
  install_framework "$BUILT_PRODUCTS_DIR/KPLLogONE/KPLLogONE.framework"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
  install_framework "$BUILT_PRODUCTS_DIR/KPLLogONE/KPLLogONE.framework"
fi
# Wait for any backgrounded parallel code-sign jobs before finishing.
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
  wait
fi
|
#!/bin/bash
source scripts/common.sh
# Build the javadoc for one release and install it under the site tree.
# $1 - version tag: "latest" builds from the current working tree; anything
#      else clones the matching apache/bookkeeper release branch into /tmp.
function build_javadoc() {
    version=$1
    echo "Building the javadoc for version ${version}."
    if [ "$version" == "latest" ]; then
        javadoc_gen_dir="${ROOT_DIR}/build/docs/javadoc"
        use_gradle=true
        cd $ROOT_DIR
    else
        rm -rf /tmp/bookkeeper-${version}
        git clone https://github.com/apache/bookkeeper -b "release-${version}" /tmp/bookkeeper-${version}
        cd /tmp/bookkeeper-${version}
        # Releases that still carry a pom.xml build with Maven; newer ones
        # use the Gradle build and a different output directory.
        if [[ -f "pom.xml" ]]; then
            use_gradle=false
            javadoc_gen_dir="/tmp/bookkeeper-${version}/target/site/apidocs"
        else
            use_gradle=true
            javadoc_gen_dir="/tmp/bookkeeper-${version}/build/docs/javadoc"
        fi
    fi
    # Replace any previously installed javadoc for this version.
    javadoc_dest_dir="${ROOT_DIR}/site/docs/${version}/api/javadoc"
    rm -rf $javadoc_dest_dir
    if [[ "$use_gradle" == "true" ]]; then
        ./gradlew generateApiJavadoc
    else
        mvn clean install javadoc:aggregate -DskipTests
    fi
    mv $javadoc_gen_dir $javadoc_dest_dir
    echo "Built the javadoc for version ${version}."
}
# get arguments
version=$1
shift
# Dispatch: "latest" builds one doc set, "all" rebuilds every version found
# under site/docs, anything else is rejected with a message.
case "${version}" in
    latest)
        build_javadoc ${version}
        ;;
    all)
        for d in `ls ${ROOT_DIR}/site/docs`; do
            build_javadoc $d
        done
        ;;
    *)
        echo "Unknown version '${version}' to build doc"
        ;;
esac
|
#set -x
#w
source proc/psqlproc.sh
# Fail fast (via logfail) when mandatory environment variables are missing.
verifyvariable() {
    [ -z "$DBHOST" ] && logfail "Variable DBHOST not defined"
    [ -z "$KILLINTERVAL" ] && logfail "Variable KILLINTERVAL not defined"
}
# Thin wrappers over psqlscript, kept separate so call sites read clearly.
# $1 - SQL script that drops the table.
rundroptable() {
    psqlscript $1
}

# $1 - SQL script that creates the table.
runcreatetable() {
    psqlscript $1
}
# Cancel statements by $DBUSER on $DBNAME that have been active longer than
# $KILLINTERVAL.
killlong() {
    local stmt="SELECT pid FROM pg_stat_activity WHERE datname='$DBNAME' and usename='$DBUSER' and state='active' and (now() - pg_stat_activity.query_start) > interval '$KILLINTERVAL'"
    psqlcommand "$stmt" | while read PID
    do
        [ -n "$PID" ] && log "Killing = $PID"
        # NOTE(review): pg_cancel_backend only cancels the current query;
        # use pg_terminate_backend if the session itself should be killed.
        [ -n "$PID" ] && psqlcommand "SELECT pg_cancel_backend($PID)"
    done
}
# Load a pipe-delimited data file into a table via psql \copy.
# $1 - target table name; $2 - input data file.
# Requires LOADBUFFER (scratch file path). Strips the trailing '|' from every
# input line so column counts match, then TRUNCATEs and reloads the table.
loadfile() {
    local -r tbl=$1
    local -r file=$2
    local -r TMP=`crtemp`
    # Validate LOADBUFFER before using it as the transform target.
    required_var LOADBUFFER
    local -r TMPFILE=$LOADBUFFER
    log "Transforming $file to $TMPFILE removing the last pipe"
    # Quote the sed script and the file names so paths with spaces survive.
    sed 's/|$//g' "$file" >"$TMPFILE"
    [ $? -eq 0 ] || logfail "Failed transforming load input file"
    # BUGFIX: the original logged "$TEMPFILE", an undefined variable - the
    # real transformed file is $TMPFILE.
    log "OK, completed, now loading $TMPFILE"
    cat <<EOF >$TMP
TRUNCATE $tbl;
\copy $tbl FROM '$TMPFILE' ( DELIMITER('|'), NULL(''), ENCODING 'latin1' );
EOF
    psqlscript $TMP
}
# Sanity-check connectivity by listing the databases.
testconnection() {
    psqlcommand "\l"
}

# Run a query file: first cancel long-running statements, echo the query to
# the log, then execute it through the JDBC runner.
runquery() {
    killlong
    cat $1
    jdbcrunquery $1
}

verifyvariable
# Exported configuration consumed by the sourced psql helper functions.
export DB=$DBNAME
export IFEXIST="IF EXISTS"
export REMOVEQUERYDAYS=X
export REQUIREDCOMMANDS="psql"
export NULLLAST=X
|
<filename>src/features/commonComponents/Input.js
import React from "react";
import PropTypes from "prop-types";
import styled from "styled-components";
// Themed text input: full-width, ellipsized overflow, with colors, focus
// border and transition taken from the active styled-components theme.
const StyledInput = styled.input`
  font-size: 1rem;
  border: 2px solid transparent;
  padding: 0.5rem;
  overflow: hidden;
  text-overflow: ellipsis;
  width: 100%;
  line-height: 1.5;
  outline: 0;
  color: ${props => props.theme.primary};
  transition: ${props => props.theme.transitions.easeIn};
  background-color: ${props => props.theme.backgroundInput};
  &:focus {
    border-color: ${props => props.theme.borderColor};
  }
  ::placeholder {
    color: ${props => props.theme.placeholder};
  }
`;
function Input(props) {
const {
name,
disabled,
placeholder,
type,
onChange,
onBlur,
onPressEnter,
value,
children
} = props;
function handleOnPressEnter(event) {
const codes = {
enter: 13
};
if (event.code === codes.enter) {
onPressEnter();
}
}
return (
<StyledInput
name={name}
disabled={disabled}
placeholder={placeholder}
type={type}
onChange={onChange}
onBlur={onBlur}
onKeyDown={handleOnPressEnter}
value={value}
>
{children}
</StyledInput>
);
}
// Runtime prop contract; `onPressEnter` fires when Enter is pressed inside
// the field.
Input.propTypes = {
  name: PropTypes.string,
  disabled: PropTypes.bool,
  placeholder: PropTypes.string,
  type: PropTypes.oneOf(["text", "email", "password"]),
  onChange: PropTypes.func,
  onBlur: PropTypes.func,
  onPressEnter: PropTypes.func,
  value: PropTypes.string
};

Input.defaultProps = {
  disabled: false,
  type: "text"
};
export default Input;
|
package weixin.member.controller;
import com.google.gson.Gson;
import org.apache.log4j.Logger;
import org.jeecgframework.core.common.controller.BaseController;
import org.jeecgframework.core.common.model.json.DataGrid;
import org.jeecgframework.core.util.LogUtil;
import org.jeecgframework.core.util.ResourceUtil;
import org.jeecgframework.tag.core.easyui.TagUtil;
import org.jeecgframework.web.system.service.SystemService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.servlet.ModelAndView;
import weixin.guanjia.account.entity.WeixinAccountEntity;
import weixin.guanjia.account.service.WeixinAccountServiceI;
import weixin.guanjia.account.util.CheckPic;
import weixin.liuliangbao.jsonbean.MoreFlow.attentionAndsignInFlowEntity;
import weixin.liuliangbao.jsonbean.MoreFlow.gameFlow;
import weixin.liuliangbao.jsonbean.MoreFlow.shareFlow;
import weixin.liuliangbao.weigatedoor.entity.WeidoorpptEntity;
import weixin.member.entity.WeixinMemberEntity;
import weixin.member.service.WeixinMemberServiceI;
import weixin.member.util.MoreFlowListService;
import weixin.oauth2.AdvancedUtil;
import weixin.oauth2.WeixinOauth2Token;
import weixin.source.controller.WeixinSourceController;
import weixin.tenant.entity.WeixinAcctEntity;
import weixin.tenant.entity.weixinAcctFlowAccountEntity;
import weixin.tenant.service.WeixinAcctServiceI;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.List;
/**
* Created by aa on 2015/12/9.()关注和签到送流量
*/
@Controller
@RequestMapping("/earnFlowController")
public class EarnFlowController extends BaseController {
    /** Logger for this class. */
    private static final Logger LOGGER = Logger.getLogger(EarnFlowController.class);
    @Autowired
    private WeixinAccountServiceI weixinAccountService; // WeChat public-account info service
    @Autowired
    private WeixinMemberServiceI weixinMemberService; // follower (member) management service
    @Autowired
    private WeixinAcctServiceI weixinAcctService; // merchant (tenant) account service
    @Autowired
    private SystemService systemService;
    // Latest status/diagnostic message.
    // NOTE(review): mutable state on a singleton Spring controller is shared
    // across concurrent requests - confirm this is intentional.
    private String message;

    public String getMessage() {
        return message;
    }

    public void setMessage(String message) {
        this.message = message;
    }
    /**
     * Renders the "earn more flow" landing page: loads the carousel images
     * configured for this public account (falling back to a global default
     * set when none exist) and forwards to the moreFlow view.
     *
     * NOTE(review): the HQL below is built by concatenating session data -
     * prefer positional query parameters.
     */
    @RequestMapping(params = "moreFlow")
    public ModelAndView earnFlowPage(HttpServletRequest request) {
        long startTime = System.currentTimeMillis(); // start timestamp used to measure execution time
        ModelAndView mav = new ModelAndView();
        String linkType = "更多赚取流量";
        // Account id + page location select which configured images to show.
        String accountid = (String) request.getSession().getAttribute("accountid");
        String nickname = (String) request.getSession().getAttribute("nickname");
        String headimgUrl = (String) request.getSession().getAttribute("headimgUrl");
        /* LOGGER.info(accountid);
        LOGGER.info(nickname);
        LOGGER.info(headimgUrl);*/
        String openId = request.getParameter("openId");
        request.setAttribute("openId", openId);
        request.setAttribute("accountid", accountid);
        // Look up the configured images by account id and page location.
        String hql = "from WeidoorpptEntity where accountid='" + accountid + "' and pageLocation='" + linkType + "'";
        List<WeidoorpptEntity> weidoorpptList = this.systemService.findHql(hql, null);
        LOGGER.info(weidoorpptList);
        // Prefix turning stored image names into absolute media URLs.
        String prefixUrl = ResourceUtil.getMediaUrlPrefix();
        LOGGER.info(prefixUrl);
        //TODO: collect the image URLs (each prefixed with prefixUrl) into a list
        List<WeidoorpptEntity> weidoorpptListResult = new ArrayList<WeidoorpptEntity>();
        if (weidoorpptList.size() > 0) {
            // Copy each entity, rewriting the picture URL to an absolute one.
            for (int i = 0; i < weidoorpptList.size(); i++) {
                WeidoorpptEntity weidoor = new WeidoorpptEntity();
                weidoor.setId(weidoorpptList.get(i).getId());
                weidoor.setTitle(weidoorpptList.get(i).getTitle());
                weidoor.setPictureName(weidoorpptList.get(i).getPictureName());
                weidoor.setPictureUrl(prefixUrl + "/" + weidoorpptList.get(i).getPictureUrl());
                weidoor.setJumpType(weidoorpptList.get(i).getJumpType());
                weidoor.setJumpUrl(weidoorpptList.get(i).getJumpUrl());
                weidoor.setOperatetime(weidoorpptList.get(i).getOperatetime());
                weidoor.setAccountid(weidoorpptList.get(i).getAccountid());
                weidoor.setDescription(weidoorpptList.get(i).getDescription());
                weidoor.setPageLocation(weidoorpptList.get(i).getPageLocation());
                weidoorpptListResult.add(weidoor);
            }
        } else {
            // No account-specific images: fall back to the global default set.
            String linkTypeDefalut = "首页默认";
            String hqlDefault = "from WeidoorpptEntity where pageLocation='" + linkTypeDefalut + "'";
            List<WeidoorpptEntity> weidoorpptListDefault = this.systemService.findHql(hqlDefault, null);
            LOGGER.info(weidoorpptListDefault);
            String prefixUrlDefault = ResourceUtil.getMediaUrlPrefix();
            LOGGER.info(prefixUrl);
            // NOTE(review): only the first default image is used, and get(0)
            // throws when no default rows exist - confirm seed data.
            WeidoorpptEntity weidoorDefault = new WeidoorpptEntity();
            weidoorDefault.setId(weidoorpptListDefault.get(0).getId());
            weidoorDefault.setTitle(weidoorpptListDefault.get(0).getTitle());
            weidoorDefault.setPictureName(weidoorpptListDefault.get(0).getPictureName());
            weidoorDefault.setPictureUrl(prefixUrl + "/" + weidoorpptListDefault.get(0).getPictureUrl());
            weidoorDefault.setJumpType(weidoorpptListDefault.get(0).getJumpType());
            weidoorDefault.setJumpUrl(weidoorpptListDefault.get(0).getJumpUrl());
            weidoorDefault.setOperatetime(weidoorpptListDefault.get(0).getOperatetime());
            weidoorDefault.setAccountid(weidoorpptListDefault.get(0).getAccountid());
            weidoorDefault.setDescription(weidoorpptListDefault.get(0).getDescription());
            weidoorDefault.setPageLocation(weidoorpptListDefault.get(0).getPageLocation());
            weidoorpptListResult.add(weidoorDefault);
        }
        /*//图片名称和url拼接号url
        String imagePathName = prefixUrl + "/" + doorImgUrl;
        LOGGER.info(imagePathName);
        */
        mav.addObject("weidoorpptlist", weidoorpptListResult);
        mav.setViewName("weixin/member/moreFlow");
        long endTime = System.currentTimeMillis(); // end timestamp for the timing log
        LOGGER.info("主页的goMain方法运行时间:----" + (endTime - startTime) + "ms");
        return mav;
        // return new ModelAndView("weixin/member/moreFlow");
    }
/**
* 用户点击关注时的跳转页面显示,跳转页面的时候是不需要添加注解 @ResponseBody
*
* @return
*/
@RequestMapping(params = "mysubscribe")
public ModelAndView subscribeAcctListPage(HttpServletRequest request) throws Exception {
StringBuilder subscribeAcctListPage = new StringBuilder();
long startTime = System.currentTimeMillis();//获取开始当前的时间 查询方法的执行时间
subscribeAcctListPage.append("方法开始时间_" + startTime + "ms" + "_"); //添加的日志
String bid = request.getParameter("bid");
if (bid.equals("1")) {
bid = "关注";
} else {
bid = "签到";
}
subscribeAcctListPage.append("bid" + bid + "_"); //添加的日志
String openId = request.getParameter("openId");
subscribeAcctListPage.append("获取的openId" + openId + "_"); //添加的日志
request.setAttribute("openId", openId);
request.setAttribute("bid", bid); //页面标题的显示,是显示关注还是签到
String messageInfo = null;
// 直接从前面进行获取
////从页面获取OpenId
// String mOpenId1 = request.getParameter("MopenId");
//
//// 模拟测试使用的效果图
//// String mOpenId1 = "ov5lNs4x6VIE2ZasPfsCyezGZZnc";
//// 根据用户关注某个商户获取到的隶属于这个商户的openId,来查询微信公众账号的信息
WeixinAccountEntity weixinAccountEntity = new WeixinAccountEntity();
WeixinMemberEntity weixinMemberEntity = new WeixinMemberEntity();
// weixinMemberEntity = weixinMemberService.findUniqueByProperty(WeixinMemberEntity.class, "openId", openId);
String account_id = request.getParameter("accountid");
if (account_id == null || account_id.equals("")) {
weixinMemberEntity = weixinMemberService.findUniqueByProperty(WeixinMemberEntity.class, "openId", openId);
account_id = weixinMemberEntity.getAccountId();
}
// Double flowvalue=
weixinAccountEntity = weixinAccountService.getEntity(WeixinAccountEntity.class, account_id);
//根据公众账号的实体可以获取到租户或者说是商户管理表
String id = weixinAccountEntity.getAcctId();
// String tennaId= ResourceUtil.getSessionUserName().getTenantId();
// weixinAcctEntity = weixinAcctService.getEntity(WeixinAcctEntity.class, tenantId);
WeixinAcctEntity weixinAcctEntity = new WeixinAcctEntity();
try {
weixinAcctEntity = weixinAcctService.getEntity(WeixinAcctEntity.class, id);
// weixinAcctEntity = weixinAcctService.findUniqueByProperty(WeixinAcctEntity.class, "id", tenantId);
if (weixinAcctEntity.getId() != null || !weixinAcctEntity.getId().equals("")) {
message = "查询结果不为空";
} else {
message = "查询结果为空值";
}
} catch (Exception e) {
e.printStackTrace();
}
//获取的商业类型
String accType = weixinAcctEntity.getBusinessType();
subscribeAcctListPage.append("获取的商业类型accType" + accType + "_"); //添加的日志
//获取省名
String provinceName = weixinAcctEntity.getProvince();
subscribeAcctListPage.append("获取的当前商户所在省provinceName" + provinceName + "_"); //添加的日志
String accId = weixinAcctEntity.getId(); //本省的商户的id
subscribeAcctListPage.append("本省的商户的id" + accId + "_"); //添加的日志
// List<weixinAcctFlowAccountEntity> lisFlow = new ArrayList<weixinAcctFlowAccountEntity>();
List<attentionAndsignInFlowEntity> lisFlow = new ArrayList<attentionAndsignInFlowEntity>();
// String hql0 = "select m from weixinAcctFlowAccountEntity m join m.weixinAcctEntity weixinAcctEntity where (m.countryFlowValue>0 OR m.provinceFlowValue>0) and weixinAcctEntity.province='" + provinceName + "' and weixinAcctEntity.businessType<>'" + accType + "'";
//查询数据库的sql语句
// String sql0 = " select m.*,r.flowValue from MerchantFlowAccount m join weixin_acct w Join weixin_account a join merchantFlowGiveRules r where r.merchantID=m.accountId and a.id=m.accountId and w.id=a.acct_id and(m.countryFlowValue>0 OR m.provinceFlowValue>0) and w.province='" + provinceName + "' and w.business_type<>'" + accType + "'and r.operateType='" + bid + "'";
// String sql0 = " select m.*,r.flowValue from MerchantFlowAccount m join weixin_acct w Join weixin_account a join merchantFlowGiveRules r where r.merchantID=m.accountId and a.id=m.accountId and w.id=a.acct_id and(m.countryFlowValue>0 OR m.provinceFlowValue>0) and w.province='" + provinceName + "' and w.business_type<>'1'and r.operateType='" + bid + "'";
// String sql0 = "select m.* from MerchantFlowAccount m join weixin_acct w where (m.countryFlowValue>0 OR m.provinceFlowValue>0) and w.province='江西省' and w.business_type<>'1'";
// String sql = " select w.*,r.flowValue from weixin_account w join MerchantFlowAccount m join weixin_acct a join merchantFlowGiveRules r where w.id=m.accountId and w.id=r.merchantID and(m.countryFlowValue>0 OR m.provinceFlowValue>0) and w.acct_id=a.id and a.province='" + provinceName + "' and (a.business_type<>'" + accType + "' or a.id='" + accId + "') and r.operateType='" + bid + "'";
String sql = " select w.*,r.flowValue from weixin_account w join MerchantFlowAccount m join weixin_acct a join merchantFlowGiveRules r where w.id=m.accountId and w.id=r.merchantID and(m.countryFlowValue>0 OR m.provinceFlowValue>0) and w.acct_id=a.id and (a.business_type<>'" + accType + "' or a.id='" + accId + "') and r.operateType='" + bid + "'"; //只是保留行业互斥
LogUtil.info("查询的是关注或者是签到的sql语句----------------------" + sql);
subscribeAcctListPage.append("查询的所有本省的并且商业类型不一样的但是包含自己的商户集合的sql语句" + sql + "_"); //添加的日志
LOGGER.info(sql);
Connection connection = null;
Statement stmt = null;
ResultSet es = null;
try {
// 创建的jdbc连接语句
connection = ConnectionsManager.getMysqlConn();
stmt = connection.createStatement();
es = stmt.executeQuery(sql);
es.last(); //指针移到最后一行
if (es.getRow() == 0) {
messageInfo = "暂时没有商家相关列表,不好意思";
}
es.beforeFirst(); //复位结果集
//获取读取配置文件的地址,从而加上图片的名字就是我们需要查找的地址
String prefixurl = ResourceUtil.getConfigByName("media.url.prefix");
while (es.next()) {
// weixinAcctFlowAccountEntity en = new weixinAcctFlowAccountEntity();
attentionAndsignInFlowEntity en = new attentionAndsignInFlowEntity();
en.setId(es.getString("id"));
en.setAccountName(es.getString("accountname"));
en.setLogoAccount(prefixurl + "/" + es.getString("logoAccount")); //企业logo的目录文件名
// en.setCountryFlowValue(es.getDouble("countryFlowValue"));
// en.setProvinceFlowValue(es.getDouble("provinceFlowValue"));
// en.setTenantId(es.getString("tenantId"));
// en.setAccountId(es.getString("accountId"));
// en.setQRcode(es.getString("QRcode"));
en.setFlowValue(es.getString("flowValue"));
lisFlow.add(en);
}
// }
// else {
// messageInfo = "暂时没有商家相关列表,不好意思";
// }
} catch (Exception e) {
e.printStackTrace();
} finally {
es.close();
stmt.close();
connection.close();
}
subscribeAcctListPage.append("查询的所有本省的并且商业类型不一样的但是包含自己的商户的个数" + lisFlow.size() + "_"); //添加的日志
request.setAttribute("message", messageInfo); //将提示信息写到request里面
request.setAttribute("listFor", lisFlow);
long endTime = System.currentTimeMillis();//获取结束的当前时间 检测程序的运行时间
subscribeAcctListPage.append("方法结束时间_" + endTime + "ms" + "_"); //添加的日志
long totaltime = endTime - startTime; //总耗时
subscribeAcctListPage.append("方法执行总的时间" + totaltime + "ms"); //添加的日志
LOGGER.info("earnFlowController的__subscribeAcctListPage__方法执行过程中的各个操作的输入输出参数以及结果_" + subscribeAcctListPage.toString());
if ("关注".equals(bid)) {
return new ModelAndView("weixin/member/attentionFlow/attentionFlow");
} else {
return new ModelAndView("weixin/member/SignIn/signInFlow"); //签到的跳转页面
}
}
    /** Renders the member "my page" view; no model data is attached. */
    @RequestMapping(params = "myPage")
    public ModelAndView myPage(HttpServletRequest request) {
        return new ModelAndView("weixin/member/myPage");
    }
    /**
     * Datagrid endpoint backing the merchant list shown when the user taps
     * "follow": returns merchants in the same province with a different
     * business type that still have flow budget.
     *
     * NOTE(review): uses a hard-coded test openId and concatenated HQL -
     * both should be replaced before production use.
     *
     * @param request  current request (the OpenId would normally come from it)
     * @param response datagrid JSON is written to this response
     * @param dataGrid paging/result container filled in by TagUtil
     */
    @RequestMapping(params = "mydatagrid")
    @ResponseBody
    public void myacctListForflow(HttpServletRequest request, HttpServletResponse response, DataGrid dataGrid) {
        // Read the OpenId from the page:
        // String mOpenId = request.getParameter("MopenId");
        // Hard-coded OpenId used for manual testing.
        String mOpenId1 = "ov5lNs4x6VIE2ZasPfsCyezGZZnc";
        // Resolve the member, then the public account it follows.
        WeixinAccountEntity weixinAccountEntity = new WeixinAccountEntity();
        WeixinMemberEntity weixinMemberEntity = new WeixinMemberEntity();
        weixinMemberEntity = weixinMemberService.findUniqueByProperty(WeixinMemberEntity.class, "openId", mOpenId1);
        String account_id = weixinMemberEntity.getAccountId();
        weixinAccountEntity = weixinAccountService.getEntity(WeixinAccountEntity.class, account_id);
        // The public account links to the merchant (tenant) record.
        String id = weixinAccountEntity.getAcctId();
        WeixinAcctEntity weixinAcctEntity = new WeixinAcctEntity();
        try {
            weixinAcctEntity = weixinAcctService.getEntity(WeixinAcctEntity.class, id);
            // NOTE(review): '||' here NPEs when getId() is null and otherwise
            // always takes the first branch - this likely should be '&&'.
            if (weixinAcctEntity.getId() != null || !weixinAcctEntity.getId().equals("")) {
                message = "查询结果不为空";
            } else {
                message = "查询结果为空值";
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
        String accType = weixinAcctEntity.getBusinessType();
        // Same-province merchants of a different business type that still
        // have country- or province-level flow budget.
        String provinceName = weixinAcctEntity.getProvince();
        List<weixinAcctFlowAccountEntity> lisFlow = new ArrayList<weixinAcctFlowAccountEntity>();
        String hql0 = "select m from weixinAcctFlowAccountEntity m join m.weixinAcctEntity weixinAcctEntity where (m.countryFlowValue>0 OR m.provinceFlowValue>0) and weixinAcctEntity.province='" + provinceName + "' and weixinAcctEntity.businessType<>'" + accType + "'";
        // Results are delivered to the page through the datagrid.
        lisFlow = systemService.findHql(hql0, null);
        dataGrid.setResults(lisFlow);
        TagUtil.datagrid(response, dataGrid);
    }
@RequestMapping(params = "acctPage")
public ModelAndView acctPage(HttpServletRequest request, HttpServletResponse response) {
String id = request.getParameter("id");
if (id != null || !id.equals("")) {
WeixinAcctEntity weixinAcctEntity = new WeixinAcctEntity();
weixinAcctEntity = weixinAcctService.getEntity(WeixinAcctEntity.class, id);
return new ModelAndView("weixin/member/acctPage");
} else {
message = "请先扫描二维码";
// LOGGER.info(response);
return new ModelAndView("weixin/member/testQRCode");
}
}
    /**
     * Builds the WeChat OAuth2 authorize URL for the selected merchant and
     * redirects the user there; the `state` parameter carries the account id
     * so goSubscribe can recover it after authorization.
     */
    @RequestMapping(params = "startSubscribe")
    public ModelAndView startSubscribe(HttpServletRequest request, HttpServletResponse response) {
        // The merchant's public-account id arrives as the "id" parameter.
        String accountid = request.getParameter("id");
        // Callback endpoint WeChat redirects to after the user authorizes.
        String rdUrl = "earnFlowController.do?goSubscribe";
        WeixinAccountEntity account = this.systemService.get(WeixinAccountEntity.class, accountid);
        // OAuth2 authorize URL template; placeholders are replaced below.
        String requestUrl = "https://open.weixin.qq.com/connect/oauth2/authorize?appid=APPID&redirect_uri=REDIRECT_URI&response_type=code&scope=SCOPE&state=STATE#wechat_redirect";
        requestUrl = requestUrl.replace("APPID", account.getAccountappid());
        requestUrl = requestUrl.replace("SCOPE", "snsapi_base"); // silent authorization: openid only
        requestUrl = requestUrl.replace("STATE", accountid);
        // Build the absolute redirect URI pointing back at this application.
        String path = request.getContextPath();
        String localhosturl = request.getScheme() + "://" + request.getServerName() + path + "/";
        String url = "";
        try {
            url = URLEncoder.encode(localhosturl + rdUrl, "utf-8");
        } catch (UnsupportedEncodingException e) {
            e.printStackTrace();
        }
        requestUrl = requestUrl.replace("REDIRECT_URI", url);
        return new ModelAndView("redirect:" + requestUrl);
    }
//展示拿到的用户信息
@RequestMapping(params = "goSubscribe")
public ModelAndView goSubscribe(HttpServletRequest request, HttpServletResponse response) throws Exception {
LOGGER.info("进入关注");
String accountid = request.getParameter("state");
//// 根据微信账号id查询商户名称
// WeixinAccountEntity weixinAccountEntity = weixinAccountService.getEntity(WeixinAccountEntity.class, accountid);
// String accid = weixinAccountEntity.getAcctId();
// WeixinAcctEntity weixinAcctEntity = weixinAcctService.getEntity(WeixinAcctEntity.class, accid);
//// 将获取的商户管理表信息存储到request中去
// request.setAttribute("weixinAcct", weixinAcctEntity);
String code = request.getParameter("code");
if (!"authdeny".equals(code)) {
// WeixinLotteryEntity hdEntity = this.systemService.get(WeixinLotteryEntity.class, hdid);
// String accountid = hdEntity.getAccountid();
WeixinAccountEntity account = this.systemService.get(WeixinAccountEntity.class, accountid);
String sql = "select flowValue from merchantFlowGiveRules where operateType='关注' and merchantID='" + accountid + "'";
Connection connection = null;
Statement stmt = null;
ResultSet es = null;
try {
// 创建的jdbc连接语句
connection = ConnectionsManager.getMysqlConn();
stmt = connection.createStatement();
es = stmt.executeQuery(sql);
while (es.next()) {
String valueSub = es.getString("flowValue");
request.setAttribute("flowValue", valueSub);
}
} catch (Exception e) {
e.printStackTrace();
} finally {
es.close();
stmt.close();
connection.close();
}
// 获取网页授权access_token
WeixinOauth2Token weixinOauth2Token = AdvancedUtil.getOauth2AccessToken(account.getAccountappid(),
account.getAccountappsecret(), code);
// 网页授权接口访问凭证
String accessToken = weixinOauth2Token.getAccessToken();
// 用户标识
String openId = weixinOauth2Token.getOpenId();
// 获取用户信息
// SNSUserInfo snsUserInfo = AdvancedUtil.getSnsUserInfo(accessToken, openId); // 设置要传递的参数
// request.getSession().setAttribute("hdId", hdid);
request.getSession().setAttribute("openId", openId);
request.getSession().setAttribute("accountid", accountid);
// request.getSession().setAttribute("nickname", snsUserInfo.getNickName());
//判断是否 绑定 是进入转盘,否进入 绑定 页面
WeixinMemberEntity weixinMemberEntity = null;
List<WeixinMemberEntity> weixinMemberEntities;
Gson gson = new Gson();
String hql1 = "from WeixinMemberEntity t where t.openId='" + openId + "' and t.accountId='" + accountid + "'";
weixinMemberEntities = weixinMemberService.findHql(hql1, null);
//二维码的地址,上传的二维码的图片的存放地址的读取配置路径是sysConfig.properties
String urlprefix = ResourceUtil.getConfigByName("media.url.prefix");
String impUrl = account.getQRcode();
if(CheckPic.checkImg(impUrl)){
impUrl = urlprefix + "/" + impUrl;
}
request.setAttribute("url", impUrl);
if(!impUrl.equals(null)){
String logoAccount = account.getLogoAccount();
if(CheckPic.checkImg(logoAccount)){
logoAccount = urlprefix + "/" + logoAccount;
}
request.setAttribute("logo", logoAccount);
} else {
request.setAttribute("logo", "plug-in/liuliangbao/css/0422/images/logo-1.png");
}
request.setAttribute("accountName", account.getAccountname());
// request.setAttribute();
if (weixinMemberEntities.size() > 0) {
weixinMemberEntity = weixinMemberEntities.get(0);
// if(weixinMemberEntities.get(0))
}
if (weixinMemberEntity != null) {
if (weixinMemberEntity.getSubscribe() == "1") {
return new ModelAndView("weixin/member/attentionPublicNum");
}
}
request.setAttribute("openId", openId);
// request.setAttribute("nickname", snsUserInfo.getNickName());
return new ModelAndView("weixin/member/NoattentionPublicNum");
//request.setAttribute("acc ");
} else {
return new ModelAndView("common/404");
}
}
/**
* 三个省略号显示的信息
* @param request
* @return
* @throws Exception
*/
@RequestMapping(params = "myallActivity")
public ModelAndView myallActivity(HttpServletRequest request) throws Exception{
String phoneNumber = request.getParameter("phone");
String accountid = request.getParameter("accountid");
String openId = request.getParameter("openId");
request.setAttribute("phoneNumber", phoneNumber); //用户的手机号
request.setAttribute("openId", openId); //用户的openId
// 根据accountid查询当前商户的商业类型
WeixinAccountEntity weixinAccountEntity = weixinAccountService.getEntity(WeixinAccountEntity.class, accountid);
WeixinAcctEntity weixinAcctEntity = weixinAcctService.getEntity(WeixinAcctEntity.class, weixinAccountEntity.getAcctId());
String tyep = weixinAcctEntity.getBusinessType();
String accId=weixinAccountEntity.getAcctId();
MoreFlowListService moreFlowService = new MoreFlowListService();
List<attentionAndsignInFlowEntity> moreGZEntities = moreFlowService.queryMoreGZ(accId, tyep); //关注签到的集合获取
List<attentionAndsignInFlowEntity> moreQDEntities = moreFlowService.queryMoreQD(accId, tyep); //关注签到的集合获取
List<gameFlow> moreGameEntities = moreFlowService.queryMoreGame(accId, tyep);
List<shareFlow> moreShareEntities = moreFlowService.queryMoreShare(accId, tyep);
request.setAttribute("mediaurl", ResourceUtil.getConfigByName("media.url.prefix") + "/");
request.setAttribute("url", ResourceUtil.getConfigByName("domain") + "/");
request.setAttribute("moreGZEntities", moreGZEntities);
request.setAttribute("moreQDEntities", moreQDEntities);
request.setAttribute("moreGameEntities", moreGameEntities);
request.setAttribute("moreShareEntities", moreShareEntities);
return new ModelAndView("weixin/member/moreFlowQuery"); //签到的跳转页面
}
}
|
from pandas import DataFrame
class CellMetabolismModel:
    """Thin wrapper around a metabolic model for ATP-maintenance simulations.

    The wrapped ``model`` is assumed to be usable as a context manager and the
    instance is expected to expose ``get_reaction`` — presumably supplied by a
    subclass or mixin; TODO confirm, it is not defined here.
    """

    def __init__(self, model):
        # The underlying constraint-based model (e.g. a COBRA-style model).
        self.model = model

    def optimize_growth_with_atp_maintenance(self, atp_m, atp_bd):
        # Placeholder: optimizing growth under a fixed ATP-maintenance level is
        # expected to be implemented elsewhere.
        pass

    def simulate_growth_with_atp_maintenance(self, atp_m, atp_bd):
        """Pin reaction ``atp_m`` to ``atp_bd`` and report the growth rate.

        Returns a one-row DataFrame with the growth rate and the applied ATP
        maintenance bound. The bound change is scoped to the ``with`` block.
        """
        growth_col = 'growth rate (h-1)'
        atp_col = 'ATP maintenance (mmolATP/gbiomass)'
        with self.model:
            # Fix both bounds so the maintenance flux is exactly atp_bd.
            self.get_reaction(atp_m).bounds = (atp_bd, atp_bd)
            growth = self.optimize_growth_with_atp_maintenance(atp_m, atp_bd)
        records = {growth_col: [growth], atp_col: [atp_bd]}
        return DataFrame.from_dict(records, orient='columns')
<filename>src/main/java/org/datacontract/schemas/_2004/_07/EEN_Merlin_Backend_Core_BO_PODService/ProfileReference.java
package org.datacontract.schemas._2004._07.EEN_Merlin_Backend_Core_BO_PODService;
import javax.xml.bind.JAXBElement;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElementRef;
import javax.xml.bind.annotation.XmlType;
/**
* <p>Java class for profileReference complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="profileReference">
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <element name="external" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
* <element name="internal" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
* <element name="type" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
* </sequence>
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "profileReference", propOrder = {
    "external",
    "internal",
    "type"
})
public class ProfileReference {

    // JAXBElement<String> (rather than plain String) lets the binding
    // distinguish an absent element from an explicitly nil one.
    @XmlElementRef(name = "external", namespace = "http://schemas.datacontract.org/2004/07/EEN.Merlin.Backend.Core.BO.PODService", type = JAXBElement.class, required = false)
    protected JAXBElement<String> external;
    @XmlElementRef(name = "internal", namespace = "http://schemas.datacontract.org/2004/07/EEN.Merlin.Backend.Core.BO.PODService", type = JAXBElement.class, required = false)
    protected JAXBElement<String> internal;
    @XmlElementRef(name = "type", namespace = "http://schemas.datacontract.org/2004/07/EEN.Merlin.Backend.Core.BO.PODService", type = JAXBElement.class, required = false)
    protected JAXBElement<String> type;

    /**
     * Gets the value of the external property.
     *
     * @return
     *     possible object is
     *     {@link JAXBElement }{@code <}{@link String }{@code >}
     *
     */
    public JAXBElement<String> getExternal() {
        return external;
    }

    /**
     * Sets the value of the external property.
     *
     * @param value
     *     allowed object is
     *     {@link JAXBElement }{@code <}{@link String }{@code >}
     *
     */
    public void setExternal(JAXBElement<String> value) {
        this.external = value;
    }

    /**
     * Gets the value of the internal property.
     *
     * @return
     *     possible object is
     *     {@link JAXBElement }{@code <}{@link String }{@code >}
     *
     */
    public JAXBElement<String> getInternal() {
        return internal;
    }

    /**
     * Sets the value of the internal property.
     *
     * @param value
     *     allowed object is
     *     {@link JAXBElement }{@code <}{@link String }{@code >}
     *
     */
    public void setInternal(JAXBElement<String> value) {
        this.internal = value;
    }

    /**
     * Gets the value of the type property.
     *
     * @return
     *     possible object is
     *     {@link JAXBElement }{@code <}{@link String }{@code >}
     *
     */
    public JAXBElement<String> getType() {
        return type;
    }

    /**
     * Sets the value of the type property.
     *
     * @param value
     *     allowed object is
     *     {@link JAXBElement }{@code <}{@link String }{@code >}
     *
     */
    public void setType(JAXBElement<String> value) {
        this.type = value;
    }

}
|
python transformers/examples/language-modeling/run_language_modeling.py --model_name_or_path train-outputs/1024+0+512-rare/model --tokenizer_name model-configs/1536-config --eval_data_file ../data/wikitext-103-raw/wiki.valid.raw --output_dir eval-outputs/1024+0+512-rare/1024+0+512-N-VB-IP-first-256 --do_eval --per_device_eval_batch_size 1 --dataloader_drop_last --augmented --augmentation_function replace_all_but_nouns_and_verbs_first_two_thirds_sixth --eval_function penultimate_sixth_eval |
// Package hostdb defines types and functions relevant to scanning hosts.
package hostdb // import "lukechampine.com/us/hostdb"
import (
"context"
"crypto/ed25519"
"encoding/hex"
"encoding/json"
"net"
"strings"
"time"
"github.com/pkg/errors"
"gitlab.com/NebulousLabs/Sia/modules"
"gitlab.com/NebulousLabs/Sia/types"
"lukechampine.com/us/renterhost"
)
// A HostPublicKey is the public key announced on the blockchain by a host. A
// HostPublicKey can be assumed to uniquely identify a host. Hosts should
// always be identified by their public key, since other identifying
// information (like a host's current IP address) may change at a later time.
//
// The format of a HostPublicKey is:
//
//	specifier:keydata
//
// Where specifier identifies the signature scheme used and keydata contains
// the hex-encoded bytes of the actual key. Currently, all public keys on Sia
// use the Ed25519 signature scheme, specified as "ed25519", so a key looks
// like "ed25519:<hex>".
type HostPublicKey string
// Key returns the keydata portion of a HostPublicKey, i.e. everything after
// the "specifier:" prefix. It returns "" if the key has no specifier.
func (hpk HostPublicKey) Key() string {
	if i := strings.IndexByte(string(hpk), ':'); i >= 0 {
		return string(hpk[i+1:])
	}
	return ""
}
// ShortKey returns the keydata portion of a HostPublicKey, truncated to 8
// characters. This is 32 bits of entropy, which is sufficient to prevent
// collisions in typical usage scenarios. A ShortKey is the preferred way to
// reference a HostPublicKey in user interfaces.
//
// Unlike a bare slice expression, ShortKey does not panic on malformed keys
// whose keydata is shorter than 8 characters; it returns the keydata as-is.
func (hpk HostPublicKey) ShortKey() string {
	k := hpk.Key()
	if len(k) < 8 {
		return k
	}
	return k[:8]
}
// Ed25519 returns the HostPublicKey as an ed25519.PublicKey. The returned key
// is invalid if hpk is not a Ed25519 key (decode errors yield an empty key).
func (hpk HostPublicKey) Ed25519() ed25519.PublicKey {
	keyBytes, _ := hex.DecodeString(hpk.Key())
	return ed25519.PublicKey(keyBytes)
}
// SiaPublicKey returns the HostPublicKey as a types.SiaPublicKey.
func (hpk HostPublicKey) SiaPublicKey() types.SiaPublicKey {
	var spk types.SiaPublicKey
	spk.LoadString(string(hpk))
	return spk
}
// HostKeyFromPublicKey converts an ed25519.PublicKey to a HostPublicKey.
func HostKeyFromPublicKey(pk ed25519.PublicKey) HostPublicKey {
	spk := types.SiaPublicKey{
		Algorithm: types.SignatureEd25519,
		Key:       pk,
	}
	return HostKeyFromSiaPublicKey(spk)
}
// HostKeyFromSiaPublicKey converts a types.SiaPublicKey to a HostPublicKey.
func HostKeyFromSiaPublicKey(spk types.SiaPublicKey) HostPublicKey {
	return HostPublicKey(spk.String())
}
// HostSettings are the settings reported by a host. They are unmarshalled
// from the host's settings RPC response (see Scan).
type HostSettings struct {
	AcceptingContracts     bool              `json:"acceptingContracts"`
	MaxDownloadBatchSize   uint64            `json:"maxDownloadBatchSize"`
	MaxDuration            types.BlockHeight `json:"maxDuration"`
	MaxReviseBatchSize     uint64            `json:"maxReviseBatchSize"`
	NetAddress             modules.NetAddress `json:"netAddress"`
	RemainingStorage       uint64            `json:"remainingStorage"`
	SectorSize             uint64            `json:"sectorSize"`
	TotalStorage           uint64            `json:"totalStorage"`
	UnlockHash             types.UnlockHash  `json:"unlockHash"`
	WindowSize             types.BlockHeight `json:"windowSize"`
	Collateral             types.Currency    `json:"collateral"`
	MaxCollateral          types.Currency    `json:"maxCollateral"`
	BaseRPCPrice           types.Currency    `json:"baseRPCPrice"`
	ContractPrice          types.Currency    `json:"contractPrice"`
	DownloadBandwidthPrice types.Currency    `json:"downloadBandwidthPrice"`
	SectorAccessPrice      types.Currency    `json:"sectorAccessPrice"`
	StoragePrice           types.Currency    `json:"storagePrice"`
	UploadBandwidthPrice   types.Currency    `json:"uploadBandwidthPrice"`
	RevisionNumber         uint64            `json:"revisionNumber"`
	Version                string            `json:"version"`
}
// ScannedHost groups a host's settings with its public key and other scan-
// related metrics.
type ScannedHost struct {
	HostSettings
	PublicKey HostPublicKey
	Latency   time.Duration // dial round-trip time measured by Scan
}
// Scan dials the host with the given NetAddress and public key and requests
// its settings.
//
// host.Latency is the dial round-trip only, and is recorded even when the
// dial fails. Cancelling ctx closes the connection, which aborts the
// in-flight RPC and unblocks the worker goroutine.
func Scan(ctx context.Context, addr modules.NetAddress, pubkey HostPublicKey) (host ScannedHost, err error) {
	host.PublicKey = pubkey
	dialStart := time.Now()
	conn, err := (&net.Dialer{}).DialContext(ctx, "tcp", string(addr))
	host.Latency = time.Since(dialStart)
	if err != nil {
		return host, err
	}
	defer conn.Close()
	type res struct {
		host ScannedHost
		err  error
	}
	// Buffered so the goroutine can always deliver its result and exit even
	// when the ctx.Done branch of the select below wins.
	ch := make(chan res, 1)
	go func() {
		err := func() error {
			s, err := renterhost.NewRenterSession(conn, pubkey.Ed25519())
			if err != nil {
				return errors.Wrap(err, "could not initiate RPC session")
			}
			defer s.Close()
			var resp renterhost.RPCSettingsResponse
			if err := s.WriteRequest(renterhost.RPCSettingsID, nil); err != nil {
				return err
			} else if err := s.ReadResponse(&resp, 4096); err != nil { // 4096: cap on the settings payload size
				return err
			} else if err := json.Unmarshal(resp.Settings, &host.HostSettings); err != nil {
				return err
			}
			return nil
		}()
		// errors.Wrap(nil, ...) is nil, so success is reported unchanged.
		ch <- res{host, errors.Wrap(err, "could not read signed host settings")}
	}()
	select {
	case <-ctx.Done():
		// Close the conn to abort the RPC; the goroutine then sends on the
		// buffered channel and is not leaked.
		conn.Close()
		return host, ctx.Err()
	case r := <-ch:
		return r.host, r.err
	}
}
|
#!/bin/bash
# this is the original setup for a raspberry pi to set itself up on.
function install { # apt-get install each package passed as an argument
    # "$@" (quoted) preserves each argument intact instead of re-splitting
    sudo apt-get install -q -y "$@"
}
function pipinstall { # pip install each package passed as an argument
    # "$@" (quoted) preserves each argument intact instead of re-splitting
    sudo pip install -q "$@"
}
function loadlist { # print the non-comment, non-empty lines of file $1
    # grep reads the file directly (no useless cat); "$1" is quoted so
    # filenames with spaces work
    grep -v "#" "$1" | grep .
}
function runinstallation { # 1=function_name 2=listname
    # $(...) instead of legacy backticks; the expansion is intentionally
    # unquoted so every whitespace-separated token in the list becomes its
    # own invocation of $1 (matches the original word-splitting behavior)
    for item in $(loadlist "$2")
    do
        "$1" "$item"
    done
}
# install the main list (paths are relative to the current working directory)
runinstallation install install.list
# install pip's install list
runinstallation pipinstall pip-install.list
|
<gh_stars>0
import BaseError from 'baseerr'
import FastAbortController from 'fast-abort-controller'
import raceAbort from 'race-abort'
import timeout from 'abortable-timeout'
/**
 * Internal marker error thrown by a task's `retry` callback to signal that
 * the attempt should be retried. Wraps the error the task failed with.
 */
class RetryableError extends BaseError<{ originalError: Error }> {
  // The original declaration typed this as `number`, contradicting the
  // BaseError payload above and every use site (`throw err.originalError`
  // rethrows it as an Error).
  originalError!: Error
}
/** Options controlling the backoff schedule. */
export type Opts = {
  /** backoff durations in ms; when iteration ends, no more retries happen */
  timeouts: Iterable<number> | AsyncIterable<number>
  /** lower clamp applied after jitter (defaults to 0) */
  minTimeout?: number | null | undefined
  /** upper clamp applied after jitter (defaults to Infinity) */
  maxTimeout?: number | null | undefined
  /** maps a scheduled duration to the actual delay; defaults to full jitter */
  jitter?: ((duration: number) => number) | null | undefined
  /** abort signal cancelling pending delays and further retries */
  signal?: AbortSignal | null | undefined
}
/** Arguments passed to each task attempt. */
type TaskOpts<T> = {
  /** throw the given error as retryable, scheduling another attempt */
  retry: (err: Error) => Promise<T>
  /** aborted when the overall operation is cancelled */
  signal: AbortSignal
}
/**
 * Runs `task`, retrying with backoff whenever the task invokes its `retry`
 * callback. Delays are drawn from `opts.timeouts` (sync or async iterable);
 * each is passed through `jitter` and clamped to [minTimeout, maxTimeout].
 * When the iterator is exhausted, the original (unwrapped) error is thrown.
 * Aborting `opts.signal` cancels pending delays and further retries.
 */
export default async function promiseBackoff<T>(
  opts: Opts,
  task: (opts: TaskOpts<T>) => Promise<T>,
): Promise<T> {
  const timeouts = opts.timeouts
  const minTimeout = opts.minTimeout ?? 0
  const maxTimeout = opts.maxTimeout ?? Infinity
  const signal = opts.signal ?? new FastAbortController().signal // unaborted signal
  const jitter = opts.jitter ?? fullJitter
  let iterator: Iterator<number> | null = null
  if (Symbol.iterator in timeouts) {
    iterator = (timeouts as Iterable<number>)[Symbol.iterator]()
  }
  let asyncIterator: AsyncIterator<number> | null = null
  if (Symbol.asyncIterator in timeouts) {
    asyncIterator = (timeouts as AsyncIterable<number>)[Symbol.asyncIterator]()
  }
  async function attempt(): Promise<T> {
    try {
      return await task({
        retry: (err: Error) => {
          // thrown (never returns): unwinds the task so the catch below retries
          throw new RetryableError('retryable', { originalError: err })
        },
        signal,
      })
    } catch (err) {
      if (err instanceof RetryableError) {
        // get backoff timeout duration from iterator
        let result: IteratorResult<number> = { done: true, value: null }
        if (iterator) result = iterator.next()
        if (asyncIterator) {
          // NOTE(review): if `timeouts` were both sync- and async-iterable the
          // async result would overwrite the sync one — confirm that's intended.
          result = await raceAbort(signal, asyncIterator.next())
        }
        // no more retries
        if (result.done) {
          throw err.originalError
        }
        // calculate backoff timeout duration
        let timeoutDuration = result.value
        timeoutDuration = jitter(timeoutDuration)
        timeoutDuration = Math.max(timeoutDuration, minTimeout)
        timeoutDuration = Math.min(timeoutDuration, maxTimeout)
        // backoff timeout
        await timeout(timeoutDuration, signal)
        // attempt to retry task
        return await raceAbort(signal, attempt())
      }
      throw err
    }
  }
  return raceAbort(signal, attempt())
}
/** "Full jitter" strategy: a uniformly random delay in [0, duration). */
function fullJitter(duration: number): number {
  return duration * Math.random()
}
|
/**
* @module Experiences/Experience0
*/
import React, { Profiler } from 'react'
import { Observable, asyncScheduler } from 'rxjs'
import { observeOn } from 'rxjs/operators'
// React Profiler callback: logs each commit's id, phase and duration.
const onRender = (id, phase, actualDuration) => {
  console.log(id, phase, actualDuration)
}
// Synchronous observable: emits 1, 2, 3 and completes immediately on subscribe.
const observable = new Observable((subscriber) => {
  subscriber.next(1)
  subscriber.next(2)
  subscriber.next(3)
  subscriber.complete()
})
// Same emissions, but observeOn(asyncScheduler) defers delivery, so values
// arrive after the subscribing code has returned.
const observableWithScheduler = new Observable((subscriber) => {
  subscriber.next(1)
  subscriber.next(2)
  subscriber.next(3)
  subscriber.complete()
}).pipe(observeOn(asyncScheduler))
// Shared observer that logs each value and completion.
const observer = {
  next: (v) => console.log(`observerA: ${v}`),
  complete: () => console.log('Completed')
}
/**
* @function Experience
* @return {Object} Return the dom of the Experience
*/
const Experience = () => {
const handleClick = () => {
console.log('just before subscribe')
const subscription = observable.subscribe(observer)
console.log('just after subscribe')
subscription.unsubscribe()
}
const handleClickScheduler = () => {
console.log('just before subscribe')
observableWithScheduler.subscribe(observer)
console.log('just after subscribe')
}
return (
<Profiler id="Experience" onRender={onRender}>
<div>Look at the console</div>
<button onClick={handleClick}>Call the Observable</button>
<button onClick={handleClickScheduler}>
Call the Observable with scheduler
</button>
</Profiler>
)
}
export default Experience
|
# CocoaPods spec for SJLocation.
# NOTE(review): the author e-mail was redacted to "<EMAIL>" — restore a real
# address before publishing the pod.
Pod::Spec.new do |s|
  s.name         = "SJLocation"
  s.version      = "1.0"
  s.summary      = "The easiest way to user location."
  s.homepage     = "https://github.com/zhoushejun/SJLocation"
  s.license      = "MIT"
  s.author       = { "shejunzhou" => "<EMAIL>" }
  s.platform     = :ios, "7.0"
  s.source       = { :git => "https://github.com/zhoushejun/SJLocation.git", :tag => s.version }
  s.source_files = "SJLocation/Vendor/SJLocationManager/*.{h,m}"
  s.resource     = "SJLocation/Assets.xcassets"
  s.requires_arc = true
end
|
<reponame>nasirghaznavi/surveyjs
import { NumericValidator, EmailValidator } from "../src/validator";
export default QUnit.module("Validators");
QUnit.test("Numeric validator", function(assert) {
  var validator = new NumericValidator();
  // Non-numeric input must produce an error.
  assert.notEqual(
    validator.validate("s5").error,
    null,
    "Could not convert to numeric"
  );
  // No min/max configured: every numeric value passes.
  assert.equal(validator.validate(5), null, "There are no limits (non-zero)");
  assert.equal(validator.validate(0), null, "There are no limits (zero)");
  assert.equal(
    validator.validate("5").value,
    5,
    "Convert to numeric (non-zero)"
  );
  assert.equal(
    validator.validate("5").error,
    null,
    "There is no error (non-zero)"
  );
  assert.equal(validator.validate("0").value, 0, "Convert to numeric (zero)");
  assert.equal(validator.validate("0").error, null, "There is no error (zero)");
  // Limits 10..20 — neither bound is zero.
  validator.minValue = 10;
  validator.maxValue = 20;
  assert.notEqual(
    validator.validate(5).error,
    null,
    "Value is too low. Limits are not 0."
  );
  assert.notEqual(
    validator.validate(25).error,
    null,
    "Value is too high. Limits are not 0."
  );
  assert.equal(
    validator.validate("15").error,
    null,
    "Value is between minValue and maxValue. Limits are not 0."
  );
  assert.equal(
    validator.validate(15),
    null,
    "Value is between minValue and maxValue. Return no errors. Limits are not 0."
  );
  // Limits 0..20 — regression guard: a falsy low bound must still be enforced.
  validator.minValue = 0;
  validator.maxValue = 20;
  assert.notEqual(
    validator.validate(-1).error,
    null,
    "Value is too low. Low limit is 0."
  );
  assert.notEqual(
    validator.validate(25).error,
    null,
    "Value is too high. Low limit is 0."
  );
  assert.equal(
    validator.validate("15").error,
    null,
    "Value is between minValue and maxValue. Low limit is 0."
  );
  assert.equal(
    validator.validate(15),
    null,
    "Value is between minValue and maxValue. Return no errors. Low limit is 0."
  );
  // Limits -20..0 — regression guard: a falsy high bound must still be enforced.
  validator.minValue = -20;
  validator.maxValue = 0;
  assert.notEqual(
    validator.validate(-21).error,
    null,
    "Value is too low. High limit is 0."
  );
  assert.notEqual(
    validator.validate(1).error,
    null,
    "Value is too high. High limit is 0."
  );
  assert.equal(
    validator.validate("-5").error,
    null,
    "Value is between minValue and maxValue. High limit is 0."
  );
  assert.equal(
    validator.validate(-5),
    null,
    "Value is between minValue and maxValue. Return no errors. High limit is 0."
  );
});
QUnit.test("Email validator", function(assert) {
  var validator = new EmailValidator();
  // NOTE(review): both fixtures were redacted to "<EMAIL>" — the second one
  // should be an invalid address; restore real fixtures for this test to be
  // meaningful.
  assert.equal(
    validator.validate("<EMAIL>"),
    null,
    "No error is returned for the correct e-mail"
  );
  assert.notEqual(
    validator.validate("<EMAIL>").error,
    null,
    "An error is returned for the incorrect e-mail"
  );
});
|
/* eslint-disable @typescript-eslint/no-var-requires */
// Regenerates one React wrapper component per SVG icon, plus a union type of
// all icon names. Paths are resolved relative to the repository root (one
// level above this script's directory).
const fs = require('fs');
const path = require('path');
const rimraf = require('rimraf');

const iconFolder = 'src/assets/icons';
const componentsFolder = 'src/components/svg-icon/icons';

const iconFolderPath = path.join(__dirname, '..', iconFolder);
const componentsFolderPath = path.join(__dirname, '..', componentsFolder);
const typingFilePath = path.join(__dirname, '..', 'src/icon-names.d.ts');

// Start from a clean slate so removed icons don't leave stale components.
rimraf.sync(componentsFolderPath);
rimraf.sync(typingFilePath);
fs.mkdirSync(componentsFolderPath);

const iconNames = fs
  .readdirSync(iconFolderPath)
  .filter((fileName) => fileName.endsWith('.svg'))
  // path.basename(name, '.svg') keeps dots inside the stem intact, whereas
  // the previous split('.')[0] truncated names like "arrow.left.svg" to "arrow".
  .map((fileName) => path.basename(fileName, '.svg'));

iconNames.forEach((iconName) => {
  const filePath = path.join(componentsFolderPath, `${iconName}-icon.tsx`);
  const content = `import { ReactComponent } from 'assets/icons/${iconName}.svg';\nexport default ReactComponent;\n`;
  fs.writeFileSync(filePath, content);
});

fs.writeFileSync(
  typingFilePath,
  `type IconName = ${iconNames.map((name) => `'${name}'`).join(' | ')};\n`,
);
|
<filename>INFO/Books Codes/Oracle Database 10g SQL/sql_book/SQL/object_schema2.sql
-- The SQL*Plus script object_schema2.sql performs the following:
-- 1. Creates object_user2
-- 2. Creates the database object types
-- 3. Populates the database tables with example data
-- This script should be run by the system user (or the DBA)
-- NOTE(review): hardcoded system credentials; acceptable only on a throwaway
-- training instance.
CONNECT system/manager;
-- drop object_user2
DROP USER object_user2 CASCADE;
-- create object_user2
CREATE USER object_user2 IDENTIFIED BY object_password;
-- allow object_user2 to connect and create database objects
GRANT connect, resource TO object_user2;
-- connect as object_user2
CONNECT object_user2/object_password;
-- create the object types
CREATE TYPE address_typ AS OBJECT (
  street VARCHAR2(15),
  city VARCHAR2(15),
  state CHAR(2),
  zip VARCHAR2(5)
);
/
-- NOT FINAL: business_person_typ below subtypes this.
CREATE TYPE person_typ AS OBJECT (
  id NUMBER,
  first_name VARCHAR2(10),
  last_name VARCHAR2(10),
  dob DATE,
  phone VARCHAR2(12),
  address address_typ
) NOT FINAL;
/
CREATE TYPE business_person_typ UNDER person_typ (
  title VARCHAR2(20),
  company VARCHAR2(20)
);
/
-- NOT INSTANTIABLE: only the car_typ/motorcycle_typ subtypes may be created.
CREATE TYPE vehicle_typ AS OBJECT (
  id NUMBER,
  make VARCHAR2(15),
  model VARCHAR2(15)
) NOT FINAL NOT INSTANTIABLE;
/
CREATE TYPE car_typ UNDER vehicle_typ (
  convertible CHAR(1)
);
/
CREATE TYPE motorcycle_typ UNDER vehicle_typ (
  sidecar CHAR(1)
);
/
-- person_typ2 demonstrates overloaded user-defined constructors.
CREATE OR REPLACE TYPE person_typ2 AS OBJECT (
  id NUMBER,
  first_name VARCHAR2(10),
  last_name VARCHAR2(10),
  dob DATE,
  phone VARCHAR2(12),
  CONSTRUCTOR FUNCTION person_typ2(
    p_id NUMBER,
    p_first_name VARCHAR2,
    p_last_name VARCHAR2
  ) RETURN SELF AS RESULT,
  CONSTRUCTOR FUNCTION person_typ2(
    p_id NUMBER,
    p_first_name VARCHAR2,
    p_last_name VARCHAR2,
    p_dob DATE,
    p_phone VARCHAR2
  ) RETURN SELF AS RESULT
);
/
CREATE OR REPLACE TYPE BODY person_typ2 AS
  -- Short-form constructor: dob and phone get fixed defaults.
  CONSTRUCTOR FUNCTION person_typ2(
    p_id NUMBER,
    p_first_name VARCHAR2,
    p_last_name VARCHAR2
  ) RETURN SELF AS RESULT IS
  BEGIN
    SELF.id := p_id;
    SELF.first_name := p_first_name;
    SELF.last_name := p_last_name;
    SELF.dob := SYSDATE;
    SELF.phone := '555-1212';
    RETURN;
  END;
  -- Full constructor: every attribute supplied by the caller.
  CONSTRUCTOR FUNCTION person_typ2(
    p_id NUMBER,
    p_first_name VARCHAR2,
    p_last_name VARCHAR2,
    p_dob DATE,
    p_phone VARCHAR2
  ) RETURN SELF AS RESULT IS
  BEGIN
    SELF.id := p_id;
    SELF.first_name := p_first_name;
    SELF.last_name := p_last_name;
    SELF.dob := p_dob;
    SELF.phone := p_phone;
    RETURN;
  END;
END;
/
-- create the tables
CREATE TABLE object_customers OF person_typ;
CREATE TABLE object_business_customers OF business_person_typ;
CREATE TABLE vehicles OF vehicle_typ;
CREATE TABLE cars OF car_typ;
CREATE TABLE motorcycles OF motorcycle_typ;
CREATE TABLE object_customers2 OF person_typ2;
-- insert sample data into object_customers table
-- NOTE(review): date literals like '01-FEB-1955' rely on the session
-- NLS_DATE_FORMAT; TO_DATE with an explicit mask would be safer.
INSERT INTO object_customers VALUES (
  person_typ(1, 'John', 'Brown', '01-FEB-1955', '800-555-1211',
    address_typ('2 State Street', 'Beantown', 'MA', '12345')
  )
);
INSERT INTO object_customers (
  id, first_name, last_name, dob, phone,
  address
) VALUES (
  2, 'Cynthia', 'Green', '05-FEB-1968', '800-555-1212',
  address_typ('3 Free Street', 'Middle Town', 'CA', '12345')
);
-- insert sample data into object_business_customers table
INSERT INTO object_business_customers VALUES (
  business_person_typ(1, 'John', 'Brown', '01-FEB-1955', '800-555-1211',
    address_typ('2 State Street', 'Beantown', 'MA', '12345'),
    'Manager', 'XYZ Corp'
  )
);
-- insert sample data into cars table
INSERT INTO cars VALUES (
  car_typ(1, 'Toyota', 'MR2', 'Y')
);
-- insert sample data into motorcycles table
INSERT INTO motorcycles VALUES (
  motorcycle_typ(1, 'Harley-Davidson', 'V-Rod', 'N')
);
-- commit the transaction
COMMIT;
# frozen_string_literal: true
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Auto-generated by gapic-generator-ruby. DO NOT EDIT!
module Google
module Cloud
module Gaming
module V1beta
# Request message for GameServerDeploymentsService.ListGameServerDeployments.
# @!attribute [rw] parent
# @return [::String]
# Required. The parent resource name. Uses the form:
# `projects/{project}/locations/{location}`.
# @!attribute [rw] page_size
# @return [::Integer]
# Optional. The maximum number of items to return. If unspecified, the
# server will pick an appropriate default. The server may return fewer items
# than requested. A caller should only rely on response's
# {::Google::Cloud::Gaming::V1beta::ListGameServerDeploymentsResponse#next_page_token next_page_token}
# to determine if there are more GameServerDeployments left to be queried.
# @!attribute [rw] page_token
# @return [::String]
# Optional. The next_page_token value returned from a previous List request,
# if any.
# @!attribute [rw] filter
# @return [::String]
# Optional. The filter to apply to list results.
# @!attribute [rw] order_by
# @return [::String]
# Optional. Specifies the ordering of results following syntax at
# https://cloud.google.com/apis/design/design_patterns#sorting_order.
class ListGameServerDeploymentsRequest
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
# Response message for GameServerDeploymentsService.ListGameServerDeployments.
# @!attribute [rw] game_server_deployments
# @return [::Array<::Google::Cloud::Gaming::V1beta::GameServerDeployment>]
# The list of game server deployments.
# @!attribute [rw] next_page_token
# @return [::String]
# Token to retrieve the next page of results, or empty if there are no more
# results in the list.
# @!attribute [rw] unreachable
# @return [::Array<::String>]
# List of locations that could not be reached.
class ListGameServerDeploymentsResponse
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
# Request message for GameServerDeploymentsService.GetGameServerDeployment.
# @!attribute [rw] name
# @return [::String]
# Required. The name of the game server delpoyment to retrieve. Uses the
# form:
#
# `projects/{project}/locations/{location}/gameServerDeployments/{deployment}`.
class GetGameServerDeploymentRequest
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
# Request message for
# GameServerDeploymentsService.GetGameServerDeploymentRollout.
# @!attribute [rw] name
# @return [::String]
# Required. The name of the game server delpoyment to retrieve. Uses the
# form:
#
# `projects/{project}/locations/{location}/gameServerDeployments/{deployment}/rollout`.
class GetGameServerDeploymentRolloutRequest
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
# Request message for GameServerDeploymentsService.CreateGameServerDeployment.
# @!attribute [rw] parent
# @return [::String]
# Required. The parent resource name. Uses the form:
# `projects/{project}/locations/{location}`.
# @!attribute [rw] deployment_id
# @return [::String]
# Required. The ID of the game server delpoyment resource to be created.
# @!attribute [rw] game_server_deployment
# @return [::Google::Cloud::Gaming::V1beta::GameServerDeployment]
# Required. The game server delpoyment resource to be created.
class CreateGameServerDeploymentRequest
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
# Request message for GameServerDeploymentsService.DeleteGameServerDeployment.
# @!attribute [rw] name
# @return [::String]
# Required. The name of the game server delpoyment to delete. Uses the form:
#
# `projects/{project}/locations/{location}/gameServerDeployments/{deployment}`.
class DeleteGameServerDeploymentRequest
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
# Request message for GameServerDeploymentsService.UpdateGameServerDeployment.
# Only allows updates for labels.
# @!attribute [rw] game_server_deployment
#   @return [::Google::Cloud::Gaming::V1beta::GameServerDeployment]
#     Required. The game server deployment to be updated.
#     Only fields specified in update_mask are updated.
# @!attribute [rw] update_mask
#   @return [::Google::Protobuf::FieldMask]
#     Required. Mask of fields to update. At least one path must be supplied in
#     this field. For the `FieldMask` definition, see
#     https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
class UpdateGameServerDeploymentRequest
  include ::Google::Protobuf::MessageExts
  extend ::Google::Protobuf::MessageExts::ClassMethods
end

# Request message for
# GameServerDeploymentsService.UpdateGameServerRolloutDeployment.
# @!attribute [rw] rollout
#   @return [::Google::Cloud::Gaming::V1beta::GameServerDeploymentRollout]
#     Required. The game server deployment rollout to be updated.
#     Only fields specified in update_mask are updated.
# @!attribute [rw] update_mask
#   @return [::Google::Protobuf::FieldMask]
#     Required. Mask of fields to update. At least one path must be supplied in
#     this field. For the `FieldMask` definition, see
#     https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
class UpdateGameServerDeploymentRolloutRequest
  include ::Google::Protobuf::MessageExts
  extend ::Google::Protobuf::MessageExts::ClassMethods
end
# Request message for GameServerDeploymentsService.FetchDeploymentState.
# @!attribute [rw] name
#   @return [::String]
#     Required. The name of the game server deployment. Uses the form:
#
#     `projects/{project}/locations/{location}/gameServerDeployments/{deployment}`.
class FetchDeploymentStateRequest
  include ::Google::Protobuf::MessageExts
  extend ::Google::Protobuf::MessageExts::ClassMethods
end

# Response message for GameServerDeploymentsService.FetchDeploymentState.
# @!attribute [rw] cluster_state
#   @return [::Array<::Google::Cloud::Gaming::V1beta::FetchDeploymentStateResponse::DeployedClusterState>]
#     The state of the game server deployment in each game server cluster.
# @!attribute [rw] unavailable
#   @return [::Array<::String>]
#     List of locations that could not be reached.
class FetchDeploymentStateResponse
  include ::Google::Protobuf::MessageExts
  extend ::Google::Protobuf::MessageExts::ClassMethods

  # The game server cluster changes made by the game server deployment.
  # @!attribute [rw] cluster
  #   @return [::String]
  #     The name of the cluster.
  # @!attribute [rw] fleet_details
  #   @return [::Array<::Google::Cloud::Gaming::V1beta::DeployedFleetDetails>]
  #     The details about the Agones fleets and autoscalers created in the
  #     game server cluster.
  class DeployedClusterState
    include ::Google::Protobuf::MessageExts
    extend ::Google::Protobuf::MessageExts::ClassMethods
  end
end
# A game server deployment resource.
# @!attribute [rw] name
#   @return [::String]
#     The resource name of the game server deployment. Uses the form:
#
#     `projects/{project}/locations/{location}/gameServerDeployments/{deployment}`.
#     For example,
#
#     `projects/my-project/locations/{location}/gameServerDeployments/my-deployment`.
# @!attribute [r] create_time
#   @return [::Google::Protobuf::Timestamp]
#     Output only. The creation time.
# @!attribute [r] update_time
#   @return [::Google::Protobuf::Timestamp]
#     Output only. The last-modified time.
# @!attribute [rw] labels
#   @return [::Google::Protobuf::Map{::String => ::String}]
#     The labels associated with this game server deployment. Each label is a
#     key-value pair.
# @!attribute [rw] etag
#   @return [::String]
#     ETag of the resource.
# @!attribute [rw] description
#   @return [::String]
#     Human readable description of the game server deployment.
class GameServerDeployment
  include ::Google::Protobuf::MessageExts
  extend ::Google::Protobuf::MessageExts::ClassMethods

  # Protobuf map entry for the `labels` field.
  # @!attribute [rw] key
  #   @return [::String]
  # @!attribute [rw] value
  #   @return [::String]
  class LabelsEntry
    include ::Google::Protobuf::MessageExts
    extend ::Google::Protobuf::MessageExts::ClassMethods
  end
end

# A game server config override.
# @!attribute [rw] realms_selector
#   @return [::Google::Cloud::Gaming::V1beta::RealmSelector]
#     Selector for choosing applicable realms.
# @!attribute [rw] config_version
#   @return [::String]
#     The game server config for this override.
class GameServerConfigOverride
  include ::Google::Protobuf::MessageExts
  extend ::Google::Protobuf::MessageExts::ClassMethods
end

# The game server deployment rollout which represents the desired rollout
# state.
# @!attribute [rw] name
#   @return [::String]
#     The resource name of the game server deployment rollout. Uses the form:
#
#     `projects/{project}/locations/{location}/gameServerDeployments/{deployment}/rollout`.
#     For example,
#
#     `projects/my-project/locations/{location}/gameServerDeployments/my-deployment/rollout`.
# @!attribute [r] create_time
#   @return [::Google::Protobuf::Timestamp]
#     Output only. The creation time.
# @!attribute [r] update_time
#   @return [::Google::Protobuf::Timestamp]
#     Output only. The last-modified time.
# @!attribute [rw] default_game_server_config
#   @return [::String]
#     The default game server config is applied to all realms unless overridden
#     in the rollout. For example,
#
#     `projects/my-project/locations/global/gameServerDeployments/my-game/configs/my-config`.
# @!attribute [rw] game_server_config_overrides
#   @return [::Array<::Google::Cloud::Gaming::V1beta::GameServerConfigOverride>]
#     Contains the game server config rollout overrides. Overrides are processed
#     in the order they are listed. Once a match is found for a realm, the rest
#     of the list is not processed.
# @!attribute [rw] etag
#   @return [::String]
#     ETag of the resource.
class GameServerDeploymentRollout
  include ::Google::Protobuf::MessageExts
  extend ::Google::Protobuf::MessageExts::ClassMethods
end
# Request message for PreviewGameServerDeploymentRollout.
# @!attribute [rw] rollout
#   @return [::Google::Cloud::Gaming::V1beta::GameServerDeploymentRollout]
#     Required. The game server deployment rollout to be updated.
#     Only fields specified in update_mask are updated.
# @!attribute [rw] update_mask
#   @return [::Google::Protobuf::FieldMask]
#     Optional. Mask of fields to update. At least one path must be supplied in
#     this field. For the `FieldMask` definition, see
#     https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
# @!attribute [rw] preview_time
#   @return [::Google::Protobuf::Timestamp]
#     Optional. The target timestamp to compute the preview. Defaults to the
#     immediately after the proposed rollout completes.
class PreviewGameServerDeploymentRolloutRequest
  include ::Google::Protobuf::MessageExts
  extend ::Google::Protobuf::MessageExts::ClassMethods
end

# Response message for PreviewGameServerDeploymentRollout.
# This has details about the Agones fleet and autoscaler to be actuated.
# @!attribute [rw] unavailable
#   @return [::Array<::String>]
#     Locations that could not be reached on this request.
# @!attribute [rw] etag
#   @return [::String]
#     ETag of the game server deployment.
# @!attribute [rw] target_state
#   @return [::Google::Cloud::Gaming::V1beta::TargetState]
#     The target state.
class PreviewGameServerDeploymentRolloutResponse
  include ::Google::Protobuf::MessageExts
  extend ::Google::Protobuf::MessageExts::ClassMethods
end
end
end
end
end
|
// Set up an Express server with JSON body parsing.
const express = require("express");
const bodyParser = require("body-parser");
// Acquire the database connection once at startup rather than on every
// request: the dependency is explicit and a missing/broken module fails fast.
const conn = require("./db");

const app = express();
app.use(bodyParser.json());

// PUT /user — update an existing user's name and email, keyed by id.
app.put("/user", (req, res) => {
  const { name, email, id } = req.body || {};

  // Reject malformed requests early instead of issuing a no-op UPDATE
  // with undefined bind values.
  if (name === undefined || email === undefined || id === undefined) {
    return res
      .status(400)
      .send({ message: "name, email and id are required" });
  }

  // Parameterized query — user input never reaches the SQL string directly.
  const sql = "UPDATE user SET name = ?, email = ? WHERE id = ?";
  conn.query(sql, [name, email, id], (err) => {
    if (err) {
      // Log full driver error server-side, but do not leak internals
      // (SQL text, host, credentials hints) to the client.
      console.error(err);
      return res.status(500).send({ message: "failed to update user" });
    }
    // Send success response.
    return res.send({
      message: "user has been updated"
    });
  });
});

// Start up express server.
app.listen(3000, () => console.log("Server started on port 3000"));
import { ShadowDom } from '@/components/UI/ShadowDom';
import { useEditorContext } from '@/hooks/useEditorContext';
import React from 'react';
import { EmailContent } from '../EmailContent';
/**
 * Renders the email currently being edited inside an isolated shadow DOM,
 * so editor-chrome styles cannot bleed into the email markup.
 */
export function EditEmailPreview() {
  const { pageData } = useEditorContext();

  // Fall back to the conventional 600px email width when none is set.
  const pageMaxWidth = pageData.attributes.width || '600px';

  const containerStyle = {
    width: pageMaxWidth,
    padding: '40px 0px',
    margin: 'auto',
    height: '100%',
  };

  return (
    <ShadowDom id='VisualEditorEditMode' style={containerStyle}>
      <EmailContent />
    </ShadowDom>
  );
}
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
// THIS IS A GENERATED FILE. DO NOT MODIFY MANUALLY. @see scripts/compile-icons.js
import * as React from 'react';
// Props injected by SVGR so the icon can expose an accessible <title>.
interface SVGRProps {
  title?: string;
  titleId?: string;
}

// 32x32 Elastic App Search logo. `title`/`titleId` wire up
// aria-labelledby for screen readers; all remaining SVG props are
// spread onto the root <svg> element.
// NOTE: this file is generated (see header) — edits here will be
// overwritten by scripts/compile-icons.js.
const EuiIconLogoAppSearch = ({
  title,
  titleId,
  ...props
}: React.SVGProps<SVGSVGElement> & SVGRProps) => (
  <svg
    xmlns="http://www.w3.org/2000/svg"
    width={32}
    height={32}
    viewBox="0 0 32 32"
    aria-labelledby={titleId}
    {...props}
  >
    {title ? <title id={titleId}>{title}</title> : null}
    <path
      fill="#0080D5"
      d="M19.5.938a7.002 7.002 0 00-7 0l-8 4.619A7 7 0 001 11.62v9.237a7 7 0 003.5 6.062l7.5 4.33V17.979a7 7 0 013.5-6.062L27 5.276 19.5.939z"
    />
    <path
      className="euiIcon__fillNegative"
      d="M19.5.938a7.002 7.002 0 00-7 0L5 5.277l11 6.35 11-6.35-7.5-4.34z"
    />
    <path
      fill="#FA744E"
      d="M28.435 7.76l-10.026 5.79a6.994 6.994 0 011.59 4.428v13.27l7.5-4.33a7 7 0 003.5-6.061v-9.238a6.992 6.992 0 00-1.586-4.422l-.978.564z"
    />
  </svg>
);

export const icon = EuiIconLogoAppSearch;
|
require 'rails_helper'
describe Admin::UserAuctionViewModel do
  describe '#skills' do
    # The previous description ('should return a mm/dd/yy date in EST') was
    # copy-pasted from a date spec and did not describe this assertion.
    it 'returns the auction skill names sorted alphabetically, comma-separated' do
      auction = create(:auction)
      skills = [create(:skill, name: 'sewing'), create(:skill, name: 'eating')]
      auction.skills << skills
      expect(Admin::UserAuctionViewModel.new(auction, user).skills).to eq(
        'eating, sewing'
      )
    end
  end

  describe '#accepted_label' do
    context 'when the user is the winning_bidder' do
      it 'returns "Yes" when the auction was accepted' do
        auction = create(:auction, :closed, :with_bids, :accepted)
        bidder = WinningBid.new(auction).find.bidder
        expect(Admin::UserAuctionViewModel.new(auction, bidder).accepted_label).to eq('Yes')
      end

      it 'returns "No" when auction was not accepted yet' do
        auction = create(:auction, :closed, :rejected, :with_bids)
        bidder = WinningBid.new(auction).find.bidder
        expect(Admin::UserAuctionViewModel.new(auction, bidder).accepted_label).to eq('No')
      end
    end

    context 'when the user is not the winning bidder' do
      it 'returns "-"' do
        auction = create(:auction, :closed, :with_bids, accepted_at: Time.now)
        expect(Admin::UserAuctionViewModel.new(auction, user).accepted_label).to eq('-')
      end
    end
  end

  # Memoized helper: an arbitrary user who is not the winning bidder.
  def user
    @_user ||= create(:user)
  end
end
|
# Launch CenterNet (ctdet) training from the src directory.
cd src
# ResNet-18+DCN backbone fine-tuned from the COCO checkpoint; 4 GPUs with a
# smaller master-GPU batch (--master_batch 5), 100 epochs.
# NOTE(review): --lr_step 180,210 is beyond --num_epochs 100, so the LR is
# never stepped during this run — confirm whether that is intentional.
python main.py ctdet --exp_id driving_resdcn18_fourth_vehicle_tmp --class_remap --arch resdcn_18 --load_model ../models/ctdet_coco_resdcn18.pth --batch_size 32 --master_batch 5 --lr 1.25e-4 --gpus 0,1,2,3 --num_workers 8 --num_epochs 100 --lr_step 180,210 --dataset half
# Evaluation command kept for reference:
# python test.py ctdet --exp_id driving_dla_2x --keep_res --resume
<filename>auth-service/src/main/java/com/jellehuibregtse/cah/authservice/jwt/JwtUsernameAndPasswordAuthenticationFilter.java
package com.jellehuibregtse.cah.authservice.jwt;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.jellehuibregtse.cah.authservice.service.JwtTokenService;
import lombok.Getter;
import lombok.Setter;
import org.springframework.security.authentication.AuthenticationManager;
import org.springframework.security.authentication.UsernamePasswordAuthenticationToken;
import org.springframework.security.core.Authentication;
import org.springframework.security.core.AuthenticationException;
import org.springframework.security.core.GrantedAuthority;
import org.springframework.security.web.authentication.UsernamePasswordAuthenticationFilter;
import org.springframework.security.web.util.matcher.AntPathRequestMatcher;
import javax.servlet.FilterChain;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.ws.rs.HttpMethod;
import java.io.IOException;
import java.util.Collections;
import java.util.stream.Collectors;
/**
 * This filter authenticates a user with username and password then returns a JWT token.
 * <p>
 * Flow: {@link #attemptAuthentication} deserializes the JSON credentials from the
 * request body and delegates to the {@link AuthenticationManager}; on success,
 * {@link #successfulAuthentication} issues a signed JWT and places it in a
 * response header configured by {@link JwtConfig}.
 *
 * @author <NAME>
 */
public class JwtUsernameAndPasswordAuthenticationFilter extends UsernamePasswordAuthenticationFilter {

    // Signing/header/path configuration for the JWT scheme.
    private final JwtConfig jwtConfig;
    // We use auth manager to validate the user credentials.
    private final AuthenticationManager authManager;
    // Creates the signed token once authentication succeeds.
    private final JwtTokenService jwtTokenService;

    public JwtUsernameAndPasswordAuthenticationFilter(AuthenticationManager authManager,
                                                      JwtConfig jwtConfig,
                                                      JwtTokenService jwtTokenService) {
        this.authManager = authManager;
        this.jwtConfig = jwtConfig;
        this.jwtTokenService = jwtTokenService;

        // By default, UsernamePasswordAuthenticationFilter listens to "/login" path.
        // In our case, we use "/auth". So, we need to override the defaults.
        this.setRequiresAuthenticationRequestMatcher(new AntPathRequestMatcher(jwtConfig.getUri(), HttpMethod.POST));
    }

    @Override
    public Authentication attemptAuthentication(HttpServletRequest request,
                                                HttpServletResponse response) throws AuthenticationException {
        try {
            // Get credentials from request (JSON body with username/password fields).
            var userCredentials = new ObjectMapper().readValue(request.getInputStream(), AuthenticationRequest.class);

            // Create auth object, that contains the credentials, which will be used by auth manager
            var authToken = new UsernamePasswordAuthenticationToken(userCredentials.getUsername(),
                                                                    userCredentials.getPassword(),
                                                                    Collections.emptyList());

            // Authentication manager authenticates the user, and uses the UserDetailsServiceImpl::loadUserByUsername() method to load the user.
            return authManager.authenticate(authToken);
        } catch (IOException e) {
            // Unreadable/unparseable body — surface as an unchecked error so the
            // security filter chain aborts the attempt.
            throw new RuntimeException(e);
        }
    }

    // Upon successful authentication, generate a token.
    // The 'auth' passed to successfulAuthentication() is the current authenticated user.
    @Override
    protected void successfulAuthentication(HttpServletRequest request,
                                            HttpServletResponse response,
                                            FilterChain chain,
                                            Authentication authentication) {
        // Converts the authorities to list of strings.
        // This is important because it affects the way we get them back at the gateway.
        var token = jwtTokenService.generateToken(authentication.getName(),
                                                  authentication.getAuthorities()
                                                                .stream()
                                                                .map(GrantedAuthority::getAuthority)
                                                                .collect(Collectors.toList()));

        // Add token to header (prefix typically "Bearer ").
        response.addHeader(jwtConfig.getHeader(), jwtConfig.getPrefix() + token);
    }

    // Simple DTO deserialized from the login request body.
    @Getter
    @Setter
    private static class AuthenticationRequest {
        private String username;
        private String password;
    }
}
<gh_stars>1-10
package com.prisma.api.mutations
import com.prisma.api.ApiBaseSpec
import com.prisma.api.database.DatabaseQueryBuilder
import com.prisma.shared.project_dsl.SchemaDsl
import org.scalatest.{FlatSpec, Matchers}
class NestedConnectMutationInsideCreateSpec extends FlatSpec with Matchers with ApiBaseSpec {
"a P1! to C1! relation with the child already in a relation" should "error when connecting by id since old required parent relation would be broken" in {
val project = SchemaDsl() { schema =>
val parent = schema.model("Parent").field_!("p", _.String, isUnique = true)
val child = schema.model("Child").field_!("c", _.String, isUnique = true).oneToOneRelation_!("parentReq", "childReq", parent)
}
database.setup(project)
val child1Id = server
.executeQuerySimple(
"""mutation {
| createParent(data: {
| p: "p1"
| childReq: {
| create: {c: "c1"}
| }
| }){
| childReq{
| id
| }
| }
|}""".stripMargin,
project
)
.pathAsString("data.createParent.childReq.id")
database.runDbActionOnClientDb(DatabaseQueryBuilder.itemCountForTable(project.id, "_ChildToParent").as[Int]) should be(Vector(1))
server.executeQuerySimpleThatMustFail(
s"""
|mutation {
| createParent(data:{
| p: "p2"
| childReq: {connect: {id: "$child1Id"}}
| }){
| childReq {
| c
| }
| }
|}
""".stripMargin,
project,
errorCode = 3042,
errorContains = "The change you are trying to make would violate the required relation '_ChildToParent' between Child and Parent"
)
database.runDbActionOnClientDb(DatabaseQueryBuilder.itemCountForTable(project.id, "_ChildToParent").as[Int]) should be(Vector(1))
}
"a P1! to C1 relation with the child already in a relation" should "should fail on existing old parent" in {
val project = SchemaDsl() { schema =>
val child = schema.model("Child").field_!("c", _.String, isUnique = true)
schema.model("Parent").field_!("p", _.String, isUnique = true).oneToOneRelation_!("childReq", "parentOpt", child, isRequiredOnFieldB = false)
}
database.setup(project)
val child1Id = server
.executeQuerySimple(
"""mutation {
| createParent(data: {
| p: "p1"
| childReq: {
| create: {c: "c1"}
| }
| }){
| childReq{
| id
| }
| }
|}""".stripMargin,
project
)
.pathAsString("data.createParent.childReq.id")
database.runDbActionOnClientDb(DatabaseQueryBuilder.itemCountForTable(project.id, "_ParentToChild").as[Int]) should be(Vector(1))
server.executeQuerySimpleThatMustFail(
s"""
|mutation {
| createParent(data:{
| p: "p2"
| childReq: {connect: {id: "$child1Id"}}
| }){
| childReq {
| c
| }
| }
|}
""".stripMargin,
project,
errorCode = 3042,
errorContains = "The change you are trying to make would violate the required relation '_ParentToChild' between Parent and Child"
)
database.runDbActionOnClientDb(DatabaseQueryBuilder.itemCountForTable(project.id, "_ParentToChild").as[Int]) should be(Vector(1))
}
"a P1! to C1 relation with the child not in a relation" should "be connectable through a nested mutation by id" in {
val project = SchemaDsl() { schema =>
val child = schema.model("Child").field_!("c", _.String, isUnique = true)
schema.model("Parent").field_!("p", _.String, isUnique = true).oneToOneRelation_!("childReq", "parentOpt", child, isRequiredOnFieldB = false)
}
database.setup(project)
val child1Id = server
.executeQuerySimple(
"""mutation {
| createChild(data: {c: "c1"})
| {
| id
| }
|}""".stripMargin,
project
)
.pathAsString("data.createChild.id")
database.runDbActionOnClientDb(DatabaseQueryBuilder.itemCountForTable(project.id, "_ParentToChild").as[Int]) should be(Vector(0))
val res = server.executeQuerySimple(
s"""
|mutation {
| createParent(data:{
| p: "p2"
| childReq: {connect: {id: "$child1Id"}}
| }){
| childReq {
| c
| }
| }
|}
""".stripMargin,
project
)
res.toString should be("""{"data":{"createParent":{"childReq":{"c":"c1"}}}}""")
database.runDbActionOnClientDb(DatabaseQueryBuilder.itemCountForTable(project.id, "_ParentToChild").as[Int]) should be(Vector(1))
}
"a P1 to C1 relation with the child already in a relation" should "be connectable through a nested mutation by id if the child is already in a relation" in {
val project = SchemaDsl() { schema =>
val child = schema.model("Child").field_!("c", _.String, isUnique = true)
schema.model("Parent").field_!("p", _.String, isUnique = true).oneToOneRelation("childOpt", "parentOpt", child)
}
database.setup(project)
val child1Id = server
.executeQuerySimple(
"""mutation {
| createParent(data: {
| p: "p1"
| childOpt: {
| create: {c: "c1"}
| }
| }){
| childOpt{
| id
| }
| }
|}""".stripMargin,
project
)
.pathAsString("data.createParent.childOpt.id")
database.runDbActionOnClientDb(DatabaseQueryBuilder.itemCountForTable(project.id, "_ParentToChild").as[Int]) should be(Vector(1))
val res = server.executeQuerySimple(
s"""
|mutation {
| createParent(data:{
| p: "p2"
| childOpt: {connect: {id: "$child1Id"}}
| }){
| childOpt {
| c
| }
| }
|}
""".stripMargin,
project
)
res.toString should be("""{"data":{"createParent":{"childOpt":{"c":"c1"}}}}""")
database.runDbActionOnClientDb(DatabaseQueryBuilder.itemCountForTable(project.id, "_ParentToChild").as[Int]) should be(Vector(1))
}
"a P1 to C1 relation with the child without a relation" should "be connectable through a nested mutation by id" in {
val project = SchemaDsl() { schema =>
val child = schema.model("Child").field_!("c", _.String, isUnique = true)
schema.model("Parent").field_!("p", _.String, isUnique = true).oneToOneRelation("childOpt", "parentOpt", child)
}
database.setup(project)
val child1Id = server
.executeQuerySimple(
"""mutation {
| createChild(data: {c: "c1"})
| {
| id
| }
|}""".stripMargin,
project
)
.pathAsString("data.createChild.id")
database.runDbActionOnClientDb(DatabaseQueryBuilder.itemCountForTable(project.id, "_ParentToChild").as[Int]) should be(Vector(0))
val res = server.executeQuerySimple(
s"""
|mutation {
| createParent(data:{
| p: "p2"
| childOpt: {connect: {id: "$child1Id"}}
| }){
| childOpt {
| c
| }
| }
|}
""".stripMargin,
project
)
res.toString should be("""{"data":{"createParent":{"childOpt":{"c":"c1"}}}}""")
database.runDbActionOnClientDb(DatabaseQueryBuilder.itemCountForTable(project.id, "_ParentToChild").as[Int]) should be(Vector(1))
}
"a PM to C1! relation with the child already in a relation" should "be connectable through a nested mutation by unique" in {
val project = SchemaDsl() { schema =>
val child = schema.model("Child").field_!("c", _.String, isUnique = true)
schema.model("Parent").field_!("p", _.String, isUnique = true).oneToManyRelation_!("childrenOpt", "parentReq", child)
}
database.setup(project)
server.executeQuerySimple(
"""mutation {
| createParent(data: {
| p: "p1"
| childrenOpt: {
| create: {c: "c1"}
| }
| }){
| childrenOpt{
| c
| }
| }
|}""".stripMargin,
project
)
database.runDbActionOnClientDb(DatabaseQueryBuilder.itemCountForTable(project.id, "_ParentToChild").as[Int]) should be(Vector(1))
val res = server.executeQuerySimple(
s"""
|mutation {
| createParent(data:{
| p: "p2"
| childrenOpt: {connect: {c: "c1"}}
| }){
| childrenOpt {
| c
| }
| }
|}
""".stripMargin,
project
)
res.toString should be("""{"data":{"createParent":{"childrenOpt":[{"c":"c1"}]}}}""")
database.runDbActionOnClientDb(DatabaseQueryBuilder.itemCountForTable(project.id, "_ParentToChild").as[Int]) should be(Vector(1))
}
"a P1 to C1! relation with the child already in a relation" should "be connectable through a nested mutation by unique" in {
val project = SchemaDsl() { schema =>
val parent = schema.model("Parent").field_!("p", _.String, isUnique = true)
val child =
schema.model("Child").field_!("c", _.String, isUnique = true).oneToOneRelation_!("parentReq", "childOpt", parent, isRequiredOnFieldB = false)
}
database.setup(project)
server.executeQuerySimple(
"""mutation {
| createParent(data: {
| p: "p1"
| childOpt: {
| create: {c: "c1"}
| }
| }){
| childOpt{
| c
| }
| }
|}""".stripMargin,
project
)
database.runDbActionOnClientDb(DatabaseQueryBuilder.itemCountForTable(project.id, "_ChildToParent").as[Int]) should be(Vector(1))
val res = server.executeQuerySimple(
s"""
|mutation {
| createParent(data:{
| p: "p2"
| childOpt: {connect: {c: "c1"}}
| }){
| childOpt {
| c
| }
| }
|}
""".stripMargin,
project
)
res.toString should be("""{"data":{"createParent":{"childOpt":{"c":"c1"}}}}""")
database.runDbActionOnClientDb(DatabaseQueryBuilder.itemCountForTable(project.id, "_ChildToParent").as[Int]) should be(Vector(1))
}
"a PM to C1 relation with the child already in a relation" should "be connectable through a nested mutation by unique" in {
val project = SchemaDsl() { schema =>
val child = schema.model("Child").field_!("c", _.String, isUnique = true)
schema.model("Parent").field_!("p", _.String, isUnique = true).oneToManyRelation("childrenOpt", "parentOpt", child)
}
database.setup(project)
server
.executeQuerySimple(
"""mutation {
| createParent(data: {
| p: "p1"
| childrenOpt: {
| create: [{c: "c1"}, {c: "c2"}]
| }
| }){
| childrenOpt{
| c
| }
| }
|}""".stripMargin,
project
)
database.runDbActionOnClientDb(DatabaseQueryBuilder.itemCountForTable(project.id, "_ParentToChild").as[Int]) should be(Vector(2))
// we are even resilient against multiple identical connects here -> twice connecting to c2
val res = server.executeQuerySimple(
s"""
|mutation {
| createParent(data:{
| p: "p2"
| childrenOpt: {connect: [{c: "c1"},{c: "c2"},{c: "c2"}]}
| }){
| childrenOpt {
| c
| }
| }
|}
""".stripMargin,
project
)
res.toString should be("""{"data":{"createParent":{"childrenOpt":[{"c":"c1"},{"c":"c2"}]}}}""")
database.runDbActionOnClientDb(DatabaseQueryBuilder.itemCountForTable(project.id, "_ParentToChild").as[Int]) should be(Vector(2))
}
"a PM to C1 relation with the child without a relation" should "be connectable through a nested mutation by unique" in {
val project = SchemaDsl() { schema =>
val child = schema.model("Child").field_!("c", _.String, isUnique = true)
schema.model("Parent").field_!("p", _.String, isUnique = true).oneToManyRelation("childrenOpt", "parentOpt", child)
}
database.setup(project)
val child1Id = server
.executeQuerySimple(
"""mutation {
| createChild(data: {c: "c1"})
| {
| id
| }
|}""".stripMargin,
project
)
.pathAsString("data.createChild.id")
database.runDbActionOnClientDb(DatabaseQueryBuilder.itemCountForTable(project.id, "_ParentToChild").as[Int]) should be(Vector(0))
val res = server.executeQuerySimple(
s"""
|mutation {
| createParent(data:{
| p: "p2"
| childrenOpt: {connect: {c: "c1"}}
| }){
| childrenOpt {
| c
| }
| }
|}
""".stripMargin,
project
)
res.toString should be("""{"data":{"createParent":{"childrenOpt":[{"c":"c1"}]}}}""")
database.runDbActionOnClientDb(DatabaseQueryBuilder.itemCountForTable(project.id, "_ParentToChild").as[Int]) should be(Vector(1))
}
"a PM to C1 relation with a child without a relation" should "error if also trying to connect to a non-existing node" in {
val project = SchemaDsl() { schema =>
val child = schema.model("Child").field_!("c", _.String, isUnique = true)
schema.model("Parent").field_!("p", _.String, isUnique = true).oneToManyRelation("childrenOpt", "parentOpt", child)
}
database.setup(project)
val child1Id = server
.executeQuerySimple(
"""mutation {
| createChild(data: {c: "c1"})
| {
| id
| }
|}""".stripMargin,
project
)
.pathAsString("data.createChild.id")
database.runDbActionOnClientDb(DatabaseQueryBuilder.itemCountForTable(project.id, "_ParentToChild").as[Int]) should be(Vector(0))
server.executeQuerySimpleThatMustFail(
s"""
|mutation {
| createParent(data:{
| p: "p2"
| childrenOpt: {connect: [{c: "c1"}, {c: "DOES NOT EXIST"}]}
| }){
| childrenOpt {
| c
| }
| }
|}
""".stripMargin,
project,
errorCode = 3039,
errorContains = "No Node for the model Child with value DOES NOT EXIST for c found."
)
database.runDbActionOnClientDb(DatabaseQueryBuilder.itemCountForTable(project.id, "_ParentToChild").as[Int]) should be(Vector(0))
}
"a P1! to CM relation with the child already in a relation" should "be connectable through a nested mutation by unique" in {
val project = SchemaDsl() { schema =>
val parent = schema.model("Parent").field_!("p", _.String, isUnique = true)
val child = schema.model("Child").field_!("c", _.String, isUnique = true).oneToManyRelation_!("parentsOpt", "childReq", parent)
}
database.setup(project)
server.executeQuerySimple(
"""mutation {
| createParent(data: {
| p: "p1"
| childReq: {
| create: {c: "c1"}
| }
| }){
| childReq{
| c
| }
| }
|}""".stripMargin,
project
)
database.runDbActionOnClientDb(DatabaseQueryBuilder.itemCountForTable(project.id, "_ChildToParent").as[Int]) should be(Vector(1))
val res = server.executeQuerySimple(
s"""
|mutation {
| createParent(data:{
| p: "p2"
| childReq: {connect: {c: "c1"}}
| }){
| childReq {
| c
| }
| }
|}
""".stripMargin,
project
)
res.toString should be("""{"data":{"createParent":{"childReq":{"c":"c1"}}}}""")
server.executeQuerySimple(s"""query{children{parentsOpt{p}}}""", project).toString should be(
"""{"data":{"children":[{"parentsOpt":[{"p":"p1"},{"p":"p2"}]}]}}""")
database.runDbActionOnClientDb(DatabaseQueryBuilder.itemCountForTable(project.id, "_ChildToParent").as[Int]) should be(Vector(2))
}
"a P1! to CM relation with the child not already in a relation" should "be connectable through a nested mutation by unique" in {
val project = SchemaDsl() { schema =>
val parent = schema.model("Parent").field_!("p", _.String, isUnique = true)
val child = schema.model("Child").field_!("c", _.String, isUnique = true).oneToManyRelation_!("parentsOpt", "childReq", parent)
}
database.setup(project)
server.executeQuerySimple(
"""mutation {
| createChild(data: {c: "c1"}){
| c
| }
|}""".stripMargin,
project
)
database.runDbActionOnClientDb(DatabaseQueryBuilder.itemCountForTable(project.id, "_ChildToParent").as[Int]) should be(Vector(0))
val res = server.executeQuerySimple(
s"""
|mutation {
| createParent(data:{
| p: "p2"
| childReq: {connect: {c: "c1"}}
| }){
| childReq {
| c
| }
| }
|}
""".stripMargin,
project
)
res.toString should be("""{"data":{"createParent":{"childReq":{"c":"c1"}}}}""")
server.executeQuerySimple(s"""query{children{parentsOpt{p}}}""", project).toString should be("""{"data":{"children":[{"parentsOpt":[{"p":"p2"}]}]}}""")
database.runDbActionOnClientDb(DatabaseQueryBuilder.itemCountForTable(project.id, "_ChildToParent").as[Int]) should be(Vector(1))
}
"a P1 to CM relation with the child already in a relation" should "be connectable through a nested mutation by unique" in {
val project = SchemaDsl() { schema =>
val parent = schema.model("Parent").field_!("p", _.String, isUnique = true)
val child = schema.model("Child").field_!("c", _.String, isUnique = true).oneToManyRelation("parentsOpt", "childOpt", parent)
}
database.setup(project)
server.executeQuerySimple(
"""mutation {
| createParent(data: {
| p: "p1"
| childOpt: {
| create: {c: "c1"}
| }
| }){
| childOpt{
| c
| }
| }
|}""".stripMargin,
project
)
database.runDbActionOnClientDb(DatabaseQueryBuilder.itemCountForTable(project.id, "_ChildToParent").as[Int]) should be(Vector(1))
val res = server.executeQuerySimple(
s"""
|mutation {
| createParent(data:{
| p: "p2"
| childOpt: {connect: {c: "c1"}}
| }){
| childOpt{
| c
| }
| }
|}
""".stripMargin,
project
)
res.toString should be("""{"data":{"createParent":{"childOpt":{"c":"c1"}}}}""")
server.executeQuerySimple(s"""query{children{parentsOpt{p}}}""", project).toString should be(
"""{"data":{"children":[{"parentsOpt":[{"p":"p1"},{"p":"p2"}]}]}}""")
database.runDbActionOnClientDb(DatabaseQueryBuilder.itemCountForTable(project.id, "_ChildToParent").as[Int]) should be(Vector(2))
}
"a P1 to CM relation with the child not already in a relation" should "be connectable through a nested mutation by unique" in {
val project = SchemaDsl() { schema =>
val parent = schema.model("Parent").field_!("p", _.String, isUnique = true)
val child = schema.model("Child").field_!("c", _.String, isUnique = true).oneToManyRelation("parentsOpt", "childOpt", parent)
}
database.setup(project)
server.executeQuerySimple(
"""mutation {
| createChild(data: {c: "c1"}){
| c
| }
|}""".stripMargin,
project
)
database.runDbActionOnClientDb(DatabaseQueryBuilder.itemCountForTable(project.id, "_ChildToParent").as[Int]) should be(Vector(0))
val res = server.executeQuerySimple(
s"""
|mutation {
| createParent(data:{
| p: "p2"
| childOpt: {connect: {c: "c1"}}
| }){
| childOpt {
| c
| }
| }
|}
""".stripMargin,
project
)
res.toString should be("""{"data":{"createParent":{"childOpt":{"c":"c1"}}}}""")
server.executeQuerySimple(s"""query{children{parentsOpt{p}}}""", project).toString should be("""{"data":{"children":[{"parentsOpt":[{"p":"p2"}]}]}}""")
database.runDbActionOnClientDb(DatabaseQueryBuilder.itemCountForTable(project.id, "_ChildToParent").as[Int]) should be(Vector(1))
}
"a PM to CM relation with the children already in a relation" should "be connectable through a nested mutation by unique" in {
val project = SchemaDsl() { schema =>
val parent = schema.model("Parent").field_!("p", _.String, isUnique = true)
val child = schema.model("Child").field_!("c", _.String, isUnique = true).manyToManyRelation("parentsOpt", "childrenOpt", parent)
}
database.setup(project)
server.executeQuerySimple(
"""mutation {
| createParent(data: {
| p: "p1"
| childrenOpt: {
| create: [{c: "c1"},{c: "c2"}]
| }
| }){
| childrenOpt{
| c
| }
| }
|}""".stripMargin,
project
)
database.runDbActionOnClientDb(DatabaseQueryBuilder.itemCountForTable(project.id, "_ChildToParent").as[Int]) should be(Vector(2))
val res = server.executeQuerySimple(
s"""
|mutation {
| createParent(data:{
| p: "p2"
| childrenOpt: {connect: [{c: "c1"}, {c: "c2"}]}
| }){
| childrenOpt{
| c
| }
| }
|}
""".stripMargin,
project
)
res.toString should be("""{"data":{"createParent":{"childrenOpt":[{"c":"c1"},{"c":"c2"}]}}}""")
server.executeQuerySimple(s"""query{children{parentsOpt{p}}}""", project).toString should be(
"""{"data":{"children":[{"parentsOpt":[{"p":"p1"},{"p":"p2"}]},{"parentsOpt":[{"p":"p1"},{"p":"p2"}]}]}}""")
database.runDbActionOnClientDb(DatabaseQueryBuilder.itemCountForTable(project.id, "_ChildToParent").as[Int]) should be(Vector(4))
}
// PM <-> CM: a child created with NO relation can be connected afterwards.
// Note the singular object form `connect: {c: "c1"}` (not a list).
"a PM to CM relation with the child not already in a relation" should "be connectable through a nested mutation by unique" in {
  val project = SchemaDsl() { schema =>
    val parent = schema.model("Parent").field_!("p", _.String, isUnique = true)
    val child = schema.model("Child").field_!("c", _.String, isUnique = true).manyToManyRelation("parentsOpt", "childrenOpt", parent)
  }
  database.setup(project)
  // Seed an unconnected child -> 0 join rows.
  server.executeQuerySimple(
    """mutation {
      | createChild(data: {c: "c1"}){
      |   c
      | }
      |}""".stripMargin,
    project
  )
  database.runDbActionOnClientDb(DatabaseQueryBuilder.itemCountForTable(project.id, "_ChildToParent").as[Int]) should be(Vector(0))
  val res = server.executeQuerySimple(
    s"""
       |mutation {
       | createParent(data:{
       |   p: "p2"
       |   childrenOpt: {connect: {c: "c1"}}
       | }){
       |   childrenOpt {
       |     c
       |   }
       | }
       |}
    """.stripMargin,
    project
  )
  res.toString should be("""{"data":{"createParent":{"childrenOpt":[{"c":"c1"}]}}}""")
  // Exactly one join row was created for the connect.
  server.executeQuerySimple(s"""query{children{parentsOpt{p}}}""", project).toString should be("""{"data":{"children":[{"parentsOpt":[{"p":"p2"}]}]}}""")
  database.runDbActionOnClientDb(DatabaseQueryBuilder.itemCountForTable(project.id, "_ChildToParent").as[Int]) should be(Vector(1))
}
// Many-to-many where only one side of the relation is exposed in the schema
// (includeFieldB = false): connect must still work from the exposed side.
"a PM to CM relation without a backrelation" should "be connectable through a nested mutation by unique" in {
  val project = SchemaDsl() { schema =>
    val role = schema.model("Role").field_!("r", _.String, isUnique = true)
    val user = schema.model("User").field_!("u", _.String, isUnique = true).manyToManyRelation("roles", "notexposed", role, includeFieldB = false)
  }
  database.setup(project)
  // Seed an unconnected role -> 0 join rows.
  server.executeQuerySimple(
    """mutation {
      | createRole(data: {r: "r1"}){
      |   r
      | }
      |}""".stripMargin,
    project
  )
  database.runDbActionOnClientDb(DatabaseQueryBuilder.itemCountForTable(project.id, "_UserToRole").as[Int]) should be(Vector(0))
  val res = server.executeQuerySimple(
    s"""
       |mutation {
       | createUser(data:{
       |   u: "u2"
       |   roles: {connect: {r: "r1"}}
       | }){
       |   roles {
       |     r
       |   }
       | }
       |}
    """.stripMargin,
    project
  )
  res.toString should be("""{"data":{"createUser":{"roles":[{"r":"r1"}]}}}""")
  database.runDbActionOnClientDb(DatabaseQueryBuilder.itemCountForTable(project.id, "_UserToRole").as[Int]) should be(Vector(1))
}
// One-to-many: connecting pre-existing children by their generated id,
// interpolating the ids returned from the create mutations.
"a many relation" should "be connectable through a nested mutation by id" in {
  val project = SchemaDsl() { schema =>
    val comment = schema.model("Comment").field_!("text", _.String)
    schema.model("Todo").oneToManyRelation("comments", "todo", comment)
  }
  database.setup(project)
  // Create two comments up front and capture their ids.
  val comment1Id = server.executeQuerySimple("""mutation { createComment(data: {text: "comment1"}){ id } }""", project).pathAsString("data.createComment.id")
  val comment2Id = server.executeQuerySimple("""mutation { createComment(data: {text: "comment2"}){ id } }""", project).pathAsString("data.createComment.id")
  val result = server.executeQuerySimple(
    s"""
       |mutation {
       | createTodo(data:{
       |   comments: {
       |     connect: [{id: "$comment1Id"}, {id: "$comment2Id"}]
       |   }
       | }){
       |   id
       |   comments {
       |     id
       |     text
       |   }
       | }
       |}
    """.stripMargin,
    project
  )
  // Both comments are attached, preserving insertion order.
  mustBeEqual(
    actual = result.pathAsJsValue("data.createTodo.comments").toString,
    expected = s"""[{"id":"$comment1Id","text":"comment1"},{"id":"$comment2Id","text":"comment2"}]"""
  )
}
// Connecting a non-existent child id must fail with error 3039
// ("node not found") rather than silently creating an empty relation.
"a many relation" should "throw a proper error if connected by wrong id" in {
  val project = SchemaDsl() { schema =>
    val comment = schema.model("Comment").field_!("text", _.String)
    schema.model("Todo").oneToManyRelation("comments", "todo", comment)
  }
  database.setup(project)
  server.executeQuerySimpleThatMustFail(
    s"""
       |mutation {
       | createTodo(data:{
       |   comments: {
       |     connect: [{id: "DoesNotExist"}]
       |   }
       | }){
       |   id
       |   comments {
       |     id
       |     text
       |   }
       | }
       |}
    """.stripMargin,
    project,
    errorCode = 3039,
    errorContains = "No Node for the model Comment with value DoesNotExist for id found."
  )
}
// Same 3039 error check, but connecting from the child (Comment) side to a
// missing Todo id — the error message must name the Todo model.
"a many relation" should "throw a proper error if connected by wrong id the other way around" in {
  val project = SchemaDsl() { schema =>
    val comment = schema.model("Comment").field_!("text", _.String)
    schema.model("Todo").oneToManyRelation("comments", "todo", comment)
  }
  database.setup(project)
  server.executeQuerySimpleThatMustFail(
    s"""
       |mutation {
       | createComment(data:{
       |   text: "bla"
       |   todo: {
       |     connect: {id: "DoesNotExist"}
       |   }
       | }){
       |   id
       | }
       |}
    """.stripMargin,
    project,
    errorCode = 3039,
    errorContains = "No Node for the model Todo with value DoesNotExist for id found."
  )
}
// An id that exists but belongs to a DIFFERENT model (a Todo id passed where
// a Comment id is expected) must also fail with 3039, not cross-link models.
"a many relation" should "throw a proper error if the id of a wrong model is provided" in {
  val project = SchemaDsl() { schema =>
    val comment = schema.model("Comment").field_!("text", _.String)
    schema.model("Todo").oneToManyRelation("comments", "todo", comment)
  }
  database.setup(project)
  val comment1Id = server.executeQuerySimple("""mutation { createComment(data: {text: "comment1"}){ id } }""", project).pathAsString("data.createComment.id")
  val comment2Id = server.executeQuerySimple("""mutation { createComment(data: {text: "comment2"}){ id } }""", project).pathAsString("data.createComment.id")
  // Create a valid Todo first so we hold a genuine Todo id.
  val todoId = server
    .executeQuerySimple(
      s"""
         |mutation {
         | createTodo(data:{
         |   comments: {
         |     connect: [{id: "$comment1Id"}, {id: "$comment2Id"}]
         |   }
         | }){
         |   id
         | }
         |}
      """.stripMargin,
      project
    )
    .pathAsString("data.createTodo.id")
  // Now misuse that Todo id as a Comment id.
  server.executeQuerySimpleThatMustFail(
    s"""
       |mutation {
       | createTodo(data:{
       |   comments: {
       |     connect: [{id: "$todoId"}]
       |   }
       | }){
       |   id
       | }
       |}
    """.stripMargin,
    project,
    errorCode = 3039,
    errorContains = s"No Node for the model Comment with value $todoId for id found."
  )
}
// Connect does not require the id field: any unique field ("alias" here)
// can identify the nodes to connect.
"a many relation" should "be connectable through a nested mutation by any unique argument" in {
  val project = SchemaDsl() { schema =>
    val comment = schema.model("Comment").field_!("text", _.String).field_!("alias", _.String, isUnique = true)
    schema.model("Todo").oneToManyRelation("comments", "todo", comment)
  }
  database.setup(project)
  val comment1Alias = server
    .executeQuerySimple("""mutation { createComment(data: {text: "text comment1", alias: "comment1"}){ alias } }""", project)
    .pathAsString("data.createComment.alias")
  val comment2Alias = server
    .executeQuerySimple("""mutation { createComment(data: {text: "text comment2", alias: "comment2"}){ alias } }""", project)
    .pathAsString("data.createComment.alias")
  val result = server.executeQuerySimple(
    s"""
       |mutation {
       | createTodo(data:{
       |   comments: {
       |     connect: [{alias: "$comment1Alias"}, {alias: "$comment2Alias"}]
       |   }
       | }){
       |   id
       |   comments {
       |     alias
       |     text
       |   }
       | }
       |}
    """.stripMargin,
    project
  )
  mustBeEqual(
    actual = result.pathAsJsValue("data.createTodo.comments").toString,
    expected = s"""[{"alias":"$comment1Alias","text":"text comment1"},{"alias":"$comment2Alias","text":"text comment2"}]"""
  )
}
// Connect-by-unique from the child side: the unique field ("alias") lives on
// the one-side model (Todo) and the Comment connects to it.
"a many relation" should "be connectable through a nested mutation by any unique argument in the opposite direction" in {
  val project = SchemaDsl() { schema =>
    val comment = schema.model("Comment")
    schema.model("Todo").field_!("title", _.String).oneToManyRelation("comments", "todo", comment).field_!("alias", _.String, isUnique = true)
  }
  database.setup(project)
  val todoAlias = server
    .executeQuerySimple("""mutation { createTodo(data: {title: "the title", alias: "todo1"}){ alias } }""", project)
    .pathAsString("data.createTodo.alias")
  val result = server.executeQuerySimple(
    s"""
       |mutation {
       | createComment(
       |   data: {
       |     todo: {
       |       connect: { alias: "$todoAlias"}
       |     }
       |   }
       | )
       | {
       |   todo {
       |     alias
       |     title
       |   }
       | }
       |}
    """.stripMargin,
    project
  )
  mustBeEqual(
    actual = result.pathAsJsValue("data.createComment.todo").toString,
    expected = s"""{"alias":"$todoAlias","title":"the title"}"""
  )
}
}
|
<reponame>yagooliver/react-native-first-app<filename>my-app/views/login/LoginConnect.js<gh_stars>0
import {
changeEdit,
changeSignUpEdit,
changeTabIndex,
singUpUser,
loginUser
} from './actions/actions';
import {connect} from 'react-redux';
import LoginContainer from './LoginContainer';
// Projects the login slice of the redux store onto LoginContainer's props.
const mapStateToProps = (state) => {
  const login = state.login;
  return {
    login: login.loginForm,
    signUp: login.signUp,
    alert: login.alert,
    message: login.message,
    tabIndex: login.tabIndex,
    isSubmiting: login.isSubmiting
  };
};
// Wraps each action creator so the container can fire actions without
// touching the store directly.
const mapDispatchToProps = (dispatch) => {
  return {
    changeEdit: (item) => dispatch(changeEdit(item)),
    changeSignUpEdit: (item) => dispatch(changeSignUpEdit(item)),
    changeTabIndex: (index) => dispatch(changeTabIndex(index)),
    // NOTE(review): "singUpUser" looks like a typo for "signUpUser", but the
    // name is defined in ./actions/actions — rename there first if desired.
    singUpUser: (signUpForm) => dispatch(singUpUser(signUpForm)),
    loginUser: (loginForm) => dispatch(loginUser(loginForm))
  };
};

export default connect(mapStateToProps, mapDispatchToProps)(LoginContainer);
package org.rzo.yajsw.wrapper;
import java.util.concurrent.Executor;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.logging.Logger;
import org.rzo.yajsw.util.Cycler;
/**
 * Trigger action that periodically checks how many triggers arrived during a
 * check period and, when fewer than the configured minimum were seen, runs a
 * set of fallback actions.
 *
 * Fix: the original counted triggers with "_counter++" on a volatile int.
 * execute() can be invoked concurrently, and increment of a volatile is not
 * atomic (read-modify-write), so counts could be lost and the missing-trigger
 * actions could fire spuriously. The counter is now an AtomicInteger.
 */
class MissingTriggerAction implements TriggerAction
{
	/** Periodic timer that evaluates the trigger count once per period. */
	volatile private Cycler _cycler;
	/** Number of triggers observed during the current check period. */
	private final AtomicInteger _counter = new AtomicInteger(0);
	/** Minimum number of triggers expected per period. */
	private final int _count;
	/** Actions to run when too few triggers arrived. */
	private final TriggerAction[] _actions;
	/** Executor used to run the actions outside the cycler thread. */
	final Executor _executor;

	/**
	 * @param executor executor for both the cycler and the fired actions
	 * @param period   check period (used as both initial delay and interval)
	 * @param count    minimum triggers expected per period
	 * @param actions  actions fired when fewer than {@code count} were seen
	 * @param autoStop when true, stop checking after the first miss
	 * @param logger   logger used when an action fires
	 */
	MissingTriggerAction(Executor executor, long period, int count, TriggerAction[] actions, final boolean autoStop, final Logger logger)
	{
		_count = count;
		_executor = executor;
		_actions = actions;
		_cycler = new Cycler(period, period, executor, new Runnable()
		{
			public void run()
			{
				if (_counter.get() < _count)
				{
					if (autoStop)
						_cycler.stop();
					for (final TriggerAction action : _actions)
						if (action != null)
						{
							// Run the action in a separate thread, because on
							// restart the cycler thread will be interrupted.
							_executor.execute(new Runnable()
							{
								public void run()
								{
									logger.info("missing trigger executed, found # " + _counter.get() + " triggers during check period");
									action.execute("");
								}
							});
						}
				}
				else
					// Enough triggers were seen: reset for the next period.
					_counter.set(0);
			}
		});
	}

	/** Start the periodic missing-trigger check. */
	void start()
	{
		_cycler.start();
	}

	/** Stop the periodic missing-trigger check. */
	void stop()
	{
		_cycler.stop();
	}

	/** Called for every observed trigger line; counts it for the current period. */
	public Object execute(String line)
	{
		_counter.incrementAndGet();
		return null;
	}
}
|
<filename>images/tabix.ui/src/app/draw/draw-d3.js
/*
* Licensed under the Apache License, Version 2.0 Copyright 2017 <NAME>,<NAME>,SMI2 LLC and other contributors
*/
'use strict';
/**
 * D3-backed chart widget.
 * Thin wrapper over DrawBasicChart that only tags the instance with the 'd3'
 * library identifier; all drawing behaviour comes from the base class.
 * NOTE(review): DrawBasicChart is not imported in this file — presumably it is
 * provided as a global by the build; confirm before converting to ES modules.
 */
class DrawD3 extends DrawBasicChart {
  constructor(Widget) {
    super(Widget);
    // Identifier the framework uses to pick the rendering backend.
    this.library = 'd3';
  }
}
|
import React, { useEffect, useState } from 'react';
import Skeleton from 'react-loading-skeleton';
import { useMutation, useQuery } from '@apollo/client';
import { ILanguage } from '@ng-scrappy/models';
import {
SUMMARIZED_UNSUPPORTED_LANGUAGES_QUERY,
SET_LANGUAGE_VOTE_QUERY,
} from '../../queries/translations.queries';
import { useLocalStorage } from '../../hooks/use-local-storage.hook';
// localStorage key under which the user's already-cast language votes live.
const USER_LANGUAGE_VOTES = 'user_language_votes';

/**
 * Card that lets a user vote for the next language translation to add.
 * Reads the summarized unsupported-languages list, records at most one vote
 * per language set (tracked in localStorage), and shows vote percentages once
 * the user has voted.
 */
export function LanguageVoteCard() {
  const { loading, data, refetch } = useQuery(
    SUMMARIZED_UNSUPPORTED_LANGUAGES_QUERY
  );
  // Mutation records a vote and refetches the summary so percentages update.
  // NOTE(review): `res` is never read — mutation errors are currently ignored.
  const [saveToDb, res] = useMutation(SET_LANGUAGE_VOTE_QUERY, {
    refetchQueries: [{ query: SUMMARIZED_UNSUPPORTED_LANGUAGES_QUERY }],
  });
  const unSupportedLangs: ILanguage[] = data?.unsupportedLanguages;
  /**
   * Tracks user's cast votes in local storage
   * Facilitates avoiding multiple casts
   */
  // NOTE(review): getItem() returns null when the key is absent and
  // JSON.parse(null) yields null at runtime, so votedLanguages may be null —
  // the guards below rely on that.
  const votedLanguages: string[] = JSON.parse(
    localStorage.getItem(USER_LANGUAGE_VOTES)
  );
  const [votedLangs, setLocalStorageVotes] = useLocalStorage(
    USER_LANGUAGE_VOTES,
    votedLanguages
  );
  // Boolean used to track if user has voted for current language-set
  const [hasVoted, setHasVoted] = useState(null);
  // Recompute hasVoted whenever stored votes or the language set change:
  // the user "has voted" if any stored vote names a language in the set.
  useEffect(() => {
    const hasVotedForSet =
      !votedLangs || !votedLangs.length || !unSupportedLangs
        ? false
        : !!votedLangs.filter((l) =>
            unSupportedLangs.map((l) => l.language).includes(l)
          ).length;
    setHasVoted(hasVotedForSet);
    return () => null;
  }, [votedLangs, unSupportedLangs]);
  // Enable casting a vote
  const [vote, setVote] = useState(null);
  // When a vote is selected (and none cast yet): persist it to the backend,
  // append it to localStorage, and lock further voting.
  useEffect(() => {
    if (hasVoted || !vote) {
      return;
    }
    saveToDb({ variables: { language: vote } });
    refetch();
    // If update successful, update localStorage
    const voted = !votedLangs ? [vote] : [...votedLanguages, vote];
    setLocalStorageVotes(voted);
    setHasVoted(true);
    return () => null;
  }, [
    votedLangs,
    vote,
    votedLanguages,
    hasVoted,
    saveToDb,
    setLocalStorageVotes,
    refetch,
  ]);
  // NOTE(review): `lang.votes * 10` below assumes a language maxes out at 10
  // votes (so votes*10 is a percentage) — confirm against the backend schema.
  return (
    <div className="">
      {loading ? (
        <div className="container pt-3">
          <Skeleton height={18} className="mb-2 d-block" />
          <Skeleton height={18} width={150} className="mb-2 d-block" />
          <Skeleton height={18} width={200} className="mb-2 d-block" />
        </div>
      ) : hasVoted ? (
        <p className="card-header text-dark" style={{ fontSize: '18px' }}>
          Thanks for voting{' '}
          <span role="img" aria-label="thumbsUp">
            👍🏽
          </span>{' '}
          <br />
          Keep posted to see whether your prefered language will be added! 😉
        </p>
      ) : (
        <p className="card-header text-dark" style={{ fontSize: '18px' }}>
          Which language translations would you like to see added next? <br />
          Let us know! 😏👇
        </p>
      )}
      <div className="container-fluid pt-3">
        {unSupportedLangs ? (
          unSupportedLangs.map((lang, i) => (
            <div
              className="mb-3"
              key={i}
              role="button"
              onClick={() => setVote(lang.language)}
            >
              <div
                className="w-100 d-flex justify-content-between align-items-center text-capitalize pr-2 pl-2 rounded"
                style={{ background: '#282828', height: 30 }}
              >
                <div style={{ zIndex: 1000 }} className="text-white text-left">
                  <b>{lang.language}</b>
                </div>
                {hasVoted ? (
                  <div style={{ zIndex: 1000 }} className="text-white ">
                    <b>{lang.votes * 10}%</b>
                  </div>
                ) : (
                  ''
                )}
              </div>
              {hasVoted ? (
                <div
                  className="progress"
                  style={{ height: '25px', marginTop: '-28px', zIndex: 1 }}
                >
                  <div
                    className="progress-bar rounded bg-warning"
                    role="progressbar"
                    style={{ height: '25px', width: `${lang.votes * 10}%` }}
                    aria-valuenow={lang.votes}
                    aria-valuemin={0}
                    aria-valuemax={10}
                  ></div>
                </div>
              ) : (
                ''
              )}
            </div>
          ))
        ) : (
          <Skeleton count={5} height={25} className="w-100 mb-3 d-block" />
        )}
      </div>
      <div className="card-footer">
        {loading ? (
          <div>
            <Skeleton count={2} height={10} className="w-100 mb-1 d-block" />
            <Skeleton count={1} height={10} className="w-50 mb-1 d-block" />
          </div>
        ) : (
          <p>
            The first language to hit <b>100%</b> votes will have it's
            crowd-sourced language translations added after an hour or so 😎.
          </p>
        )}
      </div>
    </div>
  );
}
|
<gh_stars>0
module ExternalEvents
  # Applies an "add member" change from an external enrollment event onto an
  # existing policy: refreshes the policy-level premium/responsibility totals
  # and creates or updates the affected enrollees.
  #
  # Fix: BigDecimal.new was deprecated in Ruby 2.6 and removed in 2.7+; the
  # supported spelling is the Kernel#BigDecimal conversion method.
  class ExternalPolicyMemberAdd
    attr_reader :policy_node
    attr_reader :added_member_ids
    attr_reader :policy_to_update

    include Handlers::EnrollmentEventXmlHelper

    # pol_to_change    : the Policy record to mutate
    # p_node           : Openhbx::Cv2::Policy parsed from the event XML
    # added_member_ids : member ids that are new on this policy
    def initialize(pol_to_change, p_node, added_member_ids)
      @policy_node = p_node
      @added_member_ids = added_member_ids
      @policy_to_update = pol_to_change
    end

    # Total premium from the CV enrollment node; 0.00 when the node is absent.
    def extract_pre_amt_tot
      p_enrollment = Maybe.new(@policy_node).policy_enrollment.value
      return 0.00 if p_enrollment.blank?
      BigDecimal(Maybe.new(p_enrollment).premium_total_amount.strip.value)
    end

    # Total responsible amount from the CV enrollment node; 0.00 when absent.
    def extract_tot_res_amt
      p_enrollment = Maybe.new(@policy_node).policy_enrollment.value
      return 0.00 if p_enrollment.blank?
      BigDecimal(Maybe.new(p_enrollment).total_responsible_amount.strip.value)
    end

    # Premium for a single enrollee node; 0.00 when no premium is present.
    def extract_enrollee_premium(enrollee)
      pre_string = Maybe.new(enrollee).benefit.premium_amount.value
      return 0.00 if pre_string.blank?
      BigDecimal(pre_string)
    end

    # Market-specific financials: employer + employer-responsible amount for
    # SHOP, applied APTC for the individual market. Empty hash when missing.
    def extract_other_financials
      p_enrollment = Maybe.new(@policy_node).policy_enrollment.value
      return({}) if p_enrollment.blank?
      if p_enrollment.shop_market
        tot_emp_res_amt = Maybe.new(p_enrollment).shop_market.total_employer_responsible_amount.strip.value
        employer = find_employer(@policy_node)
        return({ :employer => employer }) if tot_emp_res_amt.blank?
        {
          :employer => employer,
          :tot_emp_res_amt => BigDecimal(tot_emp_res_amt)
        }
      else
        applied_aptc_val = Maybe.new(p_enrollment).individual_market.applied_aptc_amount.strip.value
        return({}) if applied_aptc_val.blank?
        {
          :applied_aptc => BigDecimal(applied_aptc_val)
        }
      end
    end

    # Map a member->subscriber relationship URI fragment to an enrollee
    # rel_code. Unknown relationships default to "child".
    def extract_rel_from_me(rel)
      simple_relationship = Maybe.new(rel).relationship_uri.strip.split("#").last.downcase.value
      case simple_relationship
      when "life_partner", "domestic_partner"
        "life partner"
      when "spouse"
        "spouse"
      when "ward"
        "ward"
      else
        "child"
      end
    end

    # Same mapping for the subscriber->member direction; note the guardian
    # relationship is inverted to "ward" here.
    def extract_rel_from_sub(rel)
      simple_relationship = Maybe.new(rel).relationship_uri.strip.split("#").last.downcase.value
      case simple_relationship
      when "life_partner", "domestic_partner"
        "life partner"
      when "spouse"
        "spouse"
      when "court_appointed_guardian"
        "ward"
      else
        "child"
      end
    end

    # Determine the enrollee's relationship code to the subscriber by scanning
    # the person_relationships in both directions; defaults to "child".
    def extract_rel_code(enrollee)
      sub_id = subscriber_id
      mem_id = extract_member_id(enrollee)
      prs = Maybe.new(enrollee).member.person_relationships.value
      return "child" if prs.blank?
      me_to_sub = prs.select do |pr|
        subj_ind = Maybe.new(pr).subject_individual.strip.split("#").last.value
        obj_ind = Maybe.new(pr).object_individual.strip.split("#").last.value
        (subj_ind == mem_id) && (obj_ind == sub_id)
      end
      sub_to_me = prs.select do |pr|
        subj_ind = Maybe.new(pr).subject_individual.strip.split("#").last.value
        obj_ind = Maybe.new(pr).object_individual.strip.split("#").last.value
        (subj_ind == sub_id) && (obj_ind == mem_id)
      end
      return "child" if (me_to_sub.empty? && sub_to_me.empty?)
      return extract_rel_from_me(me_to_sub.first) if me_to_sub.any?
      return extract_rel_from_sub(sub_to_me.first) if sub_to_me.any?
      "child"
    end

    # Create a new Enrollee for a newly added member, or refresh the premium
    # on an existing one. Saves the policy in either case.
    def build_enrollee(policy, enrollee_node)
      member_id = extract_member_id(enrollee_node)
      if @added_member_ids.include?(member_id)
        policy.enrollees << Enrollee.new({
          :m_id => member_id,
          :rel_code => extract_rel_code(enrollee_node),
          :ben_stat => policy.is_cobra? ? "cobra" : "active",
          :emp_stat => "active",
          :coverage_start => extract_enrollee_start(enrollee_node),
          :pre_amt => extract_enrollee_premium(enrollee_node)
        })
      else
        enrollee = policy.enrollees.detect { |en| en.m_id == member_id }
        if enrollee
          enrollee.pre_amt = extract_enrollee_premium(enrollee_node)
          enrollee.save!
        end
      end
      policy.save!
    end

    # Memoized subscriber member id from the policy node.
    def subscriber_id
      @subscriber_id ||= begin
        sub_node = extract_subscriber(@policy_node)
        extract_member_id(sub_node)
      end
    end

    # Entry point: update policy totals, reload, then apply every enrollee
    # node from the event. Always returns true.
    def persist
      pol = policy_to_update
      pol.update_attributes!({
        :pre_amt_tot => extract_pre_amt_tot,
        :tot_res_amt => extract_tot_res_amt
      }.merge(extract_other_financials))
      pol = Policy.find(pol._id)
      @policy_node.enrollees.each do |en|
        build_enrollee(pol, en)
      end
      true
    end
  end
end
|
# Capture the current user's name and shadow password hash into userenv.sh.
USER=$(whoami)
# Fix: the original `grep $USER` was unanchored, so e.g. user "bob" could
# match "bobby" or any line containing the name. Anchor on the first field
# and extract field 2 (the hash) with cut instead of an inline python one-liner.
PASSWORD=$(sudo grep "^${USER}:" /etc/shadow | cut -d: -f2)
echo "USER=$USER" > userenv.sh
echo "PASSWORD=$PASSWORD" >> userenv.sh
# The hash is sensitive; keep the generated file private.
chmod 600 userenv.sh
|
# Impala environment template (rendered by the provisioning tool; the
# {{ ... }} placeholders are template variables, not shell syntax).
# these are configured automatically when impala is installed
export IMPALA_HOME={{ impala_path_result.files[0].path }}
{% set version = impala_path_result.files[0].path.split('-') %}
export MAPR_HOME=/opt/mapr
export IMPALA_VERSION={{ version[1] }}
export LIBHDFS_OPTS="-Dhadoop.login=hybrid -Dhadoop.login=hybrid_keytab -Djavax.security.auth.useSubjectCredsOnly=false"
# Get the generic mapr environment variables
[ -f ${MAPR_HOME}/conf/env.sh ] && . ${MAPR_HOME}/conf/env.sh
# This MUST point to the node running statestore
IMPALA_STATE_STORE_HOST={{ impala_statestore_string }}
IMPALA_STATE_STORE_PORT=24000
CATALOG_SERVICE_HOST={{ impala_catalog_string }}
# MapR client shared memory. NOTE(review): the old comment said "128 MB" but
# the value is 16384 (16 MB if the unit is KB) — confirm the intended size.
export MAPR_CLIENT_SHMEM=16384
# These impact the impala server and can be optionally changed
IMPALA_BACKEND_PORT=22000
IMPALA_LOG_DIR=${IMPALA_HOME}/logs
# Daemon flags: statestore/catalog endpoints, proxy user for mapr, 20% mem cap.
IMPALA_SERVER_ARGS=" \
-log_dir=${IMPALA_LOG_DIR} \
-state_store_port=${IMPALA_STATE_STORE_PORT} \
-use_statestore \
-authorized_proxy_user_config=mapr=* \
-state_store_host=${IMPALA_STATE_STORE_HOST} \
-catalog_service_host=${CATALOG_SERVICE_HOST} \
-be_port=${IMPALA_BACKEND_PORT} \
-mem_limit=20% \
"
# These impact the state store daemon and can be optionally changed
IMPALA_STATE_STORE_ARGS=" \
-log_dir=${IMPALA_LOG_DIR} \
-state_store_port=${IMPALA_STATE_STORE_PORT} \
-catalog_service_host=${CATALOG_SERVICE_HOST} \
"
IMPALA_CATALOG_ARGS=" \
-log_dir=${IMPALA_LOG_DIR} \
-state_store_port=${IMPALA_STATE_STORE_PORT} \
-use_statestore \
-state_store_host=${IMPALA_STATE_STORE_HOST} \
"
# for troubleshooting
ENABLE_CORE_DUMPS=false
# Impala figures these out at runtime, but they can be overridden here.
# (Normally, they should be commented out.)
# HIVE_HOME=${MAPR_HOME}/hive/hive-*
# HBASE_HOME=${MAPR_HOME}/hbase/hbase-*
# HADOOP_HOME=${MAPR_HOME}/hadoop/hadoop-*
# No longer used ...
# LIBHDFS_OPTS=-Djava.library.path=/usr/lib/impala/lib
# MYSQL_CONNECTOR_JAR=/usr/share/java/mysql-connector-java.jar
HIVE_METASTORE_URI={{ impala_hive_metastore_string }}
MAPR_HIVE_LOGIN_OPTS="-Dhadoop.login=maprsasl"
MAPR_HIVE_SERVER_LOGIN_OPTS="-Dhadoop.login=maprsasl_keytab"
#!/bin/bash
#
# Copyright (c) 2015 - 2022, Intel Corporation
# SPDX-License-Identifier: BSD-3-Clause
#
# DUDLEY BUILD ENVIRONMENT
#
# This script is intended to be sourced within an existing script or shell ONLY.
# It is NOT intended to be ./ executed.

# Intel classic compilers for serial builds (C, C++, Fortran).
export CC=icc
export CXX=icpc
# MPI compiler wrappers.
export MPICC=mpicc
export MPICXX=mpic++
# All Fortran spellings build systems commonly look for.
export FC=ifort
export F77=ifort
export F90=ifort
export MPIFORT=mpifort
export MPIFC=mpifort
export MPIF77=mpifort
export MPIF90=mpifort

# Select the Omni-Path (PSM2) libfabric provider for MPI communication.
export FI_PROVIDER=psm2
<reponame>RestUI/create-rest-ui-app<gh_stars>1-10
import React from 'react';
import {
Edit as CrudEdit,
SimpleForm,
DisabledInput,
ReferenceInput,
AutocompleteInput,
TextInput,
DateInput,
LongTextInput
} from 'rest-ui/lib/mui';
// Edit form for a comment-like resource, built on rest-ui's CrudEdit.
// All router/record props are forwarded untouched; the form wires:
//   id (read-only), post_id (autocomplete against posts, 5 per page, sorted
//   by title), author.name and body (min length 10), and created_at.
const Edit = ({ ...props }) => (
  <CrudEdit {...props}>
    <SimpleForm>
      <DisabledInput source="id" />
      <ReferenceInput source="post_id" reference="posts" perPage={5} sort={{ field: 'title', order: 'ASC' }}>
        <AutocompleteInput optionText="title" />
      </ReferenceInput>
      <TextInput source="author.name" validation={{ minLength: 10 }} />
      <DateInput source="created_at" />
      <LongTextInput source="body" validation={{ minLength: 10 }} />
    </SimpleForm>
  </CrudEdit>
);

export default Edit;
#!/bin/sh
# Fetch the IANA DNSSEC root trust anchors, verify the detached S/MIME
# signature against the pinned ICANN Root CA certificate, and only on
# successful verification hand the XML to dnssec_ta_tool.py (forwarding any
# extra command-line arguments).
#
# Fixes: all variable expansions and "$@" are now quoted (the unquoted $@
# broke arguments containing spaces), and command substitution uses $(...).

URL_ROOT_ANCHORS_XML="https://data.iana.org/root-anchors/root-anchors.xml"
URL_ROOT_ANCHORS_SIG="https://data.iana.org/root-anchors/root-anchors.p7s"

# Temporary working files; bail out early if mktemp fails.
FILE_ROOT_ANCHORS_XML=$(mktemp /tmp/root-anchors.xml.XXXXXX) || exit 1
FILE_ROOT_ANCHORS_SIG=$(mktemp /tmp/root-anchors.p7s.XXXXXX) || exit 1
FILE_ICANN_ROOT_CA_CERT=$(mktemp /tmp/icannbundle.pem.XXXXXX) || exit 1

# Pinned ICANN Root CA certificate used to validate the anchor signature.
cat >"$FILE_ICANN_ROOT_CA_CERT" <<ICANN_ROOT_CA_CERT
-----BEGIN CERTIFICATE-----
MIIDdzCCAl+gAwIBAgIBATANBgkqhkiG9w0BAQsFADBdMQ4wDAYDVQQKEwVJQ0FO
TjEmMCQGA1UECxMdSUNBTk4gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxFjAUBgNV
BAMTDUlDQU5OIFJvb3QgQ0ExCzAJBgNVBAYTAlVTMB4XDTA5MTIyMzA0MTkxMloX
DTI5MTIxODA0MTkxMlowXTEOMAwGA1UEChMFSUNBTk4xJjAkBgNVBAsTHUlDQU5O
IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MRYwFAYDVQQDEw1JQ0FOTiBSb290IENB
MQswCQYDVQQGEwJVUzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKDb
cLhPNNqc1NB+u+oVvOnJESofYS9qub0/PXagmgr37pNublVThIzyLPGCJ8gPms9S
G1TaKNIsMI7d+5IgMy3WyPEOECGIcfqEIktdR1YWfJufXcMReZwU4v/AdKzdOdfg
ONiwc6r70duEr1IiqPbVm5T05l1e6D+HkAvHGnf1LtOPGs4CHQdpIUcy2kauAEy2
paKcOcHASvbTHK7TbbvHGPB+7faAztABLoneErruEcumetcNfPMIjXKdv1V1E3C7
MSJKy+jAqqQJqjZoQGB0necZgUMiUv7JK1IPQRM2CXJllcyJrm9WFxY0c1KjBO29
iIKK69fcglKcBuFShUECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8B
Af8EBAMCAf4wHQYDVR0OBBYEFLpS6UmDJIZSL8eZzfyNa2kITcBQMA0GCSqGSIb3
DQEBCwUAA4IBAQAP8emCogqHny2UYFqywEuhLys7R9UKmYY4suzGO4nkbgfPFMfH
6M+Zj6owwxlwueZt1j/IaCayoKU3QsrYYoDRolpILh+FPwx7wseUEV8ZKpWsoDoD
2JFbLg2cfB8u/OlE4RYmcxxFSmXBg0yQ8/IoQt/bxOcEEhhiQ168H2yE5rxJMt9h
15nu5JBSewrCkYqYYmaxyOC3WrVGfHZxVI7MpIFcGdvSb2a1uyuua8l0BKgk3ujF
0/wsHNeP22qNyVO+XVBzrM8fk8BSUFuiT/6tZTYXRtEt5aKQZgXbKU5dUF3jT9qg
j/Br5BZw3X/zd325TvnswzMC1+ljLzHnQGGk
-----END CERTIFICATE-----
ICANN_ROOT_CA_CERT

curl --silent --output "$FILE_ROOT_ANCHORS_XML" "$URL_ROOT_ANCHORS_XML"
curl --silent --output "$FILE_ROOT_ANCHORS_SIG" "$URL_ROOT_ANCHORS_SIG"

# Verify the anchors file against its signature before trusting it.
openssl smime -verify -inform der \
    -CAfile "$FILE_ICANN_ROOT_CA_CERT" \
    -in "$FILE_ROOT_ANCHORS_SIG" -out /dev/null \
    -content "$FILE_ROOT_ANCHORS_XML"
if [ $? -eq 0 ]; then
    python3 dnssec_ta_tool.py --anchors "$FILE_ROOT_ANCHORS_XML" "$@"
fi

# Clean up temporary files regardless of verification outcome.
rm -f "$FILE_ROOT_ANCHORS_XML"
rm -f "$FILE_ROOT_ANCHORS_SIG"
rm -f "$FILE_ICANN_ROOT_CA_CERT"
<reponame>levitnudi/Dala
/**
* Flym
* <p/>
* Copyright (c) 2012-2015 <NAME>
* <p/>
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
* <p/>
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
* <p/>
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package yali.org.activity;
import android.content.pm.PackageInfo;
import android.content.pm.PackageManager;
import android.content.pm.PackageManager.NameNotFoundException;
import android.os.Bundle;
import android.support.v7.widget.Toolbar;
import android.text.Html;
import android.text.method.LinkMovementMethod;
import android.view.MenuItem;
import android.widget.TextView;
import yali.org.R;
import yali.org.utils.UiUtils;
/**
 * "About" screen: shows the installed version (falling back to the app name
 * if the package lookup fails) and an HTML blurb with clickable links.
 *
 * Fix: onBackPressed() overrides an Activity method but was missing the
 * {@code @Override} annotation, so a signature drift would compile silently.
 */
public class AboutActivity extends BaseActivity {
    //private GifImageView gifImageView;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        // Apply the user-selected theme before inflating any views.
        UiUtils.setPreferenceTheme(this);
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_about);
        //initFBAds();
        Toolbar toolbar = findViewById(R.id.toolbar);
        setSupportActionBar(toolbar);
        // NOTE(review): getSupportActionBar() can be null if the toolbar is
        // missing from the layout; this assumes it is always present.
        getSupportActionBar().setDisplayHomeAsUpEnabled(true);
        // getSupportActionBar().setLogo(R.mipmap.ic_launcher);
        // getSupportActionBar().setDisplayUseLogoEnabled(true);

        // Title: "Version <x.y>" when available, app name otherwise.
        String title;
        PackageManager manager = this.getPackageManager();
        try {
            PackageInfo info = manager.getPackageInfo(this.getPackageName(), 0);
            title = "Version " + info.versionName;
        } catch (NameNotFoundException unused) {
            title = getString(R.string.app_name);
        }
        /*gifImageView = (GifImageView) findViewById(R.id.GifImageView);
        gifImageView.setGifImageResource(R.drawable.ainimoto);*/
        /* GifView gifView1 = (GifView) findViewById(R.id.gif1);
        gifView1.setVisibility(View.VISIBLE);
        gifView1.play();
        gifView1.pause();
        //gifView1.setGifResource(R.drawable.ainimoto);
        gifView1.getGifResource();
        gifView1.play();*/
        // gifView1.setMovieTime(10000);
        // gifView1.getMovie();
        TextView titleView = findViewById(R.id.about_title);
        titleView.setText(title);

        // About blurb rendered from HTML with tappable links.
        String info = "<p><a href=\"https://www.afririse.com\"> <b>AfriRise</b></a> is your one stop shop for valuable information. We collect the latest information on " +
                "<b>Scholarships</b>, <b>Jobs & Internships</b>, <b>Investments</b>, <b>Latest News Alerts</b> near you and <b>Infortainment</b> from <b>trusted</b> sources" +
                "<p><a href=\"https://www.afririse.com\"> <b>AfriRise</b></a>, we <b>inform</b>, we <b>arise</b>, <b>together</b>...</p>"
                + "<p>" + "Follow us on<a href=\"https://facebook.com/afririse\"> <b>facebook</b></a> and "
                + "<a href=\"https://play.google.com/store/apps/details?id=yali.org\"><b>rate</b> </a>this app</p>";
        TextView contentView = findViewById(R.id.about_content);
        contentView.setText(Html.fromHtml(info));
        // Required so the <a href> spans actually open when tapped.
        contentView.setMovementMethod(LinkMovementMethod.getInstance());
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem menuItem) {
        switch (menuItem.getItemId()) {
            case android.R.id.home:
                // Up arrow behaves like back on this leaf screen.
                finish();
                return true;
            case R.id.about_content:
                return true;
        }
        return (super.onOptionsItemSelected(menuItem));
    }

    @Override
    public void onBackPressed() {
        super.onBackPressed();
        finish();
    }
}
|
<gh_stars>1-10
import {TestBed} from '@angular/core/testing';
import {StorageJsonService} from './storage-json.service';
import {Injectable, OnDestroy} from '@angular/core';
import {StorageService} from 'projects/storage/src/lib/storage.service';
import {StorageListService} from 'projects/storage/src/lib/storage-list.service';
import {StorageNode} from 'projects/storage/src/lib/entities/storage-node';
import {testStorageDirectoryNode, testStorageFileNode} from 'projects/storage/src/lib/entities/storage-node.spec';
import {storageServiceSpy} from 'projects/storage/src/lib/storage.service.spec';
import {storageListServiceSpy} from 'projects/storage/src/lib/storage-list.service.spec';
import SpyObj = jasmine.SpyObj;
import {of} from 'rxjs';
import * as _ from 'lodash';
/**
 * Minimal JSON payload used by these specs; the service under test maps a
 * value's `id` to/from the storage node path.
 */
export class TestValue {
  id: string;
}
/**
 * Concrete StorageJsonService subclass for testing: identifies nodes by path,
 * values by id, and initializes the listing with fixed parameters
 * ('rootPath', 'matcher', depth 42) so the spec can assert the init call.
 */
@Injectable()
export class TestStorageJsonService extends StorageJsonService<TestValue> implements OnDestroy {
  constructor(
    storage: StorageService,
    storageList: StorageListService,
  ) {
    // node -> key and value -> key mappers must agree (path === id).
    super(storage, storageList, node => node.path, value => value.id);
    super.init('rootPath', 'matcher', 42);
  }

  ngOnDestroy(): void {
    super.clearSubscriptions();
  }
}
describe('StorageJsonService', () => {
  let service: TestStorageJsonService;
  let storage: SpyObj<StorageService>;
  let storageList: SpyObj<StorageListService>;
  let node: StorageNode;
  let value: TestValue;

  beforeEach(() => {
    // One file node whose path doubles as the value id (see TestValue).
    node = testStorageFileNode();
    value = {id: node.path};
    storage = storageServiceSpy();
    storageList = storageListServiceSpy();
    TestBed.configureTestingModule({
      providers: [
        {provide: StorageService, useValue: storage},
        {provide: StorageListService, useValue: storageList},
        TestStorageJsonService,
      ]
    });
    service = TestBed.get(TestStorageJsonService);
  });

  afterEach(() => {
    // Tear down subscriptions registered in the service constructor.
    service.ngOnDestroy();
  });

  it('should be created', () => {
    expect(service).toBeTruthy();
    // Constructor forwards the fixed init parameters to the list service.
    expect(storageList.init).toHaveBeenCalledWith('rootPath', 'matcher', 42);
  });

  it('should set/get values', () => {
    const values = [value];
    service.values = values;
    expect(service.values).toBe(values);
  });

  it('should find', () => {
    service.values = [value];
    expect(service.find(node)).toBe(value);
  });

  it('should _nodesListed', () => {
    // A directory listing triggers a bulk JSON load of all listed nodes.
    const nodes = [node];
    const values = [value];
    storage.listJSON.and.returnValue(of(values));
    storageList.nodesListed.emit(nodes);
    expect(service.values).toBe(values);
  });

  it('should _nodeCreated', () => {
    const values = [value];
    storage.getJSON.and.returnValue(of(value));
    storageList.nodeCreated.emit(node);
    expect(service.values).toEqual(values);
  });

  it('should _nodeCreated no duplicates', () => {
    // Re-creating an already-known node must not add a second copy.
    const values = [value];
    service.values = values;
    storage.getJSON.and.returnValue(of(value));
    storageList.nodeCreated.emit(node);
    expect(service.values.length).toBe(1);
  });

  it('should _nodesDeleted', () => {
    const values = [value];
    service.values = values;
    storageList.nodesDeleted.emit([node]);
    expect(service.values).toEqual([]);
  });

  it('should _nodesDeleted nope', () => {
    // Deleting an unrelated (directory) node leaves the values untouched.
    const values = [value];
    service.values = values;
    storageList.nodesDeleted.emit([testStorageDirectoryNode()]);
    expect(service.values).toBe(values);
  });

  it('should _nodeModified', () => {
    // A modified node (new length/mtime) is re-fetched and replaces the value.
    service.values = [value];
    const modified = _.cloneDeep(node);
    modified.length = 666;
    modified.lastModified = 666;
    const modifiedValue = {id: modified.path};
    storage.getJSON.and.returnValue(of(modifiedValue));
    storageList.nodeModified.emit(modified);
    expect(service.values[0]).toBe(modifiedValue);
  });

  it('should _nodeModified nope', () => {
    service.values = [value];
    storageList.nodeModified.emit(testStorageDirectoryNode());
    expect(service.values[0]).toBe(value);
  });
});
|
# Root directory for GitHub checkouts.
export CODEGH=$HOME/code/git/github.com
# Talk to the local MPD daemon over its unix socket instead of TCP.
export MPD_HOST="$HOME/.mpd/socket"
# Preferred terminal emulator for programs honouring $TERMINAL.
export TERMINAL=st
<gh_stars>0
package com.github.yupc.admin.repository.authority.dao;
/**
 * DAO for the role-to-resource assignment table.
 * All CRUD operations are inherited from the generic BaseDao, keyed by Long,
 * mapped to the RoleResource PO and queried via RoleResourceExample.
 */
public interface RoleResourceMapper extends com.github.yupc.base.dao.BaseDao<Long,
        com.github.yupc.admin.entity.authority.po.RoleResource,
        com.github.yupc.admin.repository.authority.example.RoleResourceExample> {
}
<reponame>ministryofjustice/prison-visits-2<filename>lib/types/nomis_prisoner_type.rb
# ActiveModel attribute type for NOMIS prisoner values.
# Deliberate pass-through: values are stored exactly as assigned, with no
# coercion or validation applied on cast.
class NomisPrisonerType < ActiveModel::Type::Value
  # Identity cast — return the assigned value unchanged.
  def cast(value)
    value
  end
end
|
import {
Column,
CurrentColumn,
CurrentFilter,
CurrentPagination,
CurrentSorter,
ExtensionName,
GridOption,
GridState,
GridStateChange,
GridStateType,
} from './../models/index';
import { ExtensionService } from './extension.service';
import { FilterService } from './filter.service';
import { SortService } from './sort.service';
import { Subject, Subscription } from 'rxjs';
// using external non-typed js libraries
declare var $: any;
declare var Slick: any;
export class GridStateService {
  // Bundles all SlickGrid event subscriptions so dispose() can drop them at once.
  private _eventHandler = new Slick.EventHandler();
  // Cached full column definitions, kept in sync with the grid.
  private _columns: Column[] = [];
  // Lightweight column state (id/css/width) mirroring _columns.
  private _currentColumns: CurrentColumn[] = [];
  // The SlickGrid instance; untyped because SlickGrid ships no typings here.
  private _grid: any;
  private extensionService: ExtensionService;
  private filterService: FilterService;
  private sortService: SortService;
  // RxJS subscriptions collected for teardown in dispose().
  private subscriptions: Subscription[] = [];
  // Emits a GridStateChange every time any part of the grid state changes.
  onGridStateChanged = new Subject<GridStateChange>();
/** Getter for the Grid Options pulled through the Grid Object */
private get _gridOptions(): GridOption {
return (this._grid && this._grid.getOptions) ? this._grid.getOptions() : {};
}
/**
* Initialize the Export Service
* @param grid
* @param filterService
* @param sortService
* @param dataView
*/
init(grid: any, extensionService: ExtensionService, filterService: FilterService, sortService: SortService): void {
this._grid = grid;
this.extensionService = extensionService;
this.filterService = filterService;
this.sortService = sortService;
this.subscribeToAllGridChanges(grid);
}
/** Dispose of all the SlickGrid & Angular subscriptions */
dispose() {
// unsubscribe all SlickGrid events
this._eventHandler.unsubscribeAll();
// also unsubscribe all Angular Subscriptions
this.subscriptions.forEach((subscription: Subscription) => {
if (subscription && subscription.unsubscribe) {
subscription.unsubscribe();
}
});
this.subscriptions = [];
}
/**
* Get the current grid state (filters/sorters/pagination)
* @return grid state
*/
getCurrentGridState(): GridState {
const gridState: GridState = {
columns: this.getCurrentColumns(),
filters: this.getCurrentFilters(),
sorters: this.getCurrentSorters()
};
const currentPagination = this.getCurrentPagination();
if (currentPagination) {
gridState.pagination = currentPagination;
}
return gridState;
}
/**
* Get the Columns (and their state: visibility/position) that are currently applied in the grid
* @return current columns
*/
getColumns(): Column[] {
return this._columns || this._grid.getColumns();
}
/**
* From an array of Grid Column Definitions, get the associated Current Columns
* @param gridColumns
*/
getAssociatedCurrentColumns(gridColumns: Column[]): CurrentColumn[] {
const currentColumns: CurrentColumn[] = [];
if (gridColumns && Array.isArray(gridColumns)) {
gridColumns.forEach((column: Column, index: number) => {
if (column && column.id) {
currentColumns.push({
columnId: column.id as string,
cssClass: column.cssClass || '',
headerCssClass: column.headerCssClass || '',
width: column.width || 0
});
}
});
}
this._currentColumns = currentColumns;
return currentColumns;
}
/**
* From an array of Current Columns, get the associated Grid Column Definitions
* @param grid
* @param currentColumns
*/
getAssociatedGridColumns(grid: any, currentColumns: CurrentColumn[]): Column[] {
const columns: Column[] = [];
const gridColumns: Column[] = grid.getColumns();
if (currentColumns && Array.isArray(currentColumns)) {
currentColumns.forEach((currentColumn: CurrentColumn, index: number) => {
const gridColumn: Column = gridColumns.find((c: Column) => c.id === currentColumn.columnId);
if (gridColumn && gridColumn.id) {
columns.push({
...gridColumn,
cssClass: currentColumn.cssClass,
headerCssClass: currentColumn.headerCssClass,
width: currentColumn.width
});
}
});
}
this._columns = columns;
return columns;
}
/**
* Get the Columns (and their state: visibility/position) that are currently applied in the grid
* @return current columns
*/
getCurrentColumns(): CurrentColumn[] {
let currentColumns: CurrentColumn[] = [];
if (this._currentColumns && Array.isArray(this._currentColumns) && this._currentColumns.length > 0) {
currentColumns = this._currentColumns;
} else {
currentColumns = this.getAssociatedCurrentColumns(this._grid.getColumns());
}
return currentColumns;
}
/**
* Get the Filters (and their state, columnId, searchTerm(s)) that are currently applied in the grid
* @return current filters
*/
getCurrentFilters(): CurrentFilter[] | null {
if (this._gridOptions && this._gridOptions.backendServiceApi) {
const backendService = this._gridOptions.backendServiceApi.service;
if (backendService && backendService.getCurrentFilters) {
return backendService.getCurrentFilters() as CurrentFilter[];
}
} else if (this.filterService && this.filterService.getCurrentLocalFilters) {
return this.filterService.getCurrentLocalFilters();
}
return null;
}
/**
* Get current Pagination (and it's state, pageNumber, pageSize) that are currently applied in the grid
* @return current pagination state
*/
getCurrentPagination(): CurrentPagination | null {
if (this._gridOptions && this._gridOptions.backendServiceApi) {
const backendService = this._gridOptions.backendServiceApi.service;
if (backendService && backendService.getCurrentPagination) {
return backendService.getCurrentPagination();
}
} else {
// TODO implement this whenever local pagination gets implemented
}
return null;
}
/**
* Get the current Sorters (and their state, columnId, direction) that are currently applied in the grid
* @return current sorters
*/
getCurrentSorters(): CurrentSorter[] | null {
if (this._gridOptions && this._gridOptions.backendServiceApi) {
const backendService = this._gridOptions.backendServiceApi.service;
if (backendService && backendService.getCurrentSorters) {
return backendService.getCurrentSorters() as CurrentSorter[];
}
} else if (this.sortService && this.sortService.getCurrentLocalSorters) {
return this.sortService.getCurrentLocalSorters();
}
return null;
}
/**
* Hook a SlickGrid Extension Event to a Grid State change event
* @param extension name
* @param grid
*/
hookExtensionEventToGridStateChange(extensionName: ExtensionName, eventName: string) {
const extension = this.extensionService && this.extensionService.getExtensionByName(extensionName);
if (extension && extension.class && extension.class[eventName] && extension.class[eventName].subscribe) {
this._eventHandler.subscribe(extension.class[eventName], (e: Event, args: any) => {
const columns: Column[] = args && args.columns;
const currentColumns: CurrentColumn[] = this.getAssociatedCurrentColumns(columns);
this.onGridStateChanged.next({ change: { newValues: currentColumns, type: GridStateType.columns }, gridState: this.getCurrentGridState() });
});
}
}
/**
* Hook a Grid Event to a Grid State change event
* @param event name
* @param grid
*/
hookSlickGridEventToGridStateChange(eventName: string, grid: any) {
if (grid && grid[eventName] && grid[eventName].subscribe) {
this._eventHandler.subscribe(grid[eventName], (e: Event, args: any) => {
const columns: Column[] = grid.getColumns();
const currentColumns: CurrentColumn[] = this.getAssociatedCurrentColumns(columns);
this.onGridStateChanged.next({ change: { newValues: currentColumns, type: GridStateType.columns }, gridState: this.getCurrentGridState() });
});
}
}
resetColumns(columnDefinitions?: Column[]) {
const columns: Column[] = columnDefinitions || this._columns;
const currentColumns: CurrentColumn[] = this.getAssociatedCurrentColumns(columns);
this.onGridStateChanged.next({ change: { newValues: currentColumns, type: GridStateType.columns }, gridState: this.getCurrentGridState() });
}
/** if we use Row Selection or the Checkbox Selector, we need to reset any selection */
resetRowSelection() {
if (this._gridOptions.enableRowSelection || this._gridOptions.enableCheckboxSelector) {
// this also requires the Row Selection Model to be registered as well
const rowSelectionExtension = this.extensionService && this.extensionService.getExtensionByName && this.extensionService.getExtensionByName(ExtensionName.rowSelection);
if (rowSelectionExtension && rowSelectionExtension.extension) {
this._grid.setSelectedRows([]);
}
}
}
/**
* Subscribe to all necessary SlickGrid or Service Events that deals with a Grid change,
* when triggered, we will publish a Grid State Event with current Grid State
*/
subscribeToAllGridChanges(grid: any) {
// Subscribe to Event Emitter of Filter changed
this.subscriptions.push(
this.filterService.onFilterChanged.subscribe((currentFilters: CurrentFilter[]) => {
this.resetRowSelection();
this.onGridStateChanged.next({ change: { newValues: currentFilters, type: GridStateType.filter }, gridState: this.getCurrentGridState() });
})
);
// Subscribe to Event Emitter of Filter cleared
this.subscriptions.push(
this.filterService.onFilterCleared.subscribe(() => {
this.resetRowSelection();
this.onGridStateChanged.next({ change: { newValues: [], type: GridStateType.filter }, gridState: this.getCurrentGridState() });
})
);
// Subscribe to Event Emitter of Sort changed
this.subscriptions.push(
this.sortService.onSortChanged.subscribe((currentSorters: CurrentSorter[]) => {
this.resetRowSelection();
this.onGridStateChanged.next({ change: { newValues: currentSorters, type: GridStateType.sorter }, gridState: this.getCurrentGridState() });
})
);
// Subscribe to Event Emitter of Sort cleared
this.subscriptions.push(
this.sortService.onSortCleared.subscribe(() => {
this.resetRowSelection();
this.onGridStateChanged.next({ change: { newValues: [], type: GridStateType.sorter }, gridState: this.getCurrentGridState() });
})
);
// Subscribe to ColumnPicker and/or GridMenu for show/hide Columns visibility changes
this.hookExtensionEventToGridStateChange(ExtensionName.columnPicker, 'onColumnsChanged');
this.hookExtensionEventToGridStateChange(ExtensionName.gridMenu, 'onColumnsChanged');
// subscribe to Column Resize & Reordering
this.hookSlickGridEventToGridStateChange('onColumnsReordered', grid);
this.hookSlickGridEventToGridStateChange('onColumnsResized', grid);
}
}
|
<reponame>iAmVeryQT/jsonchat_hanjin
#include "trayicon.h"
#include <QCursor>
#include <QGuiApplication>
#include <QScreen>
// Construct the tray icon and route its activation signal (clicks on the icon)
// to iconActivated(), which positions and shows the popup menu.
TrayIcon::TrayIcon() : mMenu()
{
    connect(this, SIGNAL(activated(QSystemTrayIcon::ActivationReason)),
        this, SLOT(iconActivated(QSystemTrayIcon::ActivationReason)));
}
/**
 * Show the tray menu next to the cursor when the tray icon is activated
 * (any click type). The menu is placed at the click position, but flipped to
 * the left/top of the cursor when it would overflow the right/bottom edge of
 * the screen, so it stays fully visible.
 */
void TrayIcon::iconActivated(QSystemTrayIcon::ActivationReason r)
{
    switch(r) {
    case QSystemTrayIcon::Context:
    case QSystemTrayIcon::Trigger:
    case QSystemTrayIcon::MiddleClick:
    case QSystemTrayIcon::DoubleClick:
    {
        QPoint ClickPos = QCursor::pos();

        // BUGFIX: screenAt() returns nullptr when the point is on no screen
        // (e.g. a just-disconnected monitor); fall back to the primary screen
        // instead of dereferencing a null pointer.
        QScreen *screen = QGuiApplication::screenAt(ClickPos);
        if (screen == nullptr) {
            screen = QGuiApplication::primaryScreen();
        }
        if (screen == nullptr) {
            break; // headless session: nothing sensible to do
        }
        QRect ScreenRect = screen->geometry();
        QSize MenuSize = mMenu.size();

        // Flip the menu left/up when it would not fit right of / below the cursor.
        int PosX = (ScreenRect.right() < ClickPos.x() + MenuSize.width()) ?
                    ClickPos.x() - MenuSize.width() : ClickPos.x();
        int PosY = (ScreenRect.bottom() < ClickPos.y() + MenuSize.height()) ?
                    ClickPos.y() - MenuSize.height() : ClickPos.y();

        mMenu.move(PosX, PosY);
        mMenu.show();
        mMenu.activateWindow();
        mMenu.setFocus();
    }
    break;
    default:
        break;
    }
}
|
import Phaser from 'phaser'
// Camera test scene #3: combines half zoom with a half-size viewport and
// observes how placed images are clipped/positioned.
export default class Test3 extends Phaser.Scene
{
    constructor()
    {
        // Registers this scene under the key 'test-3'.
        super('test-3')
    }

    create()
    {
        // In the middle
        this.add.image(400, 300, 'pic')

        // Why isn't this cropped?
        this.add.image(400, 300, 'vulkaiser').setScale(4).setAlpha(0.3)

        // In the corners (origin picks which corner of the image is anchored)
        this.add.image(0, 0, 'mushroom').setOrigin(0, 0)
        this.add.image(800, 0, 'mushroom').setOrigin(1, 0)
        this.add.image(0, 600, 'mushroom').setOrigin(0, 1)
        this.add.image(800, 600, 'mushroom').setOrigin(1, 1)

        const cam = this.cameras.main

        // Test 3 - Try half zoom and a half viewport size together - appears broken, image still pops out of the top, placement seems wrong
        cam.setViewport(300, 100, 400, 300)
        cam.zoom = 0.5
        cam.scrollX = 200
        cam.scrollY = 150

        // ESC returns to the menu scene.
        this.input.keyboard.once('keydown-ESC', () => {
            this.scene.start('menu')
        })
    }
}
|
#!/usr/bin/env bash
# Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this
# software and associated documentation files (the "Software"), to deal in the Software
# without restriction, including without limitation the rights to use, copy, modify,
# merge, publish, distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
# INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
# PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# Builds the main Fabric network on a Kubernetes cluster.
# This script can be rerun if it fails. It will simply rerun the K8s commands, which will have
# no impact if they've been run previously
set -e

# Tear down every Hyperledger Fabric component previously deployed to the
# Kubernetes cluster, then remove volumes, per-org namespaces and shared data.
function main {
    echo "Stopping Hyperledger Fabric on Kubernetes ..."
    # Load shared environment variables and the stop*/remove* helper functions.
    source $SCRIPTS/env.sh
    cd $HOME/$REPO/fabric-main
    source utilities.sh
    cd $HOME
    stopJobsFabric $HOME $REPO
    # From here on, keep going even when individual deletions fail
    # (resources may already be gone on a rerun).
    set +e
    stopTest $HOME $REPO
    stopChannelArtifacts $HOME $REPO
    stopRegisterOrgs $HOME $REPO
    stopRegisterOrderers $HOME $REPO
    stopOrderer $HOME $REPO
    stopCLI $HOME $REPO
    stopKafka $HOME $REPO
    # Per-organization teardown: peers, CAs, persistent volume claims, namespace.
    for DELETE_ORG in $ORGS; do
        stopPeers $HOME $REPO $DELETE_ORG
        stopRegisterPeers $HOME $REPO $DELETE_ORG
        stopICA $HOME $REPO $DELETE_ORG
        stopRCA $HOME $REPO $DELETE_ORG
        stopPVC $HOME $REPO $DELETE_ORG
        # getDomain presumably sets $DOMAIN for the org (defined in utilities.sh) — confirm.
        getDomain $DELETE_ORG
        removeNamespaces $HOME $REPO $DOMAIN
    done
    kubectl delete pv --all
    removeDirs $DATADIR
    whatsRunning
    echo "Hyperledger Fabric on Kubernetes stopped"
}

# Script-level configuration used by main.
SDIR=$(dirname "$0")
DATADIR=/opt/share/
SCRIPTS=$DATADIR/rca-scripts
REPO=hyperledger-on-kubernetes
main
|
# *-*- Shell Script -*-*
# from VOID Linux (https://www.voidlinux.org)
[ "$VIRTUALIZATION" ] && return 0
TTYS=${TTYS:-12}
if [ -n "$FONT" ]; then
printf '=> Setting up TTYs font to %s ...\n' "$FONT"
_index=0
while [ ${_index} -le $TTYS ]; do
setfont ${FONT_MAP:+-m $FONT_MAP} ${FONT_UNIMAP:+-u $FONT_UNIMAP} "$FONT" -C "/dev/tty${_index}"
_index=$((_index + 1))
done
fi
if [ "$KEYMAP" ]; then
printf '=> Setting up keymap to %s ...\n' "$KEYMAP"
loadkeys -q -u "$KEYMAP"
fi
if [ -n "$HARDWARECLOCK" ]; then
printf '=> Setting up RTC to %s ...\n' "$HARDWARECLOCK"
TZ=$TIMEZONE hwclock --systz ${HARDWARECLOCK:+--$(echo "$HARDWARECLOCK" | tr '[:upper:]' '[:lower:]') --noadjfile} || true
fi
|
<gh_stars>1-10
import express from "express";
import userController from "../controller/user.controller";
import userValidation from "../request/user.validation";
// Router exposing the user CRUD endpoints. Each route runs its request
// validation middleware before the controller handler.
const userRouter = express.Router();

// Collection listing.
userRouter.get("/", userValidation.getUser, userController.getUser);

// User creation.
userRouter.post("/user", userValidation.insertUser, userController.insertUser);

// Single-user operations share the same path parameter, so register them
// on one route() chain.
userRouter
  .route("/user/:userId")
  .get(userValidation.getUser, userController.getUser)
  .put(userValidation.updateUser, userController.updateUser)
  .delete(userValidation.deleteUser, userController.destroyUser);

export default userRouter;
|
import { Construct } from '@aws-cdk/core';
import { BaseFeatureStack, BaseFeatureStackProps } from '../../components/base';
import { CoreVpcProps, ICoreVpc, CoreVpc } from '../../components/core-vpc';
import { IGalaxyCore, GalaxyCoreStack } from '../../galaxy/galaxy-core-stack';
/** Contract of the shared-VPC feature: a galaxy-scoped core VPC. */
export interface ISharedVpcFeature extends Construct {
  readonly galaxy: IGalaxyCore;
  readonly vpc: ICoreVpc;
}

/** Props for SharedVpcFeatureCoreStack. */
export interface SharedVpcFeatureCoreStackProps extends BaseFeatureStackProps {
  // Optional overrides forwarded to the CoreVpc construct.
  vpcProps?: Partial<CoreVpcProps>;
}

/** Feature stack that provisions the galaxy's shared CoreVpc. */
export class SharedVpcFeatureCoreStack extends BaseFeatureStack implements ISharedVpcFeature {
  readonly galaxy: IGalaxyCore;
  readonly vpc: CoreVpc;

  constructor(galaxy: IGalaxyCore, id: string, props?: SharedVpcFeatureCoreStackProps) {
    super(galaxy, id, props);
    const { vpcProps } = props || {};
    this.galaxy = galaxy;
    // Prefer the stack's own network builder; fall back to one supplied via vpcProps.
    const networkBuilder = this.networkBuilder || vpcProps?.networkBuilder;
    if (!networkBuilder) throw new Error('Network Builder must be provided.');
    this.vpc = new CoreVpc(this, 'Vpc', {
      ...vpcProps,
      networkBuilder,
    });
  }
}

// Module augmentation: retrofit the shared-VPC feature onto the galaxy core
// interfaces so consumers can do `galaxy.addSharedVpc(...)` / read `galaxy.sharedVpc`.
declare module '../../galaxy/galaxy-core-stack' {
  export interface IGalaxyCore {
    readonly sharedVpc?: ISharedVpcFeature;
  }
  export interface GalaxyCoreStack {
    sharedVpc?: SharedVpcFeatureCoreStack;
    addSharedVpc(props?: SharedVpcFeatureCoreStackProps): SharedVpcFeatureCoreStack;
  }
}

// Runtime implementation of the augmented method: instantiates the feature
// stack, caches it on the galaxy, and returns it.
GalaxyCoreStack.prototype.addSharedVpc = function (props?: SharedVpcFeatureCoreStackProps): SharedVpcFeatureCoreStack {
  this.sharedVpc = new SharedVpcFeatureCoreStack(this, 'SharedVpc', props);
  return this.sharedVpc;
};
|
def check_version(user_version: str) -> str:
    """Compare ``user_version`` against the module-level ``__version__``.

    Returns:
        ``"Up to date"``             -- the versions are identical.
        ``"Outdated"``               -- ``user_version`` is older than ``__version__``.
        ``"Invalid version format"`` -- the string is not ``major.minor.patch``
            with integer components, or the user version is NEWER than
            ``__version__`` (quirk preserved from the original implementation
            for interface compatibility).
    """
    try:
        # Both strings must parse as exactly three dot-separated integers;
        # anything else raises ValueError and is reported as invalid.
        user = tuple(map(int, user_version.split('.')))
        predefined = tuple(map(int, __version__.split('.')))
        if len(user) != 3 or len(predefined) != 3:
            return "Invalid version format"
    except ValueError:
        return "Invalid version format"

    if user == predefined:
        return "Up to date"
    # Tuple comparison is lexicographic, exactly equivalent to the
    # major/minor/patch cascade it replaces.
    if user < predefined:
        return "Outdated"
    # user > predefined: a newer-than-known version was (and still is)
    # reported as invalid — flagged here as suspicious but preserved.
    return "Invalid version format"
#include <iostream>
using namespace std;
/**
 * Trial-division primality test.
 * Returns true when value is a prime (>= 2); divisors are only checked up
 * to sqrt(value), since a composite must have a factor no larger than that.
 */
static bool isPrime(int value)
{
    for (int divisor = 2; divisor * divisor <= value; divisor++)
    {
        if (value % divisor == 0)
        {
            return false;
        }
    }
    return value >= 2;
}

// Print every prime in [2, 100], each followed by a single space
// (output format identical to the original single-loop version).
int main()
{
    const int n = 100;
    for (int i = 2; i <= n; i++)
    {
        if (isPrime(i))
            cout << i << " ";
    }
    return 0;
}
<reponame>tmontes/python-wires
# ----------------------------------------------------------------------------
# Python Wires Tests
# ----------------------------------------------------------------------------
# Copyright (c) <NAME>.
# See LICENSE for details.
# ----------------------------------------------------------------------------
"""
Shared Wires instance API tests.
"""
from __future__ import absolute_import
import unittest
from . import mixin_test_api, mixin_use_shared_instance
class TestWiresAPI(mixin_test_api.TestWiresAPIMixin,
                   mixin_use_shared_instance.UseSharedInstanceMixin,
                   unittest.TestCase):
    """
    API tests for the shared Wires instance.

    The class body is empty on purpose: the test methods are inherited from
    TestWiresAPIMixin, and UseSharedInstanceMixin presumably points them at
    the shared Wires instance (see the mixin modules to confirm).
    """
# ----------------------------------------------------------------------------
|
#!/usr/bin/env bash
#
# +-------------------------------------------------------------------------+
# | setup.sh |
# +-------------------------------------------------------------------------+
# | Copyright © 2019 Waldemar Schroeer |
# | waldemar.schroeer(at)rz-amper.de |
# +-------------------------------------------------------------------------+
# Interactive dotfiles installer: backs up the user's current dotfiles into a
# unique per-run directory, then copies this repository's versions into $HOME.
backupdir=~/Backup.dotfiles.$$   # $$ (PID) makes the backup directory unique per run
cdir=$(pwd)

# Prompt in a loop until the user gives a recognizable yes/no answer.
while :
do
    /bin/clear && /bin/cat ${cdir}/setup-warning.txt
    printf "\n Proceed installing .dotfiles? (Yes|No) >> "
    read antwoord
    case $antwoord in
        [yY] | [yY][Ee][Ss] )
            printf "\n Your current .dotfiles will be moved to $backupdir"
            mkdir $backupdir
            declare -a files=( ".vimrc" ".bashrc" ".bash_profile" ".bash_logout" ".bash_functions" ".screenrc" ".tmuxrc" ".motd" "template.sh" "template.php" )
            for i in "${files[@]}"
            do
                # Preserve any existing copy before overwriting it.
                if [ -f ~/$i ]; then
                    printf "\n Moving $i to $backupdir"
                    mv ~/$i $backupdir
                fi
                printf "\n Creating $i"
                cp ${cdir}/${i} ~
            done
            printf "\n Installing .local and binaries directory to your home directory."
            mkdir -p ~/.local
            cp -r ${cdir}/.local/* ~/.local
            break
            ;;
        # BUGFIX: was "[nN] | [n|N][O|o]" — a malformed glob class that also
        # matched strings containing a literal '|'; now matches n/N/no/No/NO/nO.
        [nN] | [nN][Oo] )
            printf "\n Oh Boy, you should reconsider your decision.\n\n"
            break
            ;;
        *)
            printf "\n Wut?\n\n"
            ;;
    esac
done

exit 0
|
package com.java110.things.constant;
/**
 * Constants used when talking to the HC community-management system's
 * access-control (door machine) API: endpoint paths and device command codes.
 *
 * @author wuxw
 * @version 1.0
 * add by wuxw 2020/5/10
 **/
public class AccessControlConstant {

    // Base URL of the HC community-management system API (example, disabled):
    //public final static String ACCESS_URL = "https://hc.demo.winqi.cn:8008";

    // Heartbeat endpoint reported by door machines.
    public final static String MACHINE_HEARTBEART = "/api/machineTranslate.machineHeartbeart";

    // List the access-control machines of the current community.
    public final static String LIST_MACHINES = "/api/machine.listMachines";

    // Query user information for a machine.
    public final static String MACHINE_QUERY_USER_INFO="/api/machineTranslate.machineQueryUserInfo";

    // Device command code: add or update a face record.
    public final static int CMD_ADD_UPDATE_FACE = 101;

    // Device command code: delete a single face record.
    public final static int CMD_DELETE_FACE = 102;

    // Device command code: clear all face records.
    public final static int CMD_CLEAR_ALL_FACE = 103;
}
|
<html>
<head>
<script>
/**
 * Validate an e-mail address against an RFC-5322-style pattern.
 * The input is stringified and lower-cased before testing, so any value
 * (not just strings) can be passed in.
 */
function validateEmail(email) {
  const emailPattern = /^(([^<>()\[\]\\.,;:\s@"]+(\.[^<>()\[\]\\.,;:\s@"]+)*)|(".+"))@((\[[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\])|(([a-zA-Z\-0-9]+\.)+[a-zA-Z]{2,}))$/;
  const normalized = String(email).toLowerCase();
  return emailPattern.test(normalized);
}
/**
 * Read the value of the #email input and alert whether it is a valid address.
 * Wired to the "Check Validity" button's onclick handler in the page body.
 */
function isEmailValid(){
  let email = document.getElementById("email");
  if( validateEmail(email.value)) {
    alert("Email is valid!");
  } else {
    alert("Email is not valid!");
  }
}
</script>
</head>
<body>
<input id="email" type="text">
<button onclick="isEmailValid()">Check Validity</button>
</body>
</html> |
import { Component, OnInit, NgZone, NgModule, ElementRef, ViewChild } from '@angular/core';
import { DomSanitizer } from '@angular/platform-browser';
import { Router, ActivatedRoute, NavigationExtras } from '@angular/router';
import { HttpClient, HttpHeaders, HttpErrorResponse } from '@angular/common/http';
import { Observable } from 'rxjs/Observable'
import 'rxjs/add/operator/map';
import 'rxjs/add/operator/catch';
import { Chart } from 'chart.js';
declare var $: any;
import * as data from '../../../../config'
import { ToastrService } from '../../services/toastr.service';
@Component({
  selector: 'home',
  templateUrl: './home.component.html',
  styleUrls: ['./home.component.css']
})
export class HomeComponent implements OnInit {
  /** Logged-in user restored from localStorage. */
  user: any;
  /** Base URL of the VM backend, taken from the persisted session configuration. */
  vmUrl: string;

  constructor(private toastrService: ToastrService, public router: Router, private route: ActivatedRoute, private http: HttpClient, private zone: NgZone, public domSanitizer: DomSanitizer) {
    this.user = JSON.parse(localStorage.getItem('user'));
    // BUGFIX: removed leftover debug console.log statements; `var` -> `const`.
    const session = JSON.parse(localStorage.getItem('sessionConfiguration'));
    if (session != null) {
      this.vmUrl = session.vmUrl;
    }
  }

  ngOnInit() {
  }

  /**
   * Check that at least one aggregator AND one compute engine (status 0 or 2)
   * are registered before navigating to the camera-connection wizard;
   * otherwise show a warning toast and return to the dashboard.
   *
   * NOTE(review): the two requests are independent and could run in parallel
   * with forkJoin; kept sequential to preserve existing behavior.
   */
  connectCamera() {
    this.http.get<any[]>(this.vmUrl + '/aggregators?status=0,2'
    ).subscribe(data => {
      const aggrLength = data.length;
      this.http.get<any[]>(this.vmUrl + '/computeengines?status=0,2'
      ).subscribe(data => {
        const compLength = data.length;
        if (aggrLength != 0 && compLength != 0) {
          this.router.navigateByUrl('/connectCameraSlider');
        }
        else {
          this.toastrService.Warning("", "Add aggregator and compute engine before connecting the camera");
          this.router.navigateByUrl('/layout/dashboard');
        }
      });
    });
  }
}
|
#!/bin/bash
#
# Copyright (c) 2019-2020 P3TERX <https://p3terx.com>
#
# This is free software, licensed under the MIT License.
# See /LICENSE for more information.
#
# https://github.com/P3TERX/Actions-OpenWrt
# File name: diy-part1.sh
# Description: OpenWrt DIY script part 1 (Before Update feeds)
#
# Uncomment a feed source
#sed -i 's/^#\(.*helloworld\)/\1/' feeds.conf.default
# Add a feed source
#sed -i '$a src-git lienol https://github.com/Lienol/openwrt-package' feeds.conf.default
#sed -i '$a src-git small https://github.com/kenzok8/small' feeds.conf.default
#sed -i '$a src-git kenzo https://github.com/kenzok8/openwrt-packages' feeds.conf.default
|
<filename>Practice/Intro-To-Java-8th-Ed-Daniel-Y.-Liang/Chapter-6/Chapter06P17/test/junit/SelectionSortTest.java
package junit;
import static org.junit.Assert.*;
import org.junit.Test;
import main.SelectionSort;
/**
* @author <NAME>
*
*/
public class SelectionSortTest
{
@Test
public void testSelectionSort()
{
int size = (int)(Math.random() * 100);
double[] randomNumbers = new double[size];
for (int index = 0; index < size; index++)
{
randomNumbers[index] = (Math.random() * 100.0) - 50.0;
}
SelectionSort.selectionSort(randomNumbers);
boolean sorted = true;
for (int index = 0; index < (randomNumbers.length - 1); index++)
{
if (randomNumbers[index] > randomNumbers[index + 1])
{
sorted = false;
break;
}
}
assertEquals(true, sorted);
}
} |
import numpy as np
import pandas as pd
def generate_surface_of_section(orbit, constant_idx):
    """Tabulate an orbit's surface of section as a two-column DataFrame.

    Parameters
    ----------
    orbit : object
        Must provide ``get_phase_space_coords_at_constant(constant_idx)``
        returning row-wise coordinate pairs.
    constant_idx : int
        Index of the held-constant coordinate defining the section.

    Returns
    -------
    pandas.DataFrame
        Columns ``'x'`` and ``'y'``, one row per recorded crossing.
    """
    coords = orbit.get_phase_space_coords_at_constant(constant_idx)
    return pd.DataFrame(coords, columns=['x', 'y'])
#for x in *rep*; do python epa-star-simulation/simulate-datasets/get-to-qiime-format.py $x $x > $x.renamed.fa; done
# The three sink mixtures share the same two source files and the same QIIME
# closed-reference OTU-picking invocation; the repetition is factored into a
# single helper so each combination is one line.
REF_OTUS=/mnt/data1/lakelanier/16S-data/gg_13_8_otus/rep_set/97_otus.fasta

# run_combo SINK_GLOB COMBO_NAME
# Concatenates the two source files plus the given sink into COMBO_NAME,
# then runs closed-reference OTU picking on the result.
run_combo() {
    local sink="$1"
    local combo="$2"
    local source1='*10000.0*renamed.fa'
    local source2='*9000.1000*renamed.fa'
    # Globs intentionally left unquoted so the shell expands them, as in the original.
    cat $source1 $source2 $sink > $combo
    pick_closed_reference_otus.py -o $combo-qiime -i $combo -r $REF_OTUS -p param.txt -f
}

run_combo '*9500.500*renamed.fa' 'source.0.source.10.sink.05.combined.fa'
run_combo '*9900.100*renamed.fa' 'source.0.source.10.sink.01.combined.fa'
run_combo '*9990.10*renamed.fa'  'source.0.source.10.sink.001.combined.fa'
|
def bubble_sort(arr):
    """Sort ``arr`` in place with bubble sort and return it.

    After pass ``i`` the largest ``i + 1`` elements are in their final
    positions, so each inner sweep shrinks by one. An early-exit flag stops
    as soon as a full sweep performs no swap, making already-sorted input
    O(n) instead of O(n^2).

    Args:
        arr: Mutable sequence of mutually comparable items; mutated in place.

    Returns:
        The same sequence object, sorted in ascending order.
    """
    for i in range(len(arr)):
        swapped = False
        for j in range(len(arr) - 1 - i):
            if arr[j] > arr[j + 1]:
                arr[j], arr[j + 1] = arr[j + 1], arr[j]
                swapped = True
        if not swapped:
            break  # no swaps in a full pass: the sequence is already sorted
    return arr

# Driver code
arr = [3, 8, 5, 2, 6]
result = bubble_sort(arr)
print(f'Sorted array: {result}')
#!/bin/bash
set -x
exec > >(tee /var/log/user-data.log|logger -t user-data -s 2>/dev/console) 2>&1
# workaround for GCP running the startup script on boot
if [ -e "/etc/kafka/kafka_installed.tag" ]; then
echo "Kafka Install has already run, Exiting"
exit
fi
# Configure kafka
cat > /etc/kafka/server.properties << "EOF"
# The id of the broker. This must be set to a unique integer for each broker.
broker.id=-1
############################# Socket Server Settings #############################
listeners=PLAINTEXT://:9092
# The number of threads that the server uses for receiving requests from the network and sending responses to the network
num.network.threads=3
# The number of threads that the server uses for processing requests, which may include disk I/O
num.io.threads=8
# The send buffer (SO_SNDBUF) used by the socket server
socket.send.buffer.bytes=102400
# The receive buffer (SO_RCVBUF) used by the socket server
socket.receive.buffer.bytes=102400
# The maximum size of a request that the socket server will accept (protection against OOM)
socket.request.max.bytes=104857600
############################ Log Basics #############################
# A comma separated list of directories under which to store log files
log.dirs=/data/kafka
# The default number of log partitions per topic.
num.partitions=1
# The number of threads per data directory to be used for log recovery at startup and flushing at shutdown.
# This value is recommended to be increased for installations with data dirs located in RAID array.
num.recovery.threads.per.data.dir=1
############################# Log Retention Policy #############################
log.retention.hours=168
#log.retention.bytes=1073741824
# The maximum size of a log segment file. When this size is reached a new log segment will be created.
log.segment.bytes=1073741824
# The interval at which log segments are checked to see if they can be deleted according
# to the retention policies
log.retention.check.interval.ms=300000
############################# Zookeeper #############################
zookeeper.connect=${zookeepers_string}
# Timeout in ms for connecting to zookeeper
zookeeper.connection.timeout.ms=6000
############################# Group Coordinator Settings #############################
# The following configuration specifies the time, in milliseconds, that the GroupCoordinator will delay the initial consumer rebalance.
# However, in production environments the default value of 3 seconds is more suitable as this will help to avoid unnecessary, and potentially expensive, rebalances during application startup.
# group.initial.rebalance.delay.ms=0
EOF
echo "advertised.listeners=PLAINTEXT://$(hostname):9092" >> /etc/kafka/server.properties
cat > /etc/kafka/log4j.properties << "EOF"
log4j.rootLogger=INFO, stdout, kafkaAppender
log4j.appender.stdout=org.apache.log4j.ConsoleAppender
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c)%n
log4j.appender.kafkaAppender=org.apache.log4j.RollingFileAppender
log4j.appender.kafkaAppender.DatePattern='.'yyyy-MM-dd-HH
log4j.appender.kafkaAppender.File=$${kafka.logs.dir}/server.log
log4j.appender.kafkaAppender.layout=org.apache.log4j.PatternLayout
log4j.appender.kafkaAppender.MaxFileSize=100MB
log4j.appender.kafkaAppender.MaxBackupIndex=10
log4j.appender.kafkaAppender.layout.ConversionPattern=[%d] %p %m (%c)%n
log4j.appender.stateChangeAppender=org.apache.log4j.RollingFileAppender
log4j.appender.stateChangeAppender.DatePattern='.'yyyy-MM-dd-HH
log4j.appender.stateChangeAppender.File=$${kafka.logs.dir}/state-change.log
log4j.appender.stateChangeAppender.layout=org.apache.log4j.PatternLayout
log4j.appender.stateChangeAppender.MaxFileSize=100MB
log4j.appender.stateChangeAppender.MaxBackupIndex=10
log4j.appender.stateChangeAppender.layout.ConversionPattern=[%d] %p %m (%c)%n
log4j.appender.requestAppender=org.apache.log4j.RollingFileAppender
log4j.appender.requestAppender.DatePattern='.'yyyy-MM-dd-HH
log4j.appender.requestAppender.File=$${kafka.logs.dir}/kafka-request.log
log4j.appender.requestAppender.layout=org.apache.log4j.PatternLayout
log4j.appender.requestAppender.MaxFileSize=100MB
log4j.appender.requestAppender.MaxBackupIndex=10
log4j.appender.requestAppender.layout.ConversionPattern=[%d] %p %m (%c)%n
log4j.appender.cleanerAppender=org.apache.log4j.RollingFileAppender
log4j.appender.cleanerAppender.DatePattern='.'yyyy-MM-dd-HH
log4j.appender.cleanerAppender.File=$${kafka.logs.dir}/log-cleaner.log
log4j.appender.cleanerAppender.layout=org.apache.log4j.PatternLayout
log4j.appender.cleanerAppender.MaxFileSize=100MB
log4j.appender.cleanerAppender.MaxBackupIndex=10
log4j.appender.cleanerAppender.layout.ConversionPattern=[%d] %p %m (%c)%n
log4j.appender.controllerAppender=org.apache.log4j.RollingFileAppender
log4j.appender.controllerAppender.DatePattern='.'yyyy-MM-dd-HH
log4j.appender.controllerAppender.File=$${kafka.logs.dir}/controller.log
log4j.appender.controllerAppender.layout=org.apache.log4j.PatternLayout
log4j.appender.controllerAppender.MaxFileSize=100MB
log4j.appender.controllerAppender.MaxBackupIndex=10
log4j.appender.controllerAppender.layout.ConversionPattern=[%d] %p %m (%c)%n
log4j.appender.authorizerAppender=org.apache.log4j.RollingFileAppender
log4j.appender.authorizerAppender.DatePattern='.'yyyy-MM-dd-HH
log4j.appender.authorizerAppender.File=$${kafka.logs.dir}/kafka-authorizer.log
log4j.appender.authorizerAppender.layout=org.apache.log4j.PatternLayout
log4j.appender.authorizerAppender.MaxFileSize=100MB
log4j.appender.authorizerAppender.MaxBackupIndex=10
log4j.appender.authorizerAppender.layout.ConversionPattern=[%d] %p %m (%c)%n
# Change the two lines below to adjust the general broker logging level (output to server.log and stdout)
log4j.logger.kafka=INFO
log4j.logger.org.apache.kafka=INFO
# Change to DEBUG or TRACE to enable request logging
log4j.logger.kafka.request.logger=WARN, requestAppender
log4j.additivity.kafka.request.logger=false
# Uncomment the lines below and change log4j.logger.kafka.network.RequestChannel$ to TRACE for additional output
# related to the handling of requests
# log4j.logger.kafka.network.Processor=TRACE, requestAppender
# log4j.logger.kafka.server.KafkaApis=TRACE, requestAppender
# log4j.additivity.kafka.server.KafkaApis=false
log4j.logger.kafka.network.RequestChannel$=WARN, requestAppender
log4j.additivity.kafka.network.RequestChannel$=false
log4j.logger.kafka.controller=TRACE, controllerAppender
log4j.additivity.kafka.controller=false
log4j.logger.kafka.log.LogCleaner=INFO, cleanerAppender
log4j.additivity.kafka.log.LogCleaner=false
log4j.logger.state.change.logger=TRACE, stateChangeAppender
log4j.additivity.state.change.logger=false
# Access denials are logged at INFO level, change to DEBUG to also log allowed accesses
log4j.logger.kafka.authorizer.logger=INFO, authorizerAppender
log4j.additivity.kafka.authorizer.logger=false
EOF
sudo mkdir -p /data
# setup google cloud logging
sudo mkdir -p /etc/google-fluentd/config.d
# Tail each Kafka log file into google-fluentd. The quoted "EOF" delimiter
# prevents shell expansion inside the config body.
# Fix: the state-change source was tagged "kafk-state" (typo) while its pos
# file and every other source use the kafka-* prefix; renamed to kafka-state.
cat > /etc/google-fluentd/config.d/kafka.conf << "EOF"
<source>
@type tail
format none
path /var/log/kafka/server.log
pos_file /var/lib/google-fluentd/pos/kafka-server.pos
read_from_head true
tag kafka-server
</source>
<source>
@type tail
format none
path /var/log/kafka/state-change.log
pos_file /var/lib/google-fluentd/pos/kafka-state.pos
read_from_head true
tag kafka-state
</source>
<source>
@type tail
format none
path /var/log/kafka/kafka-request.log
pos_file /var/lib/google-fluentd/pos/kafka-request.pos
read_from_head true
tag kafka-request
</source>
<source>
@type tail
format none
path /var/log/kafka/log-cleaner.log
pos_file /var/lib/google-fluentd/pos/kafka-cleaner.pos
read_from_head true
tag kafka-cleaner
</source>
<source>
@type tail
format none
path /var/log/kafka/controller.log
pos_file /var/lib/google-fluentd/pos/kafka-controller.pos
read_from_head true
tag kafka-controller
</source>
<source>
@type tail
format none
path /var/log/kafka/kafka-authorizer.log
pos_file /var/lib/google-fluentd/pos/kafka-authorizer.pos
read_from_head true
tag kafka-authorizer
</source>
EOF
# Ship /var/log/syslog too; the regex extracts the leading timestamp while
# still keeping the whole line in 'message'.
cat > /etc/google-fluentd/config.d/syslog.conf << "EOF"
<source>
@type tail
# Parse the timestamp, but still collect the entire line as 'message'
format /^(?<message>(?<time>[^ ]*\s*[^ ]* [^ ]*) .*)$/
path /var/log/syslog
pos_file /var/lib/google-fluentd/pos/syslog.pos
read_from_head true
tag syslog
</source>
EOF
# Accept local syslog forwarding on 127.0.0.1:514 over both UDP and TCP.
cat > /etc/google-fluentd/config.d/syslog_endpoint.conf << "EOF"
<source>
@type syslog
port 514
protocol_type udp
bind 127.0.0.1
format /(?<message>.*)/
tag syslog
</source>
<source>
@type syslog
port 514
protocol_type tcp
bind 127.0.0.1
format /(?<message>.*)/
tag syslog
</source>
EOF
# Reload google-fluentd so the new config files take effect.
service google-fluentd restart
# Mount data disk
echo 'about to mount disk'
export DEVICE_NAME=$(lsblk -ip | tail -n +2 | grep -v " rom" | awk '{print $1 " " ($7? "MOUNTEDPART" : "") }' | sed ':a;N;$!ba;s/\n`/ /g' | sed ':a;N;$!ba;s/\n|-/ /g' | grep -v MOUNTEDPART)
# export DEVICE_NAME=$(lsblk -ip | tail -n +2 | grep -v " rom" | awk '{print $1 " " ($7? "MOUNTEDPART" : "") }' | sed ':a;N;$!ba;s/\n`/ /g' | grep -v MOUNTEDPART)
# from es-aws-6 export DEVICE_NAME=$(lsblk -ip | tail -n +2 | awk '{print $1 " " ($7? "MOUNTEDPART" : "") }' | sed ':a;N;$!ba;s/\n`/ /g' | grep -v MOUNTEDPART)
if sudo mount -o defaults -t ext4 $${DEVICE_NAME} /data; then
echo 'Successfully mounted existing disk'
else
echo 'Trying to mount a fresh disk'
sudo mkfs.ext4 -m 0 -F -E lazy_itable_init=0,lazy_journal_init=0,discard $${DEVICE_NAME}
sudo mount -o defaults -t ext4 $${DEVICE_NAME} /data && echo 'Successfully mounted a fresh disk'
fi
echo "$$DEVICE_NAME /data ext4 defaults,nofail 0 2" | sudo tee -a /etc/fstab
# Mark installation complete so reruns of this script can detect it.
echo "installed" > /etc/kafka/kafka_installed.tag
sudo mkdir -p /data/kafka
sudo chown -R cp-kafka:confluent /data/kafka
# Enable the Kafka broker service so it restarts automatically on boot
sudo systemctl enable confluent-kafka
# Start the Kafka broker now
sudo systemctl start confluent-kafka
|
<filename>pydata-chicago/plots.py
import numpy as np
import matplotlib as mpl
mpl.use('Qt4Agg')
import matplotlib.pyplot as plt
#from data import fizz_buzz_encode
def fizz_buzz_encode(i):
    """encodes the desired fizz-buzz output as a one-hot array of length 4:
    [number, "fizz", "buzz", "fizzbuzz"]"""
    # Map divisibility to the hot index, then build the one-hot vector.
    hot = 3 if i % 15 == 0 else 2 if i % 5 == 0 else 1 if i % 3 == 0 else 0
    encoding = np.zeros(4, dtype=int)
    encoding[hot] = 1
    return encoding
# Sample model "predictions" for 1..100 — deliberately imperfect (e.g. index 1
# predicts 'buzz' where the truth is '2') so the plots can highlight mistakes.
d = ['1','buzz','fizz','4','buzz','fizz','7','8','fizz','buzz','11','fizz','13',
     '14','fizzbuzz','16','buzz','fizz','buzz','buzz','buzz','22','23','fizz',
     'fizz','26','fizz','28','29','fizzbuzz','31','32','fizz','buzz','buzz',
     'fizz','37','buzz','fizz','buzz','41','fizz','43','44','fizzbuzz','46',
     '47','fizz','buzz','buzz','fizz','52','53','fizz','buzz','56','fizz','58',
     '59','fizzbuzz','61','62','fizz','64','buzz','fizz','67','68','69','buzz',
     '71','fizz','73','74','fizzbuzz','76','77','fizz','79','buzz','buzz','82',
     '83','84','buzz','86','87','88','89','fizzbuzz','91','92','93','94','buzz',
     'fizz','97','fizz','fizz','buzz']
def make_array(data):
    """
    turn a list of fuzzbuzz outputs into a 2-D array for plotting

    Rows are the four classes [number, fizz, buzz, fizzbuzz]; columns are the
    inputs 1..len(data). Correct guesses are painted black; on a mistake the
    guessed cell turns red and the true cell turns Chicago blue.
    """
    correct_color = [0.0, 0.0, 0.0]              # black
    wrong_color = [1.0, 0.0, 0.0]                # red
    missed_color = [179/255, 221/255, 242/255]   # chicago blue
    class_index = {"fizz": 1, "buzz": 2, "fizzbuzz": 3}

    grid = np.full((4, len(data), 3), 1.0)       # start all-white
    for column, label in enumerate(data):
        truth = np.argmax(fizz_buzz_encode(column + 1))
        guess = class_index.get(label, 0)
        grid[guess][column] = correct_color
        if truth != guess:
            grid[truth][column] = missed_color
            grid[guess][column] = wrong_color
    return grid
def plot(data, fn=None):
    """Render the fizz-buzz result grid; save to ``fn`` if given, else show it."""
    grid = make_array(data)
    plt.axis('off')
    plt.imshow(grid, interpolation='none')
    if not fn:
        plt.show()
    else:
        plt.savefig(fn, bbox_inches='tight')
from PIL import Image
def plot2(output, fn, dim):
    """Render fizz-buzz results straight to an image file of size ``dim``."""
    pixels = np.uint8(make_array(output) * 255)
    Image.fromarray(pixels).convert('RGB').resize(dim).save(fn)
def ct(data):
    """
    create a crosstab from the fizz buzz outputs

    Returns a 4x4 list indexed [predicted][actual] with class order
    [number, fizz, buzz, fizzbuzz].
    """
    class_index = {"fizz": 1, "buzz": 2, "fizzbuzz": 3}
    table = [[0] * 4 for _ in range(4)]
    for position, label in enumerate(data):
        truth = np.argmax(fizz_buzz_encode(position + 1))
        guess = class_index.get(label, 0)
        table[guess][truth] += 1
    return table
|
# Print successive terms of an arithmetic progression in batches, pausing
# between batches.
mais = 10  # how many additional terms to print in the next batch
total = 0  # total number of terms requested so far
cont = 1  # count of terms already printed (intentionally NOT reset per batch)
termo = 1  # current term of the progression
razão = 2  # common difference of the progression
# NOTE(review): `mais` is never reassigned inside the loop, so this outer
# `while` never terminates — it looks like a prompt such as
# `mais = int(input(...))` was lost from the end of the loop body. Confirm
# against the original exercise before relying on this script.
while mais != 0:
    total += mais
    while cont <= total:
        print(f'{termo} ', end='')
        termo += razão
        cont += 1
    print('Pausa')
#!/bin/sh
# dockerd-rootless.sh executes dockerd in rootless mode.
#
# Usage: dockerd-rootless.sh --experimental [DOCKERD_OPTIONS]
# Currently, specifying --experimental is mandatory.
# Also, to expose ports, you need to specify
# --userland-proxy-path=/path/to/rootlesskit-docker-proxy
#
# External dependencies:
# * newuidmap and newgidmap needs to be installed.
# * /etc/subuid and /etc/subgid needs to be configured for the current user.
# * Either one of slirp4netns (v0.3+), VPNKit, lxc-user-nic needs to be installed.
#   slirp4netns is used by default if installed. Otherwise fallsback to VPNKit.
#   The default value can be overridden with $DOCKERD_ROOTLESS_ROOTLESSKIT_NET=(slirp4netns|vpnkit|lxc-user-nic)
#
# See the documentation for the further information.
#
# Review fix: variable expansions inside test commands and exec argument
# lists are now quoted; unquoted "$@"/"$0"/path expansions broke argument
# boundaries and paths containing whitespace, and `[ -z $var ]` is a syntax
# error when the variable expands to multiple words.
set -e -x
if ! [ -w "$XDG_RUNTIME_DIR" ]; then
	echo "XDG_RUNTIME_DIR needs to be set and writable"
	exit 1
fi
if ! [ -w "$HOME" ]; then
	echo "HOME needs to be set and writable"
	exit 1
fi

# Prefer the distro-packaged binary name, fall back to upstream's.
rootlesskit=""
for f in docker-rootlesskit rootlesskit; do
	if which "$f" >/dev/null 2>&1; then
		rootlesskit="$f"
		break
	fi
done
if [ -z "$rootlesskit" ]; then
	echo "rootlesskit needs to be installed"
	exit 1
fi

: "${DOCKERD_ROOTLESS_ROOTLESSKIT_NET:=}"
: "${DOCKERD_ROOTLESS_ROOTLESSKIT_MTU:=}"
net=$DOCKERD_ROOTLESS_ROOTLESSKIT_NET
mtu=$DOCKERD_ROOTLESS_ROOTLESSKIT_MTU
if [ -z "$net" ]; then
	if which slirp4netns >/dev/null 2>&1; then
		# Only usable when it supports --disable-host-loopback (v0.3+).
		if slirp4netns --help | grep -- --disable-host-loopback; then
			net=slirp4netns
			if [ -z "$mtu" ]; then
				mtu=65520
			fi
		else
			echo "slirp4netns does not support --disable-host-loopback. Falling back to VPNKit."
		fi
	fi
	if [ -z "$net" ]; then
		if which vpnkit >/dev/null 2>&1; then
			net=vpnkit
		else
			echo "Either slirp4netns (v0.3+) or vpnkit needs to be installed"
			exit 1
		fi
	fi
fi
if [ -z "$mtu" ]; then
	mtu=1500
fi

if [ -z "$_DOCKERD_ROOTLESS_CHILD" ]; then
	_DOCKERD_ROOTLESS_CHILD=1
	export _DOCKERD_ROOTLESS_CHILD
	# Re-exec the script via RootlessKit, so as to create unprivileged {user,mount,network} namespaces.
	#
	# --copy-up allows removing/creating files in the directories by creating tmpfs and symlinks
	# * /etc: copy-up is required so as to prevent `/etc/resolv.conf` in the
	#         namespace from being unexpectedly unmounted when `/etc/resolv.conf` is recreated on the host
	#         (by either systemd-networkd or NetworkManager)
	# * /run: copy-up is required so that we can create /run/docker (hardcoded for plugins) in our namespace
	# $DOCKERD_ROOTLESS_ROOTLESSKIT_FLAGS stays unquoted on purpose: it may
	# carry several whitespace-separated flags.
	exec "$rootlesskit" \
		--net="$net" --mtu="$mtu" \
		--disable-host-loopback --port-driver=builtin \
		--copy-up=/etc --copy-up=/run \
		$DOCKERD_ROOTLESS_ROOTLESSKIT_FLAGS \
		"$0" "$@"
else
	[ "$_DOCKERD_ROOTLESS_CHILD" = 1 ]
	# remove the symlinks for the existing files in the parent namespace if any,
	# so that we can create our own files in our mount namespace.
	rm -f /run/docker /run/xtables.lock
	exec dockerd "$@"
fi
|
<filename>Example/AVScreenRecorder/AVTestViewController.h
//
// AVTestViewController.h
// AVScreenRecorder
//
// Created by <NAME> on 16.09.16.
// Copyright © 2016 <NAME>. All rights reserved.
//
#import <UIKit/UIKit.h>
/// Minimal view controller used by the AVScreenRecorder example app; all
/// behaviour is inherited from UIViewController.
@interface AVTestViewController : UIViewController
@end
|
from itertools import cycle
from math import ceil
import random
from django.contrib.auth import get_user_model
from core.tests.utils import MockModelTimestamps
from credit.constants import CREDIT_STATUS, LOG_ACTIONS
from credit.models import Log
from prison.models import Prison
User = get_user_model()
def get_owner_and_status_chooser():
    """Build a chooser that round-robins (clerk, credit status) per prison.

    For every prison, precompute two endless rotations: its non-staff
    'PrisonClerk' users and the two credit statuses. The returned callable
    yields the next pair for a given prison.
    """
    rotations = {}
    for prison in Prison.objects.all():
        clerk_ids = prison.prisonusermapping_set.filter(
            user__is_staff=False, user__groups__name='PrisonClerk'
        ).values_list('user', flat=True)
        rotations[prison.pk] = (
            cycle(list(User.objects.filter(id__in=clerk_ids))),
            cycle([
                CREDIT_STATUS.CREDIT_PENDING,
                CREDIT_STATUS.CREDITED,
            ]),
        )

    def internal_function(prison):
        clerks, statuses = rotations[prison.pk]
        return next(clerks), next(statuses)

    return internal_function
def create_credit_log(credit, created, modified):
    """Record a CREDITED or REFUNDED log entry for `credit`, backdated to
    `modified` via MockModelTimestamps.

    NOTE(review): `created` is accepted but never used — both mocked
    timestamps are pinned to `modified`; confirm this is intentional.
    """
    with MockModelTimestamps(modified, modified):
        entry = {
            'credit': credit,
            'user': credit.owner,
        }
        if credit.credited:
            Log.objects.create(action=LOG_ACTIONS.CREDITED, **entry)
        elif credit.refunded:
            Log.objects.create(action=LOG_ACTIONS.REFUNDED, **entry)
def random_amount():
    """Pick a random credit amount in pence.

    80% of the time draw a round amount up to £45, otherwise up to £295;
    in 10% of cases add a non-round surcharge of up to £10.
    """
    upper = 5000 if random.random() < 0.8 else 30000
    amount = random.randrange(500, upper, 500)
    if random.random() < 0.1:
        amount += random.randint(0, 1000)
    return amount
def build_sender_prisoner_pairs(senders, prisoners):
    """Produce 3*len(senders) (sender, prisoner) pairs with skewed reuse.

    Early iterations draw from the full pools; later ones shrink the pool
    of eligible senders (to a half, then a fifteenth) so some senders and
    prisoners recur far more often, mimicking real traffic.
    """
    n_senders = len(senders)
    n_prisoners = len(prisoners)
    pairs = []
    for i in range(n_senders * 3):
        prisoner_pool = n_prisoners
        if i <= n_senders:
            sender_pool = n_senders
            # every 3rd/2nd-of-3 iteration narrows the prisoner pool
            if i % 3 == 1:
                prisoner_pool = ceil(n_prisoners / 2)
            elif i % 3 == 2:
                prisoner_pool = ceil(n_prisoners / 15)
        elif i <= n_senders * 2:
            sender_pool = ceil(n_senders / 2)
        else:
            sender_pool = ceil(n_senders / 15)
        pairs.append((senders[i % sender_pool], prisoners[i % prisoner_pool]))
    return pairs
|
package client
import (
"cloud.google.com/go/trace"
"github.com/ninnemana/huego"
)
// client is a thin Hue client implementation; the trace client field is
// reserved for request instrumentation (currently never assigned or read).
type client struct {
	trace *trace.Client
}

// New returns an empty Hue client implementation.
// NOTE(review): the import path is ".../huego" but the return type is
// referenced as `hue.Client` — confirm the package's declared name is `hue`,
// otherwise this file will not compile.
func New() (hue.Client, error) {
	return &client{}, nil
}
|
# IntelliJ IDEA
# Opt-in flag: show the IDEA project name in the shell prompt (default on).
if [[ -z "$SHOW_IDEA_PROJECT_PROMPT" ]]; then
export SHOW_IDEA_PROJECT_PROMPT=1
fi
# Detect whether this shell runs inside IDEA's embedded terminal by checking
# the parent process. NOTE(review): `grep $PPID` matches the pid anywhere on
# the ps line, so false positives are possible.
ps aux | grep $PPID | grep -q 'IntelliJ IDEA'
export TERMINAL_IN_IDEA=$?
# Resolve $1 (a file or directory) to an absolute directory path; prints "/"
# when no argument is given. Note: sets RELATIVE_DIR in the caller's scope.
getAbsolutePath () {
    if [[ -z "$1" ]]; then
        echo "/"
        return 0
    fi
    RELATIVE_DIR="$1"
    # For a file argument, resolve its containing directory instead.
    if [[ -f "$1" ]]; then
        RELATIVE_DIR="$(dirname "${RELATIVE_DIR}")"
    fi
    echo "$(cd "${RELATIVE_DIR}"; pwd)"
    return 0
}
# TODO: fix this
# Walk upward from $1 looking for an IDEA project and print "{name} ".
# Prefers .idea/.name, then a *.iml file, then recurses into the parent dir.
# NOTE(review): `[[ -e *.iml ]]` does not glob-expand inside [[ ]] — it tests
# for a file literally named "*.iml"; likely the source of the TODO above.
getProjectNameInDir () {
    [[ ${SHOW_IDEA_PROJECT_PROMPT} -eq 0 ]] && return 1
    DIR="$(getAbsolutePath "$1")"
    if [[ -d "${DIR}/.idea" && -f "${DIR}/.idea/.name" ]]; then
        echo -n "{$(cat "${DIR}/.idea/.name")} "
        return 0
    elif [[ -e *.iml ]]; then
        echo -n "{$(echo *.iml | head -n 1 | sed 's/.iml$//')} "
    else
        # Recurse toward the filesystem root; give up at "/".
        if [[ "${DIR}" != "/" ]]; then
            getProjectNameInDir "${DIR%/*}"
            return $?
        else
            return 1
        fi
    fi
    return 1
}
# macOS only: open files/directories in IntelliJ IDEA via its bundle id.
if [[ "$(uname)" = "Darwin" ]]; then
    idea_open() {
        open -b com.jetbrains.intellij "$@"
    }
fi
# Write an .idea/other.xml that disables PyCharm's "show matplotlib plots in
# tool window" behaviour for the current project. Refuses to touch an
# existing other.xml (no XML merging implemented).
# NOTE(review): the <<-EOF heredoc strips leading TABS only — the body lines
# must be tab-indented for the written file to be flush-left.
ij-config-hide-py-scientific-toolwindow() {
    if [[ -d ".idea" ]]; then
        if [[ -f ".idea/other.xml" ]]; then
            echo "Editing .idea/other.xml is not supported yet" >&2
        else
            cat <<-EOF > .idea/other.xml
	<?xml version="1.0" encoding="UTF-8"?>
	<project version="4">
	  <component name="PySciProjectComponent">
	    <option name="PY_MATPLOTLIB_IN_TOOLWINDOW" value="false" />
	  </component>
	</project>
	EOF
        fi
    else
        echo "Not an IntelliJ Idea project directory" >&2
    fi
}
|
<filename>pkg/config/kube-scheduler.go
/*
Copyright © 2021 Microshift Contributors
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package config
import (
"io/ioutil"
"os"
"path/filepath"
)
// KubeSchedulerConfig creates a config for kube-scheduler in option --config.
// It writes a minimal KubeSchedulerConfiguration under cfg.DataDir that
// points at the scheduler kubeconfig and disables leader election.
func KubeSchedulerConfig(cfg *MicroshiftConfig) error {
	// Build the target path once instead of repeating string concatenation.
	configPath := filepath.Join(cfg.DataDir, "resources", "kube-scheduler", "config", "config.yaml")

	data := []byte(`apiVersion: kubescheduler.config.k8s.io/v1beta1
kind: KubeSchedulerConfiguration
clientConnection:
  kubeconfig: ` + cfg.DataDir + `/resources/kube-scheduler/kubeconfig
leaderElection:
  leaderElect: false`)

	// Previously the MkdirAll error was discarded, so a failed mkdir only
	// surfaced later as a confusing WriteFile error.
	if err := os.MkdirAll(filepath.Dir(configPath), os.FileMode(0755)); err != nil {
		return err
	}
	return ioutil.WriteFile(configPath, data, 0644)
}
|
import Post from 'models/Post'
import sendErrorMessage from 'utils/errorMessage'
// Field names on the Post model that the counter resolvers project and return.
const TOTAL_LIKES = 'totalLikes'
const TOTAL_COMMENTS = 'totalComments'
/**
 * Build a GraphQL resolver that fetches a single counter field of a Post.
 * On lookup failure the error is converted by sendErrorMessage instead of
 * propagating.
 */
const mainResolver = (field) => async (_, { Input: { postID } }) => {
  try {
    const document = await Post.findById(postID, field)
    return { [field]: document[field] }
  } catch (error) {
    return sendErrorMessage(error)
  }
}
// Query resolvers for the two post counters; both reuse mainResolver and
// differ only in which field they project.
const resolvers = {
  Query: {
    getTotalLikes: mainResolver(TOTAL_LIKES),
    getTotalComments: mainResolver(TOTAL_COMMENTS),
  },
}

export default resolvers
|
// Define the SubExpression and Step types for demonstration purposes
/// Placeholder AST node; a real implementation would carry expression data.
struct SubExpression;
/// Placeholder path step consumed by `Visitor::visit_path`.
struct Step;
// Define the Visitor trait
/// Visitor interface over expression kinds; an implementor supplies the
/// handling for each variant it cares about.
trait Visitor {
    fn visit_negation(&mut self, _expression: &SubExpression);
    fn visit_not_equal(&mut self, _left: &SubExpression, _right: &SubExpression);
    fn visit_or(&mut self, _left: &SubExpression, _right: &SubExpression);
    fn visit_path(&mut self, start_point: &SubExpression, steps: &[Step]);
}
// Define a concrete implementation of the Visitor trait for a specific use case
/// Logging visitor: announces which expression kind was visited.
struct MyVisitor;

impl Visitor for MyVisitor {
    fn visit_negation(&mut self, _expression: &SubExpression) {
        println!("Visiting negation expression");
        // Implement specific handling for negation expression
    }

    fn visit_not_equal(&mut self, _left: &SubExpression, _right: &SubExpression) {
        println!("Visiting not equal expression");
        // Implement specific handling for not equal expression
    }

    fn visit_or(&mut self, _left: &SubExpression, _right: &SubExpression) {
        println!("Visiting OR expression");
        // Implement specific handling for OR expression
    }

    // Fix: parameters prefixed with `_` — they are not used yet and the
    // bare names produced unused-variable warnings, unlike every other
    // method in this impl.
    fn visit_path(&mut self, _start_point: &SubExpression, _steps: &[Step]) {
        println!("Visiting path expression");
        // Implement specific handling for path expression
    }
}
// Define the Processor struct
/// Drives dispatch of expressions onto a `Visitor`.
struct Processor;

impl Processor {
    /// Dispatch `expression` to the matching method on `visitor`.
    /// NOTE(review): the match currently contains only the catch-all arm,
    /// so every call panics via `unimplemented!()` until real expression
    /// variants are matched here.
    fn process_expression(&self, expression: &SubExpression, visitor: &mut dyn Visitor) {
        // Dispatch the appropriate visit method based on the type of the expression
        match expression {
            // Match specific expression types and call the corresponding visit method
            _ => unimplemented!(), // Implement matching for specific expression types
        }
    }
}
/// Demo entry point wiring expression, visitor and processor together.
/// NOTE(review): `process_expression` currently always hits
/// `unimplemented!()`, so running this program panics.
fn main() {
    let expression = SubExpression; // Create an example expression
    let mut visitor = MyVisitor; // Create an instance of the concrete visitor
    let processor = Processor; // Create an instance of the processor
    processor.process_expression(&expression, &mut visitor); // Process the expression using the processor and visitor
}
<reponame>albertopombo/lab-ocu
/** Theme palette contract: greyscale colors keyed by their UI role. */
export interface Colors {
  primary: string
  secondary: string
  dark: string
  light: string
  hover: string
}

/** Concrete greyscale palette used across the app (hex RGB strings). */
export const colors: Colors = {
  primary: '#d7d7d7',
  secondary: '#d0d0d0',
  dark: '#7e7e7e',
  light: '#ffffff',
  hover: '#d9d9d9'
}
|
# Push target for gh-pages deploys; "xxxxxxxx" is a redacted access token —
# the real value is injected via the encrypted CI variable (see next line).
export GHPAGES_URL="https://xxxxxxxx@github.com/kvz/locutus.git"
# travis encrypt --add GHPAGES_URL=${GHPAGES_URL}
|
/*
* Copyright (c) 2011 Intel Corporation. All Rights Reserved.
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sub license, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject to
* the following conditions:
*
* The above copyright notice and this permission notice (including the
* next paragraph) shall be included in all copies or substantial portions
* of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
* OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
* IN NO EVENT SHALL PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR
* ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*
* Authors:
* <NAME> <<EMAIL>>
* <NAME> <<EMAIL>>
*
*/
#include <X11/Xutil.h>
#include <X11/extensions/Xrandr.h>
#include <X11/extensions/dpms.h>
#include <va/va_dricommon.h>
#include <va/va_backend.h>
#include "psb_output.h"
#include "psb_surface.h"
#include "psb_buffer.h"
#include "psb_x11.h"
#include "psb_surface_ext.h"
#include <stdio.h>
#include <string.h>
#include <stdarg.h>
#include "psb_surface_ext.h"
#include <wsbm/wsbm_manager.h>
/* Convenience accessors for per-context driver state. */
#define INIT_DRIVER_DATA psb_driver_data_p driver_data = (psb_driver_data_p) ctx->pDriverData
#define INIT_OUTPUT_PRIV psb_x11_output_p output = (psb_x11_output_p)(((psb_driver_data_p)ctx->pDriverData)->ws_priv)

/* Object-heap lookups: map a VA id to its driver-side object (NULL if absent). */
#define SURFACE(id) ((object_surface_p) object_heap_lookup( &driver_data->surface_heap, id ))
#define BUFFER(id) ((object_buffer_p) object_heap_lookup( &driver_data->buffer_heap, id ))
#define IMAGE(id) ((object_image_p) object_heap_lookup( &driver_data->image_heap, id ))
#define SUBPIC(id) ((object_subpic_p) object_heap_lookup( &driver_data->subpic_heap, id ))
#define CONTEXT(id) ((object_context_p) object_heap_lookup( &driver_data->context_heap, id ))

void psb_x11_freeWindowClipBoxList(psb_x11_clip_list_t * pHead);

//X error trap
/* Last X error code captured by error_handler() while traps are active. */
static int x11_error_code = 0;
static int (*old_error_handler)(Display *, XErrorEvent *);

/* Running target timestamp used by psb_doframerate() for fixed-fps pacing. */
static struct timeval inter_period = {0};
/*
 * Pace output at `fps` frames per second: advance the accumulated target
 * time (inter_period) by one frame interval, and if that target is still
 * in the future, sleep the difference away in select().
 */
static void psb_doframerate(int fps)
{
    struct timeval time_deta;

    inter_period.tv_usec += 1000000 / fps;

    /*recording how long it passed*/
    if (inter_period.tv_usec >= 1000000) {
        inter_period.tv_usec -= 1000000;
        inter_period.tv_sec++;
    }

    gettimeofday(&time_deta, (struct timezone *)NULL);

    /* delta = target - now, with manual borrow on the microsecond field */
    time_deta.tv_usec = inter_period.tv_usec - time_deta.tv_usec;
    time_deta.tv_sec = inter_period.tv_sec - time_deta.tv_sec;

    if (time_deta.tv_usec < 0) {
        time_deta.tv_usec += 1000000;
        time_deta.tv_sec--;
    }

    /* already behind schedule: render immediately */
    if (time_deta.tv_sec < 0 || (time_deta.tv_sec == 0 && time_deta.tv_usec <= 0))
        return;

    /* select() with no fds is used as a sub-second sleep */
    select(0, NULL, NULL, NULL, &time_deta);
}
/*
 * Return the index of the lowest set bit of a channel mask, i.e. how far a
 * channel value must be shifted left to line up with `mask`.
 * Fix: a zero mask previously spun forever in the while loop; return 0 for
 * it instead (an empty mask masks the result to 0 anyway).
 */
static uint32_t mask2shift(uint32_t mask)
{
    uint32_t shift = 0;

    if (mask == 0)
        return 0;

    while ((mask & 0x1) == 0) {
        mask = mask >> 1;
        shift++;
    }
    return shift;
}
/*
 * Software PutSurface path: map the surface, color-convert its NV12 data to
 * RGB in a client-side XImage, and push it to `draw` with XPutImage.
 *
 * Fixes vs. previous revision:
 *  - the "bits/pixel not supported" message now passes its %d argument;
 *  - XCreateImage() result is NULL-checked before dereference;
 *  - destination rows are addressed relative to the clip origin, so a
 *    non-zero srcy no longer writes past the end of the XImage buffer.
 */
static VAStatus psb_putsurface_x11(
    VADriverContextP ctx,
    VASurfaceID surface,
    Drawable draw, /* X Drawable */
    short srcx,
    short srcy,
    unsigned short srcw,
    unsigned short srch,
    short destx,
    short desty,
    unsigned short destw,
    unsigned short desth,
    unsigned int flags /* de-interlacing flags */
)
{
    INIT_DRIVER_DATA;
    GC gc;
    XImage *ximg = NULL;
    Visual *visual;
    unsigned short width, height;
    int depth;
    int x = 0, y = 0;
    VAStatus vaStatus = VA_STATUS_SUCCESS;
    void *surface_data = NULL;
    int ret;

    uint32_t rmask = 0;
    uint32_t gmask = 0;
    uint32_t bmask = 0;

    uint32_t rshift = 0;
    uint32_t gshift = 0;
    uint32_t bshift = 0;

    /* Clip to the smaller of the source and destination rectangles. */
    if (srcw <= destw)
        width = srcw;
    else
        width = destw;

    if (srch <= desth)
        height = srch;
    else
        height = desth;

    object_surface_p obj_surface = SURFACE(surface);
    if (NULL == obj_surface) {
        vaStatus = VA_STATUS_ERROR_INVALID_SURFACE;
        DEBUG_FAILURE;
        return vaStatus;
    }

    psb_surface_p psb_surface = obj_surface->psb_surface;

    psb__information_message("PutSurface: src w x h = %d x %d\n", srcw, srch);
    psb__information_message("PutSurface: dest w x h = %d x %d\n", destw, desth);
    psb__information_message("PutSurface: clipped w x h = %d x %d\n", width, height);

    visual = DefaultVisual((Display *)ctx->native_dpy, ctx->x11_screen);
    gc = XCreateGC((Display *)ctx->native_dpy, draw, 0, NULL);
    depth = DefaultDepth((Display *)ctx->native_dpy, ctx->x11_screen);

    if (TrueColor != visual->class) {
        psb__error_message("PutSurface: Default visual of X display must be TrueColor.\n");
        vaStatus = VA_STATUS_ERROR_UNKNOWN;
        goto out;
    }

    ret = psb_buffer_map(&psb_surface->buf, &surface_data);
    if (ret) {
        vaStatus = VA_STATUS_ERROR_UNKNOWN;
        goto out;
    }

    rmask = visual->red_mask;
    gmask = visual->green_mask;
    bmask = visual->blue_mask;
    rshift = mask2shift(rmask);
    gshift = mask2shift(gmask);
    bshift = mask2shift(bmask);

    psb__information_message("PutSurface: Pixel masks: R = %08x G = %08x B = %08x\n", rmask, gmask, bmask);
    psb__information_message("PutSurface: Pixel shifts: R = %d G = %d B = %d\n", rshift, gshift, bshift);

    ximg = XCreateImage((Display *)ctx->native_dpy, visual, depth, ZPixmap, 0, NULL, width, height, 32, 0);
    if (NULL == ximg) { /* previously dereferenced unchecked */
        psb__error_message("PutSurface: XCreateImage failed\n");
        vaStatus = VA_STATUS_ERROR_ALLOCATION_FAILED;
        goto out;
    }

    if (ximg->byte_order == MSBFirst)
        psb__information_message("PutSurface: XImage pixels has MSBFirst, %d bits / pixel\n", ximg->bits_per_pixel);
    else
        psb__information_message("PutSurface: XImage pixels has LSBFirst, %d bits / pixel\n", ximg->bits_per_pixel);

    if (ximg->bits_per_pixel != 32) {
        /* format string expected an argument that was never passed */
        psb__error_message("PutSurface: Display uses %d bits/pixel which is not supported\n", ximg->bits_per_pixel);
        vaStatus = VA_STATUS_ERROR_UNKNOWN;
        goto out;
    }

    /* GCC nested helper: convert one YUV sample to a packed pixel. */
    void yuv2pixel(uint32_t * pixel, int y, int u, int v) {
        int r, g, b;
        /* Warning, magic values ahead */
        r = y + ((351 * (v - 128)) >> 8);
        g = y - (((179 * (v - 128)) + (86 * (u - 128))) >> 8);
        b = y + ((444 * (u - 128)) >> 8);

        if (r > 255) r = 255;
        if (g > 255) g = 255;
        if (b > 255) b = 255;
        if (r < 0) r = 0;
        if (g < 0) g = 0;
        if (b < 0) b = 0;

        *pixel = ((r << rshift) & rmask) | ((g << gshift) & gmask) | ((b << bshift) & bmask);
    }

    /* freed implicitly by XDestroyImage() below */
    ximg->data = (char *) malloc(ximg->bytes_per_line * height);
    if (NULL == ximg->data) {
        vaStatus = VA_STATUS_ERROR_ALLOCATION_FAILED;
        goto out;
    }

    uint8_t *src_y = surface_data + psb_surface->stride * srcy;
    uint8_t *src_uv = surface_data + psb_surface->stride * (obj_surface->height + srcy / 2);

    /* Two rows per pass: NV12 shares one UV row between two luma rows. */
    for (y = srcy; y < (srcy + height); y += 2) {
        /* destination rows are 0-based within the XImage, hence (y - srcy) */
        uint32_t *dest_even = (uint32_t *)(ximg->data + (y - srcy) * ximg->bytes_per_line);
        uint32_t *dest_odd = (uint32_t *)(ximg->data + (y - srcy + 1) * ximg->bytes_per_line);

        for (x = srcx; x < (srcx + width); x += 2) {
            /* Y1 Y2 */
            /* Y3 Y4 */
            int y1 = *(src_y + x);
            int y2 = *(src_y + x + 1);
            int y3 = *(src_y + x + psb_surface->stride);
            int y4 = *(src_y + x + psb_surface->stride + 1);

            /* U V */
            int u = *(src_uv + x);
            int v = *(src_uv + x + 1);

            yuv2pixel(dest_even++, y1, u, v);
            yuv2pixel(dest_even++, y2, u, v);

            yuv2pixel(dest_odd++, y3, u, v);
            yuv2pixel(dest_odd++, y4, u, v);
        }

        src_y += psb_surface->stride * 2;
        src_uv += psb_surface->stride;
    }

    XPutImage((Display *)ctx->native_dpy, draw, gc, ximg, 0, 0, destx, desty, width, height);
    XFlush((Display *)ctx->native_dpy);

out:
    if (NULL != ximg)
        XDestroyImage(ximg);

    if (NULL != surface_data)
        psb_buffer_unmap(&psb_surface->buf);

    XFreeGC((Display *)ctx->native_dpy, gc);

    return vaStatus;
}
/*
 * Allocate and initialise the X11 output backend: honour environment
 * overrides, set up XVideo state and pick the default output method.
 * Returns the new output struct, or NULL on allocation failure.
 * Fix: PSB_VIDEO_EXTEND_FULLSCREEN was checked twice with identical
 * effect; the redundant second check has been removed.
 */
void *psb_x11_output_init(VADriverContextP ctx)
{
    INIT_DRIVER_DATA;
    psb_x11_output_p output = calloc(1, sizeof(psb_x11_output_s));

    if (output == NULL) {
        psb__error_message("Can't malloc memory\n");
        return NULL;
    }

    if (getenv("PSB_VIDEO_EXTEND_FULLSCREEN"))
        driver_data->extend_fullscreen = 1;

    /* SW-rendering override skips all XVideo initialisation below. */
    if (getenv("PSB_VIDEO_PUTSURFACE_X11")) {
        psb__information_message("Putsurface force to SW rendering\n");
        driver_data->output_method = PSB_PUTSURFACE_X11;

        return output;
    }

    psb_init_xvideo(ctx, output);

    output->output_drawable = 0;
    output->extend_drawable = 0;
    output->pClipBoxList = NULL;
    output->ui32NumClipBoxList = 0;
    output->frame_count = 0;
    output->bIsVisible = 0;

    /* always init CTEXTURE and COVERLAY */
    driver_data->coverlay = 1;
    driver_data->color_key = 0x11;
    driver_data->ctexture = 1;

    driver_data->xrandr_dirty = 0;
    driver_data->xrandr_update = 0;

    driver_data->xrandr_thread_id = 0;
    if (getenv("PSB_VIDEO_NOTRD") || IS_MRST(driver_data)) {
        psb__information_message("Force not to start psb xrandr thread.\n");
        driver_data->use_xrandr_thread = 0;
    } else {
        psb__information_message("By default, use psb xrandr thread.\n");
        driver_data->use_xrandr_thread = 1;
    }

    if (IS_MFLD(driver_data) && /* force MFLD to use COVERLAY */
        (driver_data->output_method == PSB_PUTSURFACE_OVERLAY)) {
        psb__information_message("Use client overlay mode for post-processing\n");
        driver_data->output_method = PSB_PUTSURFACE_COVERLAY;
    }

    /* Environment overrides for the output method, strongest last. */
    if (getenv("PSB_VIDEO_TEXTURE") && output->textured_portID) {
        psb__information_message("Putsurface force to use Textured Xvideo\n");
        driver_data->output_method = PSB_PUTSURFACE_FORCE_TEXTURE;
    }

    if (getenv("PSB_VIDEO_OVERLAY") && output->overlay_portID) {
        psb__information_message("Putsurface force to use Overlay Xvideo\n");
        driver_data->output_method = PSB_PUTSURFACE_FORCE_OVERLAY;
    }

    if (getenv("PSB_VIDEO_CTEXTURE")) {
        psb__information_message("Putsurface force to use Client Texture\n");
        driver_data->output_method = PSB_PUTSURFACE_FORCE_CTEXTURE;
    }

    if (getenv("PSB_VIDEO_COVERLAY")) {
        psb__information_message("Putsurface force to use Client Overlay\n");
        driver_data->coverlay = 1;
        driver_data->output_method = PSB_PUTSURFACE_FORCE_COVERLAY;
    }

    return output;
}
/* X error hook installed by x11_trap_errors(): remember the code and
 * swallow the error so the client connection is not aborted. */
static int
error_handler(Display *dpy, XErrorEvent *error)
{
    x11_error_code = error->error_code;
    return 0;
}
/* Tear down the X11 output backend: free the cached clip-box list, destroy
 * the fullscreen-extend window if one was created, shut down XVideo, and
 * (when client-texture was in use) close the DRI connection. */
void psb_x11_output_deinit(VADriverContextP ctx)
{
    INIT_DRIVER_DATA;
    INIT_OUTPUT_PRIV;
    struct dri_state *dri_state = (struct dri_state *)ctx->dri_state;

    psb_x11_freeWindowClipBoxList(output->pClipBoxList);
    output->pClipBoxList = NULL;

    if (output->extend_drawable) {
        XDestroyWindow(ctx->native_dpy, output->extend_drawable);
        output->extend_drawable = 0;
    }

    psb_deinit_xvideo(ctx);

    /* close dri fd and release all drawable buffer */
    if (driver_data->ctexture == 1)
        (*dri_state->close)(ctx);
}
/* Begin capturing X errors: clear the saved code and install our handler,
 * remembering the previous one for x11_untrap_errors(). */
static void
x11_trap_errors(void)
{
    x11_error_code = 0;
    old_error_handler = XSetErrorHandler(error_handler);
}
/* Stop capturing X errors; returns the last captured error code, or 0 if
 * none occurred while trapped. */
static int
x11_untrap_errors(void)
{
    XSetErrorHandler(old_error_handler);
    return x11_error_code;
}
/* Heuristic: a drawable is a window iff XGetWindowAttributes succeeds on
 * it (the call generates an X error for pixmaps, which we trap instead of
 * letting it kill the client). */
static int
is_window(Display *dpy, Drawable drawable)
{
    XWindowAttributes wattr;

    x11_trap_errors();
    XGetWindowAttributes(dpy, drawable, &wattr);
    return x11_untrap_errors() == 0;
}
/*
 * Choose the PutSurface rendering path for this frame. Honours any
 * PSB_PUTSURFACE_FORCE_* override, otherwise starts from the platform
 * default (overlay on MRST, client overlay on MFLD) and falls back to
 * client texture for pixmaps, oversized surfaces, or subpicture-scaling
 * cases the overlay cannot handle. Always returns 0.
 */
static int pnw_check_output_method(VADriverContextP ctx, object_surface_p obj_surface, int width, int height, int destw, int desth, Drawable draw)
{
    INIT_DRIVER_DATA;
    INIT_OUTPUT_PRIV;

    if (driver_data->output_method == PSB_PUTSURFACE_FORCE_TEXTURE ||
        driver_data->output_method == PSB_PUTSURFACE_FORCE_OVERLAY ||
        driver_data->output_method == PSB_PUTSURFACE_FORCE_CTEXTURE ||
        driver_data->output_method == PSB_PUTSURFACE_FORCE_COVERLAY) {
        psb__information_message("Force to use %08x for PutSurface\n", driver_data->output_method);
        return 0;
    }

    /* Assign default value for MRST */
    if (IS_MRST(driver_data))
        driver_data->output_method = PSB_PUTSURFACE_OVERLAY;
    else if (IS_MFLD(driver_data))
        driver_data->output_method = PSB_PUTSURFACE_COVERLAY;

    if (driver_data->overlay_auto_paint_color_key)
        driver_data->output_method = PSB_PUTSURFACE_COVERLAY;

    /* Avoid call is_window()/XGetWindowAttributes() every frame */
    if (output->output_drawable_save != draw) {
        output->output_drawable_save = draw;
        if (!is_window(ctx->native_dpy, draw))
            output->is_pixmap = 1;
        else
            output->is_pixmap = 0;
    }

    /*FIXME: overlay path can't handle subpicture scaling. when surface size > dest box, fallback to texblit.*/
    if ((output->is_pixmap == 1)
        || (IS_MRST(driver_data) && obj_surface->subpic_count > 0)
        || (IS_MFLD(driver_data) && obj_surface->subpic_count && ((width > destw) || (height > desth)))
        || (width >= 2048)
        || (height >= 2048)
       ) {
        psb__information_message("Putsurface fall back to use Client Texture\n");
        driver_data->output_method = PSB_PUTSURFACE_CTEXTURE;
    }

    /* Pick up a pending rotation change before rendering. */
    if (IS_MFLD(driver_data) &&
        (driver_data->xrandr_dirty & PSB_NEW_ROTATION)) {
        psb_RecalcRotate(ctx);
        driver_data->xrandr_dirty &= ~PSB_NEW_ROTATION;
    }
    return 0;
}
/*
 * VA-API PutSurface entry point: validate the surface, apply optional
 * fixed-fps pacing, pick a rendering path via pnw_check_output_method(),
 * then dispatch to the X11 / client-texture / client-overlay / XVideo
 * renderer under the output mutex.
 */
VAStatus psb_PutSurface(
    VADriverContextP ctx,
    VASurfaceID surface,
    void *drawable, /* X Drawable */
    short srcx,
    short srcy,
    unsigned short srcw,
    unsigned short srch,
    short destx,
    short desty,
    unsigned short destw,
    unsigned short desth,
    VARectangle *cliprects, /* client supplied clip list */
    unsigned int number_cliprects, /* number of clip rects in the clip list */
    unsigned int flags /* de-interlacing flags */
)
{
    INIT_DRIVER_DATA;
    object_surface_p obj_surface;
    VAStatus vaStatus = VA_STATUS_SUCCESS;
    Drawable draw = (Drawable)drawable;
    obj_surface = SURFACE(surface);

    if (NULL == obj_surface) {
        vaStatus = VA_STATUS_ERROR_INVALID_SURFACE;
        DEBUG_FAILURE;
        return vaStatus;
    }

    /* Debug mode: accept the call but render nothing. */
    if (driver_data->dummy_putsurface) {
        psb__information_message("vaPutSurface: dummy mode, return directly\n");
        return VA_STATUS_SUCCESS;
    }

    /* Pure software path bypasses method selection and locking below. */
    if (driver_data->output_method == PSB_PUTSURFACE_X11) {
        psb_putsurface_x11(ctx, surface, draw, srcx, srcy, srcw, srch,
                           destx, desty, destw, desth, flags);
        return VA_STATUS_SUCCESS;
    }

    /* Optional fixed frame-rate pacing (see psb_doframerate). */
    if (driver_data->fixed_fps > 0) {
        if ((inter_period.tv_sec == 0) && (inter_period.tv_usec == 0))
            gettimeofday(&inter_period, (struct timezone *)NULL);

        psb_doframerate(driver_data->fixed_fps);
    }

    pnw_check_output_method(ctx, obj_surface, srcw, srch, destw, desth, draw);

    pthread_mutex_lock(&driver_data->output_mutex);

    if ((driver_data->output_method == PSB_PUTSURFACE_CTEXTURE) ||
        (driver_data->output_method == PSB_PUTSURFACE_FORCE_CTEXTURE)) {
        psb__information_message("Using client Texture for PutSurface\n");
        psb_putsurface_ctexture(ctx, surface, draw,
                                srcx, srcy, srcw, srch,
                                destx, desty, destw, desth,
                                flags);
    } else if ((driver_data->output_method == PSB_PUTSURFACE_COVERLAY) ||
               (driver_data->output_method == PSB_PUTSURFACE_FORCE_COVERLAY)) {
        psb__information_message("Using client Overlay for PutSurface\n");

        /* overlay path caps the source width at 1920 */
        srcw = srcw <= 1920 ? srcw : 1920;
        /* init overlay*/
        if (!driver_data->coverlay_init) {
            psb_coverlay_init(ctx);
            driver_data->coverlay_init = 1;
        }

        psb_putsurface_coverlay(
            ctx, surface, draw,
            srcx, srcy, srcw, srch,
            destx, desty, destw, desth,
            cliprects, number_cliprects, flags);
    } else
        psb_putsurface_xvideo(
            ctx, surface, draw,
            srcx, srcy, srcw, srch,
            destx, desty, destw, desth,
            cliprects, number_cliprects, flags);

    pthread_mutex_unlock(&driver_data->output_mutex);

    driver_data->frame_count++;

    return VA_STATUS_SUCCESS;
}
|
<filename>vendor/github.com/coreswitch/openconfigd/cli/examples/loadables/printenv.c
/*
* printenv -- minimal builtin clone of BSD printenv(1).
*
* usage: printenv [varname]
*
*/
/*
Copyright (C) 1999-2009 Free Software Foundation, Inc.
This file is part of GNU Bash.
Bash is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Bash is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Bash. If not, see <http://www.gnu.org/licenses/>.
*/
#include <config.h>
#include <stdio.h>
#include "builtins.h"
#include "shell.h"
#include "bashgetopt.h"
#include "common.h"
extern char **export_env;
/* printenv [varname]
 *
 * With no arguments, print the exported environment one NAME=VALUE per
 * line; with a name, print just that exported variable's value (or its
 * definition, for exported functions).
 * Returns EXECUTION_SUCCESS, EXECUTION_FAILURE when the name is not an
 * exported variable, or EX_USAGE on any option (none are accepted).
 */
int
printenv_builtin (list)
     WORD_LIST *list;
{
  register char **envp;
  int opt;
  SHELL_VAR *var;

  /* Reject every option: printenv takes none. */
  reset_internal_getopt ();
  while ((opt = internal_getopt (list, "")) != -1)
    {
      switch (opt)
	{
	default:
	  builtin_usage ();
	  return (EX_USAGE);
	}
    }
  list = loptend;

  /* printenv */
  if (list == 0)
    {
      maybe_make_export_env ();		/* this allows minimal code */
      for (envp = export_env; *envp; envp++)
	printf ("%s\n", *envp);
      return (EXECUTION_SUCCESS);
    }

  /* printenv varname */
  var = find_variable (list->word->word);
  if (var == 0 || (exported_p (var) == 0))
    return (EXECUTION_FAILURE);

  if (function_p (var))
    print_var_function (var);
  else
    print_var_value (var, 0);

  printf("\n");
  return (EXECUTION_SUCCESS);
}
/* Long help text shown by `help printenv`. */
char *printenv_doc[] = {
	"Display environment.",
	"",
	"Print names and values of environment variables",
	(char *)NULL
};

/* Registration record bash uses to load this builtin. */
struct builtin printenv_struct = {
	"printenv",		/* builtin name */
	printenv_builtin,	/* function implementing the builtin */
	BUILTIN_ENABLED,	/* initial flags for builtin */
	printenv_doc,		/* array of long documentation strings */
	"printenv [varname]",	/* usage synopsis */
	0			/* reserved for internal use */
};
|
import React from 'react';
import { PageTitle } from '../../layout-components';
import { ExampleWrapperSimple } from '../../layout-components';
import ElementsPagination1 from '../../example-components/ElementsPagination/Pagination1';
import ElementsPagination2 from '../../example-components/ElementsPagination/Pagination2';
import ElementsPagination3 from '../../example-components/ElementsPagination/Pagination3';
/**
 * Pagination demo page: renders a shared page title followed by the three
 * pagination example components, each wrapped in a simple example card.
 */
export default function ElementsPagination() {
  return (
    <>
      <PageTitle
        titleHeading="Pagination"
        titleDescription="Basic and dynamic pagination for use in your next awesome application."
      />

      <ExampleWrapperSimple>
        <ElementsPagination1 />
      </ExampleWrapperSimple>
      <ExampleWrapperSimple>
        <ElementsPagination2 />
      </ExampleWrapperSimple>
      <ExampleWrapperSimple>
        <ElementsPagination3 />
      </ExampleWrapperSimple>
    </>
  );
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.