text stringlengths 1 1.05M |
|---|
<filename>pages/_app.js
import reducer, { initialState } from "../reducer";
import { AppProvider } from "../StateProvider";
import "../styles/globals.css";
export default function App({
Component,
pageProps: { session, ...pageProps },
}) {
return (
<AppProvider initialState={initialState} reducer={reducer}>
<Component {...pageProps} />
</AppProvider>
);
}
|
// Storybook UI theme for the SEED-VUE component library.
import { create } from '@storybook/theming/create';
export default create({
  base: 'light', // start from Storybook's built-in light theme
  // Brand accent colors
  colorPrimary: '#3273dc',
  colorSecondary: '#01d1b2',
  // UI chrome (app background, content panel, borders)
  appBg: '#f2f2f2',
  appContentBg: '#fcfcfc',
  appBorderColor: '#dee5ec',
  appBorderRadius: 4,
  // Typography
  fontBase: '-apple-system, system-ui, BlinkMacSystemFont, Noto, Lato, Helvetica, Arial, sans-serif',
  fontCode: '', // empty string: fall back to Storybook's default monospace stack
  // Text colors
  textColor: '#333333',
  textInverseColor: '#f2f2f2',
  // Branding shown in the sidebar header
  brandTitle: 'SEED-VUE Storybook',
  brandImage: 'https://raw.githubusercontent.com/rogeriotaques/seed-vue/master/src/__stories__/assets/images/logo-seed-vue.png',
  brandUrl: 'https://rogeriotaques.github.io/seed-vue'
});
|
#!/bin/bash
# Launch the edge_manager container with the NVIDIA runtime and X11
# forwarding; a per-agent socket directory is shared at /home/agent/sock.
# Fix: $DISPLAY and $PWD are now quoted so paths containing spaces (or an
# unset DISPLAY) no longer break word-splitting of the docker arguments.
set -euo pipefail

mkdir -p /tmp/sock_agent_0

docker run -it --rm \
    --ipc=host \
    --runtime nvidia \
    -e DISPLAY="$DISPLAY" \
    -v /tmp/.X11-unix/:/tmp/.X11-unix \
    -v "$PWD/conf":/home/agent/conf \
    -v "$PWD/models":/home/agent/models \
    -v "$PWD/certs":/home/agent/certs \
    -v /tmp/sock_agent_0:/home/agent/sock \
    edge_manager:1.0
|
package aha.graphql.adapter.filestore;
import aha.graphql.domain.FileRepository;
import aha.graphql.domain.model.ImageStream;
import org.springframework.stereotype.Repository;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
/**
 * File-system backed implementation of {@link FileRepository}.
 * Each stored image gets its own freshly created temporary directory,
 * named with the product id as prefix, so concurrent uploads of files
 * with the same name cannot collide.
 */
@Repository
public class FilestoreImpl implements FileRepository {
    /**
     * Writes the image payload to a temp file and returns its path.
     *
     * NOTE(review): the bytes of {@code getBase64Stream()} are written
     * verbatim, so the file on disk presumably still contains base64 text
     * rather than decoded binary — confirm callers expect that.
     * NOTE(review): {@code imageStream.getName()} is used directly as a
     * file name; verify upstream sanitization (a name containing path
     * separators could escape the temp directory).
     *
     * @param imageStream upload payload (product id, file name, base64 data)
     * @return path of the newly written temp file
     * @throws IOException if the directory or file cannot be created/written
     */
    @Override
    public Path storeImage(ImageStream imageStream) throws IOException {
        Path tempDirWithPrefix = Files.createTempDirectory(imageStream.getProductId().toString());
        Path tmpFile = tempDirWithPrefix.resolve(Path.of(imageStream.getName()));
        Files.write(tmpFile, imageStream.getBase64Stream().getBytes());
        return tmpFile;
    }
}
|
'use strict';
const pm2p = require('@carnesen/pm2-as-promised');
const getBitcoindVersion = require('./getBitcoindVersion');
const { bitcoind } = require('./constants');
const log = require('./log');
module.exports = function* getVersionRunning() {
log.debug('getVersionRunning');
const descriptions = yield pm2p.describe(bitcoind);
if (descriptions.length === 0) {
log.debug(`getVersionRunning: ${ bitcoind } has not been started`);
return;
}
if (descriptions.length > 1) {
const message = `Expected to find 0 or 1 instance of ${ bitcoind }. Found ${ descriptions.length }`;
throw new Error(message);
}
const { status, pm_exec_path } = descriptions[0].pm2_env;
if (status !== 'online') {
log.debug(`getVersionRunning: ${ bitcoind } status is ${ status }`);
return;
}
log.debug('getRunningVersion: status is "online"');
return yield getBitcoindVersion(pm_exec_path);
};
|
import express from 'express'
const router = express.Router()
const controller = require('../../controllers/Controller')

// Public parliament API route table; each entry is wired to its
// controller handler below.
const routes = [
  // @route get api/parliament/getCabinetMinisters — all cabinet members
  { method: 'get', path: '/getCabinetMinisters', handler: controller.getCabinetMinisters },
  // @route get api/parliament/getPartyInfo — information about current parties
  { method: 'get', path: '/getPartyInfo', handler: controller.getPartyInfo },
  // @route post api/parliament/getRoleDescription — descriptions of all cabinet positions
  { method: 'post', path: '/getRoleDescription', handler: controller.getRoleDescription },
]
routes.forEach(({ method, path, handler }) => router[method](path, handler))

module.exports = router
|
import React, { useCallback, useEffect, useRef, useState } from "react"
import { useSpring } from "react-spring"
import styled from "@emotion/styled"
import { MdFilterList, MdClose } from "react-icons/md"
import { Link } from "gatsby"
import { breakpoints, colors } from "../../utils/styles"
import AnimatedMenuWrapper from "../AnimatedMenuWrapper/AnimatedMenuWrapper"
import ButtonToggle from "../Button/ButtonToggle"
// Wrapper that hides the whole mobile filter UI on large screens.
const MobileWrapper = styled.div`
  @media (min-width: ${breakpoints.lg}) {
    display: none;
  }
`
// Button that opens the slide-in filter menu.
const FilterButton = styled.button`
  border: 1px solid var(--main-dark);
  background: none;
  padding: 0.5rem 1rem;
  width: 100%;
  max-width: 150px;
  display: flex;
  justify-content: space-around;
  margin: 0 auto;
  span {
    //font-weight: 700;
    text-transform: uppercase;
  }
`
// Header row of the menu: title on the left, close button on the right.
const TopContainer = styled.div`
  display: flex;
  flex-direction: row;
  justify-content: space-between;
  align-items: center;
  border-bottom: 1px solid ${colors.lightGrey};
  padding: 1rem;
`
const Title = styled.h6`
  font-weight: 700;
  font-size: 1rem;
  margin: 0;
  text-transform: uppercase;
  text-align: left;
`
// Slide-in panel (animated by the spring passed via `style`).
const FilterMenu = styled(AnimatedMenuWrapper)`
  width: 80%;
  border-right: 1px solid var(--light-grey);
  overflow-y: hidden;
`
// Scrollable list of collection links inside the panel.
const ListContainer = styled.ul`
  padding: 0 1rem;
  list-style: none;
  overflow-y: auto;
  margin: 0;
  height: 100vh;
`
const NavItem = styled.li`
  margin-top: 1rem;
  text-align: left;
`
const NavLink = styled(Link)`
  text-decoration: none;
  font-size: 0.75rem;
  :hover {
    color: var(--accent);
  }
`
const MobileSideNav = ({ collections }) => {
const [isFilterOpen, setFilterOpen] = useState(false)
const filterMenu = useSpring({
transform: isFilterOpen ? `translate3d(0,0,0)` : `translate3d(-100%,0,0)`,
})
const toggleFilter = () => {
setFilterOpen(!isFilterOpen)
}
const node = useRef()
const handleClickOutside = useCallback(
e => {
if (node.current.contains(e.target)) {
// inside click
return
}
// outside click
setFilterOpen(false)
},
[setFilterOpen]
)
useEffect(() => {
if (isFilterOpen) {
document.addEventListener("mousedown", handleClickOutside)
} else {
document.removeEventListener("mousedown", handleClickOutside)
}
return () => {
document.removeEventListener("mousedown", handleClickOutside)
}
}, [isFilterOpen, handleClickOutside])
return (
<MobileWrapper ref={node}>
<FilterButton onClick={toggleFilter}>
<MdFilterList />
<span>filter</span>
</FilterButton>
<FilterMenu style={filterMenu}>
<TopContainer>
<Title>Filter by Collection</Title>
<ButtonToggle onClick={() => setFilterOpen(false)}>
<MdClose />
</ButtonToggle>
</TopContainer>
<ListContainer>
{collections.nodes.map(collection => {
const { title, handle } = collection
return (
<NavItem key={handle}>
<NavLink
to={`/collections/${handle}`}
activeStyle={{ color: "var(--grey)" }}
>
{title}
</NavLink>
</NavItem>
)
})}
</ListContainer>
</FilterMenu>
</MobileWrapper>
)
}
export default MobileSideNav
|
import {
clearDirtyParam,
replaceKeywordForArrayParams,
replaceKeywordForObjectParams,
} from './utils';
import type {
TParams,
TOptions,
IOptionsBaseProps,
IOptionsWithPatternProps,
IOptionsWithRegExpPatternProps,
} from './interface';
function interpole(source: string, params: TParams, options?: IOptionsWithPatternProps): string;
function interpole(source: string, params: TParams, options?: IOptionsWithRegExpPatternProps): string;
function interpole(source: string, params: TParams, options?: IOptionsBaseProps): string;
/**
 * Replaces keyword placeholders in `source` with values taken from
 * `params` (array or object form). A non-string `source` is returned
 * untouched. When `options.clearDirtyParam` is set, leftover unresolved
 * placeholders are stripped from the result.
 */
function interpole(source: string, params: TParams, options: TOptions = { clearDirtyParam: false }): string {
  if (typeof source !== 'string') {
    return source;
  }
  const { clearDirtyParam: needClearDirtyParam, ...replaceOptions } = options;
  const replaced = Array.isArray(params)
    ? replaceKeywordForArrayParams(source, params, replaceOptions)
    : replaceKeywordForObjectParams(source, params, replaceOptions);
  return needClearDirtyParam ? clearDirtyParam(replaced) : replaced;
}
export default interpole;
|
#!/bin/bash
# Run the github_commits.js input script with Splunk's bundled Node.
# Fix: "$@" (quoted) forwards each argument verbatim; the previous bare
# $@ re-split arguments containing whitespace.
current_dir=$(dirname "$0")
"$SPLUNK_HOME/bin/splunk" cmd node "$current_dir/app/github_commits.js" "$@"
|
/**
 * Used to define the anatomy/parts of a component in a way that provides
 * a consistent API for `className`, css selector and `theming`.
 */
export declare class Anatomy<T extends string = string> {
    // Component name; used as the prefix of every part's className/selector.
    private name;
    // Internal part registry backing `selectors`/`classNames`/`keys`.
    private map;
    // Tracks whether `.parts` has already been invoked (see `assert`).
    private called;
    constructor(name: string);
    /**
     * Prevents user from calling `.parts` multiple times.
     * It should only be called once.
     */
    private assert;
    /**
     * Add the core parts of the components
     */
    parts: <V extends string>(...values: V[]) => Pick<Anatomy<V>, "extend" | "selectors" | "classNames" | "keys" | "toPart" | "__type">;
    /**
     * Extend the component anatomy to includes new parts
     */
    extend: <U extends string>(...parts: U[]) => Pick<Anatomy<T | U>, "extend" | "selectors" | "classNames" | "keys" | "toPart" | "__type">;
    /**
     * Get all selectors for the component anatomy
     */
    get selectors(): Record<T, string>;
    /**
     * Get all classNames for the component anatomy
     */
    get classNames(): Record<T, string>;
    /**
     * Get all parts as array of string
     */
    get keys(): T[];
    /**
     * Creates the part object for the given part
     */
    toPart: (part: string) => {
        className: string;
        selector: string;
        toString: () => string;
    } & string;
    /**
     * Used to get the derived type of the anatomy
     */
    __type: T;
}
/** Factory for an untyped `Anatomy` — call `.parts(...)` to narrow the part names. */
export declare function anatomy(name: string): Anatomy<string>;
//# sourceMappingURL=anatomy.d.ts.map |
package secrets
import (
"context"
"time"
"github.com/pkg/errors"
"go.uber.org/zap"
core_v1 "k8s.io/api/core/v1"
kubeErrors "k8s.io/apimachinery/pkg/api/errors"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/labels"
core_v1_client "k8s.io/client-go/kubernetes/typed/core/v1"
)
// Client writes Kubernetes Secrets into a fixed namespace.
type Client struct {
	Logger        *zap.Logger
	Namespace     string
	Labels        labels.Set // label set for this client's secrets (not applied in Write itself)
	SecretsClient core_v1_client.SecretsGetter
}

// Write upserts the given secret: it is created when absent, otherwise the
// existing secret's Data is replaced. Races with concurrent writers
// (AlreadyExists on create, Conflict/NotFound on update) are retried after a
// 5 second pause until the operation succeeds or ctx is cancelled.
// Returns nil on success, ctx.Err() on cancellation, or a wrapped API error.
func (client *Client) Write(ctx context.Context, secret *core_v1.Secret) error {
	nameSpaced := client.SecretsClient.Secrets(client.Namespace)
	retry := false
	for {
		if retry {
			// Back off between attempts; bail out immediately if the
			// context ends while waiting.
			select {
			case <-ctx.Done():
				client.Logger.Sugar().Infof("Terminated early working on secret: %s", secret.Name)
				return ctx.Err()
			case <-time.After(5 * time.Second):
			}
		}
		// Fetch the current object (also tells us whether to create or update).
		prev, err := nameSpaced.Get(secret.Name, metav1.GetOptions{})
		if err != nil {
			if kubeErrors.IsNotFound(err) {
				_, createErr := nameSpaced.Create(secret)
				if createErr != nil {
					if kubeErrors.IsAlreadyExists(createErr) {
						retry = true
						continue // Concurrent create, retry
					}
					return errors.Wrapf(createErr, "unable to create secret: %s", secret.Name)
				}
				client.Logger.Sugar().Infof("Created secret: %s", secret.Name)
				break
			}
			return errors.Wrapf(err, "unexpected error condition for get secret: %s", secret.Name)
		}
		// Update path: only Data is overwritten; other fields of the
		// existing secret (labels, annotations, ...) are left as-is.
		prev.Data = secret.Data
		_, err = nameSpaced.Update(prev)
		if err == nil {
			client.Logger.Sugar().Infof("Updated secret: %s", secret.Name)
			break
		}
		if kubeErrors.IsConflict(err) || kubeErrors.IsNotFound(err) {
			retry = true
			continue // Concurrent update/delete, retry
		}
		return errors.Wrapf(err, "failed to update secret: %s", secret.Name)
	}
	return nil
}
|
package org.mnode.jot4j.dynamodb.mapper;
import com.amazonaws.services.dynamodbv2.datamodeling.*;
import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.NoArgsConstructor;
/**
 * DynamoDB item representing an iCal4j calendar in a single-table design.
 * PK and SK are identical ("GROUP#{groupId}#CALENDAR#{uid}"), so each
 * calendar is its own item; GSI2 mirrors PK/SK to allow the inverse
 * access pattern on the secondary index.
 */
@EqualsAndHashCode(callSuper = true)
@Data
@NoArgsConstructor
public class Calendar extends AbstractMapper {
    // Auto-generated UUID assigned by the DynamoDB mapper on first save.
    @DynamoDBAttribute(attributeName = "Uid")
    @DynamoDBAutoGeneratedKey
    private String uid;
    // Full calendar payload, (de)serialized via CalendarConverter.
    @DynamoDBAttribute(attributeName = "Data")
    @DynamoDBTypeConverted(converter = CalendarConverter.class)
    private net.fortuna.ical4j.model.Calendar data;
    /** Partition key: scoped by owning group, unique per calendar uid. */
    @Override
    @DynamoDBHashKey(attributeName = "PK")
    public String getPK() {
        return "GROUP#" + getGroupId() + "#CALENDAR#" + uid;
    }
    /** Sort key: same as PK (one item per calendar). */
    @Override
    @DynamoDBRangeKey(attributeName = "SK")
    public String getSK() {
        return getPK();
    }
    /** Item type discriminator. */
    @Override
    @DynamoDBAttribute(attributeName = "TYPE")
    public String getType() {
        return "CALENDAR";
    }
    /** GSI2 hash key mirrors the table PK. */
    @DynamoDBIndexHashKey(attributeName = "GSI2_PK", globalSecondaryIndexName = "GSI2")
    public String getGSI2PK() {
        return getPK();
    }
    /** GSI2 range key mirrors the table SK. */
    @DynamoDBIndexRangeKey(attributeName = "GSI2_SK", globalSecondaryIndexName = "GSI2")
    public String getGSI2SK() {
        return getSK();
    }
}
|
<reponame>camplight/hylo-evo
// Load the New Relic agent only when a license key is configured and we
// are not running the test suite.
const newrelic =
  process.env.NEW_RELIC_LICENSE_KEY && process.env.NODE_ENV !== 'test'
    ? require('newrelic')
    : null

/**
 * Returns the New Relic browser-timing HTML snippet, or an empty string
 * when the agent is disabled.
 */
export function getBrowserSnippet () {
  if (!newrelic) {
    return ''
  }
  // FIXME: this should be changed to represent the actual page being loaded.
  // it's here for now only because it's required for the browser snippet
  newrelic.setTransactionName('/')
  return newrelic.getBrowserTimingHeader()
}
|
<filename>pages.js
const express = require('express')
const app = express()
const bodyParser = require('body-parser');
const db = require('./database.js');
// Parse HTML form submissions (application/x-www-form-urlencoded).
app.use(bodyParser.urlencoded({ extended: true }));
// Render pages with EJS templates from ./views.
app.set('view engine', 'ejs')
// css — static assets (stylesheets, images) are served from ./public
app.use(express.static('public')); //inline
// Registers GET / — renders the landing page.
function homePage() {
  app.get('/', (req, res) => res.render('index', {}))
}
/**
 * Registers GET /login. Checks the submitted credentials against the
 * database; renders the home page on success, otherwise re-renders the
 * index page with an error message.
 *
 * Fix: restores `req.body.password` — the source contained a redacted
 * `<PASSWORD>` placeholder, which is a syntax error.
 * NOTE(review): GET requests normally carry no body, yet this handler
 * reads req.body; the route likely needs to be POST — confirm against
 * the login form's method before changing it.
 */
function login() {
  app.get('/login', function (req, res) {
    var email = req.body.email;
    var password = req.body.password;
    if (db.verifyCredentials(email, password)) {
      // login successful — show the home page
      res.render('home', {});
    }
    else {
      // invalid credentials — back to the sign-in page with an error
      res.render('index', {error: "Error signing you in."});
    }
  })
}
/**
 * Registers GET /signup. Creates a new account from the submitted form
 * fields; renders the home page on success, otherwise re-renders the
 * index page with an error.
 *
 * Fixes: restores `req.body.password` (redacted `<PASSWORD>` placeholder
 * was a syntax error) and corrects the `'incex'` typo so the error path
 * renders the existing 'index' view instead of a missing template.
 * NOTE(review): like /login, this reads req.body from a GET request —
 * probably should be POST; confirm with the signup form.
 */
function signup() {
  app.get('/signup', function (req, res) {
    var username = req.body.username;
    var email = req.body.email;
    var password = req.body.password;
    if (db.addUserToDb(username, email, password)) {
      // account created — show the home page
      res.render('home', {});
    }
    else {
      // creation failed — back to the index page with an error
      res.render('index', { error: "error creating account" });
    }
  })
}
// Registers GET /about — renders the about page.
function about() {
  app.get('/about', (req, res) => res.render('about', {}))
}
// Registers GET /courses — renders the course catalogue page.
function courses() {
  app.get('/courses', (req, res) => res.render('courses', {}))
}
/**
 * Wires up every page route and starts the HTTP server on port 3000.
 * Fix: also registers courses() — the function was defined but never
 * called, so GET /courses previously returned 404.
 */
function autoNavigate() {
  homePage();
  about();
  login();
  signup();
  courses();
  app.listen(3000, function () {
    console.log('listening on port 3000!')
  })
}
// exporting — expose each route installer individually plus the
// all-in-one autoNavigate. Fix: login, signup and courses were defined
// but never exported; added for completeness (backward compatible —
// existing exports are unchanged).
module.exports.homePage = homePage;
module.exports.about = about;
module.exports.login = login;
module.exports.signup = signup;
module.exports.courses = courses;
module.exports.autoNavigate = autoNavigate;
|
<html>
<head>
  <title>My Website</title>
</head>
<body>
  <!-- Top navigation: site logo plus page links -->
  <div class="navigation-bar">
    <div class="nav-logo"><img src="logo.png" alt="Logo"/></div>
    <div class="links">
      <a href="index.html">Home</a>
      <a href="info.html">Info</a>
    </div>
  </div>
  <!-- Main page content -->
  <div class="content">
    <h1>Information Section</h1>
    <!-- Content here -->
  </div>
</body>
</html>
# Copy the MuJoCo 1.5 license key into the current directory (mujoco-py
# expects mjkey.txt to be locatable at build/run time).
cp ~/.mujoco/mjpro150/mjkey.txt .
|
/**
* Copyright 2017 iovation, Inc.
* <p>
* Licensed under the MIT License.
* You may not use this file except in compliance with the License.
* A copy of the License is located in the "LICENSE.txt" file accompanying
* this file. This file is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.iovation.launchkey.sdk.integration.entities;
import com.iovation.launchkey.sdk.domain.servicemanager.Service;
import java.net.URI;
import java.security.interfaces.RSAPublicKey;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.UUID;
/**
 * Immutable test-support value object mirroring a {@link Service}, with a
 * mutable list for accumulating associated public keys.
 * Equality, hashCode and toString intentionally ignore {@code publicKeys}.
 */
public class ServiceEntity {
    private final UUID id;
    private final String name;
    private final String description;
    private final java.net.URI icon;
    private final URI callbackURL;
    private final Boolean active;
    private final List<PublicKeyEntity> publicKeys;

    /**
     * @param id service identifier; must not be null
     * @throws IllegalArgumentException when {@code id} is null
     */
    public ServiceEntity(UUID id, String name, String description, URI icon, URI callbackURL, Boolean active) {
        if (id == null) throw new IllegalArgumentException("Argument \"id\" cannot be null.");
        this.id = id;
        this.name = name;
        this.description = description;
        this.icon = icon;
        this.callbackURL = callbackURL;
        this.active = active;
        this.publicKeys = new ArrayList<>();
    }

    public UUID getId() {
        return id;
    }

    public String getName() {
        return name;
    }

    public String getDescription() {
        return description;
    }

    public URI getIcon() {
        return icon;
    }

    public URI getCallbackURL() {
        return callbackURL;
    }

    public Boolean getActive() {
        return active;
    }

    /** Live (mutable) list — callers add keys directly to it. */
    public List<PublicKeyEntity> getPublicKeys() {
        return publicKeys;
    }

    /** Builds an entity from a domain {@link Service} (public keys start empty). */
    public static ServiceEntity fromService(Service service) {
        return new ServiceEntity(service.getId(), service.getName(), service.getDescription(),
                service.getIcon(), service.getCallbackURL(), service.isActive());
    }

    // Null-safe equality helper used by equals() below.
    private static boolean fieldEquals(Object a, Object b) {
        return a == null ? b == null : a.equals(b);
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (!(o instanceof ServiceEntity)) return false;
        ServiceEntity other = (ServiceEntity) o;
        return fieldEquals(id, other.id)
                && fieldEquals(name, other.name)
                && fieldEquals(description, other.description)
                && fieldEquals(icon, other.icon)
                && fieldEquals(callbackURL, other.callbackURL)
                && fieldEquals(active, other.active);
    }

    @Override
    public int hashCode() {
        // Same 31-based rolling hash as the original field-by-field version.
        int result = 0;
        for (Object field : new Object[]{id, name, description, icon, callbackURL, active}) {
            result = 31 * result + (field == null ? 0 : field.hashCode());
        }
        return result;
    }

    @Override
    public String toString() {
        return "ServiceEntity{" +
                "id=" + id +
                ", name='" + name + '\'' +
                ", description='" + description + '\'' +
                ", icon=" + icon +
                ", callbackURL=" + callbackURL +
                ", active=" + active +
                '}';
    }
}
|
import { ChangeDetectionStrategy, Component, forwardRef, Injector, Input, OnInit } from '@angular/core';
import { ControlValueAccessor, NG_VALUE_ACCESSOR } from '@angular/forms';
import { FilterBase, FilterElement, FilterMatchType, Identifiable } from '@price-depo-ui/data-handling';
// rxjs patch imports: each side-effect import installs one operator/creator
// used by this component. 'observable/of' and 'operator/map' were missing
// even though Observable.of and .map() are used below.
import 'rxjs/add/observable/merge';
import 'rxjs/add/observable/of';
import 'rxjs/add/operator/catch';
import 'rxjs/add/operator/debounceTime';
import 'rxjs/add/operator/distinctUntilChanged';
import 'rxjs/add/operator/do';
import 'rxjs/add/operator/filter';
import 'rxjs/add/operator/map';
import 'rxjs/add/operator/switchMap';
import { BehaviorSubject } from 'rxjs/BehaviorSubject';
import { Observable } from 'rxjs/Observable';
import { ReplaySubject } from 'rxjs/ReplaySubject';
import { Subject } from 'rxjs/Subject';
import { searchProviderTokens } from '../../../../../apps/admin/src/app/tokens/search-provider.tokens';
import { SearchableDropdownControlDef } from '../../models/dynamic-form.interface';
import { SearchProvider } from '../../models/search-provider.interface';
/**
 * Searchable dropdown form control for dynamic forms.
 * Implements ControlValueAccessor so it can back an Angular form control:
 * the control's value is the selected item's id (ID), while display and
 * search use the full item (T) fetched through a SearchProvider that is
 * resolved by name from the injector at init time.
 */
@Component( {
  selector: 'pd-dynamic-form-searchable-dropdown',
  templateUrl: './searchable-dropdown.component.html',
  styleUrls: [ './searchable-dropdown.component.scss' ],
  providers: [
    {
      provide: NG_VALUE_ACCESSOR,
      useExisting: forwardRef( () => DynamicFormSearchableDropdownComponent ),
      multi: true
    }
  ],
  changeDetection: ChangeDetectionStrategy.OnPush
} )
export class DynamicFormSearchableDropdownComponent<T extends Identifiable<ID>, ID> implements OnInit, ControlValueAccessor {
  @Input() controlDef: SearchableDropdownControlDef;
  isDisabled = false;
  // Currently selected item (merged from UI selections and form writes).
  selectedValue$: Observable<T>;
  // Items matching the latest search expression.
  searchResults$: Observable<T[]>;
  // Ids written by the form API; ReplaySubject(1) keeps the latest write
  // for subscribers that attach later.
  private controlValueSource = new ReplaySubject<ID>( 1 );
  // Items picked directly in the UI.
  private selectedValueSource = new BehaviorSubject<T>( undefined );
  // Raw search text typed by the user.
  private searchExpressionSource = new Subject<string>();
  private searchProvider: SearchProvider<T, ID, any>;
  // Callback registered by Angular forms; no-op until registerOnChange runs.
  private _onChange: ( changed: any ) => void = ( _: any ) => {};
  constructor( private readonly injector: Injector ) {
    this.selectedValue$ = this.createSelectedValue$();
    this.searchResults$ = this.createSearchResult$();
  }
  /** Merges UI selections with items resolved from form-written ids. */
  createSelectedValue$(): Observable<T> {
    return Observable.merge(
      this.selectedValueSource.asObservable(),
      this.handleControlValueChange$()
    ) as Observable<T>;
  }
  /**
   * Resolves each distinct id written via writeValue to its full item.
   * Lookup errors degrade to `undefined` (no selection) instead of
   * breaking the stream.
   */
  handleControlValueChange$(): Observable<T> {
    return this.controlValueSource.asObservable()
      .distinctUntilChanged()
      .switchMap( selectedId => this.searchProvider.getById( selectedId ) )
      .catch( () => Observable.of( undefined ) );
  }
  /**
   * Debounced search pipeline: text -> "contains" filter on the
   * configured display key -> provider query. Empty text yields [].
   */
  createSearchResult$(): Observable<T[]> {
    return this.searchExpressionSource.asObservable()
      .debounceTime( 300 )
      .distinctUntilChanged()
      .map( ( searchExpression: string ): FilterBase<any> => {
        if ( !searchExpression ) {
          return null;
        }
        const filterElement: FilterElement = {
          value: searchExpression,
          matchType: FilterMatchType.contains
        };
        return {
          [ this.controlDef.displayKey ]: filterElement
        };
      } )
      .switchMap( filter => {
        if ( filter ) {
          return this.searchProvider.filterBy( filter );
        } else {
          return Observable.of( [] );
        }
      } );
  }
  // Resolve the search provider by the name configured on the control
  // definition; fail fast on unknown names.
  ngOnInit() {
    const token = searchProviderTokens[ this.controlDef.searchProviderName ];
    if ( !token ) {
      throw new Error( 'Unrecognized search provider name: ' + this.controlDef.searchProviderName );
    }
    this.searchProvider = this.injector.get( token );
  }
  // ControlValueAccessor: form writes an id; null/undefined are ignored
  // (the previous selection is kept).
  writeValue( newValue: ID ): void {
    if ( newValue !== undefined && newValue !== null ) {
      this.controlValueSource.next( newValue );
    }
  }
  registerOnChange( fn: any ): void {
    this._onChange = fn;
  }
  // Touched state is intentionally not tracked for this control.
  registerOnTouched( fn: any ): void {
  }
  setDisabledState( isDisabled: boolean ): void {
    this.isDisabled = isDisabled;
  }
  /** Template hook: user typed in the search box. */
  onSearch( text: string ) {
    this.searchExpressionSource.next( text );
  }
  /** Template hook: user picked an item; propagate its id to the form. */
  onSelect( selected: T ) {
    this.selectedValueSource.next( selected );
    this._onChange( selected ? selected.id : selected );
  }
}
|
<gh_stars>1-10
import idx from 'idx';
import produce from 'immer';
import * as R from 'ramda';
import {
deleteProjectCameraLocations,
getAllProjectCameraLocations,
getProjectCameraLocations,
getProjectStudyAreas,
getSpeciesGroupByCameraLocation,
getSpeciesGroupByStudyArea,
lockProjectCameraLocations,
postProjectCameraLocations,
postProjectStudyAreas,
putProjectCameraLocations,
putProjectStudyAreas,
unlockProjectCameraLocations,
} from '@/service';
import { getLanguage } from '@/utils/i18n';
import { setTwoDigitFormat } from '@/utils/dateHelper';
// Study-area data lives in its own Vuex module because keeping it inside
// the `project` module would make that module too large.
const state = {
  studyAreas: [], // list of study areas in the project
  cameraLocations: [], // camera locations of the selected study area; only the latest query result is kept
  speciesGroup: {
    byStudyArea: [], // top-5 species and retrieved-image counts per study area
    byCameraLocation: [], // top-5 species and retrieved-image counts per camera location
  },
};
const getters = {
  // Study areas localized to the active language, each annotated with a
  // map position taken from its own (or its first child's) camera location.
  // The JSON round-trip deep-copies state so immer can mutate freely.
  studyAreas: state =>
    produce(JSON.parse(JSON.stringify(state.studyAreas)), draft =>
      draft.forEach(d => {
        d.title = d.title[getLanguage()];
        d.children.forEach(v => (v.title = v.title[getLanguage()]));
        const { latitude, longitude } =
          d.cameraLocation || idx(d, _ => _.children[0].cameraLocation) || {};
        d.position = {
          lat: latitude,
          lng: longitude,
        };
      }),
    ),
  cameraLocations: state => state.cameraLocations,
  // Display title for a study area id; child areas are rendered as
  // "parent - child". Empty string when the id is unknown.
  studyAreaTitle: (_, getters) => id => {
    let title = '';
    getters.studyAreas.forEach(v => {
      // first level of the study-area tree
      if (v.id === id) {
        title = v.title;
      } else if (v.children.length > 0) {
        v.children.forEach(v2 => {
          // second level of the study-area tree
          if (v2.id === id) {
            title = `${v.title} - ${v2.title}`;
          }
        });
      }
    });
    return title;
  },
  // Name of a camera location by id; empty string when not found.
  cameraLocationsTitle: (_, getters) => id => {
    return R.pipe(
      R.find(R.propEq('id', id)),
      R.ifElse(R.isNil, R.always(''), v => v.name),
    )(getters.cameraLocations);
  },
  // Earliest "YYYY-MM" present in the by-study-area metrics, or ''.
  speciesGroupStartDate: state => {
    const metrics = idx(state, _ => _.speciesGroup.byStudyArea[0].metrics);
    if (!metrics || metrics.length === 0) return '';
    // The API response is not ordered by date, so scan every entry.
    const { startYear, startMonth } = metrics.reduce(
      ({ startYear, startMonth }, { year, month }) => {
        const startDate = parseInt(
          `${startYear}${setTwoDigitFormat(startMonth)}`,
          10,
        );
        const currentDate = parseInt(`${year}${setTwoDigitFormat(month)}`, 10);
        if (!startYear || !startMonth || startDate > currentDate) {
          return {
            startYear: year,
            startMonth: month,
          };
        }
        return { startYear, startMonth };
      },
      {},
    );
    return `${startYear}-${setTwoDigitFormat(startMonth)}`;
  },
  // Latest "YYYY-MM" present in the by-study-area metrics, or ''.
  speciesGroupEndDate: state => {
    const metrics = idx(state, _ => _.speciesGroup.byStudyArea[0].metrics);
    if (!metrics || metrics.length === 0) return '';
    // The API response is not ordered by date, so scan every entry.
    const { endYear, endMonth } = metrics.reduce(
      ({ endYear, endMonth }, { year, month }) => {
        const endDate = parseInt(
          `${endYear}${setTwoDigitFormat(endMonth)}`,
          10,
        );
        const currentDate = parseInt(`${year}${setTwoDigitFormat(month)}`, 10);
        if (!endYear || !endMonth || endDate < currentDate) {
          return {
            endYear: year,
            endMonth: month,
          };
        }
        return { endYear, endMonth };
      },
      {},
    );
    return `${endYear}-${setTwoDigitFormat(endMonth)}`;
  },
  // First five distinct species (id -> species) found across all
  // by-study-area metrics.
  topFiveSpecies: state => {
    const speciesGroup = idx(state, _ => _.speciesGroup.byStudyArea) || [];
    const allSpeciesData = speciesGroup.reduce((res, { metrics }) => {
      return metrics.reduce(
        (merge, { species }) => [...merge, ...species],
        res,
      );
    }, []);
    const topFive = {};
    allSpeciesData.some(({ speciesId, species }) => {
      if (speciesId && species) topFive[speciesId] = species;
      // BUG FIX: `topFive` is a plain object, so the previous
      // `topFive.length >= 5` was always `undefined >= 5` (false) and
      // the loop never stopped at five entries. Count keys instead.
      return Object.keys(topFive).length >= 5;
    });
    return topFive;
  },
  // Species per study area / camera location for the "YYYY-MM" `date`,
  // keyed by the area or camera id.
  getSpeciesGroups: state => ({ type, date }) => {
    const group = idx(state, _ => _.speciesGroup[type]) || [];
    return group.reduce((res, { studyAreaId, cameraLocationId, metrics }) => {
      const selectedDateSpecies =
        metrics.find(
          ({ year, month }) => `${year}-${setTwoDigitFormat(month)}` === date,
        ) || [];
      return {
        ...res,
        [studyAreaId || cameraLocationId]: selectedDateSpecies.species || [],
      };
    }, {});
  },
};
// Synchronous state writers for the study-area module.
const mutations = {
  setStudyAreas(state, studyAreas) {
    state.studyAreas = studyAreas;
  },
  setCameraLocations(state, cameraLocations) {
    state.cameraLocations = cameraLocations;
  },
  // Drop the cached camera locations of the previously selected area.
  resetCameraLocations(state) {
    state.cameraLocations = [];
  },
  // `type` is 'byStudyArea' or 'byCameraLocation'.
  setSpeciesGroup(state, { type, data }) {
    state.speciesGroup[type] = data;
  },
};
const actions = {
  // Load all study areas of a project into the store.
  async getProjectStudyAreas({ commit }, id) {
    const data = await getProjectStudyAreas(id);
    commit('setStudyAreas', data);
  },
  // Create a study area (title stored per-language), then refresh the list.
  async postProjectStudyAreas({ dispatch }, { id, area }) {
    const body = {
      ...area,
      title: {
        [getLanguage()]: area.title,
      },
    };
    await postProjectStudyAreas(id, body);
    dispatch('getProjectStudyAreas', id);
  },
  // Update a study area, then refresh the list.
  async putProjectStudyAreas({ dispatch }, { id, area, areaId }) {
    const body = {
      ...area,
      title: {
        [getLanguage()]: area.title,
      },
    };
    await putProjectStudyAreas(id, body, areaId);
    dispatch('getProjectStudyAreas', id);
  },
  // Load camera locations of one study area (first page / default query).
  async getProjectCameraLocations({ commit }, { projectId, studyAreaId }) {
    const data = await getProjectCameraLocations(projectId, studyAreaId);
    commit('setCameraLocations', idx(data, _ => _.items) || []);
  },
  // Load camera locations of one study area with an explicit query.
  async getAllProjectCameraLocations(
    { commit },
    { projectId, studyAreaId, query },
  ) {
    const data = await getAllProjectCameraLocations(
      projectId,
      studyAreaId,
      query,
    );
    commit('setCameraLocations', idx(data, _ => _.items) || []);
  },
  // Diff `payload` against the cached camera locations and apply the
  // changes as parallel create/update/delete calls, then refresh.
  async modifyProjectCameraLocations(
    { state, dispatch },
    { projectId, studyAreaId, payload },
  ) {
    const post = payload.filter(v => v.id === undefined); // records without an id are new
    const put = R.innerJoin(
      (record, v) => record.id === v.id && R.equals(record, v) === false, // same id but changed content
      payload,
      state.cameraLocations,
    );
    const del = R.differenceWith(
      (a, b) => a.id === b.id,
      state.cameraLocations,
      payload,
    );
    await Promise.all([
      ...post.map(v => postProjectCameraLocations(projectId, v.studyArea, v)),
      ...put.map(v => putProjectCameraLocations(projectId, v.id, v)),
      ...del.map(v => deleteProjectCameraLocations(projectId, v.id)),
    ]);
    dispatch('getProjectCameraLocations', { projectId, studyAreaId });
  },
  // Lock or unlock a batch of camera locations, then refresh both the
  // camera-location list and the study-area list.
  async setLockProjectCameraLocations(
    { dispatch },
    { projectId, studyAreaId, cameraLocations, isLock },
  ) {
    await Promise.all(
      cameraLocations.map(id =>
        isLock
          ? lockProjectCameraLocations(projectId, id)
          : unlockProjectCameraLocations(projectId, id),
      ),
    );
    dispatch('getProjectCameraLocations', { projectId, studyAreaId });
    dispatch('getProjectStudyAreas', projectId);
  },
  // Load per-study-area species statistics.
  async loadSpeciesGroupByStudyArea({ commit }, projectId) {
    const data = await getSpeciesGroupByStudyArea(projectId);
    commit('setSpeciesGroup', { type: 'byStudyArea', data });
  },
  // Load per-camera-location species statistics for one study area.
  async loadSpeciesGroupByCameraLocation(
    { commit },
    { projectId, studyAreaId },
  ) {
    const data = await getSpeciesGroupByCameraLocation({
      projectId,
      studyAreaId,
    });
    commit('setSpeciesGroup', { type: 'byCameraLocation', data });
  },
};
// Namespaced Vuex module; mounted by the root store under its own key.
export default {
  namespaced: true,
  state,
  getters,
  mutations,
  actions,
};
/*
https://github.com/TaiBIF/camera-trap-api/wiki/API-v1-Document#post-projectsprojectidstudy-areasstudyAreaIdcamera-locations
*/
|
<reponame>karsinkk/Anomaly-Detection
import numpy as np
import datetime
import json
import pyspark
from pyspark.sql import *
import pyspark.sql.functions as Func
from pyspark.sql.types import *
from pyspark import SparkContext
from pyspark.streaming import StreamingContext
from pyspark.streaming.kafka import KafkaUtils
import pyspark.mllib.clustering as clustering
from pyspark.mllib.linalg import Vectors
from kafka import KafkaProducer
# Spark wiring: one SparkContext/SparkSession pair and a streaming context
# with a 5-second micro-batch interval.
sc = SparkContext(appName="PythonSparkStreamingKafka")
sc.setLogLevel("WARN")
spark = SparkSession \
    .builder \
    .appName("SparkSession") \
    .getOrCreate()
ssc = StreamingContext(sc, 5)
print(sc)
# Pre-trained KMeans model used to score incoming records by distance
# to the nearest cluster center.
model = clustering.KMeansModel.load(sc, "hdfs://localhost:9000/data/KMeansModel")
# NOTE(review): this module-level producer appears unused — SendResult
# creates its own per-partition producer; confirm before removing.
producer = KafkaProducer(value_serializer=lambda v: json.dumps(v).encode('utf-8'))
def distance(point):
    """Squared Euclidean distance from `point` to its nearest KMeans center.

    Uses the module-level `model` loaded above.
    """
    center = model.centers[model.predict(point)]
    return Vectors.dense(point).squared_distance(center)
def PreProcessData(rdd):
    """Clean one batch: blank fields become '0', all fields become floats.

    The RDD of field lists is round-tripped through a DataFrame (using the
    module-level `DataSchema`) so downstream code receives Row objects.
    """
    rdd = rdd.map(lambda line: [x if x != '' else '0' for x in line])
    rdd = rdd.map(lambda line: [float(x) for x in line])
    df = spark.createDataFrame(data=rdd, schema=DataSchema)
    return df.rdd
def ComputeDistances(rdd):
    """Score each record: distance of its (activity, hour) vector to the model.

    Returns an indexed RDD of (row_index, [distance]) so it can be joined
    back to the original records downstream. The `take` calls are debug
    prints and trigger extra job executions.
    """
    df = spark.createDataFrame(data=rdd, schema=DataSchema)
    # Convert UTC timestamp to Hours
    data = df
    Hours = Func.udf(lambda x: datetime.datetime.utcfromtimestamp(float(x) / 1000).strftime('%H'), StringType())
    data = data.withColumn('time', Hours(data.time))
    data = data.withColumn('time', data.time.cast(IntegerType()))
    # Calculate Activity: sum all traffic columns into one 'total' column.
    data = data.withColumn('sms_in', data.sms_in + data.sms_out + data.call_in + data.call_out + data.internet)
    data = data.withColumnRenamed('sms_in', 'total')
    data = data.drop('square_id', 'country', 'sms_out', 'call_out', 'internet', 'call_in')
    # Build [total, time] feature vectors and score them against the model.
    data = data.select(Func.array('total', 'time').alias("value")).rdd.map(lambda x: x.value)
    Distances = data.map(lambda point: distance(point))
    print("distances--- ", Distances.take(3))
    df = df.rdd.map(lambda x: [y for y in x])
    Result = Distances.zip(df)
    print("Result---", Result.take(2))
    return Distances.zipWithIndex().map(lambda e: (e[1], [e[0]]))
def SendResult(records):
    """Publish one partition of joined (index, (distance, row)) records to Kafka.

    A fresh producer per partition is intentional: the function runs on
    executors, where the driver's module-level producer is not usable.
    Each record's row dict is augmented with its anomaly score as 'error'
    and sent to the 'result' topic.
    """
    print(type(records))
    producer = KafkaProducer(value_serializer=lambda v: json.dumps(v).encode('utf-8'))
    for record in records:
        record = record[1]
        d0 = record[0][0]
        d1 = record[1][0].asDict()
        d1['error'] = d0
        print(type(d1))
        print(d1)
        producer.send('result', d1)
    producer.flush()
    producer.close()
# Read Data files with a Custom Schema - CDR Data was obtained from the Open Big Data project by Dandelion,
# It is availabe at https://dandelion.eu/datamine/open-big-data/
DataSchema = StructType([StructField("square_id", FloatType(), True), \
                         StructField("time", StringType(), True), \
                         StructField("country", FloatType(), True), \
                         StructField("sms_in", FloatType(), True), \
                         StructField("sms_out", FloatType(), True), \
                         StructField("call_in", FloatType(), True), \
                         StructField("call_out", FloatType(), True), \
                         StructField("internet", FloatType(), True)])
# Consume tab-separated CDR lines from the 'realtime' Kafka topic.
kvs = KafkaUtils.createStream(ssc, 'localhost:2181', 'spark-streaming', {'realtime': 1})
lines = kvs.map(lambda x: x[1])
rows = lines.map(lambda line: line.split("\n")[0])
rows = rows.map(lambda line: line.split("\t"))
# Clean each batch, score it, and pair scores with their source rows by index.
Data = rows.transform(lambda rdd: PreProcessData(rdd))
Result = Data.transform(lambda rdd: ComputeDistances(rdd))
Data = Data.transform(lambda rdd: rdd.zipWithIndex().map(lambda e: (e[1], [e[0]])))
Data.pprint()
Result.pprint()
Final = Result.join(Data)
Final.pprint()
# Push every scored record to the 'result' topic, one partition at a time.
Final.foreachRDD(lambda rdd: rdd.foreachPartition(SendResult))
ssc.start()
ssc.awaitTermination()
|
import tensorflow as tf
import numpy as np

# Tiny linear-regression demo: learn y = -x + 1 from four points with a
# single Dense unit trained by SGD on mean squared error.
model = tf.keras.Sequential()
model.add(tf.keras.layers.Dense(1))
# Fix: dropped metrics=['accuracy'] — accuracy is a classification metric
# and is meaningless for a regression loss; the MSE loss itself is the
# relevant training signal.
model.compile(loss='mean_squared_error', optimizer='sgd')
# Generate dummy data: shape (4, 1) — one feature per sample.
x_train = np.array([[1], [2], [3], [4]], dtype=float)
y_train = np.array([[0], [-1], [-2], [-3]], dtype=float)
model.fit(x_train, y_train, epochs=100)
# Predict. Fix: input must be 2-D (batch, features) to match the training
# data; the previous 1-D np.array([5]) is rejected by Dense layers.
x_test = np.array([[5]], dtype=float)
print(model.predict(x_test))
<filename>openvalidation-antlr/src/test/java/io/openvalidation/antlr/test/transformation/PTModelTransformerTest.java<gh_stars>10-100
/*
* Copyright 2019 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.openvalidation.antlr.test.transformation;
import static io.openvalidation.common.unittesting.astassertion.ModelRootAssertion.assertAST;
import io.openvalidation.antlr.ANTLRExecutor;
import io.openvalidation.common.ast.ASTComparisonOperator;
import io.openvalidation.common.ast.ASTModel;
import io.openvalidation.common.utils.GrammarBuilder;
import org.junit.jupiter.api.Test;
/**
 * Unit tests for the parse-tree-to-AST model transformation.
 *
 * <p>Each test builds a grammar snippet with {@link GrammarBuilder}, runs it through {@link
 * ANTLRExecutor}, and asserts the shape of the resulting {@link ASTModel}: how many global
 * elements (rules, variables, comments) were created and, where relevant, their operands.
 */
class PTModelTransformerTest {

  /** An empty input must produce an empty AST that still echoes the preprocessed source. */
  @Test
  void should_create_ast_with_no_global_elements() throws Exception {
    // assemble
    String input = "";
    // act
    ASTModel ast = ANTLRExecutor.run(input);
    // assert
    assertAST(ast).sizeOfElements(0).hasPreprocessedSource(input);
  }

  /**
   * Two paragraph-separated rule blocks must yield exactly two rules, each with an EQUALS
   * condition carrying the expected left/right string operands.
   */
  @Test
  void should_create_2_rules() throws Exception {
    String input =
        GrammarBuilder.createRule()
            .EQ("a", "b")
            .THEN("error")
            .PARAGRAPH()
            .IF()
            .EQ("c", "d")
            .THEN("error")
            .getText();
    // act
    ASTModel ast = ANTLRExecutor.run(input);
    // assert
    assertAST(ast)
        .sizeOfElements(2)
        .hasPreprocessedSource(input)
        .variables()
        .hasSizeOf(0)
        .parentRoot()
        .comments()
        .hasSizeOf(0)
        .parentRoot()
        .rules()
        .hasSizeOf(2)
        .first()
        .condition()
        .hasOperator(ASTComparisonOperator.EQUALS)
        .leftString()
        .hasValue("a")
        .parentCondition()
        .rightString()
        .hasValue("b")
        .parentModel()
        .rules()
        .second()
        .condition()
        .hasOperator(ASTComparisonOperator.EQUALS)
        .leftString()
        .hasValue("c")
        .parentCondition()
        .rightString()
        .hasValue("d");
  }

  /** Constraint-style ("MUST") rule blocks must also be recognized as rules — two of them here. */
  @Test
  void should_create_2_rules_from_constraint_rules() throws Exception {
    String input =
        GrammarBuilder.create()
            .with(1337)
            .MULTIPLY(42)
            .MUST()
            .with(1000)
            .PARAGRAPH()
            .with(1337)
            .MULTIPLY(42)
            .MUST()
            .with(1000)
            .getText();
    // act
    ASTModel ast = ANTLRExecutor.run(input);
    // assert
    assertAST(ast)
        .sizeOfElements(2)
        .hasPreprocessedSource(input)
        .variables()
        .hasSizeOf(0)
        .parentRoot()
        .comments()
        .hasSizeOf(0)
        .parentRoot()
        .rules()
        .hasSizeOf(2);
  }

  /** Two paragraph-separated comment blocks must map to two comment elements, nothing else. */
  @Test
  void should_create_2_comments() throws Exception {
    String input =
        GrammarBuilder.create()
            .COMMENT("Das ist Kommentar 1")
            .PARAGRAPH()
            .COMMENT("Das ist Kommentar 2")
            .getText();
    // act
    ASTModel ast = ANTLRExecutor.run(input);
    // assert
    assertAST(ast)
        .sizeOfElements(2)
        .hasPreprocessedSource(input)
        .variables()
        .hasSizeOf(0)
        .parentRoot()
        .rules()
        .hasSizeOf(0)
        .parentRoot()
        .comments()
        .hasSizeOf(2);
  }

  /** Two paragraph-separated variable declarations must map to two variable elements. */
  @Test
  void should_create_2_variables() throws Exception {
    String input =
        GrammarBuilder.create()
            .VARIABLE("something", "varx")
            .PARAGRAPH()
            .VARIABLE("anything", "vary")
            .getText();
    // act
    ASTModel ast = ANTLRExecutor.run(input);
    // assert
    assertAST(ast)
        .sizeOfElements(2)
        .hasPreprocessedSource(input)
        .rules()
        .hasSizeOf(0)
        .parentRoot()
        .comments()
        .hasSizeOf(0)
        .parentRoot()
        .variables()
        .hasSizeOf(2);
  }

  /** A mix of variable, comment and rule blocks must produce one element of each kind. */
  @Test
  void should_create_multiple_different_global_elements() throws Exception {
    String input =
        GrammarBuilder.create()
            .VARIABLE("something", "varx")
            .PARAGRAPH()
            .COMMENT("This is a comment")
            .PARAGRAPH()
            .IF()
            .EQ("left", "right")
            .THEN("error")
            .PARAGRAPH()
            .getText();
    // act
    ASTModel ast = ANTLRExecutor.run(input);
    // assert
    assertAST(ast)
        .sizeOfElements(3)
        .hasPreprocessedSource(input)
        .rules()
        .hasSizeOf(1)
        .parentRoot()
        .comments()
        .hasSizeOf(1)
        .parentRoot()
        .variables()
        .hasSizeOf(1);
  }

  /** Leading/trailing empty paragraphs are noise and must not create extra elements. */
  @Test
  void should_create_variable_with_noise_paragraphs() throws Exception {
    String input =
        GrammarBuilder.create().PARAGRAPH().VARIABLE("anything", "varx").PARAGRAPH().getText();
    // act
    ASTModel ast = ANTLRExecutor.run(input);
    // assert
    assertAST(ast)
        .sizeOfElements(1)
        .hasPreprocessedSource(input)
        .rules()
        .hasSizeOf(0)
        .parentRoot()
        .comments()
        .hasSizeOf(0)
        .parentRoot()
        .variables()
        .hasSizeOf(1);
  }
}
|
<gh_stars>10-100
package com.github.robindevilliers.welcometohell.wizard.expression.function;
import com.github.robindevilliers.welcometohell.wizard.expression.Function;
import java.util.Map;
/** Constant expression function that always evaluates to {@code false}. */
public class FalseFunction implements Function<Boolean> {

  /**
   * Evaluates this function.
   *
   * @param scope the variable bindings available during evaluation (ignored)
   * @return always {@code Boolean.FALSE}, regardless of scope contents
   */
  @Override
  public Boolean apply(Map<String, Object> scope) {
    return Boolean.FALSE;
  }
}
|
# Pinned toolchain versions for the retroxpert environment.
TORCH_VER=1.2.0
CUDA_VER=10.1
# NOTE(review): CUDA_CODE is not referenced below — presumably consumed by
# a later step or pip wheel URL; confirm before removing.
CUDA_CODE=cu101
# Load conda's shell functions (cluster-specific path), then work inside
# the project environment.
source /cm/shared/engaging/anaconda/2018.12/etc/profile.d/conda.sh
conda activate retroxpert_
# PyTorch + CUDA toolkit and text extras from the pytorch channel.
conda install -y pytorch=${TORCH_VER} torchvision cudatoolkit=${CUDA_VER} torchtext -c pytorch
# Chemistry toolkit, pinned for reproducibility.
conda install -y rdkit=2019.03.4.0 -c rdkit
# Graph library and NMT/graph dependencies, pinned.
pip install dgl==0.4.2
pip install OpenNMT-py==1.0.0 networkx==2.4
|
import pytest
from uri.qso import QSO, SENTINEL, Bucket
# Canonical query strings with their expected positional arguments (tuple)
# and keyword arguments (dict). Shared by several parametrized test classes.
EXAMPLES = [
		# Abstract
		('', (), {}),
		('foo=bar', (), {'foo': 'bar'}),
		# Multiple Arguments
		('foo&bar&baz&diz', ('foo', 'bar', 'baz', 'diz'), {}),
		# From Wikipedia - https://en.wikipedia.org/wiki/Query_string
		('name=ferret', (), {'name': 'ferret'}),
		('name=ferret&color=purple', (), {'name': 'ferret', 'color': 'purple'}),
		('field1=value1&field2=value2&field3=value3', (), {'field1': 'value1', 'field2': 'value2', 'field3': 'value3'}),
		('argument1+argument2+argument3', ('argument1 argument2 argument3', ), {}),
		# RFC 3986 (URI) - http://pretty-rfc.herokuapp.com/RFC3986
		('objectClass?one', ('objectClass?one', ), {}),
		('objectClass/one', ('objectClass/one', ), {}),
		#('', (), {}),
	]

# (query string, expected grouped values) — repeated keys collect into lists;
# valueless arguments group under the None key.
MULTI_VALUE_EXAMPLES = [
		('key=value1&key=value2&key=value3', {'key': ['value1', 'value2', 'value3']}),
		('key=value1&foo=bar&key=value2', {'key': ['value1', 'value2']}),
		('key=value1&foo&key=value2&bar&key=value3', {'key': ['value1', 'value2', 'value3'], None: ['foo', 'bar']}),
		('foo&key=value1&foo=bar&key=value2&foo=baz&diz', {'key': ['value1', 'value2'], None: ['foo', 'diz'], 'foo': ['bar', 'baz']}),
		('foo=bar&key=value1&diz=foo&key=value2&foo=baz', {'foo': ['bar', 'baz'], 'key': ['value1', 'value2'], 'diz': 'foo'}),
		#('', {}),
	]

# (source, index-or-key, replacement, expected result) for __setitem__.
ASSIGNMENT_EXAMPLES = [
		('key=value1&key=value2', 0, 'value3', 'key=value3&key=value2'),
		('key=value1&key=value2', 1, 'value3', 'key=value1&key=value3'),
		('key=value1&key=value2', 0, ('foo', 'value'), 'foo=value&key=value2'),
		('key=value1&key=value2', 1, ('foo', 'value'), 'key=value1&foo=value'),
		('key=value1&key=value2', 'key', ('foo', 'value'), 'foo=value'),
		('bar=baz&key=value1&key=value2', 'key', ('foo', 'value'), 'bar=baz&foo=value'),
		('key=value1&bar=baz&key=value2', 'key', ('foo', 'value'), 'bar=baz&foo=value'),
		('key=value1&key=value2&bar=baz', 'key', ('foo', 'value'), 'bar=baz&foo=value'),
		('bar=baz&key=value1&key=value2', 1, ('foo', 'value'), 'bar=baz&foo=value&key=value2'),
		('key=value1&bar=baz&key=value2', 0, ('foo', 'value'), 'foo=value&bar=baz&key=value2'),
		('key=value1&bar=baz&key=value2', 2, ('foo', 'value'), 'key=value1&bar=baz&foo=value'),
		('key=value1&key=value2&bar=baz', 0, ('foo', 'value'), 'foo=value&key=value2&bar=baz'),
		('key=value1&key=value2&bar=baz', 1, ('foo', 'value'), 'key=value1&foo=value&bar=baz'),
		('key=value1&key=value2&bar=baz', 2, ('foo', 'value'), 'key=value1&key=value2&foo=value'),
		#('', , '', ''),
	]

# (source, index-or-key, expected remainder, removed value(s)) for __delitem__.
DELETION_EXAMPLES = [
		('key=value1&key=value2&bar=baz', 0, 'key=value2&bar=baz', ['key', 'value1']),
		('key=value1&key=value2&bar=baz', 1, 'key=value1&bar=baz', ['key', 'value2']),
		('key=value1&key=value2&bar=baz', 2, 'key=value1&key=value2', ['bar', 'baz']),
		('key=value1&key=value2&bar=baz', 'key', 'bar=baz', [Bucket('key', 'value1'), Bucket('key', 'value2')]),
		('key=value1&key=value2&bar=baz', 'bar', 'key=value1&key=value2', ['bar', 'baz']),
		('key=value1&bar=baz&key=value2', 'bar', 'key=value1&key=value2', ['bar', 'baz']),
		#('', , ''),
	]

# (source, index-or-key-or-SENTINEL, expected remainder, popped value) for pop().
POP_EXAMPLES = [
		('key=value1&key=value2&bar=baz', 0, 'key=value2&bar=baz', Bucket('key', 'value1')),
		('key=value1&key=value2&bar=baz', 1, 'key=value1&bar=baz', Bucket('key', 'value2')),
		('key=value1&key=value2&bar=baz', 2, 'key=value1&key=value2', Bucket('bar', 'baz')),
		('key=value1&key=value2&bar=baz', 'key', 'key=value1&bar=baz', 'value2'),
		('key=value1&key=value2&bar=baz', 'bar', 'key=value1&key=value2', 'baz'),
		('key=value1&bar=baz&key=value2', 'bar', 'key=value1&key=value2', 'baz'),
		('key=value1&bar=baz&key=value2', SENTINEL, 'key=value1&bar=baz', Bucket('key', 'value2')),
		#('', , ''),
	]

# (source, update argument, expected result) — update() replaces existing keys.
UPDATE_EXAMPLES = [
		('key=value1&key=value2&bar=baz', 'foo=bar', 'key=value1&key=value2&bar=baz&foo=bar'),
		('key=value1&key=value2&bar=baz', 'key=value3', 'bar=baz&key=value3'),
		('key=value1&key=value2&bar=baz', 'bar=diz', 'key=value1&key=value2&bar=diz'),
		('key=value1&key=value2&bar=baz', dict(foo='bar'), 'key=value1&key=value2&bar=baz&foo=bar'),
		('key=value1&key=value2&bar=baz', ('foo=bar', 'baz=diz'), 'key=value1&key=value2&bar=baz&foo=bar&baz=diz'),
		('key=value1&key=value2&bar=baz', Bucket('foo', 'bar'), 'key=value1&key=value2&bar=baz&foo=bar'),
		('key=value1&key=value2&bar=baz', QSO("foo=baz&bar=diz"), 'key=value1&key=value2&bar=diz&foo=baz'),
	]

# (source, added argument, expected result) — '+=' appends, never replaces.
COMBINATION_EXAMPLES = [
		('key=value1&key=value2&bar=baz', 'foo=bar', 'key=value1&key=value2&bar=baz&foo=bar'),
		('key=value1&key=value2&bar=baz', 'key=value3', 'key=value1&key=value2&bar=baz&key=value3'),
		('key=value1&key=value2&bar=baz', 'bar=diz', 'key=value1&key=value2&bar=baz&bar=diz'),
		('key=value1&key=value2&bar=baz', dict(foo='bar'), 'key=value1&key=value2&bar=baz&foo=bar'),
		('key=value1&key=value2&bar=baz', ('foo=bar', ), 'key=value1&key=value2&bar=baz&foo=bar'),
		('key=value1&key=value2&bar=baz', Bucket('foo', 'bar'), 'key=value1&key=value2&bar=baz&foo=bar'),
		('key=value1&key=value2&bar=baz', QSO("foo=baz&bar=diz"), 'key=value1&key=value2&bar=baz&foo=baz&bar=diz'),
	]

# (source, comparison value) — QSO instances compare equal to equivalent
# strings and other QSO instances.
COMPARISON_EXAMPLES = [
		('', ''),
		('key=value1&key=value2&bar=baz', QSO('key=value1&key=value2&bar=baz')),
	]
class TestQSO:
	"""Behaviour tests for the QSO (query-string object) container itself."""

	@pytest.mark.parametrize('string,values', MULTI_VALUE_EXAMPLES)
	def test_multiple_values(self, string, values):
		# Repeated keys are exposed as an iterable group of their values.
		instance = QSO(string)
		for key in values:
			if not isinstance(values[key], list): continue
			result = list(instance[key])
			assert result == values[key]

	@pytest.mark.parametrize('src,key,value,expect', ASSIGNMENT_EXAMPLES)
	def test_multiple_reassignment(self, src, key, value, expect):
		instance = QSO(src)
		instance[key] = value
		assert str(instance) == expect

	def test_numeric_deletion(self):
		# Deleting by numeric index removes one bucket and shrinks its group.
		instance = QSO('key=value1&key=value2&bar=baz')
		assert len(instance) == 3
		assert len(instance.groups['key']) == 2
		del instance[0]
		assert len(instance) == 2
		assert len(instance.groups['key']) == 1
		assert str(instance) == 'key=value2&bar=baz'

	@pytest.mark.parametrize('src,key,expect,value', DELETION_EXAMPLES)
	def test_deletion_examples(self, src, key, expect, value):
		instance = QSO(src)
		del instance[key]
		assert str(instance) == expect

	@pytest.mark.parametrize('src,change,expect', UPDATE_EXAMPLES)
	def test_update(self, src, change, expect):
		instance = QSO(src)
		instance.update(change)
		assert str(instance) == expect

	def test_update_keywords(self):
		# update() also accepts keyword arguments; replaced keys move to the end.
		instance = QSO("key=value1&key=value2&bar=baz")
		instance.update(bar="diz")
		assert str(instance) == "key=value1&key=value2&bar=diz"
		instance.update(diz="doz")
		assert str(instance) == "key=value1&key=value2&bar=diz&diz=doz"
		instance.update(key="value3")
		assert str(instance) == "bar=diz&diz=doz&key=value3"

	@pytest.mark.parametrize('src,change,expect', COMBINATION_EXAMPLES)
	def test_inline_add(self, src, change, expect):
		# '+=' appends buckets without replacing existing keys (unlike update()).
		instance = QSO(src)
		instance += change
		assert str(instance) == expect

	def test_index(self):
		instance = QSO("foo=bar&baz=diz")
		assert instance.index('foo=bar') == 0
		assert instance.index('baz=diz') == 1
		with pytest.raises(ValueError):
			instance.index('diz')

	def test_count(self):
		instance = QSO("")
		assert instance.count('foo') == 0
		instance = QSO("foo&bar=value1&baz=diz&bar=value2")
		assert instance.count('foo') == 1
		assert instance.count('bar') == 2
		assert instance.count('baz') == 1

	def test_insert(self):
		instance = QSO("foo&bar&baz")
		instance.insert(0, "diz")
		assert str(instance) == "diz&foo&bar&baz"
		# Negative indexes insert before the element counted from the end.
		instance.insert(-1, "doz")
		assert str(instance) == "diz&foo&bar&doz&baz"
		assert len(instance.groups[None]) == 5
		# Out-of-range indexes append, mirroring list.insert semantics.
		instance.insert(99, "twentyseven")
		assert str(instance) == "diz&foo&bar&doz&baz&twentyseven"

	@pytest.mark.parametrize('src,value', COMPARISON_EXAMPLES)
	def test_comparison(self, src, value):
		instance = QSO(src)
		assert instance == value
		assert not (instance != value)

	@pytest.mark.parametrize('src,key,expect,value', POP_EXAMPLES)
	def test_pop_examples(self, src, key, expect, value):
		instance = QSO(src)
		result = instance.pop(key)
		assert str(instance) == expect
		assert result == value

	@pytest.mark.parametrize('key', ['baz', 2, SENTINEL])
	def test_pop_failures(self, key):
		# Popping a missing index/key/SENTINEL without a default raises KeyError.
		instance = QSO()
		with pytest.raises(KeyError):
			instance.pop(key)

	def test_pop_defaults(self):
		# A provided default suppresses KeyError for every addressing mode.
		instance = QSO()
		assert instance.pop(default=None) is None
		assert instance.pop(0, None) is None
		assert instance.pop('named', None) is None

	def test_pop_failure(self):
		instance = QSO()
		with pytest.raises(KeyError):
			instance.pop('key')

	def test_reverse(self):
		# reverse() flips bucket order and, consequently, grouped value order.
		instance = QSO("key=value1&key=value2&bar=baz")
		instance.reverse()
		assert str(instance) == "bar=baz&key=value2&key=value1"
		assert tuple(instance['key']) == ("value2", "value1")

	def test_keys(self):
		# keys() yields one entry per bucket, duplicates included.
		instance = QSO("key=value1&key=value2&bar=baz")
		assert tuple(instance.keys()) == ('key', 'key', 'bar')

	def test_items(self):
		instance = QSO("key=value1&key=value2&bar=baz")
		assert tuple(instance.items()) == (('key', 'value1'), ('key', 'value2'), ('bar', 'baz'))

	def test_values(self):
		instance = QSO("key=value1&key=value2&bar=baz")
		assert tuple(instance.values()) == ('value1', 'value2', 'baz')

	def test_get(self):
		# get() returns a group for repeated keys, a scalar for unique keys,
		# and None when the key is absent.
		instance = QSO("key=value1&key=value2&bar=baz")
		assert tuple(instance.get('key')) == ('value1', 'value2')
		assert instance.get('bar') == 'baz'
		assert instance.get('baz') is None

	def test_clear(self):
		instance = QSO("key=value1&key=value2&bar=baz")
		assert len(instance) == 3
		instance.clear()
		assert len(instance) == 0
		assert not instance
		assert not instance.groups
@pytest.mark.parametrize('string,args,kw', EXAMPLES)
class TestQSOExamples:
	"""Invariants that must hold for every canonical example query string."""

	def test_repr(self, string, args, kw):
		instance = QSO(string)
		assert repr(instance) == 'QSO("' + string + '")'

	def test_str(self, string, args, kw):
		# Round-trip: serializing must reproduce the source string exactly.
		instance = QSO(string)
		assert str(instance) == string

	def test_length(self, string, args, kw):
		# Length is the total number of positional + keyword buckets.
		# (A leftover `__import__('pudb').set_trace()` debugger hook and a
		# duplicate QSO construction were removed here.)
		instance = QSO(string)
		assert len(instance) == (len(args) + len(kw))

	def test_contains(self, string, args, kw):
		# Every valid bucket index answers True to `in`.
		instance = QSO(string)
		for i in range(len(args) + len(kw)):
			assert i in instance

	def test_named_assignment(self, string, args, kw):
		# Assigning a brand-new key appends it at the end.
		instance = QSO(string)
		instance['doz'] = '27'
		assert str(instance).endswith(('&' if (args or kw) else '') + 'doz=27')
@pytest.mark.parametrize('string,args,kw', [i for i in EXAMPLES if i[1]])
class TestQSOPositionalUse:
	"""Examples that contain at least one positional (valueless) argument."""

	def test_iteration_view(self, string, args, kw):
		# Iterating a QSO yields Bucket objects in document order.
		instance = QSO(string)
		for bucket, arg in zip(instance, args):
			assert bucket.value == arg
@pytest.mark.parametrize('string,args,kw', [i for i in EXAMPLES if i[2]])
class TestQSOKeywordUse:
	"""Examples that contain at least one key=value pair."""

	def test_contains(self, string, args, kw):
		# `in` also answers membership for keyword names.
		instance = QSO(string)
		for key in kw:
			assert key in instance

	def test_grouped_indexing(self, string, args, kw):
		# Indexing by key returns the expected (possibly grouped) value.
		instance = QSO(string)
		for key, value in kw.items():
			assert instance[key] == value

	def test_grouped_replacement(self, string, args, kw):
		# Assigning to an existing key replaces its value in the serialization.
		instance = QSO(string)
		instance['foo'] = 'doz'
		assert 'foo=doz' in str(instance)
@pytest.mark.parametrize('string,args,kw', [i for i in EXAMPLES if len(i[1]) > 1])
class TestQSOMultiplePositional:
	"""Examples with more than one positional argument."""

	def test_reversing(self, string, args, kw):
		instance = QSO(string)
		result = list(reversed(instance))
		assert len(result) == len(args)
		assert tuple(i.value for i in result) == args[::-1]

	def test_numeric_indexing(self, string, args, kw):
		instance = QSO(string)
		for i, arg in enumerate(args):
			assert instance[i] == arg

	def test_indexed_replacement(self, string, args, kw):
		# Replacing by index must show up in the serialized form. The original
		# `assert '&doz'` asserted a non-empty string literal, which is always
		# true and verified nothing.
		instance = QSO(string)
		instance[1] = 'doz'
		assert 'doz' in str(instance)
|
<filename>src/live-parser.js
import { Vogoor } from './vogoor.js'
/**
 * DOM-driven parser: once the document is ready, it scans every element
 * whose class attribute looks like a utility class (either carrying the
 * configured prefix or a ':' separator), translates each recognized class
 * via the Vogoor base class, and injects the corresponding CSS rule.
 */
class LiveParser extends Vogoor {
  constructor() {
    super();
    this.init();
  }

  init() {
    document.addEventListener("DOMContentLoaded", (event) => {
      // Select candidates by prefix when one is configured; otherwise fall
      // back to the escaped-colon convention.
      const selector = this.prefix
        ? `[class*=${this.prefix}]`
        : "[class*=\\:]";
      const elements = document.querySelectorAll(selector);
      for (const element of elements) {
        const recognized = this.findClass(element.className.split(" "));
        for (const utilityClass of recognized) {
          const rule = this.translateClass(utilityClass);
          // Register one CSS rule per translated utility class.
          this.createCSSSelector(rule.name, `${rule.propertyName}:${rule.propertyValue}`);
        }
      }
      // Page is hidden until all generated rules are in place.
      document.body.style.visibility = "visible";
    });
  }
}
export { LiveParser } |
<reponame>minuk8932/Algorithm_BaekJoon
package disjoint_set;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.util.StringTokenizer;
/**
 * BOJ 11724 — counts connected components of an undirected graph using a
 * disjoint-set (union-find) forest with union-by-size and path compression.
 * Negative entries in {@code parent} mark roots and store -(component size).
 */
public class Boj11724 {
	private static int parent[];

	public static void main(String[] args) throws Exception{
		BufferedReader reader = new BufferedReader(new InputStreamReader(System.in));
		StringTokenizer tokens = new StringTokenizer(reader.readLine());

		int nodeCount = Integer.parseInt(tokens.nextToken());
		int edgeCount = Integer.parseInt(tokens.nextToken());

		parent = new int[nodeCount + 1];
		init();

		// Union the endpoints of every edge.
		for (int edge = 0; edge < edgeCount; edge++) {
			tokens = new StringTokenizer(reader.readLine());
			int u = Integer.parseInt(tokens.nextToken());
			int v = Integer.parseInt(tokens.nextToken());
			merge(u, v);
		}

		// Each remaining root (negative entry) is one connected component.
		int components = 0;
		for (int node = 1; node < parent.length; node++) {
			if (parent[node] < 0) components++;
		}
		System.out.println(components);
	}

	/** Marks every node (1..N) as its own singleton component of size 1. */
	private static void init() {
		for (int node = 1; node < parent.length; node++) parent[node] = -1;
	}

	/** Returns the root of {@code x}, compressing the path along the way. */
	private static int find(int x) {
		return parent[x] < 0 ? x : (parent[x] = find(parent[x]));
	}

	/** Merges the components of {@code x} and {@code y}, smaller into larger. */
	private static void merge(int x, int y) {
		int rootX = find(x);
		int rootY = find(y);
		if (rootX == rootY) return;

		// More-negative value means bigger tree; attach the smaller tree to it.
		if (parent[rootX] < parent[rootY]) {
			parent[rootX] += parent[rootY];
			parent[rootY] = rootX;
		} else {
			parent[rootY] += parent[rootX];
			parent[rootX] = rootY;
		}
	}
}
<reponame>Musashi-Sakamoto/GeolocationShare
module.exports = (sequelize, DataTypes) => {
const user = sequelize.define('user', {
email: DataTypes.STRING,
username: DataTypes.STRING,
password: DataTypes.STRING,
isVerified: DataTypes.BOOLEAN
}, {});
user.associate = function (models) {
// associations can be defined here
models.user.hasMany(models.comment, {
foreignKey: 'user_id',
targetKey: 'id',
foreignKeyConstraint: true
});
models.user.hasMany(models.location, {
foreignKey: 'user_id',
targetKey: 'id',
foreignKeyConstraint: true
});
models.user.hasOne(models.token, {
foreignKey: 'user_id',
foreignKeyConstraint: true
});
};
return user;
};
|
# Build (if needed) and start the 'dynacert' compose project in the foreground.
docker-compose -p dynacert up --build
|
#!/bin/bash
# DO NOT RUN UNLESS APACHE, GIT, and everything is installed first
# Abort on any failure: without this, a failed `cd` (e.g. missing web
# root) would let the clone land in whatever the current directory is.
set -e
cd /var/www/html
# Clone the cardswipe app directly into the web root.
git clone https://github.com/StoutCEE/general_cardswipe .
#!/usr/bin/env bash
# Creates and installs the /etc/birdnet/birdnet.conf file
#set -x # Uncomment to enable debugging
set -e
# Abort cleanly on interrupt/hangup.
trap 'exit 1' SIGINT SIGHUP
# Resolve this script's directory so relative paths work from any CWD.
my_dir=$(realpath $(dirname $0))
# The generated (or pre-existing) config lives one level above this script.
BIRDNET_CONF="$(dirname ${my_dir})/birdnet.conf"
# Prompt for the local recordings directory (sets RECS_DIR).
get_RECS_DIR() {
  read -p "What is the full path to your recordings directory (locally)? " RECS_DIR
}
# Prompt for the recording site's latitude (sets LATITUDE).
get_LATITUDE() {
  read -p "What is the latitude where the recordings were made? " LATITUDE
}
# Prompt for the recording site's longitude (sets LONGITUDE).
get_LONGITUDE() {
  read -p "What is the longitude where the recordings were made? " LONGITUDE
}
# Yes/No prompt: should this device run the extraction service?
# Loops until a single y/n keypress is given (sets DO_EXTRACTIONS).
get_DO_EXTRACTIONS() {
  while true; do
    read -n1 -p "Do you want this device to perform the extractions? " DO_EXTRACTIONS
    echo
    case $DO_EXTRACTIONS in
      [Yy] ) break;;
      [Nn] ) break;;
      * ) echo "You must answer with Yes or No (y or n)";;
    esac
  done
}
# Yes/No prompt: is this device also the recorder? (sets DO_RECORDING).
get_DO_RECORDING() {
  while true; do
    read -n1 -p "Is this device also doing the recording? " DO_RECORDING
    echo
    case $DO_RECORDING in
      [Yy] ) break;;
      [Nn] ) break;;
      * ) echo "You must answer with Yes or No (y or n)";;
    esac
  done
}
# Ask whether the recordings live on a remote filesystem. On "yes",
# collect the SSH connection details (REMOTE_HOST, REMOTE_USER,
# REMOTE_RECS_DIR) used later for the systemd mount.
get_REMOTE() {
  while true; do
    read -n1 -p "Are the recordings mounted on a remote file system?" REMOTE
    echo
    case $REMOTE in
      [Yy] )
        read -p "What is the remote hostname or IP address for the recorder? " REMOTE_HOST
        read -p "Who is the remote user? " REMOTE_USER
        read -p "What is the absolute path of the recordings directory on the remote host? " REMOTE_RECS_DIR
        break;;
      [Nn] ) break;;
      * ) echo "Please answer Yes or No (y or n)";;
    esac
  done
}
# Ask whether to serve the extractions over HTTP (Caddy). On "yes",
# collect the publish URL and chain into the password prompts; on "no",
# clear all web-related settings so the services stay disabled.
get_EXTRACTIONS_URL() {
  while true;do
    read -n1 -p "Would you like to access the extractions via a web browser?
  *Note: It is recommended, (but not required), that you run the web
   server on the same host that does the extractions. If the extraction
   service and web server are on different hosts, the \"By_Species\" and
   \"Processed\" symbolic links won't work. The \"By-Date\" extractions,
   however, will work as expected." CADDY_SERVICE
    echo
    case $CADDY_SERVICE in
      [Yy] ) read -p "What URL would you like to publish the extractions to?
  *Note: Set this to http://localhost if you do not want to make the
   extractions publically available: " EXTRACTIONS_URL
        get_CADDY_PWD
        get_ICE_PWD
        break;;
      [Nn] ) EXTRACTIONS_URL= CADDY_PWD= ICE_PWD=;break;;
      * ) echo "Please answer Yes or No";;
    esac
  done
}
# Prompt for the Caddy basic-auth password unless one is already set,
# re-prompting until it is non-empty. The expansion is quoted: the old
# unquoted `[ -z ${CADDY_PWD} ]` broke with a syntax error whenever the
# sourced password contained whitespace.
get_CADDY_PWD() {
  if [ -z "${CADDY_PWD}" ]; then
    while true; do
      read -p "Please set a password to protect your data: " CADDY_PWD
      case $CADDY_PWD in
        "" ) echo "The password cannot be empty. Please try again.";;
        * ) break;;
      esac
    done
  fi
}
# Prompt for the icecast source password, but only when web access is
# enabled (CADDY_PWD set) and this device records audio (DO_RECORDING=y).
# Fixes two defects in the original: the unquoted `[ ! -z ${CADDY_PWD} ]`
# test, and an unconditional `break` after the password check that exited
# the loop even when the password was empty — so the "try again" message
# never actually re-prompted.
get_ICE_PWD() {
  if [ -n "${CADDY_PWD}" ] && [[ ${DO_RECORDING} =~ [Yy] ]];then
    while true; do
      read -n1 -p "Would you like to enable the live audio streaming service?" LIVE_STREAM
      echo
      case $LIVE_STREAM in
        [Yy] )
          # Re-prompt until a non-empty password is supplied.
          while true; do
            read -p "Please set the icecast password. Use only alphanumeric characters. " ICE_PWD
            echo
            case ${ICE_PWD} in
              "" ) echo "The password cannot be empty. Please try again.";;
              * ) break;;
            esac
          done
          break;;
        [Nn] ) break;;
        * ) echo "You must answer Yes or No (y or n).";;
      esac
    done
  fi
}
# Ask whether the user has Pushed.co credentials for mobile notifications.
# On "yes" collect the app key/secret; on "no" clear both so the feature
# stays disabled.
get_PUSHED() {
  while true; do
    read -n1 -p "Do you have a free App key to receive mobile notifications via Pushed.co?" YN
    echo
    case $YN in
      [Yy] ) read -p "Enter your Pushed.co App Key: " PUSHED_APP_KEY
        read -p "Enter your Pushed.co App Key Secret: " PUSHED_APP_SECRET
        break;;
      [Nn] ) PUSHED_APP_KEY=
        PUSHED_APP_SECRET=
        break;;
      * ) echo "A simple Yea or Nay will do";;
    esac
  done
}
# Yes/No prompt: install NoMachine remote desktop alongside BirdNET?
# (sets INSTALL_NOMACHINE).
get_INSTALL_NOMACHINE() {
  while true; do
    read -n1 -p "Would you like to also install NoMachine for remote desktop access?" INSTALL_NOMACHINE
    echo
    case $INSTALL_NOMACHINE in
      [Yy] ) break;;
      [Nn] ) break;;
      * ) echo "You must answer with Yes or No (y or n)";;
    esac
  done
}
# Detect the recording sound card and its channel count.
# REC_CARD is deliberately stored as a *literal* command substitution
# (note the escaped \$) so that birdnet.conf re-detects the dsnoop device
# every time it is sourced.
get_CHANNELS() {
  REC_CARD="\$(sudo -u pi aplay -L \
    | grep dsnoop \
    | cut -d, -f1 \
    | grep -ve 'vc4' -e 'Head' -e 'PCH' \
    | uniq)"
  [ -f $(dirname ${my_dir})/soundcard_params.txt ] || touch $(dirname ${my_dir})/soundcard_params.txt
  SOUND_PARAMS=$(dirname ${my_dir})/soundcard_params.txt
  SOUND_CARD="$(sudo -u ${USER} aplay -L \
    | awk -F, '/^hw:/ {print $1}' \
    | grep -ve 'vc4' -e 'Head' -e 'PCH' \
    | uniq)"
  # Dump the card's hardware parameters and pull the channel count out.
  script -c "arecord -D ${SOUND_CARD} --dump-hw-params" -a "${SOUND_PARAMS}" &> /dev/null
  CHANNELS=$(awk '/CHANN/ { print $2 }' "${SOUND_PARAMS}" | sed 's/\r$//')
  # Fall back to defaults only when detection produced nothing. The old
  # tests (`[ -z REC_CARD ] || REC_CARD=default`) checked the literal
  # strings "REC_CARD"/"CHANNELS" — never empty — so the defaults always
  # clobbered the detected values (CHANNELS was forced to 2 every run).
  [ -n "${REC_CARD}" ] || REC_CARD=default
  [ -n "${CHANNELS}" ] || CHANNELS=2
  echo "REC_CARD variable set to ${REC_CARD}"
  echo "Number of channels available: ${CHANNELS}"
}
# Run the full interactive questionnaire, in dependency order: the web
# (Caddy) prompts chain into the password/icecast prompts, which in turn
# read DO_RECORDING, so the ordering below matters.
configure() {
  get_RECS_DIR
  get_LATITUDE
  get_LONGITUDE
  get_DO_EXTRACTIONS
  get_DO_RECORDING
  get_REMOTE
  get_EXTRACTIONS_URL
  get_PUSHED
  get_INSTALL_NOMACHINE
  get_CHANNELS
}
install_birdnet_conf() {
cat << EOF > $(dirname ${my_dir})/birdnet.conf
################################################################################
# Configuration settings for BirdNET as a service #
################################################################################
INSTALL_DATE="$(date "+%D")"
#___________The four variables below are the only that are required.___________#
## BIRDNET_USER should be the non-root user systemd should use to execute each
## service.
BIRDNET_USER=${USER}
## RECS_DIR is the location birdnet_analysis.service will look for the data-set
## it needs to analyze. Be sure this directory is readable and writable for
## the BIRDNET_USER. If you are going to be accessing a remote data-set, you
## still need to set this, as this will be where the remote directory gets
## mounted locally. See REMOTE_RECS_DIR below for mounting remote data-sets.
RECS_DIR=${RECS_DIR}
## LATITUDE and LONGITUDE are self-explanatroy. Find them easily at
## maps.google.com. Only go to the thousanths place for these variables
## Example: these coordinates would indicate the Eiffel Tower in Paris, France.
## LATITUDE=48.858
## LONGITUDE=2.294
LATITUDE="${LATITUDE}"
LONGITUDE="${LONGITUDE}"
################################################################################
#------------------------------ Extraction Service ---------------------------#
# Keep this EMPTY if you do not want this device to perform the extractions #
## DO_EXTRACTIONS is simply a setting for enabling the extraction.service.
## Set this to Y or y to enable extractions.
DO_EXTRACTIONS=${DO_EXTRACTIONS}
################################################################################
#----------------------------- Recording Service ----------------------------#
# Keep this EMPTY if you do not want this device to perform the recording. #
## DO_RECORDING is simply a setting for enabling the 24/7 birdnet_recording.service.
## Set this to Y or y to enable recording.
DO_RECORDING=${DO_RECORDING}
################################################################################
#----------------- Mounting a remote directory with systemd -----------------#
#_______________The four variables below can be set to enable a_______________#
#___________________systemd.mount for analysis, extraction,____________________#
#______________________________or file-serving_________________________________#
# Leave these settings EMPTY if your data-set is local. #
## REMOTE is simply a setting for enabling the systemd.mount to use a remote
## filesystem for the data storage and service.
## Set this to Y or y to enable the systemd.mount.
REMOTE=${REMOTE}
## REMOTE_HOST is the IP address, hostname, or domain name SSH should use to
## connect for FUSE to mount its remote directories locally.
REMOTE_HOST=${REMOTE_HOST}
## REMOTE_USER is the user SSH will use to connect to the REMOTE_HOST.
REMOTE_USER=${REMOTE_USER}
## REMOTE_RECS_DIR is the directory on the REMOTE_HOST which contains the
## data-set SSHFS should mount to this system for local access. This is NOT the
## directory where you will access the data on this machine. See RECS_DIR for
## that.
REMOTE_RECS_DIR=${REMOTE_RECS_DIR}
################################################################################
#----------------------- Web-hosting/Caddy File-server -----------------------#
#__________The two variables below can be set to enable web access_____________#
#____________to your data,(e.g., extractions, raw data, live___________________#
#______________audio stream, BirdNET.selection.txt files)______________________#
# Leave these EMPTY if you do not want to enable web access #
## EXTRACTIONS_URL is the URL where the extractions, data-set, and live-stream
## will be web-hosted. If you do not own a domain, or would just prefer to keep
## BirdNET-system on your local network, you can set this to http://localhost.
## Setting this (even to http://localhost) will also allow you to enable the
## GoTTY web logging features below.
EXTRACTIONS_URL=${EXTRACTIONS_URL}
## CADDY_PWD is the plaintext password (that will be hashed) and used to access
## the "Processed" directory and live audio stream. This MUST be set if you
## choose to enable this feature.
CADDY_PWD=${CADDY_PWD}
################################################################################
#------------------------- Live Audio Stream --------------------------------#
#_____________The variable below configures/enables the live___________________#
#_____________________________audio stream.____________________________________#
# Keep this EMPTY if you do not wish to enable the live stream #
# or if this device is not doing the recording #
## ICE_PWD is the password that icecast2 will use to authenticate ffmpeg as a
## trusted source for the stream. You will never need to enter this manually
## anywhere other than here.
ICE_PWD=${ICE_PWD}
################################################################################
#------------------- Mobile Notifications via Pushed.co ---------------------#
#____________The two variables below enable mobile notifications_______________#
#_____________See https://pushed.co/quick-start-guide to get___________________#
#_________________________these values for your app.___________________________#
# Keep these EMPTY if haven't setup a Pushed.co App yet. #
## Pushed.co App Key and App Secret
PUSHED_APP_KEY=${PUSHED_APP_KEY}
PUSHED_APP_SECRET=${PUSHED_APP_SECRET}
################################################################################
#------------------------------- NoMachine ----------------------------------#
#_____________The variable below can be set include NoMachine__________________#
#_________________remote desktop software to be installed._____________________#
# Keep this EMPTY if you do not want to install NoMachine. #
## INSTALL_NOMACHINE is simply a setting that can be enabled to install
## NoMachine alongside the BirdNET-system for remote desktop access. This in-
## staller assumes personal use. Please reference the LICENSE file included
## in this repository for more information.
## Set this to Y or y to install NoMachine alongside the BirdNET-system
INSTALL_NOMACHINE=${INSTALL_NOMACHINE}
################################################################################
#-------------------------------- Defaults ----------------------------------#
#________The six variables below are default settings that you (probably)______#
#__________________don't need to change at all, but can._______________________#
## REC_CARD is the sound card you would want the birdnet_recording.service to
## use. This setting is irrelevant if you are not planning on doing data
## collection via recording on this machine. The command substitution below
## looks for a USB microphone's dsnoop alsa device. The dsnoop device lets
## birdnet_recording.service and livestream.service share the raw audio stream
## from the microphone. If you would like to use a different microphone than
## what this produces, or if your microphone does not support creating a
## dsnoop device, you can set this explicitly from a list of the available
## devices from the output of running 'aplay -L'
REC_CARD=${REC_CARD}
## PROCESSED is the directory where the formerly 'Analyzed' files are moved
## after extractions have been made from them. This includes both WAVE and
## BirdNET.selection.txt files.
PROCESSED=${RECS_DIR}/Processed
## EXTRACTED is the directory where the extracted audio selections are moved.
EXTRACTED=${RECS_DIR}/Extracted
## IDFILE is the file that keeps a complete list of every spececies that
## BirdNET has identified from your data-set. It is persistent across
## data-sets, so would need to be whiped clean through deleting or renaming
## it. A backup is automatically made from this variable each time it is
## updated (structure: ${IDFILE}.bak), and would also need to be removed
## or renamed to start a new file between data-sets. Alternately, you can
## change this variable between data-sets to preserve records of disparate
## data-sets according to name.
IDFILE=${HOME}/BirdNET-system/IdentifiedSoFar.txt
## OVERLAP is the value in seconds which BirdNET should use when analyzing
## the data. The values must be between 0.0-2.9.
OVERLAP="0.0"
## CONFIDENCE is the minimum confidence level from 0.0-1.0 BirdNET's analysis
## should reach before creating an entry in the BirdNET.selection.txt file.
## Don't set this to 1.0 or you won't have any results.
CONFIDENCE="0.7"
################################################################################
#------------------------------ Auto-Generated ------------------------------#
#_____________________The variables below are auto-generated___________________#
#______________________________during installation_____________________________#
## CHANNELS holds the variable that corresponds to the number of channels the
## sound card supports.
CHANNELS=${CHANNELS}
# Don't touch the three below
## ANALYZED is where the extraction.service looks for audio and
## BirdNET.selection.txt files after they have been processed by the
## birdnet_analysis.service. This is NOT where the analyzed files are moved --
## analyzed files are always created within the same directory
## birdnet_analysis.service finds them.
ANALYZED=${RECS_DIR}/*/*Analyzed
## SYSTEMD_MOUNT is created from the RECS_DIR variable to comply with systemd
## mount naming requirements.
SYSTEMD_MOUNT=$(echo ${RECS_DIR#/} | tr / -).mount
## VENV is the virtual environment where the the BirdNET python build is found,
## i.e, VENV is the virtual environment miniforge built for BirdNET.
VENV=$(dirname ${my_dir})/miniforge/envs/birdnet
EOF
[ -d /etc/birdnet ] || sudo mkdir /etc/birdnet
sudo ln -sf $(dirname ${my_dir})/birdnet.conf /etc/birdnet/birdnet.conf
}
# Checks for a birdnet.conf file in the BirdNET-system directory for a
# non-interactive installation. Otherwise, the installation is interactive.
# Quoting ${BIRDNET_CONF} protects against word splitting if the path
# ever contains spaces.
if [ -f "${BIRDNET_CONF}" ];then
  source "${BIRDNET_CONF}"
  install_birdnet_conf
else
  configure
  install_birdnet_conf
fi
|
package org.opentele.server.dgks.monitoringdataset.version1_0_1.generated;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlType;
import javax.xml.bind.annotation.XmlValue;
/**
 * Country identification code based on the four different supported formats:
 * ISO 3166 alpha-2 (e.g. "DK"), ISO 3166 alpha-3 (e.g. "DKN"),
 * UN Statistics Division numeric codes (e.g. "208"), and the authority code
 * from the Central Office of Civil Registration (e.g. "5100").
 *
 * <p>Java class for CountryIdentificationCodeType complex type: a simple
 * content element (the code itself) carrying a mandatory {@code scheme}
 * attribute that identifies which of the four formats the value uses.
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "CountryIdentificationCodeType", namespace = "http://rep.oio.dk/ebxml/xml/schemas/dkcc/2003/02/13/", propOrder = {
    "value"
})
public class CountryIdentificationCodeType {

    // The country code itself, e.g. "DK" / "DKN" / "208" / "5100" for Denmark.
    @XmlValue
    protected String value;

    // Indicates which of the four code formats the value conforms to.
    @XmlAttribute(name = "scheme", required = true)
    protected CountryIdentificationSchemeType scheme;

    /**
     * Gets the country code value.
     *
     * @return possible object is {@link String }
     */
    public String getValue() {
        return this.value;
    }

    /**
     * Sets the country code value.
     *
     * @param value allowed object is {@link String }
     */
    public void setValue(String value) {
        this.value = value;
    }

    /**
     * Gets the scheme identifying the code format.
     *
     * @return possible object is {@link CountryIdentificationSchemeType }
     */
    public CountryIdentificationSchemeType getScheme() {
        return this.scheme;
    }

    /**
     * Sets the scheme identifying the code format.
     *
     * @param value allowed object is {@link CountryIdentificationSchemeType }
     */
    public void setScheme(CountryIdentificationSchemeType value) {
        this.scheme = value;
    }
}
|
package chylex.hee.gui.slots;
import net.minecraft.inventory.Slot;
import chylex.hee.tileentity.base.IInventoryInvalidateable;
public class SlotTableSubject extends Slot{
    // Kept as the invalidateable interface so slot changes can be propagated
    // back to the owning tile entity's inventory cache.
    private final IInventoryInvalidateable invalidateable;

    public SlotTableSubject(IInventoryInvalidateable inv, int id, int x, int z){
        super(inv, id, x, z);
        this.invalidateable = inv;
    }

    /**
     * Invalidates the backing inventory whenever this slot's contents change,
     * in addition to the vanilla change handling.
     */
    @Override
    public void onSlotChanged(){
        super.onSlotChanged();
        this.invalidateable.invalidateInventory();
    }
}
|
package io.smallrye.mutiny;
import static io.smallrye.mutiny.helpers.ParameterValidation.nonNull;
import java.util.concurrent.Executor;
import java.util.function.Function;
import java.util.function.Predicate;
import org.reactivestreams.Publisher;
import org.reactivestreams.Subscriber;
import org.reactivestreams.Subscription;
import io.smallrye.mutiny.groups.MultiBroadcast;
import io.smallrye.mutiny.groups.MultiCollect;
import io.smallrye.mutiny.groups.MultiConvert;
import io.smallrye.mutiny.groups.MultiCreate;
import io.smallrye.mutiny.groups.MultiCreateBy;
import io.smallrye.mutiny.groups.MultiGroup;
import io.smallrye.mutiny.groups.MultiOnCompletion;
import io.smallrye.mutiny.groups.MultiOnEvent;
import io.smallrye.mutiny.groups.MultiOnFailure;
import io.smallrye.mutiny.groups.MultiOnItem;
import io.smallrye.mutiny.groups.MultiOverflow;
import io.smallrye.mutiny.groups.MultiSubscribe;
import io.smallrye.mutiny.groups.MultiTransform;
@SuppressWarnings("PublisherImplementation")
public interface Multi<T> extends Publisher<T> {

    /**
     * Creates new instances of {@link Multi}.
     *
     * @return the object to configure the creation process.
     */
    static MultiCreate createFrom() {
        return MultiCreate.INSTANCE;
    }

    /**
     * Creates new instances of {@link Multi} by merging, concatenating or associating items from other {@link Multi}
     * and {@link Publisher}.
     *
     * @return the object to configure the creation process.
     */
    static MultiCreateBy createBy() {
        return MultiCreateBy.INSTANCE;
    }

    /**
     * Configures the subscriber consuming this {@link Multi}.
     *
     * @return the object to configure the subscriber
     */
    MultiSubscribe<T> subscribe();

    /**
     * Configures the behavior when an {@code item} event is received from this {@link Multi}
     *
     * @return the object to configure the behavior.
     */
    MultiOnItem<T> onItem();

    /**
     * Allows structuring the pipeline by creating a logic separation:
     *
     * <pre>
     * {@code
     *     Multi multi = upstream
     *      .then(m -> { ...})
     *      .then(m -> { ...})
     *      .then(m -> { ...})
     * }
     * </pre>
     * <p>
     * With `then` you can structure and chain groups of processing.
     *
     * @param stage the function receiving this {@link Multi} as parameter and producing the outcome (can be a
     *        {@link Multi} or something else), must not be {@code null}.
     * @param <O> the outcome type
     * @return the outcome of the function.
     */
    default <O> O then(Function<Multi<T>, O> stage) {
        return nonNull(stage, "stage").apply(this);
    }

    /**
     * Creates a {@link Uni} from this {@link Multi}.
     * <p>
     * When a subscriber subscribes to the returned {@link Uni}, it subscribes to this {@link Multi} and requests one
     * item. The event emitted by this {@link Multi} are then forwarded to the {@link Uni}:
     *
     * <ul>
     * <li>on item event, the item is fired by the produced {@link Uni}</li>
     * <li>on failure event, the failure is fired by the produced {@link Uni}</li>
     * <li>on completion event, a {@code null} item is fired by the produced {@link Uni}</li>
     * <li>any item or failure events received after the first event are dropped</li>
     * </ul>
     * <p>
     * If the subscription on the produced {@link Uni} is cancelled, the subscription to the passed {@link Multi} is
     * also cancelled.
     *
     * @return the produced {@link Uni}
     */
    Uni<T> toUni();

    /**
     * Like {@link #onFailure(Predicate)} but applied to all failures fired by the upstream multi.
     * It allows configuring the on failure behavior (recovery, retry...).
     *
     * @return a MultiOnFailure on which you can specify the on failure action
     */
    MultiOnFailure<T> onFailure();

    /**
     * Configures a predicate filtering the failures on which the behavior (specified with the returned
     * {@link MultiOnFailure}) is applied.
     * <p>
     * For instance, to react only when an {@code IOException} is fired as failure you can use:
     * <code>multi.onFailure(IOException.class).recoverWithItem("hello")</code>
     * <p>
     * The fallback value ({@code hello}) will only be used if the upstream multi fires a failure of type
     * {@code IOException}.
     *
     * @param predicate the predicate, {@code null} means applied to all failures
     * @return a MultiOnFailure configured with the given predicate on which you can specify the on failure action
     */
    MultiOnFailure<T> onFailure(Predicate<? super Throwable> predicate);

    /**
     * Configures a type of failure filtering the failures on which the behavior (specified with the returned
     * {@link MultiOnFailure}) is applied.
     * <p>
     * For instance, to react only when an {@code IOException} is fired as failure you can use:
     * <code>multi.onFailure(IOException.class).recoverWithItem("hello")</code>
     * <p>
     * The fallback value ({@code hello}) will only be used if the upstream multi fires a failure of type
     * {@code IOException}.
     *
     * @param typeOfFailure the class of exception, must not be {@code null}
     * @return a MultiOnFailure configured with the given predicate on which you can specify the on failure action
     */
    MultiOnFailure<T> onFailure(Class<? extends Throwable> typeOfFailure);

    /**
     * Allows adding behavior when various type of events are emitted by the current {@link Multi} (item, failure,
     * completion) or by the subscriber (cancellation, request, subscription)
     *
     * @return the object to configure the action to execute when events happen
     */
    MultiOnEvent<T> on();

    /**
     * Creates a new {@link Multi} that subscribes to this upstream and caches all of its events and replays them, to
     * all the downstream subscribers.
     *
     * @return a multi replaying the events from the upstream.
     */
    Multi<T> cache();

    /**
     * Produces {@link Multi} or {@link Uni} collecting items from this {@link Multi}. You can accumulate the items
     * into a {@link java.util.List} ({@link MultiCollect#asList()}), {@link java.util.Map}
     * ({@link MultiCollect#asMap(Function)})...
     * <p>
     * You can also retrieve the first and last items using {@link MultiCollect#first()} and {@link MultiCollect#last()}.
     *
     * @return the object to configure the collection process.
     */
    MultiCollect<T> collectItems();

    /**
     * Produces {@link Multi} grouping items from this {@link Multi} into various "form of chunks" (list, {@link Multi}).
     * The grouping can be done linearly ({@link MultiGroup#intoLists()} and {@link MultiGroup#intoMultis()}, or based
     * on a grouping function ({@link MultiGroup#by(Function)})
     *
     * @return the object to configure the grouping.
     */
    MultiGroup<T> groupItems();

    /**
     * Produces a new {@link Multi} invoking the {@code onItem}, {@code onFailure} and {@code onCompletion} methods
     * on the supplied {@link Executor}.
     * <p>
     * Instead of receiving the {@code item} event on the thread firing the event, this method influences the
     * threading context to switch to a thread from the given executor. Same behavior for failure and completion.
     * <p>
     * Note that the subscriber is guaranteed to never be called concurrently.
     *
     * @param executor the executor to use, must not be {@code null}
     * @return a new {@link Multi}
     */
    Multi<T> emitOn(Executor executor);

    /**
     * When a subscriber subscribes to this {@link Multi}, execute the subscription to the upstream {@link Multi} on a
     * thread from the given executor. As a result, the {@link Subscriber#onSubscribe(Subscription)} method will be called
     * on this thread (unless mentioned otherwise)
     *
     * @param executor the executor to use, must not be {@code null}
     * @return a new {@link Multi}
     */
    Multi<T> subscribeOn(Executor executor);

    /**
     * Allows configuring the actions or continuation to execute when this {@link Multi} fires the completion event.
     *
     * @return the object to configure the action.
     */
    MultiOnCompletion<T> onCompletion();

    /**
     * Transforms the streams by skipping, selecting, or merging.
     *
     * @return the object to configure the transformation.
     */
    MultiTransform<T> transform();

    /**
     * Configures the back-pressure behavior when the consumer cannot keep up with the emissions from this
     * {@link Multi}.
     *
     * @return the object to configure the overflow strategy
     */
    MultiOverflow<T> onOverflow();

    /**
     * Makes this {@link Multi} be able to broadcast its events ({@code items}, {@code failure}, and {@code completion})
     * to multiple subscribers.
     *
     * @return the object to configure the broadcast
     */
    MultiBroadcast<T> broadcast();

    /**
     * Converts a {@link Multi} to other types
     *
     * <p>
     * Examples:
     * </p>
     *
     * <pre>
     * {@code
     * multi.convert().with(multi -> x); // Convert with a custom lambda converter
     * }
     * </pre>
     *
     * @return the object to convert a {@link Multi} instance
     * @see MultiConvert
     */
    MultiConvert<T> convert();

    /**
     * Produces a new {@link Multi} invoking the given function for each item emitted by the upstream {@link Multi}.
     * <p>
     * The function receives the received item as parameter, and can transform it. The returned object is sent
     * downstream as {@code item} event.
     * <p>
     * This method is a shortcut for {@code multi.onItem().apply(mapper)}.
     *
     * @param mapper the mapper function, must not be {@code null}
     * @param <O> the type of item produced by the mapper function
     * @return the new {@link Multi}
     */
    default <O> Multi<O> map(Function<? super T, ? extends O> mapper) {
        return onItem().apply(nonNull(mapper, "mapper"));
    }

    /**
     * Produces a {@link Multi} containing the items from {@link Publisher} produced by the {@code mapper} for each
     * item emitted by this {@link Multi}.
     * <p>
     * The operation behaves as follows:
     * <ul>
     * <li>for each item emitted by this {@link Multi}, the mapper is called and produces a {@link Publisher}
     * (potentially a {@code Multi}). The mapper must not return {@code null}</li>
     * <li>The items emitted by each of the produced {@link Publisher} are then <strong>merged</strong> in the
     * produced {@link Multi}. The flatten process may interleave items.</li>
     * </ul>
     * This method is a shortcut for {@code multi.onItem().producePublisher(mapper).merge()}.
     *
     * @param mapper the {@link Function} producing {@link Publisher} / {@link Multi} for each item emitted by the
     *        upstream {@link Multi}
     * @param <O> the type of item emitted by the {@link Publisher} produced by the {@code mapper}
     * @return the produced {@link Multi}
     */
    default <O> Multi<O> flatMap(Function<? super T, ? extends Publisher<? extends O>> mapper) {
        return onItem().producePublisher(mapper).merge();
    }

    /**
     * Produces a {@link Multi} containing the items from {@link Publisher} produced by the {@code mapper} for each
     * item emitted by this {@link Multi}.
     * <p>
     * The operation behaves as follows:
     * <ul>
     * <li>for each item emitted by this {@link Multi}, the mapper is called and produces a {@link Publisher}
     * (potentially a {@code Multi}). The mapper must not return {@code null}</li>
     * <li>The items emitted by each of the produced {@link Publisher} are then <strong>concatenated</strong> in the
     * produced {@link Multi}. The flatten process makes sure that the items are not interleaved.
     * </ul>
     * <p>
     * This method is equivalent to {@code multi.onItem().producePublisher(mapper).concatenate()}.
     *
     * @param mapper the {@link Function} producing {@link Publisher} / {@link Multi} for each item emitted by the
     *        upstream {@link Multi}
     * @param <O> the type of item emitted by the {@link Publisher} produced by the {@code mapper}
     * @return the produced {@link Multi}
     */
    default <O> Multi<O> concatMap(Function<? super T, ? extends Publisher<? extends O>> mapper) {
        return onItem().producePublisher(mapper).concatenate();
    }
}
|
#!/usr/bin/env bash
set -euo pipefail

# Restores a Postgres seed dump into the shared_clinics database.
#
# Options:
#   -f FILE  path to the seed dump (default: ./local_seed.sql)
#   -d       restore the entire database
#   -h       print usage and exit

SEED_FILE=''
ENTIRE_DATABASE=''

while getopts ":f:dh" opt; do
  case $opt in
    f)
      SEED_FILE="$OPTARG"
      ;;
    d)
      # BUGFIX: -d was declared in the getopts string and in the usage text
      # but had no case branch, so it was silently ignored. Record it here.
      # NOTE(review): nothing below reads this flag yet — confirm intended use.
      ENTIRE_DATABASE='yes'
      ;;
    h)
      echo "load.sh [-f /path/to/seed/file] [-d]" >&2
      echo "WARNING: this drops and recreates the database!" >&2
      exit 0
      ;;
    :)
      echo "Missing argument for -$OPTARG" >&2
      exit 1
      ;;
    \?)
      echo "Invalid option: -$OPTARG" >&2
      exit 1
      ;;
  esac
done

if [[ -z "$SEED_FILE" ]]; then
  SEED_FILE=./local_seed.sql
fi

# Historical manual bootstrap steps, kept for reference:
# echo "DROP DATABASE simpletrack" | psql postgres
# echo "create database simpletrack with owner stdb_user" | psql postgres
# echo 'create extension if not exists "uuid-ossp"' | psql simpletrack
# pgdump output
# pg_restore -j 4 -e -c --if-exists -h localhost -p 5432 -d simpletrack -U stdb_user -O $SEED_FILE

# "entire db"
pg_restore -e -O -x -c --if-exists -j 4 -h localhost -p 5432 -U clinics_user -d shared_clinics "$SEED_FILE"
|
<gh_stars>0
// import React from 'react';
import { h, render } from 'preact';
export default function SuggestionsDataList(props) {
return (
<datalist id={props.id}>
{props.suggestions.all.map(sug => (
<option key={sug.id}>{sug.title}</option>
))}
</datalist>
);
}
|
<reponame>ml-worthing/reinforcement-learning<gh_stars>0
/*
* Copyright 2019 ml-worthing
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.mlworthing.rl.utils
import com.github.mlworthing.rl.Environment
trait Printer {

  /** Prints the headline followed by a grid rendering of a policy that maps
    * each state to exactly one action (terminal tiles left blank). */
  def printDeterministicPolicy[State, Action](
    headline: String,
    policy: scala.collection.Map[State, Action],
    environment: Environment[State, Action]): Unit = {
    println(headline)
    println()
    val grid = environment
      .show(policy.get, (_: State, action: Action) => action.toString, cellLength = 1, showForTerminalTiles = false)
    println(grid)
    println()
  }

  /** Prints the headline followed by a grid rendering of a policy that maps
    * each state to a distribution over actions; only actions with positive
    * probability are rendered in each cell. */
  def printStochasticPolicy[State, Action](
    headline: String,
    policy: scala.collection.Map[State, scala.collection.Map[Action, Double]],
    environment: Environment[State, Action]): Unit = {
    // Cell width must fit the state with the most attached actions.
    val maxActionsPerState = policy.map(_._2.size).max
    println(headline)
    println()
    val grid = environment
      .show[scala.collection.Map[Action, Double]](
        policy.get,
        (_: State, actions: scala.collection.Map[Action, Double]) => actions.filter(_._2 > 0).keys.mkString(""),
        cellLength = maxActionsPerState,
        showForTerminalTiles = false
      )
    println(grid)
    println()
  }

  /** Prints the headline followed by a grid rendering of a state-value
    * function, each value formatted as a signed 4-decimal number. */
  def printStateValue[State, Action](
    headline: String,
    stateValue: scala.collection.Map[State, Double],
    environment: Environment[State, Action]): Unit = {
    println(headline)
    println()
    val grid = environment
      .show(stateValue.get, (_: State, d: Double) => f"$d%+2.4f", cellLength = 10, showForTerminalTiles = true)
    println(grid)
    println()
  }
}
|
<filename>src/example-components/MarketingHero/MarketingHero8/index.js
import React from 'react';
import { FontAwesomeIcon } from '@fortawesome/react-fontawesome';
import { Grid, Container, Card, Button } from '@material-ui/core';
import PerfectScrollbar from 'react-perfect-scrollbar';
import hero8 from '../../../assets/images/hero-bg/hero-8.jpg';
import product1 from '../../../assets/images/stock-products/product-1.png';
import product2 from '../../../assets/images/stock-products/product-2.png';
import product3 from '../../../assets/images/stock-products/product-3.png';
import product4 from '../../../assets/images/stock-products/product-4.png';
import product5 from '../../../assets/images/stock-products/product-5.png';
import product6 from '../../../assets/images/stock-products/product-6.png';
import MarketingHeaders3 from '../../MarketingHeaders/MarketingHeaders3';
export default function LivePreviewExample() {
return (
<>
<div className="hero-wrapper bg-composed-wrapper bg-night-sky">
<div className="header-top-section pb-2">
<MarketingHeaders3 />
</div>
<div className="hero-wrapper--content">
<div
className="bg-composed-wrapper--image bg-composed-filter-rm"
style={{ backgroundImage: 'url(' + hero8 + ')' }}
/>
<div className="bg-composed-wrapper--bg bg-second opacity-5" />
<div className="bg-composed-wrapper--content">
<Container className="text-white pt-0 pt-lg-5 z-over">
<Grid container spacing={6} className="py-5">
<Grid item lg={6} xl={7}>
<div className="pt-0 pt-xl-3 pr-0 pr-xl-5">
<h2 className="display-3 font-weight-bold">
Bamburgh React Admin Dashboard with Material-UI PRO
</h2>
<p className="font-size-xl py-3 text-white-50">
Premium admin template powered by the most popular UI
components framework available for React: Material-UI.
Features hundreds of examples making web development fast
and easy. Start from one of the individual apps included
or from the general dashboard and build beautiful scalable
applications and presentation websites.
</p>
<div className="pt-3">
<Button
href="#/"
onClick={(e) => e.preventDefault()}
size="large"
className="btn-pill shadow-second-sm btn-first">
<span className="btn-wrapper--label">
Browse gallery
</span>
<span className="btn-wrapper--icon">
<FontAwesomeIcon icon={['fas', 'arrow-right']} />
</span>
</Button>
<Button
href="#/"
onClick={(e) => e.preventDefault()}
size="large"
className="bg-white-10 text-white btn-pill ml-3">
<span>Documentation</span>
</Button>
</div>
</div>
</Grid>
<Grid item lg={6} xl={5} className="d-flex align-items-center">
<Card className="card-box w-100 mb-5 mt-5 mt-xl-0">
<div className="card-tr-actions">
<Button
variant="text"
className="p-0 d-30 border-0 btn-transition-none text-second"
disableRipple>
<FontAwesomeIcon
icon={['fas', 'ellipsis-h']}
className="font-size-lg"
/>
</Button>
</div>
<div className="card-header-alt d-flex justify-content-center px-4 pt-4">
<div className="text-center">
<h6 className="font-weight-bold font-size-lg mb-1 text-black">
Shopping Cart
</h6>
<p className="text-black-50 mb-0">
Checkout is almost done!
</p>
</div>
</div>
<div className="divider mt-4" />
<div className="scroll-area shadow-overflow">
<PerfectScrollbar options={{ wheelPropagation: false }}>
<div className="d-flex p-4 align-items-center justify-content-between">
<div className="d-flex align-items-center">
<div>
<Card className="card-transparent mb-3 mb-sm-0">
<a
href="#/"
onClick={(e) => e.preventDefault()}
className="card-img-wrapper card-box-hover rounded">
<img
alt="..."
className="card-img-top rounded-sm"
src={product1}
style={{ width: 110 }}
/>
</a>
</Card>
</div>
<div className="pl-3">
<b className="font-weight-bold font-size-lg text-black">
Apple TV Gen 5
</b>
<div className="d-flex pt-1 align-items-center">
<div className="badge badge-neutral-first text-first font-weight-normal font-size-sm h-auto py-1 px-2">
$299
</div>
</div>
</div>
</div>
<div>
<Button className="btn-primary p-0 btn-icon btn-animated-icon-sm hover-scale-sm mr-2 d-30">
<span className="btn-wrapper--icon">
<FontAwesomeIcon
icon={['fas', 'plus']}
className="font-size-xs"
/>
</span>
</Button>
<Button className="btn-neutral-danger p-0 btn-icon btn-animated-icon-sm hover-scale-sm d-30">
<span className="btn-wrapper--icon">
<FontAwesomeIcon
icon={['fas', 'times']}
className="font-size-xs"
/>
</span>
</Button>
</div>
</div>
<div className="divider opacity-7" />
<div className="d-flex p-4 align-items-center justify-content-between">
<div className="d-flex align-items-center">
<div>
<Card className="card-transparent mb-3 mb-sm-0">
<a
href="#/"
onClick={(e) => e.preventDefault()}
className="card-img-wrapper card-box-hover rounded">
<img
alt="..."
className="card-img-top rounded-sm"
src={product2}
style={{ width: 110 }}
/>
</a>
</Card>
</div>
<div className="pl-3">
<b className="font-weight-bold font-size-lg text-black">
iPhone 11 Pro Max
</b>
<div className="d-flex pt-1 align-items-center">
<div className="badge badge-neutral-first text-first font-weight-normal font-size-sm h-auto py-1 px-2">
<b>$1999</b>
</div>
</div>
</div>
</div>
<div>
<Button className="btn-primary p-0 btn-icon btn-animated-icon-sm hover-scale-sm mr-2 d-30">
<span className="btn-wrapper--icon">
<FontAwesomeIcon
icon={['fas', 'plus']}
className="font-size-xs"
/>
</span>
</Button>
<Button className="btn-neutral-danger p-0 btn-icon btn-animated-icon-sm hover-scale-sm d-30">
<span className="btn-wrapper--icon">
<FontAwesomeIcon
icon={['fas', 'times']}
className="font-size-xs"
/>
</span>
</Button>
</div>
</div>
<div className="divider opacity-7" />
<div className="d-flex p-4 align-items-center justify-content-between">
<div className="d-flex align-items-center">
<div>
<Card className="card-transparent mb-3 mb-sm-0">
<a
href="#/"
onClick={(e) => e.preventDefault()}
className="card-img-wrapper card-box-hover rounded">
<img
alt="..."
className="card-img-top rounded-sm"
src={product3}
style={{ width: 110 }}
/>
</a>
</Card>
</div>
<div className="pl-3">
<b className="font-weight-bold font-size-lg text-black">
IPad Pro Gen. 3
</b>
<div className="d-flex pt-1 align-items-center">
<div className="badge badge-neutral-first text-first font-weight-normal font-size-sm h-auto py-1 px-2">
$299
</div>
</div>
</div>
</div>
<div>
<Button className="btn-primary p-0 btn-icon btn-animated-icon-sm hover-scale-sm mr-2 d-30">
<span className="btn-wrapper--icon">
<FontAwesomeIcon
icon={['fas', 'plus']}
className="font-size-xs"
/>
</span>
</Button>
<Button className="btn-neutral-danger p-0 btn-icon btn-animated-icon-sm hover-scale-sm d-30">
<span className="btn-wrapper--icon">
<FontAwesomeIcon
icon={['fas', 'times']}
className="font-size-xs"
/>
</span>
</Button>
</div>
</div>
<div className="divider opacity-7" />
<div className="d-flex p-4 align-items-center justify-content-between">
<div className="d-flex align-items-center">
<div>
<Card className="card-transparent mb-3 mb-sm-0">
<a
href="#/"
onClick={(e) => e.preventDefault()}
className="card-img-wrapper card-box-hover rounded">
<img
alt="..."
className="card-img-top rounded-sm"
src={product4}
style={{ width: 110 }}
/>
</a>
</Card>
</div>
<div className="pl-3">
<b className="font-weight-bold font-size-lg text-black">
Apple Macbook PRO
</b>
<div className="d-flex pt-1 align-items-center">
<div className="badge badge-neutral-first text-first font-weight-normal font-size-sm h-auto py-1 px-2">
$299
</div>
</div>
</div>
</div>
<div>
<Button className="btn-primary p-0 btn-icon btn-animated-icon-sm hover-scale-sm mr-2 d-30">
<span className="btn-wrapper--icon">
<FontAwesomeIcon
icon={['fas', 'plus']}
className="font-size-xs"
/>
</span>
</Button>
<Button className="btn-neutral-danger p-0 btn-icon btn-animated-icon-sm hover-scale-sm d-30">
<span className="btn-wrapper--icon">
<FontAwesomeIcon
icon={['fas', 'times']}
className="font-size-xs"
/>
</span>
</Button>
</div>
</div>
<div className="divider opacity-7" />
<div className="d-flex p-4 align-items-center justify-content-between">
<div className="d-flex align-items-center">
<div>
<Card className="card-transparent mb-3 mb-sm-0">
<a
href="#/"
onClick={(e) => e.preventDefault()}
className="card-img-wrapper card-box-hover rounded">
<img
alt="..."
className="card-img-top rounded-sm"
src={product5}
style={{ width: 110 }}
/>
</a>
</Card>
</div>
<div className="pl-3">
<b className="font-weight-bold font-size-lg text-black">
PRO Headphones V3
</b>
<div className="d-flex pt-1 align-items-center">
<div className="badge badge-neutral-first text-first font-weight-normal font-size-sm h-auto py-1 px-2">
$1199
</div>
</div>
</div>
</div>
<div>
<Button className="btn-primary p-0 btn-icon btn-animated-icon-sm hover-scale-sm mr-2 d-30">
<span className="btn-wrapper--icon">
<FontAwesomeIcon
icon={['fas', 'plus']}
className="font-size-xs"
/>
</span>
</Button>
<Button className="btn-neutral-danger p-0 btn-icon btn-animated-icon-sm hover-scale-sm d-30">
<span className="btn-wrapper--icon">
<FontAwesomeIcon
icon={['fas', 'times']}
className="font-size-xs"
/>
</span>
</Button>
</div>
</div>
<div className="divider opacity-7" />
<div className="d-flex p-4 align-items-center justify-content-between">
<div className="d-flex align-items-center">
<div>
<Card className="card-transparent mb-3 mb-sm-0">
<a
href="#/"
onClick={(e) => e.preventDefault()}
className="card-img-wrapper card-box-hover rounded">
<img
alt="..."
className="card-img-top rounded-sm"
src={product6}
style={{ width: 110 }}
/>
</a>
</Card>
</div>
<div className="pl-3">
<b className="font-weight-bold font-size-lg text-black">
Apple Watch 42mm
</b>
<div className="d-flex pt-1 align-items-center">
<div className="badge badge-neutral-first text-first font-weight-normal font-size-sm h-auto py-1 px-2">
$699
</div>
</div>
</div>
</div>
<div>
<Button className="btn-primary p-0 btn-icon btn-animated-icon-sm hover-scale-sm mr-2 d-30">
<span className="btn-wrapper--icon">
<FontAwesomeIcon
icon={['fas', 'plus']}
className="font-size-xs"
/>
</span>
</Button>
<Button className="btn-neutral-danger p-0 btn-icon btn-animated-icon-sm hover-scale-sm d-30">
<span className="btn-wrapper--icon">
<FontAwesomeIcon
icon={['fas', 'times']}
className="font-size-xs"
/>
</span>
</Button>
</div>
</div>
</PerfectScrollbar>
</div>
<div className="divider" />
<div className="bg-secondary text-black py-3 px-4 text-right">
<Grid container spacing={6}>
<Grid item md={9}>
<div className="text-black-50 text-uppercase">
Total
</div>
</Grid>
<Grid item md={3}>
$12,549
</Grid>
</Grid>
</div>
<div className="divider" />
<div className="p-4">
<Button
size="large"
fullWidth
className="btn-primary font-weight-bold font-size-sm text-uppercase">
Proceed to checkout
</Button>
</div>
</Card>
</Grid>
</Grid>
</Container>
<div className="shadow-container-blocks-2 z-below">
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 1440 320">
<path
fill="var(--light)"
fillOpacity="1"
d="M0,288L15,266.7C30,245,60,203,90,202.7C120,203,150,245,180,240C210,235,240,181,270,170.7C300,160,330,192,360,176C390,160,420,96,450,96C480,96,510,160,540,186.7C570,213,600,203,630,186.7C660,171,690,149,720,165.3C750,181,780,235,810,218.7C840,203,870,117,900,69.3C930,21,960,11,990,10.7C1020,11,1050,21,1080,42.7C1110,64,1140,96,1170,96C1200,96,1230,64,1260,48C1290,32,1320,32,1350,69.3C1380,107,1410,181,1425,218.7L1440,256L1440,320L1425,320C1410,320,1380,320,1350,320C1320,320,1290,320,1260,320C1230,320,1200,320,1170,320C1140,320,1110,320,1080,320C1050,320,1020,320,990,320C960,320,930,320,900,320C870,320,840,320,810,320C780,320,750,320,720,320C690,320,660,320,630,320C600,320,570,320,540,320C510,320,480,320,450,320C420,320,390,320,360,320C330,320,300,320,270,320C240,320,210,320,180,320C150,320,120,320,90,320C60,320,30,320,15,320L0,320Z"></path>
</svg>
</div>
</div>
</div>
</div>
</>
);
}
|
/* strptime() is a POSIX extension; request its declaration explicitly. */
#define _XOPEN_SOURCE 700

#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <time.h>
/*
 * Computes the number of whole days between two dates supplied on the
 * command line in m/d/Y format, e.g.: ./days 01/01/2020 03/01/2020
 *
 * Returns 0 on success, 1 on missing arguments or unparseable dates.
 */
int main(int argc, char* argv[])
{
    if (argc <= 2) {
        /* BUGFIX: report usage on stderr and exit non-zero (was stdout / 0). */
        fprintf(stderr, "Usage: %s <start m/d/Y> <end m/d/Y>\n", argv[0]);
        return 1;
    }

    /* BUGFIX: strptime() only fills the fields named in the format string,
     * so the structs must be zeroed first or mktime() reads indeterminate
     * values. Its return value must also be checked. */
    struct tm start_date;
    memset(&start_date, 0, sizeof start_date);
    if (strptime(argv[1], "%m/%d/%Y", &start_date) == NULL) {
        fprintf(stderr, "Could not parse date: %s\n", argv[1]);
        return 1;
    }
    start_date.tm_isdst = -1; /* let mktime() decide DST */
    time_t start_time = mktime(&start_date);

    struct tm end_date;
    memset(&end_date, 0, sizeof end_date);
    if (strptime(argv[2], "%m/%d/%Y", &end_date) == NULL) {
        fprintf(stderr, "Could not parse date: %s\n", argv[2]);
        return 1;
    }
    end_date.tm_isdst = -1;
    time_t end_time = mktime(&end_date);

    if (start_time == (time_t) -1 || end_time == (time_t) -1) {
        fprintf(stderr, "Could not convert date to calendar time\n");
        return 1;
    }

    /* difftime() yields seconds; truncate to whole days. */
    double diff = difftime(end_time, start_time);
    int days = (int) (diff / (60 * 60 * 24));

    printf("Number of days between %s and %s is %d\n", argv[1], argv[2], days);
    return 0;
}
<reponame>Salsa-Trading/rvt
'use strict';
var path = require('path');
var webpack = require('webpack');
var projectRoot = __dirname.split('/').slice(0, -2).join('/');
var env;
module.exports = function(_env) {
env = _env;
return {
context: projectRoot,
resolve: resolve(),
module: moduleObj(),
entry: entry(),
output: output(),
devtool: devtool(),
externals: externals(),
plugins: plugins()
};
};
// Extensions webpack tries, in order, when resolving extensionless imports.
function resolve() {
  const extensions = ['.ts', '.tsx', '.js', '.jsx', '.json'];
  return { extensions };
}
// Loader configuration: every .ts/.tsx/.js/.jsx file outside node_modules is
// compiled by ts-loader against the project tsconfig, with .d.ts emission
// disabled for the bundle build.
function moduleObj() {
  const tsLoaderRule = {
    test: /\.[t|j]s(x?)$/,
    exclude: /node_modules/,
    loader: 'ts-loader',
    options: {
      configFile: `${projectRoot}/tsconfig.json`,
      compilerOptions: {
        declaration: false
      }
    }
  };
  return { loaders: [tsLoaderRule] };
}
// Single library entry point: the package's public index module.
function entry() {
  const entryFile = path.join(projectRoot, 'src/index.ts');
  return entryFile;
}
// Bundle output: a UMD build named rvt.js under dist/, exposing the RVT global.
function output() {
  const outputDir = path.join(projectRoot, '/dist/');
  return {
    path: outputDir,
    filename: 'rvt.js',
    library: ['RVT'],
    libraryTarget: 'umd',
  };
}
// Source maps are emitted only for development and test builds; production
// builds get webpack's default (no devtool). Uses strict equality and an
// explicit return instead of loose `==` and an implicit undefined.
function devtool() {
  if (env === 'development' || env === 'test') {
    return 'eval-cheap-module-source-map';
  }
  return undefined;
}
// react and react-dom are peer dependencies supplied by the consumer, so
// they are excluded from the bundle and resolved at load time.
function externals() {
  const peerDependencies = {
    'react': 'react',
    'react-dom': 'react-dom'
  };
  return peerDependencies;
}
// Builds the plugin list for the detected environment. The DefinePlugin is
// always present so bundled code can branch on process.env.NODE_ENV.
// Fixes: the local `var plugins` shadowed this function's own name, and the
// environment comparisons used loose `==`; both replaced with const/`===`.
function plugins() {
  const pluginList = [
    new webpack.DefinePlugin({
      'process.env': {
        'NODE_ENV': JSON.stringify(env)
      }
    })
  ];
  if (env === 'production') {
    pluginList.push(new webpack.optimize.OccurrenceOrderPlugin());
    pluginList.push(new webpack.NoEmitOnErrorsPlugin());
  } else if (env === 'development') {
    pluginList.push(new webpack.NoEmitOnErrorsPlugin());
    pluginList.push(new webpack.HotModuleReplacementPlugin());
  } else if (env === 'test') {
    pluginList.push(new webpack.SourceMapDevToolPlugin({
      test: /\.(ts|js|tsx)($|\?)/i // process .js, .ts, .tsx files only
    }));
  }
  return pluginList;
}
|
#!/usr/bin/env bash

## Smoke-tests a freshly built EMQX package (tgz/deb/rpm) and, for
## non-elixir builds, also exercises hot upgrades (relup) against older
## base packages found under _upgrade_base.
## Usage: $0 <MAKE_TARGET>   e.g. emqx-tgz, emqx-enterprise-pkg

set -euo pipefail
set -x

MAKE_TARGET="${1:-}"

# The product flavour is encoded as the make-target prefix.
case "${MAKE_TARGET}" in
    emqx-enterprise-*)
        EMQX_NAME='emqx-enterprise'
        ;;
    emqx-edge-*)
        EMQX_NAME='emqx-edge'
        ;;
    emqx-*)
        EMQX_NAME='emqx'
        ;;
    *)
        echo "Usage $0 <PKG_TARGET>"
        exit 1
        ;;
esac

# The package format is encoded as the make-target suffix.
case "${MAKE_TARGET}" in
    *-tgz)
        PACKAGE_TYPE='tgz'
        ;;
    *-pkg)
        PACKAGE_TYPE='pkg'
        ;;
    *)
        echo "Unknown package type ${1}"
        exit 2
        ;;
esac

# Elixir builds are detected by name; they skip relup testing below.
case "${MAKE_TARGET}" in
    *elixir*)
        IS_ELIXIR='yes'
        ;;
    *)
        IS_ELIXIR='no'
        ;;
esac

export DEBUG=1
export CODE_PATH=${CODE_PATH:-"/emqx"}
export SCRIPTS="${CODE_PATH}/scripts"
export EMQX_NAME
export PACKAGE_PATH="${CODE_PATH}/_packages/${EMQX_NAME}"
export RELUP_PACKAGE_PATH="${CODE_PATH}/_upgrade_base"

# Native packages: choose deb vs rpm from the detected distro.
if [ "$PACKAGE_TYPE" = 'tgz' ]; then
    PKG_SUFFIX="tar.gz"
else
    SYSTEM="$("$SCRIPTS"/get-distro.sh)"
    case "${SYSTEM:-}" in
        ubuntu*|debian*|raspbian*)
            PKG_SUFFIX='deb'
            ;;
        *)
            PKG_SUFFIX='rpm'
            ;;
    esac
fi

# Resolve the package file the build should have produced; fail fast if
# it is missing so later steps do not operate on a stale artifact.
PACKAGE_VERSION="$("$CODE_PATH"/pkg-vsn.sh "${EMQX_NAME}")"
PACKAGE_VERSION_LONG="$("$CODE_PATH"/pkg-vsn.sh "${EMQX_NAME}" --long --elixir "${IS_ELIXIR}")"
PACKAGE_NAME="${EMQX_NAME}-${PACKAGE_VERSION_LONG}"
PACKAGE_FILE_NAME="${PACKAGE_NAME}.${PKG_SUFFIX}"
PACKAGE_FILE="${PACKAGE_PATH}/${PACKAGE_FILE_NAME}"
if ! [ -f "$PACKAGE_FILE" ]; then
    echo "$PACKAGE_FILE is not a file"
    exit 1
fi
# Create the package directory, fetch the paho.mqtt interoperability test
# suite (develop-4.0 branch) if not already present, and install pytest
# to drive it.
emqx_prepare(){
    mkdir -p "${PACKAGE_PATH}"

    if [ ! -d "/paho-mqtt-testing" ]; then
        git clone -b develop-4.0 https://github.com/emqx/paho.mqtt.testing.git /paho-mqtt-testing
    fi
    pip3 install pytest
}
# Install the package, boot the broker, wait for the management API, run
# a basic MQTT v5 connect test, then stop and uninstall — verifying each
# packaging-system step along the way.
emqx_test(){
    cd "${PACKAGE_PATH}"

    local packagename="${PACKAGE_FILE_NAME}"
    case "$PKG_SUFFIX" in
        "tar.gz")
            tar -zxf "${PACKAGE_PATH}/${packagename}"
            export EMQX_ZONES__DEFAULT__MQTT__SERVER_KEEPALIVE=60
            export EMQX_MQTT__MAX_TOPIC_ALIAS=10
            export EMQX_LOG__CONSOLE_HANDLER__LEVEL=debug
            export EMQX_LOG__FILE_HANDLERS__DEFAULT__LEVEL=debug
            # QUIC listener is disabled on ARM runners (CI env limitation).
            if [[ $(arch) == *arm* || $(arch) == aarch64 ]]; then
                export EMQX_LISTENERS__QUIC__DEFAULT__ENABLED=false
            fi
            # sed -i '/emqx_telemetry/d' "${PACKAGE_PATH}"/emqx/data/loaded_plugins
            echo "running ${packagename} start"
            if ! "${PACKAGE_PATH}"/emqx/bin/emqx start; then
                # Dump broker logs before failing to aid CI debugging.
                cat "${PACKAGE_PATH}"/emqx/log/erlang.log.1 || true
                cat "${PACKAGE_PATH}"/emqx/log/emqx.log.1 || true
                exit 1
            fi
            # Poll the status API for up to ~100s (10 tries x 10s sleep).
            IDLE_TIME=0
            while ! curl http://127.0.0.1:18083/api/v5/status >/dev/null 2>&1; do
                if [ $IDLE_TIME -gt 10 ]
                then
                    echo "emqx running error"
                    exit 1
                fi
                sleep 10
                IDLE_TIME=$((IDLE_TIME+1))
            done
            pytest -v /paho-mqtt-testing/interoperability/test_client/V5/test_connect.py::test_basic
            if ! "${PACKAGE_PATH}"/emqx/bin/emqx stop; then
                cat "${PACKAGE_PATH}"/emqx/log/erlang.log.1 || true
                cat "${PACKAGE_PATH}"/emqx/log/emqx.log.1 || true
                exit 1
            fi
            echo "running ${packagename} stop"
            rm -rf "${PACKAGE_PATH}"/emqx
            ;;
        "deb")
            dpkg -i "${PACKAGE_PATH}/${packagename}"
            # 'ii' in dpkg -l means the package is fully installed.
            if [ "$(dpkg -l |grep emqx |awk '{print $1}')" != "ii" ]
            then
                echo "package install error"
                exit 1
            fi

            echo "running ${packagename} start"
            run_test
            echo "running ${packagename} stop"

            dpkg -r "${EMQX_NAME}"
            # 'rc' means removed with config files remaining.
            if [ "$(dpkg -l |grep emqx |awk '{print $1}')" != "rc" ]
            then
                echo "package remove error"
                exit 1
            fi

            dpkg -P "${EMQX_NAME}"
            if dpkg -l |grep -q emqx
            then
                echo "package uninstall error"
                exit 1
            fi
            ;;
        "rpm")
            yum install -y "${PACKAGE_PATH}/${packagename}"
            if ! rpm -q "${EMQX_NAME}" | grep -q "${EMQX_NAME}"; then
                echo "package install error"
                exit 1
            fi

            echo "running ${packagename} start"
            run_test
            echo "running ${packagename} stop"

            rpm -e "${EMQX_NAME}"
            if [ "$(rpm -q emqx)" != "package emqx is not installed" ];then
                echo "package uninstall error"
                exit 1
            fi
            ;;
    esac
}
# Drive a system-installed (deb/rpm) emqx: append test env vars to the
# release's emqx_vars, start the broker, wait for the management API,
# run the basic MQTT v5 connect test, then stop it.
run_test(){
    # sed -i '/emqx_telemetry/d' /var/lib/emqx/loaded_plugins
    # emqx_vars lives next to the release; locate it via the launcher symlink.
    emqx_env_vars=$(dirname "$(readlink "$(command -v emqx)")")/../releases/emqx_vars

    if [ -f "$emqx_env_vars" ];
    then
        tee -a "$emqx_env_vars" <<EOF
export EMQX_ZONES__DEFAULT__MQTT__SERVER_KEEPALIVE=60
export EMQX_MQTT__MAX_TOPIC_ALIAS=10
export EMQX_LOG__CONSOLE_HANDLER__LEVEL=debug
export EMQX_LOG__FILE_HANDLERS__DEFAULT__LEVEL=debug
EOF
        ## for ARM, due to CI env issue, skip start of quic listener for the moment
        [[ $(arch) == *arm* || $(arch) == aarch64 ]] && tee -a "$emqx_env_vars" <<EOF
export EMQX_LISTENERS__QUIC__DEFAULT__ENABLED=false
EOF
    else
        echo "Error: cannot locate emqx_vars"
        exit 1
    fi

    if ! emqx 'start'; then
        # Dump broker logs before failing to aid CI debugging.
        cat /var/log/emqx/erlang.log.1 || true
        cat /var/log/emqx/emqx.log.1 || true
        exit 1
    fi
    # Poll the status API for up to ~100s (10 tries x 10s sleep).
    IDLE_TIME=0
    while ! curl http://127.0.0.1:18083/api/v5/status >/dev/null 2>&1; do
        if [ $IDLE_TIME -gt 10 ]
        then
            echo "emqx running error"
            exit 1
        fi
        sleep 10
        IDLE_TIME=$((IDLE_TIME+1))
    done
    pytest -v /paho-mqtt-testing/interoperability/test_client/V5/test_connect.py::test_basic
    # shellcheck disable=SC2009 # pgrep does not support Extended Regular Expressions
    ps -ef | grep -E '\-progname\s.+emqx\s'
    if ! emqx 'stop'; then
        # shellcheck disable=SC2009 # pgrep does not support Extended Regular Expressions
        ps -ef | grep -E '\-progname\s.+emqx\s'
        echo "ERROR: failed_to_stop_emqx_with_the_stop_command"
        cat /var/log/emqx/erlang.log.1 || true
        cat /var/log/emqx/emqx.log.1 || true
        exit 1
    fi
}
# Hot-upgrade test: for every older base package matching this product's
# version pattern, boot it, install the freshly built package as a relup,
# verify it became the permanent release, then shut down and clean up.
relup_test(){
    if [ ! -d "${RELUP_PACKAGE_PATH}" ]; then
        echo "WARNING: ${RELUP_PACKAGE_PATH} is not a dir, skipped relup test!"
        return 0
    fi
    cd "${RELUP_PACKAGE_PATH}"
    local pattern
    pattern="$EMQX_NAME-$("$CODE_PATH"/pkg-vsn.sh "${EMQX_NAME}" --long --vsn_matcher)"
    while read -r pkg; do
        packagename=$(basename "${pkg}")
        tar -zxf "$packagename"
        if ! ./emqx/bin/emqx start; then
            cat emqx/log/erlang.log.1 || true
            cat emqx/log/emqx.log.1 || true
            exit 1
        fi
        ./emqx/bin/emqx_ctl status
        ./emqx/bin/emqx versions
        # Stage the new package and install it as a hot upgrade.
        cp "$PACKAGE_FILE" ./emqx/releases/
        ./emqx/bin/emqx install "${PACKAGE_VERSION}"
        # The new version must now be marked 'permanent'.
        [ "$(./emqx/bin/emqx versions |grep permanent | awk '{print $2}')" = "${PACKAGE_VERSION}" ] || exit 1
        ./emqx/bin/emqx_ctl status
        ./emqx/bin/emqx stop
        rm -rf emqx
    done < <(find . -maxdepth 1 -name "${pattern}.tar.gz")
}
emqx_prepare
emqx_test
# Relup (hot upgrade) artifacts are not produced for elixir builds yet.
if [ "$IS_ELIXIR" = 'yes' ]; then
    echo "WARNING: skipped relup test for elixir"
else
    relup_test
fi
|
#!/bin/bash
## SGE batch job: run tide-results over every spectrumrecords file for
## the configured species set.
#$ -V                # Inherit the submission environment
#$ -cwd              # Start job in submission directory
#$ -j y              # Combine stderr and stdout
#$ -o $JOB_NAME.o$JOB_ID
#$ -pe 4way 8        # NOTE(review): old comment claimed "16 tasks/node, 32 cores total" -- confirm against the 4way/8 request
#$ -q normal
#$ -l h_rt=06:00:00  # Run time (hh:mm:ss)
#$ -M borgeson@utexas.edu
#$ -m e              # Email at End of job ('e' alone does not mail at Begin)
#$ -P data
set -x               # Echo commands, use "set echo" with csh

SPEC="12spe34"
RESULTS="$HOME/src.MS/tide/1.0/tide-results"
PROT="$SPEC/DB/${SPEC}_all_combined_miss2.protidx"
#AUX="${PROT/_miss2.protidx/.fasta.auxlocs}"
#--aux_locations=$AUX --show_all_proteins=True
SUFFIX=".results.one"
# NOTE(review): SGE directives are conventionally placed before the first
# executable line; confirm this -N is still honored here.
#$ -N tider_12spe34

# Iterate with a glob instead of parsing `ls` output, and quote every
# expansion, so filenames with spaces or glob characters are safe.
for SR in "$SPEC"/tide/*.spectrumrecords
do
  RES=${SR/.spectrumrecords/.tideres}
  OUT=$(basename "$SR")
  OUT=$SPEC/tide/${OUT/.spectrumrecords/}$SUFFIX
  "$RESULTS" --proteins="$PROT" --spectra="$SR" --results_file="$RES" --out_filename="$OUT" --out_format=text
done
|
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from sklearn import metrics
from sklearn.preprocessing import StandardScaler
from sklearn.model_selection import train_test_split
from sklearn.linear_model import LinearRegression

# Load the dataset; assumes the last column is the numeric target and all
# preceding columns are numeric features -- TODO confirm against the CSV.
data = pd.read_csv('housing_data.csv')

# Features = every column but the last; target = the last column.
X = data.iloc[:, :-1].values
y = data.iloc[:, -1].values

# Hold out 20% of rows for evaluation. No random_state is fixed, so the
# split (and the printed score) differs between runs.
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size = 0.2)

# Fit the scaler on the training split only, then apply the same
# transform to the test split — avoids leaking test statistics.
scaler = StandardScaler()
X_train = scaler.fit_transform(X_train)
X_test = scaler.transform(X_test)

# Ordinary least-squares linear regression.
model = LinearRegression()
model.fit(X_train, y_train)

y_pred = model.predict(X_test)

# NOTE(review): r2_score is the R^2 coefficient of determination, not a
# classification accuracy, despite the printed label.
accuracy = metrics.r2_score(y_test, y_pred)
print('Accuracy:', accuracy)
import collections

# Sample corpus: each entry is one document.
texts = [
    "This is a text about dogs",
    "This is a text about cats"
]

# Tally word frequencies across every text. Counter.update counts each
# whitespace-separated token, replacing the manual inner loop.
counter = collections.Counter()
for text in texts:
    counter.update(text.split())

# The 10 most frequent words, ordered by descending count.
most_common_words = counter.most_common(10)

for word, count in most_common_words:
    print(word, count)
-- Names of employees earning strictly more than the company-wide
-- average salary (the subquery is evaluated over all employees).
SELECT name FROM employees
WHERE salary > (SELECT AVG(salary) FROM employees);
<gh_stars>0
import discord
import os
import asyncio
import time
import requests
from discord import FFmpegPCMAudio
from youtube_dl import YoutubeDL
from discord.ext import commands
from bot.models.track import AsyncAudioSource
from bot.cogs.track import Music
from bot.cogs.latency import Latency
# Service endpoints are injected via the environment; a missing variable
# fails fast at import time with KeyError.
CACHED_USER_ENDPOINT = os.environ['CACHED_USER_ENDPOINT']
USER_MSG_ENDPOINT = os.environ['USER_MSG_ENDPOINT']

from bot.util.log import setup_logging_queue
setup_logging_queue()

# Single bot instance; all gateway intents enabled, commands use '!'.
client = commands.Bot(intents=discord.Intents.all(), command_prefix='!')

# Music Cog inspired by https://gist.github.com/vbe0201/ade9b80f2d3b64643d854938d40a0a2d
client.add_cog(Music(client))
client.add_cog(Latency(client))
# Called when the client is done preparing the data received
# from Discord. Usually after login is successful and the
# Client.guilds and co. are filled up.
@client.event
async def on_ready():
    # Fired once login and initial cache population complete.
    print("Connected")
# Called when the client has disconnected from Discord.
# This could happen either through the internet being disconnected,
# explicit calls to logout, or Discord terminating
# the connection one way or the other.
# This function can be called many times.
@client.event
async def on_disconnect():
    # May fire repeatedly (network drops, logout, Discord-side closes).
    print("Disconnected")
# Called when a member updates their profile
# This is called when one or more of the following things change
# status, activity, nickname, roles, pending
@client.event
async def on_member_update(previous_member_state, current_member_state):
    # Intentionally a no-op; the handler exists so the event is consumed.
    pass
@client.event
async def on_message(message):
    # Run registered commands first: overriding on_message otherwise
    # swallows command processing entirely.
    await client.process_commands(message)
    # Ignore our own messages and anything that is a command.
    if message.author == client.user or message.content.startswith("!"):
        return
    # Easter egg: queue a fixed song when asked to sing.
    if "sing your favorite song" in message.content:
        ctx = await client.get_context(message)
        await ctx.send("🥰")
        return await ctx.invoke(client.get_command('play'), url="https://www.youtube.com/watch?v=l1uoTMkhUiE")
    # Look the author up in the user-cache service, then fetch any stored
    # reply for that user and "type" it into the channel.
    params = {
        'name': message.author.name,
        'id': message.author.discriminator
    }
    user_response = get(CACHED_USER_ENDPOINT, params=params)
    if not user_response:
        print("No user found, exiting gracefully")
        return
    target_user_id = user_response["id"]
    message_response = get(USER_MSG_ENDPOINT + str(target_user_id))
    if message_response:
        await _type(message.channel, message_response['message'])
async def _type(channel, msg):
    """Show a typing indicator for ~2 seconds, then send msg to channel."""
    await channel.trigger_typing()
    await asyncio.sleep(2)
    await channel.send(msg)
def get(req, params=None):
    """GET `req` and return the decoded JSON body.

    Returns None on any request failure. RequestException covers
    HTTPError (the only case the original caught) plus connection
    errors and timeouts, so a flaky endpoint degrades gracefully
    instead of crashing the event handler.
    """
    try:
        resp = requests.get(req, params=params)
        resp.raise_for_status()
        return resp.json()
    except requests.exceptions.RequestException as err:
        print(err)
        return None
# Blocks until the process is terminated or the bot logs out.
client.run(os.environ.get("BOT_TOKEN"))
|
/**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.analytics.math.interpolation;
import java.util.List;
import com.opengamma.analytics.math.function.Function1D;
import com.opengamma.analytics.math.interpolation.data.InterpolatorNDDataBundle;
import com.opengamma.util.tuple.Pair;
/**
 * Shepard (inverse-distance-weighted) interpolation in an arbitrary
 * number of dimensions: the value at a point is a weighted average of
 * all sample values, weighted by a radial basis function of the
 * distance to each sample point.
 */
public class ShepardInterpolatorND extends InterpolatorND {

  // Weight function phi(distance), parameterized by the supplied power.
  private final Function1D<Double, Double> _basisFunction;

  public ShepardInterpolatorND(final double power) {
    _basisFunction = new ShepardNormalizedRadialBasisFunction(power);
  }

  /**
   * Interpolates at x as sum(phi(d_i) * y_i) / sum(phi(d_i)) over all
   * data points. If x coincides exactly with a data point, that value is
   * returned directly — this also avoids evaluating phi at distance zero.
   */
  @Override
  public Double interpolate(final InterpolatorNDDataBundle data, final double[] x) {
    validateInput(data, x);
    final List<Pair<double[], Double>> rawData = data.getData();
    final int n = rawData.size();
    double sum = 0;
    double normSum = 0;
    double[] xi;
    double yi;
    double phi;
    double dist;
    Pair<double[], Double> temp;
    for (int i = 0; i < n; i++) {
      temp = rawData.get(i);
      xi = temp.getFirst();
      yi = temp.getSecond();
      dist = DistanceCalculator.getDistance(x, xi);
      if (dist == 0.0) {
        // Exact hit on a sample point: return its value verbatim.
        return yi;
      }
      phi = _basisFunction.evaluate(dist);
      sum += yi * phi;
      normSum += phi;
    }
    return sum / normSum;
  }

  @Override
  public InterpolatorNDDataBundle getDataBundle(final double[] x, final double[] y, final double[] z, final double[] values) {
    return new InterpolatorNDDataBundle(transformData(x, y, z, values));
  }

  @Override
  public InterpolatorNDDataBundle getDataBundle(final List<Pair<double[], Double>> data) {
    return new InterpolatorNDDataBundle(data);
  }
}
|
/// <summary>Base class for all message types.</summary>
public class MsgBase
{
    // Base class for messages
}
/// <summary>Example message type that writes its payload to the console.</summary>
public class MsgTest : MsgBase
{
    /// <summary>Writes <paramref name="message"/> prefixed with "Test Message: ".</summary>
    public void SendMessage(string message)
    {
        // Fully qualified: no `using System;` directive is visible in this
        // file, so bare `Console` would not resolve outside implicit-usings
        // projects.
        System.Console.WriteLine("Test Message: " + message);
    }
}
#!/bin/bash
#
# Copyright (c) 2012-2017 Kris Jusiak (kris at jusiak dot net)
#
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
#
# Emits the single-header boost/sml.hpp to stdout: a fixed preamble
# (license, version macro, per-compiler shims), then the preprocessed
# amalgamation of the boost/sml headers, then the matching #undef/pragma
# cleanup. Args: $1 version, $2 revision, $3 patch.
pph() {
  version=$1 revision=$2 patch=$3
  echo "//"
  echo "// Copyright (c) 2016-2017 Kris Jusiak (kris at jusiak dot net)"
  echo "//"
  echo "// Distributed under the Boost Software License, Version 1.0."
  echo "// (See accompanying file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)"
  echo "//"
  echo "#pragma once"
  echo "#if (__cplusplus < 201305L && _MSC_VER < 1900)"
  echo "#error \"[Boost].SML requires C++14 support (Clang-3.4+, GCC-5.1+, MSVC-2015+)\""
  echo "#else"
  echo "#define BOOST_SML_VERSION ${version}'${revision}'${patch}"
  echo "#define BOOST_SML_NAMESPACE_BEGIN namespace boost { namespace sml { inline namespace v${version}_${revision}_${patch} {"
  echo "#define BOOST_SML_NAMESPACE_END }}}"
  echo "#if defined(__clang__)"
  echo "#define __BOOST_SML_UNUSED __attribute__((unused))"
  echo "#define __BOOST_SML_VT_INIT {}"
  echo "#define __BOOST_SML_ZERO_SIZE_ARRAY(...) __VA_ARGS__ _[0]"
  echo "#define __BOOST_SML_ZERO_SIZE_ARRAY_CREATE(...)"
  echo "#define __BOOST_SML_TEMPLATE_KEYWORD template"
  echo "#pragma clang diagnostic push"
  echo "#pragma clang diagnostic ignored \"-Wgnu-string-literal-operator-template\""
  echo "#pragma clang diagnostic ignored \"-Wzero-length-array\""
  echo "#elif defined(__GNUC__)"
  echo "#if !defined(__has_builtin)"
  echo "#define __has_builtin(...) 0"
  echo "#endif"
  echo "#define __BOOST_SML_UNUSED __attribute__((unused))"
  echo "#define __BOOST_SML_VT_INIT {}"
  echo "#define __BOOST_SML_ZERO_SIZE_ARRAY(...) __VA_ARGS__ _[0]"
  echo "#define __BOOST_SML_ZERO_SIZE_ARRAY_CREATE(...) __VA_ARGS__ ? __VA_ARGS__ : 1"
  echo "#define __BOOST_SML_TEMPLATE_KEYWORD template"
  echo "#pragma GCC diagnostic push"
  echo "#pragma GCC diagnostic ignored \"-Wpedantic\""
  echo "#elif defined(_MSC_VER)"
  echo "#define __has_builtin(...) __has_builtin##__VA_ARGS__"
  echo "#define __has_builtin__make_integer_seq(...) 1"
  echo "#define __BOOST_SML_UNUSED"
  echo "#define __BOOST_SML_VT_INIT"
  echo "#define __BOOST_SML_ZERO_SIZE_ARRAY(...)"
  echo "#define __BOOST_SML_ZERO_SIZE_ARRAY_CREATE(...) __VA_ARGS__ ? __VA_ARGS__ : 1"
  echo "#if (_MSC_VER >= 1910) // MSVC 2017"
  echo "#define __BOOST_SML_TEMPLATE_KEYWORD template "
  echo "#else"
  echo "#define __BOOST_SML_TEMPLATE_KEYWORD"
  echo "#endif"
  echo "#pragma warning(disable:4503)"
  echo "#pragma warning(disable:4200)"
  echo "#endif"
  # Amalgamation: work on a scratch copy so the sed edits never touch the
  # real headers. Lines tagged __pph__ and clang-format markers are
  # comment-protected, #define lines are shielded as ##define so the
  # second cpp pass does not expand them, then everything is restored.
  rm -rf tmp && mkdir tmp && cp -r boost tmp && cd tmp
  find . -iname "*.hpp" | xargs sed -i "s/\(.*\)__pph__/\/\/\/\/\1/g"
  find . -iname "*.hpp" | xargs sed -i "s/.*\(clang-format.*\)/\/\/\/\/\1/g"
  echo '
BOOST_SML_NAMESPACE_BEGIN
#include "boost/sml/state_machine.hpp"
#include "boost/sml/transition_table.hpp"' > tmp.hpp
  cpp -C -P -nostdinc -I. tmp.hpp 2>/dev/null | \
    sed "s/\/\/\/\///" | \
    sed "s/[ \t$]*#[ \t]*define/##define/g" | \
    cpp -P -I. -fpreprocessed - 2>/dev/null | \
    sed "s/clang-format\(.*\)/\/\/ clang-format\1/g" | \
    sed "s/^##define/#define/g"
  cd .. && rm -rf tmp
  echo "BOOST_SML_NAMESPACE_END"
  echo "#undef __BOOST_SML_UNUSED"
  echo "#undef __BOOST_SML_VT_INIT"
  echo "#undef __BOOST_SML_ZERO_SIZE_ARRAY"
  echo "#undef __BOOST_SML_ZERO_SIZE_ARRAY_CREATE"
  echo "#undef __BOOST_SML_TEMPLATE_KEYWORD"
  echo "#if defined(__clang__)"
  echo "#pragma clang diagnostic pop"
  echo "#elif defined(__GNUC__)"
  echo "#undef __has_builtin"
  echo "#pragma GCC diagnostic pop"
  echo "#elif defined(_MSC_VER)"
  echo "#undef __has_builtin"
  echo "#undef __has_builtin__make_integer_seq"
  echo "#endif"
  echo "#endif"
  echo
}
set -e
# The version triple comes from the first CHANGELOG heading, e.g.
# "[1.2.3]" -> "1 2 3", and becomes pph's three arguments.
cd ${0%/*}/../include && pph `head -1 ../doc/CHANGELOG.md | sed "s/.*\[\(.*\)\].*/\1/" | tr '.' ' '` > "boost/sml.hpp"
${CLANG_FORMAT:=clang-format} -i "boost/sml.hpp"
|
<gh_stars>0
/*
Copyright (c) 2016 Miouyouyou <Myy>
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files
(the "Software"), to deal in the Software without restriction,
including without limitation the rights to use, copy, modify, merge,
publish, distribute, sublicense, and/or sell copies of the Software,
and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
#ifndef _MYY_EGL_ATTRS_H_
#define _MYY_EGL_ATTRS_H_ 1

#include <EGL/egl.h>

/* EGL_OPENGL_ES3_BIT only exists in EGL 1.5+ headers; older headers
 * provide the equivalent KHR extension bit via <EGL/eglext.h>. */
#ifdef EGL_OPENGL_ES3_BIT
#define MYY_GLES3_BIT EGL_OPENGL_ES3_BIT
#else
#include <EGL/eglext.h>
#define MYY_GLES3_BIT EGL_OPENGL_ES3_BIT_KHR
#endif

/* Common EGLConfig attribute list: on-screen window surface, GLES2
 * conformant, 4x multisampling, 5-6-5 color and a 16-bit depth buffer.
 * NOTE(review): ALPHA_SIZE 8 combined with 5/6/5 RGB is unusual --
 * confirm whether RGBA8888 (or ALPHA_SIZE 0) was intended. */
#define MYY_EGL_COMMON_PC_ATTRIBS \
	EGL_SURFACE_TYPE, EGL_WINDOW_BIT, \
	EGL_CONFORMANT, EGL_OPENGL_ES2_BIT, \
	EGL_SAMPLES, 4, \
	EGL_RED_SIZE, 5, \
	EGL_GREEN_SIZE, 6, \
	EGL_BLUE_SIZE, 5, \
	EGL_ALPHA_SIZE, 8, \
	EGL_DEPTH_SIZE, 16

/* Mobile targets currently use the same attribute list as PC. */
#define MYY_EGL_COMMON_MOBILE_ATTRIBS MYY_EGL_COMMON_PC_ATTRIBS

/* Request an OpenGL ES 2.x client context. */
#define MYY_CURRENT_GL_CONTEXT EGL_CONTEXT_CLIENT_VERSION, 2

#endif
|
# This is a custom module we've made.
# Modules are files full of code that you can import into your programs.
# This one teaches our turtle to draw various shapes.
import turtle
def draw_circle(turtle, color, size, x, y):
    """Draw a filled circle of radius ``size`` starting from (x, y).

    The first parameter intentionally shadows the ``turtle`` module:
    pass a Turtle instance.
    """
    turtle.penup()
    turtle.color(color)
    turtle.fillcolor(color)
    turtle.goto(x,y)
    turtle.pendown()
    turtle.begin_fill()
    turtle.circle(size)
    turtle.end_fill()
def draw_triangle(turtle, color, size, x, y):
    """Draw a filled equilateral triangle (side ``size * 3``) from (x, y).

    Turns 120 degrees per corner; resets the heading to 0 afterwards so
    subsequent shapes start level.
    """
    turtle.penup()
    turtle.color(color)
    turtle.fillcolor(color)
    turtle.goto(x,y)
    turtle.pendown()
    turtle.begin_fill()
    for i in range (3):
        turtle.forward(size*3)
        turtle.left(120)
    turtle.end_fill()
    turtle.setheading(0)
def draw_square(turtle, color, size, x, y):
    """Draw a filled square (side ``size * 2``) from (x, y).

    Turns 90 degrees per corner; resets the heading to 0 afterwards.
    """
    turtle.penup()
    turtle.color(color)
    turtle.fillcolor(color)
    turtle.goto(x,y)
    turtle.pendown()
    turtle.begin_fill()
    for i in range (4):
        turtle.forward(size*2)
        turtle.left(90)
    turtle.end_fill()
    turtle.setheading(0)
def draw_star(turtle, color, size, x, y):
    """Draw a filled five-pointed star (edge ``size * 2``) from (x, y).

    Uses 144-degree right turns, the classic five-point star angle.
    NOTE(review): the extra forward() after the loop draws a sixth edge;
    it appears to retrace/close the outline -- confirm it is intentional.
    """
    turtle.penup()
    turtle.color(color)
    turtle.fillcolor(color)
    turtle.goto(x,y)
    turtle.pendown()
    turtle.begin_fill()
    turtle.right(144)
    for i in range(5):
        turtle.forward(size*2)
        turtle.right(144)
    turtle.forward(size*2)
    turtle.end_fill()
    turtle.setheading(0)
<gh_stars>0
import nock from 'nock'
import { createTestEvent, createTestIntegration } from '@segment/actions-core'
import Sendgrid from '..'
// Shared test harness and a fixed timestamp for deterministic events.
const sendgrid = createTestIntegration(Sendgrid)
const timestamp = new Date().toISOString()

// Run the same suite against both profile-API environments; only the
// profiles host differs ('.com' for production, '.build' for stage).
for (const environment of ['stage', 'production']) {
  const settings = {
    sendGridApiKey: 'sendGridApiKey',
    profileApiEnvironment: environment,
    profileApiAccessToken: 'c',
    spaceId: 'spaceId',
    sourceId: 'sourceId'
  }
  const endpoint = `https://profiles.segment.${environment === 'production' ? 'com' : 'build'}`

  describe(`${environment} - send Email`, () => {
    it('should send Email', async () => {
      // Mock the traits lookup used to render {{profile.traits.*}}.
      nock(`${endpoint}/v1/spaces/spaceId/collections/users/profiles/user_id:jane`)
        .get('/traits?limit=200')
        .reply(200, {
          traits: {
            firstName: '<NAME>',
            lastName: 'Browning'
          }
        })
      // Mock the external-ids lookup that resolves the recipient email.
      nock(`${endpoint}/v1/spaces/spaceId/collections/users/profiles/user_id:jane`)
        .get('/external_ids?limit=25')
        .reply(200, {
          data: [
            {
              type: 'user_id',
              id: 'jane'
            },
            {
              type: 'phone',
              id: '+1234567891'
            },
            {
              type: 'email',
              id: '<EMAIL>'
            }
          ]
        })
      // Exact payload the destination must POST to Sendgrid, with the
      // templates already rendered from the mocked traits.
      const expectedSendGridRequest = {
        personalizations: [
          {
            to: [
              {
                email: '<EMAIL>',
                name: '<NAME>'
              }
            ],
            bcc: [
              {
                email: '<EMAIL>'
              }
            ],
            custom_args: {
              source_id: 'sourceId',
              space_id: 'spaceId',
              user_id: 'jane'
            }
          }
        ],
        from: {
          email: '<EMAIL>',
          name: '<NAME>'
        },
        reply_to: {
          email: '<EMAIL>',
          name: '<NAME>'
        },
        subject: 'Hello Browning First Name.',
        content: [
          {
            type: 'text/html',
            value: 'Hi First Name, Welcome to segment'
          }
        ]
      }
      const sendGridRequest = nock('https://api.sendgrid.com')
        .post('/v3/mail/send', expectedSendGridRequest)
        .reply(200, {})
      const responses = await sendgrid.testAction('sendEmail', {
        event: createTestEvent({
          timestamp,
          event: 'Audience Entered',
          userId: 'jane'
        }),
        settings,
        mapping: {
          userId: { '@path': '$.userId' },
          fromEmail: '<EMAIL>',
          fromName: '<NAME>',
          replyToEmail: '<EMAIL>',
          replyToName: 'Test user',
          bcc: JSON.stringify([
            {
              email: '<EMAIL>'
            }
          ]),
          previewText: '',
          subject: 'Hello {{profile.traits.lastName}} {{profile.traits.firstName}}.',
          body: 'Hi {{profile.traits.firstName}}, Welcome to segment',
          bodyType: 'html',
          bodyHtml: 'Hi {{profile.traits.firstName}}, Welcome to segment'
        }
      })
      // Two profile-API calls plus the Sendgrid call; the consumed nock
      // interceptor proves the outgoing payload matched exactly.
      expect(responses.length).toEqual(3)
      expect(sendGridRequest.isDone()).toEqual(true)
    })
  })
}
|
#Training details
#GPU: NVIDIA® Tesla® V100 4cards 120epochs 55h

# Launch MobileNetV1 training on 4 GPUs. The FLAGS_* variables are
# framework memory/GC tuning knobs (presumably PaddlePaddle eager
# deletion settings -- confirm against train.py's framework).
export CUDA_VISIBLE_DEVICES=0,1,2,3
export FLAGS_fast_eager_deletion_mode=1
export FLAGS_eager_delete_tensor_gb=0.0
export FLAGS_fraction_of_gpu_memory_to_use=0.98

# NOTE(review): the final argument line ends with a continuation
# backslash -- confirm no trailing argument was lost.
python train.py \
       --model=MobileNetV1 \
       --batch_size=256 \
       --model_save_dir=output/ \
       --lr_strategy=piecewise_decay \
       --num_epochs=120 \
       --lr=0.1 \
       --l2_decay=3e-5 \
|
<reponame>oooofeiger/ReactBasic-project
import React from 'react';
import {Row, Col} from 'antd';
export default class PCNewsDetails extends React.Component{
constructor(){
super();
this.state = {
newsItem:''
}
}
componentDidMount(){
fetch('http://www.feiger.com.cn/Handler.ashx?action=getnewsitem&uniquekey=' + this.props.match.params.uniquekey)
.then(res=>res.json())
.then(json => {
this.setState({newsItem: json});
document.title = this.state.newsItem.title + ' - React News | React 驱动的新闻平台';
})
}
createMarkup(){
return {__html: this.state.newsItem.pagecontent};
}
render(){
return (
<div>
<Row>
<Col span={2}></Col>
<Col span={14} class='container'>
<div class='articleContainer' dangerousSetInnerHTML={this.createMarkup()}></div>
</Col>
<Col span={6}></Col>
<Col span={2}></Col>
</Row>
</div>
)
}
}
|
<gh_stars>0
import { Typography } from "@material-ui/core";
import React from "react";
const SimplePropertyContent = ({content}) => {
return (
<Typography variant="body1">{content}</Typography>
)
}
export default SimplePropertyContent; |
// Chart margins. NOTE(review): `width`/`height` on the right-hand sides
// are expected to pre-exist as globals (r2d3 injects them); as written,
// `var width = width - ...` would otherwise evaluate to NaN -- confirm.
var margin = {top: 60, right: 10, bottom: 60, left: 10},
    width = width - margin.left - margin.right,
    height = height - margin.top - margin.bottom;

var dataset;

// define svg object
var svg = d3.select("body")
  .append("svg")
  .attr("width", width + margin.left + margin.right)
  .attr("height", height + margin.top + margin.bottom)
  .append("g")
  .attr("transform", "translate(" + margin.left + "," + margin.top + ")");

// poke data at it
// NOTE(review): the callback's first parameter is named `root` but the
// body reads a free variable `data` -- confirm r2d3 supplies `data`
// globally (its documented render signature passes data first).
r2d3.onRender(function(root, svg, width, height, options, error){
  if(error) {
    console.log(error);
  }

  // Coerce the numeric columns (index, ideal point, CI bounds).
  data.forEach(function(d) {
    d.indx = +d.indx;
    d.idealPoint = +d.idealPoint;
    d.lo = +d.lo;
    d.up = +d.up;
  });
  dataset = data;

  // Color legislators by party: R red, D blue, I green.
  colorValue = function(d) {
    return d.party; },
  color = d3.scale.ordinal()
    .domain(["R","D","I"])
    .range(["red","blue","green"]);

  // x spans the credible intervals; y spans the rank index, both with a
  // small padding factor.
  var xmin = d3.min(dataset, function(d) { return +d.lo; });
  var xmax = d3.max(dataset, function(d) { return +d.up; });
  var xrange = xmax-xmin;
  var ymin = d3.min(dataset, function(d) { return +d.indx; });
  var ymax = d3.max(dataset, function(d) { return +d.indx; });
  var yrange = ymax - ymin;
  var yup = ymax + (.0025 * yrange);
  var ylo = ymin - (.0025 * yrange);

  var xScale = d3.scale.linear()
    .range([0, width])
    .domain([xmin - .01*xrange, xmax + .01*xrange]);
  var yScale = d3.scale.linear()
    .range([height, 0])
    .domain([ylo, yup]);
  var xTickValues = d3.range(-2,2,1);

  // axes grids
  function make_x_grid(){
    return d3.svg.axis()
      .scale(xScale)
      .orient("bottom")
  }
  svg.append("g")
    .attr("class", "grid")
    .style("opacity",0.15)
    .call(make_x_grid()
      .tickSize(height, 0, 0)
      .tickFormat("")
    );

  // plot data: one small square per legislator at (idealPoint, rank).
  svg.selectAll("rect")
    .data(dataset)
    .enter()
    .append("svg:rect")
    .attr("x",function(d){
      //console.log(d.lastnm,d.idealPoint);
      return xScale(d.idealPoint) - 1.5;
    })
    .attr("y",function(d){
      return yScale(d.indx) - 1.5;
    })
    .attr("width",4)
    .attr("height",4)
    .style("opacity",.50)
    .style("fill",function(d) {
      return color(colorValue(d));
    });

  // Hover affordances: leader line, highlight square, CI emphasis, and
  // an info panel for name/party/rank text.
  var horizontal = svg.append("g")
    .append("svg:line")
    .style("stroke-width", "1px")
    .style("stroke","#999");
  var highLighted = svg.append("g")
    .append("svg:rect")
    .attr("class","rect")
    .attr("width",11)
    .attr("height",11)
    .attr("x",1)
    .attr("y",1)
    .style("fill","black")
    .style("opacity",0.00);
  var confidenceInterval = svg.append("g")
    .attr("class","line")
    .style("opacity",0.00)
    .attr("stroke-width", "4px");
  var info = svg.append("g")
    .attr("transform", "translate(" + (width - -40) + ",0)")
    .style("fill", "#777")
    .style("letter-spacing","-1px")
    .attr("font-size", (width*0.003) + "em")
    .style("font-size", "14px");
  info.append("text")
    .attr("class","label");
  // info.append("text")
  //   .attr("class","party")
  //   .attr("transform", "translate(0, 34)");
  info.append("text")
    .attr("class","rank")
    .attr("transform", "translate(0, 34)");

  // confidence intervals: one horizontal bar per legislator (95% CI).
  svg.selectAll("confidenceIntervals")
    .data(dataset)
    .enter()
    .append("svg:line")
    .attr("class","line")
    .attr("x1",function(d){
      return xScale(d.lo);
    })
    .attr("x2",function(d){
      return xScale(d.up);
    })
    .attr("y1",function(d){
      return yScale(d.indx);
    })
    .attr("y2",function(d){
      return yScale(d.indx);
    })
    .style("opacity",0.11)
    .style("stroke","lightslategrey")
    .style("stroke-width","4px")
    .on("mouseover",
      function(){
        d3.select(this).style("opacity",.55);
      }
    )
    .on("mouseout",
      function(){
        d3.select(this).style("opacity",.11);
        highLighted.style("opacity",0.00);
      }
    )
    .on("mousemove",mymousemove);

  // Track the mouse's y position, snap to the nearest rank, highlight
  // that legislator and position the info panel/leader line.
  // NOTE(review): y0, i, d, xLoc, yCoord, etc. are implicit globals --
  // harmless here but worth declaring with var/let.
  function mymousemove() {
    y0 = yScale.invert(d3.mouse(this)[1]);
    i = d3.round(y0);
    // Clamp to the data's rank range.
    if(i>ymax){
      i = ymax;
    }
    if(i<ymin){
      i = ymin;
    }
    d = dataset[i-1];
    xLoc = xScale(d.idealPoint);
    yCoord = yScale(d.indx);
    highLighted.style("opacity",.85);
    highLighted.attr("x",xLoc-5);
    highLighted.attr("y",yCoord-5);
    hcol = color(colorValue(d));
    highLighted.style("fill",hcol);
    // Keep the info panel inside the top/bottom chart bounds.
    if(yCoord>(height-100)){
      yCoord2 = height-100;
    } else if(yCoord<(margin.top+100)) {
      yCoord2=margin.top+100;
    } else {
      yCoord2 = yCoord;
    }
    // Place the label to the right of the CI bar, or flip left near the edge.
    xCoord1 = xScale(d.up)+6;
    if(xCoord1<margin.left+480){
      xCoord2 = xCoord1+40;
      xTextLoc = xCoord2+6;
    } else{
      xCoord2 = xCoord1-100;
      xTextLoc = xCoord2-6;
    }
    info.attr("transform", "translate(" + xTextLoc + "," + (yCoord2+9) + ")");
    info.select(".name").text(d.firstnm + " " + d.lastnm);
    info.select(".party").text("(" + d.party + " " + d.state + "-" + d.district + ")");
    info.select(".rank").text("Rank: " + d.indx + " of " + ymax);
    info.select(".label").text(d.label);
    horizontal.attr("x1", xCoord1);
    horizontal.attr("x2", xCoord2);
    horizontal.attr("y1", yCoord);
    horizontal.attr("y2", yCoord2);
  }

  // titling
  // svg.append("text")
  //   .attr("class", "title")
  //   .attr("x", 12)
  //   .attr("y", 31)
  //   .style("fill","#333")
  //   .attr("font-size", 14)
  //   .text("Ideal points, 115th U.S. House of Representatives");
  // svg.append("text")
  //   .attr("class", "title")
  //   .attr("x", 12)
  //   .attr("y", 51)
  //   .style("fill","#333")
  //   .attr("font-size", 14)
  //   .text("Computed by <NAME>.");
  svg.append("text")
    .attr("class", "title")
    .attr("x", 12)
    .attr("y", 61)
    .style("fill","#333")
    .attr("font-size", 14)
    .text("Legislators sorted by estimated ideal point.");
  svg.append("text")
    .attr("class", "title")
    .attr("x",12)
    .attr("y",71)
    .attr("font-size", 12)
    .style("fill","#444")
    .attr("text-anchor","start")
    .text("Horizontal bars cover 95% credible intervals.");
  // Citation link with hover underline.
  svg.append("g")
    .append("svg:a")
    .attr({
      "xlink:href": "https://dx.doi.org/10.1017/S0003055404001194",
      "target": "_blank"})
    .append("svg:text")
    .attr("x",12)
    .attr("y",81)
    .attr("font-size", 10)
    .style("fill","#aaa")
    .on('mouseover', function(d){
      d3.select(this).style("text-decoration","underline");
      d3.select(this).style("fill","blue");
    })
    .on('mouseout', function(d){
      d3.select(this).style("text-decoration","none");
      d3.select(this).style("fill","#aaa");
    })
    .attr("text-anchor","start")
    .text("Methodological details: Clinton, Jackman & Rivers, APSR 2004.");
}
)
|
<gh_stars>0
package org.springaop.chapter.one.schema.throwsadvice;
import org.springframework.context.ApplicationContext;
import org.springframework.context.support.ClassPathXmlApplicationContext;
/**
 * Demo driver: loads the Spring context and invokes a method that
 * throws, so the configured throws-advice can be observed.
 */
public class TestException {

    public static void main(String[] args) {
        String[] paths = { "org/springaop/chapter/one/schema/throwsadvice/applicationContext.xml" };
        ApplicationContext ctx = new ClassPathXmlApplicationContext(paths);

        ExceptionTarget exceptiontarget = (ExceptionTarget) ctx.getBean("exceptionTarget");
        try {
            exceptiontarget.errorMethod();
        } catch (Exception ignored) {
            // Expected: the demo's point is the advice triggered by the
            // exception, not the exception itself.
        }
    }
}
|
#!/bin/bash
## Installs the tracked shell/vim configs as ~/.nbashrc and ~/.nvimrc and
## ensures the user's rc files source them (idempotent: the source lines
## are appended only once).

cat .bashrc > ~/.nbashrc
cat .vimrc > ~/.nvimrc

# `grep -q` tests existence via exit status — the idiomatic form of the
# original `[[ -z $(grep ...) ]]` subshell capture. A missing rc file
# makes grep fail, so the line is appended (creating the file), as before.
if ! grep -q "nbashrc" ~/.bashrc; then
	echo "source ~/.nbashrc" >> ~/.bashrc
fi

if ! grep -q "nvimrc" ~/.vimrc; then
	echo "so ~/.nvimrc" >> ~/.vimrc
fi

source ~/.bashrc
|
#!/bin/bash
## Removes generated Sphinx stub files. -f keeps the script quiet and
## successful when a file has already been removed (a repeated clean).
rm -f modules.rst setup.rst fishergw*.rst
|
package com.example.imooc.dataobject;
import lombok.Data;
import javax.persistence.Entity;
import javax.persistence.Id;
import java.math.BigDecimal;
/**
 * Product (商品) catalog entry persisted via JPA; Lombok generates the
 * accessors.
 * Created by 廖师兄
 * 2017-05-09 11:30
 */
@Entity
@Data
public class ProductInfo {

    /** Primary key. */
    @Id
    private String productId;

    /** Name. */
    private String productName;

    /** Unit price. */
    private BigDecimal productPrice;

    /** Stock on hand. */
    private Integer productStock;

    /** Description. */
    private String productDescription;

    /** Small (thumbnail) image. */
    private String productIcon;

    /** Status: 0 = available, 1 = delisted. */
    private Integer productStatus;

    /** Category code. */
    private Integer categoryType;
}
|
/**
 * Renders a movie record as an HTML detail snippet.
 * @param {{title: string, release: string, genre: string}} movie
 * @returns {string} HTML markup for the movie.
 */
const movieDetailTemplate = (movie) => {
  const { title, release, genre } = movie;
  return `<div>
<h1>${title}</h1>
<p>Release date: ${release}</p>
<p>Genre: ${genre}</p>
</div>`;
};

const movie = {
  title: 'Star Wars',
  release: '1977',
  genre: 'Science fiction'
};

console.log(movieDetailTemplate(movie));

/* Output:
<div>
<h1>Star Wars</h1>
<p>Release date: 1977</p>
<p>Genre: Science fiction</p>
</div> */
#!/usr/bin/env bash
## Compiles the VS Code builtin web extensions (once) and copies them
## into dist/extensions.

set -euo pipefail

cd "$(dirname "${0}")/.."
APP_ROOT=$(pwd)

# Build the builtin extensions only if not already built; the emmet
# browser dist directory acts as the "done" marker.
# (Fixed the function-name typo: ensureBuiltinExtensitions.)
function ensureBuiltinExtensions() {
  cd "${APP_ROOT}/lib/vscode"
  if [ ! -e "extensions/emmet/dist/browser" ]
  then
    echo "compile vscode builtin extensions..."
    yarn gulp compile-web
    # yarn gulp compile-extension-media
  fi
}

function main() {
  ensureBuiltinExtensions
  # Quoted: APP_ROOT may contain spaces (was an unquoted expansion).
  cd "${APP_ROOT}"
  mkdir -p "dist/extensions"
  node scripts/copy-extensions.js
  echo "copy vscode builtin extensions done!"
}

main "$@"
|
<filename>src/router/index.js
import Vue from 'vue'
import Router from 'vue-router'

const _import = require('./_import_' + process.env.NODE_ENV)
// In development, lazy-loading is not used: lazily loading many pages makes
// webpack hot updates too slow, so lazy-loading is enabled in production only.
// detail: https://panjiachen.github.io/vue-element-admin-site/#/lazy-loading
Vue.use(Router)

/* Layout */
import Layout from '../views/layout/Layout'

/**
 * hidden: true         if `hidden:true` the route will not show in the sidebar (default is false)
 * redirect: noredirect if `redirect:noredirect` there is no redirect in the breadcrumb
 * name:'router-name'   the name is used by <keep-alive> (must set!!!)
 * meta : {
    title: 'title'      the name shown in submenu and breadcrumb (recommended)
    icon: 'svg-name'    the icon shown in the sidebar,
  }
 **/
export const constantRouterMap = [
  { path: '/login', component: _import('login/index'), hidden: true },
  { path: '/authredirect', component: _import('login/authredirect'), hidden: true },
  { path: '/404', component: _import('errorPage/404'), hidden: true },
  { path: '/401', component: _import('errorPage/401'), hidden: true },
  {
    path: '/',
    component: Layout,
    redirect: '/dashboard',
    name: 'Dashboard',
    hidden: true,
    children: [{
      path: 'dashboard',
      component: _import('dashboard/index')
    }]
  },
  /* {
    path: '/example',
    component: Layout,
    redirect: '/example/table',
    name: 'Example',
    meta: { title: 'Example', icon: 'example' },
    children: [
      {
        path: 'table',
        name: 'Table',
        component: _import('home/table/index'),
        meta: { title: 'Table', icon: 'table' }
      },
      {
        path: 'tree',
        name: 'Tree',
        component: _import('home/tree/index'),
        meta: { title: 'Tree', icon: 'tree' }
      }
    ]
  },
  {
    path: '/form',
    component: Layout,
    children: [
      {
        path: 'index',
        name: 'Form',
        component: _import('home/form/index'),
        meta: { title: 'Form', icon: 'form' }
      }
    ]
  }, */
  // Routes below are shown in (and linked to) the sidebar.
  {
    path: '/personManage',
    component: Layout,
    name: 'PersonManage',
    meta: { title: '人事管理', icon: 'geren' },
    redirect: 'department',
    children: [
      {
        path: 'department',
        component: _import('personManage/department/layout'),
        children: [
          {
            path: 'index',
            name: 'Department',
            component: _import('personManage/department/index'),
            meta: { title: '部门管理', icon: 'tipsdepartmentup' }
          },
          // Personnel management sub-pages (hidden from the sidebar).
          {
            path: 'add',
            name: 'departmentAdd',
            noredirect: true,
            hidden: true,
            component: _import('personManage/department/add')
          },
          {
            path: 'edit',
            name: 'departmentEdit',
            noredirect: true,
            hidden: true,
            component: _import('personManage/department/edit')
          },
          {
            path: 'delete',
            name: 'departmentDelete',
            noredirect: true,
            hidden: true,
            component: _import('personManage/department/delete')
          }
        ]
      },
      {
        path: 'post',
        name: 'Post',
        component: _import('personManage/post/index'),
        meta: { title: '岗位管理', icon: 'post' }
      },
      {
        path: 'personnel',
        name: 'Personnel',
        component: _import('personManage/personnel/index'),
        meta: { title: '人员管理', icon: 'user' }
      }
    ]
  }
  // { path: '*', redirect: '/404', hidden: true }
]

export default new Router({
  // mode: 'history', // can be enabled if the backend supports it
  scrollBehavior: () => ({ y: 0 }),
  routes: constantRouterMap
})
|
/*
*
*/
package net.community.chest.swing.component.button;
import java.awt.Component;
import java.awt.event.ActionListener;
import java.util.Collection;
import java.util.LinkedList;
import java.util.Map;
import java.util.TreeMap;
import javax.swing.AbstractButton;
import javax.swing.JToolBar;
/**
* <P>Copyright GPLv2</P>
*
* @author <NAME>.
* @since May 12, 2009 2:03:04 PM
*/
public final class ButtonUtils {
private ButtonUtils ()
{
// no instance
}
/**
* @param org Original {@link Map} if <code>null</code> one will be
* allocated if necessary
* @param b The {@link JToolBar} to be explored - may be null
* @param errIfDuplicate <code>true</code> throw an exception if same
* command already mapped to other button
* @return A {@link Map} whose key=the action command, value=the {@link AbstractButton}
* mapped to that command. May be null/empty if no initial map supplied
* and no commands found
* @throws IllegalStateException if null/empty action command found or
* duplicate mapping and duplicates not allowed.
*/
public static final Map<String,AbstractButton> updateButtonsMap (
final Map<String,AbstractButton> org,
final JToolBar b,
final boolean errIfDuplicate)
throws IllegalStateException
{
final Component[] ca=(null == b) ? null : b.getComponents();
if ((null == ca) || (ca.length <= 0))
return org;
Map<String,AbstractButton> ret=org;
for (final Component c : ca)
{
if (!(c instanceof AbstractButton))
continue;
final AbstractButton btn=(AbstractButton) c;
final String cmd=btn.getActionCommand();
if ((null == cmd) || (cmd.length() <= 0))
throw new IllegalStateException("updateButtonsMap(" + btn + ") no action command");
if (null == ret)
ret = new TreeMap<String,AbstractButton>(String.CASE_INSENSITIVE_ORDER);
final AbstractButton prev=ret.put(cmd, btn);
if ((prev != null) && errIfDuplicate && (prev != btn))
throw new IllegalStateException("updateButtonsMap(" + btn + ") duplicate command: " + cmd);
}
return ret;
}
public static final Map<String,AbstractButton> setButtonActionHandlers (
final Map<String,? extends AbstractButton> bm,
final Collection<? extends Map.Entry<String,? extends ActionListener>> ll)
{
if ((null == ll) || (ll.size() <= 0))
return null;
Map<String,AbstractButton> ret=null;
for (final Map.Entry<String,? extends ActionListener> le : ll)
{
final String cmd=(null == le) ? null : le.getKey();
final ActionListener l=(null == le) ? null : le.getValue();
if ((null == cmd) || (cmd.length() <= 0) || (null == l))
continue; // should not happen
final AbstractButton btn=bm.get(cmd);
if (null == btn)
continue;
btn.addActionListener(l);
if (null == ret)
ret = new TreeMap<String,AbstractButton>(String.CASE_INSENSITIVE_ORDER);
ret.put(cmd, btn);
}
return ret;
}
public static final Map<String,AbstractButton> setButtonActionHandlers (
final Map<String,? extends AbstractButton> bm,
final Map<String,? extends ActionListener> lm)
{
if ((null == bm) || (bm.size() <= 0))
return null;
final Collection<? extends Map.Entry<String,? extends ActionListener>> ll=
((null == lm) || (lm.size() <= 0)) ? null : lm.entrySet();
return setButtonActionHandlers(bm, ll);
}
/**
* @param <B> Type of {@link AbstractButton} being managed
* @param bm A {@link Map} where key=action command, value=associated
* {@link AbstractButton} for the command
* @param sl A {@link Collection} of pairs as {@link java.util.Map.Entry}-ies
* where key=action command, value=TRUE/FALSE parameter to
* {@link AbstractButton#setEnabled(boolean)} method
* @return A {@link Collection} of all buttons whose {@link AbstractButton#setEnabled(boolean)}
* method has been successfully invoked - may be null/empty if no methods
* invoked
*/
public static final <B extends AbstractButton> Collection<B> updateButtonsStates (
final Map<String,? extends B> bm,
final Collection<? extends Map.Entry<String,Boolean>> sl)
{
if ((null == sl) || (sl.size() <= 0)
|| (null == bm) || (bm.size() <= 0))
return null;
Collection<B> ret=null;
for (final Map.Entry<String,Boolean> be : sl)
{
final String cmd=(null == be) ? null : be.getKey();
final Boolean val=(null == be) ? null : be.getValue();
final B btn=
((null == cmd) || (cmd.length() <= 0) || (null == val)) ? null : bm.get(cmd);
if (null == btn)
continue;
btn.setEnabled(val.booleanValue());
if (null == ret)
ret = new LinkedList<B>();
ret.add(btn);
}
return ret;
}
/**
* @param <B> Type of {@link AbstractButton} being managed
* @param bm A {@link Map} where key=action command, value=associated
* {@link AbstractButton} for the command
* @param sm A {@link Map} where key=action command, value=TRUE/FALSE
* parameter to {@link AbstractButton#setEnabled(boolean)} method
* @return A {@link Collection} of all buttons whose {@link AbstractButton#setEnabled(boolean)}
* method has been successfully invoked - may be null/empty if no methods
* invoked
*/
public static final <B extends AbstractButton> Collection<B> updateButtonsStates (
final Map<String,? extends B> bm, final Map<String,Boolean> sm)
{
return updateButtonsStates(bm, ((null == sm) || (sm.size() <= 0)) ? null : sm.entrySet());
}
}
|
package util.silhouette
import scala.concurrent.Future
import com.mohiva.play.silhouette.core.services.IdentityService
import com.mohiva.play.silhouette.core.LoginInfo
import com.mohiva.play.silhouette.contrib.User
/**
* Base implementation to show how Guice works.
*/
class IdentityServiceImpl extends IdentityService[User] {

  /**
   * Looks up the identity matching the given login info.
   *
   * This demo implementation ignores its argument and always yields the
   * same hard-coded Facebook user.
   *
   * @param loginInfo The login info to retrieve an identity.
   * @return The retrieved identity or None if no identity could be retrieved for the given login info.
   */
  def retrieve(loginInfo: LoginInfo): Future[Option[User]] = {
    val demoUser = User(
      loginInfo = LoginInfo("facebook", "12345"),
      firstName = "Christian",
      lastName = "Kaps",
      fullName = "<NAME>",
      email = None,
      avatarURL = None
    )
    Future.successful(Some(demoUser))
  }
}
|
package did;
import java.io.IOException;
import java.io.Reader;
import java.net.URI;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import com.fasterxml.jackson.annotation.JsonAnySetter;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonValue;
import com.fasterxml.jackson.core.JsonParseException;
import com.fasterxml.jackson.databind.JsonMappingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.github.jsonldjava.core.JsonLdConsts;
import com.github.jsonldjava.core.JsonLdError;
import com.github.jsonldjava.core.JsonLdOptions;
import com.github.jsonldjava.core.JsonLdProcessor;
import com.github.jsonldjava.utils.JsonUtils;
import did.parser.ParserException;
/**
 * A DID Document backed by a JSON-LD object: all state lives in the wrapped
 * {@link Map} and the accessors read/write it directly.
 */
public class DIDDocument {

    public static final String MIME_TYPE = "application/did+ld+json";

    // JSON-LD term names used inside a DID document.
    public static final String JSONLD_TERM_ID = "id";
    public static final String JSONLD_TERM_TYPE = "type";
    public static final String JSONLD_TERM_SERVICE = "service";
    public static final String JSONLD_TERM_SERVICEENDPOINT = "serviceEndpoint";
    public static final String JSONLD_TERM_PUBLICKEY = "publicKey";
    public static final String JSONLD_TERM_PUBLICKEYBASE64 = "publicKeyBase64";
    public static final String JSONLD_TERM_PUBLICKEYBASE58 = "publicKeyBase58";
    public static final String JSONLD_TERM_PUBLICKEYHEX = "publicKeyHex";
    public static final String JSONLD_TERM_PUBLICKEYPEM = "publicKeyPem";
    public static final String JSONLD_TERM_AUTHENTICATION = "authentication";

    private static final ObjectMapper objectMapper = new ObjectMapper();

    // Single source of truth: the underlying JSON-LD object.
    private final Map<String, Object> jsonLdObject;

    private DIDDocument(Map<String, Object> jsonLdObject) {
        this.jsonLdObject = jsonLdObject;
    }

    /*
     * Factory methods
     */

    /** Wraps an existing JSON-LD object without copying it. */
    public static DIDDocument build(Map<String, Object> jsonLdObject) {
        return new DIDDocument(jsonLdObject);
    }

    /** Builds an empty document (no default context). */
    @JsonCreator
    public static DIDDocument build() {
        return new DIDDocument(newJsonLdObject(false));
    }

    /**
     * Builds a document from its parts.
     *
     * @param context optional value for '@context'; if null, the default
     *        skeleton context is used instead
     * @param id value for the 'id' term
     * @param publicKeys optional list serialized under 'publicKey'
     * @param authentications optional list serialized under 'authentication'
     * @param services optional list serialized under 'service'
     */
    public static DIDDocument build(Object context, String id, List<PublicKey> publicKeys, List<Authentication> authentications, List<Service> services) {

        Map<String, Object> jsonLdObject = newJsonLdObject(context == null);

        // add '@context'
        if (context != null) {
            jsonLdObject.put(JsonLdConsts.CONTEXT, context);
        }

        // add 'id'
        jsonLdObject.put(JSONLD_TERM_ID, id);

        // add 'publicKey'
        // (Fix: the original serialized the public keys twice back-to-back —
        // an identical copy/paste block; the redundant second pass is removed.)
        if (publicKeys != null) {
            LinkedList<Object> publicKeysJsonLdArray = new LinkedList<Object> ();
            for (PublicKey publicKey : publicKeys) {
                Map<String, Object> publicKeyJsonLdObject = publicKey.getJsonLdObject();
                publicKeysJsonLdArray.add(publicKeyJsonLdObject);
            }
            jsonLdObject.put(JSONLD_TERM_PUBLICKEY, publicKeysJsonLdArray);
        }

        // add 'service'
        if (services != null) {
            LinkedList<Object> servicesJsonLdArray = new LinkedList<Object> ();
            for (Service service : services) {
                Map<String, Object> serviceJsonLdObject = service.getJsonLdObject();
                servicesJsonLdArray.add(serviceJsonLdObject);
            }
            jsonLdObject.put(JSONLD_TERM_SERVICE, servicesJsonLdArray);
        }

        // add 'authentication'
        if (authentications != null && authentications.size() > 0) {
            LinkedList<Object> authenticationsJsonLdArray = new LinkedList<Object> ();
            for (Authentication authentication : authentications) {
                Map<String, Object> authenticationJsonLdObject = authentication.getJsonLdObject();
                authenticationsJsonLdArray.add(authenticationJsonLdObject);
            }
            jsonLdObject.put(JSONLD_TERM_AUTHENTICATION, authenticationsJsonLdArray);
        }

        // done
        return new DIDDocument(jsonLdObject);
    }

    /** Builds a document with the default skeleton context. */
    public static DIDDocument build(String id, List<PublicKey> publicKeys, List<Authentication> authentications, List<Service> services) {
        return build(null, id, publicKeys, authentications, services);
    }

    /*
     * Serialization
     */

    /** Parses a document from a JSON string. */
    public static DIDDocument fromJson(String json) throws IOException {
        return objectMapper.readValue(json, DIDDocument.class);
    }

    /** Parses a document from a JSON character stream. */
    public static DIDDocument fromJson(Reader reader) throws JsonParseException, JsonMappingException, IOException {
        return objectMapper.readValue(reader, DIDDocument.class);
    }

    /**
     * Serializes the document as pretty-printed JSON, compacted against its
     * own '@context' (which is re-inserted as the first key of the output).
     *
     * @throws IllegalStateException if the document has no '@context'
     */
    @SuppressWarnings("unchecked")
    public String toJson() throws IOException, JsonLdError {

        if (this.jsonLdObject == null) return "null";

        Object context = this.jsonLdObject.get(JsonLdConsts.CONTEXT);
        if (context == null) throw new IllegalStateException("No @context.");

        JsonLdOptions options = new JsonLdOptions();
        HashMap<String, Object> compacted = (HashMap<String, Object>) JsonLdProcessor.compact(this.jsonLdObject, context, options);

        // Re-insert '@context' first so it leads the serialized output.
        compacted.remove(JsonLdConsts.CONTEXT);
        LinkedHashMap<String, Object> json = new LinkedHashMap<String, Object> ();
        json.put(JsonLdConsts.CONTEXT, context);
        json.putAll(compacted);

        // done
        String result = JsonUtils.toPrettyString(json);
        return result;
    }

    /*
     * Service selection
     */

    /**
     * Selects services by name (DID URL fragment) and/or type; a null
     * criterion matches everything. Keys are the service's index in the
     * 'service' array.
     */
    public Map<Integer, Service> selectServices(String selectServiceName, String selectServiceType) {

        int i = -1;
        Map<Integer, Service> selectedServices = new HashMap<Integer, Service> ();
        if (this.getServices() == null) return selectedServices;

        for (Service service : this.getServices()) {
            i++;

            if (selectServiceName != null) {
                // The "name" of a service is the fragment of its DID URL id.
                DIDURL serviceDidUrl;
                try { serviceDidUrl = DIDURL.fromString(service.getId()); } catch (ParserException ex) { serviceDidUrl = null; }
                String serviceName = serviceDidUrl == null ? null : serviceDidUrl.getFragment();

                if (serviceName == null) continue;
                if (! serviceName.equals(selectServiceName)) continue;
            }

            if (selectServiceType != null) {
                if (service.getTypes() == null) continue;
                if (! Arrays.asList(service.getTypes()).contains(selectServiceType)) continue;
            }

            selectedServices.put(Integer.valueOf(i), service);
        }

        return selectedServices;
    }

    /**
     * Selects public keys by name (DID URL fragment) and/or type; a null
     * criterion matches everything. Keys are the key's index in the
     * 'publicKey' array.
     */
    public Map<Integer, PublicKey> selectKeys(String selectKeyName, String selectKeyType) {

        int i = -1;
        Map<Integer, PublicKey> selectedKeys = new HashMap<Integer, PublicKey> ();
        if (this.getPublicKeys() == null) return selectedKeys;

        for (PublicKey publicKey : this.getPublicKeys()) {
            i++;

            if (selectKeyName != null) {
                DIDURL publicKeyDidUrl;
                try { publicKeyDidUrl = DIDURL.fromString(publicKey.getId()); } catch (ParserException ex) { publicKeyDidUrl = null; }
                String publicKeyName = publicKeyDidUrl == null ? null : publicKeyDidUrl.getFragment();

                if (publicKeyName == null) continue;
                if (! publicKeyName.equals(selectKeyName)) continue;
            }

            if (selectKeyType != null) {
                if (publicKey.getTypes() == null) continue;
                if (! Arrays.asList(publicKey.getTypes()).contains(selectKeyType)) continue;
            }

            selectedKeys.put(Integer.valueOf(i), publicKey);
        }

        return selectedKeys;
    }

    /*
     * Helper methods
     */

    // Skeleton JSON-LD object loaded once from a classpath resource; supplies
    // the default '@context'.
    public static final Object DID_DOCUMENT_SKELETON;

    static {
        try {
            DID_DOCUMENT_SKELETON = JsonUtils.fromInputStream(DIDDocument.class.getResourceAsStream("diddocument-skeleton.jsonld"));
        } catch (IOException ex) {
            throw new ExceptionInInitializerError(ex);
        }
    }

    @SuppressWarnings("unchecked")
    private static HashMap<String, Object> newJsonLdObject(boolean defaultContext) {
        if (defaultContext) {
            // Copy the skeleton so callers never mutate the shared template.
            return new LinkedHashMap<String, Object> ((Map<String, Object>) DID_DOCUMENT_SKELETON);
        } else {
            return new LinkedHashMap<String, Object> ();
        }
    }

    /*
     * Getters and setters
     */

    @JsonValue
    public Map<String, Object> getJsonLdObject() {
        return this.jsonLdObject;
    }

    @JsonAnySetter
    public void setJsonLdObjectKeyValue(String key, Object value) {
        this.jsonLdObject.put(key, value);
    }

    /** Returns the '@context' value(s) as a list, or null if absent/unusable. */
    @SuppressWarnings("unchecked")
    public List<Object> getContexts() {
        Object entry = this.jsonLdObject.get(JsonLdConsts.CONTEXT);
        if (entry == null) return null;
        // A single URI or string context is wrapped into a one-element list.
        if (entry instanceof URI) entry = Collections.singletonList(entry);
        if (entry instanceof String) entry = Collections.singletonList(entry);
        if (! (entry instanceof List<?>)) return null;

        return (List<Object>) entry;
    }

    /** Returns the 'id' term as a string, or null if absent/unusable. */
    public String getId() {
        Object entry = this.jsonLdObject.get(JSONLD_TERM_ID);
        if (entry instanceof URI) return ((URI) entry).toString();
        if (entry instanceof String) return (String) entry;
        return null;
    }

    /** Returns the 'service' entries, or null if absent/unusable. */
    @SuppressWarnings("unchecked")
    public List<Service> getServices() {
        Object entry = this.jsonLdObject.get(JSONLD_TERM_SERVICE);
        if (entry == null) return null;
        // A single service object is treated as a one-element array.
        if (entry instanceof LinkedHashMap<?, ?>) entry = Collections.singletonList(entry);
        if (! (entry instanceof List<?>)) return null;
        List<Object> servicesJsonLdArray = (List<Object>) entry;

        List<Service> services = new ArrayList<Service> ();
        for (Object entry2 : servicesJsonLdArray) {
            if (! (entry2 instanceof LinkedHashMap<?, ?>)) continue;
            Map<String, Object> serviceJsonLdObject = (Map<String, Object>) entry2;
            services.add(Service.build(serviceJsonLdObject));
        }
        return services;
    }

    /** Returns the 'publicKey' entries, or null if absent/unusable. */
    @SuppressWarnings("unchecked")
    public List<PublicKey> getPublicKeys() {
        Object entry = this.jsonLdObject.get(JSONLD_TERM_PUBLICKEY);
        if (entry == null) return null;
        if (entry instanceof LinkedHashMap<?, ?>) entry = Collections.singletonList(entry);
        if (! (entry instanceof List<?>)) return null;
        List<Object> publicKeysJsonLdArray = (List<Object>) entry;

        List<PublicKey> publicKeys = new ArrayList<PublicKey> ();
        for (Object entry2 : publicKeysJsonLdArray) {
            if (! (entry2 instanceof LinkedHashMap<?, ?>)) continue;
            LinkedHashMap<String, Object> publicKeyJsonLdObject = (LinkedHashMap<String, Object>) entry2;
            publicKeys.add(PublicKey.build(publicKeyJsonLdObject));
        }
        return publicKeys;
    }

    /** Returns the 'authentication' entries, or null if absent/unusable. */
    @SuppressWarnings("unchecked")
    public List<Authentication> getAuthentications() {
        Object entry = this.jsonLdObject.get(JSONLD_TERM_AUTHENTICATION);
        if (entry == null) return null;
        if (entry instanceof LinkedHashMap<?, ?>) entry = Collections.singletonList(entry);
        if (! (entry instanceof List<?>)) return null;
        List<Object> authenticationsJsonLdArray = (List<Object>) entry;

        List<Authentication> authentications = new ArrayList<Authentication> ();
        for (Object entry2 : authenticationsJsonLdArray) {
            if (! (entry2 instanceof LinkedHashMap<?, ?>)) continue;
            LinkedHashMap<String, Object> authenticationJsonLdObject = (LinkedHashMap<String, Object>) entry2;
            authentications.add(Authentication.build(authenticationJsonLdObject));
        }
        return authentications;
    }

    /*
     * Object methods
     */

    @Override
    public String toString() {
        try {
            return this.toJson();
        } catch (IOException | JsonLdError ex) {
            throw new RuntimeException(ex.getMessage(), ex);
        }
    }
}
|
/**
* Combine all reducers in this file and export the combined reducers.
* If we were to do this in store.js, reducers wouldn't be hot reloadable.
*/
import { combineReducers } from 'redux-immutable';
import { fromJS } from 'immutable';
import { LOCATION_CHANGE } from 'react-router-redux';
import languageProviderReducer from 'containers/LanguageProvider/reducer';
import CONSTANTS from 'constants'
/*
 * routeReducer
 *
 * Merges route location changes into our immutable state.
 * The change is necessitated by moving to react-router-redux@4.
 */

// Initial routing state.
const routeInitialState = fromJS({
  locationBeforeTransitions: null,
});

/**
 * Merge route into the global application state.
 */
function routeReducer(state = routeInitialState, action) {
  /* istanbul ignore next */
  if (action.type === LOCATION_CHANGE) {
    return state.merge({
      locationBeforeTransitions: action.payload,
    });
  }
  return state;
}
/*
 * Session/user state.
 */
const userInitialState = fromJS({
  user: null
})

/**
 * Tracks the signed-in user.
 *
 * Fix: USER_SIGNED_IN previously fell straight through to USER_SIGNED_OUT,
 * so signing in reset the state and wiped the user instead of storing it.
 */
function userReducer(state = userInitialState, action) {
  switch (action.type) {
    case CONSTANTS.USER_SIGNED_IN:
      // NOTE(review): assumes the sign-in action carries the user as
      // `action.user` (per the original TODO comment) — confirm against the
      // sign-in action creator.
      return state.set('user', fromJS(action.user))
    case CONSTANTS.USER_SIGNED_OUT:
      return userInitialState
    default:
      return state
  }
}
/*
 * Global error state.
 */
const errorInitialState = fromJS({
  errors: null
})

/**
 * Records session/registration failures in the global error slot.
 *
 * Fix: both error cases previously fell through to `default` and returned
 * the state unchanged, so errors were never recorded.
 */
function errorReducer(state = errorInitialState, action) {
  switch (action.type) {
    case CONSTANTS.ERROR_SESSIONS:
    case CONSTANTS.ERROR_REGISTRATIONS:
      // NOTE(review): assumes failure actions carry their payload as
      // `action.errors` — confirm against the error action creators.
      return state.set('errors', fromJS(action.errors))
    default:
      return state
  }
}
/**
 * Creates the main reducer with the asynchronously loaded ones.
 */
export default function createReducer(asyncReducers) {
  const staticReducers = {
    errors: errorReducer,
    user: userReducer,
    route: routeReducer,
    language: languageProviderReducer,
  };
  return combineReducers({ ...staticReducers, ...asyncReducers });
}
|
// Copyright (c) 2017, <NAME>.
// All rights reserved.
// License: "BSD-3-Clause"
// Harness accumulators: each check pushes its actual value into `results`
// and the value it expects into `expected`.
var results = [];
var expected = [];
// Direct eval introduces a function-scoped `var x` inside y only; it does
// not leak into the caller's scope.
function y() {
  eval("var x=2");
}
void
function() {
  y();
  try {
    throw 3;
  } catch (x) {
    // `eval("var x=4")` inside a catch clause: the `var` declaration hoists
    // to the enclosing function scope while the assignment targets the
    // catch parameter `x` (Annex B semantics) — presumably leaving the
    // function-scoped `x` undefined. TODO confirm against the target engine.
    eval("var x=4");
  }
  // Reads the function-scoped `x` created by the hoisted declaration above.
  results.push(x);
  expected.push(undefined);
}();
// Final expression is the script's completion value for the test harness.
[results, expected, "DONE"];
|
# Routing constraint that passes when the request is authenticated under any
# of the known warden scopes.
class LoggedInConstraint
  # @param request [#env] request exposing warden via request.env['warden']
  # @return [Boolean] whether any of :default, :private or :public is
  #   authenticated (checked in that order, short-circuiting like the
  #   original `||` chain)
  def matches?(request)
    warden = request.env['warden']
    [:default, :private, :public].any? { |scope| warden.authenticated?(scope) }
  end
end
|
// (C) Copyright <NAME> 2005.
// Distributed under the Boost Software License, Version 1.0. (See accompanying
// file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt.)
// See http://www.boost.org/libs/iostreams for documentation.
#ifndef BOOST_IOSTREAMS_DETAIL_BASIC_ADAPTER_HPP_INCLUDED
#define BOOST_IOSTREAMS_DETAIL_BASIC_ADAPTER_HPP_INCLUDED
#include <boost/iostreams/categories.hpp>
#include <boost/iostreams/detail/call_traits.hpp>
#include <boost/iostreams/detail/ios.hpp>
#include <boost/iostreams/operations.hpp>
#include <boost/iostreams/traits.hpp>
#include <boost/static_assert.hpp>
namespace boost { namespace iostreams { namespace detail {
// Stores a wrapped device or filter of type T and forwards the common
// Boost.Iostreams operations (close/flush/imbue/optimal_buffer_size) to it.
// Device-only and filter-only overloads are guarded by compile-time asserts.
template<typename T>
class basic_adapter {
private:
    typedef typename detail::value_type<T>::type value_type;
    typedef typename detail::param_type<T>::type param_type;
public:
    explicit basic_adapter(param_type t) : t_(t) { }

    // Direct access to the wrapped component.
    T& component() { return t_; }

    // Device overload: close the wrapped device directly.
    void close(BOOST_IOS::openmode which = BOOST_IOS::in | BOOST_IOS::out)
    {
        BOOST_STATIC_ASSERT(is_device<T>::value);
        iostreams::close(t_, which);
    }

    // Filter overload: closing a filter requires the downstream device.
    template<typename Device>
    void close( Device& dev,
                BOOST_IOS::openmode which =
                    BOOST_IOS::in | BOOST_IOS::out )
    {
        BOOST_STATIC_ASSERT(is_filter<T>::value);
        iostreams::close(t_, dev, which);
    }

    // Device overload: flush the wrapped device.
    bool flush()
    {
        BOOST_STATIC_ASSERT(is_device<T>::value);
        return iostreams::flush(t_);
    }

    // Filter overload: flush through the downstream device.
    // NOTE(review): declared void but returns the result of iostreams::flush,
    // unlike the bool device overload — confirm intent upstream.
    template<typename Device>
    void flush(Device& dev)
    {
        BOOST_STATIC_ASSERT(is_filter<T>::value);
        return iostreams::flush(t_, dev);
    }

    template<typename Locale> // Avoid dependency on <locale>
    void imbue(const Locale& loc) { iostreams::imbue(t_, loc); }

    std::streamsize optimal_buffer_size() const
        { return iostreams::optimal_buffer_size(t_); }
public:
    // The wrapped component (public by design in this detail header).
    value_type t_;
};
//----------------------------------------------------------------------------//
} } } // End namespaces detail, iostreams, boost.
#endif // #ifndef BOOST_IOSTREAMS_DETAIL_BASIC_ADAPTER_HPP_INCLUDED
|
<gh_stars>1-10
//package entities;
//
//import android.net.Uri;
//import android.os.Parcel;
//import android.os.Parcelable;
//
//import com.google.firebase.firestore.Exclude;
//import com.google.firebase.firestore.IgnoreExtraProperties;
//import com.google.gson.annotations.Expose;
//import com.google.gson.annotations.SerializedName;
//
//import java.util.Date;
//import java.util.List;
//
//@IgnoreExtraProperties
//public class Post implements Parcelable {
//
// public static final Parcelable.Creator CREATOR = new Parcelable.Creator() {
// public Post createFromParcel(Parcel in) {
// return new Post(in);
// }
//
// public Post[] newArray(int size) {
// return new Post[size];
// }
// };
//
// @Expose
// @SerializedName("title")
// private String mTitle;
//
// @Expose
// @SerializedName("body")
// private String mBody;
//
// @Expose
// @SerializedName("user")
// private String mAuthor;
//
// @Expose
// @SerializedName("image_uri")
// private Uri mImageUri;
//
// @Expose
// @SerializedName("post_date")
// private Date mPostDate = new Date();
//
// @Expose
// @SerializedName("user_pic")
// private String mProfilePic;
//
// @Expose
// @SerializedName("author_name")
// private String mAuthorName;
//
// @Expose
// @SerializedName("likes")
// private int mLikes;
//
// @Expose
// @SerializedName("comments")
// private List<Comment> mComments;
// @Exclude
// private String mId;
//
//
//
// public Post() {
//
// }
//
// Post(Post.Builder builder) {
// mAuthor = builder.mAuthor;
// mBody = builder.mBody;
// mImageUri = builder.mImg;
// mPostDate = builder.mPostDate;
// mTitle = builder.mTitle;
// mProfilePic = builder.mProfilePic;
// mAuthorName = builder.mAuthorName;
// mLikes = builder.mLikes;
// }
//
// Post(Parcel in) {
// mAuthor = in.readString();
// mAuthorName = in.readString();
// mBody = in.readString();
// mTitle = in.readString();
// mLikes = in.readInt();
// mPostDate = (Date) in.readSerializable();
// mId = in.readString();
// }
//
// public String getTitle() {
// return mTitle;
// }
//
// public void setTitle(String title) {
// mTitle = title;
// }
//
// public String getBody() {
// return mBody;
// }
//
// public void setBody(String body) {
// mBody = body;
// }
//
// public String getAuthor() {
// return mAuthor;
// }
//
// public void setAuthor(String author) {
// mAuthor = author;
// }
//
// public Uri getImageUri() {
// return mImageUri;
// }
//
// public void setImageUri(Uri imageUri) {
// mImageUri = imageUri;
// }
//
// public String getProfilePic() {
// return mProfilePic;
// }
//
// public Date getPostDate() {
// return mPostDate;
// }
//
// public void setProfilePic(String postUri) {
// mProfilePic = postUri;
// }
//
// public String getAuthorName() {
// return mAuthorName;
// }
//
// public void setAuthorName(String authorName) {
// mAuthorName = authorName;
// }
//
// public int getLikes() {
// return mLikes;
// }
//
// public void setLikes(int likes) {
// mLikes = likes;
// }
//
// public List<Comment> getComments() {
// return mComments;
// }
//
// public void setComments(List<Comment> comments) {
// mComments = comments;
// }
//
// public void addComment(Comment comment) {
// mComments.add(comment);
// }
//
// public String getId() {
// return mId;
// }
//
// public void setId(String id) {
// mId = id;
// }
//
// @Override
// public int describeContents() {
// return 0;
// }
//
// @Override
// public void writeToParcel(Parcel dest, int flags) {
// dest.writeInt(this.mLikes);
// dest.writeString(this.mAuthor);
// dest.writeString(this.mId);
// dest.writeString(this.mAuthorName);
// dest.writeString(this.mBody);
// dest.writeString(this.mTitle);
// dest.writeSerializable(this.mPostDate);
// }
//
// public static class Builder {
// private String mTitle;
// private String mBody;
// private String mAuthor;
// private final Date mPostDate = new Date();
// private Uri mImg;
// private String mProfilePic;
// private String mAuthorName;
// private int mLikes;
//
// public Builder title(String title) {
// mTitle = title;
// return this;
// }
//
// public Builder body(String body) {
// mBody = body;
// return this;
// }
//
// public Builder author(String author) {
// mAuthor = author;
// return this;
// }
//
// public Builder profilePic(String profilePic) {
// mProfilePic = profilePic;
// return this;
// }
//
// public Builder image(Uri image) {
// mImg = image;
// return this;
// }
//
// public Builder authorName(String name) {
// mAuthorName = name;
// return this;
// }
//
// public Builder likes(int likes) {
// mLikes = likes;
// return this;
// }
//
//
// public Post build() {
// return new Post(this);
// }
//
// }
//}
|
#!/bin/bash
## @file
# For development.
#
#
# Copyright (C) 2006-2017 Oracle Corporation
#
# This file is part of VirtualBox Open Source Edition (OSE), as
# available from http://www.virtualbox.org. This file is free software;
# you can redistribute it and/or modify it under the terms of the GNU
# General Public License (GPL) as published by the Free Software
# Foundation, in version 2 as it comes in the "COPYING" file of the
# VirtualBox OSE distribution. VirtualBox OSE is distributed in the
# hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
#
# The contents of this file may alternatively be used under the terms
# of the Common Development and Distribution License Version 1.0
# (CDDL) only, as it comes in the "COPYING.CDDL" file of the
# VirtualBox OSE distribution, in which case the provisions of the
# CDDL are applicable instead of those of the GPL.
#
# You may elect to license modified versions of this file under the
# terms and conditions of either the GPL or the CDDL or both.
#
# Identity used in log messages and temp-dir names.
SCRIPT_NAME="loadusb"
# Darwin/XNU major version; selects kextutil vs. kextload below.
XNU_VERSION=`LC_ALL=C uname -r | LC_ALL=C cut -d . -f 1`

DRVNAME="VBoxUSB.kext"
BUNDLE="org.virtualbox.kext.VBoxUSB"

# VBoxUSB depends on VBoxDrv, which is passed as a dependency when loading.
DEP_DRVNAME="VBoxDrv.kext"
DEP_BUNDLE="org.virtualbox.kext.VBoxDrv"

# Resolve the absolute paths of the two kexts next to this script.
DIR=`dirname "$0"`
DIR=`cd "$DIR" && pwd`
DEP_DIR="$DIR/$DEP_DRVNAME"
DIR="$DIR/$DRVNAME"

if [ ! -d "$DIR" ]; then
    echo "Cannot find $DIR or it's not a directory..."
    exit 1;
fi
if [ ! -d "$DEP_DIR" ]; then
    echo "Cannot find $DEP_DIR or it's not a directory... (dependency)"
    exit 1;
fi

# Any arguments are passed straight to kextutil/kextload; default to -t.
if [ -n "$*" ]; then
    OPTS="$*"
else
    OPTS="-t"
fi

# On Ctrl-C, give ownership of the kext directories back to the user.
trap "sudo chown -R `whoami` $DIR $DEP_DIR; exit 1" INT

# Try unload any existing instance first.
LOADED=`kextstat -b $BUNDLE -l`
if test -n "$LOADED"; then
    echo "${SCRIPT_NAME}.sh: Unloading $BUNDLE..."
    sudo kextunload -v 6 -b $BUNDLE
    # Verify the unload actually took effect before continuing.
    LOADED=`kextstat -b $BUNDLE -l`
    if test -n "$LOADED"; then
        echo "${SCRIPT_NAME}.sh: failed to unload $BUNDLE, see above..."
        exit 1;
    fi
    echo "${SCRIPT_NAME}.sh: Successfully unloaded $BUNDLE"
fi

set -e

# Copy the .kext to the symbols directory and tweak the kextload options.
if test -n "$VBOX_DARWIN_SYMS"; then
    echo "${SCRIPT_NAME}.sh: copying the extension the symbol area..."
    rm -Rf "$VBOX_DARWIN_SYMS/$DRVNAME"
    mkdir -p "$VBOX_DARWIN_SYMS"
    cp -R "$DIR" "$VBOX_DARWIN_SYMS/"
    OPTS="$OPTS -s $VBOX_DARWIN_SYMS/ "
    sync
fi

# The kext must be owned root:wheel to load. On smbfs, this might succeed
# just fine but make no actual changes, so we might have to temporarily
# copy the driver to a local directory.
if sudo chown -R root:wheel "$DIR" "$DEP_DIR"; then
    OWNER=`/usr/bin/stat -f "%u" "$DIR"`
else
    # chown failed outright; force the temp-copy fallback below.
    OWNER=1000
fi
if test "$OWNER" -ne 0; then
    TMP_DIR=/tmp/${SCRIPT_NAME}.tmp
    echo "${SCRIPT_NAME}.sh: chown didn't work on $DIR, using temp location $TMP_DIR/$DRVNAME"
    # clean up first (no sudo rm)
    if test -e "$TMP_DIR"; then
        sudo chown -R `whoami` "$TMP_DIR"
        rm -Rf "$TMP_DIR"
    fi
    # make a copy and switch over DIR
    mkdir -p "$TMP_DIR/"
    sudo cp -Rp "$DIR" "$TMP_DIR/"
    DIR="$TMP_DIR/$DRVNAME"
    # load.sh puts it here.
    DEP_DIR="/tmp/loaddrv.tmp/$DEP_DRVNAME"
    # retry
    sudo chown -R root:wheel "$DIR" "$DEP_DIR"
fi
sudo chmod -R o-rwx "$DIR"
sync

# XNU >= 10 uses kextutil; older systems still use kextload.
if [ "$XNU_VERSION" -ge "10" ]; then
    echo "${SCRIPT_NAME}.sh: loading $DIR... (kextutil $OPTS -d \"$DEP_DIR\" \"$DIR\")"
    sudo kextutil $OPTS -d "$DEP_DIR" "$DIR"
else
    echo "${SCRIPT_NAME}.sh: loading $DIR... (kextload $OPTS -d \"$DEP_DIR\" \"$DIR\")"
    sudo kextload $OPTS -d "$DEP_DIR" "$DIR"
fi
sync

# Hand ownership back to the invoking user and show what is loaded.
sudo chown -R `whoami` "$DIR" "$DEP_DIR"
kextstat | grep org.virtualbox.kext
|
<gh_stars>1-10
import { Http } from '../utils/http'
/**
 * Client-side accessor for the home-page theme data.
 */
class Theme {
  // Server-side names of the four home-page theme sections.
  static ThemeA = 't-1'
  static ThemeB = 't-2'
  static ThemeC = 't-3'
  static ThemeD = 't-4'

  // Cache the themes on the instance so repeated reads do not hit the
  // server again.
  themes = []

  // Fetches all four home themes in a single request and caches the result.
  async getHomeThemes() {
    const res = await Http.request({
      url: '/theme/by/names',
      data: {
        names: `${Theme.ThemeA},${Theme.ThemeB},${Theme.ThemeC},${Theme.ThemeD}`,
      },
    })
    this.themes = res.data
  }

  // Per-theme getters so callers' request code stays simple.
  // NOTE(review): these read only the cache — getHomeThemes() must have
  // been awaited first, otherwise they resolve to undefined.
  async getThemeA() {
    return this.themes.find((t) => t.name === Theme.ThemeA)
  }
  async getThemeB() {
    return this.themes.find((t) => t.name === Theme.ThemeB)
  }
  async getThemeC() {
    return this.themes.find((t) => t.name === Theme.ThemeC)
  }
  async getThemeD() {
    return this.themes.find((t) => t.name === Theme.ThemeD)
  }

  // NOTE(review): despite the "E" in the name this fetches ThemeB's SPUs —
  // looks like a leftover from a rename; confirm the intended theme.
  static getHomeLocationESpu() {
    return Theme.getThemeSpuByName(Theme.ThemeB)
  }
  static getHomeThemeCSpu(){
    return Theme.getThemeSpuByName(Theme.ThemeC)
  }

  // Fetches a single theme together with its SPU list (uncached).
  static async getThemeSpuByName(name) {
    const res =await Http.request({
      url: `/theme/name/${name}/with_spu`,
    })
    return res.data
  }
}

export { Theme }
|
<filename>node_modules/react-icons-kit/noto_emoji_regular/u1F312.js
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.u1F312 = void 0;
var u1F312 = {
"viewBox": "0 0 2600 2760.837",
"children": [{
"name": "path",
"attribs": {
"d": "M2149 1920q-132 228-359 359.5T1300 2411t-490-131.5T451 1920t-132-490q0-265 133-493.5t360.5-358T1300 449t487.5 129.5 360.5 358 133 493.5q0 262-132 490zm-849 374q231 0 431.5-116.5t317-317T2165 1429t-116.5-431.5-317-317T1300 564l-20 1q230 110 371.5 345t141.5 519-141.5 519-371.5 345z"
},
"children": []
}]
};
exports.u1F312 = u1F312; |
package template;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
public abstract class FileCheck {
protected static Object fileRead(final String name) {
FileInputStream in = null;
ObjectInputStream objectInputStream = null;
Object object = null;
try {
in = new FileInputStream(name);
object = (objectInputStream = new ObjectInputStream(in)).readObject();
}
catch (IOException ex) {
ex.printStackTrace();
return null;
}
catch (ClassNotFoundException ex2) {
System.out.println("Class not found");
ex2.printStackTrace();
return null;
}
finally {
try {
if (in != null) {
((FileInputStream)in).close();
}
if (objectInputStream != null) {
objectInputStream.close();
}
}
catch (IOException ex3) {
// in = (InputStream)ex3;
ex3.printStackTrace();
}
}
try {
((FileInputStream)in).close();
objectInputStream.close();
}
catch (IOException ex4) {
ex4.printStackTrace();
}
return object;
}
protected static void fileWrite(final String name, final Object obj) {
FileOutputStream out = null;
ObjectOutputStream objectOutputStream = null;
try {
out = new FileOutputStream(name);
(objectOutputStream = new ObjectOutputStream(out)).writeObject(obj);
objectOutputStream.close();
((FileOutputStream)out).close();
}
catch (IOException ex) {
ex.printStackTrace();
try {
if (out != null) {
((FileOutputStream)out).close();
}
if (objectOutputStream != null) {
objectOutputStream.close();
}
return;
}
catch (IOException ex2) {
ex2.printStackTrace();
return;
}
}
finally {
try {
if (out != null) {
((FileOutputStream)out).close();
}
if (objectOutputStream != null) {
objectOutputStream.close();
}
}
catch (IOException ex3) {
ex3.printStackTrace();
}
}
try {
((FileOutputStream)out).close();
objectOutputStream.close();
}
catch (IOException ex4) {
ex4.printStackTrace();
}
}
}
|
#!/usr/bin/env bash
# Build the requested AUR packages (plus their AUR dependencies) into a
# local pacman repository and move the result into the GitHub workspace.

# fail if anything goes wrong
set -e
# print each line before executing
set -x

# get list of all packages with dependencies to install
packages_with_aur_dependencies="$(aur depends --pkgname $INPUT_PACKAGES $INPUT_MISSING_AUR_DEPENDENCIES)"
echo "AUR Packages requested to install: $INPUT_PACKAGES"
echo "AUR Packages to fix missing dependencies: $INPUT_MISSING_AUR_DEPENDENCIES"
echo "AUR Packages to install (including dependencies): $packages_with_aur_dependencies"

# sync repositories
pacman -Sy

if [ -n "$INPUT_MISSING_PACMAN_DEPENDENCIES" ]
then
    echo "Additional Pacman packages to install: $INPUT_MISSING_PACMAN_DEPENDENCIES"
    pacman --noconfirm -S $INPUT_MISSING_PACMAN_DEPENDENCIES
fi

# add them to the local repository
aur sync \
    --noconfirm --noview \
    --database aurci2 --root /workspace \
    $packages_with_aur_dependencies

# move the local repository to the workspace
if [ -n "$GITHUB_WORKSPACE" ]
then
    # -f: with `set -e`, a bare rm aborts the whole script when no *.old
    # backups exist (an unmatched glob is passed through literally)
    rm -f /workspace/*.old
    echo "Moving repository to github workspace"
    mv /workspace/* $GITHUB_WORKSPACE/
    # make sure that the .db/.files files are in place
    # Note: Symlinks fail to upload, so copy those files
    cd $GITHUB_WORKSPACE
    # -f for the same set -e reason: the symlinks may not exist
    rm -f aurci2.db aurci2.files
    cp aurci2.db.tar.gz aurci2.db
    cp aurci2.files.tar.gz aurci2.files
fi
|
<filename>.local/share/Trash/files/ch7/singleton_class.rb
# The Book of Ruby - http://www.sapphiresteel.com

ob = Object.new

# singleton class: `class << ob` reopens ob's singleton (eigen) class,
# so the method defined inside is added to this one object only.
class << ob
  def blather( aStr )
    puts("blather, blather #{aStr}")
  end
end

# ob has all the usual Object methods
# puts( ob.inspect )
# puts( ob.class )
ob.blather( "weeble" )

ob2 = Object.new

# singleton method: `def ob2.method` is the shorthand equivalent —
# it also defines the method on ob2's singleton class only.
def ob2.blather( aStr )
  puts( "grippity, grippity #{aStr}" )
end

ob2.blather( "ping!" )
|
#!/usr/bin/env sh
# Container entrypoint: prepare RabbitMQ vhost/permissions, install PHP
# dependencies, then follow the application log to keep the container alive.

# Create JWT Token if not exists
#if [ ! -f var/jwt/private.pem ] || [ ! -f var/jwt/public.pem ]; then
#
#    echo 'JWT Token is not exists, creating...'
#    rm -rf var/jwt
#    mkdir var/jwt
#    openssl genrsa -out var/jwt/private.pem -aes256 -passout pass:${JWT_PASS} 4096
#    openssl rsa -pubout -in var/jwt/private.pem -out var/jwt/public.pem -passin pass:${JWT_PASS}
#else
#    echo 'JWT Token exists'
#fi

echo 'RabbbitMQ preparations'
# Block until the RabbitMQ management API (port 15672) answers.
./docker/wait-for-it.sh ${RABBITMQ_HOST}:15672 -- echo 'RabbbitMQ is up'

echo 'Create vhost'
curl -i -u ${RABBITMQ_LOGIN}:${RABBITMQ_PASSWORD} -H "content-type:application/json" \
    -XPUT -s ${RABBITMQ_HOST}:15672/api/vhosts/gtr

echo 'Add permissions to vhost'
curl -i -u ${RABBITMQ_LOGIN}:${RABBITMQ_PASSWORD} -H "content-type:application/json" \
    -XPUT -s -d '{"configure":".*","write":".*","read":".*"}' \
    ${RABBITMQ_HOST}:15672/api/permissions/gtr/${RABBITMQ_LOGIN}

composer install --ignore-platform-reqs --optimize-autoloader --prefer-dist

#chmod +x bin/console
#rm -rf var/cache/*
echo 'Container deployed...'
# `tailf` was removed from util-linux; `tail -f` is the portable equivalent.
tail -n 0 -f storage/logs/laravel.log
#!/bin/bash
#=================================================
# Description: DIY script
# License: MIT
# Author: P3TERX
# Blog: https://p3terx.com
#=================================================

# Replace the firmware's default LAN IP with the custom one.
CONFIG_GENERATE='package/base-files/files/bin/config_generate'
DEFAULT_IP='192.168.1.1'
CUSTOM_IP='192.168.50.2'
# Alternative previously used: 192.168.50.5
sed -i "s/${DEFAULT_IP}/${CUSTOM_IP}/g" "${CONFIG_GENERATE}"
|
#!/bin/sh
set -e

# add to read pools
# Lock 2.0 tokens for 1h against the allocation stored in
# ~/.zcn/allocation.txt; this funds subsequent reads/downloads.
./zboxcli/zbox --wallet testing.json rp-lock \
    --duration=1h --allocation "$(cat ~/.zcn/allocation.txt)" --tokens 2.0

# auth user
# ./zboxcli/zbox --wallet testing-auth.json rp-lock \
#     --duration=1h --allocation "$(cat ~/.zcn/allocation.txt)" --tokens 2.0

# Run the conductor SDK proxy (filtering read_marker requests) wrapped
# around the proxied download script.
go run 0chain/code/go/0chain.net/conductor/sdkproxy/main.go -f read_marker \
    -run 0chain/docker.local/bin/conductor/proxied/download_b.sh
|
<filename>CS lab/Assignment 3/prog3.c
// Name of coder: <NAME>
// Roll No: 1801CS16
// Date created: 29/01/2019
// Brief objective of the program: Returns the smallest prime number p >= n of form 4k+1 (p = a2 + b2). Prints the values of p, a, b.
#include <stdio.h>
#include <math.h>
int is_prime(long long int p);
// Reads n, finds the smallest prime p >= n of the form 4k+1, and
// decomposes it as p = a^2 + b^2 (always possible for such primes).
int main(void)
{
    // long long throughout so large n cannot overflow intermediate squares.
    long long int a = 0, b = 0, p = 0, n = 0;
    char roll_number[10] = "1801CS16";

    printf("Enter a positive integer (n): ");
    scanf("%lld", &n);

    // Smallest number >= n of the form 4k+1.
    p = n;
    if (p % 4 == 0)
    {
        p++;
    }
    else if (p % 4 == 2)
    {
        p = p + 3;
    }
    else if (p % 4 == 3)
    {
        p = p + 2;
    }

    // Advance in steps of 4 (preserving the 4k+1 form) until p is prime.
    while (!is_prime(p))
    {
        p = p + 4;
    }

    // Find a, b with p = a*a + b*b.  The original compared sqrt() results
    // with == (unreliable for doubles) and used an int index whose square
    // could overflow; this version verifies the root in integer arithmetic.
    for (long long int i = 0; i * i <= p; i++)
    {
        long long int rem = p - i * i;
        long long int root = (long long int)sqrt((double)rem);
        // Correct any floating-point rounding error in either direction.
        while (root > 0 && root * root > rem)
        {
            root--;
        }
        while ((root + 1) * (root + 1) <= rem)
        {
            root++;
        }
        if (root * root == rem)
        {
            a = i;
            b = root;
            break;
        }
    }

    // Output
    printf("\nFinal values are: Roll Number = %s Input n = %lld, p = %lld, a = %lld, b = %lld", roll_number, n, p, a, b);
    return 0;
}
// Function to check whether the given number is prime (1) or not (0).
// Trial division with i*i <= p: avoids the repeated floating-point sqrt()
// calls of the original and the int-vs-long-long loop-counter mismatch.
int is_prime(long long int p)
{
    // 0, 1 and negatives are not prime; the original accepted them.
    if (p < 2)
    {
        return 0;
    }
    for (long long int i = 2; i * i <= p; i++)
    {
        if (p % i == 0)
        {
            return 0;
        }
    }
    return 1;
}
|
#!/bin/bash
set -e

#### BASIC IMAGE
# Download the Ubuntu 20.04 cloud image, verify its checksum, and write it
# raw onto /dev/sda.
yum install -y wget qemu-img sg3_utils libgcrypt
cd /tmp
wget -q https://cloud-images.ubuntu.com/releases/focal/release/ubuntu-20.04-server-cloudimg-amd64.img
wget -q https://cloud-images.ubuntu.com/releases/focal/release/MD5SUMS
# -lt: the original `[[ ... < 1 ]]` is a lexicographic STRING comparison
# inside [[ ]], not a numeric one; -lt compares the grep count as a number.
if [[ $(md5sum -c MD5SUMS 2>&1 | grep -c OK) -lt 1 ]]; then exit 1; fi
mv *.img focal.img
qemu-img convert ./focal.img -O raw /dev/sda
|
#!/bin/bash -e
# Install a specific CMake release into /opt/local/cmake.

cmake_version="${1}"

# Fail early with a usage hint instead of building a broken download URL.
if [ -z "${cmake_version}" ]; then
    echo "usage: ${0} <cmake-version>" >&2
    exit 1
fi

echo "Installing CMake ${cmake_version}"
# ${cmake_version%.*} strips the patch component to form the vX.Y directory.
curl -o /tmp/cmake.tar.gz -L "https://cmake.org/files/v${cmake_version%.*}/cmake-${cmake_version}-Linux-x86_64.tar.gz"
mkdir -p /opt/local/cmake
tar -C /opt/local/cmake --strip-components=1 -xf /tmp/cmake.tar.gz
rm -f /tmp/cmake.tar.gz
|
#!/usr/bin/env bash
# Copy bundled fonts into the user's font directory and rebuild the cache.
source ./lib/util/echos.sh

minibot "Little Gary here! Fonts will be saved to your local Fonts directory."

action "saving fonts"
# Without globstar, "**" behaves like a single "*", so the recursive glob
# silently missed fonts nested more than one directory deep.
shopt -s globstar
if [[ "$OSTYPE" != "darwin"* ]]; then
    # Ensure the target directory exists before copying into it.
    mkdir -p ~/.local/share/fonts
fi
for file in ./fonts/**/*; do
    # Skip directories matched by the glob; cp without -r fails on them.
    [[ -f "$file" ]] || continue
    if [[ "$OSTYPE" == "darwin"* ]]; then
        cp "$file" ~/Library/Fonts
    else
        cp "$file" ~/.local/share/fonts
    fi
done
ok "done saving fonts."

if [[ "$OSTYPE" != "darwin"* ]]; then
    action "building font chaches"
    fc-cache -f -v
    ok "done building chaches."
fi
|
#! /bin/bash
# SLURM batch job: generate FFTW wisdom files on one exclusive mpp2 node.
#SBATCH -o fftw_plan_028.txt
#SBATCH -J fftw_plan_028
#SBATCH --get-user-env
#SBATCH --clusters=mpp2
#SBATCH --ntasks=1
#SBATCH --cpus-per-task=1
#SBATCH --exclusive
#SBATCH --export=NONE
#SBATCH --time=03:00:00

#declare -x NUMA_BLOCK_ALLOC_VERBOSITY=1
# Pin threads compactly; the single-threaded default is overridden below
# via mpiexec's -genv OMP_NUM_THREADS 28.
declare -x KMP_AFFINITY="granularity=thread,compact,1,0"
declare -x OMP_NUM_THREADS=1

# Load the exact toolchain (order matters: unload defaults first).
. /etc/profile.d/modules.sh
module unload gcc
module unload fftw
module unload python
module load python/2.7_anaconda_nompi
module unload intel
module load intel/16.0
module unload mpi.intel
module load mpi.intel/5.1
module load gcc/5

# Run from the repository root with its environment sourced.
cd /home/martin/workspace/sweet/benchmarks/rexi_tests_lrz_freq_waves/2015_12_27_scalability_rexi_fd
cd ../../../
. local_software/env_vars.sh

mpiexec.hydra -genv OMP_NUM_THREADS 28 -envall -ppn 1 ./fftw_gen_wisdoms_all.sh 28 FFTW_WISDOM_nofreq_T28
|
/*
* Copyright © 2012-2016 VMware, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the “License”); you may not
* use this file except in compliance with the License. You may obtain a copy
* of the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an “AS IS” BASIS, without
* warranties or conditions of any kind, EITHER EXPRESS OR IMPLIED. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
'use strict';
// Server-management controller for the SSO UI: lists computers, filters
// tenants, and supports tenant add/delete.
var module = angular.module('lightwave.ui.sso');
module.controller('ServerMgmtCntrl', ['$scope', '$rootScope', 'ServerService', 'TenantService', 'Util', 'popupUtil',
    'AuthenticationService',
    function($scope, $rootScope, ServerService, TenantService, Util, popupUtil, AuthenticationService) {

        $scope.vm = this;
        // Expose controller actions to the view.
        $scope.vm.getcomputers = getcomputers;
        $scope.vm.gettenants = gettenants;
        $scope.vm.addTenant = addTenant;
        $scope.vm.deleteTenant = deleteTenant;

        init();

        // Reset view-model state.  NOTE(review): `tenants` starts empty and
        // gettenants() only filters it — presumably it is populated elsewhere;
        // confirm against the rest of the file.
        function init() {
            $scope.vm.tenants = [];
            $scope.vm.filteredtenants = [];
            $scope.vm.newtenant = {
                credentials: {
                    certificates: []
                }
            };
        }

        // Open the "add tenant" popup dialog.
        function addTenant() {
            var template = 'sso/servermgmt/tenant/tenant.add.html';
            var controller = 'TenantCntrl';
            popupUtil.open($scope, template, controller);
        }

        // Load the computer list (system tenant only), optionally filtered
        // by substring match on hostname.
        function getcomputers(searchText) {
            if ($rootScope.globals.currentUser.isSystemTenant) {
                $rootScope.globals.errors = null;
                $scope.vm.computersdataLoading = true;
                ServerService
                    .Get($rootScope.globals.currentUser)
                    .then(function (res) {
                        if (res.status == 200) {
                            var comps = res.data;
                            if (!searchText || searchText == '') {
                                $scope.vm.computers = comps;
                            } else if (comps != null) {
                                // Keep only hostnames containing the search text.
                                $scope.vm.computers = [];
                                for (var i = 0; i < comps.length; i++) {
                                    if (comps[i].hostname.indexOf(searchText) > -1) {
                                        $scope.vm.computers.push(comps[i]);
                                    }
                                }
                            }
                        }
                        else {
                            $rootScope.globals.errors = res.data;
                        }
                        $scope.vm.computersdataLoading = false;
                    });
            }
        }

        // Filter the cached tenant list by substring match on name.
        function gettenants(searchText) {
            $rootScope.globals.errors = null;
            var tenants = $scope.vm.tenants;
            if (!searchText || searchText == '') {
                $scope.vm.filteredtenants = tenants;
            } else if (tenants != null && tenants.length > 0) {
                $scope.vm.filteredtenants = [];
                for (var i = 0; i < $scope.vm.tenants.length; i++) {
                    if (tenants[i].name.indexOf(searchText) > -1) {
                        $scope.vm.filteredtenants.push(tenants[i]);
                    }
                }
            }
        }

        // Delete the current tenant; on success, clear cached tenant state
        // and redirect to the home page.
        function deleteTenant() {
            $rootScope.globals.errors = null;
            TenantService
                .Delete($rootScope.globals.currentUser)
                .then(function (res) {
                    if (res.status == 200 || res.status == 204) {
                        TenantService.Cleanup($rootScope.globals.currentUser);
                        AuthenticationService.redirectToHome();
                    }
                    else {
                        $rootScope.globals.errors = res.data;
                    }
                });
        }
    }]);
#!/bin/bash
# Configure the CUDA build of the simulation via CMake.
export TEST_MPI_COMMAND="mpirun -n 1"
unset CUDAFLAGS

# Wipe any previous CMake cache/artifacts before reconfiguring.
./cmake_clean.sh

# NOTE(review): the trailing ".." assumes this script runs from a build
# directory one level below the source root — confirm the project layout.
cmake -DCMAKE_CXX_COMPILER=mpic++   \
      -DPNETCDF_PATH=${PNETCDF_PATH} \
      -DCXXFLAGS="-O3 -std=c++11"    \
      -DCUDA_FLAGS="-arch sm_50 -ccbin mpic++" \
      -DNX=200 \
      -DNZ=100 \
      -DSIM_TIME=5 \
      -DARCH="CUDA" \
      ..
|
#!/bin/sh
# Deploy the sentry-to-discord HTTP-triggered Cloud Function.
# NOTE(review): `set +e` explicitly disables abort-on-error, so a failed
# deploy will NOT fail this script — confirm that is intentional.
set +e
gcloud functions deploy sentry-to-discord \
    --entry-point=F \
    --memory=128MB \
    --region=us-central1 \
    --runtime=go113 \
    --env-vars-file=.env.yaml \
    --trigger-http \
    --timeout=10s
|
<reponame>publisherfk/oauth2lab<gh_stars>0
package io.spring2go.clientresttemplate.security;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.security.config.annotation.authentication.builders.AuthenticationManagerBuilder;
import org.springframework.security.config.annotation.web.builders.HttpSecurity;
import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity;
import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter;
import org.springframework.security.core.userdetails.UserDetailsService;
import org.springframework.security.crypto.bcrypt.BCryptPasswordEncoder;
/**
 * Web security configuration: form login backed by the injected
 * {@link UserDetailsService}, with the landing pages left public.
 */
@Configuration
@EnableWebSecurity
public class SecurityConfiguration extends WebSecurityConfigurerAdapter {

    // User lookup used by the authentication manager.
    @Autowired
    private UserDetailsService userDetailsService;

    /**
     * Registers the user details service with the authentication manager.
     * NOTE(review): no PasswordEncoder is wired here even though a
     * passwordEncoder() bean exists below — confirm how stored passwords
     * are matched.
     */
    @Override
    protected void configure(AuthenticationManagerBuilder auth) throws Exception {
        auth.userDetailsService(userDetailsService);
    }

    /**
     * Permits "/" and "/index.html" to everyone, requires authentication
     * for everything else, enables form login and logout, and disables
     * CSRF protection.
     */
    @Override
    protected void configure(HttpSecurity http) throws Exception {
        http.authorizeRequests().antMatchers("/", "/index.html").permitAll().anyRequest().authenticated().and()
                .formLogin().and().logout().permitAll().and().csrf().disable();
    }

    /** Password encoder bean (project-specific OrchidPasswordEncoder). */
    @Bean
    public OrchidPasswordEncoder passwordEncoder() {
        return new OrchidPasswordEncoder();
    }
}
|
// Dependencies
import { Router } from 'express'
import { getUser, getUserInfo } from '../../models'
const router = Router()

// GET /:id — look up a user by numeric id and respond with their info
// (including balance) as JSON.
router.get('/:id', async (req, res) => {
  try {
    // Get id
    const { id } = req.params
    const numericId = Number.parseInt(id, 10)
    // Guard: a non-numeric id would otherwise reach the DB layer as NaN.
    if (Number.isNaN(numericId)) {
      return res.status(400).json({ error: 'Invalid user id' })
    }
    // Get user from db
    const user = await getUser(numericId)
    // Get user info with balance
    const info = await getUserInfo(req.telegram, user)
    // Respond with info
    res.json(info)
  } catch (error) {
    // Error instances JSON-serialize to {} — the original sent an empty
    // body; expose the message explicitly instead.
    res.status(500).json({ error: error.message })
  }
})

export const UserRoute = router
|
package ru.cbrrate.services;
import ru.cbrrate.model.GetUpdatesResponse;
/**
 * Abstraction for delivering a single incoming message to its destination.
 */
public interface MessageSender {
    /**
     * Sends the given message taken from a getUpdates response.
     *
     * @param message the message payload to deliver
     */
    void send(GetUpdatesResponse.Message message);
}
|
class BankAccount:
    """A minimal bank account with a balance and a shared transaction count."""

    # Number of successful deposits/withdrawals across *all* accounts.
    total_transactions = 0

    def __init__(self):
        """Open the account with a zero balance."""
        self.balance = 0

    def deposit(self, amount):
        """Add ``amount`` to the balance and record the transaction."""
        self.balance = self.balance + amount
        BankAccount.total_transactions += 1

    def withdraw(self, amount):
        """Subtract ``amount`` from the balance.

        Returns the string ``"Insufficient funds"`` — leaving the balance
        untouched — when ``amount`` exceeds the current balance.
        """
        if amount > self.balance:
            return "Insufficient funds"
        self.balance = self.balance - amount
        BankAccount.total_transactions += 1

    def get_balance(self):
        """Return the current balance."""
        return self.balance
#!/bin/bash
# Create an unprivileged build user with a fixed UID (1000) so files written
# to the bind-mounted /io keep a predictable owner on the host, then run the
# manylinux build script as that user.
adduser -u 1000 jr
su -c /io/packaging/many_linux/build_script.sh jr
|
<gh_stars>1-10
from ...AST.statements.write import WriteStatement
def write_stmt():
    """ Parsing 'write' statement. """
    # Imported lazily to avoid circular imports between parser modules.
    from ..common import keyword
    from ..expressions import objects, arithmetic
    def process(parsed):
        # Combinator results nest left-to-right: (((write, '('), expr), ')');
        # only the expression in the middle is kept.
        (((_, _), name), _) = parsed
        return WriteStatement(name)
    # Grammar: 'write' '(' <object method | object value | arithmetic expr> ')'
    # NOTE(review): '^' applies `process` to the parsed sequence; the relative
    # precedence of '+', '|' and '^' is defined by the combinator library —
    # do not re-parenthesize without checking it.
    return keyword('write') + \
        keyword('(') + (objects.object_method() | objects.object_val() | arithmetic.aexp()) + keyword(')') ^ process
|
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.alert = void 0;
var alert = {
"viewBox": "0 0 512 512",
"children": [{
"name": "g",
"attribs": {},
"children": [{
"name": "path",
"attribs": {
"d": "M320,480H192v-96h128V480z M304,320h-96L192,32h128L304,320z"
},
"children": [{
"name": "path",
"attribs": {
"d": "M320,480H192v-96h128V480z M304,320h-96L192,32h128L304,320z"
},
"children": []
}]
}]
}]
};
exports.alert = alert; |
<filename>components/Speakers/index.js
import React from "react";
import PropTypes from "prop-types";
import styled from "styled-components";
import Row from "antd/lib/row";
import Col from "antd/lib/col";
import SocialIcons from "components/SocialIcons";
import media from "components/layouts/Master/MediaQuery";
// SocialOverlay is interpolated by Avatar and Wrapper below, so it must be
// declared first: `const` bindings are in the temporal dead zone until their
// declaration runs, and the original order (Avatar at the top, SocialOverlay
// near the bottom) threw a ReferenceError while the styled templates were
// being evaluated at module load.
const SocialOverlay = styled.div`
  position: absolute;
  top: 0;
  bottom: 0;
  left: 0;
  right: 0;
  height: 100%;
  width: 100%;
  opacity: 0;
  transition: 0.5s ease;
  background: linear-gradient(
    135deg,
    rgba(132, 104, 245, 0.51) 0%,
    rgba(236, 116, 231, 0.55) 50%
  );
`;

// Lecture subject line under a speaker's name.
const SpSubject = styled.p`
  text-align: left;
  font-size: 1rem;
  color: #000;
  margin: 0;
  ${media.phone`
    text-align: center;
  `};
`;

// Free-form speaker description.
const SpDesc = styled.p`
  text-align: left;
  font-size: 1rem;
  ${media.phone`
    text-align: center;
  `};
`;

// Speaker name heading.
const Titles = styled.h3`
  text-align: left;
  font-family: "LatoWebMedium";
  font-weight: 900;
  font-size: 1.5em;
  ${media.phone`
    text-align: center;
  `};
`;

// Circular avatar; hovering it reveals the social-icon overlay.
const Avatar = styled.div`
  &:hover ${SocialOverlay} {
    opacity: 1;
  }
  text-align: center;
  width: 80%;
  max-width: 200px;
  margin: 0 auto 10px auto;
  border: 5px solid #fff;
  border-radius: 50%;
  overflow: hidden;
  position: relative;
  img {
    border-radius: 50%;
    width: 100%;
    //filter: grayscale(100%);
  }
  ${media.phone`
    max-width: 100%;
  `};
`;

// Full-size layer over team avatars (tint gradient currently disabled;
// the large commented-out legacy CSS was removed).
const AvatarEffect = styled.div`
  position: absolute;
  width: 100%;
  height: 100%;
  left: 0;
  top: 0;
`;

// Full-size layer over jury avatars (tint gradient currently disabled).
const AvatarEffectJury = styled.div`
  position: absolute;
  width: 100%;
  height: 100%;
  left: 0;
  top: 0;
`;

// Grid wrapper: also reveals the overlay when the whole column is hovered.
// NOTE(review): the original additionally contained
// ".ant-col-xs-23 .i : { color: white; }", which is syntactically invalid
// CSS and therefore had no effect; it was dropped — confirm whether white
// icons on hover are still wanted.
const Wrapper = styled.div`
  .ant-col-xs-24:hover ${SocialOverlay} {
    opacity: 1;
  }
`;
function renderFarm(items, type) {
const effect = type == "team" ? <AvatarEffect /> : <AvatarEffectJury />;
return items.map((item, index) => {
return (
<Col
xs={24}
sm={12}
md={24}
lg={12}
xl={12}
key={index}
style={{ marginBottom: 20 }}
>
<Col xs={24} sm={24} md={12} lg={12} xl={12}>
<Avatar>
<img src={item.image} alt={item.name} />
{effect}
<SocialOverlay>
<SocialIcons
type={"speakers"}
items={item.socials}
size={1.4}
/>
</SocialOverlay>
</Avatar>
</Col>
<Col xs={24} sm={24} md={12} lg={12} xl={12}>
<Titles>{item.name}</Titles>
<SpSubject> {item.lecture}</SpSubject>
<SpDesc> {item.desc} </SpDesc>
<br />
</Col>
</Col>
);
});
}
const Speakers = ({ items, type }) => {
return (
<Wrapper>
<Row type="flex" justify="center">
{renderFarm(items, type)}
</Row>
</Wrapper>
);
};
Speakers.propTypes = {
items: PropTypes.array.isRequired,
type: PropTypes.string.isRequired,
};
export default Speakers;
|
<form>
  <label>Name:</label>
  <input type="text" name="name">
  <br>
  <label>Address:</label>
  <input type="text" name="address">
  <br>
  <input type="submit" value="Submit Information">
</form>
<script>
  function handleFormSubmit(event) {
    event.preventDefault();
    // Read the values via the form's named controls; the original
    // `document.name` / `document.address` do not reference the inputs
    // and yielded no usable values.
    const form = event.target;
    const name = form.elements["name"].value;
    const address = form.elements["address"].value;
    // write code to handle user input
  }
  document.querySelector("form").addEventListener("submit", handleFormSubmit);
</script>
<reponame>kkiyama117/hangon
package html
import (
"pumpkin/codes/interface_adapters/html/user/controllers"
"pumpkin/codes/interface_adapters/html/user/driver_ports"
"pumpkin/codes/interface_adapters/html/user/presenters"
"pumpkin/codes/usecases/html/interactor"
)
// InjectedShowUsers wires the show-users use case end to end:
// presenter -> interactor -> controller, writing results to the given output port.
func InjectedShowUsers(output driver_ports.APIOutput) controllers.ShowUsersController {
	presenter := presenters.NewShowUsersPresenter(output)
	useCase := interactor.NewShowUsersInteractor(presenter)
	return controllers.NewShowUsersController(useCase)
}
|
// Load environment variables before anything else: ES module imports are
// hoisted and evaluated in order, so the original `dotenv.config()` call —
// placed after `import app` — ran too late for code in './app' that reads
// process.env at import time. The 'dotenv/config' side-effect entry runs
// config() during module evaluation, before './app' is evaluated.
import 'dotenv/config'
import app from './app'

app.listen(process.env.SERVER_PORT, () =>
    console.log(`Listening on port ${process.env.SERVER_PORT}`)
)
|
module PersonMatchStrategies
  # Matches a Person (and their authority member) by case-insensitive
  # first name, last name, and member date of birth.
  class FirstLastDob
    # Returns [person, authority_member] on a unique match, [nil, nil] when
    # no dob is supplied or nothing matches.  Raises AmbiguousMatchError on
    # multiple matches, an unparseable dob, or a missing authority member.
    def match(options = {})
      return([nil, nil]) if options[:dob].blank?
      # Case-insensitive, literal (escaped) name regexes.
      name_first_regex = Regexp.compile(Regexp.escape(options[:name_first].to_s.strip.downcase), true)
      name_last_regex = Regexp.compile(Regexp.escape(options[:name_last].to_s.strip.downcase), true)
      search_dob = cast_dob(options[:dob], options)
      found_people = Person.where({"members.dob" => search_dob, "name_first" => name_first_regex, "name_last" => name_last_regex})
      if found_people.any?
        if found_people.many?
          raise AmbiguousMatchError.new("Multiple people with same first, last, and dob: #{options[:name_first]}, #{options[:name_last]}, #{options[:dob]}")
        else
          select_authority_member(found_people.first, options)
        end
      else
        [nil, nil]
      end
    end

    # Normalizes the dob option to a Date/DateTime; strings are parsed,
    # and parse failures surface as AmbiguousMatchError.
    def cast_dob(dob, options)
      if dob.kind_of?(Date)
        return dob
      elsif dob.kind_of?(DateTime)
        return dob
      end
      begin
        Date.parse(dob)
      rescue
        raise AmbiguousMatchError.new("Invalid DOB: #{options[:name_first]}, #{options[:name_last]}, #{options[:dob]}")
      end
    end

    # Pairs the matched person with their authority member, or raises if
    # the person has none.
    def select_authority_member(person, options)
      if !person.authority_member.present?
        raise AmbiguousMatchError.new("No authority member for person with first, last, and dob: #{options[:name_first]}, #{options[:name_last]}, #{options[:dob]}")
      end
      [person, person.authority_member]
    end
  end
end
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.