text
stringlengths
1
1.05M
import BigNumber from "bignumber.js";

/** BigNumber constant 0. */
export const ZERO = new BigNumber(0);

/** BigNumber constant 10. */
export const TEN = new BigNumber(10);

/** 10^18 — scale factor for values with 18 decimal places (the common ERC-20 default). */
export const DEFAULT_DECIMALS = TEN.pow(18);

/** 2^256 - 1 — the largest value represantable as a uint256 on the EVM. */
export const MAX_UINT256 = new BigNumber(2).pow(256).minus(1);
import logging

logger = logging.getLogger(__name__)


class ChainOfTransfiguration(object):
    """A chain-of-responsibility that runs transfigurations in sequence.

    Each step may depend on the results of previous steps through the
    shared ``context`` mapping passed to :meth:`execute`.
    """

    def __init__(self):
        # Per-instance state. Deliberately NOT class attributes: class-level
        # mutable defaults would be shared (and mutated) across all instances.
        self._chain = []
        self._context = {}

    def add(self, transfiguration):
        """Add a transfiguration to the chain of execution.

        :param transfiguration: an object exposing ``perform(context)``
        """
        self._chain.append(transfiguration)
        logger.debug('Add transfiguration : [%s] to chain', transfiguration.__class__)

    def get(self, index):
        """Retrieve the transfiguration in the chain at position ``index``.

        :param index: index from 0 to size-of-chain - 1
        :return: the transfiguration at ``chain[index]``
        """
        return self._chain[index]

    def size(self):
        """Return the number of transfigurations in the chain."""
        return len(self._chain)

    def execute(self, context=None):
        """Perform each transfiguration in order, passing ``context`` to each.

        :param context: a mapping of key-value attributes shared by the steps
        """
        for transfiguration in self._chain:
            logger.info("Performing Transfiguration [%s]", transfiguration.__class__)
            transfiguration.perform(context)
# This script is designed to be included in your ~/.bashrc or equivalent file
# loaded on bash startup.

# Retrieves the size on disk value in bytes for the working directory.
alias dirsize=GetDirectorySize

function GetDirectorySize() {
    # `+` batches many files into few `ls` invocations instead of one process
    # per file (`\;`); `sum + 0` prints 0 rather than a blank line when the
    # directory contains no regular files.
    find . -type f -exec ls -l {} + | awk '{sum += $5} END {print sum + 0}'
}
package com.lepao.ydcgkf.ui;

import android.content.Intent;
import android.os.Bundle;
import android.text.TextUtils;
import android.util.Log;
import android.view.View;
import android.widget.LinearLayout;

import com.just.agentweb.AgentWeb;
import com.just.agentweb.DefaultWebClient;
import com.lepao.ydcgkf.R;
import com.lepao.ydcgkf.base.BaseActivity;
import com.lepao.ydcgkf.utils.AppManager;

import butterknife.ButterKnife;
import butterknife.OnClick;

/**
 * Activity that embeds the "data platform" H5 page in an AgentWeb WebView.
 * <p>
 * The page URL is built from a {@code token} extra on the launching Intent;
 * when no token is supplied a hard-coded fallback token is appended instead
 * (NOTE(review): presumably a test/demo token — confirm before shipping).
 * <p>
 * created by zwj on 2018/9/10 0010
 */
public class DataStatisticsActivity extends BaseActivity {

    // Base URL of the H5 data platform; the token is appended as a suffix.
    private String url = "http://www.javaer.com.cn/lepao/h5/#/dataPlatformH5?token=";
    private AgentWeb mAgentWeb;
    private LinearLayout mLinearLayout;

    /** Builds the final URL from the Intent's token and loads it into AgentWeb. */
    @Override
    public void initView() {
        String token = getIntent().getStringExtra("token");
        if (!TextUtils.isEmpty(token)) {
            url = url + token;
        } else {
            url = url + "lepao123456";
        }
        mLinearLayout = (LinearLayout) this.findViewById(R.id.container);
        mAgentWeb = AgentWeb.with(this)
                .setAgentWebParent(mLinearLayout, new LinearLayout.LayoutParams(-1, -1))
                .useDefaultIndicator()
                .setMainFrameErrorView(R.layout.agentweb_error_page, -1)
                .setSecurityType(AgentWeb.SecurityType.STRICT_CHECK)
                // When a link targets another application, ask the user before leaving
                .setOpenOtherPageWays(DefaultWebClient.OpenOtherPageWays.ASK)
                // Intercept URL schemes for which no matching page exists
                .interceptUnkownUrl()
                .createAgentWeb()
                .ready()
                .go(url);
    }

    /** No extra data to fetch; all content comes from the embedded web page. */
    @Override
    public void getData() {
    }

    /** @return the layout resource backing this activity */
    @Override
    public int getLayout() {
        return R.layout.activity_data_statistic;
    }

    /**
     * Toolbar click handling: back button finishes the activity, right button
     * opens the QR-code screen for the currently loaded URL.
     */
    @OnClick({R.id.ll_finish, R.id.ll_right})
    public void onViewClicked(View view) {
        switch (view.getId()) {
            case R.id.ll_finish:
                AppManager.getAppManager().finishActivity();
                break;
            case R.id.ll_right:
                Intent intent = new Intent(this, DataStatisticsQRCodeActivity.class);
                intent.putExtra("url", url);
                startActivity(intent);
                break;
        }
    }
}
def repeatString(string, num):
    """Return ``string`` concatenated ``num`` times (empty for num <= 0)."""
    return "".join([string] * num)


result = repeatString("xyz", 3)
print(result)
// Core react imports
import * as React from 'react';

// Material Imports
import * as Material from '@material-ui/core';
import * as Icons from '@material-ui/icons';
import {withStyles, MuiThemeProvider} from '@material-ui/core/styles';
import * as MuiColors from '@material-ui/core/colors';

// Public utilities
import classNames from 'classnames';

// Core OT
import { OT, Util, Poly, G } from "@dra2020/baseclient";
import * as DT from '@dra2020/dra-types';

// App libraries
import { Environment } from '../env';
import * as ClientActions from '../clientactions';
import * as TV from './tableview';
import * as Profile from './profileview';
import * as ResetView from './resetview';
import * as ForgotView from './forgotview';
import * as AlertView from './alertview';
import * as ProgressView from './progressview';
import * as STV from './statictextview';
import * as Hash from '../hash';
import * as AnlzView from './analyticsview';

export const appBarHeight: number = 48; // Calculated sizes of other panes are based on this
export const appBackgroundColor: string = '#fafafa';

// App-level action identifiers. Start at 5000 to avoid colliding with the
// ClientActions id ranges; each constant takes the next consecutive id.
let AppActionID = 5000;
export const ActionProfileEditField = AppActionID++;
export const ActionProfileClose = AppActionID++;
export const ActionProfileOpen = AppActionID++;
export const ActionProfile = AppActionID++;
export const ActionLoginOpen = AppActionID++;
export const ActionLogin = AppActionID++;
export const ActionSignupOpen = AppActionID++;
export const ActionSignup = AppActionID++;
export const ActionVisitorClose = AppActionID++;
export const ActionVisitor = AppActionID++;
export const ActionForgotOpen = AppActionID++;
export const ActionForgotClose = AppActionID++;
export const ActionForgot = AppActionID++;
export const ActionResetOpen = AppActionID++;
export const ActionResetClose = AppActionID++;
export const ActionReset = AppActionID++;
export const ActionVerifyEmail = AppActionID++;
export const ActionAlertOpen = AppActionID++;
export const ActionAlertClose = AppActionID++;
export const ActionProgressOpen = AppActionID++;
export const ActionProgressClose = AppActionID++;
export const ActionLogout = AppActionID++;

export enum DW // Enum representing size ranges for Available Width
{
  PHONE,
  PHONEPLUS,
  NARROW,
  NARROWPLUS,
  NARROWPLUS2,
  TABLET,
  MEDIUM,
  MEDIUMPLUS,
  WIDE,
  WIDER,
  WIDEST,
}

// App-level action dispatcher: mixed into the main ClientActions so that
// table icon selections and app-specific ids are routed here first.
export class AppActions extends ClientActions.ClientActions
{
  app: ClientActions.IClientActions;

  constructor(app: ClientActions.IClientActions)
  {
    super(app.env);
    this.app = app;
  }

  // Placeholder for per-icon dispatch; currently no icon commands are handled.
  fireIcon(cmd: ClientActions.ParamTableIcon): void
  {
    switch (cmd.name)
    {
    }
  }

  fire(id: number, arg?: any): boolean
  {
    switch (id)
    {
      case ClientActions.TableIconSelect:
        this.fireIcon(arg as ClientActions.ParamTableIcon);
        break;

      case -1: // If we ever have any
        return this.app.fire(id, arg);

      default:
        return this._fire(id, arg);;
    }
    return true;
  }
}

// A pluggable dialog/view: `open` computes initial props+state, `render` draws it.
export interface Viewer
{
  name: string,
  open: (appProps: AppProps, params: any) => { props: any, state: any },
  render: (props: any, state: any) => any,
}

export type ViewerIndex = { [name: string]: Viewer };

export interface AppProps
{
  env: Environment;
  title: string;
  actions: ClientActions.ClientActions;
  viewMode: string; // Mode: VIEW_FILELIST or MAPVIEW_ANLZ

  // Dialogs
  viewers: ViewerIndex;
  viewerProps: ClientActions.ViewerProps;
  viewerState: ClientActions.ViewerState;

  // Home or per-session
  isAnon: boolean;
  roles: { [role: string]: boolean };

  // Stuff to display
  rows: any[];
  selectedRow: string;

  clearState?: boolean;
  classes?: any;
  theme?: any;
}

// Items that purely control visibility
export interface AppState
{
}

const shadingColor = MuiColors.indigo[50];

// JSS style generator consumed by withStyles(); `theme` is the MUI theme object.
export function AppStyles(theme: any): any
{
  return ({
    root: {
      flexGrow: 1,
    },
    spacer: {
      flex: '1 1 100%',
    },
    fillSpace: {
      width: '100%',
    },
    fillSpread: {
      width: '100%',
      justifyContent: 'space-between',
    },
    actions: {
      color: theme.palette.text.secondary,
    },
    secondary: {
      color: theme.palette.secondary.main,
    },
    primary: {
      color: theme.palette.primary.main,
    },
    appFrame: {
      zIndex: 1,
      overflow: 'hidden',
      position: 'relative',
      display: 'flex',
      width: '100%',
      height: 'calc(100vh)'
    },
    menuButton: {
      marginLeft: 2,
      marginRight: 2,
    },
    hide: {
      display: 'none',
    },
    bigDialogPaper: {
      maxWidth: '80vw',
      minWidth: '80vw',
    },
    dialogRoot: {
      minWidth: 552,
    },
    tableTitle: {
      marginLeft: 2,
    },
    simpleRow: {
      display: 'flex',
      flexDirection: 'row',
      alignItems: 'center',
      justifyContent: 'flex-start',
      width: '100%',
      maxWidth: 552,
    },
    spreadRow: {
      display: 'flex',
      flexDirection: 'row',
      alignItems: 'center',
      justifyContent: 'space-between',
      width: '100%',
      maxWidth: 552,
    },
    simpleColumn: {
      display: 'flex',
      flexDirection: 'column',
      flexShrink: 0, // Fix Safari bug where scrolling content gets shrunk instead of scrolling
      alignItems: 'flex-start',
      justifyContent: 'flex-start',
      width: '100%',
      maxWidth: 552,
    },
    firstColumn: {
      width: 120,
      paddingRight: 12,
      textAlign: 'right',
    },
    table: {
      position: 'relative',
      width: '100%',
      height: '90vh',
    },
    tableHeader: {
      display: 'flex',
      alignItems: 'left',
      justifyContent: 'flex-start',
      padding: 0,
      ...theme.mixins.toolbar,
    },
    tableHeadColor: {
      backgroundColor: '#d5dbdb',
    },
    tableAlternatingShading: {
      backgroundColor: '#ebdef0',
    },
    themeBackgroundColor: {
      backgroundColor: theme.palette.background.default,
    },
    sessionTable: {
      minWidth: 520,
    },
    sessionTableWrapper: {
      overflowX: 'auto',
      overflowY: 'scroll',
      width: '100%',
      height: 'calc(100vh - 136px)',
    },
    noMargin: {
      marginLeft: 0,
      marginRight: 0,
      padding: 0,
      borderWidth: 0,
    },
    noPadding: {
      padding: 0,
    },
    denseIcon: {
      width: 24,
      height: 24,
    },
    iconLarge: {
      fontSize: 36,
    },
    veryDenseIcon: {
      width: '16px',
      height: '16px',
      fontSize: '16px',
    },
    denseLabel: {
      fontSize: '0.7rem',
      marginLeft: 2,
      marginRight: 2,
    },
    smallLabel: {
      fontSize: '0.6rem',
      marginLeft: 0,
      marginRight: 0,
    },
    denseFormControl: {
      marginLeft: 2,
      marginRight: 2,
    },
    padding4: {
      padding: 4,
    },
    shortControl: {
      minWidth: 120,
      fontSize: theme.typography.body1.fontSize,
    },
    denseInput: {
      fontSize: theme.typography.body1.fontSize,
      width: 120,
    },
    denseMenuItem: {
      fontSize: '0.7rem',
      marginLeft: 0,
      marginRight: 0,
      height: 24,
    },
    denseSelect: {
      fontSize: '0.7rem',
      marginLeft: 0,
      marginRight: 0,
      minWidth: 48,
    },
    denseIconWithLabel: {
      width: 'fit-content',
      minWidth: '60px',
    },
    denseLabel1: {
      fontSize: '0.75rem',
      marginLeft: 0,
      marginRight: 0,
    },
    denseLabel2: {
      fontSize: '0.8rem',
      margin: 1,
      padding: 1,
    },
    denseLabel3: {
      fontSize: '0.8rem',
      margin: '1px 0px 1px 1px', // no right margin/padding, because chevron there
      padding: '1px 0px 1px 1px',
    },
    checkCell: {
      fontSize: '0.7rem',
      padding: 0,
      margin: 0,
      width: 0,
    },
    denseCell: {
      fontSize: '0.7rem',
      padding: 0,
      margin: 0,
      '&:last-child': {
        paddingRight: 1,
      },
    },
    subheading: {
      marginTop: 6,
      marginBottom: 3,
    },
    smallPadding: {
      padding: 2,
      margin: 2,
    },
    smallIcon: {
      fontSize: 'small',
      padding: 2,
    },
    linkText: {
      '&:hover': {
        textDecoration: 'underline',
        cursor: 'pointer',
      },
    },
    blueLinkText: {
      color: theme.palette.secondary.main,
      fontSize: '1rem',
      fontWeight: 500,
      padding: 8,
      fontFamily: theme.typography.body2.fontFamily,
      '&:hover': {
        textDecoration: 'underline',
        cursor: 'pointer',
      },
    },
    dialogScrollBlock: {
      height: 400,
      overflowY: 'scroll',
      fontFamily: theme.typography.body2.fontFamily,
    },
    commentText: {
      fontSize: '0.85rem',
      color: 'black',
    },
    commentName: {
      fontSize: '0.85rem',
      color: 'black',
      fontWeight: 'bold',
    },
    commentMeta: {
      fontSize: '0.85rem',
      color: 'grey',
    },
    separator: {
      display: 'flex',
      flexDirection: 'row',
      alignItems: 'center',
      justifyContent: 'flex-start',
      height: 36,
      width: '100%',
      maxWidth: 552,
    },
    font875: {
      fontSize: '0.875rem',
    },
    font1rem: {
      fontSize: '1rem',
    },
    loginButton: {
      fontSize: '1.0rem',
    },
    cellNoBorder: {
      border: '0px',
    },
  });
}

// Dispatcher for selection/button events coming from the file-list TableView.
export class TableActions extends ClientActions.ClientActions
{
  actions: ClientActions.ClientActions;

  constructor(env: Environment, actions: ClientActions.ClientActions)
  {
    super(env);
    this.actions = actions;
  }

  fire(id: number, arg?: any): boolean
  {
    switch (id)
    {
      case ClientActions.SelectionClear:
        break;
      case ClientActions.SelectionEmpty:
        break;
      case ClientActions.SelectionDouble: // id
      case ClientActions.SelectionSet: // id
        break;
      case ClientActions.TableButtonSelect:
        // console.log(`analyze button for ${arg.id}, ${arg.name} clicked`);
        this.actions.fire(ClientActions.SetRowToAnalyze, arg.id);
        break;
    }
    return true;
  }
}

// Top-level app component (wrapped by withStyles below and exported as MaterialApp).
class InternalMaterialApp extends React.Component<AppProps, AppState>
{
  env: Environment;
  appActions: AppActions;
  tableActions: TableActions;
  viewers: ViewerIndex;

  constructor(props: AppProps)
  {
    super(props);

    this.env = props.actions.env;
    this.appActions = new AppActions(this);
    this.tableActions = new TableActions(this.env, props.actions);
    // Route app-level action ids through this component's fire().
    props.actions.mixin(this.appActions);

    this.handleKeyPress = this.handleKeyPress.bind(this);
    this.handlePick = this.handlePick.bind(this);
  }

  // Handle app-level actions (alert/progress dialogs); returns false on unknown ids.
  fire(id: number, e?: any): boolean
  {
    const { env, actions } = this.props;
    const u = env.account.user;
    let param: any;

    switch (id)
    {
      case ActionAlertOpen:
        actions.fire(ClientActions.OpenAlert, e);
        break;

      case ActionAlertClose:
        actions.fire(ClientActions.CloseAlert);
        break;

      case ActionProgressOpen:
        actions.fire(ClientActions.OpenProgress, e);
        break;

      case ActionProgressClose:
        actions.fire(ClientActions.CloseProgress);
        break;

      default:
        console.log('materialapp.fire: Unexpected app action');
        return false;
        break;
    }
    return true;
  }

  componentWillReceiveProps(props: AppProps): void
  {
  }

  componentDidMount()
  {
    document.addEventListener('keydown', this.handleKeyPress);
  }

  // Global hot-key hook (Alt/Meta/Ctrl); no bindings are currently active.
  handleKeyPress(evt: any): void
  {
    const {actions} = this.props;

    if (evt.altKey || evt.metaKey || evt.ctrlKey)
    {
      let action = 0;
      switch (evt.key)
      {
        case 'f':
          break;
      }
      if (action)
      {
        actions.fire(action);
        evt.stopPropagation();
        evt.preventDefault();
      }
    }
  }

  // File-list table; rendered only in VIEW_FILELIST mode.
  renderTable(): any
  {
    const {classes, actions, rows, viewMode} = this.props;

    if (viewMode !== VIEW_FILELIST)
      return null;

    let Columns: TV.ColumnList = [
      { id: 'name', fieldType: 'string', disablePadding: true, label: 'Name' },
      { id: 'isjson', fieldType: 'boolean', disablePadding: true, label: 'J?' },
      { id: 'analyze', fieldType: 'button', disablePadding: true, label: 'Analyze' },
    ];

    function Sorter(rows: TV.RowList, orderBy: string, order: TV.Ordering): TV.RowList
    {
      return TV.TableViewSorter(rows, Columns, orderBy, order);
    }

    let tablerows = rows.map((r: any) => { return ({ id: r.id, name: r.name, isjson: r.json != null, analyze: 'Analyze' }) });

    let tvProps: TV.TableViewProps = {
      actions: this.tableActions,
      selection: null,
      columns: Columns,
      rows: tablerows,
      sorter: Sorter,
      ordering: 'ASC',
      orderBy: 'name',
      //outerHeight: "400px",
      rowHeight: 36,
      disableHeader: false,
      showCheck: false,
    };

    return (
      <div className={classes.table}>
        <TV.TableView {...tvProps} />
      </div>);
  }

  // Analytics pane for the selected row; rendered only outside VIEW_FILELIST mode.
  renderAnalyticsView(): JSX.Element
  {
    const {classes, env, roles, actions, rows, selectedRow, viewMode} = this.props;

    let rowToRender: any = this.props.rows.find((r: any) => r.id === selectedRow);
    if (viewMode === VIEW_FILELIST || rowToRender === undefined)
      return null;
    // console.log(`renderAnalyticsView: ${rowToRender ? 'row to render' : 'no row to render'}`);

    const stateXX: string = 'XX';
    const bHidePartisanData: boolean = false;

    // HACK - set the designSize <<< cloned from updateDesignSize() in client.tsx
    let el: any = document.getElementById('root');
    let w: number = el.clientWidth;
    let h: number = el.clientHeight;
    let designSize: DW;
    if (w < 376) designSize = DW.PHONE;
    else if (w < 475) designSize = DW.PHONEPLUS;
    else if (w < 575) designSize = DW.NARROW;
    else if (w < 645) designSize = DW.NARROWPLUS;
    else if (w < 725) designSize = DW.NARROWPLUS2;
    else if (w < 770) designSize = DW.TABLET;
    else if (w < 870) designSize = DW.MEDIUM;
    else if (w < 930) designSize = DW.MEDIUMPLUS;
    else if (w < 1155) designSize = DW.WIDE;
    else if (w < 1250) designSize = DW.WIDER;
    else designSize = DW.WIDEST;

    return (<AnlzView.AnalyticsView {...{actions, xx: stateXX, env, roles, designSize, bHidePartisanData, openView: true, row: rows[+selectedRow] }} />);
  }

  // Render every currently-open pluggable viewer (dialogs etc.).
  renderViewers(): any[]
  {
    const { viewerProps } = this.props;
    const { viewerState } = this.props;
    let views: any[] = [];
    Object.keys(viewerProps).forEach(key => {
      views.push(this.props.viewers[key].render(viewerProps[key], viewerState[key]));
    });
    return views;
  }

  // Open the file-pick dialog viewer.
  handlePick(): void
  {
    const {actions} = this.props;
    const alertParam: ClientActions.ParamAlert = { message: 'Pick Files', ok: 'Pick', cancel: 'Cancel' };
    const pickParam: ClientActions.ParamPick = { alertParam: alertParam, multiple: true };
    actions.fire(ClientActions.Open, { name: 'pick', params: pickParam });
  }

  render(): any
  {
    const { classes, actions } = this.props;

    let result = (
      <MuiThemeProvider theme={MaterialTheme}>
        <div className={classes.root}>
          {this.renderViewers()}
          <Material.Button onClick={this.handlePick} >
            Pick Files
          </Material.Button>
          {this.renderTable()}
          {this.renderAnalyticsView()}
        </div>
      </MuiThemeProvider>
    );

    return result;
  }
}

let MaterialTheme: any = Material.createMuiTheme(
  {
    transitions: {
      // So we have transition: none; everywhere
      create: () => 'none',
    },
    palette: {
      primary: {
        light: MuiColors.blue['200'],
        main: MuiColors.blue['700'],
        dark: MuiColors.blue['900'],
        contrastText: '#ebf0f0',
      },
      secondary: {
        light: MuiColors.red['200'],
        main: MuiColors.red['600'],
        dark: MuiColors.red['800'],
        contrastText: '#ebf0f0',
      },
      background: {
        default: '#ffffff',
      },
    },
  }
);

let StyledMaterialApp: any = withStyles(AppStyles, { withTheme: true })(InternalMaterialApp);
export const MaterialApp: new () => React.Component<AppProps, AppState> = StyledMaterialApp;

export function isWide(designSize: DW): boolean
{
  return designSize >= DW.WIDE;
}

export function getTooltip(tip: string): any
{
  return (
    <Material.Typography style={{ fontSize: '0.8rem', color: 'white' }}>
      {tip}
    </Material.Typography>
  )
}

// Elide labels longer than four characters to "xxxx..".
export function shortLabel(label: string): string
{
  return label.length > 4 ? label.slice(0, 4) + '..' : label;
}

// short label Element with optional tooltip, if elided
export function shortLabelOptionalTip(label: string): JSX.Element
{
  const labelP: string = shortLabel(label);
  const elided: boolean = labelP.endsWith('..');

  return elided ?
    <Material.Tooltip title={getTooltip(label)}>
      <span>{labelP}</span>
    </Material.Tooltip> :
    <span>{labelP}</span>;
}

export const MAPVIEW_ANLZ = 'anlz';
export const VIEW_FILELIST = 'filelist';
/**
 * @file A content script that is injected in https://0.facebook.com/* iframes
 * @author <NAME> <<EMAIL>>
 */

const ZeroWorker = {}

ZeroWorker._pageDate = Date.now(); // important for updating
ZeroWorker._pageLink = window.location.toString(); // caching

// Stamp a response object with the page date/link captured at injection time.
ZeroWorker._addMeta = function _addMeta(obj) {
  obj._pageDate = ZeroWorker._pageDate
  obj._pageLink = ZeroWorker._pageLink
}

/**
 * A general purpose link finder
 * @param {string|RegExp} criteria - Prefix (string) or a pattern (regex)
 * @returns {string} First matching href, or '' when nothing matches
 */
ZeroWorker.getLink = function getLink(criteria) {
  try {
    const hasPrefix = ($a) => $a.href.startsWith(criteria);
    const hasPattern = ($a) => criteria.test($a.href);
    const predicate = typeof criteria === 'string' ? hasPrefix : hasPattern;
    const $links = Array.from(document.querySelectorAll('a'))
    // find() may return undefined; the resulting TypeError is caught below.
    const link = $links.find(predicate).href
    return link
  } catch (e) {
    console.warn(e)
    return ''
  }
}

/**
 * Extract href string from the anchor `$a` maybe
 * Used often in different places
 * @param {HTMLAnchorElement} $a - Anchor element maybe
 * @return {string} Link or empty string
 */
ZeroWorker.getHref = function getHref($a) {
  if ($a && typeof $a.href === 'string') {
    return $a.href
  } else {
    return ''
  }
}

/**
 * A general purpose CSS class names finder
 * Class name changes randomly, we use this to determine them
 * @example
 * // having a rule .xx{font-weight:bold;}
 * getClassName('{font-weight:bold;}') === 'xx'
 *
 * @param {string} rule
 * @returns {string} Matching class name, or '' when no rule matches
 */
ZeroWorker.getClassName = function getClassName(rule) {
  const styleHtml = document.querySelector('style').innerHTML
  // Escape regex metacharacters that appear in CSS rule text.
  const escapedRule = rule.replace(/[{}().]/g, '\\$&')
  const rRule = RegExp('\\.(\\w+?)' + escapedRule)
  const [, className] = styleHtml.match(rRule) || []
  return className || ''
}

/**
 * Preprocess texts in thread and message divs to preserve emojis and emotions.
 *
 * @todo handle urls, photo, audio, video,
 * @param {HTMLElement} $div
 * @returns {void}
 */
ZeroWorker.textify = function textify($div) {
  // Surround str with ASCII Start/End of Text control characters
  const STX = '\x02'
  const ETX = '\x03'
  const mark = (str) => STX + str + ETX

  // Emoticons are rendered as <i> elements with a background-image URL.
  $div.querySelectorAll('i[style]').forEach(($emoImage) => {
    const imgUrl = $emoImage.style.backgroundImage
    const url = imgUrl.slice(5, -2) // strip `url("` and `")`
    const text = mark('emoticon:' + url)
    $emoImage.replaceWith(text)
  })

  // Emoji spans are identified by their (randomized) class name.
  const emoClass = ZeroWorker.getClassName('{display:table-cell;padding:4px;')
  $div.querySelectorAll(`[class="${emoClass}"]`).forEach(($emoText) => {
    const text = mark('emoji:' + $emoText.innerText.trim())
    $emoText.parentElement.replaceWith(text)
  })
}

/** @return {string} Error text or an empty string */
ZeroWorker.getError = function getError() {
  const errBorderColor = "rgb(221, 60, 16)" // red
  const $root = document.querySelector('#root')
  let err = '';
  if ($root.children.length === 3) {
    const $firstDiv = $root.children[0]
    if (getComputedStyle($firstDiv).borderColor === errBorderColor) {
      err = $firstDiv.innerText
    }
  }
  return err
}

/**
 * Obeys Master's order and sends a response.
 *
 * @param {MessageEvent} event
 * @listens Window:message
 * @fires Window:message
 */
ZeroWorker.onOrder = function onOrder(event) {
  const job = event.data
  // Validate the job envelope before dispatching to a ZeroWorker function.
  if (typeof job !== 'object' ||
      typeof job.fn !== 'string' ||
      typeof ZeroWorker[job.fn] !== 'function' ||
      !Array.isArray(job.args)) {
    return event.source.postMessage({job, response: {error: 'bad job'}}, '*')
  }
  const output = ZeroWorker[job.fn](...job.args)
  if (output) {
    const isObject = typeof output === 'object' && !Array.isArray(output)
    const response = isObject ? output : {value: output}
    ZeroWorker._addMeta(response)
    event.source.postMessage({job, response}, '*')
  }
}

window.addEventListener('message', ZeroWorker.onOrder, false)
#!/bin/bash
# Run the EXO-200 shape-agreement comparison (shapement_.py) once per
# calibration source position/isotope.
#
# shapement_.py positional arguments:
#   1: tagged-MC HDF5 file
#   2: directory holding the tagged reconstructed-data files
#   3: comma-separated list of tagged run files (found under arg 2)
#   4: source position label (e.g. S5)
#   5: source isotope (e.g. Th228)
#   6: flag, passed through unchanged (1)

BASE="/dybfs2/nEXO/fuys/EXO-200/shape_agreement/2019_0vbb/Phase2/162_10_182_173_indE_DNN_v2"
MC_DIR="${BASE}/data/small_tag_mc_data"
REC_DIR="${BASE}/data/tag_rec_data/"

# $1: MC file name   $2: comma-separated run list   $3: position   $4: isotope
run_shapement() {
    python3 -W ignore::RuntimeWarning shapement_.py \
        "${MC_DIR}/$1" \
        "${REC_DIR}" \
        "$2" \
        "$3" "$4" 1
}

run_shapement "tag_mc_SourceS5_Th228_px2550_py39_pz100_ml.h5" "tag_run_7124_ml.h5,tag_run_7125_ml.h5" "S5" "Th228"
run_shapement "tag_mc_SourceS5_Ra226_ml.h5" "tag_run_7146_ml.h5,tag_run_7147_ml.h5" "S5" "Ra226"
run_shapement "tag_mc_SourceS5_Co60_px2550_py39_ml.h5" "tag_run_7239_ml.h5,tag_run_7504_ml.h5" "S5" "Co60"
run_shapement "tag_mc_SourceS2_Ra226_ml.h5" "tag_run_7145_ml.h5,tag_run_7491_ml.h5,tag_run_7887_ml.h5" "S2" "Ra226"
run_shapement "tag_mc_SourceS2_Co60_ml.h5" "tag_run_7502_ml.h5,tag_run_7888_ml.h5,tag_run_8263_ml.h5" "S2" "Co60"
run_shapement "tag_mc_SourceS2_Th228_ml.h5" "tag_run_7929_ml.h5,tag_run_7930_ml.h5,tag_run_7931_ml.h5" "S2" "Th228"
run_shapement "tag_mc_SourceS8_Ra226_py23_pz2953_ml.h5" "tag_run_7144_ml.h5,tag_run_7886_ml.h5,tag_run_8242_ml.h5" "S8" "Ra226"
run_shapement "tag_mc_SourceS8_Th228_ml.h5" "tag_run_7121_ml.h5,tag_run_7123_ml.h5,tag_run_7537_ml.h5" "S8" "Th228"
run_shapement "tag_mc_SourceS8_Co60_ml.h5" "tag_run_7238_ml.h5,tag_run_7898_ml.h5,tag_run_7503_ml.h5" "S8" "Co60"
run_shapement "tag_mc_SourceS11_Th228_ml.h5" "tag_run_7912_ml.h5,tag_run_7911_ml.h5,tag_run_7910_ml.h5" "S11" "Th228"
import React from 'react';

import SvgIcon from './svgIcon';

type Props = React.ComponentProps<typeof SvgIcon>;

// Printer glyph on a 16x16 viewBox; path data is generated — do not hand-edit.
const IconPrint = React.forwardRef(function IconPrint(
  props: Props,
  ref: React.Ref<SVGSVGElement>
) {
  return (
    <SvgIcon {...props} ref={ref}>
      <path d="M13.12,16H2.88a.76.76,0,0,1-.75-.75V6a.76.76,0,0,1,.75-.75H13.12a.76.76,0,0,1,.75.75v9.21A.76.76,0,0,1,13.12,16Zm-9.49-1.5h8.74V6.77H3.63Z" />
      <path d="M13.25,12.12h-.13v-1.5h.13A1.25,1.25,0,0,0,14.5,9.38V2.75A1.25,1.25,0,0,0,13.25,1.5H2.75A1.25,1.25,0,0,0,1.5,2.75V9.38a1.25,1.25,0,0,0,1.25,1.24h.13v1.5H2.75A2.75,2.75,0,0,1,0,9.38V2.75A2.75,2.75,0,0,1,2.75,0h10.5A2.75,2.75,0,0,1,16,2.75V9.38A2.75,2.75,0,0,1,13.25,12.12Z" />
      <path d="M11.08,10H4.92a.75.75,0,0,1,0-1.5h6.16a.75.75,0,1,1,0,1.5Z" />
      <path d="M4.43,3.61H2.88a.75.75,0,0,1,0-1.5H4.43a.75.75,0,0,1,0,1.5Z" />
      <path d="M11.08,12.8H4.92a.75.75,0,1,1,0-1.5h6.16a.75.75,0,1,1,0,1.5Z" />
    </SvgIcon>
  );
});

IconPrint.displayName = 'IconPrint';

export {IconPrint};
#!/usr/bin/env bash
# Copyright 2018 The TensorFlow Probability Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================

# Include seconds since script execution as a prefix for all command echo logs.
PS4='+ $SECONDS s\011 '  # Octal 9, for tab

set -x  # print commands as they are executed
set -e  # fail and exit on any command erroring
set -u  # fail and exit on any undefined variable reference

# Make sure the environment variables are set.
# (`exit 1` rather than `exit -1`: exit statuses are 0-255, and a negative
# argument is not portable — POSIX shells may reject it outright.)
if [ -z "${SHARD}" ]; then
  echo "SHARD is unset."
  exit 1
fi

if [ -z "${NUM_SHARDS}" ]; then
  echo "NUM_SHARDS is unset."
  exit 1
fi

# Run a command wrapped in Travis log-fold markers so its output is collapsible.
call_with_log_folding() {
  local command=$1
  echo "travis_fold:start:$command"
  $command
  echo "travis_fold:end:$command"
}

install_bazel() {
  # Install Bazel for tests. Based on instructions at
  # https://docs.bazel.build/versions/master/install-ubuntu.html#install-on-ubuntu
  # (We skip the openjdk8 install step, since travis lets us have that by
  # default).

  # Add Bazel distribution URI as a package source
  echo "deb [arch=amd64] http://storage.googleapis.com/bazel-apt stable jdk1.8" \
    | sudo tee /etc/apt/sources.list.d/bazel.list
  curl https://bazel.build/bazel-release.pub.gpg | sudo apt-key add -

  # Update apt and install bazel (use -qq to minimize log cruft)
  sudo apt-get update
  sudo apt-get install bazel
}

# Print the newest acceptable nightly version string for the given package.
find_version_str() {
  PKG_NAME=$1
  # These are nightly builds we'd like to avoid for some reason; separated by
  # regex OR operator.
  BAD_NIGHTLY_DATES="20200112\|20200113"
  # This will fail to find version 'X" and log available version strings to
  # stderr. We then sort, remove bad versions and take the last entry. This
  # allows us to avoid hardcoding the main version number, which would then need
  # to be updated on every new TF release.
  pip install $PKG_NAME==X 2>&1 \
    | grep -o "[0-9.]\+dev[0-9]\{8\}" \
    | sort \
    | grep -v "$BAD_NIGHTLY_DATES" \
    | tail -n1
}

install_python_packages() {
  # Ensure newer than 18.x pip version, which is necessary after tf-nightly
  # switched to manylinux2010.
  pip install --upgrade 'pip>=19.2'

  # NB: tf-nightly pulls in other deps, like numpy, absl, and six, transitively.
  TF_VERSION_STR=$(find_version_str tf-nightly)
  pip install tf-nightly==$TF_VERSION_STR

  # The following unofficial dependencies are used only by tests.
  # TODO(b/148685448): Unpin Hypothesis version.
  pip install hypothesis==4.36.0 matplotlib mock scipy

  # Install additional TFP dependencies.
  pip install decorator cloudpickle

  # Upgrade numpy to the latest to address issues that happen when testing with
  # Python 3 (https://github.com/tensorflow/tensorflow/issues/16488).
  pip install -U numpy

  # Print out all versions, as an FYI in the logs.
  python --version
  pip --version
  pip freeze
}

call_with_log_folding install_bazel
call_with_log_folding install_python_packages

test_tags_to_skip="(gpu|requires-gpu-sm35|notap|no-oss-ci|tfp_jax|tf2-broken|tf2-kokoro-broken)"

# Given a test size (small, medium, large), a number of shards and a shard ID,
# query and print a list of tests of the given size to run in the given shard.
query_and_shard_tests_by_size() {
  size=$1
  bazel_query="attr(size, ${size}, tests(//tensorflow_probability/...)) \
               except \
               attr(tags, \"${test_tags_to_skip}\", \
                    tests(//tensorflow_probability/...))"
  bazel query ${bazel_query} \
    | awk -v n=${NUM_SHARDS} -v s=${SHARD} 'NR%n == s'
}

# Generate a list of tests for this shard, consisting of a subset of tests of
# each size (small, medium and large). By evenly splitting the various test
# sizes across shards, we help ensure the shards have comparable runtimes.
sharded_tests="$(query_and_shard_tests_by_size small)"
sharded_tests="${sharded_tests} $(query_and_shard_tests_by_size medium)"
sharded_tests="${sharded_tests} $(query_and_shard_tests_by_size large)"

# Run tests. Notes on less obvious options:
# --notest_keep_going -- stop running tests as soon as anything fails. This is
#     to minimize load on Travis, where we share a limited number of concurrent
#     jobs with a bunch of other TensorFlow projects.
# --test_timeout -- comma separated values correspond to various test sizes
#     (short, moderate, long or eternal)
# --action_env -- specify environment vars to pass through to action
#     environment. (We need these in order to run inside a virtualenv.)
#     See https://github.com/bazelbuild/bazel/issues/6648 and b/121259040.
echo "${sharded_tests}" \
  | xargs bazel test \
      --compilation_mode=opt \
      --copt=-O3 \
      --copt=-march=native \
      --notest_keep_going \
      --test_timeout 300,450,1200,3600 \
      --test_env=TFP_HYPOTHESIS_MAX_EXAMPLES=2 \
      --action_env=PATH \
      --action_env=LD_LIBRARY_PATH \
      --test_output=errors
package io.digitalstate.camunda.client.externaltask.models.failure; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.databind.annotation.JsonDeserialize; import com.fasterxml.jackson.databind.annotation.JsonSerialize; import io.digitalstate.camunda.client.common.EngineName; import org.immutables.value.Value; import javax.validation.constraints.Min; import javax.validation.constraints.NotBlank; import java.util.Optional; import static com.fasterxml.jackson.annotation.JsonInclude.Include.NON_EMPTY; @Value.Immutable @Value.Style(jdkOnly = true, typeAbstract = "*Model", typeImmutable = "*", validationMethod = Value.Style.ValidationMethod.NONE, depluralize = true) @JsonSerialize(as = HandleFailure.class) @JsonDeserialize(builder = HandleFailure.Builder.class) public interface HandleFailureModel extends EngineName { /** * The Id of the task to be completed. * Used in in the path params. * Excluded from JSON. * @return */ @JsonIgnore @NotBlank String getId(); @JsonProperty("workerId") @Value.Default @NotBlank default String getWorkerId(){ return "worker"; } @JsonProperty("errorMessage") @JsonInclude(value = NON_EMPTY, content= NON_EMPTY) String getErrorMessage(); @JsonProperty("errorDetails") @JsonInclude(value = NON_EMPTY, content= NON_EMPTY) Optional<String> getErrorDetails(); /** * Must be >= 0. Defaults to 3. * @return */ @JsonProperty("retires") @JsonInclude(value = NON_EMPTY, content= NON_EMPTY) @Min(0) @Value.Default default int getRetries(){ return 10; } /** * A timeout in milliseconds before the external task becomes available again for fetching. * Must be >= 0. * @return */ @JsonProperty("retryTimeout") @JsonInclude(value = NON_EMPTY, content= NON_EMPTY) @Min(0) Optional<Long> getRetryTimeout(); }
package net.zomis.monopoly.model;

/**
 * Immutable result of attempting a game action: a success flag plus a
 * human-readable message explaining the outcome.
 */
public class GameActionResult {

    private final boolean ok;
    private final String message;

    /**
     * @param ok      whether the action succeeded
     * @param message description of the outcome (shown to the player)
     */
    public GameActionResult(boolean ok, String message) {
        this.ok = ok;
        this.message = message;
    }

    /** @return the outcome description */
    public String getMessage() {
        return message;
    }

    /** @return true if the action succeeded */
    public boolean isOk() {
        return ok;
    }
}
import React from "react"
import {
  ScriptDefinitionDetails,
  WorkspaceDefinition,
} from "../../types/settings"
import {
  ScriptProcessor,
  ScriptArgument,
  ScriptOutputLine,
} from "../../types/scripts"
import { createProcessor } from "../../services/scripts/scriptInitializer"
import { initArgumanets } from "../../converters/argsConverter"

// Context contract: the currently selected script, its processor, its
// argument values, accumulated output lines, and mutators for all of them.
interface ScriptContextData {
  definition: ScriptDefinitionDetails | undefined
  processor: ScriptProcessor | undefined
  args: ScriptArgument[]
  output: ScriptOutputLine[]
  selectScript(value: ScriptDefinitionDetails): void
  deselectScript(): void
  updateArgument(argument: ScriptArgument): void
  appendOutput(line: ScriptOutputLine): void
  setOutput(lines: ScriptOutputLine[]): void
  clearOutput(): void
}

const ScriptContext = React.createContext<ScriptContextData>({
  definition: undefined,
  processor: undefined,
  args: [],
  output: [],
  selectScript: () => {},
  deselectScript: () => {},
  updateArgument: () => {},
  appendOutput: () => {},
  setOutput: () => {},
  clearOutput: () => {},
})

interface ScriptProviderProps {
  workspace: WorkspaceDefinition
  children: React.ReactNode
}

/**
 * Provides script-selection state to the tree: which script is active,
 * the processor built for it against the given workspace, its argument
 * values, and the output produced while running it.
 */
export const ScriptProvider = ({
  workspace,
  children,
}: ScriptProviderProps) => {
  const [definition, setDefinition] = React.useState<
    ScriptDefinitionDetails | undefined
  >()
  const [processor, setProcessor] = React.useState<
    ScriptProcessor | undefined
  >()
  const [args, setArgs] = React.useState<ScriptArgument[]>([])
  const [output, setOutputLines] = React.useState<ScriptOutputLine[]>([])

  // Reset everything when the user navigates away from a script.
  const deselectScript = () => {
    setDefinition(undefined)
    setProcessor(undefined)
    setArgs([])
    setOutputLines([])
  }

  const selectScript = (value: ScriptDefinitionDetails) => {
    setDefinition(value)
    setProcessor(createProcessor(value.group, value.definition, workspace))
    setArgs(initArgumanets(value.definition.args ?? []))
    setOutputLines([])
  }

  // BUGFIX: use functional updaters. The previous versions captured `args`
  // / `output` from the render closure, so two updates landing in the same
  // tick (e.g. output streaming in quickly) overwrote each other.
  const updateArgument = (argument: ScriptArgument) => {
    setArgs(prev => {
      const next = [...prev]
      next[argument.index] = argument
      return next
    })
  }

  const appendOutput = (line: ScriptOutputLine) => {
    setOutputLines(prev => [...prev, line])
  }

  const setOutput = (lines: ScriptOutputLine[]) => {
    setOutputLines(lines)
  }

  const clearOutput = () => {
    setOutputLines([])
  }

  return (
    <ScriptContext.Provider
      value={{
        definition,
        processor,
        args,
        output,
        selectScript,
        deselectScript,
        updateArgument,
        appendOutput,
        setOutput,
        clearOutput,
      }}
    >
      {children}
    </ScriptContext.Provider>
  )
}

export default ScriptContext
package io.github.mynametsthad.helpfulutilsbotline.core;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.List;

/**
 * A named shopping list: its creation time, its items, and the users
 * allowed to access it.
 */
public class ShoppingList {

    // Epoch milliseconds recorded when the list is constructed.
    public long createdTimestamp;
    public String name;
    public List<ShoppingListElement> elements = new ArrayList<>();
    // User identifiers permitted to access this list. NOTE(review): the
    // inline initializer is immediately replaced by the constructor argument,
    // so it is effectively redundant.
    public List<String> allowedAccess = new ArrayList<>();

    /**
     * @param name          display name of the list
     * @param allowedAccess users permitted to access the list (stored as-is,
     *                      not copied)
     */
    public ShoppingList(String name, List<String> allowedAccess) {
        this.name = name;
        this.createdTimestamp = new Date().getTime();
        this.allowedAccess = allowedAccess;
    }

    /** Appends the given elements to the list in order. */
    public void AddElements(ShoppingListElement... elements){
        this.elements.addAll(Arrays.asList(elements));
    }

    /** Removes the element at the given 0-based position. */
    public void RemoveElement(int index){
        elements.remove(index);
    }
}
def calculator(nums):
    """Evaluate a flat token list strictly left to right (no precedence).

    ``nums`` alternates operands and operator strings:
    ``[number, op, number, op, number, ...]`` where ``op`` is one of
    ``'+'``, ``'-'``, ``'*'``; anything else is treated as division.

    Returns the running result; a single-element list returns that element.
    """
    result = nums[0]
    # Operators sit at the odd indices, so step by 2. The original walked
    # every index, which sent plain operands into the division branch and
    # then indexed one past the end of the list.
    for i in range(1, len(nums), 2):
        op = nums[i]
        rhs = nums[i + 1]
        if op == '+':
            result += rhs
        elif op == '-':
            result -= rhs
        elif op == '*':
            result *= rhs
        else:
            result /= rhs
    return result


def optimizeCalculator(nums):
    """Evaluate the same token list with normal operator precedence.

    ``*`` and ``/`` bind tighter than ``+`` and ``-``; within a precedence
    level evaluation is left to right. The original implementation crashed
    on its ``calculator(nums[:2])`` seed (a dangling operator) and
    pre-combined operands before dispatching, producing wrong results.
    """
    # First pass: collapse every '*'/'/' application into the operand to
    # its left, leaving a list containing only '+'/'-' at odd indices.
    reduced = [nums[0]]
    i = 1
    while i < len(nums):
        op, rhs = nums[i], nums[i + 1]
        if op == '*':
            reduced[-1] = reduced[-1] * rhs
        elif op == '/':
            reduced[-1] = reduced[-1] / rhs
        else:
            reduced.append(op)
            reduced.append(rhs)
        i += 2
    # Second pass: the remaining additive expression is safe to evaluate
    # left to right.
    return calculator(reduced)
#include <stdio.h>
#include <string.h>

/*
 * Returns 1 if str reads the same forwards and backwards, 0 otherwise.
 * Empty and single-character strings count as palindromes.
 */
int isPalindrome(char str[])
{
    int l = 0;
    int h = strlen(str) - 1;
    while (h > l) {
        if (str[l++] != str[h--]) {
            return 0;
        }
    }
    return 1;
}

int main()
{
    char str[20];
    /* BUGFIX: an unbounded "%s" could overflow the 20-byte buffer.
     * Field width 19 leaves room for the NUL terminator; also bail out
     * if no token was read (EOF / input error). */
    if (scanf("%19s", str) != 1) {
        return 1;
    }
    if (isPalindrome(str))
        printf("The string is a palindrome.");
    else
        printf("The string is not a palindrome.");
    return 0;
}
// Mouse "jiggler": injects a zero-distance mouse-move event every ~5 minutes
// so the OS idle timer keeps resetting (prevents screensaver / auto-lock).
#define WINVER 0x0500
#include <windows.h>
//#include <commctrl.h>
#include <stdio.h>
//#include "resource.h"
#include <iostream>
//#include "powrprof.h"

int APIENTRY WinMain(HINSTANCE hInstance, HINSTANCE hPrevInstance, LPSTR lpCmdLine, int nShowCmd)
{
    // Synthetic input event: a relative move of (0,0) — enough to reset the
    // idle timer without visibly moving the cursor.
    INPUT ip;
    ip.type = INPUT_MOUSE;
    ip.mi.dx=0;
    ip.mi.dy=0;
    ip.mi.mouseData=0;
    ip.mi.dwFlags=MOUSEEVENTF_MOVE;
    ip.mi.time=0;  // 0 = let the system timestamp the event

    // Just under 5 minutes, so the event always lands before a typical
    // 5-minute idle threshold expires.
    const DWORD SleepTime = (1000 * 60 * 5)-500;

    // Intentional infinite loop: runs until the process is killed.
    do {
        SendInput(1, &ip, sizeof(INPUT));
        Sleep(SleepTime);
    }while(1 == 1);
    return 0;
}
# Kick off a load test: POST a JSON spec (1000 messages across 2 threads,
# publishing through the local sidecar on :8080) to the test service on :8090.
curl -v -d '{"messages":1000, "threads":2, "sidecarUrl":"http://127.0.0.1:8080/publish"}' -H "Content-Type: application/json" -X POST http://localhost:8090/test
# Harden FTP deny-list files: owner read/write, group read only (0640).
# stderr is discarded because not every distro has all three paths.
chmod 0640 /etc/ftpusers /etc/vsftpd.ftpusers /etc/vsftpd/ftpusers 2>/dev/null
import { Link, graphql, useStaticQuery } from "gatsby" import React from "react" import "./styles.scss" const Navbar = () => { const data = useStaticQuery(graphql` query { site { siteMetadata { title author } } } `) return ( <nav> <div> <Link className="logo" to="/"> {data.site.siteMetadata.title} </Link> </div> <ul> <li> <Link to="/products">Presets</Link> </li> <li> <Link to="/help">Help</Link> </li> <li> <Link to="/contact">Contact</Link> </li> <li> <Link to="/cart">Cart 🛍</Link> </li> </ul> </nav> ) } export default Navbar // export const query = graphql` // { // site { // siteMetadata { // title // author // } // } // } // `
/**
 * <money> element directive.
 *
 * Renders a title's financial figures via the money partial; `budget` and
 * `revenue` are two-way bound from attributes on the element.
 */
define(
    [   'angular',
        'tmdb/partials/money/MoneyController'],
    function(angular, MoneyController) {
        "use strict";

        return function() {
            return {
                transclude: true,
                replace: true,
                controller: MoneyController,
                templateUrl: '/tmdb/partials/money/money.html',
                restrict: 'E',   // usable as an element only
                scope: {         // isolate scope: only these two bindings
                    budget: '=',
                    revenue: '='
                }
            };
        };
    }
);
//==================================================================================================
/**
  KIWAKU - Containers Well Made
  Copyright 2020 <NAME>
  Licensed under the MIT License <http://opensource.org/licenses/MIT>.
  SPDX-License-Identifier: MIT
**/
//==================================================================================================
#pragma once

#include <kiwaku/assert.hpp>
#include <kiwaku/allocator/block.hpp>
#include <cstdlib>

namespace kwk
{
  // Stateless allocator that services requests straight from the C heap
  // (malloc/free). Satisfies the kwk allocator interface (allocate /
  // deallocate / swap) over kwk::block handles.
  struct heap_allocator
  {
    // Returns a block of n bytes, or a {nullptr, n} block when n == 0
    // (a zero-size request performs no allocation).
    [[nodiscard]] block allocate(std::ptrdiff_t n) noexcept
    {
      return (n!=0) ? block{ malloc(n), n } : block{ nullptr, n };
    }

    // Frees the block's storage if any. NOTE(review): b.data is left
    // dangling after the free — callers must not reuse the block.
    void deallocate(block & b) noexcept
    {
      if(b.data) free(b.data);
    }

    // No state to exchange: swapping heap_allocators is a no-op.
    void swap(heap_allocator&) {}
  };
}
<filename>public/10.js (window["webpackJsonp"] = window["webpackJsonp"] || []).push([[10],{ /***/ "./node_modules/@babel/runtime/helpers/extends.js": /*!********************************************************!*\ !*** ./node_modules/@babel/runtime/helpers/extends.js ***! \********************************************************/ /*! no static exports found */ /***/ (function(module, exports) { function _extends() { module.exports = _extends = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key in source) { if (Object.prototype.hasOwnProperty.call(source, key)) { target[key] = source[key]; } } } return target; }; return _extends.apply(this, arguments); } module.exports = _extends; /***/ }), /***/ "./node_modules/@babel/runtime/helpers/interopRequireDefault.js": /*!**********************************************************************!*\ !*** ./node_modules/@babel/runtime/helpers/interopRequireDefault.js ***! \**********************************************************************/ /*! no static exports found */ /***/ (function(module, exports) { function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { "default": obj }; } module.exports = _interopRequireDefault; /***/ }), /***/ "./node_modules/@material-ui/icons/ColorLens.js": /*!******************************************************!*\ !*** ./node_modules/@material-ui/icons/ColorLens.js ***! \******************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { "use strict"; var _interopRequireDefault = __webpack_require__(/*! @babel/runtime/helpers/interopRequireDefault */ "./node_modules/@babel/runtime/helpers/interopRequireDefault.js"); Object.defineProperty(exports, "__esModule", { value: true }); exports.default = void 0; var _react = _interopRequireDefault(__webpack_require__(/*! 
react */ "./node_modules/react/index.js")); var _createSvgIcon = _interopRequireDefault(__webpack_require__(/*! ./utils/createSvgIcon */ "./node_modules/@material-ui/icons/utils/createSvgIcon.js")); var _default = (0, _createSvgIcon.default)(_react.default.createElement("path", { d: "M12 3c-4.97 0-9 4.03-9 9s4.03 9 9 9c.83 0 1.5-.67 1.5-1.5 0-.39-.15-.74-.39-1.01-.23-.26-.38-.61-.38-.99 0-.83.67-1.5 1.5-1.5H16c2.76 0 5-2.24 5-5 0-4.42-4.03-8-9-8zm-5.5 9c-.83 0-1.5-.67-1.5-1.5S5.67 9 6.5 9 8 9.67 8 10.5 7.33 12 6.5 12zm3-4C8.67 8 8 7.33 8 6.5S8.67 5 9.5 5s1.5.67 1.5 1.5S10.33 8 9.5 8zm5 0c-.83 0-1.5-.67-1.5-1.5S13.67 5 14.5 5s1.5.67 1.5 1.5S15.33 8 14.5 8zm3 4c-.83 0-1.5-.67-1.5-1.5S16.67 9 17.5 9s1.5.67 1.5 1.5-.67 1.5-1.5 1.5z" }), 'ColorLens'); exports.default = _default; /***/ }), /***/ "./node_modules/@material-ui/icons/Gradient.js": /*!*****************************************************!*\ !*** ./node_modules/@material-ui/icons/Gradient.js ***! \*****************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { "use strict"; var _interopRequireDefault = __webpack_require__(/*! @babel/runtime/helpers/interopRequireDefault */ "./node_modules/@babel/runtime/helpers/interopRequireDefault.js"); Object.defineProperty(exports, "__esModule", { value: true }); exports.default = void 0; var _react = _interopRequireDefault(__webpack_require__(/*! react */ "./node_modules/react/index.js")); var _createSvgIcon = _interopRequireDefault(__webpack_require__(/*! 
./utils/createSvgIcon */ "./node_modules/@material-ui/icons/utils/createSvgIcon.js")); var _default = (0, _createSvgIcon.default)(_react.default.createElement("path", { d: "M11 9h2v2h-2zm-2 2h2v2H9zm4 0h2v2h-2zm2-2h2v2h-2zM7 9h2v2H7zm12-6H5c-1.1 0-2 .9-2 2v14c0 1.1.9 2 2 2h14c1.1 0 2-.9 2-2V5c0-1.1-.9-2-2-2zM9 18H7v-2h2v2zm4 0h-2v-2h2v2zm4 0h-2v-2h2v2zm2-7h-2v2h2v2h-2v-2h-2v2h-2v-2h-2v2H9v-2H7v2H5v-2h2v-2H5V5h14v6z" }), 'Gradient'); exports.default = _default; /***/ }), /***/ "./node_modules/@material-ui/icons/Landscape.js": /*!******************************************************!*\ !*** ./node_modules/@material-ui/icons/Landscape.js ***! \******************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { "use strict"; var _interopRequireDefault = __webpack_require__(/*! @babel/runtime/helpers/interopRequireDefault */ "./node_modules/@babel/runtime/helpers/interopRequireDefault.js"); Object.defineProperty(exports, "__esModule", { value: true }); exports.default = void 0; var _react = _interopRequireDefault(__webpack_require__(/*! react */ "./node_modules/react/index.js")); var _createSvgIcon = _interopRequireDefault(__webpack_require__(/*! ./utils/createSvgIcon */ "./node_modules/@material-ui/icons/utils/createSvgIcon.js")); var _default = (0, _createSvgIcon.default)(_react.default.createElement("path", { d: "M14 6l-3.75 5 2.85 3.8-1.6 1.2C9.81 13.75 7 10 7 10l-6 8h22L14 6z" }), 'Landscape'); exports.default = _default; /***/ }), /***/ "./node_modules/@material-ui/icons/utils/createSvgIcon.js": /*!****************************************************************!*\ !*** ./node_modules/@material-ui/icons/utils/createSvgIcon.js ***! \****************************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { "use strict"; var _interopRequireDefault = __webpack_require__(/*! 
@babel/runtime/helpers/interopRequireDefault */ "./node_modules/@babel/runtime/helpers/interopRequireDefault.js"); Object.defineProperty(exports, "__esModule", { value: true }); exports.default = createSvgIcon; var _extends2 = _interopRequireDefault(__webpack_require__(/*! @babel/runtime/helpers/extends */ "./node_modules/@babel/runtime/helpers/extends.js")); var _react = _interopRequireDefault(__webpack_require__(/*! react */ "./node_modules/react/index.js")); var _SvgIcon = _interopRequireDefault(__webpack_require__(/*! @material-ui/core/SvgIcon */ "./node_modules/@material-ui/core/esm/SvgIcon/index.js")); function createSvgIcon(path, displayName) { var Component = _react.default.memo(_react.default.forwardRef(function (props, ref) { return _react.default.createElement(_SvgIcon.default, (0, _extends2.default)({ ref: ref }, props), path); })); if (true) { Component.displayName = "".concat(displayName, "Icon"); } Component.muiName = _SvgIcon.default.muiName; return Component; } /***/ }), /***/ "./node_modules/@react-page/plugins-background/lib-es/Controls/BackgroundDefaultControls.js": /*!**************************************************************************************************!*\ !*** ./node_modules/@react-page/plugins-background/lib-es/Controls/BackgroundDefaultControls.js ***! \**************************************************************************************************/ /*! exports provided: default */ /***/ (function(module, __webpack_exports__, __webpack_require__) { "use strict"; __webpack_require__.r(__webpack_exports__); /* harmony import */ var _material_ui_core_FormControlLabel__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! @material-ui/core/FormControlLabel */ "./node_modules/@material-ui/core/esm/FormControlLabel/index.js"); /* harmony import */ var _material_ui_core_Slider__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! 
@material-ui/core/Slider */ "./node_modules/@material-ui/core/esm/Slider/index.js"); /* harmony import */ var _material_ui_core_Switch__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! @material-ui/core/Switch */ "./node_modules/@material-ui/core/esm/Switch/index.js"); /* harmony import */ var _material_ui_core_Tab__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! @material-ui/core/Tab */ "./node_modules/@material-ui/core/esm/Tab/index.js"); /* harmony import */ var _material_ui_core_Tabs__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! @material-ui/core/Tabs */ "./node_modules/@material-ui/core/esm/Tabs/index.js"); /* harmony import */ var _material_ui_core_Typography__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(/*! @material-ui/core/Typography */ "./node_modules/@material-ui/core/esm/Typography/index.js"); /* harmony import */ var _material_ui_icons_ColorLens__WEBPACK_IMPORTED_MODULE_6__ = __webpack_require__(/*! @material-ui/icons/ColorLens */ "./node_modules/@material-ui/icons/ColorLens.js"); /* harmony import */ var _material_ui_icons_ColorLens__WEBPACK_IMPORTED_MODULE_6___default = /*#__PURE__*/__webpack_require__.n(_material_ui_icons_ColorLens__WEBPACK_IMPORTED_MODULE_6__); /* harmony import */ var _material_ui_icons_Gradient__WEBPACK_IMPORTED_MODULE_7__ = __webpack_require__(/*! @material-ui/icons/Gradient */ "./node_modules/@material-ui/icons/Gradient.js"); /* harmony import */ var _material_ui_icons_Gradient__WEBPACK_IMPORTED_MODULE_7___default = /*#__PURE__*/__webpack_require__.n(_material_ui_icons_Gradient__WEBPACK_IMPORTED_MODULE_7__); /* harmony import */ var _material_ui_icons_Landscape__WEBPACK_IMPORTED_MODULE_8__ = __webpack_require__(/*! 
@material-ui/icons/Landscape */ "./node_modules/@material-ui/icons/Landscape.js"); /* harmony import */ var _material_ui_icons_Landscape__WEBPACK_IMPORTED_MODULE_8___default = /*#__PURE__*/__webpack_require__.n(_material_ui_icons_Landscape__WEBPACK_IMPORTED_MODULE_8__); /* harmony import */ var _react_page_ui__WEBPACK_IMPORTED_MODULE_9__ = __webpack_require__(/*! @react-page/ui */ "./node_modules/@react-page/ui/lib-es/index.js"); /* harmony import */ var react__WEBPACK_IMPORTED_MODULE_10__ = __webpack_require__(/*! react */ "./node_modules/react/index.js"); /* harmony import */ var react__WEBPACK_IMPORTED_MODULE_10___default = /*#__PURE__*/__webpack_require__.n(react__WEBPACK_IMPORTED_MODULE_10__); /* harmony import */ var _types_ModeEnum__WEBPACK_IMPORTED_MODULE_11__ = __webpack_require__(/*! ../types/ModeEnum */ "./node_modules/@react-page/plugins-background/lib-es/types/ModeEnum.js"); /* harmony import */ var _sub_Color__WEBPACK_IMPORTED_MODULE_12__ = __webpack_require__(/*! ./sub/Color */ "./node_modules/@react-page/plugins-background/lib-es/Controls/sub/Color.js"); /* harmony import */ var _sub_Image__WEBPACK_IMPORTED_MODULE_13__ = __webpack_require__(/*! ./sub/Image */ "./node_modules/@react-page/plugins-background/lib-es/Controls/sub/Image.js"); /* harmony import */ var _sub_LinearGradient__WEBPACK_IMPORTED_MODULE_14__ = __webpack_require__(/*! ./sub/LinearGradient */ "./node_modules/@react-page/plugins-background/lib-es/Controls/sub/LinearGradient.js"); var __extends = (undefined && undefined.__extends) || (function () { var extendStatics = function (d, b) { extendStatics = Object.setPrototypeOf || ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; }; return extendStatics(d, b); }; return function (d, b) { extendStatics(d, b); function __() { this.constructor = d; } d.prototype = b === null ? 
Object.create(b) : (__.prototype = b.prototype, new __()); }; })(); var __assign = (undefined && undefined.__assign) || function () { __assign = Object.assign || function(t) { for (var s, i = 1, n = arguments.length; i < n; i++) { s = arguments[i]; for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; } return t; }; return __assign.apply(this, arguments); }; var BackgroundDefaultControls = /** @class */ (function (_super) { __extends(BackgroundDefaultControls, _super); function BackgroundDefaultControls(props) { var _this = _super.call(this, props) || this; _this.renderModeSwitch = function () { var _a = _this.props.state.modeFlag, modeFlag = _a === void 0 ? _this.props.defaultModeFlag : _a; var label = _this.props.translations.onOff; switch (_this.state.mode) { case _types_ModeEnum__WEBPACK_IMPORTED_MODULE_11__["ModeEnum"].COLOR_MODE_FLAG: // label = 'Use color' break; case _types_ModeEnum__WEBPACK_IMPORTED_MODULE_11__["ModeEnum"].IMAGE_MODE_FLAG: // label = 'Use image' break; case _types_ModeEnum__WEBPACK_IMPORTED_MODULE_11__["ModeEnum"].GRADIENT_MODE_FLAG: // label = 'Use gradient' break; default: label = 'Unknown mode'; break; } return (react__WEBPACK_IMPORTED_MODULE_10__["createElement"](_material_ui_core_FormControlLabel__WEBPACK_IMPORTED_MODULE_0__["default"], { control: react__WEBPACK_IMPORTED_MODULE_10__["createElement"](_material_ui_core_Switch__WEBPACK_IMPORTED_MODULE_2__["default"], { onChange: _this.props.handleChangeModeSwitch(_this.state.mode, modeFlag), checked: Boolean(modeFlag & _this.state.mode) }), label: label })); }; _this.renderUI = function () { switch (_this.state.mode) { case _types_ModeEnum__WEBPACK_IMPORTED_MODULE_11__["ModeEnum"].COLOR_MODE_FLAG: return (react__WEBPACK_IMPORTED_MODULE_10__["createElement"](react__WEBPACK_IMPORTED_MODULE_10__["Fragment"], null, _this.renderModeSwitch(), react__WEBPACK_IMPORTED_MODULE_10__["createElement"](_sub_Color__WEBPACK_IMPORTED_MODULE_12__["default"], __assign({}, 
_this.props, { ensureModeOn: _this.ensureModeOn(_types_ModeEnum__WEBPACK_IMPORTED_MODULE_11__["ModeEnum"].COLOR_MODE_FLAG), onChangeBackgroundColorPreview: _this.props.handleChangeBackgroundColorPreview, backgroundColorPreview: _this.props.backgroundColorPreview })))); case _types_ModeEnum__WEBPACK_IMPORTED_MODULE_11__["ModeEnum"].GRADIENT_MODE_FLAG: return (react__WEBPACK_IMPORTED_MODULE_10__["createElement"](react__WEBPACK_IMPORTED_MODULE_10__["Fragment"], null, _this.renderModeSwitch(), react__WEBPACK_IMPORTED_MODULE_10__["createElement"](_sub_LinearGradient__WEBPACK_IMPORTED_MODULE_14__["default"], __assign({}, _this.props, { ensureModeOn: _this.ensureModeOn(_types_ModeEnum__WEBPACK_IMPORTED_MODULE_11__["ModeEnum"].GRADIENT_MODE_FLAG), gradientDegPreview: _this.props.gradientDegPreview, gradientDegPreviewIndex: _this.props.gradientDegPreviewIndex, gradientOpacityPreview: _this.props.gradientOpacityPreview, gradientOpacityPreviewIndex: _this.props.gradientOpacityPreviewIndex, gradientColorPreview: _this.props.gradientColorPreview, gradientColorPreviewIndex: _this.props.gradientColorPreviewIndex, gradientColorPreviewColorIndex: _this.props.gradientColorPreviewColorIndex, onChangeGradientDegPreview: _this.props.handleChangeGradientDegPreview, onChangeGradientOpacityPreview: _this.props.handleChangeGradientOpacityPreview, onChangeGradientColorPreview: _this.props.handleChangeGradientColorPreview })))); case _types_ModeEnum__WEBPACK_IMPORTED_MODULE_11__["ModeEnum"].IMAGE_MODE_FLAG: default: return (react__WEBPACK_IMPORTED_MODULE_10__["createElement"](react__WEBPACK_IMPORTED_MODULE_10__["Fragment"], null, _this.renderModeSwitch(), react__WEBPACK_IMPORTED_MODULE_10__["createElement"](_sub_Image__WEBPACK_IMPORTED_MODULE_13__["default"], __assign({}, _this.props, { onImageLoaded: _this.props.handleImageLoaded, onImageUploaded: _this.props.handleImageUploaded, ensureModeOn: _this.ensureModeOn(_types_ModeEnum__WEBPACK_IMPORTED_MODULE_11__["ModeEnum"].IMAGE_MODE_FLAG) 
})))); } }; _this.ensureModeOn = function (mode) { return function () { var _a = _this.props.state.modeFlag, modeFlag = _a === void 0 ? _this.props.defaultModeFlag : _a; if ((modeFlag & mode) === 0) { _this.props.handleChangeModeSwitch(mode, modeFlag)(); } }; }; _this.handleChangeMode = function (e, mode) { return _this.setState({ mode: mode }); }; _this.state = { mode: props.defaultMode, }; return _this; } BackgroundDefaultControls.prototype.render = function () { var _this = this; var _a = this.props, focused = _a.focused, remove = _a.remove, _b = _a.state, _c = _b.hasPadding, hasPadding = _c === void 0 ? this.props.defaultHasPadding : _c, _d = _b.modeFlag, modeFlag = _d === void 0 ? this.props.defaultModeFlag : _d, _e = _b.darken, darken = _e === void 0 ? this.props.defaultDarken : _e, _f = _b.lighten, lighten = _f === void 0 ? this.props.defaultLighten : _f; var darkenFinal = this.props.darkenPreview !== undefined ? this.props.darkenPreview : darken; var lightenFinal = this.props.lightenPreview !== undefined ? this.props.lightenPreview : lighten; return (react__WEBPACK_IMPORTED_MODULE_10__["createElement"](_react_page_ui__WEBPACK_IMPORTED_MODULE_9__["BottomToolbar"], __assign({ open: focused, title: this.props.translations.pluginName, icon: this.props.IconComponent, onDelete: remove }, this.props), react__WEBPACK_IMPORTED_MODULE_10__["createElement"](_material_ui_core_Tabs__WEBPACK_IMPORTED_MODULE_4__["default"], { value: this.state.mode, onChange: this.handleChangeMode, centered: true }, (this.props.enabledModes & _types_ModeEnum__WEBPACK_IMPORTED_MODULE_11__["ModeEnum"].IMAGE_MODE_FLAG) > 0 && (react__WEBPACK_IMPORTED_MODULE_10__["createElement"](_material_ui_core_Tab__WEBPACK_IMPORTED_MODULE_3__["default"], { icon: react__WEBPACK_IMPORTED_MODULE_10__["createElement"](_material_ui_icons_Landscape__WEBPACK_IMPORTED_MODULE_8___default.a, { color: (modeFlag & _types_ModeEnum__WEBPACK_IMPORTED_MODULE_11__["ModeEnum"].IMAGE_MODE_FLAG) > 0 ? 
'secondary' : undefined }), label: this.props.translations.imageMode, value: _types_ModeEnum__WEBPACK_IMPORTED_MODULE_11__["ModeEnum"].IMAGE_MODE_FLAG })), (this.props.enabledModes & _types_ModeEnum__WEBPACK_IMPORTED_MODULE_11__["ModeEnum"].COLOR_MODE_FLAG) > 0 && (react__WEBPACK_IMPORTED_MODULE_10__["createElement"](_material_ui_core_Tab__WEBPACK_IMPORTED_MODULE_3__["default"], { icon: react__WEBPACK_IMPORTED_MODULE_10__["createElement"](_material_ui_icons_ColorLens__WEBPACK_IMPORTED_MODULE_6___default.a, { color: (modeFlag & _types_ModeEnum__WEBPACK_IMPORTED_MODULE_11__["ModeEnum"].COLOR_MODE_FLAG) > 0 ? 'secondary' : undefined }), label: this.props.translations.colorMode, value: _types_ModeEnum__WEBPACK_IMPORTED_MODULE_11__["ModeEnum"].COLOR_MODE_FLAG })), (this.props.enabledModes & _types_ModeEnum__WEBPACK_IMPORTED_MODULE_11__["ModeEnum"].GRADIENT_MODE_FLAG) > 0 && (react__WEBPACK_IMPORTED_MODULE_10__["createElement"](_material_ui_core_Tab__WEBPACK_IMPORTED_MODULE_3__["default"], { icon: react__WEBPACK_IMPORTED_MODULE_10__["createElement"](_material_ui_icons_Gradient__WEBPACK_IMPORTED_MODULE_7___default.a, { color: (modeFlag & _types_ModeEnum__WEBPACK_IMPORTED_MODULE_11__["ModeEnum"].GRADIENT_MODE_FLAG) > 0 ? 
'secondary' : undefined }), label: this.props.translations.gradientMode, value: _types_ModeEnum__WEBPACK_IMPORTED_MODULE_11__["ModeEnum"].GRADIENT_MODE_FLAG }))), this.renderUI(), react__WEBPACK_IMPORTED_MODULE_10__["createElement"]("br", null), react__WEBPACK_IMPORTED_MODULE_10__["createElement"]("div", { style: { display: 'flex' } }, react__WEBPACK_IMPORTED_MODULE_10__["createElement"]("div", { style: { flex: '1', marginRight: '8px' } }, react__WEBPACK_IMPORTED_MODULE_10__["createElement"](_material_ui_core_Typography__WEBPACK_IMPORTED_MODULE_5__["default"], { variant: "body1", id: "linear-gradient-darken-label" }, this.props.translations.darken, " (", (darkenFinal * 100).toFixed(0), "%)"), react__WEBPACK_IMPORTED_MODULE_10__["createElement"](_material_ui_core_Slider__WEBPACK_IMPORTED_MODULE_1__["default"], { "aria-labelledby": "linear-gradient-darken-label", value: darkenFinal, onChange: function (e, value) { return _this.props.handleChangeDarkenPreview(value instanceof Array ? value[0] : value); }, onChangeCommitted: this.props.handleChangeDarken, step: 0.01, min: 0, max: 1 })), react__WEBPACK_IMPORTED_MODULE_10__["createElement"]("div", { style: { flex: '1', marginLeft: '8px' } }, react__WEBPACK_IMPORTED_MODULE_10__["createElement"](_material_ui_core_Typography__WEBPACK_IMPORTED_MODULE_5__["default"], { variant: "body1", id: "linear-gradient-lighten-label" }, this.props.translations.lighten, " (", (lightenFinal * 100).toFixed(0), "%)"), react__WEBPACK_IMPORTED_MODULE_10__["createElement"](_material_ui_core_Slider__WEBPACK_IMPORTED_MODULE_1__["default"], { "aria-labelledby": "linear-gradient-lighten-label", value: lightenFinal, onChange: function (e, value) { return _this.props.handleChangeLightenPreview(value instanceof Array ? 
value[0] : value); }, onChangeCommitted: this.props.handleChangeLighten, step: 0.01, min: 0, max: 1 }))), react__WEBPACK_IMPORTED_MODULE_10__["createElement"]("div", { style: { display: 'flex' } }, react__WEBPACK_IMPORTED_MODULE_10__["createElement"](_material_ui_core_FormControlLabel__WEBPACK_IMPORTED_MODULE_0__["default"], { control: react__WEBPACK_IMPORTED_MODULE_10__["createElement"](_material_ui_core_Switch__WEBPACK_IMPORTED_MODULE_2__["default"], { onChange: this.props.handleChangeHasPadding, checked: hasPadding }), label: this.props.translations.usePadding })))); }; return BackgroundDefaultControls; }(react__WEBPACK_IMPORTED_MODULE_10__["Component"])); /* harmony default export */ __webpack_exports__["default"] = (BackgroundDefaultControls); //# sourceMappingURL=BackgroundDefaultControls.js.map /***/ }), /***/ "./node_modules/@react-page/plugins-background/lib-es/Controls/sub/Color.js": /*!**********************************************************************************!*\ !*** ./node_modules/@react-page/plugins-background/lib-es/Controls/sub/Color.js ***! \**********************************************************************************/ /*! exports provided: default */ /***/ (function(module, __webpack_exports__, __webpack_require__) { "use strict"; __webpack_require__.r(__webpack_exports__); /* harmony import */ var react__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! react */ "./node_modules/react/index.js"); /* harmony import */ var react__WEBPACK_IMPORTED_MODULE_0___default = /*#__PURE__*/__webpack_require__.n(react__WEBPACK_IMPORTED_MODULE_0__); /* harmony import */ var _react_page_ui__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! 
@react-page/ui */ "./node_modules/@react-page/ui/lib-es/index.js"); var __extends = (undefined && undefined.__extends) || (function () { var extendStatics = function (d, b) { extendStatics = Object.setPrototypeOf || ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; }; return extendStatics(d, b); }; return function (d, b) { extendStatics(d, b); function __() { this.constructor = d; } d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); }; })(); var ColorComponent = /** @class */ (function (_super) { __extends(ColorComponent, _super); function ColorComponent() { var _this = _super !== null && _super.apply(this, arguments) || this; _this.handleChangePickerBackgroundColor = function (e) { return _this.props.onChangeBackgroundColorPreview && _this.props.onChangeBackgroundColorPreview(e); }; _this.handleChangePickerBackgroundColorComplete = function (e) { if (_this.props.onChangeBackgroundColorPreview) { _this.props.onChangeBackgroundColorPreview(undefined); } _this.props.onChange({ backgroundColor: e }); }; return _this; } ColorComponent.prototype.render = function () { var _a = this.props, backgroundColorPreview = _a.backgroundColorPreview, _b = _a.state.backgroundColor, backgroundColor = _b === void 0 ? this.props.defaultBackgroundColor : _b; return (react__WEBPACK_IMPORTED_MODULE_0___default.a.createElement("div", { style: { display: 'flex' } }, react__WEBPACK_IMPORTED_MODULE_0___default.a.createElement(_react_page_ui__WEBPACK_IMPORTED_MODULE_1__["ColorPicker"], { color: backgroundColorPreview ? 
backgroundColorPreview : backgroundColor, onChange: this.handleChangePickerBackgroundColor, onDialogOpen: this.props.ensureModeOn, onChangeComplete: this.handleChangePickerBackgroundColorComplete, style: { margin: 'auto' } }))); }; return ColorComponent; }(react__WEBPACK_IMPORTED_MODULE_0__["Component"])); /* harmony default export */ __webpack_exports__["default"] = (ColorComponent); //# sourceMappingURL=Color.js.map /***/ }), /***/ "./node_modules/@react-page/plugins-background/lib-es/Controls/sub/Image.js": /*!**********************************************************************************!*\ !*** ./node_modules/@react-page/plugins-background/lib-es/Controls/sub/Image.js ***! \**********************************************************************************/ /*! exports provided: default */ /***/ (function(module, __webpack_exports__, __webpack_require__) { "use strict"; __webpack_require__.r(__webpack_exports__); /* harmony import */ var react__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! react */ "./node_modules/react/index.js"); /* harmony import */ var react__WEBPACK_IMPORTED_MODULE_0___default = /*#__PURE__*/__webpack_require__.n(react__WEBPACK_IMPORTED_MODULE_0__); /* harmony import */ var _material_ui_core_Switch__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! @material-ui/core/Switch */ "./node_modules/@material-ui/core/esm/Switch/index.js"); /* harmony import */ var _material_ui_core_TextField__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! @material-ui/core/TextField */ "./node_modules/@material-ui/core/esm/TextField/index.js"); /* harmony import */ var _material_ui_core_FormControlLabel__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! @material-ui/core/FormControlLabel */ "./node_modules/@material-ui/core/esm/FormControlLabel/index.js"); /* harmony import */ var _react_page_ui__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! 
@react-page/ui */ "./node_modules/@react-page/ui/lib-es/index.js"); /* harmony import */ var _material_ui_core_Typography__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(/*! @material-ui/core/Typography */ "./node_modules/@material-ui/core/esm/Typography/index.js"); var __extends = (undefined && undefined.__extends) || (function () { var extendStatics = function (d, b) { extendStatics = Object.setPrototypeOf || ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; }; return extendStatics(d, b); }; return function (d, b) { extendStatics(d, b); function __() { this.constructor = d; } d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); }; })(); var ImageComponent = /** @class */ (function (_super) { __extends(ImageComponent, _super); function ImageComponent() { var _this = _super !== null && _super.apply(this, arguments) || this; _this.handleChangeBackground = function (e) { _this.props.ensureModeOn(); _this.props.onChange({ background: e.target.value }); }; _this.handleChangeIsParallax = function (e) { _this.props.ensureModeOn(); _this.props.onChange({ isParallax: _this.props.state.isParallax === undefined ? false : !_this.props.state.isParallax, }); }; _this.handleImageLoaded = function (image) { _this.props.ensureModeOn(); _this.props.onImageLoaded(image); }; _this.handleImageUploaded = function (resp) { _this.props.onImageUploaded(); _this.props.onChange({ background: resp.url }); }; return _this; } ImageComponent.prototype.render = function () { var _a = this.props.state, _b = _a.isParallax, isParallax = _b === void 0 ? true : _b, _c = _a.background, background = _c === void 0 ? 
'' : _c; return (react__WEBPACK_IMPORTED_MODULE_0__["createElement"]("div", null, react__WEBPACK_IMPORTED_MODULE_0__["createElement"]("div", { style: { display: 'flex' } }, this.props.imageUpload && (react__WEBPACK_IMPORTED_MODULE_0__["createElement"](react__WEBPACK_IMPORTED_MODULE_0__["Fragment"], null, react__WEBPACK_IMPORTED_MODULE_0__["createElement"](_react_page_ui__WEBPACK_IMPORTED_MODULE_4__["ImageUpload"], { imageUpload: this.props.imageUpload, imageLoaded: this.handleImageLoaded, imageUploaded: this.handleImageUploaded }), react__WEBPACK_IMPORTED_MODULE_0__["createElement"](_material_ui_core_Typography__WEBPACK_IMPORTED_MODULE_5__["default"], { variant: "body1", style: { marginLeft: '20px', marginRight: '20px' } }, "OR"))), react__WEBPACK_IMPORTED_MODULE_0__["createElement"](_material_ui_core_TextField__WEBPACK_IMPORTED_MODULE_2__["default"], { placeholder: "http://example.com/image.png", label: this.props.imageUpload ? 'I have a URL' : 'Image URL', style: { width: '256px' }, value: background, onChange: this.handleChangeBackground })), react__WEBPACK_IMPORTED_MODULE_0__["createElement"]("br", null), react__WEBPACK_IMPORTED_MODULE_0__["createElement"]("div", { style: { display: 'flex' } }, react__WEBPACK_IMPORTED_MODULE_0__["createElement"](_material_ui_core_FormControlLabel__WEBPACK_IMPORTED_MODULE_3__["default"], { control: react__WEBPACK_IMPORTED_MODULE_0__["createElement"](_material_ui_core_Switch__WEBPACK_IMPORTED_MODULE_1__["default"], { onChange: this.handleChangeIsParallax, checked: isParallax }), label: "Is parallax" })))); }; return ImageComponent; }(react__WEBPACK_IMPORTED_MODULE_0__["Component"])); /* harmony default export */ __webpack_exports__["default"] = (ImageComponent); //# sourceMappingURL=Image.js.map /***/ }), /***/ "./node_modules/@react-page/plugins-background/lib-es/Controls/sub/LinearGradient.js": /*!*******************************************************************************************!*\ !*** 
./node_modules/@react-page/plugins-background/lib-es/Controls/sub/LinearGradient.js ***! \*******************************************************************************************/ /*! exports provided: default */ /***/ (function(module, __webpack_exports__, __webpack_require__) { "use strict"; __webpack_require__.r(__webpack_exports__); /* harmony import */ var _material_ui_core_Button__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! @material-ui/core/Button */ "./node_modules/@material-ui/core/esm/Button/index.js"); /* harmony import */ var _material_ui_core_IconButton__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! @material-ui/core/IconButton */ "./node_modules/@material-ui/core/esm/IconButton/index.js"); /* harmony import */ var _material_ui_core_Slider__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! @material-ui/core/Slider */ "./node_modules/@material-ui/core/esm/Slider/index.js"); /* harmony import */ var _material_ui_core_Typography__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! @material-ui/core/Typography */ "./node_modules/@material-ui/core/esm/Typography/index.js"); /* harmony import */ var _material_ui_icons_Delete__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! @material-ui/icons/Delete */ "./node_modules/@material-ui/icons/Delete.js"); /* harmony import */ var _material_ui_icons_Delete__WEBPACK_IMPORTED_MODULE_4___default = /*#__PURE__*/__webpack_require__.n(_material_ui_icons_Delete__WEBPACK_IMPORTED_MODULE_4__); /* harmony import */ var _react_page_ui__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(/*! @react-page/ui */ "./node_modules/@react-page/ui/lib-es/index.js"); /* harmony import */ var react__WEBPACK_IMPORTED_MODULE_6__ = __webpack_require__(/*! 
react */ "./node_modules/react/index.js"); /* harmony import */ var react__WEBPACK_IMPORTED_MODULE_6___default = /*#__PURE__*/__webpack_require__.n(react__WEBPACK_IMPORTED_MODULE_6__); var __extends = (undefined && undefined.__extends) || (function () { var extendStatics = function (d, b) { extendStatics = Object.setPrototypeOf || ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; }; return extendStatics(d, b); }; return function (d, b) { extendStatics(d, b); function __() { this.constructor = d; } d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); }; })(); var __assign = (undefined && undefined.__assign) || function () { __assign = Object.assign || function(t) { for (var s, i = 1, n = arguments.length; i < n; i++) { s = arguments[i]; for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; } return t; }; return __assign.apply(this, arguments); }; var LinearGradientComponent = /** @class */ (function (_super) { __extends(LinearGradientComponent, _super); function LinearGradientComponent() { var _this = _super !== null && _super.apply(this, arguments) || this; _this.addGradient = function () { _this.props.ensureModeOn(); _this.props.onChange({ gradients: (_this.props.state.gradients ? _this.props.state.gradients : []).concat({ deg: 45, opacity: 1, }), }); }; _this.handleChangeDeg = function (index, value) { return function () { _this.props.onChangeGradientDegPreview && _this.props.onChangeGradientDegPreview(undefined, undefined); _this.props.onChange({ gradients: (_this.props.state.gradients ? _this.props.state.gradients : []).map(function (g, i) { return (i === index ? 
__assign(__assign({}, g), { deg: value }) : g); }), }); }; }; _this.handleChangeDegPreview = function (index) { return function (e, value) { _this.props.onChangeGradientDegPreview && _this.props.onChangeGradientDegPreview(value, index); }; }; _this.handleChangeOpacity = function (index, value) { return function () { _this.props.onChangeGradientOpacityPreview && _this.props.onChangeGradientOpacityPreview(undefined, undefined); _this.props.onChange({ gradients: (_this.props.state.gradients ? _this.props.state.gradients : []).map(function (g, i) { return (i === index ? __assign(__assign({}, g), { opacity: value }) : g); }), }); }; }; _this.handleChangeOpacityPreview = function (index) { return function (e, value) { _this.props.onChangeGradientOpacityPreview && _this.props.onChangeGradientOpacityPreview(value, index); }; }; _this.handleChangeGradientColor = function (index, cpIndex) { return function (e) { _this.props.onChangeGradientColorPreview && _this.props.onChangeGradientColorPreview(undefined, undefined, undefined); _this.props.onChange({ gradients: [] .concat(_this.props.state.gradients ? _this.props.state.gradients : []) .map(function (g, i) { return i === index ? __assign(__assign({}, g), { colors: (g.colors ? g.colors : []).map(function (c, cpI) { return cpI === cpIndex ? __assign(__assign({}, c), { color: e }) : c; }) }) : g; }), }); }; }; _this.handleChangeGradientColorPreview = function (index, cpIndex) { return function (e) { _this.props.onChangeGradientColorPreview && _this.props.onChangeGradientColorPreview(e, index, cpIndex); }; }; _this.addColor = function (index) { return function () { _this.props.ensureModeOn(); _this.props.onChange({ gradients: (_this.props.state.gradients ? _this.props.state.gradients : []).map(function (g, i) { return i === index ? __assign(__assign({}, g), { colors: (g.colors ? g.colors : []).concat({ color: (g.colors ? g.colors : []).length % 2 === index % 2 ? 
_this.props.defaultGradientColor : _this.props.defaultGradientSecondaryColor, }) }) : g; }), }); }; }; _this.removeColor = function (index, cpIndex) { return function () { _this.props.onChange({ gradients: [] .concat(_this.props.state.gradients ? _this.props.state.gradients : []) .map(function (g, i) { return i === index ? __assign(__assign({}, g), { colors: (g.colors ? g.colors : []).filter(function (c, cpI) { return cpI !== cpIndex; }) }) : g; }), }); }; }; _this.removeGradient = function (index) { return function () { _this.props.onChange({ gradients: [] .concat(_this.props.state.gradients ? _this.props.state.gradients : []) .filter(function (item, i) { return i !== index; }), }); }; }; return _this; } LinearGradientComponent.prototype.render = function () { var _this = this; var _a = this.props, gradientDegPreview = _a.gradientDegPreview, gradientDegPreviewIndex = _a.gradientDegPreviewIndex, gradientOpacityPreview = _a.gradientOpacityPreview, gradientOpacityPreviewIndex = _a.gradientOpacityPreviewIndex, gradientColorPreview = _a.gradientColorPreview, gradientColorPreviewIndex = _a.gradientColorPreviewIndex, gradientColorPreviewColorIndex = _a.gradientColorPreviewColorIndex, _b = _a.state.gradients, gradients = _b === void 0 ? [] : _b; return (react__WEBPACK_IMPORTED_MODULE_6__["createElement"]("div", null, gradients.map(function (gradient, i) { var colors = gradient.colors ? gradient.colors : []; var deg = i === gradientDegPreviewIndex && gradientDegPreview !== undefined ? gradientDegPreview : gradient.deg; var opacity = i === gradientOpacityPreviewIndex && gradientOpacityPreview !== undefined ? 
gradientOpacityPreview : gradient.opacity; return (react__WEBPACK_IMPORTED_MODULE_6__["createElement"]("div", { key: i, style: { marginBottom: '8px', borderLeft: '2px', borderLeftStyle: 'solid', paddingLeft: '8px', } }, react__WEBPACK_IMPORTED_MODULE_6__["createElement"]("div", null, react__WEBPACK_IMPORTED_MODULE_6__["createElement"](_material_ui_core_Typography__WEBPACK_IMPORTED_MODULE_3__["default"], { variant: "body1", id: "linear-gradient-degree-label" }, _this.props.translations.gradientRotation, " (", deg, _this.props.translations.degrees, ")"), react__WEBPACK_IMPORTED_MODULE_6__["createElement"](_material_ui_core_Slider__WEBPACK_IMPORTED_MODULE_2__["default"], { "aria-labelledby": "linear-gradient-degree-label", value: deg, onChange: _this.handleChangeDegPreview(i), onChangeCommitted: _this.handleChangeDeg(i, deg), step: 5, min: 0, max: 360 })), react__WEBPACK_IMPORTED_MODULE_6__["createElement"]("div", null, react__WEBPACK_IMPORTED_MODULE_6__["createElement"](_material_ui_core_Typography__WEBPACK_IMPORTED_MODULE_3__["default"], { variant: "body1", id: "linear-gradient-opacity-label" }, _this.props.translations.gradientOpacity, " (", (opacity * 100).toFixed(0), "%)"), react__WEBPACK_IMPORTED_MODULE_6__["createElement"](_material_ui_core_Slider__WEBPACK_IMPORTED_MODULE_2__["default"], { "aria-labelledby": "linear-gradient-opacity-label", value: opacity, onChange: _this.handleChangeOpacityPreview(i), onChangeCommitted: _this.handleChangeOpacity(i, opacity), step: 0.01, min: 0, max: 1 })), colors.map(function (c, cpIndex) { var color = i === gradientColorPreviewIndex && cpIndex === gradientColorPreviewColorIndex && gradientColorPreview !== undefined ? 
gradientColorPreview : c.color; return (react__WEBPACK_IMPORTED_MODULE_6__["createElement"](react__WEBPACK_IMPORTED_MODULE_6__["Fragment"], { key: cpIndex }, react__WEBPACK_IMPORTED_MODULE_6__["createElement"](_react_page_ui__WEBPACK_IMPORTED_MODULE_5__["ColorPicker"], { style: { marginLeft: '8px' }, color: color, onChange: _this.handleChangeGradientColorPreview(i, cpIndex), onChangeComplete: _this.handleChangeGradientColor(i, cpIndex) }), react__WEBPACK_IMPORTED_MODULE_6__["createElement"](_material_ui_core_IconButton__WEBPACK_IMPORTED_MODULE_1__["default"], { "aria-label": "Delete", onClick: _this.removeColor(i, cpIndex) }, react__WEBPACK_IMPORTED_MODULE_6__["createElement"](_material_ui_icons_Delete__WEBPACK_IMPORTED_MODULE_4___default.a, null)))); }), react__WEBPACK_IMPORTED_MODULE_6__["createElement"](_material_ui_core_Button__WEBPACK_IMPORTED_MODULE_0__["default"], { variant: "contained", onClick: _this.addColor(i), style: { marginLeft: '8px' } }, _this.props.translations.addColor), react__WEBPACK_IMPORTED_MODULE_6__["createElement"](_material_ui_core_IconButton__WEBPACK_IMPORTED_MODULE_1__["default"], { "aria-label": "Delete", onClick: _this.removeGradient(i) }, react__WEBPACK_IMPORTED_MODULE_6__["createElement"](_material_ui_icons_Delete__WEBPACK_IMPORTED_MODULE_4___default.a, null)))); }), react__WEBPACK_IMPORTED_MODULE_6__["createElement"]("div", { style: { display: 'flex' } }, react__WEBPACK_IMPORTED_MODULE_6__["createElement"](_material_ui_core_Button__WEBPACK_IMPORTED_MODULE_0__["default"], { style: { margin: 'auto', }, variant: "contained", onClick: this.addGradient, disabled: gradients.length > 5 }, this.props.translations.addGradient)))); }; return LinearGradientComponent; }(react__WEBPACK_IMPORTED_MODULE_6__["Component"])); /* harmony default export */ __webpack_exports__["default"] = (LinearGradientComponent); //# sourceMappingURL=LinearGradient.js.map /***/ }) }]);
echo "========================= virtualenv version ============================"
echo "virtualenv --version"
virtualenv --version
printf "\n\n"

# Interpreters to smoke-test: each gets a throwaway virtualenv that is
# created, activated, version-checked, deactivated, and removed.
declare -a versions=('python2.7' 'python3.5' 'pypy' 'pypy3')

for version in "${versions[@]}"
do
    echo "============ Setting up Virtual Environment for $version ==========="
    echo "mkdir ~/test_$version"
    mkdir "$HOME/test_$version"
    echo "cd ~/test_$version"
    # Guard the cd: if it fails, skip this interpreter instead of running
    # virtualenv (and, worse, the rm -rf below) in whatever directory we
    # happen to be in.
    cd "$HOME/test_$version" || continue

    echo "virtualenv venv"
    virtualenv venv
    echo "virtualenv -p /usr/bin/$version venv"
    virtualenv -p "/usr/bin/$version" venv

    echo "source venv/bin/activate"
    source venv/bin/activate
    printf "\n\n"

    echo "======================== Python version ============================="
    echo "python --version"
    python --version
    printf "\n\n"

    echo "==================== Deactivating virtualenv ========================"
    echo "deactivate"
    deactivate

    echo "cd ~"
    cd "$HOME" || exit 1
    echo "rm -rf test_$version"
    rm -rf "test_$version"
    # BUG FIX: dropped the original trailing `rm -rf venv` here. The venv
    # lived inside test_$version (already removed above); run from $HOME the
    # old command would delete an unrelated ~/venv if one existed.
    printf "\n\n"
done
#include <fstream> #include "MnvH1DToCSV.h" #include <string> #include <iostream> // function to dump histograms to CSV files namespace PlotUtils{ void MnvH1DToCSV(PlotUtils::MnvH1D *hist, std::string name, std::string directory = "./", double scale=1.0, bool fullprecision, bool syserrors){ std::cout << "entering 1DToCSV " << name << std::endl; std::ofstream *f_values =new std::ofstream(); std::ofstream *f_err =new std::ofstream(); std::ofstream *f_staterr =new std::ofstream(); std::ofstream *f_syserr =new std::ofstream(); std::ofstream *f_bins =new std::ofstream(); std::ofstream *f_corr =new std::ofstream(); f_values->open((directory+name+"_1d.csv").c_str()); f_err->open((directory+name+"_errors_1d.csv").c_str()); f_staterr->open((directory+name+"_staterrors_1d.csv").c_str()); f_syserr->open((directory+name+"_syserrors_1d.csv").c_str()); f_bins->open((directory+name+"_bins_1d.csv").c_str()); f_corr->open((directory+name+"_covariance.csv").c_str()); TH1D stat=hist->GetStatError(); //stat error TH1D total=hist->GetCVHistoWithError(); // CV with total error TH1D sys=hist->GetTotalError(false); //sys error only // *f_bins<<hist->GetXaxis()->GetBinLowEdge(1); //<<std::endl; *f_bins<<"Bins"; *f_values << "Values\t"; *f_err << "err\t"; *f_staterr << "staterr\t"; *f_syserr << "syserr\t"; *f_bins << std::endl; *f_values << std::endl; *f_err << std::endl; *f_staterr << std::endl; *f_syserr << std::endl; *f_bins<<hist->GetXaxis()->GetBinLowEdge(1)<< "\t"; //<<std::endl; if(fullprecision){ for (int i=1;i<=hist->GetXaxis()->GetNbins();i++) { if (i>1) { *f_values << ",\t"; *f_bins << ",\t"; *f_err << ",\t"; *f_staterr << ",\t"; *f_syserr << ",\t"; } // Bin width normalize if not enu when we want a total x sec *f_bins<<hist->GetXaxis()->GetBinUpEdge(i)<< "\t";//<<std::endl; *f_values<<Form("%.17e ",total.GetBinContent(i)/(hist->GetXaxis()->GetBinWidth(i))*scale); *f_err<<Form("%.17e ",total.GetBinError(i)/(hist->GetXaxis()->GetBinWidth(i))*scale); *f_staterr<<Form("%.17e 
",stat.GetBinContent(i)/(hist->GetXaxis()->GetBinWidth(i))*scale); *f_syserr<<Form("%.17e ",sys.GetBinContent(i)/(hist->GetXaxis()->GetBinWidth(i))*scale); } } else{ // *f_bins<<hist->GetXaxis()->GetBinLowEdge(1); //<<std::endl; /**f_bins<<"Bins\t"; *f_bins<<hist->GetXaxis()->GetBinLowEdge(1)<< "\t"; //<<std::endl; *f_values << "Values\t"; *f_err << "err\t"; *f_staterr << "staterr\t"; *f_syserr << "syserr\t"; */ for (int i=1;i<=hist->GetXaxis()->GetNbins();i++) { if (i>1) { *f_values << ",\t"; *f_bins << ",\t"; *f_err << ",\t"; *f_staterr << ",\t"; *f_syserr << ",\t"; } *f_bins<<hist->GetXaxis()->GetBinUpEdge(i)<< "\t";//<<std::endl; // Bin width normalize if not enu when we want a total x sec *f_values<<Form("%.2f ",total.GetBinContent(i)/(hist->GetXaxis()->GetBinWidth(i))*scale); *f_err<<Form("%.2f ",total.GetBinError(i)/(hist->GetXaxis()->GetBinWidth(i))*scale); *f_staterr<<Form("%.2f ",stat.GetBinContent(i)/(hist->GetXaxis()->GetBinWidth(i))*scale); *f_syserr<<Form("%.2f ",sys.GetBinContent(i)/(hist->GetXaxis()->GetBinWidth(i))*scale); } } *f_bins << std::endl; *f_values << std::endl; *f_err << std::endl; *f_staterr << std::endl; *f_syserr << std::endl; f_values->close(); f_err->close(); f_staterr->close(); f_syserr->close(); f_bins->close(); // TMatrixD correlation_matrix= hist->GetTotalCorrelationMatrix(); TMatrixD correlation_matrix= hist->GetTotalErrorMatrix(); correlation_matrix *= (scale*scale); // scale by factor of 10^41 int nbins_x=hist->GetNbinsX(); int totalbins=(nbins_x+2); *f_corr<<std::endl; for (int x=0;x<totalbins;x++) { double binwidcorri; binwidcorri = hist->GetXaxis()->GetBinWidth(x); if (x==0 || x==nbins_x+1 ) continue; // Do not print overflow and underflow for (int this_x=0;this_x<totalbins;this_x++) { if (this_x==0 || this_x==nbins_x+1 ) continue; // Do not print overflow and underflow double binwidcorrj; binwidcorrj = hist->GetXaxis()->GetBinWidth(this_x); if (this_x > 1) *f_corr<< ",\t"; *f_corr<<Form("%.17e 
",correlation_matrix[x][this_x]/binwidcorri/binwidcorrj); // need to include bin widths } *f_corr<<std::endl; } f_corr->close(); // if (!syserrors){ // std::cout << " no systematic errors to consider " << std::endl; // return; // } std::cout << "are we doing systematics " << std::endl; std::ofstream * f_errors = new std::ofstream(); f_errors->open((directory+name+"_sysdump.csv").c_str()); std::vector<std::string> vert_errBandNames = hist->GetVertErrorBandNames(); std::vector<std::string> lat_errBandNames = hist->GetLatErrorBandNames(); std::vector<std::string> uncorr_errBandNames = hist->GetUncorrErrorNames(); std::vector<std::string> cov_errNames = hist->GetCovMatricesNames(); *f_errors << " vert " << vert_errBandNames.size() << std::endl; for( std::vector<std::string>::iterator name=vert_errBandNames.begin(); name!=vert_errBandNames.end(); ++name ){ MnvVertErrorBand* v = hist->GetVertErrorBand(*name); unsigned int nunis = v->GetNHists(); for (int i = 0; i< nunis; i++){ *f_errors << Form("%s_%d",name->c_str(),i) << std::endl; TH1* h = v->GetHist(i); for (int j=1;j <=h->GetXaxis()->GetNbins();j++) { if (j>1) { *f_errors << ",\t"; } *f_errors<<Form("%.17e ",h->GetBinContent(j)/(h->GetXaxis()->GetBinWidth(j))*scale); } *f_errors << std::endl; } } *f_errors << " Lat " << lat_errBandNames.size() << std::endl; for( std::vector<std::string>::iterator name=lat_errBandNames.begin(); name!=lat_errBandNames.end(); ++name ){ MnvLatErrorBand* v = hist->GetLatErrorBand(*name); unsigned int nunis = v->GetNHists(); for (int i = 0; i< nunis; i++){ *f_errors << Form("%s_%d",name->c_str(),i) << std::endl; TH1* h = v->GetHist(i); for (int j=1;j <=h->GetXaxis()->GetNbins();j++) { if (j>1) { *f_errors << ",\t"; } *f_errors<<Form("%.17e ",h->GetBinContent(j)/(h->GetXaxis()->GetBinWidth(j))*scale); } *f_errors << std::endl; } } *f_errors << " covariance " << cov_errNames.size() << std::endl; for( std::vector<std::string>::iterator name=cov_errNames.begin(); name!=cov_errNames.end(); 
++name ){ TMatrixD v = hist->GetSysErrorMatrix(*name); *f_errors << Form("%s",name->c_str()) << std::endl; int nbins_x = hist->GetXaxis()->GetNbins(); int totalbins=(nbins_x+2); for (int x=0;x<totalbins;x++) { double binwidcorri; binwidcorri = hist->GetXaxis()->GetBinWidth(x); if (x==0 || x==nbins_x+1 ) continue; // Do not print overflow and underflow for (int this_x=0;this_x<totalbins;this_x++) { if (this_x==0 || this_x==nbins_x+1 ) continue; // Do not print overflow and underflow double binwidcorrj; binwidcorrj = hist->GetXaxis()->GetBinWidth(this_x); if (this_x > 1) *f_errors<< ",\t"; *f_errors<<Form("%.17e ",v[x][this_x]/binwidcorri/binwidcorrj*scale*scale); // need to include bin widths } *f_errors<<std::endl; } } f_errors->close(); } }
import {
  GENRES_LOADING,
  GENRES_LOAD_SUCCESS,
  GENRES_LOAD_ERROR,
  GENRE_TOGGLE
} from "./constants";
import { fetchGenres } from "../services/api";

// Plain FSA-style action creators.
const genresLoading = payload => ({ type: GENRES_LOADING, payload });
const genresLoadSuccess = payload => ({ type: GENRES_LOAD_SUCCESS, payload });
const genresLoadError = payload => ({ type: GENRES_LOAD_ERROR, payload });
const genreToggle = payload => ({ type: GENRE_TOGGLE, payload });

/**
 * Load the genre list and dispatch loading / success / error actions.
 *
 * BUG FIX: the original dispatched `genresLoading(false)` synchronously,
 * immediately after forking the fetch task, so the loading flag was cleared
 * before the request completed. The flag is now cleared inside the success
 * and error continuations, after the corresponding result action.
 */
const getGenres = dispatch => {
  dispatch(genresLoading(true));

  const left = ({ message }) => {
    dispatch(genresLoadError(message));
    dispatch(genresLoading(false));
  };
  const right = ({ data: { genres } }) => {
    dispatch(genresLoadSuccess(genres));
    dispatch(genresLoading(false));
  };

  fetchGenres().fork(left, right);
};

export {
  genresLoading,
  genresLoadSuccess,
  genresLoadError,
  genreToggle,
  getGenres
};
<filename>packages/sorted-set/tests/functions/has.ts import test from 'ava'; import { SortedSetStructure, has } from '../../src'; import { fromStringArray } from '../test-utils'; const values = ['A', 'B', 'C', 'D', 'E']; let set: SortedSetStructure<string>; test.before(() => { set = fromStringArray(values); }); test('returns true if the set contains the input item', t => { values.forEach(c => t.true(has(c, set))); }); test('returns false if the set does not contain the input item', t => { t.false(has('a', set)); });
/**
 *
 */
package exam1;

import java.io.PrintWriter;
import java.util.Arrays;

/**
 * A simple command-line program to average long values.
 *
 * @author Justin
 */
public class Average {
  public static void main(String[] args) throws Exception {
    PrintWriter pen = new PrintWriter(System.out, true);
    // Parse every argument as a long; a malformed argument raises
    // NumberFormatException, just as the original element-by-element loop did.
    long[] values = Arrays.stream(args).mapToLong(Long::parseLong).toArray();
    pen.println("average(" + Arrays.toString(values) + ") = "
        + MathUtils.average(values));
  } // main(String[])
} // Average
/* eslint-disable no-underscore-dangle */
/* eslint-disable no-void */
/*
 * Forked from vue-bundle-renderer v0.2.10 NPM package
 */
const { extname } = require('path');
const requireFromApp = require('../helpers/require-from-app');

// File-classification patterns (all allow a trailing ?query suffix).
const jsRE = /\.js(\?[^.]+)?$/;
const jsModuleRE = /\.mjs(\?[^.]+)?$/;
const cssRE = /\.css(\?[^.]+)?$/;
const jsCssRE = /\.(js|css)($|\?)/;
const queryRE = /\?.*/;
const extRE = /[^./]+\.[^./]+$/;
const trailingSlashRE = /([^/])$/;

// True only for plain objects (toString tag check), not arrays/null/etc.
function isPlainObject(obj) {
  return Object.prototype.toString.call(obj) === '[object Object]';
}

// Recursively clones plain objects; arrays are copied one level deep via
// slice() (their elements are not cloned); everything else is returned as-is.
function deepClone(val) {
  if (isPlainObject(val)) {
    const res = {};
    Object.keys(val).forEach((key) => {
      res[key] = deepClone(val[key]);
    });
    return res;
  }
  if (Array.isArray(val)) {
    return val.slice();
  }
  return val;
}

// Resolves a server-side module id to the client build files it maps to,
// using the manifest's modules -> file-index table.
function mapIdToFile(id, clientManifest) {
  const files = [];
  const fileIndices = clientManifest.modules[id];
  if (fileIndices !== void 0) {
    fileIndices.forEach((index) => {
      const file = clientManifest.all[index];
      // only include async files or non-js, non-css assets
      if (
        file
        && (clientManifest.async.includes(file) || !jsCssRE.test(file))
      ) {
        files.push(file);
      }
    });
  }
  return files;
}

/**
 * Creates a mapper that maps components used during a server-side render
 * to async chunk files in the client-side build, so that we can inline them
 * directly in the rendered HTML to avoid waterfall requests.
 */
function createMapper(clientManifest) {
  const map = new Map();
  Object.keys(clientManifest.modules).forEach((id) => {
    map.set(id, mapIdToFile(id, clientManifest));
  });
  // map server-side moduleIds to client-side files
  return function mapper(moduleIds) {
    const res = new Set();
    for (let i = 0; i < moduleIds.length; i += 1) {
      const mapped = map.get(moduleIds[i]);
      if (mapped) {
        for (let j = 0; j < mapped.length; j += 1) {
          const entry = mapped[j];
          if (entry !== void 0) {
            res.add(mapped[j]);
          }
        }
      }
    }
    return Array.from(res);
  };
}

// Treat .mjs files -- and extensionless paths -- as ES modules.
function isModule(file) {
  return jsModuleRE.test(file) || !extRE.test(file);
}

// Maps a file extension to the <link as="..."> preload type; empty string
// when the extension is not one we preload.
function getPreloadType(ext) {
  if (ext === 'js' || ext === 'cjs' || ext === 'mjs') {
    return 'script';
  }
  if (ext === 'css') {
    return 'style';
  }
  if (/jpe?g|png|svg|gif|webp|ico/.test(ext)) {
    return 'image';
  }
  if (/woff2?|ttf|otf|eot/.test(ext)) {
    return 'font';
  }
  // not exhausting all possibilities here, but above covers common cases
  return '';
}

// Wraps a raw asset path in the { file, extension, fileWithoutQuery, asType }
// shape the render helpers below expect.
function normalizeFile(file) {
  const fileWithoutQuery = file.replace(queryRE, '');
  const extension = extname(fileWithoutQuery).slice(1);
  return {
    file,
    extension,
    fileWithoutQuery,
    asType: getPreloadType(extension),
  };
}

// Appends a '/' unless the path is empty or already ends with one.
function ensureTrailingSlash(path) {
  return path === '' ? path : path.replace(trailingSlashRE, '$1/');
}

// Precomputes everything the per-request render helpers need from the
// client manifest and the shouldPreload/shouldPrefetch callbacks.
function createRenderContext({ clientManifest, shouldPrefetch, shouldPreload }) {
  return {
    clientManifest,
    shouldPrefetch,
    shouldPreload,
    publicPath: ensureTrailingSlash(clientManifest.publicPath || '/'),
    preloadFiles: (clientManifest.initial || []).map(normalizeFile),
    prefetchFiles: (clientManifest.async || []).map(normalizeFile),
    mapFiles: createMapper(clientManifest),
  };
}

// Renders <link rel="preload"/"modulepreload"> tags for the initial files
// plus the async files used by this render; the shouldPreload callback can
// veto individual files. Fonts get an explicit type and crossorigin.
function renderPreloadLinks(renderContext, usedAsyncFiles) {
  const { shouldPreload, preloadFiles } = renderContext;
  const files = (preloadFiles || []).concat(usedAsyncFiles || []);
  if (!(files.length > 0)) {
    return '';
  }
  return files.map(({
    file, extension, fileWithoutQuery, asType,
  }) => {
    let extra = '';
    if (!shouldPreload(fileWithoutQuery, asType, extension)) {
      return '';
    }
    if (asType === 'font') {
      extra = ` type="font/${extension}" crossorigin`;
    }
    return `<link rel="${isModule(file) ? 'modulepreload' : 'preload'}" href="${renderContext.publicPath}${file}"${asType !== '' ? ` as="${asType}"` : ''}${extra}>`;
  }).join('');
}

// Renders <link rel="prefetch"> tags for async chunks, skipping files that
// the shouldPrefetch callback vetoes or that were already used (and hence
// preloaded) during this render.
function renderPrefetchLinks(renderContext, usedAsyncFiles) {
  const { shouldPrefetch } = renderContext;
  if (!(renderContext.prefetchFiles.length > 0)) {
    return '';
  }
  const alreadyRendered = (file) => usedAsyncFiles && usedAsyncFiles.some((f) => f.file === file);
  return renderContext.prefetchFiles.map(({
    file, fileWithoutQuery, asType, extension,
  }) => {
    if (!shouldPrefetch(fileWithoutQuery, asType, extension)) {
      return '';
    }
    if (alreadyRendered(file)) {
      return '';
    }
    return `<link ${isModule(file) ? 'type="module" ' : ''}rel="prefetch${cssRE.test(file) ? ' stylesheet' : ''}" href="${renderContext.publicPath}${file}">`;
  }).join('');
}

// Preload links first, then prefetch links, as one HTML string.
function renderResourceHints(renderContext, usedAsyncFiles) {
  return renderPreloadLinks(renderContext, usedAsyncFiles)
    + renderPrefetchLinks(renderContext, usedAsyncFiles);
}

// Stylesheet <link> tags for all initial + used-async CSS files, followed by
// any inline styles vue-style-loader collected during this render.
function renderStyles(renderContext, usedAsyncFiles, ssrContext) {
  const initial = renderContext.preloadFiles;
  const cssFiles = initial.concat(usedAsyncFiles).filter(({ file }) => cssRE.test(file));
  return (
    // render links for css files
    (
      cssFiles.length
        ? cssFiles.map(({ file }) => `<link rel="stylesheet" href="${renderContext.publicPath}${file}">`).join('')
        : ''
    )
    // ssrContext.styles is a getter exposed by vue-style-loader which contains
    // the inline component styles collected during SSR
    + (ssrContext.styles || '')
  );
}

// Inline snippet that deletes its own <script> element after it runs, so the
// state-hydration tag does not linger in the DOM.
const autoRemove = 'var currentScript=document.currentScript;currentScript.parentNode.removeChild(currentScript)';

// Serializes ssrContext.state into a self-removing window.__INITIAL_STATE__
// script tag; returns '' when there is no state to hydrate.
function renderVuexState(ssrContext, nonce) {
  if (ssrContext.state !== void 0) {
    const serialize = requireFromApp('serialize-javascript');
    const state = serialize(ssrContext.state, { isJSON: true });
    return `<script${nonce}>window.__INITIAL_STATE__=${state};${autoRemove}</script>`;
  }
  return '';
}

// Deferred <script> tags: first initial entry chunk, then the async chunks
// used by this render, then the remaining initial chunks.
function renderScripts(renderContext, usedAsyncFiles, nonce) {
  if (renderContext.preloadFiles.length > 0) {
    const initial = renderContext.preloadFiles.filter(({ file }) => jsRE.test(file));
    const async = usedAsyncFiles.filter(({ file }) => jsRE.test(file));
    return [initial[0]].concat(async, initial.slice(1))
      .map(({ file }) => `<script${nonce} src="${renderContext.publicPath}${file}" defer></script>`)
      .join('');
  }
  return '';
}

// Builds the SSR renderToString function from the server bundle manifest and
// the client manifest. Each call to the returned function renders one request
// and fills ssrContext._meta with the resource HTML fragments.
module.exports = function createRenderer(opts) {
  if (!opts.serverManifest) {
    throw new Error('Missing server bundle');
  }
  if (!opts.clientManifest) {
    throw new Error('Missing client manifest');
  }
  const createBundle = requireFromApp('@quasar/ssr-helpers/lib/create-bundle');
  const renderContext = createRenderContext(opts);
  // The server bundle publishes module-level style info on this global the
  // first time it is evaluated; keep a reference before it is deleted below.
  global.__VUE_SSR_CONTEXT__ = {};
  const initialContext = global.__VUE_SSR_CONTEXT__;
  opts.runningScriptOptions = global;
  const { evaluateEntry, rewriteErrorTrace } = createBundle(opts);

  // Evaluates the server entry and runs it against this request's context.
  async function runApp(ssrContext) {
    try {
      const entry = await evaluateEntry();
      // On subsequent renders, __VUE_SSR_CONTEXT__ will not be available
      // to prevent cross-request pollution.
      delete global.__VUE_SSR_CONTEXT__;
      // vue-style-loader styles imported outside of component lifecycle hooks
      if (initialContext._styles) {
        ssrContext._styles = deepClone(initialContext._styles);
        // https://github.com/vuejs/vue/issues/6353
        // ensure "styles" is exposed even if no styles are injected
        // in component lifecycles.
        // the renderStyles fn is exposed by vue-style-loader >= 3.0.3
        if (initialContext._renderStyles) {
          Object.defineProperty(ssrContext, 'styles', {
            enumerable: true,
            get() {
              return initialContext._renderStyles(ssrContext._styles);
            },
          });
        }
      }
      return await entry(ssrContext);
    } catch (err) {
      await rewriteErrorTrace(err);
      throw err;
    }
  }

  return async function renderToString(ssrContext, renderTemplate) {
    try {
      const onRenderedList = [];
      Object.assign(ssrContext, {
        _modules: new Set(),
        _meta: {},
        onRendered: (fn) => {
          onRenderedList.push(fn);
        },
      });
      const app = await runApp(ssrContext);
      const resourceApp = await opts.vueRenderToString(app, ssrContext);
      // _modules was filled during the render with the server-side ids of
      // every component used; map them to client async chunk files.
      const usedAsyncFiles = renderContext
        .mapFiles(Array.from(ssrContext._modules))
        .map(normalizeFile);
      onRenderedList.forEach((fn) => {
        fn();
      });
      // maintain compatibility with some well-known Vue plugins
      // like @vue/apollo-ssr:
      if (typeof ssrContext.rendered === 'function') {
        ssrContext.rendered();
      }
      const nonce = ssrContext.nonce !== void 0 ? ` nonce="${ssrContext.nonce}" ` : '';
      Object.assign(ssrContext._meta, {
        resourceApp,
        resourceHints: renderResourceHints(renderContext, usedAsyncFiles),
        resourceStyles: renderStyles(renderContext, usedAsyncFiles, ssrContext),
        resourceScripts: (
          (opts.manualStoreSerialization !== true && ssrContext.state !== void 0
            ? renderVuexState(ssrContext, nonce)
            : '')
          + renderScripts(renderContext, usedAsyncFiles, nonce)
        ),
      });
      return renderTemplate(ssrContext);
    } catch (err) {
      await rewriteErrorTrace(err);
      throw err;
    }
  };
};
from .core.urls import * from .slack.urls import * from .ui.urls import
<reponame>ttarce1612/hotel_book
const Hotel = require("../models/HotelModel");
const apiResponse = require("../helpers/apiResponse");
const _ = require('lodash')
var mongoose = require("mongoose");
mongoose.set("useFindAndModify", false);

/**
 * Hotel list, optionally filtered by name/room/star_rating/furniture,
 * a comma-separated `type` list, and a pricePerMonth range.
 *
 * @returns {Object} JSON response with the matching hotels (empty array if none).
 */
exports.hotelList = [
    function (req, res) {
        try {
            let condition = {};
            // NOTE(review): req.params is a plain object, so `.length` is undefined and
            // this branch never runs — filters are effectively ignored and all hotels
            // are returned. Possibly req.query was intended; confirm against the route.
            if (req.params && req.params.length > 0) {
                condition = _.pick(req.params, 'name', 'room', 'star_rating', 'furniture');
                if (req.params.type) {
                    // `type` may be a comma-separated list; match any of the values.
                    condition['type'] = {$in: req.params.type.split(',')}
                }
                if (req.params.startPrice) {
                    condition['pricePerMonth'] = {$gte: req.params.startPrice}
                }
                if (req.params.endPrice) {
                    // Reuse the $gte object when startPrice was also supplied, so both
                    // bounds end up on the same pricePerMonth condition.
                    condition['pricePerMonth'] = condition.pricePerMonth ? condition.pricePerMonth : {};
                    condition.pricePerMonth['$lte'] = req.params.endPrice
                }
            }
            Hotel.find(condition).then((hotels) => {
                if (hotels.length > 0) {
                    return apiResponse.successResponseWithData(res, "Operation success", hotels);
                } else {
                    return apiResponse.successResponseWithData(res, "Operation success", []);
                }
            });
        } catch (err) {
            //throw error in json response with status 500.
            return apiResponse.ErrorResponse(res, err);
        }
    }
];

/**
 * hotel Detail.
 *
 * Returns an empty object (not a 404) when the id is malformed or unknown.
 *
 * @param {string}      id
 *
 * @returns {Object}
 */
exports.hotelDetail = [
    function (req, res) {
        // Reject ids that are not valid Mongo ObjectIds before querying.
        if (!mongoose.Types.ObjectId.isValid(req.params.id)) {
            return apiResponse.successResponseWithData(res, "Operation success", {});
        }
        try {
            Hotel.findOne({_id: req.params.id}).then((hotel) => {
                if (hotel !== null) {
                    return apiResponse.successResponseWithData(res, "Operation success", hotel);
                } else {
                    return apiResponse.successResponseWithData(res, "Operation success", {});
                }
            });
        } catch (err) {
            //throw error in json response with status 500.
            return apiResponse.ErrorResponse(res, err);
        }
    }
];

/**
 * Book the hotel identified by :id — stamps startDay/endDay/bookedBy from the
 * request body and flips isBook to true.
 *
 * @param {string} id
 *
 * @returns {Object} JSON response with the updated document.
 */
exports.hotelBook = [
    async (req, res) => {
        try {
            // Validate request ... 
            Hotel.findOneAndUpdate({_id: req.params.id}, {
                startDay: req.body.startDay,
                endDay: req.body.endDay,
                bookedBy: req.body.bookedBy,
                isBook: true
            }, {}, function (err, docs) {
                if (err) {
                    return apiResponse.ErrorResponse(res, err);
                } else {
                    return apiResponse.successResponseWithData(res, "Hotel book Success.", docs);
                }
            });
        } catch (err) {
            //throw error in json response with status 500.
            return apiResponse.ErrorResponse(res, err);
        }
    }
];

/**
 * Create a new hotel document from the request body fields.
 *
 * @returns {Object} JSON response with the created hotel.
 */
exports.hotelNew = [
    function (req, res) {
        let _this = req.body;
        try {
            Hotel.create({
                name: _this.name,
                address: _this.address,
                type: _this.type,
                room: _this.room,
                createdAt: _this.createdAt,
                pricePerMonth: _this.pricePerMonth,
                furniture: _this.furniture,
                noted: _this.noted
            }).then((hotel) => {
                if (hotel !== null) {
                    return apiResponse.successResponseWithData(res, "Hotel create success", hotel);
                } else {
                    return apiResponse.successResponseWithData(res, "Operation success", {});
                }
            });
        } catch (err) {
            //throw error in json response with status 500.
            return apiResponse.ErrorResponse(res, err);
        }
    }
];
#!/bin/bash FILES=$(git status --porcelain | egrep -v '^\?\?') if [ "$FILES" != "" ] then echo "Working Directory not clean" echo $FILES exit 1 fi
#!/bin/bash -eu
# Docker entrypoint for Neo4j: normalizes permissions on mounted volumes,
# maps NEO4J_* environment variables into neo4j.conf, optionally installs
# labs plugins, then execs the requested command (dropping root via gosu).

cmd="$1"

# True when the entrypoint itself runs as uid 0.
function running_as_root
{
    test "$(id -u)" = "0"
}

# SECURE_FILE_PERMISSIONS=yes makes mounted-folder checks strict instead of chown-ing.
function secure_mode_enabled
{
    test "${SECURE_FILE_PERMISSIONS:=no}" = "yes"
}

# containsElement needle elem... -> 0 when needle is among the remaining args.
containsElement () {
  local e match="$1"
  shift
  for e; do [[ "$e" == "$match" ]] && return 0; done
  return 1
}

function is_readable
{
    # this code is fairly ugly but works no matter who this script is running as.
    # It would be nice if the writability tests could use this logic somehow.
    local _file=${1}
    perm=$(stat -c %a "${_file}")

    # everyone permission
    if [[ ${perm:2:1} -ge 4 ]]; then
        return 0
    fi
    # owner permissions
    if [[ ${perm:0:1} -ge 4 ]]; then
        if [[ "$(stat -c %U ${_file})" = "${userid}" ]] || [[ "$(stat -c %u ${_file})" = "${userid}" ]]; then
            return 0
        fi
    fi

    # group permissions
    if [[ ${perm:1:1} -ge 4 ]]; then
        if containsElement "$(stat -c %g ${_file})" "${groups[@]}" || containsElement "$(stat -c %G ${_file})" "${groups[@]}" ; then
            return 0
        fi
    fi

    return 1
}

# Succeeds when neither the target user nor any of their groups own the file.
function is_not_writable
{
    local _file=${1}
    # Not using "test -w ${_file}" here because we need to check if the neo4j user or supplied user
    # has write access to the file, and this script might not be running as that user.
    # echo "comparing to ${userid}:${groupid}"
    test "$(stat -c %U ${_file})" != "${userid}" && \
    test "$(stat -c %u ${_file})" != "${userid}" && \
    ! containsElement "$(stat -c %g ${_file})" "${groups[@]}" && \
    ! containsElement "$(stat -c %G ${_file})" "${groups[@]}"
}

# Prints mount-permission troubleshooting advice to stderr and exits 1.
function print_permissions_advice_and_fail ()
{
    _directory=${1}
    echo >&2 "
Folder ${_directory} is not accessible for user: ${userid} or group ${groupid} or groups ${groups[@]}, this is commonly a file permissions issue on the mounted folder.

Hints to solve the issue:
1) Make sure the folder exists before mounting it. Docker will create the folder using root permissions before starting the Neo4j container. The root permissions disallow Neo4j from writing to the mounted folder.
2) Pass the folder owner's user ID and group ID to docker run, so that docker runs as that user.
If the folder is owned by the current user, this can be done by adding this flag to your docker run command:
  --user=\$(id -u):\$(id -g)
       "
    exit 1
}

function check_mounted_folder_readable
{
    local _directory=${1}
    if ! is_readable "${_directory}"; then
        print_permissions_advice_and_fail "${_directory}"
    fi
}

function check_mounted_folder_with_chown
{
    # The /data and /log directory are a bit different because they are very likely to be mounted by the user but not
    # necessarily writable.
    # This depends on whether a user ID is passed to the container and which folders are mounted.
    #
    #   No user ID passed to container:
    #   1) No folders are mounted.
    #      The /data and /log folder are owned by neo4j by default, so should be writable already.
    #   2) Both /log and /data are mounted.
    #      This means on start up, /data and /logs are owned by an unknown user and we should chown them to neo4j for
    #      backwards compatibility.
    #
    #   User ID passed to container:
    #   1) Both /data and /logs are mounted
    #      The /data and /logs folders are owned by an unknown user but we *should* have rw permission to them.
    #      That should be verified and error (helpfully) if not.
    #   2) User mounts /data or /logs *but not both*
    #      The unmounted folder is still owned by neo4j, which should already be writable. The mounted folder should
    #      have rw permissions through user id. This should be verified.
    #   3) No folders are mounted.
    #      The /data and /log folder are owned by neo4j by default, and these are already writable by the user.
    #      (This is a very unlikely use case).
    local mountFolder=${1}
    if running_as_root; then
        if is_not_writable "${mountFolder}" && ! secure_mode_enabled; then
            # warn that we're about to chown the folder and then chown it
            echo "Warning: Folder mounted to \"${mountFolder}\" is not writable from inside container. Changing folder owner to ${userid}."
            chown -R "${userid}":"${groupid}" "${mountFolder}"
        fi
    else
        if [[ ! -w "${mountFolder}" ]] && [[ "$(stat -c %U ${mountFolder})" != "neo4j" ]]; then
            print_permissions_advice_and_fail "${mountFolder}"
        fi
    fi
}

function load_plugin_from_github
{
  # Load a plugin at runtime. The provided github repository must have a versions.json on the master branch with the
  # correct format.
  local _plugin_name="${1}" #e.g. apoc, graph-algorithms, graph-ql

  local _plugins_dir="${NEO4J_HOME}/plugins"
  if [ -d /plugins ]; then
    local _plugins_dir="/plugins"
  fi
  local _versions_json_url="$(jq --raw-output "with_entries( select(.key==\"${_plugin_name}\") ) | to_entries[] | .value" /plugins.json )"
  # Using the same name for the plugin irrespective of version ensures we don't end up with different versions of the same plugin
  local _destination="${_plugins_dir}/${_plugin_name}.jar"
  local _neo4j_version="$(neo4j --version | cut -d' ' -f2)"

  # Now we call out to github to get the versions.json for this plugin and we parse that to find the url for the correct plugin jar for our neo4j version
  echo "Fetching versions.json for Plugin '${_plugin_name}' from ${_versions_json_url}"
  local _versions_json="$(curl --silent --show-error --fail --retry 30 --retry-max-time 300 -L "${_versions_json_url}")"
  local _plugin_jar_url="$(echo "${_versions_json}" | jq --raw-output ".[] | select(.neo4j==\"${_neo4j_version}\") | .jar")"
  if [[ -z "${_plugin_jar_url}" ]]; then
    echo >&2 "No jar URL found for version '${_neo4j_version}' in versions.json from '${_versions_json_url}'"
    echo >&2 "${_versions_json}"
  fi
  echo "Installing Plugin '${_plugin_name}' from ${_plugin_jar_url} to ${_destination} "
  curl --silent --show-error --fail --retry 30 --retry-max-time 300 -L -o "${_destination}" "${_plugin_jar_url}"

  if ! is_readable "${_destination}"; then
    echo >&2 "Plugin at '${_destination}' is not readable"
    exit 1
  fi
}

# If we're running as root, then run as the neo4j user. Otherwise
# docker is running with --user and we simply use that user.  Note
# that su-exec, despite its name, does not replicate the functionality
# of exec, so we need to use both
if running_as_root; then
  userid="neo4j"
  groupid="neo4j"
  groups=($(id -G neo4j))
  exec_cmd="exec gosu neo4j:neo4j"
else
  userid="$(id -u)"
  groupid="$(id -g)"
  groups=($(id -G))
  exec_cmd="exec"
fi
readonly userid
readonly groupid
readonly groups
readonly exec_cmd

# Need to chown the home directory - but a user might have mounted a
# volume here (notably a conf volume). So take care not to chown
# volumes (stuff not owned by neo4j)
if running_as_root; then
  # Non-recursive chown for the base directory
  chown "${userid}":"${groupid}" "${NEO4J_HOME}"
  chmod 700 "${NEO4J_HOME}"
  find "${NEO4J_HOME}" -mindepth 1 -maxdepth 1 -user root -type d -exec chown -R ${userid}:${groupid} {} \;
  find "${NEO4J_HOME}" -mindepth 1 -maxdepth 1 -user root -type d -exec chmod -R 700 {} \;
fi

# Env variable naming convention:
# - prefix NEO4J_
# - double underscore char '__' instead of single underscore '_' char in the setting name
# - underscore char '_' instead of dot '.' char in the setting name
# Example:
# NEO4J_dbms_tx__log_rotation_retention__policy env variable to set
#       dbms.tx_log.rotation.retention_policy setting

# Backward compatibility - map old hardcoded env variables into new naming convention (if they aren't set already)
# Set some to default values if unset
: ${NEO4J_dbms_tx__log_rotation_retention__policy:=${NEO4J_dbms_txLog_rotation_retentionPolicy:-"100M size"}}
: ${NEO4J_wrapper_java_additional:=${NEO4J_UDC_SOURCE:-"-Dneo4j.ext.udc.source=docker"}}
: ${NEO4J_dbms_unmanaged__extension__classes:=${NEO4J_dbms_unmanagedExtensionClasses:-}}
: ${NEO4J_dbms_allow__format__migration:=${NEO4J_dbms_allowFormatMigration:-}}
: ${NEO4J_dbms_connectors_default__advertised__address:=${NEO4J_dbms_connectors_defaultAdvertisedAddress:-}}

if [ "${NEO4J_EDITION}" == "enterprise" ]; then
    : ${NEO4J_causal__clustering_expected__core__cluster__size:=${NEO4J_causalClustering_expectedCoreClusterSize:-}}
    : ${NEO4J_causal__clustering_initial__discovery__members:=${NEO4J_causalClustering_initialDiscoveryMembers:-}}
    : ${NEO4J_causal__clustering_discovery__advertised__address:=${NEO4J_causalClustering_discoveryAdvertisedAddress:-"$(hostname):5000"}}
    : ${NEO4J_causal__clustering_transaction__advertised__address:=${NEO4J_causalClustering_transactionAdvertisedAddress:-"$(hostname):6000"}}
    : ${NEO4J_causal__clustering_raft__advertised__address:=${NEO4J_causalClustering_raftAdvertisedAddress:-"$(hostname):7000"}}
    # Custom settings for dockerized neo4j
    : ${NEO4J_causal__clustering_discovery__advertised__address:=$(hostname):5000}
    : ${NEO4J_causal__clustering_transaction__advertised__address:=$(hostname):6000}
    : ${NEO4J_causal__clustering_raft__advertised__address:=$(hostname):7000}
fi

: ${NEO4J_wrapper_java_additional:=-Dneo4j.ext.udc.source=docker}
: ${NEO4J_dbms_jvm_additional:=-Dunsupported.dbms.udc.source=docker}

# unset old hardcoded unsupported env variables
unset NEO4J_dbms_txLog_rotation_retentionPolicy NEO4J_UDC_SOURCE \
    NEO4J_dbms_unmanagedExtensionClasses NEO4J_dbms_allowFormatMigration \
    NEO4J_dbms_connectors_defaultAdvertisedAddress NEO4J_ha_serverId \
    NEO4J_ha_initialHosts NEO4J_causalClustering_expectedCoreClusterSize \
    NEO4J_causalClustering_initialDiscoveryMembers \
    NEO4J_causalClustering_discoveryListenAddress \
    NEO4J_causalClustering_discoveryAdvertisedAddress \
    NEO4J_causalClustering_transactionListenAddress \
    NEO4J_causalClustering_transactionAdvertisedAddress \
    NEO4J_causalClustering_raftListenAddress \
    NEO4J_causalClustering_raftAdvertisedAddress

# Copy any user-supplied config files into NEO4J_HOME.
if [ -d /conf ]; then
    if secure_mode_enabled; then
        check_mounted_folder_readable "/conf"
    fi
    find /conf -type f -exec cp {} "${NEO4J_HOME}"/conf \;
fi

if [ -d /ssl ]; then
    if secure_mode_enabled; then
        check_mounted_folder_readable "/ssl"
    fi
    : ${NEO4J_dbms_directories_certificates:="/ssl"}
fi

if [ -d /plugins ]; then
    if secure_mode_enabled; then
        if [[ ! -z "${NEO4JLABS_PLUGINS:-}" ]]; then
            # We need write permissions
            check_mounted_folder_with_chown "/plugins"
        fi
        check_mounted_folder_readable "/plugins"
    fi
    : ${NEO4J_dbms_directories_plugins:="/plugins"}
fi

if [ -d /import ]; then
    if secure_mode_enabled; then
        check_mounted_folder_readable "/import"
    fi
    : ${NEO4J_dbms_directories_import:="/import"}
fi

if [ -d /metrics ]; then
    if secure_mode_enabled; then
        check_mounted_folder_readable "/metrics"
    fi
    : ${NEO4J_dbms_directories_metrics:="/metrics"}
fi

if [ -d /logs ]; then
    check_mounted_folder_with_chown "/logs"
    : ${NEO4J_dbms_directories_logs:="/logs"}
fi

if [ -d /data ]; then
    check_mounted_folder_with_chown "/data"
fi

# set the neo4j initial password only if you run the database server
if [ "${cmd}" == "neo4j" ]; then
    if [ "${NEO4J_AUTH:-}" == "none" ]; then
        NEO4J_dbms_security_auth__enabled=false
    elif [[ "${NEO4J_AUTH:-}" == neo4j/* ]]; then
        password="${NEO4J_AUTH#neo4j/}"
        if [ "${password}" == "neo4j" ]; then
            echo >&2 "Invalid value for password. It cannot be 'neo4j', which is the default."
            exit 1
        fi
        if running_as_root; then
            # running set-initial-password as root will create subfolders to /data as root, causing startup fail when neo4j can't read or write the /data/dbms folder
            # creating the folder first will avoid that
            mkdir -p /data/dbms
            chown "${userid}":"${groupid}" /data/dbms
        fi
        # Will exit with error if users already exist (and print a message explaining that)
        # we probably don't want the message though, since it throws an error message on restarting the container.
        neo4j-admin set-initial-password "${password}" 2>/dev/null || true
    elif [ -n "${NEO4J_AUTH:-}" ]; then
        echo >&2 "Invalid value for NEO4J_AUTH: '${NEO4J_AUTH}'"
        exit 1
    fi
fi

# Default settings appended to neo4j.conf when not already present.
declare -A COMMUNITY
declare -A ENTERPRISE

COMMUNITY=(
     [dbms.tx_log.rotation.retention_policy]="100M size"
     [dbms.memory.pagecache.size]="512M"
     [dbms.connectors.default_listen_address]="0.0.0.0"
     [dbms.connector.https.listen_address]="0.0.0.0:7473"
     [dbms.connector.http.listen_address]="0.0.0.0:7474"
     [dbms.connector.bolt.listen_address]="0.0.0.0:7687"
     [dbms.udc.enabled]="false"
)

ENTERPRISE=(
     [causal_clustering.transaction_listen_address]="0.0.0.0:6000"
     [causal_clustering.raft_listen_address]="0.0.0.0:7000"
     [causal_clustering.discovery_listen_address]="0.0.0.0:5000"
)

for conf in ${!COMMUNITY[@]} ; do
    if ! grep -q "^$conf" "${NEO4J_HOME}"/conf/neo4j.conf
    then
        echo -e "\n"$conf=${COMMUNITY[$conf]} >> "${NEO4J_HOME}"/conf/neo4j.conf
    fi
done

for conf in ${!ENTERPRISE[@]} ; do
    if [ "${NEO4J_EDITION}" == "enterprise" ]; then
        if ! grep -q "^$conf" "${NEO4J_HOME}"/conf/neo4j.conf
        then
            echo -e "\n"$conf=${ENTERPRISE[$conf]} >> "${NEO4J_HOME}"/conf/neo4j.conf
        fi
    fi
done

# list env variables with prefix NEO4J_ and create settings from them
unset NEO4J_AUTH NEO4J_SHA256 NEO4J_TARBALL
for i in $( set | grep ^NEO4J_ | awk -F'=' '{print $1}' | sort -rn ); do
    setting=$(echo ${i} | sed 's|^NEO4J_||' | sed 's|_|.|g' | sed 's|\.\.|_|g')
    value=$(echo ${!i})
    # Don't allow settings with no value or settings that start with a number (neo4j converts settings to env variables and you cannot have an env variable that starts with a number)
    if [[ -n ${value} ]]; then
        if [[ ! "${setting}" =~ ^[0-9]+.*$ ]]; then
            if grep -q -F "${setting}=" "${NEO4J_HOME}"/conf/neo4j.conf; then
                # Remove any lines containing the setting already
                sed --in-place "/^${setting}=.*/d" "${NEO4J_HOME}"/conf/neo4j.conf
            fi
            # Then always append setting to file
            echo "${setting}=${value}" >> "${NEO4J_HOME}"/conf/neo4j.conf
        else
            echo >&2 "WARNING: ${setting} not written to conf file because settings that start with a number are not permitted"
        fi
    fi
done

if [ "${cmd}" == "dump-config" ]; then
    if is_not_writable "/conf"; then
        print_permissions_advice_and_fail "/conf"
    fi
    cp --recursive "${NEO4J_HOME}"/conf/* /conf
    echo "Config Dumped"
    exit 0
fi

if [[ ! -z "${NEO4JLABS_PLUGINS:-}" ]]; then
  # NEO4JLABS_PLUGINS should be a json array of plugins like '["graph-algorithms", "apoc-procedures", "streams", "graphql"]'
  for plugin_name in $(echo "${NEO4JLABS_PLUGINS}" | jq --raw-output '.[]'); do
    load_plugin_from_github "${plugin_name}"
  done
fi

# Optional user-provided hook, sourced (not exec'd) so it can alter this shell.
[ -f "${EXTENSION_SCRIPT:-}" ] && . ${EXTENSION_SCRIPT}

# Use su-exec to drop privileges to neo4j user
# Note that su-exec, despite its name, does not replicate the
# functionality of exec, so we need to use both
if [ "${cmd}" == "neo4j" ]; then
  ${exec_cmd} neo4j console
else
  ${exec_cmd} "$@"
fi
const path = require('path'); const fs = require('fs'); const SSI = require('node-ssi'); /** * 返回处理html的中间件 * @param {[type]} webpackDevMiddlewareInstance [description] * @param {[type]} options [description] * @return {[type]} [description] */ module.exports = function (webpackDevMiddlewareInstance, options) { const ssi = new SSI(options); return (req, res, next) => { webpackDevMiddlewareInstance.waitUntilValid(() => { const webpackFs = webpackDevMiddlewareInstance.fileSystem; let filename = webpackDevMiddlewareInstance.getFilenameFromUrl(req.url); let stat = null; let fileContent = void 0; try { stat = webpackFs.statSync(filename); } catch (e) { return next(); } // 如果访问的是目录,将文件名接上index.html if (stat.isDirectory()) { filename = path.join(filename, 'index.html'); } if (filename && filename.match(/\.html$/)) { // 先尝试从webpackdevmiddleware中读取文内容 try { fileContent = webpackFs.readFileSync(filename).toString(); } catch (e) { return next(); } if (fileContent === void 0) { // 如果webpackdevmiddleware中无法读取文件 // 再尝试从本地读取文件内容 try { fileContent = fs.readFileSync(filename, { encoding: 'utf8' }); } catch (e) { return next(); } } ssi.compile(fileContent, (err, content) => { if (err) { console.error(err); return next(); } res.statusCode = 200; res.send ? res.send(content) : res.end(content); }); } else { next(); } }); }; };
#!/bin/sh
# Installs Istio 1.2.4 onto the current kubectl context via Helm:
# downloads the release, bootstraps Tiller, installs the istio-init CRD
# chart, waits for it, then installs the main istio chart and waits again.

set -e
set -x

ISTIO_VERSION=1.2.4
NS=istio-system

# Helm auto completion: source <(helm completion bash)
echo 'source <(helm completion bash)' >> ~/.bashrc

echo "Download istio (version $ISTIO_VERSION)"
curl -L https://git.io/getLatestIstio | ISTIO_VERSION="$ISTIO_VERSION" sh -
cd istio-"$ISTIO_VERSION"

echo "Init helm"
kubectl apply -f install/kubernetes/helm/helm-service-account.yaml
helm init --service-account=tiller

echo "Install the Istio initializer (istio-init) chart to bootstrap all the Istio’s CRDs"
helm install install/kubernetes/helm/istio-init --name istio-init --namespace istio-system

echo "Wait until all pods' status are **Completed**"
# Wait for all pods to be in running state
# (egrep -v exits non-zero when every pod line says Completed, which breaks the loop)
while true
do
    sleep 2
    kubectl get pods -n istio-system -o=jsonpath='{range .items[*]}{.metadata.name}{"\t"}{.status.phase}{"\n"}{end}' | egrep -v "Completed" || break
done

echo "Verify that all 23 Istio CRDs were committed to the Kubernetes api-server"
kubectl get crds | grep 'istio.io\|certmanager.k8s.io' | wc -l

# Example of expected istio-init pod states:
# NAME                      READY   STATUS      RESTARTS   AGE
# istio-init-crd-10-nbksb   0/1     Completed   0          45s
# istio-init-crd-11-6gtct   0/1     Completed   0          45s
# istio-init-crd-12-qdpb5   0/1     Completed   0          45s

echo "Install the istio chart with a custom profile"
helm install install/kubernetes/helm/istio --name istio --namespace istio-system \
    --set gateways.istio-ingressgateway.type=NodePort \
    --set grafana.enabled=true \
    --set tracing.enabled=true \
    --set kiali.enabled=true \
    --set prometheus.enabled=true \
    --set prometheus.service.nodePort.enabled=true

echo "Wait until all pods' status are **Completed** or **Running** or **Succeeded**. This step takes a longer time then previous ones."
while true
do
    sleep 2
    kubectl get pods -n istio-system -o=jsonpath='{range .items[*]}{.metadata.name}{"\t"}{.status.phase}{"\n"}{end}' | egrep -v "Running|Completed|Succeeded" || break
done

echo "Ensure all Helm charts (istio-init and istio) are correctly deployed to kubernetes cluster"
helm ls

# Example of expected output:
# NAME        REVISION  UPDATED                   STATUS    CHART             APP VERSION  NAMESPACE
# istio       1         Sun Aug 18 16:15:45 2019  DEPLOYED  istio-1.2.4       1.2.4        istio-system
# istio-init  1         Sun Aug 18 16:13:49 2019  DEPLOYED  istio-init-1.2.4  1.2.4        istio-system
<filename>packages/playwright/src/screenplay/questions/Selected.ts
import {
  Answerable,
  AnswersQuestions,
  Question,
  UsesAbilities,
} from '@serenity-js/core';
import { formatted } from '@serenity-js/core/lib/io';

import {
  ElementHandleAnswer,
} from '../../answerTypes/ElementHandleAnswer';
import { BrowseTheWeb } from '../abilities';

// Shape of one <option> entry as extracted inside the browser context
// (see SelectedOptions.answeredBy below).
interface Option {
  value: string;
  label: string;
}

/**
 * @desc
 *  Represents options and values selected in a
 *  [HTML `<select>` element](https://developer.mozilla.org/en-US/docs/Web/HTML/Element/select).
 *
 * @see {@link Select}
 */
export class Selected {
  /**
   * @desc
   *  Represents the value of a single option selected in a
   *  [HTML `<select>` element](https://developer.mozilla.org/en-US/docs/Web/HTML/Element/select).
   *
   * @example <caption>Example widget</caption>
   *  <select data-test='countries'>
   *    <option value='UK'>United Kingdom</option>
   *    <option value='PL'>Poland</option>
   *    <option value='US'>United States</option>
   *  </select>
   *
   * @example <caption>Lean Page Object</caption>
   *  import { Target } from '@serenity-js/playwright';
   *
   *  class Countries {
   *    static dropdown = Target.the('countries dropdown')
   *      .located('data-test=countries'));
   *  }
   *
   * @example <caption>Retrieving the selected value</caption>
   *  import { actorCalled } from '@serenity-js/core';
   *  import { BrowseTheWeb, Select, Selected } from '@serenity-js/playwright';
   *  import { Ensure, equals } from '@serenity-js/assertions';
   *  import { chromium } from 'playwright';
   *
   *  actorCalled('Nick')
   *    .whoCan(BrowseTheWeb.using(chromium))
   *    .attemptsTo(
   *      Select.value('UK').from(Countries.dropdown),
   *      Ensure.that(Selected.valueOf(Countries.dropdown), equals('UK')),
   *    );
   *
   * @param {Answerable<ElementHandleAnswer>} target
   *  A {@link Target} identifying the `<select>` element of interest
   *
   * @returns {Question<Promise<string>>}
   *
   * @see {@link Select.value}
   */
  static valueOf(
    target: Answerable<ElementHandleAnswer>
  ): Question<Promise<string>> {
    return new SelectedValue(target);
  }

  /**
   * @desc
   *  Represents values of options selected in a
   *  [HTML `<select multiple>` element](https://developer.mozilla.org/en-US/docs/Web/HTML/Element/select#attr-multiple)
   *
   * @example <caption>Example widget</caption>
   *  <select multiple data-test='countries'>
   *    <option value='UK'>United Kingdom</option>
   *    <option value='PL'>Poland</option>
   *    <option value='US'>United States</option>
   *  </select>
   *
   * @example <caption>Lean Page Object</caption>
   *  import { Target } from '@serenity-js/playwright';
   *
   *  class Countries {
   *    static dropdown = Target.the('countries dropdown')
   *      .located('data-test=countries');
   *  }
   *
   * @example <caption>Retrieving the selected value</caption>
   *  import { actorCalled } from '@serenity-js/core';
   *  import { BrowseTheWeb, Select, Selected } from '@serenity-js/playwright';
   *  import { Ensure, equals } from '@serenity-js/assertions';
   *  import { chromium } from 'playwright';
   *
   *  actorCalled('Nick')
   *    .whoCan(BrowseTheWeb.using(chromium))
   *    .attemptsTo(
   *      Select.values('UK').from(Countries.dropdown),
   *      Ensure.that(Selected.valuesOf(Countries.dropdown), equals([ 'UK' ])),
   *    );
   *
   * @param {Answerable<ElementHandleAnswer>} target
   *  A {@link Target} identifying the `<select>` element of interest
   *
   * @returns {Question<Promise<string[]>>}
   *
   * @see {@link Select.values}
   */
  static valuesOf(
    target: Answerable<ElementHandleAnswer>
  ): Question<Promise<string[]>> {
    return new SelectedValues(target);
  }

  /**
   * @desc
   *  Represents a single option selected in a
   *  [HTML `<select>` element](https://developer.mozilla.org/en-US/docs/Web/HTML/Element/select#attr-multiple)
   *
   * @example <caption>Example widget</caption>
   *  <select data-test='countries'>
   *    <option value='UK'>United Kingdom</option>
   *    <option value='PL'>Poland</option>
   *    <option value='US'>United States</option>
   *  </select>
   *
   * @example <caption>Lean Page Object</caption>
   *  import { Target } from '@serenity-js/playwright';
   *
   *  class Countries {
   *    static dropdown = Target.the('countries dropdown')
   *      .located('data-test=countries');
   *  }
   *
   * @example <caption>Retrieving the selected value</caption>
   *  import { actorCalled } from '@serenity-js/core';
   *  import { BrowseTheWeb, Select, Selected } from '@serenity-js/playwright';
   *  import { Ensure, equals } from '@serenity-js/assertions';
   *  import { chromium } from 'playwright';
   *
   *  actorCalled('Nick')
   *    .whoCan(BrowseTheWeb.using(chromium))
   *    .attemptsTo(
   *      Select.option('Poland').from(Countries.dropdown),
   *      Ensure.that(
   *        Selected.optionIn(Countries.dropdown),
   *        equals('Poland')
   *      ),
   *    );
   *
   * @param {Answerable<ElementHandleAnswer>} target
   *  A {@link Target} identifying the `<select>` element of interest
   *
   * @returns {Question<Promise<string>>}
   *
   * @see {@link Select.option}
   */
  static optionIn(
    target: Answerable<ElementHandleAnswer>
  ): Question<Promise<string>> {
    return new SelectedOptionLabel(target);
  }

  /**
   * @desc
   *  Represents options selected in a
   *  [HTML `<select multiple>` element](https://developer.mozilla.org/en-US/docs/Web/HTML/Element/select#attr-multiple)
   *
   * @example <caption>Example widget</caption>
   *  <select multiple data-test='countries'>
   *    <option value='UK'>United Kingdom</option>
   *    <option value='PL'>Poland</option>
   *    <option value='US'>United States</option>
   *  </select>
   *
   * @example <caption>Lean Page Object</caption>
   *  import { Target } from '@serenity-js/playwright';
   *
   *  class Countries {
   *    static dropdown = Target.the('countries dropdown')
   *      .located('data-test=countries');
   *  }
   *
   * @example <caption>Retrieving the selected value</caption>
   *  import { actorCalled } from '@serenity-js/core';
   *  import { BrowseTheWeb, Select, Selected } from '@serenity-js/playwright';
   *  import { Ensure, equals } from '@serenity-js/assertions';
   *  import { chromium } from 'playwright';
   *
   *  actorCalled('Nick')
   *    .whoCan(BrowseTheWeb.using(chromium))
   *    .attemptsTo(
   *      Select.options('Poland', 'United States').from(Countries.dropdown),
   *      Ensure.that(
   *        Selected.optionsIn(Countries.dropdown),
   *        equals([ 'Poland', 'United States' ])
   *      ),
   *    );
   *
   * @param {Answerable<ElementHandleAnswer>} target
   *  A {@link Target} identifying the `<select>` element of interest
   *
   * @returns {Question<Promise<string[]>>}
   *
   * @see {@link Select.options}
   */
  static optionsIn(
    target: Answerable<ElementHandleAnswer>
  ): Question<Promise<string[]>> {
    return new SelectedOptionLabels(target);
  }
}

/**
 * @package
 * Answers with the `value` of the first selected option.
 * NOTE(review): assumes at least one option is selected — `selectedOptions[0]`
 * would be undefined otherwise; confirm expected behavior for empty selections.
 */
class SelectedValue extends Question<Promise<string>> {
  constructor(private readonly target: Answerable<ElementHandleAnswer>) {
    super(formatted`value selected in ${target}`);
  }

  async answeredBy(actor: AnswersQuestions & UsesAbilities): Promise<string> {
    const selectedOptions = await actor.answer(SelectedOptions.of(this.target));
    return (selectedOptions[0] as any).value;
  }
}

/**
 * @package
 * Answers with the `value` of every selected option.
 */
class SelectedValues extends Question<Promise<string[]>> {
  constructor(private readonly target: Answerable<ElementHandleAnswer>) {
    super(formatted`values selected in ${target}`);
  }

  async answeredBy(actor: AnswersQuestions & UsesAbilities): Promise<string[]> {
    const selectedOptions = await actor.answer(SelectedOptions.of(this.target));
    return selectedOptions.map((option: any) => option.value);
  }
}

/**
 * @package
 * Answers with the `label` (visible text) of the first selected option.
 */
class SelectedOptionLabel extends Question<Promise<string>> {
  constructor(private target: Answerable<ElementHandleAnswer>) {
    super(formatted`option selected in ${target}`);
  }

  async answeredBy(actor: AnswersQuestions & UsesAbilities): Promise<string> {
    const options = await SelectedOptions.of(this.target).answeredBy(actor);
    return (options[0] as any).label;
  }
}

/**
 * @package
 * Answers with the `label` of every selected option.
 */
class SelectedOptionLabels extends Question<Promise<string[]>> {
  constructor(private target: Answerable<ElementHandleAnswer>) {
    super(formatted`options selected in ${target}`);
  }

  async answeredBy(actor: AnswersQuestions & UsesAbilities): Promise<string[]> {
    const selectedOptions = await actor.answer(SelectedOptions.of(this.target));
    return selectedOptions.map((option: any) => option.label);
  }
}

/**
 * @package
 * Shared building block for the questions above: extracts the live
 * `selectedOptions` of the `<select>` element inside the browser context
 * and maps each to a plain { value, label } pair.
 */
class SelectedOptions extends Question<Promise<Option[]>> {
  static of(target: Answerable<ElementHandleAnswer>) {
    return new SelectedOptions(target);
  }

  protected constructor(private target: Answerable<ElementHandleAnswer>) {
    super(formatted`options selected in ${target}`);
  }

  async answeredBy(actor: AnswersQuestions & UsesAbilities): Promise<Option[]> {
    const element = await actor.answer(this.target);
    const selectedOptions = await actor
      .abilityTo(BrowseTheWeb)
      .evaluate((element: any) => {
        return Array.from(element.selectedOptions).map((opt: any) => ({
          value: opt.value,
          label: opt.label,
        }));
      }, element);
    return selectedOptions;
  }
}
package play.libs.ws.ahc

import org.specs2.mock.Mockito

import org.specs2.mutable._

/**
 * Unit tests for AhcWSRequest request-building: HTTP method defaults,
 * virtual host, request timeout validation, and Content-Type handling.
 */
class AhcWSRequestSpec extends Specification with Mockito {

  "AhcWSRequest" should {

    "should respond to getMethod" in {
      val client = mock[AhcWSClient]
      val request = new AhcWSRequest(client, "http://example.com", /*materializer*/ null)
      request.buildRequest().getMethod must be_==("GET")
    }

    "should set virtualHost appropriately" in {
      val client = mock[AhcWSClient]
      val request = new AhcWSRequest(client, "http://example.com", /*materializer*/ null)
      request.setVirtualHost("foo.com")
      val actual = request.buildRequest().getVirtualHost()
      actual must beEqualTo("foo.com")
    }

    "should support setting a request timeout" in {
      requestWithTimeout(1000) must beEqualTo(1000)
    }

    "should support setting an infinite request timeout" in {
      // -1 is the sentinel for "no timeout".
      requestWithTimeout(-1) must beEqualTo(-1)
    }

    "should not support setting a request timeout < -1" in {
      requestWithTimeout(-2) must throwA[IllegalArgumentException]
    }

    "should not support setting a request timeout > Integer.MAX_VALUE" in {
      requestWithTimeout(Int.MaxValue.toLong + 1) must throwA[IllegalArgumentException]
    }

    "Only send first content type header and add charset=utf-8 to the Content-Type header if it's manually adding but lacking charset" in {
      import scala.collection.JavaConverters._
      val client = mock[AhcWSClient]
      val request = new AhcWSRequest(client, "http://example.com", /*materializer*/ null)
      request.setBody("HELLO WORLD")
      request.setHeader("Content-Type", "application/json")
      // The second Content-Type header must be ignored by buildRequest().
      request.setHeader("Content-Type", "application/xml")
      val req = request.buildRequest()
      req.getHeaders.get("Content-Type") must be_==("application/json; charset=utf-8")
    }

    "Only send first content type header and keep the charset if it has been set manually with a charset" in {
      import scala.collection.JavaConverters._
      val client = mock[AhcWSClient]
      val request = new AhcWSRequest(client, "http://example.com", /*materializer*/ null)
      request.setBody("HELLO WORLD")
      request.setHeader("Content-Type", "application/json; charset=US-ASCII")
      request.setHeader("Content-Type", "application/xml")
      val req = request.buildRequest()
      req.getHeaders.get("Content-Type") must be_==("application/json; charset=US-ASCII")
    }

  }

  // Builds a request with the given timeout and returns the value the
  // underlying async-http-client request actually carries.
  def requestWithTimeout(timeout: Long) = {
    val client = mock[AhcWSClient]
    val request = new AhcWSRequest(client, "http://example.com", /*materializer*/ null)
    request.setRequestTimeout(timeout)
    request.buildRequest().getRequestTimeout()
  }

}
import UIKit import RxCocoa import RxSwift class ViewController: UIViewController { private let disposeBag = DisposeBag() private let searchBar = UISearchBar() private let tableView = UITableView() override func viewDidLoad() { super.viewDidLoad() setupUI() bindSearchBar() } private func setupUI() { searchBar.placeholder = "Search Movies" tableView.register(UITableViewCell.self, forCellReuseIdentifier: "cell") tableView.dataSource = nil view.addSubview(searchBar) view.addSubview(tableView) // Layout code for searchBar and tableView } private func bindSearchBar() { searchBar.rx.text.orEmpty .throttle(.milliseconds(500), scheduler: MainScheduler.instance) .distinctUntilChanged() .flatMapLatest { query in return OMDbAPI.searchMovies(withQuery: query) .catchErrorJustReturn([]) } .bind(to: tableView.rx.items(cellIdentifier: "cell", cellType: UITableViewCell.self)) { row, movie, cell in cell.textLabel?.text = "\(movie.title) (\(movie.year))" } .disposed(by: disposeBag) } }
<filename>spec/notification_hub/envelope/fallback_spec.rb require 'spec_helper' RSpec.describe NotificationHub::Envelope::Fallback do subject { NotificationHub::Envelope::Fallback.new(messages: [], options: {}) } it { is_expected.to be_kind_of(NotificationHub::Envelope::Base) } it { expect(subject.strategy).to eq('fallback') } end
<filename>alg_climbing_stairs_three.py """Climbing Stairs. How many paths up a stair of say 100 steps if the child jumps 1, 2 or 3 steps? F(n) = F(n - 1) + F(n - 2) + F(n - 3) - F(0) = 1 # Stay put. - F(1) = 1 # Take 1-step leap. - F(2) = 2 # Take 2 1-step leaps or 1 2-step leap. Remark: This is just like a variant of Fibonacci series. """ from __future__ import absolute_import from __future__ import division from __future__ import print_function from typing import List def climbing_stairs_three_recur(steps: int) -> int: """Staircase by top-down recursion. Time complexity: O(3^n). Space complexity: O(n). """ if steps < 0: return 0 if steps == 0: return 1 return (climbing_stairs_three_recur(steps - 1) + climbing_stairs_three_recur(steps - 2) + climbing_stairs_three_recur(steps - 3)) def _climbing_stairs_three_memo(steps: int, T: List[int]) -> int: if steps < 0: return 0 if steps == 0: return 1 if T[steps]: return T[steps] T[steps] = (_climbing_stairs_three_memo(steps - 1, T) + _climbing_stairs_three_memo(steps - 2, T) + _climbing_stairs_three_memo(steps - 3, T)) return T[steps] def climbing_stairs_three_memo(steps: int) -> int: """Staircase by top-down memoization. Time complexity: O(n). Space complexity: O(n). """ T = [0] * (steps + 1) return _climbing_stairs_three_memo(steps, T) def climbing_stairs_three_dp(steps: int) -> int: """Staircase by bottom-up dynamic programming. Time complexity: O(n). Space complexity: O(n). """ T = [0] * (steps + 1) T[0] = 1 T[1] = 1 T[2] = 2 for s in range(3, steps + 1): T[s] = T[s - 1] + T[s - 2] + T[s - 3] return T[steps] def climbing_stairs_three_iter(steps: int) -> int: """Staircase by bottom-up iteration w/ optimized space. Time complexity: O(n). Space complexity: O(1). """ if steps <= 1: return 1 if steps == 2: return 2 # Track the last three staircase results. a, b, c = 1, 1, 2 # Iterate through the remaining steps 3 ~ end. for s in range(3, steps + 1): # Add three numbers and then shift position by one. 
a, b, c = b, c, a + b + c return c def main(): import time steps = 20 start_time = time.time() print('Recur: {}'.format(climbing_stairs_three_recur(steps))) print('Time: {}'.format(time.time() - start_time)) start_time = time.time() print('Memo: {}'.format(climbing_stairs_three_memo(steps))) print('Time: {}'.format(time.time() - start_time)) start_time = time.time() print('DP: {}'.format(climbing_stairs_three_dp(steps))) print('Time: {}'.format(time.time() - start_time)) start_time = time.time() print('Iter: {}'.format(climbing_stairs_three_iter(steps))) print('Time: {}'.format(time.time() - start_time)) if __name__ == '__main__': main()
import React, { useState } from 'react'; import firebase from 'firebase'; function App() { const [name, setName] = useState(''); const [email, setEmail] = useState(''); const handleSubmit = async (e) => { e.preventDefault(); try { const db = firebase.firestore(); await db.collection('users').add({ name, email, }); } catch (error) { console.log(error); } }; return ( <form onSubmit={handleSubmit}> <input type="text" placeholder="Name" value={name} onChange={(e) => setName(e.target.value)} /> <input type="text" placeholder="Email" value={email} onChange={(e) => setEmail(e.target.value)} /> <button type="submit">Submit</button> </form> ); } export default App;
#!/bin/sh /usr/bin/getent group trellis || /usr/sbin/groupadd -r trellis /usr/bin/getent passwd trellis || /usr/sbin/useradd -r -g trellis -d /opt/trellis -m -s /bin/false trellis
<reponame>JasonLiu798/javautil package com.atjl.dbservice.mapper.biz; import com.atjl.dbservice.api.domain.DataBaseConfig; import com.atjl.dbservice.api.domain.DataCoverteConfig; import com.atjl.dbservice.api.domain.DataCpConfig; import com.atjl.dbservice.api.domain.SearchCondBase; import com.atjl.common.domain.KeyValue; import com.atjl.dbservice.domain.TgtTableDataPkg; import com.atjl.dbservice.domain.TgtTableDataUpdatePkg; import org.apache.ibatis.annotations.Param; import java.util.List; import java.util.Map; public interface DataTransferMapper { /** * 导数据相关 */ List<Map> getRawTableData(@Param("config") DataCpConfig req, @Param("otherCond") SearchCondBase searchCondBase); int getRawTableDataCount(@Param("config") DataCpConfig req, @Param("otherCond") SearchCondBase searchCondBase); /** * 转换相关 */ List<Map> getCoverteTableData(@Param("config") DataCoverteConfig config, @Param("otherCond") SearchCondBase searchCondBase); int getCoverteTableCount(@Param("config") DataCoverteConfig config, @Param("otherCond") SearchCondBase searchCondBase); // int getTgtTableDataCout(@Param("config") DataCpConfig req, @Param("conds") List<CondValue> conds); List<Map> getTgtTableData(@Param("config") DataCpConfig req, @Param("conds") List<List<KeyValue>> conds); int insertBatch(@Param("config") DataCpConfig req, @Param("dataPkg") TgtTableDataPkg dataPkg); int updateBatch(@Param("config") DataBaseConfig req, @Param("dataPkg") TgtTableDataUpdatePkg dataPkg); // int updateCovBatch(@Param("config") DataCoverteConfig conf, @Param("dataPkg") TgtTableDataUpdatePkg dataPkg); }
#!/usr/bin/env sh
# generated from catkin/cmake/template/setup.sh.in

# Sets various environment variables and sources additional environment hooks.
# It tries it's best to undo changes from a previously sourced setup file before.
#
# Supported command line options:
# --extend: skips the undoing of changes from a previously sourced setup file

# since this file is sourced either use the provided _CATKIN_SETUP_DIR
# or fall back to the destination set at configure time
: ${_CATKIN_SETUP_DIR:=/home/ubuntu/catkin_ws/devel}
_SETUP_UTIL="$_CATKIN_SETUP_DIR/_setup_util.py"
unset _CATKIN_SETUP_DIR

if [ ! -f "$_SETUP_UTIL" ]; then
  echo "Missing Python script: $_SETUP_UTIL"
  return 22
fi

# detect if running on Darwin platform
_UNAME=`uname -s`
_IS_DARWIN=0
if [ "$_UNAME" = "Darwin" ]; then
  _IS_DARWIN=1
fi
unset _UNAME

# make sure to export all environment variables
export CMAKE_PREFIX_PATH
if [ $_IS_DARWIN -eq 0 ]; then
  export LD_LIBRARY_PATH
else
  export DYLD_LIBRARY_PATH
fi
unset _IS_DARWIN
export PATH
export PKG_CONFIG_PATH
export PYTHONPATH

# remember type of shell if not already set
if [ -z "$CATKIN_SHELL" ]; then
  CATKIN_SHELL=sh
fi

# invoke Python script to generate necessary exports of environment variables
# use TMPDIR if it exists, otherwise fall back to /tmp
if [ -d "${TMPDIR}" ]; then
  _TMPDIR="${TMPDIR}"
else
  _TMPDIR=/tmp
fi
_SETUP_TMP=`mktemp "${_TMPDIR}/setup.sh.XXXXXXXXXX"`
unset _TMPDIR
if [ $? -ne 0 -o ! -f "$_SETUP_TMP" ]; then
  echo "Could not create temporary file: $_SETUP_TMP"
  return 1
fi
CATKIN_SHELL=$CATKIN_SHELL "$_SETUP_UTIL" $@ >> "$_SETUP_TMP"
_RC=$?
if [ $_RC -ne 0 ]; then
  if [ $_RC -eq 2 ]; then
    # BUGFIX: reworded garbled error message ("may be the disk if full?")
    echo "Could not write the output of '$_SETUP_UTIL' to temporary file '$_SETUP_TMP': maybe the disk is full?"
  else
    echo "Failed to run '\"$_SETUP_UTIL\" $@': return code $_RC"
  fi
  unset _RC
  unset _SETUP_UTIL
  rm -f "$_SETUP_TMP"
  unset _SETUP_TMP
  return 1
fi
unset _RC
unset _SETUP_UTIL
# Source the generated exports, then clean up the temporary file.
. "$_SETUP_TMP"
rm -f "$_SETUP_TMP"
unset _SETUP_TMP

# source all environment hooks
_i=0
while [ $_i -lt $_CATKIN_ENVIRONMENT_HOOKS_COUNT ]; do
  eval _envfile=\$_CATKIN_ENVIRONMENT_HOOKS_$_i
  unset _CATKIN_ENVIRONMENT_HOOKS_$_i
  eval _envfile_workspace=\$_CATKIN_ENVIRONMENT_HOOKS_${_i}_WORKSPACE
  unset _CATKIN_ENVIRONMENT_HOOKS_${_i}_WORKSPACE
  # set workspace for environment hook
  CATKIN_ENV_HOOK_WORKSPACE=$_envfile_workspace
  . "$_envfile"
  unset CATKIN_ENV_HOOK_WORKSPACE
  _i=$((_i + 1))
done
unset _i

unset _CATKIN_ENVIRONMENT_HOOKS_COUNT
echo 'Building C' build=`gcc -O0 -g3 -Wall -c ../*.c main.c` create=`gcc -O0 -g3 -Wall -o test_c.exe otpuri.o cotp.o crypt.o main.o -lcrypto -lm` echo $build $create
#!/usr/bin/env bash set -eo pipefail if [[ -z "${GCLOUD_SERVICE_KEY}" ]]; then echo >&2 "ERROR :: environment variable GCLOUD_SERVICE_KEY not set" exit 1 fi if [[ -z "${GOOGLE_PROJECT_ID}" ]]; then echo >&2 "ERROR :: environment variable GOOGLE_PROJECT_ID not set" exit 1 fi # Update gcloud sdk components # gcloud --quiet components update # Decode base64-encoded service key json echo "${GCLOUD_SERVICE_KEY}" | base64 --decode -i >"${HOME}/gcloud-service-key.json" # Configure project gcloud config set project "${GOOGLE_PROJECT_ID}" # Authenticate gcloud auth activate-service-account --key-file "${HOME}/gcloud-service-key.json"
"use strict";function goTest(){window.location.href="/test"} //# sourceMappingURL=index.min.js.map
/* * StorageOS API * * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) * * API version: 2.4.0-alpha * Contact: <EMAIL> * Generated by: OpenAPI Generator (https://openapi-generator.tech) */ package api // AttachType The attachment type of a volume. \"host\" indicates that the volume is consumed by the node it is attached to. type AttachType string // List of AttachType const ( ATTACHTYPE_UNKNOWN AttachType = "unknown" ATTACHTYPE_DETACHED AttachType = "detached" ATTACHTYPE_NFS AttachType = "nfs" ATTACHTYPE_HOST AttachType = "host" )
# -*- coding: utf-8 -*- from .conf import COMMAND_SEPARATOR, ARGS_SEPARATOR, commands_map def parse(program): """ Парсинг программы в стековом коде (преобразование в список команд): простой split строк """ commands = program.split(COMMAND_SEPARATOR) command_classes = [] for command in commands: command = command.split(ARGS_SEPARATOR) args = [] for arg in command[1:]: args.append(int(arg)) command_classes.append(commands_map[command[0]](*args)) return command_classes
SCRIPT_DIR=$(dirname "$0") TEST_ANSWER=112 TEST_INPUT_FILE=$SCRIPT_DIR/input_test.txt MAIN_INPUT_FILE=$SCRIPT_DIR/input_1.txt echo "TEST: Running on $TEST_INPUT_FILE. Expected result: $TEST_ANSWER" python3 $SCRIPT_DIR/main.py --verbosity DEBUG $TEST_INPUT_FILE echo "MAIN: Running on $MAIN_INPUT_FILE" python3 $SCRIPT_DIR/main.py $MAIN_INPUT_FILE
"""Base revision for SQL-backed event log storage

Revision ID: 567bc23fd1ac
Revises:
Create Date: 2019-11-21 09:59:57.028730

"""

# pylint: disable=no-member

# alembic dynamically populates the alembic.context module

import sqlalchemy as sa
from alembic import op
from sqlalchemy import Column
from sqlalchemy.engine import reflection

# revision identifiers, used by Alembic.
revision = "<KEY>"
down_revision = None
branch_labels = None
depends_on = None


def upgrade():
    # This is our root migration, and we don't have a common base. Before this revision, sqlite- and
    # postgres-based event logs had different schemas. The conditionality below is to deal with dev
    # databases that might not have been stamped by Alembic.
    bind = op.get_context().bind
    inspector = reflection.Inspector.from_engine(bind)

    # Hard guard: this migration issues postgres-specific SQL (::json casts,
    # to_timestamp), so refuse to run against any other dialect.
    if "postgresql" not in inspector.dialect.dialect_description:
        raise Exception(
            "Bailing: refusing to run a migration for postgres-backed event log storage against "
            "a non-postgres database of dialect {dialect}".format(
                dialect=inspector.dialect.dialect_description
            )
        )

    has_tables = inspector.get_table_names()
    # Legacy table migration: reshape the old "event_log" table and copy its
    # rows into the new "event_logs" table, then drop the old one.
    if "event_log" in has_tables:
        # The new table generates its own ids; drop the legacy id column.
        op.drop_column(
            table_name="event_log",
            column_name="id",
        )
        # run_id becomes nullable (system-level events have no run).
        op.alter_column(
            table_name="event_log",
            column_name="run_id",
            nullable=True,
            type_=sa.types.String(255),
            existing_type=sa.types.VARCHAR(255),
        )
        # Rename event_body -> event and widen it to unbounded Text.
        op.alter_column(
            table_name="event_log",
            column_name="event_body",
            nullable=False,
            new_column_name="event",
            type_=sa.types.Text,
            existing_type=sa.types.VARCHAR,
        )
        # New denormalized columns, backfilled from the JSON event payload
        # so they can be queried/indexed without parsing JSON.
        op.add_column(table_name="event_log", column=Column("dagster_event_type", sa.types.Text))
        op.add_column(table_name="event_log", column=Column("timestamp", sa.types.TIMESTAMP))
        op.execute(
            "update event_log\n"
            "set\n"
            "   dagster_event_type = event::json->'dagster_event'->>'event_type_value',\n"
            "   timestamp = to_timestamp((event::json->>'timestamp')::double precision)"
        )
        # op.execute('''select setval(pg_get_serial_sequence('event_logs', 'id'), greatest(select max(id) from event_log, select max(id) from event_logs))''')
        # Copy everything across and retire the legacy table.
        op.execute(
            "insert into event_logs (run_id, event, dagster_event_type, timestamp) "
            "select run_id, event, dagster_event_type, timestamp "
            "from event_log"
        )
        op.drop_table("event_log")


def downgrade():
    # Root of the migration chain: there is nothing earlier to go back to.
    raise Exception("Base revision, no downgrade is possible")
package edu.washington.cse.instrumentation.analysis.utils;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;

import org.yaml.snakeyaml.DumperOptions;
import org.yaml.snakeyaml.Yaml;

import soot.ArrayType;
import soot.FastHierarchy;
import soot.PackManager;
import soot.Scene;
import soot.SceneTransformer;
import soot.SootClass;
import soot.SootMethod;
import soot.Transform;
import soot.Type;

/**
 * Soot whole-program pass that walks the JDK I/O class hierarchies
 * (Reader/Writer/InputStream/OutputStream) under rt.jar and dumps, as YAML,
 * a rule list describing each class's methods: prefix-based "ignore" rules
 * for the core read/write-style methods, per-method entries for everything
 * else, and class-extension records linking each class to its superclasses.
 */
public class IOAnalysis {
	/** Strategy for one hierarchy walk: per-method entries + per-class prefix rules. */
	interface ProcessMethod {
		public Object handleMethod(SootMethod m);
		public void genRules(List<Object> output, SootClass cls);
	}

	public static void main(final String[] args) throws IOException {
		PackManager.v().getPack("wjtp").add(new Transform("wjtp.io", new SceneTransformer() {
			private FastHierarchy fh;

			@Override
			protected void internalTransform(final String phaseName, final Map<String, String> options) {
				final SootClass readerClass = Scene.v().getSootClass("java.io.Reader");
				final SootClass writerClass = Scene.v().getSootClass("java.io.Writer");
				final SootClass inputClass = Scene.v().getSootClass("java.io.InputStream");
				final SootClass outputClass = Scene.v().getSootClass("java.io.OutputStream");
				final List<Object> output = new ArrayList<>();
				fh = Scene.v().getOrMakeFastHierarchy();
				// Output-side hierarchies, then input-side hierarchies.
				processOutputRoot(outputClass, output);
				processOutputRoot(writerClass, output);
				processInputRoot(readerClass, output);
				processInputRoot(inputClass, output);
				// Emit everything as one YAML document (no line wrapping).
				final DumperOptions dOptions = new DumperOptions();
				dOptions.setWidth(Integer.MAX_VALUE);
				final Yaml y = new Yaml(dOptions);
				System.out.println(y.dump(output));
				System.exit(0);
			}

			/**
			 * Breadth-first walk of the subclass tree rooted at {@code root}.
			 * Skips internal com.sun/sun/CORBA classes; for each remaining
			 * class emits extension records, handler rules, a constructor
			 * marker ("^<C:<init>>"), and a per-method entry for every
			 * public, non-constructor method not inherited from a superclass.
			 */
			private void worklist(final SootClass root, final List<Object> output, final ProcessMethod handler) {
				final LinkedList<SootClass> worklist = new LinkedList<>();
				worklist.add(root);
				while(!worklist.isEmpty()) {
					final SootClass cls = worklist.removeFirst();
					if(cls.getName().startsWith("com.sun") || cls.getName().startsWith("sun") || cls.getName().startsWith("org.omg.CORBA")) {
						continue;
					}
					worklist.addAll(fh.getSubclassesOf(cls));
					processClassCommon(output, cls);
					handler.genRules(output, cls);
					output.add("^<" + cls.getName() + ":<init>>");
					// Methods already declared on a superclass are skipped so
					// each sub-signature is reported once per hierarchy path.
					final Set<String> sMethods = getSuperClassMethods(cls.getSuperclass());
					for(final SootMethod m : cls.getMethods()) {
						if(m.isPrivate()) {
							continue;
						}
						if(m.isStaticInitializer()) {
							continue;
						}
						if(m.isConstructor()) {
							continue;
						} else if(sMethods.contains(m.getSubSignature())) {
							continue;
						} else {
							final Object toAdd = handler.handleMethod(m);
							if(toAdd != null) {
								output.add(toAdd);
							}
						}
					}
				}
			}

			/** Walk an output-side hierarchy (Writer/OutputStream). */
			private void processOutputRoot(final SootClass writerClass, final List<Object> output) {
				worklist(writerClass, output, new ProcessMethod() {
					@Override
					public Object handleMethod(final SootMethod m) {
						// Methods matching an ignored prefix are covered by
						// the class-level "=<C:prefix*>" rule instead.
						for(final String iPref : ignored) {
							if(m.getName().startsWith(iPref)) {
								return null;
							}
						}
						final Map<String, Object> toRet = new HashMap<>();
						toRet.put("sig", m.getSignature());
						toRet.put("target", "IDENTITY");
						return toRet;
					}

					// Core output-method name prefixes handled by wildcard rules.
					private final String[] ignored = new String[]{
						"print", "write", "append", "flush", "close", "access$"
					};

					@Override
					public void genRules(final List<Object> output, final SootClass cls) {
						// Emit one "=<C:prefix*>" rule per prefix that has at
						// least one public method on this class.
						outer: for(final String iPref : ignored) {
							for(final SootMethod m : cls.getMethods()) {
								if(!m.isPublic()) {
									continue;
								}
								if(m.getName().startsWith(iPref)) {
									output.add("=<" + cls.getName() + ":" + iPref + "*>");
									continue outer;
								}
							}
						}
					}
				});
			}

			/** Record "C extends [superclasses]" for non-trivial hierarchies. */
			private void processClassCommon(final List<Object> output, final SootClass cls) {
				final Set<SootClass> superclasses = getSuperclasses(cls);
				if(superclasses.size() > 0) {
					final HashMap<String, Object> entry = new HashMap<>();
					entry.put("extend", cls.getName());
					final List<String> parents = new ArrayList<>();
					for(final SootClass p : superclasses) {
						parents.add(p.toString());
					}
					entry.put("parents", parents);
					output.add(entry);
				}
			}

			/** Walk an input-side hierarchy (Reader/InputStream). */
			private void processInputRoot(final SootClass readerClass, final List<Object> output) {
				worklist(readerClass, output, new ProcessMethod() {
					@Override
					public Object handleMethod(final SootMethod m) {
						for(final String pr : toIgnore) {
							if(m.getName().startsWith(pr)) {
								return null;
							}
						}
						final Map<String, Object> t = new HashMap<>();
						t.put("sig", m.getSignature());
						// read(...) variants writing into an array argument
						// get an IDENTITY target; others need manual review.
						if(isReadOutArg(m)) {
							t.put("target", "IDENTITY");
						} else {
							t.put("target", "???");
						}
						return t;
					}

					// Core input-method name prefixes handled by wildcard rules.
					final String[] toIgnore = new String[]{
						"read", "skip", "close", "available", "reset", "markSupported", "mark", "access$"
					};

					@Override
					public void genRules(final List<Object> output, final SootClass cls) {
						outer: for(final String iPref : toIgnore) {
							for(final SootMethod m : cls.getMethods()) {
								if(!m.isPublic()) {
									continue;
								}
								if(m.getName().startsWith(iPref)) {
									output.add("=<" + cls.getName() + ":" + iPref + "*>");
									continue outer;
								}
							}
						}
					}
				});
			}

			/** True for read* methods that take at least one array parameter. */
			private boolean isReadOutArg(final SootMethod m) {
				if(!m.getName().startsWith("read") || m.getParameterCount() == 0) {
					return false;
				}
				for(int i = 0; i < m.getParameterCount(); i++) {
					final Type ty = m.getParameterType(i);
					if(ty instanceof ArrayType) {
						return true;
					}
				}
				return false;
			}

			/** All strict superclasses of cls, excluding java.lang.Object. */
			private Set<SootClass> getSuperclasses(final SootClass cls) {
				final HashSet<SootClass> toReturn = new HashSet<>();
				SootClass it = cls.getSuperclass();
				while(!it.getName().equals("java.lang.Object")) {
					toReturn.add(it);
					it = it.getSuperclass();
				}
				return toReturn;
			}

			/** Sub-signatures of all public, non-constructor superclass methods. */
			private Set<String> getSuperClassMethods(final SootClass superClass) {
				SootClass it = superClass;
				final Set<String> toReturn = new HashSet<>();
				while(!it.getName().equals("java.lang.Object")) {
					for(final SootMethod m : it.getMethods()) {
						if(m.isConstructor()) {
							continue;
						}
						if(!m.isPublic()) {
							continue;
						}
						toReturn.add(m.getSubSignature());
					}
					it = it.getSuperclass();
				}
				return toReturn;
			}
		}));
		// Feed every class in rt.jar to Soot as an application class, on top
		// of whatever options were passed on the command line.
		// NOTE(review): rt.jar path is hard-coded to a specific OpenJDK 7 install.
		final List<String> toProcess = new ArrayList<>();
		try(final ZipFile zf = new ZipFile("/usr/lib/jvm/java-7-openjdk-amd64/jre/lib/rt.jar")) {
			final Enumeration<? extends ZipEntry> entries = zf.entries();
			while(entries.hasMoreElements()) {
				final ZipEntry ze = entries.nextElement();
				final String nm = ze.getName();
				if(!nm.endsWith(".class")) {
					continue;
				}
				// Strip ".class" and convert the path to a dotted class name.
				final String internalName = nm.substring(0, nm.length() - 6).replace('/', '.');
				toProcess.add(internalName);
			}
		}
		final ArrayList<String> newArgs = new ArrayList<>(Arrays.asList(args));
		newArgs.addAll(toProcess);
		soot.Main.main(newArgs.toArray(new String[newArgs.size()]));
	}
}
import java.util.Scanner; import java.util.SortedMap; import java.util.TreeMap; import java.util.regex.Pattern; import java.util.regex.Matcher; public class TraditionalWordCountApp { public static void main(String[] args) { SortedMap<String, Integer> counts = new TreeMap<>(); Pattern wordPattern = Pattern.compile("[a-z']+"); Scanner scanner = new Scanner(System.in); while (scanner.hasNext()) { String line = scanner.nextLine().toLowerCase(); Matcher matcher = wordPattern.matcher(line); while (matcher.find()) { String word = matcher.group(); counts.put(word, counts.getOrDefault(word, 0) + 1); } } for (SortedMap.Entry<String, Integer> e : counts.entrySet()) { System.out.printf("%s %d\n", e.getKey(), e.getValue()); } } }
package io.github.vampirestudios.obsidian.minecraft.obsidian; import io.github.vampirestudios.obsidian.api.obsidian.block.Block; import net.minecraft.block.BlockState; import net.minecraft.entity.player.PlayerEntity; import net.minecraft.state.StateManager; import net.minecraft.state.property.BooleanProperty; import net.minecraft.util.ActionResult; import net.minecraft.util.Hand; import net.minecraft.util.hit.BlockHitResult; import net.minecraft.util.math.BlockPos; import net.minecraft.util.math.Direction; import net.minecraft.world.World; public class HorizontalFacingSittableBlock extends HorizontalFacingBlockImpl { public static final BooleanProperty OCCUPIED = BooleanProperty.of("occupied"); public HorizontalFacingSittableBlock(Block block, Settings settings) { super(block, settings); this.setDefaultState(this.getDefaultState().with(OCCUPIED, false).with(FACING, Direction.NORTH)); } @Override public ActionResult onUse(BlockState state, World world, BlockPos pos, PlayerEntity player, Hand hand, BlockHitResult hit) { if(state.get(OCCUPIED) || !world.getBlockState(pos.up()).isAir() || player.hasVehicle()) return super.onUse(state, world, pos, player, hand, hit); if(!world.isClient) { SeatEntity entity = new SeatEntity(world); entity.setPos(pos.getX() + 0.5, pos.getY() + 0.6, pos.getZ() + 0.5); world.spawnEntity(entity); player.startRiding(entity); world.setBlockState(pos, state.with(OCCUPIED, true)); } world.setBlockState(pos, state.with(OCCUPIED, false)); return ActionResult.SUCCESS; } @Override protected void appendProperties(StateManager.Builder<net.minecraft.block.Block, BlockState> builder) { super.appendProperties(builder.add(OCCUPIED)); } }
<?php $timestamp = $_GET['timestamp']; if(empty($timestamp)) { echo json_encode(["error" => "Please provide a valid UNIX timestamp."]); die(); } $date = date("Y-m-d H:i", $timestamp); echo json_encode(["date" => $date]); ?>
#!/bin/bash set -e QUAYDIR=${QUAYDIR:-"/"} cd $QUAYDIR echo "[Local Dev] - Downloading AWS IP Ranges..." curl -fsSL https://ip-ranges.amazonaws.com/ip-ranges.json -o util/ipresolver/aws-ip-ranges.json echo "[Local Dev] - Building Front End..." mkdir -p $QUAYDIR/static/webfonts && \ mkdir -p $QUAYDIR/static/fonts && \ mkdir -p $QUAYDIR/static/ldn && \ PYTHONPATH=$QUAYPATH python -m external_libraries && \ npm install --ignore-engines && \ npm run watch & cd -
var express = require('express');
var app = express();
var path = require('path');
var fs = require('fs');
var request = require('request');
var bodyParser = require('body-parser');

// Translate var for our real time translation
const Translate = require('@google-cloud/translate');

// BUGFIX: config must be required *before* it is used.  The original
// required './configure.js' after `new FCM(config.SK)`; thanks to var
// hoisting, config was undefined there and FCM was constructed without a
// server key.
var config = require('./configure.js')

// Firebase cloud message service
var FCM = require('fcm-node');
var fcm = new FCM(config.SK);

var crypto = require('crypto');

app.use(express.static('public'));
app.use(bodyParser.json());
app.use(bodyParser.urlencoded({ extended: true }));

// external routes folder
require("./routes/routes.js")(app);

// Listen on port specified
app.listen(config.PORT, function(){
	console.log("Listening on port " + config.PORT);
});

// Push a JSON message to a client (denoted by a token) through Firebase
// Cloud Messaging.
// json format:
/*
{
  data: {
    to_id : int
    from_id : int
    content : string
    date : date
  }
}
*/
function sendMessageToUser(user, message){
	request({
		url: 'https://fcm.googleapis.com/fcm/send',
		method: 'POST',
		headers: {
			'Content-Type' : 'application/json',
			'Authorization' : 'key=' + config.SK
		},
		body : message
	}, function(error, response, body){
		if(error){
			console.log(error, response, body);
		}else if(response.statusCode >= 400){
			console.error('HTTP Error: ' + response.statusCode+' -'+response.statusMessage+'\n'+body);
		}else{
			console.log('Message Sent');
		}
	});
}
exports.sendMessageToUser = sendMessageToUser;

// Google API function to translate messages in real time.
// NOTE(review): the `return content` below happens inside the async HTTP
// callback, so translateText itself always returns undefined; callers that
// need the result should be migrated to a callback/Promise signature.
function translateText(input, target_lang){
	if (!Array.isArray(input)) {
		input = [input];
	}
	var request = require('request');
	request.post({
		url: "https://www.googleapis.com/language/translate/v2?",
		headers: {"X-HTTP-Method-Override": "GET"},
		form: {
			key: config.APIKEY,
			target: target_lang,
			q: input
		}
	}, function(error, response, results) {
		if (!error && response.statusCode == 200) {
			var jsoncontent = JSON.parse(results);
			var content = jsoncontent.data.translations[0].translatedText;
			console.log(content)
			return content;
		} else {
			console.log(error);
			console.log("something went wrong")
			return null;
		}
	});
}
exports.translateText = translateText;

// Random hex string of the requested length, used as a password salt.
function genRandomString(length){
	return crypto.randomBytes(Math.ceil(length/2))
		.toString('hex') /** convert to hexadecimal format */
		.slice(0,length); /** return required number of characters */
};

// HMAC-SHA512 the password with the given salt; returns { salt, passwordHash }.
function hash(password, salt){
	var res = crypto.createHmac('sha512', salt);
	res.update(password);
	var value = res.digest('hex');
	return {
		salt : salt,
		passwordHash: value
	}
}
exports.hash = hash;

// Salt-and-hash a password with a fresh random salt.
function saltHash(userpassword){
	var salt = genRandomString(16);
	var passwordData = hash(userpassword, salt);
	// BUGFIX: the computed hash was silently discarded; return it so callers
	// actually receive the { salt, passwordHash } pair.
	return passwordData;
}
exports.saltHash = saltHash;
import re # Function to validate phone number format def validate_phone_number(phone): pattern = re.compile(r'^\d{3}-\d{3}-\d{4}$') return bool(pattern.match(phone)) # Function to prompt user for input and validate def get_user_input(prompt, validation_func): while True: user_input = input(prompt) if validation_func(user_input): return user_input else: print("Invalid input. Please try again.") # Prompt user for input phone = get_user_input("Enter your phone number (XXX-XXX-XXXX): ", validate_phone_number) pre_add = input("Enter your present address: ") per_add = input("Enter your permanent address: ") occupation = input("Enter your occupation: ") city = input("Enter your city: ") license = get_user_input("Do you have a motor bike license? (Yes/No): ", lambda x: x.lower() in ['yes', 'no']) bi_cycle = get_user_input("Do you ride a bi-cycle? (Yes/No): ", lambda x: x.lower() in ['yes', 'no']) # Generate HTML output html_output = f''' <h4>Phone: </h4> <p>{phone}</p> <h4>Present Address: </h4> <p>{pre_add}</p> <h4>Permanent Address: </h4> <p>{per_add}</p> <h4>Occupation: </h4> <p>{occupation}</p> <h4>City: </h4> <p>{city}</p> <p>Motor Bike License : {license} .</p> <p>Ride Bi-Cycle : {bi_cycle} . ''' print(html_output)
#!/bin/bash #*************************************************************** # optimize document images / photos to have better OCR results * # and minimized PDF file size * #*************************************************************** echo echo "# runninig pre-process to optimize images #" | tee --append $LOGFILE
#! /usr/bin/env bash set -exu cd "$(dirname "`readlink -f "$0"`")"/.. for p in */ ; do ( set -xeu [[ ! -f $p/configure.ac ]] || [[ $p = m4-common ]] || continue cd $p if [[ -d m4 ]] ; then [[ ! -d .git ]] || git rm -r m4 rm -rf m4 fi [[ -d .git ]] || continue git remote remove m4 || : git remote remove m4-common || : git add . git commit -m 'remove m4' git push origin master ) || { echo $p; exit 2; } ; done
package com.udacity.pricing;

import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.udacity.pricing.domain.price.Price;
import com.udacity.pricing.domain.price.PriceRepository;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.CommandLineRunner;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.context.annotation.Bean;
import org.springframework.core.annotation.Order;
import org.springframework.core.io.Resource;
import org.springframework.core.io.ResourceLoader;
import org.springframework.data.jpa.repository.config.EnableJpaRepositories;
import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseBuilder;
import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseType;
import org.springframework.transaction.annotation.EnableTransactionManagement;

import javax.sql.DataSource;
import java.io.IOException;
import java.io.InputStream;
import java.util.Arrays;
import java.util.List;

import static org.springframework.core.io.ResourceLoader.CLASSPATH_URL_PREFIX;

/**
 * Creates a Spring Boot Application to run the Pricing Service.
 * TODO: Convert the application from a REST API to a microservice.
 * See config/EurekaConfig.java
 */
@SpringBootApplication
@EnableJpaRepositories
@EnableTransactionManagement
public class PricingServiceApplication {

    private static final Logger log = LoggerFactory.getLogger(PricingServiceApplication.class);

    @Autowired
    ResourceLoader resourceLoader;

    public static void main(String[] args) {
        log.info(
                String.format("Application starting with command-line arguments: %s.%n" +
                        "To kill this application, press Ctrl + C.", Arrays.toString(args))
        );
        SpringApplication.run(PricingServiceApplication.class, args);
    }

    /**
     * Bootstraps an in-memory H2 database.
     *
     * @param databaseName name of the embedded database, taken from the
     *                     {@code database_name} property
     */
    @Order(1)
    @Bean
    public DataSource dataSource(@Value("${database_name}") String databaseName) {
        EmbeddedDatabaseBuilder builder = new EmbeddedDatabaseBuilder();
        return builder.setType(EmbeddedDatabaseType.H2)
                .setName(databaseName)
                .build();
    }

    /** Property key naming an optional classpath JSON file of seed prices. */
    public static final String PRELOAD_FILE = "preload.file";

    /**
     * On startup, optionally preloads the price repository from a JSON
     * classpath resource (a serialized {@code List<Price>}) named by the
     * {@code preload.file} property.  No-op when the property is blank.
     */
    @Order(2)
    @Bean
    public CommandLineRunner run(PriceRepository priceRepository,
                                 @Value("${"+PRELOAD_FILE+":}") String filePath) {
        return args -> {
            if (!StringUtils.isBlank(filePath)) {
                // load prices from resources
                Resource resource = resourceLoader.getResource(CLASSPATH_URL_PREFIX + filePath);
                ObjectMapper objectMapper = new ObjectMapper();
                TypeReference<List<Price>> mapType = new TypeReference<>() {
                };
                try (InputStream is = resource.getInputStream()) {
                    List<Price> priceList = objectMapper.readValue(is, mapType);
                    priceRepository.saveAll(priceList);
                    log.info("Loaded {} prices", priceRepository.count());
                } catch (IOException e) {
                    // IMPROVED: use the class logger instead of printStackTrace
                    // so the failure lands in the application log with context.
                    log.error("Failed to preload prices from {}", filePath, e);
                }
            }
        };
    }
}
import p5 from "p5";
import { ParamNum, Color, rgbToHSB } from "./modules/param";
import { StillSketch } from "./modules/sketch";

/*
Reference (translated from Japanese):
Tatsuki Hayama, "Generative Art created from Mathematics", Gijutsu-Hyohron
https://gihyo.jp/book/2019/978-4-297-10463-4
*/

// Tweakable parameters for the Euclidean-rectangle subdivision sketch.
type EuclidRect1Params = {
  ratio: ParamNum;
  ratio2: ParamNum;
  thr: ParamNum;          // minimum cell size before subdivision stops
  lineColor: Color;
  lineWidth: ParamNum;
  hueMax: ParamNum;
  hueMin: ParamNum;
  saturationMax: ParamNum;
  saturationMin: ParamNum;
  brightnessMax: ParamNum;
  brightnessMin: ParamNum;
}

// Still sketch that recursively tiles the canvas with squares and
// rectangles in a Euclidean-algorithm-like subdivision, filling each cell
// with a random HSB color within the configured ranges.
class EuclidRect1 extends StillSketch {
  params: EuclidRect1Params = {
    ratio: {val: 0.6, min: 0.2, max: 1.1, isInt: false},
    ratio2: {val: 1.0, min: 0.0, max: 2.0, isInt: false},
    thr: {val: 50, min: 10, max: 320, isInt: true},
    lineColor: {r: 0, g: 0, b: 0},
    lineWidth: {val: 1.0, min: 0.0, max: 20.0, isInt: true},
    hueMax: {val: 1.0, min: 0.0, max: 1.0, isInt: false},
    hueMin: {val: 0.0, min: 0.0, max: 1.0, isInt: false},
    saturationMax: {val: 0.5, min: 0.0, max: 1.0, isInt: false},
    saturationMin: {val: 0.3, min: 0.0, max: 1.0, isInt: false},
    brightnessMax: {val: 1.0, min: 0.0, max: 1.0, isInt: false},
    brightnessMin: {val: 1.0, min: 0.0, max: 1.0, isInt: false},
  };

  // Resolved drawing state (derived from params in updateStat).
  ratio: number;
  thr: number;
  lineColor: p5.Color;
  lineWidth: number;
  hueMax: number;
  hueMin: number;
  saturationMax: number;
  saturationMin: number;
  brightnessMax: number;
  brightnessMin: number;

  // Copies the current parameter values into the drawing state.
  // The effective subdivision ratio is the sum of the two ratio sliders.
  updateStat(p: p5): void {
    this.ratio = this.params.ratio.val + this.params.ratio2.val;
    this.thr = this.params.thr.val;
    let lineCol = rgbToHSB(this.params.lineColor);
    this.lineColor = p.color(lineCol.r, lineCol.g, lineCol.b);
    this.lineWidth = this.params.lineWidth.val;
    this.hueMax = this.params.hueMax.val;
    this.hueMin = this.params.hueMin.val;
    this.saturationMax = this.params.saturationMax.val;
    this.saturationMin = this.params.saturationMin.val;
    this.brightnessMax = this.params.brightnessMax.val;
    this.brightnessMin = this.params.brightnessMin.val;
  }

  // Random fill color within the configured HSB ranges.
  getColor(p: p5): p5.Color {
    return p.color(
      p.random(this.hueMin,this.hueMax),
      p.random(this.saturationMin, this.saturationMax),
      p.random(this.brightnessMin, this.brightnessMax),
    );
  }

  setup(p: p5): void {
    // HSB with all channels normalized to [0, 1].
    p.colorMode(p.HSB, 1);
  }

  draw(p: p5): void {
    p.stroke(this.lineColor);
    p.strokeWeight(this.lineWidth);
    // Start subdivision from one square covering the whole canvas.
    this.divSquare(p, 0, 0, p.max(p.width, p.height));
  }

  // Fills a wd×wd square, then alternately slices it horizontally and
  // vertically into ratio-scaled rectangles (via divRect) until the
  // remaining strip is thinner than the threshold.
  divSquare(p: p5, xPos: number, yPos: number, wd: number) {
    let itr: number = 0;
    let xEndPos: number = wd + xPos;
    let yEndPos: number = wd + yPos;
    p.fill(this.getColor(p));
    p.rect(xPos, yPos, wd, wd);
    while (wd > this.thr) {
      itr++;
      if (itr % 2 === 1) {
        // Odd pass: march rightward in steps of wd * ratio.
        while (xPos + wd * this.ratio < xEndPos + 0.1) {
          this.divRect(p, xPos, yPos, wd * this.ratio);
          xPos += wd * this.ratio;
        }
        wd = xEndPos - xPos;
      } else {
        // Even pass: march downward in steps of wd / ratio.
        while (yPos + wd / this.ratio < yEndPos + 0.1) {
          this.divRect(p, xPos, yPos, wd);
          yPos += wd / this.ratio;
        }
        wd = yEndPos - yPos;
      }
    }
  }

  // Fills a wd × (wd / ratio) rectangle, then alternately carves it into
  // squares (via divSquare), mirroring divSquare's alternation.
  divRect(p: p5, xPos: number, yPos: number, wd: number) {
    let itr: number = 0;
    let xEndPos: number = xPos + wd;
    let yEndPos: number = yPos + wd / this.ratio;
    p.fill(this.getColor(p));
    p.rect(xPos, yPos, wd, wd / this.ratio);
    while(wd > this.thr) {
      itr++;
      if (itr % 2 === 0) {
        while (xPos + wd < xEndPos + 0.1) {
          this.divSquare(p, xPos, yPos, wd);
          xPos += wd;
        }
        wd = xEndPos - xPos;
      } else {
        while (yPos + wd < yEndPos + 0.1) {
          this.divSquare(p, xPos, yPos, wd);
          yPos += wd;
        }
        wd = yEndPos - yPos;
      }
    }
  }
}

new EuclidRect1().showSketch();
#first import the libraries
import folium
import pandas

# Raw strings so the backslashes in the Windows paths are never read as escapes.
CSV_PATH = r"C:\GitHub\Python_Scripts\WebmapsWithFolium\Crimes_2015_Reduced.csv"
HTML_PATH = r"C:\GitHub\Python_Scripts\WebmapsWithFolium\Chicago_Crimes_Reduced.html"


def color_producer(primary_type):
    """Return a marker colour for a crime category.

    Kept for ad-hoc styling experiments; the layer builder below deliberately
    keeps the original uniform red fill.
    """
    if primary_type == 'BURGLARY':
        return 'green'
    elif primary_type == 'OFFENSE INVOLVING CHILDREN':
        return 'orange'
    elif primary_type == 'BATTERY':
        return 'yellow'
    else:
        return 'red'


def make_crime_layer(name, frame):
    """Build a FeatureGroup with one CircleMarker per row of *frame*.

    *frame* must carry 'Longitude', 'Latitude' and 'Primary Type' columns.
    Marker styling matches the original per-category blocks exactly.
    """
    layer = folium.FeatureGroup(name=name)
    for lt, ln, prtype in zip(frame["Latitude"], frame["Longitude"], frame["Primary Type"]):
        layer.add_child(folium.CircleMarker(location=[lt, ln], radius=6, popup=str(prtype) + " m",
                                            fill_color='red', color='grey', fill=True, fill_opacity=0.7))
    return layer


data = pandas.read_csv(CSV_PATH)

#create map object over city
map = folium.Map(location=[41.00, -87.00], zoom_start=8)

# One toggleable overlay per crime category, added in the same order the
# original copy-pasted blocks registered them. This replaces ~8 duplicated
# blocks, drops the dead `fgb` group that was built from UNfiltered data and
# never added to the map, and fixes the truncated
# `fg_DECEPTIVE_PRACTICE.add_child(folium.)` line that was a SyntaxError.
map.add_child(make_crime_layer("BURGLARY", data.loc[data['Primary Type'] == "BURGLARY"]))
map.add_child(make_crime_layer("OFFENSE INVOLVING CHILDREN",
                               data.loc[data['Primary Type'] == "OFFENSE INVOLVING CHILDREN"]))

# Everything that is not a burglary, as in the original "All OTHERS" block.
map.add_child(make_crime_layer("All OTHERS", data.loc[data['Primary Type'] != "BURGLARY"]))

for crime_type in ("THEFT", "CRIMINAL DAMAGE", "ASSAULT", "DECEPTIVE PRACTICE"):
    map.add_child(make_crime_layer(crime_type, data.loc[data['Primary Type'] == crime_type]))

#add a layer control object
map.add_child(folium.LayerControl())

#create map
map.save(HTML_PATH)
import IMLearn.learners.regressors.linear_regression
from IMLearn.learners.regressors import PolynomialFitting
from IMLearn.utils import split_train_test
import numpy as np
import pandas as pd
import plotly.express as px
import plotly.graph_objects as go
import plotly.io as pio
pio.templates.default = "simple_white"

# Month labels in calendar order, used as the x-axis of the monthly bar charts.
All_Months = ["January", "February", "March", "April", "May", "June", "July", "August",
              "September", "October", "November", "December"]
# Non-Israel countries evaluated with the Israel-fitted model in Q5.
All_Countries = ['South Africa', 'The Netherlands', 'Jordan']


def load_data(filename: str) -> pd.DataFrame:
    """
    Load city daily temperature dataset and preprocess data.

    Parameters
    ----------
    filename: str
        Path to the city daily temperature dataset (CSV with a 'Date' column)

    Returns
    -------
    Design matrix and response vector (Temp)
    """
    # removes NaN values and duplicates
    temp_city_data = pd.read_csv(filename, parse_dates=['Date']).dropna().drop_duplicates()

    # removes temperatures with invalid data of sample (after april the 1'st)
    temp_city_data = temp_city_data[temp_city_data['Date'] < '2022-04-01']

    # removes temperatures lower than -25 degrees celsius - google says it's never happened in those areas
    temp_city_data = temp_city_data[temp_city_data['Temp'] > -25]

    # adds Day of Year feature to the data (1..366, derived from the parsed Date)
    temp_city_data['DayOfYear'] = temp_city_data['Date'].apply(lambda x: x.day_of_year)

    return temp_city_data


if __name__ == '__main__':
    np.random.seed(0)
    # Question 1 - Load and preprocessing of city temperature dataset
    X = load_data("../datasets/City_Temperature.csv")

    # Question 2 - Exploring data for specific country
    data_Israel = X[X['Country'] == "Israel"]

    # GRAPH - Average daily temperature in Israel as function of the day in the year
    px.scatter(data_Israel, x='DayOfYear', y="Temp", color=data_Israel['Year'].astype(str),
               title="Average daily temperature in Israel as function of the day in the year",
               labels={
                   "DayOfYear": "Day of Year",
                   "Temp": "Average Temperature (Celsius)",
                   "color": "Year"}).show()

    # Per-month std of the daily temperatures; only the 'Temp' column is plotted.
    data_Israel_std = data_Israel.groupby('Month').agg(np.std)
    data_Israel_std["TempStandardDeviation"] = data_Israel_std['Temp']

    # GRAPH - Standard Deviation of the daily Temperatures per Month
    px.bar(data_Israel_std, x=All_Months, y="TempStandardDeviation", text_auto=True,
           title="Standard Deviation of the daily Temperatures per Month",
           labels={"TempStandardDeviation": "Temperature Standard Deviation (Celsius)"}).show()

    # Question 3 - Exploring differences between countries
    data_std = X.groupby(["Country", "Month"]).agg(np.std)
    data_avg = X.groupby(["Country", "Month"]).agg(np.average)  # currently, works, in future versions of panda might not

    # GRAPH - Average monthly temperature per Country with error bars
    go.Figure(data=[go.Scatter(x=All_Months, y=data_avg.loc[x]['Temp'],
                               error_y=go.scatter.ErrorY(array=data_std.loc[x]['Temp']), name=x)
                    for x in X['Country'].unique()],
              layout=go.Layout(title="Average monthly temperature per Country with error bars",
                               xaxis_title=dict(text="Month"),
                               yaxis_title=dict(text="Average Temperature (Celsius)"),
                               legend_title=dict(text='Country'))).show()

    # Question 4 - Fitting model for different values of `k`
    train_X, train_y, test_X, test_y = split_train_test(data_Israel.drop(columns=["Temp"]),
                                                        data_Israel['Temp'], train_proportion=0.75)
    all_losses = list()
    for k in range(1, 11):
        # Fit a degree-k polynomial on DayOfYear only and score it on the held-out set.
        es = PolynomialFitting(k)
        es.fit(train_X['DayOfYear'].to_numpy(), train_y.to_numpy())
        # NOTE(review): round(loss, 2) would be the idiomatic spelling of __round__(2).
        loss = es.loss(test_X['DayOfYear'].to_numpy(), test_y.to_numpy()).__round__(2)
        print(f"for k = {k} we get a MSE loss value of {loss}")
        all_losses.append(loss)

    # GRAPH - MSE loss for each Polynom degree between 1 and 10
    px.bar(x=range(1, 11), y=all_losses, text_auto=True,
           title="MSE loss for each Polynom degree between 1 and 10",
           labels={"x": "k - Polynom degree", "y": "MSE loss"}). \
        update_xaxes(type="category").show()

    # Question 5 - Evaluating fitted model on different countries
    es = PolynomialFitting(k=5)
    es.fit(data_Israel['DayOfYear'].to_numpy(), data_Israel['Temp'].to_numpy())
    countries_losses = list()
    for country in All_Countries:
        country_data = X[X['Country'] == country]
        countries_losses.append(es.loss(country_data['DayOfYear'], country_data['Temp']))

    # GRAPH - MSE loss of a Polynomial fitting Estimator of degree 5 for each Country
    px.bar(x=All_Countries, y=countries_losses, text_auto=True, color=All_Countries,
           title="MSE loss of a Polynomial fitting Estimator of degree 5 for each Country ",
           labels={"x": "Country", "y": "MSE loss",
                   "color": "Countries"}).update_xaxes(type="category").show()

    # Extra stuff !!!
    # In this section I proof my claim in question 3, that by adding a constant of -10 to the Israel training
    # response vector we can get a better Estimator for The Netherlands.
    countries_losses.clear()
    Israel_Netherlands_diff_constant = 10
    for country in All_Countries:
        country_data = X[X['Country'] == country]
        if country == "The Netherlands":
            # Shift the Israel responses down by the constant before fitting.
            es.fit(data_Israel['DayOfYear'].to_numpy(),
                   (data_Israel['Temp'] - Israel_Netherlands_diff_constant).to_numpy())
        else:
            es.fit(data_Israel['DayOfYear'].to_numpy(), data_Israel['Temp'].to_numpy())
        countries_losses.append(es.loss(country_data['DayOfYear'], country_data['Temp']))

    # GRAPH - EXTRA - MSE loss of The Netherlands (with a constant) and other Countries
    px.bar(x=All_Countries, y=countries_losses, text_auto=True, color=All_Countries,
           title=" EXTRA - MSE loss of The Netherlands (with a constant) and other Countries",
           labels={"x": "Country", "y": "MSE loss",
                   "color": "Countries"}).update_xaxes(type="category").show()
func generateWords(string: String) -> [String] { if string.count == 0 { return [] } if string.count == 1 { return [String(string.first!)] } var words = [String]() let firstChar = string.first! let remainChars = String(string.dropFirst()) let remainWords = generateWords(string: remainChars) for word in remainWords { words.append(String(firstChar) + word) words.append(word) } return words } generateWords(string: "abc") // Returns ["abc", "acb", "bac", "bca", "cab", "cba"]
<reponame>kyledecot/hard_cider # frozen_string_literal: true RSpec.describe HardCider::CLI do before do stub_request(:get, %r{api\.appstoreconnect\.apple\.com/v1/apps}) .to_return(body: fixture('apps.json')) stub_request(:get, %r{api\.appstoreconnect\.apple\.com/v1/builds}) .to_return(body: fixture('builds.json')) end describe '.run' do context 'when command is `wait`' do it 'should delegate' do expect(HardCider) .to receive(:wait) .and_call_original described_class .run(['wait', "--private-key-path=#{private_key_path}", '--frequency=0', '--timeout=0', '--bundle-id=com.kyledecot.Example']) end end end end
import tensorflow as tf # Define constants for data preprocessing SEQ_LENGTH = 300 VOCAB_SIZE = 5000 # Define model architecture model = tf.keras.Sequential([ tf.keras.layers.Embedding(VOCAB_SIZE, 16), tf.keras.layers.LSTM(64), tf.keras.layers.Dense(1, activation='sigmoid') ]) # Compile the model model.compile(optimizer='adam', loss='binary_crossentropy', metrics=['accuracy']) # Train the model model.fit(training_data, training_labels, epochs=10)
source get_model_specific_info.sh python3 cell_generate_data.py \ --model_name ${MODEL_NAME}
<gh_stars>10-100 # Generated by Django 3.1.1 on 2020-10-01 20:31 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('letters', '0014_remove_letter_ordering'), ] operations = [ migrations.AlterField( model_name='documenttype', name='name', field=models.CharField(help_text='Type of letter', max_length=256, unique=True, verbose_name='Document type'), ), migrations.AlterField( model_name='letter', name='comment', field=models.TextField(blank=True, help_text='Comment for letter.', verbose_name='Comment'), ), ]
let path = require("path"); module.exports = function(app) { app.get("/", (req, res) => { res.render("index"); }) app.get("/legislature", (req, res) => { res.render("legislature"); }) app.get("/about", (req, res) => { res.render("about"); }) app.get("/media", (req, res) => { res.render("media"); }) app.get("/help", (req, res) => { res.render("help"); }) app.get("/legislature/priorities", (req, res) => { res.render("legPriorities"); }) app.get("/legislature/billTracker", (req, res) => { res.render("billTracker"); }) app.get("/legislature/committees", (req, res) => { res.render("legCommittees"); }) app.get("/legislature/resources", (req, res) => { res.render("resources"); }) }
import string def remove_all_punctuations(text): punctuations = [char for char in text if char in string.punctuation] for punc in punctuations: text = text.replace(punc, '') return text text = "Hey there, what's up!" result = remove_all_punctuations(text) print(result)
#!/bin/sh # CYBERWATCH SAS - 2017 # # Security fix for USN-2833-1 # # Security announcement date: 2015-12-15 00:00:00 UTC # Script generation date: 2017-01-01 21:05:02 UTC # # Operating System: Ubuntu 15.04 # Architecture: i686 # # Vulnerable packages fix on version: # - firefox:43.0+build1-0ubuntu0.15.04.1 # # Last versions recommanded by security team: # - firefox:43.0+build1-0ubuntu0.15.04.1 # # CVE List: # - CVE-2015-7201 # - CVE-2015-7202 # - CVE-2015-7203 # - CVE-2015-7220 # - CVE-2015-7221 # - CVE-2015-7204 # - CVE-2015-7205 # - CVE-2015-7207 # - CVE-2015-7208 # - CVE-2015-7210 # - CVE-2015-7211 # - CVE-2015-7212 # - CVE-2015-7213 # - CVE-2015-7214 # - CVE-2015-7215 # - CVE-2015-7216 # - CVE-2015-7217 # - CVE-2015-7218 # - CVE-2015-7219 # - CVE-2015-7222 # - CVE-2015-7223 # # More details: # - https://www.cyberwatch.fr/vulnerabilites # # Licence: Released under The MIT License (MIT), See LICENSE FILE sudo apt-get install --only-upgrade firefox=43.0+build1-0ubuntu0.15.04.1 -y
<gh_stars>0 "use strict"; Object.defineProperty(exports, "__esModule", { value: true }); exports["default"] = void 0; var _react = _interopRequireDefault(require("react")); var _propTypes = _interopRequireDefault(require("prop-types")); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { "default": obj }; } function _extends() { _extends = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key in source) { if (Object.prototype.hasOwnProperty.call(source, key)) { target[key] = source[key]; } } } return target; }; return _extends.apply(this, arguments); } var Plus = function Plus(props) { return _react["default"].createElement("svg", _extends({ xmlns: "http://www.w3.org/2000/svg", viewBox: "0 0 13 13", width: "13px", height: "13px" }, props), _react["default"].createElement("g", { "data-name": "Layer 2" }, _react["default"].createElement("path", { d: "M6.5.5v12m6-6H.5", fill: "none", stroke: props.fill, strokeLinecap: "round" }))); }; Plus.defaultProps = { fill: '#fff' }; Plus.propTypes = { fill: _propTypes["default"].string }; var _default = Plus; exports["default"] = _default;
/*
 * This is the image processing thread. It takes images from the webcam and uses
 * an analysis algorithm to separate out the backboard targets. It has a method to return
 * an array of coordinates for the processed targets. The thread runs at the minimum priority
 * as to reduce lag in the robot.
 *
 * It was not used in competition, and it was still buggy near the end of testing. The problems
 * are more likely to be in the tuning of the ranges and not in the logic itself.
 *
 */
package com.badrobots.y2012.technetium;

import edu.wpi.first.wpilibj.DriverStationLCD;
import edu.wpi.first.wpilibj.Timer;
import edu.wpi.first.wpilibj.camera.AxisCamera;
import edu.wpi.first.wpilibj.camera.AxisCameraException;
import edu.wpi.first.wpilibj.image.*;
import edu.wpi.first.wpilibj.smartdashboard.SmartDashboard;

/**
 * Thread that analyzed images from the webcam to detect rectangles.
 * @author 1014 Team
 */
public class ImageProcessing extends Thread {

    /**
     * whether or not it is printing debugging lines to the terminal
     */
    protected static final boolean LOGGING = false;
    /**
     * The webcam on top of the shooter
     */
    protected AxisCamera camera;
    /**
     * The particle analysis report which is returned after processing an image.
     * NOTE(review): never assigned or read in this class — appears unused.
     */
    protected ParticleAnalysisReport[] toReturn;
    /**
     * How long to wait in between images. Increasing this value decreaces processing power used
     */
    protected int sleepTimer = 100;
    /**
     * Whether or not images are currently being processed
     */
    protected boolean running;
    /**
     * The coordinates of the detected targets.
     * NOTE(review): getCoords() reads this under synchronized(coords) but
     * setHoopCoords() writes it without synchronization — confirm whether the
     * unsynchronized write is acceptable here.
     */
    protected int[] coords;
    /**
     * Filter used for certain operations. Currently unused
     */
    protected CriteriaCollection criteria;

    /**
     * @param c AxisCamera being used
     */
    public ImageProcessing(AxisCamera c) {
        //The thread starts with the images being processed, and the thread at minimum priority
        running = true;
        this.setPriority(MIN_PRIORITY);

        //The camera is constructed here, and the resolution is set to the minimum
        camera = c;
        camera.writeResolution(AxisCamera.ResolutionT.k160x120);

        //The coordinates are filled with the null case, -1(invalid)
        coords = new int[2];
        coords[0] = -1;
        coords[1] = -1;
    }

    /**
     * This method is called to start the thread, and runs in an infinite loop.
     * NOTE(review): when running is false this loop spins without sleeping,
     * because Thread.sleep is only reached inside the if(running) branch.
     */
    public void run() {
        //Initializes the images for the progression of the analysis
        ColorImage img = null;
        BinaryImage binary = null;
        BinaryImage noSmall = null;

        //Infinite loop
        while (true) {
            //Processes the image if running is true
            if (running) {
                //Processes the image
                setHoopCoords(img, binary, noSmall);

                //Frees the memory allocated to the images (redundant?)
                // NOTE(review): Java passes references by value, so the images
                // allocated inside setHoopCoords are never visible here — these
                // locals stay null and the free() calls below never execute.
                try {
                    if(img != null)
                        img.free();
                    if(binary != null)
                        binary.free();
                    if(noSmall != null)
                        noSmall.free();
                    Thread.sleep(sleepTimer);
                } catch (Exception ex) {
                    ex.printStackTrace();
                }
            }
        }
    }

    /**
     * If LOGGING, prints out the string argument in the terminal with an ImageProcessing tag
     * @param string the intended output.
     */
    protected void println(String string) {
        if (LOGGING) {
            System.out.println("ImageProcessing - ".concat(string));
        }
    }

    // NOTE(review): unused field.
    int i = 0;

    /**
     * Analyzes an image to meet the backboard requirements. Currently buggy.
     * @deprecated
     * @param img Image to analyze
     * @param binary Memory location for a binary image used partway through the process
     * @param noSmall Memory location for the final image
     */
    public void setHoopCoords(ColorImage img, BinaryImage binary, BinaryImage noSmall) {
        int current = 0; // NOTE(review): unused local.
        try {
            //gets and stores the current camera image
            img = camera.getImage();
            binary = img.thresholdHSL(100, 156, 30, 255, 145, 255);
            //binary = img.thresholdHSL(98, 155, 45, 255, 137, 255);//Works great on scottie for some reason. Whatever, sure.
            //binary = img.thresholdHSL(10, 155, 20, 255, 55, 255);

            //Convex Hull
            binary.convexHull(true);

            //Remove small objects (parameters are connectivity and number of erosions)
            noSmall = binary.removeSmallObjects(true, 3);
            //noSmall.particleFilter(criteria);

            ParticleAnalysisReport[] report = noSmall.getOrderedParticleAnalysisReports();

            //Finds the lowest image
            // NOTE(review): this keeps the particle with the SMALLEST
            // center_mass_y, i.e. the top-most particle in image coordinates,
            // despite the "lowest" naming — confirm intent.
            int size = report.length;
            if (size > 0) {
                double lowest = report[0].center_mass_y;
                int biggestIndex = 0;
                for (int i = 0; i < report.length; i++) {
                    if (report[i].center_mass_y < lowest) {
                        lowest = report[i].center_mass_y;
                        biggestIndex = i;
                    }
                }

                //records the coordinates of the lowest image
                coords[0] = report[biggestIndex].center_mass_x;
                coords[1] = report[biggestIndex].center_mass_y;

                println("size of analysis: " + report.length + "center of mass x: " + report[0].center_mass_x);
            } else {
                println(" no particles detected");
                coords[0] = -1;
                coords[1] = -1;
            }

            //frees the memory locations.
            img.free();
            binary.free();
            noSmall.free();
        } catch(Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * @return coords The coordinates of the current lowest image
     */
    public int[] getCoords() {
        synchronized (coords) {
            return coords;
        }
    }

    /**
     * @return running Whether or not the thread is currently processing images.
     */
    public boolean getRunning() {
        return running;
    }

    /**
     * @param b Whether or not the thread should be processing images
     */
    public void setRunning(boolean b) {
        running = b;
    }
}
from .posts import ( Post ) from .comments import ( Comment ) __all__ = ['Post', 'Comment']
/* eslint-disable no-console */
import React from 'react';
import { Card, ComboBox, Option } from 'belle';

// Demo page showcasing the ways a Belle ComboBox can be driven:
// defaultValue, controlled value, valueLink, disabled and caret variants.
export default React.createClass({

  getInitialState() {
    // Seed value shared by the controlled-value and valueLink examples.
    return {
      comboValue: 'te',
    };
  },

  _handleChange(newValue) {
    this.setState({ comboValue: newValue });
  },

  render() {
    // Two-way binding object consumed by the "Value Link Example" below.
    const valueLink = {
      value: this.state.comboValue,
      requestChange: this._handleChange,
    };

    return (
      <div>

        <h2>ComboBox</h2>

        <Card>

          <h3>Default Value Example</h3>

          <div style={{ marginBottom: 20 }}>
            <ComboBox defaultValue={ 'test' }>
              <Option value="te">Te</Option>
              <Option value="tes">Tes</Option>
              <Option value="test">Test</Option>
              <Option value="test1">Test1</Option>
              <Option value="test123">Test123</Option>
              <Option value="orange">Orange</Option>
            </ComboBox>
          </div>

          <h3>Value Example</h3>

          {/* Controlled ComboBox: logs every update event's fields. */}
          <div style={{ marginBottom: 20 }}>
            <ComboBox value={ this.state.comboValue }
                      onUpdate={ (event) => { console.log(`${event.value} - ${event.identifier} - ${event.isOptionSelection} - ${event.isMatchingOption}`); }}
            >
              <Option value="te" identifier="123">Te</Option>
              <Option value="tes" identifier="123">Tes</Option>
              <Option value="test" identifier="123">Test</Option>
              <Option value="test1" identifier="123">Test1</Option>
              <Option value="test123" identifier="123">Test123</Option>
              <Option value="orange" identifier="123">Orange</Option>
            </ComboBox>
          </div>

          <h3>Value Link Example</h3>

          <div style={{ marginBottom: 20 }}>
            <ComboBox valueLink={ valueLink }
                      onUpdate={ (event) => { console.log(event.value); }}>
              <Option value="te">Te</Option>
              <Option value="tes">Tes</Option>
              <Option value="test">Test</Option>
              <Option value="test1">Test1</Option>
              <Option value="test123">Test123</Option>
              <Option value="orange">Orange</Option>
            </ComboBox>
          </div>

          {/* Disabled ComboBox. */}
          <div style={{ marginBottom: 20 }}>
            <ComboBox disabled>
              <Option value="te">Te</Option>
            </ComboBox>
          </div>

          {/* Caret hidden. */}
          <div>
            <ComboBox displayCaret={ false }>
              <Option value="te">Te</Option>
              <Option value="tes">Tes</Option>
              <Option value="test">Test</Option>
              <Option value="test1">Test1</Option>
              <Option value="test123">Test123</Option>
              <Option value="orange">Orange</Option>
            </ComboBox>
          </div>

          {/* Caret shown but control disabled. */}
          <div>
            <ComboBox displayCaret disabled>
              <Option value="te">Te</Option>
              <Option value="tes">Tes</Option>
              <Option value="test">Test</Option>
              <Option value="test1">Test1</Option>
              <Option value="test123">Test123</Option>
              <Option value="orange">Orange</Option>
            </ComboBox>
          </div>

          {/* Numeric option values. */}
          <div>
            <ComboBox>
              <Option value={ 0 }>Zero</Option>
              <Option value={ 1 }>One</Option>
            </ComboBox>
          </div>

        </Card>

      </div>
    );
  },
});
#!/bin/bash set -e cd $BASEDIR/$1 ./gradlew clean build ./gradlew check
import { pipe } from 'rxjs'; import { distinctUntilChanged, map } from 'rxjs/operators'; export const select = function(..._cb: Function[]) { const args: any = [...arguments]; return pipe( map((state = {}) => args.reduce((acc, selector) => selector(acc), state)), distinctUntilChanged() ); };
/* GENERATED FILE */
// hybrids web-component definition for the Phosphor "lock laminated" icon.
// Appears to be emitted by the phosphor-icons build — regenerate rather than
// hand-edit. render() emits one of six SVG variants keyed on `weight`;
// `mirrored` flips the glyph horizontally.
import { html, svg, define } from "hybrids";

const PhLockLaminated = {
  color: "currentColor",
  size: "1em",
  weight: "regular",
  mirrored: false,
  render: ({ color, size, weight, mirrored }) => html`
    <svg
      xmlns="http://www.w3.org/2000/svg"
      width="${size}"
      height="${size}"
      fill="${color}"
      viewBox="0 0 256 256"
      transform=${mirrored ? "scale(-1, 1)" : null}
    >
      ${weight === "bold" &&
      svg`<line x1="39.99414" y1="132" x2="215.99414" y2="132" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="24"/>
  <line x1="39.99414" y1="172" x2="215.99414" y2="172" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="24"/>
  <rect x="39.99414" y="88" width="176" height="128" rx="8" stroke-width="24" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" fill="none"/>
  <path d="M91.99414,88V52a36,36,0,1,1,72,0V88" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="24"/>`}
      ${weight === "duotone" &&
      svg`<rect x="39.99414" y="88" width="176" height="128" rx="8" opacity="0.2"/>
  <line x1="39.99414" y1="120" x2="215.99414" y2="120" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="16"/>
  <line x1="39.99414" y1="152" x2="215.99414" y2="152" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="16"/>
  <line x1="39.99414" y1="184" x2="215.99414" y2="184" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="16"/>
  <rect x="39.99414" y="88" width="176" height="128" rx="8" stroke-width="16" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" fill="none"/>
  <path d="M91.99414,88V52a36,36,0,1,1,72,0V88" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="16"/>`}
      ${weight === "fill" &&
      svg`<path d="M207.96484,80h-36V52a44,44,0,0,0-88,0V80h-36a16.01833,16.01833,0,0,0-16,16V208a16.01833,16.01833,0,0,0,16,16h160a16.01833,16.01833,0,0,0,16-16V96A16.01833,16.01833,0,0,0,207.96484,80Zm-108-28a28,28,0,0,1,56,0V80h-56Zm84,140h-112a8,8,0,1,1,0-16h112a8,8,0,0,1,0,16Zm0-32h-112a8,8,0,1,1,0-16h112a8,8,0,0,1,0,16Zm0-32h-112a8,8,0,1,1,0-16h112a8,8,0,0,1,0,16Z"/>`}
      ${weight === "light" &&
      svg`<line x1="39.99414" y1="120" x2="215.99414" y2="120" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="12"/>
  <line x1="39.99414" y1="152" x2="215.99414" y2="152" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="12"/>
  <line x1="39.99414" y1="184" x2="215.99414" y2="184" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="12"/>
  <rect x="39.99414" y="88" width="176" height="128" rx="8" stroke-width="12" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" fill="none"/>
  <path d="M91.99414,88V52a36,36,0,1,1,72,0V88" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="12"/>`}
      ${weight === "thin" &&
      svg`<line x1="39.99414" y1="120" x2="215.99414" y2="120" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="8"/>
  <line x1="39.99414" y1="152" x2="215.99414" y2="152" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="8"/>
  <line x1="39.99414" y1="184" x2="215.99414" y2="184" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="8"/>
  <rect x="39.99414" y="88" width="176" height="128" rx="8" stroke-width="8" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" fill="none"/>
  <path d="M91.99414,88V52a36,36,0,1,1,72,0V88" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="8"/>`}
      ${weight === "regular" &&
      svg`<line x1="39.99414" y1="120" x2="215.99414" y2="120" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="16"/>
  <line x1="39.99414" y1="152" x2="215.99414" y2="152" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="16"/>
  <line x1="39.99414" y1="184" x2="215.99414" y2="184" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="16"/>
  <rect x="39.99414" y="88" width="176" height="128" rx="8" stroke-width="16" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" fill="none"/>
  <path d="M91.99414,88V52a36,36,0,1,1,72,0V88" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="16"/>`}
    </svg>
  `,
};

// Register the custom element and re-export the descriptor.
define("ph-lock-laminated", PhLockLaminated);
export default PhLockLaminated;
// Code generated by protoc-gen-gogo. DO NOT EDIT. // source: tendermint/privval/types.proto package privval import ( fmt "fmt" crypto "github.com/arcology-network/consensus-engine/proto/tendermint/crypto" types "github.com/arcology-network/consensus-engine/proto/tendermint/types" _ "github.com/gogo/protobuf/gogoproto" proto "github.com/gogo/protobuf/proto" io "io" math "math" math_bits "math/bits" ) // Reference imports to suppress errors if they are not otherwise used. var _ = proto.Marshal var _ = fmt.Errorf var _ = math.Inf // This is a compile-time assertion to ensure that this generated file // is compatible with the proto package it is being compiled against. // A compilation error at this line likely means your copy of the // proto package needs to be updated. const _ = proto.GoGoProtoPackageIsVersion3 // please upgrade the proto package type Errors int32 const ( Errors_ERRORS_UNKNOWN Errors = 0 Errors_ERRORS_UNEXPECTED_RESPONSE Errors = 1 Errors_ERRORS_NO_CONNECTION Errors = 2 Errors_ERRORS_CONNECTION_TIMEOUT Errors = 3 Errors_ERRORS_READ_TIMEOUT Errors = 4 Errors_ERRORS_WRITE_TIMEOUT Errors = 5 ) var Errors_name = map[int32]string{ 0: "ERRORS_UNKNOWN", 1: "ERRORS_UNEXPECTED_RESPONSE", 2: "ERRORS_NO_CONNECTION", 3: "ERRORS_CONNECTION_TIMEOUT", 4: "ERRORS_READ_TIMEOUT", 5: "ERRORS_WRITE_TIMEOUT", } var Errors_value = map[string]int32{ "ERRORS_UNKNOWN": 0, "ERRORS_UNEXPECTED_RESPONSE": 1, "ERRORS_NO_CONNECTION": 2, "ERRORS_CONNECTION_TIMEOUT": 3, "ERRORS_READ_TIMEOUT": 4, "ERRORS_WRITE_TIMEOUT": 5, } func (x Errors) String() string { return proto.EnumName(Errors_name, int32(x)) } func (Errors) EnumDescriptor() ([]byte, []int) { return fileDescriptor_cb4e437a5328cf9c, []int{0} } type RemoteSignerError struct { Code int32 `protobuf:"varint,1,opt,name=code,proto3" json:"code,omitempty"` Description string `protobuf:"bytes,2,opt,name=description,proto3" json:"description,omitempty"` } func (m *RemoteSignerError) Reset() { *m = RemoteSignerError{} } func (m 
*RemoteSignerError) String() string { return proto.CompactTextString(m) } func (*RemoteSignerError) ProtoMessage() {} func (*RemoteSignerError) Descriptor() ([]byte, []int) { return fileDescriptor_cb4e437a5328cf9c, []int{0} } func (m *RemoteSignerError) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) } func (m *RemoteSignerError) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { if deterministic { return xxx_messageInfo_RemoteSignerError.Marshal(b, m, deterministic) } else { b = b[:cap(b)] n, err := m.MarshalToSizedBuffer(b) if err != nil { return nil, err } return b[:n], nil } } func (m *RemoteSignerError) XXX_Merge(src proto.Message) { xxx_messageInfo_RemoteSignerError.Merge(m, src) } func (m *RemoteSignerError) XXX_Size() int { return m.Size() } func (m *RemoteSignerError) XXX_DiscardUnknown() { xxx_messageInfo_RemoteSignerError.DiscardUnknown(m) } var xxx_messageInfo_RemoteSignerError proto.InternalMessageInfo func (m *RemoteSignerError) GetCode() int32 { if m != nil { return m.Code } return 0 } func (m *RemoteSignerError) GetDescription() string { if m != nil { return m.Description } return "" } // PubKeyRequest requests the consensus public key from the remote signer. 
type PubKeyRequest struct {
	// ChainId scopes the request to a specific chain.
	ChainId string `protobuf:"bytes,1,opt,name=chain_id,json=chainId,proto3" json:"chain_id,omitempty"`
}

// The XXX_* methods and xxx_messageInfo_* var below are internal
// gogo/protobuf plumbing used by the proto runtime.
func (m *PubKeyRequest) Reset()         { *m = PubKeyRequest{} }
func (m *PubKeyRequest) String() string { return proto.CompactTextString(m) }
func (*PubKeyRequest) ProtoMessage()    {}
func (*PubKeyRequest) Descriptor() ([]byte, []int) {
	return fileDescriptor_cb4e437a5328cf9c, []int{1}
}
func (m *PubKeyRequest) XXX_Unmarshal(b []byte) error {
	return m.Unmarshal(b)
}
func (m *PubKeyRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	if deterministic {
		return xxx_messageInfo_PubKeyRequest.Marshal(b, m, deterministic)
	} else {
		b = b[:cap(b)]
		n, err := m.MarshalToSizedBuffer(b)
		if err != nil {
			return nil, err
		}
		return b[:n], nil
	}
}
func (m *PubKeyRequest) XXX_Merge(src proto.Message) {
	xxx_messageInfo_PubKeyRequest.Merge(m, src)
}
func (m *PubKeyRequest) XXX_Size() int {
	return m.Size()
}
func (m *PubKeyRequest) XXX_DiscardUnknown() {
	xxx_messageInfo_PubKeyRequest.DiscardUnknown(m)
}

var xxx_messageInfo_PubKeyRequest proto.InternalMessageInfo

// GetChainId returns the chain ID, or "" for a nil receiver.
func (m *PubKeyRequest) GetChainId() string {
	if m != nil {
		return m.ChainId
	}
	return ""
}

// PubKeyResponse is a response message containing the public key.
type PubKeyResponse struct {
	// PubKey is embedded by value (non-nullable in the proto definition).
	PubKey crypto.PublicKey `protobuf:"bytes,1,opt,name=pub_key,json=pubKey,proto3" json:"pub_key"`
	// Error is set when the remote signer could not fulfil the request.
	Error *RemoteSignerError `protobuf:"bytes,2,opt,name=error,proto3" json:"error,omitempty"`
}

// The XXX_* methods and xxx_messageInfo_* vars in this block are internal
// gogo/protobuf plumbing used by the proto runtime.
func (m *PubKeyResponse) Reset()         { *m = PubKeyResponse{} }
func (m *PubKeyResponse) String() string { return proto.CompactTextString(m) }
func (*PubKeyResponse) ProtoMessage()    {}
func (*PubKeyResponse) Descriptor() ([]byte, []int) {
	return fileDescriptor_cb4e437a5328cf9c, []int{2}
}
func (m *PubKeyResponse) XXX_Unmarshal(b []byte) error {
	return m.Unmarshal(b)
}
func (m *PubKeyResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	if deterministic {
		return xxx_messageInfo_PubKeyResponse.Marshal(b, m, deterministic)
	} else {
		b = b[:cap(b)]
		n, err := m.MarshalToSizedBuffer(b)
		if err != nil {
			return nil, err
		}
		return b[:n], nil
	}
}
func (m *PubKeyResponse) XXX_Merge(src proto.Message) {
	xxx_messageInfo_PubKeyResponse.Merge(m, src)
}
func (m *PubKeyResponse) XXX_Size() int {
	return m.Size()
}
func (m *PubKeyResponse) XXX_DiscardUnknown() {
	xxx_messageInfo_PubKeyResponse.DiscardUnknown(m)
}

var xxx_messageInfo_PubKeyResponse proto.InternalMessageInfo

// GetPubKey returns the public key, or the zero value for a nil receiver.
func (m *PubKeyResponse) GetPubKey() crypto.PublicKey {
	if m != nil {
		return m.PubKey
	}
	return crypto.PublicKey{}
}

// GetError returns the remote-signer error, or nil for a nil receiver.
func (m *PubKeyResponse) GetError() *RemoteSignerError {
	if m != nil {
		return m.Error
	}
	return nil
}

// SignVoteRequest is a request to sign a vote
type SignVoteRequest struct {
	Vote    *types.Vote `protobuf:"bytes,1,opt,name=vote,proto3" json:"vote,omitempty"`
	ChainId string      `protobuf:"bytes,2,opt,name=chain_id,json=chainId,proto3" json:"chain_id,omitempty"`
}

func (m *SignVoteRequest) Reset()         { *m = SignVoteRequest{} }
func (m *SignVoteRequest) String() string { return proto.CompactTextString(m) }
func (*SignVoteRequest) ProtoMessage()    {}
func (*SignVoteRequest) Descriptor() ([]byte, []int) {
	return fileDescriptor_cb4e437a5328cf9c, []int{3}
}
func (m *SignVoteRequest) XXX_Unmarshal(b []byte) error {
	return m.Unmarshal(b)
}
func (m *SignVoteRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	if deterministic {
		return xxx_messageInfo_SignVoteRequest.Marshal(b, m, deterministic)
	} else {
		b = b[:cap(b)]
		n, err := m.MarshalToSizedBuffer(b)
		if err != nil {
			return nil, err
		}
		return b[:n], nil
	}
}
func (m *SignVoteRequest) XXX_Merge(src proto.Message) {
	xxx_messageInfo_SignVoteRequest.Merge(m, src)
}
func (m *SignVoteRequest) XXX_Size() int {
	return m.Size()
}
func (m *SignVoteRequest) XXX_DiscardUnknown() {
	xxx_messageInfo_SignVoteRequest.DiscardUnknown(m)
}

var xxx_messageInfo_SignVoteRequest proto.InternalMessageInfo

// GetVote returns the vote to sign, or nil for a nil receiver.
func (m *SignVoteRequest) GetVote() *types.Vote {
	if m != nil {
		return m.Vote
	}
	return nil
}

// GetChainId returns the chain ID, or "" for a nil receiver.
func (m *SignVoteRequest) GetChainId() string {
	if m != nil {
		return m.ChainId
	}
	return ""
}

// SignedVoteResponse is a response containing a signed vote or an error
type SignedVoteResponse struct {
	// Vote is embedded by value (non-nullable in the proto definition).
	Vote  types.Vote         `protobuf:"bytes,1,opt,name=vote,proto3" json:"vote"`
	Error *RemoteSignerError `protobuf:"bytes,2,opt,name=error,proto3" json:"error,omitempty"`
}

func (m *SignedVoteResponse) Reset()         { *m = SignedVoteResponse{} }
func (m *SignedVoteResponse) String() string { return proto.CompactTextString(m) }
func (*SignedVoteResponse) ProtoMessage()    {}
func (*SignedVoteResponse) Descriptor() ([]byte, []int) {
	return fileDescriptor_cb4e437a5328cf9c, []int{4}
}
func (m *SignedVoteResponse) XXX_Unmarshal(b []byte) error {
	return m.Unmarshal(b)
}
func (m *SignedVoteResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	if deterministic {
		return xxx_messageInfo_SignedVoteResponse.Marshal(b, m, deterministic)
	} else {
		b = b[:cap(b)]
		n, err := m.MarshalToSizedBuffer(b)
		if err != nil {
			return nil, err
		}
		return b[:n], nil
	}
}
func (m *SignedVoteResponse) XXX_Merge(src proto.Message) {
	xxx_messageInfo_SignedVoteResponse.Merge(m, src)
}
func (m *SignedVoteResponse) XXX_Size() int {
	return m.Size()
}
func (m *SignedVoteResponse) XXX_DiscardUnknown() {
	xxx_messageInfo_SignedVoteResponse.DiscardUnknown(m)
}

var xxx_messageInfo_SignedVoteResponse proto.InternalMessageInfo

// GetVote returns the signed vote, or the zero value for a nil receiver.
func (m *SignedVoteResponse) GetVote() types.Vote {
	if m != nil {
		return m.Vote
	}
	return types.Vote{}
}

// GetError returns the remote-signer error, or nil for a nil receiver.
func (m *SignedVoteResponse) GetError() *RemoteSignerError {
	if m != nil {
		return m.Error
	}
	return nil
}

// SignProposalRequest is a request to sign a proposal
type SignProposalRequest struct {
	Proposal *types.Proposal `protobuf:"bytes,1,opt,name=proposal,proto3" json:"proposal,omitempty"`
	ChainId  string          `protobuf:"bytes,2,opt,name=chain_id,json=chainId,proto3" json:"chain_id,omitempty"`
}

func (m *SignProposalRequest) Reset()         { *m = SignProposalRequest{} }
func (m *SignProposalRequest) String() string { return proto.CompactTextString(m) }
func (*SignProposalRequest) ProtoMessage()    {}
func (*SignProposalRequest) Descriptor() ([]byte, []int) {
	return fileDescriptor_cb4e437a5328cf9c, []int{5}
}
func (m *SignProposalRequest) XXX_Unmarshal(b []byte) error {
	return m.Unmarshal(b)
}
func (m *SignProposalRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	if deterministic {
		return xxx_messageInfo_SignProposalRequest.Marshal(b, m, deterministic)
	} else {
		b = b[:cap(b)]
		n, err := m.MarshalToSizedBuffer(b)
		if err != nil {
			return nil, err
		}
		return b[:n], nil
	}
}
func (m *SignProposalRequest) XXX_Merge(src proto.Message) {
	xxx_messageInfo_SignProposalRequest.Merge(m, src)
}
func (m *SignProposalRequest) XXX_Size() int {
	return m.Size()
}
func (m *SignProposalRequest) XXX_DiscardUnknown() {
	xxx_messageInfo_SignProposalRequest.DiscardUnknown(m)
}

var xxx_messageInfo_SignProposalRequest proto.InternalMessageInfo

// GetProposal returns the proposal to sign, or nil for a nil receiver.
func (m *SignProposalRequest) GetProposal() *types.Proposal {
	if m != nil {
		return m.Proposal
	}
	return nil
}

// GetChainId returns the chain ID, or "" for a nil receiver.
func (m *SignProposalRequest) GetChainId() string {
	if m != nil {
		return m.ChainId
	}
	return ""
}

// SignedProposalResponse is response containing a signed proposal or an error
type SignedProposalResponse struct {
	// Proposal is embedded by value (non-nullable in the proto definition).
	Proposal types.Proposal     `protobuf:"bytes,1,opt,name=proposal,proto3" json:"proposal"`
	Error    *RemoteSignerError `protobuf:"bytes,2,opt,name=error,proto3" json:"error,omitempty"`
}

func (m *SignedProposalResponse) Reset()         { *m = SignedProposalResponse{} }
func (m *SignedProposalResponse) String() string { return proto.CompactTextString(m) }
func (*SignedProposalResponse) ProtoMessage()    {}
func (*SignedProposalResponse) Descriptor() ([]byte, []int) {
	return fileDescriptor_cb4e437a5328cf9c, []int{6}
}
func (m *SignedProposalResponse) XXX_Unmarshal(b []byte) error {
	return m.Unmarshal(b)
}
func (m *SignedProposalResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	if deterministic {
		return xxx_messageInfo_SignedProposalResponse.Marshal(b, m, deterministic)
	} else {
		b = b[:cap(b)]
		n, err := m.MarshalToSizedBuffer(b)
		if err != nil {
			return nil, err
		}
		return b[:n], nil
	}
}
func (m *SignedProposalResponse) XXX_Merge(src proto.Message) {
	xxx_messageInfo_SignedProposalResponse.Merge(m, src)
}
func (m *SignedProposalResponse) XXX_Size() int {
	return m.Size()
}
func (m *SignedProposalResponse) XXX_DiscardUnknown() {
	xxx_messageInfo_SignedProposalResponse.DiscardUnknown(m)
}

var xxx_messageInfo_SignedProposalResponse proto.InternalMessageInfo

// GetProposal returns the signed proposal, or the zero value for a nil receiver.
func (m *SignedProposalResponse) GetProposal() types.Proposal {
	if m != nil {
		return m.Proposal
	}
	return types.Proposal{}
}

// GetError returns the remote-signer error, or nil for a nil receiver.
func (m *SignedProposalResponse) GetError() *RemoteSignerError {
	if m != nil {
		return m.Error
	}
	return nil
}

// PingRequest is a request to confirm that the connection is alive.
// PingRequest carries no fields; its arrival alone is the keep-alive signal.
type PingRequest struct {
}

// The XXX_* methods and xxx_messageInfo_* var below are internal
// gogo/protobuf plumbing used by the proto runtime.
func (m *PingRequest) Reset()         { *m = PingRequest{} }
func (m *PingRequest) String() string { return proto.CompactTextString(m) }
func (*PingRequest) ProtoMessage()    {}
func (*PingRequest) Descriptor() ([]byte, []int) {
	return fileDescriptor_cb4e437a5328cf9c, []int{7}
}
func (m *PingRequest) XXX_Unmarshal(b []byte) error {
	return m.Unmarshal(b)
}
func (m *PingRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	if deterministic {
		return xxx_messageInfo_PingRequest.Marshal(b, m, deterministic)
	} else {
		b = b[:cap(b)]
		n, err := m.MarshalToSizedBuffer(b)
		if err != nil {
			return nil, err
		}
		return b[:n], nil
	}
}
func (m *PingRequest) XXX_Merge(src proto.Message) {
	xxx_messageInfo_PingRequest.Merge(m, src)
}
func (m *PingRequest) XXX_Size() int {
	return m.Size()
}
func (m *PingRequest) XXX_DiscardUnknown() {
	xxx_messageInfo_PingRequest.DiscardUnknown(m)
}

var xxx_messageInfo_PingRequest proto.InternalMessageInfo

// PingResponse is a response to confirm that the connection is alive.
// PingResponse carries no fields; its arrival alone confirms liveness.
type PingResponse struct {
}

// The XXX_* methods and xxx_messageInfo_* vars in this block are internal
// gogo/protobuf plumbing used by the proto runtime.
func (m *PingResponse) Reset()         { *m = PingResponse{} }
func (m *PingResponse) String() string { return proto.CompactTextString(m) }
func (*PingResponse) ProtoMessage()    {}
func (*PingResponse) Descriptor() ([]byte, []int) {
	return fileDescriptor_cb4e437a5328cf9c, []int{8}
}
func (m *PingResponse) XXX_Unmarshal(b []byte) error {
	return m.Unmarshal(b)
}
func (m *PingResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	if deterministic {
		return xxx_messageInfo_PingResponse.Marshal(b, m, deterministic)
	} else {
		b = b[:cap(b)]
		n, err := m.MarshalToSizedBuffer(b)
		if err != nil {
			return nil, err
		}
		return b[:n], nil
	}
}
func (m *PingResponse) XXX_Merge(src proto.Message) {
	xxx_messageInfo_PingResponse.Merge(m, src)
}
func (m *PingResponse) XXX_Size() int {
	return m.Size()
}
func (m *PingResponse) XXX_DiscardUnknown() {
	xxx_messageInfo_PingResponse.DiscardUnknown(m)
}

var xxx_messageInfo_PingResponse proto.InternalMessageInfo

// Message is the oneof envelope for all privval requests and responses;
// exactly one of the Message_* wrappers below is assigned to Sum.
type Message struct {
	// Types that are valid to be assigned to Sum:
	//	*Message_PubKeyRequest
	//	*Message_PubKeyResponse
	//	*Message_SignVoteRequest
	//	*Message_SignedVoteResponse
	//	*Message_SignProposalRequest
	//	*Message_SignedProposalResponse
	//	*Message_PingRequest
	//	*Message_PingResponse
	Sum isMessage_Sum `protobuf_oneof:"sum"`
}

func (m *Message) Reset()         { *m = Message{} }
func (m *Message) String() string { return proto.CompactTextString(m) }
func (*Message) ProtoMessage()    {}
func (*Message) Descriptor() ([]byte, []int) {
	return fileDescriptor_cb4e437a5328cf9c, []int{9}
}
func (m *Message) XXX_Unmarshal(b []byte) error {
	return m.Unmarshal(b)
}
func (m *Message) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	if deterministic {
		return xxx_messageInfo_Message.Marshal(b, m, deterministic)
	} else {
		b = b[:cap(b)]
		n, err := m.MarshalToSizedBuffer(b)
		if err != nil {
			return nil, err
		}
		return b[:n], nil
	}
}
func (m *Message) XXX_Merge(src proto.Message) {
	xxx_messageInfo_Message.Merge(m, src)
}
func (m *Message) XXX_Size() int {
	return m.Size()
}
func (m *Message) XXX_DiscardUnknown() {
	xxx_messageInfo_Message.DiscardUnknown(m)
}

var xxx_messageInfo_Message proto.InternalMessageInfo

// isMessage_Sum is the interface every oneof wrapper type implements.
type isMessage_Sum interface {
	isMessage_Sum()
	MarshalTo([]byte) (int, error)
	Size() int
}

// One wrapper struct per oneof case; the field number in each tag matches
// the case's proto field number.
type Message_PubKeyRequest struct {
	PubKeyRequest *PubKeyRequest `protobuf:"bytes,1,opt,name=pub_key_request,json=pubKeyRequest,proto3,oneof" json:"pub_key_request,omitempty"`
}
type Message_PubKeyResponse struct {
	PubKeyResponse *PubKeyResponse `protobuf:"bytes,2,opt,name=pub_key_response,json=pubKeyResponse,proto3,oneof" json:"pub_key_response,omitempty"`
}
type Message_SignVoteRequest struct {
	SignVoteRequest *SignVoteRequest `protobuf:"bytes,3,opt,name=sign_vote_request,json=signVoteRequest,proto3,oneof" json:"sign_vote_request,omitempty"`
}
type Message_SignedVoteResponse struct {
	SignedVoteResponse *SignedVoteResponse `protobuf:"bytes,4,opt,name=signed_vote_response,json=signedVoteResponse,proto3,oneof" json:"signed_vote_response,omitempty"`
}
type Message_SignProposalRequest struct {
	SignProposalRequest *SignProposalRequest `protobuf:"bytes,5,opt,name=sign_proposal_request,json=signProposalRequest,proto3,oneof" json:"sign_proposal_request,omitempty"`
}
type Message_SignedProposalResponse struct {
	SignedProposalResponse *SignedProposalResponse `protobuf:"bytes,6,opt,name=signed_proposal_response,json=signedProposalResponse,proto3,oneof" json:"signed_proposal_response,omitempty"`
}
type Message_PingRequest struct {
	PingRequest *PingRequest `protobuf:"bytes,7,opt,name=ping_request,json=pingRequest,proto3,oneof" json:"ping_request,omitempty"`
}
type Message_PingResponse struct {
	PingResponse *PingResponse `protobuf:"bytes,8,opt,name=ping_response,json=pingResponse,proto3,oneof" json:"ping_response,omitempty"`
}

func (*Message_PubKeyRequest) isMessage_Sum()          {}
func (*Message_PubKeyResponse) isMessage_Sum()         {}
func (*Message_SignVoteRequest) isMessage_Sum()        {}
func (*Message_SignedVoteResponse) isMessage_Sum()     {}
func (*Message_SignProposalRequest) isMessage_Sum()    {}
func (*Message_SignedProposalResponse) isMessage_Sum() {}
func (*Message_PingRequest) isMessage_Sum()            {}
func (*Message_PingResponse) isMessage_Sum()           {}

// GetSum returns the currently assigned oneof case, or nil for a nil receiver.
func (m *Message) GetSum() isMessage_Sum {
	if m != nil {
		return m.Sum
	}
	return nil
}

// Each Get* below returns the payload if that case is assigned, else nil.
func (m *Message) GetPubKeyRequest() *PubKeyRequest {
	if x, ok := m.GetSum().(*Message_PubKeyRequest); ok {
		return x.PubKeyRequest
	}
	return nil
}

func (m *Message) GetPubKeyResponse() *PubKeyResponse {
	if x, ok := m.GetSum().(*Message_PubKeyResponse); ok {
		return x.PubKeyResponse
	}
	return nil
}

func (m *Message) GetSignVoteRequest() *SignVoteRequest {
	if x, ok := m.GetSum().(*Message_SignVoteRequest); ok {
		return x.SignVoteRequest
	}
	return nil
}

func (m *Message) GetSignedVoteResponse() *SignedVoteResponse {
	if x, ok := m.GetSum().(*Message_SignedVoteResponse); ok {
		return x.SignedVoteResponse
	}
	return nil
}

func (m *Message) GetSignProposalRequest() *SignProposalRequest {
	if x, ok := m.GetSum().(*Message_SignProposalRequest); ok {
		return x.SignProposalRequest
	}
	return nil
}

func (m *Message) GetSignedProposalResponse() *SignedProposalResponse {
	if x, ok := m.GetSum().(*Message_SignedProposalResponse); ok {
		return x.SignedProposalResponse
	}
	return nil
}

func (m *Message) GetPingRequest() *PingRequest {
	if x, ok := m.GetSum().(*Message_PingRequest); ok {
		return x.PingRequest
	}
	return nil
}

func (m *Message) GetPingResponse() *PingResponse {
	if x, ok := m.GetSum().(*Message_PingResponse); ok {
		return x.PingResponse
	}
	return nil
}

// XXX_OneofWrappers is for the internal use of the proto package.
func (*Message) XXX_OneofWrappers() []interface{} { return []interface{}{ (*Message_PubKeyRequest)(nil), (*Message_PubKeyResponse)(nil), (*Message_SignVoteRequest)(nil), (*Message_SignedVoteResponse)(nil), (*Message_SignProposalRequest)(nil), (*Message_SignedProposalResponse)(nil), (*Message_PingRequest)(nil), (*Message_PingResponse)(nil), } } func init() { proto.RegisterEnum("tendermint.privval.Errors", Errors_name, Errors_value) proto.RegisterType((*RemoteSignerError)(nil), "tendermint.privval.RemoteSignerError") proto.RegisterType((*PubKeyRequest)(nil), "tendermint.privval.PubKeyRequest") proto.RegisterType((*PubKeyResponse)(nil), "tendermint.privval.PubKeyResponse") proto.RegisterType((*SignVoteRequest)(nil), "tendermint.privval.SignVoteRequest") proto.RegisterType((*SignedVoteResponse)(nil), "tendermint.privval.SignedVoteResponse") proto.RegisterType((*SignProposalRequest)(nil), "tendermint.privval.SignProposalRequest") proto.RegisterType((*SignedProposalResponse)(nil), "tendermint.privval.SignedProposalResponse") proto.RegisterType((*PingRequest)(nil), "tendermint.privval.PingRequest") proto.RegisterType((*PingResponse)(nil), "tendermint.privval.PingResponse") proto.RegisterType((*Message)(nil), "tendermint.privval.Message") } func init() { proto.RegisterFile("tendermint/privval/types.proto", fileDescriptor_cb4e437a5328cf9c) } var fileDescriptor_cb4e437a5328cf9c = []byte{ // 774 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x95, 0x4f, 0x4f, 0xe3, 0x46, 0x18, 0xc6, 0x6d, 0xc8, 0x1f, 0x78, 0x43, 0x42, 0x18, 0x28, 0x0d, 0x11, 0x35, 0x69, 0xaa, 0xb6, 0x28, 0x52, 0x93, 0x8a, 0x4a, 0xbd, 0xd0, 0x4b, 0x01, 0xab, 0x8e, 0x22, 0x9c, 0x74, 0x12, 0x0a, 0x42, 0xaa, 0xac, 0xc4, 0x99, 0x3a, 0x16, 0x89, 0xc7, 0xf5, 0x38, 0x48, 0x39, 0xf7, 0xd6, 0x53, 0xa5, 0x7e, 0x89, 0x3d, 0xef, 0xa7, 0xe0, 0xc8, 0x71, 0x4f, 0xab, 0x15, 0x7c, 0x91, 0x55, 0xc6, 0x13, 0xc7, 0xf9, 0x87, 0x76, 0xc5, 0xcd, 0xf3, 0xbe, 0xef, 0xfc, 0xde, 
0xe7, 0xb1, 0x9f, 0x28, 0xa0, 0xf8, 0xc4, 0xe9, 0x12, 0x6f, 0x60, 0x3b, 0x7e, 0xc5, 0xf5, 0xec, 0xfb, 0xfb, 0x76, 0xbf, 0xe2, 0x8f, 0x5c, 0xc2, 0xca, 0xae, 0x47, 0x7d, 0x8a, 0xd0, 0xb4, 0x5f, 0x16, 0xfd, 0xfc, 0x61, 0xe4, 0x8e, 0xe9, 0x8d, 0x5c, 0x9f, 0x56, 0xee, 0xc8, 0x48, 0xdc, 0x98, 0xe9, 0x72, 0x52, 0x94, 0x97, 0xdf, 0xb3, 0xa8, 0x45, 0xf9, 0x63, 0x65, 0xfc, 0x14, 0x54, 0x8b, 0x55, 0xd8, 0xc1, 0x64, 0x40, 0x7d, 0xd2, 0xb4, 0x2d, 0x87, 0x78, 0xaa, 0xe7, 0x51, 0x0f, 0x21, 0x88, 0x99, 0xb4, 0x4b, 0x72, 0x72, 0x41, 0x3e, 0x8e, 0x63, 0xfe, 0x8c, 0x0a, 0x90, 0xea, 0x12, 0x66, 0x7a, 0xb6, 0xeb, 0xdb, 0xd4, 0xc9, 0xad, 0x15, 0xe4, 0xe3, 0x4d, 0x1c, 0x2d, 0x15, 0x4b, 0x90, 0x6e, 0x0c, 0x3b, 0x35, 0x32, 0xc2, 0xe4, 0xef, 0x21, 0x61, 0x3e, 0x3a, 0x80, 0x0d, 0xb3, 0xd7, 0xb6, 0x1d, 0xc3, 0xee, 0x72, 0xd4, 0x26, 0x4e, 0xf2, 0x73, 0xb5, 0x5b, 0xfc, 0x57, 0x86, 0xcc, 0x64, 0x98, 0xb9, 0xd4, 0x61, 0x04, 0x9d, 0x42, 0xd2, 0x1d, 0x76, 0x8c, 0x3b, 0x32, 0xe2, 0xc3, 0xa9, 0x93, 0xc3, 0x72, 0xe4, 0x0d, 0x04, 0x6e, 0xcb, 0x8d, 0x61, 0xa7, 0x6f, 0x9b, 0x35, 0x32, 0x3a, 0x8b, 0x3d, 0xbc, 0x3f, 0x92, 0x70, 0xc2, 0xe5, 0x10, 0x74, 0x0a, 0x71, 0x32, 0x96, 0xce, 0x75, 0xa5, 0x4e, 0xbe, 0x2d, 0x2f, 0xbe, 0xbc, 0xf2, 0x82, 0x4f, 0x1c, 0xdc, 0x29, 0xde, 0xc0, 0xf6, 0xb8, 0xfa, 0x07, 0xf5, 0xc9, 0x44, 0x7a, 0x09, 0x62, 0xf7, 0xd4, 0x27, 0x42, 0xc9, 0x7e, 0x14, 0x17, 0xbc, 0x53, 0x3e, 0xcc, 0x67, 0x66, 0x6c, 0xae, 0xcd, 0xda, 0xfc, 0x47, 0x06, 0xc4, 0x17, 0x76, 0x03, 0xb8, 0xb0, 0xfa, 0xe3, 0xa7, 0xd0, 0x85, 0xc3, 0x60, 0xc7, 0xab, 0xfc, 0xf5, 0x60, 0x77, 0x5c, 0x6d, 0x78, 0xd4, 0xa5, 0xac, 0xdd, 0x9f, 0x78, 0xfc, 0x19, 0x36, 0x5c, 0x51, 0x12, 0x4a, 0xf2, 0x8b, 0x4a, 0xc2, 0x4b, 0xe1, 0xec, 0x4b, 0x7e, 0xff, 0x97, 0x61, 0x3f, 0xf0, 0x3b, 0x5d, 0x26, 0x3c, 0xff, 0xf2, 0x39, 0xdb, 0x84, 0xf7, 0xe9, 0xce, 0x57, 0xf9, 0x4f, 0x43, 0xaa, 0x61, 0x3b, 0x96, 0xf0, 0x5d, 0xcc, 0xc0, 0x56, 0x70, 0x0c, 0x94, 0x15, 0xdf, 0xc6, 0x21, 0x79, 0x49, 0x18, 0x6b, 0x5b, 0x04, 0xd5, 0x60, 0x5b, 0x84, 0xd0, 0xf0, 
0x82, 0x71, 0x21, 0xf6, 0xeb, 0x65, 0x1b, 0x67, 0xe2, 0xae, 0x49, 0x38, 0xed, 0xce, 0xe4, 0x5f, 0x87, 0xec, 0x14, 0x16, 0x2c, 0x13, 0xfa, 0x8b, 0x2f, 0xd1, 0x82, 0x49, 0x4d, 0xc2, 0x19, 0x77, 0xf6, 0x17, 0xf2, 0x3b, 0xec, 0x30, 0xdb, 0x72, 0x8c, 0x71, 0x22, 0x42, 0x79, 0xeb, 0x1c, 0xf8, 0xcd, 0x32, 0xe0, 0x5c, 0xa8, 0x35, 0x09, 0x6f, 0xb3, 0xb9, 0x9c, 0xdf, 0xc2, 0x1e, 0xe3, 0xdf, 0x6b, 0x02, 0x15, 0x32, 0x63, 0x9c, 0xfa, 0xdd, 0x2a, 0xea, 0x6c, 0x9e, 0x35, 0x09, 0x23, 0xb6, 0x98, 0xf2, 0x3f, 0xe1, 0x0b, 0x2e, 0x77, 0xf2, 0x11, 0x43, 0xc9, 0x71, 0x0e, 0xff, 0x7e, 0x15, 0x7c, 0x2e, 0xa7, 0x9a, 0x84, 0x77, 0xd9, 0x92, 0xf8, 0xfe, 0x05, 0x39, 0x21, 0x3d, 0xb2, 0x40, 0xc8, 0x4f, 0xf0, 0x0d, 0xa5, 0xd5, 0xf2, 0xe7, 0xe3, 0xa9, 0x49, 0x78, 0x9f, 0x2d, 0x0f, 0xee, 0x05, 0x6c, 0xb9, 0xb6, 0x63, 0x85, 0xea, 0x93, 0x9c, 0x7d, 0xb4, 0xf4, 0x0b, 0x4e, 0x53, 0xa6, 0x49, 0x38, 0xe5, 0x4e, 0x8f, 0xe8, 0x37, 0x48, 0x0b, 0x8a, 0x90, 0xb8, 0xc1, 0x31, 0x85, 0xd5, 0x98, 0x50, 0xd8, 0x96, 0x1b, 0x39, 0x9f, 0xc5, 0x61, 0x9d, 0x0d, 0x07, 0xa5, 0x37, 0x32, 0x24, 0x78, 0xc8, 0x19, 0x42, 0x90, 0x51, 0x31, 0xae, 0xe3, 0xa6, 0x71, 0xa5, 0xd7, 0xf4, 0xfa, 0xb5, 0x9e, 0x95, 0x90, 0x02, 0xf9, 0xb0, 0xa6, 0xde, 0x34, 0xd4, 0xf3, 0x96, 0x7a, 0x61, 0x60, 0xb5, 0xd9, 0xa8, 0xeb, 0x4d, 0x35, 0x2b, 0xa3, 0x1c, 0xec, 0x89, 0xbe, 0x5e, 0x37, 0xce, 0xeb, 0xba, 0xae, 0x9e, 0xb7, 0xaa, 0x75, 0x3d, 0xbb, 0x86, 0xbe, 0x82, 0x03, 0xd1, 0x99, 0x96, 0x8d, 0x56, 0xf5, 0x52, 0xad, 0x5f, 0xb5, 0xb2, 0xeb, 0xe8, 0x4b, 0xd8, 0x15, 0x6d, 0xac, 0xfe, 0x7a, 0x11, 0x36, 0x62, 0x11, 0xe2, 0x35, 0xae, 0xb6, 0xd4, 0xb0, 0x13, 0x3f, 0x33, 0x1e, 0x9e, 0x14, 0xf9, 0xf1, 0x49, 0x91, 0x3f, 0x3c, 0x29, 0xf2, 0x7f, 0xcf, 0x8a, 0xf4, 0xf8, 0xac, 0x48, 0xef, 0x9e, 0x15, 0xe9, 0x56, 0xb5, 0x6c, 0xbf, 0x37, 0xec, 0x94, 0x4d, 0x3a, 0xa8, 0x68, 0x8d, 0x6a, 0xb3, 0x45, 0xcc, 0x9e, 0x43, 0xfb, 0xd4, 0xb2, 0x09, 0xab, 0x98, 0x63, 0xbb, 0x0e, 0x1b, 0xb2, 0x1f, 0x88, 0x63, 0xd9, 0x0e, 0xa9, 0x04, 0xff, 0x5d, 0x8b, 0xff, 0x9a, 0x9d, 0x04, 
0xef, 0xfc, 0xf4, 0x31, 0x00, 0x00, 0xff, 0xff, 0xf3, 0x06, 0x11, 0xb1, 0x52, 0x07, 0x00, 0x00, } func (m *RemoteSignerError) Marshal() (dAtA []byte, err error) { size := m.Size() dAtA = make([]byte, size) n, err := m.MarshalToSizedBuffer(dAtA[:size]) if err != nil { return nil, err } return dAtA[:n], nil } func (m *RemoteSignerError) MarshalTo(dAtA []byte) (int, error) { size := m.Size() return m.MarshalToSizedBuffer(dAtA[:size]) } func (m *RemoteSignerError) MarshalToSizedBuffer(dAtA []byte) (int, error) { i := len(dAtA) _ = i var l int _ = l if len(m.Description) > 0 { i -= len(m.Description) copy(dAtA[i:], m.Description) i = encodeVarintTypes(dAtA, i, uint64(len(m.Description))) i-- dAtA[i] = 0x12 } if m.Code != 0 { i = encodeVarintTypes(dAtA, i, uint64(m.Code)) i-- dAtA[i] = 0x8 } return len(dAtA) - i, nil } func (m *PubKeyRequest) Marshal() (dAtA []byte, err error) { size := m.Size() dAtA = make([]byte, size) n, err := m.MarshalToSizedBuffer(dAtA[:size]) if err != nil { return nil, err } return dAtA[:n], nil } func (m *PubKeyRequest) MarshalTo(dAtA []byte) (int, error) { size := m.Size() return m.MarshalToSizedBuffer(dAtA[:size]) } func (m *PubKeyRequest) MarshalToSizedBuffer(dAtA []byte) (int, error) { i := len(dAtA) _ = i var l int _ = l if len(m.ChainId) > 0 { i -= len(m.ChainId) copy(dAtA[i:], m.ChainId) i = encodeVarintTypes(dAtA, i, uint64(len(m.ChainId))) i-- dAtA[i] = 0xa } return len(dAtA) - i, nil } func (m *PubKeyResponse) Marshal() (dAtA []byte, err error) { size := m.Size() dAtA = make([]byte, size) n, err := m.MarshalToSizedBuffer(dAtA[:size]) if err != nil { return nil, err } return dAtA[:n], nil } func (m *PubKeyResponse) MarshalTo(dAtA []byte) (int, error) { size := m.Size() return m.MarshalToSizedBuffer(dAtA[:size]) } func (m *PubKeyResponse) MarshalToSizedBuffer(dAtA []byte) (int, error) { i := len(dAtA) _ = i var l int _ = l if m.Error != nil { { size, err := m.Error.MarshalToSizedBuffer(dAtA[:i]) if err != nil { return 0, err } i -= size 
i = encodeVarintTypes(dAtA, i, uint64(size)) } i-- dAtA[i] = 0x12 } { size, err := m.PubKey.MarshalToSizedBuffer(dAtA[:i]) if err != nil { return 0, err } i -= size i = encodeVarintTypes(dAtA, i, uint64(size)) } i-- dAtA[i] = 0xa return len(dAtA) - i, nil } func (m *SignVoteRequest) Marshal() (dAtA []byte, err error) { size := m.Size() dAtA = make([]byte, size) n, err := m.MarshalToSizedBuffer(dAtA[:size]) if err != nil { return nil, err } return dAtA[:n], nil } func (m *SignVoteRequest) MarshalTo(dAtA []byte) (int, error) { size := m.Size() return m.MarshalToSizedBuffer(dAtA[:size]) } func (m *SignVoteRequest) MarshalToSizedBuffer(dAtA []byte) (int, error) { i := len(dAtA) _ = i var l int _ = l if len(m.ChainId) > 0 { i -= len(m.ChainId) copy(dAtA[i:], m.ChainId) i = encodeVarintTypes(dAtA, i, uint64(len(m.ChainId))) i-- dAtA[i] = 0x12 } if m.Vote != nil { { size, err := m.Vote.MarshalToSizedBuffer(dAtA[:i]) if err != nil { return 0, err } i -= size i = encodeVarintTypes(dAtA, i, uint64(size)) } i-- dAtA[i] = 0xa } return len(dAtA) - i, nil } func (m *SignedVoteResponse) Marshal() (dAtA []byte, err error) { size := m.Size() dAtA = make([]byte, size) n, err := m.MarshalToSizedBuffer(dAtA[:size]) if err != nil { return nil, err } return dAtA[:n], nil } func (m *SignedVoteResponse) MarshalTo(dAtA []byte) (int, error) { size := m.Size() return m.MarshalToSizedBuffer(dAtA[:size]) } func (m *SignedVoteResponse) MarshalToSizedBuffer(dAtA []byte) (int, error) { i := len(dAtA) _ = i var l int _ = l if m.Error != nil { { size, err := m.Error.MarshalToSizedBuffer(dAtA[:i]) if err != nil { return 0, err } i -= size i = encodeVarintTypes(dAtA, i, uint64(size)) } i-- dAtA[i] = 0x12 } { size, err := m.Vote.MarshalToSizedBuffer(dAtA[:i]) if err != nil { return 0, err } i -= size i = encodeVarintTypes(dAtA, i, uint64(size)) } i-- dAtA[i] = 0xa return len(dAtA) - i, nil } func (m *SignProposalRequest) Marshal() (dAtA []byte, err error) { size := m.Size() dAtA = make([]byte, size) 
n, err := m.MarshalToSizedBuffer(dAtA[:size]) if err != nil { return nil, err } return dAtA[:n], nil } func (m *SignProposalRequest) MarshalTo(dAtA []byte) (int, error) { size := m.Size() return m.MarshalToSizedBuffer(dAtA[:size]) } func (m *SignProposalRequest) MarshalToSizedBuffer(dAtA []byte) (int, error) { i := len(dAtA) _ = i var l int _ = l if len(m.ChainId) > 0 { i -= len(m.ChainId) copy(dAtA[i:], m.ChainId) i = encodeVarintTypes(dAtA, i, uint64(len(m.ChainId))) i-- dAtA[i] = 0x12 } if m.Proposal != nil { { size, err := m.Proposal.MarshalToSizedBuffer(dAtA[:i]) if err != nil { return 0, err } i -= size i = encodeVarintTypes(dAtA, i, uint64(size)) } i-- dAtA[i] = 0xa } return len(dAtA) - i, nil } func (m *SignedProposalResponse) Marshal() (dAtA []byte, err error) { size := m.Size() dAtA = make([]byte, size) n, err := m.MarshalToSizedBuffer(dAtA[:size]) if err != nil { return nil, err } return dAtA[:n], nil } func (m *SignedProposalResponse) MarshalTo(dAtA []byte) (int, error) { size := m.Size() return m.MarshalToSizedBuffer(dAtA[:size]) } func (m *SignedProposalResponse) MarshalToSizedBuffer(dAtA []byte) (int, error) { i := len(dAtA) _ = i var l int _ = l if m.Error != nil { { size, err := m.Error.MarshalToSizedBuffer(dAtA[:i]) if err != nil { return 0, err } i -= size i = encodeVarintTypes(dAtA, i, uint64(size)) } i-- dAtA[i] = 0x12 } { size, err := m.Proposal.MarshalToSizedBuffer(dAtA[:i]) if err != nil { return 0, err } i -= size i = encodeVarintTypes(dAtA, i, uint64(size)) } i-- dAtA[i] = 0xa return len(dAtA) - i, nil } func (m *PingRequest) Marshal() (dAtA []byte, err error) { size := m.Size() dAtA = make([]byte, size) n, err := m.MarshalToSizedBuffer(dAtA[:size]) if err != nil { return nil, err } return dAtA[:n], nil } func (m *PingRequest) MarshalTo(dAtA []byte) (int, error) { size := m.Size() return m.MarshalToSizedBuffer(dAtA[:size]) } func (m *PingRequest) MarshalToSizedBuffer(dAtA []byte) (int, error) { i := len(dAtA) _ = i var l int _ = l return 
len(dAtA) - i, nil } func (m *PingResponse) Marshal() (dAtA []byte, err error) { size := m.Size() dAtA = make([]byte, size) n, err := m.MarshalToSizedBuffer(dAtA[:size]) if err != nil { return nil, err } return dAtA[:n], nil } func (m *PingResponse) MarshalTo(dAtA []byte) (int, error) { size := m.Size() return m.MarshalToSizedBuffer(dAtA[:size]) } func (m *PingResponse) MarshalToSizedBuffer(dAtA []byte) (int, error) { i := len(dAtA) _ = i var l int _ = l return len(dAtA) - i, nil } func (m *Message) Marshal() (dAtA []byte, err error) { size := m.Size() dAtA = make([]byte, size) n, err := m.MarshalToSizedBuffer(dAtA[:size]) if err != nil { return nil, err } return dAtA[:n], nil } func (m *Message) MarshalTo(dAtA []byte) (int, error) { size := m.Size() return m.MarshalToSizedBuffer(dAtA[:size]) } func (m *Message) MarshalToSizedBuffer(dAtA []byte) (int, error) { i := len(dAtA) _ = i var l int _ = l if m.Sum != nil { { size := m.Sum.Size() i -= size if _, err := m.Sum.MarshalTo(dAtA[i:]); err != nil { return 0, err } } } return len(dAtA) - i, nil } func (m *Message_PubKeyRequest) MarshalTo(dAtA []byte) (int, error) { size := m.Size() return m.MarshalToSizedBuffer(dAtA[:size]) } func (m *Message_PubKeyRequest) MarshalToSizedBuffer(dAtA []byte) (int, error) { i := len(dAtA) if m.PubKeyRequest != nil { { size, err := m.PubKeyRequest.MarshalToSizedBuffer(dAtA[:i]) if err != nil { return 0, err } i -= size i = encodeVarintTypes(dAtA, i, uint64(size)) } i-- dAtA[i] = 0xa } return len(dAtA) - i, nil } func (m *Message_PubKeyResponse) MarshalTo(dAtA []byte) (int, error) { size := m.Size() return m.MarshalToSizedBuffer(dAtA[:size]) } func (m *Message_PubKeyResponse) MarshalToSizedBuffer(dAtA []byte) (int, error) { i := len(dAtA) if m.PubKeyResponse != nil { { size, err := m.PubKeyResponse.MarshalToSizedBuffer(dAtA[:i]) if err != nil { return 0, err } i -= size i = encodeVarintTypes(dAtA, i, uint64(size)) } i-- dAtA[i] = 0x12 } return len(dAtA) - i, nil } func (m 
*Message_SignVoteRequest) MarshalTo(dAtA []byte) (int, error) { size := m.Size() return m.MarshalToSizedBuffer(dAtA[:size]) } func (m *Message_SignVoteRequest) MarshalToSizedBuffer(dAtA []byte) (int, error) { i := len(dAtA) if m.SignVoteRequest != nil { { size, err := m.SignVoteRequest.MarshalToSizedBuffer(dAtA[:i]) if err != nil { return 0, err } i -= size i = encodeVarintTypes(dAtA, i, uint64(size)) } i-- dAtA[i] = 0x1a } return len(dAtA) - i, nil } func (m *Message_SignedVoteResponse) MarshalTo(dAtA []byte) (int, error) { size := m.Size() return m.MarshalToSizedBuffer(dAtA[:size]) } func (m *Message_SignedVoteResponse) MarshalToSizedBuffer(dAtA []byte) (int, error) { i := len(dAtA) if m.SignedVoteResponse != nil { { size, err := m.SignedVoteResponse.MarshalToSizedBuffer(dAtA[:i]) if err != nil { return 0, err } i -= size i = encodeVarintTypes(dAtA, i, uint64(size)) } i-- dAtA[i] = 0x22 } return len(dAtA) - i, nil } func (m *Message_SignProposalRequest) MarshalTo(dAtA []byte) (int, error) { size := m.Size() return m.MarshalToSizedBuffer(dAtA[:size]) } func (m *Message_SignProposalRequest) MarshalToSizedBuffer(dAtA []byte) (int, error) { i := len(dAtA) if m.SignProposalRequest != nil { { size, err := m.SignProposalRequest.MarshalToSizedBuffer(dAtA[:i]) if err != nil { return 0, err } i -= size i = encodeVarintTypes(dAtA, i, uint64(size)) } i-- dAtA[i] = 0x2a } return len(dAtA) - i, nil } func (m *Message_SignedProposalResponse) MarshalTo(dAtA []byte) (int, error) { size := m.Size() return m.MarshalToSizedBuffer(dAtA[:size]) } func (m *Message_SignedProposalResponse) MarshalToSizedBuffer(dAtA []byte) (int, error) { i := len(dAtA) if m.SignedProposalResponse != nil { { size, err := m.SignedProposalResponse.MarshalToSizedBuffer(dAtA[:i]) if err != nil { return 0, err } i -= size i = encodeVarintTypes(dAtA, i, uint64(size)) } i-- dAtA[i] = 0x32 } return len(dAtA) - i, nil } func (m *Message_PingRequest) MarshalTo(dAtA []byte) (int, error) { size := m.Size() return 
m.MarshalToSizedBuffer(dAtA[:size]) } func (m *Message_PingRequest) MarshalToSizedBuffer(dAtA []byte) (int, error) { i := len(dAtA) if m.PingRequest != nil { { size, err := m.PingRequest.MarshalToSizedBuffer(dAtA[:i]) if err != nil { return 0, err } i -= size i = encodeVarintTypes(dAtA, i, uint64(size)) } i-- dAtA[i] = 0x3a } return len(dAtA) - i, nil } func (m *Message_PingResponse) MarshalTo(dAtA []byte) (int, error) { size := m.Size() return m.MarshalToSizedBuffer(dAtA[:size]) } func (m *Message_PingResponse) MarshalToSizedBuffer(dAtA []byte) (int, error) { i := len(dAtA) if m.PingResponse != nil { { size, err := m.PingResponse.MarshalToSizedBuffer(dAtA[:i]) if err != nil { return 0, err } i -= size i = encodeVarintTypes(dAtA, i, uint64(size)) } i-- dAtA[i] = 0x42 } return len(dAtA) - i, nil } func encodeVarintTypes(dAtA []byte, offset int, v uint64) int { offset -= sovTypes(v) base := offset for v >= 1<<7 { dAtA[offset] = uint8(v&0x7f | 0x80) v >>= 7 offset++ } dAtA[offset] = uint8(v) return base } func (m *RemoteSignerError) Size() (n int) { if m == nil { return 0 } var l int _ = l if m.Code != 0 { n += 1 + sovTypes(uint64(m.Code)) } l = len(m.Description) if l > 0 { n += 1 + l + sovTypes(uint64(l)) } return n } func (m *PubKeyRequest) Size() (n int) { if m == nil { return 0 } var l int _ = l l = len(m.ChainId) if l > 0 { n += 1 + l + sovTypes(uint64(l)) } return n } func (m *PubKeyResponse) Size() (n int) { if m == nil { return 0 } var l int _ = l l = m.PubKey.Size() n += 1 + l + sovTypes(uint64(l)) if m.Error != nil { l = m.Error.Size() n += 1 + l + sovTypes(uint64(l)) } return n } func (m *SignVoteRequest) Size() (n int) { if m == nil { return 0 } var l int _ = l if m.Vote != nil { l = m.Vote.Size() n += 1 + l + sovTypes(uint64(l)) } l = len(m.ChainId) if l > 0 { n += 1 + l + sovTypes(uint64(l)) } return n } func (m *SignedVoteResponse) Size() (n int) { if m == nil { return 0 } var l int _ = l l = m.Vote.Size() n += 1 + l + sovTypes(uint64(l)) if m.Error != 
nil { l = m.Error.Size() n += 1 + l + sovTypes(uint64(l)) } return n } func (m *SignProposalRequest) Size() (n int) { if m == nil { return 0 } var l int _ = l if m.Proposal != nil { l = m.Proposal.Size() n += 1 + l + sovTypes(uint64(l)) } l = len(m.ChainId) if l > 0 { n += 1 + l + sovTypes(uint64(l)) } return n } func (m *SignedProposalResponse) Size() (n int) { if m == nil { return 0 } var l int _ = l l = m.Proposal.Size() n += 1 + l + sovTypes(uint64(l)) if m.Error != nil { l = m.Error.Size() n += 1 + l + sovTypes(uint64(l)) } return n } func (m *PingRequest) Size() (n int) { if m == nil { return 0 } var l int _ = l return n } func (m *PingResponse) Size() (n int) { if m == nil { return 0 } var l int _ = l return n } func (m *Message) Size() (n int) { if m == nil { return 0 } var l int _ = l if m.Sum != nil { n += m.Sum.Size() } return n } func (m *Message_PubKeyRequest) Size() (n int) { if m == nil { return 0 } var l int _ = l if m.PubKeyRequest != nil { l = m.PubKeyRequest.Size() n += 1 + l + sovTypes(uint64(l)) } return n } func (m *Message_PubKeyResponse) Size() (n int) { if m == nil { return 0 } var l int _ = l if m.PubKeyResponse != nil { l = m.PubKeyResponse.Size() n += 1 + l + sovTypes(uint64(l)) } return n } func (m *Message_SignVoteRequest) Size() (n int) { if m == nil { return 0 } var l int _ = l if m.SignVoteRequest != nil { l = m.SignVoteRequest.Size() n += 1 + l + sovTypes(uint64(l)) } return n } func (m *Message_SignedVoteResponse) Size() (n int) { if m == nil { return 0 } var l int _ = l if m.SignedVoteResponse != nil { l = m.SignedVoteResponse.Size() n += 1 + l + sovTypes(uint64(l)) } return n } func (m *Message_SignProposalRequest) Size() (n int) { if m == nil { return 0 } var l int _ = l if m.SignProposalRequest != nil { l = m.SignProposalRequest.Size() n += 1 + l + sovTypes(uint64(l)) } return n } func (m *Message_SignedProposalResponse) Size() (n int) { if m == nil { return 0 } var l int _ = l if m.SignedProposalResponse != nil { l = 
m.SignedProposalResponse.Size() n += 1 + l + sovTypes(uint64(l)) } return n } func (m *Message_PingRequest) Size() (n int) { if m == nil { return 0 } var l int _ = l if m.PingRequest != nil { l = m.PingRequest.Size() n += 1 + l + sovTypes(uint64(l)) } return n } func (m *Message_PingResponse) Size() (n int) { if m == nil { return 0 } var l int _ = l if m.PingResponse != nil { l = m.PingResponse.Size() n += 1 + l + sovTypes(uint64(l)) } return n } func sovTypes(x uint64) (n int) { return (math_bits.Len64(x|1) + 6) / 7 } func sozTypes(x uint64) (n int) { return sovTypes(uint64((x << 1) ^ uint64((int64(x) >> 63)))) } func (m *RemoteSignerError) Unmarshal(dAtA []byte) error { l := len(dAtA) iNdEx := 0 for iNdEx < l { preIndex := iNdEx var wire uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowTypes } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ wire |= uint64(b&0x7F) << shift if b < 0x80 { break } } fieldNum := int32(wire >> 3) wireType := int(wire & 0x7) if wireType == 4 { return fmt.Errorf("proto: RemoteSignerError: wiretype end group for non-group") } if fieldNum <= 0 { return fmt.Errorf("proto: RemoteSignerError: illegal tag %d (wire type %d)", fieldNum, wire) } switch fieldNum { case 1: if wireType != 0 { return fmt.Errorf("proto: wrong wireType = %d for field Code", wireType) } m.Code = 0 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowTypes } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ m.Code |= int32(b&0x7F) << shift if b < 0x80 { break } } case 2: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field Description", wireType) } var stringLen uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowTypes } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ stringLen |= uint64(b&0x7F) << shift if b < 0x80 { break } } intStringLen := int(stringLen) if intStringLen < 0 { return 
ErrInvalidLengthTypes } postIndex := iNdEx + intStringLen if postIndex < 0 { return ErrInvalidLengthTypes } if postIndex > l { return io.ErrUnexpectedEOF } m.Description = string(dAtA[iNdEx:postIndex]) iNdEx = postIndex default: iNdEx = preIndex skippy, err := skipTypes(dAtA[iNdEx:]) if err != nil { return err } if (skippy < 0) || (iNdEx+skippy) < 0 { return ErrInvalidLengthTypes } if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } iNdEx += skippy } } if iNdEx > l { return io.ErrUnexpectedEOF } return nil } func (m *PubKeyRequest) Unmarshal(dAtA []byte) error { l := len(dAtA) iNdEx := 0 for iNdEx < l { preIndex := iNdEx var wire uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowTypes } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ wire |= uint64(b&0x7F) << shift if b < 0x80 { break } } fieldNum := int32(wire >> 3) wireType := int(wire & 0x7) if wireType == 4 { return fmt.Errorf("proto: PubKeyRequest: wiretype end group for non-group") } if fieldNum <= 0 { return fmt.Errorf("proto: PubKeyRequest: illegal tag %d (wire type %d)", fieldNum, wire) } switch fieldNum { case 1: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field ChainId", wireType) } var stringLen uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowTypes } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ stringLen |= uint64(b&0x7F) << shift if b < 0x80 { break } } intStringLen := int(stringLen) if intStringLen < 0 { return ErrInvalidLengthTypes } postIndex := iNdEx + intStringLen if postIndex < 0 { return ErrInvalidLengthTypes } if postIndex > l { return io.ErrUnexpectedEOF } m.ChainId = string(dAtA[iNdEx:postIndex]) iNdEx = postIndex default: iNdEx = preIndex skippy, err := skipTypes(dAtA[iNdEx:]) if err != nil { return err } if (skippy < 0) || (iNdEx+skippy) < 0 { return ErrInvalidLengthTypes } if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } iNdEx += skippy } } if 
iNdEx > l { return io.ErrUnexpectedEOF } return nil } func (m *PubKeyResponse) Unmarshal(dAtA []byte) error { l := len(dAtA) iNdEx := 0 for iNdEx < l { preIndex := iNdEx var wire uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowTypes } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ wire |= uint64(b&0x7F) << shift if b < 0x80 { break } } fieldNum := int32(wire >> 3) wireType := int(wire & 0x7) if wireType == 4 { return fmt.Errorf("proto: PubKeyResponse: wiretype end group for non-group") } if fieldNum <= 0 { return fmt.Errorf("proto: PubKeyResponse: illegal tag %d (wire type %d)", fieldNum, wire) } switch fieldNum { case 1: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field PubKey", wireType) } var msglen int for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowTypes } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ msglen |= int(b&0x7F) << shift if b < 0x80 { break } } if msglen < 0 { return ErrInvalidLengthTypes } postIndex := iNdEx + msglen if postIndex < 0 { return ErrInvalidLengthTypes } if postIndex > l { return io.ErrUnexpectedEOF } if err := m.PubKey.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } iNdEx = postIndex case 2: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field Error", wireType) } var msglen int for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowTypes } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ msglen |= int(b&0x7F) << shift if b < 0x80 { break } } if msglen < 0 { return ErrInvalidLengthTypes } postIndex := iNdEx + msglen if postIndex < 0 { return ErrInvalidLengthTypes } if postIndex > l { return io.ErrUnexpectedEOF } if m.Error == nil { m.Error = &RemoteSignerError{} } if err := m.Error.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } iNdEx = postIndex default: iNdEx = preIndex skippy, err := skipTypes(dAtA[iNdEx:]) if err != 
nil { return err } if (skippy < 0) || (iNdEx+skippy) < 0 { return ErrInvalidLengthTypes } if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } iNdEx += skippy } } if iNdEx > l { return io.ErrUnexpectedEOF } return nil } func (m *SignVoteRequest) Unmarshal(dAtA []byte) error { l := len(dAtA) iNdEx := 0 for iNdEx < l { preIndex := iNdEx var wire uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowTypes } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ wire |= uint64(b&0x7F) << shift if b < 0x80 { break } } fieldNum := int32(wire >> 3) wireType := int(wire & 0x7) if wireType == 4 { return fmt.Errorf("proto: SignVoteRequest: wiretype end group for non-group") } if fieldNum <= 0 { return fmt.Errorf("proto: SignVoteRequest: illegal tag %d (wire type %d)", fieldNum, wire) } switch fieldNum { case 1: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field Vote", wireType) } var msglen int for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowTypes } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ msglen |= int(b&0x7F) << shift if b < 0x80 { break } } if msglen < 0 { return ErrInvalidLengthTypes } postIndex := iNdEx + msglen if postIndex < 0 { return ErrInvalidLengthTypes } if postIndex > l { return io.ErrUnexpectedEOF } if m.Vote == nil { m.Vote = &types.Vote{} } if err := m.Vote.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } iNdEx = postIndex case 2: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field ChainId", wireType) } var stringLen uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowTypes } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ stringLen |= uint64(b&0x7F) << shift if b < 0x80 { break } } intStringLen := int(stringLen) if intStringLen < 0 { return ErrInvalidLengthTypes } postIndex := iNdEx + intStringLen if postIndex < 0 { return ErrInvalidLengthTypes } if 
postIndex > l { return io.ErrUnexpectedEOF } m.ChainId = string(dAtA[iNdEx:postIndex]) iNdEx = postIndex default: iNdEx = preIndex skippy, err := skipTypes(dAtA[iNdEx:]) if err != nil { return err } if (skippy < 0) || (iNdEx+skippy) < 0 { return ErrInvalidLengthTypes } if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } iNdEx += skippy } } if iNdEx > l { return io.ErrUnexpectedEOF } return nil } func (m *SignedVoteResponse) Unmarshal(dAtA []byte) error { l := len(dAtA) iNdEx := 0 for iNdEx < l { preIndex := iNdEx var wire uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowTypes } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ wire |= uint64(b&0x7F) << shift if b < 0x80 { break } } fieldNum := int32(wire >> 3) wireType := int(wire & 0x7) if wireType == 4 { return fmt.Errorf("proto: SignedVoteResponse: wiretype end group for non-group") } if fieldNum <= 0 { return fmt.Errorf("proto: SignedVoteResponse: illegal tag %d (wire type %d)", fieldNum, wire) } switch fieldNum { case 1: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field Vote", wireType) } var msglen int for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowTypes } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ msglen |= int(b&0x7F) << shift if b < 0x80 { break } } if msglen < 0 { return ErrInvalidLengthTypes } postIndex := iNdEx + msglen if postIndex < 0 { return ErrInvalidLengthTypes } if postIndex > l { return io.ErrUnexpectedEOF } if err := m.Vote.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } iNdEx = postIndex case 2: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field Error", wireType) } var msglen int for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowTypes } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ msglen |= int(b&0x7F) << shift if b < 0x80 { break } } if msglen < 0 { return 
ErrInvalidLengthTypes } postIndex := iNdEx + msglen if postIndex < 0 { return ErrInvalidLengthTypes } if postIndex > l { return io.ErrUnexpectedEOF } if m.Error == nil { m.Error = &RemoteSignerError{} } if err := m.Error.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } iNdEx = postIndex default: iNdEx = preIndex skippy, err := skipTypes(dAtA[iNdEx:]) if err != nil { return err } if (skippy < 0) || (iNdEx+skippy) < 0 { return ErrInvalidLengthTypes } if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } iNdEx += skippy } } if iNdEx > l { return io.ErrUnexpectedEOF } return nil } func (m *SignProposalRequest) Unmarshal(dAtA []byte) error { l := len(dAtA) iNdEx := 0 for iNdEx < l { preIndex := iNdEx var wire uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowTypes } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ wire |= uint64(b&0x7F) << shift if b < 0x80 { break } } fieldNum := int32(wire >> 3) wireType := int(wire & 0x7) if wireType == 4 { return fmt.Errorf("proto: SignProposalRequest: wiretype end group for non-group") } if fieldNum <= 0 { return fmt.Errorf("proto: SignProposalRequest: illegal tag %d (wire type %d)", fieldNum, wire) } switch fieldNum { case 1: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field Proposal", wireType) } var msglen int for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowTypes } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ msglen |= int(b&0x7F) << shift if b < 0x80 { break } } if msglen < 0 { return ErrInvalidLengthTypes } postIndex := iNdEx + msglen if postIndex < 0 { return ErrInvalidLengthTypes } if postIndex > l { return io.ErrUnexpectedEOF } if m.Proposal == nil { m.Proposal = &types.Proposal{} } if err := m.Proposal.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } iNdEx = postIndex case 2: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field ChainId", wireType) } 
var stringLen uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowTypes } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ stringLen |= uint64(b&0x7F) << shift if b < 0x80 { break } } intStringLen := int(stringLen) if intStringLen < 0 { return ErrInvalidLengthTypes } postIndex := iNdEx + intStringLen if postIndex < 0 { return ErrInvalidLengthTypes } if postIndex > l { return io.ErrUnexpectedEOF } m.ChainId = string(dAtA[iNdEx:postIndex]) iNdEx = postIndex default: iNdEx = preIndex skippy, err := skipTypes(dAtA[iNdEx:]) if err != nil { return err } if (skippy < 0) || (iNdEx+skippy) < 0 { return ErrInvalidLengthTypes } if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } iNdEx += skippy } } if iNdEx > l { return io.ErrUnexpectedEOF } return nil } func (m *SignedProposalResponse) Unmarshal(dAtA []byte) error { l := len(dAtA) iNdEx := 0 for iNdEx < l { preIndex := iNdEx var wire uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowTypes } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ wire |= uint64(b&0x7F) << shift if b < 0x80 { break } } fieldNum := int32(wire >> 3) wireType := int(wire & 0x7) if wireType == 4 { return fmt.Errorf("proto: SignedProposalResponse: wiretype end group for non-group") } if fieldNum <= 0 { return fmt.Errorf("proto: SignedProposalResponse: illegal tag %d (wire type %d)", fieldNum, wire) } switch fieldNum { case 1: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field Proposal", wireType) } var msglen int for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowTypes } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ msglen |= int(b&0x7F) << shift if b < 0x80 { break } } if msglen < 0 { return ErrInvalidLengthTypes } postIndex := iNdEx + msglen if postIndex < 0 { return ErrInvalidLengthTypes } if postIndex > l { return io.ErrUnexpectedEOF } if err := 
m.Proposal.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } iNdEx = postIndex case 2: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field Error", wireType) } var msglen int for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowTypes } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ msglen |= int(b&0x7F) << shift if b < 0x80 { break } } if msglen < 0 { return ErrInvalidLengthTypes } postIndex := iNdEx + msglen if postIndex < 0 { return ErrInvalidLengthTypes } if postIndex > l { return io.ErrUnexpectedEOF } if m.Error == nil { m.Error = &RemoteSignerError{} } if err := m.Error.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } iNdEx = postIndex default: iNdEx = preIndex skippy, err := skipTypes(dAtA[iNdEx:]) if err != nil { return err } if (skippy < 0) || (iNdEx+skippy) < 0 { return ErrInvalidLengthTypes } if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } iNdEx += skippy } } if iNdEx > l { return io.ErrUnexpectedEOF } return nil } func (m *PingRequest) Unmarshal(dAtA []byte) error { l := len(dAtA) iNdEx := 0 for iNdEx < l { preIndex := iNdEx var wire uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowTypes } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ wire |= uint64(b&0x7F) << shift if b < 0x80 { break } } fieldNum := int32(wire >> 3) wireType := int(wire & 0x7) if wireType == 4 { return fmt.Errorf("proto: PingRequest: wiretype end group for non-group") } if fieldNum <= 0 { return fmt.Errorf("proto: PingRequest: illegal tag %d (wire type %d)", fieldNum, wire) } switch fieldNum { default: iNdEx = preIndex skippy, err := skipTypes(dAtA[iNdEx:]) if err != nil { return err } if (skippy < 0) || (iNdEx+skippy) < 0 { return ErrInvalidLengthTypes } if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } iNdEx += skippy } } if iNdEx > l { return io.ErrUnexpectedEOF } return nil } func (m *PingResponse) Unmarshal(dAtA []byte) error 
{ l := len(dAtA) iNdEx := 0 for iNdEx < l { preIndex := iNdEx var wire uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowTypes } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ wire |= uint64(b&0x7F) << shift if b < 0x80 { break } } fieldNum := int32(wire >> 3) wireType := int(wire & 0x7) if wireType == 4 { return fmt.Errorf("proto: PingResponse: wiretype end group for non-group") } if fieldNum <= 0 { return fmt.Errorf("proto: PingResponse: illegal tag %d (wire type %d)", fieldNum, wire) } switch fieldNum { default: iNdEx = preIndex skippy, err := skipTypes(dAtA[iNdEx:]) if err != nil { return err } if (skippy < 0) || (iNdEx+skippy) < 0 { return ErrInvalidLengthTypes } if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } iNdEx += skippy } } if iNdEx > l { return io.ErrUnexpectedEOF } return nil } func (m *Message) Unmarshal(dAtA []byte) error { l := len(dAtA) iNdEx := 0 for iNdEx < l { preIndex := iNdEx var wire uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowTypes } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ wire |= uint64(b&0x7F) << shift if b < 0x80 { break } } fieldNum := int32(wire >> 3) wireType := int(wire & 0x7) if wireType == 4 { return fmt.Errorf("proto: Message: wiretype end group for non-group") } if fieldNum <= 0 { return fmt.Errorf("proto: Message: illegal tag %d (wire type %d)", fieldNum, wire) } switch fieldNum { case 1: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field PubKeyRequest", wireType) } var msglen int for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowTypes } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ msglen |= int(b&0x7F) << shift if b < 0x80 { break } } if msglen < 0 { return ErrInvalidLengthTypes } postIndex := iNdEx + msglen if postIndex < 0 { return ErrInvalidLengthTypes } if postIndex > l { return io.ErrUnexpectedEOF } v := &PubKeyRequest{} if 
err := v.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } m.Sum = &Message_PubKeyRequest{v} iNdEx = postIndex case 2: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field PubKeyResponse", wireType) } var msglen int for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowTypes } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ msglen |= int(b&0x7F) << shift if b < 0x80 { break } } if msglen < 0 { return ErrInvalidLengthTypes } postIndex := iNdEx + msglen if postIndex < 0 { return ErrInvalidLengthTypes } if postIndex > l { return io.ErrUnexpectedEOF } v := &PubKeyResponse{} if err := v.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } m.Sum = &Message_PubKeyResponse{v} iNdEx = postIndex case 3: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field SignVoteRequest", wireType) } var msglen int for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowTypes } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ msglen |= int(b&0x7F) << shift if b < 0x80 { break } } if msglen < 0 { return ErrInvalidLengthTypes } postIndex := iNdEx + msglen if postIndex < 0 { return ErrInvalidLengthTypes } if postIndex > l { return io.ErrUnexpectedEOF } v := &SignVoteRequest{} if err := v.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } m.Sum = &Message_SignVoteRequest{v} iNdEx = postIndex case 4: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field SignedVoteResponse", wireType) } var msglen int for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowTypes } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ msglen |= int(b&0x7F) << shift if b < 0x80 { break } } if msglen < 0 { return ErrInvalidLengthTypes } postIndex := iNdEx + msglen if postIndex < 0 { return ErrInvalidLengthTypes } if postIndex > l { return io.ErrUnexpectedEOF } v := &SignedVoteResponse{} if err := 
v.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } m.Sum = &Message_SignedVoteResponse{v} iNdEx = postIndex case 5: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field SignProposalRequest", wireType) } var msglen int for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowTypes } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ msglen |= int(b&0x7F) << shift if b < 0x80 { break } } if msglen < 0 { return ErrInvalidLengthTypes } postIndex := iNdEx + msglen if postIndex < 0 { return ErrInvalidLengthTypes } if postIndex > l { return io.ErrUnexpectedEOF } v := &SignProposalRequest{} if err := v.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } m.Sum = &Message_SignProposalRequest{v} iNdEx = postIndex case 6: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field SignedProposalResponse", wireType) } var msglen int for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowTypes } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ msglen |= int(b&0x7F) << shift if b < 0x80 { break } } if msglen < 0 { return ErrInvalidLengthTypes } postIndex := iNdEx + msglen if postIndex < 0 { return ErrInvalidLengthTypes } if postIndex > l { return io.ErrUnexpectedEOF } v := &SignedProposalResponse{} if err := v.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } m.Sum = &Message_SignedProposalResponse{v} iNdEx = postIndex case 7: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field PingRequest", wireType) } var msglen int for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowTypes } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ msglen |= int(b&0x7F) << shift if b < 0x80 { break } } if msglen < 0 { return ErrInvalidLengthTypes } postIndex := iNdEx + msglen if postIndex < 0 { return ErrInvalidLengthTypes } if postIndex > l { return io.ErrUnexpectedEOF } v := &PingRequest{} if 
err := v.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } m.Sum = &Message_PingRequest{v} iNdEx = postIndex case 8: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field PingResponse", wireType) } var msglen int for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowTypes } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ msglen |= int(b&0x7F) << shift if b < 0x80 { break } } if msglen < 0 { return ErrInvalidLengthTypes } postIndex := iNdEx + msglen if postIndex < 0 { return ErrInvalidLengthTypes } if postIndex > l { return io.ErrUnexpectedEOF } v := &PingResponse{} if err := v.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } m.Sum = &Message_PingResponse{v} iNdEx = postIndex default: iNdEx = preIndex skippy, err := skipTypes(dAtA[iNdEx:]) if err != nil { return err } if (skippy < 0) || (iNdEx+skippy) < 0 { return ErrInvalidLengthTypes } if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } iNdEx += skippy } } if iNdEx > l { return io.ErrUnexpectedEOF } return nil } func skipTypes(dAtA []byte) (n int, err error) { l := len(dAtA) iNdEx := 0 depth := 0 for iNdEx < l { var wire uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return 0, ErrIntOverflowTypes } if iNdEx >= l { return 0, io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ wire |= (uint64(b) & 0x7F) << shift if b < 0x80 { break } } wireType := int(wire & 0x7) switch wireType { case 0: for shift := uint(0); ; shift += 7 { if shift >= 64 { return 0, ErrIntOverflowTypes } if iNdEx >= l { return 0, io.ErrUnexpectedEOF } iNdEx++ if dAtA[iNdEx-1] < 0x80 { break } } case 1: iNdEx += 8 case 2: var length int for shift := uint(0); ; shift += 7 { if shift >= 64 { return 0, ErrIntOverflowTypes } if iNdEx >= l { return 0, io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ length |= (int(b) & 0x7F) << shift if b < 0x80 { break } } if length < 0 { return 0, ErrInvalidLengthTypes } iNdEx += length case 3: depth++ case 4: if depth == 0 
{ return 0, ErrUnexpectedEndOfGroupTypes } depth-- case 5: iNdEx += 4 default: return 0, fmt.Errorf("proto: illegal wireType %d", wireType) } if iNdEx < 0 { return 0, ErrInvalidLengthTypes } if depth == 0 { return iNdEx, nil } } return 0, io.ErrUnexpectedEOF } var ( ErrInvalidLengthTypes = fmt.Errorf("proto: negative length found during unmarshaling") ErrIntOverflowTypes = fmt.Errorf("proto: integer overflow") ErrUnexpectedEndOfGroupTypes = fmt.Errorf("proto: unexpected end of group") )
<reponame>tadashi-aikawa/todoistoggl
import * as Axios from 'axios';
import {stringify} from 'query-string';

import Sync from '../../models/api/todoist/Sync';

// The URL differs during development because requests go through a proxy:
// hitting the API directly from electron-dev-server trips CORS during debugging.
const baseURL = 'https://todoist.com/API/v7/';

// Fetch a full Todoist sync payload for the given API token.
// Requests only the "items" resource, with sync_token '*' (i.e. a full,
// non-incremental sync), and unwraps the response body into a Sync model.
const fetchSync = async(token: string): Promise<Sync> =>
    await Axios.post(
        '/sync',
        stringify({
            token,
            sync_token: '*',
            resource_types: '["items"]'
        }),
        {baseURL}
    ).then(r => <Sync>(r.data));

export { fetchSync }
#!/bin/bash set -eu set -o pipefail OX_INSTALL_DIRECTORY=${OX_INSTALL_DIRECTORY?="Orchestra SDK directory (OX_INSTALL_DIRECTORY) not set!"} TOOLBOX_PATH=${TOOLBOX_PATH?="BART directory (TOOLBOX_PATH) not set!"} export CC=${CC:=gcc-4.8} export CXX=${CXX:=g++-4.8} VERBOSE=${VERBOSE:=0} mkdir -p build pushd build cmake -DOX_INSTALL_DIRECTORY=${OX_INSTALL_DIRECTORY} ../src if [[ ${VERBOSE} -gt "0" ]] ; then make -j64 VERBOSE=1 else make -j64 fi popd
package com.netcracker.ncstore.exception.general;

/**
 * General exception for any action that ended with permission denied.
 * Web services should throw this exception when wrapping exceptions from business services.
 */
public class GeneralPermissionDeniedException extends RuntimeException {
    public GeneralPermissionDeniedException() {
    }

    public GeneralPermissionDeniedException(String message) {
        super(message);
    }

    public GeneralPermissionDeniedException(String message, Throwable cause) {
        super(message, cause);
    }
}
/// <reference types="node" />
import { inspect, InspectOptions } from "util";
import { MultiError } from "verror";
import VError = require("verror");
/** Options handed to a custom util.inspect implementation, including the stylize helper. */
interface CustomInspectOptions extends InspectOptions {
    stylize(s: string, style: string): string;
}
/** Implemented by errors that supply a custom util.inspect rendering. */
interface HasCustomInspect {
    [inspect.custom]?(depth: number, options: CustomInspectOptions): string;
}
/** A VError variant whose util.inspect output is pretty-printed. */
export declare class PrettyVError extends VError implements HasCustomInspect {
    /**
     * Collapse a list of errors to a single value.
     * Presumably: null for an empty list, the sole error for one element,
     * a PrettyMultiError otherwise -- verify against the implementation.
     */
    static errorFromList<T extends Error>(errors: T[]): null | T | PrettyMultiError;
    private _ownStack?;
    constructor(options: VError.Options | Error, message: string, ...params: any[]);
    constructor(message?: string, ...params: any[]);
    [inspect.custom](depth?: number, options?: CustomInspectOptions): string;
}
/** A MultiError variant whose util.inspect output is pretty-printed. */
export declare class PrettyMultiError extends MultiError implements HasCustomInspect {
    private _ownStack?;
    constructor(errors: Error[]);
    [inspect.custom](depth?: number, options?: CustomInspectOptions): string;
}
export {};
# Given a string, find the length of the longest substring without repeating characters.

import unittest


class Solution:
    def lengthOfLongestSubstring(self, s):
        """Return the length of the longest substring of *s* without repeating characters.

        Sliding-window scan in O(len(s)): ``start`` is the left edge of the
        current repeat-free window and ``last_seen`` maps each character to
        its most recent index. On a repeat inside the window we advance
        ``start`` just past the previous occurrence instead of resetting the
        whole window (the old reset-on-repeat version lost the suffix of the
        window and e.g. returned 2 instead of 3 for 'dvdf').
        """
        last_seen = {}  # char -> index of its most recent occurrence
        start = 0       # left edge (inclusive) of the current window
        best = 0
        for i, c in enumerate(s):
            if c in last_seen and last_seen[c] >= start:
                # c repeats inside the window: slide the left edge past it.
                start = last_seen[c] + 1
            last_seen[c] = i
            best = max(best, i - start + 1)
        return best


class Test(unittest.TestCase):
    def test(self):
        self.assertEqual(Solution().lengthOfLongestSubstring('abrkaabcdefghijjxxx'), 10)
        # Regression: the reset-on-repeat implementation returned 2 here; 'vdf' is length 3.
        self.assertEqual(Solution().lengthOfLongestSubstring('dvdf'), 3)
        self.assertEqual(Solution().lengthOfLongestSubstring('abba'), 2)
        self.assertEqual(Solution().lengthOfLongestSubstring(''), 0)
        self.assertEqual(Solution().lengthOfLongestSubstring('bbbbb'), 1)
/**
 * Merge two already-sorted arrays into a single sorted array.
 *
 * Compares by index rather than by element truthiness: the previous
 * implementation drove its loop with `while (arr1Item || arr2Item)`, so any
 * falsy element (0, '', null, false) ended the merge early -- e.g.
 * mergeSortedArrays([0], [0]) returned []. Stable: on ties, elements from
 * arr1 are emitted first.
 *
 * @param {Array} arr1 - sorted input array
 * @param {Array} arr2 - sorted input array
 * @returns {Array} new array containing all elements of both inputs, sorted
 */
function mergeSortedArrays(arr1, arr2) {
  const merged = [];
  let i = 0;
  let j = 0;
  // Standard two-pointer merge: take the smaller head each step.
  while (i < arr1.length && j < arr2.length) {
    if (arr1[i] <= arr2[j]) {
      merged.push(arr1[i++]);
    } else {
      merged.push(arr2[j++]);
    }
  }
  // Drain whichever input still has elements left.
  while (i < arr1.length) merged.push(arr1[i++]);
  while (j < arr2.length) merged.push(arr2[j++]);
  return merged;
}
<reponame>NYCMOTI/open-bid
# Wraps a time, converts it to DC local time, and forces the wall-clock time
# to a default of 13:00; exposes the pieces as display strings.
class DefaultDateTime
  # Default hour/minute applied in #convert.
  # NOTE(review): these are strings, not integers; they are passed straight to
  # ActiveSupport's #change below -- confirm string coercion is intended.
  HOUR = "13".freeze
  MINUTE = "00".freeze

  attr_reader :dc_time

  # time defaults to the current time; converted to DC time via DcTimePresenter.
  def initialize(time = Time.current)
    @dc_time = DcTimePresenter.new(time).convert
  end

  # DC time with hour/minute replaced by the defaults and seconds zeroed; memoized.
  def convert
    @_converted ||= dc_time.change(hour: HOUR, min: MINUTE, sec: 0)
  end

  # 12-hour clock hour as a string without padding (e.g. "1").
  def hour
    convert.strftime('%l').strip
  end

  # Zero-padded minute (e.g. "00").
  def minute
    convert.strftime('%M').strip
  end

  # "AM" or "PM".
  def meridiem
    convert.strftime('%p').strip
  end
end
#!/usr/bin/bash
# rez install step for OpenEXR: runs "make install" inside the build tree
# produced by the earlier build step. Expects the build path as $1 and the
# package version from rez's REZ_BUILD_PROJECT_VERSION.

# Will exit the Bash script the moment any command will itself exit with a non-zero status, thus an error.
set -e

BUILD_PATH=$1
OPENEXR_VERSION=${REZ_BUILD_PROJECT_VERSION}

# We print the arguments passed to the Bash script.
echo -e "\n"
echo -e "==============="
echo -e "=== INSTALL ==="
echo -e "==============="
echo -e "\n"

echo -e "[INSTALL][ARGS] BUILD PATH: ${BUILD_PATH}"
echo -e "[INSTALL][ARGS] OPENEXR VERSION: ${OPENEXR_VERSION}"

# We check if the arguments variables we need are correctly set.
# If not, we abort the process.
if [[ -z ${BUILD_PATH} || -z ${OPENEXR_VERSION} ]]; then
    echo -e "\n"
    echo -e "[INSTALL][ARGS] One or more of the argument variables are empty. Aborting..."
    echo -e "\n"

    exit 1
fi

# We install OpenEXR.
echo -e "\n"
echo -e "[INSTALL] Installing OpenEXR-${OPENEXR_VERSION}..."
echo -e "\n"

cd ${BUILD_PATH}

# Run the Makefile's "install" target, parallelised over the rez-provided
# thread count.
make \
    -j${REZ_BUILD_THREAD_COUNT} \
    install

echo -e "\n"
echo -e "[INSTALL] Finished installing OpenEXR-${OPENEXR_VERSION}!"
echo -e "\n"
#!/bin/bash
# Instance bootstrap (user-data) script: installs Docker CE on Ubuntu and runs
# a PostgreSQL 13.1 container with host-mounted data.
# NOTE(review): ${db_master_pass} / ${db_master_user} look like Terraform
# template placeholders rather than shell variables -- this script is
# presumably rendered by templatefile() before use; confirm before running
# standalone (under plain bash they would expand empty).
set -x

# Capture all script output for post-boot debugging.
exec &> /tmp/cloud-init.log

apt-get update
# Prerequisites for adding Docker's apt repository over HTTPS.
apt-get install -y apt-transport-https \
  ca-certificates curl gnupg-agent \
  software-properties-common
# Trust Docker's package-signing key and register the stable repository
# for this Ubuntu release.
curl -fsSL https://download.docker.com/linux/ubuntu/gpg | apt-key add -
add-apt-repository \
  "deb [arch=amd64] https://download.docker.com/linux/ubuntu \
  $(lsb_release -cs) \
  stable"
apt-get update
apt-get install -y docker-ce docker-ce-cli containerd.io

# Host directory backing the container's data volume.
mkdir -p /root/data

sleep 10

# Run PostgreSQL detached, with credentials injected via environment and the
# data directory persisted on the host.
docker run -d \
  --env POSTGRES_PASSWORD=${db_master_pass} \
  --env POSTGRES_USER=${db_master_user} -v /root/data:/var/lib/postgresql/data \
  -p 5432:5432 \
  postgres:13.1
/*
 * Copyright © 2019 Apple Inc. and the ServiceTalk project authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.servicetalk.concurrent.jdkflow;

import io.servicetalk.concurrent.PublisherSource;
import io.servicetalk.concurrent.api.TestPublisher;
import io.servicetalk.concurrent.api.TestSubscription;
import io.servicetalk.concurrent.internal.ScalarValueSubscription;

import org.junit.jupiter.api.Test;
import org.mockito.ArgumentCaptor;

import java.util.concurrent.ExecutionException;
import java.util.concurrent.Flow.Publisher;
import java.util.concurrent.Flow.Subscriber;
import java.util.concurrent.Flow.Subscription;
import java.util.concurrent.Future;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.BiConsumer;

import static io.servicetalk.concurrent.api.Publisher.failed;
import static io.servicetalk.concurrent.api.Publisher.from;
import static io.servicetalk.concurrent.internal.DeliberateException.DELIBERATE_EXCEPTION;
import static io.servicetalk.concurrent.internal.EmptySubscriptions.EMPTY_SUBSCRIPTION;
import static io.servicetalk.concurrent.jdkflow.JdkFlowAdapters.fromFlowPublisher;
import static io.servicetalk.concurrent.jdkflow.JdkFlowAdapters.toFlowPublisher;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.sameInstance;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyLong;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoMoreInteractions;

/**
 * Tests for {@code JdkFlowAdapters}: conversion between JDK {@link java.util.concurrent.Flow}
 * publishers and ServiceTalk publishers in both directions, covering the success, error and
 * cancellation paths.
 */
class JdkFlowAdaptersTest {

    /** Flow -> ServiceTalk: a single emitted item followed by completion is delivered. */
    @Test
    void fromFlowSuccess() throws Exception {
        Publisher<Integer> flowPublisher = newMockFlowPublisher((subscriber, __) -> {
            subscriber.onNext(1);
            subscriber.onComplete();
        });
        Integer result = fromFlowPublisher(flowPublisher).firstOrElse(() -> null).toFuture().get();
        assertThat("Unexpected result", result, is(1));
    }

    /** Flow -> ServiceTalk: a terminal error surfaces as the Future's cause, untouched. */
    @Test
    void fromFlowError() {
        Publisher<Integer> flowPublisher = newMockFlowPublisher((subscriber, __) ->
                subscriber.onError(DELIBERATE_EXCEPTION));
        Future<Integer> future = fromFlowPublisher(flowPublisher).firstOrElse(() -> null).toFuture();
        ExecutionException ex = assertThrows(ExecutionException.class, future::get);
        assertThat(ex.getCause(), sameInstance(DELIBERATE_EXCEPTION));
    }

    /** Flow -> ServiceTalk: cancelling the Future cancels the upstream Flow Subscription. */
    @Test
    void fromFlowCancel() {
        AtomicReference<Subscription> receivedSubscription = new AtomicReference<>();
        Publisher<Integer> flowPublisher = newMockFlowPublisher((__, subscription) ->
                receivedSubscription.set(subscription));
        fromFlowPublisher(flowPublisher).firstOrElse(() -> null).toFuture().cancel(true);
        Subscription subscription = receivedSubscription.get();
        assertThat("Subscription not received.", subscription, is(notNullValue()));
        verify(subscription).cancel();
    }

    /** ServiceTalk -> Flow: item + completion from a ServiceTalk Publisher. */
    @Test
    void toFlowSuccess() {
        verifyFlowSuccess(toFlowPublisherAndSubscribe(from(1)));
    }

    /** ServiceTalk -> Flow: item + completion from a raw PublisherSource. */
    @Test
    void toFlowFromSourceSuccess() {
        PublisherSource<Integer> source = s -> s.onSubscribe(new ScalarValueSubscription<>(1, s));
        verifyFlowSuccess(toFlowPublisherFromSourceAndSubscribe(source));
    }

    /** Asserts the Flow Subscriber saw exactly onSubscribe, onNext(1), onComplete. */
    private void verifyFlowSuccess(final Subscriber<Integer> subscriber) {
        verify(subscriber).onSubscribe(any());
        verify(subscriber).onNext(1);
        verify(subscriber).onComplete();
        verifyNoMoreInteractions(subscriber);
    }

    /** ServiceTalk -> Flow: error from a ServiceTalk Publisher. */
    @Test
    void toFlowError() {
        verifyFlowError(toFlowPublisherAndSubscribe(failed(DELIBERATE_EXCEPTION)));
    }

    /** ServiceTalk -> Flow: error from a raw PublisherSource. */
    @Test
    void toFlowFromSourceError() {
        PublisherSource<Integer> source = s -> {
            s.onSubscribe(EMPTY_SUBSCRIPTION);
            s.onError(DELIBERATE_EXCEPTION);
        };
        verifyFlowError(toFlowPublisherFromSourceAndSubscribe(source));
    }

    /** Asserts the Flow Subscriber saw exactly onSubscribe then onError with the same exception. */
    private void verifyFlowError(final Subscriber<Integer> subscriber) {
        verify(subscriber).onSubscribe(any());
        verify(subscriber).onError(DELIBERATE_EXCEPTION);
        verifyNoMoreInteractions(subscriber);
    }

    /** ServiceTalk -> Flow: cancelling the Flow Subscription cancels the ServiceTalk one. */
    @Test
    void toFlowCancel() {
        TestPublisher<Integer> stPublisher = new TestPublisher<>();
        Subscriber<Integer> subscriber = toFlowPublisherAndSubscribe(stPublisher);
        TestSubscription subscription = new TestSubscription();
        stPublisher.onSubscribe(subscription);
        assertThat("Source not subscribed.", stPublisher.isSubscribed(), is(true));
        ArgumentCaptor<Subscription> subscriptionCaptor = ArgumentCaptor.forClass(Subscription.class);
        verify(subscriber).onSubscribe(subscriptionCaptor.capture());
        subscriptionCaptor.getValue().cancel();
        assertThat("Subscription not cancelled.", subscription.isCancelled(), is(true));
    }

    /** ServiceTalk -> Flow: cancellation propagates from a Flow Subscription to the source's. */
    @Test
    void toFlowFromSourceCancel() {
        PublisherSource.Subscription srcSubscription = mock(PublisherSource.Subscription.class);
        PublisherSource<Integer> source = s -> s.onSubscribe(srcSubscription);
        Subscriber<Integer> subscriber = toFlowPublisherFromSourceAndSubscribe(source);
        ArgumentCaptor<Subscription> flowSubscriptionCaptor = ArgumentCaptor.forClass(Subscription.class);
        verify(subscriber).onSubscribe(flowSubscriptionCaptor.capture());
        flowSubscriptionCaptor.getValue().cancel();
        verify(srcSubscription).cancel();
    }

    /** Adapts a ServiceTalk Publisher to Flow and subscribes a mock Subscriber to it. */
    private Subscriber<Integer> toFlowPublisherAndSubscribe(
            final io.servicetalk.concurrent.api.Publisher<Integer> stPublisher) {
        Publisher<Integer> flowPublisher = toFlowPublisher(stPublisher);
        return subscribeToFlowPublisher(flowPublisher);
    }

    /** Adapts a raw PublisherSource to Flow and subscribes a mock Subscriber to it. */
    private Subscriber<Integer> toFlowPublisherFromSourceAndSubscribe(final PublisherSource<Integer> source) {
        Publisher<Integer> flowPublisher = toFlowPublisher(source);
        return subscribeToFlowPublisher(flowPublisher);
    }

    /**
     * Subscribes a Mockito mock to the Flow publisher, captures its Subscription,
     * and requests a single item so the source may deliver.
     */
    private Subscriber<Integer> subscribeToFlowPublisher(final Publisher<Integer> flowPublisher) {
        @SuppressWarnings("unchecked")
        Subscriber<Integer> subscriber = mock(Subscriber.class);
        flowPublisher.subscribe(subscriber);
        ArgumentCaptor<Subscription> subscriptionCaptor = ArgumentCaptor.forClass(Subscription.class);
        verify(subscriber).onSubscribe(subscriptionCaptor.capture());
        subscriptionCaptor.getValue().request(1);
        return subscriber;
    }

    /**
     * Builds a mock Flow Publisher that hands each Subscriber a mock Subscription and,
     * on the first request(n), invokes {@code subscriberTerminator} to emit/terminate.
     */
    private Publisher<Integer> newMockFlowPublisher(
            BiConsumer<Subscriber<? super Integer>, Subscription> subscriberTerminator) {
        @SuppressWarnings("unchecked")
        Publisher<Integer> flowPublisher = mock(Publisher.class);
        doAnswer(invocation -> {
            Subscriber<? super Integer> subscriber = invocation.getArgument(0);
            Subscription subscription = mock(Subscription.class);
            doAnswer(invocation1 -> {
                subscriberTerminator.accept(subscriber, subscription);
                return null;
            }).when(subscription).request(anyLong());
            subscriber.onSubscribe(subscription);
            return null;
        }).when(flowPublisher).subscribe(any());
        return flowPublisher;
    }
}
#!/bin/bash
# Simple script to create backups of the bety database. This script will
# create a copy of the database daily, weekly, monthly and yearly. The
# files will be called:
# - bety-d-X, daily backup, where X is the day of the month.
# - bety-w-X, weekly backup, where X is the week number in the year
# - bety-m-X, monthly backup, where X is the month of the year
# - bety-y-X, yearly backup, where X is the actual year.
# Each slot is overwritten on its next occurrence, giving a fixed-size
# rotation (31 dailies, 53 weeklies, 12 monthlies, one file per year).

# variables to use for the database dump
DATABASE=bety
BETYUSER=bety

# location where backup should be written (next to this script)
BACKUPDIR=$( dirname $0 )/backup
mkdir -p ${BACKUPDIR}

# set path if needed
#export PATH=<location to postgresql>/bin;${PATH}

# some handy variables
TODAY=$( date +"%d" )
TOMORROW=$( date -d "tomorrow" +"%d" )   # "01" means today is the last day of the month
DOW=$( date +"%u" )                      # ISO day of week, 1..7 (7 = Sunday)
WEEK=$( date +"%W" )
MONTH=$( date +"%m" )
YEAR=$( date +"%Y" )

# DAILY BACKUP
pg_dump -U ${BETYUSER} -d ${DATABASE} | gzip -9 > ${BACKUPDIR}/bety-d-${TODAY}.sql.gz

# WEEKLY BACKUP (Sundays)
if [ "${DOW}" == "7" ]; then
  pg_dump -U ${BETYUSER} -d ${DATABASE} | gzip -9 > ${BACKUPDIR}/bety-w-${WEEK}.sql.gz
fi

# MONTHLY BACKUP (last day of the month)
if [ "${TOMORROW}" == "01" ]; then
  pg_dump -U ${BETYUSER} -d ${DATABASE} | gzip -9 > ${BACKUPDIR}/bety-m-${MONTH}.sql.gz
fi

# YEARLY BACKUP (December 31st)
if [ "${TOMORROW}" == "01" -a "${MONTH}" == "12" ]; then
  pg_dump -U ${BETYUSER} -d ${DATABASE} | gzip -9 > ${BACKUPDIR}/bety-y-${YEAR}.sql.gz
fi
#ifndef _LCOM_I8254_H_ #define _LCOM_I8254_H_ #include <lcom/lcf.h> /** @defgroup i8254 i8254 * @{ * * Constants for programming the i8254 Timer. Needs to be completed. */ #define TIMER_FREQ 1193182 /**< @brief clock frequency for timer in PC and AT */ #define TIMER0_IRQ 0 /**< @brief Timer 0 IRQ line */ #define TIMER_MAX_FREQ 1193182 /**< @brief maximum clock frequency accepted */ #define TIMER_MIN_FREQ 19 /**< @brief minimum clock frequency accepted */ /* I/O port addresses */ #define TIMER_0 0x40 /**< @brief Timer 0 count register */ #define TIMER_1 0x41 /**< @brief Timer 1 count register */ #define TIMER_2 0x42 /**< @brief Timer 2 count register */ #define TIMER_PORT(n) (0x40+n) /**< @brief General Timer count register */ #define TIMER_CTRL 0x43 /**< @brief Control register */ #define SPEAKER_CTRL 0x61 /**< @brief Register for speaker control */ /* Timer control */ /* Timer selection: bits 7 and 6 */ #define TIMER_SEL0 0x00 /**< @brief Control Word for Timer 0 */ #define TIMER_SEL1 BIT(6) /**< @brief Control Word for Timer 1 */ #define TIMER_SEL2 BIT(7) /**< @brief Control Word for Timer 2 */ #define TIMER_SEL(n) ((0x00 + n) << 6) /**< @brief Generic Timer Control Word */ #define TIMER_RB_CMD (BIT(7) | BIT(6)) /**< @brief Read Back Command */ /* Register selection: bits 5 and 4 */ #define TIMER_LSB BIT(4) /**< @brief Initialize Counter LSB only */ #define TIMER_MSB BIT(5) /**< @brief Initialize Counter MSB only */ #define TIMER_LSB_MSB (TIMER_LSB | TIMER_MSB) /**< @brief Initialize LSB first and MSB afterwards */ /* Operating mode: bits 3, 2 and 1 */ #define TIMER_SQR_WAVE (BIT(2) | BIT(1)) /**< @brief Mode 3: square wave generator */ #define TIMER_RATE_GEN BIT(2) /**< @brief Mode 2: rate generator */ /* Counting mode: bit 0 */ #define TIMER_BCD 0x01 /**< @brief Count in BCD */ #define TIMER_BIN 0x00 /**< @brief Count in binary */ /* READ-BACK COMMAND FORMAT */ #define TIMER_RB_COUNT_ BIT(5) #define TIMER_RB_STATUS_ BIT(4) #define TIMER_RB_SEL(n) BIT((n) + 1) 
/* READ-BACK COMMAND MASKS */ #define TIMER_COUNT_MODE_MASK (BIT(3) | BIT(2) | BIT(1)) /**< @brief Mask for count mode */ #define TIMER_INIT_DCB_MASK BIT(2) /**< @brief Mask for "don't care bits" in inital mode */ /* FREQUENCY SETTING MASKS */ #define TIMER_PRESERVE_MODE (BIT(3) | BIT(2) | BIT(1) | BIT(0)) /**< @brief Mask for preserving the timer's first 4 bits in the configuration */ /** * @} */ #endif /* _LCOM_I8254_H */
# Evaluate saved distill-chain LSTM-gate checkpoints one by one on the
# validation split. select.py emits the checkpoint step numbers to test.
GPU_ID=1
EVERY=1000   # evaluate every EVERY-th saved checkpoint
MODEL=LstmGateModel
MODEL_DIR="../model/frame_level_lstm_gate_distillchain_v2_model"
EVAL_DIR="../model/frame_level_lstm_gate_distillchain_v2_model"

start=0      # skip checkpoints at or below this global step
DIR="$(pwd)"
for checkpoint in $(cd $MODEL_DIR && python ${DIR}/training_utils/select.py $EVERY); do
  echo $checkpoint;
  if [[ $checkpoint -gt $start ]]; then
    echo $checkpoint;
    # Run a single evaluation pass against the chosen checkpoint.
    CUDA_VISIBLE_DEVICES=$GPU_ID python eval_distill.py \
      --train_dir="$EVAL_DIR" \
      --model_checkpoint_path="${MODEL_DIR}/model.ckpt-${checkpoint}" \
      --eval_data_pattern="/Youtube-8M/data/frame/validate/validatea*" \
      --distill_data_pattern="/Youtube-8M/model_predictions/validatea/distillation/ensemble_v2_matrix_model/*.tfrecord" \
      --frame_features=True \
      --feature_names="rgb,audio" \
      --distill_names="predictions" \
      --feature_sizes="1024,128" \
      --distill_sizes="4716" \
      --batch_size=64 \
      --model=$MODEL \
      --video_level_classifier_model=MoeDistillChainModel \
      --moe_num_mixtures=8 \
      --distillation_features=True \
      --distillation_type=0 \
      --ensemble_w=0.2 \
      --train=False \
      --run_once=True
  fi
done
# Read-only view object wrapping a Skill for display: exposes its name and
# the auction counts computed by SkillQuery.
class SkillPresenter
  attr_reader :skill

  def initialize(skill)
    @skill = skill
  end

  # The underlying skill's display name.
  def name
    skill.name
  end

  # Count of auctions evaluated for this skill.
  def evaluated_auction_count
    query.evaluated_auction_count
  end

  # Count of auctions accepted for this skill.
  def accepted_auction_count
    query.accepted_auction_count
  end

  private

  # Fresh query object per call, matching the original's behavior of not
  # caching query results across invocations.
  def query
    SkillQuery.new(skill)
  end
end
#!/bin/sh
# CocoaPods-generated "Embed Pods Frameworks" build-phase script: copies each
# vendored/built framework into the app bundle, strips architectures the
# current build does not target, re-signs, and handles dSYM/bcsymbolmap files.
set -e
set -u
set -o pipefail

function on_error {
  echo "$(realpath -mq "${0}"):$1: error: Unexpected failure"
}
trap 'on_error $LINENO' ERR

if [ -z ${FRAMEWORKS_FOLDER_PATH+x} ]; then
  # If FRAMEWORKS_FOLDER_PATH is not set, then there's nowhere for us to copy
  # frameworks to, so exit 0 (signalling the script phase was successful).
  exit 0
fi

echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

COCOAPODS_PARALLEL_CODE_SIGN="${COCOAPODS_PARALLEL_CODE_SIGN:-false}"
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"

# Used as a return value for each invocation of `strip_invalid_archs` function.
STRIP_BINARY_RETVAL=0

# This protects against multiple targets copying the same framework dependency at the same time. The solution
# was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html
RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????")

# Copies and strips a vendored framework
install_framework()
{
  # Resolve the framework path: prefer the build products dir, then its
  # basename there, then the literal argument.
  if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$1"
  elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
  elif [ -r "$1" ]; then
    local source="$1"
  fi

  local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

  if [ -L "${source}" ]; then
    echo "Symlinked..."
    source="$(readlink "${source}")"
  fi

  # Use filter instead of exclude so missing patterns don't throw errors.
  echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
  rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"

  local basename
  basename="$(basename -s .framework "$1")"
  binary="${destination}/${basename}.framework/${basename}"

  if ! [ -r "$binary" ]; then
    binary="${destination}/${basename}"
  elif [ -L "${binary}" ]; then
    echo "Destination binary is symlinked..."
    dirname="$(dirname "${binary}")"
    binary="${dirname}/$(readlink "${binary}")"
  fi

  # Strip invalid architectures so "fat" simulator / device frameworks work on device
  if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
    strip_invalid_archs "$binary"
  fi

  # Resign the code if required by the build settings to avoid unstable apps
  code_sign_if_enabled "${destination}/$(basename "$1")"

  # Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
  if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
    local swift_runtime_libs
    swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u)
    for lib in $swift_runtime_libs; do
      echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
      rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
      code_sign_if_enabled "${destination}/${lib}"
    done
  fi
}

# Copies and strips a vendored dSYM
install_dsym() {
  local source="$1"
  if [ -r "$source" ]; then
    # Copy the dSYM into a the targets temp dir.
    echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${DERIVED_FILES_DIR}\""
    rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${DERIVED_FILES_DIR}"

    local basename
    basename="$(basename -s .framework.dSYM "$source")"
    binary="${DERIVED_FILES_DIR}/${basename}.framework.dSYM/Contents/Resources/DWARF/${basename}"

    # Strip invalid architectures so "fat" simulator / device frameworks work on device
    if [[ "$(file "$binary")" == *"Mach-O "*"dSYM companion"* ]]; then
      strip_invalid_archs "$binary"
    fi

    if [[ $STRIP_BINARY_RETVAL == 1 ]]; then
      # Move the stripped file into its final destination.
      echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${DERIVED_FILES_DIR}/${basename}.framework.dSYM\" \"${DWARF_DSYM_FOLDER_PATH}\""
      rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${DERIVED_FILES_DIR}/${basename}.framework.dSYM" "${DWARF_DSYM_FOLDER_PATH}"
    else
      # The dSYM was not stripped at all, in this case touch a fake folder so the input/output paths from Xcode do not reexecute this script because the file is missing.
      touch "${DWARF_DSYM_FOLDER_PATH}/${basename}.framework.dSYM"
    fi
  fi
}

# Copies the bcsymbolmap files of a vendored framework
install_bcsymbolmap() {
    local bcsymbolmap_path="$1"
    local destination="${BUILT_PRODUCTS_DIR}"
    echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${bcsymbolmap_path}" "${destination}""
    rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${bcsymbolmap_path}" "${destination}"
}

# Signs a framework with the provided identity
code_sign_if_enabled() {
  if [ -n "${EXPANDED_CODE_SIGN_IDENTITY:-}" -a "${CODE_SIGNING_REQUIRED:-}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
    # Use the current code_sign_identity
    echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
    local code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS:-} --preserve-metadata=identifier,entitlements '$1'"

    if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
      code_sign_cmd="$code_sign_cmd &"
    fi
    echo "$code_sign_cmd"
    eval "$code_sign_cmd"
  fi
}

# Strip invalid architectures
strip_invalid_archs() {
  binary="$1"
  # Get architectures for current target binary
  binary_archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | awk '{$1=$1;print}' | rev)"
  # Intersect them with the architectures we are building for
  intersected_archs="$(echo ${ARCHS[@]} ${binary_archs[@]} | tr ' ' '\n' | sort | uniq -d)"
  # If there are no archs supported by this binary then warn the user
  if [[ -z "$intersected_archs" ]]; then
    echo "warning: [CP] Vendored binary '$binary' contains architectures ($binary_archs) none of which match the current build architectures ($ARCHS)."
    STRIP_BINARY_RETVAL=0
    return
  fi
  stripped=""
  for arch in $binary_archs; do
    if ! [[ "${ARCHS}" == *"$arch"* ]]; then
      # Strip non-valid architectures in-place
      lipo -remove "$arch" -output "$binary" "$binary"
      stripped="$stripped $arch"
    fi
  done
  if [[ "$stripped" ]]; then
    echo "Stripped $binary of architectures:$stripped"
  fi
  STRIP_BINARY_RETVAL=1
}

# Per-configuration list of frameworks to embed (generated by CocoaPods).
if [[ "$CONFIGURATION" == "Debug" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/EPSignatureUpdated/EPSignatureUpdated.framework"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/EPSignatureUpdated/EPSignatureUpdated.framework"
fi
# Wait for backgrounded codesign jobs when parallel signing is enabled.
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
  wait
fi
#include <bits/stdc++.h>
using namespace std;

// For each query word, classify its language by the final letter:
// 'o' -> Filipino, 'u' -> Japanese, anything else -> Korean.
int main() {
    ios::sync_with_stdio(false);
    cin.tie(nullptr);

    int cases;
    cin >> cases;
    while (cases-- > 0) {
        string word;
        cin >> word;
        const char last = word[word.size() - 1];
        if (last == 'o') {
            cout << "FILIPINO\n";
        } else if (last == 'u') {
            cout << "JAPANESE\n";
        } else {
            cout << "KOREAN\n";
        }
    }
    return 0;
}
#include "Paralysis.hpp"

// Paralysis status effect lasting `rounds` turns.
Paralysis::Paralysis(int rounds) : m_rounds(rounds) {}

// Builds the phase callbacks this effect contributes when applied.
// Returns a single Start-phase callback that flags `enemy` as paralysed and
// counts down; its return value is the number of rounds still remaining.
std::vector<std::pair<phazeType, Involve>>
Paralysis::operator()(std::weak_ptr<Character> self, std::weak_ptr<Character> enemy)
{
    noused(self); // this effect only acts on the enemy
    int remaining = m_rounds;
    std::pair<phazeType, Involve> instantParalysis = {
        phazeType::Start,
        [enemy, remaining]() mutable {
            // NOTE(review): lock() returns null once the enemy is destroyed —
            // confirm callers guarantee the target outlives this effect
            // before this unchecked dereference.
            auto enemyPtr = enemy.lock();
            enemyPtr->setParalysisState(true);
            return --remaining;
        }};
    // BUG FIX: the original ended with `return {}` — a missing semicolon
    // (compile error) and, once "fixed" naively, an empty vector that would
    // silently drop the callback built above. Return the callback instead.
    return {instantParalysis};
}
// Decompress the gzip stream in `test.zip` into `test_unzipped.zip`.
const fs = require('fs');
const zlib = require('zlib');
const { pipeline } = require('stream');

const gunzip = zlib.createGunzip();

const inp = fs.createReadStream('test.zip');
const out = fs.createWriteStream('test_unzipped.zip');

// BUG FIX: bare `.pipe()` chains do not forward errors — a read, inflate, or
// write failure would emit unhandled 'error' events and leak the destination
// stream. `pipeline` propagates errors and destroys every stream on failure.
pipeline(inp, gunzip, out, (err) => {
  if (err) {
    console.error('Decompression failed:', err);
    process.exitCode = 1;
  }
});

// NOTE(review): zlib.createGunzip() handles gzip data, not .zip archives —
// confirm test.zip is really gzip-compressed despite its extension.
# Package metadata — presumably consumed by the shellm package manager
# (it lists shellm deps/libs); confirm the expected variable set upstream.

# Executable scripts provided by this package (colon-separated).
BINS="bin/home-data:bin/home-init:bin/home-new:bin/home-run"
# Other shellm packages this one depends on.
DEPS="gitlab.com/shellm/doc"
# BASH_COMPLETIONS="cmp/home.comp.bash"
# ZSH_COMPLETIONS="cmp/home.comp.zsh"
# Library files this package exposes for sourcing.
SHELLM_LIBS="lib/home.sh"
class Combination:
    """Modular combinatorics toolkit: nCr/nPr/nHr plus Stirling, Bell-style,
    Bernoulli and Faulhaber helpers.

    Precomputes factorials and inverse factorials modulo ``mod`` (which must
    be prime, since inverses come from Fermat's little theorem) up to
    ``n_max`` so the basic queries answer in O(1).
    """

    def __init__(self, n_max, mod=10**9 + 7):
        # O(n_max + log(mod)): one pass for factorials, one modular inverse,
        # one pass for inverse factorials.
        self.mod = mod
        # factorials[i] == i! % mod
        f = 1
        self.factorials = factorials = [f]
        for i in range(1, n_max + 1):
            # BUG FIX: was `f *= i % mod`, which multiplies by (i % mod) but
            # never reduces f itself — the list silently held the full,
            # unreduced (huge) factorials, breaking the stated O(n_max) bound.
            f = f * i % mod
            factorials.append(f)
        # invs[i] == (i!)^-1 % mod, built backwards from (n_max!)^-1.
        f = pow(f, mod - 2, mod)
        self.invs = invs = [f]
        for i in range(n_max, 0, -1):
            f = f * i % mod  # same fix as above
            invs.append(f)
        invs.reverse()
        # Memo tables for the recursive helpers (previously exponential-time).
        self._bernoulli_cache = {}
        self._stirling1_cache = {}

    def nCr(self, n, r):
        """Binomial coefficient C(n, r) mod p; 0 outside 0 <= r <= n."""
        if not 0 <= r <= n:
            return 0
        return self.factorials[n] * self.invs[r] % self.mod * self.invs[n - r] % self.mod

    def nPr(self, n, r):
        """Permutations P(n, r) = n! / (n-r)! mod p; 0 outside 0 <= r <= n."""
        if not 0 <= r <= n:
            return 0
        return self.factorials[n] * self.invs[n - r] % self.mod

    def nHr(self, n, r):
        """Combinations with repetition: C(n + r - 1, r) mod p."""
        if (n == 0 and r > 0) or r < 0:
            return 0
        return self.factorials[n + r - 1] * self.invs[r] % self.mod * self.invs[n - 1] % self.mod

    def rising_factorial(self, n, r):
        """n * (n + 1) * ... * (n + r - 1) mod p."""
        return self.factorials[n + r - 1] * self.invs[n - 1] % self.mod

    def stirling_first(self, n, k):
        """Unsigned Stirling number of the first kind c(n, k) mod p.

        Memoized; the recurrence is c(n, k) = c(n-1, k-1) + (n-1) * c(n-1, k).
        """
        # BUG FIX: the original recursed forever for k > n (the bases never
        # trigger); out-of-domain arguments now return 0.
        if not 0 <= k <= n:
            return 0
        if n == k:
            return 1
        if k == 0:
            return 0
        key = (n, k)
        if key not in self._stirling1_cache:
            self._stirling1_cache[key] = (self.stirling_first(n - 1, k - 1)
                                          + (n - 1) * self.stirling_first(n - 1, k)) % self.mod
        return self._stirling1_cache[key]

    def stirling_second(self, n, k):
        """Stirling number of the second kind S(n, k) mod p (inclusion-exclusion)."""
        if n == k:
            return 1
        value = 0
        for m in range(1, k + 1):
            value += (-1) ** (k - m) * self.nCr(k, m) * pow(m, n, self.mod)
        return self.invs[k] * value % self.mod

    def balls_and_boxes_3(self, n, k):
        """Surjections from n labelled balls onto k labelled boxes: k! * S(n, k) mod p."""
        value = 0
        for m in range(1, k + 1):
            value += (-1) ** (k - m) * self.nCr(k, m) * pow(m, n, self.mod)
            value %= self.mod
        return value

    def bernoulli(self, n):
        """Bernoulli number B_n mod p, with the B_1 = -1/2 convention.

        Memoized — the plain recurrence re-derived every prefix and was
        exponential in n.
        """
        if n == 0:
            return 1
        if n % 2 and n >= 3:
            return 0  # odd Bernoulli numbers beyond B_1 vanish
        if n not in self._bernoulli_cache:
            value = 0
            for k in range(n):
                value += self.nCr(n + 1, k) * self.bernoulli(k) % self.mod
            self._bernoulli_cache[n] = (- pow(n + 1, self.mod - 2, self.mod) * value) % self.mod
        return self._bernoulli_cache[n]

    def faulhaber(self, k, n):
        """Power sum 0^k + 1^k + ... + (n-1)^k mod p via Faulhaber's formula.

        NOTE: with the B_1 = -1/2 convention used here the sum runs over
        i < n, not through n.
        """
        value = 0
        for i in range(k + 1):
            value += self.nCr(k + 1, i) * self.bernoulli(i) % self.mod * pow(n, k - i + 1, self.mod) % self.mod
        return pow(k + 1, self.mod - 2, self.mod) * value % self.mod

    def lah(self, n, k):
        """Lah number L(n, k) = C(n-1, k-1) * n! / k! mod p."""
        return self.nCr(n - 1, k - 1) * self.factorials[n] % self.mod * self.invs[k] % self.mod

    def bell(self, n, k):
        """Partitions of an n-set into at most k non-empty blocks:
        sum of S(n, i) for i = 1..k, mod p."""
        value = 0
        for i in range(1, k + 1):
            value += self.stirling_second(n, i)
            value %= self.mod
        return value