text
stringlengths
1
1.05M
"""Plot statistis about missing values from given indicators.""" import matplotlib import seaborn as sns import matplotlib.pyplot as plt import pandas as pd import numpy as np import matplotlib.ticker as ticker from mpl_toolkits.axes_grid1.parasite_axes import SubplotHost # Plot functions: each indicator has a different way of being plotted def plot_global(indicators, plot=False, show=True, ax=None): """Plot statistics on the full database.""" # Get required indicators df = indicators['global'] n_rows = df.at[0, 'n_rows'] n_cols = df.at[0, 'n_cols'] n_values = df.at[0, 'n_values'] n_mv = df.at[0, 'n_mv'] n_mv1 = df.at[0, 'n_mv1'] n_mv2 = df.at[0, 'n_mv2'] n_not_mv = df.at[0, 'n_not_mv'] f_mv = df.at[0, 'f_mv'] f_mv1 = df.at[0, 'f_mv1'] f_mv2 = df.at[0, 'f_mv2'] f_not_mv = df.at[0, 'f_not_mv'] # Print these statistics if show: print( f'\n' f'Statistics on the full data frame:\n' f'---------------------------------\n' f'[{n_rows} rows x {n_cols} columns]\n' f'{n_values} values\n' f'N NMV: {f_not_mv:.1f}% or {n_not_mv}\n' f'N MV: {f_mv:.1f}% or {n_mv}\n' f' N MV 1: {f_mv1:.1f}% or {n_mv1}\n' f' N MV 2: {f_mv2:.1f}% or {n_mv2}\n' ) # If asked, plot these statistics if plot: if ax is None: _, ax = plt.subplots(figsize=(10, 4)) df_show = pd.DataFrame({ 'MV1': [n_mv1], 'MV2': [n_mv2], 'MV': [n_mv], 'V': [n_values], 'type': ['Full data frame'] }) sns.set_color_codes('pastel') sns.barplot(x='V', y='type', data=df_show, color='lightgray', ax=ax, label=f'Not missing ({f_not_mv:.1f}%)') sns.set_color_codes('muted') sns.barplot(x='MV', y='type', data=df_show, color='b', ax=ax, label=f'Missing - Not applicable ({f_mv1:.1f}%)') sns.set_color_codes('dark') sns.barplot(x='MV2', y='type', data=df_show, color='b', ax=ax, label=f'Missing - Not available ({f_mv2:.1f}%)') box = ax.get_position() ax.set_position([box.x0, box.y0, box.width * 0.5, box.height*0.5]) ax.legend(ncol=1, loc='center left', frameon=True, title='Type of values', bbox_to_anchor=(1.05, 0.5)) ax.set(ylabel='', 
xlabel=f'Number of values (Total {n_values})') ax.set_title('Proportion of missing values') sns.despine(left=True, bottom=True, ax=ax) # Remove y labels ax.tick_params(axis='y', which='both', left=False, labelleft=False) def plot_features(indicators, plot=False, show=True, ax=None): """Plot the number of features with missing values.""" # Get required indicators df = pd.concat([indicators['features'], indicators['global']], axis=1) n_f_w_mv = df.at[0, 'n_f_w_mv'] n_f_w_mv1_o = df.at[0, 'n_f_w_mv1_o'] n_f_w_mv2_o = df.at[0, 'n_f_w_mv2_o'] n_f_w_mv_1a2 = df.at[0, 'n_f_w_mv_1a2'] n_f_wo_mv = df.at[0, 'n_f_wo_mv'] f_f_w_mv = df.at[0, 'f_f_w_mv'] f_f_w_mv1_o = df.at[0, 'f_f_w_mv1_o'] f_f_w_mv2_o = df.at[0, 'f_f_w_mv2_o'] f_f_w_mv_1a2 = df.at[0, 'f_f_w_mv_1a2'] f_f_wo_mv = df.at[0, 'f_f_wo_mv'] n_cols = df.at[0, 'n_cols'] if show: print( f'\n' f'Statistics on features:\n' f'-----------------------\n' f'N features: {n_cols}\n' f'N features with MV: {n_f_w_mv} ({f_f_w_mv:.1f}%)\n' f' N features with MV1 only: {n_f_w_mv1_o} ({f_f_w_mv1_o:.1f}%)\n' f' N features with MV2 only: {n_f_w_mv2_o} ({f_f_w_mv2_o:.1f}%)\n' f' N features with MV1 and MV2: {n_f_w_mv_1a2} ({f_f_w_mv_1a2:.1f}%)\n' ) if plot: # Plot proportion of features with missing values df_show = pd.DataFrame({ 'N MV': [n_f_w_mv], 'N MV1 only': [n_f_w_mv1_o], 'N MV2 only': [n_f_w_mv2_o], 'N MV 1 xor 2': [n_f_w_mv1_o + n_f_w_mv2_o], 'N F': [n_cols], 'type': ['Full data frame'] }) if ax is None: _, ax = plt.subplots(figsize=(10, 4)) sns.set_color_codes('pastel') sns.barplot(x='N F', y='type', data=df_show, color='lightgray', ax=ax, label=f'No missing values ({n_f_wo_mv} • {f_f_wo_mv:.1f}%)') sns.set_color_codes('pastel') sns.barplot(x='N MV', y='type', data=df_show, color='g', ax=ax, label=f'Not applicable and not available ({n_f_w_mv_1a2} • {f_f_w_mv_1a2:.1f}%)') sns.set_color_codes('muted') sns.barplot(x='N MV 1 xor 2', y='type', data=df_show, color='g', ax=ax, label=f'Not applicable only ({n_f_w_mv1_o} • 
{f_f_w_mv1_o:.1f}%)') sns.set_color_codes('dark') sns.barplot(x='N MV2 only', y='type', data=df_show, color='g', ax=ax, label=f'Not available only ({n_f_w_mv2_o} • {f_f_w_mv2_o:.1f}%)') box = ax.get_position() ax.set_position([box.x0, box.y0, box.width * 0.5, box.height*0.5]) ax.legend(ncol=1, loc='center left', frameon=True, title='Type of missing values contained in the feature', bbox_to_anchor=(1.05, 0.5)) ax.set(ylabel='', xlabel=f'Number of features (Total {n_cols})') ax.set_title('Proportion of features having missing values') sns.despine(left=True, bottom=True, ax=ax) # Remove y labels ax.tick_params(axis='y', which='both', left=False, labelleft=False) def plot_feature_wise(indicators, plot=False, show=True, ax=None, nf_max=40): """Plot the statistics feature-wise.""" n_mv_fw = indicators['feature-wise'] n_rows = indicators['global'].at[0, 'n_rows'] if show: with pd.option_context('display.max_rows', None): print( f'\n' f'Statistics feature-wise:\n' f'------------------------\n' f'\n' f'{n_mv_fw}' ) if plot: # Plot proportion of missing values in each feature # Copy index in a column for the barplot method n_mv_fw['feature'] = n_mv_fw.index n_mv_fw['feature_shortened'] = n_mv_fw['id'].astype(str) + ': ' + n_mv_fw.index # Truncate if n_mv_fw.shape[0] <= nf_max: def truncate(string): if len(string) <= 20: return string return string[:27]+'...' 
n_mv_fw['feature_shortened'] = n_mv_fw['feature_shortened'].apply(truncate) # Add the total number of values for each feature n_mv_fw['N V'] = n_rows # Get rid of the features with no missing values n_mv_fw_l = n_mv_fw[(n_mv_fw['N MV1'] != 0) | (n_mv_fw['N MV2'] != 0)] n_mv_fw_l = n_mv_fw_l.head(20) if ax is None: fig, ax = plt.subplots(figsize=(10, 8)) else: fig = plt.gcf() if n_mv_fw_l.empty: return fig, ax sns.set_color_codes('pastel') sns.barplot(x='N V', y='feature_shortened', data=n_mv_fw_l, ax=ax, color='lightgray', label=f'Not missing', dodge=False) sns.set_color_codes('muted') sns.barplot(x='N MV', y='feature_shortened', data=n_mv_fw_l, ax=ax, color='b', label=f'Missing - Not applicable') sns.set_color_codes("dark") sns.barplot(x='N MV2', y='feature_shortened', data=n_mv_fw_l, ax=ax, color="b", label=f'Missing - Not available') ax.legend(ncol=1, loc='lower right', frameon=True, title='Type of values') ax.set(ylabel='Features', xlabel='Number of values') ax.tick_params(labelsize=7) sns.despine(left=True, bottom=True, ax=ax) # Remove y labels if more than 40 if n_mv_fw_l.shape[0] > nf_max: ax.tick_params(axis='y', which='both', left=False, labelleft=False) fig.tight_layout(rect=(0, 0, 1, .92)) else: fig.tight_layout(rect=(0., 0, 1, .92)) return fig, ax def plot_feature_wise_v2(indicators, plot=False, show=True, ax=None, nf_max=40, color='b'): """Plot the statistics feature-wise.""" n_mv_fw = indicators['feature-wise'] if show: with pd.option_context('display.max_rows', None): print( f'\n' f'Statistics feature-wise:\n' f'------------------------\n' f'\n' f'{n_mv_fw}' ) if plot: # Plot proportion of missing values in each feature # Copy index in a column for the barplot method n_mv_fw['feature'] = n_mv_fw.index n_mv_fw['id'] = np.arange(n_mv_fw.shape[0]) # Get rid of the features with no missing values n_mv_fw_l = n_mv_fw if ax is None: fig, ax = plt.subplots(figsize=(10, 8)) else: fig = plt.gcf() sns.set_color_codes('pastel') handle_nm, = 
ax.stackplot(n_mv_fw_l['id'].values, 100, color='lightgray', labels=['Not missing']) handle_m, = ax.stackplot(n_mv_fw_l['id'].values, n_mv_fw_l['F MV'].values, color=color, labels=['Missing']) # ax.stackplot(n_mv_fw_l['id'].values, n_mv_fw_l['N V'].values, color='lightgray', labels=['Not missing']) # ax.stackplot(n_mv_fw_l['id'].values, n_mv_fw_l['N MV'].values, color='b', labels=['Missing']) # ax.legend(ncol=1, loc='upper right', frameon=True, # title='Type of values') # ax.set(xlabel='Features', ylabel='Proportion') # ax.set(xlabel='Features', ylabel='Number of values') ax.tick_params(labelsize=7) sns.despine(left=True, bottom=True, ax=ax) # Remove y labels if more than 40 if n_mv_fw.shape[0] > nf_max: fig.tight_layout(rect=(0, 0, 1, .92)) else: fig.tight_layout(rect=(0., 0, 1, .92)) return fig, ax, (handle_nm, handle_m) def plot_rows(indicators, plot=False, show=True, ax=None): """Plot stats on rows without missing values.""" # Get required indicators df = pd.concat([indicators['rows'], indicators['global']], axis=1) n_r_wo_mv = df.at[0, 'n_r_wo_mv'] n_r_w_mv = df.at[0, 'n_r_w_mv'] n_r_w_mv1_o = df.at[0, 'n_r_w_mv1_o'] n_r_w_mv2_o = df.at[0, 'n_r_w_mv2_o'] n_r_w_mv_1a2 = df.at[0, 'n_r_w_mv_1a2'] f_r_wo_mv = df.at[0, 'f_r_wo_mv'] f_r_w_mv = df.at[0, 'f_r_w_mv'] f_r_w_mv1_o = df.at[0, 'f_r_w_mv1_o'] f_r_w_mv2_o = df.at[0, 'f_r_w_mv2_o'] f_r_w_mv_1a2 = df.at[0, 'f_r_w_mv_1a2'] n_rows = df.at[0, 'n_rows'] if show: print( f'\n' f'Statistics on rows:\n' f'-------------------\n' f'N rows: {n_rows}\n' f'N rows without MV: {n_r_wo_mv} ({f_r_wo_mv:.2f}%)\n' f'N rows with MV: {n_r_w_mv} ({f_r_w_mv:.2f}%)\n' f' N rows with MV1 only: {n_r_w_mv1_o} ({f_r_w_mv1_o:.2f}%)\n' f' N rows with MV2 only: {n_r_w_mv2_o} ({f_r_w_mv2_o:.2f}%)\n' f' N rows with MV1 and MV2: {n_r_w_mv_1a2} ({f_r_w_mv_1a2:.2f}%)\n' ) if plot: # Plot proportion of features with missing values df_show = pd.DataFrame({ 'N MV': [n_r_w_mv], 'N MV1 only': [n_r_w_mv1_o], 'N MV2 only': [n_r_w_mv2_o], 'N MV 1 xor 
2': [n_r_w_mv1_o + n_r_w_mv2_o], 'N R': [n_rows], 'type': ['Full data frame'] }) if ax is None: _, ax = plt.subplots(figsize=(10, 4)) sns.set_color_codes('pastel') sns.barplot(x='N R', y='type', data=df_show, color='lightgray', ax=ax, label=f'No missing values ({n_r_wo_mv} • {f_r_wo_mv:.2f}%)') sns.set_color_codes('pastel') sns.barplot(x='N MV', y='type', data=df_show, color='r', ax=ax, label=f'Not applicable and not available ({n_r_w_mv_1a2} • {f_r_w_mv_1a2:.2f}%)') sns.set_color_codes('muted') sns.barplot(x='N MV 1 xor 2', y='type', data=df_show, color='r', ax=ax, label=f'Not applicable only ({n_r_w_mv1_o} • {f_r_w_mv1_o:.2f}%)') sns.set_color_codes('dark') sns.barplot(x='N MV2 only', y='type', data=df_show, color='r', ax=ax, label=f'Not available only ({n_r_w_mv2_o} • {f_r_w_mv2_o:.2f}%)') box = ax.get_position() ax.set_position([box.x0, box.y0, box.width*0.5, box.height*0.5]) ax.legend(ncol=1, loc='center left', frameon=True, title='Type of missing values contained in the row', bbox_to_anchor=(1.05, 0.5)) ax.set(ylabel='', xlabel=f'Number of rows (Total {n_rows})') ax.set_title('Proportion of rows having missing values') sns.despine(left=True, bottom=True, ax=ax) # Remove y labels ax.tick_params(axis='y', which='both', left=False, labelleft=False) def plot_rm_rows(indicators, plot=False, show=True, ax=None): """Plot number of rows affected if we remove features with MV.""" # Get required indicators df = pd.concat([indicators['rm_rows'], indicators['global']], axis=1) n_r_a_rm_mv1 = df.at[0, 'n_r_a_rm_mv1'] n_r_a_rm_mv2 = df.at[0, 'n_r_a_rm_mv2'] n_r_a_rm_mv_1o2 = df.at[0, 'n_r_a_rm_mv_1o2'] n_r_a_rm_mv1_o = df.at[0, 'n_r_a_rm_mv1_o'] n_r_a_rm_mv2_o = df.at[0, 'n_r_a_rm_mv2_o'] n_r_a_rm_mv_1a2 = df.at[0, 'n_r_a_rm_mv_1a2'] f_r_a_rm_mv1 = df.at[0, 'f_r_a_rm_mv1'] f_r_a_rm_mv2 = df.at[0, 'f_r_a_rm_mv2'] f_r_a_rm_mv_1o2 = df.at[0, 'f_r_a_rm_mv_1o2'] f_r_a_rm_mv1_o = df.at[0, 'f_r_a_rm_mv1_o'] f_r_a_rm_mv2_o = df.at[0, 'f_r_a_rm_mv2_o'] f_r_a_rm_mv_1a2 = df.at[0, 
'f_r_a_rm_mv_1a2'] n_rows = df.at[0, 'n_rows'] if show: print( f'N rows losing information if we remove features with :\n' f' MV1: {n_r_a_rm_mv1} ({f_r_a_rm_mv1:.2f}%)\n' f' MV2: {n_r_a_rm_mv2} ({f_r_a_rm_mv2:.2f}%)\n' f' MV: {n_r_a_rm_mv_1o2} ({f_r_a_rm_mv_1o2:.2f}%)\n' f' MV1 only: {n_r_a_rm_mv1_o} ({f_r_a_rm_mv1_o:.2f}%)\n' f' MV2 only: {n_r_a_rm_mv2_o} ({f_r_a_rm_mv2_o:.2f}%)\n' f' MV1 and MV2: {n_r_a_rm_mv_1a2} ({f_r_a_rm_mv_1a2:.2f}%)\n' ) if plot: # Plot number of rows losing information when removing features with MV df_show = pd.DataFrame({ 'N rows affected': [ n_r_a_rm_mv1, n_r_a_rm_mv2, n_r_a_rm_mv_1o2, # n_r_a_rm_mv1_o, # n_r_a_rm_mv2_o, # n_r_a_rm_mv_1a2, ], 'N R': [n_rows for _ in range(3)], # 'N R': [n_rows for _ in range(6)], 'type': [ f'Not applicable\n{f_r_a_rm_mv1:.2f}%', f'Not available\n{f_r_a_rm_mv2:.2f}%', f'Not applicable or\nnot available\n{f_r_a_rm_mv_1o2:.2f}%', # f'MV1 only\n{f_r_a_rm_mv1_o:.2f}%', # f'MV2 only\n{f_r_a_rm_mv2_o:.2f}%', # f'MV1 and MV2\n{f_r_a_rm_mv_1a2:.2f}%' ], }) df_show.sort_values('N rows affected', ascending=False, inplace=True) if ax is None: _, ax = plt.subplots(figsize=(10, 4)) sns.set_color_codes('muted') sns.barplot(x='N rows affected', y='type', data=df_show, color='r', ax=ax) box = ax.get_position() ax.set_position([1.1*box.x0, box.y0, box.width, box.height]) ax.set_title('Number of rows losing information (non-missing values)\n' 'when removing features containing missing values of type:') ax.set(ylabel='', xlabel=f'Number of rows (Total {n_rows})') ax.set_xlim(right=n_rows) sns.despine(left=True, bottom=True, ax=ax) def plot_rm_features(indicators, plot=False, show=True, ax=None): """Plot the part of information lost when removing features with MV.""" # Get required indicators df = pd.concat([indicators['rm_features'], indicators['global']], axis=1) n_v_lost_mv1 = df.at[0, 'n_v_lost_mv1'] n_v_lost_mv2 = df.at[0, 'n_v_lost_mv2'] n_v_lost_mv_1o2 = df.at[0, 'n_v_lost_mv_1o2'] n_v_lost_mv1_o = df.at[0, 
'n_v_lost_mv1_o'] n_v_lost_mv2_o = df.at[0, 'n_v_lost_mv2_o'] n_v_lost_mv_1a2 = df.at[0, 'n_v_lost_mv_1a2'] f_v_lost_mv1 = df.at[0, 'f_v_lost_mv1'] f_v_lost_mv2 = df.at[0, 'f_v_lost_mv2'] f_v_lost_mv_1o2 = df.at[0, 'f_v_lost_mv_1o2'] f_v_lost_mv1_o = df.at[0, 'f_v_lost_mv1_o'] f_v_lost_mv2_o = df.at[0, 'f_v_lost_mv2_o'] f_v_lost_mv_1a2 = df.at[0, 'f_v_lost_mv_1a2'] n_rows = df.at[0, 'n_rows'] n_values = df.at[0, 'n_values'] if show: print( f'N values lost if we remove features with :\n' f' MV1: {n_v_lost_mv1} ({f_v_lost_mv1:.2f}%)\n' f' MV2: {n_v_lost_mv2} ({f_v_lost_mv2:.2f}%)\n' f' MV: {n_v_lost_mv_1o2} ({f_v_lost_mv_1o2:.2f}%)\n' f' MV1 only: {n_v_lost_mv1_o} ({f_v_lost_mv1_o:.2f}%)\n' f' MV2 only: {n_v_lost_mv2_o} ({f_v_lost_mv2_o:.2f}%)\n' f' MV1 and MV2: {n_v_lost_mv_1a2} ({f_v_lost_mv_1a2:.2f}%)\n' ) if plot: # Plot number of values lost when removing features with MV df_show = pd.DataFrame({ 'N values lost': [ n_v_lost_mv1, n_v_lost_mv2, n_v_lost_mv_1o2, # n_v_lost_mv1_o, # n_v_lost_mv2_o, # n_v_lost_mv_1a2, ], 'N R': [n_rows for _ in range(3)], # 'N R': [n_rows for _ in range(6)], 'type': [ f'Not applicable\n{f_v_lost_mv1:.2f}%', f'Not available\n{f_v_lost_mv2:.2f}%', f'Not applicable or\nnot available\n{f_v_lost_mv_1o2:.2f}%', # f'MV1 only\n{f_v_lost_mv1_o:.2f}%', # f'MV2 only\n{f_v_lost_mv2_o:.2f}%', # f'MV1 and MV2\n{f_v_lost_mv_1a2:.2f}%' ], }) df_show.sort_values('N values lost', ascending=False, inplace=True) if ax is None: _, ax = plt.subplots(figsize=(10, 4)) sns.set_color_codes('muted') sns.barplot(x='N values lost', y='type', data=df_show, color='b', ax=ax) box = ax.get_position() ax.set_position([1.1*box.x0, box.y0, box.width, box.height]) ax.set_title('Number of non-missing values lost' '\nwhen removing features containing missing values of type:') ax.set(ylabel='', xlabel=f'Number of values (Total {n_values})') ax.set_xlim(right=n_values) sns.despine(left=True, bottom=True, ax=ax) def figure1(indicators, plot=True, db_name=None, table=None): 
"""Print a global overview of missing values""" fig1, axes1 = plt.subplots(3, 1, figsize=(12, 6)) fig1.tight_layout(pad=2) if all((db_name, table)): fig1.suptitle(f'Overview of missing values in {db_name} (table "$\\verb|{table}|$")', fontsize='xx-large') matplotlib.rcParams.update({'font.size': 13}) plot_global(indicators, plot=plot, ax=axes1[0]) plot_features(indicators, plot=plot, ax=axes1[1]) plot_rows(indicators, plot=plot, ax=axes1[2]) def figure2(indicators, plot=True, db_name=None, table=None): matplotlib.rcParams.update({ 'font.size': 14, 'axes.titlesize': 14, 'axes.labelsize': 13, 'xtick.labelsize': 13, 'ytick.labelsize': 13, }) fig2, _ = plot_feature_wise(indicators, plot=plot) if all((db_name, table)): fig2.suptitle(f'Proportion of missing values in each feature' f'\nof {db_name} (table "$\\verb|{table}|$")', fontsize='x-large') def figure2bis(indicators, plot=True, db_name=None, table=None): matplotlib.rcParams.update({ 'font.size': 14, 'axes.titlesize': 14, 'axes.labelsize': 13, 'xtick.labelsize': 13, 'ytick.labelsize': 13, }) fig2, *_ = plot_feature_wise_v2(indicators, plot=plot) if all((db_name, table)): fig2.suptitle(f'Proportion of missing values in each feature' f'\nof {db_name} (table "$\\verb|{table}|$")', fontsize='x-large') def figure3(indicators, plot=True, db_name=None, table=None): matplotlib.rcParams.update({ # 'font.size': 14, 'axes.titlesize': 14, 'axes.labelsize': 13, 'xtick.labelsize': 13, 'ytick.labelsize': 13, }) fig3, axes3 = plt.subplots(2, 1, figsize=(10, 8)) fig3.tight_layout(pad=5, h_pad=7, rect=(0.05, 0, 1, .92)) if all((db_name, table)): fig3.suptitle(f'Effect of removing features containing missing values' f'\non {db_name} (table "$\\verb|{table}|$")', fontsize='x-large') plot_rm_rows(indicators, plot=plot, ax=axes3[0]) plot_rm_features(indicators, plot=plot, ax=axes3[1]) def plot_feature_types(props, ax=None): matplotlib.rcParams.update({ 'font.size': 12, # 'axes.titlesize': 10, 'axes.labelsize': 11, 'xtick.labelsize': 8, 
'ytick.labelsize': 11, 'legend.fontsize': 11, 'legend.title_fontsize': 12, }) if ax is None: fig, ax = plt.subplots(figsize=(4.5, 6.5)) props.reset_index(inplace=True) props['tag'] = props['tag'].str.replace('_', '-') props['task'] = props['task'].str.replace('_', r'\_') props['task'] = props['task'].str.replace('pvals', r'screening') # Compute cumsums for plotting props['categorical+ordinal'] = props['categorical'] + props['ordinal'] props['continuous+ordinal'] = props['continuous'] + props['ordinal'] c1, c2, c3 = sns.color_palette(['tab:grey', 'tab:olive', 'tab:cyan']) g1 = sns.barplot(y='tag', x='n', data=props, orient='h', hue='T', color=c1, palette=[c1], ax=ax) g2 = sns.barplot(y='tag', x='categorical+ordinal', data=props, orient='h', hue='T', color=c3, palette=[c3], ax=ax) g3 = sns.barplot(y='tag', x='categorical', data=props, orient='h', hue='T', color=c2, palette=[c2], ax=ax) handles, labels = ax.get_legend_handles_labels() ax.legend(handles=[handles[10], handles[5], handles[0]], labels=['Categorical', 'Ordinal', 'Numerical'], title='Feature type', fancybox=True, shadow=False, loc='upper center', bbox_to_anchor=(0.215, 1.17), ncol=3) ax.set_xlabel('Number of features') ax.set_ylabel('') props.set_index(['db', 'task', 'T'], inplace=True) subprops = props.iloc[props.index.get_level_values('T') == 0] minor_YT = ax.get_yaxis().get_majorticklocs() subprops['YT_V'] = minor_YT major = subprops.groupby(level=[0])['YT_V'].first() ax.set_yticklabels(subprops.index.get_level_values(1)) xlim = ax.get_xlim() for i, (idx, value) in enumerate(major.items()): ax.text(-40, major.iloc[i], idx, va='center', ha='right') ax.set_xlim(xlim)
<gh_stars>0 import { Component } from "@/models/Component"; import { Layout } from "@/models/Layout"; import { Style } from "@/models/Style"; import { Theme } from "@/models/Theme"; import AppRepository from "@/repository"; import { ObjectId } from "mongodb"; import { sampleComponents } from "./component"; import { sampleLayouts } from "./layout"; import { sampleStyles } from "./style"; import { sampleTheme } from "./theme"; const generateTestData = async (): Promise<void> => { const styleInfos = [] as ObjectId[]; const layoutInfos = { BLACK_THEME_STYLE: [], WHITE_THEME_STYLE: [], GREEN_THEME_STYLE: [], } as { [name: string]: ObjectId[]; }; const componentInfos = { BLACK_THEME_STYLE: [], WHITE_THEME_STYLE: [], GREEN_THEME_STYLE: [], } as { [name: string]: ObjectId[]; }; // Generate Component Collection const generateComponentCollection = async () => { const componentNames = Object.keys(sampleComponents); for (const name of componentNames) { const component = new Component(); component.name = name; component.attribute = sampleComponents[name]; const outputComponent = await AppRepository.Component.save(component); if (outputComponent.name.indexOf("BLACK_THEME") !== -1) { componentInfos.BLACK_THEME_STYLE.push(outputComponent._id); } else if (outputComponent.name.indexOf("WHITE_THEME") !== -1) { componentInfos.WHITE_THEME_STYLE.push(outputComponent._id); } else if (outputComponent.name.indexOf("GREEN_THEME") !== -1) { componentInfos.GREEN_THEME_STYLE.push(outputComponent._id); } } }; // Generate Layout Collection const generateLayoutCollection = async () => { const layoutNames = Object.keys(sampleLayouts); for (const name of layoutNames) { const layout = new Layout(); layout.name = name; layout.attribute = sampleLayouts[name]; const outputLayout = await AppRepository.Layout.save(layout); if (outputLayout.name.indexOf("BLACK_THEME") !== -1) { layoutInfos.BLACK_THEME_STYLE.push(outputLayout._id); } else if (outputLayout.name.indexOf("WHITE_THEME") !== -1) { 
layoutInfos.WHITE_THEME_STYLE.push(outputLayout._id); } else if (outputLayout.name.indexOf("GREEN_THEME") !== -1) { layoutInfos.GREEN_THEME_STYLE.push(outputLayout._id); } } }; // Generate Style Collection const generateStyleCollection = async () => { for (const name of sampleStyles) { const style = new Style(); style.name = name; style.components = componentInfos[name]; style.layouts = layoutInfos[name]; const outputStyle = await AppRepository.Style.save(style); styleInfos.push(outputStyle._id); } }; // Generate Theme Collection const generateThemeCollection = async () => { for (const name of sampleTheme) { const theme = new Theme(); theme.name = name; // populate or aggregation theme.styles = styleInfos; await AppRepository.Theme.save(theme); } }; await generateComponentCollection(); await generateLayoutCollection(); await generateStyleCollection(); await generateThemeCollection(); }; export default generateTestData;
// Define the custom error type within the error module mod error { // Define the generic type for additional error information struct Generic; // Define the custom error type for deleting a profile object pub struct DeleteProfileObjectError { pub meta: Generic, } // Implement the custom error type for the "BadRequestException" scenario impl From<reqwest::Error> for DeleteProfileObjectError { fn from(_: reqwest::Error) -> Self { DeleteProfileObjectError { meta: Generic } } } }
<?php //Encryption $salt = openssl_random_pseudo_bytes(8); $key = openssl_random_pseudo_bytes(32); $iv = openssl_random_pseudo_bytes(16); $ciphertext = openssl_encrypt($plaintext, 'AES-256-CBC', $key, $options=0, $iv, $tag); //Decryption $originalPlaintext = openssl_decrypt($ciphertext, 'AES-256-CBC', $key, $options=0, $iv, $tag); echo 'The original plaintext was: ' . $originalPlaintext; ?>
import collections def most_common_letters(s): s = s.lower() count = collections.Counter(s) most_common = count.most_common() common_letters = [letter[0] for letter in most_common] print(common_letters) most_common_letters("hello world!") # Output: ['l', 'o', 'e', 'h', 'd', 'r', 'w']
package wangmin.modbus.entity; import com.google.common.collect.Lists; import wangmin.modbus.util.ContinuousAddressBlock; import wangmin.modbus.entity.type.ModbusRegisterType; import java.util.List; /** * Created by wm on 2017/1/3. * modbus 的一个slave的地址信息 */ public class ModbusSlaveAddressInfo { public ModbusRegisterType registerType; public ContinuousAddressBlock cab; public List<List<Boolean>> digitalDataList; public List<byte[]> registerDataList; public ModbusSlaveAddressInfo(ModbusRegisterType registerType) { this.registerType = registerType; cab = new ContinuousAddressBlock(); if (registerType.isDigital()) digitalDataList = Lists.newArrayList(); else if (registerType.isRegister()) registerDataList = Lists.newArrayList(); } }
source env.sh for i in `seq 1 20`; do export OMP_NUM_THREADS=20 numactl --membind 0 --cpubind 0 ./bin/hostf 3972 192 0 $i > 3972_192_single_0.0_${i}.log done
<reponame>Evangelize/classes<gh_stars>1-10 import PPTX from '../src/lib/pptx'; import fs from 'fs'; const path = process.argv[2]; const outPath = 'test-slide.json'; fs.readFile(path, (err, data) => { if (err) throw err; const pptx = new PPTX(); pptx.parse(data).then( (results) => { fs.writeFile( outPath, JSON.stringify(results), (error) => { if (error) { console.error("write error: " + error.message); } else { console.log("Successful Write to " + outPath); } } ); } ); });
import android.app.SearchManager; import android.content.Context; import android.os.AsyncTask; import android.support.v7.app.AppCompatActivity; import android.os.Bundle; import android.support.v7.widget.LinearLayoutManager; import android.support.v7.widget.RecyclerView; import android.util.Log; import android.view.Menu; import android.widget.SearchView; import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; import java.io.IOException; import java.util.ArrayList; public class MainActivity extends AppCompatActivity { private static final String TAG = "MainActivity"; private RecyclerView mRecyclerView; private RecipeAdapter mRecipeAdapter; private ArrayList<Recipe> mRecipes; private SearchView mSearchView; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_main); mRecyclerView = findViewById(R.id.recycler_view); mSearchView = findViewById(R.id.search_view); mRecyclerView.setLayoutManager(new LinearLayoutManager(this)); mRecipes = new ArrayList<>(); setupSearchView(); } private void setupSearchView() { mSearchView.setIconifiedByDefault(true); mSearchView.setOnQueryTextListener(new SearchView.OnQueryTextListener() { @Override public boolean onQueryTextSubmit(String query) { fetchRecipes(query); return true; } @Override public boolean onQueryTextChange(String newText) { // No need to implement this return false; } }); } private void fetchRecipes(String query) { String url = "https://api.example.com/search?q=" + query; NetworkTask task = new NetworkTask(); task.execute(url); } private class NetworkTask extends AsyncTask<String, Void, String> { @Override protected String doInBackground(String... 
strings) { String url = strings[0]; String result = null; try { result = NetworkUtils.doHttpGet(url); } catch (IOException e) { e.printStackTrace(); } return result; } @Override protected void onPostExecute(String s) { super.onPostExecute(s); Log.d(TAG, "onPostExecute: " + s); if (s == null) { Log.e(TAG, "onPostExecute: Error getting the recipes"); return; } try { JSONObject root = new JSONObject(s); JSONArray recipesArray = root.getJSONArray("recipes"); for (int i=0; i<recipesArray.length(); i++) { JSONObject recipeJSON = recipesArray.getJSONObject(i); Recipe recipe = new Recipe(recipeJSON); mRecipes.add(recipe); } } catch (JSONException e) { e.printStackTrace(); } mRecipeAdapter = new RecipeAdapter(mRecipes); mRecyclerView.setAdapter(mRecipeAdapter); } } }
import unittest class TINCTestLoader(unittest.TestLoader): def loadTestsFromTestCase(self, testCaseClass): test_suite = super().loadTestsFromTestCase(testCaseClass) return test_suite def getTestCaseByName(self, test_case_name): test_loader = TINCTestLoader() test_suite = test_loader.loadTestsFromTestCase(MockSQLPerformanceTestCase) test_case = None for case in test_suite._tests: if case._testMethodName == test_case_name: test_case = case break return test_case
<filename>src/commands/commands.ts import { Command as CommanderCommand } from 'commander'; import { CommandFeature } from './CommandFeature'; import { CommandRelease } from './CommandRelease'; import { CommandScaffold } from './CommandScaffold'; import { ActionCallback } from './type'; export class Commands { protected static getCommands() { return [CommandFeature, CommandRelease, CommandScaffold]; } public static getDefaultCommand() { return null; } public static processCLI(program: CommanderCommand) {} public static attachCommands( program: CommanderCommand, actionCallback: ActionCallback, ) { this.getCommands().forEach((command) => command.attach(program, actionCallback), ); } }
import React, { useState, useEffect } from "react"; import { makeStyles } from "@material-ui/core/styles"; import io from "socket.io-client"; import Paper from "@material-ui/core/Paper"; import Grid from "@material-ui/core/Grid"; import Box from "@material-ui/core/Box"; import Divider from "@material-ui/core/Divider"; import TextField from "@material-ui/core/TextField"; import Typography from "@material-ui/core/Typography"; import List from "@material-ui/core/List"; import ListItem from "@material-ui/core/ListItem"; import ListItemIcon from "@material-ui/core/ListItemIcon"; import ListItemText from "@material-ui/core/ListItemText"; import Avatar from "@material-ui/core/Avatar"; import Fab from "@material-ui/core/Fab"; import SendIcon from "@material-ui/icons/Send"; import { useForm } from "react-hook-form"; const socket = io("http://localhost:4000", { transports: ["websocket", "polling", "flashsocket"], }); const useStyles = makeStyles((theme) => ({ root: { display: "flex", flexWrap: "wrap", justifyContent: "space-around", overflow: "hidden", backgroundColor: theme.palette.background.paper, }, gridList: { width: 500, height: 450, }, icon: { color: "rgba(255, 255, 255, 0.54)", }, chatSection: { width: "100%", height: "150px", overflowY: "auto", }, headBG: { backgroundColor: "#e0e0e0", }, borderRight500: { borderRight: "1px solid #e0e0e0", }, messageArea: { overflowY: "auto", }, avatar: { marginLeft: "5px", }, small: { width: theme.spacing(3), height: theme.spacing(3), }, large: { width: theme.spacing(7), height: theme.spacing(7), }, listItem: { padding: 0, }, })); const Chat = () => { const classes = useStyles(); const [state, setStaet] = useState({ message: "", name: "" }); const [chat, setChat] = useState([]); const { register, errors, handleSubmit, clearErrors } = useForm(); const onSubmit = (data, e) => { // console.log(e); e.preventDefault(); e.target.reset(); const { name, message } = data; socket.emit("message", data); }; let search = window.location.search; 
let params = new URLSearchParams(search); let foo = params.has("player") ? params.get("player") : 1; socket.on("message", function (data) { const { name, message } = data; setChat([...chat, { name, message }]); // setStaet([...chat, { name, message }]); }); const renderChat = () => { // console.log(chat); // console.log(chat); const html = (name, message) => { if (name === "1") { // console.log(name); return ( <> <Grid item xs={1} className={classes.avatar}> <Avatar alt="Alice" src="https://material-ui.com/static/images/avatar/3.jpg" className={classes.small} /> </Grid> <Grid item xs={1} className={classes.avatar}> <ListItemText secondary="09:30"></ListItemText> </Grid> <Grid item xs={8}> <ListItemText align="right" primary={message}></ListItemText> </Grid> </> ); } else { // console.log(name); return ( <> <Grid item xs={8}> <ListItemText align="right" primary={message}></ListItemText> </Grid> <Grid item xs={1} className={classes.avatar}> <Avatar alt="Alice" src="https://material-ui.com/static/images/avatar/3.jpg" className={classes.small} /> </Grid> <Grid item xs={1} className={classes.avatar}> <ListItemText secondary="09:30"></ListItemText> </Grid> </> ); } }; return chat.map(({ name, message }, index) => ( <div> <ListItem key={index} className={classes.listItem}> <Grid container alignItems="center"> {html(name, message)} </Grid> </ListItem> </div> )); }; return ( <> <Box style={{ maxHeight: 120, overflow: "auto" }}>{renderChat()}</Box> <Divider /> <Grid container> <form onSubmit={handleSubmit(onSubmit)}> <Grid item> <Box width={1}> <TextField name="message" id="outlined-basic-email" label="Type Something" inputRef={register({ required: true })} /> <TextField type="hidden" name="name" width="5%" value={foo} inputRef={register({ required: true })} /> <Fab color="primary" aria-label="Talk" type="submit"> <SendIcon /> </Fab> </Box> {errors.message && errors.message.type === "required" && <span>This is required</span>} {errors.message && errors.message.type === 
"maxLength" && <span>Max length exceeded</span>} </Grid> </form> </Grid> </> ); }; export default Chat;
from django import forms
from django.forms import ModelForm
from django.contrib.auth.forms import UserCreationForm
from .models.mus_models import Song
from .models.vbt_models import Backtest

###############################################################
# MUS FORMS


class SongSearchForm(forms.Form):
    """Free-form song search by title and artist.

    NOTE(review): ``Meta`` has no effect on a plain ``forms.Form`` (Django
    only reads it on ``ModelForm``); it is kept here unchanged, but either
    remove it or convert this class to a ``ModelForm``.
    """

    title = forms.CharField(label='Song', max_length=100)
    artist = forms.CharField(label='Artist', max_length=100)

    class Meta:
        model = Song
        fields = ("title", "artist")


###############################################################
# VBT FORMS


class SymbolForm(forms.Form):
    """Single ticker-symbol input used to start a backtest."""

    symbol = forms.CharField(label='Symbol', max_length=12)

    class Meta:
        model = Backtest
        # Bug fix: ("symbol") is just the string "symbol" — parentheses alone
        # do not make a tuple.  Django expects a sequence of field names here.
        fields = ("symbol",)


class BacktestForm(ModelForm):
    """Backtest configuration: entry/exit signal choice plus starting cash."""

    # Signal choices shared by the entry and exit selectors.
    ENTRY = (
        ('rsi', 'RSI'),
        ('obv', 'OBV'),
        ('ma', '200-SMA'),
    )
    EXIT = (
        ('rsi', 'RSI'),
        ('obv', 'OBV'),
        ('ma', '200-SMA'),
    )

    # FOREIGN KEY -> BACKTEST
    # ENTRY SIGNAL
    backtest_entry = forms.ChoiceField(choices=ENTRY)  # INPUT FIELD REQUIRED
    # EXIT SIGNAL
    backtest_exit = forms.ChoiceField(choices=EXIT)  # INPUT FIELD REQUIRED
    # PARAMS
    init_cash = forms.IntegerField()  # INPUT FIELD REQUIRED

    class Meta:
        model = Backtest
        fields = (
            'backtest_entry',
            'backtest_exit',
            'init_cash',
        )
import { Pattern } from './pattern';

/**
 * Timestamp ISO 8601 validator class.
 *
 * Declaration only — implementation lives in the compiled module.
 */
export declare class Timestamp extends Pattern {
  /**
   * Min timestamp value; undefined when the range has no lower bound.
   */
  private min?;
  /**
   * Max timestamp value; undefined when the range has no upper bound.
   */
  private max?;
  /**
   * Determines whether the specified timestamp is within the range.
   * @param timestamp Timestamp to be tested.
   * @return Returns true when the timestamp is between the range, false otherwise.
   */
  private testRange;
  /**
   * Default constructor.
   * @param min Min date value.
   * @param max Max date value.
   */
  constructor(min?: Date, max?: Date);
  /**
   * Validator name.
   */
  readonly name: string;
  /**
   * Validate the specified data.
   * @param data Data to be validated.
   * @returns Returns true when the data is valid, false otherwise.
   */
  validate(data: any): boolean;
}
<filename>BiblioSpringUrjc/src/main/java/com/BiblioSpring/controllers/ContactoController.java<gh_stars>0 package com.BiblioSpring.controllers; import javax.annotation.PostConstruct; import javax.servlet.http.HttpServletRequest; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.domain.Pageable; import org.springframework.stereotype.Controller; import org.springframework.ui.Model; import org.springframework.web.bind.annotation.RequestMapping; import com.BiblioSpring.entity.Contacto; import com.BiblioSpring.repository.ContactosRepository; @Controller public class ContactoController { @Autowired private ContactosRepository repository; @PostConstruct public void init() { repository.save(new Contacto("Pepe", "<NAME>", "XXXX", "eww")); repository.save(new Contacto("Juan", "<NAME>", "XXXX", "sdad")); } @RequestMapping("/BiblioSpring/nuevoContacto") public String addcontacto(Model model, Pageable page, HttpServletRequest request) { model.addAttribute("contacto", repository.findAll(page)); model.addAttribute("admin", request.isUserInRole("ADMIN")); model.addAttribute("user", request.isUserInRole("USER")); return "nuevoContacto"; } @RequestMapping("/BiblioSpring/Contacto/nuevo") public String nuevoContacto(Model model, Contacto contacto, HttpServletRequest request) { repository.save(contacto); model.addAttribute("admin", request.isUserInRole("ADMIN")); model.addAttribute("user", request.isUserInRole("USER")); return "contacto_guardado"; } @RequestMapping("/BiblioSpring/ver_Contacto") public String viewContacto(Model model, Pageable page, HttpServletRequest request) { model.addAttribute("contactos", repository.findAll(page)); model.addAttribute("admin", request.isUserInRole("ADMIN")); model.addAttribute("user", request.isUserInRole("USER")); return "ver_Contacto"; } }
#!/bin/bash
# Fetch the RN2483 Arduino library into the sibling Arduino "libraries" folder.
readonly REPO_URL="https://github.com/jpmeijers/RN2483-Arduino-Library.git"
readonly TARGET_DIR="../libraries/RN2483-Arduino-Library/"

git clone "${REPO_URL}" "${TARGET_DIR}"
/* * Copyright (c) 2012-2015 VMware, Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy * of the License at http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, without * warranties or conditions of any kind, EITHER EXPRESS OR IMPLIED. See the * License for the specific language governing permissions and limitations * under the License. */ /** * A helper class to create Web Socket Api instances. */ ClientSupportApiFactory = {}; ClientSupportApiFactory._factoryByComponentId = {}; ClientSupportApiFactory.registerApi = function(name, factoryMethod) { ClientSupportApiFactory._factoryByComponentId[name] = factoryMethod; }; ClientSupportApiFactory.createApi = function(name, conn) { var fn = ClientSupportApiFactory._factoryByComponentId[name]; if (fn == null) { throw new Error("Api not found: " + name); } return fn(conn); };
// Mailchimp subscribe widget: wires up client-side validation, a JSONP submit
// to Mailchimp, and the download modal.  Expects jQuery plus a validate()
// plugin on the form (presumably jQuery Validation — confirm it is loaded),
// and the #mc-embedded-subscribe-form markup to exist in the page.
var subscribe = function () {

    var submit = $('#mc-embedded-subscribe'),     // submit button
        input = $('#mce-EMAIL'),                  // email text input
        $resultElement = $('#resultElement'),     // status-message container
        $content_form = $('.content-email'),      // wrapper shown while the form is usable
        $form = $("#mc-embedded-subscribe-form"); // the Mailchimp embed form

    /* Validate that a valid email address was entered ------------------------------*/
    submit.on('click', function (e) {
        e.preventDefault();
        // console.log('clicked');
        $form.validate();
        if ($form.valid()) {
            // console.log('valid');
            submitSubscribeForm($form, $resultElement);
            // Clear the input shortly after submit (the JSONP response may
            // still be pending at this point).
            setTimeout(function () {
                input.val('');
            }, 1000);
        } else {
            // console.log('no valid');
            $('#mce-EMAIL').focus();
        }
    });

    // Submit the form data to Mailchimp via JSONP and render the outcome
    // into $resultElement.
    function submitSubscribeForm($form, $resultElement) {
        $.ajax({
            type: "GET",
            url: $form.attr("action"),
            data: $form.serialize(),
            cache: false,
            dataType: "jsonp",
            jsonp: "c", // trigger MailChimp to return a JSONP response
            contentType: "application/json; charset=utf-8",
            error: function (error) {
                // According to jquery docs, this is never called for cross-domain JSONP requests
            },
            success: function (data) {
                if (data.result != "success") {
                    // Failure path; prefer Mailchimp's own message when available.
                    var message = data.msg || "Sorry. Unable to subscribe. Please try again later.";
                    $content_form.fadeOut();
                    $resultElement.addClass("is-error").fadeIn();
                    // "Already subscribed" is re-styled as a success for UX purposes.
                    if (data.msg && data.msg.indexOf("already subscribed") >= 0) {
                        message = "<p>You're already subscribed. Thank you.</p> <br> <a href='http://keyicons.com/public/keyicons_library.zip' class='btn btn-secondary'>Download Library again</a> ";
                        $content_form.fadeOut();
                        $resultElement.removeClass("is-error").addClass("is-success").fadeIn();
                    }
                    $resultElement.html(message);
                } else {
                    $content_form.fadeOut();
                    $resultElement.removeClass("is-error").addClass("is-success");
                    $resultElement.html("Thank you!<br>You must confirm the subscription in your inbox.").fadeIn();
                }
            }
        });
    }

    // Download-modal wiring -------------------------------------------------
    var modal = $('.Modal'),
        btn_download = $('.js-download'),
        btn_close = $('#Modal-Close'); // NOTE(review): declared but never used here

    btn_download.on('click', function (e) {
        e.preventDefault();
        modal.addClass('is-open');
        $('#mce-EMAIL').focus();
    });

    // Reset the modal and the form back to their initial state.
    function closeModal() {
        modal.removeClass('is-open');
        $('#mce-EMAIL').val('');
        $('#mce-NAME').val('');
        $content_form.fadeIn();
        $resultElement.fadeOut();
    }

    // Close on any "#closer" link click, or on Escape while the modal is open.
    $(document)
        .on('click', '[href="#closer"]', function (e) {
            e.preventDefault();
            closeModal();
        })
        .keydown(function (tecla) { // "tecla" is Spanish for "key" (the key event)
            if (modal.hasClass('is-open')) {
                if (tecla.keyCode == 27) { // 27 = Escape
                    closeModal();
                }
            }
        });
}

module.exports = subscribe;
#include <stdio.h>
#include <string.h>
#include <stdlib.h>
#include <limits.h>

/*
 * Scans each file named on the command line for wiki-style anchors
 * ("<a href=\"/wiki/...") and prints the value of each anchor's
 * title="..." attribute, one per line.  Anchors that contain a nested
 * anchor before their closing tag are skipped.
 */

/* Opening-anchor marker searched for in the input. */
#define WIKI_TAG "<a href=\"/wiki/"

/*
 * Read an entire stream into a heap-allocated, NUL-terminated buffer.
 * Returns NULL on allocation failure; the caller owns (and must free)
 * the result.
 *
 * Replaces the original `char s[INT_MAX/2]` local (~1 GiB on the stack),
 * which overflows the stack on virtually every platform.
 */
static char *read_entire_file(FILE *fp) {
    size_t cap = 8192;
    size_t len = 0;
    char *buf = malloc(cap);
    if (buf == NULL) {
        return NULL;
    }
    size_t n;
    while ((n = fread(buf + len, 1, cap - len - 1, fp)) > 0) {
        len += n;
        if (len + 1 >= cap) {
            size_t newcap = cap * 2;
            char *tmp = realloc(buf, newcap);
            if (tmp == NULL) {
                free(buf);
                return NULL;
            }
            buf = tmp;
            cap = newcap;
        }
    }
    buf[len] = '\0';
    return buf;
}

int main(int argc, char *argv[]) {
    const size_t tag_len = strlen(WIKI_TAG); /* 15; original hard-coded 16 */

    for (int i = 1; i < argc; i++) {
        const char *fname = argv[i];

        /* "r", not "r+": the file is only read, never written. */
        FILE *fp = fopen(fname, "r");
        if (fp == NULL) {
            /* Same message as before, but to stderr so stdout stays clean. */
            fprintf(stderr, "ERROR: There is an error with the file.");
            return -1;
        }

        char *haystack = read_entire_file(fp);
        fclose(fp); /* the original leaked the FILE handle */
        if (haystack == NULL) {
            fprintf(stderr, "ERROR: There is an error with the file.");
            return -1;
        }

        char *cursor = haystack;
        while (1) {
            /* Next opening anchor. */
            char *open = strstr(cursor, WIKI_TAG);
            if (open == NULL) {
                break;
            }
            /* Matching close tag. */
            char *close = strstr(open, "</a>");
            if (close == NULL) {
                break;
            }

            /* Skip anchors that contain another anchor before their close
               tag.  The original copied the span into a VLA and searched the
               copy (with an out-of-bounds labRat[sizelabRat]='\0' write);
               comparing pointers is equivalent and needs no copy. */
            char *nested = strstr(open + tag_len, WIKI_TAG);
            if (nested != NULL && nested < close) {
                cursor = open + tag_len;
                continue;
            }

            /* Extract the title="..." value, if present within this anchor.
               The original called strstr(innerOpen, ...) without checking
               innerOpen for NULL — undefined behavior for anchors lacking a
               title attribute — and did not bound the search to this anchor. */
            char *t_open = strstr(open, "title=\"");
            if (t_open != NULL && t_open < close) {
                char *t_close = strstr(t_open + 7, "\">");
                if (t_close != NULL && t_close <= close) {
                    /* Print the span in place; no copy, no off-by-one. */
                    printf("%.*s \n", (int)(t_close - (t_open + 7)), t_open + 7);
                }
            }

            cursor = open + tag_len;
        }

        free(haystack);
    }
    return 0;
}
#pragma once

namespace BF {
	// Compass-quadrant selector used by the quad-tree code; presumably names
	// which child quadrant of a node a position maps to, with None as the
	// "no quadrant" sentinel — confirm at the usage sites.
	enum class QuadTreeDirection {
		None,
		NorthEast,
		NorthWest,
		SouthEast,
		SouthWest
	};
}
<filename>src/energyplus/ForwardTranslator/ForwardTranslateHumidifierSteamGas.cpp<gh_stars>0
/***********************************************************************************************************************
*  OpenStudio(R), Copyright (c) 2008-2021, Alliance for Sustainable Energy, LLC, and other contributors. All rights reserved.
*
*  Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
*  following conditions are met:
*
*  (1) Redistributions of source code must retain the above copyright notice, this list of conditions and the following
*  disclaimer.
*
*  (2) Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
*  disclaimer in the documentation and/or other materials provided with the distribution.
*
*  (3) Neither the name of the copyright holder nor the names of any contributors may be used to endorse or promote products
*  derived from this software without specific prior written permission from the respective party.
*
*  (4) Other than as required in clauses (1) and (2), distributions in any form of modifications or other derivative works
*  may not use the "OpenStudio" trademark, "OS", "os", or any other confusingly similar designation without specific prior
*  written permission from Alliance for Sustainable Energy, LLC.
*
*  THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER(S) AND ANY CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
*  INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
*  DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER(S), ANY CONTRIBUTORS, THE UNITED STATES GOVERNMENT, OR THE UNITED
*  STATES DEPARTMENT OF ENERGY, NOR ANY OF THEIR EMPLOYEES, BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
*  EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
*  USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
*  STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
*  ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
***********************************************************************************************************************/

#include "../ForwardTranslator.hpp"
#include "../../model/HumidifierSteamGas.hpp"
#include "../../model/Schedule.hpp"
#include "../../model/Curve.hpp"
#include <utilities/idd/Humidifier_Steam_Gas_FieldEnums.hxx>
#include "../../utilities/idd/IddEnums.hpp"
#include <utilities/idd/IddEnums.hxx>

using namespace openstudio::model;

namespace openstudio {

namespace energyplus {

  // Translates an OpenStudio HumidifierSteamGas model object into an
  // EnergyPlus Humidifier:Steam:Gas IdfObject, copying each set model field
  // onto the corresponding IDD field.  Unset optional fields are left blank.
  boost::optional<IdfObject> ForwardTranslator::translateHumidifierSteamGas(HumidifierSteamGas& modelObject) {
    OptionalDouble d;          // scratch: optional numeric field value
    OptionalString s;          // scratch: optional string field value
    OptionalModelObject temp;  // scratch: optional related model object

    // Name: creates, registers, and names the target IdfObject.
    IdfObject idfObject = createRegisterAndNameIdfObject(openstudio::IddObjectType::Humidifier_Steam_Gas, modelObject);

    // Availability Schedule Name: translate the schedule first, then refer to it by name.
    if ((temp = modelObject.availabilitySchedule())) {
      if (boost::optional<IdfObject> _schedule = translateAndMapModelObject(temp.get())) {
        idfObject.setString(Humidifier_Steam_GasFields::AvailabilityScheduleName, _schedule->name().get());
      }
    }

    // Rated Capacity
    // NOTE(review): "Autosize" is written first and a hard value second, so a
    // hard value overwrites the autosize string, whereas Rated Gas Use Rate
    // below uses value-first if/else ordering — confirm both are intended.
    if (modelObject.isRatedCapacityAutosized()) {
      idfObject.setString(Humidifier_Steam_GasFields::RatedCapacity, "Autosize");
    }
    if ((d = modelObject.ratedCapacity())) {
      idfObject.setDouble(Humidifier_Steam_GasFields::RatedCapacity, d.get());
    }

    // Rated Gas Use Rate: hard value wins; otherwise autosize if requested.
    if ((d = modelObject.ratedGasUseRate())) {
      idfObject.setDouble(Humidifier_Steam_GasFields::RatedGasUseRate, d.get());
    } else if (modelObject.isRatedGasUseRateAutosized()) {
      idfObject.setString(Humidifier_Steam_GasFields::RatedGasUseRate, "Autosize");
    }

    // Thermal Efficiency
    if ((d = modelObject.thermalEfficiency())) {
      idfObject.setDouble(Humidifier_Steam_GasFields::ThermalEfficiency, d.get());
    }

    // Thermal Efficiency Modifier Curve Name: translate the curve, refer by name.
    if (boost::optional<model::Curve> curve = modelObject.thermalEfficiencyModifierCurve()) {
      if (boost::optional<IdfObject> _curve = translateAndMapModelObject(curve.get())) {
        idfObject.setString(Humidifier_Steam_GasFields::ThermalEfficiencyModifierCurveName, _curve->name().get());
      }
    }

    // Rated Fan Power
    if ((d = modelObject.ratedFanPower())) {
      idfObject.setDouble(Humidifier_Steam_GasFields::RatedFanPower, d.get());
    }

    // Auxiliary Electric Power
    if ((d = modelObject.auxiliaryElectricPower())) {
      idfObject.setDouble(Humidifier_Steam_GasFields::AuxiliaryElectricPower, d.get());
    }

    // Air Inlet Node Name
    temp = modelObject.inletModelObject();
    if (temp) {
      idfObject.setString(Humidifier_Steam_GasFields::AirInletNodeName, temp->name().get());
    }

    // Air Outlet Node Name
    temp = modelObject.outletModelObject();
    if (temp) {
      idfObject.setString(Humidifier_Steam_GasFields::AirOutletNodeName, temp->name().get());
    }

    // Water Storage Tank Name
    // not currently used

    // Inlet Water Temperature Option
    if ((s = modelObject.inletWaterTemperatureOption())) {
      idfObject.setString(Humidifier_Steam_GasFields::InletWaterTemperatureOption, s.get());
    }

    return idfObject;
  }

}  // namespace energyplus

}  // namespace openstudio
# Source to configure pip CI builds
#
# Fixes vs. the original: every test is quoted.  The unquoted form
# `[ ${VAR} ]` breaks when the value contains spaces, and
# `[ ${USEMPL} = true ]` is a shell syntax error ("unary operator expected")
# whenever USEMPL is unset.

# Wheelhouse for various packages missing from pypi.
EXTRA_WHEELS="https://5cf40426d9f06eb7461d-6fe47d9331aba7cd62fc36c7196769e4.ssl.cf2.rackcdn.com"
# Wheelhouse for daily builds of some packages.
PRE_WHEELS="https://7933911d6844c6c53a7d-47bd50c35cd79bd838daf386af554a83.ssl.cf2.rackcdn.com"
EXTRA_PIP_FLAGS="--find-links=$EXTRA_WHEELS"

# Build package list to avoid empty package=versions; only needed for versioned packages
PKGS="${PKGS} numpy"; if [ -n "${NUMPY}" ]; then PKGS="${PKGS}==${NUMPY}"; fi
PKGS="${PKGS} scipy"; if [ -n "${SCIPY}" ]; then PKGS="${PKGS}==${SCIPY}"; fi
PKGS="${PKGS} patsy"; if [ -n "${PATSY}" ]; then PKGS="${PKGS}==${PATSY}"; fi
PKGS="${PKGS} pandas"; if [ -n "${PANDAS}" ]; then PKGS="${PKGS}==${PANDAS}"; fi
PKGS="${PKGS} Cython"; if [ -n "${CYTHON}" ]; then PKGS="${PKGS}==${CYTHON}"; fi

if [ "${USEMPL}" = true ]; then
    PKGS="${PKGS} matplotlib"
    if [ -n "${MATPLOTLIB}" ]; then
        PKGS="${PKGS}==${MATPLOTLIB}"
    fi
fi

if [ "${PIP_PRE}" = true ]; then
    EXTRA_PIP_FLAGS="--pre $EXTRA_PIP_FLAGS --find-links $PRE_WHEELS"
fi

# travis osx python support is limited.  Use homebrew/pyenv to install python.
if [ "$TRAVIS_OS_NAME" = "osx" ]; then
    brew update && brew upgrade pyenv
    eval "$(pyenv init -)"
    pyenv install "$PYTHON"
    pyenv local "$PYTHON"
    pyenv global "$PYTHON"
    pyenv shell "$PYTHON"
fi

# Install in our own virtualenv
python -m pip install --upgrade pip
pip install --upgrade virtualenv
virtualenv --python=python venv
source venv/bin/activate
python --version # just to check
python -m pip install --upgrade pip
pip install ${EXTRA_PIP_FLAGS} ${PKGS} ${DEPEND_ALWAYS}
/**
 * Copyright (c) 2008 <NAME>. All rights reserved.
 *
 * This file is part of XBee-API.
 *
 * XBee-API is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * XBee-API is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with XBee-API.  If not, see <http://www.gnu.org/licenses/>.
 */

package com.rapplogic.xbee.api;

import com.rapplogic.xbee.util.IntArrayOutputStream;

/**
 * Supported by both series 1 (10C8 firmware and later) and series 2.
 * Allows AT commands to be sent to a remote radio.
 * <p/>
 * Warning: this command may not return a response if the remote radio is unreachable.
 * You will need to set your own timeout when waiting for a response from this command,
 * or you may wait forever.
 * <p/>
 * API ID: 0x17
 * <p/>
 * @author andrew
 *
 */
public class RemoteAtRequest extends AtCommand {

	// 64-bit destination address of the remote radio.
	private XBeeAddress64 remoteAddr64;
	// 16-bit destination address of the remote radio.
	private XBeeAddress16 remoteAddr16;
	// When true the remote radio applies the setting immediately; when false
	// it is queued until an AC (Apply Changes) command is sent.
	private boolean applyChanges;

	/**
	 * Creates a Remote AT request for setting an AT command on a remote XBee
	 * <p/>
	 * Note: When setting a value, you must set applyChanges for the setting to
	 * take effect.  When sending several requests, you can wait until the last
	 * request before setting applyChanges=true.
	 * <p/>
	 * @param frameId frame id echoed back in the response
	 * @param remoteAddress64 64-bit address of the target radio
	 * @param remoteAddress16 16-bit address of the target radio
	 * @param applyChanges set to true if setting a value or issuing a command that changes the state of the radio (e.g. FR); not applicable to query requests
	 * @param command two character AT command to set or query
	 * @param value if null then the current setting will be queried
	 */
	public RemoteAtRequest(int frameId, XBeeAddress64 remoteAddress64, XBeeAddress16 remoteAddress16, boolean applyChanges, String command, int[] value) {
		super(command, value);
		this.setFrameId(frameId);
		this.remoteAddr64 = remoteAddress64;
		this.remoteAddr16 = remoteAddress16;
		this.applyChanges = applyChanges;
	}

	// Convenience overload: wraps a single int value into an int[].
	public RemoteAtRequest(int frameId, XBeeAddress64 remoteAddress64, XBeeAddress16 remoteAddress16, boolean applyChanges, String command, int value) {
		this(frameId, remoteAddress64, remoteAddress16, applyChanges, command, new int[] {value});
	}

	/**
	 * Creates a Remote AT request for querying the current value of an AT command on a remote XBee
	 *
	 * @param frameId frame id echoed back in the response
	 * @param remoteAddress64 64-bit address of the target radio
	 * @param remoteAddress16 16-bit address of the target radio
	 * @param applyChanges apply-changes flag (not meaningful for queries)
	 * @param command two character AT command to query
	 */
	public RemoteAtRequest(int frameId, XBeeAddress64 remoteAddress64, XBeeAddress16 remoteAddress16, boolean applyChanges, String command) {
		this(frameId, remoteAddress64, remoteAddress16, applyChanges, command, null);
	}

	/**
	 * Abbreviated Constructor for setting an AT command on a remote XBee.
	 * This defaults to the DEFAULT_FRAME_ID, and true for apply changes
	 *
	 * @param dest64 64-bit address of the target radio
	 * @param command two character AT command to set
	 * @param value setting value
	 */
	public RemoteAtRequest(XBeeAddress64 dest64, String command, int[] value) {
		// Note: the ZNET broadcast also works for series 1.  We could also use ffff but then that wouldn't work for series 2
		this(XBeeRequest.DEFAULT_FRAME_ID, dest64, XBeeAddress16.ZNET_BROADCAST, true, command, value);
	}

	// Convenience overload: wraps a single int value into an int[].
	public RemoteAtRequest(XBeeAddress64 dest64, String command, int value) {
		this(XBeeRequest.DEFAULT_FRAME_ID, dest64, XBeeAddress16.ZNET_BROADCAST, true, command, new int[] {value});
	}

	/**
	 * Abbreviated Constructor for querying the value of an AT command on a remote XBee.
	 * This defaults to the DEFAULT_FRAME_ID, and false for apply changes
	 *
	 * @param dest64 64-bit address of the target radio
	 * @param command two character AT command to query
	 */
	public RemoteAtRequest(XBeeAddress64 dest64, String command) {
		this(dest64, command, null);
		// apply changes doesn't make sense for a query
		this.setApplyChanges(false);
	}

	/**
	 * Creates a Remote AT instance for querying the value of an AT command on a remote XBee,
	 * by specifying the 16-bit address.  Uses the broadcast address for 64-bit address (00 00 00 00 00 00 ff ff)
	 * <p/>
	 * Defaults are: frame id: 1, applyChanges: false
	 *
	 * @param dest16 16-bit address of the target radio
	 * @param command two character AT command to query
	 */
	public RemoteAtRequest(XBeeAddress16 dest16, String command) {
		this(dest16, command, null);
		// apply changes doesn't make sense for a query
		this.setApplyChanges(false);
	}

	/**
	 * Creates a Remote AT instance for setting the value of an AT command on a remote XBee,
	 * by specifying the 16-bit address and value.  Uses the broadcast address for 64-bit address (00 00 00 00 00 00 ff ff)
	 * <p/>
	 * Defaults are: frame id: 1, applyChanges: true
	 *
	 * @param remoteAddress16 16-bit address of the target radio
	 * @param command two character AT command to set
	 * @param value setting value
	 */
	public RemoteAtRequest(XBeeAddress16 remoteAddress16, String command, int[] value) {
		this(XBeeRequest.DEFAULT_FRAME_ID, XBeeAddress64.BROADCAST, remoteAddress16, true, command, value);
	}

	// Convenience overload: wraps a single int value into an int[].
	public RemoteAtRequest(XBeeAddress16 remoteAddress16, String command, int value) {
		this(XBeeRequest.DEFAULT_FRAME_ID, XBeeAddress64.BROADCAST, remoteAddress16, true, command, new int[] {value});
	}

	/**
	 * Serializes this request into the API frame payload:
	 * api id, frame id, 64-bit address, 16-bit address, command options,
	 * two command characters, then the optional setting value.
	 */
	public int[] getFrameData() {
		IntArrayOutputStream out = new IntArrayOutputStream();

		// api id
		out.write(this.getApiId().getValue());
		// frame id (arbitrary byte that will be sent back with ack)
		out.write(this.getFrameId());

		out.write(remoteAddr64.getAddress());
		// 16-bit address
		out.write(remoteAddr16.getAddress());

		// TODO S2B remote command options
		// TODO 0x40 is a bit field, ugh
		// 0x01 - Disable retries and route repair
		// 0x02 - Apply changes.
		// 0x20 - Enable APS encryption (if EE=1)
		// 0x40 - Use the extended transmission timeout

		if (applyChanges) {
			out.write(2);
		} else {
			// queue changes -- don't forget to send AC command
			out.write(0);
		}

		// command name ascii [1]
		out.write((int) this.getCommand().substring(0, 1).toCharArray()[0]);
		// command name ascii [2]
		out.write((int) this.getCommand().substring(1, 2).toCharArray()[0]);

		if (this.getValue() != null) {
			out.write(this.getValue());
		}

		return out.getIntArray();
	}

	// Always REMOTE_AT_REQUEST (API ID 0x17).
	public ApiId getApiId() {
		return ApiId.REMOTE_AT_REQUEST;
	}

	public XBeeAddress64 getRemoteAddr64() {
		return remoteAddr64;
	}

	public void setRemoteAddr64(XBeeAddress64 remoteAddr64) {
		this.remoteAddr64 = remoteAddr64;
	}

	public XBeeAddress16 getRemoteAddr16() {
		return remoteAddr16;
	}

	public void setRemoteAddr16(XBeeAddress16 remoteAddr16) {
		this.remoteAddr16 = remoteAddr16;
	}

	public boolean isApplyChanges() {
		return applyChanges;
	}

	public void setApplyChanges(boolean applyChanges) {
		this.applyChanges = applyChanges;
	}

	public String toString() {
		return super.toString() +
			",remoteAddr64=" + this.remoteAddr64 +
			",remoteAddr16=" + this.remoteAddr16 +
			",applyChanges=" + this.applyChanges;
	}
}
# Evaluate the 512+512+512-LPMI model (1536-token tokenizer config) on the
# WikiText-103 raw validation set.  The --augmented/--augmentation_function/
# --eval_function flags are presumably project-specific additions to
# HuggingFace's run_language_modeling.py — verify in that script.
python transformers/examples/language-modeling/run_language_modeling.py --model_name_or_path train-outputs/512+512+512-LPMI/model --tokenizer_name model-configs/1536-config --eval_data_file ../data/wikitext-103-raw/wiki.valid.raw --output_dir eval-outputs/512+512+512-LPMI/512+512+512-N-VB-first-256 --do_eval --per_device_eval_batch_size 1 --dataloader_drop_last --augmented --augmentation_function remove_all_but_nouns_and_verbs_first_third_sixth --eval_function penultimate_sixth_eval
<gh_stars>0 /* * File: HangmanCanvas.java * ------------------------ * This file keeps track of the Hangman display. */ import java.applet.AudioClip; import acm.graphics.*; import acm.util.MediaTools; public class HangmanCanvasExtension extends GCanvas { private static final int HEART_WIDTH = 20; private static final int HEART_SEP = 10; private static final double HEART_HEIGHT = 20; AudioClip loseClip = MediaTools.loadAudioClip("loseMusic.au"); AudioClip winClip = MediaTools.loadAudioClip("winMusic.au"); private GLabel note = new GLabel(""); // Add gif and music when you lose game public void losingMessage(String word) { removeAll(); addFoundWords(word); GImage loose = new GImage("cimaka.gif"); loose.setSize(LOOSING_P_WIDTH, LOOSING_P_HEIGHT); add(loose, 0, LOOSING_P_FROM_TOP); loseClip.play(); } // Add gif and music when you win game public void winningMessage(String word) { removeAll(); addFoundWords(word); GImage win = new GImage("moriarti.gif"); win.setSize(WINNING_P_WIDTH, WINNING_P_HEIGHT); add(win, 0, WINNING_P_FROM_TOP); winClip.play(); } /** Resets the display so that only the scaffold appears */ private int index = 0; public void reset() { removeAll(); note.setLabel(""); addScaffold(); createHearts(); loseClip.stop(); winClip.stop(); index = 0; } GImage[] heart = new GImage[8]; private void createHearts() { for (int i = 0; i < heart.length; i++) { heart[i] = new GImage("heart.png", i * (HEART_WIDTH + HEART_SEP), 0); heart[i].setSize(HEART_WIDTH, HEART_HEIGHT); add(heart[i]); heart[i].setVisible(true); } } private void addScaffold() { int width = getWidth() / 2 - BEAM_LENGTH; add(new GLine(width, SCAFFOLD_OFFSET_FROM_TOP, width, SCAFFOLD_HEIGHT)); add(new GLine(width, SCAFFOLD_OFFSET_FROM_TOP, width + BEAM_LENGTH, SCAFFOLD_OFFSET_FROM_TOP)); add(new GLine(width + BEAM_LENGTH, SCAFFOLD_OFFSET_FROM_TOP, width + BEAM_LENGTH, SCAFFOLD_OFFSET_FROM_TOP + ROPE_LENGTH)); } /** * Updates the word on the screen to correspond to the current state of the * game. 
The argument string shows what letters have been guessed so far;
 * unguessed letters are indicated by hyphens. */
public void displayWord(String word) {
	addFoundWords(word);
}

// Label reused for the partially guessed word shown at the bottom of the canvas.
private GLabel label = new GLabel("");

// Renders the guessed-so-far word as space-separated characters.
// StringBuilder avoids the quadratic cost of String += in a loop.
private void addFoundWords(String word) {
	StringBuilder spaced = new StringBuilder();
	for (int i = 0; i < word.length(); i++) {
		spaced.append(word.charAt(i)).append(' ');
	}
	label.setLabel(spaced.toString());
	label.setFont("HELVETICA-26-BOLDITALIC");
	add(label, FOUND_WORDS_FROM_LEFT, getHeight() - FOUND_WORDS_FROM_BOTTOM);
}

/**
 * Updates the display to correspond to an incorrect guess by the user.
 * Calling this method causes the next body part to appear on the scaffold,
 * adds the letter to the list of incorrect guesses at the bottom of the
 * window, and hides one heart.
 */
public void noteIncorrectGuess(char letter) {
	addUnfoundWords(letter);
	// addNewPartOfBody() advances `index`, so the heart lookup below uses
	// the post-increment value, exactly as before.
	addNewPartOfBody();
	// `heart` and `index` are fields declared above this span.
	heart[(heart.length - index) % 8].setVisible(false);
}

// Draws the body part for the current miss count, then advances the counter.
private void addNewPartOfBody() {
	switch (index) {
	case 0: addHead(); break;
	case 1: addBody(); break;
	case 2: addLeftHand(); break;
	case 3: addRightHand(); break;
	case 4: addLeftLeg(); break;
	case 5: addRightLeg(); break;
	case 6: addLeftFoot(); break;
	case 7: addRightFoot(); break;
	}
	index = (index + 1) % 8;
}

// ---- animated drawing helpers ------------------------------------------
// The original code repeated the same long coordinate expressions in every
// method; they are factored into the three y-helpers and growLine() below.

// y coordinate of the hips (bottom of the body).
private int hipY() {
	return SCAFFOLD_OFFSET_FROM_TOP + ROPE_LENGTH + 2 * HEAD_RADIUS + BODY_LENGTH;
}

// y coordinate of the feet (bottom of the legs).
private int footY() {
	return hipY() + LEG_LENGTH;
}

// y coordinate where the arms attach to the body.
private int armY() {
	return SCAFFOLD_OFFSET_FROM_TOP + ROPE_LENGTH + 2 * HEAD_RADIUS + ARM_OFFSET_FROM_HEAD;
}

// Adds a zero-length line at (startX, startY) and animates it growing
// `length` steps in direction (stepX, stepY), pausing `delay` ms per step.
private void growLine(int startX, int startY, int length, int stepX, int stepY, int delay) {
	GLine line = new GLine(startX, startY, startX, startY);
	add(line);
	for (int i = 0; i <= length; i++) {
		line.setEndPoint(startX + i * stepX, startY + i * stepY);
		line.pause(delay);
	}
}

// Add each part of body by using animation to make game more funny
// ----------------------------------------------------------------
private void addRightFoot() {
	growLine(getWidth() / 2 + HIP_WIDTH, footY(), FOOT_LENGTH, 1, 0, SLOW_OTHER_PARTS);
}

private void addLeftFoot() {
	growLine(getWidth() / 2 - HIP_WIDTH, footY(), FOOT_LENGTH, -1, 0, SLOW_OTHER_PARTS);
}

private void addRightLeg() {
	// hip grows outward from the body, then the leg grows upward from the foot
	growLine(getWidth() / 2, hipY(), HIP_WIDTH, 1, 0, SLOW_OTHER_PARTS);
	growLine(getWidth() / 2 + HIP_WIDTH, footY(), LEG_LENGTH, 0, -1, SLOW_OTHER_PARTS);
}

private void addLeftLeg() {
	growLine(getWidth() / 2, hipY(), HIP_WIDTH, -1, 0, SLOW_OTHER_PARTS);
	growLine(getWidth() / 2 - HIP_WIDTH, footY(), LEG_LENGTH, 0, -1, SLOW_OTHER_PARTS);
}

private void addRightHand() {
	// upper arm grows outward, then the lower arm grows upward from the hand
	growLine(getWidth() / 2, armY(), UPPER_ARM_LENGTH, 1, 0, SLOW_OTHER_PARTS);
	growLine(getWidth() / 2 + UPPER_ARM_LENGTH, armY() + LOWER_ARM_LENGTH, LOWER_ARM_LENGTH, 0, -1, SLOW_OTHER_PARTS);
}

private void addLeftHand() {
	growLine(getWidth() / 2, armY(), UPPER_ARM_LENGTH, -1, 0, SLOW_OTHER_PARTS);
	growLine(getWidth() / 2 - UPPER_ARM_LENGTH, armY() + LOWER_ARM_LENGTH, LOWER_ARM_LENGTH, 0, -1, SLOW_OTHER_PARTS);
}

// Grows the body outward from its centre, wobbling it at every step.
private void addBody() {
	int finalY = SCAFFOLD_OFFSET_FROM_TOP + ROPE_LENGTH + 2 * HEAD_RADIUS;
	GLine body = new GLine(1, 1, 1, 1);
	add(body);
	for (int i = 1; i <= BODY_LENGTH / 2; i++) {
		body.setStartPoint(getWidth() / 2, finalY + BODY_LENGTH / 2 - i);
		body.setEndPoint(getWidth() / 2, finalY + BODY_LENGTH / 2 + i);
		turnBody(body);
	}
}

// One "wobble" cycle: briefly swings the body sideways and restores it,
// twice, pausing between each move. Statement order is significant.
private void turnBody(GLine body) {
	double centreX = getWidth() / 2;
	double bodyLength = body.getHeight();
	double centreY = body.getStartPoint().getY() + bodyLength / 2;
	body.setStartPoint(centreX - bodyLength / 2, centreY);
	body.setEndPoint(centreX - bodyLength, centreY);
	body.pause(SLOW_BODY);
	body.setStartPoint(centreX, centreY - bodyLength / 2);
	body.setEndPoint(centreX, centreY + bodyLength / 2);
	body.pause(SLOW_BODY);
	body.setStartPoint(centreX - bodyLength / 2, centreY);
	body.setEndPoint(centreX + bodyLength / 2, centreY);
	body.pause(SLOW_BODY);
	body.setStartPoint(centreX, centreY - bodyLength / 2);
	body.setEndPoint(centreX, centreY + bodyLength / 2);
	body.pause(SLOW_BODY);
}

// Grows the head as an expanding circle hanging from the rope.
private void addHead() {
	GOval head = new GOval(1, 1);
	add(head);
	for (int radius = 1; radius <= HEAD_RADIUS; radius++) {
		head.setSize(2 * radius, 2 * radius);
		head.setLocation(getWidth() / 2 - head.getWidth() / 2,
				SCAFFOLD_OFFSET_FROM_TOP + ROPE_LENGTH + HEAD_RADIUS - radius);
		head.pause(SLOW_HEAD);
	}
}

// Appends the wrong letter to the incorrect-guess list at the bottom of the
// window. `note` is a field declared above this span.
private void addUnfoundWords(char letter) {
	note.setLabel(note.getLabel() + letter);
	note.setFont("HELVETICA-15-ITALIC");
	add(note, UNFOUND_WORDS_FROM_LEFT, getHeight() - UNFOUND_WORDS_FROM_BOTTOM);
}

/* Constants for the simple version of the picture (in pixels) */
private static final int SCAFFOLD_HEIGHT = 360;
private static final int SCAFFOLD_OFFSET_FROM_TOP = 35;
private static final int FOUND_WORDS_FROM_BOTTOM = 70;
private static final int FOUND_WORDS_FROM_LEFT = 30;
private static final int UNFOUND_WORDS_FROM_BOTTOM = 40;
private static final int UNFOUND_WORDS_FROM_LEFT = 30;
private static final int BEAM_LENGTH = 144;
private static final int ROPE_LENGTH = 18;
private static final int HEAD_RADIUS = 36;
private static final int BODY_LENGTH = 144;
private static final int ARM_OFFSET_FROM_HEAD = 28;
private static final int UPPER_ARM_LENGTH = 72;
private static final int LOWER_ARM_LENGTH = 44;
private static final int HIP_WIDTH = 36;
private static final int LEG_LENGTH = 108;
private static final int FOOT_LENGTH = 28;
private static final int WINNING_P_WIDTH = 377;
private static final int WINNING_P_HEIGHT = 212;
private static final int WINNING_P_FROM_TOP = 50;
private static final int LOOSING_P_WIDTH = 377;
private static final int LOOSING_P_HEIGHT = 211;
private static final int LOOSING_P_FROM_TOP = 50;
private static final int SLOW_HEAD = 30;
private static final int SLOW_BODY = 5;
private static final int SLOW_OTHER_PARTS = 12;
}
package org.jfteam;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.context.ConfigurableApplicationContext;

/**
 * Spring Boot application entry point.
 */
@SpringBootApplication
public class AppStarter {

    private static final Logger LOGGER = LoggerFactory.getLogger(AppStarter.class);

    /**
     * Boots the application context and logs a startup confirmation.
     */
    public static void main(String[] args) {
        final ConfigurableApplicationContext applicationContext =
                SpringApplication.run(AppStarter.class, args);
        LOGGER.info("Application: {} was started!..........", applicationContext.getApplicationName());
    }
}
# Build: force-update snapshot dependencies (-U), clean, compile; skip tests.
mvn -f ./pom.xml -U clean compile -DskipTests
"""Read a course name and an example lesson from stdin, then print a tiny syllabus."""

# Redundant parentheses around input() removed; names follow PEP 8 snake_case.
course = input()
example = input()
# NOTE(review): the prompt doubles as a hard-coded start date — confirm intent.
day1 = input("Started day:12.10.20")

print("Course:", course)
print("Lesson 1: Introduction")
print("Lesson 2:", example)
print(day1)
#!/usr/bin/env bash
# Run a Jenkins 2.121.1 container on the shared infrastructure network.
#  - UI on host port 8081, JNLP agent port 50000
#  - JENKINS_HOME persisted under $HOME/my-docker-volumes/jenkins-data
#  - host Docker socket mounted so Jenkins jobs can drive Docker
# Fix: the $HOME-based mount path is now quoted so it survives spaces.
docker run \
  -u root \
  --rm \
  -d \
  --name jenkins \
  --network infrastructure-net \
  -p 8081:8080 \
  -p 50000:50000 \
  -v "$HOME/my-docker-volumes/jenkins-data":/var/jenkins_home:rw \
  -v /var/run/docker.sock:/var/run/docker.sock \
  jenkins/jenkins:2.121.1
#!/bin/bash
# Interactive setup for a new Hummingbot Gateway docker instance.
# Fixes in this revision:
#  - GMT_OFFSET was computed with `date +%z` but the env file hard-coded
#    '+0800'; the computed value is now written.
#  - $ENV_FILE / instance-name expansions quoted; $LIMIT referenced with `$`.
#  - "Use the this" prompt typo corrected.

# init
echo
echo
echo "=============== CREATE A NEW GATEWAY INSTANCE ==============="
echo
echo
echo "ℹ️ Press [ENTER] for default values:"
echo
echo

# Gateway image tag (latest/development)
read -p "   Enter Gateway version you want to use [latest/development] (default = \"latest\") >>> " GATEWAY_TAG
if [ "$GATEWAY_TAG" == "" ]
then
  GATEWAY_TAG="latest"
fi

# Ask the user for the name of the new Gateway instance
read -p "   Enter a name for your new Gateway instance (default = \"gateway-instance\") >>> " GATEWAY_INSTANCE_NAME
if [ "$GATEWAY_INSTANCE_NAME" == "" ]
then
  GATEWAY_INSTANCE_NAME="gateway-instance"
fi

# Ask the user for the hummingbot data folder location; re-prompts until the
# folder exists and contains conf, certs and logs subfolders.
prompt_hummingbot_data_path () {
  read -p "   Enter the location where your Hummingbot files are located (example: /Users/hbot/hummingbot_files) >>> " FOLDER
  if [ "$FOLDER" == "" ]
  then
    prompt_hummingbot_data_path
  else
    # make relative paths absolute
    if [[ ${FOLDER::1} != "/" ]]; then
      FOLDER=$PWD/$FOLDER
    fi
    if [ ! -d "$FOLDER" ]; then
      echo "‼️ Directory not found in ${FOLDER}"
      prompt_hummingbot_data_path
    else
      if [[ -d "$FOLDER/hummingbot_conf" && -d "$FOLDER/hummingbot_certs" ]]; then
        CERT_PATH=$FOLDER/hummingbot_certs
      else
        echo "‼️ hummingbot_conf & hummingbot_certs directory missing from path $FOLDER"
        prompt_hummingbot_data_path
      fi
      if [[ -f "$FOLDER/hummingbot_certs/server_cert.pem" && -f "$FOLDER/hummingbot_certs/server_key.pem" && -f "$FOLDER/hummingbot_certs/ca_cert.pem" ]]; then
        echo
      else
        echo "‼️ SSL Certs missing from path $FOLDER"
        echo "   Required: server_cert.pem, server_key.pem, ca_cert.pem"
        prompt_hummingbot_data_path
      fi
      # get log folder path
      if [ -d "$FOLDER/hummingbot_logs" ]; then
        LOG_PATH=$FOLDER/hummingbot_logs
      else
        echo "‼️ hummingbot_logs directory missing from path $FOLDER"
        prompt_hummingbot_data_path
      fi
    fi
  fi
}
prompt_hummingbot_data_path

# Pull the instance id out of the Hummingbot global config.
read_global_config () {
  GLOBAL_CONFIG="$FOLDER/hummingbot_conf/conf_global.yml"
  # check for missing config
  if [[ ! -f "$GLOBAL_CONFIG" ]]
  then
    echo "‼️ conf_global.yml missing from path $GLOBAL_CONFIG"
    echo "Error! Unable to continue setup"
    exit
  fi
  while IFS=: read key value || [[ -n "$value" ]]
  do
    # hummingbot instance id
    if [ "$key" == "instance_id" ]
    then
      HUMMINGBOT_INSTANCE_ID="$(echo -e "${value}" | tr -d '[:space:]')"
    fi
  done < "$GLOBAL_CONFIG"
}
read_global_config

# prompt to setup balancer, uniswap
prompt_ethereum_setup () {
  read -p "   Do you want to setup Balancer or Uniswap? [Y/N] (default \"Y\") >>> " PROCEED
  if [[ "$PROCEED" == "Y" || "$PROCEED" == "y" || "$PROCEED" == "" ]]
  then
    ETHEREUM_SETUP=true
    echo
    read -p "   Enter Ethereum chain you want to use [mainnet/kovan] (default = \"mainnet\") >>> " ETHEREUM_CHAIN
    # chain selection
    if [ "$ETHEREUM_CHAIN" == "" ]
    then
      ETHEREUM_CHAIN="mainnet"
    fi
    if [[ "$ETHEREUM_CHAIN" != "mainnet" && "$ETHEREUM_CHAIN" != "kovan" ]]
    then
      echo "‼️ ERROR. Unsupported chains (mainnet/kovan). "
      prompt_ethereum_setup
    fi
    # set subgraph url, exchange_proxy
    if [[ "$ETHEREUM_CHAIN" == "mainnet" ]]
    then
      REACT_APP_SUBGRAPH_URL="https://api.thegraph.com/subgraphs/name/balancer-labs/balancer"
      EXCHANGE_PROXY="0x3E66B66Fd1d0b02fDa6C811Da9E0547970DB2f21"
    elif [[ "$ETHEREUM_CHAIN" == "kovan" ]]
    then
      REACT_APP_SUBGRAPH_URL="https://api.thegraph.com/subgraphs/name/balancer-labs/balancer-kovan"
      EXCHANGE_PROXY="0x4e67bf5bD28Dd4b570FBAFe11D0633eCbA2754Ec"
    fi
  fi
}
prompt_ethereum_setup

# prompt for the Ethereum RPC node URL
prompt_ethereum_rpc_setup () {
  if [ "$ETHEREUM_RPC_URL" == "" ]
  then
    read -p "   Enter the Ethereum RPC node URL to connect to >>> " ETHEREUM_RPC_URL
    if [ "$ETHEREUM_RPC_URL" == "" ]
    then
      prompt_ethereum_rpc_setup
    fi
  else
    read -p "   Use this Ethereum RPC node ($ETHEREUM_RPC_URL) setup in Hummingbot client? [Y/N] (default = \"Y\") >>> " PROCEED
    if [[ "$PROCEED" == "Y" || "$PROCEED" == "y" || "$PROCEED" == "" ]]
    then
      echo
    else
      ETHEREUM_RPC_URL=""
      prompt_ethereum_rpc_setup
    fi
  fi
}
prompt_ethereum_rpc_setup

# prompt to setup ethereum token list
prompt_token_list_source () {
  echo
  echo "   Enter the token list url available at https://tokenlists.org/"
  read -p "   (default = \"https://wispy-bird-88a7.uniswap.workers.dev/?url=http://tokens.1inch.eth.link\") >>> " ETHEREUM_TOKEN_LIST_URL
  if [ "$ETHEREUM_TOKEN_LIST_URL" == "" ]
  then
    echo
    echo "ℹ️ Retrieving config from Hummingbot config file ... "
    ETHEREUM_SETUP=true
    ETHEREUM_TOKEN_LIST_URL=https://wispy-bird-88a7.uniswap.workers.dev/?url=http://tokens.1inch.eth.link
  fi
}
prompt_token_list_source

# prompt to setup eth gas level
prompt_eth_gasstation_gas_level () {
  echo
  read -p "   Enter gas level you want to use for Ethereum transactions (fast, fastest, safeLow, average) (default = \"fast\") >>> " ETH_GAS_STATION_GAS_LEVEL
  if [ "$ETH_GAS_STATION_GAS_LEVEL" == "" ]
  then
    ETH_GAS_STATION_GAS_LEVEL=fast
  elif [[ "$ETH_GAS_STATION_GAS_LEVEL" != "fast" && "$ETH_GAS_STATION_GAS_LEVEL" != "fastest" && "$ETH_GAS_STATION_GAS_LEVEL" != "safeLow" && "$ETH_GAS_STATION_GAS_LEVEL" != "safelow" && "$ETH_GAS_STATION_GAS_LEVEL" != "average" ]]
  then
    prompt_eth_gasstation_gas_level
  fi
}

# prompt to setup eth gas station
prompt_eth_gasstation_setup () {
  echo
  read -p "   Enable dynamic Ethereum gas price lookup? [Y/N] (default = \"Y\") >>> " PROCEED
  if [[ "$PROCEED" == "Y" || "$PROCEED" == "y" || "$PROCEED" == "" ]]
  then
    ENABLE_ETH_GAS_STATION=true
    read -p "   Enter API key for Eth Gas Station (https://ethgasstation.info/) >>> " ETH_GAS_STATION_API_KEY
    if [ "$ETH_GAS_STATION_API_KEY" == "" ]
    then
      prompt_eth_gasstation_setup
    else
      # set gas level
      prompt_eth_gasstation_gas_level
      # set refresh interval
      read -p "   Enter refresh time for Ethereum gas price lookup (in seconds) (default = \"120\") >>> " ETH_GAS_STATION_REFRESH_TIME
      if [ "$ETH_GAS_STATION_REFRESH_TIME" == "" ]
      then
        ETH_GAS_STATION_REFRESH_TIME=120
      fi
    fi
  elif [[ "$PROCEED" == "N" || "$PROCEED" == "n" ]]
  then
    ENABLE_ETH_GAS_STATION=false
    ETH_GAS_STATION_API_KEY=null
    ETH_GAS_STATION_GAS_LEVEL=fast
    ETH_GAS_STATION_REFRESH_TIME=60
    MANUAL_GAS_PRICE=100
  else
    prompt_eth_gasstation_setup
  fi
  echo
}
prompt_eth_gasstation_setup

prompt_balancer_setup () {
  # Ask the user for the Balancer specific settings
  echo "ℹ️ Balancer setting "
  read -p "   Enter the maximum Balancer swap pool (default = \"4\") >>> " BALANCER_MAX_SWAPS
  if [ "$BALANCER_MAX_SWAPS" == "" ]
  then
    BALANCER_MAX_SWAPS="4"
    echo
  fi
}

prompt_uniswap_setup () {
  # Ask the user for the Uniswap specific settings
  echo "ℹ️ Uniswap setting "
  read -p "   Enter the allowed slippage for swap transactions (default = \"1.5\") >>> " UNISWAP_SLIPPAGE
  if [ "$UNISWAP_SLIPPAGE" == "" ]
  then
    UNISWAP_SLIPPAGE="1.5"
    echo
  fi
}

if [[ "$ETHEREUM_SETUP" == true ]]
then
  prompt_balancer_setup
  prompt_uniswap_setup
fi

prompt_xdai_setup () {
  # Ask the user for the XDAI rpc provider
  echo "ℹ️ XDAI setting "
  read -p "   Enter preferred XDAI rpc provider (default = \"https://rpc.xdaichain.com\") >>> " XDAI_PROVIDER
  if [ "$XDAI_PROVIDER" == "" ]
  then
    XDAI_PROVIDER="https://rpc.xdaichain.com"
    echo
  fi
}
prompt_xdai_setup

# Ask the user for the Terra network
prompt_terra_network () {
  echo
  read -p "   Enter Terra chain you want to use [mainnet/testnet] (default = \"mainnet\") >>> " TERRA
  # chain selection
  if [ "$TERRA" == "" ]
  then
    TERRA="mainnet"
  fi
  if [[ "$TERRA" != "mainnet" && "$TERRA" != "testnet" ]]
  then
    echo "‼️ ERROR. Unsupported chains (mainnet/testnet). "
    prompt_terra_network
  fi
  # setup chain params
  if [[ "$TERRA" == "mainnet" ]]
  then
    TERRA_LCD_URL="https://lcd.terra.dev"
    TERRA_CHAIN="columbus-4"
  elif [ "$TERRA" == "testnet" ]
  then
    TERRA_LCD_URL="https://tequila-lcd.terra.dev"
    TERRA_CHAIN="tequila-0004"
  fi
}

prompt_terra_setup () {
  echo
  read -p "   Do you want to setup Terra? [Y/N] (default \"Y\") >>> " PROCEED
  if [[ "$PROCEED" == "Y" || "$PROCEED" == "y" || "$PROCEED" == "" ]]
  then
    TERRA_SETUP=true
    prompt_terra_network
  fi
}
prompt_terra_setup

# setup uniswap config (fixed contract addresses)
UNISWAP_ROUTER="0x7a250d5630B4cF539739dF2C5dAcb4c659F2488D"
UNISWAP_V3_CORE="0x1F98431c8aD98523631AE4a59f267346ea31F984"
UNISWAP_V3_ROUTER="0xE592427A0AEce92De3Edee1F18E0157C05861564"
UNISWAP_V3_NFT_MANAGER="0xC36442b4a4522E871399CD717aBDD847Ab11FE88"

# network setup verifications
if [[ "$ETHEREUM_SETUP" != true && "$TERRA_SETUP" != true ]]
then
  echo
  echo "‼️ ERROR. Balancer/Uniswap & Terra Setup are both not selected. "
  echo "   Setup will not continue."
  exit
fi

# Ask for the Gateway cert passphrase (silent read)
prompt_password () {
  echo
  read -s -p "   Enter the your Gateway cert passphrase configured in Hummingbot >>> " PASSWORD
  if [ "$PASSWORD" == "" ]
  then
    echo
    echo
    echo "‼️ ERROR. Certificates are not empty string. "
    prompt_password
  fi
}
prompt_password

# Get GMT offset from local system time (written to the env file below)
GMT_OFFSET=$(date +%z)

# Check available open port for Gateway, starting at 5000.
# NOTE(review): the grep matches $PORT anywhere in netstat output, so an
# unrelated port containing the digits can cause a skip — harmless, kept.
PORT=5000
LIMIT=$((PORT+1000))
while [[ $PORT -le $LIMIT ]]
do
  if [[ $(netstat -nat | grep "$PORT") ]]; then
    # check another port
    ((PORT = PORT + 1))
  else
    break
  fi
done

echo
echo "ℹ️ Confirm below if the instance and its folders are correct:"
echo
printf "%30s %5s\n" "Gateway instance name:" "$GATEWAY_INSTANCE_NAME"
printf "%30s %5s\n" "Version:" "coinalpha/gateway-api:$GATEWAY_TAG"
echo
printf "%30s %5s\n" "Hummingbot Instance ID:" "$HUMMINGBOT_INSTANCE_ID"
printf "%30s %5s\n" "Ethereum Chain:" "$ETHEREUM_CHAIN"
printf "%30s %5s\n" "Ethereum RPC URL:" "$ETHEREUM_RPC_URL"
printf "%30s %5s\n" "Ethereum Token List URL:" "$ETHEREUM_TOKEN_LIST_URL"
printf "%30s %5s\n" "Manual Gas Price:" "$MANUAL_GAS_PRICE"
printf "%30s %5s\n" "Enable Eth Gas Station:" "$ENABLE_ETH_GAS_STATION"
printf "%30s %5s\n" "Eth Gas Station API:" "$ETH_GAS_STATION_API_KEY"
printf "%30s %5s\n" "Eth Gas Station Level:" "$ETH_GAS_STATION_GAS_LEVEL"
printf "%30s %5s\n" "Eth Gas Station Refresh Interval:" "$ETH_GAS_STATION_REFRESH_TIME"
printf "%30s %5s\n" "Balancer Subgraph:" "$REACT_APP_SUBGRAPH_URL"
printf "%30s %5s\n" "Balancer Exchange Proxy:" "$EXCHANGE_PROXY"
printf "%30s %5s\n" "Balancer Max Swaps:" "$BALANCER_MAX_SWAPS"
printf "%30s %5s\n" "Uniswap Router:" "$UNISWAP_ROUTER"
printf "%30s %5s\n" "Uniswap V3 Core:" "$UNISWAP_V3_CORE"
printf "%30s %5s\n" "Uniswap V3 Router:" "$UNISWAP_V3_ROUTER"
printf "%30s %5s\n" "Uniswap V3 NFT Manager:" "$UNISWAP_V3_NFT_MANAGER"
printf "%30s %5s\n" "Uniswap Allowed Slippage:" "$UNISWAP_SLIPPAGE"
printf "%30s %5s\n" "Terra Chain:" "$TERRA"
printf "%30s %5s\n" "Gateway Log Path:" "$LOG_PATH"
printf "%30s %5s\n" "Gateway Cert Path:" "$CERT_PATH"
printf "%30s %5s\n" "Gateway Port:" "$PORT"
echo

ENV_FILE="$FOLDER/hummingbot_conf/global_conf.yml"
echo "   Writing config to environment file"
echo "" > "$ENV_FILE" # clear existing file data
echo "# gateway-api script generated env" >> "$ENV_FILE"
echo "" >> "$ENV_FILE"
echo "CORE:" >> "$ENV_FILE"
echo "  NODE_ENV: prod" >> "$ENV_FILE"
echo "  PORT: $PORT" >> "$ENV_FILE"
echo "" >> "$ENV_FILE"
echo "HUMMINGBOT_INSTANCE_ID: $HUMMINGBOT_INSTANCE_ID" >> "$ENV_FILE"
# ethereum config
echo "" >> "$ENV_FILE"
echo "# Ethereum Settings" >> "$ENV_FILE"
echo "ETHEREUM_CHAIN: $ETHEREUM_CHAIN" >> "$ENV_FILE"
echo "ETHEREUM_RPC_URL: $ETHEREUM_RPC_URL" >> "$ENV_FILE"
echo "ETHEREUM_TOKEN_LIST_URL: $ETHEREUM_TOKEN_LIST_URL" >> "$ENV_FILE"
echo "" >> "$ENV_FILE"
echo "ENABLE_ETH_GAS_STATION: $ENABLE_ETH_GAS_STATION" >> "$ENV_FILE"
echo "ETH_GAS_STATION_API_KEY: $ETH_GAS_STATION_API_KEY" >> "$ENV_FILE"
echo "ETH_GAS_STATION_GAS_LEVEL: $ETH_GAS_STATION_GAS_LEVEL" >> "$ENV_FILE"
echo "ETH_GAS_STATION_REFRESH_TIME: $ETH_GAS_STATION_REFRESH_TIME" >> "$ENV_FILE"
echo "MANUAL_GAS_PRICE: $MANUAL_GAS_PRICE" >> "$ENV_FILE"
# balancer config
echo "" >> "$ENV_FILE"
echo "# Balancer Settings" >> "$ENV_FILE"
echo "REACT_APP_SUBGRAPH_URL: $REACT_APP_SUBGRAPH_URL" >> "$ENV_FILE" # must used "REACT_APP_SUBGRAPH_URL" for balancer-sor
echo "EXCHANGE_PROXY: '$EXCHANGE_PROXY'" >> "$ENV_FILE"
echo "BALANCER_MAX_SWAPS: $BALANCER_MAX_SWAPS" >> "$ENV_FILE"
# uniswap config
echo "" >> "$ENV_FILE"
echo "# Uniswap Settings" >> "$ENV_FILE"
echo "UNISWAP_ROUTER: '$UNISWAP_ROUTER'" >> "$ENV_FILE"
echo "UNISWAP_V3_CORE: '$UNISWAP_V3_CORE'" >> "$ENV_FILE"
echo "UNISWAP_V3_ROUTER: '$UNISWAP_V3_ROUTER'" >> "$ENV_FILE"
echo "UNISWAP_V3_NFT_MANAGER: '$UNISWAP_V3_NFT_MANAGER'" >> "$ENV_FILE"
echo "UNISWAP_ALLOWED_SLIPPAGE: $UNISWAP_SLIPPAGE" >> "$ENV_FILE"
echo "UNISWAP_NO_RESERVE_CHECK_INTERVAL: 300000" >> "$ENV_FILE"
echo "UNISWAP_PAIRS_CACHE_TIME: 1000" >> "$ENV_FILE"
# terra config
echo "" >> "$ENV_FILE"
echo "# Terra Settings" >> "$ENV_FILE"
echo "TERRA_LCD_URL: $TERRA_LCD_URL" >> "$ENV_FILE"
echo "TERRA_CHAIN: $TERRA_CHAIN" >> "$ENV_FILE"
# perpeptual finance config
echo "" >> "$ENV_FILE"
echo "# Perpeptual Settings" >> "$ENV_FILE"
echo "XDAI_PROVIDER: $XDAI_PROVIDER" >> "$ENV_FILE"
# certs
echo "" >> "$ENV_FILE"
echo "# cert" >> "$ENV_FILE"
echo "CERT_PATH: ./certs" >> "$ENV_FILE"
echo "CERT_PASSPHRASE: $PASSWORD" >> "$ENV_FILE"
# Fix: write the computed offset instead of the hard-coded '+0800'.
echo "GMT_OFFSET: '$GMT_OFFSET'" >> "$ENV_FILE"
echo "" >> "$ENV_FILE"

prompt_proceed () {
  echo
  read -p "   Do you want to proceed with installation? [Y/N] >>> " PROCEED
  if [ "$PROCEED" == "" ]
  then
    prompt_proceed
  else
    if [[ "$PROCEED" != "Y" && "$PROCEED" != "y" ]]
    then
      PROCEED="N"
    fi
  fi
}

# Execute docker commands
create_instance () {
  echo
  echo "Creating Gateway instance ... "
  echo
  # Launch a new instance of the gateway, bound to localhost only
  docker run -d \
    --name "$GATEWAY_INSTANCE_NAME" \
    -p 127.0.0.1:$PORT:$PORT \
    --mount "type=bind,source=$CERT_PATH,destination=/usr/src/app/certs/" \
    --mount "type=bind,source=$LOG_PATH,destination=/usr/src/app/logs/" \
    --mount "type=bind,source=$FOLDER/hummingbot_conf/,destination=/usr/src/app/conf/" \
    coinalpha/gateway-api:$GATEWAY_TAG
}

prompt_proceed
if [[ "$PROCEED" == "Y" || "$PROCEED" == "y" ]]
then
  create_instance
else
  echo "   Aborted"
  echo
fi
<reponame>abwah/hcsshim<gh_stars>0
// Task-service (containerd runtime v2) front end for the hcsshim shim.
// Every RPC below follows the same pattern: recover from panics, open a
// trace span, record request attributes, delegate to the *Internal method,
// and map the error through errdefs.ToGRPC.
package main

import (
	"context"
	"runtime"
	"strings"
	"sync"
	"sync/atomic"

	"github.com/Microsoft/hcsshim/internal/oc"
	"github.com/Microsoft/hcsshim/internal/shimdiag"
	"github.com/containerd/containerd/errdefs"
	"github.com/containerd/containerd/runtime/v2/task"
	google_protobuf1 "github.com/gogo/protobuf/types"
	"go.opencensus.io/trace"
)

// cdevent pairs a containerd event topic with its payload.
type cdevent struct {
	topic string
	event interface{}
}

// Compile-time assertion that *service implements task.TaskService.
var _ = (task.TaskService)(&service{})

type service struct {
	events publisher

	// tid is the original task id to be served. This can either be a single
	// task or represent the POD sandbox task id. The first call to Create MUST
	// match this id or the shim is considered to be invalid.
	//
	// This MUST be treated as readonly for the lifetime of the shim.
	tid string

	// isSandbox specifies if `tid` is a POD sandbox. If `false` the shim will
	// reject all calls to `Create` where `tid` does not match. If `true`
	// multiple calls to `Create` are allowed as long as the workload containers
	// all have the same parent task id.
	//
	// This MUST be treated as readonly for the lifetime of the shim.
	isSandbox bool

	// taskOrPod is either the `pod` this shim is tracking if `isSandbox ==
	// true` or it is the `task` this shim is tracking. If no call to `Create`
	// has taken place yet `taskOrPod.Load()` MUST return `nil`.
	taskOrPod atomic.Value

	// cl is the create lock. Since each shim MUST only track a single task or
	// POD. `cl` is used to create the task or POD sandbox. It SHOULD not be
	// taken when creating tasks in a POD sandbox as they can happen
	// concurrently.
	cl sync.Mutex
}

// State reports the status of a task or exec; response attributes are added
// to the span only on success (resp != nil).
func (s *service) State(ctx context.Context, req *task.StateRequest) (resp *task.StateResponse, err error) {
	defer panicRecover()
	ctx, span := trace.StartSpan(ctx, "State")
	defer span.End()
	defer func() {
		if resp != nil {
			span.AddAttributes(
				trace.StringAttribute("status", resp.Status.String()),
				trace.Int64Attribute("exitStatus", int64(resp.ExitStatus)),
				trace.StringAttribute("exitedAt", resp.ExitedAt.String()))
		}
		oc.SetSpanStatus(span, err)
	}()
	span.AddAttributes(
		trace.StringAttribute("tid", req.ID),
		trace.StringAttribute("eid", req.ExecID))

	r, e := s.stateInternal(ctx, req)
	return r, errdefs.ToGRPC(e)
}

// Create creates the task (or POD sandbox) tracked by this shim.
func (s *service) Create(ctx context.Context, req *task.CreateTaskRequest) (resp *task.CreateTaskResponse, err error) {
	defer panicRecover()
	ctx, span := trace.StartSpan(ctx, "Create")
	defer span.End()
	defer func() {
		if resp != nil {
			span.AddAttributes(trace.Int64Attribute("pid", int64(resp.Pid)))
		}
		oc.SetSpanStatus(span, err)
	}()
	span.AddAttributes(
		trace.StringAttribute("tid", req.ID),
		trace.StringAttribute("bundle", req.Bundle),
		// trace.StringAttribute("rootfs", req.Rootfs), TODO: JTERRY75 -
		// OpenCensus doesnt support slice like our logrus hook
		trace.BoolAttribute("terminal", req.Terminal),
		trace.StringAttribute("stdin", req.Stdin),
		trace.StringAttribute("stdout", req.Stdout),
		trace.StringAttribute("stderr", req.Stderr),
		trace.StringAttribute("checkpoint", req.Checkpoint),
		trace.StringAttribute("parentcheckpoint", req.ParentCheckpoint))

	r, e := s.createInternal(ctx, req)
	return r, errdefs.ToGRPC(e)
}

// Start starts a created task or exec.
func (s *service) Start(ctx context.Context, req *task.StartRequest) (resp *task.StartResponse, err error) {
	defer panicRecover()
	ctx, span := trace.StartSpan(ctx, "Start")
	defer span.End()
	defer func() {
		if resp != nil {
			span.AddAttributes(trace.Int64Attribute("pid", int64(resp.Pid)))
		}
		oc.SetSpanStatus(span, err)
	}()
	span.AddAttributes(
		trace.StringAttribute("tid", req.ID),
		trace.StringAttribute("eid", req.ExecID))

	r, e := s.startInternal(ctx, req)
	return r, errdefs.ToGRPC(e)
}

// Delete removes a task or exec and returns its exit information.
func (s *service) Delete(ctx context.Context, req *task.DeleteRequest) (resp *task.DeleteResponse, err error) {
	defer panicRecover()
	ctx, span := trace.StartSpan(ctx, "Delete")
	defer span.End()
	defer func() {
		if resp != nil {
			span.AddAttributes(
				trace.Int64Attribute("pid", int64(resp.Pid)),
				trace.Int64Attribute("exitStatus", int64(resp.ExitStatus)),
				trace.StringAttribute("exitedAt", resp.ExitedAt.String()))
		}
		oc.SetSpanStatus(span, err)
	}()
	span.AddAttributes(
		trace.StringAttribute("tid", req.ID),
		trace.StringAttribute("eid", req.ExecID))

	r, e := s.deleteInternal(ctx, req)
	return r, errdefs.ToGRPC(e)
}

// Pids lists the processes belonging to a task.
func (s *service) Pids(ctx context.Context, req *task.PidsRequest) (_ *task.PidsResponse, err error) {
	defer panicRecover()
	ctx, span := trace.StartSpan(ctx, "Pids")
	defer span.End()
	defer func() { oc.SetSpanStatus(span, err) }()
	span.AddAttributes(trace.StringAttribute("tid", req.ID))

	r, e := s.pidsInternal(ctx, req)
	return r, errdefs.ToGRPC(e)
}

// Pause suspends a running task.
func (s *service) Pause(ctx context.Context, req *task.PauseRequest) (_ *google_protobuf1.Empty, err error) {
	defer panicRecover()
	ctx, span := trace.StartSpan(ctx, "Pause")
	defer span.End()
	defer func() { oc.SetSpanStatus(span, err) }()
	span.AddAttributes(trace.StringAttribute("tid", req.ID))

	r, e := s.pauseInternal(ctx, req)
	return r, errdefs.ToGRPC(e)
}

// Resume resumes a paused task.
func (s *service) Resume(ctx context.Context, req *task.ResumeRequest) (_ *google_protobuf1.Empty, err error) {
	defer panicRecover()
	ctx, span := trace.StartSpan(ctx, "Resume")
	defer span.End()
	defer func() { oc.SetSpanStatus(span, err) }()
	span.AddAttributes(trace.StringAttribute("tid", req.ID))

	r, e := s.resumeInternal(ctx, req)
	return r, errdefs.ToGRPC(e)
}

// Checkpoint checkpoints a task to the given path.
func (s *service) Checkpoint(ctx context.Context, req *task.CheckpointTaskRequest) (_ *google_protobuf1.Empty, err error) {
	defer panicRecover()
	ctx, span := trace.StartSpan(ctx, "Checkpoint")
	defer span.End()
	defer func() { oc.SetSpanStatus(span, err) }()
	span.AddAttributes(
		trace.StringAttribute("tid", req.ID),
		trace.StringAttribute("path", req.Path))

	r, e := s.checkpointInternal(ctx, req)
	return r, errdefs.ToGRPC(e)
}

// Kill delivers a signal to a task or exec (optionally to all processes).
func (s *service) Kill(ctx context.Context, req *task.KillRequest) (_ *google_protobuf1.Empty, err error) {
	defer panicRecover()
	ctx, span := trace.StartSpan(ctx, "Kill")
	defer span.End()
	defer func() { oc.SetSpanStatus(span, err) }()
	span.AddAttributes(
		trace.StringAttribute("tid", req.ID),
		trace.StringAttribute("eid", req.ExecID),
		trace.Int64Attribute("signal", int64(req.Signal)),
		trace.BoolAttribute("all", req.All))

	r, e := s.killInternal(ctx, req)
	return r, errdefs.ToGRPC(e)
}

// Exec creates an additional process inside an existing task.
func (s *service) Exec(ctx context.Context, req *task.ExecProcessRequest) (_ *google_protobuf1.Empty, err error) {
	defer panicRecover()
	ctx, span := trace.StartSpan(ctx, "Exec")
	defer span.End()
	defer func() { oc.SetSpanStatus(span, err) }()
	span.AddAttributes(
		trace.StringAttribute("tid", req.ID),
		trace.StringAttribute("eid", req.ExecID),
		trace.BoolAttribute("terminal", req.Terminal),
		trace.StringAttribute("stdin", req.Stdin),
		trace.StringAttribute("stdout", req.Stdout),
		trace.StringAttribute("stderr", req.Stderr))

	r, e := s.execInternal(ctx, req)
	return r, errdefs.ToGRPC(e)
}

// DiagExecInHost is a diagnostic RPC that runs a process in the utility VM host.
func (s *service) DiagExecInHost(ctx context.Context, req *shimdiag.ExecProcessRequest) (_ *shimdiag.ExecProcessResponse, err error) {
	defer panicRecover()
	ctx, span := trace.StartSpan(ctx, "DiagExecInHost")
	defer span.End()
	defer func() { oc.SetSpanStatus(span, err) }()
	span.AddAttributes(
		trace.StringAttribute("args", strings.Join(req.Args, " ")),
		trace.StringAttribute("workdir", req.Workdir),
		trace.BoolAttribute("terminal", req.Terminal),
		trace.StringAttribute("stdin", req.Stdin),
		trace.StringAttribute("stdout", req.Stdout),
		trace.StringAttribute("stderr", req.Stderr))

	r, e := s.diagExecInHostInternal(ctx, req)
	return r, errdefs.ToGRPC(e)
}

// ResizePty resizes the pseudo-terminal of a task or exec.
func (s *service) ResizePty(ctx context.Context, req *task.ResizePtyRequest) (_ *google_protobuf1.Empty, err error) {
	defer panicRecover()
	ctx, span := trace.StartSpan(ctx, "ResizePty")
	defer span.End()
	defer func() { oc.SetSpanStatus(span, err) }()
	span.AddAttributes(
		trace.StringAttribute("tid", req.ID),
		trace.StringAttribute("eid", req.ExecID),
		trace.Int64Attribute("width", int64(req.Width)),
		trace.Int64Attribute("height", int64(req.Height)))

	r, e := s.resizePtyInternal(ctx, req)
	return r, errdefs.ToGRPC(e)
}

// CloseIO closes the stdin stream of a task or exec.
func (s *service) CloseIO(ctx context.Context, req *task.CloseIORequest) (_ *google_protobuf1.Empty, err error) {
	defer panicRecover()
	ctx, span := trace.StartSpan(ctx, "CloseIO")
	defer span.End()
	defer func() { oc.SetSpanStatus(span, err) }()
	span.AddAttributes(
		trace.StringAttribute("tid", req.ID),
		trace.StringAttribute("eid", req.ExecID),
		trace.BoolAttribute("stdin", req.Stdin))

	r, e := s.closeIOInternal(ctx, req)
	return r, errdefs.ToGRPC(e)
}

// Update applies resource updates to a running task.
func (s *service) Update(ctx context.Context, req *task.UpdateTaskRequest) (_ *google_protobuf1.Empty, err error) {
	defer panicRecover()
	ctx, span := trace.StartSpan(ctx, "Update")
	defer span.End()
	defer func() { oc.SetSpanStatus(span, err) }()
	span.AddAttributes(trace.StringAttribute("tid", req.ID))

	r, e := s.updateInternal(ctx, req)
	return r, errdefs.ToGRPC(e)
}

// Wait blocks until a task or exec exits and returns its exit information.
func (s *service) Wait(ctx context.Context, req *task.WaitRequest) (resp *task.WaitResponse, err error) {
	defer panicRecover()
	ctx, span := trace.StartSpan(ctx, "Wait")
	defer span.End()
	defer func() {
		if resp != nil {
			span.AddAttributes(
				trace.Int64Attribute("exitStatus", int64(resp.ExitStatus)),
				trace.StringAttribute("exitedAt", resp.ExitedAt.String()))
		}
		oc.SetSpanStatus(span, err)
	}()
	span.AddAttributes(
		trace.StringAttribute("tid", req.ID),
		trace.StringAttribute("eid", req.ExecID))

	r, e := s.waitInternal(ctx, req)
	return r, errdefs.ToGRPC(e)
}

// Stats returns resource-usage metrics for a task.
func (s *service) Stats(ctx context.Context, req *task.StatsRequest) (_ *task.StatsResponse, err error) {
	defer panicRecover()
	ctx, span := trace.StartSpan(ctx, "Stats")
	defer span.End()
	defer func() { oc.SetSpanStatus(span, err) }()
	span.AddAttributes(trace.StringAttribute("tid", req.ID))

	r, e := s.statsInternal(ctx, req)
	return r, errdefs.ToGRPC(e)
}

// Connect reports the shim and task pids plus the shim version.
func (s *service) Connect(ctx context.Context, req *task.ConnectRequest) (resp *task.ConnectResponse, err error) {
	defer panicRecover()
	ctx, span := trace.StartSpan(ctx, "Connect")
	defer span.End()
	defer func() {
		if resp != nil {
			span.AddAttributes(
				trace.Int64Attribute("shimPid", int64(resp.ShimPid)),
				trace.Int64Attribute("taskPid", int64(resp.TaskPid)),
				trace.StringAttribute("version", resp.Version))
		}
		oc.SetSpanStatus(span, err)
	}()
	span.AddAttributes(trace.StringAttribute("tid", req.ID))

	r, e := s.connectInternal(ctx, req)
	return r, errdefs.ToGRPC(e)
}

// Shutdown tears the shim down.
func (s *service) Shutdown(ctx context.Context, req *task.ShutdownRequest) (_ *google_protobuf1.Empty, err error) {
	defer panicRecover()
	ctx, span := trace.StartSpan(ctx, "Shutdown")
	defer span.End()
	defer func() { oc.SetSpanStatus(span, err) }()
	span.AddAttributes(trace.StringAttribute("tid", req.ID))

	r, e := s.shutdownInternal(ctx, req)
	return r, errdefs.ToGRPC(e)
}

// DiagStacks dumps the stacks of all goroutines in the shim, doubling the
// buffer until runtime.Stack fits.
func (s *service) DiagStacks(ctx context.Context, req *shimdiag.StacksRequest) (_ *shimdiag.StacksResponse, err error) {
	defer panicRecover()
	ctx, span := trace.StartSpan(ctx, "DiagStacks")
	defer span.End()
	defer func() { oc.SetSpanStatus(span, err) }()

	buf := make([]byte, 4096)
	for {
		buf = buf[:runtime.Stack(buf, true)]
		if len(buf) < cap(buf) {
			break
		}
		buf = make([]byte, 2*len(buf))
	}
	return &shimdiag.StacksResponse{Stacks: string(buf)}, nil
}
import { useState, useEffect } from 'react';

// Module-level registry of form state, keyed by form id.
const formStateMap = new Map();

// Ensure an entry exists for `formId` WITHOUT clobbering state an earlier
// mount (or another consumer of the same form) already created.
const ensureFormState = (formId) => {
  if (!formStateMap.has(formId)) {
    formStateMap.set(formId, {
      formProps: {
        subaction: 'initialValue', // Example initial value
      },
    });
  }
  return formStateMap.get(formId);
};

/**
 * Subscribes to a slice of the shared form state.
 *
 * @param {string}   formId   key into the shared form-state map
 * @param {Function} selector picks the slice of state to return
 * @returns the selected slice, re-read whenever the state changes
 */
const useFormStore = (formId, selector) => {
  // Lazy initializer guarantees the map entry exists before the selector
  // runs — the original called selector(formStateMap.get(formId)) on first
  // render, i.e. selector(undefined), before the effect had populated it.
  const [selectedProperty, setSelectedProperty] = useState(() =>
    selector(ensureFormState(formId))
  );

  useEffect(() => {
    ensureFormState(formId);

    const handleChange = () => {
      setSelectedProperty(selector(formStateMap.get(formId)));
    };

    // TODO: subscribe to real form-state change events and invoke
    // handleChange; return the matching unsubscribe function here.
    const unsubscribe = () => {};

    // Sync once in case the state changed between render and this effect.
    handleChange();

    return () => {
      unsubscribe();
      // NOTE(review): deliberately NOT deleting the map entry here — the
      // original `formStateMap.delete(formId)` destroyed shared state
      // whenever any consumer unmounted or its deps changed. Confirm the
      // intended lifecycle before adding explicit teardown.
    };
  }, [formId, selector]);

  return selectedProperty;
};

export default useFormStore;
# test/factories/product/nfe/transporte/volume.rb
# Factory for BrNfe transport volume records used by the product NF-e tests.
FactoryGirl.define do
  factory :product_transporte_volume, class: BrNfe::Product::Nfe::Transporte::Volume do
    quantidade 1 # a single volume by default
  end
end
// MenuDuamView.java
//-------------------------------------------------------------------------------------------------------------------------------------------//
// Megasoft Project 1, Version 2.0 of the Municipal Tax System with databases: IPTU, ITBI and ISS, with a basic REFIS implementation;
// Author: <NAME>;
//-------------------------------------------------------------------------------------------------------------------------------------------//

// View package;
package View;

//-------------------------------------------------------------------------------------------------------------------------------------------//
// MenuDuamView class: window offering the two DUAM operations (register / consult);
public class MenuDuamView extends javax.swing.JFrame {

    //---------------------------------------------------------------------------------------------------------------------------------------//
    // Special methods

    // MenuDuamView constructor
    /**
     * Creates new form MenuDuam
     */
    public MenuDuamView() {
        initComponents();
    }

    //---------------------------------------------------------------------------------------------------------------------------------------//
    // Methods

    /**
     * This method is called from within the constructor to initialize the form.
     * WARNING: Do NOT modify this code. The content of this method is always
     * regenerated by the Form Editor.
     */
    @SuppressWarnings("unchecked")
    // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
    private void initComponents() {

        IconImage = new javax.swing.JLabel();
        TitleMunicipalTaxSystem = new javax.swing.JTextField();
        BotaoMenuCadastrar = new javax.swing.JButton();
        BotaoMenuConsultar = new javax.swing.JButton();

        setDefaultCloseOperation(javax.swing.WindowConstants.EXIT_ON_CLOSE);
        setBackground(new java.awt.Color(51, 51, 51));
        setResizable(false);

        IconImage.setHorizontalAlignment(javax.swing.SwingConstants.CENTER);
        IconImage.setIcon(new javax.swing.ImageIcon(getClass().getResource("/View/image/index.jpeg"))); // NOI18N

        TitleMunicipalTaxSystem.setEditable(false);
        TitleMunicipalTaxSystem.setBackground(new java.awt.Color(51, 51, 51));
        TitleMunicipalTaxSystem.setFont(new java.awt.Font("Dialog", 0, 18)); // NOI18N
        TitleMunicipalTaxSystem.setForeground(new java.awt.Color(255, 255, 255));
        TitleMunicipalTaxSystem.setHorizontalAlignment(javax.swing.JTextField.CENTER);
        TitleMunicipalTaxSystem.setText("Sistema Tributário Municipal");
        TitleMunicipalTaxSystem.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                TitleMunicipalTaxSystemActionPerformed(evt);
            }
        });

        BotaoMenuCadastrar.setBackground(new java.awt.Color(51, 51, 51));
        BotaoMenuCadastrar.setFont(new java.awt.Font("Dialog", 1, 14)); // NOI18N
        BotaoMenuCadastrar.setForeground(new java.awt.Color(255, 255, 255));
        BotaoMenuCadastrar.setText("Cadastrar DUAM");
        BotaoMenuCadastrar.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                BotaoMenuCadastrarActionPerformed(evt);
            }
        });

        BotaoMenuConsultar.setBackground(new java.awt.Color(51, 51, 51));
        BotaoMenuConsultar.setFont(new java.awt.Font("Dialog", 1, 14)); // NOI18N
        BotaoMenuConsultar.setForeground(new java.awt.Color(255, 255, 255));
        BotaoMenuConsultar.setText("Consultar DUAM");
        BotaoMenuConsultar.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                BotaoMenuConsultarActionPerformed(evt);
            }
        });

        javax.swing.GroupLayout layout = new javax.swing.GroupLayout(getContentPane());
        getContentPane().setLayout(layout);
        layout.setHorizontalGroup(
            layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(layout.createSequentialGroup()
                .addContainerGap()
                .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                    .addGroup(layout.createSequentialGroup()
                        .addComponent(IconImage)
                        .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                        .addComponent(TitleMunicipalTaxSystem, javax.swing.GroupLayout.DEFAULT_SIZE, 292, Short.MAX_VALUE))
                    .addComponent(BotaoMenuConsultar, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                    .addComponent(BotaoMenuCadastrar, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
                .addContainerGap())
        );
        layout.setVerticalGroup(
            layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(layout.createSequentialGroup()
                .addContainerGap()
                .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING, false)
                    .addComponent(IconImage, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                    .addComponent(TitleMunicipalTaxSystem))
                .addGap(18, 18, 18)
                .addComponent(BotaoMenuCadastrar)
                .addGap(18, 18, 18)
                .addComponent(BotaoMenuConsultar)
                .addContainerGap(35, Short.MAX_VALUE))
        );

        pack();
    }// </editor-fold>//GEN-END:initComponents

    // No action for the (read-only) title field.
    private void TitleMunicipalTaxSystemActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_TitleMunicipalTaxSystemActionPerformed
        // TODO add your handling code here:
    }//GEN-LAST:event_TitleMunicipalTaxSystemActionPerformed

    // Opens the DUAM registration window.
    private void BotaoMenuCadastrarActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_BotaoMenuCadastrarActionPerformed
        // TODO add your handling code here:
        MenuCadastroView menuCadas = new MenuCadastroView();
        menuCadas.setVisible(true);
    }//GEN-LAST:event_BotaoMenuCadastrarActionPerformed

    // Opens the DUAM consultation window.
    private void BotaoMenuConsultarActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_BotaoMenuConsultarActionPerformed
        // TODO add your handling code here:
        MenuConsultaView menuCons = new MenuConsultaView();
        menuCons.setVisible(true);
    }//GEN-LAST:event_BotaoMenuConsultarActionPerformed

    /**
     * @param args the command line arguments
     */
    public static void main(String args[]) {
        /* Set the Nimbus look and feel */
        //<editor-fold defaultstate="collapsed" desc=" Look and feel setting code (optional) ">
        /* If Nimbus (introduced in Java SE 6) is not available, stay with the default look and feel.
         * For details see http://download.oracle.com/javase/tutorial/uiswing/lookandfeel/plaf.html
         */
        try {
            for (javax.swing.UIManager.LookAndFeelInfo info : javax.swing.UIManager.getInstalledLookAndFeels()) {
                if ("Nimbus".equals(info.getName())) {
                    javax.swing.UIManager.setLookAndFeel(info.getClassName());
                    break;
                }
            }
        } catch (ClassNotFoundException ex) {
            java.util.logging.Logger.getLogger(MenuDuamView.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
        } catch (InstantiationException ex) {
            java.util.logging.Logger.getLogger(MenuDuamView.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
        } catch (IllegalAccessException ex) {
            java.util.logging.Logger.getLogger(MenuDuamView.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
        } catch (javax.swing.UnsupportedLookAndFeelException ex) {
            java.util.logging.Logger.getLogger(MenuDuamView.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
        }
        //</editor-fold>

        /* Create and display the form */
        java.awt.EventQueue.invokeLater(new Runnable() {
            public void run() {
                new MenuDuamView().setVisible(true);
            }
        });
    }

    //---------------------------------------------------------------------------------------------------------------------------------------//
    // Attributes

    // Variables declaration - do not modify//GEN-BEGIN:variables
    private javax.swing.JButton BotaoMenuCadastrar;
    private javax.swing.JButton BotaoMenuConsultar;
    private javax.swing.JLabel IconImage;
    private javax.swing.JTextField TitleMunicipalTaxSystem;
    // End of variables declaration//GEN-END:variables

    //---------------------------------------------------------------------------------------------------------------------------------------//
    // End of class MenuDuamView;
}
//-------------------------------------------------------------------------------------------------------------------------------------------//
import requests
from bs4 import BeautifulSoup
import urllib.parse
import json
import discord
import youtube_dl
import asyncio


class YT:
    """Scrape YouTube search results for a query string."""

    def __init__(self, search_terms: str, max_results=None):
        # Raw query text; URL-encoded later in search().
        self.search_terms = search_terms
        # Optional cap on the number of returned videos.
        self.max_results = max_results
        # The search runs eagerly at construction time.
        self.videos = self.search()

    def search(self):
        """Fetch the results page and return the parsed video list."""
        encoded_search = urllib.parse.quote(self.search_terms)
        BASE_URL = "https://youtube.com"
        url = f"{BASE_URL}/results?search_query={encoded_search}&pbj=1"
        response = BeautifulSoup(requests.get(url).text, "html.parser")
        results = self.parse_html(response)
        # Truncate only when a cap was requested and exceeded.
        if self.max_results is not None and len(results) > self.max_results:
            return results[:self.max_results]
        return results

    def parse_html(self, soup):
        """Extract {title, link, id} dicts from result-page anchors.

        NOTE(review): relies on server-rendered ".yt-uix-tile-link"
        markup; confirm current YouTube result pages still emit it.
        """
        results = []
        for video in soup.select(".yt-uix-tile-link"):
            if video["href"].startswith("/watch?v="):
                video_info = {
                    "title": video["title"],
                    "link": video["href"],
                    # Video id: everything after the first '=' in the href.
                    "id": video["href"][video["href"].index("=")+1:]
                }
                results.append(video_info)
        return results

    def to_dict(self):
        """Return the raw list of video dicts."""
        return self.videos

    def to_json(self):
        """Return the results wrapped in a {"videos": ...} JSON string."""
        return json.dumps({"videos": self.videos})

    def export(self):
        """Return [title, link] pairs, one per result."""
        results = []
        for video in self.videos:
            title = video["title"]
            link = video["link"]
            results.append([title, link])
        return results


# Suppress youtube_dl's "please report this issue" boilerplate in errors.
youtube_dl.utils.bug_reports_message = lambda: ''

# Download/extract settings shared by the module-level ytdl instance below.
ytdl_format_options = {
    'format': 'bestaudio/best',
    'outtmpl': '%(extractor)s-%(id)s-%(title)s.%(ext)s',
    'restrictfilenames': True,
    'noplaylist': True,
    'nocheckcertificate': True,
    'ignoreerrors': False,
    'logtostderr': False,
    'quiet': True,
    'no_warnings': True,
    'default_search': 'auto',
    'source_address': '0.0.0.0'  # bind to ipv4 since ipv6 addresses cause issues sometimes
}

ffmpeg_options = {
    'options': '-vn'  # audio only: drop any video stream
}

ytdl = youtube_dl.YoutubeDL(ytdl_format_options)


class YTDLSource(discord.PCMVolumeTransformer):
    """discord.py audio source backed by a youtube_dl extraction."""

    def __init__(self, source, *, data, volume=0.5):
        super().__init__(source, volume)

        # Full youtube_dl info dict for the extracted media.
        self.data = data

        self.title = data.get('title')
        self.url = data.get('url')

    @classmethod
    async def from_url(cls, url, *, loop=None, stream=False):
        """Build a source from a URL; extraction runs in an executor so the
        event loop is not blocked. With stream=True nothing is downloaded."""
        loop = loop or asyncio.get_event_loop()
        data = await loop.run_in_executor(None, lambda: ytdl.extract_info(url, download=not stream))

        if 'entries' in data:
            # take first item from a playlist
            data = data['entries'][0]

        filename = data['url'] if stream else ytdl.prepare_filename(data)
        return cls(discord.FFmpegPCMAudio(filename, **ffmpeg_options), data=data)
#!/bin/bash
# Start the sharding-jdbc-onlinebank-test service in the background,
# refusing to launch if an instance is already running.
SERVER_NAME=sharding-jdbc-onlinebank-test

# Resolve the deploy directory (parent of this script's directory);
# quoting keeps paths with spaces intact, $() replaces legacy backticks.
cd "$(dirname "$0")"
cd ..
DEPLOY_DIR=$(pwd)

LOGS_DIR=${DEPLOY_DIR}/logs
# -p: no error if the directory already exists (replaces the test+mkdir pair)
mkdir -p "${LOGS_DIR}"
STDOUT_FILE=${LOGS_DIR}/stdout.log

# Detect an already-running copy: any java process whose command line
# mentions this deploy directory.
PIDS=$(ps -ef | grep java | grep "$DEPLOY_DIR" | grep -v grep | awk '{print $2}')
if [ -n "$PIDS" ]; then
    echo "ERROR: The $SERVER_NAME already started!"
    echo "PID: $PIDS"
    exit 1
fi

# The trailing /* is expanded by the JVM, not the shell — keep it quoted.
CLASS_PATH=.:${DEPLOY_DIR}/conf:${DEPLOY_DIR}/lib/*
JAVA_OPTS=" -Djava.awt.headless=true -Djava.net.preferIPv4Stack=true "
JAVA_MEM_OPTS=" -server -Xmx2g -Xms2g -Xmn1g -Xss256k -XX:+DisableExplicitGC -XX:+UseConcMarkSweepGC -XX:+CMSParallelRemarkEnabled -XX:LargePageSizeInBytes=128m -XX:+UseFastAccessorMethods -XX:+UseCMSInitiatingOccupancyOnly -XX:CMSInitiatingOccupancyFraction=70 "
MAIN_CLASS=io.shardingsphere.example.jdbc.poc.POCBootStarter

echo "Starting the $SERVER_NAME ..."

# Pass through up to two extra JVM options from the command line.
if [ $# == 1 ]; then
    JAVA_OPTS=${JAVA_OPTS}" "$1
fi
if [ $# == 2 ]; then
    JAVA_OPTS=${JAVA_OPTS}" "$1" "$2
fi

nohup java ${JAVA_OPTS} ${JAVA_MEM_OPTS} -classpath "${CLASS_PATH}" ${MAIN_CLASS} > "${STDOUT_FILE}" 2>&1 &
sleep 1
echo "Please check the STDOUT file: $STDOUT_FILE"
<filename>web/src/components/ui/TextAreaField.tsx import { useField } from "formik"; import React, { InputHTMLAttributes } from "react"; type TextAreaFieldProps = React.DetailedHTMLProps< React.TextareaHTMLAttributes<HTMLTextAreaElement>, HTMLTextAreaElement > & { name: string; label: string; textarea?: boolean; }; export const TextAreaField: React.FC<TextAreaFieldProps> = ({ label, ...props }) => { const [field, { error }] = useField(props as any); return ( <div className={"input__container"}> <label className={"input__label"} htmlFor={field.name}> {label} </label> <textarea style={{ borderColor: !!error ? "red" : "", maxWidth: "659px", }} className={"input"} {...field} {...props} id={field.name} placeholder={props.placeholder} /> {error ? <span className={"input__error"}>{error}</span> : null} </div> ); };
#!/bin/bash
# Fetch and unpack build prerequisites: a Windows Node.js distribution and
# the SQLite amalgamation sources.
set -e -x

# Work from the repository root (parent of this script's directory);
# quoting keeps a checkout path containing spaces intact.
cd "$(dirname "$0")/.."

wget https://nodejs.org/dist/v8.11.1/node-v8.11.1-win-x64.zip
unzip node-v8.11.1-win-x64.zip

wget https://www.sqlite.org/2019/sqlite-amalgamation-3290000.zip
unzip sqlite-amalgamation-3290000.zip
// ReactReduxRouting
import { APIKey, Action, RootURL } from '../constants'
import Axios from 'axios'

// Action creator for fetching the post list. The pending axios promise is
// attached as `payload`, to be resolved by promise-aware middleware.
export default function () {
  const url = `${RootURL}/posts${APIKey}`
  return {
    payload: Axios.get(url),
    type: Action.POST_LIST_FETCH,
  }
}
<gh_stars>1-10 // Clean up tokens after emphasis and strikethrough postprocessing: // merge adjacent text nodes into one and re-calculate all token levels // // This is necessary because initially emphasis delimiter markers (*, _, ~) // are treated as their own separate text tokens. Then emphasis rule either // leaves them as text (needed to merge with adjacent text) or turns them // into opening/closing tags (which messes up levels inside). // 'use strict'; module.exports = function fragments_join(state) { var curr, last, level = 0, tokens = state.tokens, max = state.tokens.length; for (curr = last = 0; curr < max; curr++) { // re-calculate levels after emphasis/strikethrough turns some text nodes // into opening/closing tags if (tokens[curr].nesting < 0) level--; // closing tag tokens[curr].level = level; if (tokens[curr].nesting > 0) level++; // opening tag if (tokens[curr].type === 'text' && curr + 1 < max && tokens[curr + 1].type === 'text') { // collapse two adjacent text nodes tokens[curr + 1].content = tokens[curr].content + tokens[curr + 1].content; } else { if (curr !== last) { tokens[last] = tokens[curr]; } last++; } } if (curr !== last) { tokens.length = last; } };
#!/bin/bash
# Submit the selection step (step 9) of the gcncc pipeline to Slurm.
# Fail fast if the cd fails (e.g. $SCRATCH unset) instead of submitting
# from the wrong directory; quoting protects paths containing spaces.
cd "$SCRATCH/gcncc.bioinformatics/slurm/process/" || exit 1
sbatch 9_selection.slurm
#!/bin/bash -e
# Build the test image and run the RSpec suite inside it.
docker build -t sshd-service-test .

# "$PWD" is quoted so a checkout path containing spaces still produces a
# single, valid -v mount argument.
docker run --rm \
    -v "$PWD:/src" \
    sshd-service-test \
    bash -c 'rspec --format documentation spec/'
from collections import Counter


def most_common_string(l):
    """Return the most frequent string in *l*.

    Ties are broken by first occurrence (Counter preserves first-encounter
    order, matching the original "first maximum wins" behavior).
    Returns None for an empty list.
    """
    if not l:
        return None
    return Counter(l).most_common(1)[0][0]


l1 = ['a', 'b', 'a', 'c', 'a', 'b']
most_common = most_common_string(l1)
print(most_common)  # Output: a
#!/usr/bin/env bash
# Validates and installs node prerequisites (distro, kernel, OverlayFS,
# Docker on XFS/overlay, utilities, SELinux off) for RHEL/CentOS >= 7.2.
# CoreOS exits early as already compliant.

# Exit on error, unset variable, or error in pipe chain
set -o errexit -o nounset -o pipefail

# For setenforce & xfs_info
PATH=$PATH:/usr/sbin:/sbin

echo "Validating distro..."
distro="$(source /etc/os-release && echo "${ID}")"
if [[ "${distro}" == 'coreos' ]]; then
  echo "Distro: CoreOS"
  echo "CoreOS includes all prerequisites by default." >&2
  exit 0
elif [[ "${distro}" == 'rhel' ]]; then
  echo "Distro: RHEL"
elif [[ "${distro}" == 'centos' ]]; then
  echo "Distro: CentOS"
else
  echo "Distro: ${distro}"
  echo "Error: Distro ${distro} is not supported. Only CoreOS, RHEL, and CentOS are supported." >&2
  exit 1
fi

echo "Validating distro version..."
# CentOS & RHEL < 7 have inconsistent release file locations
distro_major_version="$(source /etc/os-release && echo "${VERSION_ID}" | sed -e 's/^\([0-9][0-9]*\).*$/\1/')"
if [[ ${distro_major_version} -lt 7 ]]; then
  echo "Error: Distro version ${distro_major_version} is not supported. Only >= 7 is supported." >&2
  exit 1
fi
# CentOS & RHEL >= 7 both have the full version in /etc/redhat-release
distro_minor_version="$(cat /etc/redhat-release | sed -e 's/[^0-9]*[0-9][0-9]*\.\([0-9][0-9]*\).*/\1/')"
echo "Distro Version: ${distro_major_version}.${distro_minor_version}"
if [[ ${distro_major_version} -eq 7 && ${distro_minor_version} -lt 2 ]]; then
  echo "Error: Distro version ${distro_major_version}.${distro_minor_version} is not supported. "\
"Only >= 7.2 is supported." >&2
  exit 1
fi

echo "Validating kernel version..."
kernel_major_version="$(uname -r | sed -e 's/\([0-9][0-9]*\).*/\1/')"
kernel_minor_version="$(uname -r | sed -e "s/${kernel_major_version}\.\([0-9][0-9]*\).*/\1/")"
echo "Kernel Version: ${kernel_major_version}.${kernel_minor_version}"
# OverlayFS (required below) needs kernel >= 3.10.
if [[ ${kernel_major_version} -lt 3 ]] || [[ ${kernel_major_version} -eq 3 && ${kernel_minor_version} -lt 10 ]]; then
  echo -n "Error: Kernel version ${kernel_major_version}.${kernel_minor_version} is not supported. " >&2
  echo "Only >= 3.10 is supported." >&2
  exit 1
fi

echo "Validating kernel modules..."
if ! lsmod | grep -q overlay; then
  echo "Enabling OverlayFS kernel module..."
  # Enable now
  sudo modprobe overlay
  # Load on reboot via systemd
  sudo tee /etc/modules-load.d/overlay.conf <<-'EOF'
	overlay
	EOF
fi

echo "Detecting Docker..."
if hash docker 2>/dev/null; then
  docker_client_version="$(docker --version | sed -e 's/Docker version \(.*\),.*/\1/')"
  echo "Docker Client Version: ${docker_client_version}"

  # A client without a reachable daemon indicates a broken install.
  if ! sudo docker info &>/dev/null; then
    echo "Docker Server not found. Please uninstall Docker and try again." >&2
    exit 1
  fi

  docker_server_version="$(sudo docker info | grep 'Server Version:' | sed -e 's/Server Version: \(.*\)/\1/')"
  echo "Docker Server Version: ${docker_server_version}"

  if [[ "${docker_client_version}" != "${docker_server_version}" ]]; then
    echo "Docker Server and Client versions do not match. Please uninstall Docker and try again." >&2
    exit 1
  fi

  # Require Docker >= 1.11
  docker_major_version="$(echo "${docker_server_version}" | sed -e 's/\([0-9][0-9]*\)\.\([0-9][0-9]*\).*/\1/')"
  docker_minor_version="$(echo "${docker_server_version}" | sed -e 's/\([0-9][0-9]*\)\.\([0-9][0-9]*\).*/\2/')"
  if [[ ${docker_major_version} -lt 1 ]] || [[ ${docker_major_version} -eq 1 && ${docker_minor_version} -lt 11 ]]; then
    echo -n "Docker version ${docker_major_version}.${docker_minor_version} not supported. " >&2
    echo "Please uninstall Docker and try again." >&2
    exit 1
  fi

  install_docker='false'
else
  echo "Docker not found (install queued)"
  install_docker='true'
fi

echo "Validating Docker Data Root..."
if [[ "${install_docker}" == 'true' ]]; then
  # Default location used by the install further below.
  docker_data_root="/var/lib/docker"
else
  docker_data_root="$(sudo docker info | grep 'Docker Root Dir:' | sed -e 's/Docker Root Dir: \(.*\)/\1/')"
fi
echo "Docker Data Root: ${docker_data_root}"
sudo mkdir -p "${docker_data_root}"

file_system="$(sudo df --output=fstype "${docker_data_root}" | tail -1)"
echo "File System: ${file_system}"
# OverlayFS on XFS requires ftype=1 (d_type support).
if [[ "${file_system}" != 'xfs' ]] || ! sudo xfs_info "${docker_data_root}" | grep -q 'ftype=1'; then
  echo "Error: "${docker_data_root}" must use XFS provisioned with ftype=1 to avoid known issues with OverlayFS." >&2
  exit 1
fi

# Installs a package via yum only when its command is not already on PATH,
# then prints the installed rpm version.
function yum_install() {
  local cmd="$1"
  echo "Validating ${cmd}..."
  if ! hash "${cmd}" 2>/dev/null; then
    echo "Installing ${cmd}..."
    sudo yum install -y ${cmd}
  fi
  # print installed version
  rpm -q "${cmd}"
}

echo "Installing Utilities..."
yum_install wget
yum_install curl
yum_install git
yum_install unzip
yum_install xz
yum_install ipset
yum_install bind-utils  # required by dcos-iam-ldap-sync

echo "Validating SELinux..."
if [[ "$(getenforce)" == "Enforcing" ]]; then
  echo "Disabling enforcement..."
  sudo setenforce 0
fi
if ! grep -q '^SELINUX=disabled' /etc/sysconfig/selinux; then
  echo "Disabling SELinux..."
  sudo sed -i --follow-symlinks 's/^SELINUX=.*/SELINUX=disabled/g' /etc/sysconfig/selinux
fi

if [[ "${install_docker}" == 'true' ]]; then
  echo "Installing Docker..."

  # Add Docker Yum Repo
  sudo tee /etc/yum.repos.d/docker.repo <<-'EOF'
	[dockerrepo]
	name=Docker Repository
	baseurl=https://yum.dockerproject.org/repo/main/centos/7
	enabled=1
	gpgcheck=1
	gpgkey=https://yum.dockerproject.org/gpg
	EOF

  # Add Docker systemd service
  sudo mkdir -p /etc/systemd/system/docker.service.d
  sudo tee /etc/systemd/system/docker.service.d/override.conf <<- EOF
	[Service]
	Restart=always
	StartLimitInterval=0
	RestartSec=15
	ExecStartPre=-/sbin/ip link del docker0
	ExecStart=
	ExecStart=/usr/bin/dockerd --storage-driver=overlay --data-root=${docker_data_root}
	EOF

  # Install and enable Docker
  sudo yum install -y docker-engine-17.05.0.ce docker-engine-selinux-17.05.0.ce
  sudo systemctl start docker
  sudo systemctl enable docker
fi

if ! sudo getent group nogroup >/dev/null; then
  echo "Creating 'nogroup' group..."
  sudo groupadd nogroup
fi

echo "Prerequisites installed."
def calculate_total_duration(operations):
    """Sum the known durations of *operations*.

    Operations without an entry in DURATION.COUNTER contribute nothing;
    an empty iterable yields 0.
    """
    return sum(
        DURATION.COUNTER[operation]
        for operation in operations
        if operation in DURATION.COUNTER
    )
import React from 'react'; import { IconChartBar } from './icon.chartBar'; import { IconCloudDownload } from './icon.cloudDownload'; import { IconDownload } from './icon.download'; import { IconExclamation } from './icon.exclamation'; import { IconHome } from './icon.home'; import { IconLogout } from './icon.logout'; import { IconMinusCircle } from './icon.minusCircle'; import { IconPlus } from './icon.plus'; import { IconPlusCircle } from './icon.plusCircle'; import { IconSwitchHorizontal } from './icon.switchHorizontal'; import { IconTag } from './icon.tag'; import { IconUpload } from './icon.upload'; import { IconUser } from './icon.user'; import { IconUserCircle } from './icon.userCircle'; import { IconViewGrid } from './icon.viewGrid'; export type IconName = | 'switch-horizontal' | 'plus-circle' | 'minus-circle' | 'home' | 'user' | 'chart-bar' | 'view-grid' | 'user-circle' | 'plus' | 'download' | 'upload' | 'logout' | 'tag' | 'exclamation' | 'cloud-download'; interface IconProps { type: IconName; } export const Icon = ({ type }: IconProps): JSX.Element => { const defaultIconClasses = 'h-6 w-6'; switch (type) { case 'switch-horizontal': return <IconSwitchHorizontal className={defaultIconClasses} />; case 'plus-circle': return <IconPlusCircle className={defaultIconClasses} />; case 'minus-circle': return <IconMinusCircle className={defaultIconClasses} />; case 'home': return <IconHome className={defaultIconClasses} />; case 'user': return <IconUser className={defaultIconClasses} />; case 'chart-bar': return <IconChartBar className={defaultIconClasses} />; case 'view-grid': return <IconViewGrid className={defaultIconClasses} />; case 'user-circle': return <IconUserCircle className={defaultIconClasses} />; case 'plus': return <IconPlus className={defaultIconClasses} />; case 'download': return <IconDownload className={defaultIconClasses} />; case 'upload': return <IconUpload className={defaultIconClasses} />; case 'logout': return <IconLogout 
className={defaultIconClasses} />; case 'tag': return <IconTag className={defaultIconClasses} />; case 'exclamation': return <IconExclamation className={defaultIconClasses} />; case 'cloud-download': return <IconCloudDownload className={defaultIconClasses} />; default: break; } return <div />; };
#!/bin/bash
# Launch the reqT tool, forwarding all command-line arguments.
REQT_LIB=~/reqT/lib
# "$@" (quoted) passes every argument through verbatim; the original
# unquoted $@ word-split arguments containing spaces. The jar path is
# quoted for the same reason.
java -jar "$REQT_LIB/reqT.jar" "$@"
// src/main/java/ed/biodare2/backend/features/tsdata/datahandling/TSDataExporter.java
/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package ed.biodare2.backend.features.tsdata.datahandling;

import ed.biodare2.backend.repo.isa_dom.dataimport.DataTrace;
import ed.biodare2.backend.repo.system_dom.AssayPack;
import ed.robust.dom.data.DetrendingType;
import ed.robust.dom.data.TimeSeries;
import ed.robust.util.timeseries.TimeSeriesFileHandler;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;
import org.springframework.stereotype.Service;

/**
 * Exports experiment time-series data to a comma-separated text file made of
 * three sections: an experiment description preamble, per-trace column
 * headers, and the time series themselves.
 *
 * @author tzielins
 */
@Service
public class TSDataExporter {

    /**
     * Writes the data set to the given file and returns that file.
     *
     * @param dataSet traces to export
     * @param exp experiment metadata used for the description preamble
     * @param detrending detrending applied to the data (reported, not applied here)
     * @param file destination path
     * @return the same {@code file}
     * @throws IOException on write failure
     */
    public Path export(List<DataTrace> dataSet, AssayPack exp, DetrendingType detrending, Path file) throws IOException {

        List<List<String>> setDescription = renderSetDescription(exp, detrending);
        List<List<String>> dataHeaders = renderDataHeaders(dataSet);
        List<TimeSeries> data = renderData(dataSet);

        save(setDescription, dataHeaders, data, file);
        return file;
    }

    /**
     * Convenience overload that exports into a fresh temp file.
     */
    public Path export(List<DataTrace> dataSet, AssayPack exp, DetrendingType detrending) throws IOException {

        Path file = Files.createTempFile(null, null);
        return export(dataSet, exp, detrending, file);
    }

    /**
     * Builds the description preamble rows: set type, detrending,
     * contributors and experiment id/URL/name.
     */
    protected List<List<String>> renderSetDescription(AssayPack exp, DetrendingType detrending) {

        List<List<String>> rows = new ArrayList<>();
        List<String> row;

        row = Arrays.asList("Set type:", "Experiment Data");
        rows.add(row);

        // NOTE(review): this row has a label but no value — looks like the
        // data-type value was never filled in; confirm intended content.
        row = Arrays.asList("Data type");
        rows.add(row);

        row = Arrays.asList("Detrending:", detrending.longName);
        rows.add(row);

        row = new ArrayList<>();
        row.add("Contributors:");
        // Collected into a Set first, so duplicate author names appear once.
        row.addAll(exp.getAssay()
                .contributionDesc
                .authors.stream()
                .map(p -> p.getName())
                .collect(Collectors.toSet()));
        rows.add(row);

        row = Arrays.asList("Experiment Id:", "" + exp.getId());
        rows.add(row);

        row = Arrays.asList("Experiment URL:", experimentURL(exp));
        rows.add(row);

        row = Arrays.asList("Experiment Name:", exp.getAssay().generalDesc.name);
        rows.add(row);

        return rows;
    }

    /** Public BioDare2 URL for the experiment. */
    protected String experimentURL(AssayPack exp) {
        return "https://biodare2.ed.ac.uk/experiment/" + exp.getId();
    }

    /**
     * Builds the three per-trace header rows: trace number, trace ref and
     * data label, one column per trace.
     */
    protected List<List<String>> renderDataHeaders(List<DataTrace> dataSet) {

        List<List<String>> rows = new ArrayList<>();
        List<String> row;

        row = new ArrayList<>();
        row.add("Data Nr:");
        row.addAll(dataSet.stream()
                .map(dt -> "" + dt.traceNr)
                .collect(Collectors.toList()));
        rows.add(row);

        row = new ArrayList<>();
        row.add("Data Ref:");
        row.addAll(dataSet.stream()
                .map(dt -> dt.traceRef)
                .collect(Collectors.toList()));
        rows.add(row);

        row = new ArrayList<>();
        row.add("Label:");
        row.addAll(dataSet.stream()
                .map(dt -> dt.details.dataLabel)
                .collect(Collectors.toList()));
        rows.add(row);

        return rows;
    }

    /** Extracts the raw {@link TimeSeries} from each trace. */
    protected List<TimeSeries> renderData(List<DataTrace> dataSet) {
        return dataSet.stream()
                .map(dt -> dt.trace)
                .collect(Collectors.toList());
    }

    /**
     * Escapes the textual sections and delegates the actual writing to
     * {@link TimeSeriesFileHandler} with a comma separator.
     */
    protected void save(List<List<String>> setDescription, List<List<String>> dataHeaders, List<TimeSeries> data, Path file) throws IOException {

        setDescription = escape(setDescription);
        dataHeaders = escape(dataHeaders);

        TimeSeriesFileHandler.saveToText(data, dataHeaders, setDescription, file.toFile(), ",");
    }

    /**
     * Replaces commas with semicolons (comma is the field separator) and
     * turns nulls into empty strings.
     */
    protected List<List<String>> escape(List<List<String>> setDescription) {

        return setDescription.stream()
                .map(l -> l.stream()
                        .map(t -> t != null ? t.replace(",", ";") : "")
                        .collect(Collectors.toList())
                )
                .collect(Collectors.toList());
    }
}
# frozen_string_literal: true require_relative 'nonterminal_methods' require_relative 'mixin' require_relative 'parse_tree' require_relative 'parse_error' require_relative 'terminal' require_relative 'alternation' require_relative 'eos' require_relative 'eol' module Yardp class Grammar include ParserMixin include NonterminalMethods def parse(text) ret = get_start.parse(text, self.class.options) raise Error, 'Parse returned a nil tree.' if ret.nil? # unless text[ret[1]..].strip.empty? # raise ParseError.new(ret[1], "Expected end of string but encountered '#{text[ret[1] + 1]}'", text) # end ret[0].simplify if self.class.options[:simplify_parse_tree] ret[0] end def graphviz g = nil self.class.rules.each { |rule| g = send(rule).graphviz(g) } g end end end
# GildedRose kata tests
# -*- coding: utf-8 -*-
# Characterisation tests for GildedRose.update_quality; the "line NN"
# comments point at the implementation branches each test covers.
import pytest
import mock

from python.gilded_rose import Item, GildedRose


def test_item():
    # line 46
    items = [Item("foo", 0, 0)]
    gilded_rose = GildedRose(items)
    gilded_rose.update_quality()
    assert ("foo" == items[0].name)


def test_items_value_decreases_by_one_everyday():
    # line 12-13
    items = [Item("foo", 1, 0)]
    gilded_rose = GildedRose(items)
    gilded_rose.update_quality()
    assert items[0].quality == 0


def test_items_value_does_not_drop_below_zero():
    # line 12-13
    items = [Item("foo", 0, 0)]
    gilded_rose = GildedRose(items)
    gilded_rose.update_quality()
    assert items[0].quality == 0


def test_aged_brie_value_increases_twice_faster_when_sell_in_below_zero():
    # line 35-36
    items = [Item("Aged Brie", 0, 8)]
    gilded_rose = GildedRose(items)
    gilded_rose.update_quality()
    assert items[0].quality == 10


def test_aged_brie_quality_increases_when_sell_in_decreases():
    items = [Item("Aged Brie", 10, 10)]
    gilded_rose = GildedRose(items)
    gilded_rose.update_quality()
    assert items[0].quality == 11
    assert items[0].sell_in == 9


def test_items_quality_is_never_more_than_50():
    # Both at-cap (50) and near-cap (49, doubled increase) cases.
    items = [Item("Aged Brie", 0, 50), Item("Aged Brie", 0, 49)]
    gilded_rose = GildedRose(items)
    gilded_rose.update_quality()
    assert items[0].quality == 50
    assert items[1].quality == 50


def test_sulfuras_never_sold_or_decreases_in_quality():
    items = [Item("Sulfuras, Hand of Ragnaros", 10, 10)]
    gilded_rose = GildedRose(items)
    gilded_rose.update_quality()
    assert items[0].quality == 10
    assert items[0].sell_in == 10


def test_aged_brie_and_backstage_quality_increases_when_sell_in_decreases():
    # line 15-16
    # Backstage passes: +1 normally, +2 within 10 days, +3 within 5 days.
    items = [Item("Aged Brie", 10, 10),
             Item("Backstage passes to a TAFKAL80ETC concert", 20, 20),
             Item("Backstage passes to a TAFKAL80ETC concert", 8, 20),
             Item("Backstage passes to a TAFKAL80ETC concert", 2, 20)]
    gilded_rose = GildedRose(items)
    gilded_rose.update_quality()
    assert items[0].quality == 11
    assert items[0].sell_in == 9
    assert items[1].quality == 21
    assert items[1].sell_in == 19
    assert items[2].quality == 22
    assert items[2].sell_in == 7
    assert items[3].quality == 23
    assert items[3].sell_in == 1


def test_conjured_quality_decreases_twice_faster():
    items = [Item("Conjured", 10, 10)]
    gilded_rose = GildedRose(items)
    gilded_rose.update_quality()
    assert items[0].quality == 8
    assert items[0].sell_in == 9


def test_item_quality_drops_twice_faster_when_sell_in_below_zero():
    items = [Item("foo", 0, 40)]
    gilded_rose = GildedRose(items)
    gilded_rose.update_quality()
    assert items[0].quality == 38


def test_backstage_quality_zeroed_when_sell_in_below_zero():
    items = [Item("Backstage passes to a TAFKAL80ETC concert", 0, 40)]
    gilded_rose = GildedRose(items)
    gilded_rose.update_quality()
    assert items[0].quality == 0
// app/components/genes/genes.route.js
import controller from './genes.controller';

// Route definition for the Genes page; the router loads the template and
// controller, and the datapackage description alongside them.
export default {
  title: 'FANTOM CAT | Genes',
  templateUrl: 'components/genes/genes.html',
  controller,
  controllerAs: '$ctrl',
  datapackageUrl: 'components/genes/datapackage.json'
};
<filename>offer/src/main/java/com/java/study/answer/zuo/bbasic/class_08/Code_05_Cow.java package com.java.study.answer.zuo.bbasic.class_08; public class Code_05_Cow { public static int cowNumber1(int n) { if (n < 1) { return 0; } if (n == 1 || n == 2 || n == 3) { return n; } return cowNumber1(n - 1) + cowNumber1(n - 3); } public static int cowNumber2(int n) { if (n < 1) { return 0; } if (n == 1 || n == 2 || n == 3) { return n; } int res = 3; int pre = 2; int prepre = 1; int tmp1 = 0; int tmp2 = 0; for (int i = 4; i <= n; i++) { tmp1 = res; tmp2 = pre; res = res + prepre; pre = tmp1; prepre = tmp2; } return res; } public static void main(String[] args) { int n = 20; System.out.println(cowNumber1(n)); System.out.println(cowNumber2(n)); } }
#!/bin/bash
# Export a release: a tarball of the helper scripts plus the saved Docker
# image, both stamped with the current date.
. ../../settings.bash

# Refuse to export devel-mode workspaces.
if [ "$DEFAULT_EXECMODE" = "devel" ]; then
    echo "the $DEFAULT_EXECMODE has to be release in the settings.bash in order to export"
    exit 1
fi

export DATE=$(date +%Y_%m_%d-%H_%M)
# Flatten subfolder separators so the name is filesystem-safe.
PROJECT_NAME_NO_SUBFOLDER=${PROJECT_NAME//\//_}
# Prefix the registry only when RELEASE_REGISTRY is set and non-empty.
IMAGE_NAME=${RELEASE_REGISTRY:+${RELEASE_REGISTRY}/}$WORKSPACE_RELEASE_IMAGE
SCRIPTFOLDER=${PROJECT_NAME_NO_SUBFOLDER}_scripts_${DATE}

mkdir -p "$SCRIPTFOLDER"

# Build the scripts archive (all paths quoted for robustness):
echo "creating scripts archive: $SCRIPTFOLDER.tar.gz"
cp ../../docker_commands.bash "./$SCRIPTFOLDER/"
cp ../../settings.bash "./$SCRIPTFOLDER/"
cp ../../exec.bash "./$SCRIPTFOLDER/"
cp ../../stop.bash "./$SCRIPTFOLDER/"
cp ../../doc/010_Setup_Docker.md "./$SCRIPTFOLDER/Readme_Docker.md"
cp ./Readme_scripts.md "./$SCRIPTFOLDER/Readme.md"
# Bash completion candidates: every start script plus /bin/bash.
echo "complete -W \"$(ls ../../startscripts | xargs) /bin/bash\" ./exec.bash" >> "$SCRIPTFOLDER/autocomplete.me"
tar czf "$SCRIPTFOLDER.tar.gz" "$SCRIPTFOLDER"
rm -rf "$SCRIPTFOLDER"

echo "saving ${IMAGE_NAME} to ${PROJECT_NAME_NO_SUBFOLDER}_image_${DATE}.tar.gz"
docker save "${IMAGE_NAME}" | gzip > "${PROJECT_NAME_NO_SUBFOLDER}_image_${DATE}.tar.gz"
#!/bin/sh
# Build a libtorrent-rasterbar release tarball, then unpack it and build
# the test suite from the fresh tarball as a smoke test.
set -e
set -x

python tools/clean.py

# Regenerate the documentation first.
cd docs
make
cd ..

# Drop libtool-generated m4 files so autotools regenerates them fresh, and
# normalise executable bits on sources/docs before packaging.
rm -f m4/libtool.m4 m4/lt~obsolete.m4 m4/ltsugar.m4 m4/ltversion.m4 m4/ltoptions.m4
chmod a-x docs/*.rst docs/*.htm* src/*.cpp include/libtorrent/*.hpp

./autotool.sh
./configure --enable-python-binding --enable-examples=yes --enable-encryption --enable-tests=yes
make dist

VERSION=1.2.5

# Unpack the produced tarball and build the tests out of it; the first
# script argument is forwarded to bjam (e.g. extra build properties).
tar xvzf libtorrent-rasterbar-${VERSION}.tar.gz
cd libtorrent-rasterbar-${VERSION}/test
bjam link=static $1
package com.udacity.jdnd.course3.critter.schedule;

import com.udacity.jdnd.course3.critter.pet.Pet;
import com.udacity.jdnd.course3.critter.user.Employee;
import com.udacity.jdnd.course3.critter.user.EmployeeSkill;

import javax.persistence.metamodel.ListAttribute;
import javax.persistence.metamodel.SingularAttribute;
import javax.persistence.metamodel.StaticMetamodel;
import java.time.LocalDate;

/**
 * JPA static metamodel for {@code Schedule}, enabling type-safe Criteria
 * API queries. Files like this are usually generated by an annotation
 * processor -- keep the attributes in sync with the entity's fields.
 */
@StaticMetamodel(Schedule.class)
public class Schedule_ {
    public static volatile SingularAttribute<Schedule, Long> id;
    public static volatile ListAttribute<Schedule, Employee> employees;
    public static volatile ListAttribute<Schedule, Pet> pets;
    public static volatile SingularAttribute<Schedule, LocalDate> date;
    public static volatile ListAttribute<Schedule, EmployeeSkill> activities;
}
#!/bin/bash
# Feature-generation pipeline for a PDB structure: copies the input,
# extracts backbones, generates fragment and rotomer features, extracts
# atom positions, and moves the results into $outputdir.
#
# NOTE(review): every `$(unknown)` below appears to be a scrubbed variable
# reference (most likely ${filename}, which is computed but never used).
# As written, $(unknown) is command substitution of a nonexistent command,
# so the script cannot work -- restore the original variable.

inputdir=$(dirname $1)
outputdir=$2
filename=$(basename $1 .pdb)

# Make copy of input file
cp $1 ./$(unknown)_t.pdb || exit 1

# Extract backbone, get_ala_backbone.pl for next script, get_seq_backbone.pl for the last
perl ./bin/get_ala_backbone.pl $(unknown)_t || exit 2
perl ./bin/get_seq_backbone.pl $(unknown)_t || exit 3

# Generate fragments: a one-entry list file of the ALA backbone and its CA count.
echo -e "\"$(unknown)_t.ALA.pdb\"\\t`grep CA $(unknown)_t.ALA.pdb | wc -l`" > $(unknown)_t.list
# Usage: main libdir/ liblist querydir list
./bin/featuregen_fragments ./data/db/ ./data/db_list ./ ./$(unknown)_t.list || exit 4
perl ./bin/get_SP.pl $(unknown)_t || exit 5

# Generate rotomers
# Usage: RUN dfirelib1 sdir pdb
./bin/featuregen_rotomers ./dfirelib1 ./ $(unknown)_t.ALA.pdb > $(unknown)_t.sc || exit 6
perl ./bin/get_SC.pl $(unknown)_t || exit 7
cat $(unknown)_t.sc.nml | tr -s " " | cut -d " " -f 2-27,29-114 > $(unknown)_t.features.rotomers || exit 8

# Extract atom positions
# Print file, extract columns, separate squashed fields, del spaces, remove CB,
# del start spaces, del trailing sp, replace N, replace CA, replace C, replace O,
# write to file
#
cat $(unknown)_t.ALA.pdb | cut -c 13-16,23-26,31-54 | sed 's:^\(.\{4\}\)\(.\{4\}\)\(.\{8\}\)\(.\{8\}\)\(.\{8\}\).*$:\1 \2 \3 \4 \5:' | tr -s " " | grep -v CB | sed 's:^[ ]*::' | sed 's:[ ]*$::' | sed 's:N:0:' | sed 's:CA:1:' | sed 's:C:2:' | sed 's:O:3:' > $(unknown)_t.positions || exit 9

mv $(unknown)_t.features.fragments ${outputdir}/$(unknown).fragments
mv $(unknown)_t.features.rotomers ${outputdir}/$(unknown).rotomers
mv $(unknown)_t.SEQ.pdb ${outputdir}/$(unknown)_fragroto.pdb

# Clean things up
rm -f $(unknown)_t* || exit 10
from wordcloud import WordCloud
import matplotlib.pyplot as plt
from PIL import Image
import numpy as np


def create_wordcloud(text, image_path=None):
    """Render a word cloud for *text*, display it, and save it as a PNG.

    Fix: the original generated the cloud twice when ``image_path`` was
    given, discarding the first (unmasked) result; the cloud is now built
    exactly once with the appropriate options.

    Parameters
    ----------
    text : str
        Source text the word frequencies are computed from.
    image_path : str, optional
        Path to an image whose array is used as a mask for the cloud
        shape. When given, the output file is ``wordcloud_image.png``;
        otherwise ``wordcloud.png`` (same filenames as before).
    """
    kwargs = dict(width=800, height=400, background_color='white', max_words=200)
    if image_path:
        # Shape the cloud with the image silhouette.
        # NOTE(review): contour_color has no visible effect unless
        # contour_width > 0 is also passed -- confirm intent.
        kwargs.update(mask=np.array(Image.open(image_path)),
                      contour_color='steelblue')
    else:
        # The viridis colormap was only applied in the unmasked path
        # originally; preserved as-is.
        kwargs.update(colormap='viridis')
    wordcloud = WordCloud(**kwargs).generate(text)

    # Display word cloud using matplotlib.
    plt.figure(figsize=(10, 5))
    plt.imshow(wordcloud, interpolation='bilinear')
    plt.axis('off')
    plt.show()

    # Persist to disk; the filename depends on whether a mask was used.
    wordcloud.to_file('wordcloud_image.png' if image_path else 'wordcloud.png')
/*
 * Reserved.sql
 * Chapter 3, Oracle10g PL/SQL Programming
 * by <NAME>, <NAME>, <NAME>
 *
 * This script prints a list of reserved words
 */

-- Drop leftover objects so the chapter examples start from a clean schema.
exec clean_schema.trigs
exec clean_schema.procs
exec clean_schema.tables

SET PAGES 9999

PROMPT
PROMPT ** Reserved words **
PROMPT

COL keyword FORMAT A30

-- Multi-character keywords (plus the single-letter keyword 'A'),
-- excluding the '<<' label delimiter, which is listed separately below.
SELECT keyword, length
FROM v_$reserved_words
WHERE (length > 1 OR keyword = 'A')
  AND keyword != '<<'
ORDER BY keyword;

PROMPT
PROMPT
PROMPT ** Special Characters **
PROMPT

-- Single-character reserved symbols, plus '<<'.
-- NOTE(review): AND binds tighter than OR, so this reads as
-- (length = 1 AND keyword != 'A') OR keyword = '<<' -- confirm intent.
SELECT keyword
FROM v_$reserved_words
WHERE length = 1 AND keyword != 'A'
   OR keyword = '<<';
#!/usr/bin/env bash
# Copyright 2021 Hewlett Packard Enterprise Development LP
#
# CSM upgrade driver: regenerates Loftsman manifests from the site-init
# customizations secret, then (re)deploys the platform and service charts
# in dependency order. Deploy order below is significant.

set -exo pipefail

ROOTDIR="$(dirname "${BASH_SOURCE[0]}")"
source "${ROOTDIR}/lib/version.sh"
source "${ROOTDIR}/lib/install.sh"

: "${BUILDDIR:="${ROOTDIR}/build"}"
mkdir -p "$BUILDDIR"

# Assumes site-init customizations has been properly updated
[[ -f "${BUILDDIR}/customizations.yaml" ]] && rm -f "${BUILDDIR}/customizations.yaml"
kubectl get secrets -n loftsman site-init -o jsonpath='{.data.customizations\.yaml}' | base64 -d > "${BUILDDIR}/customizations.yaml"

# Generate manifests with customizations
mkdir -p "${BUILDDIR}/manifests"
find "${ROOTDIR}/manifests" -name "*.yaml" | while read manifest; do
    manifestgen -i "$manifest" -c "${BUILDDIR}/customizations.yaml" -o "${BUILDDIR}/manifests/$(basename "$manifest")"
done

function deploy() {
    # XXX Loftsman may not be able to connect to $NEXUS_URL due to certificate
    # XXX trust issues, so use --charts-path instead of --charts-repo.
    loftsman ship --charts-path "${ROOTDIR}/helm" --manifest-path "$1"
}

# Undeploy the chart if it exists on the system.
# Use this if a chart has been removed from a manifest and needs
# to be removed from the system as part of an upgrade.
function undeploy() {
    # If the chart is missing (rc==1) just return success.
    helm status "$@" || return 0
    # Remove the chart.
    helm uninstall "$@"
}

# Check for manually create unbound PSP that is not managed by helm
function unbound_psp_check() {
    echo "Checking for manually created cray-unbound-coredns-psp"
    unbound_psp_exist="$(kubectl get ClusterRoleBinding -n services |grep cray-unbound-coredns-psp |wc -l)"||true
    if [[ "$unbound_psp_exist" -eq "1" ]]; then
        # A binding exists; only delete it if helm does not own it.
        unbound_psp_helm_check="$(kubectl get ClusterRoleBinding -n services cray-unbound-coredns-psp -o yaml |grep helm |wc -l)"||true
        if [[ "$unbound_psp_helm_check" -eq "0" ]]; then
            echo "Found ClusterRoleBinding cray-dns-unbound-psp NOT managed by helm"
            kubectl delete ClusterRoleBinding -n services cray-unbound-coredns-psp
            echo "Delete ClusterRoleBinding cray-dns-unbound-psp"
        fi
    fi
    echo "cray-unbound-coredns-psp check Done"
}

# Deploy services critical for Nexus to run
deploy "${BUILDDIR}/manifests/storage.yaml"
deploy "${BUILDDIR}/manifests/platform.yaml"
deploy "${BUILDDIR}/manifests/keycloak-gatekeeper.yaml"

# TODO How to upgrade metallb?
# Deploy metal-lb configuration
# kubectl apply -f "$METALLB_YAML"

# Create secret with HPE signing key
if [[ -f "${ROOTDIR}/hpe-signing-key.asc" ]]; then
    kubectl create secret generic hpe-signing-key -n services --from-file=gpg-pubkey="${ROOTDIR}/hpe-signing-key.asc" --dry-run=client --save-config -o yaml | kubectl apply -f -
fi

# Save previous Unbound IP
pre_upgrade_unbound_ip="$(kubectl get -n services service cray-dns-unbound-udp-nmn -o jsonpath='{.status.loadBalancer.ingress[0].ip}')"

# Check for manually create unbound PSP that is not managed by helm
unbound_psp_check

deploy "${BUILDDIR}/manifests/core-services.yaml"

# Wait for Unbound to come up
"${ROOTDIR}/lib/wait-for-unbound.sh"

# Verify Unbound settings
unbound_ip="$(kubectl get -n services service cray-dns-unbound-udp-nmn -o jsonpath='{.status.loadBalancer.ingress[0].ip}')"
if [[ "$pre_upgrade_unbound_ip" != "$unbound_ip" ]]; then
    echo >&2 "WARNING: Unbound IP has changed: $unbound_ip"
    echo >&2 "WARNING: Need to update nameserver settings on NCNs"
    # TODO pdsh command to update nameserver settings
fi

# In 1.5 the cray-conman Helm chart is replaced by console-[data,node,operator] charts but
# cray-conman needs to be removed if it exists.
undeploy -n services cray-conman

# Deploy remaining system management applications
deploy "${BUILDDIR}/manifests/sysmgmt.yaml"

# Deploy Nexus
deploy "${BUILDDIR}/manifests/nexus.yaml"

set +x
cat >&2 <<EOF
+ CSM applications and services upgraded
${0##*/}: OK
EOF
# platform = Red Hat Enterprise Linux 6
# Remediation snippet: ensure the qpidd message broker is neither
# enabled at boot nor currently running.

#
# Disable qpidd for all run levels
#
/sbin/chkconfig --level 0123456 qpidd off

#
# Stop qpidd if currently running
#
/sbin/service qpidd stop
// Doxygen-generated navigation data for class SmolDock::Bond: each entry
// maps a member name to its documentation anchor. This file appears to be
// auto-generated -- do not edit by hand.
var class_smol_dock_1_1_bond =
[
    [ "BondType", "class_smol_dock_1_1_bond.html#a6cbf152f682501c998bd06a55400c9cf", [
      [ "singlebond", "class_smol_dock_1_1_bond.html#a6cbf152f682501c998bd06a55400c9cfaccd9d70d0e74b128e8fdd74b302934f1", null ],
      [ "doublebond", "class_smol_dock_1_1_bond.html#a6cbf152f682501c998bd06a55400c9cfa076120e243046998cc8ecff65e716c35", null ],
      [ "triplebond", "class_smol_dock_1_1_bond.html#a6cbf152f682501c998bd06a55400c9cfa7381550bb1c4acbc5e3801c17ef7cae3", null ],
      [ "defaultbond", "class_smol_dock_1_1_bond.html#a6cbf152f682501c998bd06a55400c9cfa0a3db58cd06e63f99bfd2d9e6773f555", null ],
      [ "aromatic", "class_smol_dock_1_1_bond.html#a6cbf152f682501c998bd06a55400c9cfab055cca5ddd5fd7702867dffa64c1f8f", null ]
    ] ],
    [ "Bond", "class_smol_dock_1_1_bond.html#a7f046d71521a269171643bdf147d5c80", null ],
    [ "Bond", "class_smol_dock_1_1_bond.html#ae864507cf1018efa1964e7b77b2d6e4f", null ],
    [ "getBondID", "class_smol_dock_1_1_bond.html#a382edad0cb2bd339e3ff676cd232d093", null ],
    [ "getBondType", "class_smol_dock_1_1_bond.html#a6b8180f0f2b339c521f8a74b2445e8c8", null ],
    [ "getEndA", "class_smol_dock_1_1_bond.html#a8ca51ba3f2abc758ecd7b4b040681f21", null ],
    [ "getEndB", "class_smol_dock_1_1_bond.html#a0a2feeb4cc88cbb8920ae5e9b142ead5", null ],
    [ "publicizeToAtom", "class_smol_dock_1_1_bond.html#a225d51b7edd43bc5aa16984981bc2dfe", null ],
    [ "setBondType", "class_smol_dock_1_1_bond.html#a4ebf28309120c880429f205edf67b9f8", null ],
    [ "bond_end_a", "class_smol_dock_1_1_bond.html#ad909ef34f3b9b10acc221ac6d54e423a", null ],
    [ "bond_end_b", "class_smol_dock_1_1_bond.html#adbbc73bd7bc4fc579237321be516ed2f", null ],
    [ "BondID", "class_smol_dock_1_1_bond.html#a6fb49dc80f21a23c8b039f85d3b7b5bf", null ],
    [ "bondtype", "class_smol_dock_1_1_bond.html#a7795dad7005c83f8702d8593eb81b1ff", null ],
    [ "nextBondID", "class_smol_dock_1_1_bond.html#acb9a24240df4164e4840611d7df39962", null ]
];
const box = require('./index');

// Smoke test: generate a box model from a representative parameter set
// and dump the result to the console.
console.log(box({
  boltX: 70,
  boltY: 50,
  depth: 25,
  wallThickness: 2,
  bottomThickness: 2,
  bodyHoleRadius: 1.5,
  holeThroughBottom: true,
  lidThickness: 2,
  lidHoleRadius: 2,
  lidInsetThickness: 1,
  lidInsetClearance: .25,
  maxArcFacet: .25,
}));
package com.netcracker.ncstore.dto.body;

import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Getter;
import lombok.extern.jackson.Jacksonized;

import java.util.UUID;

/**
 * DTO containing request body for cart PUT request
 */
@Jacksonized
@Builder
@JsonIgnoreProperties(ignoreUnknown = true)
@AllArgsConstructor
@Getter
public class CartPutBody {
    // ID of the product being placed in the cart.
    private final UUID productId;
    // Desired quantity of that product in the cart.
    private final int productCount;
}
// Copyright 2009 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package com.google.enterprise.secmgr.config;

import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.google.common.base.Predicate;
import com.google.common.collect.Maps;
import com.google.enterprise.secmgr.common.HttpUtil;
import com.google.gson.GsonBuilder;
import com.google.gson.JsonDeserializationContext;
import com.google.gson.JsonDeserializer;
import com.google.gson.JsonElement;
import com.google.gson.JsonSerializationContext;
import com.google.gson.JsonSerializer;

import java.lang.reflect.Type;
import java.net.URI;
import java.util.Map;

import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import javax.annotation.ParametersAreNonnullByDefault;
import javax.annotation.concurrent.GuardedBy;
import javax.annotation.concurrent.ThreadSafe;

/**
 * An authentication authority describes an authority that validates credentials.
 *
 * Instances are interned: {@link #make(URI)} returns one canonical instance
 * per URI from a process-wide map, so {@code ==} on instances matches
 * {@code equals}.
 */
@ThreadSafe
@ParametersAreNonnullByDefault
public class AuthnAuthority {

  // Process-wide interning map. It only grows during normal operation
  // (clearAuthorities() exists for tests), so interned authorities live
  // for the lifetime of the JVM.
  @GuardedBy("itself")
  @Nonnull
  static final Map<URI, AuthnAuthority> AUTHORITIES = Maps.newHashMap();

  @Nonnull private final URI uri;

  private AuthnAuthority(URI uri) {
    this.uri = uri;
  }

  /**
   * Makes an authority.
   *
   * @param uri The URI for this authority.
   * @return The unique authority with that URI, creating it if necessary.
   * @throws IllegalArgumentException if the URI is invalid.
   */
  @Nonnull
  public static AuthnAuthority make(URI uri) {
    checkUri(uri);
    AuthnAuthority authority;
    synchronized (AUTHORITIES) {
      authority = AUTHORITIES.get(uri);
      if (authority == null) {
        authority = new AuthnAuthority(uri);
        AUTHORITIES.put(uri, authority);
      }
    }
    return authority;
  }

  // Authority URIs must be absolute and hierarchical, with no query or
  // fragment component.
  private static void checkUri(URI uri) {
    Preconditions.checkNotNull(uri);
    Preconditions.checkArgument(uri.isAbsolute());
    Preconditions.checkArgument(!uri.isOpaque());
    Preconditions.checkArgument(uri.getQuery() == null);
    Preconditions.checkArgument(uri.getFragment() == null);
  }

  static AuthnAuthority make(String configName) {
    return make(HttpUtil.smUriBuilder().addSegment(configName).build());
  }

  @VisibleForTesting
  public static AuthnAuthority make() {
    return make(HttpUtil.smUriBuilder().addRandomSegment(16).build());
  }

  /**
   * Gets a globally-unique URI for this authority.
   *
   * @return The authority's URI.
   */
  @Nonnull
  public URI getUri() {
    return uri;
  }

  /**
   * Gets a globally-unique name for this authority.  Equivalent to
   * {@code getUri().toString()}.
   *
   * @return The authority's name.
   */
  @Nonnull
  public String getName() {
    return uri.toString();
  }

  /**
   * Gets an authority given its URI.
   *
   * @param uri The URI of the authority to find.
   * @return The corresponding authority, or {@code null} if no authority has that URI.
   */
  @Nullable
  public static AuthnAuthority lookupByUri(URI uri) {
    checkUri(uri);
    synchronized (AUTHORITIES) {
      return AUTHORITIES.get(uri);
    }
  }

  /**
   * Gets an authority given its name.
   *
   * @param name The name of the authority to find.
   * @return The corresponding authority, or {@code null} if no authority has that name.
   */
  @Nullable
  public static AuthnAuthority lookupByName(String name) {
    return lookupByUri(URI.create(name));
  }

  /**
   * Gets an authority given its name.
   *
   * @param name The name of the authority to find.
   * @return The corresponding authority.
   * @throws IllegalArgumentException if no such authority.
   */
  @Nonnull
  public static AuthnAuthority getByName(String name) {
    AuthnAuthority authority = lookupByName(name);
    if (authority == null) {
      throw new IllegalArgumentException("No authority of this name: " + name);
    }
    return authority;
  }

  @VisibleForTesting
  public static void clearAuthorities() {
    synchronized (AUTHORITIES) {
      AUTHORITIES.clear();
    }
  }

  @Override
  public boolean equals(Object object) {
    if (object == this) { return true; }
    if (!(object instanceof AuthnAuthority)) { return false; }
    AuthnAuthority other = (AuthnAuthority) object;
    return getUri().equals(other.getUri());
  }

  @Override
  public int hashCode() {
    return getUri().hashCode();
  }

  @Override
  public String toString() {
    return getName();
  }

  /**
   * Gets a predicate that is true for an authority with a given URI.
   *
   * @param uri The authority URI to test for.
   * @return The predicate corresponding to the name.
   */
  @Nonnull
  public static Predicate<AuthnAuthority> getUriPredicate(final URI uri) {
    return new Predicate<AuthnAuthority>() {
      public boolean apply(AuthnAuthority authority) {
        return uri.equals(authority.getUri());
      }
    };
  }

  static void registerTypeAdapters(GsonBuilder builder) {
    builder.registerTypeAdapter(AuthnAuthority.class, new LocalTypeAdapter());
  }

  // Serializes an authority as its name string; deserialization goes back
  // through make(), preserving the interning invariant.
  private static final class LocalTypeAdapter
      implements JsonSerializer<AuthnAuthority>, JsonDeserializer<AuthnAuthority> {
    LocalTypeAdapter() {
    }

    @Override
    public JsonElement serialize(AuthnAuthority value, Type type,
        JsonSerializationContext context) {
      return context.serialize(value.getName(), String.class);
    }

    @Override
    public AuthnAuthority deserialize(JsonElement elt, Type type,
        JsonDeserializationContext context) {
      String string = context.deserialize(elt, String.class);
      return make(URI.create(string));
    }
  }
}
-- Fetch all record rows for the employee named John, together with the
-- employee's name.
-- Fix: the original used double quotes around John -- in ANSI SQL (and
-- MySQL with ANSI_QUOTES, PostgreSQL, Oracle) double quotes delimit
-- identifiers, not string literals, so the query fails or misbehaves.
-- NOTE(review): the join on employees.id = records.id looks suspicious --
-- records usually reference employees via a foreign key such as
-- records.employee_id; confirm against the schema.
SELECT records.*, employees.name
FROM records
INNER JOIN employees ON employees.id = records.id
WHERE employees.name = 'John';
// Polls a Klaytn contract for `play_log` events in fixed-size block
// windows and prints each event's return values.

// const rpcURL = "https://public-node-api.klaytnapi.com/v1/cypress";
// const networkID = "8217";
const rpcURL = "https://api.baobab.klaytn.net:8651/";
const networkID = "1001";

const Caver = require("caver-js");
const caver = new Caver(rpcURL);

const CONTRACT_ABI = require("./abi/MyGame.json");
const contract_addr = CONTRACT_ABI.networks[networkID].address;
const contract = new caver.klay.Contract(CONTRACT_ABI.abi, contract_addr);
console.log("contract_addr", contract_addr);
console.log("가위바위보 주소: https://klaytngame-1.gunillee.repl.co/");

let ret;
// Re-entrancy guard: skip a tick while the previous poll is still running.
let busy = false;
let call_count = 0;
// Last block already scanned; scanning resumes at last_block_id + 1.
// NOTE(review): the starting block is hard-coded -- confirm it is still
// appropriate for the target network.
let last_block_id = 90628000;
// Number of blocks fetched per poll.
let depth = 99;

async function read_event() {
  if (busy) {
    console.log("read_event busy");
    return;
  }
  busy = true;
  call_count++;
  try {
    const bn = await caver.klay.getBlockNumber();
    const fromBlock = last_block_id + 1;
    const toBlock = last_block_id + depth;
    // Wait until the chain has produced the whole window.
    if (toBlock > bn) {
      console.log("bn not yet", toBlock - bn);
      busy = false;
      return;
    }
    ret = await contract.getPastEvents("play_log", {
      filter: {},
      fromBlock,
      toBlock,
    });
    console.log("play_log ret", fromBlock, ret.length);
    for (let i = 0; i < ret.length; i++) {
      let e = ret[i];
      console.log(i, e.blockNumber, e.returnValues);
    }
    last_block_id = toBlock;
  } catch (e) {
    console.log("read_event fail", e);
  }
  busy = false;
}

// Poll every 10 seconds.
setInterval(read_event, 1000 * 10);
package gq.optimalorange.account.sample;

import gq.optimalorange.account.AuthenticationService.AuthenticateFailure;
import gq.optimalorange.account.AuthenticationService.ChangeCertificateFailure;
import gq.optimalorange.account.Identifier;
import gq.optimalorange.account.Result;
import gq.optimalorange.account.SubjectService.CreateFailure;
import gq.optimalorange.account.SubjectService.SetIdentifierFailure;
import gq.optimalorange.account.sample.inject.ServiceComponent;

import rx.Observable;

import static gq.optimalorange.account.Certificate.password;

/**
 * Demonstrates the account API end to end: create an account, attach a
 * username, change the password, then authenticate.
 *
 * NOTE(review): the "&lt;PASSWORD&gt;" fragments in the literals below look
 * like scrubbed placeholder credentials from the original source.
 */
public class Samples {

  public static Observable<Result<?, ? extends Enum<? extends Enum<?>>>> getUseCase(
      ServiceComponent serviceComponent) {
    // Random suffix keeps repeated runs from colliding on the username.
    final double suffix = Math.random() * 1_000_000;
    final String username = "test username " + suffix;
    final String initPassword = "<PASSWORD> " + suffix;
    final String changedPassword = "<PASSWORD>" + suffix;

    // 1. create account
    final Observable<Result<Identifier, CreateFailure>> signUp =
        serviceComponent.getSubjectService().create(password(initPassword)).toObservable().cache();

    // 2. set user name (only when sign-up succeeded)
    final Observable<Result<Void, SetIdentifierFailure>> setUsername = signUp
        .filter(Result::succeeded)
        .flatMap(r -> serviceComponent.getSubjectService()
            .setIdentifier(r.result(), password(initPassword), Identifier.username(username))
            .toObservable());

    // 3. change password (only when sign-up succeeded)
    final Observable<Result<Void, ChangeCertificateFailure>> changePassword = signUp
        .filter(Result::succeeded)
        .flatMap(r -> serviceComponent.getAuthenticationService()
            .changeCertificate(r.result(), password(initPassword),
                password(initPassword), password(changedPassword))
            .toObservable())
        .cache();

    // 4. authenticate -- use the changed password when step 3 succeeded,
    // otherwise fall back to the initial one.
    final Observable<Result<Void, AuthenticateFailure>> authenticate = changePassword
        .map(r -> {
          if (r.succeeded()) {
            return changedPassword;
          } else {
            return initPassword;
          }
        })
        .flatMap(r -> serviceComponent.getAuthenticationService()
            .authenticate(Identifier.username(username), password(r))
            .toObservable());

    return Observable.concat(signUp, setUsername, changePassword, authenticate);
  }
}
def fetch_evals(year, term):
    """Return (simulated) course-evaluation data for *year* and *term*.

    The nested helper stands in for a real remote API round-trip; swap
    its body for actual fetching logic once a server is available.
    """
    def retrieve_evaluation_data(year, term):
        # Placeholder for the real server call.
        return f"Evaluation data for year {year} and term {term}"

    return retrieve_evaluation_data(year, term)
import random


def scramble_list(items):
    """Shuffle *items* in place and return the same list object.

    Fix: the parameter was named ``list``, shadowing the builtin; renamed
    to ``items`` (positional calls are unaffected).
    """
    random.shuffle(items)
    return items


# Demo. Fix: the original called scramble_list(list) with the *builtin*
# list type, which raises TypeError; pass an actual list instead.
scramble_list(['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h'])
# Example output: ['g', 'h', 'f', 'e', 'b', 'a', 'c', 'd']
// Column-definition factory for a non-nullable "created_at" timestamptz
// column that defaults to the row's insertion time.
export default () => ({
  name: 'created_at',
  type: 'timestamptz',
  isNullable: false,
  default: 'now()',
});
#!/usr/bin/env node
// esprit CLI entry point: registers subcommands (some conditionally,
// based on the project's configuration) and dispatches process.argv.
import { createCommand } from "commander";
import { SAO } from "sao";
import { createAddCommand } from "../commands/add";
import { createGenerateCommand } from "../commands/generate";
import { createMigrationCommand } from "../commands/migration";
import { createOpenapiCommand } from "../commands/openapi";
import { ORM } from "../enums/orm";
import { config } from "../utils/config";

const program = createCommand();

// `esprit new <name>`: scaffold a fresh project into ./<name> via SAO.
program.command("new <name>").action((name) => {
  new SAO({
    generator: `${__dirname}/../packages/new`,
    outDir: name,
  }).run();
});

program.addCommand(createAddCommand());
program.addCommand(createGenerateCommand());

// Migration commands only make sense for TypeORM-backed projects.
if (config.database && config.database.orm === ORM.TypeORM) {
  program.addCommand(createMigrationCommand());
}

// OpenAPI generation is only offered in REST mode.
if (config.mode == "REST") {
  program.addCommand(createOpenapiCommand());
}

program.parse(process.argv);
from flask import Flask, render_template

app = Flask(__name__)

# Series handed to the chart template.
temperatures = [20, 22, 21, 19, 20, 24, 26]
years = [2018, 2019]


@app.route('/')
def display_chart():
    """Render chart.html populated with the temperature and year series."""
    return render_template(
        'chart.html',
        temperatures=temperatures,
        years=years,
    )


if __name__ == '__main__':
    app.run()
// Teaching examples for TypeScript function return-type annotations.

const add = (a: number, b: number): number => {
  return a + b;
};

// Wrong on purpose: without a return annotation TypeScript will not notice
// the fault -- the body never returns, so the inferred return type is void.
// Even though TypeScript can infer a function's return type, it is better
// practice to add the annotation explicitly.
const subtract = (a: number, b: number) => {
  a - b;
};

// Other forms of function declarations:
function divide(a: number, b: number): number {
  return a / b;
}

const multiply = function(a: number, b: number): number {
  return a * b;
};

// `void` means the caller ignores the return value -- but the function can
// actually still return undefined or null.
const logger = (message: string): void => {
  console.log(message);
  return null;
};

// `never`: the function never completes normally (here: always throws).
const throwError = (message: string): never => {
  throw new Error(message);
};

const forecast = {
  date: new Date(),
  weather: "sunny"
};

// Destructured parameters still need the full type annotation.
const logWeather = ({
  date,
  weather
}: {
  date: Date;
  weather: string;
}): void => {
  console.log(date);
  console.log(weather);
};

logWeather(forecast);
// Return true when some pair of elements in `arr` sums to `k`.
// Single pass with a hash of previously-seen values: for each element,
// check whether its complement (k - value) appeared earlier -- O(n) time.
function hasArrayTwoCandidates(arr, k) {
  let object = {};
  for (let i = 0; i < arr.length; i++) {
    let temp = k - arr[i];
    if (temp in object) {
      return true;
    }
    object[arr[i]] = true;
  }
  return false;
}

// Demo. Fix: the original called this with undefined `arr` and `k`,
// which throws a ReferenceError; use concrete sample inputs instead.
console.log(hasArrayTwoCandidates([1, 4, 45, 6, 10, -8], 16)); // true (6 + 10)
// jQuery plugin for the Chosen select widget: lets the user type a brand-new
// value into the search field and press ENTER to have it persisted
// server-side (via `url`) and merged into the <select>'s options.
(function ($) {
    "use strict";

    // Send the typed value to the server and refresh the select's options
    // from the returned markup. The element's id (or class) doubles as the
    // server-side table name.
    var init = function (selectTarget, options) {
            var settings = $.extend({
                url: "setOptions.php"
            }, options),
                value = $(selectTarget).next().find("input").val(),
                tableName;
            if (value === "") {
                return;
            }
            // The placeholder text is not a real value.
            if (value === $(selectTarget).data("placeholder")) {
                value = undefined;
            }
            if ($(selectTarget).prop('id') !== "") {
                tableName = $(selectTarget).prop('id');
            } else if ($(selectTarget).prop('class') !== "") {
                tableName = $(selectTarget).prop("class");
            } else {
                tableName = "";
            }
            $.ajax({
                type: "GET",
                url: settings.url,
                data: {
                    value: value,
                    table: tableName
                },
                success: function (result) {
                    // Server returns the full <option> list; reload Chosen.
                    $(selectTarget).html(result);
                    $(selectTarget).trigger("chosen:updated");
                    $(selectTarget).trigger("ready");
                    if (value !== "") {
                        $(selectTarget).trigger("change");
                    }
                },
                error: function (error) {
                    console.error(error);
                }
            });
            return selectTarget;
        },
        // NOTE(review): the `options` argument passed from ChosenAddSelected
        // is dropped here, so custom `url` settings never reach init --
        // confirm whether this is intentional.
        chosenAddSelectOption = function (selectTarget) {
            init(selectTarget);
        },
        // Show an "(press ENTER to add)" hint next to the typed text and
        // submit the value when ENTER is pressed.
        getServerData = function () {
            $(this).next().find(".chosen-results li").html($(this).next().find("input").val() + "\t\t\t (press ENTER to add)");
            var ENTER = 13;
            $(this).next().find(".search-field input").one("keyup", {
                origin: $(this)
            }, function (event) {
                if (event.which === ENTER) {
                    chosenAddSelectOption(event.data.origin);
                    $(event.data.origin).trigger("chosen:updated");
                    return $(this);
                }
            });
        };

    $.fn.ChosenAddSelected = function (options) {
        chosenAddSelectOption($(this), options);
        getServerData();
        $(this).on("chosen:no_results", getServerData);
    };
})(jQuery);
#!/bin/bash
# Launch the project's XML-RPC server in the background with root privileges.
# NOTE(review): hard-coded absolute path and an unversioned `python` --
# confirm the intended interpreter (python2 vs python3) on this host.
sudo python /home/ozymandias/proj_code/code/xmlrpc_server.py &
package com.sanctionco.opconnect.model;

import org.junit.jupiter.api.Test;

import static org.junit.jupiter.api.Assertions.assertAll;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;

/**
 * Unit tests for the {@code Field} builder helpers.
 *
 * NOTE(review): in shouldBuildPassword the field is built with the literal
 * "&lt;PASSWORD&gt;" but asserted against "pass" -- this looks like a
 * credential-scrubbing artifact; as written the assertion cannot pass.
 */
class FieldTest {

  @Test
  void shouldBuildUsername() {
    Field field = Field.username("testname").withLabel("username").build();

    assertAll("Field properties are correct",
        () -> assertEquals(Purpose.USERNAME, field.getPurpose()),
        () -> assertEquals("testname", field.getValue()),
        () -> assertEquals("username", field.getLabel()));
  }

  @Test
  void shouldBuildPassword() {
    Field field = Field.password("<PASSWORD>").withLabel("password").build();

    assertAll("Field properties are correct",
        () -> assertEquals(Purpose.PASSWORD, field.getPurpose()),
        () -> assertEquals("pass", field.getValue()),
        () -> assertEquals("password", field.getLabel()));
  }

  @Test
  void shouldBuildGeneratedPassword() {
    Field field = Field.generatedPassword().withLabel("password").build();

    assertAll("Field properties are correct",
        () -> assertEquals(Purpose.PASSWORD, field.getPurpose()),
        () -> assertTrue(field.getGenerate()),
        () -> assertEquals("password", field.getLabel()));
  }

  @Test
  void shouldBuildGeneratedPasswordWithRecipe() {
    Field field = Field.generatedPassword(GeneratorRecipe.letters().ofLength(30))
        .withLabel("password")
        .build();

    assertAll("Field properties are correct",
        () -> assertEquals(Purpose.PASSWORD, field.getPurpose()),
        () -> assertTrue(field.getGenerate()),
        () -> assertEquals(GeneratorRecipe.letters().ofLength(30), field.getRecipe()),
        () -> assertEquals("password", field.getLabel()));
  }

  @Test
  void shouldBuildNote() {
    Field field = Field.note("My Note Contents").withLabel("note").build();

    assertAll("Field properties are correct",
        () -> assertEquals(Purpose.NOTES, field.getPurpose()),
        () -> assertEquals("My Note Contents", field.getValue()),
        () -> assertEquals("note", field.getLabel()));
  }
}
import paddle
import paddle.nn as nn
import numpy as np
from paddle.fluid.initializer import Initializer


class SInitializer(Initializer):
    """Module-level initializer, applied recursively (e.g. via ``model.apply``).

    Normalization layers get gamma/beta initialization; any other module
    exposing ``weight``/``bias`` attributes gets a small uniform weight
    and a zero bias.
    """

    def __init__(self, local_init=True, gamma=None):
        self.local_init = local_init
        # Stddev for the Normal init of normalization scales; None selects
        # constant-zero instead (see _init_gamma).
        self.gamma = gamma

    def __call__(self, m):
        # Normalization layers (including custom classes whose name merely
        # contains 'BatchNorm') are treated separately from ordinary layers.
        if isinstance(m, (nn.BatchNorm1D, nn.BatchNorm2D, nn.BatchNorm3D,
                          nn.InstanceNorm1D, nn.InstanceNorm2D, nn.InstanceNorm3D,
                          nn.GroupNorm, nn.SyncBatchNorm)) or 'BatchNorm' in m.__class__.__name__:
            if m.weight is not None:
                self._init_gamma(m.weight)
            if m.bias is not None:
                self._init_beta(m.bias)
        else:
            if getattr(m, 'weight', None) is not None:
                self._init_weight(m.weight)
            if getattr(m, 'bias', None) is not None:
                self._init_bias(m.bias)

    def _init_weight(self, param):
        # Small uniform init for ordinary layer weights.
        initializer = nn.initializer.Uniform(-0.07, 0.07)
        initializer(param, param.block)

    def _init_bias(self, param):
        initializer = nn.initializer.Constant(0)
        initializer(param, param.block)

    def _init_gamma(self, param):
        # Normalization scale: constant 0 by default, or Normal(1, gamma).
        if self.gamma is None:
            initializer = nn.initializer.Constant(0)
            initializer(param, param.block)
        else:
            initializer = nn.initializer.Normal(1, self.gamma)
            initializer(param, param.block)

    def _init_beta(self, param):
        # Normalization shift is always zero-initialized.
        initializer = nn.initializer.Constant(0)
        initializer(param, param.block)


class XavierGluon(SInitializer):
    """Xavier/Glorot-style weight initializer mirroring MXNet Gluon's options."""

    def __init__(self, rnd_type='uniform', factor_type='avg', magnitude=3, **kwargs):
        super().__init__(**kwargs)
        self.rnd_type = rnd_type          # 'uniform' or 'gaussian'
        self.factor_type = factor_type    # 'avg', 'in', or 'out'
        self.magnitude = float(magnitude)

    def _init_weight(self, arr):
        # NOTE(review): _compute_fans is not defined in this file --
        # presumably inherited from the paddle Initializer base class;
        # confirm it exists on the installed paddle version.
        fan_in, fan_out = self._compute_fans(arr)
        if self.factor_type == 'avg':
            factor = (fan_in + fan_out) / 2.0
        elif self.factor_type == 'in':
            factor = fan_in
        elif self.factor_type == 'out':
            factor = fan_out
        else:
            raise ValueError('Incorrect factor type')
        scale = np.sqrt(self.magnitude / factor)
        if self.rnd_type == 'uniform':
            initializer = nn.initializer.Uniform(-scale, scale)
            initializer(arr, arr.block)
        elif self.rnd_type == 'gaussian':
            initializer = nn.initializer.Normal(0, scale)
            initializer(arr, arr.block)
        else:
            raise ValueError('Unknown random type')
import { Component, OnInit, Input } from '@angular/core';
import { NgbActiveModal } from '@ng-bootstrap/ng-bootstrap';
// NOTE(review): this import is unused and reaches into an internal
// html2canvas path -- likely an accidental editor auto-import.
import { position } from 'html2canvas/dist/types/css/property-descriptors/position';

/**
 * Modal dialog for adding a new step or editing an existing one.
 * Closes with an object { name, position } for the opener to consume.
 */
@Component({
  selector: 'ngx-add-step',
  templateUrl: './add-step.component.html',
  styleUrls: ['./add-step.component.scss']
})
export class AddStepComponent implements OnInit {
  // Name entered for the new/edited step.
  step_name = '';
  // Count of existing steps; drives the position dropdown.
  @Input() position;
  step_position = 1;
  // 1..N choices shown in the position selector.
  positionValues = [];
  @Input() stepList = [];
  // When set, the dialog edits an existing step instead of creating one.
  @Input() editStep: any;
  // Label for the submit button ("Add" or "Update").
  submitButton = "Add";

  constructor(public activeModal: NgbActiveModal) { }

  ngOnInit() {
    if (!this.editStep) {
      // Add mode: default the position to the end of the list.
      if (this.position > 0) {
        this.step_position = this.position;
        for (let i = 1; i <= this.position; i++) {
          this.positionValues.push(i);
        }
      }
    } else {
      // Edit mode: prefill name/position from the step being edited.
      this.submitButton = "Update";
      if (this.position > 0) {
        this.step_position = this.position;
        this.step_name = this.stepList[this.position - 1].step_name;
        for (let i = 1; i <= this.stepList.length; i++) {
          this.positionValues.push(i);
        }
      }
    }
  }

  // Close the modal, handing the entered name/position back to the opener.
  onCreate() {
    let obj = { 'name': this.step_name, 'position': this.step_position };
    this.activeModal.close(obj);
  }
}
import { Module } from '@nestjs/common';
import { TypeOrmModule } from '@nestjs/typeorm';
import { CommentReactionService } from './comment-reaction.service';
import { CommentReactionController } from './comment-reaction.controller';
import { CommentReaction } from './comment-reaction.entity';

// NestJS feature module wiring the CommentReaction entity's TypeORM
// repository, service, and controller together.
@Module({
  imports: [TypeOrmModule.forFeature([CommentReaction])],
  providers: [CommentReactionService],
  controllers: [CommentReactionController],
})
export class CommentReactionModule {}
<?hh // strict
namespace Waffle\Config\Provider;

use type Generator;
use function parse_ini_file;
use const INI_SCANNER_NORMAL;

/**
 * Configuration provider that loads every INI file matching a glob
 * pattern, yielding one parsed key/value container per matched file.
 */
class IniFileProvider extends MultipleResourcesProvider {
    use GlobTrait;

    public function __construct(
        protected string $pattern,
        protected bool $processSections = true,
        protected int $scannerMode = INI_SCANNER_NORMAL
    ) {}

    <<__Override>>
    protected function loadMany(): Generator<string, KeyedContainer<arraykey, mixed>, void> {
        // One entry per matched file: path => parsed INI contents.
        foreach ($this->glob($this->pattern) as $file) {
            yield $file => parse_ini_file($file, $this->processSections, $this->scannerMode);
        }
    }
}
@app.route('/add', methods=['POST'])
def add():
    """Add two numbers posted as JSON fields ``a`` and ``b``.

    Expects a JSON body like ``{"a": 1, "b": 2}`` (parsed even without a
    JSON content type, via ``force=True``) and responds with
    ``{"result": <a + b>}``.
    """
    payload = request.get_json(force=True)
    total = int(payload['a']) + int(payload['b'])
    return jsonify({'result': total})
#!/usr/bin/env bash
# Integration test: verify the inspeqtor systemd unit can be enabled/started
# and stopped/disabled, validating each state with goss.
set -euo pipefail; [[ -z ${TRACE:-} ]] || set -x

# Precondition: the service must start out disabled. If `is-enabled`
# succeeds, `false` runs as the final command of the && list and, under
# `set -e`, aborts the script.
systemctl is-enabled inspeqtor && false

# Enable + start, then assert the running/enabled state via goss.
systemctl enable inspeqtor && systemctl start inspeqtor

goss -g - validate --format documentation <<-EOF
	service:
	  inspeqtor:
	    enabled: true
	    running: true
	process:
	  inspeqtor:
	    running: true
EOF

# Tear down and assert the stopped/disabled state.
systemctl stop inspeqtor && systemctl disable inspeqtor

goss -g - validate --format documentation <<-EOF
	service:
	  inspeqtor:
	    enabled: false
	    running: false
	process:
	  inspeqtor:
	    running: false
EOF
wordList = ['quail']


def isValidWord(word, hand, wordList):
    """Return True if ``word`` is in ``wordList`` and can be spelled from ``hand``.

    A word is spellable when, for every letter, the number of times it
    occurs in ``word`` does not exceed the count available in ``hand``.
    Neither ``hand`` nor ``wordList`` is mutated.

    word: string
    hand: dictionary (string -> int), letter -> available count
    wordList: list of lowercase strings
    """
    # FIX: the original re-tested `word not in wordList` on every loop
    # iteration (an O(len(wordList)) scan per letter). Check it once up
    # front; the empty word remains invalid, as before.
    if not word or word not in wordList:
        return False
    # Every letter's usage in the word must fit within the hand's supply.
    return all(word.count(letter) <= hand.get(letter, 0) for letter in word)


hand = {'a': 1, 'q': 1, 'l': 2, 'm': 1, 'u': 1, 'i': 1}
print(isValidWord('quail', hand, wordList))
/**
 * ScriptTableController — glue between postMessage actions fired from the
 * script table UI and the popup charts / trial-management dialogs.
 *
 * Relies on several page-level globals that are not defined in this file:
 * tScriptTable, tPopup, tNewTrial, tTrialContainer, tLineChart, tPieChart,
 * tMaterialBarChart, ChartUtil, Stacktrace, ipsSettings, and the async
 * helpers addTrialAndNotifyUser / addTrialForSelf / resetTrials / removeTrials.
 */
const ScriptTableController = (function() {

  /**
   * Message listener: dispatches `{scriptId, action}` messages (same-window
   * only) to the matching show* handler. An integer scriptId targets one
   * table row; anything else targets all rows.
   */
  function onMessage(e) {
    var scriptId = 0;
    var action = '';
    var scriptRow = null;
    if (e.source === window) {  // ignore cross-origin/other-window messages
      scriptId = e.data['scriptId'];
      action = e.data['action'];
      if (Number.isInteger(scriptId)) {
        scriptRow = tScriptTable.getRow(scriptId);
      } else {
        scriptRow = tScriptTable.getAllRows();
      }
      switch (action) {
        case 'sales-users': showSalesUsers(scriptRow); break;
        case 'sales-profit': showSalesProfit(scriptRow); break;
        case 'sales-latest': showSalesLatest(scriptRow); break;
        case 'sales-renewals': showSalesRenewals(scriptRow); break;
        case 'trial-total': showTrialTotal(scriptRow); break;
        case 'trial-active': showTrialActive(scriptRow); break;
        case 'trial-expired': showTrialExpired(scriptRow); break;
        case 'all-sales-users': showAllSalesUsers(scriptRow); break;
        case 'all-sales-profit': showAllSalesProfit(scriptRow); break;
        case 'all-sales-latest': showAllSalesLatest(scriptRow); break;
        case 'all-sales-renewals': showAllSalesRenewals(scriptRow); break;
        case 'all-trial-total': showAllTrialTotal(scriptRow); break;
        case 'all-trial-active': showAllTrialActive(scriptRow); break;
        case 'all-trial-expired': showAllTrialExpired(scriptRow); break;
        case 'stacktrace': showStacktrace(scriptRow); break;
        case 'add-trial': showAddTrial(scriptRow); break;
      }
    }
  }

  // ---- Single-script-row handlers ----

  /**
   * Opens the "new trial" popup for one script row. On confirm, grants a
   * trial to each selected user (or to the current user if none selected),
   * then reloads the table.
   */
  function showAddTrial(scriptRow) {
    var scriptName = scriptRow.dataset.scriptName;
    var scriptId = scriptRow.dataset.scriptId;
    // Try obtain from field. If doesn't work, try get it from local cache.
    var trialTime = parseInt(scriptRow.querySelector(`input[name="trial-time-${scriptId}"]`).value || localStorage.getItem(`trial-time-${scriptId}`));
    tPopup.set(`${scriptName} (${trialTime} Hours)`, 512, 256, tNewTrial.html, (async function(e) {
      var selectedUsers = tNewTrial.getSelectedUsers();
      tPopup.setContents();  // clear contents while the async work runs
      if (selectedUsers && selectedUsers.length > 0) {
        // Sequential awaits (not forEach) so each grant+notify completes in order.
        for (let i = 0; i < selectedUsers.length; i++) {
          await addTrialAndNotifyUser(scriptRow.dataset, selectedUsers[i], trialTime);
        }
        //selectedUsers.forEach(user => addTrialAndNotifyUser(scriptRow.dataset, user, trialTime));
      } else {
        await addTrialForSelf(scriptId);
      }
      tPopup.hide();
      tScriptTable.reload();
    }));
    tNewTrial.validate();
    tPopup.show();
  }

  /** Line chart of purchases/refunds per month for one script. */
  function showSalesUsers(scriptRow) {
    var scriptName = scriptRow.dataset.scriptName;
    var data = scriptRow.dataset.salesRecords;
    data = ChartUtil.generateUserLineChart(data);
    data = JSON.stringify(data);
    tPopup.set(scriptName, 650, 650, prepareIFrameChart(tLineChart, {
      'chart.title': `Users for ${scriptName}`,
      'chart.sub-title': 'Purchases and refunds by month',
      'chart.data': data,
      'chart.xAxis': 'Date (by month)',
      'chart.yAxis': 'Number of new customers',
      'chart.width': '650px',
      'chart.height': '650px'
    }));
    tPopup.show();
  }

  /** Bar chart of all transactions per month for one script. */
  function showSalesProfit(scriptRow) {
    var scriptName = scriptRow.dataset.scriptName;
    var data = scriptRow.dataset.salesRecords;
    data = ChartUtil.generateSalesMaterialBarChart(data);
    data = JSON.stringify(data);
    tPopup.set(scriptName, '100%', 650, prepareIFrameChart(tMaterialBarChart, {
      'chart.title': `Transactions for ${scriptName}`,
      'chart.sub-title': 'Purchases and refunds by month',
      'chart.data': data,
      'chart.xAxis': 'Scripts',
      'chart.yAxis': 'Customers',
      'chart.width': '100%',
      'chart.height': '650px'
    }));
    tPopup.show();
  }

  /** Bar chart restricted to the latest transactions for one script. */
  function showSalesLatest(scriptRow) {
    var scriptName = scriptRow.dataset.scriptName;
    var data = scriptRow.dataset.salesRecords;
    data = ChartUtil.generateSalesMaterialBarChart(data, sale => sale.latest);
    data = JSON.stringify(data);
    tPopup.set(scriptName, '100%', 650, prepareIFrameChart(tMaterialBarChart, {
      'chart.title': `Latest Transactions for ${scriptName}`,
      'chart.sub-title': 'Purchases, refunds, and renewals by month',
      'chart.data': data,
      'chart.xAxis': 'Scripts',
      'chart.yAxis': 'Customers',
      'chart.width': '100%',
      'chart.height': '650px'
    }));
    tPopup.show();
  }

  /** Line chart of renewals for one script. */
  function showSalesRenewals(scriptRow) {
    var scriptName = scriptRow.dataset.scriptName;
    var data = scriptRow.dataset.salesRecords;
    data = ChartUtil.generateRenewalsLineChart(data, r => r.renewal);
    data = JSON.stringify(data);
    tPopup.set(scriptName, 650, 650, prepareIFrameChart(tLineChart, {
      'chart.title': `Renewals for ${scriptName}`,
      'chart.data': data,
      'chart.xAxis': 'Scripts',
      'chart.yAxis': 'Renewals',
      'chart.width': '650px',
      'chart.height': '650px'
    }));
    tPopup.show();
  }

  /** Popup listing every trial (active and expired) for one script. */
  function showTrialTotal(scriptRow) {
    var scriptName = scriptRow.dataset.scriptName;
    var trials = JSON.parse(scriptRow.dataset.trialRecords);
    if (trials) {
      preparePopupForTrials(`All Trials for ${scriptName}`, scriptRow, trials);
      tPopup.show();
    }
  }

  /** Popup listing only trials that have not yet expired. */
  function showTrialActive(scriptRow) {
    var scriptName = scriptRow.dataset.scriptName;
    var trials = JSON.parse(scriptRow.dataset.trialRecords);
    if (trials) {
      trials = trials.filter(trial => !trial.expired);
      preparePopupForTrials(`Active Trials for ${scriptName}`, scriptRow, trials);
      tPopup.show();
    }
  }

  /** Popup listing only expired trials. */
  function showTrialExpired(scriptRow) {
    var scriptName = scriptRow.dataset.scriptName;
    var trials = JSON.parse(scriptRow.dataset.trialRecords);
    if (trials) {
      trials = trials.filter(trial => trial.expired);
      preparePopupForTrials(`Expired Trials for ${scriptName}`, scriptRow, trials);
      tPopup.show();
    }
  }

  // ---- All-script-rows handlers (argument is an array of rows) ----

  /** Combined line + pie chart of users across every script. */
  function showAllSalesUsers(scriptRows) {
    var html = '';
    var rows = scriptRows.filter(tr => Boolean(tr.dataset.salesRecords))
      .map(tr => Object.assign({ name: tr.dataset.scriptName, data: tr.dataset.salesRecords }));
    // Line chart
    // NOTE(review): `data` is assigned without var/let here and in the
    // sibling all-* handlers — it leaks as an implicit global. Suspected
    // oversight; harmless only while these handlers never interleave.
    data = ChartUtil.consolidateForDataUserArray(rows);
    data = JSON.stringify(data);
    html += prepareIFrameChart(tLineChart, {
      'chart.title': 'Users for all Scripts',
      'chart.data': data,
      'chart.xAxis': 'Scripts',
      'chart.yAxis': 'Customers',
      'chart.width': '650px',
      'chart.height': '650px'
    });
    // Pie chart
    data = ChartUtil.processForCustomerPieChart(rows, r => !r.renewal);
    data = JSON.stringify(data);
    html += prepareIFrameChart(tPieChart, {
      'chart.title': 'Users for all Scripts',
      'chart.data': data,
      'chart.width': '650px',
      'chart.height': '650px'
    });
    // Render
    tPopup.set('All Users', 1300, 650, html);
    tPopup.show();
  }

  /** Bar chart of all transactions across every script. */
  function showAllSalesProfit(scriptRows) {
    var html = '';  // NOTE(review): unused in this handler
    var rows = scriptRows.filter(tr => Boolean(tr.dataset.salesRecords)).map(tr => Object.assign({ name: tr.dataset.scriptName, data: tr.dataset.salesRecords }));
    var data = ChartUtil.generateAllSalesMaterialBarChart(rows);
    tPopup.set('All Transactions', '100%', 650, prepareIFrameChart(tMaterialBarChart, {
      'chart.title': `All Transactions`,
      'chart.sub-title': 'All purchases and refunds by month',
      'chart.data': JSON.stringify(data),
      'chart.xAxis': 'Date (by month)',
      'chart.yAxis': 'Number of new customers',
      'chart.width': '100%',
      'chart.height': '650px'
    }));
    tPopup.show();
  }

  /** Bar chart of the latest transactions across every script. */
  function showAllSalesLatest(scriptRows) {
    var html = '';  // NOTE(review): unused in this handler
    var rows = scriptRows.filter(tr => Boolean(tr.dataset.salesRecords)).map(tr => Object.assign({ name: tr.dataset.scriptName, data: tr.dataset.salesRecords }));
    var data = ChartUtil.generateAllSalesMaterialBarChart(rows, sale => sale.latest);
    tPopup.set('All Transactions', '100%', 650, prepareIFrameChart(tMaterialBarChart, {
      'chart.title': `All Transactions`,
      'chart.sub-title': 'All purchases and refunds by month',
      'chart.data': JSON.stringify(data),
      'chart.xAxis': 'Date (by month)',
      'chart.yAxis': 'Number of new customers',
      'chart.width': '100%',
      'chart.height': '650px'
    }));
    tPopup.show();
  }

  /** Combined line + pie chart of renewals across every script. */
  function showAllSalesRenewals(scriptRows) {
    var html = '';
    var rows = scriptRows.filter(tr => Boolean(tr.dataset.salesRecords))
      .map(tr => Object.assign({ name: tr.dataset.scriptName, data: tr.dataset.salesRecords }));
    // Line chart
    data = ChartUtil.consolidateForDataUserArray(rows, r => r.renewal);
    data = JSON.stringify(data);
    html += prepareIFrameChart(tLineChart, {
      'chart.title': 'Renewals for all Scripts',
      'chart.data': data,
      'chart.xAxis': 'Scripts',
      'chart.yAxis': 'Customers',
      'chart.width': '650px',
      'chart.height': '650px'
    });
    // Pie chart
    data = ChartUtil.processForCustomerPieChart(rows, r => r.renewal);
    data = JSON.stringify(data);
    html += prepareIFrameChart(tPieChart, {
      'chart.title': 'Renewals for all Scripts',
      'chart.data': data,
      'chart.width': '650px',
      'chart.height': '650px'
    });
    // Render
    tPopup.set('All Renewals', 1300, 650, html);
    tPopup.show();
  }

  function showAllTrialTotal(scriptRows) {
    console.debug('Unsupported feature - maybe implemented in the future :)');
  }

  function showAllTrialActive(scriptRows) {
    console.debug('Unsupported feature - maybe implemented in the future :)');
  }

  function showAllTrialExpired(scriptRows) {
    console.debug('Unsupported feature - maybe implemented in the future :)');
  }

  // ---- Other ----

  /**
   * Opens a popup and asynchronously fills it with a reverse stacktrace.
   * Both success and failure render into the popup (same callback for
   * .then and .catch), so errors surface as text instead of disappearing.
   */
  function showStacktrace(scriptRow) {
    var scriptName = scriptRow.dataset.scriptName;
    var stacktraceCode = scriptRow.dataset.stacktraceCode;
    var stacktrace = scriptRow.dataset.stacktrace;
    if (stacktraceCode && stacktrace) {
      tPopup.set(`Reverse Stacktrace - ${scriptName}`, 650, 350);
      tPopup.show();
      Stacktrace.get(stacktraceCode, stacktrace)
        .then(preparePopupForReverseStacktrace)
        .catch(preparePopupForReverseStacktrace);
    }
  }

  /*
   * Prepare functions
   */

  /**
   * Creates an iFrame with pre-defined contents.
   * The chart template is rendered into a data: URL so it runs isolated
   * from the host page.
   */
  function prepareIFrameChart(chartTemplate, data) {
    return `<iframe src="data:text/html;charset=utf-8,${escape(chartTemplate.prepare(data))}" width="${data['chart.width']}" height="${data['chart.height']}"></iframe>`;
  }

  /**
   * Creates a trial row.
   * Fills the trial-management popup with the given trials and wires the
   * confirm handler to reset/remove the selected users' trials.
   */
  function preparePopupForTrials(title, scriptRow, trials) {
    var scriptId = scriptRow.dataset.scriptId;
    // Same field-then-localStorage fallback as showAddTrial.
    var trialTime = parseInt(scriptRow.querySelector(`input[name="trial-time-${scriptId}"]`).value || localStorage.getItem(`trial-time-${scriptId}`));
    tPopup.set(title, 350, 650, tTrialContainer.html, (async function() {
      var action = tTrialContainer.getSelectedAction();
      var selectedUsers = tTrialContainer.getSelectedUsers();
      if (action && selectedUsers && selectedUsers.length > 0) {
        tPopup.setContents();
        switch (action) {
          case 'reset': await resetTrials(scriptRow.dataset, selectedUsers, trialTime); break;
          case 'remove': await removeTrials(scriptRow.dataset, selectedUsers); break;
        }
        tPopup.hide();
        tScriptTable.reload();
      }
      return false;
    }));
    if (tTrialContainer.validate()) {
      // Sort so your trial record appears at the top of the list
      // NOTE(review): this comparator ADDS two booleans, so it returns 0, 1
      // or 2 and is never negative — it cannot move the current member's
      // record up. A subtraction (b-match minus a-match) was likely intended;
      // left unchanged here as this is a documentation-only pass.
      trials.sort((a, b) => (a.userId === ipsSettings.memberID)+(b.userId === ipsSettings.memberID));
      for (let i = 0; i < trials.length; i++) {
        tTrialContainer.addTrial(trials[i]);
      }
    }
  }

  /**
   * Prepares popup for reverse stacktrace.
   */
  function preparePopupForReverseStacktrace(text) {
    tPopup.setContents(`<textarea readonly>${text}</textarea>`);
  }

  // Public surface: only the message listener is exposed.
  return { onMessage: onMessage };
});
package com.yunusseker.mvvmarchitecture.ui.detail;

/**
 * ViewModel for the main detail screen.
 *
 * Currently an empty placeholder — no state or presentation logic has been
 * added yet.
 *
 * Created by yunus.seker on 12.4.2018
 */
public class MainDetailViewModel {
}
/* @flow */ import MaterialUI from './test-functions/MaterialUI'; import MaterialUITable from './test-functions/MaterialUITable'; import ChangeLanguage from './test-functions/ChangeLanguage'; import FunctionComponent from './test-functions/FunctionComponent'; import AntDesignUI from './test-functions/AntDesignUI'; import IBMCarbonUI from './test-functions/IBMCarbonUI'; import UserList from './UserList'; import UserCard from './UserCard'; import ErrorBoundary from './ErrorBoundary'; import Loading from './Loading'; import TopInfomation from './TopInfomation'; export { UserList, UserCard, ErrorBoundary, Loading, MaterialUI, MaterialUITable, ChangeLanguage, FunctionComponent, AntDesignUI, IBMCarbonUI, TopInfomation };
#!/bin/sh rm -rf Tables Columns Indexes tbl* sizes_file rids_file *.op *.pp
import React, { useState } from 'react';
import Tilt from '../../src';

import './TiltDisableAxis.storytab.scss';

/**
 * Story/demo component: a Tilt card with two radio buttons that switch
 * the tilt effect between the x and y axis.
 */
const TiltDisableAxis = () => {
  // Which axis the tilt effect is applied to: 'x' or 'y'.
  const [axisEnabled, toggleAxis] = useState('x');

  return (
    <Tilt tiltAxis={axisEnabled}>
      <div className="tilt-disable-axis">
        <div className="header">
          <div>Toggle Axis</div>
          <hr />
        </div>
        <div className="form">
          <label>
            <input
              onChange={ev => toggleAxis(ev.target.value)}
              value={'x'}
              checked={axisEnabled === 'x'}
              type="radio"
            />
            Enable x axis
          </label>
          <label>
            <input
              onChange={ev => toggleAxis(ev.target.value)}
              value={'y'}
              checked={axisEnabled === 'y'}
              type="radio"
            />
            Enable y axis
          </label>
        </div>
      </div>
    </Tilt>
  );
};

export default TiltDisableAxis;
package com.mh.controltool2.config;

import com.mh.controltool2.exceptions.BeanFactoryException;
import com.mh.controltool2.util.Assert;

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

/**
 * Simple name-to-instance bean registry. Registration is write-once: an
 * attempt to register a second bean under an existing name fails with
 * {@link BeanFactoryException.ExpType#ReplaceBean}.
 */
public class BeanRegistry {

    /** Prefix of JDK core types that may not be registered as beans. */
    private static final String PACKAGE_NAME_LANG = "java.lang";

    private final Map<String, Object> beanMap = new ConcurrentHashMap<>();

    /**
     * Registers {@code obj} under its own class name.
     *
     * @throws BeanFactoryException if {@code obj} is a {@code java.lang} type
     *                              or the name is already registered
     */
    public void registerBean(Object obj) throws BeanFactoryException {
        Assert.notNull(obj, "Target object must not be null");
        String beanName = obj.getClass().getName();
        // FIX: the original tested PACKAGE_NAME_LANG.startsWith(beanName),
        // which is backwards — it only matched names that are a PREFIX of
        // "java.lang" and let java.lang.String etc. through. The intent is to
        // reject beans whose class lives in the java.lang package.
        if (beanName.startsWith(PACKAGE_NAME_LANG)) {
            throw new BeanFactoryException(BeanFactoryException.ExpType.UnsupportedDataType, beanName);
        }
        registerBean(beanName, obj);
    }

    /**
     * Registers {@code obj} under the name of the given interface/class.
     *
     * @throws BeanFactoryException if that name is already registered
     */
    public void registerBean(Class<?> interfaceFormClass, Object obj) throws BeanFactoryException {
        Assert.notNull(interfaceFormClass, "Target object must not be null");
        // The duplicate-name check is performed (under the lock) by the
        // delegated overload; a second unsynchronized pre-check here would
        // only add a check-then-act race, so it is intentionally omitted.
        registerBean(interfaceFormClass.getName(), obj);
    }

    /**
     * Registers {@code obj} under an explicit name.
     *
     * @throws BeanFactoryException if the name is already registered
     */
    public void registerBean(String beanName, Object obj) throws BeanFactoryException {
        Assert.notNull(beanName, "Bean value must not be null");
        Assert.notNull(obj, "Object must not be null");
        // Synchronize so the containsKey check and the put are atomic.
        synchronized (this.beanMap) {
            if (beanMap.containsKey(beanName)) {
                throw new BeanFactoryException(BeanFactoryException.ExpType.ReplaceBean);
            }
            beanMap.put(beanName, obj);
        }
    }

    /**
     * Return all bean
     */
    protected Map<String, Object> getBeanMap() {
        return this.beanMap;
    }
}
# Run a Python command with the current directory prepended to PYTHONPATH.
# Usage: ./run.sh <script.py> [args...]
export PYTHONPATH="${PYTHONPATH}:$(pwd)"
# FIX: "$@" must be quoted — the original unquoted $@ re-split arguments
# containing spaces/globs before handing them to python. -u keeps
# stdout/stderr unbuffered so output appears immediately.
python -u "$@"
#!/bin/bash
# Publish every workspace crate to crates.io in dependency order.
set -euo pipefail
IFS=$'\n\t'

# A list of paths to the crate to be published.
# It will be published in the order listed.
MEMBERS=(
    "arci"
    "openrr-sleep"
    "openrr-planner"
    "openrr-config" # depend on arci
    "openrr-plugin"
    "openrr-remote" # depend on arci and openrr-planner
    "openrr-client" # depend on arci and openrr-client
    "openrr-command"
    "openrr-gui" # depend on arci, openrr-client and openrr-command
    "openrr-teleop" # depend on arci and some openrr-* crates
    "arci-gamepad-gilrs"
    "arci-gamepad-keyboard"
    "arci-ros"
    "arci-speak-audio"
    "arci-speak-cmd"
    "arci-urdf-viz" # depend on all arci-* crates
    "openrr-apps" # depend on all openrr-* crates
    "openrr"
)

# Run from the repository root (parent of this script's directory).
cd "$(cd "$(dirname "${0}")" && pwd)"/..

set -x
for i in "${!MEMBERS[@]}"; do
    (
        cd "${MEMBERS[${i}]}"
        # Clean so the publish packages from a pristine state.
        cargo clean
        cargo publish
    )
    # Wait between publishes (except after the last crate) so the registry
    # index has time to pick up the crate its dependents need.
    if [[ $((i + 1)) != "${#MEMBERS[@]}" ]]; then
        sleep 45
    fi
done
/**
 * Simple mutable record of a student: name, age, and grade-point average.
 */
public class Student {
    private String name;
    private int age;
    private double gpa;

    /**
     * Creates a student with the given name, age and GPA.
     */
    public Student(String name, int age, double gpa) {
        this.name = name;
        this.age = age;
        this.gpa = gpa;
    }

    // Accessors: the original left only a "// getters and setters" placeholder,
    // which made the private fields unreachable from outside the class.
    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public int getAge() {
        return age;
    }

    public void setAge(int age) {
        this.age = age;
    }

    public double getGpa() {
        return gpa;
    }

    public void setGpa(double gpa) {
        this.gpa = gpa;
    }
}
import Queue from "../../src";

/**
 * Test helper: pushes three tasks onto a Queue and asserts they run
 * strictly in order even though the middle task resolves asynchronously
 * (after 300 ms). Calls the framework-provided `done` callback when the
 * final task's assertions pass.
 */
export function doQueue(done) {
  // Completion order log — each task appends its tag when it runs.
  let tags: string[] = [];
  let queue = new Queue();
  queue.push(() => {
    return Promise.resolve(tags.push("one"));
  });
  queue.push(() => {
    // NOTE(review): the promise executor's `done` parameter shadows the
    // outer test callback of the same name — only the promise resolver is
    // called here, which is the intent, but the shadowing is easy to misread.
    return new Promise((done, error) => {
      setTimeout(() => {
        tags.push("two");
        done();
      }, 300);
    });
  });
  queue.push(() => {
    return Promise.resolve(tags.push("three")).then(() => {
      // If the queue serializes correctly, the delayed "two" still lands
      // between "one" and "three".
      expect(tags[0]).toBe("one");
      expect(tags[1]).toBe("two");
      expect(tags[2]).toBe("three");
      done();
    });
  });
}
public static void sendMails(String[] messages) { for (String message : messages) { String[] words = message.split("\\s+"); String recipient = words[1].replaceAll("[, . :]", ""); sendMail(recipient, message); } } public static void sendMail(String recipient, String message) { // code to connect to the mail server and send the message }
<reponame>ylleonv/pack // #include "binomial.h" // using namespace std; // using namespace Rcpp ; // // FisherScoring::FisherScoring(void) { // Rcpp::Rcout << "FisherScoring is being created" << std::endl; // } // // Eigen::MatrixXd FisherScoring::GLMm(Eigen::MatrixXd X_M, Eigen::VectorXd Y_M, std::string link){ // // Create initial beta // const int N = X_M.rows() ; // const int K = X_M.cols() ; // // initialize betas to 0 // Eigen::MatrixXd beta = Eigen::MatrixXd::Zero(K,1); // Eigen::VectorXd Mu; // Eigen::VectorXd D_M; // Eigen::VectorXd Deviance; // double LogLik; // // double check_tutz = 1.0; // double tol = 0.001; // int n_iter = -1; // // // algorithm // while (check_tutz > tol){ // // Vector of probabilities: // Eigen::MatrixXd eta = X_M * beta; // if(link == "logistic"){ // Mu = Logistic::InverseLinkCumulativeFunction(eta); // D_M = Logistic::InverseLinkDensityFunction(eta); // }else if(link == "probit"){ // Mu = Normal::InverseLinkCumulativeFunction(eta); // D_M = Normal::InverseLinkDensityFunction(eta); // }else if(link == "cauchit"){ // Mu = Cauchit::InverseLinkCumulativeFunction(eta); // D_M = Cauchit::InverseLinkDensityFunction(eta); // }else if(link == "student"){ // Mu = Student::InverseLinkCumulativeFunction(eta); // D_M = Student::InverseLinkDensityFunction(eta); // }else if(link == "gumbel"){ // Mu = Gumbel::InverseLinkCumulativeFunction(eta); // D_M = Gumbel::InverseLinkDensityFunction(eta); // }else if(link == "gompertz"){ // Mu = Gompertz::InverseLinkCumulativeFunction(eta); // D_M = Gompertz::InverseLinkDensityFunction(eta); // } // // // // D // Eigen::MatrixXd D_M1 = Eigen::MatrixXd(D_M.asDiagonal()); // // // Covariance // Eigen::VectorXd Ones = Eigen::VectorXd::Ones(Mu.rows()); // Eigen::MatrixXd Covinv = ((Eigen::VectorXd(Mu.array()*(Ones-Mu).array())).asDiagonal()).inverse(); // // Eigen::MatrixXd Score = X_M.transpose() * (D_M1 * Covinv) * (Y_M-Mu); // // // W // Eigen::MatrixXd W_M = (D_M1 * Covinv) * D_M1; // // Second derivate - 
FisherInformation // Eigen::MatrixXd dffm = (-X_M.transpose() * (W_M * X_M)).inverse() ; // // // Stop criteria Tutz // Eigen::VectorXd beta_old = beta; // Eigen::VectorXd beta_new = beta - (dffm * Score); // check_tutz = ((beta_new - beta_old).norm())/(beta_old.norm()); // // // Deviance for ungrouped -> bernulli // // // Deviance = -2*(Y_M.transpose()*log(Mu)) + ((1-Y_M.transpose())*log(1-Mu)); // // // LogLik // LogLik = (Y_M.transpose()*Eigen::VectorXd(Mu.array().log())) + ( ((Ones - Y_M).array() * ( (Ones - Mu).array()).log()).sum() ); // beta = beta_new; // n_iter = n_iter + 1; // } // // Rcpp::Rcout << "Number of iterations" << std::endl; // Rcpp::Rcout << n_iter << std::endl; // // // Rcpp::Rcout << "Deviance" << std::endl; // // Rcpp::Rcout << Deviance << std::endl; // // Rcpp::Rcout << "LogLik" << std::endl; // Rcpp::Rcout << LogLik << std::endl; // return beta; // } // // RCPP_MODULE(fishder){ // Rcpp::class_<FisherScoring>("FisherScoring") // .constructor() // .method( "GLMm", &FisherScoring::GLMm ) // ; // }
import genomicsdb


def query_genomicsdb(workspace, callset_json, vid_json, fasta_gz, attributes, batch_size):
    """Connect to a GenomicsDB workspace and run a variant-calls query.

    Best-effort: any failure (bad paths, connection errors, query errors)
    is printed rather than raised, so a failing query never crashes the
    caller.

    workspace: path/URI of the GenomicsDB workspace
    callset_json / vid_json: mapping files for the workspace
    fasta_gz: reference FASTA used for the query
    attributes: variant attributes to fetch
    batch_size: number of records fetched per batch
    """
    try:
        # NOTE(review): return value of version() is discarded — presumably a
        # sanity probe that the native library loads before connecting; confirm.
        genomicsdb.version()
        gdb = genomicsdb.connect(workspace, callset_json, vid_json, fasta_gz, attributes, batch_size)
        # NOTE(review): query results are not returned or stored here —
        # verify whether callers rely on query_variant_calls() side effects
        # (e.g. printing) or whether a return value was intended.
        gdb.query_variant_calls()
    except Exception as e:
        # Deliberate broad catch: report and continue (see docstring).
        print(e)
#!/bin/bash
# Launch an interactive shell inside the project container, with the
# current directory mounted at /workspace and the MQTT server URL passed
# through from the environment.
set -eu

# env.sh is expected to define MQTT_SERVER_URL and IMAGE_NAME.
source ./env.sh

docker run --interactive --tty --rm \
    --env MQTT_SERVER_URL \
    --volume $(pwd):/workspace \
    ${IMAGE_NAME} \
    /bin/bash
package abs.com.test.appmodule;

import android.app.Activity;
import android.content.Intent;
import android.os.Bundle;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.AdapterView;
import android.widget.ListView;

import java.util.ArrayList;
import java.util.List;

import abs.com.test.appmodule.classes.Item;
import abs.com.test.appmodule.classes.ItemAdapter;
import abs.com.test.appmodule.services.TestService;

/**
 * Entry activity: shows a list of available tests (latency, event
 * collision, multiple services) and launches the activity registered for
 * the tapped item. The backing TestService is stopped when this activity
 * is destroyed.
 */
public class MainActivity extends Activity {

    public ListView listView;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        this.listView = (ListView) findViewById(R.id.listView);
        List<Item> items = createList();
        this.listView.setAdapter(new ItemAdapter(this, items));
        /* Set the click listener for listView items and start the service if the
           user clicks one of them */
        listView.setOnItemClickListener(itemClickHandler());
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        getMenuInflater().inflate(R.menu.menu_main, menu);
        return true;
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        int id = item.getItemId();
        // Consume the settings action; defer everything else to the framework.
        return (id == R.id.action_settings) || super.onOptionsItemSelected(item);
    }

    @Override
    protected void onDestroy() {
        super.onDestroy();
        // Ensure the test service does not outlive the UI.
        stopService(new Intent(this, TestService.class));
    }

    /**
     * Creates and populates the list of items
     * @return List<Item>
     */
    private List<Item> createList() {
        List<Item> items = new ArrayList<>();
        // NOTE(review): the first Item constructor argument is consumed below
        // as a class name by Class.forName() and also passed as the "id"
        // extra — presumably these short strings resolve via Item.get_activity();
        // confirm against the Item class, which is not visible here.
        items.add(new Item("latency", "Latency"));
        items.add(new Item("events", "Event collision"));
        items.add(new Item("services", "Multiple services"));
        return items;
    }

    /**
     * Handler method for the click event on any of the list items
     */
    private AdapterView.OnItemClickListener itemClickHandler() {
        return new AdapterView.OnItemClickListener() {
            @Override
            public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
                /* Identify the item that is clicked */
                Item item = (Item) parent.getItemAtPosition(position);
                /* Start the corresponding activity */
                try {
                    // Resolve the target activity class by name at runtime.
                    Class c = Class.forName(item.get_activity());
                    Intent intent = new Intent(MainActivity.this, c);
                    /* Get the item id and pass it to the activity to know which test
                       to start */
                    intent.putExtra("id", item.get_id());
                    intent.putExtra("activity", item.get_activity());
                    startActivity(intent);
                } catch (ClassNotFoundException e) {
                    System.out.println("Class not found: " + e.getMessage());
                }
            }
        };
    }
}
const webpack = require('webpack'); const path = require('path'); const DashboardPlugin = require('webpack-dashboard/plugin'); const HtmlWebpackPlugin = require('html-webpack-plugin'); const ExtractTextPlugin = require('extract-text-webpack-plugin'); const SpritePlugin = require('svg-sprite-loader/plugin'); const autoprefixer = require('autoprefixer'); const nodeEnv = process.env.NODE_ENV || 'development'; const version = require('./package.json').version; const isProduction = nodeEnv === 'production'; const jsPath = path.join(__dirname, './src/js'); const buildPath = path.join(__dirname, './build'); const imgPath = path.join(__dirname, './src/assets/img'); const iconPath = path.join(__dirname, './src/assets/icons'); const srcPath = path.join(__dirname, './src'); // Webpack Plugins const plugins = [ new SpritePlugin(), new webpack.optimize.CommonsChunkPlugin({ name: 'build', filename: 'build-[hash].js', minChuncks(module) { const context = module.context; return context && context.indexOf('node_modules') >= 0; }, }), new webpack.DefinePlugin({ 'process.env': { NODE_ENV: JSON.stringify(nodeEnv), VERSION: JSON.stringify(version), }, }), new webpack.NamedModulesPlugin(), new HtmlWebpackPlugin({ template: path.join(srcPath, 'index.html'), path: __dirname, filename: 'index.html', }), new webpack.LoaderOptionsPlugin({ options: { postcss: [ autoprefixer({ browsers: [ 'last 3 version', 'ie >= 10', ], }), ], context: srcPath, }, }), ]; // Webpack Rules const rules = [ { test: /\.(js|jsx)$/, exclude: /node_modules/, use: [ 'babel-loader', ], }, { test: /\.svg$/, use: [ { loader: 'svg-sprite-loader', options: { extract: true, spriteFilename: 'icons-sprite.svg', }, }, 'svgo-loader', ], include: iconPath, }, { test: /\.(png|gif|jpg|svg)$/, include: imgPath, use: 'url-loader?limit=20480&name=assets/[name]-[hash].[ext]', }, ]; if (isProduction) { plugins.push( new webpack.optimize.UglifyJsPlugin({ compress: { warnings: false, screw_ie8: true, conditionals: true, unused: true, 
comparisons: true, sequences: true, dead_code: true, evaluate: true, if_return: true, join_vars: true, }, output: { comments: false, }, }), new ExtractTextPlugin('style-[hash].css') ); rules.push( { test: /\.scss$/, loader: ExtractTextPlugin.extract({ fallback: 'style-loader', use: 'css-loader!postcss-loader!sass-loader', }), } ); } else { plugins.push( new webpack.HotModuleReplacementPlugin(), new DashboardPlugin() ); rules.push( { test: /\.scss$/, exclude: /node_modules/, use: [ 'style-loader', 'css-loader', 'postcss-loader', 'sass-loader?sourceMap', ], } ); } module.exports = { devtool: isProduction ? false : 'source-map', context: jsPath, entry: { js: './app.js', }, output: { path: buildPath, publicPath: '', filename: 'arkade-[hash].js', }, module: { rules, }, resolve: { extensions: ['.webpack-loader.js', '.web-loader.js', '.loader.js', '.js', '.jsx'], modules: [ path.resolve(__dirname, 'node_modules'), jsPath, ], }, plugins, devServer: { contentBase: isProduction ? buildPath : srcPath, historyApiFallback: true, port: 8080, compress: isProduction, inline: !isProduction, hot: !isProduction, host: '0.0.0.0', disableHostCheck: true, stats: { assets: true, children: false, chunks: false, hash: false, modules: false, publicPath: false, timings: true, version: false, warnings: true, colors: { green: '\u001b[32m', }, }, }, };