text stringlengths 1 1.05M |
|---|
<filename>front/src/components/Body/HomePage/HomePage.js
import PageHeading from '../../PageHeading/PageHeading'
export default function HomePage() {
return (
<>
<PageHeading text="Welcome" />
<p>
Lorem ipsum dolor sit amet consectetur adipisicing elit. Ex vel velit
nihil illo est! Quos cum rerum dolores voluptates odio iste est nam
excepturi placeat eligendi voluptatibus a illo eos ipsam, necessitatibus
quo at quae pariatur et asperiores odit! Quasi sunt odit omnis at
deserunt placeat ipsa earum dignissimos magni voluptatum quisquam veniam
libero qui fugit accusantium cum ratione, facilis tempore in!
Voluptates, minus nesciunt sed optio voluptate et quae accusamus est
eos, dolorum quibusdam dolorem debitis perferendis voluptas rem quos
eius ab, commodi cumque dolor. Repellendus porro impedit, enim
temporibus quibusdam eum natus corporis id? Ducimus fugit consequatur
consequuntur.
</p>
<p>
Lorem ipsum dolor sit amet consectetur adipisicing elit. Ex vel velit
nihil illo est! Quos cum rerum dolores voluptates odio iste est nam
excepturi placeat eligendi voluptatibus a illo eos ipsam, necessitatibus
quo at quae pariatur et asperiores odit! Quasi sunt odit omnis at
deserunt placeat ipsa earum dignissimos magni voluptatum quisquam veniam
libero qui fugit accusantium cum ratione, facilis tempore in!
Voluptates, minus nesciunt sed optio voluptate et quae accusamus est
eos, dolorum quibusdam dolorem debitis perferendis voluptas rem quos
eius ab, commodi cumque dolor. Repellendus porro impedit, enim
temporibus quibusdam eum natus corporis id? Ducimus fugit consequatur
consequuntur.
</p>
<p>
Lorem ipsum dolor sit amet consectetur adipisicing elit. Ex vel velit
nihil illo est! Quos cum rerum dolores voluptates odio iste est nam
excepturi placeat eligendi voluptatibus a illo eos ipsam, necessitatibus
quo at quae pariatur et asperiores odit! Quasi sunt odit omnis at
deserunt placeat ipsa earum dignissimos magni voluptatum quisquam veniam
libero qui fugit accusantium cum ratione, facilis tempore in!
Voluptates, minus nesciunt sed optio voluptate et quae accusamus est
eos, dolorum quibusdam dolorem debitis perferendis voluptas rem quos
eius ab, commodi cumque dolor. Repellendus porro impedit, enim
temporibus quibusdam eum natus corporis id? Ducimus fugit consequatur
consequuntur.
</p>
<p>
Lorem ipsum dolor sit amet consectetur adipisicing elit. Ex vel velit
nihil illo est! Quos cum rerum dolores voluptates odio iste est nam
excepturi placeat eligendi voluptatibus a illo eos ipsam, necessitatibus
quo at quae pariatur et asperiores odit! Quasi sunt odit omnis at
deserunt placeat ipsa earum dignissimos magni voluptatum quisquam veniam
libero qui fugit accusantium cum ratione, facilis tempore in!
Voluptates, minus nesciunt sed optio voluptate et quae accusamus est
eos, dolorum quibusdam dolorem debitis perferendis voluptas rem quos
eius ab, commodi cumque dolor. Repellendus porro impedit, enim
temporibus quibusdam eum natus corporis id? Ducimus fugit consequatur
consequuntur.
</p>
</>
)
}
|
<gh_stars>1-10
import React from 'react';
import { Alert } from '../../components/alert/alert';
import { Form } from '../../components/form/form';
import { Input } from '../../components/input/input';
import { Select, IOption } from '../../components/select/select';
/**
 * Props for the AccountForm component.
 *
 * NOTE(review): `IAccount` (the parameter type of `onSubmit`) is not imported
 * in this file — presumably it is an ambient/global declaration; verify that
 * it resolves under the project's tsconfig.
 */
interface IAccountFormProps {
  /** Validation error messages rendered in an alert above the form. */
  errors: string[];
  /** Initial account name; empty string for the "create" form. */
  name?: string;
  /** Initial balance; NaN renders an empty balance input. */
  balance?: number;
  /** Initial account type; must match one of the select option values. */
  type?: string;
  /** Called with the parsed account data when the form is submitted. */
  onSubmit(account: IAccount): void;
  /** Heading displayed above the form. */
  formHeading: string;
  /** Label for the submit button. */
  submitLabel: string;
}
// The selectable account types never change, so build the list once at module
// scope instead of re-creating the array on every render of AccountForm.
const accountTypes: IOption[] = [
  {
    value: 'cash',
    label: 'Cash',
  },
  {
    value: 'savings',
    label: 'Savings',
  },
  {
    value: 'investment',
    label: 'Investment',
  },
  {
    value: 'credit',
    label: 'Credit',
  },
  {
    value: 'loan',
    label: 'Loan',
  },
];

/**
 * Form for creating or editing an account.
 *
 * Renders name, balance and type fields, parses the submitted values into an
 * `IAccount` and hands it to `onSubmit`. Any `errors` provided are shown in an
 * alert above the form.
 */
export const AccountForm = ({
  errors,
  name = '',
  balance = NaN,
  type = 'savings',
  onSubmit,
  formHeading,
  submitLabel,
}: IAccountFormProps): JSX.Element => {
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  const handleSubmit = async (event: any): Promise<void> => {
    event.preventDefault();
    // The named form controls are exposed as properties of the form element.
    const { account, amount, type: newType } = event.target;
    const newAccountData: IAccount = {
      // Accept "," as a decimal separator in addition to ".".
      balance: parseFloat((amount.value as string).replace(',', '.')),
      name: account.value,
      type: newType.value,
    };
    onSubmit(newAccountData);
  };
  return (
    <>
      {errors.length > 0 && (
        <Alert additionalInformation={errors} testId="form-errors">
          There were {errors.length} errors with your submission
        </Alert>
      )}
      <Form
        submitLabel={submitLabel}
        formHeading={formHeading}
        handleSubmit={handleSubmit}
        accentColor="blue"
      >
        <section>
          <div className="grid gap-y-4 gap-x-4 sm:grid-cols-2">
            <Input id="account" isRequired value={name}>
              Account
            </Input>
            <Input
              id="amount"
              type="number"
              step={0.01}
              isCurrency
              isRequired
              value={Number.isNaN(balance) ? '' : balance}
            >
              Balance
            </Input>
            <Select
              id="type"
              options={accountTypes}
              defaultValue={type}
              isRequired
            >
              Type
            </Select>
          </div>
        </section>
      </Form>
    </>
  );
};
|
#!/bin/bash
# Lazy-load nvm: expose the newest installed node on PATH immediately, but
# defer sourcing the (slow) nvm.sh until the `nvm` command is first used.
export NVM_DIR="$HOME/.nvm"
# Glob every installed node version; the shell sorts the results
# lexicographically.
node_versions=("$NVM_DIR"/versions/node/*)
if (("${#node_versions[@]}" > 0)); then
    # Put the last (highest-sorting) version's bin dir on PATH.
    # NOTE(review): lexicographic order means e.g. v9.x sorts after v10.x;
    # confirm this picks the intended version for the installed set.
    PATH="$PATH:${node_versions[$((${#node_versions[@]} - 1))]}/bin"
fi
if [ -s "$NVM_DIR/nvm.sh" ]; then
    # load the real nvm on first use
    # This stub replaces itself: sourcing nvm.sh redefines `nvm`, and the
    # trailing call forwards the original arguments to the real function.
    nvm() {
        # shellcheck disable=SC1090,SC1091
        source "$NVM_DIR"/nvm.sh
        nvm "$@"
    }
fi
if [ -s "$NVM_DIR/bash_completion" ]; then
    # shellcheck disable=SC1090,SC1091
    source "$NVM_DIR"/bash_completion
fi
|
<filename>C2CRIBuildDir/projects/C2C-RI/src/C2CRIReportsDBLibrary/src/org/fhwa/c2cri/reports/dao/RawOTWMessageDAO.java
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package org.fhwa.c2cri.reports.dao;
import java.io.File;
import java.sql.DriverManager;
import java.util.ArrayList;
import org.fhwa.c2cri.reports.RawOTWMessage;
/**
 * Provides support for the RawOTWMessage Table.
 *
 * Inserts are buffered in memory and written to the SQLite database in
 * batches of {@code maxHold} rows; call {@link #flush()} when finished to
 * persist any remaining buffered rows.
 *
 * @author TransCore ITS
 */
public class RawOTWMessageDAO extends ReportsDAO {

    // Keep a cache of RawOTWMessage objects. To speed up processing they will all be written out in batches.
    private ArrayList<RawOTWMessage> rawOTWMessageEventList = new ArrayList<>();

    // The maximum number of objects to hold before writing them out.
    private int maxHold = 100;

    /**
     * Opens (or creates) the SQLite database file and stores the connection
     * on the inherited {@code conn} field.
     *
     * @param outdb the SQLite database file to write to
     * @throws Exception if the JDBC driver is missing or the connection fails
     */
    public RawOTWMessageDAO(File outdb) throws Exception {
        // Create a SQLite connection
        Class.forName("org.sqlite.JDBC");
        super.conn = DriverManager.getConnection("jdbc:sqlite:" + outdb.getAbsolutePath());
    }

    /**
     * Write a list of Raw Over the Wire Message objects in a batch operation.
     *
     * NOTE(review): auto-commit is disabled here but no {@code conn.commit()}
     * is visible in this class — presumably {@code executeBatch(pstmt)} in
     * {@code ReportsDAO} commits; verify, otherwise the batch is never
     * persisted. The prepared statement is also not closed here; confirm the
     * superclass manages its lifecycle. Exceptions are only printed, so a
     * failed batch is silently dropped.
     *
     * @param RawOTWMessageList the list of Raw Over the Wire Message objects.
     */
    public void insert(ArrayList<RawOTWMessage> RawOTWMessageList) {
        try {
            // Disable auto-commit
            conn.setAutoCommit(false);
            pstmt = super.conn.prepareStatement("INSERT INTO " + RAWOTWMESSAGE_TABLE + "("
                    + "ID, testCase, connectionName, ProcessType, SourceAddress, DestinationAddress,"
                    + "sequenceCount,message,TimeStampInMillis) VALUES (?,?,?,?,?,?,?,?,?)");
            for (RawOTWMessage thisRawOTWMessage : RawOTWMessageList) {
                // JDBC parameter indexes are 1-based.
                int col = 1;
                pstmt.setString(col++, thisRawOTWMessage.getId().toString());
                pstmt.setString(col++, thisRawOTWMessage.getTestCase());
                pstmt.setString(col++, thisRawOTWMessage.getConnectionName());
                pstmt.setString(col++, thisRawOTWMessage.getProcessType());
                pstmt.setString(col++, thisRawOTWMessage.getSourceAddress());
                pstmt.setString(col++, thisRawOTWMessage.getDestinationAddress());
                pstmt.setString(col++, thisRawOTWMessage.getSequenceCount().toString());
                pstmt.setString(col++, thisRawOTWMessage.getMessage());
                String tmpString = thisRawOTWMessage.getTimeStampInMillis().toString();
                pstmt.setString(col++, tmpString);
                pstmt.addBatch();
            }
            executeBatch(pstmt);
        } catch (Exception ex) {
            ex.printStackTrace();
        }
    }

    /**
     * Store the provided raw over the wire message data object. If we've reached the maxHold value, write out the current records.
     *
     * @param thisRawOTWMessage the current raw over the wire message data object.
     */
    public void insert(RawOTWMessage thisRawOTWMessage) {
        rawOTWMessageEventList.add(thisRawOTWMessage);
        if (rawOTWMessageEventList.size() == maxHold){
            insert(rawOTWMessageEventList);
            rawOTWMessageEventList.clear();
            System.out.println("Wrote out "+maxHold+" RawOTWMessage Events.");
        }
    }

    /**
     * Write out all remaining data objects in the stored raw over the wire message list.
     */
    public void flush() {
        if (rawOTWMessageEventList.size() > 0){
            insert(rawOTWMessageEventList);
            System.out.println("Wrote out the remaining "+rawOTWMessageEventList.size()+" RawOTWMessage Events.");
            rawOTWMessageEventList.clear();
        }
    }
}
|
// Fetch the metadata for one sample and render it as key/value lines in the
// "#sample-metadata" panel, then refresh the gauge from the sample's WFREQ.
function buildMetadata(sample) {
  d3.json("/metadata/" + sample).then(function (data) {
    var panel = d3.select("#sample-metadata");
    // Clear any previously rendered metadata.
    panel.html("");
    var rows = [
      ["AGE", data["AGE"]],
      ["BBTYPE", data["BBTYPE"]],
      ["ETHNICITY", data["ETHNICITY"]],
      ["GENDER", data["GENDER"]],
      ["LOCATION", data["LOCATION"]],
      // NOTE(review): this key is lower-case while the others are upper-case
      // — confirm against the /metadata payload.
      ["SAMPLEID", data["sample"]],
      ["WFREQ", data["WFREQ"]],
    ];
    rows.forEach(function (pair) {
      panel.append("p").text(pair[0] + ": " + pair[1]);
    });
    buildGuage(data["WFREQ"]);
  });
}
// Fetch one sample's OTU data and draw two Plotly charts: a pie chart of the
// first ten OTUs and a bubble chart of every OTU in the sample.
function buildCharts(sample) {
  d3.json("/samples/" + sample).then(function (data) {
    // Pie chart: the first ten OTU entries.
    var topTenIds = data.otu_ids.slice(0, 10);
    var topTenLabels = data.otu_labels.slice(0, 10);
    var topTenValues = data.sample_values.slice(0, 10);
    var pieTrace = {
      labels: topTenIds,
      values: topTenValues,
      hovertext: topTenLabels,
      type: "pie",
    };
    Plotly.newPlot("pie", [pieTrace]);

    // Bubble chart: every OTU, colored by id and sized by sample value.
    var bubbleTrace = {
      x: data.otu_ids,
      y: data.sample_values,
      text: data.otu_labels,
      mode: "markers",
      marker: { color: data.otu_ids, size: data.sample_values },
    };
    var bubbleLayout = {
      height: 600,
      width: 900,
      showlegend: true,
      hovermode: "closest",
      xaxis: { title: "otu id" },
    };
    Plotly.newPlot("bubble", [bubbleTrace], bubbleLayout);
  });
}
// Populate the sample selector from the /names endpoint and draw the initial
// charts, metadata panel and gauge for the first sample in the list.
function init() {
  // Grab a reference to the dropdown select element.
  var selector = d3.select("#selDataset");
  // Use the list of sample names to populate the select options.
  d3.json("/names").then((sampleNames) => {
    sampleNames.forEach((sample) => {
      selector
        .append("option")
        .text("BB_" + sample) // displayed label is prefixed with "BB_"
        .property("value", sample); // underlying value stays unprefixed
    });
    // Use the first sample from the list to build the initial plots.
    const firstSample = sampleNames[0];
    buildCharts(firstSample);
    buildMetadata(firstSample);
    // buildGuage('2');
    // console.log(firstSample);
  });
}
// Draw the wash-frequency gauge. `level` is the WFREQ value, mapped onto a
// 180-degree half-donut (assumes a 0-12 scale — TODO confirm against data).
function buildGuage(level){
    // NOTE(review): `var level` re-declares the parameter; harmless because
    // of `var` hoisting, but the name now means "angle in degrees" (0-180).
    var level = level * 180 / 12;
    // Using PI, sin and cosin to calculate the pointer location.
    var degrees = 180 - level;
    var radii = degrees * Math.PI / 180;
    var radius = .5;
    var x = radius * Math.cos(radii);
    var y = radius * Math.sin(radii);
    // Path: This may need to be adjusted, but these settings worked.
    // Needle: a thin triangle from a short base at the origin to (x, y).
    var mainPath = 'M -.0 -0.025 L .0 0.025 L ';
    var pathX = String(x);
    var space = ' ';
    var pathY = String(y);
    var pathEnd = ' Z';
    var path = mainPath.concat(pathX,space,pathY,pathEnd);
    var data = [{ type: 'scatter',
        // Single marker at the origin forms the needle's pivot.
        x: [0],
        y:[0],
        marker: {size: 28, color:'850000'},
        showlegend: false,
        name: 'speed',
        text: level,
        hoverinfo: 'text'},
        // Donut chart trick: six visible wedges for the top half plus one
        // large transparent wedge (value 50) hiding the bottom half.
        // NOTE(review): 7 values vs 6 labels vs 9 colors — presumably copied
        // from the standard Plotly gauge template; extra entries are unused.
        { values: [50/6, 50/6, 50/6, 50/6, 50/6, 50/6, 50],
        rotation: 90,
        text: ['10-12', '8-10', '6-8', '4-6', '2-4', '0-2'],
        textinfo: 'text',
        textposition:'inside',
        marker: {colors:['rgba(0, 255, 255, .5)',
            'rgba(60, 255, 255, .5)',
            'rgba(100, 255, 255, .5)',
            'rgba(150, 255, 255, .5)',
            'rgba(175, 255, 255, .5)',
            'rgba(222, 255, 255, .5)',
            'rgba(255, 255, 255, 0)',
            'rgba(255, 255, 255, 0)',
            'rgba(255, 255, 255, 0)']},
        labels: ['10-12', '8-10', '6-8', '4-6', '2-4', '0-2'],
        hoverinfo: 'label',
        hole: .5,
        type: 'pie',
        showlegend: false
    }];
    var layout = {
        // The needle is drawn as an SVG path shape on top of the pie.
        shapes:[{
            type: 'path',
            path: path,
            fillcolor: '850000',
            line: {color: '850000'}
        }],
        height: 500,
        width: 500,
        xaxis: {zeroline:false, showticklabels:false, showgrid: false, range: [-1, 1]},
        yaxis: {zeroline:false, showticklabels:false, showgrid: false, range: [-1, 1]}
    };
    Plotly.newPlot('gauge', data, layout, {showSendToCloud:false});
}
// Handler for the sample dropdown's onchange; rebuilds all visualizations
// for the newly selected sample.
function optionChanged(newSample) {
  buildCharts(newSample);
  // buildMetadata() fetches the metadata and rebuilds the gauge from the
  // returned WFREQ. The previous direct call buildGuage(newSample["WFREQ"])
  // was a bug: newSample is the sample id string, so that lookup was always
  // undefined and the gauge was drawn with NaN before being redrawn.
  buildMetadata(newSample);
  console.log(newSample);
}
// Initialize the dashboard as soon as the script loads.
init();
|
package io.opensphere.core.util.cache;
import java.util.Map;
import java.util.function.Function;
/**
* A simple generic cache.
*
* @param <K> the key type
* @param <V> the value type
*/
public class SimpleCache<K, V> implements Function<K, V>
{
/** The cache map. */
private final Map<K, V> myCacheMap;
/** The look up function. */
private final Function<K, V> myLookupFunction;
// /** The instrumentation. */
// private final CacheInstrumentation myInstrumentation = new CacheInstrumentation();
/**
* Constructor.
*
* @param cacheMap the cache map
* @param lookupFunction the loop up function
*/
public SimpleCache(Map<K, V> cacheMap, Function<K, V> lookupFunction)
{
myCacheMap = cacheMap;
myLookupFunction = lookupFunction;
}
@Override
public V apply(K key)
{
// myInstrumentation.start();
V value = myCacheMap.get(key);
if (value == null)
{
value = myLookupFunction.apply(key);
myCacheMap.put(key, value);
// myInstrumentation.miss();
}
// else
// {
// myInstrumentation.hit();
// }
return value;
}
/**
* Clears the cache.
*/
public void clear()
{
myCacheMap.clear();
}
/**
* Invalidates the given key.
*
* @param key the key
*/
public void invalidate(K key)
{
myCacheMap.remove(key);
}
}
|
<reponame>Decipher/druxt.js<filename>packages/site/src/index.js
import { DruxtSiteNuxtModule } from './nuxtModule'
/**
* Vue.js Mixin.
*
* Registers props for use by Druxt slot theme components.
*
* @type {object}
* @exports DruxtSiteMixin
* @see {@link ./mixins/site|DruxtSiteMixin}
* @example @lang vue
* <template>
* <div>
* <slot v-for="region of regions" :key="region" :name="region" />
* </div>
* </template>
*
* <script>
* import { DruxtSiteMixin } from 'druxt-site'
*
* export default {
* mixins: [DruxtSiteMixin],
* }
* </script>
*/
export { DruxtSiteMixin } from './mixins/site'
/**
* The Nuxt.js module functions.
*
* Installs and configures all DruxtJS Site modules.
*
* @type {Function}
* @exports default
* @name DruxtSiteNuxtModule
* @see {@link ./nuxtModule|DruxtSiteNuxtModule}
*
* @example <caption>nuxt.config.js</caption> @lang js
* module.exports = {
* modules: [
* 'druxt-site'
* ],
* druxt: {
* baseUrl: 'https://demo-api.druxtjs.org'
* }
* }
*/
export default DruxtSiteNuxtModule
|
<filename>src/sentry/static/sentry/app/icons/iconPrevious.tsx
import React from 'react';
import SvgIcon from './svgIcon';
type Props = React.ComponentProps<typeof SvgIcon>;

/**
 * "Previous" icon: a left-pointing outlined triangle with a vertical stop
 * bar at the left edge, drawn on the shared SvgIcon canvas. Forwards its ref
 * to the underlying <svg> element.
 */
const IconPrevious = React.forwardRef(function IconPrevious(
  props: Props,
  ref: React.Ref<SVGSVGElement>
) {
  return (
    <SvgIcon {...props} ref={ref}>
      <path d="M15.25,15.48a.69.69,0,0,1-.37-.1L3.22,8.65a.75.75,0,0,1,0-1.3L14.88.62a.75.75,0,0,1,.74,0,.73.73,0,0,1,.38.65V14.73a.73.73,0,0,1-.38.65A.69.69,0,0,1,15.25,15.48ZM5.09,8l9.41,5.43V2.57Z" />
      <path d="M.75,15.94A.76.76,0,0,1,0,15.19V.81A.76.76,0,0,1,.75.06.76.76,0,0,1,1.5.81V15.19A.76.76,0,0,1,.75,15.94Z" />
    </SvgIcon>
  );
});

IconPrevious.displayName = 'IconPrevious';

export {IconPrevious};
|
<filename>evaluation/getBestThr_Pairs.py
import sys, os
from sklearn.metrics import roc_auc_score, recall_score, precision_score, roc_curve
import pandas as pd
import numpy as np
from evaluation.evaluateScoresList import evaluateScoresLists
DO_ROC_CURVE= False
def loadResults( resultsPath, fnameResults):
    '''
    Load one whitespace-separated results table, skipping '#' comment lines.

    Residue/chain id columns are forced to str so ids such as "001" or "NA"
    are not mangled by type inference.

    :param resultsPath: directory containing the results files
    :param fnameResults: file name of the table to load
    :return: pandas.DataFrame with the file's columns
    '''
    # sep must be a raw string: "\s+" is an invalid escape sequence and a
    # DeprecationWarning (eventually an error) on modern Python.
    # The unused `prefix` local from the original was removed.
    scoresDf = pd.read_table(os.path.join(resultsPath, fnameResults), comment="#", sep=r"\s+",
                             dtype={"structResIdL": str, "structResIdR": str,
                                    "chainIdL": str, "chainIdR": str})
    return scoresDf
def get_pairs_statistics(prefix, labels, scores):
    '''
    Compute ROC-AUC plus precision/recall at fixed cut-offs (the 4 and 8
    highest-scored pairs) for one complex.

    :param prefix: identifier of the complex (becomes the "pdb" column)
    :param labels: binary labels, positive class == 1
    :param scores: prediction scores, higher means more confident positive
    :return: (one-row pandas.DataFrame summary, rocCurve tuple or None)
    '''
    EVAL_PAIRS_AT = [2 ** 2, 2 ** 3]
    precision_at = []
    recall_at = []
    scores = np.array(scores)
    labels = np.array(labels)
    try:
        roc_complex = roc_auc_score(labels, scores)
    except ValueError:
        # Only one class present in labels: AUC is undefined.
        roc_complex = np.nan
    # Indexes of the scores sorted from highest to lowest.
    probability_sorted_indexes = scores.argsort(axis=0)[::-1]
    for evalPoint in EVAL_PAIRS_AT:
        try:
            # Predict positive for the top-evalPoint pairs, negative elsewhere.
            label_predictions = np.ones(scores.shape[0]) * np.min(labels)
            label_predictions[probability_sorted_indexes[0:evalPoint]] = np.repeat(1, evalPoint)
            precision_at.append(precision_score(labels[probability_sorted_indexes[0:evalPoint]],
                                                label_predictions[probability_sorted_indexes[0:evalPoint]]))
            recall_at.append(recall_score(labels, label_predictions))
        except IndexError:
            # Fewer pairs than the evaluation cut-off.
            precision_at.append(0.0)
            recall_at.append(0.0)
    summary = pd.DataFrame({"pdb": [prefix]})
    summary["auc_pairs"] = [roc_complex]
    # Distinct loop-variable names: the original reused precisionAt/recallAt
    # here, shadowing (and clobbering) the result lists after the loop.
    for evalPoint, prec, reca in zip(EVAL_PAIRS_AT, precision_at, recall_at):
        summary["prec_%d" % evalPoint] = [prec]
        summary["reca_%d" % evalPoint] = [reca]
    rocCurve = None
    if DO_ROC_CURVE:
        fpr, tpr, __ = roc_curve(labels, scores)
        rocCurve = (fpr, tpr, roc_complex)
    return summary, rocCurve
def getOptimThr(resultsPath):
    '''
    Evaluate every per-complex result table (*.tab) in resultsPath, build a
    per-complex summary with a trailing "mean" row, and hand everything to
    evaluateScoresLists.

    :param resultsPath: directory with one whitespace-separated results file
                        per complex, each having "prediction" and "categ"
                        columns
    '''
    allScores = []
    allLabels = []
    perComplexSummaries = []
    rocCurves = []
    for fname in sorted(os.listdir(resultsPath)):
        if fname.endswith(".tab"):
            print(fname)
            results = loadResults(resultsPath, fname)
            if results is None: continue
            scores = list(results["prediction"].values)
            labels = list(results["categ"].values)
            summary, rocCurve = get_pairs_statistics(fname, labels, scores)
            if rocCurve: rocCurves.append(rocCurve)
            perComplexSummaries.append(summary)
            allScores += scores
            allLabels += labels
    summary = pd.concat(perComplexSummaries, ignore_index=True)
    # Append a "mean" row holding the column-wise means of the numeric
    # columns. DataFrame.append and .ix were removed from pandas (1.0/2.0),
    # so build the row explicitly and concat it; numeric_only skips the
    # string "pdb" column the way old pandas silently did.
    means = summary.mean(axis=0, numeric_only=True)
    mean_row = pd.DataFrame([["mean"] + list(means.values)], columns=summary.columns)
    summary = pd.concat([summary, mean_row], ignore_index=True)
    evaluateScoresLists(allLabels, allScores, summary, summary.iloc[-1, 1],
                        None if not DO_ROC_CURVE else rocCurves)
if __name__=="__main__":
    '''
    python -m evaluation.getBestThr_Pairs ~/Tesis/rriPredMethod/data/bench5Data/newCodeData/results/mixed_2/
    '''
    # Default results directory; overridden by the optional single CLI argument.
    resultsPath= "/home/rsanchez/Tesis/rriPredMethod/data/bench5Data/newCodeData/results_xgb/mixed_2/"
    if len(sys.argv)==2:
        resultsPath= sys.argv[1]
    getOptimThr(resultsPath)
|
import random

# Print ten pseudo-random integers, one per line, each drawn uniformly
# from the inclusive range [1, 10].
for _iteration in range(10):
    value = random.randint(1, 10)
    print(value)
<reponame>healer1064/Gimbal
import program from 'commander';
import deepmerge from 'deepmerge';
import resolver from '@/config/resolver';
import EventEmitter from '@/event';
import { PluginConfig, Plugin, PluginOptions } from '@/typings/config/plugin';
import { CommandOptions } from '@/typings/utils/command';
import { LoadEndEvent } from '@/typings/config';
/* eslint-disable-next-line @typescript-eslint/no-explicit-any */
const bus = async (name: string): Promise<any> => {
  // 'commander' is special-cased to return the shared program instance
  // rather than loading a module from disk.
  if (name === 'commander') {
    return program;
  }
  // Any other name is dynamically imported relative to the package root
  // (two levels above this directory).
  const imported = await import(`${__dirname}/../../${name}`);
  // Unwrap a default export when present (CJS/ESM interop).
  if (imported.default) {
    return imported.default;
  }
  return imported;
};
interface Map {
[label: string]: PluginConfig;
}
// this is the object that gets passed to a plugin function
const options: PluginOptions = {
bus,
dir: __dirname,
};
const map: Map = {};
/**
 * Resolves and initializes the configured plugins.
 *
 * Each entry may be a plugin module path (string) or a full PluginConfig.
 * The plugin module is dynamically imported, registered in `map`, and its
 * default-export function is invoked with the shared options plus its own
 * merged config. Returns the array of plugin return values (possibly
 * promises) so callers can await completion.
 */
const parsePlugins = async (
  plugins: (string | PluginConfig)[],
  dir: string,
  commandOptions: CommandOptions,
  /* eslint-disable-next-line @typescript-eslint/no-explicit-any */
): Promise<any[]> => {
  // Phase 1: normalize entries and import every plugin module in parallel.
  const pluginConfigs = await Promise.all(
    plugins.map(
      async (plugin: string | PluginConfig): Promise<PluginConfig> => {
        // A bare string is shorthand for { plugin: path, name: path }.
        const obj: PluginConfig = typeof plugin === 'string' ? { plugin, name: plugin } : plugin;
        const resolved = await import(resolver(obj.plugin as string, dir, 'plugin'));
        return {
          ...obj,
          plugin: resolved as Plugin,
        };
      },
    ),
  );
  // Phase 2: invoke each plugin's entry function.
  return Promise.all(
    pluginConfigs.map(
      /* eslint-disable-next-line @typescript-eslint/no-explicit-any */
      (config: PluginConfig): any => {
        const { plugin, ...pluginConfig } = config;
        if (pluginConfig.enabled === false) {
          // global config to disable plugin
          return undefined;
        }
        const func = (plugin as Plugin).default;
        map[config.name] = config;
        // since we could be in user land, let's clone
        // the options object incase someone messes with
        // it that could cause issues.
        // Also return it in case it's a promise, we can
        // wait for it.
        return func({ ...options, commandOptions: { ...commandOptions }, dir }, deepmerge(pluginConfig, {}));
      },
    ),
  );
};
// When the configuration has finished loading, initialize any plugins it
// declares; resolves to the plugin results or undefined when none are set.
EventEmitter.on(
  'config/load/end',
  /* eslint-disable-next-line @typescript-eslint/no-explicit-any */
  async (_eventName: string, { commandOptions, config: { plugins }, dir }: LoadEndEvent): Promise<void | any[]> =>
    plugins && plugins.length ? parsePlugins(plugins, dir, commandOptions) : undefined,
);

export default parsePlugins;
|
<reponame>awslabs/flux-swf-client
/*
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package software.amazon.aws.clients.swf.flux.step;
import java.lang.annotation.Annotation;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Parameter;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import java.util.stream.Collectors;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import software.amazon.aws.clients.swf.flux.metrics.MetricRecorder;
import software.amazon.aws.clients.swf.flux.metrics.MetricRecorderFactory;
import software.amazon.aws.clients.swf.flux.poller.TaskNaming;
/**
 * Utility class for extracting data about a workflow step.
 *
 * <p>All helpers are static and reflection-based; they locate annotated step
 * methods, build their argument lists, and execute step hooks.
 */
public final class WorkflowStepUtil {

    private static final Logger log = LoggerFactory.getLogger(WorkflowStepUtil.class);

    /** Utility class; not instantiable. */
    private WorkflowStepUtil() {}

    /**
     * Given a type, finds the method that has the specified annotation.
     * Throws an exception if the type does not have exactly one method with that annotation.
     */
    public static Method getUniqueAnnotatedMethod(Class<?> clazz, Class<? extends Annotation> annotationType) {
        Method annotatedMethod = null;
        for (Method method : clazz.getMethods()) {
            if (method.isAnnotationPresent(annotationType)) {
                if (annotatedMethod != null) {
                    String message = String.format("Class %s must not have more than one @%s method.",
                                                   clazz.getSimpleName(), annotationType.getSimpleName());
                    log.error(message);
                    throw new RuntimeException(message);
                }
                annotatedMethod = method;
            }
        }
        if (annotatedMethod == null) {
            String message = String.format("Class %s must have a @%s method.", clazz.getSimpleName(),
                                           annotationType.getSimpleName());
            log.error(message);
            throw new RuntimeException(message);
        }
        log.debug("Found @{} method {}.{}", annotationType.getSimpleName(), clazz.getSimpleName(), annotatedMethod.getName());
        return annotatedMethod;
    }

    /**
     * Given a type, finds all methods that have the specified annotation and maps those methods to the specified annotation.
     */
    public static <T extends Annotation> Map<Method, T> getAllMethodsWithAnnotation(Class<?> clazz, Class<T> annotationType) {
        Map<Method, T> matching = new HashMap<>();
        for (Method method : clazz.getMethods()) {
            if (method.isAnnotationPresent(annotationType)) {
                matching.put(method, method.getAnnotation(annotationType));
            }
        }
        if (matching.isEmpty()) {
            log.debug("No @{} methods found in {}.", annotationType.getSimpleName(), clazz.getSimpleName());
        } else {
            String names = matching.keySet().stream().map(Method::getName).collect(Collectors.joining(", "));
            log.debug("Found {} @{} methods in {}: {}", matching.size(), annotationType.getSimpleName(),
                      clazz.getSimpleName(), names);
        }
        return matching;
    }

    /**
     * Locates the @PartitionIdGenerator annotated method on the provided PartitionedWorkflowStep,
     * and calls it with the appropriate arguments. Returns the resulting list of partition IDs.
     */
    @SuppressWarnings("unchecked") // return type is validated by the workflow graph builder
    public static PartitionIdGeneratorResult getPartitionIdsForPartitionedStep(PartitionedWorkflowStep step,
                                                                               Map<String, String> stepInput,
                                                                               String workflowName, String workflowId,
                                                                               MetricRecorderFactory metricsFactory) {
        String activityName = TaskNaming.activityName(workflowName, step);
        Method partitionIdMethod = WorkflowStepUtil.getUniqueAnnotatedMethod(step.getClass(), PartitionIdGenerator.class);
        try (MetricRecorder stepMetrics = metricsFactory.newMetricRecorder(activityName + "." + partitionIdMethod.getName())) {
            Map<String, String> generatorInput = new TreeMap<>(stepInput);
            Object[] arguments = WorkflowStepUtil.generateArguments(step.getClass(), partitionIdMethod, stepMetrics,
                                                                    generatorInput);
            PartitionIdGeneratorResult result;
            if (List.class.equals(partitionIdMethod.getReturnType())) {
                List<String> partitionIds = (List<String>)partitionIdMethod.invoke(step, arguments);
                result = PartitionIdGeneratorResult.create(new HashSet<>(partitionIds));
            } else if (PartitionIdGeneratorResult.class.equals(partitionIdMethod.getReturnType())) {
                result = (PartitionIdGeneratorResult)(partitionIdMethod.invoke(step, arguments));
            } else {
                // the return type of this method is validated by the workflow graph builder, so this shouldn't happen
                throw new RuntimeException(String.format("%s.%s must have return type List<String>"
                                                         + " or PartitionIdGeneratorResult.",
                                                         step.getClass().getSimpleName(), partitionIdMethod.getName()));
            }
            return result;
        } catch (IllegalAccessException | InvocationTargetException e) {
            String message = "Got an exception while attempting to request partition ids for workflow " + workflowId
                             + " for step " + activityName;
            log.error(message, e);
            // throwing this exception should cause the decision task to fail and be rescheduled.
            throw new RuntimeException(message, e);
        }
    }

    /**
     * Given a type, the method being called, and the available input attributes,
     * generates an array of input parameters for the method.
     */
    public static Object[] generateArguments(Class<?> clazz, Method method, MetricRecorder metrics,
                                             Map<String, String> input) {
        Object[] args = new Object[method.getParameterCount()];
        int arg = 0;
        for (Parameter param : method.getParameters()) {
            if (param.getType().isAssignableFrom(MetricRecorder.class)) {
                args[arg] = metrics;
            } else {
                Attribute attr = param.getAnnotation(Attribute.class);
                if (attr == null || attr.value().equals("")) {
                    String message = String.format("The %s.%s parameter %s must have the @Attribute annotation"
                                                   + " and its value must not be blank.",
                                                   clazz.getSimpleName(),
                                                   method.getName(),
                                                   param.getName());
                    log.error(message);
                    throw new RuntimeException(message);
                }
                args[arg] = StepAttributes.decode(param.getType(), input.get(attr.value()));
            }
            arg++;
        }
        return args;
    }

    /**
     * Executes a set of step hooks with the specified input attributes, for any hooks whose type matches the specified hook type.
     * Returns null unless the step should be retried due to a hook failure.
     */
    public static StepResult executeHooks(List<WorkflowStepHook> hooks, Map<String, String> hookInput, StepHook.HookType hookType,
                                          String activityName, MetricRecorder fluxMetrics, MetricRecorder hookMetrics) {
        for (WorkflowStepHook hook : hooks) {
            Map<Method, StepHook> methods = WorkflowStepUtil.getAllMethodsWithAnnotation(hook.getClass(), StepHook.class);
            for (Map.Entry<Method, StepHook> method : methods.entrySet()) {
                if (method.getValue().hookType() != hookType) {
                    continue;
                }
                String hookExecutionTimeMetricName = formatHookExecutionTimeName(hook.getClass().getSimpleName(),
                                                                                 method.getKey().getName(), activityName);
                fluxMetrics.startDuration(hookExecutionTimeMetricName);
                try {
                    Object result = method.getKey().invoke(hook, WorkflowStepUtil.generateArguments(hook.getClass(),
                                                                                                    method.getKey(),
                                                                                                    hookMetrics,
                                                                                                    hookInput));
                    if (result != null) {
                        log.info("Hook {} for activity {} returned value: {}",
                                 hook.getClass().getSimpleName(), activityName, result);
                    }
                } catch (InvocationTargetException e) {
                    // The hook body threw: the interesting exception is the cause.
                    String message = String.format("Hook %s for activity %s threw an exception (%s)",
                                                   hook.getClass().getSimpleName(), activityName, e.getCause().toString());
                    if (method.getValue().retryOnFailure()) {
                        message += ", and the hook is configured to retry on failure.";
                        log.info(message, e.getCause());
                        return StepResult.retry(message);
                    } else {
                        log.info("{}, but the hook is configured to ignore failures.", message, e.getCause());
                    }
                } catch (IllegalAccessException e) {
                    // IllegalAccessException shouldn't happen, since we only looked for public methods, but we'll handle it
                    // the same way as if the hook itself threw an exception.
                    String message = String.format("Hook %s for activity %s threw an exception (%s)",
                                                   hook.getClass().getSimpleName(), activityName, e.toString());
                    if (method.getValue().retryOnFailure()) {
                        message += ", and the hook is configured to retry on failure.";
                        // Fix: log the exception itself. IllegalAccessException rarely has a
                        // cause, so the previous log.error(message, e.getCause()) passed null
                        // and dropped the stack trace.
                        log.error(message, e);
                        return StepResult.retry(message);
                    } else {
                        log.error("{}, but the hook is configured to ignore failures.", message, e);
                    }
                } finally {
                    fluxMetrics.endDuration(hookExecutionTimeMetricName);
                }
            }
        }
        return null;
    }

    // public only for testing visibility
    public static String formatHookExecutionTimeName(String hookClassName, String hookMethodName, String activityName) {
        return String.format("Flux.HookExecutionTime.%s.%s:Activity.%s", hookClassName, hookMethodName, activityName);
    }
}
|
package com.g4mesoft.graphics;
public class ColorPalette {

    /** Total palette size (only the first NUM_VISIBLE_COLORS entries are filled). */
    public static final int NUM_COLORS = 256;
    /** Levels per RGB channel; yields a 6x6x6 = 216 entry color cube. */
    public static final int COLORS_PER_CHANNEL = 6;
    public static final int NUM_VISIBLE_COLORS = COLORS_PER_CHANNEL *
                                                 COLORS_PER_CHANNEL *
                                                 COLORS_PER_CHANNEL;

    // Lazily generated shared palette, built once and reused by all instances.
    // NOTE(review): this lazy init is not thread-safe; two threads could each
    // generate the palette (harmless but wasteful) — confirm single-threaded use.
    private static int[] colors = null;

    // Packed 0xRRGGBB entries; all instances alias the shared static array.
    public final int[] palette;

    public ColorPalette() {
        if (colors == null)
            colors = generatePalette();
        palette = colors;
    }

    private static int[] generatePalette() {
        int[] palette = new int[NUM_COLORS];
        int i = 0;
        // Walk the 6x6x6 cube; entries 216..255 remain 0 (black).
        for (int r = 0; r < 6; r++) {
            for (int g = 0; g < 6; g++) {
                for (int b = 0; b < 6; b++) {
                    // Scale each channel level 0..5 to 0..255.
                    int rr = r * 255 / 5;
                    int gg = g * 255 / 5;
                    int bb = b * 255 / 5;
                    // Weighted brightness (30/59/11 weights — the classic
                    // luma approximation).
                    int mid = (rr * 30 + gg * 59 + bb * 11) / 100;
                    // Average each channel with the luma (desaturates), then
                    // compress into the 10..240 range to avoid extremes.
                    int r1 = (rr + mid) / 2 * 230 / 255 + 10;
                    int g1 = (gg + mid) / 2 * 230 / 255 + 10;
                    int b1 = (bb + mid) / 2 * 230 / 255 + 10;
                    palette[i++] = (r1 << 16 | g1 << 8 | b1);
                }
            }
        }
        return palette;
    }

    /**
     * Packs four palette indexes (one per decimal-coded rgb value) into a
     * single int, one byte per color, with rgb1 in the lowest byte.
     */
    public static int getColors(int rgb1, int rgb2, int rgb3, int rgb4) {
        return getColor(rgb4) << 24 |
               getColor(rgb3) << 16 |
               getColor(rgb2) << 8 |
               getColor(rgb1) << 0;
    }

    /**
     * Maps a decimal-coded color (three digits, each expected in 0..5, e.g.
     * 305 = r:3 g:0 b:5) to its palette index in the 6x6x6 cube.
     */
    public static int getColor(int rgb) {
        int r = rgb / 100;
        int g = (rgb % 100) / 10;
        int b = rgb % 10;
        return (b + (g + r * COLORS_PER_CHANNEL) * COLORS_PER_CHANNEL) & 0xFF;
    }
}
|
import sys
from cx_Freeze import setup, Executable

# cx_Freeze build options: bundle the os and matplotlib packages and make
# sure tkinter is included (matplotlib's TkAgg backend needs it).
build_exe_options = {"packages": ["os", "matplotlib"], "includes": ["tkinter"]}

# On Windows use the GUI base so the frozen app runs without a console window.
base = None
if sys.platform == 'win32':
    base = 'Win32GUI'

setup(
    name="Randomness",
    version="1.0",
    description="Plot Randomness with numbers",
    options={"build_exe": build_exe_options},
    executables=[Executable("main.py", base=base)]
)
|
# Program to find the largest number from a list
numbers = [23, 75, 39, 63]

# Sorting once gives us both the largest and the second largest value.
ordered = sorted(numbers)

# Find the largest number
largest = ordered[-1]
print('Largest number:',largest)

# Solution 1:
# Find the second largest number
second_largest = ordered[-2]
print('Second largest number:', second_largest)

# Solution 2:
# Find the average of the numbers
average = sum(numbers) / len(numbers)
print('Average of the numbers:', average)
<reponame>Organizational-Proof-Of-Work/clearinghoused_build
httplib = __import__('httplib')
from geventhttpclient import response
import gevent.socket
import gevent.ssl
class HTTPResponse(response.HTTPSocketResponse):
    """Adapter exposing the stdlib httplib.HTTPResponse interface on top of
    geventhttpclient's HTTPSocketResponse, so code written against httplib
    keeps working once patch() is installed."""

    def __init__(self, sock, method='GET', strict=0, debuglevel=0,
                 buffering=False, **kw):
        # httplib may pass method=None; normalize and upper-case to match
        # what the parser expects. strict/debuglevel/buffering are accepted
        # for signature compatibility but ignored.
        if method is None:
            method = 'GET'
        else:
            method = method.upper()
        super(HTTPResponse, self).__init__(sock, method=method, **kw)

    @property
    def version(self):
        # httplib encodes HTTP/1.1 as 11 and everything else here as 10.
        v = self.get_http_version()
        if v == 'HTTP/1.1':
            return 11
        return 10

    @property
    def status(self):
        return self.status_code

    @property
    def reason(self):
        return self.msg

    @property
    def msg(self):
        # NOTE(review): reconstructs the reason phrase from the status code,
        # so any custom phrase sent by the server is lost. KeyError is
        # possible for non-standard status codes — verify acceptable.
        return httplib.responses[self.status_code]

    def _read_status(self):
        return (self.version, self.status_code, self.msg)

    def begin(self):
        # Parsing already happened in the gevent response; nothing to do.
        pass

    def close(self):
        # Return the socket to the pool rather than closing it outright.
        self.release()

    def isclosed(self):
        return self._sock is None

    def read(self, amt=None):
        return super(HTTPResponse, self).read(amt)

    def getheader(self, name, default=None):
        # Header index is keyed by lower-cased names.
        return self.get(name.lower(), default)

    def getheaders(self):
        return self._headers_index.items()

    @property
    def will_close(self):
        return self.message_complete and not self.should_keep_alive()

    def _check_close(self):
        return not self.should_keep_alive()
# Keep a reference to the original stdlib class so __init__ below still
# works after patch() rebinds httplib.HTTPConnection to our subclass.
HTTPLibConnection = httplib.HTTPConnection

class HTTPConnection(httplib.HTTPConnection):
    """httplib.HTTPConnection that opens its socket cooperatively via gevent."""

    response_class = HTTPResponse

    def __init__(self, *args, **kw):
        HTTPLibConnection.__init__(self, *args, **kw)
        # python 2.6 compat
        if not hasattr(self, "source_address"):
            self.source_address = None

    def connect(self):
        # gevent's create_connection yields to the event loop instead of blocking.
        self.sock = gevent.socket.create_connection(
            (self.host,self.port),
            self.timeout, self.source_address)
        if self._tunnel_host:
            self._tunnel()
class HTTPSConnection(HTTPConnection):
    """HTTPConnection over TLS, using gevent's cooperative ssl wrapper."""

    default_port = 443

    def __init__(self, host, port=None, key_file=None, cert_file=None, **kw):
        HTTPConnection.__init__(self, host, port, **kw)
        self.key_file = key_file
        self.cert_file = cert_file

    def connect(self):
        "Connect to a host on a given (SSL) port."
        sock = gevent.socket.create_connection((self.host, self.port),
                self.timeout, self.source_address)
        if self._tunnel_host:
            # Establish the CONNECT tunnel over the plain socket first;
            # the TLS wrap below then runs over the tunneled connection.
            self.sock = sock
            self._tunnel()
        self.sock = gevent.ssl.wrap_socket(
            sock, self.key_file, self.cert_file)
def patch():
    """Monkey-patch httplib so all stdlib HTTP(S) traffic uses gevent-backed
    connection and response classes. Irreversible for the process lifetime."""
    httplib.HTTPConnection = HTTPConnection
    httplib.HTTPSConnection = HTTPSConnection
    httplib.HTTPResponse = HTTPResponse
|
def generate_phase_summary(default_phases):
    """Flatten an iterable of (phase_template, configurations) pairs into a
    dict keyed by configuration name, each value holding its code and
    inheritance info.

    NOTE(review): phase_name is computed but never used. It may be leftover
    code, or an intentional check that the template contains a '%s'
    placeholder (the formatting would raise otherwise) — confirm before
    removing.
    """
    phase_summary = {}
    for phase_template, configurations in default_phases:
        phase_name = phase_template[0] % 'S'  # Extract phase name from the template
        for parameters, code, inheritance in configurations:
            phase_summary[parameters['name']] = {'code': code, 'inheritance': inheritance}
    return phase_summary
#!/bin/bash
# Build a Debian package for project $1 at version ${CIRCLE_TAG} (CI tag).
# Produces build/deb/deploy/$1_${CIRCLE_TAG}_amd64.deb.
export LC_ALL=en_US.UTF-8
# Timestamps substituted into debian/changelog and debian/copyright below.
TIME="$(date '+%a, %d %b %Y %T %z')"
YEAR="$(date '+%Y')"
echo "preparing debian package build for $1-${CIRCLE_TAG}"
# create debian package construction directory
mkdir -p "build/deb/$1-${CIRCLE_TAG}"
# copy software resources
cp -r gradle "build/deb/$1-${CIRCLE_TAG}/gradle"
cp -r src "build/deb/$1-${CIRCLE_TAG}/src"
cp build.gradle.kts "build/deb/$1-${CIRCLE_TAG}/"
cp CHANGELOG "build/deb/$1-${CIRCLE_TAG}/"
cp gradle.properties "build/deb/$1-${CIRCLE_TAG}/"
cp gradlew "build/deb/$1-${CIRCLE_TAG}/"
cp LICENSE "build/deb/$1-${CIRCLE_TAG}/"
cp readme.md "build/deb/$1-${CIRCLE_TAG}/"
cp settings.gradle.kts "build/deb/$1-${CIRCLE_TAG}/"
cd "build/deb"
echo "create tarball"
# debuild expects an upstream tarball named <name>_<version>.orig.tar.gz;
# the second tar invocation just lists its contents for the CI log.
tar -czf "$1_${CIRCLE_TAG}.orig.tar.gz" "$1-${CIRCLE_TAG}"
tar -ztf "$1_${CIRCLE_TAG}.orig.tar.gz"
cd ../..
# copy debian packaging files
cp -r "debian" "build/deb/$1-${CIRCLE_TAG}/debian"
echo "building debian package of $1-${CIRCLE_TAG}"
cd "build/deb/$1-${CIRCLE_TAG}"
# Fill the placeholders left in the packaging templates.
sed -i "s/%version%/${CIRCLE_TAG}/" "debian/changelog"
sed -i "s/%time%/${TIME}/" "debian/changelog"
sed -i "s/%year%/${YEAR}/g" "debian/copyright"
# -us -uc: skip signing source and changes files (CI has no GPG key).
debuild -us -uc
cd ..
ls -l
mkdir "deploy"
mv "$1_${CIRCLE_TAG}_amd64.deb" deploy/
cd ../..
echo "deb and tar.gz files are in build/deb/deploy"
<gh_stars>1-10
package tkohdk.lib.calcstr.tree.node;
/**
 * A binary node in the calc-string expression tree: a {@link TreeNode}
 * with distinct left and right children.
 *
 * NOTE(review): the return value of the setters (old child vs. this) is
 * not visible from this interface — confirm in the implementations.
 */
public interface TreeBinNode extends TreeNode {
    TreeNode setLeftNode(TreeNode left);
    TreeNode setRightNode(TreeNode right);
    TreeNode getLeftNode();
    TreeNode getRightNode();
}
|
/**
* node-disk-storage
* @author Copyright(c) 2021 by <NAME>
* MIT Licensed
*/
/**
 * Validates that every key of `compare` is one of the recognised storage
 * option properties (minSize, maxSize, compress).
 *
 * @param compare - candidate options object supplied by the caller.
 * @returns `true` when all keys are recognised (including the empty
 *          object), `undefined` when any key is unknown.
 */
export const matchProperty = (compare: Record<string, any>): boolean | undefined => {
  const knownOptions = { minSize: undefined, maxSize: undefined, compress: undefined }
  const allKeysKnown = Object.keys(compare).every((key) => key in knownOptions)
  return allKeysKnown ? true : undefined
}
|
#include <iostream>
// Struct definition and deleteNode function implementation as described in the problem description
// Demonstrates deleteNode on a small 1->2->3->4 list, printing the list
// before and after removing the node with value 3.
// NOTE(review): relies on ListNode and deleteNode being defined elsewhere
// (the comment above says "as described in the problem description").
int main() {
    // Example usage of the deleteNode function
    ListNode* head = new ListNode(1);
    head->next = new ListNode(2);
    head->next->next = new ListNode(3);
    head->next->next->next = new ListNode(4);
    std::cout << "Before deletion: ";
    ListNode* current = head;
    while (current != nullptr) {
        std::cout << current->val << " ";
        current = current->next;
    }
    std::cout << std::endl;
    deleteNode(head, 3);
    std::cout << "After deletion: ";
    current = head;
    while (current != nullptr) {
        std::cout << current->val << " ";
        current = current->next;
    }
    std::cout << std::endl;
    // Proper memory deallocation should be performed for the remaining nodes in the linked list
    while (head != nullptr) {
        ListNode* temp = head;
        head = head->next;
        delete temp;
    }
    return 0;
}
<filename>src/CommandParser.hpp
#pragma once
#include <boost/algorithm/string.hpp>

#include <string>
#include <utility>
#include <variant>
namespace cnt {
// Tag types for the zero-argument commands the parser can produce.
struct NoneCommand {};          // empty input line
struct UnknownCommand {};       // unrecognised input
struct PrintHelpCommand {};     // "?"
struct PrintRecordsCommand {};  // "!"
struct QuitCommand {};          // "q"
// Commands below carry a single (already trimmed) string argument.
struct AddRecordCommand {
    std::string recordName{};
    explicit AddRecordCommand(std::string s) : recordName{std::move(s)} {
    }
};
struct RemoveRecordCommand {
    std::string recordName{};
    explicit RemoveRecordCommand(std::string s) : recordName{std::move(s)} {
    }
};
struct RemoveAllRecordsCommand {};  // "*"
struct DumpRecordsCommand {
    std::string fileName{};
    explicit DumpRecordsCommand(std::string s) : fileName{std::move(s)} {
    }
};
struct DumpRecordsCsvCommand {
    std::string fileName{};
    explicit DumpRecordsCsvCommand(std::string s) : fileName{std::move(s)} {
    }
};
struct LoadRecordsCommand {
    std::string fileName{};
    explicit LoadRecordsCommand(std::string s) : fileName{std::move(s)} {
    }
};
// Closed set of every command the parser can return.
using Command = std::variant<NoneCommand, UnknownCommand, PrintHelpCommand, PrintRecordsCommand, QuitCommand,
                             AddRecordCommand, RemoveRecordCommand, RemoveAllRecordsCommand, DumpRecordsCommand,
                             DumpRecordsCsvCommand, LoadRecordsCommand>;
// Translates one raw input line into its Command variant.
struct CommandParser {
    Command operator()(const std::string &commandString) const {
        // Zero-argument commands are matched by full string comparison.
        if (commandString.empty()) {
            return NoneCommand{};
        }
        if (commandString == "?") {
            return PrintHelpCommand{};
        }
        if (commandString == "!") {
            return PrintRecordsCommand{};
        }
        if (commandString == "q") {
            return QuitCommand{};
        }
        if (commandString == "*") {
            return RemoveAllRecordsCommand{};
        }
        // Anything shorter than "<op> x" cannot carry an argument.
        if (commandString.size() < 3) {
            return UnknownCommand{};
        }
        // Argument-carrying commands: drop the 2-char operator prefix and
        // trim whatever remains (also absorbs the extra space after "dc").
        const auto argument = [&commandString]() {
            return boost::trim_copy(commandString.substr(2));
        };
        if (commandString.starts_with("+ ")) {
            return AddRecordCommand{argument()};
        }
        if (commandString.starts_with("- ")) {
            return RemoveRecordCommand{argument()};
        }
        if (commandString.starts_with("d ")) {
            return DumpRecordsCommand{argument()};
        }
        if (commandString.starts_with("dc ")) {
            return DumpRecordsCsvCommand{argument()};
        }
        if (commandString.starts_with("l ")) {
            return LoadRecordsCommand{argument()};
        }
        return UnknownCommand{};
    }
};
}// namespace cnt |
package org.multibit.hd.ui.fest.use_cases;
import com.google.common.util.concurrent.Uninterruptibles;
import org.fest.swing.fixture.FrameFixture;
import org.multibit.hd.testing.message_event_fixtures.MessageEventFixtures;
import org.multibit.hd.testing.hardware_wallet_fixtures.HardwareWalletFixture;
import org.multibit.hd.ui.languages.MessageKey;
import java.util.Map;
import java.util.concurrent.TimeUnit;
/**
* <p>Use case to provide the following to FEST testing:</p>
* <ul>
* <li>Verify an alert is shown when a hardware wallet is connected</li>
* <li>Verify selecting Yes will switch to the "credentials" wizard</li>
* </ul>
*
* @since 0.0.5
*/
public class SwitchToHardwareWalletUseCase extends AbstractHardwareWalletFestUseCase {

  /**
   * @param window                The FEST window frame fixture
   * @param hardwareWalletFixture The hardware wallet fixture
   */
  public SwitchToHardwareWalletUseCase(FrameFixture window, HardwareWalletFixture hardwareWalletFixture) {
    super(window, hardwareWalletFixture);
  }

  /**
   * Verifies the connect-device alert and follows the "Yes" path into the
   * hardware "credentials" (unlock) wizard.
   */
  @Override
  public void execute(Map<String, Object> parameters) {
    // TODO Link the hardware wallet fixture
    // Start the attach use case

    // Allow time for the view to react
    Uninterruptibles.sleepUninterruptibly(1, TimeUnit.SECONDS);

    // Check that an alert message is present
    assertLabelContainsValue("alert_message_label", MessageEventFixtures.STANDARD_LABEL);

    // Check the 'Yes' button on the alert is present and click it
    window
      .button(MessageKey.YES.getKey())
      .click();

    // Allow time for the switch to take place
    pauseForWalletSwitch();

    // Verify the "credentials" wizard appears after a switch in Trezor mode
    window
      .label(MessageKey.HARDWARE_UNLOCK_TITLE.getKey())
      .requireVisible();
  }
}
|
<!DOCTYPE html>
<html>
<head>
<title>My Form</title>
</head>
<body>
<form>
<!-- Input fields -->
<label for="name">Name:</label>
<input type="text" id="name"/>
<label for="email">Email:</label>
    <input type="email" id="email"/>
<input type="submit" value="Submit"/>
</form>
</body>
</html> |
/** Assorted math utilities shared across the codebase. */
export class MathHelpers {
  /**
   * Euclidean (straight-line) distance between two room positions,
   * via the Pythagorean theorem: sqrt(dx^2 + dy^2).
   */
  public static CalculateDistanceBetweenPoints(posA: RoomPosition, posB: RoomPosition): number {
    const deltaX = Math.abs(posA.x - posB.x);
    const deltaY = Math.abs(posA.y - posB.y);
    return Math.sqrt(Math.pow(deltaX, 2) + Math.pow(deltaY, 2));
  }

  /**
   * Random integer in [min, max): the maximum is exclusive and the
   * minimum is inclusive.
   */
  public static getRandomInt(min: number, max: number): number {
    const lower = Math.ceil(min);
    const upper = Math.floor(max);
    return Math.floor(Math.random() * (upper - lower) + lower);
  }
}
|
#!/bin/bash
# Builds the accelbyte/node Docker image from a templated Dockerfile.
#   $1 = base image flavour (alpine|debian), $2 = node version.
if [[ $# -ne 2 ]]; then
    echo "This script needs base image and node version as arguments"
    echo "example : ./build.sh alpine 8.12.0"
    echo "example : ./build.sh debian 8.12.0"
    exit -1
fi
# Substitute the node version into the flavour-specific template.
sed "s/<nodeversion>/${2}/g" Dockerfile.${1} > Dockerfile
docker build -t accelbyte/node:${2}-${1} .
docker tag accelbyte/node:${2}-${1} accelbyte/node:latest
# Remove the generated Dockerfile so it is never committed by accident.
rm -rf Dockerfile
|
<gh_stars>0
package workers
import (
"fmt"
"log"
"os"
"testing"
"time"
"github.com/nicholasjackson/sorcery/entities"
"github.com/nicholasjackson/sorcery/global"
"github.com/nicholasjackson/sorcery/handlers"
"github.com/nicholasjackson/sorcery/mocks"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/mock"
)
// Shared fixtures for the DeadLetterWorker tests below.
var mockDeadDispatcher *mocks.MockEventDispatcher
var mockDeadDal *mocks.MockDal
var mockDeadStatsD *mocks.MockStatsD
var deadWorker *DeadLetterWorker

// deadReg / deadError control what the mocked DAL lookups return per test.
var deadReg []*entities.Registration
var deadError error

// getDeadRegistrations returns the registrations configured for the current test.
func getDeadRegistrations() []*entities.Registration {
	return deadReg
}

// getDeadRegistration mimics a lookup by event and callback: the first
// configured registration (or nil) plus the configured error.
func getDeadRegistration() (*entities.Registration, error) {
	if len(deadReg) > 0 {
		return deadReg[0], deadError
	} else {
		return nil, deadError
	}
}

// setupDeadTests resets every mock and the default fixture data; called at
// the start of each test so tests stay independent.
func setupDeadTests(t *testing.T) {
	mockDeadDispatcher = &mocks.MockEventDispatcher{}
	mockDeadDal = &mocks.MockDal{}
	mockDeadStatsD = &mocks.MockStatsD{}
	deadWorker = NewDeadLetterWorker(mockDeadDispatcher, mockDeadDal, log.New(os.Stdout, "testing: ", log.Lshortfile), mockDeadStatsD)
	deadReg = []*entities.Registration{&entities.Registration{CallbackUrl: "myendpoint"}}
	deadError = nil
	// Three retry windows, so a FailureCount of 3 exhausts the retry budget.
	global.Config.RetryIntervals = []string{"1d", "2d", "5d"}
	// Happy-path defaults; individual tests override DispatchEvent as needed.
	mockDeadDispatcher.Mock.On("DispatchEvent", mock.Anything, mock.Anything).Return(200, nil)
	mockDeadDal.Mock.On("GetRegistrationsByEvent", mock.Anything).Return(getDeadRegistrations, nil)
	mockDeadDal.Mock.On("GetRegistrationByEventAndCallback", mock.Anything, mock.Anything).Return(getDeadRegistration)
	mockDeadDal.Mock.On("DeleteRegistration", mock.Anything).Return(nil)
	mockDeadDal.Mock.On("UpsertEventStore", mock.Anything).Return(nil)
	mockDeadDal.Mock.On("UpsertDeadLetterItem", mock.Anything).Return(nil)
	mockDeadStatsD.Mock.On("Increment", mock.Anything).Return()
}
// No registration for the callback: nothing is dispatched, but the handle
// and no-endpoint metrics still fire.
func TestHandleItemDoesNothingIfNoRegisteredEndpoint(t *testing.T) {
	setupDeadTests(t)
	event := entities.Event{EventName: "mytestevent"}
	deadLetter := &entities.DeadLetterItem{Event: event, CallbackUrl: "myurl"}
	deadError = fmt.Errorf("Not found")
	deadReg = []*entities.Registration{}
	deadWorker.HandleItem(deadLetter)
	mockDeadDispatcher.Mock.AssertNotCalled(t, "DispatchEvent", mock.Anything, mock.Anything)
	mockDeadStatsD.Mock.AssertCalled(t, "Increment", handlers.DEAD_LETTER_QUEUE+handlers.WORKER+handlers.HANDLE)
	mockDeadStatsD.Mock.AssertCalled(t, "Increment", handlers.DEAD_LETTER_QUEUE+handlers.WORKER+handlers.NO_ENDPOINT)
}

// Happy path: a registered endpoint gets the event redelivered.
func TestHandleItemDispatchesEvent(t *testing.T) {
	setupDeadTests(t)
	event := entities.Event{EventName: "mytestevent"}
	deadLetter := &entities.DeadLetterItem{Event: event, CallbackUrl: "myurl"}
	deadWorker.HandleItem(deadLetter)
	mockDeadDispatcher.Mock.AssertCalled(t, "DispatchEvent", mock.Anything, deadLetter.CallbackUrl)
	mockDeadStatsD.Mock.AssertCalled(t, "Increment", handlers.DEAD_LETTER_QUEUE+handlers.WORKER+handlers.DISPATCH)
}

// Successful redelivery must not re-queue the dead letter item.
func TestHandleItemDispatchesEventDoesNotRetry(t *testing.T) {
	setupDeadTests(t)
	event := entities.Event{EventName: "mytestevent"}
	deadLetter := &entities.DeadLetterItem{Event: event, CallbackUrl: "myurl"}
	deadWorker.HandleItem(deadLetter)
	mockDeadDal.Mock.AssertNotCalled(t, "UpsertDeadLetterItem", mock.Anything)
}

// Successful redelivery must not remove the registered endpoint either.
func TestHandleItemDispatchesEventDoesNotDeleteRegistration(t *testing.T) {
	setupDeadTests(t)
	event := entities.Event{EventName: "mytestevent"}
	deadLetter := &entities.DeadLetterItem{Event: event, CallbackUrl: "myurl"}
	deadWorker.HandleItem(deadLetter)
	mockDeadDal.Mock.AssertNotCalled(t, "DeleteRegistration", mock.Anything)
}

// A 500 response bumps the failure count and pushes NextRetryDate out by the
// next configured retry interval.
func TestHandleItemWithUndeliverableSetsRedeliveryCriteria(t *testing.T) {
	setupDeadTests(t)
	mockDeadDispatcher.Mock.ExpectedCalls = []*mock.Call{} // reset calls
	mockDeadDispatcher.Mock.On("DispatchEvent", mock.Anything, mock.Anything).Return(500, fmt.Errorf("Unable to complete"))
	event := entities.Event{}
	deadLetter := &entities.DeadLetterItem{Event: event, CallbackUrl: "myurl", FailureCount: 1, NextRetryDate: time.Now()}
	deadWorker.HandleItem(deadLetter)
	duration, _ := time.ParseDuration(global.Config.RetryIntervals[1])
	retryDate := deadLetter.NextRetryDate.Add(duration)
	assert.Equal(t, 2, deadLetter.FailureCount)
	assert.Equal(t, retryDate, deadLetter.NextRetryDate)
}

// A 500 response within the retry budget re-queues the item for redelivery.
func TestHandleItemWithErrorStateAddsToDeadLetterQueue(t *testing.T) {
	setupDeadTests(t)
	mockDeadDispatcher.Mock.ExpectedCalls = []*mock.Call{} // reset calls
	mockDeadDispatcher.Mock.On("DispatchEvent", mock.Anything, mock.Anything).Return(500, fmt.Errorf("Unable to complete"))
	event := entities.Event{}
	deadLetter := &entities.DeadLetterItem{Event: event, CallbackUrl: "myurl", FailureCount: 1, NextRetryDate: time.Now()}
	deadWorker.HandleItem(deadLetter)
	mockDeadDal.Mock.AssertCalled(t, "UpsertDeadLetterItem", deadLetter)
	mockDeadStatsD.Mock.AssertCalled(t, "Increment", handlers.DEAD_LETTER_QUEUE+handlers.WORKER+handlers.PROCESS_REDELIVERY)
}

// With the retry budget exhausted (FailureCount == len(RetryIntervals)) the
// item is not re-queued again.
func TestHandleItemWithErrorStateWithExceededRetryCountDoesNotReAdd(t *testing.T) {
	setupDeadTests(t)
	mockDeadDispatcher.Mock.ExpectedCalls = []*mock.Call{} // reset calls
	mockDeadDispatcher.Mock.On("DispatchEvent", mock.Anything, mock.Anything).Return(500, fmt.Errorf("Unable to complete"))
	event := entities.Event{}
	deadLetter := &entities.DeadLetterItem{Event: event, CallbackUrl: "myurl", FailureCount: 3, NextRetryDate: time.Now()}
	deadWorker.HandleItem(deadLetter)
	mockDeadDal.Mock.AssertNumberOfCalls(t, "UpsertDeadLetterItem", 0)
}

// With the retry budget exhausted, the registered endpoint is deleted.
func TestHandleItemWithErrorStateWithExceededRetryCountDeletesRegisteredEndpoint(t *testing.T) {
	setupDeadTests(t)
	mockDeadDispatcher.Mock.ExpectedCalls = []*mock.Call{} // reset calls
	mockDeadDispatcher.Mock.On("DispatchEvent", mock.Anything, mock.Anything).Return(500, fmt.Errorf("Unable to complete"))
	event := entities.Event{}
	deadLetter := &entities.DeadLetterItem{Event: event, CallbackUrl: "myurl", FailureCount: 3, NextRetryDate: time.Now()}
	deadWorker.HandleItem(deadLetter)
	mockDeadDal.Mock.AssertNumberOfCalls(t, "DeleteRegistration", 1)
	mockDeadStatsD.Mock.AssertCalled(t, "Increment", handlers.DEAD_LETTER_QUEUE+handlers.WORKER+handlers.DELETE_REGISTRATION)
}

// A 404 marks the endpoint permanently undeliverable: delete it immediately.
func TestHandleItemWithUndeliverableDeletesRegisteredEndpoint(t *testing.T) {
	setupDeadTests(t)
	mockDeadDispatcher.Mock.ExpectedCalls = []*mock.Call{} // reset calls
	mockDeadDispatcher.Mock.On("DispatchEvent", mock.Anything, mock.Anything).Return(404, fmt.Errorf("Unable to complete"))
	event := entities.Event{}
	deadLetter := &entities.DeadLetterItem{Event: event, CallbackUrl: "myurl", FailureCount: 1, NextRetryDate: time.Now()}
	deadWorker.HandleItem(deadLetter)
	mockDeadDal.Mock.AssertNumberOfCalls(t, "DeleteRegistration", 1)
	mockDeadStatsD.Mock.AssertCalled(t, "Increment", handlers.DEAD_LETTER_QUEUE+handlers.WORKER+handlers.DELETE_REGISTRATION)
}
|
<filename>dist/index.min.js
/*!
* name: @jswork/next-tx-cos-object
* description: Tencent cos object for next.
* homepage: https://github.com/afeiship/next-tx-cos-object
* version: 1.0.0
* date: 2020-11-21 12:19:31
* license: MIT
*/
!function(){function c(e){return{Key:e.Key}}var r=(this||window||Function("return this")()).nx||require("@jswork/next"),e=require("@jswork/next-tx-abstract-cos"),t=require("bluebird"),n={del:"deleteObjectAsync",dels:"deleteMultipleObjectAsync"},e=r.declare("nx.TxCosObject",{extends:e,methods:{"put,del,dels":function(t){return function(e){return this.parseOptions(e),this.cos[n[t]||t+"ObjectAsync"](e)}},empty:function(n){var s=this;return this.parseOptions(n),new t(function(t){s.cos.getBucketAsync(n).then(function(e){e=e.Contents.map(c),e=r.mix(null,n,{Objects:e});s.dels(e).then(t).catch(t)}).catch(t)})}}});"undefined"!=typeof module&&module.exports&&(module.exports=e)}(); |
<filename>0718-Maximum-Length-of-Repeated-Subarray/cpp_0718/Solution2.h
/**
* @author ooooo
* @date 2020/10/5 13:15
*/
#ifndef CPP_0718__SOLUTION2_H_
#define CPP_0718__SOLUTION2_H_
#include <iostream>
#include <vector>
using namespace std;

class Solution {
 public:
    // Length of the longest common contiguous subarray of A and B.
    // Space-optimised DP over a single row: row[j+1] holds the length of
    // the common run ending at A[i] / B[j] for the current outer index i.
    int findLength(vector<int> &A, vector<int> &B) {
        const int lenA = A.size(), lenB = B.size();
        vector<int> row(lenB + 1, 0);
        int best = 0;
        for (int i = 0; i < lenA; ++i) {
            int diagonal = 0;  // value from the previous row, one column left
            for (int j = 0; j < lenB; ++j) {
                const int above = row[j + 1];  // previous row, same column
                row[j + 1] = (A[i] == B[j]) ? diagonal + 1 : 0;
                best = max(best, row[j + 1]);
                diagonal = above;
            }
        }
        return best;
    }
};
#endif //CPP_0718__SOLUTION2_H_
|
./gradlew clean build bintrayUpload -PbintrayUser=$BINTRAY_USERNAME -PbintrayKey=$BINTRAY_KEY -PdryRun=false |
# Provision a Kubernetes node: install Docker, register the cluster hosts,
# prepare kernel/swap settings, and install kubeadm/kubelet/kubectl.
apt-get update
apt-get install -y docker.io
sudo usermod -a -G docker vagrant

# Drop any stale entry for this host before re-registering the cluster nodes.
sudo sed -i "/$(hostname)/d" /etc/hosts
# BUGFIX: 'sudo echo ... >> /etc/hosts' never worked — sudo elevates echo,
# but the >> redirection still runs in the unprivileged shell. Pipe through
# 'sudo tee -a' so the append itself is privileged.
cat <<EOF | sudo tee -a /etc/hosts
192.168.50.2 k8s-master
192.168.50.3 k8s-worker-0
192.168.50.4 k8s-worker-1
EOF
# Required by Weave networking (because of some CNI plugins):
# https://kubernetes.io/docs/setup/independent/create-cluster-kubeadm/#pod-network
sysctl net.bridge.bridge-nf-call-iptables=1

## Disable swap (kubelet refuses to start with swap enabled)
swapoff -a
# Comment out the swap partition, so swap is also disabled after reboot
sudo sed -i.bak '/ swap / s/^\(.*\)$/#\1/g' /etc/fstab

# Add the upstream Kubernetes apt repository and install the tooling.
apt-get update && apt-get install -y apt-transport-https
curl -s https://packages.cloud.google.com/apt/doc/apt-key.gpg | apt-key add -
cat > /etc/apt/sources.list.d/kubernetes.list <<EOF
deb http://apt.kubernetes.io/ kubernetes-xenial main
EOF
apt-get update
apt-get install -y kubelet kubeadm kubectl
systemctl enable kubelet
systemctl enable docker
|
# Classification accuracy from confusion-matrix counts.
# NOTE(review): TP/TN/FP/FN must be defined (with a non-zero total) by code
# not visible here, otherwise this raises NameError/ZeroDivisionError.
accuracy = (TP + TN) / (TP + TN + FP + FN)
<gh_stars>0
// Reverses the decimal digits of a non-negative integer (1500 -> 51).
function reverse_number(n) {
    let reversed = 0;
    while (n !== 0) {
        reversed = reversed * 10 + (n % 10);
        n = Math.floor(n / 10);
    }
    return reversed;
}

// A number is palindromic when it reads the same backwards.
function is_palindromic(n) {
    return n === reverse_number(n);
}

// Project Euler #4: largest palindrome that is a product of two numbers
// below 1000. Starting b at a visits each unordered pair exactly once.
var max = 0;
for (let a = 1; a < 1000; a += 1) {
    for (let b = a; b < 1000; b += 1) {
        const product = a * b;
        if (is_palindromic(product) && product > max) {
            max = product;
        }
    }
}
console.log(max);
|
# Project Euler #1: sum of the natural numbers below 1000 divisible by 3 or 5.
total_sum = sum(n for n in range(1000) if n % 3 == 0 or n % 5 == 0)
print("The sum of all the natural numbers below 1000 that are multiples of 3 or 5 is {}".format(total_sum))
<gh_stars>1-10
// Doxygen-generated search index fragment (symbol -> anchor mapping).
// Do not hand-edit; regenerate the documentation instead.
var searchData=
[
  ['backward_5ffilter_5f',['backward_filter_',['../class_d_f_e.html#aa6953c7cf764551e2f4b14f7ea1759a9',1,'DFE']]],
  ['backward_5ffilter_5foutput_5f',['backward_filter_output_',['../class_d_f_e.html#a76d12aa6b5972a1d7aade0202c855699',1,'DFE']]],
  ['bit_5ftime_5f',['bit_time_',['../class_a_m_i_model.html#ad0b6751b3b3a69fb8951fde0fcdf4f27',1,'AMIModel']]],
  ['boost_5ffusion_5fadapt_5fstruct',['BOOST_FUSION_ADAPT_STRUCT',['../amimodel_8h.html#a3b6210648aa742440c6393c5c5ff64b3',1,'amimodel.h']]]
];
|
# Scan the list once, keeping the largest value seen so far.
list = [1, 9, 4, 6, 2]
max = list.first
list.each do |number|
  max = number if number > max
end
puts "Largest Number: #{max}"
// Generated by script, don't edit it please.
import createSvgIcon from '../../createSvgIcon';
import SortAmountDescSvg from '@rsuite/icon-font/lib/legacy/SortAmountDesc';

// Wraps the raw SVG glyph in the shared icon component with its
// accessibility metadata.
const SortAmountDesc = createSvgIcon({
  as: SortAmountDescSvg,
  ariaLabel: 'sort amount desc',
  category: 'legacy',
  displayName: 'SortAmountDesc'
});
export default SortAmountDesc;
|
import selection, {D3Selection, D3BindSelection} from "../selection";
import ObservableArray from "../observable/array";
import Observable, {ObservableHandler} from '../observable/observable'
import BindRepeatIndexProxy from './bind-repeat-index';
import BindRepeatDatumProxy from "./bind-repeat-datum";
import {WritableObservable} from "../observable/observable";
import {setUnbindForSelectionField, unbindSelectionField} from '../bindings/unbind';
import Logger from '../utils/logger';
// Key under which the BindRepeat instance is stored on the DOM node.
const REPEAT_PREFIX = '__d3bind_repeat';

// Lifecycle phases of a repeat binding; the *_REINDEXING variants run
// while item indexes are being shifted after an insert/remove.
enum BindRepeatEvent {
    BUILD,
    INSERT,
    REMOVE,
    REPLACE,
    INSERT_REINDEXING,
    REMOVE_REINDEXING
}

// Book-keeping for one rendered item of the repeated model list.
interface BindRepeatItem<T> {
    id: number,                         // stable id, survives reordering
    index: number,                      // current position in the list
    selection: D3BindSelection,         // root node rendered for this item
    indexProxy: BindRepeatIndexProxy,
    datumProxy: BindRepeatDatumProxy<T> // only set when customReplace is on
}

export interface BindRepeatOptions<T> {
    customReplace: boolean,
    // Custom teardown hook; when absent, the item's selection is removed.
    customRemove: (modelItem: T, index: number, parent: D3BindSelection) => void
}

export type BindRepeatRenderer<T> = (modelItem: T | WritableObservable<T>, index: Observable<number>, parent: D3BindSelection) => void;
/**
 * Implements d3bind's repeat binding: renders one DOM fragment per item of
 * an ObservableArray and keeps the fragments in sync as items are
 * inserted, removed or replaced.
 */
export default class BindRepeat<T> {
    private selectionProxy: D3BindSelection;
    private repeatItems: BindRepeatItem<T>[] = [];
    private repeatItemsById: { [id: string]: BindRepeatItem<T>} = {};

    // state variables, the source of all evil
    private currentIndex: number;
    private currentEvent: BindRepeatEvent;
    private itemCounter = 0;

    private logger: Logger;

    constructor(
        public modelList: ObservableArray<T>,
        private renderer: BindRepeatRenderer<T>,
        private options: BindRepeatOptions<T> = <BindRepeatOptions<T>>{},
        private selection: D3BindSelection
    ) {
        this.logger = Logger.get('Selection', 'repeat');
        this.selectionProxy = this.createSelectionProxy();
        this.build();
        // Keep the DOM in sync with future model mutations; the replace
        // handler is only needed when customReplace is enabled.
        var unsubscribeFunc = modelList.subscribe({
            insert: (item, index) => { this.onInsert(item, index); },
            remove: (item, index) => { this.onRemove(item, index); },
            replace: this.options.customReplace ? (item, index, oldValue, caller) => { this.onReplace(item, index, oldValue, caller); } : undefined
        });
        setUnbindForSelectionField(selection, 'repeat', () => unsubscribeFunc() ? 1 : 0);
    }

    /** Creates the book-keeping record for the item at this.currentIndex. */
    private createRepeatItem() {
        var id = this.itemCounter++;
        var indexProxy = new BindRepeatIndexProxy(id, this);
        var datumProxy = this.options.customReplace ? new BindRepeatDatumProxy<T>(id, this) : null;
        var repeatItem = <BindRepeatItem<T>>{
            id: id,
            selection: null,  // filled in by insertRepeatItem() during render
            indexProxy: indexProxy,
            datumProxy: datumProxy,
            index: this.currentIndex
        };
        if (this.currentIndex === this.repeatItems.length) {
            this.repeatItems.push(repeatItem);
        } else {
            this.repeatItems.splice(this.currentIndex, 0, repeatItem);
        }
        this.repeatItemsById[id] = repeatItem;
        return repeatItem;
    }

    /** Initial render: one pass over the whole model list. */
    private build() {
        this.currentEvent = BindRepeatEvent.BUILD;
        for (this.currentIndex = 0; this.currentIndex < this.modelList.length; this.currentIndex++) {
            var repeatItem = this.createRepeatItem();
            var modelItem = this.modelList.get(this.currentIndex);
            var rendererItem = this.options.customReplace ? repeatItem.datumProxy : modelItem;
            this.renderer.call(this.selectionProxy, rendererItem, repeatItem.indexProxy, this.selectionProxy); // 'this' passed in twice, intentional redundancy
        }
        this.currentEvent = null;
        this.currentIndex = null;
    }

    /** Renders the new item, then re-indexes everything after it. */
    private onInsert(item: T, index: number) {
        this.currentEvent = BindRepeatEvent.INSERT;
        this.currentIndex = index;
        var repeatItem = this.createRepeatItem();
        var rendererItem = this.options.customReplace ? repeatItem.datumProxy : item;
        this.renderer.call(this.selectionProxy, rendererItem, repeatItem.indexProxy, this.selectionProxy);
        this.logger.log('insert:', item, '| index:', index, '| node:', repeatItem.selection.node());
        this.currentEvent = BindRepeatEvent.INSERT_REINDEXING;
        this.currentIndex++;
        this.updateIndexes();
        this.currentEvent = null;
        this.currentIndex = null;
    }

    /** Removes the item's DOM (or delegates to customRemove), unsubscribes its proxies, then re-indexes. */
    private onRemove(item: T, index: number) {
        this.currentEvent = BindRepeatEvent.REMOVE;
        this.currentIndex = index;
        var itemToRemove = this.repeatItems.splice(index, 1)[0];
        delete this.repeatItemsById[itemToRemove.id];
        this.logger.log('remove:', item, '| index:', index, '| node:', itemToRemove.selection.node());
        if (this.options.customRemove) {
            this.options.customRemove.call(itemToRemove.selection, item, index, itemToRemove.selection);
        } else {
            itemToRemove.selection.remove();
        }
        itemToRemove.indexProxy.unsubscribeAll();
        if (itemToRemove.datumProxy) {
            itemToRemove.datumProxy.unsubscribeAll();
        }
        this.currentEvent = BindRepeatEvent.REMOVE_REINDEXING;
        this.updateIndexes();
        this.currentEvent = null;
        this.currentIndex = null;
    }

    /** Only wired up when customReplace is on: forwards the new datum to the item's datum proxy. */
    private onReplace(item: T, index: number, oldValue: T, caller: any) {
        this.currentEvent = BindRepeatEvent.REPLACE;
        this.currentIndex = index;
        var repeatItem = this.repeatItems[index];
        this.logger.log('replace:', item, '| index:', index, '| oldValue:', oldValue, '| caller:', caller, ' node:', repeatItem.selection.node());
        repeatItem.datumProxy._trigger(item, oldValue, caller);
        this.currentEvent = null;
        this.currentIndex = null;
    }

    private updateIndexes() {
        // I wanted to optimize this to only run, if there are subscribers on $i or $d, but they can use $i.get() without subscribing to it
        for (; this.currentIndex < this.repeatItems.length; this.currentIndex++) {
            this.repeatItems[this.currentIndex].index = this.currentIndex;
            if (this.repeatItems[this.currentIndex].indexProxy._getSubscriberCount() > 0) { // to avoid polluting the logs
                this.repeatItems[this.currentIndex].indexProxy._trigger();
            }
        }
    }

    /** Current index of the item: during an event it is the in-flight index, otherwise the stored one. */
    getCurrentValueOfItem(id: number) {
        if (this.currentIndex !== null) {
            return this.currentIndex;
        } else {
            var index = this.repeatItemsById[id] && this.repeatItemsById[id].index;
            if (index == null) console.warn("bindRepeat index not found!");
            return index;
        }
    }

    getCurrentAndPreviousValueOfItem(id: number) {
        var newValue = this.getCurrentValueOfItem(id);
        var oldValue: number = null;
        // BUGFIX: this condition used to read
        //   `this.currentEvent === null || BindRepeatEvent.REPLACE`
        // where the second operand was the (truthy) enum constant itself, so
        // the branch was taken for EVERY event and oldValue always equalled
        // newValue. Compare currentEvent against the constant instead.
        if (this.currentEvent === null || this.currentEvent === BindRepeatEvent.REPLACE) {
            oldValue = newValue;
        } else if (this.currentEvent === BindRepeatEvent.INSERT || this.currentEvent === BindRepeatEvent.REMOVE ||
                this.currentEvent === BindRepeatEvent.BUILD) {
            oldValue = null;
        } else if (this.currentEvent === BindRepeatEvent.INSERT_REINDEXING ||
                this.currentEvent === BindRepeatEvent.REMOVE_REINDEXING) {
            oldValue = this.currentIndex - 1;
        }
        return { newValue, oldValue };
    }

    /**
     * Returns a selection whose append/insert route new nodes to the correct
     * position among the repeated items instead of the end of the parent.
     */
    private createSelectionProxy(): D3BindSelection {
        var proxy: D3BindSelection = Object.create(this.selection);
        proxy.append = (input: any): D3BindSelection => {
            return this.insertRepeatItem(input);
        };
        proxy.insert = (input: any, before: any): D3BindSelection => {
            if (before !== undefined) throw "before parameter of .insert() not supported inside bindRepeat";
            return this.insertRepeatItem(input);
        };
        return proxy;
    }

    private insertRepeatItem(input: string): D3BindSelection;
    private insertRepeatItem(input: () => EventTarget): D3BindSelection;
    private insertRepeatItem(input: any): D3BindSelection {
        if (this.currentIndex == null) {
            // TODO this.getCurrentIndexOfSelectionProxy(); - but there would be N different selection proxies then
            throw "the bindRepeat render function must call the append/insert method synchronously!";
        }
        var i = this.currentIndex;
        var newItem: D3BindSelection = null;
        if (i >= this.repeatItems.length) {
            newItem = this.selection.append(input);
        } else {
            /* I wanted to use something like '> :nth-child($i+1)', but querySelector and thus d3 .insert() doesn't support
             selectors for direct children only, except with polyfills:
             http://stackoverflow.com/questions/6481612/queryselector-search-immediate-children */
            newItem = this.selection.insert(input, () => this.selection.node().childNodes[i]);
        }
        this.repeatItems[i].selection = newItem;
        return newItem;
    }
}
// Entry point mixed into the selection prototype: builds a BindRepeat and
// stores it on the DOM node so unbindRepeat can find it later.
function bindRepeat<T>(modelList: ObservableArray<T>, renderer: BindRepeatRenderer<T>, options?: BindRepeatOptions<T>): D3BindSelection {
    this.node()[REPEAT_PREFIX] = new BindRepeat<T>(modelList, renderer, options, this);
    return this;
}
selection.bindRepeat = bindRepeat;

// Tears down the repeat binding: unsubscribes the model listener and every
// per-item index/datum proxy.
selection.unbindRepeat = function(): D3BindSelection {
    unbindSelectionField(this, 'repeat');
    var repeatItems: BindRepeatItem<any>[] = this.node()[REPEAT_PREFIX].repeatItems;
    repeatItems.forEach(repeatItem => {
        repeatItem.indexProxy.unsubscribeAll();
        if (repeatItem.datumProxy) {
            repeatItem.datumProxy.unsubscribeAll();
        }
    });
    return this;
};
|
#pragma once
#include <cstddef>
// Problem-size constant shared by every translation unit that includes this
// header (const at namespace scope has internal linkage: one copy per TU).
const std::size_t N = 50;
# the outermost output directory
outer_dir="merged_IMGT_alleles/"
# Known IMGT allele set and the CHM13-derived novel alleles to merge in.
allele_path="../plot_tree/BCRV_alleles.fasta"
novel_path="CHM13_bcrv_novel_alleles/corrected_alleles_filtered.fasta"
output_name="BCRV_with_CHM13_novel_alleles.fasta"

echo "[MERGE NOVEL] Merge novel alleles"
mkdir -p ${outer_dir}
python3 merge_novel_alleles.py -fa ${allele_path} \
                               -fn ${novel_path} \
                               -fom ${outer_dir}${output_name}
echo "[MERGE NOVEL] Finished!"
|
<reponame>cschladetsch/KAI<filename>Include/KAI/Language/Common/ProcessCommon.h
#pragma once
#include <KAI/Core/Value.h>
#include <KAI/Core/Registry.h>
#include <KAI/Language/Common/Process.h>

KAI_BEGIN

// Shared base for language processes: convenience wrappers that create
// registry-owned values without each subclass touching _reg directly.
struct ProcessCommon : Process
{
    // Creates a default-constructed T owned by the registry.
    template <class T>
    Value<T> New()
    {
        return _reg->New<T>();
    }

    // Creates a T copy-constructed from val, owned by the registry.
    template <class T>
    Value<T> New(const T& val)
    {
        return _reg->New<T>(val);
    }

    ProcessCommon() { }
    ProcessCommon(Registry &r) : _reg(&r) { }

protected:
    // Non-owning; stays null until a Registry is supplied via the constructor.
    Registry *_reg = 0;
};

KAI_END
|
package org.zalando.intellij.swagger.intention.reference;
import com.intellij.codeInsight.intention.IntentionAction;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.project.Project;
import com.intellij.psi.PsiFile;
import org.jetbrains.annotations.Nls;
import org.jetbrains.annotations.NotNull;
import org.zalando.intellij.swagger.reference.ReferenceValueExtractor;
import org.zalando.intellij.swagger.traversal.JsonTraversal;
/**
 * Quick-fix intention that creates the missing target of an unresolved
 * JSON $ref in a Swagger specification.
 */
public class CreateJsonReferenceIntentionAction implements IntentionAction {

  /** The raw reference value, including its "#/..." prefix. */
  private final String referenceValueWithPrefix;

  public CreateJsonReferenceIntentionAction(final String referenceValueWithPrefix) {
    this.referenceValueWithPrefix = referenceValueWithPrefix;
  }

  @Nls
  @NotNull
  @Override
  public String getText() {
    return "Create";
  }

  @Nls
  @NotNull
  @Override
  public String getFamilyName() {
    return "Create";
  }

  @Override
  public boolean isAvailable(
      @NotNull final Project project, final Editor editor, final PsiFile psiFile) {
    // Offered unconditionally; the caller only instantiates this action for
    // unresolved references.
    return true;
  }

  @Override
  public void invoke(@NotNull final Project project, final Editor editor, final PsiFile psiFile) {
    // Split "#/definitions/Foo" into its type ("definitions") and
    // value ("Foo") parts, then create the stub element in the file.
    final String referenceType = ReferenceValueExtractor.extractType(referenceValueWithPrefix);
    final String referenceValueWithoutPrefix =
        ReferenceValueExtractor.extractValue(referenceValueWithPrefix);
    new ReferenceCreator(referenceValueWithoutPrefix, referenceType, psiFile, new JsonTraversal())
        .create();
  }

  @Override
  public boolean startInWriteAction() {
    // invoke() mutates the PSI tree, so it must run inside a write action.
    return true;
  }
}
|
"""Attempt to parse a database "connection string", retrieving the relevant component parts."""
from pytest import fixture
from .. import URI
def parse_dburi(url:str, uppercase:bool=False) -> dict:
    """Parse a given URL or URI string and return the component parts relevant for database connectivity.

    These come in the general UNIX form:

        engine://[user:pass@]host[:port]/database[?options]

    :param url: the connection string to parse
    :param uppercase: when True, keys of the returned mapping are upper-cased
    :returns: dict with engine/name/host (or hosts)/user/password/port/options
    """
    uri = URI(url)

    parts = {
        'engine': str(uri.scheme),
        'name': uri.path.parts[0],
        'host': uri.host,
        'user': uri.user,
        # Restored: this previously contained a redacted placeholder instead
        # of reading the password from the parsed URI.
        'password': uri.password,
        'port': uri.port,
        'options': uri.query,
    }

    if not uri.scheme: del parts['engine']  # Parity with dj-mongohq-url

    # Guard against a missing host before probing for a comma-separated
    # replica-set host list; `',' in None` raised a TypeError.
    if parts['host'] and ',' in parts['host']:
        parts['hosts'] = [i.strip() for i in parts.pop('host').split(',')]

    if uppercase:
        for k in list(parts): parts[k.upper()] = parts.pop(k)

    return parts
|
#!/bin/bash
# Launch the Rack application on all interfaces.
# PORT must be set by the environment (e.g. by the platform/PaaS);
# quoted so an unset/odd value fails loudly instead of word-splitting.
bundle exec rackup --host 0.0.0.0 -p "$PORT"
|
# Evaluate the 512+0+512 STG model (checkpoint 7) on WikiText-103 validation,
# applying the "shuffle, keep only nouns+verbs, first half-quarter" augmentation
# and scoring the penultimate quarter of each sequence.
python transformers/examples/language-modeling/run_language_modeling.py --model_name_or_path train-outputs/512+0+512-STG/7-model --tokenizer_name model-configs/1024-config --eval_data_file ../data/wikitext-103-raw/wiki.valid.raw --output_dir eval-outputs/512+0+512-STG/7-512+0+512-shuffled-N-VB-first-256 --do_eval --per_device_eval_batch_size 1 --dataloader_drop_last --augmented --augmentation_function shuffle_remove_all_but_nouns_and_verbs_first_half_quarter --eval_function penultimate_quarter_eval
import hashlib
class Library():
    """In-memory book collection keyed by the SHA-256 digest of each title."""

    def __init__(self):
        # Maps sha256(title) hex digest -> book object.
        self.books = {}

    @staticmethod
    def _key(title):
        """Return the dictionary key for a title (previously duplicated in three methods)."""
        return hashlib.sha256(title.encode()).hexdigest()

    def add_book(self, book):
        """Add (or replace) a book, keyed by its ``title`` attribute."""
        self.books[self._key(book.title)] = book

    def remove_book(self, title):
        """Remove the book with the given title; a no-op if absent."""
        self.books.pop(self._key(title), None)

    def is_duplicate(self, title):
        """Return True when a book with this title is already stored."""
        return self._key(title) in self.books
package seatgeek
import (
"encoding/json"
"fmt"
"io/ioutil"
"log"
"net/http"
"os"
"github.com/gosimple/slug"
)
const (
	// clientIdEnvVar names the environment variable that must hold the
	// SeatGeek API client id.
	clientIdEnvVar = "SEATGEEK_CLIENT_ID"
)

// Client is a thin HTTP client for the SeatGeek API.
type Client struct {
	http     *http.Client // underlying HTTP transport
	baseAddr string       // API base URL, with trailing slash
	clientId string       // client id appended to every request
}
// NewClient returns a client for SeatGeek API.
// It terminates the process if the client-id environment variable is unset.
func NewClient() *Client {
	id := os.Getenv(clientIdEnvVar)
	if id == "" {
		log.Fatalf("Please provide client id by setting %s", clientIdEnvVar)
	}
	client := &Client{
		http:     &http.Client{},
		baseAddr: "https://api.seatGeek.com/2/",
		clientId: id,
	}
	return client
}
// Concert is a flattened view of a SeatGeek "event" object.
type Concert struct {
	Id       string // numeric event id, rendered as a string
	Title    string // event title
	City     string // venue city
	Datetime string // UTC event time as returned in "datetime_utc"
}
// GetArtistsConcerts uses GetArtistConcerts to return a list of all provided
// artists' upcoming concerts. Zero-valued (empty) concerts are dropped.
func (c *Client) GetArtistsConcerts(names []string) ([]Concert, error) {
	all := make([]Concert, 0, len(names))
	// TODO: Make requests concurrently after validating seatgeek doesn't rate limit; dedup concerts
	for _, artist := range names {
		found, err := c.GetArtistConcerts(artist)
		if err != nil {
			return nil, fmt.Errorf("Could not retrieve concerts for artist `%s`: %s", artist, err)
		}
		for _, concert := range found {
			if (concert == Concert{}) {
				continue
			}
			all = append(all, concert)
		}
	}
	return all, nil
}
// GetArtistConcerts looks up artist by name and returns a list of
// their upcoming concerts.
//
// All type assertions on the decoded JSON are checked: the previous
// implementation asserted unconditionally and panicked on any response
// whose shape differed from the happy path. Missing optional fields now
// simply leave the corresponding Concert field zero-valued.
func (c *Client) GetArtistConcerts(name string) ([]Concert, error) {
	name = slug.Make(name)
	path := fmt.Sprintf("events?performers.slug=%s", name)
	res, err := c.get(path)
	if err != nil {
		return nil, err
	}
	concertsMeta, ok := res["events"]
	if !ok {
		return nil, fmt.Errorf("Could not find 'events' array in response")
	}
	events, ok := concertsMeta.([]interface{})
	if !ok {
		return nil, fmt.Errorf("'events' is not an array in response")
	}
	concerts := make([]Concert, 0, len(events))
	for _, concertMeta := range events {
		concert, ok := concertMeta.(map[string]interface{})
		if !ok {
			return nil, fmt.Errorf("event entry is not an object in response")
		}
		var ct Concert
		if id, ok := concert["id"].(float64); ok {
			ct.Id = fmt.Sprintf("%.0f", id)
		}
		if title, ok := concert["title"].(string); ok {
			ct.Title = title
		}
		if venue, ok := concert["venue"].(map[string]interface{}); ok {
			if city, ok := venue["city"].(string); ok {
				ct.City = city
			}
		}
		if dt, ok := concert["datetime_utc"].(string); ok {
			ct.Datetime = dt
		}
		concerts = append(concerts, ct)
	}
	return concerts, nil
}
// TODO: consider changing to the .NewRequest() and .Do model.
// get executes a GET call against the API (appending the client id) and
// returns the decoded JSON response object as a generic map.
func (c *Client) get(path string) (map[string]interface{}, error) {
	url := c.baseAddr + path + "&client_id=" + c.clientId
	res, err := c.http.Get(url)
	if err != nil {
		return nil, err
	}
	defer res.Body.Close()
	if res.StatusCode > 299 || res.StatusCode < 200 {
		return nil, fmt.Errorf("Received non-OK HTTP Status code: %d", res.StatusCode)
	}
	body, err := ioutil.ReadAll(res.Body)
	if err != nil {
		return nil, err
	}
	var obj map[string]interface{}
	// body is already a []byte; the previous []byte(body) conversion was a
	// redundant copy.
	if err := json.Unmarshal(body, &obj); err != nil {
		return nil, err
	}
	return obj, nil
}
|
#!/usr/bin/env bash
set -ex

# Run BOP-CSV evaluation for every YCB-V single-object DeepIM model.
# The original script repeated the same command 21 times; both the config
# file name and the checkpoint path are derived from the object name, so
# the commands are driven from one table of "<object> <checkpoint-hash>".
CFG_DIR=configs/deepim/ycbvPbrSO/FlowNet512_1.5AugCosyAAEGray_AggressiveV3_Flat_ycbvPbr_SO
OUT_DIR=output/deepim/ycbvPbrSO/FlowNet512_1.5AugCosyAAEGray_AggressiveV3_Flat_ycbvPbr_SO

MODELS=(
    "01_02MasterChefCan f9b45add"
    "02_03CrackerBox 8085b93a"
    "03_04SugarBox 09d8712e"
    "04_05TomatoSoupCan 1b91bfde"
    "05_06MustardBottle 76d617ad"
    "06_07TunaFishCan df0d3e00"
    "07_08PuddingBox be3b4685"
    "08_09GelatinBox 9cb36af3"
    "09_10PottedMeatCan 0d651cea"
    "10_11Banana 2885507c"
    "11_19PitcherBase a6dbc5e6"
    "12_21BleachCleanser 94c0fbf0"
    "13_24Bowl d2cbe903"
    "14_25Mug 450d3409"
    "15_35PowerDrill ca66ffb9"
    "16_36WoodBlock 3159154a"
    "17_37Scissors ae581f60"
    "18_40LargeMarker 96fdb54f"
    "19_51LargeClamp 081a3e92"
    "20_52ExtraLargeClamp 22a5a504"
    "21_61FoamBrick 90d8317c"
)

for entry in "${MODELS[@]}"; do
    read -r name hash <<< "$entry"
    ./core/deepim/test_deepim.sh \
        "$CFG_DIR/FlowNet512_1.5AugCosyAAEGray_AggressiveV3_Flat_Pbr_${name}_bop_test.py" 1 \
        "$OUT_DIR/${name}/model_final_wo_optim-${hash}.pth" \
        VAL.SAVE_BOP_CSV_ONLY=True
done

# eval merged csv
#python core/deepim/engine/test_utils.py \
#    --result_dir output/deepim/ycbvPbrSO/FlowNet512_1.5AugCosyAAEGray_AggressiveV3_Flat_ycbvPbr_SO/merged-bop-iter4/ \
#    --result_names FlowNet512-1.5AugCosyAAEGray-AggressiveV3-Flat-Pbr-merged-bop-test-test-iter4_ycbv-test.csv \
#    --dataset ycbv \
#    --split test \
#    --split-type "" \
#    --targets_name test_targets_bop19.json \
#    --error_types mspd,mssd,vsd,reS,teS,reteS,ad \
#    --render_type cpp
|
<reponame>jonqiao/SpringCloud-StockMarketCharting<filename>fsdms-angular-app/src/app/component/admin/company/company.component.ts<gh_stars>1-10
import { Component, OnInit } from '@angular/core';
import { FormBuilder, Validators } from '@angular/forms';
import { LogService } from '../../../service/log.service';
import { CompanyService } from '../../../service/company.service';
import { Company } from '../../../model/company.model';
import { finalize } from 'rxjs/operators';
declare var $: any;
@Component({
  selector: 'app-company',
  templateUrl: './company.component.html',
  styleUrls: ['./company.component.less']
})
/**
 * Admin CRUD screen for companies, backed by a jQuery bootstrap-table
 * (`#companyTable`) and a reactive form in the `#companyModal` dialog.
 */
export class CompanyComponent implements OnInit {
  /** Companies currently loaded into the bootstrap table. */
  companyArr: Array<Company> = [];
  /** True when the form submits an update of an existing company, false for a new one. */
  updateFlag = false;

  companyForm = this.formBuilder.group({
    companyName: ['', Validators.required],
    turnover: ['', Validators.required],
    ceo: ['', Validators.required],
    boardOfDirectors: ['', Validators.required],
    stockExchange: ['', Validators.required],
    sectorName: ['', Validators.required],
    briefWriteup: ['', Validators.required],
    companyStockCode: ['', Validators.required],
    active: ['', Validators.required]
  });

  constructor(
    private formBuilder: FormBuilder,
    private CompanySrv: CompanyService,
    private logSrv: LogService
  ) { }

  ngOnInit() {
    this.logSrv.log('CompanyComponent-ngOnInit');
    this.refreshTable();
  }

  // Convenience accessors for the form controls (used by the template).
  get companyName() {
    return this.companyForm.get('companyName');
  }
  get turnover() {
    return this.companyForm.get('turnover');
  }
  get ceo() {
    return this.companyForm.get('ceo');
  }
  get boardOfDirectors() {
    return this.companyForm.get('boardOfDirectors');
  }
  get stockExchange() {
    return this.companyForm.get('stockExchange');
  }
  get sectorName() {
    return this.companyForm.get('sectorName');
  }
  get briefWriteup() {
    return this.companyForm.get('briefWriteup');
  }
  get companyStockCode() {
    return this.companyForm.get('companyStockCode');
  }
  get active() {
    return this.companyForm.get('active');
  }

  /**
   * Rows currently selected in the bootstrap table (may be empty).
   * Extracted because the original repeated this jQuery expression at
   * every use site.
   */
  private getSelections(): any[] {
    return $('#companyTable').bootstrapTable('getSelections');
  }

  comModalNewOpen() {
    this.companyFormReset();
  }

  /** Pre-fill the modal form from the selected row for an update. */
  comModalUptOpen() {
    const selections = this.getSelections();
    if (selections.length > 0) {
      const sel = selections[0];
      this.companyForm.patchValue({
        companyName: sel.companyName,
        turnover: sel.turnover,
        ceo: sel.ceo,
        boardOfDirectors: sel.boardOfDirectors,
        stockExchange: sel.stockExchange,
        sectorName: sel.sectorName,
        briefWriteup: sel.briefWriteup,
        companyStockCode: sel.companyStockCode,
        active: sel.active
      });
      // Identity/status fields are not editable during an update.
      this.companyName.disable();
      this.active.disable();
    }
  }

  /** Mark the selected company as active on the server. */
  comActive() {
    const selections = this.getSelections();
    if (selections.length > 0) {
      this.logSrv.log(selections[0]);
      const companyName = selections[0].companyName;
      this.CompanySrv.activeCompany(companyName).subscribe(response => {
        if (response.status === 200) {
          this.logSrv.log('activeCompany-result = ', response.data.result);
        }
      });
    }
  }

  /** Mark the selected company as inactive on the server. */
  comDeactive() {
    const selections = this.getSelections();
    if (selections.length > 0) {
      this.logSrv.log(selections[0]);
      const companyName = selections[0].companyName;
      this.CompanySrv.deactiveCompany(companyName).subscribe(response => {
        if (response.status === 200) {
          this.logSrv.log('deactiveCompany-result = ', response.data.result);
        }
      });
    }
  }

  /** Create or update a company depending on updateFlag; resets the form either way. */
  onSubmit() {
    if (this.companyForm.valid) {
      this.logSrv.log('comNewForm value = ', this.companyForm.value);
      const com = new Company();
      com.companyName = this.companyName.value;
      com.turnover = this.turnover.value;
      com.ceo = this.ceo.value;
      com.boardOfDirectors = this.boardOfDirectors.value;
      com.stockExchange = this.stockExchange.value;
      com.sectorName = this.sectorName.value;
      com.briefWriteup = this.briefWriteup.value;
      com.companyStockCode = this.companyStockCode.value;
      com.active = this.active.value;
      if (this.updateFlag) {
        this.CompanySrv.updateCompany(com.companyName, com)
          .pipe(
            finalize(() => {
              this.companyFormReset();
            })
          )
          .subscribe(
            // must call subscribe() or nothing happens. Just call post does not initiate the expected request
            response => {
              if (response.status === 200) {
                this.logSrv.log(
                  'updateCompany result = ',
                  response.data.result
                );
              }
            }
          );
      } else {
        this.CompanySrv.newCompany(com)
          .pipe(
            finalize(() => {
              this.companyFormReset();
            })
          )
          .subscribe(response => {
            if (response.status === 200) {
              this.logSrv.log('newCompany result = ', response.data.result);
            }
          });
      }
    }
  }

  /** Hide the modal, clear/enable the form, and deselect any table rows. */
  companyFormReset() {
    $('#companyModal').modal('hide');
    this.updateFlag = false;
    this.companyForm.reset();
    this.companyForm.enable();
    if (this.getSelections().length > 0) {
      $('#companyTable').bootstrapTable('uncheckAll');
    }
  }

  /** Row-click handler: an update is implied whenever a row is selected. */
  clickEvent() {
    this.updateFlag = this.getSelections().length > 0;
  }

  /** Reload the company list from the server into the bootstrap table. */
  refreshTable() {
    this.CompanySrv.getCompanies().subscribe(response => {
      if (response.status === 200) {
        this.companyArr = response.data.result;
        $('#companyTable')
          .bootstrapTable('load', this.companyArr)
          .bootstrapTable('refresh');
      }
    });
  }
}
|
#!/bin/bash
# Copyright (c) 2021 MotoAcidic

# whiptail menu geometry.
HEIGHT=15
WIDTH=40
CHOICE_HEIGHT=6

BACKTITLE="StrongHands Compile Wizard"
TITLE="StrongHands Compile Wizard"
MENU="Choose one of the following bases to compile from:"

OPTIONS=(1 "Compile Windows"
         2 "Compile Linux"
         0 "Exit Script"
)

# Show the menu; whiptail writes the selection to stderr, so swap it onto
# stdout while keeping the UI on the terminal.
CHOICE=$(whiptail --clear\
        --backtitle "$BACKTITLE" \
        --title "$TITLE" \
        --menu "$MENU" \
        $HEIGHT $WIDTH $CHOICE_HEIGHT \
        "${OPTIONS[@]}" \
        2>&1 >/dev/tty)
clear

case $CHOICE in
0) # Exit
    exit
    ;;
1) # Windows compile
    cd ../../platform/win/
    bash win.sh
    ;;
2) # Linux Compile
    cd ../../platform/linux/
    bash linux.sh
    ;;
esac
<filename>src/helpers/getToken.js
// Extract the client's auth token from a request: the body field wins,
// then the query string, then the "x-access-token" header.
const getToken = request => {
  const { body, query, headers } = request;
  return body.token || query.token || headers["x-access-token"];
};

module.exports = getToken;
|
<reponame>iumarchenko/recsys<filename>final_proj/src/recommenders.py
#!/usr/bin/env python
# coding: utf-8
# In[3]:
import pandas as pd
import numpy as np
# Для работы с матрицами
from scipy.sparse import csr_matrix
# Матричная факторизация
from implicit.als import AlternatingLeastSquares
from implicit.nearest_neighbours import ItemItemRecommender # нужен для одного трюка
from implicit.nearest_neighbours import bm25_weight, tfidf_weight
class MainRecommender:
    """Recommendations built on top of an ALS model.

    Input
    -----
    user_item_matrix: pd.DataFrame
        User-item interaction matrix
    """

    def __init__(self, data, top_popular, item_features, item_mean_cost, popular_exp_item, weighting=True):
        # Pre-computed auxiliary tables supplied by the caller.
        self.top_popular = top_popular          # top purchases per user
        self.item_features = item_features
        self.item_mean_cost = item_mean_cost
        self.popular_exp_item = popular_exp_item

        self.user_item_matrix = self.prepare_matrix(data)  # pd.DataFrame
        self.id_to_itemid, self.id_to_userid, self.itemid_to_id, self.userid_to_id = self.prepare_dicts(self.user_item_matrix)

        # Optionally re-weight interactions with BM25 before fitting.
        if weighting:
            self.user_item_matrix = bm25_weight(self.user_item_matrix.T, K1=12,B=0.165).T

        self.model = self.fit(self.user_item_matrix)
        self.own_recommender = self.fit_own_recommender(self.user_item_matrix)
        self.all_recommendations = self.get_all_recommendations(self.model,N=200)

    @staticmethod
    def prepare_matrix(data):
        """Pivot raw transactions into a user x item interaction-count matrix."""
        user_item_matrix = pd.pivot_table(data,
                                          index='user_id', columns='item_id',
                                          values='quantity',  # other value columns could be tried
                                          aggfunc='count',
                                          fill_value=0
                                          )
        user_item_matrix = user_item_matrix.astype(float)
        return user_item_matrix

    @staticmethod
    def prepare_dicts(user_item_matrix):
        """Build the helper dictionaries mapping between raw ids and matrix indices."""
        userids = user_item_matrix.index.values
        itemids = user_item_matrix.columns.values

        matrix_userids = np.arange(len(userids))
        matrix_itemids = np.arange(len(itemids))

        id_to_itemid = dict(zip(matrix_itemids, itemids))
        id_to_userid = dict(zip(matrix_userids, userids))

        itemid_to_id = dict(zip(itemids, matrix_itemids))
        userid_to_id = dict(zip(userids, matrix_userids))

        return id_to_itemid, id_to_userid, itemid_to_id, userid_to_id

    @staticmethod
    def fit_own_recommender(user_item_matrix):
        """Fit a model that recommends only among items the user has already bought."""
        own_recommender = ItemItemRecommender(K=1, num_threads=8)
        own_recommender.fit(csr_matrix(user_item_matrix).T.tocsr(),show_progress=True)
        return own_recommender

    @staticmethod
    def fit(user_item_matrix, n_factors=20, regularization=0.001, iterations=15, num_threads=4):
        """Fit the ALS model.

        NOTE(review): the n_factors/regularization/iterations/num_threads
        parameters are ignored -- the constructor call below hard-codes
        factors=7 and num_threads=16. Confirm which values are intended
        before relying on this signature.
        """
        model = AlternatingLeastSquares(factors=7,
                                        regularization=0.001,
                                        iterations=15,
                                        calculate_training_loss=True,
                                        num_threads=16)
        model.fit(csr_matrix(user_item_matrix).T.tocsr())

        return model

    def get_all_recommendations(self, model, N=200):
        """Precompute top-N recommendations for every user at once."""
        recommendations = model.recommend_all(N=N,
                                              user_items=csr_matrix(self.user_item_matrix).tocsr(),
                                              filter_already_liked_items=True,
                                              filter_items=False,
                                              recalculate_user=True,
                                              show_progress=True,
                                              batch_size=500)
        return recommendations

    def get_recommendations(self, user, model, N=5):
        """Return up to N recommended raw item ids for a known user ([] for unknown users)."""
        res = []
        if user in self.userid_to_id:
            res = [self.id_to_itemid[rec[0]] for rec in
                   model.recommend(userid=self.userid_to_id[user],
                                   user_items=csr_matrix(self.user_item_matrix).tocsr(),  # takes the user-item matrix
                                   N=N,
                                   filter_already_liked_items=False,
                                   filter_items=False,
                                   recalculate_user=False)]
        return res

    def get_recommendations_price(self, recommendations):
        """Map a list of recommended item ids to their mean prices (same order)."""
        res = [self.item_mean_cost.loc[self.item_mean_cost['item_id']==pr,'mean_price'].values[0] for pr in recommendations]
        return res
|
import { QueryParamsToStringPipe } from './query-params-to-string.pipe';
describe('QueryParamsToStringPipe', () => {
  it('create an instance', () => {
    const pipe = new QueryParamsToStringPipe();
    expect(pipe).toBeTruthy();
    // Missing or empty params serialize to an empty string...
    expect(pipe.transform()).toBe('');
    expect(pipe.transform({})).toBe('');
    // ...otherwise to a leading-'?' query string with '&'-joined pairs.
    expect(pipe.transform({key: 'value'})).toBe('?key=value');
    expect(pipe.transform({key1: 'value1', key2: 'value2'})).toBe('?key1=value1&key2=value2');
  });
});
|
#!/bin/bash
# SPDX-License-Identifier: Apache-2.0
# Copyright Authors of Cilium
# Simple script to make sure viper.GetStringMapString should not be used.
# Related upstream issue https://github.com/spf13/viper/issues/911
# Fail when a forbidden API appears anywhere in the tree (search output is
# printed by grep itself before the hint message).
forbid() {
    local pattern="$1" hint="$2"
    if grep -r --exclude-dir={.git,_build,vendor,contrib} -i --include \*.go "$pattern" .; then
        echo "$hint";
        exit 1
    fi
}

forbid "viper.GetStringMapString" "Found viper.GetStringMapString(key) usage. Please use command.GetStringMapString(viper.GetViper(), key) instead"
forbid "StringToStringVar" "Found flags.StringToStringVar usage. Please use option.NewNamedMapOptions instead"
|
#!/bin/sh
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# Default BASE to the repository root when the caller has not set it.
# "$BASE" is quoted: the unquoted form only worked by accident when the
# variable was unset, and breaks on values containing whitespace.
if [ -z "$BASE" ]; then
    BASE=../..
fi

ghci -fglasgow-exts -package $BASE_PKG -hide-package syb -i$BASE/lib/hs/src -i$BASE/test/hs/gen-hs Client.hs
|
#!/bin/bash

# COPY PHASE: install each launcher script into the distribution directory.
for launcher in \
    ftequakeworld \
    ftequakeworld-quake1 \
    ftequakeworld-quake1-mg1 \
    ftequakeworld-quake1-dopare \
    ftequakeworld-quake1-re
do
    cp -v "assets/${launcher}.sh" "$diststart/common/dist/"
done
|
<gh_stars>0
import React from "react"
import { Fade } from "react-reveal"
import { CoverLayout } from "../components/Layout"
import ImageCover from "../components/ImageCover"
import Section from "../components/Section"
import { Helmet } from "react-helmet"
// Static page for the Winter 2020 Case Study Competition sample case.
// NOTE(review): the literal "<NAME>" strings below look like redacted
// placeholders from a data pipeline — confirm the intended names before
// publishing this page.
export default () => (
  <CoverLayout
    navbarTextColor="white"
    coverElement={
      <ImageCover
        text="Case Study Competition Sample Case"
        coverImage="casestudy.jpg"
        textColor="white"
        darkGradient
      />
    }
  >
    {/* Page metadata */}
    <Helmet>
      <title>Case Study Competition | Delta Sigma Pi - Pi Sigma Chapter</title>
      <meta
        name="Delta Sigma Pi - Pi Sigma Chapter Case Study Competition Page"
        content="Delta Sigma Pi - Pi Sigma Chapter Case Study Competition Page"
      />
    </Helmet>
    <Fade>
      {/* Page headings */}
      <Section sx={{ justifyContent: "left" }}>
        <h1 style={{ marginBottom: 10, fontSize: 40 }}>
          Delta Sigma Pi: Professionalism Week - Winter 2020
        </h1>
        <h1 style={{ marginBottom: 10, fontSize: 40 }}>
          Case Study Competition - Sample Case
        </h1>
      </Section>
      {/* Rules for participants */}
      <Section sx={{ justifyContent: "left" }}>
        <h1 style={{ marginBottom: 10 }}>Case Instructions:</h1>
        <div>
          - You have 20 minutes to review this case with your teammates. No
          phones/tech during case prep.
          <br />
          - You may take notes (notecards/pens provided at event) and use them
          during the presentation.
          <br />
          - You will assume the role of a consulting firm hired to present the
          case deliverables to the client (judges).
          <br />- Presentation to judges: 5-7 minutes, Q&A: 3 minutes,
          Feedback/Discussion: 5 minutes
        </div>
      </Section>
      {/* Scenario background */}
      <Section sx={{ justifyContent: "left" }}>
        <h1 style={{ marginBottom: 10 }}>Case Introduction:</h1>
        <div>
          You are to assume the role of a consultant working for your client,
          ANTEATER HOTELS, a moderately priced hotel chain with properties
          across the country. The C-Suite of the company (judges) has asked you
          to create marketing strategies and recommend operational changes that
          focus on teenagers and young adults as the target market.
        </div>
        <br />
        <div>
          <NAME> has over 1,500 properties across the United States.
          Each ANTEATER HOTEL location boasts full-service restaurants, spa and
          fitness centers, and pools with playground areas along with
          beautifully appointed guest rooms and suites. The company has the
          highest rating in overall customer service and is in tight competition
          with its closest competitor for most rewards club members. While
          business isn’t hurting, the C-Suite (judges) feels that more attention
          needs to be focused on expanding brand loyalty, especially to
          teenagers (age 13-18) and young adults (age 18-25). Company data also
          shows that young adults make up the smallest portion of their rewards
          club members.
        </div>
        <br />
        <div>
          <NAME> is interested in hearing about effective marketing
          strategies that can draw in teenagers and young adults to their
          hotels. Your client is also interested in operational changes to
          restaurants, fitness centers, and other amenities offered. Lastly,
          touch upon how the rewards program can be modified to appeal to young
          adults. The C-Suite (judges) will begin the role-play by greeting you
          and asking to hear your ideas. After you have presented your
          strategies and have answered their questions, the C-Suite (judges)
          will conclude by thanking you for your work and breaking character to
          go into feedback/discussion of the case.
        </div>
      </Section>
      {/* Required deliverables */}
      <Section sx={{ justifyContent: "left" }}>
        <h1 style={{ marginBottom: 10 }}>Case Deliverables:</h1>
        <div>
          - Identify company’s unique value proposition.
          <br />
          - Identify steps and information needed prior to implementing a
          marketing campaign.
          <br />
          - Explain trends in the industry pertaining to teenagers and young
          adults.
          <br />
          - Conduct a cost-benefit analysis on potential amenities and offerings
          at hotel restaurants, spa, fitness centers, and other operations
          <br />- Identify potential pain-points of the current rewards program
          for young adult club members.
        </div>
      </Section>
      {/* Pointer to the solution walkthrough */}
      <Section sx={{ justifyContent: "left" }}>
        <h1 style={{ marginBottom: 10 }}>Case Solution:</h1>
        <div>
          The solution and detailed breakdown of this case, along with why case
          studies are used in interviews, case techniques/frameworks, and
          general business topics will be reviewed at:
        </div>
      </Section>
      {/* Workshop event details */}
      <Section>
        <h1 style={{ marginBottom: 10 }}>Case Study Workshop - Business 101</h1>
        <h1 style={{ marginBottom: 10 }}>February 13th, 2020 | 4-6PM</h1>
        <h1 style={{ marginBottom: 10 }}>
          UCI Student Center | <NAME>
        </h1>
      </Section>
    </Fade>
  </CoverLayout>
)
|
<reponame>swishcloud/goblog
package internal
import (
"log"
"github.com/swishcloud/gostudy/logger"
)
const (
	// Commonly used time layouts (Go reference-time format strings).
	TimeLayout1             = "2006-01-02 15:04"
	TimeLayout2             = "15:04:05"
	TimeLayoutMysqlDateTime = "2006-01-02 15:04:05"
)

// Logger is the shared application logger and LoggerWriter the concurrent
// file writer behind it; both are expected to be initialized elsewhere.
var Logger *log.Logger
var LoggerWriter *logger.FileConcurrentWriter
|
import { OnDestroy, ɵmarkDirty as markDirty } from '@angular/core';
import { untilDestroyed } from '@ngneat/until-destroy';
import { from, Observable, ReplaySubject, Subject } from 'rxjs';
import { mergeMap, tap, switchMap, startWith, filter } from 'rxjs/operators';
// Object whose values are Observables of the corresponding property types.
type ObservableDictionary<T> = {
  [P in keyof T]: Observable<T[P]>;
};

// Object whose values are Subjects of the corresponding property types.
type SubjectDictionary<T> = {
  [P in keyof T]: Subject<T[P]>;
};

// Per-key "still loading" flags.
type LoadingDictionary<T> = {
  [P in keyof T]: boolean;
};

// Forbids the listed keys from appearing on a type; used to keep source
// keys from colliding with the utility members of the sink.
type Impossible<K extends keyof any> = {
  [P in K]?: never;
};

// Utility members mixed into the state sink: per-key subjects ($), per-key
// loading flags, and a reload trigger (reloads every key when called
// without an argument).
interface ConnectStateUtility<T = {}> {
  $: SubjectDictionary<T>,
  loading: LoadingDictionary<T>,
  reload: (keyToReload?: keyof T) => void,
};
/**
 * Create a dynamic state sink based on an observable-based definition.
 *
 * Each key of `sources` becomes a property on the returned sink that is
 * populated from the corresponding observable; `sink.$[key]` replays the
 * latest value, `sink.loading[key]` tracks whether a value has arrived, and
 * `sink.reload(key?)` re-subscribes one key (or, with no argument, all of
 * them). `markDirty` is invoked on every emission to trigger change
 * detection; subscriptions end when the component is destroyed.
 *
 * @param component component instance, most likely just "this"
 * @param sources object with observable values - make sure to avoid special key values "$", "reload" and "loading"
 */
export function connectState<C extends OnDestroy, T>(
  component: C,
  sources: ObservableDictionary<T & Impossible<keyof ConnectStateUtility>>,
) {
  const sourceKeys = Object.keys(sources) as (keyof T)[];
  // Emits a key for a single-key reload, or null/undefined for a full reload.
  const reload$ = new Subject<keyof T | null>();
  const sink: T & ConnectStateUtility<T> = {
    ...{} as T,
    $: {} as SubjectDictionary<T>,
    loading: {} as LoadingDictionary<T>,
    reload: (keyToReload?: keyof T) => reload$.next(keyToReload),
  };
  // Reset the subject/loading state (and clear the cached value) for one key
  // or for every key.
  const reload = (singleKey?: keyof T) => {
    for (const key of (singleKey ? [ singleKey ] : sourceKeys)) {
      sink.$[key] = new ReplaySubject<any>(1);
      sink.loading[key] = true;
      delete sink[key];
    }
  }
  // One inner pipeline per source key; a matching reload$ emission resets
  // that key and re-subscribes its source (startWith(null) triggers the
  // initial subscription).
  const updateSink$ = from(sourceKeys).pipe(
    mergeMap((sourceKey: keyof T) => {
      const source$ = sources[sourceKey];
      return reload$.pipe(
        filter(keyToLoad => keyToLoad === sourceKey),
        tap(() => reload(sourceKey)),
        startWith(null),
        switchMap(() => source$),
      ).pipe(
        tap((sinkValue: any) => {
          sink.loading[sourceKey] = false;
          sink.$[sourceKey].next(sinkValue);
          sink[sourceKey] = sinkValue;
        }),
      );
    }),
  );
  // Full reloads (no key) tear down and rebuild the whole pipeline via
  // switchMap; per-key reloads are handled inside updateSink$ above.
  reload$.pipe(
    filter(keyToReload => !keyToReload),
    startWith(null),
    switchMap(() => {
      reload();
      return updateSink$;
    }),
    untilDestroyed(component)
  ).subscribe(() => {
    try {
      markDirty(component);
    } catch (err) {
      // this can error when observables run before component
      // is fully initialized
    }
  });
  return sink;
}
|
#!/bin/bash

# Remove every cloned package checkout, then the REQUIRE file.
for checkout in \
    PDESolver PDESolver.jl ODLCommonTools SummationByParts PumiInterface \
    ArrayViews BinDeps Compat FactCheck MPI NamedArrays SHA URIParser \
    NaNMath Calculus DualNumbers ForwardDiff Debug
do
    rm -rf "./$checkout"
done
#rm -rf ./PETSc
rm -v ./REQUIRE
|
import argparse
import os
import requests
import threading
def download_file(url, target_directory):
    """Download ``url`` into ``target_directory``, streaming in 1 KiB chunks.

    The local file name is the last path component of the URL.

    Raises ``requests.HTTPError`` on a non-2xx response instead of silently
    writing an HTML error page to disk (the original wrote whatever body the
    server returned).
    """
    filename = url.split('/')[-1]
    filepath = os.path.join(target_directory, filename)
    response = requests.get(url, stream=True)
    response.raise_for_status()  # fail loudly on HTTP errors
    with open(filepath, 'wb') as file:
        for chunk in response.iter_content(chunk_size=1024):
            if chunk:  # skip keep-alive chunks
                file.write(chunk)
def main():
    """Download every dataset URL in parallel, then write a semaphore file.

    Each URL is downloaded exactly once; a semaphore bounds the number of
    simultaneous downloads to ``--n-download-threads``. (The original spawned
    that many threads *per URL*, so several threads wrote the same target
    file concurrently.)
    """
    parser = argparse.ArgumentParser(description='Parallel Dataset Downloader')
    parser.add_argument(
        '--n-download-threads', metavar='INT', type=int,
        default=10,
        help="Number of parallel download threads, defaults to 10"
    )
    parser.add_argument(
        '--target-directory', metavar='DIR', type=str,
        default='/gfs/model_data',
        help="directory where the dataset should be downloaded"
    )
    parser.add_argument(
        '--semaphore-file', metavar='FILE', type=str,
        default='/gfs/.__success__',
        help="file path for semaphore signaling successful download"
    )
    args = parser.parse_args()
    urls = [  # Replace with actual URLs of the dataset files
        'http://example.com/dataset/file1',
        'http://example.com/dataset/file2',
        # Add more URLs as needed
    ]
    # Bound the number of downloads running at the same time.
    limiter = threading.Semaphore(args.n_download_threads)

    def bounded_download(url):
        # Each worker holds one semaphore slot for the whole download.
        with limiter:
            download_file(url, args.target_directory)

    threads = []
    for url in urls:  # one thread per URL -- every file is fetched once
        thread = threading.Thread(target=bounded_download, args=(url,))
        threads.append(thread)
        thread.start()
    for thread in threads:
        thread.join()
    # Create semaphore file to signal successful download
    with open(args.semaphore_file, 'w') as file:
        file.write('Download successful')
# Run the downloader only when executed as a script, not on import.
if __name__ == "__main__":
    main()
import numpy as np
import pyamg
from scipy import sparse
from fem.fem import FiniteElement
from linsolver import sparse_solver
class PossionBlending():
    """Poisson image blending (seamless cloning) of ``source`` into ``target``.

    The blend region Omega is given by the non-zero pixels of ``mask``;
    ``mask_offset`` translates mask/source coordinates into target
    coordinates.  Three solvers are provided: finite differences
    (``fdm_solver``), finite elements (``fem_solver``) and a plain pixel
    copy (``direct_solver``).
    """

    def __init__(self, source, mask, target, mask_offset, slow_solver=False):
        # source/target: H x W x C images; mask: H x W (or H x W x C) array.
        self.source = source
        self.mask = mask
        self.target = target
        self.mask_offset = mask_offset
        # True -> use the hand-written Gauss-Seidel solver instead of pyamg.
        self.slow_solver = slow_solver

    def judge_border(self, p):
        """Return True if mask point ``p`` has an 8-neighbor outside the mask."""
        x, y = p
        if not self.mask[x, y]:
            return False
        row, col = self.mask.shape
        for i in range(x - 1, x + 2):
            for j in range(y - 1, y + 2):
                if (i >= 0 and i < row) and (j >= 0 and j < col) and ((i, j) != (x, y)):
                    if not self.mask[i, j]:
                        return True
        return False

    def find_points_in_mask(self):
        """Collect all mask points and flag those lying on the mask border.

        Normalizes ``self.mask`` to a 2-D boolean-like array, fills
        ``self.point_indexes`` (all points in Omega, border included) and
        ``self.border_judge`` (parallel list of border flags).
        """
        if len(self.mask.shape) == 3:
            self.mask = self.mask[:, :, 0]
        # ``np.int`` was removed in NumPy 1.24; the builtin ``int`` is the
        # documented replacement and behaves identically here.
        self.mask = self.mask.astype(int) / 255
        nonzero = np.nonzero(self.mask)
        self.mask[self.mask == 0] = False
        self.mask[self.mask != False] = True
        # record all points in Omega and Omega border
        self.point_indexes = list(zip(nonzero[0], nonzero[1]))
        # find border points
        self.border_judge = [False] * len(self.point_indexes)
        for i, p in enumerate(self.point_indexes):
            if self.judge_border(p):
                self.border_judge[i] = True

    def laplace_stencil(self, coord, channel):
        """Discrete 5-point Laplacian of ``source`` at ``coord`` for one channel."""
        i, j = coord
        source = self.source[:, :, channel]
        val = (4 * source[i, j]) \
            - (1 * source[i + 1, j]) \
            - (1 * source[i - 1, j]) \
            - (1 * source[i, j + 1]) \
            - (1 * source[i, j - 1])
        return val

    def fdm_solver(self):
        """Solve the Poisson equation with finite differences.

        Builds one sparse linear system A x = b per color channel: border
        points are pinned to the target values, interior points use the
        5-point Laplacian of the source. Returns the blended target image.
        """
        print('- FDM solver.')
        self.find_points_in_mask()
        target_rst = np.copy(self.target)
        # Map each mask point to its row index once, instead of the original
        # O(n) list.index() lookup per neighbor (accidental O(n^2)).
        index_of = {p: i for i, p in enumerate(self.point_indexes)}
        A_rows = []
        A_cols = []
        A_data = []
        for i, p in enumerate(self.point_indexes):
            x, y = p
            if self.border_judge[i]:
                # Dirichlet condition: identity row for border points.
                A_rows.append(i)
                A_cols.append(i)
                A_data.append(1)
            else:
                A_rows.append(i)
                A_cols.append(i)
                A_data.append(4)
                for p_adj in [(x - 1, y), (x + 1, y), (x, y - 1), (x, y + 1)]:
                    j = index_of[p_adj]
                    A_rows.append(i)
                    A_cols.append(j)
                    A_data.append(-1)
        A = sparse.coo_matrix((A_data, (A_rows, A_cols))).tocsr()
        # 3 channels
        for ch in range(self.target.shape[2]):
            b = np.zeros(len(self.point_indexes))
            for i, p in enumerate(self.point_indexes):
                x, y = p
                if self.border_judge[i]:
                    b[i] = self.target[x + self.mask_offset[0], y + self.mask_offset[1], ch]
                else:
                    b[i] = self.laplace_stencil(p, ch)
            print('Solving...')
            if not self.slow_solver:
                X = pyamg.solve(A, b, verb=False, tol=1e-10)
            else:
                X = sparse_solver.sparse_gauss_seidel([A_rows, A_cols, A_data], b, max_iter_time=20000,
                                                      sparse_input=True)
            print("End one channel.")
            # Clamp to the valid pixel range before casting back.
            X[X > 255] = 255
            X[X < 0] = 0
            X = np.array(X, self.target.dtype)
            for i, p in enumerate(self.point_indexes):
                x, y = p
                target_rst[x + self.mask_offset[0], y + self.mask_offset[1], ch] = X[i]
        return target_rst

    def fem_solver(self):
        """Solve the Poisson equation with finite elements.

        The FEM object is built once and re-used for the remaining channels
        with updated border values and right-hand side.
        """
        print('- FEM solver.')
        self.find_points_in_mask()
        target_rst = np.copy(self.target)
        # 3 channels
        fem = None
        for ch in range(self.target.shape[2]):
            border_values = []
            rhs_dict = dict()
            for i, p in enumerate(self.point_indexes):
                x, y = p
                rhs_dict[tuple(p)] = self.laplace_stencil(p, ch)
                if self.border_judge[i]:
                    border_values.append((i, self.target[x + self.mask_offset[0], y + self.mask_offset[1], ch]))
            print('Solving...')
            if not fem:
                fem = FiniteElement(self.point_indexes, border_values, A=np.array([[1, 0], [0, 1]]), func=rhs_dict, q=0,
                                    slow_solver=self.slow_solver)
            else:
                fem.update_border_and_func(border_values, rhs_dict)
            fem.solve()
            X = fem.solution
            print("End one channel.")
            # Clamp to the valid pixel range before casting back.
            X[X > 255] = 255
            X[X < 0] = 0
            X = np.array(X, self.target.dtype)
            for i, p in enumerate(self.point_indexes):
                x, y = p
                target_rst[x + self.mask_offset[0], y + self.mask_offset[1], ch] = X[i]
        return target_rst

    def direct_solver(self):
        """Copy the masked source pixels straight into the target (no blending)."""
        print('- Direct paste.')
        self.find_points_in_mask()
        target_rst = np.copy(self.target)
        # 3 channels
        for ch in range(self.target.shape[2]):
            for i, p in enumerate(self.point_indexes):
                x, y = p
                target_rst[x + self.mask_offset[0], y + self.mask_offset[1], ch] = self.source[x, y, ch]
        return target_rst
|
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.boldDown = void 0;
var boldDown = {
"viewBox": "0 0 20 20",
"children": [{
"name": "path",
"attribs": {
"d": "M2.5,10H6V3h8v7h3.5L10,17.5L2.5,10z"
}
}]
};
exports.boldDown = boldDown; |
// from https://i.redd.it/j0bsovt727i01.png
// Trumpet fingering table: each entry maps a note (name + octave) to its
// valve combination ('' = open, '12' = valves 1 and 2, ...). 'stage' is the
// learning level at which the note is introduced.
var notes_fingers = [ // note we add index via map at end of the array.
    {'stage': 3, 'note': 'B', 'octave': '5', 'fingering': ['2']},
    {'stage': 3, 'note': 'Bb', 'octave': '5', 'fingering': ['1']},
    {'stage': 3, 'note': 'A', 'octave': '5', 'fingering': ['12']},
    {'stage': 3, 'note': 'Ab', 'octave': '5', 'fingering': ['23']},
    {'stage': 3, 'note': 'G', 'octave': '5', 'fingering': ['']},
    {'stage': 3, 'note': 'F#', 'octave': '5', 'fingering': ['2']},
    {'stage': 2, 'note': 'F', 'octave': '5', 'fingering': ['1']},
    {'stage': 2, 'note': 'E', 'octave': '5', 'fingering': ['']},
    {'stage': 2, 'note': 'Eb', 'octave': '5', 'fingering': ['2']},
    {'stage': 2, 'note': 'D', 'octave': '5', 'fingering': ['1']},
    {'stage': 2, 'note': 'C#', 'octave': '5', 'fingering': ['12']},
    {'stage': 2, 'note': 'C', 'octave': '5', 'fingering': ['']},
    {'stage': 1, 'note': 'B', 'octave': '4', 'fingering': ['2']},
    {'stage': 1, 'note': 'Bb', 'octave': '4', 'fingering': ['1']},
    {'stage': 1, 'note': 'A', 'octave': '4', 'fingering': ['12']},
    {'stage': 1, 'note': 'Ab', 'octave': '4', 'fingering': ['23']},
    {'stage': 0, 'note': 'G', 'octave': '4', 'fingering': ['']},
    {'stage': 1, 'note': 'F#', 'octave': '4', 'fingering': ['2']},
    {'stage': 0, 'note': 'F', 'octave': '4', 'fingering': ['1']},
    {'stage': 0, 'note': 'E', 'octave': '4', 'fingering': ['12']},
    {'stage': 1, 'note': 'Eb', 'octave': '4', 'fingering': ['23']},
    {'stage': 0, 'note': 'D', 'octave': '4', 'fingering': ['13']},
    {'stage': 1, 'note': 'C#', 'octave': '4', 'fingering': ['123']},
    {'stage': 0, 'note': 'C', 'octave': '4', 'fingering': ['']},
    {'stage': 1, 'note': 'B', 'octave': '3', 'fingering': ['2']},
    {'stage': 2, 'note': 'Bb', 'octave': '3', 'fingering': ['1']},
    {'stage': 2, 'note': 'A', 'octave': '3', 'fingering': ['12']},
    {'stage': 2, 'note': 'Ab', 'octave': '3', 'fingering': ['23']},
    {'stage': 2, 'note': 'G', 'octave': '3', 'fingering': ['13']},
    {'stage': 2, 'note': 'F#', 'octave': '3', 'fingering': ['123']}
].map(function(el, index){
    // Give every entry a stable id equal to its array position.
    el['id'] = index;
    return el;
});
// Learning-mode identifiers; the string value doubles as the radio value.
var NOTES_TO_NAMES = 'NOTES_TO_NAMES', NOTES_TO_VALVES = 'NOTES_TO_VALVES', NAMES_TO_NOTES = 'NAMES_TO_NOTES', NOTES_TO_SOUND = 'NOTES_TO_SOUND';
// Human-readable labels for the currently enabled learning modes.
// The commented-out entries are modes that are not implemented yet.
const learn_type = {
    NOTES_TO_NAMES: 'music notation to note name',
    NOTES_TO_VALVES: 'music notation to valves',
    //NAMES_TO_NOTES: 'note name to musical notation',
    //NOTES_TO_SOUND: 'music note to sound',
}
const default_learn_type = NOTES_TO_NAMES;
/**
 * Render the learning-type and learning-level radio controls into
 * #testing_types_container, and invoke `setup_callback(type, level)` once
 * now and again whenever the user changes a selection.
 *
 * Bug fix: the learning-level radios previously received the stale `id`
 * variable (left over from the learning-type loop above) as their value;
 * they now receive the level key itself. Level selection kept working via
 * the data-learn_level attribute, but the value attribute was wrong.
 */
function setup_learn_params(setup_callback){
    // Group note names by stage: { level: ['B/5', 'Bb/5', ...], ... }.
    function _extract_learning_levels() {
        var levels = {};
        for(var note_finger of notes_fingers){
            var stage = note_finger.stage;
            if(!levels[stage]) levels[stage] = [];
            var note = note_finger.note + '/' + note_finger.octave;
            if(levels[stage].indexOf(note)===-1) levels[stage].push(note);
        }
        return levels;
    }
    // Build the HTML for a single bootstrap form-check radio option.
    function gen_option(nam, select_type, info, checked, data){
        if(!data) data = '';
        return '    <div class="form-check">\n' +
            '        <input class="form-check-input" type="radio" '+ data + ' name="'+select_type+'" '+checked+' value="'+nam+'"\n>\n' +
            '        <label class="form-check-label" for="type_1">\n' +
            '            '+ info +'\n' +
            '        </label>\n' +
            '    </div>'
    }
    var testing_types_container = $('#testing_types_container');
    $(testing_types_container).append('<b>Learning type</b>');
    for(var id in learn_type){
        var checked = id===NOTES_TO_NAMES?'checked':'';
        $(testing_types_container).append(gen_option(id, 'learning_type', learn_type[id], checked));
    }
    var learning_levels = _extract_learning_levels();
    $(testing_types_container).append('<div class="mt-3"><b>Learning level (lower options include notes from above options)</b></div>');
    var first = true;
    for(var key in learning_levels){
        var checked = first?'checked':'';
        var info = 'level ' + key + ': ' + learning_levels[key].join(', ');
        var data = 'data-learn_level="'+key+'"';
        // Pass `key` (the level) as the radio value -- not the stale `id`
        // left over from the learning-type loop above.
        $(testing_types_container).append(gen_option(key, 'learning_level', info, checked, data));
        first = false;
    }
    // Re-run the callback whenever either radio group changes.
    $("input[name='learning_type']").click(function(){
        refresh_everything();
    });
    $("input[name='learning_level']").click(function(){
        refresh_everything();
    });
    function refresh_everything(){
        var learning_type = $("input[name='learning_type']:checked").val();
        var learning_level = parseInt($("input[name='learning_level']:checked").data('learn_level'));
        setup_callback(learning_type, learning_level);
    }
    // Initial invocation with the default selections.
    refresh_everything();
}
/**
 * Build the practice vocabulary: every note whose stage is at or below
 * `level`, each tagged with the requested `learn_type`.
 *
 * Returns fresh objects (shallow copies) so the shared module-level
 * `notes_fingers` table is no longer mutated in place; each returned entry
 * still carries all original fields (stage, note, octave, fingering, id).
 */
function generate_vocab(learn_type, level){
    return notes_fingers.filter(function(el){
        return el.stage <= level;
    }).map(function(el){
        var entry = Object.assign({}, el);
        entry['learn_type'] = learn_type;
        return entry;
    });
}
/*
* MIT License
*
* Copyright (c) 2020-present Cloudogu GmbH and Contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package com.cloudogu.scm.commitmessagechecker.config;
import com.cloudogu.scm.commitmessagechecker.AvailableValidators;
import com.cloudogu.scm.commitmessagechecker.Context;
import com.cloudogu.scm.commitmessagechecker.Validator;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.Setter;
import org.apache.shiro.subject.Subject;
import org.apache.shiro.util.ThreadContext;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
import sonia.scm.api.v2.resources.ScmPathInfoStore;
import sonia.scm.repository.Repository;
import sonia.scm.repository.RepositoryTestData;
import javax.validation.ConstraintViolationException;
import java.net.URI;
import java.util.Collections;
import java.util.Optional;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.lenient;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
/**
 * Tests for the commit-message-checker {@code ConfigurationMapper}:
 * entity/DTO mapping for global and repository-scoped configurations,
 * permission-dependent HAL link creation, and JSON (de)serialization of
 * validator configurations.
 */
@ExtendWith(MockitoExtension.class)
class ConfigurationMapperTest {
  private static final Repository REPOSITORY = RepositoryTestData.createHeartOfGold();
  @Mock
  private AvailableValidators availableValidators;
  @Mock
  private ConfigurationValidator configurationValidator;
  @Mock
  private ScmPathInfoStore scmPathInfoStore;
  @Mock
  private Subject subject;
  @InjectMocks
  private ConfigurationMapperImpl mapper;
  @BeforeEach
  void setUp() {
    // Bind the mocked Shiro subject so permission checks are controllable.
    ThreadContext.bind(subject);
    // lenient(): not every test triggers link creation reading the path info.
    lenient().when(scmPathInfoStore.get()).thenReturn(() -> URI.create("scm/"));
  }
  @AfterEach
  void tearDown() {
    // Unbind so the mocked subject does not leak into other test classes.
    ThreadContext.unbindSubject();
  }
  // Write permission => self, update and availableValidators links.
  @Test
  void shouldMapGlobalConfigWithLinks() {
    when(subject.isPermitted("configuration:write:commitMessageChecker")).thenReturn(true);
    GlobalConfiguration globalConfiguration = new GlobalConfiguration(true, Collections.emptyList(), true);
    GlobalConfigurationDto dto = mapper.map(globalConfiguration);
    assertThat(dto.isDisableRepositoryConfiguration()).isTrue();
    assertThat(dto.isEnabled()).isTrue();
    assertThat(dto.getValidations()).isEmpty();
    assertThat(dto.getLinks().getLinkBy("self").get().getHref()).isEqualTo("scm/v2/commit-message-checker/configuration/");
    assertThat(dto.getLinks().getLinkBy("update").get().getHref()).isEqualTo("scm/v2/commit-message-checker/configuration/");
    assertThat(dto.getLinks().getLinkBy("availableValidators").get().getHref()).isEqualTo("scm/v2/commit-message-checker/configuration/validators");
  }
  // Read-only permission => no update link.
  @Test
  void shouldMapGlobalConfigWithoutUpdateLink() {
    when(subject.isPermitted("configuration:write:commitMessageChecker")).thenReturn(false);
    when(subject.isPermitted("configuration:read:commitMessageChecker")).thenReturn(true);
    GlobalConfiguration globalConfiguration = new GlobalConfiguration(true, Collections.emptyList(), true);
    GlobalConfigurationDto dto = mapper.map(globalConfiguration);
    assertThat(dto.isDisableRepositoryConfiguration()).isTrue();
    assertThat(dto.isEnabled()).isTrue();
    assertThat(dto.getValidations()).isEmpty();
    assertThat(dto.getLinks().getLinkBy("self").get().getHref()).isEqualTo("scm/v2/commit-message-checker/configuration/");
    assertThat(dto.getLinks().getLinkBy("availableValidators").get().getHref()).isEqualTo("scm/v2/commit-message-checker/configuration/validators");
    assertThat(dto.getLinks().getLinkBy("update")).isNotPresent();
  }
  // No permissions at all => only the self link remains.
  @Test
  void shouldMapGlobalConfigWithSelfLinkOnly() {
    GlobalConfiguration globalConfiguration = new GlobalConfiguration(true, Collections.emptyList(), true);
    GlobalConfigurationDto dto = mapper.map(globalConfiguration);
    assertThat(dto.isDisableRepositoryConfiguration()).isTrue();
    assertThat(dto.isEnabled()).isTrue();
    assertThat(dto.getValidations()).isEmpty();
    assertThat(dto.getLinks().getLinkBy("self").get().getHref()).isEqualTo("scm/v2/commit-message-checker/configuration/");
    assertThat(dto.getLinks().getLinkBy("availableValidators")).isNotPresent();
    assertThat(dto.getLinks().getLinkBy("update")).isNotPresent();
  }
  // DTO -> entity mapping: an empty DTO maps to default (false/empty) values.
  @Test
  void shouldMapGlobalConfigDto() {
    GlobalConfigurationDto dto = new GlobalConfigurationDto();
    GlobalConfiguration globalConfiguration = mapper.map(dto);
    assertThat(globalConfiguration.isDisableRepositoryConfiguration()).isFalse();
    assertThat(globalConfiguration.isEnabled()).isFalse();
    assertThat(globalConfiguration.getValidations()).isNullOrEmpty();
  }
  // Repository-scoped config: write permission yields all three links.
  @Test
  void shouldMapRepoConfigWithLinks() {
    when(subject.isPermitted("repository:writeCommitMessageCheckerConfig:" + REPOSITORY.getId())).thenReturn(true);
    Configuration configuration = new Configuration(true, Collections.emptyList());
    ConfigurationDto dto = mapper.map(configuration, REPOSITORY);
    assertThat(dto.isEnabled()).isTrue();
    assertThat(dto.getValidations()).isEmpty();
    assertThat(dto.getLinks().getLinkBy("self").get().getHref()).isEqualTo("scm/v2/commit-message-checker/configuration/" + REPOSITORY.getNamespaceAndName());
    assertThat(dto.getLinks().getLinkBy("update").get().getHref()).isEqualTo("scm/v2/commit-message-checker/configuration/" + REPOSITORY.getNamespaceAndName());
    assertThat(dto.getLinks().getLinkBy("availableValidators").get().getHref()).isEqualTo("scm/v2/commit-message-checker/configuration/validators");
  }
  // Repository-scoped config: read-only permission omits the update link.
  @Test
  void shouldMapRepoConfigWithoutUpdateLink() {
    when(subject.isPermitted("repository:writeCommitMessageCheckerConfig:" + REPOSITORY.getId())).thenReturn(false);
    when(subject.isPermitted("repository:readCommitMessageCheckerConfig:" + REPOSITORY.getId())).thenReturn(true);
    Configuration configuration = new Configuration(true, Collections.emptyList());
    ConfigurationDto dto = mapper.map(configuration, REPOSITORY);
    assertThat(dto.isEnabled()).isTrue();
    assertThat(dto.getValidations()).isEmpty();
    assertThat(dto.getLinks().getLinkBy("self").get().getHref()).isEqualTo("scm/v2/commit-message-checker/configuration/" + REPOSITORY.getNamespaceAndName());
    assertThat(dto.getLinks().getLinkBy("availableValidators").get().getHref()).isEqualTo("scm/v2/commit-message-checker/configuration/validators");
    assertThat(dto.getLinks().getLinkBy("update")).isNotPresent();
  }
  // Repository-scoped config: no permissions => self link only.
  @Test
  void shouldMapRepoConfigWithSelfLinkOnly() {
    Configuration configuration = new Configuration(true, Collections.emptyList());
    ConfigurationDto dto = mapper.map(configuration, REPOSITORY);
    assertThat(dto.isEnabled()).isTrue();
    assertThat(dto.getValidations()).isEmpty();
    assertThat(dto.getLinks().getLinkBy("self").get().getHref()).isEqualTo("scm/v2/commit-message-checker/configuration/" + REPOSITORY.getNamespaceAndName());
    assertThat(dto.getLinks().getLinkBy("availableValidators")).isNotPresent();
    assertThat(dto.getLinks().getLinkBy("update")).isNotPresent();
  }
  // Empty repository DTO maps to default (false/empty) entity values.
  @Test
  void shouldMapRepoConfigDto() {
    ConfigurationDto dto = new ConfigurationDto();
    Configuration configuration = mapper.map(dto);
    assertThat(configuration.getValidations()).isNullOrEmpty();
    assertThat(configuration.isEnabled()).isFalse();
  }
  // A validation without configuration maps to a DTO carrying just the name.
  @Test
  void shouldMapValidation() {
    String validatorName = "mock";
    when(availableValidators.validatorFor(validatorName)).thenReturn(new SimpleValidator());
    Validation validation = new Validation(validatorName);
    ValidationDto dto = mapper.map(validation);
    assertThat(dto.getName()).isEqualTo(validatorName);
  }
  // A configured validation serializes its config object to JSON in the DTO.
  @Test
  void shouldMapComplexValidation() {
    String validatorName = "complex";
    when(availableValidators.validatorFor(validatorName)).thenReturn(new ConfiguredValidator());
    Validation validation = new Validation(validatorName);
    validation.setConfiguration(new ConfiguredValidatorConfig("abc", "master,develop"));
    ValidationDto dto = mapper.map(validation);
    assertThat(dto.getName()).isEqualTo(validatorName);
    assertThat(dto.getConfiguration()).hasToString("{\"pattern\":\"abc\",\"branches\":\"master,develop\"}");
  }
  // DTO -> entity for a validator without configuration.
  @Test
  void shouldMapValidationDto() {
    String validatorName = "mock";
    when(availableValidators.validatorFor(validatorName)).thenReturn(new SimpleValidator());
    ValidationDto dto = new ValidationDto();
    dto.setName(validatorName);
    Validation validation = mapper.map(dto);
    assertThat(validation.getName()).isEqualTo(dto.getName());
  }
  // DTO -> entity: JSON config is deserialized into the validator's config
  // class and passed through the ConfigurationValidator.
  @Test
  void shouldMapComplexValidationDto() throws JsonProcessingException {
    String validatorName = "mock";
    when(availableValidators.validatorFor(validatorName)).thenReturn(new ConfiguredValidator());
    ValidationDto dto = new ValidationDto();
    dto.setName(validatorName);
    dto.setConfiguration(new ObjectMapper().readTree("{\"pattern\":\"abc\",\"branches\":\"master,develop\"}"));
    Validation validation = mapper.map(dto);
    verify(configurationValidator).validate(any());
    assertThat(validation.getName()).isEqualTo(dto.getName());
    assertThat(validation.getConfiguration()).hasFieldOrProperty("pattern");
    assertThat(validation.getConfiguration()).hasFieldOrProperty("branches");
  }
  // Constraint violations raised by the ConfigurationValidator propagate.
  @Test
  void shouldFailOnMapComplexValidationDto() throws JsonProcessingException {
    String validatorName = "mock";
    when(availableValidators.validatorFor(validatorName)).thenReturn(new ConfiguredValidator());
    doThrow(new ConstraintViolationException("violations", null)).when(configurationValidator).validate(any());
    ValidationDto dto = new ValidationDto();
    dto.setName(validatorName);
    dto.setConfiguration(new ObjectMapper().readTree("{\"pattern\":\"abc\",\"branches\":\"master,develop\"}"));
    assertThrows(ConstraintViolationException.class, () -> mapper.map(dto));
  }
  // Test double: validator without a configuration type.
  static class SimpleValidator implements Validator {
    @Override
    public void validate(Context context, String commitMessage) {
    }
  }
  // Test double: validator that declares a configuration class.
  static class ConfiguredValidator implements Validator {
    @Override
    public Optional<Class<?>> getConfigurationType() {
      return Optional.of(ConfiguredValidatorConfig.class);
    }
    @Override
    public void validate(Context context, String commitMessage) {
      // Do nothing
    }
  }
  // Simple config bean used by ConfiguredValidator (Lombok generates
  // getters, setters and constructors).
  @Getter
  @Setter
  @NoArgsConstructor
  @AllArgsConstructor
  static class ConfiguredValidatorConfig {
    private String pattern;
    private String branches;
  }
}
|
// Return all employees older than 30, projecting only their first and last
// names (the _id field is included by default).
db.employees.find({
  age: { $gt: 30 },
},
{
  firstname: 1,
  lastname: 1
})
#!/bin/sh
#
# Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
# or more contributor license agreements. Licensed under the Elastic License;
# you may not use this file except in compliance with the Elastic License.
#
set -e
./check_env_variables.sh
# Positional parameters (with defaults): list id, page, page size, sort
# field, sort order and pagination cursor.
LIST_ID=${1-ip_list}
PAGE=${2-1}
PER_PAGE=${3-20}
SORT_FIELD=${4-value}
SORT_ORDER=${5-asc}
CURSOR=${6-invalid}
# Example: ./find_list_items_with_sort_cursor.sh ip_list 1 20 value asc <cursor>
# Quote the credentials so they survive word splitting when they contain
# spaces or shell metacharacters (previously they were unquoted).
curl -s -k \
 -u "${ELASTICSEARCH_USERNAME}:${ELASTICSEARCH_PASSWORD}" \
 -X GET "${KIBANA_URL}${SPACE_URL}/api/lists/items/_find?list_id=${LIST_ID}&page=${PAGE}&per_page=${PER_PAGE}&sort_field=${SORT_FIELD}&sort_order=${SORT_ORDER}&cursor=${CURSOR}" | jq .
|
<reponame>syncfusion/ej2-react
import { ComplexBase } from '@syncfusion/ej2-react-base';
import { AxisModel } from '@syncfusion/ej2-charts';
/**
 * `Axis` directive represents an axis row of the react Chart.
 * It must be contained in a Chart component (`ChartComponent`).
 * ```tsx
 * <ChartComponent>
 * <AxesDirective>
 * <AxisDirective></AxisDirective>
 * </AxesDirective>
 * </ChartComponent>
 * ```
 */
export class AxisDirective extends ComplexBase<AxisModel & { children?: React.ReactNode }, AxisModel> {
    // Name under which the parent chart registers this directive.
    public static moduleName: string = 'axis';
}
/**
 * `Axes` directive: container for `AxisDirective` children, mapped onto the
 * chart's `axes` property (see `propertyName` below).
 */
export class AxesDirective extends ComplexBase<{}, {}> {
    // Chart property this collection is bound to.
    public static propertyName: string = 'axes';
    public static moduleName: string = 'axes';
}
|
<gh_stars>10-100
/*******************************************************************************
Copyright (c) 2017, Honda Research Institute Europe GmbH
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. Neither the name of the copyright holder nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*******************************************************************************/
#include "Rcs_guiFactory.h"
#include "ForceControlGui.h"
#include "SliderCheck1Dof.h"
#include <Rcs_macros.h>
#include <QVBoxLayout>
// Factory method: builds the GUI entry list from the controller's tasks and
// requests a ForceControlGui instance through the Rcs GUI factory, which
// constructs the widget via threadFunction().
// NOTE(review): &guiEntries points at a stack-local vector handed to another
// thread; this is only safe if RcsGuiFactory_requestGUI blocks until the GUI
// has consumed it -- confirm against the factory's contract.
// NOTE(review): the 'title' and 'mutex' parameters are accepted but unused here.
Rcs::ForceControlGui* Rcs::ForceControlGui::create(const Rcs::ControllerBase* controller,
                                                   MatNd* ft_des,
                                                   MatNd* s_des,
                                                   MatNd* ft_curr,
                                                   const char* title,
                                                   pthread_mutex_t* mutex)
{
  std::vector<Rcs::ForceControlGui::Entry> guiEntries;
  Rcs::ForceControlGui::populateFTguiList(guiEntries, controller, ft_des, ft_curr, s_des);
  int handle = RcsGuiFactory_requestGUI(Rcs::ForceControlGui::threadFunction, &guiEntries);
  return (Rcs::ForceControlGui*) RcsGuiFactory_getPointer(handle);
}
// Returns the index of the controller task with the given name. If no task
// matches, RFATAL aborts the program, so the trailing 'return -1' is never
// reached in practice.
int Rcs::ForceControlGui::getTaskEntryByName(const Rcs::ControllerBase* controller, const char* name)
{
  for (size_t taskEntry = 0; taskEntry < controller->getNumberOfTasks(); taskEntry++)
  {
    if (STREQ(controller->getTaskName(taskEntry).c_str(), name))
    {
      // Implicit size_t -> int narrowing; harmless for realistic task counts.
      return taskEntry;
    }
  }
  RFATAL("\'%s\' Task not found", name);
  return -1;
}
// Builds one GUI entry per task dimension. Each entry stores raw pointers
// into the caller-owned MatNd arrays (desired force, measured force,
// activation), so the GUI operates on the controller's data in place.
// Slider range is fixed to [-20, 20], zero position 0, tick size 0.1.
void Rcs::ForceControlGui::populateFTguiList(std::vector<Rcs::ForceControlGui::Entry>& guiEntries, const Rcs::ControllerBase* controller, MatNd* ft_des, MatNd* ft_task, MatNd* s)
{
  for (size_t id = 0; id < controller->getNumberOfTasks(); id++)
  {
    const Rcs::Task* tsk = controller->getTask(id);
    // Offset of this task's elements in the stacked task-space arrays.
    size_t curr_index = controller->getTaskArrayIndex(id);
    for (size_t i = 0; i < tsk->getDim(); i++)
    {
      const Rcs::Task::Parameters& param = tsk->getParameter(i);
      guiEntries.push_back(
        Rcs::ForceControlGui::Entry(&ft_des->ele[curr_index+i], &ft_task->ele[curr_index+i], &s->ele[curr_index+i], param.name.c_str(), -20.0, 0.0, 20.0, 0.1));
    }
  }
}
// GUI-factory entry point: 'arg' must be a std::vector<ForceControlGui::Entry>*.
// Constructs the widget, shows it and returns the pointer so the factory can
// hand it back to the caller of create().
void* Rcs::ForceControlGui::threadFunction(void* arg)
{
  RCHECK(arg);
  Rcs::ForceControlGui* gui = new Rcs::ForceControlGui((std::vector<ForceControlGui::Entry>*) arg);
  // widget->move(600,0);
  gui->show();
  return gui;
}
// Constructs the scroll-area widget: one SliderCheck1Dof row per entry,
// stacked in a vertical layout inside a resizable scroll widget.
Rcs::ForceControlGui::ForceControlGui(std::vector<Entry>* entries):
  QScrollArea()
{
  QString windowTitle("RCS ForceControl Viewer GUI");
  setWindowTitle(windowTitle);
  // The layout for the overall mp widget
  QVBoxLayout* main_layout = new QVBoxLayout();
  main_layout->setMargin(3);
  main_layout->setSpacing(1);
  // One slider row per entry; the widgets keep the raw pointers from the
  // entry and therefore read/write the controller arrays directly.
  for (std::vector<Entry>::iterator it = entries->begin(); it != entries->end(); ++it)
  {
    RLOG(2, "Adding entry %s", it->name);
    SliderCheck1Dof* widget = new SliderCheck1Dof(it->q_des, it->q_curr, it->active, it->name, it->lowerBound, it->zeroPos, it->upperBound, it->ticSize);
    main_layout->addWidget(widget);
  }
  //BOOST_FOREACH(ForceControlGui::Entry entry, *entries)
  //{
  //  RLOG(2, "Adding entry %s", entry.name);
  //  SliderCheck1Dof* widget = new SliderCheck1Dof(entry.q_des, entry.q_curr, entry.active, entry.name, entry.lowerBound, entry.zeroPos, entry.upperBound, entry.ticSize);
  //  main_layout->addWidget(widget);
  //}
  main_layout->addStretch();
  QWidget* scroll_widget = new QWidget(this);
  scroll_widget->setLayout(main_layout);
  this->setWidget(scroll_widget);
  this->setWidgetResizable(true);
  this->resize(650, 300);
  RLOG(5, "ForceControlGui generated");
}
// Nothing to release explicitly here.
Rcs::ForceControlGui::~ForceControlGui()
{
}
|
# Scaffold a themed styled-component at src/components/<Name>/<Name>.js.
# Usage: ./script <ComponentName>
if [ -z "$1" ]; then
  echo "usage: $0 <ComponentName>" >&2
  exit 1
fi
# Create the component directory first -- the original redirect failed when
# src/components/<Name>/ did not exist yet.
mkdir -p "src/components/$1"
echo "import React from 'react';
import './$1.css';
import styled, { ThemeProvider } from 'styled-components';
// import theme from '../somewhere';
const theme = {}; // delete this if you're importing the theme or don't need a theme at all
const $1 = styled.div\`
\`
const Themed$1 = props => (
  <ThemeProvider theme={theme}>
    <$1 />
  </ThemeProvider>
);
const $1PropTypes = {};
$1.propTypes = $1PropTypes;
Themed$1.propTypes = $1PropTypes;
export default Themed$1;" > "src/components/$1/$1.js"
<gh_stars>0
package modell.raetsel;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.Writer;
import java.nio.charset.StandardCharsets;
import java.nio.file.FileSystems;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.List;
/**
* Diese Klasse symbolisiert ein Memento. Sie verwaltet die Speicherung und das
* Abrufen der Raetsel sowie deren Zustaende und erstellt eine
* Sicherungs-Textdatei.
* @author janne
*
*/
public class Memento {
private List<String> memento;
private int abschlussStufe = 0;
public Memento() {
liesMementoDatei();
}
public int gibStufenSicherung() {
this.liesMementoDatei();
return this.abschlussStufe;
}
/** Löscht Daten von Memento: überschreibt textdatei, resettet Memento.
*
*/
public void loesche() {
Writer fw = null;
try {
fw = new FileWriter("Resources/Sicherung/Sicherung.txt");
} catch (IOException e) {
new praesentation.FehlerDialog("Sicherung konnte nicht gelöscht werden.");
} finally {
if (fw != null) {
try {
fw.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
this.memento = new ArrayList<String>();
}
/**
* Quelle: http://openbook.rheinwerk-verlag.de/javainsel9/
* javainsel_17_001.htm#mj87f7ea8c7b8051417049399df2c5782a
* Erstellt eine Textdatei, welche den aktuellen Spielstand, sprich die höchste
* geloeste Stufe und den Raetselnamen enthaelt.
* @return True, wenn die Datei erfolgreich erstellt wurde.
*/
public boolean erstelleMementoDatei(Raetsel raetsel) {
Writer fw = null;
liesMementoDatei();
if (istNeu(raetsel.gibName())) {
memento.add(raetsel.gibName());
}
try {
fw = new FileWriter("Resources/Sicherung/Sicherung.txt");
if (raetsel.gibStufe() > this.abschlussStufe) {
fw.write(raetsel.gibStufe() + "\n");
} else {
fw.write(abschlussStufe + "\n");
}
fw.write("##\n");
for (int i = 0; i < memento.size(); i++) {
fw.write(memento.get(i) + "\n"); //alle bisher geloesten Raetselnamen
}
} catch (IOException e) {
new praesentation.FehlerDialog("Sicherung konnte nicht erstellt werden.");
} finally {
if (fw != null) {
try {
fw.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
return true;
}
private boolean istNeu(String name) {
boolean output = true;
for (String temp : memento) {
if (temp.equals(name)) {
output = false;
}
}
return output;
}
/** Überprüft, ob die Textdatei zur Sicherung der Informationen vorhanden ist
* und erzeugt diese wenn nicht.
* @return true, wenn die Datei nach ablauf der Methode existiert und auslesbar ist.
*/
private boolean pruefeTextdatei() {
File file = new File("Resources/Sicherung/Sicherung.txt");
if (!file.exists()) {
String fileName = "Resources/Sicherung/Sicherung.txt";
String encoding = "UTF-8";
try {
PrintWriter writer = new PrintWriter(fileName, encoding);
writer.println("0");
writer.println("##");
writer.close();
} catch (IOException e) {
new praesentation.FehlerDialog("Fehler beim Erstellen. Ist der Ordner entpackt?");
}
}
if (file.isFile() && file.canWrite() && file.canRead()) {
return true;
}
return false;
}
/**
* Liest die Memento-Textdatei und schreibt den Inhalt in eine String-Liste.
* Erstellt außerdem eine Liste nur mit den Namen der bisher geloesten Raetsel.
*/
private void liesMementoDatei() {
if (!pruefeTextdatei()) {
new praesentation.FehlerDialog("Die Sicherungsdatei ist nicht erstellbar.");
}
try {
memento = extrahiere(Files.readAllLines(
FileSystems.getDefault().getPath("Resources/Sicherung/Sicherung.txt"),
StandardCharsets.UTF_8));
} catch (IOException e) {
e.printStackTrace();
}
if (memento != null && !memento.isEmpty()) {
this.abschlussStufe = Integer.parseInt(memento.get(0));
}
memento.remove(0);
}
private List<String> extrahiere(List<String> input) {
List<String> output = new ArrayList<String>();
for (String temp : input) {
if (!temp.equals("##")) {
output.add(temp);
}
}
return output;
}
/**
 * Re-reads the backup file and returns the names of the puzzles solved so
 * far (the completion-level line is already stripped by liesMementoDatei).
 *
 * @return the internal list of solved puzzle names (not a defensive copy)
 */
public List<String> gibGeloesteRaetsel() {
liesMementoDatei();
return memento;
}
}
|
#!/bin/bash
# Save the most recent interceptty multilog output under a caller-supplied
# name. Argument 1: a path/name; only its basename is used for the target.
if [ $# -eq 0 ]; then
echo "<4>No file name specified"
exit 1
fi
# Atomic lock: mkdir fails if the lock directory already exists.
# NOTE(review): the lock is only removed at the end of a successful run, so a
# crash mid-run leaves it behind — confirm this is intended (no trap/cleanup).
if mkdir "/tmp/interceptty.lock"; then
echo "No interceptty-save service is already active"
else
echo "An interceptty-save service is already active"
exit 0
fi
dir="/tmp/interceptty/"
name="${1##*/}"
# Ask multilog to rotate (SIGALRM); inotifywait watches for the attribute
# change on "current" so we continue as soon as rotation happens, or after
# the 10 second timeout.
echo "Commanding multilog to rotate logs and waiting up to 10s..."
inotifywait -t 10 -e attrib /tmp/interceptty/current &>/dev/null &
pkill -SIGALRM -fx "/usr/bin/multilog .* /tmp/interceptty" &
wait
echo "...done"
unset -v latest
unset -v previousLatest
# Find the two newest rotated log files (multilog names rotated files "@...").
# [[ -nt ]] is true when the right-hand operand does not exist, so the first
# match initializes $latest.
for file in "$dir"*; do
if [[ $file == $dir@* ]]; then
[[ $file -nt $latest ]] && previousLatest=$latest && latest=$file
fi
done
# Remark: If this service was called before multilog has rotated logs at least once
# on its own, previousLatest will be empty and thus cat will only take latest.
echo "Concatenating $previousLatest $latest and saving as log file /mnt/data/interceptty/$name.log"
mkdir -p /mnt/data/interceptty
cat "$previousLatest" "$latest" > "/mnt/data/interceptty/$name.log"
rm -r "/tmp/interceptty.lock"
|
<gh_stars>0
// Re-export the Elasticsearch user API module as this module's default export.
export { default } from './user_es_api';
|
java -jar /Users/keerthinelaturu/Keerthi/source/smart-contracts/verificationTools/bip-to-nusmv.jar /Users/keerthinelaturu/Keerthi/source/smart-contracts/projectOutputs/ItemsStoragef07e82f0-76ee-beac-69db-d0e37574c1b7/ItemsStorage.bip /Users/keerthinelaturu/Keerthi/source/smart-contracts/projectOutputs/ItemsStoragef07e82f0-76ee-beac-69db-d0e37574c1b7/ItemsStorage.smv |
#!/usr/bin/env bash
# Install a collection of GIS applications on Ubuntu from direct .deb/.run
# downloads, the APT repositories, Flatpak and Snap.
# ----------------------------- SOURCES ---------------------------- #
URL_GOOGLE_EARTH_PRO="https://dl.google.com/dl/linux/direct/google-earth-pro-stable_7.3.2_amd64.deb"
URL_SPRING="http://www.dpi.inpe.br/spring/download/bin/linux/Spring5_5_5_Port_Ubuntu1604_x64.tar.gz"
URL_TERRAVIEW="http://www.dpi.inpe.br/terralib5/download/download.php?FileName=terralib-5.4.3-ubuntu-16.04.tar.gz"
URL_DRAFTSIGHT="http://dl-ak.solidworks.com/nonsecure/draftsight/2019SP0/draftSight.deb"
URL_GVSIG="http://downloads.gvsig.org/download/gvsig-desktop-testing/dists/2.4.0/builds/2850/gvSIG-desktop-2.4.0-2850-final-lin-x86_64-standard.run"
URL_GPSXSEE="https://download.opensuse.org/repositories/home:/tumic:/GPXSee/Debian_10/amd64/gpxsee_7.16_amd64.deb"
DIRETORIO_DOWNLOADS="$HOME/Downloads/GISonLinux"
# ------------------------------------------------------------------ #
# --------------------------- REQUIREMENTS ------------------------- #
## Remove any stale apt locks (-f: do not fail when a lock is absent) ##
sudo rm -f /var/lib/dpkg/lock-frontend
sudo rm -f /var/cache/apt/archives/lock
## Refresh the package index ##
sudo apt update -y
# ------------------------------------------------------------------ #
# ------------------------- PREREQUISITES -------------------------- #
sudo apt install snapd -y
sudo apt install flatpak -y # -y added: keep the whole script non-interactive
# ------------------------------------------------------------------ #
# --------------------- DOWNLOAD .DEB PACKAGES --------------------- #
mkdir -p "$DIRETORIO_DOWNLOADS" # -p: do not fail when it already exists
wget -c "$URL_GOOGLE_EARTH_PRO" -P "$DIRETORIO_DOWNLOADS"
wget -c "$URL_DRAFTSIGHT" -P "$DIRETORIO_DOWNLOADS"
wget -c "$URL_GVSIG" -P "$DIRETORIO_DOWNLOADS"
wget -c "$URL_GPSXSEE" -P "$DIRETORIO_DOWNLOADS"
## Install the .deb packages downloaded above ##
sudo dpkg -i "$DIRETORIO_DOWNLOADS"/*.deb
## Install the .run packages downloaded above ##
sudo chmod +x "$DIRETORIO_DOWNLOADS"/*.run
sudo -i "$DIRETORIO_DOWNLOADS"/*.run
## Install programs from the APT repositories ##
sudo apt install saga -y
sudo apt install grass -y
sudo apt install hugin -y
sudo apt install gpsprune -y
## Install Flatpak packages ##
flatpak install flathub org.qgis.qgis -y
flatpak install flathub org.octave.Octave -y
flatpak install flathub org.openstreetmap.josm -y
flatpak install flathub net.oz9aec.Gpredict -y
## Install Snap packages ##
sudo snap install openmapper-desktop
sudo snap install gnss-sdr-next
# ------------------------------------------------------------------ #
# ------------------------- POST-INSTALL --------------------------- #
## Final system update and clean-up (-y throughout: non-interactive) ##
sudo apt update && sudo apt upgrade -y && sudo apt dist-upgrade -y
flatpak update -y
sudo apt autoclean
sudo apt autoremove -y
# ------------------------------------------------------------------ #
|
#!/usr/bin/env bash
# Lint the whole project with HLint using the repository's hint file,
# suppressing 'Parse error' diagnostics (files HLint cannot parse).
hlint --hint .hlint.yaml . --ignore='Parse error'
|
module Promulgate
  module Utils
    module_function

    # Returns true when +url+ parses as an HTTP or HTTPS URI, false otherwise.
    # Bug fix: the rescue branch used to fall through and return nil; callers
    # expecting a strict boolean now always get true/false.
    def valid_url?(url)
      uri = URI.parse(url)
      uri.is_a?(URI::HTTP) || uri.is_a?(URI::HTTPS)
    rescue URI::InvalidURIError
      false
    end

    # Hex-encoded HMAC-SHA256 of +body+ keyed with +secret+
    # (e.g. for signing or verifying webhook payloads).
    def hmac(secret, body)
      digest = OpenSSL::Digest.new('sha256')
      OpenSSL::HMAC.hexdigest(digest, secret, body)
    end
  end
end
|
import preprocessor as p
import matplotlib.pyplot as plt
import re
import string
import numpy as np
from nltk.corpus import stopwords
import nltk
import matplotlib.pyplot as plt
from wordcloud import WordCloud, STOPWORDS
from datetime import datetime
from PIL import Image, ImageFont, ImageDraw
import tweepy
import config
import string
import os
import time
consumer_key = config.consumer_key
consumer_secret = config.consumer_secret
access_token = config.access_token
access_token_secret = config.access_token_secret
bearer_token = config.bearer_token
username = config.username
password = config.password
# nltk.download('stopwords')
# URL p.OPT.URL
# Mention p.OPT.MENTION
# Hashtag p.OPT.HASHTAG
# Reserved Words p.OPT.RESERVED
# Emoji p.OPT.EMOJI
# Smiley p.OPT.SMILEY
# Number p.OPT.NUMBER
def clean_text(text):
    """Normalize raw text for analysis.

    Lowercases the input, strips HTML tags, removes ASCII punctuation,
    and trims surrounding whitespace.

    Parameters:
        text: the raw text as a string value that needs to be cleaned

    Returns:
        The cleaned text as a string.
    """
    lowered = text.lower()
    # Tags must go before punctuation removal, or '<' and '>' would be
    # stripped and the tag regex could no longer match.
    without_tags = re.sub('<.*?>', '', lowered)
    without_punct = without_tags.translate(
        str.maketrans('', '', string.punctuation))
    return without_punct.strip()
def remove_whitespace(text):
    """Collapse every run of whitespace in *text* to a single space and trim the ends."""
    tokens = text.split()
    return " ".join(tokens)
def clean_tweets(tweet_text):
    """Strip URLs, @mentions and emoji with the tweet-preprocessor library,
    then drop literal '&amp;' entities and '\\n' escape artifacts from the API text.
    """
    p.set_options(p.OPT.URL, p.OPT.MENTION, p.OPT.EMOJI)
    stripped = p.clean(tweet_text)
    return stripped.replace('&amp;', "").replace('\\n', "")
def create_filename(root, ext):
    """Build a timestamped filename: *root* + current datetime + *ext*,
    with all whitespace, ':' and '.' characters removed from the stem.
    """
    stamped = root + str(datetime.now())
    stamped = remove_whitespace(stamped)
    for unwanted in (":", ".", " "):
        stamped = stamped.replace(unwanted, "")
    return stamped + ext
def delete_files():
    # Remove the intermediate artifacts of a run: the raw tweet text dump and
    # the generated word-cloud PNG. mfile_name_png is a module-level global
    # assigned later in this script, before delete_files() is called.
    os.remove("molegtoconst.txt")
    os.remove(mfile_name_png)
    # os.remove(mfile_name_jpg)
# Extra words to suppress in the word cloud on top of the standard stop words.
# Bug fix: a missing comma after 'dont' made Python concatenate the adjacent
# string literals into 'dontu', so neither 'dont' nor 'u' was filtered.
morewords = [
    'moleg', 'Missouri', 'make', 'whatever', 'say', 'self', 'defense',
    'morning', 'back', 'stand', 'says', 'ground', 'rt', 'will', 'one', 'now',
    'im', 'new', 'mo', 'dont',
    'u', 'thank', 'state'
]
STOPWORDS.update(morewords)
# English stop words from NLTK, normalized the same way as the tweet text.
stopwords_ls = list(set(stopwords.words("english")))
stopwords_ls = [clean_text(word) for word in stopwords_ls]
# Mask image and title font for the rendered word cloud.
mo_mask = np.array(Image.open('legislator.jpg'))
title_font = ImageFont.truetype('AllerDisplay.ttf', 65)
# Previously used accounts, kept for quick switching:
# user_id = "4591016128"
# mtitle_text = "<NAME> \n(@nickbschroer) \nlast 100 Tweets\n5-2-2022"
# user_id = "605754185"
# mtitle_text = "Koenig LikeddfdfTweets"
# user_id = "1604949372"
# mtitle_text = "<NAME>'s \n(@bobondermo) \nlast 100 Tweets\n5-2-2022"
# user_id = "27098871"
# mtitle_text = "<NAME>'s \n(@DLHoskins) \nliked Tweets\n5-2-2022"
user_id = "2352629311"
mtitle_text = "<NAME>'s \n(@Dhawyleymo) \nlast 100 tweets\n6-8-2022"
# MOLEG to CONST
# Fetch the user's recent tweets (max_results=25 despite the title's claim of
# 100 — NOTE(review): confirm which count is intended).
client = tweepy.Client(bearer_token=bearer_token)
response = client.get_users_tweets(user_id,
                                   tweet_fields=['created_at'],
                                   max_results=25)
tweets = response.data
# Clean each tweet and append it to the text dump consumed below.
for tweet in tweets:
    tweet_text = tweet.text
    tweet_clean_text = clean_tweets(tweet.text)
    tweet_created_at = tweet.created_at
    tweet_clean_text = clean_text(tweet_clean_text)
    print("moleg list")
    print(tweet_clean_text)
    print('\n')
    print(tweet_created_at)
    print('\n')
    print(
        '---------------------------------f-----------------------------------'
    )
    with open('molegtoconst.txt', 'a') as f:
        f.write(tweet_clean_text)
        f.write('\n')
    f.close()  # redundant: the with-statement already closed the file
####################################################################################################################
# Build the word cloud from the accumulated text dump.
text_file = open("molegtoconst.txt", "r")
data = text_file.read()
data = clean_tweets(data)
text = clean_text(data)  # double clean
cloud = WordCloud(scale=3,
                  max_words=100,
                  colormap='RdYlGn',
                  mask=mo_mask,
                  background_color='black',
                  stopwords=STOPWORDS,
                  collocations=True).generate_from_text(data)
plt.figure(figsize=(10, 8))
plt.imshow(cloud)
plt.axis('off')
mfile_name_png = create_filename("molegtoconst", ".png")
mfile_name_jpg = create_filename("molegtoconst", ".jpg")
cloud = cloud.to_file(mfile_name_png)
# Overlay the title text onto the rendered cloud and save the final JPEG.
my_image = Image.open(mfile_name_png)
image_editable = ImageDraw.Draw(my_image)
image_editable.text((15, 500), mtitle_text, (143, 24, 16), font=title_font)
my_image.save('UserTweets.jpg')
my_image.close()
text_file.close()
time.sleep(1)
delete_files()
|
#! /usr/bin/env bash
# Update every sibling repository: change to the parent of the directory
# containing this script, then run `git pull origin` in each subdirectory.
set -exu
cd "$(dirname "$(readlink -f "$0")")"/..
for k in */ ; do (
    # Subshell: a failing pull (set +e) doesn't abort the loop, and the cd
    # does not leak into the next iteration.
    set +e
    cd "$k" # quoted: directory names may contain spaces
    git pull origin
)
done
|
#!/bin/bash
#
# Script to generate test files.
EXIT_SUCCESS=0;
EXIT_FAILURE=1;
TEST_DIRECTORY="/tmp/test";
# Must be run from the plaso source tree root (where setup.py lives).
if ! test -x "setup.py";
then
echo "Unable to find: ./setup.py";
exit ${EXIT_FAILURE};
fi
rm -rf build/ dist/;
./setup.py -q sdist_test_data;
if test $? -ne ${EXIT_SUCCESS};
then
echo "Unable to run: ./setup.py sdist_test_data";
exit ${EXIT_FAILURE};
fi
# Name of the freshly built source distribution, path prefix stripped.
SDIST_PACKAGE=`ls -1 dist/plaso-*.tar.gz | head -n1 | sed 's?^dist/??'`;
if ! test "dist/${SDIST_PACKAGE}";
then
echo "Missing sdist package: dist/${SDIST_PACKAGE}";
exit ${EXIT_FAILURE};
fi
OLD_PWD=${PWD};
mkdir ${TEST_DIRECTORY};
if ! test -d "${TEST_DIRECTORY}";
then
echo "Missing test directory: ${TEST_DIRECTORY}";
exit ${EXIT_FAILURE};
fi
# Unpack the sdist into a scratch directory and work from there.
cp dist/${SDIST_PACKAGE} ${TEST_DIRECTORY};
cd ${TEST_DIRECTORY};
if ! test -f "${SDIST_PACKAGE}";
then
echo "Missing sdist package: ${SDIST_PACKAGE}";
exit ${EXIT_FAILURE};
fi
tar xfv ${SDIST_PACKAGE};
SOURCE_DIRECTORY=${SDIST_PACKAGE/.tar.gz/};
if ! test -d "./${SOURCE_DIRECTORY}";
then
echo "Missing source directory: ${SOURCE_DIRECTORY}";
exit ${EXIT_FAILURE};
fi
cp -rf ${SOURCE_DIRECTORY}/* .;
# Build psort_test.plaso: run log2timeline twice on the syslog sample (the
# second run with an explicit timezone), then apply the tagging analysis twice.
TEST_FILE="psort_test.plaso";
# Syslog does not contain a year we must pass preferred year to prevent the parser failing early on non-leap years.
PYTHONPATH=. python ./tools/log2timeline.py --buffer_size=300 --quiet --preferred_year 2012 --storage-file ${TEST_FILE} test_data/syslog;
PYTHONPATH=. python ./tools/log2timeline.py --quiet --timezone=Iceland --preferred_year 2012 --storage-file ${TEST_FILE} test_data/syslog;
cat > tagging.txt <<EOI
anacron1
body contains 'anacron'
exit1
body contains ' exit '
repeated
body contains 'last message repeated'
EOI
PYTHONPATH=. python ./tools/psort.py --analysis tagging --output-format=null --tagging-file=tagging.txt ${TEST_FILE};
# Run tagging twice.
cat > tagging.txt <<EOI
anacron2
body contains 'anacron'
exit2
body contains ' exit '
repeated
body contains 'last message repeated'
EOI
PYTHONPATH=. python ./tools/psort.py --analysis tagging --output-format=null --tagging-file=tagging.txt ${TEST_FILE};
mv ${TEST_FILE} ${OLD_PWD}/test_data/;
# Build pinfo_test.plaso from the TSK volume-system sample image.
TEST_FILE="pinfo_test.plaso";
PYTHONPATH=. python ./tools/log2timeline.py --partition=all --quiet --storage-file ${TEST_FILE} test_data/tsk_volume_system.raw;
mv ${TEST_FILE} ${OLD_PWD}/test_data/;
cd ${OLD_PWD};
rm -rf ${TEST_DIRECTORY};
|
// Orkestra form helper: wires jQuery Validate + ajaxSubmit onto a form,
// Bootstrap-style error display, and optional input formatting/picker
// enhancements. Exposes window.Orkestra.FormHelper.
;(function($) {
  var _defaults = {
    /**
     * Automatically submit the form after it is bound
     */
    submitOnBind: false,
    /**
     * The expected response dataType
     */
    dataType: 'json',
    /**
     * Called when a valid form is submitted.
     *
     * Return false to halt further event execution.
     */
    onSubmit: $.noop,
    /**
     * Called when a form returns a successful response.
     *
     * Takes one argument: response
     * Return false to halt further event execution.
     */
    onSuccess: $.noop,
    /**
     * Called when a form returns an error response.
     *
     * Takes three arguments: jqXHR, textStatus, errorThrown
     * Return false to halt further event execution.
     */
    onError: $.noop,
    /**
     * @see jQuery.validate.settings.showErrors
     *
     * Overrides the default, funky show/hide functionality.
     * If using this implementation, highlight and unhighlight are responsible
     * for showing and hiding elements, respectively.
     */
    showErrors: function() {
      var i, elements;
      for (i = 0; this.errorList[i]; i++) {
        var error = this.errorList[i];
        if (this.settings.highlight) {
          this.settings.highlight.call(this, error.element, this.settings.errorClass, this.settings.validClass);
        }
        this.showLabel(error.element, error.message);
      }
      if (this.settings.unhighlight) {
        for (i = 0, elements = this.validElements(); elements[i]; i++) {
          this.settings.unhighlight.call(this, elements[i], this.settings.errorClass, this.settings.validClass);
        }
      }
    },
    /**
     * @see jQuery.validate.settings.errorPlacement
     *
     * Stashes the help block's previous text so unhighlight can restore it.
     */
    errorPlacement: function (error, element) {
      var $helpBlock = $(element).closest('.form-group').find('.help-block');
      $helpBlock.data('last-text', $helpBlock.html());
      $helpBlock.html(error.text()).show();
    },
    /**
     * @see jQuery.validate.settings.highlight
     */
    highlight: function(element, errorClass, validClass) {
      // Fix: result was assigned to an unused local; keep only the
      // side-effecting chain (mark the group, reveal its help block).
      $(element).closest('.form-group').addClass(errorClass).find('.help-block').show();
    },
    /**
     * @see jQuery.validate.settings.unhighlight
     */
    unhighlight: function(element, errorClass, validClass) {
      var $helpBlock = $(element).closest('.form-group').removeClass(errorClass).find('.help-block');
      if ($helpBlock.data('last-text')) {
        $helpBlock.html($helpBlock.data('last-text'));
      } else {
        $helpBlock.text('').hide();
      }
    },
    errorClass: 'error has-error',
    validClass: ''
  };
  // Builds the submitHandler passed to jQuery Validate: user hooks run first
  // and may cancel the default ajaxSubmit by returning false.
  var _submitHandlerFactory = function(options) {
    var _chainHandlers = function(handlerA, handlerB) {
      return function() {
        if (false !== handlerA.apply(null, arguments)) {
          handlerB.apply(null, arguments);
        }
      };
    };
    var successHandler = _chainHandlers(options.onSuccess, Orkestra.response.successHandler);
    var errorHandler = _chainHandlers(options.onError, Orkestra.response.errorHandler);
    var defaultSubmitHandler = function(form) {
      $(form).ajaxSubmit({
        dataType: options.dataType,
        success: successHandler,
        error: errorHandler
      });
    };
    return _chainHandlers(options.onSubmit, defaultSubmitHandler);
  };
  // Attaches optional pickers/formatters to inputs by CSS class; every
  // enhancement is guarded so missing plugins are silently skipped.
  var _bindEnhancements = function(form) {
    var $form = $(form);
    // Date and time pickers
    if ($.isFunction($.fn.datetimepicker)) {
      $('input.date', $form).each(function (index,elem) {
        var data = $(elem).data() || {};
        $(elem).datetimepicker({
          pickTime : false,
          format : data['format'] || Orkestra.dateFormat || 'MM/DD/YY'
        });
      });
      $('input.timepicker', $form).each(function (index,elem) {
        var data = $(elem).data() || {};
        $(elem).datetimepicker({
          useSeconds : data['useSeconds'] || false,
          pickDate : false,
          minuteStepping : data['minuteStep'] || 1,
          format : data['format'] || Orkestra.timeFormat || 'hh:mm A'
        });
      });
      $('input.datetimepicker', $form).each(function (index,elem) {
        var data = $(elem).data() || {};
        $(elem).datetimepicker({
          useSeconds : data['useSeconds'] || false,
          format : data['format'] || ((Orkestra.dateFormat && Orkestra.timeFormat) ? (Orkestra.dateFormat + ' ' + Orkestra.timeFormat) : false) || 'MM/DD/YY hh:mm A'
        });
      });
    }
    // USA Currency
    if ($.isFunction($.fn.formatCurrency)) {
      // Currency
      $('input.currency', $form).formatCurrency(true);
    }
    // USA Telephone
    if ($.isFunction($.fn.formatPhoneNumber)) {
      // Telephone
      $('input.telephone', $form).formatPhoneNumber();
    }
    // USA SSN
    if ($.isFunction($.fn.formatSocial)) {
      $('input.social', $form).formatSocial();
    }
    // USA EIN
    if ($.isFunction($.fn.formatEin)) {
      $('input.ein', $form).formatEin();
    }
    // URL
    if ($.isFunction($.fn.formatUrl)) {
      $('input.url', $form).formatUrl();
    }
    // Integer
    if ($.isFunction($.fn.formatNumber)) {
      $('input.integer', $form).formatNumber({
        initValue: true
      });
    }
  };
  var helper = function() {
  };
  helper.prototype = $.extend(helper.prototype, {
    // Rebind validation on a form: clears any previous validator/submit
    // handler, installs jQuery Validate with merged options, applies input
    // enhancements, and optionally submits immediately.
    bind: function(form, options) {
      options = $.extend({}, _defaults, options);
      options.submitHandler = options.submitHandler || _submitHandlerFactory(options);
      $(form).data('validator', null).unbind('submit').validate(options);
      _bindEnhancements.call(this, form);
      if (true === options.submitOnBind) {
        $(function() {
          $(form).submit();
        });
      }
    },
    bindEnhancements: _bindEnhancements,
    setDefaults: function(options) {
      $.extend(_defaults, options);
    }
  });
  window.Orkestra = window.Orkestra || {};
  window.Orkestra.FormHelper = helper;
})(jQuery);
|
<filename>src/reader/test_cases/test_import_context.py
from django.test import TestCase
from reader.importer import TextImporter
class TestImportContext(TestCase):
    """Unit tests for TextImporter.ImportContext division-level bookkeeping."""

    def test_division_level(self):
        # A fresh context reports zero for any division level, and
        # increment_division_level bumps exactly that level's counter.
        context = TextImporter.ImportContext("TestCase")
        # assertEqual replaces the deprecated assertEquals alias.
        self.assertEqual(context.get_division_level_count(2), 0)
        context.increment_division_level(2)
        self.assertEqual(context.get_division_level_count(2), 1)
|
#include "stdafx.h"
#include "igame_level.h"
#include "IGame_Persistent.h"
#include "igame_objectpool.h"
#include "xr_object.h"
// Nothing to initialize: the prefetch pool starts empty.
IGame_ObjectPool::IGame_ObjectPool(void)
{
}
// The pool must have been drained via clear() before destruction;
// a non-empty pool here would leak the prefetched objects.
IGame_ObjectPool::~IGame_ObjectPool(void)
{
R_ASSERT (m_PrefetchObjects.empty());
}
// Pre-creates one object for every entry of the game-type specific
// "prefetch_objects_<game_type>" ini section so later spawns avoid a cold
// load. Model logging is disabled for the duration to keep the log clean.
void IGame_ObjectPool::prefetch ()
{
R_ASSERT (m_PrefetchObjects.empty());
// NOTE(review): p_count is incremented but never reported — leftover from
// the statistics code kept in the comment block below this class.
int p_count = 0;
::Render->model_Logging (FALSE);
string256 section;
// prefetch objects
strconcat (sizeof(section),section,"prefetch_objects_",g_pGamePersistent->m_game_params.m_game_type);
CInifile::Sect& sect = pSettings->r_section(section);
for (CInifile::SectCIt I=sect.Data.begin(); I!=sect.Data.end(); I++) {
const CInifile::Item& item= *I;
// Each ini key names a settings section whose "class" field yields the CLSID.
CLASS_ID CLS = pSettings->r_clsid(item.first.c_str(),"class");
p_count ++;
CObject* pObject = (CObject*) NEW_INSTANCE(CLS);
pObject->Load (item.first.c_str());
VERIFY2 (pObject->cNameSect().c_str(),item.first.c_str());
m_PrefetchObjects.push_back (pObject);
}
// out statistic
::Render->model_Logging (TRUE);
}
// Deletes every prefetched object and empties the pool.
void IGame_ObjectPool::clear()
{
// Clear POOL
ObjectVecIt it = m_PrefetchObjects.begin();
ObjectVecIt itE = m_PrefetchObjects.end();
for (; it!=itE; it++)
xr_delete (*it);
m_PrefetchObjects.clear ();
}
// Creates and loads a fresh object for the given settings section name.
// Unlike the commented-out variant below, this never reuses pooled
// instances — every call constructs a new object.
CObject* IGame_ObjectPool::create ( LPCSTR name )
{
CLASS_ID CLS = pSettings->r_clsid (name,"class");
CObject* O = (CObject*) NEW_INSTANCE (CLS);
O->cNameSect_set (name);
O->Load (name);
return O;
}
// Destroys an object created by create(); no pooling/reuse takes place.
void IGame_ObjectPool::destroy ( CObject* O )
{
xr_delete (O);
}
/*
void IGame_ObjectPool::prefetch ()
{
R_ASSERT (map_POOL.empty());
u32 mem_0 = Memory.mem_usage();
float p_time = 1000.f*Device.GetTimerGlobal()->GetElapsed_sec();
int p_count = 0;
::Render->model_Logging (FALSE);
string256 section;
// prefetch objects
strconcat (section,"prefetch_objects_",g_pGamePersistent->m_game_params.m_game_type);
CInifile::Sect& sect = pSettings->r_section(section);
for (CInifile::SectIt I=sect.begin(); I!=sect.end(); I++) {
CInifile::Item& item= *I;
CLASS_ID CLS = pSettings->r_clsid(item.first.c_str(),"class");
int count = atoi(item.second.c_str());
count += (count==0)?1:0;
R_ASSERT2 ((count>0) && (count<=128), "Too many objects for prefetching");
p_count += count;
for (int c=0; c<count; c++){
CObject* pObject= (CObject*) NEW_INSTANCE(CLS);
pObject->Load (item.first.c_str());
VERIFY2 (pObject->cNameSect().c_str(),item.first.c_str());
map_POOL.insert (mk_pair(pObject->cNameSect(),pObject));
}
}
// out statistic
::Render->model_Logging (TRUE);
p_time = 1000.f*Device.GetTimerGlobal()->GetElapsed_sec() - p_time;
u32 p_mem = Memory.mem_usage() - mem_0;
if (p_count){
float a_time = float(p_time)/float(p_count);
Msg ("* [Object-prefetch] objects: %d", p_count);
Msg ("* [Object-prefetch] time: %d ms", iFloor(p_time));
Msg ("* [Object-prefetch] memory: %dKb", p_mem/1024);
Msg ("* [Object-prefetch] average: %2.2f ms, %d bytes", a_time, p_mem/p_count);
}
}
void IGame_ObjectPool::clear ( )
{
// Clear POOL
for (POOL_IT it=map_POOL.begin(); it!=map_POOL.end(); it++)
xr_delete (it->second);
map_POOL.clear();
}
CObject* IGame_ObjectPool::create ( LPCSTR name )
{
string256 l_name;
POOL_IT it = map_POOL.find (shared_str(strlwr(strcpy_s(l_name,name))));
if (it!=map_POOL.end())
{
// Instance found
CObject* O = it->second;
map_POOL.erase (it);
return O;
} else {
// Create and load _new instance
// Msg ("! Uncached loading '%s'...",name);
CLASS_ID CLS = pSettings->r_clsid (name,"class");
CObject* O = (CObject*) NEW_INSTANCE (CLS);
O->Load (name);
return O;
}
}
void IGame_ObjectPool::destroy ( CObject* O )
{
map_POOL.insert (mk_pair(O->cNameSect(),O));
}
*/ |
# c <name> — jump to a project directory under ~/projects.
c() { cd ~/projects/$1; }
# Completion for c: offer the directories inside ~/projects.
_c() { _files -W ~/projects -/; }
compdef _c c
|
#!/bin/sh
# Install Composer following the official signed-installer procedure: fetch
# the published SHA-384 signature, download the installer, and only run it
# when the hashes match (protects against a tampered download).
EXPECTED_SIGNATURE=$(wget -q -O - https://composer.github.io/installer.sig)
php -r "copy('https://getcomposer.org/installer', 'composer-setup.php');"
ACTUAL_SIGNATURE=$(php -r "echo hash_file('SHA384', 'composer-setup.php');")
if [ "$EXPECTED_SIGNATURE" != "$ACTUAL_SIGNATURE" ]
then
>&2 echo 'ERROR: Invalid installer signature'
rm composer-setup.php
exit 1
fi
# Quiet global install; propagate the installer's exit status.
php composer-setup.php --install-dir=/usr/local/bin --filename=composer --quiet
RESULT=$?
rm composer-setup.php
exit $RESULT
|
// Doxygen-generated navigation index for Catch::Matchers::Impl::MatchNotOf:
// [member name, documentation page anchor, target frame] triples.
// NOTE(review): auto-generated output — regenerate rather than edit by hand.
var structCatch_1_1Matchers_1_1Impl_1_1MatchNotOf =
[
[ "MatchNotOf", "structCatch_1_1Matchers_1_1Impl_1_1MatchNotOf.html#a47afdd9e4c3354cef85adc3186097ae4", null ],
[ "describe", "structCatch_1_1Matchers_1_1Impl_1_1MatchNotOf.html#ac5fb4ef6a9069d23a4098c3c818f06b0", null ],
[ "match", "structCatch_1_1Matchers_1_1Impl_1_1MatchNotOf.html#a181d693c0258e582d80dc6117a1f2b66", null ],
[ "m_underlyingMatcher", "structCatch_1_1Matchers_1_1Impl_1_1MatchNotOf.html#af7ac67f112b0e93796b048a47329aad4", null ]
];
# Assemble the supplementary protein-cluster-analysis figure (panels A-E)
# from the individual renderings using ImageMagick convert/montage.
FIGURE=figure_supp_protein_cluster_analysis.png
# Trim surrounding whitespace from the VMD scene render (panel E).
convert vmdscene.png -trim vmdscene.trim.png
# Panels A/B: the two distance-clustering images side by side.
montage -pointsize 96 -geometry 'x1200' -tile 2x1 -font Liberation-Sans-Bold -label 'A' clustering-dist.png_ar20.5_apdtrpap.png -label 'B' clustering-dist.png_ar20.5_apdtrpap_Tn.png tmp_AB.png
# Panels C/D: crop 690px-high bands (offset 30px) from the linear-clustering
# plots, then stack them vertically.
convert clustering-linear.png_ar20.5_apdtrpap.png -crop 0x690+0+30 strip1.png
convert clustering-linear.png_ar20.5_apdtrpap_Tn.png -crop 0x690+0+30 strip2.png
montage -pointsize 192 -geometry 'x1200' -tile 1x2 -font Liberation-Sans-Bold -label 'C' strip1.png -label 'D' strip2.png tmp_CD.png
# Final figure: AB row, CD column, and the labelled VMD scene stacked 1x3.
montage -pointsize 96 -geometry 'x1200' -tile 1x3 -font Liberation-Sans-Bold tmp_AB.png tmp_CD.png -label 'E' vmdscene.trim.png $FIGURE
|
#!/usr/bin/env bash
# Run an OSISM Ansible playbook for a service in the "monitoring" environment.
# Usage: SERVICE [extra ansible-playbook arguments...]
source /secrets.sh
ENVIRONMENT=monitoring
if [[ $# -lt 1 ]]; then
    echo usage: osism-$ENVIRONMENT SERVICE [...]
    exit 1
fi
service=$1
shift
# Remaining CLI arguments are forwarded verbatim to ansible-playbook.
extra_args=("$@")
ANSIBLE_DIRECTORY=/ansible
CONFIGURATION_DIRECTORY=/opt/configuration
ENVIRONMENTS_DIRECTORY=$CONFIGURATION_DIRECTORY/environments
VAULT=${VAULT:-$ENVIRONMENTS_DIRECTORY/.vault_pass}
if [[ -e /ansible/ara.env ]]; then
    source /ansible/ara.env
fi
export ANSIBLE_INVENTORY=$ANSIBLE_DIRECTORY/inventory
# Environment-specific ansible.cfg wins over the shared one when present.
export ANSIBLE_CONFIG=$ENVIRONMENTS_DIRECTORY/ansible.cfg
if [[ -e $ENVIRONMENTS_DIRECTORY/$ENVIRONMENT/ansible.cfg ]]; then
    export ANSIBLE_CONFIG=$ENVIRONMENTS_DIRECTORY/$ENVIRONMENT/ansible.cfg
fi
# Rebuild the merged inventory when it is writable: generics first, then the
# repository inventory, then numbered fragments concatenated into "hosts".
if [[ -w $ANSIBLE_INVENTORY ]]; then
    rsync -a /ansible/group_vars/ /ansible/inventory/group_vars/
    rsync -a /ansible/inventory.generics/ /ansible/inventory/
    rsync -a /opt/configuration/inventory/ /ansible/inventory/
    python3 /src/handle-inventory-overwrite.py
    cat /ansible/inventory/[0-9]* > /ansible/inventory/hosts
    rm /ansible/inventory/[0-9]*
fi
cd $ENVIRONMENTS_DIRECTORY/$ENVIRONMENT
# Shared ansible-playbook invocation (previously duplicated in both branches).
# $1: path of the playbook to run.
run_playbook() {
    ansible-playbook \
        --vault-password-file $VAULT \
        -e @$ENVIRONMENTS_DIRECTORY/configuration.yml \
        -e @$ENVIRONMENTS_DIRECTORY/secrets.yml \
        -e @secrets.yml \
        -e @images.yml \
        -e @configuration.yml \
        "${extra_args[@]}" \
        "$1"
}
if [[ -e $ENVIRONMENTS_DIRECTORY/$ENVIRONMENT/playbook-$service.yml ]]; then
    run_playbook playbook-$service.yml
elif [[ -e $ANSIBLE_DIRECTORY/$ENVIRONMENT-$service.yml ]]; then
    run_playbook $ANSIBLE_DIRECTORY/$ENVIRONMENT-$service.yml
else
    echo "ERROR: service $service in environment $ENVIRONMENT not available"
    exit 1
fi
|
'use strict';
// 20x20 "dicom--6000" icon descriptor: a plain object describing the <svg>
// element and its single <path>, in the element/attrs/content shape used by
// carbon-style icon consumers. NOTE(review): appears machine-generated —
// prefer regenerating over hand edits.
var _20 = {
elem: 'svg',
attrs: {
xmlns: 'http://www.w3.org/2000/svg',
viewBox: '0 0 32 32',
width: 20,
height: 20,
},
content: [
{
elem: 'path',
attrs: {
d:
'M13 11H7a3 3 0 0 0-3 3v2h2v-2a1 1 0 0 1 1-1h6a1 1 0 0 1 1 1v2h2v-2a3 3 0 0 0-3-3zm-3-1a4 4 0 1 0-4-4 4 4 0 0 0 4 4zm0-6a2 2 0 1 1-2 2 2 2 0 0 1 2-2zm10 0h10v2H20zm0 4h10v2H20zm0 4h6v2h-6zM5 30H3a2 2 0 0 1-2-2v-6a2 2 0 0 1 2-2h3v2H3v2h2a2 2 0 0 1 2 2v2a2 2 0 0 1-2 2zm-2-4v2h2v-2zm10 4h-2a2 2 0 0 1-2-2v-6a2 2 0 0 1 2-2h2a2 2 0 0 1 2 2v6a2 2 0 0 1-2 2zm-2-8v6h2v-6zm10 8h-2a2 2 0 0 1-2-2v-6a2 2 0 0 1 2-2h2a2 2 0 0 1 2 2v6a2 2 0 0 1-2 2zm-2-8v6h2v-6zm10 8h-2a2 2 0 0 1-2-2v-6a2 2 0 0 1 2-2h2a2 2 0 0 1 2 2v6a2 2 0 0 1-2 2zm-2-8v6h2v-6z',
},
},
],
name: 'dicom--6000',
size: 20,
};
module.exports = _20;
|
import React, { Component, Fragment } from "react";
import PropTypes from "prop-types";
import { Row, Col, Spin, Typography } from "antd";
import "./style.scss";
import MeetingTypeCard from "./MeetingTypeCard";
import { connect } from "react-redux";
import meetingTypesBranch from "../../state/meeting-types";
class MeetingTypesMarkup extends Component {
componentDidMount() {
this.props.requestMeetingTypes();
}
render() {
const { Title } = Typography;
const cardMarkup = this.props.loading ? (
<div className="loader">
<Spin size="large" />
</div>
) : this.props.allMeetingTypes.length > 0 ? (
this.props.allMeetingTypes.map((meetingType) => (
<Col xs={24} md={12} key={meetingType.id}>
<MeetingTypeCard meetingType={meetingType} />
</Col>
))
) : (
""
);
return (
<Fragment>
{this.props.loading || <Title level={2}>All Meeting Types</Title>}
<Row gutter={[16, 16]} type="flex">
{cardMarkup}
</Row>
</Fragment>
);
}
}
// Redux wiring: expose the meeting-types slice and its fetch action as props.
const mapStateToProps = (state) => ({
allMeetingTypes: state.meetingTypes.allMeetingTypes,
loading: state.meetingTypes.loading,
});
const mapDispatchToProps = (dispatch) => ({
requestMeetingTypes: () =>
dispatch(meetingTypesBranch.actions.requestMeetingTypes()),
});
MeetingTypesMarkup.propTypes = {
allMeetingTypes: PropTypes.array.isRequired,
loading: PropTypes.bool.isRequired,
requestMeetingTypes: PropTypes.func.isRequired,
};
export default connect(mapStateToProps, mapDispatchToProps)(MeetingTypesMarkup);
|
DROP PROCEDURE IF EXISTS getEthnicOutput;
CREATE DEFINER=`ineqbench_user`@`%` PROCEDURE `getEthnicOutput`(IN start_age int(2), IN end_age int(2), IN sexIn VARCHAR(25), IN locality VARCHAR(25))
/* procedure for getting south asians, black afro-carribeans, and gypsies/travelers =
gets totalPopulation using the totalPopulation function
and totalDeprived from ETHNIC_GROUP
*/
BEGIN
-- Returns a single row: the overall population for the requested
-- age band / sex / locality, plus the estimated head count of the
-- targeted ethnic groups scaled by the age-band share of the population.
SELECT (
SELECT totalPopulation(start_age,end_age,sexIn,locality)) AS 'totalPopulation',
(SELECT FORMAT((SUM(WHITE_TRAVELLER) + SUM(ASIAN_PAKISTANI) + SUM(ASIAN_INDIAN) +
SUM(ASIAN_BANGLADESHI) + SUM(CARIBBEAN_BRITISH) +
SUM(CARIBBEAN_BLACK))*(SELECT SUM(PERCENT)/100
FROM POP_AGE_PERCENT
WHERE AGE BETWEEN start_age AND end_age),0) -- estimating age group size from POP_AGE_PERCENT
FROM ETHNIC_GROUP AS e
WHERE SEX = sexIn -- filtering by gender
AND AGE != 'Total'
AND( locality = "Dumfries & Galloway" -- if locality is D&G, no need to filter by locality using postcode
OR POSTCODE in (SELECT SUBSTRING(Postcode,1, LENGTH( e.POSTCODE )) FROM GEOGRAPHY_LOOKUP WHERE locality = GEOGRAPHY_LOOKUP.Locality) )
) AS 'totalDeprived';
END
|
<filename>assets/content.js
// Inspect all subdirectories from the context (this file) and require files
// matching the regex.
// https://webpack.js.org/guides/dependency-management/#require-context
// Effect: every image/font asset under assets/ is pulled into the webpack
// build so the compiled bundle references (and fingerprints) them.
require.context(".", true, /^\.\/.*\.(jpe?g|png|gif|svg|woff2?|ttf|otf|eot|ico)$/);
|
package service
import "shippo-server/internal/model"
// PermissionAccessService exposes CRUD-style operations for permission
// access records; it embeds the base *Service for shared dependencies
// (notably the dao used by every method below).
type PermissionAccessService struct {
	*Service
}
// NewPermissionAccessService wraps the shared base Service in a
// PermissionAccessService.
func NewPermissionAccessService(s *Service) *PermissionAccessService {
	// Named field initialization instead of positional: survives the
	// addition of further fields to the struct.
	return &PermissionAccessService{Service: s}
}
// PermissionAccessCreate persists a new permission access record via the DAO.
func (t *PermissionAccessService) PermissionAccessCreate(p model.PermissionAccess) error {
	return t.dao.PermissionAccess.PermissionAccessCreate(p)
}
// PermissionAccessDel deletes the permission access record identified by p.ID.
func (t *PermissionAccessService) PermissionAccessDel(p model.PermissionAccess) error {
	return t.dao.PermissionAccess.PermissionAccessDel(p.ID)
}
// PermissionAccessUpdate updates an existing permission access record.
func (t *PermissionAccessService) PermissionAccessUpdate(p model.PermissionAccess) error {
	return t.dao.PermissionAccess.PermissionAccessUpdate(p)
}
// PermissionAccessFindAllExtStatus returns all permission access records
// together with their status, scoped by the given id.
func (t *PermissionAccessService) PermissionAccessFindAllExtStatus(id uint) ([]model.PermissionAccessStatus, error) {
	return t.dao.PermissionAccess.PermissionAccessFindAllExtStatus(id)
}
// PermissionAccessFindAll returns every permission access record with counts.
func (t *PermissionAccessService) PermissionAccessFindAll() ([]model.PermissionAccessCount, error) {
	return t.dao.PermissionAccess.PermissionAccessFindAll()
}
// PermissionAccessFind looks up the permission access record (with counts)
// identified by p.ID.
func (t *PermissionAccessService) PermissionAccessFind(p model.PermissionAccess) (model.PermissionAccessCount, error) {
	return t.dao.PermissionAccess.PermissionAccessFind(p.ID)
}
|
<reponame>jedrula/survey
// jsonapify resource definition for the User model: maps Mongo documents to
// JSON:API "users" resources. The id is the Mongo _id and is read-only; the
// password attribute is write-only (never serialized in responses).
var jsonapify = require('jsonapify');
var User = require('../models/user');
const userResource = new jsonapify.Resource(User, {
type: 'users',
id: {
value: new jsonapify.Property('_id'),
writable: false,
},
//links: {
// self: {
// value: new jsonapify.Template('/users/${FirstName}'),
// writable: false,
// },
//},
attributes: {
identification: new jsonapify.Property('identification'),
password: {
value: new jsonapify.Property('password'),
readable: false
},
hours: new jsonapify.Property('hours'),
},
//'relationships': {
// 'entries': new jsonapify.Refs('Entry', 'entries'), //TODO this might be a bug in jsonapify - i think according to jsonapi specs it would make sense if the key was simply 'user' - simillarly to the example from jsonapify README
//},
//'relationships': {
// 'entries': new jsonapify.Property('entries'), //<- this sorts of works - it attaches entities in an array
//}
});
// Register under 'User' so other resources can reference it by name
// (works around circular-require issues; see jsonapify PR #3).
//registry.add ? - used to work with circular - now i have problems: https://github.com/alex94puchades/jsonapify/pull/3
jsonapify.Runtime.addResource('User', userResource);
module.exports = userResource; //TODO check if it works - if so maybe also try using import
#!/bin/bash
# Build and install R from source on Ubuntu: installs runtime and build
# dependencies, compiles the requested R_VERSION (or devel/patched daily
# tarballs), configures CRAN defaults and littler helpers, then removes the
# build-only packages again to keep the image small.
set -e
apt-get update && apt-get -y install lsb-release
UBUNTU_VERSION=${UBUNTU_VERSION:-`lsb_release -sc`}
LANG=${LANG:-en_US.UTF-8}
LC_ALL=${LC_ALL:-en_US.UTF-8}
CRAN=${CRAN:-https://cran.r-project.org}
## mechanism to force source installs if we're using RSPM
CRAN_SOURCE=${CRAN/"__linux__/$UBUNTU_VERSION"/""}
export DEBIAN_FRONTEND=noninteractive
# Set up and install R
R_HOME=${R_HOME:-/usr/local/lib/R}
# bionic ships an older readline; every other supported release uses 8.
READLINE_VERSION=8
OPENBLAS=libopenblas-dev
if [ ${UBUNTU_VERSION} == "bionic" ]; then
READLINE_VERSION=7
OPENBLAS=libopenblas-dev
fi
# Runtime dependencies (kept installed after the build).
apt-get update \
&& apt-get install -y --no-install-recommends \
bash-completion \
ca-certificates \
devscripts \
file \
fonts-texgyre \
g++ \
gfortran \
gsfonts \
libblas-dev \
libbz2-* \
libcurl4 \
libicu* \
libpcre2* \
libjpeg-turbo* \
${OPENBLAS} \
libpangocairo-* \
libpng16* \
libreadline${READLINE_VERSION} \
libtiff* \
liblzma* \
locales \
make \
unzip \
zip \
zlib1g
echo "en_US.UTF-8 UTF-8" >> /etc/locale.gen
locale-gen en_US.utf8
/usr/sbin/update-locale LANG=en_US.UTF-8
# Build-only dependencies; purged again at the end of this script.
BUILDDEPS="curl \
default-jdk \
libbz2-dev \
libcairo2-dev \
libcurl4-openssl-dev \
libpango1.0-dev \
libjpeg-dev \
libicu-dev \
libpcre2-dev \
libpng-dev \
libreadline-dev \
libtiff5-dev \
liblzma-dev \
libx11-dev \
libxt-dev \
perl \
rsync \
subversion \
tcl-dev \
tk-dev \
texinfo \
texlive-extra-utils \
texlive-fonts-recommended \
texlive-fonts-extra \
texlive-latex-recommended \
texlive-latex-extra \
x11proto-core-dev \
xauth \
xfonts-base \
xvfb \
wget \
zlib1g-dev"
apt-get install -y --no-install-recommends $BUILDDEPS
# Fetch the requested source: daily tarballs for devel/patched, otherwise the
# released tarball (tried under both the R-3 and R-4 CRAN directories).
if [[ "$R_VERSION" == "devel" ]]; then \
wget https://stat.ethz.ch/R/daily/R-devel.tar.gz; \
elif [[ "$R_VERSION" == "patched" ]]; then \
wget https://stat.ethz.ch/R/daily/R-patched.tar.gz; \
else \
wget https://cran.r-project.org/src/base/R-3/R-${R_VERSION}.tar.gz || \
wget https://cran.r-project.org/src/base/R-4/R-${R_VERSION}.tar.gz; \
fi && \
tar xzf R-${R_VERSION}.tar.gz &&
cd R-${R_VERSION}
# Configure and build; hardening flags match the Debian R packaging defaults.
R_PAPERSIZE=letter \
R_BATCHSAVE="--no-save --no-restore" \
R_BROWSER=xdg-open \
PAGER=/usr/bin/pager \
PERL=/usr/bin/perl \
R_UNZIPCMD=/usr/bin/unzip \
R_ZIPCMD=/usr/bin/zip \
R_PRINTCMD=/usr/bin/lpr \
LIBnn=lib \
AWK=/usr/bin/awk \
CFLAGS="-g -O2 -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g" \
CXXFLAGS="-g -O2 -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g" \
./configure --enable-R-shlib \
--enable-memory-profiling \
--with-readline \
--with-blas \
--with-lapack \
--with-tcltk \
--disable-nls \
--with-recommended-packages
make
make install
make clean
## Add a default CRAN mirror
echo "options(repos = c(CRAN = '${CRAN}'), download.file.method = 'libcurl')" >> ${R_HOME}/etc/Rprofile.site
## Set HTTPUserAgent for RSPM (https://github.com/rocker-org/rocker/issues/400)
echo 'options(HTTPUserAgent = sprintf("R/%s R (%s)", getRversion(),
paste(getRversion(), R.version$platform,
R.version$arch, R.version$os)))' >> ${R_HOME}/etc/Rprofile.site
## Add a library directory (for user-installed packages)
mkdir -p ${R_HOME}/site-library
chown root:staff ${R_HOME}/site-library
chmod g+ws ${R_HOME}/site-library
## Fix library path
echo "R_LIBS=\${R_LIBS-'${R_HOME}/site-library:${R_HOME}/library'}" >> ${R_HOME}/etc/Renviron
echo "TZ=${TZ}" >> ${R_HOME}/etc/Renviron
## Use littler installation scripts
Rscript -e "install.packages(c('littler', 'docopt'), repos='${CRAN_SOURCE}')"
ln -s ${R_HOME}/site-library/littler/examples/install2.r /usr/local/bin/install2.r
ln -s ${R_HOME}/site-library/littler/examples/installGithub.r /usr/local/bin/installGithub.r
ln -s ${R_HOME}/site-library/littler/bin/r /usr/local/bin/r
## Clean up from R source install
cd /
rm -rf /tmp/*
rm -rf R-${R_VERSION}
rm -rf R-${R_VERSION}.tar.gz
apt-get remove --purge -y $BUILDDEPS
apt-get autoremove -y
apt-get autoclean -y
rm -rf /var/lib/apt/lists/*
|
# Step 1: Create a custom exception class
class ConfigException(Exception):
    """Raised when an integration referenced by the configuration is not registered."""
    pass
# Step 2: Modify the given code snippet to raise ConfigException
try:
# Your code that may raise a KeyError
raise KeyError("example_key")
except KeyError as e:
raise ConfigException(
'Integration "{}" is not registered in this configuration'.format(e.args[0])
) |
<reponame>TheSpicyMeatball/jsdoc-parse-plus
import { first, getTagRegExp, isNotNullOrEmpty } from '../_private/utils';
/**
* Removes a set of tags from jsdoc
*
* @param {string} jsdoc - The entire jsdoc string
* @param {string[]} tags - Array of string tags to remove
* @returns {string} The jsdoc string the specified tags removed
*/
/**
 * Removes a set of tags from jsdoc
 *
 * @param {string} jsdoc - The entire jsdoc string
 * @param {string[]} tags - Array of string tags to remove
 * @returns {string} The jsdoc string with the specified tags removed
 */
export const removeTags = (jsdoc: string, tags: string[]) : string => {
  for (const rawTag of tags) {
    // Normalize the tag so callers may pass either 'param' or '@param'.
    const normalized = rawTag.startsWith('@') ? rawTag : `@${rawTag}`;

    // The untagged leading description has no @-tag of its own, so it needs
    // dedicated handling before the generic tag matcher runs.
    if (normalized === '@description') {
      jsdoc = removeTaglessDescription(jsdoc);
    }

    // Strip every occurrence of the tag from the comment.
    Array.from(jsdoc.matchAll(getTagRegExp(normalized))).forEach(occurrence => {
      jsdoc = jsdoc.replace(occurrence[0], '');
    });
  }

  // Collapse an effectively-empty comment down to '/** */'; otherwise repair
  // any '**/' endings the removals above may have produced.
  const emptyComment = /^\/\*\*( *)?\/|\/\*\*( *)?(?:\r\n|\r|\n)*(?: ?\*(?:\r\n|\r|\n)?\/?)*$/;
  return emptyComment.test(jsdoc) ? '/** */' : jsdoc.replace(/\*\*\//g, '*/');
};
const removeTaglessDescription = (jsdoc: string) : string => {
const regex = /\/\*\*( *)(.*)(\r\n|\r|\n)*((?:(?:(?! @).)(?:\{@link|\{@tutorial))*(?:(?!( @)).)*(\r\n|\r|\n)?)*/gm;
const match = first(Array.from(jsdoc.matchAll(regex)));
if (isNotNullOrEmpty(match[2])) {
const end = new RegExp(/\*\/ *$/);
return end.test(match[2]) ? jsdoc.replace(regex, '/** */') : jsdoc.replace(regex, '/**\n *');
}
return jsdoc.replace(regex, '/**\n *');
}; |
package code.generator;
/**
 * Signals that generated output differs from its reference version.
 * Carries the reference text, the actual text, and the index at which the
 * comparison diverged so callers can report the mismatch precisely.
 */
public class ComparisonDifferenceException extends Exception {

    private static final long serialVersionUID = 1L;

    private final String reference;
    private final String actual;
    private final Integer index;

    /**
     * @param reference the expected (reference) content
     * @param actual    the content that was actually produced
     * @param index     position at which the difference was detected
     */
    public ComparisonDifferenceException(String reference, String actual, Integer index) {
        // Populate the exception message; previously getMessage() returned null,
        // which made logs and stack traces uninformative.
        super("Comparison difference at index " + index
                + ": expected <" + reference + "> but was <" + actual + ">");
        this.reference = reference;
        this.actual = actual;
        this.index = index;
    }

    /** @return the expected (reference) content */
    public final String getReference() {
        return reference;
    }

    /** @return the content that was actually produced */
    public final String getActual() {
        return actual;
    }

    /** @return the index at which the difference was detected */
    public final Integer getIndex() {
        return index;
    }
}
|
<reponame>GuRuGuMaWaRu/CodeProblems<gh_stars>0
/**
 * Checks whether a flattened 3x3 sudoku box is valid.
 *
 * @param {number[]} grid - The 9 cell values of one box
 * @returns {boolean} true when every digit 1..9 appears exactly once
 */
function checkGrid(grid) {
  const seen = new Set(grid);
  // 9 distinct values alone is not enough (e.g. 2..10 would pass the
  // original distinct-count check); every value must be a digit 1..9.
  if (seen.size !== 9) return false;
  for (let digit = 1; digit <= 9; digit += 1) {
    if (!seen.has(digit)) return false;
  }
  return true;
}
/**
 * Validates a completed 9x9 sudoku grid: every row, column, and 3x3 box
 * must contain exactly the digits 1..9.
 *
 * @param {number[][]} grid - 9x9 array of cell values
 * @returns {boolean} true when the grid is a valid solved sudoku
 */
function sudoku(grid) {
  // A group (row/column/box) is valid only if it holds exactly 1..9.
  // The original code only required 9 *distinct* values, so e.g. a grid
  // shifted to the values 2..10 incorrectly validated.
  const isValidGroup = (cells) => {
    const seen = new Set(cells);
    if (seen.size !== 9) return false;
    for (let digit = 1; digit <= 9; digit += 1) {
      if (!seen.has(digit)) return false;
    }
    return true;
  };

  // Rows and columns.
  for (let i = 0, len = grid.length; i < len; i += 1) {
    if (!isValidGroup(grid[i])) return false;
    const column = grid.map((row) => row[i]);
    if (!isValidGroup(column)) return false;
  }

  // 3x3 boxes.
  for (let r = 0, len = grid.length; r < len; r += 3) {
    for (let c = 0, lenC = grid.length; c < lenC; c += 3) {
      const box = [
        ...grid[r].slice(c, c + 3),
        ...grid[r + 1].slice(c, c + 3),
        ...grid[r + 2].slice(c, c + 3),
      ];
      if (!isValidGroup(box)) return false;
    }
  }
  return true;
}
|
#!/bin/bash
# Print the value that follows KEY on matching line(s) of FILE.
# Usage: jdlVal <file> <key>
function jdlVal() {
  FILE="$1"
  KEY="$2"
  # grep selects the line(s); awk prints everything after "KEY ".
  # Fixes: removed useless 'cat', quoted "$FILE" against word-splitting
  # and globbing, and replaced deprecated backticks with $( ).
  RESULT=$(grep "$KEY" "$FILE" | awk -F "$KEY " '{print $2}')
  echo "$RESULT"
  return 0
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.