text stringlengths 1 1.05M |
|---|
package translations.terms;
import translations.Localizable;
import translations.Translatable;
/**
 * Localizable bundle of translation keys for common button labels.
 */
@Localizable
public class ButtonTerms implements Translatable {
    /** Resource-bundle base name for this set of terms. */
    public static final String NAME = "ButtonTerms";

    @Override
    public String getBundleName() {
        return NAME;
    }

    /** Translation key for the OK button label. */
    public static final String OK = "OK";
    /** Translation key for the Cancel button label. */
    public static final String CANCEL = "CANCEL";
}
|
/**
* Copyright 2005 Sakai Foundation Licensed under the
* Educational Community License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may
* obtain a copy of the License at
*
* http://www.osedu.org/licenses/ECL-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an "AS IS"
* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package org.sakaiproject.evaluation.tool.inferrers;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.sakaiproject.entitybroker.EntityReference;
import org.sakaiproject.evaluation.constant.EvalConstants;
import org.sakaiproject.evaluation.logic.EvalCommonLogic;
import org.sakaiproject.evaluation.logic.EvalEvaluationService;
import org.sakaiproject.evaluation.logic.entity.EvalReportsEntityProvider;
import org.sakaiproject.evaluation.model.EvalEvaluation;
import org.sakaiproject.evaluation.tool.producers.ReportsViewingProducer;
import org.sakaiproject.evaluation.tool.viewparams.ReportParameters;
import uk.ac.cam.caret.sakai.rsf.entitybroker.EntityViewParamsInferrer;
import uk.org.ponder.rsf.viewstate.ViewParameters;
/**
* This will handle the case of reports viewing redirection
*
* @author <NAME> (<EMAIL>)
*/
public class ReportsVPInferrer implements EntityViewParamsInferrer {

    private static final Log log = LogFactory.getLog(ReportsVPInferrer.class);

    private EvalCommonLogic commonLogic;
    public void setCommonLogic(EvalCommonLogic commonLogic) {
        this.commonLogic = commonLogic;
    }

    private EvalEvaluationService evaluationService;
    public void setEvaluationService(EvalEvaluationService evaluationService) {
        this.evaluationService = evaluationService;
    }

    public void init() {
        log.info("VP init");
    }

    /* (non-Javadoc)
     * @see uk.ac.cam.caret.sakai.rsf.entitybroker.EntityViewParamsInferrer#getHandledPrefixes()
     */
    public String[] getHandledPrefixes() {
        return new String[] {
                EvalReportsEntityProvider.ENTITY_PREFIX
        };
    }

    /**
     * Resolves an eval-reports entity reference to view params for the
     * reports viewing page.
     *
     * @param reference entity reference whose id part is an evaluation id
     * @return view params pointing at the reports viewing producer
     * @throws IllegalArgumentException when the id does not resolve to an evaluation
     * @throws SecurityException when results are not public and the user is anonymous
     */
    public ViewParameters inferDefaultViewParameters(String reference) {
        String refId = EntityReference.getIdFromRef(reference);
        Long evaluationId = Long.valueOf(refId);
        EvalEvaluation evaluation = evaluationService.getEvaluationById(evaluationId);
        if (evaluation == null) {
            throw new IllegalArgumentException("Received an invalid evaluation id ("+evaluationId+") which cannot be resolved to an eval for the reports inferrer");
        }

        // Public results need no login check; everyone else must be authenticated.
        if (! EvalConstants.SHARING_PUBLIC.equals(evaluation.getResultsSharing())
                && commonLogic.isUserAnonymous(commonLogic.getCurrentUserId())) {
            throw new SecurityException("User must be authenticated to access this page");
        }

        // No group filtering is possible here (not enough info in the inferrer),
        // so pass an empty set and let the reports page resolve groups itself.
        String[] groupIds = new String[] {};

        // just send the user to the reporting page, permissions are handled there
        ReportParameters vp = new ReportParameters(ReportsViewingProducer.VIEW_ID, evaluationId, groupIds);
        vp.external = true;
        return vp;
    }
}
|
<gh_stars>100-1000
"""
Code illustration: 4.01
@ Tkinter GUI Application Development Blueprints
"""
from configurations import *
import model
class Controller():
    """Controller of the MVC triad: creates and owns the Model instance."""

    def __init__(self):
        # Model construction is delegated to init_model so subclasses
        # can override how the model is wired up.
        self.init_model()

    def init_model(self):
        """Instantiate the backing Model and keep a reference to it."""
        self.model = model.Model()
|
import { IGridRow } from '../../typings/interfaces'
import { IGridOperationFactory } from '../../typings/interfaces/grid-operation-factory.interface'
import { TPrimaryKey } from '../../typings/types'
import { Operation } from '../operation.abstract'
export class FilterRelatedDataRows extends Operation {
  constructor(factory: IGridOperationFactory) { super(factory.gridController) }

  /**
   * Looks up the related-data grid for `gridID` and returns the rows whose
   * primary keys appear in `rowKeys`. Keys with no matching row are skipped.
   */
  public run(gridID: string, rowKeys: TPrimaryKey[]): IGridRow[] {
    const relatedGrid = this.gridOperations.relatedDataMap.get(gridID)
    const matches: IGridRow[] = []
    rowKeys.forEach(key => {
      const row = relatedGrid?.rowMap.get(key)
      if (row !== undefined) matches.push(row)
    })
    return matches
  }
}
|
import { Component, h, Prop, Event, EventEmitter, Host, Listen } from '@stencil/core'
import { ItemPosition } from '../../../utils/position'
@Component({
  tag: 'focusable-item',
  styleUrl: 'focusable-item.css',
  shadow: true
})
export class FocusableItem {
  // When true the item participates in the document tab order (tabindex 0);
  // otherwise it is only programmatically focusable (tabindex -1).
  @Prop() isInTabSequence: boolean = false
  // Logical position of this item within its container; emitted on focus.
  @Prop() position!: ItemPosition
  // Fired whenever the item receives focus, carrying the item's position
  // so a parent can track which item is active.
  @Event() focusedItem!: EventEmitter<ItemPosition>

  @Listen('focus')
  protected focusHandler() {
    this.focusedItem.emit(this.position)
  }

  render() {
    return (
      <Host tabindex={this.isInTabSequence ? 0 : -1}>
        <slot />
      </Host>
    )
  }
}
|
#!/usr/bin/env bash
# ====================================================================
# Tests Android cross-compiles
#
# Crypto++ Library is copyrighted as a compilation and (as of version 5.6.2)
# licensed under the Boost Software License 1.0, while the individual files
# in the compilation are all public domain.
#
# See http://www.cryptopp.com/wiki/Android_(Command_Line) for more details
# ====================================================================
set +e

# Clear results from any previous run. Without this, a stale
# /tmp/build.failed marker makes a fully successful run exit 1, and
# /tmp/build.log accumulates entries across runs.
rm -f /tmp/build.log /tmp/build.failed

if [ -z "${PLATFORM-}" ]; then
	PLATFORMS=(armeabi armeabi-v7a armv7a-neon aarch64 mipsel mipsel64 x86 x86_64)
else
	# Intentionally unquoted: PLATFORM may hold several space-separated platforms.
	PLATFORMS=(${PLATFORM})
fi

RUNTIMES=(gnu-static gnu-shared stlport-static stlport-shared) #llvm-static llvm-shared

for platform in "${PLATFORMS[@]}"
do
	for runtime in "${RUNTIMES[@]}"
	do
		make -f GNUmakefile-cross distclean > /dev/null 2>&1

		echo
		echo "===================================================================="
		echo "Testing for Android support of $platform using $runtime"

		# Test if we can set the environment for the platform
		if ./setenv-android-gcc.sh "$platform" "$runtime"; then
			echo
			echo "Building for $platform using $runtime..."
			echo

			# run in subshell to not keep any env vars
			(
				. ./setenv-android-gcc.sh "$platform" "$runtime" > /dev/null 2>&1
				if make -f GNUmakefile-cross static dynamic cryptest.exe; then
					echo "$platform:$runtime ==> SUCCESS" >> /tmp/build.log
				else
					echo "$platform:$runtime ==> FAILURE" >> /tmp/build.log
					touch /tmp/build.failed
				fi
			)
		else
			echo
			echo "$platform with $runtime not supported by Android"
			echo "$platform:$runtime ==> FAILURE" >> /tmp/build.log
			touch /tmp/build.failed
		fi
	done
done

cat /tmp/build.log

# let the script fail if any of the builds failed
if [ -f /tmp/build.failed ]; then
	[[ "$0" = "${BASH_SOURCE[0]}" ]] && exit 1 || return 1
fi
[[ "$0" = "${BASH_SOURCE[0]}" ]] && exit 0 || return 0
|
#!/bin/bash
set -e

# setup ssh: allow key to be used without a prompt and start ssh agent
export GIT_SSH_COMMAND="ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no"
eval "$(ssh-agent -s)"

######## Run notebook/word converter ########
# word converter using pandoc
/fastpages/word2post.sh

# notebook converter using nbdev
cp /fastpages/settings.ini .
python /fastpages/nb2post.py

######## Optionally save files and build GitHub Pages ########
if [[ "$INPUT_BOOL_SAVE_MARKDOWN" == "true" ]];then
    if [ -z "$INPUT_SSH_DEPLOY_KEY" ];then
        echo "You must set the SSH_DEPLOY_KEY input if BOOL_SAVE_MARKDOWN is set to true.";
        exit 1;
    fi

    # Get user's email from commit history
    if [[ "$GITHUB_EVENT_NAME" == "push" ]];then
        # jq -r emits the raw string; without it the stored email would keep
        # its surrounding JSON quotes and break the git config value.
        USER_EMAIL=$(jq -r '.commits | .[0] | .author.email' "$GITHUB_EVENT_PATH")
    else
        USER_EMAIL="actions@github.com"
    fi

    # Setup Git credentials if we are planning to change the data in the repo
    git config --global user.name "$GITHUB_ACTOR"
    git config --global user.email "$USER_EMAIL"
    git remote add fastpages-origin "git@github.com:$GITHUB_REPOSITORY.git"
    echo "${INPUT_SSH_DEPLOY_KEY}" > _mykey
    chmod 400 _mykey
    ssh-add _mykey

    # Save intermediate markdown (the outer guard already confirmed
    # BOOL_SAVE_MARKDOWN; the previous duplicated inner check was redundant).
    git pull fastpages-origin ${GITHUB_REF} --ff-only
    git add _posts
    git commit -m "[Bot] Update $INPUT_FORMAT blog posts" --allow-empty
    git push fastpages-origin HEAD:${GITHUB_REF}
fi
|
<filename>app.py
#!/usr/bin/env python
# encoding: utf-8
"""Tiny Flask server that fronts the Sphinx-built HTML under _build/html."""
from flask import Flask, render_template, request, safe_join, send_from_directory
import sys

APP = Flask(__name__, static_folder="_build/html/_static", template_folder="_build/html")

# Number of chapter pages produced by the build (chap01.html .. chap13.html).
NUM_CHAPTERS = 13


@APP.route("/_images/<path>")
def page_images(path):
    """Serve images referenced by the generated pages."""
    # safe_join prevents path traversal outside the _images directory.
    return send_from_directory(APP.template_folder, safe_join("_images", path))


@APP.route("/")
@APP.route("/index.html")
def page_home():
    """Landing page."""
    return render_template("index.html")


def _make_chap_view(template_name):
    """Build a view function rendering one chapter template."""
    def view():
        return render_template(template_name)
    return view


# Register /chapNN.html for every chapter in one loop instead of thirteen
# copy-pasted handlers. Endpoint names ("page_chap01", ...) match the
# original one-function-per-page layout, so url_for() callers keep working.
for _chap in range(1, NUM_CHAPTERS + 1):
    _stem = "chap{:02d}".format(_chap)
    APP.add_url_rule(
        "/{}.html".format(_stem),
        "page_{}".format(_stem),
        _make_chap_view(_stem + ".html"),
    )


@APP.route("/genindex.html")
def page_genindex():
    """Generated general index page."""
    return render_template("genindex.html")


@APP.route("/py-modindex.html")
def page_modindex():
    """Python module index page."""
    return render_template("py-modindex.html")


@APP.route("/search.html")
def page_search():
    """Client-side search page."""
    return render_template("search.html")


if __name__ == "__main__":
    PORT = int(sys.argv[1])
    APP.run(host="0.0.0.0", port=PORT, debug=True)
|
<reponame>shershen08/angular2-web-cryptography
import {NgModule} from '@angular/core';
import {CommonModule} from '@angular/common';
import {Observable} from 'rxjs/Observable';
import {CryptRSAOAEP} from './webcrypto.rsa-oaep.service';
import {CryptAESCBC} from './webcrypto.aes-cbc.service';
import { Utils } from './utils'
@NgModule({
imports: [CommonModule],
providers: [CryptRSAOAEP, CryptAESCBC],
})
const typesSupported = ['AES-CBC', 'RSA-OAEP'];
export class Ng2WebCrypto {
type: string;
keyUsagesEncryptDecrypt: string[] = ["encrypt", "decrypt"];
keyUsagesWrapUnwrap: string[] = ["wrapKey", "unwrapKey"];
key;
text;
lib;
constructor(type:string) {
if (typesSupported.indexOf(type) > -1) {
this.type = type;
} else {
throw new Error('This encryption type is not supported.')
}
this.lib = (this.type == typesSupported[0]) ? new CryptAESCBC() : new CryptRSAOAEP();
}
decrypt(key) {
this.key = key;
}
encrypt(text:string):Promise<any> {
this.text = Utils.getArrayFromString(text);
let dfd = new Promise((resolve, reject) => {
this.lib.getKey().then(() => {
this.lib.encrypt(this.text).then((res) => {
resolve(new Uint8Array(res));
});
})
});
return dfd;
}
} |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.oap.server.storage.plugin.iotdb.profile;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.apache.skywalking.oap.server.core.profiling.trace.ProfileTaskLogRecord;
import org.apache.skywalking.oap.server.core.query.type.ProfileTaskLog;
import org.apache.skywalking.oap.server.core.query.type.ProfileTaskLogOperationType;
import org.apache.skywalking.oap.server.core.storage.StorageData;
import org.apache.skywalking.oap.server.core.storage.profiling.trace.IProfileTaskLogQueryDAO;
import org.apache.skywalking.oap.server.core.storage.type.StorageBuilder;
import org.apache.skywalking.oap.server.storage.plugin.iotdb.IoTDBClient;
import org.apache.skywalking.oap.server.storage.plugin.iotdb.utils.IoTDBUtils;
@Slf4j
@RequiredArgsConstructor
public class IoTDBProfileTaskLogQueryDAO implements IProfileTaskLogQueryDAO {
    private final IoTDBClient client;
    private final StorageBuilder<ProfileTaskLogRecord> storageBuilder = new ProfileTaskLogRecord.Builder();
    private final int fetchTaskLogMaxSize;

    /**
     * Fetches up to {@code fetchTaskLogMaxSize} profile task log entries,
     * newest first.
     */
    @Override
    public List<ProfileTaskLog> getTaskLogList() throws IOException {
        // Build: select * from <storageGroup>.<index>.* limit N align by device
        StringBuilder sql = new StringBuilder();
        sql.append("select * from ");
        IoTDBUtils.addModelPath(client.getStorageGroup(), sql, ProfileTaskLogRecord.INDEX_NAME);
        IoTDBUtils.addQueryAsterisk(ProfileTaskLogRecord.INDEX_NAME, sql);
        sql.append(" limit ").append(fetchTaskLogMaxSize).append(IoTDBClient.ALIGN_BY_DEVICE);

        List<? super StorageData> rawRows = client.filterQuery(ProfileTaskLogRecord.INDEX_NAME,
                                                               sql.toString(), storageBuilder);

        // Downcast the storage rows to their concrete record type.
        List<ProfileTaskLogRecord> records = new ArrayList<>(rawRows.size());
        for (Object raw : rawRows) {
            records.add((ProfileTaskLogRecord) raw);
        }

        // The query result arrives ordered by time; present newest entries first.
        records.sort((a, b) -> Long.compare(b.getOperationTime(), a.getOperationTime()));

        List<ProfileTaskLog> logs = new ArrayList<>(records.size());
        for (ProfileTaskLogRecord record : records) {
            logs.add(parseLog(record));
        }
        return logs;
    }

    /** Maps a storage-layer record onto the query-layer ProfileTaskLog type. */
    private ProfileTaskLog parseLog(ProfileTaskLogRecord record) {
        return ProfileTaskLog.builder()
                             .id(record.id())
                             .taskId(record.getTaskId())
                             .instanceId(record.getInstanceId())
                             .operationType(ProfileTaskLogOperationType.parse(record.getOperationType()))
                             .operationTime(record.getOperationTime())
                             .build();
    }
}
|
import Vue from 'vue'
import Router from 'vue-router'
import MeusDadosCadastrais from '@/pages/MeusDadosCadastrais'
import FazerNovaTransacao from '@/pages/FazerNovaTransacao'
import FazerSimulacao from '@/pages/FazerSimulacao'
import Configuracoes from '@/pages/Configuracoes'
import PainelDoCliente from '@/pages/PainelDoCliente'

Vue.use(Router)

// Route table for the client area: one entry per page component.
const routes = [
  { path: '/', name: 'Meus dados cadastrais', component: MeusDadosCadastrais },
  { path: '/fazer-nova-transacao', name: 'Fazer nova transação', component: FazerNovaTransacao },
  { path: '/configuracoes', name: 'Configurações', component: Configuracoes },
  { path: '/fazer-simulacao', name: 'Fazer simulação', component: FazerSimulacao },
  { path: '/painel-do-cliente', name: 'Painel do cliente', component: PainelDoCliente }
]

export default new Router({ routes })
|
def extract_segments(input_string, delimiter='='):
    """Split ``input_string`` on ``delimiter`` and return the pieces.

    Args:
        input_string: String to split. Falsy input (empty string, None)
            yields an empty list.
        delimiter: Separator to split on. Defaults to ``'='`` so existing
            callers keep their behavior.

    Returns:
        List of substrings, or ``[]`` for falsy input.
    """
    if not input_string:  # ''.split() would return [''], so short-circuit here
        return []
    return input_string.split(delimiter)
class OrderManager {
    private $db;

    public function __construct($db) {
        $this->db = $db;
    }

    /**
     * Insert a new order row.
     *
     * @param array $data column => value pairs for the orders table
     * @return int|false the new order id, or FALSE when no id was generated
     */
    public function addOrder($data) {
        $this->db->insert('orders', $data);
        $id = $this->db->insert_id();
        // BUG FIX: isset() on a just-assigned variable is always true (unless
        // the value is null), so a failed insert returning 0 was reported as
        // success. Test the value itself instead.
        return !empty($id) ? $id : FALSE;
    }

    /**
     * Insert a detail row belonging to an order.
     *
     * @param array $data column => value pairs for the orderdetail table
     */
    public function addDetailOrder($data) {
        $this->db->insert('orderdetail', $data);
    }
}
// Usage example:
// Assuming $db is the database connection object
$orderManager = new OrderManager($db);

// Adding a new order
$orderData = array(
    'customer_id' => 123,
    'total_amount' => 100.50,
    // Other order data fields
);
// addOrder() returns the new row id, or FALSE on failure (checked below).
$newOrderId = $orderManager->addOrder($orderData);
if ($newOrderId !== FALSE) {
    echo "New order added with ID: " . $newOrderId;
} else {
    echo "Failed to add new order";
}

// Adding order details
$orderDetailData = array(
    'order_id' => $newOrderId, // Assuming $newOrderId is the ID of the newly added order
    'product_id' => 456,
    'quantity' => 2,
    // Other order detail data fields
);
$orderManager->addDetailOrder($orderDetailData);
jQuery(document).ready(function($){
    // Cache the modal, its three forms, the login/signup tabs, the
    // cross-form links and the main nav, all used below.
    var $form_modal = $('.cd-user-modal'),
        $form_login = $form_modal.find('#cd-login'),
        $form_signup = $form_modal.find('#cd-signup'),
        $form_forgot_password = $form_modal.find('#cd-reset-password'),
        $form_modal_tab = $('.cd-switcher'),
        $tab_login = $form_modal_tab.children('li').eq(0).children('a'),
        $tab_signup = $form_modal_tab.children('li').eq(1).children('a'),
        $forgot_password_link = $form_login.find('.cd-form-bottom-message a'),
        $back_to_login_link = $form_forgot_password.find('.cd-form-bottom-message a'),
        $main_nav = $('.main-nav');

    // on mobile close submenu
    $main_nav.children('ul').removeClass('is-visible');

    //show modal layer
    // NOTE(review): this runs unconditionally on page load, so the modal is
    // visible immediately — confirm that is the intended behavior.
    $form_modal.addClass('is-visible');

    //show the selected form
    login_selected();

    //hide or show password
    $('.hide-password').on('click', function(){
        var $this= $(this),
            $password_field = $this.prev('input');

        // Toggle the adjacent input between masked and plain-text display.
        ( 'password' == $password_field.attr('type') ) ? $password_field.attr('type', 'text') : $password_field.attr('type', 'password');
        // Keep the toggle's label ("Show"/"Hide") in sync with the field state.
        ( 'Hide' == $this.text() ) ? $this.text('Show') : $this.text('Hide');
        //focus and move cursor to the end of input field
        $password_field.putCursorAtEnd();
    });

    // Mark the login pane/tab as selected and deselect the other two forms.
    function login_selected(){
        $form_login.addClass('is-selected');
        $form_signup.removeClass('is-selected');
        $form_forgot_password.removeClass('is-selected');
        $tab_login.addClass('selected');
        $tab_signup.removeClass('selected');
    }

    //IE9 placeholder fallback
    //credits http://www.hagenburger.net/BLOG/HTML5-Input-Placeholder-Fix-With-jQuery.html
    if(!Modernizr.input.placeholder){
        // Emulate placeholders: clear the value on focus, restore it on blur.
        $('[placeholder]').focus(function() {
            var input = $(this);
            if (input.val() == input.attr('placeholder')) {
                input.val('');
            }
        }).blur(function() {
            var input = $(this);
            if (input.val() == '' || input.val() == input.attr('placeholder')) {
                input.val(input.attr('placeholder'));
            }
        }).blur();
        // Strip the emulated placeholder text before the form is submitted,
        // so it is not sent as a real field value.
        $('[placeholder]').parents('form').submit(function() {
            $(this).find('[placeholder]').each(function() {
                var input = $(this);
                if (input.val() == input.attr('placeholder')) {
                    input.val('');
                }
            })
        });
    }
});
//credits http://css-tricks.com/snippets/jquery/move-cursor-to-end-of-textarea-or-input/
// jQuery plugin: move the caret to the end of each matched input/textarea.
jQuery.fn.putCursorAtEnd = function() {
    return this.each(function() {
        // If this function exists...
        if (this.setSelectionRange) {
            // ... then use it (Doesn't work in IE)
            // Double the length because Opera is inconsistent about whether a carriage return is one character or two. Sigh.
            var len = $(this).val().length * 2;
            this.setSelectionRange(len, len);
        } else {
            // ... otherwise replace the contents with itself
            // (Doesn't work in Google Chrome)
            $(this).val($(this).val());
        }
    });
};
|
#!/bin/bash
export ANSIBLE_DISPLAY_SKIPPED_HOSTS="no"

MANAGED_NODE=$1
if [[ ${MANAGED_NODE}. = "." ]]; then
   echo "Enter the name of the managed node: "
   # -r keeps backslashes in host names literal
   read -r MANAGED_NODE
fi

# Major RHEL release of the managed node, parsed from /etc/redhat-release.
# (The original script computed this a second time later with an identical
# command; the duplicate has been removed.)
_RHEL_RELEASE_MAJOR=$(ssh "${MANAGED_NODE}" cat /etc/redhat-release | awk 'BEGIN{FS="release "}{split ($2, a, " "); split (a[1], b, "."); print b[1]}')
if [[ ${_RHEL_RELEASE_MAJOR} -ne 8 ]]; then
   echo "This test is only valid for RHEL 8 managed nodes. Exiting."
   echo
   exit 1
fi

echo
printf "Managed node Red Hat release: "
ssh "${MANAGED_NODE}" cat /etc/redhat-release
printf "Managed node HW architecture: "
ssh "${MANAGED_NODE}" uname -m
echo

# Run the test playbook once with the given heading and extra arguments;
# abort the whole script on the first non-zero ansible-playbook exit code.
run_test () {
   local heading=$1; shift
   echo
   echo "${heading}"
   ansible-playbook sap-hana-preconfigure-sapnote-2777782-02-test.yml -l "${MANAGED_NODE}" "$@"
   local rc=$?
   echo "RC=${rc}"
   if [[ ${rc} -ne 0 ]]; then
      exit ${rc}
   fi
}

run_test "Test 1: Run role in check mode:" --check
run_test "Test 2: Run role in normal mode:"
run_test "Test 3: Run the role in assert mode:" -e "{'sap_hana_preconfigure_assert': yes}"
|
<filename>lang/py/cookbook/v2/source/cb2_20_2_sol_1.py
import math
class Rectangle(object):
    """Rectangle with sides x and y whose area can be read and assigned.

    Assigning to ``area`` rescales both sides by the same factor, so the
    aspect ratio is preserved.
    """

    def __init__(self, x, y):
        # BUG FIX: the original assigned both arguments to self.y
        # (``self.y = x`` then ``self.y = y``), leaving self.x undefined
        # and breaking the area property.
        self.x = x
        self.y = y

    @property
    def area(self):
        """Area of the rectangle"""
        return self.x * self.y

    @area.setter
    def area(self, value):
        # Scale both sides uniformly so that x*y becomes exactly `value`.
        ratio = math.sqrt((1.0 * value) / self.area)
        self.x *= ratio
        self.y *= ratio
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Note that the TomoPhantom package is released under Apache License, Version 2.0
Script to generate 4D (3D+time) analytical phantoms (wip: generation of 4D projection data )
If one needs to modify/add phantoms, please edit Phantom3DLibrary.dat
@author: <NAME>
"""
import timeit
import os
import matplotlib.pyplot as plt
import numpy as np
import tomophantom
from tomophantom import TomoP3D

print ("Building 4D phantom using TomoPhantom software")
tic=timeit.default_timer()
model = 100 # note that the selected model is temporal (3D + time)
# Define phantom dimensions using a scalar (cubic) or a tuple [N1, N2, N3]
N_size = 256 # or as a tuple of a custom size (256,256,256)
# one can specify an exact path to the parameters file
# path_library2D = '../../../PhantomLibrary/models/Phantom3DLibrary.dat'
path = os.path.dirname(tomophantom.__file__)
path_library3D = os.path.join(path, "Phantom3DLibrary.dat")
#This will generate a Time x N_size x N_size x N_size phantom (4D)
phantom_tm = TomoP3D.ModelTemporal(model, N_size, path_library3D)
toc=timeit.default_timer()
Run_time = toc - tic
print("Phantom has been built in {} seconds".format(Run_time))

# Preview every time frame: central slice in each of the three orientations.
for i in range(0,np.size(phantom_tm,0)):
    sliceSel = int(0.5*N_size)  # central slice index
    #plt.gray()
    plt.figure(1)
    plt.subplot(131)
    plt.imshow(phantom_tm[i,sliceSel,:,:],vmin=0, vmax=1)
    plt.title('3D Phantom, axial view')
    plt.subplot(132)
    plt.imshow(phantom_tm[i,:,sliceSel,:],vmin=0, vmax=1)
    plt.title('3D Phantom, coronal view')
    plt.subplot(133)
    plt.imshow(phantom_tm[i,:,:,sliceSel],vmin=0, vmax=1)
    plt.title('3D Phantom, sagittal view')
    plt.show()
    plt.pause(0.3)
#%%
print ("Getting 4D projection data using TomoPhantom software")
# Projection geometry related parameters:
Horiz_det = int(np.sqrt(2)*N_size) # detector column count (horizontal)
Vert_det = N_size # detector row count (vertical) (no reason for it to be > N)
angles_num = int(0.5*np.pi*N_size); # angles number
angles = np.linspace(0.0,179.9,angles_num,dtype='float32') # in degrees
angles_rad = angles*(np.pi/180.0)
projData4D_analyt= TomoP3D.ModelSinoTemporal(model, N_size, Horiz_det, Vert_det, angles, path_library3D)
time_frames = projData4D_analyt.shape[0]
intens_max = 60
sliceSel = 150
# Display one fixed slice of the analytical projection data per time frame.
for i in range(0,time_frames):
    plt.figure(2)
    plt.subplot(131)
    plt.imshow(projData4D_analyt[i,:,sliceSel,:],vmin=0, vmax=intens_max)
    plt.title('2D Projection (analytical)')
    plt.subplot(132)
    plt.imshow(projData4D_analyt[i,sliceSel,:,:],vmin=0, vmax=intens_max)
    plt.title('Sinogram view')
    plt.subplot(133)
    plt.imshow(projData4D_analyt[i,:,:,sliceSel],vmin=0, vmax=intens_max)
    plt.title('Tangentogram view')
    plt.show()
    plt.pause(0.3)
#%%
# A capability of building a subset of vertical slices out of 4D phantom (faster)
import timeit
import os
import tomophantom
from tomophantom import TomoP3D
import matplotlib.pyplot as plt
import numpy as np

print ("Building a subset of 4D phantom using TomoPhantom software")
tic=timeit.default_timer()
model = 101
N_size = 256 # Define phantom dimensions using a scalar value
DIM_z = (94, 158) # selected vertical subset (a slab) of the phantom
path = os.path.dirname(tomophantom.__file__)
path_library3D = os.path.join(path, "Phantom3DLibrary.dat")
phantom_tm = TomoP3D.ModelTemporalSub(model, N_size, DIM_z, path_library3D)
toc=timeit.default_timer()
Run_time = toc - tic
print("Phantom has been built in {} seconds".format(Run_time))

# Preview the slab; slice indices here (32, 70) are relative to the slab.
for i in range(0,np.size(phantom_tm,0)):
    sliceSel = 32
    #plt.gray()
    plt.figure(1)
    plt.subplot(131)
    plt.imshow(phantom_tm[i,sliceSel,:,:],vmin=0, vmax=1)
    plt.title('4D Phantom, axial view')
    plt.subplot(132)
    plt.imshow(phantom_tm[i,:,70,:],vmin=0, vmax=1)
    plt.title('4D Phantom, coronal view')
    plt.subplot(133)
    plt.imshow(phantom_tm[i,:,:,70],vmin=0, vmax=1)
    plt.title('4D Phantom, sagittal view')
    plt.show()
    plt.pause(0.5)

print ("Building a subset of 4D projection data using TomoPhantom software")
Horiz_det = int(np.sqrt(2)*N_size) # detector column count (horizontal)
Vert_det = N_size # detector row count (vertical) (no reason for it to be > N)
angles_num = int(0.5*np.pi*N_size); # angles number
angles = np.linspace(0.0,179.9,angles_num,dtype='float32') # in degrees
projData4D_cut = TomoP3D.ModelSinoTemporalSub(model, N_size, Horiz_det, Vert_det, DIM_z, angles, path_library3D)

intens_max = 45
# Display the slab's projection data, one figure per time frame.
for i in range(0,np.size(projData4D_cut,0)):
    sliceSel = 32
    #plt.gray()
    plt.figure(1)
    plt.subplot(131)
    plt.imshow(projData4D_cut[i,sliceSel,:,:],vmin=0, vmax=intens_max)
    plt.title('Sinogram View')
    plt.subplot(132)
    plt.imshow(projData4D_cut[i,:,sliceSel,:],vmin=0, vmax=intens_max)
    plt.title('Projection view')
    plt.subplot(133)
    plt.imshow(projData4D_cut[i,:,:,sliceSel],vmin=0, vmax=intens_max)
    plt.title('Tangentogram view')
    plt.show()
    plt.pause(0.5)
#%%
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = useLinkProps;
var _core = require("@react-navigation/core");
var React = _interopRequireWildcard(require("react"));
var _reactNative = require("react-native");
var _useLinkTo = _interopRequireDefault(require("./useLinkTo"));
// Babel CJS interop: wrap a plain CommonJS export object as { default: ... }.
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
// Lazily-created WeakMap caches for wildcard-imported modules (one cache per interop mode).
function _getRequireWildcardCache(nodeInterop) { if (typeof WeakMap !== "function") return null; var cacheBabelInterop = new WeakMap(); var cacheNodeInterop = new WeakMap(); return (_getRequireWildcardCache = function (nodeInterop) { return nodeInterop ? cacheNodeInterop : cacheBabelInterop; })(nodeInterop); }
// Babel `import * as ns` interop: copy own properties (preserving getter/setter descriptors) onto a namespace object, with caching.
function _interopRequireWildcard(obj, nodeInterop) { if (!nodeInterop && obj && obj.__esModule) { return obj; } if (obj === null || typeof obj !== "object" && typeof obj !== "function") { return { default: obj }; } var cache = _getRequireWildcardCache(nodeInterop); if (cache && cache.has(obj)) { return cache.get(obj); } var newObj = {}; var hasPropertyDescriptor = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var key in obj) { if (key !== "default" && Object.prototype.hasOwnProperty.call(obj, key)) { var desc = hasPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : null; if (desc && (desc.get || desc.set)) { Object.defineProperty(newObj, key, desc); } else { newObj[key] = obj[key]; } } } newObj.default = obj; if (cache) { cache.set(obj, newObj); } return newObj; }
/**
 * Hook to get props for an anchor tag so it can work with in page navigation.
 *
 * @param props.to Absolute path to screen (e.g. `/feeds/hot`).
 * @param props.action Optional action to use for in-page navigation. By default, the path is parsed to an action based on linking config.
 */
function useLinkProps({
  to,
  action
}) {
  // Nearest container ref and navigator helpers, when mounted inside a navigator.
  const root = React.useContext(_core.NavigationContainerRefContext);
  const navigation = React.useContext(_core.NavigationHelpersContext);
  const linkTo = (0, _useLinkTo.default)();
  // Press handler: decide whether to intercept the click for in-page
  // navigation or let the browser perform a normal anchor navigation.
  const onPress = e => {
    var _e$currentTarget;
    let shouldHandle = false;
    if (_reactNative.Platform.OS !== 'web' || !e) {
      shouldHandle = e ? !e.defaultPrevented : true;
    } else if (!e.defaultPrevented && // onPress prevented default
    // @ts-expect-error: these properties exist on web, but not in React Native
    !(e.metaKey || e.altKey || e.ctrlKey || e.shiftKey) && ( // ignore clicks with modifier keys
    // @ts-expect-error: these properties exist on web, but not in React Native
    e.button == null || e.button === 0) && // ignore everything but left clicks
    // @ts-expect-error: these properties exist on web, but not in React Native
    [undefined, null, '', 'self'].includes((_e$currentTarget = e.currentTarget) === null || _e$currentTarget === void 0 ? void 0 : _e$currentTarget.target) // let browser handle "target=_blank" etc.
    ) {
      e.preventDefault();
      shouldHandle = true;
    }
    if (shouldHandle) {
      if (action) {
        // Explicit action given: dispatch on the closest navigator,
        // falling back to the root container.
        if (navigation) {
          navigation.dispatch(action);
        } else if (root) {
          root.dispatch(action);
        } else {
          throw new Error("Couldn't find a navigation object. Is your component inside NavigationContainer?");
        }
      } else {
        // No action: resolve the `to` path through the linking config.
        linkTo(to);
      }
    }
  };
  // `href` gives real anchor semantics on web; onPress handles in-page nav.
  return {
    href: to,
    accessibilityRole: 'link',
    onPress
  };
}
//# sourceMappingURL=useLinkProps.js.map |
<filename>src/features/placeOrder/__tests__/helpers-tests.ts
import { shouldShowLimitOrderPriceWarning } from '../helpers';
describe('(place order) feature', () => {
  describe('shouldShowLimitOrderPriceWarning helper', () => {
    describe('Buy form', () => {
      test('Should show warning', () => {
        // Buy prices well above the reference should warn (exact threshold
        // is the helper's concern, not this test's).
        const cases = [[1.05, 1], [2, 1]];
        for (const [price, reference] of cases) {
          expect(shouldShowLimitOrderPriceWarning(price, reference, 'buy')).toEqual(true);
        }
      });
      test('Shouldnt show warning', () => {
        const cases = [[0.9, 1], [0, 1], [1, 1], [1.02, 1]];
        for (const [price, reference] of cases) {
          expect(shouldShowLimitOrderPriceWarning(price, reference, 'buy')).toEqual(false);
        }
      });
    });
    describe('Sell form', () => {
      test('Should show warning', () => {
        // Sell prices well below the reference should warn.
        const cases = [[0.95, 1], [0, 1]];
        for (const [price, reference] of cases) {
          expect(shouldShowLimitOrderPriceWarning(price, reference, 'sell')).toEqual(true);
        }
      });
      test('Shouldnt show warning', () => {
        const cases = [[1, 1], [1.1, 1], [0.98, 1]];
        for (const [price, reference] of cases) {
          expect(shouldShowLimitOrderPriceWarning(price, reference, 'sell')).toEqual(false);
        }
      });
    });
  });
});
|
<filename>src/icons/legacy/Exclamation.tsx
// Generated by script, don't edit it please.
import createSvgIcon from '../../createSvgIcon';
import ExclamationSvg from '@rsuite/icon-font/lib/legacy/Exclamation';
// Icon component wrapping the legacy "exclamation" glyph from @rsuite/icon-font.
const Exclamation = createSvgIcon({
  as: ExclamationSvg,
  ariaLabel: 'exclamation',
  category: 'legacy',
  displayName: 'Exclamation'
});
export default Exclamation;
|
<reponame>marek-cel/fightersfs-tools
/****************************************************************************//*
* Copyright (C) 2020 <NAME>
*
* Permission is hereby granted, free of charge, to any person obtaining
* a copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom
* the Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included
* in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
* OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
* IN THE SOFTWARE.
******************************************************************************/
#ifndef MESSAGE_H
#define MESSAGE_H
////////////////////////////////////////////////////////////////////////////////
#include <vector>
#include <QDomDocument>
#include <QDomElement>
#include "Text.h"
////////////////////////////////////////////////////////////////////////////////
/** Stage message class. */
/**
 * Stage message class.
 *
 * Holds the text lines, timing, and optional HUD-pointer data of a single
 * stage message, and (de)serializes it to/from XML.
 */
class Message
{
public:

    /** List of message text lines. */
    typedef std::vector< Text > Lines;

    /**
     * Reads message from XML file.
     * @param xmlNode message XML node
     * @return returns message object on success and null on failure
     */
    static Message* readMessage( QDomElement &xmlNode );

    /** Constructor. */
    Message();

    /** Destructor. */
    virtual ~Message();

    /**
     * Saves message to XML file.
     * @param doc XML document
     * @param parentNode XML parent node
     */
    void saveMessage( QDomDocument &doc, QDomElement &parentNode );

    // Accessors; semantics of each value are documented on the matching
    // private member below.
    inline bool getOverlay() const { return m_overlay; }
    inline double getDuration() const { return m_duration; }  ///< [s]
    inline double getDelay() const { return m_delay; }        ///< [s]
    inline Lines getLines() const { return m_lines; }         ///< returns a copy of the lines
    inline bool getPointerCustom() const { return m_pointer_custom; }
    inline bool getPointerTarget() const { return m_pointer_target; }
    inline bool getPointerRpmDec() const { return m_pointer_rpm_dec; }
    inline bool getPointerRpmInc() const { return m_pointer_rpm_inc; }
    inline bool getPointerTrigger() const { return m_pointer_trigger; }
    inline double getPointerX() const { return m_pointer_x; }
    inline double getPointerY() const { return m_pointer_y; }
    inline double getPointerPhi() const { return m_pointer_phi; }
    inline int getTutorialTip() const { return m_tutorial_tip; }

    // Mutators; no validation is performed here.
    inline void setOverlay( bool overlay ) { m_overlay = overlay; }
    inline void setDuration( double duration ) { m_duration = duration; }
    inline void setDelay( double delay ) { m_delay = delay; }
    inline void setLines( const Lines &lines ) { m_lines = lines; }
    inline void setPointerCustom( bool val ) { m_pointer_custom = val; }
    inline void setPointerTarget( bool val ) { m_pointer_target = val; }
    inline void setPointerRpmDec( bool val ) { m_pointer_rpm_dec = val; }
    inline void setPointerRpmInc( bool val ) { m_pointer_rpm_inc = val; }
    inline void setPointerTrigger( bool val ) { m_pointer_trigger = val; }
    inline void setPointerX( double val ) { m_pointer_x = val; }
    inline void setPointerY( double val ) { m_pointer_y = val; }
    inline void setPointerPhi( double val ) { m_pointer_phi = val; }
    inline void setTutorialTip( int val ) { m_tutorial_tip = val; }

private:

    bool m_overlay;            ///< whether the message is shown as an overlay
    double m_duration;         ///< [s]
    double m_delay;            ///< [s]
    Lines m_lines;             ///< message text lines
    bool m_pointer_custom;     ///< custom pointer flag (position set via x/y/phi)
    bool m_pointer_target;     ///< specifies if target pointer is visible
    bool m_pointer_rpm_dec;    ///< specifies if decrease RPM pointer is visible
    bool m_pointer_rpm_inc;    ///< specifies if increase RPM pointer is visible
    bool m_pointer_trigger;    ///< specifies if trigger pointer is visible
    double m_pointer_x;        ///< custom pointer x coordinate
    double m_pointer_y;        ///< custom pointer y coordinate
    double m_pointer_phi;      ///< custom pointer rotation angle
    int m_tutorial_tip;        ///< tutorial tip identifier
};
////////////////////////////////////////////////////////////////////////////////
#endif // MESSAGE_H
|
<reponame>bogdanbebic/InverseSquareRoot
// Doxygen-generated search index fragment: each entry maps a lowercase,
// escaped search key to [display name, [page anchor, ..., namespace]].
// Machine-generated — regenerate with Doxygen instead of editing by hand.
var searchData=
[
  ['rand_5fdoubles',['rand_doubles',['../namespacetest__vectors.html#a2a784f48ab0b7172f3a3a9a52f4958b6',1,'test_vectors']]],
  ['rand_5fsmall_5fdoubles',['rand_small_doubles',['../namespacetest__vectors.html#a46b38d738b5a05dd7136f3f69ea26566',1,'test_vectors']]]
];
|
module.exports = {
"up": (conn, cb) => {
conn.query(`
CREATE TABLE news (id int NOT NULL AUTO_INCREMENT, user_id int NOT NULL, headline varchar(191) NOT NULL, description mediumtext NOT NULL, source varchar(191) NOT NULL, created datetime NOT NULL, PRIMARY KEY (id), KEY fkIdx_99 (user_id), CONSTRAINT FK_99 FOREIGN KEY fkIdx_99 (user_id) REFERENCES user (id));
INSERT INTO \`right\` (name, ident) VALUES ('Add news', 'NEWS_CREATE'), ('Edit news', 'NEWS_EDIT'), ('Delete news', 'NEWS_DELETE');
`, (err) => {
if(err) {
console.error(err);
}
cb();
})
},
"down": (conn, cb) => {
conn.query(`
DROP TABLE news;
DELETE FROM \`right\` WHERE ident IN ('NEWS_CREATE', 'NEWS_EDIT', 'NEWS_DELETE');
`, (err) => {
if(err) {
console.error(err);
}
cb();
})
},
} |
<filename>interrogations/SideNavInterrogations.ts
import SideNavElements from "../elements/SideNavElements";
export default {
  /** Resolves to true when the Settings entry is present in the side nav. */
  async checkIfSettings(): Promise<boolean> {
    const settingsEntry = SideNavElements.settings;
    return await settingsEntry.exists();
  },
};
|
// TypeScript-compiler-emitted inheritance helper (reused if already defined
// on `this`): copies the base constructor's own properties ("statics") onto
// the derived constructor, then chains d.prototype to b.prototype through an
// intermediate constructor so instanceof and constructor both work.
var __extends = this.__extends || function (d, b) {
    for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p];
    function __() { this.constructor = d; }
    __.prototype = b.prototype;
    d.prototype = new __();
};
var view = require("ui/core/view");
// Placeholder view: extends the core View and adds no behaviour of its own;
// the constructor simply forwards all arguments to the base class.
var Placeholder = (function (_super) {
    __extends(Placeholder, _super);
    function Placeholder() {
        _super.apply(this, arguments);
    }
    // Event name raised when the underlying native view should be created —
    // presumably handled by platform-specific code; confirm against callers.
    Placeholder.creatingViewEvent = "creatingView";
    return Placeholder;
})(view.View);
exports.Placeholder = Placeholder;
|
from typing import List
def is_variable_list_sorted(variable_list: List[str]) -> bool:
    """Return True if ``variable_list`` is already in ascending order.

    Empty and single-element lists are trivially sorted.
    """
    # Compare each adjacent pair instead of building a sorted copy.
    return all(a <= b for a, b in zip(variable_list, variable_list[1:]))
import * as React from 'react';
import * as ReactDOM from 'react-dom';
import Chat from './Chat';
// Application entry point: mount the Chat component into the element with
// id "root" — assumed to exist in the host HTML page (TODO confirm).
ReactDOM.render(<Chat />, document.getElementById('root'));
#ifndef TEXT_H
#define TEXT_H

/* Forward declaration; the pixel buffer type is defined elsewhere. */
struct pixbuf;

/*
 * Draw the NUL-terminated string `text` into pixel buffer `p` at pixel
 * coordinates (x, y). NOTE(review): whether (x, y) is the top-left corner
 * or the baseline, and the font/colour used, is defined by the
 * implementation — confirm in text.c.
 */
void text_draw(struct pixbuf *p, const char *text, int x, int y);

#endif
|
/*
* Copyright 2014-2020 The Ideal Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style
* license that can be found in the LICENSE file or at
* https://developers.google.com/open-source/licenses/bsd
*/
package ideal.development.values;
import ideal.library.elements.*;
import ideal.library.reflections.*;
import javax.annotation.Nullable;
import ideal.runtime.elements.*;
import ideal.runtime.logs.*;
import ideal.runtime.reflections.*;
import ideal.development.elements.*;
import ideal.development.actions.*;
import ideal.development.notifications.*;
import ideal.development.types.*;
import ideal.development.flavors.*;
/**
 * A value wrapping an invocable procedure.
 *
 * <p>Combines the {@code abstract_value} view (participates in type analysis)
 * with the {@code value_wrapper} view (wraps a backing payload of type
 * {@code T}).
 */
public interface procedure_value<T> extends abstract_value, value_wrapper<T> {

  /** Name under which this procedure is known. */
  action_name name();

  // This is needed to resolve abstract_value/value_wrapper ambiguity...
  type type_bound();

  /** The declaration that introduced this procedure, or {@code null} if none. */
  @Nullable declaration get_declaration();

  /** Whether this procedure takes an implicit {@code this} argument. */
  boolean has_this_argument();

  /**
   * Binds actual parameters to this procedure at the given source position.
   * NOTE(review): presumably the result reports binding success/failure —
   * confirm against {@code analysis_result}.
   */
  analysis_result bind_parameters(action_parameters params, analysis_context context, position pos);

  /** Executes this procedure with the given arguments in the given context. */
  entity_wrapper execute(readonly_list<entity_wrapper> args,
      execution_context the_execution_context);
}
|
<reponame>vbenincasa/ftp-filemanager
import dirIcon from '../templates/includes/sidebarDirIcon';
/**
 * Builds the list-item markup for one directory shown in the "move" modal.
 * @param {{path: string, name: string}} dir directory entry to render
 * @returns {string} HTML for a single <li> directory item
 */
function moveModalDirItem(dir) {
    // Bug fix: dir.name was interpolated into the markup unescaped, so a
    // directory name containing HTML metacharacters could inject markup into
    // the modal. The path is safe as-is because encodeURI percent-encodes
    // the characters that would break out of the attribute.
    const escapeHtml = (value) => String(value)
        .replace(/&/g, '&amp;')
        .replace(/</g, '&lt;')
        .replace(/>/g, '&gt;')
        .replace(/"/g, '&quot;');
    return `
    <li class="dir-item item" data-path="${encodeURI(dir.path)}" data-open="false">
        ${dirIcon()}
        <span class="name">${escapeHtml(dir.name)}</span>
        <ul class="sub-files"></ul>
    </li>
    `;
}
export default moveModalDirItem;
#!/bin/bash
# Reformat every *.id source file under src/ in place, using a temporary
# copy as the formatter's input.
set -e
set -u

make bin/codeGen

# Ensure the scratch directory for the temporary copy exists.
mkdir -p build/code

# Bug fix: the previous `for F in $FILES` word-split the find output, which
# breaks on paths containing whitespace. Stream NUL-delimited names instead.
find src -name '*.id' -print0 | while IFS= read -r -d '' F; do
    echo "Formatting in-place: $F"
    cp "$F" build/code/tmp.id
    bin/codeGen fmt build/code/tmp.id "$F"
done
|
# Install build/runtime dependencies for the wheel build.
# $1 — number of parallel jobs to pass to the Boost b2 build.
# NOTE(review): assumes a yum-based (CentOS 7 / manylinux-style) image with
# network access, run as root — confirm before reusing elsewhere.
function install_dependencies() {
    local PARALLEL_NUM=$1
    # need working pip3 command
    yum install -y python3-pip
    # boost only is not enough. need boost-devel which contains headers and libraries needed
    # also for armadillo, you need armadillo-devel where -devel means development env including headers and libs
    yum install -y armadillo-devel
    yum install -y atlas-devel blas lapack # needed for python3.10 to build scipy
    yum install -y yaml-cpp-devel openssl-devel libssh2-devel
    # the boost-devel in centos 7 is too old, so fetch and build Boost 1.78 from source
    yum install -y wget xz
    wget -c https://boostorg.jfrog.io/artifactory/main/release/1.78.0/source/boost_1_78_0.tar.gz
    tar -xf boost_1_78_0.tar.gz
    cd boost_1_78_0
    ./bootstrap.sh
    # Build and install only the Boost libraries the project links against.
    ./b2 install -j ${PARALLEL_NUM} --with-program_options --with-filesystem --with-system
}
# Build atomsciflow wheels for every supported CPython and repair them with
# auditwheel so they carry the platform tag in $PLAT.
# $1 — number of parallel compile jobs passed to setup.py.
# NOTE(review): $PLAT is expected to be provided by the environment (e.g. the
# manylinux container) — confirm it is set before running standalone.
function start_build() {
    local PARALLEL_NUM=$1
    # Install the build-time Python tooling into each interpreter's user site.
    for py in cp36-cp36m cp37-cp37m cp38-cp38 cp39-cp39 cp310-cp310
    do
        /opt/python/${py}/bin/pip install --user scikit-build cython pybind11[global]
    done
    cd /root/atomsciflow/
    for py in cp36-cp36m cp37-cp37m cp38-cp38 cp39-cp39 cp310-cp310
    do
        # old build might destroy current build, so remove it
        rm -rf _skbuild
        /opt/python/${py}/bin/python3 setup.py build bdist_wheel -j ${PARALLEL_NUM}
    done
    # Re-tag the linux wheels as $PLAT wheels, bundling external shared libs.
    for whl in /root/atomsciflow/dist/atomsciflow-*-linux_*.whl
    do
        auditwheel repair ${whl} --plat $PLAT -w /root/atomsciflow/dist
    done
}
# Parse -p <N>, the parallel job count forwarded to both phases.
# Bug fix: PARALLEL_NUM was previously left unset when -p was omitted,
# producing empty arguments to `b2 -j` and `setup.py -j`; default to 1.
PARALLEL_NUM=1
while getopts ":p:" arg; do
    case ${arg} in
        p)
            PARALLEL_NUM="${OPTARG}"
            ;;
    esac
done
echo ${PARALLEL_NUM}
install_dependencies ${PARALLEL_NUM}
start_build ${PARALLEL_NUM}
|
#
# Cookbook:: ccadi_geoserver
# Recipe:: default
#
# Copyright:: 2021, CCADI Project Contributors, All Rights Reserved.
require "uri"
# Extract the basename of the file a URL points at, ignoring any query
# string, e.g. "https://host/dir/pkg.tar.gz" => "pkg.tar.gz".
def filename_from_url(url)
  File.basename(URI.parse(url).path)
end
# Where source code will be stored for compilation
src_path = node["ccadi_geoserver"]["source_path"]
##################
# Preconfiguration
##################
# Enable EPEL repository
yum_package "epel-release"
# Install vim for debugging
yum_package "vim"
# Update packages
execute "yum update" do
command "yum update --assumeyes"
end
# Fix certificate bug in RHEL/CentOS
# https://blog.devgenius.io/rhel-centos-7-fix-for-lets-encrypt-change-8af2de587fe4
execute "fix certificates" do
command 'trust dump --filter "pkcs11:id=%c4%a7%b1%a4%7b%2c%71%fa%db%e1%4b%90%75%ff%c4%15%60%85%89%10" | openssl x509 | sudo tee /etc/pki/ca-trust/source/blacklist/DST-Root-CA-X3.pem'
# Fix: File.exist? — File.exists? is a deprecated alias removed in Ruby 3.2;
# the rest of this recipe already uses the supported form.
not_if { ::File.exist?("/etc/pki/ca-trust/source/blacklist/DST-Root-CA-X3.pem") }
end
execute "update root store" do
command "update-ca-trust extract"
end
# Install fontconfig for OpenJDK to have access to system fonts
# See: https://blog.adoptopenjdk.net/2021/01/prerequisites-for-font-support-in-adoptopenjdk/
yum_package %w[freetype fontconfig dejavu-sans-fonts]
# RHEL/CentOS development tools for compiling source
bash "install development tools" do
code <<-EOF
yum --assumeyes groups mark install "Development Tools"
yum --assumeyes groups mark convert "Development Tools"
yum --assumeyes groupinstall "Development Tools"
EOF
end
# Create resource to refer to in other resource notifications
service "rsyslog" do
supports [:restart]
action :nothing
end
# Install CentOS 7 messages filter for session slice messages
cookbook_file "/etc/rsyslog.d/ignore-systemd-session-slice.conf" do
source "rsyslogd/ignore-systemd-session-slice.conf"
notifies :restart, "service[rsyslog]"
end
#################
# Install OpenJDK
#################
java_home = "#{node["openjdk"]["prefix"]}/jdk-#{node["openjdk"]["version"]}"
directory node["openjdk"]["prefix"] do
recursive true
action :create
end
jdk_filename = filename_from_url(node["openjdk"]["download_url"])
# Download the JDK archive into Chef's cache; the checksum guards the download.
remote_file "#{Chef::Config["file_cache_path"]}/#{jdk_filename}" do
source node["openjdk"]["download_url"]
checksum node["openjdk"]["checksum"]
end
bash "extract JDK" do
cwd node["openjdk"]["prefix"]
code <<-EOH
tar xzf "#{Chef::Config["file_cache_path"]}/#{jdk_filename}" -C .
EOH
# Fix: File.exist? (File.exists? was removed in Ruby 3.2)
not_if { ::File.exist?(java_home) }
end
################
# Install Tomcat
################
tomcat_home = "#{node["tomcat"]["prefix"]}/apache-tomcat-#{node["tomcat"]["version"]}"
# Dedicated unprivileged account that owns and runs Tomcat.
user node["tomcat"]["user"] do
home node["tomcat"]["prefix"]
manage_home false
end
group node["tomcat"]["user"] do
members node["tomcat"]["user"]
end
directory node["tomcat"]["prefix"] do
owner node["tomcat"]["user"]
group node["tomcat"]["user"]
recursive true
action :create
end
tomcat_filename = filename_from_url(node["tomcat"]["download_url"])
remote_file "#{Chef::Config["file_cache_path"]}/#{tomcat_filename}" do
source node["tomcat"]["download_url"]
checksum node["tomcat"]["checksum"]
end
bash "extract Tomcat" do
cwd node["tomcat"]["prefix"]
user node["tomcat"]["user"]
code <<-EOH
tar xzf "#{Chef::Config["file_cache_path"]}/#{tomcat_filename}" -C .
EOH
# Fix: File.exist? (File.exists? was removed in Ruby 3.2)
not_if { ::File.exist?(tomcat_home) }
end
geoserver_data = node["geoserver"]["data_dir"]
# Comma-joined host list for GEOSERVER_CSRF_WHITELIST in the unit file below.
# (This variable is reassigned to the raw array further down, for the
# certbot/nginx sections, after the unit file has been rendered.)
domains = node["ccadi_geoserver"]["domains"].join(",")
# Run Tomcat under systemd; the gsub strips the heredoc's leading whitespace
# so the rendered unit file is flush-left, as systemd expects.
systemd_unit "tomcat.service" do
content <<-EOU.gsub(/^\s+/, '')
[Unit]
Description=Apache Tomcat Web Application Container
After=syslog.target network.target
[Service]
Type=forking
User=#{node["tomcat"]["user"]}
Group=#{node["tomcat"]["user"]}
Environment="JAVA_HOME=#{java_home}"
Environment="CATALINA_PID=#{tomcat_home}/temp/tomcat.pid"
Environment="CATALINA_HOME=#{tomcat_home}"
Environment="CATALINA_BASE=#{tomcat_home}"
Environment="CATALINA_OPTS="
Environment="GEOSERVER_CSRF_WHITELIST=#{domains}"
Environment="GEOSERVER_DATA_DIR=#{geoserver_data}"
Environment="GDAL_DATA=#{node["gdal"]["prefix"]}/share/gdal"
Environment="LD_LIBRARY_PATH=$LD_LIBRARY_PATH:#{tomcat_home}/lib"
Environment="JAVA_OPTS=-Dfile.encoding=UTF-8 -Djava.library.path=/usr/local/lib:/opt/local/lib:#{tomcat_home}/lib -Xms#{node["tomcat"]["Xms"]} -Xmx#{node["tomcat"]["Xmx"]}"
ExecStart=#{tomcat_home}/bin/startup.sh
ExecStop=/bin/kill -15 $MAINPID
[Install]
WantedBy=multi-user.target
EOU
action [:create, :enable, :start]
end
# Create resource to refer to in other resource notifications
service "tomcat" do
supports [:start, :stop, :restart]
action :nothing
end
# Remove default Tomcat applications
# Note: this will delete any future webapp that has one of
# these names.
%w(ROOT docs examples host-manager manager).each do |app|
directory "#{tomcat_home}/webapps/#{app}" do
recursive true
action :delete
end
end
# Install log rotation scripts to prevent disk filling up from
# Tomcat logs files
template "/etc/logrotate.d/tomcat.conf" do
source "logrotate.d/tomcat.conf"
variables({
log_path: "#{tomcat_home}/logs/catalina.out"
})
end
###############
# Install nginx
###############
yum_package "nginx"
# Create resource to refer to in other resource notifications
service "nginx" do
supports [:start, :stop, :restart, :reload]
action [:enable, :start]
end
# Override default nginx configuration to disable the default site
template "/etc/nginx/nginx.conf" do
source "default/nginx.conf"
notifies :restart, "service[nginx]"
end
# Set up HTTPS certificates and virtual hosts for nginx
yum_package %w[certbot python-certbot-nginx]
# Install self-signed certificates so nginx can start the HTTPS virtual host
selfsigned_certificate_path = "/etc/ssl/certs/fake-geoserver.ccadi.gswlab.ca.crt"
bash "create self-signed certificate" do
code "/etc/ssl/certs/make-dummy-cert #{selfsigned_certificate_path}"
not_if { ::File.exist?(selfsigned_certificate_path) }
end
# Create directory for holding ACME challenge files
directory node["certbot"]["challenge_path"] do
recursive true
action :create
end
# Reassigned: from here on `domains` is the raw array, iterated per-domain.
domains = node["ccadi_geoserver"]["domains"]
# Use an attribute flag to only enable fetching HTTPS certificates in production.
# In testing, getting certificates from Let's Encrypt doesn't work as the test
# VM isn't internet-facing.
if node["certbot"]["enabled"]
domains.each do |domain|
bash "get certificate using certbot" do
code "certbot certonly \
--nginx \
--non-interactive \
--domains #{domain} \
--agree-tos \
-m #{node["certbot"]["email"]}"
end
end
end
# Install HTTP-only virtual host
template "/etc/nginx/conf.d/geoserver-http.conf" do
source "default/geoserver-http-vhost.conf"
variables({
domains: domains
})
notifies :reload, "service[nginx]"
end
# Install HTTPS-only virtual host; falls back to the self-signed certificate
# when certbot is disabled (i.e. outside production).
template "/etc/nginx/conf.d/geoserver-https.conf" do
source "default/geoserver-https-vhost.conf"
variables({
domains: domains,
selfsigned: !node["certbot"]["enabled"]
})
notifies :reload, "service[nginx]"
end
# Enable SELinux access from nginx to Tomcat
execute "Allow httpd network connections" do
command "setsebool -P httpd_can_network_connect 1"
end
##########################
# Install GDAL and support
##########################
# Set up install directory
directory "/opt/local" do
action :create
end
# Set up source directory
directory src_path do
action :create
end
# Install SQLite for Proj4
sqlite_prefix = node["sqlite"]["prefix"]
directory sqlite_prefix do
recursive true
action :create
end
sqlite_filename = filename_from_url(node["sqlite"]["download_url"])
remote_file "#{Chef::Config["file_cache_path"]}/#{sqlite_filename}" do
source node["sqlite"]["download_url"]
checksum node["sqlite"]["checksum"]
end
# NOTE(review): directory name is hard-coded and must stay in sync with the
# version in node["sqlite"]["download_url"] — confirm when bumping versions.
sqlite_src_dir = "#{src_path}/sqlite-autoconf-3360000"
bash "extract sqlite" do
cwd src_path
code <<-EOH
tar xzf "#{Chef::Config["file_cache_path"]}/#{sqlite_filename}" -C .
EOH
# Fix: File.exist? — File.exists? is a deprecated alias removed in Ruby 3.2.
not_if { ::File.exist?(sqlite_src_dir) }
end
log "Compiling SQLite, this may take a minute"
# Compile the source code for SQLite. For explanation of flags used, see:
# https://sqlite.org/compile.html
bash "compile sqlite" do
cwd sqlite_src_dir
code <<-EOH
./configure --prefix="#{sqlite_prefix}" \
CFLAGS="-g -O2 \
-DSQLITE_ENABLE_FTS5=1 \
-DSQLITE_ENABLE_GEOPOLY=1 \
-DSQLITE_ENABLE_JSON1=1 \
-DSQLITE_ENABLE_MATH_FUNCTIONS=1 \
-DSQLITE_ENABLE_RTREE=1 \
-DSQLITE_SQS=0 \
-DSQLITE_OMIT_DEPRECATED=1 \
-DSQLITE_ENABLE_UNLOCK_NOTIFY=1"
make
make install
EOH
not_if { ::File.exist?("#{sqlite_prefix}/bin/sqlite3") }
end
# Install PROJ from source
yum_package %w[libtiff libtiff-devel curl libcurl libcurl-devel]
proj_prefix = node["proj"]["prefix"]
directory proj_prefix do
recursive true
action :create
end
proj_filename = filename_from_url(node["proj"]["download_url"])
remote_file "#{Chef::Config["file_cache_path"]}/#{proj_filename}" do
source node["proj"]["download_url"]
checksum node["proj"]["checksum"]
end
# NOTE(review): hard-coded version — keep in sync with node["proj"]["download_url"].
proj_src_dir = "#{src_path}/proj-8.1.1"
bash "extract PROJ" do
cwd src_path
code <<-EOH
tar xzf "#{Chef::Config["file_cache_path"]}/#{proj_filename}" -C .
EOH
# Fix: File.exist? (File.exists? was removed in Ruby 3.2)
not_if { ::File.exist?(proj_src_dir) }
end
log "Compiling PROJ, this may take a few minutes"
# Note that PATH must be set for proj.db to compile properly.
# See: https://github.com/OSGeo/PROJ/issues/2071
bash "compile PROJ" do
cwd proj_src_dir
environment({
"MAKEFLAGS" => "-j #{node["jobs"]}",
"PATH" => "/opt/local/bin:/usr/local/bin:/usr/local/sbin:/usr/bin:/usr/sbin:/bin:/sbin",
"SQLITE3_CFLAGS" => "-I/opt/local/include",
"SQLITE3_LIBS" => "-L/opt/local/lib -lsqlite3"
})
code <<-EOH
./configure --prefix="#{proj_prefix}"
make
make install
EOH
not_if { ::File.exist?("#{proj_prefix}/bin/proj") }
end
log "Downloading PROJ data files, this may take a few minutes"
# These are helper files for datum and transformations, and we download them now rather than
# on-the-fly. They are stored in "$proj_prefix/share/proj".
execute "download PROJ data files" do
command "/opt/local/bin/projsync --system-directory --all"
end
# Install Apache Ant for Java GDAL bindings
ant_home = "#{node["ant"]["prefix"]}/apache-ant-#{node["ant"]["version"]}"
ant_filename = filename_from_url(node["ant"]["download_url"])
directory node["ant"]["prefix"] do
recursive true
action :create
end
remote_file "#{Chef::Config["file_cache_path"]}/#{ant_filename}" do
source node["ant"]["download_url"]
checksum node["ant"]["checksum"]
end
# This is a binary, so we can extract directly to the prefix
bash "extract ant archive" do
cwd node["ant"]["prefix"]
code <<-EOH
tar xzf "#{Chef::Config["file_cache_path"]}/#{ant_filename}" -C .
EOH
# Fix: File.exist? — File.exists? is a deprecated alias removed in Ruby 3.2.
not_if { ::File.exist?(ant_home) }
end
# Pull Ant's optional library dependencies into its own lib directory.
execute "Install Apache Ant library dependencies" do
command "#{ant_home}/bin/ant -f fetch.xml -Ddest=system"
cwd ant_home
environment({
"ANT_HOME" => ant_home,
"JAVA_HOME" => java_home
})
end
# Install GDAL from source
gdal_prefix = node["gdal"]["prefix"]
directory gdal_prefix do
recursive true
action :create
end
gdal_filename = filename_from_url(node["gdal"]["download_url"])
remote_file "#{Chef::Config["file_cache_path"]}/#{gdal_filename}" do
source node["gdal"]["download_url"]
checksum node["gdal"]["checksum"]
end
gdal_src_dir = "#{src_path}/gdal-#{node["gdal"]["version"]}"
bash "extract GDAL" do
cwd src_path
code <<-EOH
tar xzf "#{Chef::Config["file_cache_path"]}/#{gdal_filename}" -C .
EOH
# Fix: File.exist? (File.exists? was removed in Ruby 3.2)
not_if { ::File.exist?(gdal_src_dir) }
end
log "Compiling GDAL, this may take a few minutes"
# Build GDAL against the locally-compiled PROJ and SQLite, plus the Java
# bindings (swig/java) that the GeoServer GDAL plugin loads.
bash "compile GDAL" do
cwd gdal_src_dir
environment({
"MAKEFLAGS" => "-j #{node["jobs"]}",
"PATH" => "#{ant_home}/bin:/opt/local/bin:/usr/local/bin:/usr/local/sbin:/usr/bin:/usr/sbin:/bin:/sbin",
"ANT_HOME" => ant_home,
"JAVA_HOME" => java_home
})
code <<-EOH
./configure --prefix="#{gdal_prefix}" \
--with-proj="#{proj_prefix}" \
--with-sqlite3="#{sqlite_prefix}" \
--with-java="#{java_home}"
make
make install
cd swig/java
make
make install
EOH
not_if "#{gdal_prefix}/bin/gdal-config --version | grep -q '#{node["gdal"]["version"]}'"
end
###################
# Install GeoServer
###################
directory node["geoserver"]["prefix"] do
recursive true
action :create
end
geoserver_filename = filename_from_url(node["geoserver"]["download_url"])
remote_file "#{Chef::Config["file_cache_path"]}/#{geoserver_filename}" do
source node["geoserver"]["download_url"]
checksum node["geoserver"]["checksum"]
end
yum_package "unzip"
# Deploy the GeoServer WAR into Tomcat's webapps directory.
bash "extract GeoServer" do
cwd "#{tomcat_home}/webapps"
user node["tomcat"]["user"]
code <<-EOH
unzip -o "#{Chef::Config["file_cache_path"]}/#{geoserver_filename}" -d .
EOH
# Fix: File.exist? — File.exists? is a deprecated alias removed in Ruby 3.2.
not_if { ::File.exist?("#{tomcat_home}/webapps/geoserver.war") }
notifies :restart, "service[tomcat]"
end
###############################
# Install GeoServer GDAL Plugin
###############################
geoserver_gdal_filename = filename_from_url(node["geoserver"]["gdal_plugin"]["download_url"])
remote_file "#{Chef::Config["file_cache_path"]}/#{geoserver_gdal_filename}" do
source node["geoserver"]["gdal_plugin"]["download_url"]
checksum node["geoserver"]["gdal_plugin"]["checksum"]
end
# Extract GDAL plugin to GeoServer, waiting for Tomcat to start GeoServer
# and create the plugins directory first. If it doesn't exist within 120
# seconds, then there is probably a problem and the chef client should
# stop.
bash "extract GeoServer GDAL plugin" do
cwd node["geoserver"]["prefix"]
code <<-EOH
while ! test -d "#{tomcat_home}/webapps/geoserver/WEB-INF/lib"; do
sleep 10
echo "Waiting for GeoServer lib directory to be created"
done
rm -rf geoserver-gdal-plugin
unzip -o "#{Chef::Config["file_cache_path"]}/#{geoserver_gdal_filename}" -d geoserver-gdal-plugin
cp geoserver-gdal-plugin/*.jar "#{tomcat_home}/webapps/geoserver/WEB-INF/lib/."
cp "#{gdal_src_dir}/swig/java/gdal.jar" "#{tomcat_home}/webapps/geoserver/WEB-INF/lib/."
chown -R #{node["tomcat"]["user"]} #{tomcat_home}/webapps/geoserver/WEB-INF/lib
EOH
timeout 120
notifies :restart, "service[tomcat]"
# Fix: File.exist? (File.exists? was removed in Ruby 3.2)
not_if { ::File.exist?("#{tomcat_home}/webapps/geoserver/WEB-INF/lib/gs-gdal-#{node["geoserver"]["version"]}.jar") }
end
#####################################
# Install GeoServer Monitoring Plugin
#####################################
geoserver_monitoring_filename = filename_from_url(node["geoserver"]["monitoring_plugin"]["download_url"])
remote_file "#{Chef::Config["file_cache_path"]}/#{geoserver_monitoring_filename}" do
source node["geoserver"]["monitoring_plugin"]["download_url"]
checksum node["geoserver"]["monitoring_plugin"]["checksum"]
end
# Extract Monitoring plugin to GeoServer, waiting for Tomcat to start GeoServer
# and create the plugins directory first. If it doesn't exist within 120
# seconds, then there is probably a problem and the chef client should
# stop.
bash "extract GeoServer Monitoring plugin" do
cwd node["geoserver"]["prefix"]
code <<-EOH
while ! test -d "#{tomcat_home}/webapps/geoserver/WEB-INF/lib"; do
sleep 10
echo "Waiting for GeoServer lib directory to be created"
done
rm -rf geoserver-monitor-plugin
unzip -o "#{Chef::Config["file_cache_path"]}/#{geoserver_monitoring_filename}" -d geoserver-monitor-plugin
cp geoserver-monitor-plugin/*.jar "#{tomcat_home}/webapps/geoserver/WEB-INF/lib/."
chown -R #{node["tomcat"]["user"]} "#{tomcat_home}/webapps/geoserver/WEB-INF/lib"
EOH
# Bug fix above: this script previously ran `rm -rf geoserver-gdal-plugin`
# (a copy-paste from the GDAL plugin block), deleting the wrong extraction
# directory and leaving a stale geoserver-monitor-plugin dir from prior runs.
timeout 120
notifies :restart, "service[tomcat]"
# Fix: File.exist? — File.exists? is a deprecated alias removed in Ruby 3.2.
not_if { ::File.exist?("#{tomcat_home}/webapps/geoserver/WEB-INF/lib/gs-monitor-core-#{node["geoserver"]["version"]}.jar") }
end
#################################
# Install GeoServer NetCDF Plugin
#################################
yum_package %w[netcdf netcdf-devel netcdf-cxx netcdf-cxx-devel]
geoserver_netcdf_filename = filename_from_url(node["geoserver"]["netcdf_plugin"]["download_url"])
remote_file "#{Chef::Config["file_cache_path"]}/#{geoserver_netcdf_filename}" do
source node["geoserver"]["netcdf_plugin"]["download_url"]
checksum node["geoserver"]["netcdf_plugin"]["checksum"]
end
# Extract NetCDF plugin to GeoServer, waiting for Tomcat to start GeoServer
# and create the plugins directory first. If it doesn't exist within 120
# seconds, then there is probably a problem and the chef client should
# stop.
bash "extract GeoServer NetCDF plugin" do
cwd node["geoserver"]["prefix"]
code <<-EOH
while ! test -d "#{tomcat_home}/webapps/geoserver/WEB-INF/lib"; do
sleep 10
echo "Waiting for GeoServer lib directory to be created"
done
rm -rf geoserver-netcdf-plugin
unzip -o "#{Chef::Config["file_cache_path"]}/#{geoserver_netcdf_filename}" -d geoserver-netcdf-plugin
cp geoserver-netcdf-plugin/*.jar "#{tomcat_home}/webapps/geoserver/WEB-INF/lib/."
chown -R #{node["tomcat"]["user"]} "#{tomcat_home}/webapps/geoserver/WEB-INF/lib"
EOH
timeout 120
notifies :restart, "service[tomcat]"
# Fix: File.exist? (File.exists? was removed in Ruby 3.2)
not_if { ::File.exist?("#{tomcat_home}/webapps/geoserver/WEB-INF/lib/gs-netcdf-#{node["geoserver"]["version"]}.jar") }
end
##############################
# Install GeoServer WPS Plugin
##############################
geoserver_wps_filename = filename_from_url(node["geoserver"]["wps_plugin"]["download_url"])
remote_file "#{Chef::Config["file_cache_path"]}/#{geoserver_wps_filename}" do
source node["geoserver"]["wps_plugin"]["download_url"]
checksum node["geoserver"]["wps_plugin"]["checksum"]
end
# Extract WPS plugin to GeoServer, waiting for Tomcat to start GeoServer
# and create the plugins directory first. If it doesn't exist within 120
# seconds, then there is probably a problem and the chef client should
# stop.
bash "extract GeoServer WPS plugin" do
cwd node["geoserver"]["prefix"]
code <<-EOH
while ! test -d "#{tomcat_home}/webapps/geoserver/WEB-INF/lib"; do
sleep 10
echo "Waiting for GeoServer lib directory to be created"
done
rm -rf geoserver-wps-plugin
unzip -o "#{Chef::Config["file_cache_path"]}/#{geoserver_wps_filename}" -d geoserver-wps-plugin
cp geoserver-wps-plugin/*.jar "#{tomcat_home}/webapps/geoserver/WEB-INF/lib/."
chown -R #{node["tomcat"]["user"]} "#{tomcat_home}/webapps/geoserver/WEB-INF/lib"
EOH
timeout 120
notifies :restart, "service[tomcat]"
# Fix: File.exist? — File.exists? is a deprecated alias removed in Ruby 3.2.
not_if { ::File.exist?("#{tomcat_home}/webapps/geoserver/WEB-INF/lib/gs-wps-#{node["geoserver"]["version"]}.jar") }
end
##############################
# Install GeoServer CSW Plugin
##############################
geoserver_csw_filename = filename_from_url(node["geoserver"]["csw_plugin"]["download_url"])
remote_file "#{Chef::Config["file_cache_path"]}/#{geoserver_csw_filename}" do
source node["geoserver"]["csw_plugin"]["download_url"]
checksum node["geoserver"]["csw_plugin"]["checksum"]
end
# Extract CSW plugin to GeoServer, waiting for Tomcat to start GeoServer
# and create the plugins directory first. If it doesn't exist within 120
# seconds, then there is probably a problem and the chef client should
# stop.
bash "extract GeoServer CSW plugin" do
cwd node["geoserver"]["prefix"]
code <<-EOH
while ! test -d "#{tomcat_home}/webapps/geoserver/WEB-INF/lib"; do
sleep 10
echo "Waiting for GeoServer lib directory to be created"
done
rm -rf geoserver-csw-plugin
unzip -o "#{Chef::Config["file_cache_path"]}/#{geoserver_csw_filename}" -d geoserver-csw-plugin
cp geoserver-csw-plugin/*.jar "#{tomcat_home}/webapps/geoserver/WEB-INF/lib/."
chown -R #{node["tomcat"]["user"]} "#{tomcat_home}/webapps/geoserver/WEB-INF/lib"
EOH
timeout 120
notifies :restart, "service[tomcat]"
# Fix: File.exist? (File.exists? was removed in Ruby 3.2)
not_if { ::File.exist?("#{tomcat_home}/webapps/geoserver/WEB-INF/lib/gs-csw-#{node["geoserver"]["version"]}.jar") }
end
######################
# Set up tomcat-native
######################
tomcat_native_home = "#{node["tomcat"]["prefix"]}/tomcat-native-#{node["tomcat-native"]["version"]}-src"
tomcat_native_filename = filename_from_url(node["tomcat-native"]["download_url"])
remote_file "#{Chef::Config["file_cache_path"]}/#{tomcat_native_filename}" do
source node["tomcat-native"]["download_url"]
checksum node["tomcat-native"]["checksum"]
end
bash "extract tomcat-native" do
cwd node["tomcat"]["prefix"]
user node["tomcat"]["user"]
code <<-EOH
tar xzf "#{Chef::Config["file_cache_path"]}/#{tomcat_native_filename}" -C .
EOH
# Fix: File.exist? — File.exists? is a deprecated alias removed in Ruby 3.2.
not_if { ::File.exist?(tomcat_native_home) }
end
yum_package %w[apr-devel openssl-devel]
# Compile tomcat-native
log "Compiling tomcat-native, which may take a few minutes"
bash "compile tomcat-native" do
cwd "#{tomcat_native_home}/native"
environment({
"MAKEFLAGS" => "-j #{node["jobs"]}",
"JAVA_HOME" => java_home
})
code <<-EOH
./configure --prefix=#{tomcat_home}
make
make install
EOH
# Fix: File.exist? (File.exists? was removed in Ruby 3.2)
not_if { ::File.exist?("#{tomcat_home}/lib/libtcnative-1.so") }
notifies :restart, "service[tomcat]"
end
#####################
# Customize GeoServer
#####################
# Install new global configuration.
# The action is set to "nothing" as this should *only* be triggered after
# a fresh installation, otherwise changes made using the GeoServer web UI
# will be overwritten.
# Triggered via notification from the "copy base geoserver data directory"
# resource further down in this recipe.
template "install geoserver global configuration" do
path "#{geoserver_data}/global.xml"
source "global.xml.erb"
variables({
address: node["geoserver"]["address"],
contact: node["geoserver"]["contact"],
num_decimals: node["geoserver"]["num_decimals"],
proxy_base_url: node["geoserver"]["proxy_base_url"],
verbose: node["geoserver"]["verbose"],
verbose_exceptions: node["geoserver"]["verbose_exceptions"],
jai: node["geoserver"]["jai"]
})
notifies :restart, "service[tomcat]"
action :nothing
end
# Install the default OGC service configurations (CSW, WCS, WFS, WMS, WPS),
# only on first run (action :nothing — triggered by the data-directory copy
# below, so changes made via the GeoServer web UI are never overwritten).
# NOTE: the interpolated resource names ("install default CSW configuration",
# etc.) are identical to the previous hand-written resources and are relied
# upon by the notifies of "copy base geoserver data directory" — do not
# change them without updating those notifications.
%w[csw wcs wfs wms wps].each do |svc|
  cookbook_file "install default #{svc.upcase} configuration" do
    path "#{geoserver_data}/#{svc}.xml"
    source "geoserver/#{svc}.xml"
    notifies :restart, "service[tomcat]"
    action :nothing
  end
end
# Install new masterpw file. action :nothing — only triggered right after
# the base data directory is first copied, so later manual changes are
# never clobbered.
file "install new masterpw file" do
  path "#{geoserver_data}/security/masterpw.digest"
  content node["geoserver"]["masterpw"]
  # Keep the master password digest out of Chef logs and resource reporting.
  sensitive true
  notifies :restart, "service[tomcat]"
  action :nothing
end
# Move the default GeoServer data directory out of the Tomcat webapps
# directory. This allows it to be on another volume and persist between
# Tomcat upgrades.
# If the "new" data directory is still empty, then move over the original
# data directory. Using Chef resource notifications to stop Tomcat before
# this runs does not seem to work, and will leave a partial data directory
# behind. Instead Tomcat is stopped by systemd in the resource.
bash "copy base geoserver data directory" do
code <<-EOH
systemctl stop tomcat
sleep 5
rsync -a "#{tomcat_home}/webapps/geoserver/data" "#{node["geoserver"]["prefix"]}"
EOH
# global.xml only exists after a successful copy, so this guard makes the
# whole migration (and the first-run config notifications below) one-shot.
not_if { ::File.exist?("#{geoserver_data}/global.xml") }
notifies :restart, "service[tomcat]"
notifies :create, "template[install geoserver global configuration]"
notifies :create, "cookbook_file[install default CSW configuration]"
notifies :create, "cookbook_file[install default WCS configuration]"
notifies :create, "cookbook_file[install default WFS configuration]"
notifies :create, "cookbook_file[install default WMS configuration]"
notifies :create, "cookbook_file[install default WPS configuration]"
notifies :create, "file[install new masterpw file]"
end
# Install extra CRS definitions
# (custom EPSG codes loaded by GeoServer from user_projections/).
cookbook_file "#{geoserver_data}/user_projections/epsg.properties" do
source "geoserver/epsg.properties"
owner node["tomcat"]["user"]
group node["tomcat"]["user"]
notifies :restart, "service[tomcat]"
end
# Create directory for GeoWebCache blob store
gwc_cache_dir = node["geoserver"]["data_dir"] + "/cache"
directory gwc_cache_dir do
recursive true
owner node["tomcat"]["user"]
group node["tomcat"]["user"]
action :create
end
|
#!/bin/bash
# Run MT evaluation for the en->da language pair on the test split.
src="en"
tgt="da"
split="test"
# Quote expansions so the arguments survive word splitting/globbing intact.
bash ../../scripts/experiments/eval_mt.sh "$src" "$tgt" "$split"
|
#!/bin/bash
# NOTE: this script uses bash-only features (arrays HOSTS/PORTS, C-style
# for-loop, `source`), so the shebang must be bash, not sh — under
# /bin/sh (e.g. dash) the original script fails to parse.
# Launches insane_move_auto.py against every configured host/port in the
# background; the last instance runs in the foreground so the script blocks
# until it finishes. SIZE, LAST, COMMAND, SRC_DIR, HOSTS, PORTS come from
# common.sh.
cd "$(dirname "$0")" || exit 1
source common.sh
echo "fast random"
for ((i = 0; i < SIZE; ++i))
do
    CMD="${COMMAND} ${SRC_DIR}insane_move_auto.py ${HOSTS[$i]} ${PORTS[$i]} 50"
    if test "${i}" -eq "${LAST}"
    then
        ${CMD}
    else
        ${CMD} &
    fi
done
exit 0
|
import numpy as np
from sklearn.linear_model import LinearRegression

# Training data: house sizes (square feet) and location codes, one row per
# house. Multiple samples are used so the regression is not degenerate.
size = np.array([800, 1000, 1200, 1500]).reshape(-1, 1)
location = np.array([1, 2, 1, 3]).reshape(-1, 1)
# Target prices for each house. BUG FIX: `house_price` was referenced below
# but never defined, so the original script raised NameError at fit().
house_price = np.array([150_000, 205_000, 212_000, 310_000])
# Create data matrix with both size and location as features.
X = np.column_stack((size, location))
# Fit an ordinary least-squares linear model.
model = LinearRegression()
model.fit(X, house_price)
# Predict prices for the training inputs.
predicted_price = model.predict(X)
/**
* Copyright (C) 2017 - present by wilberding.com
*
* Please see distribution for license.
*/
package com.wilberding.brewery.lib;
import org.junit.Test;
import static org.assertj.core.api.Assertions.assertThat;
public class LinearInterpolatorTest {

    // Expected value cross-checked against the AJ Designer linear
    // interpolation calculator:
    // http://www.ajdesigner.com/phpinterpolation/linear_interpolation_equation.php#ajscroll
    @Test
    public void linear() {
        final double actual = LinearInterpolator.y(100, 141, 150, 100.3, 150);
        assertThat(actual).isEqualTo(141.054);
    }
}
|
# Rotate +arr+ clockwise (to the right) by +n+ positions, in place, and
# return it.
#
# BUG FIX: the original popped and unshifted one element at a time, so an
# empty array became [nil] (Array#pop returns nil) and the loop did O(n)
# passes. Array#rotate!(-n) handles n > arr.length and empty arrays
# correctly in a single step, preserving the original in-place mutation.
def rotate_array_clockwise(arr, n)
  return arr if arr.empty?
  arr.rotate!(-n)
end

arr = [1, 2, 3, 4, 5]
n = 2
rotate_array_clockwise(arr, n) # Output [4,5,1,2,3]
/* src/parse.c */
#include "parse.h"
/* This file contains all the functions that are pertinent to parsing.
In particular, the entire recursive descent parser is hard-coded into
this file. Each "state" of the parser is represented by a function;
parsing functions call other functions depending on which tokens they
consume. All of the functions beginning with parse_ are parsing functions;
all of them consume a Parser * (among other parameters) and return an
int. A return value of 0 represents failure, and any other return value
represents success. (These functions are generally not part of the
public interface for parsers; programmers should only call the top-level
parse() function, which does a top-level parse of the entire input stream.)
*/
/* --- Parser lifecycle and error-reporting helpers --- */
static Parser *create_parser_from_schema_hash(ParsedSchema *, TypeHash *);
static void destroy_parser_but_not_schema_hash(Parser *);
static int assert_lexer_ok(Parser *, LexerStatus);
static int trigger_parse_error(Parser *, ParserError, char *);
/* --- Top-level dispatch and definition parsers --- */
static int toplevel_parse(Parser *);
static int parse_struct_def(Parser *);
static int parse_enum_def(Parser *);
static int handle_include(Parser *);
static int parse_extension(Parser *);
static int parse_forward(Parser *);
static ParsedStruct *new_hashed_struct(Parser *, char *);
static ParsedEnum *new_hashed_enum(Parser *, char *);
static int parse_forward_struct(Parser *);
static int parse_forward_enum(Parser *);
static int parse_extend_struct(Parser *);
static int parse_extend_enum(Parser *);
/* --- Element-level parsers and token utilities --- */
static int parse_structure_elements(Parser *, ParsedStruct *);
static int parse_enum_elements(Parser *, ParsedEnum *);
static int parse_structure_element(Parser *, ParsedStruct *);
static int parse_type_qualifiers(Parser *, int *, int *);
static int handle_field_addition(Parser *, ParsedStruct *, char *,
TypeHashBucket *, int, int);
static int include_file(Parser *, char *);
static int expect_token(Parser *, Token);
static int unexpected_token_error(Parser *, Token);
/* =============================PUBLIC INTERFACE============================= */
/* Allocate a fresh parser with its own empty schema and type hash.
   Returns NULL on allocation failure; any partially created pieces are
   released before returning. */
Parser *create_parser(void)
{
  ParsedSchema *schema = create_parsed_schema();
  TypeHash *hash = create_typehash();
  if (schema && hash) {
    Parser *parser = create_parser_from_schema_hash(schema, hash);
    if (parser) return parser;
  }
  /* Allocation failed somewhere above: clean up whatever succeeded. */
  if (schema) destroy_parsed_schema(schema);
  if (hash) destroy_typehash(hash);
  return NULL;
}
/* Destroys the given parser and all of its attribute objects, INCLUDING
the schema and typehash. */
void destroy_parser(Parser *p)
{
destroy_parsed_schema(p->schema);
destroy_typehash(p->hash);
/* Frees the lexer, error buffer, and the parser struct itself. */
destroy_parser_but_not_schema_hash(p);
return;
}
/* Binds the given parser to a new stream. This function is useful primarily
for retaining the schema information of a previous parse and adding new
data onto the previous schema definition. If you do not wish to retain
the schema information from a previous parse, create a new parser
rather than rebinding an existing one.
Returns 1 on success, 0 if the new lexer could not be allocated (in which
case the parser is left with no lexer — its old one has been destroyed).
*/
int bind_parser(Parser *p, FILE *stream, char *filename)
{
Lexer *lex;
if (p->lex) destroy_lexer(p->lex);
lex = create_lexer(stream, filename);
if (!lex) return 0;
p->lex = lex;
return 1;
}
/* Finalize the output for the given parser. Call once after all inputs
have been parsed; delegates entirely to finalize_schema. */
void finalize_parser(Parser *p)
{
finalize_schema(p->schema);
return;
}
/* Run the given parser to completion, handling any errors that
may occur along the way. Returns 1 on success, 0 on error (see
diagnose_parse_error). The stack-depth check guards against runaway
recursion through cyclic include files. */
int parse(Parser *p)
{
if (p->stack >= PARSER_MAX_STACK)
return trigger_parse_error(p, PARSE_STACK_OVERFLOW, NULL);
return toplevel_parse(p);
}
/* Write a message to stderr detailing the parse error that was encountered.
   Reads the error code set by trigger_parse_error and, for token errors,
   re-reads the offending token (which unexpected_token_error pushed back)
   to produce a specific message.
   NOTE(review): the field name `errno` collides with the identifier the C
   standard reserves for <errno.h>'s macro; this only compiles because
   errno.h is not included here — renaming the field in parse.h would be
   safer. Left unchanged to preserve the public struct layout. */
void diagnose_parse_error(Parser *p)
{
  Token tok;
  fprintf(stderr, "The following parser error was encountered in file %s:\n",
          p->lex->filename);
  switch (p->errno) {
  case PARSE_MEM_ERROR:
    fprintf(stderr, "There was a memory error; please try again.\n");
    return;
  case PARSE_LEX_ERROR:
    fprintf(stderr, "A lexer error was encountered.\n");
    diagnose_lexer_error(p->lex);
    return;
  case PARSE_UNEXPECTED_TOKEN:
    fprintf(stderr, "An unexpected token was encountered around line %ld:\n",
            p->lex->line_no);
    /* The offending token was pushed back by unexpected_token_error, so
       re-reading it here is safe and tells us exactly what it was. */
    (void)next_token(p->lex, &tok);
    switch (tok) {
    case TOKEN_LPAR:
      fprintf(stderr, "Unexpected `(`.\n");
      return;
    case TOKEN_RPAR:
      fprintf(stderr, "Unexpected `)`.\n");
      return;
    case TOKEN_FORWARD:
      fprintf(stderr, "Unexpected `@`.\n");
      return;
    case TOKEN_LIST:
      fprintf(stderr, "Unexpected `[]`.\n");
      return;
    case TOKEN_NULLABLE:
      fprintf(stderr, "Unexpected `?`.\n");
      return;
    case TOKEN_COMMA:
      fprintf(stderr, "Unexpected `,`.\n");
      return;
    case TOKEN_STRING:
      fprintf(stderr, "Unexpected string: %s\n", p->lex->buffer);
      return;
    case TOKEN_SYMBOL:
      fprintf(stderr, "Unexpected symbol: %s\n", p->lex->buffer);
      return;
    default:
      return;
    }
  case PARSE_UNEXPECTED_EOF:
    fprintf(stderr, "An unexpected EOF was encountered around line %ld.\n",
            p->lex->line_no);
    return;
  case PARSE_REDUNDANT_SYMBOL:
    /* Fixed: stray double semicolon after this call in the original. */
    fprintf(stderr, "Attempt to define predefined symbol %s around line %ld.\n",
            p->errbuf, p->lex->line_no);
    return;
  case PARSE_UNDEF_SYMBOL:
    fprintf(stderr, "Unrecognized symbol %s around line %ld.\n",
            p->errbuf, p->lex->line_no);
    return;
  case PARSE_INVALID_QUALIFIER:
    fprintf(stderr, "Invalid qualifier on type %s around line %ld.\n",
            p->errbuf, p->lex->line_no);
    return;
  case PARSE_INVALID_TYPE:
    fprintf(stderr, "Invalid type %s around line %ld.\n",
            p->errbuf, p->lex->line_no);
    return;
  case PARSE_IO_ERROR:
    fprintf(stderr, "Unable to open file %s around line %ld.\n",
            p->errbuf, p->lex->line_no);
    return;
  case PARSE_STACK_OVERFLOW:
    /* Fixed: "occured" -> "occurred" in the user-facing message. */
    fprintf(stderr,
            "A stack overflow occurred. Please ensure you are not doing any cyclic\n"
            "compilation and try again.\n");
    return;
  default:
    return;
  }
}
/* =============================STATIC FUNCTIONS============================= */
/* Creates a parser from a given ParsedSchema object and TypeHash.
When the new parser is bound and run, it will append its parse information to
the schema and TypeHash; for example, if the schema has 3 structures, and
the parser is bound to an input file with 5 more structure definitions, then
all 8 structures will be captured in the ParsedSchema and TypeHash after
the parse has completed. This function is therefore useful to add information
to a ParsedSchema without losing another parser's state.
Returns NULL on allocation failure; ownership of schema/hash stays with
the caller in that case.
*/
static Parser *create_parser_from_schema_hash(ParsedSchema *schema,
TypeHash *hash)
{
Parser *ret = (Parser*)malloc(sizeof *ret);
if (!ret) return NULL;
ret->schema = schema;
ret->hash = hash;
ret->lex = NULL;
ret->errbuf = NULL;
ret->stack = 0;
return ret;
}
/* Destroys the given parser and its lexer. Does not destroy the schema
or typehash associated with the parser, so they can be reused later in
another parser.
*/
static void destroy_parser_but_not_schema_hash(Parser *p)
{
if (p->lex) destroy_lexer(p->lex);
if (p->errbuf) free(p->errbuf);
free(p);
return;
}
/* Returns 1 if the lexer returned a status of "OK", and 0 otherwise. If
the lexer encountered an error or the end of the stream, it also sets the
error code in the parser appropriately. This function is appropriate for
use in any non-top-level parsing helper function, as an EOF is never
appropriate when we're not at top level.
*/
static int assert_lexer_ok(Parser *p, LexerStatus status)
{
switch (status) {
case LEXER_DONE:
/* EOF mid-definition is always an error below top level. */
return trigger_parse_error(p, PARSE_UNEXPECTED_EOF, NULL);
case LEXER_ERROR:
return trigger_parse_error(p, PARSE_LEX_ERROR, NULL);
case LEXER_OK:
return 1;
default:
return 0;
}
}
/* This parse function immediately triggers a parse error. The error code will
   be stored in the errno field of the parser, and the msg parameter (if
   it is not NULL) will be copied into the error buffer. This function
   always returns 0 (since it always triggers an error).
   The msg parameter will be copied over to the heap. If the copy fails, the
   error will be silently converted into a memory error. (This is to ensure
   that particular error codes will always have access to the error buffer if
   it is expected.)
*/
static int trigger_parse_error(Parser *p, ParserError err, char *msg)
{
  /* MEMORY-LEAK FIX: release any buffer left by a previous trigger on this
     parser; the original overwrote the pointer, leaking the old copy when
     an error was raised more than once before destruction. */
  if (p->errbuf) {
    free(p->errbuf);
    p->errbuf = NULL;
  }
  if (msg) {
    p->errbuf = util_strdup(msg);
    p->errno = p->errbuf ? err : PARSE_MEM_ERROR;
  } else {
    p->errno = err;
  }
  return 0;
}
/* This function handles parsing at the "top level"; that is, the parser
will be in this function when it's not in the middle of parsing a
definition. This function will parse the entire file in order and will
return once the end of the input stream has been reached (or the lexer
encounters an error).
Legal incoming tokens:
- @, which indicates we should move to the forward declaration handler
- Any of the symbols "enum", "struct", "include", and "extend", which
indicates we should move to their respective handlers. Any other symbol
is not valid.
- EOF, which indicates that the parse has finished.
Any other token is a parse error.
*/
static int toplevel_parse(Parser *p)
{
int result;
LexerStatus status;
Token tok;
while (1) {
status = next_token(p->lex, &tok);
switch (status) {
case LEXER_DONE:
/* EOF is only legal here, at top level: a successful parse. */
return 1;
case LEXER_ERROR:
p->errno = PARSE_LEX_ERROR;
return 0;
case LEXER_OK:
break;
}
switch (tok) {
case TOKEN_SYMBOL:
/* Dispatch on the keyword just read; its text is in the lexer buffer. */
if (!strcmp(p->lex->buffer, "struct"))
result = parse_struct_def(p);
else if (!strcmp(p->lex->buffer, "enum"))
result = parse_enum_def(p);
else if (!strcmp(p->lex->buffer, "include"))
result = handle_include(p);
else if (!strcmp(p->lex->buffer, "extend"))
result = parse_extension(p);
else result = unexpected_token_error(p, tok);
break;
case TOKEN_FORWARD:
result = parse_forward(p);
break;
default:
result = unexpected_token_error(p, tok);
}
if (!result) return 0;
}
}
/* This function handles parsing AFTER we have seen a struct keyword at top
level. The function will return after the closing parenthesis is
encountered, leaving us at the top level once more.
*/
static int parse_struct_def(Parser *p)
{
ParsedStruct *strct;
TypeHashBucket *bucket;
/* Get structure name */
if (!expect_token(p, TOKEN_SYMBOL)) return 0;
if ((bucket = get_type(p->hash, p->lex->buffer))) {
/* We raise an error if there is a predefined type in the schema that
A) is not a struct or B) is a struct, but has at least one defined field
(an empty struct here means it was only forward-declared, which is fine)
*/
if (bucket->tu.tag != TYPE_STRUCT ||
bucket->tu.type.strct->num_scalars > 0 ||
bucket->tu.type.strct->num_children > 0)
return trigger_parse_error(p, PARSE_REDUNDANT_SYMBOL, p->lex->buffer);
else
strct = bucket->tu.type.strct;
} else {
strct = new_hashed_struct(p, p->lex->buffer);
if (!strct) return 0;
}
return parse_structure_elements(p, strct);
}
/* This function handles parsing AFTER we have seen an enum keyword at top
level. The function will return after the closing parenthesis is
encountered, leaving us at the top level once more.
*/
static int parse_enum_def(Parser *p)
{
ParsedEnum *enm;
TypeHashBucket *bucket;
/* Get enumeration name */
if (!expect_token(p, TOKEN_SYMBOL)) return 0;
if ((bucket = get_type(p->hash, p->lex->buffer))) {
/* As above, we raise an error if there is a predefined type in the schema
that is not an enum or is a non-empty enum (empty = forward-declared) */
if (bucket->tu.tag != TYPE_ENUM ||
bucket->tu.type.enm->num_values > 0)
return trigger_parse_error(p, PARSE_REDUNDANT_SYMBOL, p->lex->buffer);
else
enm = bucket->tu.type.enm;
} else {
enm = new_hashed_enum(p, p->lex->buffer);
if (!enm) return 0;
}
return parse_enum_elements(p, enm);
}
/* This function handles parsing AFTER we have seen an include keyword at top
level. The function will return after the closing parenthesis is
encountered, leaving us at the top level once more.
Expects: '(' STRING (',' STRING)* ')'. Each string is parsed as a file
via include_file, which merges its definitions into this parser's schema.
*/
static int handle_include(Parser *p)
{
Token tok;
if (!expect_token(p, TOKEN_LPAR)) return 0;
/* Comma-separated list of elements */
while (1) {
if (!expect_token(p, TOKEN_STRING)) return 0;
if (!include_file(p, p->lex->buffer)) return 0;
/* Either a comma or a right parenthesis will follow */
if (!assert_lexer_ok(p, next_token(p->lex, &tok))) return 0;
if (tok == TOKEN_RPAR) return 1;
else if (tok != TOKEN_COMMA) return unexpected_token_error(p, tok);
else continue;
}
}
/* Dispatcher for the "extend" keyword seen at top level. The next symbol
   chooses the concrete handler: "struct" or "enum"; anything else is an
   unexpected-token error. The handler consumes the full definition body,
   returning us to top level.
*/
static int parse_extension(Parser *p)
{
  if (!expect_token(p, TOKEN_SYMBOL)) return 0;
  if (strcmp(p->lex->buffer, "struct") == 0)
    return parse_extend_struct(p);
  if (strcmp(p->lex->buffer, "enum") == 0)
    return parse_extend_enum(p);
  return unexpected_token_error(p, TOKEN_SYMBOL);
}
/* Dispatcher for the forward-declaration marker "@" seen at top level.
   The next symbol chooses the concrete handler: "struct" or "enum";
   anything else is an unexpected-token error. The handler consumes the
   declared name, returning us to top level.
*/
static int parse_forward(Parser *p)
{
  if (!expect_token(p, TOKEN_SYMBOL)) return 0;
  if (strcmp(p->lex->buffer, "struct") == 0)
    return parse_forward_struct(p);
  if (strcmp(p->lex->buffer, "enum") == 0)
    return parse_forward_enum(p);
  return unexpected_token_error(p, TOKEN_SYMBOL);
}
/* Create a new structure with the given name, add it to the type hash,
and return the structure. Returns NULL and sets the parser's error code
if an error occurs.
*/
static ParsedStruct *new_hashed_struct(Parser *p, char *name)
{
ParsedStruct *strct = new_struct(p->schema, name);
if (!strct) {
(void)trigger_parse_error(p, PARSE_MEM_ERROR, NULL);
return NULL;
}
if (!add_struct_to_hash(p->hash, strct->name, strct)) {
(void)trigger_parse_error(p, PARSE_MEM_ERROR, NULL);
return NULL;
}
return strct;
}
/* Create a new enumeration with the given name, add it to the type hash,
and return the enumeration. Returns NULL and sets the parser's error code
if an error occurs. (Comment fixed: previously a copy-paste of the
struct version.)
*/
static ParsedEnum *new_hashed_enum(Parser *p, char *name)
{
ParsedEnum *enm = new_enum(p->schema, name);
if (!enm) {
(void)trigger_parse_error(p, PARSE_MEM_ERROR, NULL);
return NULL;
}
if (!add_enum_to_hash(p->hash, enm->name, enm)) {
(void)trigger_parse_error(p, PARSE_MEM_ERROR, NULL);
return NULL;
}
return enm;
}
/* Parses a forward structure declaration AFTER we have encountered the
"@" symbol and the "struct". Consumes the structure name from the input
and assures the structure exists. Forward-declaring an already-known
struct is a no-op; a name bound to a non-struct type is an error.
*/
static int parse_forward_struct(Parser *p)
{
TypeHashBucket *bucket;
ParsedStruct *strct;
if (!expect_token(p, TOKEN_SYMBOL)) return 0;
bucket = get_type(p->hash, p->lex->buffer);
if (bucket) {
if (bucket->tu.tag != TYPE_STRUCT)
return trigger_parse_error(p, PARSE_INVALID_TYPE, p->lex->buffer);
else return 1;
} else {
strct = new_hashed_struct(p, p->lex->buffer);
if (!strct) return 0;
else return 1;
}
}
/* Parses a forward enum declaration AFTER we have encountered the
"@" symbol and the "enum". Consumes the enum name from the input
and assures the enum exists. Forward-declaring an already-known enum is
a no-op; a name bound to a non-enum type is an error.
*/
static int parse_forward_enum(Parser *p)
{
TypeHashBucket *bucket;
ParsedEnum *enm;
if (!expect_token(p, TOKEN_SYMBOL)) return 0;
bucket = get_type(p->hash, p->lex->buffer);
if (bucket) {
if (bucket->tu.tag != TYPE_ENUM)
return trigger_parse_error(p, PARSE_INVALID_TYPE, p->lex->buffer);
else return 1;
} else {
enm = new_hashed_enum(p, p->lex->buffer);
if (!enm) return 0;
else return 1;
}
}
/* This function handles parsing after we have seen an "extend" token and a
"struct" token. The named struct must already exist (unlike a plain
struct definition, extension never creates the type).
*/
static int parse_extend_struct(Parser *p)
{
TypeHashBucket *bucket;
if (!expect_token(p, TOKEN_SYMBOL)) return 0;
bucket = get_type(p->hash, p->lex->buffer);
if (!bucket)
return trigger_parse_error(p, PARSE_UNDEF_SYMBOL, p->lex->buffer);
if (bucket->tu.tag != TYPE_STRUCT)
return trigger_parse_error(p, PARSE_INVALID_TYPE, p->lex->buffer);
return parse_structure_elements(p, bucket->tu.type.strct);
}
/* This function handles parsing after we have seen an "extend" token and an
"enum" token. The named enum must already exist (extension never creates
the type).
*/
static int parse_extend_enum(Parser *p)
{
TypeHashBucket *bucket;
if (!expect_token(p, TOKEN_SYMBOL)) return 0;
bucket = get_type(p->hash, p->lex->buffer);
if (!bucket)
return trigger_parse_error(p, PARSE_UNDEF_SYMBOL, p->lex->buffer);
if (bucket->tu.tag != TYPE_ENUM)
return trigger_parse_error(p, PARSE_INVALID_TYPE, p->lex->buffer);
return parse_enum_elements(p, bucket->tu.type.enm);
}
/* This function parses the elements of a structure after we have determined
which structure it is (that is, after we've gotten its name). The opening
parenthesis should be waiting in the stream when this function is called
and the function returns once we encounter the closing parenthesis.
Grammar: '(' element (',' element)* ')'.
*/
static int parse_structure_elements(Parser *p, ParsedStruct *strct)
{
Token tok;
if (!expect_token(p, TOKEN_LPAR)) return 0;
/* Comma-separated list of elements */
while (1) {
if (!parse_structure_element(p, strct)) return 0;
/* Either a comma or a right parenthesis will follow */
if (!assert_lexer_ok(p, next_token(p->lex, &tok))) return 0;
if (tok == TOKEN_RPAR) return 1;
else if (tok != TOKEN_COMMA) return unexpected_token_error(p, tok);
else continue;
}
}
/* As above, this function is called after we've encountered the enumeration
name but before the left parenthesis. This function returns once we've
found the right parenthesis.
Grammar: '(' SYMBOL (',' SYMBOL)* ')'; duplicate value names are errors.
*/
static int parse_enum_elements(Parser *p, ParsedEnum *enm)
{
Token tok;
if (!expect_token(p, TOKEN_LPAR)) return 0;
/* Comma-separated list of elements */
while (1) {
if (!expect_token(p, TOKEN_SYMBOL)) return 0;
if (enum_name_collide(enm, p->lex->buffer))
return trigger_parse_error(p, PARSE_REDUNDANT_SYMBOL, p->lex->buffer);
if (!add_enumerated_value(enm, p->lex->buffer)) return 0;
/* Either a comma or a right parenthesis will follow */
if (!assert_lexer_ok(p, next_token(p->lex, &tok))) return 0;
if (tok == TOKEN_RPAR) return 1;
else if (tok != TOKEN_COMMA) return unexpected_token_error(p, tok);
else continue;
}
}
/* This function does the dirty work of parsing a single structure element
within a structure definition. For example, in the definition
struct Foo ( Int32 baz )
... this function will be called after the opening parenthesis has been
consumed from the stream, but before the structure element. This function
parses a single structure element, leaving whatever tokens immediately
follow it (in this case a right parenthesis) on the stream.
The form of a structure element is
TYPE name
where TYPE may carry the [] and ? qualifiers (see parse_type_qualifiers).
*/
static int parse_structure_element(Parser *p, ParsedStruct *strct)
{
int list, nullable;
TypeHashBucket *type;
/* Get type from type name */
if (!expect_token(p, TOKEN_SYMBOL)) return 0;
type = get_type(p->hash, p->lex->buffer);
if (!type)
return trigger_parse_error(p, PARSE_UNDEF_SYMBOL, p->lex->buffer);
/* Process type qualifiers. */
if (!parse_type_qualifiers(p, &list, &nullable)) return 0;
/* Process field name */
if (!expect_token(p, TOKEN_SYMBOL)) return 0;
return handle_field_addition(p, strct, p->lex->buffer, type, list, nullable);
}
/* Process type qualifiers. To do this, we consume one token. If it's a
? token, then the given type is not a list type (because the list
token has to come first) and we can move onto the next section. If
it's a list token, we have to check whether the list is nullable
by consuming another token. Otherwise, we have to push the token
back onto the stream in preparation for the next section.
"list" and "nullable" are boolean out parameters. If the given type is
a list, *list will be set to 1, and if the given type is nullable,
*nullable will be set to 1. The input stream will be set to the token
after the type qualifiers.
*/
static int parse_type_qualifiers(Parser *p, int *list, int *nullable)
{
Token tok;
*list = 0;
*nullable = 0;
if (!assert_lexer_ok(p, next_token(p->lex, &tok))) return 0;
switch (tok) {
case TOKEN_LIST:
*list = 1;
/* "[]?" — a nullable list; any other follower belongs to the caller. */
if (!assert_lexer_ok(p, next_token(p->lex, &tok))) return 0;
if (tok == TOKEN_NULLABLE) *nullable = 1;
else push_token(p->lex, tok);
break;
case TOKEN_NULLABLE:
*nullable = 1;
break;
default:
/* No qualifier: hand the token back for the field-name parse. */
push_token(p->lex, tok);
}
return 1;
}
/* Add one parsed field to strct, dispatching on the field's resolved type
tag and the list/nullable qualifiers. Rejects duplicate field names,
rejects "?" on bare scalars and enums, and "[]" on text. Returns 1 on
success, 0 after triggering the appropriate parse error.
NOTE(review): an unknown tag value falls through the switch and is
silently treated as success — confirm all TypeTag values are covered. */
static int handle_field_addition(Parser *p, ParsedStruct *strct, char *name,
TypeHashBucket *bucket, int list, int nullable)
{
if (struct_name_collide(strct, name))
return trigger_parse_error(p, PARSE_REDUNDANT_SYMBOL, name);
switch (bucket->tu.tag) {
case TYPE_SCALAR_BUILTIN:
if (list) {
if (!add_list_of_scalars_field(strct, name, nullable,
bucket->tu.type.scalar_builtin))
goto MemError;
} else if (nullable) {
/* A plain scalar cannot be nullable. */
goto InvalidQualifier;
} else {
if (!add_scalar_field(strct, name, bucket->tu.type.scalar_builtin))
goto MemError;
}
break;
case TYPE_ENUM:
if (list) {
if (!add_list_of_enums_field(strct, name, nullable, bucket->tu.type.enm))
goto MemError;
} else if (nullable) {
/* A plain enum cannot be nullable. */
goto InvalidQualifier;
} else {
if (!add_enum_field(strct, name, bucket->tu.type.enm))
goto MemError;
}
break;
case TYPE_STRUCT:
if (list) {
if (!add_list_of_structs_field(strct, name, nullable,
bucket->tu.type.strct))
goto MemError;
} else {
if (!add_struct_field(strct, name, nullable, bucket->tu.type.strct))
goto MemError;
}
break;
case TYPE_TEXT:
if (list) goto InvalidQualifier;
if (!add_text_field(strct, name, nullable)) goto MemError;
}
return 1;
InvalidQualifier:
return trigger_parse_error(p, PARSE_INVALID_QUALIFIER, strct->name);
MemError:
return trigger_parse_error(p, PARSE_MEM_ERROR, NULL);
}
/* Open up a new file and parse it, merging the results of the parse
   with the given parser. The included parser shares p's schema and type
   hash (so definitions merge), and inherits p->stack + 1 so cyclic
   includes eventually hit PARSE_STACK_OVERFLOW.
   RESOURCE-LEAK FIX: the original leaked `stream` when the sub-parser
   could not be allocated, and leaked both `stream` and `included` when
   bind_parser failed (in that path create_lexer never took ownership of
   the stream, so closing it here is safe). */
static int include_file(Parser *p, char *filename)
{
  Parser *included;
  int ret;
  FILE *stream = fopen(filename, "r");
  if (!stream) return trigger_parse_error(p, PARSE_IO_ERROR, filename);
  included = create_parser_from_schema_hash(p->schema, p->hash);
  if (!included) {
    fclose(stream);
    return trigger_parse_error(p, PARSE_MEM_ERROR, NULL);
  }
  included->stack = p->stack + 1;
  if (!bind_parser(included, stream, filename)) {
    fclose(stream);
    destroy_parser_but_not_schema_hash(included);
    return trigger_parse_error(p, PARSE_MEM_ERROR, filename);
  }
  ret = parse(included);
  if (!ret) {
    /* Copy the sub-parser's error before it is destroyed (trigger dups it). */
    (void)trigger_parse_error(p, included->errno, included->errbuf);
  }
  destroy_parser_but_not_schema_hash(included);
  return ret;
}
/* Require exactly the given token next on the stream. On a match, returns
   1 and leaves the lexer buffers holding the token's text, just as if the
   caller had read the token itself; on a mismatch, raises an
   unexpected-token error (which pushes the token back) and returns 0.
*/
static int expect_token(Parser *p, Token expected)
{
  Token actual;
  LexerStatus status = next_token(p->lex, &actual);
  if (!assert_lexer_ok(p, status)) return 0;
  return (actual == expected) ? 1 : unexpected_token_error(p, actual);
}
/* Raise an unexpected token error. This is a special case of error raising
where we must ensure the token goes back into the input stream; this
function will take care of that for us. (diagnose_parse_error later
re-reads the pushed-back token to report exactly what it was.)
Always returns 0.
*/
static int unexpected_token_error(Parser *p, Token tok)
{
push_token(p->lex, tok);
return trigger_parse_error(p, PARSE_UNEXPECTED_TOKEN, NULL);
}
|
def print_list(lst):
    """Print each node's data from the linked list ``lst`` on one line,
    space-separated, followed by a newline.

    Assumes nodes expose ``.data`` and ``.next`` and the list is
    terminated by ``None``.
    """
    node = lst.head
    while node is not None:
        print(node.data, end=' ')
        node = node.next
    print()
// Read the operator's selections from the panel widgets, translate them into
// a TrafficLight message, and publish it for the given traffic light ID.
// NOTE(review): the empty else branches below leave the corresponding member
// at whatever TrafficLight's default construction produced — TODO confirm
// TrafficLight zero/default-initializes color/shape/status, otherwise an
// unrecognized combo-box string publishes an indeterminate value.
void TrafficLightPublishPanel::onSetTrafficLightState()
{
const auto traffic_light_id = traffic_light_id_input_->value();
const auto color = light_color_combo_->currentText();
const auto shape = light_shape_combo_->currentText();
const auto status = light_status_combo_->currentText();
TrafficLight traffic_light;
traffic_light.confidence = traffic_light_confidence_input_->value();
// Map the color selection onto the message enum.
if (color == "RED") {
traffic_light.color = TrafficLight::RED;
} else if (color == "GREEN") {
traffic_light.color = TrafficLight::GREEN;
} else if (color == "YELLOW") {
traffic_light.color = TrafficLight::YELLOW;
} else {
// Handle invalid color input
// For example, throw an exception or set a default color
}
// Map the shape selection onto the message enum.
if (shape == "CIRCLE") {
traffic_light.shape = TrafficLight::CIRCLE;
} else if (shape == "ARROW") {
traffic_light.shape = TrafficLight::ARROW;
} else if (shape == "SQUARE") {
traffic_light.shape = TrafficLight::SQUARE;
} else {
// Handle invalid shape input
// For example, throw an exception or set a default shape
}
// Map the status selection onto the message enum.
if (status == "ON") {
traffic_light.status = TrafficLight::ON;
} else if (status == "OFF") {
traffic_light.status = TrafficLight::OFF;
} else {
// Handle invalid status input
// For example, throw an exception or set a default status
}
// Publish the traffic light's state
publishTrafficLightState(traffic_light_id, traffic_light);
}
/*
*
*/
package net.community.chest.apache.maven.helpers;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import net.community.chest.util.collection.CollectionsUtils;
/**
* <P>Copyright as per GPLv2</P>
*
* @author <NAME>.
* @since Jul 9, 2009 2:50:24 PM
*/
/**
 * Enumerates the editable fields of a {@link BuildDependencyDetails} and
 * provides uniform, null-safe read/write access to each of them.
 */
public enum DependencyFieldValue {
    /** The {@code <version>} element of the dependency. */
    VERSION {
        @Override
        public String getValue (BuildDependencyDetails d)
        {
            return d == null ? null : d.getVersion();
        }

        @Override
        public void setValue (BuildDependencyDetails d, String v)
        {
            if (d == null)
                return;
            d.setVersion(v);
        }
    },
    /** The {@code <scope>} element of the dependency. */
    SCOPE {
        @Override
        public String getValue (BuildDependencyDetails d)
        {
            return d == null ? null : d.getScope();
        }

        @Override
        public void setValue (BuildDependencyDetails d, String v)
        {
            if (d == null)
                return;
            d.setScope(v);
        }
    },
    /** The {@code <systemPath>} element of the dependency. */
    SYSTEMPATH {
        @Override
        public String getValue (BuildDependencyDetails d)
        {
            return d == null ? null : d.getSystemPath();
        }

        @Override
        public void setValue (BuildDependencyDetails d, String v)
        {
            if (d == null)
                return;
            d.setSystemPath(v);
        }
    };

    /** @return the field's value from {@code d}, or {@code null} if {@code d} is {@code null}. */
    public abstract String getValue (BuildDependencyDetails d);
    /** Stores {@code v} into the field on {@code d}; a {@code null} {@code d} is ignored. */
    public abstract void setValue (BuildDependencyDetails d, String v);

    /** Immutable view of all constants, in declaration order. */
    public static final List<DependencyFieldValue> VALUES=Collections.unmodifiableList(Arrays.asList(values()));

    /** Case-insensitive lookup by constant name; semantics per {@code CollectionsUtils.fromString}. */
    public static final DependencyFieldValue fromString (final String name)
    {
        return CollectionsUtils.fromString(VALUES, name, false);
    }
}
|
// test/maskPan.js
import test from 'ava';
const maskPan = require('../lib/maskPan');
const Iso8583 = require('../lib/8583');
// BUG FIX: ava requires every test title to be unique and errors out on
// duplicates — three tests below were all titled "should mask pan and leave
// the middle 4". Titles are now distinct.
test('should return error for an unknown mask format', t => {
  const masked = maskPan('456789345678', '***');
  t.deepEqual(masked, { error: 'unknown pan masking format' });
});
test('should mask pan and leave the first 4', t => {
  const masked = maskPan('456789345678', '4**');
  t.deepEqual(masked, '4567********');
});
test('should mask pan and leave the last 4', t => {
  const masked = maskPan('456789345678', '**4');
  t.deepEqual(masked, '********5678');
});
test('should mask pan and leave the middle 4', t => {
  const masked = maskPan('456789345678', '*4*');
  t.deepEqual(masked, '****8934****');
});
test('should mask a 15-digit pan and leave the middle digits', t => {
  const masked = maskPan('456789345678555', '*4*');
  t.deepEqual(masked, '*****93456*****');
});
// Testing on Main
test('should mask the middle digits via the Iso8583 instance method', t => {
  const isoPack = new Iso8583();
  const masked = isoPack.maskPan('456789345678555', '*4*');
  t.deepEqual(masked, '*****93456*****');
});
<filename>src/classes/structures/dist/tree/tree.js<gh_stars>10-100
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.Tree = void 0;
const branch_1 = require("./branch");
class Tree {
    /**
     * @param name name of the Tree
     * @param age age of the Tree (defaults to Infinity)
     */
    constructor(name, age = Infinity) {
        this.name = name;
        this.age = age;
        this.branches = new Map();
    }
    /**
     * @method addBranch
     * @description adds a Branch in the Tree
     * @param name name of the Branch
     * @param size size of the Branch -- NOTE(review): accepted but not
     * forwarded to the Branch constructor; confirm against branch.js
     * @return Branch
     */
    addBranch(name, size = Infinity) {
        const created = new branch_1.Branch(name, this);
        this.branches.set(name, created);
        return created;
    }
    /**
     * @method deleteBranch
     * @description delete a Branch in the Tree
     * @param name name of the Branch
     * @return void
     */
    deleteBranch(name) {
        if (this.branches.has(name)) {
            this.branches.delete(name);
            return;
        }
        throw new Error("Branch With Name: " + name + " Doesn't Exist");
    }
    /**
     * @method clear
     * @description clears the Tree
     * @return void
     */
    clear() {
        this.branches.clear();
    }
    /**
     * @method clearBranch
     * @description clears a Branch in the Tree by replacing it with a fresh one
     * @param name name of the Branch
     * @return void
     */
    clearBranch(name) {
        if (!this.branches.has(name)) {
            throw new Error("Branch With Name: " + name + " Doesn't Exist");
        }
        this.branches.set(name, new branch_1.Branch(name, this));
    }
    /**
     * @method branchCount
     * @description returns the number of branches in the tree
     * @readonly
     * @return number
     */
    get branchCount() {
        return this.branches.size;
    }
}
exports.Tree = Tree;
//# sourceMappingURL=tree.js.map |
#!/bin/bash
# Fixed shebang: the original read "#/bin/bash" (a plain comment, missing
# the "!"), so the script ran under whichever shell invoked it.
#== download libint code generator ==#
cd /home/travis
git clone --quiet https://github.com/evaleev/libint.git
#== patch libint Makefiles to remove CMake references ==#
# The patches are built line-by-line via echo in ed/diff format:
# "&>" (re)creates the patch file, the following ">>" lines append.
cd libint
echo "33c33" &> Makefile.patch
echo "< install:: all install_pkgconfig install_cmake install_inc install_data" >> Makefile.patch
echo "---" >> Makefile.patch
echo "> install:: all install_pkgconfig install_inc install_data" >> Makefile.patch
echo "43,48d42" >> Makefile.patch
echo "< endif" >> Makefile.patch
echo "< " >> Makefile.patch
echo "< ifdef cmakedir" >> Makefile.patch
echo "< install_cmake::" >> Makefile.patch
echo -e "< \t\$(INSTALL) \$(INSTALLDIROPT) \$(DESTDIR)\$(cmakedir)" >> Makefile.patch
echo -e "< \t\$(INSTALL) \$(INSTALLLIBOPT) \$(SRCTOPDIR)/FindLibint2.cmake \$(DESTDIR)\$(cmakedir)" >> Makefile.patch
patch Makefile Makefile.patch
cd src/lib
echo "2c2" &> MakeRules.in.patch
echo "< .PHONY: default export install install_inc install_pkgconfig install_cmake install_target clean oclean distclean targetclean realclean" >> MakeRules.in.patch
echo "---" >> MakeRules.in.patch
echo "> .PHONY: default export install install_inc install_pkgconfig install_target clean oclean distclean targetclean realclean" >> MakeRules.in.patch
echo "21c21" >> MakeRules.in.patch
echo "< install:: install_inc install_target install_pkgconfig install_cmake install_data" >> MakeRules.in.patch
echo "---" >> MakeRules.in.patch
echo "> install:: install_inc install_target install_pkgconfig install_data" >> MakeRules.in.patch
echo "51,56d50" >> MakeRules.in.patch
echo "< endif" >> MakeRules.in.patch
echo "< " >> MakeRules.in.patch
echo "< ifdef cmakedir" >> MakeRules.in.patch
echo "< install_cmake::" >> MakeRules.in.patch
echo -e "< \t\$(INSTALL) \$(INSTALLDIROPT) \$(DESTDIR)\$(cmakedir)" >> MakeRules.in.patch
echo -e "< \t\$(INSTALL) \$(INSTALLLIBOPT) \$(SRCTOPDIR)/FindLibint2.cmake \$(DESTDIR)\$(cmakedir)" >> MakeRules.in.patch
patch MakeRules.in MakeRules.in.patch
#== install libint ==#
cd /home/travis/libint
./configure \
  --enable-shared=yes --prefix=/home/travis/libint-install \
  --enable-1body=0 --enable-eri=0 --with-max-am=3 \
  --with-multipole-max-order=3 \
  --with-boost=${BOOST_ROOT} --enable-eri-3=no \
  --enable-eri2=no --enable-g12=no
make -j2 -s
make -j2 -s install
<gh_stars>0
const { colors: defaultColors } = require('tailwindcss/defaultTheme')
module.exports = {
  // Templates scanned so unused utility classes are purged in production.
  purge: [
    './resources/**/*.blade.php',
    './resources/**/*.js',
    './resources/**/*.vue',
  ],
  darkMode: false, // or 'media' or 'class'
  theme: {
    extend: {},
    // A top-level `colors` key replaces Tailwind's palette entirely, so
    // the defaults are spread back in before adding the project colors.
    colors: {
      ...defaultColors,
      primary: '#232931',
      secondary: '#393e46',
      semi: {
        transparent: 'rgba(0,0,0,.6)'
      }
    }
  },
  variants: {
    extend: {},
  },
  plugins: [],
}
|
#!/bin/bash
# Experimental release script
set -e
ROOT=nlvm-build-root
rm -rf $ROOT
# Make sure the nlvm binary is fresh
rm -f nlvm/nlvmr
make STATIC_LLVM=1 nlvm/nlvmr
# Copy nlvm and library files
# TODO these would go in /usr/{bin, share/Nim} normally
mkdir -p $ROOT
cp nlvm/nlvmr $ROOT/nlvm
cp -r nlvm-lib $ROOT
mkdir -p $ROOT/Nim
cd Nim
# avoid build junk
git archive --format=tar HEAD lib config compiler doc | (cd ../$ROOT/Nim && tar xf -)
cd ..
mkdir -p /usr/lib/nlvm
cp -av $ROOT/* /usr/lib/nlvm
# -f: replace a stale symlink from a previous run so the script stays
# re-runnable (matches the rm -rf idempotency above); under `set -e`,
# plain "ln -s" aborts the script when /usr/bin/nlvm already exists.
ln -sf /usr/lib/nlvm/nlvm /usr/bin/nlvm
|
package number;
/**
 * Immutable-style complex number with arithmetic, modulus/argument and
 * principal square root. Operations return new instances; the setters are
 * kept for backward compatibility with existing callers.
 */
public class Complex {
    private double re; // real part
    private double im; // imaginary part

    /** Complex "point at infinity": result of division by zero. */
    public static final Complex INFINITY = new Complex(Double.POSITIVE_INFINITY, 0.0);
    public static final Complex ONE = new Complex(1, 0);
    public static final Complex ZERO = new Complex(0, 0);
    /** The imaginary unit i. */
    public static final Complex I = new Complex(0, 1);

    /** Creates re + im*i. */
    public Complex(double re, double im) {
        this.re = re;
        this.im = im;
    }

    /** Creates the real number re + 0i. */
    public Complex(double re) {
        this.re = re;
        this.im = 0;
    }

    public double re() {
        return re;
    }

    public double im() {
        return im;
    }

    public void setRe(double re) {
        this.re = re;
    }

    public void setIm(double im) {
        this.im = im;
    }

    /** Component-wise sum: (a+bi) + (c+di) = (a+c) + (b+d)i. */
    public Complex add(Complex c) {
        return new Complex(re + c.re, im + c.im);
    }

    public Complex add(double c) {
        return new Complex(re + c, im);
    }

    public Complex sub(Complex c) {
        return new Complex(re - c.re, im - c.im);
    }

    public Complex sub(double c) {
        return new Complex(re - c, im);
    }

    /** Complex product: (a+bi)(c+di) = (ac-bd) + (ad+bc)i. */
    public Complex mult(Complex c) {
        return new Complex(re * c.re - im * c.im, re * c.im + im * c.re);
    }

    public Complex mult(double a) {
        return new Complex(re * a, im * a);
    }

    /**
     * Division via multiplication by the conjugate of {@code c}.
     * Division by zero yields {@link #INFINITY}; an infinite denominator
     * yields zero (Riemann-sphere-style conventions kept from the original).
     */
    public Complex div(Complex c) {
        double denominator = c.re * c.re + c.im * c.im;
        if (denominator == 0) {
            return INFINITY;
        } else if (denominator == Double.POSITIVE_INFINITY) {
            return new Complex(0.0, 0.0);
        }
        return new Complex(
                (re * c.re + im * c.im) / denominator,
                (im * c.re - re * c.im) / denominator);
    }

    public Complex div(double c) {
        if (c == Double.POSITIVE_INFINITY) {
            return Complex.ZERO;
        }
        return new Complex(re / c, im / c);
    }

    /** Complex conjugate: re - im*i. */
    public Complex conjugation() {
        return new Complex(re, -im);
    }

    /**
     * Modulus |z|. Uses {@link Math#hypot}, which avoids intermediate
     * overflow/underflow of re*re + im*im.
     */
    public double abs() {
        return Math.hypot(re, im);
    }

    /**
     * Argument (phase) of this number in (-pi, pi].
     * Fixed: the original used Math.atan(im/re), which is off by pi for
     * re &lt; 0 and undefined on the imaginary axis; atan2 handles all
     * four quadrants and the axes.
     */
    public double arg() {
        return Math.atan2(im, re);
    }

    @Override
    public String toString() {
        if (im >= 0)
            return "(" + re + " + " + im + "i" + ")";
        else
            return "(" + re + " - " + -im + "i" + ")";
    }

    /** True when either component is +/- infinity. */
    public boolean isInfinity() {
        return Double.isInfinite(re) || Double.isInfinite(im);
    }

    /** True when this is exactly 0 + 0i. */
    public boolean isZero() {
        return re == 0 && im == 0;
    }

    // Static convenience wrappers around the instance operations.

    public static Complex div(Complex a, Complex b) {
        return a.div(b);
    }

    public static Complex add(Complex a, Complex b) {
        return a.add(b);
    }

    public static Complex mult(Complex a, Complex b) {
        return a.mult(b);
    }

    public static Complex conjugate(Complex a) {
        return new Complex(a.re(), -1 * a.im());
    }

    public static Complex sub(Complex a, Complex b) {
        return a.sub(b);
    }

    /** Euclidean distance between this number and {@code p} in the plane. */
    public double dist(Complex p) {
        return Math.hypot(re - p.re(), im - p.im);
    }

    /** This number squared. */
    public Complex sq() {
        return this.mult(this);
    }

    public static double abs(Complex a) {
        return Math.hypot(a.re, a.im);
    }

    /**
     * Principal square root: the branch with non-negative real part (and,
     * for negative reals, the root on the positive imaginary axis).
     */
    public static Complex sqrt(Complex c) {
        if (c.im() > 0) {
            return new Complex(Math.sqrt(c.re() + Math.sqrt(c.re() * c.re() + c.im() * c.im())) / Math.sqrt(2),
                    Math.sqrt(-c.re() + Math.sqrt(c.re() * c.re() + c.im() * c.im())) / Math.sqrt(2));
        } else if (c.im() < 0) {
            return new Complex(Math.sqrt(c.re() + Math.sqrt(c.re() * c.re() + c.im() * c.im())) / Math.sqrt(2),
                    -Math.sqrt(-c.re() + Math.sqrt(c.re() * c.re() + c.im() * c.im())) / Math.sqrt(2));
        }
        if (c.re() < 0) {
            return new Complex(0.0, Math.sqrt(Math.abs(c.re())));
        }
        return new Complex(Math.sqrt(c.re()), 0.0);
    }
}
|
# Fit a regression model to car price data and print its predictions.
# NOTE(review): the original imports (`from sklearn import model`,
# `sklearn.datasets.load_data`) do not exist in scikit-learn; rewritten
# against the real API. Assumes 'car-prices.csv' contains a 'price'
# target column -- TODO confirm the actual schema.
import pandas as pd
from sklearn.linear_model import LinearRegression

# Load the data
data = pd.read_csv('car-prices.csv')
X = data.drop(columns=['price'])
y = data['price']

# Create and fit the model
model = LinearRegression().fit(X, y)

# Make predictions (on the training data, as the original intended)
predictions = model.predict(X)

# Output the results
for prediction in predictions:
    print('Predicted price of car: {0:.2f}'.format(prediction))
#!/bin/sh
# ----------------------------------------------------------------------
# h.sh
# ----------------------------------------------------------------------
# Emits a C "typedef enum" built from ${name}EnumAi.txt on stdout.
# Input is read in awk paragraph mode (RS=""): blank-line-separated
# records whose lines are, in order: enumerator name ($1), optional
# value ($2), optional doc comment ($3). A field of "~" (or empty)
# means "absent"; the first record is skipped -- presumably a header,
# TODO confirm against an actual *EnumAi.txt file.
namespace=$1
name=$2
echo 'typedef enum {'
awk 'BEGIN { FS="\n"; RS="" }
NR > 1 {
printf(" %s", $1)
if ($2 != "" && $2 != "~")
printf(" = %s", $2)
printf(",");
if ($3 != "" && $3 != "~")
printf(" //!< %s", $3)
printf("\n")
}' < ${name}EnumAi.txt
# Close the typedef, prefixing the type name with the namespace if given.
if test -n "$namespace"
then
echo '} '$namespace'_'$name';'
else
echo '} '$name';'
fi
|
//our dependencies
const express = require("express");
const path = require("path");
const htmlRoute = require("./routes/html-route");
const apiRoute = require("./routes/api-route");
///const fs = require("fs");
//express setup
const app = express();
// Fall back to 3001 for local development when PORT is not provided.
const PORT = process.env.PORT || 3001;
//boilerplate middleware data parsing
app.use(express.urlencoded({ extended: true }));
app.use(express.json());
// Serve static assets (css/js/images) from ./public.
app.use(express.static("public"));
// NOTE(review): leftover debug logging -- consider removing.
console.log(__dirname);
// Route order matters: API routes are matched before the HTML routes.
app.use(apiRoute);
app.use(htmlRoute);
//server listening on port 3001
app.listen(PORT, function(){
    console.log("Server listening on PORT:" + PORT);
});
/**
* The copyright in this software is being made available under the BSD License,
* included below. This software may be subject to other third party and contributor
* rights, including patent rights, and no such rights are granted under this license.
*
* Copyright (c) 2013, Dash Industry Forum.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation and/or
* other materials provided with the distribution.
* * Neither the name of Dash Industry Forum nor the names of its
* contributors may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS AS IS AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
* IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
* INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
/**
* Authors:
* <NAME> | National University of Singapore | <EMAIL>
* <NAME> | Ozyegin University | <EMAIL>
* <NAME> | National University of Singapore | <EMAIL>
*/
import FactoryMaker from '../../../../core/FactoryMaker';
function LoLpWeightSelector(config) {
    let targetLatency = config.targetLatency;
    let bufferMin = config.bufferMin;
    let segmentDuration = config.segmentDuration;
    let qoeEvaluator = config.qoeEvaluator;
    let instance,
        valueList,
        weightTypeCount,
        weightOptions,
        previousLatency;
    /**
     *
     * @private
     */
    function _setup() {
        _resetInitialSettings();
    }
    /**
     * Rebuilds the candidate weight table: every combination (with
     * repetition) of the values in valueList across the four weight
     * types (throughput, latency, buffer, switch).
     * @private
     */
    function _resetInitialSettings() {
        valueList = [0.2, 0.4, 0.6, 0.8, 1];
        weightTypeCount = 4;
        weightOptions = _getPermutations(valueList, weightTypeCount);
        previousLatency = 0;
    }
    /**
     * Next, at each segment boundary, ABR to input current neurons and target state (only used in Method II) to find the desired weight vector
     * @param {array} neurons
     * @param {number} currentLatency
     * @param {number} currentBuffer
     * @param {number} currentRebuffer NOTE(review): currently unused below -- confirm intent
     * @param {number} currentThroughput
     * @param {number} playbackRate
     * @return {array|number} the winning weight vector, or -1 when no
     * (neuron, weights) combination satisfied the constraints
     * @private
     */
    function findWeightVector(neurons, currentLatency, currentBuffer, currentRebuffer, currentThroughput, playbackRate) {
        let maxQoE = null;
        let winnerWeights = null;
        let winnerBitrate = null;
        let deltaLatency = Math.abs(currentLatency - previousLatency);
        // For each neuron, m
        neurons.forEach((neuron) => {
            // For each possible weight vector, z
            // E.g. For [ throughput, latency, buffer, playbackRate, QoE ]
            // Possible weightVector = [ 0.2, 0.4, 0.2, 0, 0.2 ]
            weightOptions.forEach((weightVector) => {
                // Apply weightVector to neuron, compute utility and determine winnerWeights
                // Method I: Utility based on QoE given current state
                let weightsObj = {
                    throughput: weightVector[0],
                    latency: weightVector[1],
                    buffer: weightVector[2],
                    switch: weightVector[3]
                };
                let downloadTime = (neuron.bitrate * segmentDuration) / currentThroughput;
                let nextBuffer = getNextBuffer(currentBuffer, downloadTime);
                let rebuffer = Math.max(0.00001, (downloadTime - nextBuffer));
                let wt;
                // A weight of 0 is treated as "don't care": a large fixed
                // multiplier (10) is used instead of dividing by zero.
                if (weightsObj.buffer === 0) {
                    wt = 10;
                } else {
                    wt = (1 / weightsObj.buffer);
                }
                let weightedRebuffer = wt * rebuffer;
                if (weightsObj.latency === 0) {
                    wt = 10;
                } else {
                    wt = (1 / weightsObj.latency); // inverse the weight because wt and latency should have positive relationship, i.e., higher latency = higher wt
                }
                let weightedLatency = wt * neuron.state.latency;
                let totalQoE = qoeEvaluator.calculateSingleUseQoe(neuron.bitrate, weightedRebuffer, weightedLatency, playbackRate);
                if ((maxQoE === null || totalQoE > maxQoE) && _checkConstraints(currentLatency, nextBuffer, deltaLatency)) {
                    maxQoE = totalQoE;
                    winnerWeights = weightVector;
                    winnerBitrate = neuron.bitrate;
                }
            });
        });
        // No (weights, bitrate) combination satisfied the constraints --
        // signal the caller with -1 instead of a weight vector.
        if (winnerWeights === null && winnerBitrate === null) {
            winnerWeights = -1;
        }
        previousLatency = currentLatency;
        return winnerWeights;
    }
    /**
     * Candidate admission test. NOTE(review): the first argument is named
     * nextLatency but the call site above passes the *current* latency --
     * see the in-body note about latency estimation.
     * @param {number} nextLatency
     * @param {number} nextBuffer
     * @param {number} deltaLatency
     * @return {boolean}
     * @private
     */
    function _checkConstraints(nextLatency, nextBuffer, deltaLatency) {
        // A1
        // disabled till we find a better way of estimating latency
        // fails for all with current value
        if (nextLatency > targetLatency + deltaLatency) {
            return false;
        }
        return nextBuffer >= bufferMin;
    }
    /**
     * Generates every permutation with repetition of `list` of the given
     * length (list.length ** length results).
     * @param {array} list
     * @param {number} length
     * @return {*}
     * @private
     */
    function _getPermutations(list, length) {
        // Copy initial values as arrays
        let perm = list.map(function (val) {
            return [val];
        });
        // Our permutation generator
        let generate = function (perm, length, currLen) {
            // Reached desired length
            if (currLen === length) {
                return perm;
            }
            // For each existing permutation
            let len = perm.length;
            for (let i = 0; i < len; i++) {
                let currPerm = perm.shift();
                // Create new permutation
                for (let k = 0; k < list.length; k++) {
                    perm.push(currPerm.concat(list[k]));
                }
            }
            // Recurse
            return generate(perm, length, currLen + 1);
        };
        // Start with size 1 because of initial values
        return generate(perm, length, 1);
    }
    /**
     *
     * @return {number}
     */
    function getMinBuffer() {
        return bufferMin;
    }
    /**
     *
     * @return {number}
     */
    function getSegmentDuration() {
        return segmentDuration;
    }
    /**
     * Projects the buffer level after downloading a segment of the given
     * bitrate at the given throughput.
     * @param {number} bitrateToDownload
     * @param {number} currentBuffer
     * @param {number} currentThroughput
     * @return {number}
     */
    function getNextBufferWithBitrate(bitrateToDownload, currentBuffer, currentThroughput) {
        let downloadTime = (bitrateToDownload * segmentDuration) / currentThroughput;
        return getNextBuffer(currentBuffer, downloadTime);
    }
    /**
     * Buffer drains by segmentDuration while a slow download runs, or
     * gains (segmentDuration - downloadTime) otherwise.
     * @param {number} currentBuffer
     * @param {number} downloadTime
     * @return {number}
     */
    function getNextBuffer(currentBuffer, downloadTime) {
        // NOTE(review): this local shadows the closure's `segmentDuration`;
        // same value here since getSegmentDuration() just returns it.
        const segmentDuration = getSegmentDuration();
        let nextBuffer;
        if (downloadTime > segmentDuration) {
            nextBuffer = currentBuffer - segmentDuration;
        } else {
            nextBuffer = currentBuffer + segmentDuration - downloadTime;
        }
        return nextBuffer;
    }
    instance = {
        getMinBuffer,
        getSegmentDuration,
        getNextBufferWithBitrate,
        getNextBuffer,
        findWeightVector
    };
    _setup();
    return instance;
}
LoLpWeightSelector.__dashjs_factory_name = 'LoLpWeightSelector';
export default FactoryMaker.getClassFactory(LoLpWeightSelector);
|
<reponame>awesome-archive/frame<gh_stars>10-100
'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.handlerList = exports.customKeyBindingFn = undefined;
var _draftJs = require('draft-js');
// Adapter handler: invokes the caller-supplied keyBindingFn (taken from
// the options object) on the keyboard event.
var customKeyBindingFn = exports.customKeyBindingFn = function customKeyBindingFn(e, _ref) {
  var kbFn = _ref.keyBindingFn;
  return kbFn(e);
};
// Ordered chain of key-binding handlers; the first truthy result wins.
var handlerList = exports.handlerList = [customKeyBindingFn];
// Default export: run the event through each handler (a custom handler
// list may be supplied as the third argument) and fall back to Draft.js'
// default key binding when none of them claims the event. The
// try/catch/finally boilerplate below is Babel's compilation of a
// for...of loop: it preserves early-return semantics while closing the
// iterator and re-throwing any iteration error.
exports.default = function (e, options) {
  var handlers = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : handlerList;
  var _iteratorNormalCompletion = true;
  var _didIteratorError = false;
  var _iteratorError = undefined;
  try {
    for (var _iterator = handlers[Symbol.iterator](), _step; !(_iteratorNormalCompletion = (_step = _iterator.next()).done); _iteratorNormalCompletion = true) {
      var handler = _step.value;
      var res = handler(e, options);
      if (res) {
        return res;
      }
    }
  } catch (err) {
    _didIteratorError = true;
    _iteratorError = err;
  } finally {
    try {
      if (!_iteratorNormalCompletion && _iterator.return) {
        _iterator.return();
      }
    } finally {
      if (_didIteratorError) {
        throw _iteratorError;
      }
    }
  }
  return (0, _draftJs.getDefaultKeyBinding)(e);
};
<gh_stars>0
package coreweb
// Config holds the web server's startup settings.
type Config struct {
	SslCert string `json:"sslCert"` // SslCert is the path and name of the SSL certificate file.
	SslKey string `json:"sslKey"` // SslKey is the path and name of the SSL key file.
	Port int `json:"port"` // Port is the TCP port the web server listens on.
	FromFolder string `json:"fromFolder"` // FromFolder is the folder containing web content.
	// SyncFileAccess indicates if RWMutex lock should be used when reading
	// the file cache. It should only be true while debugging when files might
	// be changing while the web server is active.
	SyncFileAccess bool
}
|
def array_average(items):
    """Return the arithmetic mean of a sequence of numbers.

    Args:
        items: A non-empty sequence of numbers.

    Returns:
        The mean as produced by Python's true division.

    Raises:
        ValueError: If ``items`` is empty. (The original raised an
            opaque ZeroDivisionError here.)
    """
    if not items:
        raise ValueError("array_average() requires a non-empty sequence")
    # sum() replaces the original manual accumulation loop.
    return sum(items) / len(items)

# Test -- the original referenced an undefined name `items`, so the
# script crashed with NameError; define a fixture matching the expected
# output below.
items = [5, 10, 15]
average = array_average(items)
# Expected output: 10.0
print(average)
package de.roamingthings.workbench.testcontainers;
import org.jetbrains.annotations.NotNull;
import org.junit.After;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.remote.DesiredCapabilities;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.boot.web.server.LocalServerPort;
import org.springframework.test.context.junit4.SpringRunner;
import org.testcontainers.containers.BrowserWebDriverContainer;
import java.io.File;
import java.net.InetAddress;
import static org.assertj.core.api.Assertions.assertThat;
import static org.springframework.boot.test.context.SpringBootTest.WebEnvironment.RANDOM_PORT;
import static org.testcontainers.containers.BrowserWebDriverContainer.VncRecordingMode.RECORD_ALL;
/**
 * End-to-end UI test: boots the Spring app on a random port and drives a
 * containerized Chrome (Testcontainers) through the welcome/greeting flow.
 */
@RunWith(SpringRunner.class)
@SpringBootTest(webEnvironment = RANDOM_PORT)
public class WelcomeTest {
    @LocalServerPort
    private int port;
    // Dockerized Chrome + WebDriver; each session is recorded to the
    // working directory for post-mortem inspection.
    @Rule
    public BrowserWebDriverContainer chrome = new BrowserWebDriverContainer()
            .withDesiredCapabilities(DesiredCapabilities.chrome())
            .withRecordingMode(RECORD_ALL, new File("."));
    @After
    public void cleanUp() {
        chrome.getWebDriver().manage().deleteAllCookies();
    }
    /** Happy path: welcome page -> enter name -> greeting page shows it. */
    @Test
    public void greetUser() throws Exception {
        //given
        WebDriver driver = chrome.getWebDriver();
        // chrome.getTestHostIpAddress() is the Testcontainers way to reach
        // the host; using the local host address instead -- TODO confirm it
        // is reachable from inside the browser container in all setups.
        InetAddress hostIpAddress = InetAddress.getLocalHost();
        //when
        WelcomePage welcomePage = goToWelcomePage(driver, hostIpAddress);
        // then
        assertThat(welcomePage.isInitialized()).isTrue();
        // given
        welcomePage.enterName("Toni");
        // when
        GreetingPage greetingPage = welcomePage.submit();
        // then
        assertThat(greetingPage.isInitialized()).isTrue();
        // and
        assertThat(greetingPage.name()).isEqualTo("Toni");
        sleepForVideoToFinish();
    }
    private void sleepForVideoToFinish() {
        // Give the VNC recorder a moment to flush before the container stops.
        try {
            Thread.sleep(100);
        } catch (InterruptedException e) {
            // Fixed: restore the interrupt flag instead of swallowing the
            // interruption with printStackTrace().
            Thread.currentThread().interrupt();
        }
    }
    @NotNull
    private WelcomePage goToWelcomePage(WebDriver driver, InetAddress hostIpAddress) {
        driver.get("http://" + hostIpAddress.getHostAddress() + ":" + port);
        return new WelcomePage(driver);
    }
}
|
#!/bin/bash
# Status-bar module: prints the current cmus playback state and track
# title, then blocks until signalled (USR1) to refresh.
check_mus() {
    if pgrep -x "cmus" >/dev/null 2>&1; then
        meta="$(cmus-remote -Q)"
        status="$(echo "$meta" | grep "status ")"
        status=${status/status /}
        title="$(echo "$meta" | grep "tag title ")"
        # NOTE(review): "status " was already stripped above, so the inner
        # ${status/status /} below is a no-op kept for byte-compatibility.
        if [[ "${status/status /}" == "stopped" ]]; then
            echo " Stopped"
        else
            # ${status^} capitalizes the first letter (e.g. "Playing").
            echo " ${status^}: ${title/tag title /}"
        fi
    else
        echo " Not Playing"
    fi
    # Sleep forever in the background and wait on it: `wait` is
    # interruptible by the USR1 trap, a foreground sleep would not be.
    sleep infinity & pid=$!
    wait
}
# On USR1: cancel the pending sleep and re-print. NOTE(review): each
# update recurses into check_mus, so the call depth grows per signal.
update(){
    kill $pid
    check_mus
}
trap 'update' USR1
check_mus
|
# CI setup: activate the Parcels conda environment and run the test and
# example-notebook suite.
export PATH="$HOME/miniconda/bin:$PATH"
source activate parcels
# macOS: point the toolchain at the Xcode SDK headers so Parcels' C
# kernels compile.
export CONDA_BUILD_SYSROOT=/
export C_INCLUDE_PATH=$C_INCLUDE_PATH:/Applications/Xcode.app/Contents//Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/usr/include/
# Fetch the example notebooks/data, then run pytest with notebook
# validation (nbval-lax), skipping the documentation notebooks.
parcels_get_examples examples/;
py.test -v -s --nbval-lax -k "not documentation" tests/ examples/;
|
package com.tracy.competition.dao
import java.util
import com.tracy.competition.domain.entity.CompetitionType
import org.apache.ibatis.annotations.Mapper
/**
 * MyBatis mapper for competition-type lookups.
 *
 * @author Tracy
 * @date 2020/11/14 17:09
 */
@Mapper
trait CompetitionTypeDao {
  /**
   * Fetch all competition types.
   *
   * @return every CompetitionType record
   */
  def findAllCompetitionType: util.List[CompetitionType]
}
|
<reponame>muehleisen/OpenStudio
/***********************************************************************************************************************
* OpenStudio(R), Copyright (c) 2008-2021, Alliance for Sustainable Energy, LLC, and other contributors. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
* following conditions are met:
*
* (1) Redistributions of source code must retain the above copyright notice, this list of conditions and the following
* disclaimer.
*
* (2) Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided with the distribution.
*
* (3) Neither the name of the copyright holder nor the names of any contributors may be used to endorse or promote products
* derived from this software without specific prior written permission from the respective party.
*
* (4) Other than as required in clauses (1) and (2), distributions in any form of modifications or other derivative works
* may not use the "OpenStudio" trademark, "OS", "os", or any other confusingly similar designation without specific prior
* written permission from Alliance for Sustainable Energy, LLC.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER(S) AND ANY CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
* INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER(S), ANY CONTRIBUTORS, THE UNITED STATES GOVERNMENT, OR THE UNITED
* STATES DEPARTMENT OF ENERGY, NOR ANY OF THEIR EMPLOYEES, BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
* USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
***********************************************************************************************************************/
#ifndef MODEL_FANCOMPONENTMODEL_HPP
#define MODEL_FANCOMPONENTMODEL_HPP
#include <model/ModelAPI.hpp>
#include "StraightComponent.hpp"
namespace openstudio {
namespace model {
class Curve;
class Schedule;
class AirflowNetworkFan;
namespace detail {
class FanComponentModel_Impl;
} // namespace detail
/** FanComponentModel is a StraightComponent that wraps the OpenStudio IDD object 'OS:Fan:ComponentModel'. */
class MODEL_API FanComponentModel : public StraightComponent
{
 public:
  /** @name Constructors and Destructors */
  //@{
  // Constructor that will instantiate all required curves. It will **not** instantiate the optional curve objects.
  // You can then call the helper method `bool assignDefaultOptionalCurves()` if that is what you want
  explicit FanComponentModel(const Model& model);
  // Explicit constructor that takes in all required curves. It will **not** instantiate the optional curve objects.
  // You can then call the helper method `bool assignDefaultOptionalCurves()` if that is what you want
  explicit FanComponentModel(const Model& model, const Curve& fanPressureRiseCurve, const Curve& ductStaticPressureResetCurve,
                             const Curve& normalizedFanStaticEfficiencyCurveNonStallRegion,
                             const Curve& normalizedFanStaticEfficiencyCurveStallRegion,
                             const Curve& normalizedDimensionlessAirflowCurveNonStallRegion,
                             const Curve& normalizedDimensionlessAirflowCurveStallRegion);
  virtual ~FanComponentModel() {}
  //@}
  static IddObjectType iddObjectType();
  // Valid string choices for the 'VFD Efficiency Type' field.
  static std::vector<std::string> vFDEfficiencyTypeValues();
  /** @name Getters */
  //@{
  Schedule availabilitySchedule() const;
  boost::optional<double> maximumFlowRate() const;
  bool isMaximumFlowRateAutosized() const;
  boost::optional<double> minimumFlowRate() const;
  bool isMinimumFlowRateAutosized() const;
  double fanSizingFactor() const;
  double fanWheelDiameter() const;
  double fanOutletArea() const;
  double maximumFanStaticEfficiency() const;
  double eulerNumberatMaximumFanStaticEfficiency() const;
  double maximumDimensionlessFanAirflow() const;
  boost::optional<double> motorFanPulleyRatio() const;
  bool isMotorFanPulleyRatioAutosized() const;
  boost::optional<double> beltMaximumTorque() const;
  bool isBeltMaximumTorqueAutosized() const;
  double beltSizingFactor() const;
  double beltFractionalTorqueTransition() const;
  double motorMaximumSpeed() const;
  boost::optional<double> maximumMotorOutputPower() const;
  bool isMaximumMotorOutputPowerAutosized() const;
  double motorSizingFactor() const;
  double motorInAirstreamFraction() const;
  std::string vFDEfficiencyType() const;
  boost::optional<double> maximumVFDOutputPower() const;
  bool isMaximumVFDOutputPowerAutosized() const;
  double vFDSizingFactor() const;
  // Required performance curves (always present; set at construction).
  Curve fanPressureRiseCurve() const;
  Curve ductStaticPressureResetCurve() const;
  Curve normalizedFanStaticEfficiencyCurveNonStallRegion() const;
  Curve normalizedFanStaticEfficiencyCurveStallRegion() const;
  Curve normalizedDimensionlessAirflowCurveNonStallRegion() const;
  Curve normalizedDimensionlessAirflowCurveStallRegion() const;
  // Optional performance curves (empty unless set explicitly or via
  // assignDefaultOptionalCurves()).
  boost::optional<Curve> maximumBeltEfficiencyCurve() const;
  boost::optional<Curve> normalizedBeltEfficiencyCurveRegion1() const;
  boost::optional<Curve> normalizedBeltEfficiencyCurveRegion2() const;
  boost::optional<Curve> normalizedBeltEfficiencyCurveRegion3() const;
  boost::optional<Curve> maximumMotorEfficiencyCurve() const;
  boost::optional<Curve> normalizedMotorEfficiencyCurve() const;
  boost::optional<Curve> vFDEfficiencyCurve() const;
  std::string endUseSubcategory() const;
  //@}
  /** @name Setters */
  //@{
  bool setAvailabilitySchedule(Schedule& schedule);
  bool setMaximumFlowRate(double maximumFlowRate);
  void autosizeMaximumFlowRate();
  bool setMinimumFlowRate(double minimumFlowRate);
  void autosizeMinimumFlowRate();
  bool setFanSizingFactor(double fanSizingFactor);
  bool setFanWheelDiameter(double fanWheelDiameter);
  bool setFanOutletArea(double fanOutletArea);
  bool setMaximumFanStaticEfficiency(double maximumFanStaticEfficiency);
  bool setEulerNumberatMaximumFanStaticEfficiency(double eulerNumberatMaximumFanStaticEfficiency);
  bool setMaximumDimensionlessFanAirflow(double maximumDimensionlessFanAirflow);
  bool setMotorFanPulleyRatio(double motorFanPulleyRatio);
  void autosizeMotorFanPulleyRatio();
  bool setBeltMaximumTorque(double beltMaximumTorque);
  void autosizeBeltMaximumTorque();
  bool setBeltSizingFactor(double beltSizingFactor);
  bool setBeltFractionalTorqueTransition(double beltFractionalTorqueTransition);
  bool setMotorMaximumSpeed(double motorMaximumSpeed);
  bool setMaximumMotorOutputPower(double maximumMotorOutputPower);
  void autosizeMaximumMotorOutputPower();
  bool setMotorSizingFactor(double motorSizingFactor);
  bool setMotorInAirstreamFraction(double motorInAirstreamFraction);
  bool setVFDEfficiencyType(const std::string& vFDEfficiencyType);
  bool setMaximumVFDOutputPower(double maximumVFDOutputPower);
  void autosizeMaximumVFDOutputPower();
  bool setVFDSizingFactor(double vFDSizingFactor);
  bool setFanPressureRiseCurve(const Curve& bivariateFunctions);
  bool setDuctStaticPressureResetCurve(const Curve& univariateFunctions);
  bool setNormalizedFanStaticEfficiencyCurveNonStallRegion(const Curve& univariateFunctions);
  bool setNormalizedFanStaticEfficiencyCurveStallRegion(const Curve& univariateFunctions);
  bool setNormalizedDimensionlessAirflowCurveNonStallRegion(const Curve& univariateFunctions);
  bool setNormalizedDimensionlessAirflowCurveStallRegion(const Curve& univariateFunctions);
  bool setMaximumBeltEfficiencyCurve(const Curve& univariateFunctions);
  void resetMaximumBeltEfficiencyCurve();
  bool setNormalizedBeltEfficiencyCurveRegion1(const Curve& univariateFunctions);
  void resetNormalizedBeltEfficiencyCurveRegion1();
  bool setNormalizedBeltEfficiencyCurveRegion2(const Curve& univariateFunctions);
  void resetNormalizedBeltEfficiencyCurveRegion2();
  bool setNormalizedBeltEfficiencyCurveRegion3(const Curve& univariateFunctions);
  void resetNormalizedBeltEfficiencyCurveRegion3();
  bool setMaximumMotorEfficiencyCurve(const Curve& univariateFunctions);
  void resetMaximumMotorEfficiencyCurve();
  bool setNormalizedMotorEfficiencyCurve(const Curve& univariateFunctions);
  void resetNormalizedMotorEfficiencyCurve();
  bool setVFDEfficiencyCurve(const Curve& univariateFunctions);
  void resetVFDEfficiencyCurve();
  bool setEndUseSubcategory(const std::string& endUseSubcategory);
  //@}
  /** @name Other */
  //@{
  // Helper that creates defaulted Optional Curves
  bool assignDefaultOptionalCurves();
  // Autosized values as reported by the last sizing run (empty before
  // a sizing run has been performed).
  boost::optional<double> autosizedMaximumFlowRate();
  boost::optional<double> autosizedMinimumFlowRate();
  boost::optional<double> autosizedMotorFanPulleyRatio();
  boost::optional<double> autosizedBeltMaximumTorque();
  boost::optional<double> autosizedMaximumMotorOutputPower();
  boost::optional<double> autosizedMaximumVFDOutputPower();
  //@}
 protected:
  /// @cond
  typedef detail::FanComponentModel_Impl ImplType;
  explicit FanComponentModel(std::shared_ptr<detail::FanComponentModel_Impl> impl);
  friend class detail::FanComponentModel_Impl;
  friend class Model;
  friend class IdfObject;
  friend class openstudio::detail::IdfObject_Impl;
  /// @endcond
 private:
  REGISTER_LOGGER("openstudio.model.FanComponentModel");
};
/** \relates FanComponentModel*/
typedef boost::optional<FanComponentModel> OptionalFanComponentModel;
/** \relates FanComponentModel*/
typedef std::vector<FanComponentModel> FanComponentModelVector;
} // namespace model
} // namespace openstudio
#endif // MODEL_FANCOMPONENTMODEL_HPP
|
# Launch the training container with GPU access and the project mounted in.
DOCKER_IMAGE=cttsai1985/tensorflow-transformers
GPU_DEVICE='all'
SHM_SIZE=2G
# Host <-> container path mappings.
RootSrcPath=${PWD}
DockerRootSrcPath=/root/src/
DataPath=${PWD}/input
DockerDataPath=/root/src/input
# Port mappings (declared for reference; not passed to `docker run` below).
RootPort1=8888
DockerRootPort1=8888
RootPort2=6666
DockerRootPort2=6666
WORKDIR="/root/src/script"
# Remove stopped containers; `xargs -r` avoids an error when there are none
# (the original `docker rm $(docker ps -a -q)` failed on an empty list).
docker ps -a -q | xargs -r docker rm
CMD="python tf_starter.py --training-augmentation --configs ../configs/distillroberta_augment_configs.py"
# BUG FIX: the echoed command previously concatenated ${WORKDIR} and
# $DOCKER_IMAGE with no space, so it did not match the command executed below.
echo run -i -t --gpus ${GPU_DEVICE} -e PYTHONPATH=/root/src -v $RootSrcPath:$DockerRootSrcPath -v $(readlink -f $DataPath):$DockerDataPath --shm-size $SHM_SIZE --workdir=${WORKDIR} $DOCKER_IMAGE $CMD
docker run -i -t --gpus ${GPU_DEVICE} -e PYTHONPATH=/root/src -v $RootSrcPath:$DockerRootSrcPath -v $(readlink -f $DataPath):$DockerDataPath --shm-size $SHM_SIZE --workdir=${WORKDIR} $DOCKER_IMAGE $CMD
|
// Base URL of the tutorial site; block help links append a page path plus
// the UTM tracking query string below.
var mainUrl = 'https://tutorials.webduino.io/zh-tw/docs/';
var utmUrl = '?utm_source=cloud-blockly&utm_medium=contextMenu&utm_campaign=tutorials';
// Block: bind a rain-sensor variable to a pin ("設定 <var> 腳位為 <value>").
Blockly.Blocks['rain_setup'] = {
  init: function () {
    var valueInput = this.appendValueInput("NAME");
    valueInput.setCheck(null);
    valueInput.appendField(Blockly.Msg.WEBDUINO_RAIN_SET, "設定");
    valueInput.appendField(new Blockly.FieldVariable("rain"), "rain");
    valueInput.appendField(Blockly.Msg.WEBDUINO_RAIN_PIN, "腳位為");
    // Statement block: connects above and below in a sequence.
    this.setPreviousStatement(true, null);
    this.setNextStatement(true, null);
    this.setColour(330);
    this.setTooltip("");
    this.setHelpUrl(mainUrl + 'basic/index.html' + utmUrl);
  }
};
// Block: digital-pin selector (2-19) for the rain gauge; outputs a value.
Blockly.Blocks['rain_pin'] = {
  init: function () {
    var row = this.appendDummyInput();
    row.appendField(Blockly.Msg.WEBDUINO_RAIN_GAUGE, "雨量筒,");
    row.appendField(Blockly.Msg.WEBDUINO_RAIN_GAUGEPIN, "rain pin");
    row.appendField(new Blockly.FieldDropdown([["2","2"], ["3","3"], ["4","4"], ["5","5"], ["6","6"], ["7","7"], ["8","8"], ["9","9"], ["10","10"], ["11","11"], ["12","12"], ["13","13"], ["14","14"], ["15","15"], ["16","16"], ["17","17"], ["18","18"], ["19","19"]]), "uno_Dpin1");
    // Value block: plugs into an input socket rather than the statement chain.
    this.setOutput(true, null);
    this.setColour(230);
    this.setTooltip("");
    this.setHelpUrl(mainUrl + 'basic/index.html' + utmUrl);
  }
};
// Block: start detection on a rain-sensor variable and run a nested body
// ("<var> 開始偵測 / 執行 <statements>").
Blockly.Blocks['rain_sencing'] = {
  init: function () {
    var header = this.appendDummyInput();
    header.appendField(new Blockly.FieldVariable("rain"), "rain");
    header.appendField(Blockly.Msg.WEBDUINO_RAIN_DETECT, "開始偵測");
    var body = this.appendStatementInput("NAME");
    body.setCheck(null);
    body.appendField(Blockly.Msg.WEBDUINO_RAIN_DO, "執行");
    this.setPreviousStatement(true, null);
    this.setNextStatement(true, null);
    this.setColour(65);
    this.setTooltip("");
    this.setHelpUrl(mainUrl + 'basic/index.html' + utmUrl);
  }
};
// Block: read the current rainfall value (mm) from a rain-sensor variable.
Blockly.Blocks['rain_display'] = {
  init: function () {
    var row = this.appendDummyInput();
    row.appendField(new Blockly.FieldVariable("rain"), "rain");
    row.appendField(Blockly.Msg.WEBDUINO_RAIN_DETECTED, "所測得目前的");
    row.appendField(Blockly.Msg.WEBDUINO_RAIN_VALUE, "雨量(mm)");
    // Value block: produces the measured rainfall for use in expressions.
    this.setOutput(true, null);
    this.setColour(20);
    this.setTooltip("");
    this.setHelpUrl(mainUrl + 'basic/index.html' + utmUrl);
  }
};
<reponame>invinst/CPDB<gh_stars>10-100
import collections
import requests
from bs4 import BeautifulSoup
import re
from twitterbot.models import TwitterBotTextSource
class TwitterBotNamesService:
    """Extracts candidate names from a batch of tweets.

    Names are pulled from three places: the tweet text itself, the pages
    linked in the tweet, and CamelCase hashtags.
    """

    def __init__(self, tweets):
        self.tweets = tweets

    def get_all_names(self):
        """Return a dict mapping each extracted name to the list of sources it came from."""
        names = collections.defaultdict(list)
        for text_source in self.build_text_sources():
            for name, source in text_source.build_names():
                names[name].append(source)
        return names

    def build_text_sources(self):
        """Build TwitterBotTextSource objects for every non-retweet in the batch."""
        text_sources = []
        for tweet in self.tweets:
            # NOTE(review): SOURCE indentation was mangled; this assumes the
            # link/hashtag parsing is skipped for retweets too — confirm.
            if not getattr(tweet, 'retweeted_tweet', None):
                text_sources.append(TwitterBotTextSource(text=tweet.text, source='text'))
                text_sources += self.parse_linked_websites(tweet.entities['urls'])
                text_sources += self.parse_hashtags(tweet)
        return text_sources

    def parse_linked_websites(self, urls):
        """Fetch each linked page and keep its text if it mentions the Chicago PD.

        Network failures are treated as best-effort: a page that cannot be
        fetched is simply skipped.
        """
        texts = []
        for url in urls:
            try:
                response = requests.get(
                    url['expanded_url'],
                    headers={'User-Agent': 'Mozilla/5.0'},
                    timeout=10,  # avoid hanging the bot on a slow page
                )
            except requests.RequestException:
                continue
            html = response.content.decode('utf-8')
            # Explicit parser avoids BeautifulSoup's "no parser specified" warning
            # and platform-dependent parser selection.
            soup = BeautifulSoup(html, 'html.parser')
            # Strip non-content elements before extracting visible text.
            for element in soup(['style', 'script', '[document]', 'head', 'title']):
                element.extract()
            text = soup.getText()
            if 'CPD' in text or ('Chicago' in text and 'Police' in text):
                text_source = TwitterBotTextSource(text=text, source=url['expanded_url'])
                texts.append(text_source)
        return texts

    def parse_hashtags(self, tweet):
        """Split CamelCase hashtags into words, e.g. #ChicagoPolice -> 'Chicago Police'."""
        hashtags = tweet.entities.get('hashtags', [])
        text_sources = []
        for hashtag in hashtags:
            words = re.findall('[A-Z][a-z]*', hashtag['text'])
            text_sources.append(TwitterBotTextSource(text=' '.join(words), source='#%s' % hashtag['text']))
        return text_sources
|
import random
def generate_random_string(length=5):
    """Return a random string of lowercase ASCII letters.

    Args:
        length: number of characters to generate; defaults to 5 to preserve
            the original behavior.
    """
    characters = 'abcdefghijklmnopqrstuvwxyz'
    # join over a generator is the idiomatic (and O(n)) replacement for
    # repeated string concatenation in a loop.
    return ''.join(random.choice(characters) for _ in range(length))

result = generate_random_string()
print(result)
# Build the static site, then copy the generated `public` directory to the
# Lightsail host using the project's SSH key.
npm run build
scp -i "light-sail-cuckoo-plus.pem" -r public ubuntu@52.76.67.104:projects/Cuckoo.Plus/
<gh_stars>1-10
/*
* lex.c -- Dumb lexical analysis.
*
* by <NAME>
*
* Copyright 2003-2012 -- See accompanying license
*
*/
#include <cat/lex.h>
#include <cat/raw.h>
#include <string.h>
#include <stdlib.h>
#define node_to_lexent(node) container((node), struct lexer_entry, entry)
/* Allocate a new, empty lexer whose memory comes from mm.
 * Returns NULL on allocation failure. */
struct lexer *lex_new(struct memmgr *mm)
{
	struct lexer *lx;

	lx = mem_get(mm, sizeof(*lx));
	if ( lx == NULL )
		return NULL;

	l_init(&lx->entries);
	lx->input.data = NULL;
	lx->input.len = 0;
	lx->next_char = NULL;
	lx->mm = mm;

	return lx;
}
/* Register a regex pattern that produces `token` when matched.
 * The pattern is anchored by prepending '^' so matches only occur at the
 * current input position.  Returns 0 on success, -1 on bad token or
 * allocation/compile failure. */
int lex_add_entry(struct lexer *lex, const char *pattern, int token)
{
	struct lexer_entry *ent;
	struct raw r;
	int rv;
	/* Negative tokens are reserved for LEX_* status codes. */
	if ( token < 0 )
		return -1;
	if ( (ent = mem_get(lex->mm, sizeof(*ent))) == NULL )
		return -1;
	l_init(&ent->entry);
	ent->token = token;
	/* r.len counts the pattern plus its NUL; the buffer gets one extra
	 * byte for the prepended '^' anchor. */
	r.len = strlen(pattern) + 1;
	if ( (r.data = mem_get(lex->mm, r.len + 1)) == NULL ) {
		mem_free(lex->mm, ent);
		return -1;
	}
	*(char *)r.data = '^';
	/* Copies the pattern including its terminating NUL after the '^'. */
	memcpy((char *)r.data + 1, pattern, r.len);
	rv = rex_init(&ent->pattern, &r, lex->mm, NULL);
	/* The compiled pattern owns its own storage; the temp buffer can go. */
	mem_free(lex->mm, r.data);
	if ( rv < 0 ) {
		mem_free(lex->mm, ent);
		return -1;
	}
	l_enq(&lex->entries, &ent->entry);
	return 0;
}
/* Point the lexer at a new NUL-terminated input string and rewind the
 * read position to its start.  The string is not copied; the caller must
 * keep it alive while tokenizing. */
void lex_reset(struct lexer *lex, const char *string)
{
	abort_unless(string);
	str_to_raw(&lex->input, (char *)string, 0);
	lex->next_char = lex->input.data;
}
/* Scan the next token at the current input position.
 * On a match: advances the read position, optionally reports the matched
 * span via `string`/`len` (either may be NULL), and returns the entry's
 * token.  Returns LEX_END at end of input, LEX_ERROR on a regex engine
 * error, LEX_NOMATCH if no registered pattern matches.  Entries are tried
 * in registration order; the first match wins. */
int lex_next_token(struct lexer *lex, const char **string, int *len)
{
	int rv;
	struct list *node;
	struct lexer_entry *ent;
	struct raw r;
	struct rex_match_loc match;

	if ( lex->next_char == (char *)lex->input.data + lex->input.len )
		return LEX_END;

	r.data = (byte_t*)lex->next_char;
	/* BUG FIX: the remaining length must be measured from the start of
	 * the input buffer.  The original subtracted (next_char - r.data),
	 * which is always 0 since r.data was just set to next_char, so r.len
	 * was the full input length and matching could run past the end. */
	r.len = lex->input.len - (lex->next_char - (char *)lex->input.data);

	l_for_each(node, &lex->entries) {
		ent = node_to_lexent(node);
		rv = rex_match(&ent->pattern, &r, &match, 1);
		if ( rv == REX_ERROR )
			return LEX_ERROR;
		if ( rv != REX_MATCH )
			continue;
		/* Patterns are '^'-anchored, so a match must start at offset 0. */
		abort_unless(match.valid);
		abort_unless(match.start == 0);
		if ( string != NULL )
			*string = lex->next_char;
		if ( len != NULL )
			*len = match.len;
		lex->next_char += match.len;
		return ent->token;
	}
	return LEX_NOMATCH;
}
/* Free every registered pattern entry, then the lexer itself. */
void lex_destroy(struct lexer *lex)
{
	struct memmgr *mm = lex->mm;
	struct list *node;

	for ( node = l_deq(&lex->entries); node != NULL; node = l_deq(&lex->entries) ) {
		struct lexer_entry *ent = node_to_lexent(node);
		rex_free(&ent->pattern);
		mem_free(mm, ent);
	}
	mem_free(mm, lex);
}
|
<reponame>anusha-devulapally/A-December-of-Algorithms-2020
# Read the whitespace-separated dictionary of correctly-spelled words.
correct_word=list(map(str,input().split()))
# NOTE(review): bare expression below has no effect in a script; it only
# echoes the value in a REPL/notebook.  Consider removing.
correct_word
# Read the single (possibly misspelt) word to check against the dictionary.
misspelt_words=input()
def matchornot(correct, wrong):
    """Count character positions where `correct` and `wrong` differ.

    Returns:
        0 if the strings have different lengths (they cannot be a
        single-substitution match), otherwise the number of positions
        at which the two strings disagree.
    """
    if len(correct) != len(wrong):
        return 0
    # zip pairs characters position-by-position; lengths are equal here.
    return sum(1 for a, b in zip(correct, wrong) if a != b)
# Report every dictionary word that differs from the misspelt word in
# exactly one position (a single-substitution correction).
for candidate in correct_word:
    if matchornot(candidate, misspelt_words) == 1:
        print(candidate)
|
#!/bin/bash
# Start a Django-RQ worker.  RQ_QUEUES may name one or more queues
# (space-separated); falls back to the "default" queue when unset.
python manage.py rqworker ${RQ_QUEUES:-default}
import * as fs from 'fs';
import * as Github from 'github-api';
require('dotenv').config();
var github = new Github({
username: process.env.GITUSER,
password: <PASSWORD>,
auth: 'basic'
});
var repository = github.getRepo('surveyjs', 'service');
var classesContent = fs.readFileSync('doc_generator/classes.json', 'utf8');
repository.writeFile(
'master',
'surveyjs.io/App_Data/Docs/classes.json',
classesContent,
'Updated API documentation',
function(err) {
console.log(err);
}
);
var pmesContent = fs.readFileSync('doc_generator/pmes.json', 'utf8');
repository.writeFile(
'master',
'surveyjs.io/App_Data/Docs/pmes.json',
pmesContent,
'Updated API documentation',
function(err) {
console.log(err);
}
);
|
rs1 = np.array([self['rs1']]).astype(np.float32)
rs2 = np.array([self['rs2']).astype(np.float32)
if np.array_equal(rs1, rs2):
return 1
else:
return 0 |
#!/usr/bin/env bash
# CI bootstrap: mirror this repository, check out the commit under test,
# run boot-salt highstates, and install dev requirements.
CWD="${PWD}"
REPO="/srv/ms.git"
# TRAVIS_COMMIT is set by Travis CI; fall back to the v2 branch locally.
OLD_MS_BRANCH="${TRAVIS_COMMIT:-v2}"
# Latest commit hash from the working tree's log.
MS_BRANCH="$(git log|head -n1|awk '{print $2}')"
CMS_BRANCH="changeset:${MS_BRANCH}"
BOOTSALT_ARGS="-C -b ${CMS_BRANCH} -n travis --highstates"
# Echo commands and dump the environment for CI debugging.
set -x
env
apt-get install -y --force-yes xz-utils python rsync acl
# Drop the shallow marker so the full history can be mirrored.
rm -f .git/shallow
git clone --mirror --bare . "${REPO}"
i="/srv/makina-states"
git clone "${REPO}" "${i}"
cd "${i}"
git reset --hard "${MS_BRANCH}"
if ! ./_scripts/boot-salt.sh ${BOOTSALT_ARGS};then
    exit 1
fi
. venv/bin/activate
if ! pip install -r requirements/dev.txt;then
    exit 1
fi
# be sure to let travis be sudoer, in case
echo "travis ALL=NOPASSWD: ALL">>/etc/sudoers
exit ${?}
# vim:set et sts=4 ts=4 tw=0:
|
# Homebrew-Cask definition for Plug, a macOS client for Hype Machine.
cask 'plug' do
  version '2.0.9'
  sha256 'bead86a9880ae8eb63296b48d6aef30d80d6057a11ca8491dc85a27b296f5e36'
  url 'https://www.plugformac.com/updates/plug2/Plug-latest.dmg'
  # Sparkle feed used to detect new releases; checkpoint pins the feed state.
  appcast 'https://www.plugformac.com/updates/plug2/sparklecast.xml',
          checkpoint: '7092bd0eb0a14a478018d36a44addfa4a8dfc871e214f79d66b6a5e1ce216815'
  name 'Plug'
  homepage 'https://www.plugformac.com/'
  license :gratis
  app 'Plug.app'
end
|
#!/bin/bash
# shellcheck disable=SC2031
source ./lib.bash
source ./lib-matrix-files.bash
# Regression tests for past matrix failures.
# The full matrix test is too slow to run as a normal part of the test suite;
# they can be run manually from the ./m-*.sh scripts.
TestPhase_Setup ###############################################################
# Each entry encodes one previously-failing matrix configuration.
tests=(
    00-1111-1222-1311
    00-1111-1311-1331
    00-2311-1111-0133
    00-2311-1322-1122
    00-0111-1622-1632
    00-1211-1122-1413
)
TestMatrixFileSetup "${tests[@]}"
unset tests
TestPhase_Run #################################################################
AconfSave
AconfApply
TestPhase_Check ###############################################################
# Verify that save followed by apply reproduces the original state.
TestMatrixFileCheckRoundtrip
TestDone ######################################################################
|
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <errno.h>
#include <fcntl.h>
#include <sys/mman.h>
#include <sys/stat.h>
#include <sys/types.h>
#include <unistd.h>
#include <signal.h>
#include <pthread.h>
#include <sys/prctl.h>
#include <math.h>
#include "hi_common.h"
#include "hi_comm_sys.h"
#include "hi_comm_svp.h"
#include "sample_comm.h"
#include "sample_comm_svp.h"
#include "sample_comm_nnie.h"
#include "sample_nnie_main.h"
#include "sample_svp_nnie_software.h"
#include "sample_comm_ive.h"
/*ssd para*/
/* Loaded .wk model handle for the SSD network. */
static SAMPLE_SVP_NNIE_MODEL_S s_stSsdModel = {0};
/* Hardware (NNIE) forward parameters for the SSD network. */
static SAMPLE_SVP_NNIE_PARAM_S s_stSsdNnieParam = {0};
/* CPU-side post-processing (prior box / softmax / detection-out) parameters. */
static SAMPLE_SVP_NNIE_SSD_SOFTWARE_PARAM_S s_stSsdSoftwareParam = {0};
/******************************************************************************
* function : NNIE Forward
******************************************************************************/
/* Run one NNIE forward pass for the segment named by pstProcSegIdx.
 * Wires this segment's inputs to a previous segment's outputs when the
 * input data lives in a different segment, flushes caches around the
 * hardware call, and (when bInstant) polls until the NNIE finishes. */
static HI_S32 SAMPLE_SVP_NNIE_Forward(SAMPLE_SVP_NNIE_PARAM_S *pstNnieParam,
    SAMPLE_SVP_NNIE_INPUT_DATA_INDEX_S* pstInputDataIdx,
    SAMPLE_SVP_NNIE_PROCESS_SEG_INDEX_S* pstProcSegIdx,HI_BOOL bInstant)
{
    HI_S32 s32Ret = HI_SUCCESS;
    HI_U32 i = 0, j = 0;
    HI_BOOL bFinish = HI_FALSE;
    SVP_NNIE_HANDLE hSvpNnieHandle = 0;
    HI_U32 u32TotalStepNum = 0;

    /* Make the task buffer coherent before the hardware reads it. */
    SAMPLE_COMM_SVP_FlushCache(pstNnieParam->astForwardCtrl[pstProcSegIdx->u32SegIdx].stTskBuf.u64PhyAddr,
        (HI_VOID *) pstNnieParam->astForwardCtrl[pstProcSegIdx->u32SegIdx].stTskBuf.u64VirAddr,
        pstNnieParam->astForwardCtrl[pstProcSegIdx->u32SegIdx].stTskBuf.u32Size);

    /*set input blob according to node name*/
    if(pstInputDataIdx->u32SegIdx != pstProcSegIdx->u32SegIdx)
    {
        for(i = 0; i < pstNnieParam->pstModel->astSeg[pstProcSegIdx->u32SegIdx].u16SrcNum; i++)
        {
            /* Find the producing segment's dst node with the same name
             * and reuse its blob as this segment's src. */
            for(j = 0; j < pstNnieParam->pstModel->astSeg[pstInputDataIdx->u32SegIdx].u16DstNum; j++)
            {
                if(0 == strncmp(pstNnieParam->pstModel->astSeg[pstInputDataIdx->u32SegIdx].astDstNode[j].szName,
                    pstNnieParam->pstModel->astSeg[pstProcSegIdx->u32SegIdx].astSrcNode[i].szName,
                    SVP_NNIE_NODE_NAME_LEN))
                {
                    pstNnieParam->astSegData[pstProcSegIdx->u32SegIdx].astSrc[i] =
                        pstNnieParam->astSegData[pstInputDataIdx->u32SegIdx].astDst[j];
                    break;
                }
            }
            SAMPLE_SVP_CHECK_EXPR_RET((j == pstNnieParam->pstModel->astSeg[pstInputDataIdx->u32SegIdx].u16DstNum),
                HI_FAILURE,SAMPLE_SVP_ERR_LEVEL_ERROR,"Error,can't find %d-th seg's %d-th src blob!\n",
                pstProcSegIdx->u32SegIdx,i);
        }
    }

    /*NNIE_Forward*/
    s32Ret = HI_MPI_SVP_NNIE_Forward(&hSvpNnieHandle,
        pstNnieParam->astSegData[pstProcSegIdx->u32SegIdx].astSrc,
        pstNnieParam->pstModel, pstNnieParam->astSegData[pstProcSegIdx->u32SegIdx].astDst,
        &pstNnieParam->astForwardCtrl[pstProcSegIdx->u32SegIdx], bInstant);
    SAMPLE_SVP_CHECK_EXPR_RET(HI_SUCCESS != s32Ret,s32Ret,SAMPLE_SVP_ERR_LEVEL_ERROR,
        "Error,HI_MPI_SVP_NNIE_Forward failed!\n");

    if(bInstant)
    {
        /*Wait NNIE finish*/
        while(HI_ERR_SVP_NNIE_QUERY_TIMEOUT == (s32Ret = HI_MPI_SVP_NNIE_Query(pstNnieParam->astForwardCtrl[pstProcSegIdx->u32SegIdx].enNnieId,
            hSvpNnieHandle, &bFinish, HI_TRUE)))
        {
            usleep(100);
            SAMPLE_SVP_TRACE(SAMPLE_SVP_ERR_LEVEL_INFO,
                "HI_MPI_SVP_NNIE_Query Query timeout!\n");
        }
    }
    bFinish = HI_FALSE;

    /* Invalidate output blobs so the CPU sees the hardware's results. */
    for(i = 0; i < pstNnieParam->astForwardCtrl[pstProcSegIdx->u32SegIdx].u32DstNum; i++)
    {
        if(SVP_BLOB_TYPE_SEQ_S32 == pstNnieParam->astSegData[pstProcSegIdx->u32SegIdx].astDst[i].enType)
        {
            /* NOTE(review): u32TotalStepNum accumulates across iterations of
             * the outer i-loop without being reset per blob — confirm this
             * matches the intended flush size for multiple SEQ blobs. */
            for(j = 0; j < pstNnieParam->astSegData[pstProcSegIdx->u32SegIdx].astDst[i].u32Num; j++)
            {
                u32TotalStepNum += *((HI_U32*)(pstNnieParam->astSegData[pstProcSegIdx->u32SegIdx].astDst[i].unShape.stSeq.u64VirAddrStep)+j);
            }
            SAMPLE_COMM_SVP_FlushCache(pstNnieParam->astSegData[pstProcSegIdx->u32SegIdx].astDst[i].u64PhyAddr,
                (HI_VOID *) pstNnieParam->astSegData[pstProcSegIdx->u32SegIdx].astDst[i].u64VirAddr,
                u32TotalStepNum*pstNnieParam->astSegData[pstProcSegIdx->u32SegIdx].astDst[i].u32Stride);
        }
        else
        {
            SAMPLE_COMM_SVP_FlushCache(pstNnieParam->astSegData[pstProcSegIdx->u32SegIdx].astDst[i].u64PhyAddr,
                (HI_VOID *) pstNnieParam->astSegData[pstProcSegIdx->u32SegIdx].astDst[i].u64VirAddr,
                pstNnieParam->astSegData[pstProcSegIdx->u32SegIdx].astDst[i].u32Num*
                pstNnieParam->astSegData[pstProcSegIdx->u32SegIdx].astDst[i].unShape.stWhc.u32Chn*
                pstNnieParam->astSegData[pstProcSegIdx->u32SegIdx].astDst[i].unShape.stWhc.u32Height*
                pstNnieParam->astSegData[pstProcSegIdx->u32SegIdx].astDst[i].u32Stride);
        }
    }
    return s32Ret;
}
/******************************************************************************
* function : NNIE ForwardWithBbox
******************************************************************************/
/* Run one NNIE forward pass that also takes ROI/bbox inputs (astBbox).
 * Mirrors SAMPLE_SVP_NNIE_Forward but uses the WithBbox control block. */
static HI_S32 SAMPLE_SVP_NNIE_ForwardWithBbox(SAMPLE_SVP_NNIE_PARAM_S *pstNnieParam,
    SAMPLE_SVP_NNIE_INPUT_DATA_INDEX_S* pstInputDataIdx,SVP_SRC_BLOB_S astBbox[],
    SAMPLE_SVP_NNIE_PROCESS_SEG_INDEX_S* pstProcSegIdx,HI_BOOL bInstant)
{
    HI_S32 s32Ret = HI_SUCCESS;
    HI_BOOL bFinish = HI_FALSE;
    SVP_NNIE_HANDLE hSvpNnieHandle = 0;
    HI_U32 u32TotalStepNum = 0;
    HI_U32 i, j;

    /* Make the task buffer coherent before the hardware reads it. */
    SAMPLE_COMM_SVP_FlushCache(pstNnieParam->astForwardWithBboxCtrl[pstProcSegIdx->u32SegIdx].stTskBuf.u64PhyAddr,
        (HI_VOID *) pstNnieParam->astForwardWithBboxCtrl[pstProcSegIdx->u32SegIdx].stTskBuf.u64VirAddr,
        pstNnieParam->astForwardWithBboxCtrl[pstProcSegIdx->u32SegIdx].stTskBuf.u32Size);

    /*set input blob according to node name*/
    if(pstInputDataIdx->u32SegIdx != pstProcSegIdx->u32SegIdx)
    {
        for(i = 0; i < pstNnieParam->pstModel->astSeg[pstProcSegIdx->u32SegIdx].u16SrcNum; i++)
        {
            for(j = 0; j < pstNnieParam->pstModel->astSeg[pstInputDataIdx->u32SegIdx].u16DstNum; j++)
            {
                if(0 == strncmp(pstNnieParam->pstModel->astSeg[pstInputDataIdx->u32SegIdx].astDstNode[j].szName,
                    pstNnieParam->pstModel->astSeg[pstProcSegIdx->u32SegIdx].astSrcNode[i].szName,
                    SVP_NNIE_NODE_NAME_LEN))
                {
                    pstNnieParam->astSegData[pstProcSegIdx->u32SegIdx].astSrc[i] =
                        pstNnieParam->astSegData[pstInputDataIdx->u32SegIdx].astDst[j];
                    break;
                }
            }
            SAMPLE_SVP_CHECK_EXPR_RET((j == pstNnieParam->pstModel->astSeg[pstInputDataIdx->u32SegIdx].u16DstNum),
                HI_FAILURE,SAMPLE_SVP_ERR_LEVEL_ERROR,"Error,can't find %d-th seg's %d-th src blob!\n",
                pstProcSegIdx->u32SegIdx,i);
        }
    }

    /*NNIE_ForwardWithBbox*/
    s32Ret = HI_MPI_SVP_NNIE_ForwardWithBbox(&hSvpNnieHandle,
        pstNnieParam->astSegData[pstProcSegIdx->u32SegIdx].astSrc,astBbox,
        pstNnieParam->pstModel, pstNnieParam->astSegData[pstProcSegIdx->u32SegIdx].astDst,
        &pstNnieParam->astForwardWithBboxCtrl[pstProcSegIdx->u32SegIdx], bInstant);
    SAMPLE_SVP_CHECK_EXPR_RET(HI_SUCCESS != s32Ret,s32Ret,SAMPLE_SVP_ERR_LEVEL_ERROR,
        "Error,HI_MPI_SVP_NNIE_ForwardWithBbox failed!\n");

    if(bInstant)
    {
        /*Wait NNIE finish*/
        while(HI_ERR_SVP_NNIE_QUERY_TIMEOUT == (s32Ret = HI_MPI_SVP_NNIE_Query(pstNnieParam->astForwardWithBboxCtrl[pstProcSegIdx->u32SegIdx].enNnieId,
            hSvpNnieHandle, &bFinish, HI_TRUE)))
        {
            usleep(100);
            SAMPLE_SVP_TRACE(SAMPLE_SVP_ERR_LEVEL_INFO,
                "HI_MPI_SVP_NNIE_Query Query timeout!\n");
        }
    }
    bFinish = HI_FALSE;

    /* Invalidate output blobs so the CPU sees the hardware's results.
     * CONSISTENCY FIX: iterate over the WithBbox control block's dst count;
     * the original read astForwardCtrl here (copy-paste from the plain
     * Forward path) while every other access in this function uses
     * astForwardWithBboxCtrl. */
    for(i = 0; i < pstNnieParam->astForwardWithBboxCtrl[pstProcSegIdx->u32SegIdx].u32DstNum; i++)
    {
        if(SVP_BLOB_TYPE_SEQ_S32 == pstNnieParam->astSegData[pstProcSegIdx->u32SegIdx].astDst[i].enType)
        {
            for(j = 0; j < pstNnieParam->astSegData[pstProcSegIdx->u32SegIdx].astDst[i].u32Num; j++)
            {
                u32TotalStepNum += *((HI_U32*)(pstNnieParam->astSegData[pstProcSegIdx->u32SegIdx].astDst[i].unShape.stSeq.u64VirAddrStep)+j);
            }
            SAMPLE_COMM_SVP_FlushCache(pstNnieParam->astSegData[pstProcSegIdx->u32SegIdx].astDst[i].u64PhyAddr,
                (HI_VOID *) pstNnieParam->astSegData[pstProcSegIdx->u32SegIdx].astDst[i].u64VirAddr,
                u32TotalStepNum*pstNnieParam->astSegData[pstProcSegIdx->u32SegIdx].astDst[i].u32Stride);
        }
        else
        {
            SAMPLE_COMM_SVP_FlushCache(pstNnieParam->astSegData[pstProcSegIdx->u32SegIdx].astDst[i].u64PhyAddr,
                (HI_VOID *) pstNnieParam->astSegData[pstProcSegIdx->u32SegIdx].astDst[i].u64VirAddr,
                pstNnieParam->astSegData[pstProcSegIdx->u32SegIdx].astDst[i].u32Num*
                pstNnieParam->astSegData[pstProcSegIdx->u32SegIdx].astDst[i].unShape.stWhc.u32Chn*
                pstNnieParam->astSegData[pstProcSegIdx->u32SegIdx].astDst[i].unShape.stWhc.u32Height*
                pstNnieParam->astSegData[pstProcSegIdx->u32SegIdx].astDst[i].u32Stride);
        }
    }
    return s32Ret;
}
/******************************************************************************
* function : Fill Src Data
******************************************************************************/
/* Fill the network's input blob from the picture file named in
 * pstNnieCfg->pszPic, then flush the cache so the NNIE sees the data.
 * Handles SEQ, YVU420SP, YVU422SP, and planar (chn x height x width)
 * blob layouts.  Returns HI_SUCCESS or HI_FAILURE on a short read.
 * NOTE(review): if pszPic is NULL, fp stays NULL but fread/fclose are
 * still called unconditionally — confirm callers always pass a path. */
static HI_S32 SAMPLE_SVP_NNIE_FillSrcData(SAMPLE_SVP_NNIE_CFG_S* pstNnieCfg,
    SAMPLE_SVP_NNIE_PARAM_S *pstNnieParam, SAMPLE_SVP_NNIE_INPUT_DATA_INDEX_S* pstInputDataIdx)
{
    FILE* fp = NULL;
    HI_U32 i =0, j = 0, n = 0,m = 0;
    HI_U32 number = 0;
    HI_U32 u32Height = 0, u32Width = 0, u32Chn = 0, u32Stride = 0, u32Dim = 0;
    HI_U32 u32VarSize = 0;
    HI_S32 s32Ret = HI_SUCCESS;
    HI_U8*pu8PicAddr = NULL;
    HI_U32*pu32StepAddr = NULL;
    HI_U32 u32SegIdx = pstInputDataIdx->u32SegIdx;
    HI_U32 u32NodeIdx = pstInputDataIdx->u32NodeIdx;
    HI_U32 u32TotalStepNum = 0;
    /* NOTE(review): these are ImageNet normalization constants, but the
     * arrays are declared HI_U32, so the float initializers truncate to 0;
     * likely intended to be HI_FLOAT — confirm before relying on the
     * preprocessing below. */
    HI_U32 mean_val_rgb[3] = {0.485,0.456,0.406};
    HI_U32 var_val_rgb[3] = {0.229,0.224,0.225};
    /*open file*/
    if (NULL != pstNnieCfg->pszPic)
    {
        fp = fopen(pstNnieCfg->pszPic,"rb");
        SAMPLE_SVP_CHECK_EXPR_RET(NULL == fp,HI_INVALID_VALUE,SAMPLE_SVP_ERR_LEVEL_ERROR,
            "Error, open file failed!\n");
    }
    /*get data size*/
    /* U8-family blob types store one byte per element; everything else
     * (S32/FLOAT-like) stores four bytes. */
    if(SVP_BLOB_TYPE_U8 <= pstNnieParam->astSegData[u32SegIdx].astSrc[u32NodeIdx].enType &&
        SVP_BLOB_TYPE_YVU422SP >= pstNnieParam->astSegData[u32SegIdx].astSrc[u32NodeIdx].enType)
    {
        u32VarSize = sizeof(HI_U8);
    }
    else
    {
        u32VarSize = sizeof(HI_U32);
    }
    /*fill src data*/
    if(SVP_BLOB_TYPE_SEQ_S32 == pstNnieParam->astSegData[u32SegIdx].astSrc[u32NodeIdx].enType)
    {
        u32Dim = pstNnieParam->astSegData[u32SegIdx].astSrc[u32NodeIdx].unShape.stSeq.u32Dim;
        number = u32Dim / 3;
        u32Stride = pstNnieParam->astSegData[u32SegIdx].astSrc[u32NodeIdx].u32Stride;
        pu32StepAddr = (HI_U32*)(pstNnieParam->astSegData[u32SegIdx].astSrc[u32NodeIdx].unShape.stSeq.u64VirAddrStep);
        pu8PicAddr = (HI_U8*)(pstNnieParam->astSegData[u32SegIdx].astSrc[u32NodeIdx].u64VirAddr);
        for(n = 0; n < pstNnieParam->astSegData[u32SegIdx].astSrc[u32NodeIdx].u32Num; n++)
        {
            for(i = 0;i < *(pu32StepAddr+n); i++)
            {
                s32Ret = fread(pu8PicAddr,u32Dim*u32VarSize,1,fp);
                SAMPLE_SVP_CHECK_EXPR_GOTO(1 != s32Ret,FAIL,SAMPLE_SVP_ERR_LEVEL_ERROR,"Error,Read image file failed!\n");
                for(m = 0;m < number; m++)// preprocessing (per-channel mean/var normalization)
                {
                    /* NOTE(review): pu8PicAddr[m]/255 is integer division on
                     * a HI_U8, and the normalized result is stored back into
                     * a HI_U8 — this cannot represent the intended float
                     * values; verify against the model's expected input. */
                    pu8PicAddr[m] = (pu8PicAddr[m]/255 - mean_val_rgb[0])/var_val_rgb[0];
                    pu8PicAddr[m + number] = (pu8PicAddr[m + number]/255 - mean_val_rgb[1])/var_val_rgb[1];
                    pu8PicAddr[m + number * 2] = (pu8PicAddr[m + number * 2]/255 - mean_val_rgb[2])/var_val_rgb[2];
                }
                pu8PicAddr += u32Stride;
            }
            u32TotalStepNum += *(pu32StepAddr+n);
        }
        SAMPLE_COMM_SVP_FlushCache(pstNnieParam->astSegData[u32SegIdx].astSrc[u32NodeIdx].u64PhyAddr,
            (HI_VOID *) pstNnieParam->astSegData[u32SegIdx].astSrc[u32NodeIdx].u64VirAddr,
            u32TotalStepNum*u32Stride);
    }
    else
    {
        u32Height = pstNnieParam->astSegData[u32SegIdx].astSrc[u32NodeIdx].unShape.stWhc.u32Height;
        u32Width = pstNnieParam->astSegData[u32SegIdx].astSrc[u32NodeIdx].unShape.stWhc.u32Width;
        u32Chn = pstNnieParam->astSegData[u32SegIdx].astSrc[u32NodeIdx].unShape.stWhc.u32Chn;
        u32Stride = pstNnieParam->astSegData[u32SegIdx].astSrc[u32NodeIdx].u32Stride;
        pu8PicAddr = (HI_U8*)(pstNnieParam->astSegData[u32SegIdx].astSrc[u32NodeIdx].u64VirAddr);
        if(SVP_BLOB_TYPE_YVU420SP== pstNnieParam->astSegData[u32SegIdx].astSrc[u32NodeIdx].enType)
        {
            /* 420SP: luma plane plus half-height interleaved chroma. */
            for(n = 0; n < pstNnieParam->astSegData[u32SegIdx].astSrc[u32NodeIdx].u32Num; n++)
            {
                for(i = 0; i < u32Chn*u32Height/2; i++)
                {
                    s32Ret = fread(pu8PicAddr,u32Width*u32VarSize,1,fp);
                    SAMPLE_SVP_CHECK_EXPR_GOTO(1 != s32Ret,FAIL,SAMPLE_SVP_ERR_LEVEL_ERROR,"Error,Read image file failed!\n");
                    pu8PicAddr += u32Stride;
                }
            }
        }
        else if(SVP_BLOB_TYPE_YVU422SP== pstNnieParam->astSegData[u32SegIdx].astSrc[u32NodeIdx].enType)
        {
            /* 422SP: luma plane plus full-height interleaved chroma. */
            for(n = 0; n < pstNnieParam->astSegData[u32SegIdx].astSrc[u32NodeIdx].u32Num; n++)
            {
                for(i = 0; i < u32Height*2; i++)
                {
                    s32Ret = fread(pu8PicAddr,u32Width*u32VarSize,1,fp);
                    SAMPLE_SVP_CHECK_EXPR_GOTO(1 != s32Ret,FAIL,SAMPLE_SVP_ERR_LEVEL_ERROR,"Error,Read image file failed!\n");
                    pu8PicAddr += u32Stride;
                }
            }
        }
        else
        {
            /* Planar layout: read row-by-row per channel. */
            for(n = 0; n < pstNnieParam->astSegData[u32SegIdx].astSrc[u32NodeIdx].u32Num; n++)
            {
                for(i = 0;i < u32Chn; i++)
                {
                    for(j = 0; j < u32Height; j++)
                    {
                        s32Ret = fread(pu8PicAddr,u32Width*u32VarSize,1,fp);
                        SAMPLE_SVP_CHECK_EXPR_GOTO(1 != s32Ret,FAIL,SAMPLE_SVP_ERR_LEVEL_ERROR,"Error,Read image file failed!\n");
                        pu8PicAddr += u32Stride;
                    }
                }
            }
        }
        SAMPLE_COMM_SVP_FlushCache(pstNnieParam->astSegData[u32SegIdx].astSrc[u32NodeIdx].u64PhyAddr,
            (HI_VOID *) pstNnieParam->astSegData[u32SegIdx].astSrc[u32NodeIdx].u64VirAddr,
            pstNnieParam->astSegData[u32SegIdx].astSrc[u32NodeIdx].u32Num*u32Chn*u32Height*u32Stride);
    }
    fclose(fp);
    return HI_SUCCESS;
FAIL:
    fclose(fp);
    return HI_FAILURE;
}
/******************************************************************************
* function : Ssd Deinit
******************************************************************************/
/* Release SSD resources: NNIE hardware parameters and the loaded model.
 * Either pointer may be NULL; deinit failures are traced, and the last
 * status is returned. */
static HI_S32 SAMPLE_SVP_NNIE_Ssd_Deinit(SAMPLE_SVP_NNIE_PARAM_S *pstNnieParam,
    SAMPLE_SVP_NNIE_SSD_SOFTWARE_PARAM_S* pstSoftWareParam,SAMPLE_SVP_NNIE_MODEL_S *pstNnieModel)
{
    HI_S32 s32Ret = HI_SUCCESS;

    /* Tear down the hardware parameter set first. */
    if (NULL != pstNnieParam)
    {
        s32Ret = SAMPLE_COMM_SVP_NNIE_ParamDeinit(pstNnieParam);
        SAMPLE_SVP_CHECK_EXPR_TRACE(HI_SUCCESS != s32Ret,SAMPLE_SVP_ERR_LEVEL_ERROR,
            "Error,SAMPLE_COMM_SVP_NNIE_ParamDeinit failed!\n");
    }

    /* Then unload the .wk model, if one was loaded. */
    if (NULL != pstNnieModel)
    {
        s32Ret = SAMPLE_COMM_SVP_NNIE_UnloadModel(pstNnieModel);
        SAMPLE_SVP_CHECK_EXPR_TRACE(HI_SUCCESS != s32Ret,SAMPLE_SVP_ERR_LEVEL_ERROR,
            "Error,SAMPLE_COMM_SVP_NNIE_UnloadModel failed!\n");
    }

    return s32Ret;
}
/******************************************************************************
* function : Ssd software para init
******************************************************************************/
/* Initialize the CPU-side SSD post-processing parameters: conv layout,
 * prior-box geometry, softmax, detection-out, and one cached buffer that
 * holds all temporary workspaces plus the three result blobs
 * (stDstRoi, stDstScore, stClassRoiNum). */
static HI_S32 SAMPLE_SVP_NNIE_Ssd_SoftwareInit(SAMPLE_SVP_NNIE_CFG_S* pstCfg,
    SAMPLE_SVP_NNIE_PARAM_S *pstNnieParam, SAMPLE_SVP_NNIE_SSD_SOFTWARE_PARAM_S* pstSoftWareParam)
{
    HI_U32 i = 0;
    HI_S32 s32Ret = HI_SUCCESS;
    HI_U32 u32ClassNum = 0;
    HI_U32 u32TotalSize = 0;
    HI_U32 u32DstRoiSize = 0;
    HI_U32 u32DstScoreSize = 0;
    HI_U32 u32ClassRoiNumSize = 0;
    HI_U32 u32TmpBufTotalSize = 0;
    HI_U64 u64PhyAddr = 0;
    HI_U8* pu8VirAddr = NULL;
    /*Set Conv Parameters*/
    /*the SSD sample report resule is after permute operation,
    conv result is (C, H, W), after permute, the report node's
    (C1, H1, W1) is (H, W, C), the stride of report result is aligned according to C dim*/
    for(i = 0; i < 12; i++)
    {
        pstSoftWareParam->au32ConvHeight[i] = pstNnieParam->pstModel->astSeg[0].astDstNode[i].unShape.stWhc.u32Chn;
        pstSoftWareParam->au32ConvWidth[i] = pstNnieParam->pstModel->astSeg[0].astDstNode[i].unShape.stWhc.u32Height;
        pstSoftWareParam->au32ConvChannel[i] = pstNnieParam->pstModel->astSeg[0].astDstNode[i].unShape.stWhc.u32Width;
        /* Odd indices are the confidence branches; record their aligned
         * stride (in 32-bit words) once per loc/conf pair. */
        if(i%2==1)
        {
            pstSoftWareParam->au32ConvStride[i/2] = SAMPLE_SVP_NNIE_ALIGN16(pstSoftWareParam->au32ConvChannel[i]*sizeof(HI_U32))/sizeof(HI_U32);
        }
    }
    /*Set PriorBox Parameters*/
    /* Feature-map sizes for the six SSD-300 detection scales. */
    pstSoftWareParam->au32PriorBoxWidth[0] = 38;
    pstSoftWareParam->au32PriorBoxWidth[1] = 19;
    pstSoftWareParam->au32PriorBoxWidth[2] = 10;
    pstSoftWareParam->au32PriorBoxWidth[3] = 5;
    pstSoftWareParam->au32PriorBoxWidth[4] = 3;
    pstSoftWareParam->au32PriorBoxWidth[5] = 1;
    pstSoftWareParam->au32PriorBoxHeight[0] = 38;
    pstSoftWareParam->au32PriorBoxHeight[1] = 19;
    pstSoftWareParam->au32PriorBoxHeight[2] = 10;
    pstSoftWareParam->au32PriorBoxHeight[3] = 5;
    pstSoftWareParam->au32PriorBoxHeight[4] = 3;
    pstSoftWareParam->au32PriorBoxHeight[5] = 1;
    pstSoftWareParam->u32OriImHeight = pstNnieParam->astSegData[0].astSrc[0].unShape.stWhc.u32Height;
    pstSoftWareParam->u32OriImWidth = pstNnieParam->astSegData[0].astSrc[0].unShape.stWhc.u32Width;
    /* Per-scale anchor min/max sizes in input pixels. */
    pstSoftWareParam->af32PriorBoxMinSize[0][0] = 30.0f;
    pstSoftWareParam->af32PriorBoxMinSize[1][0] = 60.0f;
    pstSoftWareParam->af32PriorBoxMinSize[2][0] = 111.0f;
    pstSoftWareParam->af32PriorBoxMinSize[3][0] = 162.0f;
    pstSoftWareParam->af32PriorBoxMinSize[4][0] = 213.0f;
    pstSoftWareParam->af32PriorBoxMinSize[5][0] = 264.0f;
    pstSoftWareParam->af32PriorBoxMaxSize[0][0] = 60.0f;
    pstSoftWareParam->af32PriorBoxMaxSize[1][0] = 111.0f;
    pstSoftWareParam->af32PriorBoxMaxSize[2][0] = 162.0f;
    pstSoftWareParam->af32PriorBoxMaxSize[3][0] = 213.0f;
    pstSoftWareParam->af32PriorBoxMaxSize[4][0] = 264.0f;
    pstSoftWareParam->af32PriorBoxMaxSize[5][0] = 315.0f;
    pstSoftWareParam->u32MinSizeNum = 1;
    pstSoftWareParam->u32MaxSizeNum = 1;
    pstSoftWareParam->bFlip= HI_TRUE;
    pstSoftWareParam->bClip= HI_FALSE;
    /* Number of extra aspect ratios per scale (0 entries are unused). */
    pstSoftWareParam->au32InputAspectRatioNum[0] = 1;
    pstSoftWareParam->au32InputAspectRatioNum[1] = 2;
    pstSoftWareParam->au32InputAspectRatioNum[2] = 2;
    pstSoftWareParam->au32InputAspectRatioNum[3] = 2;
    pstSoftWareParam->au32InputAspectRatioNum[4] = 1;
    pstSoftWareParam->au32InputAspectRatioNum[5] = 1;
    pstSoftWareParam->af32PriorBoxAspectRatio[0][0] = 2;
    pstSoftWareParam->af32PriorBoxAspectRatio[0][1] = 0;
    pstSoftWareParam->af32PriorBoxAspectRatio[1][0] = 2;
    pstSoftWareParam->af32PriorBoxAspectRatio[1][1] = 3;
    pstSoftWareParam->af32PriorBoxAspectRatio[2][0] = 2;
    pstSoftWareParam->af32PriorBoxAspectRatio[2][1] = 3;
    pstSoftWareParam->af32PriorBoxAspectRatio[3][0] = 2;
    pstSoftWareParam->af32PriorBoxAspectRatio[3][1] = 3;
    pstSoftWareParam->af32PriorBoxAspectRatio[4][0] = 2;
    pstSoftWareParam->af32PriorBoxAspectRatio[4][1] = 0;
    pstSoftWareParam->af32PriorBoxAspectRatio[5][0] = 2;
    pstSoftWareParam->af32PriorBoxAspectRatio[5][1] = 0;
    /* Anchor grid step (input pixels) per scale. */
    pstSoftWareParam->af32PriorBoxStepWidth[0] = 8;
    pstSoftWareParam->af32PriorBoxStepWidth[1] = 16;
    pstSoftWareParam->af32PriorBoxStepWidth[2] = 32;
    pstSoftWareParam->af32PriorBoxStepWidth[3] = 64;
    pstSoftWareParam->af32PriorBoxStepWidth[4] = 100;
    pstSoftWareParam->af32PriorBoxStepWidth[5] = 300;
    pstSoftWareParam->af32PriorBoxStepHeight[0] = 8;
    pstSoftWareParam->af32PriorBoxStepHeight[1] = 16;
    pstSoftWareParam->af32PriorBoxStepHeight[2] = 32;
    pstSoftWareParam->af32PriorBoxStepHeight[3] = 64;
    pstSoftWareParam->af32PriorBoxStepHeight[4] = 100;
    pstSoftWareParam->af32PriorBoxStepHeight[5] = 300;
    pstSoftWareParam->f32Offset = 0.5f; // anchor-center offset within each grid cell
    /* Bounding-box decode variances, fixed-point scaled by QUANT_BASE. */
    pstSoftWareParam->as32PriorBoxVar[0] = (HI_S32)(0.1f*SAMPLE_SVP_NNIE_QUANT_BASE);
    pstSoftWareParam->as32PriorBoxVar[1] = (HI_S32)(0.1f*SAMPLE_SVP_NNIE_QUANT_BASE);
    pstSoftWareParam->as32PriorBoxVar[2] = (HI_S32)(0.2f*SAMPLE_SVP_NNIE_QUANT_BASE);
    pstSoftWareParam->as32PriorBoxVar[3] = (HI_S32)(0.2f*SAMPLE_SVP_NNIE_QUANT_BASE);
    /*Set Softmax Parameters*/
    pstSoftWareParam->u32SoftMaxInHeight = 21;
    pstSoftWareParam->au32SoftMaxInChn[0] = 121296;
    pstSoftWareParam->au32SoftMaxInChn[1] = 45486;
    pstSoftWareParam->au32SoftMaxInChn[2] = 12600;
    pstSoftWareParam->au32SoftMaxInChn[3] = 3150;
    pstSoftWareParam->au32SoftMaxInChn[4] = 756;
    pstSoftWareParam->au32SoftMaxInChn[5] = 84;
    pstSoftWareParam->u32ConcatNum = 6;
    pstSoftWareParam->u32SoftMaxOutWidth = 1;
    pstSoftWareParam->u32SoftMaxOutHeight = 21;
    pstSoftWareParam->u32SoftMaxOutChn = 8732;
    /*Set DetectionOut Parameters*/
    pstSoftWareParam->u32ClassNum = 21;
    pstSoftWareParam->u32TopK = 400;
    pstSoftWareParam->u32KeepTopK = 200;
    pstSoftWareParam->u32NmsThresh = (HI_U16)(0.3f*SAMPLE_SVP_NNIE_QUANT_BASE);
    pstSoftWareParam->u32ConfThresh = 1;
    pstSoftWareParam->au32DetectInputChn[0] = 23104;
    pstSoftWareParam->au32DetectInputChn[1] = 8664;
    pstSoftWareParam->au32DetectInputChn[2] = 2400;
    pstSoftWareParam->au32DetectInputChn[3] = 600;
    pstSoftWareParam->au32DetectInputChn[4] = 144;
    pstSoftWareParam->au32DetectInputChn[5] = 16;
    /*Malloc assist buffer memory*/
    u32ClassNum = pstSoftWareParam->u32ClassNum;
    /* NOTE(review): this call presumably also fills the u32Size fields of
     * the three tmp-buf structs used in the address arithmetic below —
     * confirm in SAMPLE_SVP_NNIE_Ssd_GetResultTmpBuf. */
    u32TotalSize = SAMPLE_SVP_NNIE_Ssd_GetResultTmpBuf(pstNnieParam,pstSoftWareParam);
    u32DstRoiSize = SAMPLE_SVP_NNIE_ALIGN16(u32ClassNum*pstSoftWareParam->u32TopK*sizeof(HI_U32)*SAMPLE_SVP_NNIE_COORDI_NUM);
    u32DstScoreSize = SAMPLE_SVP_NNIE_ALIGN16(u32ClassNum*pstSoftWareParam->u32TopK*sizeof(HI_U32));
    u32ClassRoiNumSize = SAMPLE_SVP_NNIE_ALIGN16(u32ClassNum*sizeof(HI_U32));
    u32TotalSize = u32TotalSize+u32DstRoiSize+u32DstScoreSize+u32ClassRoiNumSize;
    s32Ret = SAMPLE_COMM_SVP_MallocCached("SAMPLE_SSD_INIT",NULL,(HI_U64*)&u64PhyAddr,
        (void**)&pu8VirAddr,u32TotalSize);
    SAMPLE_SVP_CHECK_EXPR_RET(HI_SUCCESS != s32Ret,s32Ret,SAMPLE_SVP_ERR_LEVEL_ERROR,
        "Error,Malloc memory failed!\n");
    memset(pu8VirAddr,0, u32TotalSize);
    SAMPLE_COMM_SVP_FlushCache(u64PhyAddr,(void*)pu8VirAddr,u32TotalSize);
    /*set each tmp buffer addr*/
    /* Workspaces are laid out back-to-back inside the single allocation. */
    pstSoftWareParam->stPriorBoxTmpBuf.u64PhyAddr = u64PhyAddr;
    pstSoftWareParam->stPriorBoxTmpBuf.u64VirAddr = (HI_U64)(pu8VirAddr);
    pstSoftWareParam->stSoftMaxTmpBuf.u64PhyAddr = u64PhyAddr+
        pstSoftWareParam->stPriorBoxTmpBuf.u32Size;
    pstSoftWareParam->stSoftMaxTmpBuf.u64VirAddr = (HI_U64)(pu8VirAddr+
        pstSoftWareParam->stPriorBoxTmpBuf.u32Size);
    pstSoftWareParam->stGetResultTmpBuf.u64PhyAddr = u64PhyAddr+
        pstSoftWareParam->stPriorBoxTmpBuf.u32Size+pstSoftWareParam->stSoftMaxTmpBuf.u32Size;
    pstSoftWareParam->stGetResultTmpBuf.u64VirAddr = (HI_U64)(pu8VirAddr+
        pstSoftWareParam->stPriorBoxTmpBuf.u32Size+ pstSoftWareParam->stSoftMaxTmpBuf.u32Size);
    u32TmpBufTotalSize = pstSoftWareParam->stPriorBoxTmpBuf.u32Size+
        pstSoftWareParam->stSoftMaxTmpBuf.u32Size + pstSoftWareParam->stGetResultTmpBuf.u32Size;
    /*set result blob*/
    /* stDstRoi: decoded boxes, u32TopK per class, 4 coordinates each. */
    pstSoftWareParam->stDstRoi.enType = SVP_BLOB_TYPE_S32;
    pstSoftWareParam->stDstRoi.u64PhyAddr = u64PhyAddr+u32TmpBufTotalSize;
    pstSoftWareParam->stDstRoi.u64VirAddr = (HI_U64)(pu8VirAddr+u32TmpBufTotalSize);
    pstSoftWareParam->stDstRoi.u32Stride = SAMPLE_SVP_NNIE_ALIGN16(u32ClassNum*
        pstSoftWareParam->u32TopK*sizeof(HI_U32)*SAMPLE_SVP_NNIE_COORDI_NUM);
    pstSoftWareParam->stDstRoi.u32Num = 1;
    pstSoftWareParam->stDstRoi.unShape.stWhc.u32Chn = 1;
    pstSoftWareParam->stDstRoi.unShape.stWhc.u32Height = 1;
    pstSoftWareParam->stDstRoi.unShape.stWhc.u32Width = u32ClassNum*
        pstSoftWareParam->u32TopK*SAMPLE_SVP_NNIE_COORDI_NUM;
    /* stDstScore: confidence score per kept box. */
    pstSoftWareParam->stDstScore.enType = SVP_BLOB_TYPE_S32;
    pstSoftWareParam->stDstScore.u64PhyAddr = u64PhyAddr+u32TmpBufTotalSize+u32DstRoiSize;
    pstSoftWareParam->stDstScore.u64VirAddr = (HI_U64)(pu8VirAddr+u32TmpBufTotalSize+u32DstRoiSize);
    pstSoftWareParam->stDstScore.u32Stride = SAMPLE_SVP_NNIE_ALIGN16(u32ClassNum*
        pstSoftWareParam->u32TopK*sizeof(HI_U32));
    pstSoftWareParam->stDstScore.u32Num = 1;
    pstSoftWareParam->stDstScore.unShape.stWhc.u32Chn = 1;
    pstSoftWareParam->stDstScore.unShape.stWhc.u32Height = 1;
    pstSoftWareParam->stDstScore.unShape.stWhc.u32Width = u32ClassNum*
        pstSoftWareParam->u32TopK;
    /* stClassRoiNum: number of kept boxes per class. */
    pstSoftWareParam->stClassRoiNum.enType = SVP_BLOB_TYPE_S32;
    pstSoftWareParam->stClassRoiNum.u64PhyAddr = u64PhyAddr+u32TmpBufTotalSize+
        u32DstRoiSize+u32DstScoreSize;
    pstSoftWareParam->stClassRoiNum.u64VirAddr = (HI_U64)(pu8VirAddr+u32TmpBufTotalSize+
        u32DstRoiSize+u32DstScoreSize);
    pstSoftWareParam->stClassRoiNum.u32Stride = SAMPLE_SVP_NNIE_ALIGN16(u32ClassNum*sizeof(HI_U32));
    pstSoftWareParam->stClassRoiNum.u32Num = 1;
    pstSoftWareParam->stClassRoiNum.unShape.stWhc.u32Chn = 1;
    pstSoftWareParam->stClassRoiNum.unShape.stWhc.u32Height = 1;
    pstSoftWareParam->stClassRoiNum.unShape.stWhc.u32Width = u32ClassNum;
    return s32Ret;
}
/******************************************************************************
* function : Ssd init
*            Initializes the NNIE hardware parameters for the SSD sample.
*            On failure, releases whatever was allocated and returns HI_FAILURE.
******************************************************************************/
static HI_S32 SAMPLE_SVP_NNIE_Ssd_ParamInit(SAMPLE_SVP_NNIE_CFG_S* pstCfg,
    SAMPLE_SVP_NNIE_PARAM_S *pstNnieParam, SAMPLE_SVP_NNIE_SSD_SOFTWARE_PARAM_S* pstSoftWareParam)
{
    HI_S32 s32Ret = HI_SUCCESS;
    /*init hardware para*/
    s32Ret = SAMPLE_COMM_SVP_NNIE_ParamInit(pstCfg,pstNnieParam);
    /* On error, jump to INIT_FAIL_0 so partially-allocated state is freed. */
    SAMPLE_SVP_CHECK_EXPR_GOTO(HI_SUCCESS != s32Ret,INIT_FAIL_0,SAMPLE_SVP_ERR_LEVEL_ERROR,
        "Error(%#x),SAMPLE_COMM_SVP_NNIE_ParamInit failed!\n",s32Ret);

    return s32Ret;

INIT_FAIL_0:
    /* Best-effort cleanup; the software-parameter struct may be untouched. */
    s32Ret = SAMPLE_SVP_NNIE_Ssd_Deinit(pstNnieParam,pstSoftWareParam,NULL);
    SAMPLE_SVP_CHECK_EXPR_RET(HI_SUCCESS != s32Ret,s32Ret,SAMPLE_SVP_ERR_LEVEL_ERROR,
        "Error(%#x),SAMPLE_SVP_NNIE_Ssd_Deinit failed!\n",s32Ret);

    return HI_FAILURE;
}
/******************************************************************************
* function : load the .wk model, run one forward pass over pcSrcFile
*            (image 300x300 U8_C3) and hand the report-node output
*            pointers back to the caller via aps32PermuteResult.
******************************************************************************/
void SAMPLE_SVP_NNIE_Encode(HI_CHAR *pcSrcFile,HI_S32 *aps32PermuteResult)
{
    //HI_CHAR *pcSrcFile = "./data/nnie_image/rgb_planar/dog_bike_car_300x300.bgr"; /* HI_CHAR is plain char; this used to hard-code the input image path */
    HI_CHAR *pcModelName = "./resnet_0523.wk"; /* path of the compiled NNIE model */
    HI_U32 u32PicNum = 1; /* HI_U32 is unsigned int; number of input pictures */
    HI_FLOAT f32PrintResultThresh = 0.0f; /* HI_FLOAT is float; result-print threshold (set below, not used in this function) */
    HI_S32 s32Ret = HI_SUCCESS; /* HI_S32 is int */
    SAMPLE_SVP_NNIE_CFG_S stNnieCfg = {0}; /* struct defined in sample_comm_nnie.h */
    SAMPLE_SVP_NNIE_INPUT_DATA_INDEX_S stInputDataIdx = {0};
    SAMPLE_SVP_NNIE_PROCESS_SEG_INDEX_S stProcSegIdx = {0};

    /*Set configuration parameter*/
    f32PrintResultThresh = 0.8f; /* print threshold set to 0.8 */
    stNnieCfg.pszPic= pcSrcFile; /* input picture path */
    stNnieCfg.u32MaxInputNum = u32PicNum; //max input image num in each batch
    stNnieCfg.u32MaxRoiNum = 0;
    stNnieCfg.aenNnieCoreId[0] = SVP_NNIE_ID_0;//set NNIE core

    /*Sys init*/
    SAMPLE_COMM_SVP_CheckSysInit(); /* system init */

    /*Ssd Load model*/
    SAMPLE_SVP_TRACE_INFO("Ssd Load model!\n"); /* trace model loading */
    s32Ret = SAMPLE_COMM_SVP_NNIE_LoadModel(pcModelName,&s_stSsdModel); /* load the model */
    SAMPLE_SVP_CHECK_EXPR_GOTO(HI_SUCCESS != s32Ret,SSD_FAIL_0,SAMPLE_SVP_ERR_LEVEL_ERROR,
        "Error,SAMPLE_COMM_SVP_NNIE_LoadModel failed!\n"); /* error report */

    /*Ssd parameter initialization*/
    /*Ssd parameters are set in SAMPLE_SVP_NNIE_Ssd_SoftwareInit,
     if user has changed net struct, please make sure the parameter settings in
     SAMPLE_SVP_NNIE_Ssd_SoftwareInit function are correct*/
    SAMPLE_SVP_TRACE_INFO("Ssd parameter initialization!\n");
    s_stSsdNnieParam.pstModel = &s_stSsdModel.stModel;
    s32Ret = SAMPLE_SVP_NNIE_Ssd_ParamInit(&stNnieCfg,&s_stSsdNnieParam,&s_stSsdSoftwareParam);
    SAMPLE_SVP_CHECK_EXPR_GOTO(HI_SUCCESS != s32Ret,SSD_FAIL_0,SAMPLE_SVP_ERR_LEVEL_ERROR,
        "Error,SAMPLE_SVP_NNIE_Ssd_ParamInit failed!\n");

    /*Fill src data*/
    SAMPLE_SVP_TRACE_INFO("Ssd start!\n");
    stInputDataIdx.u32SegIdx = 0;
    stInputDataIdx.u32NodeIdx = 0;
    s32Ret = SAMPLE_SVP_NNIE_FillSrcData(&stNnieCfg,&s_stSsdNnieParam,&stInputDataIdx);
    SAMPLE_SVP_CHECK_EXPR_GOTO(HI_SUCCESS != s32Ret,SSD_FAIL_0,SAMPLE_SVP_ERR_LEVEL_ERROR,
        "Error,SAMPLE_SVP_NNIE_FillSrcData failed!\n");

    /*NNIE process(process the 0-th segment)*/
    stProcSegIdx.u32SegIdx = 0;
    s32Ret = SAMPLE_SVP_NNIE_Forward(&s_stSsdNnieParam,&stInputDataIdx,&stProcSegIdx,HI_TRUE);
    SAMPLE_SVP_CHECK_EXPR_GOTO(HI_SUCCESS != s32Ret,SSD_FAIL_0,SAMPLE_SVP_ERR_LEVEL_ERROR,
        "Error,SAMPLE_SVP_NNIE_Forward failed!\n");

    for(int i = 0; i < SAMPLE_SVP_NNIE_SSD_REPORT_NODE_NUM; i++)
    {
        /* NOTE(review): the element type is HI_S32 but a casted pointer is
           stored here; confirm the caller treats these values as addresses
           and that HI_S32 is wide enough on this platform — looks like a bug
           if the target is 64-bit. */
        aps32PermuteResult[i] = (HI_S32*)s_stSsdNnieParam.astSegData[0].astDst[i].u64VirAddr;
    }

SSD_FAIL_0:
    /* Release NNIE resources and tear down the system environment. Note the
       deinit also runs after the output pointers were handed out above —
       verify the caller consumes them before this point frees the buffers. */
    SAMPLE_SVP_NNIE_Ssd_Deinit(&s_stSsdNnieParam,&s_stSsdSoftwareParam,&s_stSsdModel);
    SAMPLE_COMM_SVP_CheckSysExit();
}
/******************************************************************************
* function : SSD sample signal handle
*            Releases all SSD sample resources, zeroes the static state so a
*            partially-freed struct can never be reused, then exits the system.
******************************************************************************/
void SAMPLE_SVP_NNIE_Ssd_HandleSig(void)
{
    SAMPLE_SVP_NNIE_Ssd_Deinit(&s_stSsdNnieParam,&s_stSsdSoftwareParam,&s_stSsdModel);
    /* Zero the static structures after deinit so later calls see clean state. */
    memset(&s_stSsdNnieParam,0,sizeof(SAMPLE_SVP_NNIE_PARAM_S));
    memset(&s_stSsdSoftwareParam,0,sizeof(SAMPLE_SVP_NNIE_SSD_SOFTWARE_PARAM_S));
    memset(&s_stSsdModel,0,sizeof(SAMPLE_SVP_NNIE_MODEL_S));
    /* Tear down the system environment. */
    SAMPLE_COMM_SVP_CheckSysExit();
}
|
<gh_stars>1-10
/*
Copyright (c) 2010, Oracle and/or its affiliates. All rights reserved.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; version 2 of the License.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
*/
package testsuite.clusterj;
import testsuite.clusterj.model.Employee;
import testsuite.clusterj.model.LongIntStringPK;
import com.mysql.clusterj.ClusterJUserException;
/**
 * Tests {@code Session.setPartitionKey}: it must reject unmapped classes,
 * keys of the wrong type, null keys/parts, and repeated calls on the same
 * session, while accepting valid int and compound keys.
 */
public class PartitionKeyTest extends AbstractClusterJTest {

    @Override
    public void localSetUp() {
        createSessionFactory();
        addTearDownClasses(Employee.class, LongIntStringPK.class);
    }

    /** Runs every scenario; individual failures are collected via error(). */
    public void test() {
        badClass();
        wrongKeyTypePrimitive();
        wrongKeyTypePrimitiveNull();
        wrongKeyTypeCompound();
        // Bug fix: this scenario was defined below but never invoked, so the
        // "wrong element type inside a compound key" case silently went untested.
        wrongKeyTypeCompoundPart();
        wrongKeyTypeCompoundNull();
        wrongKeyTypeCompoundNullPart();
        setPartitionKeyTwice();
        goodIntKey();
        goodCompoundKey();
        session = sessionFactory.getSession(); // to allow tear down classes to work
        failOnError();
    }

    /** A class that is not a mapped persistent class must be rejected. */
    protected void badClass() {
        try {
            session = sessionFactory.getSession();
            session.setPartitionKey(Integer.class, 0);
            error("Failed to throw exception on setPartitionKey(Integer.class, 0)");
        } catch (ClusterJUserException ex){
            // good catch
        } finally {
            session.close();
            session = null;
        }
    }

    /** A primitive key of the wrong type (long instead of int) must be rejected. */
    protected void wrongKeyTypePrimitive() {
        try {
            session = sessionFactory.getSession();
            session.setPartitionKey(Employee.class, 0L);
            error("Failed to throw exception on setPartitionKey(Employee.class, 0L)");
        } catch (ClusterJUserException ex){
            // good catch
        } finally {
            session.close();
            session = null;
        }
    }

    /** A null primitive key must be rejected. */
    protected void wrongKeyTypePrimitiveNull() {
        try {
            session = sessionFactory.getSession();
            session.setPartitionKey(Employee.class, null);
            error("Failed to throw exception on setPartitionKey(Employee.class, null)");
        } catch (ClusterJUserException ex){
            // good catch
        } finally {
            session.close();
            session = null;
        }
    }

    /** A scalar passed where a compound (Object[]) key is expected must be rejected. */
    protected void wrongKeyTypeCompound() {
        try {
            session = sessionFactory.getSession();
            session.setPartitionKey(LongIntStringPK.class, 0L);
            error("Failed to throw exception on setPartitionKey(LongIntStringPK.class, 0L)");
        } catch (ClusterJUserException ex){
            // good catch
        } finally {
            session.close();
            session = null;
        }
    }

    /** A compound key whose middle part has the wrong type (long, not int) must be rejected. */
    protected void wrongKeyTypeCompoundPart() {
        try {
            Object[] key = new Object[] {0L, 0L, ""};
            session = sessionFactory.getSession();
            session.setPartitionKey(LongIntStringPK.class, key);
            error("Failed to throw exception on setPartitionKey(LongIntStringPK.class, new Object[] {0L, 0L, \"\"})");
        } catch (ClusterJUserException ex){
            // good catch
        } finally {
            session.close();
            session = null;
        }
    }

    /** A null compound key must be rejected. */
    protected void wrongKeyTypeCompoundNull() {
        try {
            session = sessionFactory.getSession();
            session.setPartitionKey(LongIntStringPK.class, null);
            error("Failed to throw exception on setPartitionKey(LongIntStringPK.class, null)");
        } catch (ClusterJUserException ex){
            // good catch
        } finally {
            session.close();
            session = null;
        }
    }

    /** A compound key with a null part must be rejected. */
    protected void wrongKeyTypeCompoundNullPart() {
        try {
            session = sessionFactory.getSession();
            Object[] key = new Object[] {0L, null, ""};
            session.setPartitionKey(LongIntStringPK.class, key);
            error("Failed to throw exception on setPartitionKey(LongIntStringPK.class, new Object[] {0L, null, \"\"})");
        } catch (ClusterJUserException ex){
            // good catch
        } finally {
            session.close();
            session = null;
        }
    }

    /** Setting the partition key twice on the same session must be rejected. */
    protected void setPartitionKeyTwice() {
        try {
            session = sessionFactory.getSession();
            // a fully valid compound key; only the second call should fail
            Object[] key = new Object[] {0L, 0, ""};
            session.setPartitionKey(LongIntStringPK.class, key);
            session.setPartitionKey(LongIntStringPK.class, key);
            error("Failed to throw exception on second setPartitionKey");
        } catch (ClusterJUserException ex){
            // good catch
        } finally {
            session.close();
            session = null;
        }
    }

    /** A valid int partition key must allow a persist to succeed. */
    protected void goodIntKey() {
        try {
            session = sessionFactory.getSession();
            session.deletePersistentAll(Employee.class);
            Employee employee = session.newInstance(Employee.class);
            employee.setId(1000);
            employee.setAge(1000);
            employee.setMagic(1000);
            employee.setName("Employee 1000");
            session.setPartitionKey(Employee.class, 1000);
            session.makePersistent(employee);
        } finally {
            session.close();
            session = null;
        }
    }

    /** A valid compound partition key must allow a persist to succeed. */
    protected void goodCompoundKey() {
        try {
            session = sessionFactory.getSession();
            session.deletePersistentAll(LongIntStringPK.class);
            // key can contain nulls if not part of partition key
            Object[] key = new Object[] { 1000L, 1000, null};
            LongIntStringPK instance = session
                    .newInstance(LongIntStringPK.class);
            instance.setLongpk(1000L);
            instance.setIntpk(1000);
            instance.setStringpk("1 Thousand");
            session.setPartitionKey(LongIntStringPK.class, key);
            session.makePersistent(instance);
        } finally {
            session.close();
            session = null;
        }
    }
}
|
package com.javatest.general.api;
import org.springframework.http.HttpStatus;
/**
 * API status codes paired with the HTTP status each one maps to.
 * The {@code code} string is the stable machine-readable identifier
 * returned to API clients.
 */
public enum ApiStatusCode {

    OK("OK", HttpStatus.OK),
    // Fixed: the code string used to be "ApiStatusCode" — an apparent
    // copy/paste slip that broke the NAME <-> "CamelCase code" convention
    // every other constant follows.
    INTERNAL_ERROR("InternalError", HttpStatus.INTERNAL_SERVER_ERROR),
    INVALID_ACCESS_KEY_ID("InvalidAccessKeyId", HttpStatus.FORBIDDEN),
    INVALID_REQUEST_PARAM("InvalidRequestParam", HttpStatus.BAD_REQUEST);

    /** Machine-readable code string sent to clients. */
    final String code;
    /** HTTP status to send with the response. */
    final HttpStatus httpStatus;

    ApiStatusCode(final String code, final HttpStatus httpStatus) {
        this.code = code;
        this.httpStatus = httpStatus;
    }

    public String getCode() {
        return this.code;
    }

    public HttpStatus getHttpStatus() {
        return this.httpStatus;
    }
}
|
const highestCommonFactor = (num1, num2) => {
let min = Math.min(num1, num2);
for (let i = min; i >= 1; i--) {
if (num1 % i === 0 && num2 % i === 0) {
return i;
}
}
}; |
/**
 * Render an order-details associative array as a Bootstrap-styled HTML table.
 *
 * Keys become row headers; the 'Items' key is rendered as a nested
 * Item/Quantity/Price table and 'Total Amount' is formatted as currency.
 * All dynamic text is now HTML-escaped so order data (item names, customer
 * name, etc.) cannot inject markup or scripts into the page (XSS).
 *
 * @param array $orderDetails order fields, optionally including 'Items'
 *                            (list of ['Item','Quantity','Price'] rows)
 *                            and 'Total Amount' (number).
 * @return string HTML fragment
 */
function generateOrderTable($orderDetails) {
    // Escape every value that originates from data before interpolation.
    $esc = function ($v) {
        return htmlspecialchars((string)$v, ENT_QUOTES, 'UTF-8');
    };
    $html = '<div class="container order my-4">
        <div class="row align-items-center">
            <div class="col-sm-10">
                <h4>Order detail</h4>
                <table class="w-100">';
    foreach ($orderDetails as $key => $value) {
        if ($key === 'Items') {
            $html .= '<tr><th>' . $esc($key) . '</th><td><table class="w-100">';
            $html .= '<tr><th>Item</th><th>Quantity</th><th>Price</th></tr>';
            foreach ($value as $item) {
                $html .= '<tr><td>' . $esc($item['Item']) . '</td><td>' . $esc($item['Quantity']) . '</td><td>$' . number_format($item['Price'], 2) . '</td></tr>';
            }
            $html .= '</table></td></tr>';
        } else {
            // number_format output is digits/punctuation only, so it needs no escaping.
            $html .= '<tr><th>' . $esc($key) . '</th><td>' . ($key === 'Total Amount' ? '$' . number_format($value, 2) : $esc($value)) . '</td></tr>';
        }
    }
    $html .= '</table></div></div></div>';
    return $html;
}
// Example usage: a sample order with three line items whose prices sum to
// $190 while 'Total Amount' is $250 — the fields are independent; the
// function renders whatever it is given without recomputing totals.
$orderDetails = [
    'Order ID' => '12345',
    'Customer Name' => 'John Doe',
    'Order Date' => '2022-08-15',
    'Total Amount' => 250.00,
    'Items' => [
        ['Item' => 'Product A', 'Quantity' => 2, 'Price' => 50.00],
        ['Item' => 'Product B', 'Quantity' => 1, 'Price' => 100.00],
        ['Item' => 'Product C', 'Quantity' => 3, 'Price' => 30.00]
    ]
];
// Emit the rendered table directly.
echo generateOrderTable($orderDetails);
<reponame>SoftwarearchitekturTeam/TypeTogether<filename>shared/src/test/java/de/hswhameln/typetogether/networking/util/StringEscaperTest.java
package de.hswhameln.typetogether.networking.util;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertEquals;
/**
 * Unit tests for {@code StringEscaper}: for the cases covered here,
 * escape() encodes newlines and backslashes into a single-line form and
 * unescape() inverts that encoding.
 */
class StringEscaperTest {

    @Test
    void testEscapeHappyDay() {
        // Plain text passes through unchanged.
        assertEquals("abc", StringEscaper.escape("abc"));
    }

    @Test
    void testUnescapeHappyDay() {
        // Plain text passes through unchanged.
        assertEquals("abc", StringEscaper.unescape("abc"));
    }

    @Test
    void testEscapeNewLine() {
        // A literal newline becomes the two characters backslash + 'n'.
        assertEquals("a\\nb", StringEscaper.escape("a\nb"));
    }

    @Test
    void testUnescapeNewLine() {
        // The two-character sequence backslash + 'n' becomes a newline again.
        assertEquals("a\nb", StringEscaper.unescape("a\\nb"));
    }

    @Test
    void testEscapeBackslash() {
        // A single backslash is doubled so it cannot be confused with an escape.
        assertEquals("a\\\\b", StringEscaper.escape("a\\b"));
    }

    @Test
    void testUnescapeBackslash() {
        // A doubled backslash collapses back to a single one.
        assertEquals("a\\b", StringEscaper.unescape("a\\\\b"));
    }
}
<filename>static/assets/console/1-live-expressions.js<gh_stars>1-10
"use strict";
(function () {
window.CustomValue = "Initial Value";
let interval = null;
const clear = () => {
if (interval) {
clearInterval(interval);
interval = null;
}
};
document.querySelector(".change").addEventListener("click", () => {
clear();
interval = setInterval(
() => (window.CustomValue = new Date().getTime()),
250
);
});
document.querySelector(".stopChange").addEventListener("click", clear);
})();
|
<filename>src/core/i_static_actor_component.h
#ifndef INCLUDED_CORE_I_STATIC_ACTOR_COMPONENT_H
#define INCLUDED_CORE_I_STATIC_ACTOR_COMPONENT_H

#include "component.h"

// Component base declared via the generator (see the command line at the
// bottom of this file). It carries no state of its own; presumably it marks
// actors as "static" for other systems — confirm against the component users.
class IStaticActorComponent : public Component
{
public:
    DEFINE_COMPONENT_BASE(IStaticActorComponent)
public:
    // Boost.Serialization needs private access to serialize().
    friend class ::boost::serialization::access;
    template<class Archive>
    void serialize( Archive& ar, const unsigned int version );
};

// Serializes only the Component base-class state; this class adds no fields.
template<class Archive>
void IStaticActorComponent::serialize(Archive& ar, const unsigned int version)
{
    ar& boost::serialization::base_object<Component>(*this);
}

#endif//INCLUDED_CORE_I_STATIC_ACTOR_COMPONENT_H
//command: "classgenerator.exe" -g "component" -c "static_actor_component"
|
package org.hiro;
import org.hiro.character.Human;
import org.hiro.character.Player;
import org.hiro.character.StateEnum;
import org.hiro.map.AbstractCoordinate;
import org.hiro.map.Coordinate;
import org.hiro.map.RoomInfoEnum;
import org.hiro.output.Display;
import org.hiro.things.ObjectType;
import org.hiro.things.OriginalMonster;
import org.hiro.things.Thing;
import org.hiro.things.scrolltype.Scare;
/**
* Code for one creature to chase another
*/
public class Chase {
private static AbstractCoordinate ch_ret;
/*
* roomin:
* Find what room some coordinates are in. null means they aren't
* in any room.
*/
static Room roomin(AbstractCoordinate cp) {
int fp = Util.flat(cp);
if ((fp & Const.F_PASS) != 0) {
return Global.passages[fp & Const.F_PNUM];
}
for (int i = 0; i < Const.MAXROOMS; i++) {
Room rp = Global.rooms.get(i);
if (rp.isInMyRoom(cp)) {
return rp;
}
}
// msg("in some bizarre place (%d, %d)", unc( * cp));
boolean MASTER = false;
if (MASTER) {
// abort();
return null;
} else {
return null;
}
}
/*
* runto:
* Set a monster running after the hero.
*/
public static void runto(AbstractCoordinate runner) {
OriginalMonster tp = Util.getPlace(runner).p_monst;
/*
* If we couldn't find him, something is funny
*/
if (tp == null) {
boolean MASTER = true;
if (MASTER) {
// msg("couldn't find monster in runto at (%d,%d)", runner. y, runner.x);
}
return;
}
/*
* Start the beastie running
*/
tp.addState(StateEnum.ISRUN);
tp.removeState(StateEnum.ISHELD);
tp.setRunPosition(find_dest(tp));
}
/*
* find_dest:
* find the proper destination for the monster
*/
static AbstractCoordinate find_dest(OriginalMonster tp) {
int prob;
if ((prob = Global.monsters[tp.getType() - 'A'].m_carry) <= 0
|| tp.getRoom().equals(Human.instance.getRoom())
|| see_monst(tp)) {
return Human.instance.getPosition();
}
for (Thing obj : Global.lvl_obj) {
if (obj instanceof Scare) {
continue;
}
if (tp.getRoom().equals(roomin(obj.getOPos())) && Util.rnd(100) < prob) {
for (OriginalMonster tp2 : Global.mlist) {
tp = tp2;
if (tp.getRunPosition().equals(obj.getOPos())) {
break;
}
}
if (tp == null) {
return obj.getOPos();
}
}
}
return Human.instance.getPosition();
}
    /*
     * see_monst:
     *  Return true if the hero can see the monster.
     */
    public static boolean see_monst(OriginalMonster mp) {
        // A blind hero sees nothing.
        if (Human.instance.containsState(StateEnum.ISBLIND)) {
            return false;
        }
        // Invisible monsters are visible only with the CANSEE state.
        if (mp.containsState(StateEnum.ISINVIS) && !Human.instance.containsState(StateEnum.CANSEE)) {
            return false;
        }
        int y = mp.getPosition().getY();
        int x = mp.getPosition().getX();
        // Within lamp range: visible when on the hero's row/column, or when
        // one of the orthogonally adjacent "corner" squares is passable
        // (i.e. the diagonal line of sight isn't blocked).
        if (dist_cp(mp.getPosition(), Human.instance.getPosition()) < Const.LAMPDIST) {
            return y == Human.instance.getPositionY() || x == Human.instance.getPositionX()
                    || IOUtil.step_ok(Util.INDEX(y, Human.instance.getPositionX()).p_ch)
                    || IOUtil.step_ok(Util.INDEX(Human.instance.getPositionY(), x).p_ch);
        }
        // Outside lamp range the monster must share a lit room with the hero.
        if (!mp.getRoom().equals(Human.instance.getRoom())) {
            return false;
        }
        return !mp.getRoom().containInfo(RoomInfoEnum.ISDARK);
    }
/*
* dist:
* Calculate the "distance" between to points. Actually,
* this calculates d^2, not d, but that's good enough for
* our purposes, since it's only used comparitively.
*/
private static int dist(int y1, int x1, int y2, int x2) {
return ((x2 - x1) * (x2 - x1) + (y2 - y1) * (y2 - y1));
}
/*
* isSee:
*
* 昔はcan_see()
* Returns true if the hero can see a certain coordinate.
*/
public static boolean isSee(Player player, AbstractCoordinate c) {
if (player.containsState(StateEnum.ISBLIND)) {
return false;
}
if (dist_cp(c, player.getPosition()) < Const.LAMPDIST) {
if ((Util.flat(c) & Const.F_PASS) != 0) {
return c.getY() == player.getPositionY() || c.getX() == player.getPositionX() ||
IOUtil.step_ok(Util.INDEX(c.getY(), player.getPositionX()).p_ch) ||
IOUtil.step_ok(Util.INDEX(player.getPositionY(), c.getX()).p_ch);
}
}
/*
* We can only see if the hero in the same room as
* the coordinate and the room is lit or if it is close.
*/
Room rer = roomin(c);
return rer.equals(player.getRoom()) && !rer.containInfo(RoomInfoEnum.ISDARK);
}
/*
* diag_ok:
* Check to see if the move is legal if it is diagonal
*/
public static boolean diag_ok(AbstractCoordinate sp, AbstractCoordinate ep) {
if (ep.getX() < 0 || ep.getX() >= Const.NUMCOLS || ep.getY() <= 0 || ep.getY() >= Const.NUMLINES - 1) {
return false;
}
if (ep.getX() == sp.getX() || ep.getY() == sp.getY()) {
return true;
}
return (IOUtil.step_ok(Util.INDEX(ep.getY(), sp.getX()).p_ch) &&
IOUtil.step_ok(Util.INDEX(sp.getY(), ep.getX()).p_ch));
}
/*
* relocate:
* Make the monster's new location be the specified one, updating
* all the relevant state.
*/
public static void relocate(OriginalMonster th, AbstractCoordinate new_loc) {
if (!new_loc.equals(th.getPosition())) {
Display.mvaddch(th.getPosition(), (char) th.getFloorTile());
th.setRoom(roomin(new_loc));
set_oldch(th, new_loc);
Room oroom = th.getRoom();
Util.getPlace(th.getPosition()).p_monst = null;
if (!oroom.equals(th.getRoom())) {
th.setRunPosition(find_dest(th));
}
th.setPosition(new_loc);
Util.getPlace(new_loc).p_monst = th;
}
Display.move(new_loc);
if (see_monst(th)) {
Display.addch((char) th.getDisplayTile());
} else if (Human.instance.containsState(StateEnum.SEEMONST)) {
Display.standout();
Display.addch((char) th.getType());
Display.standend();
}
}
    /*
     * set_oldch:
     *  Set the oldch (floor tile) character for the monster — i.e. remember
     *  what map character the monster will be covering at cp so it can be
     *  redrawn when the monster moves off it.
     */
    static void set_oldch(OriginalMonster tp, AbstractCoordinate cp) {
        if (tp.getPosition().equals(cp)) {
            return;
        }
        int sch = tp.getFloorTile();
        // Read what is currently drawn at the destination.
        tp.setFloorTile(Util.CCHAR(Display.mvinch(cp)));
        if (!Human.instance.containsState(StateEnum.ISBLIND)) {
            // In a dark room a floor square is shown as blank...
            if ((sch == ObjectType.FLOOR.getValue() || tp.getFloorTile() == ObjectType.FLOOR.getValue()) &&
                    tp.getRoom().containInfo(RoomInfoEnum.ISDARK)) {
                tp.setFloorTile(' ');
            } else if (dist_cp(cp, Human.instance.getPosition()) <= Const.LAMPDIST && Global.see_floor) {
                // ...but within lamp range take the true map character.
                tp.setFloorTile(Util.getPlace(cp).p_ch.getValue());
            }
        }
    }
/*
* dist_cp:
* Call dist() with appropriate arguments for coordinate pointers
*/
static int dist_cp(AbstractCoordinate c1, AbstractCoordinate c2) {
return dist(c1.getY(), c1.getX(), c2.getY(), c2.getX());
}
    /*
     * runners:
     *  Make all the running monsters move.
     */
    static void runners() {
        for (OriginalMonster tp : Global.mlist) {
            /* remember this in case the monster's "next" is changed */
            if (!tp.containsState(StateEnum.ISHELD) && tp.containsState(StateEnum.ISRUN)) {
                AbstractCoordinate orig_pos = tp.getPosition();
                boolean wasTarget = tp.containsState(StateEnum.ISTARGET);
                // -1 bubbles up from do_chase()/Fight.attack(); presumably it
                // means the monster is gone this turn — TODO confirm.
                if (move_monst(tp) == -1)
                    continue;
                // Flying monsters far from the hero get a second move.
                if (tp.containsState(StateEnum.ISFLY) && dist_cp(Human.instance.getPosition(), tp.getPosition()) >= 3)
                    move_monst(tp);
                // A targeted monster that actually moved loses its target
                // mark and cancels the fight-to-the-death state.
                if (wasTarget && !orig_pos.equals(tp.getPosition())) {
                    tp.removeState(StateEnum.ISTARGET);
                    Global.to_death = false;
                }
            }
        }
        // Flush any combat message accumulated during the moves.
        if (Global.has_hit) {
            IOUtil.endmsg();
            Global.has_hit = false;
        }
    }
/*
* move_monst:
* Execute a single turn of running for a monster
*/
static int move_monst(OriginalMonster tp) {
if (!tp.containsState(StateEnum.ISSLOW) || tp.isSlow()) {
if (do_chase(tp) == -1) {
return -1;
}
}
if (tp.containsState(StateEnum.ISHASTE)) {
if (do_chase(tp) == -1) {
return -1;
}
}
tp.changeSlow();
return 0;
}
    /*
     * do_chase:
     *  Make one thing chase another: pick this turn's sub-goal (a door when
     *  chaser and chasee are in different rooms, otherwise the final
     *  destination), let chase() choose the actual step (left in the static
     *  ch_ret), and handle the special cases: attacking the hero, picking up
     *  a coveted object, and dragon flame breath.
     */
    static int do_chase(OriginalMonster th) {
        int DRAGONSHOT = 5; /* one chance in DRAGONSHOT that a dragon will flame */
        int mindist = 32767;
        AbstractCoordinate thisTmp = new Coordinate(); /* Temporary destination for chaser */
        /* Find room of chaser */
        /* room of chaser, room of chasee */
        Room rer = th.getRoom();
        if (th.containsState(StateEnum.ISGREED) && rer.r_goldval == 0) {
            th.setRunPosition(Human.instance.getPosition()); /* If gold has been taken, run after hero */
        }
        Room ree;
        if (th.getRunPosition().equals(Human.instance.getPosition())) { /* Find room of chasee */
            ree = Human.instance.getRoom();
        } else {
            ree = roomin(th.getRunPosition());
        }
        /*
         * We don't count doors as inside rooms for this routine
         */
        boolean door = (Util.getPlace(th.getPosition()).p_ch == ObjectType.DOOR);
        /*
         * If the object of our desire is in a different room,
         * and we are not in a corridor, run to the door nearest to
         * our goal.
         */
        while (true) {
            if (rer != ree) {
                // Head for the exit of our room closest to the goal.
                for (int i = 0; i < rer.r_nexits; i++) {
                    AbstractCoordinate cp = rer.r_exit[i];
                    int curdist = dist_cp(th.getRunPosition(), cp);
                    if (curdist < mindist) {
                        thisTmp = cp;
                        mindist = curdist;
                    }
                }
                // Standing on a door: retry once using the passage it leads to.
                if (door) {
                    rer = Global.passages[Util.flat(th.getPosition()) & Const.F_PNUM];
                    door = false;
                    continue;
                }
            } else {
                thisTmp = th.getRunPosition();
                /*
                 * For dragons check and see if (a) the hero is on a straight
                 * line from it, and (b) that it is within shooting distance,
                 * but outside of striking range.
                 */
                if (th.getType() == 'D' && (th.getPosition().getY() == Human.instance.getPositionY() || th.getPosition().getX() == Human.instance.getPositionX()
                        || Math.abs(th.getPosition().getY() - Human.instance.getPositionY()) == Math.abs(th.getPosition().getX() - Human.instance.getPositionX()))
                        && dist_cp(th.getPosition(), Human.instance.getPosition()) <= Const.BOLT_LENGTH * Const.BOLT_LENGTH
                        && !th.containsState(StateEnum.ISCANC) && Util.rnd(DRAGONSHOT) == 0) {
                    // Flame travels one step toward the hero in each axis.
                    Global.delta = new Coordinate(Misc.sign(Human.instance.getPositionX() - th.getPosition().getX()),
                            Misc.sign(Human.instance.getPositionY() - th.getPosition().getY()));
                    if (Global.has_hit) {
                        IOUtil.endmsg();
                    }
                    StickMethod.fire_bolt(th.getPosition(), Global.delta, "flame");
                    // Breathing fire interrupts the hero's running/repeat state.
                    Global.running = false;
                    Global.count = 0;
                    Global.quiet = 0;
                    if (Global.to_death && !th.containsState(StateEnum.ISTARGET)) {
                        Global.to_death = false;
                        Global.kamikaze = false;
                    }
                    return (0);
                }
            }
            break;
        }
        /*
         * This now contains what we want to run to this time
         * so we run to it. If we hit it we either want to fight it
         * or stop running
         */
        /* true means we are there */
        boolean stoprun = false;
        if (!chase(th, thisTmp)) {
            // chase() says we reached the sub-goal.
            if (thisTmp.equals(Human.instance.getPosition())) {
                return (Fight.attack(Human.instance, th));
            } else if (thisTmp.equals(th.getRunPosition())) {
                // Arrived at a coveted object: pick it up and choose a new goal.
                for (Thing obj : Global.lvl_obj) {
                    if (th.getRunPosition().equals(obj.getOPos())) {
                        Global.lvl_obj.remove(obj);
                        th.addItem(obj);
                        Util.getPlace(obj.getOPos()).p_ch =
                                th.getRoom().containInfo(RoomInfoEnum.ISGONE) ? ObjectType.PASSAGE : ObjectType.FLOOR;
                        th.setRunPosition(find_dest(th));
                        break;
                    }
                }
                if (th.getType() != 'F') {
                    stoprun = true;
                }
            }
        } else {
            // Venus flytraps ('F') never actually move.
            if (th.getType() == 'F') {
                return (0);
            }
        }
        // chase() left the chosen step in the static ch_ret.
        relocate(th, ch_ret);
        /*
         * And stop running if need be
         */
        if (stoprun && th.getPosition().equals(th.getRunPosition())) {
            th.removeState(StateEnum.ISRUN);
        }
        return (0);
    }
    /*
     * chase:
     *  Find the spot for the chaser(er) to move closer to the
     *  chasee(ee). Returns TRUE if we want to keep on chasing later,
     *  FALSE if we reach the goal. The chosen square is left in the
     *  static ch_ret for the caller (do_chase) to apply.
     */
    static boolean chase(OriginalMonster tp, AbstractCoordinate ee) {
        int curdist;
        AbstractCoordinate er = tp.getPosition();
        AbstractCoordinate tryp = new Coordinate();
        /*
         * If the thing is confused, let it move randomly. Invisible
         * Stalkers are slightly confused all of the time, and bats are
         * quite confused all the time
         */
        if ((tp.containsState(StateEnum.ISHUH) && Util.rnd(5) != 0) || (tp.getType() == 'P' && Util.rnd(5) == 0)
                || (tp.getType() == 'B' && Util.rnd(2) == 0)) {
            /*
             * get a valid random move
             */
            ch_ret = Move.rndmove(tp);
            curdist = dist_cp(ch_ret, ee);
            /*
             * Small chance that it will become un-confused
             */
            if (Util.rnd(20) == 0) {
                tp.removeState(StateEnum.ISHUH);
            }
        }
        /*
         * Otherwise, find the empty spot next to the chaser that is
         * closest to the chasee.
         */
        else {
            /*
             * This will eventually hold where we move to get closer
             * If we can't find an empty spot, we stay where we are.
             */
            curdist = dist_cp(er, ee);
            ch_ret = er;
            // Clamp the 3x3 neighbourhood to the map bounds.
            int ey = er.getY() + 1;
            if (ey >= Const.NUMLINES - 1) {
                ey = Const.NUMLINES - 2;
            }
            int ex = er.getX() + 1;
            if (ex >= Const.NUMCOLS) {
                ex = Const.NUMCOLS - 1;
            }
            // Count of equally-good squares seen, for a uniform random pick.
            int plcnt = 1;
            for (int x = er.getX() - 1; x <= ex; x++) {
                if (x < 0)
                    continue;
                tryp.setX(x);
                for (int y = er.getY() - 1; y <= ey; y++) {
                    tryp.setY(y);
                    if (!diag_ok(er, tryp))
                        continue;
                    ObjectType ch = Util.winat(tryp);
                    if (IOUtil.step_ok(ch)) {
                        /*
                         * If it is a scroll, it might be a scare monster scroll
                         * so we need to look it up to see what type it is.
                         */
                        Thing obj2 = null;
                        if (ch == ObjectType.SCROLL) {
                            for (Thing obj : Global.lvl_obj) {
                                obj2 = obj;
                                if (obj.getOPos().equals(new Coordinate(x, y))) {
                                    break;
                                }
                            }
                            // NOTE(review): if no object's position matches
                            // (x, y), obj2 is left pointing at the LAST item
                            // examined — a Scare scroll lying elsewhere could
                            // then wrongly veto this square. This looks like a
                            // literal port of the C linked-list walk (where a
                            // full scan left the cursor NULL); verify intended
                            // behaviour before changing it.
                            if (obj2 instanceof Scare) {
                                continue;
                            }
                        }
                        /*
                         * It can also be a Xeroc, which we shouldn't step on
                         */
                        OriginalMonster obj3;
                        if ((obj3 = Util.getPlace(tryp).p_monst) != null && obj3.getType() == 'X') {
                            continue;
                        }
                        /*
                         * If we didn't find any scrolls at this place or it
                         * wasn't a scare scroll, then this place counts
                         */
                        int thisdist = dist_cp(tryp, ee);
                        if (thisdist < curdist) {
                            plcnt = 1;
                            ch_ret = tryp;
                            curdist = thisdist;
                        } else if (thisdist == curdist && Util.rnd(++plcnt) == 0) {
                            // Reservoir sampling: each tied square is chosen
                            // with equal probability.
                            ch_ret = tryp;
                            curdist = thisdist;
                        }
                    }
                }
            }
        }
        return (curdist != 0 && !ch_ret.equals(Human.instance.getPosition()));
    }
}
|
# Define a daysBetweenDates procedure that would produce the
# correct output if there was a correct nextDay procedure.
#
# Note that this will NOT produce correct outputs yet, since
# our nextDay procedure assumes all months have 30 days
# (hence a year is 360 days, instead of 365).
#
def nextDay(year, month, day):
    """Simple version: assume every month has 30 days.

    Returns the (year, month, day) tuple for the day after the given date,
    rolling over month 12 day 30 into January 1 of the next year.
    """
    if day >= 30:
        # End of a (30-day) month: roll into the next month or year.
        if month == 12:
            return year + 1, 1, 1
        return year, month + 1, 1
    return year, month, day + 1
def daysBetweenDates(year1, month1, day1, year2, month2, day2):
    """Returns the number of days between year1/month1/day1
    and year2/month2/day2. Assumes inputs are valid dates
    in Gregorian calendar, and the first date is not after
    the second.

    Implementation: walk forward one day at a time with nextDay(),
    counting steps, until the first date reaches the second. Tuple
    comparison orders (year, month, day) triples chronologically, so a
    single loop replaces the original year/month/day loop cascade while
    taking exactly the same number of nextDay() steps.
    """
    dayCounter = 0
    while (year1, month1, day1) < (year2, month2, day2):
        year1, month1, day1 = nextDay(year1, month1, day1)
        dayCounter += 1
    return dayCounter
def test():
    """Run daysBetweenDates against known-answer cases (30-day months).

    Uses the print() function form so the file runs under Python 3 as well;
    the original `print "...", x` statements are a SyntaxError there.
    """
    test_cases = [((2012, 9, 30, 2012, 10, 30), 30),
                  ((2012, 1, 1, 2013, 1, 1), 360),
                  ((2012, 9, 1, 2012, 9, 4), 3)]
    for (args, answer) in test_cases:
        result = daysBetweenDates(*args)
        if result != answer:
            print("Test with data: {0} failed".format(args))
        else:
            print("Test case passed!")

test()
|
"""
Main file for the MNIST Deep Learning
"""
from loader_mnist import LoaderMnist
import convnet as cvnet
def main():
    """Main program execution.

    Loads the MNIST dataset, builds a LeNet-style convolutional network
    (two conv/pool stages followed by three fully-connected layers), trains
    it until progress stalls, then displays results and learned weights.
    Exits via SystemExit if either the loader or the network fails to build.
    """
    # Dataset Loader
    dset_loader = None
    # Create the Dataset Loader if possible
    try:
        # Configure dataset loading and subdivision ==> USER
        # validation_percent: share of training data held out for validation
        # (0 = none; the test set is used instead, see the loop below).
        # limit_images: presumably caps how many images are loaded
        # (0 = no limit) — TODO confirm against LoaderMnist.
        validation_percent = 0
        limit_images = 0
        dset_loader = LoaderMnist("./mnist/train-images-idx3-ubyte",
                                  "./mnist/train-labels-idx1-ubyte",
                                  "./mnist/t10k-images-idx3-ubyte",
                                  "./mnist/t10k-labels-idx1-ubyte",
                                  validation_percent,
                                  limit_images)
    except RuntimeError as exception:
        print(exception)
        raise SystemExit
    # Create the Neural Network
    nnetwork = None
    try:
        # Create the network with its structure ==> USER
        # Each layer is fed the previous layer's `computation` output.
        layers = []
        layers.append(cvnet.ConvLayer(None, _input_shape=[28, 28, 1], _kernel_shape=[5, 5, 1, 6], _stride_shape=[1, 1], _is_input_layer=True))
        layers.append(cvnet.PoolingLayer(layers[-1].computation, _pool_shape=[2, 2], _stride_shape=[2, 2]))
        layers.append(cvnet.ConvLayer(layers[-1].computation, _input_shape=[14, 14, 6], _kernel_shape=[5, 5, 6, 16], _stride_shape=[1, 1]))
        layers.append(cvnet.PoolingLayer(layers[-1].computation, _pool_shape=[2, 2], _stride_shape=[2, 2]))
        layers.append(cvnet.FCLayer(layers[-1].computation, 120))
        layers.append(cvnet.FCLayer(layers[-1].computation, 84))
        layers.append(cvnet.FCLayer(layers[-1].computation, 10, _is_layer_output=True))
        nnetwork = cvnet.ConvNetwork(layers)
    except RuntimeError as exception:
        print(exception)
        raise SystemExit
    # Configuration of the Network Hyperparameters ==> USER
    train_batch_size = 128
    test_batch_size = 128
    learning_rate = 0.001
    learning_rate_decay_steps = 1000
    learning_rate_decay_amount = 0.98
    min_epochs_without_progress = 10
    nnetwork.ConfigureTraining(learning_rate, learning_rate_decay_steps, learning_rate_decay_amount, min_epochs_without_progress)
    # Start the tensorflow session
    nnetwork.StartTraining()
    # Start epochs training
    while nnetwork.CheckTrainingEnd() is False:
        # Start the current Epoch
        nnetwork.EpochStart()
        # Train batches until the loader reports the epoch's data is exhausted
        batch_completed = False
        while not batch_completed:
            # Execute a training step on the batch
            batch_completed, cur_batch = dset_loader.GetBatch(train_batch_size)
            nnetwork.RunTrainingStep(cur_batch)
        # Evaluate on the validation set if one was carved out,
        # otherwise fall back to the test set.
        if len(dset_loader.validation_set) > 0:
            nnetwork.AddTestResults(dset_loader.train_set, dset_loader.validation_set, test_batch_size)
        else:
            nnetwork.AddTestResults(dset_loader.train_set, dset_loader.test_set, test_batch_size)
        # Advance to next Epoch
        nnetwork.EpochEnd()
    # Close the Training Session
    nnetwork.StopTraining()
    # Display the training results and the first two layers' learned kernels
    nnetwork.DisplayResults()
    nnetwork.DisplayWeights(nnetwork.best_weights[0], _width=5, _height=5, _plot_rows=2, _plot_cols=3)
    nnetwork.DisplayWeights(nnetwork.best_weights[1], _width=5, _height=5, _plot_rows=4, _plot_cols=4)
    return


# Execute the program
if __name__ == '__main__':
    main()
|
<reponame>AndreasKl/elefantenstark
package net.andreaskluth.elefantenstark.maintenance;
import net.andreaskluth.elefantenstark.common.ElefantenStarkException;
/**
 * Exception type for the maintenance ("henchman") layer: wraps a lower-level
 * failure while preserving the original cause for diagnostics.
 */
public class HenchmanException extends ElefantenStarkException {

    private static final long serialVersionUID = -6367062171824949422L;

    /**
     * @param cause the underlying failure being wrapped; never discarded
     */
    public HenchmanException(Throwable cause) {
        super(cause);
    }
}
|
<filename>src/main/java/com/jfinal/weixin/sdk/api/AccessTokenApi.java
/**
* Copyright (c) 2011-2014, <NAME> 詹波 (<EMAIL>).
*
* Licensed under the Apache License, Version 2.0 (the "License");
*/
package com.jfinal.weixin.sdk.api;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import com.jfinal.kit.HttpKit;
import com.jfinal.weixin.sdk.kit.ParaMap;
/**
 * Authenticate against the WeChat API and obtain/cache access_token values.
 * http://mp.weixin.qq.com/wiki/index.php?title=%E8%8E%B7%E5%8F%96access_token
 */
public class AccessTokenApi {

    // "https://api.weixin.qq.com/cgi-bin/token?grant_type=client_credential&appid=APPID&secret=APPSECRET";
    private static String url = "https://api.weixin.qq.com/cgi-bin/token?grant_type=client_credential";

    // Keyed by appId so multiple accounts can be served concurrently.
    private static Map<String, AccessToken> map = new ConcurrentHashMap<String, AccessToken>(); // private static AccessToken accessToken;

    /**
     * Get the access token from the cache; if it is missing or no longer
     * usable, refresh first and then return whatever the refresh cached.
     */
    public static AccessToken getAccessToken() {
        String appId = ApiConfigKit.getApiConfig().getAppId();
        AccessToken result = map.get(appId);
        if (result != null && result.isAvailable())
            return result;
        refreshAccessToken();
        return map.get(appId);
    }

    /**
     * Force-refresh the access token value. Synchronized so concurrent
     * callers don't issue duplicate token requests.
     */
    public static synchronized void refreshAccessToken() {
        ApiConfig ac = ApiConfigKit.getApiConfig();
        AccessToken result = null;
        for (int i=0; i<3; i++) { // at most three request attempts
            String appId = ac.getAppId();
            String appSecret = ac.getAppSecret();
            Map<String, String> queryParas = ParaMap.create("appid", appId).put("secret", appSecret).getData();
            String json = HttpKit.get(url, queryParas);
            result = new AccessToken(json);
            if (result.isAvailable())
                break;
        }
        // Even if all three attempts returned an unusable token, still put it
        // into the cache so callers can inspect the error via AccessToken.
        map.put(ac.getAppId(), result);
    }

    public static void main(String[] args) {
        // NOTE(review): ad-hoc smoke test with hard-coded app credentials
        // ("<KEY>" looks like a redacted secret). This should not ship in a
        // library class — move credentials to configuration and delete main().
        ApiConfig ac = new ApiConfig();
        ac.setAppId("wx9803d1188fa5fbda");
        ac.setAppSecret("<KEY>");
        // ApiConfigKit.setThreadLocalApiConfig(ac);
        AccessToken at = getAccessToken();
        if (at.isAvailable())
            System.out.println("access_token : " + at.getAccessToken());
        else
            System.out.println(at.getErrorCode() + " : " + at.getErrorMsg());
    }
}
|
/** init domain config */
import Vue from 'vue'

// Expose the build-time API base URL both on the Vue prototype and globally.
Vue.prototype.API_BASE_URL = process.env.VUE_APP_API_BASE_URL
// NOTE(review): the key is spelled "domianURL" (sic). Other modules read this
// exact key, so do not fix the spelling here without updating every reader.
window._CONFIG['domianURL'] = Vue.prototype.API_BASE_URL

// File upload/download backend type; defaults to the mongodb attachment component.
// NOTE(review): Vue CLI only injects env vars prefixed VUE_APP_ into client
// builds — confirm BASE_FileType is actually defined at runtime, otherwise the
// "mongodb" fallback always wins.
Vue.prototype.BASE_FileType = process.env.BASE_FileType || "mongodb"
window._CONFIG['BASE_FileType'] = Vue.prototype.BASE_FileType

// Single sign-on (CAS) prefix URL.
window._CONFIG['casPrefixUrl'] = process.env.VUE_APP_CAS_BASE_URL
window._CONFIG['onlinePreviewDomainURL'] = process.env.VUE_APP_ONLINE_BASE_URL
window._CONFIG['staticDomainURL'] = Vue.prototype.API_BASE_URL + '/sys/common/static'
window._CONFIG['pdfDomainURL'] = Vue.prototype.API_BASE_URL+ '/sys/common/pdf/pdfPreviewIframe'
# Termux installer for OpenUserBot: installs build dependencies, clones the
# bot, installs its Python requirements, and bootstraps a local PostgreSQL DB.
clear
pkg update && pkg upgrade -y
# Native build dependencies (compilers, image/xml/crypto libs) needed by pip packages.
pkg install clang curl git libcrypt libffi libiconv libjpeg* libjpeg-turbo libwebp libxml2 libxslt make ndk-sysroot openssl postgresql python readline wget zlib -y
git clone https://github.com/mkaraniya/OpenUserBot.git
cd OpenUserBot
pip install --upgrade pip setuptools
pip install -r requirements.txt
# Start from the shipped sample configuration; the user edits it afterwards.
mv sample_config.env config.env
# Initialise and start a PostgreSQL cluster inside Termux's prefix,
# then create the bot's database and role.
mkdir -p $PREFIX/var/lib/postgresql
initdb $PREFIX/var/lib/postgresql
pg_ctl -D $PREFIX/var/lib/postgresql start
createdb botdb
createuser botuser
cd ..
# Generate a convenience launcher that boots the DB and runs the bot.
echo "pg_ctl -D $PREFIX/var/lib/postgresql start" > startbot.sh
echo "cd OpenUserBot" >> startbot.sh
echo "python3 -m userbot" >> startbot.sh
chmod 755 startbot.sh
echo "Done."
echo "Now edit config.env with nano or anything you want, then run the userbot with startbot.sh"
echo "Please edit the db to postgresql://botuser:@localhost:5432/botdb"
echo "Good luck!"
|
#!/bin/bash
# Provisions a database test box: installs MySQL server, Microsoft SQL Server
# 2019 (Ubuntu 20.04 packages) and the mssql command-line tools, then copies
# the integration-test configuration template into place.
echo " - Installing mysql-server ..."
sudo apt-get install mysql-server -y
echo "--------------------------"
echo " set password : Start123! "
echo " thanks "
echo "--------------------------"
sudo mysql_secure_installation
echo " - Installing SQL Server ..."
# Trust Microsoft's signing key and register the mssql-server 2019 repository.
wget -qO- https://packages.microsoft.com/keys/microsoft.asc | sudo apt-key add -
sudo add-apt-repository "$(wget -qO- https://packages.microsoft.com/config/ubuntu/20.04/mssql-server-2019.list)"
sudo apt-get update
sudo apt-get install -y mssql-server
sudo /opt/mssql/bin/mssql-conf setup
sudo apt install curl -y
# mssql-tools (sqlcmd/bcp) live in the separate "prod" repository.
curl https://packages.microsoft.com/config/ubuntu/20.04/prod.list | sudo tee /etc/apt/sources.list.d/msprod.list
sudo apt-get update
sudo apt-get install mssql-tools unixodbc-dev -y
echo 'export PATH="$PATH:/opt/mssql-tools/bin"' >> ~/.bash_profile
echo 'export PATH="$PATH:/opt/mssql-tools/bin"' >> ~/.bashrc
# NOTE(review): `source` here only affects this script's own shell, not the
# user's interactive session; the PATH exports above cover later logins.
source ~/.bashrc
cp ./IntegrationTestConfiguration.json_template ./source/dotnet/DbDeltaWatcher/IntegrationTestConfiguration.json
|
# This file shows how to compile; adjust for your own system. On the author's
# system the command below can be copied and run verbatim.
rm -f MulticsQQ_Server
# mysql_config supplies the MySQL include/link flags; pthread is required.
g++ -o MulticsQQ_Server $(mysql_config --cflags) MulticsQQ_Server.cpp $(mysql_config --libs) -lpthread
|
<gh_stars>1-10
package astrionic.adventofcode2020.solutions.day25
import astrionic.adventofcode2020.framework.AdventSolution
object Day25 extends AdventSolution {

  // writeSolution = true
  executePart = ExecutePart.One

  /**
   * Part 1: derive the encryption key from the two public keys.
   * Only the second key's loop size is needed; the original also computed
   * findLoopSize(pub0) (up to 1e9 iterations) and never used the result.
   */
  override def solvePart1(input: String): String = {
    val (pub0, pub1) = parseInput(input)
    val loop1 = findLoopSize(pub1).getOrElse(
      throw new NoSuchElementException(s"No loop size found for public key $pub1")
    )
    encode(pub0, loop1).toString
  }

  override def solvePart2(input: String): String = {
    ???
  }

  /** Parses the two newline-separated public keys; throws on malformed input. */
  private def parseInput(input: String): (Long, Long) = {
    val keyPattern = """^(\d+)\n(\d+)$""".r
    input match {
      case keyPattern(key0, key1) => (key0.toLong, key1.toLong)
      case _ => throw new Exception("Invalid input")
    }
  }

  /**
   * Finds the loop size that transforms subject number 7 into pubKey, i.e.
   * the discrete log of pubKey base 7 modulo 20201227. Returns None if no
   * loop size is found within maxIter iterations.
   */
  private def findLoopSize(pubKey: Long, maxIter: Long = 1_000_000_000): Option[Long] = {
    val subjectNum = 7L
    var n = 1L
    for (loopSize <- 1L to maxIter) {
      n = (n * subjectNum) % 20201227
      if (n == pubKey) return Some(loopSize)
    }
    None
  }

  /** Computes subjectNum^loopSize mod 20201227 by repeated multiplication. */
  private def encode(subjectNum: Long, loopSize: Long): Long = {
    (0L until loopSize).foldLeft(1L)((n, _) => (n * subjectNum) % 20201227)
  }
}
|
#!/usr/bin/env bash
# Byte-compile the Emacs configuration in batch mode. Known-noise warnings are
# filtered out; the final `! grep .` fails the pipeline if ANY other output
# remains, and on failure the stale arche.elc is removed so the next run
# recompiles from scratch.
set -e
set -o pipefail
(emacs --batch \
       --eval "(setq straight-safe-mode t)" \
       --load "$HOME/.emacs.d/init.el" \
       --funcall arche-batch-byte-compile 2>&1 \
     | (grep -v "In toplevel form" || true) \
     | (grep -v "In end of data" || true) \
     | (grep -v "Warning: Package cl is deprecated" || true) \
     | (grep -v "Warning: Missing format argument" || true) \
     | (! grep .)) || (rm -f emacs/arche.elc; false)
|
python transformers/examples/language-modeling/run_language_modeling.py --model_name_or_path train-outputs/512+512+512-NER/7-model --tokenizer_name model-configs/1536-config --eval_data_file ../data/wikitext-103-raw/wiki.valid.raw --output_dir eval-outputs/512+512+512-NER/7-1024+0+512-N-VB-IP-first-256 --do_eval --per_device_eval_batch_size 1 --dataloader_drop_last --augmented --augmentation_function replace_all_but_nouns_and_verbs_first_two_thirds_sixth --eval_function penultimate_sixth_eval |
#!/bin/sh
. /usr/share/openclash/log.sh
# Acquire an exclusive flock on FD 883 so only one instance of this script
# rewrites the custom-domain dnsmasq config at a time.
set_lock() {
   exec 883>"/tmp/lock/openclash_cus_domian.lock" 2>/dev/null
   flock -x 883 2>/dev/null
}
# Release the flock held on FD 883 and remove the lock file.
del_lock() {
   flock -u 883 2>/dev/null
   rm -rf "/tmp/lock/openclash_cus_domian.lock"
}
set_lock

# Rebuild the dnsmasq config routing user-listed domains to a secondary DNS.
rm -rf /tmp/dnsmasq.d/dnsmasq_openclash_custom_domain.conf >/dev/null 2>&1

# String comparison instead of -eq: when the uci option is unset the original
# numeric test printed "integer expression expected" to stderr; `= "1"` yields
# the same true/false outcomes silently.
if [ "$(uci get openclash.config.dns_advanced_setting 2>/dev/null)" = "1" ]; then
   LOG_OUT "Setting Secondary DNS Server List..."
   custom_domain_dns_server=$(uci get openclash.config.custom_domain_dns_server 2>/dev/null)
   [ -z "$custom_domain_dns_server" ] && {
      custom_domain_dns_server="114.114.114.114"
   }
   if [ -s "/etc/openclash/custom/openclash_custom_domain_dns.list" ]; then
      mkdir -p /tmp/dnsmasq.d
      # Pass the DNS server to awk as a %s *argument* rather than splicing it
      # into the printf format string: the original `"server=/%s/"'tag'"\n"`
      # concatenated the variable into the format, which would misbehave if
      # the value ever contained a % character. Blank and #-comment lines in
      # the domain list are skipped.
      awk -v tag="$custom_domain_dns_server" '!/^$/&&!/^#/{printf("server=/%s/%s\n",$0,tag)}' /etc/openclash/custom/openclash_custom_domain_dns.list >>/tmp/dnsmasq.d/dnsmasq_openclash_custom_domain.conf 2>/dev/null
   fi
fi

del_lock
package me.mrdaniel.adventuremmo.commands;
import org.spongepowered.api.command.CommandResult;
import org.spongepowered.api.command.CommandSource;
import org.spongepowered.api.command.args.CommandContext;
import org.spongepowered.api.command.spec.CommandExecutor;
import org.spongepowered.api.entity.living.player.Player;
import org.spongepowered.api.text.Text;
import org.spongepowered.api.text.format.TextColors;
import org.spongepowered.api.util.annotation.NonnullByDefault;
@NonnullByDefault
public abstract class PlayerCommand implements CommandExecutor {
@Override
public CommandResult execute(final CommandSource src, final CommandContext args) {
if (!(src instanceof Player)) {
src.sendMessage(Text.of(TextColors.RED, "This commands is for players only."));
return CommandResult.success();
}
Player p = (Player) src;
this.execute(p, args);
return CommandResult.success();
}
public abstract void execute(Player p, CommandContext args);
} |
package me.nerdytechy.staffchat.listeners;
import me.nerdytechy.staffchat.StaffChat;
import me.nerdytechy.staffchat.UpdateChecker;
import me.nerdytechy.staffchat.utils.Utils;
import org.bukkit.Bukkit;
import org.bukkit.entity.Player;
import org.bukkit.event.EventHandler;
import org.bukkit.event.Listener;
import org.bukkit.event.player.PlayerJoinEvent;
public class PlayerJoinListener implements Listener {

    private final StaffChat plugin;

    /** Registers this listener with Bukkit's plugin manager on construction. */
    public PlayerJoinListener(StaffChat plugin) {
        this.plugin = plugin;
        Bukkit.getPluginManager().registerEvents(this, plugin);
    }

    /**
     * Notifies joining staff members (permission "staffchat.*" or op) about a
     * pending plugin update, provided update notifications are enabled in the
     * plugin configuration.
     */
    @EventHandler
    public void onJoin(PlayerJoinEvent e) {
        if (!plugin.getConfig().getBoolean("update-notification")) {
            return;
        }
        Player joined = e.getPlayer();
        boolean isStaff = joined.hasPermission("staffchat.*") || joined.isOp();
        if (isStaff && UpdateChecker.updateAvailable) {
            joined.sendMessage(Utils.chat("&c&lStaffChat &r&8> &fUpdate Available!"));
            joined.sendMessage(Utils.chat("&c&lStaffChat &r&8> &7Download from: &fhttps://www.spigotmc.org/resources/95859/"));
        }
    }
}
|
#!/bin/bash
# Bootstrap the project via setup.sh, then start the dev server.
# -e: abort immediately if setup fails.
set -e
./setup.sh
npm run dev
|
import uuid
class NodeManager:
    """Registry of node results, each stored under a freshly minted UUID.

    Results are kept as (uuid, result, pub_key) tuples in insertion order in
    ``usingStemResults`` so that callers can later verify a claimed result
    against what was originally recorded.
    """

    def __init__(self):
        # Ordered list of (uuid, result, pub_key) tuples.
        self.usingStemResults = []

    def addResult(self, result, pub_key):
        """Record a result and return the fresh UUID assigned to it.

        Args:
            result (str): The result to be added.
            pub_key (str): The public key associated with the result.

        Returns:
            str: A unique identifier (UUID) for the stored result.
        """
        identifier = str(uuid.uuid4())
        self.usingStemResults.append((identifier, result, pub_key))
        return identifier

    def testNodeResult(self, uuid, result, pub_key):
        """Check whether (uuid, result, pub_key) matches a stored entry.

        Args:
            uuid (str): The unique identifier of the result to verify.
            result (str): The claimed result value.
            pub_key (str): The public key associated with the result.

        Returns:
            bool: True if an exactly matching entry exists, False otherwise.
        """
        # Note: the `uuid` parameter shadows the module name here, but this
        # method never touches the module, so lookups are unaffected.
        wanted = (uuid, result, pub_key)
        return any(entry == wanted for entry in self.usingStemResults)
<reponame>ch1huizong/learning
largeString = '%s%s something %s yet more' % (small1, small2, small3)
|
# Run recurrent formatting for every corpus split in both PTB and WikiText
# styles. The loops preserve the original execution order exactly:
# corpus_1..corpus_5 (ptb then wiki each), then the combined corpus, then the
# annotated set.
for i in 1 2 3 4 5; do
    for style in ptb wiki; do
        python recurrent_formatting.py "../formatted-files/corpus_${i}.tsv" "corpus_${i}" "$style"
    done
done
for style in ptb wiki; do
    python recurrent_formatting.py ../formatted-files/corpus_all.tsv corpus "$style"
done
for style in ptb wiki; do
    python recurrent_formatting.py ../formatted-files/annotated_all.tsv annotated "$style"
done
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.