text stringlengths 1 1.05M |
|---|
import {MigrationInterface, QueryRunner} from "typeorm"
/**
 * Migration: creates the "public"."users" table with a UUID primary key,
 * a unique email, a password hash and created/updated/deleted timestamps.
 */
export class UsersTable1594021018180 implements MigrationInterface {
  name = 'UsersTable1594021018180'

  /** Applies the migration: creates the users table. */
  public async up(queryRunner: QueryRunner): Promise<void> {
    const createUsersTable = `CREATE TABLE "public"."users" ("id" uuid NOT NULL DEFAULT uuid_generate_v4(), "email" character varying(255) NOT NULL, "password_hash" character varying(255) NOT NULL, "created_at" TIMESTAMP NOT NULL DEFAULT now(), "updated_at" TIMESTAMP NOT NULL DEFAULT now(), "deleted_at" TIMESTAMP, CONSTRAINT "UQ_12ffa5c867f6bb71e2690a526ce" UNIQUE ("email"), CONSTRAINT "PK_a6cc71bedf15a41a5f5ee8aea97" PRIMARY KEY ("id"))`
    await queryRunner.query(createUsersTable)
  }

  /** Reverts the migration: drops the users table. */
  public async down(queryRunner: QueryRunner): Promise<void> {
    await queryRunner.query(`DROP TABLE "public"."users"`)
  }
}
|
package com.boot.feign.article.fallback.impl;
import com.boot.feign.article.fallback.CategoryFallbackFeign;
import com.boot.pojo.Article;
import com.boot.pojo.Category;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
import java.util.List;
/**
 * Fallback implementation for {@code CategoryFallbackFeign}.
 * Each method returns a degraded default (null for object/list results,
 * 0 for counts) when the remote category service cannot be reached.
 *
 * NOTE(review): callers must tolerate null returns from the list/object
 * methods. The @Slf4j logger is declared but never used, so fallbacks
 * trigger silently — consider logging here; confirm with the team before
 * changing the null contract.
 */
@Component
@Slf4j
public class CategoryFallbackFeignImpl implements CategoryFallbackFeign {
// Fallback: no category names available.
@Override
public List<String> selectCategoryName() {
return null;
}
// Fallback: no category can be resolved by name.
@Override
public Category selectCategoryByName(String categoryName) {
return null;
}
// Fallback: report a count of zero.
@Override
public int selectCountByName(String categoryName) {
return 0;
}
// Fallback: no paged category data available.
@Override
public List<Category> selectCategories(int page, int limit) {
return null;
}
// Fallback: report a total count of zero.
@Override
public int selectCategoryCount() {
return 0;
}
// Fallback: no chart data available.
@Override
public List<Category> selectCategories_echarts() {
return null;
}
}
|
package com.globalcollect.gateway.sdk.java.gc.payment.definitions;
import com.globalcollect.gateway.sdk.java.gc.payment.definitions.MerchantAction;
import com.globalcollect.gateway.sdk.java.gc.payment.definitions.Payment;
import com.globalcollect.gateway.sdk.java.gc.payment.definitions.PaymentCreationOutput;
/**
 * Value object holding the outcome of a create-payment request: the
 * payment itself, optional creation output details, and an optional
 * follow-up merchant action. All properties default to null.
 */
public class CreatePaymentResult {

    private PaymentCreationOutput creationOutput = null;
    private MerchantAction merchantAction = null;
    private Payment payment = null;

    /** @return additional output produced while creating the payment, or null */
    public PaymentCreationOutput getCreationOutput() {
        return creationOutput;
    }

    /** @param creationOutput the creation output to store */
    public void setCreationOutput(PaymentCreationOutput creationOutput) {
        this.creationOutput = creationOutput;
    }

    /** @return an action the merchant should perform next, or null */
    public MerchantAction getMerchantAction() {
        return merchantAction;
    }

    /** @param merchantAction the merchant action to store */
    public void setMerchantAction(MerchantAction merchantAction) {
        this.merchantAction = merchantAction;
    }

    /** @return the created payment, or null */
    public Payment getPayment() {
        return payment;
    }

    /** @param payment the payment to store */
    public void setPayment(Payment payment) {
        this.payment = payment;
    }
}
|
<filename>src/main/java/evilcraft/items/BloodContainerConfig.java
package evilcraft.items;
import net.minecraft.item.ItemStack;
import net.minecraftforge.fluids.FluidContainerRegistry;
import net.minecraftforge.fluids.FluidRegistry;
import evilcraft.api.config.ElementTypeCategory;
import evilcraft.api.config.ItemConfig;
import evilcraft.api.config.configurable.ConfigurableProperty;
import evilcraft.fluids.Blood;
/**
* Config for the {@link BloodContainer}.
* @author rubensworks
*
*/
public class BloodContainerConfig extends ItemConfig {
/**
 * The unique instance (singleton pattern used by the config system).
 */
public static BloodContainerConfig _instance;
/**
 * Base container size in mB that will be multiplied every tier.
 */
@ConfigurableProperty(category = ElementTypeCategory.GENERAL, comment = "The base amount of blood (mB) this container can hold * the level of container.")
public static int containerSizeBase = 5000;
/**
 * Unlocalized names of the container tiers; the array index doubles as
 * the item damage value / tier level (see onRegistered below).
 */
public static String[] containerLevelNames = {"bloodCell", "bloodCan", "bloodBasin", "creativeBloodContainer"};
/**
 * Make a new instance.
 */
public BloodContainerConfig() {
super(
true,
"bloodContainer",
null,
BloodContainer.class
);
}
/**
 * Get the amount of container tiers.
 * @return The container tiers (one per entry in containerLevelNames).
 */
public static int getContainerLevels() {
return containerLevelNames.length;
}
// Registers one Blood fluid container per tier with Forge's
// FluidContainerRegistry; the tier is encoded in the item damage value
// and the capacity is queried per-stack from the item itself.
@Override
public void onRegistered() {
for(int level = 0; level < getContainerLevels(); level ++) {
ItemStack itemStack = new ItemStack(BloodContainer.getInstance(), 1, level);
FluidContainerRegistry.registerFluidContainer(
FluidRegistry.getFluidStack(Blood.getInstance().getName(), BloodContainer.getInstance().getCapacity(itemStack)),
itemStack
);
}
}
}
|
#!/bin/sh
# Build the Galore daemon (galored) together with its vendored leveldb.
# FIX: abort on the first failing command; previously a failed leveldb
# build silently continued into the daemon build and strip steps.
set -e

# Make the vendored build helper scripts executable.
chmod 755 src/leveldb/build_detect_platform
chmod 755 src/secp256k1/autogen.sh

# Build the leveldb static libraries first; makefile.unix links against them.
cd src/leveldb
make libleveldb.a libmemenv.a
cd ..

# Build the daemon, then strip debug symbols from the binary.
make -f makefile.unix
strip galored
|
"""
Generate a visual ASCII art in Python
"""
def generate_ascii(drawing):
    """Render a rectangular character grid as ASCII art with a '*' border.

    Args:
        drawing: list of equal-length rows, each row a list of
            single-character strings.

    Returns:
        str: the drawing surrounded by a one-character '*' border, with a
        newline after every rendered row. An empty drawing yields just the
        2x2 border.
    """
    # FIX: guard against an empty drawing (previously raised IndexError
    # on drawing[0]).
    height = len(drawing) + 2
    width = (len(drawing[0]) if drawing else 0) + 2

    result = ''
    for row in range(height):
        for col in range(width):
            if row == 0 or row == height - 1 or col == 0 or col == width - 1:
                result += '*'  # border cell
            else:
                # FIX: the old helper returned None for any character other
                # than '*', ' ' or '.', crashing with a TypeError on
                # concatenation. It was the identity on those three cases,
                # so passing the cell through generalizes to any character.
                result += drawing[row - 1][col - 1]
        result += '\n'
    return result
if __name__ == '__main__':
    # Demo: render a small 3x3 checkerboard pattern.
    sample = [
        ['*', ' ', '*'],
        [' ', '*', ' '],
        ['*', ' ', '*'],
    ]
    print(generate_ascii(sample))
<filename>java/ql/src/DeadCode/DeadField.java
// CodeQL documentation example (path: DeadCode/DeadField.java): a private
// field that is only read and never assigned, plus its unused private
// accessor. The "dead" code here is intentional — it is the pattern the
// dead-field query is meant to flag — so it must not be "fixed".
public class FieldOnlyRead {
private int deadField;
private int getDeadField() {
return deadField;
}
}
<gh_stars>1-10
/*******************************************************************************
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*******************************************************************************/
/* This file has been modified by Open Source Strategies, Inc. */
package org.ofbiz.workflow.impl;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.ofbiz.base.util.Debug;
import org.ofbiz.base.util.StringUtil;
import org.ofbiz.base.util.UtilDateTime;
import org.ofbiz.base.util.UtilMisc;
import org.ofbiz.base.util.UtilValidate;
import org.ofbiz.entity.Delegator;
import org.ofbiz.entity.GenericEntityException;
import org.ofbiz.entity.GenericValue;
import org.ofbiz.service.GenericResultWaiter;
import org.ofbiz.service.LocalDispatcher;
import org.ofbiz.service.job.Job;
import org.ofbiz.service.job.JobManager;
import org.ofbiz.service.job.JobManagerException;
import org.ofbiz.workflow.AlreadyRunning;
import org.ofbiz.workflow.CannotChangeRequester;
import org.ofbiz.workflow.CannotStart;
import org.ofbiz.workflow.CannotStop;
import org.ofbiz.workflow.InvalidData;
import org.ofbiz.workflow.InvalidPerformer;
import org.ofbiz.workflow.InvalidState;
import org.ofbiz.workflow.NotRunning;
import org.ofbiz.workflow.ResultNotAvailable;
import org.ofbiz.workflow.WfActivity;
import org.ofbiz.workflow.WfEventAudit;
import org.ofbiz.workflow.WfException;
import org.ofbiz.workflow.WfFactory;
import org.ofbiz.workflow.WfProcess;
import org.ofbiz.workflow.WfProcessMgr;
import org.ofbiz.workflow.WfRequester;
import org.ofbiz.workflow.WfUtil;
import org.ofbiz.workflow.client.StartActivityJob;
/**
* WfProcessImpl - Workflow Process Object implementation
*/
/**
 * WfProcessImpl - Workflow Process Object implementation.
 *
 * Coordinates the lifecycle of a workflow process: starting the initial
 * activity, routing transitions (AND/XOR splits and joins), collecting
 * results and notifying the requester when the process completes.
 */
public class WfProcessImpl extends WfExecutionObjectImpl implements WfProcess {

    public static final String module = WfProcessImpl.class.getName();

    protected WfRequester requester = null;
    protected WfProcessMgr manager = null;

    /**
     * Creates a WfProcess bound to an existing runtime value object.
     * @param valueObject the runtime entity backing this process
     * @param manager the process manager that owns this process definition
     * @throws WfException if initialization fails
     */
    public WfProcessImpl(GenericValue valueObject, WfProcessMgr manager) throws WfException {
        super(valueObject, null);
        this.manager = manager;
        this.requester = null;
        init();
    }

    /**
     * @see org.ofbiz.workflow.impl.WfExecutionObjectImpl#WfExecutionObjectImpl(org.ofbiz.entity.Delegator, java.lang.String)
     */
    public WfProcessImpl(Delegator delegator, String workEffortId) throws WfException {
        super(delegator, workEffortId);
        // an execution object carrying an activityId is an activity, not a process
        if (UtilValidate.isNotEmpty(activityId))
            throw new WfException("Execution object is not of type WfProcess.");
        this.manager = WfFactory.getWfProcessMgr(delegator, packageId, packageVersion, processId, processVersion);
        this.requester = null;
    }

    private void init() throws WfException {
        // since we are a process we don't have a context yet;
        // get the context to use with parsing descriptions from the manager
        Map<String, Object> context = manager.getInitialContext();
        this.parseDescriptions(context);
    }

    /**
     * @see org.ofbiz.workflow.WfProcess#setRequester(org.ofbiz.workflow.WfRequester)
     */
    public void setRequester(WfRequester newValue) throws WfException, CannotChangeRequester {
        if (requester != null)
            throw new CannotChangeRequester();
        requester = newValue;
    }

    /**
     * Returns at most maxNumber active activities; all of them when
     * maxNumber is not positive.
     * @see org.ofbiz.workflow.WfProcess#getSequenceStep(int)
     */
    public List<WfActivity> getSequenceStep(int maxNumber) throws WfException {
        List<WfActivity> steps = activeSteps();
        // FIX: the previous subList(0, maxNumber - 1) returned one activity
        // too few (toIndex is exclusive) and threw IndexOutOfBoundsException
        // whenever maxNumber exceeded the number of active steps.
        if (maxNumber > 0 && maxNumber < steps.size())
            return new ArrayList<WfActivity>(steps.subList(0, maxNumber));
        return steps;
    }

    /**
     * @see org.ofbiz.workflow.WfExecutionObject#abort()
     */
    @Override
    public void abort() throws WfException, CannotStop, NotRunning {
        super.abort();
        // cancel the active activities
        for (WfActivity activity : this.activeSteps()) {
            activity.abort();
        }
    }

    /**
     * @see org.ofbiz.workflow.WfProcess#start()
     */
    public void start() throws WfException, CannotStart, AlreadyRunning {
        start(null);
    }

    /**
     * Starts the process, optionally at a specific activity.
     * @param activityId the activity to start at; null starts at the
     *        process definition's defaultStartActivityId
     * @see org.ofbiz.workflow.WfProcess#start()
     */
    public void start(String activityId) throws WfException, CannotStart, AlreadyRunning {
        if (state().equals("open.running"))
            throw new AlreadyRunning("Process is already running");
        if (activityId == null && getDefinitionObject().get("defaultStartActivityId") == null)
            throw new CannotStart("Initial activity is not defined.");
        changeState("open.running");
        // start the first activity (using the defaultStartActivityId)
        GenericValue start = null;
        try {
            if (activityId != null) {
                GenericValue processDef = getDefinitionObject();
                Map<String, Object> fields = UtilMisc.toMap("packageId", (Object) processDef.getString("packageId"), "packageVersion",
                        processDef.getString("packageVersion"), "processId", processDef.getString("processId"),
                        "processVersion", processDef.getString("processVersion"), "activityId", activityId);
                start = getDelegator().findByPrimaryKey("WorkflowActivity", fields);
                // here we must check and make sure this activity is defined to as a starting activity
                if (!start.getBoolean("canStart").booleanValue())
                    throw new CannotStart("The specified activity cannot initiate the workflow process");
            } else {
                // this is either the first activity defined or specified as an ExtendedAttribute
                // since this is defined in XPDL, we don't care if canStart is set.
                start = getDefinitionObject().getRelatedOne("DefaultStartWorkflowActivity");
            }
        } catch (GenericEntityException e) {
            throw new WfException(e.getMessage(), e.getNested());
        }
        if (start == null)
            throw new CannotStart("No initial activity available");
        if (Debug.verboseOn())
            Debug.logVerbose("[WfProcess.start] : Started the workflow process.", module);
        // set the actualStartDate; failure here is non-fatal
        try {
            GenericValue v = getRuntimeObject();
            v.set("actualStartDate", UtilDateTime.nowTimestamp());
            v.store();
        } catch (GenericEntityException e) {
            // FIX: report through the logging framework instead of
            // e.printStackTrace(), which bypasses the Debug logger.
            Debug.logWarning(e, "Could not set 'actualStartDate'.", module);
        }
        startActivity(start);
    }

    /**
     * @see org.ofbiz.workflow.WfProcess#manager()
     */
    public WfProcessMgr manager() throws WfException {
        return manager;
    }

    /**
     * @see org.ofbiz.workflow.WfProcess#requester()
     */
    public WfRequester requester() throws WfException {
        return requester;
    }

    /**
     * @see org.ofbiz.workflow.WfProcess#getIteratorStep()
     */
    public Iterator<WfActivity> getIteratorStep() throws WfException {
        return activeSteps().iterator();
    }

    /**
     * @see org.ofbiz.workflow.WfProcess#isMemberOfStep(org.ofbiz.workflow.WfActivity)
     */
    public boolean isMemberOfStep(WfActivity member) throws WfException {
        return activeSteps().contains(member);
    }

    /**
     * @see org.ofbiz.workflow.WfProcess#getActivitiesInState(java.lang.String)
     */
    public Iterator<WfActivity> getActivitiesInState(String state) throws WfException, InvalidState {
        List<WfActivity> res = new ArrayList<WfActivity>();
        for (WfActivity a : activeSteps()) {
            if (a.state().equals(state))
                res.add(a);
        }
        return res.iterator();
    }

    /**
     * Collects the process results: every key of the manager's result
     * signature that is present in the process context.
     * @see org.ofbiz.workflow.WfProcess#result()
     */
    public Map<String, Object> result() throws WfException, ResultNotAvailable {
        Map<String, Object> resultSig = manager().resultSignature();
        Map<String, Object> results = new HashMap<String, Object>();
        Map<String, Object> context = processContext();
        if (resultSig != null) {
            Set<String> resultKeys = resultSig.keySet();
            for (String key : resultKeys) {
                if (context.containsKey(key))
                    results.put(key, context.get(key));
            }
        }
        return results;
    }

    /**
     * @see org.ofbiz.workflow.WfProcess#howManyStep()
     */
    public int howManyStep() throws WfException {
        return activeSteps().size();
    }

    /**
     * Merges an activity's results into the process context and persists it.
     * @see org.ofbiz.workflow.WfProcess#receiveResults(org.ofbiz.workflow.WfActivity, java.util.Map)
     */
    public synchronized void receiveResults(WfActivity activity, Map<String, Object> results) throws WfException, InvalidData {
        Map<String, Object> context = processContext();
        context.putAll(results);
        setSerializedData(context);
    }

    /**
     * @see org.ofbiz.workflow.WfProcess#activityComplete(org.ofbiz.workflow.WfActivity)
     */
    public synchronized void activityComplete(WfActivity activity) throws WfException {
        if (!activity.state().equals("closed.completed"))
            throw new WfException("Activity state is not completed");
        if (Debug.verboseOn()) Debug.logVerbose("[WfProcess.activityComplete] : Activity (" + activity.name() + ") is complete", module);
        queueNext(activity);
    }

    /**
     * @see org.ofbiz.workflow.impl.WfExecutionObjectImpl#executionObjectType()
     */
    @Override
    public String executionObjectType() {
        return "WfProcess";
    }

    // Queues the next activities for processing; finishes the process when
    // no outgoing transitions remain.
    private void queueNext(WfActivity fromActivity) throws WfException {
        List<GenericValue> nextTrans = getTransFrom(fromActivity);
        if (!nextTrans.isEmpty()) {
            for (GenericValue trans : nextTrans) {
                // Get the activity definition
                GenericValue toActivity = null;
                try {
                    toActivity = trans.getRelatedOne("ToWorkflowActivity");
                } catch (GenericEntityException e) {
                    throw new WfException(e.getMessage(), e);
                }
                // check for a join
                String join = "WJT_AND"; // default join is AND
                if (toActivity.get("joinTypeEnumId") != null)
                    join = toActivity.getString("joinTypeEnumId");
                if (Debug.verboseOn()) Debug.logVerbose("[WfProcess.queueNext] : " + join + " join.", module);
                // activate if XOR or test the join transition(s)
                if (join.equals("WJT_XOR"))
                    startActivity(toActivity);
                else
                    joinTransition(toActivity, trans);
            }
        } else {
            if (Debug.verboseOn())
                Debug.logVerbose("[WfProcess.queueNext] : No transitions left to follow.", module);
            this.finishProcess();
        }
    }

    // Follows the and-join transition: starts the target activity only when
    // every inbound transition has been followed; otherwise records this
    // transition in the WorkEffortTransBox holding area.
    private void joinTransition(GenericValue toActivity,
            GenericValue transition) throws WfException {
        // get all TO transitions to this activity
        GenericValue dataObject = getRuntimeObject();
        List<GenericValue> toTrans = null;
        try {
            toTrans = toActivity.getRelated("ToWorkflowTransition");
        } catch (GenericEntityException e) {
            throw new WfException(e.getMessage(), e);
        }
        // get a list of followed transition to this activity
        List<GenericValue> followed = null;
        try {
            Map<String, Object> fields = new HashMap<String, Object>();
            fields.put("processWorkEffortId", dataObject.getString("workEffortId"));
            fields.put("toActivityId", toActivity.getString("activityId"));
            followed = getDelegator().findByAnd("WorkEffortTransBox", fields);
        } catch (GenericEntityException e) {
            throw new WfException(e.getMessage(), e);
        }
        if (Debug.verboseOn()) Debug.logVerbose("[WfProcess.joinTransition] : toTrans (" + toTrans.size() + ") followed (" +
                (followed.size() + 1) + ")", module);
        // check to see if all transition requirements are met
        // (+1 accounts for the transition currently being followed)
        if (toTrans.size() == (followed.size() + 1)) {
            Debug.logVerbose("[WfProcess.joinTransition] : All transitions have followed.", module);
            startActivity(toActivity);
            // the join is satisfied; clear the recorded transitions
            try {
                Map<String, Object> fields = new HashMap<String, Object>();
                fields.put("processWorkEffortId", dataObject.getString("workEffortId"));
                fields.put("toActivityId", toActivity.getString("activityId"));
                getDelegator().removeByAnd("WorkEffortTransBox", fields);
            } catch (GenericEntityException e) {
                throw new WfException(e.getMessage(), e);
            }
        } else {
            Debug.logVerbose("[WfProcess.joinTransition] : Waiting for transitions to finish.", module);
            // record this transition so a later arrival can complete the join
            try {
                Map<String, Object> fields = new HashMap<String, Object>();
                fields.put("processWorkEffortId", dataObject.getString("workEffortId"));
                fields.put("toActivityId", toActivity.getString("activityId"));
                fields.put("transitionId", transition.getString("transitionId"));
                GenericValue obj = getDelegator().makeValue("WorkEffortTransBox", fields);
                getDelegator().create(obj);
            } catch (GenericEntityException e) {
                throw new WfException(e.getMessage(), e);
            }
        }
    }

    // Activates an activity object by scheduling it on the job manager.
    private void startActivity(GenericValue value) throws WfException {
        WfActivity activity = WfFactory.getWfActivity(value, workEffortId);
        GenericResultWaiter req = new GenericResultWaiter();
        if (Debug.verboseOn()) Debug.logVerbose("[WfProcess.startActivity] : Attempting to start activity (" + activity.name() + ")", module);
        // locate the dispatcher to use
        LocalDispatcher dispatcher = this.getDispatcher();
        // get the job manager
        JobManager jm = dispatcher.getJobManager();
        if (jm == null) {
            throw new WfException("No job manager found on the service dispatcher; cannot start activity");
        }
        // using the StartActivityJob class to run the activity within its own thread
        try {
            Job activityJob = new StartActivityJob(activity, req);
            jm.runJob(activityJob);
        } catch (JobManagerException e) {
            throw new WfException("JobManager error", e);
        }
        // the GenericRequester object will hold any exceptions; and report the job as failed
        if (req.status() == GenericResultWaiter.SERVICE_FAILED) {
            Throwable reqt = req.getThrowable();
            if (reqt instanceof CannotStart)
                Debug.logVerbose("[WfProcess.startActivity] : Cannot start activity. Waiting for manual start.", module);
            else if (reqt instanceof AlreadyRunning)
                throw new WfException("Activity already running", reqt);
            else
                throw new WfException("Activity error", reqt);
        }
    }

    // Determine the next activity or activities: evaluates every outgoing
    // transition's condition; an XOR split takes the first passing
    // transition (or the OTHERWISE transition when none pass).
    private List<GenericValue> getTransFrom(WfActivity fromActivity) throws WfException {
        List<GenericValue> transList = new ArrayList<GenericValue>();
        // get the from transitions
        List<GenericValue> fromTransitions = null;
        try {
            fromTransitions = fromActivity.getDefinitionObject().getRelated("FromWorkflowTransition");
        } catch (GenericEntityException e) {
            throw new WfException(e.getMessage(), e);
        }
        // check for a split
        String split = "WST_AND"; // default split is AND
        if (fromActivity.getDefinitionObject().get("splitTypeEnumId") != null)
            split = fromActivity.getDefinitionObject().getString("splitTypeEnumId");
        // the default value is TRUE, so if no expression is supplied we evaluate as true
        boolean transitionOk = true;
        // the otherwise condition (only used by XOR splits)
        GenericValue otherwise = null;
        // iterate through the possible transitions
        for (GenericValue transition : fromTransitions) {
            // if this transition is OTHERWISE store it for later and continue on
            if (transition.get("conditionTypeEnumId") != null && transition.getString("conditionTypeEnumId").equals("WTC_OTHERWISE")) {
                // there should be only one of these, if there is more then one we will use the last one defined
                otherwise = transition;
                continue;
            }
            // get the condition body from the condition tag
            String conditionBody = transition.getString("conditionExpr");
            // get the extended attributes for the transition
            Map<String, String> extendedAttr = StringUtil.strToMap(transition.getString("extendedAttributes"));
            // check for a conditionClassName attribute if exists use it
            if (extendedAttr != null && extendedAttr.get("conditionClassName") != null) {
                String conditionClassName = (String) extendedAttr.get("conditionClassName");
                transitionOk = this.evalConditionClass(conditionClassName, conditionBody, this.processContext(), extendedAttr);
            } else {
                // since no condition class is supplied, evaluate the expression using bsh
                if (conditionBody != null) {
                    transitionOk = this.evalBshCondition(conditionBody, this.processContext());
                }
            }
            if (transitionOk) {
                transList.add(transition);
                if (split.equals("WST_XOR"))
                    break;
            }
        }
        // we only use the otherwise transition for XOR splits
        if (split.equals("WST_XOR") && transList.isEmpty() && otherwise != null) {
            transList.add(otherwise);
            Debug.logVerbose("Used OTHERWISE Transition.", module);
        }
        if (Debug.verboseOn()) Debug.logVerbose("[WfProcess.getTransFrom] : Transitions: " + transList.size(), module);
        return transList;
    }

    // Complete this workflow and notify the requester, if any.
    private void finishProcess() throws WfException {
        changeState("closed.completed");
        Debug.logVerbose("[WfProcess.finishProcess] : Workflow Complete. Calling back to requester.", module);
        if (requester != null) {
            WfEventAudit audit = WfFactory.getWfEventAudit(this, null); // this will need to be updated
            try {
                requester.receiveEvent(audit);
            } catch (InvalidPerformer e) {
                throw new WfException(e.getMessage(), e);
            }
        }
    }

    // Get the active process activities: child WorkEfforts whose OMG status
    // is in the "open." family.
    private List<WfActivity> activeSteps() throws WfException {
        List<WfActivity> steps = new ArrayList<WfActivity>();
        List<GenericValue> workEffortList = null;
        try {
            workEffortList = getDelegator().findByAnd("WorkEffort", UtilMisc.toMap("workEffortParentId", runtimeKey()));
        } catch (GenericEntityException e) {
            throw new WfException(e.getMessage(), e);
        }
        if (workEffortList == null) {
            return steps;
        }
        for (GenericValue workEffort : workEffortList) {
            if (workEffort.get("currentStatusId") != null &&
                    WfUtil.getOMGStatus(workEffort.getString("currentStatusId")).startsWith("open."))
                steps.add(WfFactory.getWfActivity(getDelegator(), workEffort.getString("workEffortId")));
        }
        return steps;
    }
}
|
/* -*- Mode: C; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* ***** BEGIN LICENSE BLOCK *****
* Version: MPL 1.1/GPL 2.0/LGPL 2.1
*
* The contents of this file are subject to the Mozilla Public License Version
* 1.1 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* http://www.mozilla.org/MPL/
*
* Software distributed under the License is distributed on an "AS IS" basis,
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
* for the specific language governing rights and limitations under the
* License.
*
* The Original Code is Mozilla Universal charset detector code.
*
* The Initial Developer of the Original Code is
* Netscape Communications Corporation.
* Portions created by the Initial Developer are Copyright (C) 2001
* the Initial Developer. All Rights Reserved.
*
* Contributor(s):
* <NAME> <<EMAIL>>
*
* Alternatively, the contents of this file may be used under the terms of
* either the GNU General Public License Version 2 or later (the "GPL"), or
* the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
* in which case the provisions of the GPL or the LGPL are applicable instead
* of those above. If you wish to allow use of your version of this file only
* under the terms of either the GPL or the LGPL, and not to allow others to
* use your version of this file under the terms of the MPL, indicate your
* decision by deleting the provisions above and replace them with the notice
* and other provisions required by the GPL or the LGPL. If you do not delete
* the provisions above, a recipient may use your version of this file under
* the terms of any one of the MPL, the GPL or the LGPL.
*
* ***** END LICENSE BLOCK ***** */
#pragma GCC visibility push(hidden)
#include "nsLatin1Prober.h"
#include "prmem.h"
#include <stdio.h>
#define UDF 0 // undefined
#define OTH 1 //other
#define ASC 2 // ascii capital letter
#define ASS 3 // ascii small letter
#define ACV 4 // accent capital vowel
#define ACO 5 // accent capital other
#define ASV 6 // accent small vowel
#define ASO 7 // accent small other
#define CLASS_NUM 8 // total classes
// Maps each byte value (0x00-0xFF) to one of the CLASS_NUM character
// classes above; indexed directly by the unsigned input byte in HandleData.
static unsigned char Latin1_CharToClass[] =
{
OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, // 00 - 07
OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, // 08 - 0F
OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, // 10 - 17
OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, // 18 - 1F
OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, // 20 - 27
OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, // 28 - 2F
OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, // 30 - 37
OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, // 38 - 3F
OTH, ASC, ASC, ASC, ASC, ASC, ASC, ASC, // 40 - 47
ASC, ASC, ASC, ASC, ASC, ASC, ASC, ASC, // 48 - 4F
ASC, ASC, ASC, ASC, ASC, ASC, ASC, ASC, // 50 - 57
ASC, ASC, ASC, OTH, OTH, OTH, OTH, OTH, // 58 - 5F
OTH, ASS, ASS, ASS, ASS, ASS, ASS, ASS, // 60 - 67
ASS, ASS, ASS, ASS, ASS, ASS, ASS, ASS, // 68 - 6F
ASS, ASS, ASS, ASS, ASS, ASS, ASS, ASS, // 70 - 77
ASS, ASS, ASS, OTH, OTH, OTH, OTH, OTH, // 78 - 7F
OTH, UDF, OTH, ASO, OTH, OTH, OTH, OTH, // 80 - 87
OTH, OTH, ACO, OTH, ACO, UDF, ACO, UDF, // 88 - 8F
UDF, OTH, OTH, OTH, OTH, OTH, OTH, OTH, // 90 - 97
OTH, OTH, ASO, OTH, ASO, UDF, ASO, ACO, // 98 - 9F
OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, // A0 - A7
OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, // A8 - AF
OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, // B0 - B7
OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, // B8 - BF
ACV, ACV, ACV, ACV, ACV, ACV, ACO, ACO, // C0 - C7
ACV, ACV, ACV, ACV, ACV, ACV, ACV, ACV, // C8 - CF
ACO, ACO, ACV, ACV, ACV, ACV, ACV, OTH, // D0 - D7
ACV, ACV, ACV, ACV, ACV, ACO, ACO, ACO, // D8 - DF
ASV, ASV, ASV, ASV, ASV, ASV, ASO, ASO, // E0 - E7
ASV, ASV, ASV, ASV, ASV, ASV, ASV, ASV, // E8 - EF
ASO, ASO, ASV, ASV, ASV, ASV, ASV, OTH, // F0 - F7
ASV, ASV, ASV, ASV, ASV, ASO, ASO, ASO, // F8 - FF
};
/* 0 : illegal
1 : very unlikely
2 : normal
3 : very likely
*/
// CLASS_NUM x CLASS_NUM bigram likelihood table: row = previous character
// class, column = current class, indexed [prev * CLASS_NUM + cur] in
// HandleData. A 0 entry means the pair cannot occur in Latin-1 text.
static unsigned char Latin1ClassModel[] =
{
/* UDF OTH ASC ASS ACV ACO ASV ASO */
/*UDF*/ 0, 0, 0, 0, 0, 0, 0, 0,
/*OTH*/ 0, 3, 3, 3, 3, 3, 3, 3,
/*ASC*/ 0, 3, 3, 3, 3, 3, 3, 3,
/*ASS*/ 0, 3, 3, 3, 1, 1, 3, 3,
/*ACV*/ 0, 3, 3, 3, 1, 2, 1, 2,
/*ACO*/ 0, 3, 3, 3, 3, 3, 3, 3,
/*ASV*/ 0, 3, 1, 3, 1, 1, 1, 3,
/*ASO*/ 0, 3, 1, 3, 1, 1, 3, 3,
};
// Restore the prober to its initial state so a fresh stream can be analysed:
// detection active, no previous character, all frequency buckets cleared.
void nsLatin1Prober::Reset(void)
{
  mLastCharClass = OTH;
  mState = eDetecting;
  for (int cat = FREQ_CAT_NUM; cat-- > 0; )
    mFreqCounter[cat] = 0;
}
// Feeds a chunk of bytes to the prober: classifies each byte, scores the
// (previous class, current class) bigram against Latin1ClassModel, and
// moves to eNotMe on the first illegal pair. Returns the current state.
nsProbingState nsLatin1Prober::HandleData(const char* aBuf, PRUint32 aLen)
{
char *newBuf1 = 0;
PRUint32 newLen1 = 0;
// Try to strip English letters first; on failure fall back to the raw
// buffer (newBuf1 then aliases aBuf and must not be freed).
if (!FilterWithEnglishLetters(aBuf, aLen, &newBuf1, newLen1)) {
newBuf1 = (char*)aBuf;
newLen1 = aLen;
}
unsigned char charClass;
unsigned char freq;
for (PRUint32 i = 0; i < newLen1; i++)
{
charClass = Latin1_CharToClass[(unsigned char)newBuf1[i]];
freq = Latin1ClassModel[mLastCharClass*CLASS_NUM + charClass];
// freq 0 marks an impossible bigram: this is not Latin-1.
if (freq == 0) {
mState = eNotMe;
break;
}
mFreqCounter[freq]++;
mLastCharClass = charClass;
}
// Only free the buffer when the filter allocated a copy.
if (newBuf1 != aBuf)
PR_FREEIF(newBuf1);
return mState;
}
float nsLatin1Prober::GetConfidence(void)
{
if (mState == eNotMe)
return 0.01f;
float confidence;
PRUint32 total = 0;
for (PRInt32 i = 0; i < FREQ_CAT_NUM; i++)
total += mFreqCounter[i];
if(!total)
confidence = 0.0f;
else
{
confidence = mFreqCounter[3]*1.0f / total;
confidence -= mFreqCounter[1]*20.0f/total;
}
if (confidence < 0.0f)
confidence = 0.0f;
// lower the confidence of latin1 so that other more accurate detector
// can take priority.
confidence *= 0.50f;
return confidence;
}
#ifdef DEBUG_chardet
// Debug-only: prints the current confidence and charset name to stdout.
void nsLatin1Prober::DumpStatus()
{
printf(" Latin1Prober: %1.3f [%s]\r\n", GetConfidence(), GetCharSetName());
}
#endif
#pragma GCC visibility pop
|
<reponame>shadowcrypto1/dsynths-app-v2<filename>src/components/Icons/ArrowBubble.tsx
import React from 'react'
import { useIsDarkMode } from 'state/user/hooks'
import { useTheme } from 'styled-components'
/**
 * Right-pointing arrow inside a circle, as an inline SVG icon.
 *
 * @param size  rendered width/height in pixels (default 12; the SVG scales
 *              its 30x30 viewBox to this size)
 * @param rest  any extra props, spread onto the <svg> element
 */
export default function ArrowBubble({ size = 12, ...rest }: { size?: number; [x: string]: any }) {
const isDarkMode = useIsDarkMode()
const theme = useTheme()
// Theme-dependent colors: solid black circle with no outline in dark mode,
// themed background with a visible stroke in light mode.
const circleFill = isDarkMode ? 'black' : theme.bg1
const circleStroke = isDarkMode ? 'none' : theme.text2
const arrow = isDarkMode ? 'white' : theme.text1
return (
<svg width={size} height={size} viewBox="0 0 30 30" fill="none" {...rest}>
<circle cx="15" cy="15" r="14.5" transform="rotate(90 15 15)" fill={circleFill} stroke={circleStroke} />
<path
fillRule="evenodd"
clipRule="evenodd"
fill={arrow}
d="M16.3808 10.1989L20.7018 14.5199C20.967 14.785 20.967 15.215 20.7018 15.4801L16.3808 19.8011C16.1156 20.0663 15.6857 20.0663 15.4206 19.8011C15.1554 19.536 15.1554 19.1061 15.4206 18.8409L18.5825 15.679L8.67898 15.679C8.30399 15.679 8 15.375 8 15C8 14.625 8.30399 14.321 8.67898 14.321L18.5825 14.321L15.4206 11.1591C15.1554 10.8939 15.1554 10.464 15.4206 10.1989C15.6857 9.93371 16.1156 9.93371 16.3808 10.1989Z"
/>
</svg>
)
}
|
<filename>second-example-application/src/main/java/com/github/robindevilliers/welcometohell/wizard/expression/function/MatchesFunction.java
package com.github.robindevilliers.welcometohell.wizard.expression.function;
import com.github.robindevilliers.welcometohell.wizard.expression.Function;
import java.util.Map;
/**
 * Expression function that evaluates whether the left-hand operand (a
 * String) matches the right-hand operand (a java.util.regex pattern,
 * matched against the entire string via {@link String#matches(String)}).
 */
public class MatchesFunction implements Function<Boolean> {

    // Operands are fixed at construction time; final for immutability.
    private final Function<?> lhs;
    private final Function<?> rhs;

    /**
     * @param lhs expression yielding the string to test
     * @param rhs expression yielding the regular expression
     */
    public MatchesFunction(Function<?> lhs, Function<?> rhs) {
        this.lhs = lhs;
        this.rhs = rhs;
    }

    /**
     * Evaluates both operands against the scope and applies the match.
     *
     * @param scope variable bindings used to evaluate the operands
     * @return true if the whole left-hand string matches the pattern
     * @throws RuntimeException if either operand does not evaluate to a String
     */
    @Override
    public Boolean apply(Map<String, Object> scope) {
        Object left = this.lhs.apply(scope);
        Object right = this.rhs.apply(scope);
        if (left instanceof String && right instanceof String) {
            String string = (String) left;
            String pattern = (String) right;
            return string.matches(pattern);
        }
        // FIX: include the offending operand types in the message; the old
        // generic "Unsupported types." gave no diagnostic information.
        throw new RuntimeException("Invalid expression. Unsupported types: lhs="
                + (left == null ? "null" : left.getClass().getName())
                + ", rhs=" + (right == null ? "null" : right.getClass().getName()));
    }
}
|
package kz.chesschicken.chickenextensions.content.overworld
import kz.chesschicken.chickenextensions.content.overworld.OverworldListener._
import kz.chesschicken.chickenextensions.content.overworld.block.{TileFlower, TileLeaves, TileSapling}
import net.minecraft.block.BlockBase
import net.minecraft.item.ItemBase
import net.modificationstation.stationapi.api.client.texture.{TextureFactory, TextureRegistry}
import net.modificationstation.stationapi.api.common.event.EventListener
import net.modificationstation.stationapi.api.common.event.registry.RegistryEvent
import net.modificationstation.stationapi.api.common.registry.{Identifier, ModID}
import net.modificationstation.stationapi.api.common.mod.entrypoint.Entrypoint
import net.modificationstation.stationapi.api.common.util.Null
import net.modificationstation.stationapi.template.common.block.{TemplateRedstoneTorch, TemplateTorch}
// Companion object holding the mod id and the mutable registries of this
// mod's overworld content. Fields default to null (Scala `_`) and are
// assigned during registration events.
// NOTE(review): only boneTorch/colourSapling/colourLeaves/colourFlower are
// assigned in the visible registerBlocks; the item* fields and colourWood
// are presumably assigned by an item-registration listener elsewhere —
// confirm before relying on them.
object OverworldListener
{
// Mod id injected by StationAPI's @Entrypoint annotation processing.
@Entrypoint.ModID val MOD_ID: ModID = Null.get
var boneTorch : net.minecraft.block.BlockBase = _
var itemSteakRaw: ItemBase = _
var itemSteakCooked: ItemBase = _
var itemMuttonRaw: ItemBase = _
var itemMuttonCooked: ItemBase = _
var itemChickenRaw: ItemBase = _
var itemChickenCooked: ItemBase = _
var itemRottenFlesh: ItemBase = _
var colourWood: BlockBase = _
var colourSapling: BlockBase = _
var colourLeaves: BlockBase = _
var colourFlower: BlockBase = _
// Texture atlas positions, assigned elsewhere (default 0).
var textureColourWood = 0
var textureColourSapling = 0
var textureColourLeaves = 0
var textureColourFlower = 0
}
// Event-listener entrypoint: registers this mod's blocks and item textures
// via StationAPI events. Results are stored in the companion object.
class OverworldListener {
// Registers the overworld blocks and assigns them to the companion
// object's fields.
@EventListener
def registerBlocks(event: RegistryEvent.Blocks): Unit =
{
boneTorch = new TemplateTorch(Identifier.of(MOD_ID, "bonetorch"), 0).setTranslationKey(MOD_ID, "bonetorch")
colourSapling = new TileSapling(Identifier.of(MOD_ID, "coloursapling")).setTranslationKey(MOD_ID, "coloursapling")
colourLeaves = new TileLeaves(Identifier.of(MOD_ID, "colourleaves")).setTranslationKey(MOD_ID,"colourleaves")
colourFlower = new TileFlower(Identifier.of(MOD_ID, "colourflower")).setTranslationKey(MOD_ID, "colourflower")
}
// Assigns item-atlas texture positions to the food/flesh items.
// NOTE(review): the item* fields are never assigned in this file, so this
// method NPEs unless an unseen listener registers the items first; also,
// unlike registerBlocks, it takes no event parameter — confirm that the
// event bus dispatches it as intended.
@EventListener
def registerTextures(): Unit = {
itemSteakRaw.setTexturePosition(TextureFactory.INSTANCE.addTexture(TextureRegistry.getRegistry("GUI_ITEMS"), "/assets/goldenfeaturessap/textures/content/overworld/item/steakRaw.png"))
itemSteakCooked.setTexturePosition(TextureFactory.INSTANCE.addTexture(TextureRegistry.getRegistry("GUI_ITEMS"), "/assets/goldenfeaturessap/textures/content/overworld/item/steakCooked.png"))
itemMuttonRaw.setTexturePosition(TextureFactory.INSTANCE.addTexture(TextureRegistry.getRegistry("GUI_ITEMS"), "/assets/goldenfeaturessap/textures/content/overworld/item/muttonRaw.png"))
itemMuttonCooked.setTexturePosition(TextureFactory.INSTANCE.addTexture(TextureRegistry.getRegistry("GUI_ITEMS"), "/assets/goldenfeaturessap/textures/content/overworld/item/muttonCooked.png"))
itemChickenRaw.setTexturePosition(TextureFactory.INSTANCE.addTexture(TextureRegistry.getRegistry("GUI_ITEMS"), "/assets/goldenfeaturessap/textures/content/overworld/item/chickenRaw.png"))
itemChickenCooked.setTexturePosition(TextureFactory.INSTANCE.addTexture(TextureRegistry.getRegistry("GUI_ITEMS"), "/assets/goldenfeaturessap/textures/content/overworld/item/chickenCooked.png"))
itemRottenFlesh.setTexturePosition(TextureFactory.INSTANCE.addTexture(TextureRegistry.getRegistry("GUI_ITEMS"), "/assets/goldenfeaturessap/textures/content/overworld/item/fleshRotten.png"))
}
}
|
module Chewy
  # Process-wide singleton storing user-defined Elasticsearch analysis
  # components (analyzers, tokenizers, token filters and char filters)
  # by name, so index definitions can reference them symbolically.
  class Repository
    include Singleton

    # Name => settings-hash storage for each component type.
    attr_reader :analyzers, :tokenizers, :filters, :char_filters

    # Instance methods defined by this class itself (excluding those
    # inherited from Object/Singleton); the top-level Chewy module
    # delegates these to the singleton instance.
    def self.delegated
      public_instance_methods - superclass.public_instance_methods - Singleton.public_instance_methods
    end

    # Metaprogramming helper: defines an accessor method named +name+
    # which acts as a combined getter/setter -- when +options+ is given
    # it stores the settings under the (symbolized) key, otherwise it
    # returns the stored settings.
    def self.repository(name)
      plural_name = name.to_s.pluralize

      class_eval <<-METHOD, __FILE__, __LINE__ + 1
        def #{name}(name, options = nil)
          options ? #{plural_name}[name.to_sym] = options : #{plural_name}[name.to_sym]
        end
      METHOD
    end

    # Analysers repository:
    #
    #   Chewy.analyzer :my_analyzer2, {
    #     type: custom,
    #     tokenizer: 'my_tokenizer1',
    #     filter : ['my_token_filter1', 'my_token_filter2']
    #     char_filter : ['my_html']
    #   }
    #   Chewy.analyzer(:my_analyzer2) # => {type: 'custom', tokenizer: ...}
    #
    repository :analyzer

    # Tokenizers repository:
    #
    #   Chewy.tokenizer :my_tokenizer1, {type: standard, max_token_length: 900}
    #   Chewy.tokenizer(:my_tokenizer1) # => {type: standard, max_token_length: 900}
    #
    repository :tokenizer

    # Token filters repository:
    #
    #   Chewy.filter :my_token_filter1, {type: stop, stopwords: [stop1, stop2, stop3, stop4]}
    #   Chewy.filter(:my_token_filter1) # => {type: stop, stopwords: [stop1, stop2, stop3, stop4]}
    #
    repository :filter

    # Char filters repository:
    #
    #   Chewy.char_filter :my_html, {type: html_strip, escaped_tags: [xxx, yyy], read_ahead: 1024}
    #   Chewy.char_filter(:my_html) # => {type: html_strip, escaped_tags: [xxx, yyy], read_ahead: 1024}
    #
    repository :char_filter

    # Start with empty storage for every component type.
    def initialize
      @analyzers = {}
      @tokenizers = {}
      @filters = {}
      @char_filters = {}
    end
  end
end
|
#!/bin/bash
#
# Copies SRC_PATH from the current repository to DST_PATH in another
# repository/branch, optionally filtering files, and pushes the result.
#
# @author André Storhaug <andr3.storhaug@gmail.com>
# @date 2021-05-01
# @license MIT
# @version 3.2.4

set -o pipefail
# extglob/globstar: '**' and extended patterns in FILTER;
# nullglob: unmatched globs expand to nothing; dotglob: include dotfiles.
shopt -s extglob globstar nullglob dotglob

# All configuration arrives as INPUT_* environment variables
# (GitHub Actions convention).
PERSONAL_TOKEN="$INPUT_PERSONAL_TOKEN"
SRC_PATH="$INPUT_SRC_PATH"
DST_PATH="$INPUT_DST_PATH"
DST_OWNER="$INPUT_DST_OWNER"
DST_REPO_NAME="$INPUT_DST_REPO_NAME"
DST_BRANCH="$INPUT_DST_BRANCH"
CLEAN="$INPUT_CLEAN"
FILE_FILTER="$INPUT_FILE_FILTER"
FILTER="$INPUT_FILTER"
EXCLUDE="$INPUT_EXCLUDE"
SRC_WIKI="$INPUT_SRC_WIKI"
DST_WIKI="$INPUT_DST_WIKI"
COMMIT_MESSAGE="$INPUT_COMMIT_MESSAGE"
USERNAME="$INPUT_USERNAME"
EMAIL="$INPUT_EMAIL"

# Mandatory inputs.
if [[ -z "$SRC_PATH" ]]; then
    echo "SRC_PATH environment variable is missing. Cannot proceed."
    exit 1
fi
if [[ -z "$DST_OWNER" ]]; then
    echo "DST_OWNER environment variable is missing. Cannot proceed."
    exit 1
fi
if [[ -z "$DST_REPO_NAME" ]]; then
    echo "DST_REPO_NAME environment variable is missing. Cannot proceed."
    exit 1
fi

# Wiki repositories live at "<repo>.wiki"; fold the booleans into suffixes.
if [ "$SRC_WIKI" = "true" ]; then
    SRC_WIKI=".wiki"
else
    SRC_WIKI=""
fi
if [ "$DST_WIKI" = "true" ]; then
    DST_WIKI=".wiki"
else
    DST_WIKI=""
fi

# EXCLUDE without FILTER implies "everything except EXCLUDE".
if [[ -n "$EXCLUDE" && -z "$FILTER" ]]; then
    FILTER="**"
fi

# Defaults derived from the Actions runtime.
BASE_PATH=$(pwd)
DST_PATH="${DST_PATH:-${SRC_PATH}}"
USERNAME="${USERNAME:-${GITHUB_ACTOR}}"
EMAIL="${EMAIL:-${GITHUB_ACTOR}@users.noreply.github.com}"
DST_BRANCH="${DST_BRANCH:-master}"

SRC_REPO="${GITHUB_REPOSITORY}${SRC_WIKI}"
SRC_REPO_NAME="${GITHUB_REPOSITORY#*/}${SRC_WIKI}"
DST_REPO="${DST_OWNER}/${DST_REPO_NAME}${DST_WIKI}"
DST_REPO_NAME="${DST_REPO_NAME}${DST_WIKI}"
DST_REPO_DIR=dst_repo_dir
FINAL_SOURCE="${BASE_PATH}/${SRC_PATH}"

git config --global user.name "${USERNAME}"
git config --global user.email "${EMAIL}"

if [[ -z "$FILE_FILTER" ]]; then
    echo "Copying \"${BASE_PATH}/${SRC_PATH}\" and pushing it to ${DST_OWNER}/${DST_REPO_NAME}"
else
    echo "Copying files matching \"${FILE_FILTER}\" from \"${BASE_PATH}/${SRC_PATH}\" and pushing it to ${DST_OWNER}/${DST_REPO_NAME}"
fi
# NOTE(review): this tests the status of the preceding echo; the source
# clone this originally guarded is not present in this script version.
if [ "$?" -ne 0 ]; then
    echo >&2 "Cloning '$SRC_REPO' failed"
    exit 1
fi

# Drop the source .git metadata so it is never copied to the destination.
rm -rf ${BASE_PATH}/.git

# Basename-level filtering: delete everything not matching FILE_FILTER.
if [[ -n "$FILE_FILTER" ]]; then
    find ${BASE_PATH}/ -type f -not -name "${FILE_FILTER}" -exec rm {} \;
fi

# Path-level filtering: stage matching files into a temp tree and copy
# from there instead of from the working tree.
if [[ -n "$FILTER" ]]; then
    tmp_dir=$(mktemp -d -t ci-XXXXXXXXXX)
    # BUGFIX: this previously used the undefined variable ${temp_dir},
    # so the staging directory was never created under the temp dir.
    mkdir -p "${tmp_dir}/${SRC_REPO_NAME}"
    cd ${BASE_PATH}
    FINAL_SOURCE="${tmp_dir}/${SRC_REPO_NAME}/${SRC_PATH}"

    # Allow spaces in glob-expanded file names while iterating.
    SAVEIFS=$IFS
    IFS=$(echo -en "\n\b")
    for f in ${FILTER} ; do
        [ -e "$f" ] || continue
        [ -d "$f" ] && continue
        if [[ -n "$EXCLUDE" ]] ; then
            # EXCLUDE is matched as a glob pattern against the path.
            [[ "$f" == $EXCLUDE ]] && continue
        fi
        file_dir=$(dirname "${f}")
        mkdir -p "${tmp_dir}/${SRC_REPO_NAME}/${file_dir}" && cp "${f}" "${tmp_dir}/${SRC_REPO_NAME}/${file_dir}"
    done
    IFS=$SAVEIFS
    cd ..
fi

# Clone the destination; fall back to its default branch and create
# DST_BRANCH locally when it does not exist yet.
git clone --branch ${DST_BRANCH} --single-branch --depth 1 https://${PERSONAL_TOKEN}@github.com/${DST_REPO}.git ${DST_REPO_DIR}
if [ "$?" -ne 0 ]; then
    echo >&2 "Cloning branch '$DST_BRANCH' in '$DST_REPO' failed"
    echo >&2 "Falling back to default branch"
    git clone --single-branch --depth 1 https://${PERSONAL_TOKEN}@github.com/${DST_REPO}.git ${DST_REPO_DIR}
    cd ${DST_REPO_DIR} || exit "$?"
    echo >&2 "Creating branch '$DST_BRANCH'"
    git checkout -b ${DST_BRANCH}
    if [ "$?" -ne 0 ]; then
        echo >&2 "Creation of Branch '$DST_BRANCH' failed"
        exit 1
    fi
    cd ..
fi

# Optionally delete the destination files before copying fresh content.
if [ "$CLEAN" = "true" ]; then
    if [ -f "${DST_REPO_DIR}/${DST_PATH}" ] ; then
        find "${DST_REPO_DIR}/${DST_PATH}" -type f -not -path '*/\.git/*' -delete
    elif [ -d "${DST_REPO_DIR}/${DST_PATH}" ] ; then
        find "${DST_REPO_DIR}/${DST_PATH%/*}"/* -type f -not -path '*/\.git/*' -delete
    else
        echo >&2 "Nothing to clean 🧽"
    fi
fi

mkdir -p "${DST_REPO_DIR}/${DST_PATH%/*}" || exit "$?"
echo "Copying \"${FINAL_SOURCE}\" to \"${DST_REPO_DIR}/${DST_PATH}\""
cp -rf "${FINAL_SOURCE}" "${DST_REPO_DIR}/${DST_PATH}" || exit "$?"
cd "${DST_REPO_DIR}" || exit "$?"

if [[ -z "${COMMIT_MESSAGE}" ]]; then
    if [ -f "${FINAL_SOURCE}" ]; then
        COMMIT_MESSAGE="Update file in \"${SRC_PATH}\" from \"${GITHUB_REPOSITORY}\""
    else
        COMMIT_MESSAGE="Update file(s) \"${SRC_PATH}\" from \"${GITHUB_REPOSITORY}\""
    fi
fi

# Commit and push only when something actually changed.
if [ -z "$(git status --porcelain)" ]; then
    # Working directory is clean
    echo "No changes detected "
else
    # Uncommitted changes
    git add -A
    git commit --message "${COMMIT_MESSAGE}"
    git push origin ${DST_BRANCH}
fi

echo "Copying complete 👌"
|
#!/bin/sh -ex

# Copyright 2014-present Viktor Szakats <https://vsz.me/>
# See LICENSE.md

# Cross-builds a static libgsasl package.  Expects the build environment
# to supply _OS, _TRIPLET, _SYSROOT, _OPTM, _CPU, _CCPREFIX, _REV,
# _PKGSUFFIX and CC -- TODO confirm these are exported by the caller.
export _NAM
export _VER
export _OUT
export _BAS
export _DST

# Package name derives from this script's own filename (minus extension).
_NAM="$(basename "$0")"
_NAM="$(echo "${_NAM}" | cut -f 1 -d '.')"
_VER="$1"

(
  cd "${_NAM}" || exit

  # Configure cross-compilation build/host triplets.
  # NOTE(review): the 'win' case inside this '!= win' guard can never
  # match -- confirm whether the guard or the case entry is intended.
  if [ "${_OS}" != 'win' ]; then
    # https://clang.llvm.org/docs/CrossCompilation.html
    unset _HOST
    case "${_OS}" in
      win)   _HOST='x86_64-pc-mingw32';;
      linux) _HOST='x86_64-pc-linux';;  # x86_64-pc-linux-gnu
      mac)   _HOST='x86_64-apple-darwin';;
      bsd)   _HOST='x86_64-pc-bsd';;
    esac
    options="--build=${_HOST} --host=${_TRIPLET}"
  fi

  # Build

  rm -r -f pkg

  export LDFLAGS="${_OPTM}"
  unset ldonly

  # No success in convincing the build system to work correctly with clang:
  if [ "${CC}" = 'mingw-clang' ]; then
    # Skip 'gltests' build due to errors like this:
    #   ./signal.h:922:3: error: unknown type name 'uid_t'; did you mean 'pid_t'?
    sed -i.bak -E 's| gltests||g' ./Makefile.am
    export CC='clang'
    if [ "${_OS}" != 'win' ]; then
      export options="${options} --target=${_TRIPLET} --with-sysroot=${_SYSROOT}"
      LDFLAGS="${LDFLAGS} -target ${_TRIPLET} --sysroot ${_SYSROOT}"
      # Point the linker at the mingw 'posix' flavoured gcc libs on Linux.
      [ "${_OS}" = 'linux' ] && ldonly="${ldonly} -L$(find "/usr/lib/gcc/${_TRIPLET}" -name '*posix' | head -n 1)"
    fi
    export AR=${_CCPREFIX}ar
    export NM=${_CCPREFIX}nm
    export RANLIB=${_CCPREFIX}ranlib
    export RC=${_CCPREFIX}windres
  else
    export CC="${_CCPREFIX}gcc -static-libgcc"
  fi

  export CFLAGS="${LDFLAGS} -fno-ident"
  LDFLAGS="${LDFLAGS}${ldonly}"
  # 32-bit x86 builds: drop unwind tables for smaller binaries.
  [ "${_CPU}" = 'x86' ] && CFLAGS="${CFLAGS} -fno-asynchronous-unwind-tables"

  # shellcheck disable=SC2086
  ./configure ${options} \
    --disable-dependency-tracking \
    --disable-silent-rules \
    --disable-rpath \
    --enable-static \
    --disable-shared \
    --enable-scram-sha1 \
    --enable-scram-sha256 \
    --disable-obsolete \
    --disable-valgrind-tests \
    '--prefix=/usr/local' \
    --silent
  make --jobs 2 clean >/dev/null
  make --jobs 2 install "DESTDIR=$(pwd)/pkg" # >/dev/null # V=1

  # DESTDIR= + --prefix=
  _pkg='pkg/usr/local'

  # Build fixups for clang

  # libgsasl configure misdetects CC=clang as MSVC and then uses '.lib'
  # extension. So rename these to '.a':
  if [ -f "${_pkg}/lib/libgsasl.lib" ]; then
    sed -i.bak -E "s|\.lib'$|.a'|g" "${_pkg}/lib/libgsasl.la"
    mv "${_pkg}/lib/libgsasl.lib" "${_pkg}/lib/libgsasl.a"
  fi

  # Make steps for determinism
  # Strip debug info and normalize timestamps so rebuilds are bit-identical.
  readonly _ref='NEWS'

  "${_CCPREFIX}strip" --preserve-dates --strip-debug --enable-deterministic-archives ${_pkg}/lib/*.a

  touch -c -r "${_ref}" ${_pkg}/lib/*.a
  touch -c -r "${_ref}" ${_pkg}/lib/pkgconfig/*.pc
  touch -c -r "${_ref}" ${_pkg}/include/*.h

  # Create package
  _OUT="${_NAM}-${_VER}${_REV}${_PKGSUFFIX}"
  _BAS="${_NAM}-${_VER}${_PKGSUFFIX}"
  _DST="$(mktemp -d)/${_BAS}"

  mkdir -p "${_DST}/include"
  mkdir -p "${_DST}/lib/pkgconfig"

  cp -f -p ${_pkg}/lib/*.a             "${_DST}/lib/"
  cp -f -p ${_pkg}/lib/pkgconfig/*.pc  "${_DST}/lib/pkgconfig/"
  cp -f -p ${_pkg}/include/*.h         "${_DST}/include/"
  cp -f -p NEWS                        "${_DST}/NEWS.txt"
  cp -f -p AUTHORS                     "${_DST}/AUTHORS.txt"
  cp -f -p COPYING                     "${_DST}/COPYING.txt"
  cp -f -p README                      "${_DST}/README.txt"

  unix2dos --quiet --keepdate "${_DST}"/*.txt

  ../_pkg.sh "$(pwd)/${_ref}"
)
|
hijss
|
#!/bin/bash
# Builds the lwan 'techempower' benchmark target out-of-source and
# launches it in the background.
#
# Fail fast: previously a failed cmake/cd would fall through and relaunch
# a stale (or missing) binary.
set -e

export LWAN_ROOT="${IROOT}/lwan"
export LWAN_BUILD="${LWAN_ROOT}/build"

# Start from a clean build directory.
rm -rf "${LWAN_BUILD}"
mkdir -p "${LWAN_BUILD}"
cd "${LWAN_BUILD}"

cmake "${LWAN_ROOT}" -DCMAKE_BUILD_TYPE=Release && make techempower

# Run the benchmark server from its source directory, detached.
cd "${LWAN_ROOT}/techempower"
"${LWAN_BUILD}/techempower/techempower" &
|
<?hh // strict
namespace Waffle\Lib\Exception;
use type InvalidArgumentException;
/**
 * Marker exception thrown when a value cannot be encoded to JSON.
 * Inherits all behavior from InvalidArgumentException; implementing
 * ExceptionInterface lets callers catch every library exception uniformly.
 */
class JsonEncodeException extends InvalidArgumentException implements ExceptionInterface
{
}
|
package org.apache.servicecomb.demo.springmvc.client;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import org.apache.servicecomb.core.BootListener;
import org.apache.servicecomb.demo.TestMgr;
import org.apache.servicecomb.registry.RegistrationManager;
import org.springframework.stereotype.Component;
/**
 * Self-checks executed after service registration: verifies that this
 * consumer publishes no REST paths and that the registered schema matches
 * the checked-in SpringMVCSchema.yaml resource. Failing checks abort
 * startup via {@link IllegalStateException}.
 */
@Component
public class ConsumerTestsAfterBootup implements BootListener {
  // A pure consumer must not register any REST paths of its own.
  private void testRegisterPath() {
    TestMgr.check(RegistrationManager.INSTANCE.getMicroservice().getPaths().size(), 0);
  }

  // Compare the registered schema with the resource file, ignoring
  // whitespace (and '#' comment markers in the file copy).
  private void testSchemaContent() {
    String content = RegistrationManager.INSTANCE.getMicroservice().getSchemaMap().get("SpringMVCSchema");
    TestMgr.check(content.replaceAll("\\s", ""),
        readFile("SpringMVCSchema.yaml").replaceAll("[\\s#]", ""));
  }

  /**
   * Reads a classpath resource into a string.
   * NOTE(review): skips the first 1000 bytes and relies on a single
   * read() filling the buffer -- InputStream.read may legally return
   * fewer bytes. Kept as-is since this is deliberately simple test code;
   * confirm the 1000-byte skip is intended for the schema comparison.
   */
  private String readFile(String restController) {
    // test code, make simple
    try {
      InputStream inputStream = this.getClass().getResource("/" + restController).openStream();
      byte[] buffer = new byte[2048 * 10];
      inputStream.skip(1000);
      int len = inputStream.read(buffer);
      TestMgr.check(2048 * 10 > len, true);
      inputStream.close();
      return new String(buffer, 0, len, StandardCharsets.UTF_8);
    } catch (IOException e) {
      TestMgr.failed(e.getMessage(), e);
      return null;
    }
  }

  // Run the checks once registration has completed; any failure stops boot.
  @Override
  public void onBootEvent(BootEvent event) {
    if (event.getEventType() == BootListener.EventType.AFTER_REGISTRY) {
      testRegisterPath();
      testSchemaContent();
      if (!TestMgr.isSuccess()) {
        TestMgr.summary();
        throw new IllegalStateException("some tests are failed. ");
      }
    }
  }
}
|
<reponame>AlexCollins01/myPython-projects
# Symbols Identifier
# Read the string of symbols to classify from the user.
a = input("Enter the symbols: ")
# Reference character sets: lowercase letters, uppercase letters, digits.
z = "abcdefghijklmnopqrstuvwxyz"
y = z.upper()
g = "1234567890"
def identifySymbols(a):
    """Print a one-line classification for every character of ``a``.

    ASCII letters (either case) are reported as alphabets, ASCII digits
    as figures, and anything else as an unknown character.

    Fixes: the original ``if letter == "a [len(a) - 1]": break`` compared
    each single character to a 16-character string literal -- it could
    never be true, so the dead branch is removed.  The module-level
    constants z/y/g are replaced by the stdlib ``string`` sets, making the
    function self-contained.
    """
    import string
    for letter in a:
        if letter in string.ascii_lowercase:
            print(letter + " is an alphabet.")
        elif letter in string.ascii_uppercase:
            print(letter + " is an alphabet.")
        elif letter in string.digits:
            print(letter + " is a figure.")
        else:
            print(letter + " is an unknown character.")
identifySymbols(a)
|
#!/bin/bash
# This downloads and installs a pinned version of miniconda
# Expects CONDA_DIR, NB_PYTHON_PREFIX, KERNEL_PYTHON_PREFIX and NB_USER
# to be set by the caller -- TODO confirm (they come from the image build).
set -ex

cd $(dirname $0)
MINICONDA_VERSION=4.6.14
CONDA_VERSION=4.6.14
# Only MD5 checksums are available for miniconda
# Can be obtained from https://repo.continuum.io/miniconda/
MD5SUM="718259965f234088d785cad1fbd7de03"

URL="https://repo.continuum.io/miniconda/Miniconda3-${MINICONDA_VERSION}-Linux-x86_64.sh"
INSTALLER_PATH=/tmp/miniconda-installer.sh

# make sure we don't do anything funky with user's $HOME
# since this is run as root
unset HOME

wget --quiet $URL -O ${INSTALLER_PATH}
chmod +x ${INSTALLER_PATH}

# check md5 checksum
if ! echo "${MD5SUM}  ${INSTALLER_PATH}" | md5sum  --quiet -c -; then
    echo "md5sum mismatch for ${INSTALLER_PATH}, exiting!"
    exit 1
fi

# -b: batch (no prompts), -p: install prefix
bash ${INSTALLER_PATH} -b -p ${CONDA_DIR}
export PATH="${CONDA_DIR}/bin:$PATH"

# Allow easy direct installs from conda forge
conda config --system --add channels conda-forge

# Do not attempt to auto update conda or dependencies
conda config --system --set auto_update_conda false
conda config --system --set show_channel_urls true

# bug in conda 4.3.>15 prevents --set update_dependencies
echo 'update_dependencies: false' >> ${CONDA_DIR}/.condarc

# install conda itself
# (only needed when the pinned conda differs from the installer's bundled one)
if [[ "${CONDA_VERSION}" != "${MINICONDA_VERSION}" ]]; then
    conda install -yq conda==${CONDA_VERSION}
fi

echo "installing notebook env:"
cat /tmp/environment.yml
conda env create -p ${NB_PYTHON_PREFIX} -f /tmp/environment.yml

# empty conda history file,
# which seems to result in some effective pinning of packages in the initial env,
# which we don't intend.
# this file must not be *removed*, however
echo '' > ${NB_PYTHON_PREFIX}/conda-meta/history

# enable nteract-on-jupyter, which was installed with pip
jupyter serverextension enable nteract_on_jupyter --sys-prefix

if [[ -f /tmp/kernel-environment.yml ]]; then
    # install kernel env and register kernelspec
    echo "installing kernel env:"
    cat /tmp/kernel-environment.yml
    conda env create -p ${KERNEL_PYTHON_PREFIX} -f /tmp/kernel-environment.yml
    ${KERNEL_PYTHON_PREFIX}/bin/ipython kernel install --prefix "${NB_PYTHON_PREFIX}"
    echo '' > ${KERNEL_PYTHON_PREFIX}/conda-meta/history
    conda list -p ${KERNEL_PYTHON_PREFIX}
fi

# Clean things out!
conda clean -tipsy

# Remove the big installer so we don't increase docker image size too much
rm ${INSTALLER_PATH}

# Hand the install over to the unprivileged notebook user.
chown -R $NB_USER:users ${CONDA_DIR}

conda list -n root
conda list -p ${NB_PYTHON_PREFIX}
|
////////////////////////////////////////////////////////////////
// OOP244 Workshop 9: Inheritance & Virtual Functions
// File SportCar.cpp
// Version 1.0
// Date 2016/11/21
// Author <NAME>
// Number ID 0385151
// Email <EMAIL>
////////////////////////////////////////////////////////////////
#include "SportCar.h"
#include "Car.h"
#include "Vehicle.h"
namespace sict
{
	// Default sport car: base Car is default-constructed, one passenger.
	SportCar::SportCar()
	{
		noOfPassengers_ = 1;
	}

	// Sport car with a given maximum speed and passenger count.
	SportCar::SportCar(int maxSpeed, int passengers) : Car(maxSpeed)
	{
		noOfPassengers_ = passengers;
	}

	// Reduce speed by 10 km/h.
	// NOTE(review): assumes Car::speed(int) clamps out-of-range values;
	// confirm the setter prevents a negative speed.
	void SportCar::brake()
	{
		int brakespeed = speed() - 10;
		speed(brakespeed);
	}

	// Increase speed by 40 km/h (clamping, if any, is up to Car::speed).
	void SportCar::accelerate()
	{
		int acceleratespeed = speed() + 40;
		speed(acceleratespeed);
	}

	// Report passenger count and either the current speed or "parked".
	std::ostream & SportCar::display(std::ostream & ostr) const
	{
		if (speed() > 0) {
			ostr << "This sport car is carrying " << noOfPassengers_ << " passengers and is traveling at a speed of " << speed() << " km/h.";
		}
		else {
			ostr << "This sport car is carrying " << noOfPassengers_ << " passengers and is parked.";
		}
		return ostr;
	}
}
#!/usr/bin/env bash
# Wrapper for the Redshift unload-copy utility.
set -e

echo "Running unload-copy utility"

# Required
CONFIG_FILE=${CONFIG_FILE:-}
# Optional (region defaults to us-east-1); the AWS credentials are read
# from the environment by the utility itself.
AWS_REGION=${AWS_REGION:-us-east-1}
AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID:-}
AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY:-}

if [ "${CONFIG_FILE}" == "" ]; then
  echo "Environment Var 'CONFIG_FILE' must be defined" >&2
  # BUGFIX: previously this branch only echoed and the script exited 0,
  # hiding the misconfiguration from callers/CI.
  exit 1
fi

cd UnloadCopyUtility
python redshift-unload-copy.py "${CONFIG_FILE}" "${AWS_REGION}"
echo "Done"
|
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package br.com.bancoSessaoOriginal.servlet.cliente;
import br.com.bancoSessaoOriginal.dao.ClienteDAO;
import javax.servlet.annotation.WebServlet;
import java.io.IOException;
import java.math.BigDecimal;
import java.text.SimpleDateFormat;
import java.util.Date;
import javax.servlet.RequestDispatcher;
import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import br.com.bancoSessaoOriginal.model.Cliente;
/**
*
* @author mario.agjunior
*/
import br.com.bancoSessaoOriginal.model.Endereco;
import java.text.ParseException;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.swing.JOptionPane;
@WebServlet(name = "AdicionarClienteServlet", urlPatterns = {"/AdicionarClienteServlet"})
public class AdicionarClienteServlet extends HttpServlet {

    /**
     * Renders the client registration form, pre-loading the existing
     * client ids so the JSP can display them.
     */
    @Override
    protected void doGet(HttpServletRequest request,
            HttpServletResponse response)
            throws ServletException, IOException {
        ClienteDAO idCliente = new ClienteDAO();
        try {
            List<Cliente> clientes = idCliente.obterIdCliente();
            request.setAttribute("clientes", clientes);
        } catch (Exception ex) {
            Logger.getLogger(AdicionarClienteServlet.class.getName()).log(Level.SEVERE, null, ex);
        }
        request.setAttribute("nome", "");
        RequestDispatcher dispatcher
                = request.getRequestDispatcher("/WEB-INF/Cliente/cadastroCliente.jsp");
        dispatcher.forward(request, response);
    }

    /**
     * Handles form submission: builds a Cliente with its Endereco from the
     * request parameters, persists it, and re-renders the form.
     * NOTE(review): persistence failures are only printed to stdout and the
     * page is forwarded as if successful -- confirm this is intended.
     */
    @Override
    protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
        String nome = request.getParameter("txtNome");
        String rg = request.getParameter("txtRg");
        String cpf = request.getParameter("txtCpf");
        String rua = request.getParameter("txtRua");
        String numero = request.getParameter("txtNum");
        String complemento = request.getParameter("txtComp");
        String bairro = request.getParameter("txtBairro");
        String cidade = request.getParameter("txtCidade");
        String estado = request.getParameter("txtEstado");

        Endereco endereco = new Endereco(rua, numero, complemento, bairro, cidade, estado);
        Cliente cliente = new Cliente(nome, cpf, rg, endereco);

        ClienteDAO clienteDAO = new ClienteDAO();
        try {
            // cliente.setEndereco(endereco);
            clienteDAO.cadastrar(cliente);
        } catch (Exception e) {
            System.out.println(e.getMessage());
        }

        request.setAttribute("cliente", cliente);
        request.setAttribute("endereco", endereco);
        RequestDispatcher dispatcher
                = request.getRequestDispatcher(
                        "/WEB-INF/Cliente/cadastroCliente.jsp");
        dispatcher.forward(request, response);
    }
}
|
package leveldb
import (
"encoding/json"
dbm "github.com/tendermint/tmlibs/db"
"github.com/reed/errors"
"github.com/reed/types"
)
// Key prefixes namespacing the two record types within the shared
// key-value database, plus a generic sentinel error for store failures.
var (
	txPrefix   = "TX:"
	utxoPrefix = "UTXO:"
	storeErr   = errors.New("leveldb error")
)

// Store persists transactions and UTXOs in a tendermint tmlibs key-value DB.
type Store struct {
	db dbm.DB
}
// NewStore wraps the given key-value database in a Store.
func NewStore(db dbm.DB) *Store {
	s := new(Store)
	s.db = db
	return s
}
// getTxKey builds the prefixed database key for a transaction id.
func getTxKey(id []byte) []byte {
	key := txPrefix + string(id)
	return []byte(key)
}
// getUtxoKey builds the prefixed database key for a UTXO id.
func getUtxoKey(id []byte) []byte {
	key := utxoPrefix + string(id)
	return []byte(key)
}
// AddTx serializes tx as JSON and writes it under its prefixed key.
func (store *Store) AddTx(tx *types.Tx) error {
	encoded, err := json.Marshal(tx)
	if err != nil {
		return errors.Wrapf(err, "AddTx json marshal error")
	}
	key := getTxKey(tx.ID.Bytes())
	store.db.Set(key, encoded)
	return nil
}
// GetTx looks up a transaction by id.  It returns (nil, nil) when no
// transaction is stored under the id, and a non-nil error only when the
// stored bytes cannot be decoded.
func (store *Store) GetTx(id []byte) (*types.Tx, error) {
	b := store.db.Get(getTxKey(id))
	if b == nil {
		return nil, nil
	}
	tx := &types.Tx{}
	if err := json.Unmarshal(b, tx); err != nil {
		// Wrap the actual json error instead of masking it behind the
		// generic storeErr sentinel, so the cause is not lost.
		return nil, errors.Wrapf(err, "tx(id=%x) unmarshal failed", id)
	}
	return tx, nil
}
// GetUtxo fetches a UTXO by id; a missing id is reported as an error
// (unlike GetTx, which returns nil for a miss).
func (store *Store) GetUtxo(id []byte) (*types.UTXO, error) {
	raw := store.db.Get(getUtxoKey(id))
	if raw == nil {
		return nil, errors.Wrapf(storeErr, "utxo(id=%x) does not exists", id)
	}
	var utxo types.UTXO
	if err := json.Unmarshal(raw, &utxo); err != nil {
		return nil, err
	}
	return &utxo, nil
}
// SaveUtxos removes the expired UTXOs and stores the new ones in a single
// database batch.  Returns an error only if a UTXO fails to serialize.
func (store *Store) SaveUtxos(expiredUtxoIds []types.Hash, utxos *[]types.UTXO) error {
	batch := store.db.NewBatch()
	for _, e := range expiredUtxoIds {
		// BUGFIX: delete under the same prefixed key the UTXO was stored
		// with (getUtxoKey).  Deleting the raw hash bytes never matched a
		// stored entry, so expired UTXOs were silently left behind.
		batch.Delete(getUtxoKey(e.Bytes()))
	}
	for _, utxo := range *utxos {
		b, err := json.Marshal(utxo)
		if err != nil {
			return errors.Wrapf(err, "SaveUtxos json marshal error")
		}
		batch.Set(getUtxoKey(utxo.OutputId.Bytes()), b)
	}
	batch.Write()
	return nil
}
|
<reponame>zjin-lcf/HeCBench
/*=========================================================================
Copyright (c) 2007, Los Alamos National Security, LLC
All rights reserved.
Copyright 2007. Los Alamos National Security, LLC.
This software was produced under U.S. Government contract DE-AC52-06NA25396
for Los Alamos National Laboratory (LANL), which is operated by
Los Alamos National Security, LLC for the U.S. Department of Energy.
The U.S. Government has rights to use, reproduce, and distribute this software.
NEITHER THE GOVERNMENT NOR LOS ALAMOS NATIONAL SECURITY, LLC MAKES ANY WARRANTY,
EXPRESS OR IMPLIED, OR ASSUMES ANY LIABILITY FOR THE USE OF THIS SOFTWARE.
If software is modified to produce derivative works, such modified software
should be clearly marked, so as not to confuse it with the version available
from LANL.
Additionally, redistribution and use in source and binary forms, with or
without modification, are permitted provided that the following conditions
are met:
- Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
- Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
- Neither the name of Los Alamos National Security, LLC, Los Alamos National
Laboratory, LANL, the U.S. Government, nor the names of its contributors
may be used to endorse or promote products derived from this software
without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY LOS ALAMOS NATIONAL SECURITY, LLC AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL LOS ALAMOS NATIONAL SECURITY, LLC OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
=========================================================================*/
#include <iostream>
#include <fstream>
#include <sstream>
#include <iomanip>
#include <set>
#include <math.h>
#include <time.h>
#include "Timings.h"
#include "BHForceTree.h"
using namespace std;
/////////////////////////////////////////////////////////////////////////
//
// FParticle contains information about particles
//
/////////////////////////////////////////////////////////////////////////
// A fresh particle starts unlinked (-1 tree indices) with zero force.
FParticle::FParticle()
{
  parent   = -1;  // no enclosing FNode assigned yet
  nextNode = -1;  // not threaded into the iteration order yet
  sibling  = -1;  // no sibling in the threaded tree yet
  force    = 0.0;
}
/////////////////////////////////////////////////////////////////////////
//
// FNode is a region of physical space divided into octants
//
/////////////////////////////////////////////////////////////////////////
// Root-node constructor: span the box [minLoc, maxLoc] and mark every
// octant slot empty (-1 means neither particle nor child node).
FNode::FNode(POSVEL_T* minLoc, POSVEL_T* maxLoc)
{
  for (int dim = 0; dim < DIMENSION; dim++) {
    POSVEL_T side = maxLoc[dim] - minLoc[dim];
    this->geoSide[dim] = side;
    this->geoCenter[dim] = minLoc[dim] + side * 0.5;
  }
  for (int slot = 0; slot < NUM_CHILDREN; slot++)
    this->u.child[slot] = -1;
}
/////////////////////////////////////////////////////////////////////////
//
// FNode constructed from an octant of a parent node
//
/////////////////////////////////////////////////////////////////////////
// Child-node constructor: halve the parent region and shift the center
// into the octant selected by oindx (bit 2 -> x, bit 1 -> y, bit 0 -> z).
FNode::FNode(FNode* parent, int oindx)
{
  // Each child spans half the parent's extent in every dimension.
  for (int dim = 0; dim < DIMENSION; dim++) {
    this->geoSide[dim] = parent->geoSide[dim] * 0.5;
  }

  // Vary Z fastest when making octtree children
  // If this changes must also change getChildIndex()
  if (oindx & 4)
    this->geoCenter[0] = parent->geoCenter[0] + this->geoSide[0] * 0.5;
  else
    this->geoCenter[0] = parent->geoCenter[0] - this->geoSide[0] * 0.5;

  if (oindx & 2)
    this->geoCenter[1] = parent->geoCenter[1] + this->geoSide[1] * 0.5;
  else
    this->geoCenter[1] = parent->geoCenter[1] - this->geoSide[1] * 0.5;

  if (oindx & 1)
    this->geoCenter[2] = parent->geoCenter[2] + this->geoSide[2] * 0.5;
  else
    this->geoCenter[2] = parent->geoCenter[2] - this->geoSide[2] * 0.5;

  // All octant slots start empty (-1 = neither particle nor node).
  for (int i = 0; i < NUM_CHILDREN; i++)
    this->u.child[i] = -1;
}
/////////////////////////////////////////////////////////////////////////
//
// Barnes Hut Tree
//
/////////////////////////////////////////////////////////////////////////
// Construct the force tree over `count` particles bounded by
// [minLoc, maxLoc].  The position/velocity/mass arrays are borrowed,
// not copied, and must outlive this object.  This overload installs a
// Newtonian force law with coefficient 1.0.
BHForceTree::BHForceTree(
  POSVEL_T* minLoc,
  POSVEL_T* maxLoc,
  ID_T count,
  POSVEL_T* xLoc,
  POSVEL_T* yLoc,
  POSVEL_T* zLoc,
  POSVEL_T* xVel,
  POSVEL_T* yVel,
  POSVEL_T* zVel,
  POSVEL_T* ms,
  POSVEL_T avgMass)
{
  // Extract the contiguous data block from a vector pointer
  this->particleCount = count;
  // Nodes are addressed as (node index + nodeOffset) so particles and
  // nodes share a single integer id space.
  this->nodeOffset = this->particleCount;
  this->xx = xLoc;
  this->yy = yLoc;
  this->zz = zLoc;
  this->vx = xVel;
  this->vy = yVel;
  this->vz = zVel;
  this->mass = ms;
  this->particleMass = avgMass;

  // Find the grid size of this chaining mesh
  for (int dim = 0; dim < DIMENSION; dim++) {
    this->minRange[dim] = minLoc[dim];
    this->maxRange[dim] = maxLoc[dim];
  }
  // NOTE(review): boxSize uses only dimension 0 -- assumes a cubic
  // domain; confirm for non-cubic decompositions.
  this->boxSize = this->maxRange[0] - this->minRange[0];

  // Default force law (callers wanting another law use the other ctor).
  this->m_fl = new ForceLawNewton();
  this->m_fcoeff = 1.0;

  // Create the recursive BH tree from the particle locations
  createBHForceTree();

  // Thread the recursive tree turning it into an iterative tree
  ID_T rootIndx = this->particleCount;
  ID_T sibling = -1;
  ID_T parent = -1;
  ID_T lastIndx = -1;
  POSVEL_T radius = 0.0;
  threadBHForceTree(rootIndx, sibling, parent, &lastIndx, &radius);
}
// Same as the primary constructor, but with a caller-supplied force law
// and coefficient instead of the Newtonian default.  Ownership of `fl`
// stays with the caller (the destructor does not delete it).
BHForceTree::BHForceTree(
  POSVEL_T* minLoc,
  POSVEL_T* maxLoc,
  ID_T count,
  POSVEL_T* xLoc,
  POSVEL_T* yLoc,
  POSVEL_T* zLoc,
  POSVEL_T* xVel,
  POSVEL_T* yVel,
  POSVEL_T* zVel,
  POSVEL_T* ms,
  POSVEL_T avgMass,
  ForceLaw *fl,
  float fcoeff)
{
  // Extract the contiguous data block from a vector pointer
  this->particleCount = count;
  // Nodes are addressed as (node index + nodeOffset); see primary ctor.
  this->nodeOffset = this->particleCount;
  this->xx = xLoc;
  this->yy = yLoc;
  this->zz = zLoc;
  this->vx = xVel;
  this->vy = yVel;
  this->vz = zVel;
  this->mass = ms;
  this->particleMass = avgMass;

  // Find the grid size of this chaining mesh
  for (int dim = 0; dim < DIMENSION; dim++) {
    this->minRange[dim] = minLoc[dim];
    this->maxRange[dim] = maxLoc[dim];
  }
  this->boxSize = this->maxRange[0] - this->minRange[0];

  this->m_fl = fl;
  this->m_fcoeff = fcoeff;

  // Create the recursive BH tree from the particle locations
  createBHForceTree();

  // Thread the recursive tree turning it into an iterative tree
  ID_T rootIndx = this->particleCount;
  ID_T sibling = -1;
  ID_T parent = -1;
  ID_T lastIndx = -1;
  POSVEL_T radius = 0.0;
  threadBHForceTree(rootIndx, sibling, parent, &lastIndx, &radius);
}
// NOTE(review): intentionally empty -- the particle arrays are borrowed.
// However, the default-law constructor heap-allocates m_fl
// (new ForceLawNewton) which is never deleted; confirm whether the leak
// is accepted for program-lifetime objects.
BHForceTree::~BHForceTree()
{
  /* empty */
}
/////////////////////////////////////////////////////////////////////////
//
// Build the Barnes-Hut octree by inserting every particle, splitting an
// octant into a new FNode whenever two particles land in the same slot.
// (The previous banner describing SUBFIND subhalo finding was a
// copy-paste error and did not match this function.)
//
/////////////////////////////////////////////////////////////////////////

void BHForceTree::createBHForceTree()
{
  // Create the FParticles
  this->fParticle.resize(this->particleCount);

  // Reserve a basic amount of space in the BH tree
  this->fNode.reserve(this->particleCount/NUM_CHILDREN);

  // Create the root node of the BH tree
  FNode root(this->minRange, this->maxRange);
  this->fNode.push_back(root);
  ID_T nodeIndex = 0;

  // Iterate on all particles placing them in the BH tree
  // Child slots in the tree contain the index of the FParticle or
  // the index of the FNode offset by the number of particles
  // This is so we can use an integer instead of pointers to refer to objects
  //
  for (ID_T pindx = 0; pindx < this->particleCount; pindx++) {

    // Start at root of tree for insertion of a new particle
    // pindx is index into the halo particles where location is stored
    // tindx is index into the BH tree nodes
    // oindx is index into the octant of the tree node
    ID_T tindx = 0;
    int oindx = getChildIndex(&this->fNode[tindx], pindx);

    while (this->fNode[tindx].u.child[oindx] != -1) {

      // Child slot in tree contains another SPHNode so go there
      // NOTE(review): uses '>' rather than '>='; safe only because the
      // root (id == particleCount) is never stored as a child -- confirm.
      if (this->fNode[tindx].u.child[oindx] > this->particleCount) {
        tindx = this->fNode[tindx].u.child[oindx] - this->particleCount;
        oindx = getChildIndex(&this->fNode[tindx], pindx);
      }

      // Otherwise there is a particle in the slot and we make a new FNode
      else {
        // Get the particle index of particle already in the node
        ID_T pindx2 = this->fNode[tindx].u.child[oindx];

        // First, check to make sure that this particle is not at the exact
        // same location as the particle that is already there. If it is, then
        // we'll double the mass of the existing particle and leave this one
        // out.
        if (this->xx[pindx2] == this->xx[pindx] &&
            this->yy[pindx2] == this->yy[pindx] &&
            this->zz[pindx2] == this->zz[pindx]) {
          this->mass[pindx2] += this->mass[pindx];
          goto next_particle;
        }

        // Make sure that the vector does not over allocate
        // (growing in controlled steps keeps pointers taken below valid
        // only within this iteration).
        if (this->fNode.capacity() == this->fNode.size()) {
          this->fNode.reserve(this->fNode.capacity()
                              + this->particleCount/NUM_CHILDREN);
        }

        FNode node(&this->fNode[tindx], oindx);
        this->fNode.push_back(node);
        nodeIndex++;
        ID_T tindx2 = nodeIndex;

        // Place the node that was sitting there already
        int oindx2 = getChildIndex(&this->fNode[tindx2], pindx2);
        this->fNode[tindx2].u.child[oindx2] = pindx2;

        // Add the new SPHNode to the BHTree
        this->fNode[tindx].u.child[oindx] = tindx2 + this->particleCount;

        // Set to new node
        tindx = tindx2;
        oindx = getChildIndex(&this->fNode[tindx], pindx);
      }
    }

    // Place the current particle in the BH tree
    this->fNode[tindx].u.child[oindx] = pindx;
    next_particle:;
  }
  this->nodeCount = this->fNode.size();
}
/////////////////////////////////////////////////////////////////////////
//
// Update the FNode vector by walking using a depth first recursion
// Set parent and sibling indices which can replace the child[8] already
// there, and supply extra information about center of mass, total particle
// mass and particle radius which is the distance from the center of mass
// to the furthest particle.
//
/////////////////////////////////////////////////////////////////////////
void BHForceTree::threadBHForceTree(
  ID_T curIndx,       // Current node/particle
  ID_T sibling,       // Sibling of current
  ID_T parent,        // Parent of current
  ID_T* lastIndx,     // Last node/particle visited (threading cursor)
  POSVEL_T* radius)   // Needed to pass up partRadius
{
  // Depth-first pass over the tree that (a) threads nodes and particles
  // into a single nextNode chain, (b) records parent/sibling links, and
  // (c) accumulates per-node total mass, center of mass and a bounding
  // "particle radius" used later for opening-angle / ignore decisions.

  // Set the next index in the threading for node or particle
  // Particles and nodes are threaded together so all are touched in iteration
  if (*lastIndx >= 0) {
    // Indices >= nodeOffset refer to FNodes; below that, FParticles
    if (*lastIndx >= this->nodeOffset)
      this->fNode[(*lastIndx - this->nodeOffset)].u.n.nextNode = curIndx;
    else
      this->fParticle[*lastIndx].nextNode = curIndx;
  }
  *lastIndx = curIndx;

  // FParticle saves the parent and sibling FNode id
  if (curIndx < this->nodeOffset) {
    this->fParticle[curIndx].parent = parent;
    this->fParticle[curIndx].sibling = sibling;

  // FNode recurses on each of the children
  } else {
    ID_T child[NUM_CHILDREN];
    FNode* curNode = &this->fNode[curIndx - this->nodeOffset];

    // Store mass and center of mass for each child node or particle
    POSVEL_T childMass[NUM_CHILDREN];
    POSVEL_T childRadius[NUM_CHILDREN];
    POSVEL_T childCenter[NUM_CHILDREN][DIMENSION];

    // Snapshot the child ids now: the recursion below overwrites the
    // u.child[] union members with the threaded u.n structure
    for (int j = 0; j < NUM_CHILDREN; j++) {
      child[j] = curNode->u.child[j];
      childMass[j] = 0.0;
      childRadius[j] = 0.0;
    }

    ////////////////////////////////////////////////////////////////////////
    //
    // Recurse on each of the children, recording information on the way up
    //
    for (int j = 0; j < NUM_CHILDREN; j++) {

      // Skip any children which contain neither a particle or node
      ID_T childIndx, childIndxNext, nextSibling;
      if ((childIndx = child[j]) >= 0) {

        // Check for a sibling on this level or set to the next level up
        int jj;
        for (jj = j + 1; jj < NUM_CHILDREN; jj++)
          if ((childIndxNext = child[jj]) >= 0)
            break;
        if (jj < NUM_CHILDREN)
          nextSibling = childIndxNext;
        else
          nextSibling = -1;

        // Recursion to child
        // Since value of partRadius set in child is not necessarily the
        // distance between center of mass and furthest child return it
        threadBHForceTree(childIndx, nextSibling, curIndx, lastIndx, radius);

        // Child is a node or a particle
        if (childIndx >= this->nodeOffset) {
          // FNode, gather mass and center of mass of all contained particles
          FNode* childNode = &this->fNode[childIndx - this->nodeOffset];
          childMass[j] = childNode->u.n.partMass;
          childRadius[j] = *radius;
          for (int dim = 0; dim < DIMENSION; dim++)
            childCenter[j][dim] = childNode->u.n.partCenter[dim];
        } else {
          // FParticle, set mass and center of mass using particle location
          childMass[j] = this->particleMass;
          childRadius[j] = 0.0;
          childCenter[j][0] = this->xx[childIndx];
          childCenter[j][1] = this->yy[childIndx];
          childCenter[j][2] = this->zz[childIndx];
        }
      }
    }

    ////////////////////////////////////////////////////////////////////////
    //
    // Finished processing all children, collect information for this node
    //
    curNode->u.n.partMass = 0.0;
    for (int dim = 0; dim < DIMENSION; dim++)
      curNode->u.n.partCenter[dim] = 0.0;

    // Collect total mass and total center of mass for all children
    // (mass-weighted sum; divided through by total mass below)
    for (int j = 0; j < NUM_CHILDREN; j++) {
      if (childMass[j] > 0) {
        curNode->u.n.partMass += childMass[j];
        for (int dim = 0; dim < DIMENSION; dim++)
          curNode->u.n.partCenter[dim] +=
            childCenter[j][dim] * childMass[j];
      }
    }

    // Calculate center of mass for current node
    if (curNode->u.n.partMass > 0.0) {
      for (int dim = 0; dim < DIMENSION; dim++)
        curNode->u.n.partCenter[dim] /= curNode->u.n.partMass;
    } else {
      // Empty node: fall back to the geometric center of the cell
      for (int dim = 0; dim < DIMENSION; dim++)
        curNode->u.n.partCenter[dim] = curNode->geoCenter[dim];
    }

    // First method for calculating particle radius
    // Calculate the radius from node center of mass to furthest node corner
    POSVEL_T partRadius1 = distanceToFarCorner(
      curNode->u.n.partCenter[0],
      curNode->u.n.partCenter[1],
      curNode->u.n.partCenter[2],
      curNode);

    // Second method for calculating particle radius
    // Calculate the radius from center of mass to furthest child
    POSVEL_T partRadius2 = 0.0;
    for (int j = 0; j < NUM_CHILDREN; j++) {
      if (childMass[j] > 0.0) {
        // Calculate the distance between this center of mass and that of child
        POSVEL_T dist = distanceToCenterOfMass(childCenter[j][0],
                                               childCenter[j][1],
                                               childCenter[j][2],
                                               curNode);
        // Add in the particle radius of the child to get furthest point
        dist += childRadius[j];
        if (dist > partRadius2)
          partRadius2 = dist;
      }
    }

    // Used by parent of this node
    *radius = partRadius2;

    // Save the smaller of the two particle radii (tightest valid bound)
    curNode->u.n.partRadius = min(partRadius1, partRadius2);

    // Set threaded structure for this node
    curNode->u.n.sibling = sibling;
    curNode->u.n.parent = parent;
  }
}
/////////////////////////////////////////////////////////////////////////
//
// Particle i has location vector x_i (xx_i, yy_i, zz_i)
// Particle i has velocity vector v_i (vx_i, vy_i, vz_i)
//
// Node j has center of particle mass c_j (cx_j, cy_j, cz_j)
// Node j has total particle mass of M_j
// Node j has bounding radius of R_j
//
// Distance between particle i and node j is
// d_ji = fabs(c_j - x_i)
//
// Rule for updating
// v_i(t_1) = v_i(t_0) + alpha * Sum over j of f_ji(d_ji, Mj)
// where f_ji is the short range force over finite range r_f
// where alpha is some coefficient
// where Sum over j nodes is determined by a tree walk
//
// An opening angle is defined as
// theta_ji = (2 R_j) / d_ji
//
// This angle determines whether a node should be opened to a higher resolution
// or whether it can be used as is because it is small enough or far enough away
// This is determined by comparing to a passed in theta_0
//
// Three actions can occur for a node encountered on the walk
//
// 1. Node is too far away to contribute to force
// if d_ji - R_j > r_f
// or distance of x_i to nearest corner of node > r_f
//
// 2. Node is close enough to contribute so check the opening angle
// if theta_ji > theta_0 follow nextNode to open this node to children
//
// 3. Node is close enough and theta_ji < theta_0
// calculate f_ji(d_ji, Mj) and update v_i
// follow the sibling link and not the nextNode link
//
// Force is calculated for each particle i by
// Starting at the root node and walking the entire tree collecting force
// Starting at the particle and walking up parents until a criteria is met
//
/////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////
//
// Short range force full N^2 calculation
//
/////////////////////////////////////////////////////////////////////////
void BHForceTree::treeForceN2(
  POSVEL_T critRadius) // Radius of furthest point
{
  // Brute-force O(n^2) short range force over all particle pairs.
  // Each pair inside the critical radius contributes -m/r^2 to both
  // particles; results are written into fParticle[].force.
  POSVEL_T critRadius2 = critRadius * critRadius;

  // std::vector replaces the raw new[]/delete[] pair: zero-initialized,
  // exception safe, no leak possible (RAII)
  vector<POTENTIAL_T> force(this->particleCount, 0.0);

  // First particle in halo to calculate force on
  for (int p = 0; p < this->particleCount; p++) {

    // Next particle in halo force loop; start at p+1 so each unordered
    // pair is evaluated once and applied symmetrically to both members
    for (int q = p+1; q < this->particleCount; q++) {

      POSVEL_T dx = (POSVEL_T) fabs(this->xx[p] - this->xx[q]);
      POSVEL_T dy = (POSVEL_T) fabs(this->yy[p] - this->yy[q]);
      POSVEL_T dz = (POSVEL_T) fabs(this->zz[p] - this->zz[q]);
      POSVEL_T r2 = dx * dx + dy * dy + dz * dz;

      // Compare squared distances to avoid a sqrt per pair;
      // r2 == 0 guards against division by zero for coincident particles
      if (r2 != 0.0 && r2 < critRadius2) {
        force[p] -= (this->mass[q] / r2);
        force[q] -= (this->mass[p] / r2);
      }
    }
  }
  for (int p = 0; p < this->particleCount; p++) {
    this->fParticle[p].force = force[p];
  }
}
/////////////////////////////////////////////////////////////////////////
//
// Short range gravity calculation for group of particles in a node
// Walk down the tree from the root until reaching node with less than the
// maximum number of particles in a group. Create an interaction list that
// will work for all particles and calculate force. For particles within
// the group the calculation will be n^2. For nodes outside the group
// decisions are made on whether to include the node or ignore it, or
// to accept it or open it.
//
/////////////////////////////////////////////////////////////////////////
void BHForceTree::treeForceGroup(
  POSVEL_T bhAngle,    // Open node to examine children
  POSVEL_T critRadius, // Accept or ignore node not opened
  int minGroup,        // Minimum particles in a group
  int maxGroup)        // Maximum particles in a group
{
  // Translate the group-size bounds from particle counts into total
  // mass bounds (all particles share this->particleMass), then start
  // the recursive group walk at the root node of the tree.
  POSVEL_T lowMass  = minGroup * this->particleMass;
  POSVEL_T highMass = maxGroup * this->particleMass;
  ID_T rootId = this->particleCount;
  walkTreeGroup(rootId, lowMass, highMass, bhAngle, critRadius);
}
/////////////////////////////////////////////////////////////////////////
//
// Walk the tree in search of nodes which are less than the maximum
// number of particles to constitute a group. All particles in the group
// will be treated together with the n^2 force calculated between members
// of the group and then having an interaction list applied. The group
// may consist of other nodes and particles and so the recursive descent
// will be needed to find all particles in the group.
//
/////////////////////////////////////////////////////////////////////////
void BHForceTree::walkTreeGroup(
  ID_T curId,          // Node or particle to process
  POSVEL_T minMass,    // Minimum mass for a group
  POSVEL_T maxMass,    // Maximum mass for a group
  POSVEL_T bhAngle,    // Open node to examine children
  POSVEL_T critRadius) // Accept or ignore node not opened
{
  // Recursive descent looking for nodes small enough (by total mass and
  // bounding radius) to be treated as a single group. Groups get one
  // shared interaction list; particles and undersized groups fall back
  // to per-particle treatment.
  if (curId < this->nodeOffset) {
    // Current object is a particle: build and apply its own lists.
    // Stack-allocated vectors (RAII) replace the original new/delete
    // pairs, which leaked if forceCalculation threw
    vector<ID_T> partInteract;
    vector<ID_T> nodeInteract;
    createParticleInteractList(curId, bhAngle, critRadius,
                               &partInteract, &nodeInteract);
    this->fParticle[curId].force = forceCalculation(
      curId, &partInteract, &nodeInteract);
  }
  else {
    // Current object is a node: examine each child in turn
    ID_T child = this->fNode[curId - this->nodeOffset].u.n.nextNode;
    while (child != -1) {
      if (child < this->nodeOffset) {
        // Child is a particle: per-particle interaction list
        vector<ID_T> partInteract;
        vector<ID_T> nodeInteract;
        createParticleInteractList(child, bhAngle, critRadius,
                                   &partInteract, &nodeInteract);
        this->fParticle[child].force = forceCalculation(
          child, &partInteract, &nodeInteract);
        child = this->fParticle[child].sibling;
      }
      else {
        // Child is a node
        FNode* childNode = &this->fNode[child - this->nodeOffset];

        // A candidate group must be light enough and compact enough
        if (childNode->u.n.partMass < maxMass &&
            childNode->u.n.partRadius < (critRadius * 0.5)) {

          // If the group is too small it can't function as a group
          // so run the topdown method on those particles
          if (childNode->u.n.partMass < minMass) {
            // Collect particles in subgroup
            vector<ID_T> particles;
            collectParticles(child, &particles);
            int count = (int) particles.size();
            for (int i = 0; i < count; i++) {
              ID_T pId = particles[i];
              treeForceGadgetTopDown(pId, bhAngle, critRadius);
            }
          }
          else {
            // Acceptable group: one shared interaction list for all members
            vector<ID_T> partInteract;
            vector<ID_T> nodeInteract;
            createNodeInteractList(child, bhAngle, critRadius,
                                   &partInteract, &nodeInteract);
            forceCalculationGroup(child, bhAngle, critRadius,
                                  &partInteract, &nodeInteract);
          }
        }
        else {
          // Too big to be a group: recurse further down
          walkTreeGroup(child, minMass, maxMass, bhAngle, critRadius);
        }
        child = this->fNode[child - this->nodeOffset].u.n.sibling;
      }
    }
  }
}
/////////////////////////////////////////////////////////////////////////
//
// Create the interaction list for the particle starting at root
//
/////////////////////////////////////////////////////////////////////////
void BHForceTree::createParticleInteractList(
  ID_T p, // Particle to calculate force on
  POSVEL_T bhAngle, // Open node to examine children
  POSVEL_T critRadius , // Accept or ignore node not opened
  vector<ID_T>* partInteract,
  vector<ID_T>* nodeInteract)
{
  // Walks the threaded tree from the root and fills partInteract with
  // particle ids and nodeInteract with node ids that will contribute to
  // the force on particle p. Nodes are opened when the opening angle
  // 2*partRadius/r exceeds bhAngle, accepted when small/far enough, and
  // ignored entirely when beyond critRadius.
  POSVEL_T dx, dy, dz, r, partRadius, distToNearPoint;
  POSVEL_T pos_x = this->xx[p];
  POSVEL_T pos_y = this->yy[p];
  POSVEL_T pos_z = this->zz[p];

  // Follow thread through tree from root choosing nodes and particles
  // which will contribute to the force of the given particle
  ID_T root = this->particleCount;
  ID_T index = root;
  while (index >= 0) {

    if (index < this->nodeOffset) {
      // Particle: accept when within the critical radius
      // (note p itself is not excluded here; forceCalculation is
      // presumably expected to skip the self-interaction — TODO confirm)
      dx = this->xx[index] - pos_x;
      dy = this->yy[index] - pos_y;
      dz = this->zz[index] - pos_z;
      r = sqrt(dx * dx + dy * dy + dz * dz);
      if (r < critRadius) {
        partInteract->push_back(index);
      }
      index = this->fParticle[index].nextNode;
    }

    else {
      // Node
      FNode* curNode = &this->fNode[index - this->nodeOffset];
      partRadius = curNode->u.n.partRadius;
      distToNearPoint = distanceToNearestPoint(pos_x, pos_y, pos_z, curNode);
      dx = curNode->u.n.partCenter[0] - pos_x;
      dy = curNode->u.n.partCenter[1] - pos_y;
      dz = curNode->u.n.partCenter[2] - pos_z;
      r = sqrt(dx * dx + dy * dy + dz * dz);

      // Node is ignored if it is too far away from the particle
      // Distance from particle to particle radius exceeds critical radius
      // Distance from particle to nearest side of node exceeds critical radius
      if ((r - partRadius) > critRadius || distToNearPoint > critRadius) {
        // Ignore node, move on to sibling of this node
        index = curNode->u.n.sibling;

        // If there is no sibling go up a level until we find a node
        ID_T parent = curNode->u.n.parent;
        while (index == -1 && parent != -1 && parent != root) {
          index = this->fNode[parent - this->nodeOffset].u.n.sibling;
          parent = this->fNode[parent - this->nodeOffset].u.n.parent;
        }
      }

      else {
        // Barnes-Hut opening criterion: theta = 2*partRadius / r
        if (2*partRadius > (r * bhAngle)) {
          // Open node: descend into first child via the thread
          index = curNode->u.n.nextNode;
        } else {
          // Accept: whole node contributes as a single pseudo-particle
          nodeInteract->push_back(index);
          index = curNode->u.n.sibling;

          // If there is no sibling go up a level until we find a node
          ID_T parent = curNode->u.n.parent;
          while (index == -1 && parent != -1 && parent != root) {
            index = this->fNode[parent - this->nodeOffset].u.n.sibling;
            parent = this->fNode[parent - this->nodeOffset].u.n.parent;
          }
        }
      }
    }
  }
}
/////////////////////////////////////////////////////////////////////////
//
// Create the interaction list for the node starting at root
// must test for acceptance based on a radius from center of mass to
// furthest particle to make sure it is most inclusive.
// Make sure my definition of partRadius does this
//
/////////////////////////////////////////////////////////////////////////
void BHForceTree::createNodeInteractList(
  ID_T node,
  POSVEL_T bhAngle, // Open node to examine children
  POSVEL_T critRadius, // Accept or ignore node not opened
  vector<ID_T>* partInteract,
  vector<ID_T>* nodeInteract)
{
  // Same threaded walk as createParticleInteractList, but the reference
  // point is the center of particle mass of the given node rather than
  // a single particle position, and the node itself is skipped so the
  // group does not interact with itself.
  POSVEL_T dx, dy, dz, r, partRadius, distToNearPoint;
  FNode* curNode = &this->fNode[node - this->nodeOffset];
  POSVEL_T pos_x = curNode->u.n.partCenter[0];
  POSVEL_T pos_y = curNode->u.n.partCenter[1];
  POSVEL_T pos_z = curNode->u.n.partCenter[2];

  // Follow thread through tree from root choosing nodes and particles
  // which will contribute to the force of the given particle
  ID_T root = this->particleCount;
  ID_T index = root;
  while (index >= 0) {

    if (index < this->nodeOffset) {
      // Particle: accept when within the critical radius of the
      // group's center of mass
      dx = this->xx[index] - pos_x;
      dy = this->yy[index] - pos_y;
      dz = this->zz[index] - pos_z;
      r = sqrt(dx * dx + dy * dy + dz * dz);
      if (r < critRadius) {
        partInteract->push_back(index);
      }
      index = this->fParticle[index].nextNode;
    }

    else {
      // Node
      FNode* childNode = &this->fNode[index - this->nodeOffset];

      // If the child is the node we are building the list for skip
      if (childNode != curNode) {
        partRadius = childNode->u.n.partRadius;
        distToNearPoint = distanceToNearestPoint(
          pos_x, pos_y, pos_z, childNode);
        dx = childNode->u.n.partCenter[0] - pos_x;
        dy = childNode->u.n.partCenter[1] - pos_y;
        dz = childNode->u.n.partCenter[2] - pos_z;
        r = sqrt(dx * dx + dy * dy + dz * dz);

        // Ignore node when it is entirely beyond the critical radius
        if ((r - partRadius) > critRadius || distToNearPoint > critRadius) {
          // Ignore node, move on to sibling of this node
          index = childNode->u.n.sibling;

          // If there is no sibling go up a level until we find a node
          ID_T parent = childNode->u.n.parent;
          while (index == -1 && parent != -1 && parent != root) {
            index = this->fNode[parent - this->nodeOffset].u.n.sibling;
            parent = this->fNode[parent - this->nodeOffset].u.n.parent;
          }
        }

        else {
          // Barnes-Hut opening criterion: theta = 2*partRadius / r
          if (2*partRadius > (r * bhAngle)) {
            // Open node: descend to first child via the thread
            index = childNode->u.n.nextNode;
          } else {
            // Accept node as a single pseudo-particle
            nodeInteract->push_back(index);
            index = childNode->u.n.sibling;

            // If there is no sibling go up a level until we find a node
            ID_T parent = childNode->u.n.parent;
            while (index == -1 && parent != -1 && parent != root) {
              index = this->fNode[parent - this->nodeOffset].u.n.sibling;
              parent = this->fNode[parent - this->nodeOffset].u.n.parent;
            }
          }
        }
      }
      else {
        // Skip the node itself: move to its sibling (or climb parents)
        index = childNode->u.n.sibling;
        ID_T parent = childNode->u.n.parent;
        while (index == -1 && parent != -1 && parent != root) {
          index = this->fNode[parent-nodeOffset].u.n.sibling;
          parent = this->fNode[parent-nodeOffset].u.n.parent;
        }
      }
    }
  }
}
/////////////////////////////////////////////////////////////////////////
//
// Force calculation for a group of particles
// Force is calculated between every pair of particles in the group
// Interaction lists are applied to every particle in the group
// Tree walk will have to continue from this node to locate all the
// particles which might be in subnodes of this node.
//
/////////////////////////////////////////////////////////////////////////
void BHForceTree::forceCalculationGroup(
  ID_T node,
  POSVEL_T bhAngle,    // Open or accept (kept for interface; unused here —
                       // NOTE(review): list construction already applied it)
  POSVEL_T critRadius, // Accept or ignore node not opened
  vector<ID_T>* partInteract,
  vector<ID_T>* nodeInteract)
{
  // Force for every particle contained in `node`: exact n^2 pairwise
  // force within the group, plus the shared interaction lists applied
  // per particle.

  // Collect all particles in the tree from this node downwards.
  // Stack vector (RAII) replaces the original new/delete pair
  vector<ID_T> particles;
  collectParticles(node, &particles);
  int count = (int) particles.size();

  // Pairwise accumulator, zero-initialized; std::vector replaces the
  // raw new[]/delete[] pair (exception safe, no leak)
  vector<POTENTIAL_T> force(count, 0.0);

  // First particle in halo to calculate force on
  for (int p = 0; p < count; p++) {

    // Upper-triangular loop: each unordered pair is computed once and
    // applied symmetrically to both members
    for (int q = p+1; q < count; q++) {
      ID_T pId = particles[p];
      ID_T qId = particles[q];

      POSVEL_T dx = (POSVEL_T) fabs(this->xx[pId] - this->xx[qId]);
      POSVEL_T dy = (POSVEL_T) fabs(this->yy[pId] - this->yy[qId]);
      POSVEL_T dz = (POSVEL_T) fabs(this->zz[pId] - this->zz[qId]);
      POSVEL_T r2 = dx * dx + dy * dy + dz * dz;

      // r2 == 0 guard avoids division by zero for coincident particles
      if (r2 != 0.0) {
        force[p] -= (this->mass[qId] / r2);
        force[q] -= (this->mass[pId] / r2);
      }
    }
  }

  // Process each particle against the interaction lists
  // Node interact list was created using the node this particle is in
  // so it may need to be adjusted first
  for (int p = 0; p < count; p++) {
    ID_T pId = particles[p];
    POSVEL_T value =
      forceCalculationParticle(pId, critRadius,
                               partInteract, nodeInteract);
    force[p] += value;
    this->fParticle[pId].force = force[p];
  }
}
/////////////////////////////////////////////////////////////////////////
//
// Short range force calculation
// Potential is calculated and is used to determine the acceleration of
// the particle. Acceleration is applied to the current velocity to
// produce the velocity at the next time step.
//
/////////////////////////////////////////////////////////////////////////
POSVEL_T BHForceTree::forceCalculationParticle(
  ID_T p0, // Target particle index
  POSVEL_T critRadius,
  vector<ID_T>* partInteract, // Particles acting on p
  vector<ID_T>* nodeInteract) // Nodes acting on p
{
  // Applies the interaction lists to particle p0: accumulates the
  // potential phi (returned) and updates the particle's velocity in
  // place. Nodes contribute through their center of mass and total
  // mass; particles through position and mass.
  // NOTE(review): accel[] accumulates the same increments that are
  // applied to vx/vy/vz but is never read after the loops — looks like
  // leftover scaffolding; confirm before removing.
  POSVEL_T accel[DIMENSION];
  POSVEL_T phi = 0.0;

  POSVEL_T pos0_x = this->xx[p0];
  POSVEL_T pos0_y = this->yy[p0];
  POSVEL_T pos0_z = this->zz[p0];

  for (int dim = 0; dim < DIMENSION; dim++)
    accel[dim] = 0.0;

  int numberOfNodes = (int) nodeInteract->size();
  int numberOfParticles = (int) partInteract->size();

  // Particles contributing to the force use location and mass of one particle
  for (int p = 0; p < numberOfParticles; p++) {
    ID_T particle = (*partInteract)[p];
    // Skip the self-interaction
    if (p0 != particle) {

      POSVEL_T dx = this->xx[particle] - pos0_x;
      POSVEL_T dy = this->yy[particle] - pos0_y;
      POSVEL_T dz = this->zz[particle] - pos0_z;

      POSVEL_T r2 = dx * dx + dy * dy + dz * dz;
      POSVEL_T r = sqrt(r2);

      if (r < critRadius) {
        // m_fl->f_over_r supplies the (softened/filtered) force kernel;
        // m_fcoeff scales force into a velocity increment —
        // semantics defined elsewhere in the class, not visible here
        POSVEL_T f_over_r = this->mass[particle] * m_fl->f_over_r(r2);
        //POSVEL_T f_over_r = this->mass[particle] / r2;

        phi -= f_over_r;
        accel[0] += dx * f_over_r * m_fcoeff;
        accel[1] += dy * f_over_r * m_fcoeff;
        accel[2] += dz * f_over_r * m_fcoeff;
        this->vx[p0] += dx * f_over_r * m_fcoeff;
        this->vy[p0] += dy * f_over_r * m_fcoeff;
        this->vz[p0] += dz * f_over_r * m_fcoeff;
      }
    }
  }

  // Nodes contributing to force use center of mass and total particle mass
  for (int n = 0; n < numberOfNodes; n++) {
    FNode* node = &this->fNode[(*nodeInteract)[n] - this->nodeOffset];
    POSVEL_T partRadius = node->u.n.partRadius;
    POSVEL_T distToNearPoint = distanceToNearestPoint(
      pos0_x, pos0_y, pos0_z, node);

    POSVEL_T dx = node->u.n.partCenter[0] - pos0_x;
    POSVEL_T dy = node->u.n.partCenter[1] - pos0_y;
    POSVEL_T dz = node->u.n.partCenter[2] - pos0_z;
    POSVEL_T r2 = dx * dx + dy * dy + dz * dz;
    POSVEL_T r = sqrt(r2);

    // Re-check range: the shared group list may contain nodes that are
    // out of range for this particular particle
    if ((r - partRadius) > critRadius || distToNearPoint > critRadius) {
      // Ignore
    } else {
      POSVEL_T f_over_r = node->u.n.partMass * m_fl->f_over_r(r2);
      //POSVEL_T f_over_r = node->u.n.partMass / r2;

      phi -= f_over_r;
      accel[0] += dx * f_over_r * m_fcoeff;
      accel[1] += dy * f_over_r * m_fcoeff;
      accel[2] += dz * f_over_r * m_fcoeff;
      this->vx[p0] += dx * f_over_r * m_fcoeff;
      this->vy[p0] += dy * f_over_r * m_fcoeff;
      this->vz[p0] += dz * f_over_r * m_fcoeff;
    }
  }
  return phi;
}
/////////////////////////////////////////////////////////////////////////
//
// Collect all particle ids from this node downwards in tree
//
/////////////////////////////////////////////////////////////////////////
void BHForceTree::collectParticles(ID_T curId, vector<ID_T>* particles)
{
  // Gather the ids of every particle contained in the subtree rooted at
  // the node curId, appending them to the supplied vector.
  ID_T next = this->fNode[curId - this->nodeOffset].u.n.nextNode;
  while (next != -1) {
    if (next >= this->nodeOffset) {
      // Child is a node: recurse into its subtree, then step to its sibling
      collectParticles(next, particles);
      next = this->fNode[next - this->nodeOffset].u.n.sibling;
    } else {
      // Child is a particle: record it and step to its sibling
      particles->push_back(next);
      next = this->fParticle[next].sibling;
    }
  }
}
/////////////////////////////////////////////////////////////////////////
//
// Short range gravity calculation for a single particle
// Starting with the root and following threads and siblings makes decisions
// about which nodes are opened, accepted or ignored
//
/////////////////////////////////////////////////////////////////////////
void BHForceTree::treeForceGadgetTopDown(
  ID_T p,              // Particle to calculate force on
  POSVEL_T bhAngle,    // Open node to examine children
  POSVEL_T critRadius) // Accept or ignore node not opened
{
  // Top-down walk from the root: for each node decide ignore / open /
  // accept, collecting interaction lists, then evaluate the force on p.
  // Stack-allocated vectors (RAII) replace the original new/delete
  // pairs, which leaked if forceCalculation threw
  vector<ID_T> partInteract;
  vector<ID_T> nodeInteract;

  POSVEL_T dx, dy, dz, r, partRadius, distToNearPoint;
  POSVEL_T pos_x = this->xx[p];
  POSVEL_T pos_y = this->yy[p];
  POSVEL_T pos_z = this->zz[p];

  // Follow thread through tree from root choosing nodes and particles
  // which will contribute to the force of the given particle
  ID_T root = this->particleCount;
  ID_T index = root;
  while (index >= 0) {

    if (index < this->nodeOffset) {
      // Particle: accept when within the critical radius
      dx = this->xx[index] - pos_x;
      dy = this->yy[index] - pos_y;
      dz = this->zz[index] - pos_z;
      r = sqrt(dx * dx + dy * dy + dz * dz);
      if (r < critRadius) {
        partInteract.push_back(index);
      }
      index = this->fParticle[index].nextNode;

    } else {
      // Node
      FNode* curNode = &this->fNode[index - this->nodeOffset];
      partRadius = curNode->u.n.partRadius;
      distToNearPoint = distanceToNearestPoint(pos_x, pos_y, pos_z, curNode);
      dx = curNode->u.n.partCenter[0] - pos_x;
      dy = curNode->u.n.partCenter[1] - pos_y;
      dz = curNode->u.n.partCenter[2] - pos_z;
      r = sqrt(dx * dx + dy * dy + dz * dz);

      // Node is ignored if it is too far away from the particle
      // Distance from particle to particle radius exceeds critical radius
      // Distance from particle to nearest side of node exceeds critical radius
      if ((r - partRadius) > critRadius || distToNearPoint > critRadius) {
        // Ignore node, move on to sibling of this node
        index = curNode->u.n.sibling;

        // If there is no sibling go up a level until we find a node
        ID_T parent = curNode->u.n.parent;
        while (index == -1 && parent != -1 && parent != root) {
          index = this->fNode[parent - this->nodeOffset].u.n.sibling;
          parent = this->fNode[parent - this->nodeOffset].u.n.parent;
        }

      } else {
        if (2*partRadius > (r * bhAngle)) {
          // Open node, move on to first child
          index = curNode->u.n.nextNode;
        } else {
          // Accept node, add to interact list, move on to sibling
          nodeInteract.push_back(index);
          index = curNode->u.n.sibling;

          // If there is no sibling go up a level until we find a node
          // (this->nodeOffset used consistently with the rest of the file)
          ID_T parent = curNode->u.n.parent;
          while (index == -1 && parent != -1 && parent != root) {
            index = this->fNode[parent - this->nodeOffset].u.n.sibling;
            parent = this->fNode[parent - this->nodeOffset].u.n.parent;
          }
        }
      }
    }
  }

  // Force calculation for this particle
  this->fParticle[p].force =
    forceCalculation(p, &partInteract, &nodeInteract);
}
void BHForceTree::treeForceGadgetTopDownFast(
  ID_T p,              // Particle to calculate force on
  POSVEL_T bhAngle,    // Open node to examine children
  POSVEL_T critRadius) // Accept or ignore node not opened
{
  // Variant of treeForceGadgetTopDown that stores raw positions and
  // masses (structure-of-arrays) instead of ids, so the force kernel
  // needs no indirection. Self-interaction is excluded at list-build
  // time (p != index).
  // Stack-allocated vectors (RAII) replace the original new/delete
  // pairs, which leaked if forceCalculationFast threw
  vector<POSVEL_T> xInteract;
  vector<POSVEL_T> yInteract;
  vector<POSVEL_T> zInteract;
  vector<POSVEL_T> mInteract;

  POSVEL_T dx, dy, dz, r, partRadius, distToNearPoint;
  POSVEL_T pos_x = this->xx[p];
  POSVEL_T pos_y = this->yy[p];
  POSVEL_T pos_z = this->zz[p];

  // Follow thread through tree from root choosing nodes and particles
  // which will contribute to the force of the given particle
  ID_T root = this->particleCount;
  ID_T index = root;
  while (index >= 0) {

    if (index < this->nodeOffset) {
      // Particle: accept when in range and not the target itself
      dx = this->xx[index] - pos_x;
      dy = this->yy[index] - pos_y;
      dz = this->zz[index] - pos_z;
      r = sqrt(dx * dx + dy * dy + dz * dz);
      if (r < critRadius && p != index) {
        xInteract.push_back(this->xx[index]);
        yInteract.push_back(this->yy[index]);
        zInteract.push_back(this->zz[index]);
        mInteract.push_back(this->mass[index]);
      }
      index = this->fParticle[index].nextNode;

    } else {
      // Node
      FNode* curNode = &this->fNode[index - this->nodeOffset];
      partRadius = curNode->u.n.partRadius;
      distToNearPoint = distanceToNearestPoint(pos_x, pos_y, pos_z, curNode);
      dx = curNode->u.n.partCenter[0] - pos_x;
      dy = curNode->u.n.partCenter[1] - pos_y;
      dz = curNode->u.n.partCenter[2] - pos_z;
      r = sqrt(dx * dx + dy * dy + dz * dz);

      // Node is ignored if it is too far away from the particle
      // Distance from particle to particle radius exceeds critical radius
      // Distance from particle to nearest side of node exceeds critical radius
      if ((r - partRadius) > critRadius || distToNearPoint > critRadius) {
        // Ignore node, move on to sibling of this node
        index = curNode->u.n.sibling;

        // If there is no sibling go up a level until we find a node
        ID_T parent = curNode->u.n.parent;
        while (index == -1 && parent != -1 && parent != root) {
          index = this->fNode[parent - this->nodeOffset].u.n.sibling;
          parent = this->fNode[parent - this->nodeOffset].u.n.parent;
        }

      } else {
        if (2*partRadius > (r * bhAngle)) {
          // Open node, move on to first child
          index = curNode->u.n.nextNode;
        } else {
          // Accept node as a pseudo-particle at its center of mass
          xInteract.push_back(curNode->u.n.partCenter[0]);
          yInteract.push_back(curNode->u.n.partCenter[1]);
          zInteract.push_back(curNode->u.n.partCenter[2]);
          mInteract.push_back(curNode->u.n.partMass);
          index = curNode->u.n.sibling;

          // If there is no sibling go up a level until we find a node
          // (this->nodeOffset used consistently with the rest of the file)
          ID_T parent = curNode->u.n.parent;
          while (index == -1 && parent != -1 && parent != root) {
            index = this->fNode[parent - this->nodeOffset].u.n.sibling;
            parent = this->fNode[parent - this->nodeOffset].u.n.parent;
          }
        }
      }
    }
  }

  // Force calculation for this particle
  this->fParticle[p].force =
    forceCalculationFast(p, &xInteract, &yInteract, &zInteract, &mInteract);
}
void BHForceTree::treeForceGadgetTopDownFast2(
  ID_T p, // Particle to calculate force on
  POSVEL_T bhAngle, // Open node to examine children
  POSVEL_T critRadius, // Accept or ignore node not opened
  vector<POSVEL_T>* xInteract,
  vector<POSVEL_T>* yInteract,
  vector<POSVEL_T>* zInteract,
  vector<POSVEL_T>* mInteract,
  double *timeWalk,
  double *timeEval)
{
  // Instrumented variant of treeForceGadgetTopDownFast: the caller owns
  // the four interaction vectors (so they can be reused across calls to
  // avoid reallocation) and receives the CPU time spent in the tree
  // walk (*timeWalk) and in the force evaluation (*timeEval).
  // The vectors are cleared on exit, not on entry.
  POSVEL_T dx, dy, dz, r, partRadius, distToNearPoint;
  POSVEL_T pos_x = this->xx[p];
  POSVEL_T pos_y = this->yy[p];
  POSVEL_T pos_z = this->zz[p];

  // Follow thread through tree from root choosing nodes and particles
  // which will contribute to the force of the given particle
  ID_T root = this->particleCount;
  ID_T index = root;

  // clock() measures CPU time, not wall time
  clock_t start, end;
  start = clock();
  while (index >= 0) {

    if (index < this->nodeOffset) {
      // Particle: accept when in range and not the target itself
      dx = this->xx[index] - pos_x;
      dy = this->yy[index] - pos_y;
      dz = this->zz[index] - pos_z;
      r = sqrt(dx * dx + dy * dy + dz * dz);
      if (r < critRadius && p != index) {
        xInteract->push_back(this->xx[index]);
        yInteract->push_back(this->yy[index]);
        zInteract->push_back(this->zz[index]);
        mInteract->push_back(this->mass[index]);
      }
      index = this->fParticle[index].nextNode;

    } else {
      // Node
      FNode* curNode = &this->fNode[index - this->nodeOffset];
      partRadius = curNode->u.n.partRadius;
      distToNearPoint = distanceToNearestPoint(pos_x, pos_y, pos_z, curNode);
      dx = curNode->u.n.partCenter[0] - pos_x;
      dy = curNode->u.n.partCenter[1] - pos_y;
      dz = curNode->u.n.partCenter[2] - pos_z;
      r = sqrt(dx * dx + dy * dy + dz * dz);

      // Node is ignored if it is too far away from the particle
      // Distance from particle to particle radius exceeds critical radius
      // Distance from particle to nearest side of node exceeds critical radius
      if ((r - partRadius) > critRadius || distToNearPoint > critRadius) {
        // Ignore node, move on to sibling of this node
        index = curNode->u.n.sibling;

        // If there is no sibling go up a level until we find a node
        ID_T parent = curNode->u.n.parent;
        while (index == -1 && parent != -1 && parent != root) {
          index = this->fNode[parent - this->nodeOffset].u.n.sibling;
          parent = this->fNode[parent - this->nodeOffset].u.n.parent;
        }

      } else {
        if (2*partRadius > (r * bhAngle)) {
          // Open node, move on to first child
          index = curNode->u.n.nextNode;
        } else {
          // Accept node as a pseudo-particle at its center of mass
          xInteract->push_back(curNode->u.n.partCenter[0]);
          yInteract->push_back(curNode->u.n.partCenter[1]);
          zInteract->push_back(curNode->u.n.partCenter[2]);
          mInteract->push_back(curNode->u.n.partMass);
          index = curNode->u.n.sibling;

          // If there is no sibling go up a level until we find a node
          ID_T parent = curNode->u.n.parent;
          while (index == -1 && parent != -1 && parent != root) {
            index = this->fNode[parent-nodeOffset].u.n.sibling;
            parent = this->fNode[parent-nodeOffset].u.n.parent;
          }
        }
      }
    }
  }
  end = clock();
  *timeWalk = 1.0*(end-start)/CLOCKS_PER_SEC;

  // Force calculation for this particle
  start = clock();
  this->fParticle[p].force =
    forceCalculationFast(p, xInteract, yInteract, zInteract, mInteract);
  end = clock();
  *timeEval = 1.0*(end-start)/CLOCKS_PER_SEC;

  // Leave the caller's vectors empty and ready for the next particle
  xInteract->clear();
  yInteract->clear();
  zInteract->clear();
  mInteract->clear();
}
/////////////////////////////////////////////////////////////////////////
//
// Short range gravity calculation for a single particle
// Starting with the particle walk up the parents processing siblings
// by testing particles and by opening, accepting or ignoring nodes.
// Stop moving up parents when the nearest side is beyond critical radius.
//
/////////////////////////////////////////////////////////////////////////
void BHForceTree::treeForceGadgetBottomUp(
  ID_T p,              // Particle to calculate force on
  POSVEL_T bhAngle,    // Open node to examine children
  POSVEL_T critRadius) // Accept or ignore node not opened
{
  // Bottom-up walk: starting from the particle, climb the parent chain
  // and at each level examine the siblings of the path just climbed,
  // opening / accepting / ignoring nodes exactly as the top-down walk
  // does. Stack-allocated vectors (RAII) replace the original
  // new/delete pairs, which leaked if forceCalculation threw.
  vector<ID_T> partInteract;
  vector<ID_T> nodeInteract;

  // Location of particle
  POSVEL_T dx, dy, dz, r, partRadius;
  POSVEL_T pos_x = this->xx[p];
  POSVEL_T pos_y = this->yy[p];
  POSVEL_T pos_z = this->zz[p];

  ID_T curId = p;
  ID_T parent = this->fParticle[curId].parent;
  while (parent != -1) {

    // Examine every child of the current parent except the one we
    // climbed out of (curId)
    ID_T child = this->fNode[parent - this->nodeOffset].u.n.nextNode;
    while (child != -1) {
      if (child != curId) {
        if (child < this->nodeOffset) {
          // Particle: accept when within the critical radius
          dx = this->xx[child] - pos_x;
          dy = this->yy[child] - pos_y;
          dz = this->zz[child] - pos_z;
          r = sqrt(dx * dx + dy * dy + dz * dz);
          if (r < critRadius) {
            partInteract.push_back(child);
          }
          child = this->fParticle[child].sibling;
        }
        else {
          // Node
          FNode* childNode = &this->fNode[child - this->nodeOffset];
          partRadius = childNode->u.n.partRadius;
          POSVEL_T distToNearPoint =
            distanceToNearestPoint(pos_x, pos_y, pos_z, childNode);
          dx = childNode->u.n.partCenter[0] - pos_x;
          dy = childNode->u.n.partCenter[1] - pos_y;
          dz = childNode->u.n.partCenter[2] - pos_z;
          r = sqrt(dx * dx + dy * dy + dz * dz);

          // Check for ignore of node first
          if ((r - partRadius) > critRadius || distToNearPoint > critRadius) {
            // Ignore
          } else
          if (2*partRadius < (r * bhAngle)) {
            // Accept the node as a single pseudo-particle
            nodeInteract.push_back(child);
          } else {
            // Open node: recursively decompose it into the lists
            recurseOpenNode(childNode, pos_x, pos_y, pos_z,
                            bhAngle, critRadius,
                            &partInteract, &nodeInteract);
          }
          child = this->fNode[child - this->nodeOffset].u.n.sibling;
        }
      }
      else {
        // Skip the child we came from; advance using its own link type
        if (curId < this->nodeOffset)
          child = this->fParticle[curId].sibling;
        else
          child = this->fNode[child - this->nodeOffset].u.n.sibling;
      }
    }
    // Climb one level and repeat
    curId = parent;
    parent = this->fNode[parent - this->nodeOffset].u.n.parent;
  }

  // Force calculation for this particle
  this->fParticle[p].force =
    forceCalculation(p, &partInteract, &nodeInteract);
}
/////////////////////////////////////////////////////////////////////////
//
// Open this node recursively adding accepted nodes and particles
// to the interact list. Each child is either a particle (kept when inside
// the critical radius), or a node which is ignored, accepted, or opened
// recursively using the same criteria as the caller.
//
/////////////////////////////////////////////////////////////////////////
void BHForceTree::recurseOpenNode(
  FNode* curNode,
  POSVEL_T pos_x,              // Target particle location
  POSVEL_T pos_y,
  POSVEL_T pos_z,
  POSVEL_T bhAngle,            // Open node to examine children
  POSVEL_T critRadius,         // Accept or ignore node not opened
  vector<ID_T>* partInteract,  // Output: particles for force
  vector<ID_T>* nodeInteract)  // Output: accepted nodes for force
{
  POSVEL_T dx, dy, dz, r, partRadius;

  // Iterate the children of this node via the threaded sibling links
  ID_T child = curNode->u.n.nextNode;
  while (child != -1) {
    if (child < this->nodeOffset) {
      // Particle: include only when inside the critical radius
      dx = this->xx[child] - pos_x;
      dy = this->yy[child] - pos_y;
      dz = this->zz[child] - pos_z;
      r = sqrt(dx * dx + dy * dy + dz * dz);
      if (r < critRadius) {
        partInteract->push_back(child);
      }
      child = this->fParticle[child].sibling;
    } else {
      FNode* childNode = &this->fNode[child - this->nodeOffset];
      partRadius = childNode->u.n.partRadius;
      POSVEL_T distToNearPoint =
        distanceToNearestPoint(pos_x, pos_y, pos_z, childNode);
      dx = childNode->u.n.partCenter[0] - pos_x;
      dy = childNode->u.n.partCenter[1] - pos_y;
      dz = childNode->u.n.partCenter[2] - pos_z;
      r = sqrt(dx * dx + dy * dy + dz * dz);

      // Check for ignore of node first
      if ((r - partRadius) > critRadius || distToNearPoint > critRadius) {
        // Ignore: entirely beyond the short-range cutoff
      } else
      if (2*partRadius < (r * bhAngle)) {
        // Accept: node subtends less than the opening angle
        nodeInteract->push_back(child);
      } else {
        // Open node: recurse into its constituents
        recurseOpenNode(childNode, pos_x, pos_y, pos_z,
                        bhAngle, critRadius,
                        partInteract, nodeInteract);
      }
      child = this->fNode[child - this->nodeOffset].u.n.sibling;
    }
  }
}
/////////////////////////////////////////////////////////////////////////
//
// Short range gravity calculation for every particle in the tree.
// Drives a single recursive walk from the root which saves previous work
// for reuse when popping out of recursion. Based on Barnes treecode.
//
/////////////////////////////////////////////////////////////////////////
void BHForceTree::treeForceBarnesAdjust(
  POSVEL_T bhAngle,        // Open node to examine children
  POSVEL_T critRadius)     // Accept or ignore node not opened
{
  // The root node id immediately follows the particle id range
  ID_T root = this->particleCount;

  // Work lists shared by every level of the recursive walk
  vector<ID_T> active;
  vector<ID_T> partInteract;
  vector<ID_T> nodeInteract;
  active.push_back(root);

  // Walk uses opening angle, critical radius for open, accept and ignore
  walkTreeBarnesAdjust(&active, &partInteract, &nodeInteract,
                       root, bhAngle, critRadius);
}
///////////////////////////////////////////////////////////////////////////
//
// Walk the BH tree for the given particle or node (identifier curId)
// Recursion starts with a new active list which will contain particles
// and nodes which possibly will contribute to the force on a particle.
// Particles on the active list will always be chosen for the interact list.
// Nodes on the active list may be OPENED if they are close enough
// or ACCEPTED and used in summary if they are within a critical radius
// and IGNORED otherwise. Nodes that are opened
// have all their children (particles or nodes) added to the active list.
//
// After the children are added a new level of recursion starts by
// calculating a new size for that level, starting a fresh active list
// and building on the current interact lists.
//
// Recursion continues until the active list has been completely processed.
// When a level of recursion is complete the active list is destroyed
// and new items put on the interact lists are popped off.
//
// The advantage to this method is that items in the interaction list may
// not need to be processed again when we are doing the low levels of the tree.
//
/////////////////////////////////////////////////////////////////////////
void BHForceTree::walkTreeBarnesAdjust(
  vector<ID_T>* curActive,      // nodes to be acted on
  vector<ID_T>* partInteract,   // particles for force
  vector<ID_T>* nodeInteract,   // nodes for force
  ID_T curId,                   // current particle or node
  POSVEL_T bhAngle,             // open node
  POSVEL_T critRadius)          // accept or ignore node
{
  POSVEL_T dx, dy, dz, r, partRadius, distToNearPoint;

  // Current active list window (snapshot of size before any appends)
  int begIndx = 0;
  int endIndx = curActive->size();

  // Construct active list for each recursion
  vector<ID_T>* newActive = new vector<ID_T>;

  // Set the location for the particle or node for the walk:
  // particles use their coordinates, nodes use their center of mass
  POSVEL_T pos_x, pos_y, pos_z;
  if (curId < this->nodeOffset) {
    pos_x = this->xx[curId];
    pos_y = this->yy[curId];
    pos_z = this->zz[curId];
  } else {
    FNode* curNode = &this->fNode[curId - this->nodeOffset];
    pos_x = curNode->u.n.partCenter[0];
    pos_y = curNode->u.n.partCenter[1];
    pos_z = curNode->u.n.partCenter[2];
  }

  /////////////////////////////////////////////////////////////////////////
  //
  // Process the active list window adding children to end of list
  // Valid particles and accepted nodes are copied to the interact list
  //
  // hasChildren is set whenever any cell appears in the window (even if
  // it is then ignored); pcount/ncount record how many entries this level
  // appended to the interact lists so they can be popped before returning
  int hasChildren = 0;
  int pcount = 0;
  int ncount = 0;

  for (int indx = begIndx; indx < endIndx; indx++) {
    // If the current active element is a cell it will be
    //   ACCEPTED and copied to the interact list
    //   OPENED and its children will be added to the end of the active list
    //   IGNORED because it is too far away
    if ((*curActive)[indx] >= this->nodeOffset) {
      hasChildren = 1;
      FNode* actNode = &this->fNode[(*curActive)[indx] - this->nodeOffset];
      partRadius = actNode->u.n.partRadius;
      distToNearPoint = distanceToNearestPoint(pos_x, pos_y, pos_z, actNode);
      dx = actNode->u.n.partCenter[0] - pos_x;
      dy = actNode->u.n.partCenter[1] - pos_y;
      dz = actNode->u.n.partCenter[2] - pos_z;
      r = sqrt(dx * dx + dy * dy + dz * dz);

      // Node is ignored if it is too far away from the particle
      // Distance from particle to particle radius exceeds critical radius
      // Distance from particle to nearest side of node exceeds critical radius
      if ((r - partRadius) > critRadius || distToNearPoint > critRadius) {
        // Ignore node, move on to sibling of this node
      }
      else {
        if (2*partRadius > (r * bhAngle)) {
          // Open node, move on to first child
          ID_T child =
            this->fNode[(*curActive)[indx] - this->nodeOffset].u.n.nextNode;
          while (child != -1) {
            if (child >= this->nodeOffset) {
              // Child is a node which is active and must be considered
              newActive->push_back(child);
              child = this->fNode[child - this->nodeOffset].u.n.sibling;
            }
            else {
              // Child is a particle, add to interaction list
              partInteract->push_back(child);
              pcount++;
              child = this->fParticle[child].sibling;
            }
          }
        } else {
          // Accept node, add to interact list, move on to sibling
          nodeInteract->push_back((*curActive)[indx]);
          ncount++;
        }
      }
    }
  }

  /////////////////////////////////////////////////////////////////////////
  //
  // At this point a new level of children may have been added to active list
  // Process children by dividing the node size and recursing
  //
  if (hasChildren) {
    // Current item on active list is a cell
    if (curId >= this->nodeOffset) {
      // Process each child
      ID_T child = fNode[curId - this->nodeOffset].u.n.nextNode;
      while (child != -1) {
        // Child is a node
        if (child >= this->nodeOffset) {
          FNode* childNode = &this->fNode[child - this->nodeOffset];
          // Recurse on walk tree to process child
          walkTreeBarnesAdjust(newActive, partInteract, nodeInteract,
                               child, bhAngle, critRadius);
          child = childNode->u.n.sibling;
        }
        // Child is a particle
        else {
          walkTreeBarnesAdjust(newActive, partInteract, nodeInteract,
                               child, bhAngle, critRadius);
          child = this->fParticle[child].sibling;
        }
      }
    }
    // Current item on active list is a particle
    else {
      walkTreeBarnesAdjust(newActive, partInteract, nodeInteract,
                           curId, bhAngle, critRadius);
    }
  }

  /////////////////////////////////////////////////////////////////////////
  //
  // If no new items were added to active list we are done and can process
  // the interact list for this particle p (which can't be a cell)
  //
  else {
    // NOTE(review): nodes satisfy id >= nodeOffset; this check uses '>'
    // so a node with id == nodeOffset would escape the error report --
    // confirm whether '>=' was intended (same pattern in walkTreeBarnesQuick)
    if (curId > this->nodeOffset)
      cout << "ERROR: POP OUT ON NODE " << curId << endl;

    // Since the interact lists might contain accepted nodes from upper levels
    // which need to be opened for this particle, adjust the lists first
    vector<ID_T>* adjNodeInteract = new vector<ID_T>;
    vector<ID_T>* adjPartInteract = new vector<ID_T>;

    static Timings::TimerRef adjtimer = Timings::getTimer("Barnes Adjustment");
    Timings::startTimer(adjtimer);
    adjustInteraction(curId,
                      partInteract, nodeInteract,
                      adjPartInteract, adjNodeInteract,
                      bhAngle, critRadius);
    Timings::stopTimer(adjtimer);

    // Calculate force for the particle
    this->fParticle[curId].force =
      forceCalculation(curId, adjPartInteract, adjNodeInteract);

    delete adjNodeInteract;
    delete adjPartInteract;
  }

  // Active list is new for every recursion level
  // Interact lists are appended to at each recursion level
  // So interact lists must be popped by the correct number for this recursion
  for (int i = 0; i < pcount; i++)
    partInteract->pop_back();
  for (int i = 0; i < ncount; i++)
    nodeInteract->pop_back();

  delete newActive;
}
/////////////////////////////////////////////////////////////////////////
//
// Recursion enters with a guess for the interact lists which were set
// on previous invocations of the method. Check the node interact list
// which contains nodes which were accepted to see if they should
// actually be opened relative to this new current particle or node
// If so remove from the nodeInteract list and add to the active list
//
// Particles in the interact list might actually be grouped and used
// with their parent node as an accept, but leaving them will lead to
// a better answer, not a worse. So we won't change the partInteract
//
/////////////////////////////////////////////////////////////////////////
void BHForceTree::adjustInteraction(
  ID_T p0,                       // Particle the force will be computed for
  vector<ID_T>* partInteract,    // Input: particles accepted so far
  vector<ID_T>* nodeInteract,    // Input: nodes accepted so far
  vector<ID_T>* adjPartInteract, // Output: adjusted particle list
  vector<ID_T>* adjNodeInteract, // Output: adjusted node list
  POSVEL_T bhAngle,              // Open node to examine children
  POSVEL_T critRadius)           // Accept or ignore node not opened
{
  POSVEL_T dx, dy, dz, r, partRadius, distToNearPoint;

  // Get location of particle being adjusted for
  POSVEL_T pos_x = this->xx[p0];
  POSVEL_T pos_y = this->yy[p0];
  POSVEL_T pos_z = this->zz[p0];

  // Copy all particles to the adjust list, will only add new particles
  int numberOfParticles = (int) partInteract->size();
  for (int p = 0; p < numberOfParticles; p++)
    adjPartInteract->push_back((*partInteract)[p]);

  // Process each node to see if status changes from accept to ignore or open
  int numberOfNodes = (int) nodeInteract->size();
  for (int n = 0; n < numberOfNodes; n++) {
    FNode* curNode = &this->fNode[(*nodeInteract)[n] - this->nodeOffset];
    partRadius = curNode->u.n.partRadius;
    distToNearPoint = distanceToNearestPoint(pos_x, pos_y, pos_z, curNode);
    dx = curNode->u.n.partCenter[0] - pos_x;
    dy = curNode->u.n.partCenter[1] - pos_y;
    dz = curNode->u.n.partCenter[2] - pos_z;
    r = sqrt(dx * dx + dy * dy + dz * dz);

    // Node is ignored if it is too far away from the particle
    // Distance from particle to particle radius exceeds critical radius
    // Distance from particle to nearest side of node exceeds critical radius
    if ((r - partRadius) > critRadius || distToNearPoint > critRadius) {
      // Ignore node, move on to sibling of this node
    }
    else {
      if (2*partRadius > (r * bhAngle)) {
        // Node must be opened and constituent parts examined
        adjustNodeInteract(p0, curNode, adjPartInteract, adjNodeInteract,
                           bhAngle, critRadius);
      } else {
        // Accept node, add to interact list, move on to sibling
        adjNodeInteract->push_back((*nodeInteract)[n]);
      }
    }
  }
}
/////////////////////////////////////////////////////////////////////////
//
// Recursive part of interaction adjustment
// Examine children of current node recursively for inclusion into interaction
// Particles are always included; child nodes are ignored, accepted or
// opened recursively using the same criteria as adjustInteraction.
//
/////////////////////////////////////////////////////////////////////////
void BHForceTree::adjustNodeInteract(
  ID_T p0,                       // Particle the force will be computed for
  FNode* curNode,                // Node being opened
  vector<ID_T>* adjPartInteract, // Output: adjusted particle list
  vector<ID_T>* adjNodeInteract, // Output: adjusted node list
  POSVEL_T bhAngle,              // Open node to examine children
  POSVEL_T critRadius)           // Accept or ignore node not opened
{
  POSVEL_T dx, dy, dz, r, partRadius, distToNearPoint;

  // Get location of particle being adjusted for
  POSVEL_T pos_x = this->xx[p0];
  POSVEL_T pos_y = this->yy[p0];
  POSVEL_T pos_z = this->zz[p0];

  // Current node is to be opened and recursively checked for interactions
  ID_T child = curNode->u.n.nextNode;
  while (child != -1) {
    if (child < this->nodeOffset) {
      // Child is a particle, add to adjusted particle interact list
      // (no critical-radius check here, unlike the top-level walk)
      adjPartInteract->push_back(child);
      child = this->fParticle[child].sibling;
    }
    else {
      // Child is a node, check to see if it should be opened, accepted, ignored
      FNode* childNode = &this->fNode[child - this->nodeOffset];
      partRadius = childNode->u.n.partRadius;
      distToNearPoint = distanceToNearestPoint(pos_x, pos_y, pos_z, childNode);
      dx = childNode->u.n.partCenter[0] - pos_x;
      dy = childNode->u.n.partCenter[1] - pos_y;
      dz = childNode->u.n.partCenter[2] - pos_z;
      r = sqrt(dx * dx + dy * dy + dz * dz);

      // Node is ignored if it is too far away from the particle
      // Distance from particle to particle radius exceeds critical radius
      // Distance from particle to nearest side of node exceeds critical radius
      if ((r - partRadius) > critRadius || distToNearPoint > critRadius) {
        // Ignore node, move on to sibling of this node
      }
      else {
        if (2*partRadius > (r * bhAngle)) {
          // Node must be opened and constituent parts examined
          adjustNodeInteract(p0, childNode, adjPartInteract, adjNodeInteract,
                             bhAngle, critRadius);
        } else {
          // Accept node
          adjNodeInteract->push_back(child);
        }
      }
      child = this->fNode[child - this->nodeOffset].u.n.sibling;
    }
  }
}
/////////////////////////////////////////////////////////////////////////
//
// Short range gravity calculation for every particle in the tree.
// Drives a single recursive walk from the root which saves previous work
// for reuse when popping out of recursion. Based on Barnes treecode
// with quick scan.
//
/////////////////////////////////////////////////////////////////////////
void BHForceTree::treeForceBarnesQuick(
  POSVEL_T bhAngle,        // Open a node
  POSVEL_T critRadius)     // Accept or ignore node not opened
{
  // The root node id immediately follows the particle id range
  ID_T root = this->particleCount;

  // Work lists shared by every level of the recursive walk
  vector<ID_T> active;
  vector<ID_T> partInteract;
  vector<ID_T> nodeInteract;
  active.push_back(root);

  // Quick walk of tree accepts nodes that do not touch target node
  walkTreeBarnesQuick(&active, &partInteract, &nodeInteract,
                      root, bhAngle, critRadius);
}
/////////////////////////////////////////////////////////////////////////
//
// Walk the BH tree for the given particle or node (identifier curId)
// Recursion starts with a new active list which will contain particles
// and nodes which possibly will contribute to the force on a particle.
// Particles on the active list will always be chosen for the interact list.
// Nodes on the active list may be OPENED if they are close enough
// or ACCEPTED and used in summary if they are not. Nodes that are opened
// have all their children (particles or nodes) added to the active list.
//
// After the children are added a new level of recursion starts by
// calculating a new size for that level, starting a fresh active list
// and building on the current interact lists.
//
// Recursion continues until the active list has been completely processed.
// When a level of recursion is complete the active list is destroyed
// and new items put on the interact lists are popped off.
//
// The advantage to this method is that items in the interaction list may
// not need to be processed again when we are doing the low levels of the tree.
//
/////////////////////////////////////////////////////////////////////////
void BHForceTree::walkTreeBarnesQuick(
  vector<ID_T>* curActive,      // nodes to be acted on
  vector<ID_T>* partInteract,   // particles for force
  vector<ID_T>* nodeInteract,   // nodes for force
  ID_T curId,                   // current particle or node
  POSVEL_T bhAngle,             // open node
  POSVEL_T critRadius)          // accept or ignore
{
  POSVEL_T dx, dy, dz, r, partRadius, distToNearPoint;

  // Current active list window (snapshot of size before any appends)
  int begIndx = 0;
  int endIndx = curActive->size();

  // Construct active list for each recursion
  vector<ID_T>* newActive = new vector<ID_T>;

  // Set the location for the particle or node for the walk:
  // particles use their coordinates, nodes use their center of mass
  POSVEL_T pos_x, pos_y, pos_z;
  if (curId < this->nodeOffset) {
    pos_x = this->xx[curId];
    pos_y = this->yy[curId];
    pos_z = this->zz[curId];
  } else {
    FNode* curNode = &this->fNode[curId - this->nodeOffset];
    pos_x = curNode->u.n.partCenter[0];
    pos_y = curNode->u.n.partCenter[1];
    pos_z = curNode->u.n.partCenter[2];
  }

  /////////////////////////////////////////////////////////////////////////
  //
  // Process the active list window adding children to end of list
  // Valid particles and accepted nodes are copied to the interact list
  //
  // hasChildren is set whenever any cell appears in the window (even if
  // it is then ignored); pcount/ncount record how many entries this level
  // appended to the interact lists so they can be popped before returning
  int hasChildren = 0;
  int pcount = 0;
  int ncount = 0;

  for (int indx = begIndx; indx < endIndx; indx++) {
    // If the current active element is a cell it will be
    //   ACCEPTED and copied to the interact list
    //   OPENED and its children will be added to the end of the active list
    //   IGNORED because it is too far away
    if ((*curActive)[indx] >= this->nodeOffset) {
      hasChildren = 1;
      FNode* actNode = &this->fNode[(*curActive)[indx] - this->nodeOffset];
      partRadius = actNode->u.n.partRadius;
      distToNearPoint = distanceToNearestPoint(pos_x, pos_y, pos_z, actNode);
      dx = actNode->u.n.partCenter[0] - pos_x;
      dy = actNode->u.n.partCenter[1] - pos_y;
      dz = actNode->u.n.partCenter[2] - pos_z;
      r = sqrt(dx * dx + dy * dy + dz * dz);

      // Node is ignored if it is too far away from the particle
      // Distance from particle to particle radius exceeds critical radius
      // Distance from particle to nearest side of node exceeds critical radius
      if ((r - partRadius) > critRadius || distToNearPoint > critRadius) {
        // Ignore node, move on to sibling of this node
      }
      else {
        if (2*partRadius > (r * bhAngle)) {
          // Open node, move on to first child
          ID_T child =
            this->fNode[(*curActive)[indx] - this->nodeOffset].u.n.nextNode;
          while (child != -1) {
            if (child >= this->nodeOffset) {
              // Child is a node which is active and must be considered
              newActive->push_back(child);
              child = this->fNode[child - this->nodeOffset].u.n.sibling;
            }
            else {
              // Child is a particle, add to interaction list
              partInteract->push_back(child);
              pcount++;
              child = this->fParticle[child].sibling;
            }
          }
        } else {
          // Accept node, add to interact list, move on to sibling
          nodeInteract->push_back((*curActive)[indx]);
          ncount++;
        }
      }
    }
  }

  /////////////////////////////////////////////////////////////////////////
  //
  // At this point a new level of children may have been added to active list
  // Process children by dividing the node size and recursing
  //
  if (hasChildren) {
    // Current item on active list is a cell
    if (curId >= this->nodeOffset) {
      // Process each child
      ID_T child = fNode[curId - this->nodeOffset].u.n.nextNode;
      while (child != -1) {
        // Child is a node
        if (child >= this->nodeOffset) {
          FNode* childNode = &this->fNode[child - this->nodeOffset];
          // Recurse on walk tree to process child
          walkTreeBarnesQuick(newActive, partInteract, nodeInteract,
                              child, bhAngle, critRadius);
          child = childNode->u.n.sibling;
        }
        // Child is a particle
        else {
          walkTreeBarnesQuick(newActive, partInteract, nodeInteract,
                              child, bhAngle, critRadius);
          child = this->fParticle[child].sibling;
        }
      }
    }
    // Current item on active list is a particle
    else {
      walkTreeBarnesQuick(newActive, partInteract, nodeInteract,
                          curId, bhAngle, critRadius);
    }
  }

  /////////////////////////////////////////////////////////////////////////
  //
  // If no new items were added to active list we are done and can process
  // the interact list for this particle p (which can't be a cell)
  //
  else {
    // NOTE(review): nodes satisfy id >= nodeOffset; this check uses '>'
    // so a node with id == nodeOffset would escape the error report --
    // confirm whether '>=' was intended
    if (curId > this->nodeOffset)
      cout << "ERROR: POP OUT ON NODE " << curId << endl;
    this->fParticle[curId].force =
      forceCalculation(curId, partInteract, nodeInteract);
  }

  // Active list is new for every recursion level
  // Interact lists are appended to at each recursion level
  // So interact lists must be popped by the correct number for this recursion
  for (int i = 0; i < pcount; i++)
    partInteract->pop_back();
  for (int i = 0; i < ncount; i++)
    nodeInteract->pop_back();

  delete newActive;
}
/////////////////////////////////////////////////////////////////////////
//
// Short range force calculation.
// The contribution of every interacting particle (location and mass of
// the individual particle) and every accepted node (center of mass and
// total mass summary) is folded into the target particle's velocity,
// and the accumulated value of -sum(f_over_r) is returned; callers store
// it as fParticle[p0].force.
//
// Fix: the previous version also accumulated an accel[DIMENSION] array
// that was never read (3 multiplies + 3 adds of dead work per
// interaction); the dead accumulation and commented-out debug code are
// removed. The phi and velocity updates are unchanged.
//
/////////////////////////////////////////////////////////////////////////
POSVEL_T BHForceTree::forceCalculation(
  ID_T p0,                      // Target particle index
  vector<ID_T>* partInteract,   // Particles acting on p0
  vector<ID_T>* nodeInteract)   // Nodes acting on p0
{
  POSVEL_T phi = 0.0;
  POSVEL_T pos0_x = this->xx[p0];
  POSVEL_T pos0_y = this->yy[p0];
  POSVEL_T pos0_z = this->zz[p0];

  int numberOfNodes = (int) nodeInteract->size();
  int numberOfParticles = (int) partInteract->size();

  // Particles contributing to the force use location and mass of one particle
  for (int p = 0; p < numberOfParticles; p++) {
    ID_T particle = (*partInteract)[p];
    if (p0 != particle) {       // never self-interact
      POSVEL_T dx = this->xx[particle] - pos0_x;
      POSVEL_T dy = this->yy[particle] - pos0_y;
      POSVEL_T dz = this->zz[particle] - pos0_z;
      POSVEL_T r2 = dx * dx + dy * dy + dz * dz;

      // Short-range kernel lookup f(r)/r, scaled by the source mass
      POSVEL_T f_over_r = this->mass[particle] * m_fl->f_over_r(r2);
      phi -= f_over_r;

      // Apply the kick for this interaction directly to the velocity
      this->vx[p0] += dx * f_over_r * m_fcoeff;
      this->vy[p0] += dy * f_over_r * m_fcoeff;
      this->vz[p0] += dz * f_over_r * m_fcoeff;
    }
  }

  // Nodes contributing to force use center of mass and total particle mass
  for (int n = 0; n < numberOfNodes; n++) {
    FNode* node = &this->fNode[(*nodeInteract)[n] - this->nodeOffset];
    POSVEL_T dx = node->u.n.partCenter[0] - pos0_x;
    POSVEL_T dy = node->u.n.partCenter[1] - pos0_y;
    POSVEL_T dz = node->u.n.partCenter[2] - pos0_z;
    POSVEL_T r2 = dx * dx + dy * dy + dz * dz;

    POSVEL_T f_over_r = node->u.n.partMass * m_fl->f_over_r(r2);
    phi -= f_over_r;

    this->vx[p0] += dx * f_over_r * m_fcoeff;
    this->vy[p0] += dy * f_over_r * m_fcoeff;
    this->vz[p0] += dz * f_over_r * m_fcoeff;
  }
  return phi;
}
/////////////////////////////////////////////////////////////////////////
//
// Short range force calculation over flattened interaction lists
// (coordinates and masses already gathered by the tree walk; the caller
// is assumed to have excluded the target particle itself).
// Applies the velocity kick for every interaction and returns the
// accumulated -sum(f_over_r), matching forceCalculation.
//
// Fix: phi was initialized to 0 and returned without ever being
// accumulated, so the caller always stored fParticle[p].force == 0 on
// this path, unlike the forceCalculation path. The accumulation
// (phi -= f_over_r) is added to match; velocity updates are unchanged.
//
/////////////////////////////////////////////////////////////////////////
POSVEL_T BHForceTree::forceCalculationFast(
  ID_T p0,                        // Target particle index
  vector<POSVEL_T>* xInteract,    // x of interacting particles/nodes
  vector<POSVEL_T>* yInteract,    // y of interacting particles/nodes
  vector<POSVEL_T>* zInteract,    // z of interacting particles/nodes
  vector<POSVEL_T>* mInteract)    // mass of interacting particles/nodes
{
  POSVEL_T phi = 0.0;
  POSVEL_T pos0_x = this->xx[p0];
  POSVEL_T pos0_y = this->yy[p0];
  POSVEL_T pos0_z = this->zz[p0];

  int nInteract = (int) xInteract->size();
  for (int p = 0; p < nInteract; p++) {
    POSVEL_T dx = (*xInteract)[p] - pos0_x;
    POSVEL_T dy = (*yInteract)[p] - pos0_y;
    POSVEL_T dz = (*zInteract)[p] - pos0_z;
    POSVEL_T r2 = dx * dx + dy * dy + dz * dz;

    // Short-range kernel lookup f(r)/r, scaled by the source mass
    POSVEL_T f_over_r = (*mInteract)[p] * m_fl->f_over_r(r2);
    phi -= f_over_r;

    this->vx[p0] += dx * f_over_r * m_fcoeff;
    this->vy[p0] += dy * f_over_r * m_fcoeff;
    this->vz[p0] += dz * f_over_r * m_fcoeff;
  }
  return phi;
}
/////////////////////////////////////////////////////////////////////////
//
// Return the distance (not distance^2 -- sqrt is applied below) from the
// given location to the closest point on the FNode's bounding box.
// Returns 0 when the location is inside the box.
//
/////////////////////////////////////////////////////////////////////////
POSVEL_T BHForceTree::distanceToNearestPoint(
  POSVEL_T pos_x,
  POSVEL_T pos_y,
  POSVEL_T pos_z,
  FNode* node)
{
  // Calculate bounding box of current node
  // Nearest point in bounding box decides whether particle or node is used
  POSVEL_T dx, dy, dz, r;
  POSVEL_T minBound[DIMENSION], maxBound[DIMENSION];
  for (int dim = 0; dim < DIMENSION; dim++) {
    minBound[dim] = node->geoCenter[dim] - (node->geoSide[dim] * 0.5);
    maxBound[dim] = node->geoCenter[dim] + (node->geoSide[dim] * 0.5);
  }

  // Per-axis distance to the box: zero when the coordinate lies inside
  if (pos_x < minBound[0])
    dx = minBound[0] - pos_x;
  else if (pos_x > maxBound[0])
    dx = pos_x - maxBound[0];
  else
    dx = 0.0;

  if (pos_y < minBound[1])
    dy = minBound[1] - pos_y;
  else if (pos_y > maxBound[1])
    dy = pos_y - maxBound[1];
  else
    dy = 0.0;

  if (pos_z < minBound[2])
    dz = minBound[2] - pos_z;
  else if (pos_z > maxBound[2])
    dz = pos_z - maxBound[2];
  else
    dz = 0.0;

  r = sqrt(dx * dx + dy * dy + dz * dz);
  return r;
}
/////////////////////////////////////////////////////////////////////////
//
// Return the distance from location to the fNode center of mass
//
/////////////////////////////////////////////////////////////////////////
POSVEL_T BHForceTree::distanceToCenterOfMass(
  POSVEL_T xLoc,
  POSVEL_T yLoc,
  POSVEL_T zLoc,
  FNode* node)
{
  // Component offsets from the query point to the node's center of mass
  POSVEL_T delta_x = (POSVEL_T) fabs(xLoc - node->u.n.partCenter[0]);
  POSVEL_T delta_y = (POSVEL_T) fabs(yLoc - node->u.n.partCenter[1]);
  POSVEL_T delta_z = (POSVEL_T) fabs(zLoc - node->u.n.partCenter[2]);

  // Euclidean norm of the offset vector
  return sqrt((delta_x * delta_x) + (delta_y * delta_y) + (delta_z * delta_z));
}
/////////////////////////////////////////////////////////////////////////
//
// Return the distance from location to the fNode furthest corner
//
/////////////////////////////////////////////////////////////////////////
POSVEL_T BHForceTree::distanceToFarCorner(
  POSVEL_T xLoc,
  POSVEL_T yLoc,
  POSVEL_T zLoc,
  FNode* node)
{
  POSVEL_T farthest = 0.0;
  POSVEL_T corner[DIMENSION];

  // Enumerate all 8 corners with a 3-bit mask: bit d selects the +/- half
  // side along dimension d
  for (int mask = 0; mask < 8; mask++) {
    for (int dim = 0; dim < DIMENSION; dim++) {
      double sign = ((mask >> dim) & 1) ? 1.0 : -1.0;
      corner[dim] = node->geoCenter[dim] + (sign * (node->geoSide[dim] * 0.5));
    }
    POSVEL_T delta_x = (POSVEL_T) fabs(xLoc - corner[0]);
    POSVEL_T delta_y = (POSVEL_T) fabs(yLoc - corner[1]);
    POSVEL_T delta_z = (POSVEL_T) fabs(zLoc - corner[2]);
    POSVEL_T cornerDist =
      sqrt((delta_x * delta_x) + (delta_y * delta_y) + (delta_z * delta_z));

    // Keep the maximum over all corners
    if (cornerDist > farthest)
      farthest = cornerDist;
  }
  return farthest;
}
/////////////////////////////////////////////////////////////////////////
//
// Return the distance from location to the fNode nearest corner
//
/////////////////////////////////////////////////////////////////////////
POSVEL_T BHForceTree::distanceToNearCorner(
  POSVEL_T xLoc,
  POSVEL_T yLoc,
  POSVEL_T zLoc,
  FNode* node)
{
  POSVEL_T nearest = MAX_FLOAT;
  POSVEL_T corner[DIMENSION];

  // Enumerate all 8 corners with a 3-bit mask: bit d selects the +/- half
  // side along dimension d
  for (int mask = 0; mask < 8; mask++) {
    for (int dim = 0; dim < DIMENSION; dim++) {
      double sign = ((mask >> dim) & 1) ? 1.0 : -1.0;
      corner[dim] = node->geoCenter[dim] + (sign * (node->geoSide[dim] * 0.5));
    }
    POSVEL_T delta_x = (POSVEL_T) fabs(xLoc - corner[0]);
    POSVEL_T delta_y = (POSVEL_T) fabs(yLoc - corner[1]);
    POSVEL_T delta_z = (POSVEL_T) fabs(zLoc - corner[2]);
    POSVEL_T cornerDist =
      sqrt((delta_x * delta_x) + (delta_y * delta_y) + (delta_z * delta_z));

    // Keep the minimum over all corners
    if (cornerDist < nearest)
      nearest = cornerDist;
  }
  return nearest;
}
/////////////////////////////////////////////////////////////////////////
//
// Print BH tree with indentations indicating levels
// Since the tree has been threaded changing the recursive tree with children
// into an iterative tree with next nodes and parents, walk the tree
// iteratively keeping track of parents to indicate when levels change
//
/////////////////////////////////////////////////////////////////////////
void BHForceTree::printBHForceTree()
{
  // Start at the root node (first node after all particle ids)
  ID_T curIndex = this->nodeOffset;

  // Stack of ancestor ids; depth of the stack gives the print indentation
  vector<ID_T> parents;
  parents.push_back(-1);
  ID_T parentIndex = 0;

  while (curIndex != -1) {
    // Get the parent of the current index
    ID_T parent;
    if (curIndex >= this->nodeOffset)
      parent = this->fNode[curIndex - this->nodeOffset].u.n.parent;
    else
      parent = this->fParticle[curIndex].parent;

    // Pop the stack of parents until the level is right
    while (parent != parents[parentIndex]) {
      parents.pop_back();
      parentIndex--;
    }

    // Print FNode: threading links, geometric bounds per dimension,
    // center of mass, total mass and particle radius
    if (curIndex >= this->nodeOffset) {
      FNode* fn = &this->fNode[curIndex-this->nodeOffset];
      cout << parentIndex << ":" << setw(parentIndex) << " ";
      cout << "N " << curIndex
           << " sibling " << fn->u.n.sibling
           << " next " << fn->u.n.nextNode
           << " parent " << fn->u.n.parent
           << " [" << (fn->geoCenter[0]-fn->geoSide[0]/2.0)
           << ":" << (fn->geoCenter[0]+fn->geoSide[0]/2.0) << "] "
           << " [" << (fn->geoCenter[1]-fn->geoSide[1]/2.0)
           << ":" << (fn->geoCenter[1]+fn->geoSide[1]/2.0) << "] "
           << " [" << (fn->geoCenter[2]-fn->geoSide[2]/2.0)
           << ":" << (fn->geoCenter[2]+fn->geoSide[2]/2.0) << "] "
           << " (" << fn->u.n.partCenter[0]
           << " ," << fn->u.n.partCenter[1]
           << " ," << fn->u.n.partCenter[2]
           << ") MASS " << fn->u.n.partMass
           << " RADIUS " << fn->u.n.partRadius
           << endl;

      // Push back the new FNode which will have children
      parents.push_back(curIndex);
      parentIndex++;

      // Walk to next node (either particle or node)
      curIndex = this->fNode[curIndex-this->nodeOffset].u.n.nextNode;
    }
    // Print FParticle: threading links and coordinates
    else {
      cout << parentIndex << ":" << setw(parentIndex) << " ";
      cout << "P " << curIndex
           << " sibling " << this->fParticle[curIndex].sibling
           << " next " << this->fParticle[curIndex].nextNode
           << " parent " << this->fParticle[curIndex].parent
           << " (" << xx[curIndex]
           << " ," << yy[curIndex]
           << " ," << zz[curIndex] << ")" << endl;

      // Walk to next node (either particle or node)
      curIndex = this->fParticle[curIndex].nextNode;
    }
  }
}
/////////////////////////////////////////////////////////////////////////
//
// Print the force values for comparison
//
/////////////////////////////////////////////////////////////////////////
void BHForceTree::printForceValues()
{
  // Emit one line per particle with its stored short-range force value
  int numParticles = this->particleCount;
  for (int indx = 0; indx < numParticles; indx++) {
    cout << "Particle: " << setw(8) << indx
         << " force " << this->fParticle[indx].force << endl;
  }
}
/////////////////////////////////////////////////////////////////////////
//
// Get the index of the child which should contain this particle
//
/////////////////////////////////////////////////////////////////////////
int BHForceTree::getChildIndex(FNode* node, ID_T pindx)
{
  // Octant encoding relative to the node's geometric center:
  // x contributes bit 2, y bit 1, z bit 0, so the z dimension
  // varies fastest among the eight children
  int child = 0;
  if (this->xx[pindx] >= node->geoCenter[0])
    child |= 4;
  if (this->yy[pindx] >= node->geoCenter[1])
    child |= 2;
  if (this->zz[pindx] >= node->geoCenter[2])
    child |= 1;
  return child;
}
|
package com.fillmore_labs.avro_demo.confluent;
import com.google.common.graph.Graph;
import com.google.common.graph.GraphBuilder;
import com.google.common.graph.ImmutableGraph;
import io.confluent.kafka.schemaregistry.CompatibilityChecker;
import io.confluent.kafka.schemaregistry.CompatibilityLevel;
import io.confluent.kafka.schemaregistry.avro.AvroSchema;
import io.confluent.kafka.serializers.KafkaAvroDeserializer;
import java.io.PrintStream;
import java.nio.ByteBuffer;
import java.util.List;
import java.util.Map;
import org.apache.avro.Schema;
import org.apache.avro.SchemaValidationException;
import org.apache.avro.SchemaValidator;
import org.apache.avro.SchemaValidatorBuilder;
/**
 * Utilities for computing and reporting Avro schema read-compatibility across a set of named
 * schemas, using both Avro's own "can read" validator and Confluent's compatibility checker.
 */
public final class CompatibilityHelper {
  /** Non-instantiable utility class. */
  private CompatibilityHelper() {}

  /**
   * Builds a directed graph over the schema names where an edge writer -&gt; reader means data
   * written with the writer's schema can be read with the reader's schema according to Avro's
   * canRead rule. Self-loops are allowed, so every schema that can read itself has one.
   *
   * @param schemaMap schemas keyed by display name
   * @return immutable compatibility graph whose nodes are the map's keys
   */
  public static ImmutableGraph<String> calculateCompatibility(Map<String, Schema> schemaMap) {
    var graphBuilder = GraphBuilder.directed().allowsSelfLoops(true).<String>immutable();
    var validator = new CanReadValidator();
    // Check every ordered (writer, reader) pair, including writer == reader.
    for (var writer : schemaMap.entrySet()) {
      var writerSchema = writer.getValue();
      for (var reader : schemaMap.entrySet()) {
        var readerSchema = reader.getValue();
        if (validator.canRead(readerSchema, writerSchema)) {
          graphBuilder.putEdge(writer.getKey(), reader.getKey());
        }
      }
    }
    return graphBuilder.build();
  }

  /**
   * For every writer -&gt; reader edge in {@code graph}, prints the payload decoded with the
   * writer's schema and re-decoded with the reader's schema, and flags pairs that Confluent's
   * FORWARD compatibility check rejects even though Avro accepted them.
   *
   * @param out destination stream for the report
   * @param graph compatibility graph, as produced by {@link #calculateCompatibility(Map)}
   * @param schemaMap schemas keyed by the same names used in the graph
   * @param encoded encoded payloads keyed by writer name; writers with no payload are skipped
   * @param config deserializer configuration (e.g. schema registry settings)
   */
  public static void logCompatible(
      PrintStream out,
      Graph<String> graph,
      Map<String, Schema> schemaMap,
      Map<String, ByteBuffer> encoded,
      Map<String, ?> config) {
    var checker = CompatibilityChecker.checker(CompatibilityLevel.FORWARD);
    for (var writer : graph.nodes().stream().sorted().toList()) {
      var buffer = encoded.get(writer);
      // NOTE(review): arrayOffset() is never negative by contract, so the last
      // condition is always true when hasArray() holds — possibly meant == 0.
      if (buffer != null && buffer.hasArray() && buffer.arrayOffset() >= 0) {
        try (var deserializer = new KafkaAvroDeserializer()) {
          deserializer.configure(config, /* isKey= */ false);
          var writerSchema = schemaMap.get(writer);
          var parsedWriterSchema = new AvroSchema(writerSchema);
          var generic = deserializer.deserialize("TOPIC", buffer.array());
          out.println("---");
          for (var reader : graph.successors(writer).stream().sorted().toList()) {
            var readerSchema = schemaMap.get(reader);
            var decoded = deserializer.deserialize("TOPIC", buffer.array(), readerSchema);
            var parsedReaderSchema = new AvroSchema(readerSchema);
            var violations = checker.isCompatible(parsedWriterSchema, List.of(parsedReaderSchema));
            out.printf(
                "%s can be read as %s: %s -> %s%s%n",
                writer,
                reader,
                generic,
                decoded,
                violations.isEmpty() ? "" : " (Confluent incompatible)");
          }
        }
      }
    }
  }

  /** Thin wrapper turning Avro's exception-based canRead validation into a boolean. */
  private static final class CanReadValidator {
    private final SchemaValidator validator =
        new SchemaValidatorBuilder().canReadStrategy().validateAll();

    /**
     * Returns whether {@code toValidate} can read data written with {@code existing}.
     * The validator signals incompatibility by throwing, which is mapped to {@code false}.
     */
    public boolean canRead(Schema toValidate, Schema existing) {
      try {
        validator.validate(toValidate, List.of(existing));
        return true;
      } catch (SchemaValidationException e) {
        return false;
      }
    }
  }
}
|
<reponame>abkl/csim
import { mean, sampleStandardDeviation } from "simple-statistics"
import { fields, gameValues } from "../src/utils/game-constants"
// Types:
interface TotalStats {
totalMeans: {
[s: string]: number
}
totalStdDeviations: {
[s: string]: number
}
}
type GameItem = "Cargo" | "Hatch Panel"
export interface ScoresObj {
"Robot Starting Platform": "level 1" | "level 2"
"Leave Habitat": "TRUE" | "FALSE"
"Habitat Return": "Did not return" | "level 1" | "level 2" | "level 3"
"Team Number": string
"Match Number": string
"Your Name": string
[s: string]: string
}
export interface Score {
Cargo: number
"Hatch Panel": number
"Sandstorm Cross": number
"Habitat Return": number
"Total Score": number
"Team Number": string
[s: string]: number | string
}
export interface MatchData {
"Team Number": string
"Match Number": string
[s: string]: any
}
interface Stats {
"Mean Number of Balls": { mean: number; standardDeviation: number }
"Mean Number of Rings": { mean: number; standardDeviation: number }
"Points Scored": { mean: number; standardDeviation: number }
[key: string]: { mean: number; standardDeviation: number }
}
export interface Teams {
[s: string]: {
matchData: MatchData[]
stats: Stats | {}
}
}
export interface TeamCollection {
originalScores: Score[] | []
teams: Teams
}
// Convert Sheets data into Collection of Teams Objects Functions:
/**
 * Converts raw Sheets rows into an array of records: the first row supplies
 * the keys, each following row supplies the values.
 * Throws when there is no header row plus at least one data row.
 */
export const sheetsDataToJSON = (sheetsData: any[][]) => {
  if (sheetsData.length < 2) {
    throw Error("invalid data passed!")
  }
  const [header, ...rows] = sheetsData
  return rows.map(cells => {
    const record: { [key: string]: any } = {}
    cells.forEach((cell, index) => {
      record[header[index]] = cell
    })
    return record
  })
}
// Transform Match Data into total game Items and calculate total scores functions:
/**
 * Returns the data keys that mention the given game item, partitioned by
 * whether they count dropped items (default: the non-dropped keys).
 */
export const getGameItemKeyFromMatchDataKey = (
  keys: string[],
  gameItem: GameItem,
  dropped?: boolean
) => {
  const mentionsItem = (key: string) => RegExp(gameItem).test(key)
  const isDropped = (key: string) => /dropped/.test(key)
  return dropped
    ? keys.filter(key => mentionsItem(key) && isDropped(key))
    : keys.filter(key => mentionsItem(key) && !isDropped(key))
}
/**
 * Computes one match's score breakdown from a raw scouting row.
 * Cargo/Hatch points are the sum of the non-dropped counts times each item's
 * point value from gameValues.
 */
export const calculateScore = (data: ScoresObj): Score => {
  const scoreCargo =
    getGameItemKeyFromMatchDataKey(Object.keys(data), "Cargo")
      .map(key => data[key])
      .reduce((pv: number, cv) => pv + parseInt(cv, 10), 0) * gameValues.Cargo
  const scoreHatchPanel =
    getGameItemKeyFromMatchDataKey(Object.keys(data), "Hatch Panel")
      .map(key => data[key])
      .reduce((pv: number, cv) => pv + parseInt(cv, 10), 0) *
    gameValues["Hatch Panel"]
  // Habitat return points are looked up by the reported level; anything
  // unrecognized (e.g. "Did not return") scores 0.
  const scoreHabitatReturn =
    // @ts-ignore
    gameValues["Habitat Return"][data["Habitat Return"]] || 0
  // Sandstorm crossing only scores when the robot left the habitat; the value
  // depends on the starting platform level.
  const scoreSandstormCross =
    // @ts-ignore
    data["Leave Habitat"] === "TRUE"
      ? gameValues["Sandstorm Cross"][data["Robot Starting Platform"]]
      : 0
  return {
    "Match Number": data["Match Number"],
    Cargo: scoreCargo,
    "Hatch Panel": scoreHatchPanel,
    "Habitat Return": scoreHabitatReturn,
    "Sandstorm Cross": scoreSandstormCross,
    // @ts-ignore
    "Team Number": data["Team Number"],
    // Fix: the scouter's name comes from the "Your Name" column (see ScoresObj);
    // the original read a broken "<NAME>" placeholder key.
    "Scouter Name": data["Your Name"],
    "Total Score":
      scoreCargo + scoreHatchPanel + scoreSandstormCross + scoreHabitatReturn,
  }
}
/**
 * Folds raw match rows into a per-team collection: each team accumulates its
 * match rows (stats left empty, filled in later), and every row also
 * contributes a computed Score to originalScores.
 */
export const collectMatchDataIntoTeamObj = (d: MatchData[]): TeamCollection =>
  d.reduce(
    (acc: TeamCollection, matchData) => {
      const teamNumber = matchData["Team Number"]
      const existing = acc.teams[teamNumber]
      // Append to the team's existing matches, or start a new list.
      const teamMatches = existing ? [...existing.matchData, matchData] : [matchData]
      return {
        teams: {
          ...acc.teams,
          [teamNumber]: { matchData: teamMatches, stats: {} },
        },
        originalScores: [
          ...acc.originalScores,
          calculateScore(matchData as ScoresObj),
        ],
      }
    },
    {
      teams: {},
      originalScores: [],
    }
  )
/**
 * Computes the mean and sample standard deviation of every score column
 * across all matches. Empty inputs report 0 for both statistics (preserving
 * the original guards) instead of calling the statistics functions.
 *
 * Refactor: the original repeated the collect/guard pattern five times per
 * statistic; this iterates over the score keys once.
 */
export const calculateTotalStats = (scores: Score[]): TotalStats => {
  // The score columns being aggregated, matching the Score interface.
  const scoreKeys = [
    "Cargo",
    "Hatch Panel",
    "Sandstorm Cross",
    "Habitat Return",
    "Total Score",
  ]
  // Collect each column's values across all matches.
  const valuesByKey: { [key: string]: number[] } = {}
  for (const key of scoreKeys) {
    valuesByKey[key] = scores.map(score => score[key] as number)
  }
  // Apply a statistic only to non-empty samples; otherwise report 0.
  const safeStat = (values: number[], stat: (xs: number[]) => number) =>
    values.length > 0 ? stat(values) : 0
  const totalMeans: { [s: string]: number } = {}
  const totalStdDeviations: { [s: string]: number } = {}
  for (const key of scoreKeys) {
    totalMeans[key] = safeStat(valuesByKey[key], mean)
    totalStdDeviations[key] = safeStat(valuesByKey[key], sampleStandardDeviation)
  }
  return { totalMeans, totalStdDeviations }
}
|
'use strict';
const fs = require('fs');
// Persist `data` to `file` as pretty-printed JSON (2-space indent keeps the
// file human-editable).
let write = (file, data) => {
  const serialized = JSON.stringify(data, null, 2);
  return fs.writeFileSync(file, serialized, 'utf8');
}
module.exports = (file) => {
if (!file) { throw new Error("the file name is required"); }
let data;
try {
data = JSON.parse(fs.readFileSync(file, 'utf-8'));
} catch (e) {
data = {}
}
let get = (key, def) => {
return new Promise((resolve, reject) => {
if (def && !data[key]) {
return resolve(def)
}
return resolve(data[key]);
})
};
let set = (key, value) => {
return new Promise((resolve, reject) => {
data[key] = value;
try {
write(file, data);
return resolve(value);
} catch (e) {
return reject(e);
}
})
}
return {
get: get,
set: set
}
}
|
from django.db import models
class Bb(models.Model):
    # A simple board entry: a price, a short comment, and a publication date.
    # NOTE(review): "ammount" looks like a typo for "amount"; renaming it would
    # require a database migration, so it is only flagged here.
    ammount = models.FloatField(
        null=True,
        blank=True,
        verbose_name='Цена')  # verbose_name: "Price"
    comment = models.CharField(
        max_length=50,
        verbose_name='Коментарий')  # verbose_name: "Comment"
    published = models.DateField(
        auto_now_add=False,
        db_index=False,
        verbose_name='Опубликовано')  # verbose_name: "Published"
    def __str__(self):
        # Entries display as their comment text (admin lists, shells, etc.).
        return self.comment
    class Meta:
        # Newest entries first.
        ordering = ['-published']
        verbose_name_plural = 'Записи'  # "Entries"
        verbose_name = 'Запись'  # "Entry"
|
<gh_stars>1-10
require_relative '../lib/interface_data_control.rb'
# Guard-rail specs for InterfaceDataControl: constructor arity and the
# Errno::ENOENT raised when data methods run without a league configured.
describe InterfaceDataControl do
  let(:new_class) { InterfaceDataControl.new }
  it 'Raises argument error when less than two arguments are given' do
    expect { InterfaceDataControl.new('value') }.to raise_error(ArgumentError)
  end
  it 'Raises argument error when league_data_options is not given an argument' do
    expect { new_class.league_data_options }.to raise_error(ArgumentError)
  end
  it 'Raises no file directory error when suggest_best_xi called when the league variable is not given' do
    expect { new_class.suggest_best_xi }.to raise_error(Errno::ENOENT)
  end
  it 'Raises no file directory error when gets_league_hash called when the league variable is not given' do
    expect { new_class.gets_league_hash }.to raise_error(Errno::ENOENT)
  end
  it 'Raises no file directory error when gets_team_hash called when the league variable is not given' do
    expect { new_class.gets_team_hash }.to raise_error(Errno::ENOENT)
  end
  # NOTE(review): this context says "no argument is given" but the example
  # passes three arguments — the descriptions of these two contexts look
  # swapped; verify intent before renaming.
  context 'When no argument is given' do
    it 'Raises argument error when more than two arguments are given' do
      expect { new_class.gets_player_hash('a', 'b', 'c') }.to raise_error(ArgumentError)
    end
  end
  context 'When argument is given' do
    it 'Raises no file directory error when gets_player_hash called when the league variable is not given' do
      expect { new_class.gets_player_hash }.to raise_error(Errno::ENOENT)
    end
  end
end
|
class BankAccount:
    """A minimal in-memory account tracking a balance and a transaction count.

    Note: no validation is performed — negative amounts and overdrafts are
    accepted, matching the original behavior.
    """

    def __init__(self):
        self.balance = 0            # current balance
        self.transaction_count = 0  # total deposits + withdrawals

    def deposit(self, amount):
        """Add ``amount`` to the balance and record one transaction."""
        self._apply(amount)

    def withdraw(self, amount):
        """Subtract ``amount`` from the balance and record one transaction."""
        self._apply(-amount)

    def _apply(self, delta):
        # Shared bookkeeping for both transaction kinds.
        self.balance += delta
        self.transaction_count += 1

    def get_balance(self):
        """Return the current balance."""
        return self.balance

    def get_transaction_count(self):
        """Return how many deposits/withdrawals have occurred."""
        return self.transaction_count
# Test the BankAccount class
# Smoke-test: two deposits and one withdrawal should net 700 over 3 transactions.
account = BankAccount()
account.deposit(1000)
account.withdraw(500)
account.deposit(200)
print(account.get_balance())  # Output: 700
print(account.get_transaction_count())  # Output: 3
<reponame>PW486/leetcode
-- Countries that are "big": area over 3,000,000 or population over 25,000,000.
-- Fix: the original filtered on "w.poplution", a misspelling of the
-- "population" column selected on the first line.
SELECT w.name, w.population, w.area
FROM World w
WHERE w.area > 3000000 OR w.population > 25000000;
|
#include <iostream>
using namespace std;
int main() {
    // Read pairs until EOF; print "PASS!" when b is at least 7 greater than a
    // (presumably a 7-day grace period — inferred from the constant).
    int a, b;
    while (cin >> a >> b) {
        const bool pass = (b >= a + 7);
        cout << (pass ? "PASS!" : "FAIL! You see see you!") << endl;
    }
    return 0;
}
<filename>src/components/MenuLinks/index.tsx<gh_stars>0
import React from 'react'
import * as S from './styled'
import getThemeColor from "../../utils/getThemeColor"
import links from './content'
// Renders the site navigation list; each entry from `content` becomes a
// page-transition link tinted with the current theme color.
const MenuLinks = () => (
  <S.MenuLinksWrapper>
    <S.MenuLinksList>
      {links.map((link, index) => (
        <S.MenuLinksItem key={index}>
          <S.MenuLinksLink
            cover
            direction="up"
            bg={getThemeColor()}
            duration={0.5}
            to={link.url}
            activeClassName="active"
          >
            {link.Label}
          </S.MenuLinksLink>
        </S.MenuLinksItem>
      ))}
    </S.MenuLinksList>
  </S.MenuLinksWrapper>
)

export default MenuLinks
<reponame>siklu/mina-sshd
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.sshd.server.auth.password;
import org.apache.sshd.common.RuntimeSshException;
import org.apache.sshd.common.SshConstants;
import org.apache.sshd.common.auth.UserAuthMethodFactory;
import org.apache.sshd.common.util.GenericUtils;
import org.apache.sshd.common.util.ValidateUtils;
import org.apache.sshd.common.util.buffer.Buffer;
import org.apache.sshd.server.auth.AbstractUserAuth;
import org.apache.sshd.server.session.ServerSession;
/**
* Implements the server-side "password" authentication mechanism
*
* @author <a href="mailto:<EMAIL>">Apache MINA SSHD Project</a>
*/
public class UserAuthPassword extends AbstractUserAuth {
public static final String NAME = UserAuthPasswordFactory.NAME;
public UserAuthPassword() {
super(NAME);
}
@Override
public Boolean doAuth(Buffer buffer, boolean init) throws Exception {
ValidateUtils.checkTrue(init, "Instance not initialized");
ServerSession session = getServerSession();
if (!UserAuthMethodFactory.isSecureAuthenticationTransport(session)) {
if (log.isDebugEnabled()) {
log.debug("doAuth({}) session is not secure", session);
}
return false;
}
String username = getUsername();
boolean newPassword = buffer.getBoolean();
String password = buffer.getString();
if (newPassword) {
return handleClientPasswordChangeRequest(
buffer, session, username, password, buffer.getString());
} else {
return checkPassword(buffer, session, username, password);
}
}
/**
* Invokes the configured {@link PasswordAuthenticator} and returns the result. If
* {@link PasswordChangeRequiredException} thrown by the authenticator then
* {@link #handleServerPasswordChangeRequest(Buffer, ServerSession, String, String, PasswordChangeRequiredException)}
* is invoked
*
* @param buffer The received {@link Buffer} to be re-used if need to send a password change request
* @param session The {@link ServerSession} through which the request was received
* @param username The username
* @param password The password
* @return The authentication result - if {@code null} then exception was handled internally and
* authentication is still in progress
* @throws Exception If internal error during authentication (exception for {@link PasswordChangeRequiredException}
* which is handled internally)
* @see #handleServerPasswordChangeRequest(Buffer, ServerSession, String, String,
* PasswordChangeRequiredException)
*/
protected Boolean checkPassword(
Buffer buffer, ServerSession session, String username, String password)
throws Exception {
boolean debugEnabled = log.isDebugEnabled();
PasswordAuthenticator auth = session.getPasswordAuthenticator();
if (auth == null) {
if (debugEnabled) {
log.debug("checkPassword({}) no password authenticator", session);
}
return false;
}
try {
boolean authed;
try {
authed = auth.authenticate(username, password, session);
} catch (Error e) {
warn("checkPassword({}) failed ({}) to consult authenticator: {}",
session, e.getClass().getSimpleName(), e.getMessage(), e);
throw new RuntimeSshException(e);
}
if (debugEnabled) {
log.debug("checkPassword({}) authentication result: {}", session, authed);
}
return authed;
} catch (PasswordChangeRequiredException e) {
if (debugEnabled) {
log.debug("checkPassword({}) password change required: {}", session, e.getMessage());
}
return handleServerPasswordChangeRequest(buffer, session, username, password, e);
}
}
/**
 * Invoked when the client sends a {@code SSH_MSG_USERAUTH_REQUEST} indicating a password change.
 * Refuses the request unless the transport is MAC-protected and a {@link PasswordAuthenticator}
 * is configured; otherwise delegates to the authenticator.
 *
 * @param buffer The {@link Buffer} to re-use in order to respond
 * @param session The associated {@link ServerSession}
 * @param username The username
 * @param oldPassword The current (old) password
 * @param newPassword The requested new password
 * @return Password change and authentication result - {@code null} means authentication incomplete -
 * i.e., handler has sent some extra query.
 * @throws Exception If failed to handle the request.
 */
protected Boolean handleClientPasswordChangeRequest(
        Buffer buffer, ServerSession session, String username, String oldPassword, String newPassword)
        throws Exception {
    boolean debugEnabled = log.isDebugEnabled();
    // Password changes are only accepted over a transport with data-integrity (MAC) protection.
    if (!UserAuthMethodFactory.isDataIntegrityAuthenticationTransport(session)) {
        if (debugEnabled) {
            log.debug("handleClientPasswordChangeRequest({}) session is not validated via MAC", session);
        }
        return false;
    }
    PasswordAuthenticator auth = session.getPasswordAuthenticator();
    if (auth == null) {
        // No authenticator configured - reject rather than accept blindly.
        if (debugEnabled) {
            log.debug("handleClientPasswordChangeRequest({}) no password authenticator", session);
        }
        return false;
    }
    return auth.handleClientPasswordChangeRequest(session, username, oldPassword, newPassword);
}
/**
* Invoked by {@link #checkPassword(Buffer, ServerSession, String, String)} when a
* {@link PasswordChangeRequiredException} was thrown by the authenticator. By default it re-throws the original
* exception.
*
* @param buffer The received {@link Buffer} to be re-used if need to send a password change request
* @param session The {@link ServerSession} through which the request was received
* @param username The username
* @param password The (rejected) password
* @param e The original thrown exception
* @return {@code null} by default to indicate incomplete authentication
* @throws Exception If failed to dispatch the message
*/
protected Boolean handleServerPasswordChangeRequest(
Buffer buffer, ServerSession session, String username, String password, PasswordChangeRequiredException e)
throws Exception {
String prompt = e.getPrompt();
String lang = e.getLanguage();
if (log.isDebugEnabled()) {
log.debug("handlePasswordChangeRequest({}) password change required - prompt={}, lang={}",
session, prompt, lang);
}
buffer = session.createBuffer(SshConstants.SSH_MSG_USERAUTH_PASSWD_CHANGEREQ,
GenericUtils.length(prompt) + GenericUtils.length(lang) + Integer.SIZE);
buffer.putString(prompt);
buffer.putString(lang);
session.writePacket(buffer);
return null; // authentication incomplete
}
}
|
#!/bin/bash
# Container entrypoint: run the shared start-up tasks, then bring up PHP-FPM
# and nginx. nginx runs in the foreground so the container stays alive.
. docker/app_start_up.sh
# Now start our various services...
# Start php-fpm
service php5.6-fpm restart
# Start nginx in foreground. This will keep the docker image open.
nginx -g 'daemon off;'
def update_object(request):
    """Update an existing SomeObject from POSTed form data.

    Redirects to the app index on success; re-renders the form with its
    validation errors otherwise.
    """
    # NOTE(review): assumes 'id' is present in POST and matches an existing
    # row — KeyError/SomeObject.DoesNotExist will propagate otherwise.
    obj = SomeObject.objects.get(id=request.POST['id'])
    form = SomeForm(request.POST, instance=obj)
    if form.is_valid():
        form.save()
        return redirect('app_index')
    else:
        # Fix: the original was missing the closing parenthesis on this call,
        # which is a syntax error.
        return render(request, 'app/update.html', {'form': form})
#!/bin/sh
# Run example.py with this script's own directory as the working directory,
# so relative paths inside the example resolve no matter where the wrapper
# is invoked from.
d=$(cd $(dirname $0) && pwd)
cd $d
python example.py
<filename>zakupki/unzipper.py
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "zakupki.settings")
os.environ["DJANGO_SETTINGS_MODULE"] = "zakupki.settings"
import django
django.setup()
from zakupki.settings import BASE_DIR
import zipfile
import pathlib
def unzip_file(_from_dirname, _to_dirname, filename):
    """Extract a zip archive from ``_from_dirname`` into
    ``BASE_DIR/unzipped/<_to_dirname>/<archive-stem>/``.

    Archives whose target folder already exists are skipped (assumed already
    extracted).
    """
    archive_stem = filename.split('.')[0]
    target = os.path.join(BASE_DIR, 'unzipped', _to_dirname, archive_stem)
    if os.path.exists(target):
        return  # already extracted
    # Fix: use a context manager so the archive handle is closed even when
    # extraction raises (the original leaked the open ZipFile on error).
    with zipfile.ZipFile(os.path.join(_from_dirname, filename), 'r') as zip_ref:
        pathlib.Path(target).mkdir(parents=True, exist_ok=True)
        zip_ref.extractall(target)
# Walk the download area and unzip every archive into its matching
# subdirectory under BASE_DIR/unzipped/.
subdirs = []
# Fix: build the path portably instead of the Windows-only '.\\downloaded'.
for dirname, dirnames, filenames in os.walk(os.path.join('.', 'downloaded')):
    for subdirname in dirnames:
        subdirs.append(subdirname)
    for filename in filenames:
        for _dir in subdirs:
            if _dir in dirname:
                try:
                    unzip_file(dirname, _dir, filename)
                except (zipfile.BadZipFile, OSError):
                    # Skip unreadable/corrupt archives and keep walking.
                    # Fix: the original bare `except` also swallowed
                    # KeyboardInterrupt/SystemExit and programming errors.
                    pass
#!/usr/bin/env bash
# Run squid and e2guardian side by side; `wait -n` returns as soon as either
# background job exits, so the container/supervisor can restart the pair.
set -e
/apps/squid/sbin/squid -NsY -f /apps/squid/conf/squid.conf &
e2guardian -N &
wait -n
import itertools  # Fix: the original fragment used itertools without importing it.


def findAnagrams(word):
    """Return the set of all distinct strings formed by permuting ``word``.

    Note: the number of permutations grows factorially with len(word).
    """
    # permutations() yields tuples of characters; join each back into a
    # string, and the set comprehension deduplicates repeated letters' output.
    return {"".join(perm) for perm in itertools.permutations(word)}
<reponame>kilfu0701/jekyll-mulang
require "jekyll-mulang/version"
require "jekyll-mulang/pager"
require "jekyll-mulang/pagination"
module Jekyll
module MuLang
end
end
|
<gh_stars>1-10
import java.util.ArrayList;
/**
 * Holder for a collection of films and (apparently) their heroes.
 * Both reporting methods are unimplemented placeholders that print an empty line.
 */
public class HeroisDoFilme {
    // NOTE(review): fields use UpperCamelCase and `Heroi` holds HeroisDoFilme
    // elements rather than a hero type — likely unintended, but renaming or
    // retyping these package-visible fields could break external references,
    // so they are only flagged here.
    ArrayList<Filme> Filmes;
    ArrayList<HeroisDoFilme> Heroi;
    public HeroisDoFilme() {
        this.Filmes = new ArrayList<>();
        this.Heroi = new ArrayList<>();
    }
    // Placeholder: no investment total is computed yet.
    public void totalInvestimento() {
        System.out.println("");
    }
    // Placeholder: no film count is computed yet.
    public void qntdFilmes() {
        System.out.println("");
    }
}
|
// CondChan is a sync.Cond with the ability to wait in select statement.
package condchan
import (
"sync"
"sync/atomic"
"unsafe"
)
// CondChan implements a condition variable, a rendezvous point for goroutines waiting for or announcing the occurrence
// of an event.
//
// Each Cond has an associated Locker L (often a *Mutex or *RWMutex),
// which must be held when changing the condition and when calling the Wait method.
//
// A Cond must not be copied after first use.
type CondChan struct {
L sync.Locker
ch chan struct{}
chL sync.RWMutex
noCopy noCopy
checker copyChecker
}
type selectFn func(<-chan struct{})
// New returns a new CondChan with Locker l.
func New(l sync.Locker) *CondChan {
return &CondChan{
L: l,
ch: make(chan struct{}),
chL: sync.RWMutex{},
}
}
// Select atomically unlocks cc.L and executes fn.
// After later resuming execution, Wait locks cc.L before returning.
//
// fn is executed passing channel in to it.
// Passed channel will signal by emitting struct{} or by closing.
// Inside fn should be select statement using passed channel together with the other channels that signals execution continuation.
func (cc *CondChan) Select(fn selectFn) {
cc.checker.check()
cc.chL.RLock()
ch := cc.ch
cc.chL.RUnlock()
cc.L.Unlock()
fn(ch)
cc.L.Lock()
}
// Wait atomically unlocks cc.L and suspends execution of the calling goroutine.
// After later resuming execution, Wait locks cc.L before returning.
// Unlike in other systems, Wait cannot return unless awoken by Broadcast or Signal.
func (cc *CondChan) Wait() {
cc.checker.check()
cc.chL.RLock()
ch := cc.ch
cc.chL.RUnlock()
cc.L.Unlock()
<-ch
cc.L.Lock()
}
// Signal wakes one goroutine waiting on cc, if there is any.
// It is allowed but not required for the caller to hold cc.L during the call.
func (cc *CondChan) Signal() {
	cc.checker.check()
	// Read-lock guards cc.ch against Broadcast swapping in a new channel.
	cc.chL.RLock()
	// Non-blocking send: if no goroutine is currently receiving, the default
	// case drops the signal — mirroring sync.Cond.Signal with no waiters.
	select {
	case cc.ch <- struct{}{}:
	default:
	}
	cc.chL.RUnlock()
}
// Broadcast wakes all goroutines waiting on cc.
// It is allowed but not required for the caller to hold cc.L during the call.
func (cc *CondChan) Broadcast() {
	cc.checker.check()
	// Closing the channel releases every goroutine blocked receiving on it;
	// a fresh channel is swapped in under the write lock so that subsequent
	// waiters block on the new one instead of seeing the closed channel.
	cc.chL.Lock()
	close(cc.ch)
	cc.ch = make(chan struct{})
	cc.chL.Unlock()
}
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// Below code is borrowed from sync.cond ///////////////////////////////////////////////////////////////////////////////
// copyChecker holds back pointer to itself to detect object copying.
type copyChecker uintptr
func (c *copyChecker) check() {
if uintptr(*c) != uintptr(unsafe.Pointer(c)) &&
!atomic.CompareAndSwapUintptr((*uintptr)(c), 0, uintptr(unsafe.Pointer(c))) &&
uintptr(*c) != uintptr(unsafe.Pointer(c)) {
panic("sync.Cond is copied")
}
}
// noCopy may be embedded into structs which must not be copied
// after the first use.
//
// See https://golang.org/issues/8005#issuecomment-190753527
// for details.
type noCopy struct{}
// Lock is a no-op used by -copylocks checker from `go vet`.
func (*noCopy) Lock() {}
func (*noCopy) Unlock() {}
|
'use strict';
const newArray = require('./new-array');
module.exports = (size, fillWith) => {
const fillWithFn = typeof fillWith === 'function' ?
fillWith :
() => fillWith;
return newArray(size, (i, lastRow, rows) => newArray(size, (j, lastElement, row) => fillWithFn(i, j, lastElement, lastRow, row, rows)));
};
|
import re  # Fix: the original fragment used re without importing it.


def camel_to_snake(string):
    """Convert a camelCase/PascalCase identifier to snake_case."""
    # First pass: insert '_' before each capitalized word ("XMLHttp" -> "XML_Http").
    output = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', string)
    # Second pass: split a lowercase/digit followed by a capital, then lowercase all.
    return re.sub('([a-z0-9])([A-Z])', r'\1_\2', output).lower()


# Fix: the original passed the undefined bare name `helloWorldToSnakeCase`;
# the argument must be a string literal.
camel_to_snake("helloWorldToSnakeCase")
import { rmSync, readdirSync, writeFileSync } from 'fs';
import { join } from 'path';
import titleCase from '../lib/titleCase.js';
import replaceInFile from '../lib/replaceInFile.js';
// Post-extract packaging step for the Linux Discord install: prunes the
// install dir, replaces the native launcher with a shell shim, and patches
// the app bootstrap to run from the extracted layout.
export default ({ basePath, asarExtractPath }, { name, channel }) => {
  // Exec file
  // Launcher path for this release channel (e.g. "Discord", "DiscordCanary").
  const execPath = join(basePath, `Discord${channel === 'stable' ? '' : titleCase(channel)}`);
  // Everything NOT on this list is deleted from the install directory.
  const allowlist = [
    'discord.png',
    `discord${channel === 'stable' ? '' : `-${channel}`}.desktop`,
    'resources'
  ];
  for (const f of readdirSync(basePath)) {
    if (!allowlist.includes(f)) {
      rmSync(join(basePath, f), { force: true, recursive: true });
    }
  }
  // Replace the launcher binary with a shim that boots the asar through a
  // system-installed electron.
  writeFileSync(execPath, `#!/bin/sh
electron "$(dirname "\${BASH_SOURCE[0]}")/resources/app.asar"`);
  // Patch the bootstrap so resourcesPath/execPath point at the extracted
  // on-disk locations.
  // NOTE(review): the injected execPath hard-codes "DiscordCanary" even
  // though `channel` varies — verify behavior for other channels.
  replaceInFile(join(asarExtractPath, 'app_bootstrap', 'index.js'), `"use strict";`, `"use strict";
process.resourcesPath = require('path').join(require.main.filename, '..', '..', '..');
process.execPath = require('path').join(require.main.filename, '..', '..', '..', '..', 'DiscordCanary');
console.log(process.resourcesPath, process.execPath, __dirname, process.platform, process.versions.electron);`);
  // Redirect buildInfo's resource lookup to a path relative to the bootstrap.
  replaceInFile(join(asarExtractPath, 'app_bootstrap', 'buildInfo.js'), `process.resourcesPath`, `__dirname, '..', '..'`)
};
<filename>src/routes/UserSettings/CommunitySettingsTab/CommunitySettingsTab.js
import PropTypes from 'prop-types'
import React, { Component } from 'react'
import './CommunitySettingsTab.scss'
import { Link } from 'react-router-dom'
import { DEFAULT_AVATAR } from 'store/models/Community'
import { communityUrl } from 'util/navigation'
import Loading from 'components/Loading'
import RoundImage from 'components/RoundImage'
const { array, func } = PropTypes
// Settings tab listing every community the user belongs to, one control row
// per membership. Shows a spinner until memberships load.
export default class CommunitySettingsTab extends Component {
  static propTypes = {
    memberships: array,
    updateMembershipSettings: func
    // NOTE(review): `leaveCommunity` is also read from props in render() but
    // is not declared here — consider adding it to propTypes.
  }

  render () {
    const { memberships, leaveCommunity, updateMembershipSettings } = this.props
    if (!memberships) return <Loading />
    return <div>
      {memberships.map(m =>
        <CommunityControl
          membership={m}
          leaveCommunity={leaveCommunity}
          updateMembershipSettings={updateMembershipSettings}
          key={m.id} />)}
    </div>
  }
}
export function CommunityControl ({ membership, leaveCommunity }) {
const leave = () => {
if (window.confirm(`Are you sure you want to leave ${community.name}?`)) {
leaveCommunity(community.id)
}
}
const { community } = membership
return <div styleName='community-control'>
<div styleName='row'>
<Link to={communityUrl(community.slug)}>
<RoundImage url={community.avatarUrl || DEFAULT_AVATAR} medium styleName='avatar' />
</Link>
<Link to={communityUrl(community.slug)} styleName='name'>{community.name}</Link>
<span onClick={leave} styleName='leave-button'>Leave</span>
</div>
</div>
}
|
# coding=utf-8
"""Test _utils module."""
from __future__ import absolute_import, division, print_function
import os
import shutil
from future.standard_library import install_aliases
import numpy as np
import pytest
import torch
from iftorch._utils import dataloader, reverse_mapping
from iftorch.save_utils import (
get_path,
create_directory,
save_mapping,
load_mapping,
save_array,
load_array,
reload_array
)
install_aliases()
def test_dataloader():
"""Tests dataloader."""
dataset = np.array([[1, 1], [2, 2], [3, 3], [4, 4]])
# With no shuffling
results = list(dataloader(dataset, 2, shuffle=False))
np.testing.assert_array_equal(results[0], np.array([[1, 1], [2, 2]]))
np.testing.assert_array_equal(results[1], np.array([[3, 3], [4, 4]]))
results = list(dataloader(dataset, 3, shuffle=False))
np.testing.assert_array_equal(
results[0],
np.array([[1, 1], [2, 2], [3, 3]])
)
np.testing.assert_array_equal(results[1], np.array([[4, 4]]))
# With shuffling
np.random.seed(12345)
results = list(dataloader(dataset, 3, shuffle=True))
np.random.seed(12345)
indices = list(range(4))
np.random.shuffle(indices)
np.testing.assert_array_equal(
results[0],
np.array([2 * [index + 1] for index in indices[:3]])
)
np.testing.assert_array_equal(
results[1], np.array([2 * [indices[3] + 1]])
)
def test_reverse_mapping():
    """reverse_mapping must invert a simple str->int mapping."""
    original = {'12': 34, '56': 78}
    expected = {34: '12', 78: '56'}
    assert reverse_mapping(original) == expected
def test_create_directory():
"""Tests create_directory."""
create_directory('test_dir')
assert os.path.isdir('test_dir')
create_directory('test_dir/test_subdir')
assert os.path.isdir('test_dir/test_subdir')
create_directory('test_dir/test_subdir')
shutil.rmtree('test_dir')
def test_get_path():
"""Tests get_path."""
assert (
get_path('test_dir/test_subdir', 'test_file', 'txt')
== 'test_dir/test_subdir/test_file.txt'
)
assert get_path(None, 'test_file', 'txt') == 'test_file.txt'
def test_save_load_mapping():
"""Test save_mapping and load_mapping."""
create_directory('test_dir')
sqlite_path = 'test_dir/test.sqlite'
mapping = {1: 3, '2': '4', 3: '5', '4': 7}
save_mapping(mapping, sqlite_path, 'test')
reloaded_mapping = load_mapping(sqlite_path, 'test')
assert mapping == reloaded_mapping
shutil.rmtree('test_dir')
def test_save_load_array():
"""Test save_array and load_array."""
create_directory('test_dir')
h5_path = 'test_dir/test.h5'
array = np.asarray([1.0, 2.0, 3.0])
save_array(array, h5_path, 'test')
reloaded_array = load_array(h5_path, 'test')
np.testing.assert_array_equal(array, reloaded_array)
shutil.rmtree('test_dir')
def test_reload_array():
"""Test reload_array."""
create_directory('test_dir')
h5_path = 'test_dir/test.h5'
array = np.asarray([1.0, 2.0, 3.0])
save_array(array, h5_path, 'array')
reloaded_array = reload_array('numpy', h5_path, 'array')
np.testing.assert_array_equal(array, reloaded_array)
reloaded_array = reload_array('h5py', h5_path, 'array')
np.testing.assert_array_equal(array[0:2], reloaded_array[0:2])
np.testing.assert_array_equal(array, reloaded_array[0:3])
reloaded_array = reload_array('torch', h5_path, 'array')
assert isinstance(reloaded_array, torch.FloatTensor)
np.testing.assert_array_equal(array,
reloaded_array.cpu().detach().numpy())
with pytest.raises(ValueError) as info:
reload_array('wrong_type', h5_path, 'array')
assert (
str(info.value)
== "Option 'wrong_type' not in "
"{'numpy', 'torch', 'torch.cuda', 'h5py'}."
)
shutil.rmtree('test_dir')
|
#!/bin/bash
numServers=$1
baseJettyPort=8900
baseStopPort=9900
die () {
echo >&2 "$@"
exit 1
}
[ "$#" -eq 1 ] || die "1 argument required, $# provided, usage: stop.sh {numServers}"
cd ../server
for (( i=1; i <= $numServers; i++ ))
do
stopPort=`expr $baseStopPort + $i`
echo "stopping server$i, stop port is $stopPort"
cd ../server$i
java -DSTOP.PORT=$stopPort -DSTOP.KEY=key -jar start.jar --stop
done
mkdir ../server-lastlogs
for (( i=1; i <= $numServers; i++ ))
do
cd ../server$i
jettyPort=`expr $baseJettyPort + $i`
echo "Make sure jetty stops and wait for it: $jettyPort"
pid=`lsof -i:$jettyPort -sTCP:LISTEN -t`
echo "pid:$pid"
#kill $pid
#wait $pid
if [ ! -z "$pid" ]
then
while [ -e /proc/$pid ]; do sleep 1; done
fi
# save the last shutdown logs
echo "copy server$i.log to lastlogs"
cp -r -f server$i.log ../server-lastlogs/server-last$i.log
done
# stop zk runner
java -DSTOP.PORT=1313 -DSTOP.KEY=key -jar start.jar --stop
echo "wait for port to be available: $baseJettyPort"
pid=`lsof -i:$baseJettyPort -sTCP:LISTEN -t`
echo "pid:$pid"
#kill $pid
#wait $pid
if [ ! -z "$pid" ]
then
while [ -e /proc/$pid ]; do sleep 0.1; done
fi
nc -w 30 127.0.0.1 $baseJettyPort
sleep 5
|
package git
import (
"errors"
"github.com/lxn/walk"
"gpics/base/config"
"log"
"net/url"
"strings"
"sync"
"time"
)
// RepName extracts the repository name from a git URL,
// e.g. "https://host/user/repo.git" -> "repo".
func RepName(u string) (string, error) {
	rs, err := url.Parse(u)
	if err != nil {
		// Fix: the original returned a nil error here, silently yielding ""
		// for unparsable URLs.
		return "", err
	}
	us := strings.Split(rs.Path, "/")
	if len(us) < 3 {
		return "", errors.New("解析仓库名称失败") // "failed to parse repository name"
	}
	return strings.TrimSuffix(us[2], ".git"), nil
}
func Pull() error {
dir := walk.Resources.RootDirPath()
return pull(dir)
}
func Push() error {
dir := walk.Resources.RootDirPath()
return push(dir)
}
func Version() error {
return version("")
}
// Branch returns the current branch name of the repository rooted at the
// walk resources directory. The helper output is expected to contain the
// branch name as its second space-separated field; the trailing newline
// is stripped before returning.
func Branch() (string, error) {
	dir := walk.Resources.RootDirPath()
	b, err := branch(dir)
	if err != nil {
		// Bug fix: propagate the error instead of discarding it
		// (previously returned "", nil on failure).
		return "", err
	}
	sts := strings.Split(b, " ")
	if len(sts) < 2 {
		return "", errors.New("解析当前分支失败" + b)
	}
	return strings.TrimSuffix(sts[1], "\n"), nil
}
// mu serializes remote commits: remoteCommit holds this lock so only one
// pull/push round trip runs at a time.
var mu = new(sync.Mutex)

// AutoCommit stages and commits every change in the configured workspace
// with a fixed commit message. When the auto-commit config flag is set it
// also syncs with the remote via RemoteCommit.
func AutoCommit() (e error) {
	ws, ok := config.Workspace()
	if !ok {
		return errors.New("自动提交失败,原因:获取工作空间失败")
	}
	if err := add(ws, "."); err != nil {
		return err
	}
	if err := commit(ws, "自动提交"); err != nil {
		return err
	}
	// Push to the remote only when the auto-commit option is switched on;
	// a config read error silently counts as "off".
	if v, _ := config.BoolValue(config.AutoCommitKey); v {
		if err := RemoteCommit(); err != nil {
			return err
		}
	}
	return nil
}
// RemoteCommit pulls and then pushes the repository, failing if the round
// trip does not finish within the configured timeout (in seconds).
//
// The result channel is buffered (capacity 1) so the worker goroutine can
// always deliver its outcome and exit, even after the timeout branch has
// already returned. The channel is intentionally never closed: the old
// `defer close(ch)` closed it while the worker could still be sending,
// which panics with "send on closed channel".
func RemoteCommit() error {
	timeout, _ := config.IntValue(config.TimeOutKey)
	ch := make(chan error, 1)
	go remoteCommit(mu, ch)
	select {
	case err := <-ch:
		return err
	case <-time.After(time.Second * time.Duration(timeout)):
		return errors.New("请求超时!")
	}
}
// remoteCommit performs a pull followed by a push while holding the package
// mutex, and reports the outcome on ch. (Original note: this can easily
// time out because of network conditions.)
func remoteCommit(mu *sync.Mutex, ch chan error) {
	mu.Lock()
	defer mu.Unlock()
	if err := Pull(); err != nil {
		log.Println("pull err:", err)
		ch <- errors.New("pull 失败")
		return
	}
	if err := Push(); err != nil {
		log.Println("push err:", err)
		ch <- errors.New("push 失败")
		return
	}
	ch <- nil
	log.Println("提交 成功")
}
|
// source repository: linux-on-ibm-z/influxdb
package write
import (
"bufio"
"bytes"
"context"
"fmt"
"io"
"io/ioutil"
"strings"
"testing"
"time"
"github.com/google/go-cmp/cmp"
platform "github.com/influxdata/influxdb/v2"
"github.com/influxdata/influxdb/v2/mock"
"github.com/influxdata/influxdb/v2/pkg/testing/assert"
"github.com/stretchr/testify/require"
)
// TestScanLines is a table-driven test for the ScanLines split function:
// each produced token keeps its trailing newline, except a final
// unterminated line, which is returned as-is.
func TestScanLines(t *testing.T) {
	tests := []struct {
		name    string
		input   string
		want    []string
		wantErr bool
	}{
		{
			name:  "3 lines produced including their newlines",
			input: "m1,t1=v1 f1=1\nm2,t2=v2 f2=2\nm3,t3=v3 f3=3",
			want:  []string{"m1,t1=v1 f1=1\n", "m2,t2=v2 f2=2\n", "m3,t3=v3 f3=3"},
		},
		{
			name:  "single line without newline",
			input: "m1,t1=v1 f1=1",
			want:  []string{"m1,t1=v1 f1=1"},
		},
		{
			name:  "single line with newline",
			input: "m1,t1=v1 f1=1\n",
			want:  []string{"m1,t1=v1 f1=1\n"},
		},
		{
			name:  "no lines",
			input: "",
			want:  []string{},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			// Drive a bufio.Scanner with the split function under test.
			scanner := bufio.NewScanner(strings.NewReader(tt.input))
			scanner.Split(ScanLines)
			got := []string{}
			for scanner.Scan() {
				got = append(got, scanner.Text())
			}
			err := scanner.Err()
			if (err != nil) != tt.wantErr {
				t.Errorf("ScanLines() error = %v, wantErr %v", err, tt.wantErr)
				return
			}
			if !cmp.Equal(got, tt.want) {
				t.Errorf("%q. ScanLines() = -got/+want %s", tt.name, cmp.Diff(got, tt.want))
			}
		})
	}
}
// errorReader mocks io.Reader but returns an error.
type errorReader struct{}

// Read ignores p and always fails with a generic "error", reporting zero
// bytes read.
func (r *errorReader) Read(p []byte) (n int, err error) {
	return 0, fmt.Errorf("error")
}
// TestBatcher_read drives Batcher.read directly: the read goroutine feeds
// scanned lines onto a channel and reports its terminal status on errC.
// Cases cover normal reads, context cancellation, reader errors, and the
// MaxLineLength / bufio.MaxScanTokenSize limits.
func TestBatcher_read(t *testing.T) {
	type args struct {
		cancel bool
		r      io.Reader
		max    int
	}
	tests := []struct {
		name   string
		args   args
		want   []string
		expErr error
	}{
		{
			name: "reading two lines produces 2 lines",
			args: args{
				r: strings.NewReader("m1,t1=v1 f1=1\nm2,t2=v2 f2=2"),
			},
			want: []string{"m1,t1=v1 f1=1\n", "m2,t2=v2 f2=2"},
		},
		{
			name: "canceling returns no lines",
			args: args{
				cancel: true,
				r:      strings.NewReader("m1,t1=v1 f1=1"),
			},
			want:   nil,
			expErr: context.Canceled,
		},
		{
			name: "error from reader returns error",
			args: args{
				r: &errorReader{},
			},
			want:   nil,
			expErr: fmt.Errorf("error"),
		},
		{
			name: "error when input exceeds max line length",
			args: args{
				r:   strings.NewReader("m1,t1=v1 f1=1"),
				max: 5,
			},
			want:   nil,
			expErr: ErrLineTooLong,
		},
		{
			name: "lines greater than MaxScanTokenSize are allowed",
			args: args{
				r:   strings.NewReader(strings.Repeat("a", bufio.MaxScanTokenSize+1)),
				max: bufio.MaxScanTokenSize + 2,
			},
			want: []string{strings.Repeat("a", bufio.MaxScanTokenSize+1)},
		},
		{
			name: "lines greater than MaxScanTokenSize by default are not allowed",
			args: args{
				r: strings.NewReader(strings.Repeat("a", bufio.MaxScanTokenSize+1)),
			},
			want:   nil,
			expErr: ErrLineTooLong,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			ctx := context.Background()
			var cancel context.CancelFunc
			if tt.args.cancel {
				// Cancel before starting read so it observes a dead context.
				ctx, cancel = context.WithCancel(ctx)
				cancel()
			}
			b := &Batcher{MaxLineLength: tt.args.max}
			var got []string
			lines := make(chan []byte)
			// errC is buffered so read can report even if nobody drains lines.
			errC := make(chan error, 1)
			go b.read(ctx, tt.args.r, lines, errC)
			if cancel == nil {
				// Drain until read closes the lines channel.
				for line := range lines {
					got = append(got, string(line))
				}
			}
			err := <-errC
			assert.Equal(t, err, tt.expErr)
			assert.Equal(t, got, tt.want)
		})
	}
}
// TestBatcher_write exercises Batcher.write: lines arrive on a channel and
// flushes to the mocked write service are triggered either by byte count
// (MaxFlushBytes) or by timer (MaxFlushInterval). The mock records whether
// it was called at all (wantNoCall) and with what payload (want).
func TestBatcher_write(t *testing.T) {
	type fields struct {
		MaxFlushBytes    int
		MaxFlushInterval time.Duration
	}
	type args struct {
		cancel     bool
		writeError bool
		org        platform.ID
		bucket     platform.ID
		line       string
		lines      chan []byte
		errC       chan error
	}
	tests := []struct {
		name       string
		fields     fields
		args       args
		want       string
		wantErr    bool
		wantNoCall bool
	}{
		{
			name: "sending a single line will send a line to the service",
			fields: fields{
				MaxFlushBytes: 1,
			},
			args: args{
				org:    platform.ID(1),
				bucket: platform.ID(2),
				line:   "m1,t1=v1 f1=1",
				lines:  make(chan []byte),
				errC:   make(chan error),
			},
			want: "m1,t1=v1 f1=1",
		},
		{
			name: "sending a single line and waiting for a timeout will send a line to the service",
			fields: fields{
				MaxFlushInterval: time.Millisecond,
			},
			args: args{
				org:    platform.ID(1),
				bucket: platform.ID(2),
				line:   "m1,t1=v1 f1=1",
				lines:  make(chan []byte),
				errC:   make(chan error),
			},
			want: "m1,t1=v1 f1=1",
		},
		{
			name: "write service returning error stops the write after timeout",
			fields: fields{
				MaxFlushInterval: time.Millisecond,
			},
			args: args{
				writeError: true,
				org:        platform.ID(1),
				bucket:     platform.ID(2),
				line:       "m1,t1=v1 f1=1",
				lines:      make(chan []byte),
				errC:       make(chan error),
			},
			wantErr: true,
		},
		{
			name: "canceling will write no data to service",
			fields: fields{
				MaxFlushBytes: 1,
			},
			args: args{
				cancel: true,
				org:    platform.ID(1),
				bucket: platform.ID(2),
				line:   "m1,t1=v1 f1=1",
				lines:  make(chan []byte, 1),
				errC:   make(chan error, 1),
			},
			wantErr:    true,
			wantNoCall: true,
		},
		{
			name: "write service returning error stops the write",
			fields: fields{
				MaxFlushBytes: 1,
			},
			args: args{
				writeError: true,
				org:        platform.ID(1),
				bucket:     platform.ID(2),
				line:       "m1,t1=v1 f1=1",
				lines:      make(chan []byte),
				errC:       make(chan error),
			},
			wantErr: true,
		},
		{
			name: "blank line is not sent to service",
			fields: fields{
				MaxFlushBytes: 1,
			},
			args: args{
				org:    platform.ID(1),
				bucket: platform.ID(2),
				line:   "\n",
				lines:  make(chan []byte),
				errC:   make(chan error),
			},
			wantNoCall: true,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			ctx := context.Background()
			var cancel context.CancelFunc
			if tt.args.cancel {
				ctx, cancel = context.WithCancel(ctx)
			}
			// mocking the write service here to either return an error
			// or get back all the bytes from the reader.
			writeCalled := false
			var got string
			svc := &mock.WriteService{
				WriteToF: func(ctx context.Context, _ platform.BucketFilter, r io.Reader) error {
					writeCalled = true
					if tt.args.writeError {
						return fmt.Errorf("error")
					}
					b, err := ioutil.ReadAll(r)
					got = string(b)
					return err
				},
			}
			b := &Batcher{
				MaxFlushBytes:    tt.fields.MaxFlushBytes,
				MaxFlushInterval: tt.fields.MaxFlushInterval,
				Service:          svc,
			}
			writeFn := func(batch []byte) error {
				return svc.WriteTo(ctx, platform.BucketFilter{ID: &tt.args.bucket, OrganizationID: &tt.args.org}, bytes.NewReader(batch))
			}
			go b.write(ctx, writeFn, tt.args.lines, tt.args.errC)
			if cancel != nil {
				cancel()
				// NOTE(review): fixed sleep gives the write goroutine time to
				// observe cancellation before the line below arrives; this
				// looks timing-sensitive — confirm it is not flaky under load.
				time.Sleep(500 * time.Millisecond)
			}
			tt.args.lines <- []byte(tt.args.line)
			// if the max flush interval is not zero, we are testing to see
			// if the data is flushed via the timer rather than forced by
			// closing the channel.
			if tt.fields.MaxFlushInterval != 0 {
				time.Sleep(tt.fields.MaxFlushInterval * 100)
			}
			close(tt.args.lines)
			err := <-tt.args.errC
			if (err != nil) != tt.wantErr {
				t.Errorf("ScanLines.read() error = %v, wantErr %v", err, tt.wantErr)
				return
			}
			require.Equal(t, tt.wantNoCall, !writeCalled)
			require.Equal(t, tt.want, got)
		})
	}
}
// TestBatcher_WriteTo runs the full read+write pipeline through
// Batcher.WriteTo, counting flushes to the mocked write service and
// checking the payload of the final flush.
//
// NOTE(review): both subtests below call WriteTo; the "now with WriteTo"
// comment suggests the first originally targeted a different method —
// confirm whether the duplication is still intentional.
func TestBatcher_WriteTo(t *testing.T) {
	// createReader returns a factory so each subtest gets a fresh reader;
	// the sentinel input "error" yields an always-failing reader.
	createReader := func(data string) func() io.Reader {
		if data == "error" {
			return func() io.Reader {
				return &errorReader{}
			}
		}
		return func() io.Reader {
			return strings.NewReader(data)
		}
	}
	type fields struct {
		MaxFlushBytes    int
		MaxFlushInterval time.Duration
	}
	type args struct {
		writeError bool
		org        platform.ID
		bucket     platform.ID
		r          func() io.Reader
	}
	tests := []struct {
		name        string
		fields      fields
		args        args
		want        string
		wantFlushes int
		wantErr     bool
	}{
		{
			name: "a line of line protocol is sent to the service",
			fields: fields{
				MaxFlushBytes: 1,
			},
			args: args{
				org:    platform.ID(1),
				bucket: platform.ID(2),
				r:      createReader("m1,t1=v1 f1=1"),
			},
			want:        "m1,t1=v1 f1=1",
			wantFlushes: 1,
		},
		{
			name: "multiple lines cause multiple flushes",
			fields: fields{
				MaxFlushBytes: len([]byte("m1,t1=v1 f1=1\n")),
			},
			args: args{
				org:    platform.ID(1),
				bucket: platform.ID(2),
				r:      createReader("m1,t1=v1 f1=1\nm2,t2=v2 f2=2\nm3,t3=v3 f3=3"),
			},
			want:        "m3,t3=v3 f3=3",
			wantFlushes: 3,
		},
		{
			name:   "errors during read return error",
			fields: fields{},
			args: args{
				org:    platform.ID(1),
				bucket: platform.ID(2),
				r:      createReader("error"),
			},
			wantErr: true,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			// mocking the write service here to either return an error
			// or get back all the bytes from the reader.
			var (
				got        string
				gotFlushes int
			)
			svc := &mock.WriteService{
				WriteToF: func(ctx context.Context, _ platform.BucketFilter, r io.Reader) error {
					if tt.args.writeError {
						return fmt.Errorf("error")
					}
					b, err := ioutil.ReadAll(r)
					got = string(b)
					gotFlushes++
					return err
				},
			}
			b := &Batcher{
				MaxFlushBytes:    tt.fields.MaxFlushBytes,
				MaxFlushInterval: tt.fields.MaxFlushInterval,
				Service:          svc,
			}
			ctx := context.Background()
			if err := b.WriteTo(
				ctx,
				platform.BucketFilter{ID: &tt.args.bucket, OrganizationID: &tt.args.org},
				tt.args.r(),
			); (err != nil) != tt.wantErr {
				t.Errorf("Batcher.Write() error = %v, wantErr %v", err, tt.wantErr)
			}
			if gotFlushes != tt.wantFlushes {
				t.Errorf("%q. Batcher.Write() flushes %d want %d", tt.name, gotFlushes, tt.wantFlushes)
			}
			if !cmp.Equal(got, tt.want) {
				t.Errorf("%q. Batcher.Write() = -got/+want %s", tt.name, cmp.Diff(got, tt.want))
			}
		})
		// test the same data, but now with WriteTo function
		t.Run("WriteTo_"+tt.name, func(t *testing.T) {
			// mocking the write service here to either return an error
			// or get back all the bytes from the reader.
			var (
				got        string
				gotFlushes int
			)
			svc := &mock.WriteService{
				WriteToF: func(ctx context.Context, _ platform.BucketFilter, r io.Reader) error {
					if tt.args.writeError {
						return fmt.Errorf("error")
					}
					b, err := ioutil.ReadAll(r)
					got = string(b)
					gotFlushes++
					return err
				},
			}
			b := &Batcher{
				MaxFlushBytes:    tt.fields.MaxFlushBytes,
				MaxFlushInterval: tt.fields.MaxFlushInterval,
				Service:          svc,
			}
			ctx := context.Background()
			bucketFilter := platform.BucketFilter{ID: &tt.args.bucket, OrganizationID: &tt.args.org}
			if err := b.WriteTo(ctx, bucketFilter, tt.args.r()); (err != nil) != tt.wantErr {
				t.Errorf("Batcher.Write() error = %v, wantErr %v", err, tt.wantErr)
			}
			if gotFlushes != tt.wantFlushes {
				t.Errorf("%q. Batcher.Write() flushes %d want %d", tt.name, gotFlushes, tt.wantFlushes)
			}
			if !cmp.Equal(got, tt.want) {
				t.Errorf("%q. Batcher.Write() = -got/+want %s", tt.name, cmp.Diff(got, tt.want))
			}
		})
	}
}
// TestBatcher_WriteTimeout verifies that WriteTo gives up with
// context.DeadlineExceeded — and delivers no data to the service — when
// the input reader never produces anything before the context deadline.
func TestBatcher_WriteTimeout(t *testing.T) {
	// mocking the write service here to either return an error
	// or get back all the bytes from the reader.
	bucketId := platform.ID(2)
	orgId := platform.ID(1)
	var got string
	svc := &mock.WriteService{
		WriteToF: func(ctx context.Context, filter platform.BucketFilter, r io.Reader) error {
			b, err := ioutil.ReadAll(r)
			got = string(b)
			return err
		},
	}
	b := &Batcher{
		Service: svc,
	}
	// this mimics a reader like stdin that may never return data.
	r, _ := io.Pipe()
	ctx, cancel := context.WithTimeout(context.Background(), time.Millisecond)
	defer cancel()
	if err := b.WriteTo(ctx, platform.BucketFilter{ID: &bucketId, OrganizationID: &orgId}, r); err != context.DeadlineExceeded {
		t.Errorf("Batcher.Write() with timeout error = %v", err)
	}
	require.Empty(t, got, "Batcher.Write() with timeout received data")
}
// TestBatcher_WriteWithoutService verifies that a Batcher with no backing
// write service rejects writes with a descriptive error.
func TestBatcher_WriteWithoutService(t *testing.T) {
	var b Batcher
	in := strings.NewReader("m1,t1=v1 f1=1")
	err := b.WriteTo(context.Background(), platform.BucketFilter{}, in)
	require.Error(t, err)
	require.Contains(t, err.Error(), "write service required")
}
|
def multiply(a, b):
    """Multiply two integers using the shift-and-add (Russian peasant) method.

    Equivalent to ``a * b`` for any integers: the sign of ``b`` is folded
    into ``a`` first, so the halving loop always operates on a
    non-negative ``b``.
    """
    if b < 0:
        a, b = -a, -b
    total = 0
    while b:
        if b & 1:
            total += a
        a <<= 1
        b >>= 1
    return total
[{
"title":"创伤RAPT量表",
"summary":"<div class='tips'>* 胸部 AIS>2分<br/>**腹部 AIS>2分<br/>***头部 AIS>2分<br/>****GCS<8分<br/>免责声明 <br/> 本量表参考《创伤骨科患者深静脉血栓形成筛查与治疗的专家共识 中华创伤骨科杂志 2013,15(12)》<br/> 请患者在医务工作者指导下进行填写,本量表最终结果仅供专业医务工作者参考</div>",
"items":
[
{
"title":"病史",
"summary":"",
"type":"checkbox",
"keys":[
{"key":"恶性肿瘤","value":2},
{"key":"凝血异常","value":2},
{"key":"VTE病史","value":3}
],
"value":"",
"scored":true,
"checkedStyle":"none"
},
{
"title":"医源性损伤",
"summary":"",
"type":"checkbox",
"keys":[
{"key":"中心静脉导管>24h","value":2},{"key":"24h内输血>4 units","value":2},
{"key":"手术时间>2h","value":2},{"key":"修复或结扎大血管","value":3}
],
"value":"",
"scored":true,
"checkedStyle":"none"
},
{
"title":"创伤程度",
"summary":"",
"type":"checkbox",
"keys":[
{"key":"胸部创伤是否较为严重?*","value":2},{"key":"腹部创伤是否较为严重?**","value":2},
{"key":"头部创伤是否较为严重?***","value":3},{"key":"脊柱骨折","value":3},
{"key":"重度昏迷持续4h以上****","value":4},{"key":"下肢复杂骨折","value":4},
{"key":"盆骨骨折", "value":4}, {"key":"脊髓损伤 (截瘫、四肢瘫)", "value":4}
],
"value":"",
"scored":true,
"checkedStyle":"none"
}
]
},
{
"title": "膝关节HSS评分(左腿)",
"summary": "<div class='tips'>免责声明:<br/>本量表参考http://www.haodf.com/zhuanjiaguandian/zhuyuchang_733246266.htm <br/>请患者在医务工作者指导下进行填写,本量表最终结果仅供专业医务工作者参考</div>",
"items": [{
"title": "疼痛",
"summary": "",
"type": "radio",
"keys": [{
"key": "行走时无疼痛",
"value": 15
}, {
"key": "行走时轻度疼痛",
"value": 10
}, {
"key": "行走时中度疼痛",
"value": 5
}, {
"key": "行走时严重疼痛",
"value": 0
}],
"value": "",
"scored": true,
"checkedStyle": "dot"
}, {
"title": "功能",
"summary": "",
"type": "radio",
"keys": [{
"key": "行走站立无限制",
"value": 12
}, {
"key": "行走2500~5000米和站立半小时以上",
"value": 10
}, {
"key": "行走500~2500米和站立可达半小时",
"value": 5
}, {
"key": "行走少于500米",
"value": 4
}, {
"key": "不能行走",
"value": 0
}],
"value": "",
"scored": true,
"checkedStyle": "dot"
}, {
"title": "功能",
"summary": "",
"type": "radio",
"keys": [{
"key": "能上楼梯",
"value": 5
}, {
"key": "能上楼梯,但需支具",
"value": 2
}, {
"key": "无法行走",
"value": 0
}],
"value": "",
"scored": true,
"checkedStyle": "dot"
}, {
"title": "功能",
"summary": "",
"type": "radio",
"keys": [{
"key": "屋内行走,无需支具",
"value": 5
}, {
"key": "屋内行走,需要支具",
"value": 2
}, {
"key": "无法行走",
"value": 0
}],
"value": "",
"scored": true,
"checkedStyle": "dot"
}, {
"title": "活动度",
"summary": "",
"type": "input",
"keys": [{
"key": "每活动8度得1分",
"value": 18
}],
"value": "",
"scored": true,
"checkedStyle": "dot"
}, {
"title": "肌力",
"summary": "",
"type": "radio",
"keys": [{
"key": "优:完全能对抗阻力",
"value": 10
}, {
"key": "中:能带动关节活动",
"value": 4
}, {
"key": "良:部分对抗阻力",
"value": 8
}, {
"key": "差:不能带动关节活动",
"value": 0
}],
"value": "",
"scored": true,
"checkedStyle": "dot"
}, {
"title": "屈曲畸形",
"summary": "",
"type": "radio",
"keys": [{
"key": "无畸形",
"value": 10
}, {
"key": "小于5度",
"value": 8
}, {
"key": "5~10度",
"value": 5
}, {
"key": "大于10度",
"value": 0
}],
"value": "",
"scored": true,
"checkedStyle": "dot"
}, {
"title": "稳定性",
"summary": "",
"type": "radio",
"keys": [{
"key": "正常",
"value": 10
}, {
"key": "轻度不稳0~5度",
"value": 8
}, {
"key": "中度不稳5~15度",
"value": 5
}, {
"key": "严重不稳大于15度",
"value": 0
}],
"value": "",
"scored": true,
"checkedStyle": "dot"
}, {
"title": "减分项目",
"summary": "",
"type": "radio",
"keys": [{
"key": "单手杖",
"value": -1
}, {
"key": "单拐杖",
"value": -2
}, {
"key": "双拐杖",
"value": -3
}],
"value": "",
"scored": true,
"checkedStyle": "dot"
},{
"title": "减分项目",
"summary": "",
"type": "radio",
"keys": [{
"key": "伸直滞缺5度",
"value": -2
}, {
"key": "伸直滞缺10度",
"value": -3
}, {
"key": "伸直滞缺15度",
"value": -5
}],
"value": "",
"scored": true,
"checkedStyle": "dot"
},{
"title": "减分项目",
"summary": "",
"type": "radio",
"keys": [{
"key": "每5度外翻",
"value": -1
}, {
"key": "每10度外翻",
"value": -2
}, {
"key": "每15度外翻",
"value": -3
}],
"value": "",
"scored": true,
"checkedStyle": "dot"
},{
"title": "减分项目",
"summary": "",
"type": "radio",
"keys": [{
"key": "每5度内翻",
"value": -1
}, {
"key": "每10度内翻",
"value": -2
}, {
"key": "每15度内翻",
"value": -3
}],
"value": "",
"scored": true,
"checkedStyle": "dot"
}]
},
{
"title": "膝关节HSS评分(右腿)",
"summary": "<div class='tips'>免责声明:<br/>本量表参考http://www.haodf.com/zhuanjiaguandian/zhuyuchang_733246266.htm <br/>请患者在医务工作者指导下进行填写,本量表最终结果仅供专业医务工作者参考</div>",
"items": [{
"title": "疼痛",
"summary": "",
"type": "radio",
"keys": [{
"key": "行走时无疼痛",
"value": 15
}, {
"key": "行走时轻度疼痛",
"value": 10
}, {
"key": "行走时中度疼痛",
"value": 5
}, {
"key": "行走时严重疼痛",
"value": 0
}],
"value": "",
"scored": true,
"checkedStyle": "dot"
}, {
"title": "功能",
"summary": "",
"type": "radio",
"keys": [{
"key": "行走站立无限制",
"value": 12
}, {
"key": "行走2500~5000米和站立半小时以上",
"value": 10
}, {
"key": "行走500~2500米和站立可达半小时",
"value": 5
}, {
"key": "行走少于500米",
"value": 4
}, {
"key": "不能行走",
"value": 0
}],
"value": "",
"scored": true,
"checkedStyle": "dot"
}, {
"title": "功能",
"summary": "",
"type": "radio",
"keys": [{
"key": "能上楼梯",
"value": 5
}, {
"key": "能上楼梯,但需支具",
"value": 2
}, {
"key": "无法行走",
"value": 0
}],
"value": "",
"scored": true,
"checkedStyle": "dot"
}, {
"title": "功能",
"summary": "",
"type": "radio",
"keys": [{
"key": "屋内行走,无需支具",
"value": 5
}, {
"key": "屋内行走,需要支具",
"value": 2
}, {
"key": "无法行走",
"value": 0
}],
"value": "",
"scored": true,
"checkedStyle": "dot"
}, {
"title": "活动度",
"summary": "",
"type": "input",
"keys": [{
"key": "每活动8度得1分",
"value": 18
}],
"value": "",
"scored": true,
"checkedStyle": "dot"
}, {
"title": "肌力",
"summary": "",
"type": "radio",
"keys": [{
"key": "优:完全能对抗阻力",
"value": 10
}, {
"key": "中:能带动关节活动",
"value": 4
}, {
"key": "良:部分对抗阻力",
"value": 8
}, {
"key": "差:不能带动关节活动",
"value": 0
}],
"value": "",
"scored": true,
"checkedStyle": "dot"
}, {
"title": "屈曲畸形",
"summary": "",
"type": "radio",
"keys": [{
"key": "无畸形",
"value": 10
}, {
"key": "小于5度",
"value": 8
}, {
"key": "5~10度",
"value": 5
}, {
"key": "大于10度",
"value": 0
}],
"value": "",
"scored": true,
"checkedStyle": "dot"
}, {
"title": "稳定性",
"summary": "",
"type": "radio",
"keys": [{
"key": "正常",
"value": 10
}, {
"key": "轻度不稳0~5度",
"value": 8
}, {
"key": "中度不稳5~15度",
"value": 5
}, {
"key": "严重不稳大于15度",
"value": 0
}],
"value": "",
"scored": true,
"checkedStyle": "dot"
}, {
"title": "减分项目",
"summary": "",
"type": "radio",
"keys": [{
"key": "单手杖",
"value": -1
}, {
"key": "单拐杖",
"value": -2
}, {
"key": "双拐杖",
"value": -3
}],
"value": "",
"scored": true,
"checkedStyle": "dot"
},{
"title": "减分项目",
"summary": "",
"type": "radio",
"keys": [{
"key": "伸直滞缺5度",
"value": -2
}, {
"key": "伸直滞缺10度",
"value": -3
}, {
"key": "伸直滞缺15度",
"value": -5
}],
"value": "",
"scored": true,
"checkedStyle": "dot"
},{
"title": "减分项目",
"summary": "",
"type": "radio",
"keys": [{
"key": "每5度外翻",
"value": -1
}, {
"key": "每10度外翻",
"value": -2
}, {
"key": "每15度外翻",
"value": -3
}],
"value": "",
"scored": true,
"checkedStyle": "dot"
},{
"title": "减分项目",
"summary": "",
"type": "radio",
"keys": [{
"key": "每5度内翻",
"value": -1
}, {
"key": "每10度内翻",
"value": -2
}, {
"key": "每15度内翻",
"value": -3
}],
"value": "",
"scored": true,
"checkedStyle": "dot"
}]
},
{
"title":"caprini",
"summary":"<div class='tips'>免责声明:<br/>本量表参考A Validation Study of a Retrospective Venous Thromboembolism Risk Scoring Method. Annals of Surgery Volume 251, Number 2, February 2010.<br/>请患者在医务工作者指导下进行填写,本量表最终结果仅供专业医务工作者参考</div>",
"items":
[
{
"title":"如果患者在既往一个月内发生过下面的情况,请选择对应项目:",
"summary":"",
"type":"checkbox",
"keys":[
{"key": "住院准备做个时间小于45分钟的手术(例如腕关节腱鞘炎、腱鞘囊肿、体表肿物切除手术,肌腱修补手术等)", "value":1},
{"key": "一个月内曾经做过时间大于45分钟的其他手术", "value":1},
{"key": "患有静脉曲张", "value": 1},
{"key" : "患过炎症性肠病,如克罗恩病、溃疡性结肠炎", "value":1},
{"key": "一个月内下肢会时常出现水肿", "value":1},
{"key": "一个月内发生过急性心肌梗塞", "value":1},
{"key":"一个月内发生过充血性心力衰竭", "value":1},
{"key": "一个月内曾经发生严重感染,例如:肺炎、手术后感染等、败血症、脏器穿孔引起的急性腹膜炎、急性心内膜炎、心包炎、蜂窝织炎等", "value":1},
{"key":"患有一些肺部疾病,例如:肺气肿、慢性阻塞性肺疾病(老慢支)、肺心病", "value":1},
{"key":"行动不便,卧床不起,但连续卧床时间小于3天", "value":1},
{"key":"有吸烟的习惯", "value":1},
{"key":"患有糖尿病,需要使用胰岛素治疗,或者我因甲状腺切除,现在正在口服甲状腺激素", "value":1},
{"key":"过去一个月经受化疗", "value":1},
{"key":"过去一个月曾输过血", "value":1}
],
"value":"",
"scored":true,
"checkedStyle":"dot"
},
{
"title":"如果患者在既往一个月内发生过下面的情况,请选择对应项目",
"summary":"",
"type":"checkbox",
"keys":[
{"key":"患过或正患有恶性肿瘤/癌症(不限一个月)","value":2},
{"key":"这次住院要做个时间大于45分钟的手术(多数住院手术都大于45分钟,包括关节置换、骨折复位内固定术、外固定支架术,关节镜手术等)","value":2}, {"key":"因为石膏或者模具固定下肢,导致过去一个月脚活动很少","value":2},
{"key":"颈部或者胸部放置过静脉留置针或者中心静脉留置管", "value":2}, {"key":"过去一个月曾连续卧床超过3天(72小时)", "value":2}
],
"value":"",
"scored":true,
"checkedStyle":"dot"
},
{
"title":"如果患者在既往一个月内发生过下面的情况,请选择对应项目",
"summary":"",
"type":"checkbox",
"keys":[
{"key":"曾经发生过深静脉血栓或者肺栓塞(不限一个月内)","value":3},
{"key":"直系亲属发生过血栓(不限一个月内)","value":3},
{"key":"曾被告知自己的凝血化验指标异常,或者直系亲属的凝血指标异常(抗心磷脂抗体阳性、凝血酶原20210A阳性、凝血因子Vleiden阳性、狼疮抗凝物阳性、血清同型半胱氨酸酶升高)(如果不知道请跳过)","value":3}],
"value":"",
"scored":true,
"checkedStyle":"dot"
},
{
"title":"如果患者在既往一个月内发生过下面的情况,请选择对应项目",
"summary":"",
"type":"checkbox",
"keys":[
{"key":"接受了髋关节或者膝关节置换手术", "value":5},
{"key":"发生过髋关节、骨盆、腿部骨折", "value":5},
{"key":"发生过严重创伤(例如:跌倒或者车祸后全身多处骨折)","value":5},
{"key":"因为脊髓损伤引起瘫痪","value":5},
{"key":"发生过脑卒中(脑出血或脑梗)","value":5}],
"value":"",
"scored":true,
"checkedStyle":"dot"
}
]
},
{
"title":"术后骨科筛查量表",
"summary":"",
"items":[
{
"title":"血常规项目",
"summary":"",
"type":"input",
"keys":[
{"key":"白细胞","value":"未见异常","tip":"3.5-9.5 10^9/L"},
            {"key":"红细胞","value":"未见异常","tip":"3.8-5.1 10^12/L"},
{"key":"细胞比积","value":"未见异常","tip":"35.0-45.0%"},
{"key":"血红蛋白","value":"未见异常","tip":"115-150 g/L"},
{"key":"血小板","value":"未见异常","tip":"125-300 10^9/L"},
{"key":"中性细胞绝对值","value":"未见异常","tip":"1.8-6.3"},
{"key":"中性细胞百分比","value":"未见异常","tip":"40-75%"},
{"key":"高敏感指标","value":"未见异常"},
{"key":"CRP","value":"未见异常","tip":"0-10"},
{"key":"降钙素原","value":"未见异常","tip":"< 0.1ng /ml"},
{"key":"细菌涂片","value":"未见异常"},
{"key":"细菌培养","value":"未见异常"},
{"key":"切口是否红肿","value":"自己填写"},
{"key":"切口是否渗出","value":"自己填写"},
{"key":"渗出物颜色","value":"自己填写"},
{"key":"渗出物性状","value":"自己填写"},
{"key":"渗出物量","value":"自己填写"},
{"key":"引流量","value":"自己填写"},
{"key":"输血量","value":"自己填写"}
],
"value":"",
"scored":false,
"checkedStyle":"none"
}
]
},
{
"title":"术前骨科筛查量表",
"summary":"",
"items":[
{
"title":"尿常规项目",
"summary":"",
"type":"input",
"keys":[
{"key":"葡萄糖","value":"未见异常","tip":"NA"},
{"key":"胆红素","value":"未见异常","tip":"NA"},
{"key":"酮体","value":"未见异常","tip":"NA"},
{"key":"比重","value":"未见异常","tip":"1.005-1.03"},
{"key":"红细胞","value":"未见异常","tip":"NA"},
{"key":"PH值","value":"未见异常","tip":"5.5-6.5"},
{"key":"尿蛋白","value":"未见异常","tip":"NA"},
{"key":"亚硝酸盐","value":"未见异常","tip":"NA"},
{"key":"白细胞","value":"未见异常","tip":"NA"}
],
"value":"",
"scored":false,
"checkedStyle":"none"
},
{
"title":"血常规项目",
"summary":"",
"type":"input",
"keys":[
{"key":"白细胞","value":"未见异常","tip":"3.5-9.5 10^9/L"},
            {"key":"红细胞","value":"未见异常","tip":"3.8-5.1 10^12/L"},
{"key":"细胞比积","value":"未见异常","tip":"35.0-45.0%"},
{"key":"血红蛋白","value":"未见异常","tip":"115-150 g/L"},
{"key":"血小板","value":"未见异常","tip":"125-300 10^9/L"},
{"key":"中性细胞绝对值","value":"未见异常","tip":"1.8-6.3"},
{"key":"中性细胞百分比","value":"未见异常","tip":"40-75%"},
{"key":"血糖","value":"未见异常","tip":"3.90-5.80mmol/L"}
],
"value":"",
"scored":false,
"checkedStyle":"none"
},
{
"title":"凝血功能",
"summary":"",
"type":"input",
"keys":[
{"key":"凝血酶原时间","value":"未见异常","tip":"11-14s"},
{"key":"国际标准化比率","value":"未见异常","tip":"0.82-1.15"},
{"key":"部分凝血活酶时间","value":"未见异常","tip":"20.0-40.0"},
{"key":"纤维蛋白原","value":"未见异常","tip":"2.00-4.00"},
{"key":"凝血酶时间","value":"未见异常","tip":"13.0-21.0"}
],
"value":"",
"scored":false,
"checkedStyle":"none"
},
{
"title":"CRP",
"summary":"",
"type":"input",
"keys":[
{"key":"CRP","value":"未见异常","tip":"0-10"}
],
"value":"",
"scored":false,
"checkedStyle":"none"
},
{
"title":"肝功能",
"summary":"",
"type":"input",
"keys":[
{"key":"总蛋白","value":"未见异常","tip":"60-80g/L"},
{"key":"白蛋白","value":"未见异常","tip":"35-55g/L"},
{"key":"谷丙转氨酶","value":"未见异常","tip":"0-65U/L"},
            {"key":"谷草转氨酶","value":"未见异常","tip":"8-37U/L"},
{"key":"尿素","value":"未见异常","tip":"2.5-6.4mmol/L"},
{"key":"肌酐","value":"未见异常","tip":"53-115umol/L"},
{"key":"尿酸","value":"未见异常","tip":"210-430umol/L"}
],
"value":"",
"scored":false,
"checkedStyle":"none"
},
{
"title":"电解质",
"summary":"",
"type":"input",
"keys":[
{"key":"K","value":"未见异常","tip":"3.5-5.5 mmol/L"},
{"key":"Na","value":"未见异常","tip":"135-145 mmol/L"},
{"key":"CI","value":"未见异常","tip":"95-105 mmol/L"},
{"key":"Ca","value":"未见异常","tip":"2.08-2.60 mmol/L"},
{"key":"Mg","value":"未见异常","tip":"0.65-1.05 mmol/L"},
{"key":"P","value":"未见异常","tip":"0.80-1.60 mmol/L"}
],
"value":"",
"scored":false,
"checkedStyle":"none"
},
{
"title":"艾滋",
"summary":"",
"type":"input",
"keys":[
{"key":"艾滋","value":"未见异常","tip":"阴性"}
],
"value":"",
"scored":false,
"checkedStyle":"none"
},
{
"title":"乙肝",
"summary":"",
"type":"input",
"keys":[
{"key":"乙肝","value":"未见异常","tip":"阴性"}
],
"value":"",
"scored":false,
"checkedStyle":"none"
},
{
"title":"丙肝",
"summary":"",
"type":"input",
"keys":[
{"key":"丙肝","value":"未见异常","tip":"阴性"}
],
"value":"",
"scored":false,
"checkedStyle":"none"
}
]
},
{
"title":"术中骨科筛查量表",
"summary":"本项目数值默认为患者未见异常,如果患者某项指标不在正常范围,请填入实际数值",
"items":[
{
"title":"术中骨科筛查量表",
"summary":"",
"type":"input",
"keys":[
{"key":"手术时间","value":"","tip":"<span style='text-align:right;display: inline-block;'>上肢60-90分<br/>下肢90-120分</span>"},
{"key":"止血带时长","value":"","tip":"<span style='text-align:right;display: inline-block;'>如果超时休息20-30分钟再打</span>"},
{"key":"出血量","value":""},
{"key":"输血量","value":""}
],
"value":"",
"scored":false,
"checkedStyle":"none"
}
]
},
{
"label":"请根据患者实际自身情况选择对应项目,如有任何不清楚地方,请向专业医务人员询问",
"title":"四类药物禁忌(女性)",
"summary":"<div class='tips'>免责声明:<br/>本量表由对应的药品与MIMS网站内容整理而成<br/>请患者在医务工作者指导下进行填写,本量表最终结果仅供专业医务工作者参考</div>",
"items":[
{
"title":"对药物过敏",
"summary":"",
"type":"checkbox",
"keys":[
{"key":"对药物过敏","value":""}
],
"value":"",
"scored":false,
"checkedStyle":"none"
},
{
"title":"您是孕妇或者在哺乳期吗",
"summary":"",
"type":"radio",
"keys":[
{"key":"孕妇或哺乳期","value":""},
{"key":"先兆流产","value":""}
],
"value":"",
"scored":false,
"checkedStyle":"none"
},
{
"title":"您有任何溃疡或出血症状吗",
"summary":"",
"type":"checkbox",
"keys":[
{"key":"消化道出血或溃疡","value":""},
{"key":"脑出血","value":""},
{"key":"阴道出血","value":""},
{"key":"注射部位血肿", "value":""},
{"key":"凝血异常伴出血倾向","value":""},
{"key":"应用非甾体抗炎药后发生胃肠道出血或穿孔病史","value":""},
{"key":"可能引起出血的器质性损伤","value":""},
{"key":"活动性出血","value":""},
{"key":"皮肤瘀斑,牙龈出血,鼻衄、伤口出血等","value":""},
{"key":"月经量过多","value":""}
],
"value":"",
"scored":false,
"checkedStyle":"none"
},
{
"title":"您有其他消化道疾患吗?",
"summary":"",
"type":"checkbox",
"keys":[
{"key":"炎症性肠病,如克罗恩病、溃疡性结肠炎","value":"炎症性肠病"},
{"key":"导致食管排空延迟的食管异常","value":""}
],
"value":"",
"scored":false,
"checkedStyle":"none"
},
{
"title":"您的肝脏正常吗?",
"summary":"",
"type":"checkbox",
"keys":[
{"key":"肝功能不全","value":""},
{"key":"急性肝病","value":""},
{"key":"慢性肝病或最近曾患肝炎者","value":""},
{"key":"GGT和转氨酶升高", "value":""},
{"key":"伴凝血异常和临床相关出血风险的肝病","value":""},
{"key":"肝硬化","value":"肝硬化"},
{"key":"一过性转氨酶升高","value":""}
],
"value":"",
"scored":false,
"checkedStyle":"none"
},
{
"title":"您的肾脏正常吗?",
"summary":"",
"type":"checkbox",
"keys":[
{"key":"肾功能不全","value":""},
{"key":"肾结石","value":""}
],
"value":"",
"scored":false,
"checkedStyle":"none"
},
{
"title":"您的心血管正常吗?",
"summary":"",
"type":"checkbox",
"keys":[
{"key":"冠状动脉搭桥手术(CABG)围手术期","value":""},
{"key":"充血性心力衰竭","value":""},
{"key":"已确定的缺血性心脏疾病,外周动脉血管和/或脑血管疾病","value":""},
{"key":"急性心肌梗塞", "value":""},
{"key":"高血压","value":""},
{"key":"活动或近期的动脉、静脉血栓性疾病","value":""},
{"key":"急性心内膜炎","value":""},
{"key":"不稳定心绞痛","value":""}
],
"value":"",
"scored":false,
"checkedStyle":"none"
},
{
"title":"您近期有过外伤或接受过手术和麻醉吗?",
"summary":"",
"type":"checkbox",
"keys":[
{"key":"近期手术","value":""},
{"key":"蛛网膜下腔麻醉或硬膜外麻醉","value":""},
{"key":"外伤","value":""}
],
"value":"",
"scored":false,
"checkedStyle":"none"
},
{
"title":"您有精神疾病史吗?",
"summary":"",
"type":"checkbox",
"keys":[
{"key":"未能充分控制的癫痫","value":""},
{"key":"精神病","value":""}
],
"value":"",
"scored":false,
"checkedStyle":"none"
},
{
"title":"您有任何代谢性疾病吗?",
"summary":"",
"type":"checkbox",
"keys":[
{"key":"电解质代谢异常","value":""},
{"key":"低钙血症","value":""},
{"key":"高钙血症","value":""},
{"key":"高尿酸血症", "value":""},
{"key":"维生素D增多症","value":""},
{"key":"高磷血症伴肾性佝偻病","value":""},
{"key":"卟啉症","value":""},
{"key":"贫血","value":""}
],
"value":"",
"scored":false,
"checkedStyle":"none"
},
{
"title":"您有肿瘤相关疾病史吗?",
"summary":"",
"type":"checkbox",
"keys":[
{"key":"与雌激素有关的肿瘤","value":""},
{"key":"乳腺癌","value":""},
{"key":"良性或恶性肝脏肿瘤", "value":""}
],
"value":"",
"scored":false,
"checkedStyle":"none"
},
{
"title":"您有其他疾患吗?",
"summary":"",
"type":"checkbox",
"keys":[
{"key":"青光眼","value":""},
{"key":"不能站立或坐直至少30分钟","value":""},
{"key":"未治疗的子宫内膜增生","value":""},
{"key":"酒精、安眠药、镇痛剂或其它精神药物急性中毒", "value":""}
],
"value":"",
"scored":false,
"checkedStyle":"none"
}
]
},{
"label":"请根据患者实际自身情况选择对应项目,如有任何不清楚地方,请向专业医务人员询问",
"title":"四类药物禁忌(男性)",
"summary":"<div class='tips'>免责声明:<br/>本量表由对应的药品与MIMS网站内容整理而成<br/>请患者在医务工作者指导下进行填写,本量表最终结果仅供专业医务工作者参考</div>",
"items":[
{
"title":"对药物过敏",
"summary":"",
"type":"checkbox",
"keys":[
{"key":"对药物过敏","value":""}
],
"value":"",
"scored":false,
"checkedStyle":"none"
},
{
"title":"您有任何溃疡或出血症状吗",
"summary":"",
"type":"checkbox",
"keys":[
{"key":"消化道出血或溃疡","value":""},
{"key":"脑出血","value":""},
{"key":"注射部位血肿", "value":""},
{"key":"凝血异常伴出血倾向","value":""},
{"key":"应用非甾体抗炎药后发生胃肠道出血或穿孔病史","value":""},
{"key":"可能引起出血的器质性损伤","value":""},
{"key":"活动性出血","value":""},
{"key":"皮肤瘀斑,牙龈出血,鼻衄、伤口出血等","value":""}
],
"value":"",
"scored":false,
"checkedStyle":"none"
},
{
"title":"您有其他消化道疾患吗?",
"summary":"",
"type":"checkbox",
"keys":[
{"key":"炎症性肠病,如克罗恩病、溃疡性结肠炎","value":""},
{"key":"导致食管排空延迟的食管异常","value":""}
],
"value":"",
"scored":false,
"checkedStyle":"none"
},
{
"title":"您的肝脏正常吗?",
"summary":"",
"type":"checkbox",
"keys":[
{"key":"肝功能不全","value":""},
{"key":"急性肝病","value":""},
{"key":"慢性肝病或最近曾患肝炎者","value":""},
{"key":"GGT和转氨酶升高", "value":""},
{"key":"伴凝血异常和临床相关出血风险的肝病","value":""},
{"key":"肝硬化","value":"肝硬化"},
{"key":"一过性转氨酶升高","value":""}
],
"value":"",
"scored":false,
"checkedStyle":"none"
},
{
"title":"您的肾脏正常吗?",
"summary":"",
"type":"checkbox",
"keys":[
{"key":"肾功能不全","value":""},
{"key":"肾结石","value":""}
],
"value":"",
"scored":false,
"checkedStyle":"none"
},
{
"title":"您的心血管正常吗?",
"summary":"",
"type":"checkbox",
"keys":[
{"key":"冠状动脉搭桥手术(CABG)围手术期","value":""},
{"key":"充血性心力衰竭","value":""},
{"key":"已确定的缺血性心脏疾病,外周动脉血管和/或脑血管疾病","value":""},
{"key":"急性心肌梗塞", "value":""},
{"key":"高血压","value":""},
{"key":"活动或近期的动脉、静脉血栓性疾病","value":""},
{"key":"急性心内膜炎","value":""},
{"key":"不稳定心绞痛","value":""}
],
"value":"",
"scored":false,
"checkedStyle":"none"
},
{
"title":"您近期有过外伤或接受过手术和麻醉吗?",
"summary":"",
"type":"checkbox",
"keys":[
{"key":"近期手术","value":""},
{"key":"蛛网膜下腔麻醉或硬膜外麻醉","value":""},
{"key":"外伤","value":""}
],
"value":"",
"scored":false,
"checkedStyle":"none"
},
{
"title":"您有精神疾病史吗?",
"summary":"",
"type":"checkbox",
"keys":[
{"key":"未能充分控制的癫痫","value":""},
{"key":"精神病","value":""}
],
"value":"",
"scored":false,
"checkedStyle":"none"
},
{
"title":"您有任何代谢性疾病吗?",
"summary":"",
"type":"checkbox",
"keys":[
{"key":"电解质代谢异常","value":""},
{"key":"低钙血症","value":""},
{"key":"高钙血症","value":""},
{"key":"高尿酸血症", "value":""},
{"key":"维生素D增多症","value":""},
{"key":"高磷血症伴肾性佝偻病","value":""},
{"key":"卟啉症","value":""},
{"key":"贫血","value":""}
],
"value":"",
"scored":false,
"checkedStyle":"none"
},
{
"title":"您有肿瘤相关疾病史吗?",
"summary":"",
"type":"checkbox",
"keys":[
{"key":"与雌激素有关的肿瘤","value":""},
{"key":"前列腺癌","value":""},
{"key":"良性或恶性肝脏肿瘤", "value":""}
],
"value":"",
"scored":false,
"checkedStyle":"none"
},
{
"title":"您有其他疾患吗?",
"summary":"",
"type":"checkbox",
"keys":[
{"key":"青光眼","value":""},
{"key":"不能站立或坐直至少30分钟","value":""},
{"key":"酒精、安眠药、镇痛剂或其它精神药物急性中毒", "value":""}
],
"value":"",
"scored":false,
"checkedStyle":"none"
}
]
},
{
"title": "Harris 髋关节功能评分",
"summary": "<div class='tips'>免责声明:<br/>本量表参考<br/>1. http://www.chinaqking.com/yc/2013/368485.html <br/>2.http://www.orthopaedicscore.com/scorepages/harris_hip_score.html<br/>请患者在医务工作者指导下进行填写,本量表最终结果仅供专业医务工作者参考</div>",
"items": [
{
"title": "疼痛",
"summary": "",
"type": "radio",
"keys": [{
"key": "无",
"value": 44
}, {
"key": "轻微",
"value": 40
}, {
"key": "轻度,偶服止痛药",
"value": 30
}, {
"key": "轻度,常服止痛药",
"value": 20
},{
"key": "重度,活动受限",
"value": 10
},{
"key": "不能活动",
"value": 0
}],
"value": "",
"scored": true,
"checkedStyle": "dot"
}, {
"title": "跛行",
"summary": "",
"type": "radio",
"keys": [{
"key": "无",
"value": 11
}, {
"key": "轻度",
"value": 8
}, {
"key": "中度",
"value": 5
}, {
"key": "重度",
"value": 0
}, {
"key": "不能行走",
"value": 0
}],
"value": "",
"scored": true,
"checkedStyle": "dot"
}, {
"title": "行走时辅助",
"summary": "",
"type": "radio",
"keys": [{
"key": "不用",
"value": 11
}, {
"key": "长距离用一个手杖",
"value": 7
}, {
"key": "单拐",
"value": 3
}, {
"key": "2个手杖",
"value": 2
}, {
"key": "2个拐杖",
"value": 0
}, {
"key": "不能行走",
"value": 0
}],
"value": "",
"scored": true,
"checkedStyle": "dot"
}, {
"title": "行走距离",
"summary": "",
"type": "radio",
"keys": [{
"key": "不受限",
"value": 11
}, {
"key": "3公里以上",
"value": 8
}, {
"key": "1000米-1500米",
"value": 5
}, {
"key": "室内活动",
"value": 2
}, {
"key": "卧床或坐椅",
"value": 0
}],
"value": "",
"scored": true,
"checkedStyle": "dot"
}, {
"title": "上楼梯",
"summary": "",
"type": "radio",
"keys": [{
"key": "一步一阶,不用扶手",
"value": 4
}, {
"key": "一步一阶,用扶手",
"value": 2
}, {
"key": "某种方式上楼",
"value": 1
}, {
"key": "不能上楼",
"value": 0
}, {
"key": "卧床或坐椅",
"value": 0
}],
"value": "",
"scored": true,
"checkedStyle": "dot"
}, {
"title": "穿袜子,系鞋带",
"summary": "",
"type": "radio",
"keys": [{
"key": "容易",
"value": 4
}, {
"key": "困难",
"value": 2
}, {
"key": "不能",
"value": 0
}],
"value": "",
"scored": true,
"checkedStyle": "dot"
}, {
"title": "坐椅子",
"summary": "",
"type": "radio",
"keys": [{
"key": "任何角度坐椅子,没有不适",
"value": 5
}, {
"key": "高椅子坐半个小时以上",
"value": 3
}, {
"key": "坐椅子不能超过半小时",
"value": 0
}],
"value": "",
"scored": true,
"checkedStyle": "dot"
}, {
"title": "公共交通",
"summary": "",
"type": "radio",
"keys": [{
"key": "上公共交通",
"value": 1
}, {
"key": "不能上公共交通",
"value": 0
}],
"value": "",
"scored": true,
"checkedStyle": "dot"
}, {
"title": "Harris-畸形<br/>是否具备下列所有四条 <br/>固定内收畸形<10°,<br/>固定内旋畸形<10°,<br/>肢体短缩<3.2CM,<br/>固定屈曲畸形<30°",
"summary": "",
"type": "radio",
"keys": [{
"key": "具备",
"value": 4
}, {
"key": "不具备",
"value": 0
}],
"value": "",
"scored": true,
"checkedStyle": "dot"
}, {
"title": "屈曲角度",
"summary": "",
"type": "radio",
"keys": [{
"key": "不能",
"value": 0
},{
"key": "0°-8°",
"value": 0.4
},{
"key": "9°-16°",
"value": 0.8
},{
"key": "17°-24°",
"value": 1.2
},{
"key": "25°-32°",
"value": 1.6
},{
"key": "33°-40°",
"value": 2
},{
"key": "41°-45°",
"value": 2.25
},{
"key": "46°-55°",
"value": 2.55
},{
"key": "56°-65°",
"value": 2.85
},{
"key": "66°-70°",
"value": 3
},{
"key": "71°-75°",
"value": 3.15
},{
"key": "76°-80°",
"value": 3.3
},{
"key": "81°-90°",
"value": 3.6
},{
"key": "91°-100°",
"value": 3.75
},{
"key": "101°-110°",
"value": 3.9
}],
"value": "",
"scored": true,
"checkedStyle": "dot"
}, {
"title": "外转角度",
"summary": "",
"type": "radio",
"keys": [{
"key": "不能",
"value": 0
}, {
"key": "0°-5°",
"value": 0.2
}, {
"key": "6°-10°",
"value": 0.4
}, {
"key": "11°-15°",
"value": 0.6
}, {
"key": "16°-20°",
"value": 0.65
}],
"value": "",
"scored": true,
"checkedStyle": "dot"
}, {
"title": "外旋角度",
"summary": "",
"type": "radio",
"keys": [{
"key": "不能",
"value": 0
}, {
"key": "0°-5°",
"value": 0.1
}, {
"key": "6°-10°",
"value": 0.2
}],
"value": "",
"scored": true,
"checkedStyle": "dot"
}
]
}
]
|
#!/bin/bash
# Regenerates docs/selenese-runner-command-list.txt from a freshly built jar.
# Expected to be run from a subdirectory of the Maven project root (hence cd ..).
set -eux
cd ..
# NOTE(review): "mvn -P package" activates the 'package' profile but names no
# lifecycle phase/goal; Maven normally rejects an invocation without one
# (e.g. "mvn -P package package") -- confirm this works for this project.
mvn -P package
# Dump every Selenese command, keep only the first comma-separated field of
# each line, normalize CRLF to LF, and write the list consumed by the docs.
java -cp target/selenese-runner.jar jp.vmi.selenium.selenese.utils.CommandDumper | sed 's/,.*//' | dos2unix > docs/selenese-runner-command-list.txt
|
# Termux package metadata for git.
TERMUX_PKG_HOMEPAGE=https://git-scm.com/
TERMUX_PKG_DESCRIPTION="Fast, scalable, distributed revision control system"
TERMUX_PKG_LICENSE="GPL-2.0"
TERMUX_PKG_MAINTAINER="@termux"
TERMUX_PKG_VERSION=2.35.1
TERMUX_PKG_REVISION=2
# Upstream tarball; the SHA256 below must be updated together with the version.
TERMUX_PKG_SRCURL=https://mirrors.kernel.org/pub/software/scm/git/git-${TERMUX_PKG_VERSION}.tar.xz
TERMUX_PKG_SHA256=d768528e6443f65a203036266f1ca50f9d127ba89751e32ead37117ed9191080
TERMUX_PKG_DEPENDS="libcurl, libiconv, less, openssl, pcre2, zlib"
## This requires a working $TERMUX_PREFIX/bin/sh on the host building:
# The ac_cv_* entries pre-seed autoconf cache values that cannot be probed
# when cross-compiling for Android.
TERMUX_PKG_EXTRA_CONFIGURE_ARGS="
ac_cv_fread_reads_directories=yes
ac_cv_header_libintl_h=no
ac_cv_iconv_omits_bom=no
ac_cv_snprintf_returns_bogus=no
--with-curl
--with-shell=$TERMUX_PREFIX/bin/sh
--with-tcltk=$TERMUX_PREFIX/bin/wish
"
# expat is only used by git-http-push for remote lock management over DAV, so disable:
# NO_INSTALL_HARDLINKS to use symlinks instead of hardlinks (which does not work on Android M):
TERMUX_PKG_EXTRA_MAKE_ARGS="
NO_NSEC=1
NO_GETTEXT=1
NO_EXPAT=1
NO_INSTALL_HARDLINKS=1
PERL_PATH=$TERMUX_PREFIX/bin/perl
USE_LIBPCRE2=1
"
TERMUX_PKG_BUILD_IN_SRC=true
# Things to remove to save space:
# bin/git-cvsserver - server emulating CVS
# bin/git-shell - restricted login shell for Git-only SSH access
TERMUX_PKG_RM_AFTER_INSTALL="
bin/git-cvsserver
bin/git-shell
libexec/git-core/git-shell
libexec/git-core/git-cvsserver
share/man/man1/git-cvsserver.1
share/man/man1/git-shell.1
"
# Pre-configure hook: guards against on-device builds and prepares a host
# perl interpreter plus a clean tree for the generated perl modules.
termux_step_pre_configure() {
	# Certain packages are not safe to build on device because their
	# build.sh script deletes specific files in $TERMUX_PREFIX.
	if $TERMUX_ON_DEVICE_BUILD; then
		termux_error_exit "Package '$TERMUX_PKG_NAME' is not safe for on-device builds."
	fi
	# Setup perl so that the build process can execute it:
	rm -f $TERMUX_PREFIX/bin/perl
	ln -s $(command -v perl) $TERMUX_PREFIX/bin/perl
	# Force fresh perl files (otherwise files from earlier builds
	# remains without bumped modification times, so are not picked
	# up by the package):
	rm -Rf $TERMUX_PREFIX/share/git-perl
	# Fixes build if utfcpp is installed:
	CPPFLAGS="-I$TERMUX_PKG_SRCDIR $CPPFLAGS"
}
# Build step: compiles git itself and the contrib/subtree helper, passing the
# package-specific make flags (NO_GETTEXT, PERL_PATH, ...) to both.
termux_step_make() {
	make -j $TERMUX_MAKE_PROCESSES $TERMUX_PKG_EXTRA_MAKE_ARGS
	make -j $TERMUX_MAKE_PROCESSES -C contrib/subtree $TERMUX_PKG_EXTRA_MAKE_ARGS
}
# Install step: installs binaries plus the pre-built man pages.
termux_step_make_install() {
	make $TERMUX_PKG_EXTRA_MAKE_ARGS install
	make -C contrib/subtree $TERMUX_PKG_EXTRA_MAKE_ARGS install
	# Installing man requires asciidoc and xmlto, so git uses separate make targets for man pages
	# NOTE(review): the install-man invocations omit $TERMUX_PKG_EXTRA_MAKE_ARGS,
	# unlike every other make call here -- confirm that is intentional.
	make -j $TERMUX_MAKE_PROCESSES install-man
	make -j $TERMUX_MAKE_PROCESSES -C contrib/subtree install-man
}
# Post-install step: ships bash completion, removes build-host artifacts, and
# deduplicates the git binaries via symlinks.
termux_step_post_make_install() {
	mkdir -p $TERMUX_PREFIX/etc/bash_completion.d/
	cp $TERMUX_PKG_SRCDIR/contrib/completion/git-completion.bash \
		$TERMUX_PKG_SRCDIR/contrib/completion/git-prompt.sh \
		$TERMUX_PREFIX/etc/bash_completion.d/
	# Remove the build machine perl setup in termux_step_pre_configure to avoid it being packaged:
	rm $TERMUX_PREFIX/bin/perl
	# Remove clutter:
	rm -Rf $TERMUX_PREFIX/lib/*-linux*/perl
	# Remove duplicated binaries in bin/ with symlink to the one in libexec/git-core:
	(cd $TERMUX_PREFIX/bin; ln -s -f ../libexec/git-core/git git)
	(cd $TERMUX_PREFIX/bin; ln -s -f ../libexec/git-core/git-upload-pack git-upload-pack)
}
# Sanity check after packaging: the https remote helper must exist, otherwise
# the curl/openssl dependencies were not picked up and the build is broken.
termux_step_post_massage() {
	if [ ! -f libexec/git-core/git-remote-https ]; then
		termux_error_exit "Git built without https support"
	fi
}
|
<filename>assembler/assembly-parser.cpp<gh_stars>1-10
/*
* Copyright (c) 2021 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <cctype>
#include <errno.h>
#include <iterator>
#include "assembly-type.h"
#include "ins_emit.h"
#include "modifiers.h"
#include "opcode_parsing.h"
#include "operand_types_print.h"
#include "utils/number-utils.h"
namespace panda::pandasm {
// Parses one line of a record body: opens the body on '{', closes it on '}',
// otherwise expects a single ".field"-style declaration and appends it to the
// current record's field list. Returns false and sets context_.err on failure.
bool Parser::ParseRecordFields()
{
    // A leading '{' on this line opens the record body.
    if (!open_ && *context_ == Token::Type::DEL_BRACE_L) {
        curr_record_->body_location.begin = GetCurrentPosition(false);
        open_ = true;
        ++context_;
    }
    curr_record_->body_presence = true;
    if (!open_) {
        context_.err = GetError("Expected keyword.", Error::ErrorType::ERR_BAD_KEYWORD);
        return false;
    }
    // Nothing left on this line: the '{' (if any) was all there was.
    if (context_.Mask()) {
        return true;
    }
    // A '}' closes the body with no field on this line.
    if (open_ && *context_ == Token::Type::DEL_BRACE_R) {
        curr_record_->body_location.end = GetCurrentPosition(true);
        ++context_;
        open_ = false;
        return true;
    }
    // Reserve a new field slot; ParseRecordField() fills it in.
    curr_record_->field_list.emplace_back(program_.lang);
    curr_fld_ = &(curr_record_->field_list[curr_record_->field_list.size() - 1]);
    curr_fld_->line_of_def = line_stric_;
    context_.ins_number = curr_record_->field_list.size();
    LOG(DEBUG, ASSEMBLER) << "parse line " << line_stric_ << " as field (.field name)";
    if (!ParseRecordField()) {
        if (context_.err.err != Error::ErrorType::ERR_NONE) {
            return false;
        }
        // Field parse stopped without error: the only legal leftover is the
        // closing brace of the record body.
        if (open_ && *context_ == Token::Type::DEL_BRACE_R) {
            curr_record_->body_location.end = GetCurrentPosition(true);
            ++context_;
            open_ = false;
        } else {
            context_.err = GetError("Expected a new field on the next line.", Error::ErrorType::ERR_BAD_KEYWORD);
            return false;
        }
    }
    return true;
}
// Parses the name of the field currently being declared. A previously seen,
// still-undefined field with the same name is dropped so that this definition
// replaces it; a defined duplicate is an error. On success the name is stored
// in curr_fld_ and the token cursor advances.
bool Parser::ParseFieldName()
{
    if (PrefixedValidName()) {
        std::string field_name = std::string(context_.GiveToken().data(), context_.GiveToken().length());
        auto match_names = [&field_name](const pandasm::Field &f) { return field_name == f.name; };
        const auto iter = std::find_if(curr_record_->field_list.begin(), curr_record_->field_list.end(), match_names);
        if (iter != curr_record_->field_list.end()) {
            if (iter->is_defined) {
                context_.err =
                    GetError("Repeated field names in the same record.", Error::ErrorType::ERR_REPEATING_FIELD_NAME);
                return false;
            }
            // Forward-declared only: remove the placeholder entry.
            curr_record_->field_list.erase(iter);
        }
        // erase() may have invalidated curr_fld_; re-point it at the slot
        // emplaced by ParseRecordFields(), which is now the last element.
        curr_fld_ = &(curr_record_->field_list[curr_record_->field_list.size() - 1]);
        curr_fld_->name = field_name;
        ++context_;
        return true;
    }
    context_.err = GetError("Invalid field name.", Error::ErrorType::ERR_BAD_OPERATION_NAME);
    return false;
}
// Parses a type token optionally followed by "[]" pairs (one pair per array
// dimension) and writes the result into *type. Array types are additionally
// registered in program_.array_types. Returns false on a malformed "[".
bool Parser::ParseType(Type *type)
{
    ASSERT(TypeValidName());

    std::string element_name(context_.GiveToken());
    ++context_;

    size_t dimensions = 0;
    for (; *context_ == Token::Type::DEL_SQUARE_BRACKET_L; ++dimensions) {
        ++context_;
        if (*context_ != Token::Type::DEL_SQUARE_BRACKET_R) {
            context_.err = GetError("Expected ']'.", Error::ErrorType::ERR_BAD_ARRAY_TYPE_BOUND);
            return false;
        }
        ++context_;
    }

    *type = Type(element_name, dimensions);
    if (type->IsArray()) {
        program_.array_types.insert(*type);
    }
    return true;
}
// Parses the type part of a field declaration into curr_fld_->type and
// mirrors it into the field's metadata. Returns false with context_.err set
// when the token is not a valid type.
bool Parser::ParseFieldType()
{
    LOG(DEBUG, ASSEMBLER) << "started searching for field type value (line " << line_stric_
                          << "): " << context_.tokens[context_.number - 1].whole_line;
    if (!TypeValidName()) {
        context_.err = GetError("Not a correct type.", Error::ErrorType::ERR_BAD_FIELD_VALUE_TYPE);
        return false;
    }
    if (!ParseType(&curr_fld_->type)) {
        return false;
    }
    curr_fld_->metadata->SetFieldType(curr_fld_->type);
    LOG(DEBUG, ASSEMBLER) << "field type found (line " << line_stric_ << "): " << context_.GiveToken();
    return true;
}
// Parses one complete field declaration: type, name, and optional metadata.
// A trailing '}' instead of metadata closes the record body. Returns true only
// when the line was fully consumed.
bool Parser::ParseRecordField()
{
    if (!ParseFieldType()) {
        return false;
    }
    if (context_.Mask()) {
        context_.err = GetError("Expected field name.", Error::ErrorType::ERR_BAD_FIELD_MISSING_NAME, +1);
        return false;
    }
    if (!ParseFieldName()) {
        return false;
    }
    // "type name }" on one line: close the record body here.
    if (open_ && *context_ == Token::Type::DEL_BRACE_R) {
        curr_record_->body_location.end = GetCurrentPosition(true);
        ++context_;
        open_ = false;
        return true;
    }
    // Remaining tokens (if any) are the field's metadata annotations.
    metadata_ = curr_fld_->metadata.get();
    ParseMetaDef();
    // Success means every token on the line has been consumed.
    return context_.Mask();
}
// Parses one line of a function body: opens the body on '{', closes it on
// '}', otherwise parses a single instruction line
// ([label:] operation [operands] [# comment]) into curr_func_->ins.
bool Parser::ParseFunctionCode()
{
    // A leading '{' opens the function body.
    if (!open_ && *context_ == Token::Type::DEL_BRACE_L) {
        open_ = true;
        curr_func_->body_location.begin = GetCurrentPosition(false);
        ++context_;
    }
    curr_func_->body_presence = true;
    if (!open_) {
        context_.err = GetError("Expected keyword.", Error::ErrorType::ERR_BAD_KEYWORD);
        return false;
    }
    // Line exhausted: only the brace was present.
    if (context_.Mask()) {
        return true;
    }
    // '}' with no instruction closes the body.
    if (open_ && *context_ == Token::Type::DEL_BRACE_R) {
        curr_func_->body_location.end = GetCurrentPosition(true);
        ++context_;
        open_ = false;
        return true;
    }
    // Reserve the instruction slot, then let ParseFunctionInstruction fill it.
    curr_ins_ = &curr_func_->ins.emplace_back();
    LOG(DEBUG, ASSEMBLER) << "parse line " << line_stric_
                          << " as instruction ([label:] operation [operand,] [# comment])";
    ParseFunctionInstruction();
    // The instruction may be followed by the closing brace on the same line.
    if (open_ && *context_ == Token::Type::DEL_BRACE_R) {
        curr_func_->body_location.end = GetCurrentPosition(true);
        ++context_;
        open_ = false;
    }
    return true;
}
// Handles a ".record" directive line: parses the record signature and
// metadata, then an optional inline body ("{ ... }" possibly spanning the
// same line). Nested record definitions are rejected. Errors are reported
// via context_.err.
void Parser::ParseAsRecord(const std::vector<Token> &tokens)
{
    LOG(DEBUG, ASSEMBLER) << "started parsing of record (line " << line_stric_ << "): " << tokens[0].whole_line;
    func_def_ = false;
    record_def_ = true;
    if (!open_) {
        ++context_;
    } else {
        // open_ still set: we are inside another record's body.
        context_.err =
            GetError("No record can be defined inside another record.", Error::ErrorType::ERR_BAD_DEFINITION);
        return;
    }
    if (ParseRecordFullSign()) {
        metadata_ = curr_record_->metadata.get();
        if (ParseMetaDef()) {
            // Optional '{' after the metadata opens the body on this line.
            if (!open_ && *context_ == Token::Type::DEL_BRACE_L) {
                curr_record_->body_location.begin = GetCurrentPosition(false);
                ++context_;
                open_ = true;
                LOG(DEBUG, ASSEMBLER) << "record body is open, line " << line_stric_ << ": " << tokens[0].whole_line;
            }
            // Inline fields may follow on the same line.
            if (open_ && !context_.Mask() && *context_ != Token::Type::DEL_BRACE_R) {
                ParseRecordFields();
            } else if (open_) {
                curr_record_->body_presence = true;
            }
            // The body may also close on the same line.
            if (open_ && *context_ == Token::Type::DEL_BRACE_R) {
                LOG(DEBUG, ASSEMBLER) << "record body is closed, line " << line_stric_ << ": " << tokens[0].whole_line;
                curr_record_->body_location.end = GetCurrentPosition(true);
                ++context_;
                open_ = false;
            }
        }
    }
}
// Handles a ".function" directive line: parses the full function signature
// and metadata, then an optional inline body. Structure mirrors
// ParseAsRecord(); nested function definitions are rejected.
void Parser::ParseAsFunction(const std::vector<Token> &tokens)
{
    LOG(DEBUG, ASSEMBLER) << "started parsing of function (line " << line_stric_ << "): " << tokens[0].whole_line;
    record_def_ = false;
    func_def_ = true;
    if (!open_) {
        ++context_;
    } else {
        // open_ still set: we are inside another function's body.
        context_.err =
            GetError("No one function can be defined inside another function.", Error::ErrorType::ERR_BAD_DEFINITION);
        return;
    }
    if (ParseFunctionFullSign()) {
        metadata_ = curr_func_->metadata.get();
        if (ParseMetaDef()) {
            // Optional '{' after the metadata opens the body on this line.
            if (!open_ && *context_ == Token::Type::DEL_BRACE_L) {
                curr_func_->body_location.begin = GetCurrentPosition(false);
                ++context_;
                open_ = true;
                LOG(DEBUG, ASSEMBLER) << "function body is open, line " << line_stric_ << ": " << tokens[0].whole_line;
            }
            // Inline instructions may follow on the same line.
            if (open_ && !context_.Mask() && *context_ != Token::Type::DEL_BRACE_R) {
                ParseFunctionCode();
            } else if (open_) {
                curr_func_->body_presence = true;
            }
            // The body may also close on the same line.
            if (open_ && *context_ == Token::Type::DEL_BRACE_R) {
                LOG(DEBUG, ASSEMBLER) << "function body is closed, line " << line_stric_ << ": "
                                      << tokens[0].whole_line;
                curr_func_->body_location.end = GetCurrentPosition(true);
                ++context_;
                open_ = false;
            }
        }
    }
}
// Handles a line that starts with '}': closes the currently open function or
// record body and records its end position. A stray '}' outside any body is
// an error; neither func_def_ nor record_def_ being set is an internal bug.
void Parser::ParseAsBraceRight(const std::vector<Token> &tokens)
{
    if (!open_) {
        context_.err =
            GetError("Delimiter '}' for the code area is outside a function.", Error::ErrorType::ERR_BAD_BOUND);
        return;
    }
    LOG(DEBUG, ASSEMBLER) << "body is closed (line " << line_stric_ << "): " << tokens[0].whole_line;
    open_ = false;
    if (func_def_) {
        curr_func_->body_location.end = GetCurrentPosition(true);
    } else if (record_def_) {
        curr_record_->body_location.end = GetCurrentPosition(true);
    } else {
        LOG(FATAL, ASSEMBLER) << "Internal error: either function or record must be parsed here";
    }
    ++context_;
}
// Post-parse fixup, run once parsing is complete (no body left open, no
// pending error): verifies that every referenced label was defined, then
// rewrites 'aN' argument registers into absolute register numbers by adding
// the function's first-parameter offset, checking encoding-width limits.
void Parser::ParseResetFunctionLabelsAndParams()
{
    if (open_ || err_.err != Error::ErrorType::ERR_NONE) {
        return;
    }
    // Pass 1: any label that was referenced but never defined is an error.
    for (const auto &f : program_.function_table) {
        for (const auto &k : f.second.label_table) {
            if (!k.second.file_location->is_defined) {
                context_.err = Error("This label does not exist.", line_stric_, Error::ErrorType::ERR_BAD_LABEL_EXT, "",
                                     k.second.file_location->bound_left, k.second.file_location->bound_right,
                                     k.second.file_location->whole_line);
                SetError();
            }
        }
    }
    // Pass 2: for each recorded (instruction index, register slot) that used
    // an 'a' register, shift it past the function's own registers.
    for (const auto &t : context_.function_arguments_lists) {
        curr_func_ = &(program_.function_table.at(t.first));
        curr_func_->regs_num = curr_func_->value_of_first_param + 1;
        for (const auto &v : t.second) {
            // NOTE(review): v.first is treated as a 1-based instruction index
            // (v.first - 1 below) -- assumes ins_number was never 0 when the
            // entry was recorded; confirm against ParseOperandVreg/callers.
            if (!curr_func_->ins.empty() && curr_func_->ins.size() >= v.first &&
                !curr_func_->ins[v.first - 1].regs.empty()) {
                curr_func_->ins[v.first - 1].regs[v.second] += curr_func_->value_of_first_param + 1;
                // The rewritten register must still fit in the instruction's
                // register-operand encoding width.
                size_t max_reg_number = (1 << curr_func_->ins[v.first - 1].MaxRegEncodingWidth());
                if (curr_func_->ins[v.first - 1].regs[v.second] >= max_reg_number) {
                    const auto &debug = curr_func_->ins[v.first - 1].ins_debug;
                    context_.err =
                        Error("Register width mismatch.", debug.line_number, Error::ErrorType::ERR_BAD_NAME_REG, "",
                              debug.bound_left, debug.bound_right, debug.whole_line);
                    SetError();
                    break;
                }
            }
        }
    }
}
// Post-parse validation of the function table:
//  1) every referenced function must have been defined,
//  2) a function's declared implementation must match its parsed body,
//  3) every static call must pass at least as many operands as the callee
//     declares parameters.
// Errors are latched through SetError(); validation continues across entries.
void Parser::ParseResetFunctionTable()
{
    for (const auto &k : program_.function_table) {
        if (!k.second.file_location->is_defined) {
            context_.err = Error("This function does not exist.", k.second.file_location->line_number,
                                 Error::ErrorType::ERR_BAD_ID_FUNCTION, "", k.second.file_location->bound_left,
                                 k.second.file_location->bound_right, k.second.file_location->whole_line);
            SetError();
        } else if (k.second.HasImplementation() != k.second.body_presence) {
            context_.err =
                Error("Inconsistent definition of the function and its metadata.", k.second.file_location->line_number,
                      Error::ErrorType::ERR_BAD_DEFINITION_FUNCTION, "", k.second.file_location->bound_left,
                      k.second.file_location->bound_right, k.second.file_location->whole_line);
            SetError();
        } else {
            for (auto insn_it = k.second.ins.begin(); insn_it != k.second.ins.end(); ++insn_it) {
                // Dynamic calls are excluded from the argument-count check,
                // as are all non-call instructions.
                bool is_calli = insn_it->opcode == Opcode::CALLI_DYN || insn_it->opcode == Opcode::CALLI_DYN_SHORT ||
                                insn_it->opcode == Opcode::CALLI_DYN_RANGE;
                if (is_calli || !insn_it->IsCall()) {
                    continue;
                }
                // diff accounts for the operand slot presumably taken by the
                // callee id on non-initobj calls -- confirm against the ISA.
                bool is_initobj = insn_it->opcode == Opcode::INITOBJ || insn_it->opcode == Opcode::INITOBJ_SHORT ||
                                  insn_it->opcode == Opcode::INITOBJ_RANGE;
                auto diff = is_initobj ? 0 : 1;
                if (insn_it->OperandListLength() - diff < program_.function_table.at(insn_it->ids[0]).GetParamsNum()) {
                    // Bug fix: take the debug info from the instruction being
                    // validated. The previous code read curr_func_->ins at the
                    // same index, but curr_func_ points at whichever function
                    // was parsed *last*, not at k.second, so the reported
                    // location could come from an unrelated function (or read
                    // out of bounds).
                    const auto &debug = insn_it->ins_debug;
                    context_.err = Error("Function argument mismatch.", debug.line_number,
                                         Error::ErrorType::ERR_FUNCTION_ARGUMENT_MISMATCH, "", debug.bound_left,
                                         debug.bound_right, debug.whole_line);
                    SetError();
                }
            }
        }
    }
}
// Post-parse validation of the record table: every referenced record must be
// defined, its body presence must match its metadata, and every one of its
// fields must be defined. Errors are latched via SetError().
void Parser::ParseResetRecordTable()
{
    for (const auto &k : program_.record_table) {
        if (!k.second.file_location->is_defined) {
            context_.err = Error("This record does not exist.", k.second.file_location->line_number,
                                 Error::ErrorType::ERR_BAD_ID_RECORD, "", k.second.file_location->bound_left,
                                 k.second.file_location->bound_right, k.second.file_location->whole_line);
            SetError();
        } else if (k.second.HasImplementation() != k.second.body_presence) {
            context_.err = Error("Inconsistency of the definition of the record and its metadata.",
                                 k.second.file_location->line_number, Error::ErrorType::ERR_BAD_DEFINITION_RECORD, "",
                                 k.second.file_location->bound_left, k.second.file_location->bound_right,
                                 k.second.file_location->whole_line);
            SetError();
        } else {
            for (const auto &fld : k.second.field_list) {
                if (!fld.is_defined) {
                    context_.err =
                        Error("This field does not exist.", fld.line_of_def, Error::ErrorType::ERR_BAD_ID_FIELD, "",
                              fld.bound_left, fld.bound_right, fld.whole_line);
                    SetError();
                }
            }
        }
    }
}
void Parser::ParseResetTables()
{
if (err_.err != Error::ErrorType::ERR_NONE) {
return;
}
ParseResetFunctionTable();
if (err_.err != Error::ErrorType::ERR_NONE) {
return;
}
ParseResetRecordTable();
}
// Parses the ".language <lang>" directive: the next token must be a known
// language name and must be the last token on the line.
void Parser::ParseAsLanguageDirective()
{
    ++context_;
    if (context_.Mask()) {
        context_.err = GetError("Incorrect .language directive: Expected language",
                                Error::ErrorType::ERR_BAD_DIRECTIVE_DECLARATION);
        return;
    }
    auto lang = context_.GiveToken();
    auto res = extensions::LanguageFromString(lang);
    if (!res) {
        context_.err =
            GetError("Incorrect .language directive: Unknown language", Error::ErrorType::ERR_UNKNOWN_LANGUAGE);
        return;
    }
    ++context_;
    // Trailing tokens after the language name are an error.
    // NOTE(review): the error is recorded but program_.lang is still assigned
    // below (no early return) -- confirm this fall-through is intentional.
    if (!context_.Mask()) {
        context_.err = GetError("Incorrect .language directive: Unexpected token",
                                Error::ErrorType::ERR_BAD_DIRECTIVE_DECLARATION);
    }
    program_.lang = res.value();
}
// Builds a Function::CatchBlock from the tokens of a .catch/.catchall line.
// Expects (for .catch) an already-validated exception record name followed by
// comma-separated labels: try-begin, try-end, catch-begin, and optionally
// catch-end. When catch-end is absent it defaults to the catch-begin label.
// On a malformed list, context_.err is set and a partially filled block is
// returned (the caller checks context_.err).
Function::CatchBlock Parser::PrepareCatchBlock(bool is_catchall, size_t size, size_t catchall_tokens_num,
                                               size_t catch_tokens_num)
{
    // Indices into 'labels' below.
    constexpr size_t TRY_BEGIN = 0;
    constexpr size_t TRY_END = 1;
    constexpr size_t CATCH_BEGIN = 2;
    constexpr size_t CATCH_END = 3;
    Function::CatchBlock catch_block;
    catch_block.whole_line = context_.tokens[0].whole_line;
    std::vector<std::string> label_names {"try block begin", "try block end", "catch block begin"};
    std::vector<std::string> labels;
    // The "full" form carries an explicit catch-end label (token counts were
    // validated by the caller).
    bool full_catch_block = (is_catchall && size == catchall_tokens_num) || (!is_catchall && size == catch_tokens_num);
    if (full_catch_block) {
        label_names.emplace_back("catch block end");
    }
    if (!is_catchall) {
        // .catch names the exception record before the label list.
        catch_block.exception_record = context_.GiveToken();
        ++context_;
    }
    // For .catchall the first label is not preceded by a comma.
    bool skip_comma = is_catchall;
    for (auto label_name : label_names) {
        if (!skip_comma) {
            if (*context_ != Token::Type::DEL_COMMA) {
                context_.err = GetError("Expected comma.", Error::ErrorType::ERR_BAD_DIRECTIVE_DECLARATION);
                return catch_block;
            }
            ++context_;
        }
        skip_comma = false;
        if (!LabelValidName()) {
            context_.err =
                GetError(std::string("Invalid name of the ") + label_name + " label.", Error::ErrorType::ERR_BAD_LABEL);
            return catch_block;
        }
        labels.emplace_back(context_.GiveToken());
        // Register the label reference (not a definition) in the label table.
        AddObjectInTable(false, *label_table_);
        ++context_;
    }
    // The caller's token-count check guarantees the line is now exhausted.
    ASSERT(context_.Mask());
    catch_block.try_begin_label = labels[TRY_BEGIN];
    catch_block.try_end_label = labels[TRY_END];
    catch_block.catch_begin_label = labels[CATCH_BEGIN];
    if (full_catch_block) {
        catch_block.catch_end_label = labels[CATCH_END];
    } else {
        // Short form: catch block is a single point at catch-begin.
        catch_block.catch_end_label = labels[CATCH_BEGIN];
    }
    return catch_block;
}
void Parser::ParseAsCatchDirective()
{
ASSERT(*context_ == Token::Type::ID_CATCH || *context_ == Token::Type::ID_CATCHALL);
constexpr size_t CATCH_DIRECTIVE_TOKENS_NUM = 8;
constexpr size_t CATCHALL_DIRECTIVE_TOKENS_NUM = 6;
constexpr size_t CATCH_FULL_DIRECTIVE_TOKENS_NUM = 10;
constexpr size_t CATCHALL_FULL_DIRECTIVE_TOKENS_NUM = 8;
bool is_catchall = *context_ == Token::Type::ID_CATCHALL;
size_t size = context_.tokens.size();
if (is_catchall && size != CATCHALL_DIRECTIVE_TOKENS_NUM && size != CATCHALL_FULL_DIRECTIVE_TOKENS_NUM) {
context_.err = GetError(
"Incorrect catch block declaration. Must be in the format: .catchall <try_begin_label>, <try_end_label>, "
"<catch_begin_label>[, <catch_end_label>]",
Error::ErrorType::ERR_BAD_DIRECTIVE_DECLARATION);
return;
}
if (!is_catchall && size != CATCH_DIRECTIVE_TOKENS_NUM && size != CATCH_FULL_DIRECTIVE_TOKENS_NUM) {
context_.err = GetError(
"Incorrect catch block declaration. Must be in the format: .catch <exception_record>, <try_begin_label>, "
"<try_end_label>, <catch_begin_label>[, <catch_end_label>]",
Error::ErrorType::ERR_BAD_DIRECTIVE_DECLARATION);
return;
}
++context_;
if (!is_catchall && !RecordValidName()) {
context_.err = GetError("Invalid name of the exception record.", Error::ErrorType::ERR_BAD_RECORD_NAME);
return;
}
Function::CatchBlock catch_block =
PrepareCatchBlock(is_catchall, size, CATCHALL_FULL_DIRECTIVE_TOKENS_NUM, CATCH_FULL_DIRECTIVE_TOKENS_NUM);
curr_func_->catch_blocks.push_back(catch_block);
}
// Entry point for a line starting with .catch or .catchall. Both directives
// are only legal inside a function body; valid ones are handed to
// ParseAsCatchDirective().
void Parser::ParseAsCatchall(const std::vector<Token> &tokens)
{
    std::string directive_name;
    if (*context_ == Token::Type::ID_CATCH) {
        directive_name = ".catch";
    } else {
        directive_name = ".catchall";
    }
    if (!func_def_) {
        context_.err = GetError(directive_name + " directive is outside a function body.",
                                Error::ErrorType::ERR_INCORRECT_DIRECTIVE_LOCATION);
        return;
    }
    LOG(DEBUG, ASSEMBLER) << "started parsing of " << directive_name << " directive (line " << line_stric_
                          << "): " << tokens[0].whole_line;
    ParseAsCatchDirective();
}
// Entry point for a ".language" line. Enforces that the directive appears at
// most once and before any other declaration, then delegates to
// ParseAsLanguageDirective().
void Parser::ParseAsLanguage(const std::vector<Token> &tokens, bool &is_lang_parsed, bool &is_first_statement)
{
    if (is_lang_parsed) {
        context_.err = GetError("Multiple .language directives", Error::ErrorType::ERR_MULTIPLE_DIRECTIVES);
        return;
    }
    if (!is_first_statement) {
        context_.err = GetError(".language directive must be specified before any other declarations",
                                Error::ErrorType::ERR_INCORRECT_DIRECTIVE_LOCATION);
        return;
    }
    LOG(DEBUG, ASSEMBLER) << "started parsing of .language directive (line " << line_stric_
                          << "): " << tokens[0].whole_line;
    ParseAsLanguageDirective();
    is_lang_parsed = true;
}
// Finalizes one parsed line: latches any pending context error, flags
// leftover tokens as an error, and reports whether the main loop should
// continue. Also clears the "first statement" flag used by .language checks.
bool Parser::ParseAfterLine(bool &is_first_statement)
{
    SetError();
    // A successfully handled line must have consumed every token.
    if (!context_.Mask() && err_.err == Error::ErrorType::ERR_NONE) {
        context_.err = GetError("There should be nothing after.", Error::ErrorType::ERR_BAD_END);
    }
    if (err_.err != Error::ErrorType::ERR_NONE) {
        LOG(DEBUG, ASSEMBLER) << "processing aborted (error detected)";
        return false;
    }
    LOG(DEBUG, ASSEMBLER) << "parsing of line " << line_stric_ << " is successful";
    // Latch again so the "nothing after" error (if set above) is recorded.
    SetError();
    is_first_statement = false;
    return true;
}
// Finalization after all lines are consumed: resolves labels and argument
// registers, checks that no body was left open, validates the function and
// record tables, and stamps the source file name on every implemented entity.
// Returns the completed Program or the first recorded Error.
Expected<Program, Error> Parser::ParseAfterMainLoop(const std::string &file_name)
{
    ParseResetFunctionLabelsAndParams();
    // open_ still set here means the last '{' was never matched.
    if (open_ && err_.err == Error::ErrorType::ERR_NONE) {
        context_.err = Error("Code area is not closed.", curr_func_->file_location->line_number,
                             Error::ErrorType::ERR_BAD_CLOSE, "", 0, curr_func_->name.size(), curr_func_->name);
        SetError();
    }
    ParseResetTables();
    if (err_.err != Error::ErrorType::ERR_NONE) {
        return Unexpected(err_);
    }
    // NOTE(review): functions are tested via metadata->HasImplementation()
    // while records use HasImplementation() directly -- confirm the two
    // accessors are equivalent here.
    for (auto &func : program_.function_table) {
        if (func.second.metadata->HasImplementation()) {
            func.second.source_file = file_name;
        }
    }
    for (auto &rec : program_.record_table) {
        if (rec.second.HasImplementation()) {
            rec.second.source_file = file_name;
        }
    }
    return std::move(program_);
}
// Main parse loop over pre-tokenized lines. Each line is dispatched on its
// first token (directive, closing brace, or body content for the currently
// open function/record); parsing stops at the first error. Finalization and
// table validation happen in ParseAfterMainLoop().
Expected<Program, Error> Parser::Parse(TokenSet &vectors_tokens, const std::string &file_name)
{
    bool is_lang_parsed = false;
    bool is_first_statement = true;
    for (const auto &tokens : vectors_tokens) {
        ++line_stric_;
        if (tokens.empty()) {
            continue;
        }
        LOG(DEBUG, ASSEMBLER) << "started parsing of line " << line_stric_ << ": " << tokens[0].whole_line;
        context_.Make(tokens);
        switch (*context_) {
            case Token::Type::ID_CATCH:
            case Token::Type::ID_CATCHALL: {
                ParseAsCatchall(tokens);
                break;
            }
            case Token::Type::ID_LANG: {
                ParseAsLanguage(tokens, is_lang_parsed, is_first_statement);
                break;
            }
            case Token::Type::ID_REC: {
                ParseAsRecord(tokens);
                break;
            }
            case Token::Type::ID_FUN: {
                ParseAsFunction(tokens);
                break;
            }
            case Token::Type::DEL_BRACE_R: {
                ParseAsBraceRight(tokens);
                break;
            }
            default: {
                // Not a directive: the line belongs to whichever definition
                // is currently being parsed, if any.
                if (func_def_) {
                    ParseFunctionCode();
                } else if (record_def_) {
                    ParseRecordFields();
                }
            }
        }
        // Stop at the first error; leftover tokens are also an error.
        if (!ParseAfterLine(is_first_statement)) {
            break;
        }
    }
    return ParseAfterMainLoop(file_name);
}
// Convenience overload: tokenizes raw source text line by line and delegates
// to the token-based Parse(). Returns the first lexer error, if any.
Expected<Program, Error> Parser::Parse(const std::string &source, const std::string &file_name)
{
    std::stringstream input(source);
    Lexer lexer;
    std::vector<std::vector<Token>> token_lines;
    for (std::string line; std::getline(input, line);) {
        auto [tokens, error] = lexer.TokenizeString(line);
        if (error.err != Error::ErrorType::ERR_NONE) {
            return Unexpected(error);
        }
        token_lines.push_back(tokens);
    }
    return Parse(token_lines, file_name);
}
// Latches the current per-line context error into err_, the parser-wide
// error slot checked by ParseAfterLine() and ParseAfterMainLoop().
void Parser::SetError()
{
    err_ = context_.err;
}
// Validates the current token as a register name: 'v<N>' is always allowed;
// 'a<N>' is allowed only when the function has parameters, with N bounded by
// the parameter count. Returns false if an error is already pending.
bool Parser::RegValidName()
{
    if (context_.err.err != Error::ErrorType::ERR_NONE) {
        return false;
    }
    if (curr_func_->GetParamsNum() > 0) {
        return context_.ValidateRegisterName('v') || context_.ValidateRegisterName('a', curr_func_->GetParamsNum() - 1);
    }
    return context_.ValidateRegisterName('v');
}
// Validates the current token as a parameter name for the current function.
bool Parser::ParamValidName()
{
    return context_.ValidateParameterName(curr_func_->GetParamsNum());
}
// Returns true for characters legal inside identifiers: alphanumerics or '_'.
// The cast to unsigned char is required: passing a plain char with a negative
// value (e.g. a byte of a multi-byte UTF-8 sequence on platforms where char
// is signed) to std::isalnum is undefined behavior per the C standard.
bool IsAlphaNumeric(char c)
{
    return std::isalnum(static_cast<unsigned char>(c)) != 0 || c == '_';
}
// Returns true for characters legal at the start of an identifier: letters
// or '_'. The unsigned char cast avoids undefined behavior when a negative
// char value (e.g. a UTF-8 byte with char signed) reaches std::isalpha.
bool IsNonDigit(char c)
{
    return std::isalpha(static_cast<unsigned char>(c)) != 0 || c == '_';
}
// Checks that the current token is a valid dotted identifier: it must begin
// with a letter or '_', each '.' must be immediately followed by a letter or
// '_', and all other characters must be alphanumeric, '_' or '$'.
bool Parser::PrefixedValidName()
{
    auto token = context_.GiveToken();
    if (!IsNonDigit(token[0])) {
        return false;
    }
    for (size_t pos = 1; pos < token.size(); ++pos) {
        if (token[pos] == '.') {
            // A component separator must be followed by a non-digit start
            // character; a trailing '.' is invalid.
            ++pos;
            if (pos >= token.size() || !IsNonDigit(token[pos])) {
                return false;
            }
        } else if (!IsAlphaNumeric(token[pos]) && token[pos] != '$') {
            return false;
        }
    }
    return true;
}
// Record names follow the generic dotted-identifier rules.
bool Parser::RecordValidName()
{
    return PrefixedValidName();
}
// Function names follow the generic dotted-identifier rules.
bool Parser::FunctionValidName()
{
    return PrefixedValidName();
}
// Checks that the current token is a valid label: a letter or '_' followed
// only by alphanumerics or '_' (no dots or '$', unlike dotted identifiers).
bool Parser::LabelValidName()
{
    auto name = context_.GiveToken();
    if (!IsNonDigit(name[0])) {
        return false;
    }
    for (size_t i = 1; i < name.size(); ++i) {
        if (!IsAlphaNumeric(name[i])) {
            return false;
        }
    }
    return true;
}
// Detects an optional "label:" prefix on an instruction line using one token
// of lookahead (advance, peek for ':', step back). On a valid new label it is
// registered as defined and attached to curr_ins_; a duplicate or malformed
// label sets context_.err. Returns true when parsing of the line may continue.
bool Parser::ParseLabel()
{
    LOG(DEBUG, ASSEMBLER) << "started searching for label (line " << line_stric_
                          << "): " << context_.tokens[0].whole_line;
    // Lookahead: is the *next* token a colon?
    context_++;
    if (*context_ == Token::Type::DEL_COLON) {
        context_--;
        if (LabelValidName()) {
            // 'true' registers this as the label's defining occurrence.
            if (AddObjectInTable(true, *label_table_)) {
                curr_ins_->set_label = true;
                curr_ins_->label = context_.GiveToken();
                LOG(DEBUG, ASSEMBLER) << "label detected (line " << line_stric_ << "): " << context_.GiveToken();
                // Skip the label token and the colon.
                context_++;
                context_++;
                // Continue only if something follows the label on this line.
                return !context_.Mask();
            }
            LOG(DEBUG, ASSEMBLER) << "label is detected (line " << line_stric_ << "): " << context_.GiveToken()
                                  << ", but this label already exists";
            context_.err = GetError("This label already exists.", Error::ErrorType::ERR_BAD_LABEL_EXT);
        } else {
            LOG(DEBUG, ASSEMBLER) << "label with non-standard character is detected, attempt to create a label is "
                                     "supported, but this cannot be any label name (line "
                                  << line_stric_ << "): " << context_.GiveToken();
            context_.err = GetError(
                "Invalid name of label. Label can only contain characters: '_', '0' - '9', 'a' - 'z', 'A' - 'Z'; and "
                "starts with any letter or with '_'.",
                Error::ErrorType::ERR_BAD_LABEL);
        }
        return false;
    }
    // No colon: rewind, this line has no label.
    context_--;
    LOG(DEBUG, ASSEMBLER) << "label is not detected (line " << line_stric_ << ")";
    return true;
}
// Maps an operation token to its Opcode. Relies on the generated enums:
// operation tokens occupy the contiguous range (OPERATION, KEYWORD) in
// Token::Type, in the same order as the Opcode enumerators starting at 0.
static Opcode TokenToOpcode(Token::Type id)
{
    ASSERT(id > Token::Type::OPERATION);
    ASSERT(id < Token::Type::KEYWORD);
    using utype = std::underlying_type_t<Token::Type>;
    return static_cast<Opcode>(static_cast<utype>(id) - static_cast<utype>(Token::Type::OPERATION) - 1);
}
// Parses the operation mnemonic of the current instruction line. Returns
// false when the line is exhausted, a closing brace follows, or the token is
// not a known operation (the last case also sets context_.err).
bool Parser::ParseOperation()
{
    if (context_.Mask()) {
        LOG(DEBUG, ASSEMBLER) << "no more tokens (line " << line_stric_ << "): " << context_.tokens[0].whole_line;
        return false;
    }
    // A '}' here belongs to the body-closing logic, not to an instruction.
    if (open_ && *context_ == Token::Type::DEL_BRACE_R) {
        return false;
    }
    LOG(DEBUG, ASSEMBLER) << "started searching for operation (line " << line_stric_
                          << "): " << context_.tokens[0].whole_line;
    // Operation tokens live in the (OPERATION, KEYWORD) enum range.
    if (*context_ > Token::Type::OPERATION && *context_ < Token::Type::KEYWORD) {
        SetOperationInformation();
        context_.UpSignOperation();
        curr_ins_->opcode = TokenToOpcode(context_.id);
        LOG(DEBUG, ASSEMBLER) << "operation is detected (line " << line_stric_ << "): " << context_.GiveToken()
                              << " (operand type: " << OperandTypePrint(curr_ins_->opcode) << ")";
        context_++;
        return true;
    }
    LOG(DEBUG, ASSEMBLER) << "founded " << context_.GiveToken() << ", it is not an operation (line " << line_stric_
                          << ")";
    context_.err = GetError("Invalid operation name.", Error::ErrorType::ERR_BAD_OPERATION_NAME);
    return false;
}
// Parses one register operand. 'v<N>' records the register number directly
// (tracking the maximum used); 'a<N>' records the raw argument index and
// remembers the slot so ParseResetFunctionLabelsAndParams() can rebase it
// after the function's parameter count is known.
bool Parser::ParseOperandVreg()
{
    if (context_.err.err != Error::ErrorType::ERR_NONE) {
        return false;
    }
    if (*context_ != Token::Type::ID) {
        context_.err = GetError("Expected register.", Error::ErrorType::ERR_BAD_OPERAND, +1);
        return false;
    }
    std::string_view p = context_.GiveToken();
    if (p[0] == 'v') {
        p.remove_prefix(1);
        int64_t number = ToNumber(p);
        // Track the highest v-register so regs_num can be computed later.
        if (number > *(context_.max_value_of_reg)) {
            *(context_.max_value_of_reg) = number;
        }
        curr_ins_->regs.push_back(number);
    } else if (p[0] == 'a') {
        p.remove_prefix(1);
        curr_ins_->regs.push_back(ToNumber(p));
        // Remember (instruction, reg slot) for the later a->v rebase pass.
        context_.function_arguments_list->emplace_back(context_.ins_number, curr_ins_->regs.size() - 1);
    }
    // NOTE(review): a token starting with neither 'v' nor 'a' is consumed
    // here without recording any register and without an error -- presumably
    // earlier validation (RegValidName) rules that out; confirm.
    ++context_;
    return true;
}
// Parses a callee-name operand: validates the identifier, stores it in the
// instruction's id list, and registers a (non-defining) reference in the
// function table so undefined callees are caught after parsing.
bool Parser::ParseOperandCall()
{
    if (context_.err.err != Error::ErrorType::ERR_NONE) {
        return false;
    }
    if (!FunctionValidName()) {
        context_.err = GetError("Invalid name of function.", Error::ErrorType::ERR_BAD_NAME_REG);
        return false;
    }
    std::string_view p = context_.GiveToken();
    curr_ins_->ids.emplace_back(p.data(), p.length());
    // 'false': this is a use of the function, not its definition.
    AddObjectInTable(false, program_.function_table);
    ++context_;
    return true;
}
// Returns true when `c` is an octal digit.  '0'..'7' form a contiguous run in
// the execution character set, so a simple range check suffices.
static bool IsOctal(char c)
{
    return ('0' <= c) && (c < '8');
}
// Returns true when `c` is a hexadecimal digit (0-9, a-f, A-F).
// Fix: std::isxdigit has undefined behavior when passed a negative value other
// than EOF (possible when plain char is signed and the input is non-ASCII), so
// widen through unsigned char first, as the cctype contract requires.
static bool IsHex(char c)
{
    return std::isxdigit(static_cast<unsigned char>(c)) != 0;
}
// Converts one hex digit to its numeric value (0-15).  The caller guarantees
// `c` already passed IsHex(), so the final branch only ever sees 'a'-'f'.
static uint8_t FromHex(char c)
{
    constexpr size_t DIGIT_NUM = 10;
    if (c >= '0' && c <= '9') {
        return c - '0';
    }
    return (c >= 'A' && c <= 'F') ? (c - 'A' + DIGIT_NUM) : (c - 'a' + DIGIT_NUM);
}
// Converts one octal digit to its numeric value (0-7): the offset from '0'.
static uint8_t FromOctal(char c)
{
    return static_cast<uint8_t>(c - '0');
}
// Decodes an octal escape sequence of up to three digits starting at *i and
// advances *i past the digits consumed.  Never fails: at least one octal
// digit is guaranteed by the caller (ParseEscapeSequence).
Expected<char, Error> Parser::ParseOctalEscapeSequence(std::string_view s, size_t *i)
{
    constexpr size_t OCT_SHIFT = 3;
    size_t pos = *i;
    size_t digits = 0;
    uint32_t value = 0;
    while (pos < s.length() && digits < OCT_SHIFT && IsOctal(s[pos])) {
        value = (value << 3U) | FromOctal(s[pos]);
        ++pos;
        ++digits;
    }
    *i += digits;
    return value;
}
// Decodes exactly two hex digits following "\x".  On entry *i points at the
// first digit; *i is advanced past each character consumed.
Expected<char, Error> Parser::ParseHexEscapeSequence(std::string_view s, size_t *i)
{
    constexpr size_t HEX_SHIFT = 2;
    uint32_t r = 0;
    size_t idx = *i;
    for (size_t j = 0; j < HEX_SHIFT; j++) {
        char v = s[(*i)++];
        if (!IsHex(v)) {
            // idx - HEX_SHIFT points the diagnostic back at the "\x" prefix.
            return Unexpected(GetError("Invalid hexadecimal escape sequence",
                Error::ErrorType::ERR_BAD_STRING_INVALID_HEX_ESCAPE_SEQUENCE, idx - HEX_SHIFT));
        }
        r |= FromHex(v);
        r <<= 4U;
    }
    // Each iteration shifts after OR-ing, so the loop shifts once too many;
    // undo the extra shift here.
    r >>= 4U;
    return r;
}
// Decodes one escape sequence starting at s[*i] (the character after the
// backslash) and advances *i past everything consumed.
Expected<char, Error> Parser::ParseEscapeSequence(std::string_view s, size_t *i)
{
    size_t idx = *i;
    char c = s[idx];
    // Octal escapes begin directly with a digit; the helper consumes it.
    if (IsOctal(c)) {
        return ParseOctalEscapeSequence(s, i);
    }
    ++(*i);
    switch (c) {
        case '\'':
        case '"':
        case '\\':
            return c;
        case 'a':
            return '\a';
        case 'b':
            return '\b';
        case 'f':
            return '\f';
        case 'n':
            return '\n';
        case 'r':
            return '\r';
        case 't':
            return '\t';
        case 'v':
            return '\v';
        case 'x':
            // Hex escape: the two digits follow the already-consumed 'x'.
            return ParseHexEscapeSequence(s, i);
        default:
            return Unexpected(GetError("Unknown escape sequence",
                                       Error::ErrorType::ERR_BAD_STRING_UNKNOWN_ESCAPE_SEQUENCE, idx - 1));
    }
}
// Parses the current ID_STRING token into its unescaped value, interns it in
// program_.strings, and returns it.  Does not advance the context; on failure
// sets context_.err and returns empty.
std::optional<std::string> Parser::ParseStringLiteral()
{
    if (*context_ != Token::Type::ID_STRING) {
        context_.err = GetError("Expected string literal", Error::ErrorType::ERR_BAD_OPERAND);
        return {};
    }
    auto token = context_.GiveToken();
    size_t i = 1; /* skip leading quote */
    size_t len = token.length();
    std::string s;
    // Walk the characters between the surrounding quotes, expanding escapes.
    while (i < len - 1) {
        char c = token[i++];
        if (c != '\\') {
            s.append(1, c);
            continue;
        }
        auto res = ParseEscapeSequence(token, &i);
        if (!res) {
            context_.err = res.Error();
            return {};
        }
        s.append(1, res.Value());
    }
    program_.strings.insert(s);
    return s;
}
// Parses a string-literal operand and stores its unescaped value as an
// instruction id.
bool Parser::ParseOperandString()
{
    if (context_.err.err != Error::ErrorType::ERR_NONE) {
        return false;
    }
    auto res = ParseStringLiteral();
    if (!res) {
        // ParseStringLiteral already set context_.err.
        return false;
    }
    curr_ins_->ids.push_back(res.value());
    ++context_;
    return true;
}
// Consumes the comma separating two instruction operands.
bool Parser::ParseOperandComma()
{
    if (context_.err.err != Error::ErrorType::ERR_NONE) {
        return false;
    }
    // context_++ tests the current token and advances in a single step.
    if (context_++ != Token::Type::DEL_COMMA) {
        // Step back so the error points at the offending token -- unless the
        // token stream is exhausted or we stopped on a closing bracket.
        if (!context_.Mask() && *context_ != Token::Type::DEL_BRACKET_R) {
            --context_;
        }
        context_.err = GetError("Expected comma.", Error::ErrorType::ERR_BAD_NUMBER_OPERANDS);
        return false;
    }
    return true;
}
// Parses an integer immediate operand and appends it to curr_ins_->imms.
bool Parser::ParseOperandInteger()
{
    if (context_.err.err != Error::ErrorType::ERR_NONE) {
        return false;
    }
    if (*context_ != Token::Type::ID) {
        if (*context_ == Token::Type::DEL_BRACE_R) {
            --context_;
        }
        context_.err = GetError("Expected immediate.", Error::ErrorType::ERR_BAD_OPERAND, +1);
        return false;
    }
    std::string_view p = context_.GiveToken();
    if (!ValidateInteger(p)) {
        context_.err = GetError("Expected integer.", Error::ErrorType::ERR_BAD_INTEGER_NAME);
        return false;
    }
    int64_t n = IntegerNumber(p);
    // NOTE(review): assumes IntegerNumber() clears errno before converting;
    // otherwise a stale ERANGE from an earlier call could trigger this branch.
    if (errno == ERANGE) {
        context_.err =
            GetError("Too large immediate (length is more than 64 bit).", Error::ErrorType::ERR_BAD_INTEGER_WIDTH);
        return false;
    }
    curr_ins_->imms.push_back(n);
    ++context_;
    return true;
}
// Parses a floating-point immediate operand and appends it to
// curr_ins_->imms.  `is_64bit` selects double vs. single precision parsing.
bool Parser::ParseOperandFloat(bool is_64bit)
{
    if (context_.err.err != Error::ErrorType::ERR_NONE) {
        return false;
    }
    if (*context_ != Token::Type::ID) {
        if (*context_ == Token::Type::DEL_BRACE_R) {
            --context_;
        }
        context_.err = GetError("Expected immediate.", Error::ErrorType::ERR_BAD_OPERAND, +1);
        return false;
    }
    std::string_view p = context_.GiveToken();
    if (!ValidateFloat(p)) {
        context_.err = GetError("Expected float.", Error::ErrorType::ERR_BAD_FLOAT_NAME);
        return false;
    }
    double n = FloatNumber(p, is_64bit);
    // NOTE(review): assumes FloatNumber() clears errno before converting.
    if (errno == ERANGE) {
        context_.err =
            GetError("Too large immediate (length is more than 64 bit).", Error::ErrorType::ERR_BAD_FLOAT_WIDTH);
        return false;
    }
    curr_ins_->imms.push_back(n);
    ++context_;
    return true;
}
// Parses a branch-target label operand, storing it as an instruction id and
// registering it in the current function's label table as a reference.
bool Parser::ParseOperandLabel()
{
    if (context_.err.err != Error::ErrorType::ERR_NONE) {
        return false;
    }
    if (!LabelValidName()) {
        context_.err = GetError("Invalid name of label.", Error::ErrorType::ERR_BAD_NAME_ID);
        return false;
    }
    std::string_view p = context_.GiveToken();
    curr_ins_->ids.emplace_back(p.data(), p.length());
    // `false`: reference only; the label may be defined further down.
    AddObjectInTable(false, *label_table_);
    ++context_;
    return true;
}
// Parses a label-id operand.  Same as ParseOperandLabel but additionally
// requires the current token to be an ID.
bool Parser::ParseOperandId()
{
    if (context_.err.err != Error::ErrorType::ERR_NONE) {
        return false;
    }
    if (*context_ != Token::Type::ID) {
        context_.err = GetError("Expected label.", Error::ErrorType::ERR_BAD_OPERAND);
        return false;
    }
    if (!LabelValidName()) {
        context_.err = GetError("Invalid name of label.", Error::ErrorType::ERR_BAD_NAME_ID);
        return false;
    }
    std::string_view p = context_.GiveToken();
    curr_ins_->ids.emplace_back(p.data(), p.length());
    // `false`: reference only; the label may be defined further down.
    AddObjectInTable(false, *label_table_);
    ++context_;
    return true;
}
// Parses a type operand (object or array type_id), forward-declares any
// referenced record, warns when the parsed kind contradicts `ver_type`, and
// stores the type name as an instruction id.
bool Parser::ParseOperandType(Type::VerificationType ver_type)
{
    if (context_.err.err != Error::ErrorType::ERR_NONE) {
        return false;
    }
    if (*context_ != Token::Type::ID) {
        context_.err = GetError("Expected type.", Error::ErrorType::ERR_BAD_OPERAND);
        return false;
    }
    if (!TypeValidName()) {
        context_.err = GetError("Invalid name of type.", Error::ErrorType::ERR_BAD_NAME_ID);
        return false;
    }
    Type type;
    if (!ParseType(&type)) {
        return false;
    }
    // After ParseType the current token is "]" exactly when an array type was
    // parsed; anything else means a plain object type.
    // (Was a redundant `cond ? false : true` ternary.)
    bool is_object = context_.GiveToken() != "]";
    if (is_object) {
        AddObjectInTable(false, program_.record_table);
        if (ver_type == Type::VerificationType::TYPE_ID_ARRAY) {
            GetWarning("Unexpected type_id received! Expected array, but object given",
                       Error::ErrorType::WAR_UNEXPECTED_TYPE_ID);
        }
    } else {
        // Arrays of reference types: make sure the component record exists in
        // the record table (forward-declare it on first sight).
        if (!type.IsArrayContainsPrimTypes() &&
            program_.record_table.find(type.GetComponentName()) == program_.record_table.end()) {
            // Local renamed to snake_case to match the file's conventions.
            std::string component_name = type.GetComponentName();
            context_.token = component_name;
            AddObjectInTable(false, program_.record_table);
        }
        if (ver_type == Type::VerificationType::TYPE_ID_OBJECT) {
            GetWarning("Unexpected type_id received! Expected object, but array given",
                       Error::ErrorType::WAR_UNEXPECTED_TYPE_ID);
        }
    }
    curr_ins_->ids.push_back(type.GetName());
    return true;
}
// Parses a fully qualified field operand of the form "<record>.<field>",
// forward-declaring the record and the field on first use.
bool Parser::ParseOperandField()
{
    if (context_.err.err != Error::ErrorType::ERR_NONE) {
        return false;
    }
    if (*context_ != Token::Type::ID) {
        context_.err = GetError("Expected field.", Error::ErrorType::ERR_BAD_OPERAND);
        return false;
    }
    if (!PrefixedValidName()) {
        context_.err = GetError("Invalid field name.", Error::ErrorType::ERR_BAD_NAME_ID);
        return false;
    }
    std::string_view p = context_.GiveToken();
    std::string record_full_name = std::string(p);
    // Some names of records in pandastdlib starts with 'panda.', and therefore,
    // the record name is before the second dot, and the field name is after the second dot.
    // NOTE(review): if the token contains no dot, find_last_of returns npos and
    // both record_name and field_name become the whole token -- confirm that
    // PrefixedValidName() rules this case out.
    auto pos_point = record_full_name.find_last_of('.');
    std::string record_name = record_full_name.substr(0, pos_point);
    std::string field_name = record_full_name.substr(pos_point + 1);
    // Forward-declare the record if it has not been seen yet.
    auto it_record = program_.record_table.find(record_name);
    if (it_record == program_.record_table.end()) {
        context_.token = record_name;
        AddObjectInTable(false, program_.record_table);
        it_record = program_.record_table.find(record_name);
    }
    // Forward-declare the field on first use; its source bounds are computed
    // relative to the full "<record>.<field>" token.
    auto it_field = std::find_if(it_record->second.field_list.begin(), it_record->second.field_list.end(),
                                 [&field_name](pandasm::Field &field) { return field_name == field.name; });
    if (!field_name.empty() && it_field == it_record->second.field_list.end()) {
        it_record->second.field_list.emplace_back(program_.lang);
        auto &field = it_record->second.field_list.back();
        field.name = field_name;
        field.line_of_def = line_stric_;
        field.whole_line = context_.tokens[context_.number - 1].whole_line;
        field.bound_left = context_.tokens[context_.number - 1].bound_left + record_name.length() + 1;
        field.bound_right = context_.tokens[context_.number - 1].bound_right;
        field.is_defined = false;
    }
    curr_ins_->ids.emplace_back(p.data(), p.length());
    ++context_;
    return true;
}
// Succeeds only when no further operands remain on the line (used for
// zero-operand instructions).
bool Parser::ParseOperandNone()
{
    if (context_.err.err != Error::ErrorType::ERR_NONE) {
        return false;
    }
    // A closing '}' while a block is open ends the instruction list, not an
    // operand error.
    if (open_ && *context_ == Token::Type::DEL_BRACE_R) {
        return false;
    }
    if (!context_.Mask()) {
        context_.err = GetError("Invalid number of operands.", Error::ErrorType::ERR_BAD_NUMBER_OPERANDS);
        --context_;
        return false;
    }
    return true;
}
// A record's full signature is just its name (records have no parameter list).
bool Parser::ParseRecordFullSign()
{
    return ParseRecordName();
}
// Parses a complete function signature: <return type> <name> '(' <args> ')'.
bool Parser::ParseFunctionFullSign()
{
    if (!ParseFunctionReturn() || !ParseFunctionName()) {
        return false;
    }
    if (*context_ != Token::Type::DEL_BRACKET_L) {
        context_.err = GetError("Expected '('.", Error::ErrorType::ERR_BAD_ARGS_BOUND);
        return false;
    }
    ++context_;
    if (!ParseFunctionArgs()) {
        // ParseFunctionArgs reported (or deliberately left) the error state.
        return false;
    }
    if (*context_ != Token::Type::DEL_BRACKET_R) {
        context_.err = GetError("Expected ')'.", Error::ErrorType::ERR_BAD_ARGS_BOUND);
        return false;
    }
    ++context_;
    return true;
}
// Parses and validates a record name, rejecting redefinitions, and binds
// curr_record_ to the table entry.
bool Parser::ParseRecordName()
{
    LOG(DEBUG, ASSEMBLER) << "started searching for record name (line " << line_stric_
                          << "): " << context_.tokens[context_.number - 1].whole_line;
    if (!RecordValidName()) {
        if (*context_ == Token::Type::DEL_BRACKET_L) {
            context_.err = GetError("No record name.", Error::ErrorType::ERR_BAD_RECORD_NAME);
            return false;
        }
        context_.err = GetError("Invalid name of the record.", Error::ErrorType::ERR_BAD_RECORD_NAME);
        return false;
    }
    auto iter = program_.record_table.find(std::string(context_.GiveToken().data(), context_.GiveToken().length()));
    // A record that only exists as a forward reference (not yet defined) may
    // now be defined; a second definition is an error.
    if (iter == program_.record_table.end() || !iter->second.file_location->is_defined) {
        SetRecordInformation();
    } else {
        context_.err = GetError("This record already exists.", Error::ErrorType::ERR_BAD_ID_RECORD);
        return false;
    }
    LOG(DEBUG, ASSEMBLER) << "record name found (line " << line_stric_ << "): " << context_.GiveToken();
    ++context_;
    return true;
}
// Registers the current token as a record definition and points curr_record_
// at the freshly inserted (or updated) table entry.
void Parser::SetRecordInformation()
{
    AddObjectInTable(true, program_.record_table);
    curr_record_ = &(program_.record_table.at(std::string(context_.GiveToken().data(), context_.GiveToken().length())));
}
// Parses and validates a function name, rejecting redefinitions, and binds
// the per-function parser state via SetFunctionInformation().
bool Parser::ParseFunctionName()
{
    LOG(DEBUG, ASSEMBLER) << "started searching for function name (line " << line_stric_
                          << "): " << context_.tokens[context_.number - 1].whole_line;
    if (!FunctionValidName()) {
        if (*context_ == Token::Type::DEL_BRACKET_L) {
            context_.err = GetError("No function name.", Error::ErrorType::ERR_BAD_FUNCTION_NAME);
            return false;
        }
        context_.err = GetError("Invalid name of the function.", Error::ErrorType::ERR_BAD_FUNCTION_NAME);
        return false;
    }
    auto iter = program_.function_table.find(std::string(context_.GiveToken().data(), context_.GiveToken().length()));
    // A function known only as a forward reference may now be defined;
    // a second definition is an error.
    if (iter == program_.function_table.end() || !iter->second.file_location->is_defined) {
        SetFunctionInformation();
    } else {
        context_.err = GetError("This function already exists.", Error::ErrorType::ERR_BAD_ID_FUNCTION);
        return false;
    }
    LOG(DEBUG, ASSEMBLER) << "function name found (line " << line_stric_ << "): " << context_.GiveToken();
    ++context_;
    return true;
}
// Registers the current token as a function definition and rebinds all
// per-function parser state (label table, return type, register tracking,
// argument-register fixup list).
void Parser::SetFunctionInformation()
{
    std::string p = std::string(context_.GiveToken());
    AddObjectInTable(true, program_.function_table);
    curr_func_ = &(program_.function_table.at(p));
    label_table_ = &(curr_func_->label_table);
    curr_func_->return_type = context_.curr_func_return_type;
    context_.max_value_of_reg = &(curr_func_->value_of_first_param);
    context_.function_arguments_list = &(context_.function_arguments_lists[curr_func_->name]);
}
// Records source-location debug info for the current instruction of
// curr_func_->ins.
void Parser::SetOperationInformation()
{
    context_.ins_number = curr_func_->ins.size();
    // NOTE(review): assumes the instruction has already been appended, so
    // ins.back() is the one being parsed -- confirm against the call site.
    auto &curr_debug = curr_func_->ins.back().ins_debug;
    curr_debug.line_number = line_stric_;
    curr_debug.whole_line = context_.tokens[context_.number - 1].whole_line;
    curr_debug.bound_left = context_.tokens[context_.number - 1].bound_left;
    curr_debug.bound_right = context_.tokens[context_.number - 1].bound_right;
}
// Parses the function's return type into context_.curr_func_return_type.
bool Parser::ParseFunctionReturn()
{
    LOG(DEBUG, ASSEMBLER) << "started searching for return function value (line " << line_stric_
                          << "): " << context_.tokens[context_.number - 1].whole_line;
    if (!TypeValidName()) {
        if (*context_ == Token::Type::DEL_BRACKET_L) {
            context_.err = GetError("No return type.", Error::ErrorType::ERR_BAD_FUNCTION_RETURN_VALUE);
            return false;
        }
        context_.err = GetError("Invalid return type.", Error::ErrorType::ERR_BAD_FUNCTION_RETURN_VALUE);
        return false;
    }
    if (!ParseType(&context_.curr_func_return_type)) {
        return false;
    }
    LOG(DEBUG, ASSEMBLER) << "return type found (line " << line_stric_ << "): " << context_.GiveToken();
    return true;
}
// Validates the current token as a type name.  Primitive (non-REFERENCE)
// type ids are always valid; reference types must also pass the
// prefixed-name check.
bool Parser::TypeValidName()
{
    if (Type::GetId(context_.GiveToken()) == panda_file::Type::TypeId::REFERENCE) {
        return PrefixedValidName();
    }
    return true;
}
// Parses one "<type> <name>" function parameter, optionally followed by a
// '<...>' metadata block, and appends it to curr_func_->params.
bool Parser::ParseFunctionArg()
{
    if (*context_ != Token::Type::ID) {
        context_.err = GetError("Expected identifier.", Error::ErrorType::ERR_BAD_FUNCTION_PARAMETERS);
        return false;
    }
    if (!TypeValidName()) {
        context_.err = GetError("Invalid parameter type.", Error::ErrorType::ERR_BAD_TYPE);
        return false;
    }
    Type type;
    if (!ParseType(&type)) {
        return false;
    }
    // Token stream exhausted after the type: no parameter name can follow.
    if (context_.Mask()) {
        return false;
    }
    if (*context_ != Token::Type::ID) {
        context_.err = GetError("Expected identifier.", Error::ErrorType::ERR_BAD_FUNCTION_PARAMETERS);
        return false;
    }
    if (!ParamValidName()) {
        context_.err = GetError("Invalid parameter name.", Error::ErrorType::ERR_BAD_PARAM_NAME);
        return false;
    }
    ++context_;
    Function::Parameter parameter(type, program_.lang);
    // Point metadata_ at this parameter so ParseMetaDef fills it in.
    metadata_ = parameter.metadata.get();
    if (*context_ == Token::Type::DEL_LT && !ParseMetaDef()) {
        return false;
    }
    curr_func_->params.push_back(std::move(parameter));
    return true;
}
// Enforces the comma between consecutive parameters.  `comma` is false for
// the first parameter and set to true for every subsequent call.
bool Parser::ParseFunctionArgComma(bool &comma)
{
    if (comma) {
        if (*context_ != Token::Type::DEL_COMMA) {
            context_.err = GetError("Expected comma.", Error::ErrorType::ERR_BAD_NUMBER_OPERANDS);
            return false;
        }
        ++context_;
    }
    comma = true;
    return true;
}
// Parses a comma-separated (possibly empty) parameter list; stops at the
// first token that cannot start another parameter.
bool Parser::ParseFunctionArgs()
{
    LOG(DEBUG, ASSEMBLER) << "started searching for function parameters (line " << line_stric_
                          << "): " << context_.tokens[context_.number - 1].whole_line;
    bool comma = false;
    while (true) {
        if (context_.Mask()) {
            return false;
        }
        // Only an ID (a type) or a separating comma can continue the list.
        if (context_.id != Token::Type::DEL_COMMA && context_.id != Token::Type::ID) {
            break;
        }
        if (!ParseFunctionArgComma(comma)) {
            return false;
        }
        if (!ParseFunctionArg()) {
            return false;
        }
    }
    LOG(DEBUG, ASSEMBLER) << "parameters found (line " << line_stric_ << "): ";
    return true;
}
// Parses an optional metadata block '<attr[=value], ...>' into metadata_.
bool Parser::ParseMetaDef()
{
    LOG(DEBUG, ASSEMBLER) << "started searching for meta information (line " << line_stric_
                          << "): " << context_.tokens[context_.number - 1].whole_line;
    if (context_.Mask()) {
        return false;
    }
    // `flag` records whether an opening '<' was actually present.
    bool flag = false;
    if (*context_ == Token::Type::DEL_LT) {
        flag = true;
        ++context_;
    }
    if (!ParseMetaList(flag)) {
        return false;
    }
    // A '>' without a matching '<' is a bounds error.
    if (!flag && *context_ == Token::Type::DEL_GT) {
        context_.err = GetError("Expected '<'.", Error::ErrorType::ERR_BAD_METADATA_BOUND);
        ++context_;
        return false;
    }
    LOG(DEBUG, ASSEMBLER) << "searching for meta information (line " << line_stric_ << ") is successful";
    // Consume the closing '>' left in place by ParseMetaList.
    if (flag && context_.err.err == Error::ErrorType::ERR_NONE) {
        ++context_;
    }
    return true;
}
// Translates a Metadata attribute error into a parser error whose column
// offset points back at the offending attribute (or its value).
// Fix: the INVALID_VALUE case used a magic `-1` where every other case uses
// the named offset constants; use NO_VALUE_OFF (same value) for consistency.
void Parser::SetMetadataContextError(const Metadata::Error &err, bool has_value)
{
    // Column back-offsets: how far the reported token lies behind the current
    // context position.
    constexpr int64_t NO_VALUE_OFF = -1;
    constexpr int64_t SPECIAL_OFF = -2;
    constexpr int64_t STANDARD_OFF = -3;
    switch (err.GetType()) {
        case Metadata::Error::Type::UNKNOWN_ATTRIBUTE: {
            context_.err = GetError(err.GetMessage(), Error::ErrorType::ERR_BAD_METADATA_UNKNOWN_ATTRIBUTE, 0,
                                    has_value ? STANDARD_OFF : NO_VALUE_OFF);
            break;
        }
        case Metadata::Error::Type::MISSING_ATTRIBUTE: {
            context_.err = GetError(err.GetMessage(), Error::ErrorType::ERR_BAD_METADATA_MISSING_ATTRIBUTE);
            break;
        }
        case Metadata::Error::Type::MISSING_VALUE: {
            context_.err = GetError(err.GetMessage(), Error::ErrorType::ERR_BAD_METADATA_MISSING_VALUE);
            break;
        }
        case Metadata::Error::Type::UNEXPECTED_ATTRIBUTE: {
            context_.err = GetError(err.GetMessage(), Error::ErrorType::ERR_BAD_METADATA_UNEXPECTED_ATTRIBUTE, 0,
                                    has_value ? STANDARD_OFF : NO_VALUE_OFF);
            break;
        }
        case Metadata::Error::Type::UNEXPECTED_VALUE: {
            context_.err =
                GetError(err.GetMessage(), Error::ErrorType::ERR_BAD_METADATA_UNEXPECTED_VALUE, 0, SPECIAL_OFF);
            break;
        }
        case Metadata::Error::Type::INVALID_VALUE: {
            context_.err =
                GetError(err.GetMessage(), Error::ErrorType::ERR_BAD_METADATA_INVALID_VALUE, 0, NO_VALUE_OFF);
            break;
        }
        case Metadata::Error::Type::MULTIPLE_ATTRIBUTE: {
            context_.err = GetError(err.GetMessage(), Error::ErrorType::ERR_BAD_METADATA_MULTIPLE_ATTRIBUTE, 0,
                                    has_value ? STANDARD_OFF : NO_VALUE_OFF);
            break;
        }
        default: {
            UNREACHABLE();
        }
    }
}
// Enforces the comma between metadata attributes.  No comma is expected
// while an '=' value assignment is still pending (eq == true) or before the
// very first attribute (comma == false).
bool Parser::ParseMetaListComma(bool &comma, bool eq)
{
    if (!eq && comma) {
        if (*context_ != Token::Type::DEL_COMMA) {
            context_.err = GetError("Expected comma.", Error::ErrorType::ERR_BAD_NUMBER_OPERANDS);
            return false;
        }
        ++context_;
    }
    comma = true;
    return true;
}
// Validates the token that should start the next metadata element: an
// attribute name when eq == false, or a value (identifier or string literal)
// when eq == true.
bool Parser::MeetExpMetaList(bool eq)
{
    if (!eq && *context_ != Token::Type::ID) {
        context_.err = GetError("Expected identifier.", Error::ErrorType::ERR_BAD_DEFINITION_METADATA, +1);
        return false;
    }
    if (eq && *context_ != Token::Type::ID && *context_ != Token::Type::ID_STRING) {
        context_.err =
            GetError("Expected identifier or string literal.", Error::ErrorType::ERR_BAD_DEFINITION_METADATA, +1);
        return false;
    }
    // Attribute names (not values) must also be valid prefixed names.
    if (!eq && !PrefixedValidName()) {
        context_.err = GetError("Invalid attribute name.", Error::ErrorType::ERR_BAD_NAME_ID);
        return false;
    }
    return true;
}
// Consumes one metadata element.  State machine over `eq`: when eq == false
// the current token is an attribute name; when eq == true it is the value of
// the previously named attribute.  Commits "name" or "name=value" pairs to
// metadata_ once the element is complete.
bool Parser::BuildMetaListAttr(bool &eq, std::string &attribute_name, std::string &attribute_value)
{
    if (eq && *context_ == Token::Type::ID_STRING) {
        auto res = ParseStringLiteral();
        if (!res) {
            return false;
        }
        attribute_value = res.value();
    } else if (eq) {
        attribute_value = context_.GiveToken();
    } else {
        attribute_name = context_.GiveToken();
    }
    ++context_;
    if (context_.Mask()) {
        return false;
    }
    if (*context_ == Token::Type::DEL_EQ) {
        // A second '=' in a row ("a = = b") is malformed.
        if (eq) {
            context_.err = GetError("Unexpected '='.", Error::ErrorType::ERR_BAD_NOEXP_DELIM);
            return false;
        }
        ++context_;
        eq = true;
    } else {
        // Element finished: commit either "name=value" or a bare "name".
        std::optional<Metadata::Error> res;
        bool has_value = eq;
        if (has_value) {
            res = metadata_->SetAttributeValue(attribute_name, attribute_value);
        } else {
            res = metadata_->SetAttribute(attribute_name);
        }
        eq = false;
        if (res) {
            auto err = res.value();
            SetMetadataContextError(err, has_value);
            return false;
        }
    }
    return true;
}
// Parses the body of a metadata block.  `flag` is true when an opening '<'
// was consumed (so a matching '>' is required).  Leaves the closing '>' in
// the stream for the caller (ParseMetaDef) to consume.
bool Parser::ParseMetaList(bool flag)
{
    // Without '<', only end-of-tokens, '>' (reported by the caller) or '{'
    // may follow -- anything else is stray metadata.
    if (!flag && !context_.Mask() && *context_ != Token::Type::DEL_GT && *context_ != Token::Type::DEL_BRACE_L) {
        context_.err = GetError("No meta data expected.", Error::ErrorType::ERR_BAD_DEFINITION_METADATA);
        return false;
    }
    bool comma = false;
    bool eq = false;
    std::string attribute_name;
    std::string attribute_value;
    while (true) {
        if (context_.Mask()) {
            context_.err = GetError("Expected '>'.", Error::ErrorType::ERR_BAD_METADATA_BOUND, +1);
            return false;
        }
        // Stop at the first token that cannot be part of an attribute list.
        if (context_.id != Token::Type::DEL_COMMA && context_.id != Token::Type::ID &&
            context_.id != Token::Type::ID_STRING && context_.id != Token::Type::DEL_EQ) {
            break;
        }
        if (!ParseMetaListComma(comma, eq)) {
            return false;
        }
        if (!MeetExpMetaList(eq)) {
            return false;
        }
        if (!BuildMetaListAttr(eq, attribute_name, attribute_value)) {
            return false;
        }
    }
    if (flag && *context_ != Token::Type::DEL_GT) {
        context_.err = GetError("Expected '>'.", Error::ErrorType::ERR_BAD_METADATA_BOUND);
        ++context_;
        return false;
    }
    // All attributes collected; let the metadata object cross-validate them.
    auto res = metadata_->ValidateData();
    if (res) {
        auto err = res.value();
        SetMetadataContextError(err, false);
        return false;
    }
    return true;
}
// Parses one instruction line: optional label, operation mnemonic, operands.
// When any stage fails, the line is still considered handled if the token
// stream is exhausted (context_.Mask()).
bool Parser::ParseFunctionInstruction()
{
    if (ParseLabel() && ParseOperation() && ParseOperands()) {
        return true;
    }
    return context_.Mask();
}
} // namespace panda::pandasm
|
#!/bin/sh
# Copyright (c) 2014-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.

# Verifies GPG-signed material supplied on stdin against the trusted-keys
# list; exits 0 only when a trusted key produced a valid signature.
INPUT=$(cat /dev/stdin)
VALID=false
REVSIG=false
# Split command-substitution word lists on newlines only (status lines
# contain spaces).
IFS='
'
if [ "$AUREUS_VERIFY_COMMITS_ALLOW_SHA1" = 1 ]; then
	GPG_RES="$(echo "$INPUT" | gpg --trust-model always "$@" 2>/dev/null)"
else
	# Note how we've disabled SHA1 with the --weak-digest option, disabling
	# signatures - including selfsigs - that use SHA1. While you might think that
	# collision attacks shouldn't be an issue as they'd be an attack on yourself,
	# in fact because what's being signed is a commit object that's
	# semi-deterministically generated by untrusted input (the pull-req) in theory
	# an attacker could construct a pull-req that results in a commit object that
	# they've created a collision for. Not the most likely attack, but preventing
	# it is pretty easy so we do so as a "belt-and-suspenders" measure.
	GPG_RES=""
	# NOTE(review): the quoted "$(gpg --version)" makes this loop run exactly
	# once over the whole version output; the prefix globs below still match
	# because the version string is the first line -- confirm before changing.
	for LINE in "$(gpg --version)"; do
		case "$LINE" in
		"gpg (GnuPG) 1.4.1"*|"gpg (GnuPG) 2.0."*)
			echo "Please upgrade to at least gpg 2.1.10 to check for weak signatures" > /dev/stderr
			GPG_RES="$(echo "$INPUT" | gpg --trust-model always "$@" 2>/dev/null)"
			;;
		# We assume if you're running 2.1+, you're probably running 2.1.10+
		# gpg will fail otherwise
		# We assume if you're running 1.X, it is either 1.4.1X or 1.4.20+
		# gpg will fail otherwise
		esac
	done
	# Modern gpg: verify while rejecting SHA1-based signatures.
	[ "$GPG_RES" = "" ] && GPG_RES="$(echo "$INPUT" | gpg --trust-model always --weak-digest sha1 "$@" 2>/dev/null)"
fi
for LINE in $(echo "$GPG_RES"); do
	case "$LINE" in
	"[GNUPG:] VALIDSIG "*)
		# The 10th VALIDSIG field is the primary key fingerprint; accept when
		# it matches any line of the trusted-keys file.
		while read KEY; do
			[ "${LINE#?GNUPG:? VALIDSIG * * * * * * * * * }" = "$KEY" ] && VALID=true
		done < ./contrib/verify-commits/trusted-keys
		;;
	"[GNUPG:] REVKEYSIG "*)
		# Signature by a revoked key: only acceptable when explicitly allowed.
		[ "$AUREUS_VERIFY_COMMITS_ALLOW_REVSIG" != 1 ] && exit 1
		REVSIG=true
		GOODREVSIG="[GNUPG:] GOODSIG ${LINE#* * *}"
		;;
	esac
done
if ! $VALID; then
	exit 1
fi
if $VALID && $REVSIG; then
	# Re-emit the status lines, substituting GOODSIG for the revoked-key status.
	echo "$INPUT" | gpg --trust-model always "$@" 2>/dev/null | grep "\[GNUPG:\] \(NEWSIG\|SIG_ID\|VALIDSIG\)"
	echo "$GOODREVSIG"
else
	echo "$INPUT" | gpg --trust-model always "$@" 2>/dev/null
fi
|
#!/bin/sh
# Demonstration of sh looping constructs: while, until, infinite loops with
# break/continue, and reading lines from pipes and files.  User-facing
# prompts are in Estonian; Estonian comments are translated below.

# Loops as long as `ls` succeeds (effectively forever); swap in the
# commented-out `until` to invert the condition.
while
#until
ls > /dev/null
do
	echo "ahhaa"
	sleep 3
done
# Count 1..10 with `while`.
X=1
while [ $X -le 10 ]
do
	echo "X on $X"
	X=$((X+1))
done
# Same count expressed with `until`.
X=1
until [ $X -gt 10 ]
do
	echo "X on $X"
	X=$((X+1))
done
# Infinite loop (`:` is always true) with a conditional break.
while :
do
	echo "Teeme"
	sleep 2
	echo -e "Tahad lõpetada? [j/e] \c"
	read answer
	if [ "Xj" = "X$answer" ] ; then
		echo "hea küll, lõpetame"
		break
	fi
done
echo "tehtud"
# the same thing done another way
until
	echo -e "Tahad lõpetada? [j/e] \c"
	read answer
	[ "Xj" = "X$answer" ]
do
	echo "Teeme"
	sleep 2
done
echo "tehtud"
# break/continue driven by a case statement on the answer.
while true
do
	echo "Teeme"
	sleep 2
	echo -e "Tahad lõpetada? [j/e] \c"
	read answer
	case $answer in
	[Jj]*)
		echo "hea küll, lõpetame"
		break
		;;
	[Ee]*)
		continue
		;;
	esac
	echo "teeme midagi veel, kui ei ole jah/ei"
done
echo "tehtud"
# process the contents of a file
cat asjad.txt | while read rida
do
	echo "Rida: $rida"
done
# Same, via redirection (runs the loop in the current shell, no pipe).
while read rida
do
	echo "Rida: $rida"
done < asjad.txt
ls -al | while read rida
do
	echo $rida
done
|
/* Produced by CVXGEN, 2021-03-12 04:35:32 -0500. */
/* CVXGEN is Copyright (C) 2006-2017 <NAME>, <EMAIL>. */
/* The code in this file is Copyright (C) 2006-2017 <NAME>. */
/* CVXGEN, or solvers produced by CVXGEN, cannot be used for commercial */
/* applications without prior written permission from <NAME>. */
/* Filename: testsolver.c. */
/* Description: Basic test harness for solver.c. */
#include "solver.h"
Vars vars;
Params params;
Workspace work;
Settings settings;
#define NUMTESTS 0
/* Entry point of the CVXGEN test harness: solves one problem instance with
 * the default data, then (only when NUMTESTS > 0) re-solves repeatedly to
 * measure per-solve time. */
int main(int argc, char **argv) {
  int num_iters;
#if (NUMTESTS > 0)
  int i;
  double time;
  double time_per;
#endif
  set_defaults();
  setup_indexing();
  load_default_data();
  /* Solve problem instance for the record. */
  settings.verbose = 1;
  num_iters = solve();
#ifndef ZERO_LIBRARY_MODE
#if (NUMTESTS > 0)
  /* Now solve multiple problem instances for timing purposes. */
  settings.verbose = 0;
  tic();
  for (i = 0; i < NUMTESTS; i++) {
    solve();
  }
  time = tocq();
  printf("Timed %d solves over %.3f seconds.\n", NUMTESTS, time);
  time_per = time / NUMTESTS;
  /* Report the per-solve time in the most readable unit (s / ms / us). */
  if (time_per > 1) {
    printf("Actual time taken per solve: %.3g s.\n", time_per);
  } else if (time_per > 1e-3) {
    printf("Actual time taken per solve: %.3g ms.\n", 1e3*time_per);
  } else {
    printf("Actual time taken per solve: %.3g us.\n", 1e6*time_per);
  }
#endif
#endif
  return 0;
}
/* Fills `params` with the fixed pseudo-random problem data generated by
 * CVXGEN.  Machine-generated; the literal values are part of the expected
 * test fixture and must not be edited by hand. */
void load_default_data(void) {
  params.x_ss_1[0] = 0.20319161029830202;
  params.x_ss_1[1] = 0.8325912904724193;
  params.x_ss_1[2] = -0.8363810443482227;
  params.Q[0] = 1.510827605197663;
  params.Q[1] = 1.8929469543476547;
  params.Q[2] = 1.896293088933438;
  params.x_ss_2[0] = -1.497658758144655;
  params.x_ss_2[1] = -1.171028487447253;
  params.x_ss_2[2] = -1.7941311867966805;
  params.x_ss_3[0] = -0.23676062539745413;
  params.x_ss_3[1] = -1.8804951564857322;
  params.x_ss_3[2] = -0.17266710242115568;
  params.x_ss_4[0] = 0.596576190459043;
  params.x_ss_4[1] = -0.8860508694080989;
  params.x_ss_4[2] = 0.7050196079205251;
  params.x_ss_5[0] = 0.3634512696654033;
  params.x_ss_5[1] = -1.9040724704913385;
  params.x_ss_5[2] = 0.23541635196352795;
  params.x_ss_6[0] = -0.9629902123701384;
  params.x_ss_6[1] = -0.3395952119597214;
  params.x_ss_6[2] = -0.865899672914725;
  params.x_ss_7[0] = 0.7725516732519853;
  params.x_ss_7[1] = -0.23818512931704205;
  params.x_ss_7[2] = -1.372529046100147;
  params.x_ss_8[0] = 0.17859607212737894;
  params.x_ss_8[1] = 1.1212590580454682;
  params.x_ss_8[2] = -0.774545870495281;
  params.x_ss_9[0] = -1.1121684642712744;
  params.x_ss_9[1] = -0.44811496977740495;
  params.x_ss_9[2] = 1.7455345994417217;
  params.x_ss_10[0] = 1.9039816898917352;
  params.x_ss_10[1] = 0.6895347036512547;
  params.x_ss_10[2] = 1.6113364341535923;
  params.x_ss_11[0] = 1.383003485172717;
  params.x_ss_11[1] = -0.48802383468444344;
  params.x_ss_11[2] = -1.631131964513103;
  params.x_ss_12[0] = 0.6136436100941447;
  params.x_ss_12[1] = 0.2313630495538037;
  params.x_ss_12[2] = -0.5537409477496875;
  params.x_ss_13[0] = -1.0997819806406723;
  params.x_ss_13[1] = -0.3739203344950055;
  params.x_ss_13[2] = -0.12423900520332376;
  params.x_ss_14[0] = -0.923057686995755;
  params.x_ss_14[1] = -0.8328289030982696;
  params.x_ss_14[2] = -0.16925440270808823;
  params.x_ss_15[0] = 1.442135651787706;
  params.x_ss_15[1] = 0.34501161787128565;
  params.x_ss_15[2] = -0.8660485502711608;
  params.x_ss_16[0] = -0.8880899735055947;
  params.x_ss_16[1] = -0.1815116979122129;
  params.x_ss_16[2] = -1.17835862158005;
  params.x_ss_17[0] = -1.1944851558277074;
  params.x_ss_17[1] = 0.05614023926976763;
  params.x_ss_17[2] = -1.6510825248767813;
  params.x_ss_18[0] = -0.06565787059365391;
  params.x_ss_18[1] = -0.5512951504486665;
  params.x_ss_18[2] = 0.8307464872626844;
  params.x_ss_19[0] = 0.9869848924080182;
  params.x_ss_19[1] = 0.7643716874230573;
  params.x_ss_19[2] = 0.7567216550196565;
  params.x_ss_20[0] = -0.5055995034042868;
  params.x_ss_20[1] = 0.6725392189410702;
  params.x_ss_20[2] = -0.6406053441727284;
  params.x_ss_21[0] = 0.29117547947550015;
  params.x_ss_21[1] = -0.6967713677405021;
  params.x_ss_21[2] = -0.21941980294587182;
  params.x_ss_22[0] = -1.753884276680243;
  params.x_ss_22[1] = -1.0292983112626475;
  params.x_ss_22[2] = 1.8864104246942706;
  params.x_ss_23[0] = -1.077663182579704;
  params.x_ss_23[1] = 0.7659100437893209;
  params.x_ss_23[2] = 0.6019074328549583;
  params.x_ss_24[0] = 0.8957565577499285;
  params.x_ss_24[1] = -0.09964555746227477;
  params.x_ss_24[2] = 0.38665509840745127;
  params.x_ss_25[0] = -1.7321223042686946;
  params.x_ss_25[1] = -1.7097514487110663;
  params.x_ss_25[2] = -1.2040958948116867;
  params.x_ss_26[0] = -1.3925560119658358;
  params.x_ss_26[1] = -1.5995826216742213;
  params.x_ss_26[2] = -1.4828245415645833;
  params.Q_last[0] = 1.5532777318076536;
  params.Q_last[1] = 1.1878148249238782;
  params.Q_last[2] = 1.9521012430312084;
  params.Af[0] = 0.7264471152297065;
  params.Af[1] = 0.16407869343908477;
  params.Af[2] = 0.8287224032315907;
  params.Af[3] = -0.9444533161899464;
  params.Af[4] = 1.7069027370149112;
  params.x_0[0] = 1.3567722311998827;
  params.x_0[1] = 0.9052779937121489;
  params.x_0[2] = -0.07904017565835986;
  params.Bf[0] = 1.3684127435065871;
  params.A[0] = 0.979009293697437;
  params.A[1] = 0.6413036255984501;
  params.A[2] = 1.6559010680237511;
  params.A[3] = 0.5346622551502991;
  params.A[4] = -0.5362376605895625;
  params.B[0] = 0.2113782926017822;
  params.u_max[0] = -1.2144776931994525;
  params.u_last[0] = -1.2317108144255875;
  params.du_max_f[0] = 0.9026784957312834;
  params.du_max[0] = 1.1397468137245244;
  params.x_max_2[0] = 1.8883934547350631;
  params.x_max_3[0] = 1.4038856681660068;
}
|
<filename>node_modules/@medusajs/medusa/dist/repositories/product.d.ts
import { FindManyOptions, FindOperator, OrderByCondition, Repository } from "typeorm";
import { ProductTag } from "..";
import { Product } from "../models/product";
/** Product find options with the `relations` key stripped; relations are
 * applied separately by the repository's find helpers. */
declare type DefaultWithoutRelations = Omit<FindManyOptions<Product>, "relations">;
/** Query options accepted by the relation-aware find helpers below. */
declare type CustomOptions = {
    where?: DefaultWithoutRelations["where"] & {
        tags?: FindOperator<ProductTag>;
    };
    order?: OrderByCondition;
    skip?: number;
    take?: number;
    withDeleted?: boolean;
};
declare type FindWithRelationsOptions = CustomOptions;
/** TypeORM repository for Product with helpers that load a caller-selected
 * set of relations. */
export declare class ProductRepository extends Repository<Product> {
    private mergeEntitiesWithRelations;
    private queryProducts;
    private getGroupedRelations;
    private queryProductsWithIds;
    findWithRelationsAndCount(relations?: Array<keyof Product>, idsOrOptionsWithoutRelations?: FindWithRelationsOptions): Promise<[Product[], number]>;
    findWithRelations(relations?: Array<keyof Product>, idsOrOptionsWithoutRelations?: FindWithRelationsOptions, withDeleted?: boolean): Promise<Product[]>;
    findOneWithRelations(relations?: Array<keyof Product>, optionsWithoutRelations?: FindWithRelationsOptions): Promise<Product>;
}
export {};
|
<reponame>Zaibot/node-arrayq
require('../es6/prototype');
const { Suite } = require('benchmark');
const data = require('./data.json');
module.exports = () =>
new Suite()
.add('length === 0', () => data.length === 0)
.add('qEmpty', () => data.qEmpty());
|
#!/bin/bash
# Builds pslLiftSubrangeBlat from the UCSC kent source tree and installs it
# into the package prefix.
# Fix: abort on the first failed command -- previously a failed sub-make was
# ignored and the script continued to the install step.
set -e
mkdir -p "$PREFIX/bin"
export MACHTYPE=x86_64
export BINDIR=$(pwd)/bin
mkdir -p "$BINDIR"
# The libraries must be built before the utility that links against them.
(cd kent/src/lib && make)
(cd kent/src/htslib && make)
(cd kent/src/jkOwnLib && make)
(cd kent/src/hg/lib && make)
(cd kent/src/utils/pslLiftSubrangeBlat && make)
cp bin/pslLiftSubrangeBlat "$PREFIX/bin"
chmod +x "$PREFIX/bin/pslLiftSubrangeBlat"
|
<reponame>shrtCKT/FCG-to-Vector<gh_stars>1-10
package ml.cluster;
import java.util.ArrayList;
import java.util.BitSet;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import ml.data.Attribute;
import ml.data.DataInstance;
public class AgglomerativeHierarchical {
// Strategies for measuring the proximity between two clusters when deciding
// which pair to merge next.
public enum ProximityMetric {
    SingleLink, CompleteLink, GroupAverage, IntraClusterSimilarity, CentroidSimilarity,
    UPGMASimilarity
}
/***
 * A pair of cluster ids plus the proximity between them.  Identity
 * (equals/hashCode) is defined by the two ids only; proximity is mutable
 * payload and deliberately excluded.
 */
public static class ClusterPair {
    final int firstCluster;
    final int secondCluster;
    double proximity;

    public ClusterPair(int firstCluster, int secondCluster) {
        this.firstCluster = firstCluster;
        this.secondCluster = secondCluster;
    }

    public ClusterPair(int firstCluster, int secondCluster, double proximity) {
        this(firstCluster, secondCluster);
        this.proximity = proximity;
    }

    public double getProximity() {
        return proximity;
    }

    public void setProximity(double proximity) {
        this.proximity = proximity;
    }

    public int getFirstCluster() {
        return firstCluster;
    }

    public int getSecondCluster() {
        return secondCluster;
    }

    @Override
    public int hashCode() {
        // Same 17/31 scheme as before, folded into one expression.
        return 31 * (31 * 17 + firstCluster) + secondCluster;
    }

    @Override
    public boolean equals(Object obj) {
        if (obj == this) {
            return true;
        }
        // instanceof is false for null, so no separate null check is needed.
        if (!(obj instanceof ClusterPair)) {
            return false;
        }
        ClusterPair other = (ClusterPair) obj;
        return this.firstCluster == other.firstCluster && this.secondCluster == other.secondCluster;
    }
}
// Map cluster id to cluster member mask.
HashMap<Integer, Cluster> clusterMembership;
List<Cluster> finalClusters;
double[][] proxMatrix;
/***
* The distance function to be used.
*/
DistanceFunction distFn;
/***
* Similarity function to be used.
*/
SimilarityFunction simFn;
ProximityMetric proxMetric;
/***
* When run in cutHeight mode, the culster algorithm, this holds the merge hieght on a dendrogram.
*/
double[] mergeHieghts;
/***
* Uses a distance function as opposed to a similarity function.
*
* @param distFunction
* @param proxMetric
*/
public AgglomerativeHierarchical(DistanceFunction distFunction, ProximityMetric proxMetric) {
this.distFn = distFunction;
this.simFn = null;
this.proxMetric = proxMetric;
}
/***
* Uses a similarity function as opposed to a distance function.
* @param simFunction
* @param proxMetric
*/
public AgglomerativeHierarchical(SimilarityFunction simFunction, ProximityMetric proxMetric) {
this.distFn = null;
this.simFn = simFunction;
this.proxMetric = proxMetric;
}
public List<Cluster> getClusters() {
return finalClusters;
}
public void cluster(List<DataInstance> data, int k, List<Attribute> attributeList) {
cluster(data, k, attributeList, false);
}
public void cluster(List<DataInstance> data, int k, List<Attribute> attributeList,
boolean cutHieghtMode) {
mergeHieghts = null;
if (cutHieghtMode) {
mergeHieghts = new double[data.size()];
}
// Create N clusters each with one member point
clusterMembership = new HashMap<Integer, Cluster>();
for (int i = 0; i < data.size(); i++) {
Cluster cMemebers = new Cluster(i, data.get(i));
cMemebers.getMembers().set(i);
clusterMembership.put(i, cMemebers);
}
initializeProximityMatrix(data, attributeList);
int numClusters = data.size();
int count = 0;
do {
ClusterPair closestC = null;
if (simFn != null) {
closestC = pickMostSimilarClusters();
} else {
closestC = pickClosestClusters();
}
// merge closest two clusters
if (cutHieghtMode) {
mergeHieghts[count++] = closestC.getProximity();
}
mergeClusters(closestC.getFirstCluster(), closestC.getSecondCluster(), data, attributeList);
numClusters--;
} while(numClusters > k);
finalClusters = new ArrayList<Cluster>();
for (Map.Entry<Integer, Cluster> me : clusterMembership.entrySet()) {
finalClusters.add(me.getValue());
}
}
private void initializeProximityMatrix(List<DataInstance> data, List<Attribute> attributeList) {
proxMatrix = new double[data.size()][data.size()];
for (int i = 0; i < data.size(); i++) {
for (int j = i; j < data.size(); j++) {
if (simFn == null) {
if (i == j) {
proxMatrix[i][j] = 0.0;
continue;
}
proxMatrix[i][j] = distFn.distance(data.get(i), data.get(j), attributeList);
} else {
proxMatrix[i][j] = simFn.similarity(data.get(i), data.get(j), attributeList);
}
proxMatrix[j][i] = proxMatrix[i][j];
}
}
}
private ClusterPair pickClosestClusters() {
double minProx = Double.POSITIVE_INFINITY;
ClusterPair pair = null;
for (Integer i : clusterMembership.keySet()) {
for (Integer j : clusterMembership.keySet()) {
if (j <= i) {
continue;
}
if (proxMatrix[i][j] < minProx) {
minProx = proxMatrix[i][j];
pair = new ClusterPair(i,j, minProx);
}
}
}
return pair;
}
private ClusterPair pickMostSimilarClusters() {
double maxSim = Double.NEGATIVE_INFINITY;
ClusterPair pair = null;
for (Integer i : clusterMembership.keySet()) {
for (Integer j : clusterMembership.keySet()) {
if (j <= i) {
continue;
}
if (proxMatrix[i][j] > maxSim) {
maxSim = proxMatrix[i][j];
pair = new ClusterPair(i,j, maxSim);
}
}
}
return pair;
}
private double calcProximity(int cluster1, int cluster2, List<DataInstance> data, List<Attribute> attributeList) {
if (this.proxMetric == ProximityMetric.SingleLink) {
return singleLinkProximity(cluster1, cluster2, data, attributeList);
} else if (this.proxMetric == ProximityMetric.CompleteLink) {
return completeLinkProximity(cluster1, cluster2, data, attributeList);
} else if (this.proxMetric == ProximityMetric.GroupAverage) {
return groupAverageProximity(cluster1, cluster2, data, attributeList);
} else if (this.proxMetric == ProximityMetric.IntraClusterSimilarity) {
return intraClusterSimilarity(cluster1, cluster2, data, attributeList);
} else if (this.proxMetric == ProximityMetric.CentroidSimilarity) {
return centroidSimilarity(cluster1, cluster2, data, attributeList);
} else {
return upgmaSimilarity(cluster1, cluster2, data, attributeList);
}
}
/***
* Defines the proximity between two clusters as the distance beweet the closest two points that
* are in different clusters.
*
* @param cluster1 cluster ID of cluster 1.
* @param cluster2 cluster ID of cluster 2.
* @param data the dataset.
* @param attributeList the attribute list.
* @return the proximity between the two clusters.
*/
private double singleLinkProximity(int cluster1, int cluster2, List<DataInstance> data, List<Attribute> attributeList) {
double minProx = Double.POSITIVE_INFINITY;
BitSet c1 = clusterMembership.get(cluster1).getMembers();
BitSet c2 = clusterMembership.get(cluster2).getMembers();
for (int i = c1.nextSetBit(0); i > -1; i = c1.nextSetBit(i + 1)) {
for (int j = c2.nextSetBit(0); j > -1; j = c2.nextSetBit(j + 1)) {
double dist = distFn.distance(data.get(i), data.get(j), attributeList);
if (dist < minProx) {
minProx = dist;
}
}
}
return minProx;
}
/***
* Defines the proximity between two clusters as the distance beweet the farthes two points that
* are in different clusters.
*
* @param cluster1 cluster ID of cluster 1.
* @param cluster2 cluster ID of cluster 2.
* @param data the dataset.
* @param attributeList the attribute list.
* @return the proximity between the two clusters.
*/
private double completeLinkProximity(int cluster1, int cluster2, List<DataInstance> data, List<Attribute> attributeList) {
double maxProx = Double.NEGATIVE_INFINITY;
BitSet c1 = clusterMembership.get(cluster1).getMembers();
BitSet c2 = clusterMembership.get(cluster2).getMembers();
for (int i = c1.nextSetBit(0); i > -1; i = c1.nextSetBit(i + 1)) {
for (int j = c2.nextSetBit(0); j > -1; j = c2.nextSetBit(j + 1)) {
double dist = distFn.distance(data.get(i), data.get(j), attributeList);
if (maxProx < dist) {
maxProx = dist;
}
}
}
return maxProx;
}
/***
* Defines the proximity between two clusters as the average pairwise distance between the
* memebers of the two clusters.
*
* @param cluster1 cluster ID of cluster 1.
* @param cluster2 cluster ID of cluster 2.
* @param data the dataset.
* @param attributeList the attribute list.
* @return the proximity between the two clusters.
*/
private double groupAverageProximity(int cluster1, int cluster2, List<DataInstance> data,
List<Attribute> attributeList) {
double totalProx = 0;
int count = 0;
BitSet c1 = clusterMembership.get(cluster1).getMembers();
BitSet c2 = clusterMembership.get(cluster2).getMembers();
for (int i = c1.nextSetBit(0); i > -1; i = c1.nextSetBit(i + 1)) {
for (int j = c2.nextSetBit(0); j > -1; j = c2.nextSetBit(j + 1)) {
totalProx += distFn.distance(data.get(i), data.get(j), attributeList);
count++;
}
}
return totalProx/count;
}
private double intraClusterSimilarity(int cluster1, int cluster2, List<DataInstance> data,
List<Attribute> attributeList) {
// If cluster Z = merge of Cluster X and Y
// and Sim(A) = SUM_over_memebers_d_of_A(d, CentroidOf_A),
// Then intraClusterSimilarity = Sim(Z) - (Sim(X) + Sim(Y))
double simC1 = intraClusterSimilarity(clusterMembership.get(cluster1).getCentroid(),
clusterMembership.get(cluster1).getMembers(), data, attributeList);
double simC2 = intraClusterSimilarity(clusterMembership.get(cluster2).getCentroid(),
clusterMembership.get(cluster2).getMembers(), data, attributeList);
BitSet newClusterMembers = new BitSet();
newClusterMembers.or(clusterMembership.get(cluster1).getMembers());
newClusterMembers.or(clusterMembership.get(cluster2).getMembers());
DataInstance newClusterCentroid = calculateCentroid(data, newClusterMembers, attributeList);
double simNewC = intraClusterSimilarity(newClusterCentroid, newClusterMembers, data,
attributeList);
return simNewC - (simC1 + simC2);
}
private double intraClusterSimilarity(DataInstance centroid, BitSet clusterMembers,
List<DataInstance> data, List<Attribute> attributeList) {
CosineDistance cosFn = (CosineDistance) simFn;
double magnitude = cosFn.magnitude(centroid, attributeList);
double dotSum = 0.0;
for (int i = clusterMembers.nextSetBit(0); i > -1; i = clusterMembers.nextSetBit(i + 1)) {
dotSum += cosFn.dotProduct(data.get(i), centroid, attributeList);
}
dotSum = magnitude == 0 ? 0 : dotSum / magnitude;
return dotSum;
}
private double centroidSimilarity(int cluster1, int cluster2, List<DataInstance> data,
List<Attribute> attributeList) {
return simFn.similarity(clusterMembership.get(cluster1).getCentroid(),
clusterMembership.get(cluster2).getCentroid(), attributeList);
}
private double upgmaSimilarity(int cluster1, int cluster2, List<DataInstance> data,
List<Attribute> attributeList) {
double upgma = 0;
for (int i = clusterMembership.get(cluster1).getMembers().nextSetBit(0); i > -1;
i = clusterMembership.get(cluster1).getMembers().nextSetBit(i + 1)) {
for (int j = clusterMembership.get(cluster2).getMembers().nextSetBit(0); j > -1;
j = clusterMembership.get(cluster2).getMembers().nextSetBit(j + 1)) {
upgma += simFn.similarity(data.get(i), data.get(j), attributeList);
}
}
double size = clusterMembership.get(cluster1).getMembers().cardinality() *
clusterMembership.get(cluster2).getMembers().cardinality();
upgma = (size == 0 ? 0 : upgma / size);
return upgma;
}
/**
* Merges the second cluster to the first and names the new cluster with
* the same id as firstCluster.
* @param firstCluster cluster id of first cluster.
* @param secondCluster cluster id of second cluster.
* @param data
*/
private void mergeClusters(int firstCluster, int secondCluster, List<DataInstance> data, List<Attribute> attributeList) {
// merge
clusterMembership.get(firstCluster).getMembers().or(clusterMembership.get(secondCluster).getMembers());
// Recalculate centroid.
if (proxMetric == ProximityMetric.IntraClusterSimilarity ||
proxMetric == ProximityMetric.CentroidSimilarity ||
proxMetric == ProximityMetric.UPGMASimilarity) {
DataInstance newCentroid = calculateCentroid(
data, clusterMembership.get(firstCluster).getMembers(), attributeList);
clusterMembership.get(firstCluster).setCentroid(newCentroid);
// System.err.printf("Merge %d %s\n and %d %s \nCetroid = %s\n", firstCluster,
// clusterMembership.get(firstCluster).getCentroid(), secondCluster,
// clusterMembership.get(secondCluster).getCentroid(),
// newCentroid);
}
// Remove second
clusterMembership.remove(secondCluster);
// Recalculate proximity form all clusters to firstCluster(i.e. new cluster).
for (Integer i : clusterMembership.keySet()) {
if (i == firstCluster) {
if (proxMetric == ProximityMetric.IntraClusterSimilarity ||
proxMetric == ProximityMetric.CentroidSimilarity ||
proxMetric == ProximityMetric.UPGMASimilarity) {
proxMatrix[i][firstCluster] = calcProximity(i, firstCluster, data, attributeList);
proxMatrix[firstCluster][i] = proxMatrix[i][firstCluster];
} else {
proxMatrix[i][firstCluster] = 0; // distance of a cluster to it's self is zero.
proxMatrix[firstCluster][i] = proxMatrix[i][firstCluster];
}
} else {
proxMatrix[i][firstCluster] = calcProximity(i, firstCluster, data, attributeList);
proxMatrix[firstCluster][i] = proxMatrix[i][firstCluster];
} // TODO What about i == secondCluster
}
}
/**
* Computes the cluster centroids.
*
* @param centroids
* @param data
* @param mask
* @param clusterMembership
* @param attributeList
*/
public static DataInstance calculateCentroid(final List<DataInstance> data,
final BitSet clusterMemebers, final List<Attribute> attributeList) {
// Initialize the new centroids to 0
DataInstance newCentroid = new DataInstance();
for (Map.Entry<Integer, Object> me : data.get(0).getAttributes()
.entrySet()) {
newCentroid.setAttributeValueAt(me.getKey(), 0.0);
}
// Calculate each attribute's sum.
int count = 0;
for (int i = clusterMemebers.nextSetBit(0); i > -1; i = clusterMemebers.nextSetBit(i + 1)) {
count++;
for (Map.Entry<Integer, Object> me : data.get(0).getAttributes()
.entrySet()) {
int attribIndex = me.getKey();
// Skip Non-Continuous attributes
if (attributeList.get(attribIndex).getType() != Attribute.Type.CONTINUOUS) {
continue;
}
double sum = (Double) newCentroid.getAttributeValueAt(attribIndex)
+ (Double) data.get(i).getAttributeValueAt(attribIndex);
newCentroid.setAttributeValueAt(attribIndex, sum);
}
}
// Calculate each attribute's average.
for (Map.Entry<Integer, Object> me : data.get(0).getAttributes().entrySet()) {
int attributeIndex = me.getKey();
newCentroid.setAttributeValueAt(
attributeIndex,
(count == 0 ? 0 : (Double) newCentroid.getAttributeValueAt(attributeIndex)/ count));
}
return newCentroid;
}
/***
* Returns the various similarities for each cluster merges.
* This helps decide on the number of clusters, K.
*
* @param data
* @param attributeList
* @return
*/
public double[] cutHieght(List<DataInstance> data, List<Attribute> attributeList) {
cluster(data, 1, attributeList, true);
return mergeHieghts;
}
} |
package math;
import java.io.BufferedReader;
import java.io.InputStreamReader;
/**
*
* @author minchoba
* 백준 2903번: 중앙 이동 알고리즘
*
* @see https://www.acmicpc.net/problem/2903
*
*/
/**
 * BOJ 2903 (midpoint displacement): counts grid points after N iterations.
 *
 * Iteration 0 is a single square with 4 corner points. Each iteration inserts
 * midpoints, so if a side currently has s points it gets 2*s - 1, and the
 * total is that value squared. Reads N from stdin, prints the point count.
 */
public class Boj2903 {
    public static void main(String[] args) throws Exception{
        // Read the iteration count N from standard input.
        BufferedReader reader = new BufferedReader(new InputStreamReader(System.in));
        int n = Integer.parseInt(reader.readLine());
        reader.close();

        int points = 4; // iteration 0: the four corners of one square
        for (int step = 1; step <= n; step++) {
            int side = (int) Math.sqrt(points); // points per side before this step
            int grownSide = side + side - 1;    // midpoints: s -> 2s - 1 per side
            points = (int) Math.pow(grownSide, 2);
        }
        System.out.println(points);
    }
}
|
#!/bin/bash
# Periodically reports combined download/upload throughput across the
# configured interfaces, based on the kernel byte counters in sysfs.

# print_bytes RAW_BYTES_PER_SEC -- format a byte rate as "N kB/s" or "N.N MB/s".
print_bytes() {
	#if [ "$1" -eq 0 ] || [ "$1" -lt 100 ]; then
	#	bytes="0 kB/s"
	#elif [ "$1" -lt 1000 ]; then
	#	bytes="0$(echo "scale=1;$1/1000" | bc -l ) kB/s"
	if [ "$1" -eq 0 ] || [ "$1" -lt 1000 ]; then
		bytes="0 kB/s"
	elif [ "$1" -lt 1000000 ]; then
		bytes="$(echo "scale=0;$1/1000" | bc -l ) kB/s"
	else
		bytes="$(echo "scale=1;$1/1000000" | bc -l ) MB/s"
	fi
	echo "$bytes"
}

# Sampling period in seconds; rates are averaged over this window.
INTERVAL=10
INTERFACES="enp0s25 wlp3s0"

declare -A bytes
# Seed the "previous" counters so the first loop iteration has a baseline.
for interface in $INTERFACES; do
	bytes[past_rx_$interface]="$(cat /sys/class/net/"$interface"/statistics/rx_bytes)"
	bytes[past_tx_$interface]="$(cat /sys/class/net/"$interface"/statistics/tx_bytes)"
done

while true; do
	down=0
	up=0
	for interface in $INTERFACES; do
		# Read current counters and derive per-second deltas over the interval.
		bytes[now_rx_$interface]="$(cat /sys/class/net/"$interface"/statistics/rx_bytes)"
		bytes[now_tx_$interface]="$(cat /sys/class/net/"$interface"/statistics/tx_bytes)"
		bytes_down=$((((${bytes[now_rx_$interface]} - ${bytes[past_rx_$interface]})) / INTERVAL))
		bytes_up=$((((${bytes[now_tx_$interface]} - ${bytes[past_tx_$interface]})) / INTERVAL))
		# Aggregate across interfaces.
		down=$(((( "$down" + "$bytes_down" ))))
		up=$(((( "$up" + "$bytes_up" ))))
		# Roll the current counters over for the next iteration.
		bytes[past_rx_$interface]=${bytes[now_rx_$interface]}
		bytes[past_tx_$interface]=${bytes[now_tx_$interface]}
	done
	echo "Download: $(print_bytes $down) / Upload: $(print_bytes $up)"
	sleep $INTERVAL
done
|
from django.contrib.staticfiles.urls import urlpatterns as staticfiles_urlpatterns
from devilry.project.common.default_urls import devilry_urls
from devilry.project.common.http_error_handlers import *  # noqa

# Root URL configuration: the standard Devilry URLs followed by the
# staticfiles-serving URLs (development convenience).
urlpatterns = list(devilry_urls) + list(staticfiles_urlpatterns)
|
#!/bin/bash
# Restart poolbench: ask any running instance to quit (SIGQUIT), then relaunch.
killall -s SIGQUIT poolbench.exe
# Give the old process a moment to shut down before starting the new one.
sleep 1
./poolbench.exe config.json
|
<reponame>Mogztter/neo4j-apoc-procedures
package apoc.bolt;
import java.util.Collections;
import java.util.Map;
import static apoc.util.Util.toBoolean;
/**
 * Immutable configuration for Bolt procedures, parsed from an optional map.
 * Missing keys fall back to defaults: virtual=false, statistics=false,
 * readOnly=true, withRelationshipNodeProperties=false.
 */
public class BoltConfig {

    private final boolean virtual;
    private final boolean addStatistics;
    private final boolean readOnly;
    private final boolean withRelationshipNodeProperties;

    public BoltConfig(Map<String, Object> configMap) {
        // Treat a null map exactly like an empty one.
        final Map<String, Object> config =
                (configMap == null) ? Collections.<String, Object>emptyMap() : configMap;
        this.virtual = toBoolean(config.getOrDefault("virtual", false));
        this.addStatistics = toBoolean(config.getOrDefault("statistics", false));
        this.readOnly = toBoolean(config.getOrDefault("readOnly", true));
        this.withRelationshipNodeProperties = toBoolean(config.getOrDefault("withRelationshipNodeProperties", false));
    }

    public boolean isVirtual() {
        return virtual;
    }

    public boolean isAddStatistics() {
        return addStatistics;
    }

    public boolean isReadOnly() {
        return readOnly;
    }

    public boolean isWithRelationshipNodeProperties() {
        return withRelationshipNodeProperties;
    }
}
|
#!/bin/bash
# Sets the selected_bg_color to one of the Solarized accent colors.
# Prompts for a named accent (or a custom hex value) and rewrites the theme's
# GTK2/GTK3 CSS files in place with sed.

# Color prompt
read -p "Please chose an accent color. (yellow, orange, red, magenta, violet, blue, cyan, green or custom) : " color

# Map the chosen name to its Solarized hex value. A case statement avoids the
# unquoted [ $color = ... ] tests of the original (which broke on empty input).
case "$color" in
	yellow)  hexcolor=b58900 ;;
	orange)  hexcolor=cb4b16 ;;
	red)     hexcolor=dc322f ;;  # was "dc322ff" (7 hex digits, invalid)
	magenta) hexcolor=d33682 ;;
	violet)  hexcolor=6c71c4 ;;
	blue)    hexcolor=268bd2 ;;
	cyan)    hexcolor=2aa198 ;;
	green)   hexcolor=859900 ;;
	custom)
		read -p "Please chose an hex code for the color (without the #): " hexcolor
		;;
	*)
		# Previously an unknown choice fell through with an empty hexcolor and
		# sed wrote "selected_bg_color #;" into the theme files. Fail instead.
		echo "Unknown color: $color" >&2
		exit 1
		;;
esac

# Sed operations. Double quotes are required so $hexcolor actually expands --
# the original single-quoted patterns wrote the literal text "$hexcolor".
sed -i "s/selected_bg_color #.*;/selected_bg_color #$hexcolor;/g" gtk-3.0/gtk.css
sed -i "s/selected_bg_color #.*;/selected_bg_color #$hexcolor;/g" gtk-3.0/gtk-dark.css
sed -i "s/nselected_bg_color:#.*\\\\ntext/nselected_bg_color:#$hexcolor\\\\ntext/g" gtk-2.0/gtkrc

# Feedback
echo "Accent color set to $color (#$hexcolor)"
|
#!/bin/bash
# Splits every .mp3 under $base_dir into 5-minute chunks with mp3splt, writing
# chunks to a directory named after each file. Files whose target directory
# already exists are queued in $next_iteration for a later run.
file_req_ext=".mp3"
next_iteration='next_iteration.txt' #must be empty file in start
file_names='file_names.txt'
#base_dir='/home/naji/bashir-workspace/kids_corner/420p/usb1/read_father_aud'
base_dir='/home/naji/Downloads/temp/ytdown/audio_books_backup/aDAS'
# Reuse a pending work list if one exists and no retry queue is outstanding;
# otherwise (re)build the list of mp3 file names from the base directory.
if [ -s $file_names -a ! -s $next_iteration ];then
echo "Processing previous entries: $file_names is not empty"
else
echo "Loading file names list into $file_names"
ls -a $base_dir | grep -i 'mp3' | sort > $file_names
fi
#exit 0
#cd "$base_dir"
while read line; do
echo "$line"
# Last 4 characters of the name, e.g. ".mp3" (tail -c 5 includes the newline).
file_ext=`echo $line|tail -c 5`
#echo "$file_ext"
if [[ "$file_req_ext" == "$file_ext" ]]; then
# Output directory = file name without its ".mp3" suffix.
dir_name=${line::-4}
full_path="$base_dir/$dir_name"
file_path="$base_dir/$line"
if [ -d "$full_path" ]; then
# Already split once; defer to the next iteration instead of overwriting.
echo "Duplicate paths not allowed $full_path"
echo $line >> $next_iteration
#rm -rf "$full_path"
else
echo "success: $line"
# -t 5.0: 5-minute chunks; -o: chunk naming pattern; -d: output directory.
mp3splt -a -t 5.0 -o Lesson_01-@n -d "$full_path" "$file_path"
fi
else
echo "Invalid file type "
fi
#break 1
done < $file_names
# Promote the retry queue (if any) to the work list and clear the queue.
if [[ ! -s $next_iteration ]]; then
exit 0
else
cat $next_iteration > $file_names
cat /dev/null > $next_iteration
fi
|
#include <iostream>
#include <string>
#include "CQChartsVariant.h" // Assuming the necessary header file
// Fetches a model value from the plot and reports, as a status string, whether
// it could be interpreted as a boolean and which branch was taken.
// `ok` doubles as the success flag for both the model read and the conversion.
std::string processPlotModelValue(Plot* plot_, int visibleColumnInd, bool ok) {
  auto value = plot_->modelValue(visibleColumnInd, ok);

  // Model read failed outright.
  if (!ok)
    return "Error: Retrieval of plot model value failed.";

  bool boolValue;

  // Not convertible to a boolean at all.
  if (!CQChartsVariant::toBool(value, boolValue, ok))
    return "Error: Value cannot be converted to boolean.";

  // Conversion ran but flagged the value as invalid.
  if (!ok)
    return "Error: Invalid boolean value retrieved.";

  // Perform the branch-specific operation and report which one ran.
  return boolValue ? "Operation performed for true value."
                   : "Operation performed for false value.";
}
int main() {
// Example usage
Plot* plot = new Plot(); // Assuming Plot class exists
int columnInd = 0;
bool ok = true;
std::cout << processPlotModelValue(plot, columnInd, ok) << std::endl;
return 0;
} |
<gh_stars>1-10
#include<bits/stdc++.h>
#define fi first
#define se second
using namespace std;
// Constraint graph: maps a position to (neighbor position, constraint type)
// pairs. Type 1 = the two endpoints must receive different colors, type 0 =
// the same color (see dfs below).
map < int , vector < pair < int, int > > > gg;
// 2-coloring state per position: 0 = unvisited, 1 or 2 = assigned color.
map < int, int > col;
// Registers the constraint c between positions a-1 and b+1 (after normalizing
// a <= b). NOTE(review): the shift to (a-1, b+1) presumably encodes
// prefix-style interval endpoints -- confirm against the problem statement.
void addEdge(int a, int b, int c) {
if(a > b) swap(a, b);
a = a - 1, b = b + 1;
gg[a].push_back({b, c});
gg[b].push_back({a, c});
}
// Depth-first 2-coloring of the constraint graph. Assigns color ccol to v,
// then visits neighbors: an edge with weight 1 forces the opposite color
// (3 - ccol), weight 0 forces the same color. Returns 0 on the first
// contradiction found, 1 if the component colored consistently.
bool dfs(int v, int ccol = 1) {
col[v] = ccol;
for(auto to : gg[v]) {
if(!col[to.fi] && !dfs(to.fi, (to.se ? 3 - ccol : ccol)))
return 0;
else if(col[to.fi] && (to.se && col[to.fi] == col[v] || !to.se && col[to.fi] != col[v]))
return 0;
}
return 1;
}
// Returns 1 when the current constraint graph is CONTRADICTORY (no consistent
// 2-coloring exists for some component), 0 when all components color fine.
// Note the inverted sense relative to the name; ok() below relies on it.
bool check() {
col.clear();
for(auto v : gg) {
if(!col[v.fi] && !dfs(v.fi))
return 1;
}
return 0;
}
// Parsed constraints: first = 2*a (negated when the answer was the "o..."
// kind, see main), second = 2*b. Coordinates are doubled so midpoints of the
// original integer positions stay integral.
vector < pair < int, int > > aa;
// Returns 1 when the first m constraints are simultaneously satisfiable for
// size len, 0 otherwise. The constraint graph is rebuilt from scratch on
// every call, so this is safe to use inside the binary search in main.
bool ok(int m, int len) {
gg.clear();
for(int i = 0; i < m; i++) {
bool h = (aa[i].first < 0); // negated marker set in main for "o" answers
// Reject constraints whose coordinates fall outside the valid range.
if(abs(aa[i].first) < 1 || abs(aa[i].second) > 2 * len)
return 0;
if(h)
addEdge(-aa[i].first, aa[i].second, 1); // endpoints must differ
else
addEdge(aa[i].first, aa[i].second, 0); // endpoints must match
}
if(check()) return 0;
return 1;
}
// Driver: processes test cases until len == -1. For each case it reads n
// constraints (a, b, answer-string), doubles the coordinates, marks "o..."
// answers by negating a, then binary-searches for the longest prefix of
// constraints that is simultaneously satisfiable and prints its length.
int main() {
ios_base::sync_with_stdio(0);
cin.tie(0);
int x = 0; // test-case counter (currently unused beyond counting)
while(1) {
x++;
int len;
cin >> len;
if(len == -1) break; // sentinel: end of input
if(len == 0) {
cout << 0 << endl;
continue;
}
int n;
cin >> n;
aa.resize(0);
for(int i = 0; i < n; i++) {
int a, b;
string s;
cin >> a >> b >> s;
a *= 2, b *= 2; // double so midpoints stay integral
if(s[0] == 'o') a *= -1; // encode the answer kind in the sign of a
aa.push_back({a, b});
}
// Binary search the largest prefix length l with ok(l, len) == 1.
// Invariant: ok(l) holds (ok(0) trivially does), ok(r) fails or r > n.
int l = 0, r = n + 2;
while(r - l > 1) {
int m = (l + r) / 2;
if(ok(m, len))
l = m;
else
r = m;
}
cout << l << endl;
}
return 0;
}
|
let facade = require('gamecloud')
let {NotifyType, ResType, ActivityType,em_Condition_Type,em_Condition_Checkmode} = facade.const
let EventData = facade.Util.EventData
let LargeNumberCalculator = facade.Util.LargeNumberCalculator
/**
 * Resource-change message handler: reacts to a change in a user's resources.
 * Depending on the resource type it advances task progress, accumulates
 * tiered activity scores, and syncs derived fields, then notifies the client
 * of the user's current action-point (stamina) state.
 * @param {EventData} event
 */
function handle(event){ // the user's resources changed
switch(event.data.type){ // follow-up processing per resource type
case ResType.Coin:
// task progress check: total money spent
this.notifyEvent('user.task', {user:event.user, data:{type:em_Condition_Type.totalSpendMoney, value:event.data.value}})
// accumulate tiered activity score for money
this.service.activity.addScore(event.user.id, ActivityType.Money, event.data.value);
break;
case ResType.Diamond:
if(event.data.value < 0){ // diamonds were spent
// task progress check: total diamonds spent
this.notifyEvent('user.task', {user:event.user, data:{type:em_Condition_Type.totalSpendDiamond, value:-event.data.value}});
// accumulate tiered activity score for diamonds
this.service.activity.addScore(event.user.id, ActivityType.Diamond, -event.data.value);
}
// sync the current diamond count to a dedicated field for analytics
event.user.diamond = event.user.getPocket().GetRes(ResType.Diamond);
break;
case ResType.Action:
event.user.baseMgr.item.AutoAddAP(); // check automatic action-point recovery
if(event.data.value < 0){
// task progress check: action points used
this.notifyEvent('user.task', {user:event.user, data:{type:em_Condition_Type.useAction, value:-event.data.value}});
// accumulate tiered activity score for action points
this.service.activity.addScore(event.user.id, ActivityType.Action, -event.data.value);
}
break;
case ResType.Road:
// advance "total roads" and "obtained this specific road" task conditions
event.user.getTaskMgr().Execute(em_Condition_Type.totalRoad, 1);
event.user.getTaskMgr().Execute(em_Condition_Type.getRoad, event.data.id, em_Condition_Checkmode.absolute);
break;
case ResType.Role:
// advance "total roles" and "obtained this specific role" task conditions
event.user.getTaskMgr().Execute(em_Condition_Type.totalRole, 1);
event.user.getTaskMgr().Execute(em_Condition_Type.getRole, event.data.id, em_Condition_Checkmode.absolute);
break;
case ResType.Scene:
// advance "total scenes" and "obtained this specific scene" task conditions
event.user.getTaskMgr().Execute(em_Condition_Type.totalScene, 1);
event.user.getTaskMgr().Execute(em_Condition_Type.getScene, event.data.id, em_Condition_Checkmode.absolute);
break;
}
// Push the user's up-to-date action-point state to the client.
event.user.notify({type: NotifyType.action, info: event.user.getPocket().getActionData()});
}
module.exports.handle = handle;
|
#!/bin/sh
# Renders the resource-topology-exporter manifest to stdout with
# environment-specific values substituted (container image, poll interval).
DIRNAME="$(dirname "$(readlink -f "$0")")"
# NOTE(review): DEFAULT_IMAGE is never referenced below -- kept as documentation
# of the default image; the effective default is assembled from the parts.
DEFAULT_IMAGE="quay.io/k8stopologyawarewg/resource-topology-exporter:latest"
REPOOWNER=${REPOOWNER:-k8stopologyawarewg}
IMAGENAME=${IMAGENAME:-resource-topology-exporter}
IMAGETAG=${IMAGETAG:-latest}
# Exported so envsubst can see them when expanding the manifest template.
export RTE_CONTAINER_IMAGE=${RTE_CONTAINER_IMAGE:-quay.io/${REPOOWNER}/${IMAGENAME}:${IMAGETAG}}
export RTE_POLL_INTERVAL="${RTE_POLL_INTERVAL:-60s}"
envsubst < ${DIRNAME}/../manifests/resource-topology-exporter.yaml
|
#!/bin/bash
# Copyright 2016 Google, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This init script installs Google Cloud Datalab on the master node of a
# Dataproc cluster.
set -exo pipefail
# Only Dataproc 1.x is supported; newer versions ship the Jupyter component.
readonly NOT_SUPPORTED_MESSAGE="Datalab initialization action is not supported on Dataproc ${DATAPROC_VERSION}.
Use Jupyter Component instead: https://cloud.google.com/dataproc/docs/concepts/components/jupyter"
[[ $DATAPROC_VERSION != 1.* ]] && echo "$NOT_SUPPORTED_MESSAGE" && exit 1
# Cluster/node configuration pulled from instance metadata.
readonly ROLE="$(/usr/share/google/get_metadata_value attributes/dataproc-role)"
readonly PROJECT="$(/usr/share/google/get_metadata_value ../project/project-id)"
readonly SPARK_PACKAGES="$(/usr/share/google/get_metadata_value attributes/spark-packages || true)"
readonly SPARK_CONF='/etc/spark/conf/spark-defaults.conf'
readonly DATALAB_DIR="${HOME}/datalab"
# PYTHONPATH for the container: the py4j/pyspark zips shipped with Spark.
readonly PYTHONPATH="/env/python:$(find /usr/lib/spark/python/lib -name '*.zip' | paste -sd:)"
readonly DOCKER_IMAGE="$(/usr/share/google/get_metadata_value attributes/docker-image ||
echo 'gcr.io/cloud-datalab/datalab:local')"
# For running the docker init action
# NOTE(review): "DEAFULT" is a typo, but the identifier is kept as-is.
readonly DEAFULT_INIT_ACTIONS_REPO=gs://dataproc-initialization-actions
readonly INIT_ACTIONS_REPO="$(/usr/share/google/get_metadata_value attributes/INIT_ACTIONS_REPO ||
echo ${DEAFULT_INIT_ACTIONS_REPO})"
readonly INIT_ACTIONS_BRANCH="$(/usr/share/google/get_metadata_value attributes/INIT_ACTIONS_BRANCH ||
echo 'master')"
# Expose every possible spark configuration to the container.
VOLUMES="$(echo /etc/{hadoop*,hive*,*spark*})"
# GCS/BigQuery connector jars live in different places across image versions;
# mount whichever layout is present.
CONNECTORS_LIB=/usr/lib/hadoop/lib
if [[ -d /usr/local/share/google/dataproc/lib ]]; then
CONNECTORS_LIB="/usr/local/share/google/dataproc/lib"
fi
if [[ -L ${CONNECTORS_LIB}/gcs-connector.jar ]]; then
VOLUMES+=" ${CONNECTORS_LIB}/gcs-connector.jar"
else
VOLUMES+=" $(compgen -G ${CONNECTORS_LIB}/gcs*)"
fi
if [[ -L ${CONNECTORS_LIB}/bigquery-connector.jar ]]; then
VOLUMES+=" ${CONNECTORS_LIB}/bigquery-connector.jar"
elif compgen -G "${CONNECTORS_LIB}/bigquery*" >/dev/null; then
VOLUMES+=" $(compgen -G ${CONNECTORS_LIB}/bigquery*)"
fi
readonly VOLUMES
# Turn the space-separated path list into repeated "-v path:path" docker flags.
readonly VOLUME_FLAGS="$(echo "${VOLUMES}" | sed 's/\S*/-v &:&/g')"
# err MESSAGE... -- print a timestamped message to stderr and fail (return 1),
# so callers can chain it: some_command || err "some_command failed".
err() {
  local ts
  ts="$(date +'%Y-%m-%dT%H:%M:%S%z')"
  printf '[%s]: %s\n' "$ts" "$*" >&2
  return 1
}
# Installs Docker by fetching the init-actions repository (GCS rsync when the
# repo is a gs:// URL, git clone otherwise) and running its docker.sh action.
function install_docker() {
  # Run the docker init action to install docker.
  local init_actions_dir
  init_actions_dir=$(mktemp -d -t dataproc-init-actions-XXXX)
  if [[ ${INIT_ACTIONS_REPO} == gs://* ]]; then
    gsutil -m rsync -r "${INIT_ACTIONS_REPO}" "${init_actions_dir}"
  else
    git clone -b "${INIT_ACTIONS_BRANCH}" --single-branch "${INIT_ACTIONS_REPO}" "${init_actions_dir}"
  fi
  # Scripts lose their execute bit through rsync; restore it before running.
  find "${init_actions_dir}" -name '*.sh' -exec chmod +x {} \;
  "${init_actions_dir}/docker/docker.sh"
}
# docker_pull IMAGE -- pull IMAGE via gcloud with retries: up to 10 attempts,
# 5 seconds apart. Returns 0 on the first success, 1 if every attempt fails.
function docker_pull() {
  local attempt
  for attempt in $(seq 1 10); do
    if gcloud docker -- pull "$1"; then
      return 0
    fi
    sleep 5
  done
  return 1
}
# Prepares the master node: pulls the Datalab image and builds a local child
# image ("datalab-pyspark") with Java/Spark/Hive installed so PySpark can run
# inside the container against the cluster's mounted configuration.
function configure_master() {
  mkdir -p "${DATALAB_DIR}"
  docker_pull "${DOCKER_IMAGE}" || err "Failed to pull ${DOCKER_IMAGE}"
  # For some reason Spark has issues resolving the user's directory inside of
  # Datalab.
  # TODO(pmkc) consider fixing in Dataproc proper.
  if ! grep -q '^spark\.sql\.warehouse\.dir=' "${SPARK_CONF}"; then
    echo 'spark.sql.warehouse.dir=/root/spark-warehouse' >>"${SPARK_CONF}"
  fi
  # Docker gives a "too many symlinks" error if volumes are not yet automounted.
  # Ensure that the volumes are mounted to avoid the error.
  touch ${VOLUMES}
  # Build PySpark Submit Arguments
  pyspark_submit_args=''
  for package in ${SPARK_PACKAGES//','/' '}; do
    pyspark_submit_args+="--packages ${package} "
  done
  pyspark_submit_args+='pyspark-shell'
  # Java is too complicated to simply volume mount into the image, so we need
  # to install it in a child image.
  mkdir -p datalab-pyspark
  pushd datalab-pyspark
  cp /etc/apt/trusted.gpg .
  cp /etc/apt/sources.list.d/dataproc.list .
  # NOTE: the heredoc is deliberately unquoted so ${DOCKER_IMAGE},
  # ${PYTHONPATH} and ${pyspark_submit_args} expand into the Dockerfile.
  cat <<EOF >Dockerfile
FROM ${DOCKER_IMAGE}
# Enabling APT to download from HTTPS repository.
RUN apt-get update
RUN apt-get install -y apt-transport-https software-properties-common
ADD dataproc.list /etc/apt/sources.list.d/
ADD trusted.gpg /tmp/vm_trusted.gpg
RUN apt-key add /tmp/vm_trusted.gpg
# Add Ubuntu 18.04 LTS (bionic) repository to Ubuntu 16.04 LTS (xenial) container,
# so pacakges built on Debian 10 can pull in their dependencies.
RUN add-apt-repository 'deb http://archive.ubuntu.com/ubuntu bionic main'
RUN apt-get update
RUN apt-get install -y hive spark-python openjdk-8-jre-headless
# Workers do not run docker, so have a different python environment.
# To run python3, you need to run the conda init action.
# The conda init action correctly sets up python in PATH and
# /etc/spark/conf/spark-env.sh, but running pyspark via shell.py does
# not pick up spark-env.sh. So, set PYSPARK_PYTHON explicitly to either
# system python or conda python. It is on the user to set up the same
# version of python for workers and the datalab docker container.
ENV PYSPARK_PYTHON=$(ls /opt/conda/bin/python || command -v python)
ENV SPARK_HOME='/usr/lib/spark'
ENV JAVA_HOME='/usr/lib/jvm/java-8-openjdk-amd64'
ENV PYTHONPATH='${PYTHONPATH}'
ENV PYTHONSTARTUP='/usr/lib/spark/python/pyspark/shell.py'
ENV PYSPARK_SUBMIT_ARGS='${pyspark_submit_args}'
ENV DATALAB_ENV='GCE'
EOF
  docker build -t datalab-pyspark .
  popd
}
# Starts the Datalab container detached on the host network, mounting the
# notebook directory plus all Hadoop/Spark config volumes; auto-restarts.
function run_datalab() {
  if docker run -d --restart always --net=host \
    -v "${DATALAB_DIR}:/content/datalab" ${VOLUME_FLAGS} datalab-pyspark; then
    echo 'Cloud Datalab Jupyter server successfully deployed.'
  else
    err 'Failed to run Cloud Datalab'
  fi
}
# Entry point: Datalab runs only on the master node; workers are untouched.
function main() {
  if [[ "${ROLE}" == 'Master' ]]; then
    install_docker
    configure_master
    run_datalab
  fi
}
main
|
#!/bin/bash
# Interactive installer menu for SNIProxy and/or DNSmasq (Debian/Ubuntu/CentOS).
# The heredoc below prints the menu verbatim to stdout.
cat <<EOF
#
# sniproxy_dnsmasq-install.sh
# Support OS: Debian / Ubuntu / CentOS
#
# Please choose to install the following softwares:
#
1. install SNIProxy + DNSmasq
2. install SNIProxy only
3. install DNSmasq only
4. exit
# Before install, make sure the OpenVPN client service has been stopped!
# service openvpn-client@(client name) stop
# service openvpn-client@(client name) status
EOF
# no_command CMD INSTALLER [PKG] -- when CMD is not on PATH, install it via
# "INSTALLER install -y", using package name PKG when given, else CMD itself.
no_command() {
    if ! command -v "$1" >/dev/null 2>&1; then
        # ${3:-$1}: fall back to the command name when no package is supplied
        # (matches the original's [ -z "$3" ] branch, including empty "$3").
        $2 install -y "${3:-$1}"
    fi
}
# check_command CMD -- succeed (return 0) when CMD is absent and therefore
# should be installed; otherwise report that it already exists and return 1.
check_command() {
    if command -v "$1" >/dev/null 2>&1; then
        echo "$1 has already existed. Nothing to do."
        return 1
    fi
    return 0
}
## make choice
# Prompt for one of the menu options printed above; handled by the case below.
read -p "Please choose your option: [1-4]" answer
case $answer in
1 | 2 | 3)
## check OS
source /etc/os-release
case $ID in
debian | ubuntu)
echo System OS is $PRETTY_NAME
apt update
sniproxy_install=debian_build
dnsmasq_install=debian_apt
no_command bc apt
no_command wget apt
no_command curl apt
no_command unzip apt
no_command netstat apt net-tools
no_command pkill apt procps
# continue check
;;&
debian)
if test "$(echo "$VERSION_ID >= 10" | bc)" -ne 0; then
sniproxy_install=debian_apt
fi
;;
ubuntu)
if test "$(echo "$VERSION_ID >= 20.04" | bc)" -ne 0; then
sniproxy_install=debian_apt
fi
;;
centos | fedora | rhel | sangoma)
echo System OS is $PRETTY_NAME
sniproxy_install=centos_build
dnsmasq_install=centos_yum
no_command bc yum
yumdnf="yum"
if test "$(echo "$VERSION_ID >= 22" | bc)" -ne 0; then
yumdnf="dnf"
fi
no_command wget $yumdnf
no_command curl $yumdnf
no_command unzip $yumdnf
no_command netstat $yumdnf net-tools
no_command pkill $yumdnf procps-ng
;;
*)
echo System OS is $PRETTY_NAME
echo Unsupported system OS.
exit 2
;;
esac
## continue check
;;&
## choose to install sniproxy
1 | 2)
echo "continue to install SNIProxy..."
## read domain
read -p "Please input your domain name: " domain
echo "The domain is $domain"
## install sniproxy
if check_command sniproxy; then
case $sniproxy_install in
debian_apt)
apt install -y sniproxy
;;
debian_build)
apt install -y autotools-dev cdbs debhelper dh-autoreconf dpkg-dev gettext libev-dev libpcre3-dev libudns-dev pkg-config fakeroot devscripts build-essential
mkdir ~/sniproxy && cd ~/sniproxy
wget -O master.zip https://github.com/dlundquist/sniproxy/archive/master.zip
rm -rf sniproxy-master
unzip master.zip && cd sniproxy-master
./autogen.sh && dpkg-buildpackage
sniproxy_deb=$(ls .. | grep "sniproxy_.*.deb") && echo ${sniproxy_deb}
[[ ! -z ${sniproxy_deb} ]] && dpkg -i ../${sniproxy_deb}
cd ~
;;
centos_build)
$yumdnf install -y autoconf automake curl gettext-devel libev-devel pcre-devel perl pkgconfig rpm-build
yum install -y epel-release yum-utils
yum-config-manager --enable epel
yum install -y udns-devel
no_command gcc $yumdnf
mkdir ~/sniproxy && cd ~/sniproxy
wget -O master.zip https://github.com/dlundquist/sniproxy/archive/master.zip
rm -rf sniproxy-master
unzip master.zip && cd sniproxy-master
./autogen.sh && ./configure && make dist
rpmbuild --define "_sourcedir $(pwd)" -ba redhat/sniproxy.spec
cd ~
mv ~/rpmbuild/RPMS/x86_64/sniproxy-*.rpm .
$yumdnf install -y sniproxy-0.*.x86_64.rpm
;;
esac
fi
## enable sniproxy autorun (old init.d mode)
#sed -i "s/#DAEMON_ARGS=\"-c \/etc\/sniproxy.conf\"/DAEMON_ARGS=\"-c \/etc\/sniproxy.conf\"/" /etc/default/sniproxy
#sed -i "s/ENABLED=0/ENABLED=1/" /etc/default/sniproxy
## create modified sniproxy.conf file
mkdir -p /var/log/sniproxy
if [ -f "/etc/sniproxy.conf" ]; then
mv /etc/sniproxy.conf /etc/sniproxy_backup.conf
fi
cat >/etc/sniproxy.conf <<EOF
# sniproxy configuration file
# lines that start with # are comments
# lines with only white space are ignored
user nobody
#group nogroup
#user daemon
# PID file, needs to be placed in directory writable by user
pidfile /var/run/sniproxy.pid
# The DNS resolver is required for tables configured using wildcard or hostname
# targets. If no resolver is specified, the nameserver and search domain are
# loaded from /etc/resolv.conf.
resolver {
# Specify name server
#
# NOTE: it is strongly recommended to use a local caching DNS server, since
# uDNS and thus SNIProxy only uses single socket to each name server so
# each DNS query is only protected by the 16 bit query ID and lacks
# additional source port randomization. Additionally no caching is
# preformed within SNIProxy, so a local resolver can improve performance.
# nameserver 127.0.0.1
nameserver 1.1.1.1
nameserver 8.8.8.8
# DNS search domain
# search example.com
# Specify which type of address to lookup in DNS:
#
# * ipv4_only query for IPv4 addresses (default)
# * ipv6_only query for IPv6 addresses
# * ipv4_first query for both IPv4 and IPv6, use IPv4 is present
# * ipv6_first query for both IPv4 and IPv6, use IPv6 is present
mode ipv4_only
}
error_log {
# Log to the daemon syslog facility
#syslog daemon
# Alternatively we could log to file
filename /var/log/sniproxy/sniproxy-error.log
# Control the verbosity of the log
priority notice
}
# Global access log for all listeners
access_log {
# Same options as error_log
# filename /var/log/sniproxy/sniproxy-access-all.log
filename /tmp/sniproxy-access-all.log
}
# blocks are delimited with {...}
listen 80 {
proto http
table http_hosts
# Enable SO_REUSEPORT to allow multiple processess to bind to this ip:port pair
reuseport no
# Fallback server to use if we can not parse the client request
# fallback localhost:8080
# Specify the source address for outgoing connections.
#
# Use "source client" to enable transparent proxy support. This requires
# running sniproxy as root ("user root").
#
# Do not include a port in this address, otherwise you will be limited
# to a single connection to each backend server.
#
# NOTE: binding to a specific address prevents the operating system from
# selecting and source address and port optimally and may significantly
# reduce the maximum number of simultaneous connections possible.
# source 192.0.2.10
# Log the content of bad requests
#bad_requests log
# Override global access log for this listener
access_log {
# Same options as error_log
# filename /var/log/sniproxy/sniproxy-access.log
filename /tmp/sniproxy-access.log
priority notice
}
}
#listen [::]:443 {
# proto tls
# controls if this listener will accept IPv4 connections as well on
# supported operating systems such as Linux or FreeBSD, but not OpenBSD.
# ipv6_v6only on
# table https_hosts
#}
#listen 0.0.0.0 443 {
listen 443 {
# This listener will only accept IPv4 connections since it is bound to the
# IPv4 any address.
proto tls
table https_hosts
access_log {
# filename /var/log/sniproxy/sniproxy-access.log
filename /tmp/sniproxy-access.log
priority notice
}
}
#listen 192.0.2.10:80 {
# protocol http
# # this will use default table
#}
#listen [2001:0db8::10]:80 {
# protocol http
# # this will use default table
#}
#listen unix:/var/run/proxy.sock {
# protocol http
# # this will use default table
#}
# named tables are defined with the table directive
table http_hosts {
# example.com 192.0.2.10:8001
# example.net 192.0.2.10:8002
# example.org 192.0.2.10:8003 proxy_protocol
# Each table entry is composed of three parts:
#
# pattern:
# valid Perl-compatible Regular Expression that matches the
# hostname
#
# target:
# - a DNS name
# - an IP address and TCP port
# - an IP address (will connect to the same port as the listener received the
# connection)
# - '*' to use the hostname that the client requested
#
# pattern target
#.*\.itunes\.apple\.com$ *:443
#.* 127.0.0.1:4443
# external http port 80 to web service http port 81
$domain 127.0.0.1:81
# allows all http websites
#.* *
# Pulto TV
(.*.|)pluto.tv$ *
# pbs
(.*.|)pbs.org$ *
(.*.|)pbskids.org$ *
# Netflix
(.*.|)netflix.*$ *
(.*.|)nflximg.*$ *
(.*.|)nflxvideo.*$ *
(.*.|)nflxso.*$ *
(.*.|)nflxext.*$ *
# Hulu
(.*.|)hulu.com$ *
(.*.|)hulustream.com$ *
}
# named tables are defined with the table directive
table https_hosts {
# When proxying to local sockets you should use different tables since the
# local socket server most likely will not detect which protocol is being
# used
# example.org unix:/var/run/server.sock
# external https port 443 to web service https port 444
$domain 127.0.0.1:444
# allows all https websites
#.* *
# Pulto TV
(.*.|)pluto.tv$ *
# pbs
(.*.|)pbs.org$ *
(.*.|)pbskids.org$ *
# Netflix
(.*.|)netflix.*$ *
(.*.|)nflximg.*$ *
(.*.|)nflxvideo.*$ *
(.*.|)nflxso.*$ *
(.*.|)nflxext.*$ *
# Hulu
(.*.|)hulu.com$ *
(.*.|)hulustream.com$ *
}
# if no table specified the default 'default' table is defined
table {
# If no port is specified the port of the incoming listener is used
# example.com 192.0.2.10
# example.net 192.0.2.20
}
EOF
## find sniproxy (user:daemon) and kill
#pkill -u daemon -f sniproxy
pkill -f /usr/sbin/sniproxy
## create sniproxy.service
cat >/etc/systemd/system/sniproxy.service <<EOF
[Unit]
Description=SNI Proxy Service
Documentation=https://github.com/dlundquist/sniproxy
After=network.target
[Service]
Type=forking
ExecStart=/usr/sbin/sniproxy -c /etc/sniproxy.conf
#PIDFile=/var/run/sniproxy.pid
Restart=on-failure
#Restart=always
[Install]
WantedBy=multi-user.target
EOF
systemctl daemon-reload
systemctl enable sniproxy.service
systemctl restart sniproxy.service
## continue check
;;&
## choose to install dnsmasq
1 | 3)
echo "continue to install DNSmasq..."
if check_command dnsmasq; then
case $dnsmasq_install in
debian_apt)
apt install -y dnsmasq
# install dig and nslookup
no_command dig apt dnsutils
;;
centos_yum)
$yumdnf install -y dnsmasq
# install dig and nslookup
no_command dig $yumdnf bind-utils
;;
esac
fi
## disable dnsmasq modify /etc/resolv.conf
cat >>/etc/default/dnsmasq <<EOF
# disable dnsmasq modify /etc/resolv.conf
DNSMASQ_EXCEPT=lo
#
EOF
## enable conf-dir
cat >>/etc/dnsmasq.conf <<EOF
# include conf-dir
conf-dir=/etc/dnsmasq.d/,*.conf
#
EOF
## get my ip address
#myip=$(curl 'https://ipapi.co/ip/')
myip=$(curl --silent http://api.ipify.org/)
# get internal IP for nat network VPS
myip1=$(hostname -I)
## create /etc/dnsmasq.d/us-ip.conf
cat >/etc/dnsmasq.d/us-ip.conf <<EOF
listen-address=127.0.0.1, $myip1
server=1.1.1.1
server=8.8.8.8
# Pluto TV & PBS
address=/pluto.tv/pbs.org/pbskids.org/$myip
# Netflix
address=/netflix.com/netflix.net/nflximg.com/nflximg.net/nflxvideo.com/nflxvideo.net/nflxso.com/nflxso.net/nflxext.com/nflxext.net/$myip
# Hulu
address=/hulu.com/hulustream.com/$myip
EOF
systemctl restart dnsmasq.service
cat <<EOF
===================================================
This VPS IP is $myip
EOF
## continue check
;;&
## sniproxy information
1 | 2)
echo ""
echo "==============================================================="
echo "ps -ef | grep sniproxy"
ps -ef | grep sniproxy
echo ""
sniproxy -V
echo "netstat -lntup | grep caddy"
netstat -lntup | grep caddy
echo "netstat -lntup | grep sniproxy"
netstat -lntup | grep sniproxy
echo ""
echo "sniproxy has been installed!"
## go exit
;;
*)
echo "exit"
;;
esac
exit 0
|
<gh_stars>0
const ms = require("ms");
module.exports = {
name: "givstart",
description: "Starts a giveaway",
category: "giveaway",
usage: "givstart <time> <winner count> <price>\n **Example:** !givstart 2d 10 Discord nitro",
memberPermissions: ["MANAGE_GUILD"],
aliases: ["gstart"],
execute(bot, message, args) {
const time = args[0];
const winnerCount = args[1];
const prize = args.slice(2).join(" ");
if (!time) {
return message.channel.send("Please provide an end time");
}
if (!winnerCount) {
return message.channel.send("Please provide a winner count");
}
if (!prize) {
return message.channel.send("Please provide a prize");
}
bot.giveawayManager.start(message.channel, {
time: ms(time),
prize: prize,
winnerCount: winnerCount,
messages: {
giveaway: "**🎉🎉 New Giveaway 🎉🎉**",
giveawayEnded: "**GIVEAWAY ENDED**",
timeRemaining: "Time remaining: **{duration}**!",
inviteToParticipate: "React with 🎉 to participate!",
winMessage: "Congratulations, {winners}! You won **{prize}**!",
embedFooter: "Giveaways",
noWinner: "Giveaway cancelled, no valid participations.",
hostedBy: "Hosted by: {user}",
winners: "winner(s)",
endedAt: "Ended at",
units: {
seconds: "seconds",
minutes: "minutes",
hours: "hours",
days: "days",
pluralS: false,
},
},
});
},
};
|
<reponame>dubbl/procgen
var generate_pumpkin = function(seed) {
var canvas = document.getElementById('canvas');
if (!canvas.getContext){
alert('Your browser does not support canvas.');
return;
}
var ctx = canvas.getContext('2d');
var cw = canvas.width;
var ch = canvas.height;
ctx.clearRect(-1, -1, cw+1, ch+1);
if (typeof seed !== 'number') {
seed = (new Date()).getTime();
}
window.location.hash = seed.toString();
var rng = CustomRandom(seed);
var p = {}; // pumpkin object
// Generating basic body measurements
p.start = {};
p.start.x = cw/2;
p.start.y = ch/8;
p.height = ch / rng.nextInt(2, 4);
p.width_modifier = cw * rng.nextInt(15, 30)/p.height;
p.height_modifier1 = cw/ rng.nextInt(6, 12);
p.height_modifier2 = rng.nextInt(0, p.start.y - (p.start.y/8));
p.stop = {};
p.stop.x = p.start.x;
p.stop.y = p.start.y + p.height;
if (rng.next() < 0.8) {
p.inner_color = 'rgb(200, 200, 50)';
} else {
p.inner_color = 'rgb(10, 10, 10)';
}
ctx.shadowBlur = 0;
ctx.shadowColor = "rgba(0, 0, 0, 0.5)";
p.rotation_angle = rng.nextInt(0, 10);
p.rotation_dir = rng.next() >= 0.5 ? 1: -1;
ctx.rotate((Math.PI / 180) * p.rotation_angle * p.rotation_dir);
ctx.translate(0, -5 * p.rotation_angle * p.rotation_dir);
generate_body(ctx, cw, ch, rng, p);
generate_stripes(ctx, cw, ch, rng, p);
generate_stump(ctx, cw, ch, rng, p);
generate_eyes(ctx, cw, ch, rng, p);
generate_nose(ctx, cw, ch, rng, p);
ctx.translate(0, 5 * p.rotation_angle * p.rotation_dir);
ctx.rotate((Math.PI/180) * -p.rotation_angle * p.rotation_dir);
return false;
};
// Draws the nose: either an ellipse or a triangle, centred under the eyes.
// Skipped ~10% of the time, and always skipped when the pumpkin has no eyes
// (the nose position is derived from eye geometry stored on `p`).
var generate_nose = function(ctx, cw, ch, rng, p) {
    console.log('Generating nose of pumpkin...');
    if (rng.next() < 0.1 || !p.has_eyes) {
        console.log('No nose for this one.');
        return;
    }
    ctx.beginPath();
    ctx.fillStyle = p.inner_color;
    ctx.shadowOffsetY = 2;
    p.nose_type = rng.next();
    p.nose_radius_x = p.nose_radius_y = rng.nextInt(5, p.eye_width_x);
    p.nose_start = {}
    p.nose_start.x = p.start.x;
    // draw nose underneath the eyes
    p.nose_start.y = Math.max(
        p.start.y + p.height / 2 - (p.eye_width_y - p.eye_width_x),
        p.left_eye_start.y + p.eye_width_y + p.nose_radius_y
    );
    if (p.nose_type < 0.5) {
        // draw circle nose
        // NOTE(review): this second nextInt overwrites the radius chosen above
        // (which still affected nose_start.y) — confirm that is intentional.
        p.nose_radius_x = p.nose_radius_y = rng.nextInt(5, p.eye_width_x / 2);
        ctx.ellipse(
            p.nose_start.x, // center x
            p.nose_start.y, // center y
            p.nose_radius_x, // radius x
            p.nose_radius_y, // radius y
            0, // rotation
            0, // start angle
            2 * Math.PI // end angle
        );
    } else {
        // Triangle nose, randomly point-up or point-down.
        p.nose_rotated = rng.next() > 0.5;
        draw_triangle(ctx, p.nose_start, p.nose_radius_x, p.nose_radius_y, p.nose_rotated);
    }
    ctx.fill();
    ctx.closePath();
    ctx.shadowOffsetY = 0;
}
// Draws a filled triangle used for eyes and noses.
// `rotated` false: triangle points up from `start`; true: drawn point-down
// after shifting `start.y` up by `height`.
// NOTE(review): `start` is MUTATED in the rotated case (start.y -= height),
// so the caller's point object changes — confirm callers don't reuse it.
// NOTE(review): the rotated branch issues no moveTo(), so the path continues
// from the context's current point, and fill() is called mid-path before the
// extra lineTo — this appears to produce the intended look; verify before
// restructuring.
var draw_triangle = function(ctx, start, width, height, rotated) {
    if (!rotated) {
        ctx.moveTo(start.x, start.y);
    } else {
        start.y -= height;
    }
    ctx.lineTo(start.x - width, start.y + height);
    ctx.lineTo(start.x + width, start.y + height);
    ctx.fill();
    if (rotated) {
        ctx.lineTo(start.x, start.y + height * 2);
    }
    ctx.fill();
    ctx.closePath();
};
var generate_eyes = function(ctx, cw, ch, rng, p) {
console.log('Generating eyes of pumpkin...');
if (rng.next() < 0.1) {
console.log('No eyes for this one.');
return;
}
p.has_eyes = true;
p.eye_center_offset = rng.nextInt(5, 20);
p.eye_width_x = p.eye_width_y = rng.nextInt(5, 15);
p.eye_width_y_factor = 1;
p.eye_type = rng.next();
p.left_eye_start = {
x: p.start.x - p.eye_width_x * 1.5,
y: p.start.y + p.height / 3 - (p.eye_width_y - p.eye_width_x),
};
p.right_eye_start = {
x: p.start.x + p.eye_width_x * 1.5,
y: p.start.y + p.height / 3 - (p.eye_width_y - p.eye_width_x),
};
ctx.beginPath();
ctx.fillStyle = p.inner_color;
ctx.shadowOffsetY = 2;
ctx.shadowBlur = 0;
ctx.shadowColor = "rgba(0, 0, 0, 0.5)";
if (p.eye_type < 0.5) {
// draw circle eyes
p.eye_width_y_factor = 1;
ctx.shadowOffsetX = -2;
var eye_angryness = 0;
if (rng.next() < 0.1) {
// go for a o.0 effect
p.eye_width_y_factor = 1.35;
} else if (rng.next() < 0.6) {
// angry eyes >.<
eye_angryness = 0.5 + rng.next();
}
// draw left eye
ctx.ellipse(
p.start.x - p.eye_width_x * 1.5, // center x
p.start.y + p.height / 3, // center y
p.eye_width_x, // radius x
p.eye_width_y, // radius y
0, // rotation
0, // start angle
(2 - eye_angryness) * Math.PI // end angle
);
ctx.fill();
ctx.closePath();
p.eye_width_y *= p.eye_width_y_factor;
ctx.shadowOffsetX *= -1;
ctx.beginPath();
ctx.ellipse(
p.start.x + p.eye_width_x * 1.5, // center x
p.start.y + p.height / 3 - (p.eye_width_y - p.eye_width_x),
p.eye_width_x, // radius x
p.eye_width_y, // radius y
rng.nextInt(0, 10) * Math.PI/180, // rotation
(eye_angryness - 2) * Math.PI, // start angle
0, // end angle
true // counterclockwise
);
} else {
ctx.shadowOffsetX = -1;
p.eye_rotated = rng.next() > 0.3;
p.eye_width_x = p.eye_width_y *= 1.3;
draw_triangle(ctx, p.left_eye_start, p.eye_width_x, p.eye_width_y, p.eye_rotated);
ctx.closePath();
ctx.shadowOffsetX *= -1;
ctx.beginPath();
draw_triangle(ctx, p.right_eye_start, p.eye_width_x, p.eye_width_y, p.eye_rotated);
ctx.closePath();
}
ctx.fill();
ctx.closePath();
ctx.shadowOffsetX = 0;
ctx.shadowOffsetY = 0;
};
var generate_stump = function(ctx, cw, ch, rng, p) {
console.log('Generating stump of pumpkin...');
if (rng.next() < 0.1) {
console.log('No stump for this one.');
return;
}
var stump_length = rng.nextInt(10, 40),
stump_upper_width = rng.nextInt(1, 15),
stump_lower_width = rng.nextInt(3, 15),
stump_curve_x = rng.nextInt(stump_upper_width, 5),
stump_curve_x_direction = rng.next() > 0.5 ? 1 : -1;
var gradient = ctx.createLinearGradient(
p.start.x - 10, p.stop.y,
p.start.x, p.stop.y
);
var red = rng.nextInt(130, 170),
green = rng.nextInt(50, 80),
blue = rng.nextInt(10, 30);
var color1 = 'rgb(' + red + ', ' + green + ', ' + blue +')';
var darken_factor = rng.next();
red *= darken_factor;
green *= darken_factor;
blue *= darken_factor;
var color2 = 'rgb(' + Math.round(red) + ', ' + Math.round(green) + ', ' + Math.round(blue) + ')';
gradient.addColorStop(0, color2);
gradient.addColorStop(1, color1);
ctx.fillStyle = gradient;
ctx.beginPath();
ctx.moveTo(p.start.x - stump_lower_width / 2, p.start.y);
ctx.quadraticCurveTo(
p.start.x - stump_upper_width / 2 - stump_curve_x * stump_curve_x_direction, p.start.y - stump_length,
p.start.x - stump_upper_width / 2, p.start.y - stump_length
);
ctx.lineTo(p.start.x + stump_upper_width / 2, p.start.y - stump_length);
ctx.quadraticCurveTo(
p.start.x + stump_lower_width / 2 - stump_curve_x * stump_curve_x_direction, p.start.y,
p.start.x + stump_lower_width / 2, p.start.y
);
ctx.fill();
ctx.closePath();
if (rng.next() < 0.3) {
console.log('No stump cut for this one.');
return;
}
ctx.beginPath();
if (rng.next() < 0.6) {
// otherwise reuse gradient of stump, cut is facing away
red = rng.nextInt(200, 250);
green = rng.nextInt(120, 150);
ctx.fillStyle = 'rgb(' + red + ', ' + green + ', 50)';
}
ctx.ellipse(
p.start.x, // center x
p.start.y - stump_length, // center y
stump_upper_width / 2, // radius x
rng.nextInt(1, stump_upper_width / 3), // radius y
rng.nextInt(0, 20) * Math.PI/180, // rotation
0, // start angle
2 * Math.PI // end angle
);
ctx.fill();
ctx.closePath();
};
// Draws the vertical shading stripes over the pumpkin body. The stripes
// reuse the body's bezier shape so they follow its curvature. ~10% of
// pumpkins get no stripes.
var generate_stripes = function(ctx, cw, ch, rng, p) {
    console.log('Generating vertical stripes of pumpkin...');
    if (rng.next() < 0.1) {
        console.log('No stripes for this one.');
        return;
    }
    // Vertical gradient: dark at top and bottom, (usually) transparent middle.
    var gradient = ctx.createLinearGradient(
        p.start.x, p.start.y,
        p.start.x, p.stop.y
    );
    var color1 = 'rgba(0, 0, 0, 0.75)',
        color2 = 'rgba(0, 0, 0, 0)';
    // Fix: this was an accidental implicit global (missing declaration);
    // keep it local to this call.
    // NOTE(review): rng.next is used as a zero-arg [0,1) generator elsewhere —
    // confirm CustomRandom.next actually honours a (min, max) range here.
    var transparent_area = rng.next(0.1, 0.5);
    gradient.addColorStop(0, color1);
    if (rng.next() < 0.9) {
        gradient.addColorStop(transparent_area, color2);
        gradient.addColorStop(1 - transparent_area, color2);
    }
    gradient.addColorStop(1, color1);
    ctx.strokeStyle = gradient;
    var end_reached = false;
    for (var x = 0; !end_reached; x++) {
        ctx.beginPath();
        ctx.moveTo(p.start.x, p.start.y);
        ctx.bezierCurveTo(
            p.width_modifier + p.width_modifier * x, p.height_modifier2,
            p.width_modifier + p.width_modifier * x, p.stop.y + p.height_modifier1,
            p.stop.x, p.stop.y
        );
        ctx.stroke();
        /* check whether we would draw outside of right pumpkin border
         * in the next iteration
         */
        end_reached = p.width_modifier + p.width_modifier * (x + 1) > cw - p.width_modifier;
    }
};
/**
 * Paints the pumpkin body: a symmetric pair of bezier lobes filled with a
 * vertical orange-to-dark gradient. Consumes rng values in a fixed order so
 * the same seed always yields the same pumpkin.
 */
var generate_body = function(ctx, cw, ch, rng, p) {
    console.log('Generating body and color of pumpkin...');
    // Horizontal jitter for the gradient's top and bottom anchor points.
    var topSkew = rng.nextInt(0, 20),
        bottomSkew = rng.nextInt(0, 20);
    var bodyGradient = ctx.createLinearGradient(
        p.start.x - topSkew, p.start.y,
        p.start.x - bottomSkew, p.stop.y
    );
    // Bright orange -> mid orange -> near-black, top to bottom.
    var brightStop = 'rgb(' + rng.nextInt(200, 255) + ', ' + rng.nextInt(80, 110) + ', 0)';
    var midStop = 'rgb(' + rng.nextInt(120, 150) + ', ' + rng.nextInt(30, 80) + ', 0)';
    var darkStop = 'rgb(' + rng.nextInt(0, 20) + ', ' + rng.nextInt(1, 20) + ', 0)';
    bodyGradient.addColorStop(0, brightStop);
    bodyGradient.addColorStop(0.5, midStop);
    bodyGradient.addColorStop(1, darkStop);
    ctx.fillStyle = bodyGradient;
    // Outline: curve down the left side, back up the right, then fill.
    ctx.beginPath();
    ctx.moveTo(p.start.x, p.start.y);
    ctx.bezierCurveTo(
        p.width_modifier, p.height_modifier2,
        p.width_modifier, p.stop.y + p.height_modifier1,
        p.stop.x, p.stop.y
    );
    ctx.bezierCurveTo(
        cw - p.width_modifier, p.stop.y + p.height_modifier1,
        cw - p.width_modifier, p.height_modifier2,
        p.start.x, p.start.y
    );
    ctx.fill();
};
// Page entry point: wires the generate button and draws the first pumpkin.
// A seed in the URL hash (e.g. #12345) takes precedence so shared links
// reproduce the same pumpkin.
var initialize_everything = function(e) {
    document.getElementById('btn_generate').onclick = generate_pumpkin;
    var seed = e;
    if (window.location.hash !== '') {
        // Parse explicitly in base 10; an unparsable hash must not feed NaN
        // into the RNG, so keep the fallback seed in that case.
        var parsed = parseInt(window.location.hash.slice(1), 10);
        if (!isNaN(parsed)) {
            seed = parsed;
        }
    }
    generate_pumpkin(seed);
};
window.onload = initialize_everything;
|
// TODOS:
// add browserify
// add ng-annotate
var gulp = require('gulp');
var concat = require('gulp-concat-sourcemap');
var del = require('del');
var inject = require('gulp-inject');
var rename = require('gulp-rename');
var useref = require('gulp-useref');
var replace = require('gulp-replace');
var git = require('git-rev');

// Build output location and base name for the concatenated app bundle.
var destDirectory = '.tmp/';
var concatFileName = 'bundle';

// Concatenates all app/common sources into bundle.js with a source map.
// Spec files are excluded; *.mdl.js (module declaration) files come first so
// angular modules exist before their registrations. Gulp 3 style task.
gulp.task('concat:js', [], function(){
    return gulp.src([
        '!./src/client/app/**/*spec*.js',
        '!./src/client/common/**/*spec*.js',
        './src/client/app/**/*.mdl.js',
        './src/client/common/**/*.mdl.js',
        './src/client/app/**/*.js',
        './src/client/common/**/*.js',
        './src/client/app/*.js',
        './src/client/app.js',
    ])
    .pipe(concat(concatFileName + '.js', {sourcesContent :true, prefix : 0}))
    .pipe(gulp.dest(destDirectory));
});

// Concatenates vendor assets referenced by build blocks in index.html.
// NOTE(review): useref.assets() only exists in gulp-useref < 3 — confirm the
// pinned version before upgrading the dependency.
gulp.task('concat:vendors', function () {
    var assets = useref.assets();
    return gulp.src('./src/client/index.html')
    .pipe(assets)
    .pipe(gulp.dest('.tmp/'));
});

// Aggregate task: runs both concat tasks, removes stale bundle artifacts,
// then copies index.html back in place (the inject step is currently disabled).
module.exports = gulp.task('concat', [ 'concat:vendors', 'concat:js' ], function(){
    del(destDirectory + concatFileName + '*js-**');
    return gulp.src('src/client/index.html')
    // .pipe(inject(bundleFiles, {relative: true}))
    .pipe(gulp.dest('src/client'));
});
|
package cn.catguild.selfdevelop.gitcli.util;
import org.junit.jupiter.api.Test;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.security.NoSuchAlgorithmException;
import java.util.zip.Deflater;
import static org.assertj.core.api.Assertions.assertThat;
class ZLibUtilsTest {
    /** Test payload: a git blob header ("blob <size>\0") followed by the content "helloworld". */
    String store = "blob 10\u0000helloworld";
    /** Object file produced by native git for the same payload. */
    String gitPath = ".gitTest/objects/62/0ffd0fd9579a46e46ef4505b198ee0a01a57f2";
    /** Destination for the copy compressed by our own ZLibUtils. */
    String zlibPath = ".gitTest/objects/62/0ffd0fd9579a46e46ef4505b198ee0a01a57f2zlib";

    /**
     * Compresses the payload the same way git hash-object does
     * (zlib, compression level 1) and writes it next to git's own object file.
     *
     * @throws IOException if the output file cannot be written
     */
    @Test
    void compress() throws IOException {
        // try-with-resources: the stream is closed even if compress() throws.
        try (FileOutputStream fileOutputStream = new FileOutputStream(zlibPath)) {
            ZLibUtils.compress(store.getBytes(StandardCharsets.UTF_8), fileOutputStream, new Deflater(1));
        }
    }

    /**
     * Decompresses both git's object file and ours and verifies the contents match.
     *
     * @throws IOException if either file cannot be read
     */
    @Test
    void decompress() throws IOException {
        try (FileInputStream gitIn = new FileInputStream(gitPath);
             FileInputStream zlibIn = new FileInputStream(zlibPath)) {
            byte[] decompress1 = ZLibUtils.decompress(gitIn);
            byte[] decompress2 = ZLibUtils.decompress(zlibIn);
            // Decode with an explicit charset; the platform default is unreliable.
            String s1 = new String(decompress1, StandardCharsets.UTF_8);
            String s2 = new String(decompress2, StandardCharsets.UTF_8);
            assertThat(s1).isEqualTo(s2);
            assertThat(decompress1).isEqualTo(decompress2);
        }
    }

    /**
     * Round-trip check against git itself: decompressing git's object file must
     * yield exactly the test payload "blob 10\0helloworld".
     * (The previous version read the bytes but asserted nothing.)
     *
     * @throws NoSuchAlgorithmException kept for signature compatibility
     * @throws IOException if the git object file cannot be read
     */
    @Test
    void gitContrastTest() throws NoSuchAlgorithmException, IOException {
        try (FileInputStream gitIn = new FileInputStream(gitPath)) {
            byte[] decompressed = ZLibUtils.decompress(gitIn);
            assertThat(new String(decompressed, StandardCharsets.UTF_8)).isEqualTo(store);
        }
    }
}
|
<filename>src/result.ts
import {Option, some, none} from "./option";
/** Creates an Ok result carrying no value (Result<void, E>). */
export const emptyOk = <E>(): Result<void, E> => new Ok(undefined);
/** Creates an Err result carrying no error value (Result<T, void>). */
export const emptyErr = <T>(): Result<T, void> => new Err(undefined);
/** Wraps a success value in a Result. */
export const ok = <T, E>(value: T): Result<T, E> => new Ok(value);
/** Wraps an error value in a Result. */
export const err = <T, E>(error: E): Result<T, E> => new Err(error);
/**
 * A Rust-style Result: either Ok carrying a success value of type T, or Err
 * carrying an error value of type E.
 */
export interface Result<T, E> {
    /** True if this is an Err. */
    isErr(): boolean;
    /** True if this is an Ok. */
    isOk(): boolean;
    /** The success value as an Option (some for Ok, none for Err). */
    ok(): Option<T>;
    /** The error value as an Option (some for Err, none for Ok). */
    err(): Option<E>;
    /** Transforms the success value, leaving an Err untouched. */
    map<U>(fn: (value: T) => U): Result<U, E>;
    /** Transforms the error value, leaving an Ok untouched. */
    mapErr<F>(fn: (error: E) => F): Result<T, F>;
    /** Maps the success value, or computes a fallback from the error. */
    mapOrElse<U>(fnMap: (value: T) => U, fnOrElse: (error: E) => U): U;
    /** Returns `res` if this is Ok, otherwise propagates this Err. */
    and<U>(res: Result<U, E>): Result<U, E>;
    /** Chains a fallible computation on the success value. */
    andThen<U>(fn: (value: T) => Result<U, E>): Result<U, E>;
    /** Returns this if Ok, otherwise `res`. */
    or<F>(res: Result<T, F>): Result<T, F>;
    /** Returns this if Ok, otherwise computes an alternative from the error. */
    orElse<F>(fn: (error: E) => Result<T, F>): Result<T, F>;
    /** Returns the error value, or throws `error` if this is an Ok. */
    expectErr(error: any): E;
    /** Returns the success value, or throws `error` if this is an Err. */
    expect(error: any): T;
    /** Returns the success value, or throws the contained error. */
    unwrap(): T;
    /** Returns the error value; on Ok, throws the contained success value. */
    unwrapErr(): E;
    /** Returns the success value, or `value` if this is an Err. */
    unwrapOr(value: T): T;
    /** Returns the success value, or computes one from the error. */
    unwrapOrElse(fn: (error: E) => T): T;
}
/**
 * Success variant of {@link Result}. Holds a value of type T; every
 * error-oriented accessor either yields an empty Option, ignores its
 * argument, or throws.
 */
export class Ok<T, E> implements Result<T, E> {
    constructor(private readonly value: T) {}

    isOk(): boolean {
        return true;
    }

    isErr(): boolean {
        return false;
    }

    ok(): Option<T> {
        return some(this.value);
    }

    err(): Option<E> {
        return none();
    }

    unwrap(): T {
        return this.value;
    }

    unwrapErr(): E {
        // Mirrors Rust's unwrap_err: throws the contained success value.
        throw this.value;
    }

    unwrapOr(_value: T): T {
        return this.value;
    }

    unwrapOrElse(_fn: (error: E) => T): T {
        return this.value;
    }

    expect(_error: any): T {
        return this.value;
    }

    expectErr(error: any): E {
        throw error;
    }

    map<U>(fn: (value: T) => U): Result<U, E> {
        return ok(fn(this.value));
    }

    mapErr<F>(_fn: (error: E) => F): Result<T, F> {
        return ok(this.value);
    }

    mapOrElse<U>(fnMap: (value: T) => U, fnOrElse: (error: E) => U): U {
        return this.map(fnMap).unwrapOrElse(fnOrElse);
    }

    and<U>(res: Result<U, E>): Result<U, E> {
        return res;
    }

    andThen<U>(fn: (value: T) => Result<U, E>): Result<U, E> {
        return fn(this.value);
    }

    or<F>(_res: Result<T, F>): Result<T, F> {
        return ok(this.value);
    }

    orElse<F>(_fn: (error: E) => Result<T, F>): Result<T, F> {
        return ok(this.value);
    }
}
export class Err<T, E> implements Result<T, E> {
private readonly error: E;
constructor(error: E) {
this.error = error;
}
isErr(): boolean {
return true;
}
isOk(): boolean {
return false;
}
ok(): Option<T> {
return none();
}
err(): Option<E> {
return some(this.error);
}
unwrap(): T {
throw this.error;
}
unwrapErr(): E {
return this.error;
}
expect(error: any): T {
throw error;
}
expectErr(_error: any): E {
return this.error;
}
unwrapOr(value: T): T {
return value;
}
unwrapOrElse(fn: (error: E) => T): T {
return fn(this.error);
}
or<F>(res: Result<T, F>): Result<T, F> {
return res;
}
orElse<F>(fn: (error: E) => Result<T, F>): Result<T, F> {
return fn(this.error);
}
and<U>(_res: Result<U, E>): Result<U, E> {
return err(this.error);
}
andThen<U>(_fn: (value: T) => Result<U, E>): Result<U, E> {
return err(this.error);
}
map<U>(_fn: (value: T) => U): Result<U, E> {
return err(this.error);
}
mapErr<F>(fn: (error: E) => F): Result<T, F> {
return err(fn(this.error));
}
mapOrElse<U>(fnMap: (value: T) => U, fnOrElse: (error: E) => U): U {
return this.map(fnMap).unwrapOrElse(fnOrElse);
}
} |
// Central registry of navigation route names; import these instead of
// repeating raw strings at each navigate() call site.
export default {
    SplashScreen: 'SplashScreen',
    RegisterScreen: 'RegisterScreen',
    HomeScreen: 'HomeScreen',
};
|
<filename>open-sphere-plugins/csv-common/src/main/java/io/opensphere/csvcommon/common/datetime/DateColumn.java
package io.opensphere.csvcommon.common.datetime;
import io.opensphere.core.common.configuration.date.DateFormat.Type;
/**
 * Describes where and how date/time information appears in CSV data: which
 * column (or pair of columns) holds the value, what kind of value it is, and
 * the format string(s) needed to parse it.
 */
public class DateColumn
{
    /** Whether the column holds a full timestamp, a date only, or a time only. */
    private Type myDateColumnType;

    /** Index of the main column containing the date value. */
    private int myPrimaryColumnIndex;

    /** Index of the optional second column (e.g. a time split from the date), or -1 if unused. */
    private int mySecondaryColumnIndex = -1;

    /** Parse format for the primary column's data. */
    private String myPrimaryColumnFormat;

    /** Parse format for the secondary column's data. */
    private String mySecondaryColumnFormat;

    /**
     * Gets whether the column holds a full timestamp, a date only, or a time
     * only.
     *
     * @return the column type
     */
    public Type getDateColumnType()
    {
        return myDateColumnType;
    }

    /**
     * Sets whether the column holds a full timestamp, a date only, or a time
     * only.
     *
     * @param dateColumnType the column type
     */
    public void setDateColumnType(Type dateColumnType)
    {
        myDateColumnType = dateColumnType;
    }

    /**
     * Gets the index of the main column containing the date value.
     *
     * @return the primary column index
     */
    public int getPrimaryColumnIndex()
    {
        return myPrimaryColumnIndex;
    }

    /**
     * Sets the index of the main column containing the date value.
     *
     * @param primaryColumnIndex the primary column index
     */
    public void setPrimaryColumnIndex(int primaryColumnIndex)
    {
        myPrimaryColumnIndex = primaryColumnIndex;
    }

    /**
     * Gets the parse format for the primary column's data.
     *
     * @return the primary column format
     */
    public String getPrimaryColumnFormat()
    {
        return myPrimaryColumnFormat;
    }

    /**
     * Sets the parse format for the primary column's data.
     *
     * @param primaryColumnFormat the primary column format
     */
    public void setPrimaryColumnFormat(String primaryColumnFormat)
    {
        myPrimaryColumnFormat = primaryColumnFormat;
    }

    /**
     * Gets the index of the optional second column, or -1 if the date value
     * does not span two columns.
     *
     * @return the secondary column index, or -1
     */
    public int getSecondaryColumnIndex()
    {
        return mySecondaryColumnIndex;
    }

    /**
     * Sets the index of the optional second column; use -1 when the date
     * value does not span two columns.
     *
     * @param secondaryColumnIndex the secondary column index, or -1
     */
    public void setSecondaryColumnIndex(int secondaryColumnIndex)
    {
        mySecondaryColumnIndex = secondaryColumnIndex;
    }

    /**
     * Gets the parse format for the secondary column's data.
     *
     * @return the secondary column format
     */
    public String getSecondaryColumnFormat()
    {
        return mySecondaryColumnFormat;
    }

    /**
     * Sets the parse format for the secondary column's data.
     *
     * @param secondaryColumnFormat the secondary column format
     */
    public void setSecondaryColumnFormat(String secondaryColumnFormat)
    {
        mySecondaryColumnFormat = secondaryColumnFormat;
    }
}
|
package com.alipay.api.domain;
import java.util.List;
import com.alipay.api.AlipayObject;
import com.alipay.api.internal.mapping.ApiField;
import com.alipay.api.internal.mapping.ApiListField;
/**
 * Request model for a batch release of group budget (集团预算释放).
 * Auto-generated Alipay SDK model: field-to-wire mapping is driven by the
 * {@code @ApiField}/{@code @ApiListField} annotations.
 *
 * @author auto create
 * @since 1.0, 2021-08-06 10:17:49
 */
public class AlipayBossFncAntbudgetGroupbudgetRefundModel extends AlipayObject {
    private static final long serialVersionUID = 6163781283313377443L;

    /**
     * Batch id, used as the idempotency key.
     */
    @ApiField("batch_id")
    private String batchId;

    /**
     * Collection of group-budget release requests.
     */
    @ApiListField("group_budget_release_request_list")
    @ApiField("gb_release_request")
    private List<GbReleaseRequest> groupBudgetReleaseRequestList;

    /**
     * Namespace, e.g. ZK for 智科 (intelligent tech), RESOURCE for 业管 (business management).
     */
    @ApiField("ns")
    private String ns;

    public String getBatchId() {
        return this.batchId;
    }

    public void setBatchId(String batchId) {
        this.batchId = batchId;
    }

    public List<GbReleaseRequest> getGroupBudgetReleaseRequestList() {
        return this.groupBudgetReleaseRequestList;
    }

    public void setGroupBudgetReleaseRequestList(List<GbReleaseRequest> groupBudgetReleaseRequestList) {
        this.groupBudgetReleaseRequestList = groupBudgetReleaseRequestList;
    }

    public String getNs() {
        return this.ns;
    }

    public void setNs(String ns) {
        this.ns = ns;
    }
}
|
# Queries the vaccine-allocation Cloud Run service for one (state, district)
# pair. $1 = endpoint path, $2 = state code, $3 = district name.
function run() {
    curl -s -XPOST \
        -H "Authorization: Bearer $(gcloud auth print-identity-token)" \
        -H 'Content-Type: application/json' \
        "https://vaccine-allocation-sipjq3uhla-uc.a.run.app/${1}" \
        --data '{"state_code": "'"${2}"'", "district": "'"${3}"'"}'
}

# Start from a clean log; -f keeps the first run (no logfile yet) from erroring.
rm -f logfile_tev

# Fire one background request per input line, throttled to ~1 request/second.
while read -r state_code district; do
    echo "${state_code} - ${district} -> STARTED"
    (
        result=$(run tev "${state_code}" "${district}")
        # Quote the expansion so spaces/globs in the JSON response survive.
        echo "${state_code} - ${district} - ${result}" >> ./logfile_tev
    ) &
    sleep 1
done < missing_tev

# Wait for all in-flight requests so the log is complete when the script exits.
wait
#!/bin/bash
# dv-trio.sh
#
# Post-processes a trio (3-sample) VCF body annotated by FamSEQ: for each
# variant line the FamSEQ genotype field (FGT) is promoted to the leading
# FORMAT field as GT (VCF requires GT first when present) while the original
# GT is preserved as OGT. Lines with no FGT field (e.g. indels) pass through
# unchanged. Output goes to "<input>.txt"; "<input>.done" marks completion.
input=$1 #
output=$1".txt" #
#save original GT value and move Famseq GT to be the GT value for sample
while read line; # do while there are lines from input file
do #
    # NOTE(review): read -d uses only the FIRST character of '\t' (a backslash)
    # as its delimiter; the array split actually happens via default IFS
    # whitespace, which includes tabs — confirm before "fixing" this.
    read -d '\t' -r -a line_fields <<< "$line" #
    # Split the FORMAT column (index 8) and the three sample columns on ':'.
    IFS=': ' read -r -a format_fields <<< "${line_fields[8]}" #
    IFS=': ' read -r -a samp1_fields <<< "${line_fields[9]}" #
    IFS=': ' read -r -a samp2_fields <<< "${line_fields[10]}" #
    IFS=': ' read -r -a samp3_fields <<< "${line_fields[11]}" #
    arr_len=${#format_fields[@]} # how many FORMAT fields for this variant
    len=$(($arr_len-1))
    #
    # Rebuild FORMAT/sample strings with every field except FGT, remembering
    # where FGT was found so it can be reattached at the front below.
    format_redo=""
    sample1_redo=""
    sample2_redo=""
    sample3_redo=""
    FGT_found=false
    #
    for (( i=0; i<=$len; i++ )); # for the number of format fields in this variant line
    do
        if [ ${format_fields[$i]} == "FGT" ]; # find the famseq GT field
        then #
            FGT_nbr=$i
            FGT_found=true # yes
        else # not this format field
            format_redo=$format_redo":"${format_fields[$i]} # save it
            sample1_redo=$sample1_redo":"${samp1_fields[$i]}
            sample2_redo=$sample2_redo":"${samp2_fields[$i]}
            sample3_redo=$sample3_redo":"${samp3_fields[$i]}
            #
        fi
    done #
    #echo $FGT_nbr
    #
    if [ "$FGT_found" = true ]; # if a FGT field found then move it to the front of the line for FORMAT fields, which is a VCF requirement
    then
        format_redo=${format_fields[$FGT_nbr]}$format_redo
        sample1_redo=${samp1_fields[$FGT_nbr]}$sample1_redo
        sample2_redo=${samp2_fields[$FGT_nbr]}$sample2_redo
        sample3_redo=${samp3_fields[$FGT_nbr]}$sample3_redo
    else # no FGT, which means it is a indel, then just use original
        format_redo=${line_fields[8]}
        sample1_redo=${line_fields[9]}
        sample2_redo=${line_fields[10]}
        sample3_redo=${line_fields[11]}
    fi
    #
    # Reassemble the first 8 (unchanged) VCF columns.
    tab="\t"
    base=""
    for (( i=0; i<8; i++ ));
    do
        base=$base${line_fields[$i]}$tab # build the front part of the variant line, which has no change
    done
    #
    # The sed pass renames the old GT tag to OGT and the FGT tag to GT.
    echo -e $base$format_redo$tab$sample1_redo$tab$sample2_redo$tab$sample3_redo | sed -e 's/:GT:/:OGT:/g' -e 's/FGT:/GT:/g' >> $output # put new variant line with the GT and FGt reordered
done < $input #
#
echo -e "OUT\t$input" > $input".done"
#
|
//#####################################################################
// Copyright 2005-2008, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>.
// This file is part of PhysBAM whose distribution is governed by the license contained in the accompanying file PHYSBAM_COPYRIGHT.txt.
//#####################################################################
#include <PhysBAM_Tools/Grids_Uniform_Arrays/FACE_ARRAYS.h>
#include <PhysBAM_Tools/Log/DEBUG_PRINT.h>
#include <PhysBAM_Tools/Log/DEBUG_UTILITIES.h>
#include <PhysBAM_Tools/Math_Tools/Hash.h>
#include <PhysBAM_Tools/Parallel_Computation/MPI_UNIFORM_GRID.h>
#ifdef USE_MPI
#include <PhysBAM_Tools/Arrays/ARRAY.h>
#include <PhysBAM_Tools/Data_Structures/PAIR.h>
#include <PhysBAM_Tools/Grids_Uniform/UNIFORM_GRID_ITERATOR_CELL.h>
#include <PhysBAM_Tools/Grids_Uniform/UNIFORM_GRID_ITERATOR_NODE.h>
#include <PhysBAM_Tools/Matrices/MATRIX_1X1.h>
#include <PhysBAM_Tools/Matrices/SYMMETRIC_MATRIX_2X2.h>
#include <PhysBAM_Tools/Matrices/SYMMETRIC_MATRIX_3X3.h>
#include <PhysBAM_Tools/Parallel_Computation/LOCAL_GRID.h>
#include <PhysBAM_Tools/Parallel_Computation/MPI_PACKAGE.h>
#include <PhysBAM_Tools/Parallel_Computation/MPI_UTILITIES.h>
#endif
using namespace PhysBAM;
//#####################################################################
// Constructor
//#####################################################################
// Pure-MPI constructor: all domain decomposition and communicator setup is
// delegated to the MPI_GRID base class; both threaded backends stay disabled (null).
template<class T_GRID> MPI_UNIFORM_GRID<T_GRID>::
MPI_UNIFORM_GRID(T_GRID& local_grid_input,const int number_of_ghost_cells_input,const bool skip_initialization,const TV_INT& processes_per_dimension,
const TV_BOOL& periodic_input,MPI::Group* group_input)
:MPI_GRID<T_GRID>(local_grid_input,number_of_ghost_cells_input,skip_initialization,processes_per_dimension,periodic_input,group_input),threaded_grid(0),mpi_threaded_grid(0)
{}
// Threads-only constructor: communication goes through a THREADED_UNIFORM_GRID
// owned by this object.  Mixing threading with MPI is not supported yet, so the
// caller must pass skip_mpi=true and no MPI group.
template<class T_GRID> MPI_UNIFORM_GRID<T_GRID>::
MPI_UNIFORM_GRID(ARRAY<THREAD_PACKAGE>& buffers_input,const int tid_input,const int number_of_threads,T_GRID& local_grid_input,const int number_of_ghost_cells_input,
const bool skip_mpi,const bool skip_initialization,const TV_INT& processes_per_dimension,const TV_BOOL& periodic_input,MPI::Group* group_input)
:MPI_GRID<T_GRID>(local_grid_input,number_of_ghost_cells_input,skip_mpi,processes_per_dimension,periodic_input,group_input),threaded_grid(0),mpi_threaded_grid(0)
{
PHYSBAM_ASSERT(skip_mpi); // if using threading we can't use MPI yet
PHYSBAM_ASSERT(!skip_mpi || !group_input); // no MPI group may be supplied either; PHYSBAM_ASSERT used consistently (plain assert() silently disappears under NDEBUG)
threaded_grid=new THREADED_UNIFORM_GRID<T_GRID>(buffers_input,tid_input,number_of_threads,local_grid_input,number_of_ghost_cells_input,skip_initialization,processes_per_dimension,periodic_input);
}
// Hybrid MPI+threads constructor: the MPI_GRID base is constructed with its
// third argument (skip_initialization) hard-coded to true, and all setup and
// communication is delegated to an owned MPI_THREADED_UNIFORM_GRID instead.
template<class T_GRID> MPI_UNIFORM_GRID<T_GRID>::
MPI_UNIFORM_GRID(T_GRID& local_grid_input,const int number_of_ghost_cells_input,const int number_of_threads,const int tid_input,ARRAY<THREAD_PACKAGE>* buffers_input,const bool skip_initialization,const TV_INT& mpi_processes_per_dimension,const TV_INT& threaded_processes_per_dimension,const TV_BOOL& periodic_input,MPI::Group* group_input)
:MPI_GRID<T_GRID>(local_grid_input,number_of_ghost_cells_input,true),threaded_grid(0),mpi_threaded_grid(0)
{
mpi_threaded_grid=new MPI_THREADED_UNIFORM_GRID<T_GRID>(local_grid_input,number_of_ghost_cells_input,number_of_threads,tid_input,buffers_input,skip_initialization,mpi_processes_per_dimension,threaded_processes_per_dimension,periodic_input,group_input);
}
#ifdef USE_MPI
//#####################################################################
// Function Package_Cell_Data
//#####################################################################
// Wrap the given region of a cell-centered array in an MPI_PACKAGE, which
// builds the MPI datatype describing that subarray for send/receive.
template<class T_GRID> template<class T2> MPI_PACKAGE MPI_UNIFORM_GRID<T_GRID>::
Package_Cell_Data(ARRAYS_ND_BASE<VECTOR<T2,TV::dimension> >& data,const RANGE<TV_INT>& region) const
{
return MPI_PACKAGE(data,region);
}
//#####################################################################
// Function Package_Face_Data
//#####################################################################
// Combine all per-axis face-component subarrays (one region per axis) into a
// single MPI struct datatype so the face data moves in one message.
template<class T_GRID> template<class T_FACE_ARRAYS2> MPI_PACKAGE MPI_UNIFORM_GRID<T_GRID>::
Package_Face_Data(T_FACE_ARRAYS2& data,const ARRAY<RANGE<TV_INT> >& regions) const
{
MPI::Aint displacements[T_GRID::dimension];MPI::Datatype old_types[T_GRID::dimension];int lengths[T_GRID::dimension]; // one struct member per axis
for(int axis=1;axis<=T_GRID::dimension;axis++){ // PhysBAM arrays are 1-based; MPI arrays below are 0-based
lengths[axis-1]=1;
displacements[axis-1]=(MPI::Aint)&data.Component(axis)(regions(axis).Minimum_Corner()); // absolute address of the region's first element -- presumably paired with MPI_BOTTOM-style addressing inside MPI_PACKAGE; confirm
old_types[axis-1]=MPI_PACKAGE::Make_Arrays_Type(data.Component(axis),regions(axis));}
MPI::Datatype datatype=MPI::Datatype::Create_struct(T_GRID::dimension,lengths,displacements,old_types);
for(int axis=1;axis<=T_GRID::dimension;axis++) old_types[axis-1].Free(); // per-axis types can be freed once the struct type owns its description
return MPI_PACKAGE(datatype);
}
//#####################################################################
// Function Package_Common_Face_Data
//#####################################################################
// Package a single axis component of the face data over the given region;
// used for the faces lying on the boundary shared between two processes.
template<class T_GRID> template<class T_FACE_ARRAYS2> MPI_PACKAGE MPI_UNIFORM_GRID<T_GRID>::
Package_Common_Face_Data(T_FACE_ARRAYS2& data,const int axis,const RANGE<TV_INT>& region) const
{
return MPI_PACKAGE(data.Component(axis),region);
}
//#####################################################################
// Function Gather_Cell_Data
//#####################################################################
// Gather every rank's local cell data onto the master (rank 0) into
// global_data; intended to return true on non-master ranks (whose global_data
// is left unfilled).  NOTE(review): the entire implementation is disabled by
// "#if 0" below -- as compiled, this function does nothing and returns false
// on every rank.
template<class T_GRID> template<class T_ARRAYS> bool MPI_UNIFORM_GRID<T_GRID>::
Gather_Cell_Data(const T_ARRAYS& local_data,T_ARRAYS& global_data) const
{
#if 0
int tag=Get_Unique_Tag();
int processes=comm->Get_size(),rank=comm->Get_rank(),master=0;
T_GRID mac_global_grid=global_grid.Get_MAC_Grid();
int ghost_cells=(local_grid.Domain_Indices().Minimum_Corner()-local_data.Domain_Indices().Minimum_Corner()).Max(); // infer ghost width from the array padding
RANGE<TV_INT> my_region=Find_Region_Box(rank+1,RANGE<TV_INT>::Zero_Box(),ghost_cells);
T_GRID mac_local_grid=local_grid.Get_MAC_Grid();
if(rank != master){
// Non-master: send this rank's region to the master.
ARRAY<MPI_PACKAGE> packages;ARRAY<MPI::Request> requests;
MPI_PACKAGE package=Package_Cell_Data(const_cast<T_ARRAYS&>(local_data),my_region); // TODO change
packages.Append(package);requests.Append(package.Isend(*comm,master,tag));
MPI_UTILITIES::Wait_All(requests);MPI_PACKAGE::Free_All(packages);}
else{
// Master: copy its own region, then receive and place every other rank's data.
LOCAL_GRID<T_GRID> my_local_grid(mac_global_grid,mac_local_grid);
my_local_grid.Put(local_data,my_region,global_data);
for(int p=2;p<=processes;p++){
MPI::Status status;
comm->Probe(MPI::ANY_SOURCE,tag,status); // accept ranks in whatever order they arrive
int source=status.Get_source();
ARRAY<char> buffer(status.Get_count(MPI::PACKED));//int position=0;
comm->Recv(&buffer(1),buffer.m,MPI::PACKED,source,tag);
T_GRID other_grid=Restrict_Grid(all_coordinates(p));
T_GRID mac_other_grid=other_grid.Get_MAC_Grid();
T_ARRAYS other_array(mac_other_grid.Domain_Indices(ghost_cells));
RANGE<TV_INT> other_region=Find_Region_Box(p,RANGE<TV_INT>::Zero_Box(),ghost_cells);
MPI_PACKAGE package=Package_Cell_Data(other_array,other_region); // TODO: other_array.Domain_Indices());
package.Unpack(buffer,*comm);
package.Free();
LOCAL_GRID<T_GRID> other_local_grid(mac_global_grid,mac_other_grid);
other_local_grid.Put(other_array,other_region,global_data);}}
return rank!=master;
#endif
return false; // active code path: gather is currently disabled
}
//#####################################################################
// Function Scatter_Cell_Data
//#####################################################################
// Inverse of Gather_Cell_Data: the master extracts each rank's portion of
// global_data and sends it; every other rank receives into local_data.
// NOTE(review): the implementation is disabled by "#if 0" -- as compiled this
// function is a no-op.
template<class T_GRID> template<class T_ARRAYS> void MPI_UNIFORM_GRID<T_GRID>::
Scatter_Cell_Data(const T_ARRAYS& global_data,T_ARRAYS& local_data) const
{
#if 0
int tag=Get_Unique_Tag();
int processes=comm->Get_size(),rank=comm->Get_rank(),master=0;
int ghost_cells=(local_grid.Domain_Indices().Minimum_Corner()-local_data.Domain_Indices().Minimum_Corner()).Max(); // infer ghost width from the array padding
T_GRID mac_global_grid=global_grid.Get_MAC_Grid();
T_GRID mac_local_grid=local_grid.Get_MAC_Grid();
if(rank != master){
// Non-master: receive this rank's slice from the master.
MPI::Status status;
comm->Probe(0,tag,status);
ARRAY<char> buffer(status.Get_count(MPI::PACKED));
comm->Recv(&buffer(1),buffer.m,MPI::PACKED,0,tag);
MPI_PACKAGE package=Package_Cell_Data(local_data,local_data.Domain_Indices());
package.Unpack(buffer,*comm);
package.Free();}
else{
// Master: fill its own local_data directly, then send each other rank its slice.
LOCAL_GRID<T_GRID> my_local_grid(mac_global_grid,mac_local_grid);
my_local_grid.Get(global_data,local_data);
ARRAY<MPI_PACKAGE> packages;ARRAY<MPI::Request> requests;ARRAY<T_ARRAYS > other_arrays(processes); // other_arrays keeps send buffers alive until Wait_All
for(int p=2;p<=processes;p++){
T_GRID other_grid=Restrict_Grid(all_coordinates(p));T_GRID mac_other_grid=other_grid.Get_MAC_Grid();
LOCAL_GRID<T_GRID> other_local_grid(mac_global_grid,mac_other_grid);
other_arrays(p).Resize(mac_other_grid.Domain_Indices(ghost_cells),false,false);
other_local_grid.Get(global_data,other_arrays(p));
MPI_PACKAGE package=Package_Cell_Data(other_arrays(p),other_arrays(p).Domain_Indices());
packages.Append(package);requests.Append(package.Isend(*comm,p-1,tag));}
MPI_UTILITIES::Wait_All(requests);MPI_PACKAGE::Free_All(packages);}
#endif
}
//#####################################################################
// Function Get_Non_Overlapping_Face_Grid
//#####################################################################
// Return the local face grid for this axis trimmed so faces shared with a
// neighboring process are owned by exactly one side: if the neighbor on side
// 2*axis exists, one layer of faces along axis is dropped (counts reduced by
// one, domain shortened by one cell width).
template<class T_GRID> T_GRID MPI_UNIFORM_GRID<T_GRID>::
Get_Non_Overlapping_Face_Grid(const int axis) const
{
T_GRID face_grid=local_grid.Get_Face_Grid(axis);
if(side_neighbor_ranks(2*axis)!=MPI::PROC_NULL){ // NOTE(review): assumes 2*axis indexes the side whose shared faces this rank gives up -- confirm side convention
const TV_INT counts=face_grid.Numbers_Of_Cells()+TV_INT::All_Ones_Vector()-TV_INT::Axis_Vector(axis); // one fewer face along axis
const RANGE<TV> box=face_grid.Domain()+RANGE<TV>(TV(),-face_grid.DX()*TV::Axis_Vector(axis)); // pull the axis maximum in by one cell width
return T_GRID(counts,box).Get_MAC_Grid_At_Regular_Positions();}
else return face_grid.Get_MAC_Grid_At_Regular_Positions();
}
//#####################################################################
#else
//#####################################################################
// Non-MPI build: define empty placeholder types so the signatures still parse,
// and stub out every MPI-dependent member -- calling any of them aborts via
// PHYSBAM_FUNCTION_IS_NOT_DEFINED().
namespace MPI{class Request{};}
namespace PhysBAM{class MPI_PACKAGE{};}
template<class T_GRID> template<class T2> MPI_PACKAGE MPI_UNIFORM_GRID<T_GRID>::Package_Cell_Data(ARRAYS_ND_BASE<VECTOR<T2,TV::dimension> >&,const RANGE<TV_INT>&) const {PHYSBAM_FUNCTION_IS_NOT_DEFINED();}
template<class T_GRID> template<class T_FACE_ARRAYS2> MPI_PACKAGE MPI_UNIFORM_GRID<T_GRID>::Package_Face_Data(T_FACE_ARRAYS2&,const ARRAY<RANGE<TV_INT> >&) const {PHYSBAM_FUNCTION_IS_NOT_DEFINED();}
template<class T_GRID> template<class T_FACE_ARRAYS2> MPI_PACKAGE MPI_UNIFORM_GRID<T_GRID>::Package_Common_Face_Data(T_FACE_ARRAYS2&,const int,const RANGE<TV_INT>&) const {PHYSBAM_FUNCTION_IS_NOT_DEFINED();}
template<class T_GRID> T_GRID MPI_UNIFORM_GRID<T_GRID>::Get_Non_Overlapping_Face_Grid(const int) const {PHYSBAM_FUNCTION_IS_NOT_DEFINED();}
template<class T_GRID> template<class T_ARRAYS> bool MPI_UNIFORM_GRID<T_GRID>::Gather_Cell_Data(const T_ARRAYS& local_data,T_ARRAYS& global_data) const {PHYSBAM_FUNCTION_IS_NOT_DEFINED();}
template<class T_GRID> template<class T_ARRAYS> void MPI_UNIFORM_GRID<T_GRID>::Scatter_Cell_Data(const T_ARRAYS& global_data,T_ARRAYS& local_data) const {PHYSBAM_FUNCTION_IS_NOT_DEFINED();}
//#####################################################################
#endif
//#####################################################################
// P() forwards its arguments unchanged; it lets template arguments containing
// commas (e.g. GRID<VECTOR<float,2> >) pass through a single macro parameter.
#define P(...) __VA_ARGS__
#ifdef USE_MPI
// Explicit instantiations of the Package_* templates for every element type
// the solvers exchange.  (No comments inside the macros: a // comment would
// swallow the backslash continuations.)
#define INSTANTIATION_HELPER_LENGTH(T_GRID,length) \
template MPI_PACKAGE MPI_UNIFORM_GRID<T_GRID >::Package_Cell_Data(T_ARRAYS_BASE::REBIND<VECTOR<T_GRID::SCALAR,length> >::TYPE&,const RANGE<T_GRID::VECTOR_INT>&) const;
// Instantiates Package_Cell_Data / Package_Face_Data / Package_Common_Face_Data
// for scalars, ints, bools, matrices and VECTORs of lengths 1..5.
#define INSTANTIATION_HELPER_MPI(T,T_GRID,d) \
template MPI_PACKAGE MPI_UNIFORM_GRID<T_GRID >::Package_Cell_Data(T_ARRAYS_BASE&,const RANGE<T_GRID::VECTOR_INT>&) const; \
template MPI_PACKAGE MPI_UNIFORM_GRID<T_GRID >::Package_Cell_Data(T_ARRAYS_BASE::REBIND<int>::TYPE&,const RANGE<T_GRID::VECTOR_INT>&) const; \
template MPI_PACKAGE MPI_UNIFORM_GRID<T_GRID >::Package_Cell_Data(T_ARRAYS_BASE::REBIND<bool>::TYPE&,const RANGE<T_GRID::VECTOR_INT>&) const; \
template MPI_PACKAGE MPI_UNIFORM_GRID<T_GRID >::Package_Cell_Data(T_ARRAYS_BASE::REBIND<MATRIX<T_GRID::SCALAR,1> >::TYPE&,const RANGE<T_GRID::VECTOR_INT>&) const; \
template MPI_PACKAGE MPI_UNIFORM_GRID<T_GRID >::Package_Cell_Data(T_ARRAYS_BASE::REBIND<SYMMETRIC_MATRIX<T_GRID::SCALAR,2> >::TYPE&,const RANGE<T_GRID::VECTOR_INT>&) const; \
template MPI_PACKAGE MPI_UNIFORM_GRID<T_GRID >::Package_Cell_Data(T_ARRAYS_BASE::REBIND<SYMMETRIC_MATRIX<T_GRID::SCALAR,3> >::TYPE&,const RANGE<T_GRID::VECTOR_INT>&) const; \
template MPI_PACKAGE MPI_UNIFORM_GRID<T_GRID >::Package_Face_Data(T_FACE_ARRAYS& data,const ARRAY<RANGE<T_GRID::VECTOR_INT> >& region) const; \
template MPI_PACKAGE MPI_UNIFORM_GRID<T_GRID >::Package_Face_Data(T_FACE_ARRAYS::REBIND<bool>::TYPE& data,const ARRAY<RANGE<T_GRID::VECTOR_INT> >& region) const; \
template MPI_PACKAGE MPI_UNIFORM_GRID<T_GRID >::Package_Face_Data(T_FACE_ARRAYS::REBIND<VECTOR<T,d> >::TYPE& data,const ARRAY<RANGE<T_GRID::VECTOR_INT> >& region) const; \
template MPI_PACKAGE MPI_UNIFORM_GRID<T_GRID >::Package_Face_Data(T_FACE_ARRAYS::REBIND<VECTOR<T,d+2> >::TYPE& data,const ARRAY<RANGE<T_GRID::VECTOR_INT> >& region) const; \
template MPI_PACKAGE MPI_UNIFORM_GRID<T_GRID >::Package_Face_Data(T_FACE_ARRAYS::REBIND<MATRIX<T,1> >::TYPE& data,const ARRAY<RANGE<T_GRID::VECTOR_INT> >& region) const; \
template MPI_PACKAGE MPI_UNIFORM_GRID<T_GRID >::Package_Face_Data(T_FACE_ARRAYS::REBIND<SYMMETRIC_MATRIX<T,2> >::TYPE& data,const ARRAY<RANGE<T_GRID::VECTOR_INT> >& region) const; \
template MPI_PACKAGE MPI_UNIFORM_GRID<T_GRID >::Package_Face_Data(T_FACE_ARRAYS::REBIND<SYMMETRIC_MATRIX<T,3> >::TYPE& data,const ARRAY<RANGE<T_GRID::VECTOR_INT> >& region) const; \
template MPI_PACKAGE MPI_UNIFORM_GRID<T_GRID >::Package_Face_Data(T_FACE_ARRAYS::REBIND<VECTOR<T,d+3> >::TYPE& data,const ARRAY<RANGE<T_GRID::VECTOR_INT> >& region) const; \
template MPI_PACKAGE MPI_UNIFORM_GRID<T_GRID >::Package_Common_Face_Data(T_FACE_ARRAYS& data,const int,const RANGE<T_GRID::VECTOR_INT>& region) const; \
template MPI_PACKAGE MPI_UNIFORM_GRID<T_GRID >::Package_Common_Face_Data(T_FACE_ARRAYS::REBIND<bool>::TYPE& data,const int,const RANGE<T_GRID::VECTOR_INT>& region) const; \
template MPI_PACKAGE MPI_UNIFORM_GRID<T_GRID >::Package_Common_Face_Data(T_FACE_ARRAYS::REBIND<VECTOR<T,d> >::TYPE& data,const int,const RANGE<T_GRID::VECTOR_INT>& region) const; \
template MPI_PACKAGE MPI_UNIFORM_GRID<T_GRID >::Package_Common_Face_Data(T_FACE_ARRAYS::REBIND<VECTOR<T,d+2> >::TYPE& data,const int,const RANGE<T_GRID::VECTOR_INT>& region) const; \
template MPI_PACKAGE MPI_UNIFORM_GRID<T_GRID >::Package_Common_Face_Data(T_FACE_ARRAYS::REBIND<MATRIX<T,1> >::TYPE& data,const int,const RANGE<T_GRID::VECTOR_INT>& region) const; \
template MPI_PACKAGE MPI_UNIFORM_GRID<T_GRID >::Package_Common_Face_Data(T_FACE_ARRAYS::REBIND<SYMMETRIC_MATRIX<T,2> >::TYPE& data,const int,const RANGE<T_GRID::VECTOR_INT>& region) const; \
template MPI_PACKAGE MPI_UNIFORM_GRID<T_GRID >::Package_Common_Face_Data(T_FACE_ARRAYS::REBIND<SYMMETRIC_MATRIX<T,3> >::TYPE& data,const int,const RANGE<T_GRID::VECTOR_INT>& region) const; \
template MPI_PACKAGE MPI_UNIFORM_GRID<T_GRID >::Package_Common_Face_Data(T_FACE_ARRAYS::REBIND<VECTOR<T,d+3> >::TYPE& data,const int,const RANGE<T_GRID::VECTOR_INT>& region) const; \
INSTANTIATION_HELPER_LENGTH(P(T_GRID),1);INSTANTIATION_HELPER_LENGTH(P(T_GRID),2);INSTANTIATION_HELPER_LENGTH(P(T_GRID),3);INSTANTIATION_HELPER_LENGTH(P(T_GRID),4);INSTANTIATION_HELPER_LENGTH(P(T_GRID),5);
#else
// Without MPI the Package_* members are stubs, so nothing extra to instantiate.
#define INSTANTIATION_HELPER_MPI(T,T_GRID,d)
#endif
// Instantiate the class itself plus Gather/Scatter, then the MPI extras (if any).
#define INSTANTIATION_HELPER(T,T_GRID,d) \
template class MPI_UNIFORM_GRID<T_GRID >; \
template bool MPI_UNIFORM_GRID<T_GRID >::Gather_Cell_Data(const T_ARRAYS_BASE&,T_ARRAYS_BASE&) const; \
template void MPI_UNIFORM_GRID<T_GRID >::Scatter_Cell_Data(const T_ARRAYS_BASE&,T_ARRAYS_BASE&) const; \
INSTANTIATION_HELPER_MPI(T,P(T_GRID),d)
// Instantiate for 1/2/3-D float grids, and double grids when supported.
INSTANTIATION_HELPER(float,P(GRID<VECTOR<float,1> >),1);
INSTANTIATION_HELPER(float,P(GRID<VECTOR<float,2> >),2);
INSTANTIATION_HELPER(float,P(GRID<VECTOR<float,3> >),3);
#ifndef COMPILE_WITHOUT_DOUBLE_SUPPORT
INSTANTIATION_HELPER(double,P(GRID<VECTOR<double,1> >),1);
INSTANTIATION_HELPER(double,P(GRID<VECTOR<double,2> >),2);
INSTANTIATION_HELPER(double,P(GRID<VECTOR<double,3> >),3);
#endif
|
<reponame>tdrv90/freeCodeCamp<gh_stars>0
// (1) Create a function called reusableFunction which prints "Hi World" to the dev console.
// (2) Call the function.
// Example
// Example: a named function declaration, defined once and callable many times.
function ourReusableFunction() {
console.log("Heyya, World");
}
// Invoke the example function once.
ourReusableFunction();
// Only change code below this line
// Solution: reusableFunction prints "Hi World" to the dev console each time it
// is invoked.  Written as a function expression instead of a declaration;
// behavior is identical since the only call site is after the definition.
const reusableFunction = function () {
    console.log("Hi World");
};
// (2) Call the function.
reusableFunction();
#!/bin/bash
# Provision a zone-pinned Linux VM scale set running Nginx, then print the
# public IP address of its load balancer.
#
# Fail fast: abort on any command error, unset variable, or pipeline failure.
set -euo pipefail

# Create a resource group
az group create --name myResourceGroup --location eastus2

# Create a scale set in Availability Zone 1
az vmss create \
  --resource-group myResourceGroup \
  --name myScaleSet \
  --image UbuntuLTS \
  --upgrade-policy-mode automatic \
  --admin-username azureuser \
  --generate-ssh-keys \
  --zones 1

# Apply the Custom Script Extension that installs a basic Nginx webserver
az vmss extension set \
  --publisher Microsoft.Azure.Extensions \
  --version 2.0 \
  --name CustomScript \
  --resource-group myResourceGroup \
  --vmss-name myScaleSet \
  --settings '{"fileUris":["https://raw.githubusercontent.com/Azure-Samples/compute-automation-configurations/master/automate_nginx.sh"],"commandToExecute":"./automate_nginx.sh"}'

# Output the public IP address to access the site in a web browser.
# The JMESPath query is quoted: unquoted, [ipAddress] is a shell glob
# (character class) and could expand to a matching filename in the cwd.
az network public-ip show \
  --resource-group myResourceGroup \
  --name myScaleSetLBPublicIP \
  --query '[ipAddress]' \
  --output tsv
|
<reponame>sivabalajii/groovy-plugin<gh_stars>10-100
/*
* The MIT License
*
* Copyright (c) 2019, CloudBees, Inc.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package hudson.plugins.groovy;
import hudson.util.FormValidation;
import org.junit.Rule;
import org.junit.Test;
import org.jvnet.hudson.test.Issue;
import org.jvnet.hudson.test.JenkinsRule;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertThat;
/**
 * Validation tests for {@code StringScriptSource.DescriptorImpl#doCheckScript}:
 * Groovy scripts entered in the job configuration form must be rejected by the
 * sandbox -- and, critically, must never be executed during validation.
 */
public class StringScriptSourceTest {
@Rule
public JenkinsRule j = new JenkinsRule();
// SECURITY-1293: @ASTTest closures run at Groovy compile time, so validation
// must refuse the annotation and must not execute its body.
@Issue("SECURITY-1293")
@Test
public void blockASTTest() throws Exception {
StringScriptSource.DescriptorImpl d = j.jenkins.getDescriptorByType(StringScriptSource.DescriptorImpl.class);
assertThat(d.doCheckScript("import groovy.transform.*\n" +
"import jenkins.model.Jenkins\n" +
"import hudson.model.FreeStyleProject\n" +
"@ASTTest(value={ assert Jenkins.getInstance().createProject(FreeStyleProject.class, \"should-not-exist\") })\n" +
"@Field int x\n" +
"echo 'hello'\n").toString(), containsString("Annotation ASTTest cannot be used in the sandbox"));
// The annotation's side effect must not have run: no project was created.
assertNull(j.jenkins.getItem("should-not-exist"));
}
// SECURITY-1293: @Grab would download and load arbitrary artifacts during
// compilation, so it is likewise banned in the sandbox.
@Issue("SECURITY-1293")
@Test
public void blockGrab() throws Exception {
StringScriptSource.DescriptorImpl d = j.jenkins.getDescriptorByType(StringScriptSource.DescriptorImpl.class);
assertThat(d.doCheckScript("@Grab(group='foo', module='bar', version='1.0')\ndef foo\n").toString(),
containsString("Annotation Grab cannot be used in the sandbox"));
}
// SECURITY-1338: form validation only compiles the script; constructors must
// not run, so validation reports OK yet the malicious project is never created.
@Issue("SECURITY-1338")
@Test
public void doNotExecuteConstructors() throws Exception {
StringScriptSource.DescriptorImpl d = j.jenkins.getDescriptorByType(StringScriptSource.DescriptorImpl.class);
assertThat(d.doCheckScript("class DoNotRunConstructor {\n" +
" static void main(String[] args) {}\n" +
" DoNotRunConstructor() {\n" +
" assert jenkins.model.Jenkins.instance.createProject(hudson.model.FreeStyleProject, 'should-not-exist')\n" +
" }\n" +
"}\n").kind, equalTo(FormValidation.Kind.OK)); // Compilation ends before the constructor is invoked.
assertNull(j.jenkins.getItem("should-not-exist"));
}
}
|
package demo.sap.safetyandroid.test.core;
import android.content.Context;
import android.hardware.fingerprint.FingerprintManager;
import android.os.Looper;
import android.os.SystemClock;
import androidx.test.InstrumentationRegistry;
import androidx.test.espresso.UiController;
import androidx.test.espresso.ViewAction;
import androidx.test.uiautomator.UiDevice;
import android.view.View;
import android.webkit.CookieManager;
import android.widget.TextView;
import demo.sap.safetyandroid.app.SAPWizardApplication;
import demo.sap.safetyandroid.test.core.ClientPolicy;
import demo.sap.safetyandroid.test.core.ClientPolicyManager;
import demo.sap.safetyandroid.test.core.factory.LoginPageFactory;
import demo.sap.safetyandroid.test.core.factory.PasscodePageFactory;
import demo.sap.safetyandroid.test.pages.ActivationPage;
import demo.sap.safetyandroid.test.pages.EulaScreenPage;
import demo.sap.safetyandroid.test.pages.PasscodePage;
import demo.sap.safetyandroid.test.pages.WelcomePage;
import org.hamcrest.Matcher;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import org.junit.Assert;
import static androidx.test.espresso.Espresso.onView;
import static androidx.test.espresso.matcher.ViewMatchers.isDisplayed;
import static androidx.test.espresso.matcher.ViewMatchers.withId;
import static org.hamcrest.CoreMatchers.not;
import static org.hamcrest.Matchers.isEmptyString;
import android.app.Activity;
/**
 * Static helpers shared by the UI tests: onboarding flows, WebView cookie
 * cleanup, reading strings from views/resources, fingerprint handling, and
 * back navigation.
 */
public class Utils {

    /**
     * Runs the complete onboarding flow: welcome page, authentication,
     * passcode setup, then accepts or denies the EULA screen depending on
     * {@code Constants.EULASCREEN}.
     *
     * @param context the current activity (currently unused; kept so existing
     *                callers keep compiling)
     */
    public static void doOnboarding(Activity context) {
        checkCredentials();
        WelcomePage welcomePage = new WelcomePage();
        welcomePage.clickGetStarted();
        SystemClock.sleep(2000); // wait for the login page to load; consider an idling resource instead of a fixed sleep
        AbstractLoginPage loginPage = LoginPageFactory.getLoginPage();
        loginPage.authenticate();
        PasscodePageFactory.PasscodeFlow();
        EulaScreenPage eulaScreenPage = new EulaScreenPage();
        if (Constants.EULASCREEN == Constants.EulaScreen.DENY) {
            eulaScreenPage.clickDeny();
        } else {
            eulaScreenPage.clickAllow();
        }
    }

    /**
     * Variant of {@link #doOnboarding(Activity)} that exercises the passcode
     * flow which navigates back; it does not reach the EULA screen.
     *
     * @param context the current activity (currently unused)
     */
    public static void doOnboardingBack(Activity context) {
        checkCredentials();
        WelcomePage welcomePage = new WelcomePage();
        welcomePage.clickGetStarted();
        SystemClock.sleep(2000); // wait for the login page to load
        AbstractLoginPage loginPage = LoginPageFactory.getLoginPage();
        loginPage.authenticate();
        PasscodePageFactory.PasscodeFlowBack();
    }

    /**
     * Removes all session cookies from the WebView cookie store, waiting up to
     * one second for the asynchronous removal callback to fire.
     *
     * @throws InterruptedException if the wait is interrupted
     */
    public static void clearSessionCookies() throws InterruptedException {
        CountDownLatch clearSessionCookiesLatch = new CountDownLatch(1);
        Looper.prepare(); // the CookieManager callback needs a Looper; NOTE(review): throws if this thread already has one -- confirm callers run on a fresh thread
        CookieManager.getInstance().removeSessionCookies(value -> clearSessionCookiesLatch.countDown());
        clearSessionCookiesLatch.await(1000, TimeUnit.MILLISECONDS);
    }

    /**
     * Reads the text of the displayed {@link TextView} with the given id on the
     * UI thread and returns it, waiting up to one second for the view action.
     *
     * @param resourceId resource id of a currently displayed TextView
     * @return the view's text, or {@code null} if the action did not complete in time
     * @throws InterruptedException if the wait is interrupted
     */
    public static String getStringFromUiWithId(int resourceId) throws InterruptedException {
        final String[] uiString = new String[1];
        CountDownLatch countDownLatch = new CountDownLatch(1);
        onView(withId(resourceId)).perform(new ViewAction() {
            @Override
            public Matcher<View> getConstraints() {
                return isDisplayed();
            }

            @Override
            public String getDescription() {
                return "Getting description.";
            }

            @Override
            public void perform(UiController uiController, View view) {
                TextView textView = (TextView) view;
                // synchronized for cross-thread visibility: written on the UI
                // thread, read on the test thread below
                synchronized (uiString) {
                    uiString[0] = textView.getText().toString();
                }
                countDownLatch.countDown();
            }
        });
        countDownLatch.await(1000, TimeUnit.MILLISECONDS);
        synchronized (uiString) {
            return uiString[0];
        }
    }

    /**
     * Resolves a string resource from the instrumentation target context.
     *
     * @param id string resource id
     * @return the localized string value
     */
    public static String getResourceString(int id) {
        Context targetContext = InstrumentationRegistry.getTargetContext(); // deprecated; InstrumentationRegistry.getInstrumentation().getTargetContext() is the androidx replacement
        return targetContext.getResources().getString(id);
    }

    /**
     * If the device has fingerprint hardware with enrolled fingerprints and the
     * client policy allows fingerprint unlock, skips the "set fingerprint" page
     * shown during onboarding. No-op otherwise.
     */
    public static void skipFingerprint() {
        // Application context of the app under test
        Context context = InstrumentationRegistry.getInstrumentation().getTargetContext().getApplicationContext();
        // Current client policy decides whether fingerprint unlock is offered at all
        ClientPolicy clientPolicy = ClientPolicyManager.getInstance().getClientPolicy();
        boolean isFingerprintAllowed = clientPolicy.getPasscodePolicy().allowsFingerprint();
        // getSystemService may return null on devices without the fingerprint service -- guard against the NPE
        FingerprintManager fingerprintManager = (FingerprintManager) context.getSystemService(Context.FINGERPRINT_SERVICE);
        if (fingerprintManager != null && fingerprintManager.isHardwareDetected() && fingerprintManager.hasEnrolledFingerprints()) {
            if (isFingerprintAllowed) { // redundant "== true" comparison removed
                PasscodePage.SetFingerprintPage setFingerprintPage = new PasscodePage().new SetFingerprintPage();
                setFingerprintPage.skipFingerprint();
                setFingerprintPage.leavePage();
            }
        }
    }

    /**
     * Fails early with a clear message if the {@code Credentials} class has not
     * been filled in with a username.
     */
    public static void checkCredentials() {
        Assert.assertThat("Credentials are not defined in the Credentials class!", Credentials.USERNAME.trim(), not(isEmptyString()));
    }

    /**
     * Presses the system back button and waits briefly for the UI to settle.
     */
    public static void pressBack() {
        UiDevice mDevice = UiDevice.getInstance(InstrumentationRegistry.getInstrumentation());
        mDevice.pressBack();
        SystemClock.sleep(500);
    }
}
|
<filename>frontend/components/Editor.js<gh_stars>0
import { html, Component } from "../common/Preact.js"
import isEqual from "https://cdn.jsdelivr.net/npm/lodash-es@4/isEqual.js"
import immer from "https://unpkg.com/immer@7.0/dist/immer.esm.js"
import { create_pluto_connection, resolvable_promise } from "../common/PlutoConnection.js"
import { create_counter_statistics, send_statistics_if_enabled, store_statistics_sample, finalize_statistics, init_feedback } from "../common/Feedback.js"
import { FilePicker } from "./FilePicker.js"
import { Notebook } from "./Notebook.js"
import { LiveDocs } from "./LiveDocs.js"
import { DropRuler } from "./DropRuler.js"
import { SelectionArea } from "./SelectionArea.js"
import { UndoDelete } from "./UndoDelete.js"
import { SlideControls } from "./SlideControls.js"
import { link_open_path } from "./Welcome.js"
import { empty_cell_data, code_differs } from "./Cell.js"
import { offline_html } from "../common/OfflineHTMLExport.js"
import { slice_utf8, length_utf8 } from "../common/UnicodeTools.js"
import { handle_log } from "../common/Logging.js"
import { has_ctrl_or_cmd_pressed, ctrl_or_cmd_name, is_mac_keyboard } from "../common/KeyboardShortcuts.js"
const default_path = "..." // placeholder path shown until the server supplies the notebook's real path -- presumably; confirm against Editor state usage
export class Editor extends Component {
constructor() {
super()
this.state = {
notebook: {
path: default_path,
shortpath: "",
in_temp_dir: true,
notebook_id: new URLSearchParams(window.location.search).get("id"),
cells: [],
},
desired_doc_query: null,
recently_deleted: null,
connected: false,
loading: true,
}
// convenience method
const set_notebook_state = (updater) => {
return new Promise((resolve) => {
this.setState((prevstate) => {
return {
notebook: {
...prevstate.notebook,
...updater(prevstate.notebook),
},
}
}, resolve)
})
}
this.set_notebook_state = set_notebook_state.bind(this)
// convenience method
const set_cell_state = (cell_id, new_state_props) => {
return new Promise((resolve) => {
this.setState((prevstate) => {
return {
notebook: {
...prevstate.notebook,
cells: prevstate.notebook.cells.map((c) => {
return c.cell_id == cell_id ? { ...c, ...new_state_props } : c
}),
},
}
}, resolve)
})
}
this.set_cell_state = set_cell_state.bind(this)
// bonds only send their latest value to the back-end when all cells have completed - this is triggered using a promise
this.all_completed = true
this.all_completed_promise = resolvable_promise()
// statistics that are accumulated over time
this.counter_statistics = create_counter_statistics()
// these are things that can be done to the local notebook
this.actions = {
add_local_cell: (cell, new_index) => {
return set_notebook_state((prevstate) => {
if (prevstate.cells.some((c) => c.cell_id == cell.cell_id)) {
console.warn("Tried to add cell with existing cell_id. Canceled.")
console.log(cell)
console.log(prevstate)
return prevstate
}
const before = prevstate.cells
return {
cells: [...before.slice(0, new_index), cell, ...before.slice(new_index)],
}
})
},
update_local_cell_output: (cell, { output, queued, running, runtime, errored }) => {
this.counter_statistics.numRuns++
return set_cell_state(cell.cell_id, {
queued: queued,
running: running,
runtime: runtime,
errored: errored,
output: { ...output, timestamp: Date.now() },
})
},
update_local_cell_input: (cell, by_me, code, folded) => {
return set_cell_state(cell.cell_id, {
remote_code: {
body: code,
submitted_by_me: by_me,
timestamp: Date.now(),
},
local_code: {
body: code,
},
code_folded: folded,
})
},
delete_local_cell: (cell) => {
// TODO: event listeners? gc?
return set_notebook_state((prevstate) => {
return {
cells: prevstate.cells.filter((c) => c !== cell),
}
})
},
move_local_cells: (cells, new_index) => {
return set_notebook_state((prevstate) => {
// The set of moved cell can be scatter across the notebook (not necessarily contiguous)
// but this action will move all of them to a single cluster
// The first cell of that cluster will be at index `new_index`.
const old_first_index = prevstate.cells.findIndex((c) => cells.includes(c))
const before = prevstate.cells.filter((c, i) => i < new_index && !cells.includes(c))
const after = prevstate.cells.filter((c, i) => i >= new_index && !cells.includes(c))
return {
cells: [...before, ...cells, ...after],
}
})
},
}
const on_remote_notebooks = ({ message }) => {
const old_path = this.state.notebook.path
message.notebooks.forEach((nb) => {
if (nb.notebook_id == this.state.notebook.notebook_id) {
set_notebook_state(() => nb)
update_stored_recent_notebooks(nb.path, old_path)
}
})
}
// these are update message that are _not_ a response to a `send(*, *, {create_promise: true})`
const on_update = (update, by_me) => {
if (update.notebook_id == null) {
switch (update.type) {
case "notebook_list":
on_remote_notebooks(update)
break
}
} else {
if (this.state.notebook.notebook_id === update.notebook_id) {
const message = update.message
const cell = this.state.notebook.cells.find((c) => c.cell_id == update.cell_id)
switch (update.type) {
case "cell_output":
if (cell != null) {
this.actions.update_local_cell_output(cell, message)
}
break
case "cell_queued":
if (cell != null) {
set_cell_state(update.cell_id, {
running: false,
queued: true,
})
}
break
case "cell_running":
if (cell != null) {
set_cell_state(update.cell_id, {
running: true,
queued: false,
})
}
break
case "cell_folded":
if (cell != null) {
set_cell_state(update.cell_id, {
code_folded: message.folded,
})
}
break
case "cell_input":
if (cell != null) {
this.actions.update_local_cell_input(cell, by_me, message.code, message.folded)
}
break
case "cell_deleted":
if (cell != null) {
this.actions.delete_local_cell(cell)
}
break
case "cells_moved":
const cells = message.cells.map((cell_id) => this.state.notebook.cells.find((c) => c.cell_id == cell_id))
this.actions.move_local_cells(cells, message.index)
break
case "cell_added":
const new_cell = empty_cell_data(update.cell_id)
new_cell.queued = new_cell.running = false
new_cell.output.body = ""
this.actions.add_local_cell(new_cell, message.index)
break
case "bond_update":
// by someone else
break
case "log":
handle_log(message, this.state.notebook.path)
break
default:
console.error("Received unknown update type!")
console.log(update)
alert("Something went wrong 🙈\n Try clearing your browser cache and refreshing the page")
break
}
}
}
}
this.on_update = on_update
const on_establish_connection = (client) => {
// nasty
Object.assign(this.client, client)
window.version_info = this.client.version_info // for debugging
const run_all = this.client.session_options.evaluation.run_notebook_on_load
// on socket success
this.client.send("get_all_notebooks", {}, {}).then(on_remote_notebooks)
this.client
.send("get_all_cells", {}, { notebook_id: this.state.notebook.notebook_id })
.then((update) => {
this.setState(
{
notebook: {
...this.state.notebook,
cells: update.message.cells.map((cell) => {
const cell_data = empty_cell_data(cell.cell_id)
cell_data.running = false
cell_data.queued = run_all
cell_data.code_folded = true
return cell_data
}),
},
},
() => {
// For cell outputs, we request them all, and then batch all responses into one using Promise.all
// We could experiment with loading the first ~5 cell outputs in the first batch, and the rest in a second, to speed up the time-to-first-usable-content.
const outputs_promise = Promise.all(
this.state.notebook.cells.map((cell_data) => {
return this.client.send(
"get_output",
{},
{
notebook_id: this.state.notebook.notebook_id,
cell_id: cell_data.cell_id,
}
)
})
).then((updates) => {
updates.forEach((u, i) => {
const cell_data = this.state.notebook.cells[i]
if (!run_all || cell_data.running || cell_data.queued) {
this.actions.update_local_cell_output(cell_data, u.message)
} else {
// the cell completed running asynchronously, after Pluto received and processed the :getouput request, but before this message was added to this client's queue.
}
})
})
// Same for cell inputs
// We process all updates in one go, so that React doesn't do its Thing™ for every cell input. (This makes page loading very slow.)
const inputs_promise = Promise.all(
this.state.notebook.cells.map((cell_data) => {
return this.client.send(
"getinput",
{},
{
notebook_id: this.state.notebook.notebook_id,
cell_id: cell_data.cell_id,
}
)
})
).then((updates) => {
updates.forEach((u, i) => {
const cell_data = this.state.notebook.cells[i]
this.actions.update_local_cell_input(cell_data, false, u.message.code, u.message.folded)
})
})
Promise.all([outputs_promise, inputs_promise]).then(() => {
this.setState({
loading: false,
})
console.info("All cells loaded! 🚂 enjoy the ride")
})
}
)
})
.catch(console.error)
}
const on_connection_status = (val) => this.setState({ connected: val })
const on_reconnect = () => {
console.warn("Reconnected! Checking states")
return true
}
this.client = {}
create_pluto_connection({
on_unrequested_update: on_update,
on_connection_status: on_connection_status,
on_reconnect: on_reconnect,
connect_metadata: { notebook_id: this.state.notebook.notebook_id },
}).then(on_establish_connection)
// these are things that can be done to the remote notebook
this.requests = {
change_remote_cell: (cell_id, new_code, create_promise = false) => {
this.counter_statistics.numEvals++
// set_cell_state(cell_id, { running: true })
return this.client.send(
"change_cell",
{ code: new_code },
{
notebook_id: this.state.notebook.notebook_id,
cell_id: cell_id,
},
create_promise
)
},
wrap_remote_cell: (cell_id, block = "begin") => {
const cell = this.state.notebook.cells.find((c) => c.cell_id == cell_id)
const new_code = block + "\n\t" + cell.local_code.body.replace(/\n/g, "\n\t") + "\n" + "end"
this.actions.update_local_cell_input(cell, false, new_code, cell.code_folded)
this.requests.change_remote_cell(cell_id, new_code)
},
split_remote_cell: async (cell_id, boundaries, submit = false) => {
const index = this.state.notebook.cells.findIndex((c) => c.cell_id == cell_id)
const cell = this.state.notebook.cells[index]
const old_code = cell.local_code.body
const padded_boundaries = [0, ...boundaries]
const parts = boundaries.map((b, i) => slice_utf8(old_code, padded_boundaries[i], b).trim()).filter((x) => x !== "")
const new_ids = []
// for loop because we need to wait for each addition to finish before adding the next, otherwise their order would be random
for (const [i, part] of parts.entries()) {
if (i === 0) {
new_ids.push(cell_id)
} else {
const update = await this.requests.add_remote_cell_at(index + i, true)
on_update(update, true)
new_ids.push(update.cell_id)
}
}
await Promise.all(
parts.map(async (part, i) => {
const id = new_ids[i]
// we set the cell's remote_code to force its value
await this.actions.update_local_cell_input({ cell_id: id }, false, part, false)
// we need to reset the remote_code, otherwise the cell will falsely report that it is in sync with the remote
const new_state = this.state.notebook.cells.find((c) => c.cell_id === id)
await this.set_cell_state(id, {
remote_code: {
...new_state.remote_code,
body: i === 0 ? old_code : "",
},
})
})
)
if (submit) {
const cells = new_ids.map((id) => this.state.notebook.cells.find((c) => c.cell_id == id))
await this.requests.set_and_run_multiple(cells)
}
},
interrupt_remote: (cell_id) => {
set_notebook_state((prevstate) => {
return {
cells: prevstate.cells.map((c) => {
return { ...c, errored: c.errored || c.running || c.queued }
}),
}
})
this.client.send(
"interrupt_all",
{},
{
notebook_id: this.state.notebook.notebook_id,
},
false
)
},
move_remote_cells: (cells, new_index) => {
// Indexing works as if a new cell is added.
// e.g. if the third cell (at js-index 2) of [0, 1, 2, 3, 4]
// is moved to the end, that would be new js-index = 5
this.client.send(
"move_multiple_cells",
{
cells: cells.map((c) => c.cell_id),
index: new_index,
},
{
notebook_id: this.state.notebook.notebook_id,
},
false
)
},
add_remote_cell_at: (index, create_promise = false) => {
return this.client.send(
"add_cell",
{ index: index },
{
notebook_id: this.state.notebook.notebook_id,
},
create_promise
)
},
add_remote_cell: (cell_id, before_or_after, create_promise = false) => {
const index = this.state.notebook.cells.findIndex((c) => c.cell_id == cell_id)
const delta = before_or_after == "before" ? 0 : 1
return this.requests.add_remote_cell_at(index + delta, create_promise)
},
delete_cell: (cell_id) => {
if (this.state.notebook.cells.length <= 1) {
this.requests.add_remote_cell(cell_id, "after")
}
const index = this.state.notebook.cells.findIndex((c) => c.cell_id == cell_id)
const cell = this.state.notebook.cells[index]
this.setState({
recently_deleted: {
index: index,
body: this.state.notebook.cells[index].local_code.body,
},
})
set_cell_state(cell_id, {
queued: true,
}).then(() => {
this.actions.update_local_cell_input(cell, false, "", true)
})
this.client.send(
"delete_cell",
{},
{
notebook_id: this.state.notebook.notebook_id,
cell_id: cell_id,
},
false
)
},
confirm_delete_multiple: (cells) => {
if (cells.length <= 1 || confirm(`Delete ${cells.length} cells?`)) {
if (cells.some((f) => f.running || f.queued)) {
if (confirm("This cell is still running - would you like to interrupt the notebook?")) {
this.requests.interrupt_remote(cells[0].cell_id)
}
} else {
cells.forEach((f) => this.requests.delete_cell(f.cell_id))
}
}
},
fold_remote_cell: (cell_id, newFolded) => {
this.client.send(
"fold_cell",
{ folded: newFolded },
{
notebook_id: this.state.notebook.notebook_id,
cell_id: cell_id,
},
false
)
},
set_and_run_all_changed_remote_cells: () => {
const changed = this.state.notebook.cells.filter((cell) => code_differs(cell))
return this.requests.set_and_run_multiple(changed)
},
set_and_run_multiple: (cells) => {
const promises = cells.map((cell) => {
set_cell_state(cell.cell_id, { queued: true })
return this.client
.send(
"set_input",
{ code: cell.local_code.body },
{
notebook_id: this.state.notebook.notebook_id,
cell_id: cell.cell_id,
}
)
.then((u) => {
this.actions.update_local_cell_input(cell, true, u.message.code, u.message.folded)
})
})
Promise.all(promises)
.then(() =>
this.client.send(
"run_multiple_cells",
{
cells: cells.map((c) => c.cell_id),
},
{
notebook_id: this.state.notebook.notebook_id,
}
)
)
.catch(console.error)
return cells.length != 0
},
set_bond: (symbol, value, is_first_value) => {
this.counter_statistics.numBondSets++
if (this.all_completed) {
// instead of waiting for this component to update, we reset the promise right now
// this prevents very fast bonds from sending multiple values within the ping interval
this.all_completed = false
Object.assign(this.all_completed_promise, resolvable_promise())
}
this.client
.send(
"set_bond",
{
sym: symbol,
val: value,
is_first_value: is_first_value,
},
{ notebook_id: this.state.notebook.notebook_id }
)
.then(({ message }) => {
// the back-end tells us whether any cells depend on the bound value
if (message.triggered_other_cells) {
// there are dependent cells, those cells will start running and returning output soon
// when the last running cell returns its output, the all_completed_promise is resolved, and a new bond value can be sent
} else {
// there are no dependent cells, so we resolve the promise right now
if (!this.all_completed) {
this.all_completed = true
this.all_completed_promise.resolve()
}
}
})
},
}
this.selected_friends = (cell_id) => {
const cell = this.state.notebook.cells.find((c) => c.cell_id === cell_id)
if (cell.selected) {
return this.state.notebook.cells.filter((c) => c.selected)
} else {
return [cell]
}
}
this.submit_file_change = (new_path, reset_cm_value) => {
const old_path = this.state.notebook.path
if (old_path === new_path) {
return
}
if (this.state.in_temp_dir || confirm("Are you sure? Will move from\n\n" + old_path + "\n\nto\n\n" + new_path)) {
this.setState({ loading: true })
this.client
.send(
"move_notebook_file",
{
path: new_path,
},
{ notebook_id: this.state.notebook.notebook_id }
)
.then((u) => {
this.setState({
loading: false,
})
if (u.message.success) {
this.setState({
path: new_path,
})
document.activeElement.blur()
} else {
this.setState({
path: old_path,
})
reset_cm_value()
alert("Failed to move file:\n\n" + u.message.reason)
}
})
} else {
this.setState({
path: old_path,
})
reset_cm_value()
}
}
document.addEventListener("keydown", (e) => {
if (e.key === "q" && has_ctrl_or_cmd_pressed(e)) {
// This one can't be done as cmd+q on mac, because that closes chrome - Dral
if (this.state.notebook.cells.some((c) => c.running || c.queued)) {
this.requests.interrupt_remote()
}
e.preventDefault()
} else if (e.key === "s" && has_ctrl_or_cmd_pressed(e)) {
const some_cells_ran = this.requests.set_and_run_all_changed_remote_cells()
if (!some_cells_ran) {
// all cells were in sync allready
// TODO: let user know that the notebook autosaves
}
e.preventDefault()
} else if (e.key === "Backspace" || e.key === "Delete") {
const selected = this.state.notebook.cells.filter((c) => c.selected)
if (selected.length > 0) {
this.requests.confirm_delete_multiple(selected)
e.preventDefault()
}
} else if ((e.key === "?" && has_ctrl_or_cmd_pressed(e)) || e.key === "F1") {
// On mac "cmd+shift+?" is used by chrome, so that is why this needs to be ctrl as well on mac
// Also pressing "ctrl+shift" on mac causes the key to show up as "/", this madness
// I hope we can find a better solution for this later - Dral
alert(
`Shortcuts 🎹
Shift+Enter: run cell
${ctrl_or_cmd_name}+Enter: run cell and add cell below
Delete or Backspace: delete empty cell
PageUp or fn+Up: select cell above
PageDown or fn+Down: select cell below
${ctrl_or_cmd_name}+Q: interrupt notebook
${ctrl_or_cmd_name}+S: submit all changes
The notebook file saves every time you run`
)
e.preventDefault()
}
})
window.addEventListener("beforeunload", (event) => {
const first_unsaved = this.state.notebook.cells.find((cell) => code_differs(cell))
if (first_unsaved != null) {
window.dispatchEvent(new CustomEvent("cell_focus", { detail: { cell_id: first_unsaved.cell_id } }))
// } else if (this.state.notebook.in_temp_dir) {
// window.scrollTo(0, 0)
// // TODO: focus file picker
} else {
console.warn("unloading 👉 disconnecting websocket")
this.client.kill()
return // and don't prevent the unload
}
console.log("Preventing unload")
event.stopImmediatePropagation()
event.preventDefault()
event.returnValue = ""
})
setTimeout(() => {
init_feedback()
finalize_statistics(this.state, this.client, this.counter_statistics).then(store_statistics_sample)
setInterval(() => {
finalize_statistics(this.state, this.client, this.counter_statistics).then((statistics) => {
store_statistics_sample(statistics)
send_statistics_if_enabled(statistics)
})
this.counter_statistics = create_counter_statistics()
}, 10 * 60 * 1000) // 10 minutes - statistics interval
}, 20 * 1000) // 20 seconds - load feedback a little later for snappier UI
}
componentDidUpdate() {
document.title = "🎈 " + this.state.notebook.shortpath + " ⚡ Pluto.jl ⚡"
const any_code_differs = this.state.notebook.cells.some((cell) => code_differs(cell))
document.body.classList.toggle("code_differs", any_code_differs)
document.body.classList.toggle("loading", this.state.loading)
if (this.state.connected) {
document.querySelector("meta[name=theme-color]").content = "#fff"
document.body.classList.remove("disconnected")
} else {
document.querySelector("meta[name=theme-color]").content = "#DEAF91"
document.body.classList.add("disconnected")
}
const all_completed_now = !this.state.notebook.cells.some((cell) => cell.running || cell.queued)
if (all_completed_now && !this.all_completed) {
this.all_completed = true
this.all_completed_promise.resolve()
}
if (!all_completed_now && this.all_completed) {
this.all_completed = false
Object.assign(this.all_completed_promise, resolvable_promise())
}
}
render() {
const circle = (fill) => html`<svg width="48" height="48" viewBox="0 0 48 48" style="height: .7em; width: .7em; margin-left: .3em; margin-right: .2em;">
<circle cx="24" cy="24" r="24" fill=${fill}></circle>
</svg>`
const triangle = (fill) => html`<svg
width="48"
height="48"
viewBox="0 0 48 48"
style="height: .7em; width: .7em; margin-left: .3em; margin-right: .2em; margin-bottom: -.1em;"
>
<polygon
points="24,0 48
,40 0,40"
fill=${fill}
stroke="none"
/>
</svg>`
return html`
<header>
<aside id="export">
<div id="container">
<div class="export_title">export</div>
<a href="./notebookfile?id=${this.state.notebook.notebook_id}" target="_blank" class="export_card">
<header>${triangle("#a270ba")} Notebook file</header>
<section>Download a copy of the <b>.jl</b> script.</section>
</a>
<a
href="#"
class="export_card"
onClick=${(e) => {
offline_html({ pluto_version: this.client.version_info.pluto, head: document.head, body: document.body }).then((html) => {
if (html != null) {
const fake_anchor = document.createElement("a")
fake_anchor.download = this.state.notebook.shortpath + ".html"
fake_anchor.href = URL.createObjectURL(
new Blob([html], {
type: "text/html",
})
)
document.body.appendChild(fake_anchor)
fake_anchor.click()
document.body.removeChild(fake_anchor)
}
})
}}
>
<header>${circle("#E86F51")} Static HTML</header>
<section>An <b>.html</b> file for your web page, or to share online.</section>
</a>
<a
href="#"
class="export_card"
style=${window.chrome == null ? "opacity: .7;" : ""}
onClick=${() => {
if (window.chrome == null) {
alert("PDF generation works best on Google Chome.\n\n(We're working on it!)")
}
window.print()
}}
>
<header>${circle("#3D6117")} Static PDF</header>
<section>A static <b>.pdf</b> file for print or email.</section>
</a>
<!--<div class="export_title">
future
</div>
<a class="export_card" style="border-color: #00000021; opacity: .7;">
<header>mybinder.org</header>
<section>Publish an interactive notebook online.</section>
</a>-->
<button
title="Close"
class="toggle_export"
onClick=${() => document.body.querySelector("header").classList.toggle("show_export", false)}
>
<span></span>
</button>
</div>
</aside>
<nav id="at_the_top">
<a href="./">
<h1><img id="logo-big" src="img/logo.svg" alt="Pluto.jl" /><img id="logo-small" src="img/favicon_unsaturated.svg" /></h1>
</a>
<${FilePicker}
client=${this.client}
value=${this.state.notebook.in_temp_dir ? "" : this.state.notebook.path}
on_submit=${this.submit_file_change}
suggest_new_file=${{
base: this.client.session_options == null ? "" : this.client.session_options.server.notebook_path_suggestion,
name: this.state.notebook.shortpath,
}}
placeholder="Save notebook..."
button_label=${this.state.notebook.in_temp_dir ? "Choose" : "Move"}
/>
<button class="toggle_export" title="Export..." onClick=${() => document.body.querySelector("header").classList.toggle("show_export")}>
<span></span>
</button>
</nav>
</header>
<main>
<preamble>
<button onClick=${() => this.requests.set_and_run_all_changed_remote_cells()} class="runallchanged" title="Save and run all changed cells">
<span></span>
</button>
</preamble>
<${Notebook}
...${this.state.notebook}
on_update_doc_query=${(query) => this.setState({ desired_doc_query: query })}
on_cell_input=${(cell, new_val) => {
this.set_cell_state(cell.cell_id, {
local_code: {
body: new_val,
},
})
}}
on_focus_neighbor=${(cell_id, delta) => {
const i = this.state.notebook.cells.findIndex((c) => c.cell_id === cell_id)
const new_i = i + delta
if (new_i >= 0 && new_i < this.state.notebook.cells.length) {
window.dispatchEvent(
new CustomEvent("cell_focus", {
detail: {
cell_id: this.state.notebook.cells[new_i].cell_id,
line: delta === -1 ? Infinity : -1,
},
})
)
}
}}
disable_input=${!this.state.connected}
focus_after_creation=${!this.state.loading}
all_completed_promise=${this.all_completed_promise}
selected_friends=${this.selected_friends}
requests=${this.requests}
client=${this.client}
/>
<${DropRuler} requests=${this.requests} selected_friends=${this.selected_friends} />
<${SelectionArea}
cells=${this.state.notebook.cells}
on_selection=${(selected_cell_ids) => {
let current_selected_cells = this.state.notebook.cells.filter((x) => x.selected).map((x) => x.cell_id)
if (!isEqual(current_selected_cells, selected_cell_ids)) {
this.setState(
immer((state) => {
for (let cell of state.notebook.cells) {
cell.selected = selected_cell_ids.includes(cell.cell_id)
}
})
)
}
}}
/>
</main>
<${LiveDocs}
desired_doc_query=${this.state.desired_doc_query}
on_update_doc_query=${(query) => this.setState({ desired_doc_query: query })}
client=${this.client}
notebook=${this.state.notebook}
/>
<${UndoDelete}
recently_deleted=${this.state.recently_deleted}
on_click=${() => {
this.requests.add_remote_cell_at(this.state.recently_deleted.index, true).then((update) => {
this.on_update(update, true)
this.actions.update_local_cell_input({ cell_id: update.cell_id }, false, this.state.recently_deleted.body, false).then(() => {
this.requests.change_remote_cell(update.cell_id, this.state.recently_deleted.body)
})
})
}}
/>
<${SlideControls} />
<footer>
<div id="info">
<form id="feedback" action="#" method="post">
<a id="statistics-info" href="statistics-info">Statistics</a>
<label for="opinion">🙋 How can we make <a href="https://github.com/fonsp/Pluto.jl">Pluto.jl</a> better?</label>
<input type="text" name="opinion" id="opinion" autocomplete="off" placeholder="Instant feedback..." />
<button>Send</button>
</form>
</div>
</footer>
`
}
}
/* LOCALSTORAGE NOTEBOOKS LIST */

/**
 * Move `recent_path` to the front of the "recent notebooks" list kept in
 * localStorage, removing any previous occurrence of it (and of `also_delete`,
 * e.g. the old path after a notebook was moved). The list is capped at 50.
 *
 * Fix: the stored value is now validated — a corrupted / non-JSON / non-array
 * value previously made `JSON.parse` throw and crashed the caller; it is now
 * discarded and the list restarts from empty.
 *
 * @param {string} recent_path path to put at the top of the list
 * @param {string} [also_delete] additional path to drop from the list
 */
export const update_stored_recent_notebooks = (recent_path, also_delete = undefined) => {
    let oldpaths = []
    try {
        const storedString = localStorage.getItem("recent notebooks")
        const parsed = storedString != null ? JSON.parse(storedString) : []
        // Anything that is not an array (old format, foreign writer) is ignored.
        oldpaths = Array.isArray(parsed) ? parsed : []
    } catch (ignored) {
        // Corrupted JSON in localStorage must not break the editor — start fresh.
        oldpaths = []
    }
    const newpaths = [recent_path].concat(
        oldpaths.filter((path) => {
            return path != recent_path && path != also_delete
        })
    )
    localStorage.setItem("recent notebooks", JSON.stringify(newpaths.slice(0, 50)))
}
|
#!/bin/bash
#
# Script to setup RunCloud config on a Miranj production server
#
# Symlinks the repo's nginx include files into RunCloud's extra.d directory
# for three web apps: the CDN host, the Craft app, and the redirects host.

# Abort on any error, on use of unset variables, and on failed pipeline members.
set -euo pipefail

APP_NAME="boilerplate-craft"
CDN_NAME="boilerplate-cdn"
REDIRECTS_NAME="boilerplate-redirects"
REPO_NAME="boilerplate"
REPO_DIR="/home/runcloud/webapps/$APP_NAME/$REPO_NAME"
TARGET_DIR="/etc/nginx-rc/extra.d"

# With `set -e`, a missing nginx include directory stops the script here
# instead of scattering symlinks into the current directory.
cd "$TARGET_DIR"

# `ln -sfn` makes the script idempotent: re-running it replaces stale links
# instead of failing with "File exists".

# CDN
ln -sfn "$REPO_DIR/serverconfigs/nginx/cdn/static.conf" "$CDN_NAME.location.static.include.conf"
ln -sfn "$REPO_DIR/serverconfigs/nginx/cdn/proxy.conf" "$CDN_NAME.location.proxy.include.conf"

# Craft
ln -sfn "$REPO_DIR/serverconfigs/nginx/craft/http.conf" "$APP_NAME.location.http.include.conf"
ln -sfn "$REPO_DIR/serverconfigs/envs/production/env.conf" "$APP_NAME.location.main-before.include.conf"
ln -sfn "$REPO_DIR/serverconfigs/nginx/craft/main.conf" "$APP_NAME.location.main.include.conf"
ln -sfn "$REPO_DIR/serverconfigs/nginx/cdn/static.conf" "$APP_NAME.location.static.include.conf"
ln -sfn "$REPO_DIR/serverconfigs/nginx/craft/proxy.conf" "$APP_NAME.location.proxy.include.conf"

# Redirects
ln -sfn "$REPO_DIR/serverconfigs/envs/production/env.conf" "$REDIRECTS_NAME.location.main-before.include.conf"
ln -sfn "$REPO_DIR/serverconfigs/nginx/redirects/main.conf" "$REDIRECTS_NAME.location.main.include.conf"
|
// src/components/_global/js/modules/videos.js
/*
*************************************
* <!-- Videos -->
*************************************
*/
import {
templateUrl,
homeUrl,
ajaxUrl,
browser,
UixModuleInstance,
UixGUID,
UixMath,
UixCssProperty,
UixDebounce,
UixThrottle
} from '@uixkit/core/_global/js';
export const VIDEOS = ( ( module, $, window, document ) => {
if ( window.VIDEOS === null ) return false;
module.VIDEOS = module.VIDEOS || {};
module.VIDEOS.version = '0.1.3';
module.VIDEOS.documentReady = function( $ ) {
let windowWidth = window.innerWidth,
windowHeight = window.innerHeight;
/*
---------------------------
Video Embed
---------------------------
*/
$( '.uix-video' ).each( function() {
const $this = $( this );
const curVideoID = $this.find( 'video' ).attr( 'id' ),
coverPlayBtnID = 'videocover-' + curVideoID,
videoWrapperW = $this.closest( '[data-embed-video-wrapper]' ).width();
let dataAuto = $this.data( 'embed-video-autoplay' ),
dataLoop = $this.data( 'embed-video-loop' ),
dataControls = $this.data( 'embed-video-controls' ),
dataW = $this.data( 'embed-video-width' ),
dataH = $this.data( 'embed-video-height' );
//Push a new ID to video
//Solve the problem that ajax asynchronous loading does not play
$this.find( '.video-js' ).attr( 'id', curVideoID );
if ( typeof dataAuto === typeof undefined ) {
dataAuto = true;
}
if ( typeof dataLoop === typeof undefined ) {
dataLoop = true;
}
if ( typeof dataControls === typeof undefined ) {
dataControls = false;
}
if ( typeof dataW === typeof undefined || dataW == 'auto' ) {
dataW = videoWrapperW;
}
if ( typeof dataH === typeof undefined || dataH == 'auto' ) {
dataH = videoWrapperW/1.77777777777778;
}
//Display cover and play buttons when some mobile device browsers cannot automatically play video
if ( $( '#' + coverPlayBtnID ).length == 0 ) {
$( '<div id="'+coverPlayBtnID+'" class="uix-video__cover"><span class="uix-video__cover__placeholder" style="background-image:url('+$this.find( 'video' ).attr( 'poster' )+')"></span><span class="uix-video__cover__playbtn"></span></div>' ).insertBefore( $this );
const btnEv = ( Modernizr.touchevents ) ? 'touchstart' : 'click';
$( '#' + coverPlayBtnID + ' .uix-video__cover__playbtn' ).on( btnEv, function( e ) {
e.preventDefault();
myPlayer.play();
$( '#' + coverPlayBtnID ).hide();
});
//Prevent some devices from automatically playing video and trigger with buttons
if ( !dataAuto || browser.isAndroid ) {
$( '#' + coverPlayBtnID + ' .uix-video__cover__playbtn' ).show();
}
}
/* --------- HTML5 video autoplay on mobile revisited */
if ( windowWidth <= 768 ) {
$this.find( '.video-js' ).attr({
'playsinline' : 'true'
});
}
const myPlayer = videojs( curVideoID,
{
width : dataW,
height : dataH,
loop : dataLoop,
autoplay : dataAuto
},
function onPlayerReady() {
const initVideo = function( obj ) {
//Get Video Dimensions
let curW = obj.videoWidth(),
curH = obj.videoHeight(),
newW = curW,
newH = curH;
newW = videoWrapperW;
//Scaled/Proportional Content
newH = curH*(newW/curW);
if ( !isNaN( newW ) && !isNaN( newH ) ) {
obj.height( newH );
obj.width( newW );
}
//Show this video wrapper
$this.css( 'visibility', 'visible' );
//Hide loading effect
$this.find( '.vjs-loading-spinner, .vjs-big-play-button' ).hide();
}
initVideo( this );
/* --------- Video initialize */
this.on( 'loadedmetadata', function() {
initVideo( this );
});
/* --------- Set, tell the player it's in fullscreen */
if ( dataAuto ) {
this.muted( true ); //Fix an error of Video auto play is not working in browser
this.play();
}
/* --------- Disable control bar play button click */
if ( !dataControls ) {
this.controls( false );
}
/* --------- Determine if the video is auto played from mobile devices */
let autoPlayOK = false;
this.on( 'timeupdate', function() {
let duration = this.duration();
if ( duration > 0 ) {
autoPlayOK = true;
if ( this.currentTime() > 0 ) {
autoPlayOK = true;
this.off( 'timeupdate' );
//Hide cover and play buttons when the video automatically played
$( '#' + coverPlayBtnID ).hide();
}
}
});
});
});
/*
---------------------------
Video Popup Interaction
---------------------------
*/
const modalDialogTrigger = '[data-video-win]';
//Add video container
$( modalDialogTrigger ).each( function() {
const $this = $( this );
let videoSrcIfm = '',
videoSrcMp4 = $this.data( 'video-mp4' ),
videoSrcWebm = $this.data( 'video-webm' ),
videoSrcOgv = $this.data( 'video-ogv' ),
videoPoster = $this.data( 'video-poster' ),
videoContainerMid = $this.data( 'modal-id' ),
videoContainerVid = videoContainerMid + '--videopush';
if ( typeof videoSrcMp4 === typeof undefined ) {
videoSrcMp4 = '';
}
if ( typeof videoSrcWebm === typeof undefined ) {
videoSrcWebm = '';
}
if ( typeof videoSrcOgv === typeof undefined ) {
videoSrcOgv = '';
}
if ( $this.find( '[data-video-iframe]' ).length > 0 ) {
videoSrcIfm = $this.find( '[data-video-iframe]' ).html();
}
//Add modal dialog
if ( $( '#' + videoContainerMid ).length == 0 ) {
let v = '',
vmp4 = '',
vwebm = '',
vogv = '';
if ( videoSrcMp4 != '' ) {
vmp4 = '<source src="'+videoSrcMp4+'" type="video/mp4">';
}
if ( videoSrcWebm != '' ) {
vwebm = '<source src="'+videoSrcWebm+'" type="video/webm">';
}
if ( videoSrcOgv != '' ) {
vogv = '<source src="'+videoSrcOgv+'" type="video/ogv">';
}
v += '<div class="uix-modal-box is-fullscreen is-video" role="dialog" tabindex="-1" aria-hidden="true" id="'+videoContainerMid+'">';
v += '<button type="button" class="uix-modal-box__close" data-modal-close-trigger="true"></button>';
v += '<div class="uix-modal-box__content" role="document">';
v += '<div class="uix-modal-box__video-waiting"></div><div class="uix-modal-box__video-container" data-video-player-init="0">';
if ( $this.find( '[data-video-iframe]' ).length > 0 && videoSrcIfm != '' ) {
//If iframe
v += '<div id="'+videoContainerVid+'" class="embed-responsive embed-responsive-16by9">';
v += videoSrcIfm;
v += '</div>';
} else {
//If local video
v += '<video id="'+videoContainerVid+'" class="video-js vjs-default-skin" controls poster="'+videoPoster+'">';
v += vmp4 + vwebm + vogv;
v += '</video>';
}
v += '</div>';
v += '</div>';
v += '</div>';
//Wait until previous .append() is complete
$( v ).appendTo( 'body' );
}
});
//Check out: http://docs.videojs.com/tutorial-player-workflows.html
$( document ).off( 'click.VIDEOS' ).on( 'click.VIDEOS', modalDialogTrigger, function() {
const vid = $( this ).data( 'modal-id' ) + '--videopush',
newMaxW = windowWidth - 80,
newMaxH = windowHeight - 80,
$vContainer = $( '#' + vid ).closest( '.uix-modal-box__video-container' ),
$vLoader = $vContainer.prev( '.uix-modal-box__video-waiting' ),
myPlayerInit = $vContainer.data( 'video-player-init' );
let $ifm = false;
//----- Hidden/Display the wrapper of video
const displayVC = function() {
TweenMax.set( $vContainer, {
alpha: 1
});
$vLoader.removeClass( 'is-active' );
};
const hiddenVC = function() {
TweenMax.set( $vContainer, {
alpha: 0
});
$vLoader.addClass( 'is-active' );
};
//----- Embed iframe
if ( $( '#' + vid ).find( 'iframe' ).length > 0 ) {
$ifm = $( '#' + vid ).find( 'iframe' );
} else {
hiddenVC();
}
if ( $ifm && typeof $ifm === 'object' ) {
if ( $ifm.length > 0 ) {
let curW = $ifm.width(),
curH = $ifm.height(),
newW = curW,
newH = curH;
if ( curH > newMaxH ) {
newH = newMaxH;
//Scaled/Proportional Content
newW = curW*(newH/curH);
}
if ( newW > newMaxW ) {
newW = newMaxW;
//Scaled/Proportional Content
newH = curH*(newW/curW);
}
$ifm.css({
'left' : ( newMaxW - newW )/2 + 'px',
'top' : ( newMaxH - newH )/2 + 'px',
'height' : newH + 'px',
'width' : newW + 'px'
});
if ( windowWidth <= 768 ) {
$ifm.css({
'top' : 0
}).parent( '.embed-responsive' ).css({
'top' : ( newMaxH - newH )/2 + 'px'
});
}
}
return false;
}
//----- HTML5 video autoplay on mobile revisited
if ( windowWidth <= 768 ) {
$( '#' + vid ).attr({
'playsinline' : 'true'
});
}
//----- Embed local video
// Create a Video.js player on element `vid`. The 1x1 size is a placeholder;
// the player is resized by initVideo() once real dimensions are available.
const myPlayer = videojs( vid,
{
width : 1,
height : 1,
autoplay : true,
loop : true
},
function onPlayerReady() {
// Scale the video to fit within the outer-scope bounds newMaxW x newMaxH
// while preserving aspect ratio, then vertically center its container.
// NOTE(review): relies on outer-scope $vContainer, windowHeight,
// newMaxW/newMaxH and displayVC() declared above this fragment — confirm.
const initVideo = function( obj ) {
// Intrinsic video dimensions as reported by Video.js
let curW = obj.videoWidth(),
curH = obj.videoHeight(),
newW = curW,
newH = curH;
// Resize modal: clamp height first, scaling width proportionally
if ( curH > newMaxH ) {
newH = newMaxH;
//Scaled/Proportional Content
newW = curW*(newH/curH);
}
// Then clamp width, scaling height proportionally
if ( newW > newMaxW ) {
newW = newMaxW;
//Scaled/Proportional Content
newH = curH*(newW/curW);
}
obj.height( newH );
obj.width( newW );
// Mirror the width on the wrapper div so CSS can center the video
$vContainer.find( ' > div.video-js' ).css({
'width' : newW + 'px'
});
// Vertically center the video area (the 50px offset is presumably for
// surrounding chrome/controls — TODO confirm)
let mt = parseFloat( windowHeight - newH )/2 - 50;
$vContainer.css({
'transform' : 'translateY('+ mt +'px)'
});
// Reveal the video wrapper once sized
displayVC();
}
initVideo( this );
/* --------- Video Modal initialize */
// Re-run sizing when real metadata arrives (dimensions may differ from
// the values available at ready time).
this.on( 'loadedmetadata', function() {
initVideo( this );
// Mark that a player instance has been created for this container.
$vContainer.data( 'video-player-init', 1 );
});
/* --------- Set, tell the player it's in fullscreen */
//this.exitFullscreen();
//this.requestFullscreen();
this.play();
/* --------- Disable control bar play button click */
//this.controls( false );
/* --------- Display video playback progress (currently debug-only) */
this.on( 'timeupdate', function() {
let duration = this.duration(),
progressAmount = '0%';
// Guard against duration 0/NaN before metadata is loaded
if (duration > 0) {
progressAmount = ((this.currentTime() / duration) * 100) + "%";
}
//console.log( progressAmount );
});
/* --------- Callback for when a video has ended */
this.on( 'ended', function() {
//console.log( 'video is done!' );
});
});
/* --------- Display the wrapper of video */
if ( myPlayerInit === 1 ) {
displayVC();
}
/* --------- Close the modal */
$( document ).off( 'click.VIDEOS_CLOSE' ).on( 'click.VIDEOS_CLOSE', '.uix-modal-box [data-modal-close-trigger], .uix-modal-mask:not(.js-uix-disabled)', function() {
myPlayer.ready(function() {
this.pause();
});
});
});
};
module.components.documentReady.push( module.VIDEOS.documentReady );
// Public handle returned by the module IIFE: instances expose the shared
// module object for external inspection.
return class VIDEOS {
constructor() {
this.module = module;
}
};
})( UixModuleInstance, jQuery, window, document );
|
# sunspot/spec/api/indexer/attributes_spec.rb
require File.expand_path('spec_helper', File.dirname(__FILE__))
require 'bigdecimal'
# Specs for Sunspot's attribute-field indexing: each example indexes an object
# through `session` and asserts on the Solr add call captured by `connection`.
# (Helpers `post`, `session`, `connection` and matcher `have_add_with` come
# from spec_helper.)
describe 'indexing attribute fields', :type => :indexer do
  it 'should correctly index a stored string attribute field' do
    session.index(post(:title => 'A Title'))
    expect(connection).to have_add_with(:title_ss => 'A Title')
  end

  it 'should correctly index an integer attribute field' do
    session.index(post(:blog_id => 4))
    expect(connection).to have_add_with(:blog_id_i => '4')
  end

  it 'should correctly index a long attribute field' do
    session.index(Namespaced::Comment.new(:hash => 2**30))
    expect(connection).to have_add_with(:hash_l => '1073741824')
  end

  it 'should correctly index a float attribute field' do
    session.index(post(:ratings_average => 2.23))
    expect(connection).to have_add_with(:average_rating_ft => '2.23')
  end

  it 'should correctly index a double attribute field' do
    session.index(Namespaced::Comment.new(:average_rating => 2.23))
    expect(connection).to have_add_with(:average_rating_e => '2.23')
  end

  it 'should correctly index a trie integer attribute field' do
    session.index(Photo.new(:size => 104856))
    expect(connection).to have_add_with(:size_it => '104856')
  end

  it 'should correctly index a trie float attribute field' do
    session.index(Photo.new(:average_rating => 2.23))
    expect(connection).to have_add_with(:average_rating_ft => '2.23')
  end

  it 'should correctly index a trie time attribute field' do
    session.index(Photo.new(:created_at => Time.parse('2009-12-16 15:00:00 -0400')))
    expect(connection).to have_add_with(:created_at_dt => '2009-12-16T19:00:00Z')
  end

  it 'should allow indexing by a multiple-value field' do
    session.index(post(:category_ids => [3, 14]))
    expect(connection).to have_add_with(:category_ids_im => ['3', '14'])
  end

  it 'should not index a single-value field with newlines as multiple' do
    session.index(post(:title => "Multi\nLine"))
    expect(connection.adds.last.first.field_by_name(:title_ss).value).to eq("Multi\nLine")
  end

  # Times are normalized to UTC before indexing.
  it 'should correctly index a time field' do
    session.index(
      post(:published_at => Time.parse('1983-07-08 05:00:00 -0400'))
    )
    expect(connection).to have_add_with(:published_at_dt => '1983-07-08T09:00:00Z')
  end

  it 'should correctly index a time field that\'s after 32-bit Y2K' do
    session.index(
      post(:published_at => DateTime.parse('2050-07-08 05:00:00 -0400'))
    )
    expect(connection).to have_add_with(:published_at_dt => '2050-07-08T09:00:00Z')
  end

  it 'should correctly index a date field' do
    session.index(post(:expire_date => Date.new(2009, 07, 13)))
    expect(connection).to have_add_with(:expire_date_d => '2009-07-13T00:00:00Z')
  end

  it 'should correctly index a date range field' do
    session.index(post(:featured_for => Date.new(2009, 07, 13)..Date.new(2009, 12, 25)))
    expect(connection).to have_add_with(:featured_for_dr => '[2009-07-13T00:00:00Z TO 2009-12-25T00:00:00Z]')
  end

  it 'should correctly index a boolean field' do
    session.index(post(:featured => true))
    expect(connection).to have_add_with(:featured_bs => 'true')
  end

  it 'should correctly index a false boolean field' do
    session.index(post(:featured => false))
    expect(connection).to have_add_with(:featured_bs => 'false')
  end

  it 'should not index a nil boolean field' do
    session.index(post)
    expect(connection).not_to have_add_with(:featured_bs)
  end

  # Coordinates are indexed as a geohash string.
  it 'should index latitude and longitude as a pair' do
    session.index(post(:coordinates => Sunspot::Util::Coordinates.new(40.7, -73.5)))
    expect(connection).to have_add_with(:coordinates_s => 'dr5xx3nytvgs')
  end

  it 'should index latitude and longitude passed as non-Floats' do
    coordinates = Sunspot::Util::Coordinates.new(
      BigDecimal('40.7'), BigDecimal('-73.5')
    )
    session.index(post(:coordinates => coordinates))
    expect(connection).to have_add_with(:coordinates_s => 'dr5xx3nytvgs')
  end

  it 'should correctly index an attribute field with block access' do
    session.index(post(:title => 'The Blog Post'))
    expect(connection).to have_add_with(:sort_title_s => 'blog post')
  end

  it 'should correctly index an attribute field with instance-external block access' do
    session.index(post(:category_ids => [1, 2, 3]))
    expect(connection).to have_add_with(:primary_category_id_i => '1')
  end

  it 'should correctly index a field that is defined on a superclass' do
    Sunspot.setup(SuperClass) { string :author_name }
    session.index(post(:author_name => 'Mat Brown'))
    expect(connection).to have_add_with(:author_name_s => 'Mat Brown')
  end

  it 'should throw a NoMethodError only if a nonexistent type is defined' do
    expect { Sunspot.setup(Post) { string :author_name } }.not_to raise_error
    expect { Sunspot.setup(Post) { bogus :journey } }.to raise_error(NoMethodError)
  end

  # NOTE(review): description previously said NoMethodError but the assertion
  # (correctly) expects ArgumentError for an unknown field option.
  it 'should throw an ArgumentError if a nonexistent field argument is passed' do
    expect { Sunspot.setup(Post) { string :author_name, :bogus => :argument } }.to raise_error(ArgumentError)
  end

  it 'should throw an ArgumentError if single-value field tries to index multiple values' do
    expect do
      Sunspot.setup(Post) { string :author_name }
      session.index(post(:author_name => ['Mat Brown', 'John Smith']))
    end.to raise_error(ArgumentError)
  end

  it 'should throw an ArgumentError if specifying more_like_this on type that does not support it' do
    expect do
      Sunspot.setup(Post) { integer :popularity, :more_like_this => true }
    end.to raise_error(ArgumentError)
  end

  it 'should use a specified field name when the :as option is set' do
    session.index(post(:title => 'A Title'))
    expect(connection).to have_add_with(:legacy_field_s => 'legacy A Title')
  end

  it 'should use a specified field name when the :as option is set for array values' do
    session.index(post(:title => 'Another Title'))
    expect(connection).to have_add_with(:legacy_array_field_sm => ['first string', 'second string'])
  end
end
|
# Build pipeline for the noweb literate program 2015PH10813.nw.
# Weave the documentation into LaTeX (-x: cross-referencing; -delay:
# delay markup until after the first documentation chunk).
noweave -x -delay 2015PH10813.nw > output.tex
# Tangle each named root chunk (-R) into its own SML source file.
notangle -RbintreeSignature-complete 2015PH10813.nw > bintreeSignature-complete.sml
notangle -RbintreeStructure-complete 2015PH10813.nw > bintreeStructure-complete.sml
notangle -RtestCase-complete 2015PH10813.nw > testCase-complete.sml
# Typeset the woven documentation.
pdflatex output.tex
// Repository: MrinAK/Spring-Boot
package eu.itdc.internetprovider.service;
import eu.itdc.internetprovider.persistence.entity.Product;
import eu.itdc.internetprovider.persistence.repository.ProductRepository;
import eu.itdc.internetprovider.service.dto.ProductDTO;
import eu.itdc.internetprovider.service.exception.ResourceNotFound;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import java.util.List;
import java.util.stream.Collectors;
/**
 * Application service exposing CRUD-style operations over {@code Product}
 * entities, translated to and from {@code ProductDTO}s.
 *
 * <p>Refactored to remove triplicated logic: the entity-to-DTO mapping and
 * the "find or throw ResourceNotFound" lookup each appeared three times and
 * are now private helpers. Public API is unchanged.
 */
@Service
public class ProductService {

    private final ProductRepository productRepository;
    private final AuthenticationFacade authenticationFacade;

    @Autowired
    public ProductService(ProductRepository productRepository, AuthenticationFacade authenticationFacade) {
        this.productRepository = productRepository;
        this.authenticationFacade = authenticationFacade;
    }

    /** Returns all products mapped to DTOs (empty list when none exist). */
    public List<ProductDTO> findAll() {
        return productRepository.findAll()
                .stream()
                .map(this::toDto)
                .collect(Collectors.toList());
    }

    /** Creates and persists a new product, stamped with the current authentication. */
    public void create(ProductDTO productDTO) {
        Product product = Product.create(productDTO.getName(),
                productDTO.getFee(),
                productDTO.getBandwidth(),
                authenticationFacade.getAuthentication());
        productRepository.save(product);
    }

    /**
     * Looks up a single product by id.
     *
     * @throws ResourceNotFound if no product with the given id exists
     */
    public ProductDTO findById(Long productId) {
        return toDto(getOrThrow(productId));
    }

    /**
     * Deletes a product by id. Delegates to {@code Product.delete()} and then
     * re-saves the entity — presumably a soft delete via a status flag
     * (TODO confirm against the Product entity).
     *
     * @throws ResourceNotFound if no product with the given id exists
     */
    @Transactional
    public void deleteById(Long productId) {
        Product product = getOrThrow(productId);
        product.delete();
        productRepository.save(product);
    }

    /**
     * Updates an existing product in place from the given DTO and returns the
     * updated state. A transient entity is built only to carry the new values
     * into {@code existingProduct.update(...)}.
     *
     * @throws ResourceNotFound if no product with the given id exists
     */
    @Transactional
    public ProductDTO updateById(Long productId, ProductDTO productDTO) {
        Product existingProduct = getOrThrow(productId);
        Product updatedProduct = Product.create(productDTO.getName(),
                productDTO.getFee(),
                productDTO.getBandwidth(),
                authenticationFacade.getAuthentication());
        existingProduct.update(updatedProduct);
        productRepository.save(existingProduct);
        return toDto(existingProduct);
    }

    /** Fetches a product or throws ResourceNotFound with the standard message. */
    private Product getOrThrow(Long productId) {
        return productRepository.findById(productId)
                .orElseThrow(() -> new ResourceNotFound(String.format("Product with Id %d doesn't exist", productId)));
    }

    /** Maps a Product entity to its transport representation. */
    private ProductDTO toDto(Product product) {
        return new ProductDTO(product.getId(),
                product.getName(),
                product.getFee(),
                product.getBandwidth(),
                product.getStatus().name());
    }
}
|
// Repository: cookiexyx/vue-mobile-template
'use strict';
exports.__esModule = true;
var _utils = require('../utils');
exports.default = {
computed: {
$t: function $t() {
var name = this.$options.name;
var prefix = name ? (0, _utils.camelize)(name) + '.' : '';
var messages = this.$vantMessages[this.$vantLang];
return function (path) {
for (var _len = arguments.length, args = Array(_len > 1 ? _len - 1 : 0), _key = 1; _key < _len; _key++) {
args[_key - 1] = arguments[_key];
}
var message = (0, _utils.get)(messages, prefix + path) || (0, _utils.get)(messages, path);
return typeof message === 'function' ? message.apply(null, args) : message;
};
}
}
}; // component mixin |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.